Diffstat (limited to 'nacl/crypto_stream/aes128ctr/core2')
-rw-r--r--  nacl/crypto_stream/aes128ctr/core2/afternm.s      12308
-rw-r--r--  nacl/crypto_stream/aes128ctr/core2/api.h              3
-rw-r--r--  nacl/crypto_stream/aes128ctr/core2/beforenm.s     13694
-rw-r--r--  nacl/crypto_stream/aes128ctr/core2/stream.c          14
-rw-r--r--  nacl/crypto_stream/aes128ctr/core2/xor.c             15
-rw-r--r--  nacl/crypto_stream/aes128ctr/core2/xor_afternm.s  12407
6 files changed, 38441 insertions, 0 deletions
diff --git a/nacl/crypto_stream/aes128ctr/core2/afternm.s b/nacl/crypto_stream/aes128ctr/core2/afternm.s
new file mode 100644
index 00000000..c1ba79ef
--- /dev/null
+++ b/nacl/crypto_stream/aes128ctr/core2/afternm.s
@@ -0,0 +1,12308 @@
# Author: Emilia Käsper and Peter Schwabe
# Date: 2009-03-19
# +2010.01.31: minor namespace modifications
# Public domain

.data
.p2align 6

RCON: .int 0x00000000, 0x00000000, 0x00000000, 0xffffffff
ROTB: .int 0x0c000000, 0x00000000, 0x04000000, 0x08000000
EXPB0: .int 0x03030303, 0x07070707, 0x0b0b0b0b, 0x0f0f0f0f
CTRINC1: .int 0x00000001, 0x00000000, 0x00000000, 0x00000000
CTRINC2: .int 0x00000002, 0x00000000, 0x00000000, 0x00000000
CTRINC3: .int 0x00000003, 0x00000000, 0x00000000, 0x00000000
CTRINC4: .int 0x00000004, 0x00000000, 0x00000000, 0x00000000
CTRINC5: .int 0x00000005, 0x00000000, 0x00000000, 0x00000000
CTRINC6: .int 0x00000006, 0x00000000, 0x00000000, 0x00000000
CTRINC7: .int 0x00000007, 0x00000000, 0x00000000, 0x00000000
RCTRINC1: .int 0x00000000, 0x00000000, 0x00000000, 0x00000001
RCTRINC2: .int 0x00000000, 0x00000000, 0x00000000, 0x00000002
RCTRINC3: .int 0x00000000, 0x00000000, 0x00000000, 0x00000003
RCTRINC4: .int 0x00000000, 0x00000000, 0x00000000, 0x00000004
RCTRINC5: .int 0x00000000, 0x00000000, 0x00000000, 0x00000005
RCTRINC6: .int 0x00000000, 0x00000000, 0x00000000, 0x00000006
RCTRINC7: .int 0x00000000, 0x00000000, 0x00000000, 0x00000007

SWAP32: .int 0x00010203, 0x04050607, 0x08090a0b, 0x0c0d0e0f
M0SWAP: .quad 0x0105090d0004080c , 0x03070b0f02060a0e

BS0: .quad 0x5555555555555555, 0x5555555555555555
BS1: .quad 0x3333333333333333, 0x3333333333333333
BS2: .quad 0x0f0f0f0f0f0f0f0f, 0x0f0f0f0f0f0f0f0f
ONE: .quad 0xffffffffffffffff, 0xffffffffffffffff
M0: .quad 0x02060a0e03070b0f, 0x0004080c0105090d
SRM0: .quad 0x0304090e00050a0f, 0x01060b0c0207080d
SR: .quad 0x0504070600030201, 0x0f0e0d0c0a09080b
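# Note on the tables above: CTRINC1-7 and RCTRINC1-7 hold the increments
# 1..7 added to copies of the counter block so that eight consecutive
# counter values can be processed per iteration (the RCTRINC variants
# apply to the byte-reversed copy produced by the SWAP32 shuffle below).
# BS0/BS1/BS2 are the 0x55/0x33/0x0f masks of the bitslicing transform;
# M0, M0SWAP, SR and SRM0 are pshufb byte-shuffle masks for the bitsliced
# data layout and ShiftRows; RCON, ROTB and EXPB0 appear to be
# key-expansion constants from the data section shared with beforenm.s.
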
# qhasm: int64 outp

# qhasm: int64 len

# qhasm: int64 np

# qhasm: int64 c

# qhasm: input outp

# qhasm: input len

# qhasm: input np

# qhasm: input c

# qhasm: int64 lensav

# qhasm: int6464 xmm0

# qhasm: int6464 xmm1

# qhasm: int6464 xmm2

# qhasm: int6464 xmm3

# qhasm: int6464 xmm4

# qhasm: int6464 xmm5

# qhasm: int6464 xmm6

# qhasm: int6464 xmm7

# qhasm: int6464 xmm8

# qhasm: int6464 xmm9

# qhasm: int6464 xmm10

# qhasm: int6464 xmm11

# qhasm: int6464 xmm12

# qhasm: int6464 xmm13

# qhasm: int6464 xmm14

# qhasm: int6464 xmm15

# qhasm: int6464 t

# qhasm: stack1024 bl

# qhasm: stack128 nonce_stack

# qhasm: int64 blp

# qhasm: int64 b

# qhasm: int64 tmp

# qhasm: enter crypto_stream_aes128ctr_core2_afternm
.text
.p2align 5
.globl _crypto_stream_aes128ctr_core2_afternm
.globl crypto_stream_aes128ctr_core2_afternm
_crypto_stream_aes128ctr_core2_afternm:
crypto_stream_aes128ctr_core2_afternm:
mov %rsp,%r11
and $31,%r11
add $160,%r11
sub %r11,%rsp

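# Prologue: reserve at least 160 bytes of scratch space and round %rsp
# down to a 32-byte boundary (160 is a multiple of 32); %r11 keeps the
# total adjustment so the stack pointer can be restored on exit.
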
# qhasm: xmm0 = *(int128 *) (np + 0)
# asm 1: movdqa 0(<np=int64#3),>xmm0=int6464#1
# asm 2: movdqa 0(<np=%rdx),>xmm0=%xmm0
movdqa 0(%rdx),%xmm0

# qhasm: nonce_stack = xmm0
# asm 1: movdqa <xmm0=int6464#1,>nonce_stack=stack128#1
# asm 2: movdqa <xmm0=%xmm0,>nonce_stack=0(%rsp)
movdqa %xmm0,0(%rsp)

# qhasm: np = &nonce_stack
# asm 1: leaq <nonce_stack=stack128#1,>np=int64#3
# asm 2: leaq <nonce_stack=0(%rsp),>np=%rdx
leaq 0(%rsp),%rdx

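# Main loop: each pass through ._enc_block encrypts the current counter
# block and its seven successors bitsliced in parallel, yielding 128
# bytes of keystream per iteration.
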
# qhasm: enc_block:
._enc_block:

# qhasm: xmm0 = *(int128 *) (np + 0)
# asm 1: movdqa 0(<np=int64#3),>xmm0=int6464#1
# asm 2: movdqa 0(<np=%rdx),>xmm0=%xmm0
movdqa 0(%rdx),%xmm0

# qhasm: xmm1 = xmm0
# asm 1: movdqa <xmm0=int6464#1,>xmm1=int6464#2
# asm 2: movdqa <xmm0=%xmm0,>xmm1=%xmm1
movdqa %xmm0,%xmm1

# qhasm: shuffle bytes of xmm1 by SWAP32
# asm 1: pshufb SWAP32,<xmm1=int6464#2
# asm 2: pshufb SWAP32,<xmm1=%xmm1
pshufb SWAP32,%xmm1

# qhasm: xmm2 = xmm1
# asm 1: movdqa <xmm1=int6464#2,>xmm2=int6464#3
# asm 2: movdqa <xmm1=%xmm1,>xmm2=%xmm2
movdqa %xmm1,%xmm2

# qhasm: xmm3 = xmm1
# asm 1: movdqa <xmm1=int6464#2,>xmm3=int6464#4
# asm 2: movdqa <xmm1=%xmm1,>xmm3=%xmm3
movdqa %xmm1,%xmm3

# qhasm: xmm4 = xmm1
# asm 1: movdqa <xmm1=int6464#2,>xmm4=int6464#5
# asm 2: movdqa <xmm1=%xmm1,>xmm4=%xmm4
movdqa %xmm1,%xmm4

# qhasm: xmm5 = xmm1
# asm 1: movdqa <xmm1=int6464#2,>xmm5=int6464#6
# asm 2: movdqa <xmm1=%xmm1,>xmm5=%xmm5
movdqa %xmm1,%xmm5

# qhasm: xmm6 = xmm1
# asm 1: movdqa <xmm1=int6464#2,>xmm6=int6464#7
# asm 2: movdqa <xmm1=%xmm1,>xmm6=%xmm6
movdqa %xmm1,%xmm6

# qhasm: xmm7 = xmm1
# asm 1: movdqa <xmm1=int6464#2,>xmm7=int6464#8
# asm 2: movdqa <xmm1=%xmm1,>xmm7=%xmm7
movdqa %xmm1,%xmm7

# qhasm: int32323232 xmm1 += RCTRINC1
# asm 1: paddd RCTRINC1,<xmm1=int6464#2
# asm 2: paddd RCTRINC1,<xmm1=%xmm1
paddd RCTRINC1,%xmm1

# qhasm: int32323232 xmm2 += RCTRINC2
# asm 1: paddd RCTRINC2,<xmm2=int6464#3
# asm 2: paddd RCTRINC2,<xmm2=%xmm2
paddd RCTRINC2,%xmm2

# qhasm: int32323232 xmm3 += RCTRINC3
# asm 1: paddd RCTRINC3,<xmm3=int6464#4
# asm 2: paddd RCTRINC3,<xmm3=%xmm3
paddd RCTRINC3,%xmm3

# qhasm: int32323232 xmm4 += RCTRINC4
# asm 1: paddd RCTRINC4,<xmm4=int6464#5
# asm 2: paddd RCTRINC4,<xmm4=%xmm4
paddd RCTRINC4,%xmm4

# qhasm: int32323232 xmm5 += RCTRINC5
# asm 1: paddd RCTRINC5,<xmm5=int6464#6
# asm 2: paddd RCTRINC5,<xmm5=%xmm5
paddd RCTRINC5,%xmm5

# qhasm: int32323232 xmm6 += RCTRINC6
# asm 1: paddd RCTRINC6,<xmm6=int6464#7
# asm 2: paddd RCTRINC6,<xmm6=%xmm6
paddd RCTRINC6,%xmm6

# qhasm: int32323232 xmm7 += RCTRINC7
# asm 1: paddd RCTRINC7,<xmm7=int6464#8
# asm 2: paddd RCTRINC7,<xmm7=%xmm7
paddd RCTRINC7,%xmm7

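# xmm1-xmm7 now hold the byte-reversed counter plus 1..7; xmm0 still
# holds it in the original byte order, which the differing M0 / M0SWAP
# shuffles below account for.
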
# qhasm: shuffle bytes of xmm0 by M0
# asm 1: pshufb M0,<xmm0=int6464#1
# asm 2: pshufb M0,<xmm0=%xmm0
pshufb M0,%xmm0

# qhasm: shuffle bytes of xmm1 by M0SWAP
# asm 1: pshufb M0SWAP,<xmm1=int6464#2
# asm 2: pshufb M0SWAP,<xmm1=%xmm1
pshufb M0SWAP,%xmm1

# qhasm: shuffle bytes of xmm2 by M0SWAP
# asm 1: pshufb M0SWAP,<xmm2=int6464#3
# asm 2: pshufb M0SWAP,<xmm2=%xmm2
pshufb M0SWAP,%xmm2

# qhasm: shuffle bytes of xmm3 by M0SWAP
# asm 1: pshufb M0SWAP,<xmm3=int6464#4
# asm 2: pshufb M0SWAP,<xmm3=%xmm3
pshufb M0SWAP,%xmm3

# qhasm: shuffle bytes of xmm4 by M0SWAP
# asm 1: pshufb M0SWAP,<xmm4=int6464#5
# asm 2: pshufb M0SWAP,<xmm4=%xmm4
pshufb M0SWAP,%xmm4

# qhasm: shuffle bytes of xmm5 by M0SWAP
# asm 1: pshufb M0SWAP,<xmm5=int6464#6
# asm 2: pshufb M0SWAP,<xmm5=%xmm5
pshufb M0SWAP,%xmm5

# qhasm: shuffle bytes of xmm6 by M0SWAP
# asm 1: pshufb M0SWAP,<xmm6=int6464#7
# asm 2: pshufb M0SWAP,<xmm6=%xmm6
pshufb M0SWAP,%xmm6

# qhasm: shuffle bytes of xmm7 by M0SWAP
# asm 1: pshufb M0SWAP,<xmm7=int6464#8
# asm 2: pshufb M0SWAP,<xmm7=%xmm7
pshufb M0SWAP,%xmm7

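# The pattern repeated below is the "swapmove" bit-matrix transpose:
# for a register pair (a,b), shift n and mask M it computes
#   t = ((b >> n) ^ a) & M;  a ^= t;  b ^= t << n
# with n=1/BS0, n=2/BS1 and n=4/BS2, after which xmm0-xmm7 hold the
# eight blocks in bitsliced form (register i collecting bit i of each
# byte).
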
# qhasm: xmm8 = xmm6
# asm 1: movdqa <xmm6=int6464#7,>xmm8=int6464#9
# asm 2: movdqa <xmm6=%xmm6,>xmm8=%xmm8
movdqa %xmm6,%xmm8

# qhasm: uint6464 xmm8 >>= 1
# asm 1: psrlq $1,<xmm8=int6464#9
# asm 2: psrlq $1,<xmm8=%xmm8
psrlq $1,%xmm8

# qhasm: xmm8 ^= xmm7
# asm 1: pxor <xmm7=int6464#8,<xmm8=int6464#9
# asm 2: pxor <xmm7=%xmm7,<xmm8=%xmm8
pxor %xmm7,%xmm8

# qhasm: xmm8 &= BS0
# asm 1: pand BS0,<xmm8=int6464#9
# asm 2: pand BS0,<xmm8=%xmm8
pand BS0,%xmm8

# qhasm: xmm7 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm7=int6464#8
# asm 2: pxor <xmm8=%xmm8,<xmm7=%xmm7
pxor %xmm8,%xmm7

# qhasm: uint6464 xmm8 <<= 1
# asm 1: psllq $1,<xmm8=int6464#9
# asm 2: psllq $1,<xmm8=%xmm8
psllq $1,%xmm8

# qhasm: xmm6 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm6=int6464#7
# asm 2: pxor <xmm8=%xmm8,<xmm6=%xmm6
pxor %xmm8,%xmm6

# qhasm: xmm8 = xmm4
# asm 1: movdqa <xmm4=int6464#5,>xmm8=int6464#9
# asm 2: movdqa <xmm4=%xmm4,>xmm8=%xmm8
movdqa %xmm4,%xmm8

# qhasm: uint6464 xmm8 >>= 1
# asm 1: psrlq $1,<xmm8=int6464#9
# asm 2: psrlq $1,<xmm8=%xmm8
psrlq $1,%xmm8

# qhasm: xmm8 ^= xmm5
# asm 1: pxor <xmm5=int6464#6,<xmm8=int6464#9
# asm 2: pxor <xmm5=%xmm5,<xmm8=%xmm8
pxor %xmm5,%xmm8

# qhasm: xmm8 &= BS0
# asm 1: pand BS0,<xmm8=int6464#9
# asm 2: pand BS0,<xmm8=%xmm8
pand BS0,%xmm8

# qhasm: xmm5 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm5=int6464#6
# asm 2: pxor <xmm8=%xmm8,<xmm5=%xmm5
pxor %xmm8,%xmm5

# qhasm: uint6464 xmm8 <<= 1
# asm 1: psllq $1,<xmm8=int6464#9
# asm 2: psllq $1,<xmm8=%xmm8
psllq $1,%xmm8

# qhasm: xmm4 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#5
# asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm4
pxor %xmm8,%xmm4

# qhasm: xmm8 = xmm2
# asm 1: movdqa <xmm2=int6464#3,>xmm8=int6464#9
# asm 2: movdqa <xmm2=%xmm2,>xmm8=%xmm8
movdqa %xmm2,%xmm8

# qhasm: uint6464 xmm8 >>= 1
# asm 1: psrlq $1,<xmm8=int6464#9
# asm 2: psrlq $1,<xmm8=%xmm8
psrlq $1,%xmm8

# qhasm: xmm8 ^= xmm3
# asm 1: pxor <xmm3=int6464#4,<xmm8=int6464#9
# asm 2: pxor <xmm3=%xmm3,<xmm8=%xmm8
pxor %xmm3,%xmm8

# qhasm: xmm8 &= BS0
# asm 1: pand BS0,<xmm8=int6464#9
# asm 2: pand BS0,<xmm8=%xmm8
pand BS0,%xmm8

# qhasm: xmm3 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm3=int6464#4
# asm 2: pxor <xmm8=%xmm8,<xmm3=%xmm3
pxor %xmm8,%xmm3

# qhasm: uint6464 xmm8 <<= 1
# asm 1: psllq $1,<xmm8=int6464#9
# asm 2: psllq $1,<xmm8=%xmm8
psllq $1,%xmm8

# qhasm: xmm2 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm2=int6464#3
# asm 2: pxor <xmm8=%xmm8,<xmm2=%xmm2
pxor %xmm8,%xmm2

# qhasm: xmm8 = xmm0
# asm 1: movdqa <xmm0=int6464#1,>xmm8=int6464#9
# asm 2: movdqa <xmm0=%xmm0,>xmm8=%xmm8
movdqa %xmm0,%xmm8

# qhasm: uint6464 xmm8 >>= 1
# asm 1: psrlq $1,<xmm8=int6464#9
# asm 2: psrlq $1,<xmm8=%xmm8
psrlq $1,%xmm8

# qhasm: xmm8 ^= xmm1
# asm 1: pxor <xmm1=int6464#2,<xmm8=int6464#9
# asm 2: pxor <xmm1=%xmm1,<xmm8=%xmm8
pxor %xmm1,%xmm8

# qhasm: xmm8 &= BS0
# asm 1: pand BS0,<xmm8=int6464#9
# asm 2: pand BS0,<xmm8=%xmm8
pand BS0,%xmm8

# qhasm: xmm1 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm1=int6464#2
# asm 2: pxor <xmm8=%xmm8,<xmm1=%xmm1
pxor %xmm8,%xmm1

# qhasm: uint6464 xmm8 <<= 1
# asm 1: psllq $1,<xmm8=int6464#9
# asm 2: psllq $1,<xmm8=%xmm8
psllq $1,%xmm8

# qhasm: xmm0 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
pxor %xmm8,%xmm0

# qhasm: xmm8 = xmm5
# asm 1: movdqa <xmm5=int6464#6,>xmm8=int6464#9
# asm 2: movdqa <xmm5=%xmm5,>xmm8=%xmm8
movdqa %xmm5,%xmm8

# qhasm: uint6464 xmm8 >>= 2
# asm 1: psrlq $2,<xmm8=int6464#9
# asm 2: psrlq $2,<xmm8=%xmm8
psrlq $2,%xmm8

# qhasm: xmm8 ^= xmm7
# asm 1: pxor <xmm7=int6464#8,<xmm8=int6464#9
# asm 2: pxor <xmm7=%xmm7,<xmm8=%xmm8
pxor %xmm7,%xmm8

# qhasm: xmm8 &= BS1
# asm 1: pand BS1,<xmm8=int6464#9
# asm 2: pand BS1,<xmm8=%xmm8
pand BS1,%xmm8

# qhasm: xmm7 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm7=int6464#8
# asm 2: pxor <xmm8=%xmm8,<xmm7=%xmm7
pxor %xmm8,%xmm7

# qhasm: uint6464 xmm8 <<= 2
# asm 1: psllq $2,<xmm8=int6464#9
# asm 2: psllq $2,<xmm8=%xmm8
psllq $2,%xmm8

# qhasm: xmm5 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm5=int6464#6
# asm 2: pxor <xmm8=%xmm8,<xmm5=%xmm5
pxor %xmm8,%xmm5

# qhasm: xmm8 = xmm4
# asm 1: movdqa <xmm4=int6464#5,>xmm8=int6464#9
# asm 2: movdqa <xmm4=%xmm4,>xmm8=%xmm8
movdqa %xmm4,%xmm8

# qhasm: uint6464 xmm8 >>= 2
# asm 1: psrlq $2,<xmm8=int6464#9
# asm 2: psrlq $2,<xmm8=%xmm8
psrlq $2,%xmm8

# qhasm: xmm8 ^= xmm6
# asm 1: pxor <xmm6=int6464#7,<xmm8=int6464#9
# asm 2: pxor <xmm6=%xmm6,<xmm8=%xmm8
pxor %xmm6,%xmm8

# qhasm: xmm8 &= BS1
# asm 1: pand BS1,<xmm8=int6464#9
# asm 2: pand BS1,<xmm8=%xmm8
pand BS1,%xmm8

# qhasm: xmm6 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm6=int6464#7
# asm 2: pxor <xmm8=%xmm8,<xmm6=%xmm6
pxor %xmm8,%xmm6

# qhasm: uint6464 xmm8 <<= 2
# asm 1: psllq $2,<xmm8=int6464#9
# asm 2: psllq $2,<xmm8=%xmm8
psllq $2,%xmm8

# qhasm: xmm4 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#5
# asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm4
pxor %xmm8,%xmm4

# qhasm: xmm8 = xmm1
# asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#9
# asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm8
movdqa %xmm1,%xmm8

# qhasm: uint6464 xmm8 >>= 2
# asm 1: psrlq $2,<xmm8=int6464#9
# asm 2: psrlq $2,<xmm8=%xmm8
psrlq $2,%xmm8

# qhasm: xmm8 ^= xmm3
# asm 1: pxor <xmm3=int6464#4,<xmm8=int6464#9
# asm 2: pxor <xmm3=%xmm3,<xmm8=%xmm8
pxor %xmm3,%xmm8

# qhasm: xmm8 &= BS1
# asm 1: pand BS1,<xmm8=int6464#9
# asm 2: pand BS1,<xmm8=%xmm8
pand BS1,%xmm8

# qhasm: xmm3 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm3=int6464#4
# asm 2: pxor <xmm8=%xmm8,<xmm3=%xmm3
pxor %xmm8,%xmm3

# qhasm: uint6464 xmm8 <<= 2
# asm 1: psllq $2,<xmm8=int6464#9
# asm 2: psllq $2,<xmm8=%xmm8
psllq $2,%xmm8

# qhasm: xmm1 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm1=int6464#2
# asm 2: pxor <xmm8=%xmm8,<xmm1=%xmm1
pxor %xmm8,%xmm1

# qhasm: xmm8 = xmm0
# asm 1: movdqa <xmm0=int6464#1,>xmm8=int6464#9
# asm 2: movdqa <xmm0=%xmm0,>xmm8=%xmm8
movdqa %xmm0,%xmm8

# qhasm: uint6464 xmm8 >>= 2
# asm 1: psrlq $2,<xmm8=int6464#9
# asm 2: psrlq $2,<xmm8=%xmm8
psrlq $2,%xmm8

# qhasm: xmm8 ^= xmm2
# asm 1: pxor <xmm2=int6464#3,<xmm8=int6464#9
# asm 2: pxor <xmm2=%xmm2,<xmm8=%xmm8
pxor %xmm2,%xmm8

# qhasm: xmm8 &= BS1
# asm 1: pand BS1,<xmm8=int6464#9
# asm 2: pand BS1,<xmm8=%xmm8
pand BS1,%xmm8

# qhasm: xmm2 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm2=int6464#3
# asm 2: pxor <xmm8=%xmm8,<xmm2=%xmm2
pxor %xmm8,%xmm2

# qhasm: uint6464 xmm8 <<= 2
# asm 1: psllq $2,<xmm8=int6464#9
# asm 2: psllq $2,<xmm8=%xmm8
psllq $2,%xmm8

# qhasm: xmm0 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
pxor %xmm8,%xmm0

# qhasm: xmm8 = xmm3
# asm 1: movdqa <xmm3=int6464#4,>xmm8=int6464#9
# asm 2: movdqa <xmm3=%xmm3,>xmm8=%xmm8
movdqa %xmm3,%xmm8

# qhasm: uint6464 xmm8 >>= 4
# asm 1: psrlq $4,<xmm8=int6464#9
# asm 2: psrlq $4,<xmm8=%xmm8
psrlq $4,%xmm8

# qhasm: xmm8 ^= xmm7
# asm 1: pxor <xmm7=int6464#8,<xmm8=int6464#9
# asm 2: pxor <xmm7=%xmm7,<xmm8=%xmm8
pxor %xmm7,%xmm8

# qhasm: xmm8 &= BS2
# asm 1: pand BS2,<xmm8=int6464#9
# asm 2: pand BS2,<xmm8=%xmm8
pand BS2,%xmm8

# qhasm: xmm7 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm7=int6464#8
# asm 2: pxor <xmm8=%xmm8,<xmm7=%xmm7
pxor %xmm8,%xmm7

# qhasm: uint6464 xmm8 <<= 4
# asm 1: psllq $4,<xmm8=int6464#9
# asm 2: psllq $4,<xmm8=%xmm8
psllq $4,%xmm8

# qhasm: xmm3 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm3=int6464#4
# asm 2: pxor <xmm8=%xmm8,<xmm3=%xmm3
pxor %xmm8,%xmm3

# qhasm: xmm8 = xmm2
# asm 1: movdqa <xmm2=int6464#3,>xmm8=int6464#9
# asm 2: movdqa <xmm2=%xmm2,>xmm8=%xmm8
movdqa %xmm2,%xmm8

# qhasm: uint6464 xmm8 >>= 4
# asm 1: psrlq $4,<xmm8=int6464#9
# asm 2: psrlq $4,<xmm8=%xmm8
psrlq $4,%xmm8

# qhasm: xmm8 ^= xmm6
# asm 1: pxor <xmm6=int6464#7,<xmm8=int6464#9
# asm 2: pxor <xmm6=%xmm6,<xmm8=%xmm8
pxor %xmm6,%xmm8

# qhasm: xmm8 &= BS2
# asm 1: pand BS2,<xmm8=int6464#9
# asm 2: pand BS2,<xmm8=%xmm8
pand BS2,%xmm8

# qhasm: xmm6 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm6=int6464#7
# asm 2: pxor <xmm8=%xmm8,<xmm6=%xmm6
pxor %xmm8,%xmm6

# qhasm: uint6464 xmm8 <<= 4
# asm 1: psllq $4,<xmm8=int6464#9
# asm 2: psllq $4,<xmm8=%xmm8
psllq $4,%xmm8

# qhasm: xmm2 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm2=int6464#3
# asm 2: pxor <xmm8=%xmm8,<xmm2=%xmm2
pxor %xmm8,%xmm2

# qhasm: xmm8 = xmm1
# asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#9
# asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm8
movdqa %xmm1,%xmm8

# qhasm: uint6464 xmm8 >>= 4
# asm 1: psrlq $4,<xmm8=int6464#9
# asm 2: psrlq $4,<xmm8=%xmm8
psrlq $4,%xmm8

# qhasm: xmm8 ^= xmm5
# asm 1: pxor <xmm5=int6464#6,<xmm8=int6464#9
# asm 2: pxor <xmm5=%xmm5,<xmm8=%xmm8
pxor %xmm5,%xmm8

# qhasm: xmm8 &= BS2
# asm 1: pand BS2,<xmm8=int6464#9
# asm 2: pand BS2,<xmm8=%xmm8
pand BS2,%xmm8

# qhasm: xmm5 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm5=int6464#6
# asm 2: pxor <xmm8=%xmm8,<xmm5=%xmm5
pxor %xmm8,%xmm5

# qhasm: uint6464 xmm8 <<= 4
# asm 1: psllq $4,<xmm8=int6464#9
# asm 2: psllq $4,<xmm8=%xmm8
psllq $4,%xmm8

# qhasm: xmm1 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm1=int6464#2
# asm 2: pxor <xmm8=%xmm8,<xmm1=%xmm1
pxor %xmm8,%xmm1

# qhasm: xmm8 = xmm0
# asm 1: movdqa <xmm0=int6464#1,>xmm8=int6464#9
# asm 2: movdqa <xmm0=%xmm0,>xmm8=%xmm8
movdqa %xmm0,%xmm8

# qhasm: uint6464 xmm8 >>= 4
# asm 1: psrlq $4,<xmm8=int6464#9
# asm 2: psrlq $4,<xmm8=%xmm8
psrlq $4,%xmm8

# qhasm: xmm8 ^= xmm4
# asm 1: pxor <xmm4=int6464#5,<xmm8=int6464#9
# asm 2: pxor <xmm4=%xmm4,<xmm8=%xmm8
pxor %xmm4,%xmm8

# qhasm: xmm8 &= BS2
# asm 1: pand BS2,<xmm8=int6464#9
# asm 2: pand BS2,<xmm8=%xmm8
pand BS2,%xmm8

# qhasm: xmm4 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#5
# asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm4
pxor %xmm8,%xmm4

# qhasm: uint6464 xmm8 <<= 4
# asm 1: psllq $4,<xmm8=int6464#9
# asm 2: psllq $4,<xmm8=%xmm8
psllq $4,%xmm8

# qhasm: xmm0 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
pxor %xmm8,%xmm0

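# AddRoundKey and ShiftRows: XOR each state register with the first
# 128-byte bitsliced round key at c+0..c+112, then shuffle bytes by SR,
# which performs ShiftRows in this representation.
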
# qhasm: xmm0 ^= *(int128 *)(c + 0)
# asm 1: pxor 0(<c=int64#4),<xmm0=int6464#1
# asm 2: pxor 0(<c=%rcx),<xmm0=%xmm0
pxor 0(%rcx),%xmm0

# qhasm: shuffle bytes of xmm0 by SR
# asm 1: pshufb SR,<xmm0=int6464#1
# asm 2: pshufb SR,<xmm0=%xmm0
pshufb SR,%xmm0

# qhasm: xmm1 ^= *(int128 *)(c + 16)
# asm 1: pxor 16(<c=int64#4),<xmm1=int6464#2
# asm 2: pxor 16(<c=%rcx),<xmm1=%xmm1
pxor 16(%rcx),%xmm1

# qhasm: shuffle bytes of xmm1 by SR
# asm 1: pshufb SR,<xmm1=int6464#2
# asm 2: pshufb SR,<xmm1=%xmm1
pshufb SR,%xmm1

# qhasm: xmm2 ^= *(int128 *)(c + 32)
# asm 1: pxor 32(<c=int64#4),<xmm2=int6464#3
# asm 2: pxor 32(<c=%rcx),<xmm2=%xmm2
pxor 32(%rcx),%xmm2

# qhasm: shuffle bytes of xmm2 by SR
# asm 1: pshufb SR,<xmm2=int6464#3
# asm 2: pshufb SR,<xmm2=%xmm2
pshufb SR,%xmm2

# qhasm: xmm3 ^= *(int128 *)(c + 48)
# asm 1: pxor 48(<c=int64#4),<xmm3=int6464#4
# asm 2: pxor 48(<c=%rcx),<xmm3=%xmm3
pxor 48(%rcx),%xmm3

# qhasm: shuffle bytes of xmm3 by SR
# asm 1: pshufb SR,<xmm3=int6464#4
# asm 2: pshufb SR,<xmm3=%xmm3
pshufb SR,%xmm3

# qhasm: xmm4 ^= *(int128 *)(c + 64)
# asm 1: pxor 64(<c=int64#4),<xmm4=int6464#5
# asm 2: pxor 64(<c=%rcx),<xmm4=%xmm4
pxor 64(%rcx),%xmm4

# qhasm: shuffle bytes of xmm4 by SR
# asm 1: pshufb SR,<xmm4=int6464#5
# asm 2: pshufb SR,<xmm4=%xmm4
pshufb SR,%xmm4

# qhasm: xmm5 ^= *(int128 *)(c + 80)
# asm 1: pxor 80(<c=int64#4),<xmm5=int6464#6
# asm 2: pxor 80(<c=%rcx),<xmm5=%xmm5
pxor 80(%rcx),%xmm5

# qhasm: shuffle bytes of xmm5 by SR
# asm 1: pshufb SR,<xmm5=int6464#6
# asm 2: pshufb SR,<xmm5=%xmm5
pshufb SR,%xmm5

# qhasm: xmm6 ^= *(int128 *)(c + 96)
# asm 1: pxor 96(<c=int64#4),<xmm6=int6464#7
# asm 2: pxor 96(<c=%rcx),<xmm6=%xmm6
pxor 96(%rcx),%xmm6

# qhasm: shuffle bytes of xmm6 by SR
# asm 1: pshufb SR,<xmm6=int6464#7
# asm 2: pshufb SR,<xmm6=%xmm6
pshufb SR,%xmm6

# qhasm: xmm7 ^= *(int128 *)(c + 112)
# asm 1: pxor 112(<c=int64#4),<xmm7=int6464#8
# asm 2: pxor 112(<c=%rcx),<xmm7=%xmm7
pxor 112(%rcx),%xmm7

# qhasm: shuffle bytes of xmm7 by SR
# asm 1: pshufb SR,<xmm7=int6464#8
# asm 2: pshufb SR,<xmm7=%xmm7
pshufb SR,%xmm7

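# Bitsliced SubBytes: the long pxor/pand/por sequence below evaluates a
# Boolean circuit for the AES S-box on xmm0-xmm7, i.e. on all 128 state
# bytes of the eight blocks at once, using xmm8-xmm15 as temporaries.
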
# qhasm: xmm5 ^= xmm6
# asm 1: pxor <xmm6=int6464#7,<xmm5=int6464#6
# asm 2: pxor <xmm6=%xmm6,<xmm5=%xmm5
pxor %xmm6,%xmm5

# qhasm: xmm2 ^= xmm1
# asm 1: pxor <xmm1=int6464#2,<xmm2=int6464#3
# asm 2: pxor <xmm1=%xmm1,<xmm2=%xmm2
pxor %xmm1,%xmm2

# qhasm: xmm5 ^= xmm0
# asm 1: pxor <xmm0=int6464#1,<xmm5=int6464#6
# asm 2: pxor <xmm0=%xmm0,<xmm5=%xmm5
pxor %xmm0,%xmm5

# qhasm: xmm6 ^= xmm2
# asm 1: pxor <xmm2=int6464#3,<xmm6=int6464#7
# asm 2: pxor <xmm2=%xmm2,<xmm6=%xmm6
pxor %xmm2,%xmm6

# qhasm: xmm3 ^= xmm0
# asm 1: pxor <xmm0=int6464#1,<xmm3=int6464#4
# asm 2: pxor <xmm0=%xmm0,<xmm3=%xmm3
pxor %xmm0,%xmm3

# qhasm: xmm6 ^= xmm3
# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
pxor %xmm3,%xmm6

# qhasm: xmm3 ^= xmm7
# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4
# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3
pxor %xmm7,%xmm3

# qhasm: xmm3 ^= xmm4
# asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4
# asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3
pxor %xmm4,%xmm3

# qhasm: xmm7 ^= xmm5
# asm 1: pxor <xmm5=int6464#6,<xmm7=int6464#8
# asm 2: pxor <xmm5=%xmm5,<xmm7=%xmm7
pxor %xmm5,%xmm7

# qhasm: xmm3 ^= xmm1
# asm 1: pxor <xmm1=int6464#2,<xmm3=int6464#4
# asm 2: pxor <xmm1=%xmm1,<xmm3=%xmm3
pxor %xmm1,%xmm3

# qhasm: xmm4 ^= xmm5
# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
pxor %xmm5,%xmm4

# qhasm: xmm2 ^= xmm7
# asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3
# asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2
pxor %xmm7,%xmm2

# qhasm: xmm1 ^= xmm5
# asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2
# asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1
pxor %xmm5,%xmm1

# qhasm: xmm11 = xmm7
# asm 1: movdqa <xmm7=int6464#8,>xmm11=int6464#9
# asm 2: movdqa <xmm7=%xmm7,>xmm11=%xmm8
movdqa %xmm7,%xmm8

# qhasm: xmm10 = xmm1
# asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10
# asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9
movdqa %xmm1,%xmm9

# qhasm: xmm9 = xmm5
# asm 1: movdqa <xmm5=int6464#6,>xmm9=int6464#11
# asm 2: movdqa <xmm5=%xmm5,>xmm9=%xmm10
movdqa %xmm5,%xmm10

# qhasm: xmm13 = xmm2
# asm 1: movdqa <xmm2=int6464#3,>xmm13=int6464#12
# asm 2: movdqa <xmm2=%xmm2,>xmm13=%xmm11
movdqa %xmm2,%xmm11

# qhasm: xmm12 = xmm6
# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#13
# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm12
movdqa %xmm6,%xmm12

# qhasm: xmm11 ^= xmm4
# asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#9
# asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm8
pxor %xmm4,%xmm8

# qhasm: xmm10 ^= xmm2
# asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#10
# asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm9
pxor %xmm2,%xmm9

# qhasm: xmm9 ^= xmm3
# asm 1: pxor <xmm3=int6464#4,<xmm9=int6464#11
# asm 2: pxor <xmm3=%xmm3,<xmm9=%xmm10
pxor %xmm3,%xmm10

# qhasm: xmm13 ^= xmm4
# asm 1: pxor <xmm4=int6464#5,<xmm13=int6464#12
# asm 2: pxor <xmm4=%xmm4,<xmm13=%xmm11
pxor %xmm4,%xmm11

# qhasm: xmm12 ^= xmm0
# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13
# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12
pxor %xmm0,%xmm12

# qhasm: xmm14 = xmm11
# asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14
# asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13
movdqa %xmm8,%xmm13

# qhasm: xmm8 = xmm10
# asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15
# asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14
movdqa %xmm9,%xmm14

# qhasm: xmm15 = xmm11
# asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16
# asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15
movdqa %xmm8,%xmm15

# qhasm: xmm10 |= xmm9
# asm 1: por <xmm9=int6464#11,<xmm10=int6464#10
# asm 2: por <xmm9=%xmm10,<xmm10=%xmm9
por %xmm10,%xmm9

# qhasm: xmm11 |= xmm12
# asm 1: por <xmm12=int6464#13,<xmm11=int6464#9
# asm 2: por <xmm12=%xmm12,<xmm11=%xmm8
por %xmm12,%xmm8

# qhasm: xmm15 ^= xmm8
# asm 1: pxor <xmm8=int6464#15,<xmm15=int6464#16
# asm 2: pxor <xmm8=%xmm14,<xmm15=%xmm15
pxor %xmm14,%xmm15

# qhasm: xmm14 &= xmm12
# asm 1: pand <xmm12=int6464#13,<xmm14=int6464#14
# asm 2: pand <xmm12=%xmm12,<xmm14=%xmm13
pand %xmm12,%xmm13

# qhasm: xmm8 &= xmm9
# asm 1: pand <xmm9=int6464#11,<xmm8=int6464#15
# asm 2: pand <xmm9=%xmm10,<xmm8=%xmm14
pand %xmm10,%xmm14

# qhasm: xmm12 ^= xmm9
# asm 1: pxor <xmm9=int6464#11,<xmm12=int6464#13
# asm 2: pxor <xmm9=%xmm10,<xmm12=%xmm12
pxor %xmm10,%xmm12

# qhasm: xmm15 &= xmm12
# asm 1: pand <xmm12=int6464#13,<xmm15=int6464#16
# asm 2: pand <xmm12=%xmm12,<xmm15=%xmm15
pand %xmm12,%xmm15

# qhasm: xmm12 = xmm3
# asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#11
# asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm10
movdqa %xmm3,%xmm10

# qhasm: xmm12 ^= xmm0
# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#11
# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm10
pxor %xmm0,%xmm10

# qhasm: xmm13 &= xmm12
# asm 1: pand <xmm12=int6464#11,<xmm13=int6464#12
# asm 2: pand <xmm12=%xmm10,<xmm13=%xmm11
pand %xmm10,%xmm11

# qhasm: xmm11 ^= xmm13
# asm 1: pxor <xmm13=int6464#12,<xmm11=int6464#9
# asm 2: pxor <xmm13=%xmm11,<xmm11=%xmm8
pxor %xmm11,%xmm8

# qhasm: xmm10 ^= xmm13
# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
pxor %xmm11,%xmm9

# qhasm: xmm13 = xmm7
# asm 1: movdqa <xmm7=int6464#8,>xmm13=int6464#11
# asm 2: movdqa <xmm7=%xmm7,>xmm13=%xmm10
movdqa %xmm7,%xmm10

# qhasm: xmm13 ^= xmm1
# asm 1: pxor <xmm1=int6464#2,<xmm13=int6464#11
# asm 2: pxor <xmm1=%xmm1,<xmm13=%xmm10
pxor %xmm1,%xmm10

# qhasm: xmm12 = xmm5
# asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#12
# asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm11
movdqa %xmm5,%xmm11

# qhasm: xmm9 = xmm13
# asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13
# asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12
movdqa %xmm10,%xmm12

# qhasm: xmm12 ^= xmm6
# asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#12
# asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm11
pxor %xmm6,%xmm11

# qhasm: xmm9 |= xmm12
# asm 1: por <xmm12=int6464#12,<xmm9=int6464#13
# asm 2: por <xmm12=%xmm11,<xmm9=%xmm12
por %xmm11,%xmm12

# qhasm: xmm13 &= xmm12
# asm 1: pand <xmm12=int6464#12,<xmm13=int6464#11
# asm 2: pand <xmm12=%xmm11,<xmm13=%xmm10
pand %xmm11,%xmm10

# qhasm: xmm8 ^= xmm13
# asm 1: pxor <xmm13=int6464#11,<xmm8=int6464#15
# asm 2: pxor <xmm13=%xmm10,<xmm8=%xmm14
pxor %xmm10,%xmm14

# qhasm: xmm11 ^= xmm15
# asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#9
# asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm8
pxor %xmm15,%xmm8

# qhasm: xmm10 ^= xmm14
# asm 1: pxor <xmm14=int6464#14,<xmm10=int6464#10
# asm 2: pxor <xmm14=%xmm13,<xmm10=%xmm9
pxor %xmm13,%xmm9

# qhasm: xmm9 ^= xmm15
# asm 1: pxor <xmm15=int6464#16,<xmm9=int6464#13
# asm 2: pxor <xmm15=%xmm15,<xmm9=%xmm12
pxor %xmm15,%xmm12

# qhasm: xmm8 ^= xmm14
# asm 1: pxor <xmm14=int6464#14,<xmm8=int6464#15
# asm 2: pxor <xmm14=%xmm13,<xmm8=%xmm14
pxor %xmm13,%xmm14

# qhasm: xmm9 ^= xmm14
# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
pxor %xmm13,%xmm12

# qhasm: xmm12 = xmm2
# asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#11
# asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm10
movdqa %xmm2,%xmm10

# qhasm: xmm13 = xmm4
# asm 1: movdqa <xmm4=int6464#5,>xmm13=int6464#12
# asm 2: movdqa <xmm4=%xmm4,>xmm13=%xmm11
movdqa %xmm4,%xmm11

# qhasm: xmm14 = xmm1
# asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14
# asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13
movdqa %xmm1,%xmm13

# qhasm: xmm15 = xmm7
# asm 1: movdqa <xmm7=int6464#8,>xmm15=int6464#16
# asm 2: movdqa <xmm7=%xmm7,>xmm15=%xmm15
movdqa %xmm7,%xmm15

# qhasm: xmm12 &= xmm3
# asm 1: pand <xmm3=int6464#4,<xmm12=int6464#11
# asm 2: pand <xmm3=%xmm3,<xmm12=%xmm10
pand %xmm3,%xmm10

# qhasm: xmm13 &= xmm0
# asm 1: pand <xmm0=int6464#1,<xmm13=int6464#12
# asm 2: pand <xmm0=%xmm0,<xmm13=%xmm11
pand %xmm0,%xmm11

# qhasm: xmm14 &= xmm5
# asm 1: pand <xmm5=int6464#6,<xmm14=int6464#14
# asm 2: pand <xmm5=%xmm5,<xmm14=%xmm13
pand %xmm5,%xmm13

# qhasm: xmm15 |= xmm6
# asm 1: por <xmm6=int6464#7,<xmm15=int6464#16
# asm 2: por <xmm6=%xmm6,<xmm15=%xmm15
por %xmm6,%xmm15

# qhasm: xmm11 ^= xmm12
# asm 1: pxor <xmm12=int6464#11,<xmm11=int6464#9
# asm 2: pxor <xmm12=%xmm10,<xmm11=%xmm8
pxor %xmm10,%xmm8

# qhasm: xmm10 ^= xmm13
# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
pxor %xmm11,%xmm9

# qhasm: xmm9 ^= xmm14
# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
pxor %xmm13,%xmm12

# qhasm: xmm8 ^= xmm15
# asm 1: pxor <xmm15=int6464#16,<xmm8=int6464#15
# asm 2: pxor <xmm15=%xmm15,<xmm8=%xmm14
pxor %xmm15,%xmm14

# qhasm: xmm12 = xmm11
# asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11
# asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10
movdqa %xmm8,%xmm10

# qhasm: xmm12 ^= xmm10
# asm 1: pxor <xmm10=int6464#10,<xmm12=int6464#11
# asm 2: pxor <xmm10=%xmm9,<xmm12=%xmm10
pxor %xmm9,%xmm10

# qhasm: xmm11 &= xmm9
# asm 1: pand <xmm9=int6464#13,<xmm11=int6464#9
# asm 2: pand <xmm9=%xmm12,<xmm11=%xmm8
pand %xmm12,%xmm8

# qhasm: xmm14 = xmm8
# asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12
# asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11
movdqa %xmm14,%xmm11

# qhasm: xmm14 ^= xmm11
# asm 1: pxor <xmm11=int6464#9,<xmm14=int6464#12
# asm 2: pxor <xmm11=%xmm8,<xmm14=%xmm11
pxor %xmm8,%xmm11

# qhasm: xmm15 = xmm12
# asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14
# asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13
movdqa %xmm10,%xmm13

# qhasm: xmm15 &= xmm14
# asm 1: pand <xmm14=int6464#12,<xmm15=int6464#14
# asm 2: pand <xmm14=%xmm11,<xmm15=%xmm13
pand %xmm11,%xmm13

# qhasm: xmm15 ^= xmm10
# asm 1: pxor <xmm10=int6464#10,<xmm15=int6464#14
# asm 2: pxor <xmm10=%xmm9,<xmm15=%xmm13
pxor %xmm9,%xmm13

# qhasm: xmm13 = xmm9
# asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16
# asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15
movdqa %xmm12,%xmm15

# qhasm: xmm13 ^= xmm8
# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
pxor %xmm14,%xmm15

# qhasm: xmm11 ^= xmm10
# asm 1: pxor <xmm10=int6464#10,<xmm11=int6464#9
# asm 2: pxor <xmm10=%xmm9,<xmm11=%xmm8
pxor %xmm9,%xmm8

# qhasm: xmm13 &= xmm11
# asm 1: pand <xmm11=int6464#9,<xmm13=int6464#16
# asm 2: pand <xmm11=%xmm8,<xmm13=%xmm15
pand %xmm8,%xmm15

# qhasm: xmm13 ^= xmm8
# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
pxor %xmm14,%xmm15

# qhasm: xmm9 ^= xmm13
# asm 1: pxor <xmm13=int6464#16,<xmm9=int6464#13
# asm 2: pxor <xmm13=%xmm15,<xmm9=%xmm12
pxor %xmm15,%xmm12

# qhasm: xmm10 = xmm14
# asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9
# asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8
movdqa %xmm11,%xmm8

# qhasm: xmm10 ^= xmm13
# asm 1: pxor <xmm13=int6464#16,<xmm10=int6464#9
# asm 2: pxor <xmm13=%xmm15,<xmm10=%xmm8
pxor %xmm15,%xmm8

# qhasm: xmm10 &= xmm8
# asm 1: pand <xmm8=int6464#15,<xmm10=int6464#9
# asm 2: pand <xmm8=%xmm14,<xmm10=%xmm8
pand %xmm14,%xmm8

# qhasm: xmm9 ^= xmm10
# asm 1: pxor <xmm10=int6464#9,<xmm9=int6464#13
# asm 2: pxor <xmm10=%xmm8,<xmm9=%xmm12
pxor %xmm8,%xmm12

# qhasm: xmm14 ^= xmm10
# asm 1: pxor <xmm10=int6464#9,<xmm14=int6464#12
# asm 2: pxor <xmm10=%xmm8,<xmm14=%xmm11
pxor %xmm8,%xmm11

# qhasm: xmm14 &= xmm15
# asm 1: pand <xmm15=int6464#14,<xmm14=int6464#12
# asm 2: pand <xmm15=%xmm13,<xmm14=%xmm11
pand %xmm13,%xmm11

# qhasm: xmm14 ^= xmm12
# asm 1: pxor <xmm12=int6464#11,<xmm14=int6464#12
# asm 2: pxor <xmm12=%xmm10,<xmm14=%xmm11
pxor %xmm10,%xmm11

# qhasm: xmm12 = xmm6
# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#9
# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm8
movdqa %xmm6,%xmm8

# qhasm: xmm8 = xmm5
# asm 1: movdqa <xmm5=int6464#6,>xmm8=int6464#10
# asm 2: movdqa <xmm5=%xmm5,>xmm8=%xmm9
movdqa %xmm5,%xmm9

# qhasm: xmm10 = xmm15
# asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11
# asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10
movdqa %xmm13,%xmm10

# qhasm: xmm10 ^= xmm14
# asm 1: pxor <xmm14=int6464#12,<xmm10=int6464#11
# asm 2: pxor <xmm14=%xmm11,<xmm10=%xmm10
pxor %xmm11,%xmm10

# qhasm: xmm10 &= xmm6
# asm 1: pand <xmm6=int6464#7,<xmm10=int6464#11
# asm 2: pand <xmm6=%xmm6,<xmm10=%xmm10
pand %xmm6,%xmm10

# qhasm: xmm6 ^= xmm5
# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
pxor %xmm5,%xmm6

# qhasm: xmm6 &= xmm14
# asm 1: pand <xmm14=int6464#12,<xmm6=int6464#7
# asm 2: pand <xmm14=%xmm11,<xmm6=%xmm6
pand %xmm11,%xmm6

# qhasm: xmm5 &= xmm15
# asm 1: pand <xmm15=int6464#14,<xmm5=int6464#6
# asm 2: pand <xmm15=%xmm13,<xmm5=%xmm5
pand %xmm13,%xmm5

# qhasm: xmm6 ^= xmm5
# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
pxor %xmm5,%xmm6

# qhasm: xmm5 ^= xmm10
# asm 1: pxor <xmm10=int6464#11,<xmm5=int6464#6
# asm 2: pxor <xmm10=%xmm10,<xmm5=%xmm5
pxor %xmm10,%xmm5

# qhasm: xmm12 ^= xmm0
# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#9
# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm8
pxor %xmm0,%xmm8

# qhasm: xmm8 ^= xmm3
# asm 1: pxor <xmm3=int6464#4,<xmm8=int6464#10
# asm 2: pxor <xmm3=%xmm3,<xmm8=%xmm9
pxor %xmm3,%xmm9

# qhasm: xmm15 ^= xmm13
# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
pxor %xmm15,%xmm13

# qhasm: xmm14 ^= xmm9
# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
pxor %xmm12,%xmm11

# qhasm: xmm11 = xmm15
# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
movdqa %xmm13,%xmm10

# qhasm: xmm11 ^= xmm14
# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
pxor %xmm11,%xmm10

# qhasm: xmm11 &= xmm12
# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
pand %xmm8,%xmm10

# qhasm: xmm12 ^= xmm8
# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
pxor %xmm9,%xmm8

# qhasm: xmm12 &= xmm14
# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
pand %xmm11,%xmm8

# qhasm: xmm8 &= xmm15
# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
pand %xmm13,%xmm9

# qhasm: xmm8 ^= xmm12
# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
pxor %xmm8,%xmm9

# qhasm: xmm12 ^= xmm11
# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
pxor %xmm10,%xmm8

# qhasm: xmm10 = xmm13
# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
movdqa %xmm15,%xmm10

# qhasm: xmm10 ^= xmm9
# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
pxor %xmm12,%xmm10

# qhasm: xmm10 &= xmm0
# asm 1: pand <xmm0=int6464#1,<xmm10=int6464#11
# asm 2: pand <xmm0=%xmm0,<xmm10=%xmm10
pand %xmm0,%xmm10

# qhasm: xmm0 ^= xmm3
# asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1
# asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0
pxor %xmm3,%xmm0

# qhasm: xmm0 &= xmm9
# asm 1: pand <xmm9=int6464#13,<xmm0=int6464#1
# asm 2: pand <xmm9=%xmm12,<xmm0=%xmm0
pand %xmm12,%xmm0

# qhasm: xmm3 &= xmm13
# asm 1: pand <xmm13=int6464#16,<xmm3=int6464#4
# asm 2: pand <xmm13=%xmm15,<xmm3=%xmm3
pand %xmm15,%xmm3

# qhasm: xmm0 ^= xmm3
# asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1
# asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0
pxor %xmm3,%xmm0

# qhasm: xmm3 ^= xmm10
# asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4
# asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3
pxor %xmm10,%xmm3

# qhasm: xmm6 ^= xmm12
# asm 1: pxor <xmm12=int6464#9,<xmm6=int6464#7
# asm 2: pxor <xmm12=%xmm8,<xmm6=%xmm6
pxor %xmm8,%xmm6

# qhasm: xmm0 ^= xmm12
# asm 1: pxor <xmm12=int6464#9,<xmm0=int6464#1
# asm 2: pxor <xmm12=%xmm8,<xmm0=%xmm0
pxor %xmm8,%xmm0

# qhasm: xmm5 ^= xmm8
# asm 1: pxor <xmm8=int6464#10,<xmm5=int6464#6
# asm 2: pxor <xmm8=%xmm9,<xmm5=%xmm5
pxor %xmm9,%xmm5

# qhasm: xmm3 ^= xmm8
# asm 1: pxor <xmm8=int6464#10,<xmm3=int6464#4
# asm 2: pxor <xmm8=%xmm9,<xmm3=%xmm3
pxor %xmm9,%xmm3

# qhasm: xmm12 = xmm7
# asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#9
# asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm8
movdqa %xmm7,%xmm8

# qhasm: xmm8 = xmm1
# asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10
# asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9
movdqa %xmm1,%xmm9

# qhasm: xmm12 ^= xmm4
# asm 1: pxor <xmm4=int6464#5,<xmm12=int6464#9
# asm 2: pxor <xmm4=%xmm4,<xmm12=%xmm8
pxor %xmm4,%xmm8

# qhasm: xmm8 ^= xmm2
# asm 1: pxor <xmm2=int6464#3,<xmm8=int6464#10
# asm 2: pxor <xmm2=%xmm2,<xmm8=%xmm9
pxor %xmm2,%xmm9

# qhasm: xmm11 = xmm15
# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
movdqa %xmm13,%xmm10

# qhasm: xmm11 ^= xmm14
# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
pxor %xmm11,%xmm10

# qhasm: xmm11 &= xmm12
# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
pand %xmm8,%xmm10

# qhasm: xmm12 ^= xmm8
# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
pxor %xmm9,%xmm8

# qhasm: xmm12 &= xmm14
# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
pand %xmm11,%xmm8

# qhasm: xmm8 &= xmm15
# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
pand %xmm13,%xmm9

# qhasm: xmm8 ^= xmm12
# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
pxor %xmm8,%xmm9

# qhasm: xmm12 ^= xmm11
# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
pxor %xmm10,%xmm8

# qhasm: xmm10 = xmm13
# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
movdqa %xmm15,%xmm10

# qhasm: xmm10 ^= xmm9
# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
pxor %xmm12,%xmm10

# qhasm: xmm10 &= xmm4
# asm 1: pand <xmm4=int6464#5,<xmm10=int6464#11
# asm 2: pand <xmm4=%xmm4,<xmm10=%xmm10
pand %xmm4,%xmm10

# qhasm: xmm4 ^= xmm2
# asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5
# asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4
pxor %xmm2,%xmm4

# qhasm: xmm4 &= xmm9
# asm 1: pand <xmm9=int6464#13,<xmm4=int6464#5
# asm 2: pand <xmm9=%xmm12,<xmm4=%xmm4
pand %xmm12,%xmm4

# qhasm: xmm2 &= xmm13
# asm 1: pand <xmm13=int6464#16,<xmm2=int6464#3
# asm 2: pand <xmm13=%xmm15,<xmm2=%xmm2
pand %xmm15,%xmm2

# qhasm: xmm4 ^= xmm2
# asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5
# asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4
pxor %xmm2,%xmm4

# qhasm: xmm2 ^= xmm10
# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3
# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2
pxor %xmm10,%xmm2

# qhasm: xmm15 ^= xmm13
# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
pxor %xmm15,%xmm13

# qhasm: xmm14 ^= xmm9
# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
pxor %xmm12,%xmm11

# qhasm: xmm11 = xmm15
# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
movdqa %xmm13,%xmm10

# qhasm: xmm11 ^= xmm14
# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
pxor %xmm11,%xmm10

# qhasm: xmm11 &= xmm7
# asm 1: pand <xmm7=int6464#8,<xmm11=int6464#11
# asm 2: pand <xmm7=%xmm7,<xmm11=%xmm10
pand %xmm7,%xmm10

# qhasm: xmm7 ^= xmm1
# asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8
# asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7
pxor %xmm1,%xmm7

# qhasm: xmm7 &= xmm14
# asm 1: pand <xmm14=int6464#12,<xmm7=int6464#8
# asm 2: pand <xmm14=%xmm11,<xmm7=%xmm7
pand %xmm11,%xmm7

# qhasm: xmm1 &= xmm15
# asm 1: pand <xmm15=int6464#14,<xmm1=int6464#2
# asm 2: pand <xmm15=%xmm13,<xmm1=%xmm1
pand %xmm13,%xmm1

# qhasm: xmm7 ^= xmm1
# asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8
# asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7
pxor %xmm1,%xmm7

# qhasm: xmm1 ^= xmm11
# asm 1: pxor <xmm11=int6464#11,<xmm1=int6464#2
# asm 2: pxor <xmm11=%xmm10,<xmm1=%xmm1
pxor %xmm10,%xmm1

# qhasm: xmm7 ^= xmm12
# asm 1: pxor <xmm12=int6464#9,<xmm7=int6464#8
# asm 2: pxor <xmm12=%xmm8,<xmm7=%xmm7
pxor %xmm8,%xmm7

# qhasm: xmm4 ^= xmm12
# asm 1: pxor <xmm12=int6464#9,<xmm4=int6464#5
# asm 2: pxor <xmm12=%xmm8,<xmm4=%xmm4
pxor %xmm8,%xmm4

# qhasm: xmm1 ^= xmm8
# asm 1: pxor <xmm8=int6464#10,<xmm1=int6464#2
# asm 2: pxor <xmm8=%xmm9,<xmm1=%xmm1
pxor %xmm9,%xmm1

# qhasm: xmm2 ^= xmm8
# asm 1: pxor <xmm8=int6464#10,<xmm2=int6464#3
# asm 2: pxor <xmm8=%xmm9,<xmm2=%xmm2
pxor %xmm9,%xmm2

# qhasm: xmm7 ^= xmm0
# asm 1: pxor <xmm0=int6464#1,<xmm7=int6464#8
# asm 2: pxor <xmm0=%xmm0,<xmm7=%xmm7
pxor %xmm0,%xmm7

# qhasm: xmm1 ^= xmm6
# asm 1: pxor <xmm6=int6464#7,<xmm1=int6464#2
# asm 2: pxor <xmm6=%xmm6,<xmm1=%xmm1
pxor %xmm6,%xmm1

# qhasm: xmm4 ^= xmm7
# asm 1: pxor <xmm7=int6464#8,<xmm4=int6464#5
# asm 2: pxor <xmm7=%xmm7,<xmm4=%xmm4
pxor %xmm7,%xmm4

# qhasm: xmm6 ^= xmm0
# asm 1: pxor <xmm0=int6464#1,<xmm6=int6464#7
# asm 2: pxor <xmm0=%xmm0,<xmm6=%xmm6
pxor %xmm0,%xmm6

# qhasm: xmm0 ^= xmm1
# asm 1: pxor <xmm1=int6464#2,<xmm0=int6464#1
# asm 2: pxor <xmm1=%xmm1,<xmm0=%xmm0
pxor %xmm1,%xmm0

# qhasm: xmm1 ^= xmm5
# asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2
# asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1
pxor %xmm5,%xmm1

# qhasm: xmm5 ^= xmm2
# asm 1: pxor <xmm2=int6464#3,<xmm5=int6464#6
# asm 2: pxor <xmm2=%xmm2,<xmm5=%xmm5
pxor %xmm2,%xmm5

# qhasm: xmm4 ^= xmm5
# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
pxor %xmm5,%xmm4

# qhasm: xmm2 ^= xmm3
# asm 1: pxor <xmm3=int6464#4,<xmm2=int6464#3
# asm 2: pxor <xmm3=%xmm3,<xmm2=%xmm2
pxor %xmm3,%xmm2

# qhasm: xmm3 ^= xmm5
# asm 1: pxor <xmm5=int6464#6,<xmm3=int6464#4
# asm 2: pxor <xmm5=%xmm5,<xmm3=%xmm3
pxor %xmm5,%xmm3

# qhasm: xmm6 ^= xmm3
# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
pxor %xmm3,%xmm6

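# MixColumns (bitsliced): pshufd $0x93 rotates the four 32-bit words of
# each slice by one position and pshufd $0x4E by two; XORing the rotated
# copies as below computes the MixColumns matrix on every slice.
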
# qhasm: xmm8 = shuffle dwords of xmm0 by 0x93
# asm 1: pshufd $0x93,<xmm0=int6464#1,>xmm8=int6464#9
# asm 2: pshufd $0x93,<xmm0=%xmm0,>xmm8=%xmm8
pshufd $0x93,%xmm0,%xmm8

# qhasm: xmm9 = shuffle dwords of xmm1 by 0x93
# asm 1: pshufd $0x93,<xmm1=int6464#2,>xmm9=int6464#10
# asm 2: pshufd $0x93,<xmm1=%xmm1,>xmm9=%xmm9
pshufd $0x93,%xmm1,%xmm9

# qhasm: xmm10 = shuffle dwords of xmm4 by 0x93
# asm 1: pshufd $0x93,<xmm4=int6464#5,>xmm10=int6464#11
# asm 2: pshufd $0x93,<xmm4=%xmm4,>xmm10=%xmm10
pshufd $0x93,%xmm4,%xmm10

# qhasm: xmm11 = shuffle dwords of xmm6 by 0x93
# asm 1: pshufd $0x93,<xmm6=int6464#7,>xmm11=int6464#12
# asm 2: pshufd $0x93,<xmm6=%xmm6,>xmm11=%xmm11
pshufd $0x93,%xmm6,%xmm11

# qhasm: xmm12 = shuffle dwords of xmm3 by 0x93
# asm 1: pshufd $0x93,<xmm3=int6464#4,>xmm12=int6464#13
# asm 2: pshufd $0x93,<xmm3=%xmm3,>xmm12=%xmm12
pshufd $0x93,%xmm3,%xmm12

# qhasm: xmm13 = shuffle dwords of xmm7 by 0x93
# asm 1: pshufd $0x93,<xmm7=int6464#8,>xmm13=int6464#14
# asm 2: pshufd $0x93,<xmm7=%xmm7,>xmm13=%xmm13
pshufd $0x93,%xmm7,%xmm13

# qhasm: xmm14 = shuffle dwords of xmm2 by 0x93
# asm 1: pshufd $0x93,<xmm2=int6464#3,>xmm14=int6464#15
# asm 2: pshufd $0x93,<xmm2=%xmm2,>xmm14=%xmm14
pshufd $0x93,%xmm2,%xmm14

# qhasm: xmm15 = shuffle dwords of xmm5 by 0x93
# asm 1: pshufd $0x93,<xmm5=int6464#6,>xmm15=int6464#16
# asm 2: pshufd $0x93,<xmm5=%xmm5,>xmm15=%xmm15
pshufd $0x93,%xmm5,%xmm15

# qhasm: xmm0 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
pxor %xmm8,%xmm0

# qhasm: xmm1 ^= xmm9
# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
pxor %xmm9,%xmm1

# qhasm: xmm4 ^= xmm10
# asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5
# asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4
pxor %xmm10,%xmm4

# qhasm: xmm6 ^= xmm11
# asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7
# asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6
pxor %xmm11,%xmm6

# qhasm: xmm3 ^= xmm12
# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4
# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3
pxor %xmm12,%xmm3

# qhasm: xmm7 ^= xmm13
# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
pxor %xmm13,%xmm7

# qhasm: xmm2 ^= xmm14
# asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3
# asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2
pxor %xmm14,%xmm2

# qhasm: xmm5 ^= xmm15
# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
pxor %xmm15,%xmm5

# qhasm: xmm8 ^= xmm5
# asm 1: pxor <xmm5=int6464#6,<xmm8=int6464#9
# asm 2: pxor <xmm5=%xmm5,<xmm8=%xmm8
pxor %xmm5,%xmm8

# qhasm: xmm9 ^= xmm0
# asm 1: pxor <xmm0=int6464#1,<xmm9=int6464#10
# asm 2: pxor <xmm0=%xmm0,<xmm9=%xmm9
pxor %xmm0,%xmm9

# qhasm: xmm10 ^= xmm1
# asm 1: pxor <xmm1=int6464#2,<xmm10=int6464#11
# asm 2: pxor <xmm1=%xmm1,<xmm10=%xmm10
pxor %xmm1,%xmm10

# qhasm: xmm9 ^= xmm5
# asm 1: pxor <xmm5=int6464#6,<xmm9=int6464#10
# asm 2: pxor <xmm5=%xmm5,<xmm9=%xmm9
pxor %xmm5,%xmm9

# qhasm: xmm11 ^= xmm4
# asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#12
# asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm11
pxor %xmm4,%xmm11

# qhasm: xmm12 ^= xmm6
# asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#13
# asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm12
pxor %xmm6,%xmm12

# qhasm: xmm13 ^= xmm3
# asm 1: pxor <xmm3=int6464#4,<xmm13=int6464#14
# asm 2: pxor <xmm3=%xmm3,<xmm13=%xmm13
pxor %xmm3,%xmm13

# qhasm: xmm11 ^= xmm5
# asm 1: pxor <xmm5=int6464#6,<xmm11=int6464#12
# asm 2: pxor <xmm5=%xmm5,<xmm11=%xmm11
pxor %xmm5,%xmm11

# qhasm: xmm14 ^= xmm7
# asm 1: pxor <xmm7=int6464#8,<xmm14=int6464#15
# asm 2: pxor <xmm7=%xmm7,<xmm14=%xmm14
pxor %xmm7,%xmm14

# qhasm: xmm15 ^= xmm2
# asm 1: pxor <xmm2=int6464#3,<xmm15=int6464#16
# asm 2: pxor <xmm2=%xmm2,<xmm15=%xmm15
pxor %xmm2,%xmm15

# qhasm: xmm12 ^= xmm5
# asm 1: pxor <xmm5=int6464#6,<xmm12=int6464#13
# asm 2: pxor <xmm5=%xmm5,<xmm12=%xmm12
pxor %xmm5,%xmm12

# qhasm: xmm0 = shuffle dwords of xmm0 by 0x4E
# asm 1: pshufd $0x4E,<xmm0=int6464#1,>xmm0=int6464#1
# asm 2: pshufd $0x4E,<xmm0=%xmm0,>xmm0=%xmm0
pshufd $0x4E,%xmm0,%xmm0

# qhasm: xmm1 = shuffle dwords of xmm1 by 0x4E
# asm 1: pshufd $0x4E,<xmm1=int6464#2,>xmm1=int6464#2
# asm 2: pshufd $0x4E,<xmm1=%xmm1,>xmm1=%xmm1
pshufd $0x4E,%xmm1,%xmm1

# qhasm: xmm4 = shuffle dwords of xmm4 by 0x4E
# asm 1: pshufd $0x4E,<xmm4=int6464#5,>xmm4=int6464#5
# asm 2: pshufd $0x4E,<xmm4=%xmm4,>xmm4=%xmm4
pshufd $0x4E,%xmm4,%xmm4

# qhasm: xmm6 = shuffle dwords of xmm6 by 0x4E
# asm 1: pshufd $0x4E,<xmm6=int6464#7,>xmm6=int6464#7
# asm 2: pshufd $0x4E,<xmm6=%xmm6,>xmm6=%xmm6
pshufd $0x4E,%xmm6,%xmm6

# qhasm: xmm3 = shuffle dwords of xmm3 by 0x4E
# asm 1: pshufd $0x4E,<xmm3=int6464#4,>xmm3=int6464#4
# asm 2: pshufd $0x4E,<xmm3=%xmm3,>xmm3=%xmm3
pshufd $0x4E,%xmm3,%xmm3

# qhasm: xmm7 = shuffle dwords of xmm7 by 0x4E
# asm 1: pshufd $0x4E,<xmm7=int6464#8,>xmm7=int6464#8
# asm 2: pshufd $0x4E,<xmm7=%xmm7,>xmm7=%xmm7
pshufd $0x4E,%xmm7,%xmm7

# qhasm: xmm2 = shuffle dwords of xmm2 by 0x4E
# asm 1: pshufd $0x4E,<xmm2=int6464#3,>xmm2=int6464#3
# asm 2: pshufd $0x4E,<xmm2=%xmm2,>xmm2=%xmm2
pshufd $0x4E,%xmm2,%xmm2

# qhasm: xmm5 = shuffle dwords of xmm5 by 0x4E
# asm 1: pshufd $0x4E,<xmm5=int6464#6,>xmm5=int6464#6
# asm 2: pshufd $0x4E,<xmm5=%xmm5,>xmm5=%xmm5
pshufd $0x4E,%xmm5,%xmm5

# qhasm: xmm8 ^= xmm0
# asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9
# asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8
pxor %xmm0,%xmm8

# qhasm: xmm9 ^= xmm1
# asm 1: pxor <xmm1=int6464#2,<xmm9=int6464#10
# asm 2: pxor <xmm1=%xmm1,<xmm9=%xmm9
pxor %xmm1,%xmm9

# qhasm: xmm10 ^= xmm4
# asm 1: pxor <xmm4=int6464#5,<xmm10=int6464#11
# asm 2: pxor <xmm4=%xmm4,<xmm10=%xmm10
pxor %xmm4,%xmm10

# qhasm: xmm11 ^= xmm6
# asm 1: pxor <xmm6=int6464#7,<xmm11=int6464#12
# asm 2: pxor <xmm6=%xmm6,<xmm11=%xmm11
pxor %xmm6,%xmm11

# qhasm: xmm12 ^= xmm3
# asm 1: pxor <xmm3=int6464#4,<xmm12=int6464#13
# asm 2: pxor <xmm3=%xmm3,<xmm12=%xmm12
pxor %xmm3,%xmm12

# qhasm: xmm13 ^= xmm7
# asm 1: pxor <xmm7=int6464#8,<xmm13=int6464#14
# asm 2: pxor <xmm7=%xmm7,<xmm13=%xmm13
pxor %xmm7,%xmm13

# qhasm: xmm14 ^= xmm2
# asm 1: pxor <xmm2=int6464#3,<xmm14=int6464#15
# asm 2: pxor <xmm2=%xmm2,<xmm14=%xmm14
pxor %xmm2,%xmm14

# qhasm: xmm15 ^= xmm5
# asm 1: pxor <xmm5=int6464#6,<xmm15=int6464#16
# asm 2: pxor <xmm5=%xmm5,<xmm15=%xmm15
pxor %xmm5,%xmm15

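# Next round: XOR in the second 128-byte round key (at c+128..c+240)
# and apply the SR byte shuffle again; the SubBytes/MixColumns/
# AddRoundKey pattern then repeats for the remaining rounds, with the
# state now carried in xmm8-xmm15.
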
1780# qhasm: xmm8 ^= *(int128 *)(c + 128)
1781# asm 1: pxor 128(<c=int64#4),<xmm8=int6464#9
1782# asm 2: pxor 128(<c=%rcx),<xmm8=%xmm8
1783pxor 128(%rcx),%xmm8
1784
1785# qhasm: shuffle bytes of xmm8 by SR
1786# asm 1: pshufb SR,<xmm8=int6464#9
1787# asm 2: pshufb SR,<xmm8=%xmm8
1788pshufb SR,%xmm8
1789
1790# qhasm: xmm9 ^= *(int128 *)(c + 144)
1791# asm 1: pxor 144(<c=int64#4),<xmm9=int6464#10
1792# asm 2: pxor 144(<c=%rcx),<xmm9=%xmm9
1793pxor 144(%rcx),%xmm9
1794
1795# qhasm: shuffle bytes of xmm9 by SR
1796# asm 1: pshufb SR,<xmm9=int6464#10
1797# asm 2: pshufb SR,<xmm9=%xmm9
1798pshufb SR,%xmm9
1799
1800# qhasm: xmm10 ^= *(int128 *)(c + 160)
1801# asm 1: pxor 160(<c=int64#4),<xmm10=int6464#11
1802# asm 2: pxor 160(<c=%rcx),<xmm10=%xmm10
1803pxor 160(%rcx),%xmm10
1804
1805# qhasm: shuffle bytes of xmm10 by SR
1806# asm 1: pshufb SR,<xmm10=int6464#11
1807# asm 2: pshufb SR,<xmm10=%xmm10
1808pshufb SR,%xmm10
1809
1810# qhasm: xmm11 ^= *(int128 *)(c + 176)
1811# asm 1: pxor 176(<c=int64#4),<xmm11=int6464#12
1812# asm 2: pxor 176(<c=%rcx),<xmm11=%xmm11
1813pxor 176(%rcx),%xmm11
1814
1815# qhasm: shuffle bytes of xmm11 by SR
1816# asm 1: pshufb SR,<xmm11=int6464#12
1817# asm 2: pshufb SR,<xmm11=%xmm11
1818pshufb SR,%xmm11
1819
1820# qhasm: xmm12 ^= *(int128 *)(c + 192)
1821# asm 1: pxor 192(<c=int64#4),<xmm12=int6464#13
1822# asm 2: pxor 192(<c=%rcx),<xmm12=%xmm12
1823pxor 192(%rcx),%xmm12
1824
1825# qhasm: shuffle bytes of xmm12 by SR
1826# asm 1: pshufb SR,<xmm12=int6464#13
1827# asm 2: pshufb SR,<xmm12=%xmm12
1828pshufb SR,%xmm12
1829
1830# qhasm: xmm13 ^= *(int128 *)(c + 208)
1831# asm 1: pxor 208(<c=int64#4),<xmm13=int6464#14
1832# asm 2: pxor 208(<c=%rcx),<xmm13=%xmm13
1833pxor 208(%rcx),%xmm13
1834
1835# qhasm: shuffle bytes of xmm13 by SR
1836# asm 1: pshufb SR,<xmm13=int6464#14
1837# asm 2: pshufb SR,<xmm13=%xmm13
1838pshufb SR,%xmm13
1839
1840# qhasm: xmm14 ^= *(int128 *)(c + 224)
1841# asm 1: pxor 224(<c=int64#4),<xmm14=int6464#15
1842# asm 2: pxor 224(<c=%rcx),<xmm14=%xmm14
1843pxor 224(%rcx),%xmm14
1844
1845# qhasm: shuffle bytes of xmm14 by SR
1846# asm 1: pshufb SR,<xmm14=int6464#15
1847# asm 2: pshufb SR,<xmm14=%xmm14
1848pshufb SR,%xmm14
1849
1850# qhasm: xmm15 ^= *(int128 *)(c + 240)
1851# asm 1: pxor 240(<c=int64#4),<xmm15=int6464#16
1852# asm 2: pxor 240(<c=%rcx),<xmm15=%xmm15
1853pxor 240(%rcx),%xmm15
1854
1855# qhasm: shuffle bytes of xmm15 by SR
1856# asm 1: pshufb SR,<xmm15=int6464#16
1857# asm 2: pshufb SR,<xmm15=%xmm15
1858pshufb SR,%xmm15
1859
1860# qhasm: xmm13 ^= xmm14
1861# asm 1: pxor <xmm14=int6464#15,<xmm13=int6464#14
1862# asm 2: pxor <xmm14=%xmm14,<xmm13=%xmm13
1863pxor %xmm14,%xmm13
1864
1865# qhasm: xmm10 ^= xmm9
1866# asm 1: pxor <xmm9=int6464#10,<xmm10=int6464#11
1867# asm 2: pxor <xmm9=%xmm9,<xmm10=%xmm10
1868pxor %xmm9,%xmm10
1869
1870# qhasm: xmm13 ^= xmm8
1871# asm 1: pxor <xmm8=int6464#9,<xmm13=int6464#14
1872# asm 2: pxor <xmm8=%xmm8,<xmm13=%xmm13
1873pxor %xmm8,%xmm13
1874
1875# qhasm: xmm14 ^= xmm10
1876# asm 1: pxor <xmm10=int6464#11,<xmm14=int6464#15
1877# asm 2: pxor <xmm10=%xmm10,<xmm14=%xmm14
1878pxor %xmm10,%xmm14
1879
1880# qhasm: xmm11 ^= xmm8
1881# asm 1: pxor <xmm8=int6464#9,<xmm11=int6464#12
1882# asm 2: pxor <xmm8=%xmm8,<xmm11=%xmm11
1883pxor %xmm8,%xmm11
1884
1885# qhasm: xmm14 ^= xmm11
1886# asm 1: pxor <xmm11=int6464#12,<xmm14=int6464#15
1887# asm 2: pxor <xmm11=%xmm11,<xmm14=%xmm14
1888pxor %xmm11,%xmm14
1889
1890# qhasm: xmm11 ^= xmm15
1891# asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#12
1892# asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm11
1893pxor %xmm15,%xmm11
1894
1895# qhasm: xmm11 ^= xmm12
1896# asm 1: pxor <xmm12=int6464#13,<xmm11=int6464#12
1897# asm 2: pxor <xmm12=%xmm12,<xmm11=%xmm11
1898pxor %xmm12,%xmm11
1899
1900# qhasm: xmm15 ^= xmm13
1901# asm 1: pxor <xmm13=int6464#14,<xmm15=int6464#16
1902# asm 2: pxor <xmm13=%xmm13,<xmm15=%xmm15
1903pxor %xmm13,%xmm15
1904
1905# qhasm: xmm11 ^= xmm9
1906# asm 1: pxor <xmm9=int6464#10,<xmm11=int6464#12
1907# asm 2: pxor <xmm9=%xmm9,<xmm11=%xmm11
1908pxor %xmm9,%xmm11
1909
1910# qhasm: xmm12 ^= xmm13
1911# asm 1: pxor <xmm13=int6464#14,<xmm12=int6464#13
1912# asm 2: pxor <xmm13=%xmm13,<xmm12=%xmm12
1913pxor %xmm13,%xmm12
1914
1915# qhasm: xmm10 ^= xmm15
1916# asm 1: pxor <xmm15=int6464#16,<xmm10=int6464#11
1917# asm 2: pxor <xmm15=%xmm15,<xmm10=%xmm10
1918pxor %xmm15,%xmm10
1919
1920# qhasm: xmm9 ^= xmm13
1921# asm 1: pxor <xmm13=int6464#14,<xmm9=int6464#10
1922# asm 2: pxor <xmm13=%xmm13,<xmm9=%xmm9
1923pxor %xmm13,%xmm9
1924
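# Note: the movdqa/pxor/pand/por run below evaluates the nonlinear core of
# the AES S-box (GF(2^8) inversion) as a gate-level boolean circuit over
# the eight state slices. Because each xmm register holds one bit position
# of all 128 state bytes (8 blocks x 16 bytes), a single pand computes 128
# AND gates at once, and the whole S-box layer needs no table lookups,
# hence no key-dependent memory timing.
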
1925# qhasm: xmm3 = xmm15
1926# asm 1: movdqa <xmm15=int6464#16,>xmm3=int6464#1
1927# asm 2: movdqa <xmm15=%xmm15,>xmm3=%xmm0
1928movdqa %xmm15,%xmm0
1929
1930# qhasm: xmm2 = xmm9
1931# asm 1: movdqa <xmm9=int6464#10,>xmm2=int6464#2
1932# asm 2: movdqa <xmm9=%xmm9,>xmm2=%xmm1
1933movdqa %xmm9,%xmm1
1934
1935# qhasm: xmm1 = xmm13
1936# asm 1: movdqa <xmm13=int6464#14,>xmm1=int6464#3
1937# asm 2: movdqa <xmm13=%xmm13,>xmm1=%xmm2
1938movdqa %xmm13,%xmm2
1939
1940# qhasm: xmm5 = xmm10
1941# asm 1: movdqa <xmm10=int6464#11,>xmm5=int6464#4
1942# asm 2: movdqa <xmm10=%xmm10,>xmm5=%xmm3
1943movdqa %xmm10,%xmm3
1944
1945# qhasm: xmm4 = xmm14
1946# asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#5
1947# asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm4
1948movdqa %xmm14,%xmm4
1949
1950# qhasm: xmm3 ^= xmm12
1951# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#1
1952# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm0
1953pxor %xmm12,%xmm0
1954
1955# qhasm: xmm2 ^= xmm10
1956# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#2
1957# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm1
1958pxor %xmm10,%xmm1
1959
1960# qhasm: xmm1 ^= xmm11
1961# asm 1: pxor <xmm11=int6464#12,<xmm1=int6464#3
1962# asm 2: pxor <xmm11=%xmm11,<xmm1=%xmm2
1963pxor %xmm11,%xmm2
1964
1965# qhasm: xmm5 ^= xmm12
1966# asm 1: pxor <xmm12=int6464#13,<xmm5=int6464#4
1967# asm 2: pxor <xmm12=%xmm12,<xmm5=%xmm3
1968pxor %xmm12,%xmm3
1969
1970# qhasm: xmm4 ^= xmm8
1971# asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#5
1972# asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm4
1973pxor %xmm8,%xmm4
1974
1975# qhasm: xmm6 = xmm3
1976# asm 1: movdqa <xmm3=int6464#1,>xmm6=int6464#6
1977# asm 2: movdqa <xmm3=%xmm0,>xmm6=%xmm5
1978movdqa %xmm0,%xmm5
1979
1980# qhasm: xmm0 = xmm2
1981# asm 1: movdqa <xmm2=int6464#2,>xmm0=int6464#7
1982# asm 2: movdqa <xmm2=%xmm1,>xmm0=%xmm6
1983movdqa %xmm1,%xmm6
1984
1985# qhasm: xmm7 = xmm3
1986# asm 1: movdqa <xmm3=int6464#1,>xmm7=int6464#8
1987# asm 2: movdqa <xmm3=%xmm0,>xmm7=%xmm7
1988movdqa %xmm0,%xmm7
1989
1990# qhasm: xmm2 |= xmm1
1991# asm 1: por <xmm1=int6464#3,<xmm2=int6464#2
1992# asm 2: por <xmm1=%xmm2,<xmm2=%xmm1
1993por %xmm2,%xmm1
1994
1995# qhasm: xmm3 |= xmm4
1996# asm 1: por <xmm4=int6464#5,<xmm3=int6464#1
1997# asm 2: por <xmm4=%xmm4,<xmm3=%xmm0
1998por %xmm4,%xmm0
1999
2000# qhasm: xmm7 ^= xmm0
2001# asm 1: pxor <xmm0=int6464#7,<xmm7=int6464#8
2002# asm 2: pxor <xmm0=%xmm6,<xmm7=%xmm7
2003pxor %xmm6,%xmm7
2004
2005# qhasm: xmm6 &= xmm4
2006# asm 1: pand <xmm4=int6464#5,<xmm6=int6464#6
2007# asm 2: pand <xmm4=%xmm4,<xmm6=%xmm5
2008pand %xmm4,%xmm5
2009
2010# qhasm: xmm0 &= xmm1
2011# asm 1: pand <xmm1=int6464#3,<xmm0=int6464#7
2012# asm 2: pand <xmm1=%xmm2,<xmm0=%xmm6
2013pand %xmm2,%xmm6
2014
2015# qhasm: xmm4 ^= xmm1
2016# asm 1: pxor <xmm1=int6464#3,<xmm4=int6464#5
2017# asm 2: pxor <xmm1=%xmm2,<xmm4=%xmm4
2018pxor %xmm2,%xmm4
2019
2020# qhasm: xmm7 &= xmm4
2021# asm 1: pand <xmm4=int6464#5,<xmm7=int6464#8
2022# asm 2: pand <xmm4=%xmm4,<xmm7=%xmm7
2023pand %xmm4,%xmm7
2024
2025# qhasm: xmm4 = xmm11
2026# asm 1: movdqa <xmm11=int6464#12,>xmm4=int6464#3
2027# asm 2: movdqa <xmm11=%xmm11,>xmm4=%xmm2
2028movdqa %xmm11,%xmm2
2029
2030# qhasm: xmm4 ^= xmm8
2031# asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#3
2032# asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm2
2033pxor %xmm8,%xmm2
2034
2035# qhasm: xmm5 &= xmm4
2036# asm 1: pand <xmm4=int6464#3,<xmm5=int6464#4
2037# asm 2: pand <xmm4=%xmm2,<xmm5=%xmm3
2038pand %xmm2,%xmm3
2039
2040# qhasm: xmm3 ^= xmm5
2041# asm 1: pxor <xmm5=int6464#4,<xmm3=int6464#1
2042# asm 2: pxor <xmm5=%xmm3,<xmm3=%xmm0
2043pxor %xmm3,%xmm0
2044
2045# qhasm: xmm2 ^= xmm5
2046# asm 1: pxor <xmm5=int6464#4,<xmm2=int6464#2
2047# asm 2: pxor <xmm5=%xmm3,<xmm2=%xmm1
2048pxor %xmm3,%xmm1
2049
2050# qhasm: xmm5 = xmm15
2051# asm 1: movdqa <xmm15=int6464#16,>xmm5=int6464#3
2052# asm 2: movdqa <xmm15=%xmm15,>xmm5=%xmm2
2053movdqa %xmm15,%xmm2
2054
2055# qhasm: xmm5 ^= xmm9
2056# asm 1: pxor <xmm9=int6464#10,<xmm5=int6464#3
2057# asm 2: pxor <xmm9=%xmm9,<xmm5=%xmm2
2058pxor %xmm9,%xmm2
2059
2060# qhasm: xmm4 = xmm13
2061# asm 1: movdqa <xmm13=int6464#14,>xmm4=int6464#4
2062# asm 2: movdqa <xmm13=%xmm13,>xmm4=%xmm3
2063movdqa %xmm13,%xmm3
2064
2065# qhasm: xmm1 = xmm5
2066# asm 1: movdqa <xmm5=int6464#3,>xmm1=int6464#5
2067# asm 2: movdqa <xmm5=%xmm2,>xmm1=%xmm4
2068movdqa %xmm2,%xmm4
2069
2070# qhasm: xmm4 ^= xmm14
2071# asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#4
2072# asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm3
2073pxor %xmm14,%xmm3
2074
2075# qhasm: xmm1 |= xmm4
2076# asm 1: por <xmm4=int6464#4,<xmm1=int6464#5
2077# asm 2: por <xmm4=%xmm3,<xmm1=%xmm4
2078por %xmm3,%xmm4
2079
2080# qhasm: xmm5 &= xmm4
2081# asm 1: pand <xmm4=int6464#4,<xmm5=int6464#3
2082# asm 2: pand <xmm4=%xmm3,<xmm5=%xmm2
2083pand %xmm3,%xmm2
2084
2085# qhasm: xmm0 ^= xmm5
2086# asm 1: pxor <xmm5=int6464#3,<xmm0=int6464#7
2087# asm 2: pxor <xmm5=%xmm2,<xmm0=%xmm6
2088pxor %xmm2,%xmm6
2089
2090# qhasm: xmm3 ^= xmm7
2091# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#1
2092# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm0
2093pxor %xmm7,%xmm0
2094
2095# qhasm: xmm2 ^= xmm6
2096# asm 1: pxor <xmm6=int6464#6,<xmm2=int6464#2
2097# asm 2: pxor <xmm6=%xmm5,<xmm2=%xmm1
2098pxor %xmm5,%xmm1
2099
2100# qhasm: xmm1 ^= xmm7
2101# asm 1: pxor <xmm7=int6464#8,<xmm1=int6464#5
2102# asm 2: pxor <xmm7=%xmm7,<xmm1=%xmm4
2103pxor %xmm7,%xmm4
2104
2105# qhasm: xmm0 ^= xmm6
2106# asm 1: pxor <xmm6=int6464#6,<xmm0=int6464#7
2107# asm 2: pxor <xmm6=%xmm5,<xmm0=%xmm6
2108pxor %xmm5,%xmm6
2109
2110# qhasm: xmm1 ^= xmm6
2111# asm 1: pxor <xmm6=int6464#6,<xmm1=int6464#5
2112# asm 2: pxor <xmm6=%xmm5,<xmm1=%xmm4
2113pxor %xmm5,%xmm4
2114
2115# qhasm: xmm4 = xmm10
2116# asm 1: movdqa <xmm10=int6464#11,>xmm4=int6464#3
2117# asm 2: movdqa <xmm10=%xmm10,>xmm4=%xmm2
2118movdqa %xmm10,%xmm2
2119
2120# qhasm: xmm5 = xmm12
2121# asm 1: movdqa <xmm12=int6464#13,>xmm5=int6464#4
2122# asm 2: movdqa <xmm12=%xmm12,>xmm5=%xmm3
2123movdqa %xmm12,%xmm3
2124
2125# qhasm: xmm6 = xmm9
2126# asm 1: movdqa <xmm9=int6464#10,>xmm6=int6464#6
2127# asm 2: movdqa <xmm9=%xmm9,>xmm6=%xmm5
2128movdqa %xmm9,%xmm5
2129
2130# qhasm: xmm7 = xmm15
2131# asm 1: movdqa <xmm15=int6464#16,>xmm7=int6464#8
2132# asm 2: movdqa <xmm15=%xmm15,>xmm7=%xmm7
2133movdqa %xmm15,%xmm7
2134
2135# qhasm: xmm4 &= xmm11
2136# asm 1: pand <xmm11=int6464#12,<xmm4=int6464#3
2137# asm 2: pand <xmm11=%xmm11,<xmm4=%xmm2
2138pand %xmm11,%xmm2
2139
2140# qhasm: xmm5 &= xmm8
2141# asm 1: pand <xmm8=int6464#9,<xmm5=int6464#4
2142# asm 2: pand <xmm8=%xmm8,<xmm5=%xmm3
2143pand %xmm8,%xmm3
2144
2145# qhasm: xmm6 &= xmm13
2146# asm 1: pand <xmm13=int6464#14,<xmm6=int6464#6
2147# asm 2: pand <xmm13=%xmm13,<xmm6=%xmm5
2148pand %xmm13,%xmm5
2149
2150# qhasm: xmm7 |= xmm14
2151# asm 1: por <xmm14=int6464#15,<xmm7=int6464#8
2152# asm 2: por <xmm14=%xmm14,<xmm7=%xmm7
2153por %xmm14,%xmm7
2154
2155# qhasm: xmm3 ^= xmm4
2156# asm 1: pxor <xmm4=int6464#3,<xmm3=int6464#1
2157# asm 2: pxor <xmm4=%xmm2,<xmm3=%xmm0
2158pxor %xmm2,%xmm0
2159
2160# qhasm: xmm2 ^= xmm5
2161# asm 1: pxor <xmm5=int6464#4,<xmm2=int6464#2
2162# asm 2: pxor <xmm5=%xmm3,<xmm2=%xmm1
2163pxor %xmm3,%xmm1
2164
2165# qhasm: xmm1 ^= xmm6
2166# asm 1: pxor <xmm6=int6464#6,<xmm1=int6464#5
2167# asm 2: pxor <xmm6=%xmm5,<xmm1=%xmm4
2168pxor %xmm5,%xmm4
2169
2170# qhasm: xmm0 ^= xmm7
2171# asm 1: pxor <xmm7=int6464#8,<xmm0=int6464#7
2172# asm 2: pxor <xmm7=%xmm7,<xmm0=%xmm6
2173pxor %xmm7,%xmm6
2174
2175# qhasm: xmm4 = xmm3
2176# asm 1: movdqa <xmm3=int6464#1,>xmm4=int6464#3
2177# asm 2: movdqa <xmm3=%xmm0,>xmm4=%xmm2
2178movdqa %xmm0,%xmm2
2179
2180# qhasm: xmm4 ^= xmm2
2181# asm 1: pxor <xmm2=int6464#2,<xmm4=int6464#3
2182# asm 2: pxor <xmm2=%xmm1,<xmm4=%xmm2
2183pxor %xmm1,%xmm2
2184
2185# qhasm: xmm3 &= xmm1
2186# asm 1: pand <xmm1=int6464#5,<xmm3=int6464#1
2187# asm 2: pand <xmm1=%xmm4,<xmm3=%xmm0
2188pand %xmm4,%xmm0
2189
2190# qhasm: xmm6 = xmm0
2191# asm 1: movdqa <xmm0=int6464#7,>xmm6=int6464#4
2192# asm 2: movdqa <xmm0=%xmm6,>xmm6=%xmm3
2193movdqa %xmm6,%xmm3
2194
2195# qhasm: xmm6 ^= xmm3
2196# asm 1: pxor <xmm3=int6464#1,<xmm6=int6464#4
2197# asm 2: pxor <xmm3=%xmm0,<xmm6=%xmm3
2198pxor %xmm0,%xmm3
2199
2200# qhasm: xmm7 = xmm4
2201# asm 1: movdqa <xmm4=int6464#3,>xmm7=int6464#6
2202# asm 2: movdqa <xmm4=%xmm2,>xmm7=%xmm5
2203movdqa %xmm2,%xmm5
2204
2205# qhasm: xmm7 &= xmm6
2206# asm 1: pand <xmm6=int6464#4,<xmm7=int6464#6
2207# asm 2: pand <xmm6=%xmm3,<xmm7=%xmm5
2208pand %xmm3,%xmm5
2209
2210# qhasm: xmm7 ^= xmm2
2211# asm 1: pxor <xmm2=int6464#2,<xmm7=int6464#6
2212# asm 2: pxor <xmm2=%xmm1,<xmm7=%xmm5
2213pxor %xmm1,%xmm5
2214
2215# qhasm: xmm5 = xmm1
2216# asm 1: movdqa <xmm1=int6464#5,>xmm5=int6464#8
2217# asm 2: movdqa <xmm1=%xmm4,>xmm5=%xmm7
2218movdqa %xmm4,%xmm7
2219
2220# qhasm: xmm5 ^= xmm0
2221# asm 1: pxor <xmm0=int6464#7,<xmm5=int6464#8
2222# asm 2: pxor <xmm0=%xmm6,<xmm5=%xmm7
2223pxor %xmm6,%xmm7
2224
2225# qhasm: xmm3 ^= xmm2
2226# asm 1: pxor <xmm2=int6464#2,<xmm3=int6464#1
2227# asm 2: pxor <xmm2=%xmm1,<xmm3=%xmm0
2228pxor %xmm1,%xmm0
2229
2230# qhasm: xmm5 &= xmm3
2231# asm 1: pand <xmm3=int6464#1,<xmm5=int6464#8
2232# asm 2: pand <xmm3=%xmm0,<xmm5=%xmm7
2233pand %xmm0,%xmm7
2234
2235# qhasm: xmm5 ^= xmm0
2236# asm 1: pxor <xmm0=int6464#7,<xmm5=int6464#8
2237# asm 2: pxor <xmm0=%xmm6,<xmm5=%xmm7
2238pxor %xmm6,%xmm7
2239
2240# qhasm: xmm1 ^= xmm5
2241# asm 1: pxor <xmm5=int6464#8,<xmm1=int6464#5
2242# asm 2: pxor <xmm5=%xmm7,<xmm1=%xmm4
2243pxor %xmm7,%xmm4
2244
2245# qhasm: xmm2 = xmm6
2246# asm 1: movdqa <xmm6=int6464#4,>xmm2=int6464#1
2247# asm 2: movdqa <xmm6=%xmm3,>xmm2=%xmm0
2248movdqa %xmm3,%xmm0
2249
2250# qhasm: xmm2 ^= xmm5
2251# asm 1: pxor <xmm5=int6464#8,<xmm2=int6464#1
2252# asm 2: pxor <xmm5=%xmm7,<xmm2=%xmm0
2253pxor %xmm7,%xmm0
2254
2255# qhasm: xmm2 &= xmm0
2256# asm 1: pand <xmm0=int6464#7,<xmm2=int6464#1
2257# asm 2: pand <xmm0=%xmm6,<xmm2=%xmm0
2258pand %xmm6,%xmm0
2259
2260# qhasm: xmm1 ^= xmm2
2261# asm 1: pxor <xmm2=int6464#1,<xmm1=int6464#5
2262# asm 2: pxor <xmm2=%xmm0,<xmm1=%xmm4
2263pxor %xmm0,%xmm4
2264
2265# qhasm: xmm6 ^= xmm2
2266# asm 1: pxor <xmm2=int6464#1,<xmm6=int6464#4
2267# asm 2: pxor <xmm2=%xmm0,<xmm6=%xmm3
2268pxor %xmm0,%xmm3
2269
2270# qhasm: xmm6 &= xmm7
2271# asm 1: pand <xmm7=int6464#6,<xmm6=int6464#4
2272# asm 2: pand <xmm7=%xmm5,<xmm6=%xmm3
2273pand %xmm5,%xmm3
2274
2275# qhasm: xmm6 ^= xmm4
2276# asm 1: pxor <xmm4=int6464#3,<xmm6=int6464#4
2277# asm 2: pxor <xmm4=%xmm2,<xmm6=%xmm3
2278pxor %xmm2,%xmm3
2279
2280# qhasm: xmm4 = xmm14
2281# asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#1
2282# asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm0
2283movdqa %xmm14,%xmm0
2284
2285# qhasm: xmm0 = xmm13
2286# asm 1: movdqa <xmm13=int6464#14,>xmm0=int6464#2
2287# asm 2: movdqa <xmm13=%xmm13,>xmm0=%xmm1
2288movdqa %xmm13,%xmm1
2289
2290# qhasm: xmm2 = xmm7
2291# asm 1: movdqa <xmm7=int6464#6,>xmm2=int6464#3
2292# asm 2: movdqa <xmm7=%xmm5,>xmm2=%xmm2
2293movdqa %xmm5,%xmm2
2294
2295# qhasm: xmm2 ^= xmm6
2296# asm 1: pxor <xmm6=int6464#4,<xmm2=int6464#3
2297# asm 2: pxor <xmm6=%xmm3,<xmm2=%xmm2
2298pxor %xmm3,%xmm2
2299
2300# qhasm: xmm2 &= xmm14
2301# asm 1: pand <xmm14=int6464#15,<xmm2=int6464#3
2302# asm 2: pand <xmm14=%xmm14,<xmm2=%xmm2
2303pand %xmm14,%xmm2
2304
2305# qhasm: xmm14 ^= xmm13
2306# asm 1: pxor <xmm13=int6464#14,<xmm14=int6464#15
2307# asm 2: pxor <xmm13=%xmm13,<xmm14=%xmm14
2308pxor %xmm13,%xmm14
2309
2310# qhasm: xmm14 &= xmm6
2311# asm 1: pand <xmm6=int6464#4,<xmm14=int6464#15
2312# asm 2: pand <xmm6=%xmm3,<xmm14=%xmm14
2313pand %xmm3,%xmm14
2314
2315# qhasm: xmm13 &= xmm7
2316# asm 1: pand <xmm7=int6464#6,<xmm13=int6464#14
2317# asm 2: pand <xmm7=%xmm5,<xmm13=%xmm13
2318pand %xmm5,%xmm13
2319
2320# qhasm: xmm14 ^= xmm13
2321# asm 1: pxor <xmm13=int6464#14,<xmm14=int6464#15
2322# asm 2: pxor <xmm13=%xmm13,<xmm14=%xmm14
2323pxor %xmm13,%xmm14
2324
2325# qhasm: xmm13 ^= xmm2
2326# asm 1: pxor <xmm2=int6464#3,<xmm13=int6464#14
2327# asm 2: pxor <xmm2=%xmm2,<xmm13=%xmm13
2328pxor %xmm2,%xmm13
2329
2330# qhasm: xmm4 ^= xmm8
2331# asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#1
2332# asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm0
2333pxor %xmm8,%xmm0
2334
2335# qhasm: xmm0 ^= xmm11
2336# asm 1: pxor <xmm11=int6464#12,<xmm0=int6464#2
2337# asm 2: pxor <xmm11=%xmm11,<xmm0=%xmm1
2338pxor %xmm11,%xmm1
2339
2340# qhasm: xmm7 ^= xmm5
2341# asm 1: pxor <xmm5=int6464#8,<xmm7=int6464#6
2342# asm 2: pxor <xmm5=%xmm7,<xmm7=%xmm5
2343pxor %xmm7,%xmm5
2344
2345# qhasm: xmm6 ^= xmm1
2346# asm 1: pxor <xmm1=int6464#5,<xmm6=int6464#4
2347# asm 2: pxor <xmm1=%xmm4,<xmm6=%xmm3
2348pxor %xmm4,%xmm3
2349
2350# qhasm: xmm3 = xmm7
2351# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
2352# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
2353movdqa %xmm5,%xmm2
2354
2355# qhasm: xmm3 ^= xmm6
2356# asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3
2357# asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2
2358pxor %xmm3,%xmm2
2359
2360# qhasm: xmm3 &= xmm4
2361# asm 1: pand <xmm4=int6464#1,<xmm3=int6464#3
2362# asm 2: pand <xmm4=%xmm0,<xmm3=%xmm2
2363pand %xmm0,%xmm2
2364
2365# qhasm: xmm4 ^= xmm0
2366# asm 1: pxor <xmm0=int6464#2,<xmm4=int6464#1
2367# asm 2: pxor <xmm0=%xmm1,<xmm4=%xmm0
2368pxor %xmm1,%xmm0
2369
2370# qhasm: xmm4 &= xmm6
2371# asm 1: pand <xmm6=int6464#4,<xmm4=int6464#1
2372# asm 2: pand <xmm6=%xmm3,<xmm4=%xmm0
2373pand %xmm3,%xmm0
2374
2375# qhasm: xmm0 &= xmm7
2376# asm 1: pand <xmm7=int6464#6,<xmm0=int6464#2
2377# asm 2: pand <xmm7=%xmm5,<xmm0=%xmm1
2378pand %xmm5,%xmm1
2379
2380# qhasm: xmm0 ^= xmm4
2381# asm 1: pxor <xmm4=int6464#1,<xmm0=int6464#2
2382# asm 2: pxor <xmm4=%xmm0,<xmm0=%xmm1
2383pxor %xmm0,%xmm1
2384
2385# qhasm: xmm4 ^= xmm3
2386# asm 1: pxor <xmm3=int6464#3,<xmm4=int6464#1
2387# asm 2: pxor <xmm3=%xmm2,<xmm4=%xmm0
2388pxor %xmm2,%xmm0
2389
2390# qhasm: xmm2 = xmm5
2391# asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3
2392# asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2
2393movdqa %xmm7,%xmm2
2394
2395# qhasm: xmm2 ^= xmm1
2396# asm 1: pxor <xmm1=int6464#5,<xmm2=int6464#3
2397# asm 2: pxor <xmm1=%xmm4,<xmm2=%xmm2
2398pxor %xmm4,%xmm2
2399
2400# qhasm: xmm2 &= xmm8
2401# asm 1: pand <xmm8=int6464#9,<xmm2=int6464#3
2402# asm 2: pand <xmm8=%xmm8,<xmm2=%xmm2
2403pand %xmm8,%xmm2
2404
2405# qhasm: xmm8 ^= xmm11
2406# asm 1: pxor <xmm11=int6464#12,<xmm8=int6464#9
2407# asm 2: pxor <xmm11=%xmm11,<xmm8=%xmm8
2408pxor %xmm11,%xmm8
2409
2410# qhasm: xmm8 &= xmm1
2411# asm 1: pand <xmm1=int6464#5,<xmm8=int6464#9
2412# asm 2: pand <xmm1=%xmm4,<xmm8=%xmm8
2413pand %xmm4,%xmm8
2414
2415# qhasm: xmm11 &= xmm5
2416# asm 1: pand <xmm5=int6464#8,<xmm11=int6464#12
2417# asm 2: pand <xmm5=%xmm7,<xmm11=%xmm11
2418pand %xmm7,%xmm11
2419
2420# qhasm: xmm8 ^= xmm11
2421# asm 1: pxor <xmm11=int6464#12,<xmm8=int6464#9
2422# asm 2: pxor <xmm11=%xmm11,<xmm8=%xmm8
2423pxor %xmm11,%xmm8
2424
2425# qhasm: xmm11 ^= xmm2
2426# asm 1: pxor <xmm2=int6464#3,<xmm11=int6464#12
2427# asm 2: pxor <xmm2=%xmm2,<xmm11=%xmm11
2428pxor %xmm2,%xmm11
2429
2430# qhasm: xmm14 ^= xmm4
2431# asm 1: pxor <xmm4=int6464#1,<xmm14=int6464#15
2432# asm 2: pxor <xmm4=%xmm0,<xmm14=%xmm14
2433pxor %xmm0,%xmm14
2434
2435# qhasm: xmm8 ^= xmm4
2436# asm 1: pxor <xmm4=int6464#1,<xmm8=int6464#9
2437# asm 2: pxor <xmm4=%xmm0,<xmm8=%xmm8
2438pxor %xmm0,%xmm8
2439
2440# qhasm: xmm13 ^= xmm0
2441# asm 1: pxor <xmm0=int6464#2,<xmm13=int6464#14
2442# asm 2: pxor <xmm0=%xmm1,<xmm13=%xmm13
2443pxor %xmm1,%xmm13
2444
2445# qhasm: xmm11 ^= xmm0
2446# asm 1: pxor <xmm0=int6464#2,<xmm11=int6464#12
2447# asm 2: pxor <xmm0=%xmm1,<xmm11=%xmm11
2448pxor %xmm1,%xmm11
2449
2450# qhasm: xmm4 = xmm15
2451# asm 1: movdqa <xmm15=int6464#16,>xmm4=int6464#1
2452# asm 2: movdqa <xmm15=%xmm15,>xmm4=%xmm0
2453movdqa %xmm15,%xmm0
2454
2455# qhasm: xmm0 = xmm9
2456# asm 1: movdqa <xmm9=int6464#10,>xmm0=int6464#2
2457# asm 2: movdqa <xmm9=%xmm9,>xmm0=%xmm1
2458movdqa %xmm9,%xmm1
2459
2460# qhasm: xmm4 ^= xmm12
2461# asm 1: pxor <xmm12=int6464#13,<xmm4=int6464#1
2462# asm 2: pxor <xmm12=%xmm12,<xmm4=%xmm0
2463pxor %xmm12,%xmm0
2464
2465# qhasm: xmm0 ^= xmm10
2466# asm 1: pxor <xmm10=int6464#11,<xmm0=int6464#2
2467# asm 2: pxor <xmm10=%xmm10,<xmm0=%xmm1
2468pxor %xmm10,%xmm1
2469
2470# qhasm: xmm3 = xmm7
2471# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
2472# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
2473movdqa %xmm5,%xmm2
2474
2475# qhasm: xmm3 ^= xmm6
2476# asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3
2477# asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2
2478pxor %xmm3,%xmm2
2479
2480# qhasm: xmm3 &= xmm4
2481# asm 1: pand <xmm4=int6464#1,<xmm3=int6464#3
2482# asm 2: pand <xmm4=%xmm0,<xmm3=%xmm2
2483pand %xmm0,%xmm2
2484
2485# qhasm: xmm4 ^= xmm0
2486# asm 1: pxor <xmm0=int6464#2,<xmm4=int6464#1
2487# asm 2: pxor <xmm0=%xmm1,<xmm4=%xmm0
2488pxor %xmm1,%xmm0
2489
2490# qhasm: xmm4 &= xmm6
2491# asm 1: pand <xmm6=int6464#4,<xmm4=int6464#1
2492# asm 2: pand <xmm6=%xmm3,<xmm4=%xmm0
2493pand %xmm3,%xmm0
2494
2495# qhasm: xmm0 &= xmm7
2496# asm 1: pand <xmm7=int6464#6,<xmm0=int6464#2
2497# asm 2: pand <xmm7=%xmm5,<xmm0=%xmm1
2498pand %xmm5,%xmm1
2499
2500# qhasm: xmm0 ^= xmm4
2501# asm 1: pxor <xmm4=int6464#1,<xmm0=int6464#2
2502# asm 2: pxor <xmm4=%xmm0,<xmm0=%xmm1
2503pxor %xmm0,%xmm1
2504
2505# qhasm: xmm4 ^= xmm3
2506# asm 1: pxor <xmm3=int6464#3,<xmm4=int6464#1
2507# asm 2: pxor <xmm3=%xmm2,<xmm4=%xmm0
2508pxor %xmm2,%xmm0
2509
2510# qhasm: xmm2 = xmm5
2511# asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3
2512# asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2
2513movdqa %xmm7,%xmm2
2514
2515# qhasm: xmm2 ^= xmm1
2516# asm 1: pxor <xmm1=int6464#5,<xmm2=int6464#3
2517# asm 2: pxor <xmm1=%xmm4,<xmm2=%xmm2
2518pxor %xmm4,%xmm2
2519
2520# qhasm: xmm2 &= xmm12
2521# asm 1: pand <xmm12=int6464#13,<xmm2=int6464#3
2522# asm 2: pand <xmm12=%xmm12,<xmm2=%xmm2
2523pand %xmm12,%xmm2
2524
2525# qhasm: xmm12 ^= xmm10
2526# asm 1: pxor <xmm10=int6464#11,<xmm12=int6464#13
2527# asm 2: pxor <xmm10=%xmm10,<xmm12=%xmm12
2528pxor %xmm10,%xmm12
2529
2530# qhasm: xmm12 &= xmm1
2531# asm 1: pand <xmm1=int6464#5,<xmm12=int6464#13
2532# asm 2: pand <xmm1=%xmm4,<xmm12=%xmm12
2533pand %xmm4,%xmm12
2534
2535# qhasm: xmm10 &= xmm5
2536# asm 1: pand <xmm5=int6464#8,<xmm10=int6464#11
2537# asm 2: pand <xmm5=%xmm7,<xmm10=%xmm10
2538pand %xmm7,%xmm10
2539
2540# qhasm: xmm12 ^= xmm10
2541# asm 1: pxor <xmm10=int6464#11,<xmm12=int6464#13
2542# asm 2: pxor <xmm10=%xmm10,<xmm12=%xmm12
2543pxor %xmm10,%xmm12
2544
2545# qhasm: xmm10 ^= xmm2
2546# asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#11
2547# asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm10
2548pxor %xmm2,%xmm10
2549
2550# qhasm: xmm7 ^= xmm5
2551# asm 1: pxor <xmm5=int6464#8,<xmm7=int6464#6
2552# asm 2: pxor <xmm5=%xmm7,<xmm7=%xmm5
2553pxor %xmm7,%xmm5
2554
2555# qhasm: xmm6 ^= xmm1
2556# asm 1: pxor <xmm1=int6464#5,<xmm6=int6464#4
2557# asm 2: pxor <xmm1=%xmm4,<xmm6=%xmm3
2558pxor %xmm4,%xmm3
2559
2560# qhasm: xmm3 = xmm7
2561# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
2562# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
2563movdqa %xmm5,%xmm2
2564
2565# qhasm: xmm3 ^= xmm6
2566# asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3
2567# asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2
2568pxor %xmm3,%xmm2
2569
2570# qhasm: xmm3 &= xmm15
2571# asm 1: pand <xmm15=int6464#16,<xmm3=int6464#3
2572# asm 2: pand <xmm15=%xmm15,<xmm3=%xmm2
2573pand %xmm15,%xmm2
2574
2575# qhasm: xmm15 ^= xmm9
2576# asm 1: pxor <xmm9=int6464#10,<xmm15=int6464#16
2577# asm 2: pxor <xmm9=%xmm9,<xmm15=%xmm15
2578pxor %xmm9,%xmm15
2579
2580# qhasm: xmm15 &= xmm6
2581# asm 1: pand <xmm6=int6464#4,<xmm15=int6464#16
2582# asm 2: pand <xmm6=%xmm3,<xmm15=%xmm15
2583pand %xmm3,%xmm15
2584
2585# qhasm: xmm9 &= xmm7
2586# asm 1: pand <xmm7=int6464#6,<xmm9=int6464#10
2587# asm 2: pand <xmm7=%xmm5,<xmm9=%xmm9
2588pand %xmm5,%xmm9
2589
2590# qhasm: xmm15 ^= xmm9
2591# asm 1: pxor <xmm9=int6464#10,<xmm15=int6464#16
2592# asm 2: pxor <xmm9=%xmm9,<xmm15=%xmm15
2593pxor %xmm9,%xmm15
2594
2595# qhasm: xmm9 ^= xmm3
2596# asm 1: pxor <xmm3=int6464#3,<xmm9=int6464#10
2597# asm 2: pxor <xmm3=%xmm2,<xmm9=%xmm9
2598pxor %xmm2,%xmm9
2599
2600# qhasm: xmm15 ^= xmm4
2601# asm 1: pxor <xmm4=int6464#1,<xmm15=int6464#16
2602# asm 2: pxor <xmm4=%xmm0,<xmm15=%xmm15
2603pxor %xmm0,%xmm15
2604
2605# qhasm: xmm12 ^= xmm4
2606# asm 1: pxor <xmm4=int6464#1,<xmm12=int6464#13
2607# asm 2: pxor <xmm4=%xmm0,<xmm12=%xmm12
2608pxor %xmm0,%xmm12
2609
2610# qhasm: xmm9 ^= xmm0
2611# asm 1: pxor <xmm0=int6464#2,<xmm9=int6464#10
2612# asm 2: pxor <xmm0=%xmm1,<xmm9=%xmm9
2613pxor %xmm1,%xmm9
2614
2615# qhasm: xmm10 ^= xmm0
2616# asm 1: pxor <xmm0=int6464#2,<xmm10=int6464#11
2617# asm 2: pxor <xmm0=%xmm1,<xmm10=%xmm10
2618pxor %xmm1,%xmm10
2619
2620# qhasm: xmm15 ^= xmm8
2621# asm 1: pxor <xmm8=int6464#9,<xmm15=int6464#16
2622# asm 2: pxor <xmm8=%xmm8,<xmm15=%xmm15
2623pxor %xmm8,%xmm15
2624
2625# qhasm: xmm9 ^= xmm14
2626# asm 1: pxor <xmm14=int6464#15,<xmm9=int6464#10
2627# asm 2: pxor <xmm14=%xmm14,<xmm9=%xmm9
2628pxor %xmm14,%xmm9
2629
2630# qhasm: xmm12 ^= xmm15
2631# asm 1: pxor <xmm15=int6464#16,<xmm12=int6464#13
2632# asm 2: pxor <xmm15=%xmm15,<xmm12=%xmm12
2633pxor %xmm15,%xmm12
2634
2635# qhasm: xmm14 ^= xmm8
2636# asm 1: pxor <xmm8=int6464#9,<xmm14=int6464#15
2637# asm 2: pxor <xmm8=%xmm8,<xmm14=%xmm14
2638pxor %xmm8,%xmm14
2639
2640# qhasm: xmm8 ^= xmm9
2641# asm 1: pxor <xmm9=int6464#10,<xmm8=int6464#9
2642# asm 2: pxor <xmm9=%xmm9,<xmm8=%xmm8
2643pxor %xmm9,%xmm8
2644
2645# qhasm: xmm9 ^= xmm13
2646# asm 1: pxor <xmm13=int6464#14,<xmm9=int6464#10
2647# asm 2: pxor <xmm13=%xmm13,<xmm9=%xmm9
2648pxor %xmm13,%xmm9
2649
2650# qhasm: xmm13 ^= xmm10
2651# asm 1: pxor <xmm10=int6464#11,<xmm13=int6464#14
2652# asm 2: pxor <xmm10=%xmm10,<xmm13=%xmm13
2653pxor %xmm10,%xmm13
2654
2655# qhasm: xmm12 ^= xmm13
2656# asm 1: pxor <xmm13=int6464#14,<xmm12=int6464#13
2657# asm 2: pxor <xmm13=%xmm13,<xmm12=%xmm12
2658pxor %xmm13,%xmm12
2659
2660# qhasm: xmm10 ^= xmm11
2661# asm 1: pxor <xmm11=int6464#12,<xmm10=int6464#11
2662# asm 2: pxor <xmm11=%xmm11,<xmm10=%xmm10
2663pxor %xmm11,%xmm10
2664
2665# qhasm: xmm11 ^= xmm13
2666# asm 1: pxor <xmm13=int6464#14,<xmm11=int6464#12
2667# asm 2: pxor <xmm13=%xmm13,<xmm11=%xmm11
2668pxor %xmm13,%xmm11
2669
2670# qhasm: xmm14 ^= xmm11
2671# asm 1: pxor <xmm11=int6464#12,<xmm14=int6464#15
2672# asm 2: pxor <xmm11=%xmm11,<xmm14=%xmm14
2673pxor %xmm11,%xmm14
2674
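# Note: MixColumns starts here. The four dwords of each slice evidently
# line up with the four rows of the state, so pshufd $0x93 rotates the
# rows by one position; the pxor cascade below then combines every slice
# with its rotation. Multiplication by 02, which MixColumns needs, is
# free in bitsliced form: it renumbers the slices and XORs the top slice
# back into slices 0, 1, 3 and 4, the set bits of the AES reduction
# polynomial (visible below as the extra XORs from the slice-7 register).
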
2675# qhasm: xmm0 = shuffle dwords of xmm8 by 0x93
2676# asm 1: pshufd $0x93,<xmm8=int6464#9,>xmm0=int6464#1
2677# asm 2: pshufd $0x93,<xmm8=%xmm8,>xmm0=%xmm0
2678pshufd $0x93,%xmm8,%xmm0
2679
2680# qhasm: xmm1 = shuffle dwords of xmm9 by 0x93
2681# asm 1: pshufd $0x93,<xmm9=int6464#10,>xmm1=int6464#2
2682# asm 2: pshufd $0x93,<xmm9=%xmm9,>xmm1=%xmm1
2683pshufd $0x93,%xmm9,%xmm1
2684
2685# qhasm: xmm2 = shuffle dwords of xmm12 by 0x93
2686# asm 1: pshufd $0x93,<xmm12=int6464#13,>xmm2=int6464#3
2687# asm 2: pshufd $0x93,<xmm12=%xmm12,>xmm2=%xmm2
2688pshufd $0x93,%xmm12,%xmm2
2689
2690# qhasm: xmm3 = shuffle dwords of xmm14 by 0x93
2691# asm 1: pshufd $0x93,<xmm14=int6464#15,>xmm3=int6464#4
2692# asm 2: pshufd $0x93,<xmm14=%xmm14,>xmm3=%xmm3
2693pshufd $0x93,%xmm14,%xmm3
2694
2695# qhasm: xmm4 = shuffle dwords of xmm11 by 0x93
2696# asm 1: pshufd $0x93,<xmm11=int6464#12,>xmm4=int6464#5
2697# asm 2: pshufd $0x93,<xmm11=%xmm11,>xmm4=%xmm4
2698pshufd $0x93,%xmm11,%xmm4
2699
2700# qhasm: xmm5 = shuffle dwords of xmm15 by 0x93
2701# asm 1: pshufd $0x93,<xmm15=int6464#16,>xmm5=int6464#6
2702# asm 2: pshufd $0x93,<xmm15=%xmm15,>xmm5=%xmm5
2703pshufd $0x93,%xmm15,%xmm5
2704
2705# qhasm: xmm6 = shuffle dwords of xmm10 by 0x93
2706# asm 1: pshufd $0x93,<xmm10=int6464#11,>xmm6=int6464#7
2707# asm 2: pshufd $0x93,<xmm10=%xmm10,>xmm6=%xmm6
2708pshufd $0x93,%xmm10,%xmm6
2709
2710# qhasm: xmm7 = shuffle dwords of xmm13 by 0x93
2711# asm 1: pshufd $0x93,<xmm13=int6464#14,>xmm7=int6464#8
2712# asm 2: pshufd $0x93,<xmm13=%xmm13,>xmm7=%xmm7
2713pshufd $0x93,%xmm13,%xmm7
2714
2715# qhasm: xmm8 ^= xmm0
2716# asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9
2717# asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8
2718pxor %xmm0,%xmm8
2719
2720# qhasm: xmm9 ^= xmm1
2721# asm 1: pxor <xmm1=int6464#2,<xmm9=int6464#10
2722# asm 2: pxor <xmm1=%xmm1,<xmm9=%xmm9
2723pxor %xmm1,%xmm9
2724
2725# qhasm: xmm12 ^= xmm2
2726# asm 1: pxor <xmm2=int6464#3,<xmm12=int6464#13
2727# asm 2: pxor <xmm2=%xmm2,<xmm12=%xmm12
2728pxor %xmm2,%xmm12
2729
2730# qhasm: xmm14 ^= xmm3
2731# asm 1: pxor <xmm3=int6464#4,<xmm14=int6464#15
2732# asm 2: pxor <xmm3=%xmm3,<xmm14=%xmm14
2733pxor %xmm3,%xmm14
2734
2735# qhasm: xmm11 ^= xmm4
2736# asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#12
2737# asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm11
2738pxor %xmm4,%xmm11
2739
2740# qhasm: xmm15 ^= xmm5
2741# asm 1: pxor <xmm5=int6464#6,<xmm15=int6464#16
2742# asm 2: pxor <xmm5=%xmm5,<xmm15=%xmm15
2743pxor %xmm5,%xmm15
2744
2745# qhasm: xmm10 ^= xmm6
2746# asm 1: pxor <xmm6=int6464#7,<xmm10=int6464#11
2747# asm 2: pxor <xmm6=%xmm6,<xmm10=%xmm10
2748pxor %xmm6,%xmm10
2749
2750# qhasm: xmm13 ^= xmm7
2751# asm 1: pxor <xmm7=int6464#8,<xmm13=int6464#14
2752# asm 2: pxor <xmm7=%xmm7,<xmm13=%xmm13
2753pxor %xmm7,%xmm13
2754
2755# qhasm: xmm0 ^= xmm13
2756# asm 1: pxor <xmm13=int6464#14,<xmm0=int6464#1
2757# asm 2: pxor <xmm13=%xmm13,<xmm0=%xmm0
2758pxor %xmm13,%xmm0
2759
2760# qhasm: xmm1 ^= xmm8
2761# asm 1: pxor <xmm8=int6464#9,<xmm1=int6464#2
2762# asm 2: pxor <xmm8=%xmm8,<xmm1=%xmm1
2763pxor %xmm8,%xmm1
2764
2765# qhasm: xmm2 ^= xmm9
2766# asm 1: pxor <xmm9=int6464#10,<xmm2=int6464#3
2767# asm 2: pxor <xmm9=%xmm9,<xmm2=%xmm2
2768pxor %xmm9,%xmm2
2769
2770# qhasm: xmm1 ^= xmm13
2771# asm 1: pxor <xmm13=int6464#14,<xmm1=int6464#2
2772# asm 2: pxor <xmm13=%xmm13,<xmm1=%xmm1
2773pxor %xmm13,%xmm1
2774
2775# qhasm: xmm3 ^= xmm12
2776# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4
2777# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3
2778pxor %xmm12,%xmm3
2779
2780# qhasm: xmm4 ^= xmm14
2781# asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5
2782# asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4
2783pxor %xmm14,%xmm4
2784
2785# qhasm: xmm5 ^= xmm11
2786# asm 1: pxor <xmm11=int6464#12,<xmm5=int6464#6
2787# asm 2: pxor <xmm11=%xmm11,<xmm5=%xmm5
2788pxor %xmm11,%xmm5
2789
2790# qhasm: xmm3 ^= xmm13
2791# asm 1: pxor <xmm13=int6464#14,<xmm3=int6464#4
2792# asm 2: pxor <xmm13=%xmm13,<xmm3=%xmm3
2793pxor %xmm13,%xmm3
2794
2795# qhasm: xmm6 ^= xmm15
2796# asm 1: pxor <xmm15=int6464#16,<xmm6=int6464#7
2797# asm 2: pxor <xmm15=%xmm15,<xmm6=%xmm6
2798pxor %xmm15,%xmm6
2799
2800# qhasm: xmm7 ^= xmm10
2801# asm 1: pxor <xmm10=int6464#11,<xmm7=int6464#8
2802# asm 2: pxor <xmm10=%xmm10,<xmm7=%xmm7
2803pxor %xmm10,%xmm7
2804
2805# qhasm: xmm4 ^= xmm13
2806# asm 1: pxor <xmm13=int6464#14,<xmm4=int6464#5
2807# asm 2: pxor <xmm13=%xmm13,<xmm4=%xmm4
2808pxor %xmm13,%xmm4
2809
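# Note: at this point xmm8..xmm15 hold a ^ rot(a) for each slice; the
# pshufd $0x4E shuffles rotate those by two more dwords, and the XORs
# that follow fold in the resulting rot^2 and rot^3 terms, finishing
# MixColumns for this round.
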
2810# qhasm: xmm8 = shuffle dwords of xmm8 by 0x4E
2811# asm 1: pshufd $0x4E,<xmm8=int6464#9,>xmm8=int6464#9
2812# asm 2: pshufd $0x4E,<xmm8=%xmm8,>xmm8=%xmm8
2813pshufd $0x4E,%xmm8,%xmm8
2814
2815# qhasm: xmm9 = shuffle dwords of xmm9 by 0x4E
2816# asm 1: pshufd $0x4E,<xmm9=int6464#10,>xmm9=int6464#10
2817# asm 2: pshufd $0x4E,<xmm9=%xmm9,>xmm9=%xmm9
2818pshufd $0x4E,%xmm9,%xmm9
2819
2820# qhasm: xmm12 = shuffle dwords of xmm12 by 0x4E
2821# asm 1: pshufd $0x4E,<xmm12=int6464#13,>xmm12=int6464#13
2822# asm 2: pshufd $0x4E,<xmm12=%xmm12,>xmm12=%xmm12
2823pshufd $0x4E,%xmm12,%xmm12
2824
2825# qhasm: xmm14 = shuffle dwords of xmm14 by 0x4E
2826# asm 1: pshufd $0x4E,<xmm14=int6464#15,>xmm14=int6464#15
2827# asm 2: pshufd $0x4E,<xmm14=%xmm14,>xmm14=%xmm14
2828pshufd $0x4E,%xmm14,%xmm14
2829
2830# qhasm: xmm11 = shuffle dwords of xmm11 by 0x4E
2831# asm 1: pshufd $0x4E,<xmm11=int6464#12,>xmm11=int6464#12
2832# asm 2: pshufd $0x4E,<xmm11=%xmm11,>xmm11=%xmm11
2833pshufd $0x4E,%xmm11,%xmm11
2834
2835# qhasm: xmm15 = shuffle dwords of xmm15 by 0x4E
2836# asm 1: pshufd $0x4E,<xmm15=int6464#16,>xmm15=int6464#16
2837# asm 2: pshufd $0x4E,<xmm15=%xmm15,>xmm15=%xmm15
2838pshufd $0x4E,%xmm15,%xmm15
2839
2840# qhasm: xmm10 = shuffle dwords of xmm10 by 0x4E
2841# asm 1: pshufd $0x4E,<xmm10=int6464#11,>xmm10=int6464#11
2842# asm 2: pshufd $0x4E,<xmm10=%xmm10,>xmm10=%xmm10
2843pshufd $0x4E,%xmm10,%xmm10
2844
2845# qhasm: xmm13 = shuffle dwords of xmm13 by 0x4E
2846# asm 1: pshufd $0x4E,<xmm13=int6464#14,>xmm13=int6464#14
2847# asm 2: pshufd $0x4E,<xmm13=%xmm13,>xmm13=%xmm13
2848pshufd $0x4E,%xmm13,%xmm13
2849
2850# qhasm: xmm0 ^= xmm8
2851# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
2852# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
2853pxor %xmm8,%xmm0
2854
2855# qhasm: xmm1 ^= xmm9
2856# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
2857# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
2858pxor %xmm9,%xmm1
2859
2860# qhasm: xmm2 ^= xmm12
2861# asm 1: pxor <xmm12=int6464#13,<xmm2=int6464#3
2862# asm 2: pxor <xmm12=%xmm12,<xmm2=%xmm2
2863pxor %xmm12,%xmm2
2864
2865# qhasm: xmm3 ^= xmm14
2866# asm 1: pxor <xmm14=int6464#15,<xmm3=int6464#4
2867# asm 2: pxor <xmm14=%xmm14,<xmm3=%xmm3
2868pxor %xmm14,%xmm3
2869
2870# qhasm: xmm4 ^= xmm11
2871# asm 1: pxor <xmm11=int6464#12,<xmm4=int6464#5
2872# asm 2: pxor <xmm11=%xmm11,<xmm4=%xmm4
2873pxor %xmm11,%xmm4
2874
2875# qhasm: xmm5 ^= xmm15
2876# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
2877# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
2878pxor %xmm15,%xmm5
2879
2880# qhasm: xmm6 ^= xmm10
2881# asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7
2882# asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6
2883pxor %xmm10,%xmm6
2884
2885# qhasm: xmm7 ^= xmm13
2886# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
2887# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
2888pxor %xmm13,%xmm7
2889
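# Note: next round. The same eight pxor/pshufb pairs apply AddRoundKey
# with the key slices at offsets 256..368 from c, followed by ShiftRows
# via the SR shuffle, this time leaving the state in xmm0..xmm7.
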
2890# qhasm: xmm0 ^= *(int128 *)(c + 256)
2891# asm 1: pxor 256(<c=int64#4),<xmm0=int6464#1
2892# asm 2: pxor 256(<c=%rcx),<xmm0=%xmm0
2893pxor 256(%rcx),%xmm0
2894
2895# qhasm: shuffle bytes of xmm0 by SR
2896# asm 1: pshufb SR,<xmm0=int6464#1
2897# asm 2: pshufb SR,<xmm0=%xmm0
2898pshufb SR,%xmm0
2899
2900# qhasm: xmm1 ^= *(int128 *)(c + 272)
2901# asm 1: pxor 272(<c=int64#4),<xmm1=int6464#2
2902# asm 2: pxor 272(<c=%rcx),<xmm1=%xmm1
2903pxor 272(%rcx),%xmm1
2904
2905# qhasm: shuffle bytes of xmm1 by SR
2906# asm 1: pshufb SR,<xmm1=int6464#2
2907# asm 2: pshufb SR,<xmm1=%xmm1
2908pshufb SR,%xmm1
2909
2910# qhasm: xmm2 ^= *(int128 *)(c + 288)
2911# asm 1: pxor 288(<c=int64#4),<xmm2=int6464#3
2912# asm 2: pxor 288(<c=%rcx),<xmm2=%xmm2
2913pxor 288(%rcx),%xmm2
2914
2915# qhasm: shuffle bytes of xmm2 by SR
2916# asm 1: pshufb SR,<xmm2=int6464#3
2917# asm 2: pshufb SR,<xmm2=%xmm2
2918pshufb SR,%xmm2
2919
2920# qhasm: xmm3 ^= *(int128 *)(c + 304)
2921# asm 1: pxor 304(<c=int64#4),<xmm3=int6464#4
2922# asm 2: pxor 304(<c=%rcx),<xmm3=%xmm3
2923pxor 304(%rcx),%xmm3
2924
2925# qhasm: shuffle bytes of xmm3 by SR
2926# asm 1: pshufb SR,<xmm3=int6464#4
2927# asm 2: pshufb SR,<xmm3=%xmm3
2928pshufb SR,%xmm3
2929
2930# qhasm: xmm4 ^= *(int128 *)(c + 320)
2931# asm 1: pxor 320(<c=int64#4),<xmm4=int6464#5
2932# asm 2: pxor 320(<c=%rcx),<xmm4=%xmm4
2933pxor 320(%rcx),%xmm4
2934
2935# qhasm: shuffle bytes of xmm4 by SR
2936# asm 1: pshufb SR,<xmm4=int6464#5
2937# asm 2: pshufb SR,<xmm4=%xmm4
2938pshufb SR,%xmm4
2939
2940# qhasm: xmm5 ^= *(int128 *)(c + 336)
2941# asm 1: pxor 336(<c=int64#4),<xmm5=int6464#6
2942# asm 2: pxor 336(<c=%rcx),<xmm5=%xmm5
2943pxor 336(%rcx),%xmm5
2944
2945# qhasm: shuffle bytes of xmm5 by SR
2946# asm 1: pshufb SR,<xmm5=int6464#6
2947# asm 2: pshufb SR,<xmm5=%xmm5
2948pshufb SR,%xmm5
2949
2950# qhasm: xmm6 ^= *(int128 *)(c + 352)
2951# asm 1: pxor 352(<c=int64#4),<xmm6=int6464#7
2952# asm 2: pxor 352(<c=%rcx),<xmm6=%xmm6
2953pxor 352(%rcx),%xmm6
2954
2955# qhasm: shuffle bytes of xmm6 by SR
2956# asm 1: pshufb SR,<xmm6=int6464#7
2957# asm 2: pshufb SR,<xmm6=%xmm6
2958pshufb SR,%xmm6
2959
2960# qhasm: xmm7 ^= *(int128 *)(c + 368)
2961# asm 1: pxor 368(<c=int64#4),<xmm7=int6464#8
2962# asm 2: pxor 368(<c=%rcx),<xmm7=%xmm7
2963pxor 368(%rcx),%xmm7
2964
2965# qhasm: shuffle bytes of xmm7 by SR
2966# asm 1: pshufb SR,<xmm7=int6464#8
2967# asm 2: pshufb SR,<xmm7=%xmm7
2968pshufb SR,%xmm7
2969
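# Note: S-box input transform again, identical in structure to the one
# above but operating on xmm0..xmm7.
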
2970# qhasm: xmm5 ^= xmm6
2971# asm 1: pxor <xmm6=int6464#7,<xmm5=int6464#6
2972# asm 2: pxor <xmm6=%xmm6,<xmm5=%xmm5
2973pxor %xmm6,%xmm5
2974
2975# qhasm: xmm2 ^= xmm1
2976# asm 1: pxor <xmm1=int6464#2,<xmm2=int6464#3
2977# asm 2: pxor <xmm1=%xmm1,<xmm2=%xmm2
2978pxor %xmm1,%xmm2
2979
2980# qhasm: xmm5 ^= xmm0
2981# asm 1: pxor <xmm0=int6464#1,<xmm5=int6464#6
2982# asm 2: pxor <xmm0=%xmm0,<xmm5=%xmm5
2983pxor %xmm0,%xmm5
2984
2985# qhasm: xmm6 ^= xmm2
2986# asm 1: pxor <xmm2=int6464#3,<xmm6=int6464#7
2987# asm 2: pxor <xmm2=%xmm2,<xmm6=%xmm6
2988pxor %xmm2,%xmm6
2989
2990# qhasm: xmm3 ^= xmm0
2991# asm 1: pxor <xmm0=int6464#1,<xmm3=int6464#4
2992# asm 2: pxor <xmm0=%xmm0,<xmm3=%xmm3
2993pxor %xmm0,%xmm3
2994
2995# qhasm: xmm6 ^= xmm3
2996# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
2997# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
2998pxor %xmm3,%xmm6
2999
3000# qhasm: xmm3 ^= xmm7
3001# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4
3002# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3
3003pxor %xmm7,%xmm3
3004
3005# qhasm: xmm3 ^= xmm4
3006# asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4
3007# asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3
3008pxor %xmm4,%xmm3
3009
3010# qhasm: xmm7 ^= xmm5
3011# asm 1: pxor <xmm5=int6464#6,<xmm7=int6464#8
3012# asm 2: pxor <xmm5=%xmm5,<xmm7=%xmm7
3013pxor %xmm5,%xmm7
3014
3015# qhasm: xmm3 ^= xmm1
3016# asm 1: pxor <xmm1=int6464#2,<xmm3=int6464#4
3017# asm 2: pxor <xmm1=%xmm1,<xmm3=%xmm3
3018pxor %xmm1,%xmm3
3019
3020# qhasm: xmm4 ^= xmm5
3021# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
3022# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
3023pxor %xmm5,%xmm4
3024
3025# qhasm: xmm2 ^= xmm7
3026# asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3
3027# asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2
3028pxor %xmm7,%xmm2
3029
3030# qhasm: xmm1 ^= xmm5
3031# asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2
3032# asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1
3033pxor %xmm5,%xmm1
3034
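# Note: nonlinear S-box core for this round; the circuit is the same as
# above, with xmm8..xmm15 now serving as the temporaries.
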
3035# qhasm: xmm11 = xmm7
3036# asm 1: movdqa <xmm7=int6464#8,>xmm11=int6464#9
3037# asm 2: movdqa <xmm7=%xmm7,>xmm11=%xmm8
3038movdqa %xmm7,%xmm8
3039
3040# qhasm: xmm10 = xmm1
3041# asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10
3042# asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9
3043movdqa %xmm1,%xmm9
3044
3045# qhasm: xmm9 = xmm5
3046# asm 1: movdqa <xmm5=int6464#6,>xmm9=int6464#11
3047# asm 2: movdqa <xmm5=%xmm5,>xmm9=%xmm10
3048movdqa %xmm5,%xmm10
3049
3050# qhasm: xmm13 = xmm2
3051# asm 1: movdqa <xmm2=int6464#3,>xmm13=int6464#12
3052# asm 2: movdqa <xmm2=%xmm2,>xmm13=%xmm11
3053movdqa %xmm2,%xmm11
3054
3055# qhasm: xmm12 = xmm6
3056# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#13
3057# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm12
3058movdqa %xmm6,%xmm12
3059
3060# qhasm: xmm11 ^= xmm4
3061# asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#9
3062# asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm8
3063pxor %xmm4,%xmm8
3064
3065# qhasm: xmm10 ^= xmm2
3066# asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#10
3067# asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm9
3068pxor %xmm2,%xmm9
3069
3070# qhasm: xmm9 ^= xmm3
3071# asm 1: pxor <xmm3=int6464#4,<xmm9=int6464#11
3072# asm 2: pxor <xmm3=%xmm3,<xmm9=%xmm10
3073pxor %xmm3,%xmm10
3074
3075# qhasm: xmm13 ^= xmm4
3076# asm 1: pxor <xmm4=int6464#5,<xmm13=int6464#12
3077# asm 2: pxor <xmm4=%xmm4,<xmm13=%xmm11
3078pxor %xmm4,%xmm11
3079
3080# qhasm: xmm12 ^= xmm0
3081# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13
3082# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12
3083pxor %xmm0,%xmm12
3084
3085# qhasm: xmm14 = xmm11
3086# asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14
3087# asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13
3088movdqa %xmm8,%xmm13
3089
3090# qhasm: xmm8 = xmm10
3091# asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15
3092# asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14
3093movdqa %xmm9,%xmm14
3094
3095# qhasm: xmm15 = xmm11
3096# asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16
3097# asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15
3098movdqa %xmm8,%xmm15
3099
3100# qhasm: xmm10 |= xmm9
3101# asm 1: por <xmm9=int6464#11,<xmm10=int6464#10
3102# asm 2: por <xmm9=%xmm10,<xmm10=%xmm9
3103por %xmm10,%xmm9
3104
3105# qhasm: xmm11 |= xmm12
3106# asm 1: por <xmm12=int6464#13,<xmm11=int6464#9
3107# asm 2: por <xmm12=%xmm12,<xmm11=%xmm8
3108por %xmm12,%xmm8
3109
3110# qhasm: xmm15 ^= xmm8
3111# asm 1: pxor <xmm8=int6464#15,<xmm15=int6464#16
3112# asm 2: pxor <xmm8=%xmm14,<xmm15=%xmm15
3113pxor %xmm14,%xmm15
3114
3115# qhasm: xmm14 &= xmm12
3116# asm 1: pand <xmm12=int6464#13,<xmm14=int6464#14
3117# asm 2: pand <xmm12=%xmm12,<xmm14=%xmm13
3118pand %xmm12,%xmm13
3119
3120# qhasm: xmm8 &= xmm9
3121# asm 1: pand <xmm9=int6464#11,<xmm8=int6464#15
3122# asm 2: pand <xmm9=%xmm10,<xmm8=%xmm14
3123pand %xmm10,%xmm14
3124
3125# qhasm: xmm12 ^= xmm9
3126# asm 1: pxor <xmm9=int6464#11,<xmm12=int6464#13
3127# asm 2: pxor <xmm9=%xmm10,<xmm12=%xmm12
3128pxor %xmm10,%xmm12
3129
3130# qhasm: xmm15 &= xmm12
3131# asm 1: pand <xmm12=int6464#13,<xmm15=int6464#16
3132# asm 2: pand <xmm12=%xmm12,<xmm15=%xmm15
3133pand %xmm12,%xmm15
3134
3135# qhasm: xmm12 = xmm3
3136# asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#11
3137# asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm10
3138movdqa %xmm3,%xmm10
3139
3140# qhasm: xmm12 ^= xmm0
3141# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#11
3142# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm10
3143pxor %xmm0,%xmm10
3144
3145# qhasm: xmm13 &= xmm12
3146# asm 1: pand <xmm12=int6464#11,<xmm13=int6464#12
3147# asm 2: pand <xmm12=%xmm10,<xmm13=%xmm11
3148pand %xmm10,%xmm11
3149
3150# qhasm: xmm11 ^= xmm13
3151# asm 1: pxor <xmm13=int6464#12,<xmm11=int6464#9
3152# asm 2: pxor <xmm13=%xmm11,<xmm11=%xmm8
3153pxor %xmm11,%xmm8
3154
3155# qhasm: xmm10 ^= xmm13
3156# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
3157# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
3158pxor %xmm11,%xmm9
3159
3160# qhasm: xmm13 = xmm7
3161# asm 1: movdqa <xmm7=int6464#8,>xmm13=int6464#11
3162# asm 2: movdqa <xmm7=%xmm7,>xmm13=%xmm10
3163movdqa %xmm7,%xmm10
3164
3165# qhasm: xmm13 ^= xmm1
3166# asm 1: pxor <xmm1=int6464#2,<xmm13=int6464#11
3167# asm 2: pxor <xmm1=%xmm1,<xmm13=%xmm10
3168pxor %xmm1,%xmm10
3169
3170# qhasm: xmm12 = xmm5
3171# asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#12
3172# asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm11
3173movdqa %xmm5,%xmm11
3174
3175# qhasm: xmm9 = xmm13
3176# asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13
3177# asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12
3178movdqa %xmm10,%xmm12
3179
3180# qhasm: xmm12 ^= xmm6
3181# asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#12
3182# asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm11
3183pxor %xmm6,%xmm11
3184
3185# qhasm: xmm9 |= xmm12
3186# asm 1: por <xmm12=int6464#12,<xmm9=int6464#13
3187# asm 2: por <xmm12=%xmm11,<xmm9=%xmm12
3188por %xmm11,%xmm12
3189
3190# qhasm: xmm13 &= xmm12
3191# asm 1: pand <xmm12=int6464#12,<xmm13=int6464#11
3192# asm 2: pand <xmm12=%xmm11,<xmm13=%xmm10
3193pand %xmm11,%xmm10
3194
3195# qhasm: xmm8 ^= xmm13
3196# asm 1: pxor <xmm13=int6464#11,<xmm8=int6464#15
3197# asm 2: pxor <xmm13=%xmm10,<xmm8=%xmm14
3198pxor %xmm10,%xmm14
3199
3200# qhasm: xmm11 ^= xmm15
3201# asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#9
3202# asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm8
3203pxor %xmm15,%xmm8
3204
3205# qhasm: xmm10 ^= xmm14
3206# asm 1: pxor <xmm14=int6464#14,<xmm10=int6464#10
3207# asm 2: pxor <xmm14=%xmm13,<xmm10=%xmm9
3208pxor %xmm13,%xmm9
3209
3210# qhasm: xmm9 ^= xmm15
3211# asm 1: pxor <xmm15=int6464#16,<xmm9=int6464#13
3212# asm 2: pxor <xmm15=%xmm15,<xmm9=%xmm12
3213pxor %xmm15,%xmm12
3214
3215# qhasm: xmm8 ^= xmm14
3216# asm 1: pxor <xmm14=int6464#14,<xmm8=int6464#15
3217# asm 2: pxor <xmm14=%xmm13,<xmm8=%xmm14
3218pxor %xmm13,%xmm14
3219
3220# qhasm: xmm9 ^= xmm14
3221# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
3222# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
3223pxor %xmm13,%xmm12
3224
3225# qhasm: xmm12 = xmm2
3226# asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#11
3227# asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm10
3228movdqa %xmm2,%xmm10
3229
3230# qhasm: xmm13 = xmm4
3231# asm 1: movdqa <xmm4=int6464#5,>xmm13=int6464#12
3232# asm 2: movdqa <xmm4=%xmm4,>xmm13=%xmm11
3233movdqa %xmm4,%xmm11
3234
3235# qhasm: xmm14 = xmm1
3236# asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14
3237# asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13
3238movdqa %xmm1,%xmm13
3239
3240# qhasm: xmm15 = xmm7
3241# asm 1: movdqa <xmm7=int6464#8,>xmm15=int6464#16
3242# asm 2: movdqa <xmm7=%xmm7,>xmm15=%xmm15
3243movdqa %xmm7,%xmm15
3244
3245# qhasm: xmm12 &= xmm3
3246# asm 1: pand <xmm3=int6464#4,<xmm12=int6464#11
3247# asm 2: pand <xmm3=%xmm3,<xmm12=%xmm10
3248pand %xmm3,%xmm10
3249
3250# qhasm: xmm13 &= xmm0
3251# asm 1: pand <xmm0=int6464#1,<xmm13=int6464#12
3252# asm 2: pand <xmm0=%xmm0,<xmm13=%xmm11
3253pand %xmm0,%xmm11
3254
3255# qhasm: xmm14 &= xmm5
3256# asm 1: pand <xmm5=int6464#6,<xmm14=int6464#14
3257# asm 2: pand <xmm5=%xmm5,<xmm14=%xmm13
3258pand %xmm5,%xmm13
3259
3260# qhasm: xmm15 |= xmm6
3261# asm 1: por <xmm6=int6464#7,<xmm15=int6464#16
3262# asm 2: por <xmm6=%xmm6,<xmm15=%xmm15
3263por %xmm6,%xmm15
3264
3265# qhasm: xmm11 ^= xmm12
3266# asm 1: pxor <xmm12=int6464#11,<xmm11=int6464#9
3267# asm 2: pxor <xmm12=%xmm10,<xmm11=%xmm8
3268pxor %xmm10,%xmm8
3269
3270# qhasm: xmm10 ^= xmm13
3271# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
3272# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
3273pxor %xmm11,%xmm9
3274
3275# qhasm: xmm9 ^= xmm14
3276# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
3277# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
3278pxor %xmm13,%xmm12
3279
3280# qhasm: xmm8 ^= xmm15
3281# asm 1: pxor <xmm15=int6464#16,<xmm8=int6464#15
3282# asm 2: pxor <xmm15=%xmm15,<xmm8=%xmm14
3283pxor %xmm15,%xmm14
3284
3285# qhasm: xmm12 = xmm11
3286# asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11
3287# asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10
3288movdqa %xmm8,%xmm10
3289
3290# qhasm: xmm12 ^= xmm10
3291# asm 1: pxor <xmm10=int6464#10,<xmm12=int6464#11
3292# asm 2: pxor <xmm10=%xmm9,<xmm12=%xmm10
3293pxor %xmm9,%xmm10
3294
3295# qhasm: xmm11 &= xmm9
3296# asm 1: pand <xmm9=int6464#13,<xmm11=int6464#9
3297# asm 2: pand <xmm9=%xmm12,<xmm11=%xmm8
3298pand %xmm12,%xmm8
3299
3300# qhasm: xmm14 = xmm8
3301# asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12
3302# asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11
3303movdqa %xmm14,%xmm11
3304
3305# qhasm: xmm14 ^= xmm11
3306# asm 1: pxor <xmm11=int6464#9,<xmm14=int6464#12
3307# asm 2: pxor <xmm11=%xmm8,<xmm14=%xmm11
3308pxor %xmm8,%xmm11
3309
3310# qhasm: xmm15 = xmm12
3311# asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14
3312# asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13
3313movdqa %xmm10,%xmm13
3314
3315# qhasm: xmm15 &= xmm14
3316# asm 1: pand <xmm14=int6464#12,<xmm15=int6464#14
3317# asm 2: pand <xmm14=%xmm11,<xmm15=%xmm13
3318pand %xmm11,%xmm13
3319
3320# qhasm: xmm15 ^= xmm10
3321# asm 1: pxor <xmm10=int6464#10,<xmm15=int6464#14
3322# asm 2: pxor <xmm10=%xmm9,<xmm15=%xmm13
3323pxor %xmm9,%xmm13
3324
3325# qhasm: xmm13 = xmm9
3326# asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16
3327# asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15
3328movdqa %xmm12,%xmm15
3329
3330# qhasm: xmm13 ^= xmm8
3331# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
3332# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
3333pxor %xmm14,%xmm15
3334
3335# qhasm: xmm11 ^= xmm10
3336# asm 1: pxor <xmm10=int6464#10,<xmm11=int6464#9
3337# asm 2: pxor <xmm10=%xmm9,<xmm11=%xmm8
3338pxor %xmm9,%xmm8
3339
3340# qhasm: xmm13 &= xmm11
3341# asm 1: pand <xmm11=int6464#9,<xmm13=int6464#16
3342# asm 2: pand <xmm11=%xmm8,<xmm13=%xmm15
3343pand %xmm8,%xmm15
3344
3345# qhasm: xmm13 ^= xmm8
3346# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
3347# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
3348pxor %xmm14,%xmm15
3349
3350# qhasm: xmm9 ^= xmm13
3351# asm 1: pxor <xmm13=int6464#16,<xmm9=int6464#13
3352# asm 2: pxor <xmm13=%xmm15,<xmm9=%xmm12
3353pxor %xmm15,%xmm12
3354
3355# qhasm: xmm10 = xmm14
3356# asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9
3357# asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8
3358movdqa %xmm11,%xmm8
3359
3360# qhasm: xmm10 ^= xmm13
3361# asm 1: pxor <xmm13=int6464#16,<xmm10=int6464#9
3362# asm 2: pxor <xmm13=%xmm15,<xmm10=%xmm8
3363pxor %xmm15,%xmm8
3364
3365# qhasm: xmm10 &= xmm8
3366# asm 1: pand <xmm8=int6464#15,<xmm10=int6464#9
3367# asm 2: pand <xmm8=%xmm14,<xmm10=%xmm8
3368pand %xmm14,%xmm8
3369
3370# qhasm: xmm9 ^= xmm10
3371# asm 1: pxor <xmm10=int6464#9,<xmm9=int6464#13
3372# asm 2: pxor <xmm10=%xmm8,<xmm9=%xmm12
3373pxor %xmm8,%xmm12
3374
3375# qhasm: xmm14 ^= xmm10
3376# asm 1: pxor <xmm10=int6464#9,<xmm14=int6464#12
3377# asm 2: pxor <xmm10=%xmm8,<xmm14=%xmm11
3378pxor %xmm8,%xmm11
3379
3380# qhasm: xmm14 &= xmm15
3381# asm 1: pand <xmm15=int6464#14,<xmm14=int6464#12
3382# asm 2: pand <xmm15=%xmm13,<xmm14=%xmm11
3383pand %xmm13,%xmm11
3384
3385# qhasm: xmm14 ^= xmm12
3386# asm 1: pxor <xmm12=int6464#11,<xmm14=int6464#12
3387# asm 2: pxor <xmm12=%xmm10,<xmm14=%xmm11
3388pxor %xmm10,%xmm11
3389
3390# qhasm: xmm12 = xmm6
3391# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#9
3392# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm8
3393movdqa %xmm6,%xmm8
3394
3395# qhasm: xmm8 = xmm5
3396# asm 1: movdqa <xmm5=int6464#6,>xmm8=int6464#10
3397# asm 2: movdqa <xmm5=%xmm5,>xmm8=%xmm9
3398movdqa %xmm5,%xmm9
3399
3400# qhasm: xmm10 = xmm15
3401# asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11
3402# asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10
3403movdqa %xmm13,%xmm10
3404
3405# qhasm: xmm10 ^= xmm14
3406# asm 1: pxor <xmm14=int6464#12,<xmm10=int6464#11
3407# asm 2: pxor <xmm14=%xmm11,<xmm10=%xmm10
3408pxor %xmm11,%xmm10
3409
3410# qhasm: xmm10 &= xmm6
3411# asm 1: pand <xmm6=int6464#7,<xmm10=int6464#11
3412# asm 2: pand <xmm6=%xmm6,<xmm10=%xmm10
3413pand %xmm6,%xmm10
3414
3415# qhasm: xmm6 ^= xmm5
3416# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
3417# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
3418pxor %xmm5,%xmm6
3419
3420# qhasm: xmm6 &= xmm14
3421# asm 1: pand <xmm14=int6464#12,<xmm6=int6464#7
3422# asm 2: pand <xmm14=%xmm11,<xmm6=%xmm6
3423pand %xmm11,%xmm6
3424
3425# qhasm: xmm5 &= xmm15
3426# asm 1: pand <xmm15=int6464#14,<xmm5=int6464#6
3427# asm 2: pand <xmm15=%xmm13,<xmm5=%xmm5
3428pand %xmm13,%xmm5
3429
3430# qhasm: xmm6 ^= xmm5
3431# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
3432# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
3433pxor %xmm5,%xmm6
3434
3435# qhasm: xmm5 ^= xmm10
3436# asm 1: pxor <xmm10=int6464#11,<xmm5=int6464#6
3437# asm 2: pxor <xmm10=%xmm10,<xmm5=%xmm5
3438pxor %xmm10,%xmm5
3439
3440# qhasm: xmm12 ^= xmm0
3441# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#9
3442# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm8
3443pxor %xmm0,%xmm8
3444
3445# qhasm: xmm8 ^= xmm3
3446# asm 1: pxor <xmm3=int6464#4,<xmm8=int6464#10
3447# asm 2: pxor <xmm3=%xmm3,<xmm8=%xmm9
3448pxor %xmm3,%xmm9
3449
3450# qhasm: xmm15 ^= xmm13
3451# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
3452# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
3453pxor %xmm15,%xmm13
3454
3455# qhasm: xmm14 ^= xmm9
3456# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
3457# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
3458pxor %xmm12,%xmm11
3459
3460# qhasm: xmm11 = xmm15
3461# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
3462# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
3463movdqa %xmm13,%xmm10
3464
3465# qhasm: xmm11 ^= xmm14
3466# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
3467# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
3468pxor %xmm11,%xmm10
3469
3470# qhasm: xmm11 &= xmm12
3471# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
3472# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
3473pand %xmm8,%xmm10
3474
3475# qhasm: xmm12 ^= xmm8
3476# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
3477# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
3478pxor %xmm9,%xmm8
3479
3480# qhasm: xmm12 &= xmm14
3481# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
3482# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
3483pand %xmm11,%xmm8
3484
3485# qhasm: xmm8 &= xmm15
3486# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
3487# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
3488pand %xmm13,%xmm9
3489
3490# qhasm: xmm8 ^= xmm12
3491# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
3492# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
3493pxor %xmm8,%xmm9
3494
3495# qhasm: xmm12 ^= xmm11
3496# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
3497# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
3498pxor %xmm10,%xmm8
3499
3500# qhasm: xmm10 = xmm13
3501# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
3502# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
3503movdqa %xmm15,%xmm10
3504
3505# qhasm: xmm10 ^= xmm9
3506# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
3507# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
3508pxor %xmm12,%xmm10
3509
3510# qhasm: xmm10 &= xmm0
3511# asm 1: pand <xmm0=int6464#1,<xmm10=int6464#11
3512# asm 2: pand <xmm0=%xmm0,<xmm10=%xmm10
3513pand %xmm0,%xmm10
3514
3515# qhasm: xmm0 ^= xmm3
3516# asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1
3517# asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0
3518pxor %xmm3,%xmm0
3519
3520# qhasm: xmm0 &= xmm9
3521# asm 1: pand <xmm9=int6464#13,<xmm0=int6464#1
3522# asm 2: pand <xmm9=%xmm12,<xmm0=%xmm0
3523pand %xmm12,%xmm0
3524
3525# qhasm: xmm3 &= xmm13
3526# asm 1: pand <xmm13=int6464#16,<xmm3=int6464#4
3527# asm 2: pand <xmm13=%xmm15,<xmm3=%xmm3
3528pand %xmm15,%xmm3
3529
3530# qhasm: xmm0 ^= xmm3
3531# asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1
3532# asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0
3533pxor %xmm3,%xmm0
3534
3535# qhasm: xmm3 ^= xmm10
3536# asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4
3537# asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3
3538pxor %xmm10,%xmm3
3539
3540# qhasm: xmm6 ^= xmm12
3541# asm 1: pxor <xmm12=int6464#9,<xmm6=int6464#7
3542# asm 2: pxor <xmm12=%xmm8,<xmm6=%xmm6
3543pxor %xmm8,%xmm6
3544
3545# qhasm: xmm0 ^= xmm12
3546# asm 1: pxor <xmm12=int6464#9,<xmm0=int6464#1
3547# asm 2: pxor <xmm12=%xmm8,<xmm0=%xmm0
3548pxor %xmm8,%xmm0
3549
3550# qhasm: xmm5 ^= xmm8
3551# asm 1: pxor <xmm8=int6464#10,<xmm5=int6464#6
3552# asm 2: pxor <xmm8=%xmm9,<xmm5=%xmm5
3553pxor %xmm9,%xmm5
3554
3555# qhasm: xmm3 ^= xmm8
3556# asm 1: pxor <xmm8=int6464#10,<xmm3=int6464#4
3557# asm 2: pxor <xmm8=%xmm9,<xmm3=%xmm3
3558pxor %xmm9,%xmm3
3559
3560# qhasm: xmm12 = xmm7
3561# asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#9
3562# asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm8
3563movdqa %xmm7,%xmm8
3564
3565# qhasm: xmm8 = xmm1
3566# asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10
3567# asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9
3568movdqa %xmm1,%xmm9
3569
3570# qhasm: xmm12 ^= xmm4
3571# asm 1: pxor <xmm4=int6464#5,<xmm12=int6464#9
3572# asm 2: pxor <xmm4=%xmm4,<xmm12=%xmm8
3573pxor %xmm4,%xmm8
3574
3575# qhasm: xmm8 ^= xmm2
3576# asm 1: pxor <xmm2=int6464#3,<xmm8=int6464#10
3577# asm 2: pxor <xmm2=%xmm2,<xmm8=%xmm9
3578pxor %xmm2,%xmm9
3579
3580# qhasm: xmm11 = xmm15
3581# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
3582# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
3583movdqa %xmm13,%xmm10
3584
3585# qhasm: xmm11 ^= xmm14
3586# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
3587# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
3588pxor %xmm11,%xmm10
3589
3590# qhasm: xmm11 &= xmm12
3591# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
3592# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
3593pand %xmm8,%xmm10
3594
3595# qhasm: xmm12 ^= xmm8
3596# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
3597# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
3598pxor %xmm9,%xmm8
3599
3600# qhasm: xmm12 &= xmm14
3601# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
3602# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
3603pand %xmm11,%xmm8
3604
3605# qhasm: xmm8 &= xmm15
3606# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
3607# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
3608pand %xmm13,%xmm9
3609
3610# qhasm: xmm8 ^= xmm12
3611# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
3612# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
3613pxor %xmm8,%xmm9
3614
3615# qhasm: xmm12 ^= xmm11
3616# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
3617# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
3618pxor %xmm10,%xmm8
3619
3620# qhasm: xmm10 = xmm13
3621# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
3622# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
3623movdqa %xmm15,%xmm10
3624
3625# qhasm: xmm10 ^= xmm9
3626# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
3627# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
3628pxor %xmm12,%xmm10
3629
3630# qhasm: xmm10 &= xmm4
3631# asm 1: pand <xmm4=int6464#5,<xmm10=int6464#11
3632# asm 2: pand <xmm4=%xmm4,<xmm10=%xmm10
3633pand %xmm4,%xmm10
3634
3635# qhasm: xmm4 ^= xmm2
3636# asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5
3637# asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4
3638pxor %xmm2,%xmm4
3639
3640# qhasm: xmm4 &= xmm9
3641# asm 1: pand <xmm9=int6464#13,<xmm4=int6464#5
3642# asm 2: pand <xmm9=%xmm12,<xmm4=%xmm4
3643pand %xmm12,%xmm4
3644
3645# qhasm: xmm2 &= xmm13
3646# asm 1: pand <xmm13=int6464#16,<xmm2=int6464#3
3647# asm 2: pand <xmm13=%xmm15,<xmm2=%xmm2
3648pand %xmm15,%xmm2
3649
3650# qhasm: xmm4 ^= xmm2
3651# asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5
3652# asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4
3653pxor %xmm2,%xmm4
3654
3655# qhasm: xmm2 ^= xmm10
3656# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3
3657# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2
3658pxor %xmm10,%xmm2
3659
3660# qhasm: xmm15 ^= xmm13
3661# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
3662# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
3663pxor %xmm15,%xmm13
3664
3665# qhasm: xmm14 ^= xmm9
3666# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
3667# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
3668pxor %xmm12,%xmm11
3669
3670# qhasm: xmm11 = xmm15
3671# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
3672# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
3673movdqa %xmm13,%xmm10
3674
3675# qhasm: xmm11 ^= xmm14
3676# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
3677# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
3678pxor %xmm11,%xmm10
3679
3680# qhasm: xmm11 &= xmm7
3681# asm 1: pand <xmm7=int6464#8,<xmm11=int6464#11
3682# asm 2: pand <xmm7=%xmm7,<xmm11=%xmm10
3683pand %xmm7,%xmm10
3684
3685# qhasm: xmm7 ^= xmm1
3686# asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8
3687# asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7
3688pxor %xmm1,%xmm7
3689
3690# qhasm: xmm7 &= xmm14
3691# asm 1: pand <xmm14=int6464#12,<xmm7=int6464#8
3692# asm 2: pand <xmm14=%xmm11,<xmm7=%xmm7
3693pand %xmm11,%xmm7
3694
3695# qhasm: xmm1 &= xmm15
3696# asm 1: pand <xmm15=int6464#14,<xmm1=int6464#2
3697# asm 2: pand <xmm15=%xmm13,<xmm1=%xmm1
3698pand %xmm13,%xmm1
3699
3700# qhasm: xmm7 ^= xmm1
3701# asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8
3702# asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7
3703pxor %xmm1,%xmm7
3704
3705# qhasm: xmm1 ^= xmm11
3706# asm 1: pxor <xmm11=int6464#11,<xmm1=int6464#2
3707# asm 2: pxor <xmm11=%xmm10,<xmm1=%xmm1
3708pxor %xmm10,%xmm1
3709
3710# qhasm: xmm7 ^= xmm12
3711# asm 1: pxor <xmm12=int6464#9,<xmm7=int6464#8
3712# asm 2: pxor <xmm12=%xmm8,<xmm7=%xmm7
3713pxor %xmm8,%xmm7
3714
3715# qhasm: xmm4 ^= xmm12
3716# asm 1: pxor <xmm12=int6464#9,<xmm4=int6464#5
3717# asm 2: pxor <xmm12=%xmm8,<xmm4=%xmm4
3718pxor %xmm8,%xmm4
3719
3720# qhasm: xmm1 ^= xmm8
3721# asm 1: pxor <xmm8=int6464#10,<xmm1=int6464#2
3722# asm 2: pxor <xmm8=%xmm9,<xmm1=%xmm1
3723pxor %xmm9,%xmm1
3724
3725# qhasm: xmm2 ^= xmm8
3726# asm 1: pxor <xmm8=int6464#10,<xmm2=int6464#3
3727# asm 2: pxor <xmm8=%xmm9,<xmm2=%xmm2
3728pxor %xmm9,%xmm2
3729
3730# qhasm: xmm7 ^= xmm0
3731# asm 1: pxor <xmm0=int6464#1,<xmm7=int6464#8
3732# asm 2: pxor <xmm0=%xmm0,<xmm7=%xmm7
3733pxor %xmm0,%xmm7
3734
3735# qhasm: xmm1 ^= xmm6
3736# asm 1: pxor <xmm6=int6464#7,<xmm1=int6464#2
3737# asm 2: pxor <xmm6=%xmm6,<xmm1=%xmm1
3738pxor %xmm6,%xmm1
3739
3740# qhasm: xmm4 ^= xmm7
3741# asm 1: pxor <xmm7=int6464#8,<xmm4=int6464#5
3742# asm 2: pxor <xmm7=%xmm7,<xmm4=%xmm4
3743pxor %xmm7,%xmm4
3744
3745# qhasm: xmm6 ^= xmm0
3746# asm 1: pxor <xmm0=int6464#1,<xmm6=int6464#7
3747# asm 2: pxor <xmm0=%xmm0,<xmm6=%xmm6
3748pxor %xmm0,%xmm6
3749
3750# qhasm: xmm0 ^= xmm1
3751# asm 1: pxor <xmm1=int6464#2,<xmm0=int6464#1
3752# asm 2: pxor <xmm1=%xmm1,<xmm0=%xmm0
3753pxor %xmm1,%xmm0
3754
3755# qhasm: xmm1 ^= xmm5
3756# asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2
3757# asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1
3758pxor %xmm5,%xmm1
3759
3760# qhasm: xmm5 ^= xmm2
3761# asm 1: pxor <xmm2=int6464#3,<xmm5=int6464#6
3762# asm 2: pxor <xmm2=%xmm2,<xmm5=%xmm5
3763pxor %xmm2,%xmm5
3764
3765# qhasm: xmm4 ^= xmm5
3766# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
3767# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
3768pxor %xmm5,%xmm4
3769
3770# qhasm: xmm2 ^= xmm3
3771# asm 1: pxor <xmm3=int6464#4,<xmm2=int6464#3
3772# asm 2: pxor <xmm3=%xmm3,<xmm2=%xmm2
3773pxor %xmm3,%xmm2
3774
3775# qhasm: xmm3 ^= xmm5
3776# asm 1: pxor <xmm5=int6464#6,<xmm3=int6464#4
3777# asm 2: pxor <xmm5=%xmm5,<xmm3=%xmm3
3778pxor %xmm5,%xmm3
3779
3780# qhasm: xmm6 ^= xmm3
3781# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
3782# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
3783pxor %xmm3,%xmm6
3784
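# Note: MixColumns for this round begins here, following the same
# pattern as before: pshufd $0x93 row rotations, an XOR accumulation,
# then the $0x4E stage.
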
3785# qhasm: xmm8 = shuffle dwords of xmm0 by 0x93
3786# asm 1: pshufd $0x93,<xmm0=int6464#1,>xmm8=int6464#9
3787# asm 2: pshufd $0x93,<xmm0=%xmm0,>xmm8=%xmm8
3788pshufd $0x93,%xmm0,%xmm8
3789
3790# qhasm: xmm9 = shuffle dwords of xmm1 by 0x93
3791# asm 1: pshufd $0x93,<xmm1=int6464#2,>xmm9=int6464#10
3792# asm 2: pshufd $0x93,<xmm1=%xmm1,>xmm9=%xmm9
3793pshufd $0x93,%xmm1,%xmm9
3794
3795# qhasm: xmm10 = shuffle dwords of xmm4 by 0x93
3796# asm 1: pshufd $0x93,<xmm4=int6464#5,>xmm10=int6464#11
3797# asm 2: pshufd $0x93,<xmm4=%xmm4,>xmm10=%xmm10
3798pshufd $0x93,%xmm4,%xmm10
3799
3800# qhasm: xmm11 = shuffle dwords of xmm6 by 0x93
3801# asm 1: pshufd $0x93,<xmm6=int6464#7,>xmm11=int6464#12
3802# asm 2: pshufd $0x93,<xmm6=%xmm6,>xmm11=%xmm11
3803pshufd $0x93,%xmm6,%xmm11
3804
3805# qhasm: xmm12 = shuffle dwords of xmm3 by 0x93
3806# asm 1: pshufd $0x93,<xmm3=int6464#4,>xmm12=int6464#13
3807# asm 2: pshufd $0x93,<xmm3=%xmm3,>xmm12=%xmm12
3808pshufd $0x93,%xmm3,%xmm12
3809
3810# qhasm: xmm13 = shuffle dwords of xmm7 by 0x93
3811# asm 1: pshufd $0x93,<xmm7=int6464#8,>xmm13=int6464#14
3812# asm 2: pshufd $0x93,<xmm7=%xmm7,>xmm13=%xmm13
3813pshufd $0x93,%xmm7,%xmm13
3814
3815# qhasm: xmm14 = shuffle dwords of xmm2 by 0x93
3816# asm 1: pshufd $0x93,<xmm2=int6464#3,>xmm14=int6464#15
3817# asm 2: pshufd $0x93,<xmm2=%xmm2,>xmm14=%xmm14
3818pshufd $0x93,%xmm2,%xmm14
3819
3820# qhasm: xmm15 = shuffle dwords of xmm5 by 0x93
3821# asm 1: pshufd $0x93,<xmm5=int6464#6,>xmm15=int6464#16
3822# asm 2: pshufd $0x93,<xmm5=%xmm5,>xmm15=%xmm15
3823pshufd $0x93,%xmm5,%xmm15
3824
3825# qhasm: xmm0 ^= xmm8
3826# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
3827# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
3828pxor %xmm8,%xmm0
3829
3830# qhasm: xmm1 ^= xmm9
3831# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
3832# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
3833pxor %xmm9,%xmm1
3834
3835# qhasm: xmm4 ^= xmm10
3836# asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5
3837# asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4
3838pxor %xmm10,%xmm4
3839
3840# qhasm: xmm6 ^= xmm11
3841# asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7
3842# asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6
3843pxor %xmm11,%xmm6
3844
3845# qhasm: xmm3 ^= xmm12
3846# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4
3847# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3
3848pxor %xmm12,%xmm3
3849
3850# qhasm: xmm7 ^= xmm13
3851# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
3852# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
3853pxor %xmm13,%xmm7
3854
3855# qhasm: xmm2 ^= xmm14
3856# asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3
3857# asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2
3858pxor %xmm14,%xmm2
3859
3860# qhasm: xmm5 ^= xmm15
3861# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
3862# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
3863pxor %xmm15,%xmm5
3864
3865# qhasm: xmm8 ^= xmm5
3866# asm 1: pxor <xmm5=int6464#6,<xmm8=int6464#9
3867# asm 2: pxor <xmm5=%xmm5,<xmm8=%xmm8
3868pxor %xmm5,%xmm8
3869
3870# qhasm: xmm9 ^= xmm0
3871# asm 1: pxor <xmm0=int6464#1,<xmm9=int6464#10
3872# asm 2: pxor <xmm0=%xmm0,<xmm9=%xmm9
3873pxor %xmm0,%xmm9
3874
3875# qhasm: xmm10 ^= xmm1
3876# asm 1: pxor <xmm1=int6464#2,<xmm10=int6464#11
3877# asm 2: pxor <xmm1=%xmm1,<xmm10=%xmm10
3878pxor %xmm1,%xmm10
3879
3880# qhasm: xmm9 ^= xmm5
3881# asm 1: pxor <xmm5=int6464#6,<xmm9=int6464#10
3882# asm 2: pxor <xmm5=%xmm5,<xmm9=%xmm9
3883pxor %xmm5,%xmm9
3884
3885# qhasm: xmm11 ^= xmm4
3886# asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#12
3887# asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm11
3888pxor %xmm4,%xmm11
3889
3890# qhasm: xmm12 ^= xmm6
3891# asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#13
3892# asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm12
3893pxor %xmm6,%xmm12
3894
3895# qhasm: xmm13 ^= xmm3
3896# asm 1: pxor <xmm3=int6464#4,<xmm13=int6464#14
3897# asm 2: pxor <xmm3=%xmm3,<xmm13=%xmm13
3898pxor %xmm3,%xmm13
3899
3900# qhasm: xmm11 ^= xmm5
3901# asm 1: pxor <xmm5=int6464#6,<xmm11=int6464#12
3902# asm 2: pxor <xmm5=%xmm5,<xmm11=%xmm11
3903pxor %xmm5,%xmm11
3904
3905# qhasm: xmm14 ^= xmm7
3906# asm 1: pxor <xmm7=int6464#8,<xmm14=int6464#15
3907# asm 2: pxor <xmm7=%xmm7,<xmm14=%xmm14
3908pxor %xmm7,%xmm14
3909
3910# qhasm: xmm15 ^= xmm2
3911# asm 1: pxor <xmm2=int6464#3,<xmm15=int6464#16
3912# asm 2: pxor <xmm2=%xmm2,<xmm15=%xmm15
3913pxor %xmm2,%xmm15
3914
3915# qhasm: xmm12 ^= xmm5
3916# asm 1: pxor <xmm5=int6464#6,<xmm12=int6464#13
3917# asm 2: pxor <xmm5=%xmm5,<xmm12=%xmm12
3918pxor %xmm5,%xmm12
3919
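# Annotation (hedged): pshufd $0x4E rotates each row by two 32-bit words;
# together with the $0x93 rotation above this supplies the remaining
# rotated copies that the MixColumns combination needs.
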
3920# qhasm: xmm0 = shuffle dwords of xmm0 by 0x4E
3921# asm 1: pshufd $0x4E,<xmm0=int6464#1,>xmm0=int6464#1
3922# asm 2: pshufd $0x4E,<xmm0=%xmm0,>xmm0=%xmm0
3923pshufd $0x4E,%xmm0,%xmm0
3924
3925# qhasm: xmm1 = shuffle dwords of xmm1 by 0x4E
3926# asm 1: pshufd $0x4E,<xmm1=int6464#2,>xmm1=int6464#2
3927# asm 2: pshufd $0x4E,<xmm1=%xmm1,>xmm1=%xmm1
3928pshufd $0x4E,%xmm1,%xmm1
3929
3930# qhasm: xmm4 = shuffle dwords of xmm4 by 0x4E
3931# asm 1: pshufd $0x4E,<xmm4=int6464#5,>xmm4=int6464#5
3932# asm 2: pshufd $0x4E,<xmm4=%xmm4,>xmm4=%xmm4
3933pshufd $0x4E,%xmm4,%xmm4
3934
3935# qhasm: xmm6 = shuffle dwords of xmm6 by 0x4E
3936# asm 1: pshufd $0x4E,<xmm6=int6464#7,>xmm6=int6464#7
3937# asm 2: pshufd $0x4E,<xmm6=%xmm6,>xmm6=%xmm6
3938pshufd $0x4E,%xmm6,%xmm6
3939
3940# qhasm: xmm3 = shuffle dwords of xmm3 by 0x4E
3941# asm 1: pshufd $0x4E,<xmm3=int6464#4,>xmm3=int6464#4
3942# asm 2: pshufd $0x4E,<xmm3=%xmm3,>xmm3=%xmm3
3943pshufd $0x4E,%xmm3,%xmm3
3944
3945# qhasm: xmm7 = shuffle dwords of xmm7 by 0x4E
3946# asm 1: pshufd $0x4E,<xmm7=int6464#8,>xmm7=int6464#8
3947# asm 2: pshufd $0x4E,<xmm7=%xmm7,>xmm7=%xmm7
3948pshufd $0x4E,%xmm7,%xmm7
3949
3950# qhasm: xmm2 = shuffle dwords of xmm2 by 0x4E
3951# asm 1: pshufd $0x4E,<xmm2=int6464#3,>xmm2=int6464#3
3952# asm 2: pshufd $0x4E,<xmm2=%xmm2,>xmm2=%xmm2
3953pshufd $0x4E,%xmm2,%xmm2
3954
3955# qhasm: xmm5 = shuffle dwords of xmm5 by 0x4E
3956# asm 1: pshufd $0x4E,<xmm5=int6464#6,>xmm5=int6464#6
3957# asm 2: pshufd $0x4E,<xmm5=%xmm5,>xmm5=%xmm5
3958pshufd $0x4E,%xmm5,%xmm5
3959
3960# qhasm: xmm8 ^= xmm0
3961# asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9
3962# asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8
3963pxor %xmm0,%xmm8
3964
3965# qhasm: xmm9 ^= xmm1
3966# asm 1: pxor <xmm1=int6464#2,<xmm9=int6464#10
3967# asm 2: pxor <xmm1=%xmm1,<xmm9=%xmm9
3968pxor %xmm1,%xmm9
3969
3970# qhasm: xmm10 ^= xmm4
3971# asm 1: pxor <xmm4=int6464#5,<xmm10=int6464#11
3972# asm 2: pxor <xmm4=%xmm4,<xmm10=%xmm10
3973pxor %xmm4,%xmm10
3974
3975# qhasm: xmm11 ^= xmm6
3976# asm 1: pxor <xmm6=int6464#7,<xmm11=int6464#12
3977# asm 2: pxor <xmm6=%xmm6,<xmm11=%xmm11
3978pxor %xmm6,%xmm11
3979
3980# qhasm: xmm12 ^= xmm3
3981# asm 1: pxor <xmm3=int6464#4,<xmm12=int6464#13
3982# asm 2: pxor <xmm3=%xmm3,<xmm12=%xmm12
3983pxor %xmm3,%xmm12
3984
3985# qhasm: xmm13 ^= xmm7
3986# asm 1: pxor <xmm7=int6464#8,<xmm13=int6464#14
3987# asm 2: pxor <xmm7=%xmm7,<xmm13=%xmm13
3988pxor %xmm7,%xmm13
3989
3990# qhasm: xmm14 ^= xmm2
3991# asm 1: pxor <xmm2=int6464#3,<xmm14=int6464#15
3992# asm 2: pxor <xmm2=%xmm2,<xmm14=%xmm14
3993pxor %xmm2,%xmm14
3994
3995# qhasm: xmm15 ^= xmm5
3996# asm 1: pxor <xmm5=int6464#6,<xmm15=int6464#16
3997# asm 2: pxor <xmm5=%xmm5,<xmm15=%xmm15
3998pxor %xmm5,%xmm15
3999
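# AddRoundKey: XOR in the bitsliced round key stored at c+384..c+496
# (eight 16-byte slices, 128 bytes per round key; the next round key
# starts at c+512), then permute the bytes of each row with pshufb and
# the SR constant, which applies ShiftRows in a single shuffle.
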
4000# qhasm: xmm8 ^= *(int128 *)(c + 384)
4001# asm 1: pxor 384(<c=int64#4),<xmm8=int6464#9
4002# asm 2: pxor 384(<c=%rcx),<xmm8=%xmm8
4003pxor 384(%rcx),%xmm8
4004
4005# qhasm: shuffle bytes of xmm8 by SR
4006# asm 1: pshufb SR,<xmm8=int6464#9
4007# asm 2: pshufb SR,<xmm8=%xmm8
4008pshufb SR,%xmm8
4009
4010# qhasm: xmm9 ^= *(int128 *)(c + 400)
4011# asm 1: pxor 400(<c=int64#4),<xmm9=int6464#10
4012# asm 2: pxor 400(<c=%rcx),<xmm9=%xmm9
4013pxor 400(%rcx),%xmm9
4014
4015# qhasm: shuffle bytes of xmm9 by SR
4016# asm 1: pshufb SR,<xmm9=int6464#10
4017# asm 2: pshufb SR,<xmm9=%xmm9
4018pshufb SR,%xmm9
4019
4020# qhasm: xmm10 ^= *(int128 *)(c + 416)
4021# asm 1: pxor 416(<c=int64#4),<xmm10=int6464#11
4022# asm 2: pxor 416(<c=%rcx),<xmm10=%xmm10
4023pxor 416(%rcx),%xmm10
4024
4025# qhasm: shuffle bytes of xmm10 by SR
4026# asm 1: pshufb SR,<xmm10=int6464#11
4027# asm 2: pshufb SR,<xmm10=%xmm10
4028pshufb SR,%xmm10
4029
4030# qhasm: xmm11 ^= *(int128 *)(c + 432)
4031# asm 1: pxor 432(<c=int64#4),<xmm11=int6464#12
4032# asm 2: pxor 432(<c=%rcx),<xmm11=%xmm11
4033pxor 432(%rcx),%xmm11
4034
4035# qhasm: shuffle bytes of xmm11 by SR
4036# asm 1: pshufb SR,<xmm11=int6464#12
4037# asm 2: pshufb SR,<xmm11=%xmm11
4038pshufb SR,%xmm11
4039
4040# qhasm: xmm12 ^= *(int128 *)(c + 448)
4041# asm 1: pxor 448(<c=int64#4),<xmm12=int6464#13
4042# asm 2: pxor 448(<c=%rcx),<xmm12=%xmm12
4043pxor 448(%rcx),%xmm12
4044
4045# qhasm: shuffle bytes of xmm12 by SR
4046# asm 1: pshufb SR,<xmm12=int6464#13
4047# asm 2: pshufb SR,<xmm12=%xmm12
4048pshufb SR,%xmm12
4049
4050# qhasm: xmm13 ^= *(int128 *)(c + 464)
4051# asm 1: pxor 464(<c=int64#4),<xmm13=int6464#14
4052# asm 2: pxor 464(<c=%rcx),<xmm13=%xmm13
4053pxor 464(%rcx),%xmm13
4054
4055# qhasm: shuffle bytes of xmm13 by SR
4056# asm 1: pshufb SR,<xmm13=int6464#14
4057# asm 2: pshufb SR,<xmm13=%xmm13
4058pshufb SR,%xmm13
4059
4060# qhasm: xmm14 ^= *(int128 *)(c + 480)
4061# asm 1: pxor 480(<c=int64#4),<xmm14=int6464#15
4062# asm 2: pxor 480(<c=%rcx),<xmm14=%xmm14
4063pxor 480(%rcx),%xmm14
4064
4065# qhasm: shuffle bytes of xmm14 by SR
4066# asm 1: pshufb SR,<xmm14=int6464#15
4067# asm 2: pshufb SR,<xmm14=%xmm14
4068pshufb SR,%xmm14
4069
4070# qhasm: xmm15 ^= *(int128 *)(c + 496)
4071# asm 1: pxor 496(<c=int64#4),<xmm15=int6464#16
4072# asm 2: pxor 496(<c=%rcx),<xmm15=%xmm15
4073pxor 496(%rcx),%xmm15
4074
4075# qhasm: shuffle bytes of xmm15 by SR
4076# asm 1: pshufb SR,<xmm15=int6464#16
4077# asm 2: pshufb SR,<xmm15=%xmm15
4078pshufb SR,%xmm15
4079
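# Annotation (hedged): start of the next bitsliced S-box evaluation.
# This XOR-only block is the input linear layer of the S-box Boolean
# circuit; the AND/OR gates of the nonlinear part follow below.
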
4080# qhasm: xmm13 ^= xmm14
4081# asm 1: pxor <xmm14=int6464#15,<xmm13=int6464#14
4082# asm 2: pxor <xmm14=%xmm14,<xmm13=%xmm13
4083pxor %xmm14,%xmm13
4084
4085# qhasm: xmm10 ^= xmm9
4086# asm 1: pxor <xmm9=int6464#10,<xmm10=int6464#11
4087# asm 2: pxor <xmm9=%xmm9,<xmm10=%xmm10
4088pxor %xmm9,%xmm10
4089
4090# qhasm: xmm13 ^= xmm8
4091# asm 1: pxor <xmm8=int6464#9,<xmm13=int6464#14
4092# asm 2: pxor <xmm8=%xmm8,<xmm13=%xmm13
4093pxor %xmm8,%xmm13
4094
4095# qhasm: xmm14 ^= xmm10
4096# asm 1: pxor <xmm10=int6464#11,<xmm14=int6464#15
4097# asm 2: pxor <xmm10=%xmm10,<xmm14=%xmm14
4098pxor %xmm10,%xmm14
4099
4100# qhasm: xmm11 ^= xmm8
4101# asm 1: pxor <xmm8=int6464#9,<xmm11=int6464#12
4102# asm 2: pxor <xmm8=%xmm8,<xmm11=%xmm11
4103pxor %xmm8,%xmm11
4104
4105# qhasm: xmm14 ^= xmm11
4106# asm 1: pxor <xmm11=int6464#12,<xmm14=int6464#15
4107# asm 2: pxor <xmm11=%xmm11,<xmm14=%xmm14
4108pxor %xmm11,%xmm14
4109
4110# qhasm: xmm11 ^= xmm15
4111# asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#12
4112# asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm11
4113pxor %xmm15,%xmm11
4114
4115# qhasm: xmm11 ^= xmm12
4116# asm 1: pxor <xmm12=int6464#13,<xmm11=int6464#12
4117# asm 2: pxor <xmm12=%xmm12,<xmm11=%xmm11
4118pxor %xmm12,%xmm11
4119
4120# qhasm: xmm15 ^= xmm13
4121# asm 1: pxor <xmm13=int6464#14,<xmm15=int6464#16
4122# asm 2: pxor <xmm13=%xmm13,<xmm15=%xmm15
4123pxor %xmm13,%xmm15
4124
4125# qhasm: xmm11 ^= xmm9
4126# asm 1: pxor <xmm9=int6464#10,<xmm11=int6464#12
4127# asm 2: pxor <xmm9=%xmm9,<xmm11=%xmm11
4128pxor %xmm9,%xmm11
4129
4130# qhasm: xmm12 ^= xmm13
4131# asm 1: pxor <xmm13=int6464#14,<xmm12=int6464#13
4132# asm 2: pxor <xmm13=%xmm13,<xmm12=%xmm12
4133pxor %xmm13,%xmm12
4134
4135# qhasm: xmm10 ^= xmm15
4136# asm 1: pxor <xmm15=int6464#16,<xmm10=int6464#11
4137# asm 2: pxor <xmm15=%xmm15,<xmm10=%xmm10
4138pxor %xmm15,%xmm10
4139
4140# qhasm: xmm9 ^= xmm13
4141# asm 1: pxor <xmm13=int6464#14,<xmm9=int6464#10
4142# asm 2: pxor <xmm13=%xmm13,<xmm9=%xmm9
4143pxor %xmm13,%xmm9
4144
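# Annotation (hedged): nonlinear core of the S-box. The movdqa/pand/por/
# pxor sequence below realizes the nonlinear part of the AES S-box
# (the GF(2^8) inversion) as a Boolean circuit, reusing xmm0..xmm7 as
# scratch registers, so the qhasm names no longer match the physical
# registers one-to-one.
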
4145# qhasm: xmm3 = xmm15
4146# asm 1: movdqa <xmm15=int6464#16,>xmm3=int6464#1
4147# asm 2: movdqa <xmm15=%xmm15,>xmm3=%xmm0
4148movdqa %xmm15,%xmm0
4149
4150# qhasm: xmm2 = xmm9
4151# asm 1: movdqa <xmm9=int6464#10,>xmm2=int6464#2
4152# asm 2: movdqa <xmm9=%xmm9,>xmm2=%xmm1
4153movdqa %xmm9,%xmm1
4154
4155# qhasm: xmm1 = xmm13
4156# asm 1: movdqa <xmm13=int6464#14,>xmm1=int6464#3
4157# asm 2: movdqa <xmm13=%xmm13,>xmm1=%xmm2
4158movdqa %xmm13,%xmm2
4159
4160# qhasm: xmm5 = xmm10
4161# asm 1: movdqa <xmm10=int6464#11,>xmm5=int6464#4
4162# asm 2: movdqa <xmm10=%xmm10,>xmm5=%xmm3
4163movdqa %xmm10,%xmm3
4164
4165# qhasm: xmm4 = xmm14
4166# asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#5
4167# asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm4
4168movdqa %xmm14,%xmm4
4169
4170# qhasm: xmm3 ^= xmm12
4171# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#1
4172# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm0
4173pxor %xmm12,%xmm0
4174
4175# qhasm: xmm2 ^= xmm10
4176# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#2
4177# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm1
4178pxor %xmm10,%xmm1
4179
4180# qhasm: xmm1 ^= xmm11
4181# asm 1: pxor <xmm11=int6464#12,<xmm1=int6464#3
4182# asm 2: pxor <xmm11=%xmm11,<xmm1=%xmm2
4183pxor %xmm11,%xmm2
4184
4185# qhasm: xmm5 ^= xmm12
4186# asm 1: pxor <xmm12=int6464#13,<xmm5=int6464#4
4187# asm 2: pxor <xmm12=%xmm12,<xmm5=%xmm3
4188pxor %xmm12,%xmm3
4189
4190# qhasm: xmm4 ^= xmm8
4191# asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#5
4192# asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm4
4193pxor %xmm8,%xmm4
4194
4195# qhasm: xmm6 = xmm3
4196# asm 1: movdqa <xmm3=int6464#1,>xmm6=int6464#6
4197# asm 2: movdqa <xmm3=%xmm0,>xmm6=%xmm5
4198movdqa %xmm0,%xmm5
4199
4200# qhasm: xmm0 = xmm2
4201# asm 1: movdqa <xmm2=int6464#2,>xmm0=int6464#7
4202# asm 2: movdqa <xmm2=%xmm1,>xmm0=%xmm6
4203movdqa %xmm1,%xmm6
4204
4205# qhasm: xmm7 = xmm3
4206# asm 1: movdqa <xmm3=int6464#1,>xmm7=int6464#8
4207# asm 2: movdqa <xmm3=%xmm0,>xmm7=%xmm7
4208movdqa %xmm0,%xmm7
4209
4210# qhasm: xmm2 |= xmm1
4211# asm 1: por <xmm1=int6464#3,<xmm2=int6464#2
4212# asm 2: por <xmm1=%xmm2,<xmm2=%xmm1
4213por %xmm2,%xmm1
4214
4215# qhasm: xmm3 |= xmm4
4216# asm 1: por <xmm4=int6464#5,<xmm3=int6464#1
4217# asm 2: por <xmm4=%xmm4,<xmm3=%xmm0
4218por %xmm4,%xmm0
4219
4220# qhasm: xmm7 ^= xmm0
4221# asm 1: pxor <xmm0=int6464#7,<xmm7=int6464#8
4222# asm 2: pxor <xmm0=%xmm6,<xmm7=%xmm7
4223pxor %xmm6,%xmm7
4224
4225# qhasm: xmm6 &= xmm4
4226# asm 1: pand <xmm4=int6464#5,<xmm6=int6464#6
4227# asm 2: pand <xmm4=%xmm4,<xmm6=%xmm5
4228pand %xmm4,%xmm5
4229
4230# qhasm: xmm0 &= xmm1
4231# asm 1: pand <xmm1=int6464#3,<xmm0=int6464#7
4232# asm 2: pand <xmm1=%xmm2,<xmm0=%xmm6
4233pand %xmm2,%xmm6
4234
4235# qhasm: xmm4 ^= xmm1
4236# asm 1: pxor <xmm1=int6464#3,<xmm4=int6464#5
4237# asm 2: pxor <xmm1=%xmm2,<xmm4=%xmm4
4238pxor %xmm2,%xmm4
4239
4240# qhasm: xmm7 &= xmm4
4241# asm 1: pand <xmm4=int6464#5,<xmm7=int6464#8
4242# asm 2: pand <xmm4=%xmm4,<xmm7=%xmm7
4243pand %xmm4,%xmm7
4244
4245# qhasm: xmm4 = xmm11
4246# asm 1: movdqa <xmm11=int6464#12,>xmm4=int6464#3
4247# asm 2: movdqa <xmm11=%xmm11,>xmm4=%xmm2
4248movdqa %xmm11,%xmm2
4249
4250# qhasm: xmm4 ^= xmm8
4251# asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#3
4252# asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm2
4253pxor %xmm8,%xmm2
4254
4255# qhasm: xmm5 &= xmm4
4256# asm 1: pand <xmm4=int6464#3,<xmm5=int6464#4
4257# asm 2: pand <xmm4=%xmm2,<xmm5=%xmm3
4258pand %xmm2,%xmm3
4259
4260# qhasm: xmm3 ^= xmm5
4261# asm 1: pxor <xmm5=int6464#4,<xmm3=int6464#1
4262# asm 2: pxor <xmm5=%xmm3,<xmm3=%xmm0
4263pxor %xmm3,%xmm0
4264
4265# qhasm: xmm2 ^= xmm5
4266# asm 1: pxor <xmm5=int6464#4,<xmm2=int6464#2
4267# asm 2: pxor <xmm5=%xmm3,<xmm2=%xmm1
4268pxor %xmm3,%xmm1
4269
4270# qhasm: xmm5 = xmm15
4271# asm 1: movdqa <xmm15=int6464#16,>xmm5=int6464#3
4272# asm 2: movdqa <xmm15=%xmm15,>xmm5=%xmm2
4273movdqa %xmm15,%xmm2
4274
4275# qhasm: xmm5 ^= xmm9
4276# asm 1: pxor <xmm9=int6464#10,<xmm5=int6464#3
4277# asm 2: pxor <xmm9=%xmm9,<xmm5=%xmm2
4278pxor %xmm9,%xmm2
4279
4280# qhasm: xmm4 = xmm13
4281# asm 1: movdqa <xmm13=int6464#14,>xmm4=int6464#4
4282# asm 2: movdqa <xmm13=%xmm13,>xmm4=%xmm3
4283movdqa %xmm13,%xmm3
4284
4285# qhasm: xmm1 = xmm5
4286# asm 1: movdqa <xmm5=int6464#3,>xmm1=int6464#5
4287# asm 2: movdqa <xmm5=%xmm2,>xmm1=%xmm4
4288movdqa %xmm2,%xmm4
4289
4290# qhasm: xmm4 ^= xmm14
4291# asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#4
4292# asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm3
4293pxor %xmm14,%xmm3
4294
4295# qhasm: xmm1 |= xmm4
4296# asm 1: por <xmm4=int6464#4,<xmm1=int6464#5
4297# asm 2: por <xmm4=%xmm3,<xmm1=%xmm4
4298por %xmm3,%xmm4
4299
4300# qhasm: xmm5 &= xmm4
4301# asm 1: pand <xmm4=int6464#4,<xmm5=int6464#3
4302# asm 2: pand <xmm4=%xmm3,<xmm5=%xmm2
4303pand %xmm3,%xmm2
4304
4305# qhasm: xmm0 ^= xmm5
4306# asm 1: pxor <xmm5=int6464#3,<xmm0=int6464#7
4307# asm 2: pxor <xmm5=%xmm2,<xmm0=%xmm6
4308pxor %xmm2,%xmm6
4309
4310# qhasm: xmm3 ^= xmm7
4311# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#1
4312# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm0
4313pxor %xmm7,%xmm0
4314
4315# qhasm: xmm2 ^= xmm6
4316# asm 1: pxor <xmm6=int6464#6,<xmm2=int6464#2
4317# asm 2: pxor <xmm6=%xmm5,<xmm2=%xmm1
4318pxor %xmm5,%xmm1
4319
4320# qhasm: xmm1 ^= xmm7
4321# asm 1: pxor <xmm7=int6464#8,<xmm1=int6464#5
4322# asm 2: pxor <xmm7=%xmm7,<xmm1=%xmm4
4323pxor %xmm7,%xmm4
4324
4325# qhasm: xmm0 ^= xmm6
4326# asm 1: pxor <xmm6=int6464#6,<xmm0=int6464#7
4327# asm 2: pxor <xmm6=%xmm5,<xmm0=%xmm6
4328pxor %xmm5,%xmm6
4329
4330# qhasm: xmm1 ^= xmm6
4331# asm 1: pxor <xmm6=int6464#6,<xmm1=int6464#5
4332# asm 2: pxor <xmm6=%xmm5,<xmm1=%xmm4
4333pxor %xmm5,%xmm4
4334
4335# qhasm: xmm4 = xmm10
4336# asm 1: movdqa <xmm10=int6464#11,>xmm4=int6464#3
4337# asm 2: movdqa <xmm10=%xmm10,>xmm4=%xmm2
4338movdqa %xmm10,%xmm2
4339
4340# qhasm: xmm5 = xmm12
4341# asm 1: movdqa <xmm12=int6464#13,>xmm5=int6464#4
4342# asm 2: movdqa <xmm12=%xmm12,>xmm5=%xmm3
4343movdqa %xmm12,%xmm3
4344
4345# qhasm: xmm6 = xmm9
4346# asm 1: movdqa <xmm9=int6464#10,>xmm6=int6464#6
4347# asm 2: movdqa <xmm9=%xmm9,>xmm6=%xmm5
4348movdqa %xmm9,%xmm5
4349
4350# qhasm: xmm7 = xmm15
4351# asm 1: movdqa <xmm15=int6464#16,>xmm7=int6464#8
4352# asm 2: movdqa <xmm15=%xmm15,>xmm7=%xmm7
4353movdqa %xmm15,%xmm7
4354
4355# qhasm: xmm4 &= xmm11
4356# asm 1: pand <xmm11=int6464#12,<xmm4=int6464#3
4357# asm 2: pand <xmm11=%xmm11,<xmm4=%xmm2
4358pand %xmm11,%xmm2
4359
4360# qhasm: xmm5 &= xmm8
4361# asm 1: pand <xmm8=int6464#9,<xmm5=int6464#4
4362# asm 2: pand <xmm8=%xmm8,<xmm5=%xmm3
4363pand %xmm8,%xmm3
4364
4365# qhasm: xmm6 &= xmm13
4366# asm 1: pand <xmm13=int6464#14,<xmm6=int6464#6
4367# asm 2: pand <xmm13=%xmm13,<xmm6=%xmm5
4368pand %xmm13,%xmm5
4369
4370# qhasm: xmm7 |= xmm14
4371# asm 1: por <xmm14=int6464#15,<xmm7=int6464#8
4372# asm 2: por <xmm14=%xmm14,<xmm7=%xmm7
4373por %xmm14,%xmm7
4374
4375# qhasm: xmm3 ^= xmm4
4376# asm 1: pxor <xmm4=int6464#3,<xmm3=int6464#1
4377# asm 2: pxor <xmm4=%xmm2,<xmm3=%xmm0
4378pxor %xmm2,%xmm0
4379
4380# qhasm: xmm2 ^= xmm5
4381# asm 1: pxor <xmm5=int6464#4,<xmm2=int6464#2
4382# asm 2: pxor <xmm5=%xmm3,<xmm2=%xmm1
4383pxor %xmm3,%xmm1
4384
4385# qhasm: xmm1 ^= xmm6
4386# asm 1: pxor <xmm6=int6464#6,<xmm1=int6464#5
4387# asm 2: pxor <xmm6=%xmm5,<xmm1=%xmm4
4388pxor %xmm5,%xmm4
4389
4390# qhasm: xmm0 ^= xmm7
4391# asm 1: pxor <xmm7=int6464#8,<xmm0=int6464#7
4392# asm 2: pxor <xmm7=%xmm7,<xmm0=%xmm6
4393pxor %xmm7,%xmm6
4394
4395# qhasm: xmm4 = xmm3
4396# asm 1: movdqa <xmm3=int6464#1,>xmm4=int6464#3
4397# asm 2: movdqa <xmm3=%xmm0,>xmm4=%xmm2
4398movdqa %xmm0,%xmm2
4399
4400# qhasm: xmm4 ^= xmm2
4401# asm 1: pxor <xmm2=int6464#2,<xmm4=int6464#3
4402# asm 2: pxor <xmm2=%xmm1,<xmm4=%xmm2
4403pxor %xmm1,%xmm2
4404
4405# qhasm: xmm3 &= xmm1
4406# asm 1: pand <xmm1=int6464#5,<xmm3=int6464#1
4407# asm 2: pand <xmm1=%xmm4,<xmm3=%xmm0
4408pand %xmm4,%xmm0
4409
4410# qhasm: xmm6 = xmm0
4411# asm 1: movdqa <xmm0=int6464#7,>xmm6=int6464#4
4412# asm 2: movdqa <xmm0=%xmm6,>xmm6=%xmm3
4413movdqa %xmm6,%xmm3
4414
4415# qhasm: xmm6 ^= xmm3
4416# asm 1: pxor <xmm3=int6464#1,<xmm6=int6464#4
4417# asm 2: pxor <xmm3=%xmm0,<xmm6=%xmm3
4418pxor %xmm0,%xmm3
4419
4420# qhasm: xmm7 = xmm4
4421# asm 1: movdqa <xmm4=int6464#3,>xmm7=int6464#6
4422# asm 2: movdqa <xmm4=%xmm2,>xmm7=%xmm5
4423movdqa %xmm2,%xmm5
4424
4425# qhasm: xmm7 &= xmm6
4426# asm 1: pand <xmm6=int6464#4,<xmm7=int6464#6
4427# asm 2: pand <xmm6=%xmm3,<xmm7=%xmm5
4428pand %xmm3,%xmm5
4429
4430# qhasm: xmm7 ^= xmm2
4431# asm 1: pxor <xmm2=int6464#2,<xmm7=int6464#6
4432# asm 2: pxor <xmm2=%xmm1,<xmm7=%xmm5
4433pxor %xmm1,%xmm5
4434
4435# qhasm: xmm5 = xmm1
4436# asm 1: movdqa <xmm1=int6464#5,>xmm5=int6464#8
4437# asm 2: movdqa <xmm1=%xmm4,>xmm5=%xmm7
4438movdqa %xmm4,%xmm7
4439
4440# qhasm: xmm5 ^= xmm0
4441# asm 1: pxor <xmm0=int6464#7,<xmm5=int6464#8
4442# asm 2: pxor <xmm0=%xmm6,<xmm5=%xmm7
4443pxor %xmm6,%xmm7
4444
4445# qhasm: xmm3 ^= xmm2
4446# asm 1: pxor <xmm2=int6464#2,<xmm3=int6464#1
4447# asm 2: pxor <xmm2=%xmm1,<xmm3=%xmm0
4448pxor %xmm1,%xmm0
4449
4450# qhasm: xmm5 &= xmm3
4451# asm 1: pand <xmm3=int6464#1,<xmm5=int6464#8
4452# asm 2: pand <xmm3=%xmm0,<xmm5=%xmm7
4453pand %xmm0,%xmm7
4454
4455# qhasm: xmm5 ^= xmm0
4456# asm 1: pxor <xmm0=int6464#7,<xmm5=int6464#8
4457# asm 2: pxor <xmm0=%xmm6,<xmm5=%xmm7
4458pxor %xmm6,%xmm7
4459
4460# qhasm: xmm1 ^= xmm5
4461# asm 1: pxor <xmm5=int6464#8,<xmm1=int6464#5
4462# asm 2: pxor <xmm5=%xmm7,<xmm1=%xmm4
4463pxor %xmm7,%xmm4
4464
4465# qhasm: xmm2 = xmm6
4466# asm 1: movdqa <xmm6=int6464#4,>xmm2=int6464#1
4467# asm 2: movdqa <xmm6=%xmm3,>xmm2=%xmm0
4468movdqa %xmm3,%xmm0
4469
4470# qhasm: xmm2 ^= xmm5
4471# asm 1: pxor <xmm5=int6464#8,<xmm2=int6464#1
4472# asm 2: pxor <xmm5=%xmm7,<xmm2=%xmm0
4473pxor %xmm7,%xmm0
4474
4475# qhasm: xmm2 &= xmm0
4476# asm 1: pand <xmm0=int6464#7,<xmm2=int6464#1
4477# asm 2: pand <xmm0=%xmm6,<xmm2=%xmm0
4478pand %xmm6,%xmm0
4479
4480# qhasm: xmm1 ^= xmm2
4481# asm 1: pxor <xmm2=int6464#1,<xmm1=int6464#5
4482# asm 2: pxor <xmm2=%xmm0,<xmm1=%xmm4
4483pxor %xmm0,%xmm4
4484
4485# qhasm: xmm6 ^= xmm2
4486# asm 1: pxor <xmm2=int6464#1,<xmm6=int6464#4
4487# asm 2: pxor <xmm2=%xmm0,<xmm6=%xmm3
4488pxor %xmm0,%xmm3
4489
4490# qhasm: xmm6 &= xmm7
4491# asm 1: pand <xmm7=int6464#6,<xmm6=int6464#4
4492# asm 2: pand <xmm7=%xmm5,<xmm6=%xmm3
4493pand %xmm5,%xmm3
4494
4495# qhasm: xmm6 ^= xmm4
4496# asm 1: pxor <xmm4=int6464#3,<xmm6=int6464#4
4497# asm 2: pxor <xmm4=%xmm2,<xmm6=%xmm3
4498pxor %xmm2,%xmm3
4499
4500# qhasm: xmm4 = xmm14
4501# asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#1
4502# asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm0
4503movdqa %xmm14,%xmm0
4504
4505# qhasm: xmm0 = xmm13
4506# asm 1: movdqa <xmm13=int6464#14,>xmm0=int6464#2
4507# asm 2: movdqa <xmm13=%xmm13,>xmm0=%xmm1
4508movdqa %xmm13,%xmm1
4509
4510# qhasm: xmm2 = xmm7
4511# asm 1: movdqa <xmm7=int6464#6,>xmm2=int6464#3
4512# asm 2: movdqa <xmm7=%xmm5,>xmm2=%xmm2
4513movdqa %xmm5,%xmm2
4514
4515# qhasm: xmm2 ^= xmm6
4516# asm 1: pxor <xmm6=int6464#4,<xmm2=int6464#3
4517# asm 2: pxor <xmm6=%xmm3,<xmm2=%xmm2
4518pxor %xmm3,%xmm2
4519
4520# qhasm: xmm2 &= xmm14
4521# asm 1: pand <xmm14=int6464#15,<xmm2=int6464#3
4522# asm 2: pand <xmm14=%xmm14,<xmm2=%xmm2
4523pand %xmm14,%xmm2
4524
4525# qhasm: xmm14 ^= xmm13
4526# asm 1: pxor <xmm13=int6464#14,<xmm14=int6464#15
4527# asm 2: pxor <xmm13=%xmm13,<xmm14=%xmm14
4528pxor %xmm13,%xmm14
4529
4530# qhasm: xmm14 &= xmm6
4531# asm 1: pand <xmm6=int6464#4,<xmm14=int6464#15
4532# asm 2: pand <xmm6=%xmm3,<xmm14=%xmm14
4533pand %xmm3,%xmm14
4534
4535# qhasm: xmm13 &= xmm7
4536# asm 1: pand <xmm7=int6464#6,<xmm13=int6464#14
4537# asm 2: pand <xmm7=%xmm5,<xmm13=%xmm13
4538pand %xmm5,%xmm13
4539
4540# qhasm: xmm14 ^= xmm13
4541# asm 1: pxor <xmm13=int6464#14,<xmm14=int6464#15
4542# asm 2: pxor <xmm13=%xmm13,<xmm14=%xmm14
4543pxor %xmm13,%xmm14
4544
4545# qhasm: xmm13 ^= xmm2
4546# asm 1: pxor <xmm2=int6464#3,<xmm13=int6464#14
4547# asm 2: pxor <xmm2=%xmm2,<xmm13=%xmm13
4548pxor %xmm2,%xmm13
4549
4550# qhasm: xmm4 ^= xmm8
4551# asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#1
4552# asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm0
4553pxor %xmm8,%xmm0
4554
4555# qhasm: xmm0 ^= xmm11
4556# asm 1: pxor <xmm11=int6464#12,<xmm0=int6464#2
4557# asm 2: pxor <xmm11=%xmm11,<xmm0=%xmm1
4558pxor %xmm11,%xmm1
4559
4560# qhasm: xmm7 ^= xmm5
4561# asm 1: pxor <xmm5=int6464#8,<xmm7=int6464#6
4562# asm 2: pxor <xmm5=%xmm7,<xmm7=%xmm5
4563pxor %xmm7,%xmm5
4564
4565# qhasm: xmm6 ^= xmm1
4566# asm 1: pxor <xmm1=int6464#5,<xmm6=int6464#4
4567# asm 2: pxor <xmm1=%xmm4,<xmm6=%xmm3
4568pxor %xmm4,%xmm3
4569
4570# qhasm: xmm3 = xmm7
4571# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
4572# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
4573movdqa %xmm5,%xmm2
4574
4575# qhasm: xmm3 ^= xmm6
4576# asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3
4577# asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2
4578pxor %xmm3,%xmm2
4579
4580# qhasm: xmm3 &= xmm4
4581# asm 1: pand <xmm4=int6464#1,<xmm3=int6464#3
4582# asm 2: pand <xmm4=%xmm0,<xmm3=%xmm2
4583pand %xmm0,%xmm2
4584
4585# qhasm: xmm4 ^= xmm0
4586# asm 1: pxor <xmm0=int6464#2,<xmm4=int6464#1
4587# asm 2: pxor <xmm0=%xmm1,<xmm4=%xmm0
4588pxor %xmm1,%xmm0
4589
4590# qhasm: xmm4 &= xmm6
4591# asm 1: pand <xmm6=int6464#4,<xmm4=int6464#1
4592# asm 2: pand <xmm6=%xmm3,<xmm4=%xmm0
4593pand %xmm3,%xmm0
4594
4595# qhasm: xmm0 &= xmm7
4596# asm 1: pand <xmm7=int6464#6,<xmm0=int6464#2
4597# asm 2: pand <xmm7=%xmm5,<xmm0=%xmm1
4598pand %xmm5,%xmm1
4599
4600# qhasm: xmm0 ^= xmm4
4601# asm 1: pxor <xmm4=int6464#1,<xmm0=int6464#2
4602# asm 2: pxor <xmm4=%xmm0,<xmm0=%xmm1
4603pxor %xmm0,%xmm1
4604
4605# qhasm: xmm4 ^= xmm3
4606# asm 1: pxor <xmm3=int6464#3,<xmm4=int6464#1
4607# asm 2: pxor <xmm3=%xmm2,<xmm4=%xmm0
4608pxor %xmm2,%xmm0
4609
4610# qhasm: xmm2 = xmm5
4611# asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3
4612# asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2
4613movdqa %xmm7,%xmm2
4614
4615# qhasm: xmm2 ^= xmm1
4616# asm 1: pxor <xmm1=int6464#5,<xmm2=int6464#3
4617# asm 2: pxor <xmm1=%xmm4,<xmm2=%xmm2
4618pxor %xmm4,%xmm2
4619
4620# qhasm: xmm2 &= xmm8
4621# asm 1: pand <xmm8=int6464#9,<xmm2=int6464#3
4622# asm 2: pand <xmm8=%xmm8,<xmm2=%xmm2
4623pand %xmm8,%xmm2
4624
4625# qhasm: xmm8 ^= xmm11
4626# asm 1: pxor <xmm11=int6464#12,<xmm8=int6464#9
4627# asm 2: pxor <xmm11=%xmm11,<xmm8=%xmm8
4628pxor %xmm11,%xmm8
4629
4630# qhasm: xmm8 &= xmm1
4631# asm 1: pand <xmm1=int6464#5,<xmm8=int6464#9
4632# asm 2: pand <xmm1=%xmm4,<xmm8=%xmm8
4633pand %xmm4,%xmm8
4634
4635# qhasm: xmm11 &= xmm5
4636# asm 1: pand <xmm5=int6464#8,<xmm11=int6464#12
4637# asm 2: pand <xmm5=%xmm7,<xmm11=%xmm11
4638pand %xmm7,%xmm11
4639
4640# qhasm: xmm8 ^= xmm11
4641# asm 1: pxor <xmm11=int6464#12,<xmm8=int6464#9
4642# asm 2: pxor <xmm11=%xmm11,<xmm8=%xmm8
4643pxor %xmm11,%xmm8
4644
4645# qhasm: xmm11 ^= xmm2
4646# asm 1: pxor <xmm2=int6464#3,<xmm11=int6464#12
4647# asm 2: pxor <xmm2=%xmm2,<xmm11=%xmm11
4648pxor %xmm2,%xmm11
4649
4650# qhasm: xmm14 ^= xmm4
4651# asm 1: pxor <xmm4=int6464#1,<xmm14=int6464#15
4652# asm 2: pxor <xmm4=%xmm0,<xmm14=%xmm14
4653pxor %xmm0,%xmm14
4654
4655# qhasm: xmm8 ^= xmm4
4656# asm 1: pxor <xmm4=int6464#1,<xmm8=int6464#9
4657# asm 2: pxor <xmm4=%xmm0,<xmm8=%xmm8
4658pxor %xmm0,%xmm8
4659
4660# qhasm: xmm13 ^= xmm0
4661# asm 1: pxor <xmm0=int6464#2,<xmm13=int6464#14
4662# asm 2: pxor <xmm0=%xmm1,<xmm13=%xmm13
4663pxor %xmm1,%xmm13
4664
4665# qhasm: xmm11 ^= xmm0
4666# asm 1: pxor <xmm0=int6464#2,<xmm11=int6464#12
4667# asm 2: pxor <xmm0=%xmm1,<xmm11=%xmm11
4668pxor %xmm1,%xmm11
4669
4670# qhasm: xmm4 = xmm15
4671# asm 1: movdqa <xmm15=int6464#16,>xmm4=int6464#1
4672# asm 2: movdqa <xmm15=%xmm15,>xmm4=%xmm0
4673movdqa %xmm15,%xmm0
4674
4675# qhasm: xmm0 = xmm9
4676# asm 1: movdqa <xmm9=int6464#10,>xmm0=int6464#2
4677# asm 2: movdqa <xmm9=%xmm9,>xmm0=%xmm1
4678movdqa %xmm9,%xmm1
4679
4680# qhasm: xmm4 ^= xmm12
4681# asm 1: pxor <xmm12=int6464#13,<xmm4=int6464#1
4682# asm 2: pxor <xmm12=%xmm12,<xmm4=%xmm0
4683pxor %xmm12,%xmm0
4684
4685# qhasm: xmm0 ^= xmm10
4686# asm 1: pxor <xmm10=int6464#11,<xmm0=int6464#2
4687# asm 2: pxor <xmm10=%xmm10,<xmm0=%xmm1
4688pxor %xmm10,%xmm1
4689
4690# qhasm: xmm3 = xmm7
4691# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
4692# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
4693movdqa %xmm5,%xmm2
4694
4695# qhasm: xmm3 ^= xmm6
4696# asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3
4697# asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2
4698pxor %xmm3,%xmm2
4699
4700# qhasm: xmm3 &= xmm4
4701# asm 1: pand <xmm4=int6464#1,<xmm3=int6464#3
4702# asm 2: pand <xmm4=%xmm0,<xmm3=%xmm2
4703pand %xmm0,%xmm2
4704
4705# qhasm: xmm4 ^= xmm0
4706# asm 1: pxor <xmm0=int6464#2,<xmm4=int6464#1
4707# asm 2: pxor <xmm0=%xmm1,<xmm4=%xmm0
4708pxor %xmm1,%xmm0
4709
4710# qhasm: xmm4 &= xmm6
4711# asm 1: pand <xmm6=int6464#4,<xmm4=int6464#1
4712# asm 2: pand <xmm6=%xmm3,<xmm4=%xmm0
4713pand %xmm3,%xmm0
4714
4715# qhasm: xmm0 &= xmm7
4716# asm 1: pand <xmm7=int6464#6,<xmm0=int6464#2
4717# asm 2: pand <xmm7=%xmm5,<xmm0=%xmm1
4718pand %xmm5,%xmm1
4719
4720# qhasm: xmm0 ^= xmm4
4721# asm 1: pxor <xmm4=int6464#1,<xmm0=int6464#2
4722# asm 2: pxor <xmm4=%xmm0,<xmm0=%xmm1
4723pxor %xmm0,%xmm1
4724
4725# qhasm: xmm4 ^= xmm3
4726# asm 1: pxor <xmm3=int6464#3,<xmm4=int6464#1
4727# asm 2: pxor <xmm3=%xmm2,<xmm4=%xmm0
4728pxor %xmm2,%xmm0
4729
4730# qhasm: xmm2 = xmm5
4731# asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3
4732# asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2
4733movdqa %xmm7,%xmm2
4734
4735# qhasm: xmm2 ^= xmm1
4736# asm 1: pxor <xmm1=int6464#5,<xmm2=int6464#3
4737# asm 2: pxor <xmm1=%xmm4,<xmm2=%xmm2
4738pxor %xmm4,%xmm2
4739
4740# qhasm: xmm2 &= xmm12
4741# asm 1: pand <xmm12=int6464#13,<xmm2=int6464#3
4742# asm 2: pand <xmm12=%xmm12,<xmm2=%xmm2
4743pand %xmm12,%xmm2
4744
4745# qhasm: xmm12 ^= xmm10
4746# asm 1: pxor <xmm10=int6464#11,<xmm12=int6464#13
4747# asm 2: pxor <xmm10=%xmm10,<xmm12=%xmm12
4748pxor %xmm10,%xmm12
4749
4750# qhasm: xmm12 &= xmm1
4751# asm 1: pand <xmm1=int6464#5,<xmm12=int6464#13
4752# asm 2: pand <xmm1=%xmm4,<xmm12=%xmm12
4753pand %xmm4,%xmm12
4754
4755# qhasm: xmm10 &= xmm5
4756# asm 1: pand <xmm5=int6464#8,<xmm10=int6464#11
4757# asm 2: pand <xmm5=%xmm7,<xmm10=%xmm10
4758pand %xmm7,%xmm10
4759
4760# qhasm: xmm12 ^= xmm10
4761# asm 1: pxor <xmm10=int6464#11,<xmm12=int6464#13
4762# asm 2: pxor <xmm10=%xmm10,<xmm12=%xmm12
4763pxor %xmm10,%xmm12
4764
4765# qhasm: xmm10 ^= xmm2
4766# asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#11
4767# asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm10
4768pxor %xmm2,%xmm10
4769
4770# qhasm: xmm7 ^= xmm5
4771# asm 1: pxor <xmm5=int6464#8,<xmm7=int6464#6
4772# asm 2: pxor <xmm5=%xmm7,<xmm7=%xmm5
4773pxor %xmm7,%xmm5
4774
4775# qhasm: xmm6 ^= xmm1
4776# asm 1: pxor <xmm1=int6464#5,<xmm6=int6464#4
4777# asm 2: pxor <xmm1=%xmm4,<xmm6=%xmm3
4778pxor %xmm4,%xmm3
4779
4780# qhasm: xmm3 = xmm7
4781# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
4782# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
4783movdqa %xmm5,%xmm2
4784
4785# qhasm: xmm3 ^= xmm6
4786# asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3
4787# asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2
4788pxor %xmm3,%xmm2
4789
4790# qhasm: xmm3 &= xmm15
4791# asm 1: pand <xmm15=int6464#16,<xmm3=int6464#3
4792# asm 2: pand <xmm15=%xmm15,<xmm3=%xmm2
4793pand %xmm15,%xmm2
4794
4795# qhasm: xmm15 ^= xmm9
4796# asm 1: pxor <xmm9=int6464#10,<xmm15=int6464#16
4797# asm 2: pxor <xmm9=%xmm9,<xmm15=%xmm15
4798pxor %xmm9,%xmm15
4799
4800# qhasm: xmm15 &= xmm6
4801# asm 1: pand <xmm6=int6464#4,<xmm15=int6464#16
4802# asm 2: pand <xmm6=%xmm3,<xmm15=%xmm15
4803pand %xmm3,%xmm15
4804
4805# qhasm: xmm9 &= xmm7
4806# asm 1: pand <xmm7=int6464#6,<xmm9=int6464#10
4807# asm 2: pand <xmm7=%xmm5,<xmm9=%xmm9
4808pand %xmm5,%xmm9
4809
4810# qhasm: xmm15 ^= xmm9
4811# asm 1: pxor <xmm9=int6464#10,<xmm15=int6464#16
4812# asm 2: pxor <xmm9=%xmm9,<xmm15=%xmm15
4813pxor %xmm9,%xmm15
4814
4815# qhasm: xmm9 ^= xmm3
4816# asm 1: pxor <xmm3=int6464#3,<xmm9=int6464#10
4817# asm 2: pxor <xmm3=%xmm2,<xmm9=%xmm9
4818pxor %xmm2,%xmm9
4819
4820# qhasm: xmm15 ^= xmm4
4821# asm 1: pxor <xmm4=int6464#1,<xmm15=int6464#16
4822# asm 2: pxor <xmm4=%xmm0,<xmm15=%xmm15
4823pxor %xmm0,%xmm15
4824
4825# qhasm: xmm12 ^= xmm4
4826# asm 1: pxor <xmm4=int6464#1,<xmm12=int6464#13
4827# asm 2: pxor <xmm4=%xmm0,<xmm12=%xmm12
4828pxor %xmm0,%xmm12
4829
4830# qhasm: xmm9 ^= xmm0
4831# asm 1: pxor <xmm0=int6464#2,<xmm9=int6464#10
4832# asm 2: pxor <xmm0=%xmm1,<xmm9=%xmm9
4833pxor %xmm1,%xmm9
4834
4835# qhasm: xmm10 ^= xmm0
4836# asm 1: pxor <xmm0=int6464#2,<xmm10=int6464#11
4837# asm 2: pxor <xmm0=%xmm1,<xmm10=%xmm10
4838pxor %xmm1,%xmm10
4839
4840# qhasm: xmm15 ^= xmm8
4841# asm 1: pxor <xmm8=int6464#9,<xmm15=int6464#16
4842# asm 2: pxor <xmm8=%xmm8,<xmm15=%xmm15
4843pxor %xmm8,%xmm15
4844
4845# qhasm: xmm9 ^= xmm14
4846# asm 1: pxor <xmm14=int6464#15,<xmm9=int6464#10
4847# asm 2: pxor <xmm14=%xmm14,<xmm9=%xmm9
4848pxor %xmm14,%xmm9
4849
4850# qhasm: xmm12 ^= xmm15
4851# asm 1: pxor <xmm15=int6464#16,<xmm12=int6464#13
4852# asm 2: pxor <xmm15=%xmm15,<xmm12=%xmm12
4853pxor %xmm15,%xmm12
4854
4855# qhasm: xmm14 ^= xmm8
4856# asm 1: pxor <xmm8=int6464#9,<xmm14=int6464#15
4857# asm 2: pxor <xmm8=%xmm8,<xmm14=%xmm14
4858pxor %xmm8,%xmm14
4859
4860# qhasm: xmm8 ^= xmm9
4861# asm 1: pxor <xmm9=int6464#10,<xmm8=int6464#9
4862# asm 2: pxor <xmm9=%xmm9,<xmm8=%xmm8
4863pxor %xmm9,%xmm8
4864
4865# qhasm: xmm9 ^= xmm13
4866# asm 1: pxor <xmm13=int6464#14,<xmm9=int6464#10
4867# asm 2: pxor <xmm13=%xmm13,<xmm9=%xmm9
4868pxor %xmm13,%xmm9
4869
4870# qhasm: xmm13 ^= xmm10
4871# asm 1: pxor <xmm10=int6464#11,<xmm13=int6464#14
4872# asm 2: pxor <xmm10=%xmm10,<xmm13=%xmm13
4873pxor %xmm10,%xmm13
4874
4875# qhasm: xmm12 ^= xmm13
4876# asm 1: pxor <xmm13=int6464#14,<xmm12=int6464#13
4877# asm 2: pxor <xmm13=%xmm13,<xmm12=%xmm12
4878pxor %xmm13,%xmm12
4879
4880# qhasm: xmm10 ^= xmm11
4881# asm 1: pxor <xmm11=int6464#12,<xmm10=int6464#11
4882# asm 2: pxor <xmm11=%xmm11,<xmm10=%xmm10
4883pxor %xmm11,%xmm10
4884
4885# qhasm: xmm11 ^= xmm13
4886# asm 1: pxor <xmm13=int6464#14,<xmm11=int6464#12
4887# asm 2: pxor <xmm13=%xmm13,<xmm11=%xmm11
4888pxor %xmm13,%xmm11
4889
4890# qhasm: xmm14 ^= xmm11
4891# asm 1: pxor <xmm11=int6464#12,<xmm14=int6464#15
4892# asm 2: pxor <xmm11=%xmm11,<xmm14=%xmm14
4893pxor %xmm11,%xmm14
4894
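# Annotation (hedged): the same round structure now repeats for the next
# round: MixColumns rotations ($0x93 then $0x4E), AddRoundKey with the
# round key at c+512..c+624, ShiftRows via pshufb SR, and another S-box
# evaluation.
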
4895# qhasm: xmm0 = shuffle dwords of xmm8 by 0x93
4896# asm 1: pshufd $0x93,<xmm8=int6464#9,>xmm0=int6464#1
4897# asm 2: pshufd $0x93,<xmm8=%xmm8,>xmm0=%xmm0
4898pshufd $0x93,%xmm8,%xmm0
4899
4900# qhasm: xmm1 = shuffle dwords of xmm9 by 0x93
4901# asm 1: pshufd $0x93,<xmm9=int6464#10,>xmm1=int6464#2
4902# asm 2: pshufd $0x93,<xmm9=%xmm9,>xmm1=%xmm1
4903pshufd $0x93,%xmm9,%xmm1
4904
4905# qhasm: xmm2 = shuffle dwords of xmm12 by 0x93
4906# asm 1: pshufd $0x93,<xmm12=int6464#13,>xmm2=int6464#3
4907# asm 2: pshufd $0x93,<xmm12=%xmm12,>xmm2=%xmm2
4908pshufd $0x93,%xmm12,%xmm2
4909
4910# qhasm: xmm3 = shuffle dwords of xmm14 by 0x93
4911# asm 1: pshufd $0x93,<xmm14=int6464#15,>xmm3=int6464#4
4912# asm 2: pshufd $0x93,<xmm14=%xmm14,>xmm3=%xmm3
4913pshufd $0x93,%xmm14,%xmm3
4914
4915# qhasm: xmm4 = shuffle dwords of xmm11 by 0x93
4916# asm 1: pshufd $0x93,<xmm11=int6464#12,>xmm4=int6464#5
4917# asm 2: pshufd $0x93,<xmm11=%xmm11,>xmm4=%xmm4
4918pshufd $0x93,%xmm11,%xmm4
4919
4920# qhasm: xmm5 = shuffle dwords of xmm15 by 0x93
4921# asm 1: pshufd $0x93,<xmm15=int6464#16,>xmm5=int6464#6
4922# asm 2: pshufd $0x93,<xmm15=%xmm15,>xmm5=%xmm5
4923pshufd $0x93,%xmm15,%xmm5
4924
4925# qhasm: xmm6 = shuffle dwords of xmm10 by 0x93
4926# asm 1: pshufd $0x93,<xmm10=int6464#11,>xmm6=int6464#7
4927# asm 2: pshufd $0x93,<xmm10=%xmm10,>xmm6=%xmm6
4928pshufd $0x93,%xmm10,%xmm6
4929
4930# qhasm: xmm7 = shuffle dwords of xmm13 by 0x93
4931# asm 1: pshufd $0x93,<xmm13=int6464#14,>xmm7=int6464#8
4932# asm 2: pshufd $0x93,<xmm13=%xmm13,>xmm7=%xmm7
4933pshufd $0x93,%xmm13,%xmm7
4934
4935# qhasm: xmm8 ^= xmm0
4936# asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9
4937# asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8
4938pxor %xmm0,%xmm8
4939
4940# qhasm: xmm9 ^= xmm1
4941# asm 1: pxor <xmm1=int6464#2,<xmm9=int6464#10
4942# asm 2: pxor <xmm1=%xmm1,<xmm9=%xmm9
4943pxor %xmm1,%xmm9
4944
4945# qhasm: xmm12 ^= xmm2
4946# asm 1: pxor <xmm2=int6464#3,<xmm12=int6464#13
4947# asm 2: pxor <xmm2=%xmm2,<xmm12=%xmm12
4948pxor %xmm2,%xmm12
4949
4950# qhasm: xmm14 ^= xmm3
4951# asm 1: pxor <xmm3=int6464#4,<xmm14=int6464#15
4952# asm 2: pxor <xmm3=%xmm3,<xmm14=%xmm14
4953pxor %xmm3,%xmm14
4954
4955# qhasm: xmm11 ^= xmm4
4956# asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#12
4957# asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm11
4958pxor %xmm4,%xmm11
4959
4960# qhasm: xmm15 ^= xmm5
4961# asm 1: pxor <xmm5=int6464#6,<xmm15=int6464#16
4962# asm 2: pxor <xmm5=%xmm5,<xmm15=%xmm15
4963pxor %xmm5,%xmm15
4964
4965# qhasm: xmm10 ^= xmm6
4966# asm 1: pxor <xmm6=int6464#7,<xmm10=int6464#11
4967# asm 2: pxor <xmm6=%xmm6,<xmm10=%xmm10
4968pxor %xmm6,%xmm10
4969
4970# qhasm: xmm13 ^= xmm7
4971# asm 1: pxor <xmm7=int6464#8,<xmm13=int6464#14
4972# asm 2: pxor <xmm7=%xmm7,<xmm13=%xmm13
4973pxor %xmm7,%xmm13
4974
4975# qhasm: xmm0 ^= xmm13
4976# asm 1: pxor <xmm13=int6464#14,<xmm0=int6464#1
4977# asm 2: pxor <xmm13=%xmm13,<xmm0=%xmm0
4978pxor %xmm13,%xmm0
4979
4980# qhasm: xmm1 ^= xmm8
4981# asm 1: pxor <xmm8=int6464#9,<xmm1=int6464#2
4982# asm 2: pxor <xmm8=%xmm8,<xmm1=%xmm1
4983pxor %xmm8,%xmm1
4984
4985# qhasm: xmm2 ^= xmm9
4986# asm 1: pxor <xmm9=int6464#10,<xmm2=int6464#3
4987# asm 2: pxor <xmm9=%xmm9,<xmm2=%xmm2
4988pxor %xmm9,%xmm2
4989
4990# qhasm: xmm1 ^= xmm13
4991# asm 1: pxor <xmm13=int6464#14,<xmm1=int6464#2
4992# asm 2: pxor <xmm13=%xmm13,<xmm1=%xmm1
4993pxor %xmm13,%xmm1
4994
4995# qhasm: xmm3 ^= xmm12
4996# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4
4997# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3
4998pxor %xmm12,%xmm3
4999
5000# qhasm: xmm4 ^= xmm14
5001# asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5
5002# asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4
5003pxor %xmm14,%xmm4
5004
5005# qhasm: xmm5 ^= xmm11
5006# asm 1: pxor <xmm11=int6464#12,<xmm5=int6464#6
5007# asm 2: pxor <xmm11=%xmm11,<xmm5=%xmm5
5008pxor %xmm11,%xmm5
5009
5010# qhasm: xmm3 ^= xmm13
5011# asm 1: pxor <xmm13=int6464#14,<xmm3=int6464#4
5012# asm 2: pxor <xmm13=%xmm13,<xmm3=%xmm3
5013pxor %xmm13,%xmm3
5014
5015# qhasm: xmm6 ^= xmm15
5016# asm 1: pxor <xmm15=int6464#16,<xmm6=int6464#7
5017# asm 2: pxor <xmm15=%xmm15,<xmm6=%xmm6
5018pxor %xmm15,%xmm6
5019
5020# qhasm: xmm7 ^= xmm10
5021# asm 1: pxor <xmm10=int6464#11,<xmm7=int6464#8
5022# asm 2: pxor <xmm10=%xmm10,<xmm7=%xmm7
5023pxor %xmm10,%xmm7
5024
5025# qhasm: xmm4 ^= xmm13
5026# asm 1: pxor <xmm13=int6464#14,<xmm4=int6464#5
5027# asm 2: pxor <xmm13=%xmm13,<xmm4=%xmm4
5028pxor %xmm13,%xmm4
5029
5030# qhasm: xmm8 = shuffle dwords of xmm8 by 0x4E
5031# asm 1: pshufd $0x4E,<xmm8=int6464#9,>xmm8=int6464#9
5032# asm 2: pshufd $0x4E,<xmm8=%xmm8,>xmm8=%xmm8
5033pshufd $0x4E,%xmm8,%xmm8
5034
5035# qhasm: xmm9 = shuffle dwords of xmm9 by 0x4E
5036# asm 1: pshufd $0x4E,<xmm9=int6464#10,>xmm9=int6464#10
5037# asm 2: pshufd $0x4E,<xmm9=%xmm9,>xmm9=%xmm9
5038pshufd $0x4E,%xmm9,%xmm9
5039
5040# qhasm: xmm12 = shuffle dwords of xmm12 by 0x4E
5041# asm 1: pshufd $0x4E,<xmm12=int6464#13,>xmm12=int6464#13
5042# asm 2: pshufd $0x4E,<xmm12=%xmm12,>xmm12=%xmm12
5043pshufd $0x4E,%xmm12,%xmm12
5044
5045# qhasm: xmm14 = shuffle dwords of xmm14 by 0x4E
5046# asm 1: pshufd $0x4E,<xmm14=int6464#15,>xmm14=int6464#15
5047# asm 2: pshufd $0x4E,<xmm14=%xmm14,>xmm14=%xmm14
5048pshufd $0x4E,%xmm14,%xmm14
5049
5050# qhasm: xmm11 = shuffle dwords of xmm11 by 0x4E
5051# asm 1: pshufd $0x4E,<xmm11=int6464#12,>xmm11=int6464#12
5052# asm 2: pshufd $0x4E,<xmm11=%xmm11,>xmm11=%xmm11
5053pshufd $0x4E,%xmm11,%xmm11
5054
5055# qhasm: xmm15 = shuffle dwords of xmm15 by 0x4E
5056# asm 1: pshufd $0x4E,<xmm15=int6464#16,>xmm15=int6464#16
5057# asm 2: pshufd $0x4E,<xmm15=%xmm15,>xmm15=%xmm15
5058pshufd $0x4E,%xmm15,%xmm15
5059
5060# qhasm: xmm10 = shuffle dwords of xmm10 by 0x4E
5061# asm 1: pshufd $0x4E,<xmm10=int6464#11,>xmm10=int6464#11
5062# asm 2: pshufd $0x4E,<xmm10=%xmm10,>xmm10=%xmm10
5063pshufd $0x4E,%xmm10,%xmm10
5064
5065# qhasm: xmm13 = shuffle dwords of xmm13 by 0x4E
5066# asm 1: pshufd $0x4E,<xmm13=int6464#14,>xmm13=int6464#14
5067# asm 2: pshufd $0x4E,<xmm13=%xmm13,>xmm13=%xmm13
5068pshufd $0x4E,%xmm13,%xmm13
5069
5070# qhasm: xmm0 ^= xmm8
5071# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
5072# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
5073pxor %xmm8,%xmm0
5074
5075# qhasm: xmm1 ^= xmm9
5076# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
5077# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
5078pxor %xmm9,%xmm1
5079
5080# qhasm: xmm2 ^= xmm12
5081# asm 1: pxor <xmm12=int6464#13,<xmm2=int6464#3
5082# asm 2: pxor <xmm12=%xmm12,<xmm2=%xmm2
5083pxor %xmm12,%xmm2
5084
5085# qhasm: xmm3 ^= xmm14
5086# asm 1: pxor <xmm14=int6464#15,<xmm3=int6464#4
5087# asm 2: pxor <xmm14=%xmm14,<xmm3=%xmm3
5088pxor %xmm14,%xmm3
5089
5090# qhasm: xmm4 ^= xmm11
5091# asm 1: pxor <xmm11=int6464#12,<xmm4=int6464#5
5092# asm 2: pxor <xmm11=%xmm11,<xmm4=%xmm4
5093pxor %xmm11,%xmm4
5094
5095# qhasm: xmm5 ^= xmm15
5096# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
5097# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
5098pxor %xmm15,%xmm5
5099
5100# qhasm: xmm6 ^= xmm10
5101# asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7
5102# asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6
5103pxor %xmm10,%xmm6
5104
5105# qhasm: xmm7 ^= xmm13
5106# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
5107# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
5108pxor %xmm13,%xmm7
5109
5110# qhasm: xmm0 ^= *(int128 *)(c + 512)
5111# asm 1: pxor 512(<c=int64#4),<xmm0=int6464#1
5112# asm 2: pxor 512(<c=%rcx),<xmm0=%xmm0
5113pxor 512(%rcx),%xmm0
5114
5115# qhasm: shuffle bytes of xmm0 by SR
5116# asm 1: pshufb SR,<xmm0=int6464#1
5117# asm 2: pshufb SR,<xmm0=%xmm0
5118pshufb SR,%xmm0
5119
5120# qhasm: xmm1 ^= *(int128 *)(c + 528)
5121# asm 1: pxor 528(<c=int64#4),<xmm1=int6464#2
5122# asm 2: pxor 528(<c=%rcx),<xmm1=%xmm1
5123pxor 528(%rcx),%xmm1
5124
5125# qhasm: shuffle bytes of xmm1 by SR
5126# asm 1: pshufb SR,<xmm1=int6464#2
5127# asm 2: pshufb SR,<xmm1=%xmm1
5128pshufb SR,%xmm1
5129
5130# qhasm: xmm2 ^= *(int128 *)(c + 544)
5131# asm 1: pxor 544(<c=int64#4),<xmm2=int6464#3
5132# asm 2: pxor 544(<c=%rcx),<xmm2=%xmm2
5133pxor 544(%rcx),%xmm2
5134
5135# qhasm: shuffle bytes of xmm2 by SR
5136# asm 1: pshufb SR,<xmm2=int6464#3
5137# asm 2: pshufb SR,<xmm2=%xmm2
5138pshufb SR,%xmm2
5139
5140# qhasm: xmm3 ^= *(int128 *)(c + 560)
5141# asm 1: pxor 560(<c=int64#4),<xmm3=int6464#4
5142# asm 2: pxor 560(<c=%rcx),<xmm3=%xmm3
5143pxor 560(%rcx),%xmm3
5144
5145# qhasm: shuffle bytes of xmm3 by SR
5146# asm 1: pshufb SR,<xmm3=int6464#4
5147# asm 2: pshufb SR,<xmm3=%xmm3
5148pshufb SR,%xmm3
5149
5150# qhasm: xmm4 ^= *(int128 *)(c + 576)
5151# asm 1: pxor 576(<c=int64#4),<xmm4=int6464#5
5152# asm 2: pxor 576(<c=%rcx),<xmm4=%xmm4
5153pxor 576(%rcx),%xmm4
5154
5155# qhasm: shuffle bytes of xmm4 by SR
5156# asm 1: pshufb SR,<xmm4=int6464#5
5157# asm 2: pshufb SR,<xmm4=%xmm4
5158pshufb SR,%xmm4
5159
5160# qhasm: xmm5 ^= *(int128 *)(c + 592)
5161# asm 1: pxor 592(<c=int64#4),<xmm5=int6464#6
5162# asm 2: pxor 592(<c=%rcx),<xmm5=%xmm5
5163pxor 592(%rcx),%xmm5
5164
5165# qhasm: shuffle bytes of xmm5 by SR
5166# asm 1: pshufb SR,<xmm5=int6464#6
5167# asm 2: pshufb SR,<xmm5=%xmm5
5168pshufb SR,%xmm5
5169
5170# qhasm: xmm6 ^= *(int128 *)(c + 608)
5171# asm 1: pxor 608(<c=int64#4),<xmm6=int6464#7
5172# asm 2: pxor 608(<c=%rcx),<xmm6=%xmm6
5173pxor 608(%rcx),%xmm6
5174
5175# qhasm: shuffle bytes of xmm6 by SR
5176# asm 1: pshufb SR,<xmm6=int6464#7
5177# asm 2: pshufb SR,<xmm6=%xmm6
5178pshufb SR,%xmm6
5179
5180# qhasm: xmm7 ^= *(int128 *)(c + 624)
5181# asm 1: pxor 624(<c=int64#4),<xmm7=int6464#8
5182# asm 2: pxor 624(<c=%rcx),<xmm7=%xmm7
5183pxor 624(%rcx),%xmm7
5184
5185# qhasm: shuffle bytes of xmm7 by SR
5186# asm 1: pshufb SR,<xmm7=int6464#8
5187# asm 2: pshufb SR,<xmm7=%xmm7
5188pshufb SR,%xmm7
5189
5190# qhasm: xmm5 ^= xmm6
5191# asm 1: pxor <xmm6=int6464#7,<xmm5=int6464#6
5192# asm 2: pxor <xmm6=%xmm6,<xmm5=%xmm5
5193pxor %xmm6,%xmm5
5194
5195# qhasm: xmm2 ^= xmm1
5196# asm 1: pxor <xmm1=int6464#2,<xmm2=int6464#3
5197# asm 2: pxor <xmm1=%xmm1,<xmm2=%xmm2
5198pxor %xmm1,%xmm2
5199
5200# qhasm: xmm5 ^= xmm0
5201# asm 1: pxor <xmm0=int6464#1,<xmm5=int6464#6
5202# asm 2: pxor <xmm0=%xmm0,<xmm5=%xmm5
5203pxor %xmm0,%xmm5
5204
5205# qhasm: xmm6 ^= xmm2
5206# asm 1: pxor <xmm2=int6464#3,<xmm6=int6464#7
5207# asm 2: pxor <xmm2=%xmm2,<xmm6=%xmm6
5208pxor %xmm2,%xmm6
5209
5210# qhasm: xmm3 ^= xmm0
5211# asm 1: pxor <xmm0=int6464#1,<xmm3=int6464#4
5212# asm 2: pxor <xmm0=%xmm0,<xmm3=%xmm3
5213pxor %xmm0,%xmm3
5214
5215# qhasm: xmm6 ^= xmm3
5216# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
5217# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
5218pxor %xmm3,%xmm6
5219
5220# qhasm: xmm3 ^= xmm7
5221# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4
5222# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3
5223pxor %xmm7,%xmm3
5224
5225# qhasm: xmm3 ^= xmm4
5226# asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4
5227# asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3
5228pxor %xmm4,%xmm3
5229
5230# qhasm: xmm7 ^= xmm5
5231# asm 1: pxor <xmm5=int6464#6,<xmm7=int6464#8
5232# asm 2: pxor <xmm5=%xmm5,<xmm7=%xmm7
5233pxor %xmm5,%xmm7
5234
5235# qhasm: xmm3 ^= xmm1
5236# asm 1: pxor <xmm1=int6464#2,<xmm3=int6464#4
5237# asm 2: pxor <xmm1=%xmm1,<xmm3=%xmm3
5238pxor %xmm1,%xmm3
5239
5240# qhasm: xmm4 ^= xmm5
5241# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
5242# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
5243pxor %xmm5,%xmm4
5244
5245# qhasm: xmm2 ^= xmm7
5246# asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3
5247# asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2
5248pxor %xmm7,%xmm2
5249
5250# qhasm: xmm1 ^= xmm5
5251# asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2
5252# asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1
5253pxor %xmm5,%xmm1
5254
5255# qhasm: xmm11 = xmm7
5256# asm 1: movdqa <xmm7=int6464#8,>xmm11=int6464#9
5257# asm 2: movdqa <xmm7=%xmm7,>xmm11=%xmm8
5258movdqa %xmm7,%xmm8
5259
5260# qhasm: xmm10 = xmm1
5261# asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10
5262# asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9
5263movdqa %xmm1,%xmm9
5264
5265# qhasm: xmm9 = xmm5
5266# asm 1: movdqa <xmm5=int6464#6,>xmm9=int6464#11
5267# asm 2: movdqa <xmm5=%xmm5,>xmm9=%xmm10
5268movdqa %xmm5,%xmm10
5269
5270# qhasm: xmm13 = xmm2
5271# asm 1: movdqa <xmm2=int6464#3,>xmm13=int6464#12
5272# asm 2: movdqa <xmm2=%xmm2,>xmm13=%xmm11
5273movdqa %xmm2,%xmm11
5274
5275# qhasm: xmm12 = xmm6
5276# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#13
5277# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm12
5278movdqa %xmm6,%xmm12
5279
5280# qhasm: xmm11 ^= xmm4
5281# asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#9
5282# asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm8
5283pxor %xmm4,%xmm8
5284
5285# qhasm: xmm10 ^= xmm2
5286# asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#10
5287# asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm9
5288pxor %xmm2,%xmm9
5289
5290# qhasm: xmm9 ^= xmm3
5291# asm 1: pxor <xmm3=int6464#4,<xmm9=int6464#11
5292# asm 2: pxor <xmm3=%xmm3,<xmm9=%xmm10
5293pxor %xmm3,%xmm10
5294
5295# qhasm: xmm13 ^= xmm4
5296# asm 1: pxor <xmm4=int6464#5,<xmm13=int6464#12
5297# asm 2: pxor <xmm4=%xmm4,<xmm13=%xmm11
5298pxor %xmm4,%xmm11
5299
5300# qhasm: xmm12 ^= xmm0
5301# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13
5302# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12
5303pxor %xmm0,%xmm12
5304
5305# qhasm: xmm14 = xmm11
5306# asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14
5307# asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13
5308movdqa %xmm8,%xmm13
5309
5310# qhasm: xmm8 = xmm10
5311# asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15
5312# asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14
5313movdqa %xmm9,%xmm14
5314
5315# qhasm: xmm15 = xmm11
5316# asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16
5317# asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15
5318movdqa %xmm8,%xmm15
5319
5320# qhasm: xmm10 |= xmm9
5321# asm 1: por <xmm9=int6464#11,<xmm10=int6464#10
5322# asm 2: por <xmm9=%xmm10,<xmm10=%xmm9
5323por %xmm10,%xmm9
5324
5325# qhasm: xmm11 |= xmm12
5326# asm 1: por <xmm12=int6464#13,<xmm11=int6464#9
5327# asm 2: por <xmm12=%xmm12,<xmm11=%xmm8
5328por %xmm12,%xmm8
5329
5330# qhasm: xmm15 ^= xmm8
5331# asm 1: pxor <xmm8=int6464#15,<xmm15=int6464#16
5332# asm 2: pxor <xmm8=%xmm14,<xmm15=%xmm15
5333pxor %xmm14,%xmm15
5334
5335# qhasm: xmm14 &= xmm12
5336# asm 1: pand <xmm12=int6464#13,<xmm14=int6464#14
5337# asm 2: pand <xmm12=%xmm12,<xmm14=%xmm13
5338pand %xmm12,%xmm13
5339
5340# qhasm: xmm8 &= xmm9
5341# asm 1: pand <xmm9=int6464#11,<xmm8=int6464#15
5342# asm 2: pand <xmm9=%xmm10,<xmm8=%xmm14
5343pand %xmm10,%xmm14
5344
5345# qhasm: xmm12 ^= xmm9
5346# asm 1: pxor <xmm9=int6464#11,<xmm12=int6464#13
5347# asm 2: pxor <xmm9=%xmm10,<xmm12=%xmm12
5348pxor %xmm10,%xmm12
5349
5350# qhasm: xmm15 &= xmm12
5351# asm 1: pand <xmm12=int6464#13,<xmm15=int6464#16
5352# asm 2: pand <xmm12=%xmm12,<xmm15=%xmm15
5353pand %xmm12,%xmm15
5354
5355# qhasm: xmm12 = xmm3
5356# asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#11
5357# asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm10
5358movdqa %xmm3,%xmm10
5359
5360# qhasm: xmm12 ^= xmm0
5361# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#11
5362# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm10
5363pxor %xmm0,%xmm10
5364
5365# qhasm: xmm13 &= xmm12
5366# asm 1: pand <xmm12=int6464#11,<xmm13=int6464#12
5367# asm 2: pand <xmm12=%xmm10,<xmm13=%xmm11
5368pand %xmm10,%xmm11
5369
5370# qhasm: xmm11 ^= xmm13
5371# asm 1: pxor <xmm13=int6464#12,<xmm11=int6464#9
5372# asm 2: pxor <xmm13=%xmm11,<xmm11=%xmm8
5373pxor %xmm11,%xmm8
5374
5375# qhasm: xmm10 ^= xmm13
5376# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
5377# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
5378pxor %xmm11,%xmm9
5379
5380# qhasm: xmm13 = xmm7
5381# asm 1: movdqa <xmm7=int6464#8,>xmm13=int6464#11
5382# asm 2: movdqa <xmm7=%xmm7,>xmm13=%xmm10
5383movdqa %xmm7,%xmm10
5384
5385# qhasm: xmm13 ^= xmm1
5386# asm 1: pxor <xmm1=int6464#2,<xmm13=int6464#11
5387# asm 2: pxor <xmm1=%xmm1,<xmm13=%xmm10
5388pxor %xmm1,%xmm10
5389
5390# qhasm: xmm12 = xmm5
5391# asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#12
5392# asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm11
5393movdqa %xmm5,%xmm11
5394
5395# qhasm: xmm9 = xmm13
5396# asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13
5397# asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12
5398movdqa %xmm10,%xmm12
5399
5400# qhasm: xmm12 ^= xmm6
5401# asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#12
5402# asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm11
5403pxor %xmm6,%xmm11
5404
5405# qhasm: xmm9 |= xmm12
5406# asm 1: por <xmm12=int6464#12,<xmm9=int6464#13
5407# asm 2: por <xmm12=%xmm11,<xmm9=%xmm12
5408por %xmm11,%xmm12
5409
5410# qhasm: xmm13 &= xmm12
5411# asm 1: pand <xmm12=int6464#12,<xmm13=int6464#11
5412# asm 2: pand <xmm12=%xmm11,<xmm13=%xmm10
5413pand %xmm11,%xmm10
5414
5415# qhasm: xmm8 ^= xmm13
5416# asm 1: pxor <xmm13=int6464#11,<xmm8=int6464#15
5417# asm 2: pxor <xmm13=%xmm10,<xmm8=%xmm14
5418pxor %xmm10,%xmm14
5419
5420# qhasm: xmm11 ^= xmm15
5421# asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#9
5422# asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm8
5423pxor %xmm15,%xmm8
5424
5425# qhasm: xmm10 ^= xmm14
5426# asm 1: pxor <xmm14=int6464#14,<xmm10=int6464#10
5427# asm 2: pxor <xmm14=%xmm13,<xmm10=%xmm9
5428pxor %xmm13,%xmm9
5429
5430# qhasm: xmm9 ^= xmm15
5431# asm 1: pxor <xmm15=int6464#16,<xmm9=int6464#13
5432# asm 2: pxor <xmm15=%xmm15,<xmm9=%xmm12
5433pxor %xmm15,%xmm12
5434
5435# qhasm: xmm8 ^= xmm14
5436# asm 1: pxor <xmm14=int6464#14,<xmm8=int6464#15
5437# asm 2: pxor <xmm14=%xmm13,<xmm8=%xmm14
5438pxor %xmm13,%xmm14
5439
5440# qhasm: xmm9 ^= xmm14
5441# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
5442# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
5443pxor %xmm13,%xmm12
5444
5445# qhasm: xmm12 = xmm2
5446# asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#11
5447# asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm10
5448movdqa %xmm2,%xmm10
5449
5450# qhasm: xmm13 = xmm4
5451# asm 1: movdqa <xmm4=int6464#5,>xmm13=int6464#12
5452# asm 2: movdqa <xmm4=%xmm4,>xmm13=%xmm11
5453movdqa %xmm4,%xmm11
5454
5455# qhasm: xmm14 = xmm1
5456# asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14
5457# asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13
5458movdqa %xmm1,%xmm13
5459
5460# qhasm: xmm15 = xmm7
5461# asm 1: movdqa <xmm7=int6464#8,>xmm15=int6464#16
5462# asm 2: movdqa <xmm7=%xmm7,>xmm15=%xmm15
5463movdqa %xmm7,%xmm15
5464
5465# qhasm: xmm12 &= xmm3
5466# asm 1: pand <xmm3=int6464#4,<xmm12=int6464#11
5467# asm 2: pand <xmm3=%xmm3,<xmm12=%xmm10
5468pand %xmm3,%xmm10
5469
5470# qhasm: xmm13 &= xmm0
5471# asm 1: pand <xmm0=int6464#1,<xmm13=int6464#12
5472# asm 2: pand <xmm0=%xmm0,<xmm13=%xmm11
5473pand %xmm0,%xmm11
5474
5475# qhasm: xmm14 &= xmm5
5476# asm 1: pand <xmm5=int6464#6,<xmm14=int6464#14
5477# asm 2: pand <xmm5=%xmm5,<xmm14=%xmm13
5478pand %xmm5,%xmm13
5479
5480# qhasm: xmm15 |= xmm6
5481# asm 1: por <xmm6=int6464#7,<xmm15=int6464#16
5482# asm 2: por <xmm6=%xmm6,<xmm15=%xmm15
5483por %xmm6,%xmm15
5484
5485# qhasm: xmm11 ^= xmm12
5486# asm 1: pxor <xmm12=int6464#11,<xmm11=int6464#9
5487# asm 2: pxor <xmm12=%xmm10,<xmm11=%xmm8
5488pxor %xmm10,%xmm8
5489
5490# qhasm: xmm10 ^= xmm13
5491# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
5492# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
5493pxor %xmm11,%xmm9
5494
5495# qhasm: xmm9 ^= xmm14
5496# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
5497# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
5498pxor %xmm13,%xmm12
5499
5500# qhasm: xmm8 ^= xmm15
5501# asm 1: pxor <xmm15=int6464#16,<xmm8=int6464#15
5502# asm 2: pxor <xmm15=%xmm15,<xmm8=%xmm14
5503pxor %xmm15,%xmm14
5504
5505# qhasm: xmm12 = xmm11
5506# asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11
5507# asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10
5508movdqa %xmm8,%xmm10
5509
5510# qhasm: xmm12 ^= xmm10
5511# asm 1: pxor <xmm10=int6464#10,<xmm12=int6464#11
5512# asm 2: pxor <xmm10=%xmm9,<xmm12=%xmm10
5513pxor %xmm9,%xmm10
5514
5515# qhasm: xmm11 &= xmm9
5516# asm 1: pand <xmm9=int6464#13,<xmm11=int6464#9
5517# asm 2: pand <xmm9=%xmm12,<xmm11=%xmm8
5518pand %xmm12,%xmm8
5519
5520# qhasm: xmm14 = xmm8
5521# asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12
5522# asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11
5523movdqa %xmm14,%xmm11
5524
5525# qhasm: xmm14 ^= xmm11
5526# asm 1: pxor <xmm11=int6464#9,<xmm14=int6464#12
5527# asm 2: pxor <xmm11=%xmm8,<xmm14=%xmm11
5528pxor %xmm8,%xmm11
5529
5530# qhasm: xmm15 = xmm12
5531# asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14
5532# asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13
5533movdqa %xmm10,%xmm13
5534
5535# qhasm: xmm15 &= xmm14
5536# asm 1: pand <xmm14=int6464#12,<xmm15=int6464#14
5537# asm 2: pand <xmm14=%xmm11,<xmm15=%xmm13
5538pand %xmm11,%xmm13
5539
5540# qhasm: xmm15 ^= xmm10
5541# asm 1: pxor <xmm10=int6464#10,<xmm15=int6464#14
5542# asm 2: pxor <xmm10=%xmm9,<xmm15=%xmm13
5543pxor %xmm9,%xmm13
5544
5545# qhasm: xmm13 = xmm9
5546# asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16
5547# asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15
5548movdqa %xmm12,%xmm15
5549
5550# qhasm: xmm13 ^= xmm8
5551# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
5552# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
5553pxor %xmm14,%xmm15
5554
5555# qhasm: xmm11 ^= xmm10
5556# asm 1: pxor <xmm10=int6464#10,<xmm11=int6464#9
5557# asm 2: pxor <xmm10=%xmm9,<xmm11=%xmm8
5558pxor %xmm9,%xmm8
5559
5560# qhasm: xmm13 &= xmm11
5561# asm 1: pand <xmm11=int6464#9,<xmm13=int6464#16
5562# asm 2: pand <xmm11=%xmm8,<xmm13=%xmm15
5563pand %xmm8,%xmm15
5564
5565# qhasm: xmm13 ^= xmm8
5566# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
5567# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
5568pxor %xmm14,%xmm15
5569
5570# qhasm: xmm9 ^= xmm13
5571# asm 1: pxor <xmm13=int6464#16,<xmm9=int6464#13
5572# asm 2: pxor <xmm13=%xmm15,<xmm9=%xmm12
5573pxor %xmm15,%xmm12
5574
5575# qhasm: xmm10 = xmm14
5576# asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9
5577# asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8
5578movdqa %xmm11,%xmm8
5579
5580# qhasm: xmm10 ^= xmm13
5581# asm 1: pxor <xmm13=int6464#16,<xmm10=int6464#9
5582# asm 2: pxor <xmm13=%xmm15,<xmm10=%xmm8
5583pxor %xmm15,%xmm8
5584
5585# qhasm: xmm10 &= xmm8
5586# asm 1: pand <xmm8=int6464#15,<xmm10=int6464#9
5587# asm 2: pand <xmm8=%xmm14,<xmm10=%xmm8
5588pand %xmm14,%xmm8
5589
5590# qhasm: xmm9 ^= xmm10
5591# asm 1: pxor <xmm10=int6464#9,<xmm9=int6464#13
5592# asm 2: pxor <xmm10=%xmm8,<xmm9=%xmm12
5593pxor %xmm8,%xmm12
5594
5595# qhasm: xmm14 ^= xmm10
5596# asm 1: pxor <xmm10=int6464#9,<xmm14=int6464#12
5597# asm 2: pxor <xmm10=%xmm8,<xmm14=%xmm11
5598pxor %xmm8,%xmm11
5599
5600# qhasm: xmm14 &= xmm15
5601# asm 1: pand <xmm15=int6464#14,<xmm14=int6464#12
5602# asm 2: pand <xmm15=%xmm13,<xmm14=%xmm11
5603pand %xmm13,%xmm11
5604
5605# qhasm: xmm14 ^= xmm12
5606# asm 1: pxor <xmm12=int6464#11,<xmm14=int6464#12
5607# asm 2: pxor <xmm12=%xmm10,<xmm14=%xmm11
5608pxor %xmm10,%xmm11
5609
5610# qhasm: xmm12 = xmm6
5611# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#9
5612# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm8
5613movdqa %xmm6,%xmm8
5614
5615# qhasm: xmm8 = xmm5
5616# asm 1: movdqa <xmm5=int6464#6,>xmm8=int6464#10
5617# asm 2: movdqa <xmm5=%xmm5,>xmm8=%xmm9
5618movdqa %xmm5,%xmm9
5619
5620# qhasm: xmm10 = xmm15
5621# asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11
5622# asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10
5623movdqa %xmm13,%xmm10
5624
5625# qhasm: xmm10 ^= xmm14
5626# asm 1: pxor <xmm14=int6464#12,<xmm10=int6464#11
5627# asm 2: pxor <xmm14=%xmm11,<xmm10=%xmm10
5628pxor %xmm11,%xmm10
5629
5630# qhasm: xmm10 &= xmm6
5631# asm 1: pand <xmm6=int6464#7,<xmm10=int6464#11
5632# asm 2: pand <xmm6=%xmm6,<xmm10=%xmm10
5633pand %xmm6,%xmm10
5634
5635# qhasm: xmm6 ^= xmm5
5636# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
5637# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
5638pxor %xmm5,%xmm6
5639
5640# qhasm: xmm6 &= xmm14
5641# asm 1: pand <xmm14=int6464#12,<xmm6=int6464#7
5642# asm 2: pand <xmm14=%xmm11,<xmm6=%xmm6
5643pand %xmm11,%xmm6
5644
5645# qhasm: xmm5 &= xmm15
5646# asm 1: pand <xmm15=int6464#14,<xmm5=int6464#6
5647# asm 2: pand <xmm15=%xmm13,<xmm5=%xmm5
5648pand %xmm13,%xmm5
5649
5650# qhasm: xmm6 ^= xmm5
5651# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
5652# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
5653pxor %xmm5,%xmm6
5654
5655# qhasm: xmm5 ^= xmm10
5656# asm 1: pxor <xmm10=int6464#11,<xmm5=int6464#6
5657# asm 2: pxor <xmm10=%xmm10,<xmm5=%xmm5
5658pxor %xmm10,%xmm5
5659
5660# qhasm: xmm12 ^= xmm0
5661# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#9
5662# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm8
5663pxor %xmm0,%xmm8
5664
5665# qhasm: xmm8 ^= xmm3
5666# asm 1: pxor <xmm3=int6464#4,<xmm8=int6464#10
5667# asm 2: pxor <xmm3=%xmm3,<xmm8=%xmm9
5668pxor %xmm3,%xmm9
5669
5670# qhasm: xmm15 ^= xmm13
5671# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
5672# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
5673pxor %xmm15,%xmm13
5674
5675# qhasm: xmm14 ^= xmm9
5676# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
5677# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
5678pxor %xmm12,%xmm11
5679
5680# qhasm: xmm11 = xmm15
5681# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
5682# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
5683movdqa %xmm13,%xmm10
5684
5685# qhasm: xmm11 ^= xmm14
5686# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
5687# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
5688pxor %xmm11,%xmm10
5689
5690# qhasm: xmm11 &= xmm12
5691# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
5692# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
5693pand %xmm8,%xmm10
5694
5695# qhasm: xmm12 ^= xmm8
5696# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
5697# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
5698pxor %xmm9,%xmm8
5699
5700# qhasm: xmm12 &= xmm14
5701# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
5702# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
5703pand %xmm11,%xmm8
5704
5705# qhasm: xmm8 &= xmm15
5706# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
5707# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
5708pand %xmm13,%xmm9
5709
5710# qhasm: xmm8 ^= xmm12
5711# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
5712# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
5713pxor %xmm8,%xmm9
5714
5715# qhasm: xmm12 ^= xmm11
5716# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
5717# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
5718pxor %xmm10,%xmm8
5719
5720# qhasm: xmm10 = xmm13
5721# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
5722# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
5723movdqa %xmm15,%xmm10
5724
5725# qhasm: xmm10 ^= xmm9
5726# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
5727# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
5728pxor %xmm12,%xmm10
5729
5730# qhasm: xmm10 &= xmm0
5731# asm 1: pand <xmm0=int6464#1,<xmm10=int6464#11
5732# asm 2: pand <xmm0=%xmm0,<xmm10=%xmm10
5733pand %xmm0,%xmm10
5734
5735# qhasm: xmm0 ^= xmm3
5736# asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1
5737# asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0
5738pxor %xmm3,%xmm0
5739
5740# qhasm: xmm0 &= xmm9
5741# asm 1: pand <xmm9=int6464#13,<xmm0=int6464#1
5742# asm 2: pand <xmm9=%xmm12,<xmm0=%xmm0
5743pand %xmm12,%xmm0
5744
5745# qhasm: xmm3 &= xmm13
5746# asm 1: pand <xmm13=int6464#16,<xmm3=int6464#4
5747# asm 2: pand <xmm13=%xmm15,<xmm3=%xmm3
5748pand %xmm15,%xmm3
5749
5750# qhasm: xmm0 ^= xmm3
5751# asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1
5752# asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0
5753pxor %xmm3,%xmm0
5754
5755# qhasm: xmm3 ^= xmm10
5756# asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4
5757# asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3
5758pxor %xmm10,%xmm3
5759
5760# qhasm: xmm6 ^= xmm12
5761# asm 1: pxor <xmm12=int6464#9,<xmm6=int6464#7
5762# asm 2: pxor <xmm12=%xmm8,<xmm6=%xmm6
5763pxor %xmm8,%xmm6
5764
5765# qhasm: xmm0 ^= xmm12
5766# asm 1: pxor <xmm12=int6464#9,<xmm0=int6464#1
5767# asm 2: pxor <xmm12=%xmm8,<xmm0=%xmm0
5768pxor %xmm8,%xmm0
5769
5770# qhasm: xmm5 ^= xmm8
5771# asm 1: pxor <xmm8=int6464#10,<xmm5=int6464#6
5772# asm 2: pxor <xmm8=%xmm9,<xmm5=%xmm5
5773pxor %xmm9,%xmm5
5774
5775# qhasm: xmm3 ^= xmm8
5776# asm 1: pxor <xmm8=int6464#10,<xmm3=int6464#4
5777# asm 2: pxor <xmm8=%xmm9,<xmm3=%xmm3
5778pxor %xmm9,%xmm3
5779
5780# qhasm: xmm12 = xmm7
5781# asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#9
5782# asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm8
5783movdqa %xmm7,%xmm8
5784
5785# qhasm: xmm8 = xmm1
5786# asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10
5787# asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9
5788movdqa %xmm1,%xmm9
5789
5790# qhasm: xmm12 ^= xmm4
5791# asm 1: pxor <xmm4=int6464#5,<xmm12=int6464#9
5792# asm 2: pxor <xmm4=%xmm4,<xmm12=%xmm8
5793pxor %xmm4,%xmm8
5794
5795# qhasm: xmm8 ^= xmm2
5796# asm 1: pxor <xmm2=int6464#3,<xmm8=int6464#10
5797# asm 2: pxor <xmm2=%xmm2,<xmm8=%xmm9
5798pxor %xmm2,%xmm9
5799
5800# qhasm: xmm11 = xmm15
5801# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
5802# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
5803movdqa %xmm13,%xmm10
5804
5805# qhasm: xmm11 ^= xmm14
5806# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
5807# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
5808pxor %xmm11,%xmm10
5809
5810# qhasm: xmm11 &= xmm12
5811# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
5812# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
5813pand %xmm8,%xmm10
5814
5815# qhasm: xmm12 ^= xmm8
5816# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
5817# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
5818pxor %xmm9,%xmm8
5819
5820# qhasm: xmm12 &= xmm14
5821# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
5822# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
5823pand %xmm11,%xmm8
5824
5825# qhasm: xmm8 &= xmm15
5826# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
5827# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
5828pand %xmm13,%xmm9
5829
5830# qhasm: xmm8 ^= xmm12
5831# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
5832# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
5833pxor %xmm8,%xmm9
5834
5835# qhasm: xmm12 ^= xmm11
5836# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
5837# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
5838pxor %xmm10,%xmm8
5839
5840# qhasm: xmm10 = xmm13
5841# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
5842# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
5843movdqa %xmm15,%xmm10
5844
5845# qhasm: xmm10 ^= xmm9
5846# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
5847# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
5848pxor %xmm12,%xmm10
5849
5850# qhasm: xmm10 &= xmm4
5851# asm 1: pand <xmm4=int6464#5,<xmm10=int6464#11
5852# asm 2: pand <xmm4=%xmm4,<xmm10=%xmm10
5853pand %xmm4,%xmm10
5854
5855# qhasm: xmm4 ^= xmm2
5856# asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5
5857# asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4
5858pxor %xmm2,%xmm4
5859
5860# qhasm: xmm4 &= xmm9
5861# asm 1: pand <xmm9=int6464#13,<xmm4=int6464#5
5862# asm 2: pand <xmm9=%xmm12,<xmm4=%xmm4
5863pand %xmm12,%xmm4
5864
5865# qhasm: xmm2 &= xmm13
5866# asm 1: pand <xmm13=int6464#16,<xmm2=int6464#3
5867# asm 2: pand <xmm13=%xmm15,<xmm2=%xmm2
5868pand %xmm15,%xmm2
5869
5870# qhasm: xmm4 ^= xmm2
5871# asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5
5872# asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4
5873pxor %xmm2,%xmm4
5874
5875# qhasm: xmm2 ^= xmm10
5876# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3
5877# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2
5878pxor %xmm10,%xmm2
5879
5880# qhasm: xmm15 ^= xmm13
5881# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
5882# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
5883pxor %xmm15,%xmm13
5884
5885# qhasm: xmm14 ^= xmm9
5886# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
5887# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
5888pxor %xmm12,%xmm11
5889
5890# qhasm: xmm11 = xmm15
5891# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
5892# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
5893movdqa %xmm13,%xmm10
5894
5895# qhasm: xmm11 ^= xmm14
5896# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
5897# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
5898pxor %xmm11,%xmm10
5899
5900# qhasm: xmm11 &= xmm7
5901# asm 1: pand <xmm7=int6464#8,<xmm11=int6464#11
5902# asm 2: pand <xmm7=%xmm7,<xmm11=%xmm10
5903pand %xmm7,%xmm10
5904
5905# qhasm: xmm7 ^= xmm1
5906# asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8
5907# asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7
5908pxor %xmm1,%xmm7
5909
5910# qhasm: xmm7 &= xmm14
5911# asm 1: pand <xmm14=int6464#12,<xmm7=int6464#8
5912# asm 2: pand <xmm14=%xmm11,<xmm7=%xmm7
5913pand %xmm11,%xmm7
5914
5915# qhasm: xmm1 &= xmm15
5916# asm 1: pand <xmm15=int6464#14,<xmm1=int6464#2
5917# asm 2: pand <xmm15=%xmm13,<xmm1=%xmm1
5918pand %xmm13,%xmm1
5919
5920# qhasm: xmm7 ^= xmm1
5921# asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8
5922# asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7
5923pxor %xmm1,%xmm7
5924
5925# qhasm: xmm1 ^= xmm11
5926# asm 1: pxor <xmm11=int6464#11,<xmm1=int6464#2
5927# asm 2: pxor <xmm11=%xmm10,<xmm1=%xmm1
5928pxor %xmm10,%xmm1
5929
5930# qhasm: xmm7 ^= xmm12
5931# asm 1: pxor <xmm12=int6464#9,<xmm7=int6464#8
5932# asm 2: pxor <xmm12=%xmm8,<xmm7=%xmm7
5933pxor %xmm8,%xmm7
5934
5935# qhasm: xmm4 ^= xmm12
5936# asm 1: pxor <xmm12=int6464#9,<xmm4=int6464#5
5937# asm 2: pxor <xmm12=%xmm8,<xmm4=%xmm4
5938pxor %xmm8,%xmm4
5939
5940# qhasm: xmm1 ^= xmm8
5941# asm 1: pxor <xmm8=int6464#10,<xmm1=int6464#2
5942# asm 2: pxor <xmm8=%xmm9,<xmm1=%xmm1
5943pxor %xmm9,%xmm1
5944
5945# qhasm: xmm2 ^= xmm8
5946# asm 1: pxor <xmm8=int6464#10,<xmm2=int6464#3
5947# asm 2: pxor <xmm8=%xmm9,<xmm2=%xmm2
5948pxor %xmm9,%xmm2
5949
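# NOTE (annotation): the pure pxor run below looks like the bottom
# linear layer of the bitsliced S-box circuit, mapping the inversion
# outputs back into the eight state slices xmm0..xmm7.
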
5950# qhasm: xmm7 ^= xmm0
5951# asm 1: pxor <xmm0=int6464#1,<xmm7=int6464#8
5952# asm 2: pxor <xmm0=%xmm0,<xmm7=%xmm7
5953pxor %xmm0,%xmm7
5954
5955# qhasm: xmm1 ^= xmm6
5956# asm 1: pxor <xmm6=int6464#7,<xmm1=int6464#2
5957# asm 2: pxor <xmm6=%xmm6,<xmm1=%xmm1
5958pxor %xmm6,%xmm1
5959
5960# qhasm: xmm4 ^= xmm7
5961# asm 1: pxor <xmm7=int6464#8,<xmm4=int6464#5
5962# asm 2: pxor <xmm7=%xmm7,<xmm4=%xmm4
5963pxor %xmm7,%xmm4
5964
5965# qhasm: xmm6 ^= xmm0
5966# asm 1: pxor <xmm0=int6464#1,<xmm6=int6464#7
5967# asm 2: pxor <xmm0=%xmm0,<xmm6=%xmm6
5968pxor %xmm0,%xmm6
5969
5970# qhasm: xmm0 ^= xmm1
5971# asm 1: pxor <xmm1=int6464#2,<xmm0=int6464#1
5972# asm 2: pxor <xmm1=%xmm1,<xmm0=%xmm0
5973pxor %xmm1,%xmm0
5974
5975# qhasm: xmm1 ^= xmm5
5976# asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2
5977# asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1
5978pxor %xmm5,%xmm1
5979
5980# qhasm: xmm5 ^= xmm2
5981# asm 1: pxor <xmm2=int6464#3,<xmm5=int6464#6
5982# asm 2: pxor <xmm2=%xmm2,<xmm5=%xmm5
5983pxor %xmm2,%xmm5
5984
5985# qhasm: xmm4 ^= xmm5
5986# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
5987# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
5988pxor %xmm5,%xmm4
5989
5990# qhasm: xmm2 ^= xmm3
5991# asm 1: pxor <xmm3=int6464#4,<xmm2=int6464#3
5992# asm 2: pxor <xmm3=%xmm3,<xmm2=%xmm2
5993pxor %xmm3,%xmm2
5994
5995# qhasm: xmm3 ^= xmm5
5996# asm 1: pxor <xmm5=int6464#6,<xmm3=int6464#4
5997# asm 2: pxor <xmm5=%xmm5,<xmm3=%xmm3
5998pxor %xmm5,%xmm3
5999
6000# qhasm: xmm6 ^= xmm3
6001# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
6002# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
6003pxor %xmm3,%xmm6
6004
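# NOTE (annotation): the eight pshufd $0x93 shuffles rotate the dwords
# of every slice by one position; XORing the rotated copies into the
# state, together with the pshufd $0x4E half-swaps further down,
# appears to implement MixColumns on all eight blocks in parallel.
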
6005# qhasm: xmm8 = shuffle dwords of xmm0 by 0x93
6006# asm 1: pshufd $0x93,<xmm0=int6464#1,>xmm8=int6464#9
6007# asm 2: pshufd $0x93,<xmm0=%xmm0,>xmm8=%xmm8
6008pshufd $0x93,%xmm0,%xmm8
6009
6010# qhasm: xmm9 = shuffle dwords of xmm1 by 0x93
6011# asm 1: pshufd $0x93,<xmm1=int6464#2,>xmm9=int6464#10
6012# asm 2: pshufd $0x93,<xmm1=%xmm1,>xmm9=%xmm9
6013pshufd $0x93,%xmm1,%xmm9
6014
6015# qhasm: xmm10 = shuffle dwords of xmm4 by 0x93
6016# asm 1: pshufd $0x93,<xmm4=int6464#5,>xmm10=int6464#11
6017# asm 2: pshufd $0x93,<xmm4=%xmm4,>xmm10=%xmm10
6018pshufd $0x93,%xmm4,%xmm10
6019
6020# qhasm: xmm11 = shuffle dwords of xmm6 by 0x93
6021# asm 1: pshufd $0x93,<xmm6=int6464#7,>xmm11=int6464#12
6022# asm 2: pshufd $0x93,<xmm6=%xmm6,>xmm11=%xmm11
6023pshufd $0x93,%xmm6,%xmm11
6024
6025# qhasm: xmm12 = shuffle dwords of xmm3 by 0x93
6026# asm 1: pshufd $0x93,<xmm3=int6464#4,>xmm12=int6464#13
6027# asm 2: pshufd $0x93,<xmm3=%xmm3,>xmm12=%xmm12
6028pshufd $0x93,%xmm3,%xmm12
6029
6030# qhasm: xmm13 = shuffle dwords of xmm7 by 0x93
6031# asm 1: pshufd $0x93,<xmm7=int6464#8,>xmm13=int6464#14
6032# asm 2: pshufd $0x93,<xmm7=%xmm7,>xmm13=%xmm13
6033pshufd $0x93,%xmm7,%xmm13
6034
6035# qhasm: xmm14 = shuffle dwords of xmm2 by 0x93
6036# asm 1: pshufd $0x93,<xmm2=int6464#3,>xmm14=int6464#15
6037# asm 2: pshufd $0x93,<xmm2=%xmm2,>xmm14=%xmm14
6038pshufd $0x93,%xmm2,%xmm14
6039
6040# qhasm: xmm15 = shuffle dwords of xmm5 by 0x93
6041# asm 1: pshufd $0x93,<xmm5=int6464#6,>xmm15=int6464#16
6042# asm 2: pshufd $0x93,<xmm5=%xmm5,>xmm15=%xmm15
6043pshufd $0x93,%xmm5,%xmm15
6044
6045# qhasm: xmm0 ^= xmm8
6046# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
6047# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
6048pxor %xmm8,%xmm0
6049
6050# qhasm: xmm1 ^= xmm9
6051# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
6052# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
6053pxor %xmm9,%xmm1
6054
6055# qhasm: xmm4 ^= xmm10
6056# asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5
6057# asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4
6058pxor %xmm10,%xmm4
6059
6060# qhasm: xmm6 ^= xmm11
6061# asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7
6062# asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6
6063pxor %xmm11,%xmm6
6064
6065# qhasm: xmm3 ^= xmm12
6066# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4
6067# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3
6068pxor %xmm12,%xmm3
6069
6070# qhasm: xmm7 ^= xmm13
6071# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
6072# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
6073pxor %xmm13,%xmm7
6074
6075# qhasm: xmm2 ^= xmm14
6076# asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3
6077# asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2
6078pxor %xmm14,%xmm2
6079
6080# qhasm: xmm5 ^= xmm15
6081# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
6082# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
6083pxor %xmm15,%xmm5
6084
6085# qhasm: xmm8 ^= xmm5
6086# asm 1: pxor <xmm5=int6464#6,<xmm8=int6464#9
6087# asm 2: pxor <xmm5=%xmm5,<xmm8=%xmm8
6088pxor %xmm5,%xmm8
6089
6090# qhasm: xmm9 ^= xmm0
6091# asm 1: pxor <xmm0=int6464#1,<xmm9=int6464#10
6092# asm 2: pxor <xmm0=%xmm0,<xmm9=%xmm9
6093pxor %xmm0,%xmm9
6094
6095# qhasm: xmm10 ^= xmm1
6096# asm 1: pxor <xmm1=int6464#2,<xmm10=int6464#11
6097# asm 2: pxor <xmm1=%xmm1,<xmm10=%xmm10
6098pxor %xmm1,%xmm10
6099
6100# qhasm: xmm9 ^= xmm5
6101# asm 1: pxor <xmm5=int6464#6,<xmm9=int6464#10
6102# asm 2: pxor <xmm5=%xmm5,<xmm9=%xmm9
6103pxor %xmm5,%xmm9
6104
6105# qhasm: xmm11 ^= xmm4
6106# asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#12
6107# asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm11
6108pxor %xmm4,%xmm11
6109
6110# qhasm: xmm12 ^= xmm6
6111# asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#13
6112# asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm12
6113pxor %xmm6,%xmm12
6114
6115# qhasm: xmm13 ^= xmm3
6116# asm 1: pxor <xmm3=int6464#4,<xmm13=int6464#14
6117# asm 2: pxor <xmm3=%xmm3,<xmm13=%xmm13
6118pxor %xmm3,%xmm13
6119
6120# qhasm: xmm11 ^= xmm5
6121# asm 1: pxor <xmm5=int6464#6,<xmm11=int6464#12
6122# asm 2: pxor <xmm5=%xmm5,<xmm11=%xmm11
6123pxor %xmm5,%xmm11
6124
6125# qhasm: xmm14 ^= xmm7
6126# asm 1: pxor <xmm7=int6464#8,<xmm14=int6464#15
6127# asm 2: pxor <xmm7=%xmm7,<xmm14=%xmm14
6128pxor %xmm7,%xmm14
6129
6130# qhasm: xmm15 ^= xmm2
6131# asm 1: pxor <xmm2=int6464#3,<xmm15=int6464#16
6132# asm 2: pxor <xmm2=%xmm2,<xmm15=%xmm15
6133pxor %xmm2,%xmm15
6134
6135# qhasm: xmm12 ^= xmm5
6136# asm 1: pxor <xmm5=int6464#6,<xmm12=int6464#13
6137# asm 2: pxor <xmm5=%xmm5,<xmm12=%xmm12
6138pxor %xmm5,%xmm12
6139
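# NOTE (annotation): pshufd $0x4E swaps the two 64-bit halves of each
# slice, i.e. a rotation by two dword positions; XORed into
# xmm8..xmm15 this presumably completes the MixColumns accumulation
# begun above.
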
6140# qhasm: xmm0 = shuffle dwords of xmm0 by 0x4E
6141# asm 1: pshufd $0x4E,<xmm0=int6464#1,>xmm0=int6464#1
6142# asm 2: pshufd $0x4E,<xmm0=%xmm0,>xmm0=%xmm0
6143pshufd $0x4E,%xmm0,%xmm0
6144
6145# qhasm: xmm1 = shuffle dwords of xmm1 by 0x4E
6146# asm 1: pshufd $0x4E,<xmm1=int6464#2,>xmm1=int6464#2
6147# asm 2: pshufd $0x4E,<xmm1=%xmm1,>xmm1=%xmm1
6148pshufd $0x4E,%xmm1,%xmm1
6149
6150# qhasm: xmm4 = shuffle dwords of xmm4 by 0x4E
6151# asm 1: pshufd $0x4E,<xmm4=int6464#5,>xmm4=int6464#5
6152# asm 2: pshufd $0x4E,<xmm4=%xmm4,>xmm4=%xmm4
6153pshufd $0x4E,%xmm4,%xmm4
6154
6155# qhasm: xmm6 = shuffle dwords of xmm6 by 0x4E
6156# asm 1: pshufd $0x4E,<xmm6=int6464#7,>xmm6=int6464#7
6157# asm 2: pshufd $0x4E,<xmm6=%xmm6,>xmm6=%xmm6
6158pshufd $0x4E,%xmm6,%xmm6
6159
6160# qhasm: xmm3 = shuffle dwords of xmm3 by 0x4E
6161# asm 1: pshufd $0x4E,<xmm3=int6464#4,>xmm3=int6464#4
6162# asm 2: pshufd $0x4E,<xmm3=%xmm3,>xmm3=%xmm3
6163pshufd $0x4E,%xmm3,%xmm3
6164
6165# qhasm: xmm7 = shuffle dwords of xmm7 by 0x4E
6166# asm 1: pshufd $0x4E,<xmm7=int6464#8,>xmm7=int6464#8
6167# asm 2: pshufd $0x4E,<xmm7=%xmm7,>xmm7=%xmm7
6168pshufd $0x4E,%xmm7,%xmm7
6169
6170# qhasm: xmm2 = shuffle dwords of xmm2 by 0x4E
6171# asm 1: pshufd $0x4E,<xmm2=int6464#3,>xmm2=int6464#3
6172# asm 2: pshufd $0x4E,<xmm2=%xmm2,>xmm2=%xmm2
6173pshufd $0x4E,%xmm2,%xmm2
6174
6175# qhasm: xmm5 = shuffle dwords of xmm5 by 0x4E
6176# asm 1: pshufd $0x4E,<xmm5=int6464#6,>xmm5=int6464#6
6177# asm 2: pshufd $0x4E,<xmm5=%xmm5,>xmm5=%xmm5
6178pshufd $0x4E,%xmm5,%xmm5
6179
6180# qhasm: xmm8 ^= xmm0
6181# asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9
6182# asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8
6183pxor %xmm0,%xmm8
6184
6185# qhasm: xmm9 ^= xmm1
6186# asm 1: pxor <xmm1=int6464#2,<xmm9=int6464#10
6187# asm 2: pxor <xmm1=%xmm1,<xmm9=%xmm9
6188pxor %xmm1,%xmm9
6189
6190# qhasm: xmm10 ^= xmm4
6191# asm 1: pxor <xmm4=int6464#5,<xmm10=int6464#11
6192# asm 2: pxor <xmm4=%xmm4,<xmm10=%xmm10
6193pxor %xmm4,%xmm10
6194
6195# qhasm: xmm11 ^= xmm6
6196# asm 1: pxor <xmm6=int6464#7,<xmm11=int6464#12
6197# asm 2: pxor <xmm6=%xmm6,<xmm11=%xmm11
6198pxor %xmm6,%xmm11
6199
6200# qhasm: xmm12 ^= xmm3
6201# asm 1: pxor <xmm3=int6464#4,<xmm12=int6464#13
6202# asm 2: pxor <xmm3=%xmm3,<xmm12=%xmm12
6203pxor %xmm3,%xmm12
6204
6205# qhasm: xmm13 ^= xmm7
6206# asm 1: pxor <xmm7=int6464#8,<xmm13=int6464#14
6207# asm 2: pxor <xmm7=%xmm7,<xmm13=%xmm13
6208pxor %xmm7,%xmm13
6209
6210# qhasm: xmm14 ^= xmm2
6211# asm 1: pxor <xmm2=int6464#3,<xmm14=int6464#15
6212# asm 2: pxor <xmm2=%xmm2,<xmm14=%xmm14
6213pxor %xmm2,%xmm14
6214
6215# qhasm: xmm15 ^= xmm5
6216# asm 1: pxor <xmm5=int6464#6,<xmm15=int6464#16
6217# asm 2: pxor <xmm5=%xmm5,<xmm15=%xmm15
6218pxor %xmm5,%xmm15
6219
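# NOTE (annotation): AddRoundKey + ShiftRows. Each bitsliced round key
# occupies 8 x 16 = 128 bytes, so offsets 640..752 presumably select
# round key 5 (5 * 128 = 640); pshufb with the SR constant then
# applies ShiftRows to every slice.
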
6220# qhasm: xmm8 ^= *(int128 *)(c + 640)
6221# asm 1: pxor 640(<c=int64#4),<xmm8=int6464#9
6222# asm 2: pxor 640(<c=%rcx),<xmm8=%xmm8
6223pxor 640(%rcx),%xmm8
6224
6225# qhasm: shuffle bytes of xmm8 by SR
6226# asm 1: pshufb SR,<xmm8=int6464#9
6227# asm 2: pshufb SR,<xmm8=%xmm8
6228pshufb SR,%xmm8
6229
6230# qhasm: xmm9 ^= *(int128 *)(c + 656)
6231# asm 1: pxor 656(<c=int64#4),<xmm9=int6464#10
6232# asm 2: pxor 656(<c=%rcx),<xmm9=%xmm9
6233pxor 656(%rcx),%xmm9
6234
6235# qhasm: shuffle bytes of xmm9 by SR
6236# asm 1: pshufb SR,<xmm9=int6464#10
6237# asm 2: pshufb SR,<xmm9=%xmm9
6238pshufb SR,%xmm9
6239
6240# qhasm: xmm10 ^= *(int128 *)(c + 672)
6241# asm 1: pxor 672(<c=int64#4),<xmm10=int6464#11
6242# asm 2: pxor 672(<c=%rcx),<xmm10=%xmm10
6243pxor 672(%rcx),%xmm10
6244
6245# qhasm: shuffle bytes of xmm10 by SR
6246# asm 1: pshufb SR,<xmm10=int6464#11
6247# asm 2: pshufb SR,<xmm10=%xmm10
6248pshufb SR,%xmm10
6249
6250# qhasm: xmm11 ^= *(int128 *)(c + 688)
6251# asm 1: pxor 688(<c=int64#4),<xmm11=int6464#12
6252# asm 2: pxor 688(<c=%rcx),<xmm11=%xmm11
6253pxor 688(%rcx),%xmm11
6254
6255# qhasm: shuffle bytes of xmm11 by SR
6256# asm 1: pshufb SR,<xmm11=int6464#12
6257# asm 2: pshufb SR,<xmm11=%xmm11
6258pshufb SR,%xmm11
6259
6260# qhasm: xmm12 ^= *(int128 *)(c + 704)
6261# asm 1: pxor 704(<c=int64#4),<xmm12=int6464#13
6262# asm 2: pxor 704(<c=%rcx),<xmm12=%xmm12
6263pxor 704(%rcx),%xmm12
6264
6265# qhasm: shuffle bytes of xmm12 by SR
6266# asm 1: pshufb SR,<xmm12=int6464#13
6267# asm 2: pshufb SR,<xmm12=%xmm12
6268pshufb SR,%xmm12
6269
6270# qhasm: xmm13 ^= *(int128 *)(c + 720)
6271# asm 1: pxor 720(<c=int64#4),<xmm13=int6464#14
6272# asm 2: pxor 720(<c=%rcx),<xmm13=%xmm13
6273pxor 720(%rcx),%xmm13
6274
6275# qhasm: shuffle bytes of xmm13 by SR
6276# asm 1: pshufb SR,<xmm13=int6464#14
6277# asm 2: pshufb SR,<xmm13=%xmm13
6278pshufb SR,%xmm13
6279
6280# qhasm: xmm14 ^= *(int128 *)(c + 736)
6281# asm 1: pxor 736(<c=int64#4),<xmm14=int6464#15
6282# asm 2: pxor 736(<c=%rcx),<xmm14=%xmm14
6283pxor 736(%rcx),%xmm14
6284
6285# qhasm: shuffle bytes of xmm14 by SR
6286# asm 1: pshufb SR,<xmm14=int6464#15
6287# asm 2: pshufb SR,<xmm14=%xmm14
6288pshufb SR,%xmm14
6289
6290# qhasm: xmm15 ^= *(int128 *)(c + 752)
6291# asm 1: pxor 752(<c=int64#4),<xmm15=int6464#16
6292# asm 2: pxor 752(<c=%rcx),<xmm15=%xmm15
6293pxor 752(%rcx),%xmm15
6294
6295# qhasm: shuffle bytes of xmm15 by SR
6296# asm 1: pshufb SR,<xmm15=int6464#16
6297# asm 2: pshufb SR,<xmm15=%xmm15
6298pshufb SR,%xmm15
6299
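# NOTE (annotation): the pxor chain below appears to be the top linear
# layer of the next S-box evaluation, forming the shared intermediate
# terms of the Boolean circuit before its nonlinear (pand/por) core.
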
6300# qhasm: xmm13 ^= xmm14
6301# asm 1: pxor <xmm14=int6464#15,<xmm13=int6464#14
6302# asm 2: pxor <xmm14=%xmm14,<xmm13=%xmm13
6303pxor %xmm14,%xmm13
6304
6305# qhasm: xmm10 ^= xmm9
6306# asm 1: pxor <xmm9=int6464#10,<xmm10=int6464#11
6307# asm 2: pxor <xmm9=%xmm9,<xmm10=%xmm10
6308pxor %xmm9,%xmm10
6309
6310# qhasm: xmm13 ^= xmm8
6311# asm 1: pxor <xmm8=int6464#9,<xmm13=int6464#14
6312# asm 2: pxor <xmm8=%xmm8,<xmm13=%xmm13
6313pxor %xmm8,%xmm13
6314
6315# qhasm: xmm14 ^= xmm10
6316# asm 1: pxor <xmm10=int6464#11,<xmm14=int6464#15
6317# asm 2: pxor <xmm10=%xmm10,<xmm14=%xmm14
6318pxor %xmm10,%xmm14
6319
6320# qhasm: xmm11 ^= xmm8
6321# asm 1: pxor <xmm8=int6464#9,<xmm11=int6464#12
6322# asm 2: pxor <xmm8=%xmm8,<xmm11=%xmm11
6323pxor %xmm8,%xmm11
6324
6325# qhasm: xmm14 ^= xmm11
6326# asm 1: pxor <xmm11=int6464#12,<xmm14=int6464#15
6327# asm 2: pxor <xmm11=%xmm11,<xmm14=%xmm14
6328pxor %xmm11,%xmm14
6329
6330# qhasm: xmm11 ^= xmm15
6331# asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#12
6332# asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm11
6333pxor %xmm15,%xmm11
6334
6335# qhasm: xmm11 ^= xmm12
6336# asm 1: pxor <xmm12=int6464#13,<xmm11=int6464#12
6337# asm 2: pxor <xmm12=%xmm12,<xmm11=%xmm11
6338pxor %xmm12,%xmm11
6339
6340# qhasm: xmm15 ^= xmm13
6341# asm 1: pxor <xmm13=int6464#14,<xmm15=int6464#16
6342# asm 2: pxor <xmm13=%xmm13,<xmm15=%xmm15
6343pxor %xmm13,%xmm15
6344
6345# qhasm: xmm11 ^= xmm9
6346# asm 1: pxor <xmm9=int6464#10,<xmm11=int6464#12
6347# asm 2: pxor <xmm9=%xmm9,<xmm11=%xmm11
6348pxor %xmm9,%xmm11
6349
6350# qhasm: xmm12 ^= xmm13
6351# asm 1: pxor <xmm13=int6464#14,<xmm12=int6464#13
6352# asm 2: pxor <xmm13=%xmm13,<xmm12=%xmm12
6353pxor %xmm13,%xmm12
6354
6355# qhasm: xmm10 ^= xmm15
6356# asm 1: pxor <xmm15=int6464#16,<xmm10=int6464#11
6357# asm 2: pxor <xmm15=%xmm15,<xmm10=%xmm10
6358pxor %xmm15,%xmm10
6359
6360# qhasm: xmm9 ^= xmm13
6361# asm 1: pxor <xmm13=int6464#14,<xmm9=int6464#10
6362# asm 2: pxor <xmm13=%xmm13,<xmm9=%xmm9
6363pxor %xmm13,%xmm9
6364
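# NOTE (annotation): start of the nonlinear core of the S-box; the
# inversion is expressed entirely with movdqa/pxor/pand/por register
# operations, so there are no table lookups and no secret-dependent
# memory accesses.
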
6365# qhasm: xmm3 = xmm15
6366# asm 1: movdqa <xmm15=int6464#16,>xmm3=int6464#1
6367# asm 2: movdqa <xmm15=%xmm15,>xmm3=%xmm0
6368movdqa %xmm15,%xmm0
6369
6370# qhasm: xmm2 = xmm9
6371# asm 1: movdqa <xmm9=int6464#10,>xmm2=int6464#2
6372# asm 2: movdqa <xmm9=%xmm9,>xmm2=%xmm1
6373movdqa %xmm9,%xmm1
6374
6375# qhasm: xmm1 = xmm13
6376# asm 1: movdqa <xmm13=int6464#14,>xmm1=int6464#3
6377# asm 2: movdqa <xmm13=%xmm13,>xmm1=%xmm2
6378movdqa %xmm13,%xmm2
6379
6380# qhasm: xmm5 = xmm10
6381# asm 1: movdqa <xmm10=int6464#11,>xmm5=int6464#4
6382# asm 2: movdqa <xmm10=%xmm10,>xmm5=%xmm3
6383movdqa %xmm10,%xmm3
6384
6385# qhasm: xmm4 = xmm14
6386# asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#5
6387# asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm4
6388movdqa %xmm14,%xmm4
6389
6390# qhasm: xmm3 ^= xmm12
6391# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#1
6392# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm0
6393pxor %xmm12,%xmm0
6394
6395# qhasm: xmm2 ^= xmm10
6396# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#2
6397# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm1
6398pxor %xmm10,%xmm1
6399
6400# qhasm: xmm1 ^= xmm11
6401# asm 1: pxor <xmm11=int6464#12,<xmm1=int6464#3
6402# asm 2: pxor <xmm11=%xmm11,<xmm1=%xmm2
6403pxor %xmm11,%xmm2
6404
6405# qhasm: xmm5 ^= xmm12
6406# asm 1: pxor <xmm12=int6464#13,<xmm5=int6464#4
6407# asm 2: pxor <xmm12=%xmm12,<xmm5=%xmm3
6408pxor %xmm12,%xmm3
6409
6410# qhasm: xmm4 ^= xmm8
6411# asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#5
6412# asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm4
6413pxor %xmm8,%xmm4
6414
6415# qhasm: xmm6 = xmm3
6416# asm 1: movdqa <xmm3=int6464#1,>xmm6=int6464#6
6417# asm 2: movdqa <xmm3=%xmm0,>xmm6=%xmm5
6418movdqa %xmm0,%xmm5
6419
6420# qhasm: xmm0 = xmm2
6421# asm 1: movdqa <xmm2=int6464#2,>xmm0=int6464#7
6422# asm 2: movdqa <xmm2=%xmm1,>xmm0=%xmm6
6423movdqa %xmm1,%xmm6
6424
6425# qhasm: xmm7 = xmm3
6426# asm 1: movdqa <xmm3=int6464#1,>xmm7=int6464#8
6427# asm 2: movdqa <xmm3=%xmm0,>xmm7=%xmm7
6428movdqa %xmm0,%xmm7
6429
6430# qhasm: xmm2 |= xmm1
6431# asm 1: por <xmm1=int6464#3,<xmm2=int6464#2
6432# asm 2: por <xmm1=%xmm2,<xmm2=%xmm1
6433por %xmm2,%xmm1
6434
6435# qhasm: xmm3 |= xmm4
6436# asm 1: por <xmm4=int6464#5,<xmm3=int6464#1
6437# asm 2: por <xmm4=%xmm4,<xmm3=%xmm0
6438por %xmm4,%xmm0
6439
6440# qhasm: xmm7 ^= xmm0
6441# asm 1: pxor <xmm0=int6464#7,<xmm7=int6464#8
6442# asm 2: pxor <xmm0=%xmm6,<xmm7=%xmm7
6443pxor %xmm6,%xmm7
6444
6445# qhasm: xmm6 &= xmm4
6446# asm 1: pand <xmm4=int6464#5,<xmm6=int6464#6
6447# asm 2: pand <xmm4=%xmm4,<xmm6=%xmm5
6448pand %xmm4,%xmm5
6449
6450# qhasm: xmm0 &= xmm1
6451# asm 1: pand <xmm1=int6464#3,<xmm0=int6464#7
6452# asm 2: pand <xmm1=%xmm2,<xmm0=%xmm6
6453pand %xmm2,%xmm6
6454
6455# qhasm: xmm4 ^= xmm1
6456# asm 1: pxor <xmm1=int6464#3,<xmm4=int6464#5
6457# asm 2: pxor <xmm1=%xmm2,<xmm4=%xmm4
6458pxor %xmm2,%xmm4
6459
6460# qhasm: xmm7 &= xmm4
6461# asm 1: pand <xmm4=int6464#5,<xmm7=int6464#8
6462# asm 2: pand <xmm4=%xmm4,<xmm7=%xmm7
6463pand %xmm4,%xmm7
6464
6465# qhasm: xmm4 = xmm11
6466# asm 1: movdqa <xmm11=int6464#12,>xmm4=int6464#3
6467# asm 2: movdqa <xmm11=%xmm11,>xmm4=%xmm2
6468movdqa %xmm11,%xmm2
6469
6470# qhasm: xmm4 ^= xmm8
6471# asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#3
6472# asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm2
6473pxor %xmm8,%xmm2
6474
6475# qhasm: xmm5 &= xmm4
6476# asm 1: pand <xmm4=int6464#3,<xmm5=int6464#4
6477# asm 2: pand <xmm4=%xmm2,<xmm5=%xmm3
6478pand %xmm2,%xmm3
6479
6480# qhasm: xmm3 ^= xmm5
6481# asm 1: pxor <xmm5=int6464#4,<xmm3=int6464#1
6482# asm 2: pxor <xmm5=%xmm3,<xmm3=%xmm0
6483pxor %xmm3,%xmm0
6484
6485# qhasm: xmm2 ^= xmm5
6486# asm 1: pxor <xmm5=int6464#4,<xmm2=int6464#2
6487# asm 2: pxor <xmm5=%xmm3,<xmm2=%xmm1
6488pxor %xmm3,%xmm1
6489
6490# qhasm: xmm5 = xmm15
6491# asm 1: movdqa <xmm15=int6464#16,>xmm5=int6464#3
6492# asm 2: movdqa <xmm15=%xmm15,>xmm5=%xmm2
6493movdqa %xmm15,%xmm2
6494
6495# qhasm: xmm5 ^= xmm9
6496# asm 1: pxor <xmm9=int6464#10,<xmm5=int6464#3
6497# asm 2: pxor <xmm9=%xmm9,<xmm5=%xmm2
6498pxor %xmm9,%xmm2
6499
6500# qhasm: xmm4 = xmm13
6501# asm 1: movdqa <xmm13=int6464#14,>xmm4=int6464#4
6502# asm 2: movdqa <xmm13=%xmm13,>xmm4=%xmm3
6503movdqa %xmm13,%xmm3
6504
6505# qhasm: xmm1 = xmm5
6506# asm 1: movdqa <xmm5=int6464#3,>xmm1=int6464#5
6507# asm 2: movdqa <xmm5=%xmm2,>xmm1=%xmm4
6508movdqa %xmm2,%xmm4
6509
6510# qhasm: xmm4 ^= xmm14
6511# asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#4
6512# asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm3
6513pxor %xmm14,%xmm3
6514
6515# qhasm: xmm1 |= xmm4
6516# asm 1: por <xmm4=int6464#4,<xmm1=int6464#5
6517# asm 2: por <xmm4=%xmm3,<xmm1=%xmm4
6518por %xmm3,%xmm4
6519
6520# qhasm: xmm5 &= xmm4
6521# asm 1: pand <xmm4=int6464#4,<xmm5=int6464#3
6522# asm 2: pand <xmm4=%xmm3,<xmm5=%xmm2
6523pand %xmm3,%xmm2
6524
6525# qhasm: xmm0 ^= xmm5
6526# asm 1: pxor <xmm5=int6464#3,<xmm0=int6464#7
6527# asm 2: pxor <xmm5=%xmm2,<xmm0=%xmm6
6528pxor %xmm2,%xmm6
6529
6530# qhasm: xmm3 ^= xmm7
6531# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#1
6532# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm0
6533pxor %xmm7,%xmm0
6534
6535# qhasm: xmm2 ^= xmm6
6536# asm 1: pxor <xmm6=int6464#6,<xmm2=int6464#2
6537# asm 2: pxor <xmm6=%xmm5,<xmm2=%xmm1
6538pxor %xmm5,%xmm1
6539
6540# qhasm: xmm1 ^= xmm7
6541# asm 1: pxor <xmm7=int6464#8,<xmm1=int6464#5
6542# asm 2: pxor <xmm7=%xmm7,<xmm1=%xmm4
6543pxor %xmm7,%xmm4
6544
6545# qhasm: xmm0 ^= xmm6
6546# asm 1: pxor <xmm6=int6464#6,<xmm0=int6464#7
6547# asm 2: pxor <xmm6=%xmm5,<xmm0=%xmm6
6548pxor %xmm5,%xmm6
6549
6550# qhasm: xmm1 ^= xmm6
6551# asm 1: pxor <xmm6=int6464#6,<xmm1=int6464#5
6552# asm 2: pxor <xmm6=%xmm5,<xmm1=%xmm4
6553pxor %xmm5,%xmm4
6554
6555# qhasm: xmm4 = xmm10
6556# asm 1: movdqa <xmm10=int6464#11,>xmm4=int6464#3
6557# asm 2: movdqa <xmm10=%xmm10,>xmm4=%xmm2
6558movdqa %xmm10,%xmm2
6559
6560# qhasm: xmm5 = xmm12
6561# asm 1: movdqa <xmm12=int6464#13,>xmm5=int6464#4
6562# asm 2: movdqa <xmm12=%xmm12,>xmm5=%xmm3
6563movdqa %xmm12,%xmm3
6564
6565# qhasm: xmm6 = xmm9
6566# asm 1: movdqa <xmm9=int6464#10,>xmm6=int6464#6
6567# asm 2: movdqa <xmm9=%xmm9,>xmm6=%xmm5
6568movdqa %xmm9,%xmm5
6569
6570# qhasm: xmm7 = xmm15
6571# asm 1: movdqa <xmm15=int6464#16,>xmm7=int6464#8
6572# asm 2: movdqa <xmm15=%xmm15,>xmm7=%xmm7
6573movdqa %xmm15,%xmm7
6574
6575# qhasm: xmm4 &= xmm11
6576# asm 1: pand <xmm11=int6464#12,<xmm4=int6464#3
6577# asm 2: pand <xmm11=%xmm11,<xmm4=%xmm2
6578pand %xmm11,%xmm2
6579
6580# qhasm: xmm5 &= xmm8
6581# asm 1: pand <xmm8=int6464#9,<xmm5=int6464#4
6582# asm 2: pand <xmm8=%xmm8,<xmm5=%xmm3
6583pand %xmm8,%xmm3
6584
6585# qhasm: xmm6 &= xmm13
6586# asm 1: pand <xmm13=int6464#14,<xmm6=int6464#6
6587# asm 2: pand <xmm13=%xmm13,<xmm6=%xmm5
6588pand %xmm13,%xmm5
6589
6590# qhasm: xmm7 |= xmm14
6591# asm 1: por <xmm14=int6464#15,<xmm7=int6464#8
6592# asm 2: por <xmm14=%xmm14,<xmm7=%xmm7
6593por %xmm14,%xmm7
6594
6595# qhasm: xmm3 ^= xmm4
6596# asm 1: pxor <xmm4=int6464#3,<xmm3=int6464#1
6597# asm 2: pxor <xmm4=%xmm2,<xmm3=%xmm0
6598pxor %xmm2,%xmm0
6599
6600# qhasm: xmm2 ^= xmm5
6601# asm 1: pxor <xmm5=int6464#4,<xmm2=int6464#2
6602# asm 2: pxor <xmm5=%xmm3,<xmm2=%xmm1
6603pxor %xmm3,%xmm1
6604
6605# qhasm: xmm1 ^= xmm6
6606# asm 1: pxor <xmm6=int6464#6,<xmm1=int6464#5
6607# asm 2: pxor <xmm6=%xmm5,<xmm1=%xmm4
6608pxor %xmm5,%xmm4
6609
6610# qhasm: xmm0 ^= xmm7
6611# asm 1: pxor <xmm7=int6464#8,<xmm0=int6464#7
6612# asm 2: pxor <xmm7=%xmm7,<xmm0=%xmm6
6613pxor %xmm7,%xmm6
6614
6615# qhasm: xmm4 = xmm3
6616# asm 1: movdqa <xmm3=int6464#1,>xmm4=int6464#3
6617# asm 2: movdqa <xmm3=%xmm0,>xmm4=%xmm2
6618movdqa %xmm0,%xmm2
6619
6620# qhasm: xmm4 ^= xmm2
6621# asm 1: pxor <xmm2=int6464#2,<xmm4=int6464#3
6622# asm 2: pxor <xmm2=%xmm1,<xmm4=%xmm2
6623pxor %xmm1,%xmm2
6624
6625# qhasm: xmm3 &= xmm1
6626# asm 1: pand <xmm1=int6464#5,<xmm3=int6464#1
6627# asm 2: pand <xmm1=%xmm4,<xmm3=%xmm0
6628pand %xmm4,%xmm0
6629
6630# qhasm: xmm6 = xmm0
6631# asm 1: movdqa <xmm0=int6464#7,>xmm6=int6464#4
6632# asm 2: movdqa <xmm0=%xmm6,>xmm6=%xmm3
6633movdqa %xmm6,%xmm3
6634
6635# qhasm: xmm6 ^= xmm3
6636# asm 1: pxor <xmm3=int6464#1,<xmm6=int6464#4
6637# asm 2: pxor <xmm3=%xmm0,<xmm6=%xmm3
6638pxor %xmm0,%xmm3
6639
6640# qhasm: xmm7 = xmm4
6641# asm 1: movdqa <xmm4=int6464#3,>xmm7=int6464#6
6642# asm 2: movdqa <xmm4=%xmm2,>xmm7=%xmm5
6643movdqa %xmm2,%xmm5
6644
6645# qhasm: xmm7 &= xmm6
6646# asm 1: pand <xmm6=int6464#4,<xmm7=int6464#6
6647# asm 2: pand <xmm6=%xmm3,<xmm7=%xmm5
6648pand %xmm3,%xmm5
6649
6650# qhasm: xmm7 ^= xmm2
6651# asm 1: pxor <xmm2=int6464#2,<xmm7=int6464#6
6652# asm 2: pxor <xmm2=%xmm1,<xmm7=%xmm5
6653pxor %xmm1,%xmm5
6654
6655# qhasm: xmm5 = xmm1
6656# asm 1: movdqa <xmm1=int6464#5,>xmm5=int6464#8
6657# asm 2: movdqa <xmm1=%xmm4,>xmm5=%xmm7
6658movdqa %xmm4,%xmm7
6659
6660# qhasm: xmm5 ^= xmm0
6661# asm 1: pxor <xmm0=int6464#7,<xmm5=int6464#8
6662# asm 2: pxor <xmm0=%xmm6,<xmm5=%xmm7
6663pxor %xmm6,%xmm7
6664
6665# qhasm: xmm3 ^= xmm2
6666# asm 1: pxor <xmm2=int6464#2,<xmm3=int6464#1
6667# asm 2: pxor <xmm2=%xmm1,<xmm3=%xmm0
6668pxor %xmm1,%xmm0
6669
6670# qhasm: xmm5 &= xmm3
6671# asm 1: pand <xmm3=int6464#1,<xmm5=int6464#8
6672# asm 2: pand <xmm3=%xmm0,<xmm5=%xmm7
6673pand %xmm0,%xmm7
6674
6675# qhasm: xmm5 ^= xmm0
6676# asm 1: pxor <xmm0=int6464#7,<xmm5=int6464#8
6677# asm 2: pxor <xmm0=%xmm6,<xmm5=%xmm7
6678pxor %xmm6,%xmm7
6679
6680# qhasm: xmm1 ^= xmm5
6681# asm 1: pxor <xmm5=int6464#8,<xmm1=int6464#5
6682# asm 2: pxor <xmm5=%xmm7,<xmm1=%xmm4
6683pxor %xmm7,%xmm4
6684
6685# qhasm: xmm2 = xmm6
6686# asm 1: movdqa <xmm6=int6464#4,>xmm2=int6464#1
6687# asm 2: movdqa <xmm6=%xmm3,>xmm2=%xmm0
6688movdqa %xmm3,%xmm0
6689
6690# qhasm: xmm2 ^= xmm5
6691# asm 1: pxor <xmm5=int6464#8,<xmm2=int6464#1
6692# asm 2: pxor <xmm5=%xmm7,<xmm2=%xmm0
6693pxor %xmm7,%xmm0
6694
6695# qhasm: xmm2 &= xmm0
6696# asm 1: pand <xmm0=int6464#7,<xmm2=int6464#1
6697# asm 2: pand <xmm0=%xmm6,<xmm2=%xmm0
6698pand %xmm6,%xmm0
6699
6700# qhasm: xmm1 ^= xmm2
6701# asm 1: pxor <xmm2=int6464#1,<xmm1=int6464#5
6702# asm 2: pxor <xmm2=%xmm0,<xmm1=%xmm4
6703pxor %xmm0,%xmm4
6704
6705# qhasm: xmm6 ^= xmm2
6706# asm 1: pxor <xmm2=int6464#1,<xmm6=int6464#4
6707# asm 2: pxor <xmm2=%xmm0,<xmm6=%xmm3
6708pxor %xmm0,%xmm3
6709
6710# qhasm: xmm6 &= xmm7
6711# asm 1: pand <xmm7=int6464#6,<xmm6=int6464#4
6712# asm 2: pand <xmm7=%xmm5,<xmm6=%xmm3
6713pand %xmm5,%xmm3
6714
6715# qhasm: xmm6 ^= xmm4
6716# asm 1: pxor <xmm4=int6464#3,<xmm6=int6464#4
6717# asm 2: pxor <xmm4=%xmm2,<xmm6=%xmm3
6718pxor %xmm2,%xmm3
6719
6720# qhasm: xmm4 = xmm14
6721# asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#1
6722# asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm0
6723movdqa %xmm14,%xmm0
6724
6725# qhasm: xmm0 = xmm13
6726# asm 1: movdqa <xmm13=int6464#14,>xmm0=int6464#2
6727# asm 2: movdqa <xmm13=%xmm13,>xmm0=%xmm1
6728movdqa %xmm13,%xmm1
6729
6730# qhasm: xmm2 = xmm7
6731# asm 1: movdqa <xmm7=int6464#6,>xmm2=int6464#3
6732# asm 2: movdqa <xmm7=%xmm5,>xmm2=%xmm2
6733movdqa %xmm5,%xmm2
6734
6735# qhasm: xmm2 ^= xmm6
6736# asm 1: pxor <xmm6=int6464#4,<xmm2=int6464#3
6737# asm 2: pxor <xmm6=%xmm3,<xmm2=%xmm2
6738pxor %xmm3,%xmm2
6739
6740# qhasm: xmm2 &= xmm14
6741# asm 1: pand <xmm14=int6464#15,<xmm2=int6464#3
6742# asm 2: pand <xmm14=%xmm14,<xmm2=%xmm2
6743pand %xmm14,%xmm2
6744
6745# qhasm: xmm14 ^= xmm13
6746# asm 1: pxor <xmm13=int6464#14,<xmm14=int6464#15
6747# asm 2: pxor <xmm13=%xmm13,<xmm14=%xmm14
6748pxor %xmm13,%xmm14
6749
6750# qhasm: xmm14 &= xmm6
6751# asm 1: pand <xmm6=int6464#4,<xmm14=int6464#15
6752# asm 2: pand <xmm6=%xmm3,<xmm14=%xmm14
6753pand %xmm3,%xmm14
6754
6755# qhasm: xmm13 &= xmm7
6756# asm 1: pand <xmm7=int6464#6,<xmm13=int6464#14
6757# asm 2: pand <xmm7=%xmm5,<xmm13=%xmm13
6758pand %xmm5,%xmm13
6759
6760# qhasm: xmm14 ^= xmm13
6761# asm 1: pxor <xmm13=int6464#14,<xmm14=int6464#15
6762# asm 2: pxor <xmm13=%xmm13,<xmm14=%xmm14
6763pxor %xmm13,%xmm14
6764
6765# qhasm: xmm13 ^= xmm2
6766# asm 1: pxor <xmm2=int6464#3,<xmm13=int6464#14
6767# asm 2: pxor <xmm2=%xmm2,<xmm13=%xmm13
6768pxor %xmm2,%xmm13
6769
6770# qhasm: xmm4 ^= xmm8
6771# asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#1
6772# asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm0
6773pxor %xmm8,%xmm0
6774
6775# qhasm: xmm0 ^= xmm11
6776# asm 1: pxor <xmm11=int6464#12,<xmm0=int6464#2
6777# asm 2: pxor <xmm11=%xmm11,<xmm0=%xmm1
6778pxor %xmm11,%xmm1
6779
6780# qhasm: xmm7 ^= xmm5
6781# asm 1: pxor <xmm5=int6464#8,<xmm7=int6464#6
6782# asm 2: pxor <xmm5=%xmm7,<xmm7=%xmm5
6783pxor %xmm7,%xmm5
6784
6785# qhasm: xmm6 ^= xmm1
6786# asm 1: pxor <xmm1=int6464#5,<xmm6=int6464#4
6787# asm 2: pxor <xmm1=%xmm4,<xmm6=%xmm3
6788pxor %xmm4,%xmm3
6789
6790# qhasm: xmm3 = xmm7
6791# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
6792# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
6793movdqa %xmm5,%xmm2
6794
6795# qhasm: xmm3 ^= xmm6
6796# asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3
6797# asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2
6798pxor %xmm3,%xmm2
6799
6800# qhasm: xmm3 &= xmm4
6801# asm 1: pand <xmm4=int6464#1,<xmm3=int6464#3
6802# asm 2: pand <xmm4=%xmm0,<xmm3=%xmm2
6803pand %xmm0,%xmm2
6804
6805# qhasm: xmm4 ^= xmm0
6806# asm 1: pxor <xmm0=int6464#2,<xmm4=int6464#1
6807# asm 2: pxor <xmm0=%xmm1,<xmm4=%xmm0
6808pxor %xmm1,%xmm0
6809
6810# qhasm: xmm4 &= xmm6
6811# asm 1: pand <xmm6=int6464#4,<xmm4=int6464#1
6812# asm 2: pand <xmm6=%xmm3,<xmm4=%xmm0
6813pand %xmm3,%xmm0
6814
6815# qhasm: xmm0 &= xmm7
6816# asm 1: pand <xmm7=int6464#6,<xmm0=int6464#2
6817# asm 2: pand <xmm7=%xmm5,<xmm0=%xmm1
6818pand %xmm5,%xmm1
6819
6820# qhasm: xmm0 ^= xmm4
6821# asm 1: pxor <xmm4=int6464#1,<xmm0=int6464#2
6822# asm 2: pxor <xmm4=%xmm0,<xmm0=%xmm1
6823pxor %xmm0,%xmm1
6824
6825# qhasm: xmm4 ^= xmm3
6826# asm 1: pxor <xmm3=int6464#3,<xmm4=int6464#1
6827# asm 2: pxor <xmm3=%xmm2,<xmm4=%xmm0
6828pxor %xmm2,%xmm0
6829
6830# qhasm: xmm2 = xmm5
6831# asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3
6832# asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2
6833movdqa %xmm7,%xmm2
6834
6835# qhasm: xmm2 ^= xmm1
6836# asm 1: pxor <xmm1=int6464#5,<xmm2=int6464#3
6837# asm 2: pxor <xmm1=%xmm4,<xmm2=%xmm2
6838pxor %xmm4,%xmm2
6839
6840# qhasm: xmm2 &= xmm8
6841# asm 1: pand <xmm8=int6464#9,<xmm2=int6464#3
6842# asm 2: pand <xmm8=%xmm8,<xmm2=%xmm2
6843pand %xmm8,%xmm2
6844
6845# qhasm: xmm8 ^= xmm11
6846# asm 1: pxor <xmm11=int6464#12,<xmm8=int6464#9
6847# asm 2: pxor <xmm11=%xmm11,<xmm8=%xmm8
6848pxor %xmm11,%xmm8
6849
6850# qhasm: xmm8 &= xmm1
6851# asm 1: pand <xmm1=int6464#5,<xmm8=int6464#9
6852# asm 2: pand <xmm1=%xmm4,<xmm8=%xmm8
6853pand %xmm4,%xmm8
6854
6855# qhasm: xmm11 &= xmm5
6856# asm 1: pand <xmm5=int6464#8,<xmm11=int6464#12
6857# asm 2: pand <xmm5=%xmm7,<xmm11=%xmm11
6858pand %xmm7,%xmm11
6859
6860# qhasm: xmm8 ^= xmm11
6861# asm 1: pxor <xmm11=int6464#12,<xmm8=int6464#9
6862# asm 2: pxor <xmm11=%xmm11,<xmm8=%xmm8
6863pxor %xmm11,%xmm8
6864
6865# qhasm: xmm11 ^= xmm2
6866# asm 1: pxor <xmm2=int6464#3,<xmm11=int6464#12
6867# asm 2: pxor <xmm2=%xmm2,<xmm11=%xmm11
6868pxor %xmm2,%xmm11
6869
6870# qhasm: xmm14 ^= xmm4
6871# asm 1: pxor <xmm4=int6464#1,<xmm14=int6464#15
6872# asm 2: pxor <xmm4=%xmm0,<xmm14=%xmm14
6873pxor %xmm0,%xmm14
6874
6875# qhasm: xmm8 ^= xmm4
6876# asm 1: pxor <xmm4=int6464#1,<xmm8=int6464#9
6877# asm 2: pxor <xmm4=%xmm0,<xmm8=%xmm8
6878pxor %xmm0,%xmm8
6879
6880# qhasm: xmm13 ^= xmm0
6881# asm 1: pxor <xmm0=int6464#2,<xmm13=int6464#14
6882# asm 2: pxor <xmm0=%xmm1,<xmm13=%xmm13
6883pxor %xmm1,%xmm13
6884
6885# qhasm: xmm11 ^= xmm0
6886# asm 1: pxor <xmm0=int6464#2,<xmm11=int6464#12
6887# asm 2: pxor <xmm0=%xmm1,<xmm11=%xmm11
6888pxor %xmm1,%xmm11
6889
6890# qhasm: xmm4 = xmm15
6891# asm 1: movdqa <xmm15=int6464#16,>xmm4=int6464#1
6892# asm 2: movdqa <xmm15=%xmm15,>xmm4=%xmm0
6893movdqa %xmm15,%xmm0
6894
6895# qhasm: xmm0 = xmm9
6896# asm 1: movdqa <xmm9=int6464#10,>xmm0=int6464#2
6897# asm 2: movdqa <xmm9=%xmm9,>xmm0=%xmm1
6898movdqa %xmm9,%xmm1
6899
6900# qhasm: xmm4 ^= xmm12
6901# asm 1: pxor <xmm12=int6464#13,<xmm4=int6464#1
6902# asm 2: pxor <xmm12=%xmm12,<xmm4=%xmm0
6903pxor %xmm12,%xmm0
6904
6905# qhasm: xmm0 ^= xmm10
6906# asm 1: pxor <xmm10=int6464#11,<xmm0=int6464#2
6907# asm 2: pxor <xmm10=%xmm10,<xmm0=%xmm1
6908pxor %xmm10,%xmm1
6909
6910# qhasm: xmm3 = xmm7
6911# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
6912# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
6913movdqa %xmm5,%xmm2
6914
6915# qhasm: xmm3 ^= xmm6
6916# asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3
6917# asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2
6918pxor %xmm3,%xmm2
6919
6920# qhasm: xmm3 &= xmm4
6921# asm 1: pand <xmm4=int6464#1,<xmm3=int6464#3
6922# asm 2: pand <xmm4=%xmm0,<xmm3=%xmm2
6923pand %xmm0,%xmm2
6924
6925# qhasm: xmm4 ^= xmm0
6926# asm 1: pxor <xmm0=int6464#2,<xmm4=int6464#1
6927# asm 2: pxor <xmm0=%xmm1,<xmm4=%xmm0
6928pxor %xmm1,%xmm0
6929
6930# qhasm: xmm4 &= xmm6
6931# asm 1: pand <xmm6=int6464#4,<xmm4=int6464#1
6932# asm 2: pand <xmm6=%xmm3,<xmm4=%xmm0
6933pand %xmm3,%xmm0
6934
6935# qhasm: xmm0 &= xmm7
6936# asm 1: pand <xmm7=int6464#6,<xmm0=int6464#2
6937# asm 2: pand <xmm7=%xmm5,<xmm0=%xmm1
6938pand %xmm5,%xmm1
6939
6940# qhasm: xmm0 ^= xmm4
6941# asm 1: pxor <xmm4=int6464#1,<xmm0=int6464#2
6942# asm 2: pxor <xmm4=%xmm0,<xmm0=%xmm1
6943pxor %xmm0,%xmm1
6944
6945# qhasm: xmm4 ^= xmm3
6946# asm 1: pxor <xmm3=int6464#3,<xmm4=int6464#1
6947# asm 2: pxor <xmm3=%xmm2,<xmm4=%xmm0
6948pxor %xmm2,%xmm0
6949
6950# qhasm: xmm2 = xmm5
6951# asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3
6952# asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2
6953movdqa %xmm7,%xmm2
6954
6955# qhasm: xmm2 ^= xmm1
6956# asm 1: pxor <xmm1=int6464#5,<xmm2=int6464#3
6957# asm 2: pxor <xmm1=%xmm4,<xmm2=%xmm2
6958pxor %xmm4,%xmm2
6959
6960# qhasm: xmm2 &= xmm12
6961# asm 1: pand <xmm12=int6464#13,<xmm2=int6464#3
6962# asm 2: pand <xmm12=%xmm12,<xmm2=%xmm2
6963pand %xmm12,%xmm2
6964
6965# qhasm: xmm12 ^= xmm10
6966# asm 1: pxor <xmm10=int6464#11,<xmm12=int6464#13
6967# asm 2: pxor <xmm10=%xmm10,<xmm12=%xmm12
6968pxor %xmm10,%xmm12
6969
6970# qhasm: xmm12 &= xmm1
6971# asm 1: pand <xmm1=int6464#5,<xmm12=int6464#13
6972# asm 2: pand <xmm1=%xmm4,<xmm12=%xmm12
6973pand %xmm4,%xmm12
6974
6975# qhasm: xmm10 &= xmm5
6976# asm 1: pand <xmm5=int6464#8,<xmm10=int6464#11
6977# asm 2: pand <xmm5=%xmm7,<xmm10=%xmm10
6978pand %xmm7,%xmm10
6979
6980# qhasm: xmm12 ^= xmm10
6981# asm 1: pxor <xmm10=int6464#11,<xmm12=int6464#13
6982# asm 2: pxor <xmm10=%xmm10,<xmm12=%xmm12
6983pxor %xmm10,%xmm12
6984
6985# qhasm: xmm10 ^= xmm2
6986# asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#11
6987# asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm10
6988pxor %xmm2,%xmm10
6989
6990# qhasm: xmm7 ^= xmm5
6991# asm 1: pxor <xmm5=int6464#8,<xmm7=int6464#6
6992# asm 2: pxor <xmm5=%xmm7,<xmm7=%xmm5
6993pxor %xmm7,%xmm5
6994
6995# qhasm: xmm6 ^= xmm1
6996# asm 1: pxor <xmm1=int6464#5,<xmm6=int6464#4
6997# asm 2: pxor <xmm1=%xmm4,<xmm6=%xmm3
6998pxor %xmm4,%xmm3
6999
7000# qhasm: xmm3 = xmm7
7001# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
7002# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
7003movdqa %xmm5,%xmm2
7004
7005# qhasm: xmm3 ^= xmm6
7006# asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3
7007# asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2
7008pxor %xmm3,%xmm2
7009
7010# qhasm: xmm3 &= xmm15
7011# asm 1: pand <xmm15=int6464#16,<xmm3=int6464#3
7012# asm 2: pand <xmm15=%xmm15,<xmm3=%xmm2
7013pand %xmm15,%xmm2
7014
7015# qhasm: xmm15 ^= xmm9
7016# asm 1: pxor <xmm9=int6464#10,<xmm15=int6464#16
7017# asm 2: pxor <xmm9=%xmm9,<xmm15=%xmm15
7018pxor %xmm9,%xmm15
7019
7020# qhasm: xmm15 &= xmm6
7021# asm 1: pand <xmm6=int6464#4,<xmm15=int6464#16
7022# asm 2: pand <xmm6=%xmm3,<xmm15=%xmm15
7023pand %xmm3,%xmm15
7024
7025# qhasm: xmm9 &= xmm7
7026# asm 1: pand <xmm7=int6464#6,<xmm9=int6464#10
7027# asm 2: pand <xmm7=%xmm5,<xmm9=%xmm9
7028pand %xmm5,%xmm9
7029
7030# qhasm: xmm15 ^= xmm9
7031# asm 1: pxor <xmm9=int6464#10,<xmm15=int6464#16
7032# asm 2: pxor <xmm9=%xmm9,<xmm15=%xmm15
7033pxor %xmm9,%xmm15
7034
7035# qhasm: xmm9 ^= xmm3
7036# asm 1: pxor <xmm3=int6464#3,<xmm9=int6464#10
7037# asm 2: pxor <xmm3=%xmm2,<xmm9=%xmm9
7038pxor %xmm2,%xmm9
7039
7040# qhasm: xmm15 ^= xmm4
7041# asm 1: pxor <xmm4=int6464#1,<xmm15=int6464#16
7042# asm 2: pxor <xmm4=%xmm0,<xmm15=%xmm15
7043pxor %xmm0,%xmm15
7044
7045# qhasm: xmm12 ^= xmm4
7046# asm 1: pxor <xmm4=int6464#1,<xmm12=int6464#13
7047# asm 2: pxor <xmm4=%xmm0,<xmm12=%xmm12
7048pxor %xmm0,%xmm12
7049
7050# qhasm: xmm9 ^= xmm0
7051# asm 1: pxor <xmm0=int6464#2,<xmm9=int6464#10
7052# asm 2: pxor <xmm0=%xmm1,<xmm9=%xmm9
7053pxor %xmm1,%xmm9
7054
7055# qhasm: xmm10 ^= xmm0
7056# asm 1: pxor <xmm0=int6464#2,<xmm10=int6464#11
7057# asm 2: pxor <xmm0=%xmm1,<xmm10=%xmm10
7058pxor %xmm1,%xmm10
7059
7060# qhasm: xmm15 ^= xmm8
7061# asm 1: pxor <xmm8=int6464#9,<xmm15=int6464#16
7062# asm 2: pxor <xmm8=%xmm8,<xmm15=%xmm15
7063pxor %xmm8,%xmm15
7064
7065# qhasm: xmm9 ^= xmm14
7066# asm 1: pxor <xmm14=int6464#15,<xmm9=int6464#10
7067# asm 2: pxor <xmm14=%xmm14,<xmm9=%xmm9
7068pxor %xmm14,%xmm9
7069
7070# qhasm: xmm12 ^= xmm15
7071# asm 1: pxor <xmm15=int6464#16,<xmm12=int6464#13
7072# asm 2: pxor <xmm15=%xmm15,<xmm12=%xmm12
7073pxor %xmm15,%xmm12
7074
7075# qhasm: xmm14 ^= xmm8
7076# asm 1: pxor <xmm8=int6464#9,<xmm14=int6464#15
7077# asm 2: pxor <xmm8=%xmm8,<xmm14=%xmm14
7078pxor %xmm8,%xmm14
7079
7080# qhasm: xmm8 ^= xmm9
7081# asm 1: pxor <xmm9=int6464#10,<xmm8=int6464#9
7082# asm 2: pxor <xmm9=%xmm9,<xmm8=%xmm8
7083pxor %xmm9,%xmm8
7084
7085# qhasm: xmm9 ^= xmm13
7086# asm 1: pxor <xmm13=int6464#14,<xmm9=int6464#10
7087# asm 2: pxor <xmm13=%xmm13,<xmm9=%xmm9
7088pxor %xmm13,%xmm9
7089
7090# qhasm: xmm13 ^= xmm10
7091# asm 1: pxor <xmm10=int6464#11,<xmm13=int6464#14
7092# asm 2: pxor <xmm10=%xmm10,<xmm13=%xmm13
7093pxor %xmm10,%xmm13
7094
7095# qhasm: xmm12 ^= xmm13
7096# asm 1: pxor <xmm13=int6464#14,<xmm12=int6464#13
7097# asm 2: pxor <xmm13=%xmm13,<xmm12=%xmm12
7098pxor %xmm13,%xmm12
7099
7100# qhasm: xmm10 ^= xmm11
7101# asm 1: pxor <xmm11=int6464#12,<xmm10=int6464#11
7102# asm 2: pxor <xmm11=%xmm11,<xmm10=%xmm10
7103pxor %xmm11,%xmm10
7104
7105# qhasm: xmm11 ^= xmm13
7106# asm 1: pxor <xmm13=int6464#14,<xmm11=int6464#12
7107# asm 2: pxor <xmm13=%xmm13,<xmm11=%xmm11
7108pxor %xmm13,%xmm11
7109
7110# qhasm: xmm14 ^= xmm11
7111# asm 1: pxor <xmm11=int6464#12,<xmm14=int6464#15
7112# asm 2: pxor <xmm11=%xmm11,<xmm14=%xmm14
7113pxor %xmm11,%xmm14
7114
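# NOTE (annotation): same MixColumns pattern as above, this time
# operating on the slices in xmm8..xmm15 with the rotated copies
# accumulated in xmm0..xmm7.
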
7115# qhasm: xmm0 = shuffle dwords of xmm8 by 0x93
7116# asm 1: pshufd $0x93,<xmm8=int6464#9,>xmm0=int6464#1
7117# asm 2: pshufd $0x93,<xmm8=%xmm8,>xmm0=%xmm0
7118pshufd $0x93,%xmm8,%xmm0
7119
7120# qhasm: xmm1 = shuffle dwords of xmm9 by 0x93
7121# asm 1: pshufd $0x93,<xmm9=int6464#10,>xmm1=int6464#2
7122# asm 2: pshufd $0x93,<xmm9=%xmm9,>xmm1=%xmm1
7123pshufd $0x93,%xmm9,%xmm1
7124
7125# qhasm: xmm2 = shuffle dwords of xmm12 by 0x93
7126# asm 1: pshufd $0x93,<xmm12=int6464#13,>xmm2=int6464#3
7127# asm 2: pshufd $0x93,<xmm12=%xmm12,>xmm2=%xmm2
7128pshufd $0x93,%xmm12,%xmm2
7129
7130# qhasm: xmm3 = shuffle dwords of xmm14 by 0x93
7131# asm 1: pshufd $0x93,<xmm14=int6464#15,>xmm3=int6464#4
7132# asm 2: pshufd $0x93,<xmm14=%xmm14,>xmm3=%xmm3
7133pshufd $0x93,%xmm14,%xmm3
7134
7135# qhasm: xmm4 = shuffle dwords of xmm11 by 0x93
7136# asm 1: pshufd $0x93,<xmm11=int6464#12,>xmm4=int6464#5
7137# asm 2: pshufd $0x93,<xmm11=%xmm11,>xmm4=%xmm4
7138pshufd $0x93,%xmm11,%xmm4
7139
7140# qhasm: xmm5 = shuffle dwords of xmm15 by 0x93
7141# asm 1: pshufd $0x93,<xmm15=int6464#16,>xmm5=int6464#6
7142# asm 2: pshufd $0x93,<xmm15=%xmm15,>xmm5=%xmm5
7143pshufd $0x93,%xmm15,%xmm5
7144
7145# qhasm: xmm6 = shuffle dwords of xmm10 by 0x93
7146# asm 1: pshufd $0x93,<xmm10=int6464#11,>xmm6=int6464#7
7147# asm 2: pshufd $0x93,<xmm10=%xmm10,>xmm6=%xmm6
7148pshufd $0x93,%xmm10,%xmm6
7149
7150# qhasm: xmm7 = shuffle dwords of xmm13 by 0x93
7151# asm 1: pshufd $0x93,<xmm13=int6464#14,>xmm7=int6464#8
7152# asm 2: pshufd $0x93,<xmm13=%xmm13,>xmm7=%xmm7
7153pshufd $0x93,%xmm13,%xmm7
7154
7155# qhasm: xmm8 ^= xmm0
7156# asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9
7157# asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8
7158pxor %xmm0,%xmm8
7159
7160# qhasm: xmm9 ^= xmm1
7161# asm 1: pxor <xmm1=int6464#2,<xmm9=int6464#10
7162# asm 2: pxor <xmm1=%xmm1,<xmm9=%xmm9
7163pxor %xmm1,%xmm9
7164
7165# qhasm: xmm12 ^= xmm2
7166# asm 1: pxor <xmm2=int6464#3,<xmm12=int6464#13
7167# asm 2: pxor <xmm2=%xmm2,<xmm12=%xmm12
7168pxor %xmm2,%xmm12
7169
7170# qhasm: xmm14 ^= xmm3
7171# asm 1: pxor <xmm3=int6464#4,<xmm14=int6464#15
7172# asm 2: pxor <xmm3=%xmm3,<xmm14=%xmm14
7173pxor %xmm3,%xmm14
7174
7175# qhasm: xmm11 ^= xmm4
7176# asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#12
7177# asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm11
7178pxor %xmm4,%xmm11
7179
7180# qhasm: xmm15 ^= xmm5
7181# asm 1: pxor <xmm5=int6464#6,<xmm15=int6464#16
7182# asm 2: pxor <xmm5=%xmm5,<xmm15=%xmm15
7183pxor %xmm5,%xmm15
7184
7185# qhasm: xmm10 ^= xmm6
7186# asm 1: pxor <xmm6=int6464#7,<xmm10=int6464#11
7187# asm 2: pxor <xmm6=%xmm6,<xmm10=%xmm10
7188pxor %xmm6,%xmm10
7189
7190# qhasm: xmm13 ^= xmm7
7191# asm 1: pxor <xmm7=int6464#8,<xmm13=int6464#14
7192# asm 2: pxor <xmm7=%xmm7,<xmm13=%xmm13
7193pxor %xmm7,%xmm13
7194
7195# qhasm: xmm0 ^= xmm13
7196# asm 1: pxor <xmm13=int6464#14,<xmm0=int6464#1
7197# asm 2: pxor <xmm13=%xmm13,<xmm0=%xmm0
7198pxor %xmm13,%xmm0
7199
7200# qhasm: xmm1 ^= xmm8
7201# asm 1: pxor <xmm8=int6464#9,<xmm1=int6464#2
7202# asm 2: pxor <xmm8=%xmm8,<xmm1=%xmm1
7203pxor %xmm8,%xmm1
7204
7205# qhasm: xmm2 ^= xmm9
7206# asm 1: pxor <xmm9=int6464#10,<xmm2=int6464#3
7207# asm 2: pxor <xmm9=%xmm9,<xmm2=%xmm2
7208pxor %xmm9,%xmm2
7209
7210# qhasm: xmm1 ^= xmm13
7211# asm 1: pxor <xmm13=int6464#14,<xmm1=int6464#2
7212# asm 2: pxor <xmm13=%xmm13,<xmm1=%xmm1
7213pxor %xmm13,%xmm1
7214
7215# qhasm: xmm3 ^= xmm12
7216# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4
7217# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3
7218pxor %xmm12,%xmm3
7219
7220# qhasm: xmm4 ^= xmm14
7221# asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5
7222# asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4
7223pxor %xmm14,%xmm4
7224
7225# qhasm: xmm5 ^= xmm11
7226# asm 1: pxor <xmm11=int6464#12,<xmm5=int6464#6
7227# asm 2: pxor <xmm11=%xmm11,<xmm5=%xmm5
7228pxor %xmm11,%xmm5
7229
7230# qhasm: xmm3 ^= xmm13
7231# asm 1: pxor <xmm13=int6464#14,<xmm3=int6464#4
7232# asm 2: pxor <xmm13=%xmm13,<xmm3=%xmm3
7233pxor %xmm13,%xmm3
7234
7235# qhasm: xmm6 ^= xmm15
7236# asm 1: pxor <xmm15=int6464#16,<xmm6=int6464#7
7237# asm 2: pxor <xmm15=%xmm15,<xmm6=%xmm6
7238pxor %xmm15,%xmm6
7239
7240# qhasm: xmm7 ^= xmm10
7241# asm 1: pxor <xmm10=int6464#11,<xmm7=int6464#8
7242# asm 2: pxor <xmm10=%xmm10,<xmm7=%xmm7
7243pxor %xmm10,%xmm7
7244
7245# qhasm: xmm4 ^= xmm13
7246# asm 1: pxor <xmm13=int6464#14,<xmm4=int6464#5
7247# asm 2: pxor <xmm13=%xmm13,<xmm4=%xmm4
7248pxor %xmm13,%xmm4
7249
7250# qhasm: xmm8 = shuffle dwords of xmm8 by 0x4E
7251# asm 1: pshufd $0x4E,<xmm8=int6464#9,>xmm8=int6464#9
7252# asm 2: pshufd $0x4E,<xmm8=%xmm8,>xmm8=%xmm8
7253pshufd $0x4E,%xmm8,%xmm8
7254
7255# qhasm: xmm9 = shuffle dwords of xmm9 by 0x4E
7256# asm 1: pshufd $0x4E,<xmm9=int6464#10,>xmm9=int6464#10
7257# asm 2: pshufd $0x4E,<xmm9=%xmm9,>xmm9=%xmm9
7258pshufd $0x4E,%xmm9,%xmm9
7259
7260# qhasm: xmm12 = shuffle dwords of xmm12 by 0x4E
7261# asm 1: pshufd $0x4E,<xmm12=int6464#13,>xmm12=int6464#13
7262# asm 2: pshufd $0x4E,<xmm12=%xmm12,>xmm12=%xmm12
7263pshufd $0x4E,%xmm12,%xmm12
7264
7265# qhasm: xmm14 = shuffle dwords of xmm14 by 0x4E
7266# asm 1: pshufd $0x4E,<xmm14=int6464#15,>xmm14=int6464#15
7267# asm 2: pshufd $0x4E,<xmm14=%xmm14,>xmm14=%xmm14
7268pshufd $0x4E,%xmm14,%xmm14
7269
7270# qhasm: xmm11 = shuffle dwords of xmm11 by 0x4E
7271# asm 1: pshufd $0x4E,<xmm11=int6464#12,>xmm11=int6464#12
7272# asm 2: pshufd $0x4E,<xmm11=%xmm11,>xmm11=%xmm11
7273pshufd $0x4E,%xmm11,%xmm11
7274
7275# qhasm: xmm15 = shuffle dwords of xmm15 by 0x4E
7276# asm 1: pshufd $0x4E,<xmm15=int6464#16,>xmm15=int6464#16
7277# asm 2: pshufd $0x4E,<xmm15=%xmm15,>xmm15=%xmm15
7278pshufd $0x4E,%xmm15,%xmm15
7279
7280# qhasm: xmm10 = shuffle dwords of xmm10 by 0x4E
7281# asm 1: pshufd $0x4E,<xmm10=int6464#11,>xmm10=int6464#11
7282# asm 2: pshufd $0x4E,<xmm10=%xmm10,>xmm10=%xmm10
7283pshufd $0x4E,%xmm10,%xmm10
7284
7285# qhasm: xmm13 = shuffle dwords of xmm13 by 0x4E
7286# asm 1: pshufd $0x4E,<xmm13=int6464#14,>xmm13=int6464#14
7287# asm 2: pshufd $0x4E,<xmm13=%xmm13,>xmm13=%xmm13
7288pshufd $0x4E,%xmm13,%xmm13
7289
7290# qhasm: xmm0 ^= xmm8
7291# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
7292# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
7293pxor %xmm8,%xmm0
7294
7295# qhasm: xmm1 ^= xmm9
7296# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
7297# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
7298pxor %xmm9,%xmm1
7299
7300# qhasm: xmm2 ^= xmm12
7301# asm 1: pxor <xmm12=int6464#13,<xmm2=int6464#3
7302# asm 2: pxor <xmm12=%xmm12,<xmm2=%xmm2
7303pxor %xmm12,%xmm2
7304
7305# qhasm: xmm3 ^= xmm14
7306# asm 1: pxor <xmm14=int6464#15,<xmm3=int6464#4
7307# asm 2: pxor <xmm14=%xmm14,<xmm3=%xmm3
7308pxor %xmm14,%xmm3
7309
7310# qhasm: xmm4 ^= xmm11
7311# asm 1: pxor <xmm11=int6464#12,<xmm4=int6464#5
7312# asm 2: pxor <xmm11=%xmm11,<xmm4=%xmm4
7313pxor %xmm11,%xmm4
7314
7315# qhasm: xmm5 ^= xmm15
7316# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
7317# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
7318pxor %xmm15,%xmm5
7319
7320# qhasm: xmm6 ^= xmm10
7321# asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7
7322# asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6
7323pxor %xmm10,%xmm6
7324
7325# qhasm: xmm7 ^= xmm13
7326# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
7327# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
7328pxor %xmm13,%xmm7
7329
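# NOTE (annotation): AddRoundKey + ShiftRows for the following round;
# offsets 768..880 presumably select round key 6 (6 * 128 = 768).
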
7330# qhasm: xmm0 ^= *(int128 *)(c + 768)
7331# asm 1: pxor 768(<c=int64#4),<xmm0=int6464#1
7332# asm 2: pxor 768(<c=%rcx),<xmm0=%xmm0
7333pxor 768(%rcx),%xmm0
7334
7335# qhasm: shuffle bytes of xmm0 by SR
7336# asm 1: pshufb SR,<xmm0=int6464#1
7337# asm 2: pshufb SR,<xmm0=%xmm0
7338pshufb SR,%xmm0
7339
7340# qhasm: xmm1 ^= *(int128 *)(c + 784)
7341# asm 1: pxor 784(<c=int64#4),<xmm1=int6464#2
7342# asm 2: pxor 784(<c=%rcx),<xmm1=%xmm1
7343pxor 784(%rcx),%xmm1
7344
7345# qhasm: shuffle bytes of xmm1 by SR
7346# asm 1: pshufb SR,<xmm1=int6464#2
7347# asm 2: pshufb SR,<xmm1=%xmm1
7348pshufb SR,%xmm1
7349
7350# qhasm: xmm2 ^= *(int128 *)(c + 800)
7351# asm 1: pxor 800(<c=int64#4),<xmm2=int6464#3
7352# asm 2: pxor 800(<c=%rcx),<xmm2=%xmm2
7353pxor 800(%rcx),%xmm2
7354
7355# qhasm: shuffle bytes of xmm2 by SR
7356# asm 1: pshufb SR,<xmm2=int6464#3
7357# asm 2: pshufb SR,<xmm2=%xmm2
7358pshufb SR,%xmm2
7359
7360# qhasm: xmm3 ^= *(int128 *)(c + 816)
7361# asm 1: pxor 816(<c=int64#4),<xmm3=int6464#4
7362# asm 2: pxor 816(<c=%rcx),<xmm3=%xmm3
7363pxor 816(%rcx),%xmm3
7364
7365# qhasm: shuffle bytes of xmm3 by SR
7366# asm 1: pshufb SR,<xmm3=int6464#4
7367# asm 2: pshufb SR,<xmm3=%xmm3
7368pshufb SR,%xmm3
7369
7370# qhasm: xmm4 ^= *(int128 *)(c + 832)
7371# asm 1: pxor 832(<c=int64#4),<xmm4=int6464#5
7372# asm 2: pxor 832(<c=%rcx),<xmm4=%xmm4
7373pxor 832(%rcx),%xmm4
7374
7375# qhasm: shuffle bytes of xmm4 by SR
7376# asm 1: pshufb SR,<xmm4=int6464#5
7377# asm 2: pshufb SR,<xmm4=%xmm4
7378pshufb SR,%xmm4
7379
7380# qhasm: xmm5 ^= *(int128 *)(c + 848)
7381# asm 1: pxor 848(<c=int64#4),<xmm5=int6464#6
7382# asm 2: pxor 848(<c=%rcx),<xmm5=%xmm5
7383pxor 848(%rcx),%xmm5
7384
7385# qhasm: shuffle bytes of xmm5 by SR
7386# asm 1: pshufb SR,<xmm5=int6464#6
7387# asm 2: pshufb SR,<xmm5=%xmm5
7388pshufb SR,%xmm5
7389
7390# qhasm: xmm6 ^= *(int128 *)(c + 864)
7391# asm 1: pxor 864(<c=int64#4),<xmm6=int6464#7
7392# asm 2: pxor 864(<c=%rcx),<xmm6=%xmm6
7393pxor 864(%rcx),%xmm6
7394
7395# qhasm: shuffle bytes of xmm6 by SR
7396# asm 1: pshufb SR,<xmm6=int6464#7
7397# asm 2: pshufb SR,<xmm6=%xmm6
7398pshufb SR,%xmm6
7399
7400# qhasm: xmm7 ^= *(int128 *)(c + 880)
7401# asm 1: pxor 880(<c=int64#4),<xmm7=int6464#8
7402# asm 2: pxor 880(<c=%rcx),<xmm7=%xmm7
7403pxor 880(%rcx),%xmm7
7404
7405# qhasm: shuffle bytes of xmm7 by SR
7406# asm 1: pshufb SR,<xmm7=int6464#8
7407# asm 2: pshufb SR,<xmm7=%xmm7
7408pshufb SR,%xmm7
7409
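# NOTE (annotation): top linear layer of the next S-box evaluation,
# same pxor pattern as above.
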
7410# qhasm: xmm5 ^= xmm6
7411# asm 1: pxor <xmm6=int6464#7,<xmm5=int6464#6
7412# asm 2: pxor <xmm6=%xmm6,<xmm5=%xmm5
7413pxor %xmm6,%xmm5
7414
7415# qhasm: xmm2 ^= xmm1
7416# asm 1: pxor <xmm1=int6464#2,<xmm2=int6464#3
7417# asm 2: pxor <xmm1=%xmm1,<xmm2=%xmm2
7418pxor %xmm1,%xmm2
7419
7420# qhasm: xmm5 ^= xmm0
7421# asm 1: pxor <xmm0=int6464#1,<xmm5=int6464#6
7422# asm 2: pxor <xmm0=%xmm0,<xmm5=%xmm5
7423pxor %xmm0,%xmm5
7424
7425# qhasm: xmm6 ^= xmm2
7426# asm 1: pxor <xmm2=int6464#3,<xmm6=int6464#7
7427# asm 2: pxor <xmm2=%xmm2,<xmm6=%xmm6
7428pxor %xmm2,%xmm6
7429
7430# qhasm: xmm3 ^= xmm0
7431# asm 1: pxor <xmm0=int6464#1,<xmm3=int6464#4
7432# asm 2: pxor <xmm0=%xmm0,<xmm3=%xmm3
7433pxor %xmm0,%xmm3
7434
7435# qhasm: xmm6 ^= xmm3
7436# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
7437# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
7438pxor %xmm3,%xmm6
7439
7440# qhasm: xmm3 ^= xmm7
7441# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4
7442# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3
7443pxor %xmm7,%xmm3
7444
7445# qhasm: xmm3 ^= xmm4
7446# asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4
7447# asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3
7448pxor %xmm4,%xmm3
7449
7450# qhasm: xmm7 ^= xmm5
7451# asm 1: pxor <xmm5=int6464#6,<xmm7=int6464#8
7452# asm 2: pxor <xmm5=%xmm5,<xmm7=%xmm7
7453pxor %xmm5,%xmm7
7454
7455# qhasm: xmm3 ^= xmm1
7456# asm 1: pxor <xmm1=int6464#2,<xmm3=int6464#4
7457# asm 2: pxor <xmm1=%xmm1,<xmm3=%xmm3
7458pxor %xmm1,%xmm3
7459
7460# qhasm: xmm4 ^= xmm5
7461# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
7462# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
7463pxor %xmm5,%xmm4
7464
7465# qhasm: xmm2 ^= xmm7
7466# asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3
7467# asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2
7468pxor %xmm7,%xmm2
7469
7470# qhasm: xmm1 ^= xmm5
7471# asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2
7472# asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1
7473pxor %xmm5,%xmm1
7474
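# NOTE (annotation): the movdqa copies below begin the nonlinear core
# of this S-box evaluation, again using only pand/por/pxor on
# registers.
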
7475# qhasm: xmm11 = xmm7
7476# asm 1: movdqa <xmm7=int6464#8,>xmm11=int6464#9
7477# asm 2: movdqa <xmm7=%xmm7,>xmm11=%xmm8
7478movdqa %xmm7,%xmm8
7479
7480# qhasm: xmm10 = xmm1
7481# asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10
7482# asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9
7483movdqa %xmm1,%xmm9
7484
7485# qhasm: xmm9 = xmm5
7486# asm 1: movdqa <xmm5=int6464#6,>xmm9=int6464#11
7487# asm 2: movdqa <xmm5=%xmm5,>xmm9=%xmm10
7488movdqa %xmm5,%xmm10
7489
7490# qhasm: xmm13 = xmm2
7491# asm 1: movdqa <xmm2=int6464#3,>xmm13=int6464#12
7492# asm 2: movdqa <xmm2=%xmm2,>xmm13=%xmm11
7493movdqa %xmm2,%xmm11
7494
7495# qhasm: xmm12 = xmm6
7496# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#13
7497# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm12
7498movdqa %xmm6,%xmm12
7499
7500# qhasm: xmm11 ^= xmm4
7501# asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#9
7502# asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm8
7503pxor %xmm4,%xmm8
7504
7505# qhasm: xmm10 ^= xmm2
7506# asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#10
7507# asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm9
7508pxor %xmm2,%xmm9
7509
7510# qhasm: xmm9 ^= xmm3
7511# asm 1: pxor <xmm3=int6464#4,<xmm9=int6464#11
7512# asm 2: pxor <xmm3=%xmm3,<xmm9=%xmm10
7513pxor %xmm3,%xmm10
7514
7515# qhasm: xmm13 ^= xmm4
7516# asm 1: pxor <xmm4=int6464#5,<xmm13=int6464#12
7517# asm 2: pxor <xmm4=%xmm4,<xmm13=%xmm11
7518pxor %xmm4,%xmm11
7519
7520# qhasm: xmm12 ^= xmm0
7521# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13
7522# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12
7523pxor %xmm0,%xmm12
7524
7525# qhasm: xmm14 = xmm11
7526# asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14
7527# asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13
7528movdqa %xmm8,%xmm13
7529
7530# qhasm: xmm8 = xmm10
7531# asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15
7532# asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14
7533movdqa %xmm9,%xmm14
7534
7535# qhasm: xmm15 = xmm11
7536# asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16
7537# asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15
7538movdqa %xmm8,%xmm15
7539
7540# qhasm: xmm10 |= xmm9
7541# asm 1: por <xmm9=int6464#11,<xmm10=int6464#10
7542# asm 2: por <xmm9=%xmm10,<xmm10=%xmm9
7543por %xmm10,%xmm9
7544
7545# qhasm: xmm11 |= xmm12
7546# asm 1: por <xmm12=int6464#13,<xmm11=int6464#9
7547# asm 2: por <xmm12=%xmm12,<xmm11=%xmm8
7548por %xmm12,%xmm8
7549
7550# qhasm: xmm15 ^= xmm8
7551# asm 1: pxor <xmm8=int6464#15,<xmm15=int6464#16
7552# asm 2: pxor <xmm8=%xmm14,<xmm15=%xmm15
7553pxor %xmm14,%xmm15
7554
7555# qhasm: xmm14 &= xmm12
7556# asm 1: pand <xmm12=int6464#13,<xmm14=int6464#14
7557# asm 2: pand <xmm12=%xmm12,<xmm14=%xmm13
7558pand %xmm12,%xmm13
7559
7560# qhasm: xmm8 &= xmm9
7561# asm 1: pand <xmm9=int6464#11,<xmm8=int6464#15
7562# asm 2: pand <xmm9=%xmm10,<xmm8=%xmm14
7563pand %xmm10,%xmm14
7564
7565# qhasm: xmm12 ^= xmm9
7566# asm 1: pxor <xmm9=int6464#11,<xmm12=int6464#13
7567# asm 2: pxor <xmm9=%xmm10,<xmm12=%xmm12
7568pxor %xmm10,%xmm12
7569
7570# qhasm: xmm15 &= xmm12
7571# asm 1: pand <xmm12=int6464#13,<xmm15=int6464#16
7572# asm 2: pand <xmm12=%xmm12,<xmm15=%xmm15
7573pand %xmm12,%xmm15
7574
7575# qhasm: xmm12 = xmm3
7576# asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#11
7577# asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm10
7578movdqa %xmm3,%xmm10
7579
7580# qhasm: xmm12 ^= xmm0
7581# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#11
7582# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm10
7583pxor %xmm0,%xmm10
7584
7585# qhasm: xmm13 &= xmm12
7586# asm 1: pand <xmm12=int6464#11,<xmm13=int6464#12
7587# asm 2: pand <xmm12=%xmm10,<xmm13=%xmm11
7588pand %xmm10,%xmm11
7589
7590# qhasm: xmm11 ^= xmm13
7591# asm 1: pxor <xmm13=int6464#12,<xmm11=int6464#9
7592# asm 2: pxor <xmm13=%xmm11,<xmm11=%xmm8
7593pxor %xmm11,%xmm8
7594
7595# qhasm: xmm10 ^= xmm13
7596# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
7597# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
7598pxor %xmm11,%xmm9
7599
7600# qhasm: xmm13 = xmm7
7601# asm 1: movdqa <xmm7=int6464#8,>xmm13=int6464#11
7602# asm 2: movdqa <xmm7=%xmm7,>xmm13=%xmm10
7603movdqa %xmm7,%xmm10
7604
7605# qhasm: xmm13 ^= xmm1
7606# asm 1: pxor <xmm1=int6464#2,<xmm13=int6464#11
7607# asm 2: pxor <xmm1=%xmm1,<xmm13=%xmm10
7608pxor %xmm1,%xmm10
7609
7610# qhasm: xmm12 = xmm5
7611# asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#12
7612# asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm11
7613movdqa %xmm5,%xmm11
7614
7615# qhasm: xmm9 = xmm13
7616# asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13
7617# asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12
7618movdqa %xmm10,%xmm12
7619
7620# qhasm: xmm12 ^= xmm6
7621# asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#12
7622# asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm11
7623pxor %xmm6,%xmm11
7624
7625# qhasm: xmm9 |= xmm12
7626# asm 1: por <xmm12=int6464#12,<xmm9=int6464#13
7627# asm 2: por <xmm12=%xmm11,<xmm9=%xmm12
7628por %xmm11,%xmm12
7629
7630# qhasm: xmm13 &= xmm12
7631# asm 1: pand <xmm12=int6464#12,<xmm13=int6464#11
7632# asm 2: pand <xmm12=%xmm11,<xmm13=%xmm10
7633pand %xmm11,%xmm10
7634
7635# qhasm: xmm8 ^= xmm13
7636# asm 1: pxor <xmm13=int6464#11,<xmm8=int6464#15
7637# asm 2: pxor <xmm13=%xmm10,<xmm8=%xmm14
7638pxor %xmm10,%xmm14
7639
7640# qhasm: xmm11 ^= xmm15
7641# asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#9
7642# asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm8
7643pxor %xmm15,%xmm8
7644
7645# qhasm: xmm10 ^= xmm14
7646# asm 1: pxor <xmm14=int6464#14,<xmm10=int6464#10
7647# asm 2: pxor <xmm14=%xmm13,<xmm10=%xmm9
7648pxor %xmm13,%xmm9
7649
7650# qhasm: xmm9 ^= xmm15
7651# asm 1: pxor <xmm15=int6464#16,<xmm9=int6464#13
7652# asm 2: pxor <xmm15=%xmm15,<xmm9=%xmm12
7653pxor %xmm15,%xmm12
7654
7655# qhasm: xmm8 ^= xmm14
7656# asm 1: pxor <xmm14=int6464#14,<xmm8=int6464#15
7657# asm 2: pxor <xmm14=%xmm13,<xmm8=%xmm14
7658pxor %xmm13,%xmm14
7659
7660# qhasm: xmm9 ^= xmm14
7661# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
7662# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
7663pxor %xmm13,%xmm12
7664
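# Product terms: the four register copies made below are masked (pand/por)
# against other slice registers to form partial products for the inversion;
# the results fold back into the xmm8..xmm14 accumulators via pxor.
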
7665# qhasm: xmm12 = xmm2
7666# asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#11
7667# asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm10
7668movdqa %xmm2,%xmm10
7669
7670# qhasm: xmm13 = xmm4
7671# asm 1: movdqa <xmm4=int6464#5,>xmm13=int6464#12
7672# asm 2: movdqa <xmm4=%xmm4,>xmm13=%xmm11
7673movdqa %xmm4,%xmm11
7674
7675# qhasm: xmm14 = xmm1
7676# asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14
7677# asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13
7678movdqa %xmm1,%xmm13
7679
7680# qhasm: xmm15 = xmm7
7681# asm 1: movdqa <xmm7=int6464#8,>xmm15=int6464#16
7682# asm 2: movdqa <xmm7=%xmm7,>xmm15=%xmm15
7683movdqa %xmm7,%xmm15
7684
7685# qhasm: xmm12 &= xmm3
7686# asm 1: pand <xmm3=int6464#4,<xmm12=int6464#11
7687# asm 2: pand <xmm3=%xmm3,<xmm12=%xmm10
7688pand %xmm3,%xmm10
7689
7690# qhasm: xmm13 &= xmm0
7691# asm 1: pand <xmm0=int6464#1,<xmm13=int6464#12
7692# asm 2: pand <xmm0=%xmm0,<xmm13=%xmm11
7693pand %xmm0,%xmm11
7694
7695# qhasm: xmm14 &= xmm5
7696# asm 1: pand <xmm5=int6464#6,<xmm14=int6464#14
7697# asm 2: pand <xmm5=%xmm5,<xmm14=%xmm13
7698pand %xmm5,%xmm13
7699
7700# qhasm: xmm15 |= xmm6
7701# asm 1: por <xmm6=int6464#7,<xmm15=int6464#16
7702# asm 2: por <xmm6=%xmm6,<xmm15=%xmm15
7703por %xmm6,%xmm15
7704
7705# qhasm: xmm11 ^= xmm12
7706# asm 1: pxor <xmm12=int6464#11,<xmm11=int6464#9
7707# asm 2: pxor <xmm12=%xmm10,<xmm11=%xmm8
7708pxor %xmm10,%xmm8
7709
7710# qhasm: xmm10 ^= xmm13
7711# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
7712# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
7713pxor %xmm11,%xmm9
7714
7715# qhasm: xmm9 ^= xmm14
7716# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
7717# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
7718pxor %xmm13,%xmm12
7719
7720# qhasm: xmm8 ^= xmm15
7721# asm 1: pxor <xmm15=int6464#16,<xmm8=int6464#15
7722# asm 2: pxor <xmm15=%xmm15,<xmm8=%xmm14
7723pxor %xmm15,%xmm14
7724
7725# qhasm: xmm12 = xmm11
7726# asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11
7727# asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10
7728movdqa %xmm8,%xmm10
7729
7730# qhasm: xmm12 ^= xmm10
7731# asm 1: pxor <xmm10=int6464#10,<xmm12=int6464#11
7732# asm 2: pxor <xmm10=%xmm9,<xmm12=%xmm10
7733pxor %xmm9,%xmm10
7734
7735# qhasm: xmm11 &= xmm9
7736# asm 1: pand <xmm9=int6464#13,<xmm11=int6464#9
7737# asm 2: pand <xmm9=%xmm12,<xmm11=%xmm8
7738pand %xmm12,%xmm8
7739
7740# qhasm: xmm14 = xmm8
7741# asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12
7742# asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11
7743movdqa %xmm14,%xmm11
7744
7745# qhasm: xmm14 ^= xmm11
7746# asm 1: pxor <xmm11=int6464#9,<xmm14=int6464#12
7747# asm 2: pxor <xmm11=%xmm8,<xmm14=%xmm11
7748pxor %xmm8,%xmm11
7749
7750# qhasm: xmm15 = xmm12
7751# asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14
7752# asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13
7753movdqa %xmm10,%xmm13
7754
7755# qhasm: xmm15 &= xmm14
7756# asm 1: pand <xmm14=int6464#12,<xmm15=int6464#14
7757# asm 2: pand <xmm14=%xmm11,<xmm15=%xmm13
7758pand %xmm11,%xmm13
7759
7760# qhasm: xmm15 ^= xmm10
7761# asm 1: pxor <xmm10=int6464#10,<xmm15=int6464#14
7762# asm 2: pxor <xmm10=%xmm9,<xmm15=%xmm13
7763pxor %xmm9,%xmm13
7764
7765# qhasm: xmm13 = xmm9
7766# asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16
7767# asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15
7768movdqa %xmm12,%xmm15
7769
7770# qhasm: xmm13 ^= xmm8
7771# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
7772# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
7773pxor %xmm14,%xmm15
7774
7775# qhasm: xmm11 ^= xmm10
7776# asm 1: pxor <xmm10=int6464#10,<xmm11=int6464#9
7777# asm 2: pxor <xmm10=%xmm9,<xmm11=%xmm8
7778pxor %xmm9,%xmm8
7779
7780# qhasm: xmm13 &= xmm11
7781# asm 1: pand <xmm11=int6464#9,<xmm13=int6464#16
7782# asm 2: pand <xmm11=%xmm8,<xmm13=%xmm15
7783pand %xmm8,%xmm15
7784
7785# qhasm: xmm13 ^= xmm8
7786# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
7787# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
7788pxor %xmm14,%xmm15
7789
7790# qhasm: xmm9 ^= xmm13
7791# asm 1: pxor <xmm13=int6464#16,<xmm9=int6464#13
7792# asm 2: pxor <xmm13=%xmm15,<xmm9=%xmm12
7793pxor %xmm15,%xmm12
7794
7795# qhasm: xmm10 = xmm14
7796# asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9
7797# asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8
7798movdqa %xmm11,%xmm8
7799
7800# qhasm: xmm10 ^= xmm13
7801# asm 1: pxor <xmm13=int6464#16,<xmm10=int6464#9
7802# asm 2: pxor <xmm13=%xmm15,<xmm10=%xmm8
7803pxor %xmm15,%xmm8
7804
7805# qhasm: xmm10 &= xmm8
7806# asm 1: pand <xmm8=int6464#15,<xmm10=int6464#9
7807# asm 2: pand <xmm8=%xmm14,<xmm10=%xmm8
7808pand %xmm14,%xmm8
7809
7810# qhasm: xmm9 ^= xmm10
7811# asm 1: pxor <xmm10=int6464#9,<xmm9=int6464#13
7812# asm 2: pxor <xmm10=%xmm8,<xmm9=%xmm12
7813pxor %xmm8,%xmm12
7814
7815# qhasm: xmm14 ^= xmm10
7816# asm 1: pxor <xmm10=int6464#9,<xmm14=int6464#12
7817# asm 2: pxor <xmm10=%xmm8,<xmm14=%xmm11
7818pxor %xmm8,%xmm11
7819
7820# qhasm: xmm14 &= xmm15
7821# asm 1: pand <xmm15=int6464#14,<xmm14=int6464#12
7822# asm 2: pand <xmm15=%xmm13,<xmm14=%xmm11
7823pand %xmm13,%xmm11
7824
7825# qhasm: xmm14 ^= xmm12
7826# asm 1: pxor <xmm12=int6464#11,<xmm14=int6464#12
7827# asm 2: pxor <xmm12=%xmm10,<xmm14=%xmm11
7828pxor %xmm10,%xmm11
7829
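# Multiply-back stage: each pand/pxor cluster below combines one pair of
# state registers (first xmm6/xmm5, then xmm0/xmm3, xmm4/xmm2, xmm7/xmm1)
# with the shared temporaries xmm13..xmm15, lifting the small-field inverse
# back up to produce the S-box output bits.
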
7830# qhasm: xmm12 = xmm6
7831# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#9
7832# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm8
7833movdqa %xmm6,%xmm8
7834
7835# qhasm: xmm8 = xmm5
7836# asm 1: movdqa <xmm5=int6464#6,>xmm8=int6464#10
7837# asm 2: movdqa <xmm5=%xmm5,>xmm8=%xmm9
7838movdqa %xmm5,%xmm9
7839
7840# qhasm: xmm10 = xmm15
7841# asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11
7842# asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10
7843movdqa %xmm13,%xmm10
7844
7845# qhasm: xmm10 ^= xmm14
7846# asm 1: pxor <xmm14=int6464#12,<xmm10=int6464#11
7847# asm 2: pxor <xmm14=%xmm11,<xmm10=%xmm10
7848pxor %xmm11,%xmm10
7849
7850# qhasm: xmm10 &= xmm6
7851# asm 1: pand <xmm6=int6464#7,<xmm10=int6464#11
7852# asm 2: pand <xmm6=%xmm6,<xmm10=%xmm10
7853pand %xmm6,%xmm10
7854
7855# qhasm: xmm6 ^= xmm5
7856# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
7857# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
7858pxor %xmm5,%xmm6
7859
7860# qhasm: xmm6 &= xmm14
7861# asm 1: pand <xmm14=int6464#12,<xmm6=int6464#7
7862# asm 2: pand <xmm14=%xmm11,<xmm6=%xmm6
7863pand %xmm11,%xmm6
7864
7865# qhasm: xmm5 &= xmm15
7866# asm 1: pand <xmm15=int6464#14,<xmm5=int6464#6
7867# asm 2: pand <xmm15=%xmm13,<xmm5=%xmm5
7868pand %xmm13,%xmm5
7869
7870# qhasm: xmm6 ^= xmm5
7871# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
7872# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
7873pxor %xmm5,%xmm6
7874
7875# qhasm: xmm5 ^= xmm10
7876# asm 1: pxor <xmm10=int6464#11,<xmm5=int6464#6
7877# asm 2: pxor <xmm10=%xmm10,<xmm5=%xmm5
7878pxor %xmm10,%xmm5
7879
7880# qhasm: xmm12 ^= xmm0
7881# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#9
7882# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm8
7883pxor %xmm0,%xmm8
7884
7885# qhasm: xmm8 ^= xmm3
7886# asm 1: pxor <xmm3=int6464#4,<xmm8=int6464#10
7887# asm 2: pxor <xmm3=%xmm3,<xmm8=%xmm9
7888pxor %xmm3,%xmm9
7889
7890# qhasm: xmm15 ^= xmm13
7891# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
7892# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
7893pxor %xmm15,%xmm13
7894
7895# qhasm: xmm14 ^= xmm9
7896# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
7897# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
7898pxor %xmm12,%xmm11
7899
7900# qhasm: xmm11 = xmm15
7901# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
7902# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
7903movdqa %xmm13,%xmm10
7904
7905# qhasm: xmm11 ^= xmm14
7906# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
7907# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
7908pxor %xmm11,%xmm10
7909
7910# qhasm: xmm11 &= xmm12
7911# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
7912# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
7913pand %xmm8,%xmm10
7914
7915# qhasm: xmm12 ^= xmm8
7916# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
7917# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
7918pxor %xmm9,%xmm8
7919
7920# qhasm: xmm12 &= xmm14
7921# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
7922# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
7923pand %xmm11,%xmm8
7924
7925# qhasm: xmm8 &= xmm15
7926# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
7927# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
7928pand %xmm13,%xmm9
7929
7930# qhasm: xmm8 ^= xmm12
7931# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
7932# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
7933pxor %xmm8,%xmm9
7934
7935# qhasm: xmm12 ^= xmm11
7936# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
7937# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
7938pxor %xmm10,%xmm8
7939
7940# qhasm: xmm10 = xmm13
7941# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
7942# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
7943movdqa %xmm15,%xmm10
7944
7945# qhasm: xmm10 ^= xmm9
7946# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
7947# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
7948pxor %xmm12,%xmm10
7949
7950# qhasm: xmm10 &= xmm0
7951# asm 1: pand <xmm0=int6464#1,<xmm10=int6464#11
7952# asm 2: pand <xmm0=%xmm0,<xmm10=%xmm10
7953pand %xmm0,%xmm10
7954
7955# qhasm: xmm0 ^= xmm3
7956# asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1
7957# asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0
7958pxor %xmm3,%xmm0
7959
7960# qhasm: xmm0 &= xmm9
7961# asm 1: pand <xmm9=int6464#13,<xmm0=int6464#1
7962# asm 2: pand <xmm9=%xmm12,<xmm0=%xmm0
7963pand %xmm12,%xmm0
7964
7965# qhasm: xmm3 &= xmm13
7966# asm 1: pand <xmm13=int6464#16,<xmm3=int6464#4
7967# asm 2: pand <xmm13=%xmm15,<xmm3=%xmm3
7968pand %xmm15,%xmm3
7969
7970# qhasm: xmm0 ^= xmm3
7971# asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1
7972# asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0
7973pxor %xmm3,%xmm0
7974
7975# qhasm: xmm3 ^= xmm10
7976# asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4
7977# asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3
7978pxor %xmm10,%xmm3
7979
7980# qhasm: xmm6 ^= xmm12
7981# asm 1: pxor <xmm12=int6464#9,<xmm6=int6464#7
7982# asm 2: pxor <xmm12=%xmm8,<xmm6=%xmm6
7983pxor %xmm8,%xmm6
7984
7985# qhasm: xmm0 ^= xmm12
7986# asm 1: pxor <xmm12=int6464#9,<xmm0=int6464#1
7987# asm 2: pxor <xmm12=%xmm8,<xmm0=%xmm0
7988pxor %xmm8,%xmm0
7989
7990# qhasm: xmm5 ^= xmm8
7991# asm 1: pxor <xmm8=int6464#10,<xmm5=int6464#6
7992# asm 2: pxor <xmm8=%xmm9,<xmm5=%xmm5
7993pxor %xmm9,%xmm5
7994
7995# qhasm: xmm3 ^= xmm8
7996# asm 1: pxor <xmm8=int6464#10,<xmm3=int6464#4
7997# asm 2: pxor <xmm8=%xmm9,<xmm3=%xmm3
7998pxor %xmm9,%xmm3
7999
8000# qhasm: xmm12 = xmm7
8001# asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#9
8002# asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm8
8003movdqa %xmm7,%xmm8
8004
8005# qhasm: xmm8 = xmm1
8006# asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10
8007# asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9
8008movdqa %xmm1,%xmm9
8009
8010# qhasm: xmm12 ^= xmm4
8011# asm 1: pxor <xmm4=int6464#5,<xmm12=int6464#9
8012# asm 2: pxor <xmm4=%xmm4,<xmm12=%xmm8
8013pxor %xmm4,%xmm8
8014
8015# qhasm: xmm8 ^= xmm2
8016# asm 1: pxor <xmm2=int6464#3,<xmm8=int6464#10
8017# asm 2: pxor <xmm2=%xmm2,<xmm8=%xmm9
8018pxor %xmm2,%xmm9
8019
8020# qhasm: xmm11 = xmm15
8021# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
8022# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
8023movdqa %xmm13,%xmm10
8024
8025# qhasm: xmm11 ^= xmm14
8026# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
8027# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
8028pxor %xmm11,%xmm10
8029
8030# qhasm: xmm11 &= xmm12
8031# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
8032# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
8033pand %xmm8,%xmm10
8034
8035# qhasm: xmm12 ^= xmm8
8036# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
8037# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
8038pxor %xmm9,%xmm8
8039
8040# qhasm: xmm12 &= xmm14
8041# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
8042# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
8043pand %xmm11,%xmm8
8044
8045# qhasm: xmm8 &= xmm15
8046# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
8047# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
8048pand %xmm13,%xmm9
8049
8050# qhasm: xmm8 ^= xmm12
8051# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
8052# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
8053pxor %xmm8,%xmm9
8054
8055# qhasm: xmm12 ^= xmm11
8056# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
8057# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
8058pxor %xmm10,%xmm8
8059
8060# qhasm: xmm10 = xmm13
8061# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
8062# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
8063movdqa %xmm15,%xmm10
8064
8065# qhasm: xmm10 ^= xmm9
8066# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
8067# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
8068pxor %xmm12,%xmm10
8069
8070# qhasm: xmm10 &= xmm4
8071# asm 1: pand <xmm4=int6464#5,<xmm10=int6464#11
8072# asm 2: pand <xmm4=%xmm4,<xmm10=%xmm10
8073pand %xmm4,%xmm10
8074
8075# qhasm: xmm4 ^= xmm2
8076# asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5
8077# asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4
8078pxor %xmm2,%xmm4
8079
8080# qhasm: xmm4 &= xmm9
8081# asm 1: pand <xmm9=int6464#13,<xmm4=int6464#5
8082# asm 2: pand <xmm9=%xmm12,<xmm4=%xmm4
8083pand %xmm12,%xmm4
8084
8085# qhasm: xmm2 &= xmm13
8086# asm 1: pand <xmm13=int6464#16,<xmm2=int6464#3
8087# asm 2: pand <xmm13=%xmm15,<xmm2=%xmm2
8088pand %xmm15,%xmm2
8089
8090# qhasm: xmm4 ^= xmm2
8091# asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5
8092# asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4
8093pxor %xmm2,%xmm4
8094
8095# qhasm: xmm2 ^= xmm10
8096# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3
8097# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2
8098pxor %xmm10,%xmm2
8099
8100# qhasm: xmm15 ^= xmm13
8101# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
8102# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
8103pxor %xmm15,%xmm13
8104
8105# qhasm: xmm14 ^= xmm9
8106# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
8107# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
8108pxor %xmm12,%xmm11
8109
8110# qhasm: xmm11 = xmm15
8111# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
8112# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
8113movdqa %xmm13,%xmm10
8114
8115# qhasm: xmm11 ^= xmm14
8116# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
8117# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
8118pxor %xmm11,%xmm10
8119
8120# qhasm: xmm11 &= xmm7
8121# asm 1: pand <xmm7=int6464#8,<xmm11=int6464#11
8122# asm 2: pand <xmm7=%xmm7,<xmm11=%xmm10
8123pand %xmm7,%xmm10
8124
8125# qhasm: xmm7 ^= xmm1
8126# asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8
8127# asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7
8128pxor %xmm1,%xmm7
8129
8130# qhasm: xmm7 &= xmm14
8131# asm 1: pand <xmm14=int6464#12,<xmm7=int6464#8
8132# asm 2: pand <xmm14=%xmm11,<xmm7=%xmm7
8133pand %xmm11,%xmm7
8134
8135# qhasm: xmm1 &= xmm15
8136# asm 1: pand <xmm15=int6464#14,<xmm1=int6464#2
8137# asm 2: pand <xmm15=%xmm13,<xmm1=%xmm1
8138pand %xmm13,%xmm1
8139
8140# qhasm: xmm7 ^= xmm1
8141# asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8
8142# asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7
8143pxor %xmm1,%xmm7
8144
8145# qhasm: xmm1 ^= xmm11
8146# asm 1: pxor <xmm11=int6464#11,<xmm1=int6464#2
8147# asm 2: pxor <xmm11=%xmm10,<xmm1=%xmm1
8148pxor %xmm10,%xmm1
8149
8150# qhasm: xmm7 ^= xmm12
8151# asm 1: pxor <xmm12=int6464#9,<xmm7=int6464#8
8152# asm 2: pxor <xmm12=%xmm8,<xmm7=%xmm7
8153pxor %xmm8,%xmm7
8154
8155# qhasm: xmm4 ^= xmm12
8156# asm 1: pxor <xmm12=int6464#9,<xmm4=int6464#5
8157# asm 2: pxor <xmm12=%xmm8,<xmm4=%xmm4
8158pxor %xmm8,%xmm4
8159
8160# qhasm: xmm1 ^= xmm8
8161# asm 1: pxor <xmm8=int6464#10,<xmm1=int6464#2
8162# asm 2: pxor <xmm8=%xmm9,<xmm1=%xmm1
8163pxor %xmm9,%xmm1
8164
8165# qhasm: xmm2 ^= xmm8
8166# asm 1: pxor <xmm8=int6464#10,<xmm2=int6464#3
8167# asm 2: pxor <xmm8=%xmm9,<xmm2=%xmm2
8168pxor %xmm9,%xmm2
8169
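# Output linear layer of the S-box: a final pxor-only network maps the
# inversion result out of the tower basis, completing SubBytes for this
# round with the state back in xmm0..xmm7.
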
8170# qhasm: xmm7 ^= xmm0
8171# asm 1: pxor <xmm0=int6464#1,<xmm7=int6464#8
8172# asm 2: pxor <xmm0=%xmm0,<xmm7=%xmm7
8173pxor %xmm0,%xmm7
8174
8175# qhasm: xmm1 ^= xmm6
8176# asm 1: pxor <xmm6=int6464#7,<xmm1=int6464#2
8177# asm 2: pxor <xmm6=%xmm6,<xmm1=%xmm1
8178pxor %xmm6,%xmm1
8179
8180# qhasm: xmm4 ^= xmm7
8181# asm 1: pxor <xmm7=int6464#8,<xmm4=int6464#5
8182# asm 2: pxor <xmm7=%xmm7,<xmm4=%xmm4
8183pxor %xmm7,%xmm4
8184
8185# qhasm: xmm6 ^= xmm0
8186# asm 1: pxor <xmm0=int6464#1,<xmm6=int6464#7
8187# asm 2: pxor <xmm0=%xmm0,<xmm6=%xmm6
8188pxor %xmm0,%xmm6
8189
8190# qhasm: xmm0 ^= xmm1
8191# asm 1: pxor <xmm1=int6464#2,<xmm0=int6464#1
8192# asm 2: pxor <xmm1=%xmm1,<xmm0=%xmm0
8193pxor %xmm1,%xmm0
8194
8195# qhasm: xmm1 ^= xmm5
8196# asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2
8197# asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1
8198pxor %xmm5,%xmm1
8199
8200# qhasm: xmm5 ^= xmm2
8201# asm 1: pxor <xmm2=int6464#3,<xmm5=int6464#6
8202# asm 2: pxor <xmm2=%xmm2,<xmm5=%xmm5
8203pxor %xmm2,%xmm5
8204
8205# qhasm: xmm4 ^= xmm5
8206# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
8207# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
8208pxor %xmm5,%xmm4
8209
8210# qhasm: xmm2 ^= xmm3
8211# asm 1: pxor <xmm3=int6464#4,<xmm2=int6464#3
8212# asm 2: pxor <xmm3=%xmm3,<xmm2=%xmm2
8213pxor %xmm3,%xmm2
8214
8215# qhasm: xmm3 ^= xmm5
8216# asm 1: pxor <xmm5=int6464#6,<xmm3=int6464#4
8217# asm 2: pxor <xmm5=%xmm5,<xmm3=%xmm3
8218pxor %xmm5,%xmm3
8219
8220# qhasm: xmm6 ^= xmm3
8221# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
8222# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
8223pxor %xmm3,%xmm6
8224
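# MixColumns, first half: in this bitsliced layout MixColumns reduces to
# 32-bit word rotations of each slice register plus XORs. pshufd 0x93
# rotates the four dwords by one position; each rotated copy is then
# XORed with its unrotated slice.
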
8225# qhasm: xmm8 = shuffle dwords of xmm0 by 0x93
8226# asm 1: pshufd $0x93,<xmm0=int6464#1,>xmm8=int6464#9
8227# asm 2: pshufd $0x93,<xmm0=%xmm0,>xmm8=%xmm8
8228pshufd $0x93,%xmm0,%xmm8
8229
8230# qhasm: xmm9 = shuffle dwords of xmm1 by 0x93
8231# asm 1: pshufd $0x93,<xmm1=int6464#2,>xmm9=int6464#10
8232# asm 2: pshufd $0x93,<xmm1=%xmm1,>xmm9=%xmm9
8233pshufd $0x93,%xmm1,%xmm9
8234
8235# qhasm: xmm10 = shuffle dwords of xmm4 by 0x93
8236# asm 1: pshufd $0x93,<xmm4=int6464#5,>xmm10=int6464#11
8237# asm 2: pshufd $0x93,<xmm4=%xmm4,>xmm10=%xmm10
8238pshufd $0x93,%xmm4,%xmm10
8239
8240# qhasm: xmm11 = shuffle dwords of xmm6 by 0x93
8241# asm 1: pshufd $0x93,<xmm6=int6464#7,>xmm11=int6464#12
8242# asm 2: pshufd $0x93,<xmm6=%xmm6,>xmm11=%xmm11
8243pshufd $0x93,%xmm6,%xmm11
8244
8245# qhasm: xmm12 = shuffle dwords of xmm3 by 0x93
8246# asm 1: pshufd $0x93,<xmm3=int6464#4,>xmm12=int6464#13
8247# asm 2: pshufd $0x93,<xmm3=%xmm3,>xmm12=%xmm12
8248pshufd $0x93,%xmm3,%xmm12
8249
8250# qhasm: xmm13 = shuffle dwords of xmm7 by 0x93
8251# asm 1: pshufd $0x93,<xmm7=int6464#8,>xmm13=int6464#14
8252# asm 2: pshufd $0x93,<xmm7=%xmm7,>xmm13=%xmm13
8253pshufd $0x93,%xmm7,%xmm13
8254
8255# qhasm: xmm14 = shuffle dwords of xmm2 by 0x93
8256# asm 1: pshufd $0x93,<xmm2=int6464#3,>xmm14=int6464#15
8257# asm 2: pshufd $0x93,<xmm2=%xmm2,>xmm14=%xmm14
8258pshufd $0x93,%xmm2,%xmm14
8259
8260# qhasm: xmm15 = shuffle dwords of xmm5 by 0x93
8261# asm 1: pshufd $0x93,<xmm5=int6464#6,>xmm15=int6464#16
8262# asm 2: pshufd $0x93,<xmm5=%xmm5,>xmm15=%xmm15
8263pshufd $0x93,%xmm5,%xmm15
8264
8265# qhasm: xmm0 ^= xmm8
8266# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
8267# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
8268pxor %xmm8,%xmm0
8269
8270# qhasm: xmm1 ^= xmm9
8271# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
8272# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
8273pxor %xmm9,%xmm1
8274
8275# qhasm: xmm4 ^= xmm10
8276# asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5
8277# asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4
8278pxor %xmm10,%xmm4
8279
8280# qhasm: xmm6 ^= xmm11
8281# asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7
8282# asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6
8283pxor %xmm11,%xmm6
8284
8285# qhasm: xmm3 ^= xmm12
8286# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4
8287# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3
8288pxor %xmm12,%xmm3
8289
8290# qhasm: xmm7 ^= xmm13
8291# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
8292# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
8293pxor %xmm13,%xmm7
8294
8295# qhasm: xmm2 ^= xmm14
8296# asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3
8297# asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2
8298pxor %xmm14,%xmm2
8299
8300# qhasm: xmm5 ^= xmm15
8301# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
8302# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
8303pxor %xmm15,%xmm5
8304
8305# qhasm: xmm8 ^= xmm5
8306# asm 1: pxor <xmm5=int6464#6,<xmm8=int6464#9
8307# asm 2: pxor <xmm5=%xmm5,<xmm8=%xmm8
8308pxor %xmm5,%xmm8
8309
8310# qhasm: xmm9 ^= xmm0
8311# asm 1: pxor <xmm0=int6464#1,<xmm9=int6464#10
8312# asm 2: pxor <xmm0=%xmm0,<xmm9=%xmm9
8313pxor %xmm0,%xmm9
8314
8315# qhasm: xmm10 ^= xmm1
8316# asm 1: pxor <xmm1=int6464#2,<xmm10=int6464#11
8317# asm 2: pxor <xmm1=%xmm1,<xmm10=%xmm10
8318pxor %xmm1,%xmm10
8319
8320# qhasm: xmm9 ^= xmm5
8321# asm 1: pxor <xmm5=int6464#6,<xmm9=int6464#10
8322# asm 2: pxor <xmm5=%xmm5,<xmm9=%xmm9
8323pxor %xmm5,%xmm9
8324
8325# qhasm: xmm11 ^= xmm4
8326# asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#12
8327# asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm11
8328pxor %xmm4,%xmm11
8329
8330# qhasm: xmm12 ^= xmm6
8331# asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#13
8332# asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm12
8333pxor %xmm6,%xmm12
8334
8335# qhasm: xmm13 ^= xmm3
8336# asm 1: pxor <xmm3=int6464#4,<xmm13=int6464#14
8337# asm 2: pxor <xmm3=%xmm3,<xmm13=%xmm13
8338pxor %xmm3,%xmm13
8339
8340# qhasm: xmm11 ^= xmm5
8341# asm 1: pxor <xmm5=int6464#6,<xmm11=int6464#12
8342# asm 2: pxor <xmm5=%xmm5,<xmm11=%xmm11
8343pxor %xmm5,%xmm11
8344
8345# qhasm: xmm14 ^= xmm7
8346# asm 1: pxor <xmm7=int6464#8,<xmm14=int6464#15
8347# asm 2: pxor <xmm7=%xmm7,<xmm14=%xmm14
8348pxor %xmm7,%xmm14
8349
8350# qhasm: xmm15 ^= xmm2
8351# asm 1: pxor <xmm2=int6464#3,<xmm15=int6464#16
8352# asm 2: pxor <xmm2=%xmm2,<xmm15=%xmm15
8353pxor %xmm2,%xmm15
8354
8355# qhasm: xmm12 ^= xmm5
8356# asm 1: pxor <xmm5=int6464#6,<xmm12=int6464#13
8357# asm 2: pxor <xmm5=%xmm5,<xmm12=%xmm12
8358pxor %xmm5,%xmm12
8359
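# MixColumns, second half: pshufd 0x4E swaps the two 64-bit halves of each
# register (a rotation by two 32-bit words); XORing these into the
# xmm8..xmm15 accumulators finishes MixColumns, so the round output lands
# in xmm8..xmm15.
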
8360# qhasm: xmm0 = shuffle dwords of xmm0 by 0x4E
8361# asm 1: pshufd $0x4E,<xmm0=int6464#1,>xmm0=int6464#1
8362# asm 2: pshufd $0x4E,<xmm0=%xmm0,>xmm0=%xmm0
8363pshufd $0x4E,%xmm0,%xmm0
8364
8365# qhasm: xmm1 = shuffle dwords of xmm1 by 0x4E
8366# asm 1: pshufd $0x4E,<xmm1=int6464#2,>xmm1=int6464#2
8367# asm 2: pshufd $0x4E,<xmm1=%xmm1,>xmm1=%xmm1
8368pshufd $0x4E,%xmm1,%xmm1
8369
8370# qhasm: xmm4 = shuffle dwords of xmm4 by 0x4E
8371# asm 1: pshufd $0x4E,<xmm4=int6464#5,>xmm4=int6464#5
8372# asm 2: pshufd $0x4E,<xmm4=%xmm4,>xmm4=%xmm4
8373pshufd $0x4E,%xmm4,%xmm4
8374
8375# qhasm: xmm6 = shuffle dwords of xmm6 by 0x4E
8376# asm 1: pshufd $0x4E,<xmm6=int6464#7,>xmm6=int6464#7
8377# asm 2: pshufd $0x4E,<xmm6=%xmm6,>xmm6=%xmm6
8378pshufd $0x4E,%xmm6,%xmm6
8379
8380# qhasm: xmm3 = shuffle dwords of xmm3 by 0x4E
8381# asm 1: pshufd $0x4E,<xmm3=int6464#4,>xmm3=int6464#4
8382# asm 2: pshufd $0x4E,<xmm3=%xmm3,>xmm3=%xmm3
8383pshufd $0x4E,%xmm3,%xmm3
8384
8385# qhasm: xmm7 = shuffle dwords of xmm7 by 0x4E
8386# asm 1: pshufd $0x4E,<xmm7=int6464#8,>xmm7=int6464#8
8387# asm 2: pshufd $0x4E,<xmm7=%xmm7,>xmm7=%xmm7
8388pshufd $0x4E,%xmm7,%xmm7
8389
8390# qhasm: xmm2 = shuffle dwords of xmm2 by 0x4E
8391# asm 1: pshufd $0x4E,<xmm2=int6464#3,>xmm2=int6464#3
8392# asm 2: pshufd $0x4E,<xmm2=%xmm2,>xmm2=%xmm2
8393pshufd $0x4E,%xmm2,%xmm2
8394
8395# qhasm: xmm5 = shuffle dwords of xmm5 by 0x4E
8396# asm 1: pshufd $0x4E,<xmm5=int6464#6,>xmm5=int6464#6
8397# asm 2: pshufd $0x4E,<xmm5=%xmm5,>xmm5=%xmm5
8398pshufd $0x4E,%xmm5,%xmm5
8399
8400# qhasm: xmm8 ^= xmm0
8401# asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9
8402# asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8
8403pxor %xmm0,%xmm8
8404
8405# qhasm: xmm9 ^= xmm1
8406# asm 1: pxor <xmm1=int6464#2,<xmm9=int6464#10
8407# asm 2: pxor <xmm1=%xmm1,<xmm9=%xmm9
8408pxor %xmm1,%xmm9
8409
8410# qhasm: xmm10 ^= xmm4
8411# asm 1: pxor <xmm4=int6464#5,<xmm10=int6464#11
8412# asm 2: pxor <xmm4=%xmm4,<xmm10=%xmm10
8413pxor %xmm4,%xmm10
8414
8415# qhasm: xmm11 ^= xmm6
8416# asm 1: pxor <xmm6=int6464#7,<xmm11=int6464#12
8417# asm 2: pxor <xmm6=%xmm6,<xmm11=%xmm11
8418pxor %xmm6,%xmm11
8419
8420# qhasm: xmm12 ^= xmm3
8421# asm 1: pxor <xmm3=int6464#4,<xmm12=int6464#13
8422# asm 2: pxor <xmm3=%xmm3,<xmm12=%xmm12
8423pxor %xmm3,%xmm12
8424
8425# qhasm: xmm13 ^= xmm7
8426# asm 1: pxor <xmm7=int6464#8,<xmm13=int6464#14
8427# asm 2: pxor <xmm7=%xmm7,<xmm13=%xmm13
8428pxor %xmm7,%xmm13
8429
8430# qhasm: xmm14 ^= xmm2
8431# asm 1: pxor <xmm2=int6464#3,<xmm14=int6464#15
8432# asm 2: pxor <xmm2=%xmm2,<xmm14=%xmm14
8433pxor %xmm2,%xmm14
8434
8435# qhasm: xmm15 ^= xmm5
8436# asm 1: pxor <xmm5=int6464#6,<xmm15=int6464#16
8437# asm 2: pxor <xmm5=%xmm5,<xmm15=%xmm15
8438pxor %xmm5,%xmm15
8439
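# Next round, state now in xmm8..xmm15: AddRoundKey XORs in the eight
# bitsliced round-key words at c + 896..1008 (128 bytes of expanded key
# per round), and pshufb by SR again performs ShiftRows.
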
8440# qhasm: xmm8 ^= *(int128 *)(c + 896)
8441# asm 1: pxor 896(<c=int64#4),<xmm8=int6464#9
8442# asm 2: pxor 896(<c=%rcx),<xmm8=%xmm8
8443pxor 896(%rcx),%xmm8
8444
8445# qhasm: shuffle bytes of xmm8 by SR
8446# asm 1: pshufb SR,<xmm8=int6464#9
8447# asm 2: pshufb SR,<xmm8=%xmm8
8448pshufb SR,%xmm8
8449
8450# qhasm: xmm9 ^= *(int128 *)(c + 912)
8451# asm 1: pxor 912(<c=int64#4),<xmm9=int6464#10
8452# asm 2: pxor 912(<c=%rcx),<xmm9=%xmm9
8453pxor 912(%rcx),%xmm9
8454
8455# qhasm: shuffle bytes of xmm9 by SR
8456# asm 1: pshufb SR,<xmm9=int6464#10
8457# asm 2: pshufb SR,<xmm9=%xmm9
8458pshufb SR,%xmm9
8459
8460# qhasm: xmm10 ^= *(int128 *)(c + 928)
8461# asm 1: pxor 928(<c=int64#4),<xmm10=int6464#11
8462# asm 2: pxor 928(<c=%rcx),<xmm10=%xmm10
8463pxor 928(%rcx),%xmm10
8464
8465# qhasm: shuffle bytes of xmm10 by SR
8466# asm 1: pshufb SR,<xmm10=int6464#11
8467# asm 2: pshufb SR,<xmm10=%xmm10
8468pshufb SR,%xmm10
8469
8470# qhasm: xmm11 ^= *(int128 *)(c + 944)
8471# asm 1: pxor 944(<c=int64#4),<xmm11=int6464#12
8472# asm 2: pxor 944(<c=%rcx),<xmm11=%xmm11
8473pxor 944(%rcx),%xmm11
8474
8475# qhasm: shuffle bytes of xmm11 by SR
8476# asm 1: pshufb SR,<xmm11=int6464#12
8477# asm 2: pshufb SR,<xmm11=%xmm11
8478pshufb SR,%xmm11
8479
8480# qhasm: xmm12 ^= *(int128 *)(c + 960)
8481# asm 1: pxor 960(<c=int64#4),<xmm12=int6464#13
8482# asm 2: pxor 960(<c=%rcx),<xmm12=%xmm12
8483pxor 960(%rcx),%xmm12
8484
8485# qhasm: shuffle bytes of xmm12 by SR
8486# asm 1: pshufb SR,<xmm12=int6464#13
8487# asm 2: pshufb SR,<xmm12=%xmm12
8488pshufb SR,%xmm12
8489
8490# qhasm: xmm13 ^= *(int128 *)(c + 976)
8491# asm 1: pxor 976(<c=int64#4),<xmm13=int6464#14
8492# asm 2: pxor 976(<c=%rcx),<xmm13=%xmm13
8493pxor 976(%rcx),%xmm13
8494
8495# qhasm: shuffle bytes of xmm13 by SR
8496# asm 1: pshufb SR,<xmm13=int6464#14
8497# asm 2: pshufb SR,<xmm13=%xmm13
8498pshufb SR,%xmm13
8499
8500# qhasm: xmm14 ^= *(int128 *)(c + 992)
8501# asm 1: pxor 992(<c=int64#4),<xmm14=int6464#15
8502# asm 2: pxor 992(<c=%rcx),<xmm14=%xmm14
8503pxor 992(%rcx),%xmm14
8504
8505# qhasm: shuffle bytes of xmm14 by SR
8506# asm 1: pshufb SR,<xmm14=int6464#15
8507# asm 2: pshufb SR,<xmm14=%xmm14
8508pshufb SR,%xmm14
8509
8510# qhasm: xmm15 ^= *(int128 *)(c + 1008)
8511# asm 1: pxor 1008(<c=int64#4),<xmm15=int6464#16
8512# asm 2: pxor 1008(<c=%rcx),<xmm15=%xmm15
8513pxor 1008(%rcx),%xmm15
8514
8515# qhasm: shuffle bytes of xmm15 by SR
8516# asm 1: pshufb SR,<xmm15=int6464#16
8517# asm 2: pshufb SR,<xmm15=%xmm15
8518pshufb SR,%xmm15
8519
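# S-box input layer for this round: the same basis-change pxor network as
# before, operating on the xmm8..xmm15 slices.
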
8520# qhasm: xmm13 ^= xmm14
8521# asm 1: pxor <xmm14=int6464#15,<xmm13=int6464#14
8522# asm 2: pxor <xmm14=%xmm14,<xmm13=%xmm13
8523pxor %xmm14,%xmm13
8524
8525# qhasm: xmm10 ^= xmm9
8526# asm 1: pxor <xmm9=int6464#10,<xmm10=int6464#11
8527# asm 2: pxor <xmm9=%xmm9,<xmm10=%xmm10
8528pxor %xmm9,%xmm10
8529
8530# qhasm: xmm13 ^= xmm8
8531# asm 1: pxor <xmm8=int6464#9,<xmm13=int6464#14
8532# asm 2: pxor <xmm8=%xmm8,<xmm13=%xmm13
8533pxor %xmm8,%xmm13
8534
8535# qhasm: xmm14 ^= xmm10
8536# asm 1: pxor <xmm10=int6464#11,<xmm14=int6464#15
8537# asm 2: pxor <xmm10=%xmm10,<xmm14=%xmm14
8538pxor %xmm10,%xmm14
8539
8540# qhasm: xmm11 ^= xmm8
8541# asm 1: pxor <xmm8=int6464#9,<xmm11=int6464#12
8542# asm 2: pxor <xmm8=%xmm8,<xmm11=%xmm11
8543pxor %xmm8,%xmm11
8544
8545# qhasm: xmm14 ^= xmm11
8546# asm 1: pxor <xmm11=int6464#12,<xmm14=int6464#15
8547# asm 2: pxor <xmm11=%xmm11,<xmm14=%xmm14
8548pxor %xmm11,%xmm14
8549
8550# qhasm: xmm11 ^= xmm15
8551# asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#12
8552# asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm11
8553pxor %xmm15,%xmm11
8554
8555# qhasm: xmm11 ^= xmm12
8556# asm 1: pxor <xmm12=int6464#13,<xmm11=int6464#12
8557# asm 2: pxor <xmm12=%xmm12,<xmm11=%xmm11
8558pxor %xmm12,%xmm11
8559
8560# qhasm: xmm15 ^= xmm13
8561# asm 1: pxor <xmm13=int6464#14,<xmm15=int6464#16
8562# asm 2: pxor <xmm13=%xmm13,<xmm15=%xmm15
8563pxor %xmm13,%xmm15
8564
8565# qhasm: xmm11 ^= xmm9
8566# asm 1: pxor <xmm9=int6464#10,<xmm11=int6464#12
8567# asm 2: pxor <xmm9=%xmm9,<xmm11=%xmm11
8568pxor %xmm9,%xmm11
8569
8570# qhasm: xmm12 ^= xmm13
8571# asm 1: pxor <xmm13=int6464#14,<xmm12=int6464#13
8572# asm 2: pxor <xmm13=%xmm13,<xmm12=%xmm12
8573pxor %xmm13,%xmm12
8574
8575# qhasm: xmm10 ^= xmm15
8576# asm 1: pxor <xmm15=int6464#16,<xmm10=int6464#11
8577# asm 2: pxor <xmm15=%xmm15,<xmm10=%xmm10
8578pxor %xmm15,%xmm10
8579
8580# qhasm: xmm9 ^= xmm13
8581# asm 1: pxor <xmm13=int6464#14,<xmm9=int6464#10
8582# asm 2: pxor <xmm13=%xmm13,<xmm9=%xmm9
8583pxor %xmm13,%xmm9
8584
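# Inversion core, second instance: the same AND/OR/XOR circuit for the
# parallel GF(2^8) inversion, with xmm0..xmm7 now used as scratch.
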
8585# qhasm: xmm3 = xmm15
8586# asm 1: movdqa <xmm15=int6464#16,>xmm3=int6464#1
8587# asm 2: movdqa <xmm15=%xmm15,>xmm3=%xmm0
8588movdqa %xmm15,%xmm0
8589
8590# qhasm: xmm2 = xmm9
8591# asm 1: movdqa <xmm9=int6464#10,>xmm2=int6464#2
8592# asm 2: movdqa <xmm9=%xmm9,>xmm2=%xmm1
8593movdqa %xmm9,%xmm1
8594
8595# qhasm: xmm1 = xmm13
8596# asm 1: movdqa <xmm13=int6464#14,>xmm1=int6464#3
8597# asm 2: movdqa <xmm13=%xmm13,>xmm1=%xmm2
8598movdqa %xmm13,%xmm2
8599
8600# qhasm: xmm5 = xmm10
8601# asm 1: movdqa <xmm10=int6464#11,>xmm5=int6464#4
8602# asm 2: movdqa <xmm10=%xmm10,>xmm5=%xmm3
8603movdqa %xmm10,%xmm3
8604
8605# qhasm: xmm4 = xmm14
8606# asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#5
8607# asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm4
8608movdqa %xmm14,%xmm4
8609
8610# qhasm: xmm3 ^= xmm12
8611# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#1
8612# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm0
8613pxor %xmm12,%xmm0
8614
8615# qhasm: xmm2 ^= xmm10
8616# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#2
8617# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm1
8618pxor %xmm10,%xmm1
8619
8620# qhasm: xmm1 ^= xmm11
8621# asm 1: pxor <xmm11=int6464#12,<xmm1=int6464#3
8622# asm 2: pxor <xmm11=%xmm11,<xmm1=%xmm2
8623pxor %xmm11,%xmm2
8624
8625# qhasm: xmm5 ^= xmm12
8626# asm 1: pxor <xmm12=int6464#13,<xmm5=int6464#4
8627# asm 2: pxor <xmm12=%xmm12,<xmm5=%xmm3
8628pxor %xmm12,%xmm3
8629
8630# qhasm: xmm4 ^= xmm8
8631# asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#5
8632# asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm4
8633pxor %xmm8,%xmm4
8634
8635# qhasm: xmm6 = xmm3
8636# asm 1: movdqa <xmm3=int6464#1,>xmm6=int6464#6
8637# asm 2: movdqa <xmm3=%xmm0,>xmm6=%xmm5
8638movdqa %xmm0,%xmm5
8639
8640# qhasm: xmm0 = xmm2
8641# asm 1: movdqa <xmm2=int6464#2,>xmm0=int6464#7
8642# asm 2: movdqa <xmm2=%xmm1,>xmm0=%xmm6
8643movdqa %xmm1,%xmm6
8644
8645# qhasm: xmm7 = xmm3
8646# asm 1: movdqa <xmm3=int6464#1,>xmm7=int6464#8
8647# asm 2: movdqa <xmm3=%xmm0,>xmm7=%xmm7
8648movdqa %xmm0,%xmm7
8649
8650# qhasm: xmm2 |= xmm1
8651# asm 1: por <xmm1=int6464#3,<xmm2=int6464#2
8652# asm 2: por <xmm1=%xmm2,<xmm2=%xmm1
8653por %xmm2,%xmm1
8654
8655# qhasm: xmm3 |= xmm4
8656# asm 1: por <xmm4=int6464#5,<xmm3=int6464#1
8657# asm 2: por <xmm4=%xmm4,<xmm3=%xmm0
8658por %xmm4,%xmm0
8659
8660# qhasm: xmm7 ^= xmm0
8661# asm 1: pxor <xmm0=int6464#7,<xmm7=int6464#8
8662# asm 2: pxor <xmm0=%xmm6,<xmm7=%xmm7
8663pxor %xmm6,%xmm7
8664
8665# qhasm: xmm6 &= xmm4
8666# asm 1: pand <xmm4=int6464#5,<xmm6=int6464#6
8667# asm 2: pand <xmm4=%xmm4,<xmm6=%xmm5
8668pand %xmm4,%xmm5
8669
8670# qhasm: xmm0 &= xmm1
8671# asm 1: pand <xmm1=int6464#3,<xmm0=int6464#7
8672# asm 2: pand <xmm1=%xmm2,<xmm0=%xmm6
8673pand %xmm2,%xmm6
8674
8675# qhasm: xmm4 ^= xmm1
8676# asm 1: pxor <xmm1=int6464#3,<xmm4=int6464#5
8677# asm 2: pxor <xmm1=%xmm2,<xmm4=%xmm4
8678pxor %xmm2,%xmm4
8679
8680# qhasm: xmm7 &= xmm4
8681# asm 1: pand <xmm4=int6464#5,<xmm7=int6464#8
8682# asm 2: pand <xmm4=%xmm4,<xmm7=%xmm7
8683pand %xmm4,%xmm7
8684
8685# qhasm: xmm4 = xmm11
8686# asm 1: movdqa <xmm11=int6464#12,>xmm4=int6464#3
8687# asm 2: movdqa <xmm11=%xmm11,>xmm4=%xmm2
8688movdqa %xmm11,%xmm2
8689
8690# qhasm: xmm4 ^= xmm8
8691# asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#3
8692# asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm2
8693pxor %xmm8,%xmm2
8694
8695# qhasm: xmm5 &= xmm4
8696# asm 1: pand <xmm4=int6464#3,<xmm5=int6464#4
8697# asm 2: pand <xmm4=%xmm2,<xmm5=%xmm3
8698pand %xmm2,%xmm3
8699
8700# qhasm: xmm3 ^= xmm5
8701# asm 1: pxor <xmm5=int6464#4,<xmm3=int6464#1
8702# asm 2: pxor <xmm5=%xmm3,<xmm3=%xmm0
8703pxor %xmm3,%xmm0
8704
8705# qhasm: xmm2 ^= xmm5
8706# asm 1: pxor <xmm5=int6464#4,<xmm2=int6464#2
8707# asm 2: pxor <xmm5=%xmm3,<xmm2=%xmm1
8708pxor %xmm3,%xmm1
8709
8710# qhasm: xmm5 = xmm15
8711# asm 1: movdqa <xmm15=int6464#16,>xmm5=int6464#3
8712# asm 2: movdqa <xmm15=%xmm15,>xmm5=%xmm2
8713movdqa %xmm15,%xmm2
8714
8715# qhasm: xmm5 ^= xmm9
8716# asm 1: pxor <xmm9=int6464#10,<xmm5=int6464#3
8717# asm 2: pxor <xmm9=%xmm9,<xmm5=%xmm2
8718pxor %xmm9,%xmm2
8719
8720# qhasm: xmm4 = xmm13
8721# asm 1: movdqa <xmm13=int6464#14,>xmm4=int6464#4
8722# asm 2: movdqa <xmm13=%xmm13,>xmm4=%xmm3
8723movdqa %xmm13,%xmm3
8724
8725# qhasm: xmm1 = xmm5
8726# asm 1: movdqa <xmm5=int6464#3,>xmm1=int6464#5
8727# asm 2: movdqa <xmm5=%xmm2,>xmm1=%xmm4
8728movdqa %xmm2,%xmm4
8729
8730# qhasm: xmm4 ^= xmm14
8731# asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#4
8732# asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm3
8733pxor %xmm14,%xmm3
8734
8735# qhasm: xmm1 |= xmm4
8736# asm 1: por <xmm4=int6464#4,<xmm1=int6464#5
8737# asm 2: por <xmm4=%xmm3,<xmm1=%xmm4
8738por %xmm3,%xmm4
8739
8740# qhasm: xmm5 &= xmm4
8741# asm 1: pand <xmm4=int6464#4,<xmm5=int6464#3
8742# asm 2: pand <xmm4=%xmm3,<xmm5=%xmm2
8743pand %xmm3,%xmm2
8744
8745# qhasm: xmm0 ^= xmm5
8746# asm 1: pxor <xmm5=int6464#3,<xmm0=int6464#7
8747# asm 2: pxor <xmm5=%xmm2,<xmm0=%xmm6
8748pxor %xmm2,%xmm6
8749
8750# qhasm: xmm3 ^= xmm7
8751# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#1
8752# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm0
8753pxor %xmm7,%xmm0
8754
8755# qhasm: xmm2 ^= xmm6
8756# asm 1: pxor <xmm6=int6464#6,<xmm2=int6464#2
8757# asm 2: pxor <xmm6=%xmm5,<xmm2=%xmm1
8758pxor %xmm5,%xmm1
8759
8760# qhasm: xmm1 ^= xmm7
8761# asm 1: pxor <xmm7=int6464#8,<xmm1=int6464#5
8762# asm 2: pxor <xmm7=%xmm7,<xmm1=%xmm4
8763pxor %xmm7,%xmm4
8764
8765# qhasm: xmm0 ^= xmm6
8766# asm 1: pxor <xmm6=int6464#6,<xmm0=int6464#7
8767# asm 2: pxor <xmm6=%xmm5,<xmm0=%xmm6
8768pxor %xmm5,%xmm6
8769
8770# qhasm: xmm1 ^= xmm6
8771# asm 1: pxor <xmm6=int6464#6,<xmm1=int6464#5
8772# asm 2: pxor <xmm6=%xmm5,<xmm1=%xmm4
8773pxor %xmm5,%xmm4
8774
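# Product terms, second instance: copies of the scratch values are masked
# (pand/por) against the round state held in xmm8..xmm15 and folded into
# the xmm0..xmm3 accumulators.
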
8775# qhasm: xmm4 = xmm10
8776# asm 1: movdqa <xmm10=int6464#11,>xmm4=int6464#3
8777# asm 2: movdqa <xmm10=%xmm10,>xmm4=%xmm2
8778movdqa %xmm10,%xmm2
8779
8780# qhasm: xmm5 = xmm12
8781# asm 1: movdqa <xmm12=int6464#13,>xmm5=int6464#4
8782# asm 2: movdqa <xmm12=%xmm12,>xmm5=%xmm3
8783movdqa %xmm12,%xmm3
8784
8785# qhasm: xmm6 = xmm9
8786# asm 1: movdqa <xmm9=int6464#10,>xmm6=int6464#6
8787# asm 2: movdqa <xmm9=%xmm9,>xmm6=%xmm5
8788movdqa %xmm9,%xmm5
8789
8790# qhasm: xmm7 = xmm15
8791# asm 1: movdqa <xmm15=int6464#16,>xmm7=int6464#8
8792# asm 2: movdqa <xmm15=%xmm15,>xmm7=%xmm7
8793movdqa %xmm15,%xmm7
8794
8795# qhasm: xmm4 &= xmm11
8796# asm 1: pand <xmm11=int6464#12,<xmm4=int6464#3
8797# asm 2: pand <xmm11=%xmm11,<xmm4=%xmm2
8798pand %xmm11,%xmm2
8799
8800# qhasm: xmm5 &= xmm8
8801# asm 1: pand <xmm8=int6464#9,<xmm5=int6464#4
8802# asm 2: pand <xmm8=%xmm8,<xmm5=%xmm3
8803pand %xmm8,%xmm3
8804
8805# qhasm: xmm6 &= xmm13
8806# asm 1: pand <xmm13=int6464#14,<xmm6=int6464#6
8807# asm 2: pand <xmm13=%xmm13,<xmm6=%xmm5
8808pand %xmm13,%xmm5
8809
8810# qhasm: xmm7 |= xmm14
8811# asm 1: por <xmm14=int6464#15,<xmm7=int6464#8
8812# asm 2: por <xmm14=%xmm14,<xmm7=%xmm7
8813por %xmm14,%xmm7
8814
8815# qhasm: xmm3 ^= xmm4
8816# asm 1: pxor <xmm4=int6464#3,<xmm3=int6464#1
8817# asm 2: pxor <xmm4=%xmm2,<xmm3=%xmm0
8818pxor %xmm2,%xmm0
8819
8820# qhasm: xmm2 ^= xmm5
8821# asm 1: pxor <xmm5=int6464#4,<xmm2=int6464#2
8822# asm 2: pxor <xmm5=%xmm3,<xmm2=%xmm1
8823pxor %xmm3,%xmm1
8824
8825# qhasm: xmm1 ^= xmm6
8826# asm 1: pxor <xmm6=int6464#6,<xmm1=int6464#5
8827# asm 2: pxor <xmm6=%xmm5,<xmm1=%xmm4
8828pxor %xmm5,%xmm4
8829
8830# qhasm: xmm0 ^= xmm7
8831# asm 1: pxor <xmm7=int6464#8,<xmm0=int6464#7
8832# asm 2: pxor <xmm7=%xmm7,<xmm0=%xmm6
8833pxor %xmm7,%xmm6
8834
8835# qhasm: xmm4 = xmm3
8836# asm 1: movdqa <xmm3=int6464#1,>xmm4=int6464#3
8837# asm 2: movdqa <xmm3=%xmm0,>xmm4=%xmm2
8838movdqa %xmm0,%xmm2
8839
8840# qhasm: xmm4 ^= xmm2
8841# asm 1: pxor <xmm2=int6464#2,<xmm4=int6464#3
8842# asm 2: pxor <xmm2=%xmm1,<xmm4=%xmm2
8843pxor %xmm1,%xmm2
8844
8845# qhasm: xmm3 &= xmm1
8846# asm 1: pand <xmm1=int6464#5,<xmm3=int6464#1
8847# asm 2: pand <xmm1=%xmm4,<xmm3=%xmm0
8848pand %xmm4,%xmm0
8849
8850# qhasm: xmm6 = xmm0
8851# asm 1: movdqa <xmm0=int6464#7,>xmm6=int6464#4
8852# asm 2: movdqa <xmm0=%xmm6,>xmm6=%xmm3
8853movdqa %xmm6,%xmm3
8854
8855# qhasm: xmm6 ^= xmm3
8856# asm 1: pxor <xmm3=int6464#1,<xmm6=int6464#4
8857# asm 2: pxor <xmm3=%xmm0,<xmm6=%xmm3
8858pxor %xmm0,%xmm3
8859
8860# qhasm: xmm7 = xmm4
8861# asm 1: movdqa <xmm4=int6464#3,>xmm7=int6464#6
8862# asm 2: movdqa <xmm4=%xmm2,>xmm7=%xmm5
8863movdqa %xmm2,%xmm5
8864
8865# qhasm: xmm7 &= xmm6
8866# asm 1: pand <xmm6=int6464#4,<xmm7=int6464#6
8867# asm 2: pand <xmm6=%xmm3,<xmm7=%xmm5
8868pand %xmm3,%xmm5
8869
8870# qhasm: xmm7 ^= xmm2
8871# asm 1: pxor <xmm2=int6464#2,<xmm7=int6464#6
8872# asm 2: pxor <xmm2=%xmm1,<xmm7=%xmm5
8873pxor %xmm1,%xmm5
8874
8875# qhasm: xmm5 = xmm1
8876# asm 1: movdqa <xmm1=int6464#5,>xmm5=int6464#8
8877# asm 2: movdqa <xmm1=%xmm4,>xmm5=%xmm7
8878movdqa %xmm4,%xmm7
8879
8880# qhasm: xmm5 ^= xmm0
8881# asm 1: pxor <xmm0=int6464#7,<xmm5=int6464#8
8882# asm 2: pxor <xmm0=%xmm6,<xmm5=%xmm7
8883pxor %xmm6,%xmm7
8884
8885# qhasm: xmm3 ^= xmm2
8886# asm 1: pxor <xmm2=int6464#2,<xmm3=int6464#1
8887# asm 2: pxor <xmm2=%xmm1,<xmm3=%xmm0
8888pxor %xmm1,%xmm0
8889
8890# qhasm: xmm5 &= xmm3
8891# asm 1: pand <xmm3=int6464#1,<xmm5=int6464#8
8892# asm 2: pand <xmm3=%xmm0,<xmm5=%xmm7
8893pand %xmm0,%xmm7
8894
8895# qhasm: xmm5 ^= xmm0
8896# asm 1: pxor <xmm0=int6464#7,<xmm5=int6464#8
8897# asm 2: pxor <xmm0=%xmm6,<xmm5=%xmm7
8898pxor %xmm6,%xmm7
8899
8900# qhasm: xmm1 ^= xmm5
8901# asm 1: pxor <xmm5=int6464#8,<xmm1=int6464#5
8902# asm 2: pxor <xmm5=%xmm7,<xmm1=%xmm4
8903pxor %xmm7,%xmm4
8904
8905# qhasm: xmm2 = xmm6
8906# asm 1: movdqa <xmm6=int6464#4,>xmm2=int6464#1
8907# asm 2: movdqa <xmm6=%xmm3,>xmm2=%xmm0
8908movdqa %xmm3,%xmm0
8909
8910# qhasm: xmm2 ^= xmm5
8911# asm 1: pxor <xmm5=int6464#8,<xmm2=int6464#1
8912# asm 2: pxor <xmm5=%xmm7,<xmm2=%xmm0
8913pxor %xmm7,%xmm0
8914
8915# qhasm: xmm2 &= xmm0
8916# asm 1: pand <xmm0=int6464#7,<xmm2=int6464#1
8917# asm 2: pand <xmm0=%xmm6,<xmm2=%xmm0
8918pand %xmm6,%xmm0
8919
8920# qhasm: xmm1 ^= xmm2
8921# asm 1: pxor <xmm2=int6464#1,<xmm1=int6464#5
8922# asm 2: pxor <xmm2=%xmm0,<xmm1=%xmm4
8923pxor %xmm0,%xmm4
8924
8925# qhasm: xmm6 ^= xmm2
8926# asm 1: pxor <xmm2=int6464#1,<xmm6=int6464#4
8927# asm 2: pxor <xmm2=%xmm0,<xmm6=%xmm3
8928pxor %xmm0,%xmm3
8929
8930# qhasm: xmm6 &= xmm7
8931# asm 1: pand <xmm7=int6464#6,<xmm6=int6464#4
8932# asm 2: pand <xmm7=%xmm5,<xmm6=%xmm3
8933pand %xmm5,%xmm3
8934
8935# qhasm: xmm6 ^= xmm4
8936# asm 1: pxor <xmm4=int6464#3,<xmm6=int6464#4
8937# asm 2: pxor <xmm4=%xmm2,<xmm6=%xmm3
8938pxor %xmm2,%xmm3
8939
8940# qhasm: xmm4 = xmm14
8941# asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#1
8942# asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm0
8943movdqa %xmm14,%xmm0
8944
8945# qhasm: xmm0 = xmm13
8946# asm 1: movdqa <xmm13=int6464#14,>xmm0=int6464#2
8947# asm 2: movdqa <xmm13=%xmm13,>xmm0=%xmm1
8948movdqa %xmm13,%xmm1
8949
8950# qhasm: xmm2 = xmm7
8951# asm 1: movdqa <xmm7=int6464#6,>xmm2=int6464#3
8952# asm 2: movdqa <xmm7=%xmm5,>xmm2=%xmm2
8953movdqa %xmm5,%xmm2
8954
8955# qhasm: xmm2 ^= xmm6
8956# asm 1: pxor <xmm6=int6464#4,<xmm2=int6464#3
8957# asm 2: pxor <xmm6=%xmm3,<xmm2=%xmm2
8958pxor %xmm3,%xmm2
8959
8960# qhasm: xmm2 &= xmm14
8961# asm 1: pand <xmm14=int6464#15,<xmm2=int6464#3
8962# asm 2: pand <xmm14=%xmm14,<xmm2=%xmm2
8963pand %xmm14,%xmm2
8964
8965# qhasm: xmm14 ^= xmm13
8966# asm 1: pxor <xmm13=int6464#14,<xmm14=int6464#15
8967# asm 2: pxor <xmm13=%xmm13,<xmm14=%xmm14
8968pxor %xmm13,%xmm14
8969
8970# qhasm: xmm14 &= xmm6
8971# asm 1: pand <xmm6=int6464#4,<xmm14=int6464#15
8972# asm 2: pand <xmm6=%xmm3,<xmm14=%xmm14
8973pand %xmm3,%xmm14
8974
8975# qhasm: xmm13 &= xmm7
8976# asm 1: pand <xmm7=int6464#6,<xmm13=int6464#14
8977# asm 2: pand <xmm7=%xmm5,<xmm13=%xmm13
8978pand %xmm5,%xmm13
8979
8980# qhasm: xmm14 ^= xmm13
8981# asm 1: pxor <xmm13=int6464#14,<xmm14=int6464#15
8982# asm 2: pxor <xmm13=%xmm13,<xmm14=%xmm14
8983pxor %xmm13,%xmm14
8984
8985# qhasm: xmm13 ^= xmm2
8986# asm 1: pxor <xmm2=int6464#3,<xmm13=int6464#14
8987# asm 2: pxor <xmm2=%xmm2,<xmm13=%xmm13
8988pxor %xmm2,%xmm13
8989
8990# qhasm: xmm4 ^= xmm8
8991# asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#1
8992# asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm0
8993pxor %xmm8,%xmm0
8994
8995# qhasm: xmm0 ^= xmm11
8996# asm 1: pxor <xmm11=int6464#12,<xmm0=int6464#2
8997# asm 2: pxor <xmm11=%xmm11,<xmm0=%xmm1
8998pxor %xmm11,%xmm1
8999
9000# qhasm: xmm7 ^= xmm5
9001# asm 1: pxor <xmm5=int6464#8,<xmm7=int6464#6
9002# asm 2: pxor <xmm5=%xmm7,<xmm7=%xmm5
9003pxor %xmm7,%xmm5
9004
9005# qhasm: xmm6 ^= xmm1
9006# asm 1: pxor <xmm1=int6464#5,<xmm6=int6464#4
9007# asm 2: pxor <xmm1=%xmm4,<xmm6=%xmm3
9008pxor %xmm4,%xmm3
9009
9010# qhasm: xmm3 = xmm7
9011# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
9012# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
9013movdqa %xmm5,%xmm2
9014
9015# qhasm: xmm3 ^= xmm6
9016# asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3
9017# asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2
9018pxor %xmm3,%xmm2
9019
9020# qhasm: xmm3 &= xmm4
9021# asm 1: pand <xmm4=int6464#1,<xmm3=int6464#3
9022# asm 2: pand <xmm4=%xmm0,<xmm3=%xmm2
9023pand %xmm0,%xmm2
9024
9025# qhasm: xmm4 ^= xmm0
9026# asm 1: pxor <xmm0=int6464#2,<xmm4=int6464#1
9027# asm 2: pxor <xmm0=%xmm1,<xmm4=%xmm0
9028pxor %xmm1,%xmm0
9029
9030# qhasm: xmm4 &= xmm6
9031# asm 1: pand <xmm6=int6464#4,<xmm4=int6464#1
9032# asm 2: pand <xmm6=%xmm3,<xmm4=%xmm0
9033pand %xmm3,%xmm0
9034
9035# qhasm: xmm0 &= xmm7
9036# asm 1: pand <xmm7=int6464#6,<xmm0=int6464#2
9037# asm 2: pand <xmm7=%xmm5,<xmm0=%xmm1
9038pand %xmm5,%xmm1
9039
9040# qhasm: xmm0 ^= xmm4
9041# asm 1: pxor <xmm4=int6464#1,<xmm0=int6464#2
9042# asm 2: pxor <xmm4=%xmm0,<xmm0=%xmm1
9043pxor %xmm0,%xmm1
9044
9045# qhasm: xmm4 ^= xmm3
9046# asm 1: pxor <xmm3=int6464#3,<xmm4=int6464#1
9047# asm 2: pxor <xmm3=%xmm2,<xmm4=%xmm0
9048pxor %xmm2,%xmm0
9049
9050# qhasm: xmm2 = xmm5
9051# asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3
9052# asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2
9053movdqa %xmm7,%xmm2
9054
9055# qhasm: xmm2 ^= xmm1
9056# asm 1: pxor <xmm1=int6464#5,<xmm2=int6464#3
9057# asm 2: pxor <xmm1=%xmm4,<xmm2=%xmm2
9058pxor %xmm4,%xmm2
9059
9060# qhasm: xmm2 &= xmm8
9061# asm 1: pand <xmm8=int6464#9,<xmm2=int6464#3
9062# asm 2: pand <xmm8=%xmm8,<xmm2=%xmm2
9063pand %xmm8,%xmm2
9064
9065# qhasm: xmm8 ^= xmm11
9066# asm 1: pxor <xmm11=int6464#12,<xmm8=int6464#9
9067# asm 2: pxor <xmm11=%xmm11,<xmm8=%xmm8
9068pxor %xmm11,%xmm8
9069
9070# qhasm: xmm8 &= xmm1
9071# asm 1: pand <xmm1=int6464#5,<xmm8=int6464#9
9072# asm 2: pand <xmm1=%xmm4,<xmm8=%xmm8
9073pand %xmm4,%xmm8
9074
9075# qhasm: xmm11 &= xmm5
9076# asm 1: pand <xmm5=int6464#8,<xmm11=int6464#12
9077# asm 2: pand <xmm5=%xmm7,<xmm11=%xmm11
9078pand %xmm7,%xmm11
9079
9080# qhasm: xmm8 ^= xmm11
9081# asm 1: pxor <xmm11=int6464#12,<xmm8=int6464#9
9082# asm 2: pxor <xmm11=%xmm11,<xmm8=%xmm8
9083pxor %xmm11,%xmm8
9084
9085# qhasm: xmm11 ^= xmm2
9086# asm 1: pxor <xmm2=int6464#3,<xmm11=int6464#12
9087# asm 2: pxor <xmm2=%xmm2,<xmm11=%xmm11
9088pxor %xmm2,%xmm11
9089
9090# qhasm: xmm14 ^= xmm4
9091# asm 1: pxor <xmm4=int6464#1,<xmm14=int6464#15
9092# asm 2: pxor <xmm4=%xmm0,<xmm14=%xmm14
9093pxor %xmm0,%xmm14
9094
9095# qhasm: xmm8 ^= xmm4
9096# asm 1: pxor <xmm4=int6464#1,<xmm8=int6464#9
9097# asm 2: pxor <xmm4=%xmm0,<xmm8=%xmm8
9098pxor %xmm0,%xmm8
9099
9100# qhasm: xmm13 ^= xmm0
9101# asm 1: pxor <xmm0=int6464#2,<xmm13=int6464#14
9102# asm 2: pxor <xmm0=%xmm1,<xmm13=%xmm13
9103pxor %xmm1,%xmm13
9104
9105# qhasm: xmm11 ^= xmm0
9106# asm 1: pxor <xmm0=int6464#2,<xmm11=int6464#12
9107# asm 2: pxor <xmm0=%xmm1,<xmm11=%xmm11
9108pxor %xmm1,%xmm11
9109
9110# qhasm: xmm4 = xmm15
9111# asm 1: movdqa <xmm15=int6464#16,>xmm4=int6464#1
9112# asm 2: movdqa <xmm15=%xmm15,>xmm4=%xmm0
9113movdqa %xmm15,%xmm0
9114
9115# qhasm: xmm0 = xmm9
9116# asm 1: movdqa <xmm9=int6464#10,>xmm0=int6464#2
9117# asm 2: movdqa <xmm9=%xmm9,>xmm0=%xmm1
9118movdqa %xmm9,%xmm1
9119
9120# qhasm: xmm4 ^= xmm12
9121# asm 1: pxor <xmm12=int6464#13,<xmm4=int6464#1
9122# asm 2: pxor <xmm12=%xmm12,<xmm4=%xmm0
9123pxor %xmm12,%xmm0
9124
9125# qhasm: xmm0 ^= xmm10
9126# asm 1: pxor <xmm10=int6464#11,<xmm0=int6464#2
9127# asm 2: pxor <xmm10=%xmm10,<xmm0=%xmm1
9128pxor %xmm10,%xmm1
9129
9130# qhasm: xmm3 = xmm7
9131# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
9132# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
9133movdqa %xmm5,%xmm2
9134
9135# qhasm: xmm3 ^= xmm6
9136# asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3
9137# asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2
9138pxor %xmm3,%xmm2
9139
9140# qhasm: xmm3 &= xmm4
9141# asm 1: pand <xmm4=int6464#1,<xmm3=int6464#3
9142# asm 2: pand <xmm4=%xmm0,<xmm3=%xmm2
9143pand %xmm0,%xmm2
9144
9145# qhasm: xmm4 ^= xmm0
9146# asm 1: pxor <xmm0=int6464#2,<xmm4=int6464#1
9147# asm 2: pxor <xmm0=%xmm1,<xmm4=%xmm0
9148pxor %xmm1,%xmm0
9149
9150# qhasm: xmm4 &= xmm6
9151# asm 1: pand <xmm6=int6464#4,<xmm4=int6464#1
9152# asm 2: pand <xmm6=%xmm3,<xmm4=%xmm0
9153pand %xmm3,%xmm0
9154
9155# qhasm: xmm0 &= xmm7
9156# asm 1: pand <xmm7=int6464#6,<xmm0=int6464#2
9157# asm 2: pand <xmm7=%xmm5,<xmm0=%xmm1
9158pand %xmm5,%xmm1
9159
9160# qhasm: xmm0 ^= xmm4
9161# asm 1: pxor <xmm4=int6464#1,<xmm0=int6464#2
9162# asm 2: pxor <xmm4=%xmm0,<xmm0=%xmm1
9163pxor %xmm0,%xmm1
9164
9165# qhasm: xmm4 ^= xmm3
9166# asm 1: pxor <xmm3=int6464#3,<xmm4=int6464#1
9167# asm 2: pxor <xmm3=%xmm2,<xmm4=%xmm0
9168pxor %xmm2,%xmm0
9169
9170# qhasm: xmm2 = xmm5
9171# asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3
9172# asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2
9173movdqa %xmm7,%xmm2
9174
9175# qhasm: xmm2 ^= xmm1
9176# asm 1: pxor <xmm1=int6464#5,<xmm2=int6464#3
9177# asm 2: pxor <xmm1=%xmm4,<xmm2=%xmm2
9178pxor %xmm4,%xmm2
9179
9180# qhasm: xmm2 &= xmm12
9181# asm 1: pand <xmm12=int6464#13,<xmm2=int6464#3
9182# asm 2: pand <xmm12=%xmm12,<xmm2=%xmm2
9183pand %xmm12,%xmm2
9184
9185# qhasm: xmm12 ^= xmm10
9186# asm 1: pxor <xmm10=int6464#11,<xmm12=int6464#13
9187# asm 2: pxor <xmm10=%xmm10,<xmm12=%xmm12
9188pxor %xmm10,%xmm12
9189
9190# qhasm: xmm12 &= xmm1
9191# asm 1: pand <xmm1=int6464#5,<xmm12=int6464#13
9192# asm 2: pand <xmm1=%xmm4,<xmm12=%xmm12
9193pand %xmm4,%xmm12
9194
9195# qhasm: xmm10 &= xmm5
9196# asm 1: pand <xmm5=int6464#8,<xmm10=int6464#11
9197# asm 2: pand <xmm5=%xmm7,<xmm10=%xmm10
9198pand %xmm7,%xmm10
9199
9200# qhasm: xmm12 ^= xmm10
9201# asm 1: pxor <xmm10=int6464#11,<xmm12=int6464#13
9202# asm 2: pxor <xmm10=%xmm10,<xmm12=%xmm12
9203pxor %xmm10,%xmm12
9204
9205# qhasm: xmm10 ^= xmm2
9206# asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#11
9207# asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm10
9208pxor %xmm2,%xmm10
9209
9210# qhasm: xmm7 ^= xmm5
9211# asm 1: pxor <xmm5=int6464#8,<xmm7=int6464#6
9212# asm 2: pxor <xmm5=%xmm7,<xmm7=%xmm5
9213pxor %xmm7,%xmm5
9214
9215# qhasm: xmm6 ^= xmm1
9216# asm 1: pxor <xmm1=int6464#5,<xmm6=int6464#4
9217# asm 2: pxor <xmm1=%xmm4,<xmm6=%xmm3
9218pxor %xmm4,%xmm3
9219
9220# qhasm: xmm3 = xmm7
9221# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
9222# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
9223movdqa %xmm5,%xmm2
9224
9225# qhasm: xmm3 ^= xmm6
9226# asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3
9227# asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2
9228pxor %xmm3,%xmm2
9229
9230# qhasm: xmm3 &= xmm15
9231# asm 1: pand <xmm15=int6464#16,<xmm3=int6464#3
9232# asm 2: pand <xmm15=%xmm15,<xmm3=%xmm2
9233pand %xmm15,%xmm2
9234
9235# qhasm: xmm15 ^= xmm9
9236# asm 1: pxor <xmm9=int6464#10,<xmm15=int6464#16
9237# asm 2: pxor <xmm9=%xmm9,<xmm15=%xmm15
9238pxor %xmm9,%xmm15
9239
9240# qhasm: xmm15 &= xmm6
9241# asm 1: pand <xmm6=int6464#4,<xmm15=int6464#16
9242# asm 2: pand <xmm6=%xmm3,<xmm15=%xmm15
9243pand %xmm3,%xmm15
9244
9245# qhasm: xmm9 &= xmm7
9246# asm 1: pand <xmm7=int6464#6,<xmm9=int6464#10
9247# asm 2: pand <xmm7=%xmm5,<xmm9=%xmm9
9248pand %xmm5,%xmm9
9249
9250# qhasm: xmm15 ^= xmm9
9251# asm 1: pxor <xmm9=int6464#10,<xmm15=int6464#16
9252# asm 2: pxor <xmm9=%xmm9,<xmm15=%xmm15
9253pxor %xmm9,%xmm15
9254
9255# qhasm: xmm9 ^= xmm3
9256# asm 1: pxor <xmm3=int6464#3,<xmm9=int6464#10
9257# asm 2: pxor <xmm3=%xmm2,<xmm9=%xmm9
9258pxor %xmm2,%xmm9
9259
9260# qhasm: xmm15 ^= xmm4
9261# asm 1: pxor <xmm4=int6464#1,<xmm15=int6464#16
9262# asm 2: pxor <xmm4=%xmm0,<xmm15=%xmm15
9263pxor %xmm0,%xmm15
9264
9265# qhasm: xmm12 ^= xmm4
9266# asm 1: pxor <xmm4=int6464#1,<xmm12=int6464#13
9267# asm 2: pxor <xmm4=%xmm0,<xmm12=%xmm12
9268pxor %xmm0,%xmm12
9269
9270# qhasm: xmm9 ^= xmm0
9271# asm 1: pxor <xmm0=int6464#2,<xmm9=int6464#10
9272# asm 2: pxor <xmm0=%xmm1,<xmm9=%xmm9
9273pxor %xmm1,%xmm9
9274
9275# qhasm: xmm10 ^= xmm0
9276# asm 1: pxor <xmm0=int6464#2,<xmm10=int6464#11
9277# asm 2: pxor <xmm0=%xmm1,<xmm10=%xmm10
9278pxor %xmm1,%xmm10
9279
9280# qhasm: xmm15 ^= xmm8
9281# asm 1: pxor <xmm8=int6464#9,<xmm15=int6464#16
9282# asm 2: pxor <xmm8=%xmm8,<xmm15=%xmm15
9283pxor %xmm8,%xmm15
9284
9285# qhasm: xmm9 ^= xmm14
9286# asm 1: pxor <xmm14=int6464#15,<xmm9=int6464#10
9287# asm 2: pxor <xmm14=%xmm14,<xmm9=%xmm9
9288pxor %xmm14,%xmm9
9289
9290# qhasm: xmm12 ^= xmm15
9291# asm 1: pxor <xmm15=int6464#16,<xmm12=int6464#13
9292# asm 2: pxor <xmm15=%xmm15,<xmm12=%xmm12
9293pxor %xmm15,%xmm12
9294
9295# qhasm: xmm14 ^= xmm8
9296# asm 1: pxor <xmm8=int6464#9,<xmm14=int6464#15
9297# asm 2: pxor <xmm8=%xmm8,<xmm14=%xmm14
9298pxor %xmm8,%xmm14
9299
9300# qhasm: xmm8 ^= xmm9
9301# asm 1: pxor <xmm9=int6464#10,<xmm8=int6464#9
9302# asm 2: pxor <xmm9=%xmm9,<xmm8=%xmm8
9303pxor %xmm9,%xmm8
9304
9305# qhasm: xmm9 ^= xmm13
9306# asm 1: pxor <xmm13=int6464#14,<xmm9=int6464#10
9307# asm 2: pxor <xmm13=%xmm13,<xmm9=%xmm9
9308pxor %xmm13,%xmm9
9309
9310# qhasm: xmm13 ^= xmm10
9311# asm 1: pxor <xmm10=int6464#11,<xmm13=int6464#14
9312# asm 2: pxor <xmm10=%xmm10,<xmm13=%xmm13
9313pxor %xmm10,%xmm13
9314
9315# qhasm: xmm12 ^= xmm13
9316# asm 1: pxor <xmm13=int6464#14,<xmm12=int6464#13
9317# asm 2: pxor <xmm13=%xmm13,<xmm12=%xmm12
9318pxor %xmm13,%xmm12
9319
9320# qhasm: xmm10 ^= xmm11
9321# asm 1: pxor <xmm11=int6464#12,<xmm10=int6464#11
9322# asm 2: pxor <xmm11=%xmm11,<xmm10=%xmm10
9323pxor %xmm11,%xmm10
9324
9325# qhasm: xmm11 ^= xmm13
9326# asm 1: pxor <xmm13=int6464#14,<xmm11=int6464#12
9327# asm 2: pxor <xmm13=%xmm13,<xmm11=%xmm11
9328pxor %xmm13,%xmm11
9329
9330# qhasm: xmm14 ^= xmm11
9331# asm 1: pxor <xmm11=int6464#12,<xmm14=int6464#15
9332# asm 2: pxor <xmm11=%xmm11,<xmm14=%xmm14
9333pxor %xmm11,%xmm14
9334
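# MixColumns for this round, first half: pshufd 0x93 rotate-by-one-word
# copies of xmm8..xmm15, XORed into the slices as in the previous round.
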
9335# qhasm: xmm0 = shuffle dwords of xmm8 by 0x93
9336# asm 1: pshufd $0x93,<xmm8=int6464#9,>xmm0=int6464#1
9337# asm 2: pshufd $0x93,<xmm8=%xmm8,>xmm0=%xmm0
9338pshufd $0x93,%xmm8,%xmm0
9339
9340# qhasm: xmm1 = shuffle dwords of xmm9 by 0x93
9341# asm 1: pshufd $0x93,<xmm9=int6464#10,>xmm1=int6464#2
9342# asm 2: pshufd $0x93,<xmm9=%xmm9,>xmm1=%xmm1
9343pshufd $0x93,%xmm9,%xmm1
9344
9345# qhasm: xmm2 = shuffle dwords of xmm12 by 0x93
9346# asm 1: pshufd $0x93,<xmm12=int6464#13,>xmm2=int6464#3
9347# asm 2: pshufd $0x93,<xmm12=%xmm12,>xmm2=%xmm2
9348pshufd $0x93,%xmm12,%xmm2
9349
9350# qhasm: xmm3 = shuffle dwords of xmm14 by 0x93
9351# asm 1: pshufd $0x93,<xmm14=int6464#15,>xmm3=int6464#4
9352# asm 2: pshufd $0x93,<xmm14=%xmm14,>xmm3=%xmm3
9353pshufd $0x93,%xmm14,%xmm3
9354
9355# qhasm: xmm4 = shuffle dwords of xmm11 by 0x93
9356# asm 1: pshufd $0x93,<xmm11=int6464#12,>xmm4=int6464#5
9357# asm 2: pshufd $0x93,<xmm11=%xmm11,>xmm4=%xmm4
9358pshufd $0x93,%xmm11,%xmm4
9359
9360# qhasm: xmm5 = shuffle dwords of xmm15 by 0x93
9361# asm 1: pshufd $0x93,<xmm15=int6464#16,>xmm5=int6464#6
9362# asm 2: pshufd $0x93,<xmm15=%xmm15,>xmm5=%xmm5
9363pshufd $0x93,%xmm15,%xmm5
9364
9365# qhasm: xmm6 = shuffle dwords of xmm10 by 0x93
9366# asm 1: pshufd $0x93,<xmm10=int6464#11,>xmm6=int6464#7
9367# asm 2: pshufd $0x93,<xmm10=%xmm10,>xmm6=%xmm6
9368pshufd $0x93,%xmm10,%xmm6
9369
9370# qhasm: xmm7 = shuffle dwords of xmm13 by 0x93
9371# asm 1: pshufd $0x93,<xmm13=int6464#14,>xmm7=int6464#8
9372# asm 2: pshufd $0x93,<xmm13=%xmm13,>xmm7=%xmm7
9373pshufd $0x93,%xmm13,%xmm7
9374
9375# qhasm: xmm8 ^= xmm0
9376# asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9
9377# asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8
9378pxor %xmm0,%xmm8
9379
9380# qhasm: xmm9 ^= xmm1
9381# asm 1: pxor <xmm1=int6464#2,<xmm9=int6464#10
9382# asm 2: pxor <xmm1=%xmm1,<xmm9=%xmm9
9383pxor %xmm1,%xmm9
9384
9385# qhasm: xmm12 ^= xmm2
9386# asm 1: pxor <xmm2=int6464#3,<xmm12=int6464#13
9387# asm 2: pxor <xmm2=%xmm2,<xmm12=%xmm12
9388pxor %xmm2,%xmm12
9389
9390# qhasm: xmm14 ^= xmm3
9391# asm 1: pxor <xmm3=int6464#4,<xmm14=int6464#15
9392# asm 2: pxor <xmm3=%xmm3,<xmm14=%xmm14
9393pxor %xmm3,%xmm14
9394
9395# qhasm: xmm11 ^= xmm4
9396# asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#12
9397# asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm11
9398pxor %xmm4,%xmm11
9399
9400# qhasm: xmm15 ^= xmm5
9401# asm 1: pxor <xmm5=int6464#6,<xmm15=int6464#16
9402# asm 2: pxor <xmm5=%xmm5,<xmm15=%xmm15
9403pxor %xmm5,%xmm15
9404
9405# qhasm: xmm10 ^= xmm6
9406# asm 1: pxor <xmm6=int6464#7,<xmm10=int6464#11
9407# asm 2: pxor <xmm6=%xmm6,<xmm10=%xmm10
9408pxor %xmm6,%xmm10
9409
9410# qhasm: xmm13 ^= xmm7
9411# asm 1: pxor <xmm7=int6464#8,<xmm13=int6464#14
9412# asm 2: pxor <xmm7=%xmm7,<xmm13=%xmm13
9413pxor %xmm7,%xmm13
9414
9415# qhasm: xmm0 ^= xmm13
9416# asm 1: pxor <xmm13=int6464#14,<xmm0=int6464#1
9417# asm 2: pxor <xmm13=%xmm13,<xmm0=%xmm0
9418pxor %xmm13,%xmm0
9419
9420# qhasm: xmm1 ^= xmm8
9421# asm 1: pxor <xmm8=int6464#9,<xmm1=int6464#2
9422# asm 2: pxor <xmm8=%xmm8,<xmm1=%xmm1
9423pxor %xmm8,%xmm1
9424
9425# qhasm: xmm2 ^= xmm9
9426# asm 1: pxor <xmm9=int6464#10,<xmm2=int6464#3
9427# asm 2: pxor <xmm9=%xmm9,<xmm2=%xmm2
9428pxor %xmm9,%xmm2
9429
9430# qhasm: xmm1 ^= xmm13
9431# asm 1: pxor <xmm13=int6464#14,<xmm1=int6464#2
9432# asm 2: pxor <xmm13=%xmm13,<xmm1=%xmm1
9433pxor %xmm13,%xmm1
9434
9435# qhasm: xmm3 ^= xmm12
9436# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4
9437# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3
9438pxor %xmm12,%xmm3
9439
9440# qhasm: xmm4 ^= xmm14
9441# asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5
9442# asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4
9443pxor %xmm14,%xmm4
9444
9445# qhasm: xmm5 ^= xmm11
9446# asm 1: pxor <xmm11=int6464#12,<xmm5=int6464#6
9447# asm 2: pxor <xmm11=%xmm11,<xmm5=%xmm5
9448pxor %xmm11,%xmm5
9449
9450# qhasm: xmm3 ^= xmm13
9451# asm 1: pxor <xmm13=int6464#14,<xmm3=int6464#4
9452# asm 2: pxor <xmm13=%xmm13,<xmm3=%xmm3
9453pxor %xmm13,%xmm3
9454
9455# qhasm: xmm6 ^= xmm15
9456# asm 1: pxor <xmm15=int6464#16,<xmm6=int6464#7
9457# asm 2: pxor <xmm15=%xmm15,<xmm6=%xmm6
9458pxor %xmm15,%xmm6
9459
9460# qhasm: xmm7 ^= xmm10
9461# asm 1: pxor <xmm10=int6464#11,<xmm7=int6464#8
9462# asm 2: pxor <xmm10=%xmm10,<xmm7=%xmm7
9463pxor %xmm10,%xmm7
9464
9465# qhasm: xmm4 ^= xmm13
9466# asm 1: pxor <xmm13=int6464#14,<xmm4=int6464#5
9467# asm 2: pxor <xmm13=%xmm13,<xmm4=%xmm4
9468pxor %xmm13,%xmm4
9469
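# Second half of the column mix: pshufd $0x4E swaps the 64-bit halves of
# each slice, i.e. a rotation by two dwords, which is then folded in.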
9470# qhasm: xmm8 = shuffle dwords of xmm8 by 0x4E
9471# asm 1: pshufd $0x4E,<xmm8=int6464#9,>xmm8=int6464#9
9472# asm 2: pshufd $0x4E,<xmm8=%xmm8,>xmm8=%xmm8
9473pshufd $0x4E,%xmm8,%xmm8
9474
9475# qhasm: xmm9 = shuffle dwords of xmm9 by 0x4E
9476# asm 1: pshufd $0x4E,<xmm9=int6464#10,>xmm9=int6464#10
9477# asm 2: pshufd $0x4E,<xmm9=%xmm9,>xmm9=%xmm9
9478pshufd $0x4E,%xmm9,%xmm9
9479
9480# qhasm: xmm12 = shuffle dwords of xmm12 by 0x4E
9481# asm 1: pshufd $0x4E,<xmm12=int6464#13,>xmm12=int6464#13
9482# asm 2: pshufd $0x4E,<xmm12=%xmm12,>xmm12=%xmm12
9483pshufd $0x4E,%xmm12,%xmm12
9484
9485# qhasm: xmm14 = shuffle dwords of xmm14 by 0x4E
9486# asm 1: pshufd $0x4E,<xmm14=int6464#15,>xmm14=int6464#15
9487# asm 2: pshufd $0x4E,<xmm14=%xmm14,>xmm14=%xmm14
9488pshufd $0x4E,%xmm14,%xmm14
9489
9490# qhasm: xmm11 = shuffle dwords of xmm11 by 0x4E
9491# asm 1: pshufd $0x4E,<xmm11=int6464#12,>xmm11=int6464#12
9492# asm 2: pshufd $0x4E,<xmm11=%xmm11,>xmm11=%xmm11
9493pshufd $0x4E,%xmm11,%xmm11
9494
9495# qhasm: xmm15 = shuffle dwords of xmm15 by 0x4E
9496# asm 1: pshufd $0x4E,<xmm15=int6464#16,>xmm15=int6464#16
9497# asm 2: pshufd $0x4E,<xmm15=%xmm15,>xmm15=%xmm15
9498pshufd $0x4E,%xmm15,%xmm15
9499
9500# qhasm: xmm10 = shuffle dwords of xmm10 by 0x4E
9501# asm 1: pshufd $0x4E,<xmm10=int6464#11,>xmm10=int6464#11
9502# asm 2: pshufd $0x4E,<xmm10=%xmm10,>xmm10=%xmm10
9503pshufd $0x4E,%xmm10,%xmm10
9504
9505# qhasm: xmm13 = shuffle dwords of xmm13 by 0x4E
9506# asm 1: pshufd $0x4E,<xmm13=int6464#14,>xmm13=int6464#14
9507# asm 2: pshufd $0x4E,<xmm13=%xmm13,>xmm13=%xmm13
9508pshufd $0x4E,%xmm13,%xmm13
9509
9510# qhasm: xmm0 ^= xmm8
9511# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
9512# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
9513pxor %xmm8,%xmm0
9514
9515# qhasm: xmm1 ^= xmm9
9516# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
9517# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
9518pxor %xmm9,%xmm1
9519
9520# qhasm: xmm2 ^= xmm12
9521# asm 1: pxor <xmm12=int6464#13,<xmm2=int6464#3
9522# asm 2: pxor <xmm12=%xmm12,<xmm2=%xmm2
9523pxor %xmm12,%xmm2
9524
9525# qhasm: xmm3 ^= xmm14
9526# asm 1: pxor <xmm14=int6464#15,<xmm3=int6464#4
9527# asm 2: pxor <xmm14=%xmm14,<xmm3=%xmm3
9528pxor %xmm14,%xmm3
9529
9530# qhasm: xmm4 ^= xmm11
9531# asm 1: pxor <xmm11=int6464#12,<xmm4=int6464#5
9532# asm 2: pxor <xmm11=%xmm11,<xmm4=%xmm4
9533pxor %xmm11,%xmm4
9534
9535# qhasm: xmm5 ^= xmm15
9536# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
9537# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
9538pxor %xmm15,%xmm5
9539
9540# qhasm: xmm6 ^= xmm10
9541# asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7
9542# asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6
9543pxor %xmm10,%xmm6
9544
9545# qhasm: xmm7 ^= xmm13
9546# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
9547# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
9548pxor %xmm13,%xmm7
9549
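# Round key addition plus ShiftRows: every slice is XORed with the
# bitsliced round key stored from c + 1024 (128 bytes of key material per
# round, so presumably round key 8), then byte-shuffled by the SR
# constant, which realizes ShiftRows on this bitsliced layout.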
9550# qhasm: xmm0 ^= *(int128 *)(c + 1024)
9551# asm 1: pxor 1024(<c=int64#4),<xmm0=int6464#1
9552# asm 2: pxor 1024(<c=%rcx),<xmm0=%xmm0
9553pxor 1024(%rcx),%xmm0
9554
9555# qhasm: shuffle bytes of xmm0 by SR
9556# asm 1: pshufb SR,<xmm0=int6464#1
9557# asm 2: pshufb SR,<xmm0=%xmm0
9558pshufb SR,%xmm0
9559
9560# qhasm: xmm1 ^= *(int128 *)(c + 1040)
9561# asm 1: pxor 1040(<c=int64#4),<xmm1=int6464#2
9562# asm 2: pxor 1040(<c=%rcx),<xmm1=%xmm1
9563pxor 1040(%rcx),%xmm1
9564
9565# qhasm: shuffle bytes of xmm1 by SR
9566# asm 1: pshufb SR,<xmm1=int6464#2
9567# asm 2: pshufb SR,<xmm1=%xmm1
9568pshufb SR,%xmm1
9569
9570# qhasm: xmm2 ^= *(int128 *)(c + 1056)
9571# asm 1: pxor 1056(<c=int64#4),<xmm2=int6464#3
9572# asm 2: pxor 1056(<c=%rcx),<xmm2=%xmm2
9573pxor 1056(%rcx),%xmm2
9574
9575# qhasm: shuffle bytes of xmm2 by SR
9576# asm 1: pshufb SR,<xmm2=int6464#3
9577# asm 2: pshufb SR,<xmm2=%xmm2
9578pshufb SR,%xmm2
9579
9580# qhasm: xmm3 ^= *(int128 *)(c + 1072)
9581# asm 1: pxor 1072(<c=int64#4),<xmm3=int6464#4
9582# asm 2: pxor 1072(<c=%rcx),<xmm3=%xmm3
9583pxor 1072(%rcx),%xmm3
9584
9585# qhasm: shuffle bytes of xmm3 by SR
9586# asm 1: pshufb SR,<xmm3=int6464#4
9587# asm 2: pshufb SR,<xmm3=%xmm3
9588pshufb SR,%xmm3
9589
9590# qhasm: xmm4 ^= *(int128 *)(c + 1088)
9591# asm 1: pxor 1088(<c=int64#4),<xmm4=int6464#5
9592# asm 2: pxor 1088(<c=%rcx),<xmm4=%xmm4
9593pxor 1088(%rcx),%xmm4
9594
9595# qhasm: shuffle bytes of xmm4 by SR
9596# asm 1: pshufb SR,<xmm4=int6464#5
9597# asm 2: pshufb SR,<xmm4=%xmm4
9598pshufb SR,%xmm4
9599
9600# qhasm: xmm5 ^= *(int128 *)(c + 1104)
9601# asm 1: pxor 1104(<c=int64#4),<xmm5=int6464#6
9602# asm 2: pxor 1104(<c=%rcx),<xmm5=%xmm5
9603pxor 1104(%rcx),%xmm5
9604
9605# qhasm: shuffle bytes of xmm5 by SR
9606# asm 1: pshufb SR,<xmm5=int6464#6
9607# asm 2: pshufb SR,<xmm5=%xmm5
9608pshufb SR,%xmm5
9609
9610# qhasm: xmm6 ^= *(int128 *)(c + 1120)
9611# asm 1: pxor 1120(<c=int64#4),<xmm6=int6464#7
9612# asm 2: pxor 1120(<c=%rcx),<xmm6=%xmm6
9613pxor 1120(%rcx),%xmm6
9614
9615# qhasm: shuffle bytes of xmm6 by SR
9616# asm 1: pshufb SR,<xmm6=int6464#7
9617# asm 2: pshufb SR,<xmm6=%xmm6
9618pshufb SR,%xmm6
9619
9620# qhasm: xmm7 ^= *(int128 *)(c + 1136)
9621# asm 1: pxor 1136(<c=int64#4),<xmm7=int6464#8
9622# asm 2: pxor 1136(<c=%rcx),<xmm7=%xmm7
9623pxor 1136(%rcx),%xmm7
9624
9625# qhasm: shuffle bytes of xmm7 by SR
9626# asm 1: pshufb SR,<xmm7=int6464#8
9627# asm 2: pshufb SR,<xmm7=%xmm7
9628pshufb SR,%xmm7
9629
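# S-box, input linear layer: a pure pxor network that changes the basis
# of the eight bit slices before the nonlinear inversion.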
9630# qhasm: xmm5 ^= xmm6
9631# asm 1: pxor <xmm6=int6464#7,<xmm5=int6464#6
9632# asm 2: pxor <xmm6=%xmm6,<xmm5=%xmm5
9633pxor %xmm6,%xmm5
9634
9635# qhasm: xmm2 ^= xmm1
9636# asm 1: pxor <xmm1=int6464#2,<xmm2=int6464#3
9637# asm 2: pxor <xmm1=%xmm1,<xmm2=%xmm2
9638pxor %xmm1,%xmm2
9639
9640# qhasm: xmm5 ^= xmm0
9641# asm 1: pxor <xmm0=int6464#1,<xmm5=int6464#6
9642# asm 2: pxor <xmm0=%xmm0,<xmm5=%xmm5
9643pxor %xmm0,%xmm5
9644
9645# qhasm: xmm6 ^= xmm2
9646# asm 1: pxor <xmm2=int6464#3,<xmm6=int6464#7
9647# asm 2: pxor <xmm2=%xmm2,<xmm6=%xmm6
9648pxor %xmm2,%xmm6
9649
9650# qhasm: xmm3 ^= xmm0
9651# asm 1: pxor <xmm0=int6464#1,<xmm3=int6464#4
9652# asm 2: pxor <xmm0=%xmm0,<xmm3=%xmm3
9653pxor %xmm0,%xmm3
9654
9655# qhasm: xmm6 ^= xmm3
9656# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
9657# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
9658pxor %xmm3,%xmm6
9659
9660# qhasm: xmm3 ^= xmm7
9661# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4
9662# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3
9663pxor %xmm7,%xmm3
9664
9665# qhasm: xmm3 ^= xmm4
9666# asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4
9667# asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3
9668pxor %xmm4,%xmm3
9669
9670# qhasm: xmm7 ^= xmm5
9671# asm 1: pxor <xmm5=int6464#6,<xmm7=int6464#8
9672# asm 2: pxor <xmm5=%xmm5,<xmm7=%xmm7
9673pxor %xmm5,%xmm7
9674
9675# qhasm: xmm3 ^= xmm1
9676# asm 1: pxor <xmm1=int6464#2,<xmm3=int6464#4
9677# asm 2: pxor <xmm1=%xmm1,<xmm3=%xmm3
9678pxor %xmm1,%xmm3
9679
9680# qhasm: xmm4 ^= xmm5
9681# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
9682# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
9683pxor %xmm5,%xmm4
9684
9685# qhasm: xmm2 ^= xmm7
9686# asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3
9687# asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2
9688pxor %xmm7,%xmm2
9689
9690# qhasm: xmm1 ^= xmm5
9691# asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2
9692# asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1
9693pxor %xmm5,%xmm1
9694
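# S-box, nonlinear core: inversion in GF(2^8) evaluated on the bit slices
# with pand/por/pxor only; no table lookups, so this stays constant-time.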
9695# qhasm: xmm11 = xmm7
9696# asm 1: movdqa <xmm7=int6464#8,>xmm11=int6464#9
9697# asm 2: movdqa <xmm7=%xmm7,>xmm11=%xmm8
9698movdqa %xmm7,%xmm8
9699
9700# qhasm: xmm10 = xmm1
9701# asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10
9702# asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9
9703movdqa %xmm1,%xmm9
9704
9705# qhasm: xmm9 = xmm5
9706# asm 1: movdqa <xmm5=int6464#6,>xmm9=int6464#11
9707# asm 2: movdqa <xmm5=%xmm5,>xmm9=%xmm10
9708movdqa %xmm5,%xmm10
9709
9710# qhasm: xmm13 = xmm2
9711# asm 1: movdqa <xmm2=int6464#3,>xmm13=int6464#12
9712# asm 2: movdqa <xmm2=%xmm2,>xmm13=%xmm11
9713movdqa %xmm2,%xmm11
9714
9715# qhasm: xmm12 = xmm6
9716# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#13
9717# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm12
9718movdqa %xmm6,%xmm12
9719
9720# qhasm: xmm11 ^= xmm4
9721# asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#9
9722# asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm8
9723pxor %xmm4,%xmm8
9724
9725# qhasm: xmm10 ^= xmm2
9726# asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#10
9727# asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm9
9728pxor %xmm2,%xmm9
9729
9730# qhasm: xmm9 ^= xmm3
9731# asm 1: pxor <xmm3=int6464#4,<xmm9=int6464#11
9732# asm 2: pxor <xmm3=%xmm3,<xmm9=%xmm10
9733pxor %xmm3,%xmm10
9734
9735# qhasm: xmm13 ^= xmm4
9736# asm 1: pxor <xmm4=int6464#5,<xmm13=int6464#12
9737# asm 2: pxor <xmm4=%xmm4,<xmm13=%xmm11
9738pxor %xmm4,%xmm11
9739
9740# qhasm: xmm12 ^= xmm0
9741# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13
9742# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12
9743pxor %xmm0,%xmm12
9744
9745# qhasm: xmm14 = xmm11
9746# asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14
9747# asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13
9748movdqa %xmm8,%xmm13
9749
9750# qhasm: xmm8 = xmm10
9751# asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15
9752# asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14
9753movdqa %xmm9,%xmm14
9754
9755# qhasm: xmm15 = xmm11
9756# asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16
9757# asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15
9758movdqa %xmm8,%xmm15
9759
9760# qhasm: xmm10 |= xmm9
9761# asm 1: por <xmm9=int6464#11,<xmm10=int6464#10
9762# asm 2: por <xmm9=%xmm10,<xmm10=%xmm9
9763por %xmm10,%xmm9
9764
9765# qhasm: xmm11 |= xmm12
9766# asm 1: por <xmm12=int6464#13,<xmm11=int6464#9
9767# asm 2: por <xmm12=%xmm12,<xmm11=%xmm8
9768por %xmm12,%xmm8
9769
9770# qhasm: xmm15 ^= xmm8
9771# asm 1: pxor <xmm8=int6464#15,<xmm15=int6464#16
9772# asm 2: pxor <xmm8=%xmm14,<xmm15=%xmm15
9773pxor %xmm14,%xmm15
9774
9775# qhasm: xmm14 &= xmm12
9776# asm 1: pand <xmm12=int6464#13,<xmm14=int6464#14
9777# asm 2: pand <xmm12=%xmm12,<xmm14=%xmm13
9778pand %xmm12,%xmm13
9779
9780# qhasm: xmm8 &= xmm9
9781# asm 1: pand <xmm9=int6464#11,<xmm8=int6464#15
9782# asm 2: pand <xmm9=%xmm10,<xmm8=%xmm14
9783pand %xmm10,%xmm14
9784
9785# qhasm: xmm12 ^= xmm9
9786# asm 1: pxor <xmm9=int6464#11,<xmm12=int6464#13
9787# asm 2: pxor <xmm9=%xmm10,<xmm12=%xmm12
9788pxor %xmm10,%xmm12
9789
9790# qhasm: xmm15 &= xmm12
9791# asm 1: pand <xmm12=int6464#13,<xmm15=int6464#16
9792# asm 2: pand <xmm12=%xmm12,<xmm15=%xmm15
9793pand %xmm12,%xmm15
9794
9795# qhasm: xmm12 = xmm3
9796# asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#11
9797# asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm10
9798movdqa %xmm3,%xmm10
9799
9800# qhasm: xmm12 ^= xmm0
9801# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#11
9802# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm10
9803pxor %xmm0,%xmm10
9804
9805# qhasm: xmm13 &= xmm12
9806# asm 1: pand <xmm12=int6464#11,<xmm13=int6464#12
9807# asm 2: pand <xmm12=%xmm10,<xmm13=%xmm11
9808pand %xmm10,%xmm11
9809
9810# qhasm: xmm11 ^= xmm13
9811# asm 1: pxor <xmm13=int6464#12,<xmm11=int6464#9
9812# asm 2: pxor <xmm13=%xmm11,<xmm11=%xmm8
9813pxor %xmm11,%xmm8
9814
9815# qhasm: xmm10 ^= xmm13
9816# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
9817# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
9818pxor %xmm11,%xmm9
9819
9820# qhasm: xmm13 = xmm7
9821# asm 1: movdqa <xmm7=int6464#8,>xmm13=int6464#11
9822# asm 2: movdqa <xmm7=%xmm7,>xmm13=%xmm10
9823movdqa %xmm7,%xmm10
9824
9825# qhasm: xmm13 ^= xmm1
9826# asm 1: pxor <xmm1=int6464#2,<xmm13=int6464#11
9827# asm 2: pxor <xmm1=%xmm1,<xmm13=%xmm10
9828pxor %xmm1,%xmm10
9829
9830# qhasm: xmm12 = xmm5
9831# asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#12
9832# asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm11
9833movdqa %xmm5,%xmm11
9834
9835# qhasm: xmm9 = xmm13
9836# asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13
9837# asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12
9838movdqa %xmm10,%xmm12
9839
9840# qhasm: xmm12 ^= xmm6
9841# asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#12
9842# asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm11
9843pxor %xmm6,%xmm11
9844
9845# qhasm: xmm9 |= xmm12
9846# asm 1: por <xmm12=int6464#12,<xmm9=int6464#13
9847# asm 2: por <xmm12=%xmm11,<xmm9=%xmm12
9848por %xmm11,%xmm12
9849
9850# qhasm: xmm13 &= xmm12
9851# asm 1: pand <xmm12=int6464#12,<xmm13=int6464#11
9852# asm 2: pand <xmm12=%xmm11,<xmm13=%xmm10
9853pand %xmm11,%xmm10
9854
9855# qhasm: xmm8 ^= xmm13
9856# asm 1: pxor <xmm13=int6464#11,<xmm8=int6464#15
9857# asm 2: pxor <xmm13=%xmm10,<xmm8=%xmm14
9858pxor %xmm10,%xmm14
9859
9860# qhasm: xmm11 ^= xmm15
9861# asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#9
9862# asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm8
9863pxor %xmm15,%xmm8
9864
9865# qhasm: xmm10 ^= xmm14
9866# asm 1: pxor <xmm14=int6464#14,<xmm10=int6464#10
9867# asm 2: pxor <xmm14=%xmm13,<xmm10=%xmm9
9868pxor %xmm13,%xmm9
9869
9870# qhasm: xmm9 ^= xmm15
9871# asm 1: pxor <xmm15=int6464#16,<xmm9=int6464#13
9872# asm 2: pxor <xmm15=%xmm15,<xmm9=%xmm12
9873pxor %xmm15,%xmm12
9874
9875# qhasm: xmm8 ^= xmm14
9876# asm 1: pxor <xmm14=int6464#14,<xmm8=int6464#15
9877# asm 2: pxor <xmm14=%xmm13,<xmm8=%xmm14
9878pxor %xmm13,%xmm14
9879
9880# qhasm: xmm9 ^= xmm14
9881# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
9882# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
9883pxor %xmm13,%xmm12
9884
9885# qhasm: xmm12 = xmm2
9886# asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#11
9887# asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm10
9888movdqa %xmm2,%xmm10
9889
9890# qhasm: xmm13 = xmm4
9891# asm 1: movdqa <xmm4=int6464#5,>xmm13=int6464#12
9892# asm 2: movdqa <xmm4=%xmm4,>xmm13=%xmm11
9893movdqa %xmm4,%xmm11
9894
9895# qhasm: xmm14 = xmm1
9896# asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14
9897# asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13
9898movdqa %xmm1,%xmm13
9899
9900# qhasm: xmm15 = xmm7
9901# asm 1: movdqa <xmm7=int6464#8,>xmm15=int6464#16
9902# asm 2: movdqa <xmm7=%xmm7,>xmm15=%xmm15
9903movdqa %xmm7,%xmm15
9904
9905# qhasm: xmm12 &= xmm3
9906# asm 1: pand <xmm3=int6464#4,<xmm12=int6464#11
9907# asm 2: pand <xmm3=%xmm3,<xmm12=%xmm10
9908pand %xmm3,%xmm10
9909
9910# qhasm: xmm13 &= xmm0
9911# asm 1: pand <xmm0=int6464#1,<xmm13=int6464#12
9912# asm 2: pand <xmm0=%xmm0,<xmm13=%xmm11
9913pand %xmm0,%xmm11
9914
9915# qhasm: xmm14 &= xmm5
9916# asm 1: pand <xmm5=int6464#6,<xmm14=int6464#14
9917# asm 2: pand <xmm5=%xmm5,<xmm14=%xmm13
9918pand %xmm5,%xmm13
9919
9920# qhasm: xmm15 |= xmm6
9921# asm 1: por <xmm6=int6464#7,<xmm15=int6464#16
9922# asm 2: por <xmm6=%xmm6,<xmm15=%xmm15
9923por %xmm6,%xmm15
9924
9925# qhasm: xmm11 ^= xmm12
9926# asm 1: pxor <xmm12=int6464#11,<xmm11=int6464#9
9927# asm 2: pxor <xmm12=%xmm10,<xmm11=%xmm8
9928pxor %xmm10,%xmm8
9929
9930# qhasm: xmm10 ^= xmm13
9931# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
9932# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
9933pxor %xmm11,%xmm9
9934
9935# qhasm: xmm9 ^= xmm14
9936# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
9937# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
9938pxor %xmm13,%xmm12
9939
9940# qhasm: xmm8 ^= xmm15
9941# asm 1: pxor <xmm15=int6464#16,<xmm8=int6464#15
9942# asm 2: pxor <xmm15=%xmm15,<xmm8=%xmm14
9943pxor %xmm15,%xmm14
9944
9945# qhasm: xmm12 = xmm11
9946# asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11
9947# asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10
9948movdqa %xmm8,%xmm10
9949
9950# qhasm: xmm12 ^= xmm10
9951# asm 1: pxor <xmm10=int6464#10,<xmm12=int6464#11
9952# asm 2: pxor <xmm10=%xmm9,<xmm12=%xmm10
9953pxor %xmm9,%xmm10
9954
9955# qhasm: xmm11 &= xmm9
9956# asm 1: pand <xmm9=int6464#13,<xmm11=int6464#9
9957# asm 2: pand <xmm9=%xmm12,<xmm11=%xmm8
9958pand %xmm12,%xmm8
9959
9960# qhasm: xmm14 = xmm8
9961# asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12
9962# asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11
9963movdqa %xmm14,%xmm11
9964
9965# qhasm: xmm14 ^= xmm11
9966# asm 1: pxor <xmm11=int6464#9,<xmm14=int6464#12
9967# asm 2: pxor <xmm11=%xmm8,<xmm14=%xmm11
9968pxor %xmm8,%xmm11
9969
9970# qhasm: xmm15 = xmm12
9971# asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14
9972# asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13
9973movdqa %xmm10,%xmm13
9974
9975# qhasm: xmm15 &= xmm14
9976# asm 1: pand <xmm14=int6464#12,<xmm15=int6464#14
9977# asm 2: pand <xmm14=%xmm11,<xmm15=%xmm13
9978pand %xmm11,%xmm13
9979
9980# qhasm: xmm15 ^= xmm10
9981# asm 1: pxor <xmm10=int6464#10,<xmm15=int6464#14
9982# asm 2: pxor <xmm10=%xmm9,<xmm15=%xmm13
9983pxor %xmm9,%xmm13
9984
9985# qhasm: xmm13 = xmm9
9986# asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16
9987# asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15
9988movdqa %xmm12,%xmm15
9989
9990# qhasm: xmm13 ^= xmm8
9991# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
9992# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
9993pxor %xmm14,%xmm15
9994
9995# qhasm: xmm11 ^= xmm10
9996# asm 1: pxor <xmm10=int6464#10,<xmm11=int6464#9
9997# asm 2: pxor <xmm10=%xmm9,<xmm11=%xmm8
9998pxor %xmm9,%xmm8
9999
10000# qhasm: xmm13 &= xmm11
10001# asm 1: pand <xmm11=int6464#9,<xmm13=int6464#16
10002# asm 2: pand <xmm11=%xmm8,<xmm13=%xmm15
10003pand %xmm8,%xmm15
10004
10005# qhasm: xmm13 ^= xmm8
10006# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
10007# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
10008pxor %xmm14,%xmm15
10009
10010# qhasm: xmm9 ^= xmm13
10011# asm 1: pxor <xmm13=int6464#16,<xmm9=int6464#13
10012# asm 2: pxor <xmm13=%xmm15,<xmm9=%xmm12
10013pxor %xmm15,%xmm12
10014
10015# qhasm: xmm10 = xmm14
10016# asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9
10017# asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8
10018movdqa %xmm11,%xmm8
10019
10020# qhasm: xmm10 ^= xmm13
10021# asm 1: pxor <xmm13=int6464#16,<xmm10=int6464#9
10022# asm 2: pxor <xmm13=%xmm15,<xmm10=%xmm8
10023pxor %xmm15,%xmm8
10024
10025# qhasm: xmm10 &= xmm8
10026# asm 1: pand <xmm8=int6464#15,<xmm10=int6464#9
10027# asm 2: pand <xmm8=%xmm14,<xmm10=%xmm8
10028pand %xmm14,%xmm8
10029
10030# qhasm: xmm9 ^= xmm10
10031# asm 1: pxor <xmm10=int6464#9,<xmm9=int6464#13
10032# asm 2: pxor <xmm10=%xmm8,<xmm9=%xmm12
10033pxor %xmm8,%xmm12
10034
10035# qhasm: xmm14 ^= xmm10
10036# asm 1: pxor <xmm10=int6464#9,<xmm14=int6464#12
10037# asm 2: pxor <xmm10=%xmm8,<xmm14=%xmm11
10038pxor %xmm8,%xmm11
10039
10040# qhasm: xmm14 &= xmm15
10041# asm 1: pand <xmm15=int6464#14,<xmm14=int6464#12
10042# asm 2: pand <xmm15=%xmm13,<xmm14=%xmm11
10043pand %xmm13,%xmm11
10044
10045# qhasm: xmm14 ^= xmm12
10046# asm 1: pxor <xmm12=int6464#11,<xmm14=int6464#12
10047# asm 2: pxor <xmm12=%xmm10,<xmm14=%xmm11
10048pxor %xmm10,%xmm11
10049
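# The shared factors computed above are now multiplied back into the
# state; the pand/pxor groups below appear to be the GF(2^4)-level
# multipliers of the tower-field inversion.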
10050# qhasm: xmm12 = xmm6
10051# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#9
10052# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm8
10053movdqa %xmm6,%xmm8
10054
10055# qhasm: xmm8 = xmm5
10056# asm 1: movdqa <xmm5=int6464#6,>xmm8=int6464#10
10057# asm 2: movdqa <xmm5=%xmm5,>xmm8=%xmm9
10058movdqa %xmm5,%xmm9
10059
10060# qhasm: xmm10 = xmm15
10061# asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11
10062# asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10
10063movdqa %xmm13,%xmm10
10064
10065# qhasm: xmm10 ^= xmm14
10066# asm 1: pxor <xmm14=int6464#12,<xmm10=int6464#11
10067# asm 2: pxor <xmm14=%xmm11,<xmm10=%xmm10
10068pxor %xmm11,%xmm10
10069
10070# qhasm: xmm10 &= xmm6
10071# asm 1: pand <xmm6=int6464#7,<xmm10=int6464#11
10072# asm 2: pand <xmm6=%xmm6,<xmm10=%xmm10
10073pand %xmm6,%xmm10
10074
10075# qhasm: xmm6 ^= xmm5
10076# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
10077# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
10078pxor %xmm5,%xmm6
10079
10080# qhasm: xmm6 &= xmm14
10081# asm 1: pand <xmm14=int6464#12,<xmm6=int6464#7
10082# asm 2: pand <xmm14=%xmm11,<xmm6=%xmm6
10083pand %xmm11,%xmm6
10084
10085# qhasm: xmm5 &= xmm15
10086# asm 1: pand <xmm15=int6464#14,<xmm5=int6464#6
10087# asm 2: pand <xmm15=%xmm13,<xmm5=%xmm5
10088pand %xmm13,%xmm5
10089
10090# qhasm: xmm6 ^= xmm5
10091# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
10092# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
10093pxor %xmm5,%xmm6
10094
10095# qhasm: xmm5 ^= xmm10
10096# asm 1: pxor <xmm10=int6464#11,<xmm5=int6464#6
10097# asm 2: pxor <xmm10=%xmm10,<xmm5=%xmm5
10098pxor %xmm10,%xmm5
10099
10100# qhasm: xmm12 ^= xmm0
10101# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#9
10102# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm8
10103pxor %xmm0,%xmm8
10104
10105# qhasm: xmm8 ^= xmm3
10106# asm 1: pxor <xmm3=int6464#4,<xmm8=int6464#10
10107# asm 2: pxor <xmm3=%xmm3,<xmm8=%xmm9
10108pxor %xmm3,%xmm9
10109
10110# qhasm: xmm15 ^= xmm13
10111# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
10112# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
10113pxor %xmm15,%xmm13
10114
10115# qhasm: xmm14 ^= xmm9
10116# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
10117# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
10118pxor %xmm12,%xmm11
10119
10120# qhasm: xmm11 = xmm15
10121# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
10122# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
10123movdqa %xmm13,%xmm10
10124
10125# qhasm: xmm11 ^= xmm14
10126# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
10127# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
10128pxor %xmm11,%xmm10
10129
10130# qhasm: xmm11 &= xmm12
10131# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
10132# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
10133pand %xmm8,%xmm10
10134
10135# qhasm: xmm12 ^= xmm8
10136# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
10137# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
10138pxor %xmm9,%xmm8
10139
10140# qhasm: xmm12 &= xmm14
10141# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
10142# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
10143pand %xmm11,%xmm8
10144
10145# qhasm: xmm8 &= xmm15
10146# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
10147# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
10148pand %xmm13,%xmm9
10149
10150# qhasm: xmm8 ^= xmm12
10151# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
10152# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
10153pxor %xmm8,%xmm9
10154
10155# qhasm: xmm12 ^= xmm11
10156# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
10157# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
10158pxor %xmm10,%xmm8
10159
10160# qhasm: xmm10 = xmm13
10161# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
10162# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
10163movdqa %xmm15,%xmm10
10164
10165# qhasm: xmm10 ^= xmm9
10166# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
10167# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
10168pxor %xmm12,%xmm10
10169
10170# qhasm: xmm10 &= xmm0
10171# asm 1: pand <xmm0=int6464#1,<xmm10=int6464#11
10172# asm 2: pand <xmm0=%xmm0,<xmm10=%xmm10
10173pand %xmm0,%xmm10
10174
10175# qhasm: xmm0 ^= xmm3
10176# asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1
10177# asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0
10178pxor %xmm3,%xmm0
10179
10180# qhasm: xmm0 &= xmm9
10181# asm 1: pand <xmm9=int6464#13,<xmm0=int6464#1
10182# asm 2: pand <xmm9=%xmm12,<xmm0=%xmm0
10183pand %xmm12,%xmm0
10184
10185# qhasm: xmm3 &= xmm13
10186# asm 1: pand <xmm13=int6464#16,<xmm3=int6464#4
10187# asm 2: pand <xmm13=%xmm15,<xmm3=%xmm3
10188pand %xmm15,%xmm3
10189
10190# qhasm: xmm0 ^= xmm3
10191# asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1
10192# asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0
10193pxor %xmm3,%xmm0
10194
10195# qhasm: xmm3 ^= xmm10
10196# asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4
10197# asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3
10198pxor %xmm10,%xmm3
10199
10200# qhasm: xmm6 ^= xmm12
10201# asm 1: pxor <xmm12=int6464#9,<xmm6=int6464#7
10202# asm 2: pxor <xmm12=%xmm8,<xmm6=%xmm6
10203pxor %xmm8,%xmm6
10204
10205# qhasm: xmm0 ^= xmm12
10206# asm 1: pxor <xmm12=int6464#9,<xmm0=int6464#1
10207# asm 2: pxor <xmm12=%xmm8,<xmm0=%xmm0
10208pxor %xmm8,%xmm0
10209
10210# qhasm: xmm5 ^= xmm8
10211# asm 1: pxor <xmm8=int6464#10,<xmm5=int6464#6
10212# asm 2: pxor <xmm8=%xmm9,<xmm5=%xmm5
10213pxor %xmm9,%xmm5
10214
10215# qhasm: xmm3 ^= xmm8
10216# asm 1: pxor <xmm8=int6464#10,<xmm3=int6464#4
10217# asm 2: pxor <xmm8=%xmm9,<xmm3=%xmm3
10218pxor %xmm9,%xmm3
10219
10220# qhasm: xmm12 = xmm7
10221# asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#9
10222# asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm8
10223movdqa %xmm7,%xmm8
10224
10225# qhasm: xmm8 = xmm1
10226# asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10
10227# asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9
10228movdqa %xmm1,%xmm9
10229
10230# qhasm: xmm12 ^= xmm4
10231# asm 1: pxor <xmm4=int6464#5,<xmm12=int6464#9
10232# asm 2: pxor <xmm4=%xmm4,<xmm12=%xmm8
10233pxor %xmm4,%xmm8
10234
10235# qhasm: xmm8 ^= xmm2
10236# asm 1: pxor <xmm2=int6464#3,<xmm8=int6464#10
10237# asm 2: pxor <xmm2=%xmm2,<xmm8=%xmm9
10238pxor %xmm2,%xmm9
10239
10240# qhasm: xmm11 = xmm15
10241# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
10242# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
10243movdqa %xmm13,%xmm10
10244
10245# qhasm: xmm11 ^= xmm14
10246# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
10247# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
10248pxor %xmm11,%xmm10
10249
10250# qhasm: xmm11 &= xmm12
10251# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
10252# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
10253pand %xmm8,%xmm10
10254
10255# qhasm: xmm12 ^= xmm8
10256# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
10257# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
10258pxor %xmm9,%xmm8
10259
10260# qhasm: xmm12 &= xmm14
10261# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
10262# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
10263pand %xmm11,%xmm8
10264
10265# qhasm: xmm8 &= xmm15
10266# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
10267# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
10268pand %xmm13,%xmm9
10269
10270# qhasm: xmm8 ^= xmm12
10271# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
10272# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
10273pxor %xmm8,%xmm9
10274
10275# qhasm: xmm12 ^= xmm11
10276# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
10277# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
10278pxor %xmm10,%xmm8
10279
10280# qhasm: xmm10 = xmm13
10281# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
10282# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
10283movdqa %xmm15,%xmm10
10284
10285# qhasm: xmm10 ^= xmm9
10286# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
10287# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
10288pxor %xmm12,%xmm10
10289
10290# qhasm: xmm10 &= xmm4
10291# asm 1: pand <xmm4=int6464#5,<xmm10=int6464#11
10292# asm 2: pand <xmm4=%xmm4,<xmm10=%xmm10
10293pand %xmm4,%xmm10
10294
10295# qhasm: xmm4 ^= xmm2
10296# asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5
10297# asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4
10298pxor %xmm2,%xmm4
10299
10300# qhasm: xmm4 &= xmm9
10301# asm 1: pand <xmm9=int6464#13,<xmm4=int6464#5
10302# asm 2: pand <xmm9=%xmm12,<xmm4=%xmm4
10303pand %xmm12,%xmm4
10304
10305# qhasm: xmm2 &= xmm13
10306# asm 1: pand <xmm13=int6464#16,<xmm2=int6464#3
10307# asm 2: pand <xmm13=%xmm15,<xmm2=%xmm2
10308pand %xmm15,%xmm2
10309
10310# qhasm: xmm4 ^= xmm2
10311# asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5
10312# asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4
10313pxor %xmm2,%xmm4
10314
10315# qhasm: xmm2 ^= xmm10
10316# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3
10317# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2
10318pxor %xmm10,%xmm2
10319
10320# qhasm: xmm15 ^= xmm13
10321# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
10322# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
10323pxor %xmm15,%xmm13
10324
10325# qhasm: xmm14 ^= xmm9
10326# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
10327# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
10328pxor %xmm12,%xmm11
10329
10330# qhasm: xmm11 = xmm15
10331# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
10332# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
10333movdqa %xmm13,%xmm10
10334
10335# qhasm: xmm11 ^= xmm14
10336# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
10337# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
10338pxor %xmm11,%xmm10
10339
10340# qhasm: xmm11 &= xmm7
10341# asm 1: pand <xmm7=int6464#8,<xmm11=int6464#11
10342# asm 2: pand <xmm7=%xmm7,<xmm11=%xmm10
10343pand %xmm7,%xmm10
10344
10345# qhasm: xmm7 ^= xmm1
10346# asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8
10347# asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7
10348pxor %xmm1,%xmm7
10349
10350# qhasm: xmm7 &= xmm14
10351# asm 1: pand <xmm14=int6464#12,<xmm7=int6464#8
10352# asm 2: pand <xmm14=%xmm11,<xmm7=%xmm7
10353pand %xmm11,%xmm7
10354
10355# qhasm: xmm1 &= xmm15
10356# asm 1: pand <xmm15=int6464#14,<xmm1=int6464#2
10357# asm 2: pand <xmm15=%xmm13,<xmm1=%xmm1
10358pand %xmm13,%xmm1
10359
10360# qhasm: xmm7 ^= xmm1
10361# asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8
10362# asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7
10363pxor %xmm1,%xmm7
10364
10365# qhasm: xmm1 ^= xmm11
10366# asm 1: pxor <xmm11=int6464#11,<xmm1=int6464#2
10367# asm 2: pxor <xmm11=%xmm10,<xmm1=%xmm1
10368pxor %xmm10,%xmm1
10369
10370# qhasm: xmm7 ^= xmm12
10371# asm 1: pxor <xmm12=int6464#9,<xmm7=int6464#8
10372# asm 2: pxor <xmm12=%xmm8,<xmm7=%xmm7
10373pxor %xmm8,%xmm7
10374
10375# qhasm: xmm4 ^= xmm12
10376# asm 1: pxor <xmm12=int6464#9,<xmm4=int6464#5
10377# asm 2: pxor <xmm12=%xmm8,<xmm4=%xmm4
10378pxor %xmm8,%xmm4
10379
10380# qhasm: xmm1 ^= xmm8
10381# asm 1: pxor <xmm8=int6464#10,<xmm1=int6464#2
10382# asm 2: pxor <xmm8=%xmm9,<xmm1=%xmm1
10383pxor %xmm9,%xmm1
10384
10385# qhasm: xmm2 ^= xmm8
10386# asm 1: pxor <xmm8=int6464#10,<xmm2=int6464#3
10387# asm 2: pxor <xmm8=%xmm9,<xmm2=%xmm2
10388pxor %xmm9,%xmm2
10389
10390# qhasm: xmm7 ^= xmm0
10391# asm 1: pxor <xmm0=int6464#1,<xmm7=int6464#8
10392# asm 2: pxor <xmm0=%xmm0,<xmm7=%xmm7
10393pxor %xmm0,%xmm7
10394
10395# qhasm: xmm1 ^= xmm6
10396# asm 1: pxor <xmm6=int6464#7,<xmm1=int6464#2
10397# asm 2: pxor <xmm6=%xmm6,<xmm1=%xmm1
10398pxor %xmm6,%xmm1
10399
10400# qhasm: xmm4 ^= xmm7
10401# asm 1: pxor <xmm7=int6464#8,<xmm4=int6464#5
10402# asm 2: pxor <xmm7=%xmm7,<xmm4=%xmm4
10403pxor %xmm7,%xmm4
10404
10405# qhasm: xmm6 ^= xmm0
10406# asm 1: pxor <xmm0=int6464#1,<xmm6=int6464#7
10407# asm 2: pxor <xmm0=%xmm0,<xmm6=%xmm6
10408pxor %xmm0,%xmm6
10409
10410# qhasm: xmm0 ^= xmm1
10411# asm 1: pxor <xmm1=int6464#2,<xmm0=int6464#1
10412# asm 2: pxor <xmm1=%xmm1,<xmm0=%xmm0
10413pxor %xmm1,%xmm0
10414
10415# qhasm: xmm1 ^= xmm5
10416# asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2
10417# asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1
10418pxor %xmm5,%xmm1
10419
10420# qhasm: xmm5 ^= xmm2
10421# asm 1: pxor <xmm2=int6464#3,<xmm5=int6464#6
10422# asm 2: pxor <xmm2=%xmm2,<xmm5=%xmm5
10423pxor %xmm2,%xmm5
10424
10425# qhasm: xmm4 ^= xmm5
10426# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
10427# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
10428pxor %xmm5,%xmm4
10429
10430# qhasm: xmm2 ^= xmm3
10431# asm 1: pxor <xmm3=int6464#4,<xmm2=int6464#3
10432# asm 2: pxor <xmm3=%xmm3,<xmm2=%xmm2
10433pxor %xmm3,%xmm2
10434
10435# qhasm: xmm3 ^= xmm5
10436# asm 1: pxor <xmm5=int6464#6,<xmm3=int6464#4
10437# asm 2: pxor <xmm5=%xmm5,<xmm3=%xmm3
10438pxor %xmm5,%xmm3
10439
10440# qhasm: xmm6 ^= xmm3
10441# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
10442# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
10443pxor %xmm3,%xmm6
10444
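# MixColumns for this round: the same rotate-and-XOR pattern ($0x93 here,
# then $0x4E below) applied to the fresh S-box outputs.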
10445# qhasm: xmm8 = shuffle dwords of xmm0 by 0x93
10446# asm 1: pshufd $0x93,<xmm0=int6464#1,>xmm8=int6464#9
10447# asm 2: pshufd $0x93,<xmm0=%xmm0,>xmm8=%xmm8
10448pshufd $0x93,%xmm0,%xmm8
10449
10450# qhasm: xmm9 = shuffle dwords of xmm1 by 0x93
10451# asm 1: pshufd $0x93,<xmm1=int6464#2,>xmm9=int6464#10
10452# asm 2: pshufd $0x93,<xmm1=%xmm1,>xmm9=%xmm9
10453pshufd $0x93,%xmm1,%xmm9
10454
10455# qhasm: xmm10 = shuffle dwords of xmm4 by 0x93
10456# asm 1: pshufd $0x93,<xmm4=int6464#5,>xmm10=int6464#11
10457# asm 2: pshufd $0x93,<xmm4=%xmm4,>xmm10=%xmm10
10458pshufd $0x93,%xmm4,%xmm10
10459
10460# qhasm: xmm11 = shuffle dwords of xmm6 by 0x93
10461# asm 1: pshufd $0x93,<xmm6=int6464#7,>xmm11=int6464#12
10462# asm 2: pshufd $0x93,<xmm6=%xmm6,>xmm11=%xmm11
10463pshufd $0x93,%xmm6,%xmm11
10464
10465# qhasm: xmm12 = shuffle dwords of xmm3 by 0x93
10466# asm 1: pshufd $0x93,<xmm3=int6464#4,>xmm12=int6464#13
10467# asm 2: pshufd $0x93,<xmm3=%xmm3,>xmm12=%xmm12
10468pshufd $0x93,%xmm3,%xmm12
10469
10470# qhasm: xmm13 = shuffle dwords of xmm7 by 0x93
10471# asm 1: pshufd $0x93,<xmm7=int6464#8,>xmm13=int6464#14
10472# asm 2: pshufd $0x93,<xmm7=%xmm7,>xmm13=%xmm13
10473pshufd $0x93,%xmm7,%xmm13
10474
10475# qhasm: xmm14 = shuffle dwords of xmm2 by 0x93
10476# asm 1: pshufd $0x93,<xmm2=int6464#3,>xmm14=int6464#15
10477# asm 2: pshufd $0x93,<xmm2=%xmm2,>xmm14=%xmm14
10478pshufd $0x93,%xmm2,%xmm14
10479
10480# qhasm: xmm15 = shuffle dwords of xmm5 by 0x93
10481# asm 1: pshufd $0x93,<xmm5=int6464#6,>xmm15=int6464#16
10482# asm 2: pshufd $0x93,<xmm5=%xmm5,>xmm15=%xmm15
10483pshufd $0x93,%xmm5,%xmm15
10484
10485# qhasm: xmm0 ^= xmm8
10486# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
10487# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
10488pxor %xmm8,%xmm0
10489
10490# qhasm: xmm1 ^= xmm9
10491# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
10492# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
10493pxor %xmm9,%xmm1
10494
10495# qhasm: xmm4 ^= xmm10
10496# asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5
10497# asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4
10498pxor %xmm10,%xmm4
10499
10500# qhasm: xmm6 ^= xmm11
10501# asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7
10502# asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6
10503pxor %xmm11,%xmm6
10504
10505# qhasm: xmm3 ^= xmm12
10506# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4
10507# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3
10508pxor %xmm12,%xmm3
10509
10510# qhasm: xmm7 ^= xmm13
10511# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
10512# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
10513pxor %xmm13,%xmm7
10514
10515# qhasm: xmm2 ^= xmm14
10516# asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3
10517# asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2
10518pxor %xmm14,%xmm2
10519
10520# qhasm: xmm5 ^= xmm15
10521# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
10522# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
10523pxor %xmm15,%xmm5
10524
10525# qhasm: xmm8 ^= xmm5
10526# asm 1: pxor <xmm5=int6464#6,<xmm8=int6464#9
10527# asm 2: pxor <xmm5=%xmm5,<xmm8=%xmm8
10528pxor %xmm5,%xmm8
10529
10530# qhasm: xmm9 ^= xmm0
10531# asm 1: pxor <xmm0=int6464#1,<xmm9=int6464#10
10532# asm 2: pxor <xmm0=%xmm0,<xmm9=%xmm9
10533pxor %xmm0,%xmm9
10534
10535# qhasm: xmm10 ^= xmm1
10536# asm 1: pxor <xmm1=int6464#2,<xmm10=int6464#11
10537# asm 2: pxor <xmm1=%xmm1,<xmm10=%xmm10
10538pxor %xmm1,%xmm10
10539
10540# qhasm: xmm9 ^= xmm5
10541# asm 1: pxor <xmm5=int6464#6,<xmm9=int6464#10
10542# asm 2: pxor <xmm5=%xmm5,<xmm9=%xmm9
10543pxor %xmm5,%xmm9
10544
10545# qhasm: xmm11 ^= xmm4
10546# asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#12
10547# asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm11
10548pxor %xmm4,%xmm11
10549
10550# qhasm: xmm12 ^= xmm6
10551# asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#13
10552# asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm12
10553pxor %xmm6,%xmm12
10554
10555# qhasm: xmm13 ^= xmm3
10556# asm 1: pxor <xmm3=int6464#4,<xmm13=int6464#14
10557# asm 2: pxor <xmm3=%xmm3,<xmm13=%xmm13
10558pxor %xmm3,%xmm13
10559
10560# qhasm: xmm11 ^= xmm5
10561# asm 1: pxor <xmm5=int6464#6,<xmm11=int6464#12
10562# asm 2: pxor <xmm5=%xmm5,<xmm11=%xmm11
10563pxor %xmm5,%xmm11
10564
10565# qhasm: xmm14 ^= xmm7
10566# asm 1: pxor <xmm7=int6464#8,<xmm14=int6464#15
10567# asm 2: pxor <xmm7=%xmm7,<xmm14=%xmm14
10568pxor %xmm7,%xmm14
10569
10570# qhasm: xmm15 ^= xmm2
10571# asm 1: pxor <xmm2=int6464#3,<xmm15=int6464#16
10572# asm 2: pxor <xmm2=%xmm2,<xmm15=%xmm15
10573pxor %xmm2,%xmm15
10574
10575# qhasm: xmm12 ^= xmm5
10576# asm 1: pxor <xmm5=int6464#6,<xmm12=int6464#13
10577# asm 2: pxor <xmm5=%xmm5,<xmm12=%xmm12
10578pxor %xmm5,%xmm12
10579
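# Two-dword rotation (pshufd $0x4E) completing this round's MixColumns.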
10580# qhasm: xmm0 = shuffle dwords of xmm0 by 0x4E
10581# asm 1: pshufd $0x4E,<xmm0=int6464#1,>xmm0=int6464#1
10582# asm 2: pshufd $0x4E,<xmm0=%xmm0,>xmm0=%xmm0
10583pshufd $0x4E,%xmm0,%xmm0
10584
10585# qhasm: xmm1 = shuffle dwords of xmm1 by 0x4E
10586# asm 1: pshufd $0x4E,<xmm1=int6464#2,>xmm1=int6464#2
10587# asm 2: pshufd $0x4E,<xmm1=%xmm1,>xmm1=%xmm1
10588pshufd $0x4E,%xmm1,%xmm1
10589
10590# qhasm: xmm4 = shuffle dwords of xmm4 by 0x4E
10591# asm 1: pshufd $0x4E,<xmm4=int6464#5,>xmm4=int6464#5
10592# asm 2: pshufd $0x4E,<xmm4=%xmm4,>xmm4=%xmm4
10593pshufd $0x4E,%xmm4,%xmm4
10594
10595# qhasm: xmm6 = shuffle dwords of xmm6 by 0x4E
10596# asm 1: pshufd $0x4E,<xmm6=int6464#7,>xmm6=int6464#7
10597# asm 2: pshufd $0x4E,<xmm6=%xmm6,>xmm6=%xmm6
10598pshufd $0x4E,%xmm6,%xmm6
10599
10600# qhasm: xmm3 = shuffle dwords of xmm3 by 0x4E
10601# asm 1: pshufd $0x4E,<xmm3=int6464#4,>xmm3=int6464#4
10602# asm 2: pshufd $0x4E,<xmm3=%xmm3,>xmm3=%xmm3
10603pshufd $0x4E,%xmm3,%xmm3
10604
10605# qhasm: xmm7 = shuffle dwords of xmm7 by 0x4E
10606# asm 1: pshufd $0x4E,<xmm7=int6464#8,>xmm7=int6464#8
10607# asm 2: pshufd $0x4E,<xmm7=%xmm7,>xmm7=%xmm7
10608pshufd $0x4E,%xmm7,%xmm7
10609
10610# qhasm: xmm2 = shuffle dwords of xmm2 by 0x4E
10611# asm 1: pshufd $0x4E,<xmm2=int6464#3,>xmm2=int6464#3
10612# asm 2: pshufd $0x4E,<xmm2=%xmm2,>xmm2=%xmm2
10613pshufd $0x4E,%xmm2,%xmm2
10614
10615# qhasm: xmm5 = shuffle dwords of xmm5 by 0x4E
10616# asm 1: pshufd $0x4E,<xmm5=int6464#6,>xmm5=int6464#6
10617# asm 2: pshufd $0x4E,<xmm5=%xmm5,>xmm5=%xmm5
10618pshufd $0x4E,%xmm5,%xmm5
10619
10620# qhasm: xmm8 ^= xmm0
10621# asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9
10622# asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8
10623pxor %xmm0,%xmm8
10624
10625# qhasm: xmm9 ^= xmm1
10626# asm 1: pxor <xmm1=int6464#2,<xmm9=int6464#10
10627# asm 2: pxor <xmm1=%xmm1,<xmm9=%xmm9
10628pxor %xmm1,%xmm9
10629
10630# qhasm: xmm10 ^= xmm4
10631# asm 1: pxor <xmm4=int6464#5,<xmm10=int6464#11
10632# asm 2: pxor <xmm4=%xmm4,<xmm10=%xmm10
10633pxor %xmm4,%xmm10
10634
10635# qhasm: xmm11 ^= xmm6
10636# asm 1: pxor <xmm6=int6464#7,<xmm11=int6464#12
10637# asm 2: pxor <xmm6=%xmm6,<xmm11=%xmm11
10638pxor %xmm6,%xmm11
10639
10640# qhasm: xmm12 ^= xmm3
10641# asm 1: pxor <xmm3=int6464#4,<xmm12=int6464#13
10642# asm 2: pxor <xmm3=%xmm3,<xmm12=%xmm12
10643pxor %xmm3,%xmm12
10644
10645# qhasm: xmm13 ^= xmm7
10646# asm 1: pxor <xmm7=int6464#8,<xmm13=int6464#14
10647# asm 2: pxor <xmm7=%xmm7,<xmm13=%xmm13
10648pxor %xmm7,%xmm13
10649
10650# qhasm: xmm14 ^= xmm2
10651# asm 1: pxor <xmm2=int6464#3,<xmm14=int6464#15
10652# asm 2: pxor <xmm2=%xmm2,<xmm14=%xmm14
10653pxor %xmm2,%xmm14
10654
10655# qhasm: xmm15 ^= xmm5
10656# asm 1: pxor <xmm5=int6464#6,<xmm15=int6464#16
10657# asm 2: pxor <xmm5=%xmm5,<xmm15=%xmm15
10658pxor %xmm5,%xmm15
10659
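# Key addition for the last round (bitsliced key at c + 1152, presumably
# round key 9). The shuffle constant switches from SR to SRM0, which
# apparently merges the final-round ShiftRows with the output bit
# ordering; the final AES round omits MixColumns.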
10660# qhasm: xmm8 ^= *(int128 *)(c + 1152)
10661# asm 1: pxor 1152(<c=int64#4),<xmm8=int6464#9
10662# asm 2: pxor 1152(<c=%rcx),<xmm8=%xmm8
10663pxor 1152(%rcx),%xmm8
10664
10665# qhasm: shuffle bytes of xmm8 by SRM0
10666# asm 1: pshufb SRM0,<xmm8=int6464#9
10667# asm 2: pshufb SRM0,<xmm8=%xmm8
10668pshufb SRM0,%xmm8
10669
10670# qhasm: xmm9 ^= *(int128 *)(c + 1168)
10671# asm 1: pxor 1168(<c=int64#4),<xmm9=int6464#10
10672# asm 2: pxor 1168(<c=%rcx),<xmm9=%xmm9
10673pxor 1168(%rcx),%xmm9
10674
10675# qhasm: shuffle bytes of xmm9 by SRM0
10676# asm 1: pshufb SRM0,<xmm9=int6464#10
10677# asm 2: pshufb SRM0,<xmm9=%xmm9
10678pshufb SRM0,%xmm9
10679
10680# qhasm: xmm10 ^= *(int128 *)(c + 1184)
10681# asm 1: pxor 1184(<c=int64#4),<xmm10=int6464#11
10682# asm 2: pxor 1184(<c=%rcx),<xmm10=%xmm10
10683pxor 1184(%rcx),%xmm10
10684
10685# qhasm: shuffle bytes of xmm10 by SRM0
10686# asm 1: pshufb SRM0,<xmm10=int6464#11
10687# asm 2: pshufb SRM0,<xmm10=%xmm10
10688pshufb SRM0,%xmm10
10689
10690# qhasm: xmm11 ^= *(int128 *)(c + 1200)
10691# asm 1: pxor 1200(<c=int64#4),<xmm11=int6464#12
10692# asm 2: pxor 1200(<c=%rcx),<xmm11=%xmm11
10693pxor 1200(%rcx),%xmm11
10694
10695# qhasm: shuffle bytes of xmm11 by SRM0
10696# asm 1: pshufb SRM0,<xmm11=int6464#12
10697# asm 2: pshufb SRM0,<xmm11=%xmm11
10698pshufb SRM0,%xmm11
10699
10700# qhasm: xmm12 ^= *(int128 *)(c + 1216)
10701# asm 1: pxor 1216(<c=int64#4),<xmm12=int6464#13
10702# asm 2: pxor 1216(<c=%rcx),<xmm12=%xmm12
10703pxor 1216(%rcx),%xmm12
10704
10705# qhasm: shuffle bytes of xmm12 by SRM0
10706# asm 1: pshufb SRM0,<xmm12=int6464#13
10707# asm 2: pshufb SRM0,<xmm12=%xmm12
10708pshufb SRM0,%xmm12
10709
10710# qhasm: xmm13 ^= *(int128 *)(c + 1232)
10711# asm 1: pxor 1232(<c=int64#4),<xmm13=int6464#14
10712# asm 2: pxor 1232(<c=%rcx),<xmm13=%xmm13
10713pxor 1232(%rcx),%xmm13
10714
10715# qhasm: shuffle bytes of xmm13 by SRM0
10716# asm 1: pshufb SRM0,<xmm13=int6464#14
10717# asm 2: pshufb SRM0,<xmm13=%xmm13
10718pshufb SRM0,%xmm13
10719
10720# qhasm: xmm14 ^= *(int128 *)(c + 1248)
10721# asm 1: pxor 1248(<c=int64#4),<xmm14=int6464#15
10722# asm 2: pxor 1248(<c=%rcx),<xmm14=%xmm14
10723pxor 1248(%rcx),%xmm14
10724
10725# qhasm: shuffle bytes of xmm14 by SRM0
10726# asm 1: pshufb SRM0,<xmm14=int6464#15
10727# asm 2: pshufb SRM0,<xmm14=%xmm14
10728pshufb SRM0,%xmm14
10729
10730# qhasm: xmm15 ^= *(int128 *)(c + 1264)
10731# asm 1: pxor 1264(<c=int64#4),<xmm15=int6464#16
10732# asm 2: pxor 1264(<c=%rcx),<xmm15=%xmm15
10733pxor 1264(%rcx),%xmm15
10734
10735# qhasm: shuffle bytes of xmm15 by SRM0
10736# asm 1: pshufb SRM0,<xmm15=int6464#16
10737# asm 2: pshufb SRM0,<xmm15=%xmm15
10738pshufb SRM0,%xmm15
10739
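# Final-round S-box, input linear layer (same pxor basis change as above).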
10740# qhasm: xmm13 ^= xmm14
10741# asm 1: pxor <xmm14=int6464#15,<xmm13=int6464#14
10742# asm 2: pxor <xmm14=%xmm14,<xmm13=%xmm13
10743pxor %xmm14,%xmm13
10744
10745# qhasm: xmm10 ^= xmm9
10746# asm 1: pxor <xmm9=int6464#10,<xmm10=int6464#11
10747# asm 2: pxor <xmm9=%xmm9,<xmm10=%xmm10
10748pxor %xmm9,%xmm10
10749
10750# qhasm: xmm13 ^= xmm8
10751# asm 1: pxor <xmm8=int6464#9,<xmm13=int6464#14
10752# asm 2: pxor <xmm8=%xmm8,<xmm13=%xmm13
10753pxor %xmm8,%xmm13
10754
10755# qhasm: xmm14 ^= xmm10
10756# asm 1: pxor <xmm10=int6464#11,<xmm14=int6464#15
10757# asm 2: pxor <xmm10=%xmm10,<xmm14=%xmm14
10758pxor %xmm10,%xmm14
10759
10760# qhasm: xmm11 ^= xmm8
10761# asm 1: pxor <xmm8=int6464#9,<xmm11=int6464#12
10762# asm 2: pxor <xmm8=%xmm8,<xmm11=%xmm11
10763pxor %xmm8,%xmm11
10764
10765# qhasm: xmm14 ^= xmm11
10766# asm 1: pxor <xmm11=int6464#12,<xmm14=int6464#15
10767# asm 2: pxor <xmm11=%xmm11,<xmm14=%xmm14
10768pxor %xmm11,%xmm14
10769
10770# qhasm: xmm11 ^= xmm15
10771# asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#12
10772# asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm11
10773pxor %xmm15,%xmm11
10774
10775# qhasm: xmm11 ^= xmm12
10776# asm 1: pxor <xmm12=int6464#13,<xmm11=int6464#12
10777# asm 2: pxor <xmm12=%xmm12,<xmm11=%xmm11
10778pxor %xmm12,%xmm11
10779
10780# qhasm: xmm15 ^= xmm13
10781# asm 1: pxor <xmm13=int6464#14,<xmm15=int6464#16
10782# asm 2: pxor <xmm13=%xmm13,<xmm15=%xmm15
10783pxor %xmm13,%xmm15
10784
10785# qhasm: xmm11 ^= xmm9
10786# asm 1: pxor <xmm9=int6464#10,<xmm11=int6464#12
10787# asm 2: pxor <xmm9=%xmm9,<xmm11=%xmm11
10788pxor %xmm9,%xmm11
10789
10790# qhasm: xmm12 ^= xmm13
10791# asm 1: pxor <xmm13=int6464#14,<xmm12=int6464#13
10792# asm 2: pxor <xmm13=%xmm13,<xmm12=%xmm12
10793pxor %xmm13,%xmm12
10794
10795# qhasm: xmm10 ^= xmm15
10796# asm 1: pxor <xmm15=int6464#16,<xmm10=int6464#11
10797# asm 2: pxor <xmm15=%xmm15,<xmm10=%xmm10
10798pxor %xmm15,%xmm10
10799
10800# qhasm: xmm9 ^= xmm13
10801# asm 1: pxor <xmm13=int6464#14,<xmm9=int6464#10
10802# asm 2: pxor <xmm13=%xmm13,<xmm9=%xmm9
10803pxor %xmm13,%xmm9
10804
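# Final-round S-box, nonlinear core: the same constant-time GF(2^8)
# inversion network as in the previous rounds.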
10805# qhasm: xmm3 = xmm15
10806# asm 1: movdqa <xmm15=int6464#16,>xmm3=int6464#1
10807# asm 2: movdqa <xmm15=%xmm15,>xmm3=%xmm0
10808movdqa %xmm15,%xmm0
10809
10810# qhasm: xmm2 = xmm9
10811# asm 1: movdqa <xmm9=int6464#10,>xmm2=int6464#2
10812# asm 2: movdqa <xmm9=%xmm9,>xmm2=%xmm1
10813movdqa %xmm9,%xmm1
10814
10815# qhasm: xmm1 = xmm13
10816# asm 1: movdqa <xmm13=int6464#14,>xmm1=int6464#3
10817# asm 2: movdqa <xmm13=%xmm13,>xmm1=%xmm2
10818movdqa %xmm13,%xmm2
10819
10820# qhasm: xmm5 = xmm10
10821# asm 1: movdqa <xmm10=int6464#11,>xmm5=int6464#4
10822# asm 2: movdqa <xmm10=%xmm10,>xmm5=%xmm3
10823movdqa %xmm10,%xmm3
10824
10825# qhasm: xmm4 = xmm14
10826# asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#5
10827# asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm4
10828movdqa %xmm14,%xmm4
10829
10830# qhasm: xmm3 ^= xmm12
10831# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#1
10832# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm0
10833pxor %xmm12,%xmm0
10834
10835# qhasm: xmm2 ^= xmm10
10836# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#2
10837# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm1
10838pxor %xmm10,%xmm1
10839
10840# qhasm: xmm1 ^= xmm11
10841# asm 1: pxor <xmm11=int6464#12,<xmm1=int6464#3
10842# asm 2: pxor <xmm11=%xmm11,<xmm1=%xmm2
10843pxor %xmm11,%xmm2
10844
10845# qhasm: xmm5 ^= xmm12
10846# asm 1: pxor <xmm12=int6464#13,<xmm5=int6464#4
10847# asm 2: pxor <xmm12=%xmm12,<xmm5=%xmm3
10848pxor %xmm12,%xmm3
10849
10850# qhasm: xmm4 ^= xmm8
10851# asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#5
10852# asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm4
10853pxor %xmm8,%xmm4
10854
10855# qhasm: xmm6 = xmm3
10856# asm 1: movdqa <xmm3=int6464#1,>xmm6=int6464#6
10857# asm 2: movdqa <xmm3=%xmm0,>xmm6=%xmm5
10858movdqa %xmm0,%xmm5
10859
10860# qhasm: xmm0 = xmm2
10861# asm 1: movdqa <xmm2=int6464#2,>xmm0=int6464#7
10862# asm 2: movdqa <xmm2=%xmm1,>xmm0=%xmm6
10863movdqa %xmm1,%xmm6
10864
10865# qhasm: xmm7 = xmm3
10866# asm 1: movdqa <xmm3=int6464#1,>xmm7=int6464#8
10867# asm 2: movdqa <xmm3=%xmm0,>xmm7=%xmm7
10868movdqa %xmm0,%xmm7
10869
10870# qhasm: xmm2 |= xmm1
10871# asm 1: por <xmm1=int6464#3,<xmm2=int6464#2
10872# asm 2: por <xmm1=%xmm2,<xmm2=%xmm1
10873por %xmm2,%xmm1
10874
10875# qhasm: xmm3 |= xmm4
10876# asm 1: por <xmm4=int6464#5,<xmm3=int6464#1
10877# asm 2: por <xmm4=%xmm4,<xmm3=%xmm0
10878por %xmm4,%xmm0
10879
10880# qhasm: xmm7 ^= xmm0
10881# asm 1: pxor <xmm0=int6464#7,<xmm7=int6464#8
10882# asm 2: pxor <xmm0=%xmm6,<xmm7=%xmm7
10883pxor %xmm6,%xmm7
10884
10885# qhasm: xmm6 &= xmm4
10886# asm 1: pand <xmm4=int6464#5,<xmm6=int6464#6
10887# asm 2: pand <xmm4=%xmm4,<xmm6=%xmm5
10888pand %xmm4,%xmm5
10889
10890# qhasm: xmm0 &= xmm1
10891# asm 1: pand <xmm1=int6464#3,<xmm0=int6464#7
10892# asm 2: pand <xmm1=%xmm2,<xmm0=%xmm6
10893pand %xmm2,%xmm6
10894
10895# qhasm: xmm4 ^= xmm1
10896# asm 1: pxor <xmm1=int6464#3,<xmm4=int6464#5
10897# asm 2: pxor <xmm1=%xmm2,<xmm4=%xmm4
10898pxor %xmm2,%xmm4
10899
10900# qhasm: xmm7 &= xmm4
10901# asm 1: pand <xmm4=int6464#5,<xmm7=int6464#8
10902# asm 2: pand <xmm4=%xmm4,<xmm7=%xmm7
10903pand %xmm4,%xmm7
10904
10905# qhasm: xmm4 = xmm11
10906# asm 1: movdqa <xmm11=int6464#12,>xmm4=int6464#3
10907# asm 2: movdqa <xmm11=%xmm11,>xmm4=%xmm2
10908movdqa %xmm11,%xmm2
10909
10910# qhasm: xmm4 ^= xmm8
10911# asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#3
10912# asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm2
10913pxor %xmm8,%xmm2
10914
10915# qhasm: xmm5 &= xmm4
10916# asm 1: pand <xmm4=int6464#3,<xmm5=int6464#4
10917# asm 2: pand <xmm4=%xmm2,<xmm5=%xmm3
10918pand %xmm2,%xmm3
10919
10920# qhasm: xmm3 ^= xmm5
10921# asm 1: pxor <xmm5=int6464#4,<xmm3=int6464#1
10922# asm 2: pxor <xmm5=%xmm3,<xmm3=%xmm0
10923pxor %xmm3,%xmm0
10924
10925# qhasm: xmm2 ^= xmm5
10926# asm 1: pxor <xmm5=int6464#4,<xmm2=int6464#2
10927# asm 2: pxor <xmm5=%xmm3,<xmm2=%xmm1
10928pxor %xmm3,%xmm1
10929
10930# qhasm: xmm5 = xmm15
10931# asm 1: movdqa <xmm15=int6464#16,>xmm5=int6464#3
10932# asm 2: movdqa <xmm15=%xmm15,>xmm5=%xmm2
10933movdqa %xmm15,%xmm2
10934
10935# qhasm: xmm5 ^= xmm9
10936# asm 1: pxor <xmm9=int6464#10,<xmm5=int6464#3
10937# asm 2: pxor <xmm9=%xmm9,<xmm5=%xmm2
10938pxor %xmm9,%xmm2
10939
10940# qhasm: xmm4 = xmm13
10941# asm 1: movdqa <xmm13=int6464#14,>xmm4=int6464#4
10942# asm 2: movdqa <xmm13=%xmm13,>xmm4=%xmm3
10943movdqa %xmm13,%xmm3
10944
10945# qhasm: xmm1 = xmm5
10946# asm 1: movdqa <xmm5=int6464#3,>xmm1=int6464#5
10947# asm 2: movdqa <xmm5=%xmm2,>xmm1=%xmm4
10948movdqa %xmm2,%xmm4
10949
10950# qhasm: xmm4 ^= xmm14
10951# asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#4
10952# asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm3
10953pxor %xmm14,%xmm3
10954
10955# qhasm: xmm1 |= xmm4
10956# asm 1: por <xmm4=int6464#4,<xmm1=int6464#5
10957# asm 2: por <xmm4=%xmm3,<xmm1=%xmm4
10958por %xmm3,%xmm4
10959
10960# qhasm: xmm5 &= xmm4
10961# asm 1: pand <xmm4=int6464#4,<xmm5=int6464#3
10962# asm 2: pand <xmm4=%xmm3,<xmm5=%xmm2
10963pand %xmm3,%xmm2
10964
10965# qhasm: xmm0 ^= xmm5
10966# asm 1: pxor <xmm5=int6464#3,<xmm0=int6464#7
10967# asm 2: pxor <xmm5=%xmm2,<xmm0=%xmm6
10968pxor %xmm2,%xmm6
10969
10970# qhasm: xmm3 ^= xmm7
10971# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#1
10972# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm0
10973pxor %xmm7,%xmm0
10974
10975# qhasm: xmm2 ^= xmm6
10976# asm 1: pxor <xmm6=int6464#6,<xmm2=int6464#2
10977# asm 2: pxor <xmm6=%xmm5,<xmm2=%xmm1
10978pxor %xmm5,%xmm1
10979
10980# qhasm: xmm1 ^= xmm7
10981# asm 1: pxor <xmm7=int6464#8,<xmm1=int6464#5
10982# asm 2: pxor <xmm7=%xmm7,<xmm1=%xmm4
10983pxor %xmm7,%xmm4
10984
10985# qhasm: xmm0 ^= xmm6
10986# asm 1: pxor <xmm6=int6464#6,<xmm0=int6464#7
10987# asm 2: pxor <xmm6=%xmm5,<xmm0=%xmm6
10988pxor %xmm5,%xmm6
10989
10990# qhasm: xmm1 ^= xmm6
10991# asm 1: pxor <xmm6=int6464#6,<xmm1=int6464#5
10992# asm 2: pxor <xmm6=%xmm5,<xmm1=%xmm4
10993pxor %xmm5,%xmm4
10994
10995# qhasm: xmm4 = xmm10
10996# asm 1: movdqa <xmm10=int6464#11,>xmm4=int6464#3
10997# asm 2: movdqa <xmm10=%xmm10,>xmm4=%xmm2
10998movdqa %xmm10,%xmm2
10999
11000# qhasm: xmm5 = xmm12
11001# asm 1: movdqa <xmm12=int6464#13,>xmm5=int6464#4
11002# asm 2: movdqa <xmm12=%xmm12,>xmm5=%xmm3
11003movdqa %xmm12,%xmm3
11004
11005# qhasm: xmm6 = xmm9
11006# asm 1: movdqa <xmm9=int6464#10,>xmm6=int6464#6
11007# asm 2: movdqa <xmm9=%xmm9,>xmm6=%xmm5
11008movdqa %xmm9,%xmm5
11009
11010# qhasm: xmm7 = xmm15
11011# asm 1: movdqa <xmm15=int6464#16,>xmm7=int6464#8
11012# asm 2: movdqa <xmm15=%xmm15,>xmm7=%xmm7
11013movdqa %xmm15,%xmm7
11014
11015# qhasm: xmm4 &= xmm11
11016# asm 1: pand <xmm11=int6464#12,<xmm4=int6464#3
11017# asm 2: pand <xmm11=%xmm11,<xmm4=%xmm2
11018pand %xmm11,%xmm2
11019
11020# qhasm: xmm5 &= xmm8
11021# asm 1: pand <xmm8=int6464#9,<xmm5=int6464#4
11022# asm 2: pand <xmm8=%xmm8,<xmm5=%xmm3
11023pand %xmm8,%xmm3
11024
11025# qhasm: xmm6 &= xmm13
11026# asm 1: pand <xmm13=int6464#14,<xmm6=int6464#6
11027# asm 2: pand <xmm13=%xmm13,<xmm6=%xmm5
11028pand %xmm13,%xmm5
11029
11030# qhasm: xmm7 |= xmm14
11031# asm 1: por <xmm14=int6464#15,<xmm7=int6464#8
11032# asm 2: por <xmm14=%xmm14,<xmm7=%xmm7
11033por %xmm14,%xmm7
11034
11035# qhasm: xmm3 ^= xmm4
11036# asm 1: pxor <xmm4=int6464#3,<xmm3=int6464#1
11037# asm 2: pxor <xmm4=%xmm2,<xmm3=%xmm0
11038pxor %xmm2,%xmm0
11039
11040# qhasm: xmm2 ^= xmm5
11041# asm 1: pxor <xmm5=int6464#4,<xmm2=int6464#2
11042# asm 2: pxor <xmm5=%xmm3,<xmm2=%xmm1
11043pxor %xmm3,%xmm1
11044
11045# qhasm: xmm1 ^= xmm6
11046# asm 1: pxor <xmm6=int6464#6,<xmm1=int6464#5
11047# asm 2: pxor <xmm6=%xmm5,<xmm1=%xmm4
11048pxor %xmm5,%xmm4
11049
11050# qhasm: xmm0 ^= xmm7
11051# asm 1: pxor <xmm7=int6464#8,<xmm0=int6464#7
11052# asm 2: pxor <xmm7=%xmm7,<xmm0=%xmm6
11053pxor %xmm7,%xmm6
11054
11055# qhasm: xmm4 = xmm3
11056# asm 1: movdqa <xmm3=int6464#1,>xmm4=int6464#3
11057# asm 2: movdqa <xmm3=%xmm0,>xmm4=%xmm2
11058movdqa %xmm0,%xmm2
11059
11060# qhasm: xmm4 ^= xmm2
11061# asm 1: pxor <xmm2=int6464#2,<xmm4=int6464#3
11062# asm 2: pxor <xmm2=%xmm1,<xmm4=%xmm2
11063pxor %xmm1,%xmm2
11064
11065# qhasm: xmm3 &= xmm1
11066# asm 1: pand <xmm1=int6464#5,<xmm3=int6464#1
11067# asm 2: pand <xmm1=%xmm4,<xmm3=%xmm0
11068pand %xmm4,%xmm0
11069
11070# qhasm: xmm6 = xmm0
11071# asm 1: movdqa <xmm0=int6464#7,>xmm6=int6464#4
11072# asm 2: movdqa <xmm0=%xmm6,>xmm6=%xmm3
11073movdqa %xmm6,%xmm3
11074
11075# qhasm: xmm6 ^= xmm3
11076# asm 1: pxor <xmm3=int6464#1,<xmm6=int6464#4
11077# asm 2: pxor <xmm3=%xmm0,<xmm6=%xmm3
11078pxor %xmm0,%xmm3
11079
11080# qhasm: xmm7 = xmm4
11081# asm 1: movdqa <xmm4=int6464#3,>xmm7=int6464#6
11082# asm 2: movdqa <xmm4=%xmm2,>xmm7=%xmm5
11083movdqa %xmm2,%xmm5
11084
11085# qhasm: xmm7 &= xmm6
11086# asm 1: pand <xmm6=int6464#4,<xmm7=int6464#6
11087# asm 2: pand <xmm6=%xmm3,<xmm7=%xmm5
11088pand %xmm3,%xmm5
11089
11090# qhasm: xmm7 ^= xmm2
11091# asm 1: pxor <xmm2=int6464#2,<xmm7=int6464#6
11092# asm 2: pxor <xmm2=%xmm1,<xmm7=%xmm5
11093pxor %xmm1,%xmm5
11094
11095# qhasm: xmm5 = xmm1
11096# asm 1: movdqa <xmm1=int6464#5,>xmm5=int6464#8
11097# asm 2: movdqa <xmm1=%xmm4,>xmm5=%xmm7
11098movdqa %xmm4,%xmm7
11099
11100# qhasm: xmm5 ^= xmm0
11101# asm 1: pxor <xmm0=int6464#7,<xmm5=int6464#8
11102# asm 2: pxor <xmm0=%xmm6,<xmm5=%xmm7
11103pxor %xmm6,%xmm7
11104
11105# qhasm: xmm3 ^= xmm2
11106# asm 1: pxor <xmm2=int6464#2,<xmm3=int6464#1
11107# asm 2: pxor <xmm2=%xmm1,<xmm3=%xmm0
11108pxor %xmm1,%xmm0
11109
11110# qhasm: xmm5 &= xmm3
11111# asm 1: pand <xmm3=int6464#1,<xmm5=int6464#8
11112# asm 2: pand <xmm3=%xmm0,<xmm5=%xmm7
11113pand %xmm0,%xmm7
11114
11115# qhasm: xmm5 ^= xmm0
11116# asm 1: pxor <xmm0=int6464#7,<xmm5=int6464#8
11117# asm 2: pxor <xmm0=%xmm6,<xmm5=%xmm7
11118pxor %xmm6,%xmm7
11119
11120# qhasm: xmm1 ^= xmm5
11121# asm 1: pxor <xmm5=int6464#8,<xmm1=int6464#5
11122# asm 2: pxor <xmm5=%xmm7,<xmm1=%xmm4
11123pxor %xmm7,%xmm4
11124
11125# qhasm: xmm2 = xmm6
11126# asm 1: movdqa <xmm6=int6464#4,>xmm2=int6464#1
11127# asm 2: movdqa <xmm6=%xmm3,>xmm2=%xmm0
11128movdqa %xmm3,%xmm0
11129
11130# qhasm: xmm2 ^= xmm5
11131# asm 1: pxor <xmm5=int6464#8,<xmm2=int6464#1
11132# asm 2: pxor <xmm5=%xmm7,<xmm2=%xmm0
11133pxor %xmm7,%xmm0
11134
11135# qhasm: xmm2 &= xmm0
11136# asm 1: pand <xmm0=int6464#7,<xmm2=int6464#1
11137# asm 2: pand <xmm0=%xmm6,<xmm2=%xmm0
11138pand %xmm6,%xmm0
11139
11140# qhasm: xmm1 ^= xmm2
11141# asm 1: pxor <xmm2=int6464#1,<xmm1=int6464#5
11142# asm 2: pxor <xmm2=%xmm0,<xmm1=%xmm4
11143pxor %xmm0,%xmm4
11144
11145# qhasm: xmm6 ^= xmm2
11146# asm 1: pxor <xmm2=int6464#1,<xmm6=int6464#4
11147# asm 2: pxor <xmm2=%xmm0,<xmm6=%xmm3
11148pxor %xmm0,%xmm3
11149
11150# qhasm: xmm6 &= xmm7
11151# asm 1: pand <xmm7=int6464#6,<xmm6=int6464#4
11152# asm 2: pand <xmm7=%xmm5,<xmm6=%xmm3
11153pand %xmm5,%xmm3
11154
11155# qhasm: xmm6 ^= xmm4
11156# asm 1: pxor <xmm4=int6464#3,<xmm6=int6464#4
11157# asm 2: pxor <xmm4=%xmm2,<xmm6=%xmm3
11158pxor %xmm2,%xmm3
11159
11160# qhasm: xmm4 = xmm14
11161# asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#1
11162# asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm0
11163movdqa %xmm14,%xmm0
11164
11165# qhasm: xmm0 = xmm13
11166# asm 1: movdqa <xmm13=int6464#14,>xmm0=int6464#2
11167# asm 2: movdqa <xmm13=%xmm13,>xmm0=%xmm1
11168movdqa %xmm13,%xmm1
11169
11170# qhasm: xmm2 = xmm7
11171# asm 1: movdqa <xmm7=int6464#6,>xmm2=int6464#3
11172# asm 2: movdqa <xmm7=%xmm5,>xmm2=%xmm2
11173movdqa %xmm5,%xmm2
11174
11175# qhasm: xmm2 ^= xmm6
11176# asm 1: pxor <xmm6=int6464#4,<xmm2=int6464#3
11177# asm 2: pxor <xmm6=%xmm3,<xmm2=%xmm2
11178pxor %xmm3,%xmm2
11179
11180# qhasm: xmm2 &= xmm14
11181# asm 1: pand <xmm14=int6464#15,<xmm2=int6464#3
11182# asm 2: pand <xmm14=%xmm14,<xmm2=%xmm2
11183pand %xmm14,%xmm2
11184
11185# qhasm: xmm14 ^= xmm13
11186# asm 1: pxor <xmm13=int6464#14,<xmm14=int6464#15
11187# asm 2: pxor <xmm13=%xmm13,<xmm14=%xmm14
11188pxor %xmm13,%xmm14
11189
11190# qhasm: xmm14 &= xmm6
11191# asm 1: pand <xmm6=int6464#4,<xmm14=int6464#15
11192# asm 2: pand <xmm6=%xmm3,<xmm14=%xmm14
11193pand %xmm3,%xmm14
11194
11195# qhasm: xmm13 &= xmm7
11196# asm 1: pand <xmm7=int6464#6,<xmm13=int6464#14
11197# asm 2: pand <xmm7=%xmm5,<xmm13=%xmm13
11198pand %xmm5,%xmm13
11199
11200# qhasm: xmm14 ^= xmm13
11201# asm 1: pxor <xmm13=int6464#14,<xmm14=int6464#15
11202# asm 2: pxor <xmm13=%xmm13,<xmm14=%xmm14
11203pxor %xmm13,%xmm14
11204
11205# qhasm: xmm13 ^= xmm2
11206# asm 1: pxor <xmm2=int6464#3,<xmm13=int6464#14
11207# asm 2: pxor <xmm2=%xmm2,<xmm13=%xmm13
11208pxor %xmm2,%xmm13
11209
11210# qhasm: xmm4 ^= xmm8
11211# asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#1
11212# asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm0
11213pxor %xmm8,%xmm0
11214
11215# qhasm: xmm0 ^= xmm11
11216# asm 1: pxor <xmm11=int6464#12,<xmm0=int6464#2
11217# asm 2: pxor <xmm11=%xmm11,<xmm0=%xmm1
11218pxor %xmm11,%xmm1
11219
11220# qhasm: xmm7 ^= xmm5
11221# asm 1: pxor <xmm5=int6464#8,<xmm7=int6464#6
11222# asm 2: pxor <xmm5=%xmm7,<xmm7=%xmm5
11223pxor %xmm7,%xmm5
11224
11225# qhasm: xmm6 ^= xmm1
11226# asm 1: pxor <xmm1=int6464#5,<xmm6=int6464#4
11227# asm 2: pxor <xmm1=%xmm4,<xmm6=%xmm3
11228pxor %xmm4,%xmm3
11229
11230# qhasm: xmm3 = xmm7
11231# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
11232# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
11233movdqa %xmm5,%xmm2
11234
11235# qhasm: xmm3 ^= xmm6
11236# asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3
11237# asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2
11238pxor %xmm3,%xmm2
11239
11240# qhasm: xmm3 &= xmm4
11241# asm 1: pand <xmm4=int6464#1,<xmm3=int6464#3
11242# asm 2: pand <xmm4=%xmm0,<xmm3=%xmm2
11243pand %xmm0,%xmm2
11244
11245# qhasm: xmm4 ^= xmm0
11246# asm 1: pxor <xmm0=int6464#2,<xmm4=int6464#1
11247# asm 2: pxor <xmm0=%xmm1,<xmm4=%xmm0
11248pxor %xmm1,%xmm0
11249
11250# qhasm: xmm4 &= xmm6
11251# asm 1: pand <xmm6=int6464#4,<xmm4=int6464#1
11252# asm 2: pand <xmm6=%xmm3,<xmm4=%xmm0
11253pand %xmm3,%xmm0
11254
11255# qhasm: xmm0 &= xmm7
11256# asm 1: pand <xmm7=int6464#6,<xmm0=int6464#2
11257# asm 2: pand <xmm7=%xmm5,<xmm0=%xmm1
11258pand %xmm5,%xmm1
11259
11260# qhasm: xmm0 ^= xmm4
11261# asm 1: pxor <xmm4=int6464#1,<xmm0=int6464#2
11262# asm 2: pxor <xmm4=%xmm0,<xmm0=%xmm1
11263pxor %xmm0,%xmm1
11264
11265# qhasm: xmm4 ^= xmm3
11266# asm 1: pxor <xmm3=int6464#3,<xmm4=int6464#1
11267# asm 2: pxor <xmm3=%xmm2,<xmm4=%xmm0
11268pxor %xmm2,%xmm0
11269
11270# qhasm: xmm2 = xmm5
11271# asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3
11272# asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2
11273movdqa %xmm7,%xmm2
11274
11275# qhasm: xmm2 ^= xmm1
11276# asm 1: pxor <xmm1=int6464#5,<xmm2=int6464#3
11277# asm 2: pxor <xmm1=%xmm4,<xmm2=%xmm2
11278pxor %xmm4,%xmm2
11279
11280# qhasm: xmm2 &= xmm8
11281# asm 1: pand <xmm8=int6464#9,<xmm2=int6464#3
11282# asm 2: pand <xmm8=%xmm8,<xmm2=%xmm2
11283pand %xmm8,%xmm2
11284
11285# qhasm: xmm8 ^= xmm11
11286# asm 1: pxor <xmm11=int6464#12,<xmm8=int6464#9
11287# asm 2: pxor <xmm11=%xmm11,<xmm8=%xmm8
11288pxor %xmm11,%xmm8
11289
11290# qhasm: xmm8 &= xmm1
11291# asm 1: pand <xmm1=int6464#5,<xmm8=int6464#9
11292# asm 2: pand <xmm1=%xmm4,<xmm8=%xmm8
11293pand %xmm4,%xmm8
11294
11295# qhasm: xmm11 &= xmm5
11296# asm 1: pand <xmm5=int6464#8,<xmm11=int6464#12
11297# asm 2: pand <xmm5=%xmm7,<xmm11=%xmm11
11298pand %xmm7,%xmm11
11299
11300# qhasm: xmm8 ^= xmm11
11301# asm 1: pxor <xmm11=int6464#12,<xmm8=int6464#9
11302# asm 2: pxor <xmm11=%xmm11,<xmm8=%xmm8
11303pxor %xmm11,%xmm8
11304
11305# qhasm: xmm11 ^= xmm2
11306# asm 1: pxor <xmm2=int6464#3,<xmm11=int6464#12
11307# asm 2: pxor <xmm2=%xmm2,<xmm11=%xmm11
11308pxor %xmm2,%xmm11
11309
11310# qhasm: xmm14 ^= xmm4
11311# asm 1: pxor <xmm4=int6464#1,<xmm14=int6464#15
11312# asm 2: pxor <xmm4=%xmm0,<xmm14=%xmm14
11313pxor %xmm0,%xmm14
11314
11315# qhasm: xmm8 ^= xmm4
11316# asm 1: pxor <xmm4=int6464#1,<xmm8=int6464#9
11317# asm 2: pxor <xmm4=%xmm0,<xmm8=%xmm8
11318pxor %xmm0,%xmm8
11319
11320# qhasm: xmm13 ^= xmm0
11321# asm 1: pxor <xmm0=int6464#2,<xmm13=int6464#14
11322# asm 2: pxor <xmm0=%xmm1,<xmm13=%xmm13
11323pxor %xmm1,%xmm13
11324
11325# qhasm: xmm11 ^= xmm0
11326# asm 1: pxor <xmm0=int6464#2,<xmm11=int6464#12
11327# asm 2: pxor <xmm0=%xmm1,<xmm11=%xmm11
11328pxor %xmm1,%xmm11
11329
11330# qhasm: xmm4 = xmm15
11331# asm 1: movdqa <xmm15=int6464#16,>xmm4=int6464#1
11332# asm 2: movdqa <xmm15=%xmm15,>xmm4=%xmm0
11333movdqa %xmm15,%xmm0
11334
11335# qhasm: xmm0 = xmm9
11336# asm 1: movdqa <xmm9=int6464#10,>xmm0=int6464#2
11337# asm 2: movdqa <xmm9=%xmm9,>xmm0=%xmm1
11338movdqa %xmm9,%xmm1
11339
11340# qhasm: xmm4 ^= xmm12
11341# asm 1: pxor <xmm12=int6464#13,<xmm4=int6464#1
11342# asm 2: pxor <xmm12=%xmm12,<xmm4=%xmm0
11343pxor %xmm12,%xmm0
11344
11345# qhasm: xmm0 ^= xmm10
11346# asm 1: pxor <xmm10=int6464#11,<xmm0=int6464#2
11347# asm 2: pxor <xmm10=%xmm10,<xmm0=%xmm1
11348pxor %xmm10,%xmm1
11349
11350# qhasm: xmm3 = xmm7
11351# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
11352# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
11353movdqa %xmm5,%xmm2
11354
11355# qhasm: xmm3 ^= xmm6
11356# asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3
11357# asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2
11358pxor %xmm3,%xmm2
11359
11360# qhasm: xmm3 &= xmm4
11361# asm 1: pand <xmm4=int6464#1,<xmm3=int6464#3
11362# asm 2: pand <xmm4=%xmm0,<xmm3=%xmm2
11363pand %xmm0,%xmm2
11364
11365# qhasm: xmm4 ^= xmm0
11366# asm 1: pxor <xmm0=int6464#2,<xmm4=int6464#1
11367# asm 2: pxor <xmm0=%xmm1,<xmm4=%xmm0
11368pxor %xmm1,%xmm0
11369
11370# qhasm: xmm4 &= xmm6
11371# asm 1: pand <xmm6=int6464#4,<xmm4=int6464#1
11372# asm 2: pand <xmm6=%xmm3,<xmm4=%xmm0
11373pand %xmm3,%xmm0
11374
11375# qhasm: xmm0 &= xmm7
11376# asm 1: pand <xmm7=int6464#6,<xmm0=int6464#2
11377# asm 2: pand <xmm7=%xmm5,<xmm0=%xmm1
11378pand %xmm5,%xmm1
11379
11380# qhasm: xmm0 ^= xmm4
11381# asm 1: pxor <xmm4=int6464#1,<xmm0=int6464#2
11382# asm 2: pxor <xmm4=%xmm0,<xmm0=%xmm1
11383pxor %xmm0,%xmm1
11384
11385# qhasm: xmm4 ^= xmm3
11386# asm 1: pxor <xmm3=int6464#3,<xmm4=int6464#1
11387# asm 2: pxor <xmm3=%xmm2,<xmm4=%xmm0
11388pxor %xmm2,%xmm0
11389
11390# qhasm: xmm2 = xmm5
11391# asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3
11392# asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2
11393movdqa %xmm7,%xmm2
11394
11395# qhasm: xmm2 ^= xmm1
11396# asm 1: pxor <xmm1=int6464#5,<xmm2=int6464#3
11397# asm 2: pxor <xmm1=%xmm4,<xmm2=%xmm2
11398pxor %xmm4,%xmm2
11399
11400# qhasm: xmm2 &= xmm12
11401# asm 1: pand <xmm12=int6464#13,<xmm2=int6464#3
11402# asm 2: pand <xmm12=%xmm12,<xmm2=%xmm2
11403pand %xmm12,%xmm2
11404
11405# qhasm: xmm12 ^= xmm10
11406# asm 1: pxor <xmm10=int6464#11,<xmm12=int6464#13
11407# asm 2: pxor <xmm10=%xmm10,<xmm12=%xmm12
11408pxor %xmm10,%xmm12
11409
11410# qhasm: xmm12 &= xmm1
11411# asm 1: pand <xmm1=int6464#5,<xmm12=int6464#13
11412# asm 2: pand <xmm1=%xmm4,<xmm12=%xmm12
11413pand %xmm4,%xmm12
11414
11415# qhasm: xmm10 &= xmm5
11416# asm 1: pand <xmm5=int6464#8,<xmm10=int6464#11
11417# asm 2: pand <xmm5=%xmm7,<xmm10=%xmm10
11418pand %xmm7,%xmm10
11419
11420# qhasm: xmm12 ^= xmm10
11421# asm 1: pxor <xmm10=int6464#11,<xmm12=int6464#13
11422# asm 2: pxor <xmm10=%xmm10,<xmm12=%xmm12
11423pxor %xmm10,%xmm12
11424
11425# qhasm: xmm10 ^= xmm2
11426# asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#11
11427# asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm10
11428pxor %xmm2,%xmm10
11429
11430# qhasm: xmm7 ^= xmm5
11431# asm 1: pxor <xmm5=int6464#8,<xmm7=int6464#6
11432# asm 2: pxor <xmm5=%xmm7,<xmm7=%xmm5
11433pxor %xmm7,%xmm5
11434
11435# qhasm: xmm6 ^= xmm1
11436# asm 1: pxor <xmm1=int6464#5,<xmm6=int6464#4
11437# asm 2: pxor <xmm1=%xmm4,<xmm6=%xmm3
11438pxor %xmm4,%xmm3
11439
11440# qhasm: xmm3 = xmm7
11441# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
11442# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
11443movdqa %xmm5,%xmm2
11444
11445# qhasm: xmm3 ^= xmm6
11446# asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3
11447# asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2
11448pxor %xmm3,%xmm2
11449
11450# qhasm: xmm3 &= xmm15
11451# asm 1: pand <xmm15=int6464#16,<xmm3=int6464#3
11452# asm 2: pand <xmm15=%xmm15,<xmm3=%xmm2
11453pand %xmm15,%xmm2
11454
11455# qhasm: xmm15 ^= xmm9
11456# asm 1: pxor <xmm9=int6464#10,<xmm15=int6464#16
11457# asm 2: pxor <xmm9=%xmm9,<xmm15=%xmm15
11458pxor %xmm9,%xmm15
11459
11460# qhasm: xmm15 &= xmm6
11461# asm 1: pand <xmm6=int6464#4,<xmm15=int6464#16
11462# asm 2: pand <xmm6=%xmm3,<xmm15=%xmm15
11463pand %xmm3,%xmm15
11464
11465# qhasm: xmm9 &= xmm7
11466# asm 1: pand <xmm7=int6464#6,<xmm9=int6464#10
11467# asm 2: pand <xmm7=%xmm5,<xmm9=%xmm9
11468pand %xmm5,%xmm9
11469
11470# qhasm: xmm15 ^= xmm9
11471# asm 1: pxor <xmm9=int6464#10,<xmm15=int6464#16
11472# asm 2: pxor <xmm9=%xmm9,<xmm15=%xmm15
11473pxor %xmm9,%xmm15
11474
11475# qhasm: xmm9 ^= xmm3
11476# asm 1: pxor <xmm3=int6464#3,<xmm9=int6464#10
11477# asm 2: pxor <xmm3=%xmm2,<xmm9=%xmm9
11478pxor %xmm2,%xmm9
11479
11480# qhasm: xmm15 ^= xmm4
11481# asm 1: pxor <xmm4=int6464#1,<xmm15=int6464#16
11482# asm 2: pxor <xmm4=%xmm0,<xmm15=%xmm15
11483pxor %xmm0,%xmm15
11484
11485# qhasm: xmm12 ^= xmm4
11486# asm 1: pxor <xmm4=int6464#1,<xmm12=int6464#13
11487# asm 2: pxor <xmm4=%xmm0,<xmm12=%xmm12
11488pxor %xmm0,%xmm12
11489
11490# qhasm: xmm9 ^= xmm0
11491# asm 1: pxor <xmm0=int6464#2,<xmm9=int6464#10
11492# asm 2: pxor <xmm0=%xmm1,<xmm9=%xmm9
11493pxor %xmm1,%xmm9
11494
11495# qhasm: xmm10 ^= xmm0
11496# asm 1: pxor <xmm0=int6464#2,<xmm10=int6464#11
11497# asm 2: pxor <xmm0=%xmm1,<xmm10=%xmm10
11498pxor %xmm1,%xmm10
11499
11500# qhasm: xmm15 ^= xmm8
11501# asm 1: pxor <xmm8=int6464#9,<xmm15=int6464#16
11502# asm 2: pxor <xmm8=%xmm8,<xmm15=%xmm15
11503pxor %xmm8,%xmm15
11504
11505# qhasm: xmm9 ^= xmm14
11506# asm 1: pxor <xmm14=int6464#15,<xmm9=int6464#10
11507# asm 2: pxor <xmm14=%xmm14,<xmm9=%xmm9
11508pxor %xmm14,%xmm9
11509
11510# qhasm: xmm12 ^= xmm15
11511# asm 1: pxor <xmm15=int6464#16,<xmm12=int6464#13
11512# asm 2: pxor <xmm15=%xmm15,<xmm12=%xmm12
11513pxor %xmm15,%xmm12
11514
11515# qhasm: xmm14 ^= xmm8
11516# asm 1: pxor <xmm8=int6464#9,<xmm14=int6464#15
11517# asm 2: pxor <xmm8=%xmm8,<xmm14=%xmm14
11518pxor %xmm8,%xmm14
11519
11520# qhasm: xmm8 ^= xmm9
11521# asm 1: pxor <xmm9=int6464#10,<xmm8=int6464#9
11522# asm 2: pxor <xmm9=%xmm9,<xmm8=%xmm8
11523pxor %xmm9,%xmm8
11524
11525# qhasm: xmm9 ^= xmm13
11526# asm 1: pxor <xmm13=int6464#14,<xmm9=int6464#10
11527# asm 2: pxor <xmm13=%xmm13,<xmm9=%xmm9
11528pxor %xmm13,%xmm9
11529
11530# qhasm: xmm13 ^= xmm10
11531# asm 1: pxor <xmm10=int6464#11,<xmm13=int6464#14
11532# asm 2: pxor <xmm10=%xmm10,<xmm13=%xmm13
11533pxor %xmm10,%xmm13
11534
11535# qhasm: xmm12 ^= xmm13
11536# asm 1: pxor <xmm13=int6464#14,<xmm12=int6464#13
11537# asm 2: pxor <xmm13=%xmm13,<xmm12=%xmm12
11538pxor %xmm13,%xmm12
11539
11540# qhasm: xmm10 ^= xmm11
11541# asm 1: pxor <xmm11=int6464#12,<xmm10=int6464#11
11542# asm 2: pxor <xmm11=%xmm11,<xmm10=%xmm10
11543pxor %xmm11,%xmm10
11544
11545# qhasm: xmm11 ^= xmm13
11546# asm 1: pxor <xmm13=int6464#14,<xmm11=int6464#12
11547# asm 2: pxor <xmm13=%xmm13,<xmm11=%xmm11
11548pxor %xmm13,%xmm11
11549
11550# qhasm: xmm14 ^= xmm11
11551# asm 1: pxor <xmm11=int6464#12,<xmm14=int6464#15
11552# asm 2: pxor <xmm11=%xmm11,<xmm14=%xmm14
11553pxor %xmm11,%xmm14
11554
11555# qhasm: xmm8 ^= *(int128 *)(c + 1280)
11556# asm 1: pxor 1280(<c=int64#4),<xmm8=int6464#9
11557# asm 2: pxor 1280(<c=%rcx),<xmm8=%xmm8
11558pxor 1280(%rcx),%xmm8
11559
11560# qhasm: xmm9 ^= *(int128 *)(c + 1296)
11561# asm 1: pxor 1296(<c=int64#4),<xmm9=int6464#10
11562# asm 2: pxor 1296(<c=%rcx),<xmm9=%xmm9
11563pxor 1296(%rcx),%xmm9
11564
11565# qhasm: xmm12 ^= *(int128 *)(c + 1312)
11566# asm 1: pxor 1312(<c=int64#4),<xmm12=int6464#13
11567# asm 2: pxor 1312(<c=%rcx),<xmm12=%xmm12
11568pxor 1312(%rcx),%xmm12
11569
11570# qhasm: xmm14 ^= *(int128 *)(c + 1328)
11571# asm 1: pxor 1328(<c=int64#4),<xmm14=int6464#15
11572# asm 2: pxor 1328(<c=%rcx),<xmm14=%xmm14
11573pxor 1328(%rcx),%xmm14
11574
11575# qhasm: xmm11 ^= *(int128 *)(c + 1344)
11576# asm 1: pxor 1344(<c=int64#4),<xmm11=int6464#12
11577# asm 2: pxor 1344(<c=%rcx),<xmm11=%xmm11
11578pxor 1344(%rcx),%xmm11
11579
11580# qhasm: xmm15 ^= *(int128 *)(c + 1360)
11581# asm 1: pxor 1360(<c=int64#4),<xmm15=int6464#16
11582# asm 2: pxor 1360(<c=%rcx),<xmm15=%xmm15
11583pxor 1360(%rcx),%xmm15
11584
11585# qhasm: xmm10 ^= *(int128 *)(c + 1376)
11586# asm 1: pxor 1376(<c=int64#4),<xmm10=int6464#11
11587# asm 2: pxor 1376(<c=%rcx),<xmm10=%xmm10
11588pxor 1376(%rcx),%xmm10
11589
11590# qhasm: xmm13 ^= *(int128 *)(c + 1392)
11591# asm 1: pxor 1392(<c=int64#4),<xmm13=int6464#14
11592# asm 2: pxor 1392(<c=%rcx),<xmm13=%xmm13
11593pxor 1392(%rcx),%xmm13
11594
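# Final AddRoundKey: the eight loads at c+1280 .. c+1392 fold in the last
# of the 11 bitsliced round keys (11 * 128 = 1408 bytes, matching
# CRYPTO_BEFORENMBYTES in api.h).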
11595# qhasm: xmm0 = xmm10
11596# asm 1: movdqa <xmm10=int6464#11,>xmm0=int6464#1
11597# asm 2: movdqa <xmm10=%xmm10,>xmm0=%xmm0
11598movdqa %xmm10,%xmm0
11599
11600# qhasm: uint6464 xmm0 >>= 1
11601# asm 1: psrlq $1,<xmm0=int6464#1
11602# asm 2: psrlq $1,<xmm0=%xmm0
11603psrlq $1,%xmm0
11604
11605# qhasm: xmm0 ^= xmm13
11606# asm 1: pxor <xmm13=int6464#14,<xmm0=int6464#1
11607# asm 2: pxor <xmm13=%xmm13,<xmm0=%xmm0
11608pxor %xmm13,%xmm0
11609
11610# qhasm: xmm0 &= BS0
11611# asm 1: pand BS0,<xmm0=int6464#1
11612# asm 2: pand BS0,<xmm0=%xmm0
11613pand BS0,%xmm0
11614
11615# qhasm: xmm13 ^= xmm0
11616# asm 1: pxor <xmm0=int6464#1,<xmm13=int6464#14
11617# asm 2: pxor <xmm0=%xmm0,<xmm13=%xmm13
11618pxor %xmm0,%xmm13
11619
11620# qhasm: uint6464 xmm0 <<= 1
11621# asm 1: psllq $1,<xmm0=int6464#1
11622# asm 2: psllq $1,<xmm0=%xmm0
11623psllq $1,%xmm0
11624
11625# qhasm: xmm10 ^= xmm0
11626# asm 1: pxor <xmm0=int6464#1,<xmm10=int6464#11
11627# asm 2: pxor <xmm0=%xmm0,<xmm10=%xmm10
11628pxor %xmm0,%xmm10
11629
11630# qhasm: xmm0 = xmm11
11631# asm 1: movdqa <xmm11=int6464#12,>xmm0=int6464#1
11632# asm 2: movdqa <xmm11=%xmm11,>xmm0=%xmm0
11633movdqa %xmm11,%xmm0
11634
11635# qhasm: uint6464 xmm0 >>= 1
11636# asm 1: psrlq $1,<xmm0=int6464#1
11637# asm 2: psrlq $1,<xmm0=%xmm0
11638psrlq $1,%xmm0
11639
11640# qhasm: xmm0 ^= xmm15
11641# asm 1: pxor <xmm15=int6464#16,<xmm0=int6464#1
11642# asm 2: pxor <xmm15=%xmm15,<xmm0=%xmm0
11643pxor %xmm15,%xmm0
11644
11645# qhasm: xmm0 &= BS0
11646# asm 1: pand BS0,<xmm0=int6464#1
11647# asm 2: pand BS0,<xmm0=%xmm0
11648pand BS0,%xmm0
11649
11650# qhasm: xmm15 ^= xmm0
11651# asm 1: pxor <xmm0=int6464#1,<xmm15=int6464#16
11652# asm 2: pxor <xmm0=%xmm0,<xmm15=%xmm15
11653pxor %xmm0,%xmm15
11654
11655# qhasm: uint6464 xmm0 <<= 1
11656# asm 1: psllq $1,<xmm0=int6464#1
11657# asm 2: psllq $1,<xmm0=%xmm0
11658psllq $1,%xmm0
11659
11660# qhasm: xmm11 ^= xmm0
11661# asm 1: pxor <xmm0=int6464#1,<xmm11=int6464#12
11662# asm 2: pxor <xmm0=%xmm0,<xmm11=%xmm11
11663pxor %xmm0,%xmm11
11664
11665# qhasm: xmm0 = xmm12
11666# asm 1: movdqa <xmm12=int6464#13,>xmm0=int6464#1
11667# asm 2: movdqa <xmm12=%xmm12,>xmm0=%xmm0
11668movdqa %xmm12,%xmm0
11669
11670# qhasm: uint6464 xmm0 >>= 1
11671# asm 1: psrlq $1,<xmm0=int6464#1
11672# asm 2: psrlq $1,<xmm0=%xmm0
11673psrlq $1,%xmm0
11674
11675# qhasm: xmm0 ^= xmm14
11676# asm 1: pxor <xmm14=int6464#15,<xmm0=int6464#1
11677# asm 2: pxor <xmm14=%xmm14,<xmm0=%xmm0
11678pxor %xmm14,%xmm0
11679
11680# qhasm: xmm0 &= BS0
11681# asm 1: pand BS0,<xmm0=int6464#1
11682# asm 2: pand BS0,<xmm0=%xmm0
11683pand BS0,%xmm0
11684
11685# qhasm: xmm14 ^= xmm0
11686# asm 1: pxor <xmm0=int6464#1,<xmm14=int6464#15
11687# asm 2: pxor <xmm0=%xmm0,<xmm14=%xmm14
11688pxor %xmm0,%xmm14
11689
11690# qhasm: uint6464 xmm0 <<= 1
11691# asm 1: psllq $1,<xmm0=int6464#1
11692# asm 2: psllq $1,<xmm0=%xmm0
11693psllq $1,%xmm0
11694
11695# qhasm: xmm12 ^= xmm0
11696# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13
11697# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12
11698pxor %xmm0,%xmm12
11699
11700# qhasm: xmm0 = xmm8
11701# asm 1: movdqa <xmm8=int6464#9,>xmm0=int6464#1
11702# asm 2: movdqa <xmm8=%xmm8,>xmm0=%xmm0
11703movdqa %xmm8,%xmm0
11704
11705# qhasm: uint6464 xmm0 >>= 1
11706# asm 1: psrlq $1,<xmm0=int6464#1
11707# asm 2: psrlq $1,<xmm0=%xmm0
11708psrlq $1,%xmm0
11709
11710# qhasm: xmm0 ^= xmm9
11711# asm 1: pxor <xmm9=int6464#10,<xmm0=int6464#1
11712# asm 2: pxor <xmm9=%xmm9,<xmm0=%xmm0
11713pxor %xmm9,%xmm0
11714
11715# qhasm: xmm0 &= BS0
11716# asm 1: pand BS0,<xmm0=int6464#1
11717# asm 2: pand BS0,<xmm0=%xmm0
11718pand BS0,%xmm0
11719
11720# qhasm: xmm9 ^= xmm0
11721# asm 1: pxor <xmm0=int6464#1,<xmm9=int6464#10
11722# asm 2: pxor <xmm0=%xmm0,<xmm9=%xmm9
11723pxor %xmm0,%xmm9
11724
11725# qhasm: uint6464 xmm0 <<= 1
11726# asm 1: psllq $1,<xmm0=int6464#1
11727# asm 2: psllq $1,<xmm0=%xmm0
11728psllq $1,%xmm0
11729
11730# qhasm: xmm8 ^= xmm0
11731# asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9
11732# asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8
11733pxor %xmm0,%xmm8
11734
11735# qhasm: xmm0 = xmm15
11736# asm 1: movdqa <xmm15=int6464#16,>xmm0=int6464#1
11737# asm 2: movdqa <xmm15=%xmm15,>xmm0=%xmm0
11738movdqa %xmm15,%xmm0
11739
11740# qhasm: uint6464 xmm0 >>= 2
11741# asm 1: psrlq $2,<xmm0=int6464#1
11742# asm 2: psrlq $2,<xmm0=%xmm0
11743psrlq $2,%xmm0
11744
11745# qhasm: xmm0 ^= xmm13
11746# asm 1: pxor <xmm13=int6464#14,<xmm0=int6464#1
11747# asm 2: pxor <xmm13=%xmm13,<xmm0=%xmm0
11748pxor %xmm13,%xmm0
11749
11750# qhasm: xmm0 &= BS1
11751# asm 1: pand BS1,<xmm0=int6464#1
11752# asm 2: pand BS1,<xmm0=%xmm0
11753pand BS1,%xmm0
11754
11755# qhasm: xmm13 ^= xmm0
11756# asm 1: pxor <xmm0=int6464#1,<xmm13=int6464#14
11757# asm 2: pxor <xmm0=%xmm0,<xmm13=%xmm13
11758pxor %xmm0,%xmm13
11759
11760# qhasm: uint6464 xmm0 <<= 2
11761# asm 1: psllq $2,<xmm0=int6464#1
11762# asm 2: psllq $2,<xmm0=%xmm0
11763psllq $2,%xmm0
11764
11765# qhasm: xmm15 ^= xmm0
11766# asm 1: pxor <xmm0=int6464#1,<xmm15=int6464#16
11767# asm 2: pxor <xmm0=%xmm0,<xmm15=%xmm15
11768pxor %xmm0,%xmm15
11769
11770# qhasm: xmm0 = xmm11
11771# asm 1: movdqa <xmm11=int6464#12,>xmm0=int6464#1
11772# asm 2: movdqa <xmm11=%xmm11,>xmm0=%xmm0
11773movdqa %xmm11,%xmm0
11774
11775# qhasm: uint6464 xmm0 >>= 2
11776# asm 1: psrlq $2,<xmm0=int6464#1
11777# asm 2: psrlq $2,<xmm0=%xmm0
11778psrlq $2,%xmm0
11779
11780# qhasm: xmm0 ^= xmm10
11781# asm 1: pxor <xmm10=int6464#11,<xmm0=int6464#1
11782# asm 2: pxor <xmm10=%xmm10,<xmm0=%xmm0
11783pxor %xmm10,%xmm0
11784
11785# qhasm: xmm0 &= BS1
11786# asm 1: pand BS1,<xmm0=int6464#1
11787# asm 2: pand BS1,<xmm0=%xmm0
11788pand BS1,%xmm0
11789
11790# qhasm: xmm10 ^= xmm0
11791# asm 1: pxor <xmm0=int6464#1,<xmm10=int6464#11
11792# asm 2: pxor <xmm0=%xmm0,<xmm10=%xmm10
11793pxor %xmm0,%xmm10
11794
11795# qhasm: uint6464 xmm0 <<= 2
11796# asm 1: psllq $2,<xmm0=int6464#1
11797# asm 2: psllq $2,<xmm0=%xmm0
11798psllq $2,%xmm0
11799
11800# qhasm: xmm11 ^= xmm0
11801# asm 1: pxor <xmm0=int6464#1,<xmm11=int6464#12
11802# asm 2: pxor <xmm0=%xmm0,<xmm11=%xmm11
11803pxor %xmm0,%xmm11
11804
11805# qhasm: xmm0 = xmm9
11806# asm 1: movdqa <xmm9=int6464#10,>xmm0=int6464#1
11807# asm 2: movdqa <xmm9=%xmm9,>xmm0=%xmm0
11808movdqa %xmm9,%xmm0
11809
11810# qhasm: uint6464 xmm0 >>= 2
11811# asm 1: psrlq $2,<xmm0=int6464#1
11812# asm 2: psrlq $2,<xmm0=%xmm0
11813psrlq $2,%xmm0
11814
11815# qhasm: xmm0 ^= xmm14
11816# asm 1: pxor <xmm14=int6464#15,<xmm0=int6464#1
11817# asm 2: pxor <xmm14=%xmm14,<xmm0=%xmm0
11818pxor %xmm14,%xmm0
11819
11820# qhasm: xmm0 &= BS1
11821# asm 1: pand BS1,<xmm0=int6464#1
11822# asm 2: pand BS1,<xmm0=%xmm0
11823pand BS1,%xmm0
11824
11825# qhasm: xmm14 ^= xmm0
11826# asm 1: pxor <xmm0=int6464#1,<xmm14=int6464#15
11827# asm 2: pxor <xmm0=%xmm0,<xmm14=%xmm14
11828pxor %xmm0,%xmm14
11829
11830# qhasm: uint6464 xmm0 <<= 2
11831# asm 1: psllq $2,<xmm0=int6464#1
11832# asm 2: psllq $2,<xmm0=%xmm0
11833psllq $2,%xmm0
11834
11835# qhasm: xmm9 ^= xmm0
11836# asm 1: pxor <xmm0=int6464#1,<xmm9=int6464#10
11837# asm 2: pxor <xmm0=%xmm0,<xmm9=%xmm9
11838pxor %xmm0,%xmm9
11839
11840# qhasm: xmm0 = xmm8
11841# asm 1: movdqa <xmm8=int6464#9,>xmm0=int6464#1
11842# asm 2: movdqa <xmm8=%xmm8,>xmm0=%xmm0
11843movdqa %xmm8,%xmm0
11844
11845# qhasm: uint6464 xmm0 >>= 2
11846# asm 1: psrlq $2,<xmm0=int6464#1
11847# asm 2: psrlq $2,<xmm0=%xmm0
11848psrlq $2,%xmm0
11849
11850# qhasm: xmm0 ^= xmm12
11851# asm 1: pxor <xmm12=int6464#13,<xmm0=int6464#1
11852# asm 2: pxor <xmm12=%xmm12,<xmm0=%xmm0
11853pxor %xmm12,%xmm0
11854
11855# qhasm: xmm0 &= BS1
11856# asm 1: pand BS1,<xmm0=int6464#1
11857# asm 2: pand BS1,<xmm0=%xmm0
11858pand BS1,%xmm0
11859
11860# qhasm: xmm12 ^= xmm0
11861# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13
11862# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12
11863pxor %xmm0,%xmm12
11864
11865# qhasm: uint6464 xmm0 <<= 2
11866# asm 1: psllq $2,<xmm0=int6464#1
11867# asm 2: psllq $2,<xmm0=%xmm0
11868psllq $2,%xmm0
11869
11870# qhasm: xmm8 ^= xmm0
11871# asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9
11872# asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8
11873pxor %xmm0,%xmm8
11874
11875# qhasm: xmm0 = xmm14
11876# asm 1: movdqa <xmm14=int6464#15,>xmm0=int6464#1
11877# asm 2: movdqa <xmm14=%xmm14,>xmm0=%xmm0
11878movdqa %xmm14,%xmm0
11879
11880# qhasm: uint6464 xmm0 >>= 4
11881# asm 1: psrlq $4,<xmm0=int6464#1
11882# asm 2: psrlq $4,<xmm0=%xmm0
11883psrlq $4,%xmm0
11884
11885# qhasm: xmm0 ^= xmm13
11886# asm 1: pxor <xmm13=int6464#14,<xmm0=int6464#1
11887# asm 2: pxor <xmm13=%xmm13,<xmm0=%xmm0
11888pxor %xmm13,%xmm0
11889
11890# qhasm: xmm0 &= BS2
11891# asm 1: pand BS2,<xmm0=int6464#1
11892# asm 2: pand BS2,<xmm0=%xmm0
11893pand BS2,%xmm0
11894
11895# qhasm: xmm13 ^= xmm0
11896# asm 1: pxor <xmm0=int6464#1,<xmm13=int6464#14
11897# asm 2: pxor <xmm0=%xmm0,<xmm13=%xmm13
11898pxor %xmm0,%xmm13
11899
11900# qhasm: uint6464 xmm0 <<= 4
11901# asm 1: psllq $4,<xmm0=int6464#1
11902# asm 2: psllq $4,<xmm0=%xmm0
11903psllq $4,%xmm0
11904
11905# qhasm: xmm14 ^= xmm0
11906# asm 1: pxor <xmm0=int6464#1,<xmm14=int6464#15
11907# asm 2: pxor <xmm0=%xmm0,<xmm14=%xmm14
11908pxor %xmm0,%xmm14
11909
11910# qhasm: xmm0 = xmm12
11911# asm 1: movdqa <xmm12=int6464#13,>xmm0=int6464#1
11912# asm 2: movdqa <xmm12=%xmm12,>xmm0=%xmm0
11913movdqa %xmm12,%xmm0
11914
11915# qhasm: uint6464 xmm0 >>= 4
11916# asm 1: psrlq $4,<xmm0=int6464#1
11917# asm 2: psrlq $4,<xmm0=%xmm0
11918psrlq $4,%xmm0
11919
11920# qhasm: xmm0 ^= xmm10
11921# asm 1: pxor <xmm10=int6464#11,<xmm0=int6464#1
11922# asm 2: pxor <xmm10=%xmm10,<xmm0=%xmm0
11923pxor %xmm10,%xmm0
11924
11925# qhasm: xmm0 &= BS2
11926# asm 1: pand BS2,<xmm0=int6464#1
11927# asm 2: pand BS2,<xmm0=%xmm0
11928pand BS2,%xmm0
11929
11930# qhasm: xmm10 ^= xmm0
11931# asm 1: pxor <xmm0=int6464#1,<xmm10=int6464#11
11932# asm 2: pxor <xmm0=%xmm0,<xmm10=%xmm10
11933pxor %xmm0,%xmm10
11934
11935# qhasm: uint6464 xmm0 <<= 4
11936# asm 1: psllq $4,<xmm0=int6464#1
11937# asm 2: psllq $4,<xmm0=%xmm0
11938psllq $4,%xmm0
11939
11940# qhasm: xmm12 ^= xmm0
11941# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13
11942# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12
11943pxor %xmm0,%xmm12
11944
11945# qhasm: xmm0 = xmm9
11946# asm 1: movdqa <xmm9=int6464#10,>xmm0=int6464#1
11947# asm 2: movdqa <xmm9=%xmm9,>xmm0=%xmm0
11948movdqa %xmm9,%xmm0
11949
11950# qhasm: uint6464 xmm0 >>= 4
11951# asm 1: psrlq $4,<xmm0=int6464#1
11952# asm 2: psrlq $4,<xmm0=%xmm0
11953psrlq $4,%xmm0
11954
11955# qhasm: xmm0 ^= xmm15
11956# asm 1: pxor <xmm15=int6464#16,<xmm0=int6464#1
11957# asm 2: pxor <xmm15=%xmm15,<xmm0=%xmm0
11958pxor %xmm15,%xmm0
11959
11960# qhasm: xmm0 &= BS2
11961# asm 1: pand BS2,<xmm0=int6464#1
11962# asm 2: pand BS2,<xmm0=%xmm0
11963pand BS2,%xmm0
11964
11965# qhasm: xmm15 ^= xmm0
11966# asm 1: pxor <xmm0=int6464#1,<xmm15=int6464#16
11967# asm 2: pxor <xmm0=%xmm0,<xmm15=%xmm15
11968pxor %xmm0,%xmm15
11969
11970# qhasm: uint6464 xmm0 <<= 4
11971# asm 1: psllq $4,<xmm0=int6464#1
11972# asm 2: psllq $4,<xmm0=%xmm0
11973psllq $4,%xmm0
11974
11975# qhasm: xmm9 ^= xmm0
11976# asm 1: pxor <xmm0=int6464#1,<xmm9=int6464#10
11977# asm 2: pxor <xmm0=%xmm0,<xmm9=%xmm9
11978pxor %xmm0,%xmm9
11979
11980# qhasm: xmm0 = xmm8
11981# asm 1: movdqa <xmm8=int6464#9,>xmm0=int6464#1
11982# asm 2: movdqa <xmm8=%xmm8,>xmm0=%xmm0
11983movdqa %xmm8,%xmm0
11984
11985# qhasm: uint6464 xmm0 >>= 4
11986# asm 1: psrlq $4,<xmm0=int6464#1
11987# asm 2: psrlq $4,<xmm0=%xmm0
11988psrlq $4,%xmm0
11989
11990# qhasm: xmm0 ^= xmm11
11991# asm 1: pxor <xmm11=int6464#12,<xmm0=int6464#1
11992# asm 2: pxor <xmm11=%xmm11,<xmm0=%xmm0
11993pxor %xmm11,%xmm0
11994
11995# qhasm: xmm0 &= BS2
11996# asm 1: pand BS2,<xmm0=int6464#1
11997# asm 2: pand BS2,<xmm0=%xmm0
11998pand BS2,%xmm0
11999
12000# qhasm: xmm11 ^= xmm0
12001# asm 1: pxor <xmm0=int6464#1,<xmm11=int6464#12
12002# asm 2: pxor <xmm0=%xmm0,<xmm11=%xmm11
12003pxor %xmm0,%xmm11
12004
12005# qhasm: uint6464 xmm0 <<= 4
12006# asm 1: psllq $4,<xmm0=int6464#1
12007# asm 2: psllq $4,<xmm0=%xmm0
12008psllq $4,%xmm0
12009
12010# qhasm: xmm8 ^= xmm0
12011# asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9
12012# asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8
12013pxor %xmm0,%xmm8
12014
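# The psrlq/pand/psllq swap network above (masks BS0, BS1, BS2) inverts the
# bitslice transpose, turning the eight bitsliced registers back into eight
# ordinary 16-byte keystream blocks, 128 bytes in total.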
12015# qhasm: unsigned<? =? len-128
12016# asm 1: cmp $128,<len=int64#2
12017# asm 2: cmp $128,<len=%rsi
12018cmp $128,%rsi
12019# comment:fp stack unchanged by jump
12020
12021# qhasm: goto partial if unsigned<
12022jb ._partial
12023# comment:fp stack unchanged by jump
12024
12025# qhasm: goto full if =
12026je ._full
12027
12028# qhasm: tmp = *(uint32 *)(np + 12)
12029# asm 1: movl 12(<np=int64#3),>tmp=int64#5d
12030# asm 2: movl 12(<np=%rdx),>tmp=%r8d
12031movl 12(%rdx),%r8d
12032
12033# qhasm: (uint32) bswap tmp
12034# asm 1: bswap <tmp=int64#5d
12035# asm 2: bswap <tmp=%r8d
12036bswap %r8d
12037
12038# qhasm: tmp += 8
12039# asm 1: add $8,<tmp=int64#5
12040# asm 2: add $8,<tmp=%r8
12041add $8,%r8
12042
12043# qhasm: (uint32) bswap tmp
12044# asm 1: bswap <tmp=int64#5d
12045# asm 2: bswap <tmp=%r8d
12046bswap %r8d
12047
12048# qhasm: *(uint32 *)(np + 12) = tmp
12049# asm 1: movl <tmp=int64#5d,12(<np=int64#3)
12050# asm 2: movl <tmp=%r8d,12(<np=%rdx)
12051movl %r8d,12(%rdx)
12052
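# More than 128 bytes remain: the last four nonce bytes hold a 32-bit
# big-endian block counter, so it was byte-swapped, advanced by 8 (one per
# block just produced), swapped back and stored.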
12053# qhasm: *(int128 *) (outp + 0) = xmm8
12054# asm 1: movdqa <xmm8=int6464#9,0(<outp=int64#1)
12055# asm 2: movdqa <xmm8=%xmm8,0(<outp=%rdi)
12056movdqa %xmm8,0(%rdi)
12057
12058# qhasm: *(int128 *) (outp + 16) = xmm9
12059# asm 1: movdqa <xmm9=int6464#10,16(<outp=int64#1)
12060# asm 2: movdqa <xmm9=%xmm9,16(<outp=%rdi)
12061movdqa %xmm9,16(%rdi)
12062
12063# qhasm: *(int128 *) (outp + 32) = xmm12
12064# asm 1: movdqa <xmm12=int6464#13,32(<outp=int64#1)
12065# asm 2: movdqa <xmm12=%xmm12,32(<outp=%rdi)
12066movdqa %xmm12,32(%rdi)
12067
12068# qhasm: *(int128 *) (outp + 48) = xmm14
12069# asm 1: movdqa <xmm14=int6464#15,48(<outp=int64#1)
12070# asm 2: movdqa <xmm14=%xmm14,48(<outp=%rdi)
12071movdqa %xmm14,48(%rdi)
12072
12073# qhasm: *(int128 *) (outp + 64) = xmm11
12074# asm 1: movdqa <xmm11=int6464#12,64(<outp=int64#1)
12075# asm 2: movdqa <xmm11=%xmm11,64(<outp=%rdi)
12076movdqa %xmm11,64(%rdi)
12077
12078# qhasm: *(int128 *) (outp + 80) = xmm15
12079# asm 1: movdqa <xmm15=int6464#16,80(<outp=int64#1)
12080# asm 2: movdqa <xmm15=%xmm15,80(<outp=%rdi)
12081movdqa %xmm15,80(%rdi)
12082
12083# qhasm: *(int128 *) (outp + 96) = xmm10
12084# asm 1: movdqa <xmm10=int6464#11,96(<outp=int64#1)
12085# asm 2: movdqa <xmm10=%xmm10,96(<outp=%rdi)
12086movdqa %xmm10,96(%rdi)
12087
12088# qhasm: *(int128 *) (outp + 112) = xmm13
12089# asm 1: movdqa <xmm13=int6464#14,112(<outp=int64#1)
12090# asm 2: movdqa <xmm13=%xmm13,112(<outp=%rdi)
12091movdqa %xmm13,112(%rdi)
12092
12093# qhasm: len -= 128
12094# asm 1: sub $128,<len=int64#2
12095# asm 2: sub $128,<len=%rsi
12096sub $128,%rsi
12097
12098# qhasm: outp += 128
12099# asm 1: add $128,<outp=int64#1
12100# asm 2: add $128,<outp=%rdi
12101add $128,%rdi
12102# comment:fp stack unchanged by jump
12103
12104# qhasm: goto enc_block
12105jmp ._enc_block
12106
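# Pointers and length updated; control loops back to enc_block (earlier in
# this file) to produce the next eight blocks.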
12107# qhasm: partial:
12108._partial:
12109
12110# qhasm: lensav = len
12111# asm 1: mov <len=int64#2,>lensav=int64#4
12112# asm 2: mov <len=%rsi,>lensav=%rcx
12113mov %rsi,%rcx
12114
12115# qhasm: (uint32) len >>= 4
12116# asm 1: shr $4,<len=int64#2d
12117# asm 2: shr $4,<len=%esi
12118shr $4,%esi
12119
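# Partial tail (len < 128): lensav keeps the exact remaining byte count,
# while len is reduced to the number of whole 16-byte blocks (len >> 4),
# which is added to the big-endian counter below.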
12120# qhasm: tmp = *(uint32 *)(np + 12)
12121# asm 1: movl 12(<np=int64#3),>tmp=int64#5d
12122# asm 2: movl 12(<np=%rdx),>tmp=%r8d
12123movl 12(%rdx),%r8d
12124
12125# qhasm: (uint32) bswap tmp
12126# asm 1: bswap <tmp=int64#5d
12127# asm 2: bswap <tmp=%r8d
12128bswap %r8d
12129
12130# qhasm: tmp += len
12131# asm 1: add <len=int64#2,<tmp=int64#5
12132# asm 2: add <len=%rsi,<tmp=%r8
12133add %rsi,%r8
12134
12135# qhasm: (uint32) bswap tmp
12136# asm 1: bswap <tmp=int64#5d
12137# asm 2: bswap <tmp=%r8d
12138bswap %r8d
12139
12140# qhasm: *(uint32 *)(np + 12) = tmp
12141# asm 1: movl <tmp=int64#5d,12(<np=int64#3)
12142# asm 2: movl <tmp=%r8d,12(<np=%rdx)
12143movl %r8d,12(%rdx)
12144
12145# qhasm: blp = &bl
12146# asm 1: leaq <bl=stack1024#1,>blp=int64#2
12147# asm 2: leaq <bl=32(%rsp),>blp=%rsi
12148leaq 32(%rsp),%rsi
12149
12150# qhasm: *(int128 *)(blp + 0) = xmm8
12151# asm 1: movdqa <xmm8=int6464#9,0(<blp=int64#2)
12152# asm 2: movdqa <xmm8=%xmm8,0(<blp=%rsi)
12153movdqa %xmm8,0(%rsi)
12154
12155# qhasm: *(int128 *)(blp + 16) = xmm9
12156# asm 1: movdqa <xmm9=int6464#10,16(<blp=int64#2)
12157# asm 2: movdqa <xmm9=%xmm9,16(<blp=%rsi)
12158movdqa %xmm9,16(%rsi)
12159
12160# qhasm: *(int128 *)(blp + 32) = xmm12
12161# asm 1: movdqa <xmm12=int6464#13,32(<blp=int64#2)
12162# asm 2: movdqa <xmm12=%xmm12,32(<blp=%rsi)
12163movdqa %xmm12,32(%rsi)
12164
12165# qhasm: *(int128 *)(blp + 48) = xmm14
12166# asm 1: movdqa <xmm14=int6464#15,48(<blp=int64#2)
12167# asm 2: movdqa <xmm14=%xmm14,48(<blp=%rsi)
12168movdqa %xmm14,48(%rsi)
12169
12170# qhasm: *(int128 *)(blp + 64) = xmm11
12171# asm 1: movdqa <xmm11=int6464#12,64(<blp=int64#2)
12172# asm 2: movdqa <xmm11=%xmm11,64(<blp=%rsi)
12173movdqa %xmm11,64(%rsi)
12174
12175# qhasm: *(int128 *)(blp + 80) = xmm15
12176# asm 1: movdqa <xmm15=int6464#16,80(<blp=int64#2)
12177# asm 2: movdqa <xmm15=%xmm15,80(<blp=%rsi)
12178movdqa %xmm15,80(%rsi)
12179
12180# qhasm: *(int128 *)(blp + 96) = xmm10
12181# asm 1: movdqa <xmm10=int6464#11,96(<blp=int64#2)
12182# asm 2: movdqa <xmm10=%xmm10,96(<blp=%rsi)
12183movdqa %xmm10,96(%rsi)
12184
12185# qhasm: *(int128 *)(blp + 112) = xmm13
12186# asm 1: movdqa <xmm13=int6464#14,112(<blp=int64#2)
12187# asm 2: movdqa <xmm13=%xmm13,112(<blp=%rsi)
12188movdqa %xmm13,112(%rsi)
12189
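# All 128 keystream bytes are spilled to the aligned stack buffer bl so
# the byte loop below can copy out exactly lensav of them.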
12190# qhasm: bytes:
12191._bytes:
12192
12193# qhasm: =? lensav-0
12194# asm 1: cmp $0,<lensav=int64#4
12195# asm 2: cmp $0,<lensav=%rcx
12196cmp $0,%rcx
12197# comment:fp stack unchanged by jump
12198
12199# qhasm: goto end if =
12200je ._end
12201
12202# qhasm: b = *(uint8 *)(blp + 0)
12203# asm 1: movzbq 0(<blp=int64#2),>b=int64#3
12204# asm 2: movzbq 0(<blp=%rsi),>b=%rdx
12205movzbq 0(%rsi),%rdx
12206
12207# qhasm: *(uint8 *)(outp + 0) = b
12208# asm 1: movb <b=int64#3b,0(<outp=int64#1)
12209# asm 2: movb <b=%dl,0(<outp=%rdi)
12210movb %dl,0(%rdi)
12211
12212# qhasm: blp += 1
12213# asm 1: add $1,<blp=int64#2
12214# asm 2: add $1,<blp=%rsi
12215add $1,%rsi
12216
12217# qhasm: outp += 1
12218# asm 1: add $1,<outp=int64#1
12219# asm 2: add $1,<outp=%rdi
12220add $1,%rdi
12221
12222# qhasm: lensav -= 1
12223# asm 1: sub $1,<lensav=int64#4
12224# asm 2: sub $1,<lensav=%rcx
12225sub $1,%rcx
12226# comment:fp stack unchanged by jump
12227
12228# qhasm: goto bytes
12229jmp ._bytes
12230
12231# qhasm: full:
12232._full:
12233
12234# qhasm: tmp = *(uint32 *)(np + 12)
12235# asm 1: movl 12(<np=int64#3),>tmp=int64#4d
12236# asm 2: movl 12(<np=%rdx),>tmp=%ecx
12237movl 12(%rdx),%ecx
12238
12239# qhasm: (uint32) bswap tmp
12240# asm 1: bswap <tmp=int64#4d
12241# asm 2: bswap <tmp=%ecx
12242bswap %ecx
12243
12244# qhasm: tmp += len
12245# asm 1: add <len=int64#2,<tmp=int64#4
12246# asm 2: add <len=%rsi,<tmp=%rcx
12247add %rsi,%rcx
12248
12249# qhasm: (uint32) bswap tmp
12250# asm 1: bswap <tmp=int64#4d
12251# asm 2: bswap <tmp=%ecx
12252bswap %ecx
12253
12254# qhasm: *(uint32 *)(np + 12) = tmp
12255# asm 1: movl <tmp=int64#4d,12(<np=int64#3)
12256# asm 2: movl <tmp=%ecx,12(<np=%rdx)
12257movl %ecx,12(%rdx)
12258
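# len is exactly 128 on this path; the byte-swapped counter was advanced
# by len before the final eight blocks are written straight to outp.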
12259# qhasm: *(int128 *) (outp + 0) = xmm8
12260# asm 1: movdqa <xmm8=int6464#9,0(<outp=int64#1)
12261# asm 2: movdqa <xmm8=%xmm8,0(<outp=%rdi)
12262movdqa %xmm8,0(%rdi)
12263
12264# qhasm: *(int128 *) (outp + 16) = xmm9
12265# asm 1: movdqa <xmm9=int6464#10,16(<outp=int64#1)
12266# asm 2: movdqa <xmm9=%xmm9,16(<outp=%rdi)
12267movdqa %xmm9,16(%rdi)
12268
12269# qhasm: *(int128 *) (outp + 32) = xmm12
12270# asm 1: movdqa <xmm12=int6464#13,32(<outp=int64#1)
12271# asm 2: movdqa <xmm12=%xmm12,32(<outp=%rdi)
12272movdqa %xmm12,32(%rdi)
12273
12274# qhasm: *(int128 *) (outp + 48) = xmm14
12275# asm 1: movdqa <xmm14=int6464#15,48(<outp=int64#1)
12276# asm 2: movdqa <xmm14=%xmm14,48(<outp=%rdi)
12277movdqa %xmm14,48(%rdi)
12278
12279# qhasm: *(int128 *) (outp + 64) = xmm11
12280# asm 1: movdqa <xmm11=int6464#12,64(<outp=int64#1)
12281# asm 2: movdqa <xmm11=%xmm11,64(<outp=%rdi)
12282movdqa %xmm11,64(%rdi)
12283
12284# qhasm: *(int128 *) (outp + 80) = xmm15
12285# asm 1: movdqa <xmm15=int6464#16,80(<outp=int64#1)
12286# asm 2: movdqa <xmm15=%xmm15,80(<outp=%rdi)
12287movdqa %xmm15,80(%rdi)
12288
12289# qhasm: *(int128 *) (outp + 96) = xmm10
12290# asm 1: movdqa <xmm10=int6464#11,96(<outp=int64#1)
12291# asm 2: movdqa <xmm10=%xmm10,96(<outp=%rdi)
12292movdqa %xmm10,96(%rdi)
12293
12294# qhasm: *(int128 *) (outp + 112) = xmm13
12295# asm 1: movdqa <xmm13=int6464#14,112(<outp=int64#1)
12296# asm 2: movdqa <xmm13=%xmm13,112(<outp=%rdi)
12297movdqa %xmm13,112(%rdi)
12298# comment:fp stack unchanged by fallthrough
12299
12300# qhasm: end:
12301._end:
12302
12303# qhasm: leave
12304add %r11,%rsp
12305mov %rdi,%rax
12306mov %rsi,%rdx
12307xor %rax,%rax
12308ret
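# crypto_stream returns 0 on success; rax is cleared before returning.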
diff --git a/nacl/crypto_stream/aes128ctr/core2/api.h b/nacl/crypto_stream/aes128ctr/core2/api.h
new file mode 100644
index 00000000..62fc8d88
--- /dev/null
+++ b/nacl/crypto_stream/aes128ctr/core2/api.h
@@ -0,0 +1,3 @@
1#define CRYPTO_KEYBYTES 16
2#define CRYPTO_NONCEBYTES 16
3#define CRYPTO_BEFORENMBYTES 1408
diff --git a/nacl/crypto_stream/aes128ctr/core2/beforenm.s b/nacl/crypto_stream/aes128ctr/core2/beforenm.s
new file mode 100644
index 00000000..689ad8c3
--- /dev/null
+++ b/nacl/crypto_stream/aes128ctr/core2/beforenm.s
@@ -0,0 +1,13694 @@
1# Author: Emilia Käsper and Peter Schwabe
2# Date: 2009-03-19
3# +2010.01.31: minor namespace modifications
4# Public domain
5
6.data
7.p2align 6
8
9RCON: .int 0x00000000, 0x00000000, 0x00000000, 0xffffffff
10ROTB: .int 0x0c000000, 0x00000000, 0x04000000, 0x08000000
11EXPB0: .int 0x03030303, 0x07070707, 0x0b0b0b0b, 0x0f0f0f0f
12CTRINC1: .int 0x00000001, 0x00000000, 0x00000000, 0x00000000
13CTRINC2: .int 0x00000002, 0x00000000, 0x00000000, 0x00000000
14CTRINC3: .int 0x00000003, 0x00000000, 0x00000000, 0x00000000
15CTRINC4: .int 0x00000004, 0x00000000, 0x00000000, 0x00000000
16CTRINC5: .int 0x00000005, 0x00000000, 0x00000000, 0x00000000
17CTRINC6: .int 0x00000006, 0x00000000, 0x00000000, 0x00000000
18CTRINC7: .int 0x00000007, 0x00000000, 0x00000000, 0x00000000
19RCTRINC1: .int 0x00000000, 0x00000000, 0x00000000, 0x00000001
20RCTRINC2: .int 0x00000000, 0x00000000, 0x00000000, 0x00000002
21RCTRINC3: .int 0x00000000, 0x00000000, 0x00000000, 0x00000003
22RCTRINC4: .int 0x00000000, 0x00000000, 0x00000000, 0x00000004
23RCTRINC5: .int 0x00000000, 0x00000000, 0x00000000, 0x00000005
24RCTRINC6: .int 0x00000000, 0x00000000, 0x00000000, 0x00000006
25RCTRINC7: .int 0x00000000, 0x00000000, 0x00000000, 0x00000007
26
27SWAP32: .int 0x00010203, 0x04050607, 0x08090a0b, 0x0c0d0e0f
28M0SWAP: .quad 0x0105090d0004080c , 0x03070b0f02060a0e
29
30BS0: .quad 0x5555555555555555, 0x5555555555555555
31BS1: .quad 0x3333333333333333, 0x3333333333333333
32BS2: .quad 0x0f0f0f0f0f0f0f0f, 0x0f0f0f0f0f0f0f0f
33ONE: .quad 0xffffffffffffffff, 0xffffffffffffffff
34M0: .quad 0x02060a0e03070b0f, 0x0004080c0105090d
35SRM0: .quad 0x0304090e00050a0f, 0x01060b0c0207080d
36SR: .quad 0x0504070600030201, 0x0f0e0d0c0a09080b
37
38# qhasm: int64 arg1
39
40# qhasm: int64 arg2
41
42# qhasm: input arg1
43
44# qhasm: input arg2
45
46# qhasm: int64 r11_caller
47
48# qhasm: int64 r12_caller
49
50# qhasm: int64 r13_caller
51
52# qhasm: int64 r14_caller
53
54# qhasm: int64 r15_caller
55
56# qhasm: int64 rbx_caller
57
58# qhasm: int64 rbp_caller
59
60# qhasm: caller r11_caller
61
62# qhasm: caller r12_caller
63
64# qhasm: caller r13_caller
65
66# qhasm: caller r14_caller
67
68# qhasm: caller r15_caller
69
70# qhasm: caller rbx_caller
71
72# qhasm: caller rbp_caller
73
74# qhasm: int64 sboxp
75
76# qhasm: int64 c
77
78# qhasm: int64 k
79
80# qhasm: int64 x0
81
82# qhasm: int64 x1
83
84# qhasm: int64 x2
85
86# qhasm: int64 x3
87
88# qhasm: int64 e
89
90# qhasm: int64 q0
91
92# qhasm: int64 q1
93
94# qhasm: int64 q2
95
96# qhasm: int64 q3
97
98# qhasm: int6464 xmm0
99
100# qhasm: int6464 xmm1
101
102# qhasm: int6464 xmm2
103
104# qhasm: int6464 xmm3
105
106# qhasm: int6464 xmm4
107
108# qhasm: int6464 xmm5
109
110# qhasm: int6464 xmm6
111
112# qhasm: int6464 xmm7
113
114# qhasm: int6464 xmm8
115
116# qhasm: int6464 xmm9
117
118# qhasm: int6464 xmm10
119
120# qhasm: int6464 xmm11
121
122# qhasm: int6464 xmm12
123
124# qhasm: int6464 xmm13
125
126# qhasm: int6464 xmm14
127
128# qhasm: int6464 xmm15
129
130# qhasm: int6464 t
131
132# qhasm: enter crypto_stream_aes128ctr_core2_beforenm
133.text
134.p2align 5
135.globl _crypto_stream_aes128ctr_core2_beforenm
136.globl crypto_stream_aes128ctr_core2_beforenm
137_crypto_stream_aes128ctr_core2_beforenm:
138crypto_stream_aes128ctr_core2_beforenm:
139mov %rsp,%r11
140and $31,%r11
141add $0,%r11
142sub %r11,%rsp
143
144# qhasm: c = arg1
145# asm 1: mov <arg1=int64#1,>c=int64#1
146# asm 2: mov <arg1=%rdi,>c=%rdi
147mov %rdi,%rdi
148
149# qhasm: k = arg2
150# asm 1: mov <arg2=int64#2,>k=int64#2
151# asm 2: mov <arg2=%rsi,>k=%rsi
152mov %rsi,%rsi
153
154# qhasm: xmm0 = *(int128 *) (k + 0)
155# asm 1: movdqa 0(<k=int64#2),>xmm0=int6464#1
156# asm 2: movdqa 0(<k=%rsi),>xmm0=%xmm0
157movdqa 0(%rsi),%xmm0
158
159# qhasm: shuffle bytes of xmm0 by M0
160# asm 1: pshufb M0,<xmm0=int6464#1
161# asm 2: pshufb M0,<xmm0=%xmm0
162pshufb M0,%xmm0
163
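# Load the 16-byte key and permute its bytes by M0 into the bit ordering
# used by the bitsliced representation.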
164# qhasm: xmm1 = xmm0
165# asm 1: movdqa <xmm0=int6464#1,>xmm1=int6464#2
166# asm 2: movdqa <xmm0=%xmm0,>xmm1=%xmm1
167movdqa %xmm0,%xmm1
168
169# qhasm: xmm2 = xmm0
170# asm 1: movdqa <xmm0=int6464#1,>xmm2=int6464#3
171# asm 2: movdqa <xmm0=%xmm0,>xmm2=%xmm2
172movdqa %xmm0,%xmm2
173
174# qhasm: xmm3 = xmm0
175# asm 1: movdqa <xmm0=int6464#1,>xmm3=int6464#4
176# asm 2: movdqa <xmm0=%xmm0,>xmm3=%xmm3
177movdqa %xmm0,%xmm3
178
179# qhasm: xmm4 = xmm0
180# asm 1: movdqa <xmm0=int6464#1,>xmm4=int6464#5
181# asm 2: movdqa <xmm0=%xmm0,>xmm4=%xmm4
182movdqa %xmm0,%xmm4
183
184# qhasm: xmm5 = xmm0
185# asm 1: movdqa <xmm0=int6464#1,>xmm5=int6464#6
186# asm 2: movdqa <xmm0=%xmm0,>xmm5=%xmm5
187movdqa %xmm0,%xmm5
188
189# qhasm: xmm6 = xmm0
190# asm 1: movdqa <xmm0=int6464#1,>xmm6=int6464#7
191# asm 2: movdqa <xmm0=%xmm0,>xmm6=%xmm6
192movdqa %xmm0,%xmm6
193
194# qhasm: xmm7 = xmm0
195# asm 1: movdqa <xmm0=int6464#1,>xmm7=int6464#8
196# asm 2: movdqa <xmm0=%xmm0,>xmm7=%xmm7
197movdqa %xmm0,%xmm7
198
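# Replicate the key into all eight registers: transposing eight identical
# copies below yields round key 0 in bitsliced form for eight parallel
# blocks.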
199# qhasm: t = xmm6
200# asm 1: movdqa <xmm6=int6464#7,>t=int6464#9
201# asm 2: movdqa <xmm6=%xmm6,>t=%xmm8
202movdqa %xmm6,%xmm8
203
204# qhasm: uint6464 t >>= 1
205# asm 1: psrlq $1,<t=int6464#9
206# asm 2: psrlq $1,<t=%xmm8
207psrlq $1,%xmm8
208
209# qhasm: t ^= xmm7
210# asm 1: pxor <xmm7=int6464#8,<t=int6464#9
211# asm 2: pxor <xmm7=%xmm7,<t=%xmm8
212pxor %xmm7,%xmm8
213
214# qhasm: t &= BS0
215# asm 1: pand BS0,<t=int6464#9
216# asm 2: pand BS0,<t=%xmm8
217pand BS0,%xmm8
218
219# qhasm: xmm7 ^= t
220# asm 1: pxor <t=int6464#9,<xmm7=int6464#8
221# asm 2: pxor <t=%xmm8,<xmm7=%xmm7
222pxor %xmm8,%xmm7
223
224# qhasm: uint6464 t <<= 1
225# asm 1: psllq $1,<t=int6464#9
226# asm 2: psllq $1,<t=%xmm8
227psllq $1,%xmm8
228
229# qhasm: xmm6 ^= t
230# asm 1: pxor <t=int6464#9,<xmm6=int6464#7
231# asm 2: pxor <t=%xmm8,<xmm6=%xmm6
232pxor %xmm8,%xmm6
233
234# qhasm: t = xmm4
235# asm 1: movdqa <xmm4=int6464#5,>t=int6464#9
236# asm 2: movdqa <xmm4=%xmm4,>t=%xmm8
237movdqa %xmm4,%xmm8
238
239# qhasm: uint6464 t >>= 1
240# asm 1: psrlq $1,<t=int6464#9
241# asm 2: psrlq $1,<t=%xmm8
242psrlq $1,%xmm8
243
244# qhasm: t ^= xmm5
245# asm 1: pxor <xmm5=int6464#6,<t=int6464#9
246# asm 2: pxor <xmm5=%xmm5,<t=%xmm8
247pxor %xmm5,%xmm8
248
249# qhasm: t &= BS0
250# asm 1: pand BS0,<t=int6464#9
251# asm 2: pand BS0,<t=%xmm8
252pand BS0,%xmm8
253
254# qhasm: xmm5 ^= t
255# asm 1: pxor <t=int6464#9,<xmm5=int6464#6
256# asm 2: pxor <t=%xmm8,<xmm5=%xmm5
257pxor %xmm8,%xmm5
258
259# qhasm: uint6464 t <<= 1
260# asm 1: psllq $1,<t=int6464#9
261# asm 2: psllq $1,<t=%xmm8
262psllq $1,%xmm8
263
264# qhasm: xmm4 ^= t
265# asm 1: pxor <t=int6464#9,<xmm4=int6464#5
266# asm 2: pxor <t=%xmm8,<xmm4=%xmm4
267pxor %xmm8,%xmm4
268
269# qhasm: t = xmm2
270# asm 1: movdqa <xmm2=int6464#3,>t=int6464#9
271# asm 2: movdqa <xmm2=%xmm2,>t=%xmm8
272movdqa %xmm2,%xmm8
273
274# qhasm: uint6464 t >>= 1
275# asm 1: psrlq $1,<t=int6464#9
276# asm 2: psrlq $1,<t=%xmm8
277psrlq $1,%xmm8
278
279# qhasm: t ^= xmm3
280# asm 1: pxor <xmm3=int6464#4,<t=int6464#9
281# asm 2: pxor <xmm3=%xmm3,<t=%xmm8
282pxor %xmm3,%xmm8
283
284# qhasm: t &= BS0
285# asm 1: pand BS0,<t=int6464#9
286# asm 2: pand BS0,<t=%xmm8
287pand BS0,%xmm8
288
289# qhasm: xmm3 ^= t
290# asm 1: pxor <t=int6464#9,<xmm3=int6464#4
291# asm 2: pxor <t=%xmm8,<xmm3=%xmm3
292pxor %xmm8,%xmm3
293
294# qhasm: uint6464 t <<= 1
295# asm 1: psllq $1,<t=int6464#9
296# asm 2: psllq $1,<t=%xmm8
297psllq $1,%xmm8
298
299# qhasm: xmm2 ^= t
300# asm 1: pxor <t=int6464#9,<xmm2=int6464#3
301# asm 2: pxor <t=%xmm8,<xmm2=%xmm2
302pxor %xmm8,%xmm2
303
304# qhasm: t = xmm0
305# asm 1: movdqa <xmm0=int6464#1,>t=int6464#9
306# asm 2: movdqa <xmm0=%xmm0,>t=%xmm8
307movdqa %xmm0,%xmm8
308
309# qhasm: uint6464 t >>= 1
310# asm 1: psrlq $1,<t=int6464#9
311# asm 2: psrlq $1,<t=%xmm8
312psrlq $1,%xmm8
313
314# qhasm: t ^= xmm1
315# asm 1: pxor <xmm1=int6464#2,<t=int6464#9
316# asm 2: pxor <xmm1=%xmm1,<t=%xmm8
317pxor %xmm1,%xmm8
318
319# qhasm: t &= BS0
320# asm 1: pand BS0,<t=int6464#9
321# asm 2: pand BS0,<t=%xmm8
322pand BS0,%xmm8
323
324# qhasm: xmm1 ^= t
325# asm 1: pxor <t=int6464#9,<xmm1=int6464#2
326# asm 2: pxor <t=%xmm8,<xmm1=%xmm1
327pxor %xmm8,%xmm1
328
329# qhasm: uint6464 t <<= 1
330# asm 1: psllq $1,<t=int6464#9
331# asm 2: psllq $1,<t=%xmm8
332psllq $1,%xmm8
333
334# qhasm: xmm0 ^= t
335# asm 1: pxor <t=int6464#9,<xmm0=int6464#1
336# asm 2: pxor <t=%xmm8,<xmm0=%xmm0
337pxor %xmm8,%xmm0
338
339# qhasm: t = xmm5
340# asm 1: movdqa <xmm5=int6464#6,>t=int6464#9
341# asm 2: movdqa <xmm5=%xmm5,>t=%xmm8
342movdqa %xmm5,%xmm8
343
344# qhasm: uint6464 t >>= 2
345# asm 1: psrlq $2,<t=int6464#9
346# asm 2: psrlq $2,<t=%xmm8
347psrlq $2,%xmm8
348
349# qhasm: t ^= xmm7
350# asm 1: pxor <xmm7=int6464#8,<t=int6464#9
351# asm 2: pxor <xmm7=%xmm7,<t=%xmm8
352pxor %xmm7,%xmm8
353
354# qhasm: t &= BS1
355# asm 1: pand BS1,<t=int6464#9
356# asm 2: pand BS1,<t=%xmm8
357pand BS1,%xmm8
358
359# qhasm: xmm7 ^= t
360# asm 1: pxor <t=int6464#9,<xmm7=int6464#8
361# asm 2: pxor <t=%xmm8,<xmm7=%xmm7
362pxor %xmm8,%xmm7
363
364# qhasm: uint6464 t <<= 2
365# asm 1: psllq $2,<t=int6464#9
366# asm 2: psllq $2,<t=%xmm8
367psllq $2,%xmm8
368
369# qhasm: xmm5 ^= t
370# asm 1: pxor <t=int6464#9,<xmm5=int6464#6
371# asm 2: pxor <t=%xmm8,<xmm5=%xmm5
372pxor %xmm8,%xmm5
373
374# qhasm: t = xmm4
375# asm 1: movdqa <xmm4=int6464#5,>t=int6464#9
376# asm 2: movdqa <xmm4=%xmm4,>t=%xmm8
377movdqa %xmm4,%xmm8
378
379# qhasm: uint6464 t >>= 2
380# asm 1: psrlq $2,<t=int6464#9
381# asm 2: psrlq $2,<t=%xmm8
382psrlq $2,%xmm8
383
384# qhasm: t ^= xmm6
385# asm 1: pxor <xmm6=int6464#7,<t=int6464#9
386# asm 2: pxor <xmm6=%xmm6,<t=%xmm8
387pxor %xmm6,%xmm8
388
389# qhasm: t &= BS1
390# asm 1: pand BS1,<t=int6464#9
391# asm 2: pand BS1,<t=%xmm8
392pand BS1,%xmm8
393
394# qhasm: xmm6 ^= t
395# asm 1: pxor <t=int6464#9,<xmm6=int6464#7
396# asm 2: pxor <t=%xmm8,<xmm6=%xmm6
397pxor %xmm8,%xmm6
398
399# qhasm: uint6464 t <<= 2
400# asm 1: psllq $2,<t=int6464#9
401# asm 2: psllq $2,<t=%xmm8
402psllq $2,%xmm8
403
404# qhasm: xmm4 ^= t
405# asm 1: pxor <t=int6464#9,<xmm4=int6464#5
406# asm 2: pxor <t=%xmm8,<xmm4=%xmm4
407pxor %xmm8,%xmm4
408
409# qhasm: t = xmm1
410# asm 1: movdqa <xmm1=int6464#2,>t=int6464#9
411# asm 2: movdqa <xmm1=%xmm1,>t=%xmm8
412movdqa %xmm1,%xmm8
413
414# qhasm: uint6464 t >>= 2
415# asm 1: psrlq $2,<t=int6464#9
416# asm 2: psrlq $2,<t=%xmm8
417psrlq $2,%xmm8
418
419# qhasm: t ^= xmm3
420# asm 1: pxor <xmm3=int6464#4,<t=int6464#9
421# asm 2: pxor <xmm3=%xmm3,<t=%xmm8
422pxor %xmm3,%xmm8
423
424# qhasm: t &= BS1
425# asm 1: pand BS1,<t=int6464#9
426# asm 2: pand BS1,<t=%xmm8
427pand BS1,%xmm8
428
429# qhasm: xmm3 ^= t
430# asm 1: pxor <t=int6464#9,<xmm3=int6464#4
431# asm 2: pxor <t=%xmm8,<xmm3=%xmm3
432pxor %xmm8,%xmm3
433
434# qhasm: uint6464 t <<= 2
435# asm 1: psllq $2,<t=int6464#9
436# asm 2: psllq $2,<t=%xmm8
437psllq $2,%xmm8
438
439# qhasm: xmm1 ^= t
440# asm 1: pxor <t=int6464#9,<xmm1=int6464#2
441# asm 2: pxor <t=%xmm8,<xmm1=%xmm1
442pxor %xmm8,%xmm1
443
444# qhasm: t = xmm0
445# asm 1: movdqa <xmm0=int6464#1,>t=int6464#9
446# asm 2: movdqa <xmm0=%xmm0,>t=%xmm8
447movdqa %xmm0,%xmm8
448
449# qhasm: uint6464 t >>= 2
450# asm 1: psrlq $2,<t=int6464#9
451# asm 2: psrlq $2,<t=%xmm8
452psrlq $2,%xmm8
453
454# qhasm: t ^= xmm2
455# asm 1: pxor <xmm2=int6464#3,<t=int6464#9
456# asm 2: pxor <xmm2=%xmm2,<t=%xmm8
457pxor %xmm2,%xmm8
458
459# qhasm: t &= BS1
460# asm 1: pand BS1,<t=int6464#9
461# asm 2: pand BS1,<t=%xmm8
462pand BS1,%xmm8
463
464# qhasm: xmm2 ^= t
465# asm 1: pxor <t=int6464#9,<xmm2=int6464#3
466# asm 2: pxor <t=%xmm8,<xmm2=%xmm2
467pxor %xmm8,%xmm2
468
469# qhasm: uint6464 t <<= 2
470# asm 1: psllq $2,<t=int6464#9
471# asm 2: psllq $2,<t=%xmm8
472psllq $2,%xmm8
473
474# qhasm: xmm0 ^= t
475# asm 1: pxor <t=int6464#9,<xmm0=int6464#1
476# asm 2: pxor <t=%xmm8,<xmm0=%xmm0
477pxor %xmm8,%xmm0
478
479# qhasm: t = xmm3
480# asm 1: movdqa <xmm3=int6464#4,>t=int6464#9
481# asm 2: movdqa <xmm3=%xmm3,>t=%xmm8
482movdqa %xmm3,%xmm8
483
484# qhasm: uint6464 t >>= 4
485# asm 1: psrlq $4,<t=int6464#9
486# asm 2: psrlq $4,<t=%xmm8
487psrlq $4,%xmm8
488
489# qhasm: t ^= xmm7
490# asm 1: pxor <xmm7=int6464#8,<t=int6464#9
491# asm 2: pxor <xmm7=%xmm7,<t=%xmm8
492pxor %xmm7,%xmm8
493
494# qhasm: t &= BS2
495# asm 1: pand BS2,<t=int6464#9
496# asm 2: pand BS2,<t=%xmm8
497pand BS2,%xmm8
498
499# qhasm: xmm7 ^= t
500# asm 1: pxor <t=int6464#9,<xmm7=int6464#8
501# asm 2: pxor <t=%xmm8,<xmm7=%xmm7
502pxor %xmm8,%xmm7
503
504# qhasm: uint6464 t <<= 4
505# asm 1: psllq $4,<t=int6464#9
506# asm 2: psllq $4,<t=%xmm8
507psllq $4,%xmm8
508
509# qhasm: xmm3 ^= t
510# asm 1: pxor <t=int6464#9,<xmm3=int6464#4
511# asm 2: pxor <t=%xmm8,<xmm3=%xmm3
512pxor %xmm8,%xmm3
513
514# qhasm: t = xmm2
515# asm 1: movdqa <xmm2=int6464#3,>t=int6464#9
516# asm 2: movdqa <xmm2=%xmm2,>t=%xmm8
517movdqa %xmm2,%xmm8
518
519# qhasm: uint6464 t >>= 4
520# asm 1: psrlq $4,<t=int6464#9
521# asm 2: psrlq $4,<t=%xmm8
522psrlq $4,%xmm8
523
524# qhasm: t ^= xmm6
525# asm 1: pxor <xmm6=int6464#7,<t=int6464#9
526# asm 2: pxor <xmm6=%xmm6,<t=%xmm8
527pxor %xmm6,%xmm8
528
529# qhasm: t &= BS2
530# asm 1: pand BS2,<t=int6464#9
531# asm 2: pand BS2,<t=%xmm8
532pand BS2,%xmm8
533
534# qhasm: xmm6 ^= t
535# asm 1: pxor <t=int6464#9,<xmm6=int6464#7
536# asm 2: pxor <t=%xmm8,<xmm6=%xmm6
537pxor %xmm8,%xmm6
538
539# qhasm: uint6464 t <<= 4
540# asm 1: psllq $4,<t=int6464#9
541# asm 2: psllq $4,<t=%xmm8
542psllq $4,%xmm8
543
544# qhasm: xmm2 ^= t
545# asm 1: pxor <t=int6464#9,<xmm2=int6464#3
546# asm 2: pxor <t=%xmm8,<xmm2=%xmm2
547pxor %xmm8,%xmm2
548
549# qhasm: t = xmm1
550# asm 1: movdqa <xmm1=int6464#2,>t=int6464#9
551# asm 2: movdqa <xmm1=%xmm1,>t=%xmm8
552movdqa %xmm1,%xmm8
553
554# qhasm: uint6464 t >>= 4
555# asm 1: psrlq $4,<t=int6464#9
556# asm 2: psrlq $4,<t=%xmm8
557psrlq $4,%xmm8
558
559# qhasm: t ^= xmm5
560# asm 1: pxor <xmm5=int6464#6,<t=int6464#9
561# asm 2: pxor <xmm5=%xmm5,<t=%xmm8
562pxor %xmm5,%xmm8
563
564# qhasm: t &= BS2
565# asm 1: pand BS2,<t=int6464#9
566# asm 2: pand BS2,<t=%xmm8
567pand BS2,%xmm8
568
569# qhasm: xmm5 ^= t
570# asm 1: pxor <t=int6464#9,<xmm5=int6464#6
571# asm 2: pxor <t=%xmm8,<xmm5=%xmm5
572pxor %xmm8,%xmm5
573
574# qhasm: uint6464 t <<= 4
575# asm 1: psllq $4,<t=int6464#9
576# asm 2: psllq $4,<t=%xmm8
577psllq $4,%xmm8
578
579# qhasm: xmm1 ^= t
580# asm 1: pxor <t=int6464#9,<xmm1=int6464#2
581# asm 2: pxor <t=%xmm8,<xmm1=%xmm1
582pxor %xmm8,%xmm1
583
584# qhasm: t = xmm0
585# asm 1: movdqa <xmm0=int6464#1,>t=int6464#9
586# asm 2: movdqa <xmm0=%xmm0,>t=%xmm8
587movdqa %xmm0,%xmm8
588
589# qhasm: uint6464 t >>= 4
590# asm 1: psrlq $4,<t=int6464#9
591# asm 2: psrlq $4,<t=%xmm8
592psrlq $4,%xmm8
593
594# qhasm: t ^= xmm4
595# asm 1: pxor <xmm4=int6464#5,<t=int6464#9
596# asm 2: pxor <xmm4=%xmm4,<t=%xmm8
597pxor %xmm4,%xmm8
598
599# qhasm: t &= BS2
600# asm 1: pand BS2,<t=int6464#9
601# asm 2: pand BS2,<t=%xmm8
602pand BS2,%xmm8
603
604# qhasm: xmm4 ^= t
605# asm 1: pxor <t=int6464#9,<xmm4=int6464#5
606# asm 2: pxor <t=%xmm8,<xmm4=%xmm4
607pxor %xmm8,%xmm4
608
609# qhasm: uint6464 t <<= 4
610# asm 1: psllq $4,<t=int6464#9
611# asm 2: psllq $4,<t=%xmm8
612psllq $4,%xmm8
613
614# qhasm: xmm0 ^= t
615# asm 1: pxor <t=int6464#9,<xmm0=int6464#1
616# asm 2: pxor <t=%xmm8,<xmm0=%xmm0
617pxor %xmm8,%xmm0
618
619# qhasm: *(int128 *) (c + 0) = xmm0
620# asm 1: movdqa <xmm0=int6464#1,0(<c=int64#1)
621# asm 2: movdqa <xmm0=%xmm0,0(<c=%rdi)
622movdqa %xmm0,0(%rdi)
623
624# qhasm: *(int128 *) (c + 16) = xmm1
625# asm 1: movdqa <xmm1=int6464#2,16(<c=int64#1)
626# asm 2: movdqa <xmm1=%xmm1,16(<c=%rdi)
627movdqa %xmm1,16(%rdi)
628
629# qhasm: *(int128 *) (c + 32) = xmm2
630# asm 1: movdqa <xmm2=int6464#3,32(<c=int64#1)
631# asm 2: movdqa <xmm2=%xmm2,32(<c=%rdi)
632movdqa %xmm2,32(%rdi)
633
634# qhasm: *(int128 *) (c + 48) = xmm3
635# asm 1: movdqa <xmm3=int6464#4,48(<c=int64#1)
636# asm 2: movdqa <xmm3=%xmm3,48(<c=%rdi)
637movdqa %xmm3,48(%rdi)
638
639# qhasm: *(int128 *) (c + 64) = xmm4
640# asm 1: movdqa <xmm4=int6464#5,64(<c=int64#1)
641# asm 2: movdqa <xmm4=%xmm4,64(<c=%rdi)
642movdqa %xmm4,64(%rdi)
643
644# qhasm: *(int128 *) (c + 80) = xmm5
645# asm 1: movdqa <xmm5=int6464#6,80(<c=int64#1)
646# asm 2: movdqa <xmm5=%xmm5,80(<c=%rdi)
647movdqa %xmm5,80(%rdi)
648
649# qhasm: *(int128 *) (c + 96) = xmm6
650# asm 1: movdqa <xmm6=int6464#7,96(<c=int64#1)
651# asm 2: movdqa <xmm6=%xmm6,96(<c=%rdi)
652movdqa %xmm6,96(%rdi)
653
654# qhasm: *(int128 *) (c + 112) = xmm7
655# asm 1: movdqa <xmm7=int6464#8,112(<c=int64#1)
656# asm 2: movdqa <xmm7=%xmm7,112(<c=%rdi)
657movdqa %xmm7,112(%rdi)
658
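# The 128 bytes just stored at c+0 .. c+112 are bitsliced round key 0;
# the remaining ten round keys fill the rest of the 1408-byte schedule.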
659# qhasm: shuffle bytes of xmm0 by ROTB
660# asm 1: pshufb ROTB,<xmm0=int6464#1
661# asm 2: pshufb ROTB,<xmm0=%xmm0
662pshufb ROTB,%xmm0
663
664# qhasm: shuffle bytes of xmm1 by ROTB
665# asm 1: pshufb ROTB,<xmm1=int6464#2
666# asm 2: pshufb ROTB,<xmm1=%xmm1
667pshufb ROTB,%xmm1
668
669# qhasm: shuffle bytes of xmm2 by ROTB
670# asm 1: pshufb ROTB,<xmm2=int6464#3
671# asm 2: pshufb ROTB,<xmm2=%xmm2
672pshufb ROTB,%xmm2
673
674# qhasm: shuffle bytes of xmm3 by ROTB
675# asm 1: pshufb ROTB,<xmm3=int6464#4
676# asm 2: pshufb ROTB,<xmm3=%xmm3
677pshufb ROTB,%xmm3
678
679# qhasm: shuffle bytes of xmm4 by ROTB
680# asm 1: pshufb ROTB,<xmm4=int6464#5
681# asm 2: pshufb ROTB,<xmm4=%xmm4
682pshufb ROTB,%xmm4
683
684# qhasm: shuffle bytes of xmm5 by ROTB
685# asm 1: pshufb ROTB,<xmm5=int6464#6
686# asm 2: pshufb ROTB,<xmm5=%xmm5
687pshufb ROTB,%xmm5
688
689# qhasm: shuffle bytes of xmm6 by ROTB
690# asm 1: pshufb ROTB,<xmm6=int6464#7
691# asm 2: pshufb ROTB,<xmm6=%xmm6
692pshufb ROTB,%xmm6
693
694# qhasm: shuffle bytes of xmm7 by ROTB
695# asm 1: pshufb ROTB,<xmm7=int6464#8
696# asm 2: pshufb ROTB,<xmm7=%xmm7
697pshufb ROTB,%xmm7
698
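# ROTB rotates the bytes of the key state as the key-schedule step that
# feeds the next round key; the pxor chain that follows is the input
# linear layer of the bitsliced S-box circuit.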
699# qhasm: xmm5 ^= xmm6
700# asm 1: pxor <xmm6=int6464#7,<xmm5=int6464#6
701# asm 2: pxor <xmm6=%xmm6,<xmm5=%xmm5
702pxor %xmm6,%xmm5
703
704# qhasm: xmm2 ^= xmm1
705# asm 1: pxor <xmm1=int6464#2,<xmm2=int6464#3
706# asm 2: pxor <xmm1=%xmm1,<xmm2=%xmm2
707pxor %xmm1,%xmm2
708
709# qhasm: xmm5 ^= xmm0
710# asm 1: pxor <xmm0=int6464#1,<xmm5=int6464#6
711# asm 2: pxor <xmm0=%xmm0,<xmm5=%xmm5
712pxor %xmm0,%xmm5
713
714# qhasm: xmm6 ^= xmm2
715# asm 1: pxor <xmm2=int6464#3,<xmm6=int6464#7
716# asm 2: pxor <xmm2=%xmm2,<xmm6=%xmm6
717pxor %xmm2,%xmm6
718
719# qhasm: xmm3 ^= xmm0
720# asm 1: pxor <xmm0=int6464#1,<xmm3=int6464#4
721# asm 2: pxor <xmm0=%xmm0,<xmm3=%xmm3
722pxor %xmm0,%xmm3
723
724# qhasm: xmm6 ^= xmm3
725# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
726# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
727pxor %xmm3,%xmm6
728
729# qhasm: xmm3 ^= xmm7
730# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4
731# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3
732pxor %xmm7,%xmm3
733
734# qhasm: xmm3 ^= xmm4
735# asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4
736# asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3
737pxor %xmm4,%xmm3
738
739# qhasm: xmm7 ^= xmm5
740# asm 1: pxor <xmm5=int6464#6,<xmm7=int6464#8
741# asm 2: pxor <xmm5=%xmm5,<xmm7=%xmm7
742pxor %xmm5,%xmm7
743
744# qhasm: xmm3 ^= xmm1
745# asm 1: pxor <xmm1=int6464#2,<xmm3=int6464#4
746# asm 2: pxor <xmm1=%xmm1,<xmm3=%xmm3
747pxor %xmm1,%xmm3
748
749# qhasm: xmm4 ^= xmm5
750# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
751# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
752pxor %xmm5,%xmm4
753
754# qhasm: xmm2 ^= xmm7
755# asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3
756# asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2
757pxor %xmm7,%xmm2
758
759# qhasm: xmm1 ^= xmm5
760# asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2
761# asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1
762pxor %xmm5,%xmm1
763
764# qhasm: xmm11 = xmm7
765# asm 1: movdqa <xmm7=int6464#8,>xmm11=int6464#9
766# asm 2: movdqa <xmm7=%xmm7,>xmm11=%xmm8
767movdqa %xmm7,%xmm8
768
769# qhasm: xmm10 = xmm1
770# asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10
771# asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9
772movdqa %xmm1,%xmm9
773
774# qhasm: xmm9 = xmm5
775# asm 1: movdqa <xmm5=int6464#6,>xmm9=int6464#11
776# asm 2: movdqa <xmm5=%xmm5,>xmm9=%xmm10
777movdqa %xmm5,%xmm10
778
779# qhasm: xmm13 = xmm2
780# asm 1: movdqa <xmm2=int6464#3,>xmm13=int6464#12
781# asm 2: movdqa <xmm2=%xmm2,>xmm13=%xmm11
782movdqa %xmm2,%xmm11
783
784# qhasm: xmm12 = xmm6
785# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#13
786# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm12
787movdqa %xmm6,%xmm12
788
789# qhasm: xmm11 ^= xmm4
790# asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#9
791# asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm8
792pxor %xmm4,%xmm8
793
794# qhasm: xmm10 ^= xmm2
795# asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#10
796# asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm9
797pxor %xmm2,%xmm9
798
799# qhasm: xmm9 ^= xmm3
800# asm 1: pxor <xmm3=int6464#4,<xmm9=int6464#11
801# asm 2: pxor <xmm3=%xmm3,<xmm9=%xmm10
802pxor %xmm3,%xmm10
803
804# qhasm: xmm13 ^= xmm4
805# asm 1: pxor <xmm4=int6464#5,<xmm13=int6464#12
806# asm 2: pxor <xmm4=%xmm4,<xmm13=%xmm11
807pxor %xmm4,%xmm11
808
809# qhasm: xmm12 ^= xmm0
810# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13
811# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12
812pxor %xmm0,%xmm12
813
814# qhasm: xmm14 = xmm11
815# asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14
816# asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13
817movdqa %xmm8,%xmm13
818
819# qhasm: xmm8 = xmm10
820# asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15
821# asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14
822movdqa %xmm9,%xmm14
823
824# qhasm: xmm15 = xmm11
825# asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16
826# asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15
827movdqa %xmm8,%xmm15
828
829# qhasm: xmm10 |= xmm9
830# asm 1: por <xmm9=int6464#11,<xmm10=int6464#10
831# asm 2: por <xmm9=%xmm10,<xmm10=%xmm9
832por %xmm10,%xmm9
833
834# qhasm: xmm11 |= xmm12
835# asm 1: por <xmm12=int6464#13,<xmm11=int6464#9
836# asm 2: por <xmm12=%xmm12,<xmm11=%xmm8
837por %xmm12,%xmm8
838
839# qhasm: xmm15 ^= xmm8
840# asm 1: pxor <xmm8=int6464#15,<xmm15=int6464#16
841# asm 2: pxor <xmm8=%xmm14,<xmm15=%xmm15
842pxor %xmm14,%xmm15
843
844# qhasm: xmm14 &= xmm12
845# asm 1: pand <xmm12=int6464#13,<xmm14=int6464#14
846# asm 2: pand <xmm12=%xmm12,<xmm14=%xmm13
847pand %xmm12,%xmm13
848
849# qhasm: xmm8 &= xmm9
850# asm 1: pand <xmm9=int6464#11,<xmm8=int6464#15
851# asm 2: pand <xmm9=%xmm10,<xmm8=%xmm14
852pand %xmm10,%xmm14
853
854# qhasm: xmm12 ^= xmm9
855# asm 1: pxor <xmm9=int6464#11,<xmm12=int6464#13
856# asm 2: pxor <xmm9=%xmm10,<xmm12=%xmm12
857pxor %xmm10,%xmm12
858
859# qhasm: xmm15 &= xmm12
860# asm 1: pand <xmm12=int6464#13,<xmm15=int6464#16
861# asm 2: pand <xmm12=%xmm12,<xmm15=%xmm15
862pand %xmm12,%xmm15
863
864# qhasm: xmm12 = xmm3
865# asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#11
866# asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm10
867movdqa %xmm3,%xmm10
868
869# qhasm: xmm12 ^= xmm0
870# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#11
871# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm10
872pxor %xmm0,%xmm10
873
874# qhasm: xmm13 &= xmm12
875# asm 1: pand <xmm12=int6464#11,<xmm13=int6464#12
876# asm 2: pand <xmm12=%xmm10,<xmm13=%xmm11
877pand %xmm10,%xmm11
878
879# qhasm: xmm11 ^= xmm13
880# asm 1: pxor <xmm13=int6464#12,<xmm11=int6464#9
881# asm 2: pxor <xmm13=%xmm11,<xmm11=%xmm8
882pxor %xmm11,%xmm8
883
884# qhasm: xmm10 ^= xmm13
885# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
886# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
887pxor %xmm11,%xmm9
888
889# qhasm: xmm13 = xmm7
890# asm 1: movdqa <xmm7=int6464#8,>xmm13=int6464#11
891# asm 2: movdqa <xmm7=%xmm7,>xmm13=%xmm10
892movdqa %xmm7,%xmm10
893
894# qhasm: xmm13 ^= xmm1
895# asm 1: pxor <xmm1=int6464#2,<xmm13=int6464#11
896# asm 2: pxor <xmm1=%xmm1,<xmm13=%xmm10
897pxor %xmm1,%xmm10
898
899# qhasm: xmm12 = xmm5
900# asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#12
901# asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm11
902movdqa %xmm5,%xmm11
903
904# qhasm: xmm9 = xmm13
905# asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13
906# asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12
907movdqa %xmm10,%xmm12
908
909# qhasm: xmm12 ^= xmm6
910# asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#12
911# asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm11
912pxor %xmm6,%xmm11
913
914# qhasm: xmm9 |= xmm12
915# asm 1: por <xmm12=int6464#12,<xmm9=int6464#13
916# asm 2: por <xmm12=%xmm11,<xmm9=%xmm12
917por %xmm11,%xmm12
918
919# qhasm: xmm13 &= xmm12
920# asm 1: pand <xmm12=int6464#12,<xmm13=int6464#11
921# asm 2: pand <xmm12=%xmm11,<xmm13=%xmm10
922pand %xmm11,%xmm10
923
924# qhasm: xmm8 ^= xmm13
925# asm 1: pxor <xmm13=int6464#11,<xmm8=int6464#15
926# asm 2: pxor <xmm13=%xmm10,<xmm8=%xmm14
927pxor %xmm10,%xmm14
928
929# qhasm: xmm11 ^= xmm15
930# asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#9
931# asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm8
932pxor %xmm15,%xmm8
933
934# qhasm: xmm10 ^= xmm14
935# asm 1: pxor <xmm14=int6464#14,<xmm10=int6464#10
936# asm 2: pxor <xmm14=%xmm13,<xmm10=%xmm9
937pxor %xmm13,%xmm9
938
939# qhasm: xmm9 ^= xmm15
940# asm 1: pxor <xmm15=int6464#16,<xmm9=int6464#13
941# asm 2: pxor <xmm15=%xmm15,<xmm9=%xmm12
942pxor %xmm15,%xmm12
943
944# qhasm: xmm8 ^= xmm14
945# asm 1: pxor <xmm14=int6464#14,<xmm8=int6464#15
946# asm 2: pxor <xmm14=%xmm13,<xmm8=%xmm14
947pxor %xmm13,%xmm14
948
949# qhasm: xmm9 ^= xmm14
950# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
951# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
952pxor %xmm13,%xmm12
953
954# qhasm: xmm12 = xmm2
955# asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#11
956# asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm10
957movdqa %xmm2,%xmm10
958
959# qhasm: xmm13 = xmm4
960# asm 1: movdqa <xmm4=int6464#5,>xmm13=int6464#12
961# asm 2: movdqa <xmm4=%xmm4,>xmm13=%xmm11
962movdqa %xmm4,%xmm11
963
964# qhasm: xmm14 = xmm1
965# asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14
966# asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13
967movdqa %xmm1,%xmm13
968
969# qhasm: xmm15 = xmm7
970# asm 1: movdqa <xmm7=int6464#8,>xmm15=int6464#16
971# asm 2: movdqa <xmm7=%xmm7,>xmm15=%xmm15
972movdqa %xmm7,%xmm15
973
974# qhasm: xmm12 &= xmm3
975# asm 1: pand <xmm3=int6464#4,<xmm12=int6464#11
976# asm 2: pand <xmm3=%xmm3,<xmm12=%xmm10
977pand %xmm3,%xmm10
978
979# qhasm: xmm13 &= xmm0
980# asm 1: pand <xmm0=int6464#1,<xmm13=int6464#12
981# asm 2: pand <xmm0=%xmm0,<xmm13=%xmm11
982pand %xmm0,%xmm11
983
984# qhasm: xmm14 &= xmm5
985# asm 1: pand <xmm5=int6464#6,<xmm14=int6464#14
986# asm 2: pand <xmm5=%xmm5,<xmm14=%xmm13
987pand %xmm5,%xmm13
988
989# qhasm: xmm15 |= xmm6
990# asm 1: por <xmm6=int6464#7,<xmm15=int6464#16
991# asm 2: por <xmm6=%xmm6,<xmm15=%xmm15
992por %xmm6,%xmm15
993
994# qhasm: xmm11 ^= xmm12
995# asm 1: pxor <xmm12=int6464#11,<xmm11=int6464#9
996# asm 2: pxor <xmm12=%xmm10,<xmm11=%xmm8
997pxor %xmm10,%xmm8
998
999# qhasm: xmm10 ^= xmm13
1000# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
1001# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
1002pxor %xmm11,%xmm9
1003
1004# qhasm: xmm9 ^= xmm14
1005# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
1006# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
1007pxor %xmm13,%xmm12
1008
1009# qhasm: xmm8 ^= xmm15
1010# asm 1: pxor <xmm15=int6464#16,<xmm8=int6464#15
1011# asm 2: pxor <xmm15=%xmm15,<xmm8=%xmm14
1012pxor %xmm15,%xmm14
1013
1014# qhasm: xmm12 = xmm11
1015# asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11
1016# asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10
1017movdqa %xmm8,%xmm10
1018
1019# qhasm: xmm12 ^= xmm10
1020# asm 1: pxor <xmm10=int6464#10,<xmm12=int6464#11
1021# asm 2: pxor <xmm10=%xmm9,<xmm12=%xmm10
1022pxor %xmm9,%xmm10
1023
1024# qhasm: xmm11 &= xmm9
1025# asm 1: pand <xmm9=int6464#13,<xmm11=int6464#9
1026# asm 2: pand <xmm9=%xmm12,<xmm11=%xmm8
1027pand %xmm12,%xmm8
1028
1029# qhasm: xmm14 = xmm8
1030# asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12
1031# asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11
1032movdqa %xmm14,%xmm11
1033
1034# qhasm: xmm14 ^= xmm11
1035# asm 1: pxor <xmm11=int6464#9,<xmm14=int6464#12
1036# asm 2: pxor <xmm11=%xmm8,<xmm14=%xmm11
1037pxor %xmm8,%xmm11
1038
1039# qhasm: xmm15 = xmm12
1040# asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14
1041# asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13
1042movdqa %xmm10,%xmm13
1043
1044# qhasm: xmm15 &= xmm14
1045# asm 1: pand <xmm14=int6464#12,<xmm15=int6464#14
1046# asm 2: pand <xmm14=%xmm11,<xmm15=%xmm13
1047pand %xmm11,%xmm13
1048
1049# qhasm: xmm15 ^= xmm10
1050# asm 1: pxor <xmm10=int6464#10,<xmm15=int6464#14
1051# asm 2: pxor <xmm10=%xmm9,<xmm15=%xmm13
1052pxor %xmm9,%xmm13
1053
1054# qhasm: xmm13 = xmm9
1055# asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16
1056# asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15
1057movdqa %xmm12,%xmm15
1058
1059# qhasm: xmm13 ^= xmm8
1060# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
1061# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
1062pxor %xmm14,%xmm15
1063
1064# qhasm: xmm11 ^= xmm10
1065# asm 1: pxor <xmm10=int6464#10,<xmm11=int6464#9
1066# asm 2: pxor <xmm10=%xmm9,<xmm11=%xmm8
1067pxor %xmm9,%xmm8
1068
1069# qhasm: xmm13 &= xmm11
1070# asm 1: pand <xmm11=int6464#9,<xmm13=int6464#16
1071# asm 2: pand <xmm11=%xmm8,<xmm13=%xmm15
1072pand %xmm8,%xmm15
1073
1074# qhasm: xmm13 ^= xmm8
1075# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
1076# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
1077pxor %xmm14,%xmm15
1078
1079# qhasm: xmm9 ^= xmm13
1080# asm 1: pxor <xmm13=int6464#16,<xmm9=int6464#13
1081# asm 2: pxor <xmm13=%xmm15,<xmm9=%xmm12
1082pxor %xmm15,%xmm12
1083
1084# qhasm: xmm10 = xmm14
1085# asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9
1086# asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8
1087movdqa %xmm11,%xmm8
1088
1089# qhasm: xmm10 ^= xmm13
1090# asm 1: pxor <xmm13=int6464#16,<xmm10=int6464#9
1091# asm 2: pxor <xmm13=%xmm15,<xmm10=%xmm8
1092pxor %xmm15,%xmm8
1093
1094# qhasm: xmm10 &= xmm8
1095# asm 1: pand <xmm8=int6464#15,<xmm10=int6464#9
1096# asm 2: pand <xmm8=%xmm14,<xmm10=%xmm8
1097pand %xmm14,%xmm8
1098
1099# qhasm: xmm9 ^= xmm10
1100# asm 1: pxor <xmm10=int6464#9,<xmm9=int6464#13
1101# asm 2: pxor <xmm10=%xmm8,<xmm9=%xmm12
1102pxor %xmm8,%xmm12
1103
1104# qhasm: xmm14 ^= xmm10
1105# asm 1: pxor <xmm10=int6464#9,<xmm14=int6464#12
1106# asm 2: pxor <xmm10=%xmm8,<xmm14=%xmm11
1107pxor %xmm8,%xmm11
1108
1109# qhasm: xmm14 &= xmm15
1110# asm 1: pand <xmm15=int6464#14,<xmm14=int6464#12
1111# asm 2: pand <xmm15=%xmm13,<xmm14=%xmm11
1112pand %xmm13,%xmm11
1113
1114# qhasm: xmm14 ^= xmm12
1115# asm 1: pxor <xmm12=int6464#11,<xmm14=int6464#12
1116# asm 2: pxor <xmm12=%xmm10,<xmm14=%xmm11
1117pxor %xmm10,%xmm11
1118
1119# qhasm: xmm12 = xmm6
1120# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#9
1121# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm8
1122movdqa %xmm6,%xmm8
1123
1124# qhasm: xmm8 = xmm5
1125# asm 1: movdqa <xmm5=int6464#6,>xmm8=int6464#10
1126# asm 2: movdqa <xmm5=%xmm5,>xmm8=%xmm9
1127movdqa %xmm5,%xmm9
1128
1129# qhasm: xmm10 = xmm15
1130# asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11
1131# asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10
1132movdqa %xmm13,%xmm10
1133
1134# qhasm: xmm10 ^= xmm14
1135# asm 1: pxor <xmm14=int6464#12,<xmm10=int6464#11
1136# asm 2: pxor <xmm14=%xmm11,<xmm10=%xmm10
1137pxor %xmm11,%xmm10
1138
1139# qhasm: xmm10 &= xmm6
1140# asm 1: pand <xmm6=int6464#7,<xmm10=int6464#11
1141# asm 2: pand <xmm6=%xmm6,<xmm10=%xmm10
1142pand %xmm6,%xmm10
1143
1144# qhasm: xmm6 ^= xmm5
1145# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
1146# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
1147pxor %xmm5,%xmm6
1148
1149# qhasm: xmm6 &= xmm14
1150# asm 1: pand <xmm14=int6464#12,<xmm6=int6464#7
1151# asm 2: pand <xmm14=%xmm11,<xmm6=%xmm6
1152pand %xmm11,%xmm6
1153
1154# qhasm: xmm5 &= xmm15
1155# asm 1: pand <xmm15=int6464#14,<xmm5=int6464#6
1156# asm 2: pand <xmm15=%xmm13,<xmm5=%xmm5
1157pand %xmm13,%xmm5
1158
1159# qhasm: xmm6 ^= xmm5
1160# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
1161# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
1162pxor %xmm5,%xmm6
1163
1164# qhasm: xmm5 ^= xmm10
1165# asm 1: pxor <xmm10=int6464#11,<xmm5=int6464#6
1166# asm 2: pxor <xmm10=%xmm10,<xmm5=%xmm5
1167pxor %xmm10,%xmm5
1168
1169# qhasm: xmm12 ^= xmm0
1170# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#9
1171# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm8
1172pxor %xmm0,%xmm8
1173
1174# qhasm: xmm8 ^= xmm3
1175# asm 1: pxor <xmm3=int6464#4,<xmm8=int6464#10
1176# asm 2: pxor <xmm3=%xmm3,<xmm8=%xmm9
1177pxor %xmm3,%xmm9
1178
1179# qhasm: xmm15 ^= xmm13
1180# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
1181# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
1182pxor %xmm15,%xmm13
1183
1184# qhasm: xmm14 ^= xmm9
1185# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
1186# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
1187pxor %xmm12,%xmm11
1188
1189# qhasm: xmm11 = xmm15
1190# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
1191# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
1192movdqa %xmm13,%xmm10
1193
1194# qhasm: xmm11 ^= xmm14
1195# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
1196# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
1197pxor %xmm11,%xmm10
1198
1199# qhasm: xmm11 &= xmm12
1200# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
1201# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
1202pand %xmm8,%xmm10
1203
1204# qhasm: xmm12 ^= xmm8
1205# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
1206# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
1207pxor %xmm9,%xmm8
1208
1209# qhasm: xmm12 &= xmm14
1210# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
1211# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
1212pand %xmm11,%xmm8
1213
1214# qhasm: xmm8 &= xmm15
1215# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
1216# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
1217pand %xmm13,%xmm9
1218
1219# qhasm: xmm8 ^= xmm12
1220# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
1221# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
1222pxor %xmm8,%xmm9
1223
1224# qhasm: xmm12 ^= xmm11
1225# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
1226# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
1227pxor %xmm10,%xmm8
1228
1229# qhasm: xmm10 = xmm13
1230# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
1231# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
1232movdqa %xmm15,%xmm10
1233
1234# qhasm: xmm10 ^= xmm9
1235# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
1236# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
1237pxor %xmm12,%xmm10
1238
1239# qhasm: xmm10 &= xmm0
1240# asm 1: pand <xmm0=int6464#1,<xmm10=int6464#11
1241# asm 2: pand <xmm0=%xmm0,<xmm10=%xmm10
1242pand %xmm0,%xmm10
1243
1244# qhasm: xmm0 ^= xmm3
1245# asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1
1246# asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0
1247pxor %xmm3,%xmm0
1248
1249# qhasm: xmm0 &= xmm9
1250# asm 1: pand <xmm9=int6464#13,<xmm0=int6464#1
1251# asm 2: pand <xmm9=%xmm12,<xmm0=%xmm0
1252pand %xmm12,%xmm0
1253
1254# qhasm: xmm3 &= xmm13
1255# asm 1: pand <xmm13=int6464#16,<xmm3=int6464#4
1256# asm 2: pand <xmm13=%xmm15,<xmm3=%xmm3
1257pand %xmm15,%xmm3
1258
1259# qhasm: xmm0 ^= xmm3
1260# asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1
1261# asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0
1262pxor %xmm3,%xmm0
1263
1264# qhasm: xmm3 ^= xmm10
1265# asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4
1266# asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3
1267pxor %xmm10,%xmm3
1268
1269# qhasm: xmm6 ^= xmm12
1270# asm 1: pxor <xmm12=int6464#9,<xmm6=int6464#7
1271# asm 2: pxor <xmm12=%xmm8,<xmm6=%xmm6
1272pxor %xmm8,%xmm6
1273
1274# qhasm: xmm0 ^= xmm12
1275# asm 1: pxor <xmm12=int6464#9,<xmm0=int6464#1
1276# asm 2: pxor <xmm12=%xmm8,<xmm0=%xmm0
1277pxor %xmm8,%xmm0
1278
1279# qhasm: xmm5 ^= xmm8
1280# asm 1: pxor <xmm8=int6464#10,<xmm5=int6464#6
1281# asm 2: pxor <xmm8=%xmm9,<xmm5=%xmm5
1282pxor %xmm9,%xmm5
1283
1284# qhasm: xmm3 ^= xmm8
1285# asm 1: pxor <xmm8=int6464#10,<xmm3=int6464#4
1286# asm 2: pxor <xmm8=%xmm9,<xmm3=%xmm3
1287pxor %xmm9,%xmm3
1288
1289# qhasm: xmm12 = xmm7
1290# asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#9
1291# asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm8
1292movdqa %xmm7,%xmm8
1293
1294# qhasm: xmm8 = xmm1
1295# asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10
1296# asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9
1297movdqa %xmm1,%xmm9
1298
1299# qhasm: xmm12 ^= xmm4
1300# asm 1: pxor <xmm4=int6464#5,<xmm12=int6464#9
1301# asm 2: pxor <xmm4=%xmm4,<xmm12=%xmm8
1302pxor %xmm4,%xmm8
1303
1304# qhasm: xmm8 ^= xmm2
1305# asm 1: pxor <xmm2=int6464#3,<xmm8=int6464#10
1306# asm 2: pxor <xmm2=%xmm2,<xmm8=%xmm9
1307pxor %xmm2,%xmm9
1308
1309# qhasm: xmm11 = xmm15
1310# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
1311# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
1312movdqa %xmm13,%xmm10
1313
1314# qhasm: xmm11 ^= xmm14
1315# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
1316# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
1317pxor %xmm11,%xmm10
1318
1319# qhasm: xmm11 &= xmm12
1320# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
1321# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
1322pand %xmm8,%xmm10
1323
1324# qhasm: xmm12 ^= xmm8
1325# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
1326# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
1327pxor %xmm9,%xmm8
1328
1329# qhasm: xmm12 &= xmm14
1330# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
1331# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
1332pand %xmm11,%xmm8
1333
1334# qhasm: xmm8 &= xmm15
1335# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
1336# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
1337pand %xmm13,%xmm9
1338
1339# qhasm: xmm8 ^= xmm12
1340# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
1341# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
1342pxor %xmm8,%xmm9
1343
1344# qhasm: xmm12 ^= xmm11
1345# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
1346# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
1347pxor %xmm10,%xmm8
1348
1349# qhasm: xmm10 = xmm13
1350# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
1351# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
1352movdqa %xmm15,%xmm10
1353
1354# qhasm: xmm10 ^= xmm9
1355# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
1356# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
1357pxor %xmm12,%xmm10
1358
1359# qhasm: xmm10 &= xmm4
1360# asm 1: pand <xmm4=int6464#5,<xmm10=int6464#11
1361# asm 2: pand <xmm4=%xmm4,<xmm10=%xmm10
1362pand %xmm4,%xmm10
1363
1364# qhasm: xmm4 ^= xmm2
1365# asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5
1366# asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4
1367pxor %xmm2,%xmm4
1368
1369# qhasm: xmm4 &= xmm9
1370# asm 1: pand <xmm9=int6464#13,<xmm4=int6464#5
1371# asm 2: pand <xmm9=%xmm12,<xmm4=%xmm4
1372pand %xmm12,%xmm4
1373
1374# qhasm: xmm2 &= xmm13
1375# asm 1: pand <xmm13=int6464#16,<xmm2=int6464#3
1376# asm 2: pand <xmm13=%xmm15,<xmm2=%xmm2
1377pand %xmm15,%xmm2
1378
1379# qhasm: xmm4 ^= xmm2
1380# asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5
1381# asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4
1382pxor %xmm2,%xmm4
1383
1384# qhasm: xmm2 ^= xmm10
1385# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3
1386# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2
1387pxor %xmm10,%xmm2
1388
1389# qhasm: xmm15 ^= xmm13
1390# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
1391# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
1392pxor %xmm15,%xmm13
1393
1394# qhasm: xmm14 ^= xmm9
1395# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
1396# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
1397pxor %xmm12,%xmm11
1398
1399# qhasm: xmm11 = xmm15
1400# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
1401# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
1402movdqa %xmm13,%xmm10
1403
1404# qhasm: xmm11 ^= xmm14
1405# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
1406# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
1407pxor %xmm11,%xmm10
1408
1409# qhasm: xmm11 &= xmm7
1410# asm 1: pand <xmm7=int6464#8,<xmm11=int6464#11
1411# asm 2: pand <xmm7=%xmm7,<xmm11=%xmm10
1412pand %xmm7,%xmm10
1413
1414# qhasm: xmm7 ^= xmm1
1415# asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8
1416# asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7
1417pxor %xmm1,%xmm7
1418
1419# qhasm: xmm7 &= xmm14
1420# asm 1: pand <xmm14=int6464#12,<xmm7=int6464#8
1421# asm 2: pand <xmm14=%xmm11,<xmm7=%xmm7
1422pand %xmm11,%xmm7
1423
1424# qhasm: xmm1 &= xmm15
1425# asm 1: pand <xmm15=int6464#14,<xmm1=int6464#2
1426# asm 2: pand <xmm15=%xmm13,<xmm1=%xmm1
1427pand %xmm13,%xmm1
1428
1429# qhasm: xmm7 ^= xmm1
1430# asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8
1431# asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7
1432pxor %xmm1,%xmm7
1433
1434# qhasm: xmm1 ^= xmm11
1435# asm 1: pxor <xmm11=int6464#11,<xmm1=int6464#2
1436# asm 2: pxor <xmm11=%xmm10,<xmm1=%xmm1
1437pxor %xmm10,%xmm1
1438
1439# qhasm: xmm7 ^= xmm12
1440# asm 1: pxor <xmm12=int6464#9,<xmm7=int6464#8
1441# asm 2: pxor <xmm12=%xmm8,<xmm7=%xmm7
1442pxor %xmm8,%xmm7
1443
1444# qhasm: xmm4 ^= xmm12
1445# asm 1: pxor <xmm12=int6464#9,<xmm4=int6464#5
1446# asm 2: pxor <xmm12=%xmm8,<xmm4=%xmm4
1447pxor %xmm8,%xmm4
1448
1449# qhasm: xmm1 ^= xmm8
1450# asm 1: pxor <xmm8=int6464#10,<xmm1=int6464#2
1451# asm 2: pxor <xmm8=%xmm9,<xmm1=%xmm1
1452pxor %xmm9,%xmm1
1453
1454# qhasm: xmm2 ^= xmm8
1455# asm 1: pxor <xmm8=int6464#10,<xmm2=int6464#3
1456# asm 2: pxor <xmm8=%xmm9,<xmm2=%xmm2
1457pxor %xmm9,%xmm2
1458
1459# qhasm: xmm7 ^= xmm0
1460# asm 1: pxor <xmm0=int6464#1,<xmm7=int6464#8
1461# asm 2: pxor <xmm0=%xmm0,<xmm7=%xmm7
1462pxor %xmm0,%xmm7
1463
1464# qhasm: xmm1 ^= xmm6
1465# asm 1: pxor <xmm6=int6464#7,<xmm1=int6464#2
1466# asm 2: pxor <xmm6=%xmm6,<xmm1=%xmm1
1467pxor %xmm6,%xmm1
1468
1469# qhasm: xmm4 ^= xmm7
1470# asm 1: pxor <xmm7=int6464#8,<xmm4=int6464#5
1471# asm 2: pxor <xmm7=%xmm7,<xmm4=%xmm4
1472pxor %xmm7,%xmm4
1473
1474# qhasm: xmm6 ^= xmm0
1475# asm 1: pxor <xmm0=int6464#1,<xmm6=int6464#7
1476# asm 2: pxor <xmm0=%xmm0,<xmm6=%xmm6
1477pxor %xmm0,%xmm6
1478
1479# qhasm: xmm0 ^= xmm1
1480# asm 1: pxor <xmm1=int6464#2,<xmm0=int6464#1
1481# asm 2: pxor <xmm1=%xmm1,<xmm0=%xmm0
1482pxor %xmm1,%xmm0
1483
1484# qhasm: xmm1 ^= xmm5
1485# asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2
1486# asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1
1487pxor %xmm5,%xmm1
1488
1489# qhasm: xmm5 ^= xmm2
1490# asm 1: pxor <xmm2=int6464#3,<xmm5=int6464#6
1491# asm 2: pxor <xmm2=%xmm2,<xmm5=%xmm5
1492pxor %xmm2,%xmm5
1493
1494# qhasm: xmm4 ^= xmm5
1495# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
1496# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
1497pxor %xmm5,%xmm4
1498
1499# qhasm: xmm2 ^= xmm3
1500# asm 1: pxor <xmm3=int6464#4,<xmm2=int6464#3
1501# asm 2: pxor <xmm3=%xmm3,<xmm2=%xmm2
1502pxor %xmm3,%xmm2
1503
1504# qhasm: xmm3 ^= xmm5
1505# asm 1: pxor <xmm5=int6464#6,<xmm3=int6464#4
1506# asm 2: pxor <xmm5=%xmm5,<xmm3=%xmm3
1507pxor %xmm5,%xmm3
1508
1509# qhasm: xmm6 ^= xmm3
1510# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
1511# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
1512pxor %xmm3,%xmm6
1513
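# The pxor/pand/por network above is one evaluation of the bitsliced
# AES S-box on the eight slice registers.  Below, RCON complements the
# top word of slice 0 and EXPB0 broadcasts the last byte of each
# 32-bit word across its word; in this bitsliced layout that supplies
# the Rcon/RotWord/SubWord term of the AES-128 key schedule.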
1514# qhasm: xmm0 ^= RCON
1515# asm 1: pxor RCON,<xmm0=int6464#1
1516# asm 2: pxor RCON,<xmm0=%xmm0
1517pxor RCON,%xmm0
1518
1519# qhasm: shuffle bytes of xmm0 by EXPB0
1520# asm 1: pshufb EXPB0,<xmm0=int6464#1
1521# asm 2: pshufb EXPB0,<xmm0=%xmm0
1522pshufb EXPB0,%xmm0
1523
1524# qhasm: shuffle bytes of xmm1 by EXPB0
1525# asm 1: pshufb EXPB0,<xmm1=int6464#2
1526# asm 2: pshufb EXPB0,<xmm1=%xmm1
1527pshufb EXPB0,%xmm1
1528
1529# qhasm: shuffle bytes of xmm4 by EXPB0
1530# asm 1: pshufb EXPB0,<xmm4=int6464#5
1531# asm 2: pshufb EXPB0,<xmm4=%xmm4
1532pshufb EXPB0,%xmm4
1533
1534# qhasm: shuffle bytes of xmm6 by EXPB0
1535# asm 1: pshufb EXPB0,<xmm6=int6464#7
1536# asm 2: pshufb EXPB0,<xmm6=%xmm6
1537pshufb EXPB0,%xmm6
1538
1539# qhasm: shuffle bytes of xmm3 by EXPB0
1540# asm 1: pshufb EXPB0,<xmm3=int6464#4
1541# asm 2: pshufb EXPB0,<xmm3=%xmm3
1542pshufb EXPB0,%xmm3
1543
1544# qhasm: shuffle bytes of xmm7 by EXPB0
1545# asm 1: pshufb EXPB0,<xmm7=int6464#8
1546# asm 2: pshufb EXPB0,<xmm7=%xmm7
1547pshufb EXPB0,%xmm7
1548
1549# qhasm: shuffle bytes of xmm2 by EXPB0
1550# asm 1: pshufb EXPB0,<xmm2=int6464#3
1551# asm 2: pshufb EXPB0,<xmm2=%xmm2
1552pshufb EXPB0,%xmm2
1553
1554# qhasm: shuffle bytes of xmm5 by EXPB0
1555# asm 1: pshufb EXPB0,<xmm5=int6464#6
1556# asm 2: pshufb EXPB0,<xmm5=%xmm5
1557pshufb EXPB0,%xmm5
1558
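# Load the eight bitsliced slices of the previous round key from
# c + 0 .. c + 112 and xor them into the new key material below.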
1559# qhasm: xmm8 = *(int128 *)(c + 0)
1560# asm 1: movdqa 0(<c=int64#1),>xmm8=int6464#9
1561# asm 2: movdqa 0(<c=%rdi),>xmm8=%xmm8
1562movdqa 0(%rdi),%xmm8
1563
1564# qhasm: xmm9 = *(int128 *)(c + 16)
1565# asm 1: movdqa 16(<c=int64#1),>xmm9=int6464#10
1566# asm 2: movdqa 16(<c=%rdi),>xmm9=%xmm9
1567movdqa 16(%rdi),%xmm9
1568
1569# qhasm: xmm10 = *(int128 *)(c + 32)
1570# asm 1: movdqa 32(<c=int64#1),>xmm10=int6464#11
1571# asm 2: movdqa 32(<c=%rdi),>xmm10=%xmm10
1572movdqa 32(%rdi),%xmm10
1573
1574# qhasm: xmm11 = *(int128 *)(c + 48)
1575# asm 1: movdqa 48(<c=int64#1),>xmm11=int6464#12
1576# asm 2: movdqa 48(<c=%rdi),>xmm11=%xmm11
1577movdqa 48(%rdi),%xmm11
1578
1579# qhasm: xmm12 = *(int128 *)(c + 64)
1580# asm 1: movdqa 64(<c=int64#1),>xmm12=int6464#13
1581# asm 2: movdqa 64(<c=%rdi),>xmm12=%xmm12
1582movdqa 64(%rdi),%xmm12
1583
1584# qhasm: xmm13 = *(int128 *)(c + 80)
1585# asm 1: movdqa 80(<c=int64#1),>xmm13=int6464#14
1586# asm 2: movdqa 80(<c=%rdi),>xmm13=%xmm13
1587movdqa 80(%rdi),%xmm13
1588
1589# qhasm: xmm14 = *(int128 *)(c + 96)
1590# asm 1: movdqa 96(<c=int64#1),>xmm14=int6464#15
1591# asm 2: movdqa 96(<c=%rdi),>xmm14=%xmm14
1592movdqa 96(%rdi),%xmm14
1593
1594# qhasm: xmm15 = *(int128 *)(c + 112)
1595# asm 1: movdqa 112(<c=int64#1),>xmm15=int6464#16
1596# asm 2: movdqa 112(<c=%rdi),>xmm15=%xmm15
1597movdqa 112(%rdi),%xmm15
1598
1599# qhasm: xmm0 ^= xmm8
1600# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
1601# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
1602pxor %xmm8,%xmm0
1603
1604# qhasm: xmm1 ^= xmm9
1605# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
1606# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
1607pxor %xmm9,%xmm1
1608
1609# qhasm: xmm4 ^= xmm10
1610# asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5
1611# asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4
1612pxor %xmm10,%xmm4
1613
1614# qhasm: xmm6 ^= xmm11
1615# asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7
1616# asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6
1617pxor %xmm11,%xmm6
1618
1619# qhasm: xmm3 ^= xmm12
1620# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4
1621# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3
1622pxor %xmm12,%xmm3
1623
1624# qhasm: xmm7 ^= xmm13
1625# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
1626# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
1627pxor %xmm13,%xmm7
1628
1629# qhasm: xmm2 ^= xmm14
1630# asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3
1631# asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2
1632pxor %xmm14,%xmm2
1633
1634# qhasm: xmm5 ^= xmm15
1635# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
1636# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
1637pxor %xmm15,%xmm5
1638
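# Each 32-bit lane is shifted right by 8 and xored back in, three
# times in all: every key byte is folded into the later bytes of its
# lane, realizing the w[i] = w[i-1] ^ w[i-4] recurrence of the AES
# key schedule in this packed representation.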
1639# qhasm: uint32323232 xmm8 >>= 8
1640# asm 1: psrld $8,<xmm8=int6464#9
1641# asm 2: psrld $8,<xmm8=%xmm8
1642psrld $8,%xmm8
1643
1644# qhasm: uint32323232 xmm9 >>= 8
1645# asm 1: psrld $8,<xmm9=int6464#10
1646# asm 2: psrld $8,<xmm9=%xmm9
1647psrld $8,%xmm9
1648
1649# qhasm: uint32323232 xmm10 >>= 8
1650# asm 1: psrld $8,<xmm10=int6464#11
1651# asm 2: psrld $8,<xmm10=%xmm10
1652psrld $8,%xmm10
1653
1654# qhasm: uint32323232 xmm11 >>= 8
1655# asm 1: psrld $8,<xmm11=int6464#12
1656# asm 2: psrld $8,<xmm11=%xmm11
1657psrld $8,%xmm11
1658
1659# qhasm: uint32323232 xmm12 >>= 8
1660# asm 1: psrld $8,<xmm12=int6464#13
1661# asm 2: psrld $8,<xmm12=%xmm12
1662psrld $8,%xmm12
1663
1664# qhasm: uint32323232 xmm13 >>= 8
1665# asm 1: psrld $8,<xmm13=int6464#14
1666# asm 2: psrld $8,<xmm13=%xmm13
1667psrld $8,%xmm13
1668
1669# qhasm: uint32323232 xmm14 >>= 8
1670# asm 1: psrld $8,<xmm14=int6464#15
1671# asm 2: psrld $8,<xmm14=%xmm14
1672psrld $8,%xmm14
1673
1674# qhasm: uint32323232 xmm15 >>= 8
1675# asm 1: psrld $8,<xmm15=int6464#16
1676# asm 2: psrld $8,<xmm15=%xmm15
1677psrld $8,%xmm15
1678
1679# qhasm: xmm0 ^= xmm8
1680# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
1681# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
1682pxor %xmm8,%xmm0
1683
1684# qhasm: xmm1 ^= xmm9
1685# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
1686# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
1687pxor %xmm9,%xmm1
1688
1689# qhasm: xmm4 ^= xmm10
1690# asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5
1691# asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4
1692pxor %xmm10,%xmm4
1693
1694# qhasm: xmm6 ^= xmm11
1695# asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7
1696# asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6
1697pxor %xmm11,%xmm6
1698
1699# qhasm: xmm3 ^= xmm12
1700# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4
1701# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3
1702pxor %xmm12,%xmm3
1703
1704# qhasm: xmm7 ^= xmm13
1705# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
1706# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
1707pxor %xmm13,%xmm7
1708
1709# qhasm: xmm2 ^= xmm14
1710# asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3
1711# asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2
1712pxor %xmm14,%xmm2
1713
1714# qhasm: xmm5 ^= xmm15
1715# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
1716# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
1717pxor %xmm15,%xmm5
1718
1719# qhasm: uint32323232 xmm8 >>= 8
1720# asm 1: psrld $8,<xmm8=int6464#9
1721# asm 2: psrld $8,<xmm8=%xmm8
1722psrld $8,%xmm8
1723
1724# qhasm: uint32323232 xmm9 >>= 8
1725# asm 1: psrld $8,<xmm9=int6464#10
1726# asm 2: psrld $8,<xmm9=%xmm9
1727psrld $8,%xmm9
1728
1729# qhasm: uint32323232 xmm10 >>= 8
1730# asm 1: psrld $8,<xmm10=int6464#11
1731# asm 2: psrld $8,<xmm10=%xmm10
1732psrld $8,%xmm10
1733
1734# qhasm: uint32323232 xmm11 >>= 8
1735# asm 1: psrld $8,<xmm11=int6464#12
1736# asm 2: psrld $8,<xmm11=%xmm11
1737psrld $8,%xmm11
1738
1739# qhasm: uint32323232 xmm12 >>= 8
1740# asm 1: psrld $8,<xmm12=int6464#13
1741# asm 2: psrld $8,<xmm12=%xmm12
1742psrld $8,%xmm12
1743
1744# qhasm: uint32323232 xmm13 >>= 8
1745# asm 1: psrld $8,<xmm13=int6464#14
1746# asm 2: psrld $8,<xmm13=%xmm13
1747psrld $8,%xmm13
1748
1749# qhasm: uint32323232 xmm14 >>= 8
1750# asm 1: psrld $8,<xmm14=int6464#15
1751# asm 2: psrld $8,<xmm14=%xmm14
1752psrld $8,%xmm14
1753
1754# qhasm: uint32323232 xmm15 >>= 8
1755# asm 1: psrld $8,<xmm15=int6464#16
1756# asm 2: psrld $8,<xmm15=%xmm15
1757psrld $8,%xmm15
1758
1759# qhasm: xmm0 ^= xmm8
1760# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
1761# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
1762pxor %xmm8,%xmm0
1763
1764# qhasm: xmm1 ^= xmm9
1765# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
1766# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
1767pxor %xmm9,%xmm1
1768
1769# qhasm: xmm4 ^= xmm10
1770# asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5
1771# asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4
1772pxor %xmm10,%xmm4
1773
1774# qhasm: xmm6 ^= xmm11
1775# asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7
1776# asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6
1777pxor %xmm11,%xmm6
1778
1779# qhasm: xmm3 ^= xmm12
1780# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4
1781# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3
1782pxor %xmm12,%xmm3
1783
1784# qhasm: xmm7 ^= xmm13
1785# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
1786# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
1787pxor %xmm13,%xmm7
1788
1789# qhasm: xmm2 ^= xmm14
1790# asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3
1791# asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2
1792pxor %xmm14,%xmm2
1793
1794# qhasm: xmm5 ^= xmm15
1795# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
1796# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
1797pxor %xmm15,%xmm5
1798
1799# qhasm: uint32323232 xmm8 >>= 8
1800# asm 1: psrld $8,<xmm8=int6464#9
1801# asm 2: psrld $8,<xmm8=%xmm8
1802psrld $8,%xmm8
1803
1804# qhasm: uint32323232 xmm9 >>= 8
1805# asm 1: psrld $8,<xmm9=int6464#10
1806# asm 2: psrld $8,<xmm9=%xmm9
1807psrld $8,%xmm9
1808
1809# qhasm: uint32323232 xmm10 >>= 8
1810# asm 1: psrld $8,<xmm10=int6464#11
1811# asm 2: psrld $8,<xmm10=%xmm10
1812psrld $8,%xmm10
1813
1814# qhasm: uint32323232 xmm11 >>= 8
1815# asm 1: psrld $8,<xmm11=int6464#12
1816# asm 2: psrld $8,<xmm11=%xmm11
1817psrld $8,%xmm11
1818
1819# qhasm: uint32323232 xmm12 >>= 8
1820# asm 1: psrld $8,<xmm12=int6464#13
1821# asm 2: psrld $8,<xmm12=%xmm12
1822psrld $8,%xmm12
1823
1824# qhasm: uint32323232 xmm13 >>= 8
1825# asm 1: psrld $8,<xmm13=int6464#14
1826# asm 2: psrld $8,<xmm13=%xmm13
1827psrld $8,%xmm13
1828
1829# qhasm: uint32323232 xmm14 >>= 8
1830# asm 1: psrld $8,<xmm14=int6464#15
1831# asm 2: psrld $8,<xmm14=%xmm14
1832psrld $8,%xmm14
1833
1834# qhasm: uint32323232 xmm15 >>= 8
1835# asm 1: psrld $8,<xmm15=int6464#16
1836# asm 2: psrld $8,<xmm15=%xmm15
1837psrld $8,%xmm15
1838
1839# qhasm: xmm0 ^= xmm8
1840# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
1841# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
1842pxor %xmm8,%xmm0
1843
1844# qhasm: xmm1 ^= xmm9
1845# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
1846# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
1847pxor %xmm9,%xmm1
1848
1849# qhasm: xmm4 ^= xmm10
1850# asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5
1851# asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4
1852pxor %xmm10,%xmm4
1853
1854# qhasm: xmm6 ^= xmm11
1855# asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7
1856# asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6
1857pxor %xmm11,%xmm6
1858
1859# qhasm: xmm3 ^= xmm12
1860# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4
1861# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3
1862pxor %xmm12,%xmm3
1863
1864# qhasm: xmm7 ^= xmm13
1865# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
1866# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
1867pxor %xmm13,%xmm7
1868
1869# qhasm: xmm2 ^= xmm14
1870# asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3
1871# asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2
1872pxor %xmm14,%xmm2
1873
1874# qhasm: xmm5 ^= xmm15
1875# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
1876# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
1877pxor %xmm15,%xmm5
1878
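# Store the eight slices of the freshly derived round key at
# c + 128 .. c + 240 (one bitsliced round key occupies 128 bytes).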
1879# qhasm: *(int128 *)(c + 128) = xmm0
1880# asm 1: movdqa <xmm0=int6464#1,128(<c=int64#1)
1881# asm 2: movdqa <xmm0=%xmm0,128(<c=%rdi)
1882movdqa %xmm0,128(%rdi)
1883
1884# qhasm: *(int128 *)(c + 144) = xmm1
1885# asm 1: movdqa <xmm1=int6464#2,144(<c=int64#1)
1886# asm 2: movdqa <xmm1=%xmm1,144(<c=%rdi)
1887movdqa %xmm1,144(%rdi)
1888
1889# qhasm: *(int128 *)(c + 160) = xmm4
1890# asm 1: movdqa <xmm4=int6464#5,160(<c=int64#1)
1891# asm 2: movdqa <xmm4=%xmm4,160(<c=%rdi)
1892movdqa %xmm4,160(%rdi)
1893
1894# qhasm: *(int128 *)(c + 176) = xmm6
1895# asm 1: movdqa <xmm6=int6464#7,176(<c=int64#1)
1896# asm 2: movdqa <xmm6=%xmm6,176(<c=%rdi)
1897movdqa %xmm6,176(%rdi)
1898
1899# qhasm: *(int128 *)(c + 192) = xmm3
1900# asm 1: movdqa <xmm3=int6464#4,192(<c=int64#1)
1901# asm 2: movdqa <xmm3=%xmm3,192(<c=%rdi)
1902movdqa %xmm3,192(%rdi)
1903
1904# qhasm: *(int128 *)(c + 208) = xmm7
1905# asm 1: movdqa <xmm7=int6464#8,208(<c=int64#1)
1906# asm 2: movdqa <xmm7=%xmm7,208(<c=%rdi)
1907movdqa %xmm7,208(%rdi)
1908
1909# qhasm: *(int128 *)(c + 224) = xmm2
1910# asm 1: movdqa <xmm2=int6464#3,224(<c=int64#1)
1911# asm 2: movdqa <xmm2=%xmm2,224(<c=%rdi)
1912movdqa %xmm2,224(%rdi)
1913
1914# qhasm: *(int128 *)(c + 240) = xmm5
1915# asm 1: movdqa <xmm5=int6464#6,240(<c=int64#1)
1916# asm 2: movdqa <xmm5=%xmm5,240(<c=%rdi)
1917movdqa %xmm5,240(%rdi)
1918
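# Complementing xmm0, xmm1, xmm7 and xmm2 flips slices 0, 1, 5 and 6,
# consistent with the set bits of the S-box affine constant 0x63; the
# ROTB shuffle then repositions bytes for the next expansion round.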
1919# qhasm: xmm0 ^= ONE
1920# asm 1: pxor ONE,<xmm0=int6464#1
1921# asm 2: pxor ONE,<xmm0=%xmm0
1922pxor ONE,%xmm0
1923
1924# qhasm: xmm1 ^= ONE
1925# asm 1: pxor ONE,<xmm1=int6464#2
1926# asm 2: pxor ONE,<xmm1=%xmm1
1927pxor ONE,%xmm1
1928
1929# qhasm: xmm7 ^= ONE
1930# asm 1: pxor ONE,<xmm7=int6464#8
1931# asm 2: pxor ONE,<xmm7=%xmm7
1932pxor ONE,%xmm7
1933
1934# qhasm: xmm2 ^= ONE
1935# asm 1: pxor ONE,<xmm2=int6464#3
1936# asm 2: pxor ONE,<xmm2=%xmm2
1937pxor ONE,%xmm2
1938
1939# qhasm: shuffle bytes of xmm0 by ROTB
1940# asm 1: pshufb ROTB,<xmm0=int6464#1
1941# asm 2: pshufb ROTB,<xmm0=%xmm0
1942pshufb ROTB,%xmm0
1943
1944# qhasm: shuffle bytes of xmm1 by ROTB
1945# asm 1: pshufb ROTB,<xmm1=int6464#2
1946# asm 2: pshufb ROTB,<xmm1=%xmm1
1947pshufb ROTB,%xmm1
1948
1949# qhasm: shuffle bytes of xmm4 by ROTB
1950# asm 1: pshufb ROTB,<xmm4=int6464#5
1951# asm 2: pshufb ROTB,<xmm4=%xmm4
1952pshufb ROTB,%xmm4
1953
1954# qhasm: shuffle bytes of xmm6 by ROTB
1955# asm 1: pshufb ROTB,<xmm6=int6464#7
1956# asm 2: pshufb ROTB,<xmm6=%xmm6
1957pshufb ROTB,%xmm6
1958
1959# qhasm: shuffle bytes of xmm3 by ROTB
1960# asm 1: pshufb ROTB,<xmm3=int6464#4
1961# asm 2: pshufb ROTB,<xmm3=%xmm3
1962pshufb ROTB,%xmm3
1963
1964# qhasm: shuffle bytes of xmm7 by ROTB
1965# asm 1: pshufb ROTB,<xmm7=int6464#8
1966# asm 2: pshufb ROTB,<xmm7=%xmm7
1967pshufb ROTB,%xmm7
1968
1969# qhasm: shuffle bytes of xmm2 by ROTB
1970# asm 1: pshufb ROTB,<xmm2=int6464#3
1971# asm 2: pshufb ROTB,<xmm2=%xmm2
1972pshufb ROTB,%xmm2
1973
1974# qhasm: shuffle bytes of xmm5 by ROTB
1975# asm 1: pshufb ROTB,<xmm5=int6464#6
1976# asm 2: pshufb ROTB,<xmm5=%xmm5
1977pshufb ROTB,%xmm5
1978
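# Second expansion round: the xor ladder below is the input linear
# layer feeding the same bitsliced S-box network as above.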
1979# qhasm: xmm7 ^= xmm2
1980# asm 1: pxor <xmm2=int6464#3,<xmm7=int6464#8
1981# asm 2: pxor <xmm2=%xmm2,<xmm7=%xmm7
1982pxor %xmm2,%xmm7
1983
1984# qhasm: xmm4 ^= xmm1
1985# asm 1: pxor <xmm1=int6464#2,<xmm4=int6464#5
1986# asm 2: pxor <xmm1=%xmm1,<xmm4=%xmm4
1987pxor %xmm1,%xmm4
1988
1989# qhasm: xmm7 ^= xmm0
1990# asm 1: pxor <xmm0=int6464#1,<xmm7=int6464#8
1991# asm 2: pxor <xmm0=%xmm0,<xmm7=%xmm7
1992pxor %xmm0,%xmm7
1993
1994# qhasm: xmm2 ^= xmm4
1995# asm 1: pxor <xmm4=int6464#5,<xmm2=int6464#3
1996# asm 2: pxor <xmm4=%xmm4,<xmm2=%xmm2
1997pxor %xmm4,%xmm2
1998
1999# qhasm: xmm6 ^= xmm0
2000# asm 1: pxor <xmm0=int6464#1,<xmm6=int6464#7
2001# asm 2: pxor <xmm0=%xmm0,<xmm6=%xmm6
2002pxor %xmm0,%xmm6
2003
2004# qhasm: xmm2 ^= xmm6
2005# asm 1: pxor <xmm6=int6464#7,<xmm2=int6464#3
2006# asm 2: pxor <xmm6=%xmm6,<xmm2=%xmm2
2007pxor %xmm6,%xmm2
2008
2009# qhasm: xmm6 ^= xmm5
2010# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
2011# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
2012pxor %xmm5,%xmm6
2013
2014# qhasm: xmm6 ^= xmm3
2015# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
2016# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
2017pxor %xmm3,%xmm6
2018
2019# qhasm: xmm5 ^= xmm7
2020# asm 1: pxor <xmm7=int6464#8,<xmm5=int6464#6
2021# asm 2: pxor <xmm7=%xmm7,<xmm5=%xmm5
2022pxor %xmm7,%xmm5
2023
2024# qhasm: xmm6 ^= xmm1
2025# asm 1: pxor <xmm1=int6464#2,<xmm6=int6464#7
2026# asm 2: pxor <xmm1=%xmm1,<xmm6=%xmm6
2027pxor %xmm1,%xmm6
2028
2029# qhasm: xmm3 ^= xmm7
2030# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4
2031# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3
2032pxor %xmm7,%xmm3
2033
2034# qhasm: xmm4 ^= xmm5
2035# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
2036# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
2037pxor %xmm5,%xmm4
2038
2039# qhasm: xmm1 ^= xmm7
2040# asm 1: pxor <xmm7=int6464#8,<xmm1=int6464#2
2041# asm 2: pxor <xmm7=%xmm7,<xmm1=%xmm1
2042pxor %xmm7,%xmm1
2043
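# Register copies and xors here set up the inputs to the AND/OR gates
# of the S-box's nonlinear core (the GF(2^4) tower-field inversion).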
2044# qhasm: xmm11 = xmm5
2045# asm 1: movdqa <xmm5=int6464#6,>xmm11=int6464#9
2046# asm 2: movdqa <xmm5=%xmm5,>xmm11=%xmm8
2047movdqa %xmm5,%xmm8
2048
2049# qhasm: xmm10 = xmm1
2050# asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10
2051# asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9
2052movdqa %xmm1,%xmm9
2053
2054# qhasm: xmm9 = xmm7
2055# asm 1: movdqa <xmm7=int6464#8,>xmm9=int6464#11
2056# asm 2: movdqa <xmm7=%xmm7,>xmm9=%xmm10
2057movdqa %xmm7,%xmm10
2058
2059# qhasm: xmm13 = xmm4
2060# asm 1: movdqa <xmm4=int6464#5,>xmm13=int6464#12
2061# asm 2: movdqa <xmm4=%xmm4,>xmm13=%xmm11
2062movdqa %xmm4,%xmm11
2063
2064# qhasm: xmm12 = xmm2
2065# asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#13
2066# asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm12
2067movdqa %xmm2,%xmm12
2068
2069# qhasm: xmm11 ^= xmm3
2070# asm 1: pxor <xmm3=int6464#4,<xmm11=int6464#9
2071# asm 2: pxor <xmm3=%xmm3,<xmm11=%xmm8
2072pxor %xmm3,%xmm8
2073
2074# qhasm: xmm10 ^= xmm4
2075# asm 1: pxor <xmm4=int6464#5,<xmm10=int6464#10
2076# asm 2: pxor <xmm4=%xmm4,<xmm10=%xmm9
2077pxor %xmm4,%xmm9
2078
2079# qhasm: xmm9 ^= xmm6
2080# asm 1: pxor <xmm6=int6464#7,<xmm9=int6464#11
2081# asm 2: pxor <xmm6=%xmm6,<xmm9=%xmm10
2082pxor %xmm6,%xmm10
2083
2084# qhasm: xmm13 ^= xmm3
2085# asm 1: pxor <xmm3=int6464#4,<xmm13=int6464#12
2086# asm 2: pxor <xmm3=%xmm3,<xmm13=%xmm11
2087pxor %xmm3,%xmm11
2088
2089# qhasm: xmm12 ^= xmm0
2090# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13
2091# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12
2092pxor %xmm0,%xmm12
2093
2094# qhasm: xmm14 = xmm11
2095# asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14
2096# asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13
2097movdqa %xmm8,%xmm13
2098
2099# qhasm: xmm8 = xmm10
2100# asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15
2101# asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14
2102movdqa %xmm9,%xmm14
2103
2104# qhasm: xmm15 = xmm11
2105# asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16
2106# asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15
2107movdqa %xmm8,%xmm15
2108
2109# qhasm: xmm10 |= xmm9
2110# asm 1: por <xmm9=int6464#11,<xmm10=int6464#10
2111# asm 2: por <xmm9=%xmm10,<xmm10=%xmm9
2112por %xmm10,%xmm9
2113
2114# qhasm: xmm11 |= xmm12
2115# asm 1: por <xmm12=int6464#13,<xmm11=int6464#9
2116# asm 2: por <xmm12=%xmm12,<xmm11=%xmm8
2117por %xmm12,%xmm8
2118
2119# qhasm: xmm15 ^= xmm8
2120# asm 1: pxor <xmm8=int6464#15,<xmm15=int6464#16
2121# asm 2: pxor <xmm8=%xmm14,<xmm15=%xmm15
2122pxor %xmm14,%xmm15
2123
2124# qhasm: xmm14 &= xmm12
2125# asm 1: pand <xmm12=int6464#13,<xmm14=int6464#14
2126# asm 2: pand <xmm12=%xmm12,<xmm14=%xmm13
2127pand %xmm12,%xmm13
2128
2129# qhasm: xmm8 &= xmm9
2130# asm 1: pand <xmm9=int6464#11,<xmm8=int6464#15
2131# asm 2: pand <xmm9=%xmm10,<xmm8=%xmm14
2132pand %xmm10,%xmm14
2133
2134# qhasm: xmm12 ^= xmm9
2135# asm 1: pxor <xmm9=int6464#11,<xmm12=int6464#13
2136# asm 2: pxor <xmm9=%xmm10,<xmm12=%xmm12
2137pxor %xmm10,%xmm12
2138
2139# qhasm: xmm15 &= xmm12
2140# asm 1: pand <xmm12=int6464#13,<xmm15=int6464#16
2141# asm 2: pand <xmm12=%xmm12,<xmm15=%xmm15
2142pand %xmm12,%xmm15
2143
2144# qhasm: xmm12 = xmm6
2145# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#11
2146# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm10
2147movdqa %xmm6,%xmm10
2148
2149# qhasm: xmm12 ^= xmm0
2150# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#11
2151# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm10
2152pxor %xmm0,%xmm10
2153
2154# qhasm: xmm13 &= xmm12
2155# asm 1: pand <xmm12=int6464#11,<xmm13=int6464#12
2156# asm 2: pand <xmm12=%xmm10,<xmm13=%xmm11
2157pand %xmm10,%xmm11
2158
2159# qhasm: xmm11 ^= xmm13
2160# asm 1: pxor <xmm13=int6464#12,<xmm11=int6464#9
2161# asm 2: pxor <xmm13=%xmm11,<xmm11=%xmm8
2162pxor %xmm11,%xmm8
2163
2164# qhasm: xmm10 ^= xmm13
2165# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
2166# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
2167pxor %xmm11,%xmm9
2168
2169# qhasm: xmm13 = xmm5
2170# asm 1: movdqa <xmm5=int6464#6,>xmm13=int6464#11
2171# asm 2: movdqa <xmm5=%xmm5,>xmm13=%xmm10
2172movdqa %xmm5,%xmm10
2173
2174# qhasm: xmm13 ^= xmm1
2175# asm 1: pxor <xmm1=int6464#2,<xmm13=int6464#11
2176# asm 2: pxor <xmm1=%xmm1,<xmm13=%xmm10
2177pxor %xmm1,%xmm10
2178
2179# qhasm: xmm12 = xmm7
2180# asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#12
2181# asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm11
2182movdqa %xmm7,%xmm11
2183
2184# qhasm: xmm9 = xmm13
2185# asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13
2186# asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12
2187movdqa %xmm10,%xmm12
2188
2189# qhasm: xmm12 ^= xmm2
2190# asm 1: pxor <xmm2=int6464#3,<xmm12=int6464#12
2191# asm 2: pxor <xmm2=%xmm2,<xmm12=%xmm11
2192pxor %xmm2,%xmm11
2193
2194# qhasm: xmm9 |= xmm12
2195# asm 1: por <xmm12=int6464#12,<xmm9=int6464#13
2196# asm 2: por <xmm12=%xmm11,<xmm9=%xmm12
2197por %xmm11,%xmm12
2198
2199# qhasm: xmm13 &= xmm12
2200# asm 1: pand <xmm12=int6464#12,<xmm13=int6464#11
2201# asm 2: pand <xmm12=%xmm11,<xmm13=%xmm10
2202pand %xmm11,%xmm10
2203
2204# qhasm: xmm8 ^= xmm13
2205# asm 1: pxor <xmm13=int6464#11,<xmm8=int6464#15
2206# asm 2: pxor <xmm13=%xmm10,<xmm8=%xmm14
2207pxor %xmm10,%xmm14
2208
2209# qhasm: xmm11 ^= xmm15
2210# asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#9
2211# asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm8
2212pxor %xmm15,%xmm8
2213
2214# qhasm: xmm10 ^= xmm14
2215# asm 1: pxor <xmm14=int6464#14,<xmm10=int6464#10
2216# asm 2: pxor <xmm14=%xmm13,<xmm10=%xmm9
2217pxor %xmm13,%xmm9
2218
2219# qhasm: xmm9 ^= xmm15
2220# asm 1: pxor <xmm15=int6464#16,<xmm9=int6464#13
2221# asm 2: pxor <xmm15=%xmm15,<xmm9=%xmm12
2222pxor %xmm15,%xmm12
2223
2224# qhasm: xmm8 ^= xmm14
2225# asm 1: pxor <xmm14=int6464#14,<xmm8=int6464#15
2226# asm 2: pxor <xmm14=%xmm13,<xmm8=%xmm14
2227pxor %xmm13,%xmm14
2228
2229# qhasm: xmm9 ^= xmm14
2230# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
2231# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
2232pxor %xmm13,%xmm12
2233
2234# qhasm: xmm12 = xmm4
2235# asm 1: movdqa <xmm4=int6464#5,>xmm12=int6464#11
2236# asm 2: movdqa <xmm4=%xmm4,>xmm12=%xmm10
2237movdqa %xmm4,%xmm10
2238
2239# qhasm: xmm13 = xmm3
2240# asm 1: movdqa <xmm3=int6464#4,>xmm13=int6464#12
2241# asm 2: movdqa <xmm3=%xmm3,>xmm13=%xmm11
2242movdqa %xmm3,%xmm11
2243
2244# qhasm: xmm14 = xmm1
2245# asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14
2246# asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13
2247movdqa %xmm1,%xmm13
2248
2249# qhasm: xmm15 = xmm5
2250# asm 1: movdqa <xmm5=int6464#6,>xmm15=int6464#16
2251# asm 2: movdqa <xmm5=%xmm5,>xmm15=%xmm15
2252movdqa %xmm5,%xmm15
2253
2254# qhasm: xmm12 &= xmm6
2255# asm 1: pand <xmm6=int6464#7,<xmm12=int6464#11
2256# asm 2: pand <xmm6=%xmm6,<xmm12=%xmm10
2257pand %xmm6,%xmm10
2258
2259# qhasm: xmm13 &= xmm0
2260# asm 1: pand <xmm0=int6464#1,<xmm13=int6464#12
2261# asm 2: pand <xmm0=%xmm0,<xmm13=%xmm11
2262pand %xmm0,%xmm11
2263
2264# qhasm: xmm14 &= xmm7
2265# asm 1: pand <xmm7=int6464#8,<xmm14=int6464#14
2266# asm 2: pand <xmm7=%xmm7,<xmm14=%xmm13
2267pand %xmm7,%xmm13
2268
2269# qhasm: xmm15 |= xmm2
2270# asm 1: por <xmm2=int6464#3,<xmm15=int6464#16
2271# asm 2: por <xmm2=%xmm2,<xmm15=%xmm15
2272por %xmm2,%xmm15
2273
2274# qhasm: xmm11 ^= xmm12
2275# asm 1: pxor <xmm12=int6464#11,<xmm11=int6464#9
2276# asm 2: pxor <xmm12=%xmm10,<xmm11=%xmm8
2277pxor %xmm10,%xmm8
2278
2279# qhasm: xmm10 ^= xmm13
2280# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
2281# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
2282pxor %xmm11,%xmm9
2283
2284# qhasm: xmm9 ^= xmm14
2285# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
2286# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
2287pxor %xmm13,%xmm12
2288
2289# qhasm: xmm8 ^= xmm15
2290# asm 1: pxor <xmm15=int6464#16,<xmm8=int6464#15
2291# asm 2: pxor <xmm15=%xmm15,<xmm8=%xmm14
2292pxor %xmm15,%xmm14
2293
2294# qhasm: xmm12 = xmm11
2295# asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11
2296# asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10
2297movdqa %xmm8,%xmm10
2298
2299# qhasm: xmm12 ^= xmm10
2300# asm 1: pxor <xmm10=int6464#10,<xmm12=int6464#11
2301# asm 2: pxor <xmm10=%xmm9,<xmm12=%xmm10
2302pxor %xmm9,%xmm10
2303
2304# qhasm: xmm11 &= xmm9
2305# asm 1: pand <xmm9=int6464#13,<xmm11=int6464#9
2306# asm 2: pand <xmm9=%xmm12,<xmm11=%xmm8
2307pand %xmm12,%xmm8
2308
2309# qhasm: xmm14 = xmm8
2310# asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12
2311# asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11
2312movdqa %xmm14,%xmm11
2313
2314# qhasm: xmm14 ^= xmm11
2315# asm 1: pxor <xmm11=int6464#9,<xmm14=int6464#12
2316# asm 2: pxor <xmm11=%xmm8,<xmm14=%xmm11
2317pxor %xmm8,%xmm11
2318
2319# qhasm: xmm15 = xmm12
2320# asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14
2321# asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13
2322movdqa %xmm10,%xmm13
2323
2324# qhasm: xmm15 &= xmm14
2325# asm 1: pand <xmm14=int6464#12,<xmm15=int6464#14
2326# asm 2: pand <xmm14=%xmm11,<xmm15=%xmm13
2327pand %xmm11,%xmm13
2328
2329# qhasm: xmm15 ^= xmm10
2330# asm 1: pxor <xmm10=int6464#10,<xmm15=int6464#14
2331# asm 2: pxor <xmm10=%xmm9,<xmm15=%xmm13
2332pxor %xmm9,%xmm13
2333
2334# qhasm: xmm13 = xmm9
2335# asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16
2336# asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15
2337movdqa %xmm12,%xmm15
2338
2339# qhasm: xmm13 ^= xmm8
2340# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
2341# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
2342pxor %xmm14,%xmm15
2343
2344# qhasm: xmm11 ^= xmm10
2345# asm 1: pxor <xmm10=int6464#10,<xmm11=int6464#9
2346# asm 2: pxor <xmm10=%xmm9,<xmm11=%xmm8
2347pxor %xmm9,%xmm8
2348
2349# qhasm: xmm13 &= xmm11
2350# asm 1: pand <xmm11=int6464#9,<xmm13=int6464#16
2351# asm 2: pand <xmm11=%xmm8,<xmm13=%xmm15
2352pand %xmm8,%xmm15
2353
2354# qhasm: xmm13 ^= xmm8
2355# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
2356# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
2357pxor %xmm14,%xmm15
2358
2359# qhasm: xmm9 ^= xmm13
2360# asm 1: pxor <xmm13=int6464#16,<xmm9=int6464#13
2361# asm 2: pxor <xmm13=%xmm15,<xmm9=%xmm12
2362pxor %xmm15,%xmm12
2363
2364# qhasm: xmm10 = xmm14
2365# asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9
2366# asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8
2367movdqa %xmm11,%xmm8
2368
2369# qhasm: xmm10 ^= xmm13
2370# asm 1: pxor <xmm13=int6464#16,<xmm10=int6464#9
2371# asm 2: pxor <xmm13=%xmm15,<xmm10=%xmm8
2372pxor %xmm15,%xmm8
2373
2374# qhasm: xmm10 &= xmm8
2375# asm 1: pand <xmm8=int6464#15,<xmm10=int6464#9
2376# asm 2: pand <xmm8=%xmm14,<xmm10=%xmm8
2377pand %xmm14,%xmm8
2378
2379# qhasm: xmm9 ^= xmm10
2380# asm 1: pxor <xmm10=int6464#9,<xmm9=int6464#13
2381# asm 2: pxor <xmm10=%xmm8,<xmm9=%xmm12
2382pxor %xmm8,%xmm12
2383
2384# qhasm: xmm14 ^= xmm10
2385# asm 1: pxor <xmm10=int6464#9,<xmm14=int6464#12
2386# asm 2: pxor <xmm10=%xmm8,<xmm14=%xmm11
2387pxor %xmm8,%xmm11
2388
2389# qhasm: xmm14 &= xmm15
2390# asm 1: pand <xmm15=int6464#14,<xmm14=int6464#12
2391# asm 2: pand <xmm15=%xmm13,<xmm14=%xmm11
2392pand %xmm13,%xmm11
2393
2394# qhasm: xmm14 ^= xmm12
2395# asm 1: pxor <xmm12=int6464#11,<xmm14=int6464#12
2396# asm 2: pxor <xmm12=%xmm10,<xmm14=%xmm11
2397pxor %xmm10,%xmm11
2398
2399# qhasm: xmm12 = xmm2
2400# asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#9
2401# asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm8
2402movdqa %xmm2,%xmm8
2403
2404# qhasm: xmm8 = xmm7
2405# asm 1: movdqa <xmm7=int6464#8,>xmm8=int6464#10
2406# asm 2: movdqa <xmm7=%xmm7,>xmm8=%xmm9
2407movdqa %xmm7,%xmm9
2408
2409# qhasm: xmm10 = xmm15
2410# asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11
2411# asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10
2412movdqa %xmm13,%xmm10
2413
2414# qhasm: xmm10 ^= xmm14
2415# asm 1: pxor <xmm14=int6464#12,<xmm10=int6464#11
2416# asm 2: pxor <xmm14=%xmm11,<xmm10=%xmm10
2417pxor %xmm11,%xmm10
2418
2419# qhasm: xmm10 &= xmm2
2420# asm 1: pand <xmm2=int6464#3,<xmm10=int6464#11
2421# asm 2: pand <xmm2=%xmm2,<xmm10=%xmm10
2422pand %xmm2,%xmm10
2423
2424# qhasm: xmm2 ^= xmm7
2425# asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3
2426# asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2
2427pxor %xmm7,%xmm2
2428
2429# qhasm: xmm2 &= xmm14
2430# asm 1: pand <xmm14=int6464#12,<xmm2=int6464#3
2431# asm 2: pand <xmm14=%xmm11,<xmm2=%xmm2
2432pand %xmm11,%xmm2
2433
2434# qhasm: xmm7 &= xmm15
2435# asm 1: pand <xmm15=int6464#14,<xmm7=int6464#8
2436# asm 2: pand <xmm15=%xmm13,<xmm7=%xmm7
2437pand %xmm13,%xmm7
2438
2439# qhasm: xmm2 ^= xmm7
2440# asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3
2441# asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2
2442pxor %xmm7,%xmm2
2443
2444# qhasm: xmm7 ^= xmm10
2445# asm 1: pxor <xmm10=int6464#11,<xmm7=int6464#8
2446# asm 2: pxor <xmm10=%xmm10,<xmm7=%xmm7
2447pxor %xmm10,%xmm7
2448
2449# qhasm: xmm12 ^= xmm0
2450# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#9
2451# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm8
2452pxor %xmm0,%xmm8
2453
2454# qhasm: xmm8 ^= xmm6
2455# asm 1: pxor <xmm6=int6464#7,<xmm8=int6464#10
2456# asm 2: pxor <xmm6=%xmm6,<xmm8=%xmm9
2457pxor %xmm6,%xmm9
2458
2459# qhasm: xmm15 ^= xmm13
2460# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
2461# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
2462pxor %xmm15,%xmm13
2463
2464# qhasm: xmm14 ^= xmm9
2465# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
2466# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
2467pxor %xmm12,%xmm11
2468
2469# qhasm: xmm11 = xmm15
2470# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
2471# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
2472movdqa %xmm13,%xmm10
2473
2474# qhasm: xmm11 ^= xmm14
2475# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
2476# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
2477pxor %xmm11,%xmm10
2478
2479# qhasm: xmm11 &= xmm12
2480# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
2481# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
2482pand %xmm8,%xmm10
2483
2484# qhasm: xmm12 ^= xmm8
2485# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
2486# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
2487pxor %xmm9,%xmm8
2488
2489# qhasm: xmm12 &= xmm14
2490# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
2491# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
2492pand %xmm11,%xmm8
2493
2494# qhasm: xmm8 &= xmm15
2495# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
2496# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
2497pand %xmm13,%xmm9
2498
2499# qhasm: xmm8 ^= xmm12
2500# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
2501# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
2502pxor %xmm8,%xmm9
2503
2504# qhasm: xmm12 ^= xmm11
2505# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
2506# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
2507pxor %xmm10,%xmm8
2508
2509# qhasm: xmm10 = xmm13
2510# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
2511# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
2512movdqa %xmm15,%xmm10
2513
2514# qhasm: xmm10 ^= xmm9
2515# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
2516# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
2517pxor %xmm12,%xmm10
2518
2519# qhasm: xmm10 &= xmm0
2520# asm 1: pand <xmm0=int6464#1,<xmm10=int6464#11
2521# asm 2: pand <xmm0=%xmm0,<xmm10=%xmm10
2522pand %xmm0,%xmm10
2523
2524# qhasm: xmm0 ^= xmm6
2525# asm 1: pxor <xmm6=int6464#7,<xmm0=int6464#1
2526# asm 2: pxor <xmm6=%xmm6,<xmm0=%xmm0
2527pxor %xmm6,%xmm0
2528
2529# qhasm: xmm0 &= xmm9
2530# asm 1: pand <xmm9=int6464#13,<xmm0=int6464#1
2531# asm 2: pand <xmm9=%xmm12,<xmm0=%xmm0
2532pand %xmm12,%xmm0
2533
2534# qhasm: xmm6 &= xmm13
2535# asm 1: pand <xmm13=int6464#16,<xmm6=int6464#7
2536# asm 2: pand <xmm13=%xmm15,<xmm6=%xmm6
2537pand %xmm15,%xmm6
2538
2539# qhasm: xmm0 ^= xmm6
2540# asm 1: pxor <xmm6=int6464#7,<xmm0=int6464#1
2541# asm 2: pxor <xmm6=%xmm6,<xmm0=%xmm0
2542pxor %xmm6,%xmm0
2543
2544# qhasm: xmm6 ^= xmm10
2545# asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7
2546# asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6
2547pxor %xmm10,%xmm6
2548
2549# qhasm: xmm2 ^= xmm12
2550# asm 1: pxor <xmm12=int6464#9,<xmm2=int6464#3
2551# asm 2: pxor <xmm12=%xmm8,<xmm2=%xmm2
2552pxor %xmm8,%xmm2
2553
2554# qhasm: xmm0 ^= xmm12
2555# asm 1: pxor <xmm12=int6464#9,<xmm0=int6464#1
2556# asm 2: pxor <xmm12=%xmm8,<xmm0=%xmm0
2557pxor %xmm8,%xmm0
2558
2559# qhasm: xmm7 ^= xmm8
2560# asm 1: pxor <xmm8=int6464#10,<xmm7=int6464#8
2561# asm 2: pxor <xmm8=%xmm9,<xmm7=%xmm7
2562pxor %xmm9,%xmm7
2563
2564# qhasm: xmm6 ^= xmm8
2565# asm 1: pxor <xmm8=int6464#10,<xmm6=int6464#7
2566# asm 2: pxor <xmm8=%xmm9,<xmm6=%xmm6
2567pxor %xmm9,%xmm6
2568
2569# qhasm: xmm12 = xmm5
2570# asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#9
2571# asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm8
2572movdqa %xmm5,%xmm8
2573
2574# qhasm: xmm8 = xmm1
2575# asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10
2576# asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9
2577movdqa %xmm1,%xmm9
2578
2579# qhasm: xmm12 ^= xmm3
2580# asm 1: pxor <xmm3=int6464#4,<xmm12=int6464#9
2581# asm 2: pxor <xmm3=%xmm3,<xmm12=%xmm8
2582pxor %xmm3,%xmm8
2583
2584# qhasm: xmm8 ^= xmm4
2585# asm 1: pxor <xmm4=int6464#5,<xmm8=int6464#10
2586# asm 2: pxor <xmm4=%xmm4,<xmm8=%xmm9
2587pxor %xmm4,%xmm9
2588
2589# qhasm: xmm11 = xmm15
2590# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
2591# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
2592movdqa %xmm13,%xmm10
2593
2594# qhasm: xmm11 ^= xmm14
2595# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
2596# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
2597pxor %xmm11,%xmm10
2598
2599# qhasm: xmm11 &= xmm12
2600# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
2601# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
2602pand %xmm8,%xmm10
2603
2604# qhasm: xmm12 ^= xmm8
2605# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
2606# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
2607pxor %xmm9,%xmm8
2608
2609# qhasm: xmm12 &= xmm14
2610# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
2611# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
2612pand %xmm11,%xmm8
2613
2614# qhasm: xmm8 &= xmm15
2615# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
2616# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
2617pand %xmm13,%xmm9
2618
2619# qhasm: xmm8 ^= xmm12
2620# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
2621# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
2622pxor %xmm8,%xmm9
2623
2624# qhasm: xmm12 ^= xmm11
2625# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
2626# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
2627pxor %xmm10,%xmm8
2628
2629# qhasm: xmm10 = xmm13
2630# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
2631# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
2632movdqa %xmm15,%xmm10
2633
2634# qhasm: xmm10 ^= xmm9
2635# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
2636# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
2637pxor %xmm12,%xmm10
2638
2639# qhasm: xmm10 &= xmm3
2640# asm 1: pand <xmm3=int6464#4,<xmm10=int6464#11
2641# asm 2: pand <xmm3=%xmm3,<xmm10=%xmm10
2642pand %xmm3,%xmm10
2643
2644# qhasm: xmm3 ^= xmm4
2645# asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4
2646# asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3
2647pxor %xmm4,%xmm3
2648
2649# qhasm: xmm3 &= xmm9
2650# asm 1: pand <xmm9=int6464#13,<xmm3=int6464#4
2651# asm 2: pand <xmm9=%xmm12,<xmm3=%xmm3
2652pand %xmm12,%xmm3
2653
2654# qhasm: xmm4 &= xmm13
2655# asm 1: pand <xmm13=int6464#16,<xmm4=int6464#5
2656# asm 2: pand <xmm13=%xmm15,<xmm4=%xmm4
2657pand %xmm15,%xmm4
2658
2659# qhasm: xmm3 ^= xmm4
2660# asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4
2661# asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3
2662pxor %xmm4,%xmm3
2663
2664# qhasm: xmm4 ^= xmm10
2665# asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5
2666# asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4
2667pxor %xmm10,%xmm4
2668
2669# qhasm: xmm15 ^= xmm13
2670# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
2671# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
2672pxor %xmm15,%xmm13
2673
2674# qhasm: xmm14 ^= xmm9
2675# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
2676# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
2677pxor %xmm12,%xmm11
2678
2679# qhasm: xmm11 = xmm15
2680# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
2681# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
2682movdqa %xmm13,%xmm10
2683
2684# qhasm: xmm11 ^= xmm14
2685# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
2686# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
2687pxor %xmm11,%xmm10
2688
2689# qhasm: xmm11 &= xmm5
2690# asm 1: pand <xmm5=int6464#6,<xmm11=int6464#11
2691# asm 2: pand <xmm5=%xmm5,<xmm11=%xmm10
2692pand %xmm5,%xmm10
2693
2694# qhasm: xmm5 ^= xmm1
2695# asm 1: pxor <xmm1=int6464#2,<xmm5=int6464#6
2696# asm 2: pxor <xmm1=%xmm1,<xmm5=%xmm5
2697pxor %xmm1,%xmm5
2698
2699# qhasm: xmm5 &= xmm14
2700# asm 1: pand <xmm14=int6464#12,<xmm5=int6464#6
2701# asm 2: pand <xmm14=%xmm11,<xmm5=%xmm5
2702pand %xmm11,%xmm5
2703
2704# qhasm: xmm1 &= xmm15
2705# asm 1: pand <xmm15=int6464#14,<xmm1=int6464#2
2706# asm 2: pand <xmm15=%xmm13,<xmm1=%xmm1
2707pand %xmm13,%xmm1
2708
2709# qhasm: xmm5 ^= xmm1
2710# asm 1: pxor <xmm1=int6464#2,<xmm5=int6464#6
2711# asm 2: pxor <xmm1=%xmm1,<xmm5=%xmm5
2712pxor %xmm1,%xmm5
2713
2714# qhasm: xmm1 ^= xmm11
2715# asm 1: pxor <xmm11=int6464#11,<xmm1=int6464#2
2716# asm 2: pxor <xmm11=%xmm10,<xmm1=%xmm1
2717pxor %xmm10,%xmm1
2718
2719# qhasm: xmm5 ^= xmm12
2720# asm 1: pxor <xmm12=int6464#9,<xmm5=int6464#6
2721# asm 2: pxor <xmm12=%xmm8,<xmm5=%xmm5
2722pxor %xmm8,%xmm5
2723
2724# qhasm: xmm3 ^= xmm12
2725# asm 1: pxor <xmm12=int6464#9,<xmm3=int6464#4
2726# asm 2: pxor <xmm12=%xmm8,<xmm3=%xmm3
2727pxor %xmm8,%xmm3
2728
2729# qhasm: xmm1 ^= xmm8
2730# asm 1: pxor <xmm8=int6464#10,<xmm1=int6464#2
2731# asm 2: pxor <xmm8=%xmm9,<xmm1=%xmm1
2732pxor %xmm9,%xmm1
2733
2734# qhasm: xmm4 ^= xmm8
2735# asm 1: pxor <xmm8=int6464#10,<xmm4=int6464#5
2736# asm 2: pxor <xmm8=%xmm9,<xmm4=%xmm4
2737pxor %xmm9,%xmm4
2738
2739# qhasm: xmm5 ^= xmm0
2740# asm 1: pxor <xmm0=int6464#1,<xmm5=int6464#6
2741# asm 2: pxor <xmm0=%xmm0,<xmm5=%xmm5
2742pxor %xmm0,%xmm5
2743
2744# qhasm: xmm1 ^= xmm2
2745# asm 1: pxor <xmm2=int6464#3,<xmm1=int6464#2
2746# asm 2: pxor <xmm2=%xmm2,<xmm1=%xmm1
2747pxor %xmm2,%xmm1
2748
2749# qhasm: xmm3 ^= xmm5
2750# asm 1: pxor <xmm5=int6464#6,<xmm3=int6464#4
2751# asm 2: pxor <xmm5=%xmm5,<xmm3=%xmm3
2752pxor %xmm5,%xmm3
2753
2754# qhasm: xmm2 ^= xmm0
2755# asm 1: pxor <xmm0=int6464#1,<xmm2=int6464#3
2756# asm 2: pxor <xmm0=%xmm0,<xmm2=%xmm2
2757pxor %xmm0,%xmm2
2758
2759# qhasm: xmm0 ^= xmm1
2760# asm 1: pxor <xmm1=int6464#2,<xmm0=int6464#1
2761# asm 2: pxor <xmm1=%xmm1,<xmm0=%xmm0
2762pxor %xmm1,%xmm0
2763
2764# qhasm: xmm1 ^= xmm7
2765# asm 1: pxor <xmm7=int6464#8,<xmm1=int6464#2
2766# asm 2: pxor <xmm7=%xmm7,<xmm1=%xmm1
2767pxor %xmm7,%xmm1
2768
2769# qhasm: xmm7 ^= xmm4
2770# asm 1: pxor <xmm4=int6464#5,<xmm7=int6464#8
2771# asm 2: pxor <xmm4=%xmm4,<xmm7=%xmm7
2772pxor %xmm4,%xmm7
2773
2774# qhasm: xmm3 ^= xmm7
2775# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4
2776# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3
2777pxor %xmm7,%xmm3
2778
2779# qhasm: xmm4 ^= xmm6
2780# asm 1: pxor <xmm6=int6464#7,<xmm4=int6464#5
2781# asm 2: pxor <xmm6=%xmm6,<xmm4=%xmm4
2782pxor %xmm6,%xmm4
2783
2784# qhasm: xmm6 ^= xmm7
2785# asm 1: pxor <xmm7=int6464#8,<xmm6=int6464#7
2786# asm 2: pxor <xmm7=%xmm7,<xmm6=%xmm6
2787pxor %xmm7,%xmm6
2788
2789# qhasm: xmm2 ^= xmm6
2790# asm 1: pxor <xmm6=int6464#7,<xmm2=int6464#3
2791# asm 2: pxor <xmm6=%xmm6,<xmm2=%xmm2
2792pxor %xmm6,%xmm2
2793
2794# qhasm: xmm1 ^= RCON
2795# asm 1: pxor RCON,<xmm1=int6464#2
2796# asm 2: pxor RCON,<xmm1=%xmm1
2797pxor RCON,%xmm1
2798
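# note: the RCON mask (all-ones in one 32-bit lane) flips a single
# bit-slice here; since this block goes on to store a round key at
# c+256 below, this appears to fold the AES round constant into the
# key schedule (bit-slice 1, i.e. rcon = 0x02).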
2799# qhasm: shuffle bytes of xmm0 by EXPB0
2800# asm 1: pshufb EXPB0,<xmm0=int6464#1
2801# asm 2: pshufb EXPB0,<xmm0=%xmm0
2802pshufb EXPB0,%xmm0
2803
2804# qhasm: shuffle bytes of xmm1 by EXPB0
2805# asm 1: pshufb EXPB0,<xmm1=int6464#2
2806# asm 2: pshufb EXPB0,<xmm1=%xmm1
2807pshufb EXPB0,%xmm1
2808
2809# qhasm: shuffle bytes of xmm3 by EXPB0
2810# asm 1: pshufb EXPB0,<xmm3=int6464#4
2811# asm 2: pshufb EXPB0,<xmm3=%xmm3
2812pshufb EXPB0,%xmm3
2813
2814# qhasm: shuffle bytes of xmm2 by EXPB0
2815# asm 1: pshufb EXPB0,<xmm2=int6464#3
2816# asm 2: pshufb EXPB0,<xmm2=%xmm2
2817pshufb EXPB0,%xmm2
2818
2819# qhasm: shuffle bytes of xmm6 by EXPB0
2820# asm 1: pshufb EXPB0,<xmm6=int6464#7
2821# asm 2: pshufb EXPB0,<xmm6=%xmm6
2822pshufb EXPB0,%xmm6
2823
2824# qhasm: shuffle bytes of xmm5 by EXPB0
2825# asm 1: pshufb EXPB0,<xmm5=int6464#6
2826# asm 2: pshufb EXPB0,<xmm5=%xmm5
2827pshufb EXPB0,%xmm5
2828
2829# qhasm: shuffle bytes of xmm4 by EXPB0
2830# asm 1: pshufb EXPB0,<xmm4=int6464#5
2831# asm 2: pshufb EXPB0,<xmm4=%xmm4
2832pshufb EXPB0,%xmm4
2833
2834# qhasm: shuffle bytes of xmm7 by EXPB0
2835# asm 1: pshufb EXPB0,<xmm7=int6464#8
2836# asm 2: pshufb EXPB0,<xmm7=%xmm7
2837pshufb EXPB0,%xmm7
2838
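# note: the eight pshufb-by-EXPB0 steps above appear to broadcast the
# S-box output of the relevant key byte across each 32-bit lane, which
# would match the SubWord/RotWord expansion step of the AES-128 key
# schedule in this bitsliced layout.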
2839# qhasm: xmm8 = *(int128 *)(c + 128)
2840# asm 1: movdqa 128(<c=int64#1),>xmm8=int6464#9
2841# asm 2: movdqa 128(<c=%rdi),>xmm8=%xmm8
2842movdqa 128(%rdi),%xmm8
2843
2844# qhasm: xmm9 = *(int128 *)(c + 144)
2845# asm 1: movdqa 144(<c=int64#1),>xmm9=int6464#10
2846# asm 2: movdqa 144(<c=%rdi),>xmm9=%xmm9
2847movdqa 144(%rdi),%xmm9
2848
2849# qhasm: xmm10 = *(int128 *)(c + 160)
2850# asm 1: movdqa 160(<c=int64#1),>xmm10=int6464#11
2851# asm 2: movdqa 160(<c=%rdi),>xmm10=%xmm10
2852movdqa 160(%rdi),%xmm10
2853
2854# qhasm: xmm11 = *(int128 *)(c + 176)
2855# asm 1: movdqa 176(<c=int64#1),>xmm11=int6464#12
2856# asm 2: movdqa 176(<c=%rdi),>xmm11=%xmm11
2857movdqa 176(%rdi),%xmm11
2858
2859# qhasm: xmm12 = *(int128 *)(c + 192)
2860# asm 1: movdqa 192(<c=int64#1),>xmm12=int6464#13
2861# asm 2: movdqa 192(<c=%rdi),>xmm12=%xmm12
2862movdqa 192(%rdi),%xmm12
2863
2864# qhasm: xmm13 = *(int128 *)(c + 208)
2865# asm 1: movdqa 208(<c=int64#1),>xmm13=int6464#14
2866# asm 2: movdqa 208(<c=%rdi),>xmm13=%xmm13
2867movdqa 208(%rdi),%xmm13
2868
2869# qhasm: xmm14 = *(int128 *)(c + 224)
2870# asm 1: movdqa 224(<c=int64#1),>xmm14=int6464#15
2871# asm 2: movdqa 224(<c=%rdi),>xmm14=%xmm14
2872movdqa 224(%rdi),%xmm14
2873
2874# qhasm: xmm15 = *(int128 *)(c + 240)
2875# asm 1: movdqa 240(<c=int64#1),>xmm15=int6464#16
2876# asm 2: movdqa 240(<c=%rdi),>xmm15=%xmm15
2877movdqa 240(%rdi),%xmm15
2878
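# note: xmm8..xmm15 now hold the eight 128-bit slices read from
# c+128..c+240, presumably the previous bitsliced round key.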
2879# qhasm: xmm8 ^= ONE
2880# asm 1: pxor ONE,<xmm8=int6464#9
2881# asm 2: pxor ONE,<xmm8=%xmm8
2882pxor ONE,%xmm8
2883
2884# qhasm: xmm9 ^= ONE
2885# asm 1: pxor ONE,<xmm9=int6464#10
2886# asm 2: pxor ONE,<xmm9=%xmm9
2887pxor ONE,%xmm9
2888
2889# qhasm: xmm13 ^= ONE
2890# asm 1: pxor ONE,<xmm13=int6464#14
2891# asm 2: pxor ONE,<xmm13=%xmm13
2892pxor ONE,%xmm13
2893
2894# qhasm: xmm14 ^= ONE
2895# asm 1: pxor ONE,<xmm14=int6464#15
2896# asm 2: pxor ONE,<xmm14=%xmm14
2897pxor ONE,%xmm14
2898
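# note: complementing xmm8, xmm9, xmm13, xmm14 (slices 0, 1, 5, 6, if
# the slices are stored in ascending bit order) is consistent with
# XORing 0x63 into every byte, i.e. keeping the S-box affine constant
# embedded in the stored round keys.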
2899# qhasm: xmm0 ^= xmm8
2900# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
2901# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
2902pxor %xmm8,%xmm0
2903
2904# qhasm: xmm1 ^= xmm9
2905# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
2906# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
2907pxor %xmm9,%xmm1
2908
2909# qhasm: xmm3 ^= xmm10
2910# asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4
2911# asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3
2912pxor %xmm10,%xmm3
2913
2914# qhasm: xmm2 ^= xmm11
2915# asm 1: pxor <xmm11=int6464#12,<xmm2=int6464#3
2916# asm 2: pxor <xmm11=%xmm11,<xmm2=%xmm2
2917pxor %xmm11,%xmm2
2918
2919# qhasm: xmm6 ^= xmm12
2920# asm 1: pxor <xmm12=int6464#13,<xmm6=int6464#7
2921# asm 2: pxor <xmm12=%xmm12,<xmm6=%xmm6
2922pxor %xmm12,%xmm6
2923
2924# qhasm: xmm5 ^= xmm13
2925# asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6
2926# asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5
2927pxor %xmm13,%xmm5
2928
2929# qhasm: xmm4 ^= xmm14
2930# asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5
2931# asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4
2932pxor %xmm14,%xmm4
2933
2934# qhasm: xmm7 ^= xmm15
2935# asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8
2936# asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7
2937pxor %xmm15,%xmm7
2938
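# note: first xor pass, combining the expanded output with the loaded
# key slices; three shift-and-xor passes on xmm8..xmm15 follow below.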
2939# qhasm: uint32323232 xmm8 >>= 8
2940# asm 1: psrld $8,<xmm8=int6464#9
2941# asm 2: psrld $8,<xmm8=%xmm8
2942psrld $8,%xmm8
2943
2944# qhasm: uint32323232 xmm9 >>= 8
2945# asm 1: psrld $8,<xmm9=int6464#10
2946# asm 2: psrld $8,<xmm9=%xmm9
2947psrld $8,%xmm9
2948
2949# qhasm: uint32323232 xmm10 >>= 8
2950# asm 1: psrld $8,<xmm10=int6464#11
2951# asm 2: psrld $8,<xmm10=%xmm10
2952psrld $8,%xmm10
2953
2954# qhasm: uint32323232 xmm11 >>= 8
2955# asm 1: psrld $8,<xmm11=int6464#12
2956# asm 2: psrld $8,<xmm11=%xmm11
2957psrld $8,%xmm11
2958
2959# qhasm: uint32323232 xmm12 >>= 8
2960# asm 1: psrld $8,<xmm12=int6464#13
2961# asm 2: psrld $8,<xmm12=%xmm12
2962psrld $8,%xmm12
2963
2964# qhasm: uint32323232 xmm13 >>= 8
2965# asm 1: psrld $8,<xmm13=int6464#14
2966# asm 2: psrld $8,<xmm13=%xmm13
2967psrld $8,%xmm13
2968
2969# qhasm: uint32323232 xmm14 >>= 8
2970# asm 1: psrld $8,<xmm14=int6464#15
2971# asm 2: psrld $8,<xmm14=%xmm14
2972psrld $8,%xmm14
2973
2974# qhasm: uint32323232 xmm15 >>= 8
2975# asm 1: psrld $8,<xmm15=int6464#16
2976# asm 2: psrld $8,<xmm15=%xmm15
2977psrld $8,%xmm15
2978
2979# qhasm: xmm0 ^= xmm8
2980# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
2981# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
2982pxor %xmm8,%xmm0
2983
2984# qhasm: xmm1 ^= xmm9
2985# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
2986# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
2987pxor %xmm9,%xmm1
2988
2989# qhasm: xmm3 ^= xmm10
2990# asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4
2991# asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3
2992pxor %xmm10,%xmm3
2993
2994# qhasm: xmm2 ^= xmm11
2995# asm 1: pxor <xmm11=int6464#12,<xmm2=int6464#3
2996# asm 2: pxor <xmm11=%xmm11,<xmm2=%xmm2
2997pxor %xmm11,%xmm2
2998
2999# qhasm: xmm6 ^= xmm12
3000# asm 1: pxor <xmm12=int6464#13,<xmm6=int6464#7
3001# asm 2: pxor <xmm12=%xmm12,<xmm6=%xmm6
3002pxor %xmm12,%xmm6
3003
3004# qhasm: xmm5 ^= xmm13
3005# asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6
3006# asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5
3007pxor %xmm13,%xmm5
3008
3009# qhasm: xmm4 ^= xmm14
3010# asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5
3011# asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4
3012pxor %xmm14,%xmm4
3013
3014# qhasm: xmm7 ^= xmm15
3015# asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8
3016# asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7
3017pxor %xmm15,%xmm7
3018
3019# qhasm: uint32323232 xmm8 >>= 8
3020# asm 1: psrld $8,<xmm8=int6464#9
3021# asm 2: psrld $8,<xmm8=%xmm8
3022psrld $8,%xmm8
3023
3024# qhasm: uint32323232 xmm9 >>= 8
3025# asm 1: psrld $8,<xmm9=int6464#10
3026# asm 2: psrld $8,<xmm9=%xmm9
3027psrld $8,%xmm9
3028
3029# qhasm: uint32323232 xmm10 >>= 8
3030# asm 1: psrld $8,<xmm10=int6464#11
3031# asm 2: psrld $8,<xmm10=%xmm10
3032psrld $8,%xmm10
3033
3034# qhasm: uint32323232 xmm11 >>= 8
3035# asm 1: psrld $8,<xmm11=int6464#12
3036# asm 2: psrld $8,<xmm11=%xmm11
3037psrld $8,%xmm11
3038
3039# qhasm: uint32323232 xmm12 >>= 8
3040# asm 1: psrld $8,<xmm12=int6464#13
3041# asm 2: psrld $8,<xmm12=%xmm12
3042psrld $8,%xmm12
3043
3044# qhasm: uint32323232 xmm13 >>= 8
3045# asm 1: psrld $8,<xmm13=int6464#14
3046# asm 2: psrld $8,<xmm13=%xmm13
3047psrld $8,%xmm13
3048
3049# qhasm: uint32323232 xmm14 >>= 8
3050# asm 1: psrld $8,<xmm14=int6464#15
3051# asm 2: psrld $8,<xmm14=%xmm14
3052psrld $8,%xmm14
3053
3054# qhasm: uint32323232 xmm15 >>= 8
3055# asm 1: psrld $8,<xmm15=int6464#16
3056# asm 2: psrld $8,<xmm15=%xmm15
3057psrld $8,%xmm15
3058
3059# qhasm: xmm0 ^= xmm8
3060# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
3061# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
3062pxor %xmm8,%xmm0
3063
3064# qhasm: xmm1 ^= xmm9
3065# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
3066# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
3067pxor %xmm9,%xmm1
3068
3069# qhasm: xmm3 ^= xmm10
3070# asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4
3071# asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3
3072pxor %xmm10,%xmm3
3073
3074# qhasm: xmm2 ^= xmm11
3075# asm 1: pxor <xmm11=int6464#12,<xmm2=int6464#3
3076# asm 2: pxor <xmm11=%xmm11,<xmm2=%xmm2
3077pxor %xmm11,%xmm2
3078
3079# qhasm: xmm6 ^= xmm12
3080# asm 1: pxor <xmm12=int6464#13,<xmm6=int6464#7
3081# asm 2: pxor <xmm12=%xmm12,<xmm6=%xmm6
3082pxor %xmm12,%xmm6
3083
3084# qhasm: xmm5 ^= xmm13
3085# asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6
3086# asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5
3087pxor %xmm13,%xmm5
3088
3089# qhasm: xmm4 ^= xmm14
3090# asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5
3091# asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4
3092pxor %xmm14,%xmm4
3093
3094# qhasm: xmm7 ^= xmm15
3095# asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8
3096# asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7
3097pxor %xmm15,%xmm7
3098
3099# qhasm: uint32323232 xmm8 >>= 8
3100# asm 1: psrld $8,<xmm8=int6464#9
3101# asm 2: psrld $8,<xmm8=%xmm8
3102psrld $8,%xmm8
3103
3104# qhasm: uint32323232 xmm9 >>= 8
3105# asm 1: psrld $8,<xmm9=int6464#10
3106# asm 2: psrld $8,<xmm9=%xmm9
3107psrld $8,%xmm9
3108
3109# qhasm: uint32323232 xmm10 >>= 8
3110# asm 1: psrld $8,<xmm10=int6464#11
3111# asm 2: psrld $8,<xmm10=%xmm10
3112psrld $8,%xmm10
3113
3114# qhasm: uint32323232 xmm11 >>= 8
3115# asm 1: psrld $8,<xmm11=int6464#12
3116# asm 2: psrld $8,<xmm11=%xmm11
3117psrld $8,%xmm11
3118
3119# qhasm: uint32323232 xmm12 >>= 8
3120# asm 1: psrld $8,<xmm12=int6464#13
3121# asm 2: psrld $8,<xmm12=%xmm12
3122psrld $8,%xmm12
3123
3124# qhasm: uint32323232 xmm13 >>= 8
3125# asm 1: psrld $8,<xmm13=int6464#14
3126# asm 2: psrld $8,<xmm13=%xmm13
3127psrld $8,%xmm13
3128
3129# qhasm: uint32323232 xmm14 >>= 8
3130# asm 1: psrld $8,<xmm14=int6464#15
3131# asm 2: psrld $8,<xmm14=%xmm14
3132psrld $8,%xmm14
3133
3134# qhasm: uint32323232 xmm15 >>= 8
3135# asm 1: psrld $8,<xmm15=int6464#16
3136# asm 2: psrld $8,<xmm15=%xmm15
3137psrld $8,%xmm15
3138
3139# qhasm: xmm0 ^= xmm8
3140# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
3141# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
3142pxor %xmm8,%xmm0
3143
3144# qhasm: xmm1 ^= xmm9
3145# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
3146# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
3147pxor %xmm9,%xmm1
3148
3149# qhasm: xmm3 ^= xmm10
3150# asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4
3151# asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3
3152pxor %xmm10,%xmm3
3153
3154# qhasm: xmm2 ^= xmm11
3155# asm 1: pxor <xmm11=int6464#12,<xmm2=int6464#3
3156# asm 2: pxor <xmm11=%xmm11,<xmm2=%xmm2
3157pxor %xmm11,%xmm2
3158
3159# qhasm: xmm6 ^= xmm12
3160# asm 1: pxor <xmm12=int6464#13,<xmm6=int6464#7
3161# asm 2: pxor <xmm12=%xmm12,<xmm6=%xmm6
3162pxor %xmm12,%xmm6
3163
3164# qhasm: xmm5 ^= xmm13
3165# asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6
3166# asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5
3167pxor %xmm13,%xmm5
3168
3169# qhasm: xmm4 ^= xmm14
3170# asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5
3171# asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4
3172pxor %xmm14,%xmm4
3173
3174# qhasm: xmm7 ^= xmm15
3175# asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8
3176# asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7
3177pxor %xmm15,%xmm7
3178
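# note: together with the initial xor, the three psrld/pxor passes
# above accumulate t ^ (t>>8) ^ (t>>16) ^ (t>>24) per 32-bit lane into
# each slice, where t is the loaded key slice -- a running byte xor
# that, given the byte order arranged by the earlier shuffles, is
# consistent with the word-chaining w[i] = w[i-1] ^ w[i-4] of the
# AES key schedule.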
3179# qhasm: *(int128 *)(c + 256) = xmm0
3180# asm 1: movdqa <xmm0=int6464#1,256(<c=int64#1)
3181# asm 2: movdqa <xmm0=%xmm0,256(<c=%rdi)
3182movdqa %xmm0,256(%rdi)
3183
3184# qhasm: *(int128 *)(c + 272) = xmm1
3185# asm 1: movdqa <xmm1=int6464#2,272(<c=int64#1)
3186# asm 2: movdqa <xmm1=%xmm1,272(<c=%rdi)
3187movdqa %xmm1,272(%rdi)
3188
3189# qhasm: *(int128 *)(c + 288) = xmm3
3190# asm 1: movdqa <xmm3=int6464#4,288(<c=int64#1)
3191# asm 2: movdqa <xmm3=%xmm3,288(<c=%rdi)
3192movdqa %xmm3,288(%rdi)
3193
3194# qhasm: *(int128 *)(c + 304) = xmm2
3195# asm 1: movdqa <xmm2=int6464#3,304(<c=int64#1)
3196# asm 2: movdqa <xmm2=%xmm2,304(<c=%rdi)
3197movdqa %xmm2,304(%rdi)
3198
3199# qhasm: *(int128 *)(c + 320) = xmm6
3200# asm 1: movdqa <xmm6=int6464#7,320(<c=int64#1)
3201# asm 2: movdqa <xmm6=%xmm6,320(<c=%rdi)
3202movdqa %xmm6,320(%rdi)
3203
3204# qhasm: *(int128 *)(c + 336) = xmm5
3205# asm 1: movdqa <xmm5=int6464#6,336(<c=int64#1)
3206# asm 2: movdqa <xmm5=%xmm5,336(<c=%rdi)
3207movdqa %xmm5,336(%rdi)
3208
3209# qhasm: *(int128 *)(c + 352) = xmm4
3210# asm 1: movdqa <xmm4=int6464#5,352(<c=int64#1)
3211# asm 2: movdqa <xmm4=%xmm4,352(<c=%rdi)
3212movdqa %xmm4,352(%rdi)
3213
3214# qhasm: *(int128 *)(c + 368) = xmm7
3215# asm 1: movdqa <xmm7=int6464#8,368(<c=int64#1)
3216# asm 2: movdqa <xmm7=%xmm7,368(<c=%rdi)
3217movdqa %xmm7,368(%rdi)
3218
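# note: the expanded round key (eight slices) has been written to
# c+256..c+368; the expansion round below produces c+384..c+496.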
3219# qhasm: xmm0 ^= ONE
3220# asm 1: pxor ONE,<xmm0=int6464#1
3221# asm 2: pxor ONE,<xmm0=%xmm0
3222pxor ONE,%xmm0
3223
3224# qhasm: xmm1 ^= ONE
3225# asm 1: pxor ONE,<xmm1=int6464#2
3226# asm 2: pxor ONE,<xmm1=%xmm1
3227pxor ONE,%xmm1
3228
3229# qhasm: xmm5 ^= ONE
3230# asm 1: pxor ONE,<xmm5=int6464#6
3231# asm 2: pxor ONE,<xmm5=%xmm5
3232pxor ONE,%xmm5
3233
3234# qhasm: xmm4 ^= ONE
3235# asm 1: pxor ONE,<xmm4=int6464#5
3236# asm 2: pxor ONE,<xmm4=%xmm4
3237pxor ONE,%xmm4
3238
3239# qhasm: shuffle bytes of xmm0 by ROTB
3240# asm 1: pshufb ROTB,<xmm0=int6464#1
3241# asm 2: pshufb ROTB,<xmm0=%xmm0
3242pshufb ROTB,%xmm0
3243
3244# qhasm: shuffle bytes of xmm1 by ROTB
3245# asm 1: pshufb ROTB,<xmm1=int6464#2
3246# asm 2: pshufb ROTB,<xmm1=%xmm1
3247pshufb ROTB,%xmm1
3248
3249# qhasm: shuffle bytes of xmm3 by ROTB
3250# asm 1: pshufb ROTB,<xmm3=int6464#4
3251# asm 2: pshufb ROTB,<xmm3=%xmm3
3252pshufb ROTB,%xmm3
3253
3254# qhasm: shuffle bytes of xmm2 by ROTB
3255# asm 1: pshufb ROTB,<xmm2=int6464#3
3256# asm 2: pshufb ROTB,<xmm2=%xmm2
3257pshufb ROTB,%xmm2
3258
3259# qhasm: shuffle bytes of xmm6 by ROTB
3260# asm 1: pshufb ROTB,<xmm6=int6464#7
3261# asm 2: pshufb ROTB,<xmm6=%xmm6
3262pshufb ROTB,%xmm6
3263
3264# qhasm: shuffle bytes of xmm5 by ROTB
3265# asm 1: pshufb ROTB,<xmm5=int6464#6
3266# asm 2: pshufb ROTB,<xmm5=%xmm5
3267pshufb ROTB,%xmm5
3268
3269# qhasm: shuffle bytes of xmm4 by ROTB
3270# asm 1: pshufb ROTB,<xmm4=int6464#5
3271# asm 2: pshufb ROTB,<xmm4=%xmm4
3272pshufb ROTB,%xmm4
3273
3274# qhasm: shuffle bytes of xmm7 by ROTB
3275# asm 1: pshufb ROTB,<xmm7=int6464#8
3276# asm 2: pshufb ROTB,<xmm7=%xmm7
3277pshufb ROTB,%xmm7
3278
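# note: slices 0, 1, 5, 6 of the fresh key (xmm0, xmm1, xmm5, xmm4 at
# this point) are re-complemented, apparently undoing the embedded
# 0x63, and byte-permuted via ROTB before the S-box is applied again
# for the next expansion round; the stored copy keeps the complement.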
3279# qhasm: xmm5 ^= xmm4
3280# asm 1: pxor <xmm4=int6464#5,<xmm5=int6464#6
3281# asm 2: pxor <xmm4=%xmm4,<xmm5=%xmm5
3282pxor %xmm4,%xmm5
3283
3284# qhasm: xmm3 ^= xmm1
3285# asm 1: pxor <xmm1=int6464#2,<xmm3=int6464#4
3286# asm 2: pxor <xmm1=%xmm1,<xmm3=%xmm3
3287pxor %xmm1,%xmm3
3288
3289# qhasm: xmm5 ^= xmm0
3290# asm 1: pxor <xmm0=int6464#1,<xmm5=int6464#6
3291# asm 2: pxor <xmm0=%xmm0,<xmm5=%xmm5
3292pxor %xmm0,%xmm5
3293
3294# qhasm: xmm4 ^= xmm3
3295# asm 1: pxor <xmm3=int6464#4,<xmm4=int6464#5
3296# asm 2: pxor <xmm3=%xmm3,<xmm4=%xmm4
3297pxor %xmm3,%xmm4
3298
3299# qhasm: xmm2 ^= xmm0
3300# asm 1: pxor <xmm0=int6464#1,<xmm2=int6464#3
3301# asm 2: pxor <xmm0=%xmm0,<xmm2=%xmm2
3302pxor %xmm0,%xmm2
3303
3304# qhasm: xmm4 ^= xmm2
3305# asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5
3306# asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4
3307pxor %xmm2,%xmm4
3308
3309# qhasm: xmm2 ^= xmm7
3310# asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3
3311# asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2
3312pxor %xmm7,%xmm2
3313
3314# qhasm: xmm2 ^= xmm6
3315# asm 1: pxor <xmm6=int6464#7,<xmm2=int6464#3
3316# asm 2: pxor <xmm6=%xmm6,<xmm2=%xmm2
3317pxor %xmm6,%xmm2
3318
3319# qhasm: xmm7 ^= xmm5
3320# asm 1: pxor <xmm5=int6464#6,<xmm7=int6464#8
3321# asm 2: pxor <xmm5=%xmm5,<xmm7=%xmm7
3322pxor %xmm5,%xmm7
3323
3324# qhasm: xmm2 ^= xmm1
3325# asm 1: pxor <xmm1=int6464#2,<xmm2=int6464#3
3326# asm 2: pxor <xmm1=%xmm1,<xmm2=%xmm2
3327pxor %xmm1,%xmm2
3328
3329# qhasm: xmm6 ^= xmm5
3330# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
3331# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
3332pxor %xmm5,%xmm6
3333
3334# qhasm: xmm3 ^= xmm7
3335# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4
3336# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3
3337pxor %xmm7,%xmm3
3338
3339# qhasm: xmm1 ^= xmm5
3340# asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2
3341# asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1
3342pxor %xmm5,%xmm1
3343
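# note: the xor ladder above appears to be the input linear layer of
# the bitsliced AES S-box circuit; the long pxor/pand/por/movdqa block
# that follows computes the GF(2^8) inversion on all eight slices at
# once, followed by the output linear layer.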
3344# qhasm: xmm11 = xmm7
3345# asm 1: movdqa <xmm7=int6464#8,>xmm11=int6464#9
3346# asm 2: movdqa <xmm7=%xmm7,>xmm11=%xmm8
3347movdqa %xmm7,%xmm8
3348
3349# qhasm: xmm10 = xmm1
3350# asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10
3351# asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9
3352movdqa %xmm1,%xmm9
3353
3354# qhasm: xmm9 = xmm5
3355# asm 1: movdqa <xmm5=int6464#6,>xmm9=int6464#11
3356# asm 2: movdqa <xmm5=%xmm5,>xmm9=%xmm10
3357movdqa %xmm5,%xmm10
3358
3359# qhasm: xmm13 = xmm3
3360# asm 1: movdqa <xmm3=int6464#4,>xmm13=int6464#12
3361# asm 2: movdqa <xmm3=%xmm3,>xmm13=%xmm11
3362movdqa %xmm3,%xmm11
3363
3364# qhasm: xmm12 = xmm4
3365# asm 1: movdqa <xmm4=int6464#5,>xmm12=int6464#13
3366# asm 2: movdqa <xmm4=%xmm4,>xmm12=%xmm12
3367movdqa %xmm4,%xmm12
3368
3369# qhasm: xmm11 ^= xmm6
3370# asm 1: pxor <xmm6=int6464#7,<xmm11=int6464#9
3371# asm 2: pxor <xmm6=%xmm6,<xmm11=%xmm8
3372pxor %xmm6,%xmm8
3373
3374# qhasm: xmm10 ^= xmm3
3375# asm 1: pxor <xmm3=int6464#4,<xmm10=int6464#10
3376# asm 2: pxor <xmm3=%xmm3,<xmm10=%xmm9
3377pxor %xmm3,%xmm9
3378
3379# qhasm: xmm9 ^= xmm2
3380# asm 1: pxor <xmm2=int6464#3,<xmm9=int6464#11
3381# asm 2: pxor <xmm2=%xmm2,<xmm9=%xmm10
3382pxor %xmm2,%xmm10
3383
3384# qhasm: xmm13 ^= xmm6
3385# asm 1: pxor <xmm6=int6464#7,<xmm13=int6464#12
3386# asm 2: pxor <xmm6=%xmm6,<xmm13=%xmm11
3387pxor %xmm6,%xmm11
3388
3389# qhasm: xmm12 ^= xmm0
3390# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13
3391# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12
3392pxor %xmm0,%xmm12
3393
3394# qhasm: xmm14 = xmm11
3395# asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14
3396# asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13
3397movdqa %xmm8,%xmm13
3398
3399# qhasm: xmm8 = xmm10
3400# asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15
3401# asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14
3402movdqa %xmm9,%xmm14
3403
3404# qhasm: xmm15 = xmm11
3405# asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16
3406# asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15
3407movdqa %xmm8,%xmm15
3408
3409# qhasm: xmm10 |= xmm9
3410# asm 1: por <xmm9=int6464#11,<xmm10=int6464#10
3411# asm 2: por <xmm9=%xmm10,<xmm10=%xmm9
3412por %xmm10,%xmm9
3413
3414# qhasm: xmm11 |= xmm12
3415# asm 1: por <xmm12=int6464#13,<xmm11=int6464#9
3416# asm 2: por <xmm12=%xmm12,<xmm11=%xmm8
3417por %xmm12,%xmm8
3418
3419# qhasm: xmm15 ^= xmm8
3420# asm 1: pxor <xmm8=int6464#15,<xmm15=int6464#16
3421# asm 2: pxor <xmm8=%xmm14,<xmm15=%xmm15
3422pxor %xmm14,%xmm15
3423
3424# qhasm: xmm14 &= xmm12
3425# asm 1: pand <xmm12=int6464#13,<xmm14=int6464#14
3426# asm 2: pand <xmm12=%xmm12,<xmm14=%xmm13
3427pand %xmm12,%xmm13
3428
3429# qhasm: xmm8 &= xmm9
3430# asm 1: pand <xmm9=int6464#11,<xmm8=int6464#15
3431# asm 2: pand <xmm9=%xmm10,<xmm8=%xmm14
3432pand %xmm10,%xmm14
3433
3434# qhasm: xmm12 ^= xmm9
3435# asm 1: pxor <xmm9=int6464#11,<xmm12=int6464#13
3436# asm 2: pxor <xmm9=%xmm10,<xmm12=%xmm12
3437pxor %xmm10,%xmm12
3438
3439# qhasm: xmm15 &= xmm12
3440# asm 1: pand <xmm12=int6464#13,<xmm15=int6464#16
3441# asm 2: pand <xmm12=%xmm12,<xmm15=%xmm15
3442pand %xmm12,%xmm15
3443
3444# qhasm: xmm12 = xmm2
3445# asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#11
3446# asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm10
3447movdqa %xmm2,%xmm10
3448
3449# qhasm: xmm12 ^= xmm0
3450# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#11
3451# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm10
3452pxor %xmm0,%xmm10
3453
3454# qhasm: xmm13 &= xmm12
3455# asm 1: pand <xmm12=int6464#11,<xmm13=int6464#12
3456# asm 2: pand <xmm12=%xmm10,<xmm13=%xmm11
3457pand %xmm10,%xmm11
3458
3459# qhasm: xmm11 ^= xmm13
3460# asm 1: pxor <xmm13=int6464#12,<xmm11=int6464#9
3461# asm 2: pxor <xmm13=%xmm11,<xmm11=%xmm8
3462pxor %xmm11,%xmm8
3463
3464# qhasm: xmm10 ^= xmm13
3465# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
3466# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
3467pxor %xmm11,%xmm9
3468
3469# qhasm: xmm13 = xmm7
3470# asm 1: movdqa <xmm7=int6464#8,>xmm13=int6464#11
3471# asm 2: movdqa <xmm7=%xmm7,>xmm13=%xmm10
3472movdqa %xmm7,%xmm10
3473
3474# qhasm: xmm13 ^= xmm1
3475# asm 1: pxor <xmm1=int6464#2,<xmm13=int6464#11
3476# asm 2: pxor <xmm1=%xmm1,<xmm13=%xmm10
3477pxor %xmm1,%xmm10
3478
3479# qhasm: xmm12 = xmm5
3480# asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#12
3481# asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm11
3482movdqa %xmm5,%xmm11
3483
3484# qhasm: xmm9 = xmm13
3485# asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13
3486# asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12
3487movdqa %xmm10,%xmm12
3488
3489# qhasm: xmm12 ^= xmm4
3490# asm 1: pxor <xmm4=int6464#5,<xmm12=int6464#12
3491# asm 2: pxor <xmm4=%xmm4,<xmm12=%xmm11
3492pxor %xmm4,%xmm11
3493
3494# qhasm: xmm9 |= xmm12
3495# asm 1: por <xmm12=int6464#12,<xmm9=int6464#13
3496# asm 2: por <xmm12=%xmm11,<xmm9=%xmm12
3497por %xmm11,%xmm12
3498
3499# qhasm: xmm13 &= xmm12
3500# asm 1: pand <xmm12=int6464#12,<xmm13=int6464#11
3501# asm 2: pand <xmm12=%xmm11,<xmm13=%xmm10
3502pand %xmm11,%xmm10
3503
3504# qhasm: xmm8 ^= xmm13
3505# asm 1: pxor <xmm13=int6464#11,<xmm8=int6464#15
3506# asm 2: pxor <xmm13=%xmm10,<xmm8=%xmm14
3507pxor %xmm10,%xmm14
3508
3509# qhasm: xmm11 ^= xmm15
3510# asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#9
3511# asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm8
3512pxor %xmm15,%xmm8
3513
3514# qhasm: xmm10 ^= xmm14
3515# asm 1: pxor <xmm14=int6464#14,<xmm10=int6464#10
3516# asm 2: pxor <xmm14=%xmm13,<xmm10=%xmm9
3517pxor %xmm13,%xmm9
3518
3519# qhasm: xmm9 ^= xmm15
3520# asm 1: pxor <xmm15=int6464#16,<xmm9=int6464#13
3521# asm 2: pxor <xmm15=%xmm15,<xmm9=%xmm12
3522pxor %xmm15,%xmm12
3523
3524# qhasm: xmm8 ^= xmm14
3525# asm 1: pxor <xmm14=int6464#14,<xmm8=int6464#15
3526# asm 2: pxor <xmm14=%xmm13,<xmm8=%xmm14
3527pxor %xmm13,%xmm14
3528
3529# qhasm: xmm9 ^= xmm14
3530# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
3531# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
3532pxor %xmm13,%xmm12
3533
3534# qhasm: xmm12 = xmm3
3535# asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#11
3536# asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm10
3537movdqa %xmm3,%xmm10
3538
3539# qhasm: xmm13 = xmm6
3540# asm 1: movdqa <xmm6=int6464#7,>xmm13=int6464#12
3541# asm 2: movdqa <xmm6=%xmm6,>xmm13=%xmm11
3542movdqa %xmm6,%xmm11
3543
3544# qhasm: xmm14 = xmm1
3545# asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14
3546# asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13
3547movdqa %xmm1,%xmm13
3548
3549# qhasm: xmm15 = xmm7
3550# asm 1: movdqa <xmm7=int6464#8,>xmm15=int6464#16
3551# asm 2: movdqa <xmm7=%xmm7,>xmm15=%xmm15
3552movdqa %xmm7,%xmm15
3553
3554# qhasm: xmm12 &= xmm2
3555# asm 1: pand <xmm2=int6464#3,<xmm12=int6464#11
3556# asm 2: pand <xmm2=%xmm2,<xmm12=%xmm10
3557pand %xmm2,%xmm10
3558
3559# qhasm: xmm13 &= xmm0
3560# asm 1: pand <xmm0=int6464#1,<xmm13=int6464#12
3561# asm 2: pand <xmm0=%xmm0,<xmm13=%xmm11
3562pand %xmm0,%xmm11
3563
3564# qhasm: xmm14 &= xmm5
3565# asm 1: pand <xmm5=int6464#6,<xmm14=int6464#14
3566# asm 2: pand <xmm5=%xmm5,<xmm14=%xmm13
3567pand %xmm5,%xmm13
3568
3569# qhasm: xmm15 |= xmm4
3570# asm 1: por <xmm4=int6464#5,<xmm15=int6464#16
3571# asm 2: por <xmm4=%xmm4,<xmm15=%xmm15
3572por %xmm4,%xmm15
3573
3574# qhasm: xmm11 ^= xmm12
3575# asm 1: pxor <xmm12=int6464#11,<xmm11=int6464#9
3576# asm 2: pxor <xmm12=%xmm10,<xmm11=%xmm8
3577pxor %xmm10,%xmm8
3578
3579# qhasm: xmm10 ^= xmm13
3580# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
3581# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
3582pxor %xmm11,%xmm9
3583
3584# qhasm: xmm9 ^= xmm14
3585# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
3586# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
3587pxor %xmm13,%xmm12
3588
3589# qhasm: xmm8 ^= xmm15
3590# asm 1: pxor <xmm15=int6464#16,<xmm8=int6464#15
3591# asm 2: pxor <xmm15=%xmm15,<xmm8=%xmm14
3592pxor %xmm15,%xmm14
3593
3594# qhasm: xmm12 = xmm11
3595# asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11
3596# asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10
3597movdqa %xmm8,%xmm10
3598
3599# qhasm: xmm12 ^= xmm10
3600# asm 1: pxor <xmm10=int6464#10,<xmm12=int6464#11
3601# asm 2: pxor <xmm10=%xmm9,<xmm12=%xmm10
3602pxor %xmm9,%xmm10
3603
3604# qhasm: xmm11 &= xmm9
3605# asm 1: pand <xmm9=int6464#13,<xmm11=int6464#9
3606# asm 2: pand <xmm9=%xmm12,<xmm11=%xmm8
3607pand %xmm12,%xmm8
3608
3609# qhasm: xmm14 = xmm8
3610# asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12
3611# asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11
3612movdqa %xmm14,%xmm11
3613
3614# qhasm: xmm14 ^= xmm11
3615# asm 1: pxor <xmm11=int6464#9,<xmm14=int6464#12
3616# asm 2: pxor <xmm11=%xmm8,<xmm14=%xmm11
3617pxor %xmm8,%xmm11
3618
3619# qhasm: xmm15 = xmm12
3620# asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14
3621# asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13
3622movdqa %xmm10,%xmm13
3623
3624# qhasm: xmm15 &= xmm14
3625# asm 1: pand <xmm14=int6464#12,<xmm15=int6464#14
3626# asm 2: pand <xmm14=%xmm11,<xmm15=%xmm13
3627pand %xmm11,%xmm13
3628
3629# qhasm: xmm15 ^= xmm10
3630# asm 1: pxor <xmm10=int6464#10,<xmm15=int6464#14
3631# asm 2: pxor <xmm10=%xmm9,<xmm15=%xmm13
3632pxor %xmm9,%xmm13
3633
3634# qhasm: xmm13 = xmm9
3635# asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16
3636# asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15
3637movdqa %xmm12,%xmm15
3638
3639# qhasm: xmm13 ^= xmm8
3640# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
3641# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
3642pxor %xmm14,%xmm15
3643
3644# qhasm: xmm11 ^= xmm10
3645# asm 1: pxor <xmm10=int6464#10,<xmm11=int6464#9
3646# asm 2: pxor <xmm10=%xmm9,<xmm11=%xmm8
3647pxor %xmm9,%xmm8
3648
3649# qhasm: xmm13 &= xmm11
3650# asm 1: pand <xmm11=int6464#9,<xmm13=int6464#16
3651# asm 2: pand <xmm11=%xmm8,<xmm13=%xmm15
3652pand %xmm8,%xmm15
3653
3654# qhasm: xmm13 ^= xmm8
3655# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
3656# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
3657pxor %xmm14,%xmm15
3658
3659# qhasm: xmm9 ^= xmm13
3660# asm 1: pxor <xmm13=int6464#16,<xmm9=int6464#13
3661# asm 2: pxor <xmm13=%xmm15,<xmm9=%xmm12
3662pxor %xmm15,%xmm12
3663
3664# qhasm: xmm10 = xmm14
3665# asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9
3666# asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8
3667movdqa %xmm11,%xmm8
3668
3669# qhasm: xmm10 ^= xmm13
3670# asm 1: pxor <xmm13=int6464#16,<xmm10=int6464#9
3671# asm 2: pxor <xmm13=%xmm15,<xmm10=%xmm8
3672pxor %xmm15,%xmm8
3673
3674# qhasm: xmm10 &= xmm8
3675# asm 1: pand <xmm8=int6464#15,<xmm10=int6464#9
3676# asm 2: pand <xmm8=%xmm14,<xmm10=%xmm8
3677pand %xmm14,%xmm8
3678
3679# qhasm: xmm9 ^= xmm10
3680# asm 1: pxor <xmm10=int6464#9,<xmm9=int6464#13
3681# asm 2: pxor <xmm10=%xmm8,<xmm9=%xmm12
3682pxor %xmm8,%xmm12
3683
3684# qhasm: xmm14 ^= xmm10
3685# asm 1: pxor <xmm10=int6464#9,<xmm14=int6464#12
3686# asm 2: pxor <xmm10=%xmm8,<xmm14=%xmm11
3687pxor %xmm8,%xmm11
3688
3689# qhasm: xmm14 &= xmm15
3690# asm 1: pand <xmm15=int6464#14,<xmm14=int6464#12
3691# asm 2: pand <xmm15=%xmm13,<xmm14=%xmm11
3692pand %xmm13,%xmm11
3693
3694# qhasm: xmm14 ^= xmm12
3695# asm 1: pxor <xmm12=int6464#11,<xmm14=int6464#12
3696# asm 2: pxor <xmm12=%xmm10,<xmm14=%xmm11
3697pxor %xmm10,%xmm11
3698
3699# qhasm: xmm12 = xmm4
3700# asm 1: movdqa <xmm4=int6464#5,>xmm12=int6464#9
3701# asm 2: movdqa <xmm4=%xmm4,>xmm12=%xmm8
3702movdqa %xmm4,%xmm8
3703
3704# qhasm: xmm8 = xmm5
3705# asm 1: movdqa <xmm5=int6464#6,>xmm8=int6464#10
3706# asm 2: movdqa <xmm5=%xmm5,>xmm8=%xmm9
3707movdqa %xmm5,%xmm9
3708
3709# qhasm: xmm10 = xmm15
3710# asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11
3711# asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10
3712movdqa %xmm13,%xmm10
3713
3714# qhasm: xmm10 ^= xmm14
3715# asm 1: pxor <xmm14=int6464#12,<xmm10=int6464#11
3716# asm 2: pxor <xmm14=%xmm11,<xmm10=%xmm10
3717pxor %xmm11,%xmm10
3718
3719# qhasm: xmm10 &= xmm4
3720# asm 1: pand <xmm4=int6464#5,<xmm10=int6464#11
3721# asm 2: pand <xmm4=%xmm4,<xmm10=%xmm10
3722pand %xmm4,%xmm10
3723
3724# qhasm: xmm4 ^= xmm5
3725# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
3726# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
3727pxor %xmm5,%xmm4
3728
3729# qhasm: xmm4 &= xmm14
3730# asm 1: pand <xmm14=int6464#12,<xmm4=int6464#5
3731# asm 2: pand <xmm14=%xmm11,<xmm4=%xmm4
3732pand %xmm11,%xmm4
3733
3734# qhasm: xmm5 &= xmm15
3735# asm 1: pand <xmm15=int6464#14,<xmm5=int6464#6
3736# asm 2: pand <xmm15=%xmm13,<xmm5=%xmm5
3737pand %xmm13,%xmm5
3738
3739# qhasm: xmm4 ^= xmm5
3740# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
3741# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
3742pxor %xmm5,%xmm4
3743
3744# qhasm: xmm5 ^= xmm10
3745# asm 1: pxor <xmm10=int6464#11,<xmm5=int6464#6
3746# asm 2: pxor <xmm10=%xmm10,<xmm5=%xmm5
3747pxor %xmm10,%xmm5
3748
3749# qhasm: xmm12 ^= xmm0
3750# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#9
3751# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm8
3752pxor %xmm0,%xmm8
3753
3754# qhasm: xmm8 ^= xmm2
3755# asm 1: pxor <xmm2=int6464#3,<xmm8=int6464#10
3756# asm 2: pxor <xmm2=%xmm2,<xmm8=%xmm9
3757pxor %xmm2,%xmm9
3758
3759# qhasm: xmm15 ^= xmm13
3760# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
3761# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
3762pxor %xmm15,%xmm13
3763
3764# qhasm: xmm14 ^= xmm9
3765# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
3766# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
3767pxor %xmm12,%xmm11
3768
3769# qhasm: xmm11 = xmm15
3770# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
3771# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
3772movdqa %xmm13,%xmm10
3773
3774# qhasm: xmm11 ^= xmm14
3775# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
3776# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
3777pxor %xmm11,%xmm10
3778
3779# qhasm: xmm11 &= xmm12
3780# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
3781# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
3782pand %xmm8,%xmm10
3783
3784# qhasm: xmm12 ^= xmm8
3785# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
3786# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
3787pxor %xmm9,%xmm8
3788
3789# qhasm: xmm12 &= xmm14
3790# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
3791# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
3792pand %xmm11,%xmm8
3793
3794# qhasm: xmm8 &= xmm15
3795# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
3796# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
3797pand %xmm13,%xmm9
3798
3799# qhasm: xmm8 ^= xmm12
3800# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
3801# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
3802pxor %xmm8,%xmm9
3803
3804# qhasm: xmm12 ^= xmm11
3805# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
3806# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
3807pxor %xmm10,%xmm8
3808
3809# qhasm: xmm10 = xmm13
3810# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
3811# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
3812movdqa %xmm15,%xmm10
3813
3814# qhasm: xmm10 ^= xmm9
3815# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
3816# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
3817pxor %xmm12,%xmm10
3818
3819# qhasm: xmm10 &= xmm0
3820# asm 1: pand <xmm0=int6464#1,<xmm10=int6464#11
3821# asm 2: pand <xmm0=%xmm0,<xmm10=%xmm10
3822pand %xmm0,%xmm10
3823
3824# qhasm: xmm0 ^= xmm2
3825# asm 1: pxor <xmm2=int6464#3,<xmm0=int6464#1
3826# asm 2: pxor <xmm2=%xmm2,<xmm0=%xmm0
3827pxor %xmm2,%xmm0
3828
3829# qhasm: xmm0 &= xmm9
3830# asm 1: pand <xmm9=int6464#13,<xmm0=int6464#1
3831# asm 2: pand <xmm9=%xmm12,<xmm0=%xmm0
3832pand %xmm12,%xmm0
3833
3834# qhasm: xmm2 &= xmm13
3835# asm 1: pand <xmm13=int6464#16,<xmm2=int6464#3
3836# asm 2: pand <xmm13=%xmm15,<xmm2=%xmm2
3837pand %xmm15,%xmm2
3838
3839# qhasm: xmm0 ^= xmm2
3840# asm 1: pxor <xmm2=int6464#3,<xmm0=int6464#1
3841# asm 2: pxor <xmm2=%xmm2,<xmm0=%xmm0
3842pxor %xmm2,%xmm0
3843
3844# qhasm: xmm2 ^= xmm10
3845# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3
3846# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2
3847pxor %xmm10,%xmm2
3848
3849# qhasm: xmm4 ^= xmm12
3850# asm 1: pxor <xmm12=int6464#9,<xmm4=int6464#5
3851# asm 2: pxor <xmm12=%xmm8,<xmm4=%xmm4
3852pxor %xmm8,%xmm4
3853
3854# qhasm: xmm0 ^= xmm12
3855# asm 1: pxor <xmm12=int6464#9,<xmm0=int6464#1
3856# asm 2: pxor <xmm12=%xmm8,<xmm0=%xmm0
3857pxor %xmm8,%xmm0
3858
3859# qhasm: xmm5 ^= xmm8
3860# asm 1: pxor <xmm8=int6464#10,<xmm5=int6464#6
3861# asm 2: pxor <xmm8=%xmm9,<xmm5=%xmm5
3862pxor %xmm9,%xmm5
3863
3864# qhasm: xmm2 ^= xmm8
3865# asm 1: pxor <xmm8=int6464#10,<xmm2=int6464#3
3866# asm 2: pxor <xmm8=%xmm9,<xmm2=%xmm2
3867pxor %xmm9,%xmm2
3868
3869# qhasm: xmm12 = xmm7
3870# asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#9
3871# asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm8
3872movdqa %xmm7,%xmm8
3873
3874# qhasm: xmm8 = xmm1
3875# asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10
3876# asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9
3877movdqa %xmm1,%xmm9
3878
3879# qhasm: xmm12 ^= xmm6
3880# asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#9
3881# asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm8
3882pxor %xmm6,%xmm8
3883
3884# qhasm: xmm8 ^= xmm3
3885# asm 1: pxor <xmm3=int6464#4,<xmm8=int6464#10
3886# asm 2: pxor <xmm3=%xmm3,<xmm8=%xmm9
3887pxor %xmm3,%xmm9
3888
3889# qhasm: xmm11 = xmm15
3890# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
3891# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
3892movdqa %xmm13,%xmm10
3893
3894# qhasm: xmm11 ^= xmm14
3895# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
3896# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
3897pxor %xmm11,%xmm10
3898
3899# qhasm: xmm11 &= xmm12
3900# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
3901# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
3902pand %xmm8,%xmm10
3903
3904# qhasm: xmm12 ^= xmm8
3905# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
3906# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
3907pxor %xmm9,%xmm8
3908
3909# qhasm: xmm12 &= xmm14
3910# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
3911# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
3912pand %xmm11,%xmm8
3913
3914# qhasm: xmm8 &= xmm15
3915# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
3916# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
3917pand %xmm13,%xmm9
3918
3919# qhasm: xmm8 ^= xmm12
3920# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
3921# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
3922pxor %xmm8,%xmm9
3923
3924# qhasm: xmm12 ^= xmm11
3925# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
3926# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
3927pxor %xmm10,%xmm8
3928
3929# qhasm: xmm10 = xmm13
3930# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
3931# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
3932movdqa %xmm15,%xmm10
3933
3934# qhasm: xmm10 ^= xmm9
3935# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
3936# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
3937pxor %xmm12,%xmm10
3938
3939# qhasm: xmm10 &= xmm6
3940# asm 1: pand <xmm6=int6464#7,<xmm10=int6464#11
3941# asm 2: pand <xmm6=%xmm6,<xmm10=%xmm10
3942pand %xmm6,%xmm10
3943
3944# qhasm: xmm6 ^= xmm3
3945# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
3946# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
3947pxor %xmm3,%xmm6
3948
3949# qhasm: xmm6 &= xmm9
3950# asm 1: pand <xmm9=int6464#13,<xmm6=int6464#7
3951# asm 2: pand <xmm9=%xmm12,<xmm6=%xmm6
3952pand %xmm12,%xmm6
3953
3954# qhasm: xmm3 &= xmm13
3955# asm 1: pand <xmm13=int6464#16,<xmm3=int6464#4
3956# asm 2: pand <xmm13=%xmm15,<xmm3=%xmm3
3957pand %xmm15,%xmm3
3958
3959# qhasm: xmm6 ^= xmm3
3960# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
3961# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
3962pxor %xmm3,%xmm6
3963
3964# qhasm: xmm3 ^= xmm10
3965# asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4
3966# asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3
3967pxor %xmm10,%xmm3
3968
3969# qhasm: xmm15 ^= xmm13
3970# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
3971# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
3972pxor %xmm15,%xmm13
3973
3974# qhasm: xmm14 ^= xmm9
3975# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
3976# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
3977pxor %xmm12,%xmm11
3978
3979# qhasm: xmm11 = xmm15
3980# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
3981# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
3982movdqa %xmm13,%xmm10
3983
3984# qhasm: xmm11 ^= xmm14
3985# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
3986# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
3987pxor %xmm11,%xmm10
3988
3989# qhasm: xmm11 &= xmm7
3990# asm 1: pand <xmm7=int6464#8,<xmm11=int6464#11
3991# asm 2: pand <xmm7=%xmm7,<xmm11=%xmm10
3992pand %xmm7,%xmm10
3993
3994# qhasm: xmm7 ^= xmm1
3995# asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8
3996# asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7
3997pxor %xmm1,%xmm7
3998
3999# qhasm: xmm7 &= xmm14
4000# asm 1: pand <xmm14=int6464#12,<xmm7=int6464#8
4001# asm 2: pand <xmm14=%xmm11,<xmm7=%xmm7
4002pand %xmm11,%xmm7
4003
4004# qhasm: xmm1 &= xmm15
4005# asm 1: pand <xmm15=int6464#14,<xmm1=int6464#2
4006# asm 2: pand <xmm15=%xmm13,<xmm1=%xmm1
4007pand %xmm13,%xmm1
4008
4009# qhasm: xmm7 ^= xmm1
4010# asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8
4011# asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7
4012pxor %xmm1,%xmm7
4013
4014# qhasm: xmm1 ^= xmm11
4015# asm 1: pxor <xmm11=int6464#11,<xmm1=int6464#2
4016# asm 2: pxor <xmm11=%xmm10,<xmm1=%xmm1
4017pxor %xmm10,%xmm1
4018
4019# qhasm: xmm7 ^= xmm12
4020# asm 1: pxor <xmm12=int6464#9,<xmm7=int6464#8
4021# asm 2: pxor <xmm12=%xmm8,<xmm7=%xmm7
4022pxor %xmm8,%xmm7
4023
4024# qhasm: xmm6 ^= xmm12
4025# asm 1: pxor <xmm12=int6464#9,<xmm6=int6464#7
4026# asm 2: pxor <xmm12=%xmm8,<xmm6=%xmm6
4027pxor %xmm8,%xmm6
4028
4029# qhasm: xmm1 ^= xmm8
4030# asm 1: pxor <xmm8=int6464#10,<xmm1=int6464#2
4031# asm 2: pxor <xmm8=%xmm9,<xmm1=%xmm1
4032pxor %xmm9,%xmm1
4033
4034# qhasm: xmm3 ^= xmm8
4035# asm 1: pxor <xmm8=int6464#10,<xmm3=int6464#4
4036# asm 2: pxor <xmm8=%xmm9,<xmm3=%xmm3
4037pxor %xmm9,%xmm3
4038
4039# qhasm: xmm7 ^= xmm0
4040# asm 1: pxor <xmm0=int6464#1,<xmm7=int6464#8
4041# asm 2: pxor <xmm0=%xmm0,<xmm7=%xmm7
4042pxor %xmm0,%xmm7
4043
4044# qhasm: xmm1 ^= xmm4
4045# asm 1: pxor <xmm4=int6464#5,<xmm1=int6464#2
4046# asm 2: pxor <xmm4=%xmm4,<xmm1=%xmm1
4047pxor %xmm4,%xmm1
4048
4049# qhasm: xmm6 ^= xmm7
4050# asm 1: pxor <xmm7=int6464#8,<xmm6=int6464#7
4051# asm 2: pxor <xmm7=%xmm7,<xmm6=%xmm6
4052pxor %xmm7,%xmm6
4053
4054# qhasm: xmm4 ^= xmm0
4055# asm 1: pxor <xmm0=int6464#1,<xmm4=int6464#5
4056# asm 2: pxor <xmm0=%xmm0,<xmm4=%xmm4
4057pxor %xmm0,%xmm4
4058
4059# qhasm: xmm0 ^= xmm1
4060# asm 1: pxor <xmm1=int6464#2,<xmm0=int6464#1
4061# asm 2: pxor <xmm1=%xmm1,<xmm0=%xmm0
4062pxor %xmm1,%xmm0
4063
4064# qhasm: xmm1 ^= xmm5
4065# asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2
4066# asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1
4067pxor %xmm5,%xmm1
4068
4069# qhasm: xmm5 ^= xmm3
4070# asm 1: pxor <xmm3=int6464#4,<xmm5=int6464#6
4071# asm 2: pxor <xmm3=%xmm3,<xmm5=%xmm5
4072pxor %xmm3,%xmm5
4073
4074# qhasm: xmm6 ^= xmm5
4075# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
4076# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
4077pxor %xmm5,%xmm6
4078
4079# qhasm: xmm3 ^= xmm2
4080# asm 1: pxor <xmm2=int6464#3,<xmm3=int6464#4
4081# asm 2: pxor <xmm2=%xmm2,<xmm3=%xmm3
4082pxor %xmm2,%xmm3
4083
4084# qhasm: xmm2 ^= xmm5
4085# asm 1: pxor <xmm5=int6464#6,<xmm2=int6464#3
4086# asm 2: pxor <xmm5=%xmm5,<xmm2=%xmm2
4087pxor %xmm5,%xmm2
4088
4089# qhasm: xmm4 ^= xmm2
4090# asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5
4091# asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4
4092pxor %xmm2,%xmm4
4093
4094# qhasm: xmm6 ^= RCON
4095# asm 1: pxor RCON,<xmm6=int6464#7
4096# asm 2: pxor RCON,<xmm6=%xmm6
4097pxor RCON,%xmm6
4098
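# note: same round-constant step as above, now hitting xmm6, which
# appears to be bit-slice 2 at this point, i.e. rcon = 0x04 for the
# round key produced below.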
4099# qhasm: shuffle bytes of xmm0 by EXPB0
4100# asm 1: pshufb EXPB0,<xmm0=int6464#1
4101# asm 2: pshufb EXPB0,<xmm0=%xmm0
4102pshufb EXPB0,%xmm0
4103
4104# qhasm: shuffle bytes of xmm1 by EXPB0
4105# asm 1: pshufb EXPB0,<xmm1=int6464#2
4106# asm 2: pshufb EXPB0,<xmm1=%xmm1
4107pshufb EXPB0,%xmm1
4108
4109# qhasm: shuffle bytes of xmm6 by EXPB0
4110# asm 1: pshufb EXPB0,<xmm6=int6464#7
4111# asm 2: pshufb EXPB0,<xmm6=%xmm6
4112pshufb EXPB0,%xmm6
4113
4114# qhasm: shuffle bytes of xmm4 by EXPB0
4115# asm 1: pshufb EXPB0,<xmm4=int6464#5
4116# asm 2: pshufb EXPB0,<xmm4=%xmm4
4117pshufb EXPB0,%xmm4
4118
4119# qhasm: shuffle bytes of xmm2 by EXPB0
4120# asm 1: pshufb EXPB0,<xmm2=int6464#3
4121# asm 2: pshufb EXPB0,<xmm2=%xmm2
4122pshufb EXPB0,%xmm2
4123
4124# qhasm: shuffle bytes of xmm7 by EXPB0
4125# asm 1: pshufb EXPB0,<xmm7=int6464#8
4126# asm 2: pshufb EXPB0,<xmm7=%xmm7
4127pshufb EXPB0,%xmm7
4128
4129# qhasm: shuffle bytes of xmm3 by EXPB0
4130# asm 1: pshufb EXPB0,<xmm3=int6464#4
4131# asm 2: pshufb EXPB0,<xmm3=%xmm3
4132pshufb EXPB0,%xmm3
4133
4134# qhasm: shuffle bytes of xmm5 by EXPB0
4135# asm 1: pshufb EXPB0,<xmm5=int6464#6
4136# asm 2: pshufb EXPB0,<xmm5=%xmm5
4137pshufb EXPB0,%xmm5
4138
4139# qhasm: xmm8 = *(int128 *)(c + 256)
4140# asm 1: movdqa 256(<c=int64#1),>xmm8=int6464#9
4141# asm 2: movdqa 256(<c=%rdi),>xmm8=%xmm8
4142movdqa 256(%rdi),%xmm8
4143
4144# qhasm: xmm9 = *(int128 *)(c + 272)
4145# asm 1: movdqa 272(<c=int64#1),>xmm9=int6464#10
4146# asm 2: movdqa 272(<c=%rdi),>xmm9=%xmm9
4147movdqa 272(%rdi),%xmm9
4148
4149# qhasm: xmm10 = *(int128 *)(c + 288)
4150# asm 1: movdqa 288(<c=int64#1),>xmm10=int6464#11
4151# asm 2: movdqa 288(<c=%rdi),>xmm10=%xmm10
4152movdqa 288(%rdi),%xmm10
4153
4154# qhasm: xmm11 = *(int128 *)(c + 304)
4155# asm 1: movdqa 304(<c=int64#1),>xmm11=int6464#12
4156# asm 2: movdqa 304(<c=%rdi),>xmm11=%xmm11
4157movdqa 304(%rdi),%xmm11
4158
4159# qhasm: xmm12 = *(int128 *)(c + 320)
4160# asm 1: movdqa 320(<c=int64#1),>xmm12=int6464#13
4161# asm 2: movdqa 320(<c=%rdi),>xmm12=%xmm12
4162movdqa 320(%rdi),%xmm12
4163
4164# qhasm: xmm13 = *(int128 *)(c + 336)
4165# asm 1: movdqa 336(<c=int64#1),>xmm13=int6464#14
4166# asm 2: movdqa 336(<c=%rdi),>xmm13=%xmm13
4167movdqa 336(%rdi),%xmm13
4168
4169# qhasm: xmm14 = *(int128 *)(c + 352)
4170# asm 1: movdqa 352(<c=int64#1),>xmm14=int6464#15
4171# asm 2: movdqa 352(<c=%rdi),>xmm14=%xmm14
4172movdqa 352(%rdi),%xmm14
4173
4174# qhasm: xmm15 = *(int128 *)(c + 368)
4175# asm 1: movdqa 368(<c=int64#1),>xmm15=int6464#16
4176# asm 2: movdqa 368(<c=%rdi),>xmm15=%xmm15
4177movdqa 368(%rdi),%xmm15
4178
4179# qhasm: xmm8 ^= ONE
4180# asm 1: pxor ONE,<xmm8=int6464#9
4181# asm 2: pxor ONE,<xmm8=%xmm8
4182pxor ONE,%xmm8
4183
4184# qhasm: xmm9 ^= ONE
4185# asm 1: pxor ONE,<xmm9=int6464#10
4186# asm 2: pxor ONE,<xmm9=%xmm9
4187pxor ONE,%xmm9
4188
4189# qhasm: xmm13 ^= ONE
4190# asm 1: pxor ONE,<xmm13=int6464#14
4191# asm 2: pxor ONE,<xmm13=%xmm13
4192pxor ONE,%xmm13
4193
4194# qhasm: xmm14 ^= ONE
4195# asm 1: pxor ONE,<xmm14=int6464#15
4196# asm 2: pxor ONE,<xmm14=%xmm14
4197pxor ONE,%xmm14
4198
4199# qhasm: xmm0 ^= xmm8
4200# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
4201# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
4202pxor %xmm8,%xmm0
4203
4204# qhasm: xmm1 ^= xmm9
4205# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
4206# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
4207pxor %xmm9,%xmm1
4208
4209# qhasm: xmm6 ^= xmm10
4210# asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7
4211# asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6
4212pxor %xmm10,%xmm6
4213
4214# qhasm: xmm4 ^= xmm11
4215# asm 1: pxor <xmm11=int6464#12,<xmm4=int6464#5
4216# asm 2: pxor <xmm11=%xmm11,<xmm4=%xmm4
4217pxor %xmm11,%xmm4
4218
4219# qhasm: xmm2 ^= xmm12
4220# asm 1: pxor <xmm12=int6464#13,<xmm2=int6464#3
4221# asm 2: pxor <xmm12=%xmm12,<xmm2=%xmm2
4222pxor %xmm12,%xmm2
4223
4224# qhasm: xmm7 ^= xmm13
4225# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
4226# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
4227pxor %xmm13,%xmm7
4228
4229# qhasm: xmm3 ^= xmm14
4230# asm 1: pxor <xmm14=int6464#15,<xmm3=int6464#4
4231# asm 2: pxor <xmm14=%xmm14,<xmm3=%xmm3
4232pxor %xmm14,%xmm3
4233
4234# qhasm: xmm5 ^= xmm15
4235# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
4236# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
4237pxor %xmm15,%xmm5
4238
4239# qhasm: uint32323232 xmm8 >>= 8
4240# asm 1: psrld $8,<xmm8=int6464#9
4241# asm 2: psrld $8,<xmm8=%xmm8
4242psrld $8,%xmm8
4243
4244# qhasm: uint32323232 xmm9 >>= 8
4245# asm 1: psrld $8,<xmm9=int6464#10
4246# asm 2: psrld $8,<xmm9=%xmm9
4247psrld $8,%xmm9
4248
4249# qhasm: uint32323232 xmm10 >>= 8
4250# asm 1: psrld $8,<xmm10=int6464#11
4251# asm 2: psrld $8,<xmm10=%xmm10
4252psrld $8,%xmm10
4253
4254# qhasm: uint32323232 xmm11 >>= 8
4255# asm 1: psrld $8,<xmm11=int6464#12
4256# asm 2: psrld $8,<xmm11=%xmm11
4257psrld $8,%xmm11
4258
4259# qhasm: uint32323232 xmm12 >>= 8
4260# asm 1: psrld $8,<xmm12=int6464#13
4261# asm 2: psrld $8,<xmm12=%xmm12
4262psrld $8,%xmm12
4263
4264# qhasm: uint32323232 xmm13 >>= 8
4265# asm 1: psrld $8,<xmm13=int6464#14
4266# asm 2: psrld $8,<xmm13=%xmm13
4267psrld $8,%xmm13
4268
4269# qhasm: uint32323232 xmm14 >>= 8
4270# asm 1: psrld $8,<xmm14=int6464#15
4271# asm 2: psrld $8,<xmm14=%xmm14
4272psrld $8,%xmm14
4273
4274# qhasm: uint32323232 xmm15 >>= 8
4275# asm 1: psrld $8,<xmm15=int6464#16
4276# asm 2: psrld $8,<xmm15=%xmm15
4277psrld $8,%xmm15
4278
4279# qhasm: xmm0 ^= xmm8
4280# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
4281# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
4282pxor %xmm8,%xmm0
4283
4284# qhasm: xmm1 ^= xmm9
4285# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
4286# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
4287pxor %xmm9,%xmm1
4288
4289# qhasm: xmm6 ^= xmm10
4290# asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7
4291# asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6
4292pxor %xmm10,%xmm6
4293
4294# qhasm: xmm4 ^= xmm11
4295# asm 1: pxor <xmm11=int6464#12,<xmm4=int6464#5
4296# asm 2: pxor <xmm11=%xmm11,<xmm4=%xmm4
4297pxor %xmm11,%xmm4
4298
4299# qhasm: xmm2 ^= xmm12
4300# asm 1: pxor <xmm12=int6464#13,<xmm2=int6464#3
4301# asm 2: pxor <xmm12=%xmm12,<xmm2=%xmm2
4302pxor %xmm12,%xmm2
4303
4304# qhasm: xmm7 ^= xmm13
4305# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
4306# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
4307pxor %xmm13,%xmm7
4308
4309# qhasm: xmm3 ^= xmm14
4310# asm 1: pxor <xmm14=int6464#15,<xmm3=int6464#4
4311# asm 2: pxor <xmm14=%xmm14,<xmm3=%xmm3
4312pxor %xmm14,%xmm3
4313
4314# qhasm: xmm5 ^= xmm15
4315# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
4316# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
4317pxor %xmm15,%xmm5
4318
4319# qhasm: uint32323232 xmm8 >>= 8
4320# asm 1: psrld $8,<xmm8=int6464#9
4321# asm 2: psrld $8,<xmm8=%xmm8
4322psrld $8,%xmm8
4323
4324# qhasm: uint32323232 xmm9 >>= 8
4325# asm 1: psrld $8,<xmm9=int6464#10
4326# asm 2: psrld $8,<xmm9=%xmm9
4327psrld $8,%xmm9
4328
4329# qhasm: uint32323232 xmm10 >>= 8
4330# asm 1: psrld $8,<xmm10=int6464#11
4331# asm 2: psrld $8,<xmm10=%xmm10
4332psrld $8,%xmm10
4333
4334# qhasm: uint32323232 xmm11 >>= 8
4335# asm 1: psrld $8,<xmm11=int6464#12
4336# asm 2: psrld $8,<xmm11=%xmm11
4337psrld $8,%xmm11
4338
4339# qhasm: uint32323232 xmm12 >>= 8
4340# asm 1: psrld $8,<xmm12=int6464#13
4341# asm 2: psrld $8,<xmm12=%xmm12
4342psrld $8,%xmm12
4343
4344# qhasm: uint32323232 xmm13 >>= 8
4345# asm 1: psrld $8,<xmm13=int6464#14
4346# asm 2: psrld $8,<xmm13=%xmm13
4347psrld $8,%xmm13
4348
4349# qhasm: uint32323232 xmm14 >>= 8
4350# asm 1: psrld $8,<xmm14=int6464#15
4351# asm 2: psrld $8,<xmm14=%xmm14
4352psrld $8,%xmm14
4353
4354# qhasm: uint32323232 xmm15 >>= 8
4355# asm 1: psrld $8,<xmm15=int6464#16
4356# asm 2: psrld $8,<xmm15=%xmm15
4357psrld $8,%xmm15
4358
4359# qhasm: xmm0 ^= xmm8
4360# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
4361# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
4362pxor %xmm8,%xmm0
4363
4364# qhasm: xmm1 ^= xmm9
4365# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
4366# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
4367pxor %xmm9,%xmm1
4368
4369# qhasm: xmm6 ^= xmm10
4370# asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7
4371# asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6
4372pxor %xmm10,%xmm6
4373
4374# qhasm: xmm4 ^= xmm11
4375# asm 1: pxor <xmm11=int6464#12,<xmm4=int6464#5
4376# asm 2: pxor <xmm11=%xmm11,<xmm4=%xmm4
4377pxor %xmm11,%xmm4
4378
4379# qhasm: xmm2 ^= xmm12
4380# asm 1: pxor <xmm12=int6464#13,<xmm2=int6464#3
4381# asm 2: pxor <xmm12=%xmm12,<xmm2=%xmm2
4382pxor %xmm12,%xmm2
4383
4384# qhasm: xmm7 ^= xmm13
4385# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
4386# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
4387pxor %xmm13,%xmm7
4388
4389# qhasm: xmm3 ^= xmm14
4390# asm 1: pxor <xmm14=int6464#15,<xmm3=int6464#4
4391# asm 2: pxor <xmm14=%xmm14,<xmm3=%xmm3
4392pxor %xmm14,%xmm3
4393
4394# qhasm: xmm5 ^= xmm15
4395# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
4396# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
4397pxor %xmm15,%xmm5
4398
4399# qhasm: uint32323232 xmm8 >>= 8
4400# asm 1: psrld $8,<xmm8=int6464#9
4401# asm 2: psrld $8,<xmm8=%xmm8
4402psrld $8,%xmm8
4403
4404# qhasm: uint32323232 xmm9 >>= 8
4405# asm 1: psrld $8,<xmm9=int6464#10
4406# asm 2: psrld $8,<xmm9=%xmm9
4407psrld $8,%xmm9
4408
4409# qhasm: uint32323232 xmm10 >>= 8
4410# asm 1: psrld $8,<xmm10=int6464#11
4411# asm 2: psrld $8,<xmm10=%xmm10
4412psrld $8,%xmm10
4413
4414# qhasm: uint32323232 xmm11 >>= 8
4415# asm 1: psrld $8,<xmm11=int6464#12
4416# asm 2: psrld $8,<xmm11=%xmm11
4417psrld $8,%xmm11
4418
4419# qhasm: uint32323232 xmm12 >>= 8
4420# asm 1: psrld $8,<xmm12=int6464#13
4421# asm 2: psrld $8,<xmm12=%xmm12
4422psrld $8,%xmm12
4423
4424# qhasm: uint32323232 xmm13 >>= 8
4425# asm 1: psrld $8,<xmm13=int6464#14
4426# asm 2: psrld $8,<xmm13=%xmm13
4427psrld $8,%xmm13
4428
4429# qhasm: uint32323232 xmm14 >>= 8
4430# asm 1: psrld $8,<xmm14=int6464#15
4431# asm 2: psrld $8,<xmm14=%xmm14
4432psrld $8,%xmm14
4433
4434# qhasm: uint32323232 xmm15 >>= 8
4435# asm 1: psrld $8,<xmm15=int6464#16
4436# asm 2: psrld $8,<xmm15=%xmm15
4437psrld $8,%xmm15
4438
4439# qhasm: xmm0 ^= xmm8
4440# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
4441# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
4442pxor %xmm8,%xmm0
4443
4444# qhasm: xmm1 ^= xmm9
4445# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
4446# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
4447pxor %xmm9,%xmm1
4448
4449# qhasm: xmm6 ^= xmm10
4450# asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7
4451# asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6
4452pxor %xmm10,%xmm6
4453
4454# qhasm: xmm4 ^= xmm11
4455# asm 1: pxor <xmm11=int6464#12,<xmm4=int6464#5
4456# asm 2: pxor <xmm11=%xmm11,<xmm4=%xmm4
4457pxor %xmm11,%xmm4
4458
4459# qhasm: xmm2 ^= xmm12
4460# asm 1: pxor <xmm12=int6464#13,<xmm2=int6464#3
4461# asm 2: pxor <xmm12=%xmm12,<xmm2=%xmm2
4462pxor %xmm12,%xmm2
4463
4464# qhasm: xmm7 ^= xmm13
4465# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
4466# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
4467pxor %xmm13,%xmm7
4468
4469# qhasm: xmm3 ^= xmm14
4470# asm 1: pxor <xmm14=int6464#15,<xmm3=int6464#4
4471# asm 2: pxor <xmm14=%xmm14,<xmm3=%xmm3
4472pxor %xmm14,%xmm3
4473
4474# qhasm: xmm5 ^= xmm15
4475# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
4476# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
4477pxor %xmm15,%xmm5
4478
4479# qhasm: *(int128 *)(c + 384) = xmm0
4480# asm 1: movdqa <xmm0=int6464#1,384(<c=int64#1)
4481# asm 2: movdqa <xmm0=%xmm0,384(<c=%rdi)
4482movdqa %xmm0,384(%rdi)
4483
4484# qhasm: *(int128 *)(c + 400) = xmm1
4485# asm 1: movdqa <xmm1=int6464#2,400(<c=int64#1)
4486# asm 2: movdqa <xmm1=%xmm1,400(<c=%rdi)
4487movdqa %xmm1,400(%rdi)
4488
4489# qhasm: *(int128 *)(c + 416) = xmm6
4490# asm 1: movdqa <xmm6=int6464#7,416(<c=int64#1)
4491# asm 2: movdqa <xmm6=%xmm6,416(<c=%rdi)
4492movdqa %xmm6,416(%rdi)
4493
4494# qhasm: *(int128 *)(c + 432) = xmm4
4495# asm 1: movdqa <xmm4=int6464#5,432(<c=int64#1)
4496# asm 2: movdqa <xmm4=%xmm4,432(<c=%rdi)
4497movdqa %xmm4,432(%rdi)
4498
4499# qhasm: *(int128 *)(c + 448) = xmm2
4500# asm 1: movdqa <xmm2=int6464#3,448(<c=int64#1)
4501# asm 2: movdqa <xmm2=%xmm2,448(<c=%rdi)
4502movdqa %xmm2,448(%rdi)
4503
4504# qhasm: *(int128 *)(c + 464) = xmm7
4505# asm 1: movdqa <xmm7=int6464#8,464(<c=int64#1)
4506# asm 2: movdqa <xmm7=%xmm7,464(<c=%rdi)
4507movdqa %xmm7,464(%rdi)
4508
4509# qhasm: *(int128 *)(c + 480) = xmm3
4510# asm 1: movdqa <xmm3=int6464#4,480(<c=int64#1)
4511# asm 2: movdqa <xmm3=%xmm3,480(<c=%rdi)
4512movdqa %xmm3,480(%rdi)
4513
4514# qhasm: *(int128 *)(c + 496) = xmm5
4515# asm 1: movdqa <xmm5=int6464#6,496(<c=int64#1)
4516# asm 2: movdqa <xmm5=%xmm5,496(<c=%rdi)
4517movdqa %xmm5,496(%rdi)
4518
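# note: second expansion round complete -- S-box, round constant, key
# load from c+256, word fold, store at c+384..c+496; the same sequence
# presumably repeats for the remaining round keys.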
4519# qhasm: xmm0 ^= ONE
4520# asm 1: pxor ONE,<xmm0=int6464#1
4521# asm 2: pxor ONE,<xmm0=%xmm0
4522pxor ONE,%xmm0
4523
4524# qhasm: xmm1 ^= ONE
4525# asm 1: pxor ONE,<xmm1=int6464#2
4526# asm 2: pxor ONE,<xmm1=%xmm1
4527pxor ONE,%xmm1
4528
4529# qhasm: xmm7 ^= ONE
4530# asm 1: pxor ONE,<xmm7=int6464#8
4531# asm 2: pxor ONE,<xmm7=%xmm7
4532pxor ONE,%xmm7
4533
4534# qhasm: xmm3 ^= ONE
4535# asm 1: pxor ONE,<xmm3=int6464#4
4536# asm 2: pxor ONE,<xmm3=%xmm3
4537pxor ONE,%xmm3
4538
4539# qhasm: shuffle bytes of xmm0 by ROTB
4540# asm 1: pshufb ROTB,<xmm0=int6464#1
4541# asm 2: pshufb ROTB,<xmm0=%xmm0
4542pshufb ROTB,%xmm0
4543
4544# qhasm: shuffle bytes of xmm1 by ROTB
4545# asm 1: pshufb ROTB,<xmm1=int6464#2
4546# asm 2: pshufb ROTB,<xmm1=%xmm1
4547pshufb ROTB,%xmm1
4548
4549# qhasm: shuffle bytes of xmm6 by ROTB
4550# asm 1: pshufb ROTB,<xmm6=int6464#7
4551# asm 2: pshufb ROTB,<xmm6=%xmm6
4552pshufb ROTB,%xmm6
4553
4554# qhasm: shuffle bytes of xmm4 by ROTB
4555# asm 1: pshufb ROTB,<xmm4=int6464#5
4556# asm 2: pshufb ROTB,<xmm4=%xmm4
4557pshufb ROTB,%xmm4
4558
4559# qhasm: shuffle bytes of xmm2 by ROTB
4560# asm 1: pshufb ROTB,<xmm2=int6464#3
4561# asm 2: pshufb ROTB,<xmm2=%xmm2
4562pshufb ROTB,%xmm2
4563
4564# qhasm: shuffle bytes of xmm7 by ROTB
4565# asm 1: pshufb ROTB,<xmm7=int6464#8
4566# asm 2: pshufb ROTB,<xmm7=%xmm7
4567pshufb ROTB,%xmm7
4568
4569# qhasm: shuffle bytes of xmm3 by ROTB
4570# asm 1: pshufb ROTB,<xmm3=int6464#4
4571# asm 2: pshufb ROTB,<xmm3=%xmm3
4572pshufb ROTB,%xmm3
4573
4574# qhasm: shuffle bytes of xmm5 by ROTB
4575# asm 1: pshufb ROTB,<xmm5=int6464#6
4576# asm 2: pshufb ROTB,<xmm5=%xmm5
4577pshufb ROTB,%xmm5
4578
4579# qhasm: xmm7 ^= xmm3
4580# asm 1: pxor <xmm3=int6464#4,<xmm7=int6464#8
4581# asm 2: pxor <xmm3=%xmm3,<xmm7=%xmm7
4582pxor %xmm3,%xmm7
4583
4584# qhasm: xmm6 ^= xmm1
4585# asm 1: pxor <xmm1=int6464#2,<xmm6=int6464#7
4586# asm 2: pxor <xmm1=%xmm1,<xmm6=%xmm6
4587pxor %xmm1,%xmm6
4588
4589# qhasm: xmm7 ^= xmm0
4590# asm 1: pxor <xmm0=int6464#1,<xmm7=int6464#8
4591# asm 2: pxor <xmm0=%xmm0,<xmm7=%xmm7
4592pxor %xmm0,%xmm7
4593
4594# qhasm: xmm3 ^= xmm6
4595# asm 1: pxor <xmm6=int6464#7,<xmm3=int6464#4
4596# asm 2: pxor <xmm6=%xmm6,<xmm3=%xmm3
4597pxor %xmm6,%xmm3
4598
4599# qhasm: xmm4 ^= xmm0
4600# asm 1: pxor <xmm0=int6464#1,<xmm4=int6464#5
4601# asm 2: pxor <xmm0=%xmm0,<xmm4=%xmm4
4602pxor %xmm0,%xmm4
4603
4604# qhasm: xmm3 ^= xmm4
4605# asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4
4606# asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3
4607pxor %xmm4,%xmm3
4608
4609# qhasm: xmm4 ^= xmm5
4610# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
4611# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
4612pxor %xmm5,%xmm4
4613
4614# qhasm: xmm4 ^= xmm2
4615# asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5
4616# asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4
4617pxor %xmm2,%xmm4
4618
4619# qhasm: xmm5 ^= xmm7
4620# asm 1: pxor <xmm7=int6464#8,<xmm5=int6464#6
4621# asm 2: pxor <xmm7=%xmm7,<xmm5=%xmm5
4622pxor %xmm7,%xmm5
4623
4624# qhasm: xmm4 ^= xmm1
4625# asm 1: pxor <xmm1=int6464#2,<xmm4=int6464#5
4626# asm 2: pxor <xmm1=%xmm1,<xmm4=%xmm4
4627pxor %xmm1,%xmm4
4628
4629# qhasm: xmm2 ^= xmm7
4630# asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3
4631# asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2
4632pxor %xmm7,%xmm2
4633
4634# qhasm: xmm6 ^= xmm5
4635# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
4636# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
4637pxor %xmm5,%xmm6
4638
4639# qhasm: xmm1 ^= xmm7
4640# asm 1: pxor <xmm7=int6464#8,<xmm1=int6464#2
4641# asm 2: pxor <xmm7=%xmm7,<xmm1=%xmm1
4642pxor %xmm7,%xmm1
4643
4644# qhasm: xmm11 = xmm5
4645# asm 1: movdqa <xmm5=int6464#6,>xmm11=int6464#9
4646# asm 2: movdqa <xmm5=%xmm5,>xmm11=%xmm8
4647movdqa %xmm5,%xmm8
4648
4649# qhasm: xmm10 = xmm1
4650# asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10
4651# asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9
4652movdqa %xmm1,%xmm9
4653
4654# qhasm: xmm9 = xmm7
4655# asm 1: movdqa <xmm7=int6464#8,>xmm9=int6464#11
4656# asm 2: movdqa <xmm7=%xmm7,>xmm9=%xmm10
4657movdqa %xmm7,%xmm10
4658
4659# qhasm: xmm13 = xmm6
4660# asm 1: movdqa <xmm6=int6464#7,>xmm13=int6464#12
4661# asm 2: movdqa <xmm6=%xmm6,>xmm13=%xmm11
4662movdqa %xmm6,%xmm11
4663
4664# qhasm: xmm12 = xmm3
4665# asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#13
4666# asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm12
4667movdqa %xmm3,%xmm12
4668
4669# qhasm: xmm11 ^= xmm2
4670# asm 1: pxor <xmm2=int6464#3,<xmm11=int6464#9
4671# asm 2: pxor <xmm2=%xmm2,<xmm11=%xmm8
4672pxor %xmm2,%xmm8
4673
4674# qhasm: xmm10 ^= xmm6
4675# asm 1: pxor <xmm6=int6464#7,<xmm10=int6464#10
4676# asm 2: pxor <xmm6=%xmm6,<xmm10=%xmm9
4677pxor %xmm6,%xmm9
4678
4679# qhasm: xmm9 ^= xmm4
4680# asm 1: pxor <xmm4=int6464#5,<xmm9=int6464#11
4681# asm 2: pxor <xmm4=%xmm4,<xmm9=%xmm10
4682pxor %xmm4,%xmm10
4683
4684# qhasm: xmm13 ^= xmm2
4685# asm 1: pxor <xmm2=int6464#3,<xmm13=int6464#12
4686# asm 2: pxor <xmm2=%xmm2,<xmm13=%xmm11
4687pxor %xmm2,%xmm11
4688
4689# qhasm: xmm12 ^= xmm0
4690# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13
4691# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12
4692pxor %xmm0,%xmm12
4693
4694# qhasm: xmm14 = xmm11
4695# asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14
4696# asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13
4697movdqa %xmm8,%xmm13
4698
4699# qhasm: xmm8 = xmm10
4700# asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15
4701# asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14
4702movdqa %xmm9,%xmm14
4703
4704# qhasm: xmm15 = xmm11
4705# asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16
4706# asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15
4707movdqa %xmm8,%xmm15
4708
4709# qhasm: xmm10 |= xmm9
4710# asm 1: por <xmm9=int6464#11,<xmm10=int6464#10
4711# asm 2: por <xmm9=%xmm10,<xmm10=%xmm9
4712por %xmm10,%xmm9
4713
4714# qhasm: xmm11 |= xmm12
4715# asm 1: por <xmm12=int6464#13,<xmm11=int6464#9
4716# asm 2: por <xmm12=%xmm12,<xmm11=%xmm8
4717por %xmm12,%xmm8
4718
4719# qhasm: xmm15 ^= xmm8
4720# asm 1: pxor <xmm8=int6464#15,<xmm15=int6464#16
4721# asm 2: pxor <xmm8=%xmm14,<xmm15=%xmm15
4722pxor %xmm14,%xmm15
4723
4724# qhasm: xmm14 &= xmm12
4725# asm 1: pand <xmm12=int6464#13,<xmm14=int6464#14
4726# asm 2: pand <xmm12=%xmm12,<xmm14=%xmm13
4727pand %xmm12,%xmm13
4728
4729# qhasm: xmm8 &= xmm9
4730# asm 1: pand <xmm9=int6464#11,<xmm8=int6464#15
4731# asm 2: pand <xmm9=%xmm10,<xmm8=%xmm14
4732pand %xmm10,%xmm14
4733
4734# qhasm: xmm12 ^= xmm9
4735# asm 1: pxor <xmm9=int6464#11,<xmm12=int6464#13
4736# asm 2: pxor <xmm9=%xmm10,<xmm12=%xmm12
4737pxor %xmm10,%xmm12
4738
4739# qhasm: xmm15 &= xmm12
4740# asm 1: pand <xmm12=int6464#13,<xmm15=int6464#16
4741# asm 2: pand <xmm12=%xmm12,<xmm15=%xmm15
4742pand %xmm12,%xmm15
4743
4744# qhasm: xmm12 = xmm4
4745# asm 1: movdqa <xmm4=int6464#5,>xmm12=int6464#11
4746# asm 2: movdqa <xmm4=%xmm4,>xmm12=%xmm10
4747movdqa %xmm4,%xmm10
4748
4749# qhasm: xmm12 ^= xmm0
4750# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#11
4751# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm10
4752pxor %xmm0,%xmm10
4753
4754# qhasm: xmm13 &= xmm12
4755# asm 1: pand <xmm12=int6464#11,<xmm13=int6464#12
4756# asm 2: pand <xmm12=%xmm10,<xmm13=%xmm11
4757pand %xmm10,%xmm11
4758
4759# qhasm: xmm11 ^= xmm13
4760# asm 1: pxor <xmm13=int6464#12,<xmm11=int6464#9
4761# asm 2: pxor <xmm13=%xmm11,<xmm11=%xmm8
4762pxor %xmm11,%xmm8
4763
4764# qhasm: xmm10 ^= xmm13
4765# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
4766# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
4767pxor %xmm11,%xmm9
4768
4769# qhasm: xmm13 = xmm5
4770# asm 1: movdqa <xmm5=int6464#6,>xmm13=int6464#11
4771# asm 2: movdqa <xmm5=%xmm5,>xmm13=%xmm10
4772movdqa %xmm5,%xmm10
4773
4774# qhasm: xmm13 ^= xmm1
4775# asm 1: pxor <xmm1=int6464#2,<xmm13=int6464#11
4776# asm 2: pxor <xmm1=%xmm1,<xmm13=%xmm10
4777pxor %xmm1,%xmm10
4778
4779# qhasm: xmm12 = xmm7
4780# asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#12
4781# asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm11
4782movdqa %xmm7,%xmm11
4783
4784# qhasm: xmm9 = xmm13
4785# asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13
4786# asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12
4787movdqa %xmm10,%xmm12
4788
4789# qhasm: xmm12 ^= xmm3
4790# asm 1: pxor <xmm3=int6464#4,<xmm12=int6464#12
4791# asm 2: pxor <xmm3=%xmm3,<xmm12=%xmm11
4792pxor %xmm3,%xmm11
4793
4794# qhasm: xmm9 |= xmm12
4795# asm 1: por <xmm12=int6464#12,<xmm9=int6464#13
4796# asm 2: por <xmm12=%xmm11,<xmm9=%xmm12
4797por %xmm11,%xmm12
4798
4799# qhasm: xmm13 &= xmm12
4800# asm 1: pand <xmm12=int6464#12,<xmm13=int6464#11
4801# asm 2: pand <xmm12=%xmm11,<xmm13=%xmm10
4802pand %xmm11,%xmm10
4803
4804# qhasm: xmm8 ^= xmm13
4805# asm 1: pxor <xmm13=int6464#11,<xmm8=int6464#15
4806# asm 2: pxor <xmm13=%xmm10,<xmm8=%xmm14
4807pxor %xmm10,%xmm14
4808
4809# qhasm: xmm11 ^= xmm15
4810# asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#9
4811# asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm8
4812pxor %xmm15,%xmm8
4813
4814# qhasm: xmm10 ^= xmm14
4815# asm 1: pxor <xmm14=int6464#14,<xmm10=int6464#10
4816# asm 2: pxor <xmm14=%xmm13,<xmm10=%xmm9
4817pxor %xmm13,%xmm9
4818
4819# qhasm: xmm9 ^= xmm15
4820# asm 1: pxor <xmm15=int6464#16,<xmm9=int6464#13
4821# asm 2: pxor <xmm15=%xmm15,<xmm9=%xmm12
4822pxor %xmm15,%xmm12
4823
4824# qhasm: xmm8 ^= xmm14
4825# asm 1: pxor <xmm14=int6464#14,<xmm8=int6464#15
4826# asm 2: pxor <xmm14=%xmm13,<xmm8=%xmm14
4827pxor %xmm13,%xmm14
4828
4829# qhasm: xmm9 ^= xmm14
4830# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
4831# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
4832pxor %xmm13,%xmm12
4833
4834# qhasm: xmm12 = xmm6
4835# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#11
4836# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm10
4837movdqa %xmm6,%xmm10
4838
4839# qhasm: xmm13 = xmm2
4840# asm 1: movdqa <xmm2=int6464#3,>xmm13=int6464#12
4841# asm 2: movdqa <xmm2=%xmm2,>xmm13=%xmm11
4842movdqa %xmm2,%xmm11
4843
4844# qhasm: xmm14 = xmm1
4845# asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14
4846# asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13
4847movdqa %xmm1,%xmm13
4848
4849# qhasm: xmm15 = xmm5
4850# asm 1: movdqa <xmm5=int6464#6,>xmm15=int6464#16
4851# asm 2: movdqa <xmm5=%xmm5,>xmm15=%xmm15
4852movdqa %xmm5,%xmm15
4853
4854# qhasm: xmm12 &= xmm4
4855# asm 1: pand <xmm4=int6464#5,<xmm12=int6464#11
4856# asm 2: pand <xmm4=%xmm4,<xmm12=%xmm10
4857pand %xmm4,%xmm10
4858
4859# qhasm: xmm13 &= xmm0
4860# asm 1: pand <xmm0=int6464#1,<xmm13=int6464#12
4861# asm 2: pand <xmm0=%xmm0,<xmm13=%xmm11
4862pand %xmm0,%xmm11
4863
4864# qhasm: xmm14 &= xmm7
4865# asm 1: pand <xmm7=int6464#8,<xmm14=int6464#14
4866# asm 2: pand <xmm7=%xmm7,<xmm14=%xmm13
4867pand %xmm7,%xmm13
4868
4869# qhasm: xmm15 |= xmm3
4870# asm 1: por <xmm3=int6464#4,<xmm15=int6464#16
4871# asm 2: por <xmm3=%xmm3,<xmm15=%xmm15
4872por %xmm3,%xmm15
4873
4874# qhasm: xmm11 ^= xmm12
4875# asm 1: pxor <xmm12=int6464#11,<xmm11=int6464#9
4876# asm 2: pxor <xmm12=%xmm10,<xmm11=%xmm8
4877pxor %xmm10,%xmm8
4878
4879# qhasm: xmm10 ^= xmm13
4880# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
4881# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
4882pxor %xmm11,%xmm9
4883
4884# qhasm: xmm9 ^= xmm14
4885# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
4886# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
4887pxor %xmm13,%xmm12
4888
4889# qhasm: xmm8 ^= xmm15
4890# asm 1: pxor <xmm15=int6464#16,<xmm8=int6464#15
4891# asm 2: pxor <xmm15=%xmm15,<xmm8=%xmm14
4892pxor %xmm15,%xmm14
4893
4894# qhasm: xmm12 = xmm11
4895# asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11
4896# asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10
4897movdqa %xmm8,%xmm10
4898
4899# qhasm: xmm12 ^= xmm10
4900# asm 1: pxor <xmm10=int6464#10,<xmm12=int6464#11
4901# asm 2: pxor <xmm10=%xmm9,<xmm12=%xmm10
4902pxor %xmm9,%xmm10
4903
4904# qhasm: xmm11 &= xmm9
4905# asm 1: pand <xmm9=int6464#13,<xmm11=int6464#9
4906# asm 2: pand <xmm9=%xmm12,<xmm11=%xmm8
4907pand %xmm12,%xmm8
4908
4909# qhasm: xmm14 = xmm8
4910# asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12
4911# asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11
4912movdqa %xmm14,%xmm11
4913
4914# qhasm: xmm14 ^= xmm11
4915# asm 1: pxor <xmm11=int6464#9,<xmm14=int6464#12
4916# asm 2: pxor <xmm11=%xmm8,<xmm14=%xmm11
4917pxor %xmm8,%xmm11
4918
4919# qhasm: xmm15 = xmm12
4920# asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14
4921# asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13
4922movdqa %xmm10,%xmm13
4923
4924# qhasm: xmm15 &= xmm14
4925# asm 1: pand <xmm14=int6464#12,<xmm15=int6464#14
4926# asm 2: pand <xmm14=%xmm11,<xmm15=%xmm13
4927pand %xmm11,%xmm13
4928
4929# qhasm: xmm15 ^= xmm10
4930# asm 1: pxor <xmm10=int6464#10,<xmm15=int6464#14
4931# asm 2: pxor <xmm10=%xmm9,<xmm15=%xmm13
4932pxor %xmm9,%xmm13
4933
4934# qhasm: xmm13 = xmm9
4935# asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16
4936# asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15
4937movdqa %xmm12,%xmm15
4938
4939# qhasm: xmm13 ^= xmm8
4940# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
4941# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
4942pxor %xmm14,%xmm15
4943
4944# qhasm: xmm11 ^= xmm10
4945# asm 1: pxor <xmm10=int6464#10,<xmm11=int6464#9
4946# asm 2: pxor <xmm10=%xmm9,<xmm11=%xmm8
4947pxor %xmm9,%xmm8
4948
4949# qhasm: xmm13 &= xmm11
4950# asm 1: pand <xmm11=int6464#9,<xmm13=int6464#16
4951# asm 2: pand <xmm11=%xmm8,<xmm13=%xmm15
4952pand %xmm8,%xmm15
4953
4954# qhasm: xmm13 ^= xmm8
4955# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
4956# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
4957pxor %xmm14,%xmm15
4958
4959# qhasm: xmm9 ^= xmm13
4960# asm 1: pxor <xmm13=int6464#16,<xmm9=int6464#13
4961# asm 2: pxor <xmm13=%xmm15,<xmm9=%xmm12
4962pxor %xmm15,%xmm12
4963
4964# qhasm: xmm10 = xmm14
4965# asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9
4966# asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8
4967movdqa %xmm11,%xmm8
4968
4969# qhasm: xmm10 ^= xmm13
4970# asm 1: pxor <xmm13=int6464#16,<xmm10=int6464#9
4971# asm 2: pxor <xmm13=%xmm15,<xmm10=%xmm8
4972pxor %xmm15,%xmm8
4973
4974# qhasm: xmm10 &= xmm8
4975# asm 1: pand <xmm8=int6464#15,<xmm10=int6464#9
4976# asm 2: pand <xmm8=%xmm14,<xmm10=%xmm8
4977pand %xmm14,%xmm8
4978
4979# qhasm: xmm9 ^= xmm10
4980# asm 1: pxor <xmm10=int6464#9,<xmm9=int6464#13
4981# asm 2: pxor <xmm10=%xmm8,<xmm9=%xmm12
4982pxor %xmm8,%xmm12
4983
4984# qhasm: xmm14 ^= xmm10
4985# asm 1: pxor <xmm10=int6464#9,<xmm14=int6464#12
4986# asm 2: pxor <xmm10=%xmm8,<xmm14=%xmm11
4987pxor %xmm8,%xmm11
4988
4989# qhasm: xmm14 &= xmm15
4990# asm 1: pand <xmm15=int6464#14,<xmm14=int6464#12
4991# asm 2: pand <xmm15=%xmm13,<xmm14=%xmm11
4992pand %xmm13,%xmm11
4993
4994# qhasm: xmm14 ^= xmm12
4995# asm 1: pxor <xmm12=int6464#11,<xmm14=int6464#12
4996# asm 2: pxor <xmm12=%xmm10,<xmm14=%xmm11
4997pxor %xmm10,%xmm11
4998
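# Second half of the S-box: the inverse computed above is multiplied
# back into the input bits (the pand/pxor pairs below), and the final
# pxor chain applies the output linear layer.
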
4999# qhasm: xmm12 = xmm3
5000# asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#9
5001# asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm8
5002movdqa %xmm3,%xmm8
5003
5004# qhasm: xmm8 = xmm7
5005# asm 1: movdqa <xmm7=int6464#8,>xmm8=int6464#10
5006# asm 2: movdqa <xmm7=%xmm7,>xmm8=%xmm9
5007movdqa %xmm7,%xmm9
5008
5009# qhasm: xmm10 = xmm15
5010# asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11
5011# asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10
5012movdqa %xmm13,%xmm10
5013
5014# qhasm: xmm10 ^= xmm14
5015# asm 1: pxor <xmm14=int6464#12,<xmm10=int6464#11
5016# asm 2: pxor <xmm14=%xmm11,<xmm10=%xmm10
5017pxor %xmm11,%xmm10
5018
5019# qhasm: xmm10 &= xmm3
5020# asm 1: pand <xmm3=int6464#4,<xmm10=int6464#11
5021# asm 2: pand <xmm3=%xmm3,<xmm10=%xmm10
5022pand %xmm3,%xmm10
5023
5024# qhasm: xmm3 ^= xmm7
5025# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4
5026# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3
5027pxor %xmm7,%xmm3
5028
5029# qhasm: xmm3 &= xmm14
5030# asm 1: pand <xmm14=int6464#12,<xmm3=int6464#4
5031# asm 2: pand <xmm14=%xmm11,<xmm3=%xmm3
5032pand %xmm11,%xmm3
5033
5034# qhasm: xmm7 &= xmm15
5035# asm 1: pand <xmm15=int6464#14,<xmm7=int6464#8
5036# asm 2: pand <xmm15=%xmm13,<xmm7=%xmm7
5037pand %xmm13,%xmm7
5038
5039# qhasm: xmm3 ^= xmm7
5040# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4
5041# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3
5042pxor %xmm7,%xmm3
5043
5044# qhasm: xmm7 ^= xmm10
5045# asm 1: pxor <xmm10=int6464#11,<xmm7=int6464#8
5046# asm 2: pxor <xmm10=%xmm10,<xmm7=%xmm7
5047pxor %xmm10,%xmm7
5048
5049# qhasm: xmm12 ^= xmm0
5050# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#9
5051# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm8
5052pxor %xmm0,%xmm8
5053
5054# qhasm: xmm8 ^= xmm4
5055# asm 1: pxor <xmm4=int6464#5,<xmm8=int6464#10
5056# asm 2: pxor <xmm4=%xmm4,<xmm8=%xmm9
5057pxor %xmm4,%xmm9
5058
5059# qhasm: xmm15 ^= xmm13
5060# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
5061# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
5062pxor %xmm15,%xmm13
5063
5064# qhasm: xmm14 ^= xmm9
5065# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
5066# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
5067pxor %xmm12,%xmm11
5068
5069# qhasm: xmm11 = xmm15
5070# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
5071# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
5072movdqa %xmm13,%xmm10
5073
5074# qhasm: xmm11 ^= xmm14
5075# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
5076# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
5077pxor %xmm11,%xmm10
5078
5079# qhasm: xmm11 &= xmm12
5080# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
5081# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
5082pand %xmm8,%xmm10
5083
5084# qhasm: xmm12 ^= xmm8
5085# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
5086# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
5087pxor %xmm9,%xmm8
5088
5089# qhasm: xmm12 &= xmm14
5090# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
5091# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
5092pand %xmm11,%xmm8
5093
5094# qhasm: xmm8 &= xmm15
5095# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
5096# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
5097pand %xmm13,%xmm9
5098
5099# qhasm: xmm8 ^= xmm12
5100# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
5101# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
5102pxor %xmm8,%xmm9
5103
5104# qhasm: xmm12 ^= xmm11
5105# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
5106# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
5107pxor %xmm10,%xmm8
5108
5109# qhasm: xmm10 = xmm13
5110# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
5111# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
5112movdqa %xmm15,%xmm10
5113
5114# qhasm: xmm10 ^= xmm9
5115# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
5116# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
5117pxor %xmm12,%xmm10
5118
5119# qhasm: xmm10 &= xmm0
5120# asm 1: pand <xmm0=int6464#1,<xmm10=int6464#11
5121# asm 2: pand <xmm0=%xmm0,<xmm10=%xmm10
5122pand %xmm0,%xmm10
5123
5124# qhasm: xmm0 ^= xmm4
5125# asm 1: pxor <xmm4=int6464#5,<xmm0=int6464#1
5126# asm 2: pxor <xmm4=%xmm4,<xmm0=%xmm0
5127pxor %xmm4,%xmm0
5128
5129# qhasm: xmm0 &= xmm9
5130# asm 1: pand <xmm9=int6464#13,<xmm0=int6464#1
5131# asm 2: pand <xmm9=%xmm12,<xmm0=%xmm0
5132pand %xmm12,%xmm0
5133
5134# qhasm: xmm4 &= xmm13
5135# asm 1: pand <xmm13=int6464#16,<xmm4=int6464#5
5136# asm 2: pand <xmm13=%xmm15,<xmm4=%xmm4
5137pand %xmm15,%xmm4
5138
5139# qhasm: xmm0 ^= xmm4
5140# asm 1: pxor <xmm4=int6464#5,<xmm0=int6464#1
5141# asm 2: pxor <xmm4=%xmm4,<xmm0=%xmm0
5142pxor %xmm4,%xmm0
5143
5144# qhasm: xmm4 ^= xmm10
5145# asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5
5146# asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4
5147pxor %xmm10,%xmm4
5148
5149# qhasm: xmm3 ^= xmm12
5150# asm 1: pxor <xmm12=int6464#9,<xmm3=int6464#4
5151# asm 2: pxor <xmm12=%xmm8,<xmm3=%xmm3
5152pxor %xmm8,%xmm3
5153
5154# qhasm: xmm0 ^= xmm12
5155# asm 1: pxor <xmm12=int6464#9,<xmm0=int6464#1
5156# asm 2: pxor <xmm12=%xmm8,<xmm0=%xmm0
5157pxor %xmm8,%xmm0
5158
5159# qhasm: xmm7 ^= xmm8
5160# asm 1: pxor <xmm8=int6464#10,<xmm7=int6464#8
5161# asm 2: pxor <xmm8=%xmm9,<xmm7=%xmm7
5162pxor %xmm9,%xmm7
5163
5164# qhasm: xmm4 ^= xmm8
5165# asm 1: pxor <xmm8=int6464#10,<xmm4=int6464#5
5166# asm 2: pxor <xmm8=%xmm9,<xmm4=%xmm4
5167pxor %xmm9,%xmm4
5168
5169# qhasm: xmm12 = xmm5
5170# asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#9
5171# asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm8
5172movdqa %xmm5,%xmm8
5173
5174# qhasm: xmm8 = xmm1
5175# asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10
5176# asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9
5177movdqa %xmm1,%xmm9
5178
5179# qhasm: xmm12 ^= xmm2
5180# asm 1: pxor <xmm2=int6464#3,<xmm12=int6464#9
5181# asm 2: pxor <xmm2=%xmm2,<xmm12=%xmm8
5182pxor %xmm2,%xmm8
5183
5184# qhasm: xmm8 ^= xmm6
5185# asm 1: pxor <xmm6=int6464#7,<xmm8=int6464#10
5186# asm 2: pxor <xmm6=%xmm6,<xmm8=%xmm9
5187pxor %xmm6,%xmm9
5188
5189# qhasm: xmm11 = xmm15
5190# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
5191# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
5192movdqa %xmm13,%xmm10
5193
5194# qhasm: xmm11 ^= xmm14
5195# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
5196# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
5197pxor %xmm11,%xmm10
5198
5199# qhasm: xmm11 &= xmm12
5200# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
5201# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
5202pand %xmm8,%xmm10
5203
5204# qhasm: xmm12 ^= xmm8
5205# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
5206# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
5207pxor %xmm9,%xmm8
5208
5209# qhasm: xmm12 &= xmm14
5210# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
5211# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
5212pand %xmm11,%xmm8
5213
5214# qhasm: xmm8 &= xmm15
5215# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
5216# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
5217pand %xmm13,%xmm9
5218
5219# qhasm: xmm8 ^= xmm12
5220# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
5221# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
5222pxor %xmm8,%xmm9
5223
5224# qhasm: xmm12 ^= xmm11
5225# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
5226# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
5227pxor %xmm10,%xmm8
5228
5229# qhasm: xmm10 = xmm13
5230# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
5231# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
5232movdqa %xmm15,%xmm10
5233
5234# qhasm: xmm10 ^= xmm9
5235# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
5236# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
5237pxor %xmm12,%xmm10
5238
5239# qhasm: xmm10 &= xmm2
5240# asm 1: pand <xmm2=int6464#3,<xmm10=int6464#11
5241# asm 2: pand <xmm2=%xmm2,<xmm10=%xmm10
5242pand %xmm2,%xmm10
5243
5244# qhasm: xmm2 ^= xmm6
5245# asm 1: pxor <xmm6=int6464#7,<xmm2=int6464#3
5246# asm 2: pxor <xmm6=%xmm6,<xmm2=%xmm2
5247pxor %xmm6,%xmm2
5248
5249# qhasm: xmm2 &= xmm9
5250# asm 1: pand <xmm9=int6464#13,<xmm2=int6464#3
5251# asm 2: pand <xmm9=%xmm12,<xmm2=%xmm2
5252pand %xmm12,%xmm2
5253
5254# qhasm: xmm6 &= xmm13
5255# asm 1: pand <xmm13=int6464#16,<xmm6=int6464#7
5256# asm 2: pand <xmm13=%xmm15,<xmm6=%xmm6
5257pand %xmm15,%xmm6
5258
5259# qhasm: xmm2 ^= xmm6
5260# asm 1: pxor <xmm6=int6464#7,<xmm2=int6464#3
5261# asm 2: pxor <xmm6=%xmm6,<xmm2=%xmm2
5262pxor %xmm6,%xmm2
5263
5264# qhasm: xmm6 ^= xmm10
5265# asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7
5266# asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6
5267pxor %xmm10,%xmm6
5268
5269# qhasm: xmm15 ^= xmm13
5270# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
5271# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
5272pxor %xmm15,%xmm13
5273
5274# qhasm: xmm14 ^= xmm9
5275# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
5276# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
5277pxor %xmm12,%xmm11
5278
5279# qhasm: xmm11 = xmm15
5280# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
5281# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
5282movdqa %xmm13,%xmm10
5283
5284# qhasm: xmm11 ^= xmm14
5285# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
5286# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
5287pxor %xmm11,%xmm10
5288
5289# qhasm: xmm11 &= xmm5
5290# asm 1: pand <xmm5=int6464#6,<xmm11=int6464#11
5291# asm 2: pand <xmm5=%xmm5,<xmm11=%xmm10
5292pand %xmm5,%xmm10
5293
5294# qhasm: xmm5 ^= xmm1
5295# asm 1: pxor <xmm1=int6464#2,<xmm5=int6464#6
5296# asm 2: pxor <xmm1=%xmm1,<xmm5=%xmm5
5297pxor %xmm1,%xmm5
5298
5299# qhasm: xmm5 &= xmm14
5300# asm 1: pand <xmm14=int6464#12,<xmm5=int6464#6
5301# asm 2: pand <xmm14=%xmm11,<xmm5=%xmm5
5302pand %xmm11,%xmm5
5303
5304# qhasm: xmm1 &= xmm15
5305# asm 1: pand <xmm15=int6464#14,<xmm1=int6464#2
5306# asm 2: pand <xmm15=%xmm13,<xmm1=%xmm1
5307pand %xmm13,%xmm1
5308
5309# qhasm: xmm5 ^= xmm1
5310# asm 1: pxor <xmm1=int6464#2,<xmm5=int6464#6
5311# asm 2: pxor <xmm1=%xmm1,<xmm5=%xmm5
5312pxor %xmm1,%xmm5
5313
5314# qhasm: xmm1 ^= xmm11
5315# asm 1: pxor <xmm11=int6464#11,<xmm1=int6464#2
5316# asm 2: pxor <xmm11=%xmm10,<xmm1=%xmm1
5317pxor %xmm10,%xmm1
5318
5319# qhasm: xmm5 ^= xmm12
5320# asm 1: pxor <xmm12=int6464#9,<xmm5=int6464#6
5321# asm 2: pxor <xmm12=%xmm8,<xmm5=%xmm5
5322pxor %xmm8,%xmm5
5323
5324# qhasm: xmm2 ^= xmm12
5325# asm 1: pxor <xmm12=int6464#9,<xmm2=int6464#3
5326# asm 2: pxor <xmm12=%xmm8,<xmm2=%xmm2
5327pxor %xmm8,%xmm2
5328
5329# qhasm: xmm1 ^= xmm8
5330# asm 1: pxor <xmm8=int6464#10,<xmm1=int6464#2
5331# asm 2: pxor <xmm8=%xmm9,<xmm1=%xmm1
5332pxor %xmm9,%xmm1
5333
5334# qhasm: xmm6 ^= xmm8
5335# asm 1: pxor <xmm8=int6464#10,<xmm6=int6464#7
5336# asm 2: pxor <xmm8=%xmm9,<xmm6=%xmm6
5337pxor %xmm9,%xmm6
5338
5339# qhasm: xmm5 ^= xmm0
5340# asm 1: pxor <xmm0=int6464#1,<xmm5=int6464#6
5341# asm 2: pxor <xmm0=%xmm0,<xmm5=%xmm5
5342pxor %xmm0,%xmm5
5343
5344# qhasm: xmm1 ^= xmm3
5345# asm 1: pxor <xmm3=int6464#4,<xmm1=int6464#2
5346# asm 2: pxor <xmm3=%xmm3,<xmm1=%xmm1
5347pxor %xmm3,%xmm1
5348
5349# qhasm: xmm2 ^= xmm5
5350# asm 1: pxor <xmm5=int6464#6,<xmm2=int6464#3
5351# asm 2: pxor <xmm5=%xmm5,<xmm2=%xmm2
5352pxor %xmm5,%xmm2
5353
5354# qhasm: xmm3 ^= xmm0
5355# asm 1: pxor <xmm0=int6464#1,<xmm3=int6464#4
5356# asm 2: pxor <xmm0=%xmm0,<xmm3=%xmm3
5357pxor %xmm0,%xmm3
5358
5359# qhasm: xmm0 ^= xmm1
5360# asm 1: pxor <xmm1=int6464#2,<xmm0=int6464#1
5361# asm 2: pxor <xmm1=%xmm1,<xmm0=%xmm0
5362pxor %xmm1,%xmm0
5363
5364# qhasm: xmm1 ^= xmm7
5365# asm 1: pxor <xmm7=int6464#8,<xmm1=int6464#2
5366# asm 2: pxor <xmm7=%xmm7,<xmm1=%xmm1
5367pxor %xmm7,%xmm1
5368
5369# qhasm: xmm7 ^= xmm6
5370# asm 1: pxor <xmm6=int6464#7,<xmm7=int6464#8
5371# asm 2: pxor <xmm6=%xmm6,<xmm7=%xmm7
5372pxor %xmm6,%xmm7
5373
5374# qhasm: xmm2 ^= xmm7
5375# asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3
5376# asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2
5377pxor %xmm7,%xmm2
5378
5379# qhasm: xmm6 ^= xmm4
5380# asm 1: pxor <xmm4=int6464#5,<xmm6=int6464#7
5381# asm 2: pxor <xmm4=%xmm4,<xmm6=%xmm6
5382pxor %xmm4,%xmm6
5383
5384# qhasm: xmm4 ^= xmm7
5385# asm 1: pxor <xmm7=int6464#8,<xmm4=int6464#5
5386# asm 2: pxor <xmm7=%xmm7,<xmm4=%xmm4
5387pxor %xmm7,%xmm4
5388
5389# qhasm: xmm3 ^= xmm4
5390# asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4
5391# asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3
5392pxor %xmm4,%xmm3
5393
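# Round constant: RCON flips every bit of the top 32-bit lane of
# slice 3 (%xmm3). Hitting bit slice 3 corresponds to rcon = 0x08,
# consistent with the round key stored at offset 512 below being the
# fourth expanded key.
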
5394# qhasm: xmm3 ^= RCON
5395# asm 1: pxor RCON,<xmm3=int6464#4
5396# asm 2: pxor RCON,<xmm3=%xmm3
5397pxor RCON,%xmm3
5398
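# EXPB0 replicates the high byte of each 32-bit lane across that lane,
# broadcasting the freshly substituted key byte so the shift-and-XOR
# cascade further down can fold it into every word.
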
5399# qhasm: shuffle bytes of xmm0 by EXPB0
5400# asm 1: pshufb EXPB0,<xmm0=int6464#1
5401# asm 2: pshufb EXPB0,<xmm0=%xmm0
5402pshufb EXPB0,%xmm0
5403
5404# qhasm: shuffle bytes of xmm1 by EXPB0
5405# asm 1: pshufb EXPB0,<xmm1=int6464#2
5406# asm 2: pshufb EXPB0,<xmm1=%xmm1
5407pshufb EXPB0,%xmm1
5408
5409# qhasm: shuffle bytes of xmm2 by EXPB0
5410# asm 1: pshufb EXPB0,<xmm2=int6464#3
5411# asm 2: pshufb EXPB0,<xmm2=%xmm2
5412pshufb EXPB0,%xmm2
5413
5414# qhasm: shuffle bytes of xmm3 by EXPB0
5415# asm 1: pshufb EXPB0,<xmm3=int6464#4
5416# asm 2: pshufb EXPB0,<xmm3=%xmm3
5417pshufb EXPB0,%xmm3
5418
5419# qhasm: shuffle bytes of xmm4 by EXPB0
5420# asm 1: pshufb EXPB0,<xmm4=int6464#5
5421# asm 2: pshufb EXPB0,<xmm4=%xmm4
5422pshufb EXPB0,%xmm4
5423
5424# qhasm: shuffle bytes of xmm5 by EXPB0
5425# asm 1: pshufb EXPB0,<xmm5=int6464#6
5426# asm 2: pshufb EXPB0,<xmm5=%xmm5
5427pshufb EXPB0,%xmm5
5428
5429# qhasm: shuffle bytes of xmm6 by EXPB0
5430# asm 1: pshufb EXPB0,<xmm6=int6464#7
5431# asm 2: pshufb EXPB0,<xmm6=%xmm6
5432pshufb EXPB0,%xmm6
5433
5434# qhasm: shuffle bytes of xmm7 by EXPB0
5435# asm 1: pshufb EXPB0,<xmm7=int6464#8
5436# asm 2: pshufb EXPB0,<xmm7=%xmm7
5437pshufb EXPB0,%xmm7
5438
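# Load the eight bit slices of the previous round key (offset 384,
# i.e. eight 16-byte slices per round key, 128 bytes apart).
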
5439# qhasm: xmm8 = *(int128 *)(c + 384)
5440# asm 1: movdqa 384(<c=int64#1),>xmm8=int6464#9
5441# asm 2: movdqa 384(<c=%rdi),>xmm8=%xmm8
5442movdqa 384(%rdi),%xmm8
5443
5444# qhasm: xmm9 = *(int128 *)(c + 400)
5445# asm 1: movdqa 400(<c=int64#1),>xmm9=int6464#10
5446# asm 2: movdqa 400(<c=%rdi),>xmm9=%xmm9
5447movdqa 400(%rdi),%xmm9
5448
5449# qhasm: xmm10 = *(int128 *)(c + 416)
5450# asm 1: movdqa 416(<c=int64#1),>xmm10=int6464#11
5451# asm 2: movdqa 416(<c=%rdi),>xmm10=%xmm10
5452movdqa 416(%rdi),%xmm10
5453
5454# qhasm: xmm11 = *(int128 *)(c + 432)
5455# asm 1: movdqa 432(<c=int64#1),>xmm11=int6464#12
5456# asm 2: movdqa 432(<c=%rdi),>xmm11=%xmm11
5457movdqa 432(%rdi),%xmm11
5458
5459# qhasm: xmm12 = *(int128 *)(c + 448)
5460# asm 1: movdqa 448(<c=int64#1),>xmm12=int6464#13
5461# asm 2: movdqa 448(<c=%rdi),>xmm12=%xmm12
5462movdqa 448(%rdi),%xmm12
5463
5464# qhasm: xmm13 = *(int128 *)(c + 464)
5465# asm 1: movdqa 464(<c=int64#1),>xmm13=int6464#14
5466# asm 2: movdqa 464(<c=%rdi),>xmm13=%xmm13
5467movdqa 464(%rdi),%xmm13
5468
5469# qhasm: xmm14 = *(int128 *)(c + 480)
5470# asm 1: movdqa 480(<c=int64#1),>xmm14=int6464#15
5471# asm 2: movdqa 480(<c=%rdi),>xmm14=%xmm14
5472movdqa 480(%rdi),%xmm14
5473
5474# qhasm: xmm15 = *(int128 *)(c + 496)
5475# asm 1: movdqa 496(<c=int64#1),>xmm15=int6464#16
5476# asm 2: movdqa 496(<c=%rdi),>xmm15=%xmm15
5477movdqa 496(%rdi),%xmm15
5478
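# Un-complement slices 0, 1, 5 and 6 of the loaded key: the bitsliced
# S-box is evaluated with these slices inverted, so key material is
# converted out of (and, after the store below, back into) that basis.
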
5479# qhasm: xmm8 ^= ONE
5480# asm 1: pxor ONE,<xmm8=int6464#9
5481# asm 2: pxor ONE,<xmm8=%xmm8
5482pxor ONE,%xmm8
5483
5484# qhasm: xmm9 ^= ONE
5485# asm 1: pxor ONE,<xmm9=int6464#10
5486# asm 2: pxor ONE,<xmm9=%xmm9
5487pxor ONE,%xmm9
5488
5489# qhasm: xmm13 ^= ONE
5490# asm 1: pxor ONE,<xmm13=int6464#14
5491# asm 2: pxor ONE,<xmm13=%xmm13
5492pxor ONE,%xmm13
5493
5494# qhasm: xmm14 ^= ONE
5495# asm 1: pxor ONE,<xmm14=int6464#15
5496# asm 2: pxor ONE,<xmm14=%xmm14
5497pxor ONE,%xmm14
5498
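# XOR the previous round key into the substituted words; together with
# the three shift-and-XOR passes below this realizes the AES-128
# recurrence w[i] = w[i-4] ^ w[i-1], with SubWord/RotWord/rcon already
# applied to the lead word.
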
5499# qhasm: xmm0 ^= xmm8
5500# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
5501# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
5502pxor %xmm8,%xmm0
5503
5504# qhasm: xmm1 ^= xmm9
5505# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
5506# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
5507pxor %xmm9,%xmm1
5508
5509# qhasm: xmm2 ^= xmm10
5510# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3
5511# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2
5512pxor %xmm10,%xmm2
5513
5514# qhasm: xmm3 ^= xmm11
5515# asm 1: pxor <xmm11=int6464#12,<xmm3=int6464#4
5516# asm 2: pxor <xmm11=%xmm11,<xmm3=%xmm3
5517pxor %xmm11,%xmm3
5518
5519# qhasm: xmm4 ^= xmm12
5520# asm 1: pxor <xmm12=int6464#13,<xmm4=int6464#5
5521# asm 2: pxor <xmm12=%xmm12,<xmm4=%xmm4
5522pxor %xmm12,%xmm4
5523
5524# qhasm: xmm5 ^= xmm13
5525# asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6
5526# asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5
5527pxor %xmm13,%xmm5
5528
5529# qhasm: xmm6 ^= xmm14
5530# asm 1: pxor <xmm14=int6464#15,<xmm6=int6464#7
5531# asm 2: pxor <xmm14=%xmm14,<xmm6=%xmm6
5532pxor %xmm14,%xmm6
5533
5534# qhasm: xmm7 ^= xmm15
5535# asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8
5536# asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7
5537pxor %xmm15,%xmm7
5538
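# Three passes of (psrld $8 then pxor): each pass shifts every slice
# one byte within its 32-bit lanes and folds it back in, rippling the
# new lead word through the remaining three words of the round key.
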
5539# qhasm: uint32323232 xmm8 >>= 8
5540# asm 1: psrld $8,<xmm8=int6464#9
5541# asm 2: psrld $8,<xmm8=%xmm8
5542psrld $8,%xmm8
5543
5544# qhasm: uint32323232 xmm9 >>= 8
5545# asm 1: psrld $8,<xmm9=int6464#10
5546# asm 2: psrld $8,<xmm9=%xmm9
5547psrld $8,%xmm9
5548
5549# qhasm: uint32323232 xmm10 >>= 8
5550# asm 1: psrld $8,<xmm10=int6464#11
5551# asm 2: psrld $8,<xmm10=%xmm10
5552psrld $8,%xmm10
5553
5554# qhasm: uint32323232 xmm11 >>= 8
5555# asm 1: psrld $8,<xmm11=int6464#12
5556# asm 2: psrld $8,<xmm11=%xmm11
5557psrld $8,%xmm11
5558
5559# qhasm: uint32323232 xmm12 >>= 8
5560# asm 1: psrld $8,<xmm12=int6464#13
5561# asm 2: psrld $8,<xmm12=%xmm12
5562psrld $8,%xmm12
5563
5564# qhasm: uint32323232 xmm13 >>= 8
5565# asm 1: psrld $8,<xmm13=int6464#14
5566# asm 2: psrld $8,<xmm13=%xmm13
5567psrld $8,%xmm13
5568
5569# qhasm: uint32323232 xmm14 >>= 8
5570# asm 1: psrld $8,<xmm14=int6464#15
5571# asm 2: psrld $8,<xmm14=%xmm14
5572psrld $8,%xmm14
5573
5574# qhasm: uint32323232 xmm15 >>= 8
5575# asm 1: psrld $8,<xmm15=int6464#16
5576# asm 2: psrld $8,<xmm15=%xmm15
5577psrld $8,%xmm15
5578
5579# qhasm: xmm0 ^= xmm8
5580# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
5581# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
5582pxor %xmm8,%xmm0
5583
5584# qhasm: xmm1 ^= xmm9
5585# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
5586# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
5587pxor %xmm9,%xmm1
5588
5589# qhasm: xmm2 ^= xmm10
5590# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3
5591# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2
5592pxor %xmm10,%xmm2
5593
5594# qhasm: xmm3 ^= xmm11
5595# asm 1: pxor <xmm11=int6464#12,<xmm3=int6464#4
5596# asm 2: pxor <xmm11=%xmm11,<xmm3=%xmm3
5597pxor %xmm11,%xmm3
5598
5599# qhasm: xmm4 ^= xmm12
5600# asm 1: pxor <xmm12=int6464#13,<xmm4=int6464#5
5601# asm 2: pxor <xmm12=%xmm12,<xmm4=%xmm4
5602pxor %xmm12,%xmm4
5603
5604# qhasm: xmm5 ^= xmm13
5605# asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6
5606# asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5
5607pxor %xmm13,%xmm5
5608
5609# qhasm: xmm6 ^= xmm14
5610# asm 1: pxor <xmm14=int6464#15,<xmm6=int6464#7
5611# asm 2: pxor <xmm14=%xmm14,<xmm6=%xmm6
5612pxor %xmm14,%xmm6
5613
5614# qhasm: xmm7 ^= xmm15
5615# asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8
5616# asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7
5617pxor %xmm15,%xmm7
5618
5619# qhasm: uint32323232 xmm8 >>= 8
5620# asm 1: psrld $8,<xmm8=int6464#9
5621# asm 2: psrld $8,<xmm8=%xmm8
5622psrld $8,%xmm8
5623
5624# qhasm: uint32323232 xmm9 >>= 8
5625# asm 1: psrld $8,<xmm9=int6464#10
5626# asm 2: psrld $8,<xmm9=%xmm9
5627psrld $8,%xmm9
5628
5629# qhasm: uint32323232 xmm10 >>= 8
5630# asm 1: psrld $8,<xmm10=int6464#11
5631# asm 2: psrld $8,<xmm10=%xmm10
5632psrld $8,%xmm10
5633
5634# qhasm: uint32323232 xmm11 >>= 8
5635# asm 1: psrld $8,<xmm11=int6464#12
5636# asm 2: psrld $8,<xmm11=%xmm11
5637psrld $8,%xmm11
5638
5639# qhasm: uint32323232 xmm12 >>= 8
5640# asm 1: psrld $8,<xmm12=int6464#13
5641# asm 2: psrld $8,<xmm12=%xmm12
5642psrld $8,%xmm12
5643
5644# qhasm: uint32323232 xmm13 >>= 8
5645# asm 1: psrld $8,<xmm13=int6464#14
5646# asm 2: psrld $8,<xmm13=%xmm13
5647psrld $8,%xmm13
5648
5649# qhasm: uint32323232 xmm14 >>= 8
5650# asm 1: psrld $8,<xmm14=int6464#15
5651# asm 2: psrld $8,<xmm14=%xmm14
5652psrld $8,%xmm14
5653
5654# qhasm: uint32323232 xmm15 >>= 8
5655# asm 1: psrld $8,<xmm15=int6464#16
5656# asm 2: psrld $8,<xmm15=%xmm15
5657psrld $8,%xmm15
5658
5659# qhasm: xmm0 ^= xmm8
5660# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
5661# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
5662pxor %xmm8,%xmm0
5663
5664# qhasm: xmm1 ^= xmm9
5665# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
5666# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
5667pxor %xmm9,%xmm1
5668
5669# qhasm: xmm2 ^= xmm10
5670# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3
5671# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2
5672pxor %xmm10,%xmm2
5673
5674# qhasm: xmm3 ^= xmm11
5675# asm 1: pxor <xmm11=int6464#12,<xmm3=int6464#4
5676# asm 2: pxor <xmm11=%xmm11,<xmm3=%xmm3
5677pxor %xmm11,%xmm3
5678
5679# qhasm: xmm4 ^= xmm12
5680# asm 1: pxor <xmm12=int6464#13,<xmm4=int6464#5
5681# asm 2: pxor <xmm12=%xmm12,<xmm4=%xmm4
5682pxor %xmm12,%xmm4
5683
5684# qhasm: xmm5 ^= xmm13
5685# asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6
5686# asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5
5687pxor %xmm13,%xmm5
5688
5689# qhasm: xmm6 ^= xmm14
5690# asm 1: pxor <xmm14=int6464#15,<xmm6=int6464#7
5691# asm 2: pxor <xmm14=%xmm14,<xmm6=%xmm6
5692pxor %xmm14,%xmm6
5693
5694# qhasm: xmm7 ^= xmm15
5695# asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8
5696# asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7
5697pxor %xmm15,%xmm7
5698
5699# qhasm: uint32323232 xmm8 >>= 8
5700# asm 1: psrld $8,<xmm8=int6464#9
5701# asm 2: psrld $8,<xmm8=%xmm8
5702psrld $8,%xmm8
5703
5704# qhasm: uint32323232 xmm9 >>= 8
5705# asm 1: psrld $8,<xmm9=int6464#10
5706# asm 2: psrld $8,<xmm9=%xmm9
5707psrld $8,%xmm9
5708
5709# qhasm: uint32323232 xmm10 >>= 8
5710# asm 1: psrld $8,<xmm10=int6464#11
5711# asm 2: psrld $8,<xmm10=%xmm10
5712psrld $8,%xmm10
5713
5714# qhasm: uint32323232 xmm11 >>= 8
5715# asm 1: psrld $8,<xmm11=int6464#12
5716# asm 2: psrld $8,<xmm11=%xmm11
5717psrld $8,%xmm11
5718
5719# qhasm: uint32323232 xmm12 >>= 8
5720# asm 1: psrld $8,<xmm12=int6464#13
5721# asm 2: psrld $8,<xmm12=%xmm12
5722psrld $8,%xmm12
5723
5724# qhasm: uint32323232 xmm13 >>= 8
5725# asm 1: psrld $8,<xmm13=int6464#14
5726# asm 2: psrld $8,<xmm13=%xmm13
5727psrld $8,%xmm13
5728
5729# qhasm: uint32323232 xmm14 >>= 8
5730# asm 1: psrld $8,<xmm14=int6464#15
5731# asm 2: psrld $8,<xmm14=%xmm14
5732psrld $8,%xmm14
5733
5734# qhasm: uint32323232 xmm15 >>= 8
5735# asm 1: psrld $8,<xmm15=int6464#16
5736# asm 2: psrld $8,<xmm15=%xmm15
5737psrld $8,%xmm15
5738
5739# qhasm: xmm0 ^= xmm8
5740# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
5741# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
5742pxor %xmm8,%xmm0
5743
5744# qhasm: xmm1 ^= xmm9
5745# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
5746# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
5747pxor %xmm9,%xmm1
5748
5749# qhasm: xmm2 ^= xmm10
5750# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3
5751# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2
5752pxor %xmm10,%xmm2
5753
5754# qhasm: xmm3 ^= xmm11
5755# asm 1: pxor <xmm11=int6464#12,<xmm3=int6464#4
5756# asm 2: pxor <xmm11=%xmm11,<xmm3=%xmm3
5757pxor %xmm11,%xmm3
5758
5759# qhasm: xmm4 ^= xmm12
5760# asm 1: pxor <xmm12=int6464#13,<xmm4=int6464#5
5761# asm 2: pxor <xmm12=%xmm12,<xmm4=%xmm4
5762pxor %xmm12,%xmm4
5763
5764# qhasm: xmm5 ^= xmm13
5765# asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6
5766# asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5
5767pxor %xmm13,%xmm5
5768
5769# qhasm: xmm6 ^= xmm14
5770# asm 1: pxor <xmm14=int6464#15,<xmm6=int6464#7
5771# asm 2: pxor <xmm14=%xmm14,<xmm6=%xmm6
5772pxor %xmm14,%xmm6
5773
5774# qhasm: xmm7 ^= xmm15
5775# asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8
5776# asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7
5777pxor %xmm15,%xmm7
5778
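# Store the finished round key as eight 16-byte slices at offsets
# 512..624, the slot after the key loaded from 384..496 above.
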
5779# qhasm: *(int128 *)(c + 512) = xmm0
5780# asm 1: movdqa <xmm0=int6464#1,512(<c=int64#1)
5781# asm 2: movdqa <xmm0=%xmm0,512(<c=%rdi)
5782movdqa %xmm0,512(%rdi)
5783
5784# qhasm: *(int128 *)(c + 528) = xmm1
5785# asm 1: movdqa <xmm1=int6464#2,528(<c=int64#1)
5786# asm 2: movdqa <xmm1=%xmm1,528(<c=%rdi)
5787movdqa %xmm1,528(%rdi)
5788
5789# qhasm: *(int128 *)(c + 544) = xmm2
5790# asm 1: movdqa <xmm2=int6464#3,544(<c=int64#1)
5791# asm 2: movdqa <xmm2=%xmm2,544(<c=%rdi)
5792movdqa %xmm2,544(%rdi)
5793
5794# qhasm: *(int128 *)(c + 560) = xmm3
5795# asm 1: movdqa <xmm3=int6464#4,560(<c=int64#1)
5796# asm 2: movdqa <xmm3=%xmm3,560(<c=%rdi)
5797movdqa %xmm3,560(%rdi)
5798
5799# qhasm: *(int128 *)(c + 576) = xmm4
5800# asm 1: movdqa <xmm4=int6464#5,576(<c=int64#1)
5801# asm 2: movdqa <xmm4=%xmm4,576(<c=%rdi)
5802movdqa %xmm4,576(%rdi)
5803
5804# qhasm: *(int128 *)(c + 592) = xmm5
5805# asm 1: movdqa <xmm5=int6464#6,592(<c=int64#1)
5806# asm 2: movdqa <xmm5=%xmm5,592(<c=%rdi)
5807movdqa %xmm5,592(%rdi)
5808
5809# qhasm: *(int128 *)(c + 608) = xmm6
5810# asm 1: movdqa <xmm6=int6464#7,608(<c=int64#1)
5811# asm 2: movdqa <xmm6=%xmm6,608(<c=%rdi)
5812movdqa %xmm6,608(%rdi)
5813
5814# qhasm: *(int128 *)(c + 624) = xmm7
5815# asm 1: movdqa <xmm7=int6464#8,624(<c=int64#1)
5816# asm 2: movdqa <xmm7=%xmm7,624(<c=%rdi)
5817movdqa %xmm7,624(%rdi)
5818
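# Re-complement slices 0, 1, 5 and 6 of the new round key in registers
# so it enters the next S-box pass in the inverted basis again.
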
5819# qhasm: xmm0 ^= ONE
5820# asm 1: pxor ONE,<xmm0=int6464#1
5821# asm 2: pxor ONE,<xmm0=%xmm0
5822pxor ONE,%xmm0
5823
5824# qhasm: xmm1 ^= ONE
5825# asm 1: pxor ONE,<xmm1=int6464#2
5826# asm 2: pxor ONE,<xmm1=%xmm1
5827pxor ONE,%xmm1
5828
5829# qhasm: xmm5 ^= ONE
5830# asm 1: pxor ONE,<xmm5=int6464#6
5831# asm 2: pxor ONE,<xmm5=%xmm5
5832pxor ONE,%xmm5
5833
5834# qhasm: xmm6 ^= ONE
5835# asm 1: pxor ONE,<xmm6=int6464#7
5836# asm 2: pxor ONE,<xmm6=%xmm6
5837pxor ONE,%xmm6
5838
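# ROTB reshuffles the bytes of the packed key words, apparently serving
# as the RotWord rotation for the next expansion step.
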
5839# qhasm: shuffle bytes of xmm0 by ROTB
5840# asm 1: pshufb ROTB,<xmm0=int6464#1
5841# asm 2: pshufb ROTB,<xmm0=%xmm0
5842pshufb ROTB,%xmm0
5843
5844# qhasm: shuffle bytes of xmm1 by ROTB
5845# asm 1: pshufb ROTB,<xmm1=int6464#2
5846# asm 2: pshufb ROTB,<xmm1=%xmm1
5847pshufb ROTB,%xmm1
5848
5849# qhasm: shuffle bytes of xmm2 by ROTB
5850# asm 1: pshufb ROTB,<xmm2=int6464#3
5851# asm 2: pshufb ROTB,<xmm2=%xmm2
5852pshufb ROTB,%xmm2
5853
5854# qhasm: shuffle bytes of xmm3 by ROTB
5855# asm 1: pshufb ROTB,<xmm3=int6464#4
5856# asm 2: pshufb ROTB,<xmm3=%xmm3
5857pshufb ROTB,%xmm3
5858
5859# qhasm: shuffle bytes of xmm4 by ROTB
5860# asm 1: pshufb ROTB,<xmm4=int6464#5
5861# asm 2: pshufb ROTB,<xmm4=%xmm4
5862pshufb ROTB,%xmm4
5863
5864# qhasm: shuffle bytes of xmm5 by ROTB
5865# asm 1: pshufb ROTB,<xmm5=int6464#6
5866# asm 2: pshufb ROTB,<xmm5=%xmm5
5867pshufb ROTB,%xmm5
5868
5869# qhasm: shuffle bytes of xmm6 by ROTB
5870# asm 1: pshufb ROTB,<xmm6=int6464#7
5871# asm 2: pshufb ROTB,<xmm6=%xmm6
5872pshufb ROTB,%xmm6
5873
5874# qhasm: shuffle bytes of xmm7 by ROTB
5875# asm 1: pshufb ROTB,<xmm7=int6464#8
5876# asm 2: pshufb ROTB,<xmm7=%xmm7
5877pshufb ROTB,%xmm7
5878
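# Next expansion step: the same S-box sequence as above, with the slice
# registers renamed for this step, applied to the words just rotated.
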
5879# qhasm: xmm5 ^= xmm6
5880# asm 1: pxor <xmm6=int6464#7,<xmm5=int6464#6
5881# asm 2: pxor <xmm6=%xmm6,<xmm5=%xmm5
5882pxor %xmm6,%xmm5
5883
5884# qhasm: xmm2 ^= xmm1
5885# asm 1: pxor <xmm1=int6464#2,<xmm2=int6464#3
5886# asm 2: pxor <xmm1=%xmm1,<xmm2=%xmm2
5887pxor %xmm1,%xmm2
5888
5889# qhasm: xmm5 ^= xmm0
5890# asm 1: pxor <xmm0=int6464#1,<xmm5=int6464#6
5891# asm 2: pxor <xmm0=%xmm0,<xmm5=%xmm5
5892pxor %xmm0,%xmm5
5893
5894# qhasm: xmm6 ^= xmm2
5895# asm 1: pxor <xmm2=int6464#3,<xmm6=int6464#7
5896# asm 2: pxor <xmm2=%xmm2,<xmm6=%xmm6
5897pxor %xmm2,%xmm6
5898
5899# qhasm: xmm3 ^= xmm0
5900# asm 1: pxor <xmm0=int6464#1,<xmm3=int6464#4
5901# asm 2: pxor <xmm0=%xmm0,<xmm3=%xmm3
5902pxor %xmm0,%xmm3
5903
5904# qhasm: xmm6 ^= xmm3
5905# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
5906# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
5907pxor %xmm3,%xmm6
5908
5909# qhasm: xmm3 ^= xmm7
5910# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4
5911# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3
5912pxor %xmm7,%xmm3
5913
5914# qhasm: xmm3 ^= xmm4
5915# asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4
5916# asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3
5917pxor %xmm4,%xmm3
5918
5919# qhasm: xmm7 ^= xmm5
5920# asm 1: pxor <xmm5=int6464#6,<xmm7=int6464#8
5921# asm 2: pxor <xmm5=%xmm5,<xmm7=%xmm7
5922pxor %xmm5,%xmm7
5923
5924# qhasm: xmm3 ^= xmm1
5925# asm 1: pxor <xmm1=int6464#2,<xmm3=int6464#4
5926# asm 2: pxor <xmm1=%xmm1,<xmm3=%xmm3
5927pxor %xmm1,%xmm3
5928
5929# qhasm: xmm4 ^= xmm5
5930# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
5931# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
5932pxor %xmm5,%xmm4
5933
5934# qhasm: xmm2 ^= xmm7
5935# asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3
5936# asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2
5937pxor %xmm7,%xmm2
5938
5939# qhasm: xmm1 ^= xmm5
5940# asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2
5941# asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1
5942pxor %xmm5,%xmm1
5943
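# Nonlinear core of the S-box again (AND/OR/XOR inversion network),
# identical in structure to the block earlier in this step.
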
5944# qhasm: xmm11 = xmm7
5945# asm 1: movdqa <xmm7=int6464#8,>xmm11=int6464#9
5946# asm 2: movdqa <xmm7=%xmm7,>xmm11=%xmm8
5947movdqa %xmm7,%xmm8
5948
5949# qhasm: xmm10 = xmm1
5950# asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10
5951# asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9
5952movdqa %xmm1,%xmm9
5953
5954# qhasm: xmm9 = xmm5
5955# asm 1: movdqa <xmm5=int6464#6,>xmm9=int6464#11
5956# asm 2: movdqa <xmm5=%xmm5,>xmm9=%xmm10
5957movdqa %xmm5,%xmm10
5958
5959# qhasm: xmm13 = xmm2
5960# asm 1: movdqa <xmm2=int6464#3,>xmm13=int6464#12
5961# asm 2: movdqa <xmm2=%xmm2,>xmm13=%xmm11
5962movdqa %xmm2,%xmm11
5963
5964# qhasm: xmm12 = xmm6
5965# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#13
5966# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm12
5967movdqa %xmm6,%xmm12
5968
5969# qhasm: xmm11 ^= xmm4
5970# asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#9
5971# asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm8
5972pxor %xmm4,%xmm8
5973
5974# qhasm: xmm10 ^= xmm2
5975# asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#10
5976# asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm9
5977pxor %xmm2,%xmm9
5978
5979# qhasm: xmm9 ^= xmm3
5980# asm 1: pxor <xmm3=int6464#4,<xmm9=int6464#11
5981# asm 2: pxor <xmm3=%xmm3,<xmm9=%xmm10
5982pxor %xmm3,%xmm10
5983
5984# qhasm: xmm13 ^= xmm4
5985# asm 1: pxor <xmm4=int6464#5,<xmm13=int6464#12
5986# asm 2: pxor <xmm4=%xmm4,<xmm13=%xmm11
5987pxor %xmm4,%xmm11
5988
5989# qhasm: xmm12 ^= xmm0
5990# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13
5991# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12
5992pxor %xmm0,%xmm12
5993
5994# qhasm: xmm14 = xmm11
5995# asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14
5996# asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13
5997movdqa %xmm8,%xmm13
5998
5999# qhasm: xmm8 = xmm10
6000# asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15
6001# asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14
6002movdqa %xmm9,%xmm14
6003
6004# qhasm: xmm15 = xmm11
6005# asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16
6006# asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15
6007movdqa %xmm8,%xmm15
6008
6009# qhasm: xmm10 |= xmm9
6010# asm 1: por <xmm9=int6464#11,<xmm10=int6464#10
6011# asm 2: por <xmm9=%xmm10,<xmm10=%xmm9
6012por %xmm10,%xmm9
6013
6014# qhasm: xmm11 |= xmm12
6015# asm 1: por <xmm12=int6464#13,<xmm11=int6464#9
6016# asm 2: por <xmm12=%xmm12,<xmm11=%xmm8
6017por %xmm12,%xmm8
6018
6019# qhasm: xmm15 ^= xmm8
6020# asm 1: pxor <xmm8=int6464#15,<xmm15=int6464#16
6021# asm 2: pxor <xmm8=%xmm14,<xmm15=%xmm15
6022pxor %xmm14,%xmm15
6023
6024# qhasm: xmm14 &= xmm12
6025# asm 1: pand <xmm12=int6464#13,<xmm14=int6464#14
6026# asm 2: pand <xmm12=%xmm12,<xmm14=%xmm13
6027pand %xmm12,%xmm13
6028
6029# qhasm: xmm8 &= xmm9
6030# asm 1: pand <xmm9=int6464#11,<xmm8=int6464#15
6031# asm 2: pand <xmm9=%xmm10,<xmm8=%xmm14
6032pand %xmm10,%xmm14
6033
6034# qhasm: xmm12 ^= xmm9
6035# asm 1: pxor <xmm9=int6464#11,<xmm12=int6464#13
6036# asm 2: pxor <xmm9=%xmm10,<xmm12=%xmm12
6037pxor %xmm10,%xmm12
6038
6039# qhasm: xmm15 &= xmm12
6040# asm 1: pand <xmm12=int6464#13,<xmm15=int6464#16
6041# asm 2: pand <xmm12=%xmm12,<xmm15=%xmm15
6042pand %xmm12,%xmm15
6043
6044# qhasm: xmm12 = xmm3
6045# asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#11
6046# asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm10
6047movdqa %xmm3,%xmm10
6048
6049# qhasm: xmm12 ^= xmm0
6050# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#11
6051# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm10
6052pxor %xmm0,%xmm10
6053
6054# qhasm: xmm13 &= xmm12
6055# asm 1: pand <xmm12=int6464#11,<xmm13=int6464#12
6056# asm 2: pand <xmm12=%xmm10,<xmm13=%xmm11
6057pand %xmm10,%xmm11
6058
6059# qhasm: xmm11 ^= xmm13
6060# asm 1: pxor <xmm13=int6464#12,<xmm11=int6464#9
6061# asm 2: pxor <xmm13=%xmm11,<xmm11=%xmm8
6062pxor %xmm11,%xmm8
6063
6064# qhasm: xmm10 ^= xmm13
6065# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
6066# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
6067pxor %xmm11,%xmm9
6068
6069# qhasm: xmm13 = xmm7
6070# asm 1: movdqa <xmm7=int6464#8,>xmm13=int6464#11
6071# asm 2: movdqa <xmm7=%xmm7,>xmm13=%xmm10
6072movdqa %xmm7,%xmm10
6073
6074# qhasm: xmm13 ^= xmm1
6075# asm 1: pxor <xmm1=int6464#2,<xmm13=int6464#11
6076# asm 2: pxor <xmm1=%xmm1,<xmm13=%xmm10
6077pxor %xmm1,%xmm10
6078
6079# qhasm: xmm12 = xmm5
6080# asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#12
6081# asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm11
6082movdqa %xmm5,%xmm11
6083
6084# qhasm: xmm9 = xmm13
6085# asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13
6086# asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12
6087movdqa %xmm10,%xmm12
6088
6089# qhasm: xmm12 ^= xmm6
6090# asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#12
6091# asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm11
6092pxor %xmm6,%xmm11
6093
6094# qhasm: xmm9 |= xmm12
6095# asm 1: por <xmm12=int6464#12,<xmm9=int6464#13
6096# asm 2: por <xmm12=%xmm11,<xmm9=%xmm12
6097por %xmm11,%xmm12
6098
6099# qhasm: xmm13 &= xmm12
6100# asm 1: pand <xmm12=int6464#12,<xmm13=int6464#11
6101# asm 2: pand <xmm12=%xmm11,<xmm13=%xmm10
6102pand %xmm11,%xmm10
6103
6104# qhasm: xmm8 ^= xmm13
6105# asm 1: pxor <xmm13=int6464#11,<xmm8=int6464#15
6106# asm 2: pxor <xmm13=%xmm10,<xmm8=%xmm14
6107pxor %xmm10,%xmm14
6108
6109# qhasm: xmm11 ^= xmm15
6110# asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#9
6111# asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm8
6112pxor %xmm15,%xmm8
6113
6114# qhasm: xmm10 ^= xmm14
6115# asm 1: pxor <xmm14=int6464#14,<xmm10=int6464#10
6116# asm 2: pxor <xmm14=%xmm13,<xmm10=%xmm9
6117pxor %xmm13,%xmm9
6118
6119# qhasm: xmm9 ^= xmm15
6120# asm 1: pxor <xmm15=int6464#16,<xmm9=int6464#13
6121# asm 2: pxor <xmm15=%xmm15,<xmm9=%xmm12
6122pxor %xmm15,%xmm12
6123
6124# qhasm: xmm8 ^= xmm14
6125# asm 1: pxor <xmm14=int6464#14,<xmm8=int6464#15
6126# asm 2: pxor <xmm14=%xmm13,<xmm8=%xmm14
6127pxor %xmm13,%xmm14
6128
6129# qhasm: xmm9 ^= xmm14
6130# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
6131# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
6132pxor %xmm13,%xmm12
6133
6134# qhasm: xmm12 = xmm2
6135# asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#11
6136# asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm10
6137movdqa %xmm2,%xmm10
6138
6139# qhasm: xmm13 = xmm4
6140# asm 1: movdqa <xmm4=int6464#5,>xmm13=int6464#12
6141# asm 2: movdqa <xmm4=%xmm4,>xmm13=%xmm11
6142movdqa %xmm4,%xmm11
6143
6144# qhasm: xmm14 = xmm1
6145# asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14
6146# asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13
6147movdqa %xmm1,%xmm13
6148
6149# qhasm: xmm15 = xmm7
6150# asm 1: movdqa <xmm7=int6464#8,>xmm15=int6464#16
6151# asm 2: movdqa <xmm7=%xmm7,>xmm15=%xmm15
6152movdqa %xmm7,%xmm15
6153
6154# qhasm: xmm12 &= xmm3
6155# asm 1: pand <xmm3=int6464#4,<xmm12=int6464#11
6156# asm 2: pand <xmm3=%xmm3,<xmm12=%xmm10
6157pand %xmm3,%xmm10
6158
6159# qhasm: xmm13 &= xmm0
6160# asm 1: pand <xmm0=int6464#1,<xmm13=int6464#12
6161# asm 2: pand <xmm0=%xmm0,<xmm13=%xmm11
6162pand %xmm0,%xmm11
6163
6164# qhasm: xmm14 &= xmm5
6165# asm 1: pand <xmm5=int6464#6,<xmm14=int6464#14
6166# asm 2: pand <xmm5=%xmm5,<xmm14=%xmm13
6167pand %xmm5,%xmm13
6168
6169# qhasm: xmm15 |= xmm6
6170# asm 1: por <xmm6=int6464#7,<xmm15=int6464#16
6171# asm 2: por <xmm6=%xmm6,<xmm15=%xmm15
6172por %xmm6,%xmm15
6173
6174# qhasm: xmm11 ^= xmm12
6175# asm 1: pxor <xmm12=int6464#11,<xmm11=int6464#9
6176# asm 2: pxor <xmm12=%xmm10,<xmm11=%xmm8
6177pxor %xmm10,%xmm8
6178
6179# qhasm: xmm10 ^= xmm13
6180# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
6181# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
6182pxor %xmm11,%xmm9
6183
6184# qhasm: xmm9 ^= xmm14
6185# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
6186# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
6187pxor %xmm13,%xmm12
6188
6189# qhasm: xmm8 ^= xmm15
6190# asm 1: pxor <xmm15=int6464#16,<xmm8=int6464#15
6191# asm 2: pxor <xmm15=%xmm15,<xmm8=%xmm14
6192pxor %xmm15,%xmm14
6193
6194# qhasm: xmm12 = xmm11
6195# asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11
6196# asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10
6197movdqa %xmm8,%xmm10
6198
6199# qhasm: xmm12 ^= xmm10
6200# asm 1: pxor <xmm10=int6464#10,<xmm12=int6464#11
6201# asm 2: pxor <xmm10=%xmm9,<xmm12=%xmm10
6202pxor %xmm9,%xmm10
6203
6204# qhasm: xmm11 &= xmm9
6205# asm 1: pand <xmm9=int6464#13,<xmm11=int6464#9
6206# asm 2: pand <xmm9=%xmm12,<xmm11=%xmm8
6207pand %xmm12,%xmm8
6208
6209# qhasm: xmm14 = xmm8
6210# asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12
6211# asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11
6212movdqa %xmm14,%xmm11
6213
6214# qhasm: xmm14 ^= xmm11
6215# asm 1: pxor <xmm11=int6464#9,<xmm14=int6464#12
6216# asm 2: pxor <xmm11=%xmm8,<xmm14=%xmm11
6217pxor %xmm8,%xmm11
6218
6219# qhasm: xmm15 = xmm12
6220# asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14
6221# asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13
6222movdqa %xmm10,%xmm13
6223
6224# qhasm: xmm15 &= xmm14
6225# asm 1: pand <xmm14=int6464#12,<xmm15=int6464#14
6226# asm 2: pand <xmm14=%xmm11,<xmm15=%xmm13
6227pand %xmm11,%xmm13
6228
6229# qhasm: xmm15 ^= xmm10
6230# asm 1: pxor <xmm10=int6464#10,<xmm15=int6464#14
6231# asm 2: pxor <xmm10=%xmm9,<xmm15=%xmm13
6232pxor %xmm9,%xmm13
6233
6234# qhasm: xmm13 = xmm9
6235# asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16
6236# asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15
6237movdqa %xmm12,%xmm15
6238
6239# qhasm: xmm13 ^= xmm8
6240# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
6241# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
6242pxor %xmm14,%xmm15
6243
6244# qhasm: xmm11 ^= xmm10
6245# asm 1: pxor <xmm10=int6464#10,<xmm11=int6464#9
6246# asm 2: pxor <xmm10=%xmm9,<xmm11=%xmm8
6247pxor %xmm9,%xmm8
6248
6249# qhasm: xmm13 &= xmm11
6250# asm 1: pand <xmm11=int6464#9,<xmm13=int6464#16
6251# asm 2: pand <xmm11=%xmm8,<xmm13=%xmm15
6252pand %xmm8,%xmm15
6253
6254# qhasm: xmm13 ^= xmm8
6255# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
6256# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
6257pxor %xmm14,%xmm15
6258
6259# qhasm: xmm9 ^= xmm13
6260# asm 1: pxor <xmm13=int6464#16,<xmm9=int6464#13
6261# asm 2: pxor <xmm13=%xmm15,<xmm9=%xmm12
6262pxor %xmm15,%xmm12
6263
6264# qhasm: xmm10 = xmm14
6265# asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9
6266# asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8
6267movdqa %xmm11,%xmm8
6268
6269# qhasm: xmm10 ^= xmm13
6270# asm 1: pxor <xmm13=int6464#16,<xmm10=int6464#9
6271# asm 2: pxor <xmm13=%xmm15,<xmm10=%xmm8
6272pxor %xmm15,%xmm8
6273
6274# qhasm: xmm10 &= xmm8
6275# asm 1: pand <xmm8=int6464#15,<xmm10=int6464#9
6276# asm 2: pand <xmm8=%xmm14,<xmm10=%xmm8
6277pand %xmm14,%xmm8
6278
6279# qhasm: xmm9 ^= xmm10
6280# asm 1: pxor <xmm10=int6464#9,<xmm9=int6464#13
6281# asm 2: pxor <xmm10=%xmm8,<xmm9=%xmm12
6282pxor %xmm8,%xmm12
6283
6284# qhasm: xmm14 ^= xmm10
6285# asm 1: pxor <xmm10=int6464#9,<xmm14=int6464#12
6286# asm 2: pxor <xmm10=%xmm8,<xmm14=%xmm11
6287pxor %xmm8,%xmm11
6288
6289# qhasm: xmm14 &= xmm15
6290# asm 1: pand <xmm15=int6464#14,<xmm14=int6464#12
6291# asm 2: pand <xmm15=%xmm13,<xmm14=%xmm11
6292pand %xmm13,%xmm11
6293
6294# qhasm: xmm14 ^= xmm12
6295# asm 1: pxor <xmm12=int6464#11,<xmm14=int6464#12
6296# asm 2: pxor <xmm12=%xmm10,<xmm14=%xmm11
6297pxor %xmm10,%xmm11
6298
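# Back-multiplication and output linear layer of this S-box pass,
# mirroring the second half of the previous one.
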
6299# qhasm: xmm12 = xmm6
6300# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#9
6301# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm8
6302movdqa %xmm6,%xmm8
6303
6304# qhasm: xmm8 = xmm5
6305# asm 1: movdqa <xmm5=int6464#6,>xmm8=int6464#10
6306# asm 2: movdqa <xmm5=%xmm5,>xmm8=%xmm9
6307movdqa %xmm5,%xmm9
6308
6309# qhasm: xmm10 = xmm15
6310# asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11
6311# asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10
6312movdqa %xmm13,%xmm10
6313
6314# qhasm: xmm10 ^= xmm14
6315# asm 1: pxor <xmm14=int6464#12,<xmm10=int6464#11
6316# asm 2: pxor <xmm14=%xmm11,<xmm10=%xmm10
6317pxor %xmm11,%xmm10
6318
6319# qhasm: xmm10 &= xmm6
6320# asm 1: pand <xmm6=int6464#7,<xmm10=int6464#11
6321# asm 2: pand <xmm6=%xmm6,<xmm10=%xmm10
6322pand %xmm6,%xmm10
6323
6324# qhasm: xmm6 ^= xmm5
6325# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
6326# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
6327pxor %xmm5,%xmm6
6328
6329# qhasm: xmm6 &= xmm14
6330# asm 1: pand <xmm14=int6464#12,<xmm6=int6464#7
6331# asm 2: pand <xmm14=%xmm11,<xmm6=%xmm6
6332pand %xmm11,%xmm6
6333
6334# qhasm: xmm5 &= xmm15
6335# asm 1: pand <xmm15=int6464#14,<xmm5=int6464#6
6336# asm 2: pand <xmm15=%xmm13,<xmm5=%xmm5
6337pand %xmm13,%xmm5
6338
6339# qhasm: xmm6 ^= xmm5
6340# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
6341# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
6342pxor %xmm5,%xmm6
6343
6344# qhasm: xmm5 ^= xmm10
6345# asm 1: pxor <xmm10=int6464#11,<xmm5=int6464#6
6346# asm 2: pxor <xmm10=%xmm10,<xmm5=%xmm5
6347pxor %xmm10,%xmm5
6348
6349# qhasm: xmm12 ^= xmm0
6350# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#9
6351# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm8
6352pxor %xmm0,%xmm8
6353
6354# qhasm: xmm8 ^= xmm3
6355# asm 1: pxor <xmm3=int6464#4,<xmm8=int6464#10
6356# asm 2: pxor <xmm3=%xmm3,<xmm8=%xmm9
6357pxor %xmm3,%xmm9
6358
6359# qhasm: xmm15 ^= xmm13
6360# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
6361# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
6362pxor %xmm15,%xmm13
6363
6364# qhasm: xmm14 ^= xmm9
6365# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
6366# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
6367pxor %xmm12,%xmm11
6368
6369# qhasm: xmm11 = xmm15
6370# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
6371# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
6372movdqa %xmm13,%xmm10
6373
6374# qhasm: xmm11 ^= xmm14
6375# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
6376# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
6377pxor %xmm11,%xmm10
6378
6379# qhasm: xmm11 &= xmm12
6380# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
6381# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
6382pand %xmm8,%xmm10
6383
6384# qhasm: xmm12 ^= xmm8
6385# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
6386# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
6387pxor %xmm9,%xmm8
6388
6389# qhasm: xmm12 &= xmm14
6390# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
6391# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
6392pand %xmm11,%xmm8
6393
6394# qhasm: xmm8 &= xmm15
6395# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
6396# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
6397pand %xmm13,%xmm9
6398
6399# qhasm: xmm8 ^= xmm12
6400# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
6401# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
6402pxor %xmm8,%xmm9
6403
6404# qhasm: xmm12 ^= xmm11
6405# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
6406# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
6407pxor %xmm10,%xmm8
6408
6409# qhasm: xmm10 = xmm13
6410# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
6411# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
6412movdqa %xmm15,%xmm10
6413
6414# qhasm: xmm10 ^= xmm9
6415# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
6416# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
6417pxor %xmm12,%xmm10
6418
6419# qhasm: xmm10 &= xmm0
6420# asm 1: pand <xmm0=int6464#1,<xmm10=int6464#11
6421# asm 2: pand <xmm0=%xmm0,<xmm10=%xmm10
6422pand %xmm0,%xmm10
6423
6424# qhasm: xmm0 ^= xmm3
6425# asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1
6426# asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0
6427pxor %xmm3,%xmm0
6428
6429# qhasm: xmm0 &= xmm9
6430# asm 1: pand <xmm9=int6464#13,<xmm0=int6464#1
6431# asm 2: pand <xmm9=%xmm12,<xmm0=%xmm0
6432pand %xmm12,%xmm0
6433
6434# qhasm: xmm3 &= xmm13
6435# asm 1: pand <xmm13=int6464#16,<xmm3=int6464#4
6436# asm 2: pand <xmm13=%xmm15,<xmm3=%xmm3
6437pand %xmm15,%xmm3
6438
6439# qhasm: xmm0 ^= xmm3
6440# asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1
6441# asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0
6442pxor %xmm3,%xmm0
6443
6444# qhasm: xmm3 ^= xmm10
6445# asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4
6446# asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3
6447pxor %xmm10,%xmm3
6448
6449# qhasm: xmm6 ^= xmm12
6450# asm 1: pxor <xmm12=int6464#9,<xmm6=int6464#7
6451# asm 2: pxor <xmm12=%xmm8,<xmm6=%xmm6
6452pxor %xmm8,%xmm6
6453
6454# qhasm: xmm0 ^= xmm12
6455# asm 1: pxor <xmm12=int6464#9,<xmm0=int6464#1
6456# asm 2: pxor <xmm12=%xmm8,<xmm0=%xmm0
6457pxor %xmm8,%xmm0
6458
6459# qhasm: xmm5 ^= xmm8
6460# asm 1: pxor <xmm8=int6464#10,<xmm5=int6464#6
6461# asm 2: pxor <xmm8=%xmm9,<xmm5=%xmm5
6462pxor %xmm9,%xmm5
6463
6464# qhasm: xmm3 ^= xmm8
6465# asm 1: pxor <xmm8=int6464#10,<xmm3=int6464#4
6466# asm 2: pxor <xmm8=%xmm9,<xmm3=%xmm3
6467pxor %xmm9,%xmm3
6468
6469# qhasm: xmm12 = xmm7
6470# asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#9
6471# asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm8
6472movdqa %xmm7,%xmm8
6473
6474# qhasm: xmm8 = xmm1
6475# asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10
6476# asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9
6477movdqa %xmm1,%xmm9
6478
6479# qhasm: xmm12 ^= xmm4
6480# asm 1: pxor <xmm4=int6464#5,<xmm12=int6464#9
6481# asm 2: pxor <xmm4=%xmm4,<xmm12=%xmm8
6482pxor %xmm4,%xmm8
6483
6484# qhasm: xmm8 ^= xmm2
6485# asm 1: pxor <xmm2=int6464#3,<xmm8=int6464#10
6486# asm 2: pxor <xmm2=%xmm2,<xmm8=%xmm9
6487pxor %xmm2,%xmm9
6488
6489# qhasm: xmm11 = xmm15
6490# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
6491# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
6492movdqa %xmm13,%xmm10
6493
6494# qhasm: xmm11 ^= xmm14
6495# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
6496# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
6497pxor %xmm11,%xmm10
6498
6499# qhasm: xmm11 &= xmm12
6500# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
6501# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
6502pand %xmm8,%xmm10
6503
6504# qhasm: xmm12 ^= xmm8
6505# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
6506# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
6507pxor %xmm9,%xmm8
6508
6509# qhasm: xmm12 &= xmm14
6510# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
6511# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
6512pand %xmm11,%xmm8
6513
6514# qhasm: xmm8 &= xmm15
6515# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
6516# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
6517pand %xmm13,%xmm9
6518
6519# qhasm: xmm8 ^= xmm12
6520# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
6521# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
6522pxor %xmm8,%xmm9
6523
6524# qhasm: xmm12 ^= xmm11
6525# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
6526# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
6527pxor %xmm10,%xmm8
6528
6529# qhasm: xmm10 = xmm13
6530# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
6531# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
6532movdqa %xmm15,%xmm10
6533
6534# qhasm: xmm10 ^= xmm9
6535# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
6536# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
6537pxor %xmm12,%xmm10
6538
6539# qhasm: xmm10 &= xmm4
6540# asm 1: pand <xmm4=int6464#5,<xmm10=int6464#11
6541# asm 2: pand <xmm4=%xmm4,<xmm10=%xmm10
6542pand %xmm4,%xmm10
6543
6544# qhasm: xmm4 ^= xmm2
6545# asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5
6546# asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4
6547pxor %xmm2,%xmm4
6548
6549# qhasm: xmm4 &= xmm9
6550# asm 1: pand <xmm9=int6464#13,<xmm4=int6464#5
6551# asm 2: pand <xmm9=%xmm12,<xmm4=%xmm4
6552pand %xmm12,%xmm4
6553
6554# qhasm: xmm2 &= xmm13
6555# asm 1: pand <xmm13=int6464#16,<xmm2=int6464#3
6556# asm 2: pand <xmm13=%xmm15,<xmm2=%xmm2
6557pand %xmm15,%xmm2
6558
6559# qhasm: xmm4 ^= xmm2
6560# asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5
6561# asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4
6562pxor %xmm2,%xmm4
6563
6564# qhasm: xmm2 ^= xmm10
6565# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3
6566# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2
6567pxor %xmm10,%xmm2
6568
6569# qhasm: xmm15 ^= xmm13
6570# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
6571# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
6572pxor %xmm15,%xmm13
6573
6574# qhasm: xmm14 ^= xmm9
6575# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
6576# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
6577pxor %xmm12,%xmm11
6578
6579# qhasm: xmm11 = xmm15
6580# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
6581# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
6582movdqa %xmm13,%xmm10
6583
6584# qhasm: xmm11 ^= xmm14
6585# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
6586# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
6587pxor %xmm11,%xmm10
6588
6589# qhasm: xmm11 &= xmm7
6590# asm 1: pand <xmm7=int6464#8,<xmm11=int6464#11
6591# asm 2: pand <xmm7=%xmm7,<xmm11=%xmm10
6592pand %xmm7,%xmm10
6593
6594# qhasm: xmm7 ^= xmm1
6595# asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8
6596# asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7
6597pxor %xmm1,%xmm7
6598
6599# qhasm: xmm7 &= xmm14
6600# asm 1: pand <xmm14=int6464#12,<xmm7=int6464#8
6601# asm 2: pand <xmm14=%xmm11,<xmm7=%xmm7
6602pand %xmm11,%xmm7
6603
6604# qhasm: xmm1 &= xmm15
6605# asm 1: pand <xmm15=int6464#14,<xmm1=int6464#2
6606# asm 2: pand <xmm15=%xmm13,<xmm1=%xmm1
6607pand %xmm13,%xmm1
6608
6609# qhasm: xmm7 ^= xmm1
6610# asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8
6611# asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7
6612pxor %xmm1,%xmm7
6613
6614# qhasm: xmm1 ^= xmm11
6615# asm 1: pxor <xmm11=int6464#11,<xmm1=int6464#2
6616# asm 2: pxor <xmm11=%xmm10,<xmm1=%xmm1
6617pxor %xmm10,%xmm1
6618
6619# qhasm: xmm7 ^= xmm12
6620# asm 1: pxor <xmm12=int6464#9,<xmm7=int6464#8
6621# asm 2: pxor <xmm12=%xmm8,<xmm7=%xmm7
6622pxor %xmm8,%xmm7
6623
6624# qhasm: xmm4 ^= xmm12
6625# asm 1: pxor <xmm12=int6464#9,<xmm4=int6464#5
6626# asm 2: pxor <xmm12=%xmm8,<xmm4=%xmm4
6627pxor %xmm8,%xmm4
6628
6629# qhasm: xmm1 ^= xmm8
6630# asm 1: pxor <xmm8=int6464#10,<xmm1=int6464#2
6631# asm 2: pxor <xmm8=%xmm9,<xmm1=%xmm1
6632pxor %xmm9,%xmm1
6633
6634# qhasm: xmm2 ^= xmm8
6635# asm 1: pxor <xmm8=int6464#10,<xmm2=int6464#3
6636# asm 2: pxor <xmm8=%xmm9,<xmm2=%xmm2
6637pxor %xmm9,%xmm2
6638
6639# qhasm: xmm7 ^= xmm0
6640# asm 1: pxor <xmm0=int6464#1,<xmm7=int6464#8
6641# asm 2: pxor <xmm0=%xmm0,<xmm7=%xmm7
6642pxor %xmm0,%xmm7
6643
6644# qhasm: xmm1 ^= xmm6
6645# asm 1: pxor <xmm6=int6464#7,<xmm1=int6464#2
6646# asm 2: pxor <xmm6=%xmm6,<xmm1=%xmm1
6647pxor %xmm6,%xmm1
6648
6649# qhasm: xmm4 ^= xmm7
6650# asm 1: pxor <xmm7=int6464#8,<xmm4=int6464#5
6651# asm 2: pxor <xmm7=%xmm7,<xmm4=%xmm4
6652pxor %xmm7,%xmm4
6653
6654# qhasm: xmm6 ^= xmm0
6655# asm 1: pxor <xmm0=int6464#1,<xmm6=int6464#7
6656# asm 2: pxor <xmm0=%xmm0,<xmm6=%xmm6
6657pxor %xmm0,%xmm6
6658
6659# qhasm: xmm0 ^= xmm1
6660# asm 1: pxor <xmm1=int6464#2,<xmm0=int6464#1
6661# asm 2: pxor <xmm1=%xmm1,<xmm0=%xmm0
6662pxor %xmm1,%xmm0
6663
6664# qhasm: xmm1 ^= xmm5
6665# asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2
6666# asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1
6667pxor %xmm5,%xmm1
6668
6669# qhasm: xmm5 ^= xmm2
6670# asm 1: pxor <xmm2=int6464#3,<xmm5=int6464#6
6671# asm 2: pxor <xmm2=%xmm2,<xmm5=%xmm5
6672pxor %xmm2,%xmm5
6673
6674# qhasm: xmm4 ^= xmm5
6675# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
6676# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
6677pxor %xmm5,%xmm4
6678
6679# qhasm: xmm2 ^= xmm3
6680# asm 1: pxor <xmm3=int6464#4,<xmm2=int6464#3
6681# asm 2: pxor <xmm3=%xmm3,<xmm2=%xmm2
6682pxor %xmm3,%xmm2
6683
6684# qhasm: xmm3 ^= xmm5
6685# asm 1: pxor <xmm5=int6464#6,<xmm3=int6464#4
6686# asm 2: pxor <xmm5=%xmm5,<xmm3=%xmm3
6687pxor %xmm5,%xmm3
6688
6689# qhasm: xmm6 ^= xmm3
6690# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
6691# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
6692pxor %xmm3,%xmm6
6693
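# note: the movdqa/pxor/pand/por sequence ending here evaluates the AES
# S-box as a Boolean circuit over eight bit-sliced planes (one xmm per
# bit position), the bit-sliced approach of Käsper and Schwabe credited
# at the top of this file. The RCON xor below appears to inject the key
# schedule's round constant; only RCON's top 32-bit lane is nonzero.
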
6694# qhasm: xmm3 ^= RCON
6695# asm 1: pxor RCON,<xmm3=int6464#4
6696# asm 2: pxor RCON,<xmm3=%xmm3
6697pxor RCON,%xmm3
6698
6699# qhasm: shuffle bytes of xmm0 by EXPB0
6700# asm 1: pshufb EXPB0,<xmm0=int6464#1
6701# asm 2: pshufb EXPB0,<xmm0=%xmm0
6702pshufb EXPB0,%xmm0
6703
6704# qhasm: shuffle bytes of xmm1 by EXPB0
6705# asm 1: pshufb EXPB0,<xmm1=int6464#2
6706# asm 2: pshufb EXPB0,<xmm1=%xmm1
6707pshufb EXPB0,%xmm1
6708
6709# qhasm: shuffle bytes of xmm4 by EXPB0
6710# asm 1: pshufb EXPB0,<xmm4=int6464#5
6711# asm 2: pshufb EXPB0,<xmm4=%xmm4
6712pshufb EXPB0,%xmm4
6713
6714# qhasm: shuffle bytes of xmm6 by EXPB0
6715# asm 1: pshufb EXPB0,<xmm6=int6464#7
6716# asm 2: pshufb EXPB0,<xmm6=%xmm6
6717pshufb EXPB0,%xmm6
6718
6719# qhasm: shuffle bytes of xmm3 by EXPB0
6720# asm 1: pshufb EXPB0,<xmm3=int6464#4
6721# asm 2: pshufb EXPB0,<xmm3=%xmm3
6722pshufb EXPB0,%xmm3
6723
6724# qhasm: shuffle bytes of xmm7 by EXPB0
6725# asm 1: pshufb EXPB0,<xmm7=int6464#8
6726# asm 2: pshufb EXPB0,<xmm7=%xmm7
6727pshufb EXPB0,%xmm7
6728
6729# qhasm: shuffle bytes of xmm2 by EXPB0
6730# asm 1: pshufb EXPB0,<xmm2=int6464#3
6731# asm 2: pshufb EXPB0,<xmm2=%xmm2
6732pshufb EXPB0,%xmm2
6733
6734# qhasm: shuffle bytes of xmm5 by EXPB0
6735# asm 1: pshufb EXPB0,<xmm5=int6464#6
6736# asm 2: pshufb EXPB0,<xmm5=%xmm5
6737pshufb EXPB0,%xmm5
6738
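# note: EXPB0 broadcasts the last byte of each 32-bit lane across that
# lane, which looks like the RotWord/SubWord byte selection of the key
# schedule applied to bit-sliced data. The movdqa loads below fetch the
# previous bit-sliced round key, kept as eight 16-byte planes at
# c + 512 .. c + 624; ONE complements planes 0, 1, 5 and 6, apparently
# undoing the inversions the S-box circuit leaves on those outputs.
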
6739# qhasm: xmm8 = *(int128 *)(c + 512)
6740# asm 1: movdqa 512(<c=int64#1),>xmm8=int6464#9
6741# asm 2: movdqa 512(<c=%rdi),>xmm8=%xmm8
6742movdqa 512(%rdi),%xmm8
6743
6744# qhasm: xmm9 = *(int128 *)(c + 528)
6745# asm 1: movdqa 528(<c=int64#1),>xmm9=int6464#10
6746# asm 2: movdqa 528(<c=%rdi),>xmm9=%xmm9
6747movdqa 528(%rdi),%xmm9
6748
6749# qhasm: xmm10 = *(int128 *)(c + 544)
6750# asm 1: movdqa 544(<c=int64#1),>xmm10=int6464#11
6751# asm 2: movdqa 544(<c=%rdi),>xmm10=%xmm10
6752movdqa 544(%rdi),%xmm10
6753
6754# qhasm: xmm11 = *(int128 *)(c + 560)
6755# asm 1: movdqa 560(<c=int64#1),>xmm11=int6464#12
6756# asm 2: movdqa 560(<c=%rdi),>xmm11=%xmm11
6757movdqa 560(%rdi),%xmm11
6758
6759# qhasm: xmm12 = *(int128 *)(c + 576)
6760# asm 1: movdqa 576(<c=int64#1),>xmm12=int6464#13
6761# asm 2: movdqa 576(<c=%rdi),>xmm12=%xmm12
6762movdqa 576(%rdi),%xmm12
6763
6764# qhasm: xmm13 = *(int128 *)(c + 592)
6765# asm 1: movdqa 592(<c=int64#1),>xmm13=int6464#14
6766# asm 2: movdqa 592(<c=%rdi),>xmm13=%xmm13
6767movdqa 592(%rdi),%xmm13
6768
6769# qhasm: xmm14 = *(int128 *)(c + 608)
6770# asm 1: movdqa 608(<c=int64#1),>xmm14=int6464#15
6771# asm 2: movdqa 608(<c=%rdi),>xmm14=%xmm14
6772movdqa 608(%rdi),%xmm14
6773
6774# qhasm: xmm15 = *(int128 *)(c + 624)
6775# asm 1: movdqa 624(<c=int64#1),>xmm15=int6464#16
6776# asm 2: movdqa 624(<c=%rdi),>xmm15=%xmm15
6777movdqa 624(%rdi),%xmm15
6778
6779# qhasm: xmm8 ^= ONE
6780# asm 1: pxor ONE,<xmm8=int6464#9
6781# asm 2: pxor ONE,<xmm8=%xmm8
6782pxor ONE,%xmm8
6783
6784# qhasm: xmm9 ^= ONE
6785# asm 1: pxor ONE,<xmm9=int6464#10
6786# asm 2: pxor ONE,<xmm9=%xmm9
6787pxor ONE,%xmm9
6788
6789# qhasm: xmm13 ^= ONE
6790# asm 1: pxor ONE,<xmm13=int6464#14
6791# asm 2: pxor ONE,<xmm13=%xmm13
6792pxor ONE,%xmm13
6793
6794# qhasm: xmm14 ^= ONE
6795# asm 1: pxor ONE,<xmm14=int6464#15
6796# asm 2: pxor ONE,<xmm14=%xmm14
6797pxor ONE,%xmm14
6798
6799# qhasm: xmm0 ^= xmm8
6800# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
6801# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
6802pxor %xmm8,%xmm0
6803
6804# qhasm: xmm1 ^= xmm9
6805# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
6806# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
6807pxor %xmm9,%xmm1
6808
6809# qhasm: xmm4 ^= xmm10
6810# asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5
6811# asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4
6812pxor %xmm10,%xmm4
6813
6814# qhasm: xmm6 ^= xmm11
6815# asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7
6816# asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6
6817pxor %xmm11,%xmm6
6818
6819# qhasm: xmm3 ^= xmm12
6820# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4
6821# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3
6822pxor %xmm12,%xmm3
6823
6824# qhasm: xmm7 ^= xmm13
6825# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
6826# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
6827pxor %xmm13,%xmm7
6828
6829# qhasm: xmm2 ^= xmm14
6830# asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3
6831# asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2
6832pxor %xmm14,%xmm2
6833
6834# qhasm: xmm5 ^= xmm15
6835# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
6836# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
6837pxor %xmm15,%xmm5
6838
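# note: three shift-by-8/xor passes follow, so each 32-bit lane of the
# state accumulates rk ^ (rk >> 8) ^ (rk >> 16) ^ (rk >> 24); in this
# byte layout that seems to realize the word recurrence of the key
# schedule (each word xors in all earlier words) on all planes at once.
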
6839# qhasm: uint32323232 xmm8 >>= 8
6840# asm 1: psrld $8,<xmm8=int6464#9
6841# asm 2: psrld $8,<xmm8=%xmm8
6842psrld $8,%xmm8
6843
6844# qhasm: uint32323232 xmm9 >>= 8
6845# asm 1: psrld $8,<xmm9=int6464#10
6846# asm 2: psrld $8,<xmm9=%xmm9
6847psrld $8,%xmm9
6848
6849# qhasm: uint32323232 xmm10 >>= 8
6850# asm 1: psrld $8,<xmm10=int6464#11
6851# asm 2: psrld $8,<xmm10=%xmm10
6852psrld $8,%xmm10
6853
6854# qhasm: uint32323232 xmm11 >>= 8
6855# asm 1: psrld $8,<xmm11=int6464#12
6856# asm 2: psrld $8,<xmm11=%xmm11
6857psrld $8,%xmm11
6858
6859# qhasm: uint32323232 xmm12 >>= 8
6860# asm 1: psrld $8,<xmm12=int6464#13
6861# asm 2: psrld $8,<xmm12=%xmm12
6862psrld $8,%xmm12
6863
6864# qhasm: uint32323232 xmm13 >>= 8
6865# asm 1: psrld $8,<xmm13=int6464#14
6866# asm 2: psrld $8,<xmm13=%xmm13
6867psrld $8,%xmm13
6868
6869# qhasm: uint32323232 xmm14 >>= 8
6870# asm 1: psrld $8,<xmm14=int6464#15
6871# asm 2: psrld $8,<xmm14=%xmm14
6872psrld $8,%xmm14
6873
6874# qhasm: uint32323232 xmm15 >>= 8
6875# asm 1: psrld $8,<xmm15=int6464#16
6876# asm 2: psrld $8,<xmm15=%xmm15
6877psrld $8,%xmm15
6878
6879# qhasm: xmm0 ^= xmm8
6880# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
6881# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
6882pxor %xmm8,%xmm0
6883
6884# qhasm: xmm1 ^= xmm9
6885# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
6886# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
6887pxor %xmm9,%xmm1
6888
6889# qhasm: xmm4 ^= xmm10
6890# asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5
6891# asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4
6892pxor %xmm10,%xmm4
6893
6894# qhasm: xmm6 ^= xmm11
6895# asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7
6896# asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6
6897pxor %xmm11,%xmm6
6898
6899# qhasm: xmm3 ^= xmm12
6900# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4
6901# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3
6902pxor %xmm12,%xmm3
6903
6904# qhasm: xmm7 ^= xmm13
6905# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
6906# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
6907pxor %xmm13,%xmm7
6908
6909# qhasm: xmm2 ^= xmm14
6910# asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3
6911# asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2
6912pxor %xmm14,%xmm2
6913
6914# qhasm: xmm5 ^= xmm15
6915# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
6916# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
6917pxor %xmm15,%xmm5
6918
6919# qhasm: uint32323232 xmm8 >>= 8
6920# asm 1: psrld $8,<xmm8=int6464#9
6921# asm 2: psrld $8,<xmm8=%xmm8
6922psrld $8,%xmm8
6923
6924# qhasm: uint32323232 xmm9 >>= 8
6925# asm 1: psrld $8,<xmm9=int6464#10
6926# asm 2: psrld $8,<xmm9=%xmm9
6927psrld $8,%xmm9
6928
6929# qhasm: uint32323232 xmm10 >>= 8
6930# asm 1: psrld $8,<xmm10=int6464#11
6931# asm 2: psrld $8,<xmm10=%xmm10
6932psrld $8,%xmm10
6933
6934# qhasm: uint32323232 xmm11 >>= 8
6935# asm 1: psrld $8,<xmm11=int6464#12
6936# asm 2: psrld $8,<xmm11=%xmm11
6937psrld $8,%xmm11
6938
6939# qhasm: uint32323232 xmm12 >>= 8
6940# asm 1: psrld $8,<xmm12=int6464#13
6941# asm 2: psrld $8,<xmm12=%xmm12
6942psrld $8,%xmm12
6943
6944# qhasm: uint32323232 xmm13 >>= 8
6945# asm 1: psrld $8,<xmm13=int6464#14
6946# asm 2: psrld $8,<xmm13=%xmm13
6947psrld $8,%xmm13
6948
6949# qhasm: uint32323232 xmm14 >>= 8
6950# asm 1: psrld $8,<xmm14=int6464#15
6951# asm 2: psrld $8,<xmm14=%xmm14
6952psrld $8,%xmm14
6953
6954# qhasm: uint32323232 xmm15 >>= 8
6955# asm 1: psrld $8,<xmm15=int6464#16
6956# asm 2: psrld $8,<xmm15=%xmm15
6957psrld $8,%xmm15
6958
6959# qhasm: xmm0 ^= xmm8
6960# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
6961# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
6962pxor %xmm8,%xmm0
6963
6964# qhasm: xmm1 ^= xmm9
6965# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
6966# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
6967pxor %xmm9,%xmm1
6968
6969# qhasm: xmm4 ^= xmm10
6970# asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5
6971# asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4
6972pxor %xmm10,%xmm4
6973
6974# qhasm: xmm6 ^= xmm11
6975# asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7
6976# asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6
6977pxor %xmm11,%xmm6
6978
6979# qhasm: xmm3 ^= xmm12
6980# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4
6981# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3
6982pxor %xmm12,%xmm3
6983
6984# qhasm: xmm7 ^= xmm13
6985# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
6986# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
6987pxor %xmm13,%xmm7
6988
6989# qhasm: xmm2 ^= xmm14
6990# asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3
6991# asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2
6992pxor %xmm14,%xmm2
6993
6994# qhasm: xmm5 ^= xmm15
6995# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
6996# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
6997pxor %xmm15,%xmm5
6998
6999# qhasm: uint32323232 xmm8 >>= 8
7000# asm 1: psrld $8,<xmm8=int6464#9
7001# asm 2: psrld $8,<xmm8=%xmm8
7002psrld $8,%xmm8
7003
7004# qhasm: uint32323232 xmm9 >>= 8
7005# asm 1: psrld $8,<xmm9=int6464#10
7006# asm 2: psrld $8,<xmm9=%xmm9
7007psrld $8,%xmm9
7008
7009# qhasm: uint32323232 xmm10 >>= 8
7010# asm 1: psrld $8,<xmm10=int6464#11
7011# asm 2: psrld $8,<xmm10=%xmm10
7012psrld $8,%xmm10
7013
7014# qhasm: uint32323232 xmm11 >>= 8
7015# asm 1: psrld $8,<xmm11=int6464#12
7016# asm 2: psrld $8,<xmm11=%xmm11
7017psrld $8,%xmm11
7018
7019# qhasm: uint32323232 xmm12 >>= 8
7020# asm 1: psrld $8,<xmm12=int6464#13
7021# asm 2: psrld $8,<xmm12=%xmm12
7022psrld $8,%xmm12
7023
7024# qhasm: uint32323232 xmm13 >>= 8
7025# asm 1: psrld $8,<xmm13=int6464#14
7026# asm 2: psrld $8,<xmm13=%xmm13
7027psrld $8,%xmm13
7028
7029# qhasm: uint32323232 xmm14 >>= 8
7030# asm 1: psrld $8,<xmm14=int6464#15
7031# asm 2: psrld $8,<xmm14=%xmm14
7032psrld $8,%xmm14
7033
7034# qhasm: uint32323232 xmm15 >>= 8
7035# asm 1: psrld $8,<xmm15=int6464#16
7036# asm 2: psrld $8,<xmm15=%xmm15
7037psrld $8,%xmm15
7038
7039# qhasm: xmm0 ^= xmm8
7040# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
7041# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
7042pxor %xmm8,%xmm0
7043
7044# qhasm: xmm1 ^= xmm9
7045# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
7046# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
7047pxor %xmm9,%xmm1
7048
7049# qhasm: xmm4 ^= xmm10
7050# asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5
7051# asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4
7052pxor %xmm10,%xmm4
7053
7054# qhasm: xmm6 ^= xmm11
7055# asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7
7056# asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6
7057pxor %xmm11,%xmm6
7058
7059# qhasm: xmm3 ^= xmm12
7060# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4
7061# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3
7062pxor %xmm12,%xmm3
7063
7064# qhasm: xmm7 ^= xmm13
7065# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
7066# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
7067pxor %xmm13,%xmm7
7068
7069# qhasm: xmm2 ^= xmm14
7070# asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3
7071# asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2
7072pxor %xmm14,%xmm2
7073
7074# qhasm: xmm5 ^= xmm15
7075# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
7076# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
7077pxor %xmm15,%xmm5
7078
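# note: the next round key is now complete and is stored back in
# bit-sliced form, again as eight 16-byte planes, at c + 640 .. c + 752.
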
7079# qhasm: *(int128 *)(c + 640) = xmm0
7080# asm 1: movdqa <xmm0=int6464#1,640(<c=int64#1)
7081# asm 2: movdqa <xmm0=%xmm0,640(<c=%rdi)
7082movdqa %xmm0,640(%rdi)
7083
7084# qhasm: *(int128 *)(c + 656) = xmm1
7085# asm 1: movdqa <xmm1=int6464#2,656(<c=int64#1)
7086# asm 2: movdqa <xmm1=%xmm1,656(<c=%rdi)
7087movdqa %xmm1,656(%rdi)
7088
7089# qhasm: *(int128 *)(c + 672) = xmm4
7090# asm 1: movdqa <xmm4=int6464#5,672(<c=int64#1)
7091# asm 2: movdqa <xmm4=%xmm4,672(<c=%rdi)
7092movdqa %xmm4,672(%rdi)
7093
7094# qhasm: *(int128 *)(c + 688) = xmm6
7095# asm 1: movdqa <xmm6=int6464#7,688(<c=int64#1)
7096# asm 2: movdqa <xmm6=%xmm6,688(<c=%rdi)
7097movdqa %xmm6,688(%rdi)
7098
7099# qhasm: *(int128 *)(c + 704) = xmm3
7100# asm 1: movdqa <xmm3=int6464#4,704(<c=int64#1)
7101# asm 2: movdqa <xmm3=%xmm3,704(<c=%rdi)
7102movdqa %xmm3,704(%rdi)
7103
7104# qhasm: *(int128 *)(c + 720) = xmm7
7105# asm 1: movdqa <xmm7=int6464#8,720(<c=int64#1)
7106# asm 2: movdqa <xmm7=%xmm7,720(<c=%rdi)
7107movdqa %xmm7,720(%rdi)
7108
7109# qhasm: *(int128 *)(c + 736) = xmm2
7110# asm 1: movdqa <xmm2=int6464#3,736(<c=int64#1)
7111# asm 2: movdqa <xmm2=%xmm2,736(<c=%rdi)
7112movdqa %xmm2,736(%rdi)
7113
7114# qhasm: *(int128 *)(c + 752) = xmm5
7115# asm 1: movdqa <xmm5=int6464#6,752(<c=int64#1)
7116# asm 2: movdqa <xmm5=%xmm5,752(<c=%rdi)
7117movdqa %xmm5,752(%rdi)
7118
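# note: after the store, ONE complements planes 0, 1, 5 and 6 of the
# in-register copy and ROTB rotates the bytes of each lane; the same
# ONE fixup is applied above whenever a stored round key is reloaded.
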
7119# qhasm: xmm0 ^= ONE
7120# asm 1: pxor ONE,<xmm0=int6464#1
7121# asm 2: pxor ONE,<xmm0=%xmm0
7122pxor ONE,%xmm0
7123
7124# qhasm: xmm1 ^= ONE
7125# asm 1: pxor ONE,<xmm1=int6464#2
7126# asm 2: pxor ONE,<xmm1=%xmm1
7127pxor ONE,%xmm1
7128
7129# qhasm: xmm7 ^= ONE
7130# asm 1: pxor ONE,<xmm7=int6464#8
7131# asm 2: pxor ONE,<xmm7=%xmm7
7132pxor ONE,%xmm7
7133
7134# qhasm: xmm2 ^= ONE
7135# asm 1: pxor ONE,<xmm2=int6464#3
7136# asm 2: pxor ONE,<xmm2=%xmm2
7137pxor ONE,%xmm2
7138
7139# qhasm: shuffle bytes of xmm0 by ROTB
7140# asm 1: pshufb ROTB,<xmm0=int6464#1
7141# asm 2: pshufb ROTB,<xmm0=%xmm0
7142pshufb ROTB,%xmm0
7143
7144# qhasm: shuffle bytes of xmm1 by ROTB
7145# asm 1: pshufb ROTB,<xmm1=int6464#2
7146# asm 2: pshufb ROTB,<xmm1=%xmm1
7147pshufb ROTB,%xmm1
7148
7149# qhasm: shuffle bytes of xmm4 by ROTB
7150# asm 1: pshufb ROTB,<xmm4=int6464#5
7151# asm 2: pshufb ROTB,<xmm4=%xmm4
7152pshufb ROTB,%xmm4
7153
7154# qhasm: shuffle bytes of xmm6 by ROTB
7155# asm 1: pshufb ROTB,<xmm6=int6464#7
7156# asm 2: pshufb ROTB,<xmm6=%xmm6
7157pshufb ROTB,%xmm6
7158
7159# qhasm: shuffle bytes of xmm3 by ROTB
7160# asm 1: pshufb ROTB,<xmm3=int6464#4
7161# asm 2: pshufb ROTB,<xmm3=%xmm3
7162pshufb ROTB,%xmm3
7163
7164# qhasm: shuffle bytes of xmm7 by ROTB
7165# asm 1: pshufb ROTB,<xmm7=int6464#8
7166# asm 2: pshufb ROTB,<xmm7=%xmm7
7167pshufb ROTB,%xmm7
7168
7169# qhasm: shuffle bytes of xmm2 by ROTB
7170# asm 1: pshufb ROTB,<xmm2=int6464#3
7171# asm 2: pshufb ROTB,<xmm2=%xmm2
7172pshufb ROTB,%xmm2
7173
7174# qhasm: shuffle bytes of xmm5 by ROTB
7175# asm 1: pshufb ROTB,<xmm5=int6464#6
7176# asm 2: pshufb ROTB,<xmm5=%xmm5
7177pshufb ROTB,%xmm5
7178
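# note: the pure pxor chain below (no and/or gates) reads as the linear
# input transformation feeding the next bit-sliced S-box evaluation.
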
7179# qhasm: xmm7 ^= xmm2
7180# asm 1: pxor <xmm2=int6464#3,<xmm7=int6464#8
7181# asm 2: pxor <xmm2=%xmm2,<xmm7=%xmm7
7182pxor %xmm2,%xmm7
7183
7184# qhasm: xmm4 ^= xmm1
7185# asm 1: pxor <xmm1=int6464#2,<xmm4=int6464#5
7186# asm 2: pxor <xmm1=%xmm1,<xmm4=%xmm4
7187pxor %xmm1,%xmm4
7188
7189# qhasm: xmm7 ^= xmm0
7190# asm 1: pxor <xmm0=int6464#1,<xmm7=int6464#8
7191# asm 2: pxor <xmm0=%xmm0,<xmm7=%xmm7
7192pxor %xmm0,%xmm7
7193
7194# qhasm: xmm2 ^= xmm4
7195# asm 1: pxor <xmm4=int6464#5,<xmm2=int6464#3
7196# asm 2: pxor <xmm4=%xmm4,<xmm2=%xmm2
7197pxor %xmm4,%xmm2
7198
7199# qhasm: xmm6 ^= xmm0
7200# asm 1: pxor <xmm0=int6464#1,<xmm6=int6464#7
7201# asm 2: pxor <xmm0=%xmm0,<xmm6=%xmm6
7202pxor %xmm0,%xmm6
7203
7204# qhasm: xmm2 ^= xmm6
7205# asm 1: pxor <xmm6=int6464#7,<xmm2=int6464#3
7206# asm 2: pxor <xmm6=%xmm6,<xmm2=%xmm2
7207pxor %xmm6,%xmm2
7208
7209# qhasm: xmm6 ^= xmm5
7210# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
7211# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
7212pxor %xmm5,%xmm6
7213
7214# qhasm: xmm6 ^= xmm3
7215# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
7216# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
7217pxor %xmm3,%xmm6
7218
7219# qhasm: xmm5 ^= xmm7
7220# asm 1: pxor <xmm7=int6464#8,<xmm5=int6464#6
7221# asm 2: pxor <xmm7=%xmm7,<xmm5=%xmm5
7222pxor %xmm7,%xmm5
7223
7224# qhasm: xmm6 ^= xmm1
7225# asm 1: pxor <xmm1=int6464#2,<xmm6=int6464#7
7226# asm 2: pxor <xmm1=%xmm1,<xmm6=%xmm6
7227pxor %xmm1,%xmm6
7228
7229# qhasm: xmm3 ^= xmm7
7230# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4
7231# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3
7232pxor %xmm7,%xmm3
7233
7234# qhasm: xmm4 ^= xmm5
7235# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
7236# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
7237pxor %xmm5,%xmm4
7238
7239# qhasm: xmm1 ^= xmm7
7240# asm 1: pxor <xmm7=int6464#8,<xmm1=int6464#2
7241# asm 2: pxor <xmm7=%xmm7,<xmm1=%xmm1
7242pxor %xmm7,%xmm1
7243
7244# qhasm: xmm11 = xmm5
7245# asm 1: movdqa <xmm5=int6464#6,>xmm11=int6464#9
7246# asm 2: movdqa <xmm5=%xmm5,>xmm11=%xmm8
7247movdqa %xmm5,%xmm8
7248
7249# qhasm: xmm10 = xmm1
7250# asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10
7251# asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9
7252movdqa %xmm1,%xmm9
7253
7254# qhasm: xmm9 = xmm7
7255# asm 1: movdqa <xmm7=int6464#8,>xmm9=int6464#11
7256# asm 2: movdqa <xmm7=%xmm7,>xmm9=%xmm10
7257movdqa %xmm7,%xmm10
7258
7259# qhasm: xmm13 = xmm4
7260# asm 1: movdqa <xmm4=int6464#5,>xmm13=int6464#12
7261# asm 2: movdqa <xmm4=%xmm4,>xmm13=%xmm11
7262movdqa %xmm4,%xmm11
7263
7264# qhasm: xmm12 = xmm2
7265# asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#13
7266# asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm12
7267movdqa %xmm2,%xmm12
7268
7269# qhasm: xmm11 ^= xmm3
7270# asm 1: pxor <xmm3=int6464#4,<xmm11=int6464#9
7271# asm 2: pxor <xmm3=%xmm3,<xmm11=%xmm8
7272pxor %xmm3,%xmm8
7273
7274# qhasm: xmm10 ^= xmm4
7275# asm 1: pxor <xmm4=int6464#5,<xmm10=int6464#10
7276# asm 2: pxor <xmm4=%xmm4,<xmm10=%xmm9
7277pxor %xmm4,%xmm9
7278
7279# qhasm: xmm9 ^= xmm6
7280# asm 1: pxor <xmm6=int6464#7,<xmm9=int6464#11
7281# asm 2: pxor <xmm6=%xmm6,<xmm9=%xmm10
7282pxor %xmm6,%xmm10
7283
7284# qhasm: xmm13 ^= xmm3
7285# asm 1: pxor <xmm3=int6464#4,<xmm13=int6464#12
7286# asm 2: pxor <xmm3=%xmm3,<xmm13=%xmm11
7287pxor %xmm3,%xmm11
7288
7289# qhasm: xmm12 ^= xmm0
7290# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13
7291# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12
7292pxor %xmm0,%xmm12
7293
7294# qhasm: xmm14 = xmm11
7295# asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14
7296# asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13
7297movdqa %xmm8,%xmm13
7298
7299# qhasm: xmm8 = xmm10
7300# asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15
7301# asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14
7302movdqa %xmm9,%xmm14
7303
7304# qhasm: xmm15 = xmm11
7305# asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16
7306# asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15
7307movdqa %xmm8,%xmm15
7308
7309# qhasm: xmm10 |= xmm9
7310# asm 1: por <xmm9=int6464#11,<xmm10=int6464#10
7311# asm 2: por <xmm9=%xmm10,<xmm10=%xmm9
7312por %xmm10,%xmm9
7313
7314# qhasm: xmm11 |= xmm12
7315# asm 1: por <xmm12=int6464#13,<xmm11=int6464#9
7316# asm 2: por <xmm12=%xmm12,<xmm11=%xmm8
7317por %xmm12,%xmm8
7318
7319# qhasm: xmm15 ^= xmm8
7320# asm 1: pxor <xmm8=int6464#15,<xmm15=int6464#16
7321# asm 2: pxor <xmm8=%xmm14,<xmm15=%xmm15
7322pxor %xmm14,%xmm15
7323
7324# qhasm: xmm14 &= xmm12
7325# asm 1: pand <xmm12=int6464#13,<xmm14=int6464#14
7326# asm 2: pand <xmm12=%xmm12,<xmm14=%xmm13
7327pand %xmm12,%xmm13
7328
7329# qhasm: xmm8 &= xmm9
7330# asm 1: pand <xmm9=int6464#11,<xmm8=int6464#15
7331# asm 2: pand <xmm9=%xmm10,<xmm8=%xmm14
7332pand %xmm10,%xmm14
7333
7334# qhasm: xmm12 ^= xmm9
7335# asm 1: pxor <xmm9=int6464#11,<xmm12=int6464#13
7336# asm 2: pxor <xmm9=%xmm10,<xmm12=%xmm12
7337pxor %xmm10,%xmm12
7338
7339# qhasm: xmm15 &= xmm12
7340# asm 1: pand <xmm12=int6464#13,<xmm15=int6464#16
7341# asm 2: pand <xmm12=%xmm12,<xmm15=%xmm15
7342pand %xmm12,%xmm15
7343
7344# qhasm: xmm12 = xmm6
7345# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#11
7346# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm10
7347movdqa %xmm6,%xmm10
7348
7349# qhasm: xmm12 ^= xmm0
7350# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#11
7351# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm10
7352pxor %xmm0,%xmm10
7353
7354# qhasm: xmm13 &= xmm12
7355# asm 1: pand <xmm12=int6464#11,<xmm13=int6464#12
7356# asm 2: pand <xmm12=%xmm10,<xmm13=%xmm11
7357pand %xmm10,%xmm11
7358
7359# qhasm: xmm11 ^= xmm13
7360# asm 1: pxor <xmm13=int6464#12,<xmm11=int6464#9
7361# asm 2: pxor <xmm13=%xmm11,<xmm11=%xmm8
7362pxor %xmm11,%xmm8
7363
7364# qhasm: xmm10 ^= xmm13
7365# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
7366# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
7367pxor %xmm11,%xmm9
7368
7369# qhasm: xmm13 = xmm5
7370# asm 1: movdqa <xmm5=int6464#6,>xmm13=int6464#11
7371# asm 2: movdqa <xmm5=%xmm5,>xmm13=%xmm10
7372movdqa %xmm5,%xmm10
7373
7374# qhasm: xmm13 ^= xmm1
7375# asm 1: pxor <xmm1=int6464#2,<xmm13=int6464#11
7376# asm 2: pxor <xmm1=%xmm1,<xmm13=%xmm10
7377pxor %xmm1,%xmm10
7378
7379# qhasm: xmm12 = xmm7
7380# asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#12
7381# asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm11
7382movdqa %xmm7,%xmm11
7383
7384# qhasm: xmm9 = xmm13
7385# asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13
7386# asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12
7387movdqa %xmm10,%xmm12
7388
7389# qhasm: xmm12 ^= xmm2
7390# asm 1: pxor <xmm2=int6464#3,<xmm12=int6464#12
7391# asm 2: pxor <xmm2=%xmm2,<xmm12=%xmm11
7392pxor %xmm2,%xmm11
7393
7394# qhasm: xmm9 |= xmm12
7395# asm 1: por <xmm12=int6464#12,<xmm9=int6464#13
7396# asm 2: por <xmm12=%xmm11,<xmm9=%xmm12
7397por %xmm11,%xmm12
7398
7399# qhasm: xmm13 &= xmm12
7400# asm 1: pand <xmm12=int6464#12,<xmm13=int6464#11
7401# asm 2: pand <xmm12=%xmm11,<xmm13=%xmm10
7402pand %xmm11,%xmm10
7403
7404# qhasm: xmm8 ^= xmm13
7405# asm 1: pxor <xmm13=int6464#11,<xmm8=int6464#15
7406# asm 2: pxor <xmm13=%xmm10,<xmm8=%xmm14
7407pxor %xmm10,%xmm14
7408
7409# qhasm: xmm11 ^= xmm15
7410# asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#9
7411# asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm8
7412pxor %xmm15,%xmm8
7413
7414# qhasm: xmm10 ^= xmm14
7415# asm 1: pxor <xmm14=int6464#14,<xmm10=int6464#10
7416# asm 2: pxor <xmm14=%xmm13,<xmm10=%xmm9
7417pxor %xmm13,%xmm9
7418
7419# qhasm: xmm9 ^= xmm15
7420# asm 1: pxor <xmm15=int6464#16,<xmm9=int6464#13
7421# asm 2: pxor <xmm15=%xmm15,<xmm9=%xmm12
7422pxor %xmm15,%xmm12
7423
7424# qhasm: xmm8 ^= xmm14
7425# asm 1: pxor <xmm14=int6464#14,<xmm8=int6464#15
7426# asm 2: pxor <xmm14=%xmm13,<xmm8=%xmm14
7427pxor %xmm13,%xmm14
7428
7429# qhasm: xmm9 ^= xmm14
7430# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
7431# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
7432pxor %xmm13,%xmm12
7433
7434# qhasm: xmm12 = xmm4
7435# asm 1: movdqa <xmm4=int6464#5,>xmm12=int6464#11
7436# asm 2: movdqa <xmm4=%xmm4,>xmm12=%xmm10
7437movdqa %xmm4,%xmm10
7438
7439# qhasm: xmm13 = xmm3
7440# asm 1: movdqa <xmm3=int6464#4,>xmm13=int6464#12
7441# asm 2: movdqa <xmm3=%xmm3,>xmm13=%xmm11
7442movdqa %xmm3,%xmm11
7443
7444# qhasm: xmm14 = xmm1
7445# asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14
7446# asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13
7447movdqa %xmm1,%xmm13
7448
7449# qhasm: xmm15 = xmm5
7450# asm 1: movdqa <xmm5=int6464#6,>xmm15=int6464#16
7451# asm 2: movdqa <xmm5=%xmm5,>xmm15=%xmm15
7452movdqa %xmm5,%xmm15
7453
7454# qhasm: xmm12 &= xmm6
7455# asm 1: pand <xmm6=int6464#7,<xmm12=int6464#11
7456# asm 2: pand <xmm6=%xmm6,<xmm12=%xmm10
7457pand %xmm6,%xmm10
7458
7459# qhasm: xmm13 &= xmm0
7460# asm 1: pand <xmm0=int6464#1,<xmm13=int6464#12
7461# asm 2: pand <xmm0=%xmm0,<xmm13=%xmm11
7462pand %xmm0,%xmm11
7463
7464# qhasm: xmm14 &= xmm7
7465# asm 1: pand <xmm7=int6464#8,<xmm14=int6464#14
7466# asm 2: pand <xmm7=%xmm7,<xmm14=%xmm13
7467pand %xmm7,%xmm13
7468
7469# qhasm: xmm15 |= xmm2
7470# asm 1: por <xmm2=int6464#3,<xmm15=int6464#16
7471# asm 2: por <xmm2=%xmm2,<xmm15=%xmm15
7472por %xmm2,%xmm15
7473
7474# qhasm: xmm11 ^= xmm12
7475# asm 1: pxor <xmm12=int6464#11,<xmm11=int6464#9
7476# asm 2: pxor <xmm12=%xmm10,<xmm11=%xmm8
7477pxor %xmm10,%xmm8
7478
7479# qhasm: xmm10 ^= xmm13
7480# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
7481# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
7482pxor %xmm11,%xmm9
7483
7484# qhasm: xmm9 ^= xmm14
7485# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
7486# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
7487pxor %xmm13,%xmm12
7488
7489# qhasm: xmm8 ^= xmm15
7490# asm 1: pxor <xmm15=int6464#16,<xmm8=int6464#15
7491# asm 2: pxor <xmm15=%xmm15,<xmm8=%xmm14
7492pxor %xmm15,%xmm14
7493
7494# qhasm: xmm12 = xmm11
7495# asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11
7496# asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10
7497movdqa %xmm8,%xmm10
7498
7499# qhasm: xmm12 ^= xmm10
7500# asm 1: pxor <xmm10=int6464#10,<xmm12=int6464#11
7501# asm 2: pxor <xmm10=%xmm9,<xmm12=%xmm10
7502pxor %xmm9,%xmm10
7503
7504# qhasm: xmm11 &= xmm9
7505# asm 1: pand <xmm9=int6464#13,<xmm11=int6464#9
7506# asm 2: pand <xmm9=%xmm12,<xmm11=%xmm8
7507pand %xmm12,%xmm8
7508
7509# qhasm: xmm14 = xmm8
7510# asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12
7511# asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11
7512movdqa %xmm14,%xmm11
7513
7514# qhasm: xmm14 ^= xmm11
7515# asm 1: pxor <xmm11=int6464#9,<xmm14=int6464#12
7516# asm 2: pxor <xmm11=%xmm8,<xmm14=%xmm11
7517pxor %xmm8,%xmm11
7518
7519# qhasm: xmm15 = xmm12
7520# asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14
7521# asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13
7522movdqa %xmm10,%xmm13
7523
7524# qhasm: xmm15 &= xmm14
7525# asm 1: pand <xmm14=int6464#12,<xmm15=int6464#14
7526# asm 2: pand <xmm14=%xmm11,<xmm15=%xmm13
7527pand %xmm11,%xmm13
7528
7529# qhasm: xmm15 ^= xmm10
7530# asm 1: pxor <xmm10=int6464#10,<xmm15=int6464#14
7531# asm 2: pxor <xmm10=%xmm9,<xmm15=%xmm13
7532pxor %xmm9,%xmm13
7533
7534# qhasm: xmm13 = xmm9
7535# asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16
7536# asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15
7537movdqa %xmm12,%xmm15
7538
7539# qhasm: xmm13 ^= xmm8
7540# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
7541# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
7542pxor %xmm14,%xmm15
7543
7544# qhasm: xmm11 ^= xmm10
7545# asm 1: pxor <xmm10=int6464#10,<xmm11=int6464#9
7546# asm 2: pxor <xmm10=%xmm9,<xmm11=%xmm8
7547pxor %xmm9,%xmm8
7548
7549# qhasm: xmm13 &= xmm11
7550# asm 1: pand <xmm11=int6464#9,<xmm13=int6464#16
7551# asm 2: pand <xmm11=%xmm8,<xmm13=%xmm15
7552pand %xmm8,%xmm15
7553
7554# qhasm: xmm13 ^= xmm8
7555# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
7556# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
7557pxor %xmm14,%xmm15
7558
7559# qhasm: xmm9 ^= xmm13
7560# asm 1: pxor <xmm13=int6464#16,<xmm9=int6464#13
7561# asm 2: pxor <xmm13=%xmm15,<xmm9=%xmm12
7562pxor %xmm15,%xmm12
7563
7564# qhasm: xmm10 = xmm14
7565# asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9
7566# asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8
7567movdqa %xmm11,%xmm8
7568
7569# qhasm: xmm10 ^= xmm13
7570# asm 1: pxor <xmm13=int6464#16,<xmm10=int6464#9
7571# asm 2: pxor <xmm13=%xmm15,<xmm10=%xmm8
7572pxor %xmm15,%xmm8
7573
7574# qhasm: xmm10 &= xmm8
7575# asm 1: pand <xmm8=int6464#15,<xmm10=int6464#9
7576# asm 2: pand <xmm8=%xmm14,<xmm10=%xmm8
7577pand %xmm14,%xmm8
7578
7579# qhasm: xmm9 ^= xmm10
7580# asm 1: pxor <xmm10=int6464#9,<xmm9=int6464#13
7581# asm 2: pxor <xmm10=%xmm8,<xmm9=%xmm12
7582pxor %xmm8,%xmm12
7583
7584# qhasm: xmm14 ^= xmm10
7585# asm 1: pxor <xmm10=int6464#9,<xmm14=int6464#12
7586# asm 2: pxor <xmm10=%xmm8,<xmm14=%xmm11
7587pxor %xmm8,%xmm11
7588
7589# qhasm: xmm14 &= xmm15
7590# asm 1: pand <xmm15=int6464#14,<xmm14=int6464#12
7591# asm 2: pand <xmm15=%xmm13,<xmm14=%xmm11
7592pand %xmm13,%xmm11
7593
7594# qhasm: xmm14 ^= xmm12
7595# asm 1: pxor <xmm12=int6464#11,<xmm14=int6464#12
7596# asm 2: pxor <xmm12=%xmm10,<xmm14=%xmm11
7597pxor %xmm10,%xmm11
7598
7599# qhasm: xmm12 = xmm2
7600# asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#9
7601# asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm8
7602movdqa %xmm2,%xmm8
7603
7604# qhasm: xmm8 = xmm7
7605# asm 1: movdqa <xmm7=int6464#8,>xmm8=int6464#10
7606# asm 2: movdqa <xmm7=%xmm7,>xmm8=%xmm9
7607movdqa %xmm7,%xmm9
7608
7609# qhasm: xmm10 = xmm15
7610# asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11
7611# asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10
7612movdqa %xmm13,%xmm10
7613
7614# qhasm: xmm10 ^= xmm14
7615# asm 1: pxor <xmm14=int6464#12,<xmm10=int6464#11
7616# asm 2: pxor <xmm14=%xmm11,<xmm10=%xmm10
7617pxor %xmm11,%xmm10
7618
7619# qhasm: xmm10 &= xmm2
7620# asm 1: pand <xmm2=int6464#3,<xmm10=int6464#11
7621# asm 2: pand <xmm2=%xmm2,<xmm10=%xmm10
7622pand %xmm2,%xmm10
7623
7624# qhasm: xmm2 ^= xmm7
7625# asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3
7626# asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2
7627pxor %xmm7,%xmm2
7628
7629# qhasm: xmm2 &= xmm14
7630# asm 1: pand <xmm14=int6464#12,<xmm2=int6464#3
7631# asm 2: pand <xmm14=%xmm11,<xmm2=%xmm2
7632pand %xmm11,%xmm2
7633
7634# qhasm: xmm7 &= xmm15
7635# asm 1: pand <xmm15=int6464#14,<xmm7=int6464#8
7636# asm 2: pand <xmm15=%xmm13,<xmm7=%xmm7
7637pand %xmm13,%xmm7
7638
7639# qhasm: xmm2 ^= xmm7
7640# asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3
7641# asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2
7642pxor %xmm7,%xmm2
7643
7644# qhasm: xmm7 ^= xmm10
7645# asm 1: pxor <xmm10=int6464#11,<xmm7=int6464#8
7646# asm 2: pxor <xmm10=%xmm10,<xmm7=%xmm7
7647pxor %xmm10,%xmm7
7648
7649# qhasm: xmm12 ^= xmm0
7650# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#9
7651# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm8
7652pxor %xmm0,%xmm8
7653
7654# qhasm: xmm8 ^= xmm6
7655# asm 1: pxor <xmm6=int6464#7,<xmm8=int6464#10
7656# asm 2: pxor <xmm6=%xmm6,<xmm8=%xmm9
7657pxor %xmm6,%xmm9
7658
7659# qhasm: xmm15 ^= xmm13
7660# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
7661# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
7662pxor %xmm15,%xmm13
7663
7664# qhasm: xmm14 ^= xmm9
7665# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
7666# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
7667pxor %xmm12,%xmm11
7668
7669# qhasm: xmm11 = xmm15
7670# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
7671# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
7672movdqa %xmm13,%xmm10
7673
7674# qhasm: xmm11 ^= xmm14
7675# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
7676# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
7677pxor %xmm11,%xmm10
7678
7679# qhasm: xmm11 &= xmm12
7680# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
7681# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
7682pand %xmm8,%xmm10
7683
7684# qhasm: xmm12 ^= xmm8
7685# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
7686# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
7687pxor %xmm9,%xmm8
7688
7689# qhasm: xmm12 &= xmm14
7690# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
7691# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
7692pand %xmm11,%xmm8
7693
7694# qhasm: xmm8 &= xmm15
7695# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
7696# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
7697pand %xmm13,%xmm9
7698
7699# qhasm: xmm8 ^= xmm12
7700# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
7701# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
7702pxor %xmm8,%xmm9
7703
7704# qhasm: xmm12 ^= xmm11
7705# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
7706# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
7707pxor %xmm10,%xmm8
7708
7709# qhasm: xmm10 = xmm13
7710# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
7711# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
7712movdqa %xmm15,%xmm10
7713
7714# qhasm: xmm10 ^= xmm9
7715# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
7716# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
7717pxor %xmm12,%xmm10
7718
7719# qhasm: xmm10 &= xmm0
7720# asm 1: pand <xmm0=int6464#1,<xmm10=int6464#11
7721# asm 2: pand <xmm0=%xmm0,<xmm10=%xmm10
7722pand %xmm0,%xmm10
7723
7724# qhasm: xmm0 ^= xmm6
7725# asm 1: pxor <xmm6=int6464#7,<xmm0=int6464#1
7726# asm 2: pxor <xmm6=%xmm6,<xmm0=%xmm0
7727pxor %xmm6,%xmm0
7728
7729# qhasm: xmm0 &= xmm9
7730# asm 1: pand <xmm9=int6464#13,<xmm0=int6464#1
7731# asm 2: pand <xmm9=%xmm12,<xmm0=%xmm0
7732pand %xmm12,%xmm0
7733
7734# qhasm: xmm6 &= xmm13
7735# asm 1: pand <xmm13=int6464#16,<xmm6=int6464#7
7736# asm 2: pand <xmm13=%xmm15,<xmm6=%xmm6
7737pand %xmm15,%xmm6
7738
7739# qhasm: xmm0 ^= xmm6
7740# asm 1: pxor <xmm6=int6464#7,<xmm0=int6464#1
7741# asm 2: pxor <xmm6=%xmm6,<xmm0=%xmm0
7742pxor %xmm6,%xmm0
7743
7744# qhasm: xmm6 ^= xmm10
7745# asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7
7746# asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6
7747pxor %xmm10,%xmm6
7748
7749# qhasm: xmm2 ^= xmm12
7750# asm 1: pxor <xmm12=int6464#9,<xmm2=int6464#3
7751# asm 2: pxor <xmm12=%xmm8,<xmm2=%xmm2
7752pxor %xmm8,%xmm2
7753
7754# qhasm: xmm0 ^= xmm12
7755# asm 1: pxor <xmm12=int6464#9,<xmm0=int6464#1
7756# asm 2: pxor <xmm12=%xmm8,<xmm0=%xmm0
7757pxor %xmm8,%xmm0
7758
7759# qhasm: xmm7 ^= xmm8
7760# asm 1: pxor <xmm8=int6464#10,<xmm7=int6464#8
7761# asm 2: pxor <xmm8=%xmm9,<xmm7=%xmm7
7762pxor %xmm9,%xmm7
7763
7764# qhasm: xmm6 ^= xmm8
7765# asm 1: pxor <xmm8=int6464#10,<xmm6=int6464#7
7766# asm 2: pxor <xmm8=%xmm9,<xmm6=%xmm6
7767pxor %xmm9,%xmm6
7768
7769# qhasm: xmm12 = xmm5
7770# asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#9
7771# asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm8
7772movdqa %xmm5,%xmm8
7773
7774# qhasm: xmm8 = xmm1
7775# asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10
7776# asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9
7777movdqa %xmm1,%xmm9
7778
7779# qhasm: xmm12 ^= xmm3
7780# asm 1: pxor <xmm3=int6464#4,<xmm12=int6464#9
7781# asm 2: pxor <xmm3=%xmm3,<xmm12=%xmm8
7782pxor %xmm3,%xmm8
7783
7784# qhasm: xmm8 ^= xmm4
7785# asm 1: pxor <xmm4=int6464#5,<xmm8=int6464#10
7786# asm 2: pxor <xmm4=%xmm4,<xmm8=%xmm9
7787pxor %xmm4,%xmm9
7788
7789# qhasm: xmm11 = xmm15
7790# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
7791# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
7792movdqa %xmm13,%xmm10
7793
7794# qhasm: xmm11 ^= xmm14
7795# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
7796# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
7797pxor %xmm11,%xmm10
7798
7799# qhasm: xmm11 &= xmm12
7800# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
7801# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
7802pand %xmm8,%xmm10
7803
7804# qhasm: xmm12 ^= xmm8
7805# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
7806# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
7807pxor %xmm9,%xmm8
7808
7809# qhasm: xmm12 &= xmm14
7810# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
7811# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
7812pand %xmm11,%xmm8
7813
7814# qhasm: xmm8 &= xmm15
7815# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
7816# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
7817pand %xmm13,%xmm9
7818
7819# qhasm: xmm8 ^= xmm12
7820# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
7821# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
7822pxor %xmm8,%xmm9
7823
7824# qhasm: xmm12 ^= xmm11
7825# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
7826# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
7827pxor %xmm10,%xmm8
7828
7829# qhasm: xmm10 = xmm13
7830# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
7831# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
7832movdqa %xmm15,%xmm10
7833
7834# qhasm: xmm10 ^= xmm9
7835# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
7836# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
7837pxor %xmm12,%xmm10
7838
7839# qhasm: xmm10 &= xmm3
7840# asm 1: pand <xmm3=int6464#4,<xmm10=int6464#11
7841# asm 2: pand <xmm3=%xmm3,<xmm10=%xmm10
7842pand %xmm3,%xmm10
7843
7844# qhasm: xmm3 ^= xmm4
7845# asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4
7846# asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3
7847pxor %xmm4,%xmm3
7848
7849# qhasm: xmm3 &= xmm9
7850# asm 1: pand <xmm9=int6464#13,<xmm3=int6464#4
7851# asm 2: pand <xmm9=%xmm12,<xmm3=%xmm3
7852pand %xmm12,%xmm3
7853
7854# qhasm: xmm4 &= xmm13
7855# asm 1: pand <xmm13=int6464#16,<xmm4=int6464#5
7856# asm 2: pand <xmm13=%xmm15,<xmm4=%xmm4
7857pand %xmm15,%xmm4
7858
7859# qhasm: xmm3 ^= xmm4
7860# asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4
7861# asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3
7862pxor %xmm4,%xmm3
7863
7864# qhasm: xmm4 ^= xmm10
7865# asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5
7866# asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4
7867pxor %xmm10,%xmm4
7868
7869# qhasm: xmm15 ^= xmm13
7870# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
7871# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
7872pxor %xmm15,%xmm13
7873
7874# qhasm: xmm14 ^= xmm9
7875# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
7876# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
7877pxor %xmm12,%xmm11
7878
7879# qhasm: xmm11 = xmm15
7880# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
7881# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
7882movdqa %xmm13,%xmm10
7883
7884# qhasm: xmm11 ^= xmm14
7885# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
7886# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
7887pxor %xmm11,%xmm10
7888
7889# qhasm: xmm11 &= xmm5
7890# asm 1: pand <xmm5=int6464#6,<xmm11=int6464#11
7891# asm 2: pand <xmm5=%xmm5,<xmm11=%xmm10
7892pand %xmm5,%xmm10
7893
7894# qhasm: xmm5 ^= xmm1
7895# asm 1: pxor <xmm1=int6464#2,<xmm5=int6464#6
7896# asm 2: pxor <xmm1=%xmm1,<xmm5=%xmm5
7897pxor %xmm1,%xmm5
7898
7899# qhasm: xmm5 &= xmm14
7900# asm 1: pand <xmm14=int6464#12,<xmm5=int6464#6
7901# asm 2: pand <xmm14=%xmm11,<xmm5=%xmm5
7902pand %xmm11,%xmm5
7903
7904# qhasm: xmm1 &= xmm15
7905# asm 1: pand <xmm15=int6464#14,<xmm1=int6464#2
7906# asm 2: pand <xmm15=%xmm13,<xmm1=%xmm1
7907pand %xmm13,%xmm1
7908
7909# qhasm: xmm5 ^= xmm1
7910# asm 1: pxor <xmm1=int6464#2,<xmm5=int6464#6
7911# asm 2: pxor <xmm1=%xmm1,<xmm5=%xmm5
7912pxor %xmm1,%xmm5
7913
7914# qhasm: xmm1 ^= xmm11
7915# asm 1: pxor <xmm11=int6464#11,<xmm1=int6464#2
7916# asm 2: pxor <xmm11=%xmm10,<xmm1=%xmm1
7917pxor %xmm10,%xmm1
7918
7919# qhasm: xmm5 ^= xmm12
7920# asm 1: pxor <xmm12=int6464#9,<xmm5=int6464#6
7921# asm 2: pxor <xmm12=%xmm8,<xmm5=%xmm5
7922pxor %xmm8,%xmm5
7923
7924# qhasm: xmm3 ^= xmm12
7925# asm 1: pxor <xmm12=int6464#9,<xmm3=int6464#4
7926# asm 2: pxor <xmm12=%xmm8,<xmm3=%xmm3
7927pxor %xmm8,%xmm3
7928
7929# qhasm: xmm1 ^= xmm8
7930# asm 1: pxor <xmm8=int6464#10,<xmm1=int6464#2
7931# asm 2: pxor <xmm8=%xmm9,<xmm1=%xmm1
7932pxor %xmm9,%xmm1
7933
7934# qhasm: xmm4 ^= xmm8
7935# asm 1: pxor <xmm8=int6464#10,<xmm4=int6464#5
7936# asm 2: pxor <xmm8=%xmm9,<xmm4=%xmm4
7937pxor %xmm9,%xmm4
7938
7939# qhasm: xmm5 ^= xmm0
7940# asm 1: pxor <xmm0=int6464#1,<xmm5=int6464#6
7941# asm 2: pxor <xmm0=%xmm0,<xmm5=%xmm5
7942pxor %xmm0,%xmm5
7943
7944# qhasm: xmm1 ^= xmm2
7945# asm 1: pxor <xmm2=int6464#3,<xmm1=int6464#2
7946# asm 2: pxor <xmm2=%xmm2,<xmm1=%xmm1
7947pxor %xmm2,%xmm1
7948
7949# qhasm: xmm3 ^= xmm5
7950# asm 1: pxor <xmm5=int6464#6,<xmm3=int6464#4
7951# asm 2: pxor <xmm5=%xmm5,<xmm3=%xmm3
7952pxor %xmm5,%xmm3
7953
7954# qhasm: xmm2 ^= xmm0
7955# asm 1: pxor <xmm0=int6464#1,<xmm2=int6464#3
7956# asm 2: pxor <xmm0=%xmm0,<xmm2=%xmm2
7957pxor %xmm0,%xmm2
7958
7959# qhasm: xmm0 ^= xmm1
7960# asm 1: pxor <xmm1=int6464#2,<xmm0=int6464#1
7961# asm 2: pxor <xmm1=%xmm1,<xmm0=%xmm0
7962pxor %xmm1,%xmm0
7963
7964# qhasm: xmm1 ^= xmm7
7965# asm 1: pxor <xmm7=int6464#8,<xmm1=int6464#2
7966# asm 2: pxor <xmm7=%xmm7,<xmm1=%xmm1
7967pxor %xmm7,%xmm1
7968
7969# qhasm: xmm7 ^= xmm4
7970# asm 1: pxor <xmm4=int6464#5,<xmm7=int6464#8
7971# asm 2: pxor <xmm4=%xmm4,<xmm7=%xmm7
7972pxor %xmm4,%xmm7
7973
7974# qhasm: xmm3 ^= xmm7
7975# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4
7976# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3
7977pxor %xmm7,%xmm3
7978
7979# qhasm: xmm4 ^= xmm6
7980# asm 1: pxor <xmm6=int6464#7,<xmm4=int6464#5
7981# asm 2: pxor <xmm6=%xmm6,<xmm4=%xmm4
7982pxor %xmm6,%xmm4
7983
7984# qhasm: xmm6 ^= xmm7
7985# asm 1: pxor <xmm7=int6464#8,<xmm6=int6464#7
7986# asm 2: pxor <xmm7=%xmm7,<xmm6=%xmm6
7987pxor %xmm7,%xmm6
7988
7989# qhasm: xmm2 ^= xmm6
7990# asm 1: pxor <xmm6=int6464#7,<xmm2=int6464#3
7991# asm 2: pxor <xmm6=%xmm6,<xmm2=%xmm2
7992pxor %xmm6,%xmm2
7993
7994# qhasm: xmm5 ^= RCON
7995# asm 1: pxor RCON,<xmm5=int6464#6
7996# asm 2: pxor RCON,<xmm5=%xmm5
7997pxor RCON,%xmm5
7998
7999# qhasm: shuffle bytes of xmm0 by EXPB0
8000# asm 1: pshufb EXPB0,<xmm0=int6464#1
8001# asm 2: pshufb EXPB0,<xmm0=%xmm0
8002pshufb EXPB0,%xmm0
8003
8004# qhasm: shuffle bytes of xmm1 by EXPB0
8005# asm 1: pshufb EXPB0,<xmm1=int6464#2
8006# asm 2: pshufb EXPB0,<xmm1=%xmm1
8007pshufb EXPB0,%xmm1
8008
8009# qhasm: shuffle bytes of xmm3 by EXPB0
8010# asm 1: pshufb EXPB0,<xmm3=int6464#4
8011# asm 2: pshufb EXPB0,<xmm3=%xmm3
8012pshufb EXPB0,%xmm3
8013
8014# qhasm: shuffle bytes of xmm2 by EXPB0
8015# asm 1: pshufb EXPB0,<xmm2=int6464#3
8016# asm 2: pshufb EXPB0,<xmm2=%xmm2
8017pshufb EXPB0,%xmm2
8018
8019# qhasm: shuffle bytes of xmm6 by EXPB0
8020# asm 1: pshufb EXPB0,<xmm6=int6464#7
8021# asm 2: pshufb EXPB0,<xmm6=%xmm6
8022pshufb EXPB0,%xmm6
8023
8024# qhasm: shuffle bytes of xmm5 by EXPB0
8025# asm 1: pshufb EXPB0,<xmm5=int6464#6
8026# asm 2: pshufb EXPB0,<xmm5=%xmm5
8027pshufb EXPB0,%xmm5
8028
8029# qhasm: shuffle bytes of xmm4 by EXPB0
8030# asm 1: pshufb EXPB0,<xmm4=int6464#5
8031# asm 2: pshufb EXPB0,<xmm4=%xmm4
8032pshufb EXPB0,%xmm4
8033
8034# qhasm: shuffle bytes of xmm7 by EXPB0
8035# asm 1: pshufb EXPB0,<xmm7=int6464#8
8036# asm 2: pshufb EXPB0,<xmm7=%xmm7
8037pshufb EXPB0,%xmm7
8038
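# note: from here the expansion step above repeats, consuming the round
# key just stored at c + 640 and producing the next one at c + 768 ..
# c + 880, followed by the same ONE/ROTB fixup.
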
8039# qhasm: xmm8 = *(int128 *)(c + 640)
8040# asm 1: movdqa 640(<c=int64#1),>xmm8=int6464#9
8041# asm 2: movdqa 640(<c=%rdi),>xmm8=%xmm8
8042movdqa 640(%rdi),%xmm8
8043
8044# qhasm: xmm9 = *(int128 *)(c + 656)
8045# asm 1: movdqa 656(<c=int64#1),>xmm9=int6464#10
8046# asm 2: movdqa 656(<c=%rdi),>xmm9=%xmm9
8047movdqa 656(%rdi),%xmm9
8048
8049# qhasm: xmm10 = *(int128 *)(c + 672)
8050# asm 1: movdqa 672(<c=int64#1),>xmm10=int6464#11
8051# asm 2: movdqa 672(<c=%rdi),>xmm10=%xmm10
8052movdqa 672(%rdi),%xmm10
8053
8054# qhasm: xmm11 = *(int128 *)(c + 688)
8055# asm 1: movdqa 688(<c=int64#1),>xmm11=int6464#12
8056# asm 2: movdqa 688(<c=%rdi),>xmm11=%xmm11
8057movdqa 688(%rdi),%xmm11
8058
8059# qhasm: xmm12 = *(int128 *)(c + 704)
8060# asm 1: movdqa 704(<c=int64#1),>xmm12=int6464#13
8061# asm 2: movdqa 704(<c=%rdi),>xmm12=%xmm12
8062movdqa 704(%rdi),%xmm12
8063
8064# qhasm: xmm13 = *(int128 *)(c + 720)
8065# asm 1: movdqa 720(<c=int64#1),>xmm13=int6464#14
8066# asm 2: movdqa 720(<c=%rdi),>xmm13=%xmm13
8067movdqa 720(%rdi),%xmm13
8068
8069# qhasm: xmm14 = *(int128 *)(c + 736)
8070# asm 1: movdqa 736(<c=int64#1),>xmm14=int6464#15
8071# asm 2: movdqa 736(<c=%rdi),>xmm14=%xmm14
8072movdqa 736(%rdi),%xmm14
8073
8074# qhasm: xmm15 = *(int128 *)(c + 752)
8075# asm 1: movdqa 752(<c=int64#1),>xmm15=int6464#16
8076# asm 2: movdqa 752(<c=%rdi),>xmm15=%xmm15
8077movdqa 752(%rdi),%xmm15
8078
8079# qhasm: xmm8 ^= ONE
8080# asm 1: pxor ONE,<xmm8=int6464#9
8081# asm 2: pxor ONE,<xmm8=%xmm8
8082pxor ONE,%xmm8
8083
8084# qhasm: xmm9 ^= ONE
8085# asm 1: pxor ONE,<xmm9=int6464#10
8086# asm 2: pxor ONE,<xmm9=%xmm9
8087pxor ONE,%xmm9
8088
8089# qhasm: xmm13 ^= ONE
8090# asm 1: pxor ONE,<xmm13=int6464#14
8091# asm 2: pxor ONE,<xmm13=%xmm13
8092pxor ONE,%xmm13
8093
8094# qhasm: xmm14 ^= ONE
8095# asm 1: pxor ONE,<xmm14=int6464#15
8096# asm 2: pxor ONE,<xmm14=%xmm14
8097pxor ONE,%xmm14
8098
8099# qhasm: xmm0 ^= xmm8
8100# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
8101# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
8102pxor %xmm8,%xmm0
8103
8104# qhasm: xmm1 ^= xmm9
8105# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
8106# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
8107pxor %xmm9,%xmm1
8108
8109# qhasm: xmm3 ^= xmm10
8110# asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4
8111# asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3
8112pxor %xmm10,%xmm3
8113
8114# qhasm: xmm2 ^= xmm11
8115# asm 1: pxor <xmm11=int6464#12,<xmm2=int6464#3
8116# asm 2: pxor <xmm11=%xmm11,<xmm2=%xmm2
8117pxor %xmm11,%xmm2
8118
8119# qhasm: xmm6 ^= xmm12
8120# asm 1: pxor <xmm12=int6464#13,<xmm6=int6464#7
8121# asm 2: pxor <xmm12=%xmm12,<xmm6=%xmm6
8122pxor %xmm12,%xmm6
8123
8124# qhasm: xmm5 ^= xmm13
8125# asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6
8126# asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5
8127pxor %xmm13,%xmm5
8128
8129# qhasm: xmm4 ^= xmm14
8130# asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5
8131# asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4
8132pxor %xmm14,%xmm4
8133
8134# qhasm: xmm7 ^= xmm15
8135# asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8
8136# asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7
8137pxor %xmm15,%xmm7
8138
8139# qhasm: uint32323232 xmm8 >>= 8
8140# asm 1: psrld $8,<xmm8=int6464#9
8141# asm 2: psrld $8,<xmm8=%xmm8
8142psrld $8,%xmm8
8143
8144# qhasm: uint32323232 xmm9 >>= 8
8145# asm 1: psrld $8,<xmm9=int6464#10
8146# asm 2: psrld $8,<xmm9=%xmm9
8147psrld $8,%xmm9
8148
8149# qhasm: uint32323232 xmm10 >>= 8
8150# asm 1: psrld $8,<xmm10=int6464#11
8151# asm 2: psrld $8,<xmm10=%xmm10
8152psrld $8,%xmm10
8153
8154# qhasm: uint32323232 xmm11 >>= 8
8155# asm 1: psrld $8,<xmm11=int6464#12
8156# asm 2: psrld $8,<xmm11=%xmm11
8157psrld $8,%xmm11
8158
8159# qhasm: uint32323232 xmm12 >>= 8
8160# asm 1: psrld $8,<xmm12=int6464#13
8161# asm 2: psrld $8,<xmm12=%xmm12
8162psrld $8,%xmm12
8163
8164# qhasm: uint32323232 xmm13 >>= 8
8165# asm 1: psrld $8,<xmm13=int6464#14
8166# asm 2: psrld $8,<xmm13=%xmm13
8167psrld $8,%xmm13
8168
8169# qhasm: uint32323232 xmm14 >>= 8
8170# asm 1: psrld $8,<xmm14=int6464#15
8171# asm 2: psrld $8,<xmm14=%xmm14
8172psrld $8,%xmm14
8173
8174# qhasm: uint32323232 xmm15 >>= 8
8175# asm 1: psrld $8,<xmm15=int6464#16
8176# asm 2: psrld $8,<xmm15=%xmm15
8177psrld $8,%xmm15
8178
8179# qhasm: xmm0 ^= xmm8
8180# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
8181# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
8182pxor %xmm8,%xmm0
8183
8184# qhasm: xmm1 ^= xmm9
8185# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
8186# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
8187pxor %xmm9,%xmm1
8188
8189# qhasm: xmm3 ^= xmm10
8190# asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4
8191# asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3
8192pxor %xmm10,%xmm3
8193
8194# qhasm: xmm2 ^= xmm11
8195# asm 1: pxor <xmm11=int6464#12,<xmm2=int6464#3
8196# asm 2: pxor <xmm11=%xmm11,<xmm2=%xmm2
8197pxor %xmm11,%xmm2
8198
8199# qhasm: xmm6 ^= xmm12
8200# asm 1: pxor <xmm12=int6464#13,<xmm6=int6464#7
8201# asm 2: pxor <xmm12=%xmm12,<xmm6=%xmm6
8202pxor %xmm12,%xmm6
8203
8204# qhasm: xmm5 ^= xmm13
8205# asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6
8206# asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5
8207pxor %xmm13,%xmm5
8208
8209# qhasm: xmm4 ^= xmm14
8210# asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5
8211# asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4
8212pxor %xmm14,%xmm4
8213
8214# qhasm: xmm7 ^= xmm15
8215# asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8
8216# asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7
8217pxor %xmm15,%xmm7
8218
8219# qhasm: uint32323232 xmm8 >>= 8
8220# asm 1: psrld $8,<xmm8=int6464#9
8221# asm 2: psrld $8,<xmm8=%xmm8
8222psrld $8,%xmm8
8223
8224# qhasm: uint32323232 xmm9 >>= 8
8225# asm 1: psrld $8,<xmm9=int6464#10
8226# asm 2: psrld $8,<xmm9=%xmm9
8227psrld $8,%xmm9
8228
8229# qhasm: uint32323232 xmm10 >>= 8
8230# asm 1: psrld $8,<xmm10=int6464#11
8231# asm 2: psrld $8,<xmm10=%xmm10
8232psrld $8,%xmm10
8233
8234# qhasm: uint32323232 xmm11 >>= 8
8235# asm 1: psrld $8,<xmm11=int6464#12
8236# asm 2: psrld $8,<xmm11=%xmm11
8237psrld $8,%xmm11
8238
8239# qhasm: uint32323232 xmm12 >>= 8
8240# asm 1: psrld $8,<xmm12=int6464#13
8241# asm 2: psrld $8,<xmm12=%xmm12
8242psrld $8,%xmm12
8243
8244# qhasm: uint32323232 xmm13 >>= 8
8245# asm 1: psrld $8,<xmm13=int6464#14
8246# asm 2: psrld $8,<xmm13=%xmm13
8247psrld $8,%xmm13
8248
8249# qhasm: uint32323232 xmm14 >>= 8
8250# asm 1: psrld $8,<xmm14=int6464#15
8251# asm 2: psrld $8,<xmm14=%xmm14
8252psrld $8,%xmm14
8253
8254# qhasm: uint32323232 xmm15 >>= 8
8255# asm 1: psrld $8,<xmm15=int6464#16
8256# asm 2: psrld $8,<xmm15=%xmm15
8257psrld $8,%xmm15
8258
8259# qhasm: xmm0 ^= xmm8
8260# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
8261# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
8262pxor %xmm8,%xmm0
8263
8264# qhasm: xmm1 ^= xmm9
8265# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
8266# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
8267pxor %xmm9,%xmm1
8268
8269# qhasm: xmm3 ^= xmm10
8270# asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4
8271# asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3
8272pxor %xmm10,%xmm3
8273
8274# qhasm: xmm2 ^= xmm11
8275# asm 1: pxor <xmm11=int6464#12,<xmm2=int6464#3
8276# asm 2: pxor <xmm11=%xmm11,<xmm2=%xmm2
8277pxor %xmm11,%xmm2
8278
8279# qhasm: xmm6 ^= xmm12
8280# asm 1: pxor <xmm12=int6464#13,<xmm6=int6464#7
8281# asm 2: pxor <xmm12=%xmm12,<xmm6=%xmm6
8282pxor %xmm12,%xmm6
8283
8284# qhasm: xmm5 ^= xmm13
8285# asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6
8286# asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5
8287pxor %xmm13,%xmm5
8288
8289# qhasm: xmm4 ^= xmm14
8290# asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5
8291# asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4
8292pxor %xmm14,%xmm4
8293
8294# qhasm: xmm7 ^= xmm15
8295# asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8
8296# asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7
8297pxor %xmm15,%xmm7
8298
8299# qhasm: uint32323232 xmm8 >>= 8
8300# asm 1: psrld $8,<xmm8=int6464#9
8301# asm 2: psrld $8,<xmm8=%xmm8
8302psrld $8,%xmm8
8303
8304# qhasm: uint32323232 xmm9 >>= 8
8305# asm 1: psrld $8,<xmm9=int6464#10
8306# asm 2: psrld $8,<xmm9=%xmm9
8307psrld $8,%xmm9
8308
8309# qhasm: uint32323232 xmm10 >>= 8
8310# asm 1: psrld $8,<xmm10=int6464#11
8311# asm 2: psrld $8,<xmm10=%xmm10
8312psrld $8,%xmm10
8313
8314# qhasm: uint32323232 xmm11 >>= 8
8315# asm 1: psrld $8,<xmm11=int6464#12
8316# asm 2: psrld $8,<xmm11=%xmm11
8317psrld $8,%xmm11
8318
8319# qhasm: uint32323232 xmm12 >>= 8
8320# asm 1: psrld $8,<xmm12=int6464#13
8321# asm 2: psrld $8,<xmm12=%xmm12
8322psrld $8,%xmm12
8323
8324# qhasm: uint32323232 xmm13 >>= 8
8325# asm 1: psrld $8,<xmm13=int6464#14
8326# asm 2: psrld $8,<xmm13=%xmm13
8327psrld $8,%xmm13
8328
8329# qhasm: uint32323232 xmm14 >>= 8
8330# asm 1: psrld $8,<xmm14=int6464#15
8331# asm 2: psrld $8,<xmm14=%xmm14
8332psrld $8,%xmm14
8333
8334# qhasm: uint32323232 xmm15 >>= 8
8335# asm 1: psrld $8,<xmm15=int6464#16
8336# asm 2: psrld $8,<xmm15=%xmm15
8337psrld $8,%xmm15
8338
8339# qhasm: xmm0 ^= xmm8
8340# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
8341# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
8342pxor %xmm8,%xmm0
8343
8344# qhasm: xmm1 ^= xmm9
8345# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
8346# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
8347pxor %xmm9,%xmm1
8348
8349# qhasm: xmm3 ^= xmm10
8350# asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4
8351# asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3
8352pxor %xmm10,%xmm3
8353
8354# qhasm: xmm2 ^= xmm11
8355# asm 1: pxor <xmm11=int6464#12,<xmm2=int6464#3
8356# asm 2: pxor <xmm11=%xmm11,<xmm2=%xmm2
8357pxor %xmm11,%xmm2
8358
8359# qhasm: xmm6 ^= xmm12
8360# asm 1: pxor <xmm12=int6464#13,<xmm6=int6464#7
8361# asm 2: pxor <xmm12=%xmm12,<xmm6=%xmm6
8362pxor %xmm12,%xmm6
8363
8364# qhasm: xmm5 ^= xmm13
8365# asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6
8366# asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5
8367pxor %xmm13,%xmm5
8368
8369# qhasm: xmm4 ^= xmm14
8370# asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5
8371# asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4
8372pxor %xmm14,%xmm4
8373
8374# qhasm: xmm7 ^= xmm15
8375# asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8
8376# asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7
8377pxor %xmm15,%xmm7
8378
8379# qhasm: *(int128 *)(c + 768) = xmm0
8380# asm 1: movdqa <xmm0=int6464#1,768(<c=int64#1)
8381# asm 2: movdqa <xmm0=%xmm0,768(<c=%rdi)
8382movdqa %xmm0,768(%rdi)
8383
8384# qhasm: *(int128 *)(c + 784) = xmm1
8385# asm 1: movdqa <xmm1=int6464#2,784(<c=int64#1)
8386# asm 2: movdqa <xmm1=%xmm1,784(<c=%rdi)
8387movdqa %xmm1,784(%rdi)
8388
8389# qhasm: *(int128 *)(c + 800) = xmm3
8390# asm 1: movdqa <xmm3=int6464#4,800(<c=int64#1)
8391# asm 2: movdqa <xmm3=%xmm3,800(<c=%rdi)
8392movdqa %xmm3,800(%rdi)
8393
8394# qhasm: *(int128 *)(c + 816) = xmm2
8395# asm 1: movdqa <xmm2=int6464#3,816(<c=int64#1)
8396# asm 2: movdqa <xmm2=%xmm2,816(<c=%rdi)
8397movdqa %xmm2,816(%rdi)
8398
8399# qhasm: *(int128 *)(c + 832) = xmm6
8400# asm 1: movdqa <xmm6=int6464#7,832(<c=int64#1)
8401# asm 2: movdqa <xmm6=%xmm6,832(<c=%rdi)
8402movdqa %xmm6,832(%rdi)
8403
8404# qhasm: *(int128 *)(c + 848) = xmm5
8405# asm 1: movdqa <xmm5=int6464#6,848(<c=int64#1)
8406# asm 2: movdqa <xmm5=%xmm5,848(<c=%rdi)
8407movdqa %xmm5,848(%rdi)
8408
8409# qhasm: *(int128 *)(c + 864) = xmm4
8410# asm 1: movdqa <xmm4=int6464#5,864(<c=int64#1)
8411# asm 2: movdqa <xmm4=%xmm4,864(<c=%rdi)
8412movdqa %xmm4,864(%rdi)
8413
8414# qhasm: *(int128 *)(c + 880) = xmm7
8415# asm 1: movdqa <xmm7=int6464#8,880(<c=int64#1)
8416# asm 2: movdqa <xmm7=%xmm7,880(<c=%rdi)
8417movdqa %xmm7,880(%rdi)
8418
8419# qhasm: xmm0 ^= ONE
8420# asm 1: pxor ONE,<xmm0=int6464#1
8421# asm 2: pxor ONE,<xmm0=%xmm0
8422pxor ONE,%xmm0
8423
8424# qhasm: xmm1 ^= ONE
8425# asm 1: pxor ONE,<xmm1=int6464#2
8426# asm 2: pxor ONE,<xmm1=%xmm1
8427pxor ONE,%xmm1
8428
8429# qhasm: xmm5 ^= ONE
8430# asm 1: pxor ONE,<xmm5=int6464#6
8431# asm 2: pxor ONE,<xmm5=%xmm5
8432pxor ONE,%xmm5
8433
8434# qhasm: xmm4 ^= ONE
8435# asm 1: pxor ONE,<xmm4=int6464#5
8436# asm 2: pxor ONE,<xmm4=%xmm4
8437pxor ONE,%xmm4
8438
8439# qhasm: shuffle bytes of xmm0 by ROTB
8440# asm 1: pshufb ROTB,<xmm0=int6464#1
8441# asm 2: pshufb ROTB,<xmm0=%xmm0
8442pshufb ROTB,%xmm0
8443
8444# qhasm: shuffle bytes of xmm1 by ROTB
8445# asm 1: pshufb ROTB,<xmm1=int6464#2
8446# asm 2: pshufb ROTB,<xmm1=%xmm1
8447pshufb ROTB,%xmm1
8448
8449# qhasm: shuffle bytes of xmm3 by ROTB
8450# asm 1: pshufb ROTB,<xmm3=int6464#4
8451# asm 2: pshufb ROTB,<xmm3=%xmm3
8452pshufb ROTB,%xmm3
8453
8454# qhasm: shuffle bytes of xmm2 by ROTB
8455# asm 1: pshufb ROTB,<xmm2=int6464#3
8456# asm 2: pshufb ROTB,<xmm2=%xmm2
8457pshufb ROTB,%xmm2
8458
8459# qhasm: shuffle bytes of xmm6 by ROTB
8460# asm 1: pshufb ROTB,<xmm6=int6464#7
8461# asm 2: pshufb ROTB,<xmm6=%xmm6
8462pshufb ROTB,%xmm6
8463
8464# qhasm: shuffle bytes of xmm5 by ROTB
8465# asm 1: pshufb ROTB,<xmm5=int6464#6
8466# asm 2: pshufb ROTB,<xmm5=%xmm5
8467pshufb ROTB,%xmm5
8468
8469# qhasm: shuffle bytes of xmm4 by ROTB
8470# asm 1: pshufb ROTB,<xmm4=int6464#5
8471# asm 2: pshufb ROTB,<xmm4=%xmm4
8472pshufb ROTB,%xmm4
8473
8474# qhasm: shuffle bytes of xmm7 by ROTB
8475# asm 1: pshufb ROTB,<xmm7=int6464#8
8476# asm 2: pshufb ROTB,<xmm7=%xmm7
8477pshufb ROTB,%xmm7
8478
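# note: what follows is the AES S-box evaluated as one long Boolean
# circuit (pxor/pand/por/movdqa only) over the eight bit planes, with
# xmm8..xmm15 as temporaries; this is the SubWord step of the
# expansion.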
8479# qhasm: xmm5 ^= xmm4
8480# asm 1: pxor <xmm4=int6464#5,<xmm5=int6464#6
8481# asm 2: pxor <xmm4=%xmm4,<xmm5=%xmm5
8482pxor %xmm4,%xmm5
8483
8484# qhasm: xmm3 ^= xmm1
8485# asm 1: pxor <xmm1=int6464#2,<xmm3=int6464#4
8486# asm 2: pxor <xmm1=%xmm1,<xmm3=%xmm3
8487pxor %xmm1,%xmm3
8488
8489# qhasm: xmm5 ^= xmm0
8490# asm 1: pxor <xmm0=int6464#1,<xmm5=int6464#6
8491# asm 2: pxor <xmm0=%xmm0,<xmm5=%xmm5
8492pxor %xmm0,%xmm5
8493
8494# qhasm: xmm4 ^= xmm3
8495# asm 1: pxor <xmm3=int6464#4,<xmm4=int6464#5
8496# asm 2: pxor <xmm3=%xmm3,<xmm4=%xmm4
8497pxor %xmm3,%xmm4
8498
8499# qhasm: xmm2 ^= xmm0
8500# asm 1: pxor <xmm0=int6464#1,<xmm2=int6464#3
8501# asm 2: pxor <xmm0=%xmm0,<xmm2=%xmm2
8502pxor %xmm0,%xmm2
8503
8504# qhasm: xmm4 ^= xmm2
8505# asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5
8506# asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4
8507pxor %xmm2,%xmm4
8508
8509# qhasm: xmm2 ^= xmm7
8510# asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3
8511# asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2
8512pxor %xmm7,%xmm2
8513
8514# qhasm: xmm2 ^= xmm6
8515# asm 1: pxor <xmm6=int6464#7,<xmm2=int6464#3
8516# asm 2: pxor <xmm6=%xmm6,<xmm2=%xmm2
8517pxor %xmm6,%xmm2
8518
8519# qhasm: xmm7 ^= xmm5
8520# asm 1: pxor <xmm5=int6464#6,<xmm7=int6464#8
8521# asm 2: pxor <xmm5=%xmm5,<xmm7=%xmm7
8522pxor %xmm5,%xmm7
8523
8524# qhasm: xmm2 ^= xmm1
8525# asm 1: pxor <xmm1=int6464#2,<xmm2=int6464#3
8526# asm 2: pxor <xmm1=%xmm1,<xmm2=%xmm2
8527pxor %xmm1,%xmm2
8528
8529# qhasm: xmm6 ^= xmm5
8530# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
8531# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
8532pxor %xmm5,%xmm6
8533
8534# qhasm: xmm3 ^= xmm7
8535# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4
8536# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3
8537pxor %xmm7,%xmm3
8538
8539# qhasm: xmm1 ^= xmm5
8540# asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2
8541# asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1
8542pxor %xmm5,%xmm1
8543
8544# qhasm: xmm11 = xmm7
8545# asm 1: movdqa <xmm7=int6464#8,>xmm11=int6464#9
8546# asm 2: movdqa <xmm7=%xmm7,>xmm11=%xmm8
8547movdqa %xmm7,%xmm8
8548
8549# qhasm: xmm10 = xmm1
8550# asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10
8551# asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9
8552movdqa %xmm1,%xmm9
8553
8554# qhasm: xmm9 = xmm5
8555# asm 1: movdqa <xmm5=int6464#6,>xmm9=int6464#11
8556# asm 2: movdqa <xmm5=%xmm5,>xmm9=%xmm10
8557movdqa %xmm5,%xmm10
8558
8559# qhasm: xmm13 = xmm3
8560# asm 1: movdqa <xmm3=int6464#4,>xmm13=int6464#12
8561# asm 2: movdqa <xmm3=%xmm3,>xmm13=%xmm11
8562movdqa %xmm3,%xmm11
8563
8564# qhasm: xmm12 = xmm4
8565# asm 1: movdqa <xmm4=int6464#5,>xmm12=int6464#13
8566# asm 2: movdqa <xmm4=%xmm4,>xmm12=%xmm12
8567movdqa %xmm4,%xmm12
8568
8569# qhasm: xmm11 ^= xmm6
8570# asm 1: pxor <xmm6=int6464#7,<xmm11=int6464#9
8571# asm 2: pxor <xmm6=%xmm6,<xmm11=%xmm8
8572pxor %xmm6,%xmm8
8573
8574# qhasm: xmm10 ^= xmm3
8575# asm 1: pxor <xmm3=int6464#4,<xmm10=int6464#10
8576# asm 2: pxor <xmm3=%xmm3,<xmm10=%xmm9
8577pxor %xmm3,%xmm9
8578
8579# qhasm: xmm9 ^= xmm2
8580# asm 1: pxor <xmm2=int6464#3,<xmm9=int6464#11
8581# asm 2: pxor <xmm2=%xmm2,<xmm9=%xmm10
8582pxor %xmm2,%xmm10
8583
8584# qhasm: xmm13 ^= xmm6
8585# asm 1: pxor <xmm6=int6464#7,<xmm13=int6464#12
8586# asm 2: pxor <xmm6=%xmm6,<xmm13=%xmm11
8587pxor %xmm6,%xmm11
8588
8589# qhasm: xmm12 ^= xmm0
8590# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13
8591# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12
8592pxor %xmm0,%xmm12
8593
8594# qhasm: xmm14 = xmm11
8595# asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14
8596# asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13
8597movdqa %xmm8,%xmm13
8598
8599# qhasm: xmm8 = xmm10
8600# asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15
8601# asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14
8602movdqa %xmm9,%xmm14
8603
8604# qhasm: xmm15 = xmm11
8605# asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16
8606# asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15
8607movdqa %xmm8,%xmm15
8608
8609# qhasm: xmm10 |= xmm9
8610# asm 1: por <xmm9=int6464#11,<xmm10=int6464#10
8611# asm 2: por <xmm9=%xmm10,<xmm10=%xmm9
8612por %xmm10,%xmm9
8613
8614# qhasm: xmm11 |= xmm12
8615# asm 1: por <xmm12=int6464#13,<xmm11=int6464#9
8616# asm 2: por <xmm12=%xmm12,<xmm11=%xmm8
8617por %xmm12,%xmm8
8618
8619# qhasm: xmm15 ^= xmm8
8620# asm 1: pxor <xmm8=int6464#15,<xmm15=int6464#16
8621# asm 2: pxor <xmm8=%xmm14,<xmm15=%xmm15
8622pxor %xmm14,%xmm15
8623
8624# qhasm: xmm14 &= xmm12
8625# asm 1: pand <xmm12=int6464#13,<xmm14=int6464#14
8626# asm 2: pand <xmm12=%xmm12,<xmm14=%xmm13
8627pand %xmm12,%xmm13
8628
8629# qhasm: xmm8 &= xmm9
8630# asm 1: pand <xmm9=int6464#11,<xmm8=int6464#15
8631# asm 2: pand <xmm9=%xmm10,<xmm8=%xmm14
8632pand %xmm10,%xmm14
8633
8634# qhasm: xmm12 ^= xmm9
8635# asm 1: pxor <xmm9=int6464#11,<xmm12=int6464#13
8636# asm 2: pxor <xmm9=%xmm10,<xmm12=%xmm12
8637pxor %xmm10,%xmm12
8638
8639# qhasm: xmm15 &= xmm12
8640# asm 1: pand <xmm12=int6464#13,<xmm15=int6464#16
8641# asm 2: pand <xmm12=%xmm12,<xmm15=%xmm15
8642pand %xmm12,%xmm15
8643
8644# qhasm: xmm12 = xmm2
8645# asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#11
8646# asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm10
8647movdqa %xmm2,%xmm10
8648
8649# qhasm: xmm12 ^= xmm0
8650# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#11
8651# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm10
8652pxor %xmm0,%xmm10
8653
8654# qhasm: xmm13 &= xmm12
8655# asm 1: pand <xmm12=int6464#11,<xmm13=int6464#12
8656# asm 2: pand <xmm12=%xmm10,<xmm13=%xmm11
8657pand %xmm10,%xmm11
8658
8659# qhasm: xmm11 ^= xmm13
8660# asm 1: pxor <xmm13=int6464#12,<xmm11=int6464#9
8661# asm 2: pxor <xmm13=%xmm11,<xmm11=%xmm8
8662pxor %xmm11,%xmm8
8663
8664# qhasm: xmm10 ^= xmm13
8665# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
8666# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
8667pxor %xmm11,%xmm9
8668
8669# qhasm: xmm13 = xmm7
8670# asm 1: movdqa <xmm7=int6464#8,>xmm13=int6464#11
8671# asm 2: movdqa <xmm7=%xmm7,>xmm13=%xmm10
8672movdqa %xmm7,%xmm10
8673
8674# qhasm: xmm13 ^= xmm1
8675# asm 1: pxor <xmm1=int6464#2,<xmm13=int6464#11
8676# asm 2: pxor <xmm1=%xmm1,<xmm13=%xmm10
8677pxor %xmm1,%xmm10
8678
8679# qhasm: xmm12 = xmm5
8680# asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#12
8681# asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm11
8682movdqa %xmm5,%xmm11
8683
8684# qhasm: xmm9 = xmm13
8685# asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13
8686# asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12
8687movdqa %xmm10,%xmm12
8688
8689# qhasm: xmm12 ^= xmm4
8690# asm 1: pxor <xmm4=int6464#5,<xmm12=int6464#12
8691# asm 2: pxor <xmm4=%xmm4,<xmm12=%xmm11
8692pxor %xmm4,%xmm11
8693
8694# qhasm: xmm9 |= xmm12
8695# asm 1: por <xmm12=int6464#12,<xmm9=int6464#13
8696# asm 2: por <xmm12=%xmm11,<xmm9=%xmm12
8697por %xmm11,%xmm12
8698
8699# qhasm: xmm13 &= xmm12
8700# asm 1: pand <xmm12=int6464#12,<xmm13=int6464#11
8701# asm 2: pand <xmm12=%xmm11,<xmm13=%xmm10
8702pand %xmm11,%xmm10
8703
8704# qhasm: xmm8 ^= xmm13
8705# asm 1: pxor <xmm13=int6464#11,<xmm8=int6464#15
8706# asm 2: pxor <xmm13=%xmm10,<xmm8=%xmm14
8707pxor %xmm10,%xmm14
8708
8709# qhasm: xmm11 ^= xmm15
8710# asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#9
8711# asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm8
8712pxor %xmm15,%xmm8
8713
8714# qhasm: xmm10 ^= xmm14
8715# asm 1: pxor <xmm14=int6464#14,<xmm10=int6464#10
8716# asm 2: pxor <xmm14=%xmm13,<xmm10=%xmm9
8717pxor %xmm13,%xmm9
8718
8719# qhasm: xmm9 ^= xmm15
8720# asm 1: pxor <xmm15=int6464#16,<xmm9=int6464#13
8721# asm 2: pxor <xmm15=%xmm15,<xmm9=%xmm12
8722pxor %xmm15,%xmm12
8723
8724# qhasm: xmm8 ^= xmm14
8725# asm 1: pxor <xmm14=int6464#14,<xmm8=int6464#15
8726# asm 2: pxor <xmm14=%xmm13,<xmm8=%xmm14
8727pxor %xmm13,%xmm14
8728
8729# qhasm: xmm9 ^= xmm14
8730# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
8731# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
8732pxor %xmm13,%xmm12
8733
8734# qhasm: xmm12 = xmm3
8735# asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#11
8736# asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm10
8737movdqa %xmm3,%xmm10
8738
8739# qhasm: xmm13 = xmm6
8740# asm 1: movdqa <xmm6=int6464#7,>xmm13=int6464#12
8741# asm 2: movdqa <xmm6=%xmm6,>xmm13=%xmm11
8742movdqa %xmm6,%xmm11
8743
8744# qhasm: xmm14 = xmm1
8745# asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14
8746# asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13
8747movdqa %xmm1,%xmm13
8748
8749# qhasm: xmm15 = xmm7
8750# asm 1: movdqa <xmm7=int6464#8,>xmm15=int6464#16
8751# asm 2: movdqa <xmm7=%xmm7,>xmm15=%xmm15
8752movdqa %xmm7,%xmm15
8753
8754# qhasm: xmm12 &= xmm2
8755# asm 1: pand <xmm2=int6464#3,<xmm12=int6464#11
8756# asm 2: pand <xmm2=%xmm2,<xmm12=%xmm10
8757pand %xmm2,%xmm10
8758
8759# qhasm: xmm13 &= xmm0
8760# asm 1: pand <xmm0=int6464#1,<xmm13=int6464#12
8761# asm 2: pand <xmm0=%xmm0,<xmm13=%xmm11
8762pand %xmm0,%xmm11
8763
8764# qhasm: xmm14 &= xmm5
8765# asm 1: pand <xmm5=int6464#6,<xmm14=int6464#14
8766# asm 2: pand <xmm5=%xmm5,<xmm14=%xmm13
8767pand %xmm5,%xmm13
8768
8769# qhasm: xmm15 |= xmm4
8770# asm 1: por <xmm4=int6464#5,<xmm15=int6464#16
8771# asm 2: por <xmm4=%xmm4,<xmm15=%xmm15
8772por %xmm4,%xmm15
8773
8774# qhasm: xmm11 ^= xmm12
8775# asm 1: pxor <xmm12=int6464#11,<xmm11=int6464#9
8776# asm 2: pxor <xmm12=%xmm10,<xmm11=%xmm8
8777pxor %xmm10,%xmm8
8778
8779# qhasm: xmm10 ^= xmm13
8780# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
8781# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
8782pxor %xmm11,%xmm9
8783
8784# qhasm: xmm9 ^= xmm14
8785# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
8786# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
8787pxor %xmm13,%xmm12
8788
8789# qhasm: xmm8 ^= xmm15
8790# asm 1: pxor <xmm15=int6464#16,<xmm8=int6464#15
8791# asm 2: pxor <xmm15=%xmm15,<xmm8=%xmm14
8792pxor %xmm15,%xmm14
8793
8794# qhasm: xmm12 = xmm11
8795# asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11
8796# asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10
8797movdqa %xmm8,%xmm10
8798
8799# qhasm: xmm12 ^= xmm10
8800# asm 1: pxor <xmm10=int6464#10,<xmm12=int6464#11
8801# asm 2: pxor <xmm10=%xmm9,<xmm12=%xmm10
8802pxor %xmm9,%xmm10
8803
8804# qhasm: xmm11 &= xmm9
8805# asm 1: pand <xmm9=int6464#13,<xmm11=int6464#9
8806# asm 2: pand <xmm9=%xmm12,<xmm11=%xmm8
8807pand %xmm12,%xmm8
8808
8809# qhasm: xmm14 = xmm8
8810# asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12
8811# asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11
8812movdqa %xmm14,%xmm11
8813
8814# qhasm: xmm14 ^= xmm11
8815# asm 1: pxor <xmm11=int6464#9,<xmm14=int6464#12
8816# asm 2: pxor <xmm11=%xmm8,<xmm14=%xmm11
8817pxor %xmm8,%xmm11
8818
8819# qhasm: xmm15 = xmm12
8820# asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14
8821# asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13
8822movdqa %xmm10,%xmm13
8823
8824# qhasm: xmm15 &= xmm14
8825# asm 1: pand <xmm14=int6464#12,<xmm15=int6464#14
8826# asm 2: pand <xmm14=%xmm11,<xmm15=%xmm13
8827pand %xmm11,%xmm13
8828
8829# qhasm: xmm15 ^= xmm10
8830# asm 1: pxor <xmm10=int6464#10,<xmm15=int6464#14
8831# asm 2: pxor <xmm10=%xmm9,<xmm15=%xmm13
8832pxor %xmm9,%xmm13
8833
8834# qhasm: xmm13 = xmm9
8835# asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16
8836# asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15
8837movdqa %xmm12,%xmm15
8838
8839# qhasm: xmm13 ^= xmm8
8840# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
8841# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
8842pxor %xmm14,%xmm15
8843
8844# qhasm: xmm11 ^= xmm10
8845# asm 1: pxor <xmm10=int6464#10,<xmm11=int6464#9
8846# asm 2: pxor <xmm10=%xmm9,<xmm11=%xmm8
8847pxor %xmm9,%xmm8
8848
8849# qhasm: xmm13 &= xmm11
8850# asm 1: pand <xmm11=int6464#9,<xmm13=int6464#16
8851# asm 2: pand <xmm11=%xmm8,<xmm13=%xmm15
8852pand %xmm8,%xmm15
8853
8854# qhasm: xmm13 ^= xmm8
8855# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
8856# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
8857pxor %xmm14,%xmm15
8858
8859# qhasm: xmm9 ^= xmm13
8860# asm 1: pxor <xmm13=int6464#16,<xmm9=int6464#13
8861# asm 2: pxor <xmm13=%xmm15,<xmm9=%xmm12
8862pxor %xmm15,%xmm12
8863
8864# qhasm: xmm10 = xmm14
8865# asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9
8866# asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8
8867movdqa %xmm11,%xmm8
8868
8869# qhasm: xmm10 ^= xmm13
8870# asm 1: pxor <xmm13=int6464#16,<xmm10=int6464#9
8871# asm 2: pxor <xmm13=%xmm15,<xmm10=%xmm8
8872pxor %xmm15,%xmm8
8873
8874# qhasm: xmm10 &= xmm8
8875# asm 1: pand <xmm8=int6464#15,<xmm10=int6464#9
8876# asm 2: pand <xmm8=%xmm14,<xmm10=%xmm8
8877pand %xmm14,%xmm8
8878
8879# qhasm: xmm9 ^= xmm10
8880# asm 1: pxor <xmm10=int6464#9,<xmm9=int6464#13
8881# asm 2: pxor <xmm10=%xmm8,<xmm9=%xmm12
8882pxor %xmm8,%xmm12
8883
8884# qhasm: xmm14 ^= xmm10
8885# asm 1: pxor <xmm10=int6464#9,<xmm14=int6464#12
8886# asm 2: pxor <xmm10=%xmm8,<xmm14=%xmm11
8887pxor %xmm8,%xmm11
8888
8889# qhasm: xmm14 &= xmm15
8890# asm 1: pand <xmm15=int6464#14,<xmm14=int6464#12
8891# asm 2: pand <xmm15=%xmm13,<xmm14=%xmm11
8892pand %xmm13,%xmm11
8893
8894# qhasm: xmm14 ^= xmm12
8895# asm 1: pxor <xmm12=int6464#11,<xmm14=int6464#12
8896# asm 2: pxor <xmm12=%xmm10,<xmm14=%xmm11
8897pxor %xmm10,%xmm11
8898
8899# qhasm: xmm12 = xmm4
8900# asm 1: movdqa <xmm4=int6464#5,>xmm12=int6464#9
8901# asm 2: movdqa <xmm4=%xmm4,>xmm12=%xmm8
8902movdqa %xmm4,%xmm8
8903
8904# qhasm: xmm8 = xmm5
8905# asm 1: movdqa <xmm5=int6464#6,>xmm8=int6464#10
8906# asm 2: movdqa <xmm5=%xmm5,>xmm8=%xmm9
8907movdqa %xmm5,%xmm9
8908
8909# qhasm: xmm10 = xmm15
8910# asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11
8911# asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10
8912movdqa %xmm13,%xmm10
8913
8914# qhasm: xmm10 ^= xmm14
8915# asm 1: pxor <xmm14=int6464#12,<xmm10=int6464#11
8916# asm 2: pxor <xmm14=%xmm11,<xmm10=%xmm10
8917pxor %xmm11,%xmm10
8918
8919# qhasm: xmm10 &= xmm4
8920# asm 1: pand <xmm4=int6464#5,<xmm10=int6464#11
8921# asm 2: pand <xmm4=%xmm4,<xmm10=%xmm10
8922pand %xmm4,%xmm10
8923
8924# qhasm: xmm4 ^= xmm5
8925# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
8926# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
8927pxor %xmm5,%xmm4
8928
8929# qhasm: xmm4 &= xmm14
8930# asm 1: pand <xmm14=int6464#12,<xmm4=int6464#5
8931# asm 2: pand <xmm14=%xmm11,<xmm4=%xmm4
8932pand %xmm11,%xmm4
8933
8934# qhasm: xmm5 &= xmm15
8935# asm 1: pand <xmm15=int6464#14,<xmm5=int6464#6
8936# asm 2: pand <xmm15=%xmm13,<xmm5=%xmm5
8937pand %xmm13,%xmm5
8938
8939# qhasm: xmm4 ^= xmm5
8940# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
8941# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
8942pxor %xmm5,%xmm4
8943
8944# qhasm: xmm5 ^= xmm10
8945# asm 1: pxor <xmm10=int6464#11,<xmm5=int6464#6
8946# asm 2: pxor <xmm10=%xmm10,<xmm5=%xmm5
8947pxor %xmm10,%xmm5
8948
8949# qhasm: xmm12 ^= xmm0
8950# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#9
8951# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm8
8952pxor %xmm0,%xmm8
8953
8954# qhasm: xmm8 ^= xmm2
8955# asm 1: pxor <xmm2=int6464#3,<xmm8=int6464#10
8956# asm 2: pxor <xmm2=%xmm2,<xmm8=%xmm9
8957pxor %xmm2,%xmm9
8958
8959# qhasm: xmm15 ^= xmm13
8960# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
8961# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
8962pxor %xmm15,%xmm13
8963
8964# qhasm: xmm14 ^= xmm9
8965# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
8966# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
8967pxor %xmm12,%xmm11
8968
8969# qhasm: xmm11 = xmm15
8970# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
8971# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
8972movdqa %xmm13,%xmm10
8973
8974# qhasm: xmm11 ^= xmm14
8975# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
8976# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
8977pxor %xmm11,%xmm10
8978
8979# qhasm: xmm11 &= xmm12
8980# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
8981# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
8982pand %xmm8,%xmm10
8983
8984# qhasm: xmm12 ^= xmm8
8985# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
8986# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
8987pxor %xmm9,%xmm8
8988
8989# qhasm: xmm12 &= xmm14
8990# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
8991# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
8992pand %xmm11,%xmm8
8993
8994# qhasm: xmm8 &= xmm15
8995# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
8996# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
8997pand %xmm13,%xmm9
8998
8999# qhasm: xmm8 ^= xmm12
9000# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
9001# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
9002pxor %xmm8,%xmm9
9003
9004# qhasm: xmm12 ^= xmm11
9005# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
9006# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
9007pxor %xmm10,%xmm8
9008
9009# qhasm: xmm10 = xmm13
9010# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
9011# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
9012movdqa %xmm15,%xmm10
9013
9014# qhasm: xmm10 ^= xmm9
9015# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
9016# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
9017pxor %xmm12,%xmm10
9018
9019# qhasm: xmm10 &= xmm0
9020# asm 1: pand <xmm0=int6464#1,<xmm10=int6464#11
9021# asm 2: pand <xmm0=%xmm0,<xmm10=%xmm10
9022pand %xmm0,%xmm10
9023
9024# qhasm: xmm0 ^= xmm2
9025# asm 1: pxor <xmm2=int6464#3,<xmm0=int6464#1
9026# asm 2: pxor <xmm2=%xmm2,<xmm0=%xmm0
9027pxor %xmm2,%xmm0
9028
9029# qhasm: xmm0 &= xmm9
9030# asm 1: pand <xmm9=int6464#13,<xmm0=int6464#1
9031# asm 2: pand <xmm9=%xmm12,<xmm0=%xmm0
9032pand %xmm12,%xmm0
9033
9034# qhasm: xmm2 &= xmm13
9035# asm 1: pand <xmm13=int6464#16,<xmm2=int6464#3
9036# asm 2: pand <xmm13=%xmm15,<xmm2=%xmm2
9037pand %xmm15,%xmm2
9038
9039# qhasm: xmm0 ^= xmm2
9040# asm 1: pxor <xmm2=int6464#3,<xmm0=int6464#1
9041# asm 2: pxor <xmm2=%xmm2,<xmm0=%xmm0
9042pxor %xmm2,%xmm0
9043
9044# qhasm: xmm2 ^= xmm10
9045# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3
9046# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2
9047pxor %xmm10,%xmm2
9048
9049# qhasm: xmm4 ^= xmm12
9050# asm 1: pxor <xmm12=int6464#9,<xmm4=int6464#5
9051# asm 2: pxor <xmm12=%xmm8,<xmm4=%xmm4
9052pxor %xmm8,%xmm4
9053
9054# qhasm: xmm0 ^= xmm12
9055# asm 1: pxor <xmm12=int6464#9,<xmm0=int6464#1
9056# asm 2: pxor <xmm12=%xmm8,<xmm0=%xmm0
9057pxor %xmm8,%xmm0
9058
9059# qhasm: xmm5 ^= xmm8
9060# asm 1: pxor <xmm8=int6464#10,<xmm5=int6464#6
9061# asm 2: pxor <xmm8=%xmm9,<xmm5=%xmm5
9062pxor %xmm9,%xmm5
9063
9064# qhasm: xmm2 ^= xmm8
9065# asm 1: pxor <xmm8=int6464#10,<xmm2=int6464#3
9066# asm 2: pxor <xmm8=%xmm9,<xmm2=%xmm2
9067pxor %xmm9,%xmm2
9068
9069# qhasm: xmm12 = xmm7
9070# asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#9
9071# asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm8
9072movdqa %xmm7,%xmm8
9073
9074# qhasm: xmm8 = xmm1
9075# asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10
9076# asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9
9077movdqa %xmm1,%xmm9
9078
9079# qhasm: xmm12 ^= xmm6
9080# asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#9
9081# asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm8
9082pxor %xmm6,%xmm8
9083
9084# qhasm: xmm8 ^= xmm3
9085# asm 1: pxor <xmm3=int6464#4,<xmm8=int6464#10
9086# asm 2: pxor <xmm3=%xmm3,<xmm8=%xmm9
9087pxor %xmm3,%xmm9
9088
9089# qhasm: xmm11 = xmm15
9090# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
9091# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
9092movdqa %xmm13,%xmm10
9093
9094# qhasm: xmm11 ^= xmm14
9095# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
9096# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
9097pxor %xmm11,%xmm10
9098
9099# qhasm: xmm11 &= xmm12
9100# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
9101# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
9102pand %xmm8,%xmm10
9103
9104# qhasm: xmm12 ^= xmm8
9105# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
9106# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
9107pxor %xmm9,%xmm8
9108
9109# qhasm: xmm12 &= xmm14
9110# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
9111# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
9112pand %xmm11,%xmm8
9113
9114# qhasm: xmm8 &= xmm15
9115# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
9116# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
9117pand %xmm13,%xmm9
9118
9119# qhasm: xmm8 ^= xmm12
9120# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
9121# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
9122pxor %xmm8,%xmm9
9123
9124# qhasm: xmm12 ^= xmm11
9125# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
9126# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
9127pxor %xmm10,%xmm8
9128
9129# qhasm: xmm10 = xmm13
9130# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
9131# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
9132movdqa %xmm15,%xmm10
9133
9134# qhasm: xmm10 ^= xmm9
9135# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
9136# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
9137pxor %xmm12,%xmm10
9138
9139# qhasm: xmm10 &= xmm6
9140# asm 1: pand <xmm6=int6464#7,<xmm10=int6464#11
9141# asm 2: pand <xmm6=%xmm6,<xmm10=%xmm10
9142pand %xmm6,%xmm10
9143
9144# qhasm: xmm6 ^= xmm3
9145# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
9146# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
9147pxor %xmm3,%xmm6
9148
9149# qhasm: xmm6 &= xmm9
9150# asm 1: pand <xmm9=int6464#13,<xmm6=int6464#7
9151# asm 2: pand <xmm9=%xmm12,<xmm6=%xmm6
9152pand %xmm12,%xmm6
9153
9154# qhasm: xmm3 &= xmm13
9155# asm 1: pand <xmm13=int6464#16,<xmm3=int6464#4
9156# asm 2: pand <xmm13=%xmm15,<xmm3=%xmm3
9157pand %xmm15,%xmm3
9158
9159# qhasm: xmm6 ^= xmm3
9160# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
9161# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
9162pxor %xmm3,%xmm6
9163
9164# qhasm: xmm3 ^= xmm10
9165# asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4
9166# asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3
9167pxor %xmm10,%xmm3
9168
9169# qhasm: xmm15 ^= xmm13
9170# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
9171# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
9172pxor %xmm15,%xmm13
9173
9174# qhasm: xmm14 ^= xmm9
9175# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
9176# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
9177pxor %xmm12,%xmm11
9178
9179# qhasm: xmm11 = xmm15
9180# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
9181# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
9182movdqa %xmm13,%xmm10
9183
9184# qhasm: xmm11 ^= xmm14
9185# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
9186# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
9187pxor %xmm11,%xmm10
9188
9189# qhasm: xmm11 &= xmm7
9190# asm 1: pand <xmm7=int6464#8,<xmm11=int6464#11
9191# asm 2: pand <xmm7=%xmm7,<xmm11=%xmm10
9192pand %xmm7,%xmm10
9193
9194# qhasm: xmm7 ^= xmm1
9195# asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8
9196# asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7
9197pxor %xmm1,%xmm7
9198
9199# qhasm: xmm7 &= xmm14
9200# asm 1: pand <xmm14=int6464#12,<xmm7=int6464#8
9201# asm 2: pand <xmm14=%xmm11,<xmm7=%xmm7
9202pand %xmm11,%xmm7
9203
9204# qhasm: xmm1 &= xmm15
9205# asm 1: pand <xmm15=int6464#14,<xmm1=int6464#2
9206# asm 2: pand <xmm15=%xmm13,<xmm1=%xmm1
9207pand %xmm13,%xmm1
9208
9209# qhasm: xmm7 ^= xmm1
9210# asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8
9211# asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7
9212pxor %xmm1,%xmm7
9213
9214# qhasm: xmm1 ^= xmm11
9215# asm 1: pxor <xmm11=int6464#11,<xmm1=int6464#2
9216# asm 2: pxor <xmm11=%xmm10,<xmm1=%xmm1
9217pxor %xmm10,%xmm1
9218
9219# qhasm: xmm7 ^= xmm12
9220# asm 1: pxor <xmm12=int6464#9,<xmm7=int6464#8
9221# asm 2: pxor <xmm12=%xmm8,<xmm7=%xmm7
9222pxor %xmm8,%xmm7
9223
9224# qhasm: xmm6 ^= xmm12
9225# asm 1: pxor <xmm12=int6464#9,<xmm6=int6464#7
9226# asm 2: pxor <xmm12=%xmm8,<xmm6=%xmm6
9227pxor %xmm8,%xmm6
9228
9229# qhasm: xmm1 ^= xmm8
9230# asm 1: pxor <xmm8=int6464#10,<xmm1=int6464#2
9231# asm 2: pxor <xmm8=%xmm9,<xmm1=%xmm1
9232pxor %xmm9,%xmm1
9233
9234# qhasm: xmm3 ^= xmm8
9235# asm 1: pxor <xmm8=int6464#10,<xmm3=int6464#4
9236# asm 2: pxor <xmm8=%xmm9,<xmm3=%xmm3
9237pxor %xmm9,%xmm3
9238
9239# qhasm: xmm7 ^= xmm0
9240# asm 1: pxor <xmm0=int6464#1,<xmm7=int6464#8
9241# asm 2: pxor <xmm0=%xmm0,<xmm7=%xmm7
9242pxor %xmm0,%xmm7
9243
9244# qhasm: xmm1 ^= xmm4
9245# asm 1: pxor <xmm4=int6464#5,<xmm1=int6464#2
9246# asm 2: pxor <xmm4=%xmm4,<xmm1=%xmm1
9247pxor %xmm4,%xmm1
9248
9249# qhasm: xmm6 ^= xmm7
9250# asm 1: pxor <xmm7=int6464#8,<xmm6=int6464#7
9251# asm 2: pxor <xmm7=%xmm7,<xmm6=%xmm6
9252pxor %xmm7,%xmm6
9253
9254# qhasm: xmm4 ^= xmm0
9255# asm 1: pxor <xmm0=int6464#1,<xmm4=int6464#5
9256# asm 2: pxor <xmm0=%xmm0,<xmm4=%xmm4
9257pxor %xmm0,%xmm4
9258
9259# qhasm: xmm0 ^= xmm1
9260# asm 1: pxor <xmm1=int6464#2,<xmm0=int6464#1
9261# asm 2: pxor <xmm1=%xmm1,<xmm0=%xmm0
9262pxor %xmm1,%xmm0
9263
9264# qhasm: xmm1 ^= xmm5
9265# asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2
9266# asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1
9267pxor %xmm5,%xmm1
9268
9269# qhasm: xmm5 ^= xmm3
9270# asm 1: pxor <xmm3=int6464#4,<xmm5=int6464#6
9271# asm 2: pxor <xmm3=%xmm3,<xmm5=%xmm5
9272pxor %xmm3,%xmm5
9273
9274# qhasm: xmm6 ^= xmm5
9275# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
9276# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
9277pxor %xmm5,%xmm6
9278
9279# qhasm: xmm3 ^= xmm2
9280# asm 1: pxor <xmm2=int6464#3,<xmm3=int6464#4
9281# asm 2: pxor <xmm2=%xmm2,<xmm3=%xmm3
9282pxor %xmm2,%xmm3
9283
9284# qhasm: xmm2 ^= xmm5
9285# asm 1: pxor <xmm5=int6464#6,<xmm2=int6464#3
9286# asm 2: pxor <xmm5=%xmm5,<xmm2=%xmm2
9287pxor %xmm5,%xmm2
9288
9289# qhasm: xmm4 ^= xmm2
9290# asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5
9291# asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4
9292pxor %xmm2,%xmm4
9293
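# note: RCON carries the round constant (all-ones in one 32-bit
# lane); XORing it into a single plane adds this round's rcon to the
# affected word.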
9294# qhasm: xmm3 ^= RCON
9295# asm 1: pxor RCON,<xmm3=int6464#4
9296# asm 2: pxor RCON,<xmm3=%xmm3
9297pxor RCON,%xmm3
9298
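# note: EXPB0 broadcasts byte 3 of each 32-bit word across that word,
# lining the substituted bytes up for the expansion XORs below.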
9299# qhasm: shuffle bytes of xmm0 by EXPB0
9300# asm 1: pshufb EXPB0,<xmm0=int6464#1
9301# asm 2: pshufb EXPB0,<xmm0=%xmm0
9302pshufb EXPB0,%xmm0
9303
9304# qhasm: shuffle bytes of xmm1 by EXPB0
9305# asm 1: pshufb EXPB0,<xmm1=int6464#2
9306# asm 2: pshufb EXPB0,<xmm1=%xmm1
9307pshufb EXPB0,%xmm1
9308
9309# qhasm: shuffle bytes of xmm6 by EXPB0
9310# asm 1: pshufb EXPB0,<xmm6=int6464#7
9311# asm 2: pshufb EXPB0,<xmm6=%xmm6
9312pshufb EXPB0,%xmm6
9313
9314# qhasm: shuffle bytes of xmm4 by EXPB0
9315# asm 1: pshufb EXPB0,<xmm4=int6464#5
9316# asm 2: pshufb EXPB0,<xmm4=%xmm4
9317pshufb EXPB0,%xmm4
9318
9319# qhasm: shuffle bytes of xmm2 by EXPB0
9320# asm 1: pshufb EXPB0,<xmm2=int6464#3
9321# asm 2: pshufb EXPB0,<xmm2=%xmm2
9322pshufb EXPB0,%xmm2
9323
9324# qhasm: shuffle bytes of xmm7 by EXPB0
9325# asm 1: pshufb EXPB0,<xmm7=int6464#8
9326# asm 2: pshufb EXPB0,<xmm7=%xmm7
9327pshufb EXPB0,%xmm7
9328
9329# qhasm: shuffle bytes of xmm3 by EXPB0
9330# asm 1: pshufb EXPB0,<xmm3=int6464#4
9331# asm 2: pshufb EXPB0,<xmm3=%xmm3
9332pshufb EXPB0,%xmm3
9333
9334# qhasm: shuffle bytes of xmm5 by EXPB0
9335# asm 1: pshufb EXPB0,<xmm5=int6464#6
9336# asm 2: pshufb EXPB0,<xmm5=%xmm5
9337pshufb EXPB0,%xmm5
9338
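# note: reload the previous round key (stored above at c + 768 ..
# c + 880) into xmm8..xmm15 so it can be folded into the new words.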
9339# qhasm: xmm8 = *(int128 *)(c + 768)
9340# asm 1: movdqa 768(<c=int64#1),>xmm8=int6464#9
9341# asm 2: movdqa 768(<c=%rdi),>xmm8=%xmm8
9342movdqa 768(%rdi),%xmm8
9343
9344# qhasm: xmm9 = *(int128 *)(c + 784)
9345# asm 1: movdqa 784(<c=int64#1),>xmm9=int6464#10
9346# asm 2: movdqa 784(<c=%rdi),>xmm9=%xmm9
9347movdqa 784(%rdi),%xmm9
9348
9349# qhasm: xmm10 = *(int128 *)(c + 800)
9350# asm 1: movdqa 800(<c=int64#1),>xmm10=int6464#11
9351# asm 2: movdqa 800(<c=%rdi),>xmm10=%xmm10
9352movdqa 800(%rdi),%xmm10
9353
9354# qhasm: xmm11 = *(int128 *)(c + 816)
9355# asm 1: movdqa 816(<c=int64#1),>xmm11=int6464#12
9356# asm 2: movdqa 816(<c=%rdi),>xmm11=%xmm11
9357movdqa 816(%rdi),%xmm11
9358
9359# qhasm: xmm12 = *(int128 *)(c + 832)
9360# asm 1: movdqa 832(<c=int64#1),>xmm12=int6464#13
9361# asm 2: movdqa 832(<c=%rdi),>xmm12=%xmm12
9362movdqa 832(%rdi),%xmm12
9363
9364# qhasm: xmm13 = *(int128 *)(c + 848)
9365# asm 1: movdqa 848(<c=int64#1),>xmm13=int6464#14
9366# asm 2: movdqa 848(<c=%rdi),>xmm13=%xmm13
9367movdqa 848(%rdi),%xmm13
9368
9369# qhasm: xmm14 = *(int128 *)(c + 864)
9370# asm 1: movdqa 864(<c=int64#1),>xmm14=int6464#15
9371# asm 2: movdqa 864(<c=%rdi),>xmm14=%xmm14
9372movdqa 864(%rdi),%xmm14
9373
9374# qhasm: xmm15 = *(int128 *)(c + 880)
9375# asm 1: movdqa 880(<c=int64#1),>xmm15=int6464#16
9376# asm 2: movdqa 880(<c=%rdi),>xmm15=%xmm15
9377movdqa 880(%rdi),%xmm15
9378
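# note: four planes of the reloaded key are complemented with ONE,
# seemingly to match the inverted representation the S-box output is
# held in.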
9379# qhasm: xmm8 ^= ONE
9380# asm 1: pxor ONE,<xmm8=int6464#9
9381# asm 2: pxor ONE,<xmm8=%xmm8
9382pxor ONE,%xmm8
9383
9384# qhasm: xmm9 ^= ONE
9385# asm 1: pxor ONE,<xmm9=int6464#10
9386# asm 2: pxor ONE,<xmm9=%xmm9
9387pxor ONE,%xmm9
9388
9389# qhasm: xmm13 ^= ONE
9390# asm 1: pxor ONE,<xmm13=int6464#14
9391# asm 2: pxor ONE,<xmm13=%xmm13
9392pxor ONE,%xmm13
9393
9394# qhasm: xmm14 ^= ONE
9395# asm 1: pxor ONE,<xmm14=int6464#15
9396# asm 2: pxor ONE,<xmm14=%xmm14
9397pxor ONE,%xmm14
9398
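# note: the XOR / psrld $8 / XOR cascade below computes the AES-128
# key-expansion recurrence for all four words at once.  In
# conventional terms, with t = SubWord(RotWord(w[4i-1])) ^ rcon[i],
# the new words are prefix XORs of the old ones:
#   w[4i+j] = t ^ w[4i-4] ^ ... ^ w[4i-4+j],   j = 0..3.
# The state already holds t; XORing in the old key, then three times
# its planes shifted right by 8 bits per 32-bit lane (evidently one
# word step in this bit-sliced layout), accumulates exactly those
# prefix XORs.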
9399# qhasm: xmm0 ^= xmm8
9400# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
9401# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
9402pxor %xmm8,%xmm0
9403
9404# qhasm: xmm1 ^= xmm9
9405# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
9406# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
9407pxor %xmm9,%xmm1
9408
9409# qhasm: xmm6 ^= xmm10
9410# asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7
9411# asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6
9412pxor %xmm10,%xmm6
9413
9414# qhasm: xmm4 ^= xmm11
9415# asm 1: pxor <xmm11=int6464#12,<xmm4=int6464#5
9416# asm 2: pxor <xmm11=%xmm11,<xmm4=%xmm4
9417pxor %xmm11,%xmm4
9418
9419# qhasm: xmm2 ^= xmm12
9420# asm 1: pxor <xmm12=int6464#13,<xmm2=int6464#3
9421# asm 2: pxor <xmm12=%xmm12,<xmm2=%xmm2
9422pxor %xmm12,%xmm2
9423
9424# qhasm: xmm7 ^= xmm13
9425# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
9426# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
9427pxor %xmm13,%xmm7
9428
9429# qhasm: xmm3 ^= xmm14
9430# asm 1: pxor <xmm14=int6464#15,<xmm3=int6464#4
9431# asm 2: pxor <xmm14=%xmm14,<xmm3=%xmm3
9432pxor %xmm14,%xmm3
9433
9434# qhasm: xmm5 ^= xmm15
9435# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
9436# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
9437pxor %xmm15,%xmm5
9438
9439# qhasm: uint32323232 xmm8 >>= 8
9440# asm 1: psrld $8,<xmm8=int6464#9
9441# asm 2: psrld $8,<xmm8=%xmm8
9442psrld $8,%xmm8
9443
9444# qhasm: uint32323232 xmm9 >>= 8
9445# asm 1: psrld $8,<xmm9=int6464#10
9446# asm 2: psrld $8,<xmm9=%xmm9
9447psrld $8,%xmm9
9448
9449# qhasm: uint32323232 xmm10 >>= 8
9450# asm 1: psrld $8,<xmm10=int6464#11
9451# asm 2: psrld $8,<xmm10=%xmm10
9452psrld $8,%xmm10
9453
9454# qhasm: uint32323232 xmm11 >>= 8
9455# asm 1: psrld $8,<xmm11=int6464#12
9456# asm 2: psrld $8,<xmm11=%xmm11
9457psrld $8,%xmm11
9458
9459# qhasm: uint32323232 xmm12 >>= 8
9460# asm 1: psrld $8,<xmm12=int6464#13
9461# asm 2: psrld $8,<xmm12=%xmm12
9462psrld $8,%xmm12
9463
9464# qhasm: uint32323232 xmm13 >>= 8
9465# asm 1: psrld $8,<xmm13=int6464#14
9466# asm 2: psrld $8,<xmm13=%xmm13
9467psrld $8,%xmm13
9468
9469# qhasm: uint32323232 xmm14 >>= 8
9470# asm 1: psrld $8,<xmm14=int6464#15
9471# asm 2: psrld $8,<xmm14=%xmm14
9472psrld $8,%xmm14
9473
9474# qhasm: uint32323232 xmm15 >>= 8
9475# asm 1: psrld $8,<xmm15=int6464#16
9476# asm 2: psrld $8,<xmm15=%xmm15
9477psrld $8,%xmm15
9478
9479# qhasm: xmm0 ^= xmm8
9480# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
9481# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
9482pxor %xmm8,%xmm0
9483
9484# qhasm: xmm1 ^= xmm9
9485# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
9486# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
9487pxor %xmm9,%xmm1
9488
9489# qhasm: xmm6 ^= xmm10
9490# asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7
9491# asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6
9492pxor %xmm10,%xmm6
9493
9494# qhasm: xmm4 ^= xmm11
9495# asm 1: pxor <xmm11=int6464#12,<xmm4=int6464#5
9496# asm 2: pxor <xmm11=%xmm11,<xmm4=%xmm4
9497pxor %xmm11,%xmm4
9498
9499# qhasm: xmm2 ^= xmm12
9500# asm 1: pxor <xmm12=int6464#13,<xmm2=int6464#3
9501# asm 2: pxor <xmm12=%xmm12,<xmm2=%xmm2
9502pxor %xmm12,%xmm2
9503
9504# qhasm: xmm7 ^= xmm13
9505# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
9506# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
9507pxor %xmm13,%xmm7
9508
9509# qhasm: xmm3 ^= xmm14
9510# asm 1: pxor <xmm14=int6464#15,<xmm3=int6464#4
9511# asm 2: pxor <xmm14=%xmm14,<xmm3=%xmm3
9512pxor %xmm14,%xmm3
9513
9514# qhasm: xmm5 ^= xmm15
9515# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
9516# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
9517pxor %xmm15,%xmm5
9518
9519# qhasm: uint32323232 xmm8 >>= 8
9520# asm 1: psrld $8,<xmm8=int6464#9
9521# asm 2: psrld $8,<xmm8=%xmm8
9522psrld $8,%xmm8
9523
9524# qhasm: uint32323232 xmm9 >>= 8
9525# asm 1: psrld $8,<xmm9=int6464#10
9526# asm 2: psrld $8,<xmm9=%xmm9
9527psrld $8,%xmm9
9528
9529# qhasm: uint32323232 xmm10 >>= 8
9530# asm 1: psrld $8,<xmm10=int6464#11
9531# asm 2: psrld $8,<xmm10=%xmm10
9532psrld $8,%xmm10
9533
9534# qhasm: uint32323232 xmm11 >>= 8
9535# asm 1: psrld $8,<xmm11=int6464#12
9536# asm 2: psrld $8,<xmm11=%xmm11
9537psrld $8,%xmm11
9538
9539# qhasm: uint32323232 xmm12 >>= 8
9540# asm 1: psrld $8,<xmm12=int6464#13
9541# asm 2: psrld $8,<xmm12=%xmm12
9542psrld $8,%xmm12
9543
9544# qhasm: uint32323232 xmm13 >>= 8
9545# asm 1: psrld $8,<xmm13=int6464#14
9546# asm 2: psrld $8,<xmm13=%xmm13
9547psrld $8,%xmm13
9548
9549# qhasm: uint32323232 xmm14 >>= 8
9550# asm 1: psrld $8,<xmm14=int6464#15
9551# asm 2: psrld $8,<xmm14=%xmm14
9552psrld $8,%xmm14
9553
9554# qhasm: uint32323232 xmm15 >>= 8
9555# asm 1: psrld $8,<xmm15=int6464#16
9556# asm 2: psrld $8,<xmm15=%xmm15
9557psrld $8,%xmm15
9558
9559# qhasm: xmm0 ^= xmm8
9560# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
9561# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
9562pxor %xmm8,%xmm0
9563
9564# qhasm: xmm1 ^= xmm9
9565# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
9566# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
9567pxor %xmm9,%xmm1
9568
9569# qhasm: xmm6 ^= xmm10
9570# asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7
9571# asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6
9572pxor %xmm10,%xmm6
9573
9574# qhasm: xmm4 ^= xmm11
9575# asm 1: pxor <xmm11=int6464#12,<xmm4=int6464#5
9576# asm 2: pxor <xmm11=%xmm11,<xmm4=%xmm4
9577pxor %xmm11,%xmm4
9578
9579# qhasm: xmm2 ^= xmm12
9580# asm 1: pxor <xmm12=int6464#13,<xmm2=int6464#3
9581# asm 2: pxor <xmm12=%xmm12,<xmm2=%xmm2
9582pxor %xmm12,%xmm2
9583
9584# qhasm: xmm7 ^= xmm13
9585# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
9586# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
9587pxor %xmm13,%xmm7
9588
9589# qhasm: xmm3 ^= xmm14
9590# asm 1: pxor <xmm14=int6464#15,<xmm3=int6464#4
9591# asm 2: pxor <xmm14=%xmm14,<xmm3=%xmm3
9592pxor %xmm14,%xmm3
9593
9594# qhasm: xmm5 ^= xmm15
9595# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
9596# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
9597pxor %xmm15,%xmm5
9598
9599# qhasm: uint32323232 xmm8 >>= 8
9600# asm 1: psrld $8,<xmm8=int6464#9
9601# asm 2: psrld $8,<xmm8=%xmm8
9602psrld $8,%xmm8
9603
9604# qhasm: uint32323232 xmm9 >>= 8
9605# asm 1: psrld $8,<xmm9=int6464#10
9606# asm 2: psrld $8,<xmm9=%xmm9
9607psrld $8,%xmm9
9608
9609# qhasm: uint32323232 xmm10 >>= 8
9610# asm 1: psrld $8,<xmm10=int6464#11
9611# asm 2: psrld $8,<xmm10=%xmm10
9612psrld $8,%xmm10
9613
9614# qhasm: uint32323232 xmm11 >>= 8
9615# asm 1: psrld $8,<xmm11=int6464#12
9616# asm 2: psrld $8,<xmm11=%xmm11
9617psrld $8,%xmm11
9618
9619# qhasm: uint32323232 xmm12 >>= 8
9620# asm 1: psrld $8,<xmm12=int6464#13
9621# asm 2: psrld $8,<xmm12=%xmm12
9622psrld $8,%xmm12
9623
9624# qhasm: uint32323232 xmm13 >>= 8
9625# asm 1: psrld $8,<xmm13=int6464#14
9626# asm 2: psrld $8,<xmm13=%xmm13
9627psrld $8,%xmm13
9628
9629# qhasm: uint32323232 xmm14 >>= 8
9630# asm 1: psrld $8,<xmm14=int6464#15
9631# asm 2: psrld $8,<xmm14=%xmm14
9632psrld $8,%xmm14
9633
9634# qhasm: uint32323232 xmm15 >>= 8
9635# asm 1: psrld $8,<xmm15=int6464#16
9636# asm 2: psrld $8,<xmm15=%xmm15
9637psrld $8,%xmm15
9638
9639# qhasm: xmm0 ^= xmm8
9640# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
9641# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
9642pxor %xmm8,%xmm0
9643
9644# qhasm: xmm1 ^= xmm9
9645# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
9646# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
9647pxor %xmm9,%xmm1
9648
9649# qhasm: xmm6 ^= xmm10
9650# asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7
9651# asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6
9652pxor %xmm10,%xmm6
9653
9654# qhasm: xmm4 ^= xmm11
9655# asm 1: pxor <xmm11=int6464#12,<xmm4=int6464#5
9656# asm 2: pxor <xmm11=%xmm11,<xmm4=%xmm4
9657pxor %xmm11,%xmm4
9658
9659# qhasm: xmm2 ^= xmm12
9660# asm 1: pxor <xmm12=int6464#13,<xmm2=int6464#3
9661# asm 2: pxor <xmm12=%xmm12,<xmm2=%xmm2
9662pxor %xmm12,%xmm2
9663
9664# qhasm: xmm7 ^= xmm13
9665# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
9666# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
9667pxor %xmm13,%xmm7
9668
9669# qhasm: xmm3 ^= xmm14
9670# asm 1: pxor <xmm14=int6464#15,<xmm3=int6464#4
9671# asm 2: pxor <xmm14=%xmm14,<xmm3=%xmm3
9672pxor %xmm14,%xmm3
9673
9674# qhasm: xmm5 ^= xmm15
9675# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
9676# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
9677pxor %xmm15,%xmm5
9678
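# note: the new round key is complete; store its eight planes at
# c + 896 .. c + 1008, the next 128-byte slot of the schedule.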
9679# qhasm: *(int128 *)(c + 896) = xmm0
9680# asm 1: movdqa <xmm0=int6464#1,896(<c=int64#1)
9681# asm 2: movdqa <xmm0=%xmm0,896(<c=%rdi)
9682movdqa %xmm0,896(%rdi)
9683
9684# qhasm: *(int128 *)(c + 912) = xmm1
9685# asm 1: movdqa <xmm1=int6464#2,912(<c=int64#1)
9686# asm 2: movdqa <xmm1=%xmm1,912(<c=%rdi)
9687movdqa %xmm1,912(%rdi)
9688
9689# qhasm: *(int128 *)(c + 928) = xmm6
9690# asm 1: movdqa <xmm6=int6464#7,928(<c=int64#1)
9691# asm 2: movdqa <xmm6=%xmm6,928(<c=%rdi)
9692movdqa %xmm6,928(%rdi)
9693
9694# qhasm: *(int128 *)(c + 944) = xmm4
9695# asm 1: movdqa <xmm4=int6464#5,944(<c=int64#1)
9696# asm 2: movdqa <xmm4=%xmm4,944(<c=%rdi)
9697movdqa %xmm4,944(%rdi)
9698
9699# qhasm: *(int128 *)(c + 960) = xmm2
9700# asm 1: movdqa <xmm2=int6464#3,960(<c=int64#1)
9701# asm 2: movdqa <xmm2=%xmm2,960(<c=%rdi)
9702movdqa %xmm2,960(%rdi)
9703
9704# qhasm: *(int128 *)(c + 976) = xmm7
9705# asm 1: movdqa <xmm7=int6464#8,976(<c=int64#1)
9706# asm 2: movdqa <xmm7=%xmm7,976(<c=%rdi)
9707movdqa %xmm7,976(%rdi)
9708
9709# qhasm: *(int128 *)(c + 992) = xmm3
9710# asm 1: movdqa <xmm3=int6464#4,992(<c=int64#1)
9711# asm 2: movdqa <xmm3=%xmm3,992(<c=%rdi)
9712movdqa %xmm3,992(%rdi)
9713
9714# qhasm: *(int128 *)(c + 1008) = xmm5
9715# asm 1: movdqa <xmm5=int6464#6,1008(<c=int64#1)
9716# asm 2: movdqa <xmm5=%xmm5,1008(<c=%rdi)
9717movdqa %xmm5,1008(%rdi)
9718
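# note: expansion of the following round key begins at once; the same
# invert / rotate / S-box pattern repeats from here.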
9719# qhasm: xmm0 ^= ONE
9720# asm 1: pxor ONE,<xmm0=int6464#1
9721# asm 2: pxor ONE,<xmm0=%xmm0
9722pxor ONE,%xmm0
9723
9724# qhasm: xmm1 ^= ONE
9725# asm 1: pxor ONE,<xmm1=int6464#2
9726# asm 2: pxor ONE,<xmm1=%xmm1
9727pxor ONE,%xmm1
9728
9729# qhasm: xmm7 ^= ONE
9730# asm 1: pxor ONE,<xmm7=int6464#8
9731# asm 2: pxor ONE,<xmm7=%xmm7
9732pxor ONE,%xmm7
9733
9734# qhasm: xmm3 ^= ONE
9735# asm 1: pxor ONE,<xmm3=int6464#4
9736# asm 2: pxor ONE,<xmm3=%xmm3
9737pxor ONE,%xmm3
9738
9739# qhasm: shuffle bytes of xmm0 by ROTB
9740# asm 1: pshufb ROTB,<xmm0=int6464#1
9741# asm 2: pshufb ROTB,<xmm0=%xmm0
9742pshufb ROTB,%xmm0
9743
9744# qhasm: shuffle bytes of xmm1 by ROTB
9745# asm 1: pshufb ROTB,<xmm1=int6464#2
9746# asm 2: pshufb ROTB,<xmm1=%xmm1
9747pshufb ROTB,%xmm1
9748
9749# qhasm: shuffle bytes of xmm6 by ROTB
9750# asm 1: pshufb ROTB,<xmm6=int6464#7
9751# asm 2: pshufb ROTB,<xmm6=%xmm6
9752pshufb ROTB,%xmm6
9753
9754# qhasm: shuffle bytes of xmm4 by ROTB
9755# asm 1: pshufb ROTB,<xmm4=int6464#5
9756# asm 2: pshufb ROTB,<xmm4=%xmm4
9757pshufb ROTB,%xmm4
9758
9759# qhasm: shuffle bytes of xmm2 by ROTB
9760# asm 1: pshufb ROTB,<xmm2=int6464#3
9761# asm 2: pshufb ROTB,<xmm2=%xmm2
9762pshufb ROTB,%xmm2
9763
9764# qhasm: shuffle bytes of xmm7 by ROTB
9765# asm 1: pshufb ROTB,<xmm7=int6464#8
9766# asm 2: pshufb ROTB,<xmm7=%xmm7
9767pshufb ROTB,%xmm7
9768
9769# qhasm: shuffle bytes of xmm3 by ROTB
9770# asm 1: pshufb ROTB,<xmm3=int6464#4
9771# asm 2: pshufb ROTB,<xmm3=%xmm3
9772pshufb ROTB,%xmm3
9773
9774# qhasm: shuffle bytes of xmm5 by ROTB
9775# asm 1: pshufb ROTB,<xmm5=int6464#6
9776# asm 2: pshufb ROTB,<xmm5=%xmm5
9777pshufb ROTB,%xmm5
9778
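# note: second instance of the bit-sliced S-box circuit, structurally
# the same as the one above but with the roles of the planes
# permuted.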
9779# qhasm: xmm7 ^= xmm3
9780# asm 1: pxor <xmm3=int6464#4,<xmm7=int6464#8
9781# asm 2: pxor <xmm3=%xmm3,<xmm7=%xmm7
9782pxor %xmm3,%xmm7
9783
9784# qhasm: xmm6 ^= xmm1
9785# asm 1: pxor <xmm1=int6464#2,<xmm6=int6464#7
9786# asm 2: pxor <xmm1=%xmm1,<xmm6=%xmm6
9787pxor %xmm1,%xmm6
9788
9789# qhasm: xmm7 ^= xmm0
9790# asm 1: pxor <xmm0=int6464#1,<xmm7=int6464#8
9791# asm 2: pxor <xmm0=%xmm0,<xmm7=%xmm7
9792pxor %xmm0,%xmm7
9793
9794# qhasm: xmm3 ^= xmm6
9795# asm 1: pxor <xmm6=int6464#7,<xmm3=int6464#4
9796# asm 2: pxor <xmm6=%xmm6,<xmm3=%xmm3
9797pxor %xmm6,%xmm3
9798
9799# qhasm: xmm4 ^= xmm0
9800# asm 1: pxor <xmm0=int6464#1,<xmm4=int6464#5
9801# asm 2: pxor <xmm0=%xmm0,<xmm4=%xmm4
9802pxor %xmm0,%xmm4
9803
9804# qhasm: xmm3 ^= xmm4
9805# asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4
9806# asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3
9807pxor %xmm4,%xmm3
9808
9809# qhasm: xmm4 ^= xmm5
9810# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
9811# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
9812pxor %xmm5,%xmm4
9813
9814# qhasm: xmm4 ^= xmm2
9815# asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5
9816# asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4
9817pxor %xmm2,%xmm4
9818
9819# qhasm: xmm5 ^= xmm7
9820# asm 1: pxor <xmm7=int6464#8,<xmm5=int6464#6
9821# asm 2: pxor <xmm7=%xmm7,<xmm5=%xmm5
9822pxor %xmm7,%xmm5
9823
9824# qhasm: xmm4 ^= xmm1
9825# asm 1: pxor <xmm1=int6464#2,<xmm4=int6464#5
9826# asm 2: pxor <xmm1=%xmm1,<xmm4=%xmm4
9827pxor %xmm1,%xmm4
9828
9829# qhasm: xmm2 ^= xmm7
9830# asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3
9831# asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2
9832pxor %xmm7,%xmm2
9833
9834# qhasm: xmm6 ^= xmm5
9835# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
9836# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
9837pxor %xmm5,%xmm6
9838
9839# qhasm: xmm1 ^= xmm7
9840# asm 1: pxor <xmm7=int6464#8,<xmm1=int6464#2
9841# asm 2: pxor <xmm7=%xmm7,<xmm1=%xmm1
9842pxor %xmm7,%xmm1
9843
9844# qhasm: xmm11 = xmm5
9845# asm 1: movdqa <xmm5=int6464#6,>xmm11=int6464#9
9846# asm 2: movdqa <xmm5=%xmm5,>xmm11=%xmm8
9847movdqa %xmm5,%xmm8
9848
9849# qhasm: xmm10 = xmm1
9850# asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10
9851# asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9
9852movdqa %xmm1,%xmm9
9853
9854# qhasm: xmm9 = xmm7
9855# asm 1: movdqa <xmm7=int6464#8,>xmm9=int6464#11
9856# asm 2: movdqa <xmm7=%xmm7,>xmm9=%xmm10
9857movdqa %xmm7,%xmm10
9858
9859# qhasm: xmm13 = xmm6
9860# asm 1: movdqa <xmm6=int6464#7,>xmm13=int6464#12
9861# asm 2: movdqa <xmm6=%xmm6,>xmm13=%xmm11
9862movdqa %xmm6,%xmm11
9863
9864# qhasm: xmm12 = xmm3
9865# asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#13
9866# asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm12
9867movdqa %xmm3,%xmm12
9868
9869# qhasm: xmm11 ^= xmm2
9870# asm 1: pxor <xmm2=int6464#3,<xmm11=int6464#9
9871# asm 2: pxor <xmm2=%xmm2,<xmm11=%xmm8
9872pxor %xmm2,%xmm8
9873
9874# qhasm: xmm10 ^= xmm6
9875# asm 1: pxor <xmm6=int6464#7,<xmm10=int6464#10
9876# asm 2: pxor <xmm6=%xmm6,<xmm10=%xmm9
9877pxor %xmm6,%xmm9
9878
9879# qhasm: xmm9 ^= xmm4
9880# asm 1: pxor <xmm4=int6464#5,<xmm9=int6464#11
9881# asm 2: pxor <xmm4=%xmm4,<xmm9=%xmm10
9882pxor %xmm4,%xmm10
9883
9884# qhasm: xmm13 ^= xmm2
9885# asm 1: pxor <xmm2=int6464#3,<xmm13=int6464#12
9886# asm 2: pxor <xmm2=%xmm2,<xmm13=%xmm11
9887pxor %xmm2,%xmm11
9888
9889# qhasm: xmm12 ^= xmm0
9890# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13
9891# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12
9892pxor %xmm0,%xmm12
9893
9894# qhasm: xmm14 = xmm11
9895# asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14
9896# asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13
9897movdqa %xmm8,%xmm13
9898
9899# qhasm: xmm8 = xmm10
9900# asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15
9901# asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14
9902movdqa %xmm9,%xmm14
9903
9904# qhasm: xmm15 = xmm11
9905# asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16
9906# asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15
9907movdqa %xmm8,%xmm15
9908
9909# qhasm: xmm10 |= xmm9
9910# asm 1: por <xmm9=int6464#11,<xmm10=int6464#10
9911# asm 2: por <xmm9=%xmm10,<xmm10=%xmm9
9912por %xmm10,%xmm9
9913
9914# qhasm: xmm11 |= xmm12
9915# asm 1: por <xmm12=int6464#13,<xmm11=int6464#9
9916# asm 2: por <xmm12=%xmm12,<xmm11=%xmm8
9917por %xmm12,%xmm8
9918
9919# qhasm: xmm15 ^= xmm8
9920# asm 1: pxor <xmm8=int6464#15,<xmm15=int6464#16
9921# asm 2: pxor <xmm8=%xmm14,<xmm15=%xmm15
9922pxor %xmm14,%xmm15
9923
9924# qhasm: xmm14 &= xmm12
9925# asm 1: pand <xmm12=int6464#13,<xmm14=int6464#14
9926# asm 2: pand <xmm12=%xmm12,<xmm14=%xmm13
9927pand %xmm12,%xmm13
9928
9929# qhasm: xmm8 &= xmm9
9930# asm 1: pand <xmm9=int6464#11,<xmm8=int6464#15
9931# asm 2: pand <xmm9=%xmm10,<xmm8=%xmm14
9932pand %xmm10,%xmm14
9933
9934# qhasm: xmm12 ^= xmm9
9935# asm 1: pxor <xmm9=int6464#11,<xmm12=int6464#13
9936# asm 2: pxor <xmm9=%xmm10,<xmm12=%xmm12
9937pxor %xmm10,%xmm12
9938
9939# qhasm: xmm15 &= xmm12
9940# asm 1: pand <xmm12=int6464#13,<xmm15=int6464#16
9941# asm 2: pand <xmm12=%xmm12,<xmm15=%xmm15
9942pand %xmm12,%xmm15
9943
9944# qhasm: xmm12 = xmm4
9945# asm 1: movdqa <xmm4=int6464#5,>xmm12=int6464#11
9946# asm 2: movdqa <xmm4=%xmm4,>xmm12=%xmm10
9947movdqa %xmm4,%xmm10
9948
9949# qhasm: xmm12 ^= xmm0
9950# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#11
9951# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm10
9952pxor %xmm0,%xmm10
9953
9954# qhasm: xmm13 &= xmm12
9955# asm 1: pand <xmm12=int6464#11,<xmm13=int6464#12
9956# asm 2: pand <xmm12=%xmm10,<xmm13=%xmm11
9957pand %xmm10,%xmm11
9958
9959# qhasm: xmm11 ^= xmm13
9960# asm 1: pxor <xmm13=int6464#12,<xmm11=int6464#9
9961# asm 2: pxor <xmm13=%xmm11,<xmm11=%xmm8
9962pxor %xmm11,%xmm8
9963
9964# qhasm: xmm10 ^= xmm13
9965# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
9966# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
9967pxor %xmm11,%xmm9
9968
9969# qhasm: xmm13 = xmm5
9970# asm 1: movdqa <xmm5=int6464#6,>xmm13=int6464#11
9971# asm 2: movdqa <xmm5=%xmm5,>xmm13=%xmm10
9972movdqa %xmm5,%xmm10
9973
9974# qhasm: xmm13 ^= xmm1
9975# asm 1: pxor <xmm1=int6464#2,<xmm13=int6464#11
9976# asm 2: pxor <xmm1=%xmm1,<xmm13=%xmm10
9977pxor %xmm1,%xmm10
9978
9979# qhasm: xmm12 = xmm7
9980# asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#12
9981# asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm11
9982movdqa %xmm7,%xmm11
9983
9984# qhasm: xmm9 = xmm13
9985# asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13
9986# asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12
9987movdqa %xmm10,%xmm12
9988
9989# qhasm: xmm12 ^= xmm3
9990# asm 1: pxor <xmm3=int6464#4,<xmm12=int6464#12
9991# asm 2: pxor <xmm3=%xmm3,<xmm12=%xmm11
9992pxor %xmm3,%xmm11
9993
9994# qhasm: xmm9 |= xmm12
9995# asm 1: por <xmm12=int6464#12,<xmm9=int6464#13
9996# asm 2: por <xmm12=%xmm11,<xmm9=%xmm12
9997por %xmm11,%xmm12
9998
9999# qhasm: xmm13 &= xmm12
10000# asm 1: pand <xmm12=int6464#12,<xmm13=int6464#11
10001# asm 2: pand <xmm12=%xmm11,<xmm13=%xmm10
10002pand %xmm11,%xmm10
10003
10004# qhasm: xmm8 ^= xmm13
10005# asm 1: pxor <xmm13=int6464#11,<xmm8=int6464#15
10006# asm 2: pxor <xmm13=%xmm10,<xmm8=%xmm14
10007pxor %xmm10,%xmm14
10008
10009# qhasm: xmm11 ^= xmm15
10010# asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#9
10011# asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm8
10012pxor %xmm15,%xmm8
10013
10014# qhasm: xmm10 ^= xmm14
10015# asm 1: pxor <xmm14=int6464#14,<xmm10=int6464#10
10016# asm 2: pxor <xmm14=%xmm13,<xmm10=%xmm9
10017pxor %xmm13,%xmm9
10018
10019# qhasm: xmm9 ^= xmm15
10020# asm 1: pxor <xmm15=int6464#16,<xmm9=int6464#13
10021# asm 2: pxor <xmm15=%xmm15,<xmm9=%xmm12
10022pxor %xmm15,%xmm12
10023
10024# qhasm: xmm8 ^= xmm14
10025# asm 1: pxor <xmm14=int6464#14,<xmm8=int6464#15
10026# asm 2: pxor <xmm14=%xmm13,<xmm8=%xmm14
10027pxor %xmm13,%xmm14
10028
10029# qhasm: xmm9 ^= xmm14
10030# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
10031# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
10032pxor %xmm13,%xmm12
10033
10034# qhasm: xmm12 = xmm6
10035# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#11
10036# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm10
10037movdqa %xmm6,%xmm10
10038
10039# qhasm: xmm13 = xmm2
10040# asm 1: movdqa <xmm2=int6464#3,>xmm13=int6464#12
10041# asm 2: movdqa <xmm2=%xmm2,>xmm13=%xmm11
10042movdqa %xmm2,%xmm11
10043
10044# qhasm: xmm14 = xmm1
10045# asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14
10046# asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13
10047movdqa %xmm1,%xmm13
10048
10049# qhasm: xmm15 = xmm5
10050# asm 1: movdqa <xmm5=int6464#6,>xmm15=int6464#16
10051# asm 2: movdqa <xmm5=%xmm5,>xmm15=%xmm15
10052movdqa %xmm5,%xmm15
10053
10054# qhasm: xmm12 &= xmm4
10055# asm 1: pand <xmm4=int6464#5,<xmm12=int6464#11
10056# asm 2: pand <xmm4=%xmm4,<xmm12=%xmm10
10057pand %xmm4,%xmm10
10058
10059# qhasm: xmm13 &= xmm0
10060# asm 1: pand <xmm0=int6464#1,<xmm13=int6464#12
10061# asm 2: pand <xmm0=%xmm0,<xmm13=%xmm11
10062pand %xmm0,%xmm11
10063
10064# qhasm: xmm14 &= xmm7
10065# asm 1: pand <xmm7=int6464#8,<xmm14=int6464#14
10066# asm 2: pand <xmm7=%xmm7,<xmm14=%xmm13
10067pand %xmm7,%xmm13
10068
10069# qhasm: xmm15 |= xmm3
10070# asm 1: por <xmm3=int6464#4,<xmm15=int6464#16
10071# asm 2: por <xmm3=%xmm3,<xmm15=%xmm15
10072por %xmm3,%xmm15
10073
10074# qhasm: xmm11 ^= xmm12
10075# asm 1: pxor <xmm12=int6464#11,<xmm11=int6464#9
10076# asm 2: pxor <xmm12=%xmm10,<xmm11=%xmm8
10077pxor %xmm10,%xmm8
10078
10079# qhasm: xmm10 ^= xmm13
10080# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
10081# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
10082pxor %xmm11,%xmm9
10083
10084# qhasm: xmm9 ^= xmm14
10085# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
10086# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
10087pxor %xmm13,%xmm12
10088
10089# qhasm: xmm8 ^= xmm15
10090# asm 1: pxor <xmm15=int6464#16,<xmm8=int6464#15
10091# asm 2: pxor <xmm15=%xmm15,<xmm8=%xmm14
10092pxor %xmm15,%xmm14
10093
10094# qhasm: xmm12 = xmm11
10095# asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11
10096# asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10
10097movdqa %xmm8,%xmm10
10098
10099# qhasm: xmm12 ^= xmm10
10100# asm 1: pxor <xmm10=int6464#10,<xmm12=int6464#11
10101# asm 2: pxor <xmm10=%xmm9,<xmm12=%xmm10
10102pxor %xmm9,%xmm10
10103
10104# qhasm: xmm11 &= xmm9
10105# asm 1: pand <xmm9=int6464#13,<xmm11=int6464#9
10106# asm 2: pand <xmm9=%xmm12,<xmm11=%xmm8
10107pand %xmm12,%xmm8
10108
10109# qhasm: xmm14 = xmm8
10110# asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12
10111# asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11
10112movdqa %xmm14,%xmm11
10113
10114# qhasm: xmm14 ^= xmm11
10115# asm 1: pxor <xmm11=int6464#9,<xmm14=int6464#12
10116# asm 2: pxor <xmm11=%xmm8,<xmm14=%xmm11
10117pxor %xmm8,%xmm11
10118
10119# qhasm: xmm15 = xmm12
10120# asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14
10121# asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13
10122movdqa %xmm10,%xmm13
10123
10124# qhasm: xmm15 &= xmm14
10125# asm 1: pand <xmm14=int6464#12,<xmm15=int6464#14
10126# asm 2: pand <xmm14=%xmm11,<xmm15=%xmm13
10127pand %xmm11,%xmm13
10128
10129# qhasm: xmm15 ^= xmm10
10130# asm 1: pxor <xmm10=int6464#10,<xmm15=int6464#14
10131# asm 2: pxor <xmm10=%xmm9,<xmm15=%xmm13
10132pxor %xmm9,%xmm13
10133
10134# qhasm: xmm13 = xmm9
10135# asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16
10136# asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15
10137movdqa %xmm12,%xmm15
10138
10139# qhasm: xmm13 ^= xmm8
10140# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
10141# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
10142pxor %xmm14,%xmm15
10143
10144# qhasm: xmm11 ^= xmm10
10145# asm 1: pxor <xmm10=int6464#10,<xmm11=int6464#9
10146# asm 2: pxor <xmm10=%xmm9,<xmm11=%xmm8
10147pxor %xmm9,%xmm8
10148
10149# qhasm: xmm13 &= xmm11
10150# asm 1: pand <xmm11=int6464#9,<xmm13=int6464#16
10151# asm 2: pand <xmm11=%xmm8,<xmm13=%xmm15
10152pand %xmm8,%xmm15
10153
10154# qhasm: xmm13 ^= xmm8
10155# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
10156# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
10157pxor %xmm14,%xmm15
10158
10159# qhasm: xmm9 ^= xmm13
10160# asm 1: pxor <xmm13=int6464#16,<xmm9=int6464#13
10161# asm 2: pxor <xmm13=%xmm15,<xmm9=%xmm12
10162pxor %xmm15,%xmm12
10163
10164# qhasm: xmm10 = xmm14
10165# asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9
10166# asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8
10167movdqa %xmm11,%xmm8
10168
10169# qhasm: xmm10 ^= xmm13
10170# asm 1: pxor <xmm13=int6464#16,<xmm10=int6464#9
10171# asm 2: pxor <xmm13=%xmm15,<xmm10=%xmm8
10172pxor %xmm15,%xmm8
10173
10174# qhasm: xmm10 &= xmm8
10175# asm 1: pand <xmm8=int6464#15,<xmm10=int6464#9
10176# asm 2: pand <xmm8=%xmm14,<xmm10=%xmm8
10177pand %xmm14,%xmm8
10178
10179# qhasm: xmm9 ^= xmm10
10180# asm 1: pxor <xmm10=int6464#9,<xmm9=int6464#13
10181# asm 2: pxor <xmm10=%xmm8,<xmm9=%xmm12
10182pxor %xmm8,%xmm12
10183
10184# qhasm: xmm14 ^= xmm10
10185# asm 1: pxor <xmm10=int6464#9,<xmm14=int6464#12
10186# asm 2: pxor <xmm10=%xmm8,<xmm14=%xmm11
10187pxor %xmm8,%xmm11
10188
10189# qhasm: xmm14 &= xmm15
10190# asm 1: pand <xmm15=int6464#14,<xmm14=int6464#12
10191# asm 2: pand <xmm15=%xmm13,<xmm14=%xmm11
10192pand %xmm13,%xmm11
10193
10194# qhasm: xmm14 ^= xmm12
10195# asm 1: pxor <xmm12=int6464#11,<xmm14=int6464#12
10196# asm 2: pxor <xmm12=%xmm10,<xmm14=%xmm11
10197pxor %xmm10,%xmm11
10198
10199# qhasm: xmm12 = xmm3
10200# asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#9
10201# asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm8
10202movdqa %xmm3,%xmm8
10203
10204# qhasm: xmm8 = xmm7
10205# asm 1: movdqa <xmm7=int6464#8,>xmm8=int6464#10
10206# asm 2: movdqa <xmm7=%xmm7,>xmm8=%xmm9
10207movdqa %xmm7,%xmm9
10208
10209# qhasm: xmm10 = xmm15
10210# asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11
10211# asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10
10212movdqa %xmm13,%xmm10
10213
10214# qhasm: xmm10 ^= xmm14
10215# asm 1: pxor <xmm14=int6464#12,<xmm10=int6464#11
10216# asm 2: pxor <xmm14=%xmm11,<xmm10=%xmm10
10217pxor %xmm11,%xmm10
10218
10219# qhasm: xmm10 &= xmm3
10220# asm 1: pand <xmm3=int6464#4,<xmm10=int6464#11
10221# asm 2: pand <xmm3=%xmm3,<xmm10=%xmm10
10222pand %xmm3,%xmm10
10223
10224# qhasm: xmm3 ^= xmm7
10225# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4
10226# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3
10227pxor %xmm7,%xmm3
10228
10229# qhasm: xmm3 &= xmm14
10230# asm 1: pand <xmm14=int6464#12,<xmm3=int6464#4
10231# asm 2: pand <xmm14=%xmm11,<xmm3=%xmm3
10232pand %xmm11,%xmm3
10233
10234# qhasm: xmm7 &= xmm15
10235# asm 1: pand <xmm15=int6464#14,<xmm7=int6464#8
10236# asm 2: pand <xmm15=%xmm13,<xmm7=%xmm7
10237pand %xmm13,%xmm7
10238
10239# qhasm: xmm3 ^= xmm7
10240# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4
10241# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3
10242pxor %xmm7,%xmm3
10243
10244# qhasm: xmm7 ^= xmm10
10245# asm 1: pxor <xmm10=int6464#11,<xmm7=int6464#8
10246# asm 2: pxor <xmm10=%xmm10,<xmm7=%xmm7
10247pxor %xmm10,%xmm7
10248
10249# qhasm: xmm12 ^= xmm0
10250# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#9
10251# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm8
10252pxor %xmm0,%xmm8
10253
10254# qhasm: xmm8 ^= xmm4
10255# asm 1: pxor <xmm4=int6464#5,<xmm8=int6464#10
10256# asm 2: pxor <xmm4=%xmm4,<xmm8=%xmm9
10257pxor %xmm4,%xmm9
10258
10259# qhasm: xmm15 ^= xmm13
10260# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
10261# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
10262pxor %xmm15,%xmm13
10263
10264# qhasm: xmm14 ^= xmm9
10265# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
10266# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
10267pxor %xmm12,%xmm11
10268
10269# qhasm: xmm11 = xmm15
10270# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
10271# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
10272movdqa %xmm13,%xmm10
10273
10274# qhasm: xmm11 ^= xmm14
10275# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
10276# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
10277pxor %xmm11,%xmm10
10278
10279# qhasm: xmm11 &= xmm12
10280# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
10281# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
10282pand %xmm8,%xmm10
10283
10284# qhasm: xmm12 ^= xmm8
10285# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
10286# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
10287pxor %xmm9,%xmm8
10288
10289# qhasm: xmm12 &= xmm14
10290# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
10291# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
10292pand %xmm11,%xmm8
10293
10294# qhasm: xmm8 &= xmm15
10295# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
10296# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
10297pand %xmm13,%xmm9
10298
10299# qhasm: xmm8 ^= xmm12
10300# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
10301# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
10302pxor %xmm8,%xmm9
10303
10304# qhasm: xmm12 ^= xmm11
10305# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
10306# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
10307pxor %xmm10,%xmm8
10308
10309# qhasm: xmm10 = xmm13
10310# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
10311# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
10312movdqa %xmm15,%xmm10
10313
10314# qhasm: xmm10 ^= xmm9
10315# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
10316# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
10317pxor %xmm12,%xmm10
10318
10319# qhasm: xmm10 &= xmm0
10320# asm 1: pand <xmm0=int6464#1,<xmm10=int6464#11
10321# asm 2: pand <xmm0=%xmm0,<xmm10=%xmm10
10322pand %xmm0,%xmm10
10323
10324# qhasm: xmm0 ^= xmm4
10325# asm 1: pxor <xmm4=int6464#5,<xmm0=int6464#1
10326# asm 2: pxor <xmm4=%xmm4,<xmm0=%xmm0
10327pxor %xmm4,%xmm0
10328
10329# qhasm: xmm0 &= xmm9
10330# asm 1: pand <xmm9=int6464#13,<xmm0=int6464#1
10331# asm 2: pand <xmm9=%xmm12,<xmm0=%xmm0
10332pand %xmm12,%xmm0
10333
10334# qhasm: xmm4 &= xmm13
10335# asm 1: pand <xmm13=int6464#16,<xmm4=int6464#5
10336# asm 2: pand <xmm13=%xmm15,<xmm4=%xmm4
10337pand %xmm15,%xmm4
10338
10339# qhasm: xmm0 ^= xmm4
10340# asm 1: pxor <xmm4=int6464#5,<xmm0=int6464#1
10341# asm 2: pxor <xmm4=%xmm4,<xmm0=%xmm0
10342pxor %xmm4,%xmm0
10343
10344# qhasm: xmm4 ^= xmm10
10345# asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5
10346# asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4
10347pxor %xmm10,%xmm4
10348
10349# qhasm: xmm3 ^= xmm12
10350# asm 1: pxor <xmm12=int6464#9,<xmm3=int6464#4
10351# asm 2: pxor <xmm12=%xmm8,<xmm3=%xmm3
10352pxor %xmm8,%xmm3
10353
10354# qhasm: xmm0 ^= xmm12
10355# asm 1: pxor <xmm12=int6464#9,<xmm0=int6464#1
10356# asm 2: pxor <xmm12=%xmm8,<xmm0=%xmm0
10357pxor %xmm8,%xmm0
10358
10359# qhasm: xmm7 ^= xmm8
10360# asm 1: pxor <xmm8=int6464#10,<xmm7=int6464#8
10361# asm 2: pxor <xmm8=%xmm9,<xmm7=%xmm7
10362pxor %xmm9,%xmm7
10363
10364# qhasm: xmm4 ^= xmm8
10365# asm 1: pxor <xmm8=int6464#10,<xmm4=int6464#5
10366# asm 2: pxor <xmm8=%xmm9,<xmm4=%xmm4
10367pxor %xmm9,%xmm4
10368
10369# qhasm: xmm12 = xmm5
10370# asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#9
10371# asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm8
10372movdqa %xmm5,%xmm8
10373
10374# qhasm: xmm8 = xmm1
10375# asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10
10376# asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9
10377movdqa %xmm1,%xmm9
10378
10379# qhasm: xmm12 ^= xmm2
10380# asm 1: pxor <xmm2=int6464#3,<xmm12=int6464#9
10381# asm 2: pxor <xmm2=%xmm2,<xmm12=%xmm8
10382pxor %xmm2,%xmm8
10383
10384# qhasm: xmm8 ^= xmm6
10385# asm 1: pxor <xmm6=int6464#7,<xmm8=int6464#10
10386# asm 2: pxor <xmm6=%xmm6,<xmm8=%xmm9
10387pxor %xmm6,%xmm9
10388
10389# qhasm: xmm11 = xmm15
10390# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
10391# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
10392movdqa %xmm13,%xmm10
10393
10394# qhasm: xmm11 ^= xmm14
10395# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
10396# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
10397pxor %xmm11,%xmm10
10398
10399# qhasm: xmm11 &= xmm12
10400# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
10401# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
10402pand %xmm8,%xmm10
10403
10404# qhasm: xmm12 ^= xmm8
10405# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
10406# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
10407pxor %xmm9,%xmm8
10408
10409# qhasm: xmm12 &= xmm14
10410# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
10411# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
10412pand %xmm11,%xmm8
10413
10414# qhasm: xmm8 &= xmm15
10415# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
10416# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
10417pand %xmm13,%xmm9
10418
10419# qhasm: xmm8 ^= xmm12
10420# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
10421# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
10422pxor %xmm8,%xmm9
10423
10424# qhasm: xmm12 ^= xmm11
10425# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
10426# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
10427pxor %xmm10,%xmm8
10428
10429# qhasm: xmm10 = xmm13
10430# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
10431# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
10432movdqa %xmm15,%xmm10
10433
10434# qhasm: xmm10 ^= xmm9
10435# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
10436# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
10437pxor %xmm12,%xmm10
10438
10439# qhasm: xmm10 &= xmm2
10440# asm 1: pand <xmm2=int6464#3,<xmm10=int6464#11
10441# asm 2: pand <xmm2=%xmm2,<xmm10=%xmm10
10442pand %xmm2,%xmm10
10443
10444# qhasm: xmm2 ^= xmm6
10445# asm 1: pxor <xmm6=int6464#7,<xmm2=int6464#3
10446# asm 2: pxor <xmm6=%xmm6,<xmm2=%xmm2
10447pxor %xmm6,%xmm2
10448
10449# qhasm: xmm2 &= xmm9
10450# asm 1: pand <xmm9=int6464#13,<xmm2=int6464#3
10451# asm 2: pand <xmm9=%xmm12,<xmm2=%xmm2
10452pand %xmm12,%xmm2
10453
10454# qhasm: xmm6 &= xmm13
10455# asm 1: pand <xmm13=int6464#16,<xmm6=int6464#7
10456# asm 2: pand <xmm13=%xmm15,<xmm6=%xmm6
10457pand %xmm15,%xmm6
10458
10459# qhasm: xmm2 ^= xmm6
10460# asm 1: pxor <xmm6=int6464#7,<xmm2=int6464#3
10461# asm 2: pxor <xmm6=%xmm6,<xmm2=%xmm2
10462pxor %xmm6,%xmm2
10463
10464# qhasm: xmm6 ^= xmm10
10465# asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7
10466# asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6
10467pxor %xmm10,%xmm6
10468
10469# qhasm: xmm15 ^= xmm13
10470# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
10471# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
10472pxor %xmm15,%xmm13
10473
10474# qhasm: xmm14 ^= xmm9
10475# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
10476# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
10477pxor %xmm12,%xmm11
10478
10479# qhasm: xmm11 = xmm15
10480# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
10481# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
10482movdqa %xmm13,%xmm10
10483
10484# qhasm: xmm11 ^= xmm14
10485# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
10486# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
10487pxor %xmm11,%xmm10
10488
10489# qhasm: xmm11 &= xmm5
10490# asm 1: pand <xmm5=int6464#6,<xmm11=int6464#11
10491# asm 2: pand <xmm5=%xmm5,<xmm11=%xmm10
10492pand %xmm5,%xmm10
10493
10494# qhasm: xmm5 ^= xmm1
10495# asm 1: pxor <xmm1=int6464#2,<xmm5=int6464#6
10496# asm 2: pxor <xmm1=%xmm1,<xmm5=%xmm5
10497pxor %xmm1,%xmm5
10498
10499# qhasm: xmm5 &= xmm14
10500# asm 1: pand <xmm14=int6464#12,<xmm5=int6464#6
10501# asm 2: pand <xmm14=%xmm11,<xmm5=%xmm5
10502pand %xmm11,%xmm5
10503
10504# qhasm: xmm1 &= xmm15
10505# asm 1: pand <xmm15=int6464#14,<xmm1=int6464#2
10506# asm 2: pand <xmm15=%xmm13,<xmm1=%xmm1
10507pand %xmm13,%xmm1
10508
10509# qhasm: xmm5 ^= xmm1
10510# asm 1: pxor <xmm1=int6464#2,<xmm5=int6464#6
10511# asm 2: pxor <xmm1=%xmm1,<xmm5=%xmm5
10512pxor %xmm1,%xmm5
10513
10514# qhasm: xmm1 ^= xmm11
10515# asm 1: pxor <xmm11=int6464#11,<xmm1=int6464#2
10516# asm 2: pxor <xmm11=%xmm10,<xmm1=%xmm1
10517pxor %xmm10,%xmm1
10518
10519# qhasm: xmm5 ^= xmm12
10520# asm 1: pxor <xmm12=int6464#9,<xmm5=int6464#6
10521# asm 2: pxor <xmm12=%xmm8,<xmm5=%xmm5
10522pxor %xmm8,%xmm5
10523
10524# qhasm: xmm2 ^= xmm12
10525# asm 1: pxor <xmm12=int6464#9,<xmm2=int6464#3
10526# asm 2: pxor <xmm12=%xmm8,<xmm2=%xmm2
10527pxor %xmm8,%xmm2
10528
10529# qhasm: xmm1 ^= xmm8
10530# asm 1: pxor <xmm8=int6464#10,<xmm1=int6464#2
10531# asm 2: pxor <xmm8=%xmm9,<xmm1=%xmm1
10532pxor %xmm9,%xmm1
10533
10534# qhasm: xmm6 ^= xmm8
10535# asm 1: pxor <xmm8=int6464#10,<xmm6=int6464#7
10536# asm 2: pxor <xmm8=%xmm9,<xmm6=%xmm6
10537pxor %xmm9,%xmm6
10538
10539# qhasm: xmm5 ^= xmm0
10540# asm 1: pxor <xmm0=int6464#1,<xmm5=int6464#6
10541# asm 2: pxor <xmm0=%xmm0,<xmm5=%xmm5
10542pxor %xmm0,%xmm5
10543
10544# qhasm: xmm1 ^= xmm3
10545# asm 1: pxor <xmm3=int6464#4,<xmm1=int6464#2
10546# asm 2: pxor <xmm3=%xmm3,<xmm1=%xmm1
10547pxor %xmm3,%xmm1
10548
10549# qhasm: xmm2 ^= xmm5
10550# asm 1: pxor <xmm5=int6464#6,<xmm2=int6464#3
10551# asm 2: pxor <xmm5=%xmm5,<xmm2=%xmm2
10552pxor %xmm5,%xmm2
10553
10554# qhasm: xmm3 ^= xmm0
10555# asm 1: pxor <xmm0=int6464#1,<xmm3=int6464#4
10556# asm 2: pxor <xmm0=%xmm0,<xmm3=%xmm3
10557pxor %xmm0,%xmm3
10558
10559# qhasm: xmm0 ^= xmm1
10560# asm 1: pxor <xmm1=int6464#2,<xmm0=int6464#1
10561# asm 2: pxor <xmm1=%xmm1,<xmm0=%xmm0
10562pxor %xmm1,%xmm0
10563
10564# qhasm: xmm1 ^= xmm7
10565# asm 1: pxor <xmm7=int6464#8,<xmm1=int6464#2
10566# asm 2: pxor <xmm7=%xmm7,<xmm1=%xmm1
10567pxor %xmm7,%xmm1
10568
10569# qhasm: xmm7 ^= xmm6
10570# asm 1: pxor <xmm6=int6464#7,<xmm7=int6464#8
10571# asm 2: pxor <xmm6=%xmm6,<xmm7=%xmm7
10572pxor %xmm6,%xmm7
10573
10574# qhasm: xmm2 ^= xmm7
10575# asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3
10576# asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2
10577pxor %xmm7,%xmm2
10578
10579# qhasm: xmm6 ^= xmm4
10580# asm 1: pxor <xmm4=int6464#5,<xmm6=int6464#7
10581# asm 2: pxor <xmm4=%xmm4,<xmm6=%xmm6
10582pxor %xmm4,%xmm6
10583
10584# qhasm: xmm4 ^= xmm7
10585# asm 1: pxor <xmm7=int6464#8,<xmm4=int6464#5
10586# asm 2: pxor <xmm7=%xmm7,<xmm4=%xmm4
10587pxor %xmm7,%xmm4
10588
10589# qhasm: xmm3 ^= xmm4
10590# asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4
10591# asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3
10592pxor %xmm4,%xmm3
10593
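# The long pxor/pand/movdqa network above appears to be one evaluation of
# the bitsliced AES S-box, computed once across all eight 128-bit slice
# registers. What follows looks like a key-schedule step: fold in the
# round constant, then expand the substituted bytes with EXPB0.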
10594# qhasm: xmm7 ^= RCON
10595# asm 1: pxor RCON,<xmm7=int6464#8
10596# asm 2: pxor RCON,<xmm7=%xmm7
10597pxor RCON,%xmm7
10598
10599# qhasm: shuffle bytes of xmm0 by EXPB0
10600# asm 1: pshufb EXPB0,<xmm0=int6464#1
10601# asm 2: pshufb EXPB0,<xmm0=%xmm0
10602pshufb EXPB0,%xmm0
10603
10604# qhasm: shuffle bytes of xmm1 by EXPB0
10605# asm 1: pshufb EXPB0,<xmm1=int6464#2
10606# asm 2: pshufb EXPB0,<xmm1=%xmm1
10607pshufb EXPB0,%xmm1
10608
10609# qhasm: shuffle bytes of xmm2 by EXPB0
10610# asm 1: pshufb EXPB0,<xmm2=int6464#3
10611# asm 2: pshufb EXPB0,<xmm2=%xmm2
10612pshufb EXPB0,%xmm2
10613
10614# qhasm: shuffle bytes of xmm3 by EXPB0
10615# asm 1: pshufb EXPB0,<xmm3=int6464#4
10616# asm 2: pshufb EXPB0,<xmm3=%xmm3
10617pshufb EXPB0,%xmm3
10618
10619# qhasm: shuffle bytes of xmm4 by EXPB0
10620# asm 1: pshufb EXPB0,<xmm4=int6464#5
10621# asm 2: pshufb EXPB0,<xmm4=%xmm4
10622pshufb EXPB0,%xmm4
10623
10624# qhasm: shuffle bytes of xmm5 by EXPB0
10625# asm 1: pshufb EXPB0,<xmm5=int6464#6
10626# asm 2: pshufb EXPB0,<xmm5=%xmm5
10627pshufb EXPB0,%xmm5
10628
10629# qhasm: shuffle bytes of xmm6 by EXPB0
10630# asm 1: pshufb EXPB0,<xmm6=int6464#7
10631# asm 2: pshufb EXPB0,<xmm6=%xmm6
10632pshufb EXPB0,%xmm6
10633
10634# qhasm: shuffle bytes of xmm7 by EXPB0
10635# asm 1: pshufb EXPB0,<xmm7=int6464#8
10636# asm 2: pshufb EXPB0,<xmm7=%xmm7
10637pshufb EXPB0,%xmm7
10638
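# Load the previous bitsliced round key. At 8 slices of 16 bytes each, one
# round key occupies 128 bytes, so offset 896 = 7 * 128 presumably selects
# round key 7.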
10639# qhasm: xmm8 = *(int128 *)(c + 896)
10640# asm 1: movdqa 896(<c=int64#1),>xmm8=int6464#9
10641# asm 2: movdqa 896(<c=%rdi),>xmm8=%xmm8
10642movdqa 896(%rdi),%xmm8
10643
10644# qhasm: xmm9 = *(int128 *)(c + 912)
10645# asm 1: movdqa 912(<c=int64#1),>xmm9=int6464#10
10646# asm 2: movdqa 912(<c=%rdi),>xmm9=%xmm9
10647movdqa 912(%rdi),%xmm9
10648
10649# qhasm: xmm10 = *(int128 *)(c + 928)
10650# asm 1: movdqa 928(<c=int64#1),>xmm10=int6464#11
10651# asm 2: movdqa 928(<c=%rdi),>xmm10=%xmm10
10652movdqa 928(%rdi),%xmm10
10653
10654# qhasm: xmm11 = *(int128 *)(c + 944)
10655# asm 1: movdqa 944(<c=int64#1),>xmm11=int6464#12
10656# asm 2: movdqa 944(<c=%rdi),>xmm11=%xmm11
10657movdqa 944(%rdi),%xmm11
10658
10659# qhasm: xmm12 = *(int128 *)(c + 960)
10660# asm 1: movdqa 960(<c=int64#1),>xmm12=int6464#13
10661# asm 2: movdqa 960(<c=%rdi),>xmm12=%xmm12
10662movdqa 960(%rdi),%xmm12
10663
10664# qhasm: xmm13 = *(int128 *)(c + 976)
10665# asm 1: movdqa 976(<c=int64#1),>xmm13=int6464#14
10666# asm 2: movdqa 976(<c=%rdi),>xmm13=%xmm13
10667movdqa 976(%rdi),%xmm13
10668
10669# qhasm: xmm14 = *(int128 *)(c + 992)
10670# asm 1: movdqa 992(<c=int64#1),>xmm14=int6464#15
10671# asm 2: movdqa 992(<c=%rdi),>xmm14=%xmm14
10672movdqa 992(%rdi),%xmm14
10673
10674# qhasm: xmm15 = *(int128 *)(c + 1008)
10675# asm 1: movdqa 1008(<c=int64#1),>xmm15=int6464#16
10676# asm 2: movdqa 1008(<c=%rdi),>xmm15=%xmm15
10677movdqa 1008(%rdi),%xmm15
10678
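# XORing all-ones into slices 0, 1, 5 and 6 matches the bit pattern of the
# AES affine constant 0x63 (binary 01100011); this is presumably the fixup
# between the complemented form kept in memory and the plain key bits.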
10679# qhasm: xmm8 ^= ONE
10680# asm 1: pxor ONE,<xmm8=int6464#9
10681# asm 2: pxor ONE,<xmm8=%xmm8
10682pxor ONE,%xmm8
10683
10684# qhasm: xmm9 ^= ONE
10685# asm 1: pxor ONE,<xmm9=int6464#10
10686# asm 2: pxor ONE,<xmm9=%xmm9
10687pxor ONE,%xmm9
10688
10689# qhasm: xmm13 ^= ONE
10690# asm 1: pxor ONE,<xmm13=int6464#14
10691# asm 2: pxor ONE,<xmm13=%xmm13
10692pxor ONE,%xmm13
10693
10694# qhasm: xmm14 ^= ONE
10695# asm 1: pxor ONE,<xmm14=int6464#15
10696# asm 2: pxor ONE,<xmm14=%xmm14
10697pxor ONE,%xmm14
10698
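# The XOR/shift passes below combine the loaded key into the new words.
# Per 32-bit lane of each slice they compute
#   x ^= k ^ (k >> 8) ^ (k >> 16) ^ (k >> 24)
# which appears to be the w[i] = w[i-1] ^ ... word recursion of AES key
# expansion, carried out bytewise in the bitsliced domain.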
10699# qhasm: xmm0 ^= xmm8
10700# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
10701# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
10702pxor %xmm8,%xmm0
10703
10704# qhasm: xmm1 ^= xmm9
10705# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
10706# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
10707pxor %xmm9,%xmm1
10708
10709# qhasm: xmm2 ^= xmm10
10710# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3
10711# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2
10712pxor %xmm10,%xmm2
10713
10714# qhasm: xmm3 ^= xmm11
10715# asm 1: pxor <xmm11=int6464#12,<xmm3=int6464#4
10716# asm 2: pxor <xmm11=%xmm11,<xmm3=%xmm3
10717pxor %xmm11,%xmm3
10718
10719# qhasm: xmm4 ^= xmm12
10720# asm 1: pxor <xmm12=int6464#13,<xmm4=int6464#5
10721# asm 2: pxor <xmm12=%xmm12,<xmm4=%xmm4
10722pxor %xmm12,%xmm4
10723
10724# qhasm: xmm5 ^= xmm13
10725# asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6
10726# asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5
10727pxor %xmm13,%xmm5
10728
10729# qhasm: xmm6 ^= xmm14
10730# asm 1: pxor <xmm14=int6464#15,<xmm6=int6464#7
10731# asm 2: pxor <xmm14=%xmm14,<xmm6=%xmm6
10732pxor %xmm14,%xmm6
10733
10734# qhasm: xmm7 ^= xmm15
10735# asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8
10736# asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7
10737pxor %xmm15,%xmm7
10738
10739# qhasm: uint32323232 xmm8 >>= 8
10740# asm 1: psrld $8,<xmm8=int6464#9
10741# asm 2: psrld $8,<xmm8=%xmm8
10742psrld $8,%xmm8
10743
10744# qhasm: uint32323232 xmm9 >>= 8
10745# asm 1: psrld $8,<xmm9=int6464#10
10746# asm 2: psrld $8,<xmm9=%xmm9
10747psrld $8,%xmm9
10748
10749# qhasm: uint32323232 xmm10 >>= 8
10750# asm 1: psrld $8,<xmm10=int6464#11
10751# asm 2: psrld $8,<xmm10=%xmm10
10752psrld $8,%xmm10
10753
10754# qhasm: uint32323232 xmm11 >>= 8
10755# asm 1: psrld $8,<xmm11=int6464#12
10756# asm 2: psrld $8,<xmm11=%xmm11
10757psrld $8,%xmm11
10758
10759# qhasm: uint32323232 xmm12 >>= 8
10760# asm 1: psrld $8,<xmm12=int6464#13
10761# asm 2: psrld $8,<xmm12=%xmm12
10762psrld $8,%xmm12
10763
10764# qhasm: uint32323232 xmm13 >>= 8
10765# asm 1: psrld $8,<xmm13=int6464#14
10766# asm 2: psrld $8,<xmm13=%xmm13
10767psrld $8,%xmm13
10768
10769# qhasm: uint32323232 xmm14 >>= 8
10770# asm 1: psrld $8,<xmm14=int6464#15
10771# asm 2: psrld $8,<xmm14=%xmm14
10772psrld $8,%xmm14
10773
10774# qhasm: uint32323232 xmm15 >>= 8
10775# asm 1: psrld $8,<xmm15=int6464#16
10776# asm 2: psrld $8,<xmm15=%xmm15
10777psrld $8,%xmm15
10778
10779# qhasm: xmm0 ^= xmm8
10780# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
10781# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
10782pxor %xmm8,%xmm0
10783
10784# qhasm: xmm1 ^= xmm9
10785# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
10786# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
10787pxor %xmm9,%xmm1
10788
10789# qhasm: xmm2 ^= xmm10
10790# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3
10791# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2
10792pxor %xmm10,%xmm2
10793
10794# qhasm: xmm3 ^= xmm11
10795# asm 1: pxor <xmm11=int6464#12,<xmm3=int6464#4
10796# asm 2: pxor <xmm11=%xmm11,<xmm3=%xmm3
10797pxor %xmm11,%xmm3
10798
10799# qhasm: xmm4 ^= xmm12
10800# asm 1: pxor <xmm12=int6464#13,<xmm4=int6464#5
10801# asm 2: pxor <xmm12=%xmm12,<xmm4=%xmm4
10802pxor %xmm12,%xmm4
10803
10804# qhasm: xmm5 ^= xmm13
10805# asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6
10806# asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5
10807pxor %xmm13,%xmm5
10808
10809# qhasm: xmm6 ^= xmm14
10810# asm 1: pxor <xmm14=int6464#15,<xmm6=int6464#7
10811# asm 2: pxor <xmm14=%xmm14,<xmm6=%xmm6
10812pxor %xmm14,%xmm6
10813
10814# qhasm: xmm7 ^= xmm15
10815# asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8
10816# asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7
10817pxor %xmm15,%xmm7
10818
10819# qhasm: uint32323232 xmm8 >>= 8
10820# asm 1: psrld $8,<xmm8=int6464#9
10821# asm 2: psrld $8,<xmm8=%xmm8
10822psrld $8,%xmm8
10823
10824# qhasm: uint32323232 xmm9 >>= 8
10825# asm 1: psrld $8,<xmm9=int6464#10
10826# asm 2: psrld $8,<xmm9=%xmm9
10827psrld $8,%xmm9
10828
10829# qhasm: uint32323232 xmm10 >>= 8
10830# asm 1: psrld $8,<xmm10=int6464#11
10831# asm 2: psrld $8,<xmm10=%xmm10
10832psrld $8,%xmm10
10833
10834# qhasm: uint32323232 xmm11 >>= 8
10835# asm 1: psrld $8,<xmm11=int6464#12
10836# asm 2: psrld $8,<xmm11=%xmm11
10837psrld $8,%xmm11
10838
10839# qhasm: uint32323232 xmm12 >>= 8
10840# asm 1: psrld $8,<xmm12=int6464#13
10841# asm 2: psrld $8,<xmm12=%xmm12
10842psrld $8,%xmm12
10843
10844# qhasm: uint32323232 xmm13 >>= 8
10845# asm 1: psrld $8,<xmm13=int6464#14
10846# asm 2: psrld $8,<xmm13=%xmm13
10847psrld $8,%xmm13
10848
10849# qhasm: uint32323232 xmm14 >>= 8
10850# asm 1: psrld $8,<xmm14=int6464#15
10851# asm 2: psrld $8,<xmm14=%xmm14
10852psrld $8,%xmm14
10853
10854# qhasm: uint32323232 xmm15 >>= 8
10855# asm 1: psrld $8,<xmm15=int6464#16
10856# asm 2: psrld $8,<xmm15=%xmm15
10857psrld $8,%xmm15
10858
10859# qhasm: xmm0 ^= xmm8
10860# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
10861# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
10862pxor %xmm8,%xmm0
10863
10864# qhasm: xmm1 ^= xmm9
10865# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
10866# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
10867pxor %xmm9,%xmm1
10868
10869# qhasm: xmm2 ^= xmm10
10870# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3
10871# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2
10872pxor %xmm10,%xmm2
10873
10874# qhasm: xmm3 ^= xmm11
10875# asm 1: pxor <xmm11=int6464#12,<xmm3=int6464#4
10876# asm 2: pxor <xmm11=%xmm11,<xmm3=%xmm3
10877pxor %xmm11,%xmm3
10878
10879# qhasm: xmm4 ^= xmm12
10880# asm 1: pxor <xmm12=int6464#13,<xmm4=int6464#5
10881# asm 2: pxor <xmm12=%xmm12,<xmm4=%xmm4
10882pxor %xmm12,%xmm4
10883
10884# qhasm: xmm5 ^= xmm13
10885# asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6
10886# asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5
10887pxor %xmm13,%xmm5
10888
10889# qhasm: xmm6 ^= xmm14
10890# asm 1: pxor <xmm14=int6464#15,<xmm6=int6464#7
10891# asm 2: pxor <xmm14=%xmm14,<xmm6=%xmm6
10892pxor %xmm14,%xmm6
10893
10894# qhasm: xmm7 ^= xmm15
10895# asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8
10896# asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7
10897pxor %xmm15,%xmm7
10898
10899# qhasm: uint32323232 xmm8 >>= 8
10900# asm 1: psrld $8,<xmm8=int6464#9
10901# asm 2: psrld $8,<xmm8=%xmm8
10902psrld $8,%xmm8
10903
10904# qhasm: uint32323232 xmm9 >>= 8
10905# asm 1: psrld $8,<xmm9=int6464#10
10906# asm 2: psrld $8,<xmm9=%xmm9
10907psrld $8,%xmm9
10908
10909# qhasm: uint32323232 xmm10 >>= 8
10910# asm 1: psrld $8,<xmm10=int6464#11
10911# asm 2: psrld $8,<xmm10=%xmm10
10912psrld $8,%xmm10
10913
10914# qhasm: uint32323232 xmm11 >>= 8
10915# asm 1: psrld $8,<xmm11=int6464#12
10916# asm 2: psrld $8,<xmm11=%xmm11
10917psrld $8,%xmm11
10918
10919# qhasm: uint32323232 xmm12 >>= 8
10920# asm 1: psrld $8,<xmm12=int6464#13
10921# asm 2: psrld $8,<xmm12=%xmm12
10922psrld $8,%xmm12
10923
10924# qhasm: uint32323232 xmm13 >>= 8
10925# asm 1: psrld $8,<xmm13=int6464#14
10926# asm 2: psrld $8,<xmm13=%xmm13
10927psrld $8,%xmm13
10928
10929# qhasm: uint32323232 xmm14 >>= 8
10930# asm 1: psrld $8,<xmm14=int6464#15
10931# asm 2: psrld $8,<xmm14=%xmm14
10932psrld $8,%xmm14
10933
10934# qhasm: uint32323232 xmm15 >>= 8
10935# asm 1: psrld $8,<xmm15=int6464#16
10936# asm 2: psrld $8,<xmm15=%xmm15
10937psrld $8,%xmm15
10938
10939# qhasm: xmm0 ^= xmm8
10940# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
10941# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
10942pxor %xmm8,%xmm0
10943
10944# qhasm: xmm1 ^= xmm9
10945# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
10946# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
10947pxor %xmm9,%xmm1
10948
10949# qhasm: xmm2 ^= xmm10
10950# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3
10951# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2
10952pxor %xmm10,%xmm2
10953
10954# qhasm: xmm3 ^= xmm11
10955# asm 1: pxor <xmm11=int6464#12,<xmm3=int6464#4
10956# asm 2: pxor <xmm11=%xmm11,<xmm3=%xmm3
10957pxor %xmm11,%xmm3
10958
10959# qhasm: xmm4 ^= xmm12
10960# asm 1: pxor <xmm12=int6464#13,<xmm4=int6464#5
10961# asm 2: pxor <xmm12=%xmm12,<xmm4=%xmm4
10962pxor %xmm12,%xmm4
10963
10964# qhasm: xmm5 ^= xmm13
10965# asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6
10966# asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5
10967pxor %xmm13,%xmm5
10968
10969# qhasm: xmm6 ^= xmm14
10970# asm 1: pxor <xmm14=int6464#15,<xmm6=int6464#7
10971# asm 2: pxor <xmm14=%xmm14,<xmm6=%xmm6
10972pxor %xmm14,%xmm6
10973
10974# qhasm: xmm7 ^= xmm15
10975# asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8
10976# asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7
10977pxor %xmm15,%xmm7
10978
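# Write out the freshly derived bitsliced round key; by the 128-byte
# stride, offset 1024 = 8 * 128 presumably makes this round key 8.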
10979# qhasm: *(int128 *)(c + 1024) = xmm0
10980# asm 1: movdqa <xmm0=int6464#1,1024(<c=int64#1)
10981# asm 2: movdqa <xmm0=%xmm0,1024(<c=%rdi)
10982movdqa %xmm0,1024(%rdi)
10983
10984# qhasm: *(int128 *)(c + 1040) = xmm1
10985# asm 1: movdqa <xmm1=int6464#2,1040(<c=int64#1)
10986# asm 2: movdqa <xmm1=%xmm1,1040(<c=%rdi)
10987movdqa %xmm1,1040(%rdi)
10988
10989# qhasm: *(int128 *)(c + 1056) = xmm2
10990# asm 1: movdqa <xmm2=int6464#3,1056(<c=int64#1)
10991# asm 2: movdqa <xmm2=%xmm2,1056(<c=%rdi)
10992movdqa %xmm2,1056(%rdi)
10993
10994# qhasm: *(int128 *)(c + 1072) = xmm3
10995# asm 1: movdqa <xmm3=int6464#4,1072(<c=int64#1)
10996# asm 2: movdqa <xmm3=%xmm3,1072(<c=%rdi)
10997movdqa %xmm3,1072(%rdi)
10998
10999# qhasm: *(int128 *)(c + 1088) = xmm4
11000# asm 1: movdqa <xmm4=int6464#5,1088(<c=int64#1)
11001# asm 2: movdqa <xmm4=%xmm4,1088(<c=%rdi)
11002movdqa %xmm4,1088(%rdi)
11003
11004# qhasm: *(int128 *)(c + 1104) = xmm5
11005# asm 1: movdqa <xmm5=int6464#6,1104(<c=int64#1)
11006# asm 2: movdqa <xmm5=%xmm5,1104(<c=%rdi)
11007movdqa %xmm5,1104(%rdi)
11008
11009# qhasm: *(int128 *)(c + 1120) = xmm6
11010# asm 1: movdqa <xmm6=int6464#7,1120(<c=int64#1)
11011# asm 2: movdqa <xmm6=%xmm6,1120(<c=%rdi)
11012movdqa %xmm6,1120(%rdi)
11013
11014# qhasm: *(int128 *)(c + 1136) = xmm7
11015# asm 1: movdqa <xmm7=int6464#8,1136(<c=int64#1)
11016# asm 2: movdqa <xmm7=%xmm7,1136(<c=%rdi)
11017movdqa %xmm7,1136(%rdi)
11018
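# Re-apply the 0x63 fixup on slices 0, 1, 5 and 6 of the working copy,
# then rotate bytes with ROTB to line up the next key-schedule round.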
11019# qhasm: xmm0 ^= ONE
11020# asm 1: pxor ONE,<xmm0=int6464#1
11021# asm 2: pxor ONE,<xmm0=%xmm0
11022pxor ONE,%xmm0
11023
11024# qhasm: xmm1 ^= ONE
11025# asm 1: pxor ONE,<xmm1=int6464#2
11026# asm 2: pxor ONE,<xmm1=%xmm1
11027pxor ONE,%xmm1
11028
11029# qhasm: xmm5 ^= ONE
11030# asm 1: pxor ONE,<xmm5=int6464#6
11031# asm 2: pxor ONE,<xmm5=%xmm5
11032pxor ONE,%xmm5
11033
11034# qhasm: xmm6 ^= ONE
11035# asm 1: pxor ONE,<xmm6=int6464#7
11036# asm 2: pxor ONE,<xmm6=%xmm6
11037pxor ONE,%xmm6
11038
11039# qhasm: shuffle bytes of xmm0 by ROTB
11040# asm 1: pshufb ROTB,<xmm0=int6464#1
11041# asm 2: pshufb ROTB,<xmm0=%xmm0
11042pshufb ROTB,%xmm0
11043
11044# qhasm: shuffle bytes of xmm1 by ROTB
11045# asm 1: pshufb ROTB,<xmm1=int6464#2
11046# asm 2: pshufb ROTB,<xmm1=%xmm1
11047pshufb ROTB,%xmm1
11048
11049# qhasm: shuffle bytes of xmm2 by ROTB
11050# asm 1: pshufb ROTB,<xmm2=int6464#3
11051# asm 2: pshufb ROTB,<xmm2=%xmm2
11052pshufb ROTB,%xmm2
11053
11054# qhasm: shuffle bytes of xmm3 by ROTB
11055# asm 1: pshufb ROTB,<xmm3=int6464#4
11056# asm 2: pshufb ROTB,<xmm3=%xmm3
11057pshufb ROTB,%xmm3
11058
11059# qhasm: shuffle bytes of xmm4 by ROTB
11060# asm 1: pshufb ROTB,<xmm4=int6464#5
11061# asm 2: pshufb ROTB,<xmm4=%xmm4
11062pshufb ROTB,%xmm4
11063
11064# qhasm: shuffle bytes of xmm5 by ROTB
11065# asm 1: pshufb ROTB,<xmm5=int6464#6
11066# asm 2: pshufb ROTB,<xmm5=%xmm5
11067pshufb ROTB,%xmm5
11068
11069# qhasm: shuffle bytes of xmm6 by ROTB
11070# asm 1: pshufb ROTB,<xmm6=int6464#7
11071# asm 2: pshufb ROTB,<xmm6=%xmm6
11072pshufb ROTB,%xmm6
11073
11074# qhasm: shuffle bytes of xmm7 by ROTB
11075# asm 1: pshufb ROTB,<xmm7=int6464#8
11076# asm 2: pshufb ROTB,<xmm7=%xmm7
11077pshufb ROTB,%xmm7
11078
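# The pxor chain below matches the input linear layer that feeds the
# shared nonlinear core of the bitsliced S-box.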
11079# qhasm: xmm5 ^= xmm6
11080# asm 1: pxor <xmm6=int6464#7,<xmm5=int6464#6
11081# asm 2: pxor <xmm6=%xmm6,<xmm5=%xmm5
11082pxor %xmm6,%xmm5
11083
11084# qhasm: xmm2 ^= xmm1
11085# asm 1: pxor <xmm1=int6464#2,<xmm2=int6464#3
11086# asm 2: pxor <xmm1=%xmm1,<xmm2=%xmm2
11087pxor %xmm1,%xmm2
11088
11089# qhasm: xmm5 ^= xmm0
11090# asm 1: pxor <xmm0=int6464#1,<xmm5=int6464#6
11091# asm 2: pxor <xmm0=%xmm0,<xmm5=%xmm5
11092pxor %xmm0,%xmm5
11093
11094# qhasm: xmm6 ^= xmm2
11095# asm 1: pxor <xmm2=int6464#3,<xmm6=int6464#7
11096# asm 2: pxor <xmm2=%xmm2,<xmm6=%xmm6
11097pxor %xmm2,%xmm6
11098
11099# qhasm: xmm3 ^= xmm0
11100# asm 1: pxor <xmm0=int6464#1,<xmm3=int6464#4
11101# asm 2: pxor <xmm0=%xmm0,<xmm3=%xmm3
11102pxor %xmm0,%xmm3
11103
11104# qhasm: xmm6 ^= xmm3
11105# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
11106# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
11107pxor %xmm3,%xmm6
11108
11109# qhasm: xmm3 ^= xmm7
11110# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4
11111# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3
11112pxor %xmm7,%xmm3
11113
11114# qhasm: xmm3 ^= xmm4
11115# asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4
11116# asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3
11117pxor %xmm4,%xmm3
11118
11119# qhasm: xmm7 ^= xmm5
11120# asm 1: pxor <xmm5=int6464#6,<xmm7=int6464#8
11121# asm 2: pxor <xmm5=%xmm5,<xmm7=%xmm7
11122pxor %xmm5,%xmm7
11123
11124# qhasm: xmm3 ^= xmm1
11125# asm 1: pxor <xmm1=int6464#2,<xmm3=int6464#4
11126# asm 2: pxor <xmm1=%xmm1,<xmm3=%xmm3
11127pxor %xmm1,%xmm3
11128
11129# qhasm: xmm4 ^= xmm5
11130# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
11131# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
11132pxor %xmm5,%xmm4
11133
11134# qhasm: xmm2 ^= xmm7
11135# asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3
11136# asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2
11137pxor %xmm7,%xmm2
11138
11139# qhasm: xmm1 ^= xmm5
11140# asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2
11141# asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1
11142pxor %xmm5,%xmm1
11143
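# The movdqa/por/pand/pxor block that follows is presumably the nonlinear
# core of the S-box, i.e. inversion in GF(2^8) expressed as a Boolean
# circuit over the eight slices.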
11144# qhasm: xmm11 = xmm7
11145# asm 1: movdqa <xmm7=int6464#8,>xmm11=int6464#9
11146# asm 2: movdqa <xmm7=%xmm7,>xmm11=%xmm8
11147movdqa %xmm7,%xmm8
11148
11149# qhasm: xmm10 = xmm1
11150# asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10
11151# asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9
11152movdqa %xmm1,%xmm9
11153
11154# qhasm: xmm9 = xmm5
11155# asm 1: movdqa <xmm5=int6464#6,>xmm9=int6464#11
11156# asm 2: movdqa <xmm5=%xmm5,>xmm9=%xmm10
11157movdqa %xmm5,%xmm10
11158
11159# qhasm: xmm13 = xmm2
11160# asm 1: movdqa <xmm2=int6464#3,>xmm13=int6464#12
11161# asm 2: movdqa <xmm2=%xmm2,>xmm13=%xmm11
11162movdqa %xmm2,%xmm11
11163
11164# qhasm: xmm12 = xmm6
11165# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#13
11166# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm12
11167movdqa %xmm6,%xmm12
11168
11169# qhasm: xmm11 ^= xmm4
11170# asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#9
11171# asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm8
11172pxor %xmm4,%xmm8
11173
11174# qhasm: xmm10 ^= xmm2
11175# asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#10
11176# asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm9
11177pxor %xmm2,%xmm9
11178
11179# qhasm: xmm9 ^= xmm3
11180# asm 1: pxor <xmm3=int6464#4,<xmm9=int6464#11
11181# asm 2: pxor <xmm3=%xmm3,<xmm9=%xmm10
11182pxor %xmm3,%xmm10
11183
11184# qhasm: xmm13 ^= xmm4
11185# asm 1: pxor <xmm4=int6464#5,<xmm13=int6464#12
11186# asm 2: pxor <xmm4=%xmm4,<xmm13=%xmm11
11187pxor %xmm4,%xmm11
11188
11189# qhasm: xmm12 ^= xmm0
11190# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13
11191# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12
11192pxor %xmm0,%xmm12
11193
11194# qhasm: xmm14 = xmm11
11195# asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14
11196# asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13
11197movdqa %xmm8,%xmm13
11198
11199# qhasm: xmm8 = xmm10
11200# asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15
11201# asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14
11202movdqa %xmm9,%xmm14
11203
11204# qhasm: xmm15 = xmm11
11205# asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16
11206# asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15
11207movdqa %xmm8,%xmm15
11208
11209# qhasm: xmm10 |= xmm9
11210# asm 1: por <xmm9=int6464#11,<xmm10=int6464#10
11211# asm 2: por <xmm9=%xmm10,<xmm10=%xmm9
11212por %xmm10,%xmm9
11213
11214# qhasm: xmm11 |= xmm12
11215# asm 1: por <xmm12=int6464#13,<xmm11=int6464#9
11216# asm 2: por <xmm12=%xmm12,<xmm11=%xmm8
11217por %xmm12,%xmm8
11218
11219# qhasm: xmm15 ^= xmm8
11220# asm 1: pxor <xmm8=int6464#15,<xmm15=int6464#16
11221# asm 2: pxor <xmm8=%xmm14,<xmm15=%xmm15
11222pxor %xmm14,%xmm15
11223
11224# qhasm: xmm14 &= xmm12
11225# asm 1: pand <xmm12=int6464#13,<xmm14=int6464#14
11226# asm 2: pand <xmm12=%xmm12,<xmm14=%xmm13
11227pand %xmm12,%xmm13
11228
11229# qhasm: xmm8 &= xmm9
11230# asm 1: pand <xmm9=int6464#11,<xmm8=int6464#15
11231# asm 2: pand <xmm9=%xmm10,<xmm8=%xmm14
11232pand %xmm10,%xmm14
11233
11234# qhasm: xmm12 ^= xmm9
11235# asm 1: pxor <xmm9=int6464#11,<xmm12=int6464#13
11236# asm 2: pxor <xmm9=%xmm10,<xmm12=%xmm12
11237pxor %xmm10,%xmm12
11238
11239# qhasm: xmm15 &= xmm12
11240# asm 1: pand <xmm12=int6464#13,<xmm15=int6464#16
11241# asm 2: pand <xmm12=%xmm12,<xmm15=%xmm15
11242pand %xmm12,%xmm15
11243
11244# qhasm: xmm12 = xmm3
11245# asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#11
11246# asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm10
11247movdqa %xmm3,%xmm10
11248
11249# qhasm: xmm12 ^= xmm0
11250# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#11
11251# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm10
11252pxor %xmm0,%xmm10
11253
11254# qhasm: xmm13 &= xmm12
11255# asm 1: pand <xmm12=int6464#11,<xmm13=int6464#12
11256# asm 2: pand <xmm12=%xmm10,<xmm13=%xmm11
11257pand %xmm10,%xmm11
11258
11259# qhasm: xmm11 ^= xmm13
11260# asm 1: pxor <xmm13=int6464#12,<xmm11=int6464#9
11261# asm 2: pxor <xmm13=%xmm11,<xmm11=%xmm8
11262pxor %xmm11,%xmm8
11263
11264# qhasm: xmm10 ^= xmm13
11265# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
11266# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
11267pxor %xmm11,%xmm9
11268
11269# qhasm: xmm13 = xmm7
11270# asm 1: movdqa <xmm7=int6464#8,>xmm13=int6464#11
11271# asm 2: movdqa <xmm7=%xmm7,>xmm13=%xmm10
11272movdqa %xmm7,%xmm10
11273
11274# qhasm: xmm13 ^= xmm1
11275# asm 1: pxor <xmm1=int6464#2,<xmm13=int6464#11
11276# asm 2: pxor <xmm1=%xmm1,<xmm13=%xmm10
11277pxor %xmm1,%xmm10
11278
11279# qhasm: xmm12 = xmm5
11280# asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#12
11281# asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm11
11282movdqa %xmm5,%xmm11
11283
11284# qhasm: xmm9 = xmm13
11285# asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13
11286# asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12
11287movdqa %xmm10,%xmm12
11288
11289# qhasm: xmm12 ^= xmm6
11290# asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#12
11291# asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm11
11292pxor %xmm6,%xmm11
11293
11294# qhasm: xmm9 |= xmm12
11295# asm 1: por <xmm12=int6464#12,<xmm9=int6464#13
11296# asm 2: por <xmm12=%xmm11,<xmm9=%xmm12
11297por %xmm11,%xmm12
11298
11299# qhasm: xmm13 &= xmm12
11300# asm 1: pand <xmm12=int6464#12,<xmm13=int6464#11
11301# asm 2: pand <xmm12=%xmm11,<xmm13=%xmm10
11302pand %xmm11,%xmm10
11303
11304# qhasm: xmm8 ^= xmm13
11305# asm 1: pxor <xmm13=int6464#11,<xmm8=int6464#15
11306# asm 2: pxor <xmm13=%xmm10,<xmm8=%xmm14
11307pxor %xmm10,%xmm14
11308
11309# qhasm: xmm11 ^= xmm15
11310# asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#9
11311# asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm8
11312pxor %xmm15,%xmm8
11313
11314# qhasm: xmm10 ^= xmm14
11315# asm 1: pxor <xmm14=int6464#14,<xmm10=int6464#10
11316# asm 2: pxor <xmm14=%xmm13,<xmm10=%xmm9
11317pxor %xmm13,%xmm9
11318
11319# qhasm: xmm9 ^= xmm15
11320# asm 1: pxor <xmm15=int6464#16,<xmm9=int6464#13
11321# asm 2: pxor <xmm15=%xmm15,<xmm9=%xmm12
11322pxor %xmm15,%xmm12
11323
11324# qhasm: xmm8 ^= xmm14
11325# asm 1: pxor <xmm14=int6464#14,<xmm8=int6464#15
11326# asm 2: pxor <xmm14=%xmm13,<xmm8=%xmm14
11327pxor %xmm13,%xmm14
11328
11329# qhasm: xmm9 ^= xmm14
11330# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
11331# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
11332pxor %xmm13,%xmm12
11333
11334# qhasm: xmm12 = xmm2
11335# asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#11
11336# asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm10
11337movdqa %xmm2,%xmm10
11338
11339# qhasm: xmm13 = xmm4
11340# asm 1: movdqa <xmm4=int6464#5,>xmm13=int6464#12
11341# asm 2: movdqa <xmm4=%xmm4,>xmm13=%xmm11
11342movdqa %xmm4,%xmm11
11343
11344# qhasm: xmm14 = xmm1
11345# asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14
11346# asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13
11347movdqa %xmm1,%xmm13
11348
11349# qhasm: xmm15 = xmm7
11350# asm 1: movdqa <xmm7=int6464#8,>xmm15=int6464#16
11351# asm 2: movdqa <xmm7=%xmm7,>xmm15=%xmm15
11352movdqa %xmm7,%xmm15
11353
11354# qhasm: xmm12 &= xmm3
11355# asm 1: pand <xmm3=int6464#4,<xmm12=int6464#11
11356# asm 2: pand <xmm3=%xmm3,<xmm12=%xmm10
11357pand %xmm3,%xmm10
11358
11359# qhasm: xmm13 &= xmm0
11360# asm 1: pand <xmm0=int6464#1,<xmm13=int6464#12
11361# asm 2: pand <xmm0=%xmm0,<xmm13=%xmm11
11362pand %xmm0,%xmm11
11363
11364# qhasm: xmm14 &= xmm5
11365# asm 1: pand <xmm5=int6464#6,<xmm14=int6464#14
11366# asm 2: pand <xmm5=%xmm5,<xmm14=%xmm13
11367pand %xmm5,%xmm13
11368
11369# qhasm: xmm15 |= xmm6
11370# asm 1: por <xmm6=int6464#7,<xmm15=int6464#16
11371# asm 2: por <xmm6=%xmm6,<xmm15=%xmm15
11372por %xmm6,%xmm15
11373
11374# qhasm: xmm11 ^= xmm12
11375# asm 1: pxor <xmm12=int6464#11,<xmm11=int6464#9
11376# asm 2: pxor <xmm12=%xmm10,<xmm11=%xmm8
11377pxor %xmm10,%xmm8
11378
11379# qhasm: xmm10 ^= xmm13
11380# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
11381# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
11382pxor %xmm11,%xmm9
11383
11384# qhasm: xmm9 ^= xmm14
11385# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
11386# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
11387pxor %xmm13,%xmm12
11388
11389# qhasm: xmm8 ^= xmm15
11390# asm 1: pxor <xmm15=int6464#16,<xmm8=int6464#15
11391# asm 2: pxor <xmm15=%xmm15,<xmm8=%xmm14
11392pxor %xmm15,%xmm14
11393
11394# qhasm: xmm12 = xmm11
11395# asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11
11396# asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10
11397movdqa %xmm8,%xmm10
11398
11399# qhasm: xmm12 ^= xmm10
11400# asm 1: pxor <xmm10=int6464#10,<xmm12=int6464#11
11401# asm 2: pxor <xmm10=%xmm9,<xmm12=%xmm10
11402pxor %xmm9,%xmm10
11403
11404# qhasm: xmm11 &= xmm9
11405# asm 1: pand <xmm9=int6464#13,<xmm11=int6464#9
11406# asm 2: pand <xmm9=%xmm12,<xmm11=%xmm8
11407pand %xmm12,%xmm8
11408
11409# qhasm: xmm14 = xmm8
11410# asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12
11411# asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11
11412movdqa %xmm14,%xmm11
11413
11414# qhasm: xmm14 ^= xmm11
11415# asm 1: pxor <xmm11=int6464#9,<xmm14=int6464#12
11416# asm 2: pxor <xmm11=%xmm8,<xmm14=%xmm11
11417pxor %xmm8,%xmm11
11418
11419# qhasm: xmm15 = xmm12
11420# asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14
11421# asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13
11422movdqa %xmm10,%xmm13
11423
11424# qhasm: xmm15 &= xmm14
11425# asm 1: pand <xmm14=int6464#12,<xmm15=int6464#14
11426# asm 2: pand <xmm14=%xmm11,<xmm15=%xmm13
11427pand %xmm11,%xmm13
11428
11429# qhasm: xmm15 ^= xmm10
11430# asm 1: pxor <xmm10=int6464#10,<xmm15=int6464#14
11431# asm 2: pxor <xmm10=%xmm9,<xmm15=%xmm13
11432pxor %xmm9,%xmm13
11433
11434# qhasm: xmm13 = xmm9
11435# asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16
11436# asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15
11437movdqa %xmm12,%xmm15
11438
11439# qhasm: xmm13 ^= xmm8
11440# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
11441# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
11442pxor %xmm14,%xmm15
11443
11444# qhasm: xmm11 ^= xmm10
11445# asm 1: pxor <xmm10=int6464#10,<xmm11=int6464#9
11446# asm 2: pxor <xmm10=%xmm9,<xmm11=%xmm8
11447pxor %xmm9,%xmm8
11448
11449# qhasm: xmm13 &= xmm11
11450# asm 1: pand <xmm11=int6464#9,<xmm13=int6464#16
11451# asm 2: pand <xmm11=%xmm8,<xmm13=%xmm15
11452pand %xmm8,%xmm15
11453
11454# qhasm: xmm13 ^= xmm8
11455# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
11456# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
11457pxor %xmm14,%xmm15
11458
11459# qhasm: xmm9 ^= xmm13
11460# asm 1: pxor <xmm13=int6464#16,<xmm9=int6464#13
11461# asm 2: pxor <xmm13=%xmm15,<xmm9=%xmm12
11462pxor %xmm15,%xmm12
11463
11464# qhasm: xmm10 = xmm14
11465# asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9
11466# asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8
11467movdqa %xmm11,%xmm8
11468
11469# qhasm: xmm10 ^= xmm13
11470# asm 1: pxor <xmm13=int6464#16,<xmm10=int6464#9
11471# asm 2: pxor <xmm13=%xmm15,<xmm10=%xmm8
11472pxor %xmm15,%xmm8
11473
11474# qhasm: xmm10 &= xmm8
11475# asm 1: pand <xmm8=int6464#15,<xmm10=int6464#9
11476# asm 2: pand <xmm8=%xmm14,<xmm10=%xmm8
11477pand %xmm14,%xmm8
11478
11479# qhasm: xmm9 ^= xmm10
11480# asm 1: pxor <xmm10=int6464#9,<xmm9=int6464#13
11481# asm 2: pxor <xmm10=%xmm8,<xmm9=%xmm12
11482pxor %xmm8,%xmm12
11483
11484# qhasm: xmm14 ^= xmm10
11485# asm 1: pxor <xmm10=int6464#9,<xmm14=int6464#12
11486# asm 2: pxor <xmm10=%xmm8,<xmm14=%xmm11
11487pxor %xmm8,%xmm11
11488
11489# qhasm: xmm14 &= xmm15
11490# asm 1: pand <xmm15=int6464#14,<xmm14=int6464#12
11491# asm 2: pand <xmm15=%xmm13,<xmm14=%xmm11
11492pand %xmm13,%xmm11
11493
11494# qhasm: xmm14 ^= xmm12
11495# asm 1: pxor <xmm12=int6464#11,<xmm14=int6464#12
11496# asm 2: pxor <xmm12=%xmm10,<xmm14=%xmm11
11497pxor %xmm10,%xmm11
11498
11499# qhasm: xmm12 = xmm6
11500# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#9
11501# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm8
11502movdqa %xmm6,%xmm8
11503
11504# qhasm: xmm8 = xmm5
11505# asm 1: movdqa <xmm5=int6464#6,>xmm8=int6464#10
11506# asm 2: movdqa <xmm5=%xmm5,>xmm8=%xmm9
11507movdqa %xmm5,%xmm9
11508
11509# qhasm: xmm10 = xmm15
11510# asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11
11511# asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10
11512movdqa %xmm13,%xmm10
11513
11514# qhasm: xmm10 ^= xmm14
11515# asm 1: pxor <xmm14=int6464#12,<xmm10=int6464#11
11516# asm 2: pxor <xmm14=%xmm11,<xmm10=%xmm10
11517pxor %xmm11,%xmm10
11518
11519# qhasm: xmm10 &= xmm6
11520# asm 1: pand <xmm6=int6464#7,<xmm10=int6464#11
11521# asm 2: pand <xmm6=%xmm6,<xmm10=%xmm10
11522pand %xmm6,%xmm10
11523
11524# qhasm: xmm6 ^= xmm5
11525# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
11526# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
11527pxor %xmm5,%xmm6
11528
11529# qhasm: xmm6 &= xmm14
11530# asm 1: pand <xmm14=int6464#12,<xmm6=int6464#7
11531# asm 2: pand <xmm14=%xmm11,<xmm6=%xmm6
11532pand %xmm11,%xmm6
11533
11534# qhasm: xmm5 &= xmm15
11535# asm 1: pand <xmm15=int6464#14,<xmm5=int6464#6
11536# asm 2: pand <xmm15=%xmm13,<xmm5=%xmm5
11537pand %xmm13,%xmm5
11538
11539# qhasm: xmm6 ^= xmm5
11540# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
11541# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
11542pxor %xmm5,%xmm6
11543
11544# qhasm: xmm5 ^= xmm10
11545# asm 1: pxor <xmm10=int6464#11,<xmm5=int6464#6
11546# asm 2: pxor <xmm10=%xmm10,<xmm5=%xmm5
11547pxor %xmm10,%xmm5
11548
11549# qhasm: xmm12 ^= xmm0
11550# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#9
11551# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm8
11552pxor %xmm0,%xmm8
11553
11554# qhasm: xmm8 ^= xmm3
11555# asm 1: pxor <xmm3=int6464#4,<xmm8=int6464#10
11556# asm 2: pxor <xmm3=%xmm3,<xmm8=%xmm9
11557pxor %xmm3,%xmm9
11558
11559# qhasm: xmm15 ^= xmm13
11560# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
11561# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
11562pxor %xmm15,%xmm13
11563
11564# qhasm: xmm14 ^= xmm9
11565# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
11566# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
11567pxor %xmm12,%xmm11
11568
11569# qhasm: xmm11 = xmm15
11570# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
11571# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
11572movdqa %xmm13,%xmm10
11573
11574# qhasm: xmm11 ^= xmm14
11575# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
11576# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
11577pxor %xmm11,%xmm10
11578
11579# qhasm: xmm11 &= xmm12
11580# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
11581# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
11582pand %xmm8,%xmm10
11583
11584# qhasm: xmm12 ^= xmm8
11585# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
11586# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
11587pxor %xmm9,%xmm8
11588
11589# qhasm: xmm12 &= xmm14
11590# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
11591# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
11592pand %xmm11,%xmm8
11593
11594# qhasm: xmm8 &= xmm15
11595# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
11596# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
11597pand %xmm13,%xmm9
11598
11599# qhasm: xmm8 ^= xmm12
11600# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
11601# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
11602pxor %xmm8,%xmm9
11603
11604# qhasm: xmm12 ^= xmm11
11605# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
11606# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
11607pxor %xmm10,%xmm8
11608
11609# qhasm: xmm10 = xmm13
11610# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
11611# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
11612movdqa %xmm15,%xmm10
11613
11614# qhasm: xmm10 ^= xmm9
11615# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
11616# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
11617pxor %xmm12,%xmm10
11618
11619# qhasm: xmm10 &= xmm0
11620# asm 1: pand <xmm0=int6464#1,<xmm10=int6464#11
11621# asm 2: pand <xmm0=%xmm0,<xmm10=%xmm10
11622pand %xmm0,%xmm10
11623
11624# qhasm: xmm0 ^= xmm3
11625# asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1
11626# asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0
11627pxor %xmm3,%xmm0
11628
11629# qhasm: xmm0 &= xmm9
11630# asm 1: pand <xmm9=int6464#13,<xmm0=int6464#1
11631# asm 2: pand <xmm9=%xmm12,<xmm0=%xmm0
11632pand %xmm12,%xmm0
11633
11634# qhasm: xmm3 &= xmm13
11635# asm 1: pand <xmm13=int6464#16,<xmm3=int6464#4
11636# asm 2: pand <xmm13=%xmm15,<xmm3=%xmm3
11637pand %xmm15,%xmm3
11638
11639# qhasm: xmm0 ^= xmm3
11640# asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1
11641# asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0
11642pxor %xmm3,%xmm0
11643
11644# qhasm: xmm3 ^= xmm10
11645# asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4
11646# asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3
11647pxor %xmm10,%xmm3
11648
11649# qhasm: xmm6 ^= xmm12
11650# asm 1: pxor <xmm12=int6464#9,<xmm6=int6464#7
11651# asm 2: pxor <xmm12=%xmm8,<xmm6=%xmm6
11652pxor %xmm8,%xmm6
11653
11654# qhasm: xmm0 ^= xmm12
11655# asm 1: pxor <xmm12=int6464#9,<xmm0=int6464#1
11656# asm 2: pxor <xmm12=%xmm8,<xmm0=%xmm0
11657pxor %xmm8,%xmm0
11658
11659# qhasm: xmm5 ^= xmm8
11660# asm 1: pxor <xmm8=int6464#10,<xmm5=int6464#6
11661# asm 2: pxor <xmm8=%xmm9,<xmm5=%xmm5
11662pxor %xmm9,%xmm5
11663
11664# qhasm: xmm3 ^= xmm8
11665# asm 1: pxor <xmm8=int6464#10,<xmm3=int6464#4
11666# asm 2: pxor <xmm8=%xmm9,<xmm3=%xmm3
11667pxor %xmm9,%xmm3
11668
11669# qhasm: xmm12 = xmm7
11670# asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#9
11671# asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm8
11672movdqa %xmm7,%xmm8
11673
11674# qhasm: xmm8 = xmm1
11675# asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10
11676# asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9
11677movdqa %xmm1,%xmm9
11678
11679# qhasm: xmm12 ^= xmm4
11680# asm 1: pxor <xmm4=int6464#5,<xmm12=int6464#9
11681# asm 2: pxor <xmm4=%xmm4,<xmm12=%xmm8
11682pxor %xmm4,%xmm8
11683
11684# qhasm: xmm8 ^= xmm2
11685# asm 1: pxor <xmm2=int6464#3,<xmm8=int6464#10
11686# asm 2: pxor <xmm2=%xmm2,<xmm8=%xmm9
11687pxor %xmm2,%xmm9
11688
11689# qhasm: xmm11 = xmm15
11690# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
11691# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
11692movdqa %xmm13,%xmm10
11693
11694# qhasm: xmm11 ^= xmm14
11695# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
11696# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
11697pxor %xmm11,%xmm10
11698
11699# qhasm: xmm11 &= xmm12
11700# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
11701# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
11702pand %xmm8,%xmm10
11703
11704# qhasm: xmm12 ^= xmm8
11705# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
11706# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
11707pxor %xmm9,%xmm8
11708
11709# qhasm: xmm12 &= xmm14
11710# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
11711# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
11712pand %xmm11,%xmm8
11713
11714# qhasm: xmm8 &= xmm15
11715# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
11716# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
11717pand %xmm13,%xmm9
11718
11719# qhasm: xmm8 ^= xmm12
11720# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
11721# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
11722pxor %xmm8,%xmm9
11723
11724# qhasm: xmm12 ^= xmm11
11725# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
11726# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
11727pxor %xmm10,%xmm8
11728
11729# qhasm: xmm10 = xmm13
11730# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
11731# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
11732movdqa %xmm15,%xmm10
11733
11734# qhasm: xmm10 ^= xmm9
11735# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
11736# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
11737pxor %xmm12,%xmm10
11738
11739# qhasm: xmm10 &= xmm4
11740# asm 1: pand <xmm4=int6464#5,<xmm10=int6464#11
11741# asm 2: pand <xmm4=%xmm4,<xmm10=%xmm10
11742pand %xmm4,%xmm10
11743
11744# qhasm: xmm4 ^= xmm2
11745# asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5
11746# asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4
11747pxor %xmm2,%xmm4
11748
11749# qhasm: xmm4 &= xmm9
11750# asm 1: pand <xmm9=int6464#13,<xmm4=int6464#5
11751# asm 2: pand <xmm9=%xmm12,<xmm4=%xmm4
11752pand %xmm12,%xmm4
11753
11754# qhasm: xmm2 &= xmm13
11755# asm 1: pand <xmm13=int6464#16,<xmm2=int6464#3
11756# asm 2: pand <xmm13=%xmm15,<xmm2=%xmm2
11757pand %xmm15,%xmm2
11758
11759# qhasm: xmm4 ^= xmm2
11760# asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5
11761# asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4
11762pxor %xmm2,%xmm4
11763
11764# qhasm: xmm2 ^= xmm10
11765# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3
11766# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2
11767pxor %xmm10,%xmm2
11768
11769# qhasm: xmm15 ^= xmm13
11770# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
11771# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
11772pxor %xmm15,%xmm13
11773
11774# qhasm: xmm14 ^= xmm9
11775# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
11776# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
11777pxor %xmm12,%xmm11
11778
11779# qhasm: xmm11 = xmm15
11780# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
11781# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
11782movdqa %xmm13,%xmm10
11783
11784# qhasm: xmm11 ^= xmm14
11785# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
11786# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
11787pxor %xmm11,%xmm10
11788
11789# qhasm: xmm11 &= xmm7
11790# asm 1: pand <xmm7=int6464#8,<xmm11=int6464#11
11791# asm 2: pand <xmm7=%xmm7,<xmm11=%xmm10
11792pand %xmm7,%xmm10
11793
11794# qhasm: xmm7 ^= xmm1
11795# asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8
11796# asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7
11797pxor %xmm1,%xmm7
11798
11799# qhasm: xmm7 &= xmm14
11800# asm 1: pand <xmm14=int6464#12,<xmm7=int6464#8
11801# asm 2: pand <xmm14=%xmm11,<xmm7=%xmm7
11802pand %xmm11,%xmm7
11803
11804# qhasm: xmm1 &= xmm15
11805# asm 1: pand <xmm15=int6464#14,<xmm1=int6464#2
11806# asm 2: pand <xmm15=%xmm13,<xmm1=%xmm1
11807pand %xmm13,%xmm1
11808
11809# qhasm: xmm7 ^= xmm1
11810# asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8
11811# asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7
11812pxor %xmm1,%xmm7
11813
11814# qhasm: xmm1 ^= xmm11
11815# asm 1: pxor <xmm11=int6464#11,<xmm1=int6464#2
11816# asm 2: pxor <xmm11=%xmm10,<xmm1=%xmm1
11817pxor %xmm10,%xmm1
11818
11819# qhasm: xmm7 ^= xmm12
11820# asm 1: pxor <xmm12=int6464#9,<xmm7=int6464#8
11821# asm 2: pxor <xmm12=%xmm8,<xmm7=%xmm7
11822pxor %xmm8,%xmm7
11823
11824# qhasm: xmm4 ^= xmm12
11825# asm 1: pxor <xmm12=int6464#9,<xmm4=int6464#5
11826# asm 2: pxor <xmm12=%xmm8,<xmm4=%xmm4
11827pxor %xmm8,%xmm4
11828
11829# qhasm: xmm1 ^= xmm8
11830# asm 1: pxor <xmm8=int6464#10,<xmm1=int6464#2
11831# asm 2: pxor <xmm8=%xmm9,<xmm1=%xmm1
11832pxor %xmm9,%xmm1
11833
11834# qhasm: xmm2 ^= xmm8
11835# asm 1: pxor <xmm8=int6464#10,<xmm2=int6464#3
11836# asm 2: pxor <xmm8=%xmm9,<xmm2=%xmm2
11837pxor %xmm9,%xmm2
11838
11839# qhasm: xmm7 ^= xmm0
11840# asm 1: pxor <xmm0=int6464#1,<xmm7=int6464#8
11841# asm 2: pxor <xmm0=%xmm0,<xmm7=%xmm7
11842pxor %xmm0,%xmm7
11843
11844# qhasm: xmm1 ^= xmm6
11845# asm 1: pxor <xmm6=int6464#7,<xmm1=int6464#2
11846# asm 2: pxor <xmm6=%xmm6,<xmm1=%xmm1
11847pxor %xmm6,%xmm1
11848
11849# qhasm: xmm4 ^= xmm7
11850# asm 1: pxor <xmm7=int6464#8,<xmm4=int6464#5
11851# asm 2: pxor <xmm7=%xmm7,<xmm4=%xmm4
11852pxor %xmm7,%xmm4
11853
11854# qhasm: xmm6 ^= xmm0
11855# asm 1: pxor <xmm0=int6464#1,<xmm6=int6464#7
11856# asm 2: pxor <xmm0=%xmm0,<xmm6=%xmm6
11857pxor %xmm0,%xmm6
11858
11859# qhasm: xmm0 ^= xmm1
11860# asm 1: pxor <xmm1=int6464#2,<xmm0=int6464#1
11861# asm 2: pxor <xmm1=%xmm1,<xmm0=%xmm0
11862pxor %xmm1,%xmm0
11863
11864# qhasm: xmm1 ^= xmm5
11865# asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2
11866# asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1
11867pxor %xmm5,%xmm1
11868
11869# qhasm: xmm5 ^= xmm2
11870# asm 1: pxor <xmm2=int6464#3,<xmm5=int6464#6
11871# asm 2: pxor <xmm2=%xmm2,<xmm5=%xmm5
11872pxor %xmm2,%xmm5
11873
11874# qhasm: xmm4 ^= xmm5
11875# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
11876# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
11877pxor %xmm5,%xmm4
11878
11879# qhasm: xmm2 ^= xmm3
11880# asm 1: pxor <xmm3=int6464#4,<xmm2=int6464#3
11881# asm 2: pxor <xmm3=%xmm3,<xmm2=%xmm2
11882pxor %xmm3,%xmm2
11883
11884# qhasm: xmm3 ^= xmm5
11885# asm 1: pxor <xmm5=int6464#6,<xmm3=int6464#4
11886# asm 2: pxor <xmm5=%xmm5,<xmm3=%xmm3
11887pxor %xmm5,%xmm3
11888
11889# qhasm: xmm6 ^= xmm3
11890# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
11891# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
11892pxor %xmm3,%xmm6
11893
11894# qhasm: xmm0 ^= RCON
11895# asm 1: pxor RCON,<xmm0=int6464#1
11896# asm 2: pxor RCON,<xmm0=%xmm0
11897pxor RCON,%xmm0
11898
11899# qhasm: xmm1 ^= RCON
11900# asm 1: pxor RCON,<xmm1=int6464#2
11901# asm 2: pxor RCON,<xmm1=%xmm1
11902pxor RCON,%xmm1
11903
11904# qhasm: xmm6 ^= RCON
11905# asm 1: pxor RCON,<xmm6=int6464#7
11906# asm 2: pxor RCON,<xmm6=%xmm6
11907pxor RCON,%xmm6
11908
11909# qhasm: xmm3 ^= RCON
11910# asm 1: pxor RCON,<xmm3=int6464#4
11911# asm 2: pxor RCON,<xmm3=%xmm3
11912pxor RCON,%xmm3
11913
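# RCON is all-ones in a single 32-bit lane, so XORing it into a subset of
# slices (here xmm0, xmm1, xmm6, xmm3) plausibly encodes this round's
# round-constant byte in bitsliced form.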
11914# qhasm: shuffle bytes of xmm0 by EXPB0
11915# asm 1: pshufb EXPB0,<xmm0=int6464#1
11916# asm 2: pshufb EXPB0,<xmm0=%xmm0
11917pshufb EXPB0,%xmm0
11918
11919# qhasm: shuffle bytes of xmm1 by EXPB0
11920# asm 1: pshufb EXPB0,<xmm1=int6464#2
11921# asm 2: pshufb EXPB0,<xmm1=%xmm1
11922pshufb EXPB0,%xmm1
11923
11924# qhasm: shuffle bytes of xmm4 by EXPB0
11925# asm 1: pshufb EXPB0,<xmm4=int6464#5
11926# asm 2: pshufb EXPB0,<xmm4=%xmm4
11927pshufb EXPB0,%xmm4
11928
11929# qhasm: shuffle bytes of xmm6 by EXPB0
11930# asm 1: pshufb EXPB0,<xmm6=int6464#7
11931# asm 2: pshufb EXPB0,<xmm6=%xmm6
11932pshufb EXPB0,%xmm6
11933
11934# qhasm: shuffle bytes of xmm3 by EXPB0
11935# asm 1: pshufb EXPB0,<xmm3=int6464#4
11936# asm 2: pshufb EXPB0,<xmm3=%xmm3
11937pshufb EXPB0,%xmm3
11938
11939# qhasm: shuffle bytes of xmm7 by EXPB0
11940# asm 1: pshufb EXPB0,<xmm7=int6464#8
11941# asm 2: pshufb EXPB0,<xmm7=%xmm7
11942pshufb EXPB0,%xmm7
11943
11944# qhasm: shuffle bytes of xmm2 by EXPB0
11945# asm 1: pshufb EXPB0,<xmm2=int6464#3
11946# asm 2: pshufb EXPB0,<xmm2=%xmm2
11947pshufb EXPB0,%xmm2
11948
11949# qhasm: shuffle bytes of xmm5 by EXPB0
11950# asm 1: pshufb EXPB0,<xmm5=int6464#6
11951# asm 2: pshufb EXPB0,<xmm5=%xmm5
11952pshufb EXPB0,%xmm5
11953
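# Reload the round key just stored at offset 1024 as input for the next
# word-propagation pass.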
11954# qhasm: xmm8 = *(int128 *)(c + 1024)
11955# asm 1: movdqa 1024(<c=int64#1),>xmm8=int6464#9
11956# asm 2: movdqa 1024(<c=%rdi),>xmm8=%xmm8
11957movdqa 1024(%rdi),%xmm8
11958
11959# qhasm: xmm9 = *(int128 *)(c + 1040)
11960# asm 1: movdqa 1040(<c=int64#1),>xmm9=int6464#10
11961# asm 2: movdqa 1040(<c=%rdi),>xmm9=%xmm9
11962movdqa 1040(%rdi),%xmm9
11963
11964# qhasm: xmm10 = *(int128 *)(c + 1056)
11965# asm 1: movdqa 1056(<c=int64#1),>xmm10=int6464#11
11966# asm 2: movdqa 1056(<c=%rdi),>xmm10=%xmm10
11967movdqa 1056(%rdi),%xmm10
11968
11969# qhasm: xmm11 = *(int128 *)(c + 1072)
11970# asm 1: movdqa 1072(<c=int64#1),>xmm11=int6464#12
11971# asm 2: movdqa 1072(<c=%rdi),>xmm11=%xmm11
11972movdqa 1072(%rdi),%xmm11
11973
11974# qhasm: xmm12 = *(int128 *)(c + 1088)
11975# asm 1: movdqa 1088(<c=int64#1),>xmm12=int6464#13
11976# asm 2: movdqa 1088(<c=%rdi),>xmm12=%xmm12
11977movdqa 1088(%rdi),%xmm12
11978
11979# qhasm: xmm13 = *(int128 *)(c + 1104)
11980# asm 1: movdqa 1104(<c=int64#1),>xmm13=int6464#14
11981# asm 2: movdqa 1104(<c=%rdi),>xmm13=%xmm13
11982movdqa 1104(%rdi),%xmm13
11983
11984# qhasm: xmm14 = *(int128 *)(c + 1120)
11985# asm 1: movdqa 1120(<c=int64#1),>xmm14=int6464#15
11986# asm 2: movdqa 1120(<c=%rdi),>xmm14=%xmm14
11987movdqa 1120(%rdi),%xmm14
11988
11989# qhasm: xmm15 = *(int128 *)(c + 1136)
11990# asm 1: movdqa 1136(<c=int64#1),>xmm15=int6464#16
11991# asm 2: movdqa 1136(<c=%rdi),>xmm15=%xmm15
11992movdqa 1136(%rdi),%xmm15
11993
11994# qhasm: xmm8 ^= ONE
11995# asm 1: pxor ONE,<xmm8=int6464#9
11996# asm 2: pxor ONE,<xmm8=%xmm8
11997pxor ONE,%xmm8
11998
11999# qhasm: xmm9 ^= ONE
12000# asm 1: pxor ONE,<xmm9=int6464#10
12001# asm 2: pxor ONE,<xmm9=%xmm9
12002pxor ONE,%xmm9
12003
12004# qhasm: xmm13 ^= ONE
12005# asm 1: pxor ONE,<xmm13=int6464#14
12006# asm 2: pxor ONE,<xmm13=%xmm13
12007pxor ONE,%xmm13
12008
12009# qhasm: xmm14 ^= ONE
12010# asm 1: pxor ONE,<xmm14=int6464#15
12011# asm 2: pxor ONE,<xmm14=%xmm14
12012pxor ONE,%xmm14
12013
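# Same XOR/shift word propagation as in the previous round, applied to the
# newly substituted slices.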
12014# qhasm: xmm0 ^= xmm8
12015# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
12016# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
12017pxor %xmm8,%xmm0
12018
12019# qhasm: xmm1 ^= xmm9
12020# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
12021# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
12022pxor %xmm9,%xmm1
12023
12024# qhasm: xmm4 ^= xmm10
12025# asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5
12026# asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4
12027pxor %xmm10,%xmm4
12028
12029# qhasm: xmm6 ^= xmm11
12030# asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7
12031# asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6
12032pxor %xmm11,%xmm6
12033
12034# qhasm: xmm3 ^= xmm12
12035# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4
12036# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3
12037pxor %xmm12,%xmm3
12038
12039# qhasm: xmm7 ^= xmm13
12040# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
12041# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
12042pxor %xmm13,%xmm7
12043
12044# qhasm: xmm2 ^= xmm14
12045# asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3
12046# asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2
12047pxor %xmm14,%xmm2
12048
12049# qhasm: xmm5 ^= xmm15
12050# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
12051# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
12052pxor %xmm15,%xmm5
12053
12054# qhasm: uint32323232 xmm8 >>= 8
12055# asm 1: psrld $8,<xmm8=int6464#9
12056# asm 2: psrld $8,<xmm8=%xmm8
12057psrld $8,%xmm8
12058
12059# qhasm: uint32323232 xmm9 >>= 8
12060# asm 1: psrld $8,<xmm9=int6464#10
12061# asm 2: psrld $8,<xmm9=%xmm9
12062psrld $8,%xmm9
12063
12064# qhasm: uint32323232 xmm10 >>= 8
12065# asm 1: psrld $8,<xmm10=int6464#11
12066# asm 2: psrld $8,<xmm10=%xmm10
12067psrld $8,%xmm10
12068
12069# qhasm: uint32323232 xmm11 >>= 8
12070# asm 1: psrld $8,<xmm11=int6464#12
12071# asm 2: psrld $8,<xmm11=%xmm11
12072psrld $8,%xmm11
12073
12074# qhasm: uint32323232 xmm12 >>= 8
12075# asm 1: psrld $8,<xmm12=int6464#13
12076# asm 2: psrld $8,<xmm12=%xmm12
12077psrld $8,%xmm12
12078
12079# qhasm: uint32323232 xmm13 >>= 8
12080# asm 1: psrld $8,<xmm13=int6464#14
12081# asm 2: psrld $8,<xmm13=%xmm13
12082psrld $8,%xmm13
12083
12084# qhasm: uint32323232 xmm14 >>= 8
12085# asm 1: psrld $8,<xmm14=int6464#15
12086# asm 2: psrld $8,<xmm14=%xmm14
12087psrld $8,%xmm14
12088
12089# qhasm: uint32323232 xmm15 >>= 8
12090# asm 1: psrld $8,<xmm15=int6464#16
12091# asm 2: psrld $8,<xmm15=%xmm15
12092psrld $8,%xmm15
12093
12094# qhasm: xmm0 ^= xmm8
12095# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
12096# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
12097pxor %xmm8,%xmm0
12098
12099# qhasm: xmm1 ^= xmm9
12100# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
12101# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
12102pxor %xmm9,%xmm1
12103
12104# qhasm: xmm4 ^= xmm10
12105# asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5
12106# asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4
12107pxor %xmm10,%xmm4
12108
12109# qhasm: xmm6 ^= xmm11
12110# asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7
12111# asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6
12112pxor %xmm11,%xmm6
12113
12114# qhasm: xmm3 ^= xmm12
12115# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4
12116# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3
12117pxor %xmm12,%xmm3
12118
12119# qhasm: xmm7 ^= xmm13
12120# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
12121# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
12122pxor %xmm13,%xmm7
12123
12124# qhasm: xmm2 ^= xmm14
12125# asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3
12126# asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2
12127pxor %xmm14,%xmm2
12128
12129# qhasm: xmm5 ^= xmm15
12130# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
12131# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
12132pxor %xmm15,%xmm5
12133
12134# qhasm: uint32323232 xmm8 >>= 8
12135# asm 1: psrld $8,<xmm8=int6464#9
12136# asm 2: psrld $8,<xmm8=%xmm8
12137psrld $8,%xmm8
12138
12139# qhasm: uint32323232 xmm9 >>= 8
12140# asm 1: psrld $8,<xmm9=int6464#10
12141# asm 2: psrld $8,<xmm9=%xmm9
12142psrld $8,%xmm9
12143
12144# qhasm: uint32323232 xmm10 >>= 8
12145# asm 1: psrld $8,<xmm10=int6464#11
12146# asm 2: psrld $8,<xmm10=%xmm10
12147psrld $8,%xmm10
12148
12149# qhasm: uint32323232 xmm11 >>= 8
12150# asm 1: psrld $8,<xmm11=int6464#12
12151# asm 2: psrld $8,<xmm11=%xmm11
12152psrld $8,%xmm11
12153
12154# qhasm: uint32323232 xmm12 >>= 8
12155# asm 1: psrld $8,<xmm12=int6464#13
12156# asm 2: psrld $8,<xmm12=%xmm12
12157psrld $8,%xmm12
12158
12159# qhasm: uint32323232 xmm13 >>= 8
12160# asm 1: psrld $8,<xmm13=int6464#14
12161# asm 2: psrld $8,<xmm13=%xmm13
12162psrld $8,%xmm13
12163
12164# qhasm: uint32323232 xmm14 >>= 8
12165# asm 1: psrld $8,<xmm14=int6464#15
12166# asm 2: psrld $8,<xmm14=%xmm14
12167psrld $8,%xmm14
12168
12169# qhasm: uint32323232 xmm15 >>= 8
12170# asm 1: psrld $8,<xmm15=int6464#16
12171# asm 2: psrld $8,<xmm15=%xmm15
12172psrld $8,%xmm15
12173
12174# qhasm: xmm0 ^= xmm8
12175# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
12176# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
12177pxor %xmm8,%xmm0
12178
12179# qhasm: xmm1 ^= xmm9
12180# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
12181# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
12182pxor %xmm9,%xmm1
12183
12184# qhasm: xmm4 ^= xmm10
12185# asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5
12186# asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4
12187pxor %xmm10,%xmm4
12188
12189# qhasm: xmm6 ^= xmm11
12190# asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7
12191# asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6
12192pxor %xmm11,%xmm6
12193
12194# qhasm: xmm3 ^= xmm12
12195# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4
12196# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3
12197pxor %xmm12,%xmm3
12198
12199# qhasm: xmm7 ^= xmm13
12200# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
12201# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
12202pxor %xmm13,%xmm7
12203
12204# qhasm: xmm2 ^= xmm14
12205# asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3
12206# asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2
12207pxor %xmm14,%xmm2
12208
12209# qhasm: xmm5 ^= xmm15
12210# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
12211# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
12212pxor %xmm15,%xmm5
12213
12214# qhasm: uint32323232 xmm8 >>= 8
12215# asm 1: psrld $8,<xmm8=int6464#9
12216# asm 2: psrld $8,<xmm8=%xmm8
12217psrld $8,%xmm8
12218
12219# qhasm: uint32323232 xmm9 >>= 8
12220# asm 1: psrld $8,<xmm9=int6464#10
12221# asm 2: psrld $8,<xmm9=%xmm9
12222psrld $8,%xmm9
12223
12224# qhasm: uint32323232 xmm10 >>= 8
12225# asm 1: psrld $8,<xmm10=int6464#11
12226# asm 2: psrld $8,<xmm10=%xmm10
12227psrld $8,%xmm10
12228
12229# qhasm: uint32323232 xmm11 >>= 8
12230# asm 1: psrld $8,<xmm11=int6464#12
12231# asm 2: psrld $8,<xmm11=%xmm11
12232psrld $8,%xmm11
12233
12234# qhasm: uint32323232 xmm12 >>= 8
12235# asm 1: psrld $8,<xmm12=int6464#13
12236# asm 2: psrld $8,<xmm12=%xmm12
12237psrld $8,%xmm12
12238
12239# qhasm: uint32323232 xmm13 >>= 8
12240# asm 1: psrld $8,<xmm13=int6464#14
12241# asm 2: psrld $8,<xmm13=%xmm13
12242psrld $8,%xmm13
12243
12244# qhasm: uint32323232 xmm14 >>= 8
12245# asm 1: psrld $8,<xmm14=int6464#15
12246# asm 2: psrld $8,<xmm14=%xmm14
12247psrld $8,%xmm14
12248
12249# qhasm: uint32323232 xmm15 >>= 8
12250# asm 1: psrld $8,<xmm15=int6464#16
12251# asm 2: psrld $8,<xmm15=%xmm15
12252psrld $8,%xmm15
12253
12254# qhasm: xmm0 ^= xmm8
12255# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
12256# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
12257pxor %xmm8,%xmm0
12258
12259# qhasm: xmm1 ^= xmm9
12260# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
12261# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
12262pxor %xmm9,%xmm1
12263
12264# qhasm: xmm4 ^= xmm10
12265# asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5
12266# asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4
12267pxor %xmm10,%xmm4
12268
12269# qhasm: xmm6 ^= xmm11
12270# asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7
12271# asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6
12272pxor %xmm11,%xmm6
12273
12274# qhasm: xmm3 ^= xmm12
12275# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4
12276# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3
12277pxor %xmm12,%xmm3
12278
12279# qhasm: xmm7 ^= xmm13
12280# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
12281# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
12282pxor %xmm13,%xmm7
12283
12284# qhasm: xmm2 ^= xmm14
12285# asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3
12286# asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2
12287pxor %xmm14,%xmm2
12288
12289# qhasm: xmm5 ^= xmm15
12290# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
12291# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
12292pxor %xmm15,%xmm5
12293
12294# qhasm: *(int128 *)(c + 1152) = xmm0
12295# asm 1: movdqa <xmm0=int6464#1,1152(<c=int64#1)
12296# asm 2: movdqa <xmm0=%xmm0,1152(<c=%rdi)
12297movdqa %xmm0,1152(%rdi)
12298
12299# qhasm: *(int128 *)(c + 1168) = xmm1
12300# asm 1: movdqa <xmm1=int6464#2,1168(<c=int64#1)
12301# asm 2: movdqa <xmm1=%xmm1,1168(<c=%rdi)
12302movdqa %xmm1,1168(%rdi)
12303
12304# qhasm: *(int128 *)(c + 1184) = xmm4
12305# asm 1: movdqa <xmm4=int6464#5,1184(<c=int64#1)
12306# asm 2: movdqa <xmm4=%xmm4,1184(<c=%rdi)
12307movdqa %xmm4,1184(%rdi)
12308
12309# qhasm: *(int128 *)(c + 1200) = xmm6
12310# asm 1: movdqa <xmm6=int6464#7,1200(<c=int64#1)
12311# asm 2: movdqa <xmm6=%xmm6,1200(<c=%rdi)
12312movdqa %xmm6,1200(%rdi)
12313
12314# qhasm: *(int128 *)(c + 1216) = xmm3
12315# asm 1: movdqa <xmm3=int6464#4,1216(<c=int64#1)
12316# asm 2: movdqa <xmm3=%xmm3,1216(<c=%rdi)
12317movdqa %xmm3,1216(%rdi)
12318
12319# qhasm: *(int128 *)(c + 1232) = xmm7
12320# asm 1: movdqa <xmm7=int6464#8,1232(<c=int64#1)
12321# asm 2: movdqa <xmm7=%xmm7,1232(<c=%rdi)
12322movdqa %xmm7,1232(%rdi)
12323
12324# qhasm: *(int128 *)(c + 1248) = xmm2
12325# asm 1: movdqa <xmm2=int6464#3,1248(<c=int64#1)
12326# asm 2: movdqa <xmm2=%xmm2,1248(<c=%rdi)
12327movdqa %xmm2,1248(%rdi)
12328
12329# qhasm: *(int128 *)(c + 1264) = xmm5
12330# asm 1: movdqa <xmm5=int6464#6,1264(<c=int64#1)
12331# asm 2: movdqa <xmm5=%xmm5,1264(<c=%rdi)
12332movdqa %xmm5,1264(%rdi)
12333
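# Note: the eight movdqa stores above write one round key in bitsliced
# form (8 bit-plane registers x 16 bytes = 128 bytes); offset 1152 = 9*128,
# so this is presumably round key 9, and the code below derives and stores
# the final round key at offset 1280.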
12334# qhasm: xmm0 ^= ONE
12335# asm 1: pxor ONE,<xmm0=int6464#1
12336# asm 2: pxor ONE,<xmm0=%xmm0
12337pxor ONE,%xmm0
12338
12339# qhasm: xmm1 ^= ONE
12340# asm 1: pxor ONE,<xmm1=int6464#2
12341# asm 2: pxor ONE,<xmm1=%xmm1
12342pxor ONE,%xmm1
12343
12344# qhasm: xmm7 ^= ONE
12345# asm 1: pxor ONE,<xmm7=int6464#8
12346# asm 2: pxor ONE,<xmm7=%xmm7
12347pxor ONE,%xmm7
12348
12349# qhasm: xmm2 ^= ONE
12350# asm 1: pxor ONE,<xmm2=int6464#3
12351# asm 2: pxor ONE,<xmm2=%xmm2
12352pxor ONE,%xmm2
12353
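# Note: ONE is the all-ones mask, so these four pxors complement bit-planes
# 0, 1, 5 and 6 of the state (taking the stores above as planes 0..7 at
# offsets 1152..1264 in order) -- exactly the set bits of the AES S-box
# affine constant 0x63, folded in around the bitsliced S-box circuit below.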
12354# qhasm: shuffle bytes of xmm0 by ROTB
12355# asm 1: pshufb ROTB,<xmm0=int6464#1
12356# asm 2: pshufb ROTB,<xmm0=%xmm0
12357pshufb ROTB,%xmm0
12358
12359# qhasm: shuffle bytes of xmm1 by ROTB
12360# asm 1: pshufb ROTB,<xmm1=int6464#2
12361# asm 2: pshufb ROTB,<xmm1=%xmm1
12362pshufb ROTB,%xmm1
12363
12364# qhasm: shuffle bytes of xmm4 by ROTB
12365# asm 1: pshufb ROTB,<xmm4=int6464#5
12366# asm 2: pshufb ROTB,<xmm4=%xmm4
12367pshufb ROTB,%xmm4
12368
12369# qhasm: shuffle bytes of xmm6 by ROTB
12370# asm 1: pshufb ROTB,<xmm6=int6464#7
12371# asm 2: pshufb ROTB,<xmm6=%xmm6
12372pshufb ROTB,%xmm6
12373
12374# qhasm: shuffle bytes of xmm3 by ROTB
12375# asm 1: pshufb ROTB,<xmm3=int6464#4
12376# asm 2: pshufb ROTB,<xmm3=%xmm3
12377pshufb ROTB,%xmm3
12378
12379# qhasm: shuffle bytes of xmm7 by ROTB
12380# asm 1: pshufb ROTB,<xmm7=int6464#8
12381# asm 2: pshufb ROTB,<xmm7=%xmm7
12382pshufb ROTB,%xmm7
12383
12384# qhasm: shuffle bytes of xmm2 by ROTB
12385# asm 1: pshufb ROTB,<xmm2=int6464#3
12386# asm 2: pshufb ROTB,<xmm2=%xmm2
12387pshufb ROTB,%xmm2
12388
12389# qhasm: shuffle bytes of xmm5 by ROTB
12390# asm 1: pshufb ROTB,<xmm5=int6464#6
12391# asm 2: pshufb ROTB,<xmm5=%xmm5
12392pshufb ROTB,%xmm5
12393
12394# qhasm: xmm7 ^= xmm2
12395# asm 1: pxor <xmm2=int6464#3,<xmm7=int6464#8
12396# asm 2: pxor <xmm2=%xmm2,<xmm7=%xmm7
12397pxor %xmm2,%xmm7
12398
12399# qhasm: xmm4 ^= xmm1
12400# asm 1: pxor <xmm1=int6464#2,<xmm4=int6464#5
12401# asm 2: pxor <xmm1=%xmm1,<xmm4=%xmm4
12402pxor %xmm1,%xmm4
12403
12404# qhasm: xmm7 ^= xmm0
12405# asm 1: pxor <xmm0=int6464#1,<xmm7=int6464#8
12406# asm 2: pxor <xmm0=%xmm0,<xmm7=%xmm7
12407pxor %xmm0,%xmm7
12408
12409# qhasm: xmm2 ^= xmm4
12410# asm 1: pxor <xmm4=int6464#5,<xmm2=int6464#3
12411# asm 2: pxor <xmm4=%xmm4,<xmm2=%xmm2
12412pxor %xmm4,%xmm2
12413
12414# qhasm: xmm6 ^= xmm0
12415# asm 1: pxor <xmm0=int6464#1,<xmm6=int6464#7
12416# asm 2: pxor <xmm0=%xmm0,<xmm6=%xmm6
12417pxor %xmm0,%xmm6
12418
12419# qhasm: xmm2 ^= xmm6
12420# asm 1: pxor <xmm6=int6464#7,<xmm2=int6464#3
12421# asm 2: pxor <xmm6=%xmm6,<xmm2=%xmm2
12422pxor %xmm6,%xmm2
12423
12424# qhasm: xmm6 ^= xmm5
12425# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
12426# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
12427pxor %xmm5,%xmm6
12428
12429# qhasm: xmm6 ^= xmm3
12430# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
12431# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
12432pxor %xmm3,%xmm6
12433
12434# qhasm: xmm5 ^= xmm7
12435# asm 1: pxor <xmm7=int6464#8,<xmm5=int6464#6
12436# asm 2: pxor <xmm7=%xmm7,<xmm5=%xmm5
12437pxor %xmm7,%xmm5
12438
12439# qhasm: xmm6 ^= xmm1
12440# asm 1: pxor <xmm1=int6464#2,<xmm6=int6464#7
12441# asm 2: pxor <xmm1=%xmm1,<xmm6=%xmm6
12442pxor %xmm1,%xmm6
12443
12444# qhasm: xmm3 ^= xmm7
12445# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4
12446# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3
12447pxor %xmm7,%xmm3
12448
12449# qhasm: xmm4 ^= xmm5
12450# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
12451# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
12452pxor %xmm5,%xmm4
12453
12454# qhasm: xmm1 ^= xmm7
12455# asm 1: pxor <xmm7=int6464#8,<xmm1=int6464#2
12456# asm 2: pxor <xmm7=%xmm7,<xmm1=%xmm1
12457pxor %xmm7,%xmm1
12458
12459# qhasm: xmm11 = xmm5
12460# asm 1: movdqa <xmm5=int6464#6,>xmm11=int6464#9
12461# asm 2: movdqa <xmm5=%xmm5,>xmm11=%xmm8
12462movdqa %xmm5,%xmm8
12463
12464# qhasm: xmm10 = xmm1
12465# asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10
12466# asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9
12467movdqa %xmm1,%xmm9
12468
12469# qhasm: xmm9 = xmm7
12470# asm 1: movdqa <xmm7=int6464#8,>xmm9=int6464#11
12471# asm 2: movdqa <xmm7=%xmm7,>xmm9=%xmm10
12472movdqa %xmm7,%xmm10
12473
12474# qhasm: xmm13 = xmm4
12475# asm 1: movdqa <xmm4=int6464#5,>xmm13=int6464#12
12476# asm 2: movdqa <xmm4=%xmm4,>xmm13=%xmm11
12477movdqa %xmm4,%xmm11
12478
12479# qhasm: xmm12 = xmm2
12480# asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#13
12481# asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm12
12482movdqa %xmm2,%xmm12
12483
12484# qhasm: xmm11 ^= xmm3
12485# asm 1: pxor <xmm3=int6464#4,<xmm11=int6464#9
12486# asm 2: pxor <xmm3=%xmm3,<xmm11=%xmm8
12487pxor %xmm3,%xmm8
12488
12489# qhasm: xmm10 ^= xmm4
12490# asm 1: pxor <xmm4=int6464#5,<xmm10=int6464#10
12491# asm 2: pxor <xmm4=%xmm4,<xmm10=%xmm9
12492pxor %xmm4,%xmm9
12493
12494# qhasm: xmm9 ^= xmm6
12495# asm 1: pxor <xmm6=int6464#7,<xmm9=int6464#11
12496# asm 2: pxor <xmm6=%xmm6,<xmm9=%xmm10
12497pxor %xmm6,%xmm10
12498
12499# qhasm: xmm13 ^= xmm3
12500# asm 1: pxor <xmm3=int6464#4,<xmm13=int6464#12
12501# asm 2: pxor <xmm3=%xmm3,<xmm13=%xmm11
12502pxor %xmm3,%xmm11
12503
12504# qhasm: xmm12 ^= xmm0
12505# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13
12506# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12
12507pxor %xmm0,%xmm12
12508
12509# qhasm: xmm14 = xmm11
12510# asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14
12511# asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13
12512movdqa %xmm8,%xmm13
12513
12514# qhasm: xmm8 = xmm10
12515# asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15
12516# asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14
12517movdqa %xmm9,%xmm14
12518
12519# qhasm: xmm15 = xmm11
12520# asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16
12521# asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15
12522movdqa %xmm8,%xmm15
12523
12524# qhasm: xmm10 |= xmm9
12525# asm 1: por <xmm9=int6464#11,<xmm10=int6464#10
12526# asm 2: por <xmm9=%xmm10,<xmm10=%xmm9
12527por %xmm10,%xmm9
12528
12529# qhasm: xmm11 |= xmm12
12530# asm 1: por <xmm12=int6464#13,<xmm11=int6464#9
12531# asm 2: por <xmm12=%xmm12,<xmm11=%xmm8
12532por %xmm12,%xmm8
12533
12534# qhasm: xmm15 ^= xmm8
12535# asm 1: pxor <xmm8=int6464#15,<xmm15=int6464#16
12536# asm 2: pxor <xmm8=%xmm14,<xmm15=%xmm15
12537pxor %xmm14,%xmm15
12538
12539# qhasm: xmm14 &= xmm12
12540# asm 1: pand <xmm12=int6464#13,<xmm14=int6464#14
12541# asm 2: pand <xmm12=%xmm12,<xmm14=%xmm13
12542pand %xmm12,%xmm13
12543
12544# qhasm: xmm8 &= xmm9
12545# asm 1: pand <xmm9=int6464#11,<xmm8=int6464#15
12546# asm 2: pand <xmm9=%xmm10,<xmm8=%xmm14
12547pand %xmm10,%xmm14
12548
12549# qhasm: xmm12 ^= xmm9
12550# asm 1: pxor <xmm9=int6464#11,<xmm12=int6464#13
12551# asm 2: pxor <xmm9=%xmm10,<xmm12=%xmm12
12552pxor %xmm10,%xmm12
12553
12554# qhasm: xmm15 &= xmm12
12555# asm 1: pand <xmm12=int6464#13,<xmm15=int6464#16
12556# asm 2: pand <xmm12=%xmm12,<xmm15=%xmm15
12557pand %xmm12,%xmm15
12558
12559# qhasm: xmm12 = xmm6
12560# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#11
12561# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm10
12562movdqa %xmm6,%xmm10
12563
12564# qhasm: xmm12 ^= xmm0
12565# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#11
12566# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm10
12567pxor %xmm0,%xmm10
12568
12569# qhasm: xmm13 &= xmm12
12570# asm 1: pand <xmm12=int6464#11,<xmm13=int6464#12
12571# asm 2: pand <xmm12=%xmm10,<xmm13=%xmm11
12572pand %xmm10,%xmm11
12573
12574# qhasm: xmm11 ^= xmm13
12575# asm 1: pxor <xmm13=int6464#12,<xmm11=int6464#9
12576# asm 2: pxor <xmm13=%xmm11,<xmm11=%xmm8
12577pxor %xmm11,%xmm8
12578
12579# qhasm: xmm10 ^= xmm13
12580# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
12581# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
12582pxor %xmm11,%xmm9
12583
12584# qhasm: xmm13 = xmm5
12585# asm 1: movdqa <xmm5=int6464#6,>xmm13=int6464#11
12586# asm 2: movdqa <xmm5=%xmm5,>xmm13=%xmm10
12587movdqa %xmm5,%xmm10
12588
12589# qhasm: xmm13 ^= xmm1
12590# asm 1: pxor <xmm1=int6464#2,<xmm13=int6464#11
12591# asm 2: pxor <xmm1=%xmm1,<xmm13=%xmm10
12592pxor %xmm1,%xmm10
12593
12594# qhasm: xmm12 = xmm7
12595# asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#12
12596# asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm11
12597movdqa %xmm7,%xmm11
12598
12599# qhasm: xmm9 = xmm13
12600# asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13
12601# asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12
12602movdqa %xmm10,%xmm12
12603
12604# qhasm: xmm12 ^= xmm2
12605# asm 1: pxor <xmm2=int6464#3,<xmm12=int6464#12
12606# asm 2: pxor <xmm2=%xmm2,<xmm12=%xmm11
12607pxor %xmm2,%xmm11
12608
12609# qhasm: xmm9 |= xmm12
12610# asm 1: por <xmm12=int6464#12,<xmm9=int6464#13
12611# asm 2: por <xmm12=%xmm11,<xmm9=%xmm12
12612por %xmm11,%xmm12
12613
12614# qhasm: xmm13 &= xmm12
12615# asm 1: pand <xmm12=int6464#12,<xmm13=int6464#11
12616# asm 2: pand <xmm12=%xmm11,<xmm13=%xmm10
12617pand %xmm11,%xmm10
12618
12619# qhasm: xmm8 ^= xmm13
12620# asm 1: pxor <xmm13=int6464#11,<xmm8=int6464#15
12621# asm 2: pxor <xmm13=%xmm10,<xmm8=%xmm14
12622pxor %xmm10,%xmm14
12623
12624# qhasm: xmm11 ^= xmm15
12625# asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#9
12626# asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm8
12627pxor %xmm15,%xmm8
12628
12629# qhasm: xmm10 ^= xmm14
12630# asm 1: pxor <xmm14=int6464#14,<xmm10=int6464#10
12631# asm 2: pxor <xmm14=%xmm13,<xmm10=%xmm9
12632pxor %xmm13,%xmm9
12633
12634# qhasm: xmm9 ^= xmm15
12635# asm 1: pxor <xmm15=int6464#16,<xmm9=int6464#13
12636# asm 2: pxor <xmm15=%xmm15,<xmm9=%xmm12
12637pxor %xmm15,%xmm12
12638
12639# qhasm: xmm8 ^= xmm14
12640# asm 1: pxor <xmm14=int6464#14,<xmm8=int6464#15
12641# asm 2: pxor <xmm14=%xmm13,<xmm8=%xmm14
12642pxor %xmm13,%xmm14
12643
12644# qhasm: xmm9 ^= xmm14
12645# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
12646# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
12647pxor %xmm13,%xmm12
12648
12649# qhasm: xmm12 = xmm4
12650# asm 1: movdqa <xmm4=int6464#5,>xmm12=int6464#11
12651# asm 2: movdqa <xmm4=%xmm4,>xmm12=%xmm10
12652movdqa %xmm4,%xmm10
12653
12654# qhasm: xmm13 = xmm3
12655# asm 1: movdqa <xmm3=int6464#4,>xmm13=int6464#12
12656# asm 2: movdqa <xmm3=%xmm3,>xmm13=%xmm11
12657movdqa %xmm3,%xmm11
12658
12659# qhasm: xmm14 = xmm1
12660# asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14
12661# asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13
12662movdqa %xmm1,%xmm13
12663
12664# qhasm: xmm15 = xmm5
12665# asm 1: movdqa <xmm5=int6464#6,>xmm15=int6464#16
12666# asm 2: movdqa <xmm5=%xmm5,>xmm15=%xmm15
12667movdqa %xmm5,%xmm15
12668
12669# qhasm: xmm12 &= xmm6
12670# asm 1: pand <xmm6=int6464#7,<xmm12=int6464#11
12671# asm 2: pand <xmm6=%xmm6,<xmm12=%xmm10
12672pand %xmm6,%xmm10
12673
12674# qhasm: xmm13 &= xmm0
12675# asm 1: pand <xmm0=int6464#1,<xmm13=int6464#12
12676# asm 2: pand <xmm0=%xmm0,<xmm13=%xmm11
12677pand %xmm0,%xmm11
12678
12679# qhasm: xmm14 &= xmm7
12680# asm 1: pand <xmm7=int6464#8,<xmm14=int6464#14
12681# asm 2: pand <xmm7=%xmm7,<xmm14=%xmm13
12682pand %xmm7,%xmm13
12683
12684# qhasm: xmm15 |= xmm2
12685# asm 1: por <xmm2=int6464#3,<xmm15=int6464#16
12686# asm 2: por <xmm2=%xmm2,<xmm15=%xmm15
12687por %xmm2,%xmm15
12688
12689# qhasm: xmm11 ^= xmm12
12690# asm 1: pxor <xmm12=int6464#11,<xmm11=int6464#9
12691# asm 2: pxor <xmm12=%xmm10,<xmm11=%xmm8
12692pxor %xmm10,%xmm8
12693
12694# qhasm: xmm10 ^= xmm13
12695# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
12696# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
12697pxor %xmm11,%xmm9
12698
12699# qhasm: xmm9 ^= xmm14
12700# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
12701# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
12702pxor %xmm13,%xmm12
12703
12704# qhasm: xmm8 ^= xmm15
12705# asm 1: pxor <xmm15=int6464#16,<xmm8=int6464#15
12706# asm 2: pxor <xmm15=%xmm15,<xmm8=%xmm14
12707pxor %xmm15,%xmm14
12708
12709# qhasm: xmm12 = xmm11
12710# asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11
12711# asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10
12712movdqa %xmm8,%xmm10
12713
12714# qhasm: xmm12 ^= xmm10
12715# asm 1: pxor <xmm10=int6464#10,<xmm12=int6464#11
12716# asm 2: pxor <xmm10=%xmm9,<xmm12=%xmm10
12717pxor %xmm9,%xmm10
12718
12719# qhasm: xmm11 &= xmm9
12720# asm 1: pand <xmm9=int6464#13,<xmm11=int6464#9
12721# asm 2: pand <xmm9=%xmm12,<xmm11=%xmm8
12722pand %xmm12,%xmm8
12723
12724# qhasm: xmm14 = xmm8
12725# asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12
12726# asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11
12727movdqa %xmm14,%xmm11
12728
12729# qhasm: xmm14 ^= xmm11
12730# asm 1: pxor <xmm11=int6464#9,<xmm14=int6464#12
12731# asm 2: pxor <xmm11=%xmm8,<xmm14=%xmm11
12732pxor %xmm8,%xmm11
12733
12734# qhasm: xmm15 = xmm12
12735# asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14
12736# asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13
12737movdqa %xmm10,%xmm13
12738
12739# qhasm: xmm15 &= xmm14
12740# asm 1: pand <xmm14=int6464#12,<xmm15=int6464#14
12741# asm 2: pand <xmm14=%xmm11,<xmm15=%xmm13
12742pand %xmm11,%xmm13
12743
12744# qhasm: xmm15 ^= xmm10
12745# asm 1: pxor <xmm10=int6464#10,<xmm15=int6464#14
12746# asm 2: pxor <xmm10=%xmm9,<xmm15=%xmm13
12747pxor %xmm9,%xmm13
12748
12749# qhasm: xmm13 = xmm9
12750# asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16
12751# asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15
12752movdqa %xmm12,%xmm15
12753
12754# qhasm: xmm13 ^= xmm8
12755# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
12756# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
12757pxor %xmm14,%xmm15
12758
12759# qhasm: xmm11 ^= xmm10
12760# asm 1: pxor <xmm10=int6464#10,<xmm11=int6464#9
12761# asm 2: pxor <xmm10=%xmm9,<xmm11=%xmm8
12762pxor %xmm9,%xmm8
12763
12764# qhasm: xmm13 &= xmm11
12765# asm 1: pand <xmm11=int6464#9,<xmm13=int6464#16
12766# asm 2: pand <xmm11=%xmm8,<xmm13=%xmm15
12767pand %xmm8,%xmm15
12768
12769# qhasm: xmm13 ^= xmm8
12770# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
12771# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
12772pxor %xmm14,%xmm15
12773
12774# qhasm: xmm9 ^= xmm13
12775# asm 1: pxor <xmm13=int6464#16,<xmm9=int6464#13
12776# asm 2: pxor <xmm13=%xmm15,<xmm9=%xmm12
12777pxor %xmm15,%xmm12
12778
12779# qhasm: xmm10 = xmm14
12780# asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9
12781# asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8
12782movdqa %xmm11,%xmm8
12783
12784# qhasm: xmm10 ^= xmm13
12785# asm 1: pxor <xmm13=int6464#16,<xmm10=int6464#9
12786# asm 2: pxor <xmm13=%xmm15,<xmm10=%xmm8
12787pxor %xmm15,%xmm8
12788
12789# qhasm: xmm10 &= xmm8
12790# asm 1: pand <xmm8=int6464#15,<xmm10=int6464#9
12791# asm 2: pand <xmm8=%xmm14,<xmm10=%xmm8
12792pand %xmm14,%xmm8
12793
12794# qhasm: xmm9 ^= xmm10
12795# asm 1: pxor <xmm10=int6464#9,<xmm9=int6464#13
12796# asm 2: pxor <xmm10=%xmm8,<xmm9=%xmm12
12797pxor %xmm8,%xmm12
12798
12799# qhasm: xmm14 ^= xmm10
12800# asm 1: pxor <xmm10=int6464#9,<xmm14=int6464#12
12801# asm 2: pxor <xmm10=%xmm8,<xmm14=%xmm11
12802pxor %xmm8,%xmm11
12803
12804# qhasm: xmm14 &= xmm15
12805# asm 1: pand <xmm15=int6464#14,<xmm14=int6464#12
12806# asm 2: pand <xmm15=%xmm13,<xmm14=%xmm11
12807pand %xmm13,%xmm11
12808
12809# qhasm: xmm14 ^= xmm12
12810# asm 1: pxor <xmm12=int6464#11,<xmm14=int6464#12
12811# asm 2: pxor <xmm12=%xmm10,<xmm14=%xmm11
12812pxor %xmm10,%xmm11
12813
12814# qhasm: xmm12 = xmm2
12815# asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#9
12816# asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm8
12817movdqa %xmm2,%xmm8
12818
12819# qhasm: xmm8 = xmm7
12820# asm 1: movdqa <xmm7=int6464#8,>xmm8=int6464#10
12821# asm 2: movdqa <xmm7=%xmm7,>xmm8=%xmm9
12822movdqa %xmm7,%xmm9
12823
12824# qhasm: xmm10 = xmm15
12825# asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11
12826# asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10
12827movdqa %xmm13,%xmm10
12828
12829# qhasm: xmm10 ^= xmm14
12830# asm 1: pxor <xmm14=int6464#12,<xmm10=int6464#11
12831# asm 2: pxor <xmm14=%xmm11,<xmm10=%xmm10
12832pxor %xmm11,%xmm10
12833
12834# qhasm: xmm10 &= xmm2
12835# asm 1: pand <xmm2=int6464#3,<xmm10=int6464#11
12836# asm 2: pand <xmm2=%xmm2,<xmm10=%xmm10
12837pand %xmm2,%xmm10
12838
12839# qhasm: xmm2 ^= xmm7
12840# asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3
12841# asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2
12842pxor %xmm7,%xmm2
12843
12844# qhasm: xmm2 &= xmm14
12845# asm 1: pand <xmm14=int6464#12,<xmm2=int6464#3
12846# asm 2: pand <xmm14=%xmm11,<xmm2=%xmm2
12847pand %xmm11,%xmm2
12848
12849# qhasm: xmm7 &= xmm15
12850# asm 1: pand <xmm15=int6464#14,<xmm7=int6464#8
12851# asm 2: pand <xmm15=%xmm13,<xmm7=%xmm7
12852pand %xmm13,%xmm7
12853
12854# qhasm: xmm2 ^= xmm7
12855# asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3
12856# asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2
12857pxor %xmm7,%xmm2
12858
12859# qhasm: xmm7 ^= xmm10
12860# asm 1: pxor <xmm10=int6464#11,<xmm7=int6464#8
12861# asm 2: pxor <xmm10=%xmm10,<xmm7=%xmm7
12862pxor %xmm10,%xmm7
12863
12864# qhasm: xmm12 ^= xmm0
12865# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#9
12866# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm8
12867pxor %xmm0,%xmm8
12868
12869# qhasm: xmm8 ^= xmm6
12870# asm 1: pxor <xmm6=int6464#7,<xmm8=int6464#10
12871# asm 2: pxor <xmm6=%xmm6,<xmm8=%xmm9
12872pxor %xmm6,%xmm9
12873
12874# qhasm: xmm15 ^= xmm13
12875# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
12876# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
12877pxor %xmm15,%xmm13
12878
12879# qhasm: xmm14 ^= xmm9
12880# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
12881# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
12882pxor %xmm12,%xmm11
12883
12884# qhasm: xmm11 = xmm15
12885# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
12886# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
12887movdqa %xmm13,%xmm10
12888
12889# qhasm: xmm11 ^= xmm14
12890# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
12891# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
12892pxor %xmm11,%xmm10
12893
12894# qhasm: xmm11 &= xmm12
12895# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
12896# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
12897pand %xmm8,%xmm10
12898
12899# qhasm: xmm12 ^= xmm8
12900# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
12901# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
12902pxor %xmm9,%xmm8
12903
12904# qhasm: xmm12 &= xmm14
12905# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
12906# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
12907pand %xmm11,%xmm8
12908
12909# qhasm: xmm8 &= xmm15
12910# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
12911# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
12912pand %xmm13,%xmm9
12913
12914# qhasm: xmm8 ^= xmm12
12915# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
12916# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
12917pxor %xmm8,%xmm9
12918
12919# qhasm: xmm12 ^= xmm11
12920# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
12921# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
12922pxor %xmm10,%xmm8
12923
12924# qhasm: xmm10 = xmm13
12925# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
12926# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
12927movdqa %xmm15,%xmm10
12928
12929# qhasm: xmm10 ^= xmm9
12930# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
12931# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
12932pxor %xmm12,%xmm10
12933
12934# qhasm: xmm10 &= xmm0
12935# asm 1: pand <xmm0=int6464#1,<xmm10=int6464#11
12936# asm 2: pand <xmm0=%xmm0,<xmm10=%xmm10
12937pand %xmm0,%xmm10
12938
12939# qhasm: xmm0 ^= xmm6
12940# asm 1: pxor <xmm6=int6464#7,<xmm0=int6464#1
12941# asm 2: pxor <xmm6=%xmm6,<xmm0=%xmm0
12942pxor %xmm6,%xmm0
12943
12944# qhasm: xmm0 &= xmm9
12945# asm 1: pand <xmm9=int6464#13,<xmm0=int6464#1
12946# asm 2: pand <xmm9=%xmm12,<xmm0=%xmm0
12947pand %xmm12,%xmm0
12948
12949# qhasm: xmm6 &= xmm13
12950# asm 1: pand <xmm13=int6464#16,<xmm6=int6464#7
12951# asm 2: pand <xmm13=%xmm15,<xmm6=%xmm6
12952pand %xmm15,%xmm6
12953
12954# qhasm: xmm0 ^= xmm6
12955# asm 1: pxor <xmm6=int6464#7,<xmm0=int6464#1
12956# asm 2: pxor <xmm6=%xmm6,<xmm0=%xmm0
12957pxor %xmm6,%xmm0
12958
12959# qhasm: xmm6 ^= xmm10
12960# asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7
12961# asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6
12962pxor %xmm10,%xmm6
12963
12964# qhasm: xmm2 ^= xmm12
12965# asm 1: pxor <xmm12=int6464#9,<xmm2=int6464#3
12966# asm 2: pxor <xmm12=%xmm8,<xmm2=%xmm2
12967pxor %xmm8,%xmm2
12968
12969# qhasm: xmm0 ^= xmm12
12970# asm 1: pxor <xmm12=int6464#9,<xmm0=int6464#1
12971# asm 2: pxor <xmm12=%xmm8,<xmm0=%xmm0
12972pxor %xmm8,%xmm0
12973
12974# qhasm: xmm7 ^= xmm8
12975# asm 1: pxor <xmm8=int6464#10,<xmm7=int6464#8
12976# asm 2: pxor <xmm8=%xmm9,<xmm7=%xmm7
12977pxor %xmm9,%xmm7
12978
12979# qhasm: xmm6 ^= xmm8
12980# asm 1: pxor <xmm8=int6464#10,<xmm6=int6464#7
12981# asm 2: pxor <xmm8=%xmm9,<xmm6=%xmm6
12982pxor %xmm9,%xmm6
12983
12984# qhasm: xmm12 = xmm5
12985# asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#9
12986# asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm8
12987movdqa %xmm5,%xmm8
12988
12989# qhasm: xmm8 = xmm1
12990# asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10
12991# asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9
12992movdqa %xmm1,%xmm9
12993
12994# qhasm: xmm12 ^= xmm3
12995# asm 1: pxor <xmm3=int6464#4,<xmm12=int6464#9
12996# asm 2: pxor <xmm3=%xmm3,<xmm12=%xmm8
12997pxor %xmm3,%xmm8
12998
12999# qhasm: xmm8 ^= xmm4
13000# asm 1: pxor <xmm4=int6464#5,<xmm8=int6464#10
13001# asm 2: pxor <xmm4=%xmm4,<xmm8=%xmm9
13002pxor %xmm4,%xmm9
13003
13004# qhasm: xmm11 = xmm15
13005# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
13006# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
13007movdqa %xmm13,%xmm10
13008
13009# qhasm: xmm11 ^= xmm14
13010# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
13011# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
13012pxor %xmm11,%xmm10
13013
13014# qhasm: xmm11 &= xmm12
13015# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
13016# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
13017pand %xmm8,%xmm10
13018
13019# qhasm: xmm12 ^= xmm8
13020# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
13021# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
13022pxor %xmm9,%xmm8
13023
13024# qhasm: xmm12 &= xmm14
13025# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
13026# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
13027pand %xmm11,%xmm8
13028
13029# qhasm: xmm8 &= xmm15
13030# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
13031# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
13032pand %xmm13,%xmm9
13033
13034# qhasm: xmm8 ^= xmm12
13035# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
13036# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
13037pxor %xmm8,%xmm9
13038
13039# qhasm: xmm12 ^= xmm11
13040# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
13041# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
13042pxor %xmm10,%xmm8
13043
13044# qhasm: xmm10 = xmm13
13045# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
13046# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
13047movdqa %xmm15,%xmm10
13048
13049# qhasm: xmm10 ^= xmm9
13050# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
13051# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
13052pxor %xmm12,%xmm10
13053
13054# qhasm: xmm10 &= xmm3
13055# asm 1: pand <xmm3=int6464#4,<xmm10=int6464#11
13056# asm 2: pand <xmm3=%xmm3,<xmm10=%xmm10
13057pand %xmm3,%xmm10
13058
13059# qhasm: xmm3 ^= xmm4
13060# asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4
13061# asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3
13062pxor %xmm4,%xmm3
13063
13064# qhasm: xmm3 &= xmm9
13065# asm 1: pand <xmm9=int6464#13,<xmm3=int6464#4
13066# asm 2: pand <xmm9=%xmm12,<xmm3=%xmm3
13067pand %xmm12,%xmm3
13068
13069# qhasm: xmm4 &= xmm13
13070# asm 1: pand <xmm13=int6464#16,<xmm4=int6464#5
13071# asm 2: pand <xmm13=%xmm15,<xmm4=%xmm4
13072pand %xmm15,%xmm4
13073
13074# qhasm: xmm3 ^= xmm4
13075# asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4
13076# asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3
13077pxor %xmm4,%xmm3
13078
13079# qhasm: xmm4 ^= xmm10
13080# asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5
13081# asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4
13082pxor %xmm10,%xmm4
13083
13084# qhasm: xmm15 ^= xmm13
13085# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
13086# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
13087pxor %xmm15,%xmm13
13088
13089# qhasm: xmm14 ^= xmm9
13090# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
13091# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
13092pxor %xmm12,%xmm11
13093
13094# qhasm: xmm11 = xmm15
13095# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
13096# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
13097movdqa %xmm13,%xmm10
13098
13099# qhasm: xmm11 ^= xmm14
13100# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
13101# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
13102pxor %xmm11,%xmm10
13103
13104# qhasm: xmm11 &= xmm5
13105# asm 1: pand <xmm5=int6464#6,<xmm11=int6464#11
13106# asm 2: pand <xmm5=%xmm5,<xmm11=%xmm10
13107pand %xmm5,%xmm10
13108
13109# qhasm: xmm5 ^= xmm1
13110# asm 1: pxor <xmm1=int6464#2,<xmm5=int6464#6
13111# asm 2: pxor <xmm1=%xmm1,<xmm5=%xmm5
13112pxor %xmm1,%xmm5
13113
13114# qhasm: xmm5 &= xmm14
13115# asm 1: pand <xmm14=int6464#12,<xmm5=int6464#6
13116# asm 2: pand <xmm14=%xmm11,<xmm5=%xmm5
13117pand %xmm11,%xmm5
13118
13119# qhasm: xmm1 &= xmm15
13120# asm 1: pand <xmm15=int6464#14,<xmm1=int6464#2
13121# asm 2: pand <xmm15=%xmm13,<xmm1=%xmm1
13122pand %xmm13,%xmm1
13123
13124# qhasm: xmm5 ^= xmm1
13125# asm 1: pxor <xmm1=int6464#2,<xmm5=int6464#6
13126# asm 2: pxor <xmm1=%xmm1,<xmm5=%xmm5
13127pxor %xmm1,%xmm5
13128
13129# qhasm: xmm1 ^= xmm11
13130# asm 1: pxor <xmm11=int6464#11,<xmm1=int6464#2
13131# asm 2: pxor <xmm11=%xmm10,<xmm1=%xmm1
13132pxor %xmm10,%xmm1
13133
13134# qhasm: xmm5 ^= xmm12
13135# asm 1: pxor <xmm12=int6464#9,<xmm5=int6464#6
13136# asm 2: pxor <xmm12=%xmm8,<xmm5=%xmm5
13137pxor %xmm8,%xmm5
13138
13139# qhasm: xmm3 ^= xmm12
13140# asm 1: pxor <xmm12=int6464#9,<xmm3=int6464#4
13141# asm 2: pxor <xmm12=%xmm8,<xmm3=%xmm3
13142pxor %xmm8,%xmm3
13143
13144# qhasm: xmm1 ^= xmm8
13145# asm 1: pxor <xmm8=int6464#10,<xmm1=int6464#2
13146# asm 2: pxor <xmm8=%xmm9,<xmm1=%xmm1
13147pxor %xmm9,%xmm1
13148
13149# qhasm: xmm4 ^= xmm8
13150# asm 1: pxor <xmm8=int6464#10,<xmm4=int6464#5
13151# asm 2: pxor <xmm8=%xmm9,<xmm4=%xmm4
13152pxor %xmm9,%xmm4
13153
13154# qhasm: xmm5 ^= xmm0
13155# asm 1: pxor <xmm0=int6464#1,<xmm5=int6464#6
13156# asm 2: pxor <xmm0=%xmm0,<xmm5=%xmm5
13157pxor %xmm0,%xmm5
13158
13159# qhasm: xmm1 ^= xmm2
13160# asm 1: pxor <xmm2=int6464#3,<xmm1=int6464#2
13161# asm 2: pxor <xmm2=%xmm2,<xmm1=%xmm1
13162pxor %xmm2,%xmm1
13163
13164# qhasm: xmm3 ^= xmm5
13165# asm 1: pxor <xmm5=int6464#6,<xmm3=int6464#4
13166# asm 2: pxor <xmm5=%xmm5,<xmm3=%xmm3
13167pxor %xmm5,%xmm3
13168
13169# qhasm: xmm2 ^= xmm0
13170# asm 1: pxor <xmm0=int6464#1,<xmm2=int6464#3
13171# asm 2: pxor <xmm0=%xmm0,<xmm2=%xmm2
13172pxor %xmm0,%xmm2
13173
13174# qhasm: xmm0 ^= xmm1
13175# asm 1: pxor <xmm1=int6464#2,<xmm0=int6464#1
13176# asm 2: pxor <xmm1=%xmm1,<xmm0=%xmm0
13177pxor %xmm1,%xmm0
13178
13179# qhasm: xmm1 ^= xmm7
13180# asm 1: pxor <xmm7=int6464#8,<xmm1=int6464#2
13181# asm 2: pxor <xmm7=%xmm7,<xmm1=%xmm1
13182pxor %xmm7,%xmm1
13183
13184# qhasm: xmm7 ^= xmm4
13185# asm 1: pxor <xmm4=int6464#5,<xmm7=int6464#8
13186# asm 2: pxor <xmm4=%xmm4,<xmm7=%xmm7
13187pxor %xmm4,%xmm7
13188
13189# qhasm: xmm3 ^= xmm7
13190# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4
13191# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3
13192pxor %xmm7,%xmm3
13193
13194# qhasm: xmm4 ^= xmm6
13195# asm 1: pxor <xmm6=int6464#7,<xmm4=int6464#5
13196# asm 2: pxor <xmm6=%xmm6,<xmm4=%xmm4
13197pxor %xmm6,%xmm4
13198
13199# qhasm: xmm6 ^= xmm7
13200# asm 1: pxor <xmm7=int6464#8,<xmm6=int6464#7
13201# asm 2: pxor <xmm7=%xmm7,<xmm6=%xmm6
13202pxor %xmm7,%xmm6
13203
13204# qhasm: xmm2 ^= xmm6
13205# asm 1: pxor <xmm6=int6464#7,<xmm2=int6464#3
13206# asm 2: pxor <xmm6=%xmm6,<xmm2=%xmm2
13207pxor %xmm6,%xmm2
13208
13209# qhasm: xmm1 ^= RCON
13210# asm 1: pxor RCON,<xmm1=int6464#2
13211# asm 2: pxor RCON,<xmm1=%xmm1
13212pxor RCON,%xmm1
13213
13214# qhasm: xmm3 ^= RCON
13215# asm 1: pxor RCON,<xmm3=int6464#4
13216# asm 2: pxor RCON,<xmm3=%xmm3
13217pxor RCON,%xmm3
13218
13219# qhasm: xmm6 ^= RCON
13220# asm 1: pxor RCON,<xmm6=int6464#7
13221# asm 2: pxor RCON,<xmm6=%xmm6
13222pxor RCON,%xmm6
13223
13224# qhasm: xmm5 ^= RCON
13225# asm 1: pxor RCON,<xmm5=int6464#6
13226# asm 2: pxor RCON,<xmm5=%xmm5
13227pxor RCON,%xmm5
13228
13229# qhasm: shuffle bytes of xmm0 by EXPB0
13230# asm 1: pshufb EXPB0,<xmm0=int6464#1
13231# asm 2: pshufb EXPB0,<xmm0=%xmm0
13232pshufb EXPB0,%xmm0
13233
13234# qhasm: shuffle bytes of xmm1 by EXPB0
13235# asm 1: pshufb EXPB0,<xmm1=int6464#2
13236# asm 2: pshufb EXPB0,<xmm1=%xmm1
13237pshufb EXPB0,%xmm1
13238
13239# qhasm: shuffle bytes of xmm3 by EXPB0
13240# asm 1: pshufb EXPB0,<xmm3=int6464#4
13241# asm 2: pshufb EXPB0,<xmm3=%xmm3
13242pshufb EXPB0,%xmm3
13243
13244# qhasm: shuffle bytes of xmm2 by EXPB0
13245# asm 1: pshufb EXPB0,<xmm2=int6464#3
13246# asm 2: pshufb EXPB0,<xmm2=%xmm2
13247pshufb EXPB0,%xmm2
13248
13249# qhasm: shuffle bytes of xmm6 by EXPB0
13250# asm 1: pshufb EXPB0,<xmm6=int6464#7
13251# asm 2: pshufb EXPB0,<xmm6=%xmm6
13252pshufb EXPB0,%xmm6
13253
13254# qhasm: shuffle bytes of xmm5 by EXPB0
13255# asm 1: pshufb EXPB0,<xmm5=int6464#6
13256# asm 2: pshufb EXPB0,<xmm5=%xmm5
13257pshufb EXPB0,%xmm5
13258
13259# qhasm: shuffle bytes of xmm4 by EXPB0
13260# asm 1: pshufb EXPB0,<xmm4=int6464#5
13261# asm 2: pshufb EXPB0,<xmm4=%xmm4
13262pshufb EXPB0,%xmm4
13263
13264# qhasm: shuffle bytes of xmm7 by EXPB0
13265# asm 1: pshufb EXPB0,<xmm7=int6464#8
13266# asm 2: pshufb EXPB0,<xmm7=%xmm7
13267pshufb EXPB0,%xmm7
13268
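# Note: the long pxor/pand/por network above is the authors' bitsliced AES
# S-box applied to the bit-planes of the word being expanded; the RCON xors
# (all-ones in the top 32-bit word only) appear to add the round constant,
# and the EXPB0 shuffle broadcasts the top byte of each 32-bit word across
# that word, setting up the running-xor recurrence of the AES-128 key
# schedule.  In conventional terms (illustrative only):
#
#   temp      = SubWord(RotWord(w[4i+3])) ^ rcon[i]
#   w[4i+4+j] = temp ^ w[4i] ^ w[4i+1] ^ ... ^ w[4i+j]    (j = 0..3)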
13269# qhasm: xmm8 = *(int128 *)(c + 1152)
13270# asm 1: movdqa 1152(<c=int64#1),>xmm8=int6464#9
13271# asm 2: movdqa 1152(<c=%rdi),>xmm8=%xmm8
13272movdqa 1152(%rdi),%xmm8
13273
13274# qhasm: xmm9 = *(int128 *)(c + 1168)
13275# asm 1: movdqa 1168(<c=int64#1),>xmm9=int6464#10
13276# asm 2: movdqa 1168(<c=%rdi),>xmm9=%xmm9
13277movdqa 1168(%rdi),%xmm9
13278
13279# qhasm: xmm10 = *(int128 *)(c + 1184)
13280# asm 1: movdqa 1184(<c=int64#1),>xmm10=int6464#11
13281# asm 2: movdqa 1184(<c=%rdi),>xmm10=%xmm10
13282movdqa 1184(%rdi),%xmm10
13283
13284# qhasm: xmm11 = *(int128 *)(c + 1200)
13285# asm 1: movdqa 1200(<c=int64#1),>xmm11=int6464#12
13286# asm 2: movdqa 1200(<c=%rdi),>xmm11=%xmm11
13287movdqa 1200(%rdi),%xmm11
13288
13289# qhasm: xmm12 = *(int128 *)(c + 1216)
13290# asm 1: movdqa 1216(<c=int64#1),>xmm12=int6464#13
13291# asm 2: movdqa 1216(<c=%rdi),>xmm12=%xmm12
13292movdqa 1216(%rdi),%xmm12
13293
13294# qhasm: xmm13 = *(int128 *)(c + 1232)
13295# asm 1: movdqa 1232(<c=int64#1),>xmm13=int6464#14
13296# asm 2: movdqa 1232(<c=%rdi),>xmm13=%xmm13
13297movdqa 1232(%rdi),%xmm13
13298
13299# qhasm: xmm14 = *(int128 *)(c + 1248)
13300# asm 1: movdqa 1248(<c=int64#1),>xmm14=int6464#15
13301# asm 2: movdqa 1248(<c=%rdi),>xmm14=%xmm14
13302movdqa 1248(%rdi),%xmm14
13303
13304# qhasm: xmm15 = *(int128 *)(c + 1264)
13305# asm 1: movdqa 1264(<c=int64#1),>xmm15=int6464#16
13306# asm 2: movdqa 1264(<c=%rdi),>xmm15=%xmm15
13307movdqa 1264(%rdi),%xmm15
13308
13309# qhasm: xmm8 ^= ONE
13310# asm 1: pxor ONE,<xmm8=int6464#9
13311# asm 2: pxor ONE,<xmm8=%xmm8
13312pxor ONE,%xmm8
13313
13314# qhasm: xmm9 ^= ONE
13315# asm 1: pxor ONE,<xmm9=int6464#10
13316# asm 2: pxor ONE,<xmm9=%xmm9
13317pxor ONE,%xmm9
13318
13319# qhasm: xmm13 ^= ONE
13320# asm 1: pxor ONE,<xmm13=int6464#14
13321# asm 2: pxor ONE,<xmm13=%xmm13
13322pxor ONE,%xmm13
13323
13324# qhasm: xmm14 ^= ONE
13325# asm 1: pxor ONE,<xmm14=int6464#15
13326# asm 2: pxor ONE,<xmm14=%xmm14
13327pxor ONE,%xmm14
13328
13329# qhasm: xmm0 ^= xmm8
13330# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
13331# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
13332pxor %xmm8,%xmm0
13333
13334# qhasm: xmm1 ^= xmm9
13335# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
13336# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
13337pxor %xmm9,%xmm1
13338
13339# qhasm: xmm3 ^= xmm10
13340# asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4
13341# asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3
13342pxor %xmm10,%xmm3
13343
13344# qhasm: xmm2 ^= xmm11
13345# asm 1: pxor <xmm11=int6464#12,<xmm2=int6464#3
13346# asm 2: pxor <xmm11=%xmm11,<xmm2=%xmm2
13347pxor %xmm11,%xmm2
13348
13349# qhasm: xmm6 ^= xmm12
13350# asm 1: pxor <xmm12=int6464#13,<xmm6=int6464#7
13351# asm 2: pxor <xmm12=%xmm12,<xmm6=%xmm6
13352pxor %xmm12,%xmm6
13353
13354# qhasm: xmm5 ^= xmm13
13355# asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6
13356# asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5
13357pxor %xmm13,%xmm5
13358
13359# qhasm: xmm4 ^= xmm14
13360# asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5
13361# asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4
13362pxor %xmm14,%xmm4
13363
13364# qhasm: xmm7 ^= xmm15
13365# asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8
13366# asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7
13367pxor %xmm15,%xmm7
13368
13369# qhasm: uint32323232 xmm8 >>= 8
13370# asm 1: psrld $8,<xmm8=int6464#9
13371# asm 2: psrld $8,<xmm8=%xmm8
13372psrld $8,%xmm8
13373
13374# qhasm: uint32323232 xmm9 >>= 8
13375# asm 1: psrld $8,<xmm9=int6464#10
13376# asm 2: psrld $8,<xmm9=%xmm9
13377psrld $8,%xmm9
13378
13379# qhasm: uint32323232 xmm10 >>= 8
13380# asm 1: psrld $8,<xmm10=int6464#11
13381# asm 2: psrld $8,<xmm10=%xmm10
13382psrld $8,%xmm10
13383
13384# qhasm: uint32323232 xmm11 >>= 8
13385# asm 1: psrld $8,<xmm11=int6464#12
13386# asm 2: psrld $8,<xmm11=%xmm11
13387psrld $8,%xmm11
13388
13389# qhasm: uint32323232 xmm12 >>= 8
13390# asm 1: psrld $8,<xmm12=int6464#13
13391# asm 2: psrld $8,<xmm12=%xmm12
13392psrld $8,%xmm12
13393
13394# qhasm: uint32323232 xmm13 >>= 8
13395# asm 1: psrld $8,<xmm13=int6464#14
13396# asm 2: psrld $8,<xmm13=%xmm13
13397psrld $8,%xmm13
13398
13399# qhasm: uint32323232 xmm14 >>= 8
13400# asm 1: psrld $8,<xmm14=int6464#15
13401# asm 2: psrld $8,<xmm14=%xmm14
13402psrld $8,%xmm14
13403
13404# qhasm: uint32323232 xmm15 >>= 8
13405# asm 1: psrld $8,<xmm15=int6464#16
13406# asm 2: psrld $8,<xmm15=%xmm15
13407psrld $8,%xmm15
13408
13409# qhasm: xmm0 ^= xmm8
13410# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
13411# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
13412pxor %xmm8,%xmm0
13413
13414# qhasm: xmm1 ^= xmm9
13415# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
13416# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
13417pxor %xmm9,%xmm1
13418
13419# qhasm: xmm3 ^= xmm10
13420# asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4
13421# asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3
13422pxor %xmm10,%xmm3
13423
13424# qhasm: xmm2 ^= xmm11
13425# asm 1: pxor <xmm11=int6464#12,<xmm2=int6464#3
13426# asm 2: pxor <xmm11=%xmm11,<xmm2=%xmm2
13427pxor %xmm11,%xmm2
13428
13429# qhasm: xmm6 ^= xmm12
13430# asm 1: pxor <xmm12=int6464#13,<xmm6=int6464#7
13431# asm 2: pxor <xmm12=%xmm12,<xmm6=%xmm6
13432pxor %xmm12,%xmm6
13433
13434# qhasm: xmm5 ^= xmm13
13435# asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6
13436# asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5
13437pxor %xmm13,%xmm5
13438
13439# qhasm: xmm4 ^= xmm14
13440# asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5
13441# asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4
13442pxor %xmm14,%xmm4
13443
13444# qhasm: xmm7 ^= xmm15
13445# asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8
13446# asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7
13447pxor %xmm15,%xmm7
13448
13449# qhasm: uint32323232 xmm8 >>= 8
13450# asm 1: psrld $8,<xmm8=int6464#9
13451# asm 2: psrld $8,<xmm8=%xmm8
13452psrld $8,%xmm8
13453
13454# qhasm: uint32323232 xmm9 >>= 8
13455# asm 1: psrld $8,<xmm9=int6464#10
13456# asm 2: psrld $8,<xmm9=%xmm9
13457psrld $8,%xmm9
13458
13459# qhasm: uint32323232 xmm10 >>= 8
13460# asm 1: psrld $8,<xmm10=int6464#11
13461# asm 2: psrld $8,<xmm10=%xmm10
13462psrld $8,%xmm10
13463
13464# qhasm: uint32323232 xmm11 >>= 8
13465# asm 1: psrld $8,<xmm11=int6464#12
13466# asm 2: psrld $8,<xmm11=%xmm11
13467psrld $8,%xmm11
13468
13469# qhasm: uint32323232 xmm12 >>= 8
13470# asm 1: psrld $8,<xmm12=int6464#13
13471# asm 2: psrld $8,<xmm12=%xmm12
13472psrld $8,%xmm12
13473
13474# qhasm: uint32323232 xmm13 >>= 8
13475# asm 1: psrld $8,<xmm13=int6464#14
13476# asm 2: psrld $8,<xmm13=%xmm13
13477psrld $8,%xmm13
13478
13479# qhasm: uint32323232 xmm14 >>= 8
13480# asm 1: psrld $8,<xmm14=int6464#15
13481# asm 2: psrld $8,<xmm14=%xmm14
13482psrld $8,%xmm14
13483
13484# qhasm: uint32323232 xmm15 >>= 8
13485# asm 1: psrld $8,<xmm15=int6464#16
13486# asm 2: psrld $8,<xmm15=%xmm15
13487psrld $8,%xmm15
13488
13489# qhasm: xmm0 ^= xmm8
13490# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
13491# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
13492pxor %xmm8,%xmm0
13493
13494# qhasm: xmm1 ^= xmm9
13495# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
13496# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
13497pxor %xmm9,%xmm1
13498
13499# qhasm: xmm3 ^= xmm10
13500# asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4
13501# asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3
13502pxor %xmm10,%xmm3
13503
13504# qhasm: xmm2 ^= xmm11
13505# asm 1: pxor <xmm11=int6464#12,<xmm2=int6464#3
13506# asm 2: pxor <xmm11=%xmm11,<xmm2=%xmm2
13507pxor %xmm11,%xmm2
13508
13509# qhasm: xmm6 ^= xmm12
13510# asm 1: pxor <xmm12=int6464#13,<xmm6=int6464#7
13511# asm 2: pxor <xmm12=%xmm12,<xmm6=%xmm6
13512pxor %xmm12,%xmm6
13513
13514# qhasm: xmm5 ^= xmm13
13515# asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6
13516# asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5
13517pxor %xmm13,%xmm5
13518
13519# qhasm: xmm4 ^= xmm14
13520# asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5
13521# asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4
13522pxor %xmm14,%xmm4
13523
13524# qhasm: xmm7 ^= xmm15
13525# asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8
13526# asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7
13527pxor %xmm15,%xmm7
13528
13529# qhasm: uint32323232 xmm8 >>= 8
13530# asm 1: psrld $8,<xmm8=int6464#9
13531# asm 2: psrld $8,<xmm8=%xmm8
13532psrld $8,%xmm8
13533
13534# qhasm: uint32323232 xmm9 >>= 8
13535# asm 1: psrld $8,<xmm9=int6464#10
13536# asm 2: psrld $8,<xmm9=%xmm9
13537psrld $8,%xmm9
13538
13539# qhasm: uint32323232 xmm10 >>= 8
13540# asm 1: psrld $8,<xmm10=int6464#11
13541# asm 2: psrld $8,<xmm10=%xmm10
13542psrld $8,%xmm10
13543
13544# qhasm: uint32323232 xmm11 >>= 8
13545# asm 1: psrld $8,<xmm11=int6464#12
13546# asm 2: psrld $8,<xmm11=%xmm11
13547psrld $8,%xmm11
13548
13549# qhasm: uint32323232 xmm12 >>= 8
13550# asm 1: psrld $8,<xmm12=int6464#13
13551# asm 2: psrld $8,<xmm12=%xmm12
13552psrld $8,%xmm12
13553
13554# qhasm: uint32323232 xmm13 >>= 8
13555# asm 1: psrld $8,<xmm13=int6464#14
13556# asm 2: psrld $8,<xmm13=%xmm13
13557psrld $8,%xmm13
13558
13559# qhasm: uint32323232 xmm14 >>= 8
13560# asm 1: psrld $8,<xmm14=int6464#15
13561# asm 2: psrld $8,<xmm14=%xmm14
13562psrld $8,%xmm14
13563
13564# qhasm: uint32323232 xmm15 >>= 8
13565# asm 1: psrld $8,<xmm15=int6464#16
13566# asm 2: psrld $8,<xmm15=%xmm15
13567psrld $8,%xmm15
13568
13569# qhasm: xmm0 ^= xmm8
13570# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
13571# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
13572pxor %xmm8,%xmm0
13573
13574# qhasm: xmm1 ^= xmm9
13575# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
13576# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
13577pxor %xmm9,%xmm1
13578
13579# qhasm: xmm3 ^= xmm10
13580# asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4
13581# asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3
13582pxor %xmm10,%xmm3
13583
13584# qhasm: xmm2 ^= xmm11
13585# asm 1: pxor <xmm11=int6464#12,<xmm2=int6464#3
13586# asm 2: pxor <xmm11=%xmm11,<xmm2=%xmm2
13587pxor %xmm11,%xmm2
13588
13589# qhasm: xmm6 ^= xmm12
13590# asm 1: pxor <xmm12=int6464#13,<xmm6=int6464#7
13591# asm 2: pxor <xmm12=%xmm12,<xmm6=%xmm6
13592pxor %xmm12,%xmm6
13593
13594# qhasm: xmm5 ^= xmm13
13595# asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6
13596# asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5
13597pxor %xmm13,%xmm5
13598
13599# qhasm: xmm4 ^= xmm14
13600# asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5
13601# asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4
13602pxor %xmm14,%xmm4
13603
13604# qhasm: xmm7 ^= xmm15
13605# asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8
13606# asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7
13607pxor %xmm15,%xmm7
13608
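# Note: per 32-bit lane, the xor/shift cascade above computes
# x ^= k ^ (k >> 8) ^ (k >> 16) ^ (k >> 24), a running xor across the four
# bytes of each word of the reloaded round key; this looks like the
# cumulative xor w[4i+4+j] = temp ^ w[4i] ^ ... ^ w[4i+j] of the key
# schedule, evaluated on all eight bit-planes at once.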
13609# qhasm: shuffle bytes of xmm0 by M0
13610# asm 1: pshufb M0,<xmm0=int6464#1
13611# asm 2: pshufb M0,<xmm0=%xmm0
13612pshufb M0,%xmm0
13613
13614# qhasm: shuffle bytes of xmm1 by M0
13615# asm 1: pshufb M0,<xmm1=int6464#2
13616# asm 2: pshufb M0,<xmm1=%xmm1
13617pshufb M0,%xmm1
13618
13619# qhasm: shuffle bytes of xmm4 by M0
13620# asm 1: pshufb M0,<xmm4=int6464#5
13621# asm 2: pshufb M0,<xmm4=%xmm4
13622pshufb M0,%xmm4
13623
13624# qhasm: shuffle bytes of xmm6 by M0
13625# asm 1: pshufb M0,<xmm6=int6464#7
13626# asm 2: pshufb M0,<xmm6=%xmm6
13627pshufb M0,%xmm6
13628
13629# qhasm: shuffle bytes of xmm3 by M0
13630# asm 1: pshufb M0,<xmm3=int6464#4
13631# asm 2: pshufb M0,<xmm3=%xmm3
13632pshufb M0,%xmm3
13633
13634# qhasm: shuffle bytes of xmm7 by M0
13635# asm 1: pshufb M0,<xmm7=int6464#8
13636# asm 2: pshufb M0,<xmm7=%xmm7
13637pshufb M0,%xmm7
13638
13639# qhasm: shuffle bytes of xmm2 by M0
13640# asm 1: pshufb M0,<xmm2=int6464#3
13641# asm 2: pshufb M0,<xmm2=%xmm2
13642pshufb M0,%xmm2
13643
13644# qhasm: shuffle bytes of xmm5 by M0
13645# asm 1: pshufb M0,<xmm5=int6464#6
13646# asm 2: pshufb M0,<xmm5=%xmm5
13647pshufb M0,%xmm5
13648
13649# qhasm: *(int128 *)(c + 1280) = xmm0
13650# asm 1: movdqa <xmm0=int6464#1,1280(<c=int64#1)
13651# asm 2: movdqa <xmm0=%xmm0,1280(<c=%rdi)
13652movdqa %xmm0,1280(%rdi)
13653
13654# qhasm: *(int128 *)(c + 1296) = xmm1
13655# asm 1: movdqa <xmm1=int6464#2,1296(<c=int64#1)
13656# asm 2: movdqa <xmm1=%xmm1,1296(<c=%rdi)
13657movdqa %xmm1,1296(%rdi)
13658
13659# qhasm: *(int128 *)(c + 1312) = xmm3
13660# asm 1: movdqa <xmm3=int6464#4,1312(<c=int64#1)
13661# asm 2: movdqa <xmm3=%xmm3,1312(<c=%rdi)
13662movdqa %xmm3,1312(%rdi)
13663
13664# qhasm: *(int128 *)(c + 1328) = xmm2
13665# asm 1: movdqa <xmm2=int6464#3,1328(<c=int64#1)
13666# asm 2: movdqa <xmm2=%xmm2,1328(<c=%rdi)
13667movdqa %xmm2,1328(%rdi)
13668
13669# qhasm: *(int128 *)(c + 1344) = xmm6
13670# asm 1: movdqa <xmm6=int6464#7,1344(<c=int64#1)
13671# asm 2: movdqa <xmm6=%xmm6,1344(<c=%rdi)
13672movdqa %xmm6,1344(%rdi)
13673
13674# qhasm: *(int128 *)(c + 1360) = xmm5
13675# asm 1: movdqa <xmm5=int6464#6,1360(<c=int64#1)
13676# asm 2: movdqa <xmm5=%xmm5,1360(<c=%rdi)
13677movdqa %xmm5,1360(%rdi)
13678
13679# qhasm: *(int128 *)(c + 1376) = xmm4
13680# asm 1: movdqa <xmm4=int6464#5,1376(<c=int64#1)
13681# asm 2: movdqa <xmm4=%xmm4,1376(<c=%rdi)
13682movdqa %xmm4,1376(%rdi)
13683
13684# qhasm: *(int128 *)(c + 1392) = xmm7
13685# asm 1: movdqa <xmm7=int6464#8,1392(<c=int64#1)
13686# asm 2: movdqa <xmm7=%xmm7,1392(<c=%rdi)
13687movdqa %xmm7,1392(%rdi)
13688
13689# qhasm: leave
13690add %r11,%rsp
13691mov %rdi,%rax
13692mov %rsi,%rdx
13693xor %rax,%rax
13694ret
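# Note: beforenm returns having written 11 bitsliced round keys of 128
# bytes each, 1408 bytes in total -- presumably crypto_stream_BEFORENMBYTES
# as used by the C wrappers below; the closing mov/xor sequence clears rax,
# i.e. the function returns 0.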
diff --git a/nacl/crypto_stream/aes128ctr/core2/stream.c b/nacl/crypto_stream/aes128ctr/core2/stream.c
new file mode 100644
index 00000000..53524a62
--- /dev/null
+++ b/nacl/crypto_stream/aes128ctr/core2/stream.c
@@ -0,0 +1,14 @@
1#include "crypto_stream.h"
2
3int crypto_stream(
4 unsigned char *out,
5 unsigned long long outlen,
6 const unsigned char *n,
7 const unsigned char *k
8 )
9{
10 unsigned char d[crypto_stream_BEFORENMBYTES];
11 crypto_stream_beforenm(d, k);
12 crypto_stream_afternm(out, outlen, n, d);
13 return 0;
14}
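The wrapper above redoes the full bitsliced key expansion on every call; the
split beforenm/afternm API exists so that this cost can be paid once per key.
A minimal sketch of the amortized pattern against the same crypto_stream.h
interface (keystream_init/keystream are illustrative names, not part of NaCl):

#include "crypto_stream.h"

static unsigned char d[crypto_stream_BEFORENMBYTES];

/* Expand the key once. */
void keystream_init(const unsigned char *k)
{
  crypto_stream_beforenm(d, k);
}

/* Then derive keystream under any number of nonces. */
void keystream(unsigned char *out, unsigned long long outlen,
               const unsigned char *n)
{
  crypto_stream_afternm(out, outlen, n, d);
}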
diff --git a/nacl/crypto_stream/aes128ctr/core2/xor.c b/nacl/crypto_stream/aes128ctr/core2/xor.c
new file mode 100644
index 00000000..825088cc
--- /dev/null
+++ b/nacl/crypto_stream/aes128ctr/core2/xor.c
@@ -0,0 +1,15 @@
1#include "crypto_stream.h"
2
3int crypto_stream_xor(
4 unsigned char *out,
5 const unsigned char *in,
6 unsigned long long inlen,
7 const unsigned char *n,
8 const unsigned char *k
9 )
10{
11 unsigned char d[crypto_stream_BEFORENMBYTES];
12 crypto_stream_beforenm(d, k);
13 crypto_stream_xor_afternm(out, in, inlen, n, d);
14 return 0;
15}
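Because CTR mode xors a key-dependent keystream into the data,
crypto_stream_xor is its own inverse: the same call encrypts and decrypts.
A self-contained round-trip check (a sketch; returns 0 on success):

#include <string.h>
#include "crypto_stream.h"

int roundtrip(void)
{
  unsigned char n[crypto_stream_NONCEBYTES] = {0};
  unsigned char k[crypto_stream_KEYBYTES] = {0};
  unsigned char m[64] = {0}, c[64], p[64];

  crypto_stream_xor(c, m, sizeof m, n, k);   /* c = m ^ keystream(n,k) */
  crypto_stream_xor(p, c, sizeof c, n, k);   /* p = c ^ keystream(n,k) */
  return memcmp(m, p, sizeof m);             /* 0: plaintext recovered */
}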
diff --git a/nacl/crypto_stream/aes128ctr/core2/xor_afternm.s b/nacl/crypto_stream/aes128ctr/core2/xor_afternm.s
new file mode 100644
index 00000000..022691a2
--- /dev/null
+++ b/nacl/crypto_stream/aes128ctr/core2/xor_afternm.s
@@ -0,0 +1,12407 @@
1# Author: Emilia Käsper and Peter Schwabe
2# Date: 2009-03-19
3# +2010.01.31: minor namespace modifications
4# Public domain
5
6.data
7.p2align 6
8
9RCON: .int 0x00000000, 0x00000000, 0x00000000, 0xffffffff
10ROTB: .int 0x0c000000, 0x00000000, 0x04000000, 0x08000000
11EXPB0: .int 0x03030303, 0x07070707, 0x0b0b0b0b, 0x0f0f0f0f
12CTRINC1: .int 0x00000001, 0x00000000, 0x00000000, 0x00000000
13CTRINC2: .int 0x00000002, 0x00000000, 0x00000000, 0x00000000
14CTRINC3: .int 0x00000003, 0x00000000, 0x00000000, 0x00000000
15CTRINC4: .int 0x00000004, 0x00000000, 0x00000000, 0x00000000
16CTRINC5: .int 0x00000005, 0x00000000, 0x00000000, 0x00000000
17CTRINC6: .int 0x00000006, 0x00000000, 0x00000000, 0x00000000
18CTRINC7: .int 0x00000007, 0x00000000, 0x00000000, 0x00000000
19RCTRINC1: .int 0x00000000, 0x00000000, 0x00000000, 0x00000001
20RCTRINC2: .int 0x00000000, 0x00000000, 0x00000000, 0x00000002
21RCTRINC3: .int 0x00000000, 0x00000000, 0x00000000, 0x00000003
22RCTRINC4: .int 0x00000000, 0x00000000, 0x00000000, 0x00000004
23RCTRINC5: .int 0x00000000, 0x00000000, 0x00000000, 0x00000005
24RCTRINC6: .int 0x00000000, 0x00000000, 0x00000000, 0x00000006
25RCTRINC7: .int 0x00000000, 0x00000000, 0x00000000, 0x00000007
26
27SWAP32: .int 0x00010203, 0x04050607, 0x08090a0b, 0x0c0d0e0f
28M0SWAP: .quad 0x0105090d0004080c , 0x03070b0f02060a0e
29
30BS0: .quad 0x5555555555555555, 0x5555555555555555
31BS1: .quad 0x3333333333333333, 0x3333333333333333
32BS2: .quad 0x0f0f0f0f0f0f0f0f, 0x0f0f0f0f0f0f0f0f
33ONE: .quad 0xffffffffffffffff, 0xffffffffffffffff
34M0: .quad 0x02060a0e03070b0f, 0x0004080c0105090d
35SRM0: .quad 0x0304090e00050a0f, 0x01060b0c0207080d
36SR: .quad 0x0504070600030201, 0x0f0e0d0c0a09080b
37
38# qhasm: int64 outp
39
40# qhasm: int64 inp
41
42# qhasm: int64 len
43
44# qhasm: int64 np
45
46# qhasm: int64 c
47
48# qhasm: input outp
49
50# qhasm: input inp
51
52# qhasm: input len
53
54# qhasm: input np
55
56# qhasm: input c
57
58# qhasm: int64 lensav
59
60# qhasm: int64 tmp
61
62# qhasm: int6464 xmm0
63
64# qhasm: int6464 xmm1
65
66# qhasm: int6464 xmm2
67
68# qhasm: int6464 xmm3
69
70# qhasm: int6464 xmm4
71
72# qhasm: int6464 xmm5
73
74# qhasm: int6464 xmm6
75
76# qhasm: int6464 xmm7
77
78# qhasm: int6464 xmm8
79
80# qhasm: int6464 xmm9
81
82# qhasm: int6464 xmm10
83
84# qhasm: int6464 xmm11
85
86# qhasm: int6464 xmm12
87
88# qhasm: int6464 xmm13
89
90# qhasm: int6464 xmm14
91
92# qhasm: int6464 xmm15
93
94# qhasm: int6464 t
95
96# qhasm: stack1024 bl
97
98# qhasm: stack128 nonce_stack
99
100# qhasm: int64 blp
101
102# qhasm: int64 b
103
104# qhasm: enter crypto_stream_aes128ctr_core2_xor_afternm
105.text
106.p2align 5
107.globl _crypto_stream_aes128ctr_core2_xor_afternm
108.globl crypto_stream_aes128ctr_core2_xor_afternm
109_crypto_stream_aes128ctr_core2_xor_afternm:
110crypto_stream_aes128ctr_core2_xor_afternm:
111mov %rsp,%r11
112and $31,%r11
113add $160,%r11
114sub %r11,%rsp
115
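# Note: r11 = (rsp & 31) + 160 and then rsp -= r11, so the stack pointer is
# aligned down to a 32-byte boundary with at least 160 bytes of scratch
# space; an "add %r11,%rsp" in the epilogue (as in beforenm above) undoes it.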
116# qhasm: xmm0 = *(int128 *) (np + 0)
117# asm 1: movdqa 0(<np=int64#4),>xmm0=int6464#1
118# asm 2: movdqa 0(<np=%rcx),>xmm0=%xmm0
119movdqa 0(%rcx),%xmm0
120
121# qhasm: nonce_stack = xmm0
122# asm 1: movdqa <xmm0=int6464#1,>nonce_stack=stack128#1
123# asm 2: movdqa <xmm0=%xmm0,>nonce_stack=0(%rsp)
124movdqa %xmm0,0(%rsp)
125
126# qhasm: np = &nonce_stack
127# asm 1: leaq <nonce_stack=stack128#1,>np=int64#4
128# asm 2: leaq <nonce_stack=0(%rsp),>np=%rcx
129leaq 0(%rsp),%rcx
130
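# Note: the nonce is stashed in the 32-byte-aligned stack slot and np is
# re-pointed at the copy, presumably so the block loop can step the counter
# in place without touching the caller's buffer.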
131# qhasm: enc_block:
132._enc_block:
133
134# qhasm: xmm0 = *(int128 *) (np + 0)
135# asm 1: movdqa 0(<np=int64#4),>xmm0=int6464#1
136# asm 2: movdqa 0(<np=%rcx),>xmm0=%xmm0
137movdqa 0(%rcx),%xmm0
138
139# qhasm: xmm1 = xmm0
140# asm 1: movdqa <xmm0=int6464#1,>xmm1=int6464#2
141# asm 2: movdqa <xmm0=%xmm0,>xmm1=%xmm1
142movdqa %xmm0,%xmm1
143
144# qhasm: shuffle bytes of xmm1 by SWAP32
145# asm 1: pshufb SWAP32,<xmm1=int6464#2
146# asm 2: pshufb SWAP32,<xmm1=%xmm1
147pshufb SWAP32,%xmm1
148
149# qhasm: xmm2 = xmm1
150# asm 1: movdqa <xmm1=int6464#2,>xmm2=int6464#3
151# asm 2: movdqa <xmm1=%xmm1,>xmm2=%xmm2
152movdqa %xmm1,%xmm2
153
154# qhasm: xmm3 = xmm1
155# asm 1: movdqa <xmm1=int6464#2,>xmm3=int6464#4
156# asm 2: movdqa <xmm1=%xmm1,>xmm3=%xmm3
157movdqa %xmm1,%xmm3
158
159# qhasm: xmm4 = xmm1
160# asm 1: movdqa <xmm1=int6464#2,>xmm4=int6464#5
161# asm 2: movdqa <xmm1=%xmm1,>xmm4=%xmm4
162movdqa %xmm1,%xmm4
163
164# qhasm: xmm5 = xmm1
165# asm 1: movdqa <xmm1=int6464#2,>xmm5=int6464#6
166# asm 2: movdqa <xmm1=%xmm1,>xmm5=%xmm5
167movdqa %xmm1,%xmm5
168
169# qhasm: xmm6 = xmm1
170# asm 1: movdqa <xmm1=int6464#2,>xmm6=int6464#7
171# asm 2: movdqa <xmm1=%xmm1,>xmm6=%xmm6
172movdqa %xmm1,%xmm6
173
174# qhasm: xmm7 = xmm1
175# asm 1: movdqa <xmm1=int6464#2,>xmm7=int6464#8
176# asm 2: movdqa <xmm1=%xmm1,>xmm7=%xmm7
177movdqa %xmm1,%xmm7
178
179# qhasm: int32323232 xmm1 += RCTRINC1
180# asm 1: paddd RCTRINC1,<xmm1=int6464#2
181# asm 2: paddd RCTRINC1,<xmm1=%xmm1
182paddd RCTRINC1,%xmm1
183
184# qhasm: int32323232 xmm2 += RCTRINC2
185# asm 1: paddd RCTRINC2,<xmm2=int6464#3
186# asm 2: paddd RCTRINC2,<xmm2=%xmm2
187paddd RCTRINC2,%xmm2
188
189# qhasm: int32323232 xmm3 += RCTRINC3
190# asm 1: paddd RCTRINC3,<xmm3=int6464#4
191# asm 2: paddd RCTRINC3,<xmm3=%xmm3
192paddd RCTRINC3,%xmm3
193
194# qhasm: int32323232 xmm4 += RCTRINC4
195# asm 1: paddd RCTRINC4,<xmm4=int6464#5
196# asm 2: paddd RCTRINC4,<xmm4=%xmm4
197paddd RCTRINC4,%xmm4
198
199# qhasm: int32323232 xmm5 += RCTRINC5
200# asm 1: paddd RCTRINC5,<xmm5=int6464#6
201# asm 2: paddd RCTRINC5,<xmm5=%xmm5
202paddd RCTRINC5,%xmm5
203
204# qhasm: int32323232 xmm6 += RCTRINC6
205# asm 1: paddd RCTRINC6,<xmm6=int6464#7
206# asm 2: paddd RCTRINC6,<xmm6=%xmm6
207paddd RCTRINC6,%xmm6
208
209# qhasm: int32323232 xmm7 += RCTRINC7
210# asm 1: paddd RCTRINC7,<xmm7=int6464#8
211# asm 2: paddd RCTRINC7,<xmm7=%xmm7
212paddd RCTRINC7,%xmm7
213
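# Note: SWAP32 byte-reverses each 32-bit word, so the seven paddd's above
# add 1..7 to the last word of the nonce as a big-endian counter: xmm0
# keeps the caller's counter block and xmm1..xmm7 become the next seven.
# Correspondingly, xmm0 below is shuffled by M0 while the byte-reversed
# copies use M0SWAP, which appears to fold the byte order back in while
# interleaving for the bitsliced representation.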
214# qhasm: shuffle bytes of xmm0 by M0
215# asm 1: pshufb M0,<xmm0=int6464#1
216# asm 2: pshufb M0,<xmm0=%xmm0
217pshufb M0,%xmm0
218
219# qhasm: shuffle bytes of xmm1 by M0SWAP
220# asm 1: pshufb M0SWAP,<xmm1=int6464#2
221# asm 2: pshufb M0SWAP,<xmm1=%xmm1
222pshufb M0SWAP,%xmm1
223
224# qhasm: shuffle bytes of xmm2 by M0SWAP
225# asm 1: pshufb M0SWAP,<xmm2=int6464#3
226# asm 2: pshufb M0SWAP,<xmm2=%xmm2
227pshufb M0SWAP,%xmm2
228
229# qhasm: shuffle bytes of xmm3 by M0SWAP
230# asm 1: pshufb M0SWAP,<xmm3=int6464#4
231# asm 2: pshufb M0SWAP,<xmm3=%xmm3
232pshufb M0SWAP,%xmm3
233
234# qhasm: shuffle bytes of xmm4 by M0SWAP
235# asm 1: pshufb M0SWAP,<xmm4=int6464#5
236# asm 2: pshufb M0SWAP,<xmm4=%xmm4
237pshufb M0SWAP,%xmm4
238
239# qhasm: shuffle bytes of xmm5 by M0SWAP
240# asm 1: pshufb M0SWAP,<xmm5=int6464#6
241# asm 2: pshufb M0SWAP,<xmm5=%xmm5
242pshufb M0SWAP,%xmm5
243
244# qhasm: shuffle bytes of xmm6 by M0SWAP
245# asm 1: pshufb M0SWAP,<xmm6=int6464#7
246# asm 2: pshufb M0SWAP,<xmm6=%xmm6
247pshufb M0SWAP,%xmm6
248
249# qhasm: shuffle bytes of xmm7 by M0SWAP
250# asm 1: pshufb M0SWAP,<xmm7=int6464#8
251# asm 2: pshufb M0SWAP,<xmm7=%xmm7
252pshufb M0SWAP,%xmm7
253
254# qhasm: xmm8 = xmm6
255# asm 1: movdqa <xmm6=int6464#7,>xmm8=int6464#9
256# asm 2: movdqa <xmm6=%xmm6,>xmm8=%xmm8
257movdqa %xmm6,%xmm8
258
259# qhasm: uint6464 xmm8 >>= 1
260# asm 1: psrlq $1,<xmm8=int6464#9
261# asm 2: psrlq $1,<xmm8=%xmm8
262psrlq $1,%xmm8
263
264# qhasm: xmm8 ^= xmm7
265# asm 1: pxor <xmm7=int6464#8,<xmm8=int6464#9
266# asm 2: pxor <xmm7=%xmm7,<xmm8=%xmm8
267pxor %xmm7,%xmm8
268
269# qhasm: xmm8 &= BS0
270# asm 1: pand BS0,<xmm8=int6464#9
271# asm 2: pand BS0,<xmm8=%xmm8
272pand BS0,%xmm8
273
274# qhasm: xmm7 ^= xmm8
275# asm 1: pxor <xmm8=int6464#9,<xmm7=int6464#8
276# asm 2: pxor <xmm8=%xmm8,<xmm7=%xmm7
277pxor %xmm8,%xmm7
278
279# qhasm: uint6464 xmm8 <<= 1
280# asm 1: psllq $1,<xmm8=int6464#9
281# asm 2: psllq $1,<xmm8=%xmm8
282psllq $1,%xmm8
283
284# qhasm: xmm6 ^= xmm8
285# asm 1: pxor <xmm8=int6464#9,<xmm6=int6464#7
286# asm 2: pxor <xmm8=%xmm8,<xmm6=%xmm6
287pxor %xmm8,%xmm6
288
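# Note: the eight instructions above are one SWAPMOVE step, the standard
# trick for transposing registers into bitsliced form.  In C terms
# (illustrative only):
#
#   t  = ((a >> 1) ^ b) & BS0;   /* a = xmm6, b = xmm7, BS0 = 0x5555... */
#   b ^= t;
#   a ^= t << 1;
#
# i.e. the odd bits of xmm6 are swapped with the even bits of xmm7.  The
# repetitions below do the same for the other register pairs, and the
# BS1 (0x3333...) and BS2 (0x0f0f...) rounds complete the transposition.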
289# qhasm: xmm8 = xmm4
290# asm 1: movdqa <xmm4=int6464#5,>xmm8=int6464#9
291# asm 2: movdqa <xmm4=%xmm4,>xmm8=%xmm8
292movdqa %xmm4,%xmm8
293
294# qhasm: uint6464 xmm8 >>= 1
295# asm 1: psrlq $1,<xmm8=int6464#9
296# asm 2: psrlq $1,<xmm8=%xmm8
297psrlq $1,%xmm8
298
299# qhasm: xmm8 ^= xmm5
300# asm 1: pxor <xmm5=int6464#6,<xmm8=int6464#9
301# asm 2: pxor <xmm5=%xmm5,<xmm8=%xmm8
302pxor %xmm5,%xmm8
303
304# qhasm: xmm8 &= BS0
305# asm 1: pand BS0,<xmm8=int6464#9
306# asm 2: pand BS0,<xmm8=%xmm8
307pand BS0,%xmm8
308
309# qhasm: xmm5 ^= xmm8
310# asm 1: pxor <xmm8=int6464#9,<xmm5=int6464#6
311# asm 2: pxor <xmm8=%xmm8,<xmm5=%xmm5
312pxor %xmm8,%xmm5
313
314# qhasm: uint6464 xmm8 <<= 1
315# asm 1: psllq $1,<xmm8=int6464#9
316# asm 2: psllq $1,<xmm8=%xmm8
317psllq $1,%xmm8
318
319# qhasm: xmm4 ^= xmm8
320# asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#5
321# asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm4
322pxor %xmm8,%xmm4
323
324# qhasm: xmm8 = xmm2
325# asm 1: movdqa <xmm2=int6464#3,>xmm8=int6464#9
326# asm 2: movdqa <xmm2=%xmm2,>xmm8=%xmm8
327movdqa %xmm2,%xmm8
328
329# qhasm: uint6464 xmm8 >>= 1
330# asm 1: psrlq $1,<xmm8=int6464#9
331# asm 2: psrlq $1,<xmm8=%xmm8
332psrlq $1,%xmm8
333
334# qhasm: xmm8 ^= xmm3
335# asm 1: pxor <xmm3=int6464#4,<xmm8=int6464#9
336# asm 2: pxor <xmm3=%xmm3,<xmm8=%xmm8
337pxor %xmm3,%xmm8
338
339# qhasm: xmm8 &= BS0
340# asm 1: pand BS0,<xmm8=int6464#9
341# asm 2: pand BS0,<xmm8=%xmm8
342pand BS0,%xmm8
343
344# qhasm: xmm3 ^= xmm8
345# asm 1: pxor <xmm8=int6464#9,<xmm3=int6464#4
346# asm 2: pxor <xmm8=%xmm8,<xmm3=%xmm3
347pxor %xmm8,%xmm3
348
349# qhasm: uint6464 xmm8 <<= 1
350# asm 1: psllq $1,<xmm8=int6464#9
351# asm 2: psllq $1,<xmm8=%xmm8
352psllq $1,%xmm8
353
354# qhasm: xmm2 ^= xmm8
355# asm 1: pxor <xmm8=int6464#9,<xmm2=int6464#3
356# asm 2: pxor <xmm8=%xmm8,<xmm2=%xmm2
357pxor %xmm8,%xmm2
358
359# qhasm: xmm8 = xmm0
360# asm 1: movdqa <xmm0=int6464#1,>xmm8=int6464#9
361# asm 2: movdqa <xmm0=%xmm0,>xmm8=%xmm8
362movdqa %xmm0,%xmm8
363
364# qhasm: uint6464 xmm8 >>= 1
365# asm 1: psrlq $1,<xmm8=int6464#9
366# asm 2: psrlq $1,<xmm8=%xmm8
367psrlq $1,%xmm8
368
369# qhasm: xmm8 ^= xmm1
370# asm 1: pxor <xmm1=int6464#2,<xmm8=int6464#9
371# asm 2: pxor <xmm1=%xmm1,<xmm8=%xmm8
372pxor %xmm1,%xmm8
373
374# qhasm: xmm8 &= BS0
375# asm 1: pand BS0,<xmm8=int6464#9
376# asm 2: pand BS0,<xmm8=%xmm8
377pand BS0,%xmm8
378
379# qhasm: xmm1 ^= xmm8
380# asm 1: pxor <xmm8=int6464#9,<xmm1=int6464#2
381# asm 2: pxor <xmm8=%xmm8,<xmm1=%xmm1
382pxor %xmm8,%xmm1
383
384# qhasm: uint6464 xmm8 <<= 1
385# asm 1: psllq $1,<xmm8=int6464#9
386# asm 2: psllq $1,<xmm8=%xmm8
387psllq $1,%xmm8
388
389# qhasm: xmm0 ^= xmm8
390# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
391# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
392pxor %xmm8,%xmm0
393
394# qhasm: xmm8 = xmm5
395# asm 1: movdqa <xmm5=int6464#6,>xmm8=int6464#9
396# asm 2: movdqa <xmm5=%xmm5,>xmm8=%xmm8
397movdqa %xmm5,%xmm8
398
399# qhasm: uint6464 xmm8 >>= 2
400# asm 1: psrlq $2,<xmm8=int6464#9
401# asm 2: psrlq $2,<xmm8=%xmm8
402psrlq $2,%xmm8
403
404# qhasm: xmm8 ^= xmm7
405# asm 1: pxor <xmm7=int6464#8,<xmm8=int6464#9
406# asm 2: pxor <xmm7=%xmm7,<xmm8=%xmm8
407pxor %xmm7,%xmm8
408
409# qhasm: xmm8 &= BS1
410# asm 1: pand BS1,<xmm8=int6464#9
411# asm 2: pand BS1,<xmm8=%xmm8
412pand BS1,%xmm8
413
414# qhasm: xmm7 ^= xmm8
415# asm 1: pxor <xmm8=int6464#9,<xmm7=int6464#8
416# asm 2: pxor <xmm8=%xmm8,<xmm7=%xmm7
417pxor %xmm8,%xmm7
418
419# qhasm: uint6464 xmm8 <<= 2
420# asm 1: psllq $2,<xmm8=int6464#9
421# asm 2: psllq $2,<xmm8=%xmm8
422psllq $2,%xmm8
423
424# qhasm: xmm5 ^= xmm8
425# asm 1: pxor <xmm8=int6464#9,<xmm5=int6464#6
426# asm 2: pxor <xmm8=%xmm8,<xmm5=%xmm5
427pxor %xmm8,%xmm5
428
429# qhasm: xmm8 = xmm4
430# asm 1: movdqa <xmm4=int6464#5,>xmm8=int6464#9
431# asm 2: movdqa <xmm4=%xmm4,>xmm8=%xmm8
432movdqa %xmm4,%xmm8
433
434# qhasm: uint6464 xmm8 >>= 2
435# asm 1: psrlq $2,<xmm8=int6464#9
436# asm 2: psrlq $2,<xmm8=%xmm8
437psrlq $2,%xmm8
438
439# qhasm: xmm8 ^= xmm6
440# asm 1: pxor <xmm6=int6464#7,<xmm8=int6464#9
441# asm 2: pxor <xmm6=%xmm6,<xmm8=%xmm8
442pxor %xmm6,%xmm8
443
444# qhasm: xmm8 &= BS1
445# asm 1: pand BS1,<xmm8=int6464#9
446# asm 2: pand BS1,<xmm8=%xmm8
447pand BS1,%xmm8
448
449# qhasm: xmm6 ^= xmm8
450# asm 1: pxor <xmm8=int6464#9,<xmm6=int6464#7
451# asm 2: pxor <xmm8=%xmm8,<xmm6=%xmm6
452pxor %xmm8,%xmm6
453
454# qhasm: uint6464 xmm8 <<= 2
455# asm 1: psllq $2,<xmm8=int6464#9
456# asm 2: psllq $2,<xmm8=%xmm8
457psllq $2,%xmm8
458
459# qhasm: xmm4 ^= xmm8
460# asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#5
461# asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm4
462pxor %xmm8,%xmm4
463
464# qhasm: xmm8 = xmm1
465# asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#9
466# asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm8
467movdqa %xmm1,%xmm8
468
469# qhasm: uint6464 xmm8 >>= 2
470# asm 1: psrlq $2,<xmm8=int6464#9
471# asm 2: psrlq $2,<xmm8=%xmm8
472psrlq $2,%xmm8
473
474# qhasm: xmm8 ^= xmm3
475# asm 1: pxor <xmm3=int6464#4,<xmm8=int6464#9
476# asm 2: pxor <xmm3=%xmm3,<xmm8=%xmm8
477pxor %xmm3,%xmm8
478
479# qhasm: xmm8 &= BS1
480# asm 1: pand BS1,<xmm8=int6464#9
481# asm 2: pand BS1,<xmm8=%xmm8
482pand BS1,%xmm8
483
484# qhasm: xmm3 ^= xmm8
485# asm 1: pxor <xmm8=int6464#9,<xmm3=int6464#4
486# asm 2: pxor <xmm8=%xmm8,<xmm3=%xmm3
487pxor %xmm8,%xmm3
488
489# qhasm: uint6464 xmm8 <<= 2
490# asm 1: psllq $2,<xmm8=int6464#9
491# asm 2: psllq $2,<xmm8=%xmm8
492psllq $2,%xmm8
493
494# qhasm: xmm1 ^= xmm8
495# asm 1: pxor <xmm8=int6464#9,<xmm1=int6464#2
496# asm 2: pxor <xmm8=%xmm8,<xmm1=%xmm1
497pxor %xmm8,%xmm1
498
499# qhasm: xmm8 = xmm0
500# asm 1: movdqa <xmm0=int6464#1,>xmm8=int6464#9
501# asm 2: movdqa <xmm0=%xmm0,>xmm8=%xmm8
502movdqa %xmm0,%xmm8
503
504# qhasm: uint6464 xmm8 >>= 2
505# asm 1: psrlq $2,<xmm8=int6464#9
506# asm 2: psrlq $2,<xmm8=%xmm8
507psrlq $2,%xmm8
508
509# qhasm: xmm8 ^= xmm2
510# asm 1: pxor <xmm2=int6464#3,<xmm8=int6464#9
511# asm 2: pxor <xmm2=%xmm2,<xmm8=%xmm8
512pxor %xmm2,%xmm8
513
514# qhasm: xmm8 &= BS1
515# asm 1: pand BS1,<xmm8=int6464#9
516# asm 2: pand BS1,<xmm8=%xmm8
517pand BS1,%xmm8
518
519# qhasm: xmm2 ^= xmm8
520# asm 1: pxor <xmm8=int6464#9,<xmm2=int6464#3
521# asm 2: pxor <xmm8=%xmm8,<xmm2=%xmm2
522pxor %xmm8,%xmm2
523
524# qhasm: uint6464 xmm8 <<= 2
525# asm 1: psllq $2,<xmm8=int6464#9
526# asm 2: psllq $2,<xmm8=%xmm8
527psllq $2,%xmm8
528
529# qhasm: xmm0 ^= xmm8
530# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
531# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
532pxor %xmm8,%xmm0
533
534# qhasm: xmm8 = xmm3
535# asm 1: movdqa <xmm3=int6464#4,>xmm8=int6464#9
536# asm 2: movdqa <xmm3=%xmm3,>xmm8=%xmm8
537movdqa %xmm3,%xmm8
538
539# qhasm: uint6464 xmm8 >>= 4
540# asm 1: psrlq $4,<xmm8=int6464#9
541# asm 2: psrlq $4,<xmm8=%xmm8
542psrlq $4,%xmm8
543
544# qhasm: xmm8 ^= xmm7
545# asm 1: pxor <xmm7=int6464#8,<xmm8=int6464#9
546# asm 2: pxor <xmm7=%xmm7,<xmm8=%xmm8
547pxor %xmm7,%xmm8
548
549# qhasm: xmm8 &= BS2
550# asm 1: pand BS2,<xmm8=int6464#9
551# asm 2: pand BS2,<xmm8=%xmm8
552pand BS2,%xmm8
553
554# qhasm: xmm7 ^= xmm8
555# asm 1: pxor <xmm8=int6464#9,<xmm7=int6464#8
556# asm 2: pxor <xmm8=%xmm8,<xmm7=%xmm7
557pxor %xmm8,%xmm7
558
559# qhasm: uint6464 xmm8 <<= 4
560# asm 1: psllq $4,<xmm8=int6464#9
561# asm 2: psllq $4,<xmm8=%xmm8
562psllq $4,%xmm8
563
564# qhasm: xmm3 ^= xmm8
565# asm 1: pxor <xmm8=int6464#9,<xmm3=int6464#4
566# asm 2: pxor <xmm8=%xmm8,<xmm3=%xmm3
567pxor %xmm8,%xmm3
568
569# qhasm: xmm8 = xmm2
570# asm 1: movdqa <xmm2=int6464#3,>xmm8=int6464#9
571# asm 2: movdqa <xmm2=%xmm2,>xmm8=%xmm8
572movdqa %xmm2,%xmm8
573
574# qhasm: uint6464 xmm8 >>= 4
575# asm 1: psrlq $4,<xmm8=int6464#9
576# asm 2: psrlq $4,<xmm8=%xmm8
577psrlq $4,%xmm8
578
579# qhasm: xmm8 ^= xmm6
580# asm 1: pxor <xmm6=int6464#7,<xmm8=int6464#9
581# asm 2: pxor <xmm6=%xmm6,<xmm8=%xmm8
582pxor %xmm6,%xmm8
583
584# qhasm: xmm8 &= BS2
585# asm 1: pand BS2,<xmm8=int6464#9
586# asm 2: pand BS2,<xmm8=%xmm8
587pand BS2,%xmm8
588
589# qhasm: xmm6 ^= xmm8
590# asm 1: pxor <xmm8=int6464#9,<xmm6=int6464#7
591# asm 2: pxor <xmm8=%xmm8,<xmm6=%xmm6
592pxor %xmm8,%xmm6
593
594# qhasm: uint6464 xmm8 <<= 4
595# asm 1: psllq $4,<xmm8=int6464#9
596# asm 2: psllq $4,<xmm8=%xmm8
597psllq $4,%xmm8
598
599# qhasm: xmm2 ^= xmm8
600# asm 1: pxor <xmm8=int6464#9,<xmm2=int6464#3
601# asm 2: pxor <xmm8=%xmm8,<xmm2=%xmm2
602pxor %xmm8,%xmm2
603
604# qhasm: xmm8 = xmm1
605# asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#9
606# asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm8
607movdqa %xmm1,%xmm8
608
609# qhasm: uint6464 xmm8 >>= 4
610# asm 1: psrlq $4,<xmm8=int6464#9
611# asm 2: psrlq $4,<xmm8=%xmm8
612psrlq $4,%xmm8
613
614# qhasm: xmm8 ^= xmm5
615# asm 1: pxor <xmm5=int6464#6,<xmm8=int6464#9
616# asm 2: pxor <xmm5=%xmm5,<xmm8=%xmm8
617pxor %xmm5,%xmm8
618
619# qhasm: xmm8 &= BS2
620# asm 1: pand BS2,<xmm8=int6464#9
621# asm 2: pand BS2,<xmm8=%xmm8
622pand BS2,%xmm8
623
624# qhasm: xmm5 ^= xmm8
625# asm 1: pxor <xmm8=int6464#9,<xmm5=int6464#6
626# asm 2: pxor <xmm8=%xmm8,<xmm5=%xmm5
627pxor %xmm8,%xmm5
628
629# qhasm: uint6464 xmm8 <<= 4
630# asm 1: psllq $4,<xmm8=int6464#9
631# asm 2: psllq $4,<xmm8=%xmm8
632psllq $4,%xmm8
633
634# qhasm: xmm1 ^= xmm8
635# asm 1: pxor <xmm8=int6464#9,<xmm1=int6464#2
636# asm 2: pxor <xmm8=%xmm8,<xmm1=%xmm1
637pxor %xmm8,%xmm1
638
639# qhasm: xmm8 = xmm0
640# asm 1: movdqa <xmm0=int6464#1,>xmm8=int6464#9
641# asm 2: movdqa <xmm0=%xmm0,>xmm8=%xmm8
642movdqa %xmm0,%xmm8
643
644# qhasm: uint6464 xmm8 >>= 4
645# asm 1: psrlq $4,<xmm8=int6464#9
646# asm 2: psrlq $4,<xmm8=%xmm8
647psrlq $4,%xmm8
648
649# qhasm: xmm8 ^= xmm4
650# asm 1: pxor <xmm4=int6464#5,<xmm8=int6464#9
651# asm 2: pxor <xmm4=%xmm4,<xmm8=%xmm8
652pxor %xmm4,%xmm8
653
654# qhasm: xmm8 &= BS2
655# asm 1: pand BS2,<xmm8=int6464#9
656# asm 2: pand BS2,<xmm8=%xmm8
657pand BS2,%xmm8
658
659# qhasm: xmm4 ^= xmm8
660# asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#5
661# asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm4
662pxor %xmm8,%xmm4
663
664# qhasm: uint6464 xmm8 <<= 4
665# asm 1: psllq $4,<xmm8=int6464#9
666# asm 2: psllq $4,<xmm8=%xmm8
667psllq $4,%xmm8
668
669# qhasm: xmm0 ^= xmm8
670# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
671# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
672pxor %xmm8,%xmm0
673
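# note (annotation): bitslicing is complete. The eight pxor loads below
# add the first 128-byte bitsliced round key at c+0..c+112 (the key
# material prepared by beforenm), and each pshufb by SR performs
# ShiftRows on the bitsliced state; SubBytes follows.
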
674# qhasm: xmm0 ^= *(int128 *)(c + 0)
675# asm 1: pxor 0(<c=int64#5),<xmm0=int6464#1
676# asm 2: pxor 0(<c=%r8),<xmm0=%xmm0
677pxor 0(%r8),%xmm0
678
679# qhasm: shuffle bytes of xmm0 by SR
680# asm 1: pshufb SR,<xmm0=int6464#1
681# asm 2: pshufb SR,<xmm0=%xmm0
682pshufb SR,%xmm0
683
684# qhasm: xmm1 ^= *(int128 *)(c + 16)
685# asm 1: pxor 16(<c=int64#5),<xmm1=int6464#2
686# asm 2: pxor 16(<c=%r8),<xmm1=%xmm1
687pxor 16(%r8),%xmm1
688
689# qhasm: shuffle bytes of xmm1 by SR
690# asm 1: pshufb SR,<xmm1=int6464#2
691# asm 2: pshufb SR,<xmm1=%xmm1
692pshufb SR,%xmm1
693
694# qhasm: xmm2 ^= *(int128 *)(c + 32)
695# asm 1: pxor 32(<c=int64#5),<xmm2=int6464#3
696# asm 2: pxor 32(<c=%r8),<xmm2=%xmm2
697pxor 32(%r8),%xmm2
698
699# qhasm: shuffle bytes of xmm2 by SR
700# asm 1: pshufb SR,<xmm2=int6464#3
701# asm 2: pshufb SR,<xmm2=%xmm2
702pshufb SR,%xmm2
703
704# qhasm: xmm3 ^= *(int128 *)(c + 48)
705# asm 1: pxor 48(<c=int64#5),<xmm3=int6464#4
706# asm 2: pxor 48(<c=%r8),<xmm3=%xmm3
707pxor 48(%r8),%xmm3
708
709# qhasm: shuffle bytes of xmm3 by SR
710# asm 1: pshufb SR,<xmm3=int6464#4
711# asm 2: pshufb SR,<xmm3=%xmm3
712pshufb SR,%xmm3
713
714# qhasm: xmm4 ^= *(int128 *)(c + 64)
715# asm 1: pxor 64(<c=int64#5),<xmm4=int6464#5
716# asm 2: pxor 64(<c=%r8),<xmm4=%xmm4
717pxor 64(%r8),%xmm4
718
719# qhasm: shuffle bytes of xmm4 by SR
720# asm 1: pshufb SR,<xmm4=int6464#5
721# asm 2: pshufb SR,<xmm4=%xmm4
722pshufb SR,%xmm4
723
724# qhasm: xmm5 ^= *(int128 *)(c + 80)
725# asm 1: pxor 80(<c=int64#5),<xmm5=int6464#6
726# asm 2: pxor 80(<c=%r8),<xmm5=%xmm5
727pxor 80(%r8),%xmm5
728
729# qhasm: shuffle bytes of xmm5 by SR
730# asm 1: pshufb SR,<xmm5=int6464#6
731# asm 2: pshufb SR,<xmm5=%xmm5
732pshufb SR,%xmm5
733
734# qhasm: xmm6 ^= *(int128 *)(c + 96)
735# asm 1: pxor 96(<c=int64#5),<xmm6=int6464#7
736# asm 2: pxor 96(<c=%r8),<xmm6=%xmm6
737pxor 96(%r8),%xmm6
738
739# qhasm: shuffle bytes of xmm6 by SR
740# asm 1: pshufb SR,<xmm6=int6464#7
741# asm 2: pshufb SR,<xmm6=%xmm6
742pshufb SR,%xmm6
743
744# qhasm: xmm7 ^= *(int128 *)(c + 112)
745# asm 1: pxor 112(<c=int64#5),<xmm7=int6464#8
746# asm 2: pxor 112(<c=%r8),<xmm7=%xmm7
747pxor 112(%r8),%xmm7
748
749# qhasm: shuffle bytes of xmm7 by SR
750# asm 1: pshufb SR,<xmm7=int6464#8
751# asm 2: pshufb SR,<xmm7=%xmm7
752pshufb SR,%xmm7
753
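# note (annotation): start of the bitsliced AES S-box, evaluated as a
# hardware-style Boolean circuit over the eight bit planes. The XOR
# sequence below is its top linear layer.
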
754# qhasm: xmm5 ^= xmm6
755# asm 1: pxor <xmm6=int6464#7,<xmm5=int6464#6
756# asm 2: pxor <xmm6=%xmm6,<xmm5=%xmm5
757pxor %xmm6,%xmm5
758
759# qhasm: xmm2 ^= xmm1
760# asm 1: pxor <xmm1=int6464#2,<xmm2=int6464#3
761# asm 2: pxor <xmm1=%xmm1,<xmm2=%xmm2
762pxor %xmm1,%xmm2
763
764# qhasm: xmm5 ^= xmm0
765# asm 1: pxor <xmm0=int6464#1,<xmm5=int6464#6
766# asm 2: pxor <xmm0=%xmm0,<xmm5=%xmm5
767pxor %xmm0,%xmm5
768
769# qhasm: xmm6 ^= xmm2
770# asm 1: pxor <xmm2=int6464#3,<xmm6=int6464#7
771# asm 2: pxor <xmm2=%xmm2,<xmm6=%xmm6
772pxor %xmm2,%xmm6
773
774# qhasm: xmm3 ^= xmm0
775# asm 1: pxor <xmm0=int6464#1,<xmm3=int6464#4
776# asm 2: pxor <xmm0=%xmm0,<xmm3=%xmm3
777pxor %xmm0,%xmm3
778
779# qhasm: xmm6 ^= xmm3
780# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
781# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
782pxor %xmm3,%xmm6
783
784# qhasm: xmm3 ^= xmm7
785# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4
786# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3
787pxor %xmm7,%xmm3
788
789# qhasm: xmm3 ^= xmm4
790# asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4
791# asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3
792pxor %xmm4,%xmm3
793
794# qhasm: xmm7 ^= xmm5
795# asm 1: pxor <xmm5=int6464#6,<xmm7=int6464#8
796# asm 2: pxor <xmm5=%xmm5,<xmm7=%xmm7
797pxor %xmm5,%xmm7
798
799# qhasm: xmm3 ^= xmm1
800# asm 1: pxor <xmm1=int6464#2,<xmm3=int6464#4
801# asm 2: pxor <xmm1=%xmm1,<xmm3=%xmm3
802pxor %xmm1,%xmm3
803
804# qhasm: xmm4 ^= xmm5
805# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
806# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
807pxor %xmm5,%xmm4
808
809# qhasm: xmm2 ^= xmm7
810# asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3
811# asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2
812pxor %xmm7,%xmm2
813
814# qhasm: xmm1 ^= xmm5
815# asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2
816# asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1
817pxor %xmm5,%xmm1
818
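# note (annotation): nonlinear middle section of the S-box circuit; the
# pand/por gates here appear to compute the GF(2^8) inversion via a
# tower-field decomposition.
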
819# qhasm: xmm11 = xmm7
820# asm 1: movdqa <xmm7=int6464#8,>xmm11=int6464#9
821# asm 2: movdqa <xmm7=%xmm7,>xmm11=%xmm8
822movdqa %xmm7,%xmm8
823
824# qhasm: xmm10 = xmm1
825# asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10
826# asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9
827movdqa %xmm1,%xmm9
828
829# qhasm: xmm9 = xmm5
830# asm 1: movdqa <xmm5=int6464#6,>xmm9=int6464#11
831# asm 2: movdqa <xmm5=%xmm5,>xmm9=%xmm10
832movdqa %xmm5,%xmm10
833
834# qhasm: xmm13 = xmm2
835# asm 1: movdqa <xmm2=int6464#3,>xmm13=int6464#12
836# asm 2: movdqa <xmm2=%xmm2,>xmm13=%xmm11
837movdqa %xmm2,%xmm11
838
839# qhasm: xmm12 = xmm6
840# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#13
841# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm12
842movdqa %xmm6,%xmm12
843
844# qhasm: xmm11 ^= xmm4
845# asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#9
846# asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm8
847pxor %xmm4,%xmm8
848
849# qhasm: xmm10 ^= xmm2
850# asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#10
851# asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm9
852pxor %xmm2,%xmm9
853
854# qhasm: xmm9 ^= xmm3
855# asm 1: pxor <xmm3=int6464#4,<xmm9=int6464#11
856# asm 2: pxor <xmm3=%xmm3,<xmm9=%xmm10
857pxor %xmm3,%xmm10
858
859# qhasm: xmm13 ^= xmm4
860# asm 1: pxor <xmm4=int6464#5,<xmm13=int6464#12
861# asm 2: pxor <xmm4=%xmm4,<xmm13=%xmm11
862pxor %xmm4,%xmm11
863
864# qhasm: xmm12 ^= xmm0
865# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13
866# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12
867pxor %xmm0,%xmm12
868
869# qhasm: xmm14 = xmm11
870# asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14
871# asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13
872movdqa %xmm8,%xmm13
873
874# qhasm: xmm8 = xmm10
875# asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15
876# asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14
877movdqa %xmm9,%xmm14
878
879# qhasm: xmm15 = xmm11
880# asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16
881# asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15
882movdqa %xmm8,%xmm15
883
884# qhasm: xmm10 |= xmm9
885# asm 1: por <xmm9=int6464#11,<xmm10=int6464#10
886# asm 2: por <xmm9=%xmm10,<xmm10=%xmm9
887por %xmm10,%xmm9
888
889# qhasm: xmm11 |= xmm12
890# asm 1: por <xmm12=int6464#13,<xmm11=int6464#9
891# asm 2: por <xmm12=%xmm12,<xmm11=%xmm8
892por %xmm12,%xmm8
893
894# qhasm: xmm15 ^= xmm8
895# asm 1: pxor <xmm8=int6464#15,<xmm15=int6464#16
896# asm 2: pxor <xmm8=%xmm14,<xmm15=%xmm15
897pxor %xmm14,%xmm15
898
899# qhasm: xmm14 &= xmm12
900# asm 1: pand <xmm12=int6464#13,<xmm14=int6464#14
901# asm 2: pand <xmm12=%xmm12,<xmm14=%xmm13
902pand %xmm12,%xmm13
903
904# qhasm: xmm8 &= xmm9
905# asm 1: pand <xmm9=int6464#11,<xmm8=int6464#15
906# asm 2: pand <xmm9=%xmm10,<xmm8=%xmm14
907pand %xmm10,%xmm14
908
909# qhasm: xmm12 ^= xmm9
910# asm 1: pxor <xmm9=int6464#11,<xmm12=int6464#13
911# asm 2: pxor <xmm9=%xmm10,<xmm12=%xmm12
912pxor %xmm10,%xmm12
913
914# qhasm: xmm15 &= xmm12
915# asm 1: pand <xmm12=int6464#13,<xmm15=int6464#16
916# asm 2: pand <xmm12=%xmm12,<xmm15=%xmm15
917pand %xmm12,%xmm15
918
919# qhasm: xmm12 = xmm3
920# asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#11
921# asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm10
922movdqa %xmm3,%xmm10
923
924# qhasm: xmm12 ^= xmm0
925# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#11
926# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm10
927pxor %xmm0,%xmm10
928
929# qhasm: xmm13 &= xmm12
930# asm 1: pand <xmm12=int6464#11,<xmm13=int6464#12
931# asm 2: pand <xmm12=%xmm10,<xmm13=%xmm11
932pand %xmm10,%xmm11
933
934# qhasm: xmm11 ^= xmm13
935# asm 1: pxor <xmm13=int6464#12,<xmm11=int6464#9
936# asm 2: pxor <xmm13=%xmm11,<xmm11=%xmm8
937pxor %xmm11,%xmm8
938
939# qhasm: xmm10 ^= xmm13
940# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
941# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
942pxor %xmm11,%xmm9
943
944# qhasm: xmm13 = xmm7
945# asm 1: movdqa <xmm7=int6464#8,>xmm13=int6464#11
946# asm 2: movdqa <xmm7=%xmm7,>xmm13=%xmm10
947movdqa %xmm7,%xmm10
948
949# qhasm: xmm13 ^= xmm1
950# asm 1: pxor <xmm1=int6464#2,<xmm13=int6464#11
951# asm 2: pxor <xmm1=%xmm1,<xmm13=%xmm10
952pxor %xmm1,%xmm10
953
954# qhasm: xmm12 = xmm5
955# asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#12
956# asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm11
957movdqa %xmm5,%xmm11
958
959# qhasm: xmm9 = xmm13
960# asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13
961# asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12
962movdqa %xmm10,%xmm12
963
964# qhasm: xmm12 ^= xmm6
965# asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#12
966# asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm11
967pxor %xmm6,%xmm11
968
969# qhasm: xmm9 |= xmm12
970# asm 1: por <xmm12=int6464#12,<xmm9=int6464#13
971# asm 2: por <xmm12=%xmm11,<xmm9=%xmm12
972por %xmm11,%xmm12
973
974# qhasm: xmm13 &= xmm12
975# asm 1: pand <xmm12=int6464#12,<xmm13=int6464#11
976# asm 2: pand <xmm12=%xmm11,<xmm13=%xmm10
977pand %xmm11,%xmm10
978
979# qhasm: xmm8 ^= xmm13
980# asm 1: pxor <xmm13=int6464#11,<xmm8=int6464#15
981# asm 2: pxor <xmm13=%xmm10,<xmm8=%xmm14
982pxor %xmm10,%xmm14
983
984# qhasm: xmm11 ^= xmm15
985# asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#9
986# asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm8
987pxor %xmm15,%xmm8
988
989# qhasm: xmm10 ^= xmm14
990# asm 1: pxor <xmm14=int6464#14,<xmm10=int6464#10
991# asm 2: pxor <xmm14=%xmm13,<xmm10=%xmm9
992pxor %xmm13,%xmm9
993
994# qhasm: xmm9 ^= xmm15
995# asm 1: pxor <xmm15=int6464#16,<xmm9=int6464#13
996# asm 2: pxor <xmm15=%xmm15,<xmm9=%xmm12
997pxor %xmm15,%xmm12
998
999# qhasm: xmm8 ^= xmm14
1000# asm 1: pxor <xmm14=int6464#14,<xmm8=int6464#15
1001# asm 2: pxor <xmm14=%xmm13,<xmm8=%xmm14
1002pxor %xmm13,%xmm14
1003
1004# qhasm: xmm9 ^= xmm14
1005# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
1006# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
1007pxor %xmm13,%xmm12
1008
1009# qhasm: xmm12 = xmm2
1010# asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#11
1011# asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm10
1012movdqa %xmm2,%xmm10
1013
1014# qhasm: xmm13 = xmm4
1015# asm 1: movdqa <xmm4=int6464#5,>xmm13=int6464#12
1016# asm 2: movdqa <xmm4=%xmm4,>xmm13=%xmm11
1017movdqa %xmm4,%xmm11
1018
1019# qhasm: xmm14 = xmm1
1020# asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14
1021# asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13
1022movdqa %xmm1,%xmm13
1023
1024# qhasm: xmm15 = xmm7
1025# asm 1: movdqa <xmm7=int6464#8,>xmm15=int6464#16
1026# asm 2: movdqa <xmm7=%xmm7,>xmm15=%xmm15
1027movdqa %xmm7,%xmm15
1028
1029# qhasm: xmm12 &= xmm3
1030# asm 1: pand <xmm3=int6464#4,<xmm12=int6464#11
1031# asm 2: pand <xmm3=%xmm3,<xmm12=%xmm10
1032pand %xmm3,%xmm10
1033
1034# qhasm: xmm13 &= xmm0
1035# asm 1: pand <xmm0=int6464#1,<xmm13=int6464#12
1036# asm 2: pand <xmm0=%xmm0,<xmm13=%xmm11
1037pand %xmm0,%xmm11
1038
1039# qhasm: xmm14 &= xmm5
1040# asm 1: pand <xmm5=int6464#6,<xmm14=int6464#14
1041# asm 2: pand <xmm5=%xmm5,<xmm14=%xmm13
1042pand %xmm5,%xmm13
1043
1044# qhasm: xmm15 |= xmm6
1045# asm 1: por <xmm6=int6464#7,<xmm15=int6464#16
1046# asm 2: por <xmm6=%xmm6,<xmm15=%xmm15
1047por %xmm6,%xmm15
1048
1049# qhasm: xmm11 ^= xmm12
1050# asm 1: pxor <xmm12=int6464#11,<xmm11=int6464#9
1051# asm 2: pxor <xmm12=%xmm10,<xmm11=%xmm8
1052pxor %xmm10,%xmm8
1053
1054# qhasm: xmm10 ^= xmm13
1055# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
1056# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
1057pxor %xmm11,%xmm9
1058
1059# qhasm: xmm9 ^= xmm14
1060# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
1061# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
1062pxor %xmm13,%xmm12
1063
1064# qhasm: xmm8 ^= xmm15
1065# asm 1: pxor <xmm15=int6464#16,<xmm8=int6464#15
1066# asm 2: pxor <xmm15=%xmm15,<xmm8=%xmm14
1067pxor %xmm15,%xmm14
1068
1069# qhasm: xmm12 = xmm11
1070# asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11
1071# asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10
1072movdqa %xmm8,%xmm10
1073
1074# qhasm: xmm12 ^= xmm10
1075# asm 1: pxor <xmm10=int6464#10,<xmm12=int6464#11
1076# asm 2: pxor <xmm10=%xmm9,<xmm12=%xmm10
1077pxor %xmm9,%xmm10
1078
1079# qhasm: xmm11 &= xmm9
1080# asm 1: pand <xmm9=int6464#13,<xmm11=int6464#9
1081# asm 2: pand <xmm9=%xmm12,<xmm11=%xmm8
1082pand %xmm12,%xmm8
1083
1084# qhasm: xmm14 = xmm8
1085# asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12
1086# asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11
1087movdqa %xmm14,%xmm11
1088
1089# qhasm: xmm14 ^= xmm11
1090# asm 1: pxor <xmm11=int6464#9,<xmm14=int6464#12
1091# asm 2: pxor <xmm11=%xmm8,<xmm14=%xmm11
1092pxor %xmm8,%xmm11
1093
1094# qhasm: xmm15 = xmm12
1095# asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14
1096# asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13
1097movdqa %xmm10,%xmm13
1098
1099# qhasm: xmm15 &= xmm14
1100# asm 1: pand <xmm14=int6464#12,<xmm15=int6464#14
1101# asm 2: pand <xmm14=%xmm11,<xmm15=%xmm13
1102pand %xmm11,%xmm13
1103
1104# qhasm: xmm15 ^= xmm10
1105# asm 1: pxor <xmm10=int6464#10,<xmm15=int6464#14
1106# asm 2: pxor <xmm10=%xmm9,<xmm15=%xmm13
1107pxor %xmm9,%xmm13
1108
1109# qhasm: xmm13 = xmm9
1110# asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16
1111# asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15
1112movdqa %xmm12,%xmm15
1113
1114# qhasm: xmm13 ^= xmm8
1115# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
1116# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
1117pxor %xmm14,%xmm15
1118
1119# qhasm: xmm11 ^= xmm10
1120# asm 1: pxor <xmm10=int6464#10,<xmm11=int6464#9
1121# asm 2: pxor <xmm10=%xmm9,<xmm11=%xmm8
1122pxor %xmm9,%xmm8
1123
1124# qhasm: xmm13 &= xmm11
1125# asm 1: pand <xmm11=int6464#9,<xmm13=int6464#16
1126# asm 2: pand <xmm11=%xmm8,<xmm13=%xmm15
1127pand %xmm8,%xmm15
1128
1129# qhasm: xmm13 ^= xmm8
1130# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
1131# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
1132pxor %xmm14,%xmm15
1133
1134# qhasm: xmm9 ^= xmm13
1135# asm 1: pxor <xmm13=int6464#16,<xmm9=int6464#13
1136# asm 2: pxor <xmm13=%xmm15,<xmm9=%xmm12
1137pxor %xmm15,%xmm12
1138
1139# qhasm: xmm10 = xmm14
1140# asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9
1141# asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8
1142movdqa %xmm11,%xmm8
1143
1144# qhasm: xmm10 ^= xmm13
1145# asm 1: pxor <xmm13=int6464#16,<xmm10=int6464#9
1146# asm 2: pxor <xmm13=%xmm15,<xmm10=%xmm8
1147pxor %xmm15,%xmm8
1148
1149# qhasm: xmm10 &= xmm8
1150# asm 1: pand <xmm8=int6464#15,<xmm10=int6464#9
1151# asm 2: pand <xmm8=%xmm14,<xmm10=%xmm8
1152pand %xmm14,%xmm8
1153
1154# qhasm: xmm9 ^= xmm10
1155# asm 1: pxor <xmm10=int6464#9,<xmm9=int6464#13
1156# asm 2: pxor <xmm10=%xmm8,<xmm9=%xmm12
1157pxor %xmm8,%xmm12
1158
1159# qhasm: xmm14 ^= xmm10
1160# asm 1: pxor <xmm10=int6464#9,<xmm14=int6464#12
1161# asm 2: pxor <xmm10=%xmm8,<xmm14=%xmm11
1162pxor %xmm8,%xmm11
1163
1164# qhasm: xmm14 &= xmm15
1165# asm 1: pand <xmm15=int6464#14,<xmm14=int6464#12
1166# asm 2: pand <xmm15=%xmm13,<xmm14=%xmm11
1167pand %xmm13,%xmm11
1168
1169# qhasm: xmm14 ^= xmm12
1170# asm 1: pxor <xmm12=int6464#11,<xmm14=int6464#12
1171# asm 2: pxor <xmm12=%xmm10,<xmm14=%xmm11
1172pxor %xmm10,%xmm11
1173
1174# qhasm: xmm12 = xmm6
1175# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#9
1176# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm8
1177movdqa %xmm6,%xmm8
1178
1179# qhasm: xmm8 = xmm5
1180# asm 1: movdqa <xmm5=int6464#6,>xmm8=int6464#10
1181# asm 2: movdqa <xmm5=%xmm5,>xmm8=%xmm9
1182movdqa %xmm5,%xmm9
1183
1184# qhasm: xmm10 = xmm15
1185# asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11
1186# asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10
1187movdqa %xmm13,%xmm10
1188
1189# qhasm: xmm10 ^= xmm14
1190# asm 1: pxor <xmm14=int6464#12,<xmm10=int6464#11
1191# asm 2: pxor <xmm14=%xmm11,<xmm10=%xmm10
1192pxor %xmm11,%xmm10
1193
1194# qhasm: xmm10 &= xmm6
1195# asm 1: pand <xmm6=int6464#7,<xmm10=int6464#11
1196# asm 2: pand <xmm6=%xmm6,<xmm10=%xmm10
1197pand %xmm6,%xmm10
1198
1199# qhasm: xmm6 ^= xmm5
1200# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
1201# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
1202pxor %xmm5,%xmm6
1203
1204# qhasm: xmm6 &= xmm14
1205# asm 1: pand <xmm14=int6464#12,<xmm6=int6464#7
1206# asm 2: pand <xmm14=%xmm11,<xmm6=%xmm6
1207pand %xmm11,%xmm6
1208
1209# qhasm: xmm5 &= xmm15
1210# asm 1: pand <xmm15=int6464#14,<xmm5=int6464#6
1211# asm 2: pand <xmm15=%xmm13,<xmm5=%xmm5
1212pand %xmm13,%xmm5
1213
1214# qhasm: xmm6 ^= xmm5
1215# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
1216# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
1217pxor %xmm5,%xmm6
1218
1219# qhasm: xmm5 ^= xmm10
1220# asm 1: pxor <xmm10=int6464#11,<xmm5=int6464#6
1221# asm 2: pxor <xmm10=%xmm10,<xmm5=%xmm5
1222pxor %xmm10,%xmm5
1223
1224# qhasm: xmm12 ^= xmm0
1225# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#9
1226# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm8
1227pxor %xmm0,%xmm8
1228
1229# qhasm: xmm8 ^= xmm3
1230# asm 1: pxor <xmm3=int6464#4,<xmm8=int6464#10
1231# asm 2: pxor <xmm3=%xmm3,<xmm8=%xmm9
1232pxor %xmm3,%xmm9
1233
1234# qhasm: xmm15 ^= xmm13
1235# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
1236# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
1237pxor %xmm15,%xmm13
1238
1239# qhasm: xmm14 ^= xmm9
1240# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
1241# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
1242pxor %xmm12,%xmm11
1243
1244# qhasm: xmm11 = xmm15
1245# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
1246# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
1247movdqa %xmm13,%xmm10
1248
1249# qhasm: xmm11 ^= xmm14
1250# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
1251# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
1252pxor %xmm11,%xmm10
1253
1254# qhasm: xmm11 &= xmm12
1255# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
1256# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
1257pand %xmm8,%xmm10
1258
1259# qhasm: xmm12 ^= xmm8
1260# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
1261# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
1262pxor %xmm9,%xmm8
1263
1264# qhasm: xmm12 &= xmm14
1265# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
1266# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
1267pand %xmm11,%xmm8
1268
1269# qhasm: xmm8 &= xmm15
1270# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
1271# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
1272pand %xmm13,%xmm9
1273
1274# qhasm: xmm8 ^= xmm12
1275# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
1276# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
1277pxor %xmm8,%xmm9
1278
1279# qhasm: xmm12 ^= xmm11
1280# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
1281# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
1282pxor %xmm10,%xmm8
1283
1284# qhasm: xmm10 = xmm13
1285# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
1286# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
1287movdqa %xmm15,%xmm10
1288
1289# qhasm: xmm10 ^= xmm9
1290# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
1291# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
1292pxor %xmm12,%xmm10
1293
1294# qhasm: xmm10 &= xmm0
1295# asm 1: pand <xmm0=int6464#1,<xmm10=int6464#11
1296# asm 2: pand <xmm0=%xmm0,<xmm10=%xmm10
1297pand %xmm0,%xmm10
1298
1299# qhasm: xmm0 ^= xmm3
1300# asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1
1301# asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0
1302pxor %xmm3,%xmm0
1303
1304# qhasm: xmm0 &= xmm9
1305# asm 1: pand <xmm9=int6464#13,<xmm0=int6464#1
1306# asm 2: pand <xmm9=%xmm12,<xmm0=%xmm0
1307pand %xmm12,%xmm0
1308
1309# qhasm: xmm3 &= xmm13
1310# asm 1: pand <xmm13=int6464#16,<xmm3=int6464#4
1311# asm 2: pand <xmm13=%xmm15,<xmm3=%xmm3
1312pand %xmm15,%xmm3
1313
1314# qhasm: xmm0 ^= xmm3
1315# asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1
1316# asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0
1317pxor %xmm3,%xmm0
1318
1319# qhasm: xmm3 ^= xmm10
1320# asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4
1321# asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3
1322pxor %xmm10,%xmm3
1323
1324# qhasm: xmm6 ^= xmm12
1325# asm 1: pxor <xmm12=int6464#9,<xmm6=int6464#7
1326# asm 2: pxor <xmm12=%xmm8,<xmm6=%xmm6
1327pxor %xmm8,%xmm6
1328
1329# qhasm: xmm0 ^= xmm12
1330# asm 1: pxor <xmm12=int6464#9,<xmm0=int6464#1
1331# asm 2: pxor <xmm12=%xmm8,<xmm0=%xmm0
1332pxor %xmm8,%xmm0
1333
1334# qhasm: xmm5 ^= xmm8
1335# asm 1: pxor <xmm8=int6464#10,<xmm5=int6464#6
1336# asm 2: pxor <xmm8=%xmm9,<xmm5=%xmm5
1337pxor %xmm9,%xmm5
1338
1339# qhasm: xmm3 ^= xmm8
1340# asm 1: pxor <xmm8=int6464#10,<xmm3=int6464#4
1341# asm 2: pxor <xmm8=%xmm9,<xmm3=%xmm3
1342pxor %xmm9,%xmm3
1343
1344# qhasm: xmm12 = xmm7
1345# asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#9
1346# asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm8
1347movdqa %xmm7,%xmm8
1348
1349# qhasm: xmm8 = xmm1
1350# asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10
1351# asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9
1352movdqa %xmm1,%xmm9
1353
1354# qhasm: xmm12 ^= xmm4
1355# asm 1: pxor <xmm4=int6464#5,<xmm12=int6464#9
1356# asm 2: pxor <xmm4=%xmm4,<xmm12=%xmm8
1357pxor %xmm4,%xmm8
1358
1359# qhasm: xmm8 ^= xmm2
1360# asm 1: pxor <xmm2=int6464#3,<xmm8=int6464#10
1361# asm 2: pxor <xmm2=%xmm2,<xmm8=%xmm9
1362pxor %xmm2,%xmm9
1363
1364# qhasm: xmm11 = xmm15
1365# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
1366# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
1367movdqa %xmm13,%xmm10
1368
1369# qhasm: xmm11 ^= xmm14
1370# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
1371# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
1372pxor %xmm11,%xmm10
1373
1374# qhasm: xmm11 &= xmm12
1375# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
1376# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
1377pand %xmm8,%xmm10
1378
1379# qhasm: xmm12 ^= xmm8
1380# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
1381# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
1382pxor %xmm9,%xmm8
1383
1384# qhasm: xmm12 &= xmm14
1385# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
1386# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
1387pand %xmm11,%xmm8
1388
1389# qhasm: xmm8 &= xmm15
1390# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
1391# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
1392pand %xmm13,%xmm9
1393
1394# qhasm: xmm8 ^= xmm12
1395# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
1396# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
1397pxor %xmm8,%xmm9
1398
1399# qhasm: xmm12 ^= xmm11
1400# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
1401# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
1402pxor %xmm10,%xmm8
1403
1404# qhasm: xmm10 = xmm13
1405# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
1406# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
1407movdqa %xmm15,%xmm10
1408
1409# qhasm: xmm10 ^= xmm9
1410# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
1411# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
1412pxor %xmm12,%xmm10
1413
1414# qhasm: xmm10 &= xmm4
1415# asm 1: pand <xmm4=int6464#5,<xmm10=int6464#11
1416# asm 2: pand <xmm4=%xmm4,<xmm10=%xmm10
1417pand %xmm4,%xmm10
1418
1419# qhasm: xmm4 ^= xmm2
1420# asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5
1421# asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4
1422pxor %xmm2,%xmm4
1423
1424# qhasm: xmm4 &= xmm9
1425# asm 1: pand <xmm9=int6464#13,<xmm4=int6464#5
1426# asm 2: pand <xmm9=%xmm12,<xmm4=%xmm4
1427pand %xmm12,%xmm4
1428
1429# qhasm: xmm2 &= xmm13
1430# asm 1: pand <xmm13=int6464#16,<xmm2=int6464#3
1431# asm 2: pand <xmm13=%xmm15,<xmm2=%xmm2
1432pand %xmm15,%xmm2
1433
1434# qhasm: xmm4 ^= xmm2
1435# asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5
1436# asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4
1437pxor %xmm2,%xmm4
1438
1439# qhasm: xmm2 ^= xmm10
1440# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3
1441# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2
1442pxor %xmm10,%xmm2
1443
1444# qhasm: xmm15 ^= xmm13
1445# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
1446# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
1447pxor %xmm15,%xmm13
1448
1449# qhasm: xmm14 ^= xmm9
1450# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
1451# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
1452pxor %xmm12,%xmm11
1453
1454# qhasm: xmm11 = xmm15
1455# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
1456# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
1457movdqa %xmm13,%xmm10
1458
1459# qhasm: xmm11 ^= xmm14
1460# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
1461# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
1462pxor %xmm11,%xmm10
1463
1464# qhasm: xmm11 &= xmm7
1465# asm 1: pand <xmm7=int6464#8,<xmm11=int6464#11
1466# asm 2: pand <xmm7=%xmm7,<xmm11=%xmm10
1467pand %xmm7,%xmm10
1468
1469# qhasm: xmm7 ^= xmm1
1470# asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8
1471# asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7
1472pxor %xmm1,%xmm7
1473
1474# qhasm: xmm7 &= xmm14
1475# asm 1: pand <xmm14=int6464#12,<xmm7=int6464#8
1476# asm 2: pand <xmm14=%xmm11,<xmm7=%xmm7
1477pand %xmm11,%xmm7
1478
1479# qhasm: xmm1 &= xmm15
1480# asm 1: pand <xmm15=int6464#14,<xmm1=int6464#2
1481# asm 2: pand <xmm15=%xmm13,<xmm1=%xmm1
1482pand %xmm13,%xmm1
1483
1484# qhasm: xmm7 ^= xmm1
1485# asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8
1486# asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7
1487pxor %xmm1,%xmm7
1488
1489# qhasm: xmm1 ^= xmm11
1490# asm 1: pxor <xmm11=int6464#11,<xmm1=int6464#2
1491# asm 2: pxor <xmm11=%xmm10,<xmm1=%xmm1
1492pxor %xmm10,%xmm1
1493
1494# qhasm: xmm7 ^= xmm12
1495# asm 1: pxor <xmm12=int6464#9,<xmm7=int6464#8
1496# asm 2: pxor <xmm12=%xmm8,<xmm7=%xmm7
1497pxor %xmm8,%xmm7
1498
1499# qhasm: xmm4 ^= xmm12
1500# asm 1: pxor <xmm12=int6464#9,<xmm4=int6464#5
1501# asm 2: pxor <xmm12=%xmm8,<xmm4=%xmm4
1502pxor %xmm8,%xmm4
1503
1504# qhasm: xmm1 ^= xmm8
1505# asm 1: pxor <xmm8=int6464#10,<xmm1=int6464#2
1506# asm 2: pxor <xmm8=%xmm9,<xmm1=%xmm1
1507pxor %xmm9,%xmm1
1508
1509# qhasm: xmm2 ^= xmm8
1510# asm 1: pxor <xmm8=int6464#10,<xmm2=int6464#3
1511# asm 2: pxor <xmm8=%xmm9,<xmm2=%xmm2
1512pxor %xmm9,%xmm2
1513
1514# qhasm: xmm7 ^= xmm0
1515# asm 1: pxor <xmm0=int6464#1,<xmm7=int6464#8
1516# asm 2: pxor <xmm0=%xmm0,<xmm7=%xmm7
1517pxor %xmm0,%xmm7
1518
1519# qhasm: xmm1 ^= xmm6
1520# asm 1: pxor <xmm6=int6464#7,<xmm1=int6464#2
1521# asm 2: pxor <xmm6=%xmm6,<xmm1=%xmm1
1522pxor %xmm6,%xmm1
1523
1524# qhasm: xmm4 ^= xmm7
1525# asm 1: pxor <xmm7=int6464#8,<xmm4=int6464#5
1526# asm 2: pxor <xmm7=%xmm7,<xmm4=%xmm4
1527pxor %xmm7,%xmm4
1528
1529# qhasm: xmm6 ^= xmm0
1530# asm 1: pxor <xmm0=int6464#1,<xmm6=int6464#7
1531# asm 2: pxor <xmm0=%xmm0,<xmm6=%xmm6
1532pxor %xmm0,%xmm6
1533
1534# qhasm: xmm0 ^= xmm1
1535# asm 1: pxor <xmm1=int6464#2,<xmm0=int6464#1
1536# asm 2: pxor <xmm1=%xmm1,<xmm0=%xmm0
1537pxor %xmm1,%xmm0
1538
1539# qhasm: xmm1 ^= xmm5
1540# asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2
1541# asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1
1542pxor %xmm5,%xmm1
1543
1544# qhasm: xmm5 ^= xmm2
1545# asm 1: pxor <xmm2=int6464#3,<xmm5=int6464#6
1546# asm 2: pxor <xmm2=%xmm2,<xmm5=%xmm5
1547pxor %xmm2,%xmm5
1548
1549# qhasm: xmm4 ^= xmm5
1550# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
1551# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
1552pxor %xmm5,%xmm4
1553
1554# qhasm: xmm2 ^= xmm3
1555# asm 1: pxor <xmm3=int6464#4,<xmm2=int6464#3
1556# asm 2: pxor <xmm3=%xmm3,<xmm2=%xmm2
1557pxor %xmm3,%xmm2
1558
1559# qhasm: xmm3 ^= xmm5
1560# asm 1: pxor <xmm5=int6464#6,<xmm3=int6464#4
1561# asm 2: pxor <xmm5=%xmm5,<xmm3=%xmm3
1562pxor %xmm5,%xmm3
1563
1564# qhasm: xmm6 ^= xmm3
1565# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
1566# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
1567pxor %xmm3,%xmm6
1568
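# note (annotation): end of the S-box circuit. The pshufd sequence
# below appears to implement MixColumns on the bitsliced state: pshufd
# $0x93 rotates the four 32-bit columns of a register by one position,
# pshufd $0x4E by two, and XORing these rotations into the state gives
# the MixColumns transform.
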
1569# qhasm: xmm8 = shuffle dwords of xmm0 by 0x93
1570# asm 1: pshufd $0x93,<xmm0=int6464#1,>xmm8=int6464#9
1571# asm 2: pshufd $0x93,<xmm0=%xmm0,>xmm8=%xmm8
1572pshufd $0x93,%xmm0,%xmm8
1573
1574# qhasm: xmm9 = shuffle dwords of xmm1 by 0x93
1575# asm 1: pshufd $0x93,<xmm1=int6464#2,>xmm9=int6464#10
1576# asm 2: pshufd $0x93,<xmm1=%xmm1,>xmm9=%xmm9
1577pshufd $0x93,%xmm1,%xmm9
1578
1579# qhasm: xmm10 = shuffle dwords of xmm4 by 0x93
1580# asm 1: pshufd $0x93,<xmm4=int6464#5,>xmm10=int6464#11
1581# asm 2: pshufd $0x93,<xmm4=%xmm4,>xmm10=%xmm10
1582pshufd $0x93,%xmm4,%xmm10
1583
1584# qhasm: xmm11 = shuffle dwords of xmm6 by 0x93
1585# asm 1: pshufd $0x93,<xmm6=int6464#7,>xmm11=int6464#12
1586# asm 2: pshufd $0x93,<xmm6=%xmm6,>xmm11=%xmm11
1587pshufd $0x93,%xmm6,%xmm11
1588
1589# qhasm: xmm12 = shuffle dwords of xmm3 by 0x93
1590# asm 1: pshufd $0x93,<xmm3=int6464#4,>xmm12=int6464#13
1591# asm 2: pshufd $0x93,<xmm3=%xmm3,>xmm12=%xmm12
1592pshufd $0x93,%xmm3,%xmm12
1593
1594# qhasm: xmm13 = shuffle dwords of xmm7 by 0x93
1595# asm 1: pshufd $0x93,<xmm7=int6464#8,>xmm13=int6464#14
1596# asm 2: pshufd $0x93,<xmm7=%xmm7,>xmm13=%xmm13
1597pshufd $0x93,%xmm7,%xmm13
1598
1599# qhasm: xmm14 = shuffle dwords of xmm2 by 0x93
1600# asm 1: pshufd $0x93,<xmm2=int6464#3,>xmm14=int6464#15
1601# asm 2: pshufd $0x93,<xmm2=%xmm2,>xmm14=%xmm14
1602pshufd $0x93,%xmm2,%xmm14
1603
1604# qhasm: xmm15 = shuffle dwords of xmm5 by 0x93
1605# asm 1: pshufd $0x93,<xmm5=int6464#6,>xmm15=int6464#16
1606# asm 2: pshufd $0x93,<xmm5=%xmm5,>xmm15=%xmm15
1607pshufd $0x93,%xmm5,%xmm15
1608
1609# qhasm: xmm0 ^= xmm8
1610# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
1611# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
1612pxor %xmm8,%xmm0
1613
1614# qhasm: xmm1 ^= xmm9
1615# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
1616# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
1617pxor %xmm9,%xmm1
1618
1619# qhasm: xmm4 ^= xmm10
1620# asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5
1621# asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4
1622pxor %xmm10,%xmm4
1623
1624# qhasm: xmm6 ^= xmm11
1625# asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7
1626# asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6
1627pxor %xmm11,%xmm6
1628
1629# qhasm: xmm3 ^= xmm12
1630# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4
1631# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3
1632pxor %xmm12,%xmm3
1633
1634# qhasm: xmm7 ^= xmm13
1635# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
1636# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
1637pxor %xmm13,%xmm7
1638
1639# qhasm: xmm2 ^= xmm14
1640# asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3
1641# asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2
1642pxor %xmm14,%xmm2
1643
1644# qhasm: xmm5 ^= xmm15
1645# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
1646# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
1647pxor %xmm15,%xmm5
1648
1649# qhasm: xmm8 ^= xmm5
1650# asm 1: pxor <xmm5=int6464#6,<xmm8=int6464#9
1651# asm 2: pxor <xmm5=%xmm5,<xmm8=%xmm8
1652pxor %xmm5,%xmm8
1653
1654# qhasm: xmm9 ^= xmm0
1655# asm 1: pxor <xmm0=int6464#1,<xmm9=int6464#10
1656# asm 2: pxor <xmm0=%xmm0,<xmm9=%xmm9
1657pxor %xmm0,%xmm9
1658
1659# qhasm: xmm10 ^= xmm1
1660# asm 1: pxor <xmm1=int6464#2,<xmm10=int6464#11
1661# asm 2: pxor <xmm1=%xmm1,<xmm10=%xmm10
1662pxor %xmm1,%xmm10
1663
1664# qhasm: xmm9 ^= xmm5
1665# asm 1: pxor <xmm5=int6464#6,<xmm9=int6464#10
1666# asm 2: pxor <xmm5=%xmm5,<xmm9=%xmm9
1667pxor %xmm5,%xmm9
1668
1669# qhasm: xmm11 ^= xmm4
1670# asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#12
1671# asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm11
1672pxor %xmm4,%xmm11
1673
1674# qhasm: xmm12 ^= xmm6
1675# asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#13
1676# asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm12
1677pxor %xmm6,%xmm12
1678
1679# qhasm: xmm13 ^= xmm3
1680# asm 1: pxor <xmm3=int6464#4,<xmm13=int6464#14
1681# asm 2: pxor <xmm3=%xmm3,<xmm13=%xmm13
1682pxor %xmm3,%xmm13
1683
1684# qhasm: xmm11 ^= xmm5
1685# asm 1: pxor <xmm5=int6464#6,<xmm11=int6464#12
1686# asm 2: pxor <xmm5=%xmm5,<xmm11=%xmm11
1687pxor %xmm5,%xmm11
1688
1689# qhasm: xmm14 ^= xmm7
1690# asm 1: pxor <xmm7=int6464#8,<xmm14=int6464#15
1691# asm 2: pxor <xmm7=%xmm7,<xmm14=%xmm14
1692pxor %xmm7,%xmm14
1693
1694# qhasm: xmm15 ^= xmm2
1695# asm 1: pxor <xmm2=int6464#3,<xmm15=int6464#16
1696# asm 2: pxor <xmm2=%xmm2,<xmm15=%xmm15
1697pxor %xmm2,%xmm15
1698
1699# qhasm: xmm12 ^= xmm5
1700# asm 1: pxor <xmm5=int6464#6,<xmm12=int6464#13
1701# asm 2: pxor <xmm5=%xmm5,<xmm12=%xmm12
1702pxor %xmm5,%xmm12
1703
1704# qhasm: xmm0 = shuffle dwords of xmm0 by 0x4E
1705# asm 1: pshufd $0x4E,<xmm0=int6464#1,>xmm0=int6464#1
1706# asm 2: pshufd $0x4E,<xmm0=%xmm0,>xmm0=%xmm0
1707pshufd $0x4E,%xmm0,%xmm0
1708
1709# qhasm: xmm1 = shuffle dwords of xmm1 by 0x4E
1710# asm 1: pshufd $0x4E,<xmm1=int6464#2,>xmm1=int6464#2
1711# asm 2: pshufd $0x4E,<xmm1=%xmm1,>xmm1=%xmm1
1712pshufd $0x4E,%xmm1,%xmm1
1713
1714# qhasm: xmm4 = shuffle dwords of xmm4 by 0x4E
1715# asm 1: pshufd $0x4E,<xmm4=int6464#5,>xmm4=int6464#5
1716# asm 2: pshufd $0x4E,<xmm4=%xmm4,>xmm4=%xmm4
1717pshufd $0x4E,%xmm4,%xmm4
1718
1719# qhasm: xmm6 = shuffle dwords of xmm6 by 0x4E
1720# asm 1: pshufd $0x4E,<xmm6=int6464#7,>xmm6=int6464#7
1721# asm 2: pshufd $0x4E,<xmm6=%xmm6,>xmm6=%xmm6
1722pshufd $0x4E,%xmm6,%xmm6
1723
1724# qhasm: xmm3 = shuffle dwords of xmm3 by 0x4E
1725# asm 1: pshufd $0x4E,<xmm3=int6464#4,>xmm3=int6464#4
1726# asm 2: pshufd $0x4E,<xmm3=%xmm3,>xmm3=%xmm3
1727pshufd $0x4E,%xmm3,%xmm3
1728
1729# qhasm: xmm7 = shuffle dwords of xmm7 by 0x4E
1730# asm 1: pshufd $0x4E,<xmm7=int6464#8,>xmm7=int6464#8
1731# asm 2: pshufd $0x4E,<xmm7=%xmm7,>xmm7=%xmm7
1732pshufd $0x4E,%xmm7,%xmm7
1733
1734# qhasm: xmm2 = shuffle dwords of xmm2 by 0x4E
1735# asm 1: pshufd $0x4E,<xmm2=int6464#3,>xmm2=int6464#3
1736# asm 2: pshufd $0x4E,<xmm2=%xmm2,>xmm2=%xmm2
1737pshufd $0x4E,%xmm2,%xmm2
1738
1739# qhasm: xmm5 = shuffle dwords of xmm5 by 0x4E
1740# asm 1: pshufd $0x4E,<xmm5=int6464#6,>xmm5=int6464#6
1741# asm 2: pshufd $0x4E,<xmm5=%xmm5,>xmm5=%xmm5
1742pshufd $0x4E,%xmm5,%xmm5
1743
1744# qhasm: xmm8 ^= xmm0
1745# asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9
1746# asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8
1747pxor %xmm0,%xmm8
1748
1749# qhasm: xmm9 ^= xmm1
1750# asm 1: pxor <xmm1=int6464#2,<xmm9=int6464#10
1751# asm 2: pxor <xmm1=%xmm1,<xmm9=%xmm9
1752pxor %xmm1,%xmm9
1753
1754# qhasm: xmm10 ^= xmm4
1755# asm 1: pxor <xmm4=int6464#5,<xmm10=int6464#11
1756# asm 2: pxor <xmm4=%xmm4,<xmm10=%xmm10
1757pxor %xmm4,%xmm10
1758
1759# qhasm: xmm11 ^= xmm6
1760# asm 1: pxor <xmm6=int6464#7,<xmm11=int6464#12
1761# asm 2: pxor <xmm6=%xmm6,<xmm11=%xmm11
1762pxor %xmm6,%xmm11
1763
1764# qhasm: xmm12 ^= xmm3
1765# asm 1: pxor <xmm3=int6464#4,<xmm12=int6464#13
1766# asm 2: pxor <xmm3=%xmm3,<xmm12=%xmm12
1767pxor %xmm3,%xmm12
1768
1769# qhasm: xmm13 ^= xmm7
1770# asm 1: pxor <xmm7=int6464#8,<xmm13=int6464#14
1771# asm 2: pxor <xmm7=%xmm7,<xmm13=%xmm13
1772pxor %xmm7,%xmm13
1773
1774# qhasm: xmm14 ^= xmm2
1775# asm 1: pxor <xmm2=int6464#3,<xmm14=int6464#15
1776# asm 2: pxor <xmm2=%xmm2,<xmm14=%xmm14
1777pxor %xmm2,%xmm14
1778
1779# qhasm: xmm15 ^= xmm5
1780# asm 1: pxor <xmm5=int6464#6,<xmm15=int6464#16
1781# asm 2: pxor <xmm5=%xmm5,<xmm15=%xmm15
1782pxor %xmm5,%xmm15
1783
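# note (annotation): AddRoundKey for the next round -- the bitsliced
# round keys are laid out 128 bytes apart, so this round's key sits at
# c+128..c+240 -- again followed by ShiftRows via pshufb SR. The state
# now lives in xmm8..xmm15.
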
1784# qhasm: xmm8 ^= *(int128 *)(c + 128)
1785# asm 1: pxor 128(<c=int64#5),<xmm8=int6464#9
1786# asm 2: pxor 128(<c=%r8),<xmm8=%xmm8
1787pxor 128(%r8),%xmm8
1788
1789# qhasm: shuffle bytes of xmm8 by SR
1790# asm 1: pshufb SR,<xmm8=int6464#9
1791# asm 2: pshufb SR,<xmm8=%xmm8
1792pshufb SR,%xmm8
1793
1794# qhasm: xmm9 ^= *(int128 *)(c + 144)
1795# asm 1: pxor 144(<c=int64#5),<xmm9=int6464#10
1796# asm 2: pxor 144(<c=%r8),<xmm9=%xmm9
1797pxor 144(%r8),%xmm9
1798
1799# qhasm: shuffle bytes of xmm9 by SR
1800# asm 1: pshufb SR,<xmm9=int6464#10
1801# asm 2: pshufb SR,<xmm9=%xmm9
1802pshufb SR,%xmm9
1803
1804# qhasm: xmm10 ^= *(int128 *)(c + 160)
1805# asm 1: pxor 160(<c=int64#5),<xmm10=int6464#11
1806# asm 2: pxor 160(<c=%r8),<xmm10=%xmm10
1807pxor 160(%r8),%xmm10
1808
1809# qhasm: shuffle bytes of xmm10 by SR
1810# asm 1: pshufb SR,<xmm10=int6464#11
1811# asm 2: pshufb SR,<xmm10=%xmm10
1812pshufb SR,%xmm10
1813
1814# qhasm: xmm11 ^= *(int128 *)(c + 176)
1815# asm 1: pxor 176(<c=int64#5),<xmm11=int6464#12
1816# asm 2: pxor 176(<c=%r8),<xmm11=%xmm11
1817pxor 176(%r8),%xmm11
1818
1819# qhasm: shuffle bytes of xmm11 by SR
1820# asm 1: pshufb SR,<xmm11=int6464#12
1821# asm 2: pshufb SR,<xmm11=%xmm11
1822pshufb SR,%xmm11
1823
1824# qhasm: xmm12 ^= *(int128 *)(c + 192)
1825# asm 1: pxor 192(<c=int64#5),<xmm12=int6464#13
1826# asm 2: pxor 192(<c=%r8),<xmm12=%xmm12
1827pxor 192(%r8),%xmm12
1828
1829# qhasm: shuffle bytes of xmm12 by SR
1830# asm 1: pshufb SR,<xmm12=int6464#13
1831# asm 2: pshufb SR,<xmm12=%xmm12
1832pshufb SR,%xmm12
1833
1834# qhasm: xmm13 ^= *(int128 *)(c + 208)
1835# asm 1: pxor 208(<c=int64#5),<xmm13=int6464#14
1836# asm 2: pxor 208(<c=%r8),<xmm13=%xmm13
1837pxor 208(%r8),%xmm13
1838
1839# qhasm: shuffle bytes of xmm13 by SR
1840# asm 1: pshufb SR,<xmm13=int6464#14
1841# asm 2: pshufb SR,<xmm13=%xmm13
1842pshufb SR,%xmm13
1843
1844# qhasm: xmm14 ^= *(int128 *)(c + 224)
1845# asm 1: pxor 224(<c=int64#5),<xmm14=int6464#15
1846# asm 2: pxor 224(<c=%r8),<xmm14=%xmm14
1847pxor 224(%r8),%xmm14
1848
1849# qhasm: shuffle bytes of xmm14 by SR
1850# asm 1: pshufb SR,<xmm14=int6464#15
1851# asm 2: pshufb SR,<xmm14=%xmm14
1852pshufb SR,%xmm14
1853
1854# qhasm: xmm15 ^= *(int128 *)(c + 240)
1855# asm 1: pxor 240(<c=int64#5),<xmm15=int6464#16
1856# asm 2: pxor 240(<c=%r8),<xmm15=%xmm15
1857pxor 240(%r8),%xmm15
1858
1859# qhasm: shuffle bytes of xmm15 by SR
1860# asm 1: pshufb SR,<xmm15=int6464#16
1861# asm 2: pshufb SR,<xmm15=%xmm15
1862pshufb SR,%xmm15
1863
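# note (annotation): top linear layer of this round's S-box evaluation,
# the same circuit as above but operating on xmm8..xmm15.
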
1864# qhasm: xmm13 ^= xmm14
1865# asm 1: pxor <xmm14=int6464#15,<xmm13=int6464#14
1866# asm 2: pxor <xmm14=%xmm14,<xmm13=%xmm13
1867pxor %xmm14,%xmm13
1868
1869# qhasm: xmm10 ^= xmm9
1870# asm 1: pxor <xmm9=int6464#10,<xmm10=int6464#11
1871# asm 2: pxor <xmm9=%xmm9,<xmm10=%xmm10
1872pxor %xmm9,%xmm10
1873
1874# qhasm: xmm13 ^= xmm8
1875# asm 1: pxor <xmm8=int6464#9,<xmm13=int6464#14
1876# asm 2: pxor <xmm8=%xmm8,<xmm13=%xmm13
1877pxor %xmm8,%xmm13
1878
1879# qhasm: xmm14 ^= xmm10
1880# asm 1: pxor <xmm10=int6464#11,<xmm14=int6464#15
1881# asm 2: pxor <xmm10=%xmm10,<xmm14=%xmm14
1882pxor %xmm10,%xmm14
1883
1884# qhasm: xmm11 ^= xmm8
1885# asm 1: pxor <xmm8=int6464#9,<xmm11=int6464#12
1886# asm 2: pxor <xmm8=%xmm8,<xmm11=%xmm11
1887pxor %xmm8,%xmm11
1888
1889# qhasm: xmm14 ^= xmm11
1890# asm 1: pxor <xmm11=int6464#12,<xmm14=int6464#15
1891# asm 2: pxor <xmm11=%xmm11,<xmm14=%xmm14
1892pxor %xmm11,%xmm14
1893
1894# qhasm: xmm11 ^= xmm15
1895# asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#12
1896# asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm11
1897pxor %xmm15,%xmm11
1898
1899# qhasm: xmm11 ^= xmm12
1900# asm 1: pxor <xmm12=int6464#13,<xmm11=int6464#12
1901# asm 2: pxor <xmm12=%xmm12,<xmm11=%xmm11
1902pxor %xmm12,%xmm11
1903
1904# qhasm: xmm15 ^= xmm13
1905# asm 1: pxor <xmm13=int6464#14,<xmm15=int6464#16
1906# asm 2: pxor <xmm13=%xmm13,<xmm15=%xmm15
1907pxor %xmm13,%xmm15
1908
1909# qhasm: xmm11 ^= xmm9
1910# asm 1: pxor <xmm9=int6464#10,<xmm11=int6464#12
1911# asm 2: pxor <xmm9=%xmm9,<xmm11=%xmm11
1912pxor %xmm9,%xmm11
1913
1914# qhasm: xmm12 ^= xmm13
1915# asm 1: pxor <xmm13=int6464#14,<xmm12=int6464#13
1916# asm 2: pxor <xmm13=%xmm13,<xmm12=%xmm12
1917pxor %xmm13,%xmm12
1918
1919# qhasm: xmm10 ^= xmm15
1920# asm 1: pxor <xmm15=int6464#16,<xmm10=int6464#11
1921# asm 2: pxor <xmm15=%xmm15,<xmm10=%xmm10
1922pxor %xmm15,%xmm10
1923
1924# qhasm: xmm9 ^= xmm13
1925# asm 1: pxor <xmm13=int6464#14,<xmm9=int6464#10
1926# asm 2: pxor <xmm13=%xmm13,<xmm9=%xmm9
1927pxor %xmm13,%xmm9
1928
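# note (annotation): the movdqa copies below appear to move
# intermediates into the low registers for the nonlinear section; the
# qhasm names (xmm3 = xmm15 etc.) no longer match the physical
# registers because of qhasm's register allocation, which the
# "asm 1"/"asm 2" comment pairs make explicit.
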
1929# qhasm: xmm3 = xmm15
1930# asm 1: movdqa <xmm15=int6464#16,>xmm3=int6464#1
1931# asm 2: movdqa <xmm15=%xmm15,>xmm3=%xmm0
1932movdqa %xmm15,%xmm0
1933
1934# qhasm: xmm2 = xmm9
1935# asm 1: movdqa <xmm9=int6464#10,>xmm2=int6464#2
1936# asm 2: movdqa <xmm9=%xmm9,>xmm2=%xmm1
1937movdqa %xmm9,%xmm1
1938
1939# qhasm: xmm1 = xmm13
1940# asm 1: movdqa <xmm13=int6464#14,>xmm1=int6464#3
1941# asm 2: movdqa <xmm13=%xmm13,>xmm1=%xmm2
1942movdqa %xmm13,%xmm2
1943
1944# qhasm: xmm5 = xmm10
1945# asm 1: movdqa <xmm10=int6464#11,>xmm5=int6464#4
1946# asm 2: movdqa <xmm10=%xmm10,>xmm5=%xmm3
1947movdqa %xmm10,%xmm3
1948
1949# qhasm: xmm4 = xmm14
1950# asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#5
1951# asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm4
1952movdqa %xmm14,%xmm4
1953
1954# qhasm: xmm3 ^= xmm12
1955# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#1
1956# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm0
1957pxor %xmm12,%xmm0
1958
1959# qhasm: xmm2 ^= xmm10
1960# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#2
1961# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm1
1962pxor %xmm10,%xmm1
1963
1964# qhasm: xmm1 ^= xmm11
1965# asm 1: pxor <xmm11=int6464#12,<xmm1=int6464#3
1966# asm 2: pxor <xmm11=%xmm11,<xmm1=%xmm2
1967pxor %xmm11,%xmm2
1968
1969# qhasm: xmm5 ^= xmm12
1970# asm 1: pxor <xmm12=int6464#13,<xmm5=int6464#4
1971# asm 2: pxor <xmm12=%xmm12,<xmm5=%xmm3
1972pxor %xmm12,%xmm3
1973
1974# qhasm: xmm4 ^= xmm8
1975# asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#5
1976# asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm4
1977pxor %xmm8,%xmm4
1978
1979# qhasm: xmm6 = xmm3
1980# asm 1: movdqa <xmm3=int6464#1,>xmm6=int6464#6
1981# asm 2: movdqa <xmm3=%xmm0,>xmm6=%xmm5
1982movdqa %xmm0,%xmm5
1983
1984# qhasm: xmm0 = xmm2
1985# asm 1: movdqa <xmm2=int6464#2,>xmm0=int6464#7
1986# asm 2: movdqa <xmm2=%xmm1,>xmm0=%xmm6
1987movdqa %xmm1,%xmm6
1988
1989# qhasm: xmm7 = xmm3
1990# asm 1: movdqa <xmm3=int6464#1,>xmm7=int6464#8
1991# asm 2: movdqa <xmm3=%xmm0,>xmm7=%xmm7
1992movdqa %xmm0,%xmm7
1993
1994# qhasm: xmm2 |= xmm1
1995# asm 1: por <xmm1=int6464#3,<xmm2=int6464#2
1996# asm 2: por <xmm1=%xmm2,<xmm2=%xmm1
1997por %xmm2,%xmm1
1998
1999# qhasm: xmm3 |= xmm4
2000# asm 1: por <xmm4=int6464#5,<xmm3=int6464#1
2001# asm 2: por <xmm4=%xmm4,<xmm3=%xmm0
2002por %xmm4,%xmm0
2003
2004# qhasm: xmm7 ^= xmm0
2005# asm 1: pxor <xmm0=int6464#7,<xmm7=int6464#8
2006# asm 2: pxor <xmm0=%xmm6,<xmm7=%xmm7
2007pxor %xmm6,%xmm7
2008
2009# qhasm: xmm6 &= xmm4
2010# asm 1: pand <xmm4=int6464#5,<xmm6=int6464#6
2011# asm 2: pand <xmm4=%xmm4,<xmm6=%xmm5
2012pand %xmm4,%xmm5
2013
2014# qhasm: xmm0 &= xmm1
2015# asm 1: pand <xmm1=int6464#3,<xmm0=int6464#7
2016# asm 2: pand <xmm1=%xmm2,<xmm0=%xmm6
2017pand %xmm2,%xmm6
2018
2019# qhasm: xmm4 ^= xmm1
2020# asm 1: pxor <xmm1=int6464#3,<xmm4=int6464#5
2021# asm 2: pxor <xmm1=%xmm2,<xmm4=%xmm4
2022pxor %xmm2,%xmm4
2023
2024# qhasm: xmm7 &= xmm4
2025# asm 1: pand <xmm4=int6464#5,<xmm7=int6464#8
2026# asm 2: pand <xmm4=%xmm4,<xmm7=%xmm7
2027pand %xmm4,%xmm7
2028
2029# qhasm: xmm4 = xmm11
2030# asm 1: movdqa <xmm11=int6464#12,>xmm4=int6464#3
2031# asm 2: movdqa <xmm11=%xmm11,>xmm4=%xmm2
2032movdqa %xmm11,%xmm2
2033
2034# qhasm: xmm4 ^= xmm8
2035# asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#3
2036# asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm2
2037pxor %xmm8,%xmm2
2038
2039# qhasm: xmm5 &= xmm4
2040# asm 1: pand <xmm4=int6464#3,<xmm5=int6464#4
2041# asm 2: pand <xmm4=%xmm2,<xmm5=%xmm3
2042pand %xmm2,%xmm3
2043
2044# qhasm: xmm3 ^= xmm5
2045# asm 1: pxor <xmm5=int6464#4,<xmm3=int6464#1
2046# asm 2: pxor <xmm5=%xmm3,<xmm3=%xmm0
2047pxor %xmm3,%xmm0
2048
2049# qhasm: xmm2 ^= xmm5
2050# asm 1: pxor <xmm5=int6464#4,<xmm2=int6464#2
2051# asm 2: pxor <xmm5=%xmm3,<xmm2=%xmm1
2052pxor %xmm3,%xmm1
2053
2054# qhasm: xmm5 = xmm15
2055# asm 1: movdqa <xmm15=int6464#16,>xmm5=int6464#3
2056# asm 2: movdqa <xmm15=%xmm15,>xmm5=%xmm2
2057movdqa %xmm15,%xmm2
2058
2059# qhasm: xmm5 ^= xmm9
2060# asm 1: pxor <xmm9=int6464#10,<xmm5=int6464#3
2061# asm 2: pxor <xmm9=%xmm9,<xmm5=%xmm2
2062pxor %xmm9,%xmm2
2063
2064# qhasm: xmm4 = xmm13
2065# asm 1: movdqa <xmm13=int6464#14,>xmm4=int6464#4
2066# asm 2: movdqa <xmm13=%xmm13,>xmm4=%xmm3
2067movdqa %xmm13,%xmm3
2068
2069# qhasm: xmm1 = xmm5
2070# asm 1: movdqa <xmm5=int6464#3,>xmm1=int6464#5
2071# asm 2: movdqa <xmm5=%xmm2,>xmm1=%xmm4
2072movdqa %xmm2,%xmm4
2073
2074# qhasm: xmm4 ^= xmm14
2075# asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#4
2076# asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm3
2077pxor %xmm14,%xmm3
2078
2079# qhasm: xmm1 |= xmm4
2080# asm 1: por <xmm4=int6464#4,<xmm1=int6464#5
2081# asm 2: por <xmm4=%xmm3,<xmm1=%xmm4
2082por %xmm3,%xmm4
2083
2084# qhasm: xmm5 &= xmm4
2085# asm 1: pand <xmm4=int6464#4,<xmm5=int6464#3
2086# asm 2: pand <xmm4=%xmm3,<xmm5=%xmm2
2087pand %xmm3,%xmm2
2088
2089# qhasm: xmm0 ^= xmm5
2090# asm 1: pxor <xmm5=int6464#3,<xmm0=int6464#7
2091# asm 2: pxor <xmm5=%xmm2,<xmm0=%xmm6
2092pxor %xmm2,%xmm6
2093
2094# qhasm: xmm3 ^= xmm7
2095# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#1
2096# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm0
2097pxor %xmm7,%xmm0
2098
2099# qhasm: xmm2 ^= xmm6
2100# asm 1: pxor <xmm6=int6464#6,<xmm2=int6464#2
2101# asm 2: pxor <xmm6=%xmm5,<xmm2=%xmm1
2102pxor %xmm5,%xmm1
2103
2104# qhasm: xmm1 ^= xmm7
2105# asm 1: pxor <xmm7=int6464#8,<xmm1=int6464#5
2106# asm 2: pxor <xmm7=%xmm7,<xmm1=%xmm4
2107pxor %xmm7,%xmm4
2108
2109# qhasm: xmm0 ^= xmm6
2110# asm 1: pxor <xmm6=int6464#6,<xmm0=int6464#7
2111# asm 2: pxor <xmm6=%xmm5,<xmm0=%xmm6
2112pxor %xmm5,%xmm6
2113
2114# qhasm: xmm1 ^= xmm6
2115# asm 1: pxor <xmm6=int6464#6,<xmm1=int6464#5
2116# asm 2: pxor <xmm6=%xmm5,<xmm1=%xmm4
2117pxor %xmm5,%xmm4
2118
2119# qhasm: xmm4 = xmm10
2120# asm 1: movdqa <xmm10=int6464#11,>xmm4=int6464#3
2121# asm 2: movdqa <xmm10=%xmm10,>xmm4=%xmm2
2122movdqa %xmm10,%xmm2
2123
2124# qhasm: xmm5 = xmm12
2125# asm 1: movdqa <xmm12=int6464#13,>xmm5=int6464#4
2126# asm 2: movdqa <xmm12=%xmm12,>xmm5=%xmm3
2127movdqa %xmm12,%xmm3
2128
2129# qhasm: xmm6 = xmm9
2130# asm 1: movdqa <xmm9=int6464#10,>xmm6=int6464#6
2131# asm 2: movdqa <xmm9=%xmm9,>xmm6=%xmm5
2132movdqa %xmm9,%xmm5
2133
2134# qhasm: xmm7 = xmm15
2135# asm 1: movdqa <xmm15=int6464#16,>xmm7=int6464#8
2136# asm 2: movdqa <xmm15=%xmm15,>xmm7=%xmm7
2137movdqa %xmm15,%xmm7
2138
2139# qhasm: xmm4 &= xmm11
2140# asm 1: pand <xmm11=int6464#12,<xmm4=int6464#3
2141# asm 2: pand <xmm11=%xmm11,<xmm4=%xmm2
2142pand %xmm11,%xmm2
2143
2144# qhasm: xmm5 &= xmm8
2145# asm 1: pand <xmm8=int6464#9,<xmm5=int6464#4
2146# asm 2: pand <xmm8=%xmm8,<xmm5=%xmm3
2147pand %xmm8,%xmm3
2148
2149# qhasm: xmm6 &= xmm13
2150# asm 1: pand <xmm13=int6464#14,<xmm6=int6464#6
2151# asm 2: pand <xmm13=%xmm13,<xmm6=%xmm5
2152pand %xmm13,%xmm5
2153
2154# qhasm: xmm7 |= xmm14
2155# asm 1: por <xmm14=int6464#15,<xmm7=int6464#8
2156# asm 2: por <xmm14=%xmm14,<xmm7=%xmm7
2157por %xmm14,%xmm7
2158
2159# qhasm: xmm3 ^= xmm4
2160# asm 1: pxor <xmm4=int6464#3,<xmm3=int6464#1
2161# asm 2: pxor <xmm4=%xmm2,<xmm3=%xmm0
2162pxor %xmm2,%xmm0
2163
2164# qhasm: xmm2 ^= xmm5
2165# asm 1: pxor <xmm5=int6464#4,<xmm2=int6464#2
2166# asm 2: pxor <xmm5=%xmm3,<xmm2=%xmm1
2167pxor %xmm3,%xmm1
2168
2169# qhasm: xmm1 ^= xmm6
2170# asm 1: pxor <xmm6=int6464#6,<xmm1=int6464#5
2171# asm 2: pxor <xmm6=%xmm5,<xmm1=%xmm4
2172pxor %xmm5,%xmm4
2173
2174# qhasm: xmm0 ^= xmm7
2175# asm 1: pxor <xmm7=int6464#8,<xmm0=int6464#7
2176# asm 2: pxor <xmm7=%xmm7,<xmm0=%xmm6
2177pxor %xmm7,%xmm6
2178
2179# qhasm: xmm4 = xmm3
2180# asm 1: movdqa <xmm3=int6464#1,>xmm4=int6464#3
2181# asm 2: movdqa <xmm3=%xmm0,>xmm4=%xmm2
2182movdqa %xmm0,%xmm2
2183
2184# qhasm: xmm4 ^= xmm2
2185# asm 1: pxor <xmm2=int6464#2,<xmm4=int6464#3
2186# asm 2: pxor <xmm2=%xmm1,<xmm4=%xmm2
2187pxor %xmm1,%xmm2
2188
2189# qhasm: xmm3 &= xmm1
2190# asm 1: pand <xmm1=int6464#5,<xmm3=int6464#1
2191# asm 2: pand <xmm1=%xmm4,<xmm3=%xmm0
2192pand %xmm4,%xmm0
2193
2194# qhasm: xmm6 = xmm0
2195# asm 1: movdqa <xmm0=int6464#7,>xmm6=int6464#4
2196# asm 2: movdqa <xmm0=%xmm6,>xmm6=%xmm3
2197movdqa %xmm6,%xmm3
2198
2199# qhasm: xmm6 ^= xmm3
2200# asm 1: pxor <xmm3=int6464#1,<xmm6=int6464#4
2201# asm 2: pxor <xmm3=%xmm0,<xmm6=%xmm3
2202pxor %xmm0,%xmm3
2203
2204# qhasm: xmm7 = xmm4
2205# asm 1: movdqa <xmm4=int6464#3,>xmm7=int6464#6
2206# asm 2: movdqa <xmm4=%xmm2,>xmm7=%xmm5
2207movdqa %xmm2,%xmm5
2208
2209# qhasm: xmm7 &= xmm6
2210# asm 1: pand <xmm6=int6464#4,<xmm7=int6464#6
2211# asm 2: pand <xmm6=%xmm3,<xmm7=%xmm5
2212pand %xmm3,%xmm5
2213
2214# qhasm: xmm7 ^= xmm2
2215# asm 1: pxor <xmm2=int6464#2,<xmm7=int6464#6
2216# asm 2: pxor <xmm2=%xmm1,<xmm7=%xmm5
2217pxor %xmm1,%xmm5
2218
2219# qhasm: xmm5 = xmm1
2220# asm 1: movdqa <xmm1=int6464#5,>xmm5=int6464#8
2221# asm 2: movdqa <xmm1=%xmm4,>xmm5=%xmm7
2222movdqa %xmm4,%xmm7
2223
2224# qhasm: xmm5 ^= xmm0
2225# asm 1: pxor <xmm0=int6464#7,<xmm5=int6464#8
2226# asm 2: pxor <xmm0=%xmm6,<xmm5=%xmm7
2227pxor %xmm6,%xmm7
2228
2229# qhasm: xmm3 ^= xmm2
2230# asm 1: pxor <xmm2=int6464#2,<xmm3=int6464#1
2231# asm 2: pxor <xmm2=%xmm1,<xmm3=%xmm0
2232pxor %xmm1,%xmm0
2233
2234# qhasm: xmm5 &= xmm3
2235# asm 1: pand <xmm3=int6464#1,<xmm5=int6464#8
2236# asm 2: pand <xmm3=%xmm0,<xmm5=%xmm7
2237pand %xmm0,%xmm7
2238
2239# qhasm: xmm5 ^= xmm0
2240# asm 1: pxor <xmm0=int6464#7,<xmm5=int6464#8
2241# asm 2: pxor <xmm0=%xmm6,<xmm5=%xmm7
2242pxor %xmm6,%xmm7
2243
2244# qhasm: xmm1 ^= xmm5
2245# asm 1: pxor <xmm5=int6464#8,<xmm1=int6464#5
2246# asm 2: pxor <xmm5=%xmm7,<xmm1=%xmm4
2247pxor %xmm7,%xmm4
2248
2249# qhasm: xmm2 = xmm6
2250# asm 1: movdqa <xmm6=int6464#4,>xmm2=int6464#1
2251# asm 2: movdqa <xmm6=%xmm3,>xmm2=%xmm0
2252movdqa %xmm3,%xmm0
2253
2254# qhasm: xmm2 ^= xmm5
2255# asm 1: pxor <xmm5=int6464#8,<xmm2=int6464#1
2256# asm 2: pxor <xmm5=%xmm7,<xmm2=%xmm0
2257pxor %xmm7,%xmm0
2258
2259# qhasm: xmm2 &= xmm0
2260# asm 1: pand <xmm0=int6464#7,<xmm2=int6464#1
2261# asm 2: pand <xmm0=%xmm6,<xmm2=%xmm0
2262pand %xmm6,%xmm0
2263
2264# qhasm: xmm1 ^= xmm2
2265# asm 1: pxor <xmm2=int6464#1,<xmm1=int6464#5
2266# asm 2: pxor <xmm2=%xmm0,<xmm1=%xmm4
2267pxor %xmm0,%xmm4
2268
2269# qhasm: xmm6 ^= xmm2
2270# asm 1: pxor <xmm2=int6464#1,<xmm6=int6464#4
2271# asm 2: pxor <xmm2=%xmm0,<xmm6=%xmm3
2272pxor %xmm0,%xmm3
2273
2274# qhasm: xmm6 &= xmm7
2275# asm 1: pand <xmm7=int6464#6,<xmm6=int6464#4
2276# asm 2: pand <xmm7=%xmm5,<xmm6=%xmm3
2277pand %xmm5,%xmm3
2278
2279# qhasm: xmm6 ^= xmm4
2280# asm 1: pxor <xmm4=int6464#3,<xmm6=int6464#4
2281# asm 2: pxor <xmm4=%xmm2,<xmm6=%xmm3
2282pxor %xmm2,%xmm3
2283
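# Multiply the inverter outputs back into the first group of state slices
# (xmm14/xmm13, then xmm8/xmm11): each block of three pands plus surrounding
# pxors below appears to be one GF(2^2) multiplication.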
2284# qhasm: xmm4 = xmm14
2285# asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#1
2286# asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm0
2287movdqa %xmm14,%xmm0
2288
2289# qhasm: xmm0 = xmm13
2290# asm 1: movdqa <xmm13=int6464#14,>xmm0=int6464#2
2291# asm 2: movdqa <xmm13=%xmm13,>xmm0=%xmm1
2292movdqa %xmm13,%xmm1
2293
2294# qhasm: xmm2 = xmm7
2295# asm 1: movdqa <xmm7=int6464#6,>xmm2=int6464#3
2296# asm 2: movdqa <xmm7=%xmm5,>xmm2=%xmm2
2297movdqa %xmm5,%xmm2
2298
2299# qhasm: xmm2 ^= xmm6
2300# asm 1: pxor <xmm6=int6464#4,<xmm2=int6464#3
2301# asm 2: pxor <xmm6=%xmm3,<xmm2=%xmm2
2302pxor %xmm3,%xmm2
2303
2304# qhasm: xmm2 &= xmm14
2305# asm 1: pand <xmm14=int6464#15,<xmm2=int6464#3
2306# asm 2: pand <xmm14=%xmm14,<xmm2=%xmm2
2307pand %xmm14,%xmm2
2308
2309# qhasm: xmm14 ^= xmm13
2310# asm 1: pxor <xmm13=int6464#14,<xmm14=int6464#15
2311# asm 2: pxor <xmm13=%xmm13,<xmm14=%xmm14
2312pxor %xmm13,%xmm14
2313
2314# qhasm: xmm14 &= xmm6
2315# asm 1: pand <xmm6=int6464#4,<xmm14=int6464#15
2316# asm 2: pand <xmm6=%xmm3,<xmm14=%xmm14
2317pand %xmm3,%xmm14
2318
2319# qhasm: xmm13 &= xmm7
2320# asm 1: pand <xmm7=int6464#6,<xmm13=int6464#14
2321# asm 2: pand <xmm7=%xmm5,<xmm13=%xmm13
2322pand %xmm5,%xmm13
2323
2324# qhasm: xmm14 ^= xmm13
2325# asm 1: pxor <xmm13=int6464#14,<xmm14=int6464#15
2326# asm 2: pxor <xmm13=%xmm13,<xmm14=%xmm14
2327pxor %xmm13,%xmm14
2328
2329# qhasm: xmm13 ^= xmm2
2330# asm 1: pxor <xmm2=int6464#3,<xmm13=int6464#14
2331# asm 2: pxor <xmm2=%xmm2,<xmm13=%xmm13
2332pxor %xmm2,%xmm13
2333
2334# qhasm: xmm4 ^= xmm8
2335# asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#1
2336# asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm0
2337pxor %xmm8,%xmm0
2338
2339# qhasm: xmm0 ^= xmm11
2340# asm 1: pxor <xmm11=int6464#12,<xmm0=int6464#2
2341# asm 2: pxor <xmm11=%xmm11,<xmm0=%xmm1
2342pxor %xmm11,%xmm1
2343
2344# qhasm: xmm7 ^= xmm5
2345# asm 1: pxor <xmm5=int6464#8,<xmm7=int6464#6
2346# asm 2: pxor <xmm5=%xmm7,<xmm7=%xmm5
2347pxor %xmm7,%xmm5
2348
2349# qhasm: xmm6 ^= xmm1
2350# asm 1: pxor <xmm1=int6464#5,<xmm6=int6464#4
2351# asm 2: pxor <xmm1=%xmm4,<xmm6=%xmm3
2352pxor %xmm4,%xmm3
2353
2354# qhasm: xmm3 = xmm7
2355# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
2356# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
2357movdqa %xmm5,%xmm2
2358
2359# qhasm: xmm3 ^= xmm6
2360# asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3
2361# asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2
2362pxor %xmm3,%xmm2
2363
2364# qhasm: xmm3 &= xmm4
2365# asm 1: pand <xmm4=int6464#1,<xmm3=int6464#3
2366# asm 2: pand <xmm4=%xmm0,<xmm3=%xmm2
2367pand %xmm0,%xmm2
2368
2369# qhasm: xmm4 ^= xmm0
2370# asm 1: pxor <xmm0=int6464#2,<xmm4=int6464#1
2371# asm 2: pxor <xmm0=%xmm1,<xmm4=%xmm0
2372pxor %xmm1,%xmm0
2373
2374# qhasm: xmm4 &= xmm6
2375# asm 1: pand <xmm6=int6464#4,<xmm4=int6464#1
2376# asm 2: pand <xmm6=%xmm3,<xmm4=%xmm0
2377pand %xmm3,%xmm0
2378
2379# qhasm: xmm0 &= xmm7
2380# asm 1: pand <xmm7=int6464#6,<xmm0=int6464#2
2381# asm 2: pand <xmm7=%xmm5,<xmm0=%xmm1
2382pand %xmm5,%xmm1
2383
2384# qhasm: xmm0 ^= xmm4
2385# asm 1: pxor <xmm4=int6464#1,<xmm0=int6464#2
2386# asm 2: pxor <xmm4=%xmm0,<xmm0=%xmm1
2387pxor %xmm0,%xmm1
2388
2389# qhasm: xmm4 ^= xmm3
2390# asm 1: pxor <xmm3=int6464#3,<xmm4=int6464#1
2391# asm 2: pxor <xmm3=%xmm2,<xmm4=%xmm0
2392pxor %xmm2,%xmm0
2393
2394# qhasm: xmm2 = xmm5
2395# asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3
2396# asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2
2397movdqa %xmm7,%xmm2
2398
2399# qhasm: xmm2 ^= xmm1
2400# asm 1: pxor <xmm1=int6464#5,<xmm2=int6464#3
2401# asm 2: pxor <xmm1=%xmm4,<xmm2=%xmm2
2402pxor %xmm4,%xmm2
2403
2404# qhasm: xmm2 &= xmm8
2405# asm 1: pand <xmm8=int6464#9,<xmm2=int6464#3
2406# asm 2: pand <xmm8=%xmm8,<xmm2=%xmm2
2407pand %xmm8,%xmm2
2408
2409# qhasm: xmm8 ^= xmm11
2410# asm 1: pxor <xmm11=int6464#12,<xmm8=int6464#9
2411# asm 2: pxor <xmm11=%xmm11,<xmm8=%xmm8
2412pxor %xmm11,%xmm8
2413
2414# qhasm: xmm8 &= xmm1
2415# asm 1: pand <xmm1=int6464#5,<xmm8=int6464#9
2416# asm 2: pand <xmm1=%xmm4,<xmm8=%xmm8
2417pand %xmm4,%xmm8
2418
2419# qhasm: xmm11 &= xmm5
2420# asm 1: pand <xmm5=int6464#8,<xmm11=int6464#12
2421# asm 2: pand <xmm5=%xmm7,<xmm11=%xmm11
2422pand %xmm7,%xmm11
2423
2424# qhasm: xmm8 ^= xmm11
2425# asm 1: pxor <xmm11=int6464#12,<xmm8=int6464#9
2426# asm 2: pxor <xmm11=%xmm11,<xmm8=%xmm8
2427pxor %xmm11,%xmm8
2428
2429# qhasm: xmm11 ^= xmm2
2430# asm 1: pxor <xmm2=int6464#3,<xmm11=int6464#12
2431# asm 2: pxor <xmm2=%xmm2,<xmm11=%xmm11
2432pxor %xmm2,%xmm11
2433
2434# qhasm: xmm14 ^= xmm4
2435# asm 1: pxor <xmm4=int6464#1,<xmm14=int6464#15
2436# asm 2: pxor <xmm4=%xmm0,<xmm14=%xmm14
2437pxor %xmm0,%xmm14
2438
2439# qhasm: xmm8 ^= xmm4
2440# asm 1: pxor <xmm4=int6464#1,<xmm8=int6464#9
2441# asm 2: pxor <xmm4=%xmm0,<xmm8=%xmm8
2442pxor %xmm0,%xmm8
2443
2444# qhasm: xmm13 ^= xmm0
2445# asm 1: pxor <xmm0=int6464#2,<xmm13=int6464#14
2446# asm 2: pxor <xmm0=%xmm1,<xmm13=%xmm13
2447pxor %xmm1,%xmm13
2448
2449# qhasm: xmm11 ^= xmm0
2450# asm 1: pxor <xmm0=int6464#2,<xmm11=int6464#12
2451# asm 2: pxor <xmm0=%xmm1,<xmm11=%xmm11
2452pxor %xmm1,%xmm11
2453
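# Same multiply pattern for the second group of slices (xmm15/xmm9, then
# xmm12/xmm10).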
2454# qhasm: xmm4 = xmm15
2455# asm 1: movdqa <xmm15=int6464#16,>xmm4=int6464#1
2456# asm 2: movdqa <xmm15=%xmm15,>xmm4=%xmm0
2457movdqa %xmm15,%xmm0
2458
2459# qhasm: xmm0 = xmm9
2460# asm 1: movdqa <xmm9=int6464#10,>xmm0=int6464#2
2461# asm 2: movdqa <xmm9=%xmm9,>xmm0=%xmm1
2462movdqa %xmm9,%xmm1
2463
2464# qhasm: xmm4 ^= xmm12
2465# asm 1: pxor <xmm12=int6464#13,<xmm4=int6464#1
2466# asm 2: pxor <xmm12=%xmm12,<xmm4=%xmm0
2467pxor %xmm12,%xmm0
2468
2469# qhasm: xmm0 ^= xmm10
2470# asm 1: pxor <xmm10=int6464#11,<xmm0=int6464#2
2471# asm 2: pxor <xmm10=%xmm10,<xmm0=%xmm1
2472pxor %xmm10,%xmm1
2473
2474# qhasm: xmm3 = xmm7
2475# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
2476# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
2477movdqa %xmm5,%xmm2
2478
2479# qhasm: xmm3 ^= xmm6
2480# asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3
2481# asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2
2482pxor %xmm3,%xmm2
2483
2484# qhasm: xmm3 &= xmm4
2485# asm 1: pand <xmm4=int6464#1,<xmm3=int6464#3
2486# asm 2: pand <xmm4=%xmm0,<xmm3=%xmm2
2487pand %xmm0,%xmm2
2488
2489# qhasm: xmm4 ^= xmm0
2490# asm 1: pxor <xmm0=int6464#2,<xmm4=int6464#1
2491# asm 2: pxor <xmm0=%xmm1,<xmm4=%xmm0
2492pxor %xmm1,%xmm0
2493
2494# qhasm: xmm4 &= xmm6
2495# asm 1: pand <xmm6=int6464#4,<xmm4=int6464#1
2496# asm 2: pand <xmm6=%xmm3,<xmm4=%xmm0
2497pand %xmm3,%xmm0
2498
2499# qhasm: xmm0 &= xmm7
2500# asm 1: pand <xmm7=int6464#6,<xmm0=int6464#2
2501# asm 2: pand <xmm7=%xmm5,<xmm0=%xmm1
2502pand %xmm5,%xmm1
2503
2504# qhasm: xmm0 ^= xmm4
2505# asm 1: pxor <xmm4=int6464#1,<xmm0=int6464#2
2506# asm 2: pxor <xmm4=%xmm0,<xmm0=%xmm1
2507pxor %xmm0,%xmm1
2508
2509# qhasm: xmm4 ^= xmm3
2510# asm 1: pxor <xmm3=int6464#3,<xmm4=int6464#1
2511# asm 2: pxor <xmm3=%xmm2,<xmm4=%xmm0
2512pxor %xmm2,%xmm0
2513
2514# qhasm: xmm2 = xmm5
2515# asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3
2516# asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2
2517movdqa %xmm7,%xmm2
2518
2519# qhasm: xmm2 ^= xmm1
2520# asm 1: pxor <xmm1=int6464#5,<xmm2=int6464#3
2521# asm 2: pxor <xmm1=%xmm4,<xmm2=%xmm2
2522pxor %xmm4,%xmm2
2523
2524# qhasm: xmm2 &= xmm12
2525# asm 1: pand <xmm12=int6464#13,<xmm2=int6464#3
2526# asm 2: pand <xmm12=%xmm12,<xmm2=%xmm2
2527pand %xmm12,%xmm2
2528
2529# qhasm: xmm12 ^= xmm10
2530# asm 1: pxor <xmm10=int6464#11,<xmm12=int6464#13
2531# asm 2: pxor <xmm10=%xmm10,<xmm12=%xmm12
2532pxor %xmm10,%xmm12
2533
2534# qhasm: xmm12 &= xmm1
2535# asm 1: pand <xmm1=int6464#5,<xmm12=int6464#13
2536# asm 2: pand <xmm1=%xmm4,<xmm12=%xmm12
2537pand %xmm4,%xmm12
2538
2539# qhasm: xmm10 &= xmm5
2540# asm 1: pand <xmm5=int6464#8,<xmm10=int6464#11
2541# asm 2: pand <xmm5=%xmm7,<xmm10=%xmm10
2542pand %xmm7,%xmm10
2543
2544# qhasm: xmm12 ^= xmm10
2545# asm 1: pxor <xmm10=int6464#11,<xmm12=int6464#13
2546# asm 2: pxor <xmm10=%xmm10,<xmm12=%xmm12
2547pxor %xmm10,%xmm12
2548
2549# qhasm: xmm10 ^= xmm2
2550# asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#11
2551# asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm10
2552pxor %xmm2,%xmm10
2553
2554# qhasm: xmm7 ^= xmm5
2555# asm 1: pxor <xmm5=int6464#8,<xmm7=int6464#6
2556# asm 2: pxor <xmm5=%xmm7,<xmm7=%xmm5
2557pxor %xmm7,%xmm5
2558
2559# qhasm: xmm6 ^= xmm1
2560# asm 1: pxor <xmm1=int6464#5,<xmm6=int6464#4
2561# asm 2: pxor <xmm1=%xmm4,<xmm6=%xmm3
2562pxor %xmm4,%xmm3
2563
2564# qhasm: xmm3 = xmm7
2565# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
2566# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
2567movdqa %xmm5,%xmm2
2568
2569# qhasm: xmm3 ^= xmm6
2570# asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3
2571# asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2
2572pxor %xmm3,%xmm2
2573
2574# qhasm: xmm3 &= xmm15
2575# asm 1: pand <xmm15=int6464#16,<xmm3=int6464#3
2576# asm 2: pand <xmm15=%xmm15,<xmm3=%xmm2
2577pand %xmm15,%xmm2
2578
2579# qhasm: xmm15 ^= xmm9
2580# asm 1: pxor <xmm9=int6464#10,<xmm15=int6464#16
2581# asm 2: pxor <xmm9=%xmm9,<xmm15=%xmm15
2582pxor %xmm9,%xmm15
2583
2584# qhasm: xmm15 &= xmm6
2585# asm 1: pand <xmm6=int6464#4,<xmm15=int6464#16
2586# asm 2: pand <xmm6=%xmm3,<xmm15=%xmm15
2587pand %xmm3,%xmm15
2588
2589# qhasm: xmm9 &= xmm7
2590# asm 1: pand <xmm7=int6464#6,<xmm9=int6464#10
2591# asm 2: pand <xmm7=%xmm5,<xmm9=%xmm9
2592pand %xmm5,%xmm9
2593
2594# qhasm: xmm15 ^= xmm9
2595# asm 1: pxor <xmm9=int6464#10,<xmm15=int6464#16
2596# asm 2: pxor <xmm9=%xmm9,<xmm15=%xmm15
2597pxor %xmm9,%xmm15
2598
2599# qhasm: xmm9 ^= xmm3
2600# asm 1: pxor <xmm3=int6464#3,<xmm9=int6464#10
2601# asm 2: pxor <xmm3=%xmm2,<xmm9=%xmm9
2602pxor %xmm2,%xmm9
2603
2604# qhasm: xmm15 ^= xmm4
2605# asm 1: pxor <xmm4=int6464#1,<xmm15=int6464#16
2606# asm 2: pxor <xmm4=%xmm0,<xmm15=%xmm15
2607pxor %xmm0,%xmm15
2608
2609# qhasm: xmm12 ^= xmm4
2610# asm 1: pxor <xmm4=int6464#1,<xmm12=int6464#13
2611# asm 2: pxor <xmm4=%xmm0,<xmm12=%xmm12
2612pxor %xmm0,%xmm12
2613
2614# qhasm: xmm9 ^= xmm0
2615# asm 1: pxor <xmm0=int6464#2,<xmm9=int6464#10
2616# asm 2: pxor <xmm0=%xmm1,<xmm9=%xmm9
2617pxor %xmm1,%xmm9
2618
2619# qhasm: xmm10 ^= xmm0
2620# asm 1: pxor <xmm0=int6464#2,<xmm10=int6464#11
2621# asm 2: pxor <xmm0=%xmm1,<xmm10=%xmm10
2622pxor %xmm1,%xmm10
2623
2624# qhasm: xmm15 ^= xmm8
2625# asm 1: pxor <xmm8=int6464#9,<xmm15=int6464#16
2626# asm 2: pxor <xmm8=%xmm8,<xmm15=%xmm15
2627pxor %xmm8,%xmm15
2628
2629# qhasm: xmm9 ^= xmm14
2630# asm 1: pxor <xmm14=int6464#15,<xmm9=int6464#10
2631# asm 2: pxor <xmm14=%xmm14,<xmm9=%xmm9
2632pxor %xmm14,%xmm9
2633
2634# qhasm: xmm12 ^= xmm15
2635# asm 1: pxor <xmm15=int6464#16,<xmm12=int6464#13
2636# asm 2: pxor <xmm15=%xmm15,<xmm12=%xmm12
2637pxor %xmm15,%xmm12
2638
2639# qhasm: xmm14 ^= xmm8
2640# asm 1: pxor <xmm8=int6464#9,<xmm14=int6464#15
2641# asm 2: pxor <xmm8=%xmm8,<xmm14=%xmm14
2642pxor %xmm8,%xmm14
2643
2644# qhasm: xmm8 ^= xmm9
2645# asm 1: pxor <xmm9=int6464#10,<xmm8=int6464#9
2646# asm 2: pxor <xmm9=%xmm9,<xmm8=%xmm8
2647pxor %xmm9,%xmm8
2648
2649# qhasm: xmm9 ^= xmm13
2650# asm 1: pxor <xmm13=int6464#14,<xmm9=int6464#10
2651# asm 2: pxor <xmm13=%xmm13,<xmm9=%xmm9
2652pxor %xmm13,%xmm9
2653
2654# qhasm: xmm13 ^= xmm10
2655# asm 1: pxor <xmm10=int6464#11,<xmm13=int6464#14
2656# asm 2: pxor <xmm10=%xmm10,<xmm13=%xmm13
2657pxor %xmm10,%xmm13
2658
2659# qhasm: xmm12 ^= xmm13
2660# asm 1: pxor <xmm13=int6464#14,<xmm12=int6464#13
2661# asm 2: pxor <xmm13=%xmm13,<xmm12=%xmm12
2662pxor %xmm13,%xmm12
2663
2664# qhasm: xmm10 ^= xmm11
2665# asm 1: pxor <xmm11=int6464#12,<xmm10=int6464#11
2666# asm 2: pxor <xmm11=%xmm11,<xmm10=%xmm10
2667pxor %xmm11,%xmm10
2668
2669# qhasm: xmm11 ^= xmm13
2670# asm 1: pxor <xmm13=int6464#14,<xmm11=int6464#12
2671# asm 2: pxor <xmm13=%xmm13,<xmm11=%xmm11
2672pxor %xmm13,%xmm11
2673
2674# qhasm: xmm14 ^= xmm11
2675# asm 1: pxor <xmm11=int6464#12,<xmm14=int6464#15
2676# asm 2: pxor <xmm11=%xmm11,<xmm14=%xmm14
2677pxor %xmm11,%xmm14
2678
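# MixColumns: pshufd $0x93 rotates each 128-bit slice by one 32-bit word and
# pshufd $0x4E by two; XORing the rotated copies into the state computes the
# column mix in the bitsliced domain.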
2679# qhasm: xmm0 = shuffle dwords of xmm8 by 0x93
2680# asm 1: pshufd $0x93,<xmm8=int6464#9,>xmm0=int6464#1
2681# asm 2: pshufd $0x93,<xmm8=%xmm8,>xmm0=%xmm0
2682pshufd $0x93,%xmm8,%xmm0
2683
2684# qhasm: xmm1 = shuffle dwords of xmm9 by 0x93
2685# asm 1: pshufd $0x93,<xmm9=int6464#10,>xmm1=int6464#2
2686# asm 2: pshufd $0x93,<xmm9=%xmm9,>xmm1=%xmm1
2687pshufd $0x93,%xmm9,%xmm1
2688
2689# qhasm: xmm2 = shuffle dwords of xmm12 by 0x93
2690# asm 1: pshufd $0x93,<xmm12=int6464#13,>xmm2=int6464#3
2691# asm 2: pshufd $0x93,<xmm12=%xmm12,>xmm2=%xmm2
2692pshufd $0x93,%xmm12,%xmm2
2693
2694# qhasm: xmm3 = shuffle dwords of xmm14 by 0x93
2695# asm 1: pshufd $0x93,<xmm14=int6464#15,>xmm3=int6464#4
2696# asm 2: pshufd $0x93,<xmm14=%xmm14,>xmm3=%xmm3
2697pshufd $0x93,%xmm14,%xmm3
2698
2699# qhasm: xmm4 = shuffle dwords of xmm11 by 0x93
2700# asm 1: pshufd $0x93,<xmm11=int6464#12,>xmm4=int6464#5
2701# asm 2: pshufd $0x93,<xmm11=%xmm11,>xmm4=%xmm4
2702pshufd $0x93,%xmm11,%xmm4
2703
2704# qhasm: xmm5 = shuffle dwords of xmm15 by 0x93
2705# asm 1: pshufd $0x93,<xmm15=int6464#16,>xmm5=int6464#6
2706# asm 2: pshufd $0x93,<xmm15=%xmm15,>xmm5=%xmm5
2707pshufd $0x93,%xmm15,%xmm5
2708
2709# qhasm: xmm6 = shuffle dwords of xmm10 by 0x93
2710# asm 1: pshufd $0x93,<xmm10=int6464#11,>xmm6=int6464#7
2711# asm 2: pshufd $0x93,<xmm10=%xmm10,>xmm6=%xmm6
2712pshufd $0x93,%xmm10,%xmm6
2713
2714# qhasm: xmm7 = shuffle dwords of xmm13 by 0x93
2715# asm 1: pshufd $0x93,<xmm13=int6464#14,>xmm7=int6464#8
2716# asm 2: pshufd $0x93,<xmm13=%xmm13,>xmm7=%xmm7
2717pshufd $0x93,%xmm13,%xmm7
2718
2719# qhasm: xmm8 ^= xmm0
2720# asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9
2721# asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8
2722pxor %xmm0,%xmm8
2723
2724# qhasm: xmm9 ^= xmm1
2725# asm 1: pxor <xmm1=int6464#2,<xmm9=int6464#10
2726# asm 2: pxor <xmm1=%xmm1,<xmm9=%xmm9
2727pxor %xmm1,%xmm9
2728
2729# qhasm: xmm12 ^= xmm2
2730# asm 1: pxor <xmm2=int6464#3,<xmm12=int6464#13
2731# asm 2: pxor <xmm2=%xmm2,<xmm12=%xmm12
2732pxor %xmm2,%xmm12
2733
2734# qhasm: xmm14 ^= xmm3
2735# asm 1: pxor <xmm3=int6464#4,<xmm14=int6464#15
2736# asm 2: pxor <xmm3=%xmm3,<xmm14=%xmm14
2737pxor %xmm3,%xmm14
2738
2739# qhasm: xmm11 ^= xmm4
2740# asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#12
2741# asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm11
2742pxor %xmm4,%xmm11
2743
2744# qhasm: xmm15 ^= xmm5
2745# asm 1: pxor <xmm5=int6464#6,<xmm15=int6464#16
2746# asm 2: pxor <xmm5=%xmm5,<xmm15=%xmm15
2747pxor %xmm5,%xmm15
2748
2749# qhasm: xmm10 ^= xmm6
2750# asm 1: pxor <xmm6=int6464#7,<xmm10=int6464#11
2751# asm 2: pxor <xmm6=%xmm6,<xmm10=%xmm10
2752pxor %xmm6,%xmm10
2753
2754# qhasm: xmm13 ^= xmm7
2755# asm 1: pxor <xmm7=int6464#8,<xmm13=int6464#14
2756# asm 2: pxor <xmm7=%xmm7,<xmm13=%xmm13
2757pxor %xmm7,%xmm13
2758
2759# qhasm: xmm0 ^= xmm13
2760# asm 1: pxor <xmm13=int6464#14,<xmm0=int6464#1
2761# asm 2: pxor <xmm13=%xmm13,<xmm0=%xmm0
2762pxor %xmm13,%xmm0
2763
2764# qhasm: xmm1 ^= xmm8
2765# asm 1: pxor <xmm8=int6464#9,<xmm1=int6464#2
2766# asm 2: pxor <xmm8=%xmm8,<xmm1=%xmm1
2767pxor %xmm8,%xmm1
2768
2769# qhasm: xmm2 ^= xmm9
2770# asm 1: pxor <xmm9=int6464#10,<xmm2=int6464#3
2771# asm 2: pxor <xmm9=%xmm9,<xmm2=%xmm2
2772pxor %xmm9,%xmm2
2773
2774# qhasm: xmm1 ^= xmm13
2775# asm 1: pxor <xmm13=int6464#14,<xmm1=int6464#2
2776# asm 2: pxor <xmm13=%xmm13,<xmm1=%xmm1
2777pxor %xmm13,%xmm1
2778
2779# qhasm: xmm3 ^= xmm12
2780# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4
2781# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3
2782pxor %xmm12,%xmm3
2783
2784# qhasm: xmm4 ^= xmm14
2785# asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5
2786# asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4
2787pxor %xmm14,%xmm4
2788
2789# qhasm: xmm5 ^= xmm11
2790# asm 1: pxor <xmm11=int6464#12,<xmm5=int6464#6
2791# asm 2: pxor <xmm11=%xmm11,<xmm5=%xmm5
2792pxor %xmm11,%xmm5
2793
2794# qhasm: xmm3 ^= xmm13
2795# asm 1: pxor <xmm13=int6464#14,<xmm3=int6464#4
2796# asm 2: pxor <xmm13=%xmm13,<xmm3=%xmm3
2797pxor %xmm13,%xmm3
2798
2799# qhasm: xmm6 ^= xmm15
2800# asm 1: pxor <xmm15=int6464#16,<xmm6=int6464#7
2801# asm 2: pxor <xmm15=%xmm15,<xmm6=%xmm6
2802pxor %xmm15,%xmm6
2803
2804# qhasm: xmm7 ^= xmm10
2805# asm 1: pxor <xmm10=int6464#11,<xmm7=int6464#8
2806# asm 2: pxor <xmm10=%xmm10,<xmm7=%xmm7
2807pxor %xmm10,%xmm7
2808
2809# qhasm: xmm4 ^= xmm13
2810# asm 1: pxor <xmm13=int6464#14,<xmm4=int6464#5
2811# asm 2: pxor <xmm13=%xmm13,<xmm4=%xmm4
2812pxor %xmm13,%xmm4
2813
2814# qhasm: xmm8 = shuffle dwords of xmm8 by 0x4E
2815# asm 1: pshufd $0x4E,<xmm8=int6464#9,>xmm8=int6464#9
2816# asm 2: pshufd $0x4E,<xmm8=%xmm8,>xmm8=%xmm8
2817pshufd $0x4E,%xmm8,%xmm8
2818
2819# qhasm: xmm9 = shuffle dwords of xmm9 by 0x4E
2820# asm 1: pshufd $0x4E,<xmm9=int6464#10,>xmm9=int6464#10
2821# asm 2: pshufd $0x4E,<xmm9=%xmm9,>xmm9=%xmm9
2822pshufd $0x4E,%xmm9,%xmm9
2823
2824# qhasm: xmm12 = shuffle dwords of xmm12 by 0x4E
2825# asm 1: pshufd $0x4E,<xmm12=int6464#13,>xmm12=int6464#13
2826# asm 2: pshufd $0x4E,<xmm12=%xmm12,>xmm12=%xmm12
2827pshufd $0x4E,%xmm12,%xmm12
2828
2829# qhasm: xmm14 = shuffle dwords of xmm14 by 0x4E
2830# asm 1: pshufd $0x4E,<xmm14=int6464#15,>xmm14=int6464#15
2831# asm 2: pshufd $0x4E,<xmm14=%xmm14,>xmm14=%xmm14
2832pshufd $0x4E,%xmm14,%xmm14
2833
2834# qhasm: xmm11 = shuffle dwords of xmm11 by 0x4E
2835# asm 1: pshufd $0x4E,<xmm11=int6464#12,>xmm11=int6464#12
2836# asm 2: pshufd $0x4E,<xmm11=%xmm11,>xmm11=%xmm11
2837pshufd $0x4E,%xmm11,%xmm11
2838
2839# qhasm: xmm15 = shuffle dwords of xmm15 by 0x4E
2840# asm 1: pshufd $0x4E,<xmm15=int6464#16,>xmm15=int6464#16
2841# asm 2: pshufd $0x4E,<xmm15=%xmm15,>xmm15=%xmm15
2842pshufd $0x4E,%xmm15,%xmm15
2843
2844# qhasm: xmm10 = shuffle dwords of xmm10 by 0x4E
2845# asm 1: pshufd $0x4E,<xmm10=int6464#11,>xmm10=int6464#11
2846# asm 2: pshufd $0x4E,<xmm10=%xmm10,>xmm10=%xmm10
2847pshufd $0x4E,%xmm10,%xmm10
2848
2849# qhasm: xmm13 = shuffle dwords of xmm13 by 0x4E
2850# asm 1: pshufd $0x4E,<xmm13=int6464#14,>xmm13=int6464#14
2851# asm 2: pshufd $0x4E,<xmm13=%xmm13,>xmm13=%xmm13
2852pshufd $0x4E,%xmm13,%xmm13
2853
2854# qhasm: xmm0 ^= xmm8
2855# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
2856# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
2857pxor %xmm8,%xmm0
2858
2859# qhasm: xmm1 ^= xmm9
2860# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
2861# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
2862pxor %xmm9,%xmm1
2863
2864# qhasm: xmm2 ^= xmm12
2865# asm 1: pxor <xmm12=int6464#13,<xmm2=int6464#3
2866# asm 2: pxor <xmm12=%xmm12,<xmm2=%xmm2
2867pxor %xmm12,%xmm2
2868
2869# qhasm: xmm3 ^= xmm14
2870# asm 1: pxor <xmm14=int6464#15,<xmm3=int6464#4
2871# asm 2: pxor <xmm14=%xmm14,<xmm3=%xmm3
2872pxor %xmm14,%xmm3
2873
2874# qhasm: xmm4 ^= xmm11
2875# asm 1: pxor <xmm11=int6464#12,<xmm4=int6464#5
2876# asm 2: pxor <xmm11=%xmm11,<xmm4=%xmm4
2877pxor %xmm11,%xmm4
2878
2879# qhasm: xmm5 ^= xmm15
2880# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
2881# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
2882pxor %xmm15,%xmm5
2883
2884# qhasm: xmm6 ^= xmm10
2885# asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7
2886# asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6
2887pxor %xmm10,%xmm6
2888
2889# qhasm: xmm7 ^= xmm13
2890# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
2891# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
2892pxor %xmm13,%xmm7
2893
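# AddRoundKey + ShiftRows: XOR the next bitsliced round key (eight 16-byte
# slices at c+256 .. c+368) into the state, then permute bytes by the SR
# pattern from the data section.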
2894# qhasm: xmm0 ^= *(int128 *)(c + 256)
2895# asm 1: pxor 256(<c=int64#5),<xmm0=int6464#1
2896# asm 2: pxor 256(<c=%r8),<xmm0=%xmm0
2897pxor 256(%r8),%xmm0
2898
2899# qhasm: shuffle bytes of xmm0 by SR
2900# asm 1: pshufb SR,<xmm0=int6464#1
2901# asm 2: pshufb SR,<xmm0=%xmm0
2902pshufb SR,%xmm0
2903
2904# qhasm: xmm1 ^= *(int128 *)(c + 272)
2905# asm 1: pxor 272(<c=int64#5),<xmm1=int6464#2
2906# asm 2: pxor 272(<c=%r8),<xmm1=%xmm1
2907pxor 272(%r8),%xmm1
2908
2909# qhasm: shuffle bytes of xmm1 by SR
2910# asm 1: pshufb SR,<xmm1=int6464#2
2911# asm 2: pshufb SR,<xmm1=%xmm1
2912pshufb SR,%xmm1
2913
2914# qhasm: xmm2 ^= *(int128 *)(c + 288)
2915# asm 1: pxor 288(<c=int64#5),<xmm2=int6464#3
2916# asm 2: pxor 288(<c=%r8),<xmm2=%xmm2
2917pxor 288(%r8),%xmm2
2918
2919# qhasm: shuffle bytes of xmm2 by SR
2920# asm 1: pshufb SR,<xmm2=int6464#3
2921# asm 2: pshufb SR,<xmm2=%xmm2
2922pshufb SR,%xmm2
2923
2924# qhasm: xmm3 ^= *(int128 *)(c + 304)
2925# asm 1: pxor 304(<c=int64#5),<xmm3=int6464#4
2926# asm 2: pxor 304(<c=%r8),<xmm3=%xmm3
2927pxor 304(%r8),%xmm3
2928
2929# qhasm: shuffle bytes of xmm3 by SR
2930# asm 1: pshufb SR,<xmm3=int6464#4
2931# asm 2: pshufb SR,<xmm3=%xmm3
2932pshufb SR,%xmm3
2933
2934# qhasm: xmm4 ^= *(int128 *)(c + 320)
2935# asm 1: pxor 320(<c=int64#5),<xmm4=int6464#5
2936# asm 2: pxor 320(<c=%r8),<xmm4=%xmm4
2937pxor 320(%r8),%xmm4
2938
2939# qhasm: shuffle bytes of xmm4 by SR
2940# asm 1: pshufb SR,<xmm4=int6464#5
2941# asm 2: pshufb SR,<xmm4=%xmm4
2942pshufb SR,%xmm4
2943
2944# qhasm: xmm5 ^= *(int128 *)(c + 336)
2945# asm 1: pxor 336(<c=int64#5),<xmm5=int6464#6
2946# asm 2: pxor 336(<c=%r8),<xmm5=%xmm5
2947pxor 336(%r8),%xmm5
2948
2949# qhasm: shuffle bytes of xmm5 by SR
2950# asm 1: pshufb SR,<xmm5=int6464#6
2951# asm 2: pshufb SR,<xmm5=%xmm5
2952pshufb SR,%xmm5
2953
2954# qhasm: xmm6 ^= *(int128 *)(c + 352)
2955# asm 1: pxor 352(<c=int64#5),<xmm6=int6464#7
2956# asm 2: pxor 352(<c=%r8),<xmm6=%xmm6
2957pxor 352(%r8),%xmm6
2958
2959# qhasm: shuffle bytes of xmm6 by SR
2960# asm 1: pshufb SR,<xmm6=int6464#7
2961# asm 2: pshufb SR,<xmm6=%xmm6
2962pshufb SR,%xmm6
2963
2964# qhasm: xmm7 ^= *(int128 *)(c + 368)
2965# asm 1: pxor 368(<c=int64#5),<xmm7=int6464#8
2966# asm 2: pxor 368(<c=%r8),<xmm7=%xmm7
2967pxor 368(%r8),%xmm7
2968
2969# qhasm: shuffle bytes of xmm7 by SR
2970# asm 1: pshufb SR,<xmm7=int6464#8
2971# asm 2: pshufb SR,<xmm7=%xmm7
2972pshufb SR,%xmm7
2973
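# Next S-box application: the XOR-only stanzas below are the input basis
# change; the mixed AND/OR logic further down performs the inversion.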
2974# qhasm: xmm5 ^= xmm6
2975# asm 1: pxor <xmm6=int6464#7,<xmm5=int6464#6
2976# asm 2: pxor <xmm6=%xmm6,<xmm5=%xmm5
2977pxor %xmm6,%xmm5
2978
2979# qhasm: xmm2 ^= xmm1
2980# asm 1: pxor <xmm1=int6464#2,<xmm2=int6464#3
2981# asm 2: pxor <xmm1=%xmm1,<xmm2=%xmm2
2982pxor %xmm1,%xmm2
2983
2984# qhasm: xmm5 ^= xmm0
2985# asm 1: pxor <xmm0=int6464#1,<xmm5=int6464#6
2986# asm 2: pxor <xmm0=%xmm0,<xmm5=%xmm5
2987pxor %xmm0,%xmm5
2988
2989# qhasm: xmm6 ^= xmm2
2990# asm 1: pxor <xmm2=int6464#3,<xmm6=int6464#7
2991# asm 2: pxor <xmm2=%xmm2,<xmm6=%xmm6
2992pxor %xmm2,%xmm6
2993
2994# qhasm: xmm3 ^= xmm0
2995# asm 1: pxor <xmm0=int6464#1,<xmm3=int6464#4
2996# asm 2: pxor <xmm0=%xmm0,<xmm3=%xmm3
2997pxor %xmm0,%xmm3
2998
2999# qhasm: xmm6 ^= xmm3
3000# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
3001# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
3002pxor %xmm3,%xmm6
3003
3004# qhasm: xmm3 ^= xmm7
3005# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4
3006# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3
3007pxor %xmm7,%xmm3
3008
3009# qhasm: xmm3 ^= xmm4
3010# asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4
3011# asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3
3012pxor %xmm4,%xmm3
3013
3014# qhasm: xmm7 ^= xmm5
3015# asm 1: pxor <xmm5=int6464#6,<xmm7=int6464#8
3016# asm 2: pxor <xmm5=%xmm5,<xmm7=%xmm7
3017pxor %xmm5,%xmm7
3018
3019# qhasm: xmm3 ^= xmm1
3020# asm 1: pxor <xmm1=int6464#2,<xmm3=int6464#4
3021# asm 2: pxor <xmm1=%xmm1,<xmm3=%xmm3
3022pxor %xmm1,%xmm3
3023
3024# qhasm: xmm4 ^= xmm5
3025# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
3026# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
3027pxor %xmm5,%xmm4
3028
3029# qhasm: xmm2 ^= xmm7
3030# asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3
3031# asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2
3032pxor %xmm7,%xmm2
3033
3034# qhasm: xmm1 ^= xmm5
3035# asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2
3036# asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1
3037pxor %xmm5,%xmm1
3038
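# Input basis change done; copy the state into scratch registers
# (%xmm8..%xmm12) and start the inverter's OR/AND combination stage.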
3039# qhasm: xmm11 = xmm7
3040# asm 1: movdqa <xmm7=int6464#8,>xmm11=int6464#9
3041# asm 2: movdqa <xmm7=%xmm7,>xmm11=%xmm8
3042movdqa %xmm7,%xmm8
3043
3044# qhasm: xmm10 = xmm1
3045# asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10
3046# asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9
3047movdqa %xmm1,%xmm9
3048
3049# qhasm: xmm9 = xmm5
3050# asm 1: movdqa <xmm5=int6464#6,>xmm9=int6464#11
3051# asm 2: movdqa <xmm5=%xmm5,>xmm9=%xmm10
3052movdqa %xmm5,%xmm10
3053
3054# qhasm: xmm13 = xmm2
3055# asm 1: movdqa <xmm2=int6464#3,>xmm13=int6464#12
3056# asm 2: movdqa <xmm2=%xmm2,>xmm13=%xmm11
3057movdqa %xmm2,%xmm11
3058
3059# qhasm: xmm12 = xmm6
3060# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#13
3061# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm12
3062movdqa %xmm6,%xmm12
3063
3064# qhasm: xmm11 ^= xmm4
3065# asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#9
3066# asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm8
3067pxor %xmm4,%xmm8
3068
3069# qhasm: xmm10 ^= xmm2
3070# asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#10
3071# asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm9
3072pxor %xmm2,%xmm9
3073
3074# qhasm: xmm9 ^= xmm3
3075# asm 1: pxor <xmm3=int6464#4,<xmm9=int6464#11
3076# asm 2: pxor <xmm3=%xmm3,<xmm9=%xmm10
3077pxor %xmm3,%xmm10
3078
3079# qhasm: xmm13 ^= xmm4
3080# asm 1: pxor <xmm4=int6464#5,<xmm13=int6464#12
3081# asm 2: pxor <xmm4=%xmm4,<xmm13=%xmm11
3082pxor %xmm4,%xmm11
3083
3084# qhasm: xmm12 ^= xmm0
3085# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13
3086# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12
3087pxor %xmm0,%xmm12
3088
3089# qhasm: xmm14 = xmm11
3090# asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14
3091# asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13
3092movdqa %xmm8,%xmm13
3093
3094# qhasm: xmm8 = xmm10
3095# asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15
3096# asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14
3097movdqa %xmm9,%xmm14
3098
3099# qhasm: xmm15 = xmm11
3100# asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16
3101# asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15
3102movdqa %xmm8,%xmm15
3103
3104# qhasm: xmm10 |= xmm9
3105# asm 1: por <xmm9=int6464#11,<xmm10=int6464#10
3106# asm 2: por <xmm9=%xmm10,<xmm10=%xmm9
3107por %xmm10,%xmm9
3108
3109# qhasm: xmm11 |= xmm12
3110# asm 1: por <xmm12=int6464#13,<xmm11=int6464#9
3111# asm 2: por <xmm12=%xmm12,<xmm11=%xmm8
3112por %xmm12,%xmm8
3113
3114# qhasm: xmm15 ^= xmm8
3115# asm 1: pxor <xmm8=int6464#15,<xmm15=int6464#16
3116# asm 2: pxor <xmm8=%xmm14,<xmm15=%xmm15
3117pxor %xmm14,%xmm15
3118
3119# qhasm: xmm14 &= xmm12
3120# asm 1: pand <xmm12=int6464#13,<xmm14=int6464#14
3121# asm 2: pand <xmm12=%xmm12,<xmm14=%xmm13
3122pand %xmm12,%xmm13
3123
3124# qhasm: xmm8 &= xmm9
3125# asm 1: pand <xmm9=int6464#11,<xmm8=int6464#15
3126# asm 2: pand <xmm9=%xmm10,<xmm8=%xmm14
3127pand %xmm10,%xmm14
3128
3129# qhasm: xmm12 ^= xmm9
3130# asm 1: pxor <xmm9=int6464#11,<xmm12=int6464#13
3131# asm 2: pxor <xmm9=%xmm10,<xmm12=%xmm12
3132pxor %xmm10,%xmm12
3133
3134# qhasm: xmm15 &= xmm12
3135# asm 1: pand <xmm12=int6464#13,<xmm15=int6464#16
3136# asm 2: pand <xmm12=%xmm12,<xmm15=%xmm15
3137pand %xmm12,%xmm15
3138
3139# qhasm: xmm12 = xmm3
3140# asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#11
3141# asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm10
3142movdqa %xmm3,%xmm10
3143
3144# qhasm: xmm12 ^= xmm0
3145# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#11
3146# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm10
3147pxor %xmm0,%xmm10
3148
3149# qhasm: xmm13 &= xmm12
3150# asm 1: pand <xmm12=int6464#11,<xmm13=int6464#12
3151# asm 2: pand <xmm12=%xmm10,<xmm13=%xmm11
3152pand %xmm10,%xmm11
3153
3154# qhasm: xmm11 ^= xmm13
3155# asm 1: pxor <xmm13=int6464#12,<xmm11=int6464#9
3156# asm 2: pxor <xmm13=%xmm11,<xmm11=%xmm8
3157pxor %xmm11,%xmm8
3158
3159# qhasm: xmm10 ^= xmm13
3160# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
3161# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
3162pxor %xmm11,%xmm9
3163
3164# qhasm: xmm13 = xmm7
3165# asm 1: movdqa <xmm7=int6464#8,>xmm13=int6464#11
3166# asm 2: movdqa <xmm7=%xmm7,>xmm13=%xmm10
3167movdqa %xmm7,%xmm10
3168
3169# qhasm: xmm13 ^= xmm1
3170# asm 1: pxor <xmm1=int6464#2,<xmm13=int6464#11
3171# asm 2: pxor <xmm1=%xmm1,<xmm13=%xmm10
3172pxor %xmm1,%xmm10
3173
3174# qhasm: xmm12 = xmm5
3175# asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#12
3176# asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm11
3177movdqa %xmm5,%xmm11
3178
3179# qhasm: xmm9 = xmm13
3180# asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13
3181# asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12
3182movdqa %xmm10,%xmm12
3183
3184# qhasm: xmm12 ^= xmm6
3185# asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#12
3186# asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm11
3187pxor %xmm6,%xmm11
3188
3189# qhasm: xmm9 |= xmm12
3190# asm 1: por <xmm12=int6464#12,<xmm9=int6464#13
3191# asm 2: por <xmm12=%xmm11,<xmm9=%xmm12
3192por %xmm11,%xmm12
3193
3194# qhasm: xmm13 &= xmm12
3195# asm 1: pand <xmm12=int6464#12,<xmm13=int6464#11
3196# asm 2: pand <xmm12=%xmm11,<xmm13=%xmm10
3197pand %xmm11,%xmm10
3198
3199# qhasm: xmm8 ^= xmm13
3200# asm 1: pxor <xmm13=int6464#11,<xmm8=int6464#15
3201# asm 2: pxor <xmm13=%xmm10,<xmm8=%xmm14
3202pxor %xmm10,%xmm14
3203
3204# qhasm: xmm11 ^= xmm15
3205# asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#9
3206# asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm8
3207pxor %xmm15,%xmm8
3208
3209# qhasm: xmm10 ^= xmm14
3210# asm 1: pxor <xmm14=int6464#14,<xmm10=int6464#10
3211# asm 2: pxor <xmm14=%xmm13,<xmm10=%xmm9
3212pxor %xmm13,%xmm9
3213
3214# qhasm: xmm9 ^= xmm15
3215# asm 1: pxor <xmm15=int6464#16,<xmm9=int6464#13
3216# asm 2: pxor <xmm15=%xmm15,<xmm9=%xmm12
3217pxor %xmm15,%xmm12
3218
3219# qhasm: xmm8 ^= xmm14
3220# asm 1: pxor <xmm14=int6464#14,<xmm8=int6464#15
3221# asm 2: pxor <xmm14=%xmm13,<xmm8=%xmm14
3222pxor %xmm13,%xmm14
3223
3224# qhasm: xmm9 ^= xmm14
3225# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
3226# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
3227pxor %xmm13,%xmm12
3228
3229# qhasm: xmm12 = xmm2
3230# asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#11
3231# asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm10
3232movdqa %xmm2,%xmm10
3233
3234# qhasm: xmm13 = xmm4
3235# asm 1: movdqa <xmm4=int6464#5,>xmm13=int6464#12
3236# asm 2: movdqa <xmm4=%xmm4,>xmm13=%xmm11
3237movdqa %xmm4,%xmm11
3238
3239# qhasm: xmm14 = xmm1
3240# asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14
3241# asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13
3242movdqa %xmm1,%xmm13
3243
3244# qhasm: xmm15 = xmm7
3245# asm 1: movdqa <xmm7=int6464#8,>xmm15=int6464#16
3246# asm 2: movdqa <xmm7=%xmm7,>xmm15=%xmm15
3247movdqa %xmm7,%xmm15
3248
3249# qhasm: xmm12 &= xmm3
3250# asm 1: pand <xmm3=int6464#4,<xmm12=int6464#11
3251# asm 2: pand <xmm3=%xmm3,<xmm12=%xmm10
3252pand %xmm3,%xmm10
3253
3254# qhasm: xmm13 &= xmm0
3255# asm 1: pand <xmm0=int6464#1,<xmm13=int6464#12
3256# asm 2: pand <xmm0=%xmm0,<xmm13=%xmm11
3257pand %xmm0,%xmm11
3258
3259# qhasm: xmm14 &= xmm5
3260# asm 1: pand <xmm5=int6464#6,<xmm14=int6464#14
3261# asm 2: pand <xmm5=%xmm5,<xmm14=%xmm13
3262pand %xmm5,%xmm13
3263
3264# qhasm: xmm15 |= xmm6
3265# asm 1: por <xmm6=int6464#7,<xmm15=int6464#16
3266# asm 2: por <xmm6=%xmm6,<xmm15=%xmm15
3267por %xmm6,%xmm15
3268
3269# qhasm: xmm11 ^= xmm12
3270# asm 1: pxor <xmm12=int6464#11,<xmm11=int6464#9
3271# asm 2: pxor <xmm12=%xmm10,<xmm11=%xmm8
3272pxor %xmm10,%xmm8
3273
3274# qhasm: xmm10 ^= xmm13
3275# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
3276# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
3277pxor %xmm11,%xmm9
3278
3279# qhasm: xmm9 ^= xmm14
3280# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
3281# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
3282pxor %xmm13,%xmm12
3283
3284# qhasm: xmm8 ^= xmm15
3285# asm 1: pxor <xmm15=int6464#16,<xmm8=int6464#15
3286# asm 2: pxor <xmm15=%xmm15,<xmm8=%xmm14
3287pxor %xmm15,%xmm14
3288
3289# qhasm: xmm12 = xmm11
3290# asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11
3291# asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10
3292movdqa %xmm8,%xmm10
3293
3294# qhasm: xmm12 ^= xmm10
3295# asm 1: pxor <xmm10=int6464#10,<xmm12=int6464#11
3296# asm 2: pxor <xmm10=%xmm9,<xmm12=%xmm10
3297pxor %xmm9,%xmm10
3298
3299# qhasm: xmm11 &= xmm9
3300# asm 1: pand <xmm9=int6464#13,<xmm11=int6464#9
3301# asm 2: pand <xmm9=%xmm12,<xmm11=%xmm8
3302pand %xmm12,%xmm8
3303
3304# qhasm: xmm14 = xmm8
3305# asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12
3306# asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11
3307movdqa %xmm14,%xmm11
3308
3309# qhasm: xmm14 ^= xmm11
3310# asm 1: pxor <xmm11=int6464#9,<xmm14=int6464#12
3311# asm 2: pxor <xmm11=%xmm8,<xmm14=%xmm11
3312pxor %xmm8,%xmm11
3313
3314# qhasm: xmm15 = xmm12
3315# asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14
3316# asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13
3317movdqa %xmm10,%xmm13
3318
3319# qhasm: xmm15 &= xmm14
3320# asm 1: pand <xmm14=int6464#12,<xmm15=int6464#14
3321# asm 2: pand <xmm14=%xmm11,<xmm15=%xmm13
3322pand %xmm11,%xmm13
3323
3324# qhasm: xmm15 ^= xmm10
3325# asm 1: pxor <xmm10=int6464#10,<xmm15=int6464#14
3326# asm 2: pxor <xmm10=%xmm9,<xmm15=%xmm13
3327pxor %xmm9,%xmm13
3328
3329# qhasm: xmm13 = xmm9
3330# asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16
3331# asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15
3332movdqa %xmm12,%xmm15
3333
3334# qhasm: xmm13 ^= xmm8
3335# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
3336# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
3337pxor %xmm14,%xmm15
3338
3339# qhasm: xmm11 ^= xmm10
3340# asm 1: pxor <xmm10=int6464#10,<xmm11=int6464#9
3341# asm 2: pxor <xmm10=%xmm9,<xmm11=%xmm8
3342pxor %xmm9,%xmm8
3343
3344# qhasm: xmm13 &= xmm11
3345# asm 1: pand <xmm11=int6464#9,<xmm13=int6464#16
3346# asm 2: pand <xmm11=%xmm8,<xmm13=%xmm15
3347pand %xmm8,%xmm15
3348
3349# qhasm: xmm13 ^= xmm8
3350# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
3351# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
3352pxor %xmm14,%xmm15
3353
3354# qhasm: xmm9 ^= xmm13
3355# asm 1: pxor <xmm13=int6464#16,<xmm9=int6464#13
3356# asm 2: pxor <xmm13=%xmm15,<xmm9=%xmm12
3357pxor %xmm15,%xmm12
3358
3359# qhasm: xmm10 = xmm14
3360# asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9
3361# asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8
3362movdqa %xmm11,%xmm8
3363
3364# qhasm: xmm10 ^= xmm13
3365# asm 1: pxor <xmm13=int6464#16,<xmm10=int6464#9
3366# asm 2: pxor <xmm13=%xmm15,<xmm10=%xmm8
3367pxor %xmm15,%xmm8
3368
3369# qhasm: xmm10 &= xmm8
3370# asm 1: pand <xmm8=int6464#15,<xmm10=int6464#9
3371# asm 2: pand <xmm8=%xmm14,<xmm10=%xmm8
3372pand %xmm14,%xmm8
3373
3374# qhasm: xmm9 ^= xmm10
3375# asm 1: pxor <xmm10=int6464#9,<xmm9=int6464#13
3376# asm 2: pxor <xmm10=%xmm8,<xmm9=%xmm12
3377pxor %xmm8,%xmm12
3378
3379# qhasm: xmm14 ^= xmm10
3380# asm 1: pxor <xmm10=int6464#9,<xmm14=int6464#12
3381# asm 2: pxor <xmm10=%xmm8,<xmm14=%xmm11
3382pxor %xmm8,%xmm11
3383
3384# qhasm: xmm14 &= xmm15
3385# asm 1: pand <xmm15=int6464#14,<xmm14=int6464#12
3386# asm 2: pand <xmm15=%xmm13,<xmm14=%xmm11
3387pand %xmm13,%xmm11
3388
3389# qhasm: xmm14 ^= xmm12
3390# asm 1: pxor <xmm12=int6464#11,<xmm14=int6464#12
3391# asm 2: pxor <xmm12=%xmm10,<xmm14=%xmm11
3392pxor %xmm10,%xmm11
3393
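# Multiply the inverter outputs back into this round's state slices, again
# as chains of three-pand GF(2^2) multiplications.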
3394# qhasm: xmm12 = xmm6
3395# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#9
3396# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm8
3397movdqa %xmm6,%xmm8
3398
3399# qhasm: xmm8 = xmm5
3400# asm 1: movdqa <xmm5=int6464#6,>xmm8=int6464#10
3401# asm 2: movdqa <xmm5=%xmm5,>xmm8=%xmm9
3402movdqa %xmm5,%xmm9
3403
3404# qhasm: xmm10 = xmm15
3405# asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11
3406# asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10
3407movdqa %xmm13,%xmm10
3408
3409# qhasm: xmm10 ^= xmm14
3410# asm 1: pxor <xmm14=int6464#12,<xmm10=int6464#11
3411# asm 2: pxor <xmm14=%xmm11,<xmm10=%xmm10
3412pxor %xmm11,%xmm10
3413
3414# qhasm: xmm10 &= xmm6
3415# asm 1: pand <xmm6=int6464#7,<xmm10=int6464#11
3416# asm 2: pand <xmm6=%xmm6,<xmm10=%xmm10
3417pand %xmm6,%xmm10
3418
3419# qhasm: xmm6 ^= xmm5
3420# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
3421# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
3422pxor %xmm5,%xmm6
3423
3424# qhasm: xmm6 &= xmm14
3425# asm 1: pand <xmm14=int6464#12,<xmm6=int6464#7
3426# asm 2: pand <xmm14=%xmm11,<xmm6=%xmm6
3427pand %xmm11,%xmm6
3428
3429# qhasm: xmm5 &= xmm15
3430# asm 1: pand <xmm15=int6464#14,<xmm5=int6464#6
3431# asm 2: pand <xmm15=%xmm13,<xmm5=%xmm5
3432pand %xmm13,%xmm5
3433
3434# qhasm: xmm6 ^= xmm5
3435# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
3436# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
3437pxor %xmm5,%xmm6
3438
3439# qhasm: xmm5 ^= xmm10
3440# asm 1: pxor <xmm10=int6464#11,<xmm5=int6464#6
3441# asm 2: pxor <xmm10=%xmm10,<xmm5=%xmm5
3442pxor %xmm10,%xmm5
3443
3444# qhasm: xmm12 ^= xmm0
3445# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#9
3446# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm8
3447pxor %xmm0,%xmm8
3448
3449# qhasm: xmm8 ^= xmm3
3450# asm 1: pxor <xmm3=int6464#4,<xmm8=int6464#10
3451# asm 2: pxor <xmm3=%xmm3,<xmm8=%xmm9
3452pxor %xmm3,%xmm9
3453
3454# qhasm: xmm15 ^= xmm13
3455# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
3456# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
3457pxor %xmm15,%xmm13
3458
3459# qhasm: xmm14 ^= xmm9
3460# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
3461# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
3462pxor %xmm12,%xmm11
3463
3464# qhasm: xmm11 = xmm15
3465# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
3466# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
3467movdqa %xmm13,%xmm10
3468
3469# qhasm: xmm11 ^= xmm14
3470# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
3471# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
3472pxor %xmm11,%xmm10
3473
3474# qhasm: xmm11 &= xmm12
3475# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
3476# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
3477pand %xmm8,%xmm10
3478
3479# qhasm: xmm12 ^= xmm8
3480# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
3481# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
3482pxor %xmm9,%xmm8
3483
3484# qhasm: xmm12 &= xmm14
3485# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
3486# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
3487pand %xmm11,%xmm8
3488
3489# qhasm: xmm8 &= xmm15
3490# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
3491# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
3492pand %xmm13,%xmm9
3493
3494# qhasm: xmm8 ^= xmm12
3495# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
3496# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
3497pxor %xmm8,%xmm9
3498
3499# qhasm: xmm12 ^= xmm11
3500# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
3501# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
3502pxor %xmm10,%xmm8
3503
3504# qhasm: xmm10 = xmm13
3505# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
3506# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
3507movdqa %xmm15,%xmm10
3508
3509# qhasm: xmm10 ^= xmm9
3510# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
3511# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
3512pxor %xmm12,%xmm10
3513
3514# qhasm: xmm10 &= xmm0
3515# asm 1: pand <xmm0=int6464#1,<xmm10=int6464#11
3516# asm 2: pand <xmm0=%xmm0,<xmm10=%xmm10
3517pand %xmm0,%xmm10
3518
3519# qhasm: xmm0 ^= xmm3
3520# asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1
3521# asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0
3522pxor %xmm3,%xmm0
3523
3524# qhasm: xmm0 &= xmm9
3525# asm 1: pand <xmm9=int6464#13,<xmm0=int6464#1
3526# asm 2: pand <xmm9=%xmm12,<xmm0=%xmm0
3527pand %xmm12,%xmm0
3528
3529# qhasm: xmm3 &= xmm13
3530# asm 1: pand <xmm13=int6464#16,<xmm3=int6464#4
3531# asm 2: pand <xmm13=%xmm15,<xmm3=%xmm3
3532pand %xmm15,%xmm3
3533
3534# qhasm: xmm0 ^= xmm3
3535# asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1
3536# asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0
3537pxor %xmm3,%xmm0
3538
3539# qhasm: xmm3 ^= xmm10
3540# asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4
3541# asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3
3542pxor %xmm10,%xmm3
3543
3544# qhasm: xmm6 ^= xmm12
3545# asm 1: pxor <xmm12=int6464#9,<xmm6=int6464#7
3546# asm 2: pxor <xmm12=%xmm8,<xmm6=%xmm6
3547pxor %xmm8,%xmm6
3548
3549# qhasm: xmm0 ^= xmm12
3550# asm 1: pxor <xmm12=int6464#9,<xmm0=int6464#1
3551# asm 2: pxor <xmm12=%xmm8,<xmm0=%xmm0
3552pxor %xmm8,%xmm0
3553
3554# qhasm: xmm5 ^= xmm8
3555# asm 1: pxor <xmm8=int6464#10,<xmm5=int6464#6
3556# asm 2: pxor <xmm8=%xmm9,<xmm5=%xmm5
3557pxor %xmm9,%xmm5
3558
3559# qhasm: xmm3 ^= xmm8
3560# asm 1: pxor <xmm8=int6464#10,<xmm3=int6464#4
3561# asm 2: pxor <xmm8=%xmm9,<xmm3=%xmm3
3562pxor %xmm9,%xmm3
3563
3564# qhasm: xmm12 = xmm7
3565# asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#9
3566# asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm8
3567movdqa %xmm7,%xmm8
3568
3569# qhasm: xmm8 = xmm1
3570# asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10
3571# asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9
3572movdqa %xmm1,%xmm9
3573
3574# qhasm: xmm12 ^= xmm4
3575# asm 1: pxor <xmm4=int6464#5,<xmm12=int6464#9
3576# asm 2: pxor <xmm4=%xmm4,<xmm12=%xmm8
3577pxor %xmm4,%xmm8
3578
3579# qhasm: xmm8 ^= xmm2
3580# asm 1: pxor <xmm2=int6464#3,<xmm8=int6464#10
3581# asm 2: pxor <xmm2=%xmm2,<xmm8=%xmm9
3582pxor %xmm2,%xmm9
3583
3584# qhasm: xmm11 = xmm15
3585# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
3586# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
3587movdqa %xmm13,%xmm10
3588
3589# qhasm: xmm11 ^= xmm14
3590# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
3591# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
3592pxor %xmm11,%xmm10
3593
3594# qhasm: xmm11 &= xmm12
3595# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
3596# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
3597pand %xmm8,%xmm10
3598
3599# qhasm: xmm12 ^= xmm8
3600# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
3601# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
3602pxor %xmm9,%xmm8
3603
3604# qhasm: xmm12 &= xmm14
3605# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
3606# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
3607pand %xmm11,%xmm8
3608
3609# qhasm: xmm8 &= xmm15
3610# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
3611# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
3612pand %xmm13,%xmm9
3613
3614# qhasm: xmm8 ^= xmm12
3615# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
3616# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
3617pxor %xmm8,%xmm9
3618
3619# qhasm: xmm12 ^= xmm11
3620# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
3621# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
3622pxor %xmm10,%xmm8
3623
3624# qhasm: xmm10 = xmm13
3625# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
3626# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
3627movdqa %xmm15,%xmm10
3628
3629# qhasm: xmm10 ^= xmm9
3630# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
3631# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
3632pxor %xmm12,%xmm10
3633
3634# qhasm: xmm10 &= xmm4
3635# asm 1: pand <xmm4=int6464#5,<xmm10=int6464#11
3636# asm 2: pand <xmm4=%xmm4,<xmm10=%xmm10
3637pand %xmm4,%xmm10
3638
3639# qhasm: xmm4 ^= xmm2
3640# asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5
3641# asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4
3642pxor %xmm2,%xmm4
3643
3644# qhasm: xmm4 &= xmm9
3645# asm 1: pand <xmm9=int6464#13,<xmm4=int6464#5
3646# asm 2: pand <xmm9=%xmm12,<xmm4=%xmm4
3647pand %xmm12,%xmm4
3648
3649# qhasm: xmm2 &= xmm13
3650# asm 1: pand <xmm13=int6464#16,<xmm2=int6464#3
3651# asm 2: pand <xmm13=%xmm15,<xmm2=%xmm2
3652pand %xmm15,%xmm2
3653
3654# qhasm: xmm4 ^= xmm2
3655# asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5
3656# asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4
3657pxor %xmm2,%xmm4
3658
3659# qhasm: xmm2 ^= xmm10
3660# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3
3661# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2
3662pxor %xmm10,%xmm2
3663
3664# qhasm: xmm15 ^= xmm13
3665# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
3666# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
3667pxor %xmm15,%xmm13
3668
3669# qhasm: xmm14 ^= xmm9
3670# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
3671# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
3672pxor %xmm12,%xmm11
3673
3674# qhasm: xmm11 = xmm15
3675# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
3676# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
3677movdqa %xmm13,%xmm10
3678
3679# qhasm: xmm11 ^= xmm14
3680# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
3681# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
3682pxor %xmm11,%xmm10
3683
3684# qhasm: xmm11 &= xmm7
3685# asm 1: pand <xmm7=int6464#8,<xmm11=int6464#11
3686# asm 2: pand <xmm7=%xmm7,<xmm11=%xmm10
3687pand %xmm7,%xmm10
3688
3689# qhasm: xmm7 ^= xmm1
3690# asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8
3691# asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7
3692pxor %xmm1,%xmm7
3693
3694# qhasm: xmm7 &= xmm14
3695# asm 1: pand <xmm14=int6464#12,<xmm7=int6464#8
3696# asm 2: pand <xmm14=%xmm11,<xmm7=%xmm7
3697pand %xmm11,%xmm7
3698
3699# qhasm: xmm1 &= xmm15
3700# asm 1: pand <xmm15=int6464#14,<xmm1=int6464#2
3701# asm 2: pand <xmm15=%xmm13,<xmm1=%xmm1
3702pand %xmm13,%xmm1
3703
3704# qhasm: xmm7 ^= xmm1
3705# asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8
3706# asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7
3707pxor %xmm1,%xmm7
3708
3709# qhasm: xmm1 ^= xmm11
3710# asm 1: pxor <xmm11=int6464#11,<xmm1=int6464#2
3711# asm 2: pxor <xmm11=%xmm10,<xmm1=%xmm1
3712pxor %xmm10,%xmm1
3713
3714# qhasm: xmm7 ^= xmm12
3715# asm 1: pxor <xmm12=int6464#9,<xmm7=int6464#8
3716# asm 2: pxor <xmm12=%xmm8,<xmm7=%xmm7
3717pxor %xmm8,%xmm7
3718
3719# qhasm: xmm4 ^= xmm12
3720# asm 1: pxor <xmm12=int6464#9,<xmm4=int6464#5
3721# asm 2: pxor <xmm12=%xmm8,<xmm4=%xmm4
3722pxor %xmm8,%xmm4
3723
3724# qhasm: xmm1 ^= xmm8
3725# asm 1: pxor <xmm8=int6464#10,<xmm1=int6464#2
3726# asm 2: pxor <xmm8=%xmm9,<xmm1=%xmm1
3727pxor %xmm9,%xmm1
3728
3729# qhasm: xmm2 ^= xmm8
3730# asm 1: pxor <xmm8=int6464#10,<xmm2=int6464#3
3731# asm 2: pxor <xmm8=%xmm9,<xmm2=%xmm2
3732pxor %xmm9,%xmm2
3733
3734# qhasm: xmm7 ^= xmm0
3735# asm 1: pxor <xmm0=int6464#1,<xmm7=int6464#8
3736# asm 2: pxor <xmm0=%xmm0,<xmm7=%xmm7
3737pxor %xmm0,%xmm7
3738
3739# qhasm: xmm1 ^= xmm6
3740# asm 1: pxor <xmm6=int6464#7,<xmm1=int6464#2
3741# asm 2: pxor <xmm6=%xmm6,<xmm1=%xmm1
3742pxor %xmm6,%xmm1
3743
3744# qhasm: xmm4 ^= xmm7
3745# asm 1: pxor <xmm7=int6464#8,<xmm4=int6464#5
3746# asm 2: pxor <xmm7=%xmm7,<xmm4=%xmm4
3747pxor %xmm7,%xmm4
3748
3749# qhasm: xmm6 ^= xmm0
3750# asm 1: pxor <xmm0=int6464#1,<xmm6=int6464#7
3751# asm 2: pxor <xmm0=%xmm0,<xmm6=%xmm6
3752pxor %xmm0,%xmm6
3753
3754# qhasm: xmm0 ^= xmm1
3755# asm 1: pxor <xmm1=int6464#2,<xmm0=int6464#1
3756# asm 2: pxor <xmm1=%xmm1,<xmm0=%xmm0
3757pxor %xmm1,%xmm0
3758
3759# qhasm: xmm1 ^= xmm5
3760# asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2
3761# asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1
3762pxor %xmm5,%xmm1
3763
3764# qhasm: xmm5 ^= xmm2
3765# asm 1: pxor <xmm2=int6464#3,<xmm5=int6464#6
3766# asm 2: pxor <xmm2=%xmm2,<xmm5=%xmm5
3767pxor %xmm2,%xmm5
3768
3769# qhasm: xmm4 ^= xmm5
3770# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
3771# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
3772pxor %xmm5,%xmm4
3773
3774# qhasm: xmm2 ^= xmm3
3775# asm 1: pxor <xmm3=int6464#4,<xmm2=int6464#3
3776# asm 2: pxor <xmm3=%xmm3,<xmm2=%xmm2
3777pxor %xmm3,%xmm2
3778
3779# qhasm: xmm3 ^= xmm5
3780# asm 1: pxor <xmm5=int6464#6,<xmm3=int6464#4
3781# asm 2: pxor <xmm5=%xmm5,<xmm3=%xmm3
3782pxor %xmm5,%xmm3
3783
3784# qhasm: xmm6 ^= xmm3
3785# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
3786# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
3787pxor %xmm3,%xmm6
3788
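# MixColumns for this round: the same rotate-by-one ($0x93) and
# rotate-by-two ($0x4E) dword shuffles, XORed together; the state lands in
# xmm8..xmm15.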
3789# qhasm: xmm8 = shuffle dwords of xmm0 by 0x93
3790# asm 1: pshufd $0x93,<xmm0=int6464#1,>xmm8=int6464#9
3791# asm 2: pshufd $0x93,<xmm0=%xmm0,>xmm8=%xmm8
3792pshufd $0x93,%xmm0,%xmm8
3793
3794# qhasm: xmm9 = shuffle dwords of xmm1 by 0x93
3795# asm 1: pshufd $0x93,<xmm1=int6464#2,>xmm9=int6464#10
3796# asm 2: pshufd $0x93,<xmm1=%xmm1,>xmm9=%xmm9
3797pshufd $0x93,%xmm1,%xmm9
3798
3799# qhasm: xmm10 = shuffle dwords of xmm4 by 0x93
3800# asm 1: pshufd $0x93,<xmm4=int6464#5,>xmm10=int6464#11
3801# asm 2: pshufd $0x93,<xmm4=%xmm4,>xmm10=%xmm10
3802pshufd $0x93,%xmm4,%xmm10
3803
3804# qhasm: xmm11 = shuffle dwords of xmm6 by 0x93
3805# asm 1: pshufd $0x93,<xmm6=int6464#7,>xmm11=int6464#12
3806# asm 2: pshufd $0x93,<xmm6=%xmm6,>xmm11=%xmm11
3807pshufd $0x93,%xmm6,%xmm11
3808
3809# qhasm: xmm12 = shuffle dwords of xmm3 by 0x93
3810# asm 1: pshufd $0x93,<xmm3=int6464#4,>xmm12=int6464#13
3811# asm 2: pshufd $0x93,<xmm3=%xmm3,>xmm12=%xmm12
3812pshufd $0x93,%xmm3,%xmm12
3813
3814# qhasm: xmm13 = shuffle dwords of xmm7 by 0x93
3815# asm 1: pshufd $0x93,<xmm7=int6464#8,>xmm13=int6464#14
3816# asm 2: pshufd $0x93,<xmm7=%xmm7,>xmm13=%xmm13
3817pshufd $0x93,%xmm7,%xmm13
3818
3819# qhasm: xmm14 = shuffle dwords of xmm2 by 0x93
3820# asm 1: pshufd $0x93,<xmm2=int6464#3,>xmm14=int6464#15
3821# asm 2: pshufd $0x93,<xmm2=%xmm2,>xmm14=%xmm14
3822pshufd $0x93,%xmm2,%xmm14
3823
3824# qhasm: xmm15 = shuffle dwords of xmm5 by 0x93
3825# asm 1: pshufd $0x93,<xmm5=int6464#6,>xmm15=int6464#16
3826# asm 2: pshufd $0x93,<xmm5=%xmm5,>xmm15=%xmm15
3827pshufd $0x93,%xmm5,%xmm15
3828
3829# qhasm: xmm0 ^= xmm8
3830# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
3831# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
3832pxor %xmm8,%xmm0
3833
3834# qhasm: xmm1 ^= xmm9
3835# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
3836# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
3837pxor %xmm9,%xmm1
3838
3839# qhasm: xmm4 ^= xmm10
3840# asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5
3841# asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4
3842pxor %xmm10,%xmm4
3843
3844# qhasm: xmm6 ^= xmm11
3845# asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7
3846# asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6
3847pxor %xmm11,%xmm6
3848
3849# qhasm: xmm3 ^= xmm12
3850# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4
3851# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3
3852pxor %xmm12,%xmm3
3853
3854# qhasm: xmm7 ^= xmm13
3855# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
3856# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
3857pxor %xmm13,%xmm7
3858
3859# qhasm: xmm2 ^= xmm14
3860# asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3
3861# asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2
3862pxor %xmm14,%xmm2
3863
3864# qhasm: xmm5 ^= xmm15
3865# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
3866# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
3867pxor %xmm15,%xmm5
3868
3869# qhasm: xmm8 ^= xmm5
3870# asm 1: pxor <xmm5=int6464#6,<xmm8=int6464#9
3871# asm 2: pxor <xmm5=%xmm5,<xmm8=%xmm8
3872pxor %xmm5,%xmm8
3873
3874# qhasm: xmm9 ^= xmm0
3875# asm 1: pxor <xmm0=int6464#1,<xmm9=int6464#10
3876# asm 2: pxor <xmm0=%xmm0,<xmm9=%xmm9
3877pxor %xmm0,%xmm9
3878
3879# qhasm: xmm10 ^= xmm1
3880# asm 1: pxor <xmm1=int6464#2,<xmm10=int6464#11
3881# asm 2: pxor <xmm1=%xmm1,<xmm10=%xmm10
3882pxor %xmm1,%xmm10
3883
3884# qhasm: xmm9 ^= xmm5
3885# asm 1: pxor <xmm5=int6464#6,<xmm9=int6464#10
3886# asm 2: pxor <xmm5=%xmm5,<xmm9=%xmm9
3887pxor %xmm5,%xmm9
3888
3889# qhasm: xmm11 ^= xmm4
3890# asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#12
3891# asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm11
3892pxor %xmm4,%xmm11
3893
3894# qhasm: xmm12 ^= xmm6
3895# asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#13
3896# asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm12
3897pxor %xmm6,%xmm12
3898
3899# qhasm: xmm13 ^= xmm3
3900# asm 1: pxor <xmm3=int6464#4,<xmm13=int6464#14
3901# asm 2: pxor <xmm3=%xmm3,<xmm13=%xmm13
3902pxor %xmm3,%xmm13
3903
3904# qhasm: xmm11 ^= xmm5
3905# asm 1: pxor <xmm5=int6464#6,<xmm11=int6464#12
3906# asm 2: pxor <xmm5=%xmm5,<xmm11=%xmm11
3907pxor %xmm5,%xmm11
3908
3909# qhasm: xmm14 ^= xmm7
3910# asm 1: pxor <xmm7=int6464#8,<xmm14=int6464#15
3911# asm 2: pxor <xmm7=%xmm7,<xmm14=%xmm14
3912pxor %xmm7,%xmm14
3913
3914# qhasm: xmm15 ^= xmm2
3915# asm 1: pxor <xmm2=int6464#3,<xmm15=int6464#16
3916# asm 2: pxor <xmm2=%xmm2,<xmm15=%xmm15
3917pxor %xmm2,%xmm15
3918
3919# qhasm: xmm12 ^= xmm5
3920# asm 1: pxor <xmm5=int6464#6,<xmm12=int6464#13
3921# asm 2: pxor <xmm5=%xmm5,<xmm12=%xmm12
3922pxor %xmm5,%xmm12
3923
3924# qhasm: xmm0 = shuffle dwords of xmm0 by 0x4E
3925# asm 1: pshufd $0x4E,<xmm0=int6464#1,>xmm0=int6464#1
3926# asm 2: pshufd $0x4E,<xmm0=%xmm0,>xmm0=%xmm0
3927pshufd $0x4E,%xmm0,%xmm0
3928
3929# qhasm: xmm1 = shuffle dwords of xmm1 by 0x4E
3930# asm 1: pshufd $0x4E,<xmm1=int6464#2,>xmm1=int6464#2
3931# asm 2: pshufd $0x4E,<xmm1=%xmm1,>xmm1=%xmm1
3932pshufd $0x4E,%xmm1,%xmm1
3933
3934# qhasm: xmm4 = shuffle dwords of xmm4 by 0x4E
3935# asm 1: pshufd $0x4E,<xmm4=int6464#5,>xmm4=int6464#5
3936# asm 2: pshufd $0x4E,<xmm4=%xmm4,>xmm4=%xmm4
3937pshufd $0x4E,%xmm4,%xmm4
3938
3939# qhasm: xmm6 = shuffle dwords of xmm6 by 0x4E
3940# asm 1: pshufd $0x4E,<xmm6=int6464#7,>xmm6=int6464#7
3941# asm 2: pshufd $0x4E,<xmm6=%xmm6,>xmm6=%xmm6
3942pshufd $0x4E,%xmm6,%xmm6
3943
3944# qhasm: xmm3 = shuffle dwords of xmm3 by 0x4E
3945# asm 1: pshufd $0x4E,<xmm3=int6464#4,>xmm3=int6464#4
3946# asm 2: pshufd $0x4E,<xmm3=%xmm3,>xmm3=%xmm3
3947pshufd $0x4E,%xmm3,%xmm3
3948
3949# qhasm: xmm7 = shuffle dwords of xmm7 by 0x4E
3950# asm 1: pshufd $0x4E,<xmm7=int6464#8,>xmm7=int6464#8
3951# asm 2: pshufd $0x4E,<xmm7=%xmm7,>xmm7=%xmm7
3952pshufd $0x4E,%xmm7,%xmm7
3953
3954# qhasm: xmm2 = shuffle dwords of xmm2 by 0x4E
3955# asm 1: pshufd $0x4E,<xmm2=int6464#3,>xmm2=int6464#3
3956# asm 2: pshufd $0x4E,<xmm2=%xmm2,>xmm2=%xmm2
3957pshufd $0x4E,%xmm2,%xmm2
3958
3959# qhasm: xmm5 = shuffle dwords of xmm5 by 0x4E
3960# asm 1: pshufd $0x4E,<xmm5=int6464#6,>xmm5=int6464#6
3961# asm 2: pshufd $0x4E,<xmm5=%xmm5,>xmm5=%xmm5
3962pshufd $0x4E,%xmm5,%xmm5
3963
3964# qhasm: xmm8 ^= xmm0
3965# asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9
3966# asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8
3967pxor %xmm0,%xmm8
3968
3969# qhasm: xmm9 ^= xmm1
3970# asm 1: pxor <xmm1=int6464#2,<xmm9=int6464#10
3971# asm 2: pxor <xmm1=%xmm1,<xmm9=%xmm9
3972pxor %xmm1,%xmm9
3973
3974# qhasm: xmm10 ^= xmm4
3975# asm 1: pxor <xmm4=int6464#5,<xmm10=int6464#11
3976# asm 2: pxor <xmm4=%xmm4,<xmm10=%xmm10
3977pxor %xmm4,%xmm10
3978
3979# qhasm: xmm11 ^= xmm6
3980# asm 1: pxor <xmm6=int6464#7,<xmm11=int6464#12
3981# asm 2: pxor <xmm6=%xmm6,<xmm11=%xmm11
3982pxor %xmm6,%xmm11
3983
3984# qhasm: xmm12 ^= xmm3
3985# asm 1: pxor <xmm3=int6464#4,<xmm12=int6464#13
3986# asm 2: pxor <xmm3=%xmm3,<xmm12=%xmm12
3987pxor %xmm3,%xmm12
3988
3989# qhasm: xmm13 ^= xmm7
3990# asm 1: pxor <xmm7=int6464#8,<xmm13=int6464#14
3991# asm 2: pxor <xmm7=%xmm7,<xmm13=%xmm13
3992pxor %xmm7,%xmm13
3993
3994# qhasm: xmm14 ^= xmm2
3995# asm 1: pxor <xmm2=int6464#3,<xmm14=int6464#15
3996# asm 2: pxor <xmm2=%xmm2,<xmm14=%xmm14
3997pxor %xmm2,%xmm14
3998
3999# qhasm: xmm15 ^= xmm5
4000# asm 1: pxor <xmm5=int6464#6,<xmm15=int6464#16
4001# asm 2: pxor <xmm5=%xmm5,<xmm15=%xmm15
4002pxor %xmm5,%xmm15
4003
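# AddRoundKey + ShiftRows with the following round key (slices at
# c+384 .. c+496), then the SR byte shuffle.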
4004# qhasm: xmm8 ^= *(int128 *)(c + 384)
4005# asm 1: pxor 384(<c=int64#5),<xmm8=int6464#9
4006# asm 2: pxor 384(<c=%r8),<xmm8=%xmm8
4007pxor 384(%r8),%xmm8
4008
4009# qhasm: shuffle bytes of xmm8 by SR
4010# asm 1: pshufb SR,<xmm8=int6464#9
4011# asm 2: pshufb SR,<xmm8=%xmm8
4012pshufb SR,%xmm8
4013
4014# qhasm: xmm9 ^= *(int128 *)(c + 400)
4015# asm 1: pxor 400(<c=int64#5),<xmm9=int6464#10
4016# asm 2: pxor 400(<c=%r8),<xmm9=%xmm9
4017pxor 400(%r8),%xmm9
4018
4019# qhasm: shuffle bytes of xmm9 by SR
4020# asm 1: pshufb SR,<xmm9=int6464#10
4021# asm 2: pshufb SR,<xmm9=%xmm9
4022pshufb SR,%xmm9
4023
4024# qhasm: xmm10 ^= *(int128 *)(c + 416)
4025# asm 1: pxor 416(<c=int64#5),<xmm10=int6464#11
4026# asm 2: pxor 416(<c=%r8),<xmm10=%xmm10
4027pxor 416(%r8),%xmm10
4028
4029# qhasm: shuffle bytes of xmm10 by SR
4030# asm 1: pshufb SR,<xmm10=int6464#11
4031# asm 2: pshufb SR,<xmm10=%xmm10
4032pshufb SR,%xmm10
4033
4034# qhasm: xmm11 ^= *(int128 *)(c + 432)
4035# asm 1: pxor 432(<c=int64#5),<xmm11=int6464#12
4036# asm 2: pxor 432(<c=%r8),<xmm11=%xmm11
4037pxor 432(%r8),%xmm11
4038
4039# qhasm: shuffle bytes of xmm11 by SR
4040# asm 1: pshufb SR,<xmm11=int6464#12
4041# asm 2: pshufb SR,<xmm11=%xmm11
4042pshufb SR,%xmm11
4043
4044# qhasm: xmm12 ^= *(int128 *)(c + 448)
4045# asm 1: pxor 448(<c=int64#5),<xmm12=int6464#13
4046# asm 2: pxor 448(<c=%r8),<xmm12=%xmm12
4047pxor 448(%r8),%xmm12
4048
4049# qhasm: shuffle bytes of xmm12 by SR
4050# asm 1: pshufb SR,<xmm12=int6464#13
4051# asm 2: pshufb SR,<xmm12=%xmm12
4052pshufb SR,%xmm12
4053
4054# qhasm: xmm13 ^= *(int128 *)(c + 464)
4055# asm 1: pxor 464(<c=int64#5),<xmm13=int6464#14
4056# asm 2: pxor 464(<c=%r8),<xmm13=%xmm13
4057pxor 464(%r8),%xmm13
4058
4059# qhasm: shuffle bytes of xmm13 by SR
4060# asm 1: pshufb SR,<xmm13=int6464#14
4061# asm 2: pshufb SR,<xmm13=%xmm13
4062pshufb SR,%xmm13
4063
4064# qhasm: xmm14 ^= *(int128 *)(c + 480)
4065# asm 1: pxor 480(<c=int64#5),<xmm14=int6464#15
4066# asm 2: pxor 480(<c=%r8),<xmm14=%xmm14
4067pxor 480(%r8),%xmm14
4068
4069# qhasm: shuffle bytes of xmm14 by SR
4070# asm 1: pshufb SR,<xmm14=int6464#15
4071# asm 2: pshufb SR,<xmm14=%xmm14
4072pshufb SR,%xmm14
4073
4074# qhasm: xmm15 ^= *(int128 *)(c + 496)
4075# asm 1: pxor 496(<c=int64#5),<xmm15=int6464#16
4076# asm 2: pxor 496(<c=%r8),<xmm15=%xmm15
4077pxor 496(%r8),%xmm15
4078
4079# qhasm: shuffle bytes of xmm15 by SR
4080# asm 1: pshufb SR,<xmm15=int6464#16
4081# asm 2: pshufb SR,<xmm15=%xmm15
4082pshufb SR,%xmm15
4083
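# Input basis change for the next S-box application, this time operating on
# xmm8..xmm15.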
4084# qhasm: xmm13 ^= xmm14
4085# asm 1: pxor <xmm14=int6464#15,<xmm13=int6464#14
4086# asm 2: pxor <xmm14=%xmm14,<xmm13=%xmm13
4087pxor %xmm14,%xmm13
4088
4089# qhasm: xmm10 ^= xmm9
4090# asm 1: pxor <xmm9=int6464#10,<xmm10=int6464#11
4091# asm 2: pxor <xmm9=%xmm9,<xmm10=%xmm10
4092pxor %xmm9,%xmm10
4093
4094# qhasm: xmm13 ^= xmm8
4095# asm 1: pxor <xmm8=int6464#9,<xmm13=int6464#14
4096# asm 2: pxor <xmm8=%xmm8,<xmm13=%xmm13
4097pxor %xmm8,%xmm13
4098
4099# qhasm: xmm14 ^= xmm10
4100# asm 1: pxor <xmm10=int6464#11,<xmm14=int6464#15
4101# asm 2: pxor <xmm10=%xmm10,<xmm14=%xmm14
4102pxor %xmm10,%xmm14
4103
4104# qhasm: xmm11 ^= xmm8
4105# asm 1: pxor <xmm8=int6464#9,<xmm11=int6464#12
4106# asm 2: pxor <xmm8=%xmm8,<xmm11=%xmm11
4107pxor %xmm8,%xmm11
4108
4109# qhasm: xmm14 ^= xmm11
4110# asm 1: pxor <xmm11=int6464#12,<xmm14=int6464#15
4111# asm 2: pxor <xmm11=%xmm11,<xmm14=%xmm14
4112pxor %xmm11,%xmm14
4113
4114# qhasm: xmm11 ^= xmm15
4115# asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#12
4116# asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm11
4117pxor %xmm15,%xmm11
4118
4119# qhasm: xmm11 ^= xmm12
4120# asm 1: pxor <xmm12=int6464#13,<xmm11=int6464#12
4121# asm 2: pxor <xmm12=%xmm12,<xmm11=%xmm11
4122pxor %xmm12,%xmm11
4123
4124# qhasm: xmm15 ^= xmm13
4125# asm 1: pxor <xmm13=int6464#14,<xmm15=int6464#16
4126# asm 2: pxor <xmm13=%xmm13,<xmm15=%xmm15
4127pxor %xmm13,%xmm15
4128
4129# qhasm: xmm11 ^= xmm9
4130# asm 1: pxor <xmm9=int6464#10,<xmm11=int6464#12
4131# asm 2: pxor <xmm9=%xmm9,<xmm11=%xmm11
4132pxor %xmm9,%xmm11
4133
4134# qhasm: xmm12 ^= xmm13
4135# asm 1: pxor <xmm13=int6464#14,<xmm12=int6464#13
4136# asm 2: pxor <xmm13=%xmm13,<xmm12=%xmm12
4137pxor %xmm13,%xmm12
4138
4139# qhasm: xmm10 ^= xmm15
4140# asm 1: pxor <xmm15=int6464#16,<xmm10=int6464#11
4141# asm 2: pxor <xmm15=%xmm15,<xmm10=%xmm10
4142pxor %xmm15,%xmm10
4143
4144# qhasm: xmm9 ^= xmm13
4145# asm 1: pxor <xmm13=int6464#14,<xmm9=int6464#10
4146# asm 2: pxor <xmm13=%xmm13,<xmm9=%xmm9
4147pxor %xmm13,%xmm9
4148
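# Annotation: nonlinear core of the S-box, an AND/OR/XOR network that
# computes the GF(2^8) inversion (via a tower-field decomposition) on
# all 128 bytes of the eight slices at once; xmm0..xmm7 are used as
# scratch registers here.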
4149# qhasm: xmm3 = xmm15
4150# asm 1: movdqa <xmm15=int6464#16,>xmm3=int6464#1
4151# asm 2: movdqa <xmm15=%xmm15,>xmm3=%xmm0
4152movdqa %xmm15,%xmm0
4153
4154# qhasm: xmm2 = xmm9
4155# asm 1: movdqa <xmm9=int6464#10,>xmm2=int6464#2
4156# asm 2: movdqa <xmm9=%xmm9,>xmm2=%xmm1
4157movdqa %xmm9,%xmm1
4158
4159# qhasm: xmm1 = xmm13
4160# asm 1: movdqa <xmm13=int6464#14,>xmm1=int6464#3
4161# asm 2: movdqa <xmm13=%xmm13,>xmm1=%xmm2
4162movdqa %xmm13,%xmm2
4163
4164# qhasm: xmm5 = xmm10
4165# asm 1: movdqa <xmm10=int6464#11,>xmm5=int6464#4
4166# asm 2: movdqa <xmm10=%xmm10,>xmm5=%xmm3
4167movdqa %xmm10,%xmm3
4168
4169# qhasm: xmm4 = xmm14
4170# asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#5
4171# asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm4
4172movdqa %xmm14,%xmm4
4173
4174# qhasm: xmm3 ^= xmm12
4175# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#1
4176# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm0
4177pxor %xmm12,%xmm0
4178
4179# qhasm: xmm2 ^= xmm10
4180# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#2
4181# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm1
4182pxor %xmm10,%xmm1
4183
4184# qhasm: xmm1 ^= xmm11
4185# asm 1: pxor <xmm11=int6464#12,<xmm1=int6464#3
4186# asm 2: pxor <xmm11=%xmm11,<xmm1=%xmm2
4187pxor %xmm11,%xmm2
4188
4189# qhasm: xmm5 ^= xmm12
4190# asm 1: pxor <xmm12=int6464#13,<xmm5=int6464#4
4191# asm 2: pxor <xmm12=%xmm12,<xmm5=%xmm3
4192pxor %xmm12,%xmm3
4193
4194# qhasm: xmm4 ^= xmm8
4195# asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#5
4196# asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm4
4197pxor %xmm8,%xmm4
4198
4199# qhasm: xmm6 = xmm3
4200# asm 1: movdqa <xmm3=int6464#1,>xmm6=int6464#6
4201# asm 2: movdqa <xmm3=%xmm0,>xmm6=%xmm5
4202movdqa %xmm0,%xmm5
4203
4204# qhasm: xmm0 = xmm2
4205# asm 1: movdqa <xmm2=int6464#2,>xmm0=int6464#7
4206# asm 2: movdqa <xmm2=%xmm1,>xmm0=%xmm6
4207movdqa %xmm1,%xmm6
4208
4209# qhasm: xmm7 = xmm3
4210# asm 1: movdqa <xmm3=int6464#1,>xmm7=int6464#8
4211# asm 2: movdqa <xmm3=%xmm0,>xmm7=%xmm7
4212movdqa %xmm0,%xmm7
4213
4214# qhasm: xmm2 |= xmm1
4215# asm 1: por <xmm1=int6464#3,<xmm2=int6464#2
4216# asm 2: por <xmm1=%xmm2,<xmm2=%xmm1
4217por %xmm2,%xmm1
4218
4219# qhasm: xmm3 |= xmm4
4220# asm 1: por <xmm4=int6464#5,<xmm3=int6464#1
4221# asm 2: por <xmm4=%xmm4,<xmm3=%xmm0
4222por %xmm4,%xmm0
4223
4224# qhasm: xmm7 ^= xmm0
4225# asm 1: pxor <xmm0=int6464#7,<xmm7=int6464#8
4226# asm 2: pxor <xmm0=%xmm6,<xmm7=%xmm7
4227pxor %xmm6,%xmm7
4228
4229# qhasm: xmm6 &= xmm4
4230# asm 1: pand <xmm4=int6464#5,<xmm6=int6464#6
4231# asm 2: pand <xmm4=%xmm4,<xmm6=%xmm5
4232pand %xmm4,%xmm5
4233
4234# qhasm: xmm0 &= xmm1
4235# asm 1: pand <xmm1=int6464#3,<xmm0=int6464#7
4236# asm 2: pand <xmm1=%xmm2,<xmm0=%xmm6
4237pand %xmm2,%xmm6
4238
4239# qhasm: xmm4 ^= xmm1
4240# asm 1: pxor <xmm1=int6464#3,<xmm4=int6464#5
4241# asm 2: pxor <xmm1=%xmm2,<xmm4=%xmm4
4242pxor %xmm2,%xmm4
4243
4244# qhasm: xmm7 &= xmm4
4245# asm 1: pand <xmm4=int6464#5,<xmm7=int6464#8
4246# asm 2: pand <xmm4=%xmm4,<xmm7=%xmm7
4247pand %xmm4,%xmm7
4248
4249# qhasm: xmm4 = xmm11
4250# asm 1: movdqa <xmm11=int6464#12,>xmm4=int6464#3
4251# asm 2: movdqa <xmm11=%xmm11,>xmm4=%xmm2
4252movdqa %xmm11,%xmm2
4253
4254# qhasm: xmm4 ^= xmm8
4255# asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#3
4256# asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm2
4257pxor %xmm8,%xmm2
4258
4259# qhasm: xmm5 &= xmm4
4260# asm 1: pand <xmm4=int6464#3,<xmm5=int6464#4
4261# asm 2: pand <xmm4=%xmm2,<xmm5=%xmm3
4262pand %xmm2,%xmm3
4263
4264# qhasm: xmm3 ^= xmm5
4265# asm 1: pxor <xmm5=int6464#4,<xmm3=int6464#1
4266# asm 2: pxor <xmm5=%xmm3,<xmm3=%xmm0
4267pxor %xmm3,%xmm0
4268
4269# qhasm: xmm2 ^= xmm5
4270# asm 1: pxor <xmm5=int6464#4,<xmm2=int6464#2
4271# asm 2: pxor <xmm5=%xmm3,<xmm2=%xmm1
4272pxor %xmm3,%xmm1
4273
4274# qhasm: xmm5 = xmm15
4275# asm 1: movdqa <xmm15=int6464#16,>xmm5=int6464#3
4276# asm 2: movdqa <xmm15=%xmm15,>xmm5=%xmm2
4277movdqa %xmm15,%xmm2
4278
4279# qhasm: xmm5 ^= xmm9
4280# asm 1: pxor <xmm9=int6464#10,<xmm5=int6464#3
4281# asm 2: pxor <xmm9=%xmm9,<xmm5=%xmm2
4282pxor %xmm9,%xmm2
4283
4284# qhasm: xmm4 = xmm13
4285# asm 1: movdqa <xmm13=int6464#14,>xmm4=int6464#4
4286# asm 2: movdqa <xmm13=%xmm13,>xmm4=%xmm3
4287movdqa %xmm13,%xmm3
4288
4289# qhasm: xmm1 = xmm5
4290# asm 1: movdqa <xmm5=int6464#3,>xmm1=int6464#5
4291# asm 2: movdqa <xmm5=%xmm2,>xmm1=%xmm4
4292movdqa %xmm2,%xmm4
4293
4294# qhasm: xmm4 ^= xmm14
4295# asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#4
4296# asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm3
4297pxor %xmm14,%xmm3
4298
4299# qhasm: xmm1 |= xmm4
4300# asm 1: por <xmm4=int6464#4,<xmm1=int6464#5
4301# asm 2: por <xmm4=%xmm3,<xmm1=%xmm4
4302por %xmm3,%xmm4
4303
4304# qhasm: xmm5 &= xmm4
4305# asm 1: pand <xmm4=int6464#4,<xmm5=int6464#3
4306# asm 2: pand <xmm4=%xmm3,<xmm5=%xmm2
4307pand %xmm3,%xmm2
4308
4309# qhasm: xmm0 ^= xmm5
4310# asm 1: pxor <xmm5=int6464#3,<xmm0=int6464#7
4311# asm 2: pxor <xmm5=%xmm2,<xmm0=%xmm6
4312pxor %xmm2,%xmm6
4313
4314# qhasm: xmm3 ^= xmm7
4315# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#1
4316# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm0
4317pxor %xmm7,%xmm0
4318
4319# qhasm: xmm2 ^= xmm6
4320# asm 1: pxor <xmm6=int6464#6,<xmm2=int6464#2
4321# asm 2: pxor <xmm6=%xmm5,<xmm2=%xmm1
4322pxor %xmm5,%xmm1
4323
4324# qhasm: xmm1 ^= xmm7
4325# asm 1: pxor <xmm7=int6464#8,<xmm1=int6464#5
4326# asm 2: pxor <xmm7=%xmm7,<xmm1=%xmm4
4327pxor %xmm7,%xmm4
4328
4329# qhasm: xmm0 ^= xmm6
4330# asm 1: pxor <xmm6=int6464#6,<xmm0=int6464#7
4331# asm 2: pxor <xmm6=%xmm5,<xmm0=%xmm6
4332pxor %xmm5,%xmm6
4333
4334# qhasm: xmm1 ^= xmm6
4335# asm 1: pxor <xmm6=int6464#6,<xmm1=int6464#5
4336# asm 2: pxor <xmm6=%xmm5,<xmm1=%xmm4
4337pxor %xmm5,%xmm4
4338
4339# qhasm: xmm4 = xmm10
4340# asm 1: movdqa <xmm10=int6464#11,>xmm4=int6464#3
4341# asm 2: movdqa <xmm10=%xmm10,>xmm4=%xmm2
4342movdqa %xmm10,%xmm2
4343
4344# qhasm: xmm5 = xmm12
4345# asm 1: movdqa <xmm12=int6464#13,>xmm5=int6464#4
4346# asm 2: movdqa <xmm12=%xmm12,>xmm5=%xmm3
4347movdqa %xmm12,%xmm3
4348
4349# qhasm: xmm6 = xmm9
4350# asm 1: movdqa <xmm9=int6464#10,>xmm6=int6464#6
4351# asm 2: movdqa <xmm9=%xmm9,>xmm6=%xmm5
4352movdqa %xmm9,%xmm5
4353
4354# qhasm: xmm7 = xmm15
4355# asm 1: movdqa <xmm15=int6464#16,>xmm7=int6464#8
4356# asm 2: movdqa <xmm15=%xmm15,>xmm7=%xmm7
4357movdqa %xmm15,%xmm7
4358
4359# qhasm: xmm4 &= xmm11
4360# asm 1: pand <xmm11=int6464#12,<xmm4=int6464#3
4361# asm 2: pand <xmm11=%xmm11,<xmm4=%xmm2
4362pand %xmm11,%xmm2
4363
4364# qhasm: xmm5 &= xmm8
4365# asm 1: pand <xmm8=int6464#9,<xmm5=int6464#4
4366# asm 2: pand <xmm8=%xmm8,<xmm5=%xmm3
4367pand %xmm8,%xmm3
4368
4369# qhasm: xmm6 &= xmm13
4370# asm 1: pand <xmm13=int6464#14,<xmm6=int6464#6
4371# asm 2: pand <xmm13=%xmm13,<xmm6=%xmm5
4372pand %xmm13,%xmm5
4373
4374# qhasm: xmm7 |= xmm14
4375# asm 1: por <xmm14=int6464#15,<xmm7=int6464#8
4376# asm 2: por <xmm14=%xmm14,<xmm7=%xmm7
4377por %xmm14,%xmm7
4378
4379# qhasm: xmm3 ^= xmm4
4380# asm 1: pxor <xmm4=int6464#3,<xmm3=int6464#1
4381# asm 2: pxor <xmm4=%xmm2,<xmm3=%xmm0
4382pxor %xmm2,%xmm0
4383
4384# qhasm: xmm2 ^= xmm5
4385# asm 1: pxor <xmm5=int6464#4,<xmm2=int6464#2
4386# asm 2: pxor <xmm5=%xmm3,<xmm2=%xmm1
4387pxor %xmm3,%xmm1
4388
4389# qhasm: xmm1 ^= xmm6
4390# asm 1: pxor <xmm6=int6464#6,<xmm1=int6464#5
4391# asm 2: pxor <xmm6=%xmm5,<xmm1=%xmm4
4392pxor %xmm5,%xmm4
4393
4394# qhasm: xmm0 ^= xmm7
4395# asm 1: pxor <xmm7=int6464#8,<xmm0=int6464#7
4396# asm 2: pxor <xmm7=%xmm7,<xmm0=%xmm6
4397pxor %xmm7,%xmm6
4398
4399# qhasm: xmm4 = xmm3
4400# asm 1: movdqa <xmm3=int6464#1,>xmm4=int6464#3
4401# asm 2: movdqa <xmm3=%xmm0,>xmm4=%xmm2
4402movdqa %xmm0,%xmm2
4403
4404# qhasm: xmm4 ^= xmm2
4405# asm 1: pxor <xmm2=int6464#2,<xmm4=int6464#3
4406# asm 2: pxor <xmm2=%xmm1,<xmm4=%xmm2
4407pxor %xmm1,%xmm2
4408
4409# qhasm: xmm3 &= xmm1
4410# asm 1: pand <xmm1=int6464#5,<xmm3=int6464#1
4411# asm 2: pand <xmm1=%xmm4,<xmm3=%xmm0
4412pand %xmm4,%xmm0
4413
4414# qhasm: xmm6 = xmm0
4415# asm 1: movdqa <xmm0=int6464#7,>xmm6=int6464#4
4416# asm 2: movdqa <xmm0=%xmm6,>xmm6=%xmm3
4417movdqa %xmm6,%xmm3
4418
4419# qhasm: xmm6 ^= xmm3
4420# asm 1: pxor <xmm3=int6464#1,<xmm6=int6464#4
4421# asm 2: pxor <xmm3=%xmm0,<xmm6=%xmm3
4422pxor %xmm0,%xmm3
4423
4424# qhasm: xmm7 = xmm4
4425# asm 1: movdqa <xmm4=int6464#3,>xmm7=int6464#6
4426# asm 2: movdqa <xmm4=%xmm2,>xmm7=%xmm5
4427movdqa %xmm2,%xmm5
4428
4429# qhasm: xmm7 &= xmm6
4430# asm 1: pand <xmm6=int6464#4,<xmm7=int6464#6
4431# asm 2: pand <xmm6=%xmm3,<xmm7=%xmm5
4432pand %xmm3,%xmm5
4433
4434# qhasm: xmm7 ^= xmm2
4435# asm 1: pxor <xmm2=int6464#2,<xmm7=int6464#6
4436# asm 2: pxor <xmm2=%xmm1,<xmm7=%xmm5
4437pxor %xmm1,%xmm5
4438
4439# qhasm: xmm5 = xmm1
4440# asm 1: movdqa <xmm1=int6464#5,>xmm5=int6464#8
4441# asm 2: movdqa <xmm1=%xmm4,>xmm5=%xmm7
4442movdqa %xmm4,%xmm7
4443
4444# qhasm: xmm5 ^= xmm0
4445# asm 1: pxor <xmm0=int6464#7,<xmm5=int6464#8
4446# asm 2: pxor <xmm0=%xmm6,<xmm5=%xmm7
4447pxor %xmm6,%xmm7
4448
4449# qhasm: xmm3 ^= xmm2
4450# asm 1: pxor <xmm2=int6464#2,<xmm3=int6464#1
4451# asm 2: pxor <xmm2=%xmm1,<xmm3=%xmm0
4452pxor %xmm1,%xmm0
4453
4454# qhasm: xmm5 &= xmm3
4455# asm 1: pand <xmm3=int6464#1,<xmm5=int6464#8
4456# asm 2: pand <xmm3=%xmm0,<xmm5=%xmm7
4457pand %xmm0,%xmm7
4458
4459# qhasm: xmm5 ^= xmm0
4460# asm 1: pxor <xmm0=int6464#7,<xmm5=int6464#8
4461# asm 2: pxor <xmm0=%xmm6,<xmm5=%xmm7
4462pxor %xmm6,%xmm7
4463
4464# qhasm: xmm1 ^= xmm5
4465# asm 1: pxor <xmm5=int6464#8,<xmm1=int6464#5
4466# asm 2: pxor <xmm5=%xmm7,<xmm1=%xmm4
4467pxor %xmm7,%xmm4
4468
4469# qhasm: xmm2 = xmm6
4470# asm 1: movdqa <xmm6=int6464#4,>xmm2=int6464#1
4471# asm 2: movdqa <xmm6=%xmm3,>xmm2=%xmm0
4472movdqa %xmm3,%xmm0
4473
4474# qhasm: xmm2 ^= xmm5
4475# asm 1: pxor <xmm5=int6464#8,<xmm2=int6464#1
4476# asm 2: pxor <xmm5=%xmm7,<xmm2=%xmm0
4477pxor %xmm7,%xmm0
4478
4479# qhasm: xmm2 &= xmm0
4480# asm 1: pand <xmm0=int6464#7,<xmm2=int6464#1
4481# asm 2: pand <xmm0=%xmm6,<xmm2=%xmm0
4482pand %xmm6,%xmm0
4483
4484# qhasm: xmm1 ^= xmm2
4485# asm 1: pxor <xmm2=int6464#1,<xmm1=int6464#5
4486# asm 2: pxor <xmm2=%xmm0,<xmm1=%xmm4
4487pxor %xmm0,%xmm4
4488
4489# qhasm: xmm6 ^= xmm2
4490# asm 1: pxor <xmm2=int6464#1,<xmm6=int6464#4
4491# asm 2: pxor <xmm2=%xmm0,<xmm6=%xmm3
4492pxor %xmm0,%xmm3
4493
4494# qhasm: xmm6 &= xmm7
4495# asm 1: pand <xmm7=int6464#6,<xmm6=int6464#4
4496# asm 2: pand <xmm7=%xmm5,<xmm6=%xmm3
4497pand %xmm5,%xmm3
4498
4499# qhasm: xmm6 ^= xmm4
4500# asm 1: pxor <xmm4=int6464#3,<xmm6=int6464#4
4501# asm 2: pxor <xmm4=%xmm2,<xmm6=%xmm3
4502pxor %xmm2,%xmm3
4503
4504# qhasm: xmm4 = xmm14
4505# asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#1
4506# asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm0
4507movdqa %xmm14,%xmm0
4508
4509# qhasm: xmm0 = xmm13
4510# asm 1: movdqa <xmm13=int6464#14,>xmm0=int6464#2
4511# asm 2: movdqa <xmm13=%xmm13,>xmm0=%xmm1
4512movdqa %xmm13,%xmm1
4513
4514# qhasm: xmm2 = xmm7
4515# asm 1: movdqa <xmm7=int6464#6,>xmm2=int6464#3
4516# asm 2: movdqa <xmm7=%xmm5,>xmm2=%xmm2
4517movdqa %xmm5,%xmm2
4518
4519# qhasm: xmm2 ^= xmm6
4520# asm 1: pxor <xmm6=int6464#4,<xmm2=int6464#3
4521# asm 2: pxor <xmm6=%xmm3,<xmm2=%xmm2
4522pxor %xmm3,%xmm2
4523
4524# qhasm: xmm2 &= xmm14
4525# asm 1: pand <xmm14=int6464#15,<xmm2=int6464#3
4526# asm 2: pand <xmm14=%xmm14,<xmm2=%xmm2
4527pand %xmm14,%xmm2
4528
4529# qhasm: xmm14 ^= xmm13
4530# asm 1: pxor <xmm13=int6464#14,<xmm14=int6464#15
4531# asm 2: pxor <xmm13=%xmm13,<xmm14=%xmm14
4532pxor %xmm13,%xmm14
4533
4534# qhasm: xmm14 &= xmm6
4535# asm 1: pand <xmm6=int6464#4,<xmm14=int6464#15
4536# asm 2: pand <xmm6=%xmm3,<xmm14=%xmm14
4537pand %xmm3,%xmm14
4538
4539# qhasm: xmm13 &= xmm7
4540# asm 1: pand <xmm7=int6464#6,<xmm13=int6464#14
4541# asm 2: pand <xmm7=%xmm5,<xmm13=%xmm13
4542pand %xmm5,%xmm13
4543
4544# qhasm: xmm14 ^= xmm13
4545# asm 1: pxor <xmm13=int6464#14,<xmm14=int6464#15
4546# asm 2: pxor <xmm13=%xmm13,<xmm14=%xmm14
4547pxor %xmm13,%xmm14
4548
4549# qhasm: xmm13 ^= xmm2
4550# asm 1: pxor <xmm2=int6464#3,<xmm13=int6464#14
4551# asm 2: pxor <xmm2=%xmm2,<xmm13=%xmm13
4552pxor %xmm2,%xmm13
4553
4554# qhasm: xmm4 ^= xmm8
4555# asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#1
4556# asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm0
4557pxor %xmm8,%xmm0
4558
4559# qhasm: xmm0 ^= xmm11
4560# asm 1: pxor <xmm11=int6464#12,<xmm0=int6464#2
4561# asm 2: pxor <xmm11=%xmm11,<xmm0=%xmm1
4562pxor %xmm11,%xmm1
4563
4564# qhasm: xmm7 ^= xmm5
4565# asm 1: pxor <xmm5=int6464#8,<xmm7=int6464#6
4566# asm 2: pxor <xmm5=%xmm7,<xmm7=%xmm5
4567pxor %xmm7,%xmm5
4568
4569# qhasm: xmm6 ^= xmm1
4570# asm 1: pxor <xmm1=int6464#5,<xmm6=int6464#4
4571# asm 2: pxor <xmm1=%xmm4,<xmm6=%xmm3
4572pxor %xmm4,%xmm3
4573
4574# qhasm: xmm3 = xmm7
4575# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
4576# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
4577movdqa %xmm5,%xmm2
4578
4579# qhasm: xmm3 ^= xmm6
4580# asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3
4581# asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2
4582pxor %xmm3,%xmm2
4583
4584# qhasm: xmm3 &= xmm4
4585# asm 1: pand <xmm4=int6464#1,<xmm3=int6464#3
4586# asm 2: pand <xmm4=%xmm0,<xmm3=%xmm2
4587pand %xmm0,%xmm2
4588
4589# qhasm: xmm4 ^= xmm0
4590# asm 1: pxor <xmm0=int6464#2,<xmm4=int6464#1
4591# asm 2: pxor <xmm0=%xmm1,<xmm4=%xmm0
4592pxor %xmm1,%xmm0
4593
4594# qhasm: xmm4 &= xmm6
4595# asm 1: pand <xmm6=int6464#4,<xmm4=int6464#1
4596# asm 2: pand <xmm6=%xmm3,<xmm4=%xmm0
4597pand %xmm3,%xmm0
4598
4599# qhasm: xmm0 &= xmm7
4600# asm 1: pand <xmm7=int6464#6,<xmm0=int6464#2
4601# asm 2: pand <xmm7=%xmm5,<xmm0=%xmm1
4602pand %xmm5,%xmm1
4603
4604# qhasm: xmm0 ^= xmm4
4605# asm 1: pxor <xmm4=int6464#1,<xmm0=int6464#2
4606# asm 2: pxor <xmm4=%xmm0,<xmm0=%xmm1
4607pxor %xmm0,%xmm1
4608
4609# qhasm: xmm4 ^= xmm3
4610# asm 1: pxor <xmm3=int6464#3,<xmm4=int6464#1
4611# asm 2: pxor <xmm3=%xmm2,<xmm4=%xmm0
4612pxor %xmm2,%xmm0
4613
4614# qhasm: xmm2 = xmm5
4615# asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3
4616# asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2
4617movdqa %xmm7,%xmm2
4618
4619# qhasm: xmm2 ^= xmm1
4620# asm 1: pxor <xmm1=int6464#5,<xmm2=int6464#3
4621# asm 2: pxor <xmm1=%xmm4,<xmm2=%xmm2
4622pxor %xmm4,%xmm2
4623
4624# qhasm: xmm2 &= xmm8
4625# asm 1: pand <xmm8=int6464#9,<xmm2=int6464#3
4626# asm 2: pand <xmm8=%xmm8,<xmm2=%xmm2
4627pand %xmm8,%xmm2
4628
4629# qhasm: xmm8 ^= xmm11
4630# asm 1: pxor <xmm11=int6464#12,<xmm8=int6464#9
4631# asm 2: pxor <xmm11=%xmm11,<xmm8=%xmm8
4632pxor %xmm11,%xmm8
4633
4634# qhasm: xmm8 &= xmm1
4635# asm 1: pand <xmm1=int6464#5,<xmm8=int6464#9
4636# asm 2: pand <xmm1=%xmm4,<xmm8=%xmm8
4637pand %xmm4,%xmm8
4638
4639# qhasm: xmm11 &= xmm5
4640# asm 1: pand <xmm5=int6464#8,<xmm11=int6464#12
4641# asm 2: pand <xmm5=%xmm7,<xmm11=%xmm11
4642pand %xmm7,%xmm11
4643
4644# qhasm: xmm8 ^= xmm11
4645# asm 1: pxor <xmm11=int6464#12,<xmm8=int6464#9
4646# asm 2: pxor <xmm11=%xmm11,<xmm8=%xmm8
4647pxor %xmm11,%xmm8
4648
4649# qhasm: xmm11 ^= xmm2
4650# asm 1: pxor <xmm2=int6464#3,<xmm11=int6464#12
4651# asm 2: pxor <xmm2=%xmm2,<xmm11=%xmm11
4652pxor %xmm2,%xmm11
4653
4654# qhasm: xmm14 ^= xmm4
4655# asm 1: pxor <xmm4=int6464#1,<xmm14=int6464#15
4656# asm 2: pxor <xmm4=%xmm0,<xmm14=%xmm14
4657pxor %xmm0,%xmm14
4658
4659# qhasm: xmm8 ^= xmm4
4660# asm 1: pxor <xmm4=int6464#1,<xmm8=int6464#9
4661# asm 2: pxor <xmm4=%xmm0,<xmm8=%xmm8
4662pxor %xmm0,%xmm8
4663
4664# qhasm: xmm13 ^= xmm0
4665# asm 1: pxor <xmm0=int6464#2,<xmm13=int6464#14
4666# asm 2: pxor <xmm0=%xmm1,<xmm13=%xmm13
4667pxor %xmm1,%xmm13
4668
4669# qhasm: xmm11 ^= xmm0
4670# asm 1: pxor <xmm0=int6464#2,<xmm11=int6464#12
4671# asm 2: pxor <xmm0=%xmm1,<xmm11=%xmm11
4672pxor %xmm1,%xmm11
4673
4674# qhasm: xmm4 = xmm15
4675# asm 1: movdqa <xmm15=int6464#16,>xmm4=int6464#1
4676# asm 2: movdqa <xmm15=%xmm15,>xmm4=%xmm0
4677movdqa %xmm15,%xmm0
4678
4679# qhasm: xmm0 = xmm9
4680# asm 1: movdqa <xmm9=int6464#10,>xmm0=int6464#2
4681# asm 2: movdqa <xmm9=%xmm9,>xmm0=%xmm1
4682movdqa %xmm9,%xmm1
4683
4684# qhasm: xmm4 ^= xmm12
4685# asm 1: pxor <xmm12=int6464#13,<xmm4=int6464#1
4686# asm 2: pxor <xmm12=%xmm12,<xmm4=%xmm0
4687pxor %xmm12,%xmm0
4688
4689# qhasm: xmm0 ^= xmm10
4690# asm 1: pxor <xmm10=int6464#11,<xmm0=int6464#2
4691# asm 2: pxor <xmm10=%xmm10,<xmm0=%xmm1
4692pxor %xmm10,%xmm1
4693
4694# qhasm: xmm3 = xmm7
4695# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
4696# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
4697movdqa %xmm5,%xmm2
4698
4699# qhasm: xmm3 ^= xmm6
4700# asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3
4701# asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2
4702pxor %xmm3,%xmm2
4703
4704# qhasm: xmm3 &= xmm4
4705# asm 1: pand <xmm4=int6464#1,<xmm3=int6464#3
4706# asm 2: pand <xmm4=%xmm0,<xmm3=%xmm2
4707pand %xmm0,%xmm2
4708
4709# qhasm: xmm4 ^= xmm0
4710# asm 1: pxor <xmm0=int6464#2,<xmm4=int6464#1
4711# asm 2: pxor <xmm0=%xmm1,<xmm4=%xmm0
4712pxor %xmm1,%xmm0
4713
4714# qhasm: xmm4 &= xmm6
4715# asm 1: pand <xmm6=int6464#4,<xmm4=int6464#1
4716# asm 2: pand <xmm6=%xmm3,<xmm4=%xmm0
4717pand %xmm3,%xmm0
4718
4719# qhasm: xmm0 &= xmm7
4720# asm 1: pand <xmm7=int6464#6,<xmm0=int6464#2
4721# asm 2: pand <xmm7=%xmm5,<xmm0=%xmm1
4722pand %xmm5,%xmm1
4723
4724# qhasm: xmm0 ^= xmm4
4725# asm 1: pxor <xmm4=int6464#1,<xmm0=int6464#2
4726# asm 2: pxor <xmm4=%xmm0,<xmm0=%xmm1
4727pxor %xmm0,%xmm1
4728
4729# qhasm: xmm4 ^= xmm3
4730# asm 1: pxor <xmm3=int6464#3,<xmm4=int6464#1
4731# asm 2: pxor <xmm3=%xmm2,<xmm4=%xmm0
4732pxor %xmm2,%xmm0
4733
4734# qhasm: xmm2 = xmm5
4735# asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3
4736# asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2
4737movdqa %xmm7,%xmm2
4738
4739# qhasm: xmm2 ^= xmm1
4740# asm 1: pxor <xmm1=int6464#5,<xmm2=int6464#3
4741# asm 2: pxor <xmm1=%xmm4,<xmm2=%xmm2
4742pxor %xmm4,%xmm2
4743
4744# qhasm: xmm2 &= xmm12
4745# asm 1: pand <xmm12=int6464#13,<xmm2=int6464#3
4746# asm 2: pand <xmm12=%xmm12,<xmm2=%xmm2
4747pand %xmm12,%xmm2
4748
4749# qhasm: xmm12 ^= xmm10
4750# asm 1: pxor <xmm10=int6464#11,<xmm12=int6464#13
4751# asm 2: pxor <xmm10=%xmm10,<xmm12=%xmm12
4752pxor %xmm10,%xmm12
4753
4754# qhasm: xmm12 &= xmm1
4755# asm 1: pand <xmm1=int6464#5,<xmm12=int6464#13
4756# asm 2: pand <xmm1=%xmm4,<xmm12=%xmm12
4757pand %xmm4,%xmm12
4758
4759# qhasm: xmm10 &= xmm5
4760# asm 1: pand <xmm5=int6464#8,<xmm10=int6464#11
4761# asm 2: pand <xmm5=%xmm7,<xmm10=%xmm10
4762pand %xmm7,%xmm10
4763
4764# qhasm: xmm12 ^= xmm10
4765# asm 1: pxor <xmm10=int6464#11,<xmm12=int6464#13
4766# asm 2: pxor <xmm10=%xmm10,<xmm12=%xmm12
4767pxor %xmm10,%xmm12
4768
4769# qhasm: xmm10 ^= xmm2
4770# asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#11
4771# asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm10
4772pxor %xmm2,%xmm10
4773
4774# qhasm: xmm7 ^= xmm5
4775# asm 1: pxor <xmm5=int6464#8,<xmm7=int6464#6
4776# asm 2: pxor <xmm5=%xmm7,<xmm7=%xmm5
4777pxor %xmm7,%xmm5
4778
4779# qhasm: xmm6 ^= xmm1
4780# asm 1: pxor <xmm1=int6464#5,<xmm6=int6464#4
4781# asm 2: pxor <xmm1=%xmm4,<xmm6=%xmm3
4782pxor %xmm4,%xmm3
4783
4784# qhasm: xmm3 = xmm7
4785# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
4786# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
4787movdqa %xmm5,%xmm2
4788
4789# qhasm: xmm3 ^= xmm6
4790# asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3
4791# asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2
4792pxor %xmm3,%xmm2
4793
4794# qhasm: xmm3 &= xmm15
4795# asm 1: pand <xmm15=int6464#16,<xmm3=int6464#3
4796# asm 2: pand <xmm15=%xmm15,<xmm3=%xmm2
4797pand %xmm15,%xmm2
4798
4799# qhasm: xmm15 ^= xmm9
4800# asm 1: pxor <xmm9=int6464#10,<xmm15=int6464#16
4801# asm 2: pxor <xmm9=%xmm9,<xmm15=%xmm15
4802pxor %xmm9,%xmm15
4803
4804# qhasm: xmm15 &= xmm6
4805# asm 1: pand <xmm6=int6464#4,<xmm15=int6464#16
4806# asm 2: pand <xmm6=%xmm3,<xmm15=%xmm15
4807pand %xmm3,%xmm15
4808
4809# qhasm: xmm9 &= xmm7
4810# asm 1: pand <xmm7=int6464#6,<xmm9=int6464#10
4811# asm 2: pand <xmm7=%xmm5,<xmm9=%xmm9
4812pand %xmm5,%xmm9
4813
4814# qhasm: xmm15 ^= xmm9
4815# asm 1: pxor <xmm9=int6464#10,<xmm15=int6464#16
4816# asm 2: pxor <xmm9=%xmm9,<xmm15=%xmm15
4817pxor %xmm9,%xmm15
4818
4819# qhasm: xmm9 ^= xmm3
4820# asm 1: pxor <xmm3=int6464#3,<xmm9=int6464#10
4821# asm 2: pxor <xmm3=%xmm2,<xmm9=%xmm9
4822pxor %xmm2,%xmm9
4823
4824# qhasm: xmm15 ^= xmm4
4825# asm 1: pxor <xmm4=int6464#1,<xmm15=int6464#16
4826# asm 2: pxor <xmm4=%xmm0,<xmm15=%xmm15
4827pxor %xmm0,%xmm15
4828
4829# qhasm: xmm12 ^= xmm4
4830# asm 1: pxor <xmm4=int6464#1,<xmm12=int6464#13
4831# asm 2: pxor <xmm4=%xmm0,<xmm12=%xmm12
4832pxor %xmm0,%xmm12
4833
4834# qhasm: xmm9 ^= xmm0
4835# asm 1: pxor <xmm0=int6464#2,<xmm9=int6464#10
4836# asm 2: pxor <xmm0=%xmm1,<xmm9=%xmm9
4837pxor %xmm1,%xmm9
4838
4839# qhasm: xmm10 ^= xmm0
4840# asm 1: pxor <xmm0=int6464#2,<xmm10=int6464#11
4841# asm 2: pxor <xmm0=%xmm1,<xmm10=%xmm10
4842pxor %xmm1,%xmm10
4843
4844# qhasm: xmm15 ^= xmm8
4845# asm 1: pxor <xmm8=int6464#9,<xmm15=int6464#16
4846# asm 2: pxor <xmm8=%xmm8,<xmm15=%xmm15
4847pxor %xmm8,%xmm15
4848
4849# qhasm: xmm9 ^= xmm14
4850# asm 1: pxor <xmm14=int6464#15,<xmm9=int6464#10
4851# asm 2: pxor <xmm14=%xmm14,<xmm9=%xmm9
4852pxor %xmm14,%xmm9
4853
4854# qhasm: xmm12 ^= xmm15
4855# asm 1: pxor <xmm15=int6464#16,<xmm12=int6464#13
4856# asm 2: pxor <xmm15=%xmm15,<xmm12=%xmm12
4857pxor %xmm15,%xmm12
4858
4859# qhasm: xmm14 ^= xmm8
4860# asm 1: pxor <xmm8=int6464#9,<xmm14=int6464#15
4861# asm 2: pxor <xmm8=%xmm8,<xmm14=%xmm14
4862pxor %xmm8,%xmm14
4863
4864# qhasm: xmm8 ^= xmm9
4865# asm 1: pxor <xmm9=int6464#10,<xmm8=int6464#9
4866# asm 2: pxor <xmm9=%xmm9,<xmm8=%xmm8
4867pxor %xmm9,%xmm8
4868
4869# qhasm: xmm9 ^= xmm13
4870# asm 1: pxor <xmm13=int6464#14,<xmm9=int6464#10
4871# asm 2: pxor <xmm13=%xmm13,<xmm9=%xmm9
4872pxor %xmm13,%xmm9
4873
4874# qhasm: xmm13 ^= xmm10
4875# asm 1: pxor <xmm10=int6464#11,<xmm13=int6464#14
4876# asm 2: pxor <xmm10=%xmm10,<xmm13=%xmm13
4877pxor %xmm10,%xmm13
4878
4879# qhasm: xmm12 ^= xmm13
4880# asm 1: pxor <xmm13=int6464#14,<xmm12=int6464#13
4881# asm 2: pxor <xmm13=%xmm13,<xmm12=%xmm12
4882pxor %xmm13,%xmm12
4883
4884# qhasm: xmm10 ^= xmm11
4885# asm 1: pxor <xmm11=int6464#12,<xmm10=int6464#11
4886# asm 2: pxor <xmm11=%xmm11,<xmm10=%xmm10
4887pxor %xmm11,%xmm10
4888
4889# qhasm: xmm11 ^= xmm13
4890# asm 1: pxor <xmm13=int6464#14,<xmm11=int6464#12
4891# asm 2: pxor <xmm13=%xmm13,<xmm11=%xmm11
4892pxor %xmm13,%xmm11
4893
4894# qhasm: xmm14 ^= xmm11
4895# asm 1: pxor <xmm11=int6464#12,<xmm14=int6464#15
4896# asm 2: pxor <xmm11=%xmm11,<xmm14=%xmm14
4897pxor %xmm11,%xmm14
4898
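# Annotation: the S-box output (bottom linear layer) is now complete.
# The pshufd $0x93 shuffles below rotate the dwords of each slice,
# i.e. rotate each 128-bit slice by 32 bits; these rotated copies are
# the first ingredient of MixColumns on bitsliced state.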
4899# qhasm: xmm0 = shuffle dwords of xmm8 by 0x93
4900# asm 1: pshufd $0x93,<xmm8=int6464#9,>xmm0=int6464#1
4901# asm 2: pshufd $0x93,<xmm8=%xmm8,>xmm0=%xmm0
4902pshufd $0x93,%xmm8,%xmm0
4903
4904# qhasm: xmm1 = shuffle dwords of xmm9 by 0x93
4905# asm 1: pshufd $0x93,<xmm9=int6464#10,>xmm1=int6464#2
4906# asm 2: pshufd $0x93,<xmm9=%xmm9,>xmm1=%xmm1
4907pshufd $0x93,%xmm9,%xmm1
4908
4909# qhasm: xmm2 = shuffle dwords of xmm12 by 0x93
4910# asm 1: pshufd $0x93,<xmm12=int6464#13,>xmm2=int6464#3
4911# asm 2: pshufd $0x93,<xmm12=%xmm12,>xmm2=%xmm2
4912pshufd $0x93,%xmm12,%xmm2
4913
4914# qhasm: xmm3 = shuffle dwords of xmm14 by 0x93
4915# asm 1: pshufd $0x93,<xmm14=int6464#15,>xmm3=int6464#4
4916# asm 2: pshufd $0x93,<xmm14=%xmm14,>xmm3=%xmm3
4917pshufd $0x93,%xmm14,%xmm3
4918
4919# qhasm: xmm4 = shuffle dwords of xmm11 by 0x93
4920# asm 1: pshufd $0x93,<xmm11=int6464#12,>xmm4=int6464#5
4921# asm 2: pshufd $0x93,<xmm11=%xmm11,>xmm4=%xmm4
4922pshufd $0x93,%xmm11,%xmm4
4923
4924# qhasm: xmm5 = shuffle dwords of xmm15 by 0x93
4925# asm 1: pshufd $0x93,<xmm15=int6464#16,>xmm5=int6464#6
4926# asm 2: pshufd $0x93,<xmm15=%xmm15,>xmm5=%xmm5
4927pshufd $0x93,%xmm15,%xmm5
4928
4929# qhasm: xmm6 = shuffle dwords of xmm10 by 0x93
4930# asm 1: pshufd $0x93,<xmm10=int6464#11,>xmm6=int6464#7
4931# asm 2: pshufd $0x93,<xmm10=%xmm10,>xmm6=%xmm6
4932pshufd $0x93,%xmm10,%xmm6
4933
4934# qhasm: xmm7 = shuffle dwords of xmm13 by 0x93
4935# asm 1: pshufd $0x93,<xmm13=int6464#14,>xmm7=int6464#8
4936# asm 2: pshufd $0x93,<xmm13=%xmm13,>xmm7=%xmm7
4937pshufd $0x93,%xmm13,%xmm7
4938
4939# qhasm: xmm8 ^= xmm0
4940# asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9
4941# asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8
4942pxor %xmm0,%xmm8
4943
4944# qhasm: xmm9 ^= xmm1
4945# asm 1: pxor <xmm1=int6464#2,<xmm9=int6464#10
4946# asm 2: pxor <xmm1=%xmm1,<xmm9=%xmm9
4947pxor %xmm1,%xmm9
4948
4949# qhasm: xmm12 ^= xmm2
4950# asm 1: pxor <xmm2=int6464#3,<xmm12=int6464#13
4951# asm 2: pxor <xmm2=%xmm2,<xmm12=%xmm12
4952pxor %xmm2,%xmm12
4953
4954# qhasm: xmm14 ^= xmm3
4955# asm 1: pxor <xmm3=int6464#4,<xmm14=int6464#15
4956# asm 2: pxor <xmm3=%xmm3,<xmm14=%xmm14
4957pxor %xmm3,%xmm14
4958
4959# qhasm: xmm11 ^= xmm4
4960# asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#12
4961# asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm11
4962pxor %xmm4,%xmm11
4963
4964# qhasm: xmm15 ^= xmm5
4965# asm 1: pxor <xmm5=int6464#6,<xmm15=int6464#16
4966# asm 2: pxor <xmm5=%xmm5,<xmm15=%xmm15
4967pxor %xmm5,%xmm15
4968
4969# qhasm: xmm10 ^= xmm6
4970# asm 1: pxor <xmm6=int6464#7,<xmm10=int6464#11
4971# asm 2: pxor <xmm6=%xmm6,<xmm10=%xmm10
4972pxor %xmm6,%xmm10
4973
4974# qhasm: xmm13 ^= xmm7
4975# asm 1: pxor <xmm7=int6464#8,<xmm13=int6464#14
4976# asm 2: pxor <xmm7=%xmm7,<xmm13=%xmm13
4977pxor %xmm7,%xmm13
4978
4979# qhasm: xmm0 ^= xmm13
4980# asm 1: pxor <xmm13=int6464#14,<xmm0=int6464#1
4981# asm 2: pxor <xmm13=%xmm13,<xmm0=%xmm0
4982pxor %xmm13,%xmm0
4983
4984# qhasm: xmm1 ^= xmm8
4985# asm 1: pxor <xmm8=int6464#9,<xmm1=int6464#2
4986# asm 2: pxor <xmm8=%xmm8,<xmm1=%xmm1
4987pxor %xmm8,%xmm1
4988
4989# qhasm: xmm2 ^= xmm9
4990# asm 1: pxor <xmm9=int6464#10,<xmm2=int6464#3
4991# asm 2: pxor <xmm9=%xmm9,<xmm2=%xmm2
4992pxor %xmm9,%xmm2
4993
4994# qhasm: xmm1 ^= xmm13
4995# asm 1: pxor <xmm13=int6464#14,<xmm1=int6464#2
4996# asm 2: pxor <xmm13=%xmm13,<xmm1=%xmm1
4997pxor %xmm13,%xmm1
4998
4999# qhasm: xmm3 ^= xmm12
5000# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4
5001# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3
5002pxor %xmm12,%xmm3
5003
5004# qhasm: xmm4 ^= xmm14
5005# asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5
5006# asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4
5007pxor %xmm14,%xmm4
5008
5009# qhasm: xmm5 ^= xmm11
5010# asm 1: pxor <xmm11=int6464#12,<xmm5=int6464#6
5011# asm 2: pxor <xmm11=%xmm11,<xmm5=%xmm5
5012pxor %xmm11,%xmm5
5013
5014# qhasm: xmm3 ^= xmm13
5015# asm 1: pxor <xmm13=int6464#14,<xmm3=int6464#4
5016# asm 2: pxor <xmm13=%xmm13,<xmm3=%xmm3
5017pxor %xmm13,%xmm3
5018
5019# qhasm: xmm6 ^= xmm15
5020# asm 1: pxor <xmm15=int6464#16,<xmm6=int6464#7
5021# asm 2: pxor <xmm15=%xmm15,<xmm6=%xmm6
5022pxor %xmm15,%xmm6
5023
5024# qhasm: xmm7 ^= xmm10
5025# asm 1: pxor <xmm10=int6464#11,<xmm7=int6464#8
5026# asm 2: pxor <xmm10=%xmm10,<xmm7=%xmm7
5027pxor %xmm10,%xmm7
5028
5029# qhasm: xmm4 ^= xmm13
5030# asm 1: pxor <xmm13=int6464#14,<xmm4=int6464#5
5031# asm 2: pxor <xmm13=%xmm13,<xmm4=%xmm4
5032pxor %xmm13,%xmm4
5033
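# Annotation: pshufd $0x4E swaps the two 64-bit halves of each slice
# (a 64-bit rotation); xoring these in below finishes the column
# mixing for this round.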
5034# qhasm: xmm8 = shuffle dwords of xmm8 by 0x4E
5035# asm 1: pshufd $0x4E,<xmm8=int6464#9,>xmm8=int6464#9
5036# asm 2: pshufd $0x4E,<xmm8=%xmm8,>xmm8=%xmm8
5037pshufd $0x4E,%xmm8,%xmm8
5038
5039# qhasm: xmm9 = shuffle dwords of xmm9 by 0x4E
5040# asm 1: pshufd $0x4E,<xmm9=int6464#10,>xmm9=int6464#10
5041# asm 2: pshufd $0x4E,<xmm9=%xmm9,>xmm9=%xmm9
5042pshufd $0x4E,%xmm9,%xmm9
5043
5044# qhasm: xmm12 = shuffle dwords of xmm12 by 0x4E
5045# asm 1: pshufd $0x4E,<xmm12=int6464#13,>xmm12=int6464#13
5046# asm 2: pshufd $0x4E,<xmm12=%xmm12,>xmm12=%xmm12
5047pshufd $0x4E,%xmm12,%xmm12
5048
5049# qhasm: xmm14 = shuffle dwords of xmm14 by 0x4E
5050# asm 1: pshufd $0x4E,<xmm14=int6464#15,>xmm14=int6464#15
5051# asm 2: pshufd $0x4E,<xmm14=%xmm14,>xmm14=%xmm14
5052pshufd $0x4E,%xmm14,%xmm14
5053
5054# qhasm: xmm11 = shuffle dwords of xmm11 by 0x4E
5055# asm 1: pshufd $0x4E,<xmm11=int6464#12,>xmm11=int6464#12
5056# asm 2: pshufd $0x4E,<xmm11=%xmm11,>xmm11=%xmm11
5057pshufd $0x4E,%xmm11,%xmm11
5058
5059# qhasm: xmm15 = shuffle dwords of xmm15 by 0x4E
5060# asm 1: pshufd $0x4E,<xmm15=int6464#16,>xmm15=int6464#16
5061# asm 2: pshufd $0x4E,<xmm15=%xmm15,>xmm15=%xmm15
5062pshufd $0x4E,%xmm15,%xmm15
5063
5064# qhasm: xmm10 = shuffle dwords of xmm10 by 0x4E
5065# asm 1: pshufd $0x4E,<xmm10=int6464#11,>xmm10=int6464#11
5066# asm 2: pshufd $0x4E,<xmm10=%xmm10,>xmm10=%xmm10
5067pshufd $0x4E,%xmm10,%xmm10
5068
5069# qhasm: xmm13 = shuffle dwords of xmm13 by 0x4E
5070# asm 1: pshufd $0x4E,<xmm13=int6464#14,>xmm13=int6464#14
5071# asm 2: pshufd $0x4E,<xmm13=%xmm13,>xmm13=%xmm13
5072pshufd $0x4E,%xmm13,%xmm13
5073
5074# qhasm: xmm0 ^= xmm8
5075# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
5076# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
5077pxor %xmm8,%xmm0
5078
5079# qhasm: xmm1 ^= xmm9
5080# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
5081# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
5082pxor %xmm9,%xmm1
5083
5084# qhasm: xmm2 ^= xmm12
5085# asm 1: pxor <xmm12=int6464#13,<xmm2=int6464#3
5086# asm 2: pxor <xmm12=%xmm12,<xmm2=%xmm2
5087pxor %xmm12,%xmm2
5088
5089# qhasm: xmm3 ^= xmm14
5090# asm 1: pxor <xmm14=int6464#15,<xmm3=int6464#4
5091# asm 2: pxor <xmm14=%xmm14,<xmm3=%xmm3
5092pxor %xmm14,%xmm3
5093
5094# qhasm: xmm4 ^= xmm11
5095# asm 1: pxor <xmm11=int6464#12,<xmm4=int6464#5
5096# asm 2: pxor <xmm11=%xmm11,<xmm4=%xmm4
5097pxor %xmm11,%xmm4
5098
5099# qhasm: xmm5 ^= xmm15
5100# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
5101# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
5102pxor %xmm15,%xmm5
5103
5104# qhasm: xmm6 ^= xmm10
5105# asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7
5106# asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6
5107pxor %xmm10,%xmm6
5108
5109# qhasm: xmm7 ^= xmm13
5110# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
5111# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
5112pxor %xmm13,%xmm7
5113
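# Annotation: AddRoundKey + ShiftRows for the next round, now operating
# on the slice set xmm0..xmm7 with the 128 round-key bytes at
# c+512..c+624, again followed by the SR byte shuffle per slice.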
5114# qhasm: xmm0 ^= *(int128 *)(c + 512)
5115# asm 1: pxor 512(<c=int64#5),<xmm0=int6464#1
5116# asm 2: pxor 512(<c=%r8),<xmm0=%xmm0
5117pxor 512(%r8),%xmm0
5118
5119# qhasm: shuffle bytes of xmm0 by SR
5120# asm 1: pshufb SR,<xmm0=int6464#1
5121# asm 2: pshufb SR,<xmm0=%xmm0
5122pshufb SR,%xmm0
5123
5124# qhasm: xmm1 ^= *(int128 *)(c + 528)
5125# asm 1: pxor 528(<c=int64#5),<xmm1=int6464#2
5126# asm 2: pxor 528(<c=%r8),<xmm1=%xmm1
5127pxor 528(%r8),%xmm1
5128
5129# qhasm: shuffle bytes of xmm1 by SR
5130# asm 1: pshufb SR,<xmm1=int6464#2
5131# asm 2: pshufb SR,<xmm1=%xmm1
5132pshufb SR,%xmm1
5133
5134# qhasm: xmm2 ^= *(int128 *)(c + 544)
5135# asm 1: pxor 544(<c=int64#5),<xmm2=int6464#3
5136# asm 2: pxor 544(<c=%r8),<xmm2=%xmm2
5137pxor 544(%r8),%xmm2
5138
5139# qhasm: shuffle bytes of xmm2 by SR
5140# asm 1: pshufb SR,<xmm2=int6464#3
5141# asm 2: pshufb SR,<xmm2=%xmm2
5142pshufb SR,%xmm2
5143
5144# qhasm: xmm3 ^= *(int128 *)(c + 560)
5145# asm 1: pxor 560(<c=int64#5),<xmm3=int6464#4
5146# asm 2: pxor 560(<c=%r8),<xmm3=%xmm3
5147pxor 560(%r8),%xmm3
5148
5149# qhasm: shuffle bytes of xmm3 by SR
5150# asm 1: pshufb SR,<xmm3=int6464#4
5151# asm 2: pshufb SR,<xmm3=%xmm3
5152pshufb SR,%xmm3
5153
5154# qhasm: xmm4 ^= *(int128 *)(c + 576)
5155# asm 1: pxor 576(<c=int64#5),<xmm4=int6464#5
5156# asm 2: pxor 576(<c=%r8),<xmm4=%xmm4
5157pxor 576(%r8),%xmm4
5158
5159# qhasm: shuffle bytes of xmm4 by SR
5160# asm 1: pshufb SR,<xmm4=int6464#5
5161# asm 2: pshufb SR,<xmm4=%xmm4
5162pshufb SR,%xmm4
5163
5164# qhasm: xmm5 ^= *(int128 *)(c + 592)
5165# asm 1: pxor 592(<c=int64#5),<xmm5=int6464#6
5166# asm 2: pxor 592(<c=%r8),<xmm5=%xmm5
5167pxor 592(%r8),%xmm5
5168
5169# qhasm: shuffle bytes of xmm5 by SR
5170# asm 1: pshufb SR,<xmm5=int6464#6
5171# asm 2: pshufb SR,<xmm5=%xmm5
5172pshufb SR,%xmm5
5173
5174# qhasm: xmm6 ^= *(int128 *)(c + 608)
5175# asm 1: pxor 608(<c=int64#5),<xmm6=int6464#7
5176# asm 2: pxor 608(<c=%r8),<xmm6=%xmm6
5177pxor 608(%r8),%xmm6
5178
5179# qhasm: shuffle bytes of xmm6 by SR
5180# asm 1: pshufb SR,<xmm6=int6464#7
5181# asm 2: pshufb SR,<xmm6=%xmm6
5182pshufb SR,%xmm6
5183
5184# qhasm: xmm7 ^= *(int128 *)(c + 624)
5185# asm 1: pxor 624(<c=int64#5),<xmm7=int6464#8
5186# asm 2: pxor 624(<c=%r8),<xmm7=%xmm7
5187pxor 624(%r8),%xmm7
5188
5189# qhasm: shuffle bytes of xmm7 by SR
5190# asm 1: pshufb SR,<xmm7=int6464#8
5191# asm 2: pshufb SR,<xmm7=%xmm7
5192pshufb SR,%xmm7
5193
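# Annotation: input linear layer of the bitsliced S-box again, this
# time over the slices xmm0..xmm7.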
5194# qhasm: xmm5 ^= xmm6
5195# asm 1: pxor <xmm6=int6464#7,<xmm5=int6464#6
5196# asm 2: pxor <xmm6=%xmm6,<xmm5=%xmm5
5197pxor %xmm6,%xmm5
5198
5199# qhasm: xmm2 ^= xmm1
5200# asm 1: pxor <xmm1=int6464#2,<xmm2=int6464#3
5201# asm 2: pxor <xmm1=%xmm1,<xmm2=%xmm2
5202pxor %xmm1,%xmm2
5203
5204# qhasm: xmm5 ^= xmm0
5205# asm 1: pxor <xmm0=int6464#1,<xmm5=int6464#6
5206# asm 2: pxor <xmm0=%xmm0,<xmm5=%xmm5
5207pxor %xmm0,%xmm5
5208
5209# qhasm: xmm6 ^= xmm2
5210# asm 1: pxor <xmm2=int6464#3,<xmm6=int6464#7
5211# asm 2: pxor <xmm2=%xmm2,<xmm6=%xmm6
5212pxor %xmm2,%xmm6
5213
5214# qhasm: xmm3 ^= xmm0
5215# asm 1: pxor <xmm0=int6464#1,<xmm3=int6464#4
5216# asm 2: pxor <xmm0=%xmm0,<xmm3=%xmm3
5217pxor %xmm0,%xmm3
5218
5219# qhasm: xmm6 ^= xmm3
5220# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
5221# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
5222pxor %xmm3,%xmm6
5223
5224# qhasm: xmm3 ^= xmm7
5225# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4
5226# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3
5227pxor %xmm7,%xmm3
5228
5229# qhasm: xmm3 ^= xmm4
5230# asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4
5231# asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3
5232pxor %xmm4,%xmm3
5233
5234# qhasm: xmm7 ^= xmm5
5235# asm 1: pxor <xmm5=int6464#6,<xmm7=int6464#8
5236# asm 2: pxor <xmm5=%xmm5,<xmm7=%xmm7
5237pxor %xmm5,%xmm7
5238
5239# qhasm: xmm3 ^= xmm1
5240# asm 1: pxor <xmm1=int6464#2,<xmm3=int6464#4
5241# asm 2: pxor <xmm1=%xmm1,<xmm3=%xmm3
5242pxor %xmm1,%xmm3
5243
5244# qhasm: xmm4 ^= xmm5
5245# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
5246# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
5247pxor %xmm5,%xmm4
5248
5249# qhasm: xmm2 ^= xmm7
5250# asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3
5251# asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2
5252pxor %xmm7,%xmm2
5253
5254# qhasm: xmm1 ^= xmm5
5255# asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2
5256# asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1
5257pxor %xmm5,%xmm1
5258
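# Annotation: nonlinear S-box core for this round; xmm8..xmm15 serve
# as the scratch registers.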
5259# qhasm: xmm11 = xmm7
5260# asm 1: movdqa <xmm7=int6464#8,>xmm11=int6464#9
5261# asm 2: movdqa <xmm7=%xmm7,>xmm11=%xmm8
5262movdqa %xmm7,%xmm8
5263
5264# qhasm: xmm10 = xmm1
5265# asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10
5266# asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9
5267movdqa %xmm1,%xmm9
5268
5269# qhasm: xmm9 = xmm5
5270# asm 1: movdqa <xmm5=int6464#6,>xmm9=int6464#11
5271# asm 2: movdqa <xmm5=%xmm5,>xmm9=%xmm10
5272movdqa %xmm5,%xmm10
5273
5274# qhasm: xmm13 = xmm2
5275# asm 1: movdqa <xmm2=int6464#3,>xmm13=int6464#12
5276# asm 2: movdqa <xmm2=%xmm2,>xmm13=%xmm11
5277movdqa %xmm2,%xmm11
5278
5279# qhasm: xmm12 = xmm6
5280# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#13
5281# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm12
5282movdqa %xmm6,%xmm12
5283
5284# qhasm: xmm11 ^= xmm4
5285# asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#9
5286# asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm8
5287pxor %xmm4,%xmm8
5288
5289# qhasm: xmm10 ^= xmm2
5290# asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#10
5291# asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm9
5292pxor %xmm2,%xmm9
5293
5294# qhasm: xmm9 ^= xmm3
5295# asm 1: pxor <xmm3=int6464#4,<xmm9=int6464#11
5296# asm 2: pxor <xmm3=%xmm3,<xmm9=%xmm10
5297pxor %xmm3,%xmm10
5298
5299# qhasm: xmm13 ^= xmm4
5300# asm 1: pxor <xmm4=int6464#5,<xmm13=int6464#12
5301# asm 2: pxor <xmm4=%xmm4,<xmm13=%xmm11
5302pxor %xmm4,%xmm11
5303
5304# qhasm: xmm12 ^= xmm0
5305# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13
5306# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12
5307pxor %xmm0,%xmm12
5308
5309# qhasm: xmm14 = xmm11
5310# asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14
5311# asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13
5312movdqa %xmm8,%xmm13
5313
5314# qhasm: xmm8 = xmm10
5315# asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15
5316# asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14
5317movdqa %xmm9,%xmm14
5318
5319# qhasm: xmm15 = xmm11
5320# asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16
5321# asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15
5322movdqa %xmm8,%xmm15
5323
5324# qhasm: xmm10 |= xmm9
5325# asm 1: por <xmm9=int6464#11,<xmm10=int6464#10
5326# asm 2: por <xmm9=%xmm10,<xmm10=%xmm9
5327por %xmm10,%xmm9
5328
5329# qhasm: xmm11 |= xmm12
5330# asm 1: por <xmm12=int6464#13,<xmm11=int6464#9
5331# asm 2: por <xmm12=%xmm12,<xmm11=%xmm8
5332por %xmm12,%xmm8
5333
5334# qhasm: xmm15 ^= xmm8
5335# asm 1: pxor <xmm8=int6464#15,<xmm15=int6464#16
5336# asm 2: pxor <xmm8=%xmm14,<xmm15=%xmm15
5337pxor %xmm14,%xmm15
5338
5339# qhasm: xmm14 &= xmm12
5340# asm 1: pand <xmm12=int6464#13,<xmm14=int6464#14
5341# asm 2: pand <xmm12=%xmm12,<xmm14=%xmm13
5342pand %xmm12,%xmm13
5343
5344# qhasm: xmm8 &= xmm9
5345# asm 1: pand <xmm9=int6464#11,<xmm8=int6464#15
5346# asm 2: pand <xmm9=%xmm10,<xmm8=%xmm14
5347pand %xmm10,%xmm14
5348
5349# qhasm: xmm12 ^= xmm9
5350# asm 1: pxor <xmm9=int6464#11,<xmm12=int6464#13
5351# asm 2: pxor <xmm9=%xmm10,<xmm12=%xmm12
5352pxor %xmm10,%xmm12
5353
5354# qhasm: xmm15 &= xmm12
5355# asm 1: pand <xmm12=int6464#13,<xmm15=int6464#16
5356# asm 2: pand <xmm12=%xmm12,<xmm15=%xmm15
5357pand %xmm12,%xmm15
5358
5359# qhasm: xmm12 = xmm3
5360# asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#11
5361# asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm10
5362movdqa %xmm3,%xmm10
5363
5364# qhasm: xmm12 ^= xmm0
5365# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#11
5366# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm10
5367pxor %xmm0,%xmm10
5368
5369# qhasm: xmm13 &= xmm12
5370# asm 1: pand <xmm12=int6464#11,<xmm13=int6464#12
5371# asm 2: pand <xmm12=%xmm10,<xmm13=%xmm11
5372pand %xmm10,%xmm11
5373
5374# qhasm: xmm11 ^= xmm13
5375# asm 1: pxor <xmm13=int6464#12,<xmm11=int6464#9
5376# asm 2: pxor <xmm13=%xmm11,<xmm11=%xmm8
5377pxor %xmm11,%xmm8
5378
5379# qhasm: xmm10 ^= xmm13
5380# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
5381# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
5382pxor %xmm11,%xmm9
5383
5384# qhasm: xmm13 = xmm7
5385# asm 1: movdqa <xmm7=int6464#8,>xmm13=int6464#11
5386# asm 2: movdqa <xmm7=%xmm7,>xmm13=%xmm10
5387movdqa %xmm7,%xmm10
5388
5389# qhasm: xmm13 ^= xmm1
5390# asm 1: pxor <xmm1=int6464#2,<xmm13=int6464#11
5391# asm 2: pxor <xmm1=%xmm1,<xmm13=%xmm10
5392pxor %xmm1,%xmm10
5393
5394# qhasm: xmm12 = xmm5
5395# asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#12
5396# asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm11
5397movdqa %xmm5,%xmm11
5398
5399# qhasm: xmm9 = xmm13
5400# asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13
5401# asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12
5402movdqa %xmm10,%xmm12
5403
5404# qhasm: xmm12 ^= xmm6
5405# asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#12
5406# asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm11
5407pxor %xmm6,%xmm11
5408
5409# qhasm: xmm9 |= xmm12
5410# asm 1: por <xmm12=int6464#12,<xmm9=int6464#13
5411# asm 2: por <xmm12=%xmm11,<xmm9=%xmm12
5412por %xmm11,%xmm12
5413
5414# qhasm: xmm13 &= xmm12
5415# asm 1: pand <xmm12=int6464#12,<xmm13=int6464#11
5416# asm 2: pand <xmm12=%xmm11,<xmm13=%xmm10
5417pand %xmm11,%xmm10
5418
5419# qhasm: xmm8 ^= xmm13
5420# asm 1: pxor <xmm13=int6464#11,<xmm8=int6464#15
5421# asm 2: pxor <xmm13=%xmm10,<xmm8=%xmm14
5422pxor %xmm10,%xmm14
5423
5424# qhasm: xmm11 ^= xmm15
5425# asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#9
5426# asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm8
5427pxor %xmm15,%xmm8
5428
5429# qhasm: xmm10 ^= xmm14
5430# asm 1: pxor <xmm14=int6464#14,<xmm10=int6464#10
5431# asm 2: pxor <xmm14=%xmm13,<xmm10=%xmm9
5432pxor %xmm13,%xmm9
5433
5434# qhasm: xmm9 ^= xmm15
5435# asm 1: pxor <xmm15=int6464#16,<xmm9=int6464#13
5436# asm 2: pxor <xmm15=%xmm15,<xmm9=%xmm12
5437pxor %xmm15,%xmm12
5438
5439# qhasm: xmm8 ^= xmm14
5440# asm 1: pxor <xmm14=int6464#14,<xmm8=int6464#15
5441# asm 2: pxor <xmm14=%xmm13,<xmm8=%xmm14
5442pxor %xmm13,%xmm14
5443
5444# qhasm: xmm9 ^= xmm14
5445# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
5446# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
5447pxor %xmm13,%xmm12
5448
5449# qhasm: xmm12 = xmm2
5450# asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#11
5451# asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm10
5452movdqa %xmm2,%xmm10
5453
5454# qhasm: xmm13 = xmm4
5455# asm 1: movdqa <xmm4=int6464#5,>xmm13=int6464#12
5456# asm 2: movdqa <xmm4=%xmm4,>xmm13=%xmm11
5457movdqa %xmm4,%xmm11
5458
5459# qhasm: xmm14 = xmm1
5460# asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14
5461# asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13
5462movdqa %xmm1,%xmm13
5463
5464# qhasm: xmm15 = xmm7
5465# asm 1: movdqa <xmm7=int6464#8,>xmm15=int6464#16
5466# asm 2: movdqa <xmm7=%xmm7,>xmm15=%xmm15
5467movdqa %xmm7,%xmm15
5468
5469# qhasm: xmm12 &= xmm3
5470# asm 1: pand <xmm3=int6464#4,<xmm12=int6464#11
5471# asm 2: pand <xmm3=%xmm3,<xmm12=%xmm10
5472pand %xmm3,%xmm10
5473
5474# qhasm: xmm13 &= xmm0
5475# asm 1: pand <xmm0=int6464#1,<xmm13=int6464#12
5476# asm 2: pand <xmm0=%xmm0,<xmm13=%xmm11
5477pand %xmm0,%xmm11
5478
5479# qhasm: xmm14 &= xmm5
5480# asm 1: pand <xmm5=int6464#6,<xmm14=int6464#14
5481# asm 2: pand <xmm5=%xmm5,<xmm14=%xmm13
5482pand %xmm5,%xmm13
5483
5484# qhasm: xmm15 |= xmm6
5485# asm 1: por <xmm6=int6464#7,<xmm15=int6464#16
5486# asm 2: por <xmm6=%xmm6,<xmm15=%xmm15
5487por %xmm6,%xmm15
5488
5489# qhasm: xmm11 ^= xmm12
5490# asm 1: pxor <xmm12=int6464#11,<xmm11=int6464#9
5491# asm 2: pxor <xmm12=%xmm10,<xmm11=%xmm8
5492pxor %xmm10,%xmm8
5493
5494# qhasm: xmm10 ^= xmm13
5495# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
5496# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
5497pxor %xmm11,%xmm9
5498
5499# qhasm: xmm9 ^= xmm14
5500# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
5501# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
5502pxor %xmm13,%xmm12
5503
5504# qhasm: xmm8 ^= xmm15
5505# asm 1: pxor <xmm15=int6464#16,<xmm8=int6464#15
5506# asm 2: pxor <xmm15=%xmm15,<xmm8=%xmm14
5507pxor %xmm15,%xmm14
5508
5509# qhasm: xmm12 = xmm11
5510# asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11
5511# asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10
5512movdqa %xmm8,%xmm10
5513
5514# qhasm: xmm12 ^= xmm10
5515# asm 1: pxor <xmm10=int6464#10,<xmm12=int6464#11
5516# asm 2: pxor <xmm10=%xmm9,<xmm12=%xmm10
5517pxor %xmm9,%xmm10
5518
5519# qhasm: xmm11 &= xmm9
5520# asm 1: pand <xmm9=int6464#13,<xmm11=int6464#9
5521# asm 2: pand <xmm9=%xmm12,<xmm11=%xmm8
5522pand %xmm12,%xmm8
5523
5524# qhasm: xmm14 = xmm8
5525# asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12
5526# asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11
5527movdqa %xmm14,%xmm11
5528
5529# qhasm: xmm14 ^= xmm11
5530# asm 1: pxor <xmm11=int6464#9,<xmm14=int6464#12
5531# asm 2: pxor <xmm11=%xmm8,<xmm14=%xmm11
5532pxor %xmm8,%xmm11
5533
5534# qhasm: xmm15 = xmm12
5535# asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14
5536# asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13
5537movdqa %xmm10,%xmm13
5538
5539# qhasm: xmm15 &= xmm14
5540# asm 1: pand <xmm14=int6464#12,<xmm15=int6464#14
5541# asm 2: pand <xmm14=%xmm11,<xmm15=%xmm13
5542pand %xmm11,%xmm13
5543
5544# qhasm: xmm15 ^= xmm10
5545# asm 1: pxor <xmm10=int6464#10,<xmm15=int6464#14
5546# asm 2: pxor <xmm10=%xmm9,<xmm15=%xmm13
5547pxor %xmm9,%xmm13
5548
5549# qhasm: xmm13 = xmm9
5550# asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16
5551# asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15
5552movdqa %xmm12,%xmm15
5553
5554# qhasm: xmm13 ^= xmm8
5555# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
5556# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
5557pxor %xmm14,%xmm15
5558
5559# qhasm: xmm11 ^= xmm10
5560# asm 1: pxor <xmm10=int6464#10,<xmm11=int6464#9
5561# asm 2: pxor <xmm10=%xmm9,<xmm11=%xmm8
5562pxor %xmm9,%xmm8
5563
5564# qhasm: xmm13 &= xmm11
5565# asm 1: pand <xmm11=int6464#9,<xmm13=int6464#16
5566# asm 2: pand <xmm11=%xmm8,<xmm13=%xmm15
5567pand %xmm8,%xmm15
5568
5569# qhasm: xmm13 ^= xmm8
5570# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
5571# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
5572pxor %xmm14,%xmm15
5573
5574# qhasm: xmm9 ^= xmm13
5575# asm 1: pxor <xmm13=int6464#16,<xmm9=int6464#13
5576# asm 2: pxor <xmm13=%xmm15,<xmm9=%xmm12
5577pxor %xmm15,%xmm12
5578
5579# qhasm: xmm10 = xmm14
5580# asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9
5581# asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8
5582movdqa %xmm11,%xmm8
5583
5584# qhasm: xmm10 ^= xmm13
5585# asm 1: pxor <xmm13=int6464#16,<xmm10=int6464#9
5586# asm 2: pxor <xmm13=%xmm15,<xmm10=%xmm8
5587pxor %xmm15,%xmm8
5588
5589# qhasm: xmm10 &= xmm8
5590# asm 1: pand <xmm8=int6464#15,<xmm10=int6464#9
5591# asm 2: pand <xmm8=%xmm14,<xmm10=%xmm8
5592pand %xmm14,%xmm8
5593
5594# qhasm: xmm9 ^= xmm10
5595# asm 1: pxor <xmm10=int6464#9,<xmm9=int6464#13
5596# asm 2: pxor <xmm10=%xmm8,<xmm9=%xmm12
5597pxor %xmm8,%xmm12
5598
5599# qhasm: xmm14 ^= xmm10
5600# asm 1: pxor <xmm10=int6464#9,<xmm14=int6464#12
5601# asm 2: pxor <xmm10=%xmm8,<xmm14=%xmm11
5602pxor %xmm8,%xmm11
5603
5604# qhasm: xmm14 &= xmm15
5605# asm 1: pand <xmm15=int6464#14,<xmm14=int6464#12
5606# asm 2: pand <xmm15=%xmm13,<xmm14=%xmm11
5607pand %xmm13,%xmm11
5608
5609# qhasm: xmm14 ^= xmm12
5610# asm 1: pxor <xmm12=int6464#11,<xmm14=int6464#12
5611# asm 2: pxor <xmm12=%xmm10,<xmm14=%xmm11
5612pxor %xmm10,%xmm11
5613
5614# qhasm: xmm12 = xmm6
5615# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#9
5616# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm8
5617movdqa %xmm6,%xmm8
5618
5619# qhasm: xmm8 = xmm5
5620# asm 1: movdqa <xmm5=int6464#6,>xmm8=int6464#10
5621# asm 2: movdqa <xmm5=%xmm5,>xmm8=%xmm9
5622movdqa %xmm5,%xmm9
5623
5624# qhasm: xmm10 = xmm15
5625# asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11
5626# asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10
5627movdqa %xmm13,%xmm10
5628
5629# qhasm: xmm10 ^= xmm14
5630# asm 1: pxor <xmm14=int6464#12,<xmm10=int6464#11
5631# asm 2: pxor <xmm14=%xmm11,<xmm10=%xmm10
5632pxor %xmm11,%xmm10
5633
5634# qhasm: xmm10 &= xmm6
5635# asm 1: pand <xmm6=int6464#7,<xmm10=int6464#11
5636# asm 2: pand <xmm6=%xmm6,<xmm10=%xmm10
5637pand %xmm6,%xmm10
5638
5639# qhasm: xmm6 ^= xmm5
5640# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
5641# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
5642pxor %xmm5,%xmm6
5643
5644# qhasm: xmm6 &= xmm14
5645# asm 1: pand <xmm14=int6464#12,<xmm6=int6464#7
5646# asm 2: pand <xmm14=%xmm11,<xmm6=%xmm6
5647pand %xmm11,%xmm6
5648
5649# qhasm: xmm5 &= xmm15
5650# asm 1: pand <xmm15=int6464#14,<xmm5=int6464#6
5651# asm 2: pand <xmm15=%xmm13,<xmm5=%xmm5
5652pand %xmm13,%xmm5
5653
5654# qhasm: xmm6 ^= xmm5
5655# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
5656# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
5657pxor %xmm5,%xmm6
5658
5659# qhasm: xmm5 ^= xmm10
5660# asm 1: pxor <xmm10=int6464#11,<xmm5=int6464#6
5661# asm 2: pxor <xmm10=%xmm10,<xmm5=%xmm5
5662pxor %xmm10,%xmm5
5663
5664# qhasm: xmm12 ^= xmm0
5665# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#9
5666# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm8
5667pxor %xmm0,%xmm8
5668
5669# qhasm: xmm8 ^= xmm3
5670# asm 1: pxor <xmm3=int6464#4,<xmm8=int6464#10
5671# asm 2: pxor <xmm3=%xmm3,<xmm8=%xmm9
5672pxor %xmm3,%xmm9
5673
5674# qhasm: xmm15 ^= xmm13
5675# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
5676# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
5677pxor %xmm15,%xmm13
5678
5679# qhasm: xmm14 ^= xmm9
5680# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
5681# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
5682pxor %xmm12,%xmm11
5683
5684# qhasm: xmm11 = xmm15
5685# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
5686# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
5687movdqa %xmm13,%xmm10
5688
5689# qhasm: xmm11 ^= xmm14
5690# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
5691# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
5692pxor %xmm11,%xmm10
5693
5694# qhasm: xmm11 &= xmm12
5695# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
5696# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
5697pand %xmm8,%xmm10
5698
5699# qhasm: xmm12 ^= xmm8
5700# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
5701# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
5702pxor %xmm9,%xmm8
5703
5704# qhasm: xmm12 &= xmm14
5705# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
5706# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
5707pand %xmm11,%xmm8
5708
5709# qhasm: xmm8 &= xmm15
5710# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
5711# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
5712pand %xmm13,%xmm9
5713
5714# qhasm: xmm8 ^= xmm12
5715# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
5716# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
5717pxor %xmm8,%xmm9
5718
5719# qhasm: xmm12 ^= xmm11
5720# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
5721# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
5722pxor %xmm10,%xmm8
5723
5724# qhasm: xmm10 = xmm13
5725# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
5726# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
5727movdqa %xmm15,%xmm10
5728
5729# qhasm: xmm10 ^= xmm9
5730# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
5731# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
5732pxor %xmm12,%xmm10
5733
5734# qhasm: xmm10 &= xmm0
5735# asm 1: pand <xmm0=int6464#1,<xmm10=int6464#11
5736# asm 2: pand <xmm0=%xmm0,<xmm10=%xmm10
5737pand %xmm0,%xmm10
5738
5739# qhasm: xmm0 ^= xmm3
5740# asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1
5741# asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0
5742pxor %xmm3,%xmm0
5743
5744# qhasm: xmm0 &= xmm9
5745# asm 1: pand <xmm9=int6464#13,<xmm0=int6464#1
5746# asm 2: pand <xmm9=%xmm12,<xmm0=%xmm0
5747pand %xmm12,%xmm0
5748
5749# qhasm: xmm3 &= xmm13
5750# asm 1: pand <xmm13=int6464#16,<xmm3=int6464#4
5751# asm 2: pand <xmm13=%xmm15,<xmm3=%xmm3
5752pand %xmm15,%xmm3
5753
5754# qhasm: xmm0 ^= xmm3
5755# asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1
5756# asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0
5757pxor %xmm3,%xmm0
5758
5759# qhasm: xmm3 ^= xmm10
5760# asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4
5761# asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3
5762pxor %xmm10,%xmm3
5763
5764# qhasm: xmm6 ^= xmm12
5765# asm 1: pxor <xmm12=int6464#9,<xmm6=int6464#7
5766# asm 2: pxor <xmm12=%xmm8,<xmm6=%xmm6
5767pxor %xmm8,%xmm6
5768
5769# qhasm: xmm0 ^= xmm12
5770# asm 1: pxor <xmm12=int6464#9,<xmm0=int6464#1
5771# asm 2: pxor <xmm12=%xmm8,<xmm0=%xmm0
5772pxor %xmm8,%xmm0
5773
5774# qhasm: xmm5 ^= xmm8
5775# asm 1: pxor <xmm8=int6464#10,<xmm5=int6464#6
5776# asm 2: pxor <xmm8=%xmm9,<xmm5=%xmm5
5777pxor %xmm9,%xmm5
5778
5779# qhasm: xmm3 ^= xmm8
5780# asm 1: pxor <xmm8=int6464#10,<xmm3=int6464#4
5781# asm 2: pxor <xmm8=%xmm9,<xmm3=%xmm3
5782pxor %xmm9,%xmm3
5783
5784# qhasm: xmm12 = xmm7
5785# asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#9
5786# asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm8
5787movdqa %xmm7,%xmm8
5788
5789# qhasm: xmm8 = xmm1
5790# asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10
5791# asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9
5792movdqa %xmm1,%xmm9
5793
5794# qhasm: xmm12 ^= xmm4
5795# asm 1: pxor <xmm4=int6464#5,<xmm12=int6464#9
5796# asm 2: pxor <xmm4=%xmm4,<xmm12=%xmm8
5797pxor %xmm4,%xmm8
5798
5799# qhasm: xmm8 ^= xmm2
5800# asm 1: pxor <xmm2=int6464#3,<xmm8=int6464#10
5801# asm 2: pxor <xmm2=%xmm2,<xmm8=%xmm9
5802pxor %xmm2,%xmm9
5803
5804# qhasm: xmm11 = xmm15
5805# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
5806# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
5807movdqa %xmm13,%xmm10
5808
5809# qhasm: xmm11 ^= xmm14
5810# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
5811# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
5812pxor %xmm11,%xmm10
5813
5814# qhasm: xmm11 &= xmm12
5815# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
5816# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
5817pand %xmm8,%xmm10
5818
5819# qhasm: xmm12 ^= xmm8
5820# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
5821# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
5822pxor %xmm9,%xmm8
5823
5824# qhasm: xmm12 &= xmm14
5825# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
5826# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
5827pand %xmm11,%xmm8
5828
5829# qhasm: xmm8 &= xmm15
5830# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
5831# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
5832pand %xmm13,%xmm9
5833
5834# qhasm: xmm8 ^= xmm12
5835# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
5836# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
5837pxor %xmm8,%xmm9
5838
5839# qhasm: xmm12 ^= xmm11
5840# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
5841# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
5842pxor %xmm10,%xmm8
5843
5844# qhasm: xmm10 = xmm13
5845# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
5846# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
5847movdqa %xmm15,%xmm10
5848
5849# qhasm: xmm10 ^= xmm9
5850# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
5851# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
5852pxor %xmm12,%xmm10
5853
5854# qhasm: xmm10 &= xmm4
5855# asm 1: pand <xmm4=int6464#5,<xmm10=int6464#11
5856# asm 2: pand <xmm4=%xmm4,<xmm10=%xmm10
5857pand %xmm4,%xmm10
5858
5859# qhasm: xmm4 ^= xmm2
5860# asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5
5861# asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4
5862pxor %xmm2,%xmm4
5863
5864# qhasm: xmm4 &= xmm9
5865# asm 1: pand <xmm9=int6464#13,<xmm4=int6464#5
5866# asm 2: pand <xmm9=%xmm12,<xmm4=%xmm4
5867pand %xmm12,%xmm4
5868
5869# qhasm: xmm2 &= xmm13
5870# asm 1: pand <xmm13=int6464#16,<xmm2=int6464#3
5871# asm 2: pand <xmm13=%xmm15,<xmm2=%xmm2
5872pand %xmm15,%xmm2
5873
5874# qhasm: xmm4 ^= xmm2
5875# asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5
5876# asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4
5877pxor %xmm2,%xmm4
5878
5879# qhasm: xmm2 ^= xmm10
5880# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3
5881# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2
5882pxor %xmm10,%xmm2
5883
5884# qhasm: xmm15 ^= xmm13
5885# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
5886# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
5887pxor %xmm15,%xmm13
5888
5889# qhasm: xmm14 ^= xmm9
5890# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
5891# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
5892pxor %xmm12,%xmm11
5893
5894# qhasm: xmm11 = xmm15
5895# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
5896# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
5897movdqa %xmm13,%xmm10
5898
5899# qhasm: xmm11 ^= xmm14
5900# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
5901# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
5902pxor %xmm11,%xmm10
5903
5904# qhasm: xmm11 &= xmm7
5905# asm 1: pand <xmm7=int6464#8,<xmm11=int6464#11
5906# asm 2: pand <xmm7=%xmm7,<xmm11=%xmm10
5907pand %xmm7,%xmm10
5908
5909# qhasm: xmm7 ^= xmm1
5910# asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8
5911# asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7
5912pxor %xmm1,%xmm7
5913
5914# qhasm: xmm7 &= xmm14
5915# asm 1: pand <xmm14=int6464#12,<xmm7=int6464#8
5916# asm 2: pand <xmm14=%xmm11,<xmm7=%xmm7
5917pand %xmm11,%xmm7
5918
5919# qhasm: xmm1 &= xmm15
5920# asm 1: pand <xmm15=int6464#14,<xmm1=int6464#2
5921# asm 2: pand <xmm15=%xmm13,<xmm1=%xmm1
5922pand %xmm13,%xmm1
5923
5924# qhasm: xmm7 ^= xmm1
5925# asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8
5926# asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7
5927pxor %xmm1,%xmm7
5928
5929# qhasm: xmm1 ^= xmm11
5930# asm 1: pxor <xmm11=int6464#11,<xmm1=int6464#2
5931# asm 2: pxor <xmm11=%xmm10,<xmm1=%xmm1
5932pxor %xmm10,%xmm1
5933
5934# qhasm: xmm7 ^= xmm12
5935# asm 1: pxor <xmm12=int6464#9,<xmm7=int6464#8
5936# asm 2: pxor <xmm12=%xmm8,<xmm7=%xmm7
5937pxor %xmm8,%xmm7
5938
5939# qhasm: xmm4 ^= xmm12
5940# asm 1: pxor <xmm12=int6464#9,<xmm4=int6464#5
5941# asm 2: pxor <xmm12=%xmm8,<xmm4=%xmm4
5942pxor %xmm8,%xmm4
5943
5944# qhasm: xmm1 ^= xmm8
5945# asm 1: pxor <xmm8=int6464#10,<xmm1=int6464#2
5946# asm 2: pxor <xmm8=%xmm9,<xmm1=%xmm1
5947pxor %xmm9,%xmm1
5948
5949# qhasm: xmm2 ^= xmm8
5950# asm 1: pxor <xmm8=int6464#10,<xmm2=int6464#3
5951# asm 2: pxor <xmm8=%xmm9,<xmm2=%xmm2
5952pxor %xmm9,%xmm2
5953
5954# qhasm: xmm7 ^= xmm0
5955# asm 1: pxor <xmm0=int6464#1,<xmm7=int6464#8
5956# asm 2: pxor <xmm0=%xmm0,<xmm7=%xmm7
5957pxor %xmm0,%xmm7
5958
5959# qhasm: xmm1 ^= xmm6
5960# asm 1: pxor <xmm6=int6464#7,<xmm1=int6464#2
5961# asm 2: pxor <xmm6=%xmm6,<xmm1=%xmm1
5962pxor %xmm6,%xmm1
5963
5964# qhasm: xmm4 ^= xmm7
5965# asm 1: pxor <xmm7=int6464#8,<xmm4=int6464#5
5966# asm 2: pxor <xmm7=%xmm7,<xmm4=%xmm4
5967pxor %xmm7,%xmm4
5968
5969# qhasm: xmm6 ^= xmm0
5970# asm 1: pxor <xmm0=int6464#1,<xmm6=int6464#7
5971# asm 2: pxor <xmm0=%xmm0,<xmm6=%xmm6
5972pxor %xmm0,%xmm6
5973
5974# qhasm: xmm0 ^= xmm1
5975# asm 1: pxor <xmm1=int6464#2,<xmm0=int6464#1
5976# asm 2: pxor <xmm1=%xmm1,<xmm0=%xmm0
5977pxor %xmm1,%xmm0
5978
5979# qhasm: xmm1 ^= xmm5
5980# asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2
5981# asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1
5982pxor %xmm5,%xmm1
5983
5984# qhasm: xmm5 ^= xmm2
5985# asm 1: pxor <xmm2=int6464#3,<xmm5=int6464#6
5986# asm 2: pxor <xmm2=%xmm2,<xmm5=%xmm5
5987pxor %xmm2,%xmm5
5988
5989# qhasm: xmm4 ^= xmm5
5990# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
5991# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
5992pxor %xmm5,%xmm4
5993
5994# qhasm: xmm2 ^= xmm3
5995# asm 1: pxor <xmm3=int6464#4,<xmm2=int6464#3
5996# asm 2: pxor <xmm3=%xmm3,<xmm2=%xmm2
5997pxor %xmm3,%xmm2
5998
5999# qhasm: xmm3 ^= xmm5
6000# asm 1: pxor <xmm5=int6464#6,<xmm3=int6464#4
6001# asm 2: pxor <xmm5=%xmm5,<xmm3=%xmm3
6002pxor %xmm5,%xmm3
6003
6004# qhasm: xmm6 ^= xmm3
6005# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
6006# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
6007pxor %xmm3,%xmm6
6008
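# note: the XOR-only chain above appears to finish the S-box output linear
# transform; the pshufd $0x93 copies below likely begin the bitsliced
# MixColumns step, since 0x93 rotates each slice by one 32-bit column.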
6009# qhasm: xmm8 = shuffle dwords of xmm0 by 0x93
6010# asm 1: pshufd $0x93,<xmm0=int6464#1,>xmm8=int6464#9
6011# asm 2: pshufd $0x93,<xmm0=%xmm0,>xmm8=%xmm8
6012pshufd $0x93,%xmm0,%xmm8
6013
6014# qhasm: xmm9 = shuffle dwords of xmm1 by 0x93
6015# asm 1: pshufd $0x93,<xmm1=int6464#2,>xmm9=int6464#10
6016# asm 2: pshufd $0x93,<xmm1=%xmm1,>xmm9=%xmm9
6017pshufd $0x93,%xmm1,%xmm9
6018
6019# qhasm: xmm10 = shuffle dwords of xmm4 by 0x93
6020# asm 1: pshufd $0x93,<xmm4=int6464#5,>xmm10=int6464#11
6021# asm 2: pshufd $0x93,<xmm4=%xmm4,>xmm10=%xmm10
6022pshufd $0x93,%xmm4,%xmm10
6023
6024# qhasm: xmm11 = shuffle dwords of xmm6 by 0x93
6025# asm 1: pshufd $0x93,<xmm6=int6464#7,>xmm11=int6464#12
6026# asm 2: pshufd $0x93,<xmm6=%xmm6,>xmm11=%xmm11
6027pshufd $0x93,%xmm6,%xmm11
6028
6029# qhasm: xmm12 = shuffle dwords of xmm3 by 0x93
6030# asm 1: pshufd $0x93,<xmm3=int6464#4,>xmm12=int6464#13
6031# asm 2: pshufd $0x93,<xmm3=%xmm3,>xmm12=%xmm12
6032pshufd $0x93,%xmm3,%xmm12
6033
6034# qhasm: xmm13 = shuffle dwords of xmm7 by 0x93
6035# asm 1: pshufd $0x93,<xmm7=int6464#8,>xmm13=int6464#14
6036# asm 2: pshufd $0x93,<xmm7=%xmm7,>xmm13=%xmm13
6037pshufd $0x93,%xmm7,%xmm13
6038
6039# qhasm: xmm14 = shuffle dwords of xmm2 by 0x93
6040# asm 1: pshufd $0x93,<xmm2=int6464#3,>xmm14=int6464#15
6041# asm 2: pshufd $0x93,<xmm2=%xmm2,>xmm14=%xmm14
6042pshufd $0x93,%xmm2,%xmm14
6043
6044# qhasm: xmm15 = shuffle dwords of xmm5 by 0x93
6045# asm 1: pshufd $0x93,<xmm5=int6464#6,>xmm15=int6464#16
6046# asm 2: pshufd $0x93,<xmm5=%xmm5,>xmm15=%xmm15
6047pshufd $0x93,%xmm5,%xmm15
6048
6049# qhasm: xmm0 ^= xmm8
6050# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
6051# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
6052pxor %xmm8,%xmm0
6053
6054# qhasm: xmm1 ^= xmm9
6055# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
6056# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
6057pxor %xmm9,%xmm1
6058
6059# qhasm: xmm4 ^= xmm10
6060# asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5
6061# asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4
6062pxor %xmm10,%xmm4
6063
6064# qhasm: xmm6 ^= xmm11
6065# asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7
6066# asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6
6067pxor %xmm11,%xmm6
6068
6069# qhasm: xmm3 ^= xmm12
6070# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4
6071# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3
6072pxor %xmm12,%xmm3
6073
6074# qhasm: xmm7 ^= xmm13
6075# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
6076# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
6077pxor %xmm13,%xmm7
6078
6079# qhasm: xmm2 ^= xmm14
6080# asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3
6081# asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2
6082pxor %xmm14,%xmm2
6083
6084# qhasm: xmm5 ^= xmm15
6085# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
6086# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
6087pxor %xmm15,%xmm5
6088
6089# qhasm: xmm8 ^= xmm5
6090# asm 1: pxor <xmm5=int6464#6,<xmm8=int6464#9
6091# asm 2: pxor <xmm5=%xmm5,<xmm8=%xmm8
6092pxor %xmm5,%xmm8
6093
6094# qhasm: xmm9 ^= xmm0
6095# asm 1: pxor <xmm0=int6464#1,<xmm9=int6464#10
6096# asm 2: pxor <xmm0=%xmm0,<xmm9=%xmm9
6097pxor %xmm0,%xmm9
6098
6099# qhasm: xmm10 ^= xmm1
6100# asm 1: pxor <xmm1=int6464#2,<xmm10=int6464#11
6101# asm 2: pxor <xmm1=%xmm1,<xmm10=%xmm10
6102pxor %xmm1,%xmm10
6103
6104# qhasm: xmm9 ^= xmm5
6105# asm 1: pxor <xmm5=int6464#6,<xmm9=int6464#10
6106# asm 2: pxor <xmm5=%xmm5,<xmm9=%xmm9
6107pxor %xmm5,%xmm9
6108
6109# qhasm: xmm11 ^= xmm4
6110# asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#12
6111# asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm11
6112pxor %xmm4,%xmm11
6113
6114# qhasm: xmm12 ^= xmm6
6115# asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#13
6116# asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm12
6117pxor %xmm6,%xmm12
6118
6119# qhasm: xmm13 ^= xmm3
6120# asm 1: pxor <xmm3=int6464#4,<xmm13=int6464#14
6121# asm 2: pxor <xmm3=%xmm3,<xmm13=%xmm13
6122pxor %xmm3,%xmm13
6123
6124# qhasm: xmm11 ^= xmm5
6125# asm 1: pxor <xmm5=int6464#6,<xmm11=int6464#12
6126# asm 2: pxor <xmm5=%xmm5,<xmm11=%xmm11
6127pxor %xmm5,%xmm11
6128
6129# qhasm: xmm14 ^= xmm7
6130# asm 1: pxor <xmm7=int6464#8,<xmm14=int6464#15
6131# asm 2: pxor <xmm7=%xmm7,<xmm14=%xmm14
6132pxor %xmm7,%xmm14
6133
6134# qhasm: xmm15 ^= xmm2
6135# asm 1: pxor <xmm2=int6464#3,<xmm15=int6464#16
6136# asm 2: pxor <xmm2=%xmm2,<xmm15=%xmm15
6137pxor %xmm2,%xmm15
6138
6139# qhasm: xmm12 ^= xmm5
6140# asm 1: pxor <xmm5=int6464#6,<xmm12=int6464#13
6141# asm 2: pxor <xmm5=%xmm5,<xmm12=%xmm12
6142pxor %xmm5,%xmm12
6143
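# note: the pshufd $0x4E shuffles below (0x4E swaps the two 64-bit halves,
# i.e. a two-column rotation) likely supply the remaining rotation that
# completes the bitsliced MixColumns.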
6144# qhasm: xmm0 = shuffle dwords of xmm0 by 0x4E
6145# asm 1: pshufd $0x4E,<xmm0=int6464#1,>xmm0=int6464#1
6146# asm 2: pshufd $0x4E,<xmm0=%xmm0,>xmm0=%xmm0
6147pshufd $0x4E,%xmm0,%xmm0
6148
6149# qhasm: xmm1 = shuffle dwords of xmm1 by 0x4E
6150# asm 1: pshufd $0x4E,<xmm1=int6464#2,>xmm1=int6464#2
6151# asm 2: pshufd $0x4E,<xmm1=%xmm1,>xmm1=%xmm1
6152pshufd $0x4E,%xmm1,%xmm1
6153
6154# qhasm: xmm4 = shuffle dwords of xmm4 by 0x4E
6155# asm 1: pshufd $0x4E,<xmm4=int6464#5,>xmm4=int6464#5
6156# asm 2: pshufd $0x4E,<xmm4=%xmm4,>xmm4=%xmm4
6157pshufd $0x4E,%xmm4,%xmm4
6158
6159# qhasm: xmm6 = shuffle dwords of xmm6 by 0x4E
6160# asm 1: pshufd $0x4E,<xmm6=int6464#7,>xmm6=int6464#7
6161# asm 2: pshufd $0x4E,<xmm6=%xmm6,>xmm6=%xmm6
6162pshufd $0x4E,%xmm6,%xmm6
6163
6164# qhasm: xmm3 = shuffle dwords of xmm3 by 0x4E
6165# asm 1: pshufd $0x4E,<xmm3=int6464#4,>xmm3=int6464#4
6166# asm 2: pshufd $0x4E,<xmm3=%xmm3,>xmm3=%xmm3
6167pshufd $0x4E,%xmm3,%xmm3
6168
6169# qhasm: xmm7 = shuffle dwords of xmm7 by 0x4E
6170# asm 1: pshufd $0x4E,<xmm7=int6464#8,>xmm7=int6464#8
6171# asm 2: pshufd $0x4E,<xmm7=%xmm7,>xmm7=%xmm7
6172pshufd $0x4E,%xmm7,%xmm7
6173
6174# qhasm: xmm2 = shuffle dwords of xmm2 by 0x4E
6175# asm 1: pshufd $0x4E,<xmm2=int6464#3,>xmm2=int6464#3
6176# asm 2: pshufd $0x4E,<xmm2=%xmm2,>xmm2=%xmm2
6177pshufd $0x4E,%xmm2,%xmm2
6178
6179# qhasm: xmm5 = shuffle dwords of xmm5 by 0x4E
6180# asm 1: pshufd $0x4E,<xmm5=int6464#6,>xmm5=int6464#6
6181# asm 2: pshufd $0x4E,<xmm5=%xmm5,>xmm5=%xmm5
6182pshufd $0x4E,%xmm5,%xmm5
6183
6184# qhasm: xmm8 ^= xmm0
6185# asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9
6186# asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8
6187pxor %xmm0,%xmm8
6188
6189# qhasm: xmm9 ^= xmm1
6190# asm 1: pxor <xmm1=int6464#2,<xmm9=int6464#10
6191# asm 2: pxor <xmm1=%xmm1,<xmm9=%xmm9
6192pxor %xmm1,%xmm9
6193
6194# qhasm: xmm10 ^= xmm4
6195# asm 1: pxor <xmm4=int6464#5,<xmm10=int6464#11
6196# asm 2: pxor <xmm4=%xmm4,<xmm10=%xmm10
6197pxor %xmm4,%xmm10
6198
6199# qhasm: xmm11 ^= xmm6
6200# asm 1: pxor <xmm6=int6464#7,<xmm11=int6464#12
6201# asm 2: pxor <xmm6=%xmm6,<xmm11=%xmm11
6202pxor %xmm6,%xmm11
6203
6204# qhasm: xmm12 ^= xmm3
6205# asm 1: pxor <xmm3=int6464#4,<xmm12=int6464#13
6206# asm 2: pxor <xmm3=%xmm3,<xmm12=%xmm12
6207pxor %xmm3,%xmm12
6208
6209# qhasm: xmm13 ^= xmm7
6210# asm 1: pxor <xmm7=int6464#8,<xmm13=int6464#14
6211# asm 2: pxor <xmm7=%xmm7,<xmm13=%xmm13
6212pxor %xmm7,%xmm13
6213
6214# qhasm: xmm14 ^= xmm2
6215# asm 1: pxor <xmm2=int6464#3,<xmm14=int6464#15
6216# asm 2: pxor <xmm2=%xmm2,<xmm14=%xmm14
6217pxor %xmm2,%xmm14
6218
6219# qhasm: xmm15 ^= xmm5
6220# asm 1: pxor <xmm5=int6464#6,<xmm15=int6464#16
6221# asm 2: pxor <xmm5=%xmm5,<xmm15=%xmm15
6222pxor %xmm5,%xmm15
6223
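# note: each bitsliced round key occupies 8 x 16 = 128 bytes starting at c,
# so the XORs from offset 640 below are presumably AddRoundKey for round 5;
# the pshufb by SR after each load applies ShiftRows as a byte permutation.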
6224# qhasm: xmm8 ^= *(int128 *)(c + 640)
6225# asm 1: pxor 640(<c=int64#5),<xmm8=int6464#9
6226# asm 2: pxor 640(<c=%r8),<xmm8=%xmm8
6227pxor 640(%r8),%xmm8
6228
6229# qhasm: shuffle bytes of xmm8 by SR
6230# asm 1: pshufb SR,<xmm8=int6464#9
6231# asm 2: pshufb SR,<xmm8=%xmm8
6232pshufb SR,%xmm8
6233
6234# qhasm: xmm9 ^= *(int128 *)(c + 656)
6235# asm 1: pxor 656(<c=int64#5),<xmm9=int6464#10
6236# asm 2: pxor 656(<c=%r8),<xmm9=%xmm9
6237pxor 656(%r8),%xmm9
6238
6239# qhasm: shuffle bytes of xmm9 by SR
6240# asm 1: pshufb SR,<xmm9=int6464#10
6241# asm 2: pshufb SR,<xmm9=%xmm9
6242pshufb SR,%xmm9
6243
6244# qhasm: xmm10 ^= *(int128 *)(c + 672)
6245# asm 1: pxor 672(<c=int64#5),<xmm10=int6464#11
6246# asm 2: pxor 672(<c=%r8),<xmm10=%xmm10
6247pxor 672(%r8),%xmm10
6248
6249# qhasm: shuffle bytes of xmm10 by SR
6250# asm 1: pshufb SR,<xmm10=int6464#11
6251# asm 2: pshufb SR,<xmm10=%xmm10
6252pshufb SR,%xmm10
6253
6254# qhasm: xmm11 ^= *(int128 *)(c + 688)
6255# asm 1: pxor 688(<c=int64#5),<xmm11=int6464#12
6256# asm 2: pxor 688(<c=%r8),<xmm11=%xmm11
6257pxor 688(%r8),%xmm11
6258
6259# qhasm: shuffle bytes of xmm11 by SR
6260# asm 1: pshufb SR,<xmm11=int6464#12
6261# asm 2: pshufb SR,<xmm11=%xmm11
6262pshufb SR,%xmm11
6263
6264# qhasm: xmm12 ^= *(int128 *)(c + 704)
6265# asm 1: pxor 704(<c=int64#5),<xmm12=int6464#13
6266# asm 2: pxor 704(<c=%r8),<xmm12=%xmm12
6267pxor 704(%r8),%xmm12
6268
6269# qhasm: shuffle bytes of xmm12 by SR
6270# asm 1: pshufb SR,<xmm12=int6464#13
6271# asm 2: pshufb SR,<xmm12=%xmm12
6272pshufb SR,%xmm12
6273
6274# qhasm: xmm13 ^= *(int128 *)(c + 720)
6275# asm 1: pxor 720(<c=int64#5),<xmm13=int6464#14
6276# asm 2: pxor 720(<c=%r8),<xmm13=%xmm13
6277pxor 720(%r8),%xmm13
6278
6279# qhasm: shuffle bytes of xmm13 by SR
6280# asm 1: pshufb SR,<xmm13=int6464#14
6281# asm 2: pshufb SR,<xmm13=%xmm13
6282pshufb SR,%xmm13
6283
6284# qhasm: xmm14 ^= *(int128 *)(c + 736)
6285# asm 1: pxor 736(<c=int64#5),<xmm14=int6464#15
6286# asm 2: pxor 736(<c=%r8),<xmm14=%xmm14
6287pxor 736(%r8),%xmm14
6288
6289# qhasm: shuffle bytes of xmm14 by SR
6290# asm 1: pshufb SR,<xmm14=int6464#15
6291# asm 2: pshufb SR,<xmm14=%xmm14
6292pshufb SR,%xmm14
6293
6294# qhasm: xmm15 ^= *(int128 *)(c + 752)
6295# asm 1: pxor 752(<c=int64#5),<xmm15=int6464#16
6296# asm 2: pxor 752(<c=%r8),<xmm15=%xmm15
6297pxor 752(%r8),%xmm15
6298
6299# qhasm: shuffle bytes of xmm15 by SR
6300# asm 1: pshufb SR,<xmm15=int6464#16
6301# asm 2: pshufb SR,<xmm15=%xmm15
6302pshufb SR,%xmm15
6303
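# note: the XOR-only block starting here appears to be the input linear
# layer of the bitsliced AES S-box circuit, applied to all eight slices.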
6304# qhasm: xmm13 ^= xmm14
6305# asm 1: pxor <xmm14=int6464#15,<xmm13=int6464#14
6306# asm 2: pxor <xmm14=%xmm14,<xmm13=%xmm13
6307pxor %xmm14,%xmm13
6308
6309# qhasm: xmm10 ^= xmm9
6310# asm 1: pxor <xmm9=int6464#10,<xmm10=int6464#11
6311# asm 2: pxor <xmm9=%xmm9,<xmm10=%xmm10
6312pxor %xmm9,%xmm10
6313
6314# qhasm: xmm13 ^= xmm8
6315# asm 1: pxor <xmm8=int6464#9,<xmm13=int6464#14
6316# asm 2: pxor <xmm8=%xmm8,<xmm13=%xmm13
6317pxor %xmm8,%xmm13
6318
6319# qhasm: xmm14 ^= xmm10
6320# asm 1: pxor <xmm10=int6464#11,<xmm14=int6464#15
6321# asm 2: pxor <xmm10=%xmm10,<xmm14=%xmm14
6322pxor %xmm10,%xmm14
6323
6324# qhasm: xmm11 ^= xmm8
6325# asm 1: pxor <xmm8=int6464#9,<xmm11=int6464#12
6326# asm 2: pxor <xmm8=%xmm8,<xmm11=%xmm11
6327pxor %xmm8,%xmm11
6328
6329# qhasm: xmm14 ^= xmm11
6330# asm 1: pxor <xmm11=int6464#12,<xmm14=int6464#15
6331# asm 2: pxor <xmm11=%xmm11,<xmm14=%xmm14
6332pxor %xmm11,%xmm14
6333
6334# qhasm: xmm11 ^= xmm15
6335# asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#12
6336# asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm11
6337pxor %xmm15,%xmm11
6338
6339# qhasm: xmm11 ^= xmm12
6340# asm 1: pxor <xmm12=int6464#13,<xmm11=int6464#12
6341# asm 2: pxor <xmm12=%xmm12,<xmm11=%xmm11
6342pxor %xmm12,%xmm11
6343
6344# qhasm: xmm15 ^= xmm13
6345# asm 1: pxor <xmm13=int6464#14,<xmm15=int6464#16
6346# asm 2: pxor <xmm13=%xmm13,<xmm15=%xmm15
6347pxor %xmm13,%xmm15
6348
6349# qhasm: xmm11 ^= xmm9
6350# asm 1: pxor <xmm9=int6464#10,<xmm11=int6464#12
6351# asm 2: pxor <xmm9=%xmm9,<xmm11=%xmm11
6352pxor %xmm9,%xmm11
6353
6354# qhasm: xmm12 ^= xmm13
6355# asm 1: pxor <xmm13=int6464#14,<xmm12=int6464#13
6356# asm 2: pxor <xmm13=%xmm13,<xmm12=%xmm12
6357pxor %xmm13,%xmm12
6358
6359# qhasm: xmm10 ^= xmm15
6360# asm 1: pxor <xmm15=int6464#16,<xmm10=int6464#11
6361# asm 2: pxor <xmm15=%xmm15,<xmm10=%xmm10
6362pxor %xmm15,%xmm10
6363
6364# qhasm: xmm9 ^= xmm13
6365# asm 1: pxor <xmm13=int6464#14,<xmm9=int6464#10
6366# asm 2: pxor <xmm13=%xmm13,<xmm9=%xmm9
6367pxor %xmm13,%xmm9
6368
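# note: from here the code mixes AND/OR gates (pand/por) into the XORs;
# this is presumably the nonlinear GF(2^8) inversion core of the S-box,
# evaluated bitwise across all 128 state bytes in parallel.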
6369# qhasm: xmm3 = xmm15
6370# asm 1: movdqa <xmm15=int6464#16,>xmm3=int6464#1
6371# asm 2: movdqa <xmm15=%xmm15,>xmm3=%xmm0
6372movdqa %xmm15,%xmm0
6373
6374# qhasm: xmm2 = xmm9
6375# asm 1: movdqa <xmm9=int6464#10,>xmm2=int6464#2
6376# asm 2: movdqa <xmm9=%xmm9,>xmm2=%xmm1
6377movdqa %xmm9,%xmm1
6378
6379# qhasm: xmm1 = xmm13
6380# asm 1: movdqa <xmm13=int6464#14,>xmm1=int6464#3
6381# asm 2: movdqa <xmm13=%xmm13,>xmm1=%xmm2
6382movdqa %xmm13,%xmm2
6383
6384# qhasm: xmm5 = xmm10
6385# asm 1: movdqa <xmm10=int6464#11,>xmm5=int6464#4
6386# asm 2: movdqa <xmm10=%xmm10,>xmm5=%xmm3
6387movdqa %xmm10,%xmm3
6388
6389# qhasm: xmm4 = xmm14
6390# asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#5
6391# asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm4
6392movdqa %xmm14,%xmm4
6393
6394# qhasm: xmm3 ^= xmm12
6395# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#1
6396# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm0
6397pxor %xmm12,%xmm0
6398
6399# qhasm: xmm2 ^= xmm10
6400# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#2
6401# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm1
6402pxor %xmm10,%xmm1
6403
6404# qhasm: xmm1 ^= xmm11
6405# asm 1: pxor <xmm11=int6464#12,<xmm1=int6464#3
6406# asm 2: pxor <xmm11=%xmm11,<xmm1=%xmm2
6407pxor %xmm11,%xmm2
6408
6409# qhasm: xmm5 ^= xmm12
6410# asm 1: pxor <xmm12=int6464#13,<xmm5=int6464#4
6411# asm 2: pxor <xmm12=%xmm12,<xmm5=%xmm3
6412pxor %xmm12,%xmm3
6413
6414# qhasm: xmm4 ^= xmm8
6415# asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#5
6416# asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm4
6417pxor %xmm8,%xmm4
6418
6419# qhasm: xmm6 = xmm3
6420# asm 1: movdqa <xmm3=int6464#1,>xmm6=int6464#6
6421# asm 2: movdqa <xmm3=%xmm0,>xmm6=%xmm5
6422movdqa %xmm0,%xmm5
6423
6424# qhasm: xmm0 = xmm2
6425# asm 1: movdqa <xmm2=int6464#2,>xmm0=int6464#7
6426# asm 2: movdqa <xmm2=%xmm1,>xmm0=%xmm6
6427movdqa %xmm1,%xmm6
6428
6429# qhasm: xmm7 = xmm3
6430# asm 1: movdqa <xmm3=int6464#1,>xmm7=int6464#8
6431# asm 2: movdqa <xmm3=%xmm0,>xmm7=%xmm7
6432movdqa %xmm0,%xmm7
6433
6434# qhasm: xmm2 |= xmm1
6435# asm 1: por <xmm1=int6464#3,<xmm2=int6464#2
6436# asm 2: por <xmm1=%xmm2,<xmm2=%xmm1
6437por %xmm2,%xmm1
6438
6439# qhasm: xmm3 |= xmm4
6440# asm 1: por <xmm4=int6464#5,<xmm3=int6464#1
6441# asm 2: por <xmm4=%xmm4,<xmm3=%xmm0
6442por %xmm4,%xmm0
6443
6444# qhasm: xmm7 ^= xmm0
6445# asm 1: pxor <xmm0=int6464#7,<xmm7=int6464#8
6446# asm 2: pxor <xmm0=%xmm6,<xmm7=%xmm7
6447pxor %xmm6,%xmm7
6448
6449# qhasm: xmm6 &= xmm4
6450# asm 1: pand <xmm4=int6464#5,<xmm6=int6464#6
6451# asm 2: pand <xmm4=%xmm4,<xmm6=%xmm5
6452pand %xmm4,%xmm5
6453
6454# qhasm: xmm0 &= xmm1
6455# asm 1: pand <xmm1=int6464#3,<xmm0=int6464#7
6456# asm 2: pand <xmm1=%xmm2,<xmm0=%xmm6
6457pand %xmm2,%xmm6
6458
6459# qhasm: xmm4 ^= xmm1
6460# asm 1: pxor <xmm1=int6464#3,<xmm4=int6464#5
6461# asm 2: pxor <xmm1=%xmm2,<xmm4=%xmm4
6462pxor %xmm2,%xmm4
6463
6464# qhasm: xmm7 &= xmm4
6465# asm 1: pand <xmm4=int6464#5,<xmm7=int6464#8
6466# asm 2: pand <xmm4=%xmm4,<xmm7=%xmm7
6467pand %xmm4,%xmm7
6468
6469# qhasm: xmm4 = xmm11
6470# asm 1: movdqa <xmm11=int6464#12,>xmm4=int6464#3
6471# asm 2: movdqa <xmm11=%xmm11,>xmm4=%xmm2
6472movdqa %xmm11,%xmm2
6473
6474# qhasm: xmm4 ^= xmm8
6475# asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#3
6476# asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm2
6477pxor %xmm8,%xmm2
6478
6479# qhasm: xmm5 &= xmm4
6480# asm 1: pand <xmm4=int6464#3,<xmm5=int6464#4
6481# asm 2: pand <xmm4=%xmm2,<xmm5=%xmm3
6482pand %xmm2,%xmm3
6483
6484# qhasm: xmm3 ^= xmm5
6485# asm 1: pxor <xmm5=int6464#4,<xmm3=int6464#1
6486# asm 2: pxor <xmm5=%xmm3,<xmm3=%xmm0
6487pxor %xmm3,%xmm0
6488
6489# qhasm: xmm2 ^= xmm5
6490# asm 1: pxor <xmm5=int6464#4,<xmm2=int6464#2
6491# asm 2: pxor <xmm5=%xmm3,<xmm2=%xmm1
6492pxor %xmm3,%xmm1
6493
6494# qhasm: xmm5 = xmm15
6495# asm 1: movdqa <xmm15=int6464#16,>xmm5=int6464#3
6496# asm 2: movdqa <xmm15=%xmm15,>xmm5=%xmm2
6497movdqa %xmm15,%xmm2
6498
6499# qhasm: xmm5 ^= xmm9
6500# asm 1: pxor <xmm9=int6464#10,<xmm5=int6464#3
6501# asm 2: pxor <xmm9=%xmm9,<xmm5=%xmm2
6502pxor %xmm9,%xmm2
6503
6504# qhasm: xmm4 = xmm13
6505# asm 1: movdqa <xmm13=int6464#14,>xmm4=int6464#4
6506# asm 2: movdqa <xmm13=%xmm13,>xmm4=%xmm3
6507movdqa %xmm13,%xmm3
6508
6509# qhasm: xmm1 = xmm5
6510# asm 1: movdqa <xmm5=int6464#3,>xmm1=int6464#5
6511# asm 2: movdqa <xmm5=%xmm2,>xmm1=%xmm4
6512movdqa %xmm2,%xmm4
6513
6514# qhasm: xmm4 ^= xmm14
6515# asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#4
6516# asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm3
6517pxor %xmm14,%xmm3
6518
6519# qhasm: xmm1 |= xmm4
6520# asm 1: por <xmm4=int6464#4,<xmm1=int6464#5
6521# asm 2: por <xmm4=%xmm3,<xmm1=%xmm4
6522por %xmm3,%xmm4
6523
6524# qhasm: xmm5 &= xmm4
6525# asm 1: pand <xmm4=int6464#4,<xmm5=int6464#3
6526# asm 2: pand <xmm4=%xmm3,<xmm5=%xmm2
6527pand %xmm3,%xmm2
6528
6529# qhasm: xmm0 ^= xmm5
6530# asm 1: pxor <xmm5=int6464#3,<xmm0=int6464#7
6531# asm 2: pxor <xmm5=%xmm2,<xmm0=%xmm6
6532pxor %xmm2,%xmm6
6533
6534# qhasm: xmm3 ^= xmm7
6535# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#1
6536# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm0
6537pxor %xmm7,%xmm0
6538
6539# qhasm: xmm2 ^= xmm6
6540# asm 1: pxor <xmm6=int6464#6,<xmm2=int6464#2
6541# asm 2: pxor <xmm6=%xmm5,<xmm2=%xmm1
6542pxor %xmm5,%xmm1
6543
6544# qhasm: xmm1 ^= xmm7
6545# asm 1: pxor <xmm7=int6464#8,<xmm1=int6464#5
6546# asm 2: pxor <xmm7=%xmm7,<xmm1=%xmm4
6547pxor %xmm7,%xmm4
6548
6549# qhasm: xmm0 ^= xmm6
6550# asm 1: pxor <xmm6=int6464#6,<xmm0=int6464#7
6551# asm 2: pxor <xmm6=%xmm5,<xmm0=%xmm6
6552pxor %xmm5,%xmm6
6553
6554# qhasm: xmm1 ^= xmm6
6555# asm 1: pxor <xmm6=int6464#6,<xmm1=int6464#5
6556# asm 2: pxor <xmm6=%xmm5,<xmm1=%xmm4
6557pxor %xmm5,%xmm4
6558
6559# qhasm: xmm4 = xmm10
6560# asm 1: movdqa <xmm10=int6464#11,>xmm4=int6464#3
6561# asm 2: movdqa <xmm10=%xmm10,>xmm4=%xmm2
6562movdqa %xmm10,%xmm2
6563
6564# qhasm: xmm5 = xmm12
6565# asm 1: movdqa <xmm12=int6464#13,>xmm5=int6464#4
6566# asm 2: movdqa <xmm12=%xmm12,>xmm5=%xmm3
6567movdqa %xmm12,%xmm3
6568
6569# qhasm: xmm6 = xmm9
6570# asm 1: movdqa <xmm9=int6464#10,>xmm6=int6464#6
6571# asm 2: movdqa <xmm9=%xmm9,>xmm6=%xmm5
6572movdqa %xmm9,%xmm5
6573
6574# qhasm: xmm7 = xmm15
6575# asm 1: movdqa <xmm15=int6464#16,>xmm7=int6464#8
6576# asm 2: movdqa <xmm15=%xmm15,>xmm7=%xmm7
6577movdqa %xmm15,%xmm7
6578
6579# qhasm: xmm4 &= xmm11
6580# asm 1: pand <xmm11=int6464#12,<xmm4=int6464#3
6581# asm 2: pand <xmm11=%xmm11,<xmm4=%xmm2
6582pand %xmm11,%xmm2
6583
6584# qhasm: xmm5 &= xmm8
6585# asm 1: pand <xmm8=int6464#9,<xmm5=int6464#4
6586# asm 2: pand <xmm8=%xmm8,<xmm5=%xmm3
6587pand %xmm8,%xmm3
6588
6589# qhasm: xmm6 &= xmm13
6590# asm 1: pand <xmm13=int6464#14,<xmm6=int6464#6
6591# asm 2: pand <xmm13=%xmm13,<xmm6=%xmm5
6592pand %xmm13,%xmm5
6593
6594# qhasm: xmm7 |= xmm14
6595# asm 1: por <xmm14=int6464#15,<xmm7=int6464#8
6596# asm 2: por <xmm14=%xmm14,<xmm7=%xmm7
6597por %xmm14,%xmm7
6598
6599# qhasm: xmm3 ^= xmm4
6600# asm 1: pxor <xmm4=int6464#3,<xmm3=int6464#1
6601# asm 2: pxor <xmm4=%xmm2,<xmm3=%xmm0
6602pxor %xmm2,%xmm0
6603
6604# qhasm: xmm2 ^= xmm5
6605# asm 1: pxor <xmm5=int6464#4,<xmm2=int6464#2
6606# asm 2: pxor <xmm5=%xmm3,<xmm2=%xmm1
6607pxor %xmm3,%xmm1
6608
6609# qhasm: xmm1 ^= xmm6
6610# asm 1: pxor <xmm6=int6464#6,<xmm1=int6464#5
6611# asm 2: pxor <xmm6=%xmm5,<xmm1=%xmm4
6612pxor %xmm5,%xmm4
6613
6614# qhasm: xmm0 ^= xmm7
6615# asm 1: pxor <xmm7=int6464#8,<xmm0=int6464#7
6616# asm 2: pxor <xmm7=%xmm7,<xmm0=%xmm6
6617pxor %xmm7,%xmm6
6618
6619# qhasm: xmm4 = xmm3
6620# asm 1: movdqa <xmm3=int6464#1,>xmm4=int6464#3
6621# asm 2: movdqa <xmm3=%xmm0,>xmm4=%xmm2
6622movdqa %xmm0,%xmm2
6623
6624# qhasm: xmm4 ^= xmm2
6625# asm 1: pxor <xmm2=int6464#2,<xmm4=int6464#3
6626# asm 2: pxor <xmm2=%xmm1,<xmm4=%xmm2
6627pxor %xmm1,%xmm2
6628
6629# qhasm: xmm3 &= xmm1
6630# asm 1: pand <xmm1=int6464#5,<xmm3=int6464#1
6631# asm 2: pand <xmm1=%xmm4,<xmm3=%xmm0
6632pand %xmm4,%xmm0
6633
6634# qhasm: xmm6 = xmm0
6635# asm 1: movdqa <xmm0=int6464#7,>xmm6=int6464#4
6636# asm 2: movdqa <xmm0=%xmm6,>xmm6=%xmm3
6637movdqa %xmm6,%xmm3
6638
6639# qhasm: xmm6 ^= xmm3
6640# asm 1: pxor <xmm3=int6464#1,<xmm6=int6464#4
6641# asm 2: pxor <xmm3=%xmm0,<xmm6=%xmm3
6642pxor %xmm0,%xmm3
6643
6644# qhasm: xmm7 = xmm4
6645# asm 1: movdqa <xmm4=int6464#3,>xmm7=int6464#6
6646# asm 2: movdqa <xmm4=%xmm2,>xmm7=%xmm5
6647movdqa %xmm2,%xmm5
6648
6649# qhasm: xmm7 &= xmm6
6650# asm 1: pand <xmm6=int6464#4,<xmm7=int6464#6
6651# asm 2: pand <xmm6=%xmm3,<xmm7=%xmm5
6652pand %xmm3,%xmm5
6653
6654# qhasm: xmm7 ^= xmm2
6655# asm 1: pxor <xmm2=int6464#2,<xmm7=int6464#6
6656# asm 2: pxor <xmm2=%xmm1,<xmm7=%xmm5
6657pxor %xmm1,%xmm5
6658
6659# qhasm: xmm5 = xmm1
6660# asm 1: movdqa <xmm1=int6464#5,>xmm5=int6464#8
6661# asm 2: movdqa <xmm1=%xmm4,>xmm5=%xmm7
6662movdqa %xmm4,%xmm7
6663
6664# qhasm: xmm5 ^= xmm0
6665# asm 1: pxor <xmm0=int6464#7,<xmm5=int6464#8
6666# asm 2: pxor <xmm0=%xmm6,<xmm5=%xmm7
6667pxor %xmm6,%xmm7
6668
6669# qhasm: xmm3 ^= xmm2
6670# asm 1: pxor <xmm2=int6464#2,<xmm3=int6464#1
6671# asm 2: pxor <xmm2=%xmm1,<xmm3=%xmm0
6672pxor %xmm1,%xmm0
6673
6674# qhasm: xmm5 &= xmm3
6675# asm 1: pand <xmm3=int6464#1,<xmm5=int6464#8
6676# asm 2: pand <xmm3=%xmm0,<xmm5=%xmm7
6677pand %xmm0,%xmm7
6678
6679# qhasm: xmm5 ^= xmm0
6680# asm 1: pxor <xmm0=int6464#7,<xmm5=int6464#8
6681# asm 2: pxor <xmm0=%xmm6,<xmm5=%xmm7
6682pxor %xmm6,%xmm7
6683
6684# qhasm: xmm1 ^= xmm5
6685# asm 1: pxor <xmm5=int6464#8,<xmm1=int6464#5
6686# asm 2: pxor <xmm5=%xmm7,<xmm1=%xmm4
6687pxor %xmm7,%xmm4
6688
6689# qhasm: xmm2 = xmm6
6690# asm 1: movdqa <xmm6=int6464#4,>xmm2=int6464#1
6691# asm 2: movdqa <xmm6=%xmm3,>xmm2=%xmm0
6692movdqa %xmm3,%xmm0
6693
6694# qhasm: xmm2 ^= xmm5
6695# asm 1: pxor <xmm5=int6464#8,<xmm2=int6464#1
6696# asm 2: pxor <xmm5=%xmm7,<xmm2=%xmm0
6697pxor %xmm7,%xmm0
6698
6699# qhasm: xmm2 &= xmm0
6700# asm 1: pand <xmm0=int6464#7,<xmm2=int6464#1
6701# asm 2: pand <xmm0=%xmm6,<xmm2=%xmm0
6702pand %xmm6,%xmm0
6703
6704# qhasm: xmm1 ^= xmm2
6705# asm 1: pxor <xmm2=int6464#1,<xmm1=int6464#5
6706# asm 2: pxor <xmm2=%xmm0,<xmm1=%xmm4
6707pxor %xmm0,%xmm4
6708
6709# qhasm: xmm6 ^= xmm2
6710# asm 1: pxor <xmm2=int6464#1,<xmm6=int6464#4
6711# asm 2: pxor <xmm2=%xmm0,<xmm6=%xmm3
6712pxor %xmm0,%xmm3
6713
6714# qhasm: xmm6 &= xmm7
6715# asm 1: pand <xmm7=int6464#6,<xmm6=int6464#4
6716# asm 2: pand <xmm7=%xmm5,<xmm6=%xmm3
6717pand %xmm5,%xmm3
6718
6719# qhasm: xmm6 ^= xmm4
6720# asm 1: pxor <xmm4=int6464#3,<xmm6=int6464#4
6721# asm 2: pxor <xmm4=%xmm2,<xmm6=%xmm3
6722pxor %xmm2,%xmm3
6723
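# note: the two structurally similar gate blocks that follow appear to
# multiply the shared inverse back into the inputs to form the S-box output.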
6724# qhasm: xmm4 = xmm14
6725# asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#1
6726# asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm0
6727movdqa %xmm14,%xmm0
6728
6729# qhasm: xmm0 = xmm13
6730# asm 1: movdqa <xmm13=int6464#14,>xmm0=int6464#2
6731# asm 2: movdqa <xmm13=%xmm13,>xmm0=%xmm1
6732movdqa %xmm13,%xmm1
6733
6734# qhasm: xmm2 = xmm7
6735# asm 1: movdqa <xmm7=int6464#6,>xmm2=int6464#3
6736# asm 2: movdqa <xmm7=%xmm5,>xmm2=%xmm2
6737movdqa %xmm5,%xmm2
6738
6739# qhasm: xmm2 ^= xmm6
6740# asm 1: pxor <xmm6=int6464#4,<xmm2=int6464#3
6741# asm 2: pxor <xmm6=%xmm3,<xmm2=%xmm2
6742pxor %xmm3,%xmm2
6743
6744# qhasm: xmm2 &= xmm14
6745# asm 1: pand <xmm14=int6464#15,<xmm2=int6464#3
6746# asm 2: pand <xmm14=%xmm14,<xmm2=%xmm2
6747pand %xmm14,%xmm2
6748
6749# qhasm: xmm14 ^= xmm13
6750# asm 1: pxor <xmm13=int6464#14,<xmm14=int6464#15
6751# asm 2: pxor <xmm13=%xmm13,<xmm14=%xmm14
6752pxor %xmm13,%xmm14
6753
6754# qhasm: xmm14 &= xmm6
6755# asm 1: pand <xmm6=int6464#4,<xmm14=int6464#15
6756# asm 2: pand <xmm6=%xmm3,<xmm14=%xmm14
6757pand %xmm3,%xmm14
6758
6759# qhasm: xmm13 &= xmm7
6760# asm 1: pand <xmm7=int6464#6,<xmm13=int6464#14
6761# asm 2: pand <xmm7=%xmm5,<xmm13=%xmm13
6762pand %xmm5,%xmm13
6763
6764# qhasm: xmm14 ^= xmm13
6765# asm 1: pxor <xmm13=int6464#14,<xmm14=int6464#15
6766# asm 2: pxor <xmm13=%xmm13,<xmm14=%xmm14
6767pxor %xmm13,%xmm14
6768
6769# qhasm: xmm13 ^= xmm2
6770# asm 1: pxor <xmm2=int6464#3,<xmm13=int6464#14
6771# asm 2: pxor <xmm2=%xmm2,<xmm13=%xmm13
6772pxor %xmm2,%xmm13
6773
6774# qhasm: xmm4 ^= xmm8
6775# asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#1
6776# asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm0
6777pxor %xmm8,%xmm0
6778
6779# qhasm: xmm0 ^= xmm11
6780# asm 1: pxor <xmm11=int6464#12,<xmm0=int6464#2
6781# asm 2: pxor <xmm11=%xmm11,<xmm0=%xmm1
6782pxor %xmm11,%xmm1
6783
6784# qhasm: xmm7 ^= xmm5
6785# asm 1: pxor <xmm5=int6464#8,<xmm7=int6464#6
6786# asm 2: pxor <xmm5=%xmm7,<xmm7=%xmm5
6787pxor %xmm7,%xmm5
6788
6789# qhasm: xmm6 ^= xmm1
6790# asm 1: pxor <xmm1=int6464#5,<xmm6=int6464#4
6791# asm 2: pxor <xmm1=%xmm4,<xmm6=%xmm3
6792pxor %xmm4,%xmm3
6793
6794# qhasm: xmm3 = xmm7
6795# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
6796# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
6797movdqa %xmm5,%xmm2
6798
6799# qhasm: xmm3 ^= xmm6
6800# asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3
6801# asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2
6802pxor %xmm3,%xmm2
6803
6804# qhasm: xmm3 &= xmm4
6805# asm 1: pand <xmm4=int6464#1,<xmm3=int6464#3
6806# asm 2: pand <xmm4=%xmm0,<xmm3=%xmm2
6807pand %xmm0,%xmm2
6808
6809# qhasm: xmm4 ^= xmm0
6810# asm 1: pxor <xmm0=int6464#2,<xmm4=int6464#1
6811# asm 2: pxor <xmm0=%xmm1,<xmm4=%xmm0
6812pxor %xmm1,%xmm0
6813
6814# qhasm: xmm4 &= xmm6
6815# asm 1: pand <xmm6=int6464#4,<xmm4=int6464#1
6816# asm 2: pand <xmm6=%xmm3,<xmm4=%xmm0
6817pand %xmm3,%xmm0
6818
6819# qhasm: xmm0 &= xmm7
6820# asm 1: pand <xmm7=int6464#6,<xmm0=int6464#2
6821# asm 2: pand <xmm7=%xmm5,<xmm0=%xmm1
6822pand %xmm5,%xmm1
6823
6824# qhasm: xmm0 ^= xmm4
6825# asm 1: pxor <xmm4=int6464#1,<xmm0=int6464#2
6826# asm 2: pxor <xmm4=%xmm0,<xmm0=%xmm1
6827pxor %xmm0,%xmm1
6828
6829# qhasm: xmm4 ^= xmm3
6830# asm 1: pxor <xmm3=int6464#3,<xmm4=int6464#1
6831# asm 2: pxor <xmm3=%xmm2,<xmm4=%xmm0
6832pxor %xmm2,%xmm0
6833
6834# qhasm: xmm2 = xmm5
6835# asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3
6836# asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2
6837movdqa %xmm7,%xmm2
6838
6839# qhasm: xmm2 ^= xmm1
6840# asm 1: pxor <xmm1=int6464#5,<xmm2=int6464#3
6841# asm 2: pxor <xmm1=%xmm4,<xmm2=%xmm2
6842pxor %xmm4,%xmm2
6843
6844# qhasm: xmm2 &= xmm8
6845# asm 1: pand <xmm8=int6464#9,<xmm2=int6464#3
6846# asm 2: pand <xmm8=%xmm8,<xmm2=%xmm2
6847pand %xmm8,%xmm2
6848
6849# qhasm: xmm8 ^= xmm11
6850# asm 1: pxor <xmm11=int6464#12,<xmm8=int6464#9
6851# asm 2: pxor <xmm11=%xmm11,<xmm8=%xmm8
6852pxor %xmm11,%xmm8
6853
6854# qhasm: xmm8 &= xmm1
6855# asm 1: pand <xmm1=int6464#5,<xmm8=int6464#9
6856# asm 2: pand <xmm1=%xmm4,<xmm8=%xmm8
6857pand %xmm4,%xmm8
6858
6859# qhasm: xmm11 &= xmm5
6860# asm 1: pand <xmm5=int6464#8,<xmm11=int6464#12
6861# asm 2: pand <xmm5=%xmm7,<xmm11=%xmm11
6862pand %xmm7,%xmm11
6863
6864# qhasm: xmm8 ^= xmm11
6865# asm 1: pxor <xmm11=int6464#12,<xmm8=int6464#9
6866# asm 2: pxor <xmm11=%xmm11,<xmm8=%xmm8
6867pxor %xmm11,%xmm8
6868
6869# qhasm: xmm11 ^= xmm2
6870# asm 1: pxor <xmm2=int6464#3,<xmm11=int6464#12
6871# asm 2: pxor <xmm2=%xmm2,<xmm11=%xmm11
6872pxor %xmm2,%xmm11
6873
6874# qhasm: xmm14 ^= xmm4
6875# asm 1: pxor <xmm4=int6464#1,<xmm14=int6464#15
6876# asm 2: pxor <xmm4=%xmm0,<xmm14=%xmm14
6877pxor %xmm0,%xmm14
6878
6879# qhasm: xmm8 ^= xmm4
6880# asm 1: pxor <xmm4=int6464#1,<xmm8=int6464#9
6881# asm 2: pxor <xmm4=%xmm0,<xmm8=%xmm8
6882pxor %xmm0,%xmm8
6883
6884# qhasm: xmm13 ^= xmm0
6885# asm 1: pxor <xmm0=int6464#2,<xmm13=int6464#14
6886# asm 2: pxor <xmm0=%xmm1,<xmm13=%xmm13
6887pxor %xmm1,%xmm13
6888
6889# qhasm: xmm11 ^= xmm0
6890# asm 1: pxor <xmm0=int6464#2,<xmm11=int6464#12
6891# asm 2: pxor <xmm0=%xmm1,<xmm11=%xmm11
6892pxor %xmm1,%xmm11
6893
6894# qhasm: xmm4 = xmm15
6895# asm 1: movdqa <xmm15=int6464#16,>xmm4=int6464#1
6896# asm 2: movdqa <xmm15=%xmm15,>xmm4=%xmm0
6897movdqa %xmm15,%xmm0
6898
6899# qhasm: xmm0 = xmm9
6900# asm 1: movdqa <xmm9=int6464#10,>xmm0=int6464#2
6901# asm 2: movdqa <xmm9=%xmm9,>xmm0=%xmm1
6902movdqa %xmm9,%xmm1
6903
6904# qhasm: xmm4 ^= xmm12
6905# asm 1: pxor <xmm12=int6464#13,<xmm4=int6464#1
6906# asm 2: pxor <xmm12=%xmm12,<xmm4=%xmm0
6907pxor %xmm12,%xmm0
6908
6909# qhasm: xmm0 ^= xmm10
6910# asm 1: pxor <xmm10=int6464#11,<xmm0=int6464#2
6911# asm 2: pxor <xmm10=%xmm10,<xmm0=%xmm1
6912pxor %xmm10,%xmm1
6913
6914# qhasm: xmm3 = xmm7
6915# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
6916# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
6917movdqa %xmm5,%xmm2
6918
6919# qhasm: xmm3 ^= xmm6
6920# asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3
6921# asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2
6922pxor %xmm3,%xmm2
6923
6924# qhasm: xmm3 &= xmm4
6925# asm 1: pand <xmm4=int6464#1,<xmm3=int6464#3
6926# asm 2: pand <xmm4=%xmm0,<xmm3=%xmm2
6927pand %xmm0,%xmm2
6928
6929# qhasm: xmm4 ^= xmm0
6930# asm 1: pxor <xmm0=int6464#2,<xmm4=int6464#1
6931# asm 2: pxor <xmm0=%xmm1,<xmm4=%xmm0
6932pxor %xmm1,%xmm0
6933
6934# qhasm: xmm4 &= xmm6
6935# asm 1: pand <xmm6=int6464#4,<xmm4=int6464#1
6936# asm 2: pand <xmm6=%xmm3,<xmm4=%xmm0
6937pand %xmm3,%xmm0
6938
6939# qhasm: xmm0 &= xmm7
6940# asm 1: pand <xmm7=int6464#6,<xmm0=int6464#2
6941# asm 2: pand <xmm7=%xmm5,<xmm0=%xmm1
6942pand %xmm5,%xmm1
6943
6944# qhasm: xmm0 ^= xmm4
6945# asm 1: pxor <xmm4=int6464#1,<xmm0=int6464#2
6946# asm 2: pxor <xmm4=%xmm0,<xmm0=%xmm1
6947pxor %xmm0,%xmm1
6948
6949# qhasm: xmm4 ^= xmm3
6950# asm 1: pxor <xmm3=int6464#3,<xmm4=int6464#1
6951# asm 2: pxor <xmm3=%xmm2,<xmm4=%xmm0
6952pxor %xmm2,%xmm0
6953
6954# qhasm: xmm2 = xmm5
6955# asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3
6956# asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2
6957movdqa %xmm7,%xmm2
6958
6959# qhasm: xmm2 ^= xmm1
6960# asm 1: pxor <xmm1=int6464#5,<xmm2=int6464#3
6961# asm 2: pxor <xmm1=%xmm4,<xmm2=%xmm2
6962pxor %xmm4,%xmm2
6963
6964# qhasm: xmm2 &= xmm12
6965# asm 1: pand <xmm12=int6464#13,<xmm2=int6464#3
6966# asm 2: pand <xmm12=%xmm12,<xmm2=%xmm2
6967pand %xmm12,%xmm2
6968
6969# qhasm: xmm12 ^= xmm10
6970# asm 1: pxor <xmm10=int6464#11,<xmm12=int6464#13
6971# asm 2: pxor <xmm10=%xmm10,<xmm12=%xmm12
6972pxor %xmm10,%xmm12
6973
6974# qhasm: xmm12 &= xmm1
6975# asm 1: pand <xmm1=int6464#5,<xmm12=int6464#13
6976# asm 2: pand <xmm1=%xmm4,<xmm12=%xmm12
6977pand %xmm4,%xmm12
6978
6979# qhasm: xmm10 &= xmm5
6980# asm 1: pand <xmm5=int6464#8,<xmm10=int6464#11
6981# asm 2: pand <xmm5=%xmm7,<xmm10=%xmm10
6982pand %xmm7,%xmm10
6983
6984# qhasm: xmm12 ^= xmm10
6985# asm 1: pxor <xmm10=int6464#11,<xmm12=int6464#13
6986# asm 2: pxor <xmm10=%xmm10,<xmm12=%xmm12
6987pxor %xmm10,%xmm12
6988
6989# qhasm: xmm10 ^= xmm2
6990# asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#11
6991# asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm10
6992pxor %xmm2,%xmm10
6993
6994# qhasm: xmm7 ^= xmm5
6995# asm 1: pxor <xmm5=int6464#8,<xmm7=int6464#6
6996# asm 2: pxor <xmm5=%xmm7,<xmm7=%xmm5
6997pxor %xmm7,%xmm5
6998
6999# qhasm: xmm6 ^= xmm1
7000# asm 1: pxor <xmm1=int6464#5,<xmm6=int6464#4
7001# asm 2: pxor <xmm1=%xmm4,<xmm6=%xmm3
7002pxor %xmm4,%xmm3
7003
7004# qhasm: xmm3 = xmm7
7005# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
7006# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
7007movdqa %xmm5,%xmm2
7008
7009# qhasm: xmm3 ^= xmm6
7010# asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3
7011# asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2
7012pxor %xmm3,%xmm2
7013
7014# qhasm: xmm3 &= xmm15
7015# asm 1: pand <xmm15=int6464#16,<xmm3=int6464#3
7016# asm 2: pand <xmm15=%xmm15,<xmm3=%xmm2
7017pand %xmm15,%xmm2
7018
7019# qhasm: xmm15 ^= xmm9
7020# asm 1: pxor <xmm9=int6464#10,<xmm15=int6464#16
7021# asm 2: pxor <xmm9=%xmm9,<xmm15=%xmm15
7022pxor %xmm9,%xmm15
7023
7024# qhasm: xmm15 &= xmm6
7025# asm 1: pand <xmm6=int6464#4,<xmm15=int6464#16
7026# asm 2: pand <xmm6=%xmm3,<xmm15=%xmm15
7027pand %xmm3,%xmm15
7028
7029# qhasm: xmm9 &= xmm7
7030# asm 1: pand <xmm7=int6464#6,<xmm9=int6464#10
7031# asm 2: pand <xmm7=%xmm5,<xmm9=%xmm9
7032pand %xmm5,%xmm9
7033
7034# qhasm: xmm15 ^= xmm9
7035# asm 1: pxor <xmm9=int6464#10,<xmm15=int6464#16
7036# asm 2: pxor <xmm9=%xmm9,<xmm15=%xmm15
7037pxor %xmm9,%xmm15
7038
7039# qhasm: xmm9 ^= xmm3
7040# asm 1: pxor <xmm3=int6464#3,<xmm9=int6464#10
7041# asm 2: pxor <xmm3=%xmm2,<xmm9=%xmm9
7042pxor %xmm2,%xmm9
7043
7044# qhasm: xmm15 ^= xmm4
7045# asm 1: pxor <xmm4=int6464#1,<xmm15=int6464#16
7046# asm 2: pxor <xmm4=%xmm0,<xmm15=%xmm15
7047pxor %xmm0,%xmm15
7048
7049# qhasm: xmm12 ^= xmm4
7050# asm 1: pxor <xmm4=int6464#1,<xmm12=int6464#13
7051# asm 2: pxor <xmm4=%xmm0,<xmm12=%xmm12
7052pxor %xmm0,%xmm12
7053
7054# qhasm: xmm9 ^= xmm0
7055# asm 1: pxor <xmm0=int6464#2,<xmm9=int6464#10
7056# asm 2: pxor <xmm0=%xmm1,<xmm9=%xmm9
7057pxor %xmm1,%xmm9
7058
7059# qhasm: xmm10 ^= xmm0
7060# asm 1: pxor <xmm0=int6464#2,<xmm10=int6464#11
7061# asm 2: pxor <xmm0=%xmm1,<xmm10=%xmm10
7062pxor %xmm1,%xmm10
7063
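# note: the pure XOR chain below looks like the output linear transform of
# the S-box, mapping the inversion results back to the output bit ordering.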
7064# qhasm: xmm15 ^= xmm8
7065# asm 1: pxor <xmm8=int6464#9,<xmm15=int6464#16
7066# asm 2: pxor <xmm8=%xmm8,<xmm15=%xmm15
7067pxor %xmm8,%xmm15
7068
7069# qhasm: xmm9 ^= xmm14
7070# asm 1: pxor <xmm14=int6464#15,<xmm9=int6464#10
7071# asm 2: pxor <xmm14=%xmm14,<xmm9=%xmm9
7072pxor %xmm14,%xmm9
7073
7074# qhasm: xmm12 ^= xmm15
7075# asm 1: pxor <xmm15=int6464#16,<xmm12=int6464#13
7076# asm 2: pxor <xmm15=%xmm15,<xmm12=%xmm12
7077pxor %xmm15,%xmm12
7078
7079# qhasm: xmm14 ^= xmm8
7080# asm 1: pxor <xmm8=int6464#9,<xmm14=int6464#15
7081# asm 2: pxor <xmm8=%xmm8,<xmm14=%xmm14
7082pxor %xmm8,%xmm14
7083
7084# qhasm: xmm8 ^= xmm9
7085# asm 1: pxor <xmm9=int6464#10,<xmm8=int6464#9
7086# asm 2: pxor <xmm9=%xmm9,<xmm8=%xmm8
7087pxor %xmm9,%xmm8
7088
7089# qhasm: xmm9 ^= xmm13
7090# asm 1: pxor <xmm13=int6464#14,<xmm9=int6464#10
7091# asm 2: pxor <xmm13=%xmm13,<xmm9=%xmm9
7092pxor %xmm13,%xmm9
7093
7094# qhasm: xmm13 ^= xmm10
7095# asm 1: pxor <xmm10=int6464#11,<xmm13=int6464#14
7096# asm 2: pxor <xmm10=%xmm10,<xmm13=%xmm13
7097pxor %xmm10,%xmm13
7098
7099# qhasm: xmm12 ^= xmm13
7100# asm 1: pxor <xmm13=int6464#14,<xmm12=int6464#13
7101# asm 2: pxor <xmm13=%xmm13,<xmm12=%xmm12
7102pxor %xmm13,%xmm12
7103
7104# qhasm: xmm10 ^= xmm11
7105# asm 1: pxor <xmm11=int6464#12,<xmm10=int6464#11
7106# asm 2: pxor <xmm11=%xmm11,<xmm10=%xmm10
7107pxor %xmm11,%xmm10
7108
7109# qhasm: xmm11 ^= xmm13
7110# asm 1: pxor <xmm13=int6464#14,<xmm11=int6464#12
7111# asm 2: pxor <xmm13=%xmm13,<xmm11=%xmm11
7112pxor %xmm13,%xmm11
7113
7114# qhasm: xmm14 ^= xmm11
7115# asm 1: pxor <xmm11=int6464#12,<xmm14=int6464#15
7116# asm 2: pxor <xmm11=%xmm11,<xmm14=%xmm14
7117pxor %xmm11,%xmm14
7118
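# note: as in the previous round, the pshufd $0x93 copies below likely
# start bitsliced MixColumns (rotation by one 32-bit column).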
7119# qhasm: xmm0 = shuffle dwords of xmm8 by 0x93
7120# asm 1: pshufd $0x93,<xmm8=int6464#9,>xmm0=int6464#1
7121# asm 2: pshufd $0x93,<xmm8=%xmm8,>xmm0=%xmm0
7122pshufd $0x93,%xmm8,%xmm0
7123
7124# qhasm: xmm1 = shuffle dwords of xmm9 by 0x93
7125# asm 1: pshufd $0x93,<xmm9=int6464#10,>xmm1=int6464#2
7126# asm 2: pshufd $0x93,<xmm9=%xmm9,>xmm1=%xmm1
7127pshufd $0x93,%xmm9,%xmm1
7128
7129# qhasm: xmm2 = shuffle dwords of xmm12 by 0x93
7130# asm 1: pshufd $0x93,<xmm12=int6464#13,>xmm2=int6464#3
7131# asm 2: pshufd $0x93,<xmm12=%xmm12,>xmm2=%xmm2
7132pshufd $0x93,%xmm12,%xmm2
7133
7134# qhasm: xmm3 = shuffle dwords of xmm14 by 0x93
7135# asm 1: pshufd $0x93,<xmm14=int6464#15,>xmm3=int6464#4
7136# asm 2: pshufd $0x93,<xmm14=%xmm14,>xmm3=%xmm3
7137pshufd $0x93,%xmm14,%xmm3
7138
7139# qhasm: xmm4 = shuffle dwords of xmm11 by 0x93
7140# asm 1: pshufd $0x93,<xmm11=int6464#12,>xmm4=int6464#5
7141# asm 2: pshufd $0x93,<xmm11=%xmm11,>xmm4=%xmm4
7142pshufd $0x93,%xmm11,%xmm4
7143
7144# qhasm: xmm5 = shuffle dwords of xmm15 by 0x93
7145# asm 1: pshufd $0x93,<xmm15=int6464#16,>xmm5=int6464#6
7146# asm 2: pshufd $0x93,<xmm15=%xmm15,>xmm5=%xmm5
7147pshufd $0x93,%xmm15,%xmm5
7148
7149# qhasm: xmm6 = shuffle dwords of xmm10 by 0x93
7150# asm 1: pshufd $0x93,<xmm10=int6464#11,>xmm6=int6464#7
7151# asm 2: pshufd $0x93,<xmm10=%xmm10,>xmm6=%xmm6
7152pshufd $0x93,%xmm10,%xmm6
7153
7154# qhasm: xmm7 = shuffle dwords of xmm13 by 0x93
7155# asm 1: pshufd $0x93,<xmm13=int6464#14,>xmm7=int6464#8
7156# asm 2: pshufd $0x93,<xmm13=%xmm13,>xmm7=%xmm7
7157pshufd $0x93,%xmm13,%xmm7
7158
7159# qhasm: xmm8 ^= xmm0
7160# asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9
7161# asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8
7162pxor %xmm0,%xmm8
7163
7164# qhasm: xmm9 ^= xmm1
7165# asm 1: pxor <xmm1=int6464#2,<xmm9=int6464#10
7166# asm 2: pxor <xmm1=%xmm1,<xmm9=%xmm9
7167pxor %xmm1,%xmm9
7168
7169# qhasm: xmm12 ^= xmm2
7170# asm 1: pxor <xmm2=int6464#3,<xmm12=int6464#13
7171# asm 2: pxor <xmm2=%xmm2,<xmm12=%xmm12
7172pxor %xmm2,%xmm12
7173
7174# qhasm: xmm14 ^= xmm3
7175# asm 1: pxor <xmm3=int6464#4,<xmm14=int6464#15
7176# asm 2: pxor <xmm3=%xmm3,<xmm14=%xmm14
7177pxor %xmm3,%xmm14
7178
7179# qhasm: xmm11 ^= xmm4
7180# asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#12
7181# asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm11
7182pxor %xmm4,%xmm11
7183
7184# qhasm: xmm15 ^= xmm5
7185# asm 1: pxor <xmm5=int6464#6,<xmm15=int6464#16
7186# asm 2: pxor <xmm5=%xmm5,<xmm15=%xmm15
7187pxor %xmm5,%xmm15
7188
7189# qhasm: xmm10 ^= xmm6
7190# asm 1: pxor <xmm6=int6464#7,<xmm10=int6464#11
7191# asm 2: pxor <xmm6=%xmm6,<xmm10=%xmm10
7192pxor %xmm6,%xmm10
7193
7194# qhasm: xmm13 ^= xmm7
7195# asm 1: pxor <xmm7=int6464#8,<xmm13=int6464#14
7196# asm 2: pxor <xmm7=%xmm7,<xmm13=%xmm13
7197pxor %xmm7,%xmm13
7198
7199# qhasm: xmm0 ^= xmm13
7200# asm 1: pxor <xmm13=int6464#14,<xmm0=int6464#1
7201# asm 2: pxor <xmm13=%xmm13,<xmm0=%xmm0
7202pxor %xmm13,%xmm0
7203
7204# qhasm: xmm1 ^= xmm8
7205# asm 1: pxor <xmm8=int6464#9,<xmm1=int6464#2
7206# asm 2: pxor <xmm8=%xmm8,<xmm1=%xmm1
7207pxor %xmm8,%xmm1
7208
7209# qhasm: xmm2 ^= xmm9
7210# asm 1: pxor <xmm9=int6464#10,<xmm2=int6464#3
7211# asm 2: pxor <xmm9=%xmm9,<xmm2=%xmm2
7212pxor %xmm9,%xmm2
7213
7214# qhasm: xmm1 ^= xmm13
7215# asm 1: pxor <xmm13=int6464#14,<xmm1=int6464#2
7216# asm 2: pxor <xmm13=%xmm13,<xmm1=%xmm1
7217pxor %xmm13,%xmm1
7218
7219# qhasm: xmm3 ^= xmm12
7220# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4
7221# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3
7222pxor %xmm12,%xmm3
7223
7224# qhasm: xmm4 ^= xmm14
7225# asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5
7226# asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4
7227pxor %xmm14,%xmm4
7228
7229# qhasm: xmm5 ^= xmm11
7230# asm 1: pxor <xmm11=int6464#12,<xmm5=int6464#6
7231# asm 2: pxor <xmm11=%xmm11,<xmm5=%xmm5
7232pxor %xmm11,%xmm5
7233
7234# qhasm: xmm3 ^= xmm13
7235# asm 1: pxor <xmm13=int6464#14,<xmm3=int6464#4
7236# asm 2: pxor <xmm13=%xmm13,<xmm3=%xmm3
7237pxor %xmm13,%xmm3
7238
7239# qhasm: xmm6 ^= xmm15
7240# asm 1: pxor <xmm15=int6464#16,<xmm6=int6464#7
7241# asm 2: pxor <xmm15=%xmm15,<xmm6=%xmm6
7242pxor %xmm15,%xmm6
7243
7244# qhasm: xmm7 ^= xmm10
7245# asm 1: pxor <xmm10=int6464#11,<xmm7=int6464#8
7246# asm 2: pxor <xmm10=%xmm10,<xmm7=%xmm7
7247pxor %xmm10,%xmm7
7248
7249# qhasm: xmm4 ^= xmm13
7250# asm 1: pxor <xmm13=int6464#14,<xmm4=int6464#5
7251# asm 2: pxor <xmm13=%xmm13,<xmm4=%xmm4
7252pxor %xmm13,%xmm4
7253
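# note: the pshufd $0x4E shuffles below likely add the two-column rotation
# that completes MixColumns for this round.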
7254# qhasm: xmm8 = shuffle dwords of xmm8 by 0x4E
7255# asm 1: pshufd $0x4E,<xmm8=int6464#9,>xmm8=int6464#9
7256# asm 2: pshufd $0x4E,<xmm8=%xmm8,>xmm8=%xmm8
7257pshufd $0x4E,%xmm8,%xmm8
7258
7259# qhasm: xmm9 = shuffle dwords of xmm9 by 0x4E
7260# asm 1: pshufd $0x4E,<xmm9=int6464#10,>xmm9=int6464#10
7261# asm 2: pshufd $0x4E,<xmm9=%xmm9,>xmm9=%xmm9
7262pshufd $0x4E,%xmm9,%xmm9
7263
7264# qhasm: xmm12 = shuffle dwords of xmm12 by 0x4E
7265# asm 1: pshufd $0x4E,<xmm12=int6464#13,>xmm12=int6464#13
7266# asm 2: pshufd $0x4E,<xmm12=%xmm12,>xmm12=%xmm12
7267pshufd $0x4E,%xmm12,%xmm12
7268
7269# qhasm: xmm14 = shuffle dwords of xmm14 by 0x4E
7270# asm 1: pshufd $0x4E,<xmm14=int6464#15,>xmm14=int6464#15
7271# asm 2: pshufd $0x4E,<xmm14=%xmm14,>xmm14=%xmm14
7272pshufd $0x4E,%xmm14,%xmm14
7273
7274# qhasm: xmm11 = shuffle dwords of xmm11 by 0x4E
7275# asm 1: pshufd $0x4E,<xmm11=int6464#12,>xmm11=int6464#12
7276# asm 2: pshufd $0x4E,<xmm11=%xmm11,>xmm11=%xmm11
7277pshufd $0x4E,%xmm11,%xmm11
7278
7279# qhasm: xmm15 = shuffle dwords of xmm15 by 0x4E
7280# asm 1: pshufd $0x4E,<xmm15=int6464#16,>xmm15=int6464#16
7281# asm 2: pshufd $0x4E,<xmm15=%xmm15,>xmm15=%xmm15
7282pshufd $0x4E,%xmm15,%xmm15
7283
7284# qhasm: xmm10 = shuffle dwords of xmm10 by 0x4E
7285# asm 1: pshufd $0x4E,<xmm10=int6464#11,>xmm10=int6464#11
7286# asm 2: pshufd $0x4E,<xmm10=%xmm10,>xmm10=%xmm10
7287pshufd $0x4E,%xmm10,%xmm10
7288
7289# qhasm: xmm13 = shuffle dwords of xmm13 by 0x4E
7290# asm 1: pshufd $0x4E,<xmm13=int6464#14,>xmm13=int6464#14
7291# asm 2: pshufd $0x4E,<xmm13=%xmm13,>xmm13=%xmm13
7292pshufd $0x4E,%xmm13,%xmm13
7293
7294# qhasm: xmm0 ^= xmm8
7295# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
7296# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
7297pxor %xmm8,%xmm0
7298
7299# qhasm: xmm1 ^= xmm9
7300# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
7301# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
7302pxor %xmm9,%xmm1
7303
7304# qhasm: xmm2 ^= xmm12
7305# asm 1: pxor <xmm12=int6464#13,<xmm2=int6464#3
7306# asm 2: pxor <xmm12=%xmm12,<xmm2=%xmm2
7307pxor %xmm12,%xmm2
7308
7309# qhasm: xmm3 ^= xmm14
7310# asm 1: pxor <xmm14=int6464#15,<xmm3=int6464#4
7311# asm 2: pxor <xmm14=%xmm14,<xmm3=%xmm3
7312pxor %xmm14,%xmm3
7313
7314# qhasm: xmm4 ^= xmm11
7315# asm 1: pxor <xmm11=int6464#12,<xmm4=int6464#5
7316# asm 2: pxor <xmm11=%xmm11,<xmm4=%xmm4
7317pxor %xmm11,%xmm4
7318
7319# qhasm: xmm5 ^= xmm15
7320# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
7321# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
7322pxor %xmm15,%xmm5
7323
7324# qhasm: xmm6 ^= xmm10
7325# asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7
7326# asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6
7327pxor %xmm10,%xmm6
7328
7329# qhasm: xmm7 ^= xmm13
7330# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
7331# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
7332pxor %xmm13,%xmm7
7333
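# note: offsets 768..880 are the next 128-byte key block, so this is
# presumably AddRoundKey for round 6, again followed by ShiftRows via
# pshufb SR.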
7334# qhasm: xmm0 ^= *(int128 *)(c + 768)
7335# asm 1: pxor 768(<c=int64#5),<xmm0=int6464#1
7336# asm 2: pxor 768(<c=%r8),<xmm0=%xmm0
7337pxor 768(%r8),%xmm0
7338
7339# qhasm: shuffle bytes of xmm0 by SR
7340# asm 1: pshufb SR,<xmm0=int6464#1
7341# asm 2: pshufb SR,<xmm0=%xmm0
7342pshufb SR,%xmm0
7343
7344# qhasm: xmm1 ^= *(int128 *)(c + 784)
7345# asm 1: pxor 784(<c=int64#5),<xmm1=int6464#2
7346# asm 2: pxor 784(<c=%r8),<xmm1=%xmm1
7347pxor 784(%r8),%xmm1
7348
7349# qhasm: shuffle bytes of xmm1 by SR
7350# asm 1: pshufb SR,<xmm1=int6464#2
7351# asm 2: pshufb SR,<xmm1=%xmm1
7352pshufb SR,%xmm1
7353
7354# qhasm: xmm2 ^= *(int128 *)(c + 800)
7355# asm 1: pxor 800(<c=int64#5),<xmm2=int6464#3
7356# asm 2: pxor 800(<c=%r8),<xmm2=%xmm2
7357pxor 800(%r8),%xmm2
7358
7359# qhasm: shuffle bytes of xmm2 by SR
7360# asm 1: pshufb SR,<xmm2=int6464#3
7361# asm 2: pshufb SR,<xmm2=%xmm2
7362pshufb SR,%xmm2
7363
7364# qhasm: xmm3 ^= *(int128 *)(c + 816)
7365# asm 1: pxor 816(<c=int64#5),<xmm3=int6464#4
7366# asm 2: pxor 816(<c=%r8),<xmm3=%xmm3
7367pxor 816(%r8),%xmm3
7368
7369# qhasm: shuffle bytes of xmm3 by SR
7370# asm 1: pshufb SR,<xmm3=int6464#4
7371# asm 2: pshufb SR,<xmm3=%xmm3
7372pshufb SR,%xmm3
7373
7374# qhasm: xmm4 ^= *(int128 *)(c + 832)
7375# asm 1: pxor 832(<c=int64#5),<xmm4=int6464#5
7376# asm 2: pxor 832(<c=%r8),<xmm4=%xmm4
7377pxor 832(%r8),%xmm4
7378
7379# qhasm: shuffle bytes of xmm4 by SR
7380# asm 1: pshufb SR,<xmm4=int6464#5
7381# asm 2: pshufb SR,<xmm4=%xmm4
7382pshufb SR,%xmm4
7383
7384# qhasm: xmm5 ^= *(int128 *)(c + 848)
7385# asm 1: pxor 848(<c=int64#5),<xmm5=int6464#6
7386# asm 2: pxor 848(<c=%r8),<xmm5=%xmm5
7387pxor 848(%r8),%xmm5
7388
7389# qhasm: shuffle bytes of xmm5 by SR
7390# asm 1: pshufb SR,<xmm5=int6464#6
7391# asm 2: pshufb SR,<xmm5=%xmm5
7392pshufb SR,%xmm5
7393
7394# qhasm: xmm6 ^= *(int128 *)(c + 864)
7395# asm 1: pxor 864(<c=int64#5),<xmm6=int6464#7
7396# asm 2: pxor 864(<c=%r8),<xmm6=%xmm6
7397pxor 864(%r8),%xmm6
7398
7399# qhasm: shuffle bytes of xmm6 by SR
7400# asm 1: pshufb SR,<xmm6=int6464#7
7401# asm 2: pshufb SR,<xmm6=%xmm6
7402pshufb SR,%xmm6
7403
7404# qhasm: xmm7 ^= *(int128 *)(c + 880)
7405# asm 1: pxor 880(<c=int64#5),<xmm7=int6464#8
7406# asm 2: pxor 880(<c=%r8),<xmm7=%xmm7
7407pxor 880(%r8),%xmm7
7408
7409# qhasm: shuffle bytes of xmm7 by SR
7410# asm 1: pshufb SR,<xmm7=int6464#8
7411# asm 2: pshufb SR,<xmm7=%xmm7
7412pshufb SR,%xmm7
7413
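# note: another S-box evaluation appears to begin here with its XOR-only
# input linear layer.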
7414# qhasm: xmm5 ^= xmm6
7415# asm 1: pxor <xmm6=int6464#7,<xmm5=int6464#6
7416# asm 2: pxor <xmm6=%xmm6,<xmm5=%xmm5
7417pxor %xmm6,%xmm5
7418
7419# qhasm: xmm2 ^= xmm1
7420# asm 1: pxor <xmm1=int6464#2,<xmm2=int6464#3
7421# asm 2: pxor <xmm1=%xmm1,<xmm2=%xmm2
7422pxor %xmm1,%xmm2
7423
7424# qhasm: xmm5 ^= xmm0
7425# asm 1: pxor <xmm0=int6464#1,<xmm5=int6464#6
7426# asm 2: pxor <xmm0=%xmm0,<xmm5=%xmm5
7427pxor %xmm0,%xmm5
7428
7429# qhasm: xmm6 ^= xmm2
7430# asm 1: pxor <xmm2=int6464#3,<xmm6=int6464#7
7431# asm 2: pxor <xmm2=%xmm2,<xmm6=%xmm6
7432pxor %xmm2,%xmm6
7433
7434# qhasm: xmm3 ^= xmm0
7435# asm 1: pxor <xmm0=int6464#1,<xmm3=int6464#4
7436# asm 2: pxor <xmm0=%xmm0,<xmm3=%xmm3
7437pxor %xmm0,%xmm3
7438
7439# qhasm: xmm6 ^= xmm3
7440# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
7441# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
7442pxor %xmm3,%xmm6
7443
7444# qhasm: xmm3 ^= xmm7
7445# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4
7446# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3
7447pxor %xmm7,%xmm3
7448
7449# qhasm: xmm3 ^= xmm4
7450# asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4
7451# asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3
7452pxor %xmm4,%xmm3
7453
7454# qhasm: xmm7 ^= xmm5
7455# asm 1: pxor <xmm5=int6464#6,<xmm7=int6464#8
7456# asm 2: pxor <xmm5=%xmm5,<xmm7=%xmm7
7457pxor %xmm5,%xmm7
7458
7459# qhasm: xmm3 ^= xmm1
7460# asm 1: pxor <xmm1=int6464#2,<xmm3=int6464#4
7461# asm 2: pxor <xmm1=%xmm1,<xmm3=%xmm3
7462pxor %xmm1,%xmm3
7463
7464# qhasm: xmm4 ^= xmm5
7465# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
7466# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
7467pxor %xmm5,%xmm4
7468
7469# qhasm: xmm2 ^= xmm7
7470# asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3
7471# asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2
7472pxor %xmm7,%xmm2
7473
7474# qhasm: xmm1 ^= xmm5
7475# asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2
7476# asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1
7477pxor %xmm5,%xmm1
7478
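# note: the movdqa/pand/por gates below presumably re-enter the nonlinear
# inversion core of the S-box for this round.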
7479# qhasm: xmm11 = xmm7
7480# asm 1: movdqa <xmm7=int6464#8,>xmm11=int6464#9
7481# asm 2: movdqa <xmm7=%xmm7,>xmm11=%xmm8
7482movdqa %xmm7,%xmm8
7483
7484# qhasm: xmm10 = xmm1
7485# asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10
7486# asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9
7487movdqa %xmm1,%xmm9
7488
7489# qhasm: xmm9 = xmm5
7490# asm 1: movdqa <xmm5=int6464#6,>xmm9=int6464#11
7491# asm 2: movdqa <xmm5=%xmm5,>xmm9=%xmm10
7492movdqa %xmm5,%xmm10
7493
7494# qhasm: xmm13 = xmm2
7495# asm 1: movdqa <xmm2=int6464#3,>xmm13=int6464#12
7496# asm 2: movdqa <xmm2=%xmm2,>xmm13=%xmm11
7497movdqa %xmm2,%xmm11
7498
7499# qhasm: xmm12 = xmm6
7500# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#13
7501# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm12
7502movdqa %xmm6,%xmm12
7503
7504# qhasm: xmm11 ^= xmm4
7505# asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#9
7506# asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm8
7507pxor %xmm4,%xmm8
7508
7509# qhasm: xmm10 ^= xmm2
7510# asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#10
7511# asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm9
7512pxor %xmm2,%xmm9
7513
7514# qhasm: xmm9 ^= xmm3
7515# asm 1: pxor <xmm3=int6464#4,<xmm9=int6464#11
7516# asm 2: pxor <xmm3=%xmm3,<xmm9=%xmm10
7517pxor %xmm3,%xmm10
7518
7519# qhasm: xmm13 ^= xmm4
7520# asm 1: pxor <xmm4=int6464#5,<xmm13=int6464#12
7521# asm 2: pxor <xmm4=%xmm4,<xmm13=%xmm11
7522pxor %xmm4,%xmm11
7523
7524# qhasm: xmm12 ^= xmm0
7525# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13
7526# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12
7527pxor %xmm0,%xmm12
7528
7529# qhasm: xmm14 = xmm11
7530# asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14
7531# asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13
7532movdqa %xmm8,%xmm13
7533
7534# qhasm: xmm8 = xmm10
7535# asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15
7536# asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14
7537movdqa %xmm9,%xmm14
7538
7539# qhasm: xmm15 = xmm11
7540# asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16
7541# asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15
7542movdqa %xmm8,%xmm15
7543
7544# qhasm: xmm10 |= xmm9
7545# asm 1: por <xmm9=int6464#11,<xmm10=int6464#10
7546# asm 2: por <xmm9=%xmm10,<xmm10=%xmm9
7547por %xmm10,%xmm9
7548
7549# qhasm: xmm11 |= xmm12
7550# asm 1: por <xmm12=int6464#13,<xmm11=int6464#9
7551# asm 2: por <xmm12=%xmm12,<xmm11=%xmm8
7552por %xmm12,%xmm8
7553
7554# qhasm: xmm15 ^= xmm8
7555# asm 1: pxor <xmm8=int6464#15,<xmm15=int6464#16
7556# asm 2: pxor <xmm8=%xmm14,<xmm15=%xmm15
7557pxor %xmm14,%xmm15
7558
7559# qhasm: xmm14 &= xmm12
7560# asm 1: pand <xmm12=int6464#13,<xmm14=int6464#14
7561# asm 2: pand <xmm12=%xmm12,<xmm14=%xmm13
7562pand %xmm12,%xmm13
7563
7564# qhasm: xmm8 &= xmm9
7565# asm 1: pand <xmm9=int6464#11,<xmm8=int6464#15
7566# asm 2: pand <xmm9=%xmm10,<xmm8=%xmm14
7567pand %xmm10,%xmm14
7568
7569# qhasm: xmm12 ^= xmm9
7570# asm 1: pxor <xmm9=int6464#11,<xmm12=int6464#13
7571# asm 2: pxor <xmm9=%xmm10,<xmm12=%xmm12
7572pxor %xmm10,%xmm12
7573
7574# qhasm: xmm15 &= xmm12
7575# asm 1: pand <xmm12=int6464#13,<xmm15=int6464#16
7576# asm 2: pand <xmm12=%xmm12,<xmm15=%xmm15
7577pand %xmm12,%xmm15
7578
7579# qhasm: xmm12 = xmm3
7580# asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#11
7581# asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm10
7582movdqa %xmm3,%xmm10
7583
7584# qhasm: xmm12 ^= xmm0
7585# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#11
7586# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm10
7587pxor %xmm0,%xmm10
7588
7589# qhasm: xmm13 &= xmm12
7590# asm 1: pand <xmm12=int6464#11,<xmm13=int6464#12
7591# asm 2: pand <xmm12=%xmm10,<xmm13=%xmm11
7592pand %xmm10,%xmm11
7593
7594# qhasm: xmm11 ^= xmm13
7595# asm 1: pxor <xmm13=int6464#12,<xmm11=int6464#9
7596# asm 2: pxor <xmm13=%xmm11,<xmm11=%xmm8
7597pxor %xmm11,%xmm8
7598
7599# qhasm: xmm10 ^= xmm13
7600# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
7601# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
7602pxor %xmm11,%xmm9
7603
7604# qhasm: xmm13 = xmm7
7605# asm 1: movdqa <xmm7=int6464#8,>xmm13=int6464#11
7606# asm 2: movdqa <xmm7=%xmm7,>xmm13=%xmm10
7607movdqa %xmm7,%xmm10
7608
7609# qhasm: xmm13 ^= xmm1
7610# asm 1: pxor <xmm1=int6464#2,<xmm13=int6464#11
7611# asm 2: pxor <xmm1=%xmm1,<xmm13=%xmm10
7612pxor %xmm1,%xmm10
7613
7614# qhasm: xmm12 = xmm5
7615# asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#12
7616# asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm11
7617movdqa %xmm5,%xmm11
7618
7619# qhasm: xmm9 = xmm13
7620# asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13
7621# asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12
7622movdqa %xmm10,%xmm12
7623
7624# qhasm: xmm12 ^= xmm6
7625# asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#12
7626# asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm11
7627pxor %xmm6,%xmm11
7628
7629# qhasm: xmm9 |= xmm12
7630# asm 1: por <xmm12=int6464#12,<xmm9=int6464#13
7631# asm 2: por <xmm12=%xmm11,<xmm9=%xmm12
7632por %xmm11,%xmm12
7633
7634# qhasm: xmm13 &= xmm12
7635# asm 1: pand <xmm12=int6464#12,<xmm13=int6464#11
7636# asm 2: pand <xmm12=%xmm11,<xmm13=%xmm10
7637pand %xmm11,%xmm10
7638
7639# qhasm: xmm8 ^= xmm13
7640# asm 1: pxor <xmm13=int6464#11,<xmm8=int6464#15
7641# asm 2: pxor <xmm13=%xmm10,<xmm8=%xmm14
7642pxor %xmm10,%xmm14
7643
7644# qhasm: xmm11 ^= xmm15
7645# asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#9
7646# asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm8
7647pxor %xmm15,%xmm8
7648
7649# qhasm: xmm10 ^= xmm14
7650# asm 1: pxor <xmm14=int6464#14,<xmm10=int6464#10
7651# asm 2: pxor <xmm14=%xmm13,<xmm10=%xmm9
7652pxor %xmm13,%xmm9
7653
7654# qhasm: xmm9 ^= xmm15
7655# asm 1: pxor <xmm15=int6464#16,<xmm9=int6464#13
7656# asm 2: pxor <xmm15=%xmm15,<xmm9=%xmm12
7657pxor %xmm15,%xmm12
7658
7659# qhasm: xmm8 ^= xmm14
7660# asm 1: pxor <xmm14=int6464#14,<xmm8=int6464#15
7661# asm 2: pxor <xmm14=%xmm13,<xmm8=%xmm14
7662pxor %xmm13,%xmm14
7663
7664# qhasm: xmm9 ^= xmm14
7665# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
7666# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
7667pxor %xmm13,%xmm12
7668
7669# qhasm: xmm12 = xmm2
7670# asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#11
7671# asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm10
7672movdqa %xmm2,%xmm10
7673
7674# qhasm: xmm13 = xmm4
7675# asm 1: movdqa <xmm4=int6464#5,>xmm13=int6464#12
7676# asm 2: movdqa <xmm4=%xmm4,>xmm13=%xmm11
7677movdqa %xmm4,%xmm11
7678
7679# qhasm: xmm14 = xmm1
7680# asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14
7681# asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13
7682movdqa %xmm1,%xmm13
7683
7684# qhasm: xmm15 = xmm7
7685# asm 1: movdqa <xmm7=int6464#8,>xmm15=int6464#16
7686# asm 2: movdqa <xmm7=%xmm7,>xmm15=%xmm15
7687movdqa %xmm7,%xmm15
7688
7689# qhasm: xmm12 &= xmm3
7690# asm 1: pand <xmm3=int6464#4,<xmm12=int6464#11
7691# asm 2: pand <xmm3=%xmm3,<xmm12=%xmm10
7692pand %xmm3,%xmm10
7693
7694# qhasm: xmm13 &= xmm0
7695# asm 1: pand <xmm0=int6464#1,<xmm13=int6464#12
7696# asm 2: pand <xmm0=%xmm0,<xmm13=%xmm11
7697pand %xmm0,%xmm11
7698
7699# qhasm: xmm14 &= xmm5
7700# asm 1: pand <xmm5=int6464#6,<xmm14=int6464#14
7701# asm 2: pand <xmm5=%xmm5,<xmm14=%xmm13
7702pand %xmm5,%xmm13
7703
7704# qhasm: xmm15 |= xmm6
7705# asm 1: por <xmm6=int6464#7,<xmm15=int6464#16
7706# asm 2: por <xmm6=%xmm6,<xmm15=%xmm15
7707por %xmm6,%xmm15
7708
7709# qhasm: xmm11 ^= xmm12
7710# asm 1: pxor <xmm12=int6464#11,<xmm11=int6464#9
7711# asm 2: pxor <xmm12=%xmm10,<xmm11=%xmm8
7712pxor %xmm10,%xmm8
7713
7714# qhasm: xmm10 ^= xmm13
7715# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
7716# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
7717pxor %xmm11,%xmm9
7718
7719# qhasm: xmm9 ^= xmm14
7720# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
7721# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
7722pxor %xmm13,%xmm12
7723
7724# qhasm: xmm8 ^= xmm15
7725# asm 1: pxor <xmm15=int6464#16,<xmm8=int6464#15
7726# asm 2: pxor <xmm15=%xmm15,<xmm8=%xmm14
7727pxor %xmm15,%xmm14
7728
7729# qhasm: xmm12 = xmm11
7730# asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11
7731# asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10
7732movdqa %xmm8,%xmm10
7733
7734# qhasm: xmm12 ^= xmm10
7735# asm 1: pxor <xmm10=int6464#10,<xmm12=int6464#11
7736# asm 2: pxor <xmm10=%xmm9,<xmm12=%xmm10
7737pxor %xmm9,%xmm10
7738
7739# qhasm: xmm11 &= xmm9
7740# asm 1: pand <xmm9=int6464#13,<xmm11=int6464#9
7741# asm 2: pand <xmm9=%xmm12,<xmm11=%xmm8
7742pand %xmm12,%xmm8
7743
7744# qhasm: xmm14 = xmm8
7745# asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12
7746# asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11
7747movdqa %xmm14,%xmm11
7748
7749# qhasm: xmm14 ^= xmm11
7750# asm 1: pxor <xmm11=int6464#9,<xmm14=int6464#12
7751# asm 2: pxor <xmm11=%xmm8,<xmm14=%xmm11
7752pxor %xmm8,%xmm11
7753
7754# qhasm: xmm15 = xmm12
7755# asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14
7756# asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13
7757movdqa %xmm10,%xmm13
7758
7759# qhasm: xmm15 &= xmm14
7760# asm 1: pand <xmm14=int6464#12,<xmm15=int6464#14
7761# asm 2: pand <xmm14=%xmm11,<xmm15=%xmm13
7762pand %xmm11,%xmm13
7763
7764# qhasm: xmm15 ^= xmm10
7765# asm 1: pxor <xmm10=int6464#10,<xmm15=int6464#14
7766# asm 2: pxor <xmm10=%xmm9,<xmm15=%xmm13
7767pxor %xmm9,%xmm13
7768
7769# qhasm: xmm13 = xmm9
7770# asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16
7771# asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15
7772movdqa %xmm12,%xmm15
7773
7774# qhasm: xmm13 ^= xmm8
7775# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
7776# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
7777pxor %xmm14,%xmm15
7778
7779# qhasm: xmm11 ^= xmm10
7780# asm 1: pxor <xmm10=int6464#10,<xmm11=int6464#9
7781# asm 2: pxor <xmm10=%xmm9,<xmm11=%xmm8
7782pxor %xmm9,%xmm8
7783
7784# qhasm: xmm13 &= xmm11
7785# asm 1: pand <xmm11=int6464#9,<xmm13=int6464#16
7786# asm 2: pand <xmm11=%xmm8,<xmm13=%xmm15
7787pand %xmm8,%xmm15
7788
7789# qhasm: xmm13 ^= xmm8
7790# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
7791# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
7792pxor %xmm14,%xmm15
7793
7794# qhasm: xmm9 ^= xmm13
7795# asm 1: pxor <xmm13=int6464#16,<xmm9=int6464#13
7796# asm 2: pxor <xmm13=%xmm15,<xmm9=%xmm12
7797pxor %xmm15,%xmm12
7798
7799# qhasm: xmm10 = xmm14
7800# asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9
7801# asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8
7802movdqa %xmm11,%xmm8
7803
7804# qhasm: xmm10 ^= xmm13
7805# asm 1: pxor <xmm13=int6464#16,<xmm10=int6464#9
7806# asm 2: pxor <xmm13=%xmm15,<xmm10=%xmm8
7807pxor %xmm15,%xmm8
7808
7809# qhasm: xmm10 &= xmm8
7810# asm 1: pand <xmm8=int6464#15,<xmm10=int6464#9
7811# asm 2: pand <xmm8=%xmm14,<xmm10=%xmm8
7812pand %xmm14,%xmm8
7813
7814# qhasm: xmm9 ^= xmm10
7815# asm 1: pxor <xmm10=int6464#9,<xmm9=int6464#13
7816# asm 2: pxor <xmm10=%xmm8,<xmm9=%xmm12
7817pxor %xmm8,%xmm12
7818
7819# qhasm: xmm14 ^= xmm10
7820# asm 1: pxor <xmm10=int6464#9,<xmm14=int6464#12
7821# asm 2: pxor <xmm10=%xmm8,<xmm14=%xmm11
7822pxor %xmm8,%xmm11
7823
7824# qhasm: xmm14 &= xmm15
7825# asm 1: pand <xmm15=int6464#14,<xmm14=int6464#12
7826# asm 2: pand <xmm15=%xmm13,<xmm14=%xmm11
7827pand %xmm13,%xmm11
7828
7829# qhasm: xmm14 ^= xmm12
7830# asm 1: pxor <xmm12=int6464#11,<xmm14=int6464#12
7831# asm 2: pxor <xmm12=%xmm10,<xmm14=%xmm11
7832pxor %xmm10,%xmm11
7833
7834# qhasm: xmm12 = xmm6
7835# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#9
7836# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm8
7837movdqa %xmm6,%xmm8
7838
7839# qhasm: xmm8 = xmm5
7840# asm 1: movdqa <xmm5=int6464#6,>xmm8=int6464#10
7841# asm 2: movdqa <xmm5=%xmm5,>xmm8=%xmm9
7842movdqa %xmm5,%xmm9
7843
7844# qhasm: xmm10 = xmm15
7845# asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11
7846# asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10
7847movdqa %xmm13,%xmm10
7848
7849# qhasm: xmm10 ^= xmm14
7850# asm 1: pxor <xmm14=int6464#12,<xmm10=int6464#11
7851# asm 2: pxor <xmm14=%xmm11,<xmm10=%xmm10
7852pxor %xmm11,%xmm10
7853
7854# qhasm: xmm10 &= xmm6
7855# asm 1: pand <xmm6=int6464#7,<xmm10=int6464#11
7856# asm 2: pand <xmm6=%xmm6,<xmm10=%xmm10
7857pand %xmm6,%xmm10
7858
7859# qhasm: xmm6 ^= xmm5
7860# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
7861# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
7862pxor %xmm5,%xmm6
7863
7864# qhasm: xmm6 &= xmm14
7865# asm 1: pand <xmm14=int6464#12,<xmm6=int6464#7
7866# asm 2: pand <xmm14=%xmm11,<xmm6=%xmm6
7867pand %xmm11,%xmm6
7868
7869# qhasm: xmm5 &= xmm15
7870# asm 1: pand <xmm15=int6464#14,<xmm5=int6464#6
7871# asm 2: pand <xmm15=%xmm13,<xmm5=%xmm5
7872pand %xmm13,%xmm5
7873
7874# qhasm: xmm6 ^= xmm5
7875# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
7876# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
7877pxor %xmm5,%xmm6
7878
7879# qhasm: xmm5 ^= xmm10
7880# asm 1: pxor <xmm10=int6464#11,<xmm5=int6464#6
7881# asm 2: pxor <xmm10=%xmm10,<xmm5=%xmm5
7882pxor %xmm10,%xmm5
7883
7884# qhasm: xmm12 ^= xmm0
7885# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#9
7886# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm8
7887pxor %xmm0,%xmm8
7888
7889# qhasm: xmm8 ^= xmm3
7890# asm 1: pxor <xmm3=int6464#4,<xmm8=int6464#10
7891# asm 2: pxor <xmm3=%xmm3,<xmm8=%xmm9
7892pxor %xmm3,%xmm9
7893
7894# qhasm: xmm15 ^= xmm13
7895# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
7896# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
7897pxor %xmm15,%xmm13
7898
7899# qhasm: xmm14 ^= xmm9
7900# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
7901# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
7902pxor %xmm12,%xmm11
7903
7904# qhasm: xmm11 = xmm15
7905# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
7906# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
7907movdqa %xmm13,%xmm10
7908
7909# qhasm: xmm11 ^= xmm14
7910# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
7911# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
7912pxor %xmm11,%xmm10
7913
7914# qhasm: xmm11 &= xmm12
7915# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
7916# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
7917pand %xmm8,%xmm10
7918
7919# qhasm: xmm12 ^= xmm8
7920# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
7921# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
7922pxor %xmm9,%xmm8
7923
7924# qhasm: xmm12 &= xmm14
7925# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
7926# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
7927pand %xmm11,%xmm8
7928
7929# qhasm: xmm8 &= xmm15
7930# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
7931# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
7932pand %xmm13,%xmm9
7933
7934# qhasm: xmm8 ^= xmm12
7935# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
7936# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
7937pxor %xmm8,%xmm9
7938
7939# qhasm: xmm12 ^= xmm11
7940# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
7941# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
7942pxor %xmm10,%xmm8
7943
7944# qhasm: xmm10 = xmm13
7945# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
7946# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
7947movdqa %xmm15,%xmm10
7948
7949# qhasm: xmm10 ^= xmm9
7950# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
7951# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
7952pxor %xmm12,%xmm10
7953
7954# qhasm: xmm10 &= xmm0
7955# asm 1: pand <xmm0=int6464#1,<xmm10=int6464#11
7956# asm 2: pand <xmm0=%xmm0,<xmm10=%xmm10
7957pand %xmm0,%xmm10
7958
7959# qhasm: xmm0 ^= xmm3
7960# asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1
7961# asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0
7962pxor %xmm3,%xmm0
7963
7964# qhasm: xmm0 &= xmm9
7965# asm 1: pand <xmm9=int6464#13,<xmm0=int6464#1
7966# asm 2: pand <xmm9=%xmm12,<xmm0=%xmm0
7967pand %xmm12,%xmm0
7968
7969# qhasm: xmm3 &= xmm13
7970# asm 1: pand <xmm13=int6464#16,<xmm3=int6464#4
7971# asm 2: pand <xmm13=%xmm15,<xmm3=%xmm3
7972pand %xmm15,%xmm3
7973
7974# qhasm: xmm0 ^= xmm3
7975# asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1
7976# asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0
7977pxor %xmm3,%xmm0
7978
7979# qhasm: xmm3 ^= xmm10
7980# asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4
7981# asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3
7982pxor %xmm10,%xmm3
7983
7984# qhasm: xmm6 ^= xmm12
7985# asm 1: pxor <xmm12=int6464#9,<xmm6=int6464#7
7986# asm 2: pxor <xmm12=%xmm8,<xmm6=%xmm6
7987pxor %xmm8,%xmm6
7988
7989# qhasm: xmm0 ^= xmm12
7990# asm 1: pxor <xmm12=int6464#9,<xmm0=int6464#1
7991# asm 2: pxor <xmm12=%xmm8,<xmm0=%xmm0
7992pxor %xmm8,%xmm0
7993
7994# qhasm: xmm5 ^= xmm8
7995# asm 1: pxor <xmm8=int6464#10,<xmm5=int6464#6
7996# asm 2: pxor <xmm8=%xmm9,<xmm5=%xmm5
7997pxor %xmm9,%xmm5
7998
7999# qhasm: xmm3 ^= xmm8
8000# asm 1: pxor <xmm8=int6464#10,<xmm3=int6464#4
8001# asm 2: pxor <xmm8=%xmm9,<xmm3=%xmm3
8002pxor %xmm9,%xmm3
8003
8004# qhasm: xmm12 = xmm7
8005# asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#9
8006# asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm8
8007movdqa %xmm7,%xmm8
8008
8009# qhasm: xmm8 = xmm1
8010# asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10
8011# asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9
8012movdqa %xmm1,%xmm9
8013
8014# qhasm: xmm12 ^= xmm4
8015# asm 1: pxor <xmm4=int6464#5,<xmm12=int6464#9
8016# asm 2: pxor <xmm4=%xmm4,<xmm12=%xmm8
8017pxor %xmm4,%xmm8
8018
8019# qhasm: xmm8 ^= xmm2
8020# asm 1: pxor <xmm2=int6464#3,<xmm8=int6464#10
8021# asm 2: pxor <xmm2=%xmm2,<xmm8=%xmm9
8022pxor %xmm2,%xmm9
8023
8024# qhasm: xmm11 = xmm15
8025# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
8026# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
8027movdqa %xmm13,%xmm10
8028
8029# qhasm: xmm11 ^= xmm14
8030# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
8031# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
8032pxor %xmm11,%xmm10
8033
8034# qhasm: xmm11 &= xmm12
8035# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
8036# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
8037pand %xmm8,%xmm10
8038
8039# qhasm: xmm12 ^= xmm8
8040# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
8041# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
8042pxor %xmm9,%xmm8
8043
8044# qhasm: xmm12 &= xmm14
8045# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
8046# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
8047pand %xmm11,%xmm8
8048
8049# qhasm: xmm8 &= xmm15
8050# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
8051# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
8052pand %xmm13,%xmm9
8053
8054# qhasm: xmm8 ^= xmm12
8055# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
8056# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
8057pxor %xmm8,%xmm9
8058
8059# qhasm: xmm12 ^= xmm11
8060# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
8061# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
8062pxor %xmm10,%xmm8
8063
8064# qhasm: xmm10 = xmm13
8065# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
8066# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
8067movdqa %xmm15,%xmm10
8068
8069# qhasm: xmm10 ^= xmm9
8070# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
8071# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
8072pxor %xmm12,%xmm10
8073
8074# qhasm: xmm10 &= xmm4
8075# asm 1: pand <xmm4=int6464#5,<xmm10=int6464#11
8076# asm 2: pand <xmm4=%xmm4,<xmm10=%xmm10
8077pand %xmm4,%xmm10
8078
8079# qhasm: xmm4 ^= xmm2
8080# asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5
8081# asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4
8082pxor %xmm2,%xmm4
8083
8084# qhasm: xmm4 &= xmm9
8085# asm 1: pand <xmm9=int6464#13,<xmm4=int6464#5
8086# asm 2: pand <xmm9=%xmm12,<xmm4=%xmm4
8087pand %xmm12,%xmm4
8088
8089# qhasm: xmm2 &= xmm13
8090# asm 1: pand <xmm13=int6464#16,<xmm2=int6464#3
8091# asm 2: pand <xmm13=%xmm15,<xmm2=%xmm2
8092pand %xmm15,%xmm2
8093
8094# qhasm: xmm4 ^= xmm2
8095# asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5
8096# asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4
8097pxor %xmm2,%xmm4
8098
8099# qhasm: xmm2 ^= xmm10
8100# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3
8101# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2
8102pxor %xmm10,%xmm2
8103
8104# qhasm: xmm15 ^= xmm13
8105# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
8106# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
8107pxor %xmm15,%xmm13
8108
8109# qhasm: xmm14 ^= xmm9
8110# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
8111# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
8112pxor %xmm12,%xmm11
8113
8114# qhasm: xmm11 = xmm15
8115# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
8116# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
8117movdqa %xmm13,%xmm10
8118
8119# qhasm: xmm11 ^= xmm14
8120# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
8121# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
8122pxor %xmm11,%xmm10
8123
8124# qhasm: xmm11 &= xmm7
8125# asm 1: pand <xmm7=int6464#8,<xmm11=int6464#11
8126# asm 2: pand <xmm7=%xmm7,<xmm11=%xmm10
8127pand %xmm7,%xmm10
8128
8129# qhasm: xmm7 ^= xmm1
8130# asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8
8131# asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7
8132pxor %xmm1,%xmm7
8133
8134# qhasm: xmm7 &= xmm14
8135# asm 1: pand <xmm14=int6464#12,<xmm7=int6464#8
8136# asm 2: pand <xmm14=%xmm11,<xmm7=%xmm7
8137pand %xmm11,%xmm7
8138
8139# qhasm: xmm1 &= xmm15
8140# asm 1: pand <xmm15=int6464#14,<xmm1=int6464#2
8141# asm 2: pand <xmm15=%xmm13,<xmm1=%xmm1
8142pand %xmm13,%xmm1
8143
8144# qhasm: xmm7 ^= xmm1
8145# asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8
8146# asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7
8147pxor %xmm1,%xmm7
8148
8149# qhasm: xmm1 ^= xmm11
8150# asm 1: pxor <xmm11=int6464#11,<xmm1=int6464#2
8151# asm 2: pxor <xmm11=%xmm10,<xmm1=%xmm1
8152pxor %xmm10,%xmm1
8153
8154# qhasm: xmm7 ^= xmm12
8155# asm 1: pxor <xmm12=int6464#9,<xmm7=int6464#8
8156# asm 2: pxor <xmm12=%xmm8,<xmm7=%xmm7
8157pxor %xmm8,%xmm7
8158
8159# qhasm: xmm4 ^= xmm12
8160# asm 1: pxor <xmm12=int6464#9,<xmm4=int6464#5
8161# asm 2: pxor <xmm12=%xmm8,<xmm4=%xmm4
8162pxor %xmm8,%xmm4
8163
8164# qhasm: xmm1 ^= xmm8
8165# asm 1: pxor <xmm8=int6464#10,<xmm1=int6464#2
8166# asm 2: pxor <xmm8=%xmm9,<xmm1=%xmm1
8167pxor %xmm9,%xmm1
8168
8169# qhasm: xmm2 ^= xmm8
8170# asm 1: pxor <xmm8=int6464#10,<xmm2=int6464#3
8171# asm 2: pxor <xmm8=%xmm9,<xmm2=%xmm2
8172pxor %xmm9,%xmm2
8173
8174# qhasm: xmm7 ^= xmm0
8175# asm 1: pxor <xmm0=int6464#1,<xmm7=int6464#8
8176# asm 2: pxor <xmm0=%xmm0,<xmm7=%xmm7
8177pxor %xmm0,%xmm7
8178
8179# qhasm: xmm1 ^= xmm6
8180# asm 1: pxor <xmm6=int6464#7,<xmm1=int6464#2
8181# asm 2: pxor <xmm6=%xmm6,<xmm1=%xmm1
8182pxor %xmm6,%xmm1
8183
8184# qhasm: xmm4 ^= xmm7
8185# asm 1: pxor <xmm7=int6464#8,<xmm4=int6464#5
8186# asm 2: pxor <xmm7=%xmm7,<xmm4=%xmm4
8187pxor %xmm7,%xmm4
8188
8189# qhasm: xmm6 ^= xmm0
8190# asm 1: pxor <xmm0=int6464#1,<xmm6=int6464#7
8191# asm 2: pxor <xmm0=%xmm0,<xmm6=%xmm6
8192pxor %xmm0,%xmm6
8193
8194# qhasm: xmm0 ^= xmm1
8195# asm 1: pxor <xmm1=int6464#2,<xmm0=int6464#1
8196# asm 2: pxor <xmm1=%xmm1,<xmm0=%xmm0
8197pxor %xmm1,%xmm0
8198
8199# qhasm: xmm1 ^= xmm5
8200# asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2
8201# asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1
8202pxor %xmm5,%xmm1
8203
8204# qhasm: xmm5 ^= xmm2
8205# asm 1: pxor <xmm2=int6464#3,<xmm5=int6464#6
8206# asm 2: pxor <xmm2=%xmm2,<xmm5=%xmm5
8207pxor %xmm2,%xmm5
8208
8209# qhasm: xmm4 ^= xmm5
8210# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
8211# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
8212pxor %xmm5,%xmm4
8213
8214# qhasm: xmm2 ^= xmm3
8215# asm 1: pxor <xmm3=int6464#4,<xmm2=int6464#3
8216# asm 2: pxor <xmm3=%xmm3,<xmm2=%xmm2
8217pxor %xmm3,%xmm2
8218
8219# qhasm: xmm3 ^= xmm5
8220# asm 1: pxor <xmm5=int6464#6,<xmm3=int6464#4
8221# asm 2: pxor <xmm5=%xmm5,<xmm3=%xmm3
8222pxor %xmm5,%xmm3
8223
8224# qhasm: xmm6 ^= xmm3
8225# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
8226# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
8227pxor %xmm3,%xmm6
8228
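# What follows looks like the MixColumns step of this round: pshufd $0x93
# rotates each slice by one 32-bit word and pshufd $0x4E swaps its 64-bit
# halves, while the surrounding XORs accumulate the rotated slices into
# the linear MixColumns map.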
8229# qhasm: xmm8 = shuffle dwords of xmm0 by 0x93
8230# asm 1: pshufd $0x93,<xmm0=int6464#1,>xmm8=int6464#9
8231# asm 2: pshufd $0x93,<xmm0=%xmm0,>xmm8=%xmm8
8232pshufd $0x93,%xmm0,%xmm8
8233
8234# qhasm: xmm9 = shuffle dwords of xmm1 by 0x93
8235# asm 1: pshufd $0x93,<xmm1=int6464#2,>xmm9=int6464#10
8236# asm 2: pshufd $0x93,<xmm1=%xmm1,>xmm9=%xmm9
8237pshufd $0x93,%xmm1,%xmm9
8238
8239# qhasm: xmm10 = shuffle dwords of xmm4 by 0x93
8240# asm 1: pshufd $0x93,<xmm4=int6464#5,>xmm10=int6464#11
8241# asm 2: pshufd $0x93,<xmm4=%xmm4,>xmm10=%xmm10
8242pshufd $0x93,%xmm4,%xmm10
8243
8244# qhasm: xmm11 = shuffle dwords of xmm6 by 0x93
8245# asm 1: pshufd $0x93,<xmm6=int6464#7,>xmm11=int6464#12
8246# asm 2: pshufd $0x93,<xmm6=%xmm6,>xmm11=%xmm11
8247pshufd $0x93,%xmm6,%xmm11
8248
8249# qhasm: xmm12 = shuffle dwords of xmm3 by 0x93
8250# asm 1: pshufd $0x93,<xmm3=int6464#4,>xmm12=int6464#13
8251# asm 2: pshufd $0x93,<xmm3=%xmm3,>xmm12=%xmm12
8252pshufd $0x93,%xmm3,%xmm12
8253
8254# qhasm: xmm13 = shuffle dwords of xmm7 by 0x93
8255# asm 1: pshufd $0x93,<xmm7=int6464#8,>xmm13=int6464#14
8256# asm 2: pshufd $0x93,<xmm7=%xmm7,>xmm13=%xmm13
8257pshufd $0x93,%xmm7,%xmm13
8258
8259# qhasm: xmm14 = shuffle dwords of xmm2 by 0x93
8260# asm 1: pshufd $0x93,<xmm2=int6464#3,>xmm14=int6464#15
8261# asm 2: pshufd $0x93,<xmm2=%xmm2,>xmm14=%xmm14
8262pshufd $0x93,%xmm2,%xmm14
8263
8264# qhasm: xmm15 = shuffle dwords of xmm5 by 0x93
8265# asm 1: pshufd $0x93,<xmm5=int6464#6,>xmm15=int6464#16
8266# asm 2: pshufd $0x93,<xmm5=%xmm5,>xmm15=%xmm15
8267pshufd $0x93,%xmm5,%xmm15
8268
8269# qhasm: xmm0 ^= xmm8
8270# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
8271# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
8272pxor %xmm8,%xmm0
8273
8274# qhasm: xmm1 ^= xmm9
8275# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
8276# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
8277pxor %xmm9,%xmm1
8278
8279# qhasm: xmm4 ^= xmm10
8280# asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5
8281# asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4
8282pxor %xmm10,%xmm4
8283
8284# qhasm: xmm6 ^= xmm11
8285# asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7
8286# asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6
8287pxor %xmm11,%xmm6
8288
8289# qhasm: xmm3 ^= xmm12
8290# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4
8291# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3
8292pxor %xmm12,%xmm3
8293
8294# qhasm: xmm7 ^= xmm13
8295# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
8296# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
8297pxor %xmm13,%xmm7
8298
8299# qhasm: xmm2 ^= xmm14
8300# asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3
8301# asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2
8302pxor %xmm14,%xmm2
8303
8304# qhasm: xmm5 ^= xmm15
8305# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
8306# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
8307pxor %xmm15,%xmm5
8308
8309# qhasm: xmm8 ^= xmm5
8310# asm 1: pxor <xmm5=int6464#6,<xmm8=int6464#9
8311# asm 2: pxor <xmm5=%xmm5,<xmm8=%xmm8
8312pxor %xmm5,%xmm8
8313
8314# qhasm: xmm9 ^= xmm0
8315# asm 1: pxor <xmm0=int6464#1,<xmm9=int6464#10
8316# asm 2: pxor <xmm0=%xmm0,<xmm9=%xmm9
8317pxor %xmm0,%xmm9
8318
8319# qhasm: xmm10 ^= xmm1
8320# asm 1: pxor <xmm1=int6464#2,<xmm10=int6464#11
8321# asm 2: pxor <xmm1=%xmm1,<xmm10=%xmm10
8322pxor %xmm1,%xmm10
8323
8324# qhasm: xmm9 ^= xmm5
8325# asm 1: pxor <xmm5=int6464#6,<xmm9=int6464#10
8326# asm 2: pxor <xmm5=%xmm5,<xmm9=%xmm9
8327pxor %xmm5,%xmm9
8328
8329# qhasm: xmm11 ^= xmm4
8330# asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#12
8331# asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm11
8332pxor %xmm4,%xmm11
8333
8334# qhasm: xmm12 ^= xmm6
8335# asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#13
8336# asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm12
8337pxor %xmm6,%xmm12
8338
8339# qhasm: xmm13 ^= xmm3
8340# asm 1: pxor <xmm3=int6464#4,<xmm13=int6464#14
8341# asm 2: pxor <xmm3=%xmm3,<xmm13=%xmm13
8342pxor %xmm3,%xmm13
8343
8344# qhasm: xmm11 ^= xmm5
8345# asm 1: pxor <xmm5=int6464#6,<xmm11=int6464#12
8346# asm 2: pxor <xmm5=%xmm5,<xmm11=%xmm11
8347pxor %xmm5,%xmm11
8348
8349# qhasm: xmm14 ^= xmm7
8350# asm 1: pxor <xmm7=int6464#8,<xmm14=int6464#15
8351# asm 2: pxor <xmm7=%xmm7,<xmm14=%xmm14
8352pxor %xmm7,%xmm14
8353
8354# qhasm: xmm15 ^= xmm2
8355# asm 1: pxor <xmm2=int6464#3,<xmm15=int6464#16
8356# asm 2: pxor <xmm2=%xmm2,<xmm15=%xmm15
8357pxor %xmm2,%xmm15
8358
8359# qhasm: xmm12 ^= xmm5
8360# asm 1: pxor <xmm5=int6464#6,<xmm12=int6464#13
8361# asm 2: pxor <xmm5=%xmm5,<xmm12=%xmm12
8362pxor %xmm5,%xmm12
8363
8364# qhasm: xmm0 = shuffle dwords of xmm0 by 0x4E
8365# asm 1: pshufd $0x4E,<xmm0=int6464#1,>xmm0=int6464#1
8366# asm 2: pshufd $0x4E,<xmm0=%xmm0,>xmm0=%xmm0
8367pshufd $0x4E,%xmm0,%xmm0
8368
8369# qhasm: xmm1 = shuffle dwords of xmm1 by 0x4E
8370# asm 1: pshufd $0x4E,<xmm1=int6464#2,>xmm1=int6464#2
8371# asm 2: pshufd $0x4E,<xmm1=%xmm1,>xmm1=%xmm1
8372pshufd $0x4E,%xmm1,%xmm1
8373
8374# qhasm: xmm4 = shuffle dwords of xmm4 by 0x4E
8375# asm 1: pshufd $0x4E,<xmm4=int6464#5,>xmm4=int6464#5
8376# asm 2: pshufd $0x4E,<xmm4=%xmm4,>xmm4=%xmm4
8377pshufd $0x4E,%xmm4,%xmm4
8378
8379# qhasm: xmm6 = shuffle dwords of xmm6 by 0x4E
8380# asm 1: pshufd $0x4E,<xmm6=int6464#7,>xmm6=int6464#7
8381# asm 2: pshufd $0x4E,<xmm6=%xmm6,>xmm6=%xmm6
8382pshufd $0x4E,%xmm6,%xmm6
8383
8384# qhasm: xmm3 = shuffle dwords of xmm3 by 0x4E
8385# asm 1: pshufd $0x4E,<xmm3=int6464#4,>xmm3=int6464#4
8386# asm 2: pshufd $0x4E,<xmm3=%xmm3,>xmm3=%xmm3
8387pshufd $0x4E,%xmm3,%xmm3
8388
8389# qhasm: xmm7 = shuffle dwords of xmm7 by 0x4E
8390# asm 1: pshufd $0x4E,<xmm7=int6464#8,>xmm7=int6464#8
8391# asm 2: pshufd $0x4E,<xmm7=%xmm7,>xmm7=%xmm7
8392pshufd $0x4E,%xmm7,%xmm7
8393
8394# qhasm: xmm2 = shuffle dwords of xmm2 by 0x4E
8395# asm 1: pshufd $0x4E,<xmm2=int6464#3,>xmm2=int6464#3
8396# asm 2: pshufd $0x4E,<xmm2=%xmm2,>xmm2=%xmm2
8397pshufd $0x4E,%xmm2,%xmm2
8398
8399# qhasm: xmm5 = shuffle dwords of xmm5 by 0x4E
8400# asm 1: pshufd $0x4E,<xmm5=int6464#6,>xmm5=int6464#6
8401# asm 2: pshufd $0x4E,<xmm5=%xmm5,>xmm5=%xmm5
8402pshufd $0x4E,%xmm5,%xmm5
8403
8404# qhasm: xmm8 ^= xmm0
8405# asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9
8406# asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8
8407pxor %xmm0,%xmm8
8408
8409# qhasm: xmm9 ^= xmm1
8410# asm 1: pxor <xmm1=int6464#2,<xmm9=int6464#10
8411# asm 2: pxor <xmm1=%xmm1,<xmm9=%xmm9
8412pxor %xmm1,%xmm9
8413
8414# qhasm: xmm10 ^= xmm4
8415# asm 1: pxor <xmm4=int6464#5,<xmm10=int6464#11
8416# asm 2: pxor <xmm4=%xmm4,<xmm10=%xmm10
8417pxor %xmm4,%xmm10
8418
8419# qhasm: xmm11 ^= xmm6
8420# asm 1: pxor <xmm6=int6464#7,<xmm11=int6464#12
8421# asm 2: pxor <xmm6=%xmm6,<xmm11=%xmm11
8422pxor %xmm6,%xmm11
8423
8424# qhasm: xmm12 ^= xmm3
8425# asm 1: pxor <xmm3=int6464#4,<xmm12=int6464#13
8426# asm 2: pxor <xmm3=%xmm3,<xmm12=%xmm12
8427pxor %xmm3,%xmm12
8428
8429# qhasm: xmm13 ^= xmm7
8430# asm 1: pxor <xmm7=int6464#8,<xmm13=int6464#14
8431# asm 2: pxor <xmm7=%xmm7,<xmm13=%xmm13
8432pxor %xmm7,%xmm13
8433
8434# qhasm: xmm14 ^= xmm2
8435# asm 1: pxor <xmm2=int6464#3,<xmm14=int6464#15
8436# asm 2: pxor <xmm2=%xmm2,<xmm14=%xmm14
8437pxor %xmm2,%xmm14
8438
8439# qhasm: xmm15 ^= xmm5
8440# asm 1: pxor <xmm5=int6464#6,<xmm15=int6464#16
8441# asm 2: pxor <xmm5=%xmm5,<xmm15=%xmm15
8442pxor %xmm5,%xmm15
8443
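# AddRoundKey plus ShiftRows. Each bitsliced round key occupies 8 slices
# of 16 bytes (128 bytes per round), so the offsets c+896..c+1008 should
# correspond to round key 7 (896 = 7*128). The pshufb with the SR constant
# from the data section permutes bytes within each slice to effect
# ShiftRows.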
8444# qhasm: xmm8 ^= *(int128 *)(c + 896)
8445# asm 1: pxor 896(<c=int64#5),<xmm8=int6464#9
8446# asm 2: pxor 896(<c=%r8),<xmm8=%xmm8
8447pxor 896(%r8),%xmm8
8448
8449# qhasm: shuffle bytes of xmm8 by SR
8450# asm 1: pshufb SR,<xmm8=int6464#9
8451# asm 2: pshufb SR,<xmm8=%xmm8
8452pshufb SR,%xmm8
8453
8454# qhasm: xmm9 ^= *(int128 *)(c + 912)
8455# asm 1: pxor 912(<c=int64#5),<xmm9=int6464#10
8456# asm 2: pxor 912(<c=%r8),<xmm9=%xmm9
8457pxor 912(%r8),%xmm9
8458
8459# qhasm: shuffle bytes of xmm9 by SR
8460# asm 1: pshufb SR,<xmm9=int6464#10
8461# asm 2: pshufb SR,<xmm9=%xmm9
8462pshufb SR,%xmm9
8463
8464# qhasm: xmm10 ^= *(int128 *)(c + 928)
8465# asm 1: pxor 928(<c=int64#5),<xmm10=int6464#11
8466# asm 2: pxor 928(<c=%r8),<xmm10=%xmm10
8467pxor 928(%r8),%xmm10
8468
8469# qhasm: shuffle bytes of xmm10 by SR
8470# asm 1: pshufb SR,<xmm10=int6464#11
8471# asm 2: pshufb SR,<xmm10=%xmm10
8472pshufb SR,%xmm10
8473
8474# qhasm: xmm11 ^= *(int128 *)(c + 944)
8475# asm 1: pxor 944(<c=int64#5),<xmm11=int6464#12
8476# asm 2: pxor 944(<c=%r8),<xmm11=%xmm11
8477pxor 944(%r8),%xmm11
8478
8479# qhasm: shuffle bytes of xmm11 by SR
8480# asm 1: pshufb SR,<xmm11=int6464#12
8481# asm 2: pshufb SR,<xmm11=%xmm11
8482pshufb SR,%xmm11
8483
8484# qhasm: xmm12 ^= *(int128 *)(c + 960)
8485# asm 1: pxor 960(<c=int64#5),<xmm12=int6464#13
8486# asm 2: pxor 960(<c=%r8),<xmm12=%xmm12
8487pxor 960(%r8),%xmm12
8488
8489# qhasm: shuffle bytes of xmm12 by SR
8490# asm 1: pshufb SR,<xmm12=int6464#13
8491# asm 2: pshufb SR,<xmm12=%xmm12
8492pshufb SR,%xmm12
8493
8494# qhasm: xmm13 ^= *(int128 *)(c + 976)
8495# asm 1: pxor 976(<c=int64#5),<xmm13=int6464#14
8496# asm 2: pxor 976(<c=%r8),<xmm13=%xmm13
8497pxor 976(%r8),%xmm13
8498
8499# qhasm: shuffle bytes of xmm13 by SR
8500# asm 1: pshufb SR,<xmm13=int6464#14
8501# asm 2: pshufb SR,<xmm13=%xmm13
8502pshufb SR,%xmm13
8503
8504# qhasm: xmm14 ^= *(int128 *)(c + 992)
8505# asm 1: pxor 992(<c=int64#5),<xmm14=int6464#15
8506# asm 2: pxor 992(<c=%r8),<xmm14=%xmm14
8507pxor 992(%r8),%xmm14
8508
8509# qhasm: shuffle bytes of xmm14 by SR
8510# asm 1: pshufb SR,<xmm14=int6464#15
8511# asm 2: pshufb SR,<xmm14=%xmm14
8512pshufb SR,%xmm14
8513
8514# qhasm: xmm15 ^= *(int128 *)(c + 1008)
8515# asm 1: pxor 1008(<c=int64#5),<xmm15=int6464#16
8516# asm 2: pxor 1008(<c=%r8),<xmm15=%xmm15
8517pxor 1008(%r8),%xmm15
8518
8519# qhasm: shuffle bytes of xmm15 by SR
8520# asm 1: pshufb SR,<xmm15=int6464#16
8521# asm 2: pshufb SR,<xmm15=%xmm15
8522pshufb SR,%xmm15
8523
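# The XOR network below is presumably the input linear transform of the
# bitsliced S-box, changing basis before the nonlinear GF(2^8) inversion.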
8524# qhasm: xmm13 ^= xmm14
8525# asm 1: pxor <xmm14=int6464#15,<xmm13=int6464#14
8526# asm 2: pxor <xmm14=%xmm14,<xmm13=%xmm13
8527pxor %xmm14,%xmm13
8528
8529# qhasm: xmm10 ^= xmm9
8530# asm 1: pxor <xmm9=int6464#10,<xmm10=int6464#11
8531# asm 2: pxor <xmm9=%xmm9,<xmm10=%xmm10
8532pxor %xmm9,%xmm10
8533
8534# qhasm: xmm13 ^= xmm8
8535# asm 1: pxor <xmm8=int6464#9,<xmm13=int6464#14
8536# asm 2: pxor <xmm8=%xmm8,<xmm13=%xmm13
8537pxor %xmm8,%xmm13
8538
8539# qhasm: xmm14 ^= xmm10
8540# asm 1: pxor <xmm10=int6464#11,<xmm14=int6464#15
8541# asm 2: pxor <xmm10=%xmm10,<xmm14=%xmm14
8542pxor %xmm10,%xmm14
8543
8544# qhasm: xmm11 ^= xmm8
8545# asm 1: pxor <xmm8=int6464#9,<xmm11=int6464#12
8546# asm 2: pxor <xmm8=%xmm8,<xmm11=%xmm11
8547pxor %xmm8,%xmm11
8548
8549# qhasm: xmm14 ^= xmm11
8550# asm 1: pxor <xmm11=int6464#12,<xmm14=int6464#15
8551# asm 2: pxor <xmm11=%xmm11,<xmm14=%xmm14
8552pxor %xmm11,%xmm14
8553
8554# qhasm: xmm11 ^= xmm15
8555# asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#12
8556# asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm11
8557pxor %xmm15,%xmm11
8558
8559# qhasm: xmm11 ^= xmm12
8560# asm 1: pxor <xmm12=int6464#13,<xmm11=int6464#12
8561# asm 2: pxor <xmm12=%xmm12,<xmm11=%xmm11
8562pxor %xmm12,%xmm11
8563
8564# qhasm: xmm15 ^= xmm13
8565# asm 1: pxor <xmm13=int6464#14,<xmm15=int6464#16
8566# asm 2: pxor <xmm13=%xmm13,<xmm15=%xmm15
8567pxor %xmm13,%xmm15
8568
8569# qhasm: xmm11 ^= xmm9
8570# asm 1: pxor <xmm9=int6464#10,<xmm11=int6464#12
8571# asm 2: pxor <xmm9=%xmm9,<xmm11=%xmm11
8572pxor %xmm9,%xmm11
8573
8574# qhasm: xmm12 ^= xmm13
8575# asm 1: pxor <xmm13=int6464#14,<xmm12=int6464#13
8576# asm 2: pxor <xmm13=%xmm13,<xmm12=%xmm12
8577pxor %xmm13,%xmm12
8578
8579# qhasm: xmm10 ^= xmm15
8580# asm 1: pxor <xmm15=int6464#16,<xmm10=int6464#11
8581# asm 2: pxor <xmm15=%xmm15,<xmm10=%xmm10
8582pxor %xmm15,%xmm10
8583
8584# qhasm: xmm9 ^= xmm13
8585# asm 1: pxor <xmm13=int6464#14,<xmm9=int6464#10
8586# asm 2: pxor <xmm13=%xmm13,<xmm9=%xmm9
8587pxor %xmm13,%xmm9
8588
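# This appears to open the nonlinear portion of the S-box circuit for this
# round: copies into scratch registers, then the AND/OR/XOR gate sequence
# that computes the shared inversion.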
8589# qhasm: xmm3 = xmm15
8590# asm 1: movdqa <xmm15=int6464#16,>xmm3=int6464#1
8591# asm 2: movdqa <xmm15=%xmm15,>xmm3=%xmm0
8592movdqa %xmm15,%xmm0
8593
8594# qhasm: xmm2 = xmm9
8595# asm 1: movdqa <xmm9=int6464#10,>xmm2=int6464#2
8596# asm 2: movdqa <xmm9=%xmm9,>xmm2=%xmm1
8597movdqa %xmm9,%xmm1
8598
8599# qhasm: xmm1 = xmm13
8600# asm 1: movdqa <xmm13=int6464#14,>xmm1=int6464#3
8601# asm 2: movdqa <xmm13=%xmm13,>xmm1=%xmm2
8602movdqa %xmm13,%xmm2
8603
8604# qhasm: xmm5 = xmm10
8605# asm 1: movdqa <xmm10=int6464#11,>xmm5=int6464#4
8606# asm 2: movdqa <xmm10=%xmm10,>xmm5=%xmm3
8607movdqa %xmm10,%xmm3
8608
8609# qhasm: xmm4 = xmm14
8610# asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#5
8611# asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm4
8612movdqa %xmm14,%xmm4
8613
8614# qhasm: xmm3 ^= xmm12
8615# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#1
8616# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm0
8617pxor %xmm12,%xmm0
8618
8619# qhasm: xmm2 ^= xmm10
8620# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#2
8621# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm1
8622pxor %xmm10,%xmm1
8623
8624# qhasm: xmm1 ^= xmm11
8625# asm 1: pxor <xmm11=int6464#12,<xmm1=int6464#3
8626# asm 2: pxor <xmm11=%xmm11,<xmm1=%xmm2
8627pxor %xmm11,%xmm2
8628
8629# qhasm: xmm5 ^= xmm12
8630# asm 1: pxor <xmm12=int6464#13,<xmm5=int6464#4
8631# asm 2: pxor <xmm12=%xmm12,<xmm5=%xmm3
8632pxor %xmm12,%xmm3
8633
8634# qhasm: xmm4 ^= xmm8
8635# asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#5
8636# asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm4
8637pxor %xmm8,%xmm4
8638
8639# qhasm: xmm6 = xmm3
8640# asm 1: movdqa <xmm3=int6464#1,>xmm6=int6464#6
8641# asm 2: movdqa <xmm3=%xmm0,>xmm6=%xmm5
8642movdqa %xmm0,%xmm5
8643
8644# qhasm: xmm0 = xmm2
8645# asm 1: movdqa <xmm2=int6464#2,>xmm0=int6464#7
8646# asm 2: movdqa <xmm2=%xmm1,>xmm0=%xmm6
8647movdqa %xmm1,%xmm6
8648
8649# qhasm: xmm7 = xmm3
8650# asm 1: movdqa <xmm3=int6464#1,>xmm7=int6464#8
8651# asm 2: movdqa <xmm3=%xmm0,>xmm7=%xmm7
8652movdqa %xmm0,%xmm7
8653
8654# qhasm: xmm2 |= xmm1
8655# asm 1: por <xmm1=int6464#3,<xmm2=int6464#2
8656# asm 2: por <xmm1=%xmm2,<xmm2=%xmm1
8657por %xmm2,%xmm1
8658
8659# qhasm: xmm3 |= xmm4
8660# asm 1: por <xmm4=int6464#5,<xmm3=int6464#1
8661# asm 2: por <xmm4=%xmm4,<xmm3=%xmm0
8662por %xmm4,%xmm0
8663
8664# qhasm: xmm7 ^= xmm0
8665# asm 1: pxor <xmm0=int6464#7,<xmm7=int6464#8
8666# asm 2: pxor <xmm0=%xmm6,<xmm7=%xmm7
8667pxor %xmm6,%xmm7
8668
8669# qhasm: xmm6 &= xmm4
8670# asm 1: pand <xmm4=int6464#5,<xmm6=int6464#6
8671# asm 2: pand <xmm4=%xmm4,<xmm6=%xmm5
8672pand %xmm4,%xmm5
8673
8674# qhasm: xmm0 &= xmm1
8675# asm 1: pand <xmm1=int6464#3,<xmm0=int6464#7
8676# asm 2: pand <xmm1=%xmm2,<xmm0=%xmm6
8677pand %xmm2,%xmm6
8678
8679# qhasm: xmm4 ^= xmm1
8680# asm 1: pxor <xmm1=int6464#3,<xmm4=int6464#5
8681# asm 2: pxor <xmm1=%xmm2,<xmm4=%xmm4
8682pxor %xmm2,%xmm4
8683
8684# qhasm: xmm7 &= xmm4
8685# asm 1: pand <xmm4=int6464#5,<xmm7=int6464#8
8686# asm 2: pand <xmm4=%xmm4,<xmm7=%xmm7
8687pand %xmm4,%xmm7
8688
8689# qhasm: xmm4 = xmm11
8690# asm 1: movdqa <xmm11=int6464#12,>xmm4=int6464#3
8691# asm 2: movdqa <xmm11=%xmm11,>xmm4=%xmm2
8692movdqa %xmm11,%xmm2
8693
8694# qhasm: xmm4 ^= xmm8
8695# asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#3
8696# asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm2
8697pxor %xmm8,%xmm2
8698
8699# qhasm: xmm5 &= xmm4
8700# asm 1: pand <xmm4=int6464#3,<xmm5=int6464#4
8701# asm 2: pand <xmm4=%xmm2,<xmm5=%xmm3
8702pand %xmm2,%xmm3
8703
8704# qhasm: xmm3 ^= xmm5
8705# asm 1: pxor <xmm5=int6464#4,<xmm3=int6464#1
8706# asm 2: pxor <xmm5=%xmm3,<xmm3=%xmm0
8707pxor %xmm3,%xmm0
8708
8709# qhasm: xmm2 ^= xmm5
8710# asm 1: pxor <xmm5=int6464#4,<xmm2=int6464#2
8711# asm 2: pxor <xmm5=%xmm3,<xmm2=%xmm1
8712pxor %xmm3,%xmm1
8713
8714# qhasm: xmm5 = xmm15
8715# asm 1: movdqa <xmm15=int6464#16,>xmm5=int6464#3
8716# asm 2: movdqa <xmm15=%xmm15,>xmm5=%xmm2
8717movdqa %xmm15,%xmm2
8718
8719# qhasm: xmm5 ^= xmm9
8720# asm 1: pxor <xmm9=int6464#10,<xmm5=int6464#3
8721# asm 2: pxor <xmm9=%xmm9,<xmm5=%xmm2
8722pxor %xmm9,%xmm2
8723
8724# qhasm: xmm4 = xmm13
8725# asm 1: movdqa <xmm13=int6464#14,>xmm4=int6464#4
8726# asm 2: movdqa <xmm13=%xmm13,>xmm4=%xmm3
8727movdqa %xmm13,%xmm3
8728
8729# qhasm: xmm1 = xmm5
8730# asm 1: movdqa <xmm5=int6464#3,>xmm1=int6464#5
8731# asm 2: movdqa <xmm5=%xmm2,>xmm1=%xmm4
8732movdqa %xmm2,%xmm4
8733
8734# qhasm: xmm4 ^= xmm14
8735# asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#4
8736# asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm3
8737pxor %xmm14,%xmm3
8738
8739# qhasm: xmm1 |= xmm4
8740# asm 1: por <xmm4=int6464#4,<xmm1=int6464#5
8741# asm 2: por <xmm4=%xmm3,<xmm1=%xmm4
8742por %xmm3,%xmm4
8743
8744# qhasm: xmm5 &= xmm4
8745# asm 1: pand <xmm4=int6464#4,<xmm5=int6464#3
8746# asm 2: pand <xmm4=%xmm3,<xmm5=%xmm2
8747pand %xmm3,%xmm2
8748
8749# qhasm: xmm0 ^= xmm5
8750# asm 1: pxor <xmm5=int6464#3,<xmm0=int6464#7
8751# asm 2: pxor <xmm5=%xmm2,<xmm0=%xmm6
8752pxor %xmm2,%xmm6
8753
8754# qhasm: xmm3 ^= xmm7
8755# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#1
8756# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm0
8757pxor %xmm7,%xmm0
8758
8759# qhasm: xmm2 ^= xmm6
8760# asm 1: pxor <xmm6=int6464#6,<xmm2=int6464#2
8761# asm 2: pxor <xmm6=%xmm5,<xmm2=%xmm1
8762pxor %xmm5,%xmm1
8763
8764# qhasm: xmm1 ^= xmm7
8765# asm 1: pxor <xmm7=int6464#8,<xmm1=int6464#5
8766# asm 2: pxor <xmm7=%xmm7,<xmm1=%xmm4
8767pxor %xmm7,%xmm4
8768
8769# qhasm: xmm0 ^= xmm6
8770# asm 1: pxor <xmm6=int6464#6,<xmm0=int6464#7
8771# asm 2: pxor <xmm6=%xmm5,<xmm0=%xmm6
8772pxor %xmm5,%xmm6
8773
8774# qhasm: xmm1 ^= xmm6
8775# asm 1: pxor <xmm6=int6464#6,<xmm1=int6464#5
8776# asm 2: pxor <xmm6=%xmm5,<xmm1=%xmm4
8777pxor %xmm5,%xmm4
8778
8779# qhasm: xmm4 = xmm10
8780# asm 1: movdqa <xmm10=int6464#11,>xmm4=int6464#3
8781# asm 2: movdqa <xmm10=%xmm10,>xmm4=%xmm2
8782movdqa %xmm10,%xmm2
8783
8784# qhasm: xmm5 = xmm12
8785# asm 1: movdqa <xmm12=int6464#13,>xmm5=int6464#4
8786# asm 2: movdqa <xmm12=%xmm12,>xmm5=%xmm3
8787movdqa %xmm12,%xmm3
8788
8789# qhasm: xmm6 = xmm9
8790# asm 1: movdqa <xmm9=int6464#10,>xmm6=int6464#6
8791# asm 2: movdqa <xmm9=%xmm9,>xmm6=%xmm5
8792movdqa %xmm9,%xmm5
8793
8794# qhasm: xmm7 = xmm15
8795# asm 1: movdqa <xmm15=int6464#16,>xmm7=int6464#8
8796# asm 2: movdqa <xmm15=%xmm15,>xmm7=%xmm7
8797movdqa %xmm15,%xmm7
8798
8799# qhasm: xmm4 &= xmm11
8800# asm 1: pand <xmm11=int6464#12,<xmm4=int6464#3
8801# asm 2: pand <xmm11=%xmm11,<xmm4=%xmm2
8802pand %xmm11,%xmm2
8803
8804# qhasm: xmm5 &= xmm8
8805# asm 1: pand <xmm8=int6464#9,<xmm5=int6464#4
8806# asm 2: pand <xmm8=%xmm8,<xmm5=%xmm3
8807pand %xmm8,%xmm3
8808
8809# qhasm: xmm6 &= xmm13
8810# asm 1: pand <xmm13=int6464#14,<xmm6=int6464#6
8811# asm 2: pand <xmm13=%xmm13,<xmm6=%xmm5
8812pand %xmm13,%xmm5
8813
8814# qhasm: xmm7 |= xmm14
8815# asm 1: por <xmm14=int6464#15,<xmm7=int6464#8
8816# asm 2: por <xmm14=%xmm14,<xmm7=%xmm7
8817por %xmm14,%xmm7
8818
8819# qhasm: xmm3 ^= xmm4
8820# asm 1: pxor <xmm4=int6464#3,<xmm3=int6464#1
8821# asm 2: pxor <xmm4=%xmm2,<xmm3=%xmm0
8822pxor %xmm2,%xmm0
8823
8824# qhasm: xmm2 ^= xmm5
8825# asm 1: pxor <xmm5=int6464#4,<xmm2=int6464#2
8826# asm 2: pxor <xmm5=%xmm3,<xmm2=%xmm1
8827pxor %xmm3,%xmm1
8828
8829# qhasm: xmm1 ^= xmm6
8830# asm 1: pxor <xmm6=int6464#6,<xmm1=int6464#5
8831# asm 2: pxor <xmm6=%xmm5,<xmm1=%xmm4
8832pxor %xmm5,%xmm4
8833
8834# qhasm: xmm0 ^= xmm7
8835# asm 1: pxor <xmm7=int6464#8,<xmm0=int6464#7
8836# asm 2: pxor <xmm7=%xmm7,<xmm0=%xmm6
8837pxor %xmm7,%xmm6
8838
8839# qhasm: xmm4 = xmm3
8840# asm 1: movdqa <xmm3=int6464#1,>xmm4=int6464#3
8841# asm 2: movdqa <xmm3=%xmm0,>xmm4=%xmm2
8842movdqa %xmm0,%xmm2
8843
8844# qhasm: xmm4 ^= xmm2
8845# asm 1: pxor <xmm2=int6464#2,<xmm4=int6464#3
8846# asm 2: pxor <xmm2=%xmm1,<xmm4=%xmm2
8847pxor %xmm1,%xmm2
8848
8849# qhasm: xmm3 &= xmm1
8850# asm 1: pand <xmm1=int6464#5,<xmm3=int6464#1
8851# asm 2: pand <xmm1=%xmm4,<xmm3=%xmm0
8852pand %xmm4,%xmm0
8853
8854# qhasm: xmm6 = xmm0
8855# asm 1: movdqa <xmm0=int6464#7,>xmm6=int6464#4
8856# asm 2: movdqa <xmm0=%xmm6,>xmm6=%xmm3
8857movdqa %xmm6,%xmm3
8858
8859# qhasm: xmm6 ^= xmm3
8860# asm 1: pxor <xmm3=int6464#1,<xmm6=int6464#4
8861# asm 2: pxor <xmm3=%xmm0,<xmm6=%xmm3
8862pxor %xmm0,%xmm3
8863
8864# qhasm: xmm7 = xmm4
8865# asm 1: movdqa <xmm4=int6464#3,>xmm7=int6464#6
8866# asm 2: movdqa <xmm4=%xmm2,>xmm7=%xmm5
8867movdqa %xmm2,%xmm5
8868
8869# qhasm: xmm7 &= xmm6
8870# asm 1: pand <xmm6=int6464#4,<xmm7=int6464#6
8871# asm 2: pand <xmm6=%xmm3,<xmm7=%xmm5
8872pand %xmm3,%xmm5
8873
8874# qhasm: xmm7 ^= xmm2
8875# asm 1: pxor <xmm2=int6464#2,<xmm7=int6464#6
8876# asm 2: pxor <xmm2=%xmm1,<xmm7=%xmm5
8877pxor %xmm1,%xmm5
8878
8879# qhasm: xmm5 = xmm1
8880# asm 1: movdqa <xmm1=int6464#5,>xmm5=int6464#8
8881# asm 2: movdqa <xmm1=%xmm4,>xmm5=%xmm7
8882movdqa %xmm4,%xmm7
8883
8884# qhasm: xmm5 ^= xmm0
8885# asm 1: pxor <xmm0=int6464#7,<xmm5=int6464#8
8886# asm 2: pxor <xmm0=%xmm6,<xmm5=%xmm7
8887pxor %xmm6,%xmm7
8888
8889# qhasm: xmm3 ^= xmm2
8890# asm 1: pxor <xmm2=int6464#2,<xmm3=int6464#1
8891# asm 2: pxor <xmm2=%xmm1,<xmm3=%xmm0
8892pxor %xmm1,%xmm0
8893
8894# qhasm: xmm5 &= xmm3
8895# asm 1: pand <xmm3=int6464#1,<xmm5=int6464#8
8896# asm 2: pand <xmm3=%xmm0,<xmm5=%xmm7
8897pand %xmm0,%xmm7
8898
8899# qhasm: xmm5 ^= xmm0
8900# asm 1: pxor <xmm0=int6464#7,<xmm5=int6464#8
8901# asm 2: pxor <xmm0=%xmm6,<xmm5=%xmm7
8902pxor %xmm6,%xmm7
8903
8904# qhasm: xmm1 ^= xmm5
8905# asm 1: pxor <xmm5=int6464#8,<xmm1=int6464#5
8906# asm 2: pxor <xmm5=%xmm7,<xmm1=%xmm4
8907pxor %xmm7,%xmm4
8908
8909# qhasm: xmm2 = xmm6
8910# asm 1: movdqa <xmm6=int6464#4,>xmm2=int6464#1
8911# asm 2: movdqa <xmm6=%xmm3,>xmm2=%xmm0
8912movdqa %xmm3,%xmm0
8913
8914# qhasm: xmm2 ^= xmm5
8915# asm 1: pxor <xmm5=int6464#8,<xmm2=int6464#1
8916# asm 2: pxor <xmm5=%xmm7,<xmm2=%xmm0
8917pxor %xmm7,%xmm0
8918
8919# qhasm: xmm2 &= xmm0
8920# asm 1: pand <xmm0=int6464#7,<xmm2=int6464#1
8921# asm 2: pand <xmm0=%xmm6,<xmm2=%xmm0
8922pand %xmm6,%xmm0
8923
8924# qhasm: xmm1 ^= xmm2
8925# asm 1: pxor <xmm2=int6464#1,<xmm1=int6464#5
8926# asm 2: pxor <xmm2=%xmm0,<xmm1=%xmm4
8927pxor %xmm0,%xmm4
8928
8929# qhasm: xmm6 ^= xmm2
8930# asm 1: pxor <xmm2=int6464#1,<xmm6=int6464#4
8931# asm 2: pxor <xmm2=%xmm0,<xmm6=%xmm3
8932pxor %xmm0,%xmm3
8933
8934# qhasm: xmm6 &= xmm7
8935# asm 1: pand <xmm7=int6464#6,<xmm6=int6464#4
8936# asm 2: pand <xmm7=%xmm5,<xmm6=%xmm3
8937pand %xmm5,%xmm3
8938
8939# qhasm: xmm6 ^= xmm4
8940# asm 1: pxor <xmm4=int6464#3,<xmm6=int6464#4
8941# asm 2: pxor <xmm4=%xmm2,<xmm6=%xmm3
8942pxor %xmm2,%xmm3
8943
8944# qhasm: xmm4 = xmm14
8945# asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#1
8946# asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm0
8947movdqa %xmm14,%xmm0
8948
8949# qhasm: xmm0 = xmm13
8950# asm 1: movdqa <xmm13=int6464#14,>xmm0=int6464#2
8951# asm 2: movdqa <xmm13=%xmm13,>xmm0=%xmm1
8952movdqa %xmm13,%xmm1
8953
8954# qhasm: xmm2 = xmm7
8955# asm 1: movdqa <xmm7=int6464#6,>xmm2=int6464#3
8956# asm 2: movdqa <xmm7=%xmm5,>xmm2=%xmm2
8957movdqa %xmm5,%xmm2
8958
8959# qhasm: xmm2 ^= xmm6
8960# asm 1: pxor <xmm6=int6464#4,<xmm2=int6464#3
8961# asm 2: pxor <xmm6=%xmm3,<xmm2=%xmm2
8962pxor %xmm3,%xmm2
8963
8964# qhasm: xmm2 &= xmm14
8965# asm 1: pand <xmm14=int6464#15,<xmm2=int6464#3
8966# asm 2: pand <xmm14=%xmm14,<xmm2=%xmm2
8967pand %xmm14,%xmm2
8968
8969# qhasm: xmm14 ^= xmm13
8970# asm 1: pxor <xmm13=int6464#14,<xmm14=int6464#15
8971# asm 2: pxor <xmm13=%xmm13,<xmm14=%xmm14
8972pxor %xmm13,%xmm14
8973
8974# qhasm: xmm14 &= xmm6
8975# asm 1: pand <xmm6=int6464#4,<xmm14=int6464#15
8976# asm 2: pand <xmm6=%xmm3,<xmm14=%xmm14
8977pand %xmm3,%xmm14
8978
8979# qhasm: xmm13 &= xmm7
8980# asm 1: pand <xmm7=int6464#6,<xmm13=int6464#14
8981# asm 2: pand <xmm7=%xmm5,<xmm13=%xmm13
8982pand %xmm5,%xmm13
8983
8984# qhasm: xmm14 ^= xmm13
8985# asm 1: pxor <xmm13=int6464#14,<xmm14=int6464#15
8986# asm 2: pxor <xmm13=%xmm13,<xmm14=%xmm14
8987pxor %xmm13,%xmm14
8988
8989# qhasm: xmm13 ^= xmm2
8990# asm 1: pxor <xmm2=int6464#3,<xmm13=int6464#14
8991# asm 2: pxor <xmm2=%xmm2,<xmm13=%xmm13
8992pxor %xmm2,%xmm13
8993
8994# qhasm: xmm4 ^= xmm8
8995# asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#1
8996# asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm0
8997pxor %xmm8,%xmm0
8998
8999# qhasm: xmm0 ^= xmm11
9000# asm 1: pxor <xmm11=int6464#12,<xmm0=int6464#2
9001# asm 2: pxor <xmm11=%xmm11,<xmm0=%xmm1
9002pxor %xmm11,%xmm1
9003
9004# qhasm: xmm7 ^= xmm5
9005# asm 1: pxor <xmm5=int6464#8,<xmm7=int6464#6
9006# asm 2: pxor <xmm5=%xmm7,<xmm7=%xmm5
9007pxor %xmm7,%xmm5
9008
9009# qhasm: xmm6 ^= xmm1
9010# asm 1: pxor <xmm1=int6464#5,<xmm6=int6464#4
9011# asm 2: pxor <xmm1=%xmm4,<xmm6=%xmm3
9012pxor %xmm4,%xmm3
9013
9014# qhasm: xmm3 = xmm7
9015# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
9016# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
9017movdqa %xmm5,%xmm2
9018
9019# qhasm: xmm3 ^= xmm6
9020# asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3
9021# asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2
9022pxor %xmm3,%xmm2
9023
9024# qhasm: xmm3 &= xmm4
9025# asm 1: pand <xmm4=int6464#1,<xmm3=int6464#3
9026# asm 2: pand <xmm4=%xmm0,<xmm3=%xmm2
9027pand %xmm0,%xmm2
9028
9029# qhasm: xmm4 ^= xmm0
9030# asm 1: pxor <xmm0=int6464#2,<xmm4=int6464#1
9031# asm 2: pxor <xmm0=%xmm1,<xmm4=%xmm0
9032pxor %xmm1,%xmm0
9033
9034# qhasm: xmm4 &= xmm6
9035# asm 1: pand <xmm6=int6464#4,<xmm4=int6464#1
9036# asm 2: pand <xmm6=%xmm3,<xmm4=%xmm0
9037pand %xmm3,%xmm0
9038
9039# qhasm: xmm0 &= xmm7
9040# asm 1: pand <xmm7=int6464#6,<xmm0=int6464#2
9041# asm 2: pand <xmm7=%xmm5,<xmm0=%xmm1
9042pand %xmm5,%xmm1
9043
9044# qhasm: xmm0 ^= xmm4
9045# asm 1: pxor <xmm4=int6464#1,<xmm0=int6464#2
9046# asm 2: pxor <xmm4=%xmm0,<xmm0=%xmm1
9047pxor %xmm0,%xmm1
9048
9049# qhasm: xmm4 ^= xmm3
9050# asm 1: pxor <xmm3=int6464#3,<xmm4=int6464#1
9051# asm 2: pxor <xmm3=%xmm2,<xmm4=%xmm0
9052pxor %xmm2,%xmm0
9053
9054# qhasm: xmm2 = xmm5
9055# asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3
9056# asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2
9057movdqa %xmm7,%xmm2
9058
9059# qhasm: xmm2 ^= xmm1
9060# asm 1: pxor <xmm1=int6464#5,<xmm2=int6464#3
9061# asm 2: pxor <xmm1=%xmm4,<xmm2=%xmm2
9062pxor %xmm4,%xmm2
9063
9064# qhasm: xmm2 &= xmm8
9065# asm 1: pand <xmm8=int6464#9,<xmm2=int6464#3
9066# asm 2: pand <xmm8=%xmm8,<xmm2=%xmm2
9067pand %xmm8,%xmm2
9068
9069# qhasm: xmm8 ^= xmm11
9070# asm 1: pxor <xmm11=int6464#12,<xmm8=int6464#9
9071# asm 2: pxor <xmm11=%xmm11,<xmm8=%xmm8
9072pxor %xmm11,%xmm8
9073
9074# qhasm: xmm8 &= xmm1
9075# asm 1: pand <xmm1=int6464#5,<xmm8=int6464#9
9076# asm 2: pand <xmm1=%xmm4,<xmm8=%xmm8
9077pand %xmm4,%xmm8
9078
9079# qhasm: xmm11 &= xmm5
9080# asm 1: pand <xmm5=int6464#8,<xmm11=int6464#12
9081# asm 2: pand <xmm5=%xmm7,<xmm11=%xmm11
9082pand %xmm7,%xmm11
9083
9084# qhasm: xmm8 ^= xmm11
9085# asm 1: pxor <xmm11=int6464#12,<xmm8=int6464#9
9086# asm 2: pxor <xmm11=%xmm11,<xmm8=%xmm8
9087pxor %xmm11,%xmm8
9088
9089# qhasm: xmm11 ^= xmm2
9090# asm 1: pxor <xmm2=int6464#3,<xmm11=int6464#12
9091# asm 2: pxor <xmm2=%xmm2,<xmm11=%xmm11
9092pxor %xmm2,%xmm11
9093
9094# qhasm: xmm14 ^= xmm4
9095# asm 1: pxor <xmm4=int6464#1,<xmm14=int6464#15
9096# asm 2: pxor <xmm4=%xmm0,<xmm14=%xmm14
9097pxor %xmm0,%xmm14
9098
9099# qhasm: xmm8 ^= xmm4
9100# asm 1: pxor <xmm4=int6464#1,<xmm8=int6464#9
9101# asm 2: pxor <xmm4=%xmm0,<xmm8=%xmm8
9102pxor %xmm0,%xmm8
9103
9104# qhasm: xmm13 ^= xmm0
9105# asm 1: pxor <xmm0=int6464#2,<xmm13=int6464#14
9106# asm 2: pxor <xmm0=%xmm1,<xmm13=%xmm13
9107pxor %xmm1,%xmm13
9108
9109# qhasm: xmm11 ^= xmm0
9110# asm 1: pxor <xmm0=int6464#2,<xmm11=int6464#12
9111# asm 2: pxor <xmm0=%xmm1,<xmm11=%xmm11
9112pxor %xmm1,%xmm11
9113
9114# qhasm: xmm4 = xmm15
9115# asm 1: movdqa <xmm15=int6464#16,>xmm4=int6464#1
9116# asm 2: movdqa <xmm15=%xmm15,>xmm4=%xmm0
9117movdqa %xmm15,%xmm0
9118
9119# qhasm: xmm0 = xmm9
9120# asm 1: movdqa <xmm9=int6464#10,>xmm0=int6464#2
9121# asm 2: movdqa <xmm9=%xmm9,>xmm0=%xmm1
9122movdqa %xmm9,%xmm1
9123
9124# qhasm: xmm4 ^= xmm12
9125# asm 1: pxor <xmm12=int6464#13,<xmm4=int6464#1
9126# asm 2: pxor <xmm12=%xmm12,<xmm4=%xmm0
9127pxor %xmm12,%xmm0
9128
9129# qhasm: xmm0 ^= xmm10
9130# asm 1: pxor <xmm10=int6464#11,<xmm0=int6464#2
9131# asm 2: pxor <xmm10=%xmm10,<xmm0=%xmm1
9132pxor %xmm10,%xmm1
9133
9134# qhasm: xmm3 = xmm7
9135# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
9136# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
9137movdqa %xmm5,%xmm2
9138
9139# qhasm: xmm3 ^= xmm6
9140# asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3
9141# asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2
9142pxor %xmm3,%xmm2
9143
9144# qhasm: xmm3 &= xmm4
9145# asm 1: pand <xmm4=int6464#1,<xmm3=int6464#3
9146# asm 2: pand <xmm4=%xmm0,<xmm3=%xmm2
9147pand %xmm0,%xmm2
9148
9149# qhasm: xmm4 ^= xmm0
9150# asm 1: pxor <xmm0=int6464#2,<xmm4=int6464#1
9151# asm 2: pxor <xmm0=%xmm1,<xmm4=%xmm0
9152pxor %xmm1,%xmm0
9153
9154# qhasm: xmm4 &= xmm6
9155# asm 1: pand <xmm6=int6464#4,<xmm4=int6464#1
9156# asm 2: pand <xmm6=%xmm3,<xmm4=%xmm0
9157pand %xmm3,%xmm0
9158
9159# qhasm: xmm0 &= xmm7
9160# asm 1: pand <xmm7=int6464#6,<xmm0=int6464#2
9161# asm 2: pand <xmm7=%xmm5,<xmm0=%xmm1
9162pand %xmm5,%xmm1
9163
9164# qhasm: xmm0 ^= xmm4
9165# asm 1: pxor <xmm4=int6464#1,<xmm0=int6464#2
9166# asm 2: pxor <xmm4=%xmm0,<xmm0=%xmm1
9167pxor %xmm0,%xmm1
9168
9169# qhasm: xmm4 ^= xmm3
9170# asm 1: pxor <xmm3=int6464#3,<xmm4=int6464#1
9171# asm 2: pxor <xmm3=%xmm2,<xmm4=%xmm0
9172pxor %xmm2,%xmm0
9173
9174# qhasm: xmm2 = xmm5
9175# asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3
9176# asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2
9177movdqa %xmm7,%xmm2
9178
9179# qhasm: xmm2 ^= xmm1
9180# asm 1: pxor <xmm1=int6464#5,<xmm2=int6464#3
9181# asm 2: pxor <xmm1=%xmm4,<xmm2=%xmm2
9182pxor %xmm4,%xmm2
9183
9184# qhasm: xmm2 &= xmm12
9185# asm 1: pand <xmm12=int6464#13,<xmm2=int6464#3
9186# asm 2: pand <xmm12=%xmm12,<xmm2=%xmm2
9187pand %xmm12,%xmm2
9188
9189# qhasm: xmm12 ^= xmm10
9190# asm 1: pxor <xmm10=int6464#11,<xmm12=int6464#13
9191# asm 2: pxor <xmm10=%xmm10,<xmm12=%xmm12
9192pxor %xmm10,%xmm12
9193
9194# qhasm: xmm12 &= xmm1
9195# asm 1: pand <xmm1=int6464#5,<xmm12=int6464#13
9196# asm 2: pand <xmm1=%xmm4,<xmm12=%xmm12
9197pand %xmm4,%xmm12
9198
9199# qhasm: xmm10 &= xmm5
9200# asm 1: pand <xmm5=int6464#8,<xmm10=int6464#11
9201# asm 2: pand <xmm5=%xmm7,<xmm10=%xmm10
9202pand %xmm7,%xmm10
9203
9204# qhasm: xmm12 ^= xmm10
9205# asm 1: pxor <xmm10=int6464#11,<xmm12=int6464#13
9206# asm 2: pxor <xmm10=%xmm10,<xmm12=%xmm12
9207pxor %xmm10,%xmm12
9208
9209# qhasm: xmm10 ^= xmm2
9210# asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#11
9211# asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm10
9212pxor %xmm2,%xmm10
9213
9214# qhasm: xmm7 ^= xmm5
9215# asm 1: pxor <xmm5=int6464#8,<xmm7=int6464#6
9216# asm 2: pxor <xmm5=%xmm7,<xmm7=%xmm5
9217pxor %xmm7,%xmm5
9218
9219# qhasm: xmm6 ^= xmm1
9220# asm 1: pxor <xmm1=int6464#5,<xmm6=int6464#4
9221# asm 2: pxor <xmm1=%xmm4,<xmm6=%xmm3
9222pxor %xmm4,%xmm3
9223
9224# qhasm: xmm3 = xmm7
9225# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
9226# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
9227movdqa %xmm5,%xmm2
9228
9229# qhasm: xmm3 ^= xmm6
9230# asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3
9231# asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2
9232pxor %xmm3,%xmm2
9233
9234# qhasm: xmm3 &= xmm15
9235# asm 1: pand <xmm15=int6464#16,<xmm3=int6464#3
9236# asm 2: pand <xmm15=%xmm15,<xmm3=%xmm2
9237pand %xmm15,%xmm2
9238
9239# qhasm: xmm15 ^= xmm9
9240# asm 1: pxor <xmm9=int6464#10,<xmm15=int6464#16
9241# asm 2: pxor <xmm9=%xmm9,<xmm15=%xmm15
9242pxor %xmm9,%xmm15
9243
9244# qhasm: xmm15 &= xmm6
9245# asm 1: pand <xmm6=int6464#4,<xmm15=int6464#16
9246# asm 2: pand <xmm6=%xmm3,<xmm15=%xmm15
9247pand %xmm3,%xmm15
9248
9249# qhasm: xmm9 &= xmm7
9250# asm 1: pand <xmm7=int6464#6,<xmm9=int6464#10
9251# asm 2: pand <xmm7=%xmm5,<xmm9=%xmm9
9252pand %xmm5,%xmm9
9253
9254# qhasm: xmm15 ^= xmm9
9255# asm 1: pxor <xmm9=int6464#10,<xmm15=int6464#16
9256# asm 2: pxor <xmm9=%xmm9,<xmm15=%xmm15
9257pxor %xmm9,%xmm15
9258
9259# qhasm: xmm9 ^= xmm3
9260# asm 1: pxor <xmm3=int6464#3,<xmm9=int6464#10
9261# asm 2: pxor <xmm3=%xmm2,<xmm9=%xmm9
9262pxor %xmm2,%xmm9
9263
9264# qhasm: xmm15 ^= xmm4
9265# asm 1: pxor <xmm4=int6464#1,<xmm15=int6464#16
9266# asm 2: pxor <xmm4=%xmm0,<xmm15=%xmm15
9267pxor %xmm0,%xmm15
9268
9269# qhasm: xmm12 ^= xmm4
9270# asm 1: pxor <xmm4=int6464#1,<xmm12=int6464#13
9271# asm 2: pxor <xmm4=%xmm0,<xmm12=%xmm12
9272pxor %xmm0,%xmm12
9273
9274# qhasm: xmm9 ^= xmm0
9275# asm 1: pxor <xmm0=int6464#2,<xmm9=int6464#10
9276# asm 2: pxor <xmm0=%xmm1,<xmm9=%xmm9
9277pxor %xmm1,%xmm9
9278
9279# qhasm: xmm10 ^= xmm0
9280# asm 1: pxor <xmm0=int6464#2,<xmm10=int6464#11
9281# asm 2: pxor <xmm0=%xmm1,<xmm10=%xmm10
9282pxor %xmm1,%xmm10
9283
9284# qhasm: xmm15 ^= xmm8
9285# asm 1: pxor <xmm8=int6464#9,<xmm15=int6464#16
9286# asm 2: pxor <xmm8=%xmm8,<xmm15=%xmm15
9287pxor %xmm8,%xmm15
9288
9289# qhasm: xmm9 ^= xmm14
9290# asm 1: pxor <xmm14=int6464#15,<xmm9=int6464#10
9291# asm 2: pxor <xmm14=%xmm14,<xmm9=%xmm9
9292pxor %xmm14,%xmm9
9293
9294# qhasm: xmm12 ^= xmm15
9295# asm 1: pxor <xmm15=int6464#16,<xmm12=int6464#13
9296# asm 2: pxor <xmm15=%xmm15,<xmm12=%xmm12
9297pxor %xmm15,%xmm12
9298
9299# qhasm: xmm14 ^= xmm8
9300# asm 1: pxor <xmm8=int6464#9,<xmm14=int6464#15
9301# asm 2: pxor <xmm8=%xmm8,<xmm14=%xmm14
9302pxor %xmm8,%xmm14
9303
9304# qhasm: xmm8 ^= xmm9
9305# asm 1: pxor <xmm9=int6464#10,<xmm8=int6464#9
9306# asm 2: pxor <xmm9=%xmm9,<xmm8=%xmm8
9307pxor %xmm9,%xmm8
9308
9309# qhasm: xmm9 ^= xmm13
9310# asm 1: pxor <xmm13=int6464#14,<xmm9=int6464#10
9311# asm 2: pxor <xmm13=%xmm13,<xmm9=%xmm9
9312pxor %xmm13,%xmm9
9313
9314# qhasm: xmm13 ^= xmm10
9315# asm 1: pxor <xmm10=int6464#11,<xmm13=int6464#14
9316# asm 2: pxor <xmm10=%xmm10,<xmm13=%xmm13
9317pxor %xmm10,%xmm13
9318
9319# qhasm: xmm12 ^= xmm13
9320# asm 1: pxor <xmm13=int6464#14,<xmm12=int6464#13
9321# asm 2: pxor <xmm13=%xmm13,<xmm12=%xmm12
9322pxor %xmm13,%xmm12
9323
9324# qhasm: xmm10 ^= xmm11
9325# asm 1: pxor <xmm11=int6464#12,<xmm10=int6464#11
9326# asm 2: pxor <xmm11=%xmm11,<xmm10=%xmm10
9327pxor %xmm11,%xmm10
9328
9329# qhasm: xmm11 ^= xmm13
9330# asm 1: pxor <xmm13=int6464#14,<xmm11=int6464#12
9331# asm 2: pxor <xmm13=%xmm13,<xmm11=%xmm11
9332pxor %xmm13,%xmm11
9333
9334# qhasm: xmm14 ^= xmm11
9335# asm 1: pxor <xmm11=int6464#12,<xmm14=int6464#15
9336# asm 2: pxor <xmm11=%xmm11,<xmm14=%xmm14
9337pxor %xmm11,%xmm14
9338
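# Same MixColumns pattern as one round earlier (pshufd $0x93 word
# rotations, XOR accumulation, then pshufd $0x4E half swaps), applied to
# this round's S-box outputs.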
9339# qhasm: xmm0 = shuffle dwords of xmm8 by 0x93
9340# asm 1: pshufd $0x93,<xmm8=int6464#9,>xmm0=int6464#1
9341# asm 2: pshufd $0x93,<xmm8=%xmm8,>xmm0=%xmm0
9342pshufd $0x93,%xmm8,%xmm0
9343
9344# qhasm: xmm1 = shuffle dwords of xmm9 by 0x93
9345# asm 1: pshufd $0x93,<xmm9=int6464#10,>xmm1=int6464#2
9346# asm 2: pshufd $0x93,<xmm9=%xmm9,>xmm1=%xmm1
9347pshufd $0x93,%xmm9,%xmm1
9348
9349# qhasm: xmm2 = shuffle dwords of xmm12 by 0x93
9350# asm 1: pshufd $0x93,<xmm12=int6464#13,>xmm2=int6464#3
9351# asm 2: pshufd $0x93,<xmm12=%xmm12,>xmm2=%xmm2
9352pshufd $0x93,%xmm12,%xmm2
9353
9354# qhasm: xmm3 = shuffle dwords of xmm14 by 0x93
9355# asm 1: pshufd $0x93,<xmm14=int6464#15,>xmm3=int6464#4
9356# asm 2: pshufd $0x93,<xmm14=%xmm14,>xmm3=%xmm3
9357pshufd $0x93,%xmm14,%xmm3
9358
9359# qhasm: xmm4 = shuffle dwords of xmm11 by 0x93
9360# asm 1: pshufd $0x93,<xmm11=int6464#12,>xmm4=int6464#5
9361# asm 2: pshufd $0x93,<xmm11=%xmm11,>xmm4=%xmm4
9362pshufd $0x93,%xmm11,%xmm4
9363
9364# qhasm: xmm5 = shuffle dwords of xmm15 by 0x93
9365# asm 1: pshufd $0x93,<xmm15=int6464#16,>xmm5=int6464#6
9366# asm 2: pshufd $0x93,<xmm15=%xmm15,>xmm5=%xmm5
9367pshufd $0x93,%xmm15,%xmm5
9368
9369# qhasm: xmm6 = shuffle dwords of xmm10 by 0x93
9370# asm 1: pshufd $0x93,<xmm10=int6464#11,>xmm6=int6464#7
9371# asm 2: pshufd $0x93,<xmm10=%xmm10,>xmm6=%xmm6
9372pshufd $0x93,%xmm10,%xmm6
9373
9374# qhasm: xmm7 = shuffle dwords of xmm13 by 0x93
9375# asm 1: pshufd $0x93,<xmm13=int6464#14,>xmm7=int6464#8
9376# asm 2: pshufd $0x93,<xmm13=%xmm13,>xmm7=%xmm7
9377pshufd $0x93,%xmm13,%xmm7
9378
9379# qhasm: xmm8 ^= xmm0
9380# asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9
9381# asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8
9382pxor %xmm0,%xmm8
9383
9384# qhasm: xmm9 ^= xmm1
9385# asm 1: pxor <xmm1=int6464#2,<xmm9=int6464#10
9386# asm 2: pxor <xmm1=%xmm1,<xmm9=%xmm9
9387pxor %xmm1,%xmm9
9388
9389# qhasm: xmm12 ^= xmm2
9390# asm 1: pxor <xmm2=int6464#3,<xmm12=int6464#13
9391# asm 2: pxor <xmm2=%xmm2,<xmm12=%xmm12
9392pxor %xmm2,%xmm12
9393
9394# qhasm: xmm14 ^= xmm3
9395# asm 1: pxor <xmm3=int6464#4,<xmm14=int6464#15
9396# asm 2: pxor <xmm3=%xmm3,<xmm14=%xmm14
9397pxor %xmm3,%xmm14
9398
9399# qhasm: xmm11 ^= xmm4
9400# asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#12
9401# asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm11
9402pxor %xmm4,%xmm11
9403
9404# qhasm: xmm15 ^= xmm5
9405# asm 1: pxor <xmm5=int6464#6,<xmm15=int6464#16
9406# asm 2: pxor <xmm5=%xmm5,<xmm15=%xmm15
9407pxor %xmm5,%xmm15
9408
9409# qhasm: xmm10 ^= xmm6
9410# asm 1: pxor <xmm6=int6464#7,<xmm10=int6464#11
9411# asm 2: pxor <xmm6=%xmm6,<xmm10=%xmm10
9412pxor %xmm6,%xmm10
9413
9414# qhasm: xmm13 ^= xmm7
9415# asm 1: pxor <xmm7=int6464#8,<xmm13=int6464#14
9416# asm 2: pxor <xmm7=%xmm7,<xmm13=%xmm13
9417pxor %xmm7,%xmm13
9418
9419# qhasm: xmm0 ^= xmm13
9420# asm 1: pxor <xmm13=int6464#14,<xmm0=int6464#1
9421# asm 2: pxor <xmm13=%xmm13,<xmm0=%xmm0
9422pxor %xmm13,%xmm0
9423
9424# qhasm: xmm1 ^= xmm8
9425# asm 1: pxor <xmm8=int6464#9,<xmm1=int6464#2
9426# asm 2: pxor <xmm8=%xmm8,<xmm1=%xmm1
9427pxor %xmm8,%xmm1
9428
9429# qhasm: xmm2 ^= xmm9
9430# asm 1: pxor <xmm9=int6464#10,<xmm2=int6464#3
9431# asm 2: pxor <xmm9=%xmm9,<xmm2=%xmm2
9432pxor %xmm9,%xmm2
9433
9434# qhasm: xmm1 ^= xmm13
9435# asm 1: pxor <xmm13=int6464#14,<xmm1=int6464#2
9436# asm 2: pxor <xmm13=%xmm13,<xmm1=%xmm1
9437pxor %xmm13,%xmm1
9438
9439# qhasm: xmm3 ^= xmm12
9440# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4
9441# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3
9442pxor %xmm12,%xmm3
9443
9444# qhasm: xmm4 ^= xmm14
9445# asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5
9446# asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4
9447pxor %xmm14,%xmm4
9448
9449# qhasm: xmm5 ^= xmm11
9450# asm 1: pxor <xmm11=int6464#12,<xmm5=int6464#6
9451# asm 2: pxor <xmm11=%xmm11,<xmm5=%xmm5
9452pxor %xmm11,%xmm5
9453
9454# qhasm: xmm3 ^= xmm13
9455# asm 1: pxor <xmm13=int6464#14,<xmm3=int6464#4
9456# asm 2: pxor <xmm13=%xmm13,<xmm3=%xmm3
9457pxor %xmm13,%xmm3
9458
9459# qhasm: xmm6 ^= xmm15
9460# asm 1: pxor <xmm15=int6464#16,<xmm6=int6464#7
9461# asm 2: pxor <xmm15=%xmm15,<xmm6=%xmm6
9462pxor %xmm15,%xmm6
9463
9464# qhasm: xmm7 ^= xmm10
9465# asm 1: pxor <xmm10=int6464#11,<xmm7=int6464#8
9466# asm 2: pxor <xmm10=%xmm10,<xmm7=%xmm7
9467pxor %xmm10,%xmm7
9468
9469# qhasm: xmm4 ^= xmm13
9470# asm 1: pxor <xmm13=int6464#14,<xmm4=int6464#5
9471# asm 2: pxor <xmm13=%xmm13,<xmm4=%xmm4
9472pxor %xmm13,%xmm4
9473
9474# qhasm: xmm8 = shuffle dwords of xmm8 by 0x4E
9475# asm 1: pshufd $0x4E,<xmm8=int6464#9,>xmm8=int6464#9
9476# asm 2: pshufd $0x4E,<xmm8=%xmm8,>xmm8=%xmm8
9477pshufd $0x4E,%xmm8,%xmm8
9478
9479# qhasm: xmm9 = shuffle dwords of xmm9 by 0x4E
9480# asm 1: pshufd $0x4E,<xmm9=int6464#10,>xmm9=int6464#10
9481# asm 2: pshufd $0x4E,<xmm9=%xmm9,>xmm9=%xmm9
9482pshufd $0x4E,%xmm9,%xmm9
9483
9484# qhasm: xmm12 = shuffle dwords of xmm12 by 0x4E
9485# asm 1: pshufd $0x4E,<xmm12=int6464#13,>xmm12=int6464#13
9486# asm 2: pshufd $0x4E,<xmm12=%xmm12,>xmm12=%xmm12
9487pshufd $0x4E,%xmm12,%xmm12
9488
9489# qhasm: xmm14 = shuffle dwords of xmm14 by 0x4E
9490# asm 1: pshufd $0x4E,<xmm14=int6464#15,>xmm14=int6464#15
9491# asm 2: pshufd $0x4E,<xmm14=%xmm14,>xmm14=%xmm14
9492pshufd $0x4E,%xmm14,%xmm14
9493
9494# qhasm: xmm11 = shuffle dwords of xmm11 by 0x4E
9495# asm 1: pshufd $0x4E,<xmm11=int6464#12,>xmm11=int6464#12
9496# asm 2: pshufd $0x4E,<xmm11=%xmm11,>xmm11=%xmm11
9497pshufd $0x4E,%xmm11,%xmm11
9498
9499# qhasm: xmm15 = shuffle dwords of xmm15 by 0x4E
9500# asm 1: pshufd $0x4E,<xmm15=int6464#16,>xmm15=int6464#16
9501# asm 2: pshufd $0x4E,<xmm15=%xmm15,>xmm15=%xmm15
9502pshufd $0x4E,%xmm15,%xmm15
9503
9504# qhasm: xmm10 = shuffle dwords of xmm10 by 0x4E
9505# asm 1: pshufd $0x4E,<xmm10=int6464#11,>xmm10=int6464#11
9506# asm 2: pshufd $0x4E,<xmm10=%xmm10,>xmm10=%xmm10
9507pshufd $0x4E,%xmm10,%xmm10
9508
9509# qhasm: xmm13 = shuffle dwords of xmm13 by 0x4E
9510# asm 1: pshufd $0x4E,<xmm13=int6464#14,>xmm13=int6464#14
9511# asm 2: pshufd $0x4E,<xmm13=%xmm13,>xmm13=%xmm13
9512pshufd $0x4E,%xmm13,%xmm13
9513
9514# qhasm: xmm0 ^= xmm8
9515# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
9516# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
9517pxor %xmm8,%xmm0
9518
9519# qhasm: xmm1 ^= xmm9
9520# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
9521# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
9522pxor %xmm9,%xmm1
9523
9524# qhasm: xmm2 ^= xmm12
9525# asm 1: pxor <xmm12=int6464#13,<xmm2=int6464#3
9526# asm 2: pxor <xmm12=%xmm12,<xmm2=%xmm2
9527pxor %xmm12,%xmm2
9528
9529# qhasm: xmm3 ^= xmm14
9530# asm 1: pxor <xmm14=int6464#15,<xmm3=int6464#4
9531# asm 2: pxor <xmm14=%xmm14,<xmm3=%xmm3
9532pxor %xmm14,%xmm3
9533
9534# qhasm: xmm4 ^= xmm11
9535# asm 1: pxor <xmm11=int6464#12,<xmm4=int6464#5
9536# asm 2: pxor <xmm11=%xmm11,<xmm4=%xmm4
9537pxor %xmm11,%xmm4
9538
9539# qhasm: xmm5 ^= xmm15
9540# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
9541# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
9542pxor %xmm15,%xmm5
9543
9544# qhasm: xmm6 ^= xmm10
9545# asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7
9546# asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6
9547pxor %xmm10,%xmm6
9548
9549# qhasm: xmm7 ^= xmm13
9550# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
9551# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
9552pxor %xmm13,%xmm7
9553
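# AddRoundKey for the next round: the offsets c+1024..c+1136 should be
# round key 8 (1024 = 8*128), again followed by ShiftRows via pshufb with
# the SR constant.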
9554# qhasm: xmm0 ^= *(int128 *)(c + 1024)
9555# asm 1: pxor 1024(<c=int64#5),<xmm0=int6464#1
9556# asm 2: pxor 1024(<c=%r8),<xmm0=%xmm0
9557pxor 1024(%r8),%xmm0
9558
9559# qhasm: shuffle bytes of xmm0 by SR
9560# asm 1: pshufb SR,<xmm0=int6464#1
9561# asm 2: pshufb SR,<xmm0=%xmm0
9562pshufb SR,%xmm0
9563
9564# qhasm: xmm1 ^= *(int128 *)(c + 1040)
9565# asm 1: pxor 1040(<c=int64#5),<xmm1=int6464#2
9566# asm 2: pxor 1040(<c=%r8),<xmm1=%xmm1
9567pxor 1040(%r8),%xmm1
9568
9569# qhasm: shuffle bytes of xmm1 by SR
9570# asm 1: pshufb SR,<xmm1=int6464#2
9571# asm 2: pshufb SR,<xmm1=%xmm1
9572pshufb SR,%xmm1
9573
9574# qhasm: xmm2 ^= *(int128 *)(c + 1056)
9575# asm 1: pxor 1056(<c=int64#5),<xmm2=int6464#3
9576# asm 2: pxor 1056(<c=%r8),<xmm2=%xmm2
9577pxor 1056(%r8),%xmm2
9578
9579# qhasm: shuffle bytes of xmm2 by SR
9580# asm 1: pshufb SR,<xmm2=int6464#3
9581# asm 2: pshufb SR,<xmm2=%xmm2
9582pshufb SR,%xmm2
9583
9584# qhasm: xmm3 ^= *(int128 *)(c + 1072)
9585# asm 1: pxor 1072(<c=int64#5),<xmm3=int6464#4
9586# asm 2: pxor 1072(<c=%r8),<xmm3=%xmm3
9587pxor 1072(%r8),%xmm3
9588
9589# qhasm: shuffle bytes of xmm3 by SR
9590# asm 1: pshufb SR,<xmm3=int6464#4
9591# asm 2: pshufb SR,<xmm3=%xmm3
9592pshufb SR,%xmm3
9593
9594# qhasm: xmm4 ^= *(int128 *)(c + 1088)
9595# asm 1: pxor 1088(<c=int64#5),<xmm4=int6464#5
9596# asm 2: pxor 1088(<c=%r8),<xmm4=%xmm4
9597pxor 1088(%r8),%xmm4
9598
9599# qhasm: shuffle bytes of xmm4 by SR
9600# asm 1: pshufb SR,<xmm4=int6464#5
9601# asm 2: pshufb SR,<xmm4=%xmm4
9602pshufb SR,%xmm4
9603
9604# qhasm: xmm5 ^= *(int128 *)(c + 1104)
9605# asm 1: pxor 1104(<c=int64#5),<xmm5=int6464#6
9606# asm 2: pxor 1104(<c=%r8),<xmm5=%xmm5
9607pxor 1104(%r8),%xmm5
9608
9609# qhasm: shuffle bytes of xmm5 by SR
9610# asm 1: pshufb SR,<xmm5=int6464#6
9611# asm 2: pshufb SR,<xmm5=%xmm5
9612pshufb SR,%xmm5
9613
9614# qhasm: xmm6 ^= *(int128 *)(c + 1120)
9615# asm 1: pxor 1120(<c=int64#5),<xmm6=int6464#7
9616# asm 2: pxor 1120(<c=%r8),<xmm6=%xmm6
9617pxor 1120(%r8),%xmm6
9618
9619# qhasm: shuffle bytes of xmm6 by SR
9620# asm 1: pshufb SR,<xmm6=int6464#7
9621# asm 2: pshufb SR,<xmm6=%xmm6
9622pshufb SR,%xmm6
9623
9624# qhasm: xmm7 ^= *(int128 *)(c + 1136)
9625# asm 1: pxor 1136(<c=int64#5),<xmm7=int6464#8
9626# asm 2: pxor 1136(<c=%r8),<xmm7=%xmm7
9627pxor 1136(%r8),%xmm7
9628
9629# qhasm: shuffle bytes of xmm7 by SR
9630# asm 1: pshufb SR,<xmm7=int6464#8
9631# asm 2: pshufb SR,<xmm7=%xmm7
9632pshufb SR,%xmm7
9633
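# Input linear transform of the bitsliced S-box for the following round,
# mirroring the XOR network seen one round earlier.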
9634# qhasm: xmm5 ^= xmm6
9635# asm 1: pxor <xmm6=int6464#7,<xmm5=int6464#6
9636# asm 2: pxor <xmm6=%xmm6,<xmm5=%xmm5
9637pxor %xmm6,%xmm5
9638
9639# qhasm: xmm2 ^= xmm1
9640# asm 1: pxor <xmm1=int6464#2,<xmm2=int6464#3
9641# asm 2: pxor <xmm1=%xmm1,<xmm2=%xmm2
9642pxor %xmm1,%xmm2
9643
9644# qhasm: xmm5 ^= xmm0
9645# asm 1: pxor <xmm0=int6464#1,<xmm5=int6464#6
9646# asm 2: pxor <xmm0=%xmm0,<xmm5=%xmm5
9647pxor %xmm0,%xmm5
9648
9649# qhasm: xmm6 ^= xmm2
9650# asm 1: pxor <xmm2=int6464#3,<xmm6=int6464#7
9651# asm 2: pxor <xmm2=%xmm2,<xmm6=%xmm6
9652pxor %xmm2,%xmm6
9653
9654# qhasm: xmm3 ^= xmm0
9655# asm 1: pxor <xmm0=int6464#1,<xmm3=int6464#4
9656# asm 2: pxor <xmm0=%xmm0,<xmm3=%xmm3
9657pxor %xmm0,%xmm3
9658
9659# qhasm: xmm6 ^= xmm3
9660# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
9661# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
9662pxor %xmm3,%xmm6
9663
9664# qhasm: xmm3 ^= xmm7
9665# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4
9666# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3
9667pxor %xmm7,%xmm3
9668
9669# qhasm: xmm3 ^= xmm4
9670# asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4
9671# asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3
9672pxor %xmm4,%xmm3
9673
9674# qhasm: xmm7 ^= xmm5
9675# asm 1: pxor <xmm5=int6464#6,<xmm7=int6464#8
9676# asm 2: pxor <xmm5=%xmm5,<xmm7=%xmm7
9677pxor %xmm5,%xmm7
9678
9679# qhasm: xmm3 ^= xmm1
9680# asm 1: pxor <xmm1=int6464#2,<xmm3=int6464#4
9681# asm 2: pxor <xmm1=%xmm1,<xmm3=%xmm3
9682pxor %xmm1,%xmm3
9683
9684# qhasm: xmm4 ^= xmm5
9685# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
9686# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
9687pxor %xmm5,%xmm4
9688
9689# qhasm: xmm2 ^= xmm7
9690# asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3
9691# asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2
9692pxor %xmm7,%xmm2
9693
9694# qhasm: xmm1 ^= xmm5
9695# asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2
9696# asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1
9697pxor %xmm5,%xmm1
9698
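# Input basis change done; the movdqa/por/pand block below begins the
# nonlinear core, building the shared AND/OR terms of the inversion.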
9699# qhasm: xmm11 = xmm7
9700# asm 1: movdqa <xmm7=int6464#8,>xmm11=int6464#9
9701# asm 2: movdqa <xmm7=%xmm7,>xmm11=%xmm8
9702movdqa %xmm7,%xmm8
9703
9704# qhasm: xmm10 = xmm1
9705# asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10
9706# asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9
9707movdqa %xmm1,%xmm9
9708
9709# qhasm: xmm9 = xmm5
9710# asm 1: movdqa <xmm5=int6464#6,>xmm9=int6464#11
9711# asm 2: movdqa <xmm5=%xmm5,>xmm9=%xmm10
9712movdqa %xmm5,%xmm10
9713
9714# qhasm: xmm13 = xmm2
9715# asm 1: movdqa <xmm2=int6464#3,>xmm13=int6464#12
9716# asm 2: movdqa <xmm2=%xmm2,>xmm13=%xmm11
9717movdqa %xmm2,%xmm11
9718
9719# qhasm: xmm12 = xmm6
9720# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#13
9721# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm12
9722movdqa %xmm6,%xmm12
9723
9724# qhasm: xmm11 ^= xmm4
9725# asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#9
9726# asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm8
9727pxor %xmm4,%xmm8
9728
9729# qhasm: xmm10 ^= xmm2
9730# asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#10
9731# asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm9
9732pxor %xmm2,%xmm9
9733
9734# qhasm: xmm9 ^= xmm3
9735# asm 1: pxor <xmm3=int6464#4,<xmm9=int6464#11
9736# asm 2: pxor <xmm3=%xmm3,<xmm9=%xmm10
9737pxor %xmm3,%xmm10
9738
9739# qhasm: xmm13 ^= xmm4
9740# asm 1: pxor <xmm4=int6464#5,<xmm13=int6464#12
9741# asm 2: pxor <xmm4=%xmm4,<xmm13=%xmm11
9742pxor %xmm4,%xmm11
9743
9744# qhasm: xmm12 ^= xmm0
9745# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13
9746# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12
9747pxor %xmm0,%xmm12
9748
9749# qhasm: xmm14 = xmm11
9750# asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14
9751# asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13
9752movdqa %xmm8,%xmm13
9753
9754# qhasm: xmm8 = xmm10
9755# asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15
9756# asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14
9757movdqa %xmm9,%xmm14
9758
9759# qhasm: xmm15 = xmm11
9760# asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16
9761# asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15
9762movdqa %xmm8,%xmm15
9763
9764# qhasm: xmm10 |= xmm9
9765# asm 1: por <xmm9=int6464#11,<xmm10=int6464#10
9766# asm 2: por <xmm9=%xmm10,<xmm10=%xmm9
9767por %xmm10,%xmm9
9768
9769# qhasm: xmm11 |= xmm12
9770# asm 1: por <xmm12=int6464#13,<xmm11=int6464#9
9771# asm 2: por <xmm12=%xmm12,<xmm11=%xmm8
9772por %xmm12,%xmm8
9773
9774# qhasm: xmm15 ^= xmm8
9775# asm 1: pxor <xmm8=int6464#15,<xmm15=int6464#16
9776# asm 2: pxor <xmm8=%xmm14,<xmm15=%xmm15
9777pxor %xmm14,%xmm15
9778
9779# qhasm: xmm14 &= xmm12
9780# asm 1: pand <xmm12=int6464#13,<xmm14=int6464#14
9781# asm 2: pand <xmm12=%xmm12,<xmm14=%xmm13
9782pand %xmm12,%xmm13
9783
9784# qhasm: xmm8 &= xmm9
9785# asm 1: pand <xmm9=int6464#11,<xmm8=int6464#15
9786# asm 2: pand <xmm9=%xmm10,<xmm8=%xmm14
9787pand %xmm10,%xmm14
9788
9789# qhasm: xmm12 ^= xmm9
9790# asm 1: pxor <xmm9=int6464#11,<xmm12=int6464#13
9791# asm 2: pxor <xmm9=%xmm10,<xmm12=%xmm12
9792pxor %xmm10,%xmm12
9793
9794# qhasm: xmm15 &= xmm12
9795# asm 1: pand <xmm12=int6464#13,<xmm15=int6464#16
9796# asm 2: pand <xmm12=%xmm12,<xmm15=%xmm15
9797pand %xmm12,%xmm15
9798
9799# qhasm: xmm12 = xmm3
9800# asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#11
9801# asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm10
9802movdqa %xmm3,%xmm10
9803
9804# qhasm: xmm12 ^= xmm0
9805# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#11
9806# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm10
9807pxor %xmm0,%xmm10
9808
9809# qhasm: xmm13 &= xmm12
9810# asm 1: pand <xmm12=int6464#11,<xmm13=int6464#12
9811# asm 2: pand <xmm12=%xmm10,<xmm13=%xmm11
9812pand %xmm10,%xmm11
9813
9814# qhasm: xmm11 ^= xmm13
9815# asm 1: pxor <xmm13=int6464#12,<xmm11=int6464#9
9816# asm 2: pxor <xmm13=%xmm11,<xmm11=%xmm8
9817pxor %xmm11,%xmm8
9818
9819# qhasm: xmm10 ^= xmm13
9820# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
9821# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
9822pxor %xmm11,%xmm9
9823
9824# qhasm: xmm13 = xmm7
9825# asm 1: movdqa <xmm7=int6464#8,>xmm13=int6464#11
9826# asm 2: movdqa <xmm7=%xmm7,>xmm13=%xmm10
9827movdqa %xmm7,%xmm10
9828
9829# qhasm: xmm13 ^= xmm1
9830# asm 1: pxor <xmm1=int6464#2,<xmm13=int6464#11
9831# asm 2: pxor <xmm1=%xmm1,<xmm13=%xmm10
9832pxor %xmm1,%xmm10
9833
9834# qhasm: xmm12 = xmm5
9835# asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#12
9836# asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm11
9837movdqa %xmm5,%xmm11
9838
9839# qhasm: xmm9 = xmm13
9840# asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13
9841# asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12
9842movdqa %xmm10,%xmm12
9843
9844# qhasm: xmm12 ^= xmm6
9845# asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#12
9846# asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm11
9847pxor %xmm6,%xmm11
9848
9849# qhasm: xmm9 |= xmm12
9850# asm 1: por <xmm12=int6464#12,<xmm9=int6464#13
9851# asm 2: por <xmm12=%xmm11,<xmm9=%xmm12
9852por %xmm11,%xmm12
9853
9854# qhasm: xmm13 &= xmm12
9855# asm 1: pand <xmm12=int6464#12,<xmm13=int6464#11
9856# asm 2: pand <xmm12=%xmm11,<xmm13=%xmm10
9857pand %xmm11,%xmm10
9858
9859# qhasm: xmm8 ^= xmm13
9860# asm 1: pxor <xmm13=int6464#11,<xmm8=int6464#15
9861# asm 2: pxor <xmm13=%xmm10,<xmm8=%xmm14
9862pxor %xmm10,%xmm14
9863
9864# qhasm: xmm11 ^= xmm15
9865# asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#9
9866# asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm8
9867pxor %xmm15,%xmm8
9868
9869# qhasm: xmm10 ^= xmm14
9870# asm 1: pxor <xmm14=int6464#14,<xmm10=int6464#10
9871# asm 2: pxor <xmm14=%xmm13,<xmm10=%xmm9
9872pxor %xmm13,%xmm9
9873
9874# qhasm: xmm9 ^= xmm15
9875# asm 1: pxor <xmm15=int6464#16,<xmm9=int6464#13
9876# asm 2: pxor <xmm15=%xmm15,<xmm9=%xmm12
9877pxor %xmm15,%xmm12
9878
9879# qhasm: xmm8 ^= xmm14
9880# asm 1: pxor <xmm14=int6464#14,<xmm8=int6464#15
9881# asm 2: pxor <xmm14=%xmm13,<xmm8=%xmm14
9882pxor %xmm13,%xmm14
9883
9884# qhasm: xmm9 ^= xmm14
9885# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
9886# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
9887pxor %xmm13,%xmm12
9888
9889# qhasm: xmm12 = xmm2
9890# asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#11
9891# asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm10
9892movdqa %xmm2,%xmm10
9893
9894# qhasm: xmm13 = xmm4
9895# asm 1: movdqa <xmm4=int6464#5,>xmm13=int6464#12
9896# asm 2: movdqa <xmm4=%xmm4,>xmm13=%xmm11
9897movdqa %xmm4,%xmm11
9898
9899# qhasm: xmm14 = xmm1
9900# asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14
9901# asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13
9902movdqa %xmm1,%xmm13
9903
9904# qhasm: xmm15 = xmm7
9905# asm 1: movdqa <xmm7=int6464#8,>xmm15=int6464#16
9906# asm 2: movdqa <xmm7=%xmm7,>xmm15=%xmm15
9907movdqa %xmm7,%xmm15
9908
9909# qhasm: xmm12 &= xmm3
9910# asm 1: pand <xmm3=int6464#4,<xmm12=int6464#11
9911# asm 2: pand <xmm3=%xmm3,<xmm12=%xmm10
9912pand %xmm3,%xmm10
9913
9914# qhasm: xmm13 &= xmm0
9915# asm 1: pand <xmm0=int6464#1,<xmm13=int6464#12
9916# asm 2: pand <xmm0=%xmm0,<xmm13=%xmm11
9917pand %xmm0,%xmm11
9918
9919# qhasm: xmm14 &= xmm5
9920# asm 1: pand <xmm5=int6464#6,<xmm14=int6464#14
9921# asm 2: pand <xmm5=%xmm5,<xmm14=%xmm13
9922pand %xmm5,%xmm13
9923
9924# qhasm: xmm15 |= xmm6
9925# asm 1: por <xmm6=int6464#7,<xmm15=int6464#16
9926# asm 2: por <xmm6=%xmm6,<xmm15=%xmm15
9927por %xmm6,%xmm15
9928
9929# qhasm: xmm11 ^= xmm12
9930# asm 1: pxor <xmm12=int6464#11,<xmm11=int6464#9
9931# asm 2: pxor <xmm12=%xmm10,<xmm11=%xmm8
9932pxor %xmm10,%xmm8
9933
9934# qhasm: xmm10 ^= xmm13
9935# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
9936# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
9937pxor %xmm11,%xmm9
9938
9939# qhasm: xmm9 ^= xmm14
9940# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
9941# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
9942pxor %xmm13,%xmm12
9943
9944# qhasm: xmm8 ^= xmm15
9945# asm 1: pxor <xmm15=int6464#16,<xmm8=int6464#15
9946# asm 2: pxor <xmm15=%xmm15,<xmm8=%xmm14
9947pxor %xmm15,%xmm14
9948
9949# qhasm: xmm12 = xmm11
9950# asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11
9951# asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10
9952movdqa %xmm8,%xmm10
9953
9954# qhasm: xmm12 ^= xmm10
9955# asm 1: pxor <xmm10=int6464#10,<xmm12=int6464#11
9956# asm 2: pxor <xmm10=%xmm9,<xmm12=%xmm10
9957pxor %xmm9,%xmm10
9958
9959# qhasm: xmm11 &= xmm9
9960# asm 1: pand <xmm9=int6464#13,<xmm11=int6464#9
9961# asm 2: pand <xmm9=%xmm12,<xmm11=%xmm8
9962pand %xmm12,%xmm8
9963
9964# qhasm: xmm14 = xmm8
9965# asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12
9966# asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11
9967movdqa %xmm14,%xmm11
9968
9969# qhasm: xmm14 ^= xmm11
9970# asm 1: pxor <xmm11=int6464#9,<xmm14=int6464#12
9971# asm 2: pxor <xmm11=%xmm8,<xmm14=%xmm11
9972pxor %xmm8,%xmm11
9973
9974# qhasm: xmm15 = xmm12
9975# asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14
9976# asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13
9977movdqa %xmm10,%xmm13
9978
9979# qhasm: xmm15 &= xmm14
9980# asm 1: pand <xmm14=int6464#12,<xmm15=int6464#14
9981# asm 2: pand <xmm14=%xmm11,<xmm15=%xmm13
9982pand %xmm11,%xmm13
9983
9984# qhasm: xmm15 ^= xmm10
9985# asm 1: pxor <xmm10=int6464#10,<xmm15=int6464#14
9986# asm 2: pxor <xmm10=%xmm9,<xmm15=%xmm13
9987pxor %xmm9,%xmm13
9988
9989# qhasm: xmm13 = xmm9
9990# asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16
9991# asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15
9992movdqa %xmm12,%xmm15
9993
9994# qhasm: xmm13 ^= xmm8
9995# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
9996# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
9997pxor %xmm14,%xmm15
9998
9999# qhasm: xmm11 ^= xmm10
10000# asm 1: pxor <xmm10=int6464#10,<xmm11=int6464#9
10001# asm 2: pxor <xmm10=%xmm9,<xmm11=%xmm8
10002pxor %xmm9,%xmm8
10003
10004# qhasm: xmm13 &= xmm11
10005# asm 1: pand <xmm11=int6464#9,<xmm13=int6464#16
10006# asm 2: pand <xmm11=%xmm8,<xmm13=%xmm15
10007pand %xmm8,%xmm15
10008
10009# qhasm: xmm13 ^= xmm8
10010# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
10011# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
10012pxor %xmm14,%xmm15
10013
10014# qhasm: xmm9 ^= xmm13
10015# asm 1: pxor <xmm13=int6464#16,<xmm9=int6464#13
10016# asm 2: pxor <xmm13=%xmm15,<xmm9=%xmm12
10017pxor %xmm15,%xmm12
10018
10019# qhasm: xmm10 = xmm14
10020# asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9
10021# asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8
10022movdqa %xmm11,%xmm8
10023
10024# qhasm: xmm10 ^= xmm13
10025# asm 1: pxor <xmm13=int6464#16,<xmm10=int6464#9
10026# asm 2: pxor <xmm13=%xmm15,<xmm10=%xmm8
10027pxor %xmm15,%xmm8
10028
10029# qhasm: xmm10 &= xmm8
10030# asm 1: pand <xmm8=int6464#15,<xmm10=int6464#9
10031# asm 2: pand <xmm8=%xmm14,<xmm10=%xmm8
10032pand %xmm14,%xmm8
10033
10034# qhasm: xmm9 ^= xmm10
10035# asm 1: pxor <xmm10=int6464#9,<xmm9=int6464#13
10036# asm 2: pxor <xmm10=%xmm8,<xmm9=%xmm12
10037pxor %xmm8,%xmm12
10038
10039# qhasm: xmm14 ^= xmm10
10040# asm 1: pxor <xmm10=int6464#9,<xmm14=int6464#12
10041# asm 2: pxor <xmm10=%xmm8,<xmm14=%xmm11
10042pxor %xmm8,%xmm11
10043
10044# qhasm: xmm14 &= xmm15
10045# asm 1: pand <xmm15=int6464#14,<xmm14=int6464#12
10046# asm 2: pand <xmm15=%xmm13,<xmm14=%xmm11
10047pand %xmm13,%xmm11
10048
10049# qhasm: xmm14 ^= xmm12
10050# asm 1: pxor <xmm12=int6464#11,<xmm14=int6464#12
10051# asm 2: pxor <xmm12=%xmm10,<xmm14=%xmm11
10052pxor %xmm10,%xmm11
10053
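# Broadly, the shared inversion terms are now in place, and the circuit
# multiplies the inverse back onto the input lines in pairs: repeated
# pand/pxor stanzas, one per pair of output planes, reusing the terms
# left in the temporary registers above (a structural sketch read off
# the generated code, not an exact accounting).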
10054# qhasm: xmm12 = xmm6
10055# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#9
10056# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm8
10057movdqa %xmm6,%xmm8
10058
10059# qhasm: xmm8 = xmm5
10060# asm 1: movdqa <xmm5=int6464#6,>xmm8=int6464#10
10061# asm 2: movdqa <xmm5=%xmm5,>xmm8=%xmm9
10062movdqa %xmm5,%xmm9
10063
10064# qhasm: xmm10 = xmm15
10065# asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11
10066# asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10
10067movdqa %xmm13,%xmm10
10068
10069# qhasm: xmm10 ^= xmm14
10070# asm 1: pxor <xmm14=int6464#12,<xmm10=int6464#11
10071# asm 2: pxor <xmm14=%xmm11,<xmm10=%xmm10
10072pxor %xmm11,%xmm10
10073
10074# qhasm: xmm10 &= xmm6
10075# asm 1: pand <xmm6=int6464#7,<xmm10=int6464#11
10076# asm 2: pand <xmm6=%xmm6,<xmm10=%xmm10
10077pand %xmm6,%xmm10
10078
10079# qhasm: xmm6 ^= xmm5
10080# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
10081# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
10082pxor %xmm5,%xmm6
10083
10084# qhasm: xmm6 &= xmm14
10085# asm 1: pand <xmm14=int6464#12,<xmm6=int6464#7
10086# asm 2: pand <xmm14=%xmm11,<xmm6=%xmm6
10087pand %xmm11,%xmm6
10088
10089# qhasm: xmm5 &= xmm15
10090# asm 1: pand <xmm15=int6464#14,<xmm5=int6464#6
10091# asm 2: pand <xmm15=%xmm13,<xmm5=%xmm5
10092pand %xmm13,%xmm5
10093
10094# qhasm: xmm6 ^= xmm5
10095# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
10096# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
10097pxor %xmm5,%xmm6
10098
10099# qhasm: xmm5 ^= xmm10
10100# asm 1: pxor <xmm10=int6464#11,<xmm5=int6464#6
10101# asm 2: pxor <xmm10=%xmm10,<xmm5=%xmm5
10102pxor %xmm10,%xmm5
10103
10104# qhasm: xmm12 ^= xmm0
10105# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#9
10106# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm8
10107pxor %xmm0,%xmm8
10108
10109# qhasm: xmm8 ^= xmm3
10110# asm 1: pxor <xmm3=int6464#4,<xmm8=int6464#10
10111# asm 2: pxor <xmm3=%xmm3,<xmm8=%xmm9
10112pxor %xmm3,%xmm9
10113
10114# qhasm: xmm15 ^= xmm13
10115# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
10116# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
10117pxor %xmm15,%xmm13
10118
10119# qhasm: xmm14 ^= xmm9
10120# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
10121# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
10122pxor %xmm12,%xmm11
10123
10124# qhasm: xmm11 = xmm15
10125# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
10126# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
10127movdqa %xmm13,%xmm10
10128
10129# qhasm: xmm11 ^= xmm14
10130# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
10131# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
10132pxor %xmm11,%xmm10
10133
10134# qhasm: xmm11 &= xmm12
10135# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
10136# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
10137pand %xmm8,%xmm10
10138
10139# qhasm: xmm12 ^= xmm8
10140# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
10141# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
10142pxor %xmm9,%xmm8
10143
10144# qhasm: xmm12 &= xmm14
10145# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
10146# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
10147pand %xmm11,%xmm8
10148
10149# qhasm: xmm8 &= xmm15
10150# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
10151# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
10152pand %xmm13,%xmm9
10153
10154# qhasm: xmm8 ^= xmm12
10155# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
10156# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
10157pxor %xmm8,%xmm9
10158
10159# qhasm: xmm12 ^= xmm11
10160# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
10161# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
10162pxor %xmm10,%xmm8
10163
10164# qhasm: xmm10 = xmm13
10165# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
10166# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
10167movdqa %xmm15,%xmm10
10168
10169# qhasm: xmm10 ^= xmm9
10170# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
10171# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
10172pxor %xmm12,%xmm10
10173
10174# qhasm: xmm10 &= xmm0
10175# asm 1: pand <xmm0=int6464#1,<xmm10=int6464#11
10176# asm 2: pand <xmm0=%xmm0,<xmm10=%xmm10
10177pand %xmm0,%xmm10
10178
10179# qhasm: xmm0 ^= xmm3
10180# asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1
10181# asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0
10182pxor %xmm3,%xmm0
10183
10184# qhasm: xmm0 &= xmm9
10185# asm 1: pand <xmm9=int6464#13,<xmm0=int6464#1
10186# asm 2: pand <xmm9=%xmm12,<xmm0=%xmm0
10187pand %xmm12,%xmm0
10188
10189# qhasm: xmm3 &= xmm13
10190# asm 1: pand <xmm13=int6464#16,<xmm3=int6464#4
10191# asm 2: pand <xmm13=%xmm15,<xmm3=%xmm3
10192pand %xmm15,%xmm3
10193
10194# qhasm: xmm0 ^= xmm3
10195# asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1
10196# asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0
10197pxor %xmm3,%xmm0
10198
10199# qhasm: xmm3 ^= xmm10
10200# asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4
10201# asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3
10202pxor %xmm10,%xmm3
10203
10204# qhasm: xmm6 ^= xmm12
10205# asm 1: pxor <xmm12=int6464#9,<xmm6=int6464#7
10206# asm 2: pxor <xmm12=%xmm8,<xmm6=%xmm6
10207pxor %xmm8,%xmm6
10208
10209# qhasm: xmm0 ^= xmm12
10210# asm 1: pxor <xmm12=int6464#9,<xmm0=int6464#1
10211# asm 2: pxor <xmm12=%xmm8,<xmm0=%xmm0
10212pxor %xmm8,%xmm0
10213
10214# qhasm: xmm5 ^= xmm8
10215# asm 1: pxor <xmm8=int6464#10,<xmm5=int6464#6
10216# asm 2: pxor <xmm8=%xmm9,<xmm5=%xmm5
10217pxor %xmm9,%xmm5
10218
10219# qhasm: xmm3 ^= xmm8
10220# asm 1: pxor <xmm8=int6464#10,<xmm3=int6464#4
10221# asm 2: pxor <xmm8=%xmm9,<xmm3=%xmm3
10222pxor %xmm9,%xmm3
10223
10224# qhasm: xmm12 = xmm7
10225# asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#9
10226# asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm8
10227movdqa %xmm7,%xmm8
10228
10229# qhasm: xmm8 = xmm1
10230# asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10
10231# asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9
10232movdqa %xmm1,%xmm9
10233
10234# qhasm: xmm12 ^= xmm4
10235# asm 1: pxor <xmm4=int6464#5,<xmm12=int6464#9
10236# asm 2: pxor <xmm4=%xmm4,<xmm12=%xmm8
10237pxor %xmm4,%xmm8
10238
10239# qhasm: xmm8 ^= xmm2
10240# asm 1: pxor <xmm2=int6464#3,<xmm8=int6464#10
10241# asm 2: pxor <xmm2=%xmm2,<xmm8=%xmm9
10242pxor %xmm2,%xmm9
10243
10244# qhasm: xmm11 = xmm15
10245# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
10246# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
10247movdqa %xmm13,%xmm10
10248
10249# qhasm: xmm11 ^= xmm14
10250# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
10251# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
10252pxor %xmm11,%xmm10
10253
10254# qhasm: xmm11 &= xmm12
10255# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
10256# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
10257pand %xmm8,%xmm10
10258
10259# qhasm: xmm12 ^= xmm8
10260# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
10261# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
10262pxor %xmm9,%xmm8
10263
10264# qhasm: xmm12 &= xmm14
10265# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
10266# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
10267pand %xmm11,%xmm8
10268
10269# qhasm: xmm8 &= xmm15
10270# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
10271# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
10272pand %xmm13,%xmm9
10273
10274# qhasm: xmm8 ^= xmm12
10275# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
10276# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
10277pxor %xmm8,%xmm9
10278
10279# qhasm: xmm12 ^= xmm11
10280# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
10281# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
10282pxor %xmm10,%xmm8
10283
10284# qhasm: xmm10 = xmm13
10285# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
10286# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
10287movdqa %xmm15,%xmm10
10288
10289# qhasm: xmm10 ^= xmm9
10290# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
10291# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
10292pxor %xmm12,%xmm10
10293
10294# qhasm: xmm10 &= xmm4
10295# asm 1: pand <xmm4=int6464#5,<xmm10=int6464#11
10296# asm 2: pand <xmm4=%xmm4,<xmm10=%xmm10
10297pand %xmm4,%xmm10
10298
10299# qhasm: xmm4 ^= xmm2
10300# asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5
10301# asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4
10302pxor %xmm2,%xmm4
10303
10304# qhasm: xmm4 &= xmm9
10305# asm 1: pand <xmm9=int6464#13,<xmm4=int6464#5
10306# asm 2: pand <xmm9=%xmm12,<xmm4=%xmm4
10307pand %xmm12,%xmm4
10308
10309# qhasm: xmm2 &= xmm13
10310# asm 1: pand <xmm13=int6464#16,<xmm2=int6464#3
10311# asm 2: pand <xmm13=%xmm15,<xmm2=%xmm2
10312pand %xmm15,%xmm2
10313
10314# qhasm: xmm4 ^= xmm2
10315# asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5
10316# asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4
10317pxor %xmm2,%xmm4
10318
10319# qhasm: xmm2 ^= xmm10
10320# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3
10321# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2
10322pxor %xmm10,%xmm2
10323
10324# qhasm: xmm15 ^= xmm13
10325# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
10326# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
10327pxor %xmm15,%xmm13
10328
10329# qhasm: xmm14 ^= xmm9
10330# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
10331# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
10332pxor %xmm12,%xmm11
10333
10334# qhasm: xmm11 = xmm15
10335# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
10336# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
10337movdqa %xmm13,%xmm10
10338
10339# qhasm: xmm11 ^= xmm14
10340# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
10341# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
10342pxor %xmm11,%xmm10
10343
10344# qhasm: xmm11 &= xmm7
10345# asm 1: pand <xmm7=int6464#8,<xmm11=int6464#11
10346# asm 2: pand <xmm7=%xmm7,<xmm11=%xmm10
10347pand %xmm7,%xmm10
10348
10349# qhasm: xmm7 ^= xmm1
10350# asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8
10351# asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7
10352pxor %xmm1,%xmm7
10353
10354# qhasm: xmm7 &= xmm14
10355# asm 1: pand <xmm14=int6464#12,<xmm7=int6464#8
10356# asm 2: pand <xmm14=%xmm11,<xmm7=%xmm7
10357pand %xmm11,%xmm7
10358
10359# qhasm: xmm1 &= xmm15
10360# asm 1: pand <xmm15=int6464#14,<xmm1=int6464#2
10361# asm 2: pand <xmm15=%xmm13,<xmm1=%xmm1
10362pand %xmm13,%xmm1
10363
10364# qhasm: xmm7 ^= xmm1
10365# asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8
10366# asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7
10367pxor %xmm1,%xmm7
10368
10369# qhasm: xmm1 ^= xmm11
10370# asm 1: pxor <xmm11=int6464#11,<xmm1=int6464#2
10371# asm 2: pxor <xmm11=%xmm10,<xmm1=%xmm1
10372pxor %xmm10,%xmm1
10373
10374# qhasm: xmm7 ^= xmm12
10375# asm 1: pxor <xmm12=int6464#9,<xmm7=int6464#8
10376# asm 2: pxor <xmm12=%xmm8,<xmm7=%xmm7
10377pxor %xmm8,%xmm7
10378
10379# qhasm: xmm4 ^= xmm12
10380# asm 1: pxor <xmm12=int6464#9,<xmm4=int6464#5
10381# asm 2: pxor <xmm12=%xmm8,<xmm4=%xmm4
10382pxor %xmm8,%xmm4
10383
10384# qhasm: xmm1 ^= xmm8
10385# asm 1: pxor <xmm8=int6464#10,<xmm1=int6464#2
10386# asm 2: pxor <xmm8=%xmm9,<xmm1=%xmm1
10387pxor %xmm9,%xmm1
10388
10389# qhasm: xmm2 ^= xmm8
10390# asm 1: pxor <xmm8=int6464#10,<xmm2=int6464#3
10391# asm 2: pxor <xmm8=%xmm9,<xmm2=%xmm2
10392pxor %xmm9,%xmm2
10393
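# Output basis change: the closing run of XORs maps the inverted bytes
# back to the AES polynomial basis. No constant is XORed in here, so the
# S-box's affine constant is presumably folded into the bitsliced round
# keys during key expansion. The results land permuted across the
# planes (order 0,1,4,6,3,7,2,5), and the MixColumns code below reads
# them in that permuted order.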
10394# qhasm: xmm7 ^= xmm0
10395# asm 1: pxor <xmm0=int6464#1,<xmm7=int6464#8
10396# asm 2: pxor <xmm0=%xmm0,<xmm7=%xmm7
10397pxor %xmm0,%xmm7
10398
10399# qhasm: xmm1 ^= xmm6
10400# asm 1: pxor <xmm6=int6464#7,<xmm1=int6464#2
10401# asm 2: pxor <xmm6=%xmm6,<xmm1=%xmm1
10402pxor %xmm6,%xmm1
10403
10404# qhasm: xmm4 ^= xmm7
10405# asm 1: pxor <xmm7=int6464#8,<xmm4=int6464#5
10406# asm 2: pxor <xmm7=%xmm7,<xmm4=%xmm4
10407pxor %xmm7,%xmm4
10408
10409# qhasm: xmm6 ^= xmm0
10410# asm 1: pxor <xmm0=int6464#1,<xmm6=int6464#7
10411# asm 2: pxor <xmm0=%xmm0,<xmm6=%xmm6
10412pxor %xmm0,%xmm6
10413
10414# qhasm: xmm0 ^= xmm1
10415# asm 1: pxor <xmm1=int6464#2,<xmm0=int6464#1
10416# asm 2: pxor <xmm1=%xmm1,<xmm0=%xmm0
10417pxor %xmm1,%xmm0
10418
10419# qhasm: xmm1 ^= xmm5
10420# asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2
10421# asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1
10422pxor %xmm5,%xmm1
10423
10424# qhasm: xmm5 ^= xmm2
10425# asm 1: pxor <xmm2=int6464#3,<xmm5=int6464#6
10426# asm 2: pxor <xmm2=%xmm2,<xmm5=%xmm5
10427pxor %xmm2,%xmm5
10428
10429# qhasm: xmm4 ^= xmm5
10430# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
10431# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
10432pxor %xmm5,%xmm4
10433
10434# qhasm: xmm2 ^= xmm3
10435# asm 1: pxor <xmm3=int6464#4,<xmm2=int6464#3
10436# asm 2: pxor <xmm3=%xmm3,<xmm2=%xmm2
10437pxor %xmm3,%xmm2
10438
10439# qhasm: xmm3 ^= xmm5
10440# asm 1: pxor <xmm5=int6464#6,<xmm3=int6464#4
10441# asm 2: pxor <xmm5=%xmm5,<xmm3=%xmm3
10442pxor %xmm5,%xmm3
10443
10444# qhasm: xmm6 ^= xmm3
10445# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
10446# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
10447pxor %xmm3,%xmm6
10448
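# MixColumns, per bit plane: pshufd $0x93 rotates the four 32-bit lanes
# of a slice by one position and (further down) pshufd $0x4E by two, so
# each AES column is rotated by one or two rows without leaving its bit
# plane. Multiplication by x in GF(2^8) is free in this representation,
# a shift between planes folded into the choice of which slices get
# XORed together. Schematically, per plane i:
#
#   t_i   = s_i ^ rot1(s_i)
#   out_i = rot1(s_i) ^ t_(i-1) ^ rot2(t_i)  (+ reduction taps from the
#                                              high planes)
#
# A sketch of the structure only; the permuted plane order above makes
# the exact index mapping hard to read off the generated code.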
10449# qhasm: xmm8 = shuffle dwords of xmm0 by 0x93
10450# asm 1: pshufd $0x93,<xmm0=int6464#1,>xmm8=int6464#9
10451# asm 2: pshufd $0x93,<xmm0=%xmm0,>xmm8=%xmm8
10452pshufd $0x93,%xmm0,%xmm8
10453
10454# qhasm: xmm9 = shuffle dwords of xmm1 by 0x93
10455# asm 1: pshufd $0x93,<xmm1=int6464#2,>xmm9=int6464#10
10456# asm 2: pshufd $0x93,<xmm1=%xmm1,>xmm9=%xmm9
10457pshufd $0x93,%xmm1,%xmm9
10458
10459# qhasm: xmm10 = shuffle dwords of xmm4 by 0x93
10460# asm 1: pshufd $0x93,<xmm4=int6464#5,>xmm10=int6464#11
10461# asm 2: pshufd $0x93,<xmm4=%xmm4,>xmm10=%xmm10
10462pshufd $0x93,%xmm4,%xmm10
10463
10464# qhasm: xmm11 = shuffle dwords of xmm6 by 0x93
10465# asm 1: pshufd $0x93,<xmm6=int6464#7,>xmm11=int6464#12
10466# asm 2: pshufd $0x93,<xmm6=%xmm6,>xmm11=%xmm11
10467pshufd $0x93,%xmm6,%xmm11
10468
10469# qhasm: xmm12 = shuffle dwords of xmm3 by 0x93
10470# asm 1: pshufd $0x93,<xmm3=int6464#4,>xmm12=int6464#13
10471# asm 2: pshufd $0x93,<xmm3=%xmm3,>xmm12=%xmm12
10472pshufd $0x93,%xmm3,%xmm12
10473
10474# qhasm: xmm13 = shuffle dwords of xmm7 by 0x93
10475# asm 1: pshufd $0x93,<xmm7=int6464#8,>xmm13=int6464#14
10476# asm 2: pshufd $0x93,<xmm7=%xmm7,>xmm13=%xmm13
10477pshufd $0x93,%xmm7,%xmm13
10478
10479# qhasm: xmm14 = shuffle dwords of xmm2 by 0x93
10480# asm 1: pshufd $0x93,<xmm2=int6464#3,>xmm14=int6464#15
10481# asm 2: pshufd $0x93,<xmm2=%xmm2,>xmm14=%xmm14
10482pshufd $0x93,%xmm2,%xmm14
10483
10484# qhasm: xmm15 = shuffle dwords of xmm5 by 0x93
10485# asm 1: pshufd $0x93,<xmm5=int6464#6,>xmm15=int6464#16
10486# asm 2: pshufd $0x93,<xmm5=%xmm5,>xmm15=%xmm15
10487pshufd $0x93,%xmm5,%xmm15
10488
10489# qhasm: xmm0 ^= xmm8
10490# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
10491# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
10492pxor %xmm8,%xmm0
10493
10494# qhasm: xmm1 ^= xmm9
10495# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
10496# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
10497pxor %xmm9,%xmm1
10498
10499# qhasm: xmm4 ^= xmm10
10500# asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5
10501# asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4
10502pxor %xmm10,%xmm4
10503
10504# qhasm: xmm6 ^= xmm11
10505# asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7
10506# asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6
10507pxor %xmm11,%xmm6
10508
10509# qhasm: xmm3 ^= xmm12
10510# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4
10511# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3
10512pxor %xmm12,%xmm3
10513
10514# qhasm: xmm7 ^= xmm13
10515# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
10516# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
10517pxor %xmm13,%xmm7
10518
10519# qhasm: xmm2 ^= xmm14
10520# asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3
10521# asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2
10522pxor %xmm14,%xmm2
10523
10524# qhasm: xmm5 ^= xmm15
10525# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
10526# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
10527pxor %xmm15,%xmm5
10528
10529# qhasm: xmm8 ^= xmm5
10530# asm 1: pxor <xmm5=int6464#6,<xmm8=int6464#9
10531# asm 2: pxor <xmm5=%xmm5,<xmm8=%xmm8
10532pxor %xmm5,%xmm8
10533
10534# qhasm: xmm9 ^= xmm0
10535# asm 1: pxor <xmm0=int6464#1,<xmm9=int6464#10
10536# asm 2: pxor <xmm0=%xmm0,<xmm9=%xmm9
10537pxor %xmm0,%xmm9
10538
10539# qhasm: xmm10 ^= xmm1
10540# asm 1: pxor <xmm1=int6464#2,<xmm10=int6464#11
10541# asm 2: pxor <xmm1=%xmm1,<xmm10=%xmm10
10542pxor %xmm1,%xmm10
10543
10544# qhasm: xmm9 ^= xmm5
10545# asm 1: pxor <xmm5=int6464#6,<xmm9=int6464#10
10546# asm 2: pxor <xmm5=%xmm5,<xmm9=%xmm9
10547pxor %xmm5,%xmm9
10548
10549# qhasm: xmm11 ^= xmm4
10550# asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#12
10551# asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm11
10552pxor %xmm4,%xmm11
10553
10554# qhasm: xmm12 ^= xmm6
10555# asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#13
10556# asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm12
10557pxor %xmm6,%xmm12
10558
10559# qhasm: xmm13 ^= xmm3
10560# asm 1: pxor <xmm3=int6464#4,<xmm13=int6464#14
10561# asm 2: pxor <xmm3=%xmm3,<xmm13=%xmm13
10562pxor %xmm3,%xmm13
10563
10564# qhasm: xmm11 ^= xmm5
10565# asm 1: pxor <xmm5=int6464#6,<xmm11=int6464#12
10566# asm 2: pxor <xmm5=%xmm5,<xmm11=%xmm11
10567pxor %xmm5,%xmm11
10568
10569# qhasm: xmm14 ^= xmm7
10570# asm 1: pxor <xmm7=int6464#8,<xmm14=int6464#15
10571# asm 2: pxor <xmm7=%xmm7,<xmm14=%xmm14
10572pxor %xmm7,%xmm14
10573
10574# qhasm: xmm15 ^= xmm2
10575# asm 1: pxor <xmm2=int6464#3,<xmm15=int6464#16
10576# asm 2: pxor <xmm2=%xmm2,<xmm15=%xmm15
10577pxor %xmm2,%xmm15
10578
10579# qhasm: xmm12 ^= xmm5
10580# asm 1: pxor <xmm5=int6464#6,<xmm12=int6464#13
10581# asm 2: pxor <xmm5=%xmm5,<xmm12=%xmm12
10582pxor %xmm5,%xmm12
10583
10584# qhasm: xmm0 = shuffle dwords of xmm0 by 0x4E
10585# asm 1: pshufd $0x4E,<xmm0=int6464#1,>xmm0=int6464#1
10586# asm 2: pshufd $0x4E,<xmm0=%xmm0,>xmm0=%xmm0
10587pshufd $0x4E,%xmm0,%xmm0
10588
10589# qhasm: xmm1 = shuffle dwords of xmm1 by 0x4E
10590# asm 1: pshufd $0x4E,<xmm1=int6464#2,>xmm1=int6464#2
10591# asm 2: pshufd $0x4E,<xmm1=%xmm1,>xmm1=%xmm1
10592pshufd $0x4E,%xmm1,%xmm1
10593
10594# qhasm: xmm4 = shuffle dwords of xmm4 by 0x4E
10595# asm 1: pshufd $0x4E,<xmm4=int6464#5,>xmm4=int6464#5
10596# asm 2: pshufd $0x4E,<xmm4=%xmm4,>xmm4=%xmm4
10597pshufd $0x4E,%xmm4,%xmm4
10598
10599# qhasm: xmm6 = shuffle dwords of xmm6 by 0x4E
10600# asm 1: pshufd $0x4E,<xmm6=int6464#7,>xmm6=int6464#7
10601# asm 2: pshufd $0x4E,<xmm6=%xmm6,>xmm6=%xmm6
10602pshufd $0x4E,%xmm6,%xmm6
10603
10604# qhasm: xmm3 = shuffle dwords of xmm3 by 0x4E
10605# asm 1: pshufd $0x4E,<xmm3=int6464#4,>xmm3=int6464#4
10606# asm 2: pshufd $0x4E,<xmm3=%xmm3,>xmm3=%xmm3
10607pshufd $0x4E,%xmm3,%xmm3
10608
10609# qhasm: xmm7 = shuffle dwords of xmm7 by 0x4E
10610# asm 1: pshufd $0x4E,<xmm7=int6464#8,>xmm7=int6464#8
10611# asm 2: pshufd $0x4E,<xmm7=%xmm7,>xmm7=%xmm7
10612pshufd $0x4E,%xmm7,%xmm7
10613
10614# qhasm: xmm2 = shuffle dwords of xmm2 by 0x4E
10615# asm 1: pshufd $0x4E,<xmm2=int6464#3,>xmm2=int6464#3
10616# asm 2: pshufd $0x4E,<xmm2=%xmm2,>xmm2=%xmm2
10617pshufd $0x4E,%xmm2,%xmm2
10618
10619# qhasm: xmm5 = shuffle dwords of xmm5 by 0x4E
10620# asm 1: pshufd $0x4E,<xmm5=int6464#6,>xmm5=int6464#6
10621# asm 2: pshufd $0x4E,<xmm5=%xmm5,>xmm5=%xmm5
10622pshufd $0x4E,%xmm5,%xmm5
10623
10624# qhasm: xmm8 ^= xmm0
10625# asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9
10626# asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8
10627pxor %xmm0,%xmm8
10628
10629# qhasm: xmm9 ^= xmm1
10630# asm 1: pxor <xmm1=int6464#2,<xmm9=int6464#10
10631# asm 2: pxor <xmm1=%xmm1,<xmm9=%xmm9
10632pxor %xmm1,%xmm9
10633
10634# qhasm: xmm10 ^= xmm4
10635# asm 1: pxor <xmm4=int6464#5,<xmm10=int6464#11
10636# asm 2: pxor <xmm4=%xmm4,<xmm10=%xmm10
10637pxor %xmm4,%xmm10
10638
10639# qhasm: xmm11 ^= xmm6
10640# asm 1: pxor <xmm6=int6464#7,<xmm11=int6464#12
10641# asm 2: pxor <xmm6=%xmm6,<xmm11=%xmm11
10642pxor %xmm6,%xmm11
10643
10644# qhasm: xmm12 ^= xmm3
10645# asm 1: pxor <xmm3=int6464#4,<xmm12=int6464#13
10646# asm 2: pxor <xmm3=%xmm3,<xmm12=%xmm12
10647pxor %xmm3,%xmm12
10648
10649# qhasm: xmm13 ^= xmm7
10650# asm 1: pxor <xmm7=int6464#8,<xmm13=int6464#14
10651# asm 2: pxor <xmm7=%xmm7,<xmm13=%xmm13
10652pxor %xmm7,%xmm13
10653
10654# qhasm: xmm14 ^= xmm2
10655# asm 1: pxor <xmm2=int6464#3,<xmm14=int6464#15
10656# asm 2: pxor <xmm2=%xmm2,<xmm14=%xmm14
10657pxor %xmm2,%xmm14
10658
10659# qhasm: xmm15 ^= xmm5
10660# asm 1: pxor <xmm5=int6464#6,<xmm15=int6464#16
10661# asm 2: pxor <xmm5=%xmm5,<xmm15=%xmm15
10662pxor %xmm5,%xmm15
10663
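# Transition into the final round: XOR in the round-9 key (bitsliced at
# c + 1152) and shuffle by SRM0 instead of SR. SRM0 merges the last
# ShiftRows with the M0 byte ordering, leaving the state arranged so
# that un-bitslicing after the final S-box reduces to a plain
# bit-matrix transpose.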
10664# qhasm: xmm8 ^= *(int128 *)(c + 1152)
10665# asm 1: pxor 1152(<c=int64#5),<xmm8=int6464#9
10666# asm 2: pxor 1152(<c=%r8),<xmm8=%xmm8
10667pxor 1152(%r8),%xmm8
10668
10669# qhasm: shuffle bytes of xmm8 by SRM0
10670# asm 1: pshufb SRM0,<xmm8=int6464#9
10671# asm 2: pshufb SRM0,<xmm8=%xmm8
10672pshufb SRM0,%xmm8
10673
10674# qhasm: xmm9 ^= *(int128 *)(c + 1168)
10675# asm 1: pxor 1168(<c=int64#5),<xmm9=int6464#10
10676# asm 2: pxor 1168(<c=%r8),<xmm9=%xmm9
10677pxor 1168(%r8),%xmm9
10678
10679# qhasm: shuffle bytes of xmm9 by SRM0
10680# asm 1: pshufb SRM0,<xmm9=int6464#10
10681# asm 2: pshufb SRM0,<xmm9=%xmm9
10682pshufb SRM0,%xmm9
10683
10684# qhasm: xmm10 ^= *(int128 *)(c + 1184)
10685# asm 1: pxor 1184(<c=int64#5),<xmm10=int6464#11
10686# asm 2: pxor 1184(<c=%r8),<xmm10=%xmm10
10687pxor 1184(%r8),%xmm10
10688
10689# qhasm: shuffle bytes of xmm10 by SRM0
10690# asm 1: pshufb SRM0,<xmm10=int6464#11
10691# asm 2: pshufb SRM0,<xmm10=%xmm10
10692pshufb SRM0,%xmm10
10693
10694# qhasm: xmm11 ^= *(int128 *)(c + 1200)
10695# asm 1: pxor 1200(<c=int64#5),<xmm11=int6464#12
10696# asm 2: pxor 1200(<c=%r8),<xmm11=%xmm11
10697pxor 1200(%r8),%xmm11
10698
10699# qhasm: shuffle bytes of xmm11 by SRM0
10700# asm 1: pshufb SRM0,<xmm11=int6464#12
10701# asm 2: pshufb SRM0,<xmm11=%xmm11
10702pshufb SRM0,%xmm11
10703
10704# qhasm: xmm12 ^= *(int128 *)(c + 1216)
10705# asm 1: pxor 1216(<c=int64#5),<xmm12=int6464#13
10706# asm 2: pxor 1216(<c=%r8),<xmm12=%xmm12
10707pxor 1216(%r8),%xmm12
10708
10709# qhasm: shuffle bytes of xmm12 by SRM0
10710# asm 1: pshufb SRM0,<xmm12=int6464#13
10711# asm 2: pshufb SRM0,<xmm12=%xmm12
10712pshufb SRM0,%xmm12
10713
10714# qhasm: xmm13 ^= *(int128 *)(c + 1232)
10715# asm 1: pxor 1232(<c=int64#5),<xmm13=int6464#14
10716# asm 2: pxor 1232(<c=%r8),<xmm13=%xmm13
10717pxor 1232(%r8),%xmm13
10718
10719# qhasm: shuffle bytes of xmm13 by SRM0
10720# asm 1: pshufb SRM0,<xmm13=int6464#14
10721# asm 2: pshufb SRM0,<xmm13=%xmm13
10722pshufb SRM0,%xmm13
10723
10724# qhasm: xmm14 ^= *(int128 *)(c + 1248)
10725# asm 1: pxor 1248(<c=int64#5),<xmm14=int6464#15
10726# asm 2: pxor 1248(<c=%r8),<xmm14=%xmm14
10727pxor 1248(%r8),%xmm14
10728
10729# qhasm: shuffle bytes of xmm14 by SRM0
10730# asm 1: pshufb SRM0,<xmm14=int6464#15
10731# asm 2: pshufb SRM0,<xmm14=%xmm14
10732pshufb SRM0,%xmm14
10733
10734# qhasm: xmm15 ^= *(int128 *)(c + 1264)
10735# asm 1: pxor 1264(<c=int64#5),<xmm15=int6464#16
10736# asm 2: pxor 1264(<c=%r8),<xmm15=%xmm15
10737pxor 1264(%r8),%xmm15
10738
10739# qhasm: shuffle bytes of xmm15 by SRM0
10740# asm 1: pshufb SRM0,<xmm15=int6464#16
10741# asm 2: pshufb SRM0,<xmm15=%xmm15
10742pshufb SRM0,%xmm15
10743
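# Second copy of the bitsliced S-box circuit, for the final round. Same
# structure as above with the register roles swapped: the state now
# sits in xmm8..xmm15 and xmm0..xmm7 serve as temporaries.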
10744# qhasm: xmm13 ^= xmm14
10745# asm 1: pxor <xmm14=int6464#15,<xmm13=int6464#14
10746# asm 2: pxor <xmm14=%xmm14,<xmm13=%xmm13
10747pxor %xmm14,%xmm13
10748
10749# qhasm: xmm10 ^= xmm9
10750# asm 1: pxor <xmm9=int6464#10,<xmm10=int6464#11
10751# asm 2: pxor <xmm9=%xmm9,<xmm10=%xmm10
10752pxor %xmm9,%xmm10
10753
10754# qhasm: xmm13 ^= xmm8
10755# asm 1: pxor <xmm8=int6464#9,<xmm13=int6464#14
10756# asm 2: pxor <xmm8=%xmm8,<xmm13=%xmm13
10757pxor %xmm8,%xmm13
10758
10759# qhasm: xmm14 ^= xmm10
10760# asm 1: pxor <xmm10=int6464#11,<xmm14=int6464#15
10761# asm 2: pxor <xmm10=%xmm10,<xmm14=%xmm14
10762pxor %xmm10,%xmm14
10763
10764# qhasm: xmm11 ^= xmm8
10765# asm 1: pxor <xmm8=int6464#9,<xmm11=int6464#12
10766# asm 2: pxor <xmm8=%xmm8,<xmm11=%xmm11
10767pxor %xmm8,%xmm11
10768
10769# qhasm: xmm14 ^= xmm11
10770# asm 1: pxor <xmm11=int6464#12,<xmm14=int6464#15
10771# asm 2: pxor <xmm11=%xmm11,<xmm14=%xmm14
10772pxor %xmm11,%xmm14
10773
10774# qhasm: xmm11 ^= xmm15
10775# asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#12
10776# asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm11
10777pxor %xmm15,%xmm11
10778
10779# qhasm: xmm11 ^= xmm12
10780# asm 1: pxor <xmm12=int6464#13,<xmm11=int6464#12
10781# asm 2: pxor <xmm12=%xmm12,<xmm11=%xmm11
10782pxor %xmm12,%xmm11
10783
10784# qhasm: xmm15 ^= xmm13
10785# asm 1: pxor <xmm13=int6464#14,<xmm15=int6464#16
10786# asm 2: pxor <xmm13=%xmm13,<xmm15=%xmm15
10787pxor %xmm13,%xmm15
10788
10789# qhasm: xmm11 ^= xmm9
10790# asm 1: pxor <xmm9=int6464#10,<xmm11=int6464#12
10791# asm 2: pxor <xmm9=%xmm9,<xmm11=%xmm11
10792pxor %xmm9,%xmm11
10793
10794# qhasm: xmm12 ^= xmm13
10795# asm 1: pxor <xmm13=int6464#14,<xmm12=int6464#13
10796# asm 2: pxor <xmm13=%xmm13,<xmm12=%xmm12
10797pxor %xmm13,%xmm12
10798
10799# qhasm: xmm10 ^= xmm15
10800# asm 1: pxor <xmm15=int6464#16,<xmm10=int6464#11
10801# asm 2: pxor <xmm15=%xmm15,<xmm10=%xmm10
10802pxor %xmm15,%xmm10
10803
10804# qhasm: xmm9 ^= xmm13
10805# asm 1: pxor <xmm13=int6464#14,<xmm9=int6464#10
10806# asm 2: pxor <xmm13=%xmm13,<xmm9=%xmm9
10807pxor %xmm13,%xmm9
10808
10809# qhasm: xmm3 = xmm15
10810# asm 1: movdqa <xmm15=int6464#16,>xmm3=int6464#1
10811# asm 2: movdqa <xmm15=%xmm15,>xmm3=%xmm0
10812movdqa %xmm15,%xmm0
10813
10814# qhasm: xmm2 = xmm9
10815# asm 1: movdqa <xmm9=int6464#10,>xmm2=int6464#2
10816# asm 2: movdqa <xmm9=%xmm9,>xmm2=%xmm1
10817movdqa %xmm9,%xmm1
10818
10819# qhasm: xmm1 = xmm13
10820# asm 1: movdqa <xmm13=int6464#14,>xmm1=int6464#3
10821# asm 2: movdqa <xmm13=%xmm13,>xmm1=%xmm2
10822movdqa %xmm13,%xmm2
10823
10824# qhasm: xmm5 = xmm10
10825# asm 1: movdqa <xmm10=int6464#11,>xmm5=int6464#4
10826# asm 2: movdqa <xmm10=%xmm10,>xmm5=%xmm3
10827movdqa %xmm10,%xmm3
10828
10829# qhasm: xmm4 = xmm14
10830# asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#5
10831# asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm4
10832movdqa %xmm14,%xmm4
10833
10834# qhasm: xmm3 ^= xmm12
10835# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#1
10836# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm0
10837pxor %xmm12,%xmm0
10838
10839# qhasm: xmm2 ^= xmm10
10840# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#2
10841# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm1
10842pxor %xmm10,%xmm1
10843
10844# qhasm: xmm1 ^= xmm11
10845# asm 1: pxor <xmm11=int6464#12,<xmm1=int6464#3
10846# asm 2: pxor <xmm11=%xmm11,<xmm1=%xmm2
10847pxor %xmm11,%xmm2
10848
10849# qhasm: xmm5 ^= xmm12
10850# asm 1: pxor <xmm12=int6464#13,<xmm5=int6464#4
10851# asm 2: pxor <xmm12=%xmm12,<xmm5=%xmm3
10852pxor %xmm12,%xmm3
10853
10854# qhasm: xmm4 ^= xmm8
10855# asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#5
10856# asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm4
10857pxor %xmm8,%xmm4
10858
10859# qhasm: xmm6 = xmm3
10860# asm 1: movdqa <xmm3=int6464#1,>xmm6=int6464#6
10861# asm 2: movdqa <xmm3=%xmm0,>xmm6=%xmm5
10862movdqa %xmm0,%xmm5
10863
10864# qhasm: xmm0 = xmm2
10865# asm 1: movdqa <xmm2=int6464#2,>xmm0=int6464#7
10866# asm 2: movdqa <xmm2=%xmm1,>xmm0=%xmm6
10867movdqa %xmm1,%xmm6
10868
10869# qhasm: xmm7 = xmm3
10870# asm 1: movdqa <xmm3=int6464#1,>xmm7=int6464#8
10871# asm 2: movdqa <xmm3=%xmm0,>xmm7=%xmm7
10872movdqa %xmm0,%xmm7
10873
10874# qhasm: xmm2 |= xmm1
10875# asm 1: por <xmm1=int6464#3,<xmm2=int6464#2
10876# asm 2: por <xmm1=%xmm2,<xmm2=%xmm1
10877por %xmm2,%xmm1
10878
10879# qhasm: xmm3 |= xmm4
10880# asm 1: por <xmm4=int6464#5,<xmm3=int6464#1
10881# asm 2: por <xmm4=%xmm4,<xmm3=%xmm0
10882por %xmm4,%xmm0
10883
10884# qhasm: xmm7 ^= xmm0
10885# asm 1: pxor <xmm0=int6464#7,<xmm7=int6464#8
10886# asm 2: pxor <xmm0=%xmm6,<xmm7=%xmm7
10887pxor %xmm6,%xmm7
10888
10889# qhasm: xmm6 &= xmm4
10890# asm 1: pand <xmm4=int6464#5,<xmm6=int6464#6
10891# asm 2: pand <xmm4=%xmm4,<xmm6=%xmm5
10892pand %xmm4,%xmm5
10893
10894# qhasm: xmm0 &= xmm1
10895# asm 1: pand <xmm1=int6464#3,<xmm0=int6464#7
10896# asm 2: pand <xmm1=%xmm2,<xmm0=%xmm6
10897pand %xmm2,%xmm6
10898
10899# qhasm: xmm4 ^= xmm1
10900# asm 1: pxor <xmm1=int6464#3,<xmm4=int6464#5
10901# asm 2: pxor <xmm1=%xmm2,<xmm4=%xmm4
10902pxor %xmm2,%xmm4
10903
10904# qhasm: xmm7 &= xmm4
10905# asm 1: pand <xmm4=int6464#5,<xmm7=int6464#8
10906# asm 2: pand <xmm4=%xmm4,<xmm7=%xmm7
10907pand %xmm4,%xmm7
10908
10909# qhasm: xmm4 = xmm11
10910# asm 1: movdqa <xmm11=int6464#12,>xmm4=int6464#3
10911# asm 2: movdqa <xmm11=%xmm11,>xmm4=%xmm2
10912movdqa %xmm11,%xmm2
10913
10914# qhasm: xmm4 ^= xmm8
10915# asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#3
10916# asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm2
10917pxor %xmm8,%xmm2
10918
10919# qhasm: xmm5 &= xmm4
10920# asm 1: pand <xmm4=int6464#3,<xmm5=int6464#4
10921# asm 2: pand <xmm4=%xmm2,<xmm5=%xmm3
10922pand %xmm2,%xmm3
10923
10924# qhasm: xmm3 ^= xmm5
10925# asm 1: pxor <xmm5=int6464#4,<xmm3=int6464#1
10926# asm 2: pxor <xmm5=%xmm3,<xmm3=%xmm0
10927pxor %xmm3,%xmm0
10928
10929# qhasm: xmm2 ^= xmm5
10930# asm 1: pxor <xmm5=int6464#4,<xmm2=int6464#2
10931# asm 2: pxor <xmm5=%xmm3,<xmm2=%xmm1
10932pxor %xmm3,%xmm1
10933
10934# qhasm: xmm5 = xmm15
10935# asm 1: movdqa <xmm15=int6464#16,>xmm5=int6464#3
10936# asm 2: movdqa <xmm15=%xmm15,>xmm5=%xmm2
10937movdqa %xmm15,%xmm2
10938
10939# qhasm: xmm5 ^= xmm9
10940# asm 1: pxor <xmm9=int6464#10,<xmm5=int6464#3
10941# asm 2: pxor <xmm9=%xmm9,<xmm5=%xmm2
10942pxor %xmm9,%xmm2
10943
10944# qhasm: xmm4 = xmm13
10945# asm 1: movdqa <xmm13=int6464#14,>xmm4=int6464#4
10946# asm 2: movdqa <xmm13=%xmm13,>xmm4=%xmm3
10947movdqa %xmm13,%xmm3
10948
10949# qhasm: xmm1 = xmm5
10950# asm 1: movdqa <xmm5=int6464#3,>xmm1=int6464#5
10951# asm 2: movdqa <xmm5=%xmm2,>xmm1=%xmm4
10952movdqa %xmm2,%xmm4
10953
10954# qhasm: xmm4 ^= xmm14
10955# asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#4
10956# asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm3
10957pxor %xmm14,%xmm3
10958
10959# qhasm: xmm1 |= xmm4
10960# asm 1: por <xmm4=int6464#4,<xmm1=int6464#5
10961# asm 2: por <xmm4=%xmm3,<xmm1=%xmm4
10962por %xmm3,%xmm4
10963
10964# qhasm: xmm5 &= xmm4
10965# asm 1: pand <xmm4=int6464#4,<xmm5=int6464#3
10966# asm 2: pand <xmm4=%xmm3,<xmm5=%xmm2
10967pand %xmm3,%xmm2
10968
10969# qhasm: xmm0 ^= xmm5
10970# asm 1: pxor <xmm5=int6464#3,<xmm0=int6464#7
10971# asm 2: pxor <xmm5=%xmm2,<xmm0=%xmm6
10972pxor %xmm2,%xmm6
10973
10974# qhasm: xmm3 ^= xmm7
10975# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#1
10976# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm0
10977pxor %xmm7,%xmm0
10978
10979# qhasm: xmm2 ^= xmm6
10980# asm 1: pxor <xmm6=int6464#6,<xmm2=int6464#2
10981# asm 2: pxor <xmm6=%xmm5,<xmm2=%xmm1
10982pxor %xmm5,%xmm1
10983
10984# qhasm: xmm1 ^= xmm7
10985# asm 1: pxor <xmm7=int6464#8,<xmm1=int6464#5
10986# asm 2: pxor <xmm7=%xmm7,<xmm1=%xmm4
10987pxor %xmm7,%xmm4
10988
10989# qhasm: xmm0 ^= xmm6
10990# asm 1: pxor <xmm6=int6464#6,<xmm0=int6464#7
10991# asm 2: pxor <xmm6=%xmm5,<xmm0=%xmm6
10992pxor %xmm5,%xmm6
10993
10994# qhasm: xmm1 ^= xmm6
10995# asm 1: pxor <xmm6=int6464#6,<xmm1=int6464#5
10996# asm 2: pxor <xmm6=%xmm5,<xmm1=%xmm4
10997pxor %xmm5,%xmm4
10998
10999# qhasm: xmm4 = xmm10
11000# asm 1: movdqa <xmm10=int6464#11,>xmm4=int6464#3
11001# asm 2: movdqa <xmm10=%xmm10,>xmm4=%xmm2
11002movdqa %xmm10,%xmm2
11003
11004# qhasm: xmm5 = xmm12
11005# asm 1: movdqa <xmm12=int6464#13,>xmm5=int6464#4
11006# asm 2: movdqa <xmm12=%xmm12,>xmm5=%xmm3
11007movdqa %xmm12,%xmm3
11008
11009# qhasm: xmm6 = xmm9
11010# asm 1: movdqa <xmm9=int6464#10,>xmm6=int6464#6
11011# asm 2: movdqa <xmm9=%xmm9,>xmm6=%xmm5
11012movdqa %xmm9,%xmm5
11013
11014# qhasm: xmm7 = xmm15
11015# asm 1: movdqa <xmm15=int6464#16,>xmm7=int6464#8
11016# asm 2: movdqa <xmm15=%xmm15,>xmm7=%xmm7
11017movdqa %xmm15,%xmm7
11018
11019# qhasm: xmm4 &= xmm11
11020# asm 1: pand <xmm11=int6464#12,<xmm4=int6464#3
11021# asm 2: pand <xmm11=%xmm11,<xmm4=%xmm2
11022pand %xmm11,%xmm2
11023
11024# qhasm: xmm5 &= xmm8
11025# asm 1: pand <xmm8=int6464#9,<xmm5=int6464#4
11026# asm 2: pand <xmm8=%xmm8,<xmm5=%xmm3
11027pand %xmm8,%xmm3
11028
11029# qhasm: xmm6 &= xmm13
11030# asm 1: pand <xmm13=int6464#14,<xmm6=int6464#6
11031# asm 2: pand <xmm13=%xmm13,<xmm6=%xmm5
11032pand %xmm13,%xmm5
11033
11034# qhasm: xmm7 |= xmm14
11035# asm 1: por <xmm14=int6464#15,<xmm7=int6464#8
11036# asm 2: por <xmm14=%xmm14,<xmm7=%xmm7
11037por %xmm14,%xmm7
11038
11039# qhasm: xmm3 ^= xmm4
11040# asm 1: pxor <xmm4=int6464#3,<xmm3=int6464#1
11041# asm 2: pxor <xmm4=%xmm2,<xmm3=%xmm0
11042pxor %xmm2,%xmm0
11043
11044# qhasm: xmm2 ^= xmm5
11045# asm 1: pxor <xmm5=int6464#4,<xmm2=int6464#2
11046# asm 2: pxor <xmm5=%xmm3,<xmm2=%xmm1
11047pxor %xmm3,%xmm1
11048
11049# qhasm: xmm1 ^= xmm6
11050# asm 1: pxor <xmm6=int6464#6,<xmm1=int6464#5
11051# asm 2: pxor <xmm6=%xmm5,<xmm1=%xmm4
11052pxor %xmm5,%xmm4
11053
11054# qhasm: xmm0 ^= xmm7
11055# asm 1: pxor <xmm7=int6464#8,<xmm0=int6464#7
11056# asm 2: pxor <xmm7=%xmm7,<xmm0=%xmm6
11057pxor %xmm7,%xmm6
11058
11059# qhasm: xmm4 = xmm3
11060# asm 1: movdqa <xmm3=int6464#1,>xmm4=int6464#3
11061# asm 2: movdqa <xmm3=%xmm0,>xmm4=%xmm2
11062movdqa %xmm0,%xmm2
11063
11064# qhasm: xmm4 ^= xmm2
11065# asm 1: pxor <xmm2=int6464#2,<xmm4=int6464#3
11066# asm 2: pxor <xmm2=%xmm1,<xmm4=%xmm2
11067pxor %xmm1,%xmm2
11068
11069# qhasm: xmm3 &= xmm1
11070# asm 1: pand <xmm1=int6464#5,<xmm3=int6464#1
11071# asm 2: pand <xmm1=%xmm4,<xmm3=%xmm0
11072pand %xmm4,%xmm0
11073
11074# qhasm: xmm6 = xmm0
11075# asm 1: movdqa <xmm0=int6464#7,>xmm6=int6464#4
11076# asm 2: movdqa <xmm0=%xmm6,>xmm6=%xmm3
11077movdqa %xmm6,%xmm3
11078
11079# qhasm: xmm6 ^= xmm3
11080# asm 1: pxor <xmm3=int6464#1,<xmm6=int6464#4
11081# asm 2: pxor <xmm3=%xmm0,<xmm6=%xmm3
11082pxor %xmm0,%xmm3
11083
11084# qhasm: xmm7 = xmm4
11085# asm 1: movdqa <xmm4=int6464#3,>xmm7=int6464#6
11086# asm 2: movdqa <xmm4=%xmm2,>xmm7=%xmm5
11087movdqa %xmm2,%xmm5
11088
11089# qhasm: xmm7 &= xmm6
11090# asm 1: pand <xmm6=int6464#4,<xmm7=int6464#6
11091# asm 2: pand <xmm6=%xmm3,<xmm7=%xmm5
11092pand %xmm3,%xmm5
11093
11094# qhasm: xmm7 ^= xmm2
11095# asm 1: pxor <xmm2=int6464#2,<xmm7=int6464#6
11096# asm 2: pxor <xmm2=%xmm1,<xmm7=%xmm5
11097pxor %xmm1,%xmm5
11098
11099# qhasm: xmm5 = xmm1
11100# asm 1: movdqa <xmm1=int6464#5,>xmm5=int6464#8
11101# asm 2: movdqa <xmm1=%xmm4,>xmm5=%xmm7
11102movdqa %xmm4,%xmm7
11103
11104# qhasm: xmm5 ^= xmm0
11105# asm 1: pxor <xmm0=int6464#7,<xmm5=int6464#8
11106# asm 2: pxor <xmm0=%xmm6,<xmm5=%xmm7
11107pxor %xmm6,%xmm7
11108
11109# qhasm: xmm3 ^= xmm2
11110# asm 1: pxor <xmm2=int6464#2,<xmm3=int6464#1
11111# asm 2: pxor <xmm2=%xmm1,<xmm3=%xmm0
11112pxor %xmm1,%xmm0
11113
11114# qhasm: xmm5 &= xmm3
11115# asm 1: pand <xmm3=int6464#1,<xmm5=int6464#8
11116# asm 2: pand <xmm3=%xmm0,<xmm5=%xmm7
11117pand %xmm0,%xmm7
11118
11119# qhasm: xmm5 ^= xmm0
11120# asm 1: pxor <xmm0=int6464#7,<xmm5=int6464#8
11121# asm 2: pxor <xmm0=%xmm6,<xmm5=%xmm7
11122pxor %xmm6,%xmm7
11123
11124# qhasm: xmm1 ^= xmm5
11125# asm 1: pxor <xmm5=int6464#8,<xmm1=int6464#5
11126# asm 2: pxor <xmm5=%xmm7,<xmm1=%xmm4
11127pxor %xmm7,%xmm4
11128
11129# qhasm: xmm2 = xmm6
11130# asm 1: movdqa <xmm6=int6464#4,>xmm2=int6464#1
11131# asm 2: movdqa <xmm6=%xmm3,>xmm2=%xmm0
11132movdqa %xmm3,%xmm0
11133
11134# qhasm: xmm2 ^= xmm5
11135# asm 1: pxor <xmm5=int6464#8,<xmm2=int6464#1
11136# asm 2: pxor <xmm5=%xmm7,<xmm2=%xmm0
11137pxor %xmm7,%xmm0
11138
11139# qhasm: xmm2 &= xmm0
11140# asm 1: pand <xmm0=int6464#7,<xmm2=int6464#1
11141# asm 2: pand <xmm0=%xmm6,<xmm2=%xmm0
11142pand %xmm6,%xmm0
11143
11144# qhasm: xmm1 ^= xmm2
11145# asm 1: pxor <xmm2=int6464#1,<xmm1=int6464#5
11146# asm 2: pxor <xmm2=%xmm0,<xmm1=%xmm4
11147pxor %xmm0,%xmm4
11148
11149# qhasm: xmm6 ^= xmm2
11150# asm 1: pxor <xmm2=int6464#1,<xmm6=int6464#4
11151# asm 2: pxor <xmm2=%xmm0,<xmm6=%xmm3
11152pxor %xmm0,%xmm3
11153
11154# qhasm: xmm6 &= xmm7
11155# asm 1: pand <xmm7=int6464#6,<xmm6=int6464#4
11156# asm 2: pand <xmm7=%xmm5,<xmm6=%xmm3
11157pand %xmm5,%xmm3
11158
11159# qhasm: xmm6 ^= xmm4
11160# asm 1: pxor <xmm4=int6464#3,<xmm6=int6464#4
11161# asm 2: pxor <xmm4=%xmm2,<xmm6=%xmm3
11162pxor %xmm2,%xmm3
11163
11164# qhasm: xmm4 = xmm14
11165# asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#1
11166# asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm0
11167movdqa %xmm14,%xmm0
11168
11169# qhasm: xmm0 = xmm13
11170# asm 1: movdqa <xmm13=int6464#14,>xmm0=int6464#2
11171# asm 2: movdqa <xmm13=%xmm13,>xmm0=%xmm1
11172movdqa %xmm13,%xmm1
11173
11174# qhasm: xmm2 = xmm7
11175# asm 1: movdqa <xmm7=int6464#6,>xmm2=int6464#3
11176# asm 2: movdqa <xmm7=%xmm5,>xmm2=%xmm2
11177movdqa %xmm5,%xmm2
11178
11179# qhasm: xmm2 ^= xmm6
11180# asm 1: pxor <xmm6=int6464#4,<xmm2=int6464#3
11181# asm 2: pxor <xmm6=%xmm3,<xmm2=%xmm2
11182pxor %xmm3,%xmm2
11183
11184# qhasm: xmm2 &= xmm14
11185# asm 1: pand <xmm14=int6464#15,<xmm2=int6464#3
11186# asm 2: pand <xmm14=%xmm14,<xmm2=%xmm2
11187pand %xmm14,%xmm2
11188
11189# qhasm: xmm14 ^= xmm13
11190# asm 1: pxor <xmm13=int6464#14,<xmm14=int6464#15
11191# asm 2: pxor <xmm13=%xmm13,<xmm14=%xmm14
11192pxor %xmm13,%xmm14
11193
11194# qhasm: xmm14 &= xmm6
11195# asm 1: pand <xmm6=int6464#4,<xmm14=int6464#15
11196# asm 2: pand <xmm6=%xmm3,<xmm14=%xmm14
11197pand %xmm3,%xmm14
11198
11199# qhasm: xmm13 &= xmm7
11200# asm 1: pand <xmm7=int6464#6,<xmm13=int6464#14
11201# asm 2: pand <xmm7=%xmm5,<xmm13=%xmm13
11202pand %xmm5,%xmm13
11203
11204# qhasm: xmm14 ^= xmm13
11205# asm 1: pxor <xmm13=int6464#14,<xmm14=int6464#15
11206# asm 2: pxor <xmm13=%xmm13,<xmm14=%xmm14
11207pxor %xmm13,%xmm14
11208
11209# qhasm: xmm13 ^= xmm2
11210# asm 1: pxor <xmm2=int6464#3,<xmm13=int6464#14
11211# asm 2: pxor <xmm2=%xmm2,<xmm13=%xmm13
11212pxor %xmm2,%xmm13
11213
11214# qhasm: xmm4 ^= xmm8
11215# asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#1
11216# asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm0
11217pxor %xmm8,%xmm0
11218
11219# qhasm: xmm0 ^= xmm11
11220# asm 1: pxor <xmm11=int6464#12,<xmm0=int6464#2
11221# asm 2: pxor <xmm11=%xmm11,<xmm0=%xmm1
11222pxor %xmm11,%xmm1
11223
11224# qhasm: xmm7 ^= xmm5
11225# asm 1: pxor <xmm5=int6464#8,<xmm7=int6464#6
11226# asm 2: pxor <xmm5=%xmm7,<xmm7=%xmm5
11227pxor %xmm7,%xmm5
11228
11229# qhasm: xmm6 ^= xmm1
11230# asm 1: pxor <xmm1=int6464#5,<xmm6=int6464#4
11231# asm 2: pxor <xmm1=%xmm4,<xmm6=%xmm3
11232pxor %xmm4,%xmm3
11233
11234# qhasm: xmm3 = xmm7
11235# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
11236# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
11237movdqa %xmm5,%xmm2
11238
11239# qhasm: xmm3 ^= xmm6
11240# asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3
11241# asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2
11242pxor %xmm3,%xmm2
11243
11244# qhasm: xmm3 &= xmm4
11245# asm 1: pand <xmm4=int6464#1,<xmm3=int6464#3
11246# asm 2: pand <xmm4=%xmm0,<xmm3=%xmm2
11247pand %xmm0,%xmm2
11248
11249# qhasm: xmm4 ^= xmm0
11250# asm 1: pxor <xmm0=int6464#2,<xmm4=int6464#1
11251# asm 2: pxor <xmm0=%xmm1,<xmm4=%xmm0
11252pxor %xmm1,%xmm0
11253
11254# qhasm: xmm4 &= xmm6
11255# asm 1: pand <xmm6=int6464#4,<xmm4=int6464#1
11256# asm 2: pand <xmm6=%xmm3,<xmm4=%xmm0
11257pand %xmm3,%xmm0
11258
11259# qhasm: xmm0 &= xmm7
11260# asm 1: pand <xmm7=int6464#6,<xmm0=int6464#2
11261# asm 2: pand <xmm7=%xmm5,<xmm0=%xmm1
11262pand %xmm5,%xmm1
11263
11264# qhasm: xmm0 ^= xmm4
11265# asm 1: pxor <xmm4=int6464#1,<xmm0=int6464#2
11266# asm 2: pxor <xmm4=%xmm0,<xmm0=%xmm1
11267pxor %xmm0,%xmm1
11268
11269# qhasm: xmm4 ^= xmm3
11270# asm 1: pxor <xmm3=int6464#3,<xmm4=int6464#1
11271# asm 2: pxor <xmm3=%xmm2,<xmm4=%xmm0
11272pxor %xmm2,%xmm0
11273
11274# qhasm: xmm2 = xmm5
11275# asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3
11276# asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2
11277movdqa %xmm7,%xmm2
11278
11279# qhasm: xmm2 ^= xmm1
11280# asm 1: pxor <xmm1=int6464#5,<xmm2=int6464#3
11281# asm 2: pxor <xmm1=%xmm4,<xmm2=%xmm2
11282pxor %xmm4,%xmm2
11283
11284# qhasm: xmm2 &= xmm8
11285# asm 1: pand <xmm8=int6464#9,<xmm2=int6464#3
11286# asm 2: pand <xmm8=%xmm8,<xmm2=%xmm2
11287pand %xmm8,%xmm2
11288
11289# qhasm: xmm8 ^= xmm11
11290# asm 1: pxor <xmm11=int6464#12,<xmm8=int6464#9
11291# asm 2: pxor <xmm11=%xmm11,<xmm8=%xmm8
11292pxor %xmm11,%xmm8
11293
11294# qhasm: xmm8 &= xmm1
11295# asm 1: pand <xmm1=int6464#5,<xmm8=int6464#9
11296# asm 2: pand <xmm1=%xmm4,<xmm8=%xmm8
11297pand %xmm4,%xmm8
11298
11299# qhasm: xmm11 &= xmm5
11300# asm 1: pand <xmm5=int6464#8,<xmm11=int6464#12
11301# asm 2: pand <xmm5=%xmm7,<xmm11=%xmm11
11302pand %xmm7,%xmm11
11303
11304# qhasm: xmm8 ^= xmm11
11305# asm 1: pxor <xmm11=int6464#12,<xmm8=int6464#9
11306# asm 2: pxor <xmm11=%xmm11,<xmm8=%xmm8
11307pxor %xmm11,%xmm8
11308
11309# qhasm: xmm11 ^= xmm2
11310# asm 1: pxor <xmm2=int6464#3,<xmm11=int6464#12
11311# asm 2: pxor <xmm2=%xmm2,<xmm11=%xmm11
11312pxor %xmm2,%xmm11
11313
11314# qhasm: xmm14 ^= xmm4
11315# asm 1: pxor <xmm4=int6464#1,<xmm14=int6464#15
11316# asm 2: pxor <xmm4=%xmm0,<xmm14=%xmm14
11317pxor %xmm0,%xmm14
11318
11319# qhasm: xmm8 ^= xmm4
11320# asm 1: pxor <xmm4=int6464#1,<xmm8=int6464#9
11321# asm 2: pxor <xmm4=%xmm0,<xmm8=%xmm8
11322pxor %xmm0,%xmm8
11323
11324# qhasm: xmm13 ^= xmm0
11325# asm 1: pxor <xmm0=int6464#2,<xmm13=int6464#14
11326# asm 2: pxor <xmm0=%xmm1,<xmm13=%xmm13
11327pxor %xmm1,%xmm13
11328
11329# qhasm: xmm11 ^= xmm0
11330# asm 1: pxor <xmm0=int6464#2,<xmm11=int6464#12
11331# asm 2: pxor <xmm0=%xmm1,<xmm11=%xmm11
11332pxor %xmm1,%xmm11
11333
11334# qhasm: xmm4 = xmm15
11335# asm 1: movdqa <xmm15=int6464#16,>xmm4=int6464#1
11336# asm 2: movdqa <xmm15=%xmm15,>xmm4=%xmm0
11337movdqa %xmm15,%xmm0
11338
11339# qhasm: xmm0 = xmm9
11340# asm 1: movdqa <xmm9=int6464#10,>xmm0=int6464#2
11341# asm 2: movdqa <xmm9=%xmm9,>xmm0=%xmm1
11342movdqa %xmm9,%xmm1
11343
11344# qhasm: xmm4 ^= xmm12
11345# asm 1: pxor <xmm12=int6464#13,<xmm4=int6464#1
11346# asm 2: pxor <xmm12=%xmm12,<xmm4=%xmm0
11347pxor %xmm12,%xmm0
11348
11349# qhasm: xmm0 ^= xmm10
11350# asm 1: pxor <xmm10=int6464#11,<xmm0=int6464#2
11351# asm 2: pxor <xmm10=%xmm10,<xmm0=%xmm1
11352pxor %xmm10,%xmm1
11353
11354# qhasm: xmm3 = xmm7
11355# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
11356# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
11357movdqa %xmm5,%xmm2
11358
11359# qhasm: xmm3 ^= xmm6
11360# asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3
11361# asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2
11362pxor %xmm3,%xmm2
11363
11364# qhasm: xmm3 &= xmm4
11365# asm 1: pand <xmm4=int6464#1,<xmm3=int6464#3
11366# asm 2: pand <xmm4=%xmm0,<xmm3=%xmm2
11367pand %xmm0,%xmm2
11368
11369# qhasm: xmm4 ^= xmm0
11370# asm 1: pxor <xmm0=int6464#2,<xmm4=int6464#1
11371# asm 2: pxor <xmm0=%xmm1,<xmm4=%xmm0
11372pxor %xmm1,%xmm0
11373
11374# qhasm: xmm4 &= xmm6
11375# asm 1: pand <xmm6=int6464#4,<xmm4=int6464#1
11376# asm 2: pand <xmm6=%xmm3,<xmm4=%xmm0
11377pand %xmm3,%xmm0
11378
11379# qhasm: xmm0 &= xmm7
11380# asm 1: pand <xmm7=int6464#6,<xmm0=int6464#2
11381# asm 2: pand <xmm7=%xmm5,<xmm0=%xmm1
11382pand %xmm5,%xmm1
11383
11384# qhasm: xmm0 ^= xmm4
11385# asm 1: pxor <xmm4=int6464#1,<xmm0=int6464#2
11386# asm 2: pxor <xmm4=%xmm0,<xmm0=%xmm1
11387pxor %xmm0,%xmm1
11388
11389# qhasm: xmm4 ^= xmm3
11390# asm 1: pxor <xmm3=int6464#3,<xmm4=int6464#1
11391# asm 2: pxor <xmm3=%xmm2,<xmm4=%xmm0
11392pxor %xmm2,%xmm0
11393
11394# qhasm: xmm2 = xmm5
11395# asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3
11396# asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2
11397movdqa %xmm7,%xmm2
11398
11399# qhasm: xmm2 ^= xmm1
11400# asm 1: pxor <xmm1=int6464#5,<xmm2=int6464#3
11401# asm 2: pxor <xmm1=%xmm4,<xmm2=%xmm2
11402pxor %xmm4,%xmm2
11403
11404# qhasm: xmm2 &= xmm12
11405# asm 1: pand <xmm12=int6464#13,<xmm2=int6464#3
11406# asm 2: pand <xmm12=%xmm12,<xmm2=%xmm2
11407pand %xmm12,%xmm2
11408
11409# qhasm: xmm12 ^= xmm10
11410# asm 1: pxor <xmm10=int6464#11,<xmm12=int6464#13
11411# asm 2: pxor <xmm10=%xmm10,<xmm12=%xmm12
11412pxor %xmm10,%xmm12
11413
11414# qhasm: xmm12 &= xmm1
11415# asm 1: pand <xmm1=int6464#5,<xmm12=int6464#13
11416# asm 2: pand <xmm1=%xmm4,<xmm12=%xmm12
11417pand %xmm4,%xmm12
11418
11419# qhasm: xmm10 &= xmm5
11420# asm 1: pand <xmm5=int6464#8,<xmm10=int6464#11
11421# asm 2: pand <xmm5=%xmm7,<xmm10=%xmm10
11422pand %xmm7,%xmm10
11423
11424# qhasm: xmm12 ^= xmm10
11425# asm 1: pxor <xmm10=int6464#11,<xmm12=int6464#13
11426# asm 2: pxor <xmm10=%xmm10,<xmm12=%xmm12
11427pxor %xmm10,%xmm12
11428
11429# qhasm: xmm10 ^= xmm2
11430# asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#11
11431# asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm10
11432pxor %xmm2,%xmm10
11433
11434# qhasm: xmm7 ^= xmm5
11435# asm 1: pxor <xmm5=int6464#8,<xmm7=int6464#6
11436# asm 2: pxor <xmm5=%xmm7,<xmm7=%xmm5
11437pxor %xmm7,%xmm5
11438
11439# qhasm: xmm6 ^= xmm1
11440# asm 1: pxor <xmm1=int6464#5,<xmm6=int6464#4
11441# asm 2: pxor <xmm1=%xmm4,<xmm6=%xmm3
11442pxor %xmm4,%xmm3
11443
11444# qhasm: xmm3 = xmm7
11445# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
11446# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
11447movdqa %xmm5,%xmm2
11448
11449# qhasm: xmm3 ^= xmm6
11450# asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3
11451# asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2
11452pxor %xmm3,%xmm2
11453
11454# qhasm: xmm3 &= xmm15
11455# asm 1: pand <xmm15=int6464#16,<xmm3=int6464#3
11456# asm 2: pand <xmm15=%xmm15,<xmm3=%xmm2
11457pand %xmm15,%xmm2
11458
11459# qhasm: xmm15 ^= xmm9
11460# asm 1: pxor <xmm9=int6464#10,<xmm15=int6464#16
11461# asm 2: pxor <xmm9=%xmm9,<xmm15=%xmm15
11462pxor %xmm9,%xmm15
11463
11464# qhasm: xmm15 &= xmm6
11465# asm 1: pand <xmm6=int6464#4,<xmm15=int6464#16
11466# asm 2: pand <xmm6=%xmm3,<xmm15=%xmm15
11467pand %xmm3,%xmm15
11468
11469# qhasm: xmm9 &= xmm7
11470# asm 1: pand <xmm7=int6464#6,<xmm9=int6464#10
11471# asm 2: pand <xmm7=%xmm5,<xmm9=%xmm9
11472pand %xmm5,%xmm9
11473
11474# qhasm: xmm15 ^= xmm9
11475# asm 1: pxor <xmm9=int6464#10,<xmm15=int6464#16
11476# asm 2: pxor <xmm9=%xmm9,<xmm15=%xmm15
11477pxor %xmm9,%xmm15
11478
11479# qhasm: xmm9 ^= xmm3
11480# asm 1: pxor <xmm3=int6464#3,<xmm9=int6464#10
11481# asm 2: pxor <xmm3=%xmm2,<xmm9=%xmm9
11482pxor %xmm2,%xmm9
11483
11484# qhasm: xmm15 ^= xmm4
11485# asm 1: pxor <xmm4=int6464#1,<xmm15=int6464#16
11486# asm 2: pxor <xmm4=%xmm0,<xmm15=%xmm15
11487pxor %xmm0,%xmm15
11488
11489# qhasm: xmm12 ^= xmm4
11490# asm 1: pxor <xmm4=int6464#1,<xmm12=int6464#13
11491# asm 2: pxor <xmm4=%xmm0,<xmm12=%xmm12
11492pxor %xmm0,%xmm12
11493
11494# qhasm: xmm9 ^= xmm0
11495# asm 1: pxor <xmm0=int6464#2,<xmm9=int6464#10
11496# asm 2: pxor <xmm0=%xmm1,<xmm9=%xmm9
11497pxor %xmm1,%xmm9
11498
11499# qhasm: xmm10 ^= xmm0
11500# asm 1: pxor <xmm0=int6464#2,<xmm10=int6464#11
11501# asm 2: pxor <xmm0=%xmm1,<xmm10=%xmm10
11502pxor %xmm1,%xmm10
11503
11504# qhasm: xmm15 ^= xmm8
11505# asm 1: pxor <xmm8=int6464#9,<xmm15=int6464#16
11506# asm 2: pxor <xmm8=%xmm8,<xmm15=%xmm15
11507pxor %xmm8,%xmm15
11508
11509# qhasm: xmm9 ^= xmm14
11510# asm 1: pxor <xmm14=int6464#15,<xmm9=int6464#10
11511# asm 2: pxor <xmm14=%xmm14,<xmm9=%xmm9
11512pxor %xmm14,%xmm9
11513
11514# qhasm: xmm12 ^= xmm15
11515# asm 1: pxor <xmm15=int6464#16,<xmm12=int6464#13
11516# asm 2: pxor <xmm15=%xmm15,<xmm12=%xmm12
11517pxor %xmm15,%xmm12
11518
11519# qhasm: xmm14 ^= xmm8
11520# asm 1: pxor <xmm8=int6464#9,<xmm14=int6464#15
11521# asm 2: pxor <xmm8=%xmm8,<xmm14=%xmm14
11522pxor %xmm8,%xmm14
11523
11524# qhasm: xmm8 ^= xmm9
11525# asm 1: pxor <xmm9=int6464#10,<xmm8=int6464#9
11526# asm 2: pxor <xmm9=%xmm9,<xmm8=%xmm8
11527pxor %xmm9,%xmm8
11528
11529# qhasm: xmm9 ^= xmm13
11530# asm 1: pxor <xmm13=int6464#14,<xmm9=int6464#10
11531# asm 2: pxor <xmm13=%xmm13,<xmm9=%xmm9
11532pxor %xmm13,%xmm9
11533
11534# qhasm: xmm13 ^= xmm10
11535# asm 1: pxor <xmm10=int6464#11,<xmm13=int6464#14
11536# asm 2: pxor <xmm10=%xmm10,<xmm13=%xmm13
11537pxor %xmm10,%xmm13
11538
11539# qhasm: xmm12 ^= xmm13
11540# asm 1: pxor <xmm13=int6464#14,<xmm12=int6464#13
11541# asm 2: pxor <xmm13=%xmm13,<xmm12=%xmm12
11542pxor %xmm13,%xmm12
11543
11544# qhasm: xmm10 ^= xmm11
11545# asm 1: pxor <xmm11=int6464#12,<xmm10=int6464#11
11546# asm 2: pxor <xmm11=%xmm11,<xmm10=%xmm10
11547pxor %xmm11,%xmm10
11548
11549# qhasm: xmm11 ^= xmm13
11550# asm 1: pxor <xmm13=int6464#14,<xmm11=int6464#12
11551# asm 2: pxor <xmm13=%xmm13,<xmm11=%xmm11
11552pxor %xmm13,%xmm11
11553
11554# qhasm: xmm14 ^= xmm11
11555# asm 1: pxor <xmm11=int6464#12,<xmm14=int6464#15
11556# asm 2: pxor <xmm11=%xmm11,<xmm14=%xmm14
11557pxor %xmm11,%xmm14
11558
11559# qhasm: xmm8 ^= *(int128 *)(c + 1280)
11560# asm 1: pxor 1280(<c=int64#5),<xmm8=int6464#9
11561# asm 2: pxor 1280(<c=%r8),<xmm8=%xmm8
11562pxor 1280(%r8),%xmm8
11563
11564# qhasm: xmm9 ^= *(int128 *)(c + 1296)
11565# asm 1: pxor 1296(<c=int64#5),<xmm9=int6464#10
11566# asm 2: pxor 1296(<c=%r8),<xmm9=%xmm9
11567pxor 1296(%r8),%xmm9
11568
11569# qhasm: xmm12 ^= *(int128 *)(c + 1312)
11570# asm 1: pxor 1312(<c=int64#5),<xmm12=int6464#13
11571# asm 2: pxor 1312(<c=%r8),<xmm12=%xmm12
11572pxor 1312(%r8),%xmm12
11573
11574# qhasm: xmm14 ^= *(int128 *)(c + 1328)
11575# asm 1: pxor 1328(<c=int64#5),<xmm14=int6464#15
11576# asm 2: pxor 1328(<c=%r8),<xmm14=%xmm14
11577pxor 1328(%r8),%xmm14
11578
11579# qhasm: xmm11 ^= *(int128 *)(c + 1344)
11580# asm 1: pxor 1344(<c=int64#5),<xmm11=int6464#12
11581# asm 2: pxor 1344(<c=%r8),<xmm11=%xmm11
11582pxor 1344(%r8),%xmm11
11583
11584# qhasm: xmm15 ^= *(int128 *)(c + 1360)
11585# asm 1: pxor 1360(<c=int64#5),<xmm15=int6464#16
11586# asm 2: pxor 1360(<c=%r8),<xmm15=%xmm15
11587pxor 1360(%r8),%xmm15
11588
11589# qhasm: xmm10 ^= *(int128 *)(c + 1376)
11590# asm 1: pxor 1376(<c=int64#5),<xmm10=int6464#11
11591# asm 2: pxor 1376(<c=%r8),<xmm10=%xmm10
11592pxor 1376(%r8),%xmm10
11593
11594# qhasm: xmm13 ^= *(int128 *)(c + 1392)
11595# asm 1: pxor 1392(<c=int64#5),<xmm13=int6464#14
11596# asm 2: pxor 1392(<c=%r8),<xmm13=%xmm13
11597pxor 1392(%r8),%xmm13
11598
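# note: the eight pxor loads above fold in the final bitsliced round
# key (byte offset 1280 = 10*128 in c, i.e. round 10 of AES-128; each
# round key occupies 8*16 = 128 bytes in bitsliced form).

# note: the movdqa/psrlq/pxor/pand/pxor/psllq/pxor groups that follow
# appear to undo the bitslice transpose. Each group is one swapmove
# step,
#   t = ((a >> n) ^ b) & mask;  b ^= t;  a ^= t << n
# applied with (n, mask) = (1, BS0), (2, BS1), (4, BS2) from the
# .data section, transposing the 8x8 bit matrix back to byte order.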
11599# qhasm: xmm0 = xmm10
11600# asm 1: movdqa <xmm10=int6464#11,>xmm0=int6464#1
11601# asm 2: movdqa <xmm10=%xmm10,>xmm0=%xmm0
11602movdqa %xmm10,%xmm0
11603
11604# qhasm: uint6464 xmm0 >>= 1
11605# asm 1: psrlq $1,<xmm0=int6464#1
11606# asm 2: psrlq $1,<xmm0=%xmm0
11607psrlq $1,%xmm0
11608
11609# qhasm: xmm0 ^= xmm13
11610# asm 1: pxor <xmm13=int6464#14,<xmm0=int6464#1
11611# asm 2: pxor <xmm13=%xmm13,<xmm0=%xmm0
11612pxor %xmm13,%xmm0
11613
11614# qhasm: xmm0 &= BS0
11615# asm 1: pand BS0,<xmm0=int6464#1
11616# asm 2: pand BS0,<xmm0=%xmm0
11617pand BS0,%xmm0
11618
11619# qhasm: xmm13 ^= xmm0
11620# asm 1: pxor <xmm0=int6464#1,<xmm13=int6464#14
11621# asm 2: pxor <xmm0=%xmm0,<xmm13=%xmm13
11622pxor %xmm0,%xmm13
11623
11624# qhasm: uint6464 xmm0 <<= 1
11625# asm 1: psllq $1,<xmm0=int6464#1
11626# asm 2: psllq $1,<xmm0=%xmm0
11627psllq $1,%xmm0
11628
11629# qhasm: xmm10 ^= xmm0
11630# asm 1: pxor <xmm0=int6464#1,<xmm10=int6464#11
11631# asm 2: pxor <xmm0=%xmm0,<xmm10=%xmm10
11632pxor %xmm0,%xmm10
11633
11634# qhasm: xmm0 = xmm11
11635# asm 1: movdqa <xmm11=int6464#12,>xmm0=int6464#1
11636# asm 2: movdqa <xmm11=%xmm11,>xmm0=%xmm0
11637movdqa %xmm11,%xmm0
11638
11639# qhasm: uint6464 xmm0 >>= 1
11640# asm 1: psrlq $1,<xmm0=int6464#1
11641# asm 2: psrlq $1,<xmm0=%xmm0
11642psrlq $1,%xmm0
11643
11644# qhasm: xmm0 ^= xmm15
11645# asm 1: pxor <xmm15=int6464#16,<xmm0=int6464#1
11646# asm 2: pxor <xmm15=%xmm15,<xmm0=%xmm0
11647pxor %xmm15,%xmm0
11648
11649# qhasm: xmm0 &= BS0
11650# asm 1: pand BS0,<xmm0=int6464#1
11651# asm 2: pand BS0,<xmm0=%xmm0
11652pand BS0,%xmm0
11653
11654# qhasm: xmm15 ^= xmm0
11655# asm 1: pxor <xmm0=int6464#1,<xmm15=int6464#16
11656# asm 2: pxor <xmm0=%xmm0,<xmm15=%xmm15
11657pxor %xmm0,%xmm15
11658
11659# qhasm: uint6464 xmm0 <<= 1
11660# asm 1: psllq $1,<xmm0=int6464#1
11661# asm 2: psllq $1,<xmm0=%xmm0
11662psllq $1,%xmm0
11663
11664# qhasm: xmm11 ^= xmm0
11665# asm 1: pxor <xmm0=int6464#1,<xmm11=int6464#12
11666# asm 2: pxor <xmm0=%xmm0,<xmm11=%xmm11
11667pxor %xmm0,%xmm11
11668
11669# qhasm: xmm0 = xmm12
11670# asm 1: movdqa <xmm12=int6464#13,>xmm0=int6464#1
11671# asm 2: movdqa <xmm12=%xmm12,>xmm0=%xmm0
11672movdqa %xmm12,%xmm0
11673
11674# qhasm: uint6464 xmm0 >>= 1
11675# asm 1: psrlq $1,<xmm0=int6464#1
11676# asm 2: psrlq $1,<xmm0=%xmm0
11677psrlq $1,%xmm0
11678
11679# qhasm: xmm0 ^= xmm14
11680# asm 1: pxor <xmm14=int6464#15,<xmm0=int6464#1
11681# asm 2: pxor <xmm14=%xmm14,<xmm0=%xmm0
11682pxor %xmm14,%xmm0
11683
11684# qhasm: xmm0 &= BS0
11685# asm 1: pand BS0,<xmm0=int6464#1
11686# asm 2: pand BS0,<xmm0=%xmm0
11687pand BS0,%xmm0
11688
11689# qhasm: xmm14 ^= xmm0
11690# asm 1: pxor <xmm0=int6464#1,<xmm14=int6464#15
11691# asm 2: pxor <xmm0=%xmm0,<xmm14=%xmm14
11692pxor %xmm0,%xmm14
11693
11694# qhasm: uint6464 xmm0 <<= 1
11695# asm 1: psllq $1,<xmm0=int6464#1
11696# asm 2: psllq $1,<xmm0=%xmm0
11697psllq $1,%xmm0
11698
11699# qhasm: xmm12 ^= xmm0
11700# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13
11701# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12
11702pxor %xmm0,%xmm12
11703
11704# qhasm: xmm0 = xmm8
11705# asm 1: movdqa <xmm8=int6464#9,>xmm0=int6464#1
11706# asm 2: movdqa <xmm8=%xmm8,>xmm0=%xmm0
11707movdqa %xmm8,%xmm0
11708
11709# qhasm: uint6464 xmm0 >>= 1
11710# asm 1: psrlq $1,<xmm0=int6464#1
11711# asm 2: psrlq $1,<xmm0=%xmm0
11712psrlq $1,%xmm0
11713
11714# qhasm: xmm0 ^= xmm9
11715# asm 1: pxor <xmm9=int6464#10,<xmm0=int6464#1
11716# asm 2: pxor <xmm9=%xmm9,<xmm0=%xmm0
11717pxor %xmm9,%xmm0
11718
11719# qhasm: xmm0 &= BS0
11720# asm 1: pand BS0,<xmm0=int6464#1
11721# asm 2: pand BS0,<xmm0=%xmm0
11722pand BS0,%xmm0
11723
11724# qhasm: xmm9 ^= xmm0
11725# asm 1: pxor <xmm0=int6464#1,<xmm9=int6464#10
11726# asm 2: pxor <xmm0=%xmm0,<xmm9=%xmm9
11727pxor %xmm0,%xmm9
11728
11729# qhasm: uint6464 xmm0 <<= 1
11730# asm 1: psllq $1,<xmm0=int6464#1
11731# asm 2: psllq $1,<xmm0=%xmm0
11732psllq $1,%xmm0
11733
11734# qhasm: xmm8 ^= xmm0
11735# asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9
11736# asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8
11737pxor %xmm0,%xmm8
11738
11739# qhasm: xmm0 = xmm15
11740# asm 1: movdqa <xmm15=int6464#16,>xmm0=int6464#1
11741# asm 2: movdqa <xmm15=%xmm15,>xmm0=%xmm0
11742movdqa %xmm15,%xmm0
11743
11744# qhasm: uint6464 xmm0 >>= 2
11745# asm 1: psrlq $2,<xmm0=int6464#1
11746# asm 2: psrlq $2,<xmm0=%xmm0
11747psrlq $2,%xmm0
11748
11749# qhasm: xmm0 ^= xmm13
11750# asm 1: pxor <xmm13=int6464#14,<xmm0=int6464#1
11751# asm 2: pxor <xmm13=%xmm13,<xmm0=%xmm0
11752pxor %xmm13,%xmm0
11753
11754# qhasm: xmm0 &= BS1
11755# asm 1: pand BS1,<xmm0=int6464#1
11756# asm 2: pand BS1,<xmm0=%xmm0
11757pand BS1,%xmm0
11758
11759# qhasm: xmm13 ^= xmm0
11760# asm 1: pxor <xmm0=int6464#1,<xmm13=int6464#14
11761# asm 2: pxor <xmm0=%xmm0,<xmm13=%xmm13
11762pxor %xmm0,%xmm13
11763
11764# qhasm: uint6464 xmm0 <<= 2
11765# asm 1: psllq $2,<xmm0=int6464#1
11766# asm 2: psllq $2,<xmm0=%xmm0
11767psllq $2,%xmm0
11768
11769# qhasm: xmm15 ^= xmm0
11770# asm 1: pxor <xmm0=int6464#1,<xmm15=int6464#16
11771# asm 2: pxor <xmm0=%xmm0,<xmm15=%xmm15
11772pxor %xmm0,%xmm15
11773
11774# qhasm: xmm0 = xmm11
11775# asm 1: movdqa <xmm11=int6464#12,>xmm0=int6464#1
11776# asm 2: movdqa <xmm11=%xmm11,>xmm0=%xmm0
11777movdqa %xmm11,%xmm0
11778
11779# qhasm: uint6464 xmm0 >>= 2
11780# asm 1: psrlq $2,<xmm0=int6464#1
11781# asm 2: psrlq $2,<xmm0=%xmm0
11782psrlq $2,%xmm0
11783
11784# qhasm: xmm0 ^= xmm10
11785# asm 1: pxor <xmm10=int6464#11,<xmm0=int6464#1
11786# asm 2: pxor <xmm10=%xmm10,<xmm0=%xmm0
11787pxor %xmm10,%xmm0
11788
11789# qhasm: xmm0 &= BS1
11790# asm 1: pand BS1,<xmm0=int6464#1
11791# asm 2: pand BS1,<xmm0=%xmm0
11792pand BS1,%xmm0
11793
11794# qhasm: xmm10 ^= xmm0
11795# asm 1: pxor <xmm0=int6464#1,<xmm10=int6464#11
11796# asm 2: pxor <xmm0=%xmm0,<xmm10=%xmm10
11797pxor %xmm0,%xmm10
11798
11799# qhasm: uint6464 xmm0 <<= 2
11800# asm 1: psllq $2,<xmm0=int6464#1
11801# asm 2: psllq $2,<xmm0=%xmm0
11802psllq $2,%xmm0
11803
11804# qhasm: xmm11 ^= xmm0
11805# asm 1: pxor <xmm0=int6464#1,<xmm11=int6464#12
11806# asm 2: pxor <xmm0=%xmm0,<xmm11=%xmm11
11807pxor %xmm0,%xmm11
11808
11809# qhasm: xmm0 = xmm9
11810# asm 1: movdqa <xmm9=int6464#10,>xmm0=int6464#1
11811# asm 2: movdqa <xmm9=%xmm9,>xmm0=%xmm0
11812movdqa %xmm9,%xmm0
11813
11814# qhasm: uint6464 xmm0 >>= 2
11815# asm 1: psrlq $2,<xmm0=int6464#1
11816# asm 2: psrlq $2,<xmm0=%xmm0
11817psrlq $2,%xmm0
11818
11819# qhasm: xmm0 ^= xmm14
11820# asm 1: pxor <xmm14=int6464#15,<xmm0=int6464#1
11821# asm 2: pxor <xmm14=%xmm14,<xmm0=%xmm0
11822pxor %xmm14,%xmm0
11823
11824# qhasm: xmm0 &= BS1
11825# asm 1: pand BS1,<xmm0=int6464#1
11826# asm 2: pand BS1,<xmm0=%xmm0
11827pand BS1,%xmm0
11828
11829# qhasm: xmm14 ^= xmm0
11830# asm 1: pxor <xmm0=int6464#1,<xmm14=int6464#15
11831# asm 2: pxor <xmm0=%xmm0,<xmm14=%xmm14
11832pxor %xmm0,%xmm14
11833
11834# qhasm: uint6464 xmm0 <<= 2
11835# asm 1: psllq $2,<xmm0=int6464#1
11836# asm 2: psllq $2,<xmm0=%xmm0
11837psllq $2,%xmm0
11838
11839# qhasm: xmm9 ^= xmm0
11840# asm 1: pxor <xmm0=int6464#1,<xmm9=int6464#10
11841# asm 2: pxor <xmm0=%xmm0,<xmm9=%xmm9
11842pxor %xmm0,%xmm9
11843
11844# qhasm: xmm0 = xmm8
11845# asm 1: movdqa <xmm8=int6464#9,>xmm0=int6464#1
11846# asm 2: movdqa <xmm8=%xmm8,>xmm0=%xmm0
11847movdqa %xmm8,%xmm0
11848
11849# qhasm: uint6464 xmm0 >>= 2
11850# asm 1: psrlq $2,<xmm0=int6464#1
11851# asm 2: psrlq $2,<xmm0=%xmm0
11852psrlq $2,%xmm0
11853
11854# qhasm: xmm0 ^= xmm12
11855# asm 1: pxor <xmm12=int6464#13,<xmm0=int6464#1
11856# asm 2: pxor <xmm12=%xmm12,<xmm0=%xmm0
11857pxor %xmm12,%xmm0
11858
11859# qhasm: xmm0 &= BS1
11860# asm 1: pand BS1,<xmm0=int6464#1
11861# asm 2: pand BS1,<xmm0=%xmm0
11862pand BS1,%xmm0
11863
11864# qhasm: xmm12 ^= xmm0
11865# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13
11866# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12
11867pxor %xmm0,%xmm12
11868
11869# qhasm: uint6464 xmm0 <<= 2
11870# asm 1: psllq $2,<xmm0=int6464#1
11871# asm 2: psllq $2,<xmm0=%xmm0
11872psllq $2,%xmm0
11873
11874# qhasm: xmm8 ^= xmm0
11875# asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9
11876# asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8
11877pxor %xmm0,%xmm8
11878
11879# qhasm: xmm0 = xmm14
11880# asm 1: movdqa <xmm14=int6464#15,>xmm0=int6464#1
11881# asm 2: movdqa <xmm14=%xmm14,>xmm0=%xmm0
11882movdqa %xmm14,%xmm0
11883
11884# qhasm: uint6464 xmm0 >>= 4
11885# asm 1: psrlq $4,<xmm0=int6464#1
11886# asm 2: psrlq $4,<xmm0=%xmm0
11887psrlq $4,%xmm0
11888
11889# qhasm: xmm0 ^= xmm13
11890# asm 1: pxor <xmm13=int6464#14,<xmm0=int6464#1
11891# asm 2: pxor <xmm13=%xmm13,<xmm0=%xmm0
11892pxor %xmm13,%xmm0
11893
11894# qhasm: xmm0 &= BS2
11895# asm 1: pand BS2,<xmm0=int6464#1
11896# asm 2: pand BS2,<xmm0=%xmm0
11897pand BS2,%xmm0
11898
11899# qhasm: xmm13 ^= xmm0
11900# asm 1: pxor <xmm0=int6464#1,<xmm13=int6464#14
11901# asm 2: pxor <xmm0=%xmm0,<xmm13=%xmm13
11902pxor %xmm0,%xmm13
11903
11904# qhasm: uint6464 xmm0 <<= 4
11905# asm 1: psllq $4,<xmm0=int6464#1
11906# asm 2: psllq $4,<xmm0=%xmm0
11907psllq $4,%xmm0
11908
11909# qhasm: xmm14 ^= xmm0
11910# asm 1: pxor <xmm0=int6464#1,<xmm14=int6464#15
11911# asm 2: pxor <xmm0=%xmm0,<xmm14=%xmm14
11912pxor %xmm0,%xmm14
11913
11914# qhasm: xmm0 = xmm12
11915# asm 1: movdqa <xmm12=int6464#13,>xmm0=int6464#1
11916# asm 2: movdqa <xmm12=%xmm12,>xmm0=%xmm0
11917movdqa %xmm12,%xmm0
11918
11919# qhasm: uint6464 xmm0 >>= 4
11920# asm 1: psrlq $4,<xmm0=int6464#1
11921# asm 2: psrlq $4,<xmm0=%xmm0
11922psrlq $4,%xmm0
11923
11924# qhasm: xmm0 ^= xmm10
11925# asm 1: pxor <xmm10=int6464#11,<xmm0=int6464#1
11926# asm 2: pxor <xmm10=%xmm10,<xmm0=%xmm0
11927pxor %xmm10,%xmm0
11928
11929# qhasm: xmm0 &= BS2
11930# asm 1: pand BS2,<xmm0=int6464#1
11931# asm 2: pand BS2,<xmm0=%xmm0
11932pand BS2,%xmm0
11933
11934# qhasm: xmm10 ^= xmm0
11935# asm 1: pxor <xmm0=int6464#1,<xmm10=int6464#11
11936# asm 2: pxor <xmm0=%xmm0,<xmm10=%xmm10
11937pxor %xmm0,%xmm10
11938
11939# qhasm: uint6464 xmm0 <<= 4
11940# asm 1: psllq $4,<xmm0=int6464#1
11941# asm 2: psllq $4,<xmm0=%xmm0
11942psllq $4,%xmm0
11943
11944# qhasm: xmm12 ^= xmm0
11945# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13
11946# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12
11947pxor %xmm0,%xmm12
11948
11949# qhasm: xmm0 = xmm9
11950# asm 1: movdqa <xmm9=int6464#10,>xmm0=int6464#1
11951# asm 2: movdqa <xmm9=%xmm9,>xmm0=%xmm0
11952movdqa %xmm9,%xmm0
11953
11954# qhasm: uint6464 xmm0 >>= 4
11955# asm 1: psrlq $4,<xmm0=int6464#1
11956# asm 2: psrlq $4,<xmm0=%xmm0
11957psrlq $4,%xmm0
11958
11959# qhasm: xmm0 ^= xmm15
11960# asm 1: pxor <xmm15=int6464#16,<xmm0=int6464#1
11961# asm 2: pxor <xmm15=%xmm15,<xmm0=%xmm0
11962pxor %xmm15,%xmm0
11963
11964# qhasm: xmm0 &= BS2
11965# asm 1: pand BS2,<xmm0=int6464#1
11966# asm 2: pand BS2,<xmm0=%xmm0
11967pand BS2,%xmm0
11968
11969# qhasm: xmm15 ^= xmm0
11970# asm 1: pxor <xmm0=int6464#1,<xmm15=int6464#16
11971# asm 2: pxor <xmm0=%xmm0,<xmm15=%xmm15
11972pxor %xmm0,%xmm15
11973
11974# qhasm: uint6464 xmm0 <<= 4
11975# asm 1: psllq $4,<xmm0=int6464#1
11976# asm 2: psllq $4,<xmm0=%xmm0
11977psllq $4,%xmm0
11978
11979# qhasm: xmm9 ^= xmm0
11980# asm 1: pxor <xmm0=int6464#1,<xmm9=int6464#10
11981# asm 2: pxor <xmm0=%xmm0,<xmm9=%xmm9
11982pxor %xmm0,%xmm9
11983
11984# qhasm: xmm0 = xmm8
11985# asm 1: movdqa <xmm8=int6464#9,>xmm0=int6464#1
11986# asm 2: movdqa <xmm8=%xmm8,>xmm0=%xmm0
11987movdqa %xmm8,%xmm0
11988
11989# qhasm: uint6464 xmm0 >>= 4
11990# asm 1: psrlq $4,<xmm0=int6464#1
11991# asm 2: psrlq $4,<xmm0=%xmm0
11992psrlq $4,%xmm0
11993
11994# qhasm: xmm0 ^= xmm11
11995# asm 1: pxor <xmm11=int6464#12,<xmm0=int6464#1
11996# asm 2: pxor <xmm11=%xmm11,<xmm0=%xmm0
11997pxor %xmm11,%xmm0
11998
11999# qhasm: xmm0 &= BS2
12000# asm 1: pand BS2,<xmm0=int6464#1
12001# asm 2: pand BS2,<xmm0=%xmm0
12002pand BS2,%xmm0
12003
12004# qhasm: xmm11 ^= xmm0
12005# asm 1: pxor <xmm0=int6464#1,<xmm11=int6464#12
12006# asm 2: pxor <xmm0=%xmm0,<xmm11=%xmm11
12007pxor %xmm0,%xmm11
12008
12009# qhasm: uint6464 xmm0 <<= 4
12010# asm 1: psllq $4,<xmm0=int6464#1
12011# asm 2: psllq $4,<xmm0=%xmm0
12012psllq $4,%xmm0
12013
12014# qhasm: xmm8 ^= xmm0
12015# asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9
12016# asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8
12017pxor %xmm0,%xmm8
12018
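# note: after the transpose the 128 bytes of keystream sit in
# xmm8, xmm9, xmm12, xmm14, xmm11, xmm15, xmm10, xmm13, in that
# block order (matching the store order used below).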
12019# qhasm: unsigned<? =? len-128
12020# asm 1: cmp $128,<len=int64#3
12021# asm 2: cmp $128,<len=%rdx
12022cmp $128,%rdx
12023# comment:fp stack unchanged by jump
12024
12025# qhasm: goto partial if unsigned<
12026jb ._partial
12027# comment:fp stack unchanged by jump
12028
12029# qhasm: goto full if =
12030je ._full
12031
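# note: three-way dispatch on the remaining length: fewer than 128
# bytes goes to ._partial, exactly 128 goes to ._full, and more than
# 128 falls through to the main path, which consumes 128 bytes and
# loops back to ._enc_block.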
12032# qhasm: tmp = *(uint32 *)(np + 12)
12033# asm 1: movl 12(<np=int64#4),>tmp=int64#6d
12034# asm 2: movl 12(<np=%rcx),>tmp=%r9d
12035movl 12(%rcx),%r9d
12036
12037# qhasm: (uint32) bswap tmp
12038# asm 1: bswap <tmp=int64#6d
12039# asm 2: bswap <tmp=%r9d
12040bswap %r9d
12041
12042# qhasm: tmp += 8
12043# asm 1: add $8,<tmp=int64#6
12044# asm 2: add $8,<tmp=%r9
12045add $8,%r9
12046
12047# qhasm: (uint32) bswap tmp
12048# asm 1: bswap <tmp=int64#6d
12049# asm 2: bswap <tmp=%r9d
12050bswap %r9d
12051
12052# qhasm: *(uint32 *)(np + 12) = tmp
12053# asm 1: movl <tmp=int64#6d,12(<np=int64#4)
12054# asm 2: movl <tmp=%r9d,12(<np=%rcx)
12055movl %r9d,12(%rcx)
12056
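# note: the 32-bit word at np+12 is the block counter, kept
# big-endian in the nonce; bswap converts it to native order, the
# add of 8 accounts for the eight 16-byte blocks encrypted per
# iteration, and the second bswap restores big-endian before the
# store.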
12057# qhasm: xmm8 ^= *(int128 *)(inp + 0)
12058# asm 1: pxor 0(<inp=int64#2),<xmm8=int6464#9
12059# asm 2: pxor 0(<inp=%rsi),<xmm8=%xmm8
12060pxor 0(%rsi),%xmm8
12061
12062# qhasm: xmm9 ^= *(int128 *)(inp + 16)
12063# asm 1: pxor 16(<inp=int64#2),<xmm9=int6464#10
12064# asm 2: pxor 16(<inp=%rsi),<xmm9=%xmm9
12065pxor 16(%rsi),%xmm9
12066
12067# qhasm: xmm12 ^= *(int128 *)(inp + 32)
12068# asm 1: pxor 32(<inp=int64#2),<xmm12=int6464#13
12069# asm 2: pxor 32(<inp=%rsi),<xmm12=%xmm12
12070pxor 32(%rsi),%xmm12
12071
12072# qhasm: xmm14 ^= *(int128 *)(inp + 48)
12073# asm 1: pxor 48(<inp=int64#2),<xmm14=int6464#15
12074# asm 2: pxor 48(<inp=%rsi),<xmm14=%xmm14
12075pxor 48(%rsi),%xmm14
12076
12077# qhasm: xmm11 ^= *(int128 *)(inp + 64)
12078# asm 1: pxor 64(<inp=int64#2),<xmm11=int6464#12
12079# asm 2: pxor 64(<inp=%rsi),<xmm11=%xmm11
12080pxor 64(%rsi),%xmm11
12081
12082# qhasm: xmm15 ^= *(int128 *)(inp + 80)
12083# asm 1: pxor 80(<inp=int64#2),<xmm15=int6464#16
12084# asm 2: pxor 80(<inp=%rsi),<xmm15=%xmm15
12085pxor 80(%rsi),%xmm15
12086
12087# qhasm: xmm10 ^= *(int128 *)(inp + 96)
12088# asm 1: pxor 96(<inp=int64#2),<xmm10=int6464#11
12089# asm 2: pxor 96(<inp=%rsi),<xmm10=%xmm10
12090pxor 96(%rsi),%xmm10
12091
12092# qhasm: xmm13 ^= *(int128 *)(inp + 112)
12093# asm 1: pxor 112(<inp=int64#2),<xmm13=int6464#14
12094# asm 2: pxor 112(<inp=%rsi),<xmm13=%xmm13
12095pxor 112(%rsi),%xmm13
12096
12097# qhasm: *(int128 *) (outp + 0) = xmm8
12098# asm 1: movdqa <xmm8=int6464#9,0(<outp=int64#1)
12099# asm 2: movdqa <xmm8=%xmm8,0(<outp=%rdi)
12100movdqa %xmm8,0(%rdi)
12101
12102# qhasm: *(int128 *) (outp + 16) = xmm9
12103# asm 1: movdqa <xmm9=int6464#10,16(<outp=int64#1)
12104# asm 2: movdqa <xmm9=%xmm9,16(<outp=%rdi)
12105movdqa %xmm9,16(%rdi)
12106
12107# qhasm: *(int128 *) (outp + 32) = xmm12
12108# asm 1: movdqa <xmm12=int6464#13,32(<outp=int64#1)
12109# asm 2: movdqa <xmm12=%xmm12,32(<outp=%rdi)
12110movdqa %xmm12,32(%rdi)
12111
12112# qhasm: *(int128 *) (outp + 48) = xmm14
12113# asm 1: movdqa <xmm14=int6464#15,48(<outp=int64#1)
12114# asm 2: movdqa <xmm14=%xmm14,48(<outp=%rdi)
12115movdqa %xmm14,48(%rdi)
12116
12117# qhasm: *(int128 *) (outp + 64) = xmm11
12118# asm 1: movdqa <xmm11=int6464#12,64(<outp=int64#1)
12119# asm 2: movdqa <xmm11=%xmm11,64(<outp=%rdi)
12120movdqa %xmm11,64(%rdi)
12121
12122# qhasm: *(int128 *) (outp + 80) = xmm15
12123# asm 1: movdqa <xmm15=int6464#16,80(<outp=int64#1)
12124# asm 2: movdqa <xmm15=%xmm15,80(<outp=%rdi)
12125movdqa %xmm15,80(%rdi)
12126
12127# qhasm: *(int128 *) (outp + 96) = xmm10
12128# asm 1: movdqa <xmm10=int6464#11,96(<outp=int64#1)
12129# asm 2: movdqa <xmm10=%xmm10,96(<outp=%rdi)
12130movdqa %xmm10,96(%rdi)
12131
12132# qhasm: *(int128 *) (outp + 112) = xmm13
12133# asm 1: movdqa <xmm13=int6464#14,112(<outp=int64#1)
12134# asm 2: movdqa <xmm13=%xmm13,112(<outp=%rdi)
12135movdqa %xmm13,112(%rdi)
12136
12137# qhasm: len -= 128
12138# asm 1: sub $128,<len=int64#3
12139# asm 2: sub $128,<len=%rdx
12140sub $128,%rdx
12141
12142# qhasm: inp += 128
12143# asm 1: add $128,<inp=int64#2
12144# asm 2: add $128,<inp=%rsi
12145add $128,%rsi
12146
12147# qhasm: outp += 128
12148# asm 1: add $128,<outp=int64#1
12149# asm 2: add $128,<outp=%rdi
12150add $128,%rdi
12151# comment:fp stack unchanged by jump
12152
12153# qhasm: goto enc_block
12154jmp ._enc_block
12155
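# note: end of the main loop body: 128 keystream bytes have been
# xored into the output, len/inp/outp advanced by 128, and control
# returns to ._enc_block to generate the next eight blocks.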
12156# qhasm: partial:
12157._partial:
12158
12159# qhasm: lensav = len
12160# asm 1: mov <len=int64#3,>lensav=int64#5
12161# asm 2: mov <len=%rdx,>lensav=%r8
12162mov %rdx,%r8
12163
12164# qhasm: (uint32) len >>= 4
12165# asm 1: shr $4,<len=int64#3d
12166# asm 2: shr $4,<len=%edx
12167shr $4,%edx
12168
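# note: lensav keeps the exact byte count for the copy loop below;
# len >>= 4 turns the remaining bytes into a whole 16-byte-block
# count, which is what gets added to the counter next.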
12169# qhasm: tmp = *(uint32 *)(np + 12)
12170# asm 1: movl 12(<np=int64#4),>tmp=int64#6d
12171# asm 2: movl 12(<np=%rcx),>tmp=%r9d
12172movl 12(%rcx),%r9d
12173
12174# qhasm: (uint32) bswap tmp
12175# asm 1: bswap <tmp=int64#6d
12176# asm 2: bswap <tmp=%r9d
12177bswap %r9d
12178
12179# qhasm: tmp += len
12180# asm 1: add <len=int64#3,<tmp=int64#6
12181# asm 2: add <len=%rdx,<tmp=%r9
12182add %rdx,%r9
12183
12184# qhasm: (uint32) bswap tmp
12185# asm 1: bswap <tmp=int64#6d
12186# asm 2: bswap <tmp=%r9d
12187bswap %r9d
12188
12189# qhasm: *(uint32 *)(np + 12) = tmp
12190# asm 1: movl <tmp=int64#6d,12(<np=int64#4)
12191# asm 2: movl <tmp=%r9d,12(<np=%rcx)
12192movl %r9d,12(%rcx)
12193
12194# qhasm: blp = &bl
12195# asm 1: leaq <bl=stack1024#1,>blp=int64#3
12196# asm 2: leaq <bl=32(%rsp),>blp=%rdx
12197leaq 32(%rsp),%rdx
12198
12199# qhasm: *(int128 *)(blp + 0) = xmm8
12200# asm 1: movdqa <xmm8=int6464#9,0(<blp=int64#3)
12201# asm 2: movdqa <xmm8=%xmm8,0(<blp=%rdx)
12202movdqa %xmm8,0(%rdx)
12203
12204# qhasm: *(int128 *)(blp + 16) = xmm9
12205# asm 1: movdqa <xmm9=int6464#10,16(<blp=int64#3)
12206# asm 2: movdqa <xmm9=%xmm9,16(<blp=%rdx)
12207movdqa %xmm9,16(%rdx)
12208
12209# qhasm: *(int128 *)(blp + 32) = xmm12
12210# asm 1: movdqa <xmm12=int6464#13,32(<blp=int64#3)
12211# asm 2: movdqa <xmm12=%xmm12,32(<blp=%rdx)
12212movdqa %xmm12,32(%rdx)
12213
12214# qhasm: *(int128 *)(blp + 48) = xmm14
12215# asm 1: movdqa <xmm14=int6464#15,48(<blp=int64#3)
12216# asm 2: movdqa <xmm14=%xmm14,48(<blp=%rdx)
12217movdqa %xmm14,48(%rdx)
12218
12219# qhasm: *(int128 *)(blp + 64) = xmm11
12220# asm 1: movdqa <xmm11=int6464#12,64(<blp=int64#3)
12221# asm 2: movdqa <xmm11=%xmm11,64(<blp=%rdx)
12222movdqa %xmm11,64(%rdx)
12223
12224# qhasm: *(int128 *)(blp + 80) = xmm15
12225# asm 1: movdqa <xmm15=int6464#16,80(<blp=int64#3)
12226# asm 2: movdqa <xmm15=%xmm15,80(<blp=%rdx)
12227movdqa %xmm15,80(%rdx)
12228
12229# qhasm: *(int128 *)(blp + 96) = xmm10
12230# asm 1: movdqa <xmm10=int6464#11,96(<blp=int64#3)
12231# asm 2: movdqa <xmm10=%xmm10,96(<blp=%rdx)
12232movdqa %xmm10,96(%rdx)
12233
12234# qhasm: *(int128 *)(blp + 112) = xmm13
12235# asm 1: movdqa <xmm13=int6464#14,112(<blp=int64#3)
12236# asm 2: movdqa <xmm13=%xmm13,112(<blp=%rdx)
12237movdqa %xmm13,112(%rdx)
12238
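# note: the eight keystream registers are parked in the stack
# buffer bl so the tail can be handled one byte at a time.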
12239# qhasm: bytes:
12240._bytes:
12241
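# note: byte loop: xor keystream bytes from bl into the input until
# lensav reaches zero, then jump to ._end.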
12242# qhasm: =? lensav-0
12243# asm 1: cmp $0,<lensav=int64#5
12244# asm 2: cmp $0,<lensav=%r8
12245cmp $0,%r8
12246# comment:fp stack unchanged by jump
12247
12248# qhasm: goto end if =
12249je ._end
12250
12251# qhasm: b = *(uint8 *)(blp + 0)
12252# asm 1: movzbq 0(<blp=int64#3),>b=int64#4
12253# asm 2: movzbq 0(<blp=%rdx),>b=%rcx
12254movzbq 0(%rdx),%rcx
12255
12256# qhasm: (uint8) b ^= *(uint8 *)(inp + 0)
12257# asm 1: xorb 0(<inp=int64#2),<b=int64#4b
12258# asm 2: xorb 0(<inp=%rsi),<b=%cl
12259xorb 0(%rsi),%cl
12260
12261# qhasm: *(uint8 *)(outp + 0) = b
12262# asm 1: movb <b=int64#4b,0(<outp=int64#1)
12263# asm 2: movb <b=%cl,0(<outp=%rdi)
12264movb %cl,0(%rdi)
12265
12266# qhasm: blp += 1
12267# asm 1: add $1,<blp=int64#3
12268# asm 2: add $1,<blp=%rdx
12269add $1,%rdx
12270
12271# qhasm: inp += 1

12272# asm 1: add $1,<inp=int64#2
12273# asm 2: add $1,<inp=%rsi
12274add $1,%rsi
12275
12276# qhasm: outp += 1
12277# asm 1: add $1,<outp=int64#1
12278# asm 2: add $1,<outp=%rdi
12279add $1,%rdi
12280
12281# qhasm: lensav -= 1
12282# asm 1: sub $1,<lensav=int64#5
12283# asm 2: sub $1,<lensav=%r8
12284sub $1,%r8
12285# comment:fp stack unchanged by jump
12286
12287# qhasm: goto bytes
12288jmp ._bytes
12289
12290# qhasm: full:
12291._full:
12292
12293# qhasm: tmp = *(uint32 *)(np + 12)
12294# asm 1: movl 12(<np=int64#4),>tmp=int64#3d
12295# asm 2: movl 12(<np=%rcx),>tmp=%edx
12296movl 12(%rcx),%edx
12297
12298# qhasm: (uint32) bswap tmp
12299# asm 1: bswap <tmp=int64#3d
12300# asm 2: bswap <tmp=%edx
12301bswap %edx
12302
12303# qhasm: tmp += 8
12304# asm 1: add $8,<tmp=int64#3
12305# asm 2: add $8,<tmp=%rdx
12306add $8,%rdx
12307
12308# qhasm: (uint32) bswap tmp
12309# asm 1: bswap <tmp=int64#3d
12310# asm 2: bswap <tmp=%edx
12311bswap %edx
12312
12313# qhasm: *(uint32 *)(np + 12) = tmp
12314# asm 1: movl <tmp=int64#3d,12(<np=int64#4)
12315# asm 2: movl <tmp=%edx,12(<np=%rcx)
12316movl %edx,12(%rcx)
12317
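# note: ._full is the exactly-128-bytes-left case: the counter is
# bumped by 8 as in the main path, the final eight blocks are xored
# and stored below, and control falls through to ._end.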
12318# qhasm: xmm8 ^= *(int128 *)(inp + 0)
12319# asm 1: pxor 0(<inp=int64#2),<xmm8=int6464#9
12320# asm 2: pxor 0(<inp=%rsi),<xmm8=%xmm8
12321pxor 0(%rsi),%xmm8
12322
12323# qhasm: xmm9 ^= *(int128 *)(inp + 16)
12324# asm 1: pxor 16(<inp=int64#2),<xmm9=int6464#10
12325# asm 2: pxor 16(<inp=%rsi),<xmm9=%xmm9
12326pxor 16(%rsi),%xmm9
12327
12328# qhasm: xmm12 ^= *(int128 *)(inp + 32)
12329# asm 1: pxor 32(<inp=int64#2),<xmm12=int6464#13
12330# asm 2: pxor 32(<inp=%rsi),<xmm12=%xmm12
12331pxor 32(%rsi),%xmm12
12332
12333# qhasm: xmm14 ^= *(int128 *)(inp + 48)
12334# asm 1: pxor 48(<inp=int64#2),<xmm14=int6464#15
12335# asm 2: pxor 48(<inp=%rsi),<xmm14=%xmm14
12336pxor 48(%rsi),%xmm14
12337
12338# qhasm: xmm11 ^= *(int128 *)(inp + 64)
12339# asm 1: pxor 64(<inp=int64#2),<xmm11=int6464#12
12340# asm 2: pxor 64(<inp=%rsi),<xmm11=%xmm11
12341pxor 64(%rsi),%xmm11
12342
12343# qhasm: xmm15 ^= *(int128 *)(inp + 80)
12344# asm 1: pxor 80(<inp=int64#2),<xmm15=int6464#16
12345# asm 2: pxor 80(<inp=%rsi),<xmm15=%xmm15
12346pxor 80(%rsi),%xmm15
12347
12348# qhasm: xmm10 ^= *(int128 *)(inp + 96)
12349# asm 1: pxor 96(<inp=int64#2),<xmm10=int6464#11
12350# asm 2: pxor 96(<inp=%rsi),<xmm10=%xmm10
12351pxor 96(%rsi),%xmm10
12352
12353# qhasm: xmm13 ^= *(int128 *)(inp + 112)
12354# asm 1: pxor 112(<inp=int64#2),<xmm13=int6464#14
12355# asm 2: pxor 112(<inp=%rsi),<xmm13=%xmm13
12356pxor 112(%rsi),%xmm13
12357
12358# qhasm: *(int128 *) (outp + 0) = xmm8
12359# asm 1: movdqa <xmm8=int6464#9,0(<outp=int64#1)
12360# asm 2: movdqa <xmm8=%xmm8,0(<outp=%rdi)
12361movdqa %xmm8,0(%rdi)
12362
12363# qhasm: *(int128 *) (outp + 16) = xmm9
12364# asm 1: movdqa <xmm9=int6464#10,16(<outp=int64#1)
12365# asm 2: movdqa <xmm9=%xmm9,16(<outp=%rdi)
12366movdqa %xmm9,16(%rdi)
12367
12368# qhasm: *(int128 *) (outp + 32) = xmm12
12369# asm 1: movdqa <xmm12=int6464#13,32(<outp=int64#1)
12370# asm 2: movdqa <xmm12=%xmm12,32(<outp=%rdi)
12371movdqa %xmm12,32(%rdi)
12372
12373# qhasm: *(int128 *) (outp + 48) = xmm14
12374# asm 1: movdqa <xmm14=int6464#15,48(<outp=int64#1)
12375# asm 2: movdqa <xmm14=%xmm14,48(<outp=%rdi)
12376movdqa %xmm14,48(%rdi)
12377
12378# qhasm: *(int128 *) (outp + 64) = xmm11
12379# asm 1: movdqa <xmm11=int6464#12,64(<outp=int64#1)
12380# asm 2: movdqa <xmm11=%xmm11,64(<outp=%rdi)
12381movdqa %xmm11,64(%rdi)
12382
12383# qhasm: *(int128 *) (outp + 80) = xmm15
12384# asm 1: movdqa <xmm15=int6464#16,80(<outp=int64#1)
12385# asm 2: movdqa <xmm15=%xmm15,80(<outp=%rdi)
12386movdqa %xmm15,80(%rdi)
12387
12388# qhasm: *(int128 *) (outp + 96) = xmm10
12389# asm 1: movdqa <xmm10=int6464#11,96(<outp=int64#1)
12390# asm 2: movdqa <xmm10=%xmm10,96(<outp=%rdi)
12391movdqa %xmm10,96(%rdi)
12392
12393# qhasm: *(int128 *) (outp + 112) = xmm13
12394# asm 1: movdqa <xmm13=int6464#14,112(<outp=int64#1)
12395# asm 2: movdqa <xmm13=%xmm13,112(<outp=%rdi)
12396movdqa %xmm13,112(%rdi)
12397# comment:fp stack unchanged by fallthrough
12398
12399# qhasm: end:
12400._end:
12401
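# note: qhasm epilogue: add %r11,%rsp undoes the stack adjustment
# made in the prologue, and xor %rax,%rax sets the return value to 0
# (the earlier mov %rdi,%rax is immediately overwritten and appears
# to be dead generator output).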
12402# qhasm: leave
12403add %r11,%rsp
12404mov %rdi,%rax
12405mov %rsi,%rdx
12406xor %rax,%rax
12407ret