Diffstat (limited to 'nacl/crypto_stream/aes128ctr/core2/beforenm.s')
-rw-r--r--  nacl/crypto_stream/aes128ctr/core2/beforenm.s | 13694
1 file changed, 13694 insertions(+), 0 deletions(-)
diff --git a/nacl/crypto_stream/aes128ctr/core2/beforenm.s b/nacl/crypto_stream/aes128ctr/core2/beforenm.s
new file mode 100644
index 00000000..689ad8c3
--- /dev/null
+++ b/nacl/crypto_stream/aes128ctr/core2/beforenm.s
@@ -0,0 +1,13694 @@
1# Author: Emilia Käsper and Peter Schwabe
2# Date: 2009-03-19
3# +2010.01.31: minor namespace modifications
4# Public domain
5
6.data
7.p2align 6
8
9RCON: .int 0x00000000, 0x00000000, 0x00000000, 0xffffffff
10ROTB: .int 0x0c000000, 0x00000000, 0x04000000, 0x08000000
11EXPB0: .int 0x03030303, 0x07070707, 0x0b0b0b0b, 0x0f0f0f0f
12CTRINC1: .int 0x00000001, 0x00000000, 0x00000000, 0x00000000
13CTRINC2: .int 0x00000002, 0x00000000, 0x00000000, 0x00000000
14CTRINC3: .int 0x00000003, 0x00000000, 0x00000000, 0x00000000
15CTRINC4: .int 0x00000004, 0x00000000, 0x00000000, 0x00000000
16CTRINC5: .int 0x00000005, 0x00000000, 0x00000000, 0x00000000
17CTRINC6: .int 0x00000006, 0x00000000, 0x00000000, 0x00000000
18CTRINC7: .int 0x00000007, 0x00000000, 0x00000000, 0x00000000
19RCTRINC1: .int 0x00000000, 0x00000000, 0x00000000, 0x00000001
20RCTRINC2: .int 0x00000000, 0x00000000, 0x00000000, 0x00000002
21RCTRINC3: .int 0x00000000, 0x00000000, 0x00000000, 0x00000003
22RCTRINC4: .int 0x00000000, 0x00000000, 0x00000000, 0x00000004
23RCTRINC5: .int 0x00000000, 0x00000000, 0x00000000, 0x00000005
24RCTRINC6: .int 0x00000000, 0x00000000, 0x00000000, 0x00000006
25RCTRINC7: .int 0x00000000, 0x00000000, 0x00000000, 0x00000007
26
27SWAP32: .int 0x00010203, 0x04050607, 0x08090a0b, 0x0c0d0e0f
28M0SWAP: .quad 0x0105090d0004080c, 0x03070b0f02060a0e
29
30BS0: .quad 0x5555555555555555, 0x5555555555555555
31BS1: .quad 0x3333333333333333, 0x3333333333333333
32BS2: .quad 0x0f0f0f0f0f0f0f0f, 0x0f0f0f0f0f0f0f0f
33ONE: .quad 0xffffffffffffffff, 0xffffffffffffffff
34M0: .quad 0x02060a0e03070b0f, 0x0004080c0105090d
35SRM0: .quad 0x0304090e00050a0f, 0x01060b0c0207080d
36SR: .quad 0x0504070600030201, 0x0f0e0d0c0a09080b
37
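# Note on the tables above (roles inferred from their uses below): RCON
# injects the AES round constant into the bitsliced key schedule; ROTB and
# EXPB0 are pshufb masks for the schedule's word rotation and byte
# replication; BS0/BS1/BS2 are the swapmove masks for the bitslice
# conversion; M0, M0SWAP, SR and SRM0 are byte-permutation masks. The
# CTRINC*/RCTRINC* counter increments are not referenced in this routine
# and are presumably shared with the companion CTR stream code.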
38# qhasm: int64 arg1
39
40# qhasm: int64 arg2
41
42# qhasm: input arg1
43
44# qhasm: input arg2
45
46# qhasm: int64 r11_caller
47
48# qhasm: int64 r12_caller
49
50# qhasm: int64 r13_caller
51
52# qhasm: int64 r14_caller
53
54# qhasm: int64 r15_caller
55
56# qhasm: int64 rbx_caller
57
58# qhasm: int64 rbp_caller
59
60# qhasm: caller r11_caller
61
62# qhasm: caller r12_caller
63
64# qhasm: caller r13_caller
65
66# qhasm: caller r14_caller
67
68# qhasm: caller r15_caller
69
70# qhasm: caller rbx_caller
71
72# qhasm: caller rbp_caller
73
74# qhasm: int64 sboxp
75
76# qhasm: int64 c
77
78# qhasm: int64 k
79
80# qhasm: int64 x0
81
82# qhasm: int64 x1
83
84# qhasm: int64 x2
85
86# qhasm: int64 x3
87
88# qhasm: int64 e
89
90# qhasm: int64 q0
91
92# qhasm: int64 q1
93
94# qhasm: int64 q2
95
96# qhasm: int64 q3
97
98# qhasm: int6464 xmm0
99
100# qhasm: int6464 xmm1
101
102# qhasm: int6464 xmm2
103
104# qhasm: int6464 xmm3
105
106# qhasm: int6464 xmm4
107
108# qhasm: int6464 xmm5
109
110# qhasm: int6464 xmm6
111
112# qhasm: int6464 xmm7
113
114# qhasm: int6464 xmm8
115
116# qhasm: int6464 xmm9
117
118# qhasm: int6464 xmm10
119
120# qhasm: int6464 xmm11
121
122# qhasm: int6464 xmm12
123
124# qhasm: int6464 xmm13
125
126# qhasm: int6464 xmm14
127
128# qhasm: int6464 xmm15
129
130# qhasm: int6464 t
131
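# The "# qhasm:" lines above are the variable declarations from the qhasm
# source, preserved verbatim in the generated assembly: int64 names map to
# general-purpose registers and int6464 names to XMM registers, with the
# int64#N / int6464#N annotations below recording qhasm's register
# allocation for each statement.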
132# qhasm: enter crypto_stream_aes128ctr_core2_beforenm
133.text
134.p2align 5
135.globl _crypto_stream_aes128ctr_core2_beforenm
136.globl crypto_stream_aes128ctr_core2_beforenm
137_crypto_stream_aes128ctr_core2_beforenm:
138crypto_stream_aes128ctr_core2_beforenm:
139mov %rsp,%r11
140and $31,%r11
141add $0,%r11
142sub %r11,%rsp
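# Prologue: round %rsp down to a 32-byte boundary (%r11 = %rsp mod 32,
# then subtract it). The "add $0,%r11" appears to be qhasm's placeholder
# for the amount of stack space this routine needs, which is zero here.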
143
144# qhasm: c = arg1
145# asm 1: mov <arg1=int64#1,>c=int64#1
146# asm 2: mov <arg1=%rdi,>c=%rdi
147mov %rdi,%rdi
148
149# qhasm: k = arg2
150# asm 1: mov <arg2=int64#2,>k=int64#2
151# asm 2: mov <arg2=%rsi,>k=%rsi
152mov %rsi,%rsi
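# Both moves above are no-ops: qhasm allocated c = arg1 and k = arg2 to
# the registers the arguments already occupy (%rdi and %rsi), and the
# identity moves survive in the generated code.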
153
154# qhasm: xmm0 = *(int128 *) (k + 0)
155# asm 1: movdqa 0(<k=int64#2),>xmm0=int6464#1
156# asm 2: movdqa 0(<k=%rsi),>xmm0=%xmm0
157movdqa 0(%rsi),%xmm0
158
159# qhasm: shuffle bytes of xmm0 by M0
160# asm 1: pshufb M0,<xmm0=int6464#1
161# asm 2: pshufb M0,<xmm0=%xmm0
162pshufb M0,%xmm0
163
164# qhasm: xmm1 = xmm0
165# asm 1: movdqa <xmm0=int6464#1,>xmm1=int6464#2
166# asm 2: movdqa <xmm0=%xmm0,>xmm1=%xmm1
167movdqa %xmm0,%xmm1
168
169# qhasm: xmm2 = xmm0
170# asm 1: movdqa <xmm0=int6464#1,>xmm2=int6464#3
171# asm 2: movdqa <xmm0=%xmm0,>xmm2=%xmm2
172movdqa %xmm0,%xmm2
173
174# qhasm: xmm3 = xmm0
175# asm 1: movdqa <xmm0=int6464#1,>xmm3=int6464#4
176# asm 2: movdqa <xmm0=%xmm0,>xmm3=%xmm3
177movdqa %xmm0,%xmm3
178
179# qhasm: xmm4 = xmm0
180# asm 1: movdqa <xmm0=int6464#1,>xmm4=int6464#5
181# asm 2: movdqa <xmm0=%xmm0,>xmm4=%xmm4
182movdqa %xmm0,%xmm4
183
184# qhasm: xmm5 = xmm0
185# asm 1: movdqa <xmm0=int6464#1,>xmm5=int6464#6
186# asm 2: movdqa <xmm0=%xmm0,>xmm5=%xmm5
187movdqa %xmm0,%xmm5
188
189# qhasm: xmm6 = xmm0
190# asm 1: movdqa <xmm0=int6464#1,>xmm6=int6464#7
191# asm 2: movdqa <xmm0=%xmm0,>xmm6=%xmm6
192movdqa %xmm0,%xmm6
193
194# qhasm: xmm7 = xmm0
195# asm 1: movdqa <xmm0=int6464#1,>xmm7=int6464#8
196# asm 2: movdqa <xmm0=%xmm0,>xmm7=%xmm7
197movdqa %xmm0,%xmm7
198
199# qhasm: t = xmm6
200# asm 1: movdqa <xmm6=int6464#7,>t=int6464#9
201# asm 2: movdqa <xmm6=%xmm6,>t=%xmm8
202movdqa %xmm6,%xmm8
203
204# qhasm: uint6464 t >>= 1
205# asm 1: psrlq $1,<t=int6464#9
206# asm 2: psrlq $1,<t=%xmm8
207psrlq $1,%xmm8
208
209# qhasm: t ^= xmm7
210# asm 1: pxor <xmm7=int6464#8,<t=int6464#9
211# asm 2: pxor <xmm7=%xmm7,<t=%xmm8
212pxor %xmm7,%xmm8
213
214# qhasm: t &= BS0
215# asm 1: pand BS0,<t=int6464#9
216# asm 2: pand BS0,<t=%xmm8
217pand BS0,%xmm8
218
219# qhasm: xmm7 ^= t
220# asm 1: pxor <t=int6464#9,<xmm7=int6464#8
221# asm 2: pxor <t=%xmm8,<xmm7=%xmm7
222pxor %xmm8,%xmm7
223
224# qhasm: uint6464 t <<= 1
225# asm 1: psllq $1,<t=int6464#9
226# asm 2: psllq $1,<t=%xmm8
227psllq $1,%xmm8
228
229# qhasm: xmm6 ^= t
230# asm 1: pxor <t=int6464#9,<xmm6=int6464#7
231# asm 2: pxor <t=%xmm8,<xmm6=%xmm6
232pxor %xmm8,%xmm6
233
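# The seven instructions above are one "swapmove" step:
#   t = ((a >> n) ^ b) & mask;  b ^= t;  a ^= t << n;
# which exchanges the bits of a selected by mask << n with the bits of b
# selected by mask. This pass uses mask BS0 = 0x55..55 with n = 1; the
# later passes with BS1 = 0x33..33 (n = 2) and BS2 = 0x0f..0f (n = 4)
# complete the transposition of the key bytes into eight bit planes.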
234# qhasm: t = xmm4
235# asm 1: movdqa <xmm4=int6464#5,>t=int6464#9
236# asm 2: movdqa <xmm4=%xmm4,>t=%xmm8
237movdqa %xmm4,%xmm8
238
239# qhasm: uint6464 t >>= 1
240# asm 1: psrlq $1,<t=int6464#9
241# asm 2: psrlq $1,<t=%xmm8
242psrlq $1,%xmm8
243
244# qhasm: t ^= xmm5
245# asm 1: pxor <xmm5=int6464#6,<t=int6464#9
246# asm 2: pxor <xmm5=%xmm5,<t=%xmm8
247pxor %xmm5,%xmm8
248
249# qhasm: t &= BS0
250# asm 1: pand BS0,<t=int6464#9
251# asm 2: pand BS0,<t=%xmm8
252pand BS0,%xmm8
253
254# qhasm: xmm5 ^= t
255# asm 1: pxor <t=int6464#9,<xmm5=int6464#6
256# asm 2: pxor <t=%xmm8,<xmm5=%xmm5
257pxor %xmm8,%xmm5
258
259# qhasm: uint6464 t <<= 1
260# asm 1: psllq $1,<t=int6464#9
261# asm 2: psllq $1,<t=%xmm8
262psllq $1,%xmm8
263
264# qhasm: xmm4 ^= t
265# asm 1: pxor <t=int6464#9,<xmm4=int6464#5
266# asm 2: pxor <t=%xmm8,<xmm4=%xmm4
267pxor %xmm8,%xmm4
268
269# qhasm: t = xmm2
270# asm 1: movdqa <xmm2=int6464#3,>t=int6464#9
271# asm 2: movdqa <xmm2=%xmm2,>t=%xmm8
272movdqa %xmm2,%xmm8
273
274# qhasm: uint6464 t >>= 1
275# asm 1: psrlq $1,<t=int6464#9
276# asm 2: psrlq $1,<t=%xmm8
277psrlq $1,%xmm8
278
279# qhasm: t ^= xmm3
280# asm 1: pxor <xmm3=int6464#4,<t=int6464#9
281# asm 2: pxor <xmm3=%xmm3,<t=%xmm8
282pxor %xmm3,%xmm8
283
284# qhasm: t &= BS0
285# asm 1: pand BS0,<t=int6464#9
286# asm 2: pand BS0,<t=%xmm8
287pand BS0,%xmm8
288
289# qhasm: xmm3 ^= t
290# asm 1: pxor <t=int6464#9,<xmm3=int6464#4
291# asm 2: pxor <t=%xmm8,<xmm3=%xmm3
292pxor %xmm8,%xmm3
293
294# qhasm: uint6464 t <<= 1
295# asm 1: psllq $1,<t=int6464#9
296# asm 2: psllq $1,<t=%xmm8
297psllq $1,%xmm8
298
299# qhasm: xmm2 ^= t
300# asm 1: pxor <t=int6464#9,<xmm2=int6464#3
301# asm 2: pxor <t=%xmm8,<xmm2=%xmm2
302pxor %xmm8,%xmm2
303
304# qhasm: t = xmm0
305# asm 1: movdqa <xmm0=int6464#1,>t=int6464#9
306# asm 2: movdqa <xmm0=%xmm0,>t=%xmm8
307movdqa %xmm0,%xmm8
308
309# qhasm: uint6464 t >>= 1
310# asm 1: psrlq $1,<t=int6464#9
311# asm 2: psrlq $1,<t=%xmm8
312psrlq $1,%xmm8
313
314# qhasm: t ^= xmm1
315# asm 1: pxor <xmm1=int6464#2,<t=int6464#9
316# asm 2: pxor <xmm1=%xmm1,<t=%xmm8
317pxor %xmm1,%xmm8
318
319# qhasm: t &= BS0
320# asm 1: pand BS0,<t=int6464#9
321# asm 2: pand BS0,<t=%xmm8
322pand BS0,%xmm8
323
324# qhasm: xmm1 ^= t
325# asm 1: pxor <t=int6464#9,<xmm1=int6464#2
326# asm 2: pxor <t=%xmm8,<xmm1=%xmm1
327pxor %xmm8,%xmm1
328
329# qhasm: uint6464 t <<= 1
330# asm 1: psllq $1,<t=int6464#9
331# asm 2: psllq $1,<t=%xmm8
332psllq $1,%xmm8
333
334# qhasm: xmm0 ^= t
335# asm 1: pxor <t=int6464#9,<xmm0=int6464#1
336# asm 2: pxor <t=%xmm8,<xmm0=%xmm0
337pxor %xmm8,%xmm0
338
339# qhasm: t = xmm5
340# asm 1: movdqa <xmm5=int6464#6,>t=int6464#9
341# asm 2: movdqa <xmm5=%xmm5,>t=%xmm8
342movdqa %xmm5,%xmm8
343
344# qhasm: uint6464 t >>= 2
345# asm 1: psrlq $2,<t=int6464#9
346# asm 2: psrlq $2,<t=%xmm8
347psrlq $2,%xmm8
348
349# qhasm: t ^= xmm7
350# asm 1: pxor <xmm7=int6464#8,<t=int6464#9
351# asm 2: pxor <xmm7=%xmm7,<t=%xmm8
352pxor %xmm7,%xmm8
353
354# qhasm: t &= BS1
355# asm 1: pand BS1,<t=int6464#9
356# asm 2: pand BS1,<t=%xmm8
357pand BS1,%xmm8
358
359# qhasm: xmm7 ^= t
360# asm 1: pxor <t=int6464#9,<xmm7=int6464#8
361# asm 2: pxor <t=%xmm8,<xmm7=%xmm7
362pxor %xmm8,%xmm7
363
364# qhasm: uint6464 t <<= 2
365# asm 1: psllq $2,<t=int6464#9
366# asm 2: psllq $2,<t=%xmm8
367psllq $2,%xmm8
368
369# qhasm: xmm5 ^= t
370# asm 1: pxor <t=int6464#9,<xmm5=int6464#6
371# asm 2: pxor <t=%xmm8,<xmm5=%xmm5
372pxor %xmm8,%xmm5
373
374# qhasm: t = xmm4
375# asm 1: movdqa <xmm4=int6464#5,>t=int6464#9
376# asm 2: movdqa <xmm4=%xmm4,>t=%xmm8
377movdqa %xmm4,%xmm8
378
379# qhasm: uint6464 t >>= 2
380# asm 1: psrlq $2,<t=int6464#9
381# asm 2: psrlq $2,<t=%xmm8
382psrlq $2,%xmm8
383
384# qhasm: t ^= xmm6
385# asm 1: pxor <xmm6=int6464#7,<t=int6464#9
386# asm 2: pxor <xmm6=%xmm6,<t=%xmm8
387pxor %xmm6,%xmm8
388
389# qhasm: t &= BS1
390# asm 1: pand BS1,<t=int6464#9
391# asm 2: pand BS1,<t=%xmm8
392pand BS1,%xmm8
393
394# qhasm: xmm6 ^= t
395# asm 1: pxor <t=int6464#9,<xmm6=int6464#7
396# asm 2: pxor <t=%xmm8,<xmm6=%xmm6
397pxor %xmm8,%xmm6
398
399# qhasm: uint6464 t <<= 2
400# asm 1: psllq $2,<t=int6464#9
401# asm 2: psllq $2,<t=%xmm8
402psllq $2,%xmm8
403
404# qhasm: xmm4 ^= t
405# asm 1: pxor <t=int6464#9,<xmm4=int6464#5
406# asm 2: pxor <t=%xmm8,<xmm4=%xmm4
407pxor %xmm8,%xmm4
408
409# qhasm: t = xmm1
410# asm 1: movdqa <xmm1=int6464#2,>t=int6464#9
411# asm 2: movdqa <xmm1=%xmm1,>t=%xmm8
412movdqa %xmm1,%xmm8
413
414# qhasm: uint6464 t >>= 2
415# asm 1: psrlq $2,<t=int6464#9
416# asm 2: psrlq $2,<t=%xmm8
417psrlq $2,%xmm8
418
419# qhasm: t ^= xmm3
420# asm 1: pxor <xmm3=int6464#4,<t=int6464#9
421# asm 2: pxor <xmm3=%xmm3,<t=%xmm8
422pxor %xmm3,%xmm8
423
424# qhasm: t &= BS1
425# asm 1: pand BS1,<t=int6464#9
426# asm 2: pand BS1,<t=%xmm8
427pand BS1,%xmm8
428
429# qhasm: xmm3 ^= t
430# asm 1: pxor <t=int6464#9,<xmm3=int6464#4
431# asm 2: pxor <t=%xmm8,<xmm3=%xmm3
432pxor %xmm8,%xmm3
433
434# qhasm: uint6464 t <<= 2
435# asm 1: psllq $2,<t=int6464#9
436# asm 2: psllq $2,<t=%xmm8
437psllq $2,%xmm8
438
439# qhasm: xmm1 ^= t
440# asm 1: pxor <t=int6464#9,<xmm1=int6464#2
441# asm 2: pxor <t=%xmm8,<xmm1=%xmm1
442pxor %xmm8,%xmm1
443
444# qhasm: t = xmm0
445# asm 1: movdqa <xmm0=int6464#1,>t=int6464#9
446# asm 2: movdqa <xmm0=%xmm0,>t=%xmm8
447movdqa %xmm0,%xmm8
448
449# qhasm: uint6464 t >>= 2
450# asm 1: psrlq $2,<t=int6464#9
451# asm 2: psrlq $2,<t=%xmm8
452psrlq $2,%xmm8
453
454# qhasm: t ^= xmm2
455# asm 1: pxor <xmm2=int6464#3,<t=int6464#9
456# asm 2: pxor <xmm2=%xmm2,<t=%xmm8
457pxor %xmm2,%xmm8
458
459# qhasm: t &= BS1
460# asm 1: pand BS1,<t=int6464#9
461# asm 2: pand BS1,<t=%xmm8
462pand BS1,%xmm8
463
464# qhasm: xmm2 ^= t
465# asm 1: pxor <t=int6464#9,<xmm2=int6464#3
466# asm 2: pxor <t=%xmm8,<xmm2=%xmm2
467pxor %xmm8,%xmm2
468
469# qhasm: uint6464 t <<= 2
470# asm 1: psllq $2,<t=int6464#9
471# asm 2: psllq $2,<t=%xmm8
472psllq $2,%xmm8
473
474# qhasm: xmm0 ^= t
475# asm 1: pxor <t=int6464#9,<xmm0=int6464#1
476# asm 2: pxor <t=%xmm8,<xmm0=%xmm0
477pxor %xmm8,%xmm0
478
479# qhasm: t = xmm3
480# asm 1: movdqa <xmm3=int6464#4,>t=int6464#9
481# asm 2: movdqa <xmm3=%xmm3,>t=%xmm8
482movdqa %xmm3,%xmm8
483
484# qhasm: uint6464 t >>= 4
485# asm 1: psrlq $4,<t=int6464#9
486# asm 2: psrlq $4,<t=%xmm8
487psrlq $4,%xmm8
488
489# qhasm: t ^= xmm7
490# asm 1: pxor <xmm7=int6464#8,<t=int6464#9
491# asm 2: pxor <xmm7=%xmm7,<t=%xmm8
492pxor %xmm7,%xmm8
493
494# qhasm: t &= BS2
495# asm 1: pand BS2,<t=int6464#9
496# asm 2: pand BS2,<t=%xmm8
497pand BS2,%xmm8
498
499# qhasm: xmm7 ^= t
500# asm 1: pxor <t=int6464#9,<xmm7=int6464#8
501# asm 2: pxor <t=%xmm8,<xmm7=%xmm7
502pxor %xmm8,%xmm7
503
504# qhasm: uint6464 t <<= 4
505# asm 1: psllq $4,<t=int6464#9
506# asm 2: psllq $4,<t=%xmm8
507psllq $4,%xmm8
508
509# qhasm: xmm3 ^= t
510# asm 1: pxor <t=int6464#9,<xmm3=int6464#4
511# asm 2: pxor <t=%xmm8,<xmm3=%xmm3
512pxor %xmm8,%xmm3
513
514# qhasm: t = xmm2
515# asm 1: movdqa <xmm2=int6464#3,>t=int6464#9
516# asm 2: movdqa <xmm2=%xmm2,>t=%xmm8
517movdqa %xmm2,%xmm8
518
519# qhasm: uint6464 t >>= 4
520# asm 1: psrlq $4,<t=int6464#9
521# asm 2: psrlq $4,<t=%xmm8
522psrlq $4,%xmm8
523
524# qhasm: t ^= xmm6
525# asm 1: pxor <xmm6=int6464#7,<t=int6464#9
526# asm 2: pxor <xmm6=%xmm6,<t=%xmm8
527pxor %xmm6,%xmm8
528
529# qhasm: t &= BS2
530# asm 1: pand BS2,<t=int6464#9
531# asm 2: pand BS2,<t=%xmm8
532pand BS2,%xmm8
533
534# qhasm: xmm6 ^= t
535# asm 1: pxor <t=int6464#9,<xmm6=int6464#7
536# asm 2: pxor <t=%xmm8,<xmm6=%xmm6
537pxor %xmm8,%xmm6
538
539# qhasm: uint6464 t <<= 4
540# asm 1: psllq $4,<t=int6464#9
541# asm 2: psllq $4,<t=%xmm8
542psllq $4,%xmm8
543
544# qhasm: xmm2 ^= t
545# asm 1: pxor <t=int6464#9,<xmm2=int6464#3
546# asm 2: pxor <t=%xmm8,<xmm2=%xmm2
547pxor %xmm8,%xmm2
548
549# qhasm: t = xmm1
550# asm 1: movdqa <xmm1=int6464#2,>t=int6464#9
551# asm 2: movdqa <xmm1=%xmm1,>t=%xmm8
552movdqa %xmm1,%xmm8
553
554# qhasm: uint6464 t >>= 4
555# asm 1: psrlq $4,<t=int6464#9
556# asm 2: psrlq $4,<t=%xmm8
557psrlq $4,%xmm8
558
559# qhasm: t ^= xmm5
560# asm 1: pxor <xmm5=int6464#6,<t=int6464#9
561# asm 2: pxor <xmm5=%xmm5,<t=%xmm8
562pxor %xmm5,%xmm8
563
564# qhasm: t &= BS2
565# asm 1: pand BS2,<t=int6464#9
566# asm 2: pand BS2,<t=%xmm8
567pand BS2,%xmm8
568
569# qhasm: xmm5 ^= t
570# asm 1: pxor <t=int6464#9,<xmm5=int6464#6
571# asm 2: pxor <t=%xmm8,<xmm5=%xmm5
572pxor %xmm8,%xmm5
573
574# qhasm: uint6464 t <<= 4
575# asm 1: psllq $4,<t=int6464#9
576# asm 2: psllq $4,<t=%xmm8
577psllq $4,%xmm8
578
579# qhasm: xmm1 ^= t
580# asm 1: pxor <t=int6464#9,<xmm1=int6464#2
581# asm 2: pxor <t=%xmm8,<xmm1=%xmm1
582pxor %xmm8,%xmm1
583
584# qhasm: t = xmm0
585# asm 1: movdqa <xmm0=int6464#1,>t=int6464#9
586# asm 2: movdqa <xmm0=%xmm0,>t=%xmm8
587movdqa %xmm0,%xmm8
588
589# qhasm: uint6464 t >>= 4
590# asm 1: psrlq $4,<t=int6464#9
591# asm 2: psrlq $4,<t=%xmm8
592psrlq $4,%xmm8
593
594# qhasm: t ^= xmm4
595# asm 1: pxor <xmm4=int6464#5,<t=int6464#9
596# asm 2: pxor <xmm4=%xmm4,<t=%xmm8
597pxor %xmm4,%xmm8
598
599# qhasm: t &= BS2
600# asm 1: pand BS2,<t=int6464#9
601# asm 2: pand BS2,<t=%xmm8
602pand BS2,%xmm8
603
604# qhasm: xmm4 ^= t
605# asm 1: pxor <t=int6464#9,<xmm4=int6464#5
606# asm 2: pxor <t=%xmm8,<xmm4=%xmm4
607pxor %xmm8,%xmm4
608
609# qhasm: uint6464 t <<= 4
610# asm 1: psllq $4,<t=int6464#9
611# asm 2: psllq $4,<t=%xmm8
612psllq $4,%xmm8
613
614# qhasm: xmm0 ^= t
615# asm 1: pxor <t=int6464#9,<xmm0=int6464#1
616# asm 2: pxor <t=%xmm8,<xmm0=%xmm0
617pxor %xmm8,%xmm0
618
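# The bitslice conversion is complete: xmm0..xmm7 now hold the key as
# eight bit planes (plane i collecting bit i of each byte, under the M0
# byte ordering), and the stores below lay them out as round key 0 at
# c + 0 .. c + 112.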
619# qhasm: *(int128 *) (c + 0) = xmm0
620# asm 1: movdqa <xmm0=int6464#1,0(<c=int64#1)
621# asm 2: movdqa <xmm0=%xmm0,0(<c=%rdi)
622movdqa %xmm0,0(%rdi)
623
624# qhasm: *(int128 *) (c + 16) = xmm1
625# asm 1: movdqa <xmm1=int6464#2,16(<c=int64#1)
626# asm 2: movdqa <xmm1=%xmm1,16(<c=%rdi)
627movdqa %xmm1,16(%rdi)
628
629# qhasm: *(int128 *) (c + 32) = xmm2
630# asm 1: movdqa <xmm2=int6464#3,32(<c=int64#1)
631# asm 2: movdqa <xmm2=%xmm2,32(<c=%rdi)
632movdqa %xmm2,32(%rdi)
633
634# qhasm: *(int128 *) (c + 48) = xmm3
635# asm 1: movdqa <xmm3=int6464#4,48(<c=int64#1)
636# asm 2: movdqa <xmm3=%xmm3,48(<c=%rdi)
637movdqa %xmm3,48(%rdi)
638
639# qhasm: *(int128 *) (c + 64) = xmm4
640# asm 1: movdqa <xmm4=int6464#5,64(<c=int64#1)
641# asm 2: movdqa <xmm4=%xmm4,64(<c=%rdi)
642movdqa %xmm4,64(%rdi)
643
644# qhasm: *(int128 *) (c + 80) = xmm5
645# asm 1: movdqa <xmm5=int6464#6,80(<c=int64#1)
646# asm 2: movdqa <xmm5=%xmm5,80(<c=%rdi)
647movdqa %xmm5,80(%rdi)
648
649# qhasm: *(int128 *) (c + 96) = xmm6
650# asm 1: movdqa <xmm6=int6464#7,96(<c=int64#1)
651# asm 2: movdqa <xmm6=%xmm6,96(<c=%rdi)
652movdqa %xmm6,96(%rdi)
653
654# qhasm: *(int128 *) (c + 112) = xmm7
655# asm 1: movdqa <xmm7=int6464#8,112(<c=int64#1)
656# asm 2: movdqa <xmm7=%xmm7,112(<c=%rdi)
657movdqa %xmm7,112(%rdi)
658
659# qhasm: shuffle bytes of xmm0 by ROTB
660# asm 1: pshufb ROTB,<xmm0=int6464#1
661# asm 2: pshufb ROTB,<xmm0=%xmm0
662pshufb ROTB,%xmm0
663
664# qhasm: shuffle bytes of xmm1 by ROTB
665# asm 1: pshufb ROTB,<xmm1=int6464#2
666# asm 2: pshufb ROTB,<xmm1=%xmm1
667pshufb ROTB,%xmm1
668
669# qhasm: shuffle bytes of xmm2 by ROTB
670# asm 1: pshufb ROTB,<xmm2=int6464#3
671# asm 2: pshufb ROTB,<xmm2=%xmm2
672pshufb ROTB,%xmm2
673
674# qhasm: shuffle bytes of xmm3 by ROTB
675# asm 1: pshufb ROTB,<xmm3=int6464#4
676# asm 2: pshufb ROTB,<xmm3=%xmm3
677pshufb ROTB,%xmm3
678
679# qhasm: shuffle bytes of xmm4 by ROTB
680# asm 1: pshufb ROTB,<xmm4=int6464#5
681# asm 2: pshufb ROTB,<xmm4=%xmm4
682pshufb ROTB,%xmm4
683
684# qhasm: shuffle bytes of xmm5 by ROTB
685# asm 1: pshufb ROTB,<xmm5=int6464#6
686# asm 2: pshufb ROTB,<xmm5=%xmm5
687pshufb ROTB,%xmm5
688
689# qhasm: shuffle bytes of xmm6 by ROTB
690# asm 1: pshufb ROTB,<xmm6=int6464#7
691# asm 2: pshufb ROTB,<xmm6=%xmm6
692pshufb ROTB,%xmm6
693
694# qhasm: shuffle bytes of xmm7 by ROTB
695# asm 1: pshufb ROTB,<xmm7=int6464#8
696# asm 2: pshufb ROTB,<xmm7=%xmm7
697pshufb ROTB,%xmm7
698
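# The long straight-line movdqa/pxor/pand/por sequence that follows
# evaluates the bitsliced AES S-box as a Boolean circuit over the eight
# bit planes, with the temporaries xmm8..xmm15 holding intermediate gate
# outputs.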
699# qhasm: xmm5 ^= xmm6
700# asm 1: pxor <xmm6=int6464#7,<xmm5=int6464#6
701# asm 2: pxor <xmm6=%xmm6,<xmm5=%xmm5
702pxor %xmm6,%xmm5
703
704# qhasm: xmm2 ^= xmm1
705# asm 1: pxor <xmm1=int6464#2,<xmm2=int6464#3
706# asm 2: pxor <xmm1=%xmm1,<xmm2=%xmm2
707pxor %xmm1,%xmm2
708
709# qhasm: xmm5 ^= xmm0
710# asm 1: pxor <xmm0=int6464#1,<xmm5=int6464#6
711# asm 2: pxor <xmm0=%xmm0,<xmm5=%xmm5
712pxor %xmm0,%xmm5
713
714# qhasm: xmm6 ^= xmm2
715# asm 1: pxor <xmm2=int6464#3,<xmm6=int6464#7
716# asm 2: pxor <xmm2=%xmm2,<xmm6=%xmm6
717pxor %xmm2,%xmm6
718
719# qhasm: xmm3 ^= xmm0
720# asm 1: pxor <xmm0=int6464#1,<xmm3=int6464#4
721# asm 2: pxor <xmm0=%xmm0,<xmm3=%xmm3
722pxor %xmm0,%xmm3
723
724# qhasm: xmm6 ^= xmm3
725# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
726# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
727pxor %xmm3,%xmm6
728
729# qhasm: xmm3 ^= xmm7
730# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4
731# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3
732pxor %xmm7,%xmm3
733
734# qhasm: xmm3 ^= xmm4
735# asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4
736# asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3
737pxor %xmm4,%xmm3
738
739# qhasm: xmm7 ^= xmm5
740# asm 1: pxor <xmm5=int6464#6,<xmm7=int6464#8
741# asm 2: pxor <xmm5=%xmm5,<xmm7=%xmm7
742pxor %xmm5,%xmm7
743
744# qhasm: xmm3 ^= xmm1
745# asm 1: pxor <xmm1=int6464#2,<xmm3=int6464#4
746# asm 2: pxor <xmm1=%xmm1,<xmm3=%xmm3
747pxor %xmm1,%xmm3
748
749# qhasm: xmm4 ^= xmm5
750# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
751# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
752pxor %xmm5,%xmm4
753
754# qhasm: xmm2 ^= xmm7
755# asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3
756# asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2
757pxor %xmm7,%xmm2
758
759# qhasm: xmm1 ^= xmm5
760# asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2
761# asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1
762pxor %xmm5,%xmm1
763
764# qhasm: xmm11 = xmm7
765# asm 1: movdqa <xmm7=int6464#8,>xmm11=int6464#9
766# asm 2: movdqa <xmm7=%xmm7,>xmm11=%xmm8
767movdqa %xmm7,%xmm8
768
769# qhasm: xmm10 = xmm1
770# asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10
771# asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9
772movdqa %xmm1,%xmm9
773
774# qhasm: xmm9 = xmm5
775# asm 1: movdqa <xmm5=int6464#6,>xmm9=int6464#11
776# asm 2: movdqa <xmm5=%xmm5,>xmm9=%xmm10
777movdqa %xmm5,%xmm10
778
779# qhasm: xmm13 = xmm2
780# asm 1: movdqa <xmm2=int6464#3,>xmm13=int6464#12
781# asm 2: movdqa <xmm2=%xmm2,>xmm13=%xmm11
782movdqa %xmm2,%xmm11
783
784# qhasm: xmm12 = xmm6
785# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#13
786# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm12
787movdqa %xmm6,%xmm12
788
789# qhasm: xmm11 ^= xmm4
790# asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#9
791# asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm8
792pxor %xmm4,%xmm8
793
794# qhasm: xmm10 ^= xmm2
795# asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#10
796# asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm9
797pxor %xmm2,%xmm9
798
799# qhasm: xmm9 ^= xmm3
800# asm 1: pxor <xmm3=int6464#4,<xmm9=int6464#11
801# asm 2: pxor <xmm3=%xmm3,<xmm9=%xmm10
802pxor %xmm3,%xmm10
803
804# qhasm: xmm13 ^= xmm4
805# asm 1: pxor <xmm4=int6464#5,<xmm13=int6464#12
806# asm 2: pxor <xmm4=%xmm4,<xmm13=%xmm11
807pxor %xmm4,%xmm11
808
809# qhasm: xmm12 ^= xmm0
810# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13
811# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12
812pxor %xmm0,%xmm12
813
814# qhasm: xmm14 = xmm11
815# asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14
816# asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13
817movdqa %xmm8,%xmm13
818
819# qhasm: xmm8 = xmm10
820# asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15
821# asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14
822movdqa %xmm9,%xmm14
823
824# qhasm: xmm15 = xmm11
825# asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16
826# asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15
827movdqa %xmm8,%xmm15
828
829# qhasm: xmm10 |= xmm9
830# asm 1: por <xmm9=int6464#11,<xmm10=int6464#10
831# asm 2: por <xmm9=%xmm10,<xmm10=%xmm9
832por %xmm10,%xmm9
833
834# qhasm: xmm11 |= xmm12
835# asm 1: por <xmm12=int6464#13,<xmm11=int6464#9
836# asm 2: por <xmm12=%xmm12,<xmm11=%xmm8
837por %xmm12,%xmm8
838
839# qhasm: xmm15 ^= xmm8
840# asm 1: pxor <xmm8=int6464#15,<xmm15=int6464#16
841# asm 2: pxor <xmm8=%xmm14,<xmm15=%xmm15
842pxor %xmm14,%xmm15
843
844# qhasm: xmm14 &= xmm12
845# asm 1: pand <xmm12=int6464#13,<xmm14=int6464#14
846# asm 2: pand <xmm12=%xmm12,<xmm14=%xmm13
847pand %xmm12,%xmm13
848
849# qhasm: xmm8 &= xmm9
850# asm 1: pand <xmm9=int6464#11,<xmm8=int6464#15
851# asm 2: pand <xmm9=%xmm10,<xmm8=%xmm14
852pand %xmm10,%xmm14
853
854# qhasm: xmm12 ^= xmm9
855# asm 1: pxor <xmm9=int6464#11,<xmm12=int6464#13
856# asm 2: pxor <xmm9=%xmm10,<xmm12=%xmm12
857pxor %xmm10,%xmm12
858
859# qhasm: xmm15 &= xmm12
860# asm 1: pand <xmm12=int6464#13,<xmm15=int6464#16
861# asm 2: pand <xmm12=%xmm12,<xmm15=%xmm15
862pand %xmm12,%xmm15
863
864# qhasm: xmm12 = xmm3
865# asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#11
866# asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm10
867movdqa %xmm3,%xmm10
868
869# qhasm: xmm12 ^= xmm0
870# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#11
871# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm10
872pxor %xmm0,%xmm10
873
874# qhasm: xmm13 &= xmm12
875# asm 1: pand <xmm12=int6464#11,<xmm13=int6464#12
876# asm 2: pand <xmm12=%xmm10,<xmm13=%xmm11
877pand %xmm10,%xmm11
878
879# qhasm: xmm11 ^= xmm13
880# asm 1: pxor <xmm13=int6464#12,<xmm11=int6464#9
881# asm 2: pxor <xmm13=%xmm11,<xmm11=%xmm8
882pxor %xmm11,%xmm8
883
884# qhasm: xmm10 ^= xmm13
885# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
886# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
887pxor %xmm11,%xmm9
888
889# qhasm: xmm13 = xmm7
890# asm 1: movdqa <xmm7=int6464#8,>xmm13=int6464#11
891# asm 2: movdqa <xmm7=%xmm7,>xmm13=%xmm10
892movdqa %xmm7,%xmm10
893
894# qhasm: xmm13 ^= xmm1
895# asm 1: pxor <xmm1=int6464#2,<xmm13=int6464#11
896# asm 2: pxor <xmm1=%xmm1,<xmm13=%xmm10
897pxor %xmm1,%xmm10
898
899# qhasm: xmm12 = xmm5
900# asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#12
901# asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm11
902movdqa %xmm5,%xmm11
903
904# qhasm: xmm9 = xmm13
905# asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13
906# asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12
907movdqa %xmm10,%xmm12
908
909# qhasm: xmm12 ^= xmm6
910# asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#12
911# asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm11
912pxor %xmm6,%xmm11
913
914# qhasm: xmm9 |= xmm12
915# asm 1: por <xmm12=int6464#12,<xmm9=int6464#13
916# asm 2: por <xmm12=%xmm11,<xmm9=%xmm12
917por %xmm11,%xmm12
918
919# qhasm: xmm13 &= xmm12
920# asm 1: pand <xmm12=int6464#12,<xmm13=int6464#11
921# asm 2: pand <xmm12=%xmm11,<xmm13=%xmm10
922pand %xmm11,%xmm10
923
924# qhasm: xmm8 ^= xmm13
925# asm 1: pxor <xmm13=int6464#11,<xmm8=int6464#15
926# asm 2: pxor <xmm13=%xmm10,<xmm8=%xmm14
927pxor %xmm10,%xmm14
928
929# qhasm: xmm11 ^= xmm15
930# asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#9
931# asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm8
932pxor %xmm15,%xmm8
933
934# qhasm: xmm10 ^= xmm14
935# asm 1: pxor <xmm14=int6464#14,<xmm10=int6464#10
936# asm 2: pxor <xmm14=%xmm13,<xmm10=%xmm9
937pxor %xmm13,%xmm9
938
939# qhasm: xmm9 ^= xmm15
940# asm 1: pxor <xmm15=int6464#16,<xmm9=int6464#13
941# asm 2: pxor <xmm15=%xmm15,<xmm9=%xmm12
942pxor %xmm15,%xmm12
943
944# qhasm: xmm8 ^= xmm14
945# asm 1: pxor <xmm14=int6464#14,<xmm8=int6464#15
946# asm 2: pxor <xmm14=%xmm13,<xmm8=%xmm14
947pxor %xmm13,%xmm14
948
949# qhasm: xmm9 ^= xmm14
950# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
951# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
952pxor %xmm13,%xmm12
953
954# qhasm: xmm12 = xmm2
955# asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#11
956# asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm10
957movdqa %xmm2,%xmm10
958
959# qhasm: xmm13 = xmm4
960# asm 1: movdqa <xmm4=int6464#5,>xmm13=int6464#12
961# asm 2: movdqa <xmm4=%xmm4,>xmm13=%xmm11
962movdqa %xmm4,%xmm11
963
964# qhasm: xmm14 = xmm1
965# asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14
966# asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13
967movdqa %xmm1,%xmm13
968
969# qhasm: xmm15 = xmm7
970# asm 1: movdqa <xmm7=int6464#8,>xmm15=int6464#16
971# asm 2: movdqa <xmm7=%xmm7,>xmm15=%xmm15
972movdqa %xmm7,%xmm15
973
974# qhasm: xmm12 &= xmm3
975# asm 1: pand <xmm3=int6464#4,<xmm12=int6464#11
976# asm 2: pand <xmm3=%xmm3,<xmm12=%xmm10
977pand %xmm3,%xmm10
978
979# qhasm: xmm13 &= xmm0
980# asm 1: pand <xmm0=int6464#1,<xmm13=int6464#12
981# asm 2: pand <xmm0=%xmm0,<xmm13=%xmm11
982pand %xmm0,%xmm11
983
984# qhasm: xmm14 &= xmm5
985# asm 1: pand <xmm5=int6464#6,<xmm14=int6464#14
986# asm 2: pand <xmm5=%xmm5,<xmm14=%xmm13
987pand %xmm5,%xmm13
988
989# qhasm: xmm15 |= xmm6
990# asm 1: por <xmm6=int6464#7,<xmm15=int6464#16
991# asm 2: por <xmm6=%xmm6,<xmm15=%xmm15
992por %xmm6,%xmm15
993
994# qhasm: xmm11 ^= xmm12
995# asm 1: pxor <xmm12=int6464#11,<xmm11=int6464#9
996# asm 2: pxor <xmm12=%xmm10,<xmm11=%xmm8
997pxor %xmm10,%xmm8
998
999# qhasm: xmm10 ^= xmm13
1000# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
1001# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
1002pxor %xmm11,%xmm9
1003
1004# qhasm: xmm9 ^= xmm14
1005# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
1006# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
1007pxor %xmm13,%xmm12
1008
1009# qhasm: xmm8 ^= xmm15
1010# asm 1: pxor <xmm15=int6464#16,<xmm8=int6464#15
1011# asm 2: pxor <xmm15=%xmm15,<xmm8=%xmm14
1012pxor %xmm15,%xmm14
1013
1014# qhasm: xmm12 = xmm11
1015# asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11
1016# asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10
1017movdqa %xmm8,%xmm10
1018
1019# qhasm: xmm12 ^= xmm10
1020# asm 1: pxor <xmm10=int6464#10,<xmm12=int6464#11
1021# asm 2: pxor <xmm10=%xmm9,<xmm12=%xmm10
1022pxor %xmm9,%xmm10
1023
1024# qhasm: xmm11 &= xmm9
1025# asm 1: pand <xmm9=int6464#13,<xmm11=int6464#9
1026# asm 2: pand <xmm9=%xmm12,<xmm11=%xmm8
1027pand %xmm12,%xmm8
1028
1029# qhasm: xmm14 = xmm8
1030# asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12
1031# asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11
1032movdqa %xmm14,%xmm11
1033
1034# qhasm: xmm14 ^= xmm11
1035# asm 1: pxor <xmm11=int6464#9,<xmm14=int6464#12
1036# asm 2: pxor <xmm11=%xmm8,<xmm14=%xmm11
1037pxor %xmm8,%xmm11
1038
1039# qhasm: xmm15 = xmm12
1040# asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14
1041# asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13
1042movdqa %xmm10,%xmm13
1043
1044# qhasm: xmm15 &= xmm14
1045# asm 1: pand <xmm14=int6464#12,<xmm15=int6464#14
1046# asm 2: pand <xmm14=%xmm11,<xmm15=%xmm13
1047pand %xmm11,%xmm13
1048
1049# qhasm: xmm15 ^= xmm10
1050# asm 1: pxor <xmm10=int6464#10,<xmm15=int6464#14
1051# asm 2: pxor <xmm10=%xmm9,<xmm15=%xmm13
1052pxor %xmm9,%xmm13
1053
1054# qhasm: xmm13 = xmm9
1055# asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16
1056# asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15
1057movdqa %xmm12,%xmm15
1058
1059# qhasm: xmm13 ^= xmm8
1060# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
1061# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
1062pxor %xmm14,%xmm15
1063
1064# qhasm: xmm11 ^= xmm10
1065# asm 1: pxor <xmm10=int6464#10,<xmm11=int6464#9
1066# asm 2: pxor <xmm10=%xmm9,<xmm11=%xmm8
1067pxor %xmm9,%xmm8
1068
1069# qhasm: xmm13 &= xmm11
1070# asm 1: pand <xmm11=int6464#9,<xmm13=int6464#16
1071# asm 2: pand <xmm11=%xmm8,<xmm13=%xmm15
1072pand %xmm8,%xmm15
1073
1074# qhasm: xmm13 ^= xmm8
1075# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
1076# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
1077pxor %xmm14,%xmm15
1078
1079# qhasm: xmm9 ^= xmm13
1080# asm 1: pxor <xmm13=int6464#16,<xmm9=int6464#13
1081# asm 2: pxor <xmm13=%xmm15,<xmm9=%xmm12
1082pxor %xmm15,%xmm12
1083
1084# qhasm: xmm10 = xmm14
1085# asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9
1086# asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8
1087movdqa %xmm11,%xmm8
1088
1089# qhasm: xmm10 ^= xmm13
1090# asm 1: pxor <xmm13=int6464#16,<xmm10=int6464#9
1091# asm 2: pxor <xmm13=%xmm15,<xmm10=%xmm8
1092pxor %xmm15,%xmm8
1093
1094# qhasm: xmm10 &= xmm8
1095# asm 1: pand <xmm8=int6464#15,<xmm10=int6464#9
1096# asm 2: pand <xmm8=%xmm14,<xmm10=%xmm8
1097pand %xmm14,%xmm8
1098
1099# qhasm: xmm9 ^= xmm10
1100# asm 1: pxor <xmm10=int6464#9,<xmm9=int6464#13
1101# asm 2: pxor <xmm10=%xmm8,<xmm9=%xmm12
1102pxor %xmm8,%xmm12
1103
1104# qhasm: xmm14 ^= xmm10
1105# asm 1: pxor <xmm10=int6464#9,<xmm14=int6464#12
1106# asm 2: pxor <xmm10=%xmm8,<xmm14=%xmm11
1107pxor %xmm8,%xmm11
1108
1109# qhasm: xmm14 &= xmm15
1110# asm 1: pand <xmm15=int6464#14,<xmm14=int6464#12
1111# asm 2: pand <xmm15=%xmm13,<xmm14=%xmm11
1112pand %xmm13,%xmm11
1113
1114# qhasm: xmm14 ^= xmm12
1115# asm 1: pxor <xmm12=int6464#11,<xmm14=int6464#12
1116# asm 2: pxor <xmm12=%xmm10,<xmm14=%xmm11
1117pxor %xmm10,%xmm11
1118
1119# qhasm: xmm12 = xmm6
1120# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#9
1121# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm8
1122movdqa %xmm6,%xmm8
1123
1124# qhasm: xmm8 = xmm5
1125# asm 1: movdqa <xmm5=int6464#6,>xmm8=int6464#10
1126# asm 2: movdqa <xmm5=%xmm5,>xmm8=%xmm9
1127movdqa %xmm5,%xmm9
1128
1129# qhasm: xmm10 = xmm15
1130# asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11
1131# asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10
1132movdqa %xmm13,%xmm10
1133
1134# qhasm: xmm10 ^= xmm14
1135# asm 1: pxor <xmm14=int6464#12,<xmm10=int6464#11
1136# asm 2: pxor <xmm14=%xmm11,<xmm10=%xmm10
1137pxor %xmm11,%xmm10
1138
1139# qhasm: xmm10 &= xmm6
1140# asm 1: pand <xmm6=int6464#7,<xmm10=int6464#11
1141# asm 2: pand <xmm6=%xmm6,<xmm10=%xmm10
1142pand %xmm6,%xmm10
1143
1144# qhasm: xmm6 ^= xmm5
1145# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
1146# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
1147pxor %xmm5,%xmm6
1148
1149# qhasm: xmm6 &= xmm14
1150# asm 1: pand <xmm14=int6464#12,<xmm6=int6464#7
1151# asm 2: pand <xmm14=%xmm11,<xmm6=%xmm6
1152pand %xmm11,%xmm6
1153
1154# qhasm: xmm5 &= xmm15
1155# asm 1: pand <xmm15=int6464#14,<xmm5=int6464#6
1156# asm 2: pand <xmm15=%xmm13,<xmm5=%xmm5
1157pand %xmm13,%xmm5
1158
1159# qhasm: xmm6 ^= xmm5
1160# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
1161# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
1162pxor %xmm5,%xmm6
1163
1164# qhasm: xmm5 ^= xmm10
1165# asm 1: pxor <xmm10=int6464#11,<xmm5=int6464#6
1166# asm 2: pxor <xmm10=%xmm10,<xmm5=%xmm5
1167pxor %xmm10,%xmm5
1168
1169# qhasm: xmm12 ^= xmm0
1170# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#9
1171# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm8
1172pxor %xmm0,%xmm8
1173
1174# qhasm: xmm8 ^= xmm3
1175# asm 1: pxor <xmm3=int6464#4,<xmm8=int6464#10
1176# asm 2: pxor <xmm3=%xmm3,<xmm8=%xmm9
1177pxor %xmm3,%xmm9
1178
1179# qhasm: xmm15 ^= xmm13
1180# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
1181# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
1182pxor %xmm15,%xmm13
1183
1184# qhasm: xmm14 ^= xmm9
1185# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
1186# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
1187pxor %xmm12,%xmm11
1188
1189# qhasm: xmm11 = xmm15
1190# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
1191# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
1192movdqa %xmm13,%xmm10
1193
1194# qhasm: xmm11 ^= xmm14
1195# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
1196# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
1197pxor %xmm11,%xmm10
1198
1199# qhasm: xmm11 &= xmm12
1200# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
1201# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
1202pand %xmm8,%xmm10
1203
1204# qhasm: xmm12 ^= xmm8
1205# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
1206# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
1207pxor %xmm9,%xmm8
1208
1209# qhasm: xmm12 &= xmm14
1210# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
1211# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
1212pand %xmm11,%xmm8
1213
1214# qhasm: xmm8 &= xmm15
1215# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
1216# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
1217pand %xmm13,%xmm9
1218
1219# qhasm: xmm8 ^= xmm12
1220# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
1221# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
1222pxor %xmm8,%xmm9
1223
1224# qhasm: xmm12 ^= xmm11
1225# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
1226# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
1227pxor %xmm10,%xmm8
1228
1229# qhasm: xmm10 = xmm13
1230# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
1231# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
1232movdqa %xmm15,%xmm10
1233
1234# qhasm: xmm10 ^= xmm9
1235# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
1236# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
1237pxor %xmm12,%xmm10
1238
1239# qhasm: xmm10 &= xmm0
1240# asm 1: pand <xmm0=int6464#1,<xmm10=int6464#11
1241# asm 2: pand <xmm0=%xmm0,<xmm10=%xmm10
1242pand %xmm0,%xmm10
1243
1244# qhasm: xmm0 ^= xmm3
1245# asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1
1246# asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0
1247pxor %xmm3,%xmm0
1248
1249# qhasm: xmm0 &= xmm9
1250# asm 1: pand <xmm9=int6464#13,<xmm0=int6464#1
1251# asm 2: pand <xmm9=%xmm12,<xmm0=%xmm0
1252pand %xmm12,%xmm0
1253
1254# qhasm: xmm3 &= xmm13
1255# asm 1: pand <xmm13=int6464#16,<xmm3=int6464#4
1256# asm 2: pand <xmm13=%xmm15,<xmm3=%xmm3
1257pand %xmm15,%xmm3
1258
1259# qhasm: xmm0 ^= xmm3
1260# asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1
1261# asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0
1262pxor %xmm3,%xmm0
1263
1264# qhasm: xmm3 ^= xmm10
1265# asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4
1266# asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3
1267pxor %xmm10,%xmm3
1268
1269# qhasm: xmm6 ^= xmm12
1270# asm 1: pxor <xmm12=int6464#9,<xmm6=int6464#7
1271# asm 2: pxor <xmm12=%xmm8,<xmm6=%xmm6
1272pxor %xmm8,%xmm6
1273
1274# qhasm: xmm0 ^= xmm12
1275# asm 1: pxor <xmm12=int6464#9,<xmm0=int6464#1
1276# asm 2: pxor <xmm12=%xmm8,<xmm0=%xmm0
1277pxor %xmm8,%xmm0
1278
1279# qhasm: xmm5 ^= xmm8
1280# asm 1: pxor <xmm8=int6464#10,<xmm5=int6464#6
1281# asm 2: pxor <xmm8=%xmm9,<xmm5=%xmm5
1282pxor %xmm9,%xmm5
1283
1284# qhasm: xmm3 ^= xmm8
1285# asm 1: pxor <xmm8=int6464#10,<xmm3=int6464#4
1286# asm 2: pxor <xmm8=%xmm9,<xmm3=%xmm3
1287pxor %xmm9,%xmm3
1288
1289# qhasm: xmm12 = xmm7
1290# asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#9
1291# asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm8
1292movdqa %xmm7,%xmm8
1293
1294# qhasm: xmm8 = xmm1
1295# asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10
1296# asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9
1297movdqa %xmm1,%xmm9
1298
1299# qhasm: xmm12 ^= xmm4
1300# asm 1: pxor <xmm4=int6464#5,<xmm12=int6464#9
1301# asm 2: pxor <xmm4=%xmm4,<xmm12=%xmm8
1302pxor %xmm4,%xmm8
1303
1304# qhasm: xmm8 ^= xmm2
1305# asm 1: pxor <xmm2=int6464#3,<xmm8=int6464#10
1306# asm 2: pxor <xmm2=%xmm2,<xmm8=%xmm9
1307pxor %xmm2,%xmm9
1308
1309# qhasm: xmm11 = xmm15
1310# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
1311# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
1312movdqa %xmm13,%xmm10
1313
1314# qhasm: xmm11 ^= xmm14
1315# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
1316# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
1317pxor %xmm11,%xmm10
1318
1319# qhasm: xmm11 &= xmm12
1320# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
1321# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
1322pand %xmm8,%xmm10
1323
1324# qhasm: xmm12 ^= xmm8
1325# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
1326# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
1327pxor %xmm9,%xmm8
1328
1329# qhasm: xmm12 &= xmm14
1330# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
1331# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
1332pand %xmm11,%xmm8
1333
1334# qhasm: xmm8 &= xmm15
1335# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
1336# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
1337pand %xmm13,%xmm9
1338
1339# qhasm: xmm8 ^= xmm12
1340# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
1341# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
1342pxor %xmm8,%xmm9
1343
1344# qhasm: xmm12 ^= xmm11
1345# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
1346# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
1347pxor %xmm10,%xmm8
1348
1349# qhasm: xmm10 = xmm13
1350# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
1351# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
1352movdqa %xmm15,%xmm10
1353
1354# qhasm: xmm10 ^= xmm9
1355# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
1356# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
1357pxor %xmm12,%xmm10
1358
1359# qhasm: xmm10 &= xmm4
1360# asm 1: pand <xmm4=int6464#5,<xmm10=int6464#11
1361# asm 2: pand <xmm4=%xmm4,<xmm10=%xmm10
1362pand %xmm4,%xmm10
1363
1364# qhasm: xmm4 ^= xmm2
1365# asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5
1366# asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4
1367pxor %xmm2,%xmm4
1368
1369# qhasm: xmm4 &= xmm9
1370# asm 1: pand <xmm9=int6464#13,<xmm4=int6464#5
1371# asm 2: pand <xmm9=%xmm12,<xmm4=%xmm4
1372pand %xmm12,%xmm4
1373
1374# qhasm: xmm2 &= xmm13
1375# asm 1: pand <xmm13=int6464#16,<xmm2=int6464#3
1376# asm 2: pand <xmm13=%xmm15,<xmm2=%xmm2
1377pand %xmm15,%xmm2
1378
1379# qhasm: xmm4 ^= xmm2
1380# asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5
1381# asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4
1382pxor %xmm2,%xmm4
1383
1384# qhasm: xmm2 ^= xmm10
1385# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3
1386# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2
1387pxor %xmm10,%xmm2
1388
1389# qhasm: xmm15 ^= xmm13
1390# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
1391# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
1392pxor %xmm15,%xmm13
1393
1394# qhasm: xmm14 ^= xmm9
1395# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
1396# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
1397pxor %xmm12,%xmm11
1398
1399# qhasm: xmm11 = xmm15
1400# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
1401# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
1402movdqa %xmm13,%xmm10
1403
1404# qhasm: xmm11 ^= xmm14
1405# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
1406# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
1407pxor %xmm11,%xmm10
1408
1409# qhasm: xmm11 &= xmm7
1410# asm 1: pand <xmm7=int6464#8,<xmm11=int6464#11
1411# asm 2: pand <xmm7=%xmm7,<xmm11=%xmm10
1412pand %xmm7,%xmm10
1413
1414# qhasm: xmm7 ^= xmm1
1415# asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8
1416# asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7
1417pxor %xmm1,%xmm7
1418
1419# qhasm: xmm7 &= xmm14
1420# asm 1: pand <xmm14=int6464#12,<xmm7=int6464#8
1421# asm 2: pand <xmm14=%xmm11,<xmm7=%xmm7
1422pand %xmm11,%xmm7
1423
1424# qhasm: xmm1 &= xmm15
1425# asm 1: pand <xmm15=int6464#14,<xmm1=int6464#2
1426# asm 2: pand <xmm15=%xmm13,<xmm1=%xmm1
1427pand %xmm13,%xmm1
1428
1429# qhasm: xmm7 ^= xmm1
1430# asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8
1431# asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7
1432pxor %xmm1,%xmm7
1433
1434# qhasm: xmm1 ^= xmm11
1435# asm 1: pxor <xmm11=int6464#11,<xmm1=int6464#2
1436# asm 2: pxor <xmm11=%xmm10,<xmm1=%xmm1
1437pxor %xmm10,%xmm1
1438
1439# qhasm: xmm7 ^= xmm12
1440# asm 1: pxor <xmm12=int6464#9,<xmm7=int6464#8
1441# asm 2: pxor <xmm12=%xmm8,<xmm7=%xmm7
1442pxor %xmm8,%xmm7
1443
1444# qhasm: xmm4 ^= xmm12
1445# asm 1: pxor <xmm12=int6464#9,<xmm4=int6464#5
1446# asm 2: pxor <xmm12=%xmm8,<xmm4=%xmm4
1447pxor %xmm8,%xmm4
1448
1449# qhasm: xmm1 ^= xmm8
1450# asm 1: pxor <xmm8=int6464#10,<xmm1=int6464#2
1451# asm 2: pxor <xmm8=%xmm9,<xmm1=%xmm1
1452pxor %xmm9,%xmm1
1453
1454# qhasm: xmm2 ^= xmm8
1455# asm 1: pxor <xmm8=int6464#10,<xmm2=int6464#3
1456# asm 2: pxor <xmm8=%xmm9,<xmm2=%xmm2
1457pxor %xmm9,%xmm2
1458
1459# qhasm: xmm7 ^= xmm0
1460# asm 1: pxor <xmm0=int6464#1,<xmm7=int6464#8
1461# asm 2: pxor <xmm0=%xmm0,<xmm7=%xmm7
1462pxor %xmm0,%xmm7
1463
1464# qhasm: xmm1 ^= xmm6
1465# asm 1: pxor <xmm6=int6464#7,<xmm1=int6464#2
1466# asm 2: pxor <xmm6=%xmm6,<xmm1=%xmm1
1467pxor %xmm6,%xmm1
1468
1469# qhasm: xmm4 ^= xmm7
1470# asm 1: pxor <xmm7=int6464#8,<xmm4=int6464#5
1471# asm 2: pxor <xmm7=%xmm7,<xmm4=%xmm4
1472pxor %xmm7,%xmm4
1473
1474# qhasm: xmm6 ^= xmm0
1475# asm 1: pxor <xmm0=int6464#1,<xmm6=int6464#7
1476# asm 2: pxor <xmm0=%xmm0,<xmm6=%xmm6
1477pxor %xmm0,%xmm6
1478
1479# qhasm: xmm0 ^= xmm1
1480# asm 1: pxor <xmm1=int6464#2,<xmm0=int6464#1
1481# asm 2: pxor <xmm1=%xmm1,<xmm0=%xmm0
1482pxor %xmm1,%xmm0
1483
1484# qhasm: xmm1 ^= xmm5
1485# asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2
1486# asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1
1487pxor %xmm5,%xmm1
1488
1489# qhasm: xmm5 ^= xmm2
1490# asm 1: pxor <xmm2=int6464#3,<xmm5=int6464#6
1491# asm 2: pxor <xmm2=%xmm2,<xmm5=%xmm5
1492pxor %xmm2,%xmm5
1493
1494# qhasm: xmm4 ^= xmm5
1495# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
1496# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
1497pxor %xmm5,%xmm4
1498
1499# qhasm: xmm2 ^= xmm3
1500# asm 1: pxor <xmm3=int6464#4,<xmm2=int6464#3
1501# asm 2: pxor <xmm3=%xmm3,<xmm2=%xmm2
1502pxor %xmm3,%xmm2
1503
1504# qhasm: xmm3 ^= xmm5
1505# asm 1: pxor <xmm5=int6464#6,<xmm3=int6464#4
1506# asm 2: pxor <xmm5=%xmm5,<xmm3=%xmm3
1507pxor %xmm5,%xmm3
1508
1509# qhasm: xmm6 ^= xmm3
1510# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
1511# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
1512pxor %xmm3,%xmm6
1513
1514# qhasm: xmm0 ^= RCON
1515# asm 1: pxor RCON,<xmm0=int6464#1
1516# asm 2: pxor RCON,<xmm0=%xmm0
1517pxor RCON,%xmm0
1518
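# Key expansion: RCON (all-ones in the high dword) is xored into a single
# plane, which in the bitsliced representation presumably injects the
# round-constant bit of the AES key schedule at the selected byte
# positions.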
1519# qhasm: shuffle bytes of xmm0 by EXPB0
1520# asm 1: pshufb EXPB0,<xmm0=int6464#1
1521# asm 2: pshufb EXPB0,<xmm0=%xmm0
1522pshufb EXPB0,%xmm0
1523
1524# qhasm: shuffle bytes of xmm1 by EXPB0
1525# asm 1: pshufb EXPB0,<xmm1=int6464#2
1526# asm 2: pshufb EXPB0,<xmm1=%xmm1
1527pshufb EXPB0,%xmm1
1528
1529# qhasm: shuffle bytes of xmm4 by EXPB0
1530# asm 1: pshufb EXPB0,<xmm4=int6464#5
1531# asm 2: pshufb EXPB0,<xmm4=%xmm4
1532pshufb EXPB0,%xmm4
1533
1534# qhasm: shuffle bytes of xmm6 by EXPB0
1535# asm 1: pshufb EXPB0,<xmm6=int6464#7
1536# asm 2: pshufb EXPB0,<xmm6=%xmm6
1537pshufb EXPB0,%xmm6
1538
1539# qhasm: shuffle bytes of xmm3 by EXPB0
1540# asm 1: pshufb EXPB0,<xmm3=int6464#4
1541# asm 2: pshufb EXPB0,<xmm3=%xmm3
1542pshufb EXPB0,%xmm3
1543
1544# qhasm: shuffle bytes of xmm7 by EXPB0
1545# asm 1: pshufb EXPB0,<xmm7=int6464#8
1546# asm 2: pshufb EXPB0,<xmm7=%xmm7
1547pshufb EXPB0,%xmm7
1548
1549# qhasm: shuffle bytes of xmm2 by EXPB0
1550# asm 1: pshufb EXPB0,<xmm2=int6464#3
1551# asm 2: pshufb EXPB0,<xmm2=%xmm2
1552pshufb EXPB0,%xmm2
1553
1554# qhasm: shuffle bytes of xmm5 by EXPB0
1555# asm 1: pshufb EXPB0,<xmm5=int6464#6
1556# asm 2: pshufb EXPB0,<xmm5=%xmm5
1557pshufb EXPB0,%xmm5
1558
1559# qhasm: xmm8 = *(int128 *)(c + 0)
1560# asm 1: movdqa 0(<c=int64#1),>xmm8=int6464#9
1561# asm 2: movdqa 0(<c=%rdi),>xmm8=%xmm8
1562movdqa 0(%rdi),%xmm8
1563
1564# qhasm: xmm9 = *(int128 *)(c + 16)
1565# asm 1: movdqa 16(<c=int64#1),>xmm9=int6464#10
1566# asm 2: movdqa 16(<c=%rdi),>xmm9=%xmm9
1567movdqa 16(%rdi),%xmm9
1568
1569# qhasm: xmm10 = *(int128 *)(c + 32)
1570# asm 1: movdqa 32(<c=int64#1),>xmm10=int6464#11
1571# asm 2: movdqa 32(<c=%rdi),>xmm10=%xmm10
1572movdqa 32(%rdi),%xmm10
1573
1574# qhasm: xmm11 = *(int128 *)(c + 48)
1575# asm 1: movdqa 48(<c=int64#1),>xmm11=int6464#12
1576# asm 2: movdqa 48(<c=%rdi),>xmm11=%xmm11
1577movdqa 48(%rdi),%xmm11
1578
1579# qhasm: xmm12 = *(int128 *)(c + 64)
1580# asm 1: movdqa 64(<c=int64#1),>xmm12=int6464#13
1581# asm 2: movdqa 64(<c=%rdi),>xmm12=%xmm12
1582movdqa 64(%rdi),%xmm12
1583
1584# qhasm: xmm13 = *(int128 *)(c + 80)
1585# asm 1: movdqa 80(<c=int64#1),>xmm13=int6464#14
1586# asm 2: movdqa 80(<c=%rdi),>xmm13=%xmm13
1587movdqa 80(%rdi),%xmm13
1588
1589# qhasm: xmm14 = *(int128 *)(c + 96)
1590# asm 1: movdqa 96(<c=int64#1),>xmm14=int6464#15
1591# asm 2: movdqa 96(<c=%rdi),>xmm14=%xmm14
1592movdqa 96(%rdi),%xmm14
1593
1594# qhasm: xmm15 = *(int128 *)(c + 112)
1595# asm 1: movdqa 112(<c=int64#1),>xmm15=int6464#16
1596# asm 2: movdqa 112(<c=%rdi),>xmm15=%xmm15
1597movdqa 112(%rdi),%xmm15
1598
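# The eight planes of the previous round key are reloaded from
# c + 0 .. c + 112 and xored into the freshly substituted planes below:
# the w[i] = w[i-4] ^ temp step of the AES key schedule, carried out
# plane by plane.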
1599# qhasm: xmm0 ^= xmm8
1600# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
1601# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
1602pxor %xmm8,%xmm0
1603
1604# qhasm: xmm1 ^= xmm9
1605# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
1606# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
1607pxor %xmm9,%xmm1
1608
1609# qhasm: xmm4 ^= xmm10
1610# asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5
1611# asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4
1612pxor %xmm10,%xmm4
1613
1614# qhasm: xmm6 ^= xmm11
1615# asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7
1616# asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6
1617pxor %xmm11,%xmm6
1618
1619# qhasm: xmm3 ^= xmm12
1620# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4
1621# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3
1622pxor %xmm12,%xmm3
1623
1624# qhasm: xmm7 ^= xmm13
1625# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
1626# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
1627pxor %xmm13,%xmm7
1628
1629# qhasm: xmm2 ^= xmm14
1630# asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3
1631# asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2
1632pxor %xmm14,%xmm2
1633
1634# qhasm: xmm5 ^= xmm15
1635# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
1636# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
1637pxor %xmm15,%xmm5
1638
1639# qhasm: uint32323232 xmm8 >>= 8
1640# asm 1: psrld $8,<xmm8=int6464#9
1641# asm 2: psrld $8,<xmm8=%xmm8
1642psrld $8,%xmm8
1643
1644# qhasm: uint32323232 xmm9 >>= 8
1645# asm 1: psrld $8,<xmm9=int6464#10
1646# asm 2: psrld $8,<xmm9=%xmm9
1647psrld $8,%xmm9
1648
1649# qhasm: uint32323232 xmm10 >>= 8
1650# asm 1: psrld $8,<xmm10=int6464#11
1651# asm 2: psrld $8,<xmm10=%xmm10
1652psrld $8,%xmm10
1653
1654# qhasm: uint32323232 xmm11 >>= 8
1655# asm 1: psrld $8,<xmm11=int6464#12
1656# asm 2: psrld $8,<xmm11=%xmm11
1657psrld $8,%xmm11
1658
1659# qhasm: uint32323232 xmm12 >>= 8
1660# asm 1: psrld $8,<xmm12=int6464#13
1661# asm 2: psrld $8,<xmm12=%xmm12
1662psrld $8,%xmm12
1663
1664# qhasm: uint32323232 xmm13 >>= 8
1665# asm 1: psrld $8,<xmm13=int6464#14
1666# asm 2: psrld $8,<xmm13=%xmm13
1667psrld $8,%xmm13
1668
1669# qhasm: uint32323232 xmm14 >>= 8
1670# asm 1: psrld $8,<xmm14=int6464#15
1671# asm 2: psrld $8,<xmm14=%xmm14
1672psrld $8,%xmm14
1673
1674# qhasm: uint32323232 xmm15 >>= 8
1675# asm 1: psrld $8,<xmm15=int6464#16
1676# asm 2: psrld $8,<xmm15=%xmm15
1677psrld $8,%xmm15
1678
1679# qhasm: xmm0 ^= xmm8
1680# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
1681# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
1682pxor %xmm8,%xmm0
1683
1684# qhasm: xmm1 ^= xmm9
1685# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
1686# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
1687pxor %xmm9,%xmm1
1688
1689# qhasm: xmm4 ^= xmm10
1690# asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5
1691# asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4
1692pxor %xmm10,%xmm4
1693
1694# qhasm: xmm6 ^= xmm11
1695# asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7
1696# asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6
1697pxor %xmm11,%xmm6
1698
1699# qhasm: xmm3 ^= xmm12
1700# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4
1701# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3
1702pxor %xmm12,%xmm3
1703
1704# qhasm: xmm7 ^= xmm13
1705# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
1706# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
1707pxor %xmm13,%xmm7
1708
1709# qhasm: xmm2 ^= xmm14
1710# asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3
1711# asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2
1712pxor %xmm14,%xmm2
1713
1714# qhasm: xmm5 ^= xmm15
1715# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
1716# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
1717pxor %xmm15,%xmm5
1718
1719# qhasm: uint32323232 xmm8 >>= 8
1720# asm 1: psrld $8,<xmm8=int6464#9
1721# asm 2: psrld $8,<xmm8=%xmm8
1722psrld $8,%xmm8
1723
1724# qhasm: uint32323232 xmm9 >>= 8
1725# asm 1: psrld $8,<xmm9=int6464#10
1726# asm 2: psrld $8,<xmm9=%xmm9
1727psrld $8,%xmm9
1728
1729# qhasm: uint32323232 xmm10 >>= 8
1730# asm 1: psrld $8,<xmm10=int6464#11
1731# asm 2: psrld $8,<xmm10=%xmm10
1732psrld $8,%xmm10
1733
1734# qhasm: uint32323232 xmm11 >>= 8
1735# asm 1: psrld $8,<xmm11=int6464#12
1736# asm 2: psrld $8,<xmm11=%xmm11
1737psrld $8,%xmm11
1738
1739# qhasm: uint32323232 xmm12 >>= 8
1740# asm 1: psrld $8,<xmm12=int6464#13
1741# asm 2: psrld $8,<xmm12=%xmm12
1742psrld $8,%xmm12
1743
1744# qhasm: uint32323232 xmm13 >>= 8
1745# asm 1: psrld $8,<xmm13=int6464#14
1746# asm 2: psrld $8,<xmm13=%xmm13
1747psrld $8,%xmm13
1748
1749# qhasm: uint32323232 xmm14 >>= 8
1750# asm 1: psrld $8,<xmm14=int6464#15
1751# asm 2: psrld $8,<xmm14=%xmm14
1752psrld $8,%xmm14
1753
1754# qhasm: uint32323232 xmm15 >>= 8
1755# asm 1: psrld $8,<xmm15=int6464#16
1756# asm 2: psrld $8,<xmm15=%xmm15
1757psrld $8,%xmm15
1758
1759# qhasm: xmm0 ^= xmm8
1760# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
1761# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
1762pxor %xmm8,%xmm0
1763
1764# qhasm: xmm1 ^= xmm9
1765# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
1766# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
1767pxor %xmm9,%xmm1
1768
1769# qhasm: xmm4 ^= xmm10
1770# asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5
1771# asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4
1772pxor %xmm10,%xmm4
1773
1774# qhasm: xmm6 ^= xmm11
1775# asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7
1776# asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6
1777pxor %xmm11,%xmm6
1778
1779# qhasm: xmm3 ^= xmm12
1780# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4
1781# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3
1782pxor %xmm12,%xmm3
1783
1784# qhasm: xmm7 ^= xmm13
1785# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
1786# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
1787pxor %xmm13,%xmm7
1788
1789# qhasm: xmm2 ^= xmm14
1790# asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3
1791# asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2
1792pxor %xmm14,%xmm2
1793
1794# qhasm: xmm5 ^= xmm15
1795# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
1796# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
1797pxor %xmm15,%xmm5
1798
1799# qhasm: uint32323232 xmm8 >>= 8
1800# asm 1: psrld $8,<xmm8=int6464#9
1801# asm 2: psrld $8,<xmm8=%xmm8
1802psrld $8,%xmm8
1803
1804# qhasm: uint32323232 xmm9 >>= 8
1805# asm 1: psrld $8,<xmm9=int6464#10
1806# asm 2: psrld $8,<xmm9=%xmm9
1807psrld $8,%xmm9
1808
1809# qhasm: uint32323232 xmm10 >>= 8
1810# asm 1: psrld $8,<xmm10=int6464#11
1811# asm 2: psrld $8,<xmm10=%xmm10
1812psrld $8,%xmm10
1813
1814# qhasm: uint32323232 xmm11 >>= 8
1815# asm 1: psrld $8,<xmm11=int6464#12
1816# asm 2: psrld $8,<xmm11=%xmm11
1817psrld $8,%xmm11
1818
1819# qhasm: uint32323232 xmm12 >>= 8
1820# asm 1: psrld $8,<xmm12=int6464#13
1821# asm 2: psrld $8,<xmm12=%xmm12
1822psrld $8,%xmm12
1823
1824# qhasm: uint32323232 xmm13 >>= 8
1825# asm 1: psrld $8,<xmm13=int6464#14
1826# asm 2: psrld $8,<xmm13=%xmm13
1827psrld $8,%xmm13
1828
1829# qhasm: uint32323232 xmm14 >>= 8
1830# asm 1: psrld $8,<xmm14=int6464#15
1831# asm 2: psrld $8,<xmm14=%xmm14
1832psrld $8,%xmm14
1833
1834# qhasm: uint32323232 xmm15 >>= 8
1835# asm 1: psrld $8,<xmm15=int6464#16
1836# asm 2: psrld $8,<xmm15=%xmm15
1837psrld $8,%xmm15
1838
1839# qhasm: xmm0 ^= xmm8
1840# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
1841# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
1842pxor %xmm8,%xmm0
1843
1844# qhasm: xmm1 ^= xmm9
1845# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
1846# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
1847pxor %xmm9,%xmm1
1848
1849# qhasm: xmm4 ^= xmm10
1850# asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5
1851# asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4
1852pxor %xmm10,%xmm4
1853
1854# qhasm: xmm6 ^= xmm11
1855# asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7
1856# asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6
1857pxor %xmm11,%xmm6
1858
1859# qhasm: xmm3 ^= xmm12
1860# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4
1861# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3
1862pxor %xmm12,%xmm3
1863
1864# qhasm: xmm7 ^= xmm13
1865# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
1866# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
1867pxor %xmm13,%xmm7
1868
1869# qhasm: xmm2 ^= xmm14
1870# asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3
1871# asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2
1872pxor %xmm14,%xmm2
1873
1874# qhasm: xmm5 ^= xmm15
1875# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
1876# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
1877pxor %xmm15,%xmm5
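# The three repetitions above of "shift each 32-bit lane right by 8, xor
# back in" spread the xor across the bytes of each key word; per plane
# this appears to realize the chained w1 ^= w0, w2 ^= w1, w3 ^= w2
# updates of the key schedule in one pass.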
1878
1879# qhasm: *(int128 *)(c + 128) = xmm0
1880# asm 1: movdqa <xmm0=int6464#1,128(<c=int64#1)
1881# asm 2: movdqa <xmm0=%xmm0,128(<c=%rdi)
1882movdqa %xmm0,128(%rdi)
1883
1884# qhasm: *(int128 *)(c + 144) = xmm1
1885# asm 1: movdqa <xmm1=int6464#2,144(<c=int64#1)
1886# asm 2: movdqa <xmm1=%xmm1,144(<c=%rdi)
1887movdqa %xmm1,144(%rdi)
1888
1889# qhasm: *(int128 *)(c + 160) = xmm4
1890# asm 1: movdqa <xmm4=int6464#5,160(<c=int64#1)
1891# asm 2: movdqa <xmm4=%xmm4,160(<c=%rdi)
1892movdqa %xmm4,160(%rdi)
1893
1894# qhasm: *(int128 *)(c + 176) = xmm6
1895# asm 1: movdqa <xmm6=int6464#7,176(<c=int64#1)
1896# asm 2: movdqa <xmm6=%xmm6,176(<c=%rdi)
1897movdqa %xmm6,176(%rdi)
1898
1899# qhasm: *(int128 *)(c + 192) = xmm3
1900# asm 1: movdqa <xmm3=int6464#4,192(<c=int64#1)
1901# asm 2: movdqa <xmm3=%xmm3,192(<c=%rdi)
1902movdqa %xmm3,192(%rdi)
1903
1904# qhasm: *(int128 *)(c + 208) = xmm7
1905# asm 1: movdqa <xmm7=int6464#8,208(<c=int64#1)
1906# asm 2: movdqa <xmm7=%xmm7,208(<c=%rdi)
1907movdqa %xmm7,208(%rdi)
1908
1909# qhasm: *(int128 *)(c + 224) = xmm2
1910# asm 1: movdqa <xmm2=int6464#3,224(<c=int64#1)
1911# asm 2: movdqa <xmm2=%xmm2,224(<c=%rdi)
1912movdqa %xmm2,224(%rdi)
1913
1914# qhasm: *(int128 *)(c + 240) = xmm5
1915# asm 1: movdqa <xmm5=int6464#6,240(<c=int64#1)
1916# asm 2: movdqa <xmm5=%xmm5,240(<c=%rdi)
1917movdqa %xmm5,240(%rdi)
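# Round key 1 is now stored at c + 128 .. c + 240. Each expanded round
# key occupies 128 bytes (eight 16-byte planes), so the full AES-128
# schedule of 11 round keys presumably spans 11 * 128 bytes from c.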
1918
1919# qhasm: xmm0 ^= ONE
1920# asm 1: pxor ONE,<xmm0=int6464#1
1921# asm 2: pxor ONE,<xmm0=%xmm0
1922pxor ONE,%xmm0
1923
1924# qhasm: xmm1 ^= ONE
1925# asm 1: pxor ONE,<xmm1=int6464#2
1926# asm 2: pxor ONE,<xmm1=%xmm1
1927pxor ONE,%xmm1
1928
1929# qhasm: xmm7 ^= ONE
1930# asm 1: pxor ONE,<xmm7=int6464#8
1931# asm 2: pxor ONE,<xmm7=%xmm7
1932pxor ONE,%xmm7
1933
1934# qhasm: xmm2 ^= ONE
1935# asm 1: pxor ONE,<xmm2=int6464#3
1936# asm 2: pxor ONE,<xmm2=%xmm2
1937pxor ONE,%xmm2
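# The pxor instructions with ONE (all-ones) complement four of the eight
# planes; the S-box circuit presumably leaves these outputs inverted, and
# the missing NOT gates are applied here where they cost one xor each.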
1938
1939# qhasm: shuffle bytes of xmm0 by ROTB
1940# asm 1: pshufb ROTB,<xmm0=int6464#1
1941# asm 2: pshufb ROTB,<xmm0=%xmm0
1942pshufb ROTB,%xmm0
1943
1944# qhasm: shuffle bytes of xmm1 by ROTB
1945# asm 1: pshufb ROTB,<xmm1=int6464#2
1946# asm 2: pshufb ROTB,<xmm1=%xmm1
1947pshufb ROTB,%xmm1
1948
1949# qhasm: shuffle bytes of xmm4 by ROTB
1950# asm 1: pshufb ROTB,<xmm4=int6464#5
1951# asm 2: pshufb ROTB,<xmm4=%xmm4
1952pshufb ROTB,%xmm4
1953
1954# qhasm: shuffle bytes of xmm6 by ROTB
1955# asm 1: pshufb ROTB,<xmm6=int6464#7
1956# asm 2: pshufb ROTB,<xmm6=%xmm6
1957pshufb ROTB,%xmm6
1958
1959# qhasm: shuffle bytes of xmm3 by ROTB
1960# asm 1: pshufb ROTB,<xmm3=int6464#4
1961# asm 2: pshufb ROTB,<xmm3=%xmm3
1962pshufb ROTB,%xmm3
1963
1964# qhasm: shuffle bytes of xmm7 by ROTB
1965# asm 1: pshufb ROTB,<xmm7=int6464#8
1966# asm 2: pshufb ROTB,<xmm7=%xmm7
1967pshufb ROTB,%xmm7
1968
1969# qhasm: shuffle bytes of xmm2 by ROTB
1970# asm 1: pshufb ROTB,<xmm2=int6464#3
1971# asm 2: pshufb ROTB,<xmm2=%xmm2
1972pshufb ROTB,%xmm2
1973
1974# qhasm: shuffle bytes of xmm5 by ROTB
1975# asm 1: pshufb ROTB,<xmm5=int6464#6
1976# asm 2: pshufb ROTB,<xmm5=%xmm5
1977pshufb ROTB,%xmm5
1978
1979# qhasm: xmm7 ^= xmm2
1980# asm 1: pxor <xmm2=int6464#3,<xmm7=int6464#8
1981# asm 2: pxor <xmm2=%xmm2,<xmm7=%xmm7
1982pxor %xmm2,%xmm7
1983
1984# qhasm: xmm4 ^= xmm1
1985# asm 1: pxor <xmm1=int6464#2,<xmm4=int6464#5
1986# asm 2: pxor <xmm1=%xmm1,<xmm4=%xmm4
1987pxor %xmm1,%xmm4
1988
1989# qhasm: xmm7 ^= xmm0
1990# asm 1: pxor <xmm0=int6464#1,<xmm7=int6464#8
1991# asm 2: pxor <xmm0=%xmm0,<xmm7=%xmm7
1992pxor %xmm0,%xmm7
1993
1994# qhasm: xmm2 ^= xmm4
1995# asm 1: pxor <xmm4=int6464#5,<xmm2=int6464#3
1996# asm 2: pxor <xmm4=%xmm4,<xmm2=%xmm2
1997pxor %xmm4,%xmm2
1998
1999# qhasm: xmm6 ^= xmm0
2000# asm 1: pxor <xmm0=int6464#1,<xmm6=int6464#7
2001# asm 2: pxor <xmm0=%xmm0,<xmm6=%xmm6
2002pxor %xmm0,%xmm6
2003
2004# qhasm: xmm2 ^= xmm6
2005# asm 1: pxor <xmm6=int6464#7,<xmm2=int6464#3
2006# asm 2: pxor <xmm6=%xmm6,<xmm2=%xmm2
2007pxor %xmm6,%xmm2
2008
2009# qhasm: xmm6 ^= xmm5
2010# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
2011# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
2012pxor %xmm5,%xmm6
2013
2014# qhasm: xmm6 ^= xmm3
2015# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
2016# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
2017pxor %xmm3,%xmm6
2018
2019# qhasm: xmm5 ^= xmm7
2020# asm 1: pxor <xmm7=int6464#8,<xmm5=int6464#6
2021# asm 2: pxor <xmm7=%xmm7,<xmm5=%xmm5
2022pxor %xmm7,%xmm5
2023
2024# qhasm: xmm6 ^= xmm1
2025# asm 1: pxor <xmm1=int6464#2,<xmm6=int6464#7
2026# asm 2: pxor <xmm1=%xmm1,<xmm6=%xmm6
2027pxor %xmm1,%xmm6
2028
2029# qhasm: xmm3 ^= xmm7
2030# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4
2031# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3
2032pxor %xmm7,%xmm3
2033
2034# qhasm: xmm4 ^= xmm5
2035# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
2036# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
2037pxor %xmm5,%xmm4
2038
2039# qhasm: xmm1 ^= xmm7
2040# asm 1: pxor <xmm7=int6464#8,<xmm1=int6464#2
2041# asm 2: pxor <xmm7=%xmm7,<xmm1=%xmm1
2042pxor %xmm7,%xmm1
2043
2044# qhasm: xmm11 = xmm5
2045# asm 1: movdqa <xmm5=int6464#6,>xmm11=int6464#9
2046# asm 2: movdqa <xmm5=%xmm5,>xmm11=%xmm8
2047movdqa %xmm5,%xmm8
2048
2049# qhasm: xmm10 = xmm1
2050# asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10
2051# asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9
2052movdqa %xmm1,%xmm9
2053
2054# qhasm: xmm9 = xmm7
2055# asm 1: movdqa <xmm7=int6464#8,>xmm9=int6464#11
2056# asm 2: movdqa <xmm7=%xmm7,>xmm9=%xmm10
2057movdqa %xmm7,%xmm10
2058
2059# qhasm: xmm13 = xmm4
2060# asm 1: movdqa <xmm4=int6464#5,>xmm13=int6464#12
2061# asm 2: movdqa <xmm4=%xmm4,>xmm13=%xmm11
2062movdqa %xmm4,%xmm11
2063
2064# qhasm: xmm12 = xmm2
2065# asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#13
2066# asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm12
2067movdqa %xmm2,%xmm12
2068
2069# qhasm: xmm11 ^= xmm3
2070# asm 1: pxor <xmm3=int6464#4,<xmm11=int6464#9
2071# asm 2: pxor <xmm3=%xmm3,<xmm11=%xmm8
2072pxor %xmm3,%xmm8
2073
2074# qhasm: xmm10 ^= xmm4
2075# asm 1: pxor <xmm4=int6464#5,<xmm10=int6464#10
2076# asm 2: pxor <xmm4=%xmm4,<xmm10=%xmm9
2077pxor %xmm4,%xmm9
2078
2079# qhasm: xmm9 ^= xmm6
2080# asm 1: pxor <xmm6=int6464#7,<xmm9=int6464#11
2081# asm 2: pxor <xmm6=%xmm6,<xmm9=%xmm10
2082pxor %xmm6,%xmm10
2083
2084# qhasm: xmm13 ^= xmm3
2085# asm 1: pxor <xmm3=int6464#4,<xmm13=int6464#12
2086# asm 2: pxor <xmm3=%xmm3,<xmm13=%xmm11
2087pxor %xmm3,%xmm11
2088
2089# qhasm: xmm12 ^= xmm0
2090# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13
2091# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12
2092pxor %xmm0,%xmm12
2093
2094# qhasm: xmm14 = xmm11
2095# asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14
2096# asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13
2097movdqa %xmm8,%xmm13
2098
2099# qhasm: xmm8 = xmm10
2100# asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15
2101# asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14
2102movdqa %xmm9,%xmm14
2103
2104# qhasm: xmm15 = xmm11
2105# asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16
2106# asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15
2107movdqa %xmm8,%xmm15
2108
2109# qhasm: xmm10 |= xmm9
2110# asm 1: por <xmm9=int6464#11,<xmm10=int6464#10
2111# asm 2: por <xmm9=%xmm10,<xmm10=%xmm9
2112por %xmm10,%xmm9
2113
2114# qhasm: xmm11 |= xmm12
2115# asm 1: por <xmm12=int6464#13,<xmm11=int6464#9
2116# asm 2: por <xmm12=%xmm12,<xmm11=%xmm8
2117por %xmm12,%xmm8
2118
2119# qhasm: xmm15 ^= xmm8
2120# asm 1: pxor <xmm8=int6464#15,<xmm15=int6464#16
2121# asm 2: pxor <xmm8=%xmm14,<xmm15=%xmm15
2122pxor %xmm14,%xmm15
2123
2124# qhasm: xmm14 &= xmm12
2125# asm 1: pand <xmm12=int6464#13,<xmm14=int6464#14
2126# asm 2: pand <xmm12=%xmm12,<xmm14=%xmm13
2127pand %xmm12,%xmm13
2128
2129# qhasm: xmm8 &= xmm9
2130# asm 1: pand <xmm9=int6464#11,<xmm8=int6464#15
2131# asm 2: pand <xmm9=%xmm10,<xmm8=%xmm14
2132pand %xmm10,%xmm14
2133
2134# qhasm: xmm12 ^= xmm9
2135# asm 1: pxor <xmm9=int6464#11,<xmm12=int6464#13
2136# asm 2: pxor <xmm9=%xmm10,<xmm12=%xmm12
2137pxor %xmm10,%xmm12
2138
2139# qhasm: xmm15 &= xmm12
2140# asm 1: pand <xmm12=int6464#13,<xmm15=int6464#16
2141# asm 2: pand <xmm12=%xmm12,<xmm15=%xmm15
2142pand %xmm12,%xmm15
2143
2144# qhasm: xmm12 = xmm6
2145# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#11
2146# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm10
2147movdqa %xmm6,%xmm10
2148
2149# qhasm: xmm12 ^= xmm0
2150# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#11
2151# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm10
2152pxor %xmm0,%xmm10
2153
2154# qhasm: xmm13 &= xmm12
2155# asm 1: pand <xmm12=int6464#11,<xmm13=int6464#12
2156# asm 2: pand <xmm12=%xmm10,<xmm13=%xmm11
2157pand %xmm10,%xmm11
2158
2159# qhasm: xmm11 ^= xmm13
2160# asm 1: pxor <xmm13=int6464#12,<xmm11=int6464#9
2161# asm 2: pxor <xmm13=%xmm11,<xmm11=%xmm8
2162pxor %xmm11,%xmm8
2163
2164# qhasm: xmm10 ^= xmm13
2165# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
2166# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
2167pxor %xmm11,%xmm9
2168
2169# qhasm: xmm13 = xmm5
2170# asm 1: movdqa <xmm5=int6464#6,>xmm13=int6464#11
2171# asm 2: movdqa <xmm5=%xmm5,>xmm13=%xmm10
2172movdqa %xmm5,%xmm10
2173
2174# qhasm: xmm13 ^= xmm1
2175# asm 1: pxor <xmm1=int6464#2,<xmm13=int6464#11
2176# asm 2: pxor <xmm1=%xmm1,<xmm13=%xmm10
2177pxor %xmm1,%xmm10
2178
2179# qhasm: xmm12 = xmm7
2180# asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#12
2181# asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm11
2182movdqa %xmm7,%xmm11
2183
2184# qhasm: xmm9 = xmm13
2185# asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13
2186# asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12
2187movdqa %xmm10,%xmm12
2188
2189# qhasm: xmm12 ^= xmm2
2190# asm 1: pxor <xmm2=int6464#3,<xmm12=int6464#12
2191# asm 2: pxor <xmm2=%xmm2,<xmm12=%xmm11
2192pxor %xmm2,%xmm11
2193
2194# qhasm: xmm9 |= xmm12
2195# asm 1: por <xmm12=int6464#12,<xmm9=int6464#13
2196# asm 2: por <xmm12=%xmm11,<xmm9=%xmm12
2197por %xmm11,%xmm12
2198
2199# qhasm: xmm13 &= xmm12
2200# asm 1: pand <xmm12=int6464#12,<xmm13=int6464#11
2201# asm 2: pand <xmm12=%xmm11,<xmm13=%xmm10
2202pand %xmm11,%xmm10
2203
2204# qhasm: xmm8 ^= xmm13
2205# asm 1: pxor <xmm13=int6464#11,<xmm8=int6464#15
2206# asm 2: pxor <xmm13=%xmm10,<xmm8=%xmm14
2207pxor %xmm10,%xmm14
2208
2209# qhasm: xmm11 ^= xmm15
2210# asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#9
2211# asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm8
2212pxor %xmm15,%xmm8
2213
2214# qhasm: xmm10 ^= xmm14
2215# asm 1: pxor <xmm14=int6464#14,<xmm10=int6464#10
2216# asm 2: pxor <xmm14=%xmm13,<xmm10=%xmm9
2217pxor %xmm13,%xmm9
2218
2219# qhasm: xmm9 ^= xmm15
2220# asm 1: pxor <xmm15=int6464#16,<xmm9=int6464#13
2221# asm 2: pxor <xmm15=%xmm15,<xmm9=%xmm12
2222pxor %xmm15,%xmm12
2223
2224# qhasm: xmm8 ^= xmm14
2225# asm 1: pxor <xmm14=int6464#14,<xmm8=int6464#15
2226# asm 2: pxor <xmm14=%xmm13,<xmm8=%xmm14
2227pxor %xmm13,%xmm14
2228
2229# qhasm: xmm9 ^= xmm14
2230# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
2231# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
2232pxor %xmm13,%xmm12
2233
2234# qhasm: xmm12 = xmm4
2235# asm 1: movdqa <xmm4=int6464#5,>xmm12=int6464#11
2236# asm 2: movdqa <xmm4=%xmm4,>xmm12=%xmm10
2237movdqa %xmm4,%xmm10
2238
2239# qhasm: xmm13 = xmm3
2240# asm 1: movdqa <xmm3=int6464#4,>xmm13=int6464#12
2241# asm 2: movdqa <xmm3=%xmm3,>xmm13=%xmm11
2242movdqa %xmm3,%xmm11
2243
2244# qhasm: xmm14 = xmm1
2245# asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14
2246# asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13
2247movdqa %xmm1,%xmm13
2248
2249# qhasm: xmm15 = xmm5
2250# asm 1: movdqa <xmm5=int6464#6,>xmm15=int6464#16
2251# asm 2: movdqa <xmm5=%xmm5,>xmm15=%xmm15
2252movdqa %xmm5,%xmm15
2253
2254# qhasm: xmm12 &= xmm6
2255# asm 1: pand <xmm6=int6464#7,<xmm12=int6464#11
2256# asm 2: pand <xmm6=%xmm6,<xmm12=%xmm10
2257pand %xmm6,%xmm10
2258
2259# qhasm: xmm13 &= xmm0
2260# asm 1: pand <xmm0=int6464#1,<xmm13=int6464#12
2261# asm 2: pand <xmm0=%xmm0,<xmm13=%xmm11
2262pand %xmm0,%xmm11
2263
2264# qhasm: xmm14 &= xmm7
2265# asm 1: pand <xmm7=int6464#8,<xmm14=int6464#14
2266# asm 2: pand <xmm7=%xmm7,<xmm14=%xmm13
2267pand %xmm7,%xmm13
2268
2269# qhasm: xmm15 |= xmm2
2270# asm 1: por <xmm2=int6464#3,<xmm15=int6464#16
2271# asm 2: por <xmm2=%xmm2,<xmm15=%xmm15
2272por %xmm2,%xmm15
2273
2274# qhasm: xmm11 ^= xmm12
2275# asm 1: pxor <xmm12=int6464#11,<xmm11=int6464#9
2276# asm 2: pxor <xmm12=%xmm10,<xmm11=%xmm8
2277pxor %xmm10,%xmm8
2278
2279# qhasm: xmm10 ^= xmm13
2280# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
2281# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
2282pxor %xmm11,%xmm9
2283
2284# qhasm: xmm9 ^= xmm14
2285# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
2286# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
2287pxor %xmm13,%xmm12
2288
2289# qhasm: xmm8 ^= xmm15
2290# asm 1: pxor <xmm15=int6464#16,<xmm8=int6464#15
2291# asm 2: pxor <xmm15=%xmm15,<xmm8=%xmm14
2292pxor %xmm15,%xmm14
2293
2294# qhasm: xmm12 = xmm11
2295# asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11
2296# asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10
2297movdqa %xmm8,%xmm10
2298
2299# qhasm: xmm12 ^= xmm10
2300# asm 1: pxor <xmm10=int6464#10,<xmm12=int6464#11
2301# asm 2: pxor <xmm10=%xmm9,<xmm12=%xmm10
2302pxor %xmm9,%xmm10
2303
2304# qhasm: xmm11 &= xmm9
2305# asm 1: pand <xmm9=int6464#13,<xmm11=int6464#9
2306# asm 2: pand <xmm9=%xmm12,<xmm11=%xmm8
2307pand %xmm12,%xmm8
2308
2309# qhasm: xmm14 = xmm8
2310# asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12
2311# asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11
2312movdqa %xmm14,%xmm11
2313
2314# qhasm: xmm14 ^= xmm11
2315# asm 1: pxor <xmm11=int6464#9,<xmm14=int6464#12
2316# asm 2: pxor <xmm11=%xmm8,<xmm14=%xmm11
2317pxor %xmm8,%xmm11
2318
2319# qhasm: xmm15 = xmm12
2320# asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14
2321# asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13
2322movdqa %xmm10,%xmm13
2323
2324# qhasm: xmm15 &= xmm14
2325# asm 1: pand <xmm14=int6464#12,<xmm15=int6464#14
2326# asm 2: pand <xmm14=%xmm11,<xmm15=%xmm13
2327pand %xmm11,%xmm13
2328
2329# qhasm: xmm15 ^= xmm10
2330# asm 1: pxor <xmm10=int6464#10,<xmm15=int6464#14
2331# asm 2: pxor <xmm10=%xmm9,<xmm15=%xmm13
2332pxor %xmm9,%xmm13
2333
2334# qhasm: xmm13 = xmm9
2335# asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16
2336# asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15
2337movdqa %xmm12,%xmm15
2338
2339# qhasm: xmm13 ^= xmm8
2340# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
2341# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
2342pxor %xmm14,%xmm15
2343
2344# qhasm: xmm11 ^= xmm10
2345# asm 1: pxor <xmm10=int6464#10,<xmm11=int6464#9
2346# asm 2: pxor <xmm10=%xmm9,<xmm11=%xmm8
2347pxor %xmm9,%xmm8
2348
2349# qhasm: xmm13 &= xmm11
2350# asm 1: pand <xmm11=int6464#9,<xmm13=int6464#16
2351# asm 2: pand <xmm11=%xmm8,<xmm13=%xmm15
2352pand %xmm8,%xmm15
2353
2354# qhasm: xmm13 ^= xmm8
2355# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
2356# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
2357pxor %xmm14,%xmm15
2358
2359# qhasm: xmm9 ^= xmm13
2360# asm 1: pxor <xmm13=int6464#16,<xmm9=int6464#13
2361# asm 2: pxor <xmm13=%xmm15,<xmm9=%xmm12
2362pxor %xmm15,%xmm12
2363
2364# qhasm: xmm10 = xmm14
2365# asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9
2366# asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8
2367movdqa %xmm11,%xmm8
2368
2369# qhasm: xmm10 ^= xmm13
2370# asm 1: pxor <xmm13=int6464#16,<xmm10=int6464#9
2371# asm 2: pxor <xmm13=%xmm15,<xmm10=%xmm8
2372pxor %xmm15,%xmm8
2373
2374# qhasm: xmm10 &= xmm8
2375# asm 1: pand <xmm8=int6464#15,<xmm10=int6464#9
2376# asm 2: pand <xmm8=%xmm14,<xmm10=%xmm8
2377pand %xmm14,%xmm8
2378
2379# qhasm: xmm9 ^= xmm10
2380# asm 1: pxor <xmm10=int6464#9,<xmm9=int6464#13
2381# asm 2: pxor <xmm10=%xmm8,<xmm9=%xmm12
2382pxor %xmm8,%xmm12
2383
2384# qhasm: xmm14 ^= xmm10
2385# asm 1: pxor <xmm10=int6464#9,<xmm14=int6464#12
2386# asm 2: pxor <xmm10=%xmm8,<xmm14=%xmm11
2387pxor %xmm8,%xmm11
2388
2389# qhasm: xmm14 &= xmm15
2390# asm 1: pand <xmm15=int6464#14,<xmm14=int6464#12
2391# asm 2: pand <xmm15=%xmm13,<xmm14=%xmm11
2392pand %xmm13,%xmm11
2393
2394# qhasm: xmm14 ^= xmm12
2395# asm 1: pxor <xmm12=int6464#11,<xmm14=int6464#12
2396# asm 2: pxor <xmm12=%xmm10,<xmm14=%xmm11
2397pxor %xmm10,%xmm11
2398
2399# qhasm: xmm12 = xmm2
2400# asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#9
2401# asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm8
2402movdqa %xmm2,%xmm8
2403
2404# qhasm: xmm8 = xmm7
2405# asm 1: movdqa <xmm7=int6464#8,>xmm8=int6464#10
2406# asm 2: movdqa <xmm7=%xmm7,>xmm8=%xmm9
2407movdqa %xmm7,%xmm9
2408
2409# qhasm: xmm10 = xmm15
2410# asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11
2411# asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10
2412movdqa %xmm13,%xmm10
2413
2414# qhasm: xmm10 ^= xmm14
2415# asm 1: pxor <xmm14=int6464#12,<xmm10=int6464#11
2416# asm 2: pxor <xmm14=%xmm11,<xmm10=%xmm10
2417pxor %xmm11,%xmm10
2418
2419# qhasm: xmm10 &= xmm2
2420# asm 1: pand <xmm2=int6464#3,<xmm10=int6464#11
2421# asm 2: pand <xmm2=%xmm2,<xmm10=%xmm10
2422pand %xmm2,%xmm10
2423
2424# qhasm: xmm2 ^= xmm7
2425# asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3
2426# asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2
2427pxor %xmm7,%xmm2
2428
2429# qhasm: xmm2 &= xmm14
2430# asm 1: pand <xmm14=int6464#12,<xmm2=int6464#3
2431# asm 2: pand <xmm14=%xmm11,<xmm2=%xmm2
2432pand %xmm11,%xmm2
2433
2434# qhasm: xmm7 &= xmm15
2435# asm 1: pand <xmm15=int6464#14,<xmm7=int6464#8
2436# asm 2: pand <xmm15=%xmm13,<xmm7=%xmm7
2437pand %xmm13,%xmm7
2438
2439# qhasm: xmm2 ^= xmm7
2440# asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3
2441# asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2
2442pxor %xmm7,%xmm2
2443
2444# qhasm: xmm7 ^= xmm10
2445# asm 1: pxor <xmm10=int6464#11,<xmm7=int6464#8
2446# asm 2: pxor <xmm10=%xmm10,<xmm7=%xmm7
2447pxor %xmm10,%xmm7
2448
2449# qhasm: xmm12 ^= xmm0
2450# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#9
2451# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm8
2452pxor %xmm0,%xmm8
2453
2454# qhasm: xmm8 ^= xmm6
2455# asm 1: pxor <xmm6=int6464#7,<xmm8=int6464#10
2456# asm 2: pxor <xmm6=%xmm6,<xmm8=%xmm9
2457pxor %xmm6,%xmm9
2458
2459# qhasm: xmm15 ^= xmm13
2460# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
2461# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
2462pxor %xmm15,%xmm13
2463
2464# qhasm: xmm14 ^= xmm9
2465# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
2466# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
2467pxor %xmm12,%xmm11
2468
2469# qhasm: xmm11 = xmm15
2470# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
2471# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
2472movdqa %xmm13,%xmm10
2473
2474# qhasm: xmm11 ^= xmm14
2475# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
2476# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
2477pxor %xmm11,%xmm10
2478
2479# qhasm: xmm11 &= xmm12
2480# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
2481# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
2482pand %xmm8,%xmm10
2483
2484# qhasm: xmm12 ^= xmm8
2485# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
2486# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
2487pxor %xmm9,%xmm8
2488
2489# qhasm: xmm12 &= xmm14
2490# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
2491# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
2492pand %xmm11,%xmm8
2493
2494# qhasm: xmm8 &= xmm15
2495# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
2496# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
2497pand %xmm13,%xmm9
2498
2499# qhasm: xmm8 ^= xmm12
2500# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
2501# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
2502pxor %xmm8,%xmm9
2503
2504# qhasm: xmm12 ^= xmm11
2505# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
2506# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
2507pxor %xmm10,%xmm8
2508
2509# qhasm: xmm10 = xmm13
2510# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
2511# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
2512movdqa %xmm15,%xmm10
2513
2514# qhasm: xmm10 ^= xmm9
2515# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
2516# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
2517pxor %xmm12,%xmm10
2518
2519# qhasm: xmm10 &= xmm0
2520# asm 1: pand <xmm0=int6464#1,<xmm10=int6464#11
2521# asm 2: pand <xmm0=%xmm0,<xmm10=%xmm10
2522pand %xmm0,%xmm10
2523
2524# qhasm: xmm0 ^= xmm6
2525# asm 1: pxor <xmm6=int6464#7,<xmm0=int6464#1
2526# asm 2: pxor <xmm6=%xmm6,<xmm0=%xmm0
2527pxor %xmm6,%xmm0
2528
2529# qhasm: xmm0 &= xmm9
2530# asm 1: pand <xmm9=int6464#13,<xmm0=int6464#1
2531# asm 2: pand <xmm9=%xmm12,<xmm0=%xmm0
2532pand %xmm12,%xmm0
2533
2534# qhasm: xmm6 &= xmm13
2535# asm 1: pand <xmm13=int6464#16,<xmm6=int6464#7
2536# asm 2: pand <xmm13=%xmm15,<xmm6=%xmm6
2537pand %xmm15,%xmm6
2538
2539# qhasm: xmm0 ^= xmm6
2540# asm 1: pxor <xmm6=int6464#7,<xmm0=int6464#1
2541# asm 2: pxor <xmm6=%xmm6,<xmm0=%xmm0
2542pxor %xmm6,%xmm0
2543
2544# qhasm: xmm6 ^= xmm10
2545# asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7
2546# asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6
2547pxor %xmm10,%xmm6
2548
2549# qhasm: xmm2 ^= xmm12
2550# asm 1: pxor <xmm12=int6464#9,<xmm2=int6464#3
2551# asm 2: pxor <xmm12=%xmm8,<xmm2=%xmm2
2552pxor %xmm8,%xmm2
2553
2554# qhasm: xmm0 ^= xmm12
2555# asm 1: pxor <xmm12=int6464#9,<xmm0=int6464#1
2556# asm 2: pxor <xmm12=%xmm8,<xmm0=%xmm0
2557pxor %xmm8,%xmm0
2558
2559# qhasm: xmm7 ^= xmm8
2560# asm 1: pxor <xmm8=int6464#10,<xmm7=int6464#8
2561# asm 2: pxor <xmm8=%xmm9,<xmm7=%xmm7
2562pxor %xmm9,%xmm7
2563
2564# qhasm: xmm6 ^= xmm8
2565# asm 1: pxor <xmm8=int6464#10,<xmm6=int6464#7
2566# asm 2: pxor <xmm8=%xmm9,<xmm6=%xmm6
2567pxor %xmm9,%xmm6
2568
2569# qhasm: xmm12 = xmm5
2570# asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#9
2571# asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm8
2572movdqa %xmm5,%xmm8
2573
2574# qhasm: xmm8 = xmm1
2575# asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10
2576# asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9
2577movdqa %xmm1,%xmm9
2578
2579# qhasm: xmm12 ^= xmm3
2580# asm 1: pxor <xmm3=int6464#4,<xmm12=int6464#9
2581# asm 2: pxor <xmm3=%xmm3,<xmm12=%xmm8
2582pxor %xmm3,%xmm8
2583
2584# qhasm: xmm8 ^= xmm4
2585# asm 1: pxor <xmm4=int6464#5,<xmm8=int6464#10
2586# asm 2: pxor <xmm4=%xmm4,<xmm8=%xmm9
2587pxor %xmm4,%xmm9
2588
2589# qhasm: xmm11 = xmm15
2590# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
2591# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
2592movdqa %xmm13,%xmm10
2593
2594# qhasm: xmm11 ^= xmm14
2595# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
2596# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
2597pxor %xmm11,%xmm10
2598
2599# qhasm: xmm11 &= xmm12
2600# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
2601# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
2602pand %xmm8,%xmm10
2603
2604# qhasm: xmm12 ^= xmm8
2605# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
2606# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
2607pxor %xmm9,%xmm8
2608
2609# qhasm: xmm12 &= xmm14
2610# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
2611# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
2612pand %xmm11,%xmm8
2613
2614# qhasm: xmm8 &= xmm15
2615# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
2616# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
2617pand %xmm13,%xmm9
2618
2619# qhasm: xmm8 ^= xmm12
2620# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
2621# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
2622pxor %xmm8,%xmm9
2623
2624# qhasm: xmm12 ^= xmm11
2625# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
2626# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
2627pxor %xmm10,%xmm8
2628
2629# qhasm: xmm10 = xmm13
2630# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
2631# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
2632movdqa %xmm15,%xmm10
2633
2634# qhasm: xmm10 ^= xmm9
2635# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
2636# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
2637pxor %xmm12,%xmm10
2638
2639# qhasm: xmm10 &= xmm3
2640# asm 1: pand <xmm3=int6464#4,<xmm10=int6464#11
2641# asm 2: pand <xmm3=%xmm3,<xmm10=%xmm10
2642pand %xmm3,%xmm10
2643
2644# qhasm: xmm3 ^= xmm4
2645# asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4
2646# asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3
2647pxor %xmm4,%xmm3
2648
2649# qhasm: xmm3 &= xmm9
2650# asm 1: pand <xmm9=int6464#13,<xmm3=int6464#4
2651# asm 2: pand <xmm9=%xmm12,<xmm3=%xmm3
2652pand %xmm12,%xmm3
2653
2654# qhasm: xmm4 &= xmm13
2655# asm 1: pand <xmm13=int6464#16,<xmm4=int6464#5
2656# asm 2: pand <xmm13=%xmm15,<xmm4=%xmm4
2657pand %xmm15,%xmm4
2658
2659# qhasm: xmm3 ^= xmm4
2660# asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4
2661# asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3
2662pxor %xmm4,%xmm3
2663
2664# qhasm: xmm4 ^= xmm10
2665# asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5
2666# asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4
2667pxor %xmm10,%xmm4
2668
2669# qhasm: xmm15 ^= xmm13
2670# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
2671# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
2672pxor %xmm15,%xmm13
2673
2674# qhasm: xmm14 ^= xmm9
2675# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
2676# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
2677pxor %xmm12,%xmm11
2678
2679# qhasm: xmm11 = xmm15
2680# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
2681# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
2682movdqa %xmm13,%xmm10
2683
2684# qhasm: xmm11 ^= xmm14
2685# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
2686# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
2687pxor %xmm11,%xmm10
2688
2689# qhasm: xmm11 &= xmm5
2690# asm 1: pand <xmm5=int6464#6,<xmm11=int6464#11
2691# asm 2: pand <xmm5=%xmm5,<xmm11=%xmm10
2692pand %xmm5,%xmm10
2693
2694# qhasm: xmm5 ^= xmm1
2695# asm 1: pxor <xmm1=int6464#2,<xmm5=int6464#6
2696# asm 2: pxor <xmm1=%xmm1,<xmm5=%xmm5
2697pxor %xmm1,%xmm5
2698
2699# qhasm: xmm5 &= xmm14
2700# asm 1: pand <xmm14=int6464#12,<xmm5=int6464#6
2701# asm 2: pand <xmm14=%xmm11,<xmm5=%xmm5
2702pand %xmm11,%xmm5
2703
2704# qhasm: xmm1 &= xmm15
2705# asm 1: pand <xmm15=int6464#14,<xmm1=int6464#2
2706# asm 2: pand <xmm15=%xmm13,<xmm1=%xmm1
2707pand %xmm13,%xmm1
2708
2709# qhasm: xmm5 ^= xmm1
2710# asm 1: pxor <xmm1=int6464#2,<xmm5=int6464#6
2711# asm 2: pxor <xmm1=%xmm1,<xmm5=%xmm5
2712pxor %xmm1,%xmm5
2713
2714# qhasm: xmm1 ^= xmm11
2715# asm 1: pxor <xmm11=int6464#11,<xmm1=int6464#2
2716# asm 2: pxor <xmm11=%xmm10,<xmm1=%xmm1
2717pxor %xmm10,%xmm1
2718
2719# qhasm: xmm5 ^= xmm12
2720# asm 1: pxor <xmm12=int6464#9,<xmm5=int6464#6
2721# asm 2: pxor <xmm12=%xmm8,<xmm5=%xmm5
2722pxor %xmm8,%xmm5
2723
2724# qhasm: xmm3 ^= xmm12
2725# asm 1: pxor <xmm12=int6464#9,<xmm3=int6464#4
2726# asm 2: pxor <xmm12=%xmm8,<xmm3=%xmm3
2727pxor %xmm8,%xmm3
2728
2729# qhasm: xmm1 ^= xmm8
2730# asm 1: pxor <xmm8=int6464#10,<xmm1=int6464#2
2731# asm 2: pxor <xmm8=%xmm9,<xmm1=%xmm1
2732pxor %xmm9,%xmm1
2733
2734# qhasm: xmm4 ^= xmm8
2735# asm 1: pxor <xmm8=int6464#10,<xmm4=int6464#5
2736# asm 2: pxor <xmm8=%xmm9,<xmm4=%xmm4
2737pxor %xmm9,%xmm4
2738
2739# qhasm: xmm5 ^= xmm0
2740# asm 1: pxor <xmm0=int6464#1,<xmm5=int6464#6
2741# asm 2: pxor <xmm0=%xmm0,<xmm5=%xmm5
2742pxor %xmm0,%xmm5
2743
2744# qhasm: xmm1 ^= xmm2
2745# asm 1: pxor <xmm2=int6464#3,<xmm1=int6464#2
2746# asm 2: pxor <xmm2=%xmm2,<xmm1=%xmm1
2747pxor %xmm2,%xmm1
2748
2749# qhasm: xmm3 ^= xmm5
2750# asm 1: pxor <xmm5=int6464#6,<xmm3=int6464#4
2751# asm 2: pxor <xmm5=%xmm5,<xmm3=%xmm3
2752pxor %xmm5,%xmm3
2753
2754# qhasm: xmm2 ^= xmm0
2755# asm 1: pxor <xmm0=int6464#1,<xmm2=int6464#3
2756# asm 2: pxor <xmm0=%xmm0,<xmm2=%xmm2
2757pxor %xmm0,%xmm2
2758
2759# qhasm: xmm0 ^= xmm1
2760# asm 1: pxor <xmm1=int6464#2,<xmm0=int6464#1
2761# asm 2: pxor <xmm1=%xmm1,<xmm0=%xmm0
2762pxor %xmm1,%xmm0
2763
2764# qhasm: xmm1 ^= xmm7
2765# asm 1: pxor <xmm7=int6464#8,<xmm1=int6464#2
2766# asm 2: pxor <xmm7=%xmm7,<xmm1=%xmm1
2767pxor %xmm7,%xmm1
2768
2769# qhasm: xmm7 ^= xmm4
2770# asm 1: pxor <xmm4=int6464#5,<xmm7=int6464#8
2771# asm 2: pxor <xmm4=%xmm4,<xmm7=%xmm7
2772pxor %xmm4,%xmm7
2773
2774# qhasm: xmm3 ^= xmm7
2775# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4
2776# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3
2777pxor %xmm7,%xmm3
2778
2779# qhasm: xmm4 ^= xmm6
2780# asm 1: pxor <xmm6=int6464#7,<xmm4=int6464#5
2781# asm 2: pxor <xmm6=%xmm6,<xmm4=%xmm4
2782pxor %xmm6,%xmm4
2783
2784# qhasm: xmm6 ^= xmm7
2785# asm 1: pxor <xmm7=int6464#8,<xmm6=int6464#7
2786# asm 2: pxor <xmm7=%xmm7,<xmm6=%xmm6
2787pxor %xmm7,%xmm6
2788
2789# qhasm: xmm2 ^= xmm6
2790# asm 1: pxor <xmm6=int6464#7,<xmm2=int6464#3
2791# asm 2: pxor <xmm6=%xmm6,<xmm2=%xmm2
2792pxor %xmm6,%xmm2
2793
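# The S-box output has been mapped back into xmm0..xmm7; next the round constant is mixed in and the bytes are repositioned for the key-expansion xor chain.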
2794# qhasm: xmm1 ^= RCON
2795# asm 1: pxor RCON,<xmm1=int6464#2
2796# asm 2: pxor RCON,<xmm1=%xmm1
2797pxor RCON,%xmm1
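# RCON has only its top 32-bit lane set (see the .data section), so this flips that lane of slice 1: the bitsliced analogue of adding the AES round constant.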
2798
2799# qhasm: shuffle bytes of xmm0 by EXPB0
2800# asm 1: pshufb EXPB0,<xmm0=int6464#1
2801# asm 2: pshufb EXPB0,<xmm0=%xmm0
2802pshufb EXPB0,%xmm0
2803
2804# qhasm: shuffle bytes of xmm1 by EXPB0
2805# asm 1: pshufb EXPB0,<xmm1=int6464#2
2806# asm 2: pshufb EXPB0,<xmm1=%xmm1
2807pshufb EXPB0,%xmm1
2808
2809# qhasm: shuffle bytes of xmm3 by EXPB0
2810# asm 1: pshufb EXPB0,<xmm3=int6464#4
2811# asm 2: pshufb EXPB0,<xmm3=%xmm3
2812pshufb EXPB0,%xmm3
2813
2814# qhasm: shuffle bytes of xmm2 by EXPB0
2815# asm 1: pshufb EXPB0,<xmm2=int6464#3
2816# asm 2: pshufb EXPB0,<xmm2=%xmm2
2817pshufb EXPB0,%xmm2
2818
2819# qhasm: shuffle bytes of xmm6 by EXPB0
2820# asm 1: pshufb EXPB0,<xmm6=int6464#7
2821# asm 2: pshufb EXPB0,<xmm6=%xmm6
2822pshufb EXPB0,%xmm6
2823
2824# qhasm: shuffle bytes of xmm5 by EXPB0
2825# asm 1: pshufb EXPB0,<xmm5=int6464#6
2826# asm 2: pshufb EXPB0,<xmm5=%xmm5
2827pshufb EXPB0,%xmm5
2828
2829# qhasm: shuffle bytes of xmm4 by EXPB0
2830# asm 1: pshufb EXPB0,<xmm4=int6464#5
2831# asm 2: pshufb EXPB0,<xmm4=%xmm4
2832pshufb EXPB0,%xmm4
2833
2834# qhasm: shuffle bytes of xmm7 by EXPB0
2835# asm 1: pshufb EXPB0,<xmm7=int6464#8
2836# asm 2: pshufb EXPB0,<xmm7=%xmm7
2837pshufb EXPB0,%xmm7
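# As a pshufb mask, EXPB0 (0x03030303, 0x07070707, 0x0b0b0b0b, 0x0f0f0f0f) replicates the top byte of each 32-bit lane across that lane, broadcasting the substituted bytes for the xor cascade below.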
2838
2839# qhasm: xmm8 = *(int128 *)(c + 128)
2840# asm 1: movdqa 128(<c=int64#1),>xmm8=int6464#9
2841# asm 2: movdqa 128(<c=%rdi),>xmm8=%xmm8
2842movdqa 128(%rdi),%xmm8
2843
2844# qhasm: xmm9 = *(int128 *)(c + 144)
2845# asm 1: movdqa 144(<c=int64#1),>xmm9=int6464#10
2846# asm 2: movdqa 144(<c=%rdi),>xmm9=%xmm9
2847movdqa 144(%rdi),%xmm9
2848
2849# qhasm: xmm10 = *(int128 *)(c + 160)
2850# asm 1: movdqa 160(<c=int64#1),>xmm10=int6464#11
2851# asm 2: movdqa 160(<c=%rdi),>xmm10=%xmm10
2852movdqa 160(%rdi),%xmm10
2853
2854# qhasm: xmm11 = *(int128 *)(c + 176)
2855# asm 1: movdqa 176(<c=int64#1),>xmm11=int6464#12
2856# asm 2: movdqa 176(<c=%rdi),>xmm11=%xmm11
2857movdqa 176(%rdi),%xmm11
2858
2859# qhasm: xmm12 = *(int128 *)(c + 192)
2860# asm 1: movdqa 192(<c=int64#1),>xmm12=int6464#13
2861# asm 2: movdqa 192(<c=%rdi),>xmm12=%xmm12
2862movdqa 192(%rdi),%xmm12
2863
2864# qhasm: xmm13 = *(int128 *)(c + 208)
2865# asm 1: movdqa 208(<c=int64#1),>xmm13=int6464#14
2866# asm 2: movdqa 208(<c=%rdi),>xmm13=%xmm13
2867movdqa 208(%rdi),%xmm13
2868
2869# qhasm: xmm14 = *(int128 *)(c + 224)
2870# asm 1: movdqa 224(<c=int64#1),>xmm14=int6464#15
2871# asm 2: movdqa 224(<c=%rdi),>xmm14=%xmm14
2872movdqa 224(%rdi),%xmm14
2873
2874# qhasm: xmm15 = *(int128 *)(c + 240)
2875# asm 1: movdqa 240(<c=int64#1),>xmm15=int6464#16
2876# asm 2: movdqa 240(<c=%rdi),>xmm15=%xmm15
2877movdqa 240(%rdi),%xmm15
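# Reload the eight slices of the previous round key from c+128 through c+240 into xmm8..xmm15.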
2878
2879# qhasm: xmm8 ^= ONE
2880# asm 1: pxor ONE,<xmm8=int6464#9
2881# asm 2: pxor ONE,<xmm8=%xmm8
2882pxor ONE,%xmm8
2883
2884# qhasm: xmm9 ^= ONE
2885# asm 1: pxor ONE,<xmm9=int6464#10
2886# asm 2: pxor ONE,<xmm9=%xmm9
2887pxor ONE,%xmm9
2888
2889# qhasm: xmm13 ^= ONE
2890# asm 1: pxor ONE,<xmm13=int6464#14
2891# asm 2: pxor ONE,<xmm13=%xmm13
2892pxor ONE,%xmm13
2893
2894# qhasm: xmm14 ^= ONE
2895# asm 1: pxor ONE,<xmm14=int6464#15
2896# asm 2: pxor ONE,<xmm14=%xmm14
2897pxor ONE,%xmm14
2898
2899# qhasm: xmm0 ^= xmm8
2900# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
2901# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
2902pxor %xmm8,%xmm0
2903
2904# qhasm: xmm1 ^= xmm9
2905# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
2906# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
2907pxor %xmm9,%xmm1
2908
2909# qhasm: xmm3 ^= xmm10
2910# asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4
2911# asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3
2912pxor %xmm10,%xmm3
2913
2914# qhasm: xmm2 ^= xmm11
2915# asm 1: pxor <xmm11=int6464#12,<xmm2=int6464#3
2916# asm 2: pxor <xmm11=%xmm11,<xmm2=%xmm2
2917pxor %xmm11,%xmm2
2918
2919# qhasm: xmm6 ^= xmm12
2920# asm 1: pxor <xmm12=int6464#13,<xmm6=int6464#7
2921# asm 2: pxor <xmm12=%xmm12,<xmm6=%xmm6
2922pxor %xmm12,%xmm6
2923
2924# qhasm: xmm5 ^= xmm13
2925# asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6
2926# asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5
2927pxor %xmm13,%xmm5
2928
2929# qhasm: xmm4 ^= xmm14
2930# asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5
2931# asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4
2932pxor %xmm14,%xmm4
2933
2934# qhasm: xmm7 ^= xmm15
2935# asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8
2936# asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7
2937pxor %xmm15,%xmm7
2938
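# After the direct xor above, three passes of psrld $8 on xmm8..xmm15, each followed by pxor into xmm0..xmm7, fold the running byte-wise xor of the previous round key into every slice: the xor chain of the AES key expansion, performed lane-parallel on the bitsliced state.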
2939# qhasm: uint32323232 xmm8 >>= 8
2940# asm 1: psrld $8,<xmm8=int6464#9
2941# asm 2: psrld $8,<xmm8=%xmm8
2942psrld $8,%xmm8
2943
2944# qhasm: uint32323232 xmm9 >>= 8
2945# asm 1: psrld $8,<xmm9=int6464#10
2946# asm 2: psrld $8,<xmm9=%xmm9
2947psrld $8,%xmm9
2948
2949# qhasm: uint32323232 xmm10 >>= 8
2950# asm 1: psrld $8,<xmm10=int6464#11
2951# asm 2: psrld $8,<xmm10=%xmm10
2952psrld $8,%xmm10
2953
2954# qhasm: uint32323232 xmm11 >>= 8
2955# asm 1: psrld $8,<xmm11=int6464#12
2956# asm 2: psrld $8,<xmm11=%xmm11
2957psrld $8,%xmm11
2958
2959# qhasm: uint32323232 xmm12 >>= 8
2960# asm 1: psrld $8,<xmm12=int6464#13
2961# asm 2: psrld $8,<xmm12=%xmm12
2962psrld $8,%xmm12
2963
2964# qhasm: uint32323232 xmm13 >>= 8
2965# asm 1: psrld $8,<xmm13=int6464#14
2966# asm 2: psrld $8,<xmm13=%xmm13
2967psrld $8,%xmm13
2968
2969# qhasm: uint32323232 xmm14 >>= 8
2970# asm 1: psrld $8,<xmm14=int6464#15
2971# asm 2: psrld $8,<xmm14=%xmm14
2972psrld $8,%xmm14
2973
2974# qhasm: uint32323232 xmm15 >>= 8
2975# asm 1: psrld $8,<xmm15=int6464#16
2976# asm 2: psrld $8,<xmm15=%xmm15
2977psrld $8,%xmm15
2978
2979# qhasm: xmm0 ^= xmm8
2980# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
2981# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
2982pxor %xmm8,%xmm0
2983
2984# qhasm: xmm1 ^= xmm9
2985# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
2986# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
2987pxor %xmm9,%xmm1
2988
2989# qhasm: xmm3 ^= xmm10
2990# asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4
2991# asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3
2992pxor %xmm10,%xmm3
2993
2994# qhasm: xmm2 ^= xmm11
2995# asm 1: pxor <xmm11=int6464#12,<xmm2=int6464#3
2996# asm 2: pxor <xmm11=%xmm11,<xmm2=%xmm2
2997pxor %xmm11,%xmm2
2998
2999# qhasm: xmm6 ^= xmm12
3000# asm 1: pxor <xmm12=int6464#13,<xmm6=int6464#7
3001# asm 2: pxor <xmm12=%xmm12,<xmm6=%xmm6
3002pxor %xmm12,%xmm6
3003
3004# qhasm: xmm5 ^= xmm13
3005# asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6
3006# asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5
3007pxor %xmm13,%xmm5
3008
3009# qhasm: xmm4 ^= xmm14
3010# asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5
3011# asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4
3012pxor %xmm14,%xmm4
3013
3014# qhasm: xmm7 ^= xmm15
3015# asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8
3016# asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7
3017pxor %xmm15,%xmm7
3018
3019# qhasm: uint32323232 xmm8 >>= 8
3020# asm 1: psrld $8,<xmm8=int6464#9
3021# asm 2: psrld $8,<xmm8=%xmm8
3022psrld $8,%xmm8
3023
3024# qhasm: uint32323232 xmm9 >>= 8
3025# asm 1: psrld $8,<xmm9=int6464#10
3026# asm 2: psrld $8,<xmm9=%xmm9
3027psrld $8,%xmm9
3028
3029# qhasm: uint32323232 xmm10 >>= 8
3030# asm 1: psrld $8,<xmm10=int6464#11
3031# asm 2: psrld $8,<xmm10=%xmm10
3032psrld $8,%xmm10
3033
3034# qhasm: uint32323232 xmm11 >>= 8
3035# asm 1: psrld $8,<xmm11=int6464#12
3036# asm 2: psrld $8,<xmm11=%xmm11
3037psrld $8,%xmm11
3038
3039# qhasm: uint32323232 xmm12 >>= 8
3040# asm 1: psrld $8,<xmm12=int6464#13
3041# asm 2: psrld $8,<xmm12=%xmm12
3042psrld $8,%xmm12
3043
3044# qhasm: uint32323232 xmm13 >>= 8
3045# asm 1: psrld $8,<xmm13=int6464#14
3046# asm 2: psrld $8,<xmm13=%xmm13
3047psrld $8,%xmm13
3048
3049# qhasm: uint32323232 xmm14 >>= 8
3050# asm 1: psrld $8,<xmm14=int6464#15
3051# asm 2: psrld $8,<xmm14=%xmm14
3052psrld $8,%xmm14
3053
3054# qhasm: uint32323232 xmm15 >>= 8
3055# asm 1: psrld $8,<xmm15=int6464#16
3056# asm 2: psrld $8,<xmm15=%xmm15
3057psrld $8,%xmm15
3058
3059# qhasm: xmm0 ^= xmm8
3060# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
3061# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
3062pxor %xmm8,%xmm0
3063
3064# qhasm: xmm1 ^= xmm9
3065# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
3066# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
3067pxor %xmm9,%xmm1
3068
3069# qhasm: xmm3 ^= xmm10
3070# asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4
3071# asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3
3072pxor %xmm10,%xmm3
3073
3074# qhasm: xmm2 ^= xmm11
3075# asm 1: pxor <xmm11=int6464#12,<xmm2=int6464#3
3076# asm 2: pxor <xmm11=%xmm11,<xmm2=%xmm2
3077pxor %xmm11,%xmm2
3078
3079# qhasm: xmm6 ^= xmm12
3080# asm 1: pxor <xmm12=int6464#13,<xmm6=int6464#7
3081# asm 2: pxor <xmm12=%xmm12,<xmm6=%xmm6
3082pxor %xmm12,%xmm6
3083
3084# qhasm: xmm5 ^= xmm13
3085# asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6
3086# asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5
3087pxor %xmm13,%xmm5
3088
3089# qhasm: xmm4 ^= xmm14
3090# asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5
3091# asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4
3092pxor %xmm14,%xmm4
3093
3094# qhasm: xmm7 ^= xmm15
3095# asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8
3096# asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7
3097pxor %xmm15,%xmm7
3098
3099# qhasm: uint32323232 xmm8 >>= 8
3100# asm 1: psrld $8,<xmm8=int6464#9
3101# asm 2: psrld $8,<xmm8=%xmm8
3102psrld $8,%xmm8
3103
3104# qhasm: uint32323232 xmm9 >>= 8
3105# asm 1: psrld $8,<xmm9=int6464#10
3106# asm 2: psrld $8,<xmm9=%xmm9
3107psrld $8,%xmm9
3108
3109# qhasm: uint32323232 xmm10 >>= 8
3110# asm 1: psrld $8,<xmm10=int6464#11
3111# asm 2: psrld $8,<xmm10=%xmm10
3112psrld $8,%xmm10
3113
3114# qhasm: uint32323232 xmm11 >>= 8
3115# asm 1: psrld $8,<xmm11=int6464#12
3116# asm 2: psrld $8,<xmm11=%xmm11
3117psrld $8,%xmm11
3118
3119# qhasm: uint32323232 xmm12 >>= 8
3120# asm 1: psrld $8,<xmm12=int6464#13
3121# asm 2: psrld $8,<xmm12=%xmm12
3122psrld $8,%xmm12
3123
3124# qhasm: uint32323232 xmm13 >>= 8
3125# asm 1: psrld $8,<xmm13=int6464#14
3126# asm 2: psrld $8,<xmm13=%xmm13
3127psrld $8,%xmm13
3128
3129# qhasm: uint32323232 xmm14 >>= 8
3130# asm 1: psrld $8,<xmm14=int6464#15
3131# asm 2: psrld $8,<xmm14=%xmm14
3132psrld $8,%xmm14
3133
3134# qhasm: uint32323232 xmm15 >>= 8
3135# asm 1: psrld $8,<xmm15=int6464#16
3136# asm 2: psrld $8,<xmm15=%xmm15
3137psrld $8,%xmm15
3138
3139# qhasm: xmm0 ^= xmm8
3140# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
3141# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
3142pxor %xmm8,%xmm0
3143
3144# qhasm: xmm1 ^= xmm9
3145# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
3146# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
3147pxor %xmm9,%xmm1
3148
3149# qhasm: xmm3 ^= xmm10
3150# asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4
3151# asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3
3152pxor %xmm10,%xmm3
3153
3154# qhasm: xmm2 ^= xmm11
3155# asm 1: pxor <xmm11=int6464#12,<xmm2=int6464#3
3156# asm 2: pxor <xmm11=%xmm11,<xmm2=%xmm2
3157pxor %xmm11,%xmm2
3158
3159# qhasm: xmm6 ^= xmm12
3160# asm 1: pxor <xmm12=int6464#13,<xmm6=int6464#7
3161# asm 2: pxor <xmm12=%xmm12,<xmm6=%xmm6
3162pxor %xmm12,%xmm6
3163
3164# qhasm: xmm5 ^= xmm13
3165# asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6
3166# asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5
3167pxor %xmm13,%xmm5
3168
3169# qhasm: xmm4 ^= xmm14
3170# asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5
3171# asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4
3172pxor %xmm14,%xmm4
3173
3174# qhasm: xmm7 ^= xmm15
3175# asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8
3176# asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7
3177pxor %xmm15,%xmm7
3178
3179# qhasm: *(int128 *)(c + 256) = xmm0
3180# asm 1: movdqa <xmm0=int6464#1,256(<c=int64#1)
3181# asm 2: movdqa <xmm0=%xmm0,256(<c=%rdi)
3182movdqa %xmm0,256(%rdi)
3183
3184# qhasm: *(int128 *)(c + 272) = xmm1
3185# asm 1: movdqa <xmm1=int6464#2,272(<c=int64#1)
3186# asm 2: movdqa <xmm1=%xmm1,272(<c=%rdi)
3187movdqa %xmm1,272(%rdi)
3188
3189# qhasm: *(int128 *)(c + 288) = xmm3
3190# asm 1: movdqa <xmm3=int6464#4,288(<c=int64#1)
3191# asm 2: movdqa <xmm3=%xmm3,288(<c=%rdi)
3192movdqa %xmm3,288(%rdi)
3193
3194# qhasm: *(int128 *)(c + 304) = xmm2
3195# asm 1: movdqa <xmm2=int6464#3,304(<c=int64#1)
3196# asm 2: movdqa <xmm2=%xmm2,304(<c=%rdi)
3197movdqa %xmm2,304(%rdi)
3198
3199# qhasm: *(int128 *)(c + 320) = xmm6
3200# asm 1: movdqa <xmm6=int6464#7,320(<c=int64#1)
3201# asm 2: movdqa <xmm6=%xmm6,320(<c=%rdi)
3202movdqa %xmm6,320(%rdi)
3203
3204# qhasm: *(int128 *)(c + 336) = xmm5
3205# asm 1: movdqa <xmm5=int6464#6,336(<c=int64#1)
3206# asm 2: movdqa <xmm5=%xmm5,336(<c=%rdi)
3207movdqa %xmm5,336(%rdi)
3208
3209# qhasm: *(int128 *)(c + 352) = xmm4
3210# asm 1: movdqa <xmm4=int6464#5,352(<c=int64#1)
3211# asm 2: movdqa <xmm4=%xmm4,352(<c=%rdi)
3212movdqa %xmm4,352(%rdi)
3213
3214# qhasm: *(int128 *)(c + 368) = xmm7
3215# asm 1: movdqa <xmm7=int6464#8,368(<c=int64#1)
3216# asm 2: movdqa <xmm7=%xmm7,368(<c=%rdi)
3217movdqa %xmm7,368(%rdi)
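# The next round key's eight slices are stored at c+256 through c+368.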
3218
3219# qhasm: xmm0 ^= ONE
3220# asm 1: pxor ONE,<xmm0=int6464#1
3221# asm 2: pxor ONE,<xmm0=%xmm0
3222pxor ONE,%xmm0
3223
3224# qhasm: xmm1 ^= ONE
3225# asm 1: pxor ONE,<xmm1=int6464#2
3226# asm 2: pxor ONE,<xmm1=%xmm1
3227pxor ONE,%xmm1
3228
3229# qhasm: xmm5 ^= ONE
3230# asm 1: pxor ONE,<xmm5=int6464#6
3231# asm 2: pxor ONE,<xmm5=%xmm5
3232pxor ONE,%xmm5
3233
3234# qhasm: xmm4 ^= ONE
3235# asm 1: pxor ONE,<xmm4=int6464#5
3236# asm 2: pxor ONE,<xmm4=%xmm4
3237pxor ONE,%xmm4
3238
3239# qhasm: shuffle bytes of xmm0 by ROTB
3240# asm 1: pshufb ROTB,<xmm0=int6464#1
3241# asm 2: pshufb ROTB,<xmm0=%xmm0
3242pshufb ROTB,%xmm0
3243
3244# qhasm: shuffle bytes of xmm1 by ROTB
3245# asm 1: pshufb ROTB,<xmm1=int6464#2
3246# asm 2: pshufb ROTB,<xmm1=%xmm1
3247pshufb ROTB,%xmm1
3248
3249# qhasm: shuffle bytes of xmm3 by ROTB
3250# asm 1: pshufb ROTB,<xmm3=int6464#4
3251# asm 2: pshufb ROTB,<xmm3=%xmm3
3252pshufb ROTB,%xmm3
3253
3254# qhasm: shuffle bytes of xmm2 by ROTB
3255# asm 1: pshufb ROTB,<xmm2=int6464#3
3256# asm 2: pshufb ROTB,<xmm2=%xmm2
3257pshufb ROTB,%xmm2
3258
3259# qhasm: shuffle bytes of xmm6 by ROTB
3260# asm 1: pshufb ROTB,<xmm6=int6464#7
3261# asm 2: pshufb ROTB,<xmm6=%xmm6
3262pshufb ROTB,%xmm6
3263
3264# qhasm: shuffle bytes of xmm5 by ROTB
3265# asm 1: pshufb ROTB,<xmm5=int6464#6
3266# asm 2: pshufb ROTB,<xmm5=%xmm5
3267pshufb ROTB,%xmm5
3268
3269# qhasm: shuffle bytes of xmm4 by ROTB
3270# asm 1: pshufb ROTB,<xmm4=int6464#5
3271# asm 2: pshufb ROTB,<xmm4=%xmm4
3272pshufb ROTB,%xmm4
3273
3274# qhasm: shuffle bytes of xmm7 by ROTB
3275# asm 1: pshufb ROTB,<xmm7=int6464#8
3276# asm 2: pshufb ROTB,<xmm7=%xmm7
3277pshufb ROTB,%xmm7
3278
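# The complement-by-ONE and ROTB permutation above mirror the earlier round; another input transform and S-box circuit evaluation follow for the next expansion step.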
3279# qhasm: xmm5 ^= xmm4
3280# asm 1: pxor <xmm4=int6464#5,<xmm5=int6464#6
3281# asm 2: pxor <xmm4=%xmm4,<xmm5=%xmm5
3282pxor %xmm4,%xmm5
3283
3284# qhasm: xmm3 ^= xmm1
3285# asm 1: pxor <xmm1=int6464#2,<xmm3=int6464#4
3286# asm 2: pxor <xmm1=%xmm1,<xmm3=%xmm3
3287pxor %xmm1,%xmm3
3288
3289# qhasm: xmm5 ^= xmm0
3290# asm 1: pxor <xmm0=int6464#1,<xmm5=int6464#6
3291# asm 2: pxor <xmm0=%xmm0,<xmm5=%xmm5
3292pxor %xmm0,%xmm5
3293
3294# qhasm: xmm4 ^= xmm3
3295# asm 1: pxor <xmm3=int6464#4,<xmm4=int6464#5
3296# asm 2: pxor <xmm3=%xmm3,<xmm4=%xmm4
3297pxor %xmm3,%xmm4
3298
3299# qhasm: xmm2 ^= xmm0
3300# asm 1: pxor <xmm0=int6464#1,<xmm2=int6464#3
3301# asm 2: pxor <xmm0=%xmm0,<xmm2=%xmm2
3302pxor %xmm0,%xmm2
3303
3304# qhasm: xmm4 ^= xmm2
3305# asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5
3306# asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4
3307pxor %xmm2,%xmm4
3308
3309# qhasm: xmm2 ^= xmm7
3310# asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3
3311# asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2
3312pxor %xmm7,%xmm2
3313
3314# qhasm: xmm2 ^= xmm6
3315# asm 1: pxor <xmm6=int6464#7,<xmm2=int6464#3
3316# asm 2: pxor <xmm6=%xmm6,<xmm2=%xmm2
3317pxor %xmm6,%xmm2
3318
3319# qhasm: xmm7 ^= xmm5
3320# asm 1: pxor <xmm5=int6464#6,<xmm7=int6464#8
3321# asm 2: pxor <xmm5=%xmm5,<xmm7=%xmm7
3322pxor %xmm5,%xmm7
3323
3324# qhasm: xmm2 ^= xmm1
3325# asm 1: pxor <xmm1=int6464#2,<xmm2=int6464#3
3326# asm 2: pxor <xmm1=%xmm1,<xmm2=%xmm2
3327pxor %xmm1,%xmm2
3328
3329# qhasm: xmm6 ^= xmm5
3330# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
3331# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
3332pxor %xmm5,%xmm6
3333
3334# qhasm: xmm3 ^= xmm7
3335# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4
3336# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3
3337pxor %xmm7,%xmm3
3338
3339# qhasm: xmm1 ^= xmm5
3340# asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2
3341# asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1
3342pxor %xmm5,%xmm1
3343
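# The S-box circuit evaluation for this round begins here, again using xmm8..xmm15 as scratch registers.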
3344# qhasm: xmm11 = xmm7
3345# asm 1: movdqa <xmm7=int6464#8,>xmm11=int6464#9
3346# asm 2: movdqa <xmm7=%xmm7,>xmm11=%xmm8
3347movdqa %xmm7,%xmm8
3348
3349# qhasm: xmm10 = xmm1
3350# asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10
3351# asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9
3352movdqa %xmm1,%xmm9
3353
3354# qhasm: xmm9 = xmm5
3355# asm 1: movdqa <xmm5=int6464#6,>xmm9=int6464#11
3356# asm 2: movdqa <xmm5=%xmm5,>xmm9=%xmm10
3357movdqa %xmm5,%xmm10
3358
3359# qhasm: xmm13 = xmm3
3360# asm 1: movdqa <xmm3=int6464#4,>xmm13=int6464#12
3361# asm 2: movdqa <xmm3=%xmm3,>xmm13=%xmm11
3362movdqa %xmm3,%xmm11
3363
3364# qhasm: xmm12 = xmm4
3365# asm 1: movdqa <xmm4=int6464#5,>xmm12=int6464#13
3366# asm 2: movdqa <xmm4=%xmm4,>xmm12=%xmm12
3367movdqa %xmm4,%xmm12
3368
3369# qhasm: xmm11 ^= xmm6
3370# asm 1: pxor <xmm6=int6464#7,<xmm11=int6464#9
3371# asm 2: pxor <xmm6=%xmm6,<xmm11=%xmm8
3372pxor %xmm6,%xmm8
3373
3374# qhasm: xmm10 ^= xmm3
3375# asm 1: pxor <xmm3=int6464#4,<xmm10=int6464#10
3376# asm 2: pxor <xmm3=%xmm3,<xmm10=%xmm9
3377pxor %xmm3,%xmm9
3378
3379# qhasm: xmm9 ^= xmm2
3380# asm 1: pxor <xmm2=int6464#3,<xmm9=int6464#11
3381# asm 2: pxor <xmm2=%xmm2,<xmm9=%xmm10
3382pxor %xmm2,%xmm10
3383
3384# qhasm: xmm13 ^= xmm6
3385# asm 1: pxor <xmm6=int6464#7,<xmm13=int6464#12
3386# asm 2: pxor <xmm6=%xmm6,<xmm13=%xmm11
3387pxor %xmm6,%xmm11
3388
3389# qhasm: xmm12 ^= xmm0
3390# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13
3391# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12
3392pxor %xmm0,%xmm12
3393
3394# qhasm: xmm14 = xmm11
3395# asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14
3396# asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13
3397movdqa %xmm8,%xmm13
3398
3399# qhasm: xmm8 = xmm10
3400# asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15
3401# asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14
3402movdqa %xmm9,%xmm14
3403
3404# qhasm: xmm15 = xmm11
3405# asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16
3406# asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15
3407movdqa %xmm8,%xmm15
3408
3409# qhasm: xmm10 |= xmm9
3410# asm 1: por <xmm9=int6464#11,<xmm10=int6464#10
3411# asm 2: por <xmm9=%xmm10,<xmm10=%xmm9
3412por %xmm10,%xmm9
3413
3414# qhasm: xmm11 |= xmm12
3415# asm 1: por <xmm12=int6464#13,<xmm11=int6464#9
3416# asm 2: por <xmm12=%xmm12,<xmm11=%xmm8
3417por %xmm12,%xmm8
3418
3419# qhasm: xmm15 ^= xmm8
3420# asm 1: pxor <xmm8=int6464#15,<xmm15=int6464#16
3421# asm 2: pxor <xmm8=%xmm14,<xmm15=%xmm15
3422pxor %xmm14,%xmm15
3423
3424# qhasm: xmm14 &= xmm12
3425# asm 1: pand <xmm12=int6464#13,<xmm14=int6464#14
3426# asm 2: pand <xmm12=%xmm12,<xmm14=%xmm13
3427pand %xmm12,%xmm13
3428
3429# qhasm: xmm8 &= xmm9
3430# asm 1: pand <xmm9=int6464#11,<xmm8=int6464#15
3431# asm 2: pand <xmm9=%xmm10,<xmm8=%xmm14
3432pand %xmm10,%xmm14
3433
3434# qhasm: xmm12 ^= xmm9
3435# asm 1: pxor <xmm9=int6464#11,<xmm12=int6464#13
3436# asm 2: pxor <xmm9=%xmm10,<xmm12=%xmm12
3437pxor %xmm10,%xmm12
3438
3439# qhasm: xmm15 &= xmm12
3440# asm 1: pand <xmm12=int6464#13,<xmm15=int6464#16
3441# asm 2: pand <xmm12=%xmm12,<xmm15=%xmm15
3442pand %xmm12,%xmm15
3443
3444# qhasm: xmm12 = xmm2
3445# asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#11
3446# asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm10
3447movdqa %xmm2,%xmm10
3448
3449# qhasm: xmm12 ^= xmm0
3450# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#11
3451# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm10
3452pxor %xmm0,%xmm10
3453
3454# qhasm: xmm13 &= xmm12
3455# asm 1: pand <xmm12=int6464#11,<xmm13=int6464#12
3456# asm 2: pand <xmm12=%xmm10,<xmm13=%xmm11
3457pand %xmm10,%xmm11
3458
3459# qhasm: xmm11 ^= xmm13
3460# asm 1: pxor <xmm13=int6464#12,<xmm11=int6464#9
3461# asm 2: pxor <xmm13=%xmm11,<xmm11=%xmm8
3462pxor %xmm11,%xmm8
3463
3464# qhasm: xmm10 ^= xmm13
3465# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
3466# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
3467pxor %xmm11,%xmm9
3468
3469# qhasm: xmm13 = xmm7
3470# asm 1: movdqa <xmm7=int6464#8,>xmm13=int6464#11
3471# asm 2: movdqa <xmm7=%xmm7,>xmm13=%xmm10
3472movdqa %xmm7,%xmm10
3473
3474# qhasm: xmm13 ^= xmm1
3475# asm 1: pxor <xmm1=int6464#2,<xmm13=int6464#11
3476# asm 2: pxor <xmm1=%xmm1,<xmm13=%xmm10
3477pxor %xmm1,%xmm10
3478
3479# qhasm: xmm12 = xmm5
3480# asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#12
3481# asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm11
3482movdqa %xmm5,%xmm11
3483
3484# qhasm: xmm9 = xmm13
3485# asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13
3486# asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12
3487movdqa %xmm10,%xmm12
3488
3489# qhasm: xmm12 ^= xmm4
3490# asm 1: pxor <xmm4=int6464#5,<xmm12=int6464#12
3491# asm 2: pxor <xmm4=%xmm4,<xmm12=%xmm11
3492pxor %xmm4,%xmm11
3493
3494# qhasm: xmm9 |= xmm12
3495# asm 1: por <xmm12=int6464#12,<xmm9=int6464#13
3496# asm 2: por <xmm12=%xmm11,<xmm9=%xmm12
3497por %xmm11,%xmm12
3498
3499# qhasm: xmm13 &= xmm12
3500# asm 1: pand <xmm12=int6464#12,<xmm13=int6464#11
3501# asm 2: pand <xmm12=%xmm11,<xmm13=%xmm10
3502pand %xmm11,%xmm10
3503
3504# qhasm: xmm8 ^= xmm13
3505# asm 1: pxor <xmm13=int6464#11,<xmm8=int6464#15
3506# asm 2: pxor <xmm13=%xmm10,<xmm8=%xmm14
3507pxor %xmm10,%xmm14
3508
3509# qhasm: xmm11 ^= xmm15
3510# asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#9
3511# asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm8
3512pxor %xmm15,%xmm8
3513
3514# qhasm: xmm10 ^= xmm14
3515# asm 1: pxor <xmm14=int6464#14,<xmm10=int6464#10
3516# asm 2: pxor <xmm14=%xmm13,<xmm10=%xmm9
3517pxor %xmm13,%xmm9
3518
3519# qhasm: xmm9 ^= xmm15
3520# asm 1: pxor <xmm15=int6464#16,<xmm9=int6464#13
3521# asm 2: pxor <xmm15=%xmm15,<xmm9=%xmm12
3522pxor %xmm15,%xmm12
3523
3524# qhasm: xmm8 ^= xmm14
3525# asm 1: pxor <xmm14=int6464#14,<xmm8=int6464#15
3526# asm 2: pxor <xmm14=%xmm13,<xmm8=%xmm14
3527pxor %xmm13,%xmm14
3528
3529# qhasm: xmm9 ^= xmm14
3530# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
3531# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
3532pxor %xmm13,%xmm12
3533
3534# qhasm: xmm12 = xmm3
3535# asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#11
3536# asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm10
3537movdqa %xmm3,%xmm10
3538
3539# qhasm: xmm13 = xmm6
3540# asm 1: movdqa <xmm6=int6464#7,>xmm13=int6464#12
3541# asm 2: movdqa <xmm6=%xmm6,>xmm13=%xmm11
3542movdqa %xmm6,%xmm11
3543
3544# qhasm: xmm14 = xmm1
3545# asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14
3546# asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13
3547movdqa %xmm1,%xmm13
3548
3549# qhasm: xmm15 = xmm7
3550# asm 1: movdqa <xmm7=int6464#8,>xmm15=int6464#16
3551# asm 2: movdqa <xmm7=%xmm7,>xmm15=%xmm15
3552movdqa %xmm7,%xmm15
3553
3554# qhasm: xmm12 &= xmm2
3555# asm 1: pand <xmm2=int6464#3,<xmm12=int6464#11
3556# asm 2: pand <xmm2=%xmm2,<xmm12=%xmm10
3557pand %xmm2,%xmm10
3558
3559# qhasm: xmm13 &= xmm0
3560# asm 1: pand <xmm0=int6464#1,<xmm13=int6464#12
3561# asm 2: pand <xmm0=%xmm0,<xmm13=%xmm11
3562pand %xmm0,%xmm11
3563
3564# qhasm: xmm14 &= xmm5
3565# asm 1: pand <xmm5=int6464#6,<xmm14=int6464#14
3566# asm 2: pand <xmm5=%xmm5,<xmm14=%xmm13
3567pand %xmm5,%xmm13
3568
3569# qhasm: xmm15 |= xmm4
3570# asm 1: por <xmm4=int6464#5,<xmm15=int6464#16
3571# asm 2: por <xmm4=%xmm4,<xmm15=%xmm15
3572por %xmm4,%xmm15
3573
3574# qhasm: xmm11 ^= xmm12
3575# asm 1: pxor <xmm12=int6464#11,<xmm11=int6464#9
3576# asm 2: pxor <xmm12=%xmm10,<xmm11=%xmm8
3577pxor %xmm10,%xmm8
3578
3579# qhasm: xmm10 ^= xmm13
3580# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
3581# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
3582pxor %xmm11,%xmm9
3583
3584# qhasm: xmm9 ^= xmm14
3585# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
3586# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
3587pxor %xmm13,%xmm12
3588
3589# qhasm: xmm8 ^= xmm15
3590# asm 1: pxor <xmm15=int6464#16,<xmm8=int6464#15
3591# asm 2: pxor <xmm15=%xmm15,<xmm8=%xmm14
3592pxor %xmm15,%xmm14
3593
3594# qhasm: xmm12 = xmm11
3595# asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11
3596# asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10
3597movdqa %xmm8,%xmm10
3598
3599# qhasm: xmm12 ^= xmm10
3600# asm 1: pxor <xmm10=int6464#10,<xmm12=int6464#11
3601# asm 2: pxor <xmm10=%xmm9,<xmm12=%xmm10
3602pxor %xmm9,%xmm10
3603
3604# qhasm: xmm11 &= xmm9
3605# asm 1: pand <xmm9=int6464#13,<xmm11=int6464#9
3606# asm 2: pand <xmm9=%xmm12,<xmm11=%xmm8
3607pand %xmm12,%xmm8
3608
3609# qhasm: xmm14 = xmm8
3610# asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12
3611# asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11
3612movdqa %xmm14,%xmm11
3613
3614# qhasm: xmm14 ^= xmm11
3615# asm 1: pxor <xmm11=int6464#9,<xmm14=int6464#12
3616# asm 2: pxor <xmm11=%xmm8,<xmm14=%xmm11
3617pxor %xmm8,%xmm11
3618
3619# qhasm: xmm15 = xmm12
3620# asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14
3621# asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13
3622movdqa %xmm10,%xmm13
3623
3624# qhasm: xmm15 &= xmm14
3625# asm 1: pand <xmm14=int6464#12,<xmm15=int6464#14
3626# asm 2: pand <xmm14=%xmm11,<xmm15=%xmm13
3627pand %xmm11,%xmm13
3628
3629# qhasm: xmm15 ^= xmm10
3630# asm 1: pxor <xmm10=int6464#10,<xmm15=int6464#14
3631# asm 2: pxor <xmm10=%xmm9,<xmm15=%xmm13
3632pxor %xmm9,%xmm13
3633
3634# qhasm: xmm13 = xmm9
3635# asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16
3636# asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15
3637movdqa %xmm12,%xmm15
3638
3639# qhasm: xmm13 ^= xmm8
3640# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
3641# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
3642pxor %xmm14,%xmm15
3643
3644# qhasm: xmm11 ^= xmm10
3645# asm 1: pxor <xmm10=int6464#10,<xmm11=int6464#9
3646# asm 2: pxor <xmm10=%xmm9,<xmm11=%xmm8
3647pxor %xmm9,%xmm8
3648
3649# qhasm: xmm13 &= xmm11
3650# asm 1: pand <xmm11=int6464#9,<xmm13=int6464#16
3651# asm 2: pand <xmm11=%xmm8,<xmm13=%xmm15
3652pand %xmm8,%xmm15
3653
3654# qhasm: xmm13 ^= xmm8
3655# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
3656# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
3657pxor %xmm14,%xmm15
3658
3659# qhasm: xmm9 ^= xmm13
3660# asm 1: pxor <xmm13=int6464#16,<xmm9=int6464#13
3661# asm 2: pxor <xmm13=%xmm15,<xmm9=%xmm12
3662pxor %xmm15,%xmm12
3663
3664# qhasm: xmm10 = xmm14
3665# asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9
3666# asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8
3667movdqa %xmm11,%xmm8
3668
3669# qhasm: xmm10 ^= xmm13
3670# asm 1: pxor <xmm13=int6464#16,<xmm10=int6464#9
3671# asm 2: pxor <xmm13=%xmm15,<xmm10=%xmm8
3672pxor %xmm15,%xmm8
3673
3674# qhasm: xmm10 &= xmm8
3675# asm 1: pand <xmm8=int6464#15,<xmm10=int6464#9
3676# asm 2: pand <xmm8=%xmm14,<xmm10=%xmm8
3677pand %xmm14,%xmm8
3678
3679# qhasm: xmm9 ^= xmm10
3680# asm 1: pxor <xmm10=int6464#9,<xmm9=int6464#13
3681# asm 2: pxor <xmm10=%xmm8,<xmm9=%xmm12
3682pxor %xmm8,%xmm12
3683
3684# qhasm: xmm14 ^= xmm10
3685# asm 1: pxor <xmm10=int6464#9,<xmm14=int6464#12
3686# asm 2: pxor <xmm10=%xmm8,<xmm14=%xmm11
3687pxor %xmm8,%xmm11
3688
3689# qhasm: xmm14 &= xmm15
3690# asm 1: pand <xmm15=int6464#14,<xmm14=int6464#12
3691# asm 2: pand <xmm15=%xmm13,<xmm14=%xmm11
3692pand %xmm13,%xmm11
3693
3694# qhasm: xmm14 ^= xmm12
3695# asm 1: pxor <xmm12=int6464#11,<xmm14=int6464#12
3696# asm 2: pxor <xmm12=%xmm10,<xmm14=%xmm11
3697pxor %xmm10,%xmm11
3698
3699# qhasm: xmm12 = xmm4
3700# asm 1: movdqa <xmm4=int6464#5,>xmm12=int6464#9
3701# asm 2: movdqa <xmm4=%xmm4,>xmm12=%xmm8
3702movdqa %xmm4,%xmm8
3703
3704# qhasm: xmm8 = xmm5
3705# asm 1: movdqa <xmm5=int6464#6,>xmm8=int6464#10
3706# asm 2: movdqa <xmm5=%xmm5,>xmm8=%xmm9
3707movdqa %xmm5,%xmm9
3708
3709# qhasm: xmm10 = xmm15
3710# asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11
3711# asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10
3712movdqa %xmm13,%xmm10
3713
3714# qhasm: xmm10 ^= xmm14
3715# asm 1: pxor <xmm14=int6464#12,<xmm10=int6464#11
3716# asm 2: pxor <xmm14=%xmm11,<xmm10=%xmm10
3717pxor %xmm11,%xmm10
3718
3719# qhasm: xmm10 &= xmm4
3720# asm 1: pand <xmm4=int6464#5,<xmm10=int6464#11
3721# asm 2: pand <xmm4=%xmm4,<xmm10=%xmm10
3722pand %xmm4,%xmm10
3723
3724# qhasm: xmm4 ^= xmm5
3725# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
3726# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
3727pxor %xmm5,%xmm4
3728
3729# qhasm: xmm4 &= xmm14
3730# asm 1: pand <xmm14=int6464#12,<xmm4=int6464#5
3731# asm 2: pand <xmm14=%xmm11,<xmm4=%xmm4
3732pand %xmm11,%xmm4
3733
3734# qhasm: xmm5 &= xmm15
3735# asm 1: pand <xmm15=int6464#14,<xmm5=int6464#6
3736# asm 2: pand <xmm15=%xmm13,<xmm5=%xmm5
3737pand %xmm13,%xmm5
3738
3739# qhasm: xmm4 ^= xmm5
3740# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
3741# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
3742pxor %xmm5,%xmm4
3743
3744# qhasm: xmm5 ^= xmm10
3745# asm 1: pxor <xmm10=int6464#11,<xmm5=int6464#6
3746# asm 2: pxor <xmm10=%xmm10,<xmm5=%xmm5
3747pxor %xmm10,%xmm5
3748
3749# qhasm: xmm12 ^= xmm0
3750# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#9
3751# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm8
3752pxor %xmm0,%xmm8
3753
3754# qhasm: xmm8 ^= xmm2
3755# asm 1: pxor <xmm2=int6464#3,<xmm8=int6464#10
3756# asm 2: pxor <xmm2=%xmm2,<xmm8=%xmm9
3757pxor %xmm2,%xmm9
3758
3759# qhasm: xmm15 ^= xmm13
3760# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
3761# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
3762pxor %xmm15,%xmm13
3763
3764# qhasm: xmm14 ^= xmm9
3765# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
3766# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
3767pxor %xmm12,%xmm11
3768
3769# qhasm: xmm11 = xmm15
3770# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
3771# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
3772movdqa %xmm13,%xmm10
3773
3774# qhasm: xmm11 ^= xmm14
3775# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
3776# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
3777pxor %xmm11,%xmm10
3778
3779# qhasm: xmm11 &= xmm12
3780# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
3781# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
3782pand %xmm8,%xmm10
3783
3784# qhasm: xmm12 ^= xmm8
3785# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
3786# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
3787pxor %xmm9,%xmm8
3788
3789# qhasm: xmm12 &= xmm14
3790# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
3791# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
3792pand %xmm11,%xmm8
3793
3794# qhasm: xmm8 &= xmm15
3795# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
3796# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
3797pand %xmm13,%xmm9
3798
3799# qhasm: xmm8 ^= xmm12
3800# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
3801# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
3802pxor %xmm8,%xmm9
3803
3804# qhasm: xmm12 ^= xmm11
3805# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
3806# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
3807pxor %xmm10,%xmm8
3808
3809# qhasm: xmm10 = xmm13
3810# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
3811# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
3812movdqa %xmm15,%xmm10
3813
3814# qhasm: xmm10 ^= xmm9
3815# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
3816# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
3817pxor %xmm12,%xmm10
3818
3819# qhasm: xmm10 &= xmm0
3820# asm 1: pand <xmm0=int6464#1,<xmm10=int6464#11
3821# asm 2: pand <xmm0=%xmm0,<xmm10=%xmm10
3822pand %xmm0,%xmm10
3823
3824# qhasm: xmm0 ^= xmm2
3825# asm 1: pxor <xmm2=int6464#3,<xmm0=int6464#1
3826# asm 2: pxor <xmm2=%xmm2,<xmm0=%xmm0
3827pxor %xmm2,%xmm0
3828
3829# qhasm: xmm0 &= xmm9
3830# asm 1: pand <xmm9=int6464#13,<xmm0=int6464#1
3831# asm 2: pand <xmm9=%xmm12,<xmm0=%xmm0
3832pand %xmm12,%xmm0
3833
3834# qhasm: xmm2 &= xmm13
3835# asm 1: pand <xmm13=int6464#16,<xmm2=int6464#3
3836# asm 2: pand <xmm13=%xmm15,<xmm2=%xmm2
3837pand %xmm15,%xmm2
3838
3839# qhasm: xmm0 ^= xmm2
3840# asm 1: pxor <xmm2=int6464#3,<xmm0=int6464#1
3841# asm 2: pxor <xmm2=%xmm2,<xmm0=%xmm0
3842pxor %xmm2,%xmm0
3843
3844# qhasm: xmm2 ^= xmm10
3845# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3
3846# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2
3847pxor %xmm10,%xmm2
3848
3849# qhasm: xmm4 ^= xmm12
3850# asm 1: pxor <xmm12=int6464#9,<xmm4=int6464#5
3851# asm 2: pxor <xmm12=%xmm8,<xmm4=%xmm4
3852pxor %xmm8,%xmm4
3853
3854# qhasm: xmm0 ^= xmm12
3855# asm 1: pxor <xmm12=int6464#9,<xmm0=int6464#1
3856# asm 2: pxor <xmm12=%xmm8,<xmm0=%xmm0
3857pxor %xmm8,%xmm0
3858
3859# qhasm: xmm5 ^= xmm8
3860# asm 1: pxor <xmm8=int6464#10,<xmm5=int6464#6
3861# asm 2: pxor <xmm8=%xmm9,<xmm5=%xmm5
3862pxor %xmm9,%xmm5
3863
3864# qhasm: xmm2 ^= xmm8
3865# asm 1: pxor <xmm8=int6464#10,<xmm2=int6464#3
3866# asm 2: pxor <xmm8=%xmm9,<xmm2=%xmm2
3867pxor %xmm9,%xmm2
3868
3869# qhasm: xmm12 = xmm7
3870# asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#9
3871# asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm8
3872movdqa %xmm7,%xmm8
3873
3874# qhasm: xmm8 = xmm1
3875# asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10
3876# asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9
3877movdqa %xmm1,%xmm9
3878
3879# qhasm: xmm12 ^= xmm6
3880# asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#9
3881# asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm8
3882pxor %xmm6,%xmm8
3883
3884# qhasm: xmm8 ^= xmm3
3885# asm 1: pxor <xmm3=int6464#4,<xmm8=int6464#10
3886# asm 2: pxor <xmm3=%xmm3,<xmm8=%xmm9
3887pxor %xmm3,%xmm9
3888
3889# qhasm: xmm11 = xmm15
3890# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
3891# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
3892movdqa %xmm13,%xmm10
3893
3894# qhasm: xmm11 ^= xmm14
3895# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
3896# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
3897pxor %xmm11,%xmm10
3898
3899# qhasm: xmm11 &= xmm12
3900# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
3901# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
3902pand %xmm8,%xmm10
3903
3904# qhasm: xmm12 ^= xmm8
3905# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
3906# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
3907pxor %xmm9,%xmm8
3908
3909# qhasm: xmm12 &= xmm14
3910# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
3911# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
3912pand %xmm11,%xmm8
3913
3914# qhasm: xmm8 &= xmm15
3915# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
3916# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
3917pand %xmm13,%xmm9
3918
3919# qhasm: xmm8 ^= xmm12
3920# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
3921# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
3922pxor %xmm8,%xmm9
3923
3924# qhasm: xmm12 ^= xmm11
3925# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
3926# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
3927pxor %xmm10,%xmm8
3928
3929# qhasm: xmm10 = xmm13
3930# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
3931# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
3932movdqa %xmm15,%xmm10
3933
3934# qhasm: xmm10 ^= xmm9
3935# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
3936# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
3937pxor %xmm12,%xmm10
3938
3939# qhasm: xmm10 &= xmm6
3940# asm 1: pand <xmm6=int6464#7,<xmm10=int6464#11
3941# asm 2: pand <xmm6=%xmm6,<xmm10=%xmm10
3942pand %xmm6,%xmm10
3943
3944# qhasm: xmm6 ^= xmm3
3945# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
3946# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
3947pxor %xmm3,%xmm6
3948
3949# qhasm: xmm6 &= xmm9
3950# asm 1: pand <xmm9=int6464#13,<xmm6=int6464#7
3951# asm 2: pand <xmm9=%xmm12,<xmm6=%xmm6
3952pand %xmm12,%xmm6
3953
3954# qhasm: xmm3 &= xmm13
3955# asm 1: pand <xmm13=int6464#16,<xmm3=int6464#4
3956# asm 2: pand <xmm13=%xmm15,<xmm3=%xmm3
3957pand %xmm15,%xmm3
3958
3959# qhasm: xmm6 ^= xmm3
3960# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
3961# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
3962pxor %xmm3,%xmm6
3963
3964# qhasm: xmm3 ^= xmm10
3965# asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4
3966# asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3
3967pxor %xmm10,%xmm3
3968
3969# qhasm: xmm15 ^= xmm13
3970# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
3971# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
3972pxor %xmm15,%xmm13
3973
3974# qhasm: xmm14 ^= xmm9
3975# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
3976# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
3977pxor %xmm12,%xmm11
3978
3979# qhasm: xmm11 = xmm15
3980# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
3981# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
3982movdqa %xmm13,%xmm10
3983
3984# qhasm: xmm11 ^= xmm14
3985# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
3986# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
3987pxor %xmm11,%xmm10
3988
3989# qhasm: xmm11 &= xmm7
3990# asm 1: pand <xmm7=int6464#8,<xmm11=int6464#11
3991# asm 2: pand <xmm7=%xmm7,<xmm11=%xmm10
3992pand %xmm7,%xmm10
3993
3994# qhasm: xmm7 ^= xmm1
3995# asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8
3996# asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7
3997pxor %xmm1,%xmm7
3998
3999# qhasm: xmm7 &= xmm14
4000# asm 1: pand <xmm14=int6464#12,<xmm7=int6464#8
4001# asm 2: pand <xmm14=%xmm11,<xmm7=%xmm7
4002pand %xmm11,%xmm7
4003
4004# qhasm: xmm1 &= xmm15
4005# asm 1: pand <xmm15=int6464#14,<xmm1=int6464#2
4006# asm 2: pand <xmm15=%xmm13,<xmm1=%xmm1
4007pand %xmm13,%xmm1
4008
4009# qhasm: xmm7 ^= xmm1
4010# asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8
4011# asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7
4012pxor %xmm1,%xmm7
4013
4014# qhasm: xmm1 ^= xmm11
4015# asm 1: pxor <xmm11=int6464#11,<xmm1=int6464#2
4016# asm 2: pxor <xmm11=%xmm10,<xmm1=%xmm1
4017pxor %xmm10,%xmm1
4018
4019# qhasm: xmm7 ^= xmm12
4020# asm 1: pxor <xmm12=int6464#9,<xmm7=int6464#8
4021# asm 2: pxor <xmm12=%xmm8,<xmm7=%xmm7
4022pxor %xmm8,%xmm7
4023
4024# qhasm: xmm6 ^= xmm12
4025# asm 1: pxor <xmm12=int6464#9,<xmm6=int6464#7
4026# asm 2: pxor <xmm12=%xmm8,<xmm6=%xmm6
4027pxor %xmm8,%xmm6
4028
4029# qhasm: xmm1 ^= xmm8
4030# asm 1: pxor <xmm8=int6464#10,<xmm1=int6464#2
4031# asm 2: pxor <xmm8=%xmm9,<xmm1=%xmm1
4032pxor %xmm9,%xmm1
4033
4034# qhasm: xmm3 ^= xmm8
4035# asm 1: pxor <xmm8=int6464#10,<xmm3=int6464#4
4036# asm 2: pxor <xmm8=%xmm9,<xmm3=%xmm3
4037pxor %xmm9,%xmm3
4038
4039# qhasm: xmm7 ^= xmm0
4040# asm 1: pxor <xmm0=int6464#1,<xmm7=int6464#8
4041# asm 2: pxor <xmm0=%xmm0,<xmm7=%xmm7
4042pxor %xmm0,%xmm7
4043
4044# qhasm: xmm1 ^= xmm4
4045# asm 1: pxor <xmm4=int6464#5,<xmm1=int6464#2
4046# asm 2: pxor <xmm4=%xmm4,<xmm1=%xmm1
4047pxor %xmm4,%xmm1
4048
4049# qhasm: xmm6 ^= xmm7
4050# asm 1: pxor <xmm7=int6464#8,<xmm6=int6464#7
4051# asm 2: pxor <xmm7=%xmm7,<xmm6=%xmm6
4052pxor %xmm7,%xmm6
4053
4054# qhasm: xmm4 ^= xmm0
4055# asm 1: pxor <xmm0=int6464#1,<xmm4=int6464#5
4056# asm 2: pxor <xmm0=%xmm0,<xmm4=%xmm4
4057pxor %xmm0,%xmm4
4058
4059# qhasm: xmm0 ^= xmm1
4060# asm 1: pxor <xmm1=int6464#2,<xmm0=int6464#1
4061# asm 2: pxor <xmm1=%xmm1,<xmm0=%xmm0
4062pxor %xmm1,%xmm0
4063
4064# qhasm: xmm1 ^= xmm5
4065# asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2
4066# asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1
4067pxor %xmm5,%xmm1
4068
4069# qhasm: xmm5 ^= xmm3
4070# asm 1: pxor <xmm3=int6464#4,<xmm5=int6464#6
4071# asm 2: pxor <xmm3=%xmm3,<xmm5=%xmm5
4072pxor %xmm3,%xmm5
4073
4074# qhasm: xmm6 ^= xmm5
4075# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
4076# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
4077pxor %xmm5,%xmm6
4078
4079# qhasm: xmm3 ^= xmm2
4080# asm 1: pxor <xmm2=int6464#3,<xmm3=int6464#4
4081# asm 2: pxor <xmm2=%xmm2,<xmm3=%xmm3
4082pxor %xmm2,%xmm3
4083
4084# qhasm: xmm2 ^= xmm5
4085# asm 1: pxor <xmm5=int6464#6,<xmm2=int6464#3
4086# asm 2: pxor <xmm5=%xmm5,<xmm2=%xmm2
4087pxor %xmm5,%xmm2
4088
4089# qhasm: xmm4 ^= xmm2
4090# asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5
4091# asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4
4092pxor %xmm2,%xmm4
4093
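# NOTE (annotation): the pxor below injects the round constant. RCON (see the
# .data section) masks only the highest 32-bit lane, and it lands in xmm6 at
# this point apparently because the S-box circuit permutes the roles of the
# output registers from round to round.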
4094# qhasm: xmm6 ^= RCON
4095# asm 1: pxor RCON,<xmm6=int6464#7
4096# asm 2: pxor RCON,<xmm6=%xmm6
4097pxor RCON,%xmm6
4098
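# NOTE (annotation): the eight pshufb instructions below broadcast byte 3 of
# each 32-bit lane across that lane (EXPB0 repeats source bytes 3, 7, 11, 15),
# spreading the substituted RotWord bytes to every word position ahead of the
# running XOR further down.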
4099# qhasm: shuffle bytes of xmm0 by EXPB0
4100# asm 1: pshufb EXPB0,<xmm0=int6464#1
4101# asm 2: pshufb EXPB0,<xmm0=%xmm0
4102pshufb EXPB0,%xmm0
4103
4104# qhasm: shuffle bytes of xmm1 by EXPB0
4105# asm 1: pshufb EXPB0,<xmm1=int6464#2
4106# asm 2: pshufb EXPB0,<xmm1=%xmm1
4107pshufb EXPB0,%xmm1
4108
4109# qhasm: shuffle bytes of xmm6 by EXPB0
4110# asm 1: pshufb EXPB0,<xmm6=int6464#7
4111# asm 2: pshufb EXPB0,<xmm6=%xmm6
4112pshufb EXPB0,%xmm6
4113
4114# qhasm: shuffle bytes of xmm4 by EXPB0
4115# asm 1: pshufb EXPB0,<xmm4=int6464#5
4116# asm 2: pshufb EXPB0,<xmm4=%xmm4
4117pshufb EXPB0,%xmm4
4118
4119# qhasm: shuffle bytes of xmm2 by EXPB0
4120# asm 1: pshufb EXPB0,<xmm2=int6464#3
4121# asm 2: pshufb EXPB0,<xmm2=%xmm2
4122pshufb EXPB0,%xmm2
4123
4124# qhasm: shuffle bytes of xmm7 by EXPB0
4125# asm 1: pshufb EXPB0,<xmm7=int6464#8
4126# asm 2: pshufb EXPB0,<xmm7=%xmm7
4127pshufb EXPB0,%xmm7
4128
4129# qhasm: shuffle bytes of xmm3 by EXPB0
4130# asm 1: pshufb EXPB0,<xmm3=int6464#4
4131# asm 2: pshufb EXPB0,<xmm3=%xmm3
4132pshufb EXPB0,%xmm3
4133
4134# qhasm: shuffle bytes of xmm5 by EXPB0
4135# asm 1: pshufb EXPB0,<xmm5=int6464#6
4136# asm 2: pshufb EXPB0,<xmm5=%xmm5
4137pshufb EXPB0,%xmm5
4138
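# NOTE (annotation): load the previous bitsliced round key, stored as eight
# 16-byte bit planes at c + 256 through c + 368 (128 bytes per round key).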
4139# qhasm: xmm8 = *(int128 *)(c + 256)
4140# asm 1: movdqa 256(<c=int64#1),>xmm8=int6464#9
4141# asm 2: movdqa 256(<c=%rdi),>xmm8=%xmm8
4142movdqa 256(%rdi),%xmm8
4143
4144# qhasm: xmm9 = *(int128 *)(c + 272)
4145# asm 1: movdqa 272(<c=int64#1),>xmm9=int6464#10
4146# asm 2: movdqa 272(<c=%rdi),>xmm9=%xmm9
4147movdqa 272(%rdi),%xmm9
4148
4149# qhasm: xmm10 = *(int128 *)(c + 288)
4150# asm 1: movdqa 288(<c=int64#1),>xmm10=int6464#11
4151# asm 2: movdqa 288(<c=%rdi),>xmm10=%xmm10
4152movdqa 288(%rdi),%xmm10
4153
4154# qhasm: xmm11 = *(int128 *)(c + 304)
4155# asm 1: movdqa 304(<c=int64#1),>xmm11=int6464#12
4156# asm 2: movdqa 304(<c=%rdi),>xmm11=%xmm11
4157movdqa 304(%rdi),%xmm11
4158
4159# qhasm: xmm12 = *(int128 *)(c + 320)
4160# asm 1: movdqa 320(<c=int64#1),>xmm12=int6464#13
4161# asm 2: movdqa 320(<c=%rdi),>xmm12=%xmm12
4162movdqa 320(%rdi),%xmm12
4163
4164# qhasm: xmm13 = *(int128 *)(c + 336)
4165# asm 1: movdqa 336(<c=int64#1),>xmm13=int6464#14
4166# asm 2: movdqa 336(<c=%rdi),>xmm13=%xmm13
4167movdqa 336(%rdi),%xmm13
4168
4169# qhasm: xmm14 = *(int128 *)(c + 352)
4170# asm 1: movdqa 352(<c=int64#1),>xmm14=int6464#15
4171# asm 2: movdqa 352(<c=%rdi),>xmm14=%xmm14
4172movdqa 352(%rdi),%xmm14
4173
4174# qhasm: xmm15 = *(int128 *)(c + 368)
4175# asm 1: movdqa 368(<c=int64#1),>xmm15=int6464#16
4176# asm 2: movdqa 368(<c=%rdi),>xmm15=%xmm15
4177movdqa 368(%rdi),%xmm15
4178
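# NOTE (annotation): ONE is all-ones, so these four pxor instructions
# complement bit planes 0, 1, 5 and 6 of the loaded key. Those are exactly the
# bits set in the S-box affine constant 0x63, which this circuit presumably
# folds into the key schedule here rather than adding inside the S-box itself.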
4179# qhasm: xmm8 ^= ONE
4180# asm 1: pxor ONE,<xmm8=int6464#9
4181# asm 2: pxor ONE,<xmm8=%xmm8
4182pxor ONE,%xmm8
4183
4184# qhasm: xmm9 ^= ONE
4185# asm 1: pxor ONE,<xmm9=int6464#10
4186# asm 2: pxor ONE,<xmm9=%xmm9
4187pxor ONE,%xmm9
4188
4189# qhasm: xmm13 ^= ONE
4190# asm 1: pxor ONE,<xmm13=int6464#14
4191# asm 2: pxor ONE,<xmm13=%xmm13
4192pxor ONE,%xmm13
4193
4194# qhasm: xmm14 ^= ONE
4195# asm 1: pxor ONE,<xmm14=int6464#15
4196# asm 2: pxor ONE,<xmm14=%xmm14
4197pxor ONE,%xmm14
4198
4199# qhasm: xmm0 ^= xmm8
4200# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
4201# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
4202pxor %xmm8,%xmm0
4203
4204# qhasm: xmm1 ^= xmm9
4205# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
4206# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
4207pxor %xmm9,%xmm1
4208
4209# qhasm: xmm6 ^= xmm10
4210# asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7
4211# asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6
4212pxor %xmm10,%xmm6
4213
4214# qhasm: xmm4 ^= xmm11
4215# asm 1: pxor <xmm11=int6464#12,<xmm4=int6464#5
4216# asm 2: pxor <xmm11=%xmm11,<xmm4=%xmm4
4217pxor %xmm11,%xmm4
4218
4219# qhasm: xmm2 ^= xmm12
4220# asm 1: pxor <xmm12=int6464#13,<xmm2=int6464#3
4221# asm 2: pxor <xmm12=%xmm12,<xmm2=%xmm2
4222pxor %xmm12,%xmm2
4223
4224# qhasm: xmm7 ^= xmm13
4225# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
4226# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
4227pxor %xmm13,%xmm7
4228
4229# qhasm: xmm3 ^= xmm14
4230# asm 1: pxor <xmm14=int6464#15,<xmm3=int6464#4
4231# asm 2: pxor <xmm14=%xmm14,<xmm3=%xmm3
4232pxor %xmm14,%xmm3
4233
4234# qhasm: xmm5 ^= xmm15
4235# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
4236# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
4237pxor %xmm15,%xmm5
4238
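# NOTE (annotation): the previous round key was XORed into the state above.
# The three psrld $8 / pxor passes that follow appear to accumulate the
# running XOR across the four key words packed into each 32-bit lane, i.e.
# the unrolled AES-128 recurrence w[i] = w[i-1] ^ w[i-4].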
4239# qhasm: uint32323232 xmm8 >>= 8
4240# asm 1: psrld $8,<xmm8=int6464#9
4241# asm 2: psrld $8,<xmm8=%xmm8
4242psrld $8,%xmm8
4243
4244# qhasm: uint32323232 xmm9 >>= 8
4245# asm 1: psrld $8,<xmm9=int6464#10
4246# asm 2: psrld $8,<xmm9=%xmm9
4247psrld $8,%xmm9
4248
4249# qhasm: uint32323232 xmm10 >>= 8
4250# asm 1: psrld $8,<xmm10=int6464#11
4251# asm 2: psrld $8,<xmm10=%xmm10
4252psrld $8,%xmm10
4253
4254# qhasm: uint32323232 xmm11 >>= 8
4255# asm 1: psrld $8,<xmm11=int6464#12
4256# asm 2: psrld $8,<xmm11=%xmm11
4257psrld $8,%xmm11
4258
4259# qhasm: uint32323232 xmm12 >>= 8
4260# asm 1: psrld $8,<xmm12=int6464#13
4261# asm 2: psrld $8,<xmm12=%xmm12
4262psrld $8,%xmm12
4263
4264# qhasm: uint32323232 xmm13 >>= 8
4265# asm 1: psrld $8,<xmm13=int6464#14
4266# asm 2: psrld $8,<xmm13=%xmm13
4267psrld $8,%xmm13
4268
4269# qhasm: uint32323232 xmm14 >>= 8
4270# asm 1: psrld $8,<xmm14=int6464#15
4271# asm 2: psrld $8,<xmm14=%xmm14
4272psrld $8,%xmm14
4273
4274# qhasm: uint32323232 xmm15 >>= 8
4275# asm 1: psrld $8,<xmm15=int6464#16
4276# asm 2: psrld $8,<xmm15=%xmm15
4277psrld $8,%xmm15
4278
4279# qhasm: xmm0 ^= xmm8
4280# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
4281# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
4282pxor %xmm8,%xmm0
4283
4284# qhasm: xmm1 ^= xmm9
4285# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
4286# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
4287pxor %xmm9,%xmm1
4288
4289# qhasm: xmm6 ^= xmm10
4290# asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7
4291# asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6
4292pxor %xmm10,%xmm6
4293
4294# qhasm: xmm4 ^= xmm11
4295# asm 1: pxor <xmm11=int6464#12,<xmm4=int6464#5
4296# asm 2: pxor <xmm11=%xmm11,<xmm4=%xmm4
4297pxor %xmm11,%xmm4
4298
4299# qhasm: xmm2 ^= xmm12
4300# asm 1: pxor <xmm12=int6464#13,<xmm2=int6464#3
4301# asm 2: pxor <xmm12=%xmm12,<xmm2=%xmm2
4302pxor %xmm12,%xmm2
4303
4304# qhasm: xmm7 ^= xmm13
4305# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
4306# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
4307pxor %xmm13,%xmm7
4308
4309# qhasm: xmm3 ^= xmm14
4310# asm 1: pxor <xmm14=int6464#15,<xmm3=int6464#4
4311# asm 2: pxor <xmm14=%xmm14,<xmm3=%xmm3
4312pxor %xmm14,%xmm3
4313
4314# qhasm: xmm5 ^= xmm15
4315# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
4316# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
4317pxor %xmm15,%xmm5
4318
4319# qhasm: uint32323232 xmm8 >>= 8
4320# asm 1: psrld $8,<xmm8=int6464#9
4321# asm 2: psrld $8,<xmm8=%xmm8
4322psrld $8,%xmm8
4323
4324# qhasm: uint32323232 xmm9 >>= 8
4325# asm 1: psrld $8,<xmm9=int6464#10
4326# asm 2: psrld $8,<xmm9=%xmm9
4327psrld $8,%xmm9
4328
4329# qhasm: uint32323232 xmm10 >>= 8
4330# asm 1: psrld $8,<xmm10=int6464#11
4331# asm 2: psrld $8,<xmm10=%xmm10
4332psrld $8,%xmm10
4333
4334# qhasm: uint32323232 xmm11 >>= 8
4335# asm 1: psrld $8,<xmm11=int6464#12
4336# asm 2: psrld $8,<xmm11=%xmm11
4337psrld $8,%xmm11
4338
4339# qhasm: uint32323232 xmm12 >>= 8
4340# asm 1: psrld $8,<xmm12=int6464#13
4341# asm 2: psrld $8,<xmm12=%xmm12
4342psrld $8,%xmm12
4343
4344# qhasm: uint32323232 xmm13 >>= 8
4345# asm 1: psrld $8,<xmm13=int6464#14
4346# asm 2: psrld $8,<xmm13=%xmm13
4347psrld $8,%xmm13
4348
4349# qhasm: uint32323232 xmm14 >>= 8
4350# asm 1: psrld $8,<xmm14=int6464#15
4351# asm 2: psrld $8,<xmm14=%xmm14
4352psrld $8,%xmm14
4353
4354# qhasm: uint32323232 xmm15 >>= 8
4355# asm 1: psrld $8,<xmm15=int6464#16
4356# asm 2: psrld $8,<xmm15=%xmm15
4357psrld $8,%xmm15
4358
4359# qhasm: xmm0 ^= xmm8
4360# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
4361# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
4362pxor %xmm8,%xmm0
4363
4364# qhasm: xmm1 ^= xmm9
4365# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
4366# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
4367pxor %xmm9,%xmm1
4368
4369# qhasm: xmm6 ^= xmm10
4370# asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7
4371# asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6
4372pxor %xmm10,%xmm6
4373
4374# qhasm: xmm4 ^= xmm11
4375# asm 1: pxor <xmm11=int6464#12,<xmm4=int6464#5
4376# asm 2: pxor <xmm11=%xmm11,<xmm4=%xmm4
4377pxor %xmm11,%xmm4
4378
4379# qhasm: xmm2 ^= xmm12
4380# asm 1: pxor <xmm12=int6464#13,<xmm2=int6464#3
4381# asm 2: pxor <xmm12=%xmm12,<xmm2=%xmm2
4382pxor %xmm12,%xmm2
4383
4384# qhasm: xmm7 ^= xmm13
4385# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
4386# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
4387pxor %xmm13,%xmm7
4388
4389# qhasm: xmm3 ^= xmm14
4390# asm 1: pxor <xmm14=int6464#15,<xmm3=int6464#4
4391# asm 2: pxor <xmm14=%xmm14,<xmm3=%xmm3
4392pxor %xmm14,%xmm3
4393
4394# qhasm: xmm5 ^= xmm15
4395# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
4396# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
4397pxor %xmm15,%xmm5
4398
4399# qhasm: uint32323232 xmm8 >>= 8
4400# asm 1: psrld $8,<xmm8=int6464#9
4401# asm 2: psrld $8,<xmm8=%xmm8
4402psrld $8,%xmm8
4403
4404# qhasm: uint32323232 xmm9 >>= 8
4405# asm 1: psrld $8,<xmm9=int6464#10
4406# asm 2: psrld $8,<xmm9=%xmm9
4407psrld $8,%xmm9
4408
4409# qhasm: uint32323232 xmm10 >>= 8
4410# asm 1: psrld $8,<xmm10=int6464#11
4411# asm 2: psrld $8,<xmm10=%xmm10
4412psrld $8,%xmm10
4413
4414# qhasm: uint32323232 xmm11 >>= 8
4415# asm 1: psrld $8,<xmm11=int6464#12
4416# asm 2: psrld $8,<xmm11=%xmm11
4417psrld $8,%xmm11
4418
4419# qhasm: uint32323232 xmm12 >>= 8
4420# asm 1: psrld $8,<xmm12=int6464#13
4421# asm 2: psrld $8,<xmm12=%xmm12
4422psrld $8,%xmm12
4423
4424# qhasm: uint32323232 xmm13 >>= 8
4425# asm 1: psrld $8,<xmm13=int6464#14
4426# asm 2: psrld $8,<xmm13=%xmm13
4427psrld $8,%xmm13
4428
4429# qhasm: uint32323232 xmm14 >>= 8
4430# asm 1: psrld $8,<xmm14=int6464#15
4431# asm 2: psrld $8,<xmm14=%xmm14
4432psrld $8,%xmm14
4433
4434# qhasm: uint32323232 xmm15 >>= 8
4435# asm 1: psrld $8,<xmm15=int6464#16
4436# asm 2: psrld $8,<xmm15=%xmm15
4437psrld $8,%xmm15
4438
4439# qhasm: xmm0 ^= xmm8
4440# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
4441# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
4442pxor %xmm8,%xmm0
4443
4444# qhasm: xmm1 ^= xmm9
4445# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
4446# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
4447pxor %xmm9,%xmm1
4448
4449# qhasm: xmm6 ^= xmm10
4450# asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7
4451# asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6
4452pxor %xmm10,%xmm6
4453
4454# qhasm: xmm4 ^= xmm11
4455# asm 1: pxor <xmm11=int6464#12,<xmm4=int6464#5
4456# asm 2: pxor <xmm11=%xmm11,<xmm4=%xmm4
4457pxor %xmm11,%xmm4
4458
4459# qhasm: xmm2 ^= xmm12
4460# asm 1: pxor <xmm12=int6464#13,<xmm2=int6464#3
4461# asm 2: pxor <xmm12=%xmm12,<xmm2=%xmm2
4462pxor %xmm12,%xmm2
4463
4464# qhasm: xmm7 ^= xmm13
4465# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
4466# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
4467pxor %xmm13,%xmm7
4468
4469# qhasm: xmm3 ^= xmm14
4470# asm 1: pxor <xmm14=int6464#15,<xmm3=int6464#4
4471# asm 2: pxor <xmm14=%xmm14,<xmm3=%xmm3
4472pxor %xmm14,%xmm3
4473
4474# qhasm: xmm5 ^= xmm15
4475# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
4476# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
4477pxor %xmm15,%xmm5
4478
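# NOTE (annotation): write the freshly expanded round key to c + 384 through
# c + 496. The store order xmm0, 1, 6, 4, 2, 7, 3, 5 presumably undoes the
# S-box output permutation so the planes sit in order 0..7 in memory.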
4479# qhasm: *(int128 *)(c + 384) = xmm0
4480# asm 1: movdqa <xmm0=int6464#1,384(<c=int64#1)
4481# asm 2: movdqa <xmm0=%xmm0,384(<c=%rdi)
4482movdqa %xmm0,384(%rdi)
4483
4484# qhasm: *(int128 *)(c + 400) = xmm1
4485# asm 1: movdqa <xmm1=int6464#2,400(<c=int64#1)
4486# asm 2: movdqa <xmm1=%xmm1,400(<c=%rdi)
4487movdqa %xmm1,400(%rdi)
4488
4489# qhasm: *(int128 *)(c + 416) = xmm6
4490# asm 1: movdqa <xmm6=int6464#7,416(<c=int64#1)
4491# asm 2: movdqa <xmm6=%xmm6,416(<c=%rdi)
4492movdqa %xmm6,416(%rdi)
4493
4494# qhasm: *(int128 *)(c + 432) = xmm4
4495# asm 1: movdqa <xmm4=int6464#5,432(<c=int64#1)
4496# asm 2: movdqa <xmm4=%xmm4,432(<c=%rdi)
4497movdqa %xmm4,432(%rdi)
4498
4499# qhasm: *(int128 *)(c + 448) = xmm2
4500# asm 1: movdqa <xmm2=int6464#3,448(<c=int64#1)
4501# asm 2: movdqa <xmm2=%xmm2,448(<c=%rdi)
4502movdqa %xmm2,448(%rdi)
4503
4504# qhasm: *(int128 *)(c + 464) = xmm7
4505# asm 1: movdqa <xmm7=int6464#8,464(<c=int64#1)
4506# asm 2: movdqa <xmm7=%xmm7,464(<c=%rdi)
4507movdqa %xmm7,464(%rdi)
4508
4509# qhasm: *(int128 *)(c + 480) = xmm3
4510# asm 1: movdqa <xmm3=int6464#4,480(<c=int64#1)
4511# asm 2: movdqa <xmm3=%xmm3,480(<c=%rdi)
4512movdqa %xmm3,480(%rdi)
4513
4514# qhasm: *(int128 *)(c + 496) = xmm5
4515# asm 1: movdqa <xmm5=int6464#6,496(<c=int64#1)
4516# asm 2: movdqa <xmm5=%xmm5,496(<c=%rdi)
4517movdqa %xmm5,496(%rdi)
4518
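# NOTE (annotation): complement planes 0, 1, 5 and 6 of the carried-forward
# copy as well (here held in xmm0, xmm1, xmm7 and xmm3 because of the output
# permutation) before it feeds the next S-box evaluation.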
4519# qhasm: xmm0 ^= ONE
4520# asm 1: pxor ONE,<xmm0=int6464#1
4521# asm 2: pxor ONE,<xmm0=%xmm0
4522pxor ONE,%xmm0
4523
4524# qhasm: xmm1 ^= ONE
4525# asm 1: pxor ONE,<xmm1=int6464#2
4526# asm 2: pxor ONE,<xmm1=%xmm1
4527pxor ONE,%xmm1
4528
4529# qhasm: xmm7 ^= ONE
4530# asm 1: pxor ONE,<xmm7=int6464#8
4531# asm 2: pxor ONE,<xmm7=%xmm7
4532pxor ONE,%xmm7
4533
4534# qhasm: xmm3 ^= ONE
4535# asm 1: pxor ONE,<xmm3=int6464#4
4536# asm 2: pxor ONE,<xmm3=%xmm3
4537pxor ONE,%xmm3
4538
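# NOTE (annotation): ROTB appears to implement the RotWord step: it moves
# source bytes 12, 0, 4 and 8 into the top byte of lanes 0..3, which is the
# only byte position the next round's EXPB0 broadcast reads.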
4539# qhasm: shuffle bytes of xmm0 by ROTB
4540# asm 1: pshufb ROTB,<xmm0=int6464#1
4541# asm 2: pshufb ROTB,<xmm0=%xmm0
4542pshufb ROTB,%xmm0
4543
4544# qhasm: shuffle bytes of xmm1 by ROTB
4545# asm 1: pshufb ROTB,<xmm1=int6464#2
4546# asm 2: pshufb ROTB,<xmm1=%xmm1
4547pshufb ROTB,%xmm1
4548
4549# qhasm: shuffle bytes of xmm6 by ROTB
4550# asm 1: pshufb ROTB,<xmm6=int6464#7
4551# asm 2: pshufb ROTB,<xmm6=%xmm6
4552pshufb ROTB,%xmm6
4553
4554# qhasm: shuffle bytes of xmm4 by ROTB
4555# asm 1: pshufb ROTB,<xmm4=int6464#5
4556# asm 2: pshufb ROTB,<xmm4=%xmm4
4557pshufb ROTB,%xmm4
4558
4559# qhasm: shuffle bytes of xmm2 by ROTB
4560# asm 1: pshufb ROTB,<xmm2=int6464#3
4561# asm 2: pshufb ROTB,<xmm2=%xmm2
4562pshufb ROTB,%xmm2
4563
4564# qhasm: shuffle bytes of xmm7 by ROTB
4565# asm 1: pshufb ROTB,<xmm7=int6464#8
4566# asm 2: pshufb ROTB,<xmm7=%xmm7
4567pshufb ROTB,%xmm7
4568
4569# qhasm: shuffle bytes of xmm3 by ROTB
4570# asm 1: pshufb ROTB,<xmm3=int6464#4
4571# asm 2: pshufb ROTB,<xmm3=%xmm3
4572pshufb ROTB,%xmm3
4573
4574# qhasm: shuffle bytes of xmm5 by ROTB
4575# asm 1: pshufb ROTB,<xmm5=int6464#6
4576# asm 2: pshufb ROTB,<xmm5=%xmm5
4577pshufb ROTB,%xmm5
4578
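# NOTE (annotation): next expansion round. The pxor chain below is the linear
# input transform of the bitsliced S-box; the RCON / EXPB0 / combine / store
# pattern seen above then repeats with the register roles permuted.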
4579# qhasm: xmm7 ^= xmm3
4580# asm 1: pxor <xmm3=int6464#4,<xmm7=int6464#8
4581# asm 2: pxor <xmm3=%xmm3,<xmm7=%xmm7
4582pxor %xmm3,%xmm7
4583
4584# qhasm: xmm6 ^= xmm1
4585# asm 1: pxor <xmm1=int6464#2,<xmm6=int6464#7
4586# asm 2: pxor <xmm1=%xmm1,<xmm6=%xmm6
4587pxor %xmm1,%xmm6
4588
4589# qhasm: xmm7 ^= xmm0
4590# asm 1: pxor <xmm0=int6464#1,<xmm7=int6464#8
4591# asm 2: pxor <xmm0=%xmm0,<xmm7=%xmm7
4592pxor %xmm0,%xmm7
4593
4594# qhasm: xmm3 ^= xmm6
4595# asm 1: pxor <xmm6=int6464#7,<xmm3=int6464#4
4596# asm 2: pxor <xmm6=%xmm6,<xmm3=%xmm3
4597pxor %xmm6,%xmm3
4598
4599# qhasm: xmm4 ^= xmm0
4600# asm 1: pxor <xmm0=int6464#1,<xmm4=int6464#5
4601# asm 2: pxor <xmm0=%xmm0,<xmm4=%xmm4
4602pxor %xmm0,%xmm4
4603
4604# qhasm: xmm3 ^= xmm4
4605# asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4
4606# asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3
4607pxor %xmm4,%xmm3
4608
4609# qhasm: xmm4 ^= xmm5
4610# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
4611# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
4612pxor %xmm5,%xmm4
4613
4614# qhasm: xmm4 ^= xmm2
4615# asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5
4616# asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4
4617pxor %xmm2,%xmm4
4618
4619# qhasm: xmm5 ^= xmm7
4620# asm 1: pxor <xmm7=int6464#8,<xmm5=int6464#6
4621# asm 2: pxor <xmm7=%xmm7,<xmm5=%xmm5
4622pxor %xmm7,%xmm5
4623
4624# qhasm: xmm4 ^= xmm1
4625# asm 1: pxor <xmm1=int6464#2,<xmm4=int6464#5
4626# asm 2: pxor <xmm1=%xmm1,<xmm4=%xmm4
4627pxor %xmm1,%xmm4
4628
4629# qhasm: xmm2 ^= xmm7
4630# asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3
4631# asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2
4632pxor %xmm7,%xmm2
4633
4634# qhasm: xmm6 ^= xmm5
4635# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
4636# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
4637pxor %xmm5,%xmm6
4638
4639# qhasm: xmm1 ^= xmm7
4640# asm 1: pxor <xmm7=int6464#8,<xmm1=int6464#2
4641# asm 2: pxor <xmm7=%xmm7,<xmm1=%xmm1
4642pxor %xmm7,%xmm1
4643
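# NOTE (annotation): the pand/por heavy section that follows appears to be the
# nonlinear core of the S-box, i.e. the GF(2^8) inversion computed through a
# tower of smaller fields on shared subexpressions held in xmm8..xmm15.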
4644# qhasm: xmm11 = xmm5
4645# asm 1: movdqa <xmm5=int6464#6,>xmm11=int6464#9
4646# asm 2: movdqa <xmm5=%xmm5,>xmm11=%xmm8
4647movdqa %xmm5,%xmm8
4648
4649# qhasm: xmm10 = xmm1
4650# asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10
4651# asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9
4652movdqa %xmm1,%xmm9
4653
4654# qhasm: xmm9 = xmm7
4655# asm 1: movdqa <xmm7=int6464#8,>xmm9=int6464#11
4656# asm 2: movdqa <xmm7=%xmm7,>xmm9=%xmm10
4657movdqa %xmm7,%xmm10
4658
4659# qhasm: xmm13 = xmm6
4660# asm 1: movdqa <xmm6=int6464#7,>xmm13=int6464#12
4661# asm 2: movdqa <xmm6=%xmm6,>xmm13=%xmm11
4662movdqa %xmm6,%xmm11
4663
4664# qhasm: xmm12 = xmm3
4665# asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#13
4666# asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm12
4667movdqa %xmm3,%xmm12
4668
4669# qhasm: xmm11 ^= xmm2
4670# asm 1: pxor <xmm2=int6464#3,<xmm11=int6464#9
4671# asm 2: pxor <xmm2=%xmm2,<xmm11=%xmm8
4672pxor %xmm2,%xmm8
4673
4674# qhasm: xmm10 ^= xmm6
4675# asm 1: pxor <xmm6=int6464#7,<xmm10=int6464#10
4676# asm 2: pxor <xmm6=%xmm6,<xmm10=%xmm9
4677pxor %xmm6,%xmm9
4678
4679# qhasm: xmm9 ^= xmm4
4680# asm 1: pxor <xmm4=int6464#5,<xmm9=int6464#11
4681# asm 2: pxor <xmm4=%xmm4,<xmm9=%xmm10
4682pxor %xmm4,%xmm10
4683
4684# qhasm: xmm13 ^= xmm2
4685# asm 1: pxor <xmm2=int6464#3,<xmm13=int6464#12
4686# asm 2: pxor <xmm2=%xmm2,<xmm13=%xmm11
4687pxor %xmm2,%xmm11
4688
4689# qhasm: xmm12 ^= xmm0
4690# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13
4691# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12
4692pxor %xmm0,%xmm12
4693
4694# qhasm: xmm14 = xmm11
4695# asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14
4696# asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13
4697movdqa %xmm8,%xmm13
4698
4699# qhasm: xmm8 = xmm10
4700# asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15
4701# asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14
4702movdqa %xmm9,%xmm14
4703
4704# qhasm: xmm15 = xmm11
4705# asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16
4706# asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15
4707movdqa %xmm8,%xmm15
4708
4709# qhasm: xmm10 |= xmm9
4710# asm 1: por <xmm9=int6464#11,<xmm10=int6464#10
4711# asm 2: por <xmm9=%xmm10,<xmm10=%xmm9
4712por %xmm10,%xmm9
4713
4714# qhasm: xmm11 |= xmm12
4715# asm 1: por <xmm12=int6464#13,<xmm11=int6464#9
4716# asm 2: por <xmm12=%xmm12,<xmm11=%xmm8
4717por %xmm12,%xmm8
4718
4719# qhasm: xmm15 ^= xmm8
4720# asm 1: pxor <xmm8=int6464#15,<xmm15=int6464#16
4721# asm 2: pxor <xmm8=%xmm14,<xmm15=%xmm15
4722pxor %xmm14,%xmm15
4723
4724# qhasm: xmm14 &= xmm12
4725# asm 1: pand <xmm12=int6464#13,<xmm14=int6464#14
4726# asm 2: pand <xmm12=%xmm12,<xmm14=%xmm13
4727pand %xmm12,%xmm13
4728
4729# qhasm: xmm8 &= xmm9
4730# asm 1: pand <xmm9=int6464#11,<xmm8=int6464#15
4731# asm 2: pand <xmm9=%xmm10,<xmm8=%xmm14
4732pand %xmm10,%xmm14
4733
4734# qhasm: xmm12 ^= xmm9
4735# asm 1: pxor <xmm9=int6464#11,<xmm12=int6464#13
4736# asm 2: pxor <xmm9=%xmm10,<xmm12=%xmm12
4737pxor %xmm10,%xmm12
4738
4739# qhasm: xmm15 &= xmm12
4740# asm 1: pand <xmm12=int6464#13,<xmm15=int6464#16
4741# asm 2: pand <xmm12=%xmm12,<xmm15=%xmm15
4742pand %xmm12,%xmm15
4743
4744# qhasm: xmm12 = xmm4
4745# asm 1: movdqa <xmm4=int6464#5,>xmm12=int6464#11
4746# asm 2: movdqa <xmm4=%xmm4,>xmm12=%xmm10
4747movdqa %xmm4,%xmm10
4748
4749# qhasm: xmm12 ^= xmm0
4750# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#11
4751# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm10
4752pxor %xmm0,%xmm10
4753
4754# qhasm: xmm13 &= xmm12
4755# asm 1: pand <xmm12=int6464#11,<xmm13=int6464#12
4756# asm 2: pand <xmm12=%xmm10,<xmm13=%xmm11
4757pand %xmm10,%xmm11
4758
4759# qhasm: xmm11 ^= xmm13
4760# asm 1: pxor <xmm13=int6464#12,<xmm11=int6464#9
4761# asm 2: pxor <xmm13=%xmm11,<xmm11=%xmm8
4762pxor %xmm11,%xmm8
4763
4764# qhasm: xmm10 ^= xmm13
4765# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
4766# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
4767pxor %xmm11,%xmm9
4768
4769# qhasm: xmm13 = xmm5
4770# asm 1: movdqa <xmm5=int6464#6,>xmm13=int6464#11
4771# asm 2: movdqa <xmm5=%xmm5,>xmm13=%xmm10
4772movdqa %xmm5,%xmm10
4773
4774# qhasm: xmm13 ^= xmm1
4775# asm 1: pxor <xmm1=int6464#2,<xmm13=int6464#11
4776# asm 2: pxor <xmm1=%xmm1,<xmm13=%xmm10
4777pxor %xmm1,%xmm10
4778
4779# qhasm: xmm12 = xmm7
4780# asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#12
4781# asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm11
4782movdqa %xmm7,%xmm11
4783
4784# qhasm: xmm9 = xmm13
4785# asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13
4786# asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12
4787movdqa %xmm10,%xmm12
4788
4789# qhasm: xmm12 ^= xmm3
4790# asm 1: pxor <xmm3=int6464#4,<xmm12=int6464#12
4791# asm 2: pxor <xmm3=%xmm3,<xmm12=%xmm11
4792pxor %xmm3,%xmm11
4793
4794# qhasm: xmm9 |= xmm12
4795# asm 1: por <xmm12=int6464#12,<xmm9=int6464#13
4796# asm 2: por <xmm12=%xmm11,<xmm9=%xmm12
4797por %xmm11,%xmm12
4798
4799# qhasm: xmm13 &= xmm12
4800# asm 1: pand <xmm12=int6464#12,<xmm13=int6464#11
4801# asm 2: pand <xmm12=%xmm11,<xmm13=%xmm10
4802pand %xmm11,%xmm10
4803
4804# qhasm: xmm8 ^= xmm13
4805# asm 1: pxor <xmm13=int6464#11,<xmm8=int6464#15
4806# asm 2: pxor <xmm13=%xmm10,<xmm8=%xmm14
4807pxor %xmm10,%xmm14
4808
4809# qhasm: xmm11 ^= xmm15
4810# asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#9
4811# asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm8
4812pxor %xmm15,%xmm8
4813
4814# qhasm: xmm10 ^= xmm14
4815# asm 1: pxor <xmm14=int6464#14,<xmm10=int6464#10
4816# asm 2: pxor <xmm14=%xmm13,<xmm10=%xmm9
4817pxor %xmm13,%xmm9
4818
4819# qhasm: xmm9 ^= xmm15
4820# asm 1: pxor <xmm15=int6464#16,<xmm9=int6464#13
4821# asm 2: pxor <xmm15=%xmm15,<xmm9=%xmm12
4822pxor %xmm15,%xmm12
4823
4824# qhasm: xmm8 ^= xmm14
4825# asm 1: pxor <xmm14=int6464#14,<xmm8=int6464#15
4826# asm 2: pxor <xmm14=%xmm13,<xmm8=%xmm14
4827pxor %xmm13,%xmm14
4828
4829# qhasm: xmm9 ^= xmm14
4830# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
4831# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
4832pxor %xmm13,%xmm12
4833
4834# qhasm: xmm12 = xmm6
4835# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#11
4836# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm10
4837movdqa %xmm6,%xmm10
4838
4839# qhasm: xmm13 = xmm2
4840# asm 1: movdqa <xmm2=int6464#3,>xmm13=int6464#12
4841# asm 2: movdqa <xmm2=%xmm2,>xmm13=%xmm11
4842movdqa %xmm2,%xmm11
4843
4844# qhasm: xmm14 = xmm1
4845# asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14
4846# asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13
4847movdqa %xmm1,%xmm13
4848
4849# qhasm: xmm15 = xmm5
4850# asm 1: movdqa <xmm5=int6464#6,>xmm15=int6464#16
4851# asm 2: movdqa <xmm5=%xmm5,>xmm15=%xmm15
4852movdqa %xmm5,%xmm15
4853
4854# qhasm: xmm12 &= xmm4
4855# asm 1: pand <xmm4=int6464#5,<xmm12=int6464#11
4856# asm 2: pand <xmm4=%xmm4,<xmm12=%xmm10
4857pand %xmm4,%xmm10
4858
4859# qhasm: xmm13 &= xmm0
4860# asm 1: pand <xmm0=int6464#1,<xmm13=int6464#12
4861# asm 2: pand <xmm0=%xmm0,<xmm13=%xmm11
4862pand %xmm0,%xmm11
4863
4864# qhasm: xmm14 &= xmm7
4865# asm 1: pand <xmm7=int6464#8,<xmm14=int6464#14
4866# asm 2: pand <xmm7=%xmm7,<xmm14=%xmm13
4867pand %xmm7,%xmm13
4868
4869# qhasm: xmm15 |= xmm3
4870# asm 1: por <xmm3=int6464#4,<xmm15=int6464#16
4871# asm 2: por <xmm3=%xmm3,<xmm15=%xmm15
4872por %xmm3,%xmm15
4873
4874# qhasm: xmm11 ^= xmm12
4875# asm 1: pxor <xmm12=int6464#11,<xmm11=int6464#9
4876# asm 2: pxor <xmm12=%xmm10,<xmm11=%xmm8
4877pxor %xmm10,%xmm8
4878
4879# qhasm: xmm10 ^= xmm13
4880# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
4881# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
4882pxor %xmm11,%xmm9
4883
4884# qhasm: xmm9 ^= xmm14
4885# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
4886# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
4887pxor %xmm13,%xmm12
4888
4889# qhasm: xmm8 ^= xmm15
4890# asm 1: pxor <xmm15=int6464#16,<xmm8=int6464#15
4891# asm 2: pxor <xmm15=%xmm15,<xmm8=%xmm14
4892pxor %xmm15,%xmm14
4893
4894# qhasm: xmm12 = xmm11
4895# asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11
4896# asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10
4897movdqa %xmm8,%xmm10
4898
4899# qhasm: xmm12 ^= xmm10
4900# asm 1: pxor <xmm10=int6464#10,<xmm12=int6464#11
4901# asm 2: pxor <xmm10=%xmm9,<xmm12=%xmm10
4902pxor %xmm9,%xmm10
4903
4904# qhasm: xmm11 &= xmm9
4905# asm 1: pand <xmm9=int6464#13,<xmm11=int6464#9
4906# asm 2: pand <xmm9=%xmm12,<xmm11=%xmm8
4907pand %xmm12,%xmm8
4908
4909# qhasm: xmm14 = xmm8
4910# asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12
4911# asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11
4912movdqa %xmm14,%xmm11
4913
4914# qhasm: xmm14 ^= xmm11
4915# asm 1: pxor <xmm11=int6464#9,<xmm14=int6464#12
4916# asm 2: pxor <xmm11=%xmm8,<xmm14=%xmm11
4917pxor %xmm8,%xmm11
4918
4919# qhasm: xmm15 = xmm12
4920# asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14
4921# asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13
4922movdqa %xmm10,%xmm13
4923
4924# qhasm: xmm15 &= xmm14
4925# asm 1: pand <xmm14=int6464#12,<xmm15=int6464#14
4926# asm 2: pand <xmm14=%xmm11,<xmm15=%xmm13
4927pand %xmm11,%xmm13
4928
4929# qhasm: xmm15 ^= xmm10
4930# asm 1: pxor <xmm10=int6464#10,<xmm15=int6464#14
4931# asm 2: pxor <xmm10=%xmm9,<xmm15=%xmm13
4932pxor %xmm9,%xmm13
4933
4934# qhasm: xmm13 = xmm9
4935# asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16
4936# asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15
4937movdqa %xmm12,%xmm15
4938
4939# qhasm: xmm13 ^= xmm8
4940# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
4941# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
4942pxor %xmm14,%xmm15
4943
4944# qhasm: xmm11 ^= xmm10
4945# asm 1: pxor <xmm10=int6464#10,<xmm11=int6464#9
4946# asm 2: pxor <xmm10=%xmm9,<xmm11=%xmm8
4947pxor %xmm9,%xmm8
4948
4949# qhasm: xmm13 &= xmm11
4950# asm 1: pand <xmm11=int6464#9,<xmm13=int6464#16
4951# asm 2: pand <xmm11=%xmm8,<xmm13=%xmm15
4952pand %xmm8,%xmm15
4953
4954# qhasm: xmm13 ^= xmm8
4955# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
4956# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
4957pxor %xmm14,%xmm15
4958
4959# qhasm: xmm9 ^= xmm13
4960# asm 1: pxor <xmm13=int6464#16,<xmm9=int6464#13
4961# asm 2: pxor <xmm13=%xmm15,<xmm9=%xmm12
4962pxor %xmm15,%xmm12
4963
4964# qhasm: xmm10 = xmm14
4965# asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9
4966# asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8
4967movdqa %xmm11,%xmm8
4968
4969# qhasm: xmm10 ^= xmm13
4970# asm 1: pxor <xmm13=int6464#16,<xmm10=int6464#9
4971# asm 2: pxor <xmm13=%xmm15,<xmm10=%xmm8
4972pxor %xmm15,%xmm8
4973
4974# qhasm: xmm10 &= xmm8
4975# asm 1: pand <xmm8=int6464#15,<xmm10=int6464#9
4976# asm 2: pand <xmm8=%xmm14,<xmm10=%xmm8
4977pand %xmm14,%xmm8
4978
4979# qhasm: xmm9 ^= xmm10
4980# asm 1: pxor <xmm10=int6464#9,<xmm9=int6464#13
4981# asm 2: pxor <xmm10=%xmm8,<xmm9=%xmm12
4982pxor %xmm8,%xmm12
4983
4984# qhasm: xmm14 ^= xmm10
4985# asm 1: pxor <xmm10=int6464#9,<xmm14=int6464#12
4986# asm 2: pxor <xmm10=%xmm8,<xmm14=%xmm11
4987pxor %xmm8,%xmm11
4988
4989# qhasm: xmm14 &= xmm15
4990# asm 1: pand <xmm15=int6464#14,<xmm14=int6464#12
4991# asm 2: pand <xmm15=%xmm13,<xmm14=%xmm11
4992pand %xmm13,%xmm11
4993
4994# qhasm: xmm14 ^= xmm12
4995# asm 1: pxor <xmm12=int6464#11,<xmm14=int6464#12
4996# asm 2: pxor <xmm12=%xmm10,<xmm14=%xmm11
4997pxor %xmm10,%xmm11
4998
4999# qhasm: xmm12 = xmm3
5000# asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#9
5001# asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm8
5002movdqa %xmm3,%xmm8
5003
5004# qhasm: xmm8 = xmm7
5005# asm 1: movdqa <xmm7=int6464#8,>xmm8=int6464#10
5006# asm 2: movdqa <xmm7=%xmm7,>xmm8=%xmm9
5007movdqa %xmm7,%xmm9
5008
5009# qhasm: xmm10 = xmm15
5010# asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11
5011# asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10
5012movdqa %xmm13,%xmm10
5013
5014# qhasm: xmm10 ^= xmm14
5015# asm 1: pxor <xmm14=int6464#12,<xmm10=int6464#11
5016# asm 2: pxor <xmm14=%xmm11,<xmm10=%xmm10
5017pxor %xmm11,%xmm10
5018
5019# qhasm: xmm10 &= xmm3
5020# asm 1: pand <xmm3=int6464#4,<xmm10=int6464#11
5021# asm 2: pand <xmm3=%xmm3,<xmm10=%xmm10
5022pand %xmm3,%xmm10
5023
5024# qhasm: xmm3 ^= xmm7
5025# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4
5026# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3
5027pxor %xmm7,%xmm3
5028
5029# qhasm: xmm3 &= xmm14
5030# asm 1: pand <xmm14=int6464#12,<xmm3=int6464#4
5031# asm 2: pand <xmm14=%xmm11,<xmm3=%xmm3
5032pand %xmm11,%xmm3
5033
5034# qhasm: xmm7 &= xmm15
5035# asm 1: pand <xmm15=int6464#14,<xmm7=int6464#8
5036# asm 2: pand <xmm15=%xmm13,<xmm7=%xmm7
5037pand %xmm13,%xmm7
5038
5039# qhasm: xmm3 ^= xmm7
5040# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4
5041# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3
5042pxor %xmm7,%xmm3
5043
5044# qhasm: xmm7 ^= xmm10
5045# asm 1: pxor <xmm10=int6464#11,<xmm7=int6464#8
5046# asm 2: pxor <xmm10=%xmm10,<xmm7=%xmm7
5047pxor %xmm10,%xmm7
5048
5049# qhasm: xmm12 ^= xmm0
5050# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#9
5051# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm8
5052pxor %xmm0,%xmm8
5053
5054# qhasm: xmm8 ^= xmm4
5055# asm 1: pxor <xmm4=int6464#5,<xmm8=int6464#10
5056# asm 2: pxor <xmm4=%xmm4,<xmm8=%xmm9
5057pxor %xmm4,%xmm9
5058
5059# qhasm: xmm15 ^= xmm13
5060# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
5061# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
5062pxor %xmm15,%xmm13
5063
5064# qhasm: xmm14 ^= xmm9
5065# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
5066# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
5067pxor %xmm12,%xmm11
5068
5069# qhasm: xmm11 = xmm15
5070# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
5071# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
5072movdqa %xmm13,%xmm10
5073
5074# qhasm: xmm11 ^= xmm14
5075# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
5076# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
5077pxor %xmm11,%xmm10
5078
5079# qhasm: xmm11 &= xmm12
5080# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
5081# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
5082pand %xmm8,%xmm10
5083
5084# qhasm: xmm12 ^= xmm8
5085# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
5086# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
5087pxor %xmm9,%xmm8
5088
5089# qhasm: xmm12 &= xmm14
5090# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
5091# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
5092pand %xmm11,%xmm8
5093
5094# qhasm: xmm8 &= xmm15
5095# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
5096# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
5097pand %xmm13,%xmm9
5098
5099# qhasm: xmm8 ^= xmm12
5100# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
5101# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
5102pxor %xmm8,%xmm9
5103
5104# qhasm: xmm12 ^= xmm11
5105# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
5106# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
5107pxor %xmm10,%xmm8
5108
5109# qhasm: xmm10 = xmm13
5110# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
5111# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
5112movdqa %xmm15,%xmm10
5113
5114# qhasm: xmm10 ^= xmm9
5115# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
5116# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
5117pxor %xmm12,%xmm10
5118
5119# qhasm: xmm10 &= xmm0
5120# asm 1: pand <xmm0=int6464#1,<xmm10=int6464#11
5121# asm 2: pand <xmm0=%xmm0,<xmm10=%xmm10
5122pand %xmm0,%xmm10
5123
5124# qhasm: xmm0 ^= xmm4
5125# asm 1: pxor <xmm4=int6464#5,<xmm0=int6464#1
5126# asm 2: pxor <xmm4=%xmm4,<xmm0=%xmm0
5127pxor %xmm4,%xmm0
5128
5129# qhasm: xmm0 &= xmm9
5130# asm 1: pand <xmm9=int6464#13,<xmm0=int6464#1
5131# asm 2: pand <xmm9=%xmm12,<xmm0=%xmm0
5132pand %xmm12,%xmm0
5133
5134# qhasm: xmm4 &= xmm13
5135# asm 1: pand <xmm13=int6464#16,<xmm4=int6464#5
5136# asm 2: pand <xmm13=%xmm15,<xmm4=%xmm4
5137pand %xmm15,%xmm4
5138
5139# qhasm: xmm0 ^= xmm4
5140# asm 1: pxor <xmm4=int6464#5,<xmm0=int6464#1
5141# asm 2: pxor <xmm4=%xmm4,<xmm0=%xmm0
5142pxor %xmm4,%xmm0
5143
5144# qhasm: xmm4 ^= xmm10
5145# asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5
5146# asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4
5147pxor %xmm10,%xmm4
5148
5149# qhasm: xmm3 ^= xmm12
5150# asm 1: pxor <xmm12=int6464#9,<xmm3=int6464#4
5151# asm 2: pxor <xmm12=%xmm8,<xmm3=%xmm3
5152pxor %xmm8,%xmm3
5153
5154# qhasm: xmm0 ^= xmm12
5155# asm 1: pxor <xmm12=int6464#9,<xmm0=int6464#1
5156# asm 2: pxor <xmm12=%xmm8,<xmm0=%xmm0
5157pxor %xmm8,%xmm0
5158
5159# qhasm: xmm7 ^= xmm8
5160# asm 1: pxor <xmm8=int6464#10,<xmm7=int6464#8
5161# asm 2: pxor <xmm8=%xmm9,<xmm7=%xmm7
5162pxor %xmm9,%xmm7
5163
5164# qhasm: xmm4 ^= xmm8
5165# asm 1: pxor <xmm8=int6464#10,<xmm4=int6464#5
5166# asm 2: pxor <xmm8=%xmm9,<xmm4=%xmm4
5167pxor %xmm9,%xmm4
5168
5169# qhasm: xmm12 = xmm5
5170# asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#9
5171# asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm8
5172movdqa %xmm5,%xmm8
5173
5174# qhasm: xmm8 = xmm1
5175# asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10
5176# asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9
5177movdqa %xmm1,%xmm9
5178
5179# qhasm: xmm12 ^= xmm2
5180# asm 1: pxor <xmm2=int6464#3,<xmm12=int6464#9
5181# asm 2: pxor <xmm2=%xmm2,<xmm12=%xmm8
5182pxor %xmm2,%xmm8
5183
5184# qhasm: xmm8 ^= xmm6
5185# asm 1: pxor <xmm6=int6464#7,<xmm8=int6464#10
5186# asm 2: pxor <xmm6=%xmm6,<xmm8=%xmm9
5187pxor %xmm6,%xmm9
5188
5189# qhasm: xmm11 = xmm15
5190# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
5191# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
5192movdqa %xmm13,%xmm10
5193
5194# qhasm: xmm11 ^= xmm14
5195# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
5196# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
5197pxor %xmm11,%xmm10
5198
5199# qhasm: xmm11 &= xmm12
5200# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
5201# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
5202pand %xmm8,%xmm10
5203
5204# qhasm: xmm12 ^= xmm8
5205# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
5206# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
5207pxor %xmm9,%xmm8
5208
5209# qhasm: xmm12 &= xmm14
5210# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
5211# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
5212pand %xmm11,%xmm8
5213
5214# qhasm: xmm8 &= xmm15
5215# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
5216# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
5217pand %xmm13,%xmm9
5218
5219# qhasm: xmm8 ^= xmm12
5220# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
5221# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
5222pxor %xmm8,%xmm9
5223
5224# qhasm: xmm12 ^= xmm11
5225# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
5226# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
5227pxor %xmm10,%xmm8
5228
5229# qhasm: xmm10 = xmm13
5230# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
5231# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
5232movdqa %xmm15,%xmm10
5233
5234# qhasm: xmm10 ^= xmm9
5235# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
5236# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
5237pxor %xmm12,%xmm10
5238
5239# qhasm: xmm10 &= xmm2
5240# asm 1: pand <xmm2=int6464#3,<xmm10=int6464#11
5241# asm 2: pand <xmm2=%xmm2,<xmm10=%xmm10
5242pand %xmm2,%xmm10
5243
5244# qhasm: xmm2 ^= xmm6
5245# asm 1: pxor <xmm6=int6464#7,<xmm2=int6464#3
5246# asm 2: pxor <xmm6=%xmm6,<xmm2=%xmm2
5247pxor %xmm6,%xmm2
5248
5249# qhasm: xmm2 &= xmm9
5250# asm 1: pand <xmm9=int6464#13,<xmm2=int6464#3
5251# asm 2: pand <xmm9=%xmm12,<xmm2=%xmm2
5252pand %xmm12,%xmm2
5253
5254# qhasm: xmm6 &= xmm13
5255# asm 1: pand <xmm13=int6464#16,<xmm6=int6464#7
5256# asm 2: pand <xmm13=%xmm15,<xmm6=%xmm6
5257pand %xmm15,%xmm6
5258
5259# qhasm: xmm2 ^= xmm6
5260# asm 1: pxor <xmm6=int6464#7,<xmm2=int6464#3
5261# asm 2: pxor <xmm6=%xmm6,<xmm2=%xmm2
5262pxor %xmm6,%xmm2
5263
5264# qhasm: xmm6 ^= xmm10
5265# asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7
5266# asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6
5267pxor %xmm10,%xmm6
5268
5269# qhasm: xmm15 ^= xmm13
5270# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
5271# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
5272pxor %xmm15,%xmm13
5273
5274# qhasm: xmm14 ^= xmm9
5275# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
5276# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
5277pxor %xmm12,%xmm11
5278
5279# qhasm: xmm11 = xmm15
5280# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
5281# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
5282movdqa %xmm13,%xmm10
5283
5284# qhasm: xmm11 ^= xmm14
5285# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
5286# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
5287pxor %xmm11,%xmm10
5288
5289# qhasm: xmm11 &= xmm5
5290# asm 1: pand <xmm5=int6464#6,<xmm11=int6464#11
5291# asm 2: pand <xmm5=%xmm5,<xmm11=%xmm10
5292pand %xmm5,%xmm10
5293
5294# qhasm: xmm5 ^= xmm1
5295# asm 1: pxor <xmm1=int6464#2,<xmm5=int6464#6
5296# asm 2: pxor <xmm1=%xmm1,<xmm5=%xmm5
5297pxor %xmm1,%xmm5
5298
5299# qhasm: xmm5 &= xmm14
5300# asm 1: pand <xmm14=int6464#12,<xmm5=int6464#6
5301# asm 2: pand <xmm14=%xmm11,<xmm5=%xmm5
5302pand %xmm11,%xmm5
5303
5304# qhasm: xmm1 &= xmm15
5305# asm 1: pand <xmm15=int6464#14,<xmm1=int6464#2
5306# asm 2: pand <xmm15=%xmm13,<xmm1=%xmm1
5307pand %xmm13,%xmm1
5308
5309# qhasm: xmm5 ^= xmm1
5310# asm 1: pxor <xmm1=int6464#2,<xmm5=int6464#6
5311# asm 2: pxor <xmm1=%xmm1,<xmm5=%xmm5
5312pxor %xmm1,%xmm5
5313
5314# qhasm: xmm1 ^= xmm11
5315# asm 1: pxor <xmm11=int6464#11,<xmm1=int6464#2
5316# asm 2: pxor <xmm11=%xmm10,<xmm1=%xmm1
5317pxor %xmm10,%xmm1
5318
5319# qhasm: xmm5 ^= xmm12
5320# asm 1: pxor <xmm12=int6464#9,<xmm5=int6464#6
5321# asm 2: pxor <xmm12=%xmm8,<xmm5=%xmm5
5322pxor %xmm8,%xmm5
5323
5324# qhasm: xmm2 ^= xmm12
5325# asm 1: pxor <xmm12=int6464#9,<xmm2=int6464#3
5326# asm 2: pxor <xmm12=%xmm8,<xmm2=%xmm2
5327pxor %xmm8,%xmm2
5328
5329# qhasm: xmm1 ^= xmm8
5330# asm 1: pxor <xmm8=int6464#10,<xmm1=int6464#2
5331# asm 2: pxor <xmm8=%xmm9,<xmm1=%xmm1
5332pxor %xmm9,%xmm1
5333
5334# qhasm: xmm6 ^= xmm8
5335# asm 1: pxor <xmm8=int6464#10,<xmm6=int6464#7
5336# asm 2: pxor <xmm8=%xmm9,<xmm6=%xmm6
5337pxor %xmm9,%xmm6
5338
5339# qhasm: xmm5 ^= xmm0
5340# asm 1: pxor <xmm0=int6464#1,<xmm5=int6464#6
5341# asm 2: pxor <xmm0=%xmm0,<xmm5=%xmm5
5342pxor %xmm0,%xmm5
5343
5344# qhasm: xmm1 ^= xmm3
5345# asm 1: pxor <xmm3=int6464#4,<xmm1=int6464#2
5346# asm 2: pxor <xmm3=%xmm3,<xmm1=%xmm1
5347pxor %xmm3,%xmm1
5348
5349# qhasm: xmm2 ^= xmm5
5350# asm 1: pxor <xmm5=int6464#6,<xmm2=int6464#3
5351# asm 2: pxor <xmm5=%xmm5,<xmm2=%xmm2
5352pxor %xmm5,%xmm2
5353
5354# qhasm: xmm3 ^= xmm0
5355# asm 1: pxor <xmm0=int6464#1,<xmm3=int6464#4
5356# asm 2: pxor <xmm0=%xmm0,<xmm3=%xmm3
5357pxor %xmm0,%xmm3
5358
5359# qhasm: xmm0 ^= xmm1
5360# asm 1: pxor <xmm1=int6464#2,<xmm0=int6464#1
5361# asm 2: pxor <xmm1=%xmm1,<xmm0=%xmm0
5362pxor %xmm1,%xmm0
5363
5364# qhasm: xmm1 ^= xmm7
5365# asm 1: pxor <xmm7=int6464#8,<xmm1=int6464#2
5366# asm 2: pxor <xmm7=%xmm7,<xmm1=%xmm1
5367pxor %xmm7,%xmm1
5368
5369# qhasm: xmm7 ^= xmm6
5370# asm 1: pxor <xmm6=int6464#7,<xmm7=int6464#8
5371# asm 2: pxor <xmm6=%xmm6,<xmm7=%xmm7
5372pxor %xmm6,%xmm7
5373
5374# qhasm: xmm2 ^= xmm7
5375# asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3
5376# asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2
5377pxor %xmm7,%xmm2
5378
5379# qhasm: xmm6 ^= xmm4
5380# asm 1: pxor <xmm4=int6464#5,<xmm6=int6464#7
5381# asm 2: pxor <xmm4=%xmm4,<xmm6=%xmm6
5382pxor %xmm4,%xmm6
5383
5384# qhasm: xmm4 ^= xmm7
5385# asm 1: pxor <xmm7=int6464#8,<xmm4=int6464#5
5386# asm 2: pxor <xmm7=%xmm7,<xmm4=%xmm4
5387pxor %xmm7,%xmm4
5388
5389# qhasm: xmm3 ^= xmm4
5390# asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4
5391# asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3
5392pxor %xmm4,%xmm3
5393
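# NOTE (annotation): tail of this expansion round: the round constant now
# lands in xmm3, the EXPB0 broadcast follows, and the state is combined with
# the round key just written at c + 384 before the next one is stored.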
5394# qhasm: xmm3 ^= RCON
5395# asm 1: pxor RCON,<xmm3=int6464#4
5396# asm 2: pxor RCON,<xmm3=%xmm3
5397pxor RCON,%xmm3
5398
5399# qhasm: shuffle bytes of xmm0 by EXPB0
5400# asm 1: pshufb EXPB0,<xmm0=int6464#1
5401# asm 2: pshufb EXPB0,<xmm0=%xmm0
5402pshufb EXPB0,%xmm0
5403
5404# qhasm: shuffle bytes of xmm1 by EXPB0
5405# asm 1: pshufb EXPB0,<xmm1=int6464#2
5406# asm 2: pshufb EXPB0,<xmm1=%xmm1
5407pshufb EXPB0,%xmm1
5408
5409# qhasm: shuffle bytes of xmm2 by EXPB0
5410# asm 1: pshufb EXPB0,<xmm2=int6464#3
5411# asm 2: pshufb EXPB0,<xmm2=%xmm2
5412pshufb EXPB0,%xmm2
5413
5414# qhasm: shuffle bytes of xmm3 by EXPB0
5415# asm 1: pshufb EXPB0,<xmm3=int6464#4
5416# asm 2: pshufb EXPB0,<xmm3=%xmm3
5417pshufb EXPB0,%xmm3
5418
5419# qhasm: shuffle bytes of xmm4 by EXPB0
5420# asm 1: pshufb EXPB0,<xmm4=int6464#5
5421# asm 2: pshufb EXPB0,<xmm4=%xmm4
5422pshufb EXPB0,%xmm4
5423
5424# qhasm: shuffle bytes of xmm5 by EXPB0
5425# asm 1: pshufb EXPB0,<xmm5=int6464#6
5426# asm 2: pshufb EXPB0,<xmm5=%xmm5
5427pshufb EXPB0,%xmm5
5428
5429# qhasm: shuffle bytes of xmm6 by EXPB0
5430# asm 1: pshufb EXPB0,<xmm6=int6464#7
5431# asm 2: pshufb EXPB0,<xmm6=%xmm6
5432pshufb EXPB0,%xmm6
5433
5434# qhasm: shuffle bytes of xmm7 by EXPB0
5435# asm 1: pshufb EXPB0,<xmm7=int6464#8
5436# asm 2: pshufb EXPB0,<xmm7=%xmm7
5437pshufb EXPB0,%xmm7
5438
5439# qhasm: xmm8 = *(int128 *)(c + 384)
5440# asm 1: movdqa 384(<c=int64#1),>xmm8=int6464#9
5441# asm 2: movdqa 384(<c=%rdi),>xmm8=%xmm8
5442movdqa 384(%rdi),%xmm8
5443
5444# qhasm: xmm9 = *(int128 *)(c + 400)
5445# asm 1: movdqa 400(<c=int64#1),>xmm9=int6464#10
5446# asm 2: movdqa 400(<c=%rdi),>xmm9=%xmm9
5447movdqa 400(%rdi),%xmm9
5448
5449# qhasm: xmm10 = *(int128 *)(c + 416)
5450# asm 1: movdqa 416(<c=int64#1),>xmm10=int6464#11
5451# asm 2: movdqa 416(<c=%rdi),>xmm10=%xmm10
5452movdqa 416(%rdi),%xmm10
5453
5454# qhasm: xmm11 = *(int128 *)(c + 432)
5455# asm 1: movdqa 432(<c=int64#1),>xmm11=int6464#12
5456# asm 2: movdqa 432(<c=%rdi),>xmm11=%xmm11
5457movdqa 432(%rdi),%xmm11
5458
5459# qhasm: xmm12 = *(int128 *)(c + 448)
5460# asm 1: movdqa 448(<c=int64#1),>xmm12=int6464#13
5461# asm 2: movdqa 448(<c=%rdi),>xmm12=%xmm12
5462movdqa 448(%rdi),%xmm12
5463
5464# qhasm: xmm13 = *(int128 *)(c + 464)
5465# asm 1: movdqa 464(<c=int64#1),>xmm13=int6464#14
5466# asm 2: movdqa 464(<c=%rdi),>xmm13=%xmm13
5467movdqa 464(%rdi),%xmm13
5468
5469# qhasm: xmm14 = *(int128 *)(c + 480)
5470# asm 1: movdqa 480(<c=int64#1),>xmm14=int6464#15
5471# asm 2: movdqa 480(<c=%rdi),>xmm14=%xmm14
5472movdqa 480(%rdi),%xmm14
5473
5474# qhasm: xmm15 = *(int128 *)(c + 496)
5475# asm 1: movdqa 496(<c=int64#1),>xmm15=int6464#16
5476# asm 2: movdqa 496(<c=%rdi),>xmm15=%xmm15
5477movdqa 496(%rdi),%xmm15
5478
5479# qhasm: xmm8 ^= ONE
5480# asm 1: pxor ONE,<xmm8=int6464#9
5481# asm 2: pxor ONE,<xmm8=%xmm8
5482pxor ONE,%xmm8
5483
5484# qhasm: xmm9 ^= ONE
5485# asm 1: pxor ONE,<xmm9=int6464#10
5486# asm 2: pxor ONE,<xmm9=%xmm9
5487pxor ONE,%xmm9
5488
5489# qhasm: xmm13 ^= ONE
5490# asm 1: pxor ONE,<xmm13=int6464#14
5491# asm 2: pxor ONE,<xmm13=%xmm13
5492pxor ONE,%xmm13
5493
5494# qhasm: xmm14 ^= ONE
5495# asm 1: pxor ONE,<xmm14=int6464#15
5496# asm 2: pxor ONE,<xmm14=%xmm14
5497pxor ONE,%xmm14
5498
5499# qhasm: xmm0 ^= xmm8
5500# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
5501# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
5502pxor %xmm8,%xmm0
5503
5504# qhasm: xmm1 ^= xmm9
5505# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
5506# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
5507pxor %xmm9,%xmm1
5508
5509# qhasm: xmm2 ^= xmm10
5510# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3
5511# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2
5512pxor %xmm10,%xmm2
5513
5514# qhasm: xmm3 ^= xmm11
5515# asm 1: pxor <xmm11=int6464#12,<xmm3=int6464#4
5516# asm 2: pxor <xmm11=%xmm11,<xmm3=%xmm3
5517pxor %xmm11,%xmm3
5518
5519# qhasm: xmm4 ^= xmm12
5520# asm 1: pxor <xmm12=int6464#13,<xmm4=int6464#5
5521# asm 2: pxor <xmm12=%xmm12,<xmm4=%xmm4
5522pxor %xmm12,%xmm4
5523
5524# qhasm: xmm5 ^= xmm13
5525# asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6
5526# asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5
5527pxor %xmm13,%xmm5
5528
5529# qhasm: xmm6 ^= xmm14
5530# asm 1: pxor <xmm14=int6464#15,<xmm6=int6464#7
5531# asm 2: pxor <xmm14=%xmm14,<xmm6=%xmm6
5532pxor %xmm14,%xmm6
5533
5534# qhasm: xmm7 ^= xmm15
5535# asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8
5536# asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7
5537pxor %xmm15,%xmm7
5538
5539# qhasm: uint32323232 xmm8 >>= 8
5540# asm 1: psrld $8,<xmm8=int6464#9
5541# asm 2: psrld $8,<xmm8=%xmm8
5542psrld $8,%xmm8
5543
5544# qhasm: uint32323232 xmm9 >>= 8
5545# asm 1: psrld $8,<xmm9=int6464#10
5546# asm 2: psrld $8,<xmm9=%xmm9
5547psrld $8,%xmm9
5548
5549# qhasm: uint32323232 xmm10 >>= 8
5550# asm 1: psrld $8,<xmm10=int6464#11
5551# asm 2: psrld $8,<xmm10=%xmm10
5552psrld $8,%xmm10
5553
5554# qhasm: uint32323232 xmm11 >>= 8
5555# asm 1: psrld $8,<xmm11=int6464#12
5556# asm 2: psrld $8,<xmm11=%xmm11
5557psrld $8,%xmm11
5558
5559# qhasm: uint32323232 xmm12 >>= 8
5560# asm 1: psrld $8,<xmm12=int6464#13
5561# asm 2: psrld $8,<xmm12=%xmm12
5562psrld $8,%xmm12
5563
5564# qhasm: uint32323232 xmm13 >>= 8
5565# asm 1: psrld $8,<xmm13=int6464#14
5566# asm 2: psrld $8,<xmm13=%xmm13
5567psrld $8,%xmm13
5568
5569# qhasm: uint32323232 xmm14 >>= 8
5570# asm 1: psrld $8,<xmm14=int6464#15
5571# asm 2: psrld $8,<xmm14=%xmm14
5572psrld $8,%xmm14
5573
5574# qhasm: uint32323232 xmm15 >>= 8
5575# asm 1: psrld $8,<xmm15=int6464#16
5576# asm 2: psrld $8,<xmm15=%xmm15
5577psrld $8,%xmm15
5578
5579# qhasm: xmm0 ^= xmm8
5580# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
5581# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
5582pxor %xmm8,%xmm0
5583
5584# qhasm: xmm1 ^= xmm9
5585# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
5586# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
5587pxor %xmm9,%xmm1
5588
5589# qhasm: xmm2 ^= xmm10
5590# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3
5591# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2
5592pxor %xmm10,%xmm2
5593
5594# qhasm: xmm3 ^= xmm11
5595# asm 1: pxor <xmm11=int6464#12,<xmm3=int6464#4
5596# asm 2: pxor <xmm11=%xmm11,<xmm3=%xmm3
5597pxor %xmm11,%xmm3
5598
5599# qhasm: xmm4 ^= xmm12
5600# asm 1: pxor <xmm12=int6464#13,<xmm4=int6464#5
5601# asm 2: pxor <xmm12=%xmm12,<xmm4=%xmm4
5602pxor %xmm12,%xmm4
5603
5604# qhasm: xmm5 ^= xmm13
5605# asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6
5606# asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5
5607pxor %xmm13,%xmm5
5608
5609# qhasm: xmm6 ^= xmm14
5610# asm 1: pxor <xmm14=int6464#15,<xmm6=int6464#7
5611# asm 2: pxor <xmm14=%xmm14,<xmm6=%xmm6
5612pxor %xmm14,%xmm6
5613
5614# qhasm: xmm7 ^= xmm15
5615# asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8
5616# asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7
5617pxor %xmm15,%xmm7
5618
5619# qhasm: uint32323232 xmm8 >>= 8
5620# asm 1: psrld $8,<xmm8=int6464#9
5621# asm 2: psrld $8,<xmm8=%xmm8
5622psrld $8,%xmm8
5623
5624# qhasm: uint32323232 xmm9 >>= 8
5625# asm 1: psrld $8,<xmm9=int6464#10
5626# asm 2: psrld $8,<xmm9=%xmm9
5627psrld $8,%xmm9
5628
5629# qhasm: uint32323232 xmm10 >>= 8
5630# asm 1: psrld $8,<xmm10=int6464#11
5631# asm 2: psrld $8,<xmm10=%xmm10
5632psrld $8,%xmm10
5633
5634# qhasm: uint32323232 xmm11 >>= 8
5635# asm 1: psrld $8,<xmm11=int6464#12
5636# asm 2: psrld $8,<xmm11=%xmm11
5637psrld $8,%xmm11
5638
5639# qhasm: uint32323232 xmm12 >>= 8
5640# asm 1: psrld $8,<xmm12=int6464#13
5641# asm 2: psrld $8,<xmm12=%xmm12
5642psrld $8,%xmm12
5643
5644# qhasm: uint32323232 xmm13 >>= 8
5645# asm 1: psrld $8,<xmm13=int6464#14
5646# asm 2: psrld $8,<xmm13=%xmm13
5647psrld $8,%xmm13
5648
5649# qhasm: uint32323232 xmm14 >>= 8
5650# asm 1: psrld $8,<xmm14=int6464#15
5651# asm 2: psrld $8,<xmm14=%xmm14
5652psrld $8,%xmm14
5653
5654# qhasm: uint32323232 xmm15 >>= 8
5655# asm 1: psrld $8,<xmm15=int6464#16
5656# asm 2: psrld $8,<xmm15=%xmm15
5657psrld $8,%xmm15
5658
5659# qhasm: xmm0 ^= xmm8
5660# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
5661# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
5662pxor %xmm8,%xmm0
5663
5664# qhasm: xmm1 ^= xmm9
5665# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
5666# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
5667pxor %xmm9,%xmm1
5668
5669# qhasm: xmm2 ^= xmm10
5670# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3
5671# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2
5672pxor %xmm10,%xmm2
5673
5674# qhasm: xmm3 ^= xmm11
5675# asm 1: pxor <xmm11=int6464#12,<xmm3=int6464#4
5676# asm 2: pxor <xmm11=%xmm11,<xmm3=%xmm3
5677pxor %xmm11,%xmm3
5678
5679# qhasm: xmm4 ^= xmm12
5680# asm 1: pxor <xmm12=int6464#13,<xmm4=int6464#5
5681# asm 2: pxor <xmm12=%xmm12,<xmm4=%xmm4
5682pxor %xmm12,%xmm4
5683
5684# qhasm: xmm5 ^= xmm13
5685# asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6
5686# asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5
5687pxor %xmm13,%xmm5
5688
5689# qhasm: xmm6 ^= xmm14
5690# asm 1: pxor <xmm14=int6464#15,<xmm6=int6464#7
5691# asm 2: pxor <xmm14=%xmm14,<xmm6=%xmm6
5692pxor %xmm14,%xmm6
5693
5694# qhasm: xmm7 ^= xmm15
5695# asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8
5696# asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7
5697pxor %xmm15,%xmm7
5698
5699# qhasm: uint32323232 xmm8 >>= 8
5700# asm 1: psrld $8,<xmm8=int6464#9
5701# asm 2: psrld $8,<xmm8=%xmm8
5702psrld $8,%xmm8
5703
5704# qhasm: uint32323232 xmm9 >>= 8
5705# asm 1: psrld $8,<xmm9=int6464#10
5706# asm 2: psrld $8,<xmm9=%xmm9
5707psrld $8,%xmm9
5708
5709# qhasm: uint32323232 xmm10 >>= 8
5710# asm 1: psrld $8,<xmm10=int6464#11
5711# asm 2: psrld $8,<xmm10=%xmm10
5712psrld $8,%xmm10
5713
5714# qhasm: uint32323232 xmm11 >>= 8
5715# asm 1: psrld $8,<xmm11=int6464#12
5716# asm 2: psrld $8,<xmm11=%xmm11
5717psrld $8,%xmm11
5718
5719# qhasm: uint32323232 xmm12 >>= 8
5720# asm 1: psrld $8,<xmm12=int6464#13
5721# asm 2: psrld $8,<xmm12=%xmm12
5722psrld $8,%xmm12
5723
5724# qhasm: uint32323232 xmm13 >>= 8
5725# asm 1: psrld $8,<xmm13=int6464#14
5726# asm 2: psrld $8,<xmm13=%xmm13
5727psrld $8,%xmm13
5728
5729# qhasm: uint32323232 xmm14 >>= 8
5730# asm 1: psrld $8,<xmm14=int6464#15
5731# asm 2: psrld $8,<xmm14=%xmm14
5732psrld $8,%xmm14
5733
5734# qhasm: uint32323232 xmm15 >>= 8
5735# asm 1: psrld $8,<xmm15=int6464#16
5736# asm 2: psrld $8,<xmm15=%xmm15
5737psrld $8,%xmm15
5738
5739# qhasm: xmm0 ^= xmm8
5740# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
5741# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
5742pxor %xmm8,%xmm0
5743
5744# qhasm: xmm1 ^= xmm9
5745# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
5746# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
5747pxor %xmm9,%xmm1
5748
5749# qhasm: xmm2 ^= xmm10
5750# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3
5751# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2
5752pxor %xmm10,%xmm2
5753
5754# qhasm: xmm3 ^= xmm11
5755# asm 1: pxor <xmm11=int6464#12,<xmm3=int6464#4
5756# asm 2: pxor <xmm11=%xmm11,<xmm3=%xmm3
5757pxor %xmm11,%xmm3
5758
5759# qhasm: xmm4 ^= xmm12
5760# asm 1: pxor <xmm12=int6464#13,<xmm4=int6464#5
5761# asm 2: pxor <xmm12=%xmm12,<xmm4=%xmm4
5762pxor %xmm12,%xmm4
5763
5764# qhasm: xmm5 ^= xmm13
5765# asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6
5766# asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5
5767pxor %xmm13,%xmm5
5768
5769# qhasm: xmm6 ^= xmm14
5770# asm 1: pxor <xmm14=int6464#15,<xmm6=int6464#7
5771# asm 2: pxor <xmm14=%xmm14,<xmm6=%xmm6
5772pxor %xmm14,%xmm6
5773
5774# qhasm: xmm7 ^= xmm15
5775# asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8
5776# asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7
5777pxor %xmm15,%xmm7
5778
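# NOTE (annotation): store the next bitsliced round key at c + 512 through
# c + 624, continuing the 128-byte stride per round key.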
5779# qhasm: *(int128 *)(c + 512) = xmm0
5780# asm 1: movdqa <xmm0=int6464#1,512(<c=int64#1)
5781# asm 2: movdqa <xmm0=%xmm0,512(<c=%rdi)
5782movdqa %xmm0,512(%rdi)
5783
5784# qhasm: *(int128 *)(c + 528) = xmm1
5785# asm 1: movdqa <xmm1=int6464#2,528(<c=int64#1)
5786# asm 2: movdqa <xmm1=%xmm1,528(<c=%rdi)
5787movdqa %xmm1,528(%rdi)
5788
5789# qhasm: *(int128 *)(c + 544) = xmm2
5790# asm 1: movdqa <xmm2=int6464#3,544(<c=int64#1)
5791# asm 2: movdqa <xmm2=%xmm2,544(<c=%rdi)
5792movdqa %xmm2,544(%rdi)
5793
5794# qhasm: *(int128 *)(c + 560) = xmm3
5795# asm 1: movdqa <xmm3=int6464#4,560(<c=int64#1)
5796# asm 2: movdqa <xmm3=%xmm3,560(<c=%rdi)
5797movdqa %xmm3,560(%rdi)
5798
5799# qhasm: *(int128 *)(c + 576) = xmm4
5800# asm 1: movdqa <xmm4=int6464#5,576(<c=int64#1)
5801# asm 2: movdqa <xmm4=%xmm4,576(<c=%rdi)
5802movdqa %xmm4,576(%rdi)
5803
5804# qhasm: *(int128 *)(c + 592) = xmm5
5805# asm 1: movdqa <xmm5=int6464#6,592(<c=int64#1)
5806# asm 2: movdqa <xmm5=%xmm5,592(<c=%rdi)
5807movdqa %xmm5,592(%rdi)
5808
5809# qhasm: *(int128 *)(c + 608) = xmm6
5810# asm 1: movdqa <xmm6=int6464#7,608(<c=int64#1)
5811# asm 2: movdqa <xmm6=%xmm6,608(<c=%rdi)
5812movdqa %xmm6,608(%rdi)
5813
5814# qhasm: *(int128 *)(c + 624) = xmm7
5815# asm 1: movdqa <xmm7=int6464#8,624(<c=int64#1)
5816# asm 2: movdqa <xmm7=%xmm7,624(<c=%rdi)
5817movdqa %xmm7,624(%rdi)
5818
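# Complement bit planes 0, 1, 5 and 6 of the working copy.  These indices
# match the set bits of the AES affine constant 0x63, so this appears to
# fold the S-box's additive constant into the key representation; the same
# planes are complemented again when the stored key is reloaded below.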
5819# qhasm: xmm0 ^= ONE
5820# asm 1: pxor ONE,<xmm0=int6464#1
5821# asm 2: pxor ONE,<xmm0=%xmm0
5822pxor ONE,%xmm0
5823
5824# qhasm: xmm1 ^= ONE
5825# asm 1: pxor ONE,<xmm1=int6464#2
5826# asm 2: pxor ONE,<xmm1=%xmm1
5827pxor ONE,%xmm1
5828
5829# qhasm: xmm5 ^= ONE
5830# asm 1: pxor ONE,<xmm5=int6464#6
5831# asm 2: pxor ONE,<xmm5=%xmm5
5832pxor ONE,%xmm5
5833
5834# qhasm: xmm6 ^= ONE
5835# asm 1: pxor ONE,<xmm6=int6464#7
5836# asm 2: pxor ONE,<xmm6=%xmm6
5837pxor ONE,%xmm6
5838
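# pshufb with ROTB rotates the bytes of every plane identically; applied
# to all eight bit planes at once it plays the role of the key schedule's
# RotWord on the candidate word.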
5839# qhasm: shuffle bytes of xmm0 by ROTB
5840# asm 1: pshufb ROTB,<xmm0=int6464#1
5841# asm 2: pshufb ROTB,<xmm0=%xmm0
5842pshufb ROTB,%xmm0
5843
5844# qhasm: shuffle bytes of xmm1 by ROTB
5845# asm 1: pshufb ROTB,<xmm1=int6464#2
5846# asm 2: pshufb ROTB,<xmm1=%xmm1
5847pshufb ROTB,%xmm1
5848
5849# qhasm: shuffle bytes of xmm2 by ROTB
5850# asm 1: pshufb ROTB,<xmm2=int6464#3
5851# asm 2: pshufb ROTB,<xmm2=%xmm2
5852pshufb ROTB,%xmm2
5853
5854# qhasm: shuffle bytes of xmm3 by ROTB
5855# asm 1: pshufb ROTB,<xmm3=int6464#4
5856# asm 2: pshufb ROTB,<xmm3=%xmm3
5857pshufb ROTB,%xmm3
5858
5859# qhasm: shuffle bytes of xmm4 by ROTB
5860# asm 1: pshufb ROTB,<xmm4=int6464#5
5861# asm 2: pshufb ROTB,<xmm4=%xmm4
5862pshufb ROTB,%xmm4
5863
5864# qhasm: shuffle bytes of xmm5 by ROTB
5865# asm 1: pshufb ROTB,<xmm5=int6464#6
5866# asm 2: pshufb ROTB,<xmm5=%xmm5
5867pshufb ROTB,%xmm5
5868
5869# qhasm: shuffle bytes of xmm6 by ROTB
5870# asm 1: pshufb ROTB,<xmm6=int6464#7
5871# asm 2: pshufb ROTB,<xmm6=%xmm6
5872pshufb ROTB,%xmm6
5873
5874# qhasm: shuffle bytes of xmm7 by ROTB
5875# asm 1: pshufb ROTB,<xmm7=int6464#8
5876# asm 2: pshufb ROTB,<xmm7=%xmm7
5877pshufb ROTB,%xmm7
5878
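# One full bitsliced AES S-box evaluation (SubWord for the key schedule)
# starts here: a fixed pxor/pand/por/movdqa gate network mapping the eight
# input bit planes in xmm0..7 to eight output planes, with xmm8..15 as
# scratch.  It has the usual shape of a top linear layer, a shared GF(2^4)
# inversion core and a bottom linear layer; with no table lookups and no
# data-dependent branches, the substitution runs in constant time.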
5879# qhasm: xmm5 ^= xmm6
5880# asm 1: pxor <xmm6=int6464#7,<xmm5=int6464#6
5881# asm 2: pxor <xmm6=%xmm6,<xmm5=%xmm5
5882pxor %xmm6,%xmm5
5883
5884# qhasm: xmm2 ^= xmm1
5885# asm 1: pxor <xmm1=int6464#2,<xmm2=int6464#3
5886# asm 2: pxor <xmm1=%xmm1,<xmm2=%xmm2
5887pxor %xmm1,%xmm2
5888
5889# qhasm: xmm5 ^= xmm0
5890# asm 1: pxor <xmm0=int6464#1,<xmm5=int6464#6
5891# asm 2: pxor <xmm0=%xmm0,<xmm5=%xmm5
5892pxor %xmm0,%xmm5
5893
5894# qhasm: xmm6 ^= xmm2
5895# asm 1: pxor <xmm2=int6464#3,<xmm6=int6464#7
5896# asm 2: pxor <xmm2=%xmm2,<xmm6=%xmm6
5897pxor %xmm2,%xmm6
5898
5899# qhasm: xmm3 ^= xmm0
5900# asm 1: pxor <xmm0=int6464#1,<xmm3=int6464#4
5901# asm 2: pxor <xmm0=%xmm0,<xmm3=%xmm3
5902pxor %xmm0,%xmm3
5903
5904# qhasm: xmm6 ^= xmm3
5905# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
5906# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
5907pxor %xmm3,%xmm6
5908
5909# qhasm: xmm3 ^= xmm7
5910# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4
5911# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3
5912pxor %xmm7,%xmm3
5913
5914# qhasm: xmm3 ^= xmm4
5915# asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4
5916# asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3
5917pxor %xmm4,%xmm3
5918
5919# qhasm: xmm7 ^= xmm5
5920# asm 1: pxor <xmm5=int6464#6,<xmm7=int6464#8
5921# asm 2: pxor <xmm5=%xmm5,<xmm7=%xmm7
5922pxor %xmm5,%xmm7
5923
5924# qhasm: xmm3 ^= xmm1
5925# asm 1: pxor <xmm1=int6464#2,<xmm3=int6464#4
5926# asm 2: pxor <xmm1=%xmm1,<xmm3=%xmm3
5927pxor %xmm1,%xmm3
5928
5929# qhasm: xmm4 ^= xmm5
5930# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
5931# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
5932pxor %xmm5,%xmm4
5933
5934# qhasm: xmm2 ^= xmm7
5935# asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3
5936# asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2
5937pxor %xmm7,%xmm2
5938
5939# qhasm: xmm1 ^= xmm5
5940# asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2
5941# asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1
5942pxor %xmm5,%xmm1
5943
5944# qhasm: xmm11 = xmm7
5945# asm 1: movdqa <xmm7=int6464#8,>xmm11=int6464#9
5946# asm 2: movdqa <xmm7=%xmm7,>xmm11=%xmm8
5947movdqa %xmm7,%xmm8
5948
5949# qhasm: xmm10 = xmm1
5950# asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10
5951# asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9
5952movdqa %xmm1,%xmm9
5953
5954# qhasm: xmm9 = xmm5
5955# asm 1: movdqa <xmm5=int6464#6,>xmm9=int6464#11
5956# asm 2: movdqa <xmm5=%xmm5,>xmm9=%xmm10
5957movdqa %xmm5,%xmm10
5958
5959# qhasm: xmm13 = xmm2
5960# asm 1: movdqa <xmm2=int6464#3,>xmm13=int6464#12
5961# asm 2: movdqa <xmm2=%xmm2,>xmm13=%xmm11
5962movdqa %xmm2,%xmm11
5963
5964# qhasm: xmm12 = xmm6
5965# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#13
5966# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm12
5967movdqa %xmm6,%xmm12
5968
5969# qhasm: xmm11 ^= xmm4
5970# asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#9
5971# asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm8
5972pxor %xmm4,%xmm8
5973
5974# qhasm: xmm10 ^= xmm2
5975# asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#10
5976# asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm9
5977pxor %xmm2,%xmm9
5978
5979# qhasm: xmm9 ^= xmm3
5980# asm 1: pxor <xmm3=int6464#4,<xmm9=int6464#11
5981# asm 2: pxor <xmm3=%xmm3,<xmm9=%xmm10
5982pxor %xmm3,%xmm10
5983
5984# qhasm: xmm13 ^= xmm4
5985# asm 1: pxor <xmm4=int6464#5,<xmm13=int6464#12
5986# asm 2: pxor <xmm4=%xmm4,<xmm13=%xmm11
5987pxor %xmm4,%xmm11
5988
5989# qhasm: xmm12 ^= xmm0
5990# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13
5991# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12
5992pxor %xmm0,%xmm12
5993
5994# qhasm: xmm14 = xmm11
5995# asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14
5996# asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13
5997movdqa %xmm8,%xmm13
5998
5999# qhasm: xmm8 = xmm10
6000# asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15
6001# asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14
6002movdqa %xmm9,%xmm14
6003
6004# qhasm: xmm15 = xmm11
6005# asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16
6006# asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15
6007movdqa %xmm8,%xmm15
6008
6009# qhasm: xmm10 |= xmm9
6010# asm 1: por <xmm9=int6464#11,<xmm10=int6464#10
6011# asm 2: por <xmm9=%xmm10,<xmm10=%xmm9
6012por %xmm10,%xmm9
6013
6014# qhasm: xmm11 |= xmm12
6015# asm 1: por <xmm12=int6464#13,<xmm11=int6464#9
6016# asm 2: por <xmm12=%xmm12,<xmm11=%xmm8
6017por %xmm12,%xmm8
6018
6019# qhasm: xmm15 ^= xmm8
6020# asm 1: pxor <xmm8=int6464#15,<xmm15=int6464#16
6021# asm 2: pxor <xmm8=%xmm14,<xmm15=%xmm15
6022pxor %xmm14,%xmm15
6023
6024# qhasm: xmm14 &= xmm12
6025# asm 1: pand <xmm12=int6464#13,<xmm14=int6464#14
6026# asm 2: pand <xmm12=%xmm12,<xmm14=%xmm13
6027pand %xmm12,%xmm13
6028
6029# qhasm: xmm8 &= xmm9
6030# asm 1: pand <xmm9=int6464#11,<xmm8=int6464#15
6031# asm 2: pand <xmm9=%xmm10,<xmm8=%xmm14
6032pand %xmm10,%xmm14
6033
6034# qhasm: xmm12 ^= xmm9
6035# asm 1: pxor <xmm9=int6464#11,<xmm12=int6464#13
6036# asm 2: pxor <xmm9=%xmm10,<xmm12=%xmm12
6037pxor %xmm10,%xmm12
6038
6039# qhasm: xmm15 &= xmm12
6040# asm 1: pand <xmm12=int6464#13,<xmm15=int6464#16
6041# asm 2: pand <xmm12=%xmm12,<xmm15=%xmm15
6042pand %xmm12,%xmm15
6043
6044# qhasm: xmm12 = xmm3
6045# asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#11
6046# asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm10
6047movdqa %xmm3,%xmm10
6048
6049# qhasm: xmm12 ^= xmm0
6050# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#11
6051# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm10
6052pxor %xmm0,%xmm10
6053
6054# qhasm: xmm13 &= xmm12
6055# asm 1: pand <xmm12=int6464#11,<xmm13=int6464#12
6056# asm 2: pand <xmm12=%xmm10,<xmm13=%xmm11
6057pand %xmm10,%xmm11
6058
6059# qhasm: xmm11 ^= xmm13
6060# asm 1: pxor <xmm13=int6464#12,<xmm11=int6464#9
6061# asm 2: pxor <xmm13=%xmm11,<xmm11=%xmm8
6062pxor %xmm11,%xmm8
6063
6064# qhasm: xmm10 ^= xmm13
6065# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
6066# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
6067pxor %xmm11,%xmm9
6068
6069# qhasm: xmm13 = xmm7
6070# asm 1: movdqa <xmm7=int6464#8,>xmm13=int6464#11
6071# asm 2: movdqa <xmm7=%xmm7,>xmm13=%xmm10
6072movdqa %xmm7,%xmm10
6073
6074# qhasm: xmm13 ^= xmm1
6075# asm 1: pxor <xmm1=int6464#2,<xmm13=int6464#11
6076# asm 2: pxor <xmm1=%xmm1,<xmm13=%xmm10
6077pxor %xmm1,%xmm10
6078
6079# qhasm: xmm12 = xmm5
6080# asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#12
6081# asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm11
6082movdqa %xmm5,%xmm11
6083
6084# qhasm: xmm9 = xmm13
6085# asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13
6086# asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12
6087movdqa %xmm10,%xmm12
6088
6089# qhasm: xmm12 ^= xmm6
6090# asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#12
6091# asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm11
6092pxor %xmm6,%xmm11
6093
6094# qhasm: xmm9 |= xmm12
6095# asm 1: por <xmm12=int6464#12,<xmm9=int6464#13
6096# asm 2: por <xmm12=%xmm11,<xmm9=%xmm12
6097por %xmm11,%xmm12
6098
6099# qhasm: xmm13 &= xmm12
6100# asm 1: pand <xmm12=int6464#12,<xmm13=int6464#11
6101# asm 2: pand <xmm12=%xmm11,<xmm13=%xmm10
6102pand %xmm11,%xmm10
6103
6104# qhasm: xmm8 ^= xmm13
6105# asm 1: pxor <xmm13=int6464#11,<xmm8=int6464#15
6106# asm 2: pxor <xmm13=%xmm10,<xmm8=%xmm14
6107pxor %xmm10,%xmm14
6108
6109# qhasm: xmm11 ^= xmm15
6110# asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#9
6111# asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm8
6112pxor %xmm15,%xmm8
6113
6114# qhasm: xmm10 ^= xmm14
6115# asm 1: pxor <xmm14=int6464#14,<xmm10=int6464#10
6116# asm 2: pxor <xmm14=%xmm13,<xmm10=%xmm9
6117pxor %xmm13,%xmm9
6118
6119# qhasm: xmm9 ^= xmm15
6120# asm 1: pxor <xmm15=int6464#16,<xmm9=int6464#13
6121# asm 2: pxor <xmm15=%xmm15,<xmm9=%xmm12
6122pxor %xmm15,%xmm12
6123
6124# qhasm: xmm8 ^= xmm14
6125# asm 1: pxor <xmm14=int6464#14,<xmm8=int6464#15
6126# asm 2: pxor <xmm14=%xmm13,<xmm8=%xmm14
6127pxor %xmm13,%xmm14
6128
6129# qhasm: xmm9 ^= xmm14
6130# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
6131# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
6132pxor %xmm13,%xmm12
6133
6134# qhasm: xmm12 = xmm2
6135# asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#11
6136# asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm10
6137movdqa %xmm2,%xmm10
6138
6139# qhasm: xmm13 = xmm4
6140# asm 1: movdqa <xmm4=int6464#5,>xmm13=int6464#12
6141# asm 2: movdqa <xmm4=%xmm4,>xmm13=%xmm11
6142movdqa %xmm4,%xmm11
6143
6144# qhasm: xmm14 = xmm1
6145# asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14
6146# asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13
6147movdqa %xmm1,%xmm13
6148
6149# qhasm: xmm15 = xmm7
6150# asm 1: movdqa <xmm7=int6464#8,>xmm15=int6464#16
6151# asm 2: movdqa <xmm7=%xmm7,>xmm15=%xmm15
6152movdqa %xmm7,%xmm15
6153
6154# qhasm: xmm12 &= xmm3
6155# asm 1: pand <xmm3=int6464#4,<xmm12=int6464#11
6156# asm 2: pand <xmm3=%xmm3,<xmm12=%xmm10
6157pand %xmm3,%xmm10
6158
6159# qhasm: xmm13 &= xmm0
6160# asm 1: pand <xmm0=int6464#1,<xmm13=int6464#12
6161# asm 2: pand <xmm0=%xmm0,<xmm13=%xmm11
6162pand %xmm0,%xmm11
6163
6164# qhasm: xmm14 &= xmm5
6165# asm 1: pand <xmm5=int6464#6,<xmm14=int6464#14
6166# asm 2: pand <xmm5=%xmm5,<xmm14=%xmm13
6167pand %xmm5,%xmm13
6168
6169# qhasm: xmm15 |= xmm6
6170# asm 1: por <xmm6=int6464#7,<xmm15=int6464#16
6171# asm 2: por <xmm6=%xmm6,<xmm15=%xmm15
6172por %xmm6,%xmm15
6173
6174# qhasm: xmm11 ^= xmm12
6175# asm 1: pxor <xmm12=int6464#11,<xmm11=int6464#9
6176# asm 2: pxor <xmm12=%xmm10,<xmm11=%xmm8
6177pxor %xmm10,%xmm8
6178
6179# qhasm: xmm10 ^= xmm13
6180# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
6181# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
6182pxor %xmm11,%xmm9
6183
6184# qhasm: xmm9 ^= xmm14
6185# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
6186# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
6187pxor %xmm13,%xmm12
6188
6189# qhasm: xmm8 ^= xmm15
6190# asm 1: pxor <xmm15=int6464#16,<xmm8=int6464#15
6191# asm 2: pxor <xmm15=%xmm15,<xmm8=%xmm14
6192pxor %xmm15,%xmm14
6193
6194# qhasm: xmm12 = xmm11
6195# asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11
6196# asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10
6197movdqa %xmm8,%xmm10
6198
6199# qhasm: xmm12 ^= xmm10
6200# asm 1: pxor <xmm10=int6464#10,<xmm12=int6464#11
6201# asm 2: pxor <xmm10=%xmm9,<xmm12=%xmm10
6202pxor %xmm9,%xmm10
6203
6204# qhasm: xmm11 &= xmm9
6205# asm 1: pand <xmm9=int6464#13,<xmm11=int6464#9
6206# asm 2: pand <xmm9=%xmm12,<xmm11=%xmm8
6207pand %xmm12,%xmm8
6208
6209# qhasm: xmm14 = xmm8
6210# asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12
6211# asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11
6212movdqa %xmm14,%xmm11
6213
6214# qhasm: xmm14 ^= xmm11
6215# asm 1: pxor <xmm11=int6464#9,<xmm14=int6464#12
6216# asm 2: pxor <xmm11=%xmm8,<xmm14=%xmm11
6217pxor %xmm8,%xmm11
6218
6219# qhasm: xmm15 = xmm12
6220# asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14
6221# asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13
6222movdqa %xmm10,%xmm13
6223
6224# qhasm: xmm15 &= xmm14
6225# asm 1: pand <xmm14=int6464#12,<xmm15=int6464#14
6226# asm 2: pand <xmm14=%xmm11,<xmm15=%xmm13
6227pand %xmm11,%xmm13
6228
6229# qhasm: xmm15 ^= xmm10
6230# asm 1: pxor <xmm10=int6464#10,<xmm15=int6464#14
6231# asm 2: pxor <xmm10=%xmm9,<xmm15=%xmm13
6232pxor %xmm9,%xmm13
6233
6234# qhasm: xmm13 = xmm9
6235# asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16
6236# asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15
6237movdqa %xmm12,%xmm15
6238
6239# qhasm: xmm13 ^= xmm8
6240# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
6241# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
6242pxor %xmm14,%xmm15
6243
6244# qhasm: xmm11 ^= xmm10
6245# asm 1: pxor <xmm10=int6464#10,<xmm11=int6464#9
6246# asm 2: pxor <xmm10=%xmm9,<xmm11=%xmm8
6247pxor %xmm9,%xmm8
6248
6249# qhasm: xmm13 &= xmm11
6250# asm 1: pand <xmm11=int6464#9,<xmm13=int6464#16
6251# asm 2: pand <xmm11=%xmm8,<xmm13=%xmm15
6252pand %xmm8,%xmm15
6253
6254# qhasm: xmm13 ^= xmm8
6255# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
6256# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
6257pxor %xmm14,%xmm15
6258
6259# qhasm: xmm9 ^= xmm13
6260# asm 1: pxor <xmm13=int6464#16,<xmm9=int6464#13
6261# asm 2: pxor <xmm13=%xmm15,<xmm9=%xmm12
6262pxor %xmm15,%xmm12
6263
6264# qhasm: xmm10 = xmm14
6265# asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9
6266# asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8
6267movdqa %xmm11,%xmm8
6268
6269# qhasm: xmm10 ^= xmm13
6270# asm 1: pxor <xmm13=int6464#16,<xmm10=int6464#9
6271# asm 2: pxor <xmm13=%xmm15,<xmm10=%xmm8
6272pxor %xmm15,%xmm8
6273
6274# qhasm: xmm10 &= xmm8
6275# asm 1: pand <xmm8=int6464#15,<xmm10=int6464#9
6276# asm 2: pand <xmm8=%xmm14,<xmm10=%xmm8
6277pand %xmm14,%xmm8
6278
6279# qhasm: xmm9 ^= xmm10
6280# asm 1: pxor <xmm10=int6464#9,<xmm9=int6464#13
6281# asm 2: pxor <xmm10=%xmm8,<xmm9=%xmm12
6282pxor %xmm8,%xmm12
6283
6284# qhasm: xmm14 ^= xmm10
6285# asm 1: pxor <xmm10=int6464#9,<xmm14=int6464#12
6286# asm 2: pxor <xmm10=%xmm8,<xmm14=%xmm11
6287pxor %xmm8,%xmm11
6288
6289# qhasm: xmm14 &= xmm15
6290# asm 1: pand <xmm15=int6464#14,<xmm14=int6464#12
6291# asm 2: pand <xmm15=%xmm13,<xmm14=%xmm11
6292pand %xmm13,%xmm11
6293
6294# qhasm: xmm14 ^= xmm12
6295# asm 1: pxor <xmm12=int6464#11,<xmm14=int6464#12
6296# asm 2: pxor <xmm12=%xmm10,<xmm14=%xmm11
6297pxor %xmm10,%xmm11
6298
6299# qhasm: xmm12 = xmm6
6300# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#9
6301# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm8
6302movdqa %xmm6,%xmm8
6303
6304# qhasm: xmm8 = xmm5
6305# asm 1: movdqa <xmm5=int6464#6,>xmm8=int6464#10
6306# asm 2: movdqa <xmm5=%xmm5,>xmm8=%xmm9
6307movdqa %xmm5,%xmm9
6308
6309# qhasm: xmm10 = xmm15
6310# asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11
6311# asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10
6312movdqa %xmm13,%xmm10
6313
6314# qhasm: xmm10 ^= xmm14
6315# asm 1: pxor <xmm14=int6464#12,<xmm10=int6464#11
6316# asm 2: pxor <xmm14=%xmm11,<xmm10=%xmm10
6317pxor %xmm11,%xmm10
6318
6319# qhasm: xmm10 &= xmm6
6320# asm 1: pand <xmm6=int6464#7,<xmm10=int6464#11
6321# asm 2: pand <xmm6=%xmm6,<xmm10=%xmm10
6322pand %xmm6,%xmm10
6323
6324# qhasm: xmm6 ^= xmm5
6325# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
6326# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
6327pxor %xmm5,%xmm6
6328
6329# qhasm: xmm6 &= xmm14
6330# asm 1: pand <xmm14=int6464#12,<xmm6=int6464#7
6331# asm 2: pand <xmm14=%xmm11,<xmm6=%xmm6
6332pand %xmm11,%xmm6
6333
6334# qhasm: xmm5 &= xmm15
6335# asm 1: pand <xmm15=int6464#14,<xmm5=int6464#6
6336# asm 2: pand <xmm15=%xmm13,<xmm5=%xmm5
6337pand %xmm13,%xmm5
6338
6339# qhasm: xmm6 ^= xmm5
6340# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
6341# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
6342pxor %xmm5,%xmm6
6343
6344# qhasm: xmm5 ^= xmm10
6345# asm 1: pxor <xmm10=int6464#11,<xmm5=int6464#6
6346# asm 2: pxor <xmm10=%xmm10,<xmm5=%xmm5
6347pxor %xmm10,%xmm5
6348
6349# qhasm: xmm12 ^= xmm0
6350# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#9
6351# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm8
6352pxor %xmm0,%xmm8
6353
6354# qhasm: xmm8 ^= xmm3
6355# asm 1: pxor <xmm3=int6464#4,<xmm8=int6464#10
6356# asm 2: pxor <xmm3=%xmm3,<xmm8=%xmm9
6357pxor %xmm3,%xmm9
6358
6359# qhasm: xmm15 ^= xmm13
6360# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
6361# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
6362pxor %xmm15,%xmm13
6363
6364# qhasm: xmm14 ^= xmm9
6365# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
6366# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
6367pxor %xmm12,%xmm11
6368
6369# qhasm: xmm11 = xmm15
6370# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
6371# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
6372movdqa %xmm13,%xmm10
6373
6374# qhasm: xmm11 ^= xmm14
6375# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
6376# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
6377pxor %xmm11,%xmm10
6378
6379# qhasm: xmm11 &= xmm12
6380# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
6381# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
6382pand %xmm8,%xmm10
6383
6384# qhasm: xmm12 ^= xmm8
6385# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
6386# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
6387pxor %xmm9,%xmm8
6388
6389# qhasm: xmm12 &= xmm14
6390# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
6391# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
6392pand %xmm11,%xmm8
6393
6394# qhasm: xmm8 &= xmm15
6395# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
6396# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
6397pand %xmm13,%xmm9
6398
6399# qhasm: xmm8 ^= xmm12
6400# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
6401# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
6402pxor %xmm8,%xmm9
6403
6404# qhasm: xmm12 ^= xmm11
6405# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
6406# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
6407pxor %xmm10,%xmm8
6408
6409# qhasm: xmm10 = xmm13
6410# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
6411# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
6412movdqa %xmm15,%xmm10
6413
6414# qhasm: xmm10 ^= xmm9
6415# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
6416# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
6417pxor %xmm12,%xmm10
6418
6419# qhasm: xmm10 &= xmm0
6420# asm 1: pand <xmm0=int6464#1,<xmm10=int6464#11
6421# asm 2: pand <xmm0=%xmm0,<xmm10=%xmm10
6422pand %xmm0,%xmm10
6423
6424# qhasm: xmm0 ^= xmm3
6425# asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1
6426# asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0
6427pxor %xmm3,%xmm0
6428
6429# qhasm: xmm0 &= xmm9
6430# asm 1: pand <xmm9=int6464#13,<xmm0=int6464#1
6431# asm 2: pand <xmm9=%xmm12,<xmm0=%xmm0
6432pand %xmm12,%xmm0
6433
6434# qhasm: xmm3 &= xmm13
6435# asm 1: pand <xmm13=int6464#16,<xmm3=int6464#4
6436# asm 2: pand <xmm13=%xmm15,<xmm3=%xmm3
6437pand %xmm15,%xmm3
6438
6439# qhasm: xmm0 ^= xmm3
6440# asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1
6441# asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0
6442pxor %xmm3,%xmm0
6443
6444# qhasm: xmm3 ^= xmm10
6445# asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4
6446# asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3
6447pxor %xmm10,%xmm3
6448
6449# qhasm: xmm6 ^= xmm12
6450# asm 1: pxor <xmm12=int6464#9,<xmm6=int6464#7
6451# asm 2: pxor <xmm12=%xmm8,<xmm6=%xmm6
6452pxor %xmm8,%xmm6
6453
6454# qhasm: xmm0 ^= xmm12
6455# asm 1: pxor <xmm12=int6464#9,<xmm0=int6464#1
6456# asm 2: pxor <xmm12=%xmm8,<xmm0=%xmm0
6457pxor %xmm8,%xmm0
6458
6459# qhasm: xmm5 ^= xmm8
6460# asm 1: pxor <xmm8=int6464#10,<xmm5=int6464#6
6461# asm 2: pxor <xmm8=%xmm9,<xmm5=%xmm5
6462pxor %xmm9,%xmm5
6463
6464# qhasm: xmm3 ^= xmm8
6465# asm 1: pxor <xmm8=int6464#10,<xmm3=int6464#4
6466# asm 2: pxor <xmm8=%xmm9,<xmm3=%xmm3
6467pxor %xmm9,%xmm3
6468
6469# qhasm: xmm12 = xmm7
6470# asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#9
6471# asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm8
6472movdqa %xmm7,%xmm8
6473
6474# qhasm: xmm8 = xmm1
6475# asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10
6476# asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9
6477movdqa %xmm1,%xmm9
6478
6479# qhasm: xmm12 ^= xmm4
6480# asm 1: pxor <xmm4=int6464#5,<xmm12=int6464#9
6481# asm 2: pxor <xmm4=%xmm4,<xmm12=%xmm8
6482pxor %xmm4,%xmm8
6483
6484# qhasm: xmm8 ^= xmm2
6485# asm 1: pxor <xmm2=int6464#3,<xmm8=int6464#10
6486# asm 2: pxor <xmm2=%xmm2,<xmm8=%xmm9
6487pxor %xmm2,%xmm9
6488
6489# qhasm: xmm11 = xmm15
6490# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
6491# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
6492movdqa %xmm13,%xmm10
6493
6494# qhasm: xmm11 ^= xmm14
6495# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
6496# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
6497pxor %xmm11,%xmm10
6498
6499# qhasm: xmm11 &= xmm12
6500# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
6501# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
6502pand %xmm8,%xmm10
6503
6504# qhasm: xmm12 ^= xmm8
6505# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
6506# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
6507pxor %xmm9,%xmm8
6508
6509# qhasm: xmm12 &= xmm14
6510# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
6511# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
6512pand %xmm11,%xmm8
6513
6514# qhasm: xmm8 &= xmm15
6515# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
6516# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
6517pand %xmm13,%xmm9
6518
6519# qhasm: xmm8 ^= xmm12
6520# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
6521# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
6522pxor %xmm8,%xmm9
6523
6524# qhasm: xmm12 ^= xmm11
6525# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
6526# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
6527pxor %xmm10,%xmm8
6528
6529# qhasm: xmm10 = xmm13
6530# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
6531# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
6532movdqa %xmm15,%xmm10
6533
6534# qhasm: xmm10 ^= xmm9
6535# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
6536# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
6537pxor %xmm12,%xmm10
6538
6539# qhasm: xmm10 &= xmm4
6540# asm 1: pand <xmm4=int6464#5,<xmm10=int6464#11
6541# asm 2: pand <xmm4=%xmm4,<xmm10=%xmm10
6542pand %xmm4,%xmm10
6543
6544# qhasm: xmm4 ^= xmm2
6545# asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5
6546# asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4
6547pxor %xmm2,%xmm4
6548
6549# qhasm: xmm4 &= xmm9
6550# asm 1: pand <xmm9=int6464#13,<xmm4=int6464#5
6551# asm 2: pand <xmm9=%xmm12,<xmm4=%xmm4
6552pand %xmm12,%xmm4
6553
6554# qhasm: xmm2 &= xmm13
6555# asm 1: pand <xmm13=int6464#16,<xmm2=int6464#3
6556# asm 2: pand <xmm13=%xmm15,<xmm2=%xmm2
6557pand %xmm15,%xmm2
6558
6559# qhasm: xmm4 ^= xmm2
6560# asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5
6561# asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4
6562pxor %xmm2,%xmm4
6563
6564# qhasm: xmm2 ^= xmm10
6565# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3
6566# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2
6567pxor %xmm10,%xmm2
6568
6569# qhasm: xmm15 ^= xmm13
6570# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
6571# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
6572pxor %xmm15,%xmm13
6573
6574# qhasm: xmm14 ^= xmm9
6575# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
6576# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
6577pxor %xmm12,%xmm11
6578
6579# qhasm: xmm11 = xmm15
6580# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
6581# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
6582movdqa %xmm13,%xmm10
6583
6584# qhasm: xmm11 ^= xmm14
6585# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
6586# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
6587pxor %xmm11,%xmm10
6588
6589# qhasm: xmm11 &= xmm7
6590# asm 1: pand <xmm7=int6464#8,<xmm11=int6464#11
6591# asm 2: pand <xmm7=%xmm7,<xmm11=%xmm10
6592pand %xmm7,%xmm10
6593
6594# qhasm: xmm7 ^= xmm1
6595# asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8
6596# asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7
6597pxor %xmm1,%xmm7
6598
6599# qhasm: xmm7 &= xmm14
6600# asm 1: pand <xmm14=int6464#12,<xmm7=int6464#8
6601# asm 2: pand <xmm14=%xmm11,<xmm7=%xmm7
6602pand %xmm11,%xmm7
6603
6604# qhasm: xmm1 &= xmm15
6605# asm 1: pand <xmm15=int6464#14,<xmm1=int6464#2
6606# asm 2: pand <xmm15=%xmm13,<xmm1=%xmm1
6607pand %xmm13,%xmm1
6608
6609# qhasm: xmm7 ^= xmm1
6610# asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8
6611# asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7
6612pxor %xmm1,%xmm7
6613
6614# qhasm: xmm1 ^= xmm11
6615# asm 1: pxor <xmm11=int6464#11,<xmm1=int6464#2
6616# asm 2: pxor <xmm11=%xmm10,<xmm1=%xmm1
6617pxor %xmm10,%xmm1
6618
6619# qhasm: xmm7 ^= xmm12
6620# asm 1: pxor <xmm12=int6464#9,<xmm7=int6464#8
6621# asm 2: pxor <xmm12=%xmm8,<xmm7=%xmm7
6622pxor %xmm8,%xmm7
6623
6624# qhasm: xmm4 ^= xmm12
6625# asm 1: pxor <xmm12=int6464#9,<xmm4=int6464#5
6626# asm 2: pxor <xmm12=%xmm8,<xmm4=%xmm4
6627pxor %xmm8,%xmm4
6628
6629# qhasm: xmm1 ^= xmm8
6630# asm 1: pxor <xmm8=int6464#10,<xmm1=int6464#2
6631# asm 2: pxor <xmm8=%xmm9,<xmm1=%xmm1
6632pxor %xmm9,%xmm1
6633
6634# qhasm: xmm2 ^= xmm8
6635# asm 1: pxor <xmm8=int6464#10,<xmm2=int6464#3
6636# asm 2: pxor <xmm8=%xmm9,<xmm2=%xmm2
6637pxor %xmm9,%xmm2
6638
6639# qhasm: xmm7 ^= xmm0
6640# asm 1: pxor <xmm0=int6464#1,<xmm7=int6464#8
6641# asm 2: pxor <xmm0=%xmm0,<xmm7=%xmm7
6642pxor %xmm0,%xmm7
6643
6644# qhasm: xmm1 ^= xmm6
6645# asm 1: pxor <xmm6=int6464#7,<xmm1=int6464#2
6646# asm 2: pxor <xmm6=%xmm6,<xmm1=%xmm1
6647pxor %xmm6,%xmm1
6648
6649# qhasm: xmm4 ^= xmm7
6650# asm 1: pxor <xmm7=int6464#8,<xmm4=int6464#5
6651# asm 2: pxor <xmm7=%xmm7,<xmm4=%xmm4
6652pxor %xmm7,%xmm4
6653
6654# qhasm: xmm6 ^= xmm0
6655# asm 1: pxor <xmm0=int6464#1,<xmm6=int6464#7
6656# asm 2: pxor <xmm0=%xmm0,<xmm6=%xmm6
6657pxor %xmm0,%xmm6
6658
6659# qhasm: xmm0 ^= xmm1
6660# asm 1: pxor <xmm1=int6464#2,<xmm0=int6464#1
6661# asm 2: pxor <xmm1=%xmm1,<xmm0=%xmm0
6662pxor %xmm1,%xmm0
6663
6664# qhasm: xmm1 ^= xmm5
6665# asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2
6666# asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1
6667pxor %xmm5,%xmm1
6668
6669# qhasm: xmm5 ^= xmm2
6670# asm 1: pxor <xmm2=int6464#3,<xmm5=int6464#6
6671# asm 2: pxor <xmm2=%xmm2,<xmm5=%xmm5
6672pxor %xmm2,%xmm5
6673
6674# qhasm: xmm4 ^= xmm5
6675# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
6676# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
6677pxor %xmm5,%xmm4
6678
6679# qhasm: xmm2 ^= xmm3
6680# asm 1: pxor <xmm3=int6464#4,<xmm2=int6464#3
6681# asm 2: pxor <xmm3=%xmm3,<xmm2=%xmm2
6682pxor %xmm3,%xmm2
6683
6684# qhasm: xmm3 ^= xmm5
6685# asm 1: pxor <xmm5=int6464#6,<xmm3=int6464#4
6686# asm 2: pxor <xmm5=%xmm5,<xmm3=%xmm3
6687pxor %xmm5,%xmm3
6688
6689# qhasm: xmm6 ^= xmm3
6690# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
6691# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
6692pxor %xmm3,%xmm6
6693
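# Mix in the round constant: RCON is xored into a single bit plane of the
# substituted word, the bitsliced counterpart of adding this round's Rcon.
# The plane presumably sits in xmm3 here because the S-box network leaves
# its outputs in permuted registers.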
6694# qhasm: xmm3 ^= RCON
6695# asm 1: pxor RCON,<xmm3=int6464#4
6696# asm 2: pxor RCON,<xmm3=%xmm3
6697pxor RCON,%xmm3
6698
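# pshufb with EXPB0 appears to broadcast one byte of each 32-bit lane
# across the whole lane, readying the substituted word for the word-wise
# recurrence below.  The register order xmm0,1,4,6,3,7,2,5 is the permuted
# order in which the S-box network delivers its output planes.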
6699# qhasm: shuffle bytes of xmm0 by EXPB0
6700# asm 1: pshufb EXPB0,<xmm0=int6464#1
6701# asm 2: pshufb EXPB0,<xmm0=%xmm0
6702pshufb EXPB0,%xmm0
6703
6704# qhasm: shuffle bytes of xmm1 by EXPB0
6705# asm 1: pshufb EXPB0,<xmm1=int6464#2
6706# asm 2: pshufb EXPB0,<xmm1=%xmm1
6707pshufb EXPB0,%xmm1
6708
6709# qhasm: shuffle bytes of xmm4 by EXPB0
6710# asm 1: pshufb EXPB0,<xmm4=int6464#5
6711# asm 2: pshufb EXPB0,<xmm4=%xmm4
6712pshufb EXPB0,%xmm4
6713
6714# qhasm: shuffle bytes of xmm6 by EXPB0
6715# asm 1: pshufb EXPB0,<xmm6=int6464#7
6716# asm 2: pshufb EXPB0,<xmm6=%xmm6
6717pshufb EXPB0,%xmm6
6718
6719# qhasm: shuffle bytes of xmm3 by EXPB0
6720# asm 1: pshufb EXPB0,<xmm3=int6464#4
6721# asm 2: pshufb EXPB0,<xmm3=%xmm3
6722pshufb EXPB0,%xmm3
6723
6724# qhasm: shuffle bytes of xmm7 by EXPB0
6725# asm 1: pshufb EXPB0,<xmm7=int6464#8
6726# asm 2: pshufb EXPB0,<xmm7=%xmm7
6727pshufb EXPB0,%xmm7
6728
6729# qhasm: shuffle bytes of xmm2 by EXPB0
6730# asm 1: pshufb EXPB0,<xmm2=int6464#3
6731# asm 2: pshufb EXPB0,<xmm2=%xmm2
6732pshufb EXPB0,%xmm2
6733
6734# qhasm: shuffle bytes of xmm5 by EXPB0
6735# asm 1: pshufb EXPB0,<xmm5=int6464#6
6736# asm 2: pshufb EXPB0,<xmm5=%xmm5
6737pshufb EXPB0,%xmm5
6738
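# Reload the previous round key's bit planes from the slot stored above
# (c + 512 .. c + 624) into xmm8..15.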
6739# qhasm: xmm8 = *(int128 *)(c + 512)
6740# asm 1: movdqa 512(<c=int64#1),>xmm8=int6464#9
6741# asm 2: movdqa 512(<c=%rdi),>xmm8=%xmm8
6742movdqa 512(%rdi),%xmm8
6743
6744# qhasm: xmm9 = *(int128 *)(c + 528)
6745# asm 1: movdqa 528(<c=int64#1),>xmm9=int6464#10
6746# asm 2: movdqa 528(<c=%rdi),>xmm9=%xmm9
6747movdqa 528(%rdi),%xmm9
6748
6749# qhasm: xmm10 = *(int128 *)(c + 544)
6750# asm 1: movdqa 544(<c=int64#1),>xmm10=int6464#11
6751# asm 2: movdqa 544(<c=%rdi),>xmm10=%xmm10
6752movdqa 544(%rdi),%xmm10
6753
6754# qhasm: xmm11 = *(int128 *)(c + 560)
6755# asm 1: movdqa 560(<c=int64#1),>xmm11=int6464#12
6756# asm 2: movdqa 560(<c=%rdi),>xmm11=%xmm11
6757movdqa 560(%rdi),%xmm11
6758
6759# qhasm: xmm12 = *(int128 *)(c + 576)
6760# asm 1: movdqa 576(<c=int64#1),>xmm12=int6464#13
6761# asm 2: movdqa 576(<c=%rdi),>xmm12=%xmm12
6762movdqa 576(%rdi),%xmm12
6763
6764# qhasm: xmm13 = *(int128 *)(c + 592)
6765# asm 1: movdqa 592(<c=int64#1),>xmm13=int6464#14
6766# asm 2: movdqa 592(<c=%rdi),>xmm13=%xmm13
6767movdqa 592(%rdi),%xmm13
6768
6769# qhasm: xmm14 = *(int128 *)(c + 608)
6770# asm 1: movdqa 608(<c=int64#1),>xmm14=int6464#15
6771# asm 2: movdqa 608(<c=%rdi),>xmm14=%xmm14
6772movdqa 608(%rdi),%xmm14
6773
6774# qhasm: xmm15 = *(int128 *)(c + 624)
6775# asm 1: movdqa 624(<c=int64#1),>xmm15=int6464#16
6776# asm 2: movdqa 624(<c=%rdi),>xmm15=%xmm15
6777movdqa 624(%rdi),%xmm15
6778
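# Planes 0, 1, 5 and 6 of the reloaded key receive the same 0x63
# complement as above before the accumulation that follows.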
6779# qhasm: xmm8 ^= ONE
6780# asm 1: pxor ONE,<xmm8=int6464#9
6781# asm 2: pxor ONE,<xmm8=%xmm8
6782pxor ONE,%xmm8
6783
6784# qhasm: xmm9 ^= ONE
6785# asm 1: pxor ONE,<xmm9=int6464#10
6786# asm 2: pxor ONE,<xmm9=%xmm9
6787pxor ONE,%xmm9
6788
6789# qhasm: xmm13 ^= ONE
6790# asm 1: pxor ONE,<xmm13=int6464#14
6791# asm 2: pxor ONE,<xmm13=%xmm13
6792pxor ONE,%xmm13
6793
6794# qhasm: xmm14 ^= ONE
6795# asm 1: pxor ONE,<xmm14=int6464#15
6796# asm 2: pxor ONE,<xmm14=%xmm14
6797pxor ONE,%xmm14
6798
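# The xor pass below, followed by three psrld/pxor rounds, accumulates
# t ^ (t >> 8) ^ (t >> 16) ^ (t >> 24) from each reloaded plane t into the
# corresponding new plane, a byte-wise prefix XOR per 32-bit lane.  In the
# permuted bitsliced layout this appears to realize the AES-128 recurrence
# w[i] = w[i-1] ^ w[i-4] for all four words of the round key at once.  A
# minimal C sketch of the per-lane arithmetic (hypothetical helper, not
# part of this file):
#
#     /* fold(t) == t ^ (t>>8) ^ (t>>16) ^ (t>>24) */
#     static uint32_t fold(uint32_t t) {
#         t ^= t >> 8;   /* t ^ (t>>8)                      */
#         t ^= t >> 16;  /* also picks up (t>>16) ^ (t>>24) */
#         return t;
#     }
#     /* each accumulator lane then becomes acc ^ fold(t) */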
6799# qhasm: xmm0 ^= xmm8
6800# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
6801# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
6802pxor %xmm8,%xmm0
6803
6804# qhasm: xmm1 ^= xmm9
6805# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
6806# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
6807pxor %xmm9,%xmm1
6808
6809# qhasm: xmm4 ^= xmm10
6810# asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5
6811# asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4
6812pxor %xmm10,%xmm4
6813
6814# qhasm: xmm6 ^= xmm11
6815# asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7
6816# asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6
6817pxor %xmm11,%xmm6
6818
6819# qhasm: xmm3 ^= xmm12
6820# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4
6821# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3
6822pxor %xmm12,%xmm3
6823
6824# qhasm: xmm7 ^= xmm13
6825# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
6826# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
6827pxor %xmm13,%xmm7
6828
6829# qhasm: xmm2 ^= xmm14
6830# asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3
6831# asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2
6832pxor %xmm14,%xmm2
6833
6834# qhasm: xmm5 ^= xmm15
6835# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
6836# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
6837pxor %xmm15,%xmm5
6838
6839# qhasm: uint32323232 xmm8 >>= 8
6840# asm 1: psrld $8,<xmm8=int6464#9
6841# asm 2: psrld $8,<xmm8=%xmm8
6842psrld $8,%xmm8
6843
6844# qhasm: uint32323232 xmm9 >>= 8
6845# asm 1: psrld $8,<xmm9=int6464#10
6846# asm 2: psrld $8,<xmm9=%xmm9
6847psrld $8,%xmm9
6848
6849# qhasm: uint32323232 xmm10 >>= 8
6850# asm 1: psrld $8,<xmm10=int6464#11
6851# asm 2: psrld $8,<xmm10=%xmm10
6852psrld $8,%xmm10
6853
6854# qhasm: uint32323232 xmm11 >>= 8
6855# asm 1: psrld $8,<xmm11=int6464#12
6856# asm 2: psrld $8,<xmm11=%xmm11
6857psrld $8,%xmm11
6858
6859# qhasm: uint32323232 xmm12 >>= 8
6860# asm 1: psrld $8,<xmm12=int6464#13
6861# asm 2: psrld $8,<xmm12=%xmm12
6862psrld $8,%xmm12
6863
6864# qhasm: uint32323232 xmm13 >>= 8
6865# asm 1: psrld $8,<xmm13=int6464#14
6866# asm 2: psrld $8,<xmm13=%xmm13
6867psrld $8,%xmm13
6868
6869# qhasm: uint32323232 xmm14 >>= 8
6870# asm 1: psrld $8,<xmm14=int6464#15
6871# asm 2: psrld $8,<xmm14=%xmm14
6872psrld $8,%xmm14
6873
6874# qhasm: uint32323232 xmm15 >>= 8
6875# asm 1: psrld $8,<xmm15=int6464#16
6876# asm 2: psrld $8,<xmm15=%xmm15
6877psrld $8,%xmm15
6878
6879# qhasm: xmm0 ^= xmm8
6880# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
6881# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
6882pxor %xmm8,%xmm0
6883
6884# qhasm: xmm1 ^= xmm9
6885# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
6886# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
6887pxor %xmm9,%xmm1
6888
6889# qhasm: xmm4 ^= xmm10
6890# asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5
6891# asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4
6892pxor %xmm10,%xmm4
6893
6894# qhasm: xmm6 ^= xmm11
6895# asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7
6896# asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6
6897pxor %xmm11,%xmm6
6898
6899# qhasm: xmm3 ^= xmm12
6900# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4
6901# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3
6902pxor %xmm12,%xmm3
6903
6904# qhasm: xmm7 ^= xmm13
6905# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
6906# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
6907pxor %xmm13,%xmm7
6908
6909# qhasm: xmm2 ^= xmm14
6910# asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3
6911# asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2
6912pxor %xmm14,%xmm2
6913
6914# qhasm: xmm5 ^= xmm15
6915# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
6916# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
6917pxor %xmm15,%xmm5
6918
6919# qhasm: uint32323232 xmm8 >>= 8
6920# asm 1: psrld $8,<xmm8=int6464#9
6921# asm 2: psrld $8,<xmm8=%xmm8
6922psrld $8,%xmm8
6923
6924# qhasm: uint32323232 xmm9 >>= 8
6925# asm 1: psrld $8,<xmm9=int6464#10
6926# asm 2: psrld $8,<xmm9=%xmm9
6927psrld $8,%xmm9
6928
6929# qhasm: uint32323232 xmm10 >>= 8
6930# asm 1: psrld $8,<xmm10=int6464#11
6931# asm 2: psrld $8,<xmm10=%xmm10
6932psrld $8,%xmm10
6933
6934# qhasm: uint32323232 xmm11 >>= 8
6935# asm 1: psrld $8,<xmm11=int6464#12
6936# asm 2: psrld $8,<xmm11=%xmm11
6937psrld $8,%xmm11
6938
6939# qhasm: uint32323232 xmm12 >>= 8
6940# asm 1: psrld $8,<xmm12=int6464#13
6941# asm 2: psrld $8,<xmm12=%xmm12
6942psrld $8,%xmm12
6943
6944# qhasm: uint32323232 xmm13 >>= 8
6945# asm 1: psrld $8,<xmm13=int6464#14
6946# asm 2: psrld $8,<xmm13=%xmm13
6947psrld $8,%xmm13
6948
6949# qhasm: uint32323232 xmm14 >>= 8
6950# asm 1: psrld $8,<xmm14=int6464#15
6951# asm 2: psrld $8,<xmm14=%xmm14
6952psrld $8,%xmm14
6953
6954# qhasm: uint32323232 xmm15 >>= 8
6955# asm 1: psrld $8,<xmm15=int6464#16
6956# asm 2: psrld $8,<xmm15=%xmm15
6957psrld $8,%xmm15
6958
6959# qhasm: xmm0 ^= xmm8
6960# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
6961# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
6962pxor %xmm8,%xmm0
6963
6964# qhasm: xmm1 ^= xmm9
6965# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
6966# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
6967pxor %xmm9,%xmm1
6968
6969# qhasm: xmm4 ^= xmm10
6970# asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5
6971# asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4
6972pxor %xmm10,%xmm4
6973
6974# qhasm: xmm6 ^= xmm11
6975# asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7
6976# asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6
6977pxor %xmm11,%xmm6
6978
6979# qhasm: xmm3 ^= xmm12
6980# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4
6981# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3
6982pxor %xmm12,%xmm3
6983
6984# qhasm: xmm7 ^= xmm13
6985# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
6986# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
6987pxor %xmm13,%xmm7
6988
6989# qhasm: xmm2 ^= xmm14
6990# asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3
6991# asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2
6992pxor %xmm14,%xmm2
6993
6994# qhasm: xmm5 ^= xmm15
6995# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
6996# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
6997pxor %xmm15,%xmm5
6998
6999# qhasm: uint32323232 xmm8 >>= 8
7000# asm 1: psrld $8,<xmm8=int6464#9
7001# asm 2: psrld $8,<xmm8=%xmm8
7002psrld $8,%xmm8
7003
7004# qhasm: uint32323232 xmm9 >>= 8
7005# asm 1: psrld $8,<xmm9=int6464#10
7006# asm 2: psrld $8,<xmm9=%xmm9
7007psrld $8,%xmm9
7008
7009# qhasm: uint32323232 xmm10 >>= 8
7010# asm 1: psrld $8,<xmm10=int6464#11
7011# asm 2: psrld $8,<xmm10=%xmm10
7012psrld $8,%xmm10
7013
7014# qhasm: uint32323232 xmm11 >>= 8
7015# asm 1: psrld $8,<xmm11=int6464#12
7016# asm 2: psrld $8,<xmm11=%xmm11
7017psrld $8,%xmm11
7018
7019# qhasm: uint32323232 xmm12 >>= 8
7020# asm 1: psrld $8,<xmm12=int6464#13
7021# asm 2: psrld $8,<xmm12=%xmm12
7022psrld $8,%xmm12
7023
7024# qhasm: uint32323232 xmm13 >>= 8
7025# asm 1: psrld $8,<xmm13=int6464#14
7026# asm 2: psrld $8,<xmm13=%xmm13
7027psrld $8,%xmm13
7028
7029# qhasm: uint32323232 xmm14 >>= 8
7030# asm 1: psrld $8,<xmm14=int6464#15
7031# asm 2: psrld $8,<xmm14=%xmm14
7032psrld $8,%xmm14
7033
7034# qhasm: uint32323232 xmm15 >>= 8
7035# asm 1: psrld $8,<xmm15=int6464#16
7036# asm 2: psrld $8,<xmm15=%xmm15
7037psrld $8,%xmm15
7038
7039# qhasm: xmm0 ^= xmm8
7040# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
7041# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
7042pxor %xmm8,%xmm0
7043
7044# qhasm: xmm1 ^= xmm9
7045# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
7046# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
7047pxor %xmm9,%xmm1
7048
7049# qhasm: xmm4 ^= xmm10
7050# asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5
7051# asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4
7052pxor %xmm10,%xmm4
7053
7054# qhasm: xmm6 ^= xmm11
7055# asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7
7056# asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6
7057pxor %xmm11,%xmm6
7058
7059# qhasm: xmm3 ^= xmm12
7060# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4
7061# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3
7062pxor %xmm12,%xmm3
7063
7064# qhasm: xmm7 ^= xmm13
7065# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
7066# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
7067pxor %xmm13,%xmm7
7068
7069# qhasm: xmm2 ^= xmm14
7070# asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3
7071# asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2
7072pxor %xmm14,%xmm2
7073
7074# qhasm: xmm5 ^= xmm15
7075# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
7076# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
7077pxor %xmm15,%xmm5
7078
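# Store the next bitsliced round key into the following 128-byte slot,
# c + 640 .. c + 752.  Note the permuted register order of the stores,
# inherited from the S-box output.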
7079# qhasm: *(int128 *)(c + 640) = xmm0
7080# asm 1: movdqa <xmm0=int6464#1,640(<c=int64#1)
7081# asm 2: movdqa <xmm0=%xmm0,640(<c=%rdi)
7082movdqa %xmm0,640(%rdi)
7083
7084# qhasm: *(int128 *)(c + 656) = xmm1
7085# asm 1: movdqa <xmm1=int6464#2,656(<c=int64#1)
7086# asm 2: movdqa <xmm1=%xmm1,656(<c=%rdi)
7087movdqa %xmm1,656(%rdi)
7088
7089# qhasm: *(int128 *)(c + 672) = xmm4
7090# asm 1: movdqa <xmm4=int6464#5,672(<c=int64#1)
7091# asm 2: movdqa <xmm4=%xmm4,672(<c=%rdi)
7092movdqa %xmm4,672(%rdi)
7093
7094# qhasm: *(int128 *)(c + 688) = xmm6
7095# asm 1: movdqa <xmm6=int6464#7,688(<c=int64#1)
7096# asm 2: movdqa <xmm6=%xmm6,688(<c=%rdi)
7097movdqa %xmm6,688(%rdi)
7098
7099# qhasm: *(int128 *)(c + 704) = xmm3
7100# asm 1: movdqa <xmm3=int6464#4,704(<c=int64#1)
7101# asm 2: movdqa <xmm3=%xmm3,704(<c=%rdi)
7102movdqa %xmm3,704(%rdi)
7103
7104# qhasm: *(int128 *)(c + 720) = xmm7
7105# asm 1: movdqa <xmm7=int6464#8,720(<c=int64#1)
7106# asm 2: movdqa <xmm7=%xmm7,720(<c=%rdi)
7107movdqa %xmm7,720(%rdi)
7108
7109# qhasm: *(int128 *)(c + 736) = xmm2
7110# asm 1: movdqa <xmm2=int6464#3,736(<c=int64#1)
7111# asm 2: movdqa <xmm2=%xmm2,736(<c=%rdi)
7112movdqa %xmm2,736(%rdi)
7113
7114# qhasm: *(int128 *)(c + 752) = xmm5
7115# asm 1: movdqa <xmm5=int6464#6,752(<c=int64#1)
7116# asm 2: movdqa <xmm5=%xmm5,752(<c=%rdi)
7117movdqa %xmm5,752(%rdi)
7118
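# Same preparation as above for the next expansion round: the 0x63 planes
# (0, 1, 5 and 6) are complemented, here living in xmm0, xmm1, xmm7 and
# xmm2 because of the S-box output permutation, and ROTB again rotates the
# bytes of every plane below.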
7119# qhasm: xmm0 ^= ONE
7120# asm 1: pxor ONE,<xmm0=int6464#1
7121# asm 2: pxor ONE,<xmm0=%xmm0
7122pxor ONE,%xmm0
7123
7124# qhasm: xmm1 ^= ONE
7125# asm 1: pxor ONE,<xmm1=int6464#2
7126# asm 2: pxor ONE,<xmm1=%xmm1
7127pxor ONE,%xmm1
7128
7129# qhasm: xmm7 ^= ONE
7130# asm 1: pxor ONE,<xmm7=int6464#8
7131# asm 2: pxor ONE,<xmm7=%xmm7
7132pxor ONE,%xmm7
7133
7134# qhasm: xmm2 ^= ONE
7135# asm 1: pxor ONE,<xmm2=int6464#3
7136# asm 2: pxor ONE,<xmm2=%xmm2
7137pxor ONE,%xmm2
7138
7139# qhasm: shuffle bytes of xmm0 by ROTB
7140# asm 1: pshufb ROTB,<xmm0=int6464#1
7141# asm 2: pshufb ROTB,<xmm0=%xmm0
7142pshufb ROTB,%xmm0
7143
7144# qhasm: shuffle bytes of xmm1 by ROTB
7145# asm 1: pshufb ROTB,<xmm1=int6464#2
7146# asm 2: pshufb ROTB,<xmm1=%xmm1
7147pshufb ROTB,%xmm1
7148
7149# qhasm: shuffle bytes of xmm4 by ROTB
7150# asm 1: pshufb ROTB,<xmm4=int6464#5
7151# asm 2: pshufb ROTB,<xmm4=%xmm4
7152pshufb ROTB,%xmm4
7153
7154# qhasm: shuffle bytes of xmm6 by ROTB
7155# asm 1: pshufb ROTB,<xmm6=int6464#7
7156# asm 2: pshufb ROTB,<xmm6=%xmm6
7157pshufb ROTB,%xmm6
7158
7159# qhasm: shuffle bytes of xmm3 by ROTB
7160# asm 1: pshufb ROTB,<xmm3=int6464#4
7161# asm 2: pshufb ROTB,<xmm3=%xmm3
7162pshufb ROTB,%xmm3
7163
7164# qhasm: shuffle bytes of xmm7 by ROTB
7165# asm 1: pshufb ROTB,<xmm7=int6464#8
7166# asm 2: pshufb ROTB,<xmm7=%xmm7
7167pshufb ROTB,%xmm7
7168
7169# qhasm: shuffle bytes of xmm2 by ROTB
7170# asm 1: pshufb ROTB,<xmm2=int6464#3
7171# asm 2: pshufb ROTB,<xmm2=%xmm2
7172pshufb ROTB,%xmm2
7173
7174# qhasm: shuffle bytes of xmm5 by ROTB
7175# asm 1: pshufb ROTB,<xmm5=int6464#6
7176# asm 2: pshufb ROTB,<xmm5=%xmm5
7177pshufb ROTB,%xmm5
7178
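# A second S-box evaluation begins here: the same gate network as above,
# acting on the permuted register assignment left by the previous round.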
7179# qhasm: xmm7 ^= xmm2
7180# asm 1: pxor <xmm2=int6464#3,<xmm7=int6464#8
7181# asm 2: pxor <xmm2=%xmm2,<xmm7=%xmm7
7182pxor %xmm2,%xmm7
7183
7184# qhasm: xmm4 ^= xmm1
7185# asm 1: pxor <xmm1=int6464#2,<xmm4=int6464#5
7186# asm 2: pxor <xmm1=%xmm1,<xmm4=%xmm4
7187pxor %xmm1,%xmm4
7188
7189# qhasm: xmm7 ^= xmm0
7190# asm 1: pxor <xmm0=int6464#1,<xmm7=int6464#8
7191# asm 2: pxor <xmm0=%xmm0,<xmm7=%xmm7
7192pxor %xmm0,%xmm7
7193
7194# qhasm: xmm2 ^= xmm4
7195# asm 1: pxor <xmm4=int6464#5,<xmm2=int6464#3
7196# asm 2: pxor <xmm4=%xmm4,<xmm2=%xmm2
7197pxor %xmm4,%xmm2
7198
7199# qhasm: xmm6 ^= xmm0
7200# asm 1: pxor <xmm0=int6464#1,<xmm6=int6464#7
7201# asm 2: pxor <xmm0=%xmm0,<xmm6=%xmm6
7202pxor %xmm0,%xmm6
7203
7204# qhasm: xmm2 ^= xmm6
7205# asm 1: pxor <xmm6=int6464#7,<xmm2=int6464#3
7206# asm 2: pxor <xmm6=%xmm6,<xmm2=%xmm2
7207pxor %xmm6,%xmm2
7208
7209# qhasm: xmm6 ^= xmm5
7210# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
7211# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
7212pxor %xmm5,%xmm6
7213
7214# qhasm: xmm6 ^= xmm3
7215# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
7216# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
7217pxor %xmm3,%xmm6
7218
7219# qhasm: xmm5 ^= xmm7
7220# asm 1: pxor <xmm7=int6464#8,<xmm5=int6464#6
7221# asm 2: pxor <xmm7=%xmm7,<xmm5=%xmm5
7222pxor %xmm7,%xmm5
7223
7224# qhasm: xmm6 ^= xmm1
7225# asm 1: pxor <xmm1=int6464#2,<xmm6=int6464#7
7226# asm 2: pxor <xmm1=%xmm1,<xmm6=%xmm6
7227pxor %xmm1,%xmm6
7228
7229# qhasm: xmm3 ^= xmm7
7230# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4
7231# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3
7232pxor %xmm7,%xmm3
7233
7234# qhasm: xmm4 ^= xmm5
7235# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
7236# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
7237pxor %xmm5,%xmm4
7238
7239# qhasm: xmm1 ^= xmm7
7240# asm 1: pxor <xmm7=int6464#8,<xmm1=int6464#2
7241# asm 2: pxor <xmm7=%xmm7,<xmm1=%xmm1
7242pxor %xmm7,%xmm1
7243
7244# qhasm: xmm11 = xmm5
7245# asm 1: movdqa <xmm5=int6464#6,>xmm11=int6464#9
7246# asm 2: movdqa <xmm5=%xmm5,>xmm11=%xmm8
7247movdqa %xmm5,%xmm8
7248
7249# qhasm: xmm10 = xmm1
7250# asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10
7251# asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9
7252movdqa %xmm1,%xmm9
7253
7254# qhasm: xmm9 = xmm7
7255# asm 1: movdqa <xmm7=int6464#8,>xmm9=int6464#11
7256# asm 2: movdqa <xmm7=%xmm7,>xmm9=%xmm10
7257movdqa %xmm7,%xmm10
7258
7259# qhasm: xmm13 = xmm4
7260# asm 1: movdqa <xmm4=int6464#5,>xmm13=int6464#12
7261# asm 2: movdqa <xmm4=%xmm4,>xmm13=%xmm11
7262movdqa %xmm4,%xmm11
7263
7264# qhasm: xmm12 = xmm2
7265# asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#13
7266# asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm12
7267movdqa %xmm2,%xmm12
7268
7269# qhasm: xmm11 ^= xmm3
7270# asm 1: pxor <xmm3=int6464#4,<xmm11=int6464#9
7271# asm 2: pxor <xmm3=%xmm3,<xmm11=%xmm8
7272pxor %xmm3,%xmm8
7273
7274# qhasm: xmm10 ^= xmm4
7275# asm 1: pxor <xmm4=int6464#5,<xmm10=int6464#10
7276# asm 2: pxor <xmm4=%xmm4,<xmm10=%xmm9
7277pxor %xmm4,%xmm9
7278
7279# qhasm: xmm9 ^= xmm6
7280# asm 1: pxor <xmm6=int6464#7,<xmm9=int6464#11
7281# asm 2: pxor <xmm6=%xmm6,<xmm9=%xmm10
7282pxor %xmm6,%xmm10
7283
7284# qhasm: xmm13 ^= xmm3
7285# asm 1: pxor <xmm3=int6464#4,<xmm13=int6464#12
7286# asm 2: pxor <xmm3=%xmm3,<xmm13=%xmm11
7287pxor %xmm3,%xmm11
7288
7289# qhasm: xmm12 ^= xmm0
7290# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13
7291# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12
7292pxor %xmm0,%xmm12
7293
7294# qhasm: xmm14 = xmm11
7295# asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14
7296# asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13
7297movdqa %xmm8,%xmm13
7298
7299# qhasm: xmm8 = xmm10
7300# asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15
7301# asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14
7302movdqa %xmm9,%xmm14
7303
7304# qhasm: xmm15 = xmm11
7305# asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16
7306# asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15
7307movdqa %xmm8,%xmm15
7308
7309# qhasm: xmm10 |= xmm9
7310# asm 1: por <xmm9=int6464#11,<xmm10=int6464#10
7311# asm 2: por <xmm9=%xmm10,<xmm10=%xmm9
7312por %xmm10,%xmm9
7313
7314# qhasm: xmm11 |= xmm12
7315# asm 1: por <xmm12=int6464#13,<xmm11=int6464#9
7316# asm 2: por <xmm12=%xmm12,<xmm11=%xmm8
7317por %xmm12,%xmm8
7318
7319# qhasm: xmm15 ^= xmm8
7320# asm 1: pxor <xmm8=int6464#15,<xmm15=int6464#16
7321# asm 2: pxor <xmm8=%xmm14,<xmm15=%xmm15
7322pxor %xmm14,%xmm15
7323
7324# qhasm: xmm14 &= xmm12
7325# asm 1: pand <xmm12=int6464#13,<xmm14=int6464#14
7326# asm 2: pand <xmm12=%xmm12,<xmm14=%xmm13
7327pand %xmm12,%xmm13
7328
7329# qhasm: xmm8 &= xmm9
7330# asm 1: pand <xmm9=int6464#11,<xmm8=int6464#15
7331# asm 2: pand <xmm9=%xmm10,<xmm8=%xmm14
7332pand %xmm10,%xmm14
7333
7334# qhasm: xmm12 ^= xmm9
7335# asm 1: pxor <xmm9=int6464#11,<xmm12=int6464#13
7336# asm 2: pxor <xmm9=%xmm10,<xmm12=%xmm12
7337pxor %xmm10,%xmm12
7338
7339# qhasm: xmm15 &= xmm12
7340# asm 1: pand <xmm12=int6464#13,<xmm15=int6464#16
7341# asm 2: pand <xmm12=%xmm12,<xmm15=%xmm15
7342pand %xmm12,%xmm15
7343
7344# qhasm: xmm12 = xmm6
7345# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#11
7346# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm10
7347movdqa %xmm6,%xmm10
7348
7349# qhasm: xmm12 ^= xmm0
7350# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#11
7351# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm10
7352pxor %xmm0,%xmm10
7353
7354# qhasm: xmm13 &= xmm12
7355# asm 1: pand <xmm12=int6464#11,<xmm13=int6464#12
7356# asm 2: pand <xmm12=%xmm10,<xmm13=%xmm11
7357pand %xmm10,%xmm11
7358
7359# qhasm: xmm11 ^= xmm13
7360# asm 1: pxor <xmm13=int6464#12,<xmm11=int6464#9
7361# asm 2: pxor <xmm13=%xmm11,<xmm11=%xmm8
7362pxor %xmm11,%xmm8
7363
7364# qhasm: xmm10 ^= xmm13
7365# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
7366# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
7367pxor %xmm11,%xmm9
7368
7369# qhasm: xmm13 = xmm5
7370# asm 1: movdqa <xmm5=int6464#6,>xmm13=int6464#11
7371# asm 2: movdqa <xmm5=%xmm5,>xmm13=%xmm10
7372movdqa %xmm5,%xmm10
7373
7374# qhasm: xmm13 ^= xmm1
7375# asm 1: pxor <xmm1=int6464#2,<xmm13=int6464#11
7376# asm 2: pxor <xmm1=%xmm1,<xmm13=%xmm10
7377pxor %xmm1,%xmm10
7378
7379# qhasm: xmm12 = xmm7
7380# asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#12
7381# asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm11
7382movdqa %xmm7,%xmm11
7383
7384# qhasm: xmm9 = xmm13
7385# asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13
7386# asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12
7387movdqa %xmm10,%xmm12
7388
7389# qhasm: xmm12 ^= xmm2
7390# asm 1: pxor <xmm2=int6464#3,<xmm12=int6464#12
7391# asm 2: pxor <xmm2=%xmm2,<xmm12=%xmm11
7392pxor %xmm2,%xmm11
7393
7394# qhasm: xmm9 |= xmm12
7395# asm 1: por <xmm12=int6464#12,<xmm9=int6464#13
7396# asm 2: por <xmm12=%xmm11,<xmm9=%xmm12
7397por %xmm11,%xmm12
7398
7399# qhasm: xmm13 &= xmm12
7400# asm 1: pand <xmm12=int6464#12,<xmm13=int6464#11
7401# asm 2: pand <xmm12=%xmm11,<xmm13=%xmm10
7402pand %xmm11,%xmm10
7403
7404# qhasm: xmm8 ^= xmm13
7405# asm 1: pxor <xmm13=int6464#11,<xmm8=int6464#15
7406# asm 2: pxor <xmm13=%xmm10,<xmm8=%xmm14
7407pxor %xmm10,%xmm14
7408
7409# qhasm: xmm11 ^= xmm15
7410# asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#9
7411# asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm8
7412pxor %xmm15,%xmm8
7413
7414# qhasm: xmm10 ^= xmm14
7415# asm 1: pxor <xmm14=int6464#14,<xmm10=int6464#10
7416# asm 2: pxor <xmm14=%xmm13,<xmm10=%xmm9
7417pxor %xmm13,%xmm9
7418
7419# qhasm: xmm9 ^= xmm15
7420# asm 1: pxor <xmm15=int6464#16,<xmm9=int6464#13
7421# asm 2: pxor <xmm15=%xmm15,<xmm9=%xmm12
7422pxor %xmm15,%xmm12
7423
7424# qhasm: xmm8 ^= xmm14
7425# asm 1: pxor <xmm14=int6464#14,<xmm8=int6464#15
7426# asm 2: pxor <xmm14=%xmm13,<xmm8=%xmm14
7427pxor %xmm13,%xmm14
7428
7429# qhasm: xmm9 ^= xmm14
7430# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
7431# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
7432pxor %xmm13,%xmm12
7433
7434# qhasm: xmm12 = xmm4
7435# asm 1: movdqa <xmm4=int6464#5,>xmm12=int6464#11
7436# asm 2: movdqa <xmm4=%xmm4,>xmm12=%xmm10
7437movdqa %xmm4,%xmm10
7438
7439# qhasm: xmm13 = xmm3
7440# asm 1: movdqa <xmm3=int6464#4,>xmm13=int6464#12
7441# asm 2: movdqa <xmm3=%xmm3,>xmm13=%xmm11
7442movdqa %xmm3,%xmm11
7443
7444# qhasm: xmm14 = xmm1
7445# asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14
7446# asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13
7447movdqa %xmm1,%xmm13
7448
7449# qhasm: xmm15 = xmm5
7450# asm 1: movdqa <xmm5=int6464#6,>xmm15=int6464#16
7451# asm 2: movdqa <xmm5=%xmm5,>xmm15=%xmm15
7452movdqa %xmm5,%xmm15
7453
7454# qhasm: xmm12 &= xmm6
7455# asm 1: pand <xmm6=int6464#7,<xmm12=int6464#11
7456# asm 2: pand <xmm6=%xmm6,<xmm12=%xmm10
7457pand %xmm6,%xmm10
7458
7459# qhasm: xmm13 &= xmm0
7460# asm 1: pand <xmm0=int6464#1,<xmm13=int6464#12
7461# asm 2: pand <xmm0=%xmm0,<xmm13=%xmm11
7462pand %xmm0,%xmm11
7463
7464# qhasm: xmm14 &= xmm7
7465# asm 1: pand <xmm7=int6464#8,<xmm14=int6464#14
7466# asm 2: pand <xmm7=%xmm7,<xmm14=%xmm13
7467pand %xmm7,%xmm13
7468
7469# qhasm: xmm15 |= xmm2
7470# asm 1: por <xmm2=int6464#3,<xmm15=int6464#16
7471# asm 2: por <xmm2=%xmm2,<xmm15=%xmm15
7472por %xmm2,%xmm15
7473
7474# qhasm: xmm11 ^= xmm12
7475# asm 1: pxor <xmm12=int6464#11,<xmm11=int6464#9
7476# asm 2: pxor <xmm12=%xmm10,<xmm11=%xmm8
7477pxor %xmm10,%xmm8
7478
7479# qhasm: xmm10 ^= xmm13
7480# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
7481# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
7482pxor %xmm11,%xmm9
7483
7484# qhasm: xmm9 ^= xmm14
7485# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
7486# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
7487pxor %xmm13,%xmm12
7488
7489# qhasm: xmm8 ^= xmm15
7490# asm 1: pxor <xmm15=int6464#16,<xmm8=int6464#15
7491# asm 2: pxor <xmm15=%xmm15,<xmm8=%xmm14
7492pxor %xmm15,%xmm14
7493
7494# qhasm: xmm12 = xmm11
7495# asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11
7496# asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10
7497movdqa %xmm8,%xmm10
7498
7499# qhasm: xmm12 ^= xmm10
7500# asm 1: pxor <xmm10=int6464#10,<xmm12=int6464#11
7501# asm 2: pxor <xmm10=%xmm9,<xmm12=%xmm10
7502pxor %xmm9,%xmm10
7503
7504# qhasm: xmm11 &= xmm9
7505# asm 1: pand <xmm9=int6464#13,<xmm11=int6464#9
7506# asm 2: pand <xmm9=%xmm12,<xmm11=%xmm8
7507pand %xmm12,%xmm8
7508
7509# qhasm: xmm14 = xmm8
7510# asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12
7511# asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11
7512movdqa %xmm14,%xmm11
7513
7514# qhasm: xmm14 ^= xmm11
7515# asm 1: pxor <xmm11=int6464#9,<xmm14=int6464#12
7516# asm 2: pxor <xmm11=%xmm8,<xmm14=%xmm11
7517pxor %xmm8,%xmm11
7518
7519# qhasm: xmm15 = xmm12
7520# asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14
7521# asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13
7522movdqa %xmm10,%xmm13
7523
7524# qhasm: xmm15 &= xmm14
7525# asm 1: pand <xmm14=int6464#12,<xmm15=int6464#14
7526# asm 2: pand <xmm14=%xmm11,<xmm15=%xmm13
7527pand %xmm11,%xmm13
7528
7529# qhasm: xmm15 ^= xmm10
7530# asm 1: pxor <xmm10=int6464#10,<xmm15=int6464#14
7531# asm 2: pxor <xmm10=%xmm9,<xmm15=%xmm13
7532pxor %xmm9,%xmm13
7533
7534# qhasm: xmm13 = xmm9
7535# asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16
7536# asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15
7537movdqa %xmm12,%xmm15
7538
7539# qhasm: xmm13 ^= xmm8
7540# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
7541# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
7542pxor %xmm14,%xmm15
7543
7544# qhasm: xmm11 ^= xmm10
7545# asm 1: pxor <xmm10=int6464#10,<xmm11=int6464#9
7546# asm 2: pxor <xmm10=%xmm9,<xmm11=%xmm8
7547pxor %xmm9,%xmm8
7548
7549# qhasm: xmm13 &= xmm11
7550# asm 1: pand <xmm11=int6464#9,<xmm13=int6464#16
7551# asm 2: pand <xmm11=%xmm8,<xmm13=%xmm15
7552pand %xmm8,%xmm15
7553
7554# qhasm: xmm13 ^= xmm8
7555# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
7556# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
7557pxor %xmm14,%xmm15
7558
7559# qhasm: xmm9 ^= xmm13
7560# asm 1: pxor <xmm13=int6464#16,<xmm9=int6464#13
7561# asm 2: pxor <xmm13=%xmm15,<xmm9=%xmm12
7562pxor %xmm15,%xmm12
7563
7564# qhasm: xmm10 = xmm14
7565# asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9
7566# asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8
7567movdqa %xmm11,%xmm8
7568
7569# qhasm: xmm10 ^= xmm13
7570# asm 1: pxor <xmm13=int6464#16,<xmm10=int6464#9
7571# asm 2: pxor <xmm13=%xmm15,<xmm10=%xmm8
7572pxor %xmm15,%xmm8
7573
7574# qhasm: xmm10 &= xmm8
7575# asm 1: pand <xmm8=int6464#15,<xmm10=int6464#9
7576# asm 2: pand <xmm8=%xmm14,<xmm10=%xmm8
7577pand %xmm14,%xmm8
7578
7579# qhasm: xmm9 ^= xmm10
7580# asm 1: pxor <xmm10=int6464#9,<xmm9=int6464#13
7581# asm 2: pxor <xmm10=%xmm8,<xmm9=%xmm12
7582pxor %xmm8,%xmm12
7583
7584# qhasm: xmm14 ^= xmm10
7585# asm 1: pxor <xmm10=int6464#9,<xmm14=int6464#12
7586# asm 2: pxor <xmm10=%xmm8,<xmm14=%xmm11
7587pxor %xmm8,%xmm11
7588
7589# qhasm: xmm14 &= xmm15
7590# asm 1: pand <xmm15=int6464#14,<xmm14=int6464#12
7591# asm 2: pand <xmm15=%xmm13,<xmm14=%xmm11
7592pand %xmm13,%xmm11
7593
7594# qhasm: xmm14 ^= xmm12
7595# asm 1: pxor <xmm12=int6464#11,<xmm14=int6464#12
7596# asm 2: pxor <xmm12=%xmm10,<xmm14=%xmm11
7597pxor %xmm10,%xmm11
7598
7599# qhasm: xmm12 = xmm2
7600# asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#9
7601# asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm8
7602movdqa %xmm2,%xmm8
7603
7604# qhasm: xmm8 = xmm7
7605# asm 1: movdqa <xmm7=int6464#8,>xmm8=int6464#10
7606# asm 2: movdqa <xmm7=%xmm7,>xmm8=%xmm9
7607movdqa %xmm7,%xmm9
7608
7609# qhasm: xmm10 = xmm15
7610# asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11
7611# asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10
7612movdqa %xmm13,%xmm10
7613
7614# qhasm: xmm10 ^= xmm14
7615# asm 1: pxor <xmm14=int6464#12,<xmm10=int6464#11
7616# asm 2: pxor <xmm14=%xmm11,<xmm10=%xmm10
7617pxor %xmm11,%xmm10
7618
7619# qhasm: xmm10 &= xmm2
7620# asm 1: pand <xmm2=int6464#3,<xmm10=int6464#11
7621# asm 2: pand <xmm2=%xmm2,<xmm10=%xmm10
7622pand %xmm2,%xmm10
7623
7624# qhasm: xmm2 ^= xmm7
7625# asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3
7626# asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2
7627pxor %xmm7,%xmm2
7628
7629# qhasm: xmm2 &= xmm14
7630# asm 1: pand <xmm14=int6464#12,<xmm2=int6464#3
7631# asm 2: pand <xmm14=%xmm11,<xmm2=%xmm2
7632pand %xmm11,%xmm2
7633
7634# qhasm: xmm7 &= xmm15
7635# asm 1: pand <xmm15=int6464#14,<xmm7=int6464#8
7636# asm 2: pand <xmm15=%xmm13,<xmm7=%xmm7
7637pand %xmm13,%xmm7
7638
7639# qhasm: xmm2 ^= xmm7
7640# asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3
7641# asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2
7642pxor %xmm7,%xmm2
7643
7644# qhasm: xmm7 ^= xmm10
7645# asm 1: pxor <xmm10=int6464#11,<xmm7=int6464#8
7646# asm 2: pxor <xmm10=%xmm10,<xmm7=%xmm7
7647pxor %xmm10,%xmm7
7648
7649# qhasm: xmm12 ^= xmm0
7650# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#9
7651# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm8
7652pxor %xmm0,%xmm8
7653
7654# qhasm: xmm8 ^= xmm6
7655# asm 1: pxor <xmm6=int6464#7,<xmm8=int6464#10
7656# asm 2: pxor <xmm6=%xmm6,<xmm8=%xmm9
7657pxor %xmm6,%xmm9
7658
7659# qhasm: xmm15 ^= xmm13
7660# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
7661# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
7662pxor %xmm15,%xmm13
7663
7664# qhasm: xmm14 ^= xmm9
7665# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
7666# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
7667pxor %xmm12,%xmm11
7668
7669# qhasm: xmm11 = xmm15
7670# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
7671# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
7672movdqa %xmm13,%xmm10
7673
7674# qhasm: xmm11 ^= xmm14
7675# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
7676# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
7677pxor %xmm11,%xmm10
7678
7679# qhasm: xmm11 &= xmm12
7680# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
7681# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
7682pand %xmm8,%xmm10
7683
7684# qhasm: xmm12 ^= xmm8
7685# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
7686# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
7687pxor %xmm9,%xmm8
7688
7689# qhasm: xmm12 &= xmm14
7690# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
7691# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
7692pand %xmm11,%xmm8
7693
7694# qhasm: xmm8 &= xmm15
7695# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
7696# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
7697pand %xmm13,%xmm9
7698
7699# qhasm: xmm8 ^= xmm12
7700# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
7701# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
7702pxor %xmm8,%xmm9
7703
7704# qhasm: xmm12 ^= xmm11
7705# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
7706# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
7707pxor %xmm10,%xmm8
7708
7709# qhasm: xmm10 = xmm13
7710# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
7711# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
7712movdqa %xmm15,%xmm10
7713
7714# qhasm: xmm10 ^= xmm9
7715# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
7716# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
7717pxor %xmm12,%xmm10
7718
7719# qhasm: xmm10 &= xmm0
7720# asm 1: pand <xmm0=int6464#1,<xmm10=int6464#11
7721# asm 2: pand <xmm0=%xmm0,<xmm10=%xmm10
7722pand %xmm0,%xmm10
7723
7724# qhasm: xmm0 ^= xmm6
7725# asm 1: pxor <xmm6=int6464#7,<xmm0=int6464#1
7726# asm 2: pxor <xmm6=%xmm6,<xmm0=%xmm0
7727pxor %xmm6,%xmm0
7728
7729# qhasm: xmm0 &= xmm9
7730# asm 1: pand <xmm9=int6464#13,<xmm0=int6464#1
7731# asm 2: pand <xmm9=%xmm12,<xmm0=%xmm0
7732pand %xmm12,%xmm0
7733
7734# qhasm: xmm6 &= xmm13
7735# asm 1: pand <xmm13=int6464#16,<xmm6=int6464#7
7736# asm 2: pand <xmm13=%xmm15,<xmm6=%xmm6
7737pand %xmm15,%xmm6
7738
7739# qhasm: xmm0 ^= xmm6
7740# asm 1: pxor <xmm6=int6464#7,<xmm0=int6464#1
7741# asm 2: pxor <xmm6=%xmm6,<xmm0=%xmm0
7742pxor %xmm6,%xmm0
7743
7744# qhasm: xmm6 ^= xmm10
7745# asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7
7746# asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6
7747pxor %xmm10,%xmm6
7748
7749# qhasm: xmm2 ^= xmm12
7750# asm 1: pxor <xmm12=int6464#9,<xmm2=int6464#3
7751# asm 2: pxor <xmm12=%xmm8,<xmm2=%xmm2
7752pxor %xmm8,%xmm2
7753
7754# qhasm: xmm0 ^= xmm12
7755# asm 1: pxor <xmm12=int6464#9,<xmm0=int6464#1
7756# asm 2: pxor <xmm12=%xmm8,<xmm0=%xmm0
7757pxor %xmm8,%xmm0
7758
7759# qhasm: xmm7 ^= xmm8
7760# asm 1: pxor <xmm8=int6464#10,<xmm7=int6464#8
7761# asm 2: pxor <xmm8=%xmm9,<xmm7=%xmm7
7762pxor %xmm9,%xmm7
7763
7764# qhasm: xmm6 ^= xmm8
7765# asm 1: pxor <xmm8=int6464#10,<xmm6=int6464#7
7766# asm 2: pxor <xmm8=%xmm9,<xmm6=%xmm6
7767pxor %xmm9,%xmm6
7768
7769# qhasm: xmm12 = xmm5
7770# asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#9
7771# asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm8
7772movdqa %xmm5,%xmm8
7773
7774# qhasm: xmm8 = xmm1
7775# asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10
7776# asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9
7777movdqa %xmm1,%xmm9
7778
7779# qhasm: xmm12 ^= xmm3
7780# asm 1: pxor <xmm3=int6464#4,<xmm12=int6464#9
7781# asm 2: pxor <xmm3=%xmm3,<xmm12=%xmm8
7782pxor %xmm3,%xmm8
7783
7784# qhasm: xmm8 ^= xmm4
7785# asm 1: pxor <xmm4=int6464#5,<xmm8=int6464#10
7786# asm 2: pxor <xmm4=%xmm4,<xmm8=%xmm9
7787pxor %xmm4,%xmm9
7788
7789# qhasm: xmm11 = xmm15
7790# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
7791# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
7792movdqa %xmm13,%xmm10
7793
7794# qhasm: xmm11 ^= xmm14
7795# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
7796# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
7797pxor %xmm11,%xmm10
7798
7799# qhasm: xmm11 &= xmm12
7800# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
7801# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
7802pand %xmm8,%xmm10
7803
7804# qhasm: xmm12 ^= xmm8
7805# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
7806# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
7807pxor %xmm9,%xmm8
7808
7809# qhasm: xmm12 &= xmm14
7810# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
7811# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
7812pand %xmm11,%xmm8
7813
7814# qhasm: xmm8 &= xmm15
7815# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
7816# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
7817pand %xmm13,%xmm9
7818
7819# qhasm: xmm8 ^= xmm12
7820# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
7821# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
7822pxor %xmm8,%xmm9
7823
7824# qhasm: xmm12 ^= xmm11
7825# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
7826# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
7827pxor %xmm10,%xmm8
7828
7829# qhasm: xmm10 = xmm13
7830# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
7831# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
7832movdqa %xmm15,%xmm10
7833
7834# qhasm: xmm10 ^= xmm9
7835# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
7836# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
7837pxor %xmm12,%xmm10
7838
7839# qhasm: xmm10 &= xmm3
7840# asm 1: pand <xmm3=int6464#4,<xmm10=int6464#11
7841# asm 2: pand <xmm3=%xmm3,<xmm10=%xmm10
7842pand %xmm3,%xmm10
7843
7844# qhasm: xmm3 ^= xmm4
7845# asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4
7846# asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3
7847pxor %xmm4,%xmm3
7848
7849# qhasm: xmm3 &= xmm9
7850# asm 1: pand <xmm9=int6464#13,<xmm3=int6464#4
7851# asm 2: pand <xmm9=%xmm12,<xmm3=%xmm3
7852pand %xmm12,%xmm3
7853
7854# qhasm: xmm4 &= xmm13
7855# asm 1: pand <xmm13=int6464#16,<xmm4=int6464#5
7856# asm 2: pand <xmm13=%xmm15,<xmm4=%xmm4
7857pand %xmm15,%xmm4
7858
7859# qhasm: xmm3 ^= xmm4
7860# asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4
7861# asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3
7862pxor %xmm4,%xmm3
7863
7864# qhasm: xmm4 ^= xmm10
7865# asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5
7866# asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4
7867pxor %xmm10,%xmm4
7868
7869# qhasm: xmm15 ^= xmm13
7870# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
7871# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
7872pxor %xmm15,%xmm13
7873
7874# qhasm: xmm14 ^= xmm9
7875# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
7876# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
7877pxor %xmm12,%xmm11
7878
7879# qhasm: xmm11 = xmm15
7880# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
7881# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
7882movdqa %xmm13,%xmm10
7883
7884# qhasm: xmm11 ^= xmm14
7885# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
7886# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
7887pxor %xmm11,%xmm10
7888
7889# qhasm: xmm11 &= xmm5
7890# asm 1: pand <xmm5=int6464#6,<xmm11=int6464#11
7891# asm 2: pand <xmm5=%xmm5,<xmm11=%xmm10
7892pand %xmm5,%xmm10
7893
7894# qhasm: xmm5 ^= xmm1
7895# asm 1: pxor <xmm1=int6464#2,<xmm5=int6464#6
7896# asm 2: pxor <xmm1=%xmm1,<xmm5=%xmm5
7897pxor %xmm1,%xmm5
7898
7899# qhasm: xmm5 &= xmm14
7900# asm 1: pand <xmm14=int6464#12,<xmm5=int6464#6
7901# asm 2: pand <xmm14=%xmm11,<xmm5=%xmm5
7902pand %xmm11,%xmm5
7903
7904# qhasm: xmm1 &= xmm15
7905# asm 1: pand <xmm15=int6464#14,<xmm1=int6464#2
7906# asm 2: pand <xmm15=%xmm13,<xmm1=%xmm1
7907pand %xmm13,%xmm1
7908
7909# qhasm: xmm5 ^= xmm1
7910# asm 1: pxor <xmm1=int6464#2,<xmm5=int6464#6
7911# asm 2: pxor <xmm1=%xmm1,<xmm5=%xmm5
7912pxor %xmm1,%xmm5
7913
7914# qhasm: xmm1 ^= xmm11
7915# asm 1: pxor <xmm11=int6464#11,<xmm1=int6464#2
7916# asm 2: pxor <xmm11=%xmm10,<xmm1=%xmm1
7917pxor %xmm10,%xmm1
7918
7919# qhasm: xmm5 ^= xmm12
7920# asm 1: pxor <xmm12=int6464#9,<xmm5=int6464#6
7921# asm 2: pxor <xmm12=%xmm8,<xmm5=%xmm5
7922pxor %xmm8,%xmm5
7923
7924# qhasm: xmm3 ^= xmm12
7925# asm 1: pxor <xmm12=int6464#9,<xmm3=int6464#4
7926# asm 2: pxor <xmm12=%xmm8,<xmm3=%xmm3
7927pxor %xmm8,%xmm3
7928
7929# qhasm: xmm1 ^= xmm8
7930# asm 1: pxor <xmm8=int6464#10,<xmm1=int6464#2
7931# asm 2: pxor <xmm8=%xmm9,<xmm1=%xmm1
7932pxor %xmm9,%xmm1
7933
7934# qhasm: xmm4 ^= xmm8
7935# asm 1: pxor <xmm8=int6464#10,<xmm4=int6464#5
7936# asm 2: pxor <xmm8=%xmm9,<xmm4=%xmm4
7937pxor %xmm9,%xmm4
7938
7939# qhasm: xmm5 ^= xmm0
7940# asm 1: pxor <xmm0=int6464#1,<xmm5=int6464#6
7941# asm 2: pxor <xmm0=%xmm0,<xmm5=%xmm5
7942pxor %xmm0,%xmm5
7943
7944# qhasm: xmm1 ^= xmm2
7945# asm 1: pxor <xmm2=int6464#3,<xmm1=int6464#2
7946# asm 2: pxor <xmm2=%xmm2,<xmm1=%xmm1
7947pxor %xmm2,%xmm1
7948
7949# qhasm: xmm3 ^= xmm5
7950# asm 1: pxor <xmm5=int6464#6,<xmm3=int6464#4
7951# asm 2: pxor <xmm5=%xmm5,<xmm3=%xmm3
7952pxor %xmm5,%xmm3
7953
7954# qhasm: xmm2 ^= xmm0
7955# asm 1: pxor <xmm0=int6464#1,<xmm2=int6464#3
7956# asm 2: pxor <xmm0=%xmm0,<xmm2=%xmm2
7957pxor %xmm0,%xmm2
7958
7959# qhasm: xmm0 ^= xmm1
7960# asm 1: pxor <xmm1=int6464#2,<xmm0=int6464#1
7961# asm 2: pxor <xmm1=%xmm1,<xmm0=%xmm0
7962pxor %xmm1,%xmm0
7963
7964# qhasm: xmm1 ^= xmm7
7965# asm 1: pxor <xmm7=int6464#8,<xmm1=int6464#2
7966# asm 2: pxor <xmm7=%xmm7,<xmm1=%xmm1
7967pxor %xmm7,%xmm1
7968
7969# qhasm: xmm7 ^= xmm4
7970# asm 1: pxor <xmm4=int6464#5,<xmm7=int6464#8
7971# asm 2: pxor <xmm4=%xmm4,<xmm7=%xmm7
7972pxor %xmm4,%xmm7
7973
7974# qhasm: xmm3 ^= xmm7
7975# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4
7976# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3
7977pxor %xmm7,%xmm3
7978
7979# qhasm: xmm4 ^= xmm6
7980# asm 1: pxor <xmm6=int6464#7,<xmm4=int6464#5
7981# asm 2: pxor <xmm6=%xmm6,<xmm4=%xmm4
7982pxor %xmm6,%xmm4
7983
7984# qhasm: xmm6 ^= xmm7
7985# asm 1: pxor <xmm7=int6464#8,<xmm6=int6464#7
7986# asm 2: pxor <xmm7=%xmm7,<xmm6=%xmm6
7987pxor %xmm7,%xmm6
7988
7989# qhasm: xmm2 ^= xmm6
7990# asm 1: pxor <xmm6=int6464#7,<xmm2=int6464#3
7991# asm 2: pxor <xmm6=%xmm6,<xmm2=%xmm2
7992pxor %xmm6,%xmm2
7993
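# S-box evaluation done; now add the round constant. RCON (data section) is
# all-ones in its top 32-bit lane only. This iteration produces the round key
# stored at c + 768 (round key 6), whose rcon byte is 0x20 = bit 5 -- matching
# the xor below into xmm5, the register later stored as bit plane 5.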
7994# qhasm: xmm5 ^= RCON
7995# asm 1: pxor RCON,<xmm5=int6464#6
7996# asm 2: pxor RCON,<xmm5=%xmm5
7997pxor RCON,%xmm5
7998
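# EXPB0 = (0x03030303, 0x07070707, 0x0b0b0b0b, 0x0f0f0f0f): each pshufb below
# replicates the top byte of every 32-bit word across that word, broadcasting
# the S-box output byte that feeds the key-expansion recurrence.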
7999# qhasm: shuffle bytes of xmm0 by EXPB0
8000# asm 1: pshufb EXPB0,<xmm0=int6464#1
8001# asm 2: pshufb EXPB0,<xmm0=%xmm0
8002pshufb EXPB0,%xmm0
8003
8004# qhasm: shuffle bytes of xmm1 by EXPB0
8005# asm 1: pshufb EXPB0,<xmm1=int6464#2
8006# asm 2: pshufb EXPB0,<xmm1=%xmm1
8007pshufb EXPB0,%xmm1
8008
8009# qhasm: shuffle bytes of xmm3 by EXPB0
8010# asm 1: pshufb EXPB0,<xmm3=int6464#4
8011# asm 2: pshufb EXPB0,<xmm3=%xmm3
8012pshufb EXPB0,%xmm3
8013
8014# qhasm: shuffle bytes of xmm2 by EXPB0
8015# asm 1: pshufb EXPB0,<xmm2=int6464#3
8016# asm 2: pshufb EXPB0,<xmm2=%xmm2
8017pshufb EXPB0,%xmm2
8018
8019# qhasm: shuffle bytes of xmm6 by EXPB0
8020# asm 1: pshufb EXPB0,<xmm6=int6464#7
8021# asm 2: pshufb EXPB0,<xmm6=%xmm6
8022pshufb EXPB0,%xmm6
8023
8024# qhasm: shuffle bytes of xmm5 by EXPB0
8025# asm 1: pshufb EXPB0,<xmm5=int6464#6
8026# asm 2: pshufb EXPB0,<xmm5=%xmm5
8027pshufb EXPB0,%xmm5
8028
8029# qhasm: shuffle bytes of xmm4 by EXPB0
8030# asm 1: pshufb EXPB0,<xmm4=int6464#5
8031# asm 2: pshufb EXPB0,<xmm4=%xmm4
8032pshufb EXPB0,%xmm4
8033
8034# qhasm: shuffle bytes of xmm7 by EXPB0
8035# asm 1: pshufb EXPB0,<xmm7=int6464#8
8036# asm 2: pshufb EXPB0,<xmm7=%xmm7
8037pshufb EXPB0,%xmm7
8038
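# Load the previous round key (round key 5, bit planes at c + 640 .. c + 752;
# each bitsliced round key occupies 8 * 16 = 128 bytes).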
8039# qhasm: xmm8 = *(int128 *)(c + 640)
8040# asm 1: movdqa 640(<c=int64#1),>xmm8=int6464#9
8041# asm 2: movdqa 640(<c=%rdi),>xmm8=%xmm8
8042movdqa 640(%rdi),%xmm8
8043
8044# qhasm: xmm9 = *(int128 *)(c + 656)
8045# asm 1: movdqa 656(<c=int64#1),>xmm9=int6464#10
8046# asm 2: movdqa 656(<c=%rdi),>xmm9=%xmm9
8047movdqa 656(%rdi),%xmm9
8048
8049# qhasm: xmm10 = *(int128 *)(c + 672)
8050# asm 1: movdqa 672(<c=int64#1),>xmm10=int6464#11
8051# asm 2: movdqa 672(<c=%rdi),>xmm10=%xmm10
8052movdqa 672(%rdi),%xmm10
8053
8054# qhasm: xmm11 = *(int128 *)(c + 688)
8055# asm 1: movdqa 688(<c=int64#1),>xmm11=int6464#12
8056# asm 2: movdqa 688(<c=%rdi),>xmm11=%xmm11
8057movdqa 688(%rdi),%xmm11
8058
8059# qhasm: xmm12 = *(int128 *)(c + 704)
8060# asm 1: movdqa 704(<c=int64#1),>xmm12=int6464#13
8061# asm 2: movdqa 704(<c=%rdi),>xmm12=%xmm12
8062movdqa 704(%rdi),%xmm12
8063
8064# qhasm: xmm13 = *(int128 *)(c + 720)
8065# asm 1: movdqa 720(<c=int64#1),>xmm13=int6464#14
8066# asm 2: movdqa 720(<c=%rdi),>xmm13=%xmm13
8067movdqa 720(%rdi),%xmm13
8068
8069# qhasm: xmm14 = *(int128 *)(c + 736)
8070# asm 1: movdqa 736(<c=int64#1),>xmm14=int6464#15
8071# asm 2: movdqa 736(<c=%rdi),>xmm14=%xmm14
8072movdqa 736(%rdi),%xmm14
8073
8074# qhasm: xmm15 = *(int128 *)(c + 752)
8075# asm 1: movdqa 752(<c=int64#1),>xmm15=int6464#16
8076# asm 2: movdqa 752(<c=%rdi),>xmm15=%xmm15
8077movdqa 752(%rdi),%xmm15
8078
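# The S-box network above leaves out the additive constant 0x63 of the AES
# affine transform. 0x63 = 01100011b has bits 0, 1, 5 and 6 set, so flipping
# bit planes 0, 1, 5 and 6 (xmm8, xmm9, xmm13, xmm14 here) with the all-ones
# mask ONE restores it for every byte at once.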
8079# qhasm: xmm8 ^= ONE
8080# asm 1: pxor ONE,<xmm8=int6464#9
8081# asm 2: pxor ONE,<xmm8=%xmm8
8082pxor ONE,%xmm8
8083
8084# qhasm: xmm9 ^= ONE
8085# asm 1: pxor ONE,<xmm9=int6464#10
8086# asm 2: pxor ONE,<xmm9=%xmm9
8087pxor ONE,%xmm9
8088
8089# qhasm: xmm13 ^= ONE
8090# asm 1: pxor ONE,<xmm13=int6464#14
8091# asm 2: pxor ONE,<xmm13=%xmm13
8092pxor ONE,%xmm13
8093
8094# qhasm: xmm14 ^= ONE
8095# asm 1: pxor ONE,<xmm14=int6464#15
8096# asm 2: pxor ONE,<xmm14=%xmm14
8097pxor ONE,%xmm14
8098
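# Fold the old round key into the S-box output. The pattern below is
#     s ^= k; k >>= 8; s ^= k; k >>= 8; s ^= k; k >>= 8; s ^= k;
# per 32-bit lane of every bit plane, i.e. s ^= k ^ (k>>8) ^ (k>>16) ^ (k>>24).
# In this packing that is the running-xor form of the AES-128 recurrence,
# w'[i] = t ^ w[0] ^ ... ^ w[i] (a reading of the layout; the qhasm source
# does not spell it out).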
8099# qhasm: xmm0 ^= xmm8
8100# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
8101# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
8102pxor %xmm8,%xmm0
8103
8104# qhasm: xmm1 ^= xmm9
8105# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
8106# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
8107pxor %xmm9,%xmm1
8108
8109# qhasm: xmm3 ^= xmm10
8110# asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4
8111# asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3
8112pxor %xmm10,%xmm3
8113
8114# qhasm: xmm2 ^= xmm11
8115# asm 1: pxor <xmm11=int6464#12,<xmm2=int6464#3
8116# asm 2: pxor <xmm11=%xmm11,<xmm2=%xmm2
8117pxor %xmm11,%xmm2
8118
8119# qhasm: xmm6 ^= xmm12
8120# asm 1: pxor <xmm12=int6464#13,<xmm6=int6464#7
8121# asm 2: pxor <xmm12=%xmm12,<xmm6=%xmm6
8122pxor %xmm12,%xmm6
8123
8124# qhasm: xmm5 ^= xmm13
8125# asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6
8126# asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5
8127pxor %xmm13,%xmm5
8128
8129# qhasm: xmm4 ^= xmm14
8130# asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5
8131# asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4
8132pxor %xmm14,%xmm4
8133
8134# qhasm: xmm7 ^= xmm15
8135# asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8
8136# asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7
8137pxor %xmm15,%xmm7
8138
8139# qhasm: uint32323232 xmm8 >>= 8
8140# asm 1: psrld $8,<xmm8=int6464#9
8141# asm 2: psrld $8,<xmm8=%xmm8
8142psrld $8,%xmm8
8143
8144# qhasm: uint32323232 xmm9 >>= 8
8145# asm 1: psrld $8,<xmm9=int6464#10
8146# asm 2: psrld $8,<xmm9=%xmm9
8147psrld $8,%xmm9
8148
8149# qhasm: uint32323232 xmm10 >>= 8
8150# asm 1: psrld $8,<xmm10=int6464#11
8151# asm 2: psrld $8,<xmm10=%xmm10
8152psrld $8,%xmm10
8153
8154# qhasm: uint32323232 xmm11 >>= 8
8155# asm 1: psrld $8,<xmm11=int6464#12
8156# asm 2: psrld $8,<xmm11=%xmm11
8157psrld $8,%xmm11
8158
8159# qhasm: uint32323232 xmm12 >>= 8
8160# asm 1: psrld $8,<xmm12=int6464#13
8161# asm 2: psrld $8,<xmm12=%xmm12
8162psrld $8,%xmm12
8163
8164# qhasm: uint32323232 xmm13 >>= 8
8165# asm 1: psrld $8,<xmm13=int6464#14
8166# asm 2: psrld $8,<xmm13=%xmm13
8167psrld $8,%xmm13
8168
8169# qhasm: uint32323232 xmm14 >>= 8
8170# asm 1: psrld $8,<xmm14=int6464#15
8171# asm 2: psrld $8,<xmm14=%xmm14
8172psrld $8,%xmm14
8173
8174# qhasm: uint32323232 xmm15 >>= 8
8175# asm 1: psrld $8,<xmm15=int6464#16
8176# asm 2: psrld $8,<xmm15=%xmm15
8177psrld $8,%xmm15
8178
8179# qhasm: xmm0 ^= xmm8
8180# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
8181# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
8182pxor %xmm8,%xmm0
8183
8184# qhasm: xmm1 ^= xmm9
8185# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
8186# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
8187pxor %xmm9,%xmm1
8188
8189# qhasm: xmm3 ^= xmm10
8190# asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4
8191# asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3
8192pxor %xmm10,%xmm3
8193
8194# qhasm: xmm2 ^= xmm11
8195# asm 1: pxor <xmm11=int6464#12,<xmm2=int6464#3
8196# asm 2: pxor <xmm11=%xmm11,<xmm2=%xmm2
8197pxor %xmm11,%xmm2
8198
8199# qhasm: xmm6 ^= xmm12
8200# asm 1: pxor <xmm12=int6464#13,<xmm6=int6464#7
8201# asm 2: pxor <xmm12=%xmm12,<xmm6=%xmm6
8202pxor %xmm12,%xmm6
8203
8204# qhasm: xmm5 ^= xmm13
8205# asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6
8206# asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5
8207pxor %xmm13,%xmm5
8208
8209# qhasm: xmm4 ^= xmm14
8210# asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5
8211# asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4
8212pxor %xmm14,%xmm4
8213
8214# qhasm: xmm7 ^= xmm15
8215# asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8
8216# asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7
8217pxor %xmm15,%xmm7
8218
8219# qhasm: uint32323232 xmm8 >>= 8
8220# asm 1: psrld $8,<xmm8=int6464#9
8221# asm 2: psrld $8,<xmm8=%xmm8
8222psrld $8,%xmm8
8223
8224# qhasm: uint32323232 xmm9 >>= 8
8225# asm 1: psrld $8,<xmm9=int6464#10
8226# asm 2: psrld $8,<xmm9=%xmm9
8227psrld $8,%xmm9
8228
8229# qhasm: uint32323232 xmm10 >>= 8
8230# asm 1: psrld $8,<xmm10=int6464#11
8231# asm 2: psrld $8,<xmm10=%xmm10
8232psrld $8,%xmm10
8233
8234# qhasm: uint32323232 xmm11 >>= 8
8235# asm 1: psrld $8,<xmm11=int6464#12
8236# asm 2: psrld $8,<xmm11=%xmm11
8237psrld $8,%xmm11
8238
8239# qhasm: uint32323232 xmm12 >>= 8
8240# asm 1: psrld $8,<xmm12=int6464#13
8241# asm 2: psrld $8,<xmm12=%xmm12
8242psrld $8,%xmm12
8243
8244# qhasm: uint32323232 xmm13 >>= 8
8245# asm 1: psrld $8,<xmm13=int6464#14
8246# asm 2: psrld $8,<xmm13=%xmm13
8247psrld $8,%xmm13
8248
8249# qhasm: uint32323232 xmm14 >>= 8
8250# asm 1: psrld $8,<xmm14=int6464#15
8251# asm 2: psrld $8,<xmm14=%xmm14
8252psrld $8,%xmm14
8253
8254# qhasm: uint32323232 xmm15 >>= 8
8255# asm 1: psrld $8,<xmm15=int6464#16
8256# asm 2: psrld $8,<xmm15=%xmm15
8257psrld $8,%xmm15
8258
8259# qhasm: xmm0 ^= xmm8
8260# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
8261# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
8262pxor %xmm8,%xmm0
8263
8264# qhasm: xmm1 ^= xmm9
8265# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
8266# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
8267pxor %xmm9,%xmm1
8268
8269# qhasm: xmm3 ^= xmm10
8270# asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4
8271# asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3
8272pxor %xmm10,%xmm3
8273
8274# qhasm: xmm2 ^= xmm11
8275# asm 1: pxor <xmm11=int6464#12,<xmm2=int6464#3
8276# asm 2: pxor <xmm11=%xmm11,<xmm2=%xmm2
8277pxor %xmm11,%xmm2
8278
8279# qhasm: xmm6 ^= xmm12
8280# asm 1: pxor <xmm12=int6464#13,<xmm6=int6464#7
8281# asm 2: pxor <xmm12=%xmm12,<xmm6=%xmm6
8282pxor %xmm12,%xmm6
8283
8284# qhasm: xmm5 ^= xmm13
8285# asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6
8286# asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5
8287pxor %xmm13,%xmm5
8288
8289# qhasm: xmm4 ^= xmm14
8290# asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5
8291# asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4
8292pxor %xmm14,%xmm4
8293
8294# qhasm: xmm7 ^= xmm15
8295# asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8
8296# asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7
8297pxor %xmm15,%xmm7
8298
8299# qhasm: uint32323232 xmm8 >>= 8
8300# asm 1: psrld $8,<xmm8=int6464#9
8301# asm 2: psrld $8,<xmm8=%xmm8
8302psrld $8,%xmm8
8303
8304# qhasm: uint32323232 xmm9 >>= 8
8305# asm 1: psrld $8,<xmm9=int6464#10
8306# asm 2: psrld $8,<xmm9=%xmm9
8307psrld $8,%xmm9
8308
8309# qhasm: uint32323232 xmm10 >>= 8
8310# asm 1: psrld $8,<xmm10=int6464#11
8311# asm 2: psrld $8,<xmm10=%xmm10
8312psrld $8,%xmm10
8313
8314# qhasm: uint32323232 xmm11 >>= 8
8315# asm 1: psrld $8,<xmm11=int6464#12
8316# asm 2: psrld $8,<xmm11=%xmm11
8317psrld $8,%xmm11
8318
8319# qhasm: uint32323232 xmm12 >>= 8
8320# asm 1: psrld $8,<xmm12=int6464#13
8321# asm 2: psrld $8,<xmm12=%xmm12
8322psrld $8,%xmm12
8323
8324# qhasm: uint32323232 xmm13 >>= 8
8325# asm 1: psrld $8,<xmm13=int6464#14
8326# asm 2: psrld $8,<xmm13=%xmm13
8327psrld $8,%xmm13
8328
8329# qhasm: uint32323232 xmm14 >>= 8
8330# asm 1: psrld $8,<xmm14=int6464#15
8331# asm 2: psrld $8,<xmm14=%xmm14
8332psrld $8,%xmm14
8333
8334# qhasm: uint32323232 xmm15 >>= 8
8335# asm 1: psrld $8,<xmm15=int6464#16
8336# asm 2: psrld $8,<xmm15=%xmm15
8337psrld $8,%xmm15
8338
8339# qhasm: xmm0 ^= xmm8
8340# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
8341# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
8342pxor %xmm8,%xmm0
8343
8344# qhasm: xmm1 ^= xmm9
8345# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
8346# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
8347pxor %xmm9,%xmm1
8348
8349# qhasm: xmm3 ^= xmm10
8350# asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4
8351# asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3
8352pxor %xmm10,%xmm3
8353
8354# qhasm: xmm2 ^= xmm11
8355# asm 1: pxor <xmm11=int6464#12,<xmm2=int6464#3
8356# asm 2: pxor <xmm11=%xmm11,<xmm2=%xmm2
8357pxor %xmm11,%xmm2
8358
8359# qhasm: xmm6 ^= xmm12
8360# asm 1: pxor <xmm12=int6464#13,<xmm6=int6464#7
8361# asm 2: pxor <xmm12=%xmm12,<xmm6=%xmm6
8362pxor %xmm12,%xmm6
8363
8364# qhasm: xmm5 ^= xmm13
8365# asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6
8366# asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5
8367pxor %xmm13,%xmm5
8368
8369# qhasm: xmm4 ^= xmm14
8370# asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5
8371# asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4
8372pxor %xmm14,%xmm4
8373
8374# qhasm: xmm7 ^= xmm15
8375# asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8
8376# asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7
8377pxor %xmm15,%xmm7
8378
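# Store the finished round key 6 as bit planes 0..7 at c + 768 .. c + 880
# (768 = 6 * 128). Note the permuted register order: planes 2/3 come from
# xmm3/xmm2 and planes 4..6 from xmm6, xmm5, xmm4.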
8379# qhasm: *(int128 *)(c + 768) = xmm0
8380# asm 1: movdqa <xmm0=int6464#1,768(<c=int64#1)
8381# asm 2: movdqa <xmm0=%xmm0,768(<c=%rdi)
8382movdqa %xmm0,768(%rdi)
8383
8384# qhasm: *(int128 *)(c + 784) = xmm1
8385# asm 1: movdqa <xmm1=int6464#2,784(<c=int64#1)
8386# asm 2: movdqa <xmm1=%xmm1,784(<c=%rdi)
8387movdqa %xmm1,784(%rdi)
8388
8389# qhasm: *(int128 *)(c + 800) = xmm3
8390# asm 1: movdqa <xmm3=int6464#4,800(<c=int64#1)
8391# asm 2: movdqa <xmm3=%xmm3,800(<c=%rdi)
8392movdqa %xmm3,800(%rdi)
8393
8394# qhasm: *(int128 *)(c + 816) = xmm2
8395# asm 1: movdqa <xmm2=int6464#3,816(<c=int64#1)
8396# asm 2: movdqa <xmm2=%xmm2,816(<c=%rdi)
8397movdqa %xmm2,816(%rdi)
8398
8399# qhasm: *(int128 *)(c + 832) = xmm6
8400# asm 1: movdqa <xmm6=int6464#7,832(<c=int64#1)
8401# asm 2: movdqa <xmm6=%xmm6,832(<c=%rdi)
8402movdqa %xmm6,832(%rdi)
8403
8404# qhasm: *(int128 *)(c + 848) = xmm5
8405# asm 1: movdqa <xmm5=int6464#6,848(<c=int64#1)
8406# asm 2: movdqa <xmm5=%xmm5,848(<c=%rdi)
8407movdqa %xmm5,848(%rdi)
8408
8409# qhasm: *(int128 *)(c + 864) = xmm4
8410# asm 1: movdqa <xmm4=int6464#5,864(<c=int64#1)
8411# asm 2: movdqa <xmm4=%xmm4,864(<c=%rdi)
8412movdqa %xmm4,864(%rdi)
8413
8414# qhasm: *(int128 *)(c + 880) = xmm7
8415# asm 1: movdqa <xmm7=int6464#8,880(<c=int64#1)
8416# asm 2: movdqa <xmm7=%xmm7,880(<c=%rdi)
8417movdqa %xmm7,880(%rdi)
8418
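# Re-apply the 0x63 fixup to the copy kept in registers -- xmm0, xmm1, xmm5,
# xmm4 are exactly the registers just stored as planes 0, 1, 5, 6 -- then
# rotate bytes with ROTB so the byte feeding the next expansion step sits
# where the next EXPB0 broadcast will pick it up.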
8419# qhasm: xmm0 ^= ONE
8420# asm 1: pxor ONE,<xmm0=int6464#1
8421# asm 2: pxor ONE,<xmm0=%xmm0
8422pxor ONE,%xmm0
8423
8424# qhasm: xmm1 ^= ONE
8425# asm 1: pxor ONE,<xmm1=int6464#2
8426# asm 2: pxor ONE,<xmm1=%xmm1
8427pxor ONE,%xmm1
8428
8429# qhasm: xmm5 ^= ONE
8430# asm 1: pxor ONE,<xmm5=int6464#6
8431# asm 2: pxor ONE,<xmm5=%xmm5
8432pxor ONE,%xmm5
8433
8434# qhasm: xmm4 ^= ONE
8435# asm 1: pxor ONE,<xmm4=int6464#5
8436# asm 2: pxor ONE,<xmm4=%xmm4
8437pxor ONE,%xmm4
8438
8439# qhasm: shuffle bytes of xmm0 by ROTB
8440# asm 1: pshufb ROTB,<xmm0=int6464#1
8441# asm 2: pshufb ROTB,<xmm0=%xmm0
8442pshufb ROTB,%xmm0
8443
8444# qhasm: shuffle bytes of xmm1 by ROTB
8445# asm 1: pshufb ROTB,<xmm1=int6464#2
8446# asm 2: pshufb ROTB,<xmm1=%xmm1
8447pshufb ROTB,%xmm1
8448
8449# qhasm: shuffle bytes of xmm3 by ROTB
8450# asm 1: pshufb ROTB,<xmm3=int6464#4
8451# asm 2: pshufb ROTB,<xmm3=%xmm3
8452pshufb ROTB,%xmm3
8453
8454# qhasm: shuffle bytes of xmm2 by ROTB
8455# asm 1: pshufb ROTB,<xmm2=int6464#3
8456# asm 2: pshufb ROTB,<xmm2=%xmm2
8457pshufb ROTB,%xmm2
8458
8459# qhasm: shuffle bytes of xmm6 by ROTB
8460# asm 1: pshufb ROTB,<xmm6=int6464#7
8461# asm 2: pshufb ROTB,<xmm6=%xmm6
8462pshufb ROTB,%xmm6
8463
8464# qhasm: shuffle bytes of xmm5 by ROTB
8465# asm 1: pshufb ROTB,<xmm5=int6464#6
8466# asm 2: pshufb ROTB,<xmm5=%xmm5
8467pshufb ROTB,%xmm5
8468
8469# qhasm: shuffle bytes of xmm4 by ROTB
8470# asm 1: pshufb ROTB,<xmm4=int6464#5
8471# asm 2: pshufb ROTB,<xmm4=%xmm4
8472pshufb ROTB,%xmm4
8473
8474# qhasm: shuffle bytes of xmm7 by ROTB
8475# asm 1: pshufb ROTB,<xmm7=int6464#8
8476# asm 2: pshufb ROTB,<xmm7=%xmm7
8477pshufb ROTB,%xmm7
8478
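# Next round-key iteration: the xor chain below is the input linear layer
# (top basis change) of the bitsliced S-box circuit.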
8479# qhasm: xmm5 ^= xmm4
8480# asm 1: pxor <xmm4=int6464#5,<xmm5=int6464#6
8481# asm 2: pxor <xmm4=%xmm4,<xmm5=%xmm5
8482pxor %xmm4,%xmm5
8483
8484# qhasm: xmm3 ^= xmm1
8485# asm 1: pxor <xmm1=int6464#2,<xmm3=int6464#4
8486# asm 2: pxor <xmm1=%xmm1,<xmm3=%xmm3
8487pxor %xmm1,%xmm3
8488
8489# qhasm: xmm5 ^= xmm0
8490# asm 1: pxor <xmm0=int6464#1,<xmm5=int6464#6
8491# asm 2: pxor <xmm0=%xmm0,<xmm5=%xmm5
8492pxor %xmm0,%xmm5
8493
8494# qhasm: xmm4 ^= xmm3
8495# asm 1: pxor <xmm3=int6464#4,<xmm4=int6464#5
8496# asm 2: pxor <xmm3=%xmm3,<xmm4=%xmm4
8497pxor %xmm3,%xmm4
8498
8499# qhasm: xmm2 ^= xmm0
8500# asm 1: pxor <xmm0=int6464#1,<xmm2=int6464#3
8501# asm 2: pxor <xmm0=%xmm0,<xmm2=%xmm2
8502pxor %xmm0,%xmm2
8503
8504# qhasm: xmm4 ^= xmm2
8505# asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5
8506# asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4
8507pxor %xmm2,%xmm4
8508
8509# qhasm: xmm2 ^= xmm7
8510# asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3
8511# asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2
8512pxor %xmm7,%xmm2
8513
8514# qhasm: xmm2 ^= xmm6
8515# asm 1: pxor <xmm6=int6464#7,<xmm2=int6464#3
8516# asm 2: pxor <xmm6=%xmm6,<xmm2=%xmm2
8517pxor %xmm6,%xmm2
8518
8519# qhasm: xmm7 ^= xmm5
8520# asm 1: pxor <xmm5=int6464#6,<xmm7=int6464#8
8521# asm 2: pxor <xmm5=%xmm5,<xmm7=%xmm7
8522pxor %xmm5,%xmm7
8523
8524# qhasm: xmm2 ^= xmm1
8525# asm 1: pxor <xmm1=int6464#2,<xmm2=int6464#3
8526# asm 2: pxor <xmm1=%xmm1,<xmm2=%xmm2
8527pxor %xmm1,%xmm2
8528
8529# qhasm: xmm6 ^= xmm5
8530# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
8531# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
8532pxor %xmm5,%xmm6
8533
8534# qhasm: xmm3 ^= xmm7
8535# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4
8536# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3
8537pxor %xmm7,%xmm3
8538
8539# qhasm: xmm1 ^= xmm5
8540# asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2
8541# asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1
8542pxor %xmm5,%xmm1
8543
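# Nonlinear middle section of the S-box: the GF(2^8) inversion computed via a
# tower-field decomposition. pand/por are the only nonlinear gates; everything
# else stays linear over GF(2).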
8544# qhasm: xmm11 = xmm7
8545# asm 1: movdqa <xmm7=int6464#8,>xmm11=int6464#9
8546# asm 2: movdqa <xmm7=%xmm7,>xmm11=%xmm8
8547movdqa %xmm7,%xmm8
8548
8549# qhasm: xmm10 = xmm1
8550# asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10
8551# asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9
8552movdqa %xmm1,%xmm9
8553
8554# qhasm: xmm9 = xmm5
8555# asm 1: movdqa <xmm5=int6464#6,>xmm9=int6464#11
8556# asm 2: movdqa <xmm5=%xmm5,>xmm9=%xmm10
8557movdqa %xmm5,%xmm10
8558
8559# qhasm: xmm13 = xmm3
8560# asm 1: movdqa <xmm3=int6464#4,>xmm13=int6464#12
8561# asm 2: movdqa <xmm3=%xmm3,>xmm13=%xmm11
8562movdqa %xmm3,%xmm11
8563
8564# qhasm: xmm12 = xmm4
8565# asm 1: movdqa <xmm4=int6464#5,>xmm12=int6464#13
8566# asm 2: movdqa <xmm4=%xmm4,>xmm12=%xmm12
8567movdqa %xmm4,%xmm12
8568
8569# qhasm: xmm11 ^= xmm6
8570# asm 1: pxor <xmm6=int6464#7,<xmm11=int6464#9
8571# asm 2: pxor <xmm6=%xmm6,<xmm11=%xmm8
8572pxor %xmm6,%xmm8
8573
8574# qhasm: xmm10 ^= xmm3
8575# asm 1: pxor <xmm3=int6464#4,<xmm10=int6464#10
8576# asm 2: pxor <xmm3=%xmm3,<xmm10=%xmm9
8577pxor %xmm3,%xmm9
8578
8579# qhasm: xmm9 ^= xmm2
8580# asm 1: pxor <xmm2=int6464#3,<xmm9=int6464#11
8581# asm 2: pxor <xmm2=%xmm2,<xmm9=%xmm10
8582pxor %xmm2,%xmm10
8583
8584# qhasm: xmm13 ^= xmm6
8585# asm 1: pxor <xmm6=int6464#7,<xmm13=int6464#12
8586# asm 2: pxor <xmm6=%xmm6,<xmm13=%xmm11
8587pxor %xmm6,%xmm11
8588
8589# qhasm: xmm12 ^= xmm0
8590# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13
8591# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12
8592pxor %xmm0,%xmm12
8593
8594# qhasm: xmm14 = xmm11
8595# asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14
8596# asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13
8597movdqa %xmm8,%xmm13
8598
8599# qhasm: xmm8 = xmm10
8600# asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15
8601# asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14
8602movdqa %xmm9,%xmm14
8603
8604# qhasm: xmm15 = xmm11
8605# asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16
8606# asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15
8607movdqa %xmm8,%xmm15
8608
8609# qhasm: xmm10 |= xmm9
8610# asm 1: por <xmm9=int6464#11,<xmm10=int6464#10
8611# asm 2: por <xmm9=%xmm10,<xmm10=%xmm9
8612por %xmm10,%xmm9
8613
8614# qhasm: xmm11 |= xmm12
8615# asm 1: por <xmm12=int6464#13,<xmm11=int6464#9
8616# asm 2: por <xmm12=%xmm12,<xmm11=%xmm8
8617por %xmm12,%xmm8
8618
8619# qhasm: xmm15 ^= xmm8
8620# asm 1: pxor <xmm8=int6464#15,<xmm15=int6464#16
8621# asm 2: pxor <xmm8=%xmm14,<xmm15=%xmm15
8622pxor %xmm14,%xmm15
8623
8624# qhasm: xmm14 &= xmm12
8625# asm 1: pand <xmm12=int6464#13,<xmm14=int6464#14
8626# asm 2: pand <xmm12=%xmm12,<xmm14=%xmm13
8627pand %xmm12,%xmm13
8628
8629# qhasm: xmm8 &= xmm9
8630# asm 1: pand <xmm9=int6464#11,<xmm8=int6464#15
8631# asm 2: pand <xmm9=%xmm10,<xmm8=%xmm14
8632pand %xmm10,%xmm14
8633
8634# qhasm: xmm12 ^= xmm9
8635# asm 1: pxor <xmm9=int6464#11,<xmm12=int6464#13
8636# asm 2: pxor <xmm9=%xmm10,<xmm12=%xmm12
8637pxor %xmm10,%xmm12
8638
8639# qhasm: xmm15 &= xmm12
8640# asm 1: pand <xmm12=int6464#13,<xmm15=int6464#16
8641# asm 2: pand <xmm12=%xmm12,<xmm15=%xmm15
8642pand %xmm12,%xmm15
8643
8644# qhasm: xmm12 = xmm2
8645# asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#11
8646# asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm10
8647movdqa %xmm2,%xmm10
8648
8649# qhasm: xmm12 ^= xmm0
8650# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#11
8651# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm10
8652pxor %xmm0,%xmm10
8653
8654# qhasm: xmm13 &= xmm12
8655# asm 1: pand <xmm12=int6464#11,<xmm13=int6464#12
8656# asm 2: pand <xmm12=%xmm10,<xmm13=%xmm11
8657pand %xmm10,%xmm11
8658
8659# qhasm: xmm11 ^= xmm13
8660# asm 1: pxor <xmm13=int6464#12,<xmm11=int6464#9
8661# asm 2: pxor <xmm13=%xmm11,<xmm11=%xmm8
8662pxor %xmm11,%xmm8
8663
8664# qhasm: xmm10 ^= xmm13
8665# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
8666# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
8667pxor %xmm11,%xmm9
8668
8669# qhasm: xmm13 = xmm7
8670# asm 1: movdqa <xmm7=int6464#8,>xmm13=int6464#11
8671# asm 2: movdqa <xmm7=%xmm7,>xmm13=%xmm10
8672movdqa %xmm7,%xmm10
8673
8674# qhasm: xmm13 ^= xmm1
8675# asm 1: pxor <xmm1=int6464#2,<xmm13=int6464#11
8676# asm 2: pxor <xmm1=%xmm1,<xmm13=%xmm10
8677pxor %xmm1,%xmm10
8678
8679# qhasm: xmm12 = xmm5
8680# asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#12
8681# asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm11
8682movdqa %xmm5,%xmm11
8683
8684# qhasm: xmm9 = xmm13
8685# asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13
8686# asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12
8687movdqa %xmm10,%xmm12
8688
8689# qhasm: xmm12 ^= xmm4
8690# asm 1: pxor <xmm4=int6464#5,<xmm12=int6464#12
8691# asm 2: pxor <xmm4=%xmm4,<xmm12=%xmm11
8692pxor %xmm4,%xmm11
8693
8694# qhasm: xmm9 |= xmm12
8695# asm 1: por <xmm12=int6464#12,<xmm9=int6464#13
8696# asm 2: por <xmm12=%xmm11,<xmm9=%xmm12
8697por %xmm11,%xmm12
8698
8699# qhasm: xmm13 &= xmm12
8700# asm 1: pand <xmm12=int6464#12,<xmm13=int6464#11
8701# asm 2: pand <xmm12=%xmm11,<xmm13=%xmm10
8702pand %xmm11,%xmm10
8703
8704# qhasm: xmm8 ^= xmm13
8705# asm 1: pxor <xmm13=int6464#11,<xmm8=int6464#15
8706# asm 2: pxor <xmm13=%xmm10,<xmm8=%xmm14
8707pxor %xmm10,%xmm14
8708
8709# qhasm: xmm11 ^= xmm15
8710# asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#9
8711# asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm8
8712pxor %xmm15,%xmm8
8713
8714# qhasm: xmm10 ^= xmm14
8715# asm 1: pxor <xmm14=int6464#14,<xmm10=int6464#10
8716# asm 2: pxor <xmm14=%xmm13,<xmm10=%xmm9
8717pxor %xmm13,%xmm9
8718
8719# qhasm: xmm9 ^= xmm15
8720# asm 1: pxor <xmm15=int6464#16,<xmm9=int6464#13
8721# asm 2: pxor <xmm15=%xmm15,<xmm9=%xmm12
8722pxor %xmm15,%xmm12
8723
8724# qhasm: xmm8 ^= xmm14
8725# asm 1: pxor <xmm14=int6464#14,<xmm8=int6464#15
8726# asm 2: pxor <xmm14=%xmm13,<xmm8=%xmm14
8727pxor %xmm13,%xmm14
8728
8729# qhasm: xmm9 ^= xmm14
8730# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
8731# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
8732pxor %xmm13,%xmm12
8733
8734# qhasm: xmm12 = xmm3
8735# asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#11
8736# asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm10
8737movdqa %xmm3,%xmm10
8738
8739# qhasm: xmm13 = xmm6
8740# asm 1: movdqa <xmm6=int6464#7,>xmm13=int6464#12
8741# asm 2: movdqa <xmm6=%xmm6,>xmm13=%xmm11
8742movdqa %xmm6,%xmm11
8743
8744# qhasm: xmm14 = xmm1
8745# asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14
8746# asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13
8747movdqa %xmm1,%xmm13
8748
8749# qhasm: xmm15 = xmm7
8750# asm 1: movdqa <xmm7=int6464#8,>xmm15=int6464#16
8751# asm 2: movdqa <xmm7=%xmm7,>xmm15=%xmm15
8752movdqa %xmm7,%xmm15
8753
8754# qhasm: xmm12 &= xmm2
8755# asm 1: pand <xmm2=int6464#3,<xmm12=int6464#11
8756# asm 2: pand <xmm2=%xmm2,<xmm12=%xmm10
8757pand %xmm2,%xmm10
8758
8759# qhasm: xmm13 &= xmm0
8760# asm 1: pand <xmm0=int6464#1,<xmm13=int6464#12
8761# asm 2: pand <xmm0=%xmm0,<xmm13=%xmm11
8762pand %xmm0,%xmm11
8763
8764# qhasm: xmm14 &= xmm5
8765# asm 1: pand <xmm5=int6464#6,<xmm14=int6464#14
8766# asm 2: pand <xmm5=%xmm5,<xmm14=%xmm13
8767pand %xmm5,%xmm13
8768
8769# qhasm: xmm15 |= xmm4
8770# asm 1: por <xmm4=int6464#5,<xmm15=int6464#16
8771# asm 2: por <xmm4=%xmm4,<xmm15=%xmm15
8772por %xmm4,%xmm15
8773
8774# qhasm: xmm11 ^= xmm12
8775# asm 1: pxor <xmm12=int6464#11,<xmm11=int6464#9
8776# asm 2: pxor <xmm12=%xmm10,<xmm11=%xmm8
8777pxor %xmm10,%xmm8
8778
8779# qhasm: xmm10 ^= xmm13
8780# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
8781# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
8782pxor %xmm11,%xmm9
8783
8784# qhasm: xmm9 ^= xmm14
8785# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
8786# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
8787pxor %xmm13,%xmm12
8788
8789# qhasm: xmm8 ^= xmm15
8790# asm 1: pxor <xmm15=int6464#16,<xmm8=int6464#15
8791# asm 2: pxor <xmm15=%xmm15,<xmm8=%xmm14
8792pxor %xmm15,%xmm14
8793
8794# qhasm: xmm12 = xmm11
8795# asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11
8796# asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10
8797movdqa %xmm8,%xmm10
8798
8799# qhasm: xmm12 ^= xmm10
8800# asm 1: pxor <xmm10=int6464#10,<xmm12=int6464#11
8801# asm 2: pxor <xmm10=%xmm9,<xmm12=%xmm10
8802pxor %xmm9,%xmm10
8803
8804# qhasm: xmm11 &= xmm9
8805# asm 1: pand <xmm9=int6464#13,<xmm11=int6464#9
8806# asm 2: pand <xmm9=%xmm12,<xmm11=%xmm8
8807pand %xmm12,%xmm8
8808
8809# qhasm: xmm14 = xmm8
8810# asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12
8811# asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11
8812movdqa %xmm14,%xmm11
8813
8814# qhasm: xmm14 ^= xmm11
8815# asm 1: pxor <xmm11=int6464#9,<xmm14=int6464#12
8816# asm 2: pxor <xmm11=%xmm8,<xmm14=%xmm11
8817pxor %xmm8,%xmm11
8818
8819# qhasm: xmm15 = xmm12
8820# asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14
8821# asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13
8822movdqa %xmm10,%xmm13
8823
8824# qhasm: xmm15 &= xmm14
8825# asm 1: pand <xmm14=int6464#12,<xmm15=int6464#14
8826# asm 2: pand <xmm14=%xmm11,<xmm15=%xmm13
8827pand %xmm11,%xmm13
8828
8829# qhasm: xmm15 ^= xmm10
8830# asm 1: pxor <xmm10=int6464#10,<xmm15=int6464#14
8831# asm 2: pxor <xmm10=%xmm9,<xmm15=%xmm13
8832pxor %xmm9,%xmm13
8833
8834# qhasm: xmm13 = xmm9
8835# asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16
8836# asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15
8837movdqa %xmm12,%xmm15
8838
8839# qhasm: xmm13 ^= xmm8
8840# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
8841# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
8842pxor %xmm14,%xmm15
8843
8844# qhasm: xmm11 ^= xmm10
8845# asm 1: pxor <xmm10=int6464#10,<xmm11=int6464#9
8846# asm 2: pxor <xmm10=%xmm9,<xmm11=%xmm8
8847pxor %xmm9,%xmm8
8848
8849# qhasm: xmm13 &= xmm11
8850# asm 1: pand <xmm11=int6464#9,<xmm13=int6464#16
8851# asm 2: pand <xmm11=%xmm8,<xmm13=%xmm15
8852pand %xmm8,%xmm15
8853
8854# qhasm: xmm13 ^= xmm8
8855# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
8856# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
8857pxor %xmm14,%xmm15
8858
8859# qhasm: xmm9 ^= xmm13
8860# asm 1: pxor <xmm13=int6464#16,<xmm9=int6464#13
8861# asm 2: pxor <xmm13=%xmm15,<xmm9=%xmm12
8862pxor %xmm15,%xmm12
8863
8864# qhasm: xmm10 = xmm14
8865# asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9
8866# asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8
8867movdqa %xmm11,%xmm8
8868
8869# qhasm: xmm10 ^= xmm13
8870# asm 1: pxor <xmm13=int6464#16,<xmm10=int6464#9
8871# asm 2: pxor <xmm13=%xmm15,<xmm10=%xmm8
8872pxor %xmm15,%xmm8
8873
8874# qhasm: xmm10 &= xmm8
8875# asm 1: pand <xmm8=int6464#15,<xmm10=int6464#9
8876# asm 2: pand <xmm8=%xmm14,<xmm10=%xmm8
8877pand %xmm14,%xmm8
8878
8879# qhasm: xmm9 ^= xmm10
8880# asm 1: pxor <xmm10=int6464#9,<xmm9=int6464#13
8881# asm 2: pxor <xmm10=%xmm8,<xmm9=%xmm12
8882pxor %xmm8,%xmm12
8883
8884# qhasm: xmm14 ^= xmm10
8885# asm 1: pxor <xmm10=int6464#9,<xmm14=int6464#12
8886# asm 2: pxor <xmm10=%xmm8,<xmm14=%xmm11
8887pxor %xmm8,%xmm11
8888
8889# qhasm: xmm14 &= xmm15
8890# asm 1: pand <xmm15=int6464#14,<xmm14=int6464#12
8891# asm 2: pand <xmm15=%xmm13,<xmm14=%xmm11
8892pand %xmm13,%xmm11
8893
8894# qhasm: xmm14 ^= xmm12
8895# asm 1: pxor <xmm12=int6464#11,<xmm14=int6464#12
8896# asm 2: pxor <xmm12=%xmm10,<xmm14=%xmm11
8897pxor %xmm10,%xmm11
8898
8899# qhasm: xmm12 = xmm4
8900# asm 1: movdqa <xmm4=int6464#5,>xmm12=int6464#9
8901# asm 2: movdqa <xmm4=%xmm4,>xmm12=%xmm8
8902movdqa %xmm4,%xmm8
8903
8904# qhasm: xmm8 = xmm5
8905# asm 1: movdqa <xmm5=int6464#6,>xmm8=int6464#10
8906# asm 2: movdqa <xmm5=%xmm5,>xmm8=%xmm9
8907movdqa %xmm5,%xmm9
8908
8909# qhasm: xmm10 = xmm15
8910# asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11
8911# asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10
8912movdqa %xmm13,%xmm10
8913
8914# qhasm: xmm10 ^= xmm14
8915# asm 1: pxor <xmm14=int6464#12,<xmm10=int6464#11
8916# asm 2: pxor <xmm14=%xmm11,<xmm10=%xmm10
8917pxor %xmm11,%xmm10
8918
8919# qhasm: xmm10 &= xmm4
8920# asm 1: pand <xmm4=int6464#5,<xmm10=int6464#11
8921# asm 2: pand <xmm4=%xmm4,<xmm10=%xmm10
8922pand %xmm4,%xmm10
8923
8924# qhasm: xmm4 ^= xmm5
8925# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
8926# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
8927pxor %xmm5,%xmm4
8928
8929# qhasm: xmm4 &= xmm14
8930# asm 1: pand <xmm14=int6464#12,<xmm4=int6464#5
8931# asm 2: pand <xmm14=%xmm11,<xmm4=%xmm4
8932pand %xmm11,%xmm4
8933
8934# qhasm: xmm5 &= xmm15
8935# asm 1: pand <xmm15=int6464#14,<xmm5=int6464#6
8936# asm 2: pand <xmm15=%xmm13,<xmm5=%xmm5
8937pand %xmm13,%xmm5
8938
8939# qhasm: xmm4 ^= xmm5
8940# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
8941# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
8942pxor %xmm5,%xmm4
8943
8944# qhasm: xmm5 ^= xmm10
8945# asm 1: pxor <xmm10=int6464#11,<xmm5=int6464#6
8946# asm 2: pxor <xmm10=%xmm10,<xmm5=%xmm5
8947pxor %xmm10,%xmm5
8948
8949# qhasm: xmm12 ^= xmm0
8950# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#9
8951# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm8
8952pxor %xmm0,%xmm8
8953
8954# qhasm: xmm8 ^= xmm2
8955# asm 1: pxor <xmm2=int6464#3,<xmm8=int6464#10
8956# asm 2: pxor <xmm2=%xmm2,<xmm8=%xmm9
8957pxor %xmm2,%xmm9
8958
8959# qhasm: xmm15 ^= xmm13
8960# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
8961# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
8962pxor %xmm15,%xmm13
8963
8964# qhasm: xmm14 ^= xmm9
8965# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
8966# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
8967pxor %xmm12,%xmm11
8968
8969# qhasm: xmm11 = xmm15
8970# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
8971# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
8972movdqa %xmm13,%xmm10
8973
8974# qhasm: xmm11 ^= xmm14
8975# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
8976# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
8977pxor %xmm11,%xmm10
8978
8979# qhasm: xmm11 &= xmm12
8980# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
8981# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
8982pand %xmm8,%xmm10
8983
8984# qhasm: xmm12 ^= xmm8
8985# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
8986# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
8987pxor %xmm9,%xmm8
8988
8989# qhasm: xmm12 &= xmm14
8990# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
8991# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
8992pand %xmm11,%xmm8
8993
8994# qhasm: xmm8 &= xmm15
8995# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
8996# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
8997pand %xmm13,%xmm9
8998
8999# qhasm: xmm8 ^= xmm12
9000# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
9001# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
9002pxor %xmm8,%xmm9
9003
9004# qhasm: xmm12 ^= xmm11
9005# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
9006# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
9007pxor %xmm10,%xmm8
9008
9009# qhasm: xmm10 = xmm13
9010# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
9011# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
9012movdqa %xmm15,%xmm10
9013
9014# qhasm: xmm10 ^= xmm9
9015# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
9016# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
9017pxor %xmm12,%xmm10
9018
9019# qhasm: xmm10 &= xmm0
9020# asm 1: pand <xmm0=int6464#1,<xmm10=int6464#11
9021# asm 2: pand <xmm0=%xmm0,<xmm10=%xmm10
9022pand %xmm0,%xmm10
9023
9024# qhasm: xmm0 ^= xmm2
9025# asm 1: pxor <xmm2=int6464#3,<xmm0=int6464#1
9026# asm 2: pxor <xmm2=%xmm2,<xmm0=%xmm0
9027pxor %xmm2,%xmm0
9028
9029# qhasm: xmm0 &= xmm9
9030# asm 1: pand <xmm9=int6464#13,<xmm0=int6464#1
9031# asm 2: pand <xmm9=%xmm12,<xmm0=%xmm0
9032pand %xmm12,%xmm0
9033
9034# qhasm: xmm2 &= xmm13
9035# asm 1: pand <xmm13=int6464#16,<xmm2=int6464#3
9036# asm 2: pand <xmm13=%xmm15,<xmm2=%xmm2
9037pand %xmm15,%xmm2
9038
9039# qhasm: xmm0 ^= xmm2
9040# asm 1: pxor <xmm2=int6464#3,<xmm0=int6464#1
9041# asm 2: pxor <xmm2=%xmm2,<xmm0=%xmm0
9042pxor %xmm2,%xmm0
9043
9044# qhasm: xmm2 ^= xmm10
9045# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3
9046# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2
9047pxor %xmm10,%xmm2
9048
9049# qhasm: xmm4 ^= xmm12
9050# asm 1: pxor <xmm12=int6464#9,<xmm4=int6464#5
9051# asm 2: pxor <xmm12=%xmm8,<xmm4=%xmm4
9052pxor %xmm8,%xmm4
9053
9054# qhasm: xmm0 ^= xmm12
9055# asm 1: pxor <xmm12=int6464#9,<xmm0=int6464#1
9056# asm 2: pxor <xmm12=%xmm8,<xmm0=%xmm0
9057pxor %xmm8,%xmm0
9058
9059# qhasm: xmm5 ^= xmm8
9060# asm 1: pxor <xmm8=int6464#10,<xmm5=int6464#6
9061# asm 2: pxor <xmm8=%xmm9,<xmm5=%xmm5
9062pxor %xmm9,%xmm5
9063
9064# qhasm: xmm2 ^= xmm8
9065# asm 1: pxor <xmm8=int6464#10,<xmm2=int6464#3
9066# asm 2: pxor <xmm8=%xmm9,<xmm2=%xmm2
9067pxor %xmm9,%xmm2
9068
9069# qhasm: xmm12 = xmm7
9070# asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#9
9071# asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm8
9072movdqa %xmm7,%xmm8
9073
9074# qhasm: xmm8 = xmm1
9075# asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10
9076# asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9
9077movdqa %xmm1,%xmm9
9078
9079# qhasm: xmm12 ^= xmm6
9080# asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#9
9081# asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm8
9082pxor %xmm6,%xmm8
9083
9084# qhasm: xmm8 ^= xmm3
9085# asm 1: pxor <xmm3=int6464#4,<xmm8=int6464#10
9086# asm 2: pxor <xmm3=%xmm3,<xmm8=%xmm9
9087pxor %xmm3,%xmm9
9088
9089# qhasm: xmm11 = xmm15
9090# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
9091# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
9092movdqa %xmm13,%xmm10
9093
9094# qhasm: xmm11 ^= xmm14
9095# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
9096# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
9097pxor %xmm11,%xmm10
9098
9099# qhasm: xmm11 &= xmm12
9100# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
9101# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
9102pand %xmm8,%xmm10
9103
9104# qhasm: xmm12 ^= xmm8
9105# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
9106# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
9107pxor %xmm9,%xmm8
9108
9109# qhasm: xmm12 &= xmm14
9110# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
9111# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
9112pand %xmm11,%xmm8
9113
9114# qhasm: xmm8 &= xmm15
9115# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
9116# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
9117pand %xmm13,%xmm9
9118
9119# qhasm: xmm8 ^= xmm12
9120# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
9121# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
9122pxor %xmm8,%xmm9
9123
9124# qhasm: xmm12 ^= xmm11
9125# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
9126# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
9127pxor %xmm10,%xmm8
9128
9129# qhasm: xmm10 = xmm13
9130# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
9131# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
9132movdqa %xmm15,%xmm10
9133
9134# qhasm: xmm10 ^= xmm9
9135# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
9136# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
9137pxor %xmm12,%xmm10
9138
9139# qhasm: xmm10 &= xmm6
9140# asm 1: pand <xmm6=int6464#7,<xmm10=int6464#11
9141# asm 2: pand <xmm6=%xmm6,<xmm10=%xmm10
9142pand %xmm6,%xmm10
9143
9144# qhasm: xmm6 ^= xmm3
9145# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
9146# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
9147pxor %xmm3,%xmm6
9148
9149# qhasm: xmm6 &= xmm9
9150# asm 1: pand <xmm9=int6464#13,<xmm6=int6464#7
9151# asm 2: pand <xmm9=%xmm12,<xmm6=%xmm6
9152pand %xmm12,%xmm6
9153
9154# qhasm: xmm3 &= xmm13
9155# asm 1: pand <xmm13=int6464#16,<xmm3=int6464#4
9156# asm 2: pand <xmm13=%xmm15,<xmm3=%xmm3
9157pand %xmm15,%xmm3
9158
9159# qhasm: xmm6 ^= xmm3
9160# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
9161# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
9162pxor %xmm3,%xmm6
9163
9164# qhasm: xmm3 ^= xmm10
9165# asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4
9166# asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3
9167pxor %xmm10,%xmm3
9168
9169# qhasm: xmm15 ^= xmm13
9170# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
9171# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
9172pxor %xmm15,%xmm13
9173
9174# qhasm: xmm14 ^= xmm9
9175# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
9176# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
9177pxor %xmm12,%xmm11
9178
9179# qhasm: xmm11 = xmm15
9180# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
9181# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
9182movdqa %xmm13,%xmm10
9183
9184# qhasm: xmm11 ^= xmm14
9185# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
9186# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
9187pxor %xmm11,%xmm10
9188
9189# qhasm: xmm11 &= xmm7
9190# asm 1: pand <xmm7=int6464#8,<xmm11=int6464#11
9191# asm 2: pand <xmm7=%xmm7,<xmm11=%xmm10
9192pand %xmm7,%xmm10
9193
9194# qhasm: xmm7 ^= xmm1
9195# asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8
9196# asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7
9197pxor %xmm1,%xmm7
9198
9199# qhasm: xmm7 &= xmm14
9200# asm 1: pand <xmm14=int6464#12,<xmm7=int6464#8
9201# asm 2: pand <xmm14=%xmm11,<xmm7=%xmm7
9202pand %xmm11,%xmm7
9203
9204# qhasm: xmm1 &= xmm15
9205# asm 1: pand <xmm15=int6464#14,<xmm1=int6464#2
9206# asm 2: pand <xmm15=%xmm13,<xmm1=%xmm1
9207pand %xmm13,%xmm1
9208
9209# qhasm: xmm7 ^= xmm1
9210# asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8
9211# asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7
9212pxor %xmm1,%xmm7
9213
9214# qhasm: xmm1 ^= xmm11
9215# asm 1: pxor <xmm11=int6464#11,<xmm1=int6464#2
9216# asm 2: pxor <xmm11=%xmm10,<xmm1=%xmm1
9217pxor %xmm10,%xmm1
9218
9219# qhasm: xmm7 ^= xmm12
9220# asm 1: pxor <xmm12=int6464#9,<xmm7=int6464#8
9221# asm 2: pxor <xmm12=%xmm8,<xmm7=%xmm7
9222pxor %xmm8,%xmm7
9223
9224# qhasm: xmm6 ^= xmm12
9225# asm 1: pxor <xmm12=int6464#9,<xmm6=int6464#7
9226# asm 2: pxor <xmm12=%xmm8,<xmm6=%xmm6
9227pxor %xmm8,%xmm6
9228
9229# qhasm: xmm1 ^= xmm8
9230# asm 1: pxor <xmm8=int6464#10,<xmm1=int6464#2
9231# asm 2: pxor <xmm8=%xmm9,<xmm1=%xmm1
9232pxor %xmm9,%xmm1
9233
9234# qhasm: xmm3 ^= xmm8
9235# asm 1: pxor <xmm8=int6464#10,<xmm3=int6464#4
9236# asm 2: pxor <xmm8=%xmm9,<xmm3=%xmm3
9237pxor %xmm9,%xmm3
9238
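# Xor-only chain from here on: bottom (output) linear layer of the S-box
# circuit for this iteration.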
9239# qhasm: xmm7 ^= xmm0
9240# asm 1: pxor <xmm0=int6464#1,<xmm7=int6464#8
9241# asm 2: pxor <xmm0=%xmm0,<xmm7=%xmm7
9242pxor %xmm0,%xmm7
9243
9244# qhasm: xmm1 ^= xmm4
9245# asm 1: pxor <xmm4=int6464#5,<xmm1=int6464#2
9246# asm 2: pxor <xmm4=%xmm4,<xmm1=%xmm1
9247pxor %xmm4,%xmm1
9248
9249# qhasm: xmm6 ^= xmm7
9250# asm 1: pxor <xmm7=int6464#8,<xmm6=int6464#7
9251# asm 2: pxor <xmm7=%xmm7,<xmm6=%xmm6
9252pxor %xmm7,%xmm6
9253
9254# qhasm: xmm4 ^= xmm0
9255# asm 1: pxor <xmm0=int6464#1,<xmm4=int6464#5
9256# asm 2: pxor <xmm0=%xmm0,<xmm4=%xmm4
9257pxor %xmm0,%xmm4
9258
9259# qhasm: xmm0 ^= xmm1
9260# asm 1: pxor <xmm1=int6464#2,<xmm0=int6464#1
9261# asm 2: pxor <xmm1=%xmm1,<xmm0=%xmm0
9262pxor %xmm1,%xmm0
9263
9264# qhasm: xmm1 ^= xmm5
9265# asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2
9266# asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1
9267pxor %xmm5,%xmm1
9268
9269# qhasm: xmm5 ^= xmm3
9270# asm 1: pxor <xmm3=int6464#4,<xmm5=int6464#6
9271# asm 2: pxor <xmm3=%xmm3,<xmm5=%xmm5
9272pxor %xmm3,%xmm5
9273
9274# qhasm: xmm6 ^= xmm5
9275# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
9276# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
9277pxor %xmm5,%xmm6
9278
9279# qhasm: xmm3 ^= xmm2
9280# asm 1: pxor <xmm2=int6464#3,<xmm3=int6464#4
9281# asm 2: pxor <xmm2=%xmm2,<xmm3=%xmm3
9282pxor %xmm2,%xmm3
9283
9284# qhasm: xmm2 ^= xmm5
9285# asm 1: pxor <xmm5=int6464#6,<xmm2=int6464#3
9286# asm 2: pxor <xmm5=%xmm5,<xmm2=%xmm2
9287pxor %xmm5,%xmm2
9288
9289# qhasm: xmm4 ^= xmm2
9290# asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5
9291# asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4
9292pxor %xmm2,%xmm4
9293
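# Round constant for the next round key (round key 7, stored at c + 896):
# rcon = 0x40 = bit 6, matching the xor below into xmm3, the register stored
# later as bit plane 6.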
9294# qhasm: xmm3 ^= RCON
9295# asm 1: pxor RCON,<xmm3=int6464#4
9296# asm 2: pxor RCON,<xmm3=%xmm3
9297pxor RCON,%xmm3
9298
9299# qhasm: shuffle bytes of xmm0 by EXPB0
9300# asm 1: pshufb EXPB0,<xmm0=int6464#1
9301# asm 2: pshufb EXPB0,<xmm0=%xmm0
9302pshufb EXPB0,%xmm0
9303
9304# qhasm: shuffle bytes of xmm1 by EXPB0
9305# asm 1: pshufb EXPB0,<xmm1=int6464#2
9306# asm 2: pshufb EXPB0,<xmm1=%xmm1
9307pshufb EXPB0,%xmm1
9308
9309# qhasm: shuffle bytes of xmm6 by EXPB0
9310# asm 1: pshufb EXPB0,<xmm6=int6464#7
9311# asm 2: pshufb EXPB0,<xmm6=%xmm6
9312pshufb EXPB0,%xmm6
9313
9314# qhasm: shuffle bytes of xmm4 by EXPB0
9315# asm 1: pshufb EXPB0,<xmm4=int6464#5
9316# asm 2: pshufb EXPB0,<xmm4=%xmm4
9317pshufb EXPB0,%xmm4
9318
9319# qhasm: shuffle bytes of xmm2 by EXPB0
9320# asm 1: pshufb EXPB0,<xmm2=int6464#3
9321# asm 2: pshufb EXPB0,<xmm2=%xmm2
9322pshufb EXPB0,%xmm2
9323
9324# qhasm: shuffle bytes of xmm7 by EXPB0
9325# asm 1: pshufb EXPB0,<xmm7=int6464#8
9326# asm 2: pshufb EXPB0,<xmm7=%xmm7
9327pshufb EXPB0,%xmm7
9328
9329# qhasm: shuffle bytes of xmm3 by EXPB0
9330# asm 1: pshufb EXPB0,<xmm3=int6464#4
9331# asm 2: pshufb EXPB0,<xmm3=%xmm3
9332pshufb EXPB0,%xmm3
9333
9334# qhasm: shuffle bytes of xmm5 by EXPB0
9335# asm 1: pshufb EXPB0,<xmm5=int6464#6
9336# asm 2: pshufb EXPB0,<xmm5=%xmm5
9337pshufb EXPB0,%xmm5
9338
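# Reload the round key computed above (round key 6, c + 768 .. c + 880) as
# the k input of the shift-and-xor fold.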
9339# qhasm: xmm8 = *(int128 *)(c + 768)
9340# asm 1: movdqa 768(<c=int64#1),>xmm8=int6464#9
9341# asm 2: movdqa 768(<c=%rdi),>xmm8=%xmm8
9342movdqa 768(%rdi),%xmm8
9343
9344# qhasm: xmm9 = *(int128 *)(c + 784)
9345# asm 1: movdqa 784(<c=int64#1),>xmm9=int6464#10
9346# asm 2: movdqa 784(<c=%rdi),>xmm9=%xmm9
9347movdqa 784(%rdi),%xmm9
9348
9349# qhasm: xmm10 = *(int128 *)(c + 800)
9350# asm 1: movdqa 800(<c=int64#1),>xmm10=int6464#11
9351# asm 2: movdqa 800(<c=%rdi),>xmm10=%xmm10
9352movdqa 800(%rdi),%xmm10
9353
9354# qhasm: xmm11 = *(int128 *)(c + 816)
9355# asm 1: movdqa 816(<c=int64#1),>xmm11=int6464#12
9356# asm 2: movdqa 816(<c=%rdi),>xmm11=%xmm11
9357movdqa 816(%rdi),%xmm11
9358
9359# qhasm: xmm12 = *(int128 *)(c + 832)
9360# asm 1: movdqa 832(<c=int64#1),>xmm12=int6464#13
9361# asm 2: movdqa 832(<c=%rdi),>xmm12=%xmm12
9362movdqa 832(%rdi),%xmm12
9363
9364# qhasm: xmm13 = *(int128 *)(c + 848)
9365# asm 1: movdqa 848(<c=int64#1),>xmm13=int6464#14
9366# asm 2: movdqa 848(<c=%rdi),>xmm13=%xmm13
9367movdqa 848(%rdi),%xmm13
9368
9369# qhasm: xmm14 = *(int128 *)(c + 864)
9370# asm 1: movdqa 864(<c=int64#1),>xmm14=int6464#15
9371# asm 2: movdqa 864(<c=%rdi),>xmm14=%xmm14
9372movdqa 864(%rdi),%xmm14
9373
9374# qhasm: xmm15 = *(int128 *)(c + 880)
9375# asm 1: movdqa 880(<c=int64#1),>xmm15=int6464#16
9376# asm 2: movdqa 880(<c=%rdi),>xmm15=%xmm15
9377movdqa 880(%rdi),%xmm15
9378
9379# qhasm: xmm8 ^= ONE
9380# asm 1: pxor ONE,<xmm8=int6464#9
9381# asm 2: pxor ONE,<xmm8=%xmm8
9382pxor ONE,%xmm8
9383
9384# qhasm: xmm9 ^= ONE
9385# asm 1: pxor ONE,<xmm9=int6464#10
9386# asm 2: pxor ONE,<xmm9=%xmm9
9387pxor ONE,%xmm9
9388
9389# qhasm: xmm13 ^= ONE
9390# asm 1: pxor ONE,<xmm13=int6464#14
9391# asm 2: pxor ONE,<xmm13=%xmm13
9392pxor ONE,%xmm13
9393
9394# qhasm: xmm14 ^= ONE
9395# asm 1: pxor ONE,<xmm14=int6464#15
9396# asm 2: pxor ONE,<xmm14=%xmm14
9397pxor ONE,%xmm14
9398
9399# qhasm: xmm0 ^= xmm8
9400# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
9401# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
9402pxor %xmm8,%xmm0
9403
9404# qhasm: xmm1 ^= xmm9
9405# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
9406# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
9407pxor %xmm9,%xmm1
9408
9409# qhasm: xmm6 ^= xmm10
9410# asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7
9411# asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6
9412pxor %xmm10,%xmm6
9413
9414# qhasm: xmm4 ^= xmm11
9415# asm 1: pxor <xmm11=int6464#12,<xmm4=int6464#5
9416# asm 2: pxor <xmm11=%xmm11,<xmm4=%xmm4
9417pxor %xmm11,%xmm4
9418
9419# qhasm: xmm2 ^= xmm12
9420# asm 1: pxor <xmm12=int6464#13,<xmm2=int6464#3
9421# asm 2: pxor <xmm12=%xmm12,<xmm2=%xmm2
9422pxor %xmm12,%xmm2
9423
9424# qhasm: xmm7 ^= xmm13
9425# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
9426# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
9427pxor %xmm13,%xmm7
9428
9429# qhasm: xmm3 ^= xmm14
9430# asm 1: pxor <xmm14=int6464#15,<xmm3=int6464#4
9431# asm 2: pxor <xmm14=%xmm14,<xmm3=%xmm3
9432pxor %xmm14,%xmm3
9433
9434# qhasm: xmm5 ^= xmm15
9435# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
9436# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
9437pxor %xmm15,%xmm5
9438
9439# qhasm: uint32323232 xmm8 >>= 8
9440# asm 1: psrld $8,<xmm8=int6464#9
9441# asm 2: psrld $8,<xmm8=%xmm8
9442psrld $8,%xmm8
9443
9444# qhasm: uint32323232 xmm9 >>= 8
9445# asm 1: psrld $8,<xmm9=int6464#10
9446# asm 2: psrld $8,<xmm9=%xmm9
9447psrld $8,%xmm9
9448
9449# qhasm: uint32323232 xmm10 >>= 8
9450# asm 1: psrld $8,<xmm10=int6464#11
9451# asm 2: psrld $8,<xmm10=%xmm10
9452psrld $8,%xmm10
9453
9454# qhasm: uint32323232 xmm11 >>= 8
9455# asm 1: psrld $8,<xmm11=int6464#12
9456# asm 2: psrld $8,<xmm11=%xmm11
9457psrld $8,%xmm11
9458
9459# qhasm: uint32323232 xmm12 >>= 8
9460# asm 1: psrld $8,<xmm12=int6464#13
9461# asm 2: psrld $8,<xmm12=%xmm12
9462psrld $8,%xmm12
9463
9464# qhasm: uint32323232 xmm13 >>= 8
9465# asm 1: psrld $8,<xmm13=int6464#14
9466# asm 2: psrld $8,<xmm13=%xmm13
9467psrld $8,%xmm13
9468
9469# qhasm: uint32323232 xmm14 >>= 8
9470# asm 1: psrld $8,<xmm14=int6464#15
9471# asm 2: psrld $8,<xmm14=%xmm14
9472psrld $8,%xmm14
9473
9474# qhasm: uint32323232 xmm15 >>= 8
9475# asm 1: psrld $8,<xmm15=int6464#16
9476# asm 2: psrld $8,<xmm15=%xmm15
9477psrld $8,%xmm15
9478
9479# qhasm: xmm0 ^= xmm8
9480# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
9481# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
9482pxor %xmm8,%xmm0
9483
9484# qhasm: xmm1 ^= xmm9
9485# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
9486# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
9487pxor %xmm9,%xmm1
9488
9489# qhasm: xmm6 ^= xmm10
9490# asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7
9491# asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6
9492pxor %xmm10,%xmm6
9493
9494# qhasm: xmm4 ^= xmm11
9495# asm 1: pxor <xmm11=int6464#12,<xmm4=int6464#5
9496# asm 2: pxor <xmm11=%xmm11,<xmm4=%xmm4
9497pxor %xmm11,%xmm4
9498
9499# qhasm: xmm2 ^= xmm12
9500# asm 1: pxor <xmm12=int6464#13,<xmm2=int6464#3
9501# asm 2: pxor <xmm12=%xmm12,<xmm2=%xmm2
9502pxor %xmm12,%xmm2
9503
9504# qhasm: xmm7 ^= xmm13
9505# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
9506# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
9507pxor %xmm13,%xmm7
9508
9509# qhasm: xmm3 ^= xmm14
9510# asm 1: pxor <xmm14=int6464#15,<xmm3=int6464#4
9511# asm 2: pxor <xmm14=%xmm14,<xmm3=%xmm3
9512pxor %xmm14,%xmm3
9513
9514# qhasm: xmm5 ^= xmm15
9515# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
9516# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
9517pxor %xmm15,%xmm5
9518
9519# qhasm: uint32323232 xmm8 >>= 8
9520# asm 1: psrld $8,<xmm8=int6464#9
9521# asm 2: psrld $8,<xmm8=%xmm8
9522psrld $8,%xmm8
9523
9524# qhasm: uint32323232 xmm9 >>= 8
9525# asm 1: psrld $8,<xmm9=int6464#10
9526# asm 2: psrld $8,<xmm9=%xmm9
9527psrld $8,%xmm9
9528
9529# qhasm: uint32323232 xmm10 >>= 8
9530# asm 1: psrld $8,<xmm10=int6464#11
9531# asm 2: psrld $8,<xmm10=%xmm10
9532psrld $8,%xmm10
9533
9534# qhasm: uint32323232 xmm11 >>= 8
9535# asm 1: psrld $8,<xmm11=int6464#12
9536# asm 2: psrld $8,<xmm11=%xmm11
9537psrld $8,%xmm11
9538
9539# qhasm: uint32323232 xmm12 >>= 8
9540# asm 1: psrld $8,<xmm12=int6464#13
9541# asm 2: psrld $8,<xmm12=%xmm12
9542psrld $8,%xmm12
9543
9544# qhasm: uint32323232 xmm13 >>= 8
9545# asm 1: psrld $8,<xmm13=int6464#14
9546# asm 2: psrld $8,<xmm13=%xmm13
9547psrld $8,%xmm13
9548
9549# qhasm: uint32323232 xmm14 >>= 8
9550# asm 1: psrld $8,<xmm14=int6464#15
9551# asm 2: psrld $8,<xmm14=%xmm14
9552psrld $8,%xmm14
9553
9554# qhasm: uint32323232 xmm15 >>= 8
9555# asm 1: psrld $8,<xmm15=int6464#16
9556# asm 2: psrld $8,<xmm15=%xmm15
9557psrld $8,%xmm15
9558
9559# qhasm: xmm0 ^= xmm8
9560# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
9561# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
9562pxor %xmm8,%xmm0
9563
9564# qhasm: xmm1 ^= xmm9
9565# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
9566# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
9567pxor %xmm9,%xmm1
9568
9569# qhasm: xmm6 ^= xmm10
9570# asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7
9571# asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6
9572pxor %xmm10,%xmm6
9573
9574# qhasm: xmm4 ^= xmm11
9575# asm 1: pxor <xmm11=int6464#12,<xmm4=int6464#5
9576# asm 2: pxor <xmm11=%xmm11,<xmm4=%xmm4
9577pxor %xmm11,%xmm4
9578
9579# qhasm: xmm2 ^= xmm12
9580# asm 1: pxor <xmm12=int6464#13,<xmm2=int6464#3
9581# asm 2: pxor <xmm12=%xmm12,<xmm2=%xmm2
9582pxor %xmm12,%xmm2
9583
9584# qhasm: xmm7 ^= xmm13
9585# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
9586# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
9587pxor %xmm13,%xmm7
9588
9589# qhasm: xmm3 ^= xmm14
9590# asm 1: pxor <xmm14=int6464#15,<xmm3=int6464#4
9591# asm 2: pxor <xmm14=%xmm14,<xmm3=%xmm3
9592pxor %xmm14,%xmm3
9593
9594# qhasm: xmm5 ^= xmm15
9595# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
9596# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
9597pxor %xmm15,%xmm5
9598
9599# qhasm: uint32323232 xmm8 >>= 8
9600# asm 1: psrld $8,<xmm8=int6464#9
9601# asm 2: psrld $8,<xmm8=%xmm8
9602psrld $8,%xmm8
9603
9604# qhasm: uint32323232 xmm9 >>= 8
9605# asm 1: psrld $8,<xmm9=int6464#10
9606# asm 2: psrld $8,<xmm9=%xmm9
9607psrld $8,%xmm9
9608
9609# qhasm: uint32323232 xmm10 >>= 8
9610# asm 1: psrld $8,<xmm10=int6464#11
9611# asm 2: psrld $8,<xmm10=%xmm10
9612psrld $8,%xmm10
9613
9614# qhasm: uint32323232 xmm11 >>= 8
9615# asm 1: psrld $8,<xmm11=int6464#12
9616# asm 2: psrld $8,<xmm11=%xmm11
9617psrld $8,%xmm11
9618
9619# qhasm: uint32323232 xmm12 >>= 8
9620# asm 1: psrld $8,<xmm12=int6464#13
9621# asm 2: psrld $8,<xmm12=%xmm12
9622psrld $8,%xmm12
9623
9624# qhasm: uint32323232 xmm13 >>= 8
9625# asm 1: psrld $8,<xmm13=int6464#14
9626# asm 2: psrld $8,<xmm13=%xmm13
9627psrld $8,%xmm13
9628
9629# qhasm: uint32323232 xmm14 >>= 8
9630# asm 1: psrld $8,<xmm14=int6464#15
9631# asm 2: psrld $8,<xmm14=%xmm14
9632psrld $8,%xmm14
9633
9634# qhasm: uint32323232 xmm15 >>= 8
9635# asm 1: psrld $8,<xmm15=int6464#16
9636# asm 2: psrld $8,<xmm15=%xmm15
9637psrld $8,%xmm15
9638
9639# qhasm: xmm0 ^= xmm8
9640# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
9641# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
9642pxor %xmm8,%xmm0
9643
9644# qhasm: xmm1 ^= xmm9
9645# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
9646# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
9647pxor %xmm9,%xmm1
9648
9649# qhasm: xmm6 ^= xmm10
9650# asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7
9651# asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6
9652pxor %xmm10,%xmm6
9653
9654# qhasm: xmm4 ^= xmm11
9655# asm 1: pxor <xmm11=int6464#12,<xmm4=int6464#5
9656# asm 2: pxor <xmm11=%xmm11,<xmm4=%xmm4
9657pxor %xmm11,%xmm4
9658
9659# qhasm: xmm2 ^= xmm12
9660# asm 1: pxor <xmm12=int6464#13,<xmm2=int6464#3
9661# asm 2: pxor <xmm12=%xmm12,<xmm2=%xmm2
9662pxor %xmm12,%xmm2
9663
9664# qhasm: xmm7 ^= xmm13
9665# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
9666# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
9667pxor %xmm13,%xmm7
9668
9669# qhasm: xmm3 ^= xmm14
9670# asm 1: pxor <xmm14=int6464#15,<xmm3=int6464#4
9671# asm 2: pxor <xmm14=%xmm14,<xmm3=%xmm3
9672pxor %xmm14,%xmm3
9673
9674# qhasm: xmm5 ^= xmm15
9675# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
9676# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
9677pxor %xmm15,%xmm5
9678
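# note: the shift-and-xor ladder above appears to spread the key-schedule
# xor across the bytes of each 32-bit lane; xmm0,1,6,4,2,7,3,5 now hold
# the next bitsliced round key, stored below at offset 896 (= 7*128,
# presumably round key 7 of the AES-128 schedule).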
9679# qhasm: *(int128 *)(c + 896) = xmm0
9680# asm 1: movdqa <xmm0=int6464#1,896(<c=int64#1)
9681# asm 2: movdqa <xmm0=%xmm0,896(<c=%rdi)
9682movdqa %xmm0,896(%rdi)
9683
9684# qhasm: *(int128 *)(c + 912) = xmm1
9685# asm 1: movdqa <xmm1=int6464#2,912(<c=int64#1)
9686# asm 2: movdqa <xmm1=%xmm1,912(<c=%rdi)
9687movdqa %xmm1,912(%rdi)
9688
9689# qhasm: *(int128 *)(c + 928) = xmm6
9690# asm 1: movdqa <xmm6=int6464#7,928(<c=int64#1)
9691# asm 2: movdqa <xmm6=%xmm6,928(<c=%rdi)
9692movdqa %xmm6,928(%rdi)
9693
9694# qhasm: *(int128 *)(c + 944) = xmm4
9695# asm 1: movdqa <xmm4=int6464#5,944(<c=int64#1)
9696# asm 2: movdqa <xmm4=%xmm4,944(<c=%rdi)
9697movdqa %xmm4,944(%rdi)
9698
9699# qhasm: *(int128 *)(c + 960) = xmm2
9700# asm 1: movdqa <xmm2=int6464#3,960(<c=int64#1)
9701# asm 2: movdqa <xmm2=%xmm2,960(<c=%rdi)
9702movdqa %xmm2,960(%rdi)
9703
9704# qhasm: *(int128 *)(c + 976) = xmm7
9705# asm 1: movdqa <xmm7=int6464#8,976(<c=int64#1)
9706# asm 2: movdqa <xmm7=%xmm7,976(<c=%rdi)
9707movdqa %xmm7,976(%rdi)
9708
9709# qhasm: *(int128 *)(c + 992) = xmm3
9710# asm 1: movdqa <xmm3=int6464#4,992(<c=int64#1)
9711# asm 2: movdqa <xmm3=%xmm3,992(<c=%rdi)
9712movdqa %xmm3,992(%rdi)
9713
9714# qhasm: *(int128 *)(c + 1008) = xmm5
9715# asm 1: movdqa <xmm5=int6464#6,1008(<c=int64#1)
9716# asm 2: movdqa <xmm5=%xmm5,1008(<c=%rdi)
9717movdqa %xmm5,1008(%rdi)
9718
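# note: computation of the following round key starts here; xoring ONE
# (all-ones) into four of the bit planes presumably accounts for the
# S-box affine constant 0x63 (set bits 0, 1, 5, 6) folded into the
# stored keys, and the ROTB byte shuffle below appears to realize the
# schedule's word rotation (RotWord) in this transposed layout.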
9719# qhasm: xmm0 ^= ONE
9720# asm 1: pxor ONE,<xmm0=int6464#1
9721# asm 2: pxor ONE,<xmm0=%xmm0
9722pxor ONE,%xmm0
9723
9724# qhasm: xmm1 ^= ONE
9725# asm 1: pxor ONE,<xmm1=int6464#2
9726# asm 2: pxor ONE,<xmm1=%xmm1
9727pxor ONE,%xmm1
9728
9729# qhasm: xmm7 ^= ONE
9730# asm 1: pxor ONE,<xmm7=int6464#8
9731# asm 2: pxor ONE,<xmm7=%xmm7
9732pxor ONE,%xmm7
9733
9734# qhasm: xmm3 ^= ONE
9735# asm 1: pxor ONE,<xmm3=int6464#4
9736# asm 2: pxor ONE,<xmm3=%xmm3
9737pxor ONE,%xmm3
9738
9739# qhasm: shuffle bytes of xmm0 by ROTB
9740# asm 1: pshufb ROTB,<xmm0=int6464#1
9741# asm 2: pshufb ROTB,<xmm0=%xmm0
9742pshufb ROTB,%xmm0
9743
9744# qhasm: shuffle bytes of xmm1 by ROTB
9745# asm 1: pshufb ROTB,<xmm1=int6464#2
9746# asm 2: pshufb ROTB,<xmm1=%xmm1
9747pshufb ROTB,%xmm1
9748
9749# qhasm: shuffle bytes of xmm6 by ROTB
9750# asm 1: pshufb ROTB,<xmm6=int6464#7
9751# asm 2: pshufb ROTB,<xmm6=%xmm6
9752pshufb ROTB,%xmm6
9753
9754# qhasm: shuffle bytes of xmm4 by ROTB
9755# asm 1: pshufb ROTB,<xmm4=int6464#5
9756# asm 2: pshufb ROTB,<xmm4=%xmm4
9757pshufb ROTB,%xmm4
9758
9759# qhasm: shuffle bytes of xmm2 by ROTB
9760# asm 1: pshufb ROTB,<xmm2=int6464#3
9761# asm 2: pshufb ROTB,<xmm2=%xmm2
9762pshufb ROTB,%xmm2
9763
9764# qhasm: shuffle bytes of xmm7 by ROTB
9765# asm 1: pshufb ROTB,<xmm7=int6464#8
9766# asm 2: pshufb ROTB,<xmm7=%xmm7
9767pshufb ROTB,%xmm7
9768
9769# qhasm: shuffle bytes of xmm3 by ROTB
9770# asm 1: pshufb ROTB,<xmm3=int6464#4
9771# asm 2: pshufb ROTB,<xmm3=%xmm3
9772pshufb ROTB,%xmm3
9773
9774# qhasm: shuffle bytes of xmm5 by ROTB
9775# asm 1: pshufb ROTB,<xmm5=int6464#6
9776# asm 2: pshufb ROTB,<xmm5=%xmm5
9777pshufb ROTB,%xmm5
9778
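# note: the long pxor/pand/por network that follows appears to be the
# bitsliced AES S-box (input linear layer, GF(2^8) inversion via a
# GF(2^4)/GF(2^2) tower, output linear layer) evaluated on all eight
# bit planes at once, as in the Käsper-Schwabe bitsliced design.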
9779# qhasm: xmm7 ^= xmm3
9780# asm 1: pxor <xmm3=int6464#4,<xmm7=int6464#8
9781# asm 2: pxor <xmm3=%xmm3,<xmm7=%xmm7
9782pxor %xmm3,%xmm7
9783
9784# qhasm: xmm6 ^= xmm1
9785# asm 1: pxor <xmm1=int6464#2,<xmm6=int6464#7
9786# asm 2: pxor <xmm1=%xmm1,<xmm6=%xmm6
9787pxor %xmm1,%xmm6
9788
9789# qhasm: xmm7 ^= xmm0
9790# asm 1: pxor <xmm0=int6464#1,<xmm7=int6464#8
9791# asm 2: pxor <xmm0=%xmm0,<xmm7=%xmm7
9792pxor %xmm0,%xmm7
9793
9794# qhasm: xmm3 ^= xmm6
9795# asm 1: pxor <xmm6=int6464#7,<xmm3=int6464#4
9796# asm 2: pxor <xmm6=%xmm6,<xmm3=%xmm3
9797pxor %xmm6,%xmm3
9798
9799# qhasm: xmm4 ^= xmm0
9800# asm 1: pxor <xmm0=int6464#1,<xmm4=int6464#5
9801# asm 2: pxor <xmm0=%xmm0,<xmm4=%xmm4
9802pxor %xmm0,%xmm4
9803
9804# qhasm: xmm3 ^= xmm4
9805# asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4
9806# asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3
9807pxor %xmm4,%xmm3
9808
9809# qhasm: xmm4 ^= xmm5
9810# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
9811# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
9812pxor %xmm5,%xmm4
9813
9814# qhasm: xmm4 ^= xmm2
9815# asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5
9816# asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4
9817pxor %xmm2,%xmm4
9818
9819# qhasm: xmm5 ^= xmm7
9820# asm 1: pxor <xmm7=int6464#8,<xmm5=int6464#6
9821# asm 2: pxor <xmm7=%xmm7,<xmm5=%xmm5
9822pxor %xmm7,%xmm5
9823
9824# qhasm: xmm4 ^= xmm1
9825# asm 1: pxor <xmm1=int6464#2,<xmm4=int6464#5
9826# asm 2: pxor <xmm1=%xmm1,<xmm4=%xmm4
9827pxor %xmm1,%xmm4
9828
9829# qhasm: xmm2 ^= xmm7
9830# asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3
9831# asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2
9832pxor %xmm7,%xmm2
9833
9834# qhasm: xmm6 ^= xmm5
9835# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
9836# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
9837pxor %xmm5,%xmm6
9838
9839# qhasm: xmm1 ^= xmm7
9840# asm 1: pxor <xmm7=int6464#8,<xmm1=int6464#2
9841# asm 2: pxor <xmm7=%xmm7,<xmm1=%xmm1
9842pxor %xmm7,%xmm1
9843
9844# qhasm: xmm11 = xmm5
9845# asm 1: movdqa <xmm5=int6464#6,>xmm11=int6464#9
9846# asm 2: movdqa <xmm5=%xmm5,>xmm11=%xmm8
9847movdqa %xmm5,%xmm8
9848
9849# qhasm: xmm10 = xmm1
9850# asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10
9851# asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9
9852movdqa %xmm1,%xmm9
9853
9854# qhasm: xmm9 = xmm7
9855# asm 1: movdqa <xmm7=int6464#8,>xmm9=int6464#11
9856# asm 2: movdqa <xmm7=%xmm7,>xmm9=%xmm10
9857movdqa %xmm7,%xmm10
9858
9859# qhasm: xmm13 = xmm6
9860# asm 1: movdqa <xmm6=int6464#7,>xmm13=int6464#12
9861# asm 2: movdqa <xmm6=%xmm6,>xmm13=%xmm11
9862movdqa %xmm6,%xmm11
9863
9864# qhasm: xmm12 = xmm3
9865# asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#13
9866# asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm12
9867movdqa %xmm3,%xmm12
9868
9869# qhasm: xmm11 ^= xmm2
9870# asm 1: pxor <xmm2=int6464#3,<xmm11=int6464#9
9871# asm 2: pxor <xmm2=%xmm2,<xmm11=%xmm8
9872pxor %xmm2,%xmm8
9873
9874# qhasm: xmm10 ^= xmm6
9875# asm 1: pxor <xmm6=int6464#7,<xmm10=int6464#10
9876# asm 2: pxor <xmm6=%xmm6,<xmm10=%xmm9
9877pxor %xmm6,%xmm9
9878
9879# qhasm: xmm9 ^= xmm4
9880# asm 1: pxor <xmm4=int6464#5,<xmm9=int6464#11
9881# asm 2: pxor <xmm4=%xmm4,<xmm9=%xmm10
9882pxor %xmm4,%xmm10
9883
9884# qhasm: xmm13 ^= xmm2
9885# asm 1: pxor <xmm2=int6464#3,<xmm13=int6464#12
9886# asm 2: pxor <xmm2=%xmm2,<xmm13=%xmm11
9887pxor %xmm2,%xmm11
9888
9889# qhasm: xmm12 ^= xmm0
9890# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13
9891# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12
9892pxor %xmm0,%xmm12
9893
9894# qhasm: xmm14 = xmm11
9895# asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14
9896# asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13
9897movdqa %xmm8,%xmm13
9898
9899# qhasm: xmm8 = xmm10
9900# asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15
9901# asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14
9902movdqa %xmm9,%xmm14
9903
9904# qhasm: xmm15 = xmm11
9905# asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16
9906# asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15
9907movdqa %xmm8,%xmm15
9908
9909# qhasm: xmm10 |= xmm9
9910# asm 1: por <xmm9=int6464#11,<xmm10=int6464#10
9911# asm 2: por <xmm9=%xmm10,<xmm10=%xmm9
9912por %xmm10,%xmm9
9913
9914# qhasm: xmm11 |= xmm12
9915# asm 1: por <xmm12=int6464#13,<xmm11=int6464#9
9916# asm 2: por <xmm12=%xmm12,<xmm11=%xmm8
9917por %xmm12,%xmm8
9918
9919# qhasm: xmm15 ^= xmm8
9920# asm 1: pxor <xmm8=int6464#15,<xmm15=int6464#16
9921# asm 2: pxor <xmm8=%xmm14,<xmm15=%xmm15
9922pxor %xmm14,%xmm15
9923
9924# qhasm: xmm14 &= xmm12
9925# asm 1: pand <xmm12=int6464#13,<xmm14=int6464#14
9926# asm 2: pand <xmm12=%xmm12,<xmm14=%xmm13
9927pand %xmm12,%xmm13
9928
9929# qhasm: xmm8 &= xmm9
9930# asm 1: pand <xmm9=int6464#11,<xmm8=int6464#15
9931# asm 2: pand <xmm9=%xmm10,<xmm8=%xmm14
9932pand %xmm10,%xmm14
9933
9934# qhasm: xmm12 ^= xmm9
9935# asm 1: pxor <xmm9=int6464#11,<xmm12=int6464#13
9936# asm 2: pxor <xmm9=%xmm10,<xmm12=%xmm12
9937pxor %xmm10,%xmm12
9938
9939# qhasm: xmm15 &= xmm12
9940# asm 1: pand <xmm12=int6464#13,<xmm15=int6464#16
9941# asm 2: pand <xmm12=%xmm12,<xmm15=%xmm15
9942pand %xmm12,%xmm15
9943
9944# qhasm: xmm12 = xmm4
9945# asm 1: movdqa <xmm4=int6464#5,>xmm12=int6464#11
9946# asm 2: movdqa <xmm4=%xmm4,>xmm12=%xmm10
9947movdqa %xmm4,%xmm10
9948
9949# qhasm: xmm12 ^= xmm0
9950# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#11
9951# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm10
9952pxor %xmm0,%xmm10
9953
9954# qhasm: xmm13 &= xmm12
9955# asm 1: pand <xmm12=int6464#11,<xmm13=int6464#12
9956# asm 2: pand <xmm12=%xmm10,<xmm13=%xmm11
9957pand %xmm10,%xmm11
9958
9959# qhasm: xmm11 ^= xmm13
9960# asm 1: pxor <xmm13=int6464#12,<xmm11=int6464#9
9961# asm 2: pxor <xmm13=%xmm11,<xmm11=%xmm8
9962pxor %xmm11,%xmm8
9963
9964# qhasm: xmm10 ^= xmm13
9965# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
9966# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
9967pxor %xmm11,%xmm9
9968
9969# qhasm: xmm13 = xmm5
9970# asm 1: movdqa <xmm5=int6464#6,>xmm13=int6464#11
9971# asm 2: movdqa <xmm5=%xmm5,>xmm13=%xmm10
9972movdqa %xmm5,%xmm10
9973
9974# qhasm: xmm13 ^= xmm1
9975# asm 1: pxor <xmm1=int6464#2,<xmm13=int6464#11
9976# asm 2: pxor <xmm1=%xmm1,<xmm13=%xmm10
9977pxor %xmm1,%xmm10
9978
9979# qhasm: xmm12 = xmm7
9980# asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#12
9981# asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm11
9982movdqa %xmm7,%xmm11
9983
9984# qhasm: xmm9 = xmm13
9985# asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13
9986# asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12
9987movdqa %xmm10,%xmm12
9988
9989# qhasm: xmm12 ^= xmm3
9990# asm 1: pxor <xmm3=int6464#4,<xmm12=int6464#12
9991# asm 2: pxor <xmm3=%xmm3,<xmm12=%xmm11
9992pxor %xmm3,%xmm11
9993
9994# qhasm: xmm9 |= xmm12
9995# asm 1: por <xmm12=int6464#12,<xmm9=int6464#13
9996# asm 2: por <xmm12=%xmm11,<xmm9=%xmm12
9997por %xmm11,%xmm12
9998
9999# qhasm: xmm13 &= xmm12
10000# asm 1: pand <xmm12=int6464#12,<xmm13=int6464#11
10001# asm 2: pand <xmm12=%xmm11,<xmm13=%xmm10
10002pand %xmm11,%xmm10
10003
10004# qhasm: xmm8 ^= xmm13
10005# asm 1: pxor <xmm13=int6464#11,<xmm8=int6464#15
10006# asm 2: pxor <xmm13=%xmm10,<xmm8=%xmm14
10007pxor %xmm10,%xmm14
10008
10009# qhasm: xmm11 ^= xmm15
10010# asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#9
10011# asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm8
10012pxor %xmm15,%xmm8
10013
10014# qhasm: xmm10 ^= xmm14
10015# asm 1: pxor <xmm14=int6464#14,<xmm10=int6464#10
10016# asm 2: pxor <xmm14=%xmm13,<xmm10=%xmm9
10017pxor %xmm13,%xmm9
10018
10019# qhasm: xmm9 ^= xmm15
10020# asm 1: pxor <xmm15=int6464#16,<xmm9=int6464#13
10021# asm 2: pxor <xmm15=%xmm15,<xmm9=%xmm12
10022pxor %xmm15,%xmm12
10023
10024# qhasm: xmm8 ^= xmm14
10025# asm 1: pxor <xmm14=int6464#14,<xmm8=int6464#15
10026# asm 2: pxor <xmm14=%xmm13,<xmm8=%xmm14
10027pxor %xmm13,%xmm14
10028
10029# qhasm: xmm9 ^= xmm14
10030# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
10031# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
10032pxor %xmm13,%xmm12
10033
10034# qhasm: xmm12 = xmm6
10035# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#11
10036# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm10
10037movdqa %xmm6,%xmm10
10038
10039# qhasm: xmm13 = xmm2
10040# asm 1: movdqa <xmm2=int6464#3,>xmm13=int6464#12
10041# asm 2: movdqa <xmm2=%xmm2,>xmm13=%xmm11
10042movdqa %xmm2,%xmm11
10043
10044# qhasm: xmm14 = xmm1
10045# asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14
10046# asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13
10047movdqa %xmm1,%xmm13
10048
10049# qhasm: xmm15 = xmm5
10050# asm 1: movdqa <xmm5=int6464#6,>xmm15=int6464#16
10051# asm 2: movdqa <xmm5=%xmm5,>xmm15=%xmm15
10052movdqa %xmm5,%xmm15
10053
10054# qhasm: xmm12 &= xmm4
10055# asm 1: pand <xmm4=int6464#5,<xmm12=int6464#11
10056# asm 2: pand <xmm4=%xmm4,<xmm12=%xmm10
10057pand %xmm4,%xmm10
10058
10059# qhasm: xmm13 &= xmm0
10060# asm 1: pand <xmm0=int6464#1,<xmm13=int6464#12
10061# asm 2: pand <xmm0=%xmm0,<xmm13=%xmm11
10062pand %xmm0,%xmm11
10063
10064# qhasm: xmm14 &= xmm7
10065# asm 1: pand <xmm7=int6464#8,<xmm14=int6464#14
10066# asm 2: pand <xmm7=%xmm7,<xmm14=%xmm13
10067pand %xmm7,%xmm13
10068
10069# qhasm: xmm15 |= xmm3
10070# asm 1: por <xmm3=int6464#4,<xmm15=int6464#16
10071# asm 2: por <xmm3=%xmm3,<xmm15=%xmm15
10072por %xmm3,%xmm15
10073
10074# qhasm: xmm11 ^= xmm12
10075# asm 1: pxor <xmm12=int6464#11,<xmm11=int6464#9
10076# asm 2: pxor <xmm12=%xmm10,<xmm11=%xmm8
10077pxor %xmm10,%xmm8
10078
10079# qhasm: xmm10 ^= xmm13
10080# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
10081# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
10082pxor %xmm11,%xmm9
10083
10084# qhasm: xmm9 ^= xmm14
10085# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
10086# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
10087pxor %xmm13,%xmm12
10088
10089# qhasm: xmm8 ^= xmm15
10090# asm 1: pxor <xmm15=int6464#16,<xmm8=int6464#15
10091# asm 2: pxor <xmm15=%xmm15,<xmm8=%xmm14
10092pxor %xmm15,%xmm14
10093
10094# qhasm: xmm12 = xmm11
10095# asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11
10096# asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10
10097movdqa %xmm8,%xmm10
10098
10099# qhasm: xmm12 ^= xmm10
10100# asm 1: pxor <xmm10=int6464#10,<xmm12=int6464#11
10101# asm 2: pxor <xmm10=%xmm9,<xmm12=%xmm10
10102pxor %xmm9,%xmm10
10103
10104# qhasm: xmm11 &= xmm9
10105# asm 1: pand <xmm9=int6464#13,<xmm11=int6464#9
10106# asm 2: pand <xmm9=%xmm12,<xmm11=%xmm8
10107pand %xmm12,%xmm8
10108
10109# qhasm: xmm14 = xmm8
10110# asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12
10111# asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11
10112movdqa %xmm14,%xmm11
10113
10114# qhasm: xmm14 ^= xmm11
10115# asm 1: pxor <xmm11=int6464#9,<xmm14=int6464#12
10116# asm 2: pxor <xmm11=%xmm8,<xmm14=%xmm11
10117pxor %xmm8,%xmm11
10118
10119# qhasm: xmm15 = xmm12
10120# asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14
10121# asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13
10122movdqa %xmm10,%xmm13
10123
10124# qhasm: xmm15 &= xmm14
10125# asm 1: pand <xmm14=int6464#12,<xmm15=int6464#14
10126# asm 2: pand <xmm14=%xmm11,<xmm15=%xmm13
10127pand %xmm11,%xmm13
10128
10129# qhasm: xmm15 ^= xmm10
10130# asm 1: pxor <xmm10=int6464#10,<xmm15=int6464#14
10131# asm 2: pxor <xmm10=%xmm9,<xmm15=%xmm13
10132pxor %xmm9,%xmm13
10133
10134# qhasm: xmm13 = xmm9
10135# asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16
10136# asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15
10137movdqa %xmm12,%xmm15
10138
10139# qhasm: xmm13 ^= xmm8
10140# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
10141# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
10142pxor %xmm14,%xmm15
10143
10144# qhasm: xmm11 ^= xmm10
10145# asm 1: pxor <xmm10=int6464#10,<xmm11=int6464#9
10146# asm 2: pxor <xmm10=%xmm9,<xmm11=%xmm8
10147pxor %xmm9,%xmm8
10148
10149# qhasm: xmm13 &= xmm11
10150# asm 1: pand <xmm11=int6464#9,<xmm13=int6464#16
10151# asm 2: pand <xmm11=%xmm8,<xmm13=%xmm15
10152pand %xmm8,%xmm15
10153
10154# qhasm: xmm13 ^= xmm8
10155# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
10156# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
10157pxor %xmm14,%xmm15
10158
10159# qhasm: xmm9 ^= xmm13
10160# asm 1: pxor <xmm13=int6464#16,<xmm9=int6464#13
10161# asm 2: pxor <xmm13=%xmm15,<xmm9=%xmm12
10162pxor %xmm15,%xmm12
10163
10164# qhasm: xmm10 = xmm14
10165# asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9
10166# asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8
10167movdqa %xmm11,%xmm8
10168
10169# qhasm: xmm10 ^= xmm13
10170# asm 1: pxor <xmm13=int6464#16,<xmm10=int6464#9
10171# asm 2: pxor <xmm13=%xmm15,<xmm10=%xmm8
10172pxor %xmm15,%xmm8
10173
10174# qhasm: xmm10 &= xmm8
10175# asm 1: pand <xmm8=int6464#15,<xmm10=int6464#9
10176# asm 2: pand <xmm8=%xmm14,<xmm10=%xmm8
10177pand %xmm14,%xmm8
10178
10179# qhasm: xmm9 ^= xmm10
10180# asm 1: pxor <xmm10=int6464#9,<xmm9=int6464#13
10181# asm 2: pxor <xmm10=%xmm8,<xmm9=%xmm12
10182pxor %xmm8,%xmm12
10183
10184# qhasm: xmm14 ^= xmm10
10185# asm 1: pxor <xmm10=int6464#9,<xmm14=int6464#12
10186# asm 2: pxor <xmm10=%xmm8,<xmm14=%xmm11
10187pxor %xmm8,%xmm11
10188
10189# qhasm: xmm14 &= xmm15
10190# asm 1: pand <xmm15=int6464#14,<xmm14=int6464#12
10191# asm 2: pand <xmm15=%xmm13,<xmm14=%xmm11
10192pand %xmm13,%xmm11
10193
10194# qhasm: xmm14 ^= xmm12
10195# asm 1: pxor <xmm12=int6464#11,<xmm14=int6464#12
10196# asm 2: pxor <xmm12=%xmm10,<xmm14=%xmm11
10197pxor %xmm10,%xmm11
10198
10199# qhasm: xmm12 = xmm3
10200# asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#9
10201# asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm8
10202movdqa %xmm3,%xmm8
10203
10204# qhasm: xmm8 = xmm7
10205# asm 1: movdqa <xmm7=int6464#8,>xmm8=int6464#10
10206# asm 2: movdqa <xmm7=%xmm7,>xmm8=%xmm9
10207movdqa %xmm7,%xmm9
10208
10209# qhasm: xmm10 = xmm15
10210# asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11
10211# asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10
10212movdqa %xmm13,%xmm10
10213
10214# qhasm: xmm10 ^= xmm14
10215# asm 1: pxor <xmm14=int6464#12,<xmm10=int6464#11
10216# asm 2: pxor <xmm14=%xmm11,<xmm10=%xmm10
10217pxor %xmm11,%xmm10
10218
10219# qhasm: xmm10 &= xmm3
10220# asm 1: pand <xmm3=int6464#4,<xmm10=int6464#11
10221# asm 2: pand <xmm3=%xmm3,<xmm10=%xmm10
10222pand %xmm3,%xmm10
10223
10224# qhasm: xmm3 ^= xmm7
10225# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4
10226# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3
10227pxor %xmm7,%xmm3
10228
10229# qhasm: xmm3 &= xmm14
10230# asm 1: pand <xmm14=int6464#12,<xmm3=int6464#4
10231# asm 2: pand <xmm14=%xmm11,<xmm3=%xmm3
10232pand %xmm11,%xmm3
10233
10234# qhasm: xmm7 &= xmm15
10235# asm 1: pand <xmm15=int6464#14,<xmm7=int6464#8
10236# asm 2: pand <xmm15=%xmm13,<xmm7=%xmm7
10237pand %xmm13,%xmm7
10238
10239# qhasm: xmm3 ^= xmm7
10240# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4
10241# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3
10242pxor %xmm7,%xmm3
10243
10244# qhasm: xmm7 ^= xmm10
10245# asm 1: pxor <xmm10=int6464#11,<xmm7=int6464#8
10246# asm 2: pxor <xmm10=%xmm10,<xmm7=%xmm7
10247pxor %xmm10,%xmm7
10248
10249# qhasm: xmm12 ^= xmm0
10250# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#9
10251# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm8
10252pxor %xmm0,%xmm8
10253
10254# qhasm: xmm8 ^= xmm4
10255# asm 1: pxor <xmm4=int6464#5,<xmm8=int6464#10
10256# asm 2: pxor <xmm4=%xmm4,<xmm8=%xmm9
10257pxor %xmm4,%xmm9
10258
10259# qhasm: xmm15 ^= xmm13
10260# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
10261# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
10262pxor %xmm15,%xmm13
10263
10264# qhasm: xmm14 ^= xmm9
10265# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
10266# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
10267pxor %xmm12,%xmm11
10268
10269# qhasm: xmm11 = xmm15
10270# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
10271# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
10272movdqa %xmm13,%xmm10
10273
10274# qhasm: xmm11 ^= xmm14
10275# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
10276# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
10277pxor %xmm11,%xmm10
10278
10279# qhasm: xmm11 &= xmm12
10280# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
10281# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
10282pand %xmm8,%xmm10
10283
10284# qhasm: xmm12 ^= xmm8
10285# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
10286# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
10287pxor %xmm9,%xmm8
10288
10289# qhasm: xmm12 &= xmm14
10290# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
10291# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
10292pand %xmm11,%xmm8
10293
10294# qhasm: xmm8 &= xmm15
10295# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
10296# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
10297pand %xmm13,%xmm9
10298
10299# qhasm: xmm8 ^= xmm12
10300# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
10301# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
10302pxor %xmm8,%xmm9
10303
10304# qhasm: xmm12 ^= xmm11
10305# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
10306# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
10307pxor %xmm10,%xmm8
10308
10309# qhasm: xmm10 = xmm13
10310# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
10311# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
10312movdqa %xmm15,%xmm10
10313
10314# qhasm: xmm10 ^= xmm9
10315# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
10316# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
10317pxor %xmm12,%xmm10
10318
10319# qhasm: xmm10 &= xmm0
10320# asm 1: pand <xmm0=int6464#1,<xmm10=int6464#11
10321# asm 2: pand <xmm0=%xmm0,<xmm10=%xmm10
10322pand %xmm0,%xmm10
10323
10324# qhasm: xmm0 ^= xmm4
10325# asm 1: pxor <xmm4=int6464#5,<xmm0=int6464#1
10326# asm 2: pxor <xmm4=%xmm4,<xmm0=%xmm0
10327pxor %xmm4,%xmm0
10328
10329# qhasm: xmm0 &= xmm9
10330# asm 1: pand <xmm9=int6464#13,<xmm0=int6464#1
10331# asm 2: pand <xmm9=%xmm12,<xmm0=%xmm0
10332pand %xmm12,%xmm0
10333
10334# qhasm: xmm4 &= xmm13
10335# asm 1: pand <xmm13=int6464#16,<xmm4=int6464#5
10336# asm 2: pand <xmm13=%xmm15,<xmm4=%xmm4
10337pand %xmm15,%xmm4
10338
10339# qhasm: xmm0 ^= xmm4
10340# asm 1: pxor <xmm4=int6464#5,<xmm0=int6464#1
10341# asm 2: pxor <xmm4=%xmm4,<xmm0=%xmm0
10342pxor %xmm4,%xmm0
10343
10344# qhasm: xmm4 ^= xmm10
10345# asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5
10346# asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4
10347pxor %xmm10,%xmm4
10348
10349# qhasm: xmm3 ^= xmm12
10350# asm 1: pxor <xmm12=int6464#9,<xmm3=int6464#4
10351# asm 2: pxor <xmm12=%xmm8,<xmm3=%xmm3
10352pxor %xmm8,%xmm3
10353
10354# qhasm: xmm0 ^= xmm12
10355# asm 1: pxor <xmm12=int6464#9,<xmm0=int6464#1
10356# asm 2: pxor <xmm12=%xmm8,<xmm0=%xmm0
10357pxor %xmm8,%xmm0
10358
10359# qhasm: xmm7 ^= xmm8
10360# asm 1: pxor <xmm8=int6464#10,<xmm7=int6464#8
10361# asm 2: pxor <xmm8=%xmm9,<xmm7=%xmm7
10362pxor %xmm9,%xmm7
10363
10364# qhasm: xmm4 ^= xmm8
10365# asm 1: pxor <xmm8=int6464#10,<xmm4=int6464#5
10366# asm 2: pxor <xmm8=%xmm9,<xmm4=%xmm4
10367pxor %xmm9,%xmm4
10368
10369# qhasm: xmm12 = xmm5
10370# asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#9
10371# asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm8
10372movdqa %xmm5,%xmm8
10373
10374# qhasm: xmm8 = xmm1
10375# asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10
10376# asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9
10377movdqa %xmm1,%xmm9
10378
10379# qhasm: xmm12 ^= xmm2
10380# asm 1: pxor <xmm2=int6464#3,<xmm12=int6464#9
10381# asm 2: pxor <xmm2=%xmm2,<xmm12=%xmm8
10382pxor %xmm2,%xmm8
10383
10384# qhasm: xmm8 ^= xmm6
10385# asm 1: pxor <xmm6=int6464#7,<xmm8=int6464#10
10386# asm 2: pxor <xmm6=%xmm6,<xmm8=%xmm9
10387pxor %xmm6,%xmm9
10388
10389# qhasm: xmm11 = xmm15
10390# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
10391# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
10392movdqa %xmm13,%xmm10
10393
10394# qhasm: xmm11 ^= xmm14
10395# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
10396# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
10397pxor %xmm11,%xmm10
10398
10399# qhasm: xmm11 &= xmm12
10400# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
10401# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
10402pand %xmm8,%xmm10
10403
10404# qhasm: xmm12 ^= xmm8
10405# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
10406# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
10407pxor %xmm9,%xmm8
10408
10409# qhasm: xmm12 &= xmm14
10410# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
10411# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
10412pand %xmm11,%xmm8
10413
10414# qhasm: xmm8 &= xmm15
10415# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
10416# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
10417pand %xmm13,%xmm9
10418
10419# qhasm: xmm8 ^= xmm12
10420# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
10421# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
10422pxor %xmm8,%xmm9
10423
10424# qhasm: xmm12 ^= xmm11
10425# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
10426# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
10427pxor %xmm10,%xmm8
10428
10429# qhasm: xmm10 = xmm13
10430# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
10431# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
10432movdqa %xmm15,%xmm10
10433
10434# qhasm: xmm10 ^= xmm9
10435# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
10436# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
10437pxor %xmm12,%xmm10
10438
10439# qhasm: xmm10 &= xmm2
10440# asm 1: pand <xmm2=int6464#3,<xmm10=int6464#11
10441# asm 2: pand <xmm2=%xmm2,<xmm10=%xmm10
10442pand %xmm2,%xmm10
10443
10444# qhasm: xmm2 ^= xmm6
10445# asm 1: pxor <xmm6=int6464#7,<xmm2=int6464#3
10446# asm 2: pxor <xmm6=%xmm6,<xmm2=%xmm2
10447pxor %xmm6,%xmm2
10448
10449# qhasm: xmm2 &= xmm9
10450# asm 1: pand <xmm9=int6464#13,<xmm2=int6464#3
10451# asm 2: pand <xmm9=%xmm12,<xmm2=%xmm2
10452pand %xmm12,%xmm2
10453
10454# qhasm: xmm6 &= xmm13
10455# asm 1: pand <xmm13=int6464#16,<xmm6=int6464#7
10456# asm 2: pand <xmm13=%xmm15,<xmm6=%xmm6
10457pand %xmm15,%xmm6
10458
10459# qhasm: xmm2 ^= xmm6
10460# asm 1: pxor <xmm6=int6464#7,<xmm2=int6464#3
10461# asm 2: pxor <xmm6=%xmm6,<xmm2=%xmm2
10462pxor %xmm6,%xmm2
10463
10464# qhasm: xmm6 ^= xmm10
10465# asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7
10466# asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6
10467pxor %xmm10,%xmm6
10468
10469# qhasm: xmm15 ^= xmm13
10470# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
10471# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
10472pxor %xmm15,%xmm13
10473
10474# qhasm: xmm14 ^= xmm9
10475# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
10476# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
10477pxor %xmm12,%xmm11
10478
10479# qhasm: xmm11 = xmm15
10480# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
10481# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
10482movdqa %xmm13,%xmm10
10483
10484# qhasm: xmm11 ^= xmm14
10485# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
10486# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
10487pxor %xmm11,%xmm10
10488
10489# qhasm: xmm11 &= xmm5
10490# asm 1: pand <xmm5=int6464#6,<xmm11=int6464#11
10491# asm 2: pand <xmm5=%xmm5,<xmm11=%xmm10
10492pand %xmm5,%xmm10
10493
10494# qhasm: xmm5 ^= xmm1
10495# asm 1: pxor <xmm1=int6464#2,<xmm5=int6464#6
10496# asm 2: pxor <xmm1=%xmm1,<xmm5=%xmm5
10497pxor %xmm1,%xmm5
10498
10499# qhasm: xmm5 &= xmm14
10500# asm 1: pand <xmm14=int6464#12,<xmm5=int6464#6
10501# asm 2: pand <xmm14=%xmm11,<xmm5=%xmm5
10502pand %xmm11,%xmm5
10503
10504# qhasm: xmm1 &= xmm15
10505# asm 1: pand <xmm15=int6464#14,<xmm1=int6464#2
10506# asm 2: pand <xmm15=%xmm13,<xmm1=%xmm1
10507pand %xmm13,%xmm1
10508
10509# qhasm: xmm5 ^= xmm1
10510# asm 1: pxor <xmm1=int6464#2,<xmm5=int6464#6
10511# asm 2: pxor <xmm1=%xmm1,<xmm5=%xmm5
10512pxor %xmm1,%xmm5
10513
10514# qhasm: xmm1 ^= xmm11
10515# asm 1: pxor <xmm11=int6464#11,<xmm1=int6464#2
10516# asm 2: pxor <xmm11=%xmm10,<xmm1=%xmm1
10517pxor %xmm10,%xmm1
10518
10519# qhasm: xmm5 ^= xmm12
10520# asm 1: pxor <xmm12=int6464#9,<xmm5=int6464#6
10521# asm 2: pxor <xmm12=%xmm8,<xmm5=%xmm5
10522pxor %xmm8,%xmm5
10523
10524# qhasm: xmm2 ^= xmm12
10525# asm 1: pxor <xmm12=int6464#9,<xmm2=int6464#3
10526# asm 2: pxor <xmm12=%xmm8,<xmm2=%xmm2
10527pxor %xmm8,%xmm2
10528
10529# qhasm: xmm1 ^= xmm8
10530# asm 1: pxor <xmm8=int6464#10,<xmm1=int6464#2
10531# asm 2: pxor <xmm8=%xmm9,<xmm1=%xmm1
10532pxor %xmm9,%xmm1
10533
10534# qhasm: xmm6 ^= xmm8
10535# asm 1: pxor <xmm8=int6464#10,<xmm6=int6464#7
10536# asm 2: pxor <xmm8=%xmm9,<xmm6=%xmm6
10537pxor %xmm9,%xmm6
10538
10539# qhasm: xmm5 ^= xmm0
10540# asm 1: pxor <xmm0=int6464#1,<xmm5=int6464#6
10541# asm 2: pxor <xmm0=%xmm0,<xmm5=%xmm5
10542pxor %xmm0,%xmm5
10543
10544# qhasm: xmm1 ^= xmm3
10545# asm 1: pxor <xmm3=int6464#4,<xmm1=int6464#2
10546# asm 2: pxor <xmm3=%xmm3,<xmm1=%xmm1
10547pxor %xmm3,%xmm1
10548
10549# qhasm: xmm2 ^= xmm5
10550# asm 1: pxor <xmm5=int6464#6,<xmm2=int6464#3
10551# asm 2: pxor <xmm5=%xmm5,<xmm2=%xmm2
10552pxor %xmm5,%xmm2
10553
10554# qhasm: xmm3 ^= xmm0
10555# asm 1: pxor <xmm0=int6464#1,<xmm3=int6464#4
10556# asm 2: pxor <xmm0=%xmm0,<xmm3=%xmm3
10557pxor %xmm0,%xmm3
10558
10559# qhasm: xmm0 ^= xmm1
10560# asm 1: pxor <xmm1=int6464#2,<xmm0=int6464#1
10561# asm 2: pxor <xmm1=%xmm1,<xmm0=%xmm0
10562pxor %xmm1,%xmm0
10563
10564# qhasm: xmm1 ^= xmm7
10565# asm 1: pxor <xmm7=int6464#8,<xmm1=int6464#2
10566# asm 2: pxor <xmm7=%xmm7,<xmm1=%xmm1
10567pxor %xmm7,%xmm1
10568
10569# qhasm: xmm7 ^= xmm6
10570# asm 1: pxor <xmm6=int6464#7,<xmm7=int6464#8
10571# asm 2: pxor <xmm6=%xmm6,<xmm7=%xmm7
10572pxor %xmm6,%xmm7
10573
10574# qhasm: xmm2 ^= xmm7
10575# asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3
10576# asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2
10577pxor %xmm7,%xmm2
10578
10579# qhasm: xmm6 ^= xmm4
10580# asm 1: pxor <xmm4=int6464#5,<xmm6=int6464#7
10581# asm 2: pxor <xmm4=%xmm4,<xmm6=%xmm6
10582pxor %xmm4,%xmm6
10583
10584# qhasm: xmm4 ^= xmm7
10585# asm 1: pxor <xmm7=int6464#8,<xmm4=int6464#5
10586# asm 2: pxor <xmm7=%xmm7,<xmm4=%xmm4
10587pxor %xmm7,%xmm4
10588
10589# qhasm: xmm3 ^= xmm4
10590# asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4
10591# asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3
10592pxor %xmm4,%xmm3
10593
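# note: with the S-box output in place, RCON (all-ones in its top word
# only) complements one word of a single bit plane; presumably this is
# the round-constant contribution for this iteration of the schedule.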
10594# qhasm: xmm7 ^= RCON
10595# asm 1: pxor RCON,<xmm7=int6464#8
10596# asm 2: pxor RCON,<xmm7=%xmm7
10597pxor RCON,%xmm7
10598
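# note: the EXPB0 shuffles below broadcast byte 3 of each 32-bit word
# across the whole word (mask bytes 0x03, 0x07, 0x0b, 0x0f), apparently
# selecting the substituted key byte that each word of the expansion
# must xor in.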
10599# qhasm: shuffle bytes of xmm0 by EXPB0
10600# asm 1: pshufb EXPB0,<xmm0=int6464#1
10601# asm 2: pshufb EXPB0,<xmm0=%xmm0
10602pshufb EXPB0,%xmm0
10603
10604# qhasm: shuffle bytes of xmm1 by EXPB0
10605# asm 1: pshufb EXPB0,<xmm1=int6464#2
10606# asm 2: pshufb EXPB0,<xmm1=%xmm1
10607pshufb EXPB0,%xmm1
10608
10609# qhasm: shuffle bytes of xmm2 by EXPB0
10610# asm 1: pshufb EXPB0,<xmm2=int6464#3
10611# asm 2: pshufb EXPB0,<xmm2=%xmm2
10612pshufb EXPB0,%xmm2
10613
10614# qhasm: shuffle bytes of xmm3 by EXPB0
10615# asm 1: pshufb EXPB0,<xmm3=int6464#4
10616# asm 2: pshufb EXPB0,<xmm3=%xmm3
10617pshufb EXPB0,%xmm3
10618
10619# qhasm: shuffle bytes of xmm4 by EXPB0
10620# asm 1: pshufb EXPB0,<xmm4=int6464#5
10621# asm 2: pshufb EXPB0,<xmm4=%xmm4
10622pshufb EXPB0,%xmm4
10623
10624# qhasm: shuffle bytes of xmm5 by EXPB0
10625# asm 1: pshufb EXPB0,<xmm5=int6464#6
10626# asm 2: pshufb EXPB0,<xmm5=%xmm5
10627pshufb EXPB0,%xmm5
10628
10629# qhasm: shuffle bytes of xmm6 by EXPB0
10630# asm 1: pshufb EXPB0,<xmm6=int6464#7
10631# asm 2: pshufb EXPB0,<xmm6=%xmm6
10632pshufb EXPB0,%xmm6
10633
10634# qhasm: shuffle bytes of xmm7 by EXPB0
10635# asm 1: pshufb EXPB0,<xmm7=int6464#8
10636# asm 2: pshufb EXPB0,<xmm7=%xmm7
10637pshufb EXPB0,%xmm7
10638
10639# qhasm: xmm8 = *(int128 *)(c + 896)
10640# asm 1: movdqa 896(<c=int64#1),>xmm8=int6464#9
10641# asm 2: movdqa 896(<c=%rdi),>xmm8=%xmm8
10642movdqa 896(%rdi),%xmm8
10643
10644# qhasm: xmm9 = *(int128 *)(c + 912)
10645# asm 1: movdqa 912(<c=int64#1),>xmm9=int6464#10
10646# asm 2: movdqa 912(<c=%rdi),>xmm9=%xmm9
10647movdqa 912(%rdi),%xmm9
10648
10649# qhasm: xmm10 = *(int128 *)(c + 928)
10650# asm 1: movdqa 928(<c=int64#1),>xmm10=int6464#11
10651# asm 2: movdqa 928(<c=%rdi),>xmm10=%xmm10
10652movdqa 928(%rdi),%xmm10
10653
10654# qhasm: xmm11 = *(int128 *)(c + 944)
10655# asm 1: movdqa 944(<c=int64#1),>xmm11=int6464#12
10656# asm 2: movdqa 944(<c=%rdi),>xmm11=%xmm11
10657movdqa 944(%rdi),%xmm11
10658
10659# qhasm: xmm12 = *(int128 *)(c + 960)
10660# asm 1: movdqa 960(<c=int64#1),>xmm12=int6464#13
10661# asm 2: movdqa 960(<c=%rdi),>xmm12=%xmm12
10662movdqa 960(%rdi),%xmm12
10663
10664# qhasm: xmm13 = *(int128 *)(c + 976)
10665# asm 1: movdqa 976(<c=int64#1),>xmm13=int6464#14
10666# asm 2: movdqa 976(<c=%rdi),>xmm13=%xmm13
10667movdqa 976(%rdi),%xmm13
10668
10669# qhasm: xmm14 = *(int128 *)(c + 992)
10670# asm 1: movdqa 992(<c=int64#1),>xmm14=int6464#15
10671# asm 2: movdqa 992(<c=%rdi),>xmm14=%xmm14
10672movdqa 992(%rdi),%xmm14
10673
10674# qhasm: xmm15 = *(int128 *)(c + 1008)
10675# asm 1: movdqa 1008(<c=int64#1),>xmm15=int6464#16
10676# asm 2: movdqa 1008(<c=%rdi),>xmm15=%xmm15
10677movdqa 1008(%rdi),%xmm15
10678
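# note: the previous round key (offset 896) is reloaded above so the
# expanded-word material can be folded into it; the ONE xors below
# presumably re-complement the bit planes that were flipped when it
# was stored.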
10679# qhasm: xmm8 ^= ONE
10680# asm 1: pxor ONE,<xmm8=int6464#9
10681# asm 2: pxor ONE,<xmm8=%xmm8
10682pxor ONE,%xmm8
10683
10684# qhasm: xmm9 ^= ONE
10685# asm 1: pxor ONE,<xmm9=int6464#10
10686# asm 2: pxor ONE,<xmm9=%xmm9
10687pxor ONE,%xmm9
10688
10689# qhasm: xmm13 ^= ONE
10690# asm 1: pxor ONE,<xmm13=int6464#14
10691# asm 2: pxor ONE,<xmm13=%xmm13
10692pxor ONE,%xmm13
10693
10694# qhasm: xmm14 ^= ONE
10695# asm 1: pxor ONE,<xmm14=int6464#15
10696# asm 2: pxor ONE,<xmm14=%xmm14
10697pxor ONE,%xmm14
10698
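# note: what follows is the key-schedule recurrence in bitsliced form:
# one full xor plus three shift-by-8/xor steps per bit plane appears to
# accumulate the running xor w'_i = w'_{i-1} ^ w_i across the four
# round-key words, which the transposed byte order keeps inside each
# 32-bit lane.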
10699# qhasm: xmm0 ^= xmm8
10700# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
10701# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
10702pxor %xmm8,%xmm0
10703
10704# qhasm: xmm1 ^= xmm9
10705# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
10706# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
10707pxor %xmm9,%xmm1
10708
10709# qhasm: xmm2 ^= xmm10
10710# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3
10711# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2
10712pxor %xmm10,%xmm2
10713
10714# qhasm: xmm3 ^= xmm11
10715# asm 1: pxor <xmm11=int6464#12,<xmm3=int6464#4
10716# asm 2: pxor <xmm11=%xmm11,<xmm3=%xmm3
10717pxor %xmm11,%xmm3
10718
10719# qhasm: xmm4 ^= xmm12
10720# asm 1: pxor <xmm12=int6464#13,<xmm4=int6464#5
10721# asm 2: pxor <xmm12=%xmm12,<xmm4=%xmm4
10722pxor %xmm12,%xmm4
10723
10724# qhasm: xmm5 ^= xmm13
10725# asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6
10726# asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5
10727pxor %xmm13,%xmm5
10728
10729# qhasm: xmm6 ^= xmm14
10730# asm 1: pxor <xmm14=int6464#15,<xmm6=int6464#7
10731# asm 2: pxor <xmm14=%xmm14,<xmm6=%xmm6
10732pxor %xmm14,%xmm6
10733
10734# qhasm: xmm7 ^= xmm15
10735# asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8
10736# asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7
10737pxor %xmm15,%xmm7
10738
10739# qhasm: uint32323232 xmm8 >>= 8
10740# asm 1: psrld $8,<xmm8=int6464#9
10741# asm 2: psrld $8,<xmm8=%xmm8
10742psrld $8,%xmm8
10743
10744# qhasm: uint32323232 xmm9 >>= 8
10745# asm 1: psrld $8,<xmm9=int6464#10
10746# asm 2: psrld $8,<xmm9=%xmm9
10747psrld $8,%xmm9
10748
10749# qhasm: uint32323232 xmm10 >>= 8
10750# asm 1: psrld $8,<xmm10=int6464#11
10751# asm 2: psrld $8,<xmm10=%xmm10
10752psrld $8,%xmm10
10753
10754# qhasm: uint32323232 xmm11 >>= 8
10755# asm 1: psrld $8,<xmm11=int6464#12
10756# asm 2: psrld $8,<xmm11=%xmm11
10757psrld $8,%xmm11
10758
10759# qhasm: uint32323232 xmm12 >>= 8
10760# asm 1: psrld $8,<xmm12=int6464#13
10761# asm 2: psrld $8,<xmm12=%xmm12
10762psrld $8,%xmm12
10763
10764# qhasm: uint32323232 xmm13 >>= 8
10765# asm 1: psrld $8,<xmm13=int6464#14
10766# asm 2: psrld $8,<xmm13=%xmm13
10767psrld $8,%xmm13
10768
10769# qhasm: uint32323232 xmm14 >>= 8
10770# asm 1: psrld $8,<xmm14=int6464#15
10771# asm 2: psrld $8,<xmm14=%xmm14
10772psrld $8,%xmm14
10773
10774# qhasm: uint32323232 xmm15 >>= 8
10775# asm 1: psrld $8,<xmm15=int6464#16
10776# asm 2: psrld $8,<xmm15=%xmm15
10777psrld $8,%xmm15
10778
10779# qhasm: xmm0 ^= xmm8
10780# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
10781# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
10782pxor %xmm8,%xmm0
10783
10784# qhasm: xmm1 ^= xmm9
10785# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
10786# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
10787pxor %xmm9,%xmm1
10788
10789# qhasm: xmm2 ^= xmm10
10790# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3
10791# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2
10792pxor %xmm10,%xmm2
10793
10794# qhasm: xmm3 ^= xmm11
10795# asm 1: pxor <xmm11=int6464#12,<xmm3=int6464#4
10796# asm 2: pxor <xmm11=%xmm11,<xmm3=%xmm3
10797pxor %xmm11,%xmm3
10798
10799# qhasm: xmm4 ^= xmm12
10800# asm 1: pxor <xmm12=int6464#13,<xmm4=int6464#5
10801# asm 2: pxor <xmm12=%xmm12,<xmm4=%xmm4
10802pxor %xmm12,%xmm4
10803
10804# qhasm: xmm5 ^= xmm13
10805# asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6
10806# asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5
10807pxor %xmm13,%xmm5
10808
10809# qhasm: xmm6 ^= xmm14
10810# asm 1: pxor <xmm14=int6464#15,<xmm6=int6464#7
10811# asm 2: pxor <xmm14=%xmm14,<xmm6=%xmm6
10812pxor %xmm14,%xmm6
10813
10814# qhasm: xmm7 ^= xmm15
10815# asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8
10816# asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7
10817pxor %xmm15,%xmm7
10818
10819# qhasm: uint32323232 xmm8 >>= 8
10820# asm 1: psrld $8,<xmm8=int6464#9
10821# asm 2: psrld $8,<xmm8=%xmm8
10822psrld $8,%xmm8
10823
10824# qhasm: uint32323232 xmm9 >>= 8
10825# asm 1: psrld $8,<xmm9=int6464#10
10826# asm 2: psrld $8,<xmm9=%xmm9
10827psrld $8,%xmm9
10828
10829# qhasm: uint32323232 xmm10 >>= 8
10830# asm 1: psrld $8,<xmm10=int6464#11
10831# asm 2: psrld $8,<xmm10=%xmm10
10832psrld $8,%xmm10
10833
10834# qhasm: uint32323232 xmm11 >>= 8
10835# asm 1: psrld $8,<xmm11=int6464#12
10836# asm 2: psrld $8,<xmm11=%xmm11
10837psrld $8,%xmm11
10838
10839# qhasm: uint32323232 xmm12 >>= 8
10840# asm 1: psrld $8,<xmm12=int6464#13
10841# asm 2: psrld $8,<xmm12=%xmm12
10842psrld $8,%xmm12
10843
10844# qhasm: uint32323232 xmm13 >>= 8
10845# asm 1: psrld $8,<xmm13=int6464#14
10846# asm 2: psrld $8,<xmm13=%xmm13
10847psrld $8,%xmm13
10848
10849# qhasm: uint32323232 xmm14 >>= 8
10850# asm 1: psrld $8,<xmm14=int6464#15
10851# asm 2: psrld $8,<xmm14=%xmm14
10852psrld $8,%xmm14
10853
10854# qhasm: uint32323232 xmm15 >>= 8
10855# asm 1: psrld $8,<xmm15=int6464#16
10856# asm 2: psrld $8,<xmm15=%xmm15
10857psrld $8,%xmm15
10858
10859# qhasm: xmm0 ^= xmm8
10860# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
10861# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
10862pxor %xmm8,%xmm0
10863
10864# qhasm: xmm1 ^= xmm9
10865# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
10866# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
10867pxor %xmm9,%xmm1
10868
10869# qhasm: xmm2 ^= xmm10
10870# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3
10871# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2
10872pxor %xmm10,%xmm2
10873
10874# qhasm: xmm3 ^= xmm11
10875# asm 1: pxor <xmm11=int6464#12,<xmm3=int6464#4
10876# asm 2: pxor <xmm11=%xmm11,<xmm3=%xmm3
10877pxor %xmm11,%xmm3
10878
10879# qhasm: xmm4 ^= xmm12
10880# asm 1: pxor <xmm12=int6464#13,<xmm4=int6464#5
10881# asm 2: pxor <xmm12=%xmm12,<xmm4=%xmm4
10882pxor %xmm12,%xmm4
10883
10884# qhasm: xmm5 ^= xmm13
10885# asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6
10886# asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5
10887pxor %xmm13,%xmm5
10888
10889# qhasm: xmm6 ^= xmm14
10890# asm 1: pxor <xmm14=int6464#15,<xmm6=int6464#7
10891# asm 2: pxor <xmm14=%xmm14,<xmm6=%xmm6
10892pxor %xmm14,%xmm6
10893
10894# qhasm: xmm7 ^= xmm15
10895# asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8
10896# asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7
10897pxor %xmm15,%xmm7
10898
10899# qhasm: uint32323232 xmm8 >>= 8
10900# asm 1: psrld $8,<xmm8=int6464#9
10901# asm 2: psrld $8,<xmm8=%xmm8
10902psrld $8,%xmm8
10903
10904# qhasm: uint32323232 xmm9 >>= 8
10905# asm 1: psrld $8,<xmm9=int6464#10
10906# asm 2: psrld $8,<xmm9=%xmm9
10907psrld $8,%xmm9
10908
10909# qhasm: uint32323232 xmm10 >>= 8
10910# asm 1: psrld $8,<xmm10=int6464#11
10911# asm 2: psrld $8,<xmm10=%xmm10
10912psrld $8,%xmm10
10913
10914# qhasm: uint32323232 xmm11 >>= 8
10915# asm 1: psrld $8,<xmm11=int6464#12
10916# asm 2: psrld $8,<xmm11=%xmm11
10917psrld $8,%xmm11
10918
10919# qhasm: uint32323232 xmm12 >>= 8
10920# asm 1: psrld $8,<xmm12=int6464#13
10921# asm 2: psrld $8,<xmm12=%xmm12
10922psrld $8,%xmm12
10923
10924# qhasm: uint32323232 xmm13 >>= 8
10925# asm 1: psrld $8,<xmm13=int6464#14
10926# asm 2: psrld $8,<xmm13=%xmm13
10927psrld $8,%xmm13
10928
10929# qhasm: uint32323232 xmm14 >>= 8
10930# asm 1: psrld $8,<xmm14=int6464#15
10931# asm 2: psrld $8,<xmm14=%xmm14
10932psrld $8,%xmm14
10933
10934# qhasm: uint32323232 xmm15 >>= 8
10935# asm 1: psrld $8,<xmm15=int6464#16
10936# asm 2: psrld $8,<xmm15=%xmm15
10937psrld $8,%xmm15
10938
10939# qhasm: xmm0 ^= xmm8
10940# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
10941# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
10942pxor %xmm8,%xmm0
10943
10944# qhasm: xmm1 ^= xmm9
10945# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
10946# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
10947pxor %xmm9,%xmm1
10948
10949# qhasm: xmm2 ^= xmm10
10950# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3
10951# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2
10952pxor %xmm10,%xmm2
10953
10954# qhasm: xmm3 ^= xmm11
10955# asm 1: pxor <xmm11=int6464#12,<xmm3=int6464#4
10956# asm 2: pxor <xmm11=%xmm11,<xmm3=%xmm3
10957pxor %xmm11,%xmm3
10958
10959# qhasm: xmm4 ^= xmm12
10960# asm 1: pxor <xmm12=int6464#13,<xmm4=int6464#5
10961# asm 2: pxor <xmm12=%xmm12,<xmm4=%xmm4
10962pxor %xmm12,%xmm4
10963
10964# qhasm: xmm5 ^= xmm13
10965# asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6
10966# asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5
10967pxor %xmm13,%xmm5
10968
10969# qhasm: xmm6 ^= xmm14
10970# asm 1: pxor <xmm14=int6464#15,<xmm6=int6464#7
10971# asm 2: pxor <xmm14=%xmm14,<xmm6=%xmm6
10972pxor %xmm14,%xmm6
10973
10974# qhasm: xmm7 ^= xmm15
10975# asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8
10976# asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7
10977pxor %xmm15,%xmm7
10978
10979# qhasm: *(int128 *)(c + 1024) = xmm0
10980# asm 1: movdqa <xmm0=int6464#1,1024(<c=int64#1)
10981# asm 2: movdqa <xmm0=%xmm0,1024(<c=%rdi)
10982movdqa %xmm0,1024(%rdi)
10983
10984# qhasm: *(int128 *)(c + 1040) = xmm1
10985# asm 1: movdqa <xmm1=int6464#2,1040(<c=int64#1)
10986# asm 2: movdqa <xmm1=%xmm1,1040(<c=%rdi)
10987movdqa %xmm1,1040(%rdi)
10988
10989# qhasm: *(int128 *)(c + 1056) = xmm2
10990# asm 1: movdqa <xmm2=int6464#3,1056(<c=int64#1)
10991# asm 2: movdqa <xmm2=%xmm2,1056(<c=%rdi)
10992movdqa %xmm2,1056(%rdi)
10993
10994# qhasm: *(int128 *)(c + 1072) = xmm3
10995# asm 1: movdqa <xmm3=int6464#4,1072(<c=int64#1)
10996# asm 2: movdqa <xmm3=%xmm3,1072(<c=%rdi)
10997movdqa %xmm3,1072(%rdi)
10998
10999# qhasm: *(int128 *)(c + 1088) = xmm4
11000# asm 1: movdqa <xmm4=int6464#5,1088(<c=int64#1)
11001# asm 2: movdqa <xmm4=%xmm4,1088(<c=%rdi)
11002movdqa %xmm4,1088(%rdi)
11003
11004# qhasm: *(int128 *)(c + 1104) = xmm5
11005# asm 1: movdqa <xmm5=int6464#6,1104(<c=int64#1)
11006# asm 2: movdqa <xmm5=%xmm5,1104(<c=%rdi)
11007movdqa %xmm5,1104(%rdi)
11008
11009# qhasm: *(int128 *)(c + 1120) = xmm6
11010# asm 1: movdqa <xmm6=int6464#7,1120(<c=int64#1)
11011# asm 2: movdqa <xmm6=%xmm6,1120(<c=%rdi)
11012movdqa %xmm6,1120(%rdi)
11013
11014# qhasm: *(int128 *)(c + 1136) = xmm7
11015# asm 1: movdqa <xmm7=int6464#8,1136(<c=int64#1)
11016# asm 2: movdqa <xmm7=%xmm7,1136(<c=%rdi)
11017movdqa %xmm7,1136(%rdi)
11018
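# note: the new bitsliced round key is stored above at offset 1024
# (= 8*128, presumably round key 8), and the same complement/shuffle/
# substitute pattern now repeats to derive the next one.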
11019# qhasm: xmm0 ^= ONE
11020# asm 1: pxor ONE,<xmm0=int6464#1
11021# asm 2: pxor ONE,<xmm0=%xmm0
11022pxor ONE,%xmm0
11023
11024# qhasm: xmm1 ^= ONE
11025# asm 1: pxor ONE,<xmm1=int6464#2
11026# asm 2: pxor ONE,<xmm1=%xmm1
11027pxor ONE,%xmm1
11028
11029# qhasm: xmm5 ^= ONE
11030# asm 1: pxor ONE,<xmm5=int6464#6
11031# asm 2: pxor ONE,<xmm5=%xmm5
11032pxor ONE,%xmm5
11033
11034# qhasm: xmm6 ^= ONE
11035# asm 1: pxor ONE,<xmm6=int6464#7
11036# asm 2: pxor ONE,<xmm6=%xmm6
11037pxor ONE,%xmm6
11038
11039# qhasm: shuffle bytes of xmm0 by ROTB
11040# asm 1: pshufb ROTB,<xmm0=int6464#1
11041# asm 2: pshufb ROTB,<xmm0=%xmm0
11042pshufb ROTB,%xmm0
11043
11044# qhasm: shuffle bytes of xmm1 by ROTB
11045# asm 1: pshufb ROTB,<xmm1=int6464#2
11046# asm 2: pshufb ROTB,<xmm1=%xmm1
11047pshufb ROTB,%xmm1
11048
11049# qhasm: shuffle bytes of xmm2 by ROTB
11050# asm 1: pshufb ROTB,<xmm2=int6464#3
11051# asm 2: pshufb ROTB,<xmm2=%xmm2
11052pshufb ROTB,%xmm2
11053
11054# qhasm: shuffle bytes of xmm3 by ROTB
11055# asm 1: pshufb ROTB,<xmm3=int6464#4
11056# asm 2: pshufb ROTB,<xmm3=%xmm3
11057pshufb ROTB,%xmm3
11058
11059# qhasm: shuffle bytes of xmm4 by ROTB
11060# asm 1: pshufb ROTB,<xmm4=int6464#5
11061# asm 2: pshufb ROTB,<xmm4=%xmm4
11062pshufb ROTB,%xmm4
11063
11064# qhasm: shuffle bytes of xmm5 by ROTB
11065# asm 1: pshufb ROTB,<xmm5=int6464#6
11066# asm 2: pshufb ROTB,<xmm5=%xmm5
11067pshufb ROTB,%xmm5
11068
11069# qhasm: shuffle bytes of xmm6 by ROTB
11070# asm 1: pshufb ROTB,<xmm6=int6464#7
11071# asm 2: pshufb ROTB,<xmm6=%xmm6
11072pshufb ROTB,%xmm6
11073
11074# qhasm: shuffle bytes of xmm7 by ROTB
11075# asm 1: pshufb ROTB,<xmm7=int6464#8
11076# asm 2: pshufb ROTB,<xmm7=%xmm7
11077pshufb ROTB,%xmm7
11078
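# note: another instance of the bitsliced S-box network begins here,
# identical in structure to the one above but with the bit planes held
# in a different register permutation.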
11079# qhasm: xmm5 ^= xmm6
11080# asm 1: pxor <xmm6=int6464#7,<xmm5=int6464#6
11081# asm 2: pxor <xmm6=%xmm6,<xmm5=%xmm5
11082pxor %xmm6,%xmm5
11083
11084# qhasm: xmm2 ^= xmm1
11085# asm 1: pxor <xmm1=int6464#2,<xmm2=int6464#3
11086# asm 2: pxor <xmm1=%xmm1,<xmm2=%xmm2
11087pxor %xmm1,%xmm2
11088
11089# qhasm: xmm5 ^= xmm0
11090# asm 1: pxor <xmm0=int6464#1,<xmm5=int6464#6
11091# asm 2: pxor <xmm0=%xmm0,<xmm5=%xmm5
11092pxor %xmm0,%xmm5
11093
11094# qhasm: xmm6 ^= xmm2
11095# asm 1: pxor <xmm2=int6464#3,<xmm6=int6464#7
11096# asm 2: pxor <xmm2=%xmm2,<xmm6=%xmm6
11097pxor %xmm2,%xmm6
11098
11099# qhasm: xmm3 ^= xmm0
11100# asm 1: pxor <xmm0=int6464#1,<xmm3=int6464#4
11101# asm 2: pxor <xmm0=%xmm0,<xmm3=%xmm3
11102pxor %xmm0,%xmm3
11103
11104# qhasm: xmm6 ^= xmm3
11105# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
11106# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
11107pxor %xmm3,%xmm6
11108
11109# qhasm: xmm3 ^= xmm7
11110# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4
11111# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3
11112pxor %xmm7,%xmm3
11113
11114# qhasm: xmm3 ^= xmm4
11115# asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4
11116# asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3
11117pxor %xmm4,%xmm3
11118
11119# qhasm: xmm7 ^= xmm5
11120# asm 1: pxor <xmm5=int6464#6,<xmm7=int6464#8
11121# asm 2: pxor <xmm5=%xmm5,<xmm7=%xmm7
11122pxor %xmm5,%xmm7
11123
11124# qhasm: xmm3 ^= xmm1
11125# asm 1: pxor <xmm1=int6464#2,<xmm3=int6464#4
11126# asm 2: pxor <xmm1=%xmm1,<xmm3=%xmm3
11127pxor %xmm1,%xmm3
11128
11129# qhasm: xmm4 ^= xmm5
11130# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
11131# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
11132pxor %xmm5,%xmm4
11133
11134# qhasm: xmm2 ^= xmm7
11135# asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3
11136# asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2
11137pxor %xmm7,%xmm2
11138
11139# qhasm: xmm1 ^= xmm5
11140# asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2
11141# asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1
11142pxor %xmm5,%xmm1
11143
11144# qhasm: xmm11 = xmm7
11145# asm 1: movdqa <xmm7=int6464#8,>xmm11=int6464#9
11146# asm 2: movdqa <xmm7=%xmm7,>xmm11=%xmm8
11147movdqa %xmm7,%xmm8
11148
11149# qhasm: xmm10 = xmm1
11150# asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10
11151# asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9
11152movdqa %xmm1,%xmm9
11153
11154# qhasm: xmm9 = xmm5
11155# asm 1: movdqa <xmm5=int6464#6,>xmm9=int6464#11
11156# asm 2: movdqa <xmm5=%xmm5,>xmm9=%xmm10
11157movdqa %xmm5,%xmm10
11158
11159# qhasm: xmm13 = xmm2
11160# asm 1: movdqa <xmm2=int6464#3,>xmm13=int6464#12
11161# asm 2: movdqa <xmm2=%xmm2,>xmm13=%xmm11
11162movdqa %xmm2,%xmm11
11163
11164# qhasm: xmm12 = xmm6
11165# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#13
11166# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm12
11167movdqa %xmm6,%xmm12
11168
11169# qhasm: xmm11 ^= xmm4
11170# asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#9
11171# asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm8
11172pxor %xmm4,%xmm8
11173
11174# qhasm: xmm10 ^= xmm2
11175# asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#10
11176# asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm9
11177pxor %xmm2,%xmm9
11178
11179# qhasm: xmm9 ^= xmm3
11180# asm 1: pxor <xmm3=int6464#4,<xmm9=int6464#11
11181# asm 2: pxor <xmm3=%xmm3,<xmm9=%xmm10
11182pxor %xmm3,%xmm10
11183
11184# qhasm: xmm13 ^= xmm4
11185# asm 1: pxor <xmm4=int6464#5,<xmm13=int6464#12
11186# asm 2: pxor <xmm4=%xmm4,<xmm13=%xmm11
11187pxor %xmm4,%xmm11
11188
11189# qhasm: xmm12 ^= xmm0
11190# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13
11191# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12
11192pxor %xmm0,%xmm12
11193
11194# qhasm: xmm14 = xmm11
11195# asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14
11196# asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13
11197movdqa %xmm8,%xmm13
11198
11199# qhasm: xmm8 = xmm10
11200# asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15
11201# asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14
11202movdqa %xmm9,%xmm14
11203
11204# qhasm: xmm15 = xmm11
11205# asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16
11206# asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15
11207movdqa %xmm8,%xmm15
11208
11209# qhasm: xmm10 |= xmm9
11210# asm 1: por <xmm9=int6464#11,<xmm10=int6464#10
11211# asm 2: por <xmm9=%xmm10,<xmm10=%xmm9
11212por %xmm10,%xmm9
11213
11214# qhasm: xmm11 |= xmm12
11215# asm 1: por <xmm12=int6464#13,<xmm11=int6464#9
11216# asm 2: por <xmm12=%xmm12,<xmm11=%xmm8
11217por %xmm12,%xmm8
11218
11219# qhasm: xmm15 ^= xmm8
11220# asm 1: pxor <xmm8=int6464#15,<xmm15=int6464#16
11221# asm 2: pxor <xmm8=%xmm14,<xmm15=%xmm15
11222pxor %xmm14,%xmm15
11223
11224# qhasm: xmm14 &= xmm12
11225# asm 1: pand <xmm12=int6464#13,<xmm14=int6464#14
11226# asm 2: pand <xmm12=%xmm12,<xmm14=%xmm13
11227pand %xmm12,%xmm13
11228
11229# qhasm: xmm8 &= xmm9
11230# asm 1: pand <xmm9=int6464#11,<xmm8=int6464#15
11231# asm 2: pand <xmm9=%xmm10,<xmm8=%xmm14
11232pand %xmm10,%xmm14
11233
11234# qhasm: xmm12 ^= xmm9
11235# asm 1: pxor <xmm9=int6464#11,<xmm12=int6464#13
11236# asm 2: pxor <xmm9=%xmm10,<xmm12=%xmm12
11237pxor %xmm10,%xmm12
11238
11239# qhasm: xmm15 &= xmm12
11240# asm 1: pand <xmm12=int6464#13,<xmm15=int6464#16
11241# asm 2: pand <xmm12=%xmm12,<xmm15=%xmm15
11242pand %xmm12,%xmm15
11243
11244# qhasm: xmm12 = xmm3
11245# asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#11
11246# asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm10
11247movdqa %xmm3,%xmm10
11248
11249# qhasm: xmm12 ^= xmm0
11250# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#11
11251# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm10
11252pxor %xmm0,%xmm10
11253
11254# qhasm: xmm13 &= xmm12
11255# asm 1: pand <xmm12=int6464#11,<xmm13=int6464#12
11256# asm 2: pand <xmm12=%xmm10,<xmm13=%xmm11
11257pand %xmm10,%xmm11
11258
11259# qhasm: xmm11 ^= xmm13
11260# asm 1: pxor <xmm13=int6464#12,<xmm11=int6464#9
11261# asm 2: pxor <xmm13=%xmm11,<xmm11=%xmm8
11262pxor %xmm11,%xmm8
11263
11264# qhasm: xmm10 ^= xmm13
11265# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
11266# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
11267pxor %xmm11,%xmm9
11268
11269# qhasm: xmm13 = xmm7
11270# asm 1: movdqa <xmm7=int6464#8,>xmm13=int6464#11
11271# asm 2: movdqa <xmm7=%xmm7,>xmm13=%xmm10
11272movdqa %xmm7,%xmm10
11273
11274# qhasm: xmm13 ^= xmm1
11275# asm 1: pxor <xmm1=int6464#2,<xmm13=int6464#11
11276# asm 2: pxor <xmm1=%xmm1,<xmm13=%xmm10
11277pxor %xmm1,%xmm10
11278
11279# qhasm: xmm12 = xmm5
11280# asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#12
11281# asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm11
11282movdqa %xmm5,%xmm11
11283
11284# qhasm: xmm9 = xmm13
11285# asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13
11286# asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12
11287movdqa %xmm10,%xmm12
11288
11289# qhasm: xmm12 ^= xmm6
11290# asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#12
11291# asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm11
11292pxor %xmm6,%xmm11
11293
11294# qhasm: xmm9 |= xmm12
11295# asm 1: por <xmm12=int6464#12,<xmm9=int6464#13
11296# asm 2: por <xmm12=%xmm11,<xmm9=%xmm12
11297por %xmm11,%xmm12
11298
11299# qhasm: xmm13 &= xmm12
11300# asm 1: pand <xmm12=int6464#12,<xmm13=int6464#11
11301# asm 2: pand <xmm12=%xmm11,<xmm13=%xmm10
11302pand %xmm11,%xmm10
11303
11304# qhasm: xmm8 ^= xmm13
11305# asm 1: pxor <xmm13=int6464#11,<xmm8=int6464#15
11306# asm 2: pxor <xmm13=%xmm10,<xmm8=%xmm14
11307pxor %xmm10,%xmm14
11308
11309# qhasm: xmm11 ^= xmm15
11310# asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#9
11311# asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm8
11312pxor %xmm15,%xmm8
11313
11314# qhasm: xmm10 ^= xmm14
11315# asm 1: pxor <xmm14=int6464#14,<xmm10=int6464#10
11316# asm 2: pxor <xmm14=%xmm13,<xmm10=%xmm9
11317pxor %xmm13,%xmm9
11318
11319# qhasm: xmm9 ^= xmm15
11320# asm 1: pxor <xmm15=int6464#16,<xmm9=int6464#13
11321# asm 2: pxor <xmm15=%xmm15,<xmm9=%xmm12
11322pxor %xmm15,%xmm12
11323
11324# qhasm: xmm8 ^= xmm14
11325# asm 1: pxor <xmm14=int6464#14,<xmm8=int6464#15
11326# asm 2: pxor <xmm14=%xmm13,<xmm8=%xmm14
11327pxor %xmm13,%xmm14
11328
11329# qhasm: xmm9 ^= xmm14
11330# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
11331# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
11332pxor %xmm13,%xmm12
11333
11334# qhasm: xmm12 = xmm2
11335# asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#11
11336# asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm10
11337movdqa %xmm2,%xmm10
11338
11339# qhasm: xmm13 = xmm4
11340# asm 1: movdqa <xmm4=int6464#5,>xmm13=int6464#12
11341# asm 2: movdqa <xmm4=%xmm4,>xmm13=%xmm11
11342movdqa %xmm4,%xmm11
11343
11344# qhasm: xmm14 = xmm1
11345# asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14
11346# asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13
11347movdqa %xmm1,%xmm13
11348
11349# qhasm: xmm15 = xmm7
11350# asm 1: movdqa <xmm7=int6464#8,>xmm15=int6464#16
11351# asm 2: movdqa <xmm7=%xmm7,>xmm15=%xmm15
11352movdqa %xmm7,%xmm15
11353
11354# qhasm: xmm12 &= xmm3
11355# asm 1: pand <xmm3=int6464#4,<xmm12=int6464#11
11356# asm 2: pand <xmm3=%xmm3,<xmm12=%xmm10
11357pand %xmm3,%xmm10
11358
11359# qhasm: xmm13 &= xmm0
11360# asm 1: pand <xmm0=int6464#1,<xmm13=int6464#12
11361# asm 2: pand <xmm0=%xmm0,<xmm13=%xmm11
11362pand %xmm0,%xmm11
11363
11364# qhasm: xmm14 &= xmm5
11365# asm 1: pand <xmm5=int6464#6,<xmm14=int6464#14
11366# asm 2: pand <xmm5=%xmm5,<xmm14=%xmm13
11367pand %xmm5,%xmm13
11368
11369# qhasm: xmm15 |= xmm6
11370# asm 1: por <xmm6=int6464#7,<xmm15=int6464#16
11371# asm 2: por <xmm6=%xmm6,<xmm15=%xmm15
11372por %xmm6,%xmm15
11373
11374# qhasm: xmm11 ^= xmm12
11375# asm 1: pxor <xmm12=int6464#11,<xmm11=int6464#9
11376# asm 2: pxor <xmm12=%xmm10,<xmm11=%xmm8
11377pxor %xmm10,%xmm8
11378
11379# qhasm: xmm10 ^= xmm13
11380# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
11381# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
11382pxor %xmm11,%xmm9
11383
11384# qhasm: xmm9 ^= xmm14
11385# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
11386# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
11387pxor %xmm13,%xmm12
11388
11389# qhasm: xmm8 ^= xmm15
11390# asm 1: pxor <xmm15=int6464#16,<xmm8=int6464#15
11391# asm 2: pxor <xmm15=%xmm15,<xmm8=%xmm14
11392pxor %xmm15,%xmm14
11393
11394# qhasm: xmm12 = xmm11
11395# asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11
11396# asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10
11397movdqa %xmm8,%xmm10
11398
11399# qhasm: xmm12 ^= xmm10
11400# asm 1: pxor <xmm10=int6464#10,<xmm12=int6464#11
11401# asm 2: pxor <xmm10=%xmm9,<xmm12=%xmm10
11402pxor %xmm9,%xmm10
11403
11404# qhasm: xmm11 &= xmm9
11405# asm 1: pand <xmm9=int6464#13,<xmm11=int6464#9
11406# asm 2: pand <xmm9=%xmm12,<xmm11=%xmm8
11407pand %xmm12,%xmm8
11408
11409# qhasm: xmm14 = xmm8
11410# asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12
11411# asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11
11412movdqa %xmm14,%xmm11
11413
11414# qhasm: xmm14 ^= xmm11
11415# asm 1: pxor <xmm11=int6464#9,<xmm14=int6464#12
11416# asm 2: pxor <xmm11=%xmm8,<xmm14=%xmm11
11417pxor %xmm8,%xmm11
11418
11419# qhasm: xmm15 = xmm12
11420# asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14
11421# asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13
11422movdqa %xmm10,%xmm13
11423
11424# qhasm: xmm15 &= xmm14
11425# asm 1: pand <xmm14=int6464#12,<xmm15=int6464#14
11426# asm 2: pand <xmm14=%xmm11,<xmm15=%xmm13
11427pand %xmm11,%xmm13
11428
11429# qhasm: xmm15 ^= xmm10
11430# asm 1: pxor <xmm10=int6464#10,<xmm15=int6464#14
11431# asm 2: pxor <xmm10=%xmm9,<xmm15=%xmm13
11432pxor %xmm9,%xmm13
11433
11434# qhasm: xmm13 = xmm9
11435# asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16
11436# asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15
11437movdqa %xmm12,%xmm15
11438
11439# qhasm: xmm13 ^= xmm8
11440# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
11441# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
11442pxor %xmm14,%xmm15
11443
11444# qhasm: xmm11 ^= xmm10
11445# asm 1: pxor <xmm10=int6464#10,<xmm11=int6464#9
11446# asm 2: pxor <xmm10=%xmm9,<xmm11=%xmm8
11447pxor %xmm9,%xmm8
11448
11449# qhasm: xmm13 &= xmm11
11450# asm 1: pand <xmm11=int6464#9,<xmm13=int6464#16
11451# asm 2: pand <xmm11=%xmm8,<xmm13=%xmm15
11452pand %xmm8,%xmm15
11453
11454# qhasm: xmm13 ^= xmm8
11455# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
11456# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
11457pxor %xmm14,%xmm15
11458
11459# qhasm: xmm9 ^= xmm13
11460# asm 1: pxor <xmm13=int6464#16,<xmm9=int6464#13
11461# asm 2: pxor <xmm13=%xmm15,<xmm9=%xmm12
11462pxor %xmm15,%xmm12
11463
11464# qhasm: xmm10 = xmm14
11465# asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9
11466# asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8
11467movdqa %xmm11,%xmm8
11468
11469# qhasm: xmm10 ^= xmm13
11470# asm 1: pxor <xmm13=int6464#16,<xmm10=int6464#9
11471# asm 2: pxor <xmm13=%xmm15,<xmm10=%xmm8
11472pxor %xmm15,%xmm8
11473
11474# qhasm: xmm10 &= xmm8
11475# asm 1: pand <xmm8=int6464#15,<xmm10=int6464#9
11476# asm 2: pand <xmm8=%xmm14,<xmm10=%xmm8
11477pand %xmm14,%xmm8
11478
11479# qhasm: xmm9 ^= xmm10
11480# asm 1: pxor <xmm10=int6464#9,<xmm9=int6464#13
11481# asm 2: pxor <xmm10=%xmm8,<xmm9=%xmm12
11482pxor %xmm8,%xmm12
11483
11484# qhasm: xmm14 ^= xmm10
11485# asm 1: pxor <xmm10=int6464#9,<xmm14=int6464#12
11486# asm 2: pxor <xmm10=%xmm8,<xmm14=%xmm11
11487pxor %xmm8,%xmm11
11488
11489# qhasm: xmm14 &= xmm15
11490# asm 1: pand <xmm15=int6464#14,<xmm14=int6464#12
11491# asm 2: pand <xmm15=%xmm13,<xmm14=%xmm11
11492pand %xmm13,%xmm11
11493
11494# qhasm: xmm14 ^= xmm12
11495# asm 1: pxor <xmm12=int6464#11,<xmm14=int6464#12
11496# asm 2: pxor <xmm12=%xmm10,<xmm14=%xmm11
11497pxor %xmm10,%xmm11
11498
11499# qhasm: xmm12 = xmm6
11500# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#9
11501# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm8
11502movdqa %xmm6,%xmm8
11503
11504# qhasm: xmm8 = xmm5
11505# asm 1: movdqa <xmm5=int6464#6,>xmm8=int6464#10
11506# asm 2: movdqa <xmm5=%xmm5,>xmm8=%xmm9
11507movdqa %xmm5,%xmm9
11508
11509# qhasm: xmm10 = xmm15
11510# asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11
11511# asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10
11512movdqa %xmm13,%xmm10
11513
11514# qhasm: xmm10 ^= xmm14
11515# asm 1: pxor <xmm14=int6464#12,<xmm10=int6464#11
11516# asm 2: pxor <xmm14=%xmm11,<xmm10=%xmm10
11517pxor %xmm11,%xmm10
11518
11519# qhasm: xmm10 &= xmm6
11520# asm 1: pand <xmm6=int6464#7,<xmm10=int6464#11
11521# asm 2: pand <xmm6=%xmm6,<xmm10=%xmm10
11522pand %xmm6,%xmm10
11523
11524# qhasm: xmm6 ^= xmm5
11525# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
11526# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
11527pxor %xmm5,%xmm6
11528
11529# qhasm: xmm6 &= xmm14
11530# asm 1: pand <xmm14=int6464#12,<xmm6=int6464#7
11531# asm 2: pand <xmm14=%xmm11,<xmm6=%xmm6
11532pand %xmm11,%xmm6
11533
11534# qhasm: xmm5 &= xmm15
11535# asm 1: pand <xmm15=int6464#14,<xmm5=int6464#6
11536# asm 2: pand <xmm15=%xmm13,<xmm5=%xmm5
11537pand %xmm13,%xmm5
11538
11539# qhasm: xmm6 ^= xmm5
11540# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
11541# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
11542pxor %xmm5,%xmm6
11543
11544# qhasm: xmm5 ^= xmm10
11545# asm 1: pxor <xmm10=int6464#11,<xmm5=int6464#6
11546# asm 2: pxor <xmm10=%xmm10,<xmm5=%xmm5
11547pxor %xmm10,%xmm5
11548
11549# qhasm: xmm12 ^= xmm0
11550# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#9
11551# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm8
11552pxor %xmm0,%xmm8
11553
11554# qhasm: xmm8 ^= xmm3
11555# asm 1: pxor <xmm3=int6464#4,<xmm8=int6464#10
11556# asm 2: pxor <xmm3=%xmm3,<xmm8=%xmm9
11557pxor %xmm3,%xmm9
11558
11559# qhasm: xmm15 ^= xmm13
11560# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
11561# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
11562pxor %xmm15,%xmm13
11563
11564# qhasm: xmm14 ^= xmm9
11565# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
11566# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
11567pxor %xmm12,%xmm11
11568
11569# qhasm: xmm11 = xmm15
11570# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
11571# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
11572movdqa %xmm13,%xmm10
11573
11574# qhasm: xmm11 ^= xmm14
11575# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
11576# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
11577pxor %xmm11,%xmm10
11578
11579# qhasm: xmm11 &= xmm12
11580# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
11581# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
11582pand %xmm8,%xmm10
11583
11584# qhasm: xmm12 ^= xmm8
11585# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
11586# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
11587pxor %xmm9,%xmm8
11588
11589# qhasm: xmm12 &= xmm14
11590# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
11591# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
11592pand %xmm11,%xmm8
11593
11594# qhasm: xmm8 &= xmm15
11595# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
11596# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
11597pand %xmm13,%xmm9
11598
11599# qhasm: xmm8 ^= xmm12
11600# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
11601# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
11602pxor %xmm8,%xmm9
11603
11604# qhasm: xmm12 ^= xmm11
11605# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
11606# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
11607pxor %xmm10,%xmm8
11608
11609# qhasm: xmm10 = xmm13
11610# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
11611# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
11612movdqa %xmm15,%xmm10
11613
11614# qhasm: xmm10 ^= xmm9
11615# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
11616# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
11617pxor %xmm12,%xmm10
11618
11619# qhasm: xmm10 &= xmm0
11620# asm 1: pand <xmm0=int6464#1,<xmm10=int6464#11
11621# asm 2: pand <xmm0=%xmm0,<xmm10=%xmm10
11622pand %xmm0,%xmm10
11623
11624# qhasm: xmm0 ^= xmm3
11625# asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1
11626# asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0
11627pxor %xmm3,%xmm0
11628
11629# qhasm: xmm0 &= xmm9
11630# asm 1: pand <xmm9=int6464#13,<xmm0=int6464#1
11631# asm 2: pand <xmm9=%xmm12,<xmm0=%xmm0
11632pand %xmm12,%xmm0
11633
11634# qhasm: xmm3 &= xmm13
11635# asm 1: pand <xmm13=int6464#16,<xmm3=int6464#4
11636# asm 2: pand <xmm13=%xmm15,<xmm3=%xmm3
11637pand %xmm15,%xmm3
11638
11639# qhasm: xmm0 ^= xmm3
11640# asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1
11641# asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0
11642pxor %xmm3,%xmm0
11643
11644# qhasm: xmm3 ^= xmm10
11645# asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4
11646# asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3
11647pxor %xmm10,%xmm3
11648
11649# qhasm: xmm6 ^= xmm12
11650# asm 1: pxor <xmm12=int6464#9,<xmm6=int6464#7
11651# asm 2: pxor <xmm12=%xmm8,<xmm6=%xmm6
11652pxor %xmm8,%xmm6
11653
11654# qhasm: xmm0 ^= xmm12
11655# asm 1: pxor <xmm12=int6464#9,<xmm0=int6464#1
11656# asm 2: pxor <xmm12=%xmm8,<xmm0=%xmm0
11657pxor %xmm8,%xmm0
11658
11659# qhasm: xmm5 ^= xmm8
11660# asm 1: pxor <xmm8=int6464#10,<xmm5=int6464#6
11661# asm 2: pxor <xmm8=%xmm9,<xmm5=%xmm5
11662pxor %xmm9,%xmm5
11663
11664# qhasm: xmm3 ^= xmm8
11665# asm 1: pxor <xmm8=int6464#10,<xmm3=int6464#4
11666# asm 2: pxor <xmm8=%xmm9,<xmm3=%xmm3
11667pxor %xmm9,%xmm3
11668
11669# qhasm: xmm12 = xmm7
11670# asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#9
11671# asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm8
11672movdqa %xmm7,%xmm8
11673
11674# qhasm: xmm8 = xmm1
11675# asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10
11676# asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9
11677movdqa %xmm1,%xmm9
11678
11679# qhasm: xmm12 ^= xmm4
11680# asm 1: pxor <xmm4=int6464#5,<xmm12=int6464#9
11681# asm 2: pxor <xmm4=%xmm4,<xmm12=%xmm8
11682pxor %xmm4,%xmm8
11683
11684# qhasm: xmm8 ^= xmm2
11685# asm 1: pxor <xmm2=int6464#3,<xmm8=int6464#10
11686# asm 2: pxor <xmm2=%xmm2,<xmm8=%xmm9
11687pxor %xmm2,%xmm9
11688
11689# qhasm: xmm11 = xmm15
11690# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
11691# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
11692movdqa %xmm13,%xmm10
11693
11694# qhasm: xmm11 ^= xmm14
11695# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
11696# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
11697pxor %xmm11,%xmm10
11698
11699# qhasm: xmm11 &= xmm12
11700# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
11701# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
11702pand %xmm8,%xmm10
11703
11704# qhasm: xmm12 ^= xmm8
11705# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
11706# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
11707pxor %xmm9,%xmm8
11708
11709# qhasm: xmm12 &= xmm14
11710# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
11711# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
11712pand %xmm11,%xmm8
11713
11714# qhasm: xmm8 &= xmm15
11715# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
11716# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
11717pand %xmm13,%xmm9
11718
11719# qhasm: xmm8 ^= xmm12
11720# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
11721# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
11722pxor %xmm8,%xmm9
11723
11724# qhasm: xmm12 ^= xmm11
11725# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
11726# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
11727pxor %xmm10,%xmm8
11728
11729# qhasm: xmm10 = xmm13
11730# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
11731# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
11732movdqa %xmm15,%xmm10
11733
11734# qhasm: xmm10 ^= xmm9
11735# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
11736# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
11737pxor %xmm12,%xmm10
11738
11739# qhasm: xmm10 &= xmm4
11740# asm 1: pand <xmm4=int6464#5,<xmm10=int6464#11
11741# asm 2: pand <xmm4=%xmm4,<xmm10=%xmm10
11742pand %xmm4,%xmm10
11743
11744# qhasm: xmm4 ^= xmm2
11745# asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5
11746# asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4
11747pxor %xmm2,%xmm4
11748
11749# qhasm: xmm4 &= xmm9
11750# asm 1: pand <xmm9=int6464#13,<xmm4=int6464#5
11751# asm 2: pand <xmm9=%xmm12,<xmm4=%xmm4
11752pand %xmm12,%xmm4
11753
11754# qhasm: xmm2 &= xmm13
11755# asm 1: pand <xmm13=int6464#16,<xmm2=int6464#3
11756# asm 2: pand <xmm13=%xmm15,<xmm2=%xmm2
11757pand %xmm15,%xmm2
11758
11759# qhasm: xmm4 ^= xmm2
11760# asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5
11761# asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4
11762pxor %xmm2,%xmm4
11763
11764# qhasm: xmm2 ^= xmm10
11765# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3
11766# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2
11767pxor %xmm10,%xmm2
11768
11769# qhasm: xmm15 ^= xmm13
11770# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
11771# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
11772pxor %xmm15,%xmm13
11773
11774# qhasm: xmm14 ^= xmm9
11775# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
11776# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
11777pxor %xmm12,%xmm11
11778
11779# qhasm: xmm11 = xmm15
11780# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
11781# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
11782movdqa %xmm13,%xmm10
11783
11784# qhasm: xmm11 ^= xmm14
11785# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
11786# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
11787pxor %xmm11,%xmm10
11788
11789# qhasm: xmm11 &= xmm7
11790# asm 1: pand <xmm7=int6464#8,<xmm11=int6464#11
11791# asm 2: pand <xmm7=%xmm7,<xmm11=%xmm10
11792pand %xmm7,%xmm10
11793
11794# qhasm: xmm7 ^= xmm1
11795# asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8
11796# asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7
11797pxor %xmm1,%xmm7
11798
11799# qhasm: xmm7 &= xmm14
11800# asm 1: pand <xmm14=int6464#12,<xmm7=int6464#8
11801# asm 2: pand <xmm14=%xmm11,<xmm7=%xmm7
11802pand %xmm11,%xmm7
11803
11804# qhasm: xmm1 &= xmm15
11805# asm 1: pand <xmm15=int6464#14,<xmm1=int6464#2
11806# asm 2: pand <xmm15=%xmm13,<xmm1=%xmm1
11807pand %xmm13,%xmm1
11808
11809# qhasm: xmm7 ^= xmm1
11810# asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8
11811# asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7
11812pxor %xmm1,%xmm7
11813
11814# qhasm: xmm1 ^= xmm11
11815# asm 1: pxor <xmm11=int6464#11,<xmm1=int6464#2
11816# asm 2: pxor <xmm11=%xmm10,<xmm1=%xmm1
11817pxor %xmm10,%xmm1
11818
11819# qhasm: xmm7 ^= xmm12
11820# asm 1: pxor <xmm12=int6464#9,<xmm7=int6464#8
11821# asm 2: pxor <xmm12=%xmm8,<xmm7=%xmm7
11822pxor %xmm8,%xmm7
11823
11824# qhasm: xmm4 ^= xmm12
11825# asm 1: pxor <xmm12=int6464#9,<xmm4=int6464#5
11826# asm 2: pxor <xmm12=%xmm8,<xmm4=%xmm4
11827pxor %xmm8,%xmm4
11828
11829# qhasm: xmm1 ^= xmm8
11830# asm 1: pxor <xmm8=int6464#10,<xmm1=int6464#2
11831# asm 2: pxor <xmm8=%xmm9,<xmm1=%xmm1
11832pxor %xmm9,%xmm1
11833
11834# qhasm: xmm2 ^= xmm8
11835# asm 1: pxor <xmm8=int6464#10,<xmm2=int6464#3
11836# asm 2: pxor <xmm8=%xmm9,<xmm2=%xmm2
11837pxor %xmm9,%xmm2
11838
11839# qhasm: xmm7 ^= xmm0
11840# asm 1: pxor <xmm0=int6464#1,<xmm7=int6464#8
11841# asm 2: pxor <xmm0=%xmm0,<xmm7=%xmm7
11842pxor %xmm0,%xmm7
11843
11844# qhasm: xmm1 ^= xmm6
11845# asm 1: pxor <xmm6=int6464#7,<xmm1=int6464#2
11846# asm 2: pxor <xmm6=%xmm6,<xmm1=%xmm1
11847pxor %xmm6,%xmm1
11848
11849# qhasm: xmm4 ^= xmm7
11850# asm 1: pxor <xmm7=int6464#8,<xmm4=int6464#5
11851# asm 2: pxor <xmm7=%xmm7,<xmm4=%xmm4
11852pxor %xmm7,%xmm4
11853
11854# qhasm: xmm6 ^= xmm0
11855# asm 1: pxor <xmm0=int6464#1,<xmm6=int6464#7
11856# asm 2: pxor <xmm0=%xmm0,<xmm6=%xmm6
11857pxor %xmm0,%xmm6
11858
11859# qhasm: xmm0 ^= xmm1
11860# asm 1: pxor <xmm1=int6464#2,<xmm0=int6464#1
11861# asm 2: pxor <xmm1=%xmm1,<xmm0=%xmm0
11862pxor %xmm1,%xmm0
11863
11864# qhasm: xmm1 ^= xmm5
11865# asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2
11866# asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1
11867pxor %xmm5,%xmm1
11868
11869# qhasm: xmm5 ^= xmm2
11870# asm 1: pxor <xmm2=int6464#3,<xmm5=int6464#6
11871# asm 2: pxor <xmm2=%xmm2,<xmm5=%xmm5
11872pxor %xmm2,%xmm5
11873
11874# qhasm: xmm4 ^= xmm5
11875# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
11876# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
11877pxor %xmm5,%xmm4
11878
11879# qhasm: xmm2 ^= xmm3
11880# asm 1: pxor <xmm3=int6464#4,<xmm2=int6464#3
11881# asm 2: pxor <xmm3=%xmm3,<xmm2=%xmm2
11882pxor %xmm3,%xmm2
11883
11884# qhasm: xmm3 ^= xmm5
11885# asm 1: pxor <xmm5=int6464#6,<xmm3=int6464#4
11886# asm 2: pxor <xmm5=%xmm5,<xmm3=%xmm3
11887pxor %xmm5,%xmm3
11888
11889# qhasm: xmm6 ^= xmm3
11890# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
11891# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
11892pxor %xmm3,%xmm6
11893
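# note: the four pxor-with-RCON instructions below fold in the AES round
# constant in bitsliced form. RCON (0,0,0,0xffffffff) affects only the top
# 32-bit lane, presumably the word position that feeds the key-schedule
# core, and only the bit slices matching the set bits of this round's rcon
# byte are complemented, which is why exactly four slices are touched.
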
11894# qhasm: xmm0 ^= RCON
11895# asm 1: pxor RCON,<xmm0=int6464#1
11896# asm 2: pxor RCON,<xmm0=%xmm0
11897pxor RCON,%xmm0
11898
11899# qhasm: xmm1 ^= RCON
11900# asm 1: pxor RCON,<xmm1=int6464#2
11901# asm 2: pxor RCON,<xmm1=%xmm1
11902pxor RCON,%xmm1
11903
11904# qhasm: xmm6 ^= RCON
11905# asm 1: pxor RCON,<xmm6=int6464#7
11906# asm 2: pxor RCON,<xmm6=%xmm6
11907pxor RCON,%xmm6
11908
11909# qhasm: xmm3 ^= RCON
11910# asm 1: pxor RCON,<xmm3=int6464#4
11911# asm 2: pxor RCON,<xmm3=%xmm3
11912pxor RCON,%xmm3
11913
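# note: pshufb with EXPB0 (byte indices 3,3,3,3,7,7,7,7,...) broadcasts the
# top byte of each 32-bit word across that word in every slice; this appears
# to play the role of the RotWord/byte-selection step of the classical key
# schedule, applied to all bit slices at once.
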
11914# qhasm: shuffle bytes of xmm0 by EXPB0
11915# asm 1: pshufb EXPB0,<xmm0=int6464#1
11916# asm 2: pshufb EXPB0,<xmm0=%xmm0
11917pshufb EXPB0,%xmm0
11918
11919# qhasm: shuffle bytes of xmm1 by EXPB0
11920# asm 1: pshufb EXPB0,<xmm1=int6464#2
11921# asm 2: pshufb EXPB0,<xmm1=%xmm1
11922pshufb EXPB0,%xmm1
11923
11924# qhasm: shuffle bytes of xmm4 by EXPB0
11925# asm 1: pshufb EXPB0,<xmm4=int6464#5
11926# asm 2: pshufb EXPB0,<xmm4=%xmm4
11927pshufb EXPB0,%xmm4
11928
11929# qhasm: shuffle bytes of xmm6 by EXPB0
11930# asm 1: pshufb EXPB0,<xmm6=int6464#7
11931# asm 2: pshufb EXPB0,<xmm6=%xmm6
11932pshufb EXPB0,%xmm6
11933
11934# qhasm: shuffle bytes of xmm3 by EXPB0
11935# asm 1: pshufb EXPB0,<xmm3=int6464#4
11936# asm 2: pshufb EXPB0,<xmm3=%xmm3
11937pshufb EXPB0,%xmm3
11938
11939# qhasm: shuffle bytes of xmm7 by EXPB0
11940# asm 1: pshufb EXPB0,<xmm7=int6464#8
11941# asm 2: pshufb EXPB0,<xmm7=%xmm7
11942pshufb EXPB0,%xmm7
11943
11944# qhasm: shuffle bytes of xmm2 by EXPB0
11945# asm 1: pshufb EXPB0,<xmm2=int6464#3
11946# asm 2: pshufb EXPB0,<xmm2=%xmm2
11947pshufb EXPB0,%xmm2
11948
11949# qhasm: shuffle bytes of xmm5 by EXPB0
11950# asm 1: pshufb EXPB0,<xmm5=int6464#6
11951# asm 2: pshufb EXPB0,<xmm5=%xmm5
11952pshufb EXPB0,%xmm5
11953
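# note: a bitsliced round key occupies 8 slices * 16 bytes = 128 bytes, so
# the loads below at offsets 1024..1136 fetch the previous round key
# (1024 = 8*128) and the stores further down at 1152..1264 write the new
# one (1152 = 9*128).
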
11954# qhasm: xmm8 = *(int128 *)(c + 1024)
11955# asm 1: movdqa 1024(<c=int64#1),>xmm8=int6464#9
11956# asm 2: movdqa 1024(<c=%rdi),>xmm8=%xmm8
11957movdqa 1024(%rdi),%xmm8
11958
11959# qhasm: xmm9 = *(int128 *)(c + 1040)
11960# asm 1: movdqa 1040(<c=int64#1),>xmm9=int6464#10
11961# asm 2: movdqa 1040(<c=%rdi),>xmm9=%xmm9
11962movdqa 1040(%rdi),%xmm9
11963
11964# qhasm: xmm10 = *(int128 *)(c + 1056)
11965# asm 1: movdqa 1056(<c=int64#1),>xmm10=int6464#11
11966# asm 2: movdqa 1056(<c=%rdi),>xmm10=%xmm10
11967movdqa 1056(%rdi),%xmm10
11968
11969# qhasm: xmm11 = *(int128 *)(c + 1072)
11970# asm 1: movdqa 1072(<c=int64#1),>xmm11=int6464#12
11971# asm 2: movdqa 1072(<c=%rdi),>xmm11=%xmm11
11972movdqa 1072(%rdi),%xmm11
11973
11974# qhasm: xmm12 = *(int128 *)(c + 1088)
11975# asm 1: movdqa 1088(<c=int64#1),>xmm12=int6464#13
11976# asm 2: movdqa 1088(<c=%rdi),>xmm12=%xmm12
11977movdqa 1088(%rdi),%xmm12
11978
11979# qhasm: xmm13 = *(int128 *)(c + 1104)
11980# asm 1: movdqa 1104(<c=int64#1),>xmm13=int6464#14
11981# asm 2: movdqa 1104(<c=%rdi),>xmm13=%xmm13
11982movdqa 1104(%rdi),%xmm13
11983
11984# qhasm: xmm14 = *(int128 *)(c + 1120)
11985# asm 1: movdqa 1120(<c=int64#1),>xmm14=int6464#15
11986# asm 2: movdqa 1120(<c=%rdi),>xmm14=%xmm14
11987movdqa 1120(%rdi),%xmm14
11988
11989# qhasm: xmm15 = *(int128 *)(c + 1136)
11990# asm 1: movdqa 1136(<c=int64#1),>xmm15=int6464#16
11991# asm 2: movdqa 1136(<c=%rdi),>xmm15=%xmm15
11992movdqa 1136(%rdi),%xmm15
11993
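# note: slices 0, 1, 5 and 6 appear to be kept complemented in memory; these
# are exactly the set bits of 0x63, the additive constant of the S-box affine
# map, so storing them inverted saves the final NOT gates in the S-box
# circuit. The pxor-with-ONE instructions below undo that complement before
# the key material is combined.
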
11994# qhasm: xmm8 ^= ONE
11995# asm 1: pxor ONE,<xmm8=int6464#9
11996# asm 2: pxor ONE,<xmm8=%xmm8
11997pxor ONE,%xmm8
11998
11999# qhasm: xmm9 ^= ONE
12000# asm 1: pxor ONE,<xmm9=int6464#10
12001# asm 2: pxor ONE,<xmm9=%xmm9
12002pxor ONE,%xmm9
12003
12004# qhasm: xmm13 ^= ONE
12005# asm 1: pxor ONE,<xmm13=int6464#14
12006# asm 2: pxor ONE,<xmm13=%xmm13
12007pxor ONE,%xmm13
12008
12009# qhasm: xmm14 ^= ONE
12010# asm 1: pxor ONE,<xmm14=int6464#15
12011# asm 2: pxor ONE,<xmm14=%xmm14
12012pxor ONE,%xmm14
12013
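# note: the pxor block below, together with the three psrld/pxor passes that
# follow, computes the running XOR of the key-schedule recurrence. A minimal
# C sketch of the per-lane effect (function name hypothetical; the byte order
# arranged by the earlier shuffles is assumed):
#
#     uint32_t expand_lane(uint32_t x, uint32_t k)
#     {   /* x: SubWord/rcon material, k: previous round-key lane */
#         x ^= k;             /* w'[0] = temp ^ w[0]              */
#         k >>= 8; x ^= k;    /* fold the next byte of k ...      */
#         k >>= 8; x ^= k;
#         k >>= 8; x ^= k;    /* ... so each byte of x becomes a  */
#         return x;           /* prefix XOR: w'[j] = w'[j-1]^w[j] */
#     }
#
# In the sliced layout the same fold runs on all eight slices in parallel,
# 32 bits at a time, via psrld $8 and pxor.
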
12014# qhasm: xmm0 ^= xmm8
12015# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
12016# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
12017pxor %xmm8,%xmm0
12018
12019# qhasm: xmm1 ^= xmm9
12020# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
12021# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
12022pxor %xmm9,%xmm1
12023
12024# qhasm: xmm4 ^= xmm10
12025# asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5
12026# asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4
12027pxor %xmm10,%xmm4
12028
12029# qhasm: xmm6 ^= xmm11
12030# asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7
12031# asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6
12032pxor %xmm11,%xmm6
12033
12034# qhasm: xmm3 ^= xmm12
12035# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4
12036# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3
12037pxor %xmm12,%xmm3
12038
12039# qhasm: xmm7 ^= xmm13
12040# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
12041# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
12042pxor %xmm13,%xmm7
12043
12044# qhasm: xmm2 ^= xmm14
12045# asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3
12046# asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2
12047pxor %xmm14,%xmm2
12048
12049# qhasm: xmm5 ^= xmm15
12050# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
12051# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
12052pxor %xmm15,%xmm5
12053
12054# qhasm: uint32323232 xmm8 >>= 8
12055# asm 1: psrld $8,<xmm8=int6464#9
12056# asm 2: psrld $8,<xmm8=%xmm8
12057psrld $8,%xmm8
12058
12059# qhasm: uint32323232 xmm9 >>= 8
12060# asm 1: psrld $8,<xmm9=int6464#10
12061# asm 2: psrld $8,<xmm9=%xmm9
12062psrld $8,%xmm9
12063
12064# qhasm: uint32323232 xmm10 >>= 8
12065# asm 1: psrld $8,<xmm10=int6464#11
12066# asm 2: psrld $8,<xmm10=%xmm10
12067psrld $8,%xmm10
12068
12069# qhasm: uint32323232 xmm11 >>= 8
12070# asm 1: psrld $8,<xmm11=int6464#12
12071# asm 2: psrld $8,<xmm11=%xmm11
12072psrld $8,%xmm11
12073
12074# qhasm: uint32323232 xmm12 >>= 8
12075# asm 1: psrld $8,<xmm12=int6464#13
12076# asm 2: psrld $8,<xmm12=%xmm12
12077psrld $8,%xmm12
12078
12079# qhasm: uint32323232 xmm13 >>= 8
12080# asm 1: psrld $8,<xmm13=int6464#14
12081# asm 2: psrld $8,<xmm13=%xmm13
12082psrld $8,%xmm13
12083
12084# qhasm: uint32323232 xmm14 >>= 8
12085# asm 1: psrld $8,<xmm14=int6464#15
12086# asm 2: psrld $8,<xmm14=%xmm14
12087psrld $8,%xmm14
12088
12089# qhasm: uint32323232 xmm15 >>= 8
12090# asm 1: psrld $8,<xmm15=int6464#16
12091# asm 2: psrld $8,<xmm15=%xmm15
12092psrld $8,%xmm15
12093
12094# qhasm: xmm0 ^= xmm8
12095# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
12096# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
12097pxor %xmm8,%xmm0
12098
12099# qhasm: xmm1 ^= xmm9
12100# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
12101# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
12102pxor %xmm9,%xmm1
12103
12104# qhasm: xmm4 ^= xmm10
12105# asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5
12106# asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4
12107pxor %xmm10,%xmm4
12108
12109# qhasm: xmm6 ^= xmm11
12110# asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7
12111# asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6
12112pxor %xmm11,%xmm6
12113
12114# qhasm: xmm3 ^= xmm12
12115# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4
12116# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3
12117pxor %xmm12,%xmm3
12118
12119# qhasm: xmm7 ^= xmm13
12120# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
12121# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
12122pxor %xmm13,%xmm7
12123
12124# qhasm: xmm2 ^= xmm14
12125# asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3
12126# asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2
12127pxor %xmm14,%xmm2
12128
12129# qhasm: xmm5 ^= xmm15
12130# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
12131# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
12132pxor %xmm15,%xmm5
12133
12134# qhasm: uint32323232 xmm8 >>= 8
12135# asm 1: psrld $8,<xmm8=int6464#9
12136# asm 2: psrld $8,<xmm8=%xmm8
12137psrld $8,%xmm8
12138
12139# qhasm: uint32323232 xmm9 >>= 8
12140# asm 1: psrld $8,<xmm9=int6464#10
12141# asm 2: psrld $8,<xmm9=%xmm9
12142psrld $8,%xmm9
12143
12144# qhasm: uint32323232 xmm10 >>= 8
12145# asm 1: psrld $8,<xmm10=int6464#11
12146# asm 2: psrld $8,<xmm10=%xmm10
12147psrld $8,%xmm10
12148
12149# qhasm: uint32323232 xmm11 >>= 8
12150# asm 1: psrld $8,<xmm11=int6464#12
12151# asm 2: psrld $8,<xmm11=%xmm11
12152psrld $8,%xmm11
12153
12154# qhasm: uint32323232 xmm12 >>= 8
12155# asm 1: psrld $8,<xmm12=int6464#13
12156# asm 2: psrld $8,<xmm12=%xmm12
12157psrld $8,%xmm12
12158
12159# qhasm: uint32323232 xmm13 >>= 8
12160# asm 1: psrld $8,<xmm13=int6464#14
12161# asm 2: psrld $8,<xmm13=%xmm13
12162psrld $8,%xmm13
12163
12164# qhasm: uint32323232 xmm14 >>= 8
12165# asm 1: psrld $8,<xmm14=int6464#15
12166# asm 2: psrld $8,<xmm14=%xmm14
12167psrld $8,%xmm14
12168
12169# qhasm: uint32323232 xmm15 >>= 8
12170# asm 1: psrld $8,<xmm15=int6464#16
12171# asm 2: psrld $8,<xmm15=%xmm15
12172psrld $8,%xmm15
12173
12174# qhasm: xmm0 ^= xmm8
12175# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
12176# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
12177pxor %xmm8,%xmm0
12178
12179# qhasm: xmm1 ^= xmm9
12180# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
12181# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
12182pxor %xmm9,%xmm1
12183
12184# qhasm: xmm4 ^= xmm10
12185# asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5
12186# asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4
12187pxor %xmm10,%xmm4
12188
12189# qhasm: xmm6 ^= xmm11
12190# asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7
12191# asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6
12192pxor %xmm11,%xmm6
12193
12194# qhasm: xmm3 ^= xmm12
12195# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4
12196# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3
12197pxor %xmm12,%xmm3
12198
12199# qhasm: xmm7 ^= xmm13
12200# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
12201# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
12202pxor %xmm13,%xmm7
12203
12204# qhasm: xmm2 ^= xmm14
12205# asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3
12206# asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2
12207pxor %xmm14,%xmm2
12208
12209# qhasm: xmm5 ^= xmm15
12210# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
12211# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
12212pxor %xmm15,%xmm5
12213
12214# qhasm: uint32323232 xmm8 >>= 8
12215# asm 1: psrld $8,<xmm8=int6464#9
12216# asm 2: psrld $8,<xmm8=%xmm8
12217psrld $8,%xmm8
12218
12219# qhasm: uint32323232 xmm9 >>= 8
12220# asm 1: psrld $8,<xmm9=int6464#10
12221# asm 2: psrld $8,<xmm9=%xmm9
12222psrld $8,%xmm9
12223
12224# qhasm: uint32323232 xmm10 >>= 8
12225# asm 1: psrld $8,<xmm10=int6464#11
12226# asm 2: psrld $8,<xmm10=%xmm10
12227psrld $8,%xmm10
12228
12229# qhasm: uint32323232 xmm11 >>= 8
12230# asm 1: psrld $8,<xmm11=int6464#12
12231# asm 2: psrld $8,<xmm11=%xmm11
12232psrld $8,%xmm11
12233
12234# qhasm: uint32323232 xmm12 >>= 8
12235# asm 1: psrld $8,<xmm12=int6464#13
12236# asm 2: psrld $8,<xmm12=%xmm12
12237psrld $8,%xmm12
12238
12239# qhasm: uint32323232 xmm13 >>= 8
12240# asm 1: psrld $8,<xmm13=int6464#14
12241# asm 2: psrld $8,<xmm13=%xmm13
12242psrld $8,%xmm13
12243
12244# qhasm: uint32323232 xmm14 >>= 8
12245# asm 1: psrld $8,<xmm14=int6464#15
12246# asm 2: psrld $8,<xmm14=%xmm14
12247psrld $8,%xmm14
12248
12249# qhasm: uint32323232 xmm15 >>= 8
12250# asm 1: psrld $8,<xmm15=int6464#16
12251# asm 2: psrld $8,<xmm15=%xmm15
12252psrld $8,%xmm15
12253
12254# qhasm: xmm0 ^= xmm8
12255# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
12256# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
12257pxor %xmm8,%xmm0
12258
12259# qhasm: xmm1 ^= xmm9
12260# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
12261# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
12262pxor %xmm9,%xmm1
12263
12264# qhasm: xmm4 ^= xmm10
12265# asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5
12266# asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4
12267pxor %xmm10,%xmm4
12268
12269# qhasm: xmm6 ^= xmm11
12270# asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7
12271# asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6
12272pxor %xmm11,%xmm6
12273
12274# qhasm: xmm3 ^= xmm12
12275# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4
12276# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3
12277pxor %xmm12,%xmm3
12278
12279# qhasm: xmm7 ^= xmm13
12280# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
12281# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
12282pxor %xmm13,%xmm7
12283
12284# qhasm: xmm2 ^= xmm14
12285# asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3
12286# asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2
12287pxor %xmm14,%xmm2
12288
12289# qhasm: xmm5 ^= xmm15
12290# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
12291# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
12292pxor %xmm15,%xmm5
12293
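# note: the stores below write the registers in the order xmm0, xmm1, xmm4,
# xmm6, xmm3, xmm7, xmm2, xmm5, presumably restoring slice order 0..7 after
# the S-box circuit permuted the register assignment.
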
12294# qhasm: *(int128 *)(c + 1152) = xmm0
12295# asm 1: movdqa <xmm0=int6464#1,1152(<c=int64#1)
12296# asm 2: movdqa <xmm0=%xmm0,1152(<c=%rdi)
12297movdqa %xmm0,1152(%rdi)
12298
12299# qhasm: *(int128 *)(c + 1168) = xmm1
12300# asm 1: movdqa <xmm1=int6464#2,1168(<c=int64#1)
12301# asm 2: movdqa <xmm1=%xmm1,1168(<c=%rdi)
12302movdqa %xmm1,1168(%rdi)
12303
12304# qhasm: *(int128 *)(c + 1184) = xmm4
12305# asm 1: movdqa <xmm4=int6464#5,1184(<c=int64#1)
12306# asm 2: movdqa <xmm4=%xmm4,1184(<c=%rdi)
12307movdqa %xmm4,1184(%rdi)
12308
12309# qhasm: *(int128 *)(c + 1200) = xmm6
12310# asm 1: movdqa <xmm6=int6464#7,1200(<c=int64#1)
12311# asm 2: movdqa <xmm6=%xmm6,1200(<c=%rdi)
12312movdqa %xmm6,1200(%rdi)
12313
12314# qhasm: *(int128 *)(c + 1216) = xmm3
12315# asm 1: movdqa <xmm3=int6464#4,1216(<c=int64#1)
12316# asm 2: movdqa <xmm3=%xmm3,1216(<c=%rdi)
12317movdqa %xmm3,1216(%rdi)
12318
12319# qhasm: *(int128 *)(c + 1232) = xmm7
12320# asm 1: movdqa <xmm7=int6464#8,1232(<c=int64#1)
12321# asm 2: movdqa <xmm7=%xmm7,1232(<c=%rdi)
12322movdqa %xmm7,1232(%rdi)
12323
12324# qhasm: *(int128 *)(c + 1248) = xmm2
12325# asm 1: movdqa <xmm2=int6464#3,1248(<c=int64#1)
12326# asm 2: movdqa <xmm2=%xmm2,1248(<c=%rdi)
12327movdqa %xmm2,1248(%rdi)
12328
12329# qhasm: *(int128 *)(c + 1264) = xmm5
12330# asm 1: movdqa <xmm5=int6464#6,1264(<c=int64#1)
12331# asm 2: movdqa <xmm5=%xmm5,1264(<c=%rdi)
12332movdqa %xmm5,1264(%rdi)
12333
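# note: before the fresh round key is fed into the next S-box evaluation,
# slices 0, 1, 5 and 6 (now held in xmm0, xmm1, xmm7 and xmm2, per the store
# order above) are complemented again, mirroring the correction applied
# after the load.
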
12334# qhasm: xmm0 ^= ONE
12335# asm 1: pxor ONE,<xmm0=int6464#1
12336# asm 2: pxor ONE,<xmm0=%xmm0
12337pxor ONE,%xmm0
12338
12339# qhasm: xmm1 ^= ONE
12340# asm 1: pxor ONE,<xmm1=int6464#2
12341# asm 2: pxor ONE,<xmm1=%xmm1
12342pxor ONE,%xmm1
12343
12344# qhasm: xmm7 ^= ONE
12345# asm 1: pxor ONE,<xmm7=int6464#8
12346# asm 2: pxor ONE,<xmm7=%xmm7
12347pxor ONE,%xmm7
12348
12349# qhasm: xmm2 ^= ONE
12350# asm 1: pxor ONE,<xmm2=int6464#3
12351# asm 2: pxor ONE,<xmm2=%xmm2
12352pxor ONE,%xmm2
12353
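# note: ROTB is a fixed byte permutation; it appears to rotate the word that
# feeds the next round's key-schedule core into place before the following
# S-box evaluation.
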
12354# qhasm: shuffle bytes of xmm0 by ROTB
12355# asm 1: pshufb ROTB,<xmm0=int6464#1
12356# asm 2: pshufb ROTB,<xmm0=%xmm0
12357pshufb ROTB,%xmm0
12358
12359# qhasm: shuffle bytes of xmm1 by ROTB
12360# asm 1: pshufb ROTB,<xmm1=int6464#2
12361# asm 2: pshufb ROTB,<xmm1=%xmm1
12362pshufb ROTB,%xmm1
12363
12364# qhasm: shuffle bytes of xmm4 by ROTB
12365# asm 1: pshufb ROTB,<xmm4=int6464#5
12366# asm 2: pshufb ROTB,<xmm4=%xmm4
12367pshufb ROTB,%xmm4
12368
12369# qhasm: shuffle bytes of xmm6 by ROTB
12370# asm 1: pshufb ROTB,<xmm6=int6464#7
12371# asm 2: pshufb ROTB,<xmm6=%xmm6
12372pshufb ROTB,%xmm6
12373
12374# qhasm: shuffle bytes of xmm3 by ROTB
12375# asm 1: pshufb ROTB,<xmm3=int6464#4
12376# asm 2: pshufb ROTB,<xmm3=%xmm3
12377pshufb ROTB,%xmm3
12378
12379# qhasm: shuffle bytes of xmm7 by ROTB
12380# asm 1: pshufb ROTB,<xmm7=int6464#8
12381# asm 2: pshufb ROTB,<xmm7=%xmm7
12382pshufb ROTB,%xmm7
12383
12384# qhasm: shuffle bytes of xmm2 by ROTB
12385# asm 1: pshufb ROTB,<xmm2=int6464#3
12386# asm 2: pshufb ROTB,<xmm2=%xmm2
12387pshufb ROTB,%xmm2
12388
12389# qhasm: shuffle bytes of xmm5 by ROTB
12390# asm 1: pshufb ROTB,<xmm5=int6464#6
12391# asm 2: pshufb ROTB,<xmm5=%xmm5
12392pshufb ROTB,%xmm5
12393
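# note: the pxor chain below is the input linear layer of the next S-box
# evaluation; the pand/por/pxor stretch after it is again the shared
# GF(2^8) inversion at the heart of the bitsliced S-box circuit.
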
12394# qhasm: xmm7 ^= xmm2
12395# asm 1: pxor <xmm2=int6464#3,<xmm7=int6464#8
12396# asm 2: pxor <xmm2=%xmm2,<xmm7=%xmm7
12397pxor %xmm2,%xmm7
12398
12399# qhasm: xmm4 ^= xmm1
12400# asm 1: pxor <xmm1=int6464#2,<xmm4=int6464#5
12401# asm 2: pxor <xmm1=%xmm1,<xmm4=%xmm4
12402pxor %xmm1,%xmm4
12403
12404# qhasm: xmm7 ^= xmm0
12405# asm 1: pxor <xmm0=int6464#1,<xmm7=int6464#8
12406# asm 2: pxor <xmm0=%xmm0,<xmm7=%xmm7
12407pxor %xmm0,%xmm7
12408
12409# qhasm: xmm2 ^= xmm4
12410# asm 1: pxor <xmm4=int6464#5,<xmm2=int6464#3
12411# asm 2: pxor <xmm4=%xmm4,<xmm2=%xmm2
12412pxor %xmm4,%xmm2
12413
12414# qhasm: xmm6 ^= xmm0
12415# asm 1: pxor <xmm0=int6464#1,<xmm6=int6464#7
12416# asm 2: pxor <xmm0=%xmm0,<xmm6=%xmm6
12417pxor %xmm0,%xmm6
12418
12419# qhasm: xmm2 ^= xmm6
12420# asm 1: pxor <xmm6=int6464#7,<xmm2=int6464#3
12421# asm 2: pxor <xmm6=%xmm6,<xmm2=%xmm2
12422pxor %xmm6,%xmm2
12423
12424# qhasm: xmm6 ^= xmm5
12425# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
12426# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
12427pxor %xmm5,%xmm6
12428
12429# qhasm: xmm6 ^= xmm3
12430# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
12431# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
12432pxor %xmm3,%xmm6
12433
12434# qhasm: xmm5 ^= xmm7
12435# asm 1: pxor <xmm7=int6464#8,<xmm5=int6464#6
12436# asm 2: pxor <xmm7=%xmm7,<xmm5=%xmm5
12437pxor %xmm7,%xmm5
12438
12439# qhasm: xmm6 ^= xmm1
12440# asm 1: pxor <xmm1=int6464#2,<xmm6=int6464#7
12441# asm 2: pxor <xmm1=%xmm1,<xmm6=%xmm6
12442pxor %xmm1,%xmm6
12443
12444# qhasm: xmm3 ^= xmm7
12445# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4
12446# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3
12447pxor %xmm7,%xmm3
12448
12449# qhasm: xmm4 ^= xmm5
12450# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
12451# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
12452pxor %xmm5,%xmm4
12453
12454# qhasm: xmm1 ^= xmm7
12455# asm 1: pxor <xmm7=int6464#8,<xmm1=int6464#2
12456# asm 2: pxor <xmm7=%xmm7,<xmm1=%xmm1
12457pxor %xmm7,%xmm1
12458
12459# qhasm: xmm11 = xmm5
12460# asm 1: movdqa <xmm5=int6464#6,>xmm11=int6464#9
12461# asm 2: movdqa <xmm5=%xmm5,>xmm11=%xmm8
12462movdqa %xmm5,%xmm8
12463
12464# qhasm: xmm10 = xmm1
12465# asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10
12466# asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9
12467movdqa %xmm1,%xmm9
12468
12469# qhasm: xmm9 = xmm7
12470# asm 1: movdqa <xmm7=int6464#8,>xmm9=int6464#11
12471# asm 2: movdqa <xmm7=%xmm7,>xmm9=%xmm10
12472movdqa %xmm7,%xmm10
12473
12474# qhasm: xmm13 = xmm4
12475# asm 1: movdqa <xmm4=int6464#5,>xmm13=int6464#12
12476# asm 2: movdqa <xmm4=%xmm4,>xmm13=%xmm11
12477movdqa %xmm4,%xmm11
12478
12479# qhasm: xmm12 = xmm2
12480# asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#13
12481# asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm12
12482movdqa %xmm2,%xmm12
12483
12484# qhasm: xmm11 ^= xmm3
12485# asm 1: pxor <xmm3=int6464#4,<xmm11=int6464#9
12486# asm 2: pxor <xmm3=%xmm3,<xmm11=%xmm8
12487pxor %xmm3,%xmm8
12488
12489# qhasm: xmm10 ^= xmm4
12490# asm 1: pxor <xmm4=int6464#5,<xmm10=int6464#10
12491# asm 2: pxor <xmm4=%xmm4,<xmm10=%xmm9
12492pxor %xmm4,%xmm9
12493
12494# qhasm: xmm9 ^= xmm6
12495# asm 1: pxor <xmm6=int6464#7,<xmm9=int6464#11
12496# asm 2: pxor <xmm6=%xmm6,<xmm9=%xmm10
12497pxor %xmm6,%xmm10
12498
12499# qhasm: xmm13 ^= xmm3
12500# asm 1: pxor <xmm3=int6464#4,<xmm13=int6464#12
12501# asm 2: pxor <xmm3=%xmm3,<xmm13=%xmm11
12502pxor %xmm3,%xmm11
12503
12504# qhasm: xmm12 ^= xmm0
12505# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13
12506# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12
12507pxor %xmm0,%xmm12
12508
12509# qhasm: xmm14 = xmm11
12510# asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14
12511# asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13
12512movdqa %xmm8,%xmm13
12513
12514# qhasm: xmm8 = xmm10
12515# asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15
12516# asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14
12517movdqa %xmm9,%xmm14
12518
12519# qhasm: xmm15 = xmm11
12520# asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16
12521# asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15
12522movdqa %xmm8,%xmm15
12523
12524# qhasm: xmm10 |= xmm9
12525# asm 1: por <xmm9=int6464#11,<xmm10=int6464#10
12526# asm 2: por <xmm9=%xmm10,<xmm10=%xmm9
12527por %xmm10,%xmm9
12528
12529# qhasm: xmm11 |= xmm12
12530# asm 1: por <xmm12=int6464#13,<xmm11=int6464#9
12531# asm 2: por <xmm12=%xmm12,<xmm11=%xmm8
12532por %xmm12,%xmm8
12533
12534# qhasm: xmm15 ^= xmm8
12535# asm 1: pxor <xmm8=int6464#15,<xmm15=int6464#16
12536# asm 2: pxor <xmm8=%xmm14,<xmm15=%xmm15
12537pxor %xmm14,%xmm15
12538
12539# qhasm: xmm14 &= xmm12
12540# asm 1: pand <xmm12=int6464#13,<xmm14=int6464#14
12541# asm 2: pand <xmm12=%xmm12,<xmm14=%xmm13
12542pand %xmm12,%xmm13
12543
12544# qhasm: xmm8 &= xmm9
12545# asm 1: pand <xmm9=int6464#11,<xmm8=int6464#15
12546# asm 2: pand <xmm9=%xmm10,<xmm8=%xmm14
12547pand %xmm10,%xmm14
12548
12549# qhasm: xmm12 ^= xmm9
12550# asm 1: pxor <xmm9=int6464#11,<xmm12=int6464#13
12551# asm 2: pxor <xmm9=%xmm10,<xmm12=%xmm12
12552pxor %xmm10,%xmm12
12553
12554# qhasm: xmm15 &= xmm12
12555# asm 1: pand <xmm12=int6464#13,<xmm15=int6464#16
12556# asm 2: pand <xmm12=%xmm12,<xmm15=%xmm15
12557pand %xmm12,%xmm15
12558
12559# qhasm: xmm12 = xmm6
12560# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#11
12561# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm10
12562movdqa %xmm6,%xmm10
12563
12564# qhasm: xmm12 ^= xmm0
12565# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#11
12566# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm10
12567pxor %xmm0,%xmm10
12568
12569# qhasm: xmm13 &= xmm12
12570# asm 1: pand <xmm12=int6464#11,<xmm13=int6464#12
12571# asm 2: pand <xmm12=%xmm10,<xmm13=%xmm11
12572pand %xmm10,%xmm11
12573
12574# qhasm: xmm11 ^= xmm13
12575# asm 1: pxor <xmm13=int6464#12,<xmm11=int6464#9
12576# asm 2: pxor <xmm13=%xmm11,<xmm11=%xmm8
12577pxor %xmm11,%xmm8
12578
12579# qhasm: xmm10 ^= xmm13
12580# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
12581# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
12582pxor %xmm11,%xmm9
12583
12584# qhasm: xmm13 = xmm5
12585# asm 1: movdqa <xmm5=int6464#6,>xmm13=int6464#11
12586# asm 2: movdqa <xmm5=%xmm5,>xmm13=%xmm10
12587movdqa %xmm5,%xmm10
12588
12589# qhasm: xmm13 ^= xmm1
12590# asm 1: pxor <xmm1=int6464#2,<xmm13=int6464#11
12591# asm 2: pxor <xmm1=%xmm1,<xmm13=%xmm10
12592pxor %xmm1,%xmm10
12593
12594# qhasm: xmm12 = xmm7
12595# asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#12
12596# asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm11
12597movdqa %xmm7,%xmm11
12598
12599# qhasm: xmm9 = xmm13
12600# asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13
12601# asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12
12602movdqa %xmm10,%xmm12
12603
12604# qhasm: xmm12 ^= xmm2
12605# asm 1: pxor <xmm2=int6464#3,<xmm12=int6464#12
12606# asm 2: pxor <xmm2=%xmm2,<xmm12=%xmm11
12607pxor %xmm2,%xmm11
12608
12609# qhasm: xmm9 |= xmm12
12610# asm 1: por <xmm12=int6464#12,<xmm9=int6464#13
12611# asm 2: por <xmm12=%xmm11,<xmm9=%xmm12
12612por %xmm11,%xmm12
12613
12614# qhasm: xmm13 &= xmm12
12615# asm 1: pand <xmm12=int6464#12,<xmm13=int6464#11
12616# asm 2: pand <xmm12=%xmm11,<xmm13=%xmm10
12617pand %xmm11,%xmm10
12618
12619# qhasm: xmm8 ^= xmm13
12620# asm 1: pxor <xmm13=int6464#11,<xmm8=int6464#15
12621# asm 2: pxor <xmm13=%xmm10,<xmm8=%xmm14
12622pxor %xmm10,%xmm14
12623
12624# qhasm: xmm11 ^= xmm15
12625# asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#9
12626# asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm8
12627pxor %xmm15,%xmm8
12628
12629# qhasm: xmm10 ^= xmm14
12630# asm 1: pxor <xmm14=int6464#14,<xmm10=int6464#10
12631# asm 2: pxor <xmm14=%xmm13,<xmm10=%xmm9
12632pxor %xmm13,%xmm9
12633
12634# qhasm: xmm9 ^= xmm15
12635# asm 1: pxor <xmm15=int6464#16,<xmm9=int6464#13
12636# asm 2: pxor <xmm15=%xmm15,<xmm9=%xmm12
12637pxor %xmm15,%xmm12
12638
12639# qhasm: xmm8 ^= xmm14
12640# asm 1: pxor <xmm14=int6464#14,<xmm8=int6464#15
12641# asm 2: pxor <xmm14=%xmm13,<xmm8=%xmm14
12642pxor %xmm13,%xmm14
12643
12644# qhasm: xmm9 ^= xmm14
12645# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
12646# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
12647pxor %xmm13,%xmm12
12648
12649# qhasm: xmm12 = xmm4
12650# asm 1: movdqa <xmm4=int6464#5,>xmm12=int6464#11
12651# asm 2: movdqa <xmm4=%xmm4,>xmm12=%xmm10
12652movdqa %xmm4,%xmm10
12653
12654# qhasm: xmm13 = xmm3
12655# asm 1: movdqa <xmm3=int6464#4,>xmm13=int6464#12
12656# asm 2: movdqa <xmm3=%xmm3,>xmm13=%xmm11
12657movdqa %xmm3,%xmm11
12658
12659# qhasm: xmm14 = xmm1
12660# asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14
12661# asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13
12662movdqa %xmm1,%xmm13
12663
12664# qhasm: xmm15 = xmm5
12665# asm 1: movdqa <xmm5=int6464#6,>xmm15=int6464#16
12666# asm 2: movdqa <xmm5=%xmm5,>xmm15=%xmm15
12667movdqa %xmm5,%xmm15
12668
12669# qhasm: xmm12 &= xmm6
12670# asm 1: pand <xmm6=int6464#7,<xmm12=int6464#11
12671# asm 2: pand <xmm6=%xmm6,<xmm12=%xmm10
12672pand %xmm6,%xmm10
12673
12674# qhasm: xmm13 &= xmm0
12675# asm 1: pand <xmm0=int6464#1,<xmm13=int6464#12
12676# asm 2: pand <xmm0=%xmm0,<xmm13=%xmm11
12677pand %xmm0,%xmm11
12678
12679# qhasm: xmm14 &= xmm7
12680# asm 1: pand <xmm7=int6464#8,<xmm14=int6464#14
12681# asm 2: pand <xmm7=%xmm7,<xmm14=%xmm13
12682pand %xmm7,%xmm13
12683
12684# qhasm: xmm15 |= xmm2
12685# asm 1: por <xmm2=int6464#3,<xmm15=int6464#16
12686# asm 2: por <xmm2=%xmm2,<xmm15=%xmm15
12687por %xmm2,%xmm15
12688
12689# qhasm: xmm11 ^= xmm12
12690# asm 1: pxor <xmm12=int6464#11,<xmm11=int6464#9
12691# asm 2: pxor <xmm12=%xmm10,<xmm11=%xmm8
12692pxor %xmm10,%xmm8
12693
12694# qhasm: xmm10 ^= xmm13
12695# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
12696# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
12697pxor %xmm11,%xmm9
12698
12699# qhasm: xmm9 ^= xmm14
12700# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
12701# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
12702pxor %xmm13,%xmm12
12703
12704# qhasm: xmm8 ^= xmm15
12705# asm 1: pxor <xmm15=int6464#16,<xmm8=int6464#15
12706# asm 2: pxor <xmm15=%xmm15,<xmm8=%xmm14
12707pxor %xmm15,%xmm14
12708
12709# qhasm: xmm12 = xmm11
12710# asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11
12711# asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10
12712movdqa %xmm8,%xmm10
12713
12714# qhasm: xmm12 ^= xmm10
12715# asm 1: pxor <xmm10=int6464#10,<xmm12=int6464#11
12716# asm 2: pxor <xmm10=%xmm9,<xmm12=%xmm10
12717pxor %xmm9,%xmm10
12718
12719# qhasm: xmm11 &= xmm9
12720# asm 1: pand <xmm9=int6464#13,<xmm11=int6464#9
12721# asm 2: pand <xmm9=%xmm12,<xmm11=%xmm8
12722pand %xmm12,%xmm8
12723
12724# qhasm: xmm14 = xmm8
12725# asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12
12726# asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11
12727movdqa %xmm14,%xmm11
12728
12729# qhasm: xmm14 ^= xmm11
12730# asm 1: pxor <xmm11=int6464#9,<xmm14=int6464#12
12731# asm 2: pxor <xmm11=%xmm8,<xmm14=%xmm11
12732pxor %xmm8,%xmm11
12733
12734# qhasm: xmm15 = xmm12
12735# asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14
12736# asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13
12737movdqa %xmm10,%xmm13
12738
12739# qhasm: xmm15 &= xmm14
12740# asm 1: pand <xmm14=int6464#12,<xmm15=int6464#14
12741# asm 2: pand <xmm14=%xmm11,<xmm15=%xmm13
12742pand %xmm11,%xmm13
12743
12744# qhasm: xmm15 ^= xmm10
12745# asm 1: pxor <xmm10=int6464#10,<xmm15=int6464#14
12746# asm 2: pxor <xmm10=%xmm9,<xmm15=%xmm13
12747pxor %xmm9,%xmm13
12748
12749# qhasm: xmm13 = xmm9
12750# asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16
12751# asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15
12752movdqa %xmm12,%xmm15
12753
12754# qhasm: xmm13 ^= xmm8
12755# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
12756# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
12757pxor %xmm14,%xmm15
12758
12759# qhasm: xmm11 ^= xmm10
12760# asm 1: pxor <xmm10=int6464#10,<xmm11=int6464#9
12761# asm 2: pxor <xmm10=%xmm9,<xmm11=%xmm8
12762pxor %xmm9,%xmm8
12763
12764# qhasm: xmm13 &= xmm11
12765# asm 1: pand <xmm11=int6464#9,<xmm13=int6464#16
12766# asm 2: pand <xmm11=%xmm8,<xmm13=%xmm15
12767pand %xmm8,%xmm15
12768
12769# qhasm: xmm13 ^= xmm8
12770# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
12771# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
12772pxor %xmm14,%xmm15
12773
12774# qhasm: xmm9 ^= xmm13
12775# asm 1: pxor <xmm13=int6464#16,<xmm9=int6464#13
12776# asm 2: pxor <xmm13=%xmm15,<xmm9=%xmm12
12777pxor %xmm15,%xmm12
12778
12779# qhasm: xmm10 = xmm14
12780# asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9
12781# asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8
12782movdqa %xmm11,%xmm8
12783
12784# qhasm: xmm10 ^= xmm13
12785# asm 1: pxor <xmm13=int6464#16,<xmm10=int6464#9
12786# asm 2: pxor <xmm13=%xmm15,<xmm10=%xmm8
12787pxor %xmm15,%xmm8
12788
12789# qhasm: xmm10 &= xmm8
12790# asm 1: pand <xmm8=int6464#15,<xmm10=int6464#9
12791# asm 2: pand <xmm8=%xmm14,<xmm10=%xmm8
12792pand %xmm14,%xmm8
12793
12794# qhasm: xmm9 ^= xmm10
12795# asm 1: pxor <xmm10=int6464#9,<xmm9=int6464#13
12796# asm 2: pxor <xmm10=%xmm8,<xmm9=%xmm12
12797pxor %xmm8,%xmm12
12798
12799# qhasm: xmm14 ^= xmm10
12800# asm 1: pxor <xmm10=int6464#9,<xmm14=int6464#12
12801# asm 2: pxor <xmm10=%xmm8,<xmm14=%xmm11
12802pxor %xmm8,%xmm11
12803
12804# qhasm: xmm14 &= xmm15
12805# asm 1: pand <xmm15=int6464#14,<xmm14=int6464#12
12806# asm 2: pand <xmm15=%xmm13,<xmm14=%xmm11
12807pand %xmm13,%xmm11
12808
12809# qhasm: xmm14 ^= xmm12
12810# asm 1: pxor <xmm12=int6464#11,<xmm14=int6464#12
12811# asm 2: pxor <xmm12=%xmm10,<xmm14=%xmm11
12812pxor %xmm10,%xmm11
12813
12814# qhasm: xmm12 = xmm2
12815# asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#9
12816# asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm8
12817movdqa %xmm2,%xmm8
12818
12819# qhasm: xmm8 = xmm7
12820# asm 1: movdqa <xmm7=int6464#8,>xmm8=int6464#10
12821# asm 2: movdqa <xmm7=%xmm7,>xmm8=%xmm9
12822movdqa %xmm7,%xmm9
12823
12824# qhasm: xmm10 = xmm15
12825# asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11
12826# asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10
12827movdqa %xmm13,%xmm10
12828
12829# qhasm: xmm10 ^= xmm14
12830# asm 1: pxor <xmm14=int6464#12,<xmm10=int6464#11
12831# asm 2: pxor <xmm14=%xmm11,<xmm10=%xmm10
12832pxor %xmm11,%xmm10
12833
12834# qhasm: xmm10 &= xmm2
12835# asm 1: pand <xmm2=int6464#3,<xmm10=int6464#11
12836# asm 2: pand <xmm2=%xmm2,<xmm10=%xmm10
12837pand %xmm2,%xmm10
12838
12839# qhasm: xmm2 ^= xmm7
12840# asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3
12841# asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2
12842pxor %xmm7,%xmm2
12843
12844# qhasm: xmm2 &= xmm14
12845# asm 1: pand <xmm14=int6464#12,<xmm2=int6464#3
12846# asm 2: pand <xmm14=%xmm11,<xmm2=%xmm2
12847pand %xmm11,%xmm2
12848
12849# qhasm: xmm7 &= xmm15
12850# asm 1: pand <xmm15=int6464#14,<xmm7=int6464#8
12851# asm 2: pand <xmm15=%xmm13,<xmm7=%xmm7
12852pand %xmm13,%xmm7
12853
12854# qhasm: xmm2 ^= xmm7
12855# asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3
12856# asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2
12857pxor %xmm7,%xmm2
12858
12859# qhasm: xmm7 ^= xmm10
12860# asm 1: pxor <xmm10=int6464#11,<xmm7=int6464#8
12861# asm 2: pxor <xmm10=%xmm10,<xmm7=%xmm7
12862pxor %xmm10,%xmm7
12863
12864# qhasm: xmm12 ^= xmm0
12865# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#9
12866# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm8
12867pxor %xmm0,%xmm8
12868
12869# qhasm: xmm8 ^= xmm6
12870# asm 1: pxor <xmm6=int6464#7,<xmm8=int6464#10
12871# asm 2: pxor <xmm6=%xmm6,<xmm8=%xmm9
12872pxor %xmm6,%xmm9
12873
12874# qhasm: xmm15 ^= xmm13
12875# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
12876# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
12877pxor %xmm15,%xmm13
12878
12879# qhasm: xmm14 ^= xmm9
12880# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
12881# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
12882pxor %xmm12,%xmm11
12883
12884# qhasm: xmm11 = xmm15
12885# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
12886# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
12887movdqa %xmm13,%xmm10
12888
12889# qhasm: xmm11 ^= xmm14
12890# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
12891# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
12892pxor %xmm11,%xmm10
12893
12894# qhasm: xmm11 &= xmm12
12895# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
12896# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
12897pand %xmm8,%xmm10
12898
12899# qhasm: xmm12 ^= xmm8
12900# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
12901# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
12902pxor %xmm9,%xmm8
12903
12904# qhasm: xmm12 &= xmm14
12905# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
12906# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
12907pand %xmm11,%xmm8
12908
12909# qhasm: xmm8 &= xmm15
12910# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
12911# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
12912pand %xmm13,%xmm9
12913
12914# qhasm: xmm8 ^= xmm12
12915# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
12916# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
12917pxor %xmm8,%xmm9
12918
12919# qhasm: xmm12 ^= xmm11
12920# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
12921# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
12922pxor %xmm10,%xmm8
12923
12924# qhasm: xmm10 = xmm13
12925# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
12926# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
12927movdqa %xmm15,%xmm10
12928
12929# qhasm: xmm10 ^= xmm9
12930# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
12931# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
12932pxor %xmm12,%xmm10
12933
12934# qhasm: xmm10 &= xmm0
12935# asm 1: pand <xmm0=int6464#1,<xmm10=int6464#11
12936# asm 2: pand <xmm0=%xmm0,<xmm10=%xmm10
12937pand %xmm0,%xmm10
12938
12939# qhasm: xmm0 ^= xmm6
12940# asm 1: pxor <xmm6=int6464#7,<xmm0=int6464#1
12941# asm 2: pxor <xmm6=%xmm6,<xmm0=%xmm0
12942pxor %xmm6,%xmm0
12943
12944# qhasm: xmm0 &= xmm9
12945# asm 1: pand <xmm9=int6464#13,<xmm0=int6464#1
12946# asm 2: pand <xmm9=%xmm12,<xmm0=%xmm0
12947pand %xmm12,%xmm0
12948
12949# qhasm: xmm6 &= xmm13
12950# asm 1: pand <xmm13=int6464#16,<xmm6=int6464#7
12951# asm 2: pand <xmm13=%xmm15,<xmm6=%xmm6
12952pand %xmm15,%xmm6
12953
12954# qhasm: xmm0 ^= xmm6
12955# asm 1: pxor <xmm6=int6464#7,<xmm0=int6464#1
12956# asm 2: pxor <xmm6=%xmm6,<xmm0=%xmm0
12957pxor %xmm6,%xmm0
12958
12959# qhasm: xmm6 ^= xmm10
12960# asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7
12961# asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6
12962pxor %xmm10,%xmm6
12963
12964# qhasm: xmm2 ^= xmm12
12965# asm 1: pxor <xmm12=int6464#9,<xmm2=int6464#3
12966# asm 2: pxor <xmm12=%xmm8,<xmm2=%xmm2
12967pxor %xmm8,%xmm2
12968
12969# qhasm: xmm0 ^= xmm12
12970# asm 1: pxor <xmm12=int6464#9,<xmm0=int6464#1
12971# asm 2: pxor <xmm12=%xmm8,<xmm0=%xmm0
12972pxor %xmm8,%xmm0
12973
12974# qhasm: xmm7 ^= xmm8
12975# asm 1: pxor <xmm8=int6464#10,<xmm7=int6464#8
12976# asm 2: pxor <xmm8=%xmm9,<xmm7=%xmm7
12977pxor %xmm9,%xmm7
12978
12979# qhasm: xmm6 ^= xmm8
12980# asm 1: pxor <xmm8=int6464#10,<xmm6=int6464#7
12981# asm 2: pxor <xmm8=%xmm9,<xmm6=%xmm6
12982pxor %xmm9,%xmm6
12983
12984# qhasm: xmm12 = xmm5
12985# asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#9
12986# asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm8
12987movdqa %xmm5,%xmm8
12988
12989# qhasm: xmm8 = xmm1
12990# asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10
12991# asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9
12992movdqa %xmm1,%xmm9
12993
12994# qhasm: xmm12 ^= xmm3
12995# asm 1: pxor <xmm3=int6464#4,<xmm12=int6464#9
12996# asm 2: pxor <xmm3=%xmm3,<xmm12=%xmm8
12997pxor %xmm3,%xmm8
12998
12999# qhasm: xmm8 ^= xmm4
13000# asm 1: pxor <xmm4=int6464#5,<xmm8=int6464#10
13001# asm 2: pxor <xmm4=%xmm4,<xmm8=%xmm9
13002pxor %xmm4,%xmm9
13003
13004# qhasm: xmm11 = xmm15
13005# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
13006# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
13007movdqa %xmm13,%xmm10
13008
13009# qhasm: xmm11 ^= xmm14
13010# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
13011# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
13012pxor %xmm11,%xmm10
13013
13014# qhasm: xmm11 &= xmm12
13015# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
13016# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
13017pand %xmm8,%xmm10
13018
13019# qhasm: xmm12 ^= xmm8
13020# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
13021# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
13022pxor %xmm9,%xmm8
13023
13024# qhasm: xmm12 &= xmm14
13025# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
13026# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
13027pand %xmm11,%xmm8
13028
13029# qhasm: xmm8 &= xmm15
13030# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
13031# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
13032pand %xmm13,%xmm9
13033
13034# qhasm: xmm8 ^= xmm12
13035# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
13036# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
13037pxor %xmm8,%xmm9
13038
13039# qhasm: xmm12 ^= xmm11
13040# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
13041# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
13042pxor %xmm10,%xmm8
13043
13044# qhasm: xmm10 = xmm13
13045# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
13046# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
13047movdqa %xmm15,%xmm10
13048
13049# qhasm: xmm10 ^= xmm9
13050# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
13051# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
13052pxor %xmm12,%xmm10
13053
13054# qhasm: xmm10 &= xmm3
13055# asm 1: pand <xmm3=int6464#4,<xmm10=int6464#11
13056# asm 2: pand <xmm3=%xmm3,<xmm10=%xmm10
13057pand %xmm3,%xmm10
13058
13059# qhasm: xmm3 ^= xmm4
13060# asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4
13061# asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3
13062pxor %xmm4,%xmm3
13063
13064# qhasm: xmm3 &= xmm9
13065# asm 1: pand <xmm9=int6464#13,<xmm3=int6464#4
13066# asm 2: pand <xmm9=%xmm12,<xmm3=%xmm3
13067pand %xmm12,%xmm3
13068
13069# qhasm: xmm4 &= xmm13
13070# asm 1: pand <xmm13=int6464#16,<xmm4=int6464#5
13071# asm 2: pand <xmm13=%xmm15,<xmm4=%xmm4
13072pand %xmm15,%xmm4
13073
13074# qhasm: xmm3 ^= xmm4
13075# asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4
13076# asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3
13077pxor %xmm4,%xmm3
13078
13079# qhasm: xmm4 ^= xmm10
13080# asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5
13081# asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4
13082pxor %xmm10,%xmm4
13083
13084# qhasm: xmm15 ^= xmm13
13085# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
13086# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
13087pxor %xmm15,%xmm13
13088
13089# qhasm: xmm14 ^= xmm9
13090# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
13091# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
13092pxor %xmm12,%xmm11
13093
13094# qhasm: xmm11 = xmm15
13095# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
13096# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
13097movdqa %xmm13,%xmm10
13098
13099# qhasm: xmm11 ^= xmm14
13100# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
13101# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
13102pxor %xmm11,%xmm10
13103
13104# qhasm: xmm11 &= xmm5
13105# asm 1: pand <xmm5=int6464#6,<xmm11=int6464#11
13106# asm 2: pand <xmm5=%xmm5,<xmm11=%xmm10
13107pand %xmm5,%xmm10
13108
13109# qhasm: xmm5 ^= xmm1
13110# asm 1: pxor <xmm1=int6464#2,<xmm5=int6464#6
13111# asm 2: pxor <xmm1=%xmm1,<xmm5=%xmm5
13112pxor %xmm1,%xmm5
13113
13114# qhasm: xmm5 &= xmm14
13115# asm 1: pand <xmm14=int6464#12,<xmm5=int6464#6
13116# asm 2: pand <xmm14=%xmm11,<xmm5=%xmm5
13117pand %xmm11,%xmm5
13118
13119# qhasm: xmm1 &= xmm15
13120# asm 1: pand <xmm15=int6464#14,<xmm1=int6464#2
13121# asm 2: pand <xmm15=%xmm13,<xmm1=%xmm1
13122pand %xmm13,%xmm1
13123
13124# qhasm: xmm5 ^= xmm1
13125# asm 1: pxor <xmm1=int6464#2,<xmm5=int6464#6
13126# asm 2: pxor <xmm1=%xmm1,<xmm5=%xmm5
13127pxor %xmm1,%xmm5
13128
13129# qhasm: xmm1 ^= xmm11
13130# asm 1: pxor <xmm11=int6464#11,<xmm1=int6464#2
13131# asm 2: pxor <xmm11=%xmm10,<xmm1=%xmm1
13132pxor %xmm10,%xmm1
13133
13134# qhasm: xmm5 ^= xmm12
13135# asm 1: pxor <xmm12=int6464#9,<xmm5=int6464#6
13136# asm 2: pxor <xmm12=%xmm8,<xmm5=%xmm5
13137pxor %xmm8,%xmm5
13138
13139# qhasm: xmm3 ^= xmm12
13140# asm 1: pxor <xmm12=int6464#9,<xmm3=int6464#4
13141# asm 2: pxor <xmm12=%xmm8,<xmm3=%xmm3
13142pxor %xmm8,%xmm3
13143
13144# qhasm: xmm1 ^= xmm8
13145# asm 1: pxor <xmm8=int6464#10,<xmm1=int6464#2
13146# asm 2: pxor <xmm8=%xmm9,<xmm1=%xmm1
13147pxor %xmm9,%xmm1
13148
13149# qhasm: xmm4 ^= xmm8
13150# asm 1: pxor <xmm8=int6464#10,<xmm4=int6464#5
13151# asm 2: pxor <xmm8=%xmm9,<xmm4=%xmm4
13152pxor %xmm9,%xmm4
13153
13154# qhasm: xmm5 ^= xmm0
13155# asm 1: pxor <xmm0=int6464#1,<xmm5=int6464#6
13156# asm 2: pxor <xmm0=%xmm0,<xmm5=%xmm5
13157pxor %xmm0,%xmm5
13158
13159# qhasm: xmm1 ^= xmm2
13160# asm 1: pxor <xmm2=int6464#3,<xmm1=int6464#2
13161# asm 2: pxor <xmm2=%xmm2,<xmm1=%xmm1
13162pxor %xmm2,%xmm1
13163
13164# qhasm: xmm3 ^= xmm5
13165# asm 1: pxor <xmm5=int6464#6,<xmm3=int6464#4
13166# asm 2: pxor <xmm5=%xmm5,<xmm3=%xmm3
13167pxor %xmm5,%xmm3
13168
13169# qhasm: xmm2 ^= xmm0
13170# asm 1: pxor <xmm0=int6464#1,<xmm2=int6464#3
13171# asm 2: pxor <xmm0=%xmm0,<xmm2=%xmm2
13172pxor %xmm0,%xmm2
13173
13174# qhasm: xmm0 ^= xmm1
13175# asm 1: pxor <xmm1=int6464#2,<xmm0=int6464#1
13176# asm 2: pxor <xmm1=%xmm1,<xmm0=%xmm0
13177pxor %xmm1,%xmm0
13178
13179# qhasm: xmm1 ^= xmm7
13180# asm 1: pxor <xmm7=int6464#8,<xmm1=int6464#2
13181# asm 2: pxor <xmm7=%xmm7,<xmm1=%xmm1
13182pxor %xmm7,%xmm1
13183
13184# qhasm: xmm7 ^= xmm4
13185# asm 1: pxor <xmm4=int6464#5,<xmm7=int6464#8
13186# asm 2: pxor <xmm4=%xmm4,<xmm7=%xmm7
13187pxor %xmm4,%xmm7
13188
13189# qhasm: xmm3 ^= xmm7
13190# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4
13191# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3
13192pxor %xmm7,%xmm3
13193
13194# qhasm: xmm4 ^= xmm6
13195# asm 1: pxor <xmm6=int6464#7,<xmm4=int6464#5
13196# asm 2: pxor <xmm6=%xmm6,<xmm4=%xmm4
13197pxor %xmm6,%xmm4
13198
13199# qhasm: xmm6 ^= xmm7
13200# asm 1: pxor <xmm7=int6464#8,<xmm6=int6464#7
13201# asm 2: pxor <xmm7=%xmm7,<xmm6=%xmm6
13202pxor %xmm7,%xmm6
13203
13204# qhasm: xmm2 ^= xmm6
13205# asm 1: pxor <xmm6=int6464#7,<xmm2=int6464#3
13206# asm 2: pxor <xmm6=%xmm6,<xmm2=%xmm2
13207pxor %xmm6,%xmm2
13208
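# note: same round-constant step as before, but a different set of slices
# is XORed; each round's rcon byte doubles in GF(2^8), so its bit pattern,
# and hence the affected slices, changes from round to round.
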
13209# qhasm: xmm1 ^= RCON
13210# asm 1: pxor RCON,<xmm1=int6464#2
13211# asm 2: pxor RCON,<xmm1=%xmm1
13212pxor RCON,%xmm1
13213
13214# qhasm: xmm3 ^= RCON
13215# asm 1: pxor RCON,<xmm3=int6464#4
13216# asm 2: pxor RCON,<xmm3=%xmm3
13217pxor RCON,%xmm3
13218
13219# qhasm: xmm6 ^= RCON
13220# asm 1: pxor RCON,<xmm6=int6464#7
13221# asm 2: pxor RCON,<xmm6=%xmm6
13222pxor RCON,%xmm6
13223
13224# qhasm: xmm5 ^= RCON
13225# asm 1: pxor RCON,<xmm5=int6464#6
13226# asm 2: pxor RCON,<xmm5=%xmm5
13227pxor RCON,%xmm5
13228
13229# qhasm: shuffle bytes of xmm0 by EXPB0
13230# asm 1: pshufb EXPB0,<xmm0=int6464#1
13231# asm 2: pshufb EXPB0,<xmm0=%xmm0
13232pshufb EXPB0,%xmm0
13233
13234# qhasm: shuffle bytes of xmm1 by EXPB0
13235# asm 1: pshufb EXPB0,<xmm1=int6464#2
13236# asm 2: pshufb EXPB0,<xmm1=%xmm1
13237pshufb EXPB0,%xmm1
13238
13239# qhasm: shuffle bytes of xmm3 by EXPB0
13240# asm 1: pshufb EXPB0,<xmm3=int6464#4
13241# asm 2: pshufb EXPB0,<xmm3=%xmm3
13242pshufb EXPB0,%xmm3
13243
13244# qhasm: shuffle bytes of xmm2 by EXPB0
13245# asm 1: pshufb EXPB0,<xmm2=int6464#3
13246# asm 2: pshufb EXPB0,<xmm2=%xmm2
13247pshufb EXPB0,%xmm2
13248
13249# qhasm: shuffle bytes of xmm6 by EXPB0
13250# asm 1: pshufb EXPB0,<xmm6=int6464#7
13251# asm 2: pshufb EXPB0,<xmm6=%xmm6
13252pshufb EXPB0,%xmm6
13253
13254# qhasm: shuffle bytes of xmm5 by EXPB0
13255# asm 1: pshufb EXPB0,<xmm5=int6464#6
13256# asm 2: pshufb EXPB0,<xmm5=%xmm5
13257pshufb EXPB0,%xmm5
13258
13259# qhasm: shuffle bytes of xmm4 by EXPB0
13260# asm 1: pshufb EXPB0,<xmm4=int6464#5
13261# asm 2: pshufb EXPB0,<xmm4=%xmm4
13262pshufb EXPB0,%xmm4
13263
13264# qhasm: shuffle bytes of xmm7 by EXPB0
13265# asm 1: pshufb EXPB0,<xmm7=int6464#8
13266# asm 2: pshufb EXPB0,<xmm7=%xmm7
13267pshufb EXPB0,%xmm7
13268
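# note: the round key just written at 1152..1264 (9*128) is reloaded and
# folded into the new material exactly as above: complement correction,
# XOR, then the three-pass shift-and-XOR cascade.
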
# qhasm: xmm8 = *(int128 *)(c + 1152)
# asm 1: movdqa 1152(<c=int64#1),>xmm8=int6464#9
# asm 2: movdqa 1152(<c=%rdi),>xmm8=%xmm8
movdqa 1152(%rdi),%xmm8

# qhasm: xmm9 = *(int128 *)(c + 1168)
# asm 1: movdqa 1168(<c=int64#1),>xmm9=int6464#10
# asm 2: movdqa 1168(<c=%rdi),>xmm9=%xmm9
movdqa 1168(%rdi),%xmm9

# qhasm: xmm10 = *(int128 *)(c + 1184)
# asm 1: movdqa 1184(<c=int64#1),>xmm10=int6464#11
# asm 2: movdqa 1184(<c=%rdi),>xmm10=%xmm10
movdqa 1184(%rdi),%xmm10

# qhasm: xmm11 = *(int128 *)(c + 1200)
# asm 1: movdqa 1200(<c=int64#1),>xmm11=int6464#12
# asm 2: movdqa 1200(<c=%rdi),>xmm11=%xmm11
movdqa 1200(%rdi),%xmm11

# qhasm: xmm12 = *(int128 *)(c + 1216)
# asm 1: movdqa 1216(<c=int64#1),>xmm12=int6464#13
# asm 2: movdqa 1216(<c=%rdi),>xmm12=%xmm12
movdqa 1216(%rdi),%xmm12

# qhasm: xmm13 = *(int128 *)(c + 1232)
# asm 1: movdqa 1232(<c=int64#1),>xmm13=int6464#14
# asm 2: movdqa 1232(<c=%rdi),>xmm13=%xmm13
movdqa 1232(%rdi),%xmm13

# qhasm: xmm14 = *(int128 *)(c + 1248)
# asm 1: movdqa 1248(<c=int64#1),>xmm14=int6464#15
# asm 2: movdqa 1248(<c=%rdi),>xmm14=%xmm14
movdqa 1248(%rdi),%xmm14

# qhasm: xmm15 = *(int128 *)(c + 1264)
# asm 1: movdqa 1264(<c=int64#1),>xmm15=int6464#16
# asm 2: movdqa 1264(<c=%rdi),>xmm15=%xmm15
movdqa 1264(%rdi),%xmm15

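# Round keys are kept in memory with bit slices 0, 1, 5 and 6 complemented
# (the set bits of the S-box affine constant 0x63, which the bitsliced S-box
# above omits). The pxor-ONE instructions flip slices xmm8, xmm9, xmm13 and
# xmm14 of the loaded round-9 key back to their true values before the key
# enters the expansion arithmetic.
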
# qhasm: xmm8 ^= ONE
# asm 1: pxor ONE,<xmm8=int6464#9
# asm 2: pxor ONE,<xmm8=%xmm8
pxor ONE,%xmm8

# qhasm: xmm9 ^= ONE
# asm 1: pxor ONE,<xmm9=int6464#10
# asm 2: pxor ONE,<xmm9=%xmm9
pxor ONE,%xmm9

# qhasm: xmm13 ^= ONE
# asm 1: pxor ONE,<xmm13=int6464#14
# asm 2: pxor ONE,<xmm13=%xmm13
pxor ONE,%xmm13

# qhasm: xmm14 ^= ONE
# asm 1: pxor ONE,<xmm14=int6464#15
# asm 2: pxor ONE,<xmm14=%xmm14
pxor ONE,%xmm14

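# Key-expansion recurrence. The key words are kept transposed, one byte of
# each word per 32-bit lane, so XORing in the round-9 key and then three
# times shifting it right by 8 within each lane and XORing again accumulates
# k ^ (k>>8) ^ (k>>16) ^ (k>>24) per lane. Per word that is the running XOR
# w'_i = w'_{i-1} ^ w_i of the AES key schedule, with w'_0 = w_0 ^ f and f
# the SubWord/RotWord/rcon value prepared above.
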
# qhasm: xmm0 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
pxor %xmm8,%xmm0

# qhasm: xmm1 ^= xmm9
# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
pxor %xmm9,%xmm1

# qhasm: xmm3 ^= xmm10
# asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4
# asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3
pxor %xmm10,%xmm3

# qhasm: xmm2 ^= xmm11
# asm 1: pxor <xmm11=int6464#12,<xmm2=int6464#3
# asm 2: pxor <xmm11=%xmm11,<xmm2=%xmm2
pxor %xmm11,%xmm2

# qhasm: xmm6 ^= xmm12
# asm 1: pxor <xmm12=int6464#13,<xmm6=int6464#7
# asm 2: pxor <xmm12=%xmm12,<xmm6=%xmm6
pxor %xmm12,%xmm6

# qhasm: xmm5 ^= xmm13
# asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6
# asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5
pxor %xmm13,%xmm5

# qhasm: xmm4 ^= xmm14
# asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5
# asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4
pxor %xmm14,%xmm4

# qhasm: xmm7 ^= xmm15
# asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8
# asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7
pxor %xmm15,%xmm7

# qhasm: uint32323232 xmm8 >>= 8
# asm 1: psrld $8,<xmm8=int6464#9
# asm 2: psrld $8,<xmm8=%xmm8
psrld $8,%xmm8

# qhasm: uint32323232 xmm9 >>= 8
# asm 1: psrld $8,<xmm9=int6464#10
# asm 2: psrld $8,<xmm9=%xmm9
psrld $8,%xmm9

# qhasm: uint32323232 xmm10 >>= 8
# asm 1: psrld $8,<xmm10=int6464#11
# asm 2: psrld $8,<xmm10=%xmm10
psrld $8,%xmm10

# qhasm: uint32323232 xmm11 >>= 8
# asm 1: psrld $8,<xmm11=int6464#12
# asm 2: psrld $8,<xmm11=%xmm11
psrld $8,%xmm11

# qhasm: uint32323232 xmm12 >>= 8
# asm 1: psrld $8,<xmm12=int6464#13
# asm 2: psrld $8,<xmm12=%xmm12
psrld $8,%xmm12

# qhasm: uint32323232 xmm13 >>= 8
# asm 1: psrld $8,<xmm13=int6464#14
# asm 2: psrld $8,<xmm13=%xmm13
psrld $8,%xmm13

# qhasm: uint32323232 xmm14 >>= 8
# asm 1: psrld $8,<xmm14=int6464#15
# asm 2: psrld $8,<xmm14=%xmm14
psrld $8,%xmm14

# qhasm: uint32323232 xmm15 >>= 8
# asm 1: psrld $8,<xmm15=int6464#16
# asm 2: psrld $8,<xmm15=%xmm15
psrld $8,%xmm15

# qhasm: xmm0 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
pxor %xmm8,%xmm0

# qhasm: xmm1 ^= xmm9
# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
pxor %xmm9,%xmm1

# qhasm: xmm3 ^= xmm10
# asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4
# asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3
pxor %xmm10,%xmm3

# qhasm: xmm2 ^= xmm11
# asm 1: pxor <xmm11=int6464#12,<xmm2=int6464#3
# asm 2: pxor <xmm11=%xmm11,<xmm2=%xmm2
pxor %xmm11,%xmm2

# qhasm: xmm6 ^= xmm12
# asm 1: pxor <xmm12=int6464#13,<xmm6=int6464#7
# asm 2: pxor <xmm12=%xmm12,<xmm6=%xmm6
pxor %xmm12,%xmm6

# qhasm: xmm5 ^= xmm13
# asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6
# asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5
pxor %xmm13,%xmm5

# qhasm: xmm4 ^= xmm14
# asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5
# asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4
pxor %xmm14,%xmm4

# qhasm: xmm7 ^= xmm15
# asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8
# asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7
pxor %xmm15,%xmm7

# qhasm: uint32323232 xmm8 >>= 8
# asm 1: psrld $8,<xmm8=int6464#9
# asm 2: psrld $8,<xmm8=%xmm8
psrld $8,%xmm8

# qhasm: uint32323232 xmm9 >>= 8
# asm 1: psrld $8,<xmm9=int6464#10
# asm 2: psrld $8,<xmm9=%xmm9
psrld $8,%xmm9

# qhasm: uint32323232 xmm10 >>= 8
# asm 1: psrld $8,<xmm10=int6464#11
# asm 2: psrld $8,<xmm10=%xmm10
psrld $8,%xmm10

# qhasm: uint32323232 xmm11 >>= 8
# asm 1: psrld $8,<xmm11=int6464#12
# asm 2: psrld $8,<xmm11=%xmm11
psrld $8,%xmm11

# qhasm: uint32323232 xmm12 >>= 8
# asm 1: psrld $8,<xmm12=int6464#13
# asm 2: psrld $8,<xmm12=%xmm12
psrld $8,%xmm12

# qhasm: uint32323232 xmm13 >>= 8
# asm 1: psrld $8,<xmm13=int6464#14
# asm 2: psrld $8,<xmm13=%xmm13
psrld $8,%xmm13

# qhasm: uint32323232 xmm14 >>= 8
# asm 1: psrld $8,<xmm14=int6464#15
# asm 2: psrld $8,<xmm14=%xmm14
psrld $8,%xmm14

# qhasm: uint32323232 xmm15 >>= 8
# asm 1: psrld $8,<xmm15=int6464#16
# asm 2: psrld $8,<xmm15=%xmm15
psrld $8,%xmm15

# qhasm: xmm0 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
pxor %xmm8,%xmm0

# qhasm: xmm1 ^= xmm9
# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
pxor %xmm9,%xmm1

# qhasm: xmm3 ^= xmm10
# asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4
# asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3
pxor %xmm10,%xmm3

# qhasm: xmm2 ^= xmm11
# asm 1: pxor <xmm11=int6464#12,<xmm2=int6464#3
# asm 2: pxor <xmm11=%xmm11,<xmm2=%xmm2
pxor %xmm11,%xmm2

# qhasm: xmm6 ^= xmm12
# asm 1: pxor <xmm12=int6464#13,<xmm6=int6464#7
# asm 2: pxor <xmm12=%xmm12,<xmm6=%xmm6
pxor %xmm12,%xmm6

# qhasm: xmm5 ^= xmm13
# asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6
# asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5
pxor %xmm13,%xmm5

# qhasm: xmm4 ^= xmm14
# asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5
# asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4
pxor %xmm14,%xmm4

# qhasm: xmm7 ^= xmm15
# asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8
# asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7
pxor %xmm15,%xmm7

# qhasm: uint32323232 xmm8 >>= 8
# asm 1: psrld $8,<xmm8=int6464#9
# asm 2: psrld $8,<xmm8=%xmm8
psrld $8,%xmm8

# qhasm: uint32323232 xmm9 >>= 8
# asm 1: psrld $8,<xmm9=int6464#10
# asm 2: psrld $8,<xmm9=%xmm9
psrld $8,%xmm9

# qhasm: uint32323232 xmm10 >>= 8
# asm 1: psrld $8,<xmm10=int6464#11
# asm 2: psrld $8,<xmm10=%xmm10
psrld $8,%xmm10

# qhasm: uint32323232 xmm11 >>= 8
# asm 1: psrld $8,<xmm11=int6464#12
# asm 2: psrld $8,<xmm11=%xmm11
psrld $8,%xmm11

# qhasm: uint32323232 xmm12 >>= 8
# asm 1: psrld $8,<xmm12=int6464#13
# asm 2: psrld $8,<xmm12=%xmm12
psrld $8,%xmm12

# qhasm: uint32323232 xmm13 >>= 8
# asm 1: psrld $8,<xmm13=int6464#14
# asm 2: psrld $8,<xmm13=%xmm13
psrld $8,%xmm13

# qhasm: uint32323232 xmm14 >>= 8
# asm 1: psrld $8,<xmm14=int6464#15
# asm 2: psrld $8,<xmm14=%xmm14
psrld $8,%xmm14

# qhasm: uint32323232 xmm15 >>= 8
# asm 1: psrld $8,<xmm15=int6464#16
# asm 2: psrld $8,<xmm15=%xmm15
psrld $8,%xmm15

# qhasm: xmm0 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
pxor %xmm8,%xmm0

# qhasm: xmm1 ^= xmm9
# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
pxor %xmm9,%xmm1

# qhasm: xmm3 ^= xmm10
# asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4
# asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3
pxor %xmm10,%xmm3

# qhasm: xmm2 ^= xmm11
# asm 1: pxor <xmm11=int6464#12,<xmm2=int6464#3
# asm 2: pxor <xmm11=%xmm11,<xmm2=%xmm2
pxor %xmm11,%xmm2

# qhasm: xmm6 ^= xmm12
# asm 1: pxor <xmm12=int6464#13,<xmm6=int6464#7
# asm 2: pxor <xmm12=%xmm12,<xmm6=%xmm6
pxor %xmm12,%xmm6

# qhasm: xmm5 ^= xmm13
# asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6
# asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5
pxor %xmm13,%xmm5

# qhasm: xmm4 ^= xmm14
# asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5
# asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4
pxor %xmm14,%xmm4

# qhasm: xmm7 ^= xmm15
# asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8
# asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7
pxor %xmm15,%xmm7

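# Shuffle each slice by M0 into the interleaved byte order used by the
# bitsliced cipher state, so this round key can be XORed directly into the
# eight blocks processed in parallel.
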
# qhasm: shuffle bytes of xmm0 by M0
# asm 1: pshufb M0,<xmm0=int6464#1
# asm 2: pshufb M0,<xmm0=%xmm0
pshufb M0,%xmm0

# qhasm: shuffle bytes of xmm1 by M0
# asm 1: pshufb M0,<xmm1=int6464#2
# asm 2: pshufb M0,<xmm1=%xmm1
pshufb M0,%xmm1

# qhasm: shuffle bytes of xmm4 by M0
# asm 1: pshufb M0,<xmm4=int6464#5
# asm 2: pshufb M0,<xmm4=%xmm4
pshufb M0,%xmm4

# qhasm: shuffle bytes of xmm6 by M0
# asm 1: pshufb M0,<xmm6=int6464#7
# asm 2: pshufb M0,<xmm6=%xmm6
pshufb M0,%xmm6

# qhasm: shuffle bytes of xmm3 by M0
# asm 1: pshufb M0,<xmm3=int6464#4
# asm 2: pshufb M0,<xmm3=%xmm3
pshufb M0,%xmm3

# qhasm: shuffle bytes of xmm7 by M0
# asm 1: pshufb M0,<xmm7=int6464#8
# asm 2: pshufb M0,<xmm7=%xmm7
pshufb M0,%xmm7

# qhasm: shuffle bytes of xmm2 by M0
# asm 1: pshufb M0,<xmm2=int6464#3
# asm 2: pshufb M0,<xmm2=%xmm2
pshufb M0,%xmm2

# qhasm: shuffle bytes of xmm5 by M0
# asm 1: pshufb M0,<xmm5=int6464#6
# asm 2: pshufb M0,<xmm5=%xmm5
pshufb M0,%xmm5

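# Store the freshly expanded round key 10 at offset 10*128 = 1280; the slice
# order in memory is xmm0, xmm1, xmm3, xmm2, xmm6, xmm5, xmm4, xmm7.
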
# qhasm: *(int128 *)(c + 1280) = xmm0
# asm 1: movdqa <xmm0=int6464#1,1280(<c=int64#1)
# asm 2: movdqa <xmm0=%xmm0,1280(<c=%rdi)
movdqa %xmm0,1280(%rdi)

# qhasm: *(int128 *)(c + 1296) = xmm1
# asm 1: movdqa <xmm1=int6464#2,1296(<c=int64#1)
# asm 2: movdqa <xmm1=%xmm1,1296(<c=%rdi)
movdqa %xmm1,1296(%rdi)

# qhasm: *(int128 *)(c + 1312) = xmm3
# asm 1: movdqa <xmm3=int6464#4,1312(<c=int64#1)
# asm 2: movdqa <xmm3=%xmm3,1312(<c=%rdi)
movdqa %xmm3,1312(%rdi)

# qhasm: *(int128 *)(c + 1328) = xmm2
# asm 1: movdqa <xmm2=int6464#3,1328(<c=int64#1)
# asm 2: movdqa <xmm2=%xmm2,1328(<c=%rdi)
movdqa %xmm2,1328(%rdi)

# qhasm: *(int128 *)(c + 1344) = xmm6
# asm 1: movdqa <xmm6=int6464#7,1344(<c=int64#1)
# asm 2: movdqa <xmm6=%xmm6,1344(<c=%rdi)
movdqa %xmm6,1344(%rdi)

# qhasm: *(int128 *)(c + 1360) = xmm5
# asm 1: movdqa <xmm5=int6464#6,1360(<c=int64#1)
# asm 2: movdqa <xmm5=%xmm5,1360(<c=%rdi)
movdqa %xmm5,1360(%rdi)

# qhasm: *(int128 *)(c + 1376) = xmm4
# asm 1: movdqa <xmm4=int6464#5,1376(<c=int64#1)
# asm 2: movdqa <xmm4=%xmm4,1376(<c=%rdi)
movdqa %xmm4,1376(%rdi)

# qhasm: *(int128 *)(c + 1392) = xmm7
# asm 1: movdqa <xmm7=int6464#8,1392(<c=int64#1)
# asm 2: movdqa <xmm7=%xmm7,1392(<c=%rdi)
movdqa %xmm7,1392(%rdi)

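# qhasm "leave": undo the stack adjustment recorded in %r11 by the prologue
# and return 0 (success). The moves to %rax/%rdx are qhasm's standard
# epilogue; %rax is zeroed immediately afterwards.
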
# qhasm: leave
add %r11,%rsp
mov %rdi,%rax
mov %rsi,%rdx
xor %rax,%rax
ret