diff options
author | irungentoo <irungentoo@gmail.com> | 2013-07-02 09:53:34 -0400 |
---|---|---|
committer | irungentoo <irungentoo@gmail.com> | 2013-07-02 09:53:34 -0400 |
commit | e2967396ac73cb7410787886cdaf072a184ffc49 (patch) | |
tree | 527a74d25a4a0705fc641994fd35bfab22662034 /nacl/crypto_stream | |
parent | 8928c817df345f29aa0b194743595aa11bd6a8ba (diff) |
Added NaCl crypto library.
Diffstat (limited to 'nacl/crypto_stream')
67 files changed, 70260 insertions, 0 deletions
diff --git a/nacl/crypto_stream/aes128ctr/checksum b/nacl/crypto_stream/aes128ctr/checksum new file mode 100644 index 00000000..92865436 --- /dev/null +++ b/nacl/crypto_stream/aes128ctr/checksum | |||
@@ -0,0 +1 @@ | |||
6e9966897837aae181e93261ae88fdf0 | |||
diff --git a/nacl/crypto_stream/aes128ctr/core2/afternm.s b/nacl/crypto_stream/aes128ctr/core2/afternm.s new file mode 100644 index 00000000..c1ba79ef --- /dev/null +++ b/nacl/crypto_stream/aes128ctr/core2/afternm.s | |||
@@ -0,0 +1,12308 @@ | |||
1 | # Author: Emilia Käsper and Peter Schwabe | ||
2 | # Date: 2009-03-19 | ||
3 | # +2010.01.31: minor namespace modifications | ||
4 | # Public domain | ||
5 | |||
# Constant tables for the bitsliced AES-128-CTR stream code below.
# Comments are inferred from how the constants are used by the visible
# instructions later in this file (pshufb / paddd / pand sequences);
# where a constant is not used in the visible portion, that is noted.
6 | .data | ||
7 | .p2align 6 | ||
8 | |||
# RCON / ROTB / EXPB0: not referenced in the visible portion of this
# file; presumably key-expansion constants used elsewhere — TODO confirm.
9 | RCON: .int 0x00000000, 0x00000000, 0x00000000, 0xffffffff | ||
10 | ROTB: .int 0x0c000000, 0x00000000, 0x04000000, 0x08000000 | ||
11 | EXPB0: .int 0x03030303, 0x07070707, 0x0b0b0b0b, 0x0f0f0f0f | ||
# CTRINC1..CTRINC7: counter increments 1..7 in the low 32-bit lane
# (not used in the visible portion — TODO confirm where they apply).
12 | CTRINC1: .int 0x00000001, 0x00000000, 0x00000000, 0x00000000 | ||
13 | CTRINC2: .int 0x00000002, 0x00000000, 0x00000000, 0x00000000 | ||
14 | CTRINC3: .int 0x00000003, 0x00000000, 0x00000000, 0x00000000 | ||
15 | CTRINC4: .int 0x00000004, 0x00000000, 0x00000000, 0x00000000 | ||
16 | CTRINC5: .int 0x00000005, 0x00000000, 0x00000000, 0x00000000 | ||
17 | CTRINC6: .int 0x00000006, 0x00000000, 0x00000000, 0x00000000 | ||
# RCTRINC1..RCTRINC7: "reversed" counter increments 1..7 placed in the
# last 32-bit lane; these ARE used in the visible code — each is added
# with paddd to a copy of the byte-swapped nonce/counter block, giving
# the counters for blocks 1..7 of each 8-block batch (block 0 adds 0).
18 | CTRINC7: .int 0x00000007, 0x00000000, 0x00000000, 0x00000000 | ||
19 | RCTRINC1: .int 0x00000000, 0x00000000, 0x00000000, 0x00000001 | ||
20 | RCTRINC2: .int 0x00000000, 0x00000000, 0x00000000, 0x00000002 | ||
21 | RCTRINC3: .int 0x00000000, 0x00000000, 0x00000000, 0x00000003 | ||
22 | RCTRINC4: .int 0x00000000, 0x00000000, 0x00000000, 0x00000004 | ||
23 | RCTRINC5: .int 0x00000000, 0x00000000, 0x00000000, 0x00000005 | ||
24 | RCTRINC6: .int 0x00000000, 0x00000000, 0x00000000, 0x00000006 | ||
25 | RCTRINC7: .int 0x00000000, 0x00000000, 0x00000000, 0x00000007 | ||
26 | |||
# SWAP32: pshufb index vector; per pshufb semantics the byte pattern
# (03 02 01 00 per lane in memory) reverses byte order within each
# 32-bit word — applied to the counter before the paddd increments.
27 | SWAP32: .int 0x00010203, 0x04050607, 0x08090a0b, 0x0c0d0e0f | ||
# M0SWAP / M0: pshufb byte-permutation tables applied to the eight
# counter blocks (M0 to block 0, M0SWAP to blocks 1..7) as the entry
# step of the bitsliced state layout.
28 | M0SWAP: .quad 0x0105090d0004080c , 0x03070b0f02060a0e | ||
29 | |||
# BS0/BS1/BS2: 0x55../0x33../0x0f.. masks driving the swap-bits
# sequences visible below (psrlq 1/2/4 + pxor + pand + psllq), i.e. an
# 8x8 bit-matrix transpose that converts the state to bitsliced form.
30 | BS0: .quad 0x5555555555555555, 0x5555555555555555 | ||
31 | BS1: .quad 0x3333333333333333, 0x3333333333333333 | ||
32 | BS2: .quad 0x0f0f0f0f0f0f0f0f, 0x0f0f0f0f0f0f0f0f | ||
# ONE: all-ones vector; not used in the visible portion — presumably a
# bitwise-NOT operand (pxor ONE) in the S-box code — TODO confirm.
33 | ONE: .quad 0xffffffffffffffff, 0xffffffffffffffff | ||
34 | M0: .quad 0x02060a0e03070b0f, 0x0004080c0105090d | ||
# SRM0 / SR: further pshufb shuffles; SR is applied to each state
# register right after xoring in a round-key block, which looks like
# the AES ShiftRows step in the bitsliced layout — TODO confirm SRM0's
# role (not used in the visible portion).
35 | SRM0: .quad 0x0304090e00050a0f, 0x01060b0c0207080d | ||
36 | SR: .quad 0x0504070600030201, 0x0f0e0d0c0a09080b | ||
37 | |||
38 | # qhasm: int64 outp | ||
39 | |||
40 | # qhasm: int64 len | ||
41 | |||
42 | # qhasm: int64 np | ||
43 | |||
44 | # qhasm: int64 c | ||
45 | |||
46 | # qhasm: input outp | ||
47 | |||
48 | # qhasm: input len | ||
49 | |||
50 | # qhasm: input np | ||
51 | |||
52 | # qhasm: input c | ||
53 | |||
54 | # qhasm: int64 lensav | ||
55 | |||
56 | # qhasm: int6464 xmm0 | ||
57 | |||
58 | # qhasm: int6464 xmm1 | ||
59 | |||
60 | # qhasm: int6464 xmm2 | ||
61 | |||
62 | # qhasm: int6464 xmm3 | ||
63 | |||
64 | # qhasm: int6464 xmm4 | ||
65 | |||
66 | # qhasm: int6464 xmm5 | ||
67 | |||
68 | # qhasm: int6464 xmm6 | ||
69 | |||
70 | # qhasm: int6464 xmm7 | ||
71 | |||
72 | # qhasm: int6464 xmm8 | ||
73 | |||
74 | # qhasm: int6464 xmm9 | ||
75 | |||
76 | # qhasm: int6464 xmm10 | ||
77 | |||
78 | # qhasm: int6464 xmm11 | ||
79 | |||
80 | # qhasm: int6464 xmm12 | ||
81 | |||
82 | # qhasm: int6464 xmm13 | ||
83 | |||
84 | # qhasm: int6464 xmm14 | ||
85 | |||
86 | # qhasm: int6464 xmm15 | ||
87 | |||
88 | # qhasm: int6464 t | ||
89 | |||
90 | # qhasm: stack1024 bl | ||
91 | |||
92 | # qhasm: stack128 nonce_stack | ||
93 | |||
94 | # qhasm: int64 blp | ||
95 | |||
96 | # qhasm: int64 b | ||
97 | |||
98 | # qhasm: int64 tmp | ||
99 | |||
100 | # qhasm: enter crypto_stream_aes128ctr_core2_afternm | ||
101 | .text | ||
102 | .p2align 5 | ||
103 | .globl _crypto_stream_aes128ctr_core2_afternm | ||
104 | .globl crypto_stream_aes128ctr_core2_afternm | ||
105 | _crypto_stream_aes128ctr_core2_afternm: | ||
106 | crypto_stream_aes128ctr_core2_afternm: | ||
107 | mov %rsp,%r11 | ||
108 | and $31,%r11 | ||
109 | add $160,%r11 | ||
110 | sub %r11,%rsp | ||
111 | |||
112 | # qhasm: xmm0 = *(int128 *) (np + 0) | ||
113 | # asm 1: movdqa 0(<np=int64#3),>xmm0=int6464#1 | ||
114 | # asm 2: movdqa 0(<np=%rdx),>xmm0=%xmm0 | ||
115 | movdqa 0(%rdx),%xmm0 | ||
116 | |||
117 | # qhasm: nonce_stack = xmm0 | ||
118 | # asm 1: movdqa <xmm0=int6464#1,>nonce_stack=stack128#1 | ||
119 | # asm 2: movdqa <xmm0=%xmm0,>nonce_stack=0(%rsp) | ||
120 | movdqa %xmm0,0(%rsp) | ||
121 | |||
122 | # qhasm: np = &nonce_stack | ||
123 | # asm 1: leaq <nonce_stack=stack128#1,>np=int64#3 | ||
124 | # asm 2: leaq <nonce_stack=0(%rsp),>np=%rdx | ||
125 | leaq 0(%rsp),%rdx | ||
126 | |||
127 | # qhasm: enc_block: | ||
128 | ._enc_block: | ||
129 | |||
130 | # qhasm: xmm0 = *(int128 *) (np + 0) | ||
131 | # asm 1: movdqa 0(<np=int64#3),>xmm0=int6464#1 | ||
132 | # asm 2: movdqa 0(<np=%rdx),>xmm0=%xmm0 | ||
133 | movdqa 0(%rdx),%xmm0 | ||
134 | |||
135 | # qhasm: xmm1 = xmm0 | ||
136 | # asm 1: movdqa <xmm0=int6464#1,>xmm1=int6464#2 | ||
137 | # asm 2: movdqa <xmm0=%xmm0,>xmm1=%xmm1 | ||
138 | movdqa %xmm0,%xmm1 | ||
139 | |||
140 | # qhasm: shuffle bytes of xmm1 by SWAP32 | ||
141 | # asm 1: pshufb SWAP32,<xmm1=int6464#2 | ||
142 | # asm 2: pshufb SWAP32,<xmm1=%xmm1 | ||
143 | pshufb SWAP32,%xmm1 | ||
144 | |||
145 | # qhasm: xmm2 = xmm1 | ||
146 | # asm 1: movdqa <xmm1=int6464#2,>xmm2=int6464#3 | ||
147 | # asm 2: movdqa <xmm1=%xmm1,>xmm2=%xmm2 | ||
148 | movdqa %xmm1,%xmm2 | ||
149 | |||
150 | # qhasm: xmm3 = xmm1 | ||
151 | # asm 1: movdqa <xmm1=int6464#2,>xmm3=int6464#4 | ||
152 | # asm 2: movdqa <xmm1=%xmm1,>xmm3=%xmm3 | ||
153 | movdqa %xmm1,%xmm3 | ||
154 | |||
155 | # qhasm: xmm4 = xmm1 | ||
156 | # asm 1: movdqa <xmm1=int6464#2,>xmm4=int6464#5 | ||
157 | # asm 2: movdqa <xmm1=%xmm1,>xmm4=%xmm4 | ||
158 | movdqa %xmm1,%xmm4 | ||
159 | |||
160 | # qhasm: xmm5 = xmm1 | ||
161 | # asm 1: movdqa <xmm1=int6464#2,>xmm5=int6464#6 | ||
162 | # asm 2: movdqa <xmm1=%xmm1,>xmm5=%xmm5 | ||
163 | movdqa %xmm1,%xmm5 | ||
164 | |||
165 | # qhasm: xmm6 = xmm1 | ||
166 | # asm 1: movdqa <xmm1=int6464#2,>xmm6=int6464#7 | ||
167 | # asm 2: movdqa <xmm1=%xmm1,>xmm6=%xmm6 | ||
168 | movdqa %xmm1,%xmm6 | ||
169 | |||
170 | # qhasm: xmm7 = xmm1 | ||
171 | # asm 1: movdqa <xmm1=int6464#2,>xmm7=int6464#8 | ||
172 | # asm 2: movdqa <xmm1=%xmm1,>xmm7=%xmm7 | ||
173 | movdqa %xmm1,%xmm7 | ||
174 | |||
175 | # qhasm: int32323232 xmm1 += RCTRINC1 | ||
176 | # asm 1: paddd RCTRINC1,<xmm1=int6464#2 | ||
177 | # asm 2: paddd RCTRINC1,<xmm1=%xmm1 | ||
178 | paddd RCTRINC1,%xmm1 | ||
179 | |||
180 | # qhasm: int32323232 xmm2 += RCTRINC2 | ||
181 | # asm 1: paddd RCTRINC2,<xmm2=int6464#3 | ||
182 | # asm 2: paddd RCTRINC2,<xmm2=%xmm2 | ||
183 | paddd RCTRINC2,%xmm2 | ||
184 | |||
185 | # qhasm: int32323232 xmm3 += RCTRINC3 | ||
186 | # asm 1: paddd RCTRINC3,<xmm3=int6464#4 | ||
187 | # asm 2: paddd RCTRINC3,<xmm3=%xmm3 | ||
188 | paddd RCTRINC3,%xmm3 | ||
189 | |||
190 | # qhasm: int32323232 xmm4 += RCTRINC4 | ||
191 | # asm 1: paddd RCTRINC4,<xmm4=int6464#5 | ||
192 | # asm 2: paddd RCTRINC4,<xmm4=%xmm4 | ||
193 | paddd RCTRINC4,%xmm4 | ||
194 | |||
195 | # qhasm: int32323232 xmm5 += RCTRINC5 | ||
196 | # asm 1: paddd RCTRINC5,<xmm5=int6464#6 | ||
197 | # asm 2: paddd RCTRINC5,<xmm5=%xmm5 | ||
198 | paddd RCTRINC5,%xmm5 | ||
199 | |||
200 | # qhasm: int32323232 xmm6 += RCTRINC6 | ||
201 | # asm 1: paddd RCTRINC6,<xmm6=int6464#7 | ||
202 | # asm 2: paddd RCTRINC6,<xmm6=%xmm6 | ||
203 | paddd RCTRINC6,%xmm6 | ||
204 | |||
205 | # qhasm: int32323232 xmm7 += RCTRINC7 | ||
206 | # asm 1: paddd RCTRINC7,<xmm7=int6464#8 | ||
207 | # asm 2: paddd RCTRINC7,<xmm7=%xmm7 | ||
208 | paddd RCTRINC7,%xmm7 | ||
209 | |||
210 | # qhasm: shuffle bytes of xmm0 by M0 | ||
211 | # asm 1: pshufb M0,<xmm0=int6464#1 | ||
212 | # asm 2: pshufb M0,<xmm0=%xmm0 | ||
213 | pshufb M0,%xmm0 | ||
214 | |||
215 | # qhasm: shuffle bytes of xmm1 by M0SWAP | ||
216 | # asm 1: pshufb M0SWAP,<xmm1=int6464#2 | ||
217 | # asm 2: pshufb M0SWAP,<xmm1=%xmm1 | ||
218 | pshufb M0SWAP,%xmm1 | ||
219 | |||
220 | # qhasm: shuffle bytes of xmm2 by M0SWAP | ||
221 | # asm 1: pshufb M0SWAP,<xmm2=int6464#3 | ||
222 | # asm 2: pshufb M0SWAP,<xmm2=%xmm2 | ||
223 | pshufb M0SWAP,%xmm2 | ||
224 | |||
225 | # qhasm: shuffle bytes of xmm3 by M0SWAP | ||
226 | # asm 1: pshufb M0SWAP,<xmm3=int6464#4 | ||
227 | # asm 2: pshufb M0SWAP,<xmm3=%xmm3 | ||
228 | pshufb M0SWAP,%xmm3 | ||
229 | |||
230 | # qhasm: shuffle bytes of xmm4 by M0SWAP | ||
231 | # asm 1: pshufb M0SWAP,<xmm4=int6464#5 | ||
232 | # asm 2: pshufb M0SWAP,<xmm4=%xmm4 | ||
233 | pshufb M0SWAP,%xmm4 | ||
234 | |||
235 | # qhasm: shuffle bytes of xmm5 by M0SWAP | ||
236 | # asm 1: pshufb M0SWAP,<xmm5=int6464#6 | ||
237 | # asm 2: pshufb M0SWAP,<xmm5=%xmm5 | ||
238 | pshufb M0SWAP,%xmm5 | ||
239 | |||
240 | # qhasm: shuffle bytes of xmm6 by M0SWAP | ||
241 | # asm 1: pshufb M0SWAP,<xmm6=int6464#7 | ||
242 | # asm 2: pshufb M0SWAP,<xmm6=%xmm6 | ||
243 | pshufb M0SWAP,%xmm6 | ||
244 | |||
245 | # qhasm: shuffle bytes of xmm7 by M0SWAP | ||
246 | # asm 1: pshufb M0SWAP,<xmm7=int6464#8 | ||
247 | # asm 2: pshufb M0SWAP,<xmm7=%xmm7 | ||
248 | pshufb M0SWAP,%xmm7 | ||
249 | |||
250 | # qhasm: xmm8 = xmm6 | ||
251 | # asm 1: movdqa <xmm6=int6464#7,>xmm8=int6464#9 | ||
252 | # asm 2: movdqa <xmm6=%xmm6,>xmm8=%xmm8 | ||
253 | movdqa %xmm6,%xmm8 | ||
254 | |||
255 | # qhasm: uint6464 xmm8 >>= 1 | ||
256 | # asm 1: psrlq $1,<xmm8=int6464#9 | ||
257 | # asm 2: psrlq $1,<xmm8=%xmm8 | ||
258 | psrlq $1,%xmm8 | ||
259 | |||
260 | # qhasm: xmm8 ^= xmm7 | ||
261 | # asm 1: pxor <xmm7=int6464#8,<xmm8=int6464#9 | ||
262 | # asm 2: pxor <xmm7=%xmm7,<xmm8=%xmm8 | ||
263 | pxor %xmm7,%xmm8 | ||
264 | |||
265 | # qhasm: xmm8 &= BS0 | ||
266 | # asm 1: pand BS0,<xmm8=int6464#9 | ||
267 | # asm 2: pand BS0,<xmm8=%xmm8 | ||
268 | pand BS0,%xmm8 | ||
269 | |||
270 | # qhasm: xmm7 ^= xmm8 | ||
271 | # asm 1: pxor <xmm8=int6464#9,<xmm7=int6464#8 | ||
272 | # asm 2: pxor <xmm8=%xmm8,<xmm7=%xmm7 | ||
273 | pxor %xmm8,%xmm7 | ||
274 | |||
275 | # qhasm: uint6464 xmm8 <<= 1 | ||
276 | # asm 1: psllq $1,<xmm8=int6464#9 | ||
277 | # asm 2: psllq $1,<xmm8=%xmm8 | ||
278 | psllq $1,%xmm8 | ||
279 | |||
280 | # qhasm: xmm6 ^= xmm8 | ||
281 | # asm 1: pxor <xmm8=int6464#9,<xmm6=int6464#7 | ||
282 | # asm 2: pxor <xmm8=%xmm8,<xmm6=%xmm6 | ||
283 | pxor %xmm8,%xmm6 | ||
284 | |||
285 | # qhasm: xmm8 = xmm4 | ||
286 | # asm 1: movdqa <xmm4=int6464#5,>xmm8=int6464#9 | ||
287 | # asm 2: movdqa <xmm4=%xmm4,>xmm8=%xmm8 | ||
288 | movdqa %xmm4,%xmm8 | ||
289 | |||
290 | # qhasm: uint6464 xmm8 >>= 1 | ||
291 | # asm 1: psrlq $1,<xmm8=int6464#9 | ||
292 | # asm 2: psrlq $1,<xmm8=%xmm8 | ||
293 | psrlq $1,%xmm8 | ||
294 | |||
295 | # qhasm: xmm8 ^= xmm5 | ||
296 | # asm 1: pxor <xmm5=int6464#6,<xmm8=int6464#9 | ||
297 | # asm 2: pxor <xmm5=%xmm5,<xmm8=%xmm8 | ||
298 | pxor %xmm5,%xmm8 | ||
299 | |||
300 | # qhasm: xmm8 &= BS0 | ||
301 | # asm 1: pand BS0,<xmm8=int6464#9 | ||
302 | # asm 2: pand BS0,<xmm8=%xmm8 | ||
303 | pand BS0,%xmm8 | ||
304 | |||
305 | # qhasm: xmm5 ^= xmm8 | ||
306 | # asm 1: pxor <xmm8=int6464#9,<xmm5=int6464#6 | ||
307 | # asm 2: pxor <xmm8=%xmm8,<xmm5=%xmm5 | ||
308 | pxor %xmm8,%xmm5 | ||
309 | |||
310 | # qhasm: uint6464 xmm8 <<= 1 | ||
311 | # asm 1: psllq $1,<xmm8=int6464#9 | ||
312 | # asm 2: psllq $1,<xmm8=%xmm8 | ||
313 | psllq $1,%xmm8 | ||
314 | |||
315 | # qhasm: xmm4 ^= xmm8 | ||
316 | # asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#5 | ||
317 | # asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm4 | ||
318 | pxor %xmm8,%xmm4 | ||
319 | |||
320 | # qhasm: xmm8 = xmm2 | ||
321 | # asm 1: movdqa <xmm2=int6464#3,>xmm8=int6464#9 | ||
322 | # asm 2: movdqa <xmm2=%xmm2,>xmm8=%xmm8 | ||
323 | movdqa %xmm2,%xmm8 | ||
324 | |||
325 | # qhasm: uint6464 xmm8 >>= 1 | ||
326 | # asm 1: psrlq $1,<xmm8=int6464#9 | ||
327 | # asm 2: psrlq $1,<xmm8=%xmm8 | ||
328 | psrlq $1,%xmm8 | ||
329 | |||
330 | # qhasm: xmm8 ^= xmm3 | ||
331 | # asm 1: pxor <xmm3=int6464#4,<xmm8=int6464#9 | ||
332 | # asm 2: pxor <xmm3=%xmm3,<xmm8=%xmm8 | ||
333 | pxor %xmm3,%xmm8 | ||
334 | |||
335 | # qhasm: xmm8 &= BS0 | ||
336 | # asm 1: pand BS0,<xmm8=int6464#9 | ||
337 | # asm 2: pand BS0,<xmm8=%xmm8 | ||
338 | pand BS0,%xmm8 | ||
339 | |||
340 | # qhasm: xmm3 ^= xmm8 | ||
341 | # asm 1: pxor <xmm8=int6464#9,<xmm3=int6464#4 | ||
342 | # asm 2: pxor <xmm8=%xmm8,<xmm3=%xmm3 | ||
343 | pxor %xmm8,%xmm3 | ||
344 | |||
345 | # qhasm: uint6464 xmm8 <<= 1 | ||
346 | # asm 1: psllq $1,<xmm8=int6464#9 | ||
347 | # asm 2: psllq $1,<xmm8=%xmm8 | ||
348 | psllq $1,%xmm8 | ||
349 | |||
350 | # qhasm: xmm2 ^= xmm8 | ||
351 | # asm 1: pxor <xmm8=int6464#9,<xmm2=int6464#3 | ||
352 | # asm 2: pxor <xmm8=%xmm8,<xmm2=%xmm2 | ||
353 | pxor %xmm8,%xmm2 | ||
354 | |||
355 | # qhasm: xmm8 = xmm0 | ||
356 | # asm 1: movdqa <xmm0=int6464#1,>xmm8=int6464#9 | ||
357 | # asm 2: movdqa <xmm0=%xmm0,>xmm8=%xmm8 | ||
358 | movdqa %xmm0,%xmm8 | ||
359 | |||
360 | # qhasm: uint6464 xmm8 >>= 1 | ||
361 | # asm 1: psrlq $1,<xmm8=int6464#9 | ||
362 | # asm 2: psrlq $1,<xmm8=%xmm8 | ||
363 | psrlq $1,%xmm8 | ||
364 | |||
365 | # qhasm: xmm8 ^= xmm1 | ||
366 | # asm 1: pxor <xmm1=int6464#2,<xmm8=int6464#9 | ||
367 | # asm 2: pxor <xmm1=%xmm1,<xmm8=%xmm8 | ||
368 | pxor %xmm1,%xmm8 | ||
369 | |||
370 | # qhasm: xmm8 &= BS0 | ||
371 | # asm 1: pand BS0,<xmm8=int6464#9 | ||
372 | # asm 2: pand BS0,<xmm8=%xmm8 | ||
373 | pand BS0,%xmm8 | ||
374 | |||
375 | # qhasm: xmm1 ^= xmm8 | ||
376 | # asm 1: pxor <xmm8=int6464#9,<xmm1=int6464#2 | ||
377 | # asm 2: pxor <xmm8=%xmm8,<xmm1=%xmm1 | ||
378 | pxor %xmm8,%xmm1 | ||
379 | |||
380 | # qhasm: uint6464 xmm8 <<= 1 | ||
381 | # asm 1: psllq $1,<xmm8=int6464#9 | ||
382 | # asm 2: psllq $1,<xmm8=%xmm8 | ||
383 | psllq $1,%xmm8 | ||
384 | |||
385 | # qhasm: xmm0 ^= xmm8 | ||
386 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
387 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
388 | pxor %xmm8,%xmm0 | ||
389 | |||
390 | # qhasm: xmm8 = xmm5 | ||
391 | # asm 1: movdqa <xmm5=int6464#6,>xmm8=int6464#9 | ||
392 | # asm 2: movdqa <xmm5=%xmm5,>xmm8=%xmm8 | ||
393 | movdqa %xmm5,%xmm8 | ||
394 | |||
395 | # qhasm: uint6464 xmm8 >>= 2 | ||
396 | # asm 1: psrlq $2,<xmm8=int6464#9 | ||
397 | # asm 2: psrlq $2,<xmm8=%xmm8 | ||
398 | psrlq $2,%xmm8 | ||
399 | |||
400 | # qhasm: xmm8 ^= xmm7 | ||
401 | # asm 1: pxor <xmm7=int6464#8,<xmm8=int6464#9 | ||
402 | # asm 2: pxor <xmm7=%xmm7,<xmm8=%xmm8 | ||
403 | pxor %xmm7,%xmm8 | ||
404 | |||
405 | # qhasm: xmm8 &= BS1 | ||
406 | # asm 1: pand BS1,<xmm8=int6464#9 | ||
407 | # asm 2: pand BS1,<xmm8=%xmm8 | ||
408 | pand BS1,%xmm8 | ||
409 | |||
410 | # qhasm: xmm7 ^= xmm8 | ||
411 | # asm 1: pxor <xmm8=int6464#9,<xmm7=int6464#8 | ||
412 | # asm 2: pxor <xmm8=%xmm8,<xmm7=%xmm7 | ||
413 | pxor %xmm8,%xmm7 | ||
414 | |||
415 | # qhasm: uint6464 xmm8 <<= 2 | ||
416 | # asm 1: psllq $2,<xmm8=int6464#9 | ||
417 | # asm 2: psllq $2,<xmm8=%xmm8 | ||
418 | psllq $2,%xmm8 | ||
419 | |||
420 | # qhasm: xmm5 ^= xmm8 | ||
421 | # asm 1: pxor <xmm8=int6464#9,<xmm5=int6464#6 | ||
422 | # asm 2: pxor <xmm8=%xmm8,<xmm5=%xmm5 | ||
423 | pxor %xmm8,%xmm5 | ||
424 | |||
425 | # qhasm: xmm8 = xmm4 | ||
426 | # asm 1: movdqa <xmm4=int6464#5,>xmm8=int6464#9 | ||
427 | # asm 2: movdqa <xmm4=%xmm4,>xmm8=%xmm8 | ||
428 | movdqa %xmm4,%xmm8 | ||
429 | |||
430 | # qhasm: uint6464 xmm8 >>= 2 | ||
431 | # asm 1: psrlq $2,<xmm8=int6464#9 | ||
432 | # asm 2: psrlq $2,<xmm8=%xmm8 | ||
433 | psrlq $2,%xmm8 | ||
434 | |||
435 | # qhasm: xmm8 ^= xmm6 | ||
436 | # asm 1: pxor <xmm6=int6464#7,<xmm8=int6464#9 | ||
437 | # asm 2: pxor <xmm6=%xmm6,<xmm8=%xmm8 | ||
438 | pxor %xmm6,%xmm8 | ||
439 | |||
440 | # qhasm: xmm8 &= BS1 | ||
441 | # asm 1: pand BS1,<xmm8=int6464#9 | ||
442 | # asm 2: pand BS1,<xmm8=%xmm8 | ||
443 | pand BS1,%xmm8 | ||
444 | |||
445 | # qhasm: xmm6 ^= xmm8 | ||
446 | # asm 1: pxor <xmm8=int6464#9,<xmm6=int6464#7 | ||
447 | # asm 2: pxor <xmm8=%xmm8,<xmm6=%xmm6 | ||
448 | pxor %xmm8,%xmm6 | ||
449 | |||
450 | # qhasm: uint6464 xmm8 <<= 2 | ||
451 | # asm 1: psllq $2,<xmm8=int6464#9 | ||
452 | # asm 2: psllq $2,<xmm8=%xmm8 | ||
453 | psllq $2,%xmm8 | ||
454 | |||
455 | # qhasm: xmm4 ^= xmm8 | ||
456 | # asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#5 | ||
457 | # asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm4 | ||
458 | pxor %xmm8,%xmm4 | ||
459 | |||
460 | # qhasm: xmm8 = xmm1 | ||
461 | # asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#9 | ||
462 | # asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm8 | ||
463 | movdqa %xmm1,%xmm8 | ||
464 | |||
465 | # qhasm: uint6464 xmm8 >>= 2 | ||
466 | # asm 1: psrlq $2,<xmm8=int6464#9 | ||
467 | # asm 2: psrlq $2,<xmm8=%xmm8 | ||
468 | psrlq $2,%xmm8 | ||
469 | |||
470 | # qhasm: xmm8 ^= xmm3 | ||
471 | # asm 1: pxor <xmm3=int6464#4,<xmm8=int6464#9 | ||
472 | # asm 2: pxor <xmm3=%xmm3,<xmm8=%xmm8 | ||
473 | pxor %xmm3,%xmm8 | ||
474 | |||
475 | # qhasm: xmm8 &= BS1 | ||
476 | # asm 1: pand BS1,<xmm8=int6464#9 | ||
477 | # asm 2: pand BS1,<xmm8=%xmm8 | ||
478 | pand BS1,%xmm8 | ||
479 | |||
480 | # qhasm: xmm3 ^= xmm8 | ||
481 | # asm 1: pxor <xmm8=int6464#9,<xmm3=int6464#4 | ||
482 | # asm 2: pxor <xmm8=%xmm8,<xmm3=%xmm3 | ||
483 | pxor %xmm8,%xmm3 | ||
484 | |||
485 | # qhasm: uint6464 xmm8 <<= 2 | ||
486 | # asm 1: psllq $2,<xmm8=int6464#9 | ||
487 | # asm 2: psllq $2,<xmm8=%xmm8 | ||
488 | psllq $2,%xmm8 | ||
489 | |||
490 | # qhasm: xmm1 ^= xmm8 | ||
491 | # asm 1: pxor <xmm8=int6464#9,<xmm1=int6464#2 | ||
492 | # asm 2: pxor <xmm8=%xmm8,<xmm1=%xmm1 | ||
493 | pxor %xmm8,%xmm1 | ||
494 | |||
495 | # qhasm: xmm8 = xmm0 | ||
496 | # asm 1: movdqa <xmm0=int6464#1,>xmm8=int6464#9 | ||
497 | # asm 2: movdqa <xmm0=%xmm0,>xmm8=%xmm8 | ||
498 | movdqa %xmm0,%xmm8 | ||
499 | |||
500 | # qhasm: uint6464 xmm8 >>= 2 | ||
501 | # asm 1: psrlq $2,<xmm8=int6464#9 | ||
502 | # asm 2: psrlq $2,<xmm8=%xmm8 | ||
503 | psrlq $2,%xmm8 | ||
504 | |||
505 | # qhasm: xmm8 ^= xmm2 | ||
506 | # asm 1: pxor <xmm2=int6464#3,<xmm8=int6464#9 | ||
507 | # asm 2: pxor <xmm2=%xmm2,<xmm8=%xmm8 | ||
508 | pxor %xmm2,%xmm8 | ||
509 | |||
510 | # qhasm: xmm8 &= BS1 | ||
511 | # asm 1: pand BS1,<xmm8=int6464#9 | ||
512 | # asm 2: pand BS1,<xmm8=%xmm8 | ||
513 | pand BS1,%xmm8 | ||
514 | |||
515 | # qhasm: xmm2 ^= xmm8 | ||
516 | # asm 1: pxor <xmm8=int6464#9,<xmm2=int6464#3 | ||
517 | # asm 2: pxor <xmm8=%xmm8,<xmm2=%xmm2 | ||
518 | pxor %xmm8,%xmm2 | ||
519 | |||
520 | # qhasm: uint6464 xmm8 <<= 2 | ||
521 | # asm 1: psllq $2,<xmm8=int6464#9 | ||
522 | # asm 2: psllq $2,<xmm8=%xmm8 | ||
523 | psllq $2,%xmm8 | ||
524 | |||
525 | # qhasm: xmm0 ^= xmm8 | ||
526 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
527 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
528 | pxor %xmm8,%xmm0 | ||
529 | |||
530 | # qhasm: xmm8 = xmm3 | ||
531 | # asm 1: movdqa <xmm3=int6464#4,>xmm8=int6464#9 | ||
532 | # asm 2: movdqa <xmm3=%xmm3,>xmm8=%xmm8 | ||
533 | movdqa %xmm3,%xmm8 | ||
534 | |||
535 | # qhasm: uint6464 xmm8 >>= 4 | ||
536 | # asm 1: psrlq $4,<xmm8=int6464#9 | ||
537 | # asm 2: psrlq $4,<xmm8=%xmm8 | ||
538 | psrlq $4,%xmm8 | ||
539 | |||
540 | # qhasm: xmm8 ^= xmm7 | ||
541 | # asm 1: pxor <xmm7=int6464#8,<xmm8=int6464#9 | ||
542 | # asm 2: pxor <xmm7=%xmm7,<xmm8=%xmm8 | ||
543 | pxor %xmm7,%xmm8 | ||
544 | |||
545 | # qhasm: xmm8 &= BS2 | ||
546 | # asm 1: pand BS2,<xmm8=int6464#9 | ||
547 | # asm 2: pand BS2,<xmm8=%xmm8 | ||
548 | pand BS2,%xmm8 | ||
549 | |||
550 | # qhasm: xmm7 ^= xmm8 | ||
551 | # asm 1: pxor <xmm8=int6464#9,<xmm7=int6464#8 | ||
552 | # asm 2: pxor <xmm8=%xmm8,<xmm7=%xmm7 | ||
553 | pxor %xmm8,%xmm7 | ||
554 | |||
555 | # qhasm: uint6464 xmm8 <<= 4 | ||
556 | # asm 1: psllq $4,<xmm8=int6464#9 | ||
557 | # asm 2: psllq $4,<xmm8=%xmm8 | ||
558 | psllq $4,%xmm8 | ||
559 | |||
560 | # qhasm: xmm3 ^= xmm8 | ||
561 | # asm 1: pxor <xmm8=int6464#9,<xmm3=int6464#4 | ||
562 | # asm 2: pxor <xmm8=%xmm8,<xmm3=%xmm3 | ||
563 | pxor %xmm8,%xmm3 | ||
564 | |||
565 | # qhasm: xmm8 = xmm2 | ||
566 | # asm 1: movdqa <xmm2=int6464#3,>xmm8=int6464#9 | ||
567 | # asm 2: movdqa <xmm2=%xmm2,>xmm8=%xmm8 | ||
568 | movdqa %xmm2,%xmm8 | ||
569 | |||
570 | # qhasm: uint6464 xmm8 >>= 4 | ||
571 | # asm 1: psrlq $4,<xmm8=int6464#9 | ||
572 | # asm 2: psrlq $4,<xmm8=%xmm8 | ||
573 | psrlq $4,%xmm8 | ||
574 | |||
575 | # qhasm: xmm8 ^= xmm6 | ||
576 | # asm 1: pxor <xmm6=int6464#7,<xmm8=int6464#9 | ||
577 | # asm 2: pxor <xmm6=%xmm6,<xmm8=%xmm8 | ||
578 | pxor %xmm6,%xmm8 | ||
579 | |||
580 | # qhasm: xmm8 &= BS2 | ||
581 | # asm 1: pand BS2,<xmm8=int6464#9 | ||
582 | # asm 2: pand BS2,<xmm8=%xmm8 | ||
583 | pand BS2,%xmm8 | ||
584 | |||
585 | # qhasm: xmm6 ^= xmm8 | ||
586 | # asm 1: pxor <xmm8=int6464#9,<xmm6=int6464#7 | ||
587 | # asm 2: pxor <xmm8=%xmm8,<xmm6=%xmm6 | ||
588 | pxor %xmm8,%xmm6 | ||
589 | |||
590 | # qhasm: uint6464 xmm8 <<= 4 | ||
591 | # asm 1: psllq $4,<xmm8=int6464#9 | ||
592 | # asm 2: psllq $4,<xmm8=%xmm8 | ||
593 | psllq $4,%xmm8 | ||
594 | |||
595 | # qhasm: xmm2 ^= xmm8 | ||
596 | # asm 1: pxor <xmm8=int6464#9,<xmm2=int6464#3 | ||
597 | # asm 2: pxor <xmm8=%xmm8,<xmm2=%xmm2 | ||
598 | pxor %xmm8,%xmm2 | ||
599 | |||
600 | # qhasm: xmm8 = xmm1 | ||
601 | # asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#9 | ||
602 | # asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm8 | ||
603 | movdqa %xmm1,%xmm8 | ||
604 | |||
605 | # qhasm: uint6464 xmm8 >>= 4 | ||
606 | # asm 1: psrlq $4,<xmm8=int6464#9 | ||
607 | # asm 2: psrlq $4,<xmm8=%xmm8 | ||
608 | psrlq $4,%xmm8 | ||
609 | |||
610 | # qhasm: xmm8 ^= xmm5 | ||
611 | # asm 1: pxor <xmm5=int6464#6,<xmm8=int6464#9 | ||
612 | # asm 2: pxor <xmm5=%xmm5,<xmm8=%xmm8 | ||
613 | pxor %xmm5,%xmm8 | ||
614 | |||
615 | # qhasm: xmm8 &= BS2 | ||
616 | # asm 1: pand BS2,<xmm8=int6464#9 | ||
617 | # asm 2: pand BS2,<xmm8=%xmm8 | ||
618 | pand BS2,%xmm8 | ||
619 | |||
620 | # qhasm: xmm5 ^= xmm8 | ||
621 | # asm 1: pxor <xmm8=int6464#9,<xmm5=int6464#6 | ||
622 | # asm 2: pxor <xmm8=%xmm8,<xmm5=%xmm5 | ||
623 | pxor %xmm8,%xmm5 | ||
624 | |||
625 | # qhasm: uint6464 xmm8 <<= 4 | ||
626 | # asm 1: psllq $4,<xmm8=int6464#9 | ||
627 | # asm 2: psllq $4,<xmm8=%xmm8 | ||
628 | psllq $4,%xmm8 | ||
629 | |||
630 | # qhasm: xmm1 ^= xmm8 | ||
631 | # asm 1: pxor <xmm8=int6464#9,<xmm1=int6464#2 | ||
632 | # asm 2: pxor <xmm8=%xmm8,<xmm1=%xmm1 | ||
633 | pxor %xmm8,%xmm1 | ||
634 | |||
635 | # qhasm: xmm8 = xmm0 | ||
636 | # asm 1: movdqa <xmm0=int6464#1,>xmm8=int6464#9 | ||
637 | # asm 2: movdqa <xmm0=%xmm0,>xmm8=%xmm8 | ||
638 | movdqa %xmm0,%xmm8 | ||
639 | |||
640 | # qhasm: uint6464 xmm8 >>= 4 | ||
641 | # asm 1: psrlq $4,<xmm8=int6464#9 | ||
642 | # asm 2: psrlq $4,<xmm8=%xmm8 | ||
643 | psrlq $4,%xmm8 | ||
644 | |||
645 | # qhasm: xmm8 ^= xmm4 | ||
646 | # asm 1: pxor <xmm4=int6464#5,<xmm8=int6464#9 | ||
647 | # asm 2: pxor <xmm4=%xmm4,<xmm8=%xmm8 | ||
648 | pxor %xmm4,%xmm8 | ||
649 | |||
650 | # qhasm: xmm8 &= BS2 | ||
651 | # asm 1: pand BS2,<xmm8=int6464#9 | ||
652 | # asm 2: pand BS2,<xmm8=%xmm8 | ||
653 | pand BS2,%xmm8 | ||
654 | |||
655 | # qhasm: xmm4 ^= xmm8 | ||
656 | # asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#5 | ||
657 | # asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm4 | ||
658 | pxor %xmm8,%xmm4 | ||
659 | |||
660 | # qhasm: uint6464 xmm8 <<= 4 | ||
661 | # asm 1: psllq $4,<xmm8=int6464#9 | ||
662 | # asm 2: psllq $4,<xmm8=%xmm8 | ||
663 | psllq $4,%xmm8 | ||
664 | |||
665 | # qhasm: xmm0 ^= xmm8 | ||
666 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
667 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
668 | pxor %xmm8,%xmm0 | ||
669 | |||
670 | # qhasm: xmm0 ^= *(int128 *)(c + 0) | ||
671 | # asm 1: pxor 0(<c=int64#4),<xmm0=int6464#1 | ||
672 | # asm 2: pxor 0(<c=%rcx),<xmm0=%xmm0 | ||
673 | pxor 0(%rcx),%xmm0 | ||
674 | |||
675 | # qhasm: shuffle bytes of xmm0 by SR | ||
676 | # asm 1: pshufb SR,<xmm0=int6464#1 | ||
677 | # asm 2: pshufb SR,<xmm0=%xmm0 | ||
678 | pshufb SR,%xmm0 | ||
679 | |||
680 | # qhasm: xmm1 ^= *(int128 *)(c + 16) | ||
681 | # asm 1: pxor 16(<c=int64#4),<xmm1=int6464#2 | ||
682 | # asm 2: pxor 16(<c=%rcx),<xmm1=%xmm1 | ||
683 | pxor 16(%rcx),%xmm1 | ||
684 | |||
685 | # qhasm: shuffle bytes of xmm1 by SR | ||
686 | # asm 1: pshufb SR,<xmm1=int6464#2 | ||
687 | # asm 2: pshufb SR,<xmm1=%xmm1 | ||
688 | pshufb SR,%xmm1 | ||
689 | |||
690 | # qhasm: xmm2 ^= *(int128 *)(c + 32) | ||
691 | # asm 1: pxor 32(<c=int64#4),<xmm2=int6464#3 | ||
692 | # asm 2: pxor 32(<c=%rcx),<xmm2=%xmm2 | ||
693 | pxor 32(%rcx),%xmm2 | ||
694 | |||
695 | # qhasm: shuffle bytes of xmm2 by SR | ||
696 | # asm 1: pshufb SR,<xmm2=int6464#3 | ||
697 | # asm 2: pshufb SR,<xmm2=%xmm2 | ||
698 | pshufb SR,%xmm2 | ||
699 | |||
700 | # qhasm: xmm3 ^= *(int128 *)(c + 48) | ||
701 | # asm 1: pxor 48(<c=int64#4),<xmm3=int6464#4 | ||
702 | # asm 2: pxor 48(<c=%rcx),<xmm3=%xmm3 | ||
703 | pxor 48(%rcx),%xmm3 | ||
704 | |||
705 | # qhasm: shuffle bytes of xmm3 by SR | ||
706 | # asm 1: pshufb SR,<xmm3=int6464#4 | ||
707 | # asm 2: pshufb SR,<xmm3=%xmm3 | ||
708 | pshufb SR,%xmm3 | ||
709 | |||
710 | # qhasm: xmm4 ^= *(int128 *)(c + 64) | ||
711 | # asm 1: pxor 64(<c=int64#4),<xmm4=int6464#5 | ||
712 | # asm 2: pxor 64(<c=%rcx),<xmm4=%xmm4 | ||
713 | pxor 64(%rcx),%xmm4 | ||
714 | |||
715 | # qhasm: shuffle bytes of xmm4 by SR | ||
716 | # asm 1: pshufb SR,<xmm4=int6464#5 | ||
717 | # asm 2: pshufb SR,<xmm4=%xmm4 | ||
718 | pshufb SR,%xmm4 | ||
719 | |||
720 | # qhasm: xmm5 ^= *(int128 *)(c + 80) | ||
721 | # asm 1: pxor 80(<c=int64#4),<xmm5=int6464#6 | ||
722 | # asm 2: pxor 80(<c=%rcx),<xmm5=%xmm5 | ||
723 | pxor 80(%rcx),%xmm5 | ||
724 | |||
725 | # qhasm: shuffle bytes of xmm5 by SR | ||
726 | # asm 1: pshufb SR,<xmm5=int6464#6 | ||
727 | # asm 2: pshufb SR,<xmm5=%xmm5 | ||
728 | pshufb SR,%xmm5 | ||
729 | |||
730 | # qhasm: xmm6 ^= *(int128 *)(c + 96) | ||
731 | # asm 1: pxor 96(<c=int64#4),<xmm6=int6464#7 | ||
732 | # asm 2: pxor 96(<c=%rcx),<xmm6=%xmm6 | ||
733 | pxor 96(%rcx),%xmm6 | ||
734 | |||
735 | # qhasm: shuffle bytes of xmm6 by SR | ||
736 | # asm 1: pshufb SR,<xmm6=int6464#7 | ||
737 | # asm 2: pshufb SR,<xmm6=%xmm6 | ||
738 | pshufb SR,%xmm6 | ||
739 | |||
740 | # qhasm: xmm7 ^= *(int128 *)(c + 112) | ||
741 | # asm 1: pxor 112(<c=int64#4),<xmm7=int6464#8 | ||
742 | # asm 2: pxor 112(<c=%rcx),<xmm7=%xmm7 | ||
743 | pxor 112(%rcx),%xmm7 | ||
744 | |||
745 | # qhasm: shuffle bytes of xmm7 by SR | ||
746 | # asm 1: pshufb SR,<xmm7=int6464#8 | ||
747 | # asm 2: pshufb SR,<xmm7=%xmm7 | ||
748 | pshufb SR,%xmm7 | ||
749 | |||
750 | # qhasm: xmm5 ^= xmm6 | ||
751 | # asm 1: pxor <xmm6=int6464#7,<xmm5=int6464#6 | ||
752 | # asm 2: pxor <xmm6=%xmm6,<xmm5=%xmm5 | ||
753 | pxor %xmm6,%xmm5 | ||
754 | |||
755 | # qhasm: xmm2 ^= xmm1 | ||
756 | # asm 1: pxor <xmm1=int6464#2,<xmm2=int6464#3 | ||
757 | # asm 2: pxor <xmm1=%xmm1,<xmm2=%xmm2 | ||
758 | pxor %xmm1,%xmm2 | ||
759 | |||
760 | # qhasm: xmm5 ^= xmm0 | ||
761 | # asm 1: pxor <xmm0=int6464#1,<xmm5=int6464#6 | ||
762 | # asm 2: pxor <xmm0=%xmm0,<xmm5=%xmm5 | ||
763 | pxor %xmm0,%xmm5 | ||
764 | |||
765 | # qhasm: xmm6 ^= xmm2 | ||
766 | # asm 1: pxor <xmm2=int6464#3,<xmm6=int6464#7 | ||
767 | # asm 2: pxor <xmm2=%xmm2,<xmm6=%xmm6 | ||
768 | pxor %xmm2,%xmm6 | ||
769 | |||
770 | # qhasm: xmm3 ^= xmm0 | ||
771 | # asm 1: pxor <xmm0=int6464#1,<xmm3=int6464#4 | ||
772 | # asm 2: pxor <xmm0=%xmm0,<xmm3=%xmm3 | ||
773 | pxor %xmm0,%xmm3 | ||
774 | |||
775 | # qhasm: xmm6 ^= xmm3 | ||
776 | # asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7 | ||
777 | # asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6 | ||
778 | pxor %xmm3,%xmm6 | ||
779 | |||
780 | # qhasm: xmm3 ^= xmm7 | ||
781 | # asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4 | ||
782 | # asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3 | ||
783 | pxor %xmm7,%xmm3 | ||
784 | |||
785 | # qhasm: xmm3 ^= xmm4 | ||
786 | # asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4 | ||
787 | # asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3 | ||
788 | pxor %xmm4,%xmm3 | ||
789 | |||
790 | # qhasm: xmm7 ^= xmm5 | ||
791 | # asm 1: pxor <xmm5=int6464#6,<xmm7=int6464#8 | ||
792 | # asm 2: pxor <xmm5=%xmm5,<xmm7=%xmm7 | ||
793 | pxor %xmm5,%xmm7 | ||
794 | |||
795 | # qhasm: xmm3 ^= xmm1 | ||
796 | # asm 1: pxor <xmm1=int6464#2,<xmm3=int6464#4 | ||
797 | # asm 2: pxor <xmm1=%xmm1,<xmm3=%xmm3 | ||
798 | pxor %xmm1,%xmm3 | ||
799 | |||
800 | # qhasm: xmm4 ^= xmm5 | ||
801 | # asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5 | ||
802 | # asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4 | ||
803 | pxor %xmm5,%xmm4 | ||
804 | |||
805 | # qhasm: xmm2 ^= xmm7 | ||
806 | # asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3 | ||
807 | # asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2 | ||
808 | pxor %xmm7,%xmm2 | ||
809 | |||
810 | # qhasm: xmm1 ^= xmm5 | ||
811 | # asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2 | ||
812 | # asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1 | ||
813 | pxor %xmm5,%xmm1 | ||
814 | |||
815 | # qhasm: xmm11 = xmm7 | ||
816 | # asm 1: movdqa <xmm7=int6464#8,>xmm11=int6464#9 | ||
817 | # asm 2: movdqa <xmm7=%xmm7,>xmm11=%xmm8 | ||
818 | movdqa %xmm7,%xmm8 | ||
819 | |||
820 | # qhasm: xmm10 = xmm1 | ||
821 | # asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10 | ||
822 | # asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9 | ||
823 | movdqa %xmm1,%xmm9 | ||
824 | |||
825 | # qhasm: xmm9 = xmm5 | ||
826 | # asm 1: movdqa <xmm5=int6464#6,>xmm9=int6464#11 | ||
827 | # asm 2: movdqa <xmm5=%xmm5,>xmm9=%xmm10 | ||
828 | movdqa %xmm5,%xmm10 | ||
829 | |||
830 | # qhasm: xmm13 = xmm2 | ||
831 | # asm 1: movdqa <xmm2=int6464#3,>xmm13=int6464#12 | ||
832 | # asm 2: movdqa <xmm2=%xmm2,>xmm13=%xmm11 | ||
833 | movdqa %xmm2,%xmm11 | ||
834 | |||
835 | # qhasm: xmm12 = xmm6 | ||
836 | # asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#13 | ||
837 | # asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm12 | ||
838 | movdqa %xmm6,%xmm12 | ||
839 | |||
840 | # qhasm: xmm11 ^= xmm4 | ||
841 | # asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#9 | ||
842 | # asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm8 | ||
843 | pxor %xmm4,%xmm8 | ||
844 | |||
845 | # qhasm: xmm10 ^= xmm2 | ||
846 | # asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#10 | ||
847 | # asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm9 | ||
848 | pxor %xmm2,%xmm9 | ||
849 | |||
850 | # qhasm: xmm9 ^= xmm3 | ||
851 | # asm 1: pxor <xmm3=int6464#4,<xmm9=int6464#11 | ||
852 | # asm 2: pxor <xmm3=%xmm3,<xmm9=%xmm10 | ||
853 | pxor %xmm3,%xmm10 | ||
854 | |||
855 | # qhasm: xmm13 ^= xmm4 | ||
856 | # asm 1: pxor <xmm4=int6464#5,<xmm13=int6464#12 | ||
857 | # asm 2: pxor <xmm4=%xmm4,<xmm13=%xmm11 | ||
858 | pxor %xmm4,%xmm11 | ||
859 | |||
860 | # qhasm: xmm12 ^= xmm0 | ||
861 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13 | ||
862 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12 | ||
863 | pxor %xmm0,%xmm12 | ||
864 | |||
865 | # qhasm: xmm14 = xmm11 | ||
866 | # asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14 | ||
867 | # asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13 | ||
868 | movdqa %xmm8,%xmm13 | ||
869 | |||
870 | # qhasm: xmm8 = xmm10 | ||
871 | # asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15 | ||
872 | # asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14 | ||
873 | movdqa %xmm9,%xmm14 | ||
874 | |||
875 | # qhasm: xmm15 = xmm11 | ||
876 | # asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16 | ||
877 | # asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15 | ||
878 | movdqa %xmm8,%xmm15 | ||
879 | |||
880 | # qhasm: xmm10 |= xmm9 | ||
881 | # asm 1: por <xmm9=int6464#11,<xmm10=int6464#10 | ||
882 | # asm 2: por <xmm9=%xmm10,<xmm10=%xmm9 | ||
883 | por %xmm10,%xmm9 | ||
884 | |||
885 | # qhasm: xmm11 |= xmm12 | ||
886 | # asm 1: por <xmm12=int6464#13,<xmm11=int6464#9 | ||
887 | # asm 2: por <xmm12=%xmm12,<xmm11=%xmm8 | ||
888 | por %xmm12,%xmm8 | ||
889 | |||
890 | # qhasm: xmm15 ^= xmm8 | ||
891 | # asm 1: pxor <xmm8=int6464#15,<xmm15=int6464#16 | ||
892 | # asm 2: pxor <xmm8=%xmm14,<xmm15=%xmm15 | ||
893 | pxor %xmm14,%xmm15 | ||
894 | |||
895 | # qhasm: xmm14 &= xmm12 | ||
896 | # asm 1: pand <xmm12=int6464#13,<xmm14=int6464#14 | ||
897 | # asm 2: pand <xmm12=%xmm12,<xmm14=%xmm13 | ||
898 | pand %xmm12,%xmm13 | ||
899 | |||
900 | # qhasm: xmm8 &= xmm9 | ||
901 | # asm 1: pand <xmm9=int6464#11,<xmm8=int6464#15 | ||
902 | # asm 2: pand <xmm9=%xmm10,<xmm8=%xmm14 | ||
903 | pand %xmm10,%xmm14 | ||
904 | |||
905 | # qhasm: xmm12 ^= xmm9 | ||
906 | # asm 1: pxor <xmm9=int6464#11,<xmm12=int6464#13 | ||
907 | # asm 2: pxor <xmm9=%xmm10,<xmm12=%xmm12 | ||
908 | pxor %xmm10,%xmm12 | ||
909 | |||
910 | # qhasm: xmm15 &= xmm12 | ||
911 | # asm 1: pand <xmm12=int6464#13,<xmm15=int6464#16 | ||
912 | # asm 2: pand <xmm12=%xmm12,<xmm15=%xmm15 | ||
913 | pand %xmm12,%xmm15 | ||
914 | |||
915 | # qhasm: xmm12 = xmm3 | ||
916 | # asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#11 | ||
917 | # asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm10 | ||
918 | movdqa %xmm3,%xmm10 | ||
919 | |||
920 | # qhasm: xmm12 ^= xmm0 | ||
921 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#11 | ||
922 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm10 | ||
923 | pxor %xmm0,%xmm10 | ||
924 | |||
925 | # qhasm: xmm13 &= xmm12 | ||
926 | # asm 1: pand <xmm12=int6464#11,<xmm13=int6464#12 | ||
927 | # asm 2: pand <xmm12=%xmm10,<xmm13=%xmm11 | ||
928 | pand %xmm10,%xmm11 | ||
929 | |||
930 | # qhasm: xmm11 ^= xmm13 | ||
931 | # asm 1: pxor <xmm13=int6464#12,<xmm11=int6464#9 | ||
932 | # asm 2: pxor <xmm13=%xmm11,<xmm11=%xmm8 | ||
933 | pxor %xmm11,%xmm8 | ||
934 | |||
935 | # qhasm: xmm10 ^= xmm13 | ||
936 | # asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10 | ||
937 | # asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9 | ||
938 | pxor %xmm11,%xmm9 | ||
939 | |||
940 | # qhasm: xmm13 = xmm7 | ||
941 | # asm 1: movdqa <xmm7=int6464#8,>xmm13=int6464#11 | ||
942 | # asm 2: movdqa <xmm7=%xmm7,>xmm13=%xmm10 | ||
943 | movdqa %xmm7,%xmm10 | ||
944 | |||
945 | # qhasm: xmm13 ^= xmm1 | ||
946 | # asm 1: pxor <xmm1=int6464#2,<xmm13=int6464#11 | ||
947 | # asm 2: pxor <xmm1=%xmm1,<xmm13=%xmm10 | ||
948 | pxor %xmm1,%xmm10 | ||
949 | |||
950 | # qhasm: xmm12 = xmm5 | ||
951 | # asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#12 | ||
952 | # asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm11 | ||
953 | movdqa %xmm5,%xmm11 | ||
954 | |||
955 | # qhasm: xmm9 = xmm13 | ||
956 | # asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13 | ||
957 | # asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12 | ||
958 | movdqa %xmm10,%xmm12 | ||
959 | |||
960 | # qhasm: xmm12 ^= xmm6 | ||
961 | # asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#12 | ||
962 | # asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm11 | ||
963 | pxor %xmm6,%xmm11 | ||
964 | |||
965 | # qhasm: xmm9 |= xmm12 | ||
966 | # asm 1: por <xmm12=int6464#12,<xmm9=int6464#13 | ||
967 | # asm 2: por <xmm12=%xmm11,<xmm9=%xmm12 | ||
968 | por %xmm11,%xmm12 | ||
969 | |||
970 | # qhasm: xmm13 &= xmm12 | ||
971 | # asm 1: pand <xmm12=int6464#12,<xmm13=int6464#11 | ||
972 | # asm 2: pand <xmm12=%xmm11,<xmm13=%xmm10 | ||
973 | pand %xmm11,%xmm10 | ||
974 | |||
975 | # qhasm: xmm8 ^= xmm13 | ||
976 | # asm 1: pxor <xmm13=int6464#11,<xmm8=int6464#15 | ||
977 | # asm 2: pxor <xmm13=%xmm10,<xmm8=%xmm14 | ||
978 | pxor %xmm10,%xmm14 | ||
979 | |||
980 | # qhasm: xmm11 ^= xmm15 | ||
981 | # asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#9 | ||
982 | # asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm8 | ||
983 | pxor %xmm15,%xmm8 | ||
984 | |||
985 | # qhasm: xmm10 ^= xmm14 | ||
986 | # asm 1: pxor <xmm14=int6464#14,<xmm10=int6464#10 | ||
987 | # asm 2: pxor <xmm14=%xmm13,<xmm10=%xmm9 | ||
988 | pxor %xmm13,%xmm9 | ||
989 | |||
990 | # qhasm: xmm9 ^= xmm15 | ||
991 | # asm 1: pxor <xmm15=int6464#16,<xmm9=int6464#13 | ||
992 | # asm 2: pxor <xmm15=%xmm15,<xmm9=%xmm12 | ||
993 | pxor %xmm15,%xmm12 | ||
994 | |||
995 | # qhasm: xmm8 ^= xmm14 | ||
996 | # asm 1: pxor <xmm14=int6464#14,<xmm8=int6464#15 | ||
997 | # asm 2: pxor <xmm14=%xmm13,<xmm8=%xmm14 | ||
998 | pxor %xmm13,%xmm14 | ||
999 | |||
1000 | # qhasm: xmm9 ^= xmm14 | ||
1001 | # asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13 | ||
1002 | # asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12 | ||
1003 | pxor %xmm13,%xmm12 | ||
1004 | |||
1005 | # qhasm: xmm12 = xmm2 | ||
1006 | # asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#11 | ||
1007 | # asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm10 | ||
1008 | movdqa %xmm2,%xmm10 | ||
1009 | |||
1010 | # qhasm: xmm13 = xmm4 | ||
1011 | # asm 1: movdqa <xmm4=int6464#5,>xmm13=int6464#12 | ||
1012 | # asm 2: movdqa <xmm4=%xmm4,>xmm13=%xmm11 | ||
1013 | movdqa %xmm4,%xmm11 | ||
1014 | |||
1015 | # qhasm: xmm14 = xmm1 | ||
1016 | # asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14 | ||
1017 | # asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13 | ||
1018 | movdqa %xmm1,%xmm13 | ||
1019 | |||
1020 | # qhasm: xmm15 = xmm7 | ||
1021 | # asm 1: movdqa <xmm7=int6464#8,>xmm15=int6464#16 | ||
1022 | # asm 2: movdqa <xmm7=%xmm7,>xmm15=%xmm15 | ||
1023 | movdqa %xmm7,%xmm15 | ||
1024 | |||
1025 | # qhasm: xmm12 &= xmm3 | ||
1026 | # asm 1: pand <xmm3=int6464#4,<xmm12=int6464#11 | ||
1027 | # asm 2: pand <xmm3=%xmm3,<xmm12=%xmm10 | ||
1028 | pand %xmm3,%xmm10 | ||
1029 | |||
1030 | # qhasm: xmm13 &= xmm0 | ||
1031 | # asm 1: pand <xmm0=int6464#1,<xmm13=int6464#12 | ||
1032 | # asm 2: pand <xmm0=%xmm0,<xmm13=%xmm11 | ||
1033 | pand %xmm0,%xmm11 | ||
1034 | |||
1035 | # qhasm: xmm14 &= xmm5 | ||
1036 | # asm 1: pand <xmm5=int6464#6,<xmm14=int6464#14 | ||
1037 | # asm 2: pand <xmm5=%xmm5,<xmm14=%xmm13 | ||
1038 | pand %xmm5,%xmm13 | ||
1039 | |||
1040 | # qhasm: xmm15 |= xmm6 | ||
1041 | # asm 1: por <xmm6=int6464#7,<xmm15=int6464#16 | ||
1042 | # asm 2: por <xmm6=%xmm6,<xmm15=%xmm15 | ||
1043 | por %xmm6,%xmm15 | ||
1044 | |||
1045 | # qhasm: xmm11 ^= xmm12 | ||
1046 | # asm 1: pxor <xmm12=int6464#11,<xmm11=int6464#9 | ||
1047 | # asm 2: pxor <xmm12=%xmm10,<xmm11=%xmm8 | ||
1048 | pxor %xmm10,%xmm8 | ||
1049 | |||
1050 | # qhasm: xmm10 ^= xmm13 | ||
1051 | # asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10 | ||
1052 | # asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9 | ||
1053 | pxor %xmm11,%xmm9 | ||
1054 | |||
1055 | # qhasm: xmm9 ^= xmm14 | ||
1056 | # asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13 | ||
1057 | # asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12 | ||
1058 | pxor %xmm13,%xmm12 | ||
1059 | |||
1060 | # qhasm: xmm8 ^= xmm15 | ||
1061 | # asm 1: pxor <xmm15=int6464#16,<xmm8=int6464#15 | ||
1062 | # asm 2: pxor <xmm15=%xmm15,<xmm8=%xmm14 | ||
1063 | pxor %xmm15,%xmm14 | ||
1064 | |||
1065 | # qhasm: xmm12 = xmm11 | ||
1066 | # asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11 | ||
1067 | # asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10 | ||
1068 | movdqa %xmm8,%xmm10 | ||
1069 | |||
1070 | # qhasm: xmm12 ^= xmm10 | ||
1071 | # asm 1: pxor <xmm10=int6464#10,<xmm12=int6464#11 | ||
1072 | # asm 2: pxor <xmm10=%xmm9,<xmm12=%xmm10 | ||
1073 | pxor %xmm9,%xmm10 | ||
1074 | |||
1075 | # qhasm: xmm11 &= xmm9 | ||
1076 | # asm 1: pand <xmm9=int6464#13,<xmm11=int6464#9 | ||
1077 | # asm 2: pand <xmm9=%xmm12,<xmm11=%xmm8 | ||
1078 | pand %xmm12,%xmm8 | ||
1079 | |||
1080 | # qhasm: xmm14 = xmm8 | ||
1081 | # asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12 | ||
1082 | # asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11 | ||
1083 | movdqa %xmm14,%xmm11 | ||
1084 | |||
1085 | # qhasm: xmm14 ^= xmm11 | ||
1086 | # asm 1: pxor <xmm11=int6464#9,<xmm14=int6464#12 | ||
1087 | # asm 2: pxor <xmm11=%xmm8,<xmm14=%xmm11 | ||
1088 | pxor %xmm8,%xmm11 | ||
1089 | |||
1090 | # qhasm: xmm15 = xmm12 | ||
1091 | # asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14 | ||
1092 | # asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13 | ||
1093 | movdqa %xmm10,%xmm13 | ||
1094 | |||
1095 | # qhasm: xmm15 &= xmm14 | ||
1096 | # asm 1: pand <xmm14=int6464#12,<xmm15=int6464#14 | ||
1097 | # asm 2: pand <xmm14=%xmm11,<xmm15=%xmm13 | ||
1098 | pand %xmm11,%xmm13 | ||
1099 | |||
1100 | # qhasm: xmm15 ^= xmm10 | ||
1101 | # asm 1: pxor <xmm10=int6464#10,<xmm15=int6464#14 | ||
1102 | # asm 2: pxor <xmm10=%xmm9,<xmm15=%xmm13 | ||
1103 | pxor %xmm9,%xmm13 | ||
1104 | |||
1105 | # qhasm: xmm13 = xmm9 | ||
1106 | # asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16 | ||
1107 | # asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15 | ||
1108 | movdqa %xmm12,%xmm15 | ||
1109 | |||
1110 | # qhasm: xmm13 ^= xmm8 | ||
1111 | # asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16 | ||
1112 | # asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15 | ||
1113 | pxor %xmm14,%xmm15 | ||
1114 | |||
1115 | # qhasm: xmm11 ^= xmm10 | ||
1116 | # asm 1: pxor <xmm10=int6464#10,<xmm11=int6464#9 | ||
1117 | # asm 2: pxor <xmm10=%xmm9,<xmm11=%xmm8 | ||
1118 | pxor %xmm9,%xmm8 | ||
1119 | |||
1120 | # qhasm: xmm13 &= xmm11 | ||
1121 | # asm 1: pand <xmm11=int6464#9,<xmm13=int6464#16 | ||
1122 | # asm 2: pand <xmm11=%xmm8,<xmm13=%xmm15 | ||
1123 | pand %xmm8,%xmm15 | ||
1124 | |||
1125 | # qhasm: xmm13 ^= xmm8 | ||
1126 | # asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16 | ||
1127 | # asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15 | ||
1128 | pxor %xmm14,%xmm15 | ||
1129 | |||
1130 | # qhasm: xmm9 ^= xmm13 | ||
1131 | # asm 1: pxor <xmm13=int6464#16,<xmm9=int6464#13 | ||
1132 | # asm 2: pxor <xmm13=%xmm15,<xmm9=%xmm12 | ||
1133 | pxor %xmm15,%xmm12 | ||
1134 | |||
1135 | # qhasm: xmm10 = xmm14 | ||
1136 | # asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9 | ||
1137 | # asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8 | ||
1138 | movdqa %xmm11,%xmm8 | ||
1139 | |||
1140 | # qhasm: xmm10 ^= xmm13 | ||
1141 | # asm 1: pxor <xmm13=int6464#16,<xmm10=int6464#9 | ||
1142 | # asm 2: pxor <xmm13=%xmm15,<xmm10=%xmm8 | ||
1143 | pxor %xmm15,%xmm8 | ||
1144 | |||
1145 | # qhasm: xmm10 &= xmm8 | ||
1146 | # asm 1: pand <xmm8=int6464#15,<xmm10=int6464#9 | ||
1147 | # asm 2: pand <xmm8=%xmm14,<xmm10=%xmm8 | ||
1148 | pand %xmm14,%xmm8 | ||
1149 | |||
1150 | # qhasm: xmm9 ^= xmm10 | ||
1151 | # asm 1: pxor <xmm10=int6464#9,<xmm9=int6464#13 | ||
1152 | # asm 2: pxor <xmm10=%xmm8,<xmm9=%xmm12 | ||
1153 | pxor %xmm8,%xmm12 | ||
1154 | |||
1155 | # qhasm: xmm14 ^= xmm10 | ||
1156 | # asm 1: pxor <xmm10=int6464#9,<xmm14=int6464#12 | ||
1157 | # asm 2: pxor <xmm10=%xmm8,<xmm14=%xmm11 | ||
1158 | pxor %xmm8,%xmm11 | ||
1159 | |||
1160 | # qhasm: xmm14 &= xmm15 | ||
1161 | # asm 1: pand <xmm15=int6464#14,<xmm14=int6464#12 | ||
1162 | # asm 2: pand <xmm15=%xmm13,<xmm14=%xmm11 | ||
1163 | pand %xmm13,%xmm11 | ||
1164 | |||
1165 | # qhasm: xmm14 ^= xmm12 | ||
1166 | # asm 1: pxor <xmm12=int6464#11,<xmm14=int6464#12 | ||
1167 | # asm 2: pxor <xmm12=%xmm10,<xmm14=%xmm11 | ||
1168 | pxor %xmm10,%xmm11 | ||
1169 | |||
1170 | # qhasm: xmm12 = xmm6 | ||
1171 | # asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#9 | ||
1172 | # asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm8 | ||
1173 | movdqa %xmm6,%xmm8 | ||
1174 | |||
1175 | # qhasm: xmm8 = xmm5 | ||
1176 | # asm 1: movdqa <xmm5=int6464#6,>xmm8=int6464#10 | ||
1177 | # asm 2: movdqa <xmm5=%xmm5,>xmm8=%xmm9 | ||
1178 | movdqa %xmm5,%xmm9 | ||
1179 | |||
1180 | # qhasm: xmm10 = xmm15 | ||
1181 | # asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11 | ||
1182 | # asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10 | ||
1183 | movdqa %xmm13,%xmm10 | ||
1184 | |||
1185 | # qhasm: xmm10 ^= xmm14 | ||
1186 | # asm 1: pxor <xmm14=int6464#12,<xmm10=int6464#11 | ||
1187 | # asm 2: pxor <xmm14=%xmm11,<xmm10=%xmm10 | ||
1188 | pxor %xmm11,%xmm10 | ||
1189 | |||
1190 | # qhasm: xmm10 &= xmm6 | ||
1191 | # asm 1: pand <xmm6=int6464#7,<xmm10=int6464#11 | ||
1192 | # asm 2: pand <xmm6=%xmm6,<xmm10=%xmm10 | ||
1193 | pand %xmm6,%xmm10 | ||
1194 | |||
1195 | # qhasm: xmm6 ^= xmm5 | ||
1196 | # asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7 | ||
1197 | # asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6 | ||
1198 | pxor %xmm5,%xmm6 | ||
1199 | |||
1200 | # qhasm: xmm6 &= xmm14 | ||
1201 | # asm 1: pand <xmm14=int6464#12,<xmm6=int6464#7 | ||
1202 | # asm 2: pand <xmm14=%xmm11,<xmm6=%xmm6 | ||
1203 | pand %xmm11,%xmm6 | ||
1204 | |||
1205 | # qhasm: xmm5 &= xmm15 | ||
1206 | # asm 1: pand <xmm15=int6464#14,<xmm5=int6464#6 | ||
1207 | # asm 2: pand <xmm15=%xmm13,<xmm5=%xmm5 | ||
1208 | pand %xmm13,%xmm5 | ||
1209 | |||
1210 | # qhasm: xmm6 ^= xmm5 | ||
1211 | # asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7 | ||
1212 | # asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6 | ||
1213 | pxor %xmm5,%xmm6 | ||
1214 | |||
1215 | # qhasm: xmm5 ^= xmm10 | ||
1216 | # asm 1: pxor <xmm10=int6464#11,<xmm5=int6464#6 | ||
1217 | # asm 2: pxor <xmm10=%xmm10,<xmm5=%xmm5 | ||
1218 | pxor %xmm10,%xmm5 | ||
1219 | |||
1220 | # qhasm: xmm12 ^= xmm0 | ||
1221 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#9 | ||
1222 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm8 | ||
1223 | pxor %xmm0,%xmm8 | ||
1224 | |||
1225 | # qhasm: xmm8 ^= xmm3 | ||
1226 | # asm 1: pxor <xmm3=int6464#4,<xmm8=int6464#10 | ||
1227 | # asm 2: pxor <xmm3=%xmm3,<xmm8=%xmm9 | ||
1228 | pxor %xmm3,%xmm9 | ||
1229 | |||
1230 | # qhasm: xmm15 ^= xmm13 | ||
1231 | # asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14 | ||
1232 | # asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13 | ||
1233 | pxor %xmm15,%xmm13 | ||
1234 | |||
1235 | # qhasm: xmm14 ^= xmm9 | ||
1236 | # asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12 | ||
1237 | # asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11 | ||
1238 | pxor %xmm12,%xmm11 | ||
1239 | |||
1240 | # qhasm: xmm11 = xmm15 | ||
1241 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
1242 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
1243 | movdqa %xmm13,%xmm10 | ||
1244 | |||
1245 | # qhasm: xmm11 ^= xmm14 | ||
1246 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
1247 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
1248 | pxor %xmm11,%xmm10 | ||
1249 | |||
1250 | # qhasm: xmm11 &= xmm12 | ||
1251 | # asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11 | ||
1252 | # asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10 | ||
1253 | pand %xmm8,%xmm10 | ||
1254 | |||
1255 | # qhasm: xmm12 ^= xmm8 | ||
1256 | # asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9 | ||
1257 | # asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8 | ||
1258 | pxor %xmm9,%xmm8 | ||
1259 | |||
1260 | # qhasm: xmm12 &= xmm14 | ||
1261 | # asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9 | ||
1262 | # asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8 | ||
1263 | pand %xmm11,%xmm8 | ||
1264 | |||
1265 | # qhasm: xmm8 &= xmm15 | ||
1266 | # asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10 | ||
1267 | # asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9 | ||
1268 | pand %xmm13,%xmm9 | ||
1269 | |||
1270 | # qhasm: xmm8 ^= xmm12 | ||
1271 | # asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10 | ||
1272 | # asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9 | ||
1273 | pxor %xmm8,%xmm9 | ||
1274 | |||
1275 | # qhasm: xmm12 ^= xmm11 | ||
1276 | # asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9 | ||
1277 | # asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8 | ||
1278 | pxor %xmm10,%xmm8 | ||
1279 | |||
1280 | # qhasm: xmm10 = xmm13 | ||
1281 | # asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11 | ||
1282 | # asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10 | ||
1283 | movdqa %xmm15,%xmm10 | ||
1284 | |||
1285 | # qhasm: xmm10 ^= xmm9 | ||
1286 | # asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11 | ||
1287 | # asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10 | ||
1288 | pxor %xmm12,%xmm10 | ||
1289 | |||
1290 | # qhasm: xmm10 &= xmm0 | ||
1291 | # asm 1: pand <xmm0=int6464#1,<xmm10=int6464#11 | ||
1292 | # asm 2: pand <xmm0=%xmm0,<xmm10=%xmm10 | ||
1293 | pand %xmm0,%xmm10 | ||
1294 | |||
1295 | # qhasm: xmm0 ^= xmm3 | ||
1296 | # asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1 | ||
1297 | # asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0 | ||
1298 | pxor %xmm3,%xmm0 | ||
1299 | |||
1300 | # qhasm: xmm0 &= xmm9 | ||
1301 | # asm 1: pand <xmm9=int6464#13,<xmm0=int6464#1 | ||
1302 | # asm 2: pand <xmm9=%xmm12,<xmm0=%xmm0 | ||
1303 | pand %xmm12,%xmm0 | ||
1304 | |||
1305 | # qhasm: xmm3 &= xmm13 | ||
1306 | # asm 1: pand <xmm13=int6464#16,<xmm3=int6464#4 | ||
1307 | # asm 2: pand <xmm13=%xmm15,<xmm3=%xmm3 | ||
1308 | pand %xmm15,%xmm3 | ||
1309 | |||
1310 | # qhasm: xmm0 ^= xmm3 | ||
1311 | # asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1 | ||
1312 | # asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0 | ||
1313 | pxor %xmm3,%xmm0 | ||
1314 | |||
1315 | # qhasm: xmm3 ^= xmm10 | ||
1316 | # asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4 | ||
1317 | # asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3 | ||
1318 | pxor %xmm10,%xmm3 | ||
1319 | |||
1320 | # qhasm: xmm6 ^= xmm12 | ||
1321 | # asm 1: pxor <xmm12=int6464#9,<xmm6=int6464#7 | ||
1322 | # asm 2: pxor <xmm12=%xmm8,<xmm6=%xmm6 | ||
1323 | pxor %xmm8,%xmm6 | ||
1324 | |||
1325 | # qhasm: xmm0 ^= xmm12 | ||
1326 | # asm 1: pxor <xmm12=int6464#9,<xmm0=int6464#1 | ||
1327 | # asm 2: pxor <xmm12=%xmm8,<xmm0=%xmm0 | ||
1328 | pxor %xmm8,%xmm0 | ||
1329 | |||
1330 | # qhasm: xmm5 ^= xmm8 | ||
1331 | # asm 1: pxor <xmm8=int6464#10,<xmm5=int6464#6 | ||
1332 | # asm 2: pxor <xmm8=%xmm9,<xmm5=%xmm5 | ||
1333 | pxor %xmm9,%xmm5 | ||
1334 | |||
1335 | # qhasm: xmm3 ^= xmm8 | ||
1336 | # asm 1: pxor <xmm8=int6464#10,<xmm3=int6464#4 | ||
1337 | # asm 2: pxor <xmm8=%xmm9,<xmm3=%xmm3 | ||
1338 | pxor %xmm9,%xmm3 | ||
1339 | |||
1340 | # qhasm: xmm12 = xmm7 | ||
1341 | # asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#9 | ||
1342 | # asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm8 | ||
1343 | movdqa %xmm7,%xmm8 | ||
1344 | |||
1345 | # qhasm: xmm8 = xmm1 | ||
1346 | # asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10 | ||
1347 | # asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9 | ||
1348 | movdqa %xmm1,%xmm9 | ||
1349 | |||
1350 | # qhasm: xmm12 ^= xmm4 | ||
1351 | # asm 1: pxor <xmm4=int6464#5,<xmm12=int6464#9 | ||
1352 | # asm 2: pxor <xmm4=%xmm4,<xmm12=%xmm8 | ||
1353 | pxor %xmm4,%xmm8 | ||
1354 | |||
1355 | # qhasm: xmm8 ^= xmm2 | ||
1356 | # asm 1: pxor <xmm2=int6464#3,<xmm8=int6464#10 | ||
1357 | # asm 2: pxor <xmm2=%xmm2,<xmm8=%xmm9 | ||
1358 | pxor %xmm2,%xmm9 | ||
1359 | |||
1360 | # qhasm: xmm11 = xmm15 | ||
1361 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
1362 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
1363 | movdqa %xmm13,%xmm10 | ||
1364 | |||
1365 | # qhasm: xmm11 ^= xmm14 | ||
1366 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
1367 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
1368 | pxor %xmm11,%xmm10 | ||
1369 | |||
1370 | # qhasm: xmm11 &= xmm12 | ||
1371 | # asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11 | ||
1372 | # asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10 | ||
1373 | pand %xmm8,%xmm10 | ||
1374 | |||
1375 | # qhasm: xmm12 ^= xmm8 | ||
1376 | # asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9 | ||
1377 | # asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8 | ||
1378 | pxor %xmm9,%xmm8 | ||
1379 | |||
1380 | # qhasm: xmm12 &= xmm14 | ||
1381 | # asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9 | ||
1382 | # asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8 | ||
1383 | pand %xmm11,%xmm8 | ||
1384 | |||
1385 | # qhasm: xmm8 &= xmm15 | ||
1386 | # asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10 | ||
1387 | # asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9 | ||
1388 | pand %xmm13,%xmm9 | ||
1389 | |||
1390 | # qhasm: xmm8 ^= xmm12 | ||
1391 | # asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10 | ||
1392 | # asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9 | ||
1393 | pxor %xmm8,%xmm9 | ||
1394 | |||
1395 | # qhasm: xmm12 ^= xmm11 | ||
1396 | # asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9 | ||
1397 | # asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8 | ||
1398 | pxor %xmm10,%xmm8 | ||
1399 | |||
1400 | # qhasm: xmm10 = xmm13 | ||
1401 | # asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11 | ||
1402 | # asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10 | ||
1403 | movdqa %xmm15,%xmm10 | ||
1404 | |||
1405 | # qhasm: xmm10 ^= xmm9 | ||
1406 | # asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11 | ||
1407 | # asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10 | ||
1408 | pxor %xmm12,%xmm10 | ||
1409 | |||
1410 | # qhasm: xmm10 &= xmm4 | ||
1411 | # asm 1: pand <xmm4=int6464#5,<xmm10=int6464#11 | ||
1412 | # asm 2: pand <xmm4=%xmm4,<xmm10=%xmm10 | ||
1413 | pand %xmm4,%xmm10 | ||
1414 | |||
1415 | # qhasm: xmm4 ^= xmm2 | ||
1416 | # asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5 | ||
1417 | # asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4 | ||
1418 | pxor %xmm2,%xmm4 | ||
1419 | |||
1420 | # qhasm: xmm4 &= xmm9 | ||
1421 | # asm 1: pand <xmm9=int6464#13,<xmm4=int6464#5 | ||
1422 | # asm 2: pand <xmm9=%xmm12,<xmm4=%xmm4 | ||
1423 | pand %xmm12,%xmm4 | ||
1424 | |||
1425 | # qhasm: xmm2 &= xmm13 | ||
1426 | # asm 1: pand <xmm13=int6464#16,<xmm2=int6464#3 | ||
1427 | # asm 2: pand <xmm13=%xmm15,<xmm2=%xmm2 | ||
1428 | pand %xmm15,%xmm2 | ||
1429 | |||
1430 | # qhasm: xmm4 ^= xmm2 | ||
1431 | # asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5 | ||
1432 | # asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4 | ||
1433 | pxor %xmm2,%xmm4 | ||
1434 | |||
1435 | # qhasm: xmm2 ^= xmm10 | ||
1436 | # asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3 | ||
1437 | # asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2 | ||
1438 | pxor %xmm10,%xmm2 | ||
1439 | |||
1440 | # qhasm: xmm15 ^= xmm13 | ||
1441 | # asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14 | ||
1442 | # asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13 | ||
1443 | pxor %xmm15,%xmm13 | ||
1444 | |||
1445 | # qhasm: xmm14 ^= xmm9 | ||
1446 | # asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12 | ||
1447 | # asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11 | ||
1448 | pxor %xmm12,%xmm11 | ||
1449 | |||
1450 | # qhasm: xmm11 = xmm15 | ||
1451 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
1452 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
1453 | movdqa %xmm13,%xmm10 | ||
1454 | |||
1455 | # qhasm: xmm11 ^= xmm14 | ||
1456 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
1457 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
1458 | pxor %xmm11,%xmm10 | ||
1459 | |||
1460 | # qhasm: xmm11 &= xmm7 | ||
1461 | # asm 1: pand <xmm7=int6464#8,<xmm11=int6464#11 | ||
1462 | # asm 2: pand <xmm7=%xmm7,<xmm11=%xmm10 | ||
1463 | pand %xmm7,%xmm10 | ||
1464 | |||
1465 | # qhasm: xmm7 ^= xmm1 | ||
1466 | # asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8 | ||
1467 | # asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7 | ||
1468 | pxor %xmm1,%xmm7 | ||
1469 | |||
1470 | # qhasm: xmm7 &= xmm14 | ||
1471 | # asm 1: pand <xmm14=int6464#12,<xmm7=int6464#8 | ||
1472 | # asm 2: pand <xmm14=%xmm11,<xmm7=%xmm7 | ||
1473 | pand %xmm11,%xmm7 | ||
1474 | |||
1475 | # qhasm: xmm1 &= xmm15 | ||
1476 | # asm 1: pand <xmm15=int6464#14,<xmm1=int6464#2 | ||
1477 | # asm 2: pand <xmm15=%xmm13,<xmm1=%xmm1 | ||
1478 | pand %xmm13,%xmm1 | ||
1479 | |||
1480 | # qhasm: xmm7 ^= xmm1 | ||
1481 | # asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8 | ||
1482 | # asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7 | ||
1483 | pxor %xmm1,%xmm7 | ||
1484 | |||
1485 | # qhasm: xmm1 ^= xmm11 | ||
1486 | # asm 1: pxor <xmm11=int6464#11,<xmm1=int6464#2 | ||
1487 | # asm 2: pxor <xmm11=%xmm10,<xmm1=%xmm1 | ||
1488 | pxor %xmm10,%xmm1 | ||
1489 | |||
1490 | # qhasm: xmm7 ^= xmm12 | ||
1491 | # asm 1: pxor <xmm12=int6464#9,<xmm7=int6464#8 | ||
1492 | # asm 2: pxor <xmm12=%xmm8,<xmm7=%xmm7 | ||
1493 | pxor %xmm8,%xmm7 | ||
1494 | |||
1495 | # qhasm: xmm4 ^= xmm12 | ||
1496 | # asm 1: pxor <xmm12=int6464#9,<xmm4=int6464#5 | ||
1497 | # asm 2: pxor <xmm12=%xmm8,<xmm4=%xmm4 | ||
1498 | pxor %xmm8,%xmm4 | ||
1499 | |||
1500 | # qhasm: xmm1 ^= xmm8 | ||
1501 | # asm 1: pxor <xmm8=int6464#10,<xmm1=int6464#2 | ||
1502 | # asm 2: pxor <xmm8=%xmm9,<xmm1=%xmm1 | ||
1503 | pxor %xmm9,%xmm1 | ||
1504 | |||
1505 | # qhasm: xmm2 ^= xmm8 | ||
1506 | # asm 1: pxor <xmm8=int6464#10,<xmm2=int6464#3 | ||
1507 | # asm 2: pxor <xmm8=%xmm9,<xmm2=%xmm2 | ||
1508 | pxor %xmm9,%xmm2 | ||
1509 | |||
1510 | # qhasm: xmm7 ^= xmm0 | ||
1511 | # asm 1: pxor <xmm0=int6464#1,<xmm7=int6464#8 | ||
1512 | # asm 2: pxor <xmm0=%xmm0,<xmm7=%xmm7 | ||
1513 | pxor %xmm0,%xmm7 | ||
1514 | |||
1515 | # qhasm: xmm1 ^= xmm6 | ||
1516 | # asm 1: pxor <xmm6=int6464#7,<xmm1=int6464#2 | ||
1517 | # asm 2: pxor <xmm6=%xmm6,<xmm1=%xmm1 | ||
1518 | pxor %xmm6,%xmm1 | ||
1519 | |||
1520 | # qhasm: xmm4 ^= xmm7 | ||
1521 | # asm 1: pxor <xmm7=int6464#8,<xmm4=int6464#5 | ||
1522 | # asm 2: pxor <xmm7=%xmm7,<xmm4=%xmm4 | ||
1523 | pxor %xmm7,%xmm4 | ||
1524 | |||
1525 | # qhasm: xmm6 ^= xmm0 | ||
1526 | # asm 1: pxor <xmm0=int6464#1,<xmm6=int6464#7 | ||
1527 | # asm 2: pxor <xmm0=%xmm0,<xmm6=%xmm6 | ||
1528 | pxor %xmm0,%xmm6 | ||
1529 | |||
1530 | # qhasm: xmm0 ^= xmm1 | ||
1531 | # asm 1: pxor <xmm1=int6464#2,<xmm0=int6464#1 | ||
1532 | # asm 2: pxor <xmm1=%xmm1,<xmm0=%xmm0 | ||
1533 | pxor %xmm1,%xmm0 | ||
1534 | |||
1535 | # qhasm: xmm1 ^= xmm5 | ||
1536 | # asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2 | ||
1537 | # asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1 | ||
1538 | pxor %xmm5,%xmm1 | ||
1539 | |||
1540 | # qhasm: xmm5 ^= xmm2 | ||
1541 | # asm 1: pxor <xmm2=int6464#3,<xmm5=int6464#6 | ||
1542 | # asm 2: pxor <xmm2=%xmm2,<xmm5=%xmm5 | ||
1543 | pxor %xmm2,%xmm5 | ||
1544 | |||
1545 | # qhasm: xmm4 ^= xmm5 | ||
1546 | # asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5 | ||
1547 | # asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4 | ||
1548 | pxor %xmm5,%xmm4 | ||
1549 | |||
1550 | # qhasm: xmm2 ^= xmm3 | ||
1551 | # asm 1: pxor <xmm3=int6464#4,<xmm2=int6464#3 | ||
1552 | # asm 2: pxor <xmm3=%xmm3,<xmm2=%xmm2 | ||
1553 | pxor %xmm3,%xmm2 | ||
1554 | |||
1555 | # qhasm: xmm3 ^= xmm5 | ||
1556 | # asm 1: pxor <xmm5=int6464#6,<xmm3=int6464#4 | ||
1557 | # asm 2: pxor <xmm5=%xmm5,<xmm3=%xmm3 | ||
1558 | pxor %xmm5,%xmm3 | ||
1559 | |||
1560 | # qhasm: xmm6 ^= xmm3 | ||
1561 | # asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7 | ||
1562 | # asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6 | ||
1563 | pxor %xmm3,%xmm6 | ||
1564 | |||
1565 | # qhasm: xmm8 = shuffle dwords of xmm0 by 0x93 | ||
1566 | # asm 1: pshufd $0x93,<xmm0=int6464#1,>xmm8=int6464#9 | ||
1567 | # asm 2: pshufd $0x93,<xmm0=%xmm0,>xmm8=%xmm8 | ||
1568 | pshufd $0x93,%xmm0,%xmm8 | ||
1569 | |||
1570 | # qhasm: xmm9 = shuffle dwords of xmm1 by 0x93 | ||
1571 | # asm 1: pshufd $0x93,<xmm1=int6464#2,>xmm9=int6464#10 | ||
1572 | # asm 2: pshufd $0x93,<xmm1=%xmm1,>xmm9=%xmm9 | ||
1573 | pshufd $0x93,%xmm1,%xmm9 | ||
1574 | |||
1575 | # qhasm: xmm10 = shuffle dwords of xmm4 by 0x93 | ||
1576 | # asm 1: pshufd $0x93,<xmm4=int6464#5,>xmm10=int6464#11 | ||
1577 | # asm 2: pshufd $0x93,<xmm4=%xmm4,>xmm10=%xmm10 | ||
1578 | pshufd $0x93,%xmm4,%xmm10 | ||
1579 | |||
1580 | # qhasm: xmm11 = shuffle dwords of xmm6 by 0x93 | ||
1581 | # asm 1: pshufd $0x93,<xmm6=int6464#7,>xmm11=int6464#12 | ||
1582 | # asm 2: pshufd $0x93,<xmm6=%xmm6,>xmm11=%xmm11 | ||
1583 | pshufd $0x93,%xmm6,%xmm11 | ||
1584 | |||
1585 | # qhasm: xmm12 = shuffle dwords of xmm3 by 0x93 | ||
1586 | # asm 1: pshufd $0x93,<xmm3=int6464#4,>xmm12=int6464#13 | ||
1587 | # asm 2: pshufd $0x93,<xmm3=%xmm3,>xmm12=%xmm12 | ||
1588 | pshufd $0x93,%xmm3,%xmm12 | ||
1589 | |||
1590 | # qhasm: xmm13 = shuffle dwords of xmm7 by 0x93 | ||
1591 | # asm 1: pshufd $0x93,<xmm7=int6464#8,>xmm13=int6464#14 | ||
1592 | # asm 2: pshufd $0x93,<xmm7=%xmm7,>xmm13=%xmm13 | ||
1593 | pshufd $0x93,%xmm7,%xmm13 | ||
1594 | |||
1595 | # qhasm: xmm14 = shuffle dwords of xmm2 by 0x93 | ||
1596 | # asm 1: pshufd $0x93,<xmm2=int6464#3,>xmm14=int6464#15 | ||
1597 | # asm 2: pshufd $0x93,<xmm2=%xmm2,>xmm14=%xmm14 | ||
1598 | pshufd $0x93,%xmm2,%xmm14 | ||
1599 | |||
1600 | # qhasm: xmm15 = shuffle dwords of xmm5 by 0x93 | ||
1601 | # asm 1: pshufd $0x93,<xmm5=int6464#6,>xmm15=int6464#16 | ||
1602 | # asm 2: pshufd $0x93,<xmm5=%xmm5,>xmm15=%xmm15 | ||
1603 | pshufd $0x93,%xmm5,%xmm15 | ||
1604 | |||
1605 | # qhasm: xmm0 ^= xmm8 | ||
1606 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
1607 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
1608 | pxor %xmm8,%xmm0 | ||
1609 | |||
1610 | # qhasm: xmm1 ^= xmm9 | ||
1611 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
1612 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
1613 | pxor %xmm9,%xmm1 | ||
1614 | |||
1615 | # qhasm: xmm4 ^= xmm10 | ||
1616 | # asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5 | ||
1617 | # asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4 | ||
1618 | pxor %xmm10,%xmm4 | ||
1619 | |||
1620 | # qhasm: xmm6 ^= xmm11 | ||
1621 | # asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7 | ||
1622 | # asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6 | ||
1623 | pxor %xmm11,%xmm6 | ||
1624 | |||
1625 | # qhasm: xmm3 ^= xmm12 | ||
1626 | # asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4 | ||
1627 | # asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3 | ||
1628 | pxor %xmm12,%xmm3 | ||
1629 | |||
1630 | # qhasm: xmm7 ^= xmm13 | ||
1631 | # asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8 | ||
1632 | # asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7 | ||
1633 | pxor %xmm13,%xmm7 | ||
1634 | |||
1635 | # qhasm: xmm2 ^= xmm14 | ||
1636 | # asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3 | ||
1637 | # asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2 | ||
1638 | pxor %xmm14,%xmm2 | ||
1639 | |||
1640 | # qhasm: xmm5 ^= xmm15 | ||
1641 | # asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6 | ||
1642 | # asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5 | ||
1643 | pxor %xmm15,%xmm5 | ||
1644 | |||
1645 | # qhasm: xmm8 ^= xmm5 | ||
1646 | # asm 1: pxor <xmm5=int6464#6,<xmm8=int6464#9 | ||
1647 | # asm 2: pxor <xmm5=%xmm5,<xmm8=%xmm8 | ||
1648 | pxor %xmm5,%xmm8 | ||
1649 | |||
1650 | # qhasm: xmm9 ^= xmm0 | ||
1651 | # asm 1: pxor <xmm0=int6464#1,<xmm9=int6464#10 | ||
1652 | # asm 2: pxor <xmm0=%xmm0,<xmm9=%xmm9 | ||
1653 | pxor %xmm0,%xmm9 | ||
1654 | |||
1655 | # qhasm: xmm10 ^= xmm1 | ||
1656 | # asm 1: pxor <xmm1=int6464#2,<xmm10=int6464#11 | ||
1657 | # asm 2: pxor <xmm1=%xmm1,<xmm10=%xmm10 | ||
1658 | pxor %xmm1,%xmm10 | ||
1659 | |||
1660 | # qhasm: xmm9 ^= xmm5 | ||
1661 | # asm 1: pxor <xmm5=int6464#6,<xmm9=int6464#10 | ||
1662 | # asm 2: pxor <xmm5=%xmm5,<xmm9=%xmm9 | ||
1663 | pxor %xmm5,%xmm9 | ||
1664 | |||
1665 | # qhasm: xmm11 ^= xmm4 | ||
1666 | # asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#12 | ||
1667 | # asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm11 | ||
1668 | pxor %xmm4,%xmm11 | ||
1669 | |||
1670 | # qhasm: xmm12 ^= xmm6 | ||
1671 | # asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#13 | ||
1672 | # asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm12 | ||
1673 | pxor %xmm6,%xmm12 | ||
1674 | |||
1675 | # qhasm: xmm13 ^= xmm3 | ||
1676 | # asm 1: pxor <xmm3=int6464#4,<xmm13=int6464#14 | ||
1677 | # asm 2: pxor <xmm3=%xmm3,<xmm13=%xmm13 | ||
1678 | pxor %xmm3,%xmm13 | ||
1679 | |||
1680 | # qhasm: xmm11 ^= xmm5 | ||
1681 | # asm 1: pxor <xmm5=int6464#6,<xmm11=int6464#12 | ||
1682 | # asm 2: pxor <xmm5=%xmm5,<xmm11=%xmm11 | ||
1683 | pxor %xmm5,%xmm11 | ||
1684 | |||
1685 | # qhasm: xmm14 ^= xmm7 | ||
1686 | # asm 1: pxor <xmm7=int6464#8,<xmm14=int6464#15 | ||
1687 | # asm 2: pxor <xmm7=%xmm7,<xmm14=%xmm14 | ||
1688 | pxor %xmm7,%xmm14 | ||
1689 | |||
1690 | # qhasm: xmm15 ^= xmm2 | ||
1691 | # asm 1: pxor <xmm2=int6464#3,<xmm15=int6464#16 | ||
1692 | # asm 2: pxor <xmm2=%xmm2,<xmm15=%xmm15 | ||
1693 | pxor %xmm2,%xmm15 | ||
1694 | |||
1695 | # qhasm: xmm12 ^= xmm5 | ||
1696 | # asm 1: pxor <xmm5=int6464#6,<xmm12=int6464#13 | ||
1697 | # asm 2: pxor <xmm5=%xmm5,<xmm12=%xmm12 | ||
1698 | pxor %xmm5,%xmm12 | ||
1699 | |||
1700 | # qhasm: xmm0 = shuffle dwords of xmm0 by 0x4E | ||
1701 | # asm 1: pshufd $0x4E,<xmm0=int6464#1,>xmm0=int6464#1 | ||
1702 | # asm 2: pshufd $0x4E,<xmm0=%xmm0,>xmm0=%xmm0 | ||
1703 | pshufd $0x4E,%xmm0,%xmm0 | ||
1704 | |||
1705 | # qhasm: xmm1 = shuffle dwords of xmm1 by 0x4E | ||
1706 | # asm 1: pshufd $0x4E,<xmm1=int6464#2,>xmm1=int6464#2 | ||
1707 | # asm 2: pshufd $0x4E,<xmm1=%xmm1,>xmm1=%xmm1 | ||
1708 | pshufd $0x4E,%xmm1,%xmm1 | ||
1709 | |||
1710 | # qhasm: xmm4 = shuffle dwords of xmm4 by 0x4E | ||
1711 | # asm 1: pshufd $0x4E,<xmm4=int6464#5,>xmm4=int6464#5 | ||
1712 | # asm 2: pshufd $0x4E,<xmm4=%xmm4,>xmm4=%xmm4 | ||
1713 | pshufd $0x4E,%xmm4,%xmm4 | ||
1714 | |||
1715 | # qhasm: xmm6 = shuffle dwords of xmm6 by 0x4E | ||
1716 | # asm 1: pshufd $0x4E,<xmm6=int6464#7,>xmm6=int6464#7 | ||
1717 | # asm 2: pshufd $0x4E,<xmm6=%xmm6,>xmm6=%xmm6 | ||
1718 | pshufd $0x4E,%xmm6,%xmm6 | ||
1719 | |||
1720 | # qhasm: xmm3 = shuffle dwords of xmm3 by 0x4E | ||
1721 | # asm 1: pshufd $0x4E,<xmm3=int6464#4,>xmm3=int6464#4 | ||
1722 | # asm 2: pshufd $0x4E,<xmm3=%xmm3,>xmm3=%xmm3 | ||
1723 | pshufd $0x4E,%xmm3,%xmm3 | ||
1724 | |||
1725 | # qhasm: xmm7 = shuffle dwords of xmm7 by 0x4E | ||
1726 | # asm 1: pshufd $0x4E,<xmm7=int6464#8,>xmm7=int6464#8 | ||
1727 | # asm 2: pshufd $0x4E,<xmm7=%xmm7,>xmm7=%xmm7 | ||
1728 | pshufd $0x4E,%xmm7,%xmm7 | ||
1729 | |||
1730 | # qhasm: xmm2 = shuffle dwords of xmm2 by 0x4E | ||
1731 | # asm 1: pshufd $0x4E,<xmm2=int6464#3,>xmm2=int6464#3 | ||
1732 | # asm 2: pshufd $0x4E,<xmm2=%xmm2,>xmm2=%xmm2 | ||
1733 | pshufd $0x4E,%xmm2,%xmm2 | ||
1734 | |||
1735 | # qhasm: xmm5 = shuffle dwords of xmm5 by 0x4E | ||
1736 | # asm 1: pshufd $0x4E,<xmm5=int6464#6,>xmm5=int6464#6 | ||
1737 | # asm 2: pshufd $0x4E,<xmm5=%xmm5,>xmm5=%xmm5 | ||
1738 | pshufd $0x4E,%xmm5,%xmm5 | ||
1739 | |||
1740 | # qhasm: xmm8 ^= xmm0 | ||
1741 | # asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9 | ||
1742 | # asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8 | ||
1743 | pxor %xmm0,%xmm8 | ||
1744 | |||
1745 | # qhasm: xmm9 ^= xmm1 | ||
1746 | # asm 1: pxor <xmm1=int6464#2,<xmm9=int6464#10 | ||
1747 | # asm 2: pxor <xmm1=%xmm1,<xmm9=%xmm9 | ||
1748 | pxor %xmm1,%xmm9 | ||
1749 | |||
1750 | # qhasm: xmm10 ^= xmm4 | ||
1751 | # asm 1: pxor <xmm4=int6464#5,<xmm10=int6464#11 | ||
1752 | # asm 2: pxor <xmm4=%xmm4,<xmm10=%xmm10 | ||
1753 | pxor %xmm4,%xmm10 | ||
1754 | |||
1755 | # qhasm: xmm11 ^= xmm6 | ||
1756 | # asm 1: pxor <xmm6=int6464#7,<xmm11=int6464#12 | ||
1757 | # asm 2: pxor <xmm6=%xmm6,<xmm11=%xmm11 | ||
1758 | pxor %xmm6,%xmm11 | ||
1759 | |||
1760 | # qhasm: xmm12 ^= xmm3 | ||
1761 | # asm 1: pxor <xmm3=int6464#4,<xmm12=int6464#13 | ||
1762 | # asm 2: pxor <xmm3=%xmm3,<xmm12=%xmm12 | ||
1763 | pxor %xmm3,%xmm12 | ||
1764 | |||
1765 | # qhasm: xmm13 ^= xmm7 | ||
1766 | # asm 1: pxor <xmm7=int6464#8,<xmm13=int6464#14 | ||
1767 | # asm 2: pxor <xmm7=%xmm7,<xmm13=%xmm13 | ||
1768 | pxor %xmm7,%xmm13 | ||
1769 | |||
1770 | # qhasm: xmm14 ^= xmm2 | ||
1771 | # asm 1: pxor <xmm2=int6464#3,<xmm14=int6464#15 | ||
1772 | # asm 2: pxor <xmm2=%xmm2,<xmm14=%xmm14 | ||
1773 | pxor %xmm2,%xmm14 | ||
1774 | |||
1775 | # qhasm: xmm15 ^= xmm5 | ||
1776 | # asm 1: pxor <xmm5=int6464#6,<xmm15=int6464#16 | ||
1777 | # asm 2: pxor <xmm5=%xmm5,<xmm15=%xmm15 | ||
1778 | pxor %xmm5,%xmm15 | ||
1779 | |||
1780 | # qhasm: xmm8 ^= *(int128 *)(c + 128) | ||
1781 | # asm 1: pxor 128(<c=int64#4),<xmm8=int6464#9 | ||
1782 | # asm 2: pxor 128(<c=%rcx),<xmm8=%xmm8 | ||
1783 | pxor 128(%rcx),%xmm8 | ||
1784 | |||
1785 | # qhasm: shuffle bytes of xmm8 by SR | ||
1786 | # asm 1: pshufb SR,<xmm8=int6464#9 | ||
1787 | # asm 2: pshufb SR,<xmm8=%xmm8 | ||
1788 | pshufb SR,%xmm8 | ||
1789 | |||
1790 | # qhasm: xmm9 ^= *(int128 *)(c + 144) | ||
1791 | # asm 1: pxor 144(<c=int64#4),<xmm9=int6464#10 | ||
1792 | # asm 2: pxor 144(<c=%rcx),<xmm9=%xmm9 | ||
1793 | pxor 144(%rcx),%xmm9 | ||
1794 | |||
1795 | # qhasm: shuffle bytes of xmm9 by SR | ||
1796 | # asm 1: pshufb SR,<xmm9=int6464#10 | ||
1797 | # asm 2: pshufb SR,<xmm9=%xmm9 | ||
1798 | pshufb SR,%xmm9 | ||
1799 | |||
1800 | # qhasm: xmm10 ^= *(int128 *)(c + 160) | ||
1801 | # asm 1: pxor 160(<c=int64#4),<xmm10=int6464#11 | ||
1802 | # asm 2: pxor 160(<c=%rcx),<xmm10=%xmm10 | ||
1803 | pxor 160(%rcx),%xmm10 | ||
1804 | |||
1805 | # qhasm: shuffle bytes of xmm10 by SR | ||
1806 | # asm 1: pshufb SR,<xmm10=int6464#11 | ||
1807 | # asm 2: pshufb SR,<xmm10=%xmm10 | ||
1808 | pshufb SR,%xmm10 | ||
1809 | |||
1810 | # qhasm: xmm11 ^= *(int128 *)(c + 176) | ||
1811 | # asm 1: pxor 176(<c=int64#4),<xmm11=int6464#12 | ||
1812 | # asm 2: pxor 176(<c=%rcx),<xmm11=%xmm11 | ||
1813 | pxor 176(%rcx),%xmm11 | ||
1814 | |||
1815 | # qhasm: shuffle bytes of xmm11 by SR | ||
1816 | # asm 1: pshufb SR,<xmm11=int6464#12 | ||
1817 | # asm 2: pshufb SR,<xmm11=%xmm11 | ||
1818 | pshufb SR,%xmm11 | ||
1819 | |||
1820 | # qhasm: xmm12 ^= *(int128 *)(c + 192) | ||
1821 | # asm 1: pxor 192(<c=int64#4),<xmm12=int6464#13 | ||
1822 | # asm 2: pxor 192(<c=%rcx),<xmm12=%xmm12 | ||
1823 | pxor 192(%rcx),%xmm12 | ||
1824 | |||
1825 | # qhasm: shuffle bytes of xmm12 by SR | ||
1826 | # asm 1: pshufb SR,<xmm12=int6464#13 | ||
1827 | # asm 2: pshufb SR,<xmm12=%xmm12 | ||
1828 | pshufb SR,%xmm12 | ||
1829 | |||
1830 | # qhasm: xmm13 ^= *(int128 *)(c + 208) | ||
1831 | # asm 1: pxor 208(<c=int64#4),<xmm13=int6464#14 | ||
1832 | # asm 2: pxor 208(<c=%rcx),<xmm13=%xmm13 | ||
1833 | pxor 208(%rcx),%xmm13 | ||
1834 | |||
1835 | # qhasm: shuffle bytes of xmm13 by SR | ||
1836 | # asm 1: pshufb SR,<xmm13=int6464#14 | ||
1837 | # asm 2: pshufb SR,<xmm13=%xmm13 | ||
1838 | pshufb SR,%xmm13 | ||
1839 | |||
1840 | # qhasm: xmm14 ^= *(int128 *)(c + 224) | ||
1841 | # asm 1: pxor 224(<c=int64#4),<xmm14=int6464#15 | ||
1842 | # asm 2: pxor 224(<c=%rcx),<xmm14=%xmm14 | ||
1843 | pxor 224(%rcx),%xmm14 | ||
1844 | |||
1845 | # qhasm: shuffle bytes of xmm14 by SR | ||
1846 | # asm 1: pshufb SR,<xmm14=int6464#15 | ||
1847 | # asm 2: pshufb SR,<xmm14=%xmm14 | ||
1848 | pshufb SR,%xmm14 | ||
1849 | |||
1850 | # qhasm: xmm15 ^= *(int128 *)(c + 240) | ||
1851 | # asm 1: pxor 240(<c=int64#4),<xmm15=int6464#16 | ||
1852 | # asm 2: pxor 240(<c=%rcx),<xmm15=%xmm15 | ||
1853 | pxor 240(%rcx),%xmm15 | ||
1854 | |||
1855 | # qhasm: shuffle bytes of xmm15 by SR | ||
1856 | # asm 1: pshufb SR,<xmm15=int6464#16 | ||
1857 | # asm 2: pshufb SR,<xmm15=%xmm15 | ||
1858 | pshufb SR,%xmm15 | ||
1859 | |||
1860 | # qhasm: xmm13 ^= xmm14 | ||
1861 | # asm 1: pxor <xmm14=int6464#15,<xmm13=int6464#14 | ||
1862 | # asm 2: pxor <xmm14=%xmm14,<xmm13=%xmm13 | ||
1863 | pxor %xmm14,%xmm13 | ||
1864 | |||
1865 | # qhasm: xmm10 ^= xmm9 | ||
1866 | # asm 1: pxor <xmm9=int6464#10,<xmm10=int6464#11 | ||
1867 | # asm 2: pxor <xmm9=%xmm9,<xmm10=%xmm10 | ||
1868 | pxor %xmm9,%xmm10 | ||
1869 | |||
1870 | # qhasm: xmm13 ^= xmm8 | ||
1871 | # asm 1: pxor <xmm8=int6464#9,<xmm13=int6464#14 | ||
1872 | # asm 2: pxor <xmm8=%xmm8,<xmm13=%xmm13 | ||
1873 | pxor %xmm8,%xmm13 | ||
1874 | |||
1875 | # qhasm: xmm14 ^= xmm10 | ||
1876 | # asm 1: pxor <xmm10=int6464#11,<xmm14=int6464#15 | ||
1877 | # asm 2: pxor <xmm10=%xmm10,<xmm14=%xmm14 | ||
1878 | pxor %xmm10,%xmm14 | ||
1879 | |||
1880 | # qhasm: xmm11 ^= xmm8 | ||
1881 | # asm 1: pxor <xmm8=int6464#9,<xmm11=int6464#12 | ||
1882 | # asm 2: pxor <xmm8=%xmm8,<xmm11=%xmm11 | ||
1883 | pxor %xmm8,%xmm11 | ||
1884 | |||
1885 | # qhasm: xmm14 ^= xmm11 | ||
1886 | # asm 1: pxor <xmm11=int6464#12,<xmm14=int6464#15 | ||
1887 | # asm 2: pxor <xmm11=%xmm11,<xmm14=%xmm14 | ||
1888 | pxor %xmm11,%xmm14 | ||
1889 | |||
1890 | # qhasm: xmm11 ^= xmm15 | ||
1891 | # asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#12 | ||
1892 | # asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm11 | ||
1893 | pxor %xmm15,%xmm11 | ||
1894 | |||
1895 | # qhasm: xmm11 ^= xmm12 | ||
1896 | # asm 1: pxor <xmm12=int6464#13,<xmm11=int6464#12 | ||
1897 | # asm 2: pxor <xmm12=%xmm12,<xmm11=%xmm11 | ||
1898 | pxor %xmm12,%xmm11 | ||
1899 | |||
1900 | # qhasm: xmm15 ^= xmm13 | ||
1901 | # asm 1: pxor <xmm13=int6464#14,<xmm15=int6464#16 | ||
1902 | # asm 2: pxor <xmm13=%xmm13,<xmm15=%xmm15 | ||
1903 | pxor %xmm13,%xmm15 | ||
1904 | |||
1905 | # qhasm: xmm11 ^= xmm9 | ||
1906 | # asm 1: pxor <xmm9=int6464#10,<xmm11=int6464#12 | ||
1907 | # asm 2: pxor <xmm9=%xmm9,<xmm11=%xmm11 | ||
1908 | pxor %xmm9,%xmm11 | ||
1909 | |||
1910 | # qhasm: xmm12 ^= xmm13 | ||
1911 | # asm 1: pxor <xmm13=int6464#14,<xmm12=int6464#13 | ||
1912 | # asm 2: pxor <xmm13=%xmm13,<xmm12=%xmm12 | ||
1913 | pxor %xmm13,%xmm12 | ||
1914 | |||
1915 | # qhasm: xmm10 ^= xmm15 | ||
1916 | # asm 1: pxor <xmm15=int6464#16,<xmm10=int6464#11 | ||
1917 | # asm 2: pxor <xmm15=%xmm15,<xmm10=%xmm10 | ||
1918 | pxor %xmm15,%xmm10 | ||
1919 | |||
1920 | # qhasm: xmm9 ^= xmm13 | ||
1921 | # asm 1: pxor <xmm13=int6464#14,<xmm9=int6464#10 | ||
1922 | # asm 2: pxor <xmm13=%xmm13,<xmm9=%xmm9 | ||
1923 | pxor %xmm13,%xmm9 | ||
1924 | |||
1925 | # qhasm: xmm3 = xmm15 | ||
1926 | # asm 1: movdqa <xmm15=int6464#16,>xmm3=int6464#1 | ||
1927 | # asm 2: movdqa <xmm15=%xmm15,>xmm3=%xmm0 | ||
1928 | movdqa %xmm15,%xmm0 | ||
1929 | |||
1930 | # qhasm: xmm2 = xmm9 | ||
1931 | # asm 1: movdqa <xmm9=int6464#10,>xmm2=int6464#2 | ||
1932 | # asm 2: movdqa <xmm9=%xmm9,>xmm2=%xmm1 | ||
1933 | movdqa %xmm9,%xmm1 | ||
1934 | |||
1935 | # qhasm: xmm1 = xmm13 | ||
1936 | # asm 1: movdqa <xmm13=int6464#14,>xmm1=int6464#3 | ||
1937 | # asm 2: movdqa <xmm13=%xmm13,>xmm1=%xmm2 | ||
1938 | movdqa %xmm13,%xmm2 | ||
1939 | |||
1940 | # qhasm: xmm5 = xmm10 | ||
1941 | # asm 1: movdqa <xmm10=int6464#11,>xmm5=int6464#4 | ||
1942 | # asm 2: movdqa <xmm10=%xmm10,>xmm5=%xmm3 | ||
1943 | movdqa %xmm10,%xmm3 | ||
1944 | |||
1945 | # qhasm: xmm4 = xmm14 | ||
1946 | # asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#5 | ||
1947 | # asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm4 | ||
1948 | movdqa %xmm14,%xmm4 | ||
1949 | |||
1950 | # qhasm: xmm3 ^= xmm12 | ||
1951 | # asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#1 | ||
1952 | # asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm0 | ||
1953 | pxor %xmm12,%xmm0 | ||
1954 | |||
1955 | # qhasm: xmm2 ^= xmm10 | ||
1956 | # asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#2 | ||
1957 | # asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm1 | ||
1958 | pxor %xmm10,%xmm1 | ||
1959 | |||
1960 | # qhasm: xmm1 ^= xmm11 | ||
1961 | # asm 1: pxor <xmm11=int6464#12,<xmm1=int6464#3 | ||
1962 | # asm 2: pxor <xmm11=%xmm11,<xmm1=%xmm2 | ||
1963 | pxor %xmm11,%xmm2 | ||
1964 | |||
1965 | # qhasm: xmm5 ^= xmm12 | ||
1966 | # asm 1: pxor <xmm12=int6464#13,<xmm5=int6464#4 | ||
1967 | # asm 2: pxor <xmm12=%xmm12,<xmm5=%xmm3 | ||
1968 | pxor %xmm12,%xmm3 | ||
1969 | |||
1970 | # qhasm: xmm4 ^= xmm8 | ||
1971 | # asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#5 | ||
1972 | # asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm4 | ||
1973 | pxor %xmm8,%xmm4 | ||
1974 | |||
1975 | # qhasm: xmm6 = xmm3 | ||
1976 | # asm 1: movdqa <xmm3=int6464#1,>xmm6=int6464#6 | ||
1977 | # asm 2: movdqa <xmm3=%xmm0,>xmm6=%xmm5 | ||
1978 | movdqa %xmm0,%xmm5 | ||
1979 | |||
1980 | # qhasm: xmm0 = xmm2 | ||
1981 | # asm 1: movdqa <xmm2=int6464#2,>xmm0=int6464#7 | ||
1982 | # asm 2: movdqa <xmm2=%xmm1,>xmm0=%xmm6 | ||
1983 | movdqa %xmm1,%xmm6 | ||
1984 | |||
1985 | # qhasm: xmm7 = xmm3 | ||
1986 | # asm 1: movdqa <xmm3=int6464#1,>xmm7=int6464#8 | ||
1987 | # asm 2: movdqa <xmm3=%xmm0,>xmm7=%xmm7 | ||
1988 | movdqa %xmm0,%xmm7 | ||
1989 | |||
1990 | # qhasm: xmm2 |= xmm1 | ||
1991 | # asm 1: por <xmm1=int6464#3,<xmm2=int6464#2 | ||
1992 | # asm 2: por <xmm1=%xmm2,<xmm2=%xmm1 | ||
1993 | por %xmm2,%xmm1 | ||
1994 | |||
1995 | # qhasm: xmm3 |= xmm4 | ||
1996 | # asm 1: por <xmm4=int6464#5,<xmm3=int6464#1 | ||
1997 | # asm 2: por <xmm4=%xmm4,<xmm3=%xmm0 | ||
1998 | por %xmm4,%xmm0 | ||
1999 | |||
2000 | # qhasm: xmm7 ^= xmm0 | ||
2001 | # asm 1: pxor <xmm0=int6464#7,<xmm7=int6464#8 | ||
2002 | # asm 2: pxor <xmm0=%xmm6,<xmm7=%xmm7 | ||
2003 | pxor %xmm6,%xmm7 | ||
2004 | |||
2005 | # qhasm: xmm6 &= xmm4 | ||
2006 | # asm 1: pand <xmm4=int6464#5,<xmm6=int6464#6 | ||
2007 | # asm 2: pand <xmm4=%xmm4,<xmm6=%xmm5 | ||
2008 | pand %xmm4,%xmm5 | ||
2009 | |||
2010 | # qhasm: xmm0 &= xmm1 | ||
2011 | # asm 1: pand <xmm1=int6464#3,<xmm0=int6464#7 | ||
2012 | # asm 2: pand <xmm1=%xmm2,<xmm0=%xmm6 | ||
2013 | pand %xmm2,%xmm6 | ||
2014 | |||
2015 | # qhasm: xmm4 ^= xmm1 | ||
2016 | # asm 1: pxor <xmm1=int6464#3,<xmm4=int6464#5 | ||
2017 | # asm 2: pxor <xmm1=%xmm2,<xmm4=%xmm4 | ||
2018 | pxor %xmm2,%xmm4 | ||
2019 | |||
2020 | # qhasm: xmm7 &= xmm4 | ||
2021 | # asm 1: pand <xmm4=int6464#5,<xmm7=int6464#8 | ||
2022 | # asm 2: pand <xmm4=%xmm4,<xmm7=%xmm7 | ||
2023 | pand %xmm4,%xmm7 | ||
2024 | |||
2025 | # qhasm: xmm4 = xmm11 | ||
2026 | # asm 1: movdqa <xmm11=int6464#12,>xmm4=int6464#3 | ||
2027 | # asm 2: movdqa <xmm11=%xmm11,>xmm4=%xmm2 | ||
2028 | movdqa %xmm11,%xmm2 | ||
2029 | |||
2030 | # qhasm: xmm4 ^= xmm8 | ||
2031 | # asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#3 | ||
2032 | # asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm2 | ||
2033 | pxor %xmm8,%xmm2 | ||
2034 | |||
2035 | # qhasm: xmm5 &= xmm4 | ||
2036 | # asm 1: pand <xmm4=int6464#3,<xmm5=int6464#4 | ||
2037 | # asm 2: pand <xmm4=%xmm2,<xmm5=%xmm3 | ||
2038 | pand %xmm2,%xmm3 | ||
2039 | |||
2040 | # qhasm: xmm3 ^= xmm5 | ||
2041 | # asm 1: pxor <xmm5=int6464#4,<xmm3=int6464#1 | ||
2042 | # asm 2: pxor <xmm5=%xmm3,<xmm3=%xmm0 | ||
2043 | pxor %xmm3,%xmm0 | ||
2044 | |||
2045 | # qhasm: xmm2 ^= xmm5 | ||
2046 | # asm 1: pxor <xmm5=int6464#4,<xmm2=int6464#2 | ||
2047 | # asm 2: pxor <xmm5=%xmm3,<xmm2=%xmm1 | ||
2048 | pxor %xmm3,%xmm1 | ||
2049 | |||
2050 | # qhasm: xmm5 = xmm15 | ||
2051 | # asm 1: movdqa <xmm15=int6464#16,>xmm5=int6464#3 | ||
2052 | # asm 2: movdqa <xmm15=%xmm15,>xmm5=%xmm2 | ||
2053 | movdqa %xmm15,%xmm2 | ||
2054 | |||
2055 | # qhasm: xmm5 ^= xmm9 | ||
2056 | # asm 1: pxor <xmm9=int6464#10,<xmm5=int6464#3 | ||
2057 | # asm 2: pxor <xmm9=%xmm9,<xmm5=%xmm2 | ||
2058 | pxor %xmm9,%xmm2 | ||
2059 | |||
2060 | # qhasm: xmm4 = xmm13 | ||
2061 | # asm 1: movdqa <xmm13=int6464#14,>xmm4=int6464#4 | ||
2062 | # asm 2: movdqa <xmm13=%xmm13,>xmm4=%xmm3 | ||
2063 | movdqa %xmm13,%xmm3 | ||
2064 | |||
2065 | # qhasm: xmm1 = xmm5 | ||
2066 | # asm 1: movdqa <xmm5=int6464#3,>xmm1=int6464#5 | ||
2067 | # asm 2: movdqa <xmm5=%xmm2,>xmm1=%xmm4 | ||
2068 | movdqa %xmm2,%xmm4 | ||
2069 | |||
2070 | # qhasm: xmm4 ^= xmm14 | ||
2071 | # asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#4 | ||
2072 | # asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm3 | ||
2073 | pxor %xmm14,%xmm3 | ||
2074 | |||
2075 | # qhasm: xmm1 |= xmm4 | ||
2076 | # asm 1: por <xmm4=int6464#4,<xmm1=int6464#5 | ||
2077 | # asm 2: por <xmm4=%xmm3,<xmm1=%xmm4 | ||
2078 | por %xmm3,%xmm4 | ||
2079 | |||
2080 | # qhasm: xmm5 &= xmm4 | ||
2081 | # asm 1: pand <xmm4=int6464#4,<xmm5=int6464#3 | ||
2082 | # asm 2: pand <xmm4=%xmm3,<xmm5=%xmm2 | ||
2083 | pand %xmm3,%xmm2 | ||
2084 | |||
2085 | # qhasm: xmm0 ^= xmm5 | ||
2086 | # asm 1: pxor <xmm5=int6464#3,<xmm0=int6464#7 | ||
2087 | # asm 2: pxor <xmm5=%xmm2,<xmm0=%xmm6 | ||
2088 | pxor %xmm2,%xmm6 | ||
2089 | |||
2090 | # qhasm: xmm3 ^= xmm7 | ||
2091 | # asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#1 | ||
2092 | # asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm0 | ||
2093 | pxor %xmm7,%xmm0 | ||
2094 | |||
2095 | # qhasm: xmm2 ^= xmm6 | ||
2096 | # asm 1: pxor <xmm6=int6464#6,<xmm2=int6464#2 | ||
2097 | # asm 2: pxor <xmm6=%xmm5,<xmm2=%xmm1 | ||
2098 | pxor %xmm5,%xmm1 | ||
2099 | |||
2100 | # qhasm: xmm1 ^= xmm7 | ||
2101 | # asm 1: pxor <xmm7=int6464#8,<xmm1=int6464#5 | ||
2102 | # asm 2: pxor <xmm7=%xmm7,<xmm1=%xmm4 | ||
2103 | pxor %xmm7,%xmm4 | ||
2104 | |||
2105 | # qhasm: xmm0 ^= xmm6 | ||
2106 | # asm 1: pxor <xmm6=int6464#6,<xmm0=int6464#7 | ||
2107 | # asm 2: pxor <xmm6=%xmm5,<xmm0=%xmm6 | ||
2108 | pxor %xmm5,%xmm6 | ||
2109 | |||
2110 | # qhasm: xmm1 ^= xmm6 | ||
2111 | # asm 1: pxor <xmm6=int6464#6,<xmm1=int6464#5 | ||
2112 | # asm 2: pxor <xmm6=%xmm5,<xmm1=%xmm4 | ||
2113 | pxor %xmm5,%xmm4 | ||
2114 | |||
2115 | # qhasm: xmm4 = xmm10 | ||
2116 | # asm 1: movdqa <xmm10=int6464#11,>xmm4=int6464#3 | ||
2117 | # asm 2: movdqa <xmm10=%xmm10,>xmm4=%xmm2 | ||
2118 | movdqa %xmm10,%xmm2 | ||
2119 | |||
2120 | # qhasm: xmm5 = xmm12 | ||
2121 | # asm 1: movdqa <xmm12=int6464#13,>xmm5=int6464#4 | ||
2122 | # asm 2: movdqa <xmm12=%xmm12,>xmm5=%xmm3 | ||
2123 | movdqa %xmm12,%xmm3 | ||
2124 | |||
2125 | # qhasm: xmm6 = xmm9 | ||
2126 | # asm 1: movdqa <xmm9=int6464#10,>xmm6=int6464#6 | ||
2127 | # asm 2: movdqa <xmm9=%xmm9,>xmm6=%xmm5 | ||
2128 | movdqa %xmm9,%xmm5 | ||
2129 | |||
2130 | # qhasm: xmm7 = xmm15 | ||
2131 | # asm 1: movdqa <xmm15=int6464#16,>xmm7=int6464#8 | ||
2132 | # asm 2: movdqa <xmm15=%xmm15,>xmm7=%xmm7 | ||
2133 | movdqa %xmm15,%xmm7 | ||
2134 | |||
2135 | # qhasm: xmm4 &= xmm11 | ||
2136 | # asm 1: pand <xmm11=int6464#12,<xmm4=int6464#3 | ||
2137 | # asm 2: pand <xmm11=%xmm11,<xmm4=%xmm2 | ||
2138 | pand %xmm11,%xmm2 | ||
2139 | |||
2140 | # qhasm: xmm5 &= xmm8 | ||
2141 | # asm 1: pand <xmm8=int6464#9,<xmm5=int6464#4 | ||
2142 | # asm 2: pand <xmm8=%xmm8,<xmm5=%xmm3 | ||
2143 | pand %xmm8,%xmm3 | ||
2144 | |||
2145 | # qhasm: xmm6 &= xmm13 | ||
2146 | # asm 1: pand <xmm13=int6464#14,<xmm6=int6464#6 | ||
2147 | # asm 2: pand <xmm13=%xmm13,<xmm6=%xmm5 | ||
2148 | pand %xmm13,%xmm5 | ||
2149 | |||
2150 | # qhasm: xmm7 |= xmm14 | ||
2151 | # asm 1: por <xmm14=int6464#15,<xmm7=int6464#8 | ||
2152 | # asm 2: por <xmm14=%xmm14,<xmm7=%xmm7 | ||
2153 | por %xmm14,%xmm7 | ||
2154 | |||
2155 | # qhasm: xmm3 ^= xmm4 | ||
2156 | # asm 1: pxor <xmm4=int6464#3,<xmm3=int6464#1 | ||
2157 | # asm 2: pxor <xmm4=%xmm2,<xmm3=%xmm0 | ||
2158 | pxor %xmm2,%xmm0 | ||
2159 | |||
2160 | # qhasm: xmm2 ^= xmm5 | ||
2161 | # asm 1: pxor <xmm5=int6464#4,<xmm2=int6464#2 | ||
2162 | # asm 2: pxor <xmm5=%xmm3,<xmm2=%xmm1 | ||
2163 | pxor %xmm3,%xmm1 | ||
2164 | |||
2165 | # qhasm: xmm1 ^= xmm6 | ||
2166 | # asm 1: pxor <xmm6=int6464#6,<xmm1=int6464#5 | ||
2167 | # asm 2: pxor <xmm6=%xmm5,<xmm1=%xmm4 | ||
2168 | pxor %xmm5,%xmm4 | ||
2169 | |||
2170 | # qhasm: xmm0 ^= xmm7 | ||
2171 | # asm 1: pxor <xmm7=int6464#8,<xmm0=int6464#7 | ||
2172 | # asm 2: pxor <xmm7=%xmm7,<xmm0=%xmm6 | ||
2173 | pxor %xmm7,%xmm6 | ||
2174 | |||
2175 | # qhasm: xmm4 = xmm3 | ||
2176 | # asm 1: movdqa <xmm3=int6464#1,>xmm4=int6464#3 | ||
2177 | # asm 2: movdqa <xmm3=%xmm0,>xmm4=%xmm2 | ||
2178 | movdqa %xmm0,%xmm2 | ||
2179 | |||
2180 | # qhasm: xmm4 ^= xmm2 | ||
2181 | # asm 1: pxor <xmm2=int6464#2,<xmm4=int6464#3 | ||
2182 | # asm 2: pxor <xmm2=%xmm1,<xmm4=%xmm2 | ||
2183 | pxor %xmm1,%xmm2 | ||
2184 | |||
2185 | # qhasm: xmm3 &= xmm1 | ||
2186 | # asm 1: pand <xmm1=int6464#5,<xmm3=int6464#1 | ||
2187 | # asm 2: pand <xmm1=%xmm4,<xmm3=%xmm0 | ||
2188 | pand %xmm4,%xmm0 | ||
2189 | |||
2190 | # qhasm: xmm6 = xmm0 | ||
2191 | # asm 1: movdqa <xmm0=int6464#7,>xmm6=int6464#4 | ||
2192 | # asm 2: movdqa <xmm0=%xmm6,>xmm6=%xmm3 | ||
2193 | movdqa %xmm6,%xmm3 | ||
2194 | |||
2195 | # qhasm: xmm6 ^= xmm3 | ||
2196 | # asm 1: pxor <xmm3=int6464#1,<xmm6=int6464#4 | ||
2197 | # asm 2: pxor <xmm3=%xmm0,<xmm6=%xmm3 | ||
2198 | pxor %xmm0,%xmm3 | ||
2199 | |||
2200 | # qhasm: xmm7 = xmm4 | ||
2201 | # asm 1: movdqa <xmm4=int6464#3,>xmm7=int6464#6 | ||
2202 | # asm 2: movdqa <xmm4=%xmm2,>xmm7=%xmm5 | ||
2203 | movdqa %xmm2,%xmm5 | ||
2204 | |||
2205 | # qhasm: xmm7 &= xmm6 | ||
2206 | # asm 1: pand <xmm6=int6464#4,<xmm7=int6464#6 | ||
2207 | # asm 2: pand <xmm6=%xmm3,<xmm7=%xmm5 | ||
2208 | pand %xmm3,%xmm5 | ||
2209 | |||
2210 | # qhasm: xmm7 ^= xmm2 | ||
2211 | # asm 1: pxor <xmm2=int6464#2,<xmm7=int6464#6 | ||
2212 | # asm 2: pxor <xmm2=%xmm1,<xmm7=%xmm5 | ||
2213 | pxor %xmm1,%xmm5 | ||
2214 | |||
2215 | # qhasm: xmm5 = xmm1 | ||
2216 | # asm 1: movdqa <xmm1=int6464#5,>xmm5=int6464#8 | ||
2217 | # asm 2: movdqa <xmm1=%xmm4,>xmm5=%xmm7 | ||
2218 | movdqa %xmm4,%xmm7 | ||
2219 | |||
2220 | # qhasm: xmm5 ^= xmm0 | ||
2221 | # asm 1: pxor <xmm0=int6464#7,<xmm5=int6464#8 | ||
2222 | # asm 2: pxor <xmm0=%xmm6,<xmm5=%xmm7 | ||
2223 | pxor %xmm6,%xmm7 | ||
2224 | |||
2225 | # qhasm: xmm3 ^= xmm2 | ||
2226 | # asm 1: pxor <xmm2=int6464#2,<xmm3=int6464#1 | ||
2227 | # asm 2: pxor <xmm2=%xmm1,<xmm3=%xmm0 | ||
2228 | pxor %xmm1,%xmm0 | ||
2229 | |||
2230 | # qhasm: xmm5 &= xmm3 | ||
2231 | # asm 1: pand <xmm3=int6464#1,<xmm5=int6464#8 | ||
2232 | # asm 2: pand <xmm3=%xmm0,<xmm5=%xmm7 | ||
2233 | pand %xmm0,%xmm7 | ||
2234 | |||
2235 | # qhasm: xmm5 ^= xmm0 | ||
2236 | # asm 1: pxor <xmm0=int6464#7,<xmm5=int6464#8 | ||
2237 | # asm 2: pxor <xmm0=%xmm6,<xmm5=%xmm7 | ||
2238 | pxor %xmm6,%xmm7 | ||
2239 | |||
2240 | # qhasm: xmm1 ^= xmm5 | ||
2241 | # asm 1: pxor <xmm5=int6464#8,<xmm1=int6464#5 | ||
2242 | # asm 2: pxor <xmm5=%xmm7,<xmm1=%xmm4 | ||
2243 | pxor %xmm7,%xmm4 | ||
2244 | |||
2245 | # qhasm: xmm2 = xmm6 | ||
2246 | # asm 1: movdqa <xmm6=int6464#4,>xmm2=int6464#1 | ||
2247 | # asm 2: movdqa <xmm6=%xmm3,>xmm2=%xmm0 | ||
2248 | movdqa %xmm3,%xmm0 | ||
2249 | |||
2250 | # qhasm: xmm2 ^= xmm5 | ||
2251 | # asm 1: pxor <xmm5=int6464#8,<xmm2=int6464#1 | ||
2252 | # asm 2: pxor <xmm5=%xmm7,<xmm2=%xmm0 | ||
2253 | pxor %xmm7,%xmm0 | ||
2254 | |||
2255 | # qhasm: xmm2 &= xmm0 | ||
2256 | # asm 1: pand <xmm0=int6464#7,<xmm2=int6464#1 | ||
2257 | # asm 2: pand <xmm0=%xmm6,<xmm2=%xmm0 | ||
2258 | pand %xmm6,%xmm0 | ||
2259 | |||
2260 | # qhasm: xmm1 ^= xmm2 | ||
2261 | # asm 1: pxor <xmm2=int6464#1,<xmm1=int6464#5 | ||
2262 | # asm 2: pxor <xmm2=%xmm0,<xmm1=%xmm4 | ||
2263 | pxor %xmm0,%xmm4 | ||
2264 | |||
2265 | # qhasm: xmm6 ^= xmm2 | ||
2266 | # asm 1: pxor <xmm2=int6464#1,<xmm6=int6464#4 | ||
2267 | # asm 2: pxor <xmm2=%xmm0,<xmm6=%xmm3 | ||
2268 | pxor %xmm0,%xmm3 | ||
2269 | |||
2270 | # qhasm: xmm6 &= xmm7 | ||
2271 | # asm 1: pand <xmm7=int6464#6,<xmm6=int6464#4 | ||
2272 | # asm 2: pand <xmm7=%xmm5,<xmm6=%xmm3 | ||
2273 | pand %xmm5,%xmm3 | ||
2274 | |||
2275 | # qhasm: xmm6 ^= xmm4 | ||
2276 | # asm 1: pxor <xmm4=int6464#3,<xmm6=int6464#4 | ||
2277 | # asm 2: pxor <xmm4=%xmm2,<xmm6=%xmm3 | ||
2278 | pxor %xmm2,%xmm3 | ||
2279 | |||
2280 | # qhasm: xmm4 = xmm14 | ||
2281 | # asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#1 | ||
2282 | # asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm0 | ||
2283 | movdqa %xmm14,%xmm0 | ||
2284 | |||
2285 | # qhasm: xmm0 = xmm13 | ||
2286 | # asm 1: movdqa <xmm13=int6464#14,>xmm0=int6464#2 | ||
2287 | # asm 2: movdqa <xmm13=%xmm13,>xmm0=%xmm1 | ||
2288 | movdqa %xmm13,%xmm1 | ||
2289 | |||
2290 | # qhasm: xmm2 = xmm7 | ||
2291 | # asm 1: movdqa <xmm7=int6464#6,>xmm2=int6464#3 | ||
2292 | # asm 2: movdqa <xmm7=%xmm5,>xmm2=%xmm2 | ||
2293 | movdqa %xmm5,%xmm2 | ||
2294 | |||
2295 | # qhasm: xmm2 ^= xmm6 | ||
2296 | # asm 1: pxor <xmm6=int6464#4,<xmm2=int6464#3 | ||
2297 | # asm 2: pxor <xmm6=%xmm3,<xmm2=%xmm2 | ||
2298 | pxor %xmm3,%xmm2 | ||
2299 | |||
2300 | # qhasm: xmm2 &= xmm14 | ||
2301 | # asm 1: pand <xmm14=int6464#15,<xmm2=int6464#3 | ||
2302 | # asm 2: pand <xmm14=%xmm14,<xmm2=%xmm2 | ||
2303 | pand %xmm14,%xmm2 | ||
2304 | |||
2305 | # qhasm: xmm14 ^= xmm13 | ||
2306 | # asm 1: pxor <xmm13=int6464#14,<xmm14=int6464#15 | ||
2307 | # asm 2: pxor <xmm13=%xmm13,<xmm14=%xmm14 | ||
2308 | pxor %xmm13,%xmm14 | ||
2309 | |||
2310 | # qhasm: xmm14 &= xmm6 | ||
2311 | # asm 1: pand <xmm6=int6464#4,<xmm14=int6464#15 | ||
2312 | # asm 2: pand <xmm6=%xmm3,<xmm14=%xmm14 | ||
2313 | pand %xmm3,%xmm14 | ||
2314 | |||
2315 | # qhasm: xmm13 &= xmm7 | ||
2316 | # asm 1: pand <xmm7=int6464#6,<xmm13=int6464#14 | ||
2317 | # asm 2: pand <xmm7=%xmm5,<xmm13=%xmm13 | ||
2318 | pand %xmm5,%xmm13 | ||
2319 | |||
2320 | # qhasm: xmm14 ^= xmm13 | ||
2321 | # asm 1: pxor <xmm13=int6464#14,<xmm14=int6464#15 | ||
2322 | # asm 2: pxor <xmm13=%xmm13,<xmm14=%xmm14 | ||
2323 | pxor %xmm13,%xmm14 | ||
2324 | |||
2325 | # qhasm: xmm13 ^= xmm2 | ||
2326 | # asm 1: pxor <xmm2=int6464#3,<xmm13=int6464#14 | ||
2327 | # asm 2: pxor <xmm2=%xmm2,<xmm13=%xmm13 | ||
2328 | pxor %xmm2,%xmm13 | ||
2329 | |||
2330 | # qhasm: xmm4 ^= xmm8 | ||
2331 | # asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#1 | ||
2332 | # asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm0 | ||
2333 | pxor %xmm8,%xmm0 | ||
2334 | |||
2335 | # qhasm: xmm0 ^= xmm11 | ||
2336 | # asm 1: pxor <xmm11=int6464#12,<xmm0=int6464#2 | ||
2337 | # asm 2: pxor <xmm11=%xmm11,<xmm0=%xmm1 | ||
2338 | pxor %xmm11,%xmm1 | ||
2339 | |||
2340 | # qhasm: xmm7 ^= xmm5 | ||
2341 | # asm 1: pxor <xmm5=int6464#8,<xmm7=int6464#6 | ||
2342 | # asm 2: pxor <xmm5=%xmm7,<xmm7=%xmm5 | ||
2343 | pxor %xmm7,%xmm5 | ||
2344 | |||
2345 | # qhasm: xmm6 ^= xmm1 | ||
2346 | # asm 1: pxor <xmm1=int6464#5,<xmm6=int6464#4 | ||
2347 | # asm 2: pxor <xmm1=%xmm4,<xmm6=%xmm3 | ||
2348 | pxor %xmm4,%xmm3 | ||
2349 | |||
2350 | # qhasm: xmm3 = xmm7 | ||
2351 | # asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3 | ||
2352 | # asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2 | ||
2353 | movdqa %xmm5,%xmm2 | ||
2354 | |||
2355 | # qhasm: xmm3 ^= xmm6 | ||
2356 | # asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3 | ||
2357 | # asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2 | ||
2358 | pxor %xmm3,%xmm2 | ||
2359 | |||
2360 | # qhasm: xmm3 &= xmm4 | ||
2361 | # asm 1: pand <xmm4=int6464#1,<xmm3=int6464#3 | ||
2362 | # asm 2: pand <xmm4=%xmm0,<xmm3=%xmm2 | ||
2363 | pand %xmm0,%xmm2 | ||
2364 | |||
2365 | # qhasm: xmm4 ^= xmm0 | ||
2366 | # asm 1: pxor <xmm0=int6464#2,<xmm4=int6464#1 | ||
2367 | # asm 2: pxor <xmm0=%xmm1,<xmm4=%xmm0 | ||
2368 | pxor %xmm1,%xmm0 | ||
2369 | |||
2370 | # qhasm: xmm4 &= xmm6 | ||
2371 | # asm 1: pand <xmm6=int6464#4,<xmm4=int6464#1 | ||
2372 | # asm 2: pand <xmm6=%xmm3,<xmm4=%xmm0 | ||
2373 | pand %xmm3,%xmm0 | ||
2374 | |||
2375 | # qhasm: xmm0 &= xmm7 | ||
2376 | # asm 1: pand <xmm7=int6464#6,<xmm0=int6464#2 | ||
2377 | # asm 2: pand <xmm7=%xmm5,<xmm0=%xmm1 | ||
2378 | pand %xmm5,%xmm1 | ||
2379 | |||
2380 | # qhasm: xmm0 ^= xmm4 | ||
2381 | # asm 1: pxor <xmm4=int6464#1,<xmm0=int6464#2 | ||
2382 | # asm 2: pxor <xmm4=%xmm0,<xmm0=%xmm1 | ||
2383 | pxor %xmm0,%xmm1 | ||
2384 | |||
2385 | # qhasm: xmm4 ^= xmm3 | ||
2386 | # asm 1: pxor <xmm3=int6464#3,<xmm4=int6464#1 | ||
2387 | # asm 2: pxor <xmm3=%xmm2,<xmm4=%xmm0 | ||
2388 | pxor %xmm2,%xmm0 | ||
2389 | |||
2390 | # qhasm: xmm2 = xmm5 | ||
2391 | # asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3 | ||
2392 | # asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2 | ||
2393 | movdqa %xmm7,%xmm2 | ||
2394 | |||
2395 | # qhasm: xmm2 ^= xmm1 | ||
2396 | # asm 1: pxor <xmm1=int6464#5,<xmm2=int6464#3 | ||
2397 | # asm 2: pxor <xmm1=%xmm4,<xmm2=%xmm2 | ||
2398 | pxor %xmm4,%xmm2 | ||
2399 | |||
2400 | # qhasm: xmm2 &= xmm8 | ||
2401 | # asm 1: pand <xmm8=int6464#9,<xmm2=int6464#3 | ||
2402 | # asm 2: pand <xmm8=%xmm8,<xmm2=%xmm2 | ||
2403 | pand %xmm8,%xmm2 | ||
2404 | |||
2405 | # qhasm: xmm8 ^= xmm11 | ||
2406 | # asm 1: pxor <xmm11=int6464#12,<xmm8=int6464#9 | ||
2407 | # asm 2: pxor <xmm11=%xmm11,<xmm8=%xmm8 | ||
2408 | pxor %xmm11,%xmm8 | ||
2409 | |||
2410 | # qhasm: xmm8 &= xmm1 | ||
2411 | # asm 1: pand <xmm1=int6464#5,<xmm8=int6464#9 | ||
2412 | # asm 2: pand <xmm1=%xmm4,<xmm8=%xmm8 | ||
2413 | pand %xmm4,%xmm8 | ||
2414 | |||
2415 | # qhasm: xmm11 &= xmm5 | ||
2416 | # asm 1: pand <xmm5=int6464#8,<xmm11=int6464#12 | ||
2417 | # asm 2: pand <xmm5=%xmm7,<xmm11=%xmm11 | ||
2418 | pand %xmm7,%xmm11 | ||
2419 | |||
2420 | # qhasm: xmm8 ^= xmm11 | ||
2421 | # asm 1: pxor <xmm11=int6464#12,<xmm8=int6464#9 | ||
2422 | # asm 2: pxor <xmm11=%xmm11,<xmm8=%xmm8 | ||
2423 | pxor %xmm11,%xmm8 | ||
2424 | |||
2425 | # qhasm: xmm11 ^= xmm2 | ||
2426 | # asm 1: pxor <xmm2=int6464#3,<xmm11=int6464#12 | ||
2427 | # asm 2: pxor <xmm2=%xmm2,<xmm11=%xmm11 | ||
2428 | pxor %xmm2,%xmm11 | ||
2429 | |||
2430 | # qhasm: xmm14 ^= xmm4 | ||
2431 | # asm 1: pxor <xmm4=int6464#1,<xmm14=int6464#15 | ||
2432 | # asm 2: pxor <xmm4=%xmm0,<xmm14=%xmm14 | ||
2433 | pxor %xmm0,%xmm14 | ||
2434 | |||
2435 | # qhasm: xmm8 ^= xmm4 | ||
2436 | # asm 1: pxor <xmm4=int6464#1,<xmm8=int6464#9 | ||
2437 | # asm 2: pxor <xmm4=%xmm0,<xmm8=%xmm8 | ||
2438 | pxor %xmm0,%xmm8 | ||
2439 | |||
2440 | # qhasm: xmm13 ^= xmm0 | ||
2441 | # asm 1: pxor <xmm0=int6464#2,<xmm13=int6464#14 | ||
2442 | # asm 2: pxor <xmm0=%xmm1,<xmm13=%xmm13 | ||
2443 | pxor %xmm1,%xmm13 | ||
2444 | |||
2445 | # qhasm: xmm11 ^= xmm0 | ||
2446 | # asm 1: pxor <xmm0=int6464#2,<xmm11=int6464#12 | ||
2447 | # asm 2: pxor <xmm0=%xmm1,<xmm11=%xmm11 | ||
2448 | pxor %xmm1,%xmm11 | ||
2449 | |||
2450 | # qhasm: xmm4 = xmm15 | ||
2451 | # asm 1: movdqa <xmm15=int6464#16,>xmm4=int6464#1 | ||
2452 | # asm 2: movdqa <xmm15=%xmm15,>xmm4=%xmm0 | ||
2453 | movdqa %xmm15,%xmm0 | ||
2454 | |||
2455 | # qhasm: xmm0 = xmm9 | ||
2456 | # asm 1: movdqa <xmm9=int6464#10,>xmm0=int6464#2 | ||
2457 | # asm 2: movdqa <xmm9=%xmm9,>xmm0=%xmm1 | ||
2458 | movdqa %xmm9,%xmm1 | ||
2459 | |||
2460 | # qhasm: xmm4 ^= xmm12 | ||
2461 | # asm 1: pxor <xmm12=int6464#13,<xmm4=int6464#1 | ||
2462 | # asm 2: pxor <xmm12=%xmm12,<xmm4=%xmm0 | ||
2463 | pxor %xmm12,%xmm0 | ||
2464 | |||
2465 | # qhasm: xmm0 ^= xmm10 | ||
2466 | # asm 1: pxor <xmm10=int6464#11,<xmm0=int6464#2 | ||
2467 | # asm 2: pxor <xmm10=%xmm10,<xmm0=%xmm1 | ||
2468 | pxor %xmm10,%xmm1 | ||
2469 | |||
2470 | # qhasm: xmm3 = xmm7 | ||
2471 | # asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3 | ||
2472 | # asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2 | ||
2473 | movdqa %xmm5,%xmm2 | ||
2474 | |||
2475 | # qhasm: xmm3 ^= xmm6 | ||
2476 | # asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3 | ||
2477 | # asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2 | ||
2478 | pxor %xmm3,%xmm2 | ||
2479 | |||
2480 | # qhasm: xmm3 &= xmm4 | ||
2481 | # asm 1: pand <xmm4=int6464#1,<xmm3=int6464#3 | ||
2482 | # asm 2: pand <xmm4=%xmm0,<xmm3=%xmm2 | ||
2483 | pand %xmm0,%xmm2 | ||
2484 | |||
2485 | # qhasm: xmm4 ^= xmm0 | ||
2486 | # asm 1: pxor <xmm0=int6464#2,<xmm4=int6464#1 | ||
2487 | # asm 2: pxor <xmm0=%xmm1,<xmm4=%xmm0 | ||
2488 | pxor %xmm1,%xmm0 | ||
2489 | |||
2490 | # qhasm: xmm4 &= xmm6 | ||
2491 | # asm 1: pand <xmm6=int6464#4,<xmm4=int6464#1 | ||
2492 | # asm 2: pand <xmm6=%xmm3,<xmm4=%xmm0 | ||
2493 | pand %xmm3,%xmm0 | ||
2494 | |||
2495 | # qhasm: xmm0 &= xmm7 | ||
2496 | # asm 1: pand <xmm7=int6464#6,<xmm0=int6464#2 | ||
2497 | # asm 2: pand <xmm7=%xmm5,<xmm0=%xmm1 | ||
2498 | pand %xmm5,%xmm1 | ||
2499 | |||
2500 | # qhasm: xmm0 ^= xmm4 | ||
2501 | # asm 1: pxor <xmm4=int6464#1,<xmm0=int6464#2 | ||
2502 | # asm 2: pxor <xmm4=%xmm0,<xmm0=%xmm1 | ||
2503 | pxor %xmm0,%xmm1 | ||
2504 | |||
2505 | # qhasm: xmm4 ^= xmm3 | ||
2506 | # asm 1: pxor <xmm3=int6464#3,<xmm4=int6464#1 | ||
2507 | # asm 2: pxor <xmm3=%xmm2,<xmm4=%xmm0 | ||
2508 | pxor %xmm2,%xmm0 | ||
2509 | |||
2510 | # qhasm: xmm2 = xmm5 | ||
2511 | # asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3 | ||
2512 | # asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2 | ||
2513 | movdqa %xmm7,%xmm2 | ||
2514 | |||
2515 | # qhasm: xmm2 ^= xmm1 | ||
2516 | # asm 1: pxor <xmm1=int6464#5,<xmm2=int6464#3 | ||
2517 | # asm 2: pxor <xmm1=%xmm4,<xmm2=%xmm2 | ||
2518 | pxor %xmm4,%xmm2 | ||
2519 | |||
2520 | # qhasm: xmm2 &= xmm12 | ||
2521 | # asm 1: pand <xmm12=int6464#13,<xmm2=int6464#3 | ||
2522 | # asm 2: pand <xmm12=%xmm12,<xmm2=%xmm2 | ||
2523 | pand %xmm12,%xmm2 | ||
2524 | |||
2525 | # qhasm: xmm12 ^= xmm10 | ||
2526 | # asm 1: pxor <xmm10=int6464#11,<xmm12=int6464#13 | ||
2527 | # asm 2: pxor <xmm10=%xmm10,<xmm12=%xmm12 | ||
2528 | pxor %xmm10,%xmm12 | ||
2529 | |||
2530 | # qhasm: xmm12 &= xmm1 | ||
2531 | # asm 1: pand <xmm1=int6464#5,<xmm12=int6464#13 | ||
2532 | # asm 2: pand <xmm1=%xmm4,<xmm12=%xmm12 | ||
2533 | pand %xmm4,%xmm12 | ||
2534 | |||
2535 | # qhasm: xmm10 &= xmm5 | ||
2536 | # asm 1: pand <xmm5=int6464#8,<xmm10=int6464#11 | ||
2537 | # asm 2: pand <xmm5=%xmm7,<xmm10=%xmm10 | ||
2538 | pand %xmm7,%xmm10 | ||
2539 | |||
2540 | # qhasm: xmm12 ^= xmm10 | ||
2541 | # asm 1: pxor <xmm10=int6464#11,<xmm12=int6464#13 | ||
2542 | # asm 2: pxor <xmm10=%xmm10,<xmm12=%xmm12 | ||
2543 | pxor %xmm10,%xmm12 | ||
2544 | |||
2545 | # qhasm: xmm10 ^= xmm2 | ||
2546 | # asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#11 | ||
2547 | # asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm10 | ||
2548 | pxor %xmm2,%xmm10 | ||
2549 | |||
2550 | # qhasm: xmm7 ^= xmm5 | ||
2551 | # asm 1: pxor <xmm5=int6464#8,<xmm7=int6464#6 | ||
2552 | # asm 2: pxor <xmm5=%xmm7,<xmm7=%xmm5 | ||
2553 | pxor %xmm7,%xmm5 | ||
2554 | |||
2555 | # qhasm: xmm6 ^= xmm1 | ||
2556 | # asm 1: pxor <xmm1=int6464#5,<xmm6=int6464#4 | ||
2557 | # asm 2: pxor <xmm1=%xmm4,<xmm6=%xmm3 | ||
2558 | pxor %xmm4,%xmm3 | ||
2559 | |||
2560 | # qhasm: xmm3 = xmm7 | ||
2561 | # asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3 | ||
2562 | # asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2 | ||
2563 | movdqa %xmm5,%xmm2 | ||
2564 | |||
2565 | # qhasm: xmm3 ^= xmm6 | ||
2566 | # asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3 | ||
2567 | # asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2 | ||
2568 | pxor %xmm3,%xmm2 | ||
2569 | |||
2570 | # qhasm: xmm3 &= xmm15 | ||
2571 | # asm 1: pand <xmm15=int6464#16,<xmm3=int6464#3 | ||
2572 | # asm 2: pand <xmm15=%xmm15,<xmm3=%xmm2 | ||
2573 | pand %xmm15,%xmm2 | ||
2574 | |||
2575 | # qhasm: xmm15 ^= xmm9 | ||
2576 | # asm 1: pxor <xmm9=int6464#10,<xmm15=int6464#16 | ||
2577 | # asm 2: pxor <xmm9=%xmm9,<xmm15=%xmm15 | ||
2578 | pxor %xmm9,%xmm15 | ||
2579 | |||
2580 | # qhasm: xmm15 &= xmm6 | ||
2581 | # asm 1: pand <xmm6=int6464#4,<xmm15=int6464#16 | ||
2582 | # asm 2: pand <xmm6=%xmm3,<xmm15=%xmm15 | ||
2583 | pand %xmm3,%xmm15 | ||
2584 | |||
2585 | # qhasm: xmm9 &= xmm7 | ||
2586 | # asm 1: pand <xmm7=int6464#6,<xmm9=int6464#10 | ||
2587 | # asm 2: pand <xmm7=%xmm5,<xmm9=%xmm9 | ||
2588 | pand %xmm5,%xmm9 | ||
2589 | |||
2590 | # qhasm: xmm15 ^= xmm9 | ||
2591 | # asm 1: pxor <xmm9=int6464#10,<xmm15=int6464#16 | ||
2592 | # asm 2: pxor <xmm9=%xmm9,<xmm15=%xmm15 | ||
2593 | pxor %xmm9,%xmm15 | ||
2594 | |||
2595 | # qhasm: xmm9 ^= xmm3 | ||
2596 | # asm 1: pxor <xmm3=int6464#3,<xmm9=int6464#10 | ||
2597 | # asm 2: pxor <xmm3=%xmm2,<xmm9=%xmm9 | ||
2598 | pxor %xmm2,%xmm9 | ||
2599 | |||
2600 | # qhasm: xmm15 ^= xmm4 | ||
2601 | # asm 1: pxor <xmm4=int6464#1,<xmm15=int6464#16 | ||
2602 | # asm 2: pxor <xmm4=%xmm0,<xmm15=%xmm15 | ||
2603 | pxor %xmm0,%xmm15 | ||
2604 | |||
2605 | # qhasm: xmm12 ^= xmm4 | ||
2606 | # asm 1: pxor <xmm4=int6464#1,<xmm12=int6464#13 | ||
2607 | # asm 2: pxor <xmm4=%xmm0,<xmm12=%xmm12 | ||
2608 | pxor %xmm0,%xmm12 | ||
2609 | |||
2610 | # qhasm: xmm9 ^= xmm0 | ||
2611 | # asm 1: pxor <xmm0=int6464#2,<xmm9=int6464#10 | ||
2612 | # asm 2: pxor <xmm0=%xmm1,<xmm9=%xmm9 | ||
2613 | pxor %xmm1,%xmm9 | ||
2614 | |||
2615 | # qhasm: xmm10 ^= xmm0 | ||
2616 | # asm 1: pxor <xmm0=int6464#2,<xmm10=int6464#11 | ||
2617 | # asm 2: pxor <xmm0=%xmm1,<xmm10=%xmm10 | ||
2618 | pxor %xmm1,%xmm10 | ||
2619 | |||
2620 | # qhasm: xmm15 ^= xmm8 | ||
2621 | # asm 1: pxor <xmm8=int6464#9,<xmm15=int6464#16 | ||
2622 | # asm 2: pxor <xmm8=%xmm8,<xmm15=%xmm15 | ||
2623 | pxor %xmm8,%xmm15 | ||
2624 | |||
2625 | # qhasm: xmm9 ^= xmm14 | ||
2626 | # asm 1: pxor <xmm14=int6464#15,<xmm9=int6464#10 | ||
2627 | # asm 2: pxor <xmm14=%xmm14,<xmm9=%xmm9 | ||
2628 | pxor %xmm14,%xmm9 | ||
2629 | |||
2630 | # qhasm: xmm12 ^= xmm15 | ||
2631 | # asm 1: pxor <xmm15=int6464#16,<xmm12=int6464#13 | ||
2632 | # asm 2: pxor <xmm15=%xmm15,<xmm12=%xmm12 | ||
2633 | pxor %xmm15,%xmm12 | ||
2634 | |||
2635 | # qhasm: xmm14 ^= xmm8 | ||
2636 | # asm 1: pxor <xmm8=int6464#9,<xmm14=int6464#15 | ||
2637 | # asm 2: pxor <xmm8=%xmm8,<xmm14=%xmm14 | ||
2638 | pxor %xmm8,%xmm14 | ||
2639 | |||
2640 | # qhasm: xmm8 ^= xmm9 | ||
2641 | # asm 1: pxor <xmm9=int6464#10,<xmm8=int6464#9 | ||
2642 | # asm 2: pxor <xmm9=%xmm9,<xmm8=%xmm8 | ||
2643 | pxor %xmm9,%xmm8 | ||
2644 | |||
2645 | # qhasm: xmm9 ^= xmm13 | ||
2646 | # asm 1: pxor <xmm13=int6464#14,<xmm9=int6464#10 | ||
2647 | # asm 2: pxor <xmm13=%xmm13,<xmm9=%xmm9 | ||
2648 | pxor %xmm13,%xmm9 | ||
2649 | |||
2650 | # qhasm: xmm13 ^= xmm10 | ||
2651 | # asm 1: pxor <xmm10=int6464#11,<xmm13=int6464#14 | ||
2652 | # asm 2: pxor <xmm10=%xmm10,<xmm13=%xmm13 | ||
2653 | pxor %xmm10,%xmm13 | ||
2654 | |||
2655 | # qhasm: xmm12 ^= xmm13 | ||
2656 | # asm 1: pxor <xmm13=int6464#14,<xmm12=int6464#13 | ||
2657 | # asm 2: pxor <xmm13=%xmm13,<xmm12=%xmm12 | ||
2658 | pxor %xmm13,%xmm12 | ||
2659 | |||
2660 | # qhasm: xmm10 ^= xmm11 | ||
2661 | # asm 1: pxor <xmm11=int6464#12,<xmm10=int6464#11 | ||
2662 | # asm 2: pxor <xmm11=%xmm11,<xmm10=%xmm10 | ||
2663 | pxor %xmm11,%xmm10 | ||
2664 | |||
2665 | # qhasm: xmm11 ^= xmm13 | ||
2666 | # asm 1: pxor <xmm13=int6464#14,<xmm11=int6464#12 | ||
2667 | # asm 2: pxor <xmm13=%xmm13,<xmm11=%xmm11 | ||
2668 | pxor %xmm13,%xmm11 | ||
2669 | |||
2670 | # qhasm: xmm14 ^= xmm11 | ||
2671 | # asm 1: pxor <xmm11=int6464#12,<xmm14=int6464#15 | ||
2672 | # asm 2: pxor <xmm11=%xmm11,<xmm14=%xmm14 | ||
2673 | pxor %xmm11,%xmm14 | ||
2674 | |||
2675 | # qhasm: xmm0 = shuffle dwords of xmm8 by 0x93 | ||
2676 | # asm 1: pshufd $0x93,<xmm8=int6464#9,>xmm0=int6464#1 | ||
2677 | # asm 2: pshufd $0x93,<xmm8=%xmm8,>xmm0=%xmm0 | ||
2678 | pshufd $0x93,%xmm8,%xmm0 | ||
2679 | |||
2680 | # qhasm: xmm1 = shuffle dwords of xmm9 by 0x93 | ||
2681 | # asm 1: pshufd $0x93,<xmm9=int6464#10,>xmm1=int6464#2 | ||
2682 | # asm 2: pshufd $0x93,<xmm9=%xmm9,>xmm1=%xmm1 | ||
2683 | pshufd $0x93,%xmm9,%xmm1 | ||
2684 | |||
2685 | # qhasm: xmm2 = shuffle dwords of xmm12 by 0x93 | ||
2686 | # asm 1: pshufd $0x93,<xmm12=int6464#13,>xmm2=int6464#3 | ||
2687 | # asm 2: pshufd $0x93,<xmm12=%xmm12,>xmm2=%xmm2 | ||
2688 | pshufd $0x93,%xmm12,%xmm2 | ||
2689 | |||
2690 | # qhasm: xmm3 = shuffle dwords of xmm14 by 0x93 | ||
2691 | # asm 1: pshufd $0x93,<xmm14=int6464#15,>xmm3=int6464#4 | ||
2692 | # asm 2: pshufd $0x93,<xmm14=%xmm14,>xmm3=%xmm3 | ||
2693 | pshufd $0x93,%xmm14,%xmm3 | ||
2694 | |||
2695 | # qhasm: xmm4 = shuffle dwords of xmm11 by 0x93 | ||
2696 | # asm 1: pshufd $0x93,<xmm11=int6464#12,>xmm4=int6464#5 | ||
2697 | # asm 2: pshufd $0x93,<xmm11=%xmm11,>xmm4=%xmm4 | ||
2698 | pshufd $0x93,%xmm11,%xmm4 | ||
2699 | |||
2700 | # qhasm: xmm5 = shuffle dwords of xmm15 by 0x93 | ||
2701 | # asm 1: pshufd $0x93,<xmm15=int6464#16,>xmm5=int6464#6 | ||
2702 | # asm 2: pshufd $0x93,<xmm15=%xmm15,>xmm5=%xmm5 | ||
2703 | pshufd $0x93,%xmm15,%xmm5 | ||
2704 | |||
2705 | # qhasm: xmm6 = shuffle dwords of xmm10 by 0x93 | ||
2706 | # asm 1: pshufd $0x93,<xmm10=int6464#11,>xmm6=int6464#7 | ||
2707 | # asm 2: pshufd $0x93,<xmm10=%xmm10,>xmm6=%xmm6 | ||
2708 | pshufd $0x93,%xmm10,%xmm6 | ||
2709 | |||
2710 | # qhasm: xmm7 = shuffle dwords of xmm13 by 0x93 | ||
2711 | # asm 1: pshufd $0x93,<xmm13=int6464#14,>xmm7=int6464#8 | ||
2712 | # asm 2: pshufd $0x93,<xmm13=%xmm13,>xmm7=%xmm7 | ||
2713 | pshufd $0x93,%xmm13,%xmm7 | ||
2714 | |||
2715 | # qhasm: xmm8 ^= xmm0 | ||
2716 | # asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9 | ||
2717 | # asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8 | ||
2718 | pxor %xmm0,%xmm8 | ||
2719 | |||
2720 | # qhasm: xmm9 ^= xmm1 | ||
2721 | # asm 1: pxor <xmm1=int6464#2,<xmm9=int6464#10 | ||
2722 | # asm 2: pxor <xmm1=%xmm1,<xmm9=%xmm9 | ||
2723 | pxor %xmm1,%xmm9 | ||
2724 | |||
2725 | # qhasm: xmm12 ^= xmm2 | ||
2726 | # asm 1: pxor <xmm2=int6464#3,<xmm12=int6464#13 | ||
2727 | # asm 2: pxor <xmm2=%xmm2,<xmm12=%xmm12 | ||
2728 | pxor %xmm2,%xmm12 | ||
2729 | |||
2730 | # qhasm: xmm14 ^= xmm3 | ||
2731 | # asm 1: pxor <xmm3=int6464#4,<xmm14=int6464#15 | ||
2732 | # asm 2: pxor <xmm3=%xmm3,<xmm14=%xmm14 | ||
2733 | pxor %xmm3,%xmm14 | ||
2734 | |||
2735 | # qhasm: xmm11 ^= xmm4 | ||
2736 | # asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#12 | ||
2737 | # asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm11 | ||
2738 | pxor %xmm4,%xmm11 | ||
2739 | |||
2740 | # qhasm: xmm15 ^= xmm5 | ||
2741 | # asm 1: pxor <xmm5=int6464#6,<xmm15=int6464#16 | ||
2742 | # asm 2: pxor <xmm5=%xmm5,<xmm15=%xmm15 | ||
2743 | pxor %xmm5,%xmm15 | ||
2744 | |||
2745 | # qhasm: xmm10 ^= xmm6 | ||
2746 | # asm 1: pxor <xmm6=int6464#7,<xmm10=int6464#11 | ||
2747 | # asm 2: pxor <xmm6=%xmm6,<xmm10=%xmm10 | ||
2748 | pxor %xmm6,%xmm10 | ||
2749 | |||
2750 | # qhasm: xmm13 ^= xmm7 | ||
2751 | # asm 1: pxor <xmm7=int6464#8,<xmm13=int6464#14 | ||
2752 | # asm 2: pxor <xmm7=%xmm7,<xmm13=%xmm13 | ||
2753 | pxor %xmm7,%xmm13 | ||
2754 | |||
2755 | # qhasm: xmm0 ^= xmm13 | ||
2756 | # asm 1: pxor <xmm13=int6464#14,<xmm0=int6464#1 | ||
2757 | # asm 2: pxor <xmm13=%xmm13,<xmm0=%xmm0 | ||
2758 | pxor %xmm13,%xmm0 | ||
2759 | |||
2760 | # qhasm: xmm1 ^= xmm8 | ||
2761 | # asm 1: pxor <xmm8=int6464#9,<xmm1=int6464#2 | ||
2762 | # asm 2: pxor <xmm8=%xmm8,<xmm1=%xmm1 | ||
2763 | pxor %xmm8,%xmm1 | ||
2764 | |||
2765 | # qhasm: xmm2 ^= xmm9 | ||
2766 | # asm 1: pxor <xmm9=int6464#10,<xmm2=int6464#3 | ||
2767 | # asm 2: pxor <xmm9=%xmm9,<xmm2=%xmm2 | ||
2768 | pxor %xmm9,%xmm2 | ||
2769 | |||
2770 | # qhasm: xmm1 ^= xmm13 | ||
2771 | # asm 1: pxor <xmm13=int6464#14,<xmm1=int6464#2 | ||
2772 | # asm 2: pxor <xmm13=%xmm13,<xmm1=%xmm1 | ||
2773 | pxor %xmm13,%xmm1 | ||
2774 | |||
2775 | # qhasm: xmm3 ^= xmm12 | ||
2776 | # asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4 | ||
2777 | # asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3 | ||
2778 | pxor %xmm12,%xmm3 | ||
2779 | |||
2780 | # qhasm: xmm4 ^= xmm14 | ||
2781 | # asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5 | ||
2782 | # asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4 | ||
2783 | pxor %xmm14,%xmm4 | ||
2784 | |||
2785 | # qhasm: xmm5 ^= xmm11 | ||
2786 | # asm 1: pxor <xmm11=int6464#12,<xmm5=int6464#6 | ||
2787 | # asm 2: pxor <xmm11=%xmm11,<xmm5=%xmm5 | ||
2788 | pxor %xmm11,%xmm5 | ||
2789 | |||
2790 | # qhasm: xmm3 ^= xmm13 | ||
2791 | # asm 1: pxor <xmm13=int6464#14,<xmm3=int6464#4 | ||
2792 | # asm 2: pxor <xmm13=%xmm13,<xmm3=%xmm3 | ||
2793 | pxor %xmm13,%xmm3 | ||
2794 | |||
2795 | # qhasm: xmm6 ^= xmm15 | ||
2796 | # asm 1: pxor <xmm15=int6464#16,<xmm6=int6464#7 | ||
2797 | # asm 2: pxor <xmm15=%xmm15,<xmm6=%xmm6 | ||
2798 | pxor %xmm15,%xmm6 | ||
2799 | |||
2800 | # qhasm: xmm7 ^= xmm10 | ||
2801 | # asm 1: pxor <xmm10=int6464#11,<xmm7=int6464#8 | ||
2802 | # asm 2: pxor <xmm10=%xmm10,<xmm7=%xmm7 | ||
2803 | pxor %xmm10,%xmm7 | ||
2804 | |||
2805 | # qhasm: xmm4 ^= xmm13 | ||
2806 | # asm 1: pxor <xmm13=int6464#14,<xmm4=int6464#5 | ||
2807 | # asm 2: pxor <xmm13=%xmm13,<xmm4=%xmm4 | ||
2808 | pxor %xmm13,%xmm4 | ||
2809 | |||
2810 | # qhasm: xmm8 = shuffle dwords of xmm8 by 0x4E | ||
2811 | # asm 1: pshufd $0x4E,<xmm8=int6464#9,>xmm8=int6464#9 | ||
2812 | # asm 2: pshufd $0x4E,<xmm8=%xmm8,>xmm8=%xmm8 | ||
2813 | pshufd $0x4E,%xmm8,%xmm8 | ||
2814 | |||
2815 | # qhasm: xmm9 = shuffle dwords of xmm9 by 0x4E | ||
2816 | # asm 1: pshufd $0x4E,<xmm9=int6464#10,>xmm9=int6464#10 | ||
2817 | # asm 2: pshufd $0x4E,<xmm9=%xmm9,>xmm9=%xmm9 | ||
2818 | pshufd $0x4E,%xmm9,%xmm9 | ||
2819 | |||
2820 | # qhasm: xmm12 = shuffle dwords of xmm12 by 0x4E | ||
2821 | # asm 1: pshufd $0x4E,<xmm12=int6464#13,>xmm12=int6464#13 | ||
2822 | # asm 2: pshufd $0x4E,<xmm12=%xmm12,>xmm12=%xmm12 | ||
2823 | pshufd $0x4E,%xmm12,%xmm12 | ||
2824 | |||
2825 | # qhasm: xmm14 = shuffle dwords of xmm14 by 0x4E | ||
2826 | # asm 1: pshufd $0x4E,<xmm14=int6464#15,>xmm14=int6464#15 | ||
2827 | # asm 2: pshufd $0x4E,<xmm14=%xmm14,>xmm14=%xmm14 | ||
2828 | pshufd $0x4E,%xmm14,%xmm14 | ||
2829 | |||
2830 | # qhasm: xmm11 = shuffle dwords of xmm11 by 0x4E | ||
2831 | # asm 1: pshufd $0x4E,<xmm11=int6464#12,>xmm11=int6464#12 | ||
2832 | # asm 2: pshufd $0x4E,<xmm11=%xmm11,>xmm11=%xmm11 | ||
2833 | pshufd $0x4E,%xmm11,%xmm11 | ||
2834 | |||
2835 | # qhasm: xmm15 = shuffle dwords of xmm15 by 0x4E | ||
2836 | # asm 1: pshufd $0x4E,<xmm15=int6464#16,>xmm15=int6464#16 | ||
2837 | # asm 2: pshufd $0x4E,<xmm15=%xmm15,>xmm15=%xmm15 | ||
2838 | pshufd $0x4E,%xmm15,%xmm15 | ||
2839 | |||
2840 | # qhasm: xmm10 = shuffle dwords of xmm10 by 0x4E | ||
2841 | # asm 1: pshufd $0x4E,<xmm10=int6464#11,>xmm10=int6464#11 | ||
2842 | # asm 2: pshufd $0x4E,<xmm10=%xmm10,>xmm10=%xmm10 | ||
2843 | pshufd $0x4E,%xmm10,%xmm10 | ||
2844 | |||
2845 | # qhasm: xmm13 = shuffle dwords of xmm13 by 0x4E | ||
2846 | # asm 1: pshufd $0x4E,<xmm13=int6464#14,>xmm13=int6464#14 | ||
2847 | # asm 2: pshufd $0x4E,<xmm13=%xmm13,>xmm13=%xmm13 | ||
2848 | pshufd $0x4E,%xmm13,%xmm13 | ||
2849 | |||
2850 | # qhasm: xmm0 ^= xmm8 | ||
2851 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
2852 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
2853 | pxor %xmm8,%xmm0 | ||
2854 | |||
2855 | # qhasm: xmm1 ^= xmm9 | ||
2856 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
2857 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
2858 | pxor %xmm9,%xmm1 | ||
2859 | |||
2860 | # qhasm: xmm2 ^= xmm12 | ||
2861 | # asm 1: pxor <xmm12=int6464#13,<xmm2=int6464#3 | ||
2862 | # asm 2: pxor <xmm12=%xmm12,<xmm2=%xmm2 | ||
2863 | pxor %xmm12,%xmm2 | ||
2864 | |||
2865 | # qhasm: xmm3 ^= xmm14 | ||
2866 | # asm 1: pxor <xmm14=int6464#15,<xmm3=int6464#4 | ||
2867 | # asm 2: pxor <xmm14=%xmm14,<xmm3=%xmm3 | ||
2868 | pxor %xmm14,%xmm3 | ||
2869 | |||
2870 | # qhasm: xmm4 ^= xmm11 | ||
2871 | # asm 1: pxor <xmm11=int6464#12,<xmm4=int6464#5 | ||
2872 | # asm 2: pxor <xmm11=%xmm11,<xmm4=%xmm4 | ||
2873 | pxor %xmm11,%xmm4 | ||
2874 | |||
2875 | # qhasm: xmm5 ^= xmm15 | ||
2876 | # asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6 | ||
2877 | # asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5 | ||
2878 | pxor %xmm15,%xmm5 | ||
2879 | |||
2880 | # qhasm: xmm6 ^= xmm10 | ||
2881 | # asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7 | ||
2882 | # asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6 | ||
2883 | pxor %xmm10,%xmm6 | ||
2884 | |||
2885 | # qhasm: xmm7 ^= xmm13 | ||
2886 | # asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8 | ||
2887 | # asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7 | ||
2888 | pxor %xmm13,%xmm7 | ||
2889 | |||
2890 | # qhasm: xmm0 ^= *(int128 *)(c + 256) | ||
2891 | # asm 1: pxor 256(<c=int64#4),<xmm0=int6464#1 | ||
2892 | # asm 2: pxor 256(<c=%rcx),<xmm0=%xmm0 | ||
2893 | pxor 256(%rcx),%xmm0 | ||
2894 | |||
2895 | # qhasm: shuffle bytes of xmm0 by SR | ||
2896 | # asm 1: pshufb SR,<xmm0=int6464#1 | ||
2897 | # asm 2: pshufb SR,<xmm0=%xmm0 | ||
2898 | pshufb SR,%xmm0 | ||
2899 | |||
2900 | # qhasm: xmm1 ^= *(int128 *)(c + 272) | ||
2901 | # asm 1: pxor 272(<c=int64#4),<xmm1=int6464#2 | ||
2902 | # asm 2: pxor 272(<c=%rcx),<xmm1=%xmm1 | ||
2903 | pxor 272(%rcx),%xmm1 | ||
2904 | |||
2905 | # qhasm: shuffle bytes of xmm1 by SR | ||
2906 | # asm 1: pshufb SR,<xmm1=int6464#2 | ||
2907 | # asm 2: pshufb SR,<xmm1=%xmm1 | ||
2908 | pshufb SR,%xmm1 | ||
2909 | |||
2910 | # qhasm: xmm2 ^= *(int128 *)(c + 288) | ||
2911 | # asm 1: pxor 288(<c=int64#4),<xmm2=int6464#3 | ||
2912 | # asm 2: pxor 288(<c=%rcx),<xmm2=%xmm2 | ||
2913 | pxor 288(%rcx),%xmm2 | ||
2914 | |||
2915 | # qhasm: shuffle bytes of xmm2 by SR | ||
2916 | # asm 1: pshufb SR,<xmm2=int6464#3 | ||
2917 | # asm 2: pshufb SR,<xmm2=%xmm2 | ||
2918 | pshufb SR,%xmm2 | ||
2919 | |||
2920 | # qhasm: xmm3 ^= *(int128 *)(c + 304) | ||
2921 | # asm 1: pxor 304(<c=int64#4),<xmm3=int6464#4 | ||
2922 | # asm 2: pxor 304(<c=%rcx),<xmm3=%xmm3 | ||
2923 | pxor 304(%rcx),%xmm3 | ||
2924 | |||
2925 | # qhasm: shuffle bytes of xmm3 by SR | ||
2926 | # asm 1: pshufb SR,<xmm3=int6464#4 | ||
2927 | # asm 2: pshufb SR,<xmm3=%xmm3 | ||
2928 | pshufb SR,%xmm3 | ||
2929 | |||
2930 | # qhasm: xmm4 ^= *(int128 *)(c + 320) | ||
2931 | # asm 1: pxor 320(<c=int64#4),<xmm4=int6464#5 | ||
2932 | # asm 2: pxor 320(<c=%rcx),<xmm4=%xmm4 | ||
2933 | pxor 320(%rcx),%xmm4 | ||
2934 | |||
2935 | # qhasm: shuffle bytes of xmm4 by SR | ||
2936 | # asm 1: pshufb SR,<xmm4=int6464#5 | ||
2937 | # asm 2: pshufb SR,<xmm4=%xmm4 | ||
2938 | pshufb SR,%xmm4 | ||
2939 | |||
2940 | # qhasm: xmm5 ^= *(int128 *)(c + 336) | ||
2941 | # asm 1: pxor 336(<c=int64#4),<xmm5=int6464#6 | ||
2942 | # asm 2: pxor 336(<c=%rcx),<xmm5=%xmm5 | ||
2943 | pxor 336(%rcx),%xmm5 | ||
2944 | |||
2945 | # qhasm: shuffle bytes of xmm5 by SR | ||
2946 | # asm 1: pshufb SR,<xmm5=int6464#6 | ||
2947 | # asm 2: pshufb SR,<xmm5=%xmm5 | ||
2948 | pshufb SR,%xmm5 | ||
2949 | |||
2950 | # qhasm: xmm6 ^= *(int128 *)(c + 352) | ||
2951 | # asm 1: pxor 352(<c=int64#4),<xmm6=int6464#7 | ||
2952 | # asm 2: pxor 352(<c=%rcx),<xmm6=%xmm6 | ||
2953 | pxor 352(%rcx),%xmm6 | ||
2954 | |||
2955 | # qhasm: shuffle bytes of xmm6 by SR | ||
2956 | # asm 1: pshufb SR,<xmm6=int6464#7 | ||
2957 | # asm 2: pshufb SR,<xmm6=%xmm6 | ||
2958 | pshufb SR,%xmm6 | ||
2959 | |||
2960 | # qhasm: xmm7 ^= *(int128 *)(c + 368) | ||
2961 | # asm 1: pxor 368(<c=int64#4),<xmm7=int6464#8 | ||
2962 | # asm 2: pxor 368(<c=%rcx),<xmm7=%xmm7 | ||
2963 | pxor 368(%rcx),%xmm7 | ||
2964 | |||
2965 | # qhasm: shuffle bytes of xmm7 by SR | ||
2966 | # asm 1: pshufb SR,<xmm7=int6464#8 | ||
2967 | # asm 2: pshufb SR,<xmm7=%xmm7 | ||
2968 | pshufb SR,%xmm7 | ||
2969 | |||
2970 | # qhasm: xmm5 ^= xmm6 | ||
2971 | # asm 1: pxor <xmm6=int6464#7,<xmm5=int6464#6 | ||
2972 | # asm 2: pxor <xmm6=%xmm6,<xmm5=%xmm5 | ||
2973 | pxor %xmm6,%xmm5 | ||
2974 | |||
2975 | # qhasm: xmm2 ^= xmm1 | ||
2976 | # asm 1: pxor <xmm1=int6464#2,<xmm2=int6464#3 | ||
2977 | # asm 2: pxor <xmm1=%xmm1,<xmm2=%xmm2 | ||
2978 | pxor %xmm1,%xmm2 | ||
2979 | |||
2980 | # qhasm: xmm5 ^= xmm0 | ||
2981 | # asm 1: pxor <xmm0=int6464#1,<xmm5=int6464#6 | ||
2982 | # asm 2: pxor <xmm0=%xmm0,<xmm5=%xmm5 | ||
2983 | pxor %xmm0,%xmm5 | ||
2984 | |||
2985 | # qhasm: xmm6 ^= xmm2 | ||
2986 | # asm 1: pxor <xmm2=int6464#3,<xmm6=int6464#7 | ||
2987 | # asm 2: pxor <xmm2=%xmm2,<xmm6=%xmm6 | ||
2988 | pxor %xmm2,%xmm6 | ||
2989 | |||
2990 | # qhasm: xmm3 ^= xmm0 | ||
2991 | # asm 1: pxor <xmm0=int6464#1,<xmm3=int6464#4 | ||
2992 | # asm 2: pxor <xmm0=%xmm0,<xmm3=%xmm3 | ||
2993 | pxor %xmm0,%xmm3 | ||
2994 | |||
2995 | # qhasm: xmm6 ^= xmm3 | ||
2996 | # asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7 | ||
2997 | # asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6 | ||
2998 | pxor %xmm3,%xmm6 | ||
2999 | |||
3000 | # qhasm: xmm3 ^= xmm7 | ||
3001 | # asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4 | ||
3002 | # asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3 | ||
3003 | pxor %xmm7,%xmm3 | ||
3004 | |||
3005 | # qhasm: xmm3 ^= xmm4 | ||
3006 | # asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4 | ||
3007 | # asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3 | ||
3008 | pxor %xmm4,%xmm3 | ||
3009 | |||
3010 | # qhasm: xmm7 ^= xmm5 | ||
3011 | # asm 1: pxor <xmm5=int6464#6,<xmm7=int6464#8 | ||
3012 | # asm 2: pxor <xmm5=%xmm5,<xmm7=%xmm7 | ||
3013 | pxor %xmm5,%xmm7 | ||
3014 | |||
3015 | # qhasm: xmm3 ^= xmm1 | ||
3016 | # asm 1: pxor <xmm1=int6464#2,<xmm3=int6464#4 | ||
3017 | # asm 2: pxor <xmm1=%xmm1,<xmm3=%xmm3 | ||
3018 | pxor %xmm1,%xmm3 | ||
3019 | |||
3020 | # qhasm: xmm4 ^= xmm5 | ||
3021 | # asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5 | ||
3022 | # asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4 | ||
3023 | pxor %xmm5,%xmm4 | ||
3024 | |||
3025 | # qhasm: xmm2 ^= xmm7 | ||
3026 | # asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3 | ||
3027 | # asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2 | ||
3028 | pxor %xmm7,%xmm2 | ||
3029 | |||
3030 | # qhasm: xmm1 ^= xmm5 | ||
3031 | # asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2 | ||
3032 | # asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1 | ||
3033 | pxor %xmm5,%xmm1 | ||
3034 | |||
3035 | # qhasm: xmm11 = xmm7 | ||
3036 | # asm 1: movdqa <xmm7=int6464#8,>xmm11=int6464#9 | ||
3037 | # asm 2: movdqa <xmm7=%xmm7,>xmm11=%xmm8 | ||
3038 | movdqa %xmm7,%xmm8 | ||
3039 | |||
3040 | # qhasm: xmm10 = xmm1 | ||
3041 | # asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10 | ||
3042 | # asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9 | ||
3043 | movdqa %xmm1,%xmm9 | ||
3044 | |||
3045 | # qhasm: xmm9 = xmm5 | ||
3046 | # asm 1: movdqa <xmm5=int6464#6,>xmm9=int6464#11 | ||
3047 | # asm 2: movdqa <xmm5=%xmm5,>xmm9=%xmm10 | ||
3048 | movdqa %xmm5,%xmm10 | ||
3049 | |||
3050 | # qhasm: xmm13 = xmm2 | ||
3051 | # asm 1: movdqa <xmm2=int6464#3,>xmm13=int6464#12 | ||
3052 | # asm 2: movdqa <xmm2=%xmm2,>xmm13=%xmm11 | ||
3053 | movdqa %xmm2,%xmm11 | ||
3054 | |||
3055 | # qhasm: xmm12 = xmm6 | ||
3056 | # asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#13 | ||
3057 | # asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm12 | ||
3058 | movdqa %xmm6,%xmm12 | ||
3059 | |||
3060 | # qhasm: xmm11 ^= xmm4 | ||
3061 | # asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#9 | ||
3062 | # asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm8 | ||
3063 | pxor %xmm4,%xmm8 | ||
3064 | |||
3065 | # qhasm: xmm10 ^= xmm2 | ||
3066 | # asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#10 | ||
3067 | # asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm9 | ||
3068 | pxor %xmm2,%xmm9 | ||
3069 | |||
3070 | # qhasm: xmm9 ^= xmm3 | ||
3071 | # asm 1: pxor <xmm3=int6464#4,<xmm9=int6464#11 | ||
3072 | # asm 2: pxor <xmm3=%xmm3,<xmm9=%xmm10 | ||
3073 | pxor %xmm3,%xmm10 | ||
3074 | |||
3075 | # qhasm: xmm13 ^= xmm4 | ||
3076 | # asm 1: pxor <xmm4=int6464#5,<xmm13=int6464#12 | ||
3077 | # asm 2: pxor <xmm4=%xmm4,<xmm13=%xmm11 | ||
3078 | pxor %xmm4,%xmm11 | ||
3079 | |||
3080 | # qhasm: xmm12 ^= xmm0 | ||
3081 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13 | ||
3082 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12 | ||
3083 | pxor %xmm0,%xmm12 | ||
3084 | |||
3085 | # qhasm: xmm14 = xmm11 | ||
3086 | # asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14 | ||
3087 | # asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13 | ||
3088 | movdqa %xmm8,%xmm13 | ||
3089 | |||
3090 | # qhasm: xmm8 = xmm10 | ||
3091 | # asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15 | ||
3092 | # asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14 | ||
3093 | movdqa %xmm9,%xmm14 | ||
3094 | |||
3095 | # qhasm: xmm15 = xmm11 | ||
3096 | # asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16 | ||
3097 | # asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15 | ||
3098 | movdqa %xmm8,%xmm15 | ||
3099 | |||
3100 | # qhasm: xmm10 |= xmm9 | ||
3101 | # asm 1: por <xmm9=int6464#11,<xmm10=int6464#10 | ||
3102 | # asm 2: por <xmm9=%xmm10,<xmm10=%xmm9 | ||
3103 | por %xmm10,%xmm9 | ||
3104 | |||
3105 | # qhasm: xmm11 |= xmm12 | ||
3106 | # asm 1: por <xmm12=int6464#13,<xmm11=int6464#9 | ||
3107 | # asm 2: por <xmm12=%xmm12,<xmm11=%xmm8 | ||
3108 | por %xmm12,%xmm8 | ||
3109 | |||
3110 | # qhasm: xmm15 ^= xmm8 | ||
3111 | # asm 1: pxor <xmm8=int6464#15,<xmm15=int6464#16 | ||
3112 | # asm 2: pxor <xmm8=%xmm14,<xmm15=%xmm15 | ||
3113 | pxor %xmm14,%xmm15 | ||
3114 | |||
3115 | # qhasm: xmm14 &= xmm12 | ||
3116 | # asm 1: pand <xmm12=int6464#13,<xmm14=int6464#14 | ||
3117 | # asm 2: pand <xmm12=%xmm12,<xmm14=%xmm13 | ||
3118 | pand %xmm12,%xmm13 | ||
3119 | |||
3120 | # qhasm: xmm8 &= xmm9 | ||
3121 | # asm 1: pand <xmm9=int6464#11,<xmm8=int6464#15 | ||
3122 | # asm 2: pand <xmm9=%xmm10,<xmm8=%xmm14 | ||
3123 | pand %xmm10,%xmm14 | ||
3124 | |||
3125 | # qhasm: xmm12 ^= xmm9 | ||
3126 | # asm 1: pxor <xmm9=int6464#11,<xmm12=int6464#13 | ||
3127 | # asm 2: pxor <xmm9=%xmm10,<xmm12=%xmm12 | ||
3128 | pxor %xmm10,%xmm12 | ||
3129 | |||
3130 | # qhasm: xmm15 &= xmm12 | ||
3131 | # asm 1: pand <xmm12=int6464#13,<xmm15=int6464#16 | ||
3132 | # asm 2: pand <xmm12=%xmm12,<xmm15=%xmm15 | ||
3133 | pand %xmm12,%xmm15 | ||
3134 | |||
3135 | # qhasm: xmm12 = xmm3 | ||
3136 | # asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#11 | ||
3137 | # asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm10 | ||
3138 | movdqa %xmm3,%xmm10 | ||
3139 | |||
3140 | # qhasm: xmm12 ^= xmm0 | ||
3141 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#11 | ||
3142 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm10 | ||
3143 | pxor %xmm0,%xmm10 | ||
3144 | |||
3145 | # qhasm: xmm13 &= xmm12 | ||
3146 | # asm 1: pand <xmm12=int6464#11,<xmm13=int6464#12 | ||
3147 | # asm 2: pand <xmm12=%xmm10,<xmm13=%xmm11 | ||
3148 | pand %xmm10,%xmm11 | ||
3149 | |||
3150 | # qhasm: xmm11 ^= xmm13 | ||
3151 | # asm 1: pxor <xmm13=int6464#12,<xmm11=int6464#9 | ||
3152 | # asm 2: pxor <xmm13=%xmm11,<xmm11=%xmm8 | ||
3153 | pxor %xmm11,%xmm8 | ||
3154 | |||
3155 | # qhasm: xmm10 ^= xmm13 | ||
3156 | # asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10 | ||
3157 | # asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9 | ||
3158 | pxor %xmm11,%xmm9 | ||
3159 | |||
3160 | # qhasm: xmm13 = xmm7 | ||
3161 | # asm 1: movdqa <xmm7=int6464#8,>xmm13=int6464#11 | ||
3162 | # asm 2: movdqa <xmm7=%xmm7,>xmm13=%xmm10 | ||
3163 | movdqa %xmm7,%xmm10 | ||
3164 | |||
3165 | # qhasm: xmm13 ^= xmm1 | ||
3166 | # asm 1: pxor <xmm1=int6464#2,<xmm13=int6464#11 | ||
3167 | # asm 2: pxor <xmm1=%xmm1,<xmm13=%xmm10 | ||
3168 | pxor %xmm1,%xmm10 | ||
3169 | |||
3170 | # qhasm: xmm12 = xmm5 | ||
3171 | # asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#12 | ||
3172 | # asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm11 | ||
3173 | movdqa %xmm5,%xmm11 | ||
3174 | |||
3175 | # qhasm: xmm9 = xmm13 | ||
3176 | # asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13 | ||
3177 | # asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12 | ||
3178 | movdqa %xmm10,%xmm12 | ||
3179 | |||
3180 | # qhasm: xmm12 ^= xmm6 | ||
3181 | # asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#12 | ||
3182 | # asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm11 | ||
3183 | pxor %xmm6,%xmm11 | ||
3184 | |||
3185 | # qhasm: xmm9 |= xmm12 | ||
3186 | # asm 1: por <xmm12=int6464#12,<xmm9=int6464#13 | ||
3187 | # asm 2: por <xmm12=%xmm11,<xmm9=%xmm12 | ||
3188 | por %xmm11,%xmm12 | ||
3189 | |||
3190 | # qhasm: xmm13 &= xmm12 | ||
3191 | # asm 1: pand <xmm12=int6464#12,<xmm13=int6464#11 | ||
3192 | # asm 2: pand <xmm12=%xmm11,<xmm13=%xmm10 | ||
3193 | pand %xmm11,%xmm10 | ||
3194 | |||
3195 | # qhasm: xmm8 ^= xmm13 | ||
3196 | # asm 1: pxor <xmm13=int6464#11,<xmm8=int6464#15 | ||
3197 | # asm 2: pxor <xmm13=%xmm10,<xmm8=%xmm14 | ||
3198 | pxor %xmm10,%xmm14 | ||
3199 | |||
3200 | # qhasm: xmm11 ^= xmm15 | ||
3201 | # asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#9 | ||
3202 | # asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm8 | ||
3203 | pxor %xmm15,%xmm8 | ||
3204 | |||
3205 | # qhasm: xmm10 ^= xmm14 | ||
3206 | # asm 1: pxor <xmm14=int6464#14,<xmm10=int6464#10 | ||
3207 | # asm 2: pxor <xmm14=%xmm13,<xmm10=%xmm9 | ||
3208 | pxor %xmm13,%xmm9 | ||
3209 | |||
3210 | # qhasm: xmm9 ^= xmm15 | ||
3211 | # asm 1: pxor <xmm15=int6464#16,<xmm9=int6464#13 | ||
3212 | # asm 2: pxor <xmm15=%xmm15,<xmm9=%xmm12 | ||
3213 | pxor %xmm15,%xmm12 | ||
3214 | |||
3215 | # qhasm: xmm8 ^= xmm14 | ||
3216 | # asm 1: pxor <xmm14=int6464#14,<xmm8=int6464#15 | ||
3217 | # asm 2: pxor <xmm14=%xmm13,<xmm8=%xmm14 | ||
3218 | pxor %xmm13,%xmm14 | ||
3219 | |||
3220 | # qhasm: xmm9 ^= xmm14 | ||
3221 | # asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13 | ||
3222 | # asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12 | ||
3223 | pxor %xmm13,%xmm12 | ||
3224 | |||
3225 | # qhasm: xmm12 = xmm2 | ||
3226 | # asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#11 | ||
3227 | # asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm10 | ||
3228 | movdqa %xmm2,%xmm10 | ||
3229 | |||
3230 | # qhasm: xmm13 = xmm4 | ||
3231 | # asm 1: movdqa <xmm4=int6464#5,>xmm13=int6464#12 | ||
3232 | # asm 2: movdqa <xmm4=%xmm4,>xmm13=%xmm11 | ||
3233 | movdqa %xmm4,%xmm11 | ||
3234 | |||
3235 | # qhasm: xmm14 = xmm1 | ||
3236 | # asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14 | ||
3237 | # asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13 | ||
3238 | movdqa %xmm1,%xmm13 | ||
3239 | |||
3240 | # qhasm: xmm15 = xmm7 | ||
3241 | # asm 1: movdqa <xmm7=int6464#8,>xmm15=int6464#16 | ||
3242 | # asm 2: movdqa <xmm7=%xmm7,>xmm15=%xmm15 | ||
3243 | movdqa %xmm7,%xmm15 | ||
3244 | |||
3245 | # qhasm: xmm12 &= xmm3 | ||
3246 | # asm 1: pand <xmm3=int6464#4,<xmm12=int6464#11 | ||
3247 | # asm 2: pand <xmm3=%xmm3,<xmm12=%xmm10 | ||
3248 | pand %xmm3,%xmm10 | ||
3249 | |||
3250 | # qhasm: xmm13 &= xmm0 | ||
3251 | # asm 1: pand <xmm0=int6464#1,<xmm13=int6464#12 | ||
3252 | # asm 2: pand <xmm0=%xmm0,<xmm13=%xmm11 | ||
3253 | pand %xmm0,%xmm11 | ||
3254 | |||
3255 | # qhasm: xmm14 &= xmm5 | ||
3256 | # asm 1: pand <xmm5=int6464#6,<xmm14=int6464#14 | ||
3257 | # asm 2: pand <xmm5=%xmm5,<xmm14=%xmm13 | ||
3258 | pand %xmm5,%xmm13 | ||
3259 | |||
3260 | # qhasm: xmm15 |= xmm6 | ||
3261 | # asm 1: por <xmm6=int6464#7,<xmm15=int6464#16 | ||
3262 | # asm 2: por <xmm6=%xmm6,<xmm15=%xmm15 | ||
3263 | por %xmm6,%xmm15 | ||
3264 | |||
3265 | # qhasm: xmm11 ^= xmm12 | ||
3266 | # asm 1: pxor <xmm12=int6464#11,<xmm11=int6464#9 | ||
3267 | # asm 2: pxor <xmm12=%xmm10,<xmm11=%xmm8 | ||
3268 | pxor %xmm10,%xmm8 | ||
3269 | |||
3270 | # qhasm: xmm10 ^= xmm13 | ||
3271 | # asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10 | ||
3272 | # asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9 | ||
3273 | pxor %xmm11,%xmm9 | ||
3274 | |||
3275 | # qhasm: xmm9 ^= xmm14 | ||
3276 | # asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13 | ||
3277 | # asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12 | ||
3278 | pxor %xmm13,%xmm12 | ||
3279 | |||
3280 | # qhasm: xmm8 ^= xmm15 | ||
3281 | # asm 1: pxor <xmm15=int6464#16,<xmm8=int6464#15 | ||
3282 | # asm 2: pxor <xmm15=%xmm15,<xmm8=%xmm14 | ||
3283 | pxor %xmm15,%xmm14 | ||
3284 | |||
3285 | # qhasm: xmm12 = xmm11 | ||
3286 | # asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11 | ||
3287 | # asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10 | ||
3288 | movdqa %xmm8,%xmm10 | ||
3289 | |||
3290 | # qhasm: xmm12 ^= xmm10 | ||
3291 | # asm 1: pxor <xmm10=int6464#10,<xmm12=int6464#11 | ||
3292 | # asm 2: pxor <xmm10=%xmm9,<xmm12=%xmm10 | ||
3293 | pxor %xmm9,%xmm10 | ||
3294 | |||
3295 | # qhasm: xmm11 &= xmm9 | ||
3296 | # asm 1: pand <xmm9=int6464#13,<xmm11=int6464#9 | ||
3297 | # asm 2: pand <xmm9=%xmm12,<xmm11=%xmm8 | ||
3298 | pand %xmm12,%xmm8 | ||
3299 | |||
3300 | # qhasm: xmm14 = xmm8 | ||
3301 | # asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12 | ||
3302 | # asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11 | ||
3303 | movdqa %xmm14,%xmm11 | ||
3304 | |||
3305 | # qhasm: xmm14 ^= xmm11 | ||
3306 | # asm 1: pxor <xmm11=int6464#9,<xmm14=int6464#12 | ||
3307 | # asm 2: pxor <xmm11=%xmm8,<xmm14=%xmm11 | ||
3308 | pxor %xmm8,%xmm11 | ||
3309 | |||
3310 | # qhasm: xmm15 = xmm12 | ||
3311 | # asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14 | ||
3312 | # asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13 | ||
3313 | movdqa %xmm10,%xmm13 | ||
3314 | |||
3315 | # qhasm: xmm15 &= xmm14 | ||
3316 | # asm 1: pand <xmm14=int6464#12,<xmm15=int6464#14 | ||
3317 | # asm 2: pand <xmm14=%xmm11,<xmm15=%xmm13 | ||
3318 | pand %xmm11,%xmm13 | ||
3319 | |||
3320 | # qhasm: xmm15 ^= xmm10 | ||
3321 | # asm 1: pxor <xmm10=int6464#10,<xmm15=int6464#14 | ||
3322 | # asm 2: pxor <xmm10=%xmm9,<xmm15=%xmm13 | ||
3323 | pxor %xmm9,%xmm13 | ||
3324 | |||
3325 | # qhasm: xmm13 = xmm9 | ||
3326 | # asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16 | ||
3327 | # asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15 | ||
3328 | movdqa %xmm12,%xmm15 | ||
3329 | |||
3330 | # qhasm: xmm13 ^= xmm8 | ||
3331 | # asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16 | ||
3332 | # asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15 | ||
3333 | pxor %xmm14,%xmm15 | ||
3334 | |||
3335 | # qhasm: xmm11 ^= xmm10 | ||
3336 | # asm 1: pxor <xmm10=int6464#10,<xmm11=int6464#9 | ||
3337 | # asm 2: pxor <xmm10=%xmm9,<xmm11=%xmm8 | ||
3338 | pxor %xmm9,%xmm8 | ||
3339 | |||
3340 | # qhasm: xmm13 &= xmm11 | ||
3341 | # asm 1: pand <xmm11=int6464#9,<xmm13=int6464#16 | ||
3342 | # asm 2: pand <xmm11=%xmm8,<xmm13=%xmm15 | ||
3343 | pand %xmm8,%xmm15 | ||
3344 | |||
3345 | # qhasm: xmm13 ^= xmm8 | ||
3346 | # asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16 | ||
3347 | # asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15 | ||
3348 | pxor %xmm14,%xmm15 | ||
3349 | |||
3350 | # qhasm: xmm9 ^= xmm13 | ||
3351 | # asm 1: pxor <xmm13=int6464#16,<xmm9=int6464#13 | ||
3352 | # asm 2: pxor <xmm13=%xmm15,<xmm9=%xmm12 | ||
3353 | pxor %xmm15,%xmm12 | ||
3354 | |||
3355 | # qhasm: xmm10 = xmm14 | ||
3356 | # asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9 | ||
3357 | # asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8 | ||
3358 | movdqa %xmm11,%xmm8 | ||
3359 | |||
3360 | # qhasm: xmm10 ^= xmm13 | ||
3361 | # asm 1: pxor <xmm13=int6464#16,<xmm10=int6464#9 | ||
3362 | # asm 2: pxor <xmm13=%xmm15,<xmm10=%xmm8 | ||
3363 | pxor %xmm15,%xmm8 | ||
3364 | |||
3365 | # qhasm: xmm10 &= xmm8 | ||
3366 | # asm 1: pand <xmm8=int6464#15,<xmm10=int6464#9 | ||
3367 | # asm 2: pand <xmm8=%xmm14,<xmm10=%xmm8 | ||
3368 | pand %xmm14,%xmm8 | ||
3369 | |||
3370 | # qhasm: xmm9 ^= xmm10 | ||
3371 | # asm 1: pxor <xmm10=int6464#9,<xmm9=int6464#13 | ||
3372 | # asm 2: pxor <xmm10=%xmm8,<xmm9=%xmm12 | ||
3373 | pxor %xmm8,%xmm12 | ||
3374 | |||
3375 | # qhasm: xmm14 ^= xmm10 | ||
3376 | # asm 1: pxor <xmm10=int6464#9,<xmm14=int6464#12 | ||
3377 | # asm 2: pxor <xmm10=%xmm8,<xmm14=%xmm11 | ||
3378 | pxor %xmm8,%xmm11 | ||
3379 | |||
3380 | # qhasm: xmm14 &= xmm15 | ||
3381 | # asm 1: pand <xmm15=int6464#14,<xmm14=int6464#12 | ||
3382 | # asm 2: pand <xmm15=%xmm13,<xmm14=%xmm11 | ||
3383 | pand %xmm13,%xmm11 | ||
3384 | |||
3385 | # qhasm: xmm14 ^= xmm12 | ||
3386 | # asm 1: pxor <xmm12=int6464#11,<xmm14=int6464#12 | ||
3387 | # asm 2: pxor <xmm12=%xmm10,<xmm14=%xmm11 | ||
3388 | pxor %xmm10,%xmm11 | ||
3389 | |||
3390 | # qhasm: xmm12 = xmm6 | ||
3391 | # asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#9 | ||
3392 | # asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm8 | ||
3393 | movdqa %xmm6,%xmm8 | ||
3394 | |||
3395 | # qhasm: xmm8 = xmm5 | ||
3396 | # asm 1: movdqa <xmm5=int6464#6,>xmm8=int6464#10 | ||
3397 | # asm 2: movdqa <xmm5=%xmm5,>xmm8=%xmm9 | ||
3398 | movdqa %xmm5,%xmm9 | ||
3399 | |||
3400 | # qhasm: xmm10 = xmm15 | ||
3401 | # asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11 | ||
3402 | # asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10 | ||
3403 | movdqa %xmm13,%xmm10 | ||
3404 | |||
3405 | # qhasm: xmm10 ^= xmm14 | ||
3406 | # asm 1: pxor <xmm14=int6464#12,<xmm10=int6464#11 | ||
3407 | # asm 2: pxor <xmm14=%xmm11,<xmm10=%xmm10 | ||
3408 | pxor %xmm11,%xmm10 | ||
3409 | |||
3410 | # qhasm: xmm10 &= xmm6 | ||
3411 | # asm 1: pand <xmm6=int6464#7,<xmm10=int6464#11 | ||
3412 | # asm 2: pand <xmm6=%xmm6,<xmm10=%xmm10 | ||
3413 | pand %xmm6,%xmm10 | ||
3414 | |||
3415 | # qhasm: xmm6 ^= xmm5 | ||
3416 | # asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7 | ||
3417 | # asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6 | ||
3418 | pxor %xmm5,%xmm6 | ||
3419 | |||
3420 | # qhasm: xmm6 &= xmm14 | ||
3421 | # asm 1: pand <xmm14=int6464#12,<xmm6=int6464#7 | ||
3422 | # asm 2: pand <xmm14=%xmm11,<xmm6=%xmm6 | ||
3423 | pand %xmm11,%xmm6 | ||
3424 | |||
3425 | # qhasm: xmm5 &= xmm15 | ||
3426 | # asm 1: pand <xmm15=int6464#14,<xmm5=int6464#6 | ||
3427 | # asm 2: pand <xmm15=%xmm13,<xmm5=%xmm5 | ||
3428 | pand %xmm13,%xmm5 | ||
3429 | |||
3430 | # qhasm: xmm6 ^= xmm5 | ||
3431 | # asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7 | ||
3432 | # asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6 | ||
3433 | pxor %xmm5,%xmm6 | ||
3434 | |||
3435 | # qhasm: xmm5 ^= xmm10 | ||
3436 | # asm 1: pxor <xmm10=int6464#11,<xmm5=int6464#6 | ||
3437 | # asm 2: pxor <xmm10=%xmm10,<xmm5=%xmm5 | ||
3438 | pxor %xmm10,%xmm5 | ||
3439 | |||
3440 | # qhasm: xmm12 ^= xmm0 | ||
3441 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#9 | ||
3442 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm8 | ||
3443 | pxor %xmm0,%xmm8 | ||
3444 | |||
3445 | # qhasm: xmm8 ^= xmm3 | ||
3446 | # asm 1: pxor <xmm3=int6464#4,<xmm8=int6464#10 | ||
3447 | # asm 2: pxor <xmm3=%xmm3,<xmm8=%xmm9 | ||
3448 | pxor %xmm3,%xmm9 | ||
3449 | |||
3450 | # qhasm: xmm15 ^= xmm13 | ||
3451 | # asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14 | ||
3452 | # asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13 | ||
3453 | pxor %xmm15,%xmm13 | ||
3454 | |||
3455 | # qhasm: xmm14 ^= xmm9 | ||
3456 | # asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12 | ||
3457 | # asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11 | ||
3458 | pxor %xmm12,%xmm11 | ||
3459 | |||
3460 | # qhasm: xmm11 = xmm15 | ||
3461 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
3462 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
3463 | movdqa %xmm13,%xmm10 | ||
3464 | |||
3465 | # qhasm: xmm11 ^= xmm14 | ||
3466 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
3467 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
3468 | pxor %xmm11,%xmm10 | ||
3469 | |||
3470 | # qhasm: xmm11 &= xmm12 | ||
3471 | # asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11 | ||
3472 | # asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10 | ||
3473 | pand %xmm8,%xmm10 | ||
3474 | |||
3475 | # qhasm: xmm12 ^= xmm8 | ||
3476 | # asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9 | ||
3477 | # asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8 | ||
3478 | pxor %xmm9,%xmm8 | ||
3479 | |||
3480 | # qhasm: xmm12 &= xmm14 | ||
3481 | # asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9 | ||
3482 | # asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8 | ||
3483 | pand %xmm11,%xmm8 | ||
3484 | |||
3485 | # qhasm: xmm8 &= xmm15 | ||
3486 | # asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10 | ||
3487 | # asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9 | ||
3488 | pand %xmm13,%xmm9 | ||
3489 | |||
3490 | # qhasm: xmm8 ^= xmm12 | ||
3491 | # asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10 | ||
3492 | # asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9 | ||
3493 | pxor %xmm8,%xmm9 | ||
3494 | |||
3495 | # qhasm: xmm12 ^= xmm11 | ||
3496 | # asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9 | ||
3497 | # asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8 | ||
3498 | pxor %xmm10,%xmm8 | ||
3499 | |||
3500 | # qhasm: xmm10 = xmm13 | ||
3501 | # asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11 | ||
3502 | # asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10 | ||
3503 | movdqa %xmm15,%xmm10 | ||
3504 | |||
3505 | # qhasm: xmm10 ^= xmm9 | ||
3506 | # asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11 | ||
3507 | # asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10 | ||
3508 | pxor %xmm12,%xmm10 | ||
3509 | |||
3510 | # qhasm: xmm10 &= xmm0 | ||
3511 | # asm 1: pand <xmm0=int6464#1,<xmm10=int6464#11 | ||
3512 | # asm 2: pand <xmm0=%xmm0,<xmm10=%xmm10 | ||
3513 | pand %xmm0,%xmm10 | ||
3514 | |||
3515 | # qhasm: xmm0 ^= xmm3 | ||
3516 | # asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1 | ||
3517 | # asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0 | ||
3518 | pxor %xmm3,%xmm0 | ||
3519 | |||
3520 | # qhasm: xmm0 &= xmm9 | ||
3521 | # asm 1: pand <xmm9=int6464#13,<xmm0=int6464#1 | ||
3522 | # asm 2: pand <xmm9=%xmm12,<xmm0=%xmm0 | ||
3523 | pand %xmm12,%xmm0 | ||
3524 | |||
3525 | # qhasm: xmm3 &= xmm13 | ||
3526 | # asm 1: pand <xmm13=int6464#16,<xmm3=int6464#4 | ||
3527 | # asm 2: pand <xmm13=%xmm15,<xmm3=%xmm3 | ||
3528 | pand %xmm15,%xmm3 | ||
3529 | |||
3530 | # qhasm: xmm0 ^= xmm3 | ||
3531 | # asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1 | ||
3532 | # asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0 | ||
3533 | pxor %xmm3,%xmm0 | ||
3534 | |||
3535 | # qhasm: xmm3 ^= xmm10 | ||
3536 | # asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4 | ||
3537 | # asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3 | ||
3538 | pxor %xmm10,%xmm3 | ||
3539 | |||
3540 | # qhasm: xmm6 ^= xmm12 | ||
3541 | # asm 1: pxor <xmm12=int6464#9,<xmm6=int6464#7 | ||
3542 | # asm 2: pxor <xmm12=%xmm8,<xmm6=%xmm6 | ||
3543 | pxor %xmm8,%xmm6 | ||
3544 | |||
3545 | # qhasm: xmm0 ^= xmm12 | ||
3546 | # asm 1: pxor <xmm12=int6464#9,<xmm0=int6464#1 | ||
3547 | # asm 2: pxor <xmm12=%xmm8,<xmm0=%xmm0 | ||
3548 | pxor %xmm8,%xmm0 | ||
3549 | |||
3550 | # qhasm: xmm5 ^= xmm8 | ||
3551 | # asm 1: pxor <xmm8=int6464#10,<xmm5=int6464#6 | ||
3552 | # asm 2: pxor <xmm8=%xmm9,<xmm5=%xmm5 | ||
3553 | pxor %xmm9,%xmm5 | ||
3554 | |||
3555 | # qhasm: xmm3 ^= xmm8 | ||
3556 | # asm 1: pxor <xmm8=int6464#10,<xmm3=int6464#4 | ||
3557 | # asm 2: pxor <xmm8=%xmm9,<xmm3=%xmm3 | ||
3558 | pxor %xmm9,%xmm3 | ||
3559 | |||
3560 | # qhasm: xmm12 = xmm7 | ||
3561 | # asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#9 | ||
3562 | # asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm8 | ||
3563 | movdqa %xmm7,%xmm8 | ||
3564 | |||
3565 | # qhasm: xmm8 = xmm1 | ||
3566 | # asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10 | ||
3567 | # asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9 | ||
3568 | movdqa %xmm1,%xmm9 | ||
3569 | |||
3570 | # qhasm: xmm12 ^= xmm4 | ||
3571 | # asm 1: pxor <xmm4=int6464#5,<xmm12=int6464#9 | ||
3572 | # asm 2: pxor <xmm4=%xmm4,<xmm12=%xmm8 | ||
3573 | pxor %xmm4,%xmm8 | ||
3574 | |||
3575 | # qhasm: xmm8 ^= xmm2 | ||
3576 | # asm 1: pxor <xmm2=int6464#3,<xmm8=int6464#10 | ||
3577 | # asm 2: pxor <xmm2=%xmm2,<xmm8=%xmm9 | ||
3578 | pxor %xmm2,%xmm9 | ||
3579 | |||
3580 | # qhasm: xmm11 = xmm15 | ||
3581 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
3582 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
3583 | movdqa %xmm13,%xmm10 | ||
3584 | |||
3585 | # qhasm: xmm11 ^= xmm14 | ||
3586 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
3587 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
3588 | pxor %xmm11,%xmm10 | ||
3589 | |||
3590 | # qhasm: xmm11 &= xmm12 | ||
3591 | # asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11 | ||
3592 | # asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10 | ||
3593 | pand %xmm8,%xmm10 | ||
3594 | |||
3595 | # qhasm: xmm12 ^= xmm8 | ||
3596 | # asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9 | ||
3597 | # asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8 | ||
3598 | pxor %xmm9,%xmm8 | ||
3599 | |||
3600 | # qhasm: xmm12 &= xmm14 | ||
3601 | # asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9 | ||
3602 | # asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8 | ||
3603 | pand %xmm11,%xmm8 | ||
3604 | |||
3605 | # qhasm: xmm8 &= xmm15 | ||
3606 | # asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10 | ||
3607 | # asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9 | ||
3608 | pand %xmm13,%xmm9 | ||
3609 | |||
3610 | # qhasm: xmm8 ^= xmm12 | ||
3611 | # asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10 | ||
3612 | # asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9 | ||
3613 | pxor %xmm8,%xmm9 | ||
3614 | |||
3615 | # qhasm: xmm12 ^= xmm11 | ||
3616 | # asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9 | ||
3617 | # asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8 | ||
3618 | pxor %xmm10,%xmm8 | ||
3619 | |||
3620 | # qhasm: xmm10 = xmm13 | ||
3621 | # asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11 | ||
3622 | # asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10 | ||
3623 | movdqa %xmm15,%xmm10 | ||
3624 | |||
3625 | # qhasm: xmm10 ^= xmm9 | ||
3626 | # asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11 | ||
3627 | # asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10 | ||
3628 | pxor %xmm12,%xmm10 | ||
3629 | |||
3630 | # qhasm: xmm10 &= xmm4 | ||
3631 | # asm 1: pand <xmm4=int6464#5,<xmm10=int6464#11 | ||
3632 | # asm 2: pand <xmm4=%xmm4,<xmm10=%xmm10 | ||
3633 | pand %xmm4,%xmm10 | ||
3634 | |||
3635 | # qhasm: xmm4 ^= xmm2 | ||
3636 | # asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5 | ||
3637 | # asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4 | ||
3638 | pxor %xmm2,%xmm4 | ||
3639 | |||
3640 | # qhasm: xmm4 &= xmm9 | ||
3641 | # asm 1: pand <xmm9=int6464#13,<xmm4=int6464#5 | ||
3642 | # asm 2: pand <xmm9=%xmm12,<xmm4=%xmm4 | ||
3643 | pand %xmm12,%xmm4 | ||
3644 | |||
3645 | # qhasm: xmm2 &= xmm13 | ||
3646 | # asm 1: pand <xmm13=int6464#16,<xmm2=int6464#3 | ||
3647 | # asm 2: pand <xmm13=%xmm15,<xmm2=%xmm2 | ||
3648 | pand %xmm15,%xmm2 | ||
3649 | |||
3650 | # qhasm: xmm4 ^= xmm2 | ||
3651 | # asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5 | ||
3652 | # asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4 | ||
3653 | pxor %xmm2,%xmm4 | ||
3654 | |||
3655 | # qhasm: xmm2 ^= xmm10 | ||
3656 | # asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3 | ||
3657 | # asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2 | ||
3658 | pxor %xmm10,%xmm2 | ||
3659 | |||
3660 | # qhasm: xmm15 ^= xmm13 | ||
3661 | # asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14 | ||
3662 | # asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13 | ||
3663 | pxor %xmm15,%xmm13 | ||
3664 | |||
3665 | # qhasm: xmm14 ^= xmm9 | ||
3666 | # asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12 | ||
3667 | # asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11 | ||
3668 | pxor %xmm12,%xmm11 | ||
3669 | |||
3670 | # qhasm: xmm11 = xmm15 | ||
3671 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
3672 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
3673 | movdqa %xmm13,%xmm10 | ||
3674 | |||
3675 | # qhasm: xmm11 ^= xmm14 | ||
3676 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
3677 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
3678 | pxor %xmm11,%xmm10 | ||
3679 | |||
3680 | # qhasm: xmm11 &= xmm7 | ||
3681 | # asm 1: pand <xmm7=int6464#8,<xmm11=int6464#11 | ||
3682 | # asm 2: pand <xmm7=%xmm7,<xmm11=%xmm10 | ||
3683 | pand %xmm7,%xmm10 | ||
3684 | |||
3685 | # qhasm: xmm7 ^= xmm1 | ||
3686 | # asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8 | ||
3687 | # asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7 | ||
3688 | pxor %xmm1,%xmm7 | ||
3689 | |||
3690 | # qhasm: xmm7 &= xmm14 | ||
3691 | # asm 1: pand <xmm14=int6464#12,<xmm7=int6464#8 | ||
3692 | # asm 2: pand <xmm14=%xmm11,<xmm7=%xmm7 | ||
3693 | pand %xmm11,%xmm7 | ||
3694 | |||
3695 | # qhasm: xmm1 &= xmm15 | ||
3696 | # asm 1: pand <xmm15=int6464#14,<xmm1=int6464#2 | ||
3697 | # asm 2: pand <xmm15=%xmm13,<xmm1=%xmm1 | ||
3698 | pand %xmm13,%xmm1 | ||
3699 | |||
3700 | # qhasm: xmm7 ^= xmm1 | ||
3701 | # asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8 | ||
3702 | # asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7 | ||
3703 | pxor %xmm1,%xmm7 | ||
3704 | |||
3705 | # qhasm: xmm1 ^= xmm11 | ||
3706 | # asm 1: pxor <xmm11=int6464#11,<xmm1=int6464#2 | ||
3707 | # asm 2: pxor <xmm11=%xmm10,<xmm1=%xmm1 | ||
3708 | pxor %xmm10,%xmm1 | ||
3709 | |||
3710 | # qhasm: xmm7 ^= xmm12 | ||
3711 | # asm 1: pxor <xmm12=int6464#9,<xmm7=int6464#8 | ||
3712 | # asm 2: pxor <xmm12=%xmm8,<xmm7=%xmm7 | ||
3713 | pxor %xmm8,%xmm7 | ||
3714 | |||
3715 | # qhasm: xmm4 ^= xmm12 | ||
3716 | # asm 1: pxor <xmm12=int6464#9,<xmm4=int6464#5 | ||
3717 | # asm 2: pxor <xmm12=%xmm8,<xmm4=%xmm4 | ||
3718 | pxor %xmm8,%xmm4 | ||
3719 | |||
3720 | # qhasm: xmm1 ^= xmm8 | ||
3721 | # asm 1: pxor <xmm8=int6464#10,<xmm1=int6464#2 | ||
3722 | # asm 2: pxor <xmm8=%xmm9,<xmm1=%xmm1 | ||
3723 | pxor %xmm9,%xmm1 | ||
3724 | |||
3725 | # qhasm: xmm2 ^= xmm8 | ||
3726 | # asm 1: pxor <xmm8=int6464#10,<xmm2=int6464#3 | ||
3727 | # asm 2: pxor <xmm8=%xmm9,<xmm2=%xmm2 | ||
3728 | pxor %xmm9,%xmm2 | ||
3729 | |||
3730 | # qhasm: xmm7 ^= xmm0 | ||
3731 | # asm 1: pxor <xmm0=int6464#1,<xmm7=int6464#8 | ||
3732 | # asm 2: pxor <xmm0=%xmm0,<xmm7=%xmm7 | ||
3733 | pxor %xmm0,%xmm7 | ||
3734 | |||
3735 | # qhasm: xmm1 ^= xmm6 | ||
3736 | # asm 1: pxor <xmm6=int6464#7,<xmm1=int6464#2 | ||
3737 | # asm 2: pxor <xmm6=%xmm6,<xmm1=%xmm1 | ||
3738 | pxor %xmm6,%xmm1 | ||
3739 | |||
3740 | # qhasm: xmm4 ^= xmm7 | ||
3741 | # asm 1: pxor <xmm7=int6464#8,<xmm4=int6464#5 | ||
3742 | # asm 2: pxor <xmm7=%xmm7,<xmm4=%xmm4 | ||
3743 | pxor %xmm7,%xmm4 | ||
3744 | |||
3745 | # qhasm: xmm6 ^= xmm0 | ||
3746 | # asm 1: pxor <xmm0=int6464#1,<xmm6=int6464#7 | ||
3747 | # asm 2: pxor <xmm0=%xmm0,<xmm6=%xmm6 | ||
3748 | pxor %xmm0,%xmm6 | ||
3749 | |||
3750 | # qhasm: xmm0 ^= xmm1 | ||
3751 | # asm 1: pxor <xmm1=int6464#2,<xmm0=int6464#1 | ||
3752 | # asm 2: pxor <xmm1=%xmm1,<xmm0=%xmm0 | ||
3753 | pxor %xmm1,%xmm0 | ||
3754 | |||
3755 | # qhasm: xmm1 ^= xmm5 | ||
3756 | # asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2 | ||
3757 | # asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1 | ||
3758 | pxor %xmm5,%xmm1 | ||
3759 | |||
3760 | # qhasm: xmm5 ^= xmm2 | ||
3761 | # asm 1: pxor <xmm2=int6464#3,<xmm5=int6464#6 | ||
3762 | # asm 2: pxor <xmm2=%xmm2,<xmm5=%xmm5 | ||
3763 | pxor %xmm2,%xmm5 | ||
3764 | |||
3765 | # qhasm: xmm4 ^= xmm5 | ||
3766 | # asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5 | ||
3767 | # asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4 | ||
3768 | pxor %xmm5,%xmm4 | ||
3769 | |||
3770 | # qhasm: xmm2 ^= xmm3 | ||
3771 | # asm 1: pxor <xmm3=int6464#4,<xmm2=int6464#3 | ||
3772 | # asm 2: pxor <xmm3=%xmm3,<xmm2=%xmm2 | ||
3773 | pxor %xmm3,%xmm2 | ||
3774 | |||
3775 | # qhasm: xmm3 ^= xmm5 | ||
3776 | # asm 1: pxor <xmm5=int6464#6,<xmm3=int6464#4 | ||
3777 | # asm 2: pxor <xmm5=%xmm5,<xmm3=%xmm3 | ||
3778 | pxor %xmm5,%xmm3 | ||
3779 | |||
3780 | # qhasm: xmm6 ^= xmm3 | ||
3781 | # asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7 | ||
3782 | # asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6 | ||
3783 | pxor %xmm3,%xmm6 | ||
3784 | |||
3785 | # qhasm: xmm8 = shuffle dwords of xmm0 by 0x93 | ||
3786 | # asm 1: pshufd $0x93,<xmm0=int6464#1,>xmm8=int6464#9 | ||
3787 | # asm 2: pshufd $0x93,<xmm0=%xmm0,>xmm8=%xmm8 | ||
3788 | pshufd $0x93,%xmm0,%xmm8 | ||
3789 | |||
3790 | # qhasm: xmm9 = shuffle dwords of xmm1 by 0x93 | ||
3791 | # asm 1: pshufd $0x93,<xmm1=int6464#2,>xmm9=int6464#10 | ||
3792 | # asm 2: pshufd $0x93,<xmm1=%xmm1,>xmm9=%xmm9 | ||
3793 | pshufd $0x93,%xmm1,%xmm9 | ||
3794 | |||
3795 | # qhasm: xmm10 = shuffle dwords of xmm4 by 0x93 | ||
3796 | # asm 1: pshufd $0x93,<xmm4=int6464#5,>xmm10=int6464#11 | ||
3797 | # asm 2: pshufd $0x93,<xmm4=%xmm4,>xmm10=%xmm10 | ||
3798 | pshufd $0x93,%xmm4,%xmm10 | ||
3799 | |||
3800 | # qhasm: xmm11 = shuffle dwords of xmm6 by 0x93 | ||
3801 | # asm 1: pshufd $0x93,<xmm6=int6464#7,>xmm11=int6464#12 | ||
3802 | # asm 2: pshufd $0x93,<xmm6=%xmm6,>xmm11=%xmm11 | ||
3803 | pshufd $0x93,%xmm6,%xmm11 | ||
3804 | |||
3805 | # qhasm: xmm12 = shuffle dwords of xmm3 by 0x93 | ||
3806 | # asm 1: pshufd $0x93,<xmm3=int6464#4,>xmm12=int6464#13 | ||
3807 | # asm 2: pshufd $0x93,<xmm3=%xmm3,>xmm12=%xmm12 | ||
3808 | pshufd $0x93,%xmm3,%xmm12 | ||
3809 | |||
3810 | # qhasm: xmm13 = shuffle dwords of xmm7 by 0x93 | ||
3811 | # asm 1: pshufd $0x93,<xmm7=int6464#8,>xmm13=int6464#14 | ||
3812 | # asm 2: pshufd $0x93,<xmm7=%xmm7,>xmm13=%xmm13 | ||
3813 | pshufd $0x93,%xmm7,%xmm13 | ||
3814 | |||
3815 | # qhasm: xmm14 = shuffle dwords of xmm2 by 0x93 | ||
3816 | # asm 1: pshufd $0x93,<xmm2=int6464#3,>xmm14=int6464#15 | ||
3817 | # asm 2: pshufd $0x93,<xmm2=%xmm2,>xmm14=%xmm14 | ||
3818 | pshufd $0x93,%xmm2,%xmm14 | ||
3819 | |||
3820 | # qhasm: xmm15 = shuffle dwords of xmm5 by 0x93 | ||
3821 | # asm 1: pshufd $0x93,<xmm5=int6464#6,>xmm15=int6464#16 | ||
3822 | # asm 2: pshufd $0x93,<xmm5=%xmm5,>xmm15=%xmm15 | ||
3823 | pshufd $0x93,%xmm5,%xmm15 | ||
3824 | |||
3825 | # qhasm: xmm0 ^= xmm8 | ||
3826 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
3827 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
3828 | pxor %xmm8,%xmm0 | ||
3829 | |||
3830 | # qhasm: xmm1 ^= xmm9 | ||
3831 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
3832 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
3833 | pxor %xmm9,%xmm1 | ||
3834 | |||
3835 | # qhasm: xmm4 ^= xmm10 | ||
3836 | # asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5 | ||
3837 | # asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4 | ||
3838 | pxor %xmm10,%xmm4 | ||
3839 | |||
3840 | # qhasm: xmm6 ^= xmm11 | ||
3841 | # asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7 | ||
3842 | # asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6 | ||
3843 | pxor %xmm11,%xmm6 | ||
3844 | |||
3845 | # qhasm: xmm3 ^= xmm12 | ||
3846 | # asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4 | ||
3847 | # asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3 | ||
3848 | pxor %xmm12,%xmm3 | ||
3849 | |||
3850 | # qhasm: xmm7 ^= xmm13 | ||
3851 | # asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8 | ||
3852 | # asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7 | ||
3853 | pxor %xmm13,%xmm7 | ||
3854 | |||
3855 | # qhasm: xmm2 ^= xmm14 | ||
3856 | # asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3 | ||
3857 | # asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2 | ||
3858 | pxor %xmm14,%xmm2 | ||
3859 | |||
3860 | # qhasm: xmm5 ^= xmm15 | ||
3861 | # asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6 | ||
3862 | # asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5 | ||
3863 | pxor %xmm15,%xmm5 | ||
3864 | |||
3865 | # qhasm: xmm8 ^= xmm5 | ||
3866 | # asm 1: pxor <xmm5=int6464#6,<xmm8=int6464#9 | ||
3867 | # asm 2: pxor <xmm5=%xmm5,<xmm8=%xmm8 | ||
3868 | pxor %xmm5,%xmm8 | ||
3869 | |||
3870 | # qhasm: xmm9 ^= xmm0 | ||
3871 | # asm 1: pxor <xmm0=int6464#1,<xmm9=int6464#10 | ||
3872 | # asm 2: pxor <xmm0=%xmm0,<xmm9=%xmm9 | ||
3873 | pxor %xmm0,%xmm9 | ||
3874 | |||
3875 | # qhasm: xmm10 ^= xmm1 | ||
3876 | # asm 1: pxor <xmm1=int6464#2,<xmm10=int6464#11 | ||
3877 | # asm 2: pxor <xmm1=%xmm1,<xmm10=%xmm10 | ||
3878 | pxor %xmm1,%xmm10 | ||
3879 | |||
3880 | # qhasm: xmm9 ^= xmm5 | ||
3881 | # asm 1: pxor <xmm5=int6464#6,<xmm9=int6464#10 | ||
3882 | # asm 2: pxor <xmm5=%xmm5,<xmm9=%xmm9 | ||
3883 | pxor %xmm5,%xmm9 | ||
3884 | |||
3885 | # qhasm: xmm11 ^= xmm4 | ||
3886 | # asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#12 | ||
3887 | # asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm11 | ||
3888 | pxor %xmm4,%xmm11 | ||
3889 | |||
3890 | # qhasm: xmm12 ^= xmm6 | ||
3891 | # asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#13 | ||
3892 | # asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm12 | ||
3893 | pxor %xmm6,%xmm12 | ||
3894 | |||
3895 | # qhasm: xmm13 ^= xmm3 | ||
3896 | # asm 1: pxor <xmm3=int6464#4,<xmm13=int6464#14 | ||
3897 | # asm 2: pxor <xmm3=%xmm3,<xmm13=%xmm13 | ||
3898 | pxor %xmm3,%xmm13 | ||
3899 | |||
3900 | # qhasm: xmm11 ^= xmm5 | ||
3901 | # asm 1: pxor <xmm5=int6464#6,<xmm11=int6464#12 | ||
3902 | # asm 2: pxor <xmm5=%xmm5,<xmm11=%xmm11 | ||
3903 | pxor %xmm5,%xmm11 | ||
3904 | |||
3905 | # qhasm: xmm14 ^= xmm7 | ||
3906 | # asm 1: pxor <xmm7=int6464#8,<xmm14=int6464#15 | ||
3907 | # asm 2: pxor <xmm7=%xmm7,<xmm14=%xmm14 | ||
3908 | pxor %xmm7,%xmm14 | ||
3909 | |||
3910 | # qhasm: xmm15 ^= xmm2 | ||
3911 | # asm 1: pxor <xmm2=int6464#3,<xmm15=int6464#16 | ||
3912 | # asm 2: pxor <xmm2=%xmm2,<xmm15=%xmm15 | ||
3913 | pxor %xmm2,%xmm15 | ||
3914 | |||
3915 | # qhasm: xmm12 ^= xmm5 | ||
3916 | # asm 1: pxor <xmm5=int6464#6,<xmm12=int6464#13 | ||
3917 | # asm 2: pxor <xmm5=%xmm5,<xmm12=%xmm12 | ||
3918 | pxor %xmm5,%xmm12 | ||
3919 | |||
3920 | # qhasm: xmm0 = shuffle dwords of xmm0 by 0x4E | ||
3921 | # asm 1: pshufd $0x4E,<xmm0=int6464#1,>xmm0=int6464#1 | ||
3922 | # asm 2: pshufd $0x4E,<xmm0=%xmm0,>xmm0=%xmm0 | ||
3923 | pshufd $0x4E,%xmm0,%xmm0 | ||
3924 | |||
3925 | # qhasm: xmm1 = shuffle dwords of xmm1 by 0x4E | ||
3926 | # asm 1: pshufd $0x4E,<xmm1=int6464#2,>xmm1=int6464#2 | ||
3927 | # asm 2: pshufd $0x4E,<xmm1=%xmm1,>xmm1=%xmm1 | ||
3928 | pshufd $0x4E,%xmm1,%xmm1 | ||
3929 | |||
3930 | # qhasm: xmm4 = shuffle dwords of xmm4 by 0x4E | ||
3931 | # asm 1: pshufd $0x4E,<xmm4=int6464#5,>xmm4=int6464#5 | ||
3932 | # asm 2: pshufd $0x4E,<xmm4=%xmm4,>xmm4=%xmm4 | ||
3933 | pshufd $0x4E,%xmm4,%xmm4 | ||
3934 | |||
3935 | # qhasm: xmm6 = shuffle dwords of xmm6 by 0x4E | ||
3936 | # asm 1: pshufd $0x4E,<xmm6=int6464#7,>xmm6=int6464#7 | ||
3937 | # asm 2: pshufd $0x4E,<xmm6=%xmm6,>xmm6=%xmm6 | ||
3938 | pshufd $0x4E,%xmm6,%xmm6 | ||
3939 | |||
3940 | # qhasm: xmm3 = shuffle dwords of xmm3 by 0x4E | ||
3941 | # asm 1: pshufd $0x4E,<xmm3=int6464#4,>xmm3=int6464#4 | ||
3942 | # asm 2: pshufd $0x4E,<xmm3=%xmm3,>xmm3=%xmm3 | ||
3943 | pshufd $0x4E,%xmm3,%xmm3 | ||
3944 | |||
3945 | # qhasm: xmm7 = shuffle dwords of xmm7 by 0x4E | ||
3946 | # asm 1: pshufd $0x4E,<xmm7=int6464#8,>xmm7=int6464#8 | ||
3947 | # asm 2: pshufd $0x4E,<xmm7=%xmm7,>xmm7=%xmm7 | ||
3948 | pshufd $0x4E,%xmm7,%xmm7 | ||
3949 | |||
3950 | # qhasm: xmm2 = shuffle dwords of xmm2 by 0x4E | ||
3951 | # asm 1: pshufd $0x4E,<xmm2=int6464#3,>xmm2=int6464#3 | ||
3952 | # asm 2: pshufd $0x4E,<xmm2=%xmm2,>xmm2=%xmm2 | ||
3953 | pshufd $0x4E,%xmm2,%xmm2 | ||
3954 | |||
3955 | # qhasm: xmm5 = shuffle dwords of xmm5 by 0x4E | ||
3956 | # asm 1: pshufd $0x4E,<xmm5=int6464#6,>xmm5=int6464#6 | ||
3957 | # asm 2: pshufd $0x4E,<xmm5=%xmm5,>xmm5=%xmm5 | ||
3958 | pshufd $0x4E,%xmm5,%xmm5 | ||
3959 | |||
3960 | # qhasm: xmm8 ^= xmm0 | ||
3961 | # asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9 | ||
3962 | # asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8 | ||
3963 | pxor %xmm0,%xmm8 | ||
3964 | |||
3965 | # qhasm: xmm9 ^= xmm1 | ||
3966 | # asm 1: pxor <xmm1=int6464#2,<xmm9=int6464#10 | ||
3967 | # asm 2: pxor <xmm1=%xmm1,<xmm9=%xmm9 | ||
3968 | pxor %xmm1,%xmm9 | ||
3969 | |||
3970 | # qhasm: xmm10 ^= xmm4 | ||
3971 | # asm 1: pxor <xmm4=int6464#5,<xmm10=int6464#11 | ||
3972 | # asm 2: pxor <xmm4=%xmm4,<xmm10=%xmm10 | ||
3973 | pxor %xmm4,%xmm10 | ||
3974 | |||
3975 | # qhasm: xmm11 ^= xmm6 | ||
3976 | # asm 1: pxor <xmm6=int6464#7,<xmm11=int6464#12 | ||
3977 | # asm 2: pxor <xmm6=%xmm6,<xmm11=%xmm11 | ||
3978 | pxor %xmm6,%xmm11 | ||
3979 | |||
3980 | # qhasm: xmm12 ^= xmm3 | ||
3981 | # asm 1: pxor <xmm3=int6464#4,<xmm12=int6464#13 | ||
3982 | # asm 2: pxor <xmm3=%xmm3,<xmm12=%xmm12 | ||
3983 | pxor %xmm3,%xmm12 | ||
3984 | |||
3985 | # qhasm: xmm13 ^= xmm7 | ||
3986 | # asm 1: pxor <xmm7=int6464#8,<xmm13=int6464#14 | ||
3987 | # asm 2: pxor <xmm7=%xmm7,<xmm13=%xmm13 | ||
3988 | pxor %xmm7,%xmm13 | ||
3989 | |||
3990 | # qhasm: xmm14 ^= xmm2 | ||
3991 | # asm 1: pxor <xmm2=int6464#3,<xmm14=int6464#15 | ||
3992 | # asm 2: pxor <xmm2=%xmm2,<xmm14=%xmm14 | ||
3993 | pxor %xmm2,%xmm14 | ||
3994 | |||
3995 | # qhasm: xmm15 ^= xmm5 | ||
3996 | # asm 1: pxor <xmm5=int6464#6,<xmm15=int6464#16 | ||
3997 | # asm 2: pxor <xmm5=%xmm5,<xmm15=%xmm15 | ||
3998 | pxor %xmm5,%xmm15 | ||
3999 | |||
4000 | # qhasm: xmm8 ^= *(int128 *)(c + 384) | ||
4001 | # asm 1: pxor 384(<c=int64#4),<xmm8=int6464#9 | ||
4002 | # asm 2: pxor 384(<c=%rcx),<xmm8=%xmm8 | ||
4003 | pxor 384(%rcx),%xmm8 | ||
4004 | |||
4005 | # qhasm: shuffle bytes of xmm8 by SR | ||
4006 | # asm 1: pshufb SR,<xmm8=int6464#9 | ||
4007 | # asm 2: pshufb SR,<xmm8=%xmm8 | ||
4008 | pshufb SR,%xmm8 | ||
4009 | |||
4010 | # qhasm: xmm9 ^= *(int128 *)(c + 400) | ||
4011 | # asm 1: pxor 400(<c=int64#4),<xmm9=int6464#10 | ||
4012 | # asm 2: pxor 400(<c=%rcx),<xmm9=%xmm9 | ||
4013 | pxor 400(%rcx),%xmm9 | ||
4014 | |||
4015 | # qhasm: shuffle bytes of xmm9 by SR | ||
4016 | # asm 1: pshufb SR,<xmm9=int6464#10 | ||
4017 | # asm 2: pshufb SR,<xmm9=%xmm9 | ||
4018 | pshufb SR,%xmm9 | ||
4019 | |||
4020 | # qhasm: xmm10 ^= *(int128 *)(c + 416) | ||
4021 | # asm 1: pxor 416(<c=int64#4),<xmm10=int6464#11 | ||
4022 | # asm 2: pxor 416(<c=%rcx),<xmm10=%xmm10 | ||
4023 | pxor 416(%rcx),%xmm10 | ||
4024 | |||
4025 | # qhasm: shuffle bytes of xmm10 by SR | ||
4026 | # asm 1: pshufb SR,<xmm10=int6464#11 | ||
4027 | # asm 2: pshufb SR,<xmm10=%xmm10 | ||
4028 | pshufb SR,%xmm10 | ||
4029 | |||
4030 | # qhasm: xmm11 ^= *(int128 *)(c + 432) | ||
4031 | # asm 1: pxor 432(<c=int64#4),<xmm11=int6464#12 | ||
4032 | # asm 2: pxor 432(<c=%rcx),<xmm11=%xmm11 | ||
4033 | pxor 432(%rcx),%xmm11 | ||
4034 | |||
4035 | # qhasm: shuffle bytes of xmm11 by SR | ||
4036 | # asm 1: pshufb SR,<xmm11=int6464#12 | ||
4037 | # asm 2: pshufb SR,<xmm11=%xmm11 | ||
4038 | pshufb SR,%xmm11 | ||
4039 | |||
4040 | # qhasm: xmm12 ^= *(int128 *)(c + 448) | ||
4041 | # asm 1: pxor 448(<c=int64#4),<xmm12=int6464#13 | ||
4042 | # asm 2: pxor 448(<c=%rcx),<xmm12=%xmm12 | ||
4043 | pxor 448(%rcx),%xmm12 | ||
4044 | |||
4045 | # qhasm: shuffle bytes of xmm12 by SR | ||
4046 | # asm 1: pshufb SR,<xmm12=int6464#13 | ||
4047 | # asm 2: pshufb SR,<xmm12=%xmm12 | ||
4048 | pshufb SR,%xmm12 | ||
4049 | |||
4050 | # qhasm: xmm13 ^= *(int128 *)(c + 464) | ||
4051 | # asm 1: pxor 464(<c=int64#4),<xmm13=int6464#14 | ||
4052 | # asm 2: pxor 464(<c=%rcx),<xmm13=%xmm13 | ||
4053 | pxor 464(%rcx),%xmm13 | ||
4054 | |||
4055 | # qhasm: shuffle bytes of xmm13 by SR | ||
4056 | # asm 1: pshufb SR,<xmm13=int6464#14 | ||
4057 | # asm 2: pshufb SR,<xmm13=%xmm13 | ||
4058 | pshufb SR,%xmm13 | ||
4059 | |||
4060 | # qhasm: xmm14 ^= *(int128 *)(c + 480) | ||
4061 | # asm 1: pxor 480(<c=int64#4),<xmm14=int6464#15 | ||
4062 | # asm 2: pxor 480(<c=%rcx),<xmm14=%xmm14 | ||
4063 | pxor 480(%rcx),%xmm14 | ||
4064 | |||
4065 | # qhasm: shuffle bytes of xmm14 by SR | ||
4066 | # asm 1: pshufb SR,<xmm14=int6464#15 | ||
4067 | # asm 2: pshufb SR,<xmm14=%xmm14 | ||
4068 | pshufb SR,%xmm14 | ||
4069 | |||
4070 | # qhasm: xmm15 ^= *(int128 *)(c + 496) | ||
4071 | # asm 1: pxor 496(<c=int64#4),<xmm15=int6464#16 | ||
4072 | # asm 2: pxor 496(<c=%rcx),<xmm15=%xmm15 | ||
4073 | pxor 496(%rcx),%xmm15 | ||
4074 | |||
4075 | # qhasm: shuffle bytes of xmm15 by SR | ||
4076 | # asm 1: pshufb SR,<xmm15=int6464#16 | ||
4077 | # asm 2: pshufb SR,<xmm15=%xmm15 | ||
4078 | pshufb SR,%xmm15 | ||
4079 | |||
4080 | # qhasm: xmm13 ^= xmm14 | ||
4081 | # asm 1: pxor <xmm14=int6464#15,<xmm13=int6464#14 | ||
4082 | # asm 2: pxor <xmm14=%xmm14,<xmm13=%xmm13 | ||
4083 | pxor %xmm14,%xmm13 | ||
4084 | |||
4085 | # qhasm: xmm10 ^= xmm9 | ||
4086 | # asm 1: pxor <xmm9=int6464#10,<xmm10=int6464#11 | ||
4087 | # asm 2: pxor <xmm9=%xmm9,<xmm10=%xmm10 | ||
4088 | pxor %xmm9,%xmm10 | ||
4089 | |||
4090 | # qhasm: xmm13 ^= xmm8 | ||
4091 | # asm 1: pxor <xmm8=int6464#9,<xmm13=int6464#14 | ||
4092 | # asm 2: pxor <xmm8=%xmm8,<xmm13=%xmm13 | ||
4093 | pxor %xmm8,%xmm13 | ||
4094 | |||
4095 | # qhasm: xmm14 ^= xmm10 | ||
4096 | # asm 1: pxor <xmm10=int6464#11,<xmm14=int6464#15 | ||
4097 | # asm 2: pxor <xmm10=%xmm10,<xmm14=%xmm14 | ||
4098 | pxor %xmm10,%xmm14 | ||
4099 | |||
4100 | # qhasm: xmm11 ^= xmm8 | ||
4101 | # asm 1: pxor <xmm8=int6464#9,<xmm11=int6464#12 | ||
4102 | # asm 2: pxor <xmm8=%xmm8,<xmm11=%xmm11 | ||
4103 | pxor %xmm8,%xmm11 | ||
4104 | |||
4105 | # qhasm: xmm14 ^= xmm11 | ||
4106 | # asm 1: pxor <xmm11=int6464#12,<xmm14=int6464#15 | ||
4107 | # asm 2: pxor <xmm11=%xmm11,<xmm14=%xmm14 | ||
4108 | pxor %xmm11,%xmm14 | ||
4109 | |||
4110 | # qhasm: xmm11 ^= xmm15 | ||
4111 | # asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#12 | ||
4112 | # asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm11 | ||
4113 | pxor %xmm15,%xmm11 | ||
4114 | |||
4115 | # qhasm: xmm11 ^= xmm12 | ||
4116 | # asm 1: pxor <xmm12=int6464#13,<xmm11=int6464#12 | ||
4117 | # asm 2: pxor <xmm12=%xmm12,<xmm11=%xmm11 | ||
4118 | pxor %xmm12,%xmm11 | ||
4119 | |||
4120 | # qhasm: xmm15 ^= xmm13 | ||
4121 | # asm 1: pxor <xmm13=int6464#14,<xmm15=int6464#16 | ||
4122 | # asm 2: pxor <xmm13=%xmm13,<xmm15=%xmm15 | ||
4123 | pxor %xmm13,%xmm15 | ||
4124 | |||
4125 | # qhasm: xmm11 ^= xmm9 | ||
4126 | # asm 1: pxor <xmm9=int6464#10,<xmm11=int6464#12 | ||
4127 | # asm 2: pxor <xmm9=%xmm9,<xmm11=%xmm11 | ||
4128 | pxor %xmm9,%xmm11 | ||
4129 | |||
4130 | # qhasm: xmm12 ^= xmm13 | ||
4131 | # asm 1: pxor <xmm13=int6464#14,<xmm12=int6464#13 | ||
4132 | # asm 2: pxor <xmm13=%xmm13,<xmm12=%xmm12 | ||
4133 | pxor %xmm13,%xmm12 | ||
4134 | |||
4135 | # qhasm: xmm10 ^= xmm15 | ||
4136 | # asm 1: pxor <xmm15=int6464#16,<xmm10=int6464#11 | ||
4137 | # asm 2: pxor <xmm15=%xmm15,<xmm10=%xmm10 | ||
4138 | pxor %xmm15,%xmm10 | ||
4139 | |||
4140 | # qhasm: xmm9 ^= xmm13 | ||
4141 | # asm 1: pxor <xmm13=int6464#14,<xmm9=int6464#10 | ||
4142 | # asm 2: pxor <xmm13=%xmm13,<xmm9=%xmm9 | ||
4143 | pxor %xmm13,%xmm9 | ||
4144 | |||
4145 | # qhasm: xmm3 = xmm15 | ||
4146 | # asm 1: movdqa <xmm15=int6464#16,>xmm3=int6464#1 | ||
4147 | # asm 2: movdqa <xmm15=%xmm15,>xmm3=%xmm0 | ||
4148 | movdqa %xmm15,%xmm0 | ||
4149 | |||
4150 | # qhasm: xmm2 = xmm9 | ||
4151 | # asm 1: movdqa <xmm9=int6464#10,>xmm2=int6464#2 | ||
4152 | # asm 2: movdqa <xmm9=%xmm9,>xmm2=%xmm1 | ||
4153 | movdqa %xmm9,%xmm1 | ||
4154 | |||
4155 | # qhasm: xmm1 = xmm13 | ||
4156 | # asm 1: movdqa <xmm13=int6464#14,>xmm1=int6464#3 | ||
4157 | # asm 2: movdqa <xmm13=%xmm13,>xmm1=%xmm2 | ||
4158 | movdqa %xmm13,%xmm2 | ||
4159 | |||
4160 | # qhasm: xmm5 = xmm10 | ||
4161 | # asm 1: movdqa <xmm10=int6464#11,>xmm5=int6464#4 | ||
4162 | # asm 2: movdqa <xmm10=%xmm10,>xmm5=%xmm3 | ||
4163 | movdqa %xmm10,%xmm3 | ||
4164 | |||
4165 | # qhasm: xmm4 = xmm14 | ||
4166 | # asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#5 | ||
4167 | # asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm4 | ||
4168 | movdqa %xmm14,%xmm4 | ||
4169 | |||
4170 | # qhasm: xmm3 ^= xmm12 | ||
4171 | # asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#1 | ||
4172 | # asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm0 | ||
4173 | pxor %xmm12,%xmm0 | ||
4174 | |||
4175 | # qhasm: xmm2 ^= xmm10 | ||
4176 | # asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#2 | ||
4177 | # asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm1 | ||
4178 | pxor %xmm10,%xmm1 | ||
4179 | |||
4180 | # qhasm: xmm1 ^= xmm11 | ||
4181 | # asm 1: pxor <xmm11=int6464#12,<xmm1=int6464#3 | ||
4182 | # asm 2: pxor <xmm11=%xmm11,<xmm1=%xmm2 | ||
4183 | pxor %xmm11,%xmm2 | ||
4184 | |||
4185 | # qhasm: xmm5 ^= xmm12 | ||
4186 | # asm 1: pxor <xmm12=int6464#13,<xmm5=int6464#4 | ||
4187 | # asm 2: pxor <xmm12=%xmm12,<xmm5=%xmm3 | ||
4188 | pxor %xmm12,%xmm3 | ||
4189 | |||
4190 | # qhasm: xmm4 ^= xmm8 | ||
4191 | # asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#5 | ||
4192 | # asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm4 | ||
4193 | pxor %xmm8,%xmm4 | ||
4194 | |||
4195 | # qhasm: xmm6 = xmm3 | ||
4196 | # asm 1: movdqa <xmm3=int6464#1,>xmm6=int6464#6 | ||
4197 | # asm 2: movdqa <xmm3=%xmm0,>xmm6=%xmm5 | ||
4198 | movdqa %xmm0,%xmm5 | ||
4199 | |||
4200 | # qhasm: xmm0 = xmm2 | ||
4201 | # asm 1: movdqa <xmm2=int6464#2,>xmm0=int6464#7 | ||
4202 | # asm 2: movdqa <xmm2=%xmm1,>xmm0=%xmm6 | ||
4203 | movdqa %xmm1,%xmm6 | ||
4204 | |||
4205 | # qhasm: xmm7 = xmm3 | ||
4206 | # asm 1: movdqa <xmm3=int6464#1,>xmm7=int6464#8 | ||
4207 | # asm 2: movdqa <xmm3=%xmm0,>xmm7=%xmm7 | ||
4208 | movdqa %xmm0,%xmm7 | ||
4209 | |||
4210 | # qhasm: xmm2 |= xmm1 | ||
4211 | # asm 1: por <xmm1=int6464#3,<xmm2=int6464#2 | ||
4212 | # asm 2: por <xmm1=%xmm2,<xmm2=%xmm1 | ||
4213 | por %xmm2,%xmm1 | ||
4214 | |||
4215 | # qhasm: xmm3 |= xmm4 | ||
4216 | # asm 1: por <xmm4=int6464#5,<xmm3=int6464#1 | ||
4217 | # asm 2: por <xmm4=%xmm4,<xmm3=%xmm0 | ||
4218 | por %xmm4,%xmm0 | ||
4219 | |||
4220 | # qhasm: xmm7 ^= xmm0 | ||
4221 | # asm 1: pxor <xmm0=int6464#7,<xmm7=int6464#8 | ||
4222 | # asm 2: pxor <xmm0=%xmm6,<xmm7=%xmm7 | ||
4223 | pxor %xmm6,%xmm7 | ||
4224 | |||
4225 | # qhasm: xmm6 &= xmm4 | ||
4226 | # asm 1: pand <xmm4=int6464#5,<xmm6=int6464#6 | ||
4227 | # asm 2: pand <xmm4=%xmm4,<xmm6=%xmm5 | ||
4228 | pand %xmm4,%xmm5 | ||
4229 | |||
4230 | # qhasm: xmm0 &= xmm1 | ||
4231 | # asm 1: pand <xmm1=int6464#3,<xmm0=int6464#7 | ||
4232 | # asm 2: pand <xmm1=%xmm2,<xmm0=%xmm6 | ||
4233 | pand %xmm2,%xmm6 | ||
4234 | |||
4235 | # qhasm: xmm4 ^= xmm1 | ||
4236 | # asm 1: pxor <xmm1=int6464#3,<xmm4=int6464#5 | ||
4237 | # asm 2: pxor <xmm1=%xmm2,<xmm4=%xmm4 | ||
4238 | pxor %xmm2,%xmm4 | ||
4239 | |||
4240 | # qhasm: xmm7 &= xmm4 | ||
4241 | # asm 1: pand <xmm4=int6464#5,<xmm7=int6464#8 | ||
4242 | # asm 2: pand <xmm4=%xmm4,<xmm7=%xmm7 | ||
4243 | pand %xmm4,%xmm7 | ||
4244 | |||
4245 | # qhasm: xmm4 = xmm11 | ||
4246 | # asm 1: movdqa <xmm11=int6464#12,>xmm4=int6464#3 | ||
4247 | # asm 2: movdqa <xmm11=%xmm11,>xmm4=%xmm2 | ||
4248 | movdqa %xmm11,%xmm2 | ||
4249 | |||
4250 | # qhasm: xmm4 ^= xmm8 | ||
4251 | # asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#3 | ||
4252 | # asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm2 | ||
4253 | pxor %xmm8,%xmm2 | ||
4254 | |||
4255 | # qhasm: xmm5 &= xmm4 | ||
4256 | # asm 1: pand <xmm4=int6464#3,<xmm5=int6464#4 | ||
4257 | # asm 2: pand <xmm4=%xmm2,<xmm5=%xmm3 | ||
4258 | pand %xmm2,%xmm3 | ||
4259 | |||
4260 | # qhasm: xmm3 ^= xmm5 | ||
4261 | # asm 1: pxor <xmm5=int6464#4,<xmm3=int6464#1 | ||
4262 | # asm 2: pxor <xmm5=%xmm3,<xmm3=%xmm0 | ||
4263 | pxor %xmm3,%xmm0 | ||
4264 | |||
4265 | # qhasm: xmm2 ^= xmm5 | ||
4266 | # asm 1: pxor <xmm5=int6464#4,<xmm2=int6464#2 | ||
4267 | # asm 2: pxor <xmm5=%xmm3,<xmm2=%xmm1 | ||
4268 | pxor %xmm3,%xmm1 | ||
4269 | |||
4270 | # qhasm: xmm5 = xmm15 | ||
4271 | # asm 1: movdqa <xmm15=int6464#16,>xmm5=int6464#3 | ||
4272 | # asm 2: movdqa <xmm15=%xmm15,>xmm5=%xmm2 | ||
4273 | movdqa %xmm15,%xmm2 | ||
4274 | |||
4275 | # qhasm: xmm5 ^= xmm9 | ||
4276 | # asm 1: pxor <xmm9=int6464#10,<xmm5=int6464#3 | ||
4277 | # asm 2: pxor <xmm9=%xmm9,<xmm5=%xmm2 | ||
4278 | pxor %xmm9,%xmm2 | ||
4279 | |||
4280 | # qhasm: xmm4 = xmm13 | ||
4281 | # asm 1: movdqa <xmm13=int6464#14,>xmm4=int6464#4 | ||
4282 | # asm 2: movdqa <xmm13=%xmm13,>xmm4=%xmm3 | ||
4283 | movdqa %xmm13,%xmm3 | ||
4284 | |||
4285 | # qhasm: xmm1 = xmm5 | ||
4286 | # asm 1: movdqa <xmm5=int6464#3,>xmm1=int6464#5 | ||
4287 | # asm 2: movdqa <xmm5=%xmm2,>xmm1=%xmm4 | ||
4288 | movdqa %xmm2,%xmm4 | ||
4289 | |||
4290 | # qhasm: xmm4 ^= xmm14 | ||
4291 | # asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#4 | ||
4292 | # asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm3 | ||
4293 | pxor %xmm14,%xmm3 | ||
4294 | |||
4295 | # qhasm: xmm1 |= xmm4 | ||
4296 | # asm 1: por <xmm4=int6464#4,<xmm1=int6464#5 | ||
4297 | # asm 2: por <xmm4=%xmm3,<xmm1=%xmm4 | ||
4298 | por %xmm3,%xmm4 | ||
4299 | |||
4300 | # qhasm: xmm5 &= xmm4 | ||
4301 | # asm 1: pand <xmm4=int6464#4,<xmm5=int6464#3 | ||
4302 | # asm 2: pand <xmm4=%xmm3,<xmm5=%xmm2 | ||
4303 | pand %xmm3,%xmm2 | ||
4304 | |||
4305 | # qhasm: xmm0 ^= xmm5 | ||
4306 | # asm 1: pxor <xmm5=int6464#3,<xmm0=int6464#7 | ||
4307 | # asm 2: pxor <xmm5=%xmm2,<xmm0=%xmm6 | ||
4308 | pxor %xmm2,%xmm6 | ||
4309 | |||
4310 | # qhasm: xmm3 ^= xmm7 | ||
4311 | # asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#1 | ||
4312 | # asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm0 | ||
4313 | pxor %xmm7,%xmm0 | ||
4314 | |||
4315 | # qhasm: xmm2 ^= xmm6 | ||
4316 | # asm 1: pxor <xmm6=int6464#6,<xmm2=int6464#2 | ||
4317 | # asm 2: pxor <xmm6=%xmm5,<xmm2=%xmm1 | ||
4318 | pxor %xmm5,%xmm1 | ||
4319 | |||
4320 | # qhasm: xmm1 ^= xmm7 | ||
4321 | # asm 1: pxor <xmm7=int6464#8,<xmm1=int6464#5 | ||
4322 | # asm 2: pxor <xmm7=%xmm7,<xmm1=%xmm4 | ||
4323 | pxor %xmm7,%xmm4 | ||
4324 | |||
4325 | # qhasm: xmm0 ^= xmm6 | ||
4326 | # asm 1: pxor <xmm6=int6464#6,<xmm0=int6464#7 | ||
4327 | # asm 2: pxor <xmm6=%xmm5,<xmm0=%xmm6 | ||
4328 | pxor %xmm5,%xmm6 | ||
4329 | |||
4330 | # qhasm: xmm1 ^= xmm6 | ||
4331 | # asm 1: pxor <xmm6=int6464#6,<xmm1=int6464#5 | ||
4332 | # asm 2: pxor <xmm6=%xmm5,<xmm1=%xmm4 | ||
4333 | pxor %xmm5,%xmm4 | ||
4334 | |||
4335 | # qhasm: xmm4 = xmm10 | ||
4336 | # asm 1: movdqa <xmm10=int6464#11,>xmm4=int6464#3 | ||
4337 | # asm 2: movdqa <xmm10=%xmm10,>xmm4=%xmm2 | ||
4338 | movdqa %xmm10,%xmm2 | ||
4339 | |||
4340 | # qhasm: xmm5 = xmm12 | ||
4341 | # asm 1: movdqa <xmm12=int6464#13,>xmm5=int6464#4 | ||
4342 | # asm 2: movdqa <xmm12=%xmm12,>xmm5=%xmm3 | ||
4343 | movdqa %xmm12,%xmm3 | ||
4344 | |||
4345 | # qhasm: xmm6 = xmm9 | ||
4346 | # asm 1: movdqa <xmm9=int6464#10,>xmm6=int6464#6 | ||
4347 | # asm 2: movdqa <xmm9=%xmm9,>xmm6=%xmm5 | ||
4348 | movdqa %xmm9,%xmm5 | ||
4349 | |||
4350 | # qhasm: xmm7 = xmm15 | ||
4351 | # asm 1: movdqa <xmm15=int6464#16,>xmm7=int6464#8 | ||
4352 | # asm 2: movdqa <xmm15=%xmm15,>xmm7=%xmm7 | ||
4353 | movdqa %xmm15,%xmm7 | ||
4354 | |||
4355 | # qhasm: xmm4 &= xmm11 | ||
4356 | # asm 1: pand <xmm11=int6464#12,<xmm4=int6464#3 | ||
4357 | # asm 2: pand <xmm11=%xmm11,<xmm4=%xmm2 | ||
4358 | pand %xmm11,%xmm2 | ||
4359 | |||
4360 | # qhasm: xmm5 &= xmm8 | ||
4361 | # asm 1: pand <xmm8=int6464#9,<xmm5=int6464#4 | ||
4362 | # asm 2: pand <xmm8=%xmm8,<xmm5=%xmm3 | ||
4363 | pand %xmm8,%xmm3 | ||
4364 | |||
4365 | # qhasm: xmm6 &= xmm13 | ||
4366 | # asm 1: pand <xmm13=int6464#14,<xmm6=int6464#6 | ||
4367 | # asm 2: pand <xmm13=%xmm13,<xmm6=%xmm5 | ||
4368 | pand %xmm13,%xmm5 | ||
4369 | |||
4370 | # qhasm: xmm7 |= xmm14 | ||
4371 | # asm 1: por <xmm14=int6464#15,<xmm7=int6464#8 | ||
4372 | # asm 2: por <xmm14=%xmm14,<xmm7=%xmm7 | ||
4373 | por %xmm14,%xmm7 | ||
4374 | |||
4375 | # qhasm: xmm3 ^= xmm4 | ||
4376 | # asm 1: pxor <xmm4=int6464#3,<xmm3=int6464#1 | ||
4377 | # asm 2: pxor <xmm4=%xmm2,<xmm3=%xmm0 | ||
4378 | pxor %xmm2,%xmm0 | ||
4379 | |||
4380 | # qhasm: xmm2 ^= xmm5 | ||
4381 | # asm 1: pxor <xmm5=int6464#4,<xmm2=int6464#2 | ||
4382 | # asm 2: pxor <xmm5=%xmm3,<xmm2=%xmm1 | ||
4383 | pxor %xmm3,%xmm1 | ||
4384 | |||
4385 | # qhasm: xmm1 ^= xmm6 | ||
4386 | # asm 1: pxor <xmm6=int6464#6,<xmm1=int6464#5 | ||
4387 | # asm 2: pxor <xmm6=%xmm5,<xmm1=%xmm4 | ||
4388 | pxor %xmm5,%xmm4 | ||
4389 | |||
4390 | # qhasm: xmm0 ^= xmm7 | ||
4391 | # asm 1: pxor <xmm7=int6464#8,<xmm0=int6464#7 | ||
4392 | # asm 2: pxor <xmm7=%xmm7,<xmm0=%xmm6 | ||
4393 | pxor %xmm7,%xmm6 | ||
4394 | |||
4395 | # qhasm: xmm4 = xmm3 | ||
4396 | # asm 1: movdqa <xmm3=int6464#1,>xmm4=int6464#3 | ||
4397 | # asm 2: movdqa <xmm3=%xmm0,>xmm4=%xmm2 | ||
4398 | movdqa %xmm0,%xmm2 | ||
4399 | |||
4400 | # qhasm: xmm4 ^= xmm2 | ||
4401 | # asm 1: pxor <xmm2=int6464#2,<xmm4=int6464#3 | ||
4402 | # asm 2: pxor <xmm2=%xmm1,<xmm4=%xmm2 | ||
4403 | pxor %xmm1,%xmm2 | ||
4404 | |||
4405 | # qhasm: xmm3 &= xmm1 | ||
4406 | # asm 1: pand <xmm1=int6464#5,<xmm3=int6464#1 | ||
4407 | # asm 2: pand <xmm1=%xmm4,<xmm3=%xmm0 | ||
4408 | pand %xmm4,%xmm0 | ||
4409 | |||
4410 | # qhasm: xmm6 = xmm0 | ||
4411 | # asm 1: movdqa <xmm0=int6464#7,>xmm6=int6464#4 | ||
4412 | # asm 2: movdqa <xmm0=%xmm6,>xmm6=%xmm3 | ||
4413 | movdqa %xmm6,%xmm3 | ||
4414 | |||
4415 | # qhasm: xmm6 ^= xmm3 | ||
4416 | # asm 1: pxor <xmm3=int6464#1,<xmm6=int6464#4 | ||
4417 | # asm 2: pxor <xmm3=%xmm0,<xmm6=%xmm3 | ||
4418 | pxor %xmm0,%xmm3 | ||
4419 | |||
4420 | # qhasm: xmm7 = xmm4 | ||
4421 | # asm 1: movdqa <xmm4=int6464#3,>xmm7=int6464#6 | ||
4422 | # asm 2: movdqa <xmm4=%xmm2,>xmm7=%xmm5 | ||
4423 | movdqa %xmm2,%xmm5 | ||
4424 | |||
4425 | # qhasm: xmm7 &= xmm6 | ||
4426 | # asm 1: pand <xmm6=int6464#4,<xmm7=int6464#6 | ||
4427 | # asm 2: pand <xmm6=%xmm3,<xmm7=%xmm5 | ||
4428 | pand %xmm3,%xmm5 | ||
4429 | |||
4430 | # qhasm: xmm7 ^= xmm2 | ||
4431 | # asm 1: pxor <xmm2=int6464#2,<xmm7=int6464#6 | ||
4432 | # asm 2: pxor <xmm2=%xmm1,<xmm7=%xmm5 | ||
4433 | pxor %xmm1,%xmm5 | ||
4434 | |||
4435 | # qhasm: xmm5 = xmm1 | ||
4436 | # asm 1: movdqa <xmm1=int6464#5,>xmm5=int6464#8 | ||
4437 | # asm 2: movdqa <xmm1=%xmm4,>xmm5=%xmm7 | ||
4438 | movdqa %xmm4,%xmm7 | ||
4439 | |||
4440 | # qhasm: xmm5 ^= xmm0 | ||
4441 | # asm 1: pxor <xmm0=int6464#7,<xmm5=int6464#8 | ||
4442 | # asm 2: pxor <xmm0=%xmm6,<xmm5=%xmm7 | ||
4443 | pxor %xmm6,%xmm7 | ||
4444 | |||
4445 | # qhasm: xmm3 ^= xmm2 | ||
4446 | # asm 1: pxor <xmm2=int6464#2,<xmm3=int6464#1 | ||
4447 | # asm 2: pxor <xmm2=%xmm1,<xmm3=%xmm0 | ||
4448 | pxor %xmm1,%xmm0 | ||
4449 | |||
4450 | # qhasm: xmm5 &= xmm3 | ||
4451 | # asm 1: pand <xmm3=int6464#1,<xmm5=int6464#8 | ||
4452 | # asm 2: pand <xmm3=%xmm0,<xmm5=%xmm7 | ||
4453 | pand %xmm0,%xmm7 | ||
4454 | |||
4455 | # qhasm: xmm5 ^= xmm0 | ||
4456 | # asm 1: pxor <xmm0=int6464#7,<xmm5=int6464#8 | ||
4457 | # asm 2: pxor <xmm0=%xmm6,<xmm5=%xmm7 | ||
4458 | pxor %xmm6,%xmm7 | ||
4459 | |||
4460 | # qhasm: xmm1 ^= xmm5 | ||
4461 | # asm 1: pxor <xmm5=int6464#8,<xmm1=int6464#5 | ||
4462 | # asm 2: pxor <xmm5=%xmm7,<xmm1=%xmm4 | ||
4463 | pxor %xmm7,%xmm4 | ||
4464 | |||
4465 | # qhasm: xmm2 = xmm6 | ||
4466 | # asm 1: movdqa <xmm6=int6464#4,>xmm2=int6464#1 | ||
4467 | # asm 2: movdqa <xmm6=%xmm3,>xmm2=%xmm0 | ||
4468 | movdqa %xmm3,%xmm0 | ||
4469 | |||
4470 | # qhasm: xmm2 ^= xmm5 | ||
4471 | # asm 1: pxor <xmm5=int6464#8,<xmm2=int6464#1 | ||
4472 | # asm 2: pxor <xmm5=%xmm7,<xmm2=%xmm0 | ||
4473 | pxor %xmm7,%xmm0 | ||
4474 | |||
4475 | # qhasm: xmm2 &= xmm0 | ||
4476 | # asm 1: pand <xmm0=int6464#7,<xmm2=int6464#1 | ||
4477 | # asm 2: pand <xmm0=%xmm6,<xmm2=%xmm0 | ||
4478 | pand %xmm6,%xmm0 | ||
4479 | |||
4480 | # qhasm: xmm1 ^= xmm2 | ||
4481 | # asm 1: pxor <xmm2=int6464#1,<xmm1=int6464#5 | ||
4482 | # asm 2: pxor <xmm2=%xmm0,<xmm1=%xmm4 | ||
4483 | pxor %xmm0,%xmm4 | ||
4484 | |||
4485 | # qhasm: xmm6 ^= xmm2 | ||
4486 | # asm 1: pxor <xmm2=int6464#1,<xmm6=int6464#4 | ||
4487 | # asm 2: pxor <xmm2=%xmm0,<xmm6=%xmm3 | ||
4488 | pxor %xmm0,%xmm3 | ||
4489 | |||
4490 | # qhasm: xmm6 &= xmm7 | ||
4491 | # asm 1: pand <xmm7=int6464#6,<xmm6=int6464#4 | ||
4492 | # asm 2: pand <xmm7=%xmm5,<xmm6=%xmm3 | ||
4493 | pand %xmm5,%xmm3 | ||
4494 | |||
4495 | # qhasm: xmm6 ^= xmm4 | ||
4496 | # asm 1: pxor <xmm4=int6464#3,<xmm6=int6464#4 | ||
4497 | # asm 2: pxor <xmm4=%xmm2,<xmm6=%xmm3 | ||
4498 | pxor %xmm2,%xmm3 | ||
4499 | |||
4500 | # qhasm: xmm4 = xmm14 | ||
4501 | # asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#1 | ||
4502 | # asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm0 | ||
4503 | movdqa %xmm14,%xmm0 | ||
4504 | |||
4505 | # qhasm: xmm0 = xmm13 | ||
4506 | # asm 1: movdqa <xmm13=int6464#14,>xmm0=int6464#2 | ||
4507 | # asm 2: movdqa <xmm13=%xmm13,>xmm0=%xmm1 | ||
4508 | movdqa %xmm13,%xmm1 | ||
4509 | |||
4510 | # qhasm: xmm2 = xmm7 | ||
4511 | # asm 1: movdqa <xmm7=int6464#6,>xmm2=int6464#3 | ||
4512 | # asm 2: movdqa <xmm7=%xmm5,>xmm2=%xmm2 | ||
4513 | movdqa %xmm5,%xmm2 | ||
4514 | |||
4515 | # qhasm: xmm2 ^= xmm6 | ||
4516 | # asm 1: pxor <xmm6=int6464#4,<xmm2=int6464#3 | ||
4517 | # asm 2: pxor <xmm6=%xmm3,<xmm2=%xmm2 | ||
4518 | pxor %xmm3,%xmm2 | ||
4519 | |||
4520 | # qhasm: xmm2 &= xmm14 | ||
4521 | # asm 1: pand <xmm14=int6464#15,<xmm2=int6464#3 | ||
4522 | # asm 2: pand <xmm14=%xmm14,<xmm2=%xmm2 | ||
4523 | pand %xmm14,%xmm2 | ||
4524 | |||
4525 | # qhasm: xmm14 ^= xmm13 | ||
4526 | # asm 1: pxor <xmm13=int6464#14,<xmm14=int6464#15 | ||
4527 | # asm 2: pxor <xmm13=%xmm13,<xmm14=%xmm14 | ||
4528 | pxor %xmm13,%xmm14 | ||
4529 | |||
4530 | # qhasm: xmm14 &= xmm6 | ||
4531 | # asm 1: pand <xmm6=int6464#4,<xmm14=int6464#15 | ||
4532 | # asm 2: pand <xmm6=%xmm3,<xmm14=%xmm14 | ||
4533 | pand %xmm3,%xmm14 | ||
4534 | |||
4535 | # qhasm: xmm13 &= xmm7 | ||
4536 | # asm 1: pand <xmm7=int6464#6,<xmm13=int6464#14 | ||
4537 | # asm 2: pand <xmm7=%xmm5,<xmm13=%xmm13 | ||
4538 | pand %xmm5,%xmm13 | ||
4539 | |||
4540 | # qhasm: xmm14 ^= xmm13 | ||
4541 | # asm 1: pxor <xmm13=int6464#14,<xmm14=int6464#15 | ||
4542 | # asm 2: pxor <xmm13=%xmm13,<xmm14=%xmm14 | ||
4543 | pxor %xmm13,%xmm14 | ||
4544 | |||
4545 | # qhasm: xmm13 ^= xmm2 | ||
4546 | # asm 1: pxor <xmm2=int6464#3,<xmm13=int6464#14 | ||
4547 | # asm 2: pxor <xmm2=%xmm2,<xmm13=%xmm13 | ||
4548 | pxor %xmm2,%xmm13 | ||
4549 | |||
4550 | # qhasm: xmm4 ^= xmm8 | ||
4551 | # asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#1 | ||
4552 | # asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm0 | ||
4553 | pxor %xmm8,%xmm0 | ||
4554 | |||
4555 | # qhasm: xmm0 ^= xmm11 | ||
4556 | # asm 1: pxor <xmm11=int6464#12,<xmm0=int6464#2 | ||
4557 | # asm 2: pxor <xmm11=%xmm11,<xmm0=%xmm1 | ||
4558 | pxor %xmm11,%xmm1 | ||
4559 | |||
4560 | # qhasm: xmm7 ^= xmm5 | ||
4561 | # asm 1: pxor <xmm5=int6464#8,<xmm7=int6464#6 | ||
4562 | # asm 2: pxor <xmm5=%xmm7,<xmm7=%xmm5 | ||
4563 | pxor %xmm7,%xmm5 | ||
4564 | |||
4565 | # qhasm: xmm6 ^= xmm1 | ||
4566 | # asm 1: pxor <xmm1=int6464#5,<xmm6=int6464#4 | ||
4567 | # asm 2: pxor <xmm1=%xmm4,<xmm6=%xmm3 | ||
4568 | pxor %xmm4,%xmm3 | ||
4569 | |||
4570 | # qhasm: xmm3 = xmm7 | ||
4571 | # asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3 | ||
4572 | # asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2 | ||
4573 | movdqa %xmm5,%xmm2 | ||
4574 | |||
4575 | # qhasm: xmm3 ^= xmm6 | ||
4576 | # asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3 | ||
4577 | # asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2 | ||
4578 | pxor %xmm3,%xmm2 | ||
4579 | |||
4580 | # qhasm: xmm3 &= xmm4 | ||
4581 | # asm 1: pand <xmm4=int6464#1,<xmm3=int6464#3 | ||
4582 | # asm 2: pand <xmm4=%xmm0,<xmm3=%xmm2 | ||
4583 | pand %xmm0,%xmm2 | ||
4584 | |||
4585 | # qhasm: xmm4 ^= xmm0 | ||
4586 | # asm 1: pxor <xmm0=int6464#2,<xmm4=int6464#1 | ||
4587 | # asm 2: pxor <xmm0=%xmm1,<xmm4=%xmm0 | ||
4588 | pxor %xmm1,%xmm0 | ||
4589 | |||
4590 | # qhasm: xmm4 &= xmm6 | ||
4591 | # asm 1: pand <xmm6=int6464#4,<xmm4=int6464#1 | ||
4592 | # asm 2: pand <xmm6=%xmm3,<xmm4=%xmm0 | ||
4593 | pand %xmm3,%xmm0 | ||
4594 | |||
4595 | # qhasm: xmm0 &= xmm7 | ||
4596 | # asm 1: pand <xmm7=int6464#6,<xmm0=int6464#2 | ||
4597 | # asm 2: pand <xmm7=%xmm5,<xmm0=%xmm1 | ||
4598 | pand %xmm5,%xmm1 | ||
4599 | |||
4600 | # qhasm: xmm0 ^= xmm4 | ||
4601 | # asm 1: pxor <xmm4=int6464#1,<xmm0=int6464#2 | ||
4602 | # asm 2: pxor <xmm4=%xmm0,<xmm0=%xmm1 | ||
4603 | pxor %xmm0,%xmm1 | ||
4604 | |||
4605 | # qhasm: xmm4 ^= xmm3 | ||
4606 | # asm 1: pxor <xmm3=int6464#3,<xmm4=int6464#1 | ||
4607 | # asm 2: pxor <xmm3=%xmm2,<xmm4=%xmm0 | ||
4608 | pxor %xmm2,%xmm0 | ||
4609 | |||
4610 | # qhasm: xmm2 = xmm5 | ||
4611 | # asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3 | ||
4612 | # asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2 | ||
4613 | movdqa %xmm7,%xmm2 | ||
4614 | |||
4615 | # qhasm: xmm2 ^= xmm1 | ||
4616 | # asm 1: pxor <xmm1=int6464#5,<xmm2=int6464#3 | ||
4617 | # asm 2: pxor <xmm1=%xmm4,<xmm2=%xmm2 | ||
4618 | pxor %xmm4,%xmm2 | ||
4619 | |||
4620 | # qhasm: xmm2 &= xmm8 | ||
4621 | # asm 1: pand <xmm8=int6464#9,<xmm2=int6464#3 | ||
4622 | # asm 2: pand <xmm8=%xmm8,<xmm2=%xmm2 | ||
4623 | pand %xmm8,%xmm2 | ||
4624 | |||
4625 | # qhasm: xmm8 ^= xmm11 | ||
4626 | # asm 1: pxor <xmm11=int6464#12,<xmm8=int6464#9 | ||
4627 | # asm 2: pxor <xmm11=%xmm11,<xmm8=%xmm8 | ||
4628 | pxor %xmm11,%xmm8 | ||
4629 | |||
4630 | # qhasm: xmm8 &= xmm1 | ||
4631 | # asm 1: pand <xmm1=int6464#5,<xmm8=int6464#9 | ||
4632 | # asm 2: pand <xmm1=%xmm4,<xmm8=%xmm8 | ||
4633 | pand %xmm4,%xmm8 | ||
4634 | |||
4635 | # qhasm: xmm11 &= xmm5 | ||
4636 | # asm 1: pand <xmm5=int6464#8,<xmm11=int6464#12 | ||
4637 | # asm 2: pand <xmm5=%xmm7,<xmm11=%xmm11 | ||
4638 | pand %xmm7,%xmm11 | ||
4639 | |||
4640 | # qhasm: xmm8 ^= xmm11 | ||
4641 | # asm 1: pxor <xmm11=int6464#12,<xmm8=int6464#9 | ||
4642 | # asm 2: pxor <xmm11=%xmm11,<xmm8=%xmm8 | ||
4643 | pxor %xmm11,%xmm8 | ||
4644 | |||
4645 | # qhasm: xmm11 ^= xmm2 | ||
4646 | # asm 1: pxor <xmm2=int6464#3,<xmm11=int6464#12 | ||
4647 | # asm 2: pxor <xmm2=%xmm2,<xmm11=%xmm11 | ||
4648 | pxor %xmm2,%xmm11 | ||
4649 | |||
4650 | # qhasm: xmm14 ^= xmm4 | ||
4651 | # asm 1: pxor <xmm4=int6464#1,<xmm14=int6464#15 | ||
4652 | # asm 2: pxor <xmm4=%xmm0,<xmm14=%xmm14 | ||
4653 | pxor %xmm0,%xmm14 | ||
4654 | |||
4655 | # qhasm: xmm8 ^= xmm4 | ||
4656 | # asm 1: pxor <xmm4=int6464#1,<xmm8=int6464#9 | ||
4657 | # asm 2: pxor <xmm4=%xmm0,<xmm8=%xmm8 | ||
4658 | pxor %xmm0,%xmm8 | ||
4659 | |||
4660 | # qhasm: xmm13 ^= xmm0 | ||
4661 | # asm 1: pxor <xmm0=int6464#2,<xmm13=int6464#14 | ||
4662 | # asm 2: pxor <xmm0=%xmm1,<xmm13=%xmm13 | ||
4663 | pxor %xmm1,%xmm13 | ||
4664 | |||
4665 | # qhasm: xmm11 ^= xmm0 | ||
4666 | # asm 1: pxor <xmm0=int6464#2,<xmm11=int6464#12 | ||
4667 | # asm 2: pxor <xmm0=%xmm1,<xmm11=%xmm11 | ||
4668 | pxor %xmm1,%xmm11 | ||
4669 | |||
4670 | # qhasm: xmm4 = xmm15 | ||
4671 | # asm 1: movdqa <xmm15=int6464#16,>xmm4=int6464#1 | ||
4672 | # asm 2: movdqa <xmm15=%xmm15,>xmm4=%xmm0 | ||
4673 | movdqa %xmm15,%xmm0 | ||
4674 | |||
4675 | # qhasm: xmm0 = xmm9 | ||
4676 | # asm 1: movdqa <xmm9=int6464#10,>xmm0=int6464#2 | ||
4677 | # asm 2: movdqa <xmm9=%xmm9,>xmm0=%xmm1 | ||
4678 | movdqa %xmm9,%xmm1 | ||
4679 | |||
4680 | # qhasm: xmm4 ^= xmm12 | ||
4681 | # asm 1: pxor <xmm12=int6464#13,<xmm4=int6464#1 | ||
4682 | # asm 2: pxor <xmm12=%xmm12,<xmm4=%xmm0 | ||
4683 | pxor %xmm12,%xmm0 | ||
4684 | |||
4685 | # qhasm: xmm0 ^= xmm10 | ||
4686 | # asm 1: pxor <xmm10=int6464#11,<xmm0=int6464#2 | ||
4687 | # asm 2: pxor <xmm10=%xmm10,<xmm0=%xmm1 | ||
4688 | pxor %xmm10,%xmm1 | ||
4689 | |||
4690 | # qhasm: xmm3 = xmm7 | ||
4691 | # asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3 | ||
4692 | # asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2 | ||
4693 | movdqa %xmm5,%xmm2 | ||
4694 | |||
4695 | # qhasm: xmm3 ^= xmm6 | ||
4696 | # asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3 | ||
4697 | # asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2 | ||
4698 | pxor %xmm3,%xmm2 | ||
4699 | |||
4700 | # qhasm: xmm3 &= xmm4 | ||
4701 | # asm 1: pand <xmm4=int6464#1,<xmm3=int6464#3 | ||
4702 | # asm 2: pand <xmm4=%xmm0,<xmm3=%xmm2 | ||
4703 | pand %xmm0,%xmm2 | ||
4704 | |||
4705 | # qhasm: xmm4 ^= xmm0 | ||
4706 | # asm 1: pxor <xmm0=int6464#2,<xmm4=int6464#1 | ||
4707 | # asm 2: pxor <xmm0=%xmm1,<xmm4=%xmm0 | ||
4708 | pxor %xmm1,%xmm0 | ||
4709 | |||
4710 | # qhasm: xmm4 &= xmm6 | ||
4711 | # asm 1: pand <xmm6=int6464#4,<xmm4=int6464#1 | ||
4712 | # asm 2: pand <xmm6=%xmm3,<xmm4=%xmm0 | ||
4713 | pand %xmm3,%xmm0 | ||
4714 | |||
4715 | # qhasm: xmm0 &= xmm7 | ||
4716 | # asm 1: pand <xmm7=int6464#6,<xmm0=int6464#2 | ||
4717 | # asm 2: pand <xmm7=%xmm5,<xmm0=%xmm1 | ||
4718 | pand %xmm5,%xmm1 | ||
4719 | |||
4720 | # qhasm: xmm0 ^= xmm4 | ||
4721 | # asm 1: pxor <xmm4=int6464#1,<xmm0=int6464#2 | ||
4722 | # asm 2: pxor <xmm4=%xmm0,<xmm0=%xmm1 | ||
4723 | pxor %xmm0,%xmm1 | ||
4724 | |||
4725 | # qhasm: xmm4 ^= xmm3 | ||
4726 | # asm 1: pxor <xmm3=int6464#3,<xmm4=int6464#1 | ||
4727 | # asm 2: pxor <xmm3=%xmm2,<xmm4=%xmm0 | ||
4728 | pxor %xmm2,%xmm0 | ||
4729 | |||
4730 | # qhasm: xmm2 = xmm5 | ||
4731 | # asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3 | ||
4732 | # asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2 | ||
4733 | movdqa %xmm7,%xmm2 | ||
4734 | |||
4735 | # qhasm: xmm2 ^= xmm1 | ||
4736 | # asm 1: pxor <xmm1=int6464#5,<xmm2=int6464#3 | ||
4737 | # asm 2: pxor <xmm1=%xmm4,<xmm2=%xmm2 | ||
4738 | pxor %xmm4,%xmm2 | ||
4739 | |||
4740 | # qhasm: xmm2 &= xmm12 | ||
4741 | # asm 1: pand <xmm12=int6464#13,<xmm2=int6464#3 | ||
4742 | # asm 2: pand <xmm12=%xmm12,<xmm2=%xmm2 | ||
4743 | pand %xmm12,%xmm2 | ||
4744 | |||
4745 | # qhasm: xmm12 ^= xmm10 | ||
4746 | # asm 1: pxor <xmm10=int6464#11,<xmm12=int6464#13 | ||
4747 | # asm 2: pxor <xmm10=%xmm10,<xmm12=%xmm12 | ||
4748 | pxor %xmm10,%xmm12 | ||
4749 | |||
4750 | # qhasm: xmm12 &= xmm1 | ||
4751 | # asm 1: pand <xmm1=int6464#5,<xmm12=int6464#13 | ||
4752 | # asm 2: pand <xmm1=%xmm4,<xmm12=%xmm12 | ||
4753 | pand %xmm4,%xmm12 | ||
4754 | |||
4755 | # qhasm: xmm10 &= xmm5 | ||
4756 | # asm 1: pand <xmm5=int6464#8,<xmm10=int6464#11 | ||
4757 | # asm 2: pand <xmm5=%xmm7,<xmm10=%xmm10 | ||
4758 | pand %xmm7,%xmm10 | ||
4759 | |||
4760 | # qhasm: xmm12 ^= xmm10 | ||
4761 | # asm 1: pxor <xmm10=int6464#11,<xmm12=int6464#13 | ||
4762 | # asm 2: pxor <xmm10=%xmm10,<xmm12=%xmm12 | ||
4763 | pxor %xmm10,%xmm12 | ||
4764 | |||
4765 | # qhasm: xmm10 ^= xmm2 | ||
4766 | # asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#11 | ||
4767 | # asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm10 | ||
4768 | pxor %xmm2,%xmm10 | ||
4769 | |||
4770 | # qhasm: xmm7 ^= xmm5 | ||
4771 | # asm 1: pxor <xmm5=int6464#8,<xmm7=int6464#6 | ||
4772 | # asm 2: pxor <xmm5=%xmm7,<xmm7=%xmm5 | ||
4773 | pxor %xmm7,%xmm5 | ||
4774 | |||
4775 | # qhasm: xmm6 ^= xmm1 | ||
4776 | # asm 1: pxor <xmm1=int6464#5,<xmm6=int6464#4 | ||
4777 | # asm 2: pxor <xmm1=%xmm4,<xmm6=%xmm3 | ||
4778 | pxor %xmm4,%xmm3 | ||
4779 | |||
4780 | # qhasm: xmm3 = xmm7 | ||
4781 | # asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3 | ||
4782 | # asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2 | ||
4783 | movdqa %xmm5,%xmm2 | ||
4784 | |||
4785 | # qhasm: xmm3 ^= xmm6 | ||
4786 | # asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3 | ||
4787 | # asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2 | ||
4788 | pxor %xmm3,%xmm2 | ||
4789 | |||
4790 | # qhasm: xmm3 &= xmm15 | ||
4791 | # asm 1: pand <xmm15=int6464#16,<xmm3=int6464#3 | ||
4792 | # asm 2: pand <xmm15=%xmm15,<xmm3=%xmm2 | ||
4793 | pand %xmm15,%xmm2 | ||
4794 | |||
4795 | # qhasm: xmm15 ^= xmm9 | ||
4796 | # asm 1: pxor <xmm9=int6464#10,<xmm15=int6464#16 | ||
4797 | # asm 2: pxor <xmm9=%xmm9,<xmm15=%xmm15 | ||
4798 | pxor %xmm9,%xmm15 | ||
4799 | |||
4800 | # qhasm: xmm15 &= xmm6 | ||
4801 | # asm 1: pand <xmm6=int6464#4,<xmm15=int6464#16 | ||
4802 | # asm 2: pand <xmm6=%xmm3,<xmm15=%xmm15 | ||
4803 | pand %xmm3,%xmm15 | ||
4804 | |||
4805 | # qhasm: xmm9 &= xmm7 | ||
4806 | # asm 1: pand <xmm7=int6464#6,<xmm9=int6464#10 | ||
4807 | # asm 2: pand <xmm7=%xmm5,<xmm9=%xmm9 | ||
4808 | pand %xmm5,%xmm9 | ||
4809 | |||
4810 | # qhasm: xmm15 ^= xmm9 | ||
4811 | # asm 1: pxor <xmm9=int6464#10,<xmm15=int6464#16 | ||
4812 | # asm 2: pxor <xmm9=%xmm9,<xmm15=%xmm15 | ||
4813 | pxor %xmm9,%xmm15 | ||
4814 | |||
4815 | # qhasm: xmm9 ^= xmm3 | ||
4816 | # asm 1: pxor <xmm3=int6464#3,<xmm9=int6464#10 | ||
4817 | # asm 2: pxor <xmm3=%xmm2,<xmm9=%xmm9 | ||
4818 | pxor %xmm2,%xmm9 | ||
4819 | |||
4820 | # qhasm: xmm15 ^= xmm4 | ||
4821 | # asm 1: pxor <xmm4=int6464#1,<xmm15=int6464#16 | ||
4822 | # asm 2: pxor <xmm4=%xmm0,<xmm15=%xmm15 | ||
4823 | pxor %xmm0,%xmm15 | ||
4824 | |||
4825 | # qhasm: xmm12 ^= xmm4 | ||
4826 | # asm 1: pxor <xmm4=int6464#1,<xmm12=int6464#13 | ||
4827 | # asm 2: pxor <xmm4=%xmm0,<xmm12=%xmm12 | ||
4828 | pxor %xmm0,%xmm12 | ||
4829 | |||
4830 | # qhasm: xmm9 ^= xmm0 | ||
4831 | # asm 1: pxor <xmm0=int6464#2,<xmm9=int6464#10 | ||
4832 | # asm 2: pxor <xmm0=%xmm1,<xmm9=%xmm9 | ||
4833 | pxor %xmm1,%xmm9 | ||
4834 | |||
4835 | # qhasm: xmm10 ^= xmm0 | ||
4836 | # asm 1: pxor <xmm0=int6464#2,<xmm10=int6464#11 | ||
4837 | # asm 2: pxor <xmm0=%xmm1,<xmm10=%xmm10 | ||
4838 | pxor %xmm1,%xmm10 | ||
4839 | |||
4840 | # qhasm: xmm15 ^= xmm8 | ||
4841 | # asm 1: pxor <xmm8=int6464#9,<xmm15=int6464#16 | ||
4842 | # asm 2: pxor <xmm8=%xmm8,<xmm15=%xmm15 | ||
4843 | pxor %xmm8,%xmm15 | ||
4844 | |||
4845 | # qhasm: xmm9 ^= xmm14 | ||
4846 | # asm 1: pxor <xmm14=int6464#15,<xmm9=int6464#10 | ||
4847 | # asm 2: pxor <xmm14=%xmm14,<xmm9=%xmm9 | ||
4848 | pxor %xmm14,%xmm9 | ||
4849 | |||
4850 | # qhasm: xmm12 ^= xmm15 | ||
4851 | # asm 1: pxor <xmm15=int6464#16,<xmm12=int6464#13 | ||
4852 | # asm 2: pxor <xmm15=%xmm15,<xmm12=%xmm12 | ||
4853 | pxor %xmm15,%xmm12 | ||
4854 | |||
4855 | # qhasm: xmm14 ^= xmm8 | ||
4856 | # asm 1: pxor <xmm8=int6464#9,<xmm14=int6464#15 | ||
4857 | # asm 2: pxor <xmm8=%xmm8,<xmm14=%xmm14 | ||
4858 | pxor %xmm8,%xmm14 | ||
4859 | |||
4860 | # qhasm: xmm8 ^= xmm9 | ||
4861 | # asm 1: pxor <xmm9=int6464#10,<xmm8=int6464#9 | ||
4862 | # asm 2: pxor <xmm9=%xmm9,<xmm8=%xmm8 | ||
4863 | pxor %xmm9,%xmm8 | ||
4864 | |||
4865 | # qhasm: xmm9 ^= xmm13 | ||
4866 | # asm 1: pxor <xmm13=int6464#14,<xmm9=int6464#10 | ||
4867 | # asm 2: pxor <xmm13=%xmm13,<xmm9=%xmm9 | ||
4868 | pxor %xmm13,%xmm9 | ||
4869 | |||
4870 | # qhasm: xmm13 ^= xmm10 | ||
4871 | # asm 1: pxor <xmm10=int6464#11,<xmm13=int6464#14 | ||
4872 | # asm 2: pxor <xmm10=%xmm10,<xmm13=%xmm13 | ||
4873 | pxor %xmm10,%xmm13 | ||
4874 | |||
4875 | # qhasm: xmm12 ^= xmm13 | ||
4876 | # asm 1: pxor <xmm13=int6464#14,<xmm12=int6464#13 | ||
4877 | # asm 2: pxor <xmm13=%xmm13,<xmm12=%xmm12 | ||
4878 | pxor %xmm13,%xmm12 | ||
4879 | |||
4880 | # qhasm: xmm10 ^= xmm11 | ||
4881 | # asm 1: pxor <xmm11=int6464#12,<xmm10=int6464#11 | ||
4882 | # asm 2: pxor <xmm11=%xmm11,<xmm10=%xmm10 | ||
4883 | pxor %xmm11,%xmm10 | ||
4884 | |||
4885 | # qhasm: xmm11 ^= xmm13 | ||
4886 | # asm 1: pxor <xmm13=int6464#14,<xmm11=int6464#12 | ||
4887 | # asm 2: pxor <xmm13=%xmm13,<xmm11=%xmm11 | ||
4888 | pxor %xmm13,%xmm11 | ||
4889 | |||
4890 | # qhasm: xmm14 ^= xmm11 | ||
4891 | # asm 1: pxor <xmm11=int6464#12,<xmm14=int6464#15 | ||
4892 | # asm 2: pxor <xmm11=%xmm11,<xmm14=%xmm14 | ||
4893 | pxor %xmm11,%xmm14 | ||
4894 | |||
4895 | # qhasm: xmm0 = shuffle dwords of xmm8 by 0x93 | ||
4896 | # asm 1: pshufd $0x93,<xmm8=int6464#9,>xmm0=int6464#1 | ||
4897 | # asm 2: pshufd $0x93,<xmm8=%xmm8,>xmm0=%xmm0 | ||
4898 | pshufd $0x93,%xmm8,%xmm0 | ||
4899 | |||
4900 | # qhasm: xmm1 = shuffle dwords of xmm9 by 0x93 | ||
4901 | # asm 1: pshufd $0x93,<xmm9=int6464#10,>xmm1=int6464#2 | ||
4902 | # asm 2: pshufd $0x93,<xmm9=%xmm9,>xmm1=%xmm1 | ||
4903 | pshufd $0x93,%xmm9,%xmm1 | ||
4904 | |||
4905 | # qhasm: xmm2 = shuffle dwords of xmm12 by 0x93 | ||
4906 | # asm 1: pshufd $0x93,<xmm12=int6464#13,>xmm2=int6464#3 | ||
4907 | # asm 2: pshufd $0x93,<xmm12=%xmm12,>xmm2=%xmm2 | ||
4908 | pshufd $0x93,%xmm12,%xmm2 | ||
4909 | |||
4910 | # qhasm: xmm3 = shuffle dwords of xmm14 by 0x93 | ||
4911 | # asm 1: pshufd $0x93,<xmm14=int6464#15,>xmm3=int6464#4 | ||
4912 | # asm 2: pshufd $0x93,<xmm14=%xmm14,>xmm3=%xmm3 | ||
4913 | pshufd $0x93,%xmm14,%xmm3 | ||
4914 | |||
4915 | # qhasm: xmm4 = shuffle dwords of xmm11 by 0x93 | ||
4916 | # asm 1: pshufd $0x93,<xmm11=int6464#12,>xmm4=int6464#5 | ||
4917 | # asm 2: pshufd $0x93,<xmm11=%xmm11,>xmm4=%xmm4 | ||
4918 | pshufd $0x93,%xmm11,%xmm4 | ||
4919 | |||
4920 | # qhasm: xmm5 = shuffle dwords of xmm15 by 0x93 | ||
4921 | # asm 1: pshufd $0x93,<xmm15=int6464#16,>xmm5=int6464#6 | ||
4922 | # asm 2: pshufd $0x93,<xmm15=%xmm15,>xmm5=%xmm5 | ||
4923 | pshufd $0x93,%xmm15,%xmm5 | ||
4924 | |||
4925 | # qhasm: xmm6 = shuffle dwords of xmm10 by 0x93 | ||
4926 | # asm 1: pshufd $0x93,<xmm10=int6464#11,>xmm6=int6464#7 | ||
4927 | # asm 2: pshufd $0x93,<xmm10=%xmm10,>xmm6=%xmm6 | ||
4928 | pshufd $0x93,%xmm10,%xmm6 | ||
4929 | |||
4930 | # qhasm: xmm7 = shuffle dwords of xmm13 by 0x93 | ||
4931 | # asm 1: pshufd $0x93,<xmm13=int6464#14,>xmm7=int6464#8 | ||
4932 | # asm 2: pshufd $0x93,<xmm13=%xmm13,>xmm7=%xmm7 | ||
4933 | pshufd $0x93,%xmm13,%xmm7 | ||
4934 | |||
4935 | # qhasm: xmm8 ^= xmm0 | ||
4936 | # asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9 | ||
4937 | # asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8 | ||
4938 | pxor %xmm0,%xmm8 | ||
4939 | |||
4940 | # qhasm: xmm9 ^= xmm1 | ||
4941 | # asm 1: pxor <xmm1=int6464#2,<xmm9=int6464#10 | ||
4942 | # asm 2: pxor <xmm1=%xmm1,<xmm9=%xmm9 | ||
4943 | pxor %xmm1,%xmm9 | ||
4944 | |||
4945 | # qhasm: xmm12 ^= xmm2 | ||
4946 | # asm 1: pxor <xmm2=int6464#3,<xmm12=int6464#13 | ||
4947 | # asm 2: pxor <xmm2=%xmm2,<xmm12=%xmm12 | ||
4948 | pxor %xmm2,%xmm12 | ||
4949 | |||
4950 | # qhasm: xmm14 ^= xmm3 | ||
4951 | # asm 1: pxor <xmm3=int6464#4,<xmm14=int6464#15 | ||
4952 | # asm 2: pxor <xmm3=%xmm3,<xmm14=%xmm14 | ||
4953 | pxor %xmm3,%xmm14 | ||
4954 | |||
4955 | # qhasm: xmm11 ^= xmm4 | ||
4956 | # asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#12 | ||
4957 | # asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm11 | ||
4958 | pxor %xmm4,%xmm11 | ||
4959 | |||
4960 | # qhasm: xmm15 ^= xmm5 | ||
4961 | # asm 1: pxor <xmm5=int6464#6,<xmm15=int6464#16 | ||
4962 | # asm 2: pxor <xmm5=%xmm5,<xmm15=%xmm15 | ||
4963 | pxor %xmm5,%xmm15 | ||
4964 | |||
4965 | # qhasm: xmm10 ^= xmm6 | ||
4966 | # asm 1: pxor <xmm6=int6464#7,<xmm10=int6464#11 | ||
4967 | # asm 2: pxor <xmm6=%xmm6,<xmm10=%xmm10 | ||
4968 | pxor %xmm6,%xmm10 | ||
4969 | |||
4970 | # qhasm: xmm13 ^= xmm7 | ||
4971 | # asm 1: pxor <xmm7=int6464#8,<xmm13=int6464#14 | ||
4972 | # asm 2: pxor <xmm7=%xmm7,<xmm13=%xmm13 | ||
4973 | pxor %xmm7,%xmm13 | ||
4974 | |||
4975 | # qhasm: xmm0 ^= xmm13 | ||
4976 | # asm 1: pxor <xmm13=int6464#14,<xmm0=int6464#1 | ||
4977 | # asm 2: pxor <xmm13=%xmm13,<xmm0=%xmm0 | ||
4978 | pxor %xmm13,%xmm0 | ||
4979 | |||
4980 | # qhasm: xmm1 ^= xmm8 | ||
4981 | # asm 1: pxor <xmm8=int6464#9,<xmm1=int6464#2 | ||
4982 | # asm 2: pxor <xmm8=%xmm8,<xmm1=%xmm1 | ||
4983 | pxor %xmm8,%xmm1 | ||
4984 | |||
4985 | # qhasm: xmm2 ^= xmm9 | ||
4986 | # asm 1: pxor <xmm9=int6464#10,<xmm2=int6464#3 | ||
4987 | # asm 2: pxor <xmm9=%xmm9,<xmm2=%xmm2 | ||
4988 | pxor %xmm9,%xmm2 | ||
4989 | |||
4990 | # qhasm: xmm1 ^= xmm13 | ||
4991 | # asm 1: pxor <xmm13=int6464#14,<xmm1=int6464#2 | ||
4992 | # asm 2: pxor <xmm13=%xmm13,<xmm1=%xmm1 | ||
4993 | pxor %xmm13,%xmm1 | ||
4994 | |||
4995 | # qhasm: xmm3 ^= xmm12 | ||
4996 | # asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4 | ||
4997 | # asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3 | ||
4998 | pxor %xmm12,%xmm3 | ||
4999 | |||
5000 | # qhasm: xmm4 ^= xmm14 | ||
5001 | # asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5 | ||
5002 | # asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4 | ||
5003 | pxor %xmm14,%xmm4 | ||
5004 | |||
5005 | # qhasm: xmm5 ^= xmm11 | ||
5006 | # asm 1: pxor <xmm11=int6464#12,<xmm5=int6464#6 | ||
5007 | # asm 2: pxor <xmm11=%xmm11,<xmm5=%xmm5 | ||
5008 | pxor %xmm11,%xmm5 | ||
5009 | |||
5010 | # qhasm: xmm3 ^= xmm13 | ||
5011 | # asm 1: pxor <xmm13=int6464#14,<xmm3=int6464#4 | ||
5012 | # asm 2: pxor <xmm13=%xmm13,<xmm3=%xmm3 | ||
5013 | pxor %xmm13,%xmm3 | ||
5014 | |||
5015 | # qhasm: xmm6 ^= xmm15 | ||
5016 | # asm 1: pxor <xmm15=int6464#16,<xmm6=int6464#7 | ||
5017 | # asm 2: pxor <xmm15=%xmm15,<xmm6=%xmm6 | ||
5018 | pxor %xmm15,%xmm6 | ||
5019 | |||
5020 | # qhasm: xmm7 ^= xmm10 | ||
5021 | # asm 1: pxor <xmm10=int6464#11,<xmm7=int6464#8 | ||
5022 | # asm 2: pxor <xmm10=%xmm10,<xmm7=%xmm7 | ||
5023 | pxor %xmm10,%xmm7 | ||
5024 | |||
5025 | # qhasm: xmm4 ^= xmm13 | ||
5026 | # asm 1: pxor <xmm13=int6464#14,<xmm4=int6464#5 | ||
5027 | # asm 2: pxor <xmm13=%xmm13,<xmm4=%xmm4 | ||
5028 | pxor %xmm13,%xmm4 | ||
5029 | |||
5030 | # qhasm: xmm8 = shuffle dwords of xmm8 by 0x4E | ||
5031 | # asm 1: pshufd $0x4E,<xmm8=int6464#9,>xmm8=int6464#9 | ||
5032 | # asm 2: pshufd $0x4E,<xmm8=%xmm8,>xmm8=%xmm8 | ||
5033 | pshufd $0x4E,%xmm8,%xmm8 | ||
5034 | |||
5035 | # qhasm: xmm9 = shuffle dwords of xmm9 by 0x4E | ||
5036 | # asm 1: pshufd $0x4E,<xmm9=int6464#10,>xmm9=int6464#10 | ||
5037 | # asm 2: pshufd $0x4E,<xmm9=%xmm9,>xmm9=%xmm9 | ||
5038 | pshufd $0x4E,%xmm9,%xmm9 | ||
5039 | |||
5040 | # qhasm: xmm12 = shuffle dwords of xmm12 by 0x4E | ||
5041 | # asm 1: pshufd $0x4E,<xmm12=int6464#13,>xmm12=int6464#13 | ||
5042 | # asm 2: pshufd $0x4E,<xmm12=%xmm12,>xmm12=%xmm12 | ||
5043 | pshufd $0x4E,%xmm12,%xmm12 | ||
5044 | |||
5045 | # qhasm: xmm14 = shuffle dwords of xmm14 by 0x4E | ||
5046 | # asm 1: pshufd $0x4E,<xmm14=int6464#15,>xmm14=int6464#15 | ||
5047 | # asm 2: pshufd $0x4E,<xmm14=%xmm14,>xmm14=%xmm14 | ||
5048 | pshufd $0x4E,%xmm14,%xmm14 | ||
5049 | |||
5050 | # qhasm: xmm11 = shuffle dwords of xmm11 by 0x4E | ||
5051 | # asm 1: pshufd $0x4E,<xmm11=int6464#12,>xmm11=int6464#12 | ||
5052 | # asm 2: pshufd $0x4E,<xmm11=%xmm11,>xmm11=%xmm11 | ||
5053 | pshufd $0x4E,%xmm11,%xmm11 | ||
5054 | |||
5055 | # qhasm: xmm15 = shuffle dwords of xmm15 by 0x4E | ||
5056 | # asm 1: pshufd $0x4E,<xmm15=int6464#16,>xmm15=int6464#16 | ||
5057 | # asm 2: pshufd $0x4E,<xmm15=%xmm15,>xmm15=%xmm15 | ||
5058 | pshufd $0x4E,%xmm15,%xmm15 | ||
5059 | |||
5060 | # qhasm: xmm10 = shuffle dwords of xmm10 by 0x4E | ||
5061 | # asm 1: pshufd $0x4E,<xmm10=int6464#11,>xmm10=int6464#11 | ||
5062 | # asm 2: pshufd $0x4E,<xmm10=%xmm10,>xmm10=%xmm10 | ||
5063 | pshufd $0x4E,%xmm10,%xmm10 | ||
5064 | |||
5065 | # qhasm: xmm13 = shuffle dwords of xmm13 by 0x4E | ||
5066 | # asm 1: pshufd $0x4E,<xmm13=int6464#14,>xmm13=int6464#14 | ||
5067 | # asm 2: pshufd $0x4E,<xmm13=%xmm13,>xmm13=%xmm13 | ||
5068 | pshufd $0x4E,%xmm13,%xmm13 | ||
5069 | |||
5070 | # qhasm: xmm0 ^= xmm8 | ||
5071 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
5072 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
5073 | pxor %xmm8,%xmm0 | ||
5074 | |||
5075 | # qhasm: xmm1 ^= xmm9 | ||
5076 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
5077 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
5078 | pxor %xmm9,%xmm1 | ||
5079 | |||
5080 | # qhasm: xmm2 ^= xmm12 | ||
5081 | # asm 1: pxor <xmm12=int6464#13,<xmm2=int6464#3 | ||
5082 | # asm 2: pxor <xmm12=%xmm12,<xmm2=%xmm2 | ||
5083 | pxor %xmm12,%xmm2 | ||
5084 | |||
5085 | # qhasm: xmm3 ^= xmm14 | ||
5086 | # asm 1: pxor <xmm14=int6464#15,<xmm3=int6464#4 | ||
5087 | # asm 2: pxor <xmm14=%xmm14,<xmm3=%xmm3 | ||
5088 | pxor %xmm14,%xmm3 | ||
5089 | |||
5090 | # qhasm: xmm4 ^= xmm11 | ||
5091 | # asm 1: pxor <xmm11=int6464#12,<xmm4=int6464#5 | ||
5092 | # asm 2: pxor <xmm11=%xmm11,<xmm4=%xmm4 | ||
5093 | pxor %xmm11,%xmm4 | ||
5094 | |||
5095 | # qhasm: xmm5 ^= xmm15 | ||
5096 | # asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6 | ||
5097 | # asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5 | ||
5098 | pxor %xmm15,%xmm5 | ||
5099 | |||
5100 | # qhasm: xmm6 ^= xmm10 | ||
5101 | # asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7 | ||
5102 | # asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6 | ||
5103 | pxor %xmm10,%xmm6 | ||
5104 | |||
5105 | # qhasm: xmm7 ^= xmm13 | ||
5106 | # asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8 | ||
5107 | # asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7 | ||
5108 | pxor %xmm13,%xmm7 | ||
5109 | |||
5110 | # qhasm: xmm0 ^= *(int128 *)(c + 512) | ||
5111 | # asm 1: pxor 512(<c=int64#4),<xmm0=int6464#1 | ||
5112 | # asm 2: pxor 512(<c=%rcx),<xmm0=%xmm0 | ||
5113 | pxor 512(%rcx),%xmm0 | ||
5114 | |||
5115 | # qhasm: shuffle bytes of xmm0 by SR | ||
5116 | # asm 1: pshufb SR,<xmm0=int6464#1 | ||
5117 | # asm 2: pshufb SR,<xmm0=%xmm0 | ||
5118 | pshufb SR,%xmm0 | ||
5119 | |||
5120 | # qhasm: xmm1 ^= *(int128 *)(c + 528) | ||
5121 | # asm 1: pxor 528(<c=int64#4),<xmm1=int6464#2 | ||
5122 | # asm 2: pxor 528(<c=%rcx),<xmm1=%xmm1 | ||
5123 | pxor 528(%rcx),%xmm1 | ||
5124 | |||
5125 | # qhasm: shuffle bytes of xmm1 by SR | ||
5126 | # asm 1: pshufb SR,<xmm1=int6464#2 | ||
5127 | # asm 2: pshufb SR,<xmm1=%xmm1 | ||
5128 | pshufb SR,%xmm1 | ||
5129 | |||
5130 | # qhasm: xmm2 ^= *(int128 *)(c + 544) | ||
5131 | # asm 1: pxor 544(<c=int64#4),<xmm2=int6464#3 | ||
5132 | # asm 2: pxor 544(<c=%rcx),<xmm2=%xmm2 | ||
5133 | pxor 544(%rcx),%xmm2 | ||
5134 | |||
5135 | # qhasm: shuffle bytes of xmm2 by SR | ||
5136 | # asm 1: pshufb SR,<xmm2=int6464#3 | ||
5137 | # asm 2: pshufb SR,<xmm2=%xmm2 | ||
5138 | pshufb SR,%xmm2 | ||
5139 | |||
5140 | # qhasm: xmm3 ^= *(int128 *)(c + 560) | ||
5141 | # asm 1: pxor 560(<c=int64#4),<xmm3=int6464#4 | ||
5142 | # asm 2: pxor 560(<c=%rcx),<xmm3=%xmm3 | ||
5143 | pxor 560(%rcx),%xmm3 | ||
5144 | |||
5145 | # qhasm: shuffle bytes of xmm3 by SR | ||
5146 | # asm 1: pshufb SR,<xmm3=int6464#4 | ||
5147 | # asm 2: pshufb SR,<xmm3=%xmm3 | ||
5148 | pshufb SR,%xmm3 | ||
5149 | |||
5150 | # qhasm: xmm4 ^= *(int128 *)(c + 576) | ||
5151 | # asm 1: pxor 576(<c=int64#4),<xmm4=int6464#5 | ||
5152 | # asm 2: pxor 576(<c=%rcx),<xmm4=%xmm4 | ||
5153 | pxor 576(%rcx),%xmm4 | ||
5154 | |||
5155 | # qhasm: shuffle bytes of xmm4 by SR | ||
5156 | # asm 1: pshufb SR,<xmm4=int6464#5 | ||
5157 | # asm 2: pshufb SR,<xmm4=%xmm4 | ||
5158 | pshufb SR,%xmm4 | ||
5159 | |||
5160 | # qhasm: xmm5 ^= *(int128 *)(c + 592) | ||
5161 | # asm 1: pxor 592(<c=int64#4),<xmm5=int6464#6 | ||
5162 | # asm 2: pxor 592(<c=%rcx),<xmm5=%xmm5 | ||
5163 | pxor 592(%rcx),%xmm5 | ||
5164 | |||
5165 | # qhasm: shuffle bytes of xmm5 by SR | ||
5166 | # asm 1: pshufb SR,<xmm5=int6464#6 | ||
5167 | # asm 2: pshufb SR,<xmm5=%xmm5 | ||
5168 | pshufb SR,%xmm5 | ||
5169 | |||
5170 | # qhasm: xmm6 ^= *(int128 *)(c + 608) | ||
5171 | # asm 1: pxor 608(<c=int64#4),<xmm6=int6464#7 | ||
5172 | # asm 2: pxor 608(<c=%rcx),<xmm6=%xmm6 | ||
5173 | pxor 608(%rcx),%xmm6 | ||
5174 | |||
5175 | # qhasm: shuffle bytes of xmm6 by SR | ||
5176 | # asm 1: pshufb SR,<xmm6=int6464#7 | ||
5177 | # asm 2: pshufb SR,<xmm6=%xmm6 | ||
5178 | pshufb SR,%xmm6 | ||
5179 | |||
5180 | # qhasm: xmm7 ^= *(int128 *)(c + 624) | ||
5181 | # asm 1: pxor 624(<c=int64#4),<xmm7=int6464#8 | ||
5182 | # asm 2: pxor 624(<c=%rcx),<xmm7=%xmm7 | ||
5183 | pxor 624(%rcx),%xmm7 | ||
5184 | |||
5185 | # qhasm: shuffle bytes of xmm7 by SR | ||
5186 | # asm 1: pshufb SR,<xmm7=int6464#8 | ||
5187 | # asm 2: pshufb SR,<xmm7=%xmm7 | ||
5188 | pshufb SR,%xmm7 | ||
5189 | |||
5190 | # qhasm: xmm5 ^= xmm6 | ||
5191 | # asm 1: pxor <xmm6=int6464#7,<xmm5=int6464#6 | ||
5192 | # asm 2: pxor <xmm6=%xmm6,<xmm5=%xmm5 | ||
5193 | pxor %xmm6,%xmm5 | ||
5194 | |||
5195 | # qhasm: xmm2 ^= xmm1 | ||
5196 | # asm 1: pxor <xmm1=int6464#2,<xmm2=int6464#3 | ||
5197 | # asm 2: pxor <xmm1=%xmm1,<xmm2=%xmm2 | ||
5198 | pxor %xmm1,%xmm2 | ||
5199 | |||
5200 | # qhasm: xmm5 ^= xmm0 | ||
5201 | # asm 1: pxor <xmm0=int6464#1,<xmm5=int6464#6 | ||
5202 | # asm 2: pxor <xmm0=%xmm0,<xmm5=%xmm5 | ||
5203 | pxor %xmm0,%xmm5 | ||
5204 | |||
5205 | # qhasm: xmm6 ^= xmm2 | ||
5206 | # asm 1: pxor <xmm2=int6464#3,<xmm6=int6464#7 | ||
5207 | # asm 2: pxor <xmm2=%xmm2,<xmm6=%xmm6 | ||
5208 | pxor %xmm2,%xmm6 | ||
5209 | |||
5210 | # qhasm: xmm3 ^= xmm0 | ||
5211 | # asm 1: pxor <xmm0=int6464#1,<xmm3=int6464#4 | ||
5212 | # asm 2: pxor <xmm0=%xmm0,<xmm3=%xmm3 | ||
5213 | pxor %xmm0,%xmm3 | ||
5214 | |||
5215 | # qhasm: xmm6 ^= xmm3 | ||
5216 | # asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7 | ||
5217 | # asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6 | ||
5218 | pxor %xmm3,%xmm6 | ||
5219 | |||
5220 | # qhasm: xmm3 ^= xmm7 | ||
5221 | # asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4 | ||
5222 | # asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3 | ||
5223 | pxor %xmm7,%xmm3 | ||
5224 | |||
5225 | # qhasm: xmm3 ^= xmm4 | ||
5226 | # asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4 | ||
5227 | # asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3 | ||
5228 | pxor %xmm4,%xmm3 | ||
5229 | |||
5230 | # qhasm: xmm7 ^= xmm5 | ||
5231 | # asm 1: pxor <xmm5=int6464#6,<xmm7=int6464#8 | ||
5232 | # asm 2: pxor <xmm5=%xmm5,<xmm7=%xmm7 | ||
5233 | pxor %xmm5,%xmm7 | ||
5234 | |||
5235 | # qhasm: xmm3 ^= xmm1 | ||
5236 | # asm 1: pxor <xmm1=int6464#2,<xmm3=int6464#4 | ||
5237 | # asm 2: pxor <xmm1=%xmm1,<xmm3=%xmm3 | ||
5238 | pxor %xmm1,%xmm3 | ||
5239 | |||
5240 | # qhasm: xmm4 ^= xmm5 | ||
5241 | # asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5 | ||
5242 | # asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4 | ||
5243 | pxor %xmm5,%xmm4 | ||
5244 | |||
5245 | # qhasm: xmm2 ^= xmm7 | ||
5246 | # asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3 | ||
5247 | # asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2 | ||
5248 | pxor %xmm7,%xmm2 | ||
5249 | |||
5250 | # qhasm: xmm1 ^= xmm5 | ||
5251 | # asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2 | ||
5252 | # asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1 | ||
5253 | pxor %xmm5,%xmm1 | ||
5254 | |||
5255 | # qhasm: xmm11 = xmm7 | ||
5256 | # asm 1: movdqa <xmm7=int6464#8,>xmm11=int6464#9 | ||
5257 | # asm 2: movdqa <xmm7=%xmm7,>xmm11=%xmm8 | ||
5258 | movdqa %xmm7,%xmm8 | ||
5259 | |||
5260 | # qhasm: xmm10 = xmm1 | ||
5261 | # asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10 | ||
5262 | # asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9 | ||
5263 | movdqa %xmm1,%xmm9 | ||
5264 | |||
5265 | # qhasm: xmm9 = xmm5 | ||
5266 | # asm 1: movdqa <xmm5=int6464#6,>xmm9=int6464#11 | ||
5267 | # asm 2: movdqa <xmm5=%xmm5,>xmm9=%xmm10 | ||
5268 | movdqa %xmm5,%xmm10 | ||
5269 | |||
5270 | # qhasm: xmm13 = xmm2 | ||
5271 | # asm 1: movdqa <xmm2=int6464#3,>xmm13=int6464#12 | ||
5272 | # asm 2: movdqa <xmm2=%xmm2,>xmm13=%xmm11 | ||
5273 | movdqa %xmm2,%xmm11 | ||
5274 | |||
5275 | # qhasm: xmm12 = xmm6 | ||
5276 | # asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#13 | ||
5277 | # asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm12 | ||
5278 | movdqa %xmm6,%xmm12 | ||
5279 | |||
5280 | # qhasm: xmm11 ^= xmm4 | ||
5281 | # asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#9 | ||
5282 | # asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm8 | ||
5283 | pxor %xmm4,%xmm8 | ||
5284 | |||
5285 | # qhasm: xmm10 ^= xmm2 | ||
5286 | # asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#10 | ||
5287 | # asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm9 | ||
5288 | pxor %xmm2,%xmm9 | ||
5289 | |||
5290 | # qhasm: xmm9 ^= xmm3 | ||
5291 | # asm 1: pxor <xmm3=int6464#4,<xmm9=int6464#11 | ||
5292 | # asm 2: pxor <xmm3=%xmm3,<xmm9=%xmm10 | ||
5293 | pxor %xmm3,%xmm10 | ||
5294 | |||
5295 | # qhasm: xmm13 ^= xmm4 | ||
5296 | # asm 1: pxor <xmm4=int6464#5,<xmm13=int6464#12 | ||
5297 | # asm 2: pxor <xmm4=%xmm4,<xmm13=%xmm11 | ||
5298 | pxor %xmm4,%xmm11 | ||
5299 | |||
5300 | # qhasm: xmm12 ^= xmm0 | ||
5301 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13 | ||
5302 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12 | ||
5303 | pxor %xmm0,%xmm12 | ||
5304 | |||
5305 | # qhasm: xmm14 = xmm11 | ||
5306 | # asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14 | ||
5307 | # asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13 | ||
5308 | movdqa %xmm8,%xmm13 | ||
5309 | |||
5310 | # qhasm: xmm8 = xmm10 | ||
5311 | # asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15 | ||
5312 | # asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14 | ||
5313 | movdqa %xmm9,%xmm14 | ||
5314 | |||
5315 | # qhasm: xmm15 = xmm11 | ||
5316 | # asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16 | ||
5317 | # asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15 | ||
5318 | movdqa %xmm8,%xmm15 | ||
5319 | |||
5320 | # qhasm: xmm10 |= xmm9 | ||
5321 | # asm 1: por <xmm9=int6464#11,<xmm10=int6464#10 | ||
5322 | # asm 2: por <xmm9=%xmm10,<xmm10=%xmm9 | ||
5323 | por %xmm10,%xmm9 | ||
5324 | |||
5325 | # qhasm: xmm11 |= xmm12 | ||
5326 | # asm 1: por <xmm12=int6464#13,<xmm11=int6464#9 | ||
5327 | # asm 2: por <xmm12=%xmm12,<xmm11=%xmm8 | ||
5328 | por %xmm12,%xmm8 | ||
5329 | |||
5330 | # qhasm: xmm15 ^= xmm8 | ||
5331 | # asm 1: pxor <xmm8=int6464#15,<xmm15=int6464#16 | ||
5332 | # asm 2: pxor <xmm8=%xmm14,<xmm15=%xmm15 | ||
5333 | pxor %xmm14,%xmm15 | ||
5334 | |||
5335 | # qhasm: xmm14 &= xmm12 | ||
5336 | # asm 1: pand <xmm12=int6464#13,<xmm14=int6464#14 | ||
5337 | # asm 2: pand <xmm12=%xmm12,<xmm14=%xmm13 | ||
5338 | pand %xmm12,%xmm13 | ||
5339 | |||
5340 | # qhasm: xmm8 &= xmm9 | ||
5341 | # asm 1: pand <xmm9=int6464#11,<xmm8=int6464#15 | ||
5342 | # asm 2: pand <xmm9=%xmm10,<xmm8=%xmm14 | ||
5343 | pand %xmm10,%xmm14 | ||
5344 | |||
5345 | # qhasm: xmm12 ^= xmm9 | ||
5346 | # asm 1: pxor <xmm9=int6464#11,<xmm12=int6464#13 | ||
5347 | # asm 2: pxor <xmm9=%xmm10,<xmm12=%xmm12 | ||
5348 | pxor %xmm10,%xmm12 | ||
5349 | |||
5350 | # qhasm: xmm15 &= xmm12 | ||
5351 | # asm 1: pand <xmm12=int6464#13,<xmm15=int6464#16 | ||
5352 | # asm 2: pand <xmm12=%xmm12,<xmm15=%xmm15 | ||
5353 | pand %xmm12,%xmm15 | ||
5354 | |||
5355 | # qhasm: xmm12 = xmm3 | ||
5356 | # asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#11 | ||
5357 | # asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm10 | ||
5358 | movdqa %xmm3,%xmm10 | ||
5359 | |||
5360 | # qhasm: xmm12 ^= xmm0 | ||
5361 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#11 | ||
5362 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm10 | ||
5363 | pxor %xmm0,%xmm10 | ||
5364 | |||
5365 | # qhasm: xmm13 &= xmm12 | ||
5366 | # asm 1: pand <xmm12=int6464#11,<xmm13=int6464#12 | ||
5367 | # asm 2: pand <xmm12=%xmm10,<xmm13=%xmm11 | ||
5368 | pand %xmm10,%xmm11 | ||
5369 | |||
5370 | # qhasm: xmm11 ^= xmm13 | ||
5371 | # asm 1: pxor <xmm13=int6464#12,<xmm11=int6464#9 | ||
5372 | # asm 2: pxor <xmm13=%xmm11,<xmm11=%xmm8 | ||
5373 | pxor %xmm11,%xmm8 | ||
5374 | |||
5375 | # qhasm: xmm10 ^= xmm13 | ||
5376 | # asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10 | ||
5377 | # asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9 | ||
5378 | pxor %xmm11,%xmm9 | ||
5379 | |||
5380 | # qhasm: xmm13 = xmm7 | ||
5381 | # asm 1: movdqa <xmm7=int6464#8,>xmm13=int6464#11 | ||
5382 | # asm 2: movdqa <xmm7=%xmm7,>xmm13=%xmm10 | ||
5383 | movdqa %xmm7,%xmm10 | ||
5384 | |||
5385 | # qhasm: xmm13 ^= xmm1 | ||
5386 | # asm 1: pxor <xmm1=int6464#2,<xmm13=int6464#11 | ||
5387 | # asm 2: pxor <xmm1=%xmm1,<xmm13=%xmm10 | ||
5388 | pxor %xmm1,%xmm10 | ||
5389 | |||
5390 | # qhasm: xmm12 = xmm5 | ||
5391 | # asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#12 | ||
5392 | # asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm11 | ||
5393 | movdqa %xmm5,%xmm11 | ||
5394 | |||
5395 | # qhasm: xmm9 = xmm13 | ||
5396 | # asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13 | ||
5397 | # asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12 | ||
5398 | movdqa %xmm10,%xmm12 | ||
5399 | |||
5400 | # qhasm: xmm12 ^= xmm6 | ||
5401 | # asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#12 | ||
5402 | # asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm11 | ||
5403 | pxor %xmm6,%xmm11 | ||
5404 | |||
5405 | # qhasm: xmm9 |= xmm12 | ||
5406 | # asm 1: por <xmm12=int6464#12,<xmm9=int6464#13 | ||
5407 | # asm 2: por <xmm12=%xmm11,<xmm9=%xmm12 | ||
5408 | por %xmm11,%xmm12 | ||
5409 | |||
5410 | # qhasm: xmm13 &= xmm12 | ||
5411 | # asm 1: pand <xmm12=int6464#12,<xmm13=int6464#11 | ||
5412 | # asm 2: pand <xmm12=%xmm11,<xmm13=%xmm10 | ||
5413 | pand %xmm11,%xmm10 | ||
5414 | |||
5415 | # qhasm: xmm8 ^= xmm13 | ||
5416 | # asm 1: pxor <xmm13=int6464#11,<xmm8=int6464#15 | ||
5417 | # asm 2: pxor <xmm13=%xmm10,<xmm8=%xmm14 | ||
5418 | pxor %xmm10,%xmm14 | ||
5419 | |||
5420 | # qhasm: xmm11 ^= xmm15 | ||
5421 | # asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#9 | ||
5422 | # asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm8 | ||
5423 | pxor %xmm15,%xmm8 | ||
5424 | |||
5425 | # qhasm: xmm10 ^= xmm14 | ||
5426 | # asm 1: pxor <xmm14=int6464#14,<xmm10=int6464#10 | ||
5427 | # asm 2: pxor <xmm14=%xmm13,<xmm10=%xmm9 | ||
5428 | pxor %xmm13,%xmm9 | ||
5429 | |||
5430 | # qhasm: xmm9 ^= xmm15 | ||
5431 | # asm 1: pxor <xmm15=int6464#16,<xmm9=int6464#13 | ||
5432 | # asm 2: pxor <xmm15=%xmm15,<xmm9=%xmm12 | ||
5433 | pxor %xmm15,%xmm12 | ||
5434 | |||
5435 | # qhasm: xmm8 ^= xmm14 | ||
5436 | # asm 1: pxor <xmm14=int6464#14,<xmm8=int6464#15 | ||
5437 | # asm 2: pxor <xmm14=%xmm13,<xmm8=%xmm14 | ||
5438 | pxor %xmm13,%xmm14 | ||
5439 | |||
5440 | # qhasm: xmm9 ^= xmm14 | ||
5441 | # asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13 | ||
5442 | # asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12 | ||
5443 | pxor %xmm13,%xmm12 | ||
5444 | |||
5445 | # qhasm: xmm12 = xmm2 | ||
5446 | # asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#11 | ||
5447 | # asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm10 | ||
5448 | movdqa %xmm2,%xmm10 | ||
5449 | |||
5450 | # qhasm: xmm13 = xmm4 | ||
5451 | # asm 1: movdqa <xmm4=int6464#5,>xmm13=int6464#12 | ||
5452 | # asm 2: movdqa <xmm4=%xmm4,>xmm13=%xmm11 | ||
5453 | movdqa %xmm4,%xmm11 | ||
5454 | |||
5455 | # qhasm: xmm14 = xmm1 | ||
5456 | # asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14 | ||
5457 | # asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13 | ||
5458 | movdqa %xmm1,%xmm13 | ||
5459 | |||
5460 | # qhasm: xmm15 = xmm7 | ||
5461 | # asm 1: movdqa <xmm7=int6464#8,>xmm15=int6464#16 | ||
5462 | # asm 2: movdqa <xmm7=%xmm7,>xmm15=%xmm15 | ||
5463 | movdqa %xmm7,%xmm15 | ||
5464 | |||
5465 | # qhasm: xmm12 &= xmm3 | ||
5466 | # asm 1: pand <xmm3=int6464#4,<xmm12=int6464#11 | ||
5467 | # asm 2: pand <xmm3=%xmm3,<xmm12=%xmm10 | ||
5468 | pand %xmm3,%xmm10 | ||
5469 | |||
5470 | # qhasm: xmm13 &= xmm0 | ||
5471 | # asm 1: pand <xmm0=int6464#1,<xmm13=int6464#12 | ||
5472 | # asm 2: pand <xmm0=%xmm0,<xmm13=%xmm11 | ||
5473 | pand %xmm0,%xmm11 | ||
5474 | |||
5475 | # qhasm: xmm14 &= xmm5 | ||
5476 | # asm 1: pand <xmm5=int6464#6,<xmm14=int6464#14 | ||
5477 | # asm 2: pand <xmm5=%xmm5,<xmm14=%xmm13 | ||
5478 | pand %xmm5,%xmm13 | ||
5479 | |||
5480 | # qhasm: xmm15 |= xmm6 | ||
5481 | # asm 1: por <xmm6=int6464#7,<xmm15=int6464#16 | ||
5482 | # asm 2: por <xmm6=%xmm6,<xmm15=%xmm15 | ||
5483 | por %xmm6,%xmm15 | ||
5484 | |||
5485 | # qhasm: xmm11 ^= xmm12 | ||
5486 | # asm 1: pxor <xmm12=int6464#11,<xmm11=int6464#9 | ||
5487 | # asm 2: pxor <xmm12=%xmm10,<xmm11=%xmm8 | ||
5488 | pxor %xmm10,%xmm8 | ||
5489 | |||
5490 | # qhasm: xmm10 ^= xmm13 | ||
5491 | # asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10 | ||
5492 | # asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9 | ||
5493 | pxor %xmm11,%xmm9 | ||
5494 | |||
5495 | # qhasm: xmm9 ^= xmm14 | ||
5496 | # asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13 | ||
5497 | # asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12 | ||
5498 | pxor %xmm13,%xmm12 | ||
5499 | |||
5500 | # qhasm: xmm8 ^= xmm15 | ||
5501 | # asm 1: pxor <xmm15=int6464#16,<xmm8=int6464#15 | ||
5502 | # asm 2: pxor <xmm15=%xmm15,<xmm8=%xmm14 | ||
5503 | pxor %xmm15,%xmm14 | ||
5504 | |||
5505 | # qhasm: xmm12 = xmm11 | ||
5506 | # asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11 | ||
5507 | # asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10 | ||
5508 | movdqa %xmm8,%xmm10 | ||
5509 | |||
5510 | # qhasm: xmm12 ^= xmm10 | ||
5511 | # asm 1: pxor <xmm10=int6464#10,<xmm12=int6464#11 | ||
5512 | # asm 2: pxor <xmm10=%xmm9,<xmm12=%xmm10 | ||
5513 | pxor %xmm9,%xmm10 | ||
5514 | |||
5515 | # qhasm: xmm11 &= xmm9 | ||
5516 | # asm 1: pand <xmm9=int6464#13,<xmm11=int6464#9 | ||
5517 | # asm 2: pand <xmm9=%xmm12,<xmm11=%xmm8 | ||
5518 | pand %xmm12,%xmm8 | ||
5519 | |||
5520 | # qhasm: xmm14 = xmm8 | ||
5521 | # asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12 | ||
5522 | # asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11 | ||
5523 | movdqa %xmm14,%xmm11 | ||
5524 | |||
5525 | # qhasm: xmm14 ^= xmm11 | ||
5526 | # asm 1: pxor <xmm11=int6464#9,<xmm14=int6464#12 | ||
5527 | # asm 2: pxor <xmm11=%xmm8,<xmm14=%xmm11 | ||
5528 | pxor %xmm8,%xmm11 | ||
5529 | |||
5530 | # qhasm: xmm15 = xmm12 | ||
5531 | # asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14 | ||
5532 | # asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13 | ||
5533 | movdqa %xmm10,%xmm13 | ||
5534 | |||
5535 | # qhasm: xmm15 &= xmm14 | ||
5536 | # asm 1: pand <xmm14=int6464#12,<xmm15=int6464#14 | ||
5537 | # asm 2: pand <xmm14=%xmm11,<xmm15=%xmm13 | ||
5538 | pand %xmm11,%xmm13 | ||
5539 | |||
5540 | # qhasm: xmm15 ^= xmm10 | ||
5541 | # asm 1: pxor <xmm10=int6464#10,<xmm15=int6464#14 | ||
5542 | # asm 2: pxor <xmm10=%xmm9,<xmm15=%xmm13 | ||
5543 | pxor %xmm9,%xmm13 | ||
5544 | |||
5545 | # qhasm: xmm13 = xmm9 | ||
5546 | # asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16 | ||
5547 | # asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15 | ||
5548 | movdqa %xmm12,%xmm15 | ||
5549 | |||
5550 | # qhasm: xmm13 ^= xmm8 | ||
5551 | # asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16 | ||
5552 | # asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15 | ||
5553 | pxor %xmm14,%xmm15 | ||
5554 | |||
5555 | # qhasm: xmm11 ^= xmm10 | ||
5556 | # asm 1: pxor <xmm10=int6464#10,<xmm11=int6464#9 | ||
5557 | # asm 2: pxor <xmm10=%xmm9,<xmm11=%xmm8 | ||
5558 | pxor %xmm9,%xmm8 | ||
5559 | |||
5560 | # qhasm: xmm13 &= xmm11 | ||
5561 | # asm 1: pand <xmm11=int6464#9,<xmm13=int6464#16 | ||
5562 | # asm 2: pand <xmm11=%xmm8,<xmm13=%xmm15 | ||
5563 | pand %xmm8,%xmm15 | ||
5564 | |||
5565 | # qhasm: xmm13 ^= xmm8 | ||
5566 | # asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16 | ||
5567 | # asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15 | ||
5568 | pxor %xmm14,%xmm15 | ||
5569 | |||
5570 | # qhasm: xmm9 ^= xmm13 | ||
5571 | # asm 1: pxor <xmm13=int6464#16,<xmm9=int6464#13 | ||
5572 | # asm 2: pxor <xmm13=%xmm15,<xmm9=%xmm12 | ||
5573 | pxor %xmm15,%xmm12 | ||
5574 | |||
5575 | # qhasm: xmm10 = xmm14 | ||
5576 | # asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9 | ||
5577 | # asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8 | ||
5578 | movdqa %xmm11,%xmm8 | ||
5579 | |||
5580 | # qhasm: xmm10 ^= xmm13 | ||
5581 | # asm 1: pxor <xmm13=int6464#16,<xmm10=int6464#9 | ||
5582 | # asm 2: pxor <xmm13=%xmm15,<xmm10=%xmm8 | ||
5583 | pxor %xmm15,%xmm8 | ||
5584 | |||
5585 | # qhasm: xmm10 &= xmm8 | ||
5586 | # asm 1: pand <xmm8=int6464#15,<xmm10=int6464#9 | ||
5587 | # asm 2: pand <xmm8=%xmm14,<xmm10=%xmm8 | ||
5588 | pand %xmm14,%xmm8 | ||
5589 | |||
5590 | # qhasm: xmm9 ^= xmm10 | ||
5591 | # asm 1: pxor <xmm10=int6464#9,<xmm9=int6464#13 | ||
5592 | # asm 2: pxor <xmm10=%xmm8,<xmm9=%xmm12 | ||
5593 | pxor %xmm8,%xmm12 | ||
5594 | |||
5595 | # qhasm: xmm14 ^= xmm10 | ||
5596 | # asm 1: pxor <xmm10=int6464#9,<xmm14=int6464#12 | ||
5597 | # asm 2: pxor <xmm10=%xmm8,<xmm14=%xmm11 | ||
5598 | pxor %xmm8,%xmm11 | ||
5599 | |||
5600 | # qhasm: xmm14 &= xmm15 | ||
5601 | # asm 1: pand <xmm15=int6464#14,<xmm14=int6464#12 | ||
5602 | # asm 2: pand <xmm15=%xmm13,<xmm14=%xmm11 | ||
5603 | pand %xmm13,%xmm11 | ||
5604 | |||
5605 | # qhasm: xmm14 ^= xmm12 | ||
5606 | # asm 1: pxor <xmm12=int6464#11,<xmm14=int6464#12 | ||
5607 | # asm 2: pxor <xmm12=%xmm10,<xmm14=%xmm11 | ||
5608 | pxor %xmm10,%xmm11 | ||
5609 | |||
5610 | # qhasm: xmm12 = xmm6 | ||
5611 | # asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#9 | ||
5612 | # asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm8 | ||
5613 | movdqa %xmm6,%xmm8 | ||
5614 | |||
5615 | # qhasm: xmm8 = xmm5 | ||
5616 | # asm 1: movdqa <xmm5=int6464#6,>xmm8=int6464#10 | ||
5617 | # asm 2: movdqa <xmm5=%xmm5,>xmm8=%xmm9 | ||
5618 | movdqa %xmm5,%xmm9 | ||
5619 | |||
5620 | # qhasm: xmm10 = xmm15 | ||
5621 | # asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11 | ||
5622 | # asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10 | ||
5623 | movdqa %xmm13,%xmm10 | ||
5624 | |||
5625 | # qhasm: xmm10 ^= xmm14 | ||
5626 | # asm 1: pxor <xmm14=int6464#12,<xmm10=int6464#11 | ||
5627 | # asm 2: pxor <xmm14=%xmm11,<xmm10=%xmm10 | ||
5628 | pxor %xmm11,%xmm10 | ||
5629 | |||
5630 | # qhasm: xmm10 &= xmm6 | ||
5631 | # asm 1: pand <xmm6=int6464#7,<xmm10=int6464#11 | ||
5632 | # asm 2: pand <xmm6=%xmm6,<xmm10=%xmm10 | ||
5633 | pand %xmm6,%xmm10 | ||
5634 | |||
5635 | # qhasm: xmm6 ^= xmm5 | ||
5636 | # asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7 | ||
5637 | # asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6 | ||
5638 | pxor %xmm5,%xmm6 | ||
5639 | |||
5640 | # qhasm: xmm6 &= xmm14 | ||
5641 | # asm 1: pand <xmm14=int6464#12,<xmm6=int6464#7 | ||
5642 | # asm 2: pand <xmm14=%xmm11,<xmm6=%xmm6 | ||
5643 | pand %xmm11,%xmm6 | ||
5644 | |||
5645 | # qhasm: xmm5 &= xmm15 | ||
5646 | # asm 1: pand <xmm15=int6464#14,<xmm5=int6464#6 | ||
5647 | # asm 2: pand <xmm15=%xmm13,<xmm5=%xmm5 | ||
5648 | pand %xmm13,%xmm5 | ||
5649 | |||
5650 | # qhasm: xmm6 ^= xmm5 | ||
5651 | # asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7 | ||
5652 | # asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6 | ||
5653 | pxor %xmm5,%xmm6 | ||
5654 | |||
5655 | # qhasm: xmm5 ^= xmm10 | ||
5656 | # asm 1: pxor <xmm10=int6464#11,<xmm5=int6464#6 | ||
5657 | # asm 2: pxor <xmm10=%xmm10,<xmm5=%xmm5 | ||
5658 | pxor %xmm10,%xmm5 | ||
5659 | |||
5660 | # qhasm: xmm12 ^= xmm0 | ||
5661 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#9 | ||
5662 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm8 | ||
5663 | pxor %xmm0,%xmm8 | ||
5664 | |||
5665 | # qhasm: xmm8 ^= xmm3 | ||
5666 | # asm 1: pxor <xmm3=int6464#4,<xmm8=int6464#10 | ||
5667 | # asm 2: pxor <xmm3=%xmm3,<xmm8=%xmm9 | ||
5668 | pxor %xmm3,%xmm9 | ||
5669 | |||
5670 | # qhasm: xmm15 ^= xmm13 | ||
5671 | # asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14 | ||
5672 | # asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13 | ||
5673 | pxor %xmm15,%xmm13 | ||
5674 | |||
5675 | # qhasm: xmm14 ^= xmm9 | ||
5676 | # asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12 | ||
5677 | # asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11 | ||
5678 | pxor %xmm12,%xmm11 | ||
5679 | |||
5680 | # qhasm: xmm11 = xmm15 | ||
5681 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
5682 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
5683 | movdqa %xmm13,%xmm10 | ||
5684 | |||
5685 | # qhasm: xmm11 ^= xmm14 | ||
5686 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
5687 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
5688 | pxor %xmm11,%xmm10 | ||
5689 | |||
5690 | # qhasm: xmm11 &= xmm12 | ||
5691 | # asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11 | ||
5692 | # asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10 | ||
5693 | pand %xmm8,%xmm10 | ||
5694 | |||
5695 | # qhasm: xmm12 ^= xmm8 | ||
5696 | # asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9 | ||
5697 | # asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8 | ||
5698 | pxor %xmm9,%xmm8 | ||
5699 | |||
5700 | # qhasm: xmm12 &= xmm14 | ||
5701 | # asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9 | ||
5702 | # asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8 | ||
5703 | pand %xmm11,%xmm8 | ||
5704 | |||
5705 | # qhasm: xmm8 &= xmm15 | ||
5706 | # asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10 | ||
5707 | # asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9 | ||
5708 | pand %xmm13,%xmm9 | ||
5709 | |||
5710 | # qhasm: xmm8 ^= xmm12 | ||
5711 | # asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10 | ||
5712 | # asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9 | ||
5713 | pxor %xmm8,%xmm9 | ||
5714 | |||
5715 | # qhasm: xmm12 ^= xmm11 | ||
5716 | # asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9 | ||
5717 | # asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8 | ||
5718 | pxor %xmm10,%xmm8 | ||
5719 | |||
5720 | # qhasm: xmm10 = xmm13 | ||
5721 | # asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11 | ||
5722 | # asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10 | ||
5723 | movdqa %xmm15,%xmm10 | ||
5724 | |||
5725 | # qhasm: xmm10 ^= xmm9 | ||
5726 | # asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11 | ||
5727 | # asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10 | ||
5728 | pxor %xmm12,%xmm10 | ||
5729 | |||
5730 | # qhasm: xmm10 &= xmm0 | ||
5731 | # asm 1: pand <xmm0=int6464#1,<xmm10=int6464#11 | ||
5732 | # asm 2: pand <xmm0=%xmm0,<xmm10=%xmm10 | ||
5733 | pand %xmm0,%xmm10 | ||
5734 | |||
5735 | # qhasm: xmm0 ^= xmm3 | ||
5736 | # asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1 | ||
5737 | # asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0 | ||
5738 | pxor %xmm3,%xmm0 | ||
5739 | |||
5740 | # qhasm: xmm0 &= xmm9 | ||
5741 | # asm 1: pand <xmm9=int6464#13,<xmm0=int6464#1 | ||
5742 | # asm 2: pand <xmm9=%xmm12,<xmm0=%xmm0 | ||
5743 | pand %xmm12,%xmm0 | ||
5744 | |||
5745 | # qhasm: xmm3 &= xmm13 | ||
5746 | # asm 1: pand <xmm13=int6464#16,<xmm3=int6464#4 | ||
5747 | # asm 2: pand <xmm13=%xmm15,<xmm3=%xmm3 | ||
5748 | pand %xmm15,%xmm3 | ||
5749 | |||
5750 | # qhasm: xmm0 ^= xmm3 | ||
5751 | # asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1 | ||
5752 | # asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0 | ||
5753 | pxor %xmm3,%xmm0 | ||
5754 | |||
5755 | # qhasm: xmm3 ^= xmm10 | ||
5756 | # asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4 | ||
5757 | # asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3 | ||
5758 | pxor %xmm10,%xmm3 | ||
5759 | |||
5760 | # qhasm: xmm6 ^= xmm12 | ||
5761 | # asm 1: pxor <xmm12=int6464#9,<xmm6=int6464#7 | ||
5762 | # asm 2: pxor <xmm12=%xmm8,<xmm6=%xmm6 | ||
5763 | pxor %xmm8,%xmm6 | ||
5764 | |||
5765 | # qhasm: xmm0 ^= xmm12 | ||
5766 | # asm 1: pxor <xmm12=int6464#9,<xmm0=int6464#1 | ||
5767 | # asm 2: pxor <xmm12=%xmm8,<xmm0=%xmm0 | ||
5768 | pxor %xmm8,%xmm0 | ||
5769 | |||
5770 | # qhasm: xmm5 ^= xmm8 | ||
5771 | # asm 1: pxor <xmm8=int6464#10,<xmm5=int6464#6 | ||
5772 | # asm 2: pxor <xmm8=%xmm9,<xmm5=%xmm5 | ||
5773 | pxor %xmm9,%xmm5 | ||
5774 | |||
5775 | # qhasm: xmm3 ^= xmm8 | ||
5776 | # asm 1: pxor <xmm8=int6464#10,<xmm3=int6464#4 | ||
5777 | # asm 2: pxor <xmm8=%xmm9,<xmm3=%xmm3 | ||
5778 | pxor %xmm9,%xmm3 | ||
5779 | |||
5780 | # qhasm: xmm12 = xmm7 | ||
5781 | # asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#9 | ||
5782 | # asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm8 | ||
5783 | movdqa %xmm7,%xmm8 | ||
5784 | |||
5785 | # qhasm: xmm8 = xmm1 | ||
5786 | # asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10 | ||
5787 | # asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9 | ||
5788 | movdqa %xmm1,%xmm9 | ||
5789 | |||
5790 | # qhasm: xmm12 ^= xmm4 | ||
5791 | # asm 1: pxor <xmm4=int6464#5,<xmm12=int6464#9 | ||
5792 | # asm 2: pxor <xmm4=%xmm4,<xmm12=%xmm8 | ||
5793 | pxor %xmm4,%xmm8 | ||
5794 | |||
5795 | # qhasm: xmm8 ^= xmm2 | ||
5796 | # asm 1: pxor <xmm2=int6464#3,<xmm8=int6464#10 | ||
5797 | # asm 2: pxor <xmm2=%xmm2,<xmm8=%xmm9 | ||
5798 | pxor %xmm2,%xmm9 | ||
5799 | |||
5800 | # qhasm: xmm11 = xmm15 | ||
5801 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
5802 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
5803 | movdqa %xmm13,%xmm10 | ||
5804 | |||
5805 | # qhasm: xmm11 ^= xmm14 | ||
5806 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
5807 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
5808 | pxor %xmm11,%xmm10 | ||
5809 | |||
5810 | # qhasm: xmm11 &= xmm12 | ||
5811 | # asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11 | ||
5812 | # asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10 | ||
5813 | pand %xmm8,%xmm10 | ||
5814 | |||
5815 | # qhasm: xmm12 ^= xmm8 | ||
5816 | # asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9 | ||
5817 | # asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8 | ||
5818 | pxor %xmm9,%xmm8 | ||
5819 | |||
5820 | # qhasm: xmm12 &= xmm14 | ||
5821 | # asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9 | ||
5822 | # asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8 | ||
5823 | pand %xmm11,%xmm8 | ||
5824 | |||
5825 | # qhasm: xmm8 &= xmm15 | ||
5826 | # asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10 | ||
5827 | # asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9 | ||
5828 | pand %xmm13,%xmm9 | ||
5829 | |||
5830 | # qhasm: xmm8 ^= xmm12 | ||
5831 | # asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10 | ||
5832 | # asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9 | ||
5833 | pxor %xmm8,%xmm9 | ||
5834 | |||
5835 | # qhasm: xmm12 ^= xmm11 | ||
5836 | # asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9 | ||
5837 | # asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8 | ||
5838 | pxor %xmm10,%xmm8 | ||
5839 | |||
5840 | # qhasm: xmm10 = xmm13 | ||
5841 | # asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11 | ||
5842 | # asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10 | ||
5843 | movdqa %xmm15,%xmm10 | ||
5844 | |||
5845 | # qhasm: xmm10 ^= xmm9 | ||
5846 | # asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11 | ||
5847 | # asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10 | ||
5848 | pxor %xmm12,%xmm10 | ||
5849 | |||
5850 | # qhasm: xmm10 &= xmm4 | ||
5851 | # asm 1: pand <xmm4=int6464#5,<xmm10=int6464#11 | ||
5852 | # asm 2: pand <xmm4=%xmm4,<xmm10=%xmm10 | ||
5853 | pand %xmm4,%xmm10 | ||
5854 | |||
5855 | # qhasm: xmm4 ^= xmm2 | ||
5856 | # asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5 | ||
5857 | # asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4 | ||
5858 | pxor %xmm2,%xmm4 | ||
5859 | |||
5860 | # qhasm: xmm4 &= xmm9 | ||
5861 | # asm 1: pand <xmm9=int6464#13,<xmm4=int6464#5 | ||
5862 | # asm 2: pand <xmm9=%xmm12,<xmm4=%xmm4 | ||
5863 | pand %xmm12,%xmm4 | ||
5864 | |||
5865 | # qhasm: xmm2 &= xmm13 | ||
5866 | # asm 1: pand <xmm13=int6464#16,<xmm2=int6464#3 | ||
5867 | # asm 2: pand <xmm13=%xmm15,<xmm2=%xmm2 | ||
5868 | pand %xmm15,%xmm2 | ||
5869 | |||
5870 | # qhasm: xmm4 ^= xmm2 | ||
5871 | # asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5 | ||
5872 | # asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4 | ||
5873 | pxor %xmm2,%xmm4 | ||
5874 | |||
5875 | # qhasm: xmm2 ^= xmm10 | ||
5876 | # asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3 | ||
5877 | # asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2 | ||
5878 | pxor %xmm10,%xmm2 | ||
5879 | |||
5880 | # qhasm: xmm15 ^= xmm13 | ||
5881 | # asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14 | ||
5882 | # asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13 | ||
5883 | pxor %xmm15,%xmm13 | ||
5884 | |||
5885 | # qhasm: xmm14 ^= xmm9 | ||
5886 | # asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12 | ||
5887 | # asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11 | ||
5888 | pxor %xmm12,%xmm11 | ||
5889 | |||
5890 | # qhasm: xmm11 = xmm15 | ||
5891 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
5892 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
5893 | movdqa %xmm13,%xmm10 | ||
5894 | |||
5895 | # qhasm: xmm11 ^= xmm14 | ||
5896 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
5897 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
5898 | pxor %xmm11,%xmm10 | ||
5899 | |||
5900 | # qhasm: xmm11 &= xmm7 | ||
5901 | # asm 1: pand <xmm7=int6464#8,<xmm11=int6464#11 | ||
5902 | # asm 2: pand <xmm7=%xmm7,<xmm11=%xmm10 | ||
5903 | pand %xmm7,%xmm10 | ||
5904 | |||
5905 | # qhasm: xmm7 ^= xmm1 | ||
5906 | # asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8 | ||
5907 | # asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7 | ||
5908 | pxor %xmm1,%xmm7 | ||
5909 | |||
5910 | # qhasm: xmm7 &= xmm14 | ||
5911 | # asm 1: pand <xmm14=int6464#12,<xmm7=int6464#8 | ||
5912 | # asm 2: pand <xmm14=%xmm11,<xmm7=%xmm7 | ||
5913 | pand %xmm11,%xmm7 | ||
5914 | |||
5915 | # qhasm: xmm1 &= xmm15 | ||
5916 | # asm 1: pand <xmm15=int6464#14,<xmm1=int6464#2 | ||
5917 | # asm 2: pand <xmm15=%xmm13,<xmm1=%xmm1 | ||
5918 | pand %xmm13,%xmm1 | ||
5919 | |||
5920 | # qhasm: xmm7 ^= xmm1 | ||
5921 | # asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8 | ||
5922 | # asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7 | ||
5923 | pxor %xmm1,%xmm7 | ||
5924 | |||
5925 | # qhasm: xmm1 ^= xmm11 | ||
5926 | # asm 1: pxor <xmm11=int6464#11,<xmm1=int6464#2 | ||
5927 | # asm 2: pxor <xmm11=%xmm10,<xmm1=%xmm1 | ||
5928 | pxor %xmm10,%xmm1 | ||
5929 | |||
5930 | # qhasm: xmm7 ^= xmm12 | ||
5931 | # asm 1: pxor <xmm12=int6464#9,<xmm7=int6464#8 | ||
5932 | # asm 2: pxor <xmm12=%xmm8,<xmm7=%xmm7 | ||
5933 | pxor %xmm8,%xmm7 | ||
5934 | |||
5935 | # qhasm: xmm4 ^= xmm12 | ||
5936 | # asm 1: pxor <xmm12=int6464#9,<xmm4=int6464#5 | ||
5937 | # asm 2: pxor <xmm12=%xmm8,<xmm4=%xmm4 | ||
5938 | pxor %xmm8,%xmm4 | ||
5939 | |||
5940 | # qhasm: xmm1 ^= xmm8 | ||
5941 | # asm 1: pxor <xmm8=int6464#10,<xmm1=int6464#2 | ||
5942 | # asm 2: pxor <xmm8=%xmm9,<xmm1=%xmm1 | ||
5943 | pxor %xmm9,%xmm1 | ||
5944 | |||
5945 | # qhasm: xmm2 ^= xmm8 | ||
5946 | # asm 1: pxor <xmm8=int6464#10,<xmm2=int6464#3 | ||
5947 | # asm 2: pxor <xmm8=%xmm9,<xmm2=%xmm2 | ||
5948 | pxor %xmm9,%xmm2 | ||
5949 | |||
5950 | # qhasm: xmm7 ^= xmm0 | ||
5951 | # asm 1: pxor <xmm0=int6464#1,<xmm7=int6464#8 | ||
5952 | # asm 2: pxor <xmm0=%xmm0,<xmm7=%xmm7 | ||
5953 | pxor %xmm0,%xmm7 | ||
5954 | |||
5955 | # qhasm: xmm1 ^= xmm6 | ||
5956 | # asm 1: pxor <xmm6=int6464#7,<xmm1=int6464#2 | ||
5957 | # asm 2: pxor <xmm6=%xmm6,<xmm1=%xmm1 | ||
5958 | pxor %xmm6,%xmm1 | ||
5959 | |||
5960 | # qhasm: xmm4 ^= xmm7 | ||
5961 | # asm 1: pxor <xmm7=int6464#8,<xmm4=int6464#5 | ||
5962 | # asm 2: pxor <xmm7=%xmm7,<xmm4=%xmm4 | ||
5963 | pxor %xmm7,%xmm4 | ||
5964 | |||
5965 | # qhasm: xmm6 ^= xmm0 | ||
5966 | # asm 1: pxor <xmm0=int6464#1,<xmm6=int6464#7 | ||
5967 | # asm 2: pxor <xmm0=%xmm0,<xmm6=%xmm6 | ||
5968 | pxor %xmm0,%xmm6 | ||
5969 | |||
5970 | # qhasm: xmm0 ^= xmm1 | ||
5971 | # asm 1: pxor <xmm1=int6464#2,<xmm0=int6464#1 | ||
5972 | # asm 2: pxor <xmm1=%xmm1,<xmm0=%xmm0 | ||
5973 | pxor %xmm1,%xmm0 | ||
5974 | |||
5975 | # qhasm: xmm1 ^= xmm5 | ||
5976 | # asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2 | ||
5977 | # asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1 | ||
5978 | pxor %xmm5,%xmm1 | ||
5979 | |||
5980 | # qhasm: xmm5 ^= xmm2 | ||
5981 | # asm 1: pxor <xmm2=int6464#3,<xmm5=int6464#6 | ||
5982 | # asm 2: pxor <xmm2=%xmm2,<xmm5=%xmm5 | ||
5983 | pxor %xmm2,%xmm5 | ||
5984 | |||
5985 | # qhasm: xmm4 ^= xmm5 | ||
5986 | # asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5 | ||
5987 | # asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4 | ||
5988 | pxor %xmm5,%xmm4 | ||
5989 | |||
5990 | # qhasm: xmm2 ^= xmm3 | ||
5991 | # asm 1: pxor <xmm3=int6464#4,<xmm2=int6464#3 | ||
5992 | # asm 2: pxor <xmm3=%xmm3,<xmm2=%xmm2 | ||
5993 | pxor %xmm3,%xmm2 | ||
5994 | |||
5995 | # qhasm: xmm3 ^= xmm5 | ||
5996 | # asm 1: pxor <xmm5=int6464#6,<xmm3=int6464#4 | ||
5997 | # asm 2: pxor <xmm5=%xmm5,<xmm3=%xmm3 | ||
5998 | pxor %xmm5,%xmm3 | ||
5999 | |||
6000 | # qhasm: xmm6 ^= xmm3 | ||
6001 | # asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7 | ||
6002 | # asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6 | ||
6003 | pxor %xmm3,%xmm6 | ||
6004 | |||
6005 | # qhasm: xmm8 = shuffle dwords of xmm0 by 0x93 | ||
6006 | # asm 1: pshufd $0x93,<xmm0=int6464#1,>xmm8=int6464#9 | ||
6007 | # asm 2: pshufd $0x93,<xmm0=%xmm0,>xmm8=%xmm8 | ||
6008 | pshufd $0x93,%xmm0,%xmm8 | ||
6009 | |||
6010 | # qhasm: xmm9 = shuffle dwords of xmm1 by 0x93 | ||
6011 | # asm 1: pshufd $0x93,<xmm1=int6464#2,>xmm9=int6464#10 | ||
6012 | # asm 2: pshufd $0x93,<xmm1=%xmm1,>xmm9=%xmm9 | ||
6013 | pshufd $0x93,%xmm1,%xmm9 | ||
6014 | |||
6015 | # qhasm: xmm10 = shuffle dwords of xmm4 by 0x93 | ||
6016 | # asm 1: pshufd $0x93,<xmm4=int6464#5,>xmm10=int6464#11 | ||
6017 | # asm 2: pshufd $0x93,<xmm4=%xmm4,>xmm10=%xmm10 | ||
6018 | pshufd $0x93,%xmm4,%xmm10 | ||
6019 | |||
6020 | # qhasm: xmm11 = shuffle dwords of xmm6 by 0x93 | ||
6021 | # asm 1: pshufd $0x93,<xmm6=int6464#7,>xmm11=int6464#12 | ||
6022 | # asm 2: pshufd $0x93,<xmm6=%xmm6,>xmm11=%xmm11 | ||
6023 | pshufd $0x93,%xmm6,%xmm11 | ||
6024 | |||
6025 | # qhasm: xmm12 = shuffle dwords of xmm3 by 0x93 | ||
6026 | # asm 1: pshufd $0x93,<xmm3=int6464#4,>xmm12=int6464#13 | ||
6027 | # asm 2: pshufd $0x93,<xmm3=%xmm3,>xmm12=%xmm12 | ||
6028 | pshufd $0x93,%xmm3,%xmm12 | ||
6029 | |||
6030 | # qhasm: xmm13 = shuffle dwords of xmm7 by 0x93 | ||
6031 | # asm 1: pshufd $0x93,<xmm7=int6464#8,>xmm13=int6464#14 | ||
6032 | # asm 2: pshufd $0x93,<xmm7=%xmm7,>xmm13=%xmm13 | ||
6033 | pshufd $0x93,%xmm7,%xmm13 | ||
6034 | |||
6035 | # qhasm: xmm14 = shuffle dwords of xmm2 by 0x93 | ||
6036 | # asm 1: pshufd $0x93,<xmm2=int6464#3,>xmm14=int6464#15 | ||
6037 | # asm 2: pshufd $0x93,<xmm2=%xmm2,>xmm14=%xmm14 | ||
6038 | pshufd $0x93,%xmm2,%xmm14 | ||
6039 | |||
6040 | # qhasm: xmm15 = shuffle dwords of xmm5 by 0x93 | ||
6041 | # asm 1: pshufd $0x93,<xmm5=int6464#6,>xmm15=int6464#16 | ||
6042 | # asm 2: pshufd $0x93,<xmm5=%xmm5,>xmm15=%xmm15 | ||
6043 | pshufd $0x93,%xmm5,%xmm15 | ||
6044 | |||
6045 | # qhasm: xmm0 ^= xmm8 | ||
6046 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
6047 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
6048 | pxor %xmm8,%xmm0 | ||
6049 | |||
6050 | # qhasm: xmm1 ^= xmm9 | ||
6051 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
6052 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
6053 | pxor %xmm9,%xmm1 | ||
6054 | |||
6055 | # qhasm: xmm4 ^= xmm10 | ||
6056 | # asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5 | ||
6057 | # asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4 | ||
6058 | pxor %xmm10,%xmm4 | ||
6059 | |||
6060 | # qhasm: xmm6 ^= xmm11 | ||
6061 | # asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7 | ||
6062 | # asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6 | ||
6063 | pxor %xmm11,%xmm6 | ||
6064 | |||
6065 | # qhasm: xmm3 ^= xmm12 | ||
6066 | # asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4 | ||
6067 | # asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3 | ||
6068 | pxor %xmm12,%xmm3 | ||
6069 | |||
6070 | # qhasm: xmm7 ^= xmm13 | ||
6071 | # asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8 | ||
6072 | # asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7 | ||
6073 | pxor %xmm13,%xmm7 | ||
6074 | |||
6075 | # qhasm: xmm2 ^= xmm14 | ||
6076 | # asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3 | ||
6077 | # asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2 | ||
6078 | pxor %xmm14,%xmm2 | ||
6079 | |||
6080 | # qhasm: xmm5 ^= xmm15 | ||
6081 | # asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6 | ||
6082 | # asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5 | ||
6083 | pxor %xmm15,%xmm5 | ||
6084 | |||
6085 | # qhasm: xmm8 ^= xmm5 | ||
6086 | # asm 1: pxor <xmm5=int6464#6,<xmm8=int6464#9 | ||
6087 | # asm 2: pxor <xmm5=%xmm5,<xmm8=%xmm8 | ||
6088 | pxor %xmm5,%xmm8 | ||
6089 | |||
6090 | # qhasm: xmm9 ^= xmm0 | ||
6091 | # asm 1: pxor <xmm0=int6464#1,<xmm9=int6464#10 | ||
6092 | # asm 2: pxor <xmm0=%xmm0,<xmm9=%xmm9 | ||
6093 | pxor %xmm0,%xmm9 | ||
6094 | |||
6095 | # qhasm: xmm10 ^= xmm1 | ||
6096 | # asm 1: pxor <xmm1=int6464#2,<xmm10=int6464#11 | ||
6097 | # asm 2: pxor <xmm1=%xmm1,<xmm10=%xmm10 | ||
6098 | pxor %xmm1,%xmm10 | ||
6099 | |||
6100 | # qhasm: xmm9 ^= xmm5 | ||
6101 | # asm 1: pxor <xmm5=int6464#6,<xmm9=int6464#10 | ||
6102 | # asm 2: pxor <xmm5=%xmm5,<xmm9=%xmm9 | ||
6103 | pxor %xmm5,%xmm9 | ||
6104 | |||
6105 | # qhasm: xmm11 ^= xmm4 | ||
6106 | # asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#12 | ||
6107 | # asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm11 | ||
6108 | pxor %xmm4,%xmm11 | ||
6109 | |||
6110 | # qhasm: xmm12 ^= xmm6 | ||
6111 | # asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#13 | ||
6112 | # asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm12 | ||
6113 | pxor %xmm6,%xmm12 | ||
6114 | |||
6115 | # qhasm: xmm13 ^= xmm3 | ||
6116 | # asm 1: pxor <xmm3=int6464#4,<xmm13=int6464#14 | ||
6117 | # asm 2: pxor <xmm3=%xmm3,<xmm13=%xmm13 | ||
6118 | pxor %xmm3,%xmm13 | ||
6119 | |||
6120 | # qhasm: xmm11 ^= xmm5 | ||
6121 | # asm 1: pxor <xmm5=int6464#6,<xmm11=int6464#12 | ||
6122 | # asm 2: pxor <xmm5=%xmm5,<xmm11=%xmm11 | ||
6123 | pxor %xmm5,%xmm11 | ||
6124 | |||
6125 | # qhasm: xmm14 ^= xmm7 | ||
6126 | # asm 1: pxor <xmm7=int6464#8,<xmm14=int6464#15 | ||
6127 | # asm 2: pxor <xmm7=%xmm7,<xmm14=%xmm14 | ||
6128 | pxor %xmm7,%xmm14 | ||
6129 | |||
6130 | # qhasm: xmm15 ^= xmm2 | ||
6131 | # asm 1: pxor <xmm2=int6464#3,<xmm15=int6464#16 | ||
6132 | # asm 2: pxor <xmm2=%xmm2,<xmm15=%xmm15 | ||
6133 | pxor %xmm2,%xmm15 | ||
6134 | |||
6135 | # qhasm: xmm12 ^= xmm5 | ||
6136 | # asm 1: pxor <xmm5=int6464#6,<xmm12=int6464#13 | ||
6137 | # asm 2: pxor <xmm5=%xmm5,<xmm12=%xmm12 | ||
6138 | pxor %xmm5,%xmm12 | ||
6139 | |||
6140 | # qhasm: xmm0 = shuffle dwords of xmm0 by 0x4E | ||
6141 | # asm 1: pshufd $0x4E,<xmm0=int6464#1,>xmm0=int6464#1 | ||
6142 | # asm 2: pshufd $0x4E,<xmm0=%xmm0,>xmm0=%xmm0 | ||
6143 | pshufd $0x4E,%xmm0,%xmm0 | ||
6144 | |||
6145 | # qhasm: xmm1 = shuffle dwords of xmm1 by 0x4E | ||
6146 | # asm 1: pshufd $0x4E,<xmm1=int6464#2,>xmm1=int6464#2 | ||
6147 | # asm 2: pshufd $0x4E,<xmm1=%xmm1,>xmm1=%xmm1 | ||
6148 | pshufd $0x4E,%xmm1,%xmm1 | ||
6149 | |||
6150 | # qhasm: xmm4 = shuffle dwords of xmm4 by 0x4E | ||
6151 | # asm 1: pshufd $0x4E,<xmm4=int6464#5,>xmm4=int6464#5 | ||
6152 | # asm 2: pshufd $0x4E,<xmm4=%xmm4,>xmm4=%xmm4 | ||
6153 | pshufd $0x4E,%xmm4,%xmm4 | ||
6154 | |||
6155 | # qhasm: xmm6 = shuffle dwords of xmm6 by 0x4E | ||
6156 | # asm 1: pshufd $0x4E,<xmm6=int6464#7,>xmm6=int6464#7 | ||
6157 | # asm 2: pshufd $0x4E,<xmm6=%xmm6,>xmm6=%xmm6 | ||
6158 | pshufd $0x4E,%xmm6,%xmm6 | ||
6159 | |||
6160 | # qhasm: xmm3 = shuffle dwords of xmm3 by 0x4E | ||
6161 | # asm 1: pshufd $0x4E,<xmm3=int6464#4,>xmm3=int6464#4 | ||
6162 | # asm 2: pshufd $0x4E,<xmm3=%xmm3,>xmm3=%xmm3 | ||
6163 | pshufd $0x4E,%xmm3,%xmm3 | ||
6164 | |||
6165 | # qhasm: xmm7 = shuffle dwords of xmm7 by 0x4E | ||
6166 | # asm 1: pshufd $0x4E,<xmm7=int6464#8,>xmm7=int6464#8 | ||
6167 | # asm 2: pshufd $0x4E,<xmm7=%xmm7,>xmm7=%xmm7 | ||
6168 | pshufd $0x4E,%xmm7,%xmm7 | ||
6169 | |||
6170 | # qhasm: xmm2 = shuffle dwords of xmm2 by 0x4E | ||
6171 | # asm 1: pshufd $0x4E,<xmm2=int6464#3,>xmm2=int6464#3 | ||
6172 | # asm 2: pshufd $0x4E,<xmm2=%xmm2,>xmm2=%xmm2 | ||
6173 | pshufd $0x4E,%xmm2,%xmm2 | ||
6174 | |||
6175 | # qhasm: xmm5 = shuffle dwords of xmm5 by 0x4E | ||
6176 | # asm 1: pshufd $0x4E,<xmm5=int6464#6,>xmm5=int6464#6 | ||
6177 | # asm 2: pshufd $0x4E,<xmm5=%xmm5,>xmm5=%xmm5 | ||
6178 | pshufd $0x4E,%xmm5,%xmm5 | ||
6179 | |||
6180 | # qhasm: xmm8 ^= xmm0 | ||
6181 | # asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9 | ||
6182 | # asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8 | ||
6183 | pxor %xmm0,%xmm8 | ||
6184 | |||
6185 | # qhasm: xmm9 ^= xmm1 | ||
6186 | # asm 1: pxor <xmm1=int6464#2,<xmm9=int6464#10 | ||
6187 | # asm 2: pxor <xmm1=%xmm1,<xmm9=%xmm9 | ||
6188 | pxor %xmm1,%xmm9 | ||
6189 | |||
6190 | # qhasm: xmm10 ^= xmm4 | ||
6191 | # asm 1: pxor <xmm4=int6464#5,<xmm10=int6464#11 | ||
6192 | # asm 2: pxor <xmm4=%xmm4,<xmm10=%xmm10 | ||
6193 | pxor %xmm4,%xmm10 | ||
6194 | |||
6195 | # qhasm: xmm11 ^= xmm6 | ||
6196 | # asm 1: pxor <xmm6=int6464#7,<xmm11=int6464#12 | ||
6197 | # asm 2: pxor <xmm6=%xmm6,<xmm11=%xmm11 | ||
6198 | pxor %xmm6,%xmm11 | ||
6199 | |||
6200 | # qhasm: xmm12 ^= xmm3 | ||
6201 | # asm 1: pxor <xmm3=int6464#4,<xmm12=int6464#13 | ||
6202 | # asm 2: pxor <xmm3=%xmm3,<xmm12=%xmm12 | ||
6203 | pxor %xmm3,%xmm12 | ||
6204 | |||
6205 | # qhasm: xmm13 ^= xmm7 | ||
6206 | # asm 1: pxor <xmm7=int6464#8,<xmm13=int6464#14 | ||
6207 | # asm 2: pxor <xmm7=%xmm7,<xmm13=%xmm13 | ||
6208 | pxor %xmm7,%xmm13 | ||
6209 | |||
6210 | # qhasm: xmm14 ^= xmm2 | ||
6211 | # asm 1: pxor <xmm2=int6464#3,<xmm14=int6464#15 | ||
6212 | # asm 2: pxor <xmm2=%xmm2,<xmm14=%xmm14 | ||
6213 | pxor %xmm2,%xmm14 | ||
6214 | |||
6215 | # qhasm: xmm15 ^= xmm5 | ||
6216 | # asm 1: pxor <xmm5=int6464#6,<xmm15=int6464#16 | ||
6217 | # asm 2: pxor <xmm5=%xmm5,<xmm15=%xmm15 | ||
6218 | pxor %xmm5,%xmm15 | ||
6219 | |||
6220 | # qhasm: xmm8 ^= *(int128 *)(c + 640) | ||
6221 | # asm 1: pxor 640(<c=int64#4),<xmm8=int6464#9 | ||
6222 | # asm 2: pxor 640(<c=%rcx),<xmm8=%xmm8 | ||
6223 | pxor 640(%rcx),%xmm8 | ||
6224 | |||
6225 | # qhasm: shuffle bytes of xmm8 by SR | ||
6226 | # asm 1: pshufb SR,<xmm8=int6464#9 | ||
6227 | # asm 2: pshufb SR,<xmm8=%xmm8 | ||
6228 | pshufb SR,%xmm8 | ||
6229 | |||
6230 | # qhasm: xmm9 ^= *(int128 *)(c + 656) | ||
6231 | # asm 1: pxor 656(<c=int64#4),<xmm9=int6464#10 | ||
6232 | # asm 2: pxor 656(<c=%rcx),<xmm9=%xmm9 | ||
6233 | pxor 656(%rcx),%xmm9 | ||
6234 | |||
6235 | # qhasm: shuffle bytes of xmm9 by SR | ||
6236 | # asm 1: pshufb SR,<xmm9=int6464#10 | ||
6237 | # asm 2: pshufb SR,<xmm9=%xmm9 | ||
6238 | pshufb SR,%xmm9 | ||
6239 | |||
6240 | # qhasm: xmm10 ^= *(int128 *)(c + 672) | ||
6241 | # asm 1: pxor 672(<c=int64#4),<xmm10=int6464#11 | ||
6242 | # asm 2: pxor 672(<c=%rcx),<xmm10=%xmm10 | ||
6243 | pxor 672(%rcx),%xmm10 | ||
6244 | |||
6245 | # qhasm: shuffle bytes of xmm10 by SR | ||
6246 | # asm 1: pshufb SR,<xmm10=int6464#11 | ||
6247 | # asm 2: pshufb SR,<xmm10=%xmm10 | ||
6248 | pshufb SR,%xmm10 | ||
6249 | |||
6250 | # qhasm: xmm11 ^= *(int128 *)(c + 688) | ||
6251 | # asm 1: pxor 688(<c=int64#4),<xmm11=int6464#12 | ||
6252 | # asm 2: pxor 688(<c=%rcx),<xmm11=%xmm11 | ||
6253 | pxor 688(%rcx),%xmm11 | ||
6254 | |||
6255 | # qhasm: shuffle bytes of xmm11 by SR | ||
6256 | # asm 1: pshufb SR,<xmm11=int6464#12 | ||
6257 | # asm 2: pshufb SR,<xmm11=%xmm11 | ||
6258 | pshufb SR,%xmm11 | ||
6259 | |||
6260 | # qhasm: xmm12 ^= *(int128 *)(c + 704) | ||
6261 | # asm 1: pxor 704(<c=int64#4),<xmm12=int6464#13 | ||
6262 | # asm 2: pxor 704(<c=%rcx),<xmm12=%xmm12 | ||
6263 | pxor 704(%rcx),%xmm12 | ||
6264 | |||
6265 | # qhasm: shuffle bytes of xmm12 by SR | ||
6266 | # asm 1: pshufb SR,<xmm12=int6464#13 | ||
6267 | # asm 2: pshufb SR,<xmm12=%xmm12 | ||
6268 | pshufb SR,%xmm12 | ||
6269 | |||
6270 | # qhasm: xmm13 ^= *(int128 *)(c + 720) | ||
6271 | # asm 1: pxor 720(<c=int64#4),<xmm13=int6464#14 | ||
6272 | # asm 2: pxor 720(<c=%rcx),<xmm13=%xmm13 | ||
6273 | pxor 720(%rcx),%xmm13 | ||
6274 | |||
6275 | # qhasm: shuffle bytes of xmm13 by SR | ||
6276 | # asm 1: pshufb SR,<xmm13=int6464#14 | ||
6277 | # asm 2: pshufb SR,<xmm13=%xmm13 | ||
6278 | pshufb SR,%xmm13 | ||
6279 | |||
6280 | # qhasm: xmm14 ^= *(int128 *)(c + 736) | ||
6281 | # asm 1: pxor 736(<c=int64#4),<xmm14=int6464#15 | ||
6282 | # asm 2: pxor 736(<c=%rcx),<xmm14=%xmm14 | ||
6283 | pxor 736(%rcx),%xmm14 | ||
6284 | |||
6285 | # qhasm: shuffle bytes of xmm14 by SR | ||
6286 | # asm 1: pshufb SR,<xmm14=int6464#15 | ||
6287 | # asm 2: pshufb SR,<xmm14=%xmm14 | ||
6288 | pshufb SR,%xmm14 | ||
6289 | |||
6290 | # qhasm: xmm15 ^= *(int128 *)(c + 752) | ||
6291 | # asm 1: pxor 752(<c=int64#4),<xmm15=int6464#16 | ||
6292 | # asm 2: pxor 752(<c=%rcx),<xmm15=%xmm15 | ||
6293 | pxor 752(%rcx),%xmm15 | ||
6294 | |||
6295 | # qhasm: shuffle bytes of xmm15 by SR | ||
6296 | # asm 1: pshufb SR,<xmm15=int6464#16 | ||
6297 | # asm 2: pshufb SR,<xmm15=%xmm15 | ||
6298 | pshufb SR,%xmm15 | ||
6299 | |||
6300 | # qhasm: xmm13 ^= xmm14 | ||
6301 | # asm 1: pxor <xmm14=int6464#15,<xmm13=int6464#14 | ||
6302 | # asm 2: pxor <xmm14=%xmm14,<xmm13=%xmm13 | ||
6303 | pxor %xmm14,%xmm13 | ||
6304 | |||
6305 | # qhasm: xmm10 ^= xmm9 | ||
6306 | # asm 1: pxor <xmm9=int6464#10,<xmm10=int6464#11 | ||
6307 | # asm 2: pxor <xmm9=%xmm9,<xmm10=%xmm10 | ||
6308 | pxor %xmm9,%xmm10 | ||
6309 | |||
6310 | # qhasm: xmm13 ^= xmm8 | ||
6311 | # asm 1: pxor <xmm8=int6464#9,<xmm13=int6464#14 | ||
6312 | # asm 2: pxor <xmm8=%xmm8,<xmm13=%xmm13 | ||
6313 | pxor %xmm8,%xmm13 | ||
6314 | |||
6315 | # qhasm: xmm14 ^= xmm10 | ||
6316 | # asm 1: pxor <xmm10=int6464#11,<xmm14=int6464#15 | ||
6317 | # asm 2: pxor <xmm10=%xmm10,<xmm14=%xmm14 | ||
6318 | pxor %xmm10,%xmm14 | ||
6319 | |||
6320 | # qhasm: xmm11 ^= xmm8 | ||
6321 | # asm 1: pxor <xmm8=int6464#9,<xmm11=int6464#12 | ||
6322 | # asm 2: pxor <xmm8=%xmm8,<xmm11=%xmm11 | ||
6323 | pxor %xmm8,%xmm11 | ||
6324 | |||
6325 | # qhasm: xmm14 ^= xmm11 | ||
6326 | # asm 1: pxor <xmm11=int6464#12,<xmm14=int6464#15 | ||
6327 | # asm 2: pxor <xmm11=%xmm11,<xmm14=%xmm14 | ||
6328 | pxor %xmm11,%xmm14 | ||
6329 | |||
6330 | # qhasm: xmm11 ^= xmm15 | ||
6331 | # asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#12 | ||
6332 | # asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm11 | ||
6333 | pxor %xmm15,%xmm11 | ||
6334 | |||
6335 | # qhasm: xmm11 ^= xmm12 | ||
6336 | # asm 1: pxor <xmm12=int6464#13,<xmm11=int6464#12 | ||
6337 | # asm 2: pxor <xmm12=%xmm12,<xmm11=%xmm11 | ||
6338 | pxor %xmm12,%xmm11 | ||
6339 | |||
6340 | # qhasm: xmm15 ^= xmm13 | ||
6341 | # asm 1: pxor <xmm13=int6464#14,<xmm15=int6464#16 | ||
6342 | # asm 2: pxor <xmm13=%xmm13,<xmm15=%xmm15 | ||
6343 | pxor %xmm13,%xmm15 | ||
6344 | |||
6345 | # qhasm: xmm11 ^= xmm9 | ||
6346 | # asm 1: pxor <xmm9=int6464#10,<xmm11=int6464#12 | ||
6347 | # asm 2: pxor <xmm9=%xmm9,<xmm11=%xmm11 | ||
6348 | pxor %xmm9,%xmm11 | ||
6349 | |||
6350 | # qhasm: xmm12 ^= xmm13 | ||
6351 | # asm 1: pxor <xmm13=int6464#14,<xmm12=int6464#13 | ||
6352 | # asm 2: pxor <xmm13=%xmm13,<xmm12=%xmm12 | ||
6353 | pxor %xmm13,%xmm12 | ||
6354 | |||
6355 | # qhasm: xmm10 ^= xmm15 | ||
6356 | # asm 1: pxor <xmm15=int6464#16,<xmm10=int6464#11 | ||
6357 | # asm 2: pxor <xmm15=%xmm15,<xmm10=%xmm10 | ||
6358 | pxor %xmm15,%xmm10 | ||
6359 | |||
6360 | # qhasm: xmm9 ^= xmm13 | ||
6361 | # asm 1: pxor <xmm13=int6464#14,<xmm9=int6464#10 | ||
6362 | # asm 2: pxor <xmm13=%xmm13,<xmm9=%xmm9 | ||
6363 | pxor %xmm13,%xmm9 | ||
6364 | |||
6365 | # qhasm: xmm3 = xmm15 | ||
6366 | # asm 1: movdqa <xmm15=int6464#16,>xmm3=int6464#1 | ||
6367 | # asm 2: movdqa <xmm15=%xmm15,>xmm3=%xmm0 | ||
6368 | movdqa %xmm15,%xmm0 | ||
6369 | |||
6370 | # qhasm: xmm2 = xmm9 | ||
6371 | # asm 1: movdqa <xmm9=int6464#10,>xmm2=int6464#2 | ||
6372 | # asm 2: movdqa <xmm9=%xmm9,>xmm2=%xmm1 | ||
6373 | movdqa %xmm9,%xmm1 | ||
6374 | |||
6375 | # qhasm: xmm1 = xmm13 | ||
6376 | # asm 1: movdqa <xmm13=int6464#14,>xmm1=int6464#3 | ||
6377 | # asm 2: movdqa <xmm13=%xmm13,>xmm1=%xmm2 | ||
6378 | movdqa %xmm13,%xmm2 | ||
6379 | |||
6380 | # qhasm: xmm5 = xmm10 | ||
6381 | # asm 1: movdqa <xmm10=int6464#11,>xmm5=int6464#4 | ||
6382 | # asm 2: movdqa <xmm10=%xmm10,>xmm5=%xmm3 | ||
6383 | movdqa %xmm10,%xmm3 | ||
6384 | |||
6385 | # qhasm: xmm4 = xmm14 | ||
6386 | # asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#5 | ||
6387 | # asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm4 | ||
6388 | movdqa %xmm14,%xmm4 | ||
6389 | |||
6390 | # qhasm: xmm3 ^= xmm12 | ||
6391 | # asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#1 | ||
6392 | # asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm0 | ||
6393 | pxor %xmm12,%xmm0 | ||
6394 | |||
6395 | # qhasm: xmm2 ^= xmm10 | ||
6396 | # asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#2 | ||
6397 | # asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm1 | ||
6398 | pxor %xmm10,%xmm1 | ||
6399 | |||
6400 | # qhasm: xmm1 ^= xmm11 | ||
6401 | # asm 1: pxor <xmm11=int6464#12,<xmm1=int6464#3 | ||
6402 | # asm 2: pxor <xmm11=%xmm11,<xmm1=%xmm2 | ||
6403 | pxor %xmm11,%xmm2 | ||
6404 | |||
6405 | # qhasm: xmm5 ^= xmm12 | ||
6406 | # asm 1: pxor <xmm12=int6464#13,<xmm5=int6464#4 | ||
6407 | # asm 2: pxor <xmm12=%xmm12,<xmm5=%xmm3 | ||
6408 | pxor %xmm12,%xmm3 | ||
6409 | |||
6410 | # qhasm: xmm4 ^= xmm8 | ||
6411 | # asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#5 | ||
6412 | # asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm4 | ||
6413 | pxor %xmm8,%xmm4 | ||
6414 | |||
6415 | # qhasm: xmm6 = xmm3 | ||
6416 | # asm 1: movdqa <xmm3=int6464#1,>xmm6=int6464#6 | ||
6417 | # asm 2: movdqa <xmm3=%xmm0,>xmm6=%xmm5 | ||
6418 | movdqa %xmm0,%xmm5 | ||
6419 | |||
6420 | # qhasm: xmm0 = xmm2 | ||
6421 | # asm 1: movdqa <xmm2=int6464#2,>xmm0=int6464#7 | ||
6422 | # asm 2: movdqa <xmm2=%xmm1,>xmm0=%xmm6 | ||
6423 | movdqa %xmm1,%xmm6 | ||
6424 | |||
6425 | # qhasm: xmm7 = xmm3 | ||
6426 | # asm 1: movdqa <xmm3=int6464#1,>xmm7=int6464#8 | ||
6427 | # asm 2: movdqa <xmm3=%xmm0,>xmm7=%xmm7 | ||
6428 | movdqa %xmm0,%xmm7 | ||
6429 | |||
6430 | # qhasm: xmm2 |= xmm1 | ||
6431 | # asm 1: por <xmm1=int6464#3,<xmm2=int6464#2 | ||
6432 | # asm 2: por <xmm1=%xmm2,<xmm2=%xmm1 | ||
6433 | por %xmm2,%xmm1 | ||
6434 | |||
6435 | # qhasm: xmm3 |= xmm4 | ||
6436 | # asm 1: por <xmm4=int6464#5,<xmm3=int6464#1 | ||
6437 | # asm 2: por <xmm4=%xmm4,<xmm3=%xmm0 | ||
6438 | por %xmm4,%xmm0 | ||
6439 | |||
6440 | # qhasm: xmm7 ^= xmm0 | ||
6441 | # asm 1: pxor <xmm0=int6464#7,<xmm7=int6464#8 | ||
6442 | # asm 2: pxor <xmm0=%xmm6,<xmm7=%xmm7 | ||
6443 | pxor %xmm6,%xmm7 | ||
6444 | |||
6445 | # qhasm: xmm6 &= xmm4 | ||
6446 | # asm 1: pand <xmm4=int6464#5,<xmm6=int6464#6 | ||
6447 | # asm 2: pand <xmm4=%xmm4,<xmm6=%xmm5 | ||
6448 | pand %xmm4,%xmm5 | ||
6449 | |||
6450 | # qhasm: xmm0 &= xmm1 | ||
6451 | # asm 1: pand <xmm1=int6464#3,<xmm0=int6464#7 | ||
6452 | # asm 2: pand <xmm1=%xmm2,<xmm0=%xmm6 | ||
6453 | pand %xmm2,%xmm6 | ||
6454 | |||
6455 | # qhasm: xmm4 ^= xmm1 | ||
6456 | # asm 1: pxor <xmm1=int6464#3,<xmm4=int6464#5 | ||
6457 | # asm 2: pxor <xmm1=%xmm2,<xmm4=%xmm4 | ||
6458 | pxor %xmm2,%xmm4 | ||
6459 | |||
6460 | # qhasm: xmm7 &= xmm4 | ||
6461 | # asm 1: pand <xmm4=int6464#5,<xmm7=int6464#8 | ||
6462 | # asm 2: pand <xmm4=%xmm4,<xmm7=%xmm7 | ||
6463 | pand %xmm4,%xmm7 | ||
6464 | |||
6465 | # qhasm: xmm4 = xmm11 | ||
6466 | # asm 1: movdqa <xmm11=int6464#12,>xmm4=int6464#3 | ||
6467 | # asm 2: movdqa <xmm11=%xmm11,>xmm4=%xmm2 | ||
6468 | movdqa %xmm11,%xmm2 | ||
6469 | |||
6470 | # qhasm: xmm4 ^= xmm8 | ||
6471 | # asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#3 | ||
6472 | # asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm2 | ||
6473 | pxor %xmm8,%xmm2 | ||
6474 | |||
6475 | # qhasm: xmm5 &= xmm4 | ||
6476 | # asm 1: pand <xmm4=int6464#3,<xmm5=int6464#4 | ||
6477 | # asm 2: pand <xmm4=%xmm2,<xmm5=%xmm3 | ||
6478 | pand %xmm2,%xmm3 | ||
6479 | |||
6480 | # qhasm: xmm3 ^= xmm5 | ||
6481 | # asm 1: pxor <xmm5=int6464#4,<xmm3=int6464#1 | ||
6482 | # asm 2: pxor <xmm5=%xmm3,<xmm3=%xmm0 | ||
6483 | pxor %xmm3,%xmm0 | ||
6484 | |||
6485 | # qhasm: xmm2 ^= xmm5 | ||
6486 | # asm 1: pxor <xmm5=int6464#4,<xmm2=int6464#2 | ||
6487 | # asm 2: pxor <xmm5=%xmm3,<xmm2=%xmm1 | ||
6488 | pxor %xmm3,%xmm1 | ||
6489 | |||
6490 | # qhasm: xmm5 = xmm15 | ||
6491 | # asm 1: movdqa <xmm15=int6464#16,>xmm5=int6464#3 | ||
6492 | # asm 2: movdqa <xmm15=%xmm15,>xmm5=%xmm2 | ||
6493 | movdqa %xmm15,%xmm2 | ||
6494 | |||
6495 | # qhasm: xmm5 ^= xmm9 | ||
6496 | # asm 1: pxor <xmm9=int6464#10,<xmm5=int6464#3 | ||
6497 | # asm 2: pxor <xmm9=%xmm9,<xmm5=%xmm2 | ||
6498 | pxor %xmm9,%xmm2 | ||
6499 | |||
6500 | # qhasm: xmm4 = xmm13 | ||
6501 | # asm 1: movdqa <xmm13=int6464#14,>xmm4=int6464#4 | ||
6502 | # asm 2: movdqa <xmm13=%xmm13,>xmm4=%xmm3 | ||
6503 | movdqa %xmm13,%xmm3 | ||
6504 | |||
6505 | # qhasm: xmm1 = xmm5 | ||
6506 | # asm 1: movdqa <xmm5=int6464#3,>xmm1=int6464#5 | ||
6507 | # asm 2: movdqa <xmm5=%xmm2,>xmm1=%xmm4 | ||
6508 | movdqa %xmm2,%xmm4 | ||
6509 | |||
6510 | # qhasm: xmm4 ^= xmm14 | ||
6511 | # asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#4 | ||
6512 | # asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm3 | ||
6513 | pxor %xmm14,%xmm3 | ||
6514 | |||
6515 | # qhasm: xmm1 |= xmm4 | ||
6516 | # asm 1: por <xmm4=int6464#4,<xmm1=int6464#5 | ||
6517 | # asm 2: por <xmm4=%xmm3,<xmm1=%xmm4 | ||
6518 | por %xmm3,%xmm4 | ||
6519 | |||
6520 | # qhasm: xmm5 &= xmm4 | ||
6521 | # asm 1: pand <xmm4=int6464#4,<xmm5=int6464#3 | ||
6522 | # asm 2: pand <xmm4=%xmm3,<xmm5=%xmm2 | ||
6523 | pand %xmm3,%xmm2 | ||
6524 | |||
6525 | # qhasm: xmm0 ^= xmm5 | ||
6526 | # asm 1: pxor <xmm5=int6464#3,<xmm0=int6464#7 | ||
6527 | # asm 2: pxor <xmm5=%xmm2,<xmm0=%xmm6 | ||
6528 | pxor %xmm2,%xmm6 | ||
6529 | |||
6530 | # qhasm: xmm3 ^= xmm7 | ||
6531 | # asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#1 | ||
6532 | # asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm0 | ||
6533 | pxor %xmm7,%xmm0 | ||
6534 | |||
6535 | # qhasm: xmm2 ^= xmm6 | ||
6536 | # asm 1: pxor <xmm6=int6464#6,<xmm2=int6464#2 | ||
6537 | # asm 2: pxor <xmm6=%xmm5,<xmm2=%xmm1 | ||
6538 | pxor %xmm5,%xmm1 | ||
6539 | |||
6540 | # qhasm: xmm1 ^= xmm7 | ||
6541 | # asm 1: pxor <xmm7=int6464#8,<xmm1=int6464#5 | ||
6542 | # asm 2: pxor <xmm7=%xmm7,<xmm1=%xmm4 | ||
6543 | pxor %xmm7,%xmm4 | ||
6544 | |||
6545 | # qhasm: xmm0 ^= xmm6 | ||
6546 | # asm 1: pxor <xmm6=int6464#6,<xmm0=int6464#7 | ||
6547 | # asm 2: pxor <xmm6=%xmm5,<xmm0=%xmm6 | ||
6548 | pxor %xmm5,%xmm6 | ||
6549 | |||
6550 | # qhasm: xmm1 ^= xmm6 | ||
6551 | # asm 1: pxor <xmm6=int6464#6,<xmm1=int6464#5 | ||
6552 | # asm 2: pxor <xmm6=%xmm5,<xmm1=%xmm4 | ||
6553 | pxor %xmm5,%xmm4 | ||
6554 | |||
6555 | # qhasm: xmm4 = xmm10 | ||
6556 | # asm 1: movdqa <xmm10=int6464#11,>xmm4=int6464#3 | ||
6557 | # asm 2: movdqa <xmm10=%xmm10,>xmm4=%xmm2 | ||
6558 | movdqa %xmm10,%xmm2 | ||
6559 | |||
6560 | # qhasm: xmm5 = xmm12 | ||
6561 | # asm 1: movdqa <xmm12=int6464#13,>xmm5=int6464#4 | ||
6562 | # asm 2: movdqa <xmm12=%xmm12,>xmm5=%xmm3 | ||
6563 | movdqa %xmm12,%xmm3 | ||
6564 | |||
6565 | # qhasm: xmm6 = xmm9 | ||
6566 | # asm 1: movdqa <xmm9=int6464#10,>xmm6=int6464#6 | ||
6567 | # asm 2: movdqa <xmm9=%xmm9,>xmm6=%xmm5 | ||
6568 | movdqa %xmm9,%xmm5 | ||
6569 | |||
6570 | # qhasm: xmm7 = xmm15 | ||
6571 | # asm 1: movdqa <xmm15=int6464#16,>xmm7=int6464#8 | ||
6572 | # asm 2: movdqa <xmm15=%xmm15,>xmm7=%xmm7 | ||
6573 | movdqa %xmm15,%xmm7 | ||
6574 | |||
6575 | # qhasm: xmm4 &= xmm11 | ||
6576 | # asm 1: pand <xmm11=int6464#12,<xmm4=int6464#3 | ||
6577 | # asm 2: pand <xmm11=%xmm11,<xmm4=%xmm2 | ||
6578 | pand %xmm11,%xmm2 | ||
6579 | |||
6580 | # qhasm: xmm5 &= xmm8 | ||
6581 | # asm 1: pand <xmm8=int6464#9,<xmm5=int6464#4 | ||
6582 | # asm 2: pand <xmm8=%xmm8,<xmm5=%xmm3 | ||
6583 | pand %xmm8,%xmm3 | ||
6584 | |||
6585 | # qhasm: xmm6 &= xmm13 | ||
6586 | # asm 1: pand <xmm13=int6464#14,<xmm6=int6464#6 | ||
6587 | # asm 2: pand <xmm13=%xmm13,<xmm6=%xmm5 | ||
6588 | pand %xmm13,%xmm5 | ||
6589 | |||
6590 | # qhasm: xmm7 |= xmm14 | ||
6591 | # asm 1: por <xmm14=int6464#15,<xmm7=int6464#8 | ||
6592 | # asm 2: por <xmm14=%xmm14,<xmm7=%xmm7 | ||
6593 | por %xmm14,%xmm7 | ||
6594 | |||
6595 | # qhasm: xmm3 ^= xmm4 | ||
6596 | # asm 1: pxor <xmm4=int6464#3,<xmm3=int6464#1 | ||
6597 | # asm 2: pxor <xmm4=%xmm2,<xmm3=%xmm0 | ||
6598 | pxor %xmm2,%xmm0 | ||
6599 | |||
6600 | # qhasm: xmm2 ^= xmm5 | ||
6601 | # asm 1: pxor <xmm5=int6464#4,<xmm2=int6464#2 | ||
6602 | # asm 2: pxor <xmm5=%xmm3,<xmm2=%xmm1 | ||
6603 | pxor %xmm3,%xmm1 | ||
6604 | |||
6605 | # qhasm: xmm1 ^= xmm6 | ||
6606 | # asm 1: pxor <xmm6=int6464#6,<xmm1=int6464#5 | ||
6607 | # asm 2: pxor <xmm6=%xmm5,<xmm1=%xmm4 | ||
6608 | pxor %xmm5,%xmm4 | ||
6609 | |||
6610 | # qhasm: xmm0 ^= xmm7 | ||
6611 | # asm 1: pxor <xmm7=int6464#8,<xmm0=int6464#7 | ||
6612 | # asm 2: pxor <xmm7=%xmm7,<xmm0=%xmm6 | ||
6613 | pxor %xmm7,%xmm6 | ||
6614 | |||
6615 | # qhasm: xmm4 = xmm3 | ||
6616 | # asm 1: movdqa <xmm3=int6464#1,>xmm4=int6464#3 | ||
6617 | # asm 2: movdqa <xmm3=%xmm0,>xmm4=%xmm2 | ||
6618 | movdqa %xmm0,%xmm2 | ||
6619 | |||
6620 | # qhasm: xmm4 ^= xmm2 | ||
6621 | # asm 1: pxor <xmm2=int6464#2,<xmm4=int6464#3 | ||
6622 | # asm 2: pxor <xmm2=%xmm1,<xmm4=%xmm2 | ||
6623 | pxor %xmm1,%xmm2 | ||
6624 | |||
6625 | # qhasm: xmm3 &= xmm1 | ||
6626 | # asm 1: pand <xmm1=int6464#5,<xmm3=int6464#1 | ||
6627 | # asm 2: pand <xmm1=%xmm4,<xmm3=%xmm0 | ||
6628 | pand %xmm4,%xmm0 | ||
6629 | |||
6630 | # qhasm: xmm6 = xmm0 | ||
6631 | # asm 1: movdqa <xmm0=int6464#7,>xmm6=int6464#4 | ||
6632 | # asm 2: movdqa <xmm0=%xmm6,>xmm6=%xmm3 | ||
6633 | movdqa %xmm6,%xmm3 | ||
6634 | |||
6635 | # qhasm: xmm6 ^= xmm3 | ||
6636 | # asm 1: pxor <xmm3=int6464#1,<xmm6=int6464#4 | ||
6637 | # asm 2: pxor <xmm3=%xmm0,<xmm6=%xmm3 | ||
6638 | pxor %xmm0,%xmm3 | ||
6639 | |||
6640 | # qhasm: xmm7 = xmm4 | ||
6641 | # asm 1: movdqa <xmm4=int6464#3,>xmm7=int6464#6 | ||
6642 | # asm 2: movdqa <xmm4=%xmm2,>xmm7=%xmm5 | ||
6643 | movdqa %xmm2,%xmm5 | ||
6644 | |||
6645 | # qhasm: xmm7 &= xmm6 | ||
6646 | # asm 1: pand <xmm6=int6464#4,<xmm7=int6464#6 | ||
6647 | # asm 2: pand <xmm6=%xmm3,<xmm7=%xmm5 | ||
6648 | pand %xmm3,%xmm5 | ||
6649 | |||
6650 | # qhasm: xmm7 ^= xmm2 | ||
6651 | # asm 1: pxor <xmm2=int6464#2,<xmm7=int6464#6 | ||
6652 | # asm 2: pxor <xmm2=%xmm1,<xmm7=%xmm5 | ||
6653 | pxor %xmm1,%xmm5 | ||
6654 | |||
6655 | # qhasm: xmm5 = xmm1 | ||
6656 | # asm 1: movdqa <xmm1=int6464#5,>xmm5=int6464#8 | ||
6657 | # asm 2: movdqa <xmm1=%xmm4,>xmm5=%xmm7 | ||
6658 | movdqa %xmm4,%xmm7 | ||
6659 | |||
6660 | # qhasm: xmm5 ^= xmm0 | ||
6661 | # asm 1: pxor <xmm0=int6464#7,<xmm5=int6464#8 | ||
6662 | # asm 2: pxor <xmm0=%xmm6,<xmm5=%xmm7 | ||
6663 | pxor %xmm6,%xmm7 | ||
6664 | |||
6665 | # qhasm: xmm3 ^= xmm2 | ||
6666 | # asm 1: pxor <xmm2=int6464#2,<xmm3=int6464#1 | ||
6667 | # asm 2: pxor <xmm2=%xmm1,<xmm3=%xmm0 | ||
6668 | pxor %xmm1,%xmm0 | ||
6669 | |||
6670 | # qhasm: xmm5 &= xmm3 | ||
6671 | # asm 1: pand <xmm3=int6464#1,<xmm5=int6464#8 | ||
6672 | # asm 2: pand <xmm3=%xmm0,<xmm5=%xmm7 | ||
6673 | pand %xmm0,%xmm7 | ||
6674 | |||
6675 | # qhasm: xmm5 ^= xmm0 | ||
6676 | # asm 1: pxor <xmm0=int6464#7,<xmm5=int6464#8 | ||
6677 | # asm 2: pxor <xmm0=%xmm6,<xmm5=%xmm7 | ||
6678 | pxor %xmm6,%xmm7 | ||
6679 | |||
6680 | # qhasm: xmm1 ^= xmm5 | ||
6681 | # asm 1: pxor <xmm5=int6464#8,<xmm1=int6464#5 | ||
6682 | # asm 2: pxor <xmm5=%xmm7,<xmm1=%xmm4 | ||
6683 | pxor %xmm7,%xmm4 | ||
6684 | |||
6685 | # qhasm: xmm2 = xmm6 | ||
6686 | # asm 1: movdqa <xmm6=int6464#4,>xmm2=int6464#1 | ||
6687 | # asm 2: movdqa <xmm6=%xmm3,>xmm2=%xmm0 | ||
6688 | movdqa %xmm3,%xmm0 | ||
6689 | |||
6690 | # qhasm: xmm2 ^= xmm5 | ||
6691 | # asm 1: pxor <xmm5=int6464#8,<xmm2=int6464#1 | ||
6692 | # asm 2: pxor <xmm5=%xmm7,<xmm2=%xmm0 | ||
6693 | pxor %xmm7,%xmm0 | ||
6694 | |||
6695 | # qhasm: xmm2 &= xmm0 | ||
6696 | # asm 1: pand <xmm0=int6464#7,<xmm2=int6464#1 | ||
6697 | # asm 2: pand <xmm0=%xmm6,<xmm2=%xmm0 | ||
6698 | pand %xmm6,%xmm0 | ||
6699 | |||
6700 | # qhasm: xmm1 ^= xmm2 | ||
6701 | # asm 1: pxor <xmm2=int6464#1,<xmm1=int6464#5 | ||
6702 | # asm 2: pxor <xmm2=%xmm0,<xmm1=%xmm4 | ||
6703 | pxor %xmm0,%xmm4 | ||
6704 | |||
6705 | # qhasm: xmm6 ^= xmm2 | ||
6706 | # asm 1: pxor <xmm2=int6464#1,<xmm6=int6464#4 | ||
6707 | # asm 2: pxor <xmm2=%xmm0,<xmm6=%xmm3 | ||
6708 | pxor %xmm0,%xmm3 | ||
6709 | |||
6710 | # qhasm: xmm6 &= xmm7 | ||
6711 | # asm 1: pand <xmm7=int6464#6,<xmm6=int6464#4 | ||
6712 | # asm 2: pand <xmm7=%xmm5,<xmm6=%xmm3 | ||
6713 | pand %xmm5,%xmm3 | ||
6714 | |||
6715 | # qhasm: xmm6 ^= xmm4 | ||
6716 | # asm 1: pxor <xmm4=int6464#3,<xmm6=int6464#4 | ||
6717 | # asm 2: pxor <xmm4=%xmm2,<xmm6=%xmm3 | ||
6718 | pxor %xmm2,%xmm3 | ||
6719 | |||
6720 | # qhasm: xmm4 = xmm14 | ||
6721 | # asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#1 | ||
6722 | # asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm0 | ||
6723 | movdqa %xmm14,%xmm0 | ||
6724 | |||
6725 | # qhasm: xmm0 = xmm13 | ||
6726 | # asm 1: movdqa <xmm13=int6464#14,>xmm0=int6464#2 | ||
6727 | # asm 2: movdqa <xmm13=%xmm13,>xmm0=%xmm1 | ||
6728 | movdqa %xmm13,%xmm1 | ||
6729 | |||
6730 | # qhasm: xmm2 = xmm7 | ||
6731 | # asm 1: movdqa <xmm7=int6464#6,>xmm2=int6464#3 | ||
6732 | # asm 2: movdqa <xmm7=%xmm5,>xmm2=%xmm2 | ||
6733 | movdqa %xmm5,%xmm2 | ||
6734 | |||
6735 | # qhasm: xmm2 ^= xmm6 | ||
6736 | # asm 1: pxor <xmm6=int6464#4,<xmm2=int6464#3 | ||
6737 | # asm 2: pxor <xmm6=%xmm3,<xmm2=%xmm2 | ||
6738 | pxor %xmm3,%xmm2 | ||
6739 | |||
6740 | # qhasm: xmm2 &= xmm14 | ||
6741 | # asm 1: pand <xmm14=int6464#15,<xmm2=int6464#3 | ||
6742 | # asm 2: pand <xmm14=%xmm14,<xmm2=%xmm2 | ||
6743 | pand %xmm14,%xmm2 | ||
6744 | |||
6745 | # qhasm: xmm14 ^= xmm13 | ||
6746 | # asm 1: pxor <xmm13=int6464#14,<xmm14=int6464#15 | ||
6747 | # asm 2: pxor <xmm13=%xmm13,<xmm14=%xmm14 | ||
6748 | pxor %xmm13,%xmm14 | ||
6749 | |||
6750 | # qhasm: xmm14 &= xmm6 | ||
6751 | # asm 1: pand <xmm6=int6464#4,<xmm14=int6464#15 | ||
6752 | # asm 2: pand <xmm6=%xmm3,<xmm14=%xmm14 | ||
6753 | pand %xmm3,%xmm14 | ||
6754 | |||
6755 | # qhasm: xmm13 &= xmm7 | ||
6756 | # asm 1: pand <xmm7=int6464#6,<xmm13=int6464#14 | ||
6757 | # asm 2: pand <xmm7=%xmm5,<xmm13=%xmm13 | ||
6758 | pand %xmm5,%xmm13 | ||
6759 | |||
6760 | # qhasm: xmm14 ^= xmm13 | ||
6761 | # asm 1: pxor <xmm13=int6464#14,<xmm14=int6464#15 | ||
6762 | # asm 2: pxor <xmm13=%xmm13,<xmm14=%xmm14 | ||
6763 | pxor %xmm13,%xmm14 | ||
6764 | |||
6765 | # qhasm: xmm13 ^= xmm2 | ||
6766 | # asm 1: pxor <xmm2=int6464#3,<xmm13=int6464#14 | ||
6767 | # asm 2: pxor <xmm2=%xmm2,<xmm13=%xmm13 | ||
6768 | pxor %xmm2,%xmm13 | ||
6769 | |||
6770 | # qhasm: xmm4 ^= xmm8 | ||
6771 | # asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#1 | ||
6772 | # asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm0 | ||
6773 | pxor %xmm8,%xmm0 | ||
6774 | |||
6775 | # qhasm: xmm0 ^= xmm11 | ||
6776 | # asm 1: pxor <xmm11=int6464#12,<xmm0=int6464#2 | ||
6777 | # asm 2: pxor <xmm11=%xmm11,<xmm0=%xmm1 | ||
6778 | pxor %xmm11,%xmm1 | ||
6779 | |||
6780 | # qhasm: xmm7 ^= xmm5 | ||
6781 | # asm 1: pxor <xmm5=int6464#8,<xmm7=int6464#6 | ||
6782 | # asm 2: pxor <xmm5=%xmm7,<xmm7=%xmm5 | ||
6783 | pxor %xmm7,%xmm5 | ||
6784 | |||
6785 | # qhasm: xmm6 ^= xmm1 | ||
6786 | # asm 1: pxor <xmm1=int6464#5,<xmm6=int6464#4 | ||
6787 | # asm 2: pxor <xmm1=%xmm4,<xmm6=%xmm3 | ||
6788 | pxor %xmm4,%xmm3 | ||
6789 | |||
6790 | # qhasm: xmm3 = xmm7 | ||
6791 | # asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3 | ||
6792 | # asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2 | ||
6793 | movdqa %xmm5,%xmm2 | ||
6794 | |||
6795 | # qhasm: xmm3 ^= xmm6 | ||
6796 | # asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3 | ||
6797 | # asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2 | ||
6798 | pxor %xmm3,%xmm2 | ||
6799 | |||
6800 | # qhasm: xmm3 &= xmm4 | ||
6801 | # asm 1: pand <xmm4=int6464#1,<xmm3=int6464#3 | ||
6802 | # asm 2: pand <xmm4=%xmm0,<xmm3=%xmm2 | ||
6803 | pand %xmm0,%xmm2 | ||
6804 | |||
6805 | # qhasm: xmm4 ^= xmm0 | ||
6806 | # asm 1: pxor <xmm0=int6464#2,<xmm4=int6464#1 | ||
6807 | # asm 2: pxor <xmm0=%xmm1,<xmm4=%xmm0 | ||
6808 | pxor %xmm1,%xmm0 | ||
6809 | |||
6810 | # qhasm: xmm4 &= xmm6 | ||
6811 | # asm 1: pand <xmm6=int6464#4,<xmm4=int6464#1 | ||
6812 | # asm 2: pand <xmm6=%xmm3,<xmm4=%xmm0 | ||
6813 | pand %xmm3,%xmm0 | ||
6814 | |||
6815 | # qhasm: xmm0 &= xmm7 | ||
6816 | # asm 1: pand <xmm7=int6464#6,<xmm0=int6464#2 | ||
6817 | # asm 2: pand <xmm7=%xmm5,<xmm0=%xmm1 | ||
6818 | pand %xmm5,%xmm1 | ||
6819 | |||
6820 | # qhasm: xmm0 ^= xmm4 | ||
6821 | # asm 1: pxor <xmm4=int6464#1,<xmm0=int6464#2 | ||
6822 | # asm 2: pxor <xmm4=%xmm0,<xmm0=%xmm1 | ||
6823 | pxor %xmm0,%xmm1 | ||
6824 | |||
6825 | # qhasm: xmm4 ^= xmm3 | ||
6826 | # asm 1: pxor <xmm3=int6464#3,<xmm4=int6464#1 | ||
6827 | # asm 2: pxor <xmm3=%xmm2,<xmm4=%xmm0 | ||
6828 | pxor %xmm2,%xmm0 | ||
6829 | |||
6830 | # qhasm: xmm2 = xmm5 | ||
6831 | # asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3 | ||
6832 | # asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2 | ||
6833 | movdqa %xmm7,%xmm2 | ||
6834 | |||
6835 | # qhasm: xmm2 ^= xmm1 | ||
6836 | # asm 1: pxor <xmm1=int6464#5,<xmm2=int6464#3 | ||
6837 | # asm 2: pxor <xmm1=%xmm4,<xmm2=%xmm2 | ||
6838 | pxor %xmm4,%xmm2 | ||
6839 | |||
6840 | # qhasm: xmm2 &= xmm8 | ||
6841 | # asm 1: pand <xmm8=int6464#9,<xmm2=int6464#3 | ||
6842 | # asm 2: pand <xmm8=%xmm8,<xmm2=%xmm2 | ||
6843 | pand %xmm8,%xmm2 | ||
6844 | |||
6845 | # qhasm: xmm8 ^= xmm11 | ||
6846 | # asm 1: pxor <xmm11=int6464#12,<xmm8=int6464#9 | ||
6847 | # asm 2: pxor <xmm11=%xmm11,<xmm8=%xmm8 | ||
6848 | pxor %xmm11,%xmm8 | ||
6849 | |||
6850 | # qhasm: xmm8 &= xmm1 | ||
6851 | # asm 1: pand <xmm1=int6464#5,<xmm8=int6464#9 | ||
6852 | # asm 2: pand <xmm1=%xmm4,<xmm8=%xmm8 | ||
6853 | pand %xmm4,%xmm8 | ||
6854 | |||
6855 | # qhasm: xmm11 &= xmm5 | ||
6856 | # asm 1: pand <xmm5=int6464#8,<xmm11=int6464#12 | ||
6857 | # asm 2: pand <xmm5=%xmm7,<xmm11=%xmm11 | ||
6858 | pand %xmm7,%xmm11 | ||
6859 | |||
6860 | # qhasm: xmm8 ^= xmm11 | ||
6861 | # asm 1: pxor <xmm11=int6464#12,<xmm8=int6464#9 | ||
6862 | # asm 2: pxor <xmm11=%xmm11,<xmm8=%xmm8 | ||
6863 | pxor %xmm11,%xmm8 | ||
6864 | |||
6865 | # qhasm: xmm11 ^= xmm2 | ||
6866 | # asm 1: pxor <xmm2=int6464#3,<xmm11=int6464#12 | ||
6867 | # asm 2: pxor <xmm2=%xmm2,<xmm11=%xmm11 | ||
6868 | pxor %xmm2,%xmm11 | ||
6869 | |||
6870 | # qhasm: xmm14 ^= xmm4 | ||
6871 | # asm 1: pxor <xmm4=int6464#1,<xmm14=int6464#15 | ||
6872 | # asm 2: pxor <xmm4=%xmm0,<xmm14=%xmm14 | ||
6873 | pxor %xmm0,%xmm14 | ||
6874 | |||
6875 | # qhasm: xmm8 ^= xmm4 | ||
6876 | # asm 1: pxor <xmm4=int6464#1,<xmm8=int6464#9 | ||
6877 | # asm 2: pxor <xmm4=%xmm0,<xmm8=%xmm8 | ||
6878 | pxor %xmm0,%xmm8 | ||
6879 | |||
6880 | # qhasm: xmm13 ^= xmm0 | ||
6881 | # asm 1: pxor <xmm0=int6464#2,<xmm13=int6464#14 | ||
6882 | # asm 2: pxor <xmm0=%xmm1,<xmm13=%xmm13 | ||
6883 | pxor %xmm1,%xmm13 | ||
6884 | |||
6885 | # qhasm: xmm11 ^= xmm0 | ||
6886 | # asm 1: pxor <xmm0=int6464#2,<xmm11=int6464#12 | ||
6887 | # asm 2: pxor <xmm0=%xmm1,<xmm11=%xmm11 | ||
6888 | pxor %xmm1,%xmm11 | ||
6889 | |||
6890 | # qhasm: xmm4 = xmm15 | ||
6891 | # asm 1: movdqa <xmm15=int6464#16,>xmm4=int6464#1 | ||
6892 | # asm 2: movdqa <xmm15=%xmm15,>xmm4=%xmm0 | ||
6893 | movdqa %xmm15,%xmm0 | ||
6894 | |||
6895 | # qhasm: xmm0 = xmm9 | ||
6896 | # asm 1: movdqa <xmm9=int6464#10,>xmm0=int6464#2 | ||
6897 | # asm 2: movdqa <xmm9=%xmm9,>xmm0=%xmm1 | ||
6898 | movdqa %xmm9,%xmm1 | ||
6899 | |||
6900 | # qhasm: xmm4 ^= xmm12 | ||
6901 | # asm 1: pxor <xmm12=int6464#13,<xmm4=int6464#1 | ||
6902 | # asm 2: pxor <xmm12=%xmm12,<xmm4=%xmm0 | ||
6903 | pxor %xmm12,%xmm0 | ||
6904 | |||
6905 | # qhasm: xmm0 ^= xmm10 | ||
6906 | # asm 1: pxor <xmm10=int6464#11,<xmm0=int6464#2 | ||
6907 | # asm 2: pxor <xmm10=%xmm10,<xmm0=%xmm1 | ||
6908 | pxor %xmm10,%xmm1 | ||
6909 | |||
6910 | # qhasm: xmm3 = xmm7 | ||
6911 | # asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3 | ||
6912 | # asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2 | ||
6913 | movdqa %xmm5,%xmm2 | ||
6914 | |||
6915 | # qhasm: xmm3 ^= xmm6 | ||
6916 | # asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3 | ||
6917 | # asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2 | ||
6918 | pxor %xmm3,%xmm2 | ||
6919 | |||
6920 | # qhasm: xmm3 &= xmm4 | ||
6921 | # asm 1: pand <xmm4=int6464#1,<xmm3=int6464#3 | ||
6922 | # asm 2: pand <xmm4=%xmm0,<xmm3=%xmm2 | ||
6923 | pand %xmm0,%xmm2 | ||
6924 | |||
6925 | # qhasm: xmm4 ^= xmm0 | ||
6926 | # asm 1: pxor <xmm0=int6464#2,<xmm4=int6464#1 | ||
6927 | # asm 2: pxor <xmm0=%xmm1,<xmm4=%xmm0 | ||
6928 | pxor %xmm1,%xmm0 | ||
6929 | |||
6930 | # qhasm: xmm4 &= xmm6 | ||
6931 | # asm 1: pand <xmm6=int6464#4,<xmm4=int6464#1 | ||
6932 | # asm 2: pand <xmm6=%xmm3,<xmm4=%xmm0 | ||
6933 | pand %xmm3,%xmm0 | ||
6934 | |||
6935 | # qhasm: xmm0 &= xmm7 | ||
6936 | # asm 1: pand <xmm7=int6464#6,<xmm0=int6464#2 | ||
6937 | # asm 2: pand <xmm7=%xmm5,<xmm0=%xmm1 | ||
6938 | pand %xmm5,%xmm1 | ||
6939 | |||
6940 | # qhasm: xmm0 ^= xmm4 | ||
6941 | # asm 1: pxor <xmm4=int6464#1,<xmm0=int6464#2 | ||
6942 | # asm 2: pxor <xmm4=%xmm0,<xmm0=%xmm1 | ||
6943 | pxor %xmm0,%xmm1 | ||
6944 | |||
6945 | # qhasm: xmm4 ^= xmm3 | ||
6946 | # asm 1: pxor <xmm3=int6464#3,<xmm4=int6464#1 | ||
6947 | # asm 2: pxor <xmm3=%xmm2,<xmm4=%xmm0 | ||
6948 | pxor %xmm2,%xmm0 | ||
6949 | |||
6950 | # qhasm: xmm2 = xmm5 | ||
6951 | # asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3 | ||
6952 | # asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2 | ||
6953 | movdqa %xmm7,%xmm2 | ||
6954 | |||
6955 | # qhasm: xmm2 ^= xmm1 | ||
6956 | # asm 1: pxor <xmm1=int6464#5,<xmm2=int6464#3 | ||
6957 | # asm 2: pxor <xmm1=%xmm4,<xmm2=%xmm2 | ||
6958 | pxor %xmm4,%xmm2 | ||
6959 | |||
6960 | # qhasm: xmm2 &= xmm12 | ||
6961 | # asm 1: pand <xmm12=int6464#13,<xmm2=int6464#3 | ||
6962 | # asm 2: pand <xmm12=%xmm12,<xmm2=%xmm2 | ||
6963 | pand %xmm12,%xmm2 | ||
6964 | |||
6965 | # qhasm: xmm12 ^= xmm10 | ||
6966 | # asm 1: pxor <xmm10=int6464#11,<xmm12=int6464#13 | ||
6967 | # asm 2: pxor <xmm10=%xmm10,<xmm12=%xmm12 | ||
6968 | pxor %xmm10,%xmm12 | ||
6969 | |||
6970 | # qhasm: xmm12 &= xmm1 | ||
6971 | # asm 1: pand <xmm1=int6464#5,<xmm12=int6464#13 | ||
6972 | # asm 2: pand <xmm1=%xmm4,<xmm12=%xmm12 | ||
6973 | pand %xmm4,%xmm12 | ||
6974 | |||
6975 | # qhasm: xmm10 &= xmm5 | ||
6976 | # asm 1: pand <xmm5=int6464#8,<xmm10=int6464#11 | ||
6977 | # asm 2: pand <xmm5=%xmm7,<xmm10=%xmm10 | ||
6978 | pand %xmm7,%xmm10 | ||
6979 | |||
6980 | # qhasm: xmm12 ^= xmm10 | ||
6981 | # asm 1: pxor <xmm10=int6464#11,<xmm12=int6464#13 | ||
6982 | # asm 2: pxor <xmm10=%xmm10,<xmm12=%xmm12 | ||
6983 | pxor %xmm10,%xmm12 | ||
6984 | |||
6985 | # qhasm: xmm10 ^= xmm2 | ||
6986 | # asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#11 | ||
6987 | # asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm10 | ||
6988 | pxor %xmm2,%xmm10 | ||
6989 | |||
6990 | # qhasm: xmm7 ^= xmm5 | ||
6991 | # asm 1: pxor <xmm5=int6464#8,<xmm7=int6464#6 | ||
6992 | # asm 2: pxor <xmm5=%xmm7,<xmm7=%xmm5 | ||
6993 | pxor %xmm7,%xmm5 | ||
6994 | |||
6995 | # qhasm: xmm6 ^= xmm1 | ||
6996 | # asm 1: pxor <xmm1=int6464#5,<xmm6=int6464#4 | ||
6997 | # asm 2: pxor <xmm1=%xmm4,<xmm6=%xmm3 | ||
6998 | pxor %xmm4,%xmm3 | ||
6999 | |||
7000 | # qhasm: xmm3 = xmm7 | ||
7001 | # asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3 | ||
7002 | # asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2 | ||
7003 | movdqa %xmm5,%xmm2 | ||
7004 | |||
7005 | # qhasm: xmm3 ^= xmm6 | ||
7006 | # asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3 | ||
7007 | # asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2 | ||
7008 | pxor %xmm3,%xmm2 | ||
7009 | |||
7010 | # qhasm: xmm3 &= xmm15 | ||
7011 | # asm 1: pand <xmm15=int6464#16,<xmm3=int6464#3 | ||
7012 | # asm 2: pand <xmm15=%xmm15,<xmm3=%xmm2 | ||
7013 | pand %xmm15,%xmm2 | ||
7014 | |||
7015 | # qhasm: xmm15 ^= xmm9 | ||
7016 | # asm 1: pxor <xmm9=int6464#10,<xmm15=int6464#16 | ||
7017 | # asm 2: pxor <xmm9=%xmm9,<xmm15=%xmm15 | ||
7018 | pxor %xmm9,%xmm15 | ||
7019 | |||
7020 | # qhasm: xmm15 &= xmm6 | ||
7021 | # asm 1: pand <xmm6=int6464#4,<xmm15=int6464#16 | ||
7022 | # asm 2: pand <xmm6=%xmm3,<xmm15=%xmm15 | ||
7023 | pand %xmm3,%xmm15 | ||
7024 | |||
7025 | # qhasm: xmm9 &= xmm7 | ||
7026 | # asm 1: pand <xmm7=int6464#6,<xmm9=int6464#10 | ||
7027 | # asm 2: pand <xmm7=%xmm5,<xmm9=%xmm9 | ||
7028 | pand %xmm5,%xmm9 | ||
7029 | |||
7030 | # qhasm: xmm15 ^= xmm9 | ||
7031 | # asm 1: pxor <xmm9=int6464#10,<xmm15=int6464#16 | ||
7032 | # asm 2: pxor <xmm9=%xmm9,<xmm15=%xmm15 | ||
7033 | pxor %xmm9,%xmm15 | ||
7034 | |||
7035 | # qhasm: xmm9 ^= xmm3 | ||
7036 | # asm 1: pxor <xmm3=int6464#3,<xmm9=int6464#10 | ||
7037 | # asm 2: pxor <xmm3=%xmm2,<xmm9=%xmm9 | ||
7038 | pxor %xmm2,%xmm9 | ||
7039 | |||
7040 | # qhasm: xmm15 ^= xmm4 | ||
7041 | # asm 1: pxor <xmm4=int6464#1,<xmm15=int6464#16 | ||
7042 | # asm 2: pxor <xmm4=%xmm0,<xmm15=%xmm15 | ||
7043 | pxor %xmm0,%xmm15 | ||
7044 | |||
7045 | # qhasm: xmm12 ^= xmm4 | ||
7046 | # asm 1: pxor <xmm4=int6464#1,<xmm12=int6464#13 | ||
7047 | # asm 2: pxor <xmm4=%xmm0,<xmm12=%xmm12 | ||
7048 | pxor %xmm0,%xmm12 | ||
7049 | |||
7050 | # qhasm: xmm9 ^= xmm0 | ||
7051 | # asm 1: pxor <xmm0=int6464#2,<xmm9=int6464#10 | ||
7052 | # asm 2: pxor <xmm0=%xmm1,<xmm9=%xmm9 | ||
7053 | pxor %xmm1,%xmm9 | ||
7054 | |||
7055 | # qhasm: xmm10 ^= xmm0 | ||
7056 | # asm 1: pxor <xmm0=int6464#2,<xmm10=int6464#11 | ||
7057 | # asm 2: pxor <xmm0=%xmm1,<xmm10=%xmm10 | ||
7058 | pxor %xmm1,%xmm10 | ||
7059 | |||
7060 | # qhasm: xmm15 ^= xmm8 | ||
7061 | # asm 1: pxor <xmm8=int6464#9,<xmm15=int6464#16 | ||
7062 | # asm 2: pxor <xmm8=%xmm8,<xmm15=%xmm15 | ||
7063 | pxor %xmm8,%xmm15 | ||
7064 | |||
7065 | # qhasm: xmm9 ^= xmm14 | ||
7066 | # asm 1: pxor <xmm14=int6464#15,<xmm9=int6464#10 | ||
7067 | # asm 2: pxor <xmm14=%xmm14,<xmm9=%xmm9 | ||
7068 | pxor %xmm14,%xmm9 | ||
7069 | |||
7070 | # qhasm: xmm12 ^= xmm15 | ||
7071 | # asm 1: pxor <xmm15=int6464#16,<xmm12=int6464#13 | ||
7072 | # asm 2: pxor <xmm15=%xmm15,<xmm12=%xmm12 | ||
7073 | pxor %xmm15,%xmm12 | ||
7074 | |||
7075 | # qhasm: xmm14 ^= xmm8 | ||
7076 | # asm 1: pxor <xmm8=int6464#9,<xmm14=int6464#15 | ||
7077 | # asm 2: pxor <xmm8=%xmm8,<xmm14=%xmm14 | ||
7078 | pxor %xmm8,%xmm14 | ||
7079 | |||
7080 | # qhasm: xmm8 ^= xmm9 | ||
7081 | # asm 1: pxor <xmm9=int6464#10,<xmm8=int6464#9 | ||
7082 | # asm 2: pxor <xmm9=%xmm9,<xmm8=%xmm8 | ||
7083 | pxor %xmm9,%xmm8 | ||
7084 | |||
7085 | # qhasm: xmm9 ^= xmm13 | ||
7086 | # asm 1: pxor <xmm13=int6464#14,<xmm9=int6464#10 | ||
7087 | # asm 2: pxor <xmm13=%xmm13,<xmm9=%xmm9 | ||
7088 | pxor %xmm13,%xmm9 | ||
7089 | |||
7090 | # qhasm: xmm13 ^= xmm10 | ||
7091 | # asm 1: pxor <xmm10=int6464#11,<xmm13=int6464#14 | ||
7092 | # asm 2: pxor <xmm10=%xmm10,<xmm13=%xmm13 | ||
7093 | pxor %xmm10,%xmm13 | ||
7094 | |||
7095 | # qhasm: xmm12 ^= xmm13 | ||
7096 | # asm 1: pxor <xmm13=int6464#14,<xmm12=int6464#13 | ||
7097 | # asm 2: pxor <xmm13=%xmm13,<xmm12=%xmm12 | ||
7098 | pxor %xmm13,%xmm12 | ||
7099 | |||
7100 | # qhasm: xmm10 ^= xmm11 | ||
7101 | # asm 1: pxor <xmm11=int6464#12,<xmm10=int6464#11 | ||
7102 | # asm 2: pxor <xmm11=%xmm11,<xmm10=%xmm10 | ||
7103 | pxor %xmm11,%xmm10 | ||
7104 | |||
7105 | # qhasm: xmm11 ^= xmm13 | ||
7106 | # asm 1: pxor <xmm13=int6464#14,<xmm11=int6464#12 | ||
7107 | # asm 2: pxor <xmm13=%xmm13,<xmm11=%xmm11 | ||
7108 | pxor %xmm13,%xmm11 | ||
7109 | |||
7110 | # qhasm: xmm14 ^= xmm11 | ||
7111 | # asm 1: pxor <xmm11=int6464#12,<xmm14=int6464#15 | ||
7112 | # asm 2: pxor <xmm11=%xmm11,<xmm14=%xmm14 | ||
7113 | pxor %xmm11,%xmm14 | ||
7114 | |||
7115 | # qhasm: xmm0 = shuffle dwords of xmm8 by 0x93 | ||
7116 | # asm 1: pshufd $0x93,<xmm8=int6464#9,>xmm0=int6464#1 | ||
7117 | # asm 2: pshufd $0x93,<xmm8=%xmm8,>xmm0=%xmm0 | ||
7118 | pshufd $0x93,%xmm8,%xmm0 | ||
7119 | |||
7120 | # qhasm: xmm1 = shuffle dwords of xmm9 by 0x93 | ||
7121 | # asm 1: pshufd $0x93,<xmm9=int6464#10,>xmm1=int6464#2 | ||
7122 | # asm 2: pshufd $0x93,<xmm9=%xmm9,>xmm1=%xmm1 | ||
7123 | pshufd $0x93,%xmm9,%xmm1 | ||
7124 | |||
7125 | # qhasm: xmm2 = shuffle dwords of xmm12 by 0x93 | ||
7126 | # asm 1: pshufd $0x93,<xmm12=int6464#13,>xmm2=int6464#3 | ||
7127 | # asm 2: pshufd $0x93,<xmm12=%xmm12,>xmm2=%xmm2 | ||
7128 | pshufd $0x93,%xmm12,%xmm2 | ||
7129 | |||
7130 | # qhasm: xmm3 = shuffle dwords of xmm14 by 0x93 | ||
7131 | # asm 1: pshufd $0x93,<xmm14=int6464#15,>xmm3=int6464#4 | ||
7132 | # asm 2: pshufd $0x93,<xmm14=%xmm14,>xmm3=%xmm3 | ||
7133 | pshufd $0x93,%xmm14,%xmm3 | ||
7134 | |||
7135 | # qhasm: xmm4 = shuffle dwords of xmm11 by 0x93 | ||
7136 | # asm 1: pshufd $0x93,<xmm11=int6464#12,>xmm4=int6464#5 | ||
7137 | # asm 2: pshufd $0x93,<xmm11=%xmm11,>xmm4=%xmm4 | ||
7138 | pshufd $0x93,%xmm11,%xmm4 | ||
7139 | |||
7140 | # qhasm: xmm5 = shuffle dwords of xmm15 by 0x93 | ||
7141 | # asm 1: pshufd $0x93,<xmm15=int6464#16,>xmm5=int6464#6 | ||
7142 | # asm 2: pshufd $0x93,<xmm15=%xmm15,>xmm5=%xmm5 | ||
7143 | pshufd $0x93,%xmm15,%xmm5 | ||
7144 | |||
7145 | # qhasm: xmm6 = shuffle dwords of xmm10 by 0x93 | ||
7146 | # asm 1: pshufd $0x93,<xmm10=int6464#11,>xmm6=int6464#7 | ||
7147 | # asm 2: pshufd $0x93,<xmm10=%xmm10,>xmm6=%xmm6 | ||
7148 | pshufd $0x93,%xmm10,%xmm6 | ||
7149 | |||
7150 | # qhasm: xmm7 = shuffle dwords of xmm13 by 0x93 | ||
7151 | # asm 1: pshufd $0x93,<xmm13=int6464#14,>xmm7=int6464#8 | ||
7152 | # asm 2: pshufd $0x93,<xmm13=%xmm13,>xmm7=%xmm7 | ||
7153 | pshufd $0x93,%xmm13,%xmm7 | ||
7154 | |||
7155 | # qhasm: xmm8 ^= xmm0 | ||
7156 | # asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9 | ||
7157 | # asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8 | ||
7158 | pxor %xmm0,%xmm8 | ||
7159 | |||
7160 | # qhasm: xmm9 ^= xmm1 | ||
7161 | # asm 1: pxor <xmm1=int6464#2,<xmm9=int6464#10 | ||
7162 | # asm 2: pxor <xmm1=%xmm1,<xmm9=%xmm9 | ||
7163 | pxor %xmm1,%xmm9 | ||
7164 | |||
7165 | # qhasm: xmm12 ^= xmm2 | ||
7166 | # asm 1: pxor <xmm2=int6464#3,<xmm12=int6464#13 | ||
7167 | # asm 2: pxor <xmm2=%xmm2,<xmm12=%xmm12 | ||
7168 | pxor %xmm2,%xmm12 | ||
7169 | |||
7170 | # qhasm: xmm14 ^= xmm3 | ||
7171 | # asm 1: pxor <xmm3=int6464#4,<xmm14=int6464#15 | ||
7172 | # asm 2: pxor <xmm3=%xmm3,<xmm14=%xmm14 | ||
7173 | pxor %xmm3,%xmm14 | ||
7174 | |||
7175 | # qhasm: xmm11 ^= xmm4 | ||
7176 | # asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#12 | ||
7177 | # asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm11 | ||
7178 | pxor %xmm4,%xmm11 | ||
7179 | |||
7180 | # qhasm: xmm15 ^= xmm5 | ||
7181 | # asm 1: pxor <xmm5=int6464#6,<xmm15=int6464#16 | ||
7182 | # asm 2: pxor <xmm5=%xmm5,<xmm15=%xmm15 | ||
7183 | pxor %xmm5,%xmm15 | ||
7184 | |||
7185 | # qhasm: xmm10 ^= xmm6 | ||
7186 | # asm 1: pxor <xmm6=int6464#7,<xmm10=int6464#11 | ||
7187 | # asm 2: pxor <xmm6=%xmm6,<xmm10=%xmm10 | ||
7188 | pxor %xmm6,%xmm10 | ||
7189 | |||
7190 | # qhasm: xmm13 ^= xmm7 | ||
7191 | # asm 1: pxor <xmm7=int6464#8,<xmm13=int6464#14 | ||
7192 | # asm 2: pxor <xmm7=%xmm7,<xmm13=%xmm13 | ||
7193 | pxor %xmm7,%xmm13 | ||
7194 | |||
7195 | # qhasm: xmm0 ^= xmm13 | ||
7196 | # asm 1: pxor <xmm13=int6464#14,<xmm0=int6464#1 | ||
7197 | # asm 2: pxor <xmm13=%xmm13,<xmm0=%xmm0 | ||
7198 | pxor %xmm13,%xmm0 | ||
7199 | |||
7200 | # qhasm: xmm1 ^= xmm8 | ||
7201 | # asm 1: pxor <xmm8=int6464#9,<xmm1=int6464#2 | ||
7202 | # asm 2: pxor <xmm8=%xmm8,<xmm1=%xmm1 | ||
7203 | pxor %xmm8,%xmm1 | ||
7204 | |||
7205 | # qhasm: xmm2 ^= xmm9 | ||
7206 | # asm 1: pxor <xmm9=int6464#10,<xmm2=int6464#3 | ||
7207 | # asm 2: pxor <xmm9=%xmm9,<xmm2=%xmm2 | ||
7208 | pxor %xmm9,%xmm2 | ||
7209 | |||
7210 | # qhasm: xmm1 ^= xmm13 | ||
7211 | # asm 1: pxor <xmm13=int6464#14,<xmm1=int6464#2 | ||
7212 | # asm 2: pxor <xmm13=%xmm13,<xmm1=%xmm1 | ||
7213 | pxor %xmm13,%xmm1 | ||
7214 | |||
7215 | # qhasm: xmm3 ^= xmm12 | ||
7216 | # asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4 | ||
7217 | # asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3 | ||
7218 | pxor %xmm12,%xmm3 | ||
7219 | |||
7220 | # qhasm: xmm4 ^= xmm14 | ||
7221 | # asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5 | ||
7222 | # asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4 | ||
7223 | pxor %xmm14,%xmm4 | ||
7224 | |||
7225 | # qhasm: xmm5 ^= xmm11 | ||
7226 | # asm 1: pxor <xmm11=int6464#12,<xmm5=int6464#6 | ||
7227 | # asm 2: pxor <xmm11=%xmm11,<xmm5=%xmm5 | ||
7228 | pxor %xmm11,%xmm5 | ||
7229 | |||
7230 | # qhasm: xmm3 ^= xmm13 | ||
7231 | # asm 1: pxor <xmm13=int6464#14,<xmm3=int6464#4 | ||
7232 | # asm 2: pxor <xmm13=%xmm13,<xmm3=%xmm3 | ||
7233 | pxor %xmm13,%xmm3 | ||
7234 | |||
7235 | # qhasm: xmm6 ^= xmm15 | ||
7236 | # asm 1: pxor <xmm15=int6464#16,<xmm6=int6464#7 | ||
7237 | # asm 2: pxor <xmm15=%xmm15,<xmm6=%xmm6 | ||
7238 | pxor %xmm15,%xmm6 | ||
7239 | |||
7240 | # qhasm: xmm7 ^= xmm10 | ||
7241 | # asm 1: pxor <xmm10=int6464#11,<xmm7=int6464#8 | ||
7242 | # asm 2: pxor <xmm10=%xmm10,<xmm7=%xmm7 | ||
7243 | pxor %xmm10,%xmm7 | ||
7244 | |||
7245 | # qhasm: xmm4 ^= xmm13 | ||
7246 | # asm 1: pxor <xmm13=int6464#14,<xmm4=int6464#5 | ||
7247 | # asm 2: pxor <xmm13=%xmm13,<xmm4=%xmm4 | ||
7248 | pxor %xmm13,%xmm4 | ||
7249 | |||
7250 | # qhasm: xmm8 = shuffle dwords of xmm8 by 0x4E | ||
7251 | # asm 1: pshufd $0x4E,<xmm8=int6464#9,>xmm8=int6464#9 | ||
7252 | # asm 2: pshufd $0x4E,<xmm8=%xmm8,>xmm8=%xmm8 | ||
7253 | pshufd $0x4E,%xmm8,%xmm8 | ||
7254 | |||
7255 | # qhasm: xmm9 = shuffle dwords of xmm9 by 0x4E | ||
7256 | # asm 1: pshufd $0x4E,<xmm9=int6464#10,>xmm9=int6464#10 | ||
7257 | # asm 2: pshufd $0x4E,<xmm9=%xmm9,>xmm9=%xmm9 | ||
7258 | pshufd $0x4E,%xmm9,%xmm9 | ||
7259 | |||
7260 | # qhasm: xmm12 = shuffle dwords of xmm12 by 0x4E | ||
7261 | # asm 1: pshufd $0x4E,<xmm12=int6464#13,>xmm12=int6464#13 | ||
7262 | # asm 2: pshufd $0x4E,<xmm12=%xmm12,>xmm12=%xmm12 | ||
7263 | pshufd $0x4E,%xmm12,%xmm12 | ||
7264 | |||
7265 | # qhasm: xmm14 = shuffle dwords of xmm14 by 0x4E | ||
7266 | # asm 1: pshufd $0x4E,<xmm14=int6464#15,>xmm14=int6464#15 | ||
7267 | # asm 2: pshufd $0x4E,<xmm14=%xmm14,>xmm14=%xmm14 | ||
7268 | pshufd $0x4E,%xmm14,%xmm14 | ||
7269 | |||
7270 | # qhasm: xmm11 = shuffle dwords of xmm11 by 0x4E | ||
7271 | # asm 1: pshufd $0x4E,<xmm11=int6464#12,>xmm11=int6464#12 | ||
7272 | # asm 2: pshufd $0x4E,<xmm11=%xmm11,>xmm11=%xmm11 | ||
7273 | pshufd $0x4E,%xmm11,%xmm11 | ||
7274 | |||
7275 | # qhasm: xmm15 = shuffle dwords of xmm15 by 0x4E | ||
7276 | # asm 1: pshufd $0x4E,<xmm15=int6464#16,>xmm15=int6464#16 | ||
7277 | # asm 2: pshufd $0x4E,<xmm15=%xmm15,>xmm15=%xmm15 | ||
7278 | pshufd $0x4E,%xmm15,%xmm15 | ||
7279 | |||
7280 | # qhasm: xmm10 = shuffle dwords of xmm10 by 0x4E | ||
7281 | # asm 1: pshufd $0x4E,<xmm10=int6464#11,>xmm10=int6464#11 | ||
7282 | # asm 2: pshufd $0x4E,<xmm10=%xmm10,>xmm10=%xmm10 | ||
7283 | pshufd $0x4E,%xmm10,%xmm10 | ||
7284 | |||
7285 | # qhasm: xmm13 = shuffle dwords of xmm13 by 0x4E | ||
7286 | # asm 1: pshufd $0x4E,<xmm13=int6464#14,>xmm13=int6464#14 | ||
7287 | # asm 2: pshufd $0x4E,<xmm13=%xmm13,>xmm13=%xmm13 | ||
7288 | pshufd $0x4E,%xmm13,%xmm13 | ||
7289 | |||
7290 | # qhasm: xmm0 ^= xmm8 | ||
7291 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
7292 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
7293 | pxor %xmm8,%xmm0 | ||
7294 | |||
7295 | # qhasm: xmm1 ^= xmm9 | ||
7296 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
7297 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
7298 | pxor %xmm9,%xmm1 | ||
7299 | |||
7300 | # qhasm: xmm2 ^= xmm12 | ||
7301 | # asm 1: pxor <xmm12=int6464#13,<xmm2=int6464#3 | ||
7302 | # asm 2: pxor <xmm12=%xmm12,<xmm2=%xmm2 | ||
7303 | pxor %xmm12,%xmm2 | ||
7304 | |||
7305 | # qhasm: xmm3 ^= xmm14 | ||
7306 | # asm 1: pxor <xmm14=int6464#15,<xmm3=int6464#4 | ||
7307 | # asm 2: pxor <xmm14=%xmm14,<xmm3=%xmm3 | ||
7308 | pxor %xmm14,%xmm3 | ||
7309 | |||
7310 | # qhasm: xmm4 ^= xmm11 | ||
7311 | # asm 1: pxor <xmm11=int6464#12,<xmm4=int6464#5 | ||
7312 | # asm 2: pxor <xmm11=%xmm11,<xmm4=%xmm4 | ||
7313 | pxor %xmm11,%xmm4 | ||
7314 | |||
7315 | # qhasm: xmm5 ^= xmm15 | ||
7316 | # asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6 | ||
7317 | # asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5 | ||
7318 | pxor %xmm15,%xmm5 | ||
7319 | |||
7320 | # qhasm: xmm6 ^= xmm10 | ||
7321 | # asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7 | ||
7322 | # asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6 | ||
7323 | pxor %xmm10,%xmm6 | ||
7324 | |||
7325 | # qhasm: xmm7 ^= xmm13 | ||
7326 | # asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8 | ||
7327 | # asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7 | ||
7328 | pxor %xmm13,%xmm7 | ||
7329 | |||
7330 | # qhasm: xmm0 ^= *(int128 *)(c + 768) | ||
7331 | # asm 1: pxor 768(<c=int64#4),<xmm0=int6464#1 | ||
7332 | # asm 2: pxor 768(<c=%rcx),<xmm0=%xmm0 | ||
7333 | pxor 768(%rcx),%xmm0 | ||
7334 | |||
7335 | # qhasm: shuffle bytes of xmm0 by SR | ||
7336 | # asm 1: pshufb SR,<xmm0=int6464#1 | ||
7337 | # asm 2: pshufb SR,<xmm0=%xmm0 | ||
7338 | pshufb SR,%xmm0 | ||
7339 | |||
7340 | # qhasm: xmm1 ^= *(int128 *)(c + 784) | ||
7341 | # asm 1: pxor 784(<c=int64#4),<xmm1=int6464#2 | ||
7342 | # asm 2: pxor 784(<c=%rcx),<xmm1=%xmm1 | ||
7343 | pxor 784(%rcx),%xmm1 | ||
7344 | |||
7345 | # qhasm: shuffle bytes of xmm1 by SR | ||
7346 | # asm 1: pshufb SR,<xmm1=int6464#2 | ||
7347 | # asm 2: pshufb SR,<xmm1=%xmm1 | ||
7348 | pshufb SR,%xmm1 | ||
7349 | |||
7350 | # qhasm: xmm2 ^= *(int128 *)(c + 800) | ||
7351 | # asm 1: pxor 800(<c=int64#4),<xmm2=int6464#3 | ||
7352 | # asm 2: pxor 800(<c=%rcx),<xmm2=%xmm2 | ||
7353 | pxor 800(%rcx),%xmm2 | ||
7354 | |||
7355 | # qhasm: shuffle bytes of xmm2 by SR | ||
7356 | # asm 1: pshufb SR,<xmm2=int6464#3 | ||
7357 | # asm 2: pshufb SR,<xmm2=%xmm2 | ||
7358 | pshufb SR,%xmm2 | ||
7359 | |||
7360 | # qhasm: xmm3 ^= *(int128 *)(c + 816) | ||
7361 | # asm 1: pxor 816(<c=int64#4),<xmm3=int6464#4 | ||
7362 | # asm 2: pxor 816(<c=%rcx),<xmm3=%xmm3 | ||
7363 | pxor 816(%rcx),%xmm3 | ||
7364 | |||
7365 | # qhasm: shuffle bytes of xmm3 by SR | ||
7366 | # asm 1: pshufb SR,<xmm3=int6464#4 | ||
7367 | # asm 2: pshufb SR,<xmm3=%xmm3 | ||
7368 | pshufb SR,%xmm3 | ||
7369 | |||
7370 | # qhasm: xmm4 ^= *(int128 *)(c + 832) | ||
7371 | # asm 1: pxor 832(<c=int64#4),<xmm4=int6464#5 | ||
7372 | # asm 2: pxor 832(<c=%rcx),<xmm4=%xmm4 | ||
7373 | pxor 832(%rcx),%xmm4 | ||
7374 | |||
7375 | # qhasm: shuffle bytes of xmm4 by SR | ||
7376 | # asm 1: pshufb SR,<xmm4=int6464#5 | ||
7377 | # asm 2: pshufb SR,<xmm4=%xmm4 | ||
7378 | pshufb SR,%xmm4 | ||
7379 | |||
7380 | # qhasm: xmm5 ^= *(int128 *)(c + 848) | ||
7381 | # asm 1: pxor 848(<c=int64#4),<xmm5=int6464#6 | ||
7382 | # asm 2: pxor 848(<c=%rcx),<xmm5=%xmm5 | ||
7383 | pxor 848(%rcx),%xmm5 | ||
7384 | |||
7385 | # qhasm: shuffle bytes of xmm5 by SR | ||
7386 | # asm 1: pshufb SR,<xmm5=int6464#6 | ||
7387 | # asm 2: pshufb SR,<xmm5=%xmm5 | ||
7388 | pshufb SR,%xmm5 | ||
7389 | |||
7390 | # qhasm: xmm6 ^= *(int128 *)(c + 864) | ||
7391 | # asm 1: pxor 864(<c=int64#4),<xmm6=int6464#7 | ||
7392 | # asm 2: pxor 864(<c=%rcx),<xmm6=%xmm6 | ||
7393 | pxor 864(%rcx),%xmm6 | ||
7394 | |||
7395 | # qhasm: shuffle bytes of xmm6 by SR | ||
7396 | # asm 1: pshufb SR,<xmm6=int6464#7 | ||
7397 | # asm 2: pshufb SR,<xmm6=%xmm6 | ||
7398 | pshufb SR,%xmm6 | ||
7399 | |||
7400 | # qhasm: xmm7 ^= *(int128 *)(c + 880) | ||
7401 | # asm 1: pxor 880(<c=int64#4),<xmm7=int6464#8 | ||
7402 | # asm 2: pxor 880(<c=%rcx),<xmm7=%xmm7 | ||
7403 | pxor 880(%rcx),%xmm7 | ||
7404 | |||
7405 | # qhasm: shuffle bytes of xmm7 by SR | ||
7406 | # asm 1: pshufb SR,<xmm7=int6464#8 | ||
7407 | # asm 2: pshufb SR,<xmm7=%xmm7 | ||
7408 | pshufb SR,%xmm7 | ||
7409 | |||
7410 | # qhasm: xmm5 ^= xmm6 | ||
7411 | # asm 1: pxor <xmm6=int6464#7,<xmm5=int6464#6 | ||
7412 | # asm 2: pxor <xmm6=%xmm6,<xmm5=%xmm5 | ||
7413 | pxor %xmm6,%xmm5 | ||
7414 | |||
7415 | # qhasm: xmm2 ^= xmm1 | ||
7416 | # asm 1: pxor <xmm1=int6464#2,<xmm2=int6464#3 | ||
7417 | # asm 2: pxor <xmm1=%xmm1,<xmm2=%xmm2 | ||
7418 | pxor %xmm1,%xmm2 | ||
7419 | |||
7420 | # qhasm: xmm5 ^= xmm0 | ||
7421 | # asm 1: pxor <xmm0=int6464#1,<xmm5=int6464#6 | ||
7422 | # asm 2: pxor <xmm0=%xmm0,<xmm5=%xmm5 | ||
7423 | pxor %xmm0,%xmm5 | ||
7424 | |||
7425 | # qhasm: xmm6 ^= xmm2 | ||
7426 | # asm 1: pxor <xmm2=int6464#3,<xmm6=int6464#7 | ||
7427 | # asm 2: pxor <xmm2=%xmm2,<xmm6=%xmm6 | ||
7428 | pxor %xmm2,%xmm6 | ||
7429 | |||
7430 | # qhasm: xmm3 ^= xmm0 | ||
7431 | # asm 1: pxor <xmm0=int6464#1,<xmm3=int6464#4 | ||
7432 | # asm 2: pxor <xmm0=%xmm0,<xmm3=%xmm3 | ||
7433 | pxor %xmm0,%xmm3 | ||
7434 | |||
7435 | # qhasm: xmm6 ^= xmm3 | ||
7436 | # asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7 | ||
7437 | # asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6 | ||
7438 | pxor %xmm3,%xmm6 | ||
7439 | |||
7440 | # qhasm: xmm3 ^= xmm7 | ||
7441 | # asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4 | ||
7442 | # asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3 | ||
7443 | pxor %xmm7,%xmm3 | ||
7444 | |||
7445 | # qhasm: xmm3 ^= xmm4 | ||
7446 | # asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4 | ||
7447 | # asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3 | ||
7448 | pxor %xmm4,%xmm3 | ||
7449 | |||
7450 | # qhasm: xmm7 ^= xmm5 | ||
7451 | # asm 1: pxor <xmm5=int6464#6,<xmm7=int6464#8 | ||
7452 | # asm 2: pxor <xmm5=%xmm5,<xmm7=%xmm7 | ||
7453 | pxor %xmm5,%xmm7 | ||
7454 | |||
7455 | # qhasm: xmm3 ^= xmm1 | ||
7456 | # asm 1: pxor <xmm1=int6464#2,<xmm3=int6464#4 | ||
7457 | # asm 2: pxor <xmm1=%xmm1,<xmm3=%xmm3 | ||
7458 | pxor %xmm1,%xmm3 | ||
7459 | |||
7460 | # qhasm: xmm4 ^= xmm5 | ||
7461 | # asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5 | ||
7462 | # asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4 | ||
7463 | pxor %xmm5,%xmm4 | ||
7464 | |||
7465 | # qhasm: xmm2 ^= xmm7 | ||
7466 | # asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3 | ||
7467 | # asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2 | ||
7468 | pxor %xmm7,%xmm2 | ||
7469 | |||
7470 | # qhasm: xmm1 ^= xmm5 | ||
7471 | # asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2 | ||
7472 | # asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1 | ||
7473 | pxor %xmm5,%xmm1 | ||
7474 | |||
7475 | # qhasm: xmm11 = xmm7 | ||
7476 | # asm 1: movdqa <xmm7=int6464#8,>xmm11=int6464#9 | ||
7477 | # asm 2: movdqa <xmm7=%xmm7,>xmm11=%xmm8 | ||
7478 | movdqa %xmm7,%xmm8 | ||
7479 | |||
7480 | # qhasm: xmm10 = xmm1 | ||
7481 | # asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10 | ||
7482 | # asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9 | ||
7483 | movdqa %xmm1,%xmm9 | ||
7484 | |||
7485 | # qhasm: xmm9 = xmm5 | ||
7486 | # asm 1: movdqa <xmm5=int6464#6,>xmm9=int6464#11 | ||
7487 | # asm 2: movdqa <xmm5=%xmm5,>xmm9=%xmm10 | ||
7488 | movdqa %xmm5,%xmm10 | ||
7489 | |||
7490 | # qhasm: xmm13 = xmm2 | ||
7491 | # asm 1: movdqa <xmm2=int6464#3,>xmm13=int6464#12 | ||
7492 | # asm 2: movdqa <xmm2=%xmm2,>xmm13=%xmm11 | ||
7493 | movdqa %xmm2,%xmm11 | ||
7494 | |||
7495 | # qhasm: xmm12 = xmm6 | ||
7496 | # asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#13 | ||
7497 | # asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm12 | ||
7498 | movdqa %xmm6,%xmm12 | ||
7499 | |||
7500 | # qhasm: xmm11 ^= xmm4 | ||
7501 | # asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#9 | ||
7502 | # asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm8 | ||
7503 | pxor %xmm4,%xmm8 | ||
7504 | |||
7505 | # qhasm: xmm10 ^= xmm2 | ||
7506 | # asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#10 | ||
7507 | # asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm9 | ||
7508 | pxor %xmm2,%xmm9 | ||
7509 | |||
7510 | # qhasm: xmm9 ^= xmm3 | ||
7511 | # asm 1: pxor <xmm3=int6464#4,<xmm9=int6464#11 | ||
7512 | # asm 2: pxor <xmm3=%xmm3,<xmm9=%xmm10 | ||
7513 | pxor %xmm3,%xmm10 | ||
7514 | |||
7515 | # qhasm: xmm13 ^= xmm4 | ||
7516 | # asm 1: pxor <xmm4=int6464#5,<xmm13=int6464#12 | ||
7517 | # asm 2: pxor <xmm4=%xmm4,<xmm13=%xmm11 | ||
7518 | pxor %xmm4,%xmm11 | ||
7519 | |||
7520 | # qhasm: xmm12 ^= xmm0 | ||
7521 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13 | ||
7522 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12 | ||
7523 | pxor %xmm0,%xmm12 | ||
7524 | |||
7525 | # qhasm: xmm14 = xmm11 | ||
7526 | # asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14 | ||
7527 | # asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13 | ||
7528 | movdqa %xmm8,%xmm13 | ||
7529 | |||
7530 | # qhasm: xmm8 = xmm10 | ||
7531 | # asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15 | ||
7532 | # asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14 | ||
7533 | movdqa %xmm9,%xmm14 | ||
7534 | |||
7535 | # qhasm: xmm15 = xmm11 | ||
7536 | # asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16 | ||
7537 | # asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15 | ||
7538 | movdqa %xmm8,%xmm15 | ||
7539 | |||
7540 | # qhasm: xmm10 |= xmm9 | ||
7541 | # asm 1: por <xmm9=int6464#11,<xmm10=int6464#10 | ||
7542 | # asm 2: por <xmm9=%xmm10,<xmm10=%xmm9 | ||
7543 | por %xmm10,%xmm9 | ||
7544 | |||
7545 | # qhasm: xmm11 |= xmm12 | ||
7546 | # asm 1: por <xmm12=int6464#13,<xmm11=int6464#9 | ||
7547 | # asm 2: por <xmm12=%xmm12,<xmm11=%xmm8 | ||
7548 | por %xmm12,%xmm8 | ||
7549 | |||
7550 | # qhasm: xmm15 ^= xmm8 | ||
7551 | # asm 1: pxor <xmm8=int6464#15,<xmm15=int6464#16 | ||
7552 | # asm 2: pxor <xmm8=%xmm14,<xmm15=%xmm15 | ||
7553 | pxor %xmm14,%xmm15 | ||
7554 | |||
7555 | # qhasm: xmm14 &= xmm12 | ||
7556 | # asm 1: pand <xmm12=int6464#13,<xmm14=int6464#14 | ||
7557 | # asm 2: pand <xmm12=%xmm12,<xmm14=%xmm13 | ||
7558 | pand %xmm12,%xmm13 | ||
7559 | |||
7560 | # qhasm: xmm8 &= xmm9 | ||
7561 | # asm 1: pand <xmm9=int6464#11,<xmm8=int6464#15 | ||
7562 | # asm 2: pand <xmm9=%xmm10,<xmm8=%xmm14 | ||
7563 | pand %xmm10,%xmm14 | ||
7564 | |||
7565 | # qhasm: xmm12 ^= xmm9 | ||
7566 | # asm 1: pxor <xmm9=int6464#11,<xmm12=int6464#13 | ||
7567 | # asm 2: pxor <xmm9=%xmm10,<xmm12=%xmm12 | ||
7568 | pxor %xmm10,%xmm12 | ||
7569 | |||
7570 | # qhasm: xmm15 &= xmm12 | ||
7571 | # asm 1: pand <xmm12=int6464#13,<xmm15=int6464#16 | ||
7572 | # asm 2: pand <xmm12=%xmm12,<xmm15=%xmm15 | ||
7573 | pand %xmm12,%xmm15 | ||
7574 | |||
7575 | # qhasm: xmm12 = xmm3 | ||
7576 | # asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#11 | ||
7577 | # asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm10 | ||
7578 | movdqa %xmm3,%xmm10 | ||
7579 | |||
7580 | # qhasm: xmm12 ^= xmm0 | ||
7581 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#11 | ||
7582 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm10 | ||
7583 | pxor %xmm0,%xmm10 | ||
7584 | |||
7585 | # qhasm: xmm13 &= xmm12 | ||
7586 | # asm 1: pand <xmm12=int6464#11,<xmm13=int6464#12 | ||
7587 | # asm 2: pand <xmm12=%xmm10,<xmm13=%xmm11 | ||
7588 | pand %xmm10,%xmm11 | ||
7589 | |||
7590 | # qhasm: xmm11 ^= xmm13 | ||
7591 | # asm 1: pxor <xmm13=int6464#12,<xmm11=int6464#9 | ||
7592 | # asm 2: pxor <xmm13=%xmm11,<xmm11=%xmm8 | ||
7593 | pxor %xmm11,%xmm8 | ||
7594 | |||
7595 | # qhasm: xmm10 ^= xmm13 | ||
7596 | # asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10 | ||
7597 | # asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9 | ||
7598 | pxor %xmm11,%xmm9 | ||
7599 | |||
7600 | # qhasm: xmm13 = xmm7 | ||
7601 | # asm 1: movdqa <xmm7=int6464#8,>xmm13=int6464#11 | ||
7602 | # asm 2: movdqa <xmm7=%xmm7,>xmm13=%xmm10 | ||
7603 | movdqa %xmm7,%xmm10 | ||
7604 | |||
7605 | # qhasm: xmm13 ^= xmm1 | ||
7606 | # asm 1: pxor <xmm1=int6464#2,<xmm13=int6464#11 | ||
7607 | # asm 2: pxor <xmm1=%xmm1,<xmm13=%xmm10 | ||
7608 | pxor %xmm1,%xmm10 | ||
7609 | |||
7610 | # qhasm: xmm12 = xmm5 | ||
7611 | # asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#12 | ||
7612 | # asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm11 | ||
7613 | movdqa %xmm5,%xmm11 | ||
7614 | |||
7615 | # qhasm: xmm9 = xmm13 | ||
7616 | # asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13 | ||
7617 | # asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12 | ||
7618 | movdqa %xmm10,%xmm12 | ||
7619 | |||
7620 | # qhasm: xmm12 ^= xmm6 | ||
7621 | # asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#12 | ||
7622 | # asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm11 | ||
7623 | pxor %xmm6,%xmm11 | ||
7624 | |||
7625 | # qhasm: xmm9 |= xmm12 | ||
7626 | # asm 1: por <xmm12=int6464#12,<xmm9=int6464#13 | ||
7627 | # asm 2: por <xmm12=%xmm11,<xmm9=%xmm12 | ||
7628 | por %xmm11,%xmm12 | ||
7629 | |||
7630 | # qhasm: xmm13 &= xmm12 | ||
7631 | # asm 1: pand <xmm12=int6464#12,<xmm13=int6464#11 | ||
7632 | # asm 2: pand <xmm12=%xmm11,<xmm13=%xmm10 | ||
7633 | pand %xmm11,%xmm10 | ||
7634 | |||
7635 | # qhasm: xmm8 ^= xmm13 | ||
7636 | # asm 1: pxor <xmm13=int6464#11,<xmm8=int6464#15 | ||
7637 | # asm 2: pxor <xmm13=%xmm10,<xmm8=%xmm14 | ||
7638 | pxor %xmm10,%xmm14 | ||
7639 | |||
7640 | # qhasm: xmm11 ^= xmm15 | ||
7641 | # asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#9 | ||
7642 | # asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm8 | ||
7643 | pxor %xmm15,%xmm8 | ||
7644 | |||
7645 | # qhasm: xmm10 ^= xmm14 | ||
7646 | # asm 1: pxor <xmm14=int6464#14,<xmm10=int6464#10 | ||
7647 | # asm 2: pxor <xmm14=%xmm13,<xmm10=%xmm9 | ||
7648 | pxor %xmm13,%xmm9 | ||
7649 | |||
7650 | # qhasm: xmm9 ^= xmm15 | ||
7651 | # asm 1: pxor <xmm15=int6464#16,<xmm9=int6464#13 | ||
7652 | # asm 2: pxor <xmm15=%xmm15,<xmm9=%xmm12 | ||
7653 | pxor %xmm15,%xmm12 | ||
7654 | |||
7655 | # qhasm: xmm8 ^= xmm14 | ||
7656 | # asm 1: pxor <xmm14=int6464#14,<xmm8=int6464#15 | ||
7657 | # asm 2: pxor <xmm14=%xmm13,<xmm8=%xmm14 | ||
7658 | pxor %xmm13,%xmm14 | ||
7659 | |||
7660 | # qhasm: xmm9 ^= xmm14 | ||
7661 | # asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13 | ||
7662 | # asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12 | ||
7663 | pxor %xmm13,%xmm12 | ||
7664 | |||
7665 | # qhasm: xmm12 = xmm2 | ||
7666 | # asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#11 | ||
7667 | # asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm10 | ||
7668 | movdqa %xmm2,%xmm10 | ||
7669 | |||
7670 | # qhasm: xmm13 = xmm4 | ||
7671 | # asm 1: movdqa <xmm4=int6464#5,>xmm13=int6464#12 | ||
7672 | # asm 2: movdqa <xmm4=%xmm4,>xmm13=%xmm11 | ||
7673 | movdqa %xmm4,%xmm11 | ||
7674 | |||
7675 | # qhasm: xmm14 = xmm1 | ||
7676 | # asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14 | ||
7677 | # asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13 | ||
7678 | movdqa %xmm1,%xmm13 | ||
7679 | |||
7680 | # qhasm: xmm15 = xmm7 | ||
7681 | # asm 1: movdqa <xmm7=int6464#8,>xmm15=int6464#16 | ||
7682 | # asm 2: movdqa <xmm7=%xmm7,>xmm15=%xmm15 | ||
7683 | movdqa %xmm7,%xmm15 | ||
7684 | |||
7685 | # qhasm: xmm12 &= xmm3 | ||
7686 | # asm 1: pand <xmm3=int6464#4,<xmm12=int6464#11 | ||
7687 | # asm 2: pand <xmm3=%xmm3,<xmm12=%xmm10 | ||
7688 | pand %xmm3,%xmm10 | ||
7689 | |||
7690 | # qhasm: xmm13 &= xmm0 | ||
7691 | # asm 1: pand <xmm0=int6464#1,<xmm13=int6464#12 | ||
7692 | # asm 2: pand <xmm0=%xmm0,<xmm13=%xmm11 | ||
7693 | pand %xmm0,%xmm11 | ||
7694 | |||
7695 | # qhasm: xmm14 &= xmm5 | ||
7696 | # asm 1: pand <xmm5=int6464#6,<xmm14=int6464#14 | ||
7697 | # asm 2: pand <xmm5=%xmm5,<xmm14=%xmm13 | ||
7698 | pand %xmm5,%xmm13 | ||
7699 | |||
7700 | # qhasm: xmm15 |= xmm6 | ||
7701 | # asm 1: por <xmm6=int6464#7,<xmm15=int6464#16 | ||
7702 | # asm 2: por <xmm6=%xmm6,<xmm15=%xmm15 | ||
7703 | por %xmm6,%xmm15 | ||
7704 | |||
7705 | # qhasm: xmm11 ^= xmm12 | ||
7706 | # asm 1: pxor <xmm12=int6464#11,<xmm11=int6464#9 | ||
7707 | # asm 2: pxor <xmm12=%xmm10,<xmm11=%xmm8 | ||
7708 | pxor %xmm10,%xmm8 | ||
7709 | |||
7710 | # qhasm: xmm10 ^= xmm13 | ||
7711 | # asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10 | ||
7712 | # asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9 | ||
7713 | pxor %xmm11,%xmm9 | ||
7714 | |||
7715 | # qhasm: xmm9 ^= xmm14 | ||
7716 | # asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13 | ||
7717 | # asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12 | ||
7718 | pxor %xmm13,%xmm12 | ||
7719 | |||
7720 | # qhasm: xmm8 ^= xmm15 | ||
7721 | # asm 1: pxor <xmm15=int6464#16,<xmm8=int6464#15 | ||
7722 | # asm 2: pxor <xmm15=%xmm15,<xmm8=%xmm14 | ||
7723 | pxor %xmm15,%xmm14 | ||
7724 | |||
7725 | # qhasm: xmm12 = xmm11 | ||
7726 | # asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11 | ||
7727 | # asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10 | ||
7728 | movdqa %xmm8,%xmm10 | ||
7729 | |||
7730 | # qhasm: xmm12 ^= xmm10 | ||
7731 | # asm 1: pxor <xmm10=int6464#10,<xmm12=int6464#11 | ||
7732 | # asm 2: pxor <xmm10=%xmm9,<xmm12=%xmm10 | ||
7733 | pxor %xmm9,%xmm10 | ||
7734 | |||
7735 | # qhasm: xmm11 &= xmm9 | ||
7736 | # asm 1: pand <xmm9=int6464#13,<xmm11=int6464#9 | ||
7737 | # asm 2: pand <xmm9=%xmm12,<xmm11=%xmm8 | ||
7738 | pand %xmm12,%xmm8 | ||
7739 | |||
7740 | # qhasm: xmm14 = xmm8 | ||
7741 | # asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12 | ||
7742 | # asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11 | ||
7743 | movdqa %xmm14,%xmm11 | ||
7744 | |||
7745 | # qhasm: xmm14 ^= xmm11 | ||
7746 | # asm 1: pxor <xmm11=int6464#9,<xmm14=int6464#12 | ||
7747 | # asm 2: pxor <xmm11=%xmm8,<xmm14=%xmm11 | ||
7748 | pxor %xmm8,%xmm11 | ||
7749 | |||
7750 | # qhasm: xmm15 = xmm12 | ||
7751 | # asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14 | ||
7752 | # asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13 | ||
7753 | movdqa %xmm10,%xmm13 | ||
7754 | |||
7755 | # qhasm: xmm15 &= xmm14 | ||
7756 | # asm 1: pand <xmm14=int6464#12,<xmm15=int6464#14 | ||
7757 | # asm 2: pand <xmm14=%xmm11,<xmm15=%xmm13 | ||
7758 | pand %xmm11,%xmm13 | ||
7759 | |||
7760 | # qhasm: xmm15 ^= xmm10 | ||
7761 | # asm 1: pxor <xmm10=int6464#10,<xmm15=int6464#14 | ||
7762 | # asm 2: pxor <xmm10=%xmm9,<xmm15=%xmm13 | ||
7763 | pxor %xmm9,%xmm13 | ||
7764 | |||
7765 | # qhasm: xmm13 = xmm9 | ||
7766 | # asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16 | ||
7767 | # asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15 | ||
7768 | movdqa %xmm12,%xmm15 | ||
7769 | |||
7770 | # qhasm: xmm13 ^= xmm8 | ||
7771 | # asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16 | ||
7772 | # asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15 | ||
7773 | pxor %xmm14,%xmm15 | ||
7774 | |||
7775 | # qhasm: xmm11 ^= xmm10 | ||
7776 | # asm 1: pxor <xmm10=int6464#10,<xmm11=int6464#9 | ||
7777 | # asm 2: pxor <xmm10=%xmm9,<xmm11=%xmm8 | ||
7778 | pxor %xmm9,%xmm8 | ||
7779 | |||
7780 | # qhasm: xmm13 &= xmm11 | ||
7781 | # asm 1: pand <xmm11=int6464#9,<xmm13=int6464#16 | ||
7782 | # asm 2: pand <xmm11=%xmm8,<xmm13=%xmm15 | ||
7783 | pand %xmm8,%xmm15 | ||
7784 | |||
7785 | # qhasm: xmm13 ^= xmm8 | ||
7786 | # asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16 | ||
7787 | # asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15 | ||
7788 | pxor %xmm14,%xmm15 | ||
7789 | |||
7790 | # qhasm: xmm9 ^= xmm13 | ||
7791 | # asm 1: pxor <xmm13=int6464#16,<xmm9=int6464#13 | ||
7792 | # asm 2: pxor <xmm13=%xmm15,<xmm9=%xmm12 | ||
7793 | pxor %xmm15,%xmm12 | ||
7794 | |||
7795 | # qhasm: xmm10 = xmm14 | ||
7796 | # asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9 | ||
7797 | # asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8 | ||
7798 | movdqa %xmm11,%xmm8 | ||
7799 | |||
7800 | # qhasm: xmm10 ^= xmm13 | ||
7801 | # asm 1: pxor <xmm13=int6464#16,<xmm10=int6464#9 | ||
7802 | # asm 2: pxor <xmm13=%xmm15,<xmm10=%xmm8 | ||
7803 | pxor %xmm15,%xmm8 | ||
7804 | |||
7805 | # qhasm: xmm10 &= xmm8 | ||
7806 | # asm 1: pand <xmm8=int6464#15,<xmm10=int6464#9 | ||
7807 | # asm 2: pand <xmm8=%xmm14,<xmm10=%xmm8 | ||
7808 | pand %xmm14,%xmm8 | ||
7809 | |||
7810 | # qhasm: xmm9 ^= xmm10 | ||
7811 | # asm 1: pxor <xmm10=int6464#9,<xmm9=int6464#13 | ||
7812 | # asm 2: pxor <xmm10=%xmm8,<xmm9=%xmm12 | ||
7813 | pxor %xmm8,%xmm12 | ||
7814 | |||
7815 | # qhasm: xmm14 ^= xmm10 | ||
7816 | # asm 1: pxor <xmm10=int6464#9,<xmm14=int6464#12 | ||
7817 | # asm 2: pxor <xmm10=%xmm8,<xmm14=%xmm11 | ||
7818 | pxor %xmm8,%xmm11 | ||
7819 | |||
7820 | # qhasm: xmm14 &= xmm15 | ||
7821 | # asm 1: pand <xmm15=int6464#14,<xmm14=int6464#12 | ||
7822 | # asm 2: pand <xmm15=%xmm13,<xmm14=%xmm11 | ||
7823 | pand %xmm13,%xmm11 | ||
7824 | |||
7825 | # qhasm: xmm14 ^= xmm12 | ||
7826 | # asm 1: pxor <xmm12=int6464#11,<xmm14=int6464#12 | ||
7827 | # asm 2: pxor <xmm12=%xmm10,<xmm14=%xmm11 | ||
7828 | pxor %xmm10,%xmm11 | ||
7829 | |||
7830 | # qhasm: xmm12 = xmm6 | ||
7831 | # asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#9 | ||
7832 | # asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm8 | ||
7833 | movdqa %xmm6,%xmm8 | ||
7834 | |||
7835 | # qhasm: xmm8 = xmm5 | ||
7836 | # asm 1: movdqa <xmm5=int6464#6,>xmm8=int6464#10 | ||
7837 | # asm 2: movdqa <xmm5=%xmm5,>xmm8=%xmm9 | ||
7838 | movdqa %xmm5,%xmm9 | ||
7839 | |||
7840 | # qhasm: xmm10 = xmm15 | ||
7841 | # asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11 | ||
7842 | # asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10 | ||
7843 | movdqa %xmm13,%xmm10 | ||
7844 | |||
7845 | # qhasm: xmm10 ^= xmm14 | ||
7846 | # asm 1: pxor <xmm14=int6464#12,<xmm10=int6464#11 | ||
7847 | # asm 2: pxor <xmm14=%xmm11,<xmm10=%xmm10 | ||
7848 | pxor %xmm11,%xmm10 | ||
7849 | |||
7850 | # qhasm: xmm10 &= xmm6 | ||
7851 | # asm 1: pand <xmm6=int6464#7,<xmm10=int6464#11 | ||
7852 | # asm 2: pand <xmm6=%xmm6,<xmm10=%xmm10 | ||
7853 | pand %xmm6,%xmm10 | ||
7854 | |||
7855 | # qhasm: xmm6 ^= xmm5 | ||
7856 | # asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7 | ||
7857 | # asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6 | ||
7858 | pxor %xmm5,%xmm6 | ||
7859 | |||
7860 | # qhasm: xmm6 &= xmm14 | ||
7861 | # asm 1: pand <xmm14=int6464#12,<xmm6=int6464#7 | ||
7862 | # asm 2: pand <xmm14=%xmm11,<xmm6=%xmm6 | ||
7863 | pand %xmm11,%xmm6 | ||
7864 | |||
7865 | # qhasm: xmm5 &= xmm15 | ||
7866 | # asm 1: pand <xmm15=int6464#14,<xmm5=int6464#6 | ||
7867 | # asm 2: pand <xmm15=%xmm13,<xmm5=%xmm5 | ||
7868 | pand %xmm13,%xmm5 | ||
7869 | |||
7870 | # qhasm: xmm6 ^= xmm5 | ||
7871 | # asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7 | ||
7872 | # asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6 | ||
7873 | pxor %xmm5,%xmm6 | ||
7874 | |||
7875 | # qhasm: xmm5 ^= xmm10 | ||
7876 | # asm 1: pxor <xmm10=int6464#11,<xmm5=int6464#6 | ||
7877 | # asm 2: pxor <xmm10=%xmm10,<xmm5=%xmm5 | ||
7878 | pxor %xmm10,%xmm5 | ||
7879 | |||
7880 | # qhasm: xmm12 ^= xmm0 | ||
7881 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#9 | ||
7882 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm8 | ||
7883 | pxor %xmm0,%xmm8 | ||
7884 | |||
7885 | # qhasm: xmm8 ^= xmm3 | ||
7886 | # asm 1: pxor <xmm3=int6464#4,<xmm8=int6464#10 | ||
7887 | # asm 2: pxor <xmm3=%xmm3,<xmm8=%xmm9 | ||
7888 | pxor %xmm3,%xmm9 | ||
7889 | |||
7890 | # qhasm: xmm15 ^= xmm13 | ||
7891 | # asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14 | ||
7892 | # asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13 | ||
7893 | pxor %xmm15,%xmm13 | ||
7894 | |||
7895 | # qhasm: xmm14 ^= xmm9 | ||
7896 | # asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12 | ||
7897 | # asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11 | ||
7898 | pxor %xmm12,%xmm11 | ||
7899 | |||
7900 | # qhasm: xmm11 = xmm15 | ||
7901 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
7902 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
7903 | movdqa %xmm13,%xmm10 | ||
7904 | |||
7905 | # qhasm: xmm11 ^= xmm14 | ||
7906 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
7907 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
7908 | pxor %xmm11,%xmm10 | ||
7909 | |||
7910 | # qhasm: xmm11 &= xmm12 | ||
7911 | # asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11 | ||
7912 | # asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10 | ||
7913 | pand %xmm8,%xmm10 | ||
7914 | |||
7915 | # qhasm: xmm12 ^= xmm8 | ||
7916 | # asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9 | ||
7917 | # asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8 | ||
7918 | pxor %xmm9,%xmm8 | ||
7919 | |||
7920 | # qhasm: xmm12 &= xmm14 | ||
7921 | # asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9 | ||
7922 | # asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8 | ||
7923 | pand %xmm11,%xmm8 | ||
7924 | |||
7925 | # qhasm: xmm8 &= xmm15 | ||
7926 | # asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10 | ||
7927 | # asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9 | ||
7928 | pand %xmm13,%xmm9 | ||
7929 | |||
7930 | # qhasm: xmm8 ^= xmm12 | ||
7931 | # asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10 | ||
7932 | # asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9 | ||
7933 | pxor %xmm8,%xmm9 | ||
7934 | |||
7935 | # qhasm: xmm12 ^= xmm11 | ||
7936 | # asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9 | ||
7937 | # asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8 | ||
7938 | pxor %xmm10,%xmm8 | ||
7939 | |||
7940 | # qhasm: xmm10 = xmm13 | ||
7941 | # asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11 | ||
7942 | # asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10 | ||
7943 | movdqa %xmm15,%xmm10 | ||
7944 | |||
7945 | # qhasm: xmm10 ^= xmm9 | ||
7946 | # asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11 | ||
7947 | # asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10 | ||
7948 | pxor %xmm12,%xmm10 | ||
7949 | |||
7950 | # qhasm: xmm10 &= xmm0 | ||
7951 | # asm 1: pand <xmm0=int6464#1,<xmm10=int6464#11 | ||
7952 | # asm 2: pand <xmm0=%xmm0,<xmm10=%xmm10 | ||
7953 | pand %xmm0,%xmm10 | ||
7954 | |||
7955 | # qhasm: xmm0 ^= xmm3 | ||
7956 | # asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1 | ||
7957 | # asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0 | ||
7958 | pxor %xmm3,%xmm0 | ||
7959 | |||
7960 | # qhasm: xmm0 &= xmm9 | ||
7961 | # asm 1: pand <xmm9=int6464#13,<xmm0=int6464#1 | ||
7962 | # asm 2: pand <xmm9=%xmm12,<xmm0=%xmm0 | ||
7963 | pand %xmm12,%xmm0 | ||
7964 | |||
7965 | # qhasm: xmm3 &= xmm13 | ||
7966 | # asm 1: pand <xmm13=int6464#16,<xmm3=int6464#4 | ||
7967 | # asm 2: pand <xmm13=%xmm15,<xmm3=%xmm3 | ||
7968 | pand %xmm15,%xmm3 | ||
7969 | |||
7970 | # qhasm: xmm0 ^= xmm3 | ||
7971 | # asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1 | ||
7972 | # asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0 | ||
7973 | pxor %xmm3,%xmm0 | ||
7974 | |||
7975 | # qhasm: xmm3 ^= xmm10 | ||
7976 | # asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4 | ||
7977 | # asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3 | ||
7978 | pxor %xmm10,%xmm3 | ||
7979 | |||
7980 | # qhasm: xmm6 ^= xmm12 | ||
7981 | # asm 1: pxor <xmm12=int6464#9,<xmm6=int6464#7 | ||
7982 | # asm 2: pxor <xmm12=%xmm8,<xmm6=%xmm6 | ||
7983 | pxor %xmm8,%xmm6 | ||
7984 | |||
7985 | # qhasm: xmm0 ^= xmm12 | ||
7986 | # asm 1: pxor <xmm12=int6464#9,<xmm0=int6464#1 | ||
7987 | # asm 2: pxor <xmm12=%xmm8,<xmm0=%xmm0 | ||
7988 | pxor %xmm8,%xmm0 | ||
7989 | |||
7990 | # qhasm: xmm5 ^= xmm8 | ||
7991 | # asm 1: pxor <xmm8=int6464#10,<xmm5=int6464#6 | ||
7992 | # asm 2: pxor <xmm8=%xmm9,<xmm5=%xmm5 | ||
7993 | pxor %xmm9,%xmm5 | ||
7994 | |||
7995 | # qhasm: xmm3 ^= xmm8 | ||
7996 | # asm 1: pxor <xmm8=int6464#10,<xmm3=int6464#4 | ||
7997 | # asm 2: pxor <xmm8=%xmm9,<xmm3=%xmm3 | ||
7998 | pxor %xmm9,%xmm3 | ||
7999 | |||
8000 | # qhasm: xmm12 = xmm7 | ||
8001 | # asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#9 | ||
8002 | # asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm8 | ||
8003 | movdqa %xmm7,%xmm8 | ||
8004 | |||
8005 | # qhasm: xmm8 = xmm1 | ||
8006 | # asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10 | ||
8007 | # asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9 | ||
8008 | movdqa %xmm1,%xmm9 | ||
8009 | |||
8010 | # qhasm: xmm12 ^= xmm4 | ||
8011 | # asm 1: pxor <xmm4=int6464#5,<xmm12=int6464#9 | ||
8012 | # asm 2: pxor <xmm4=%xmm4,<xmm12=%xmm8 | ||
8013 | pxor %xmm4,%xmm8 | ||
8014 | |||
8015 | # qhasm: xmm8 ^= xmm2 | ||
8016 | # asm 1: pxor <xmm2=int6464#3,<xmm8=int6464#10 | ||
8017 | # asm 2: pxor <xmm2=%xmm2,<xmm8=%xmm9 | ||
8018 | pxor %xmm2,%xmm9 | ||
8019 | |||
8020 | # qhasm: xmm11 = xmm15 | ||
8021 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
8022 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
8023 | movdqa %xmm13,%xmm10 | ||
8024 | |||
8025 | # qhasm: xmm11 ^= xmm14 | ||
8026 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
8027 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
8028 | pxor %xmm11,%xmm10 | ||
8029 | |||
8030 | # qhasm: xmm11 &= xmm12 | ||
8031 | # asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11 | ||
8032 | # asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10 | ||
8033 | pand %xmm8,%xmm10 | ||
8034 | |||
8035 | # qhasm: xmm12 ^= xmm8 | ||
8036 | # asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9 | ||
8037 | # asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8 | ||
8038 | pxor %xmm9,%xmm8 | ||
8039 | |||
8040 | # qhasm: xmm12 &= xmm14 | ||
8041 | # asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9 | ||
8042 | # asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8 | ||
8043 | pand %xmm11,%xmm8 | ||
8044 | |||
8045 | # qhasm: xmm8 &= xmm15 | ||
8046 | # asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10 | ||
8047 | # asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9 | ||
8048 | pand %xmm13,%xmm9 | ||
8049 | |||
8050 | # qhasm: xmm8 ^= xmm12 | ||
8051 | # asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10 | ||
8052 | # asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9 | ||
8053 | pxor %xmm8,%xmm9 | ||
8054 | |||
8055 | # qhasm: xmm12 ^= xmm11 | ||
8056 | # asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9 | ||
8057 | # asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8 | ||
8058 | pxor %xmm10,%xmm8 | ||
8059 | |||
8060 | # qhasm: xmm10 = xmm13 | ||
8061 | # asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11 | ||
8062 | # asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10 | ||
8063 | movdqa %xmm15,%xmm10 | ||
8064 | |||
8065 | # qhasm: xmm10 ^= xmm9 | ||
8066 | # asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11 | ||
8067 | # asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10 | ||
8068 | pxor %xmm12,%xmm10 | ||
8069 | |||
8070 | # qhasm: xmm10 &= xmm4 | ||
8071 | # asm 1: pand <xmm4=int6464#5,<xmm10=int6464#11 | ||
8072 | # asm 2: pand <xmm4=%xmm4,<xmm10=%xmm10 | ||
8073 | pand %xmm4,%xmm10 | ||
8074 | |||
8075 | # qhasm: xmm4 ^= xmm2 | ||
8076 | # asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5 | ||
8077 | # asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4 | ||
8078 | pxor %xmm2,%xmm4 | ||
8079 | |||
8080 | # qhasm: xmm4 &= xmm9 | ||
8081 | # asm 1: pand <xmm9=int6464#13,<xmm4=int6464#5 | ||
8082 | # asm 2: pand <xmm9=%xmm12,<xmm4=%xmm4 | ||
8083 | pand %xmm12,%xmm4 | ||
8084 | |||
8085 | # qhasm: xmm2 &= xmm13 | ||
8086 | # asm 1: pand <xmm13=int6464#16,<xmm2=int6464#3 | ||
8087 | # asm 2: pand <xmm13=%xmm15,<xmm2=%xmm2 | ||
8088 | pand %xmm15,%xmm2 | ||
8089 | |||
8090 | # qhasm: xmm4 ^= xmm2 | ||
8091 | # asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5 | ||
8092 | # asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4 | ||
8093 | pxor %xmm2,%xmm4 | ||
8094 | |||
8095 | # qhasm: xmm2 ^= xmm10 | ||
8096 | # asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3 | ||
8097 | # asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2 | ||
8098 | pxor %xmm10,%xmm2 | ||
8099 | |||
8100 | # qhasm: xmm15 ^= xmm13 | ||
8101 | # asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14 | ||
8102 | # asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13 | ||
8103 | pxor %xmm15,%xmm13 | ||
8104 | |||
8105 | # qhasm: xmm14 ^= xmm9 | ||
8106 | # asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12 | ||
8107 | # asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11 | ||
8108 | pxor %xmm12,%xmm11 | ||
8109 | |||
8110 | # qhasm: xmm11 = xmm15 | ||
8111 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
8112 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
8113 | movdqa %xmm13,%xmm10 | ||
8114 | |||
8115 | # qhasm: xmm11 ^= xmm14 | ||
8116 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
8117 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
8118 | pxor %xmm11,%xmm10 | ||
8119 | |||
8120 | # qhasm: xmm11 &= xmm7 | ||
8121 | # asm 1: pand <xmm7=int6464#8,<xmm11=int6464#11 | ||
8122 | # asm 2: pand <xmm7=%xmm7,<xmm11=%xmm10 | ||
8123 | pand %xmm7,%xmm10 | ||
8124 | |||
8125 | # qhasm: xmm7 ^= xmm1 | ||
8126 | # asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8 | ||
8127 | # asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7 | ||
8128 | pxor %xmm1,%xmm7 | ||
8129 | |||
8130 | # qhasm: xmm7 &= xmm14 | ||
8131 | # asm 1: pand <xmm14=int6464#12,<xmm7=int6464#8 | ||
8132 | # asm 2: pand <xmm14=%xmm11,<xmm7=%xmm7 | ||
8133 | pand %xmm11,%xmm7 | ||
8134 | |||
8135 | # qhasm: xmm1 &= xmm15 | ||
8136 | # asm 1: pand <xmm15=int6464#14,<xmm1=int6464#2 | ||
8137 | # asm 2: pand <xmm15=%xmm13,<xmm1=%xmm1 | ||
8138 | pand %xmm13,%xmm1 | ||
8139 | |||
8140 | # qhasm: xmm7 ^= xmm1 | ||
8141 | # asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8 | ||
8142 | # asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7 | ||
8143 | pxor %xmm1,%xmm7 | ||
8144 | |||
8145 | # qhasm: xmm1 ^= xmm11 | ||
8146 | # asm 1: pxor <xmm11=int6464#11,<xmm1=int6464#2 | ||
8147 | # asm 2: pxor <xmm11=%xmm10,<xmm1=%xmm1 | ||
8148 | pxor %xmm10,%xmm1 | ||
8149 | |||
8150 | # qhasm: xmm7 ^= xmm12 | ||
8151 | # asm 1: pxor <xmm12=int6464#9,<xmm7=int6464#8 | ||
8152 | # asm 2: pxor <xmm12=%xmm8,<xmm7=%xmm7 | ||
8153 | pxor %xmm8,%xmm7 | ||
8154 | |||
8155 | # qhasm: xmm4 ^= xmm12 | ||
8156 | # asm 1: pxor <xmm12=int6464#9,<xmm4=int6464#5 | ||
8157 | # asm 2: pxor <xmm12=%xmm8,<xmm4=%xmm4 | ||
8158 | pxor %xmm8,%xmm4 | ||
8159 | |||
8160 | # qhasm: xmm1 ^= xmm8 | ||
8161 | # asm 1: pxor <xmm8=int6464#10,<xmm1=int6464#2 | ||
8162 | # asm 2: pxor <xmm8=%xmm9,<xmm1=%xmm1 | ||
8163 | pxor %xmm9,%xmm1 | ||
8164 | |||
8165 | # qhasm: xmm2 ^= xmm8 | ||
8166 | # asm 1: pxor <xmm8=int6464#10,<xmm2=int6464#3 | ||
8167 | # asm 2: pxor <xmm8=%xmm9,<xmm2=%xmm2 | ||
8168 | pxor %xmm9,%xmm2 | ||
8169 | |||
8170 | # qhasm: xmm7 ^= xmm0 | ||
8171 | # asm 1: pxor <xmm0=int6464#1,<xmm7=int6464#8 | ||
8172 | # asm 2: pxor <xmm0=%xmm0,<xmm7=%xmm7 | ||
8173 | pxor %xmm0,%xmm7 | ||
8174 | |||
8175 | # qhasm: xmm1 ^= xmm6 | ||
8176 | # asm 1: pxor <xmm6=int6464#7,<xmm1=int6464#2 | ||
8177 | # asm 2: pxor <xmm6=%xmm6,<xmm1=%xmm1 | ||
8178 | pxor %xmm6,%xmm1 | ||
8179 | |||
8180 | # qhasm: xmm4 ^= xmm7 | ||
8181 | # asm 1: pxor <xmm7=int6464#8,<xmm4=int6464#5 | ||
8182 | # asm 2: pxor <xmm7=%xmm7,<xmm4=%xmm4 | ||
8183 | pxor %xmm7,%xmm4 | ||
8184 | |||
8185 | # qhasm: xmm6 ^= xmm0 | ||
8186 | # asm 1: pxor <xmm0=int6464#1,<xmm6=int6464#7 | ||
8187 | # asm 2: pxor <xmm0=%xmm0,<xmm6=%xmm6 | ||
8188 | pxor %xmm0,%xmm6 | ||
8189 | |||
8190 | # qhasm: xmm0 ^= xmm1 | ||
8191 | # asm 1: pxor <xmm1=int6464#2,<xmm0=int6464#1 | ||
8192 | # asm 2: pxor <xmm1=%xmm1,<xmm0=%xmm0 | ||
8193 | pxor %xmm1,%xmm0 | ||
8194 | |||
8195 | # qhasm: xmm1 ^= xmm5 | ||
8196 | # asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2 | ||
8197 | # asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1 | ||
8198 | pxor %xmm5,%xmm1 | ||
8199 | |||
8200 | # qhasm: xmm5 ^= xmm2 | ||
8201 | # asm 1: pxor <xmm2=int6464#3,<xmm5=int6464#6 | ||
8202 | # asm 2: pxor <xmm2=%xmm2,<xmm5=%xmm5 | ||
8203 | pxor %xmm2,%xmm5 | ||
8204 | |||
8205 | # qhasm: xmm4 ^= xmm5 | ||
8206 | # asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5 | ||
8207 | # asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4 | ||
8208 | pxor %xmm5,%xmm4 | ||
8209 | |||
8210 | # qhasm: xmm2 ^= xmm3 | ||
8211 | # asm 1: pxor <xmm3=int6464#4,<xmm2=int6464#3 | ||
8212 | # asm 2: pxor <xmm3=%xmm3,<xmm2=%xmm2 | ||
8213 | pxor %xmm3,%xmm2 | ||
8214 | |||
8215 | # qhasm: xmm3 ^= xmm5 | ||
8216 | # asm 1: pxor <xmm5=int6464#6,<xmm3=int6464#4 | ||
8217 | # asm 2: pxor <xmm5=%xmm5,<xmm3=%xmm3 | ||
8218 | pxor %xmm5,%xmm3 | ||
8219 | |||
8220 | # qhasm: xmm6 ^= xmm3 | ||
8221 | # asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7 | ||
8222 | # asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6 | ||
8223 | pxor %xmm3,%xmm6 | ||
8224 | |||
8225 | # qhasm: xmm8 = shuffle dwords of xmm0 by 0x93 | ||
8226 | # asm 1: pshufd $0x93,<xmm0=int6464#1,>xmm8=int6464#9 | ||
8227 | # asm 2: pshufd $0x93,<xmm0=%xmm0,>xmm8=%xmm8 | ||
8228 | pshufd $0x93,%xmm0,%xmm8 | ||
8229 | |||
8230 | # qhasm: xmm9 = shuffle dwords of xmm1 by 0x93 | ||
8231 | # asm 1: pshufd $0x93,<xmm1=int6464#2,>xmm9=int6464#10 | ||
8232 | # asm 2: pshufd $0x93,<xmm1=%xmm1,>xmm9=%xmm9 | ||
8233 | pshufd $0x93,%xmm1,%xmm9 | ||
8234 | |||
8235 | # qhasm: xmm10 = shuffle dwords of xmm4 by 0x93 | ||
8236 | # asm 1: pshufd $0x93,<xmm4=int6464#5,>xmm10=int6464#11 | ||
8237 | # asm 2: pshufd $0x93,<xmm4=%xmm4,>xmm10=%xmm10 | ||
8238 | pshufd $0x93,%xmm4,%xmm10 | ||
8239 | |||
8240 | # qhasm: xmm11 = shuffle dwords of xmm6 by 0x93 | ||
8241 | # asm 1: pshufd $0x93,<xmm6=int6464#7,>xmm11=int6464#12 | ||
8242 | # asm 2: pshufd $0x93,<xmm6=%xmm6,>xmm11=%xmm11 | ||
8243 | pshufd $0x93,%xmm6,%xmm11 | ||
8244 | |||
8245 | # qhasm: xmm12 = shuffle dwords of xmm3 by 0x93 | ||
8246 | # asm 1: pshufd $0x93,<xmm3=int6464#4,>xmm12=int6464#13 | ||
8247 | # asm 2: pshufd $0x93,<xmm3=%xmm3,>xmm12=%xmm12 | ||
8248 | pshufd $0x93,%xmm3,%xmm12 | ||
8249 | |||
8250 | # qhasm: xmm13 = shuffle dwords of xmm7 by 0x93 | ||
8251 | # asm 1: pshufd $0x93,<xmm7=int6464#8,>xmm13=int6464#14 | ||
8252 | # asm 2: pshufd $0x93,<xmm7=%xmm7,>xmm13=%xmm13 | ||
8253 | pshufd $0x93,%xmm7,%xmm13 | ||
8254 | |||
8255 | # qhasm: xmm14 = shuffle dwords of xmm2 by 0x93 | ||
8256 | # asm 1: pshufd $0x93,<xmm2=int6464#3,>xmm14=int6464#15 | ||
8257 | # asm 2: pshufd $0x93,<xmm2=%xmm2,>xmm14=%xmm14 | ||
8258 | pshufd $0x93,%xmm2,%xmm14 | ||
8259 | |||
8260 | # qhasm: xmm15 = shuffle dwords of xmm5 by 0x93 | ||
8261 | # asm 1: pshufd $0x93,<xmm5=int6464#6,>xmm15=int6464#16 | ||
8262 | # asm 2: pshufd $0x93,<xmm5=%xmm5,>xmm15=%xmm15 | ||
8263 | pshufd $0x93,%xmm5,%xmm15 | ||
8264 | |||
8265 | # qhasm: xmm0 ^= xmm8 | ||
8266 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
8267 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
8268 | pxor %xmm8,%xmm0 | ||
8269 | |||
8270 | # qhasm: xmm1 ^= xmm9 | ||
8271 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
8272 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
8273 | pxor %xmm9,%xmm1 | ||
8274 | |||
8275 | # qhasm: xmm4 ^= xmm10 | ||
8276 | # asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5 | ||
8277 | # asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4 | ||
8278 | pxor %xmm10,%xmm4 | ||
8279 | |||
8280 | # qhasm: xmm6 ^= xmm11 | ||
8281 | # asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7 | ||
8282 | # asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6 | ||
8283 | pxor %xmm11,%xmm6 | ||
8284 | |||
8285 | # qhasm: xmm3 ^= xmm12 | ||
8286 | # asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4 | ||
8287 | # asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3 | ||
8288 | pxor %xmm12,%xmm3 | ||
8289 | |||
8290 | # qhasm: xmm7 ^= xmm13 | ||
8291 | # asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8 | ||
8292 | # asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7 | ||
8293 | pxor %xmm13,%xmm7 | ||
8294 | |||
8295 | # qhasm: xmm2 ^= xmm14 | ||
8296 | # asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3 | ||
8297 | # asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2 | ||
8298 | pxor %xmm14,%xmm2 | ||
8299 | |||
8300 | # qhasm: xmm5 ^= xmm15 | ||
8301 | # asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6 | ||
8302 | # asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5 | ||
8303 | pxor %xmm15,%xmm5 | ||
8304 | |||
8305 | # qhasm: xmm8 ^= xmm5 | ||
8306 | # asm 1: pxor <xmm5=int6464#6,<xmm8=int6464#9 | ||
8307 | # asm 2: pxor <xmm5=%xmm5,<xmm8=%xmm8 | ||
8308 | pxor %xmm5,%xmm8 | ||
8309 | |||
8310 | # qhasm: xmm9 ^= xmm0 | ||
8311 | # asm 1: pxor <xmm0=int6464#1,<xmm9=int6464#10 | ||
8312 | # asm 2: pxor <xmm0=%xmm0,<xmm9=%xmm9 | ||
8313 | pxor %xmm0,%xmm9 | ||
8314 | |||
8315 | # qhasm: xmm10 ^= xmm1 | ||
8316 | # asm 1: pxor <xmm1=int6464#2,<xmm10=int6464#11 | ||
8317 | # asm 2: pxor <xmm1=%xmm1,<xmm10=%xmm10 | ||
8318 | pxor %xmm1,%xmm10 | ||
8319 | |||
8320 | # qhasm: xmm9 ^= xmm5 | ||
8321 | # asm 1: pxor <xmm5=int6464#6,<xmm9=int6464#10 | ||
8322 | # asm 2: pxor <xmm5=%xmm5,<xmm9=%xmm9 | ||
8323 | pxor %xmm5,%xmm9 | ||
8324 | |||
8325 | # qhasm: xmm11 ^= xmm4 | ||
8326 | # asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#12 | ||
8327 | # asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm11 | ||
8328 | pxor %xmm4,%xmm11 | ||
8329 | |||
8330 | # qhasm: xmm12 ^= xmm6 | ||
8331 | # asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#13 | ||
8332 | # asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm12 | ||
8333 | pxor %xmm6,%xmm12 | ||
8334 | |||
8335 | # qhasm: xmm13 ^= xmm3 | ||
8336 | # asm 1: pxor <xmm3=int6464#4,<xmm13=int6464#14 | ||
8337 | # asm 2: pxor <xmm3=%xmm3,<xmm13=%xmm13 | ||
8338 | pxor %xmm3,%xmm13 | ||
8339 | |||
8340 | # qhasm: xmm11 ^= xmm5 | ||
8341 | # asm 1: pxor <xmm5=int6464#6,<xmm11=int6464#12 | ||
8342 | # asm 2: pxor <xmm5=%xmm5,<xmm11=%xmm11 | ||
8343 | pxor %xmm5,%xmm11 | ||
8344 | |||
8345 | # qhasm: xmm14 ^= xmm7 | ||
8346 | # asm 1: pxor <xmm7=int6464#8,<xmm14=int6464#15 | ||
8347 | # asm 2: pxor <xmm7=%xmm7,<xmm14=%xmm14 | ||
8348 | pxor %xmm7,%xmm14 | ||
8349 | |||
8350 | # qhasm: xmm15 ^= xmm2 | ||
8351 | # asm 1: pxor <xmm2=int6464#3,<xmm15=int6464#16 | ||
8352 | # asm 2: pxor <xmm2=%xmm2,<xmm15=%xmm15 | ||
8353 | pxor %xmm2,%xmm15 | ||
8354 | |||
8355 | # qhasm: xmm12 ^= xmm5 | ||
8356 | # asm 1: pxor <xmm5=int6464#6,<xmm12=int6464#13 | ||
8357 | # asm 2: pxor <xmm5=%xmm5,<xmm12=%xmm12 | ||
8358 | pxor %xmm5,%xmm12 | ||
8359 | |||
8360 | # qhasm: xmm0 = shuffle dwords of xmm0 by 0x4E | ||
8361 | # asm 1: pshufd $0x4E,<xmm0=int6464#1,>xmm0=int6464#1 | ||
8362 | # asm 2: pshufd $0x4E,<xmm0=%xmm0,>xmm0=%xmm0 | ||
8363 | pshufd $0x4E,%xmm0,%xmm0 | ||
8364 | |||
8365 | # qhasm: xmm1 = shuffle dwords of xmm1 by 0x4E | ||
8366 | # asm 1: pshufd $0x4E,<xmm1=int6464#2,>xmm1=int6464#2 | ||
8367 | # asm 2: pshufd $0x4E,<xmm1=%xmm1,>xmm1=%xmm1 | ||
8368 | pshufd $0x4E,%xmm1,%xmm1 | ||
8369 | |||
8370 | # qhasm: xmm4 = shuffle dwords of xmm4 by 0x4E | ||
8371 | # asm 1: pshufd $0x4E,<xmm4=int6464#5,>xmm4=int6464#5 | ||
8372 | # asm 2: pshufd $0x4E,<xmm4=%xmm4,>xmm4=%xmm4 | ||
8373 | pshufd $0x4E,%xmm4,%xmm4 | ||
8374 | |||
8375 | # qhasm: xmm6 = shuffle dwords of xmm6 by 0x4E | ||
8376 | # asm 1: pshufd $0x4E,<xmm6=int6464#7,>xmm6=int6464#7 | ||
8377 | # asm 2: pshufd $0x4E,<xmm6=%xmm6,>xmm6=%xmm6 | ||
8378 | pshufd $0x4E,%xmm6,%xmm6 | ||
8379 | |||
8380 | # qhasm: xmm3 = shuffle dwords of xmm3 by 0x4E | ||
8381 | # asm 1: pshufd $0x4E,<xmm3=int6464#4,>xmm3=int6464#4 | ||
8382 | # asm 2: pshufd $0x4E,<xmm3=%xmm3,>xmm3=%xmm3 | ||
8383 | pshufd $0x4E,%xmm3,%xmm3 | ||
8384 | |||
8385 | # qhasm: xmm7 = shuffle dwords of xmm7 by 0x4E | ||
8386 | # asm 1: pshufd $0x4E,<xmm7=int6464#8,>xmm7=int6464#8 | ||
8387 | # asm 2: pshufd $0x4E,<xmm7=%xmm7,>xmm7=%xmm7 | ||
8388 | pshufd $0x4E,%xmm7,%xmm7 | ||
8389 | |||
8390 | # qhasm: xmm2 = shuffle dwords of xmm2 by 0x4E | ||
8391 | # asm 1: pshufd $0x4E,<xmm2=int6464#3,>xmm2=int6464#3 | ||
8392 | # asm 2: pshufd $0x4E,<xmm2=%xmm2,>xmm2=%xmm2 | ||
8393 | pshufd $0x4E,%xmm2,%xmm2 | ||
8394 | |||
8395 | # qhasm: xmm5 = shuffle dwords of xmm5 by 0x4E | ||
8396 | # asm 1: pshufd $0x4E,<xmm5=int6464#6,>xmm5=int6464#6 | ||
8397 | # asm 2: pshufd $0x4E,<xmm5=%xmm5,>xmm5=%xmm5 | ||
8398 | pshufd $0x4E,%xmm5,%xmm5 | ||
8399 | |||
8400 | # qhasm: xmm8 ^= xmm0 | ||
8401 | # asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9 | ||
8402 | # asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8 | ||
8403 | pxor %xmm0,%xmm8 | ||
8404 | |||
8405 | # qhasm: xmm9 ^= xmm1 | ||
8406 | # asm 1: pxor <xmm1=int6464#2,<xmm9=int6464#10 | ||
8407 | # asm 2: pxor <xmm1=%xmm1,<xmm9=%xmm9 | ||
8408 | pxor %xmm1,%xmm9 | ||
8409 | |||
8410 | # qhasm: xmm10 ^= xmm4 | ||
8411 | # asm 1: pxor <xmm4=int6464#5,<xmm10=int6464#11 | ||
8412 | # asm 2: pxor <xmm4=%xmm4,<xmm10=%xmm10 | ||
8413 | pxor %xmm4,%xmm10 | ||
8414 | |||
8415 | # qhasm: xmm11 ^= xmm6 | ||
8416 | # asm 1: pxor <xmm6=int6464#7,<xmm11=int6464#12 | ||
8417 | # asm 2: pxor <xmm6=%xmm6,<xmm11=%xmm11 | ||
8418 | pxor %xmm6,%xmm11 | ||
8419 | |||
8420 | # qhasm: xmm12 ^= xmm3 | ||
8421 | # asm 1: pxor <xmm3=int6464#4,<xmm12=int6464#13 | ||
8422 | # asm 2: pxor <xmm3=%xmm3,<xmm12=%xmm12 | ||
8423 | pxor %xmm3,%xmm12 | ||
8424 | |||
8425 | # qhasm: xmm13 ^= xmm7 | ||
8426 | # asm 1: pxor <xmm7=int6464#8,<xmm13=int6464#14 | ||
8427 | # asm 2: pxor <xmm7=%xmm7,<xmm13=%xmm13 | ||
8428 | pxor %xmm7,%xmm13 | ||
8429 | |||
8430 | # qhasm: xmm14 ^= xmm2 | ||
8431 | # asm 1: pxor <xmm2=int6464#3,<xmm14=int6464#15 | ||
8432 | # asm 2: pxor <xmm2=%xmm2,<xmm14=%xmm14 | ||
8433 | pxor %xmm2,%xmm14 | ||
8434 | |||
8435 | # qhasm: xmm15 ^= xmm5 | ||
8436 | # asm 1: pxor <xmm5=int6464#6,<xmm15=int6464#16 | ||
8437 | # asm 2: pxor <xmm5=%xmm5,<xmm15=%xmm15 | ||
8438 | pxor %xmm5,%xmm15 | ||
8439 | |||
8440 | # qhasm: xmm8 ^= *(int128 *)(c + 896) | ||
8441 | # asm 1: pxor 896(<c=int64#4),<xmm8=int6464#9 | ||
8442 | # asm 2: pxor 896(<c=%rcx),<xmm8=%xmm8 | ||
8443 | pxor 896(%rcx),%xmm8 | ||
8444 | |||
8445 | # qhasm: shuffle bytes of xmm8 by SR | ||
8446 | # asm 1: pshufb SR,<xmm8=int6464#9 | ||
8447 | # asm 2: pshufb SR,<xmm8=%xmm8 | ||
8448 | pshufb SR,%xmm8 | ||
8449 | |||
8450 | # qhasm: xmm9 ^= *(int128 *)(c + 912) | ||
8451 | # asm 1: pxor 912(<c=int64#4),<xmm9=int6464#10 | ||
8452 | # asm 2: pxor 912(<c=%rcx),<xmm9=%xmm9 | ||
8453 | pxor 912(%rcx),%xmm9 | ||
8454 | |||
8455 | # qhasm: shuffle bytes of xmm9 by SR | ||
8456 | # asm 1: pshufb SR,<xmm9=int6464#10 | ||
8457 | # asm 2: pshufb SR,<xmm9=%xmm9 | ||
8458 | pshufb SR,%xmm9 | ||
8459 | |||
8460 | # qhasm: xmm10 ^= *(int128 *)(c + 928) | ||
8461 | # asm 1: pxor 928(<c=int64#4),<xmm10=int6464#11 | ||
8462 | # asm 2: pxor 928(<c=%rcx),<xmm10=%xmm10 | ||
8463 | pxor 928(%rcx),%xmm10 | ||
8464 | |||
8465 | # qhasm: shuffle bytes of xmm10 by SR | ||
8466 | # asm 1: pshufb SR,<xmm10=int6464#11 | ||
8467 | # asm 2: pshufb SR,<xmm10=%xmm10 | ||
8468 | pshufb SR,%xmm10 | ||
8469 | |||
8470 | # qhasm: xmm11 ^= *(int128 *)(c + 944) | ||
8471 | # asm 1: pxor 944(<c=int64#4),<xmm11=int6464#12 | ||
8472 | # asm 2: pxor 944(<c=%rcx),<xmm11=%xmm11 | ||
8473 | pxor 944(%rcx),%xmm11 | ||
8474 | |||
8475 | # qhasm: shuffle bytes of xmm11 by SR | ||
8476 | # asm 1: pshufb SR,<xmm11=int6464#12 | ||
8477 | # asm 2: pshufb SR,<xmm11=%xmm11 | ||
8478 | pshufb SR,%xmm11 | ||
8479 | |||
8480 | # qhasm: xmm12 ^= *(int128 *)(c + 960) | ||
8481 | # asm 1: pxor 960(<c=int64#4),<xmm12=int6464#13 | ||
8482 | # asm 2: pxor 960(<c=%rcx),<xmm12=%xmm12 | ||
8483 | pxor 960(%rcx),%xmm12 | ||
8484 | |||
8485 | # qhasm: shuffle bytes of xmm12 by SR | ||
8486 | # asm 1: pshufb SR,<xmm12=int6464#13 | ||
8487 | # asm 2: pshufb SR,<xmm12=%xmm12 | ||
8488 | pshufb SR,%xmm12 | ||
8489 | |||
8490 | # qhasm: xmm13 ^= *(int128 *)(c + 976) | ||
8491 | # asm 1: pxor 976(<c=int64#4),<xmm13=int6464#14 | ||
8492 | # asm 2: pxor 976(<c=%rcx),<xmm13=%xmm13 | ||
8493 | pxor 976(%rcx),%xmm13 | ||
8494 | |||
8495 | # qhasm: shuffle bytes of xmm13 by SR | ||
8496 | # asm 1: pshufb SR,<xmm13=int6464#14 | ||
8497 | # asm 2: pshufb SR,<xmm13=%xmm13 | ||
8498 | pshufb SR,%xmm13 | ||
8499 | |||
8500 | # qhasm: xmm14 ^= *(int128 *)(c + 992) | ||
8501 | # asm 1: pxor 992(<c=int64#4),<xmm14=int6464#15 | ||
8502 | # asm 2: pxor 992(<c=%rcx),<xmm14=%xmm14 | ||
8503 | pxor 992(%rcx),%xmm14 | ||
8504 | |||
8505 | # qhasm: shuffle bytes of xmm14 by SR | ||
8506 | # asm 1: pshufb SR,<xmm14=int6464#15 | ||
8507 | # asm 2: pshufb SR,<xmm14=%xmm14 | ||
8508 | pshufb SR,%xmm14 | ||
8509 | |||
8510 | # qhasm: xmm15 ^= *(int128 *)(c + 1008) | ||
8511 | # asm 1: pxor 1008(<c=int64#4),<xmm15=int6464#16 | ||
8512 | # asm 2: pxor 1008(<c=%rcx),<xmm15=%xmm15 | ||
8513 | pxor 1008(%rcx),%xmm15 | ||
8514 | |||
8515 | # qhasm: shuffle bytes of xmm15 by SR | ||
8516 | # asm 1: pshufb SR,<xmm15=int6464#16 | ||
8517 | # asm 2: pshufb SR,<xmm15=%xmm15 | ||
8518 | pshufb SR,%xmm15 | ||
8519 | |||
8520 | # qhasm: xmm13 ^= xmm14 | ||
8521 | # asm 1: pxor <xmm14=int6464#15,<xmm13=int6464#14 | ||
8522 | # asm 2: pxor <xmm14=%xmm14,<xmm13=%xmm13 | ||
8523 | pxor %xmm14,%xmm13 | ||
8524 | |||
8525 | # qhasm: xmm10 ^= xmm9 | ||
8526 | # asm 1: pxor <xmm9=int6464#10,<xmm10=int6464#11 | ||
8527 | # asm 2: pxor <xmm9=%xmm9,<xmm10=%xmm10 | ||
8528 | pxor %xmm9,%xmm10 | ||
8529 | |||
8530 | # qhasm: xmm13 ^= xmm8 | ||
8531 | # asm 1: pxor <xmm8=int6464#9,<xmm13=int6464#14 | ||
8532 | # asm 2: pxor <xmm8=%xmm8,<xmm13=%xmm13 | ||
8533 | pxor %xmm8,%xmm13 | ||
8534 | |||
8535 | # qhasm: xmm14 ^= xmm10 | ||
8536 | # asm 1: pxor <xmm10=int6464#11,<xmm14=int6464#15 | ||
8537 | # asm 2: pxor <xmm10=%xmm10,<xmm14=%xmm14 | ||
8538 | pxor %xmm10,%xmm14 | ||
8539 | |||
8540 | # qhasm: xmm11 ^= xmm8 | ||
8541 | # asm 1: pxor <xmm8=int6464#9,<xmm11=int6464#12 | ||
8542 | # asm 2: pxor <xmm8=%xmm8,<xmm11=%xmm11 | ||
8543 | pxor %xmm8,%xmm11 | ||
8544 | |||
8545 | # qhasm: xmm14 ^= xmm11 | ||
8546 | # asm 1: pxor <xmm11=int6464#12,<xmm14=int6464#15 | ||
8547 | # asm 2: pxor <xmm11=%xmm11,<xmm14=%xmm14 | ||
8548 | pxor %xmm11,%xmm14 | ||
8549 | |||
8550 | # qhasm: xmm11 ^= xmm15 | ||
8551 | # asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#12 | ||
8552 | # asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm11 | ||
8553 | pxor %xmm15,%xmm11 | ||
8554 | |||
8555 | # qhasm: xmm11 ^= xmm12 | ||
8556 | # asm 1: pxor <xmm12=int6464#13,<xmm11=int6464#12 | ||
8557 | # asm 2: pxor <xmm12=%xmm12,<xmm11=%xmm11 | ||
8558 | pxor %xmm12,%xmm11 | ||
8559 | |||
8560 | # qhasm: xmm15 ^= xmm13 | ||
8561 | # asm 1: pxor <xmm13=int6464#14,<xmm15=int6464#16 | ||
8562 | # asm 2: pxor <xmm13=%xmm13,<xmm15=%xmm15 | ||
8563 | pxor %xmm13,%xmm15 | ||
8564 | |||
8565 | # qhasm: xmm11 ^= xmm9 | ||
8566 | # asm 1: pxor <xmm9=int6464#10,<xmm11=int6464#12 | ||
8567 | # asm 2: pxor <xmm9=%xmm9,<xmm11=%xmm11 | ||
8568 | pxor %xmm9,%xmm11 | ||
8569 | |||
8570 | # qhasm: xmm12 ^= xmm13 | ||
8571 | # asm 1: pxor <xmm13=int6464#14,<xmm12=int6464#13 | ||
8572 | # asm 2: pxor <xmm13=%xmm13,<xmm12=%xmm12 | ||
8573 | pxor %xmm13,%xmm12 | ||
8574 | |||
8575 | # qhasm: xmm10 ^= xmm15 | ||
8576 | # asm 1: pxor <xmm15=int6464#16,<xmm10=int6464#11 | ||
8577 | # asm 2: pxor <xmm15=%xmm15,<xmm10=%xmm10 | ||
8578 | pxor %xmm15,%xmm10 | ||
8579 | |||
8580 | # qhasm: xmm9 ^= xmm13 | ||
8581 | # asm 1: pxor <xmm13=int6464#14,<xmm9=int6464#10 | ||
8582 | # asm 2: pxor <xmm13=%xmm13,<xmm9=%xmm9 | ||
8583 | pxor %xmm13,%xmm9 | ||
8584 | |||
8585 | # qhasm: xmm3 = xmm15 | ||
8586 | # asm 1: movdqa <xmm15=int6464#16,>xmm3=int6464#1 | ||
8587 | # asm 2: movdqa <xmm15=%xmm15,>xmm3=%xmm0 | ||
8588 | movdqa %xmm15,%xmm0 | ||
8589 | |||
8590 | # qhasm: xmm2 = xmm9 | ||
8591 | # asm 1: movdqa <xmm9=int6464#10,>xmm2=int6464#2 | ||
8592 | # asm 2: movdqa <xmm9=%xmm9,>xmm2=%xmm1 | ||
8593 | movdqa %xmm9,%xmm1 | ||
8594 | |||
8595 | # qhasm: xmm1 = xmm13 | ||
8596 | # asm 1: movdqa <xmm13=int6464#14,>xmm1=int6464#3 | ||
8597 | # asm 2: movdqa <xmm13=%xmm13,>xmm1=%xmm2 | ||
8598 | movdqa %xmm13,%xmm2 | ||
8599 | |||
8600 | # qhasm: xmm5 = xmm10 | ||
8601 | # asm 1: movdqa <xmm10=int6464#11,>xmm5=int6464#4 | ||
8602 | # asm 2: movdqa <xmm10=%xmm10,>xmm5=%xmm3 | ||
8603 | movdqa %xmm10,%xmm3 | ||
8604 | |||
8605 | # qhasm: xmm4 = xmm14 | ||
8606 | # asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#5 | ||
8607 | # asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm4 | ||
8608 | movdqa %xmm14,%xmm4 | ||
8609 | |||
8610 | # qhasm: xmm3 ^= xmm12 | ||
8611 | # asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#1 | ||
8612 | # asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm0 | ||
8613 | pxor %xmm12,%xmm0 | ||
8614 | |||
8615 | # qhasm: xmm2 ^= xmm10 | ||
8616 | # asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#2 | ||
8617 | # asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm1 | ||
8618 | pxor %xmm10,%xmm1 | ||
8619 | |||
8620 | # qhasm: xmm1 ^= xmm11 | ||
8621 | # asm 1: pxor <xmm11=int6464#12,<xmm1=int6464#3 | ||
8622 | # asm 2: pxor <xmm11=%xmm11,<xmm1=%xmm2 | ||
8623 | pxor %xmm11,%xmm2 | ||
8624 | |||
8625 | # qhasm: xmm5 ^= xmm12 | ||
8626 | # asm 1: pxor <xmm12=int6464#13,<xmm5=int6464#4 | ||
8627 | # asm 2: pxor <xmm12=%xmm12,<xmm5=%xmm3 | ||
8628 | pxor %xmm12,%xmm3 | ||
8629 | |||
8630 | # qhasm: xmm4 ^= xmm8 | ||
8631 | # asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#5 | ||
8632 | # asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm4 | ||
8633 | pxor %xmm8,%xmm4 | ||
8634 | |||
8635 | # qhasm: xmm6 = xmm3 | ||
8636 | # asm 1: movdqa <xmm3=int6464#1,>xmm6=int6464#6 | ||
8637 | # asm 2: movdqa <xmm3=%xmm0,>xmm6=%xmm5 | ||
8638 | movdqa %xmm0,%xmm5 | ||
8639 | |||
8640 | # qhasm: xmm0 = xmm2 | ||
8641 | # asm 1: movdqa <xmm2=int6464#2,>xmm0=int6464#7 | ||
8642 | # asm 2: movdqa <xmm2=%xmm1,>xmm0=%xmm6 | ||
8643 | movdqa %xmm1,%xmm6 | ||
8644 | |||
8645 | # qhasm: xmm7 = xmm3 | ||
8646 | # asm 1: movdqa <xmm3=int6464#1,>xmm7=int6464#8 | ||
8647 | # asm 2: movdqa <xmm3=%xmm0,>xmm7=%xmm7 | ||
8648 | movdqa %xmm0,%xmm7 | ||
8649 | |||
8650 | # qhasm: xmm2 |= xmm1 | ||
8651 | # asm 1: por <xmm1=int6464#3,<xmm2=int6464#2 | ||
8652 | # asm 2: por <xmm1=%xmm2,<xmm2=%xmm1 | ||
8653 | por %xmm2,%xmm1 | ||
8654 | |||
8655 | # qhasm: xmm3 |= xmm4 | ||
8656 | # asm 1: por <xmm4=int6464#5,<xmm3=int6464#1 | ||
8657 | # asm 2: por <xmm4=%xmm4,<xmm3=%xmm0 | ||
8658 | por %xmm4,%xmm0 | ||
8659 | |||
8660 | # qhasm: xmm7 ^= xmm0 | ||
8661 | # asm 1: pxor <xmm0=int6464#7,<xmm7=int6464#8 | ||
8662 | # asm 2: pxor <xmm0=%xmm6,<xmm7=%xmm7 | ||
8663 | pxor %xmm6,%xmm7 | ||
8664 | |||
8665 | # qhasm: xmm6 &= xmm4 | ||
8666 | # asm 1: pand <xmm4=int6464#5,<xmm6=int6464#6 | ||
8667 | # asm 2: pand <xmm4=%xmm4,<xmm6=%xmm5 | ||
8668 | pand %xmm4,%xmm5 | ||
8669 | |||
8670 | # qhasm: xmm0 &= xmm1 | ||
8671 | # asm 1: pand <xmm1=int6464#3,<xmm0=int6464#7 | ||
8672 | # asm 2: pand <xmm1=%xmm2,<xmm0=%xmm6 | ||
8673 | pand %xmm2,%xmm6 | ||
8674 | |||
8675 | # qhasm: xmm4 ^= xmm1 | ||
8676 | # asm 1: pxor <xmm1=int6464#3,<xmm4=int6464#5 | ||
8677 | # asm 2: pxor <xmm1=%xmm2,<xmm4=%xmm4 | ||
8678 | pxor %xmm2,%xmm4 | ||
8679 | |||
8680 | # qhasm: xmm7 &= xmm4 | ||
8681 | # asm 1: pand <xmm4=int6464#5,<xmm7=int6464#8 | ||
8682 | # asm 2: pand <xmm4=%xmm4,<xmm7=%xmm7 | ||
8683 | pand %xmm4,%xmm7 | ||
8684 | |||
8685 | # qhasm: xmm4 = xmm11 | ||
8686 | # asm 1: movdqa <xmm11=int6464#12,>xmm4=int6464#3 | ||
8687 | # asm 2: movdqa <xmm11=%xmm11,>xmm4=%xmm2 | ||
8688 | movdqa %xmm11,%xmm2 | ||
8689 | |||
8690 | # qhasm: xmm4 ^= xmm8 | ||
8691 | # asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#3 | ||
8692 | # asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm2 | ||
8693 | pxor %xmm8,%xmm2 | ||
8694 | |||
8695 | # qhasm: xmm5 &= xmm4 | ||
8696 | # asm 1: pand <xmm4=int6464#3,<xmm5=int6464#4 | ||
8697 | # asm 2: pand <xmm4=%xmm2,<xmm5=%xmm3 | ||
8698 | pand %xmm2,%xmm3 | ||
8699 | |||
8700 | # qhasm: xmm3 ^= xmm5 | ||
8701 | # asm 1: pxor <xmm5=int6464#4,<xmm3=int6464#1 | ||
8702 | # asm 2: pxor <xmm5=%xmm3,<xmm3=%xmm0 | ||
8703 | pxor %xmm3,%xmm0 | ||
8704 | |||
8705 | # qhasm: xmm2 ^= xmm5 | ||
8706 | # asm 1: pxor <xmm5=int6464#4,<xmm2=int6464#2 | ||
8707 | # asm 2: pxor <xmm5=%xmm3,<xmm2=%xmm1 | ||
8708 | pxor %xmm3,%xmm1 | ||
8709 | |||
8710 | # qhasm: xmm5 = xmm15 | ||
8711 | # asm 1: movdqa <xmm15=int6464#16,>xmm5=int6464#3 | ||
8712 | # asm 2: movdqa <xmm15=%xmm15,>xmm5=%xmm2 | ||
8713 | movdqa %xmm15,%xmm2 | ||
8714 | |||
8715 | # qhasm: xmm5 ^= xmm9 | ||
8716 | # asm 1: pxor <xmm9=int6464#10,<xmm5=int6464#3 | ||
8717 | # asm 2: pxor <xmm9=%xmm9,<xmm5=%xmm2 | ||
8718 | pxor %xmm9,%xmm2 | ||
8719 | |||
8720 | # qhasm: xmm4 = xmm13 | ||
8721 | # asm 1: movdqa <xmm13=int6464#14,>xmm4=int6464#4 | ||
8722 | # asm 2: movdqa <xmm13=%xmm13,>xmm4=%xmm3 | ||
8723 | movdqa %xmm13,%xmm3 | ||
8724 | |||
8725 | # qhasm: xmm1 = xmm5 | ||
8726 | # asm 1: movdqa <xmm5=int6464#3,>xmm1=int6464#5 | ||
8727 | # asm 2: movdqa <xmm5=%xmm2,>xmm1=%xmm4 | ||
8728 | movdqa %xmm2,%xmm4 | ||
8729 | |||
8730 | # qhasm: xmm4 ^= xmm14 | ||
8731 | # asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#4 | ||
8732 | # asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm3 | ||
8733 | pxor %xmm14,%xmm3 | ||
8734 | |||
8735 | # qhasm: xmm1 |= xmm4 | ||
8736 | # asm 1: por <xmm4=int6464#4,<xmm1=int6464#5 | ||
8737 | # asm 2: por <xmm4=%xmm3,<xmm1=%xmm4 | ||
8738 | por %xmm3,%xmm4 | ||
8739 | |||
8740 | # qhasm: xmm5 &= xmm4 | ||
8741 | # asm 1: pand <xmm4=int6464#4,<xmm5=int6464#3 | ||
8742 | # asm 2: pand <xmm4=%xmm3,<xmm5=%xmm2 | ||
8743 | pand %xmm3,%xmm2 | ||
8744 | |||
8745 | # qhasm: xmm0 ^= xmm5 | ||
8746 | # asm 1: pxor <xmm5=int6464#3,<xmm0=int6464#7 | ||
8747 | # asm 2: pxor <xmm5=%xmm2,<xmm0=%xmm6 | ||
8748 | pxor %xmm2,%xmm6 | ||
8749 | |||
8750 | # qhasm: xmm3 ^= xmm7 | ||
8751 | # asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#1 | ||
8752 | # asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm0 | ||
8753 | pxor %xmm7,%xmm0 | ||
8754 | |||
8755 | # qhasm: xmm2 ^= xmm6 | ||
8756 | # asm 1: pxor <xmm6=int6464#6,<xmm2=int6464#2 | ||
8757 | # asm 2: pxor <xmm6=%xmm5,<xmm2=%xmm1 | ||
8758 | pxor %xmm5,%xmm1 | ||
8759 | |||
8760 | # qhasm: xmm1 ^= xmm7 | ||
8761 | # asm 1: pxor <xmm7=int6464#8,<xmm1=int6464#5 | ||
8762 | # asm 2: pxor <xmm7=%xmm7,<xmm1=%xmm4 | ||
8763 | pxor %xmm7,%xmm4 | ||
8764 | |||
8765 | # qhasm: xmm0 ^= xmm6 | ||
8766 | # asm 1: pxor <xmm6=int6464#6,<xmm0=int6464#7 | ||
8767 | # asm 2: pxor <xmm6=%xmm5,<xmm0=%xmm6 | ||
8768 | pxor %xmm5,%xmm6 | ||
8769 | |||
8770 | # qhasm: xmm1 ^= xmm6 | ||
8771 | # asm 1: pxor <xmm6=int6464#6,<xmm1=int6464#5 | ||
8772 | # asm 2: pxor <xmm6=%xmm5,<xmm1=%xmm4 | ||
8773 | pxor %xmm5,%xmm4 | ||
8774 | |||
8775 | # qhasm: xmm4 = xmm10 | ||
8776 | # asm 1: movdqa <xmm10=int6464#11,>xmm4=int6464#3 | ||
8777 | # asm 2: movdqa <xmm10=%xmm10,>xmm4=%xmm2 | ||
8778 | movdqa %xmm10,%xmm2 | ||
8779 | |||
8780 | # qhasm: xmm5 = xmm12 | ||
8781 | # asm 1: movdqa <xmm12=int6464#13,>xmm5=int6464#4 | ||
8782 | # asm 2: movdqa <xmm12=%xmm12,>xmm5=%xmm3 | ||
8783 | movdqa %xmm12,%xmm3 | ||
8784 | |||
8785 | # qhasm: xmm6 = xmm9 | ||
8786 | # asm 1: movdqa <xmm9=int6464#10,>xmm6=int6464#6 | ||
8787 | # asm 2: movdqa <xmm9=%xmm9,>xmm6=%xmm5 | ||
8788 | movdqa %xmm9,%xmm5 | ||
8789 | |||
8790 | # qhasm: xmm7 = xmm15 | ||
8791 | # asm 1: movdqa <xmm15=int6464#16,>xmm7=int6464#8 | ||
8792 | # asm 2: movdqa <xmm15=%xmm15,>xmm7=%xmm7 | ||
8793 | movdqa %xmm15,%xmm7 | ||
8794 | |||
8795 | # qhasm: xmm4 &= xmm11 | ||
8796 | # asm 1: pand <xmm11=int6464#12,<xmm4=int6464#3 | ||
8797 | # asm 2: pand <xmm11=%xmm11,<xmm4=%xmm2 | ||
8798 | pand %xmm11,%xmm2 | ||
8799 | |||
8800 | # qhasm: xmm5 &= xmm8 | ||
8801 | # asm 1: pand <xmm8=int6464#9,<xmm5=int6464#4 | ||
8802 | # asm 2: pand <xmm8=%xmm8,<xmm5=%xmm3 | ||
8803 | pand %xmm8,%xmm3 | ||
8804 | |||
8805 | # qhasm: xmm6 &= xmm13 | ||
8806 | # asm 1: pand <xmm13=int6464#14,<xmm6=int6464#6 | ||
8807 | # asm 2: pand <xmm13=%xmm13,<xmm6=%xmm5 | ||
8808 | pand %xmm13,%xmm5 | ||
8809 | |||
8810 | # qhasm: xmm7 |= xmm14 | ||
8811 | # asm 1: por <xmm14=int6464#15,<xmm7=int6464#8 | ||
8812 | # asm 2: por <xmm14=%xmm14,<xmm7=%xmm7 | ||
8813 | por %xmm14,%xmm7 | ||
8814 | |||
8815 | # qhasm: xmm3 ^= xmm4 | ||
8816 | # asm 1: pxor <xmm4=int6464#3,<xmm3=int6464#1 | ||
8817 | # asm 2: pxor <xmm4=%xmm2,<xmm3=%xmm0 | ||
8818 | pxor %xmm2,%xmm0 | ||
8819 | |||
8820 | # qhasm: xmm2 ^= xmm5 | ||
8821 | # asm 1: pxor <xmm5=int6464#4,<xmm2=int6464#2 | ||
8822 | # asm 2: pxor <xmm5=%xmm3,<xmm2=%xmm1 | ||
8823 | pxor %xmm3,%xmm1 | ||
8824 | |||
8825 | # qhasm: xmm1 ^= xmm6 | ||
8826 | # asm 1: pxor <xmm6=int6464#6,<xmm1=int6464#5 | ||
8827 | # asm 2: pxor <xmm6=%xmm5,<xmm1=%xmm4 | ||
8828 | pxor %xmm5,%xmm4 | ||
8829 | |||
8830 | # qhasm: xmm0 ^= xmm7 | ||
8831 | # asm 1: pxor <xmm7=int6464#8,<xmm0=int6464#7 | ||
8832 | # asm 2: pxor <xmm7=%xmm7,<xmm0=%xmm6 | ||
8833 | pxor %xmm7,%xmm6 | ||
8834 | |||
8835 | # qhasm: xmm4 = xmm3 | ||
8836 | # asm 1: movdqa <xmm3=int6464#1,>xmm4=int6464#3 | ||
8837 | # asm 2: movdqa <xmm3=%xmm0,>xmm4=%xmm2 | ||
8838 | movdqa %xmm0,%xmm2 | ||
8839 | |||
8840 | # qhasm: xmm4 ^= xmm2 | ||
8841 | # asm 1: pxor <xmm2=int6464#2,<xmm4=int6464#3 | ||
8842 | # asm 2: pxor <xmm2=%xmm1,<xmm4=%xmm2 | ||
8843 | pxor %xmm1,%xmm2 | ||
8844 | |||
8845 | # qhasm: xmm3 &= xmm1 | ||
8846 | # asm 1: pand <xmm1=int6464#5,<xmm3=int6464#1 | ||
8847 | # asm 2: pand <xmm1=%xmm4,<xmm3=%xmm0 | ||
8848 | pand %xmm4,%xmm0 | ||
8849 | |||
8850 | # qhasm: xmm6 = xmm0 | ||
8851 | # asm 1: movdqa <xmm0=int6464#7,>xmm6=int6464#4 | ||
8852 | # asm 2: movdqa <xmm0=%xmm6,>xmm6=%xmm3 | ||
8853 | movdqa %xmm6,%xmm3 | ||
8854 | |||
8855 | # qhasm: xmm6 ^= xmm3 | ||
8856 | # asm 1: pxor <xmm3=int6464#1,<xmm6=int6464#4 | ||
8857 | # asm 2: pxor <xmm3=%xmm0,<xmm6=%xmm3 | ||
8858 | pxor %xmm0,%xmm3 | ||
8859 | |||
8860 | # qhasm: xmm7 = xmm4 | ||
8861 | # asm 1: movdqa <xmm4=int6464#3,>xmm7=int6464#6 | ||
8862 | # asm 2: movdqa <xmm4=%xmm2,>xmm7=%xmm5 | ||
8863 | movdqa %xmm2,%xmm5 | ||
8864 | |||
8865 | # qhasm: xmm7 &= xmm6 | ||
8866 | # asm 1: pand <xmm6=int6464#4,<xmm7=int6464#6 | ||
8867 | # asm 2: pand <xmm6=%xmm3,<xmm7=%xmm5 | ||
8868 | pand %xmm3,%xmm5 | ||
8869 | |||
8870 | # qhasm: xmm7 ^= xmm2 | ||
8871 | # asm 1: pxor <xmm2=int6464#2,<xmm7=int6464#6 | ||
8872 | # asm 2: pxor <xmm2=%xmm1,<xmm7=%xmm5 | ||
8873 | pxor %xmm1,%xmm5 | ||
8874 | |||
8875 | # qhasm: xmm5 = xmm1 | ||
8876 | # asm 1: movdqa <xmm1=int6464#5,>xmm5=int6464#8 | ||
8877 | # asm 2: movdqa <xmm1=%xmm4,>xmm5=%xmm7 | ||
8878 | movdqa %xmm4,%xmm7 | ||
8879 | |||
8880 | # qhasm: xmm5 ^= xmm0 | ||
8881 | # asm 1: pxor <xmm0=int6464#7,<xmm5=int6464#8 | ||
8882 | # asm 2: pxor <xmm0=%xmm6,<xmm5=%xmm7 | ||
8883 | pxor %xmm6,%xmm7 | ||
8884 | |||
8885 | # qhasm: xmm3 ^= xmm2 | ||
8886 | # asm 1: pxor <xmm2=int6464#2,<xmm3=int6464#1 | ||
8887 | # asm 2: pxor <xmm2=%xmm1,<xmm3=%xmm0 | ||
8888 | pxor %xmm1,%xmm0 | ||
8889 | |||
8890 | # qhasm: xmm5 &= xmm3 | ||
8891 | # asm 1: pand <xmm3=int6464#1,<xmm5=int6464#8 | ||
8892 | # asm 2: pand <xmm3=%xmm0,<xmm5=%xmm7 | ||
8893 | pand %xmm0,%xmm7 | ||
8894 | |||
8895 | # qhasm: xmm5 ^= xmm0 | ||
8896 | # asm 1: pxor <xmm0=int6464#7,<xmm5=int6464#8 | ||
8897 | # asm 2: pxor <xmm0=%xmm6,<xmm5=%xmm7 | ||
8898 | pxor %xmm6,%xmm7 | ||
8899 | |||
8900 | # qhasm: xmm1 ^= xmm5 | ||
8901 | # asm 1: pxor <xmm5=int6464#8,<xmm1=int6464#5 | ||
8902 | # asm 2: pxor <xmm5=%xmm7,<xmm1=%xmm4 | ||
8903 | pxor %xmm7,%xmm4 | ||
8904 | |||
8905 | # qhasm: xmm2 = xmm6 | ||
8906 | # asm 1: movdqa <xmm6=int6464#4,>xmm2=int6464#1 | ||
8907 | # asm 2: movdqa <xmm6=%xmm3,>xmm2=%xmm0 | ||
8908 | movdqa %xmm3,%xmm0 | ||
8909 | |||
8910 | # qhasm: xmm2 ^= xmm5 | ||
8911 | # asm 1: pxor <xmm5=int6464#8,<xmm2=int6464#1 | ||
8912 | # asm 2: pxor <xmm5=%xmm7,<xmm2=%xmm0 | ||
8913 | pxor %xmm7,%xmm0 | ||
8914 | |||
8915 | # qhasm: xmm2 &= xmm0 | ||
8916 | # asm 1: pand <xmm0=int6464#7,<xmm2=int6464#1 | ||
8917 | # asm 2: pand <xmm0=%xmm6,<xmm2=%xmm0 | ||
8918 | pand %xmm6,%xmm0 | ||
8919 | |||
8920 | # qhasm: xmm1 ^= xmm2 | ||
8921 | # asm 1: pxor <xmm2=int6464#1,<xmm1=int6464#5 | ||
8922 | # asm 2: pxor <xmm2=%xmm0,<xmm1=%xmm4 | ||
8923 | pxor %xmm0,%xmm4 | ||
8924 | |||
8925 | # qhasm: xmm6 ^= xmm2 | ||
8926 | # asm 1: pxor <xmm2=int6464#1,<xmm6=int6464#4 | ||
8927 | # asm 2: pxor <xmm2=%xmm0,<xmm6=%xmm3 | ||
8928 | pxor %xmm0,%xmm3 | ||
8929 | |||
8930 | # qhasm: xmm6 &= xmm7 | ||
8931 | # asm 1: pand <xmm7=int6464#6,<xmm6=int6464#4 | ||
8932 | # asm 2: pand <xmm7=%xmm5,<xmm6=%xmm3 | ||
8933 | pand %xmm5,%xmm3 | ||
8934 | |||
8935 | # qhasm: xmm6 ^= xmm4 | ||
8936 | # asm 1: pxor <xmm4=int6464#3,<xmm6=int6464#4 | ||
8937 | # asm 2: pxor <xmm4=%xmm2,<xmm6=%xmm3 | ||
8938 | pxor %xmm2,%xmm3 | ||
8939 | |||
8940 | # qhasm: xmm4 = xmm14 | ||
8941 | # asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#1 | ||
8942 | # asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm0 | ||
8943 | movdqa %xmm14,%xmm0 | ||
8944 | |||
8945 | # qhasm: xmm0 = xmm13 | ||
8946 | # asm 1: movdqa <xmm13=int6464#14,>xmm0=int6464#2 | ||
8947 | # asm 2: movdqa <xmm13=%xmm13,>xmm0=%xmm1 | ||
8948 | movdqa %xmm13,%xmm1 | ||
8949 | |||
8950 | # qhasm: xmm2 = xmm7 | ||
8951 | # asm 1: movdqa <xmm7=int6464#6,>xmm2=int6464#3 | ||
8952 | # asm 2: movdqa <xmm7=%xmm5,>xmm2=%xmm2 | ||
8953 | movdqa %xmm5,%xmm2 | ||
8954 | |||
8955 | # qhasm: xmm2 ^= xmm6 | ||
8956 | # asm 1: pxor <xmm6=int6464#4,<xmm2=int6464#3 | ||
8957 | # asm 2: pxor <xmm6=%xmm3,<xmm2=%xmm2 | ||
8958 | pxor %xmm3,%xmm2 | ||
8959 | |||
8960 | # qhasm: xmm2 &= xmm14 | ||
8961 | # asm 1: pand <xmm14=int6464#15,<xmm2=int6464#3 | ||
8962 | # asm 2: pand <xmm14=%xmm14,<xmm2=%xmm2 | ||
8963 | pand %xmm14,%xmm2 | ||
8964 | |||
8965 | # qhasm: xmm14 ^= xmm13 | ||
8966 | # asm 1: pxor <xmm13=int6464#14,<xmm14=int6464#15 | ||
8967 | # asm 2: pxor <xmm13=%xmm13,<xmm14=%xmm14 | ||
8968 | pxor %xmm13,%xmm14 | ||
8969 | |||
8970 | # qhasm: xmm14 &= xmm6 | ||
8971 | # asm 1: pand <xmm6=int6464#4,<xmm14=int6464#15 | ||
8972 | # asm 2: pand <xmm6=%xmm3,<xmm14=%xmm14 | ||
8973 | pand %xmm3,%xmm14 | ||
8974 | |||
8975 | # qhasm: xmm13 &= xmm7 | ||
8976 | # asm 1: pand <xmm7=int6464#6,<xmm13=int6464#14 | ||
8977 | # asm 2: pand <xmm7=%xmm5,<xmm13=%xmm13 | ||
8978 | pand %xmm5,%xmm13 | ||
8979 | |||
8980 | # qhasm: xmm14 ^= xmm13 | ||
8981 | # asm 1: pxor <xmm13=int6464#14,<xmm14=int6464#15 | ||
8982 | # asm 2: pxor <xmm13=%xmm13,<xmm14=%xmm14 | ||
8983 | pxor %xmm13,%xmm14 | ||
8984 | |||
8985 | # qhasm: xmm13 ^= xmm2 | ||
8986 | # asm 1: pxor <xmm2=int6464#3,<xmm13=int6464#14 | ||
8987 | # asm 2: pxor <xmm2=%xmm2,<xmm13=%xmm13 | ||
8988 | pxor %xmm2,%xmm13 | ||
8989 | |||
8990 | # qhasm: xmm4 ^= xmm8 | ||
8991 | # asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#1 | ||
8992 | # asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm0 | ||
8993 | pxor %xmm8,%xmm0 | ||
8994 | |||
8995 | # qhasm: xmm0 ^= xmm11 | ||
8996 | # asm 1: pxor <xmm11=int6464#12,<xmm0=int6464#2 | ||
8997 | # asm 2: pxor <xmm11=%xmm11,<xmm0=%xmm1 | ||
8998 | pxor %xmm11,%xmm1 | ||
8999 | |||
9000 | # qhasm: xmm7 ^= xmm5 | ||
9001 | # asm 1: pxor <xmm5=int6464#8,<xmm7=int6464#6 | ||
9002 | # asm 2: pxor <xmm5=%xmm7,<xmm7=%xmm5 | ||
9003 | pxor %xmm7,%xmm5 | ||
9004 | |||
9005 | # qhasm: xmm6 ^= xmm1 | ||
9006 | # asm 1: pxor <xmm1=int6464#5,<xmm6=int6464#4 | ||
9007 | # asm 2: pxor <xmm1=%xmm4,<xmm6=%xmm3 | ||
9008 | pxor %xmm4,%xmm3 | ||
9009 | |||
9010 | # qhasm: xmm3 = xmm7 | ||
9011 | # asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3 | ||
9012 | # asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2 | ||
9013 | movdqa %xmm5,%xmm2 | ||
9014 | |||
9015 | # qhasm: xmm3 ^= xmm6 | ||
9016 | # asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3 | ||
9017 | # asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2 | ||
9018 | pxor %xmm3,%xmm2 | ||
9019 | |||
9020 | # qhasm: xmm3 &= xmm4 | ||
9021 | # asm 1: pand <xmm4=int6464#1,<xmm3=int6464#3 | ||
9022 | # asm 2: pand <xmm4=%xmm0,<xmm3=%xmm2 | ||
9023 | pand %xmm0,%xmm2 | ||
9024 | |||
9025 | # qhasm: xmm4 ^= xmm0 | ||
9026 | # asm 1: pxor <xmm0=int6464#2,<xmm4=int6464#1 | ||
9027 | # asm 2: pxor <xmm0=%xmm1,<xmm4=%xmm0 | ||
9028 | pxor %xmm1,%xmm0 | ||
9029 | |||
9030 | # qhasm: xmm4 &= xmm6 | ||
9031 | # asm 1: pand <xmm6=int6464#4,<xmm4=int6464#1 | ||
9032 | # asm 2: pand <xmm6=%xmm3,<xmm4=%xmm0 | ||
9033 | pand %xmm3,%xmm0 | ||
9034 | |||
9035 | # qhasm: xmm0 &= xmm7 | ||
9036 | # asm 1: pand <xmm7=int6464#6,<xmm0=int6464#2 | ||
9037 | # asm 2: pand <xmm7=%xmm5,<xmm0=%xmm1 | ||
9038 | pand %xmm5,%xmm1 | ||
9039 | |||
9040 | # qhasm: xmm0 ^= xmm4 | ||
9041 | # asm 1: pxor <xmm4=int6464#1,<xmm0=int6464#2 | ||
9042 | # asm 2: pxor <xmm4=%xmm0,<xmm0=%xmm1 | ||
9043 | pxor %xmm0,%xmm1 | ||
9044 | |||
9045 | # qhasm: xmm4 ^= xmm3 | ||
9046 | # asm 1: pxor <xmm3=int6464#3,<xmm4=int6464#1 | ||
9047 | # asm 2: pxor <xmm3=%xmm2,<xmm4=%xmm0 | ||
9048 | pxor %xmm2,%xmm0 | ||
9049 | |||
9050 | # qhasm: xmm2 = xmm5 | ||
9051 | # asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3 | ||
9052 | # asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2 | ||
9053 | movdqa %xmm7,%xmm2 | ||
9054 | |||
9055 | # qhasm: xmm2 ^= xmm1 | ||
9056 | # asm 1: pxor <xmm1=int6464#5,<xmm2=int6464#3 | ||
9057 | # asm 2: pxor <xmm1=%xmm4,<xmm2=%xmm2 | ||
9058 | pxor %xmm4,%xmm2 | ||
9059 | |||
9060 | # qhasm: xmm2 &= xmm8 | ||
9061 | # asm 1: pand <xmm8=int6464#9,<xmm2=int6464#3 | ||
9062 | # asm 2: pand <xmm8=%xmm8,<xmm2=%xmm2 | ||
9063 | pand %xmm8,%xmm2 | ||
9064 | |||
9065 | # qhasm: xmm8 ^= xmm11 | ||
9066 | # asm 1: pxor <xmm11=int6464#12,<xmm8=int6464#9 | ||
9067 | # asm 2: pxor <xmm11=%xmm11,<xmm8=%xmm8 | ||
9068 | pxor %xmm11,%xmm8 | ||
9069 | |||
9070 | # qhasm: xmm8 &= xmm1 | ||
9071 | # asm 1: pand <xmm1=int6464#5,<xmm8=int6464#9 | ||
9072 | # asm 2: pand <xmm1=%xmm4,<xmm8=%xmm8 | ||
9073 | pand %xmm4,%xmm8 | ||
9074 | |||
9075 | # qhasm: xmm11 &= xmm5 | ||
9076 | # asm 1: pand <xmm5=int6464#8,<xmm11=int6464#12 | ||
9077 | # asm 2: pand <xmm5=%xmm7,<xmm11=%xmm11 | ||
9078 | pand %xmm7,%xmm11 | ||
9079 | |||
9080 | # qhasm: xmm8 ^= xmm11 | ||
9081 | # asm 1: pxor <xmm11=int6464#12,<xmm8=int6464#9 | ||
9082 | # asm 2: pxor <xmm11=%xmm11,<xmm8=%xmm8 | ||
9083 | pxor %xmm11,%xmm8 | ||
9084 | |||
9085 | # qhasm: xmm11 ^= xmm2 | ||
9086 | # asm 1: pxor <xmm2=int6464#3,<xmm11=int6464#12 | ||
9087 | # asm 2: pxor <xmm2=%xmm2,<xmm11=%xmm11 | ||
9088 | pxor %xmm2,%xmm11 | ||
9089 | |||
9090 | # qhasm: xmm14 ^= xmm4 | ||
9091 | # asm 1: pxor <xmm4=int6464#1,<xmm14=int6464#15 | ||
9092 | # asm 2: pxor <xmm4=%xmm0,<xmm14=%xmm14 | ||
9093 | pxor %xmm0,%xmm14 | ||
9094 | |||
9095 | # qhasm: xmm8 ^= xmm4 | ||
9096 | # asm 1: pxor <xmm4=int6464#1,<xmm8=int6464#9 | ||
9097 | # asm 2: pxor <xmm4=%xmm0,<xmm8=%xmm8 | ||
9098 | pxor %xmm0,%xmm8 | ||
9099 | |||
9100 | # qhasm: xmm13 ^= xmm0 | ||
9101 | # asm 1: pxor <xmm0=int6464#2,<xmm13=int6464#14 | ||
9102 | # asm 2: pxor <xmm0=%xmm1,<xmm13=%xmm13 | ||
9103 | pxor %xmm1,%xmm13 | ||
9104 | |||
9105 | # qhasm: xmm11 ^= xmm0 | ||
9106 | # asm 1: pxor <xmm0=int6464#2,<xmm11=int6464#12 | ||
9107 | # asm 2: pxor <xmm0=%xmm1,<xmm11=%xmm11 | ||
9108 | pxor %xmm1,%xmm11 | ||
9109 | |||
9110 | # qhasm: xmm4 = xmm15 | ||
9111 | # asm 1: movdqa <xmm15=int6464#16,>xmm4=int6464#1 | ||
9112 | # asm 2: movdqa <xmm15=%xmm15,>xmm4=%xmm0 | ||
9113 | movdqa %xmm15,%xmm0 | ||
9114 | |||
9115 | # qhasm: xmm0 = xmm9 | ||
9116 | # asm 1: movdqa <xmm9=int6464#10,>xmm0=int6464#2 | ||
9117 | # asm 2: movdqa <xmm9=%xmm9,>xmm0=%xmm1 | ||
9118 | movdqa %xmm9,%xmm1 | ||
9119 | |||
9120 | # qhasm: xmm4 ^= xmm12 | ||
9121 | # asm 1: pxor <xmm12=int6464#13,<xmm4=int6464#1 | ||
9122 | # asm 2: pxor <xmm12=%xmm12,<xmm4=%xmm0 | ||
9123 | pxor %xmm12,%xmm0 | ||
9124 | |||
9125 | # qhasm: xmm0 ^= xmm10 | ||
9126 | # asm 1: pxor <xmm10=int6464#11,<xmm0=int6464#2 | ||
9127 | # asm 2: pxor <xmm10=%xmm10,<xmm0=%xmm1 | ||
9128 | pxor %xmm10,%xmm1 | ||
9129 | |||
9130 | # qhasm: xmm3 = xmm7 | ||
9131 | # asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3 | ||
9132 | # asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2 | ||
9133 | movdqa %xmm5,%xmm2 | ||
9134 | |||
9135 | # qhasm: xmm3 ^= xmm6 | ||
9136 | # asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3 | ||
9137 | # asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2 | ||
9138 | pxor %xmm3,%xmm2 | ||
9139 | |||
9140 | # qhasm: xmm3 &= xmm4 | ||
9141 | # asm 1: pand <xmm4=int6464#1,<xmm3=int6464#3 | ||
9142 | # asm 2: pand <xmm4=%xmm0,<xmm3=%xmm2 | ||
9143 | pand %xmm0,%xmm2 | ||
9144 | |||
9145 | # qhasm: xmm4 ^= xmm0 | ||
9146 | # asm 1: pxor <xmm0=int6464#2,<xmm4=int6464#1 | ||
9147 | # asm 2: pxor <xmm0=%xmm1,<xmm4=%xmm0 | ||
9148 | pxor %xmm1,%xmm0 | ||
9149 | |||
9150 | # qhasm: xmm4 &= xmm6 | ||
9151 | # asm 1: pand <xmm6=int6464#4,<xmm4=int6464#1 | ||
9152 | # asm 2: pand <xmm6=%xmm3,<xmm4=%xmm0 | ||
9153 | pand %xmm3,%xmm0 | ||
9154 | |||
9155 | # qhasm: xmm0 &= xmm7 | ||
9156 | # asm 1: pand <xmm7=int6464#6,<xmm0=int6464#2 | ||
9157 | # asm 2: pand <xmm7=%xmm5,<xmm0=%xmm1 | ||
9158 | pand %xmm5,%xmm1 | ||
9159 | |||
9160 | # qhasm: xmm0 ^= xmm4 | ||
9161 | # asm 1: pxor <xmm4=int6464#1,<xmm0=int6464#2 | ||
9162 | # asm 2: pxor <xmm4=%xmm0,<xmm0=%xmm1 | ||
9163 | pxor %xmm0,%xmm1 | ||
9164 | |||
9165 | # qhasm: xmm4 ^= xmm3 | ||
9166 | # asm 1: pxor <xmm3=int6464#3,<xmm4=int6464#1 | ||
9167 | # asm 2: pxor <xmm3=%xmm2,<xmm4=%xmm0 | ||
9168 | pxor %xmm2,%xmm0 | ||
9169 | |||
9170 | # qhasm: xmm2 = xmm5 | ||
9171 | # asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3 | ||
9172 | # asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2 | ||
9173 | movdqa %xmm7,%xmm2 | ||
9174 | |||
9175 | # qhasm: xmm2 ^= xmm1 | ||
9176 | # asm 1: pxor <xmm1=int6464#5,<xmm2=int6464#3 | ||
9177 | # asm 2: pxor <xmm1=%xmm4,<xmm2=%xmm2 | ||
9178 | pxor %xmm4,%xmm2 | ||
9179 | |||
9180 | # qhasm: xmm2 &= xmm12 | ||
9181 | # asm 1: pand <xmm12=int6464#13,<xmm2=int6464#3 | ||
9182 | # asm 2: pand <xmm12=%xmm12,<xmm2=%xmm2 | ||
9183 | pand %xmm12,%xmm2 | ||
9184 | |||
9185 | # qhasm: xmm12 ^= xmm10 | ||
9186 | # asm 1: pxor <xmm10=int6464#11,<xmm12=int6464#13 | ||
9187 | # asm 2: pxor <xmm10=%xmm10,<xmm12=%xmm12 | ||
9188 | pxor %xmm10,%xmm12 | ||
9189 | |||
9190 | # qhasm: xmm12 &= xmm1 | ||
9191 | # asm 1: pand <xmm1=int6464#5,<xmm12=int6464#13 | ||
9192 | # asm 2: pand <xmm1=%xmm4,<xmm12=%xmm12 | ||
9193 | pand %xmm4,%xmm12 | ||
9194 | |||
9195 | # qhasm: xmm10 &= xmm5 | ||
9196 | # asm 1: pand <xmm5=int6464#8,<xmm10=int6464#11 | ||
9197 | # asm 2: pand <xmm5=%xmm7,<xmm10=%xmm10 | ||
9198 | pand %xmm7,%xmm10 | ||
9199 | |||
9200 | # qhasm: xmm12 ^= xmm10 | ||
9201 | # asm 1: pxor <xmm10=int6464#11,<xmm12=int6464#13 | ||
9202 | # asm 2: pxor <xmm10=%xmm10,<xmm12=%xmm12 | ||
9203 | pxor %xmm10,%xmm12 | ||
9204 | |||
9205 | # qhasm: xmm10 ^= xmm2 | ||
9206 | # asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#11 | ||
9207 | # asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm10 | ||
9208 | pxor %xmm2,%xmm10 | ||
9209 | |||
9210 | # qhasm: xmm7 ^= xmm5 | ||
9211 | # asm 1: pxor <xmm5=int6464#8,<xmm7=int6464#6 | ||
9212 | # asm 2: pxor <xmm5=%xmm7,<xmm7=%xmm5 | ||
9213 | pxor %xmm7,%xmm5 | ||
9214 | |||
9215 | # qhasm: xmm6 ^= xmm1 | ||
9216 | # asm 1: pxor <xmm1=int6464#5,<xmm6=int6464#4 | ||
9217 | # asm 2: pxor <xmm1=%xmm4,<xmm6=%xmm3 | ||
9218 | pxor %xmm4,%xmm3 | ||
9219 | |||
9220 | # qhasm: xmm3 = xmm7 | ||
9221 | # asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3 | ||
9222 | # asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2 | ||
9223 | movdqa %xmm5,%xmm2 | ||
9224 | |||
9225 | # qhasm: xmm3 ^= xmm6 | ||
9226 | # asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3 | ||
9227 | # asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2 | ||
9228 | pxor %xmm3,%xmm2 | ||
9229 | |||
9230 | # qhasm: xmm3 &= xmm15 | ||
9231 | # asm 1: pand <xmm15=int6464#16,<xmm3=int6464#3 | ||
9232 | # asm 2: pand <xmm15=%xmm15,<xmm3=%xmm2 | ||
9233 | pand %xmm15,%xmm2 | ||
9234 | |||
9235 | # qhasm: xmm15 ^= xmm9 | ||
9236 | # asm 1: pxor <xmm9=int6464#10,<xmm15=int6464#16 | ||
9237 | # asm 2: pxor <xmm9=%xmm9,<xmm15=%xmm15 | ||
9238 | pxor %xmm9,%xmm15 | ||
9239 | |||
9240 | # qhasm: xmm15 &= xmm6 | ||
9241 | # asm 1: pand <xmm6=int6464#4,<xmm15=int6464#16 | ||
9242 | # asm 2: pand <xmm6=%xmm3,<xmm15=%xmm15 | ||
9243 | pand %xmm3,%xmm15 | ||
9244 | |||
9245 | # qhasm: xmm9 &= xmm7 | ||
9246 | # asm 1: pand <xmm7=int6464#6,<xmm9=int6464#10 | ||
9247 | # asm 2: pand <xmm7=%xmm5,<xmm9=%xmm9 | ||
9248 | pand %xmm5,%xmm9 | ||
9249 | |||
9250 | # qhasm: xmm15 ^= xmm9 | ||
9251 | # asm 1: pxor <xmm9=int6464#10,<xmm15=int6464#16 | ||
9252 | # asm 2: pxor <xmm9=%xmm9,<xmm15=%xmm15 | ||
9253 | pxor %xmm9,%xmm15 | ||
9254 | |||
9255 | # qhasm: xmm9 ^= xmm3 | ||
9256 | # asm 1: pxor <xmm3=int6464#3,<xmm9=int6464#10 | ||
9257 | # asm 2: pxor <xmm3=%xmm2,<xmm9=%xmm9 | ||
9258 | pxor %xmm2,%xmm9 | ||
9259 | |||
9260 | # qhasm: xmm15 ^= xmm4 | ||
9261 | # asm 1: pxor <xmm4=int6464#1,<xmm15=int6464#16 | ||
9262 | # asm 2: pxor <xmm4=%xmm0,<xmm15=%xmm15 | ||
9263 | pxor %xmm0,%xmm15 | ||
9264 | |||
9265 | # qhasm: xmm12 ^= xmm4 | ||
9266 | # asm 1: pxor <xmm4=int6464#1,<xmm12=int6464#13 | ||
9267 | # asm 2: pxor <xmm4=%xmm0,<xmm12=%xmm12 | ||
9268 | pxor %xmm0,%xmm12 | ||
9269 | |||
9270 | # qhasm: xmm9 ^= xmm0 | ||
9271 | # asm 1: pxor <xmm0=int6464#2,<xmm9=int6464#10 | ||
9272 | # asm 2: pxor <xmm0=%xmm1,<xmm9=%xmm9 | ||
9273 | pxor %xmm1,%xmm9 | ||
9274 | |||
9275 | # qhasm: xmm10 ^= xmm0 | ||
9276 | # asm 1: pxor <xmm0=int6464#2,<xmm10=int6464#11 | ||
9277 | # asm 2: pxor <xmm0=%xmm1,<xmm10=%xmm10 | ||
9278 | pxor %xmm1,%xmm10 | ||
9279 | |||
9280 | # qhasm: xmm15 ^= xmm8 | ||
9281 | # asm 1: pxor <xmm8=int6464#9,<xmm15=int6464#16 | ||
9282 | # asm 2: pxor <xmm8=%xmm8,<xmm15=%xmm15 | ||
9283 | pxor %xmm8,%xmm15 | ||
9284 | |||
9285 | # qhasm: xmm9 ^= xmm14 | ||
9286 | # asm 1: pxor <xmm14=int6464#15,<xmm9=int6464#10 | ||
9287 | # asm 2: pxor <xmm14=%xmm14,<xmm9=%xmm9 | ||
9288 | pxor %xmm14,%xmm9 | ||
9289 | |||
9290 | # qhasm: xmm12 ^= xmm15 | ||
9291 | # asm 1: pxor <xmm15=int6464#16,<xmm12=int6464#13 | ||
9292 | # asm 2: pxor <xmm15=%xmm15,<xmm12=%xmm12 | ||
9293 | pxor %xmm15,%xmm12 | ||
9294 | |||
9295 | # qhasm: xmm14 ^= xmm8 | ||
9296 | # asm 1: pxor <xmm8=int6464#9,<xmm14=int6464#15 | ||
9297 | # asm 2: pxor <xmm8=%xmm8,<xmm14=%xmm14 | ||
9298 | pxor %xmm8,%xmm14 | ||
9299 | |||
9300 | # qhasm: xmm8 ^= xmm9 | ||
9301 | # asm 1: pxor <xmm9=int6464#10,<xmm8=int6464#9 | ||
9302 | # asm 2: pxor <xmm9=%xmm9,<xmm8=%xmm8 | ||
9303 | pxor %xmm9,%xmm8 | ||
9304 | |||
9305 | # qhasm: xmm9 ^= xmm13 | ||
9306 | # asm 1: pxor <xmm13=int6464#14,<xmm9=int6464#10 | ||
9307 | # asm 2: pxor <xmm13=%xmm13,<xmm9=%xmm9 | ||
9308 | pxor %xmm13,%xmm9 | ||
9309 | |||
9310 | # qhasm: xmm13 ^= xmm10 | ||
9311 | # asm 1: pxor <xmm10=int6464#11,<xmm13=int6464#14 | ||
9312 | # asm 2: pxor <xmm10=%xmm10,<xmm13=%xmm13 | ||
9313 | pxor %xmm10,%xmm13 | ||
9314 | |||
9315 | # qhasm: xmm12 ^= xmm13 | ||
9316 | # asm 1: pxor <xmm13=int6464#14,<xmm12=int6464#13 | ||
9317 | # asm 2: pxor <xmm13=%xmm13,<xmm12=%xmm12 | ||
9318 | pxor %xmm13,%xmm12 | ||
9319 | |||
9320 | # qhasm: xmm10 ^= xmm11 | ||
9321 | # asm 1: pxor <xmm11=int6464#12,<xmm10=int6464#11 | ||
9322 | # asm 2: pxor <xmm11=%xmm11,<xmm10=%xmm10 | ||
9323 | pxor %xmm11,%xmm10 | ||
9324 | |||
9325 | # qhasm: xmm11 ^= xmm13 | ||
9326 | # asm 1: pxor <xmm13=int6464#14,<xmm11=int6464#12 | ||
9327 | # asm 2: pxor <xmm13=%xmm13,<xmm11=%xmm11 | ||
9328 | pxor %xmm13,%xmm11 | ||
9329 | |||
9330 | # qhasm: xmm14 ^= xmm11 | ||
9331 | # asm 1: pxor <xmm11=int6464#12,<xmm14=int6464#15 | ||
9332 | # asm 2: pxor <xmm11=%xmm11,<xmm14=%xmm14 | ||
9333 | pxor %xmm11,%xmm14 | ||
9334 | |||
9335 | # qhasm: xmm0 = shuffle dwords of xmm8 by 0x93 | ||
9336 | # asm 1: pshufd $0x93,<xmm8=int6464#9,>xmm0=int6464#1 | ||
9337 | # asm 2: pshufd $0x93,<xmm8=%xmm8,>xmm0=%xmm0 | ||
9338 | pshufd $0x93,%xmm8,%xmm0 | ||
9339 | |||
9340 | # qhasm: xmm1 = shuffle dwords of xmm9 by 0x93 | ||
9341 | # asm 1: pshufd $0x93,<xmm9=int6464#10,>xmm1=int6464#2 | ||
9342 | # asm 2: pshufd $0x93,<xmm9=%xmm9,>xmm1=%xmm1 | ||
9343 | pshufd $0x93,%xmm9,%xmm1 | ||
9344 | |||
9345 | # qhasm: xmm2 = shuffle dwords of xmm12 by 0x93 | ||
9346 | # asm 1: pshufd $0x93,<xmm12=int6464#13,>xmm2=int6464#3 | ||
9347 | # asm 2: pshufd $0x93,<xmm12=%xmm12,>xmm2=%xmm2 | ||
9348 | pshufd $0x93,%xmm12,%xmm2 | ||
9349 | |||
9350 | # qhasm: xmm3 = shuffle dwords of xmm14 by 0x93 | ||
9351 | # asm 1: pshufd $0x93,<xmm14=int6464#15,>xmm3=int6464#4 | ||
9352 | # asm 2: pshufd $0x93,<xmm14=%xmm14,>xmm3=%xmm3 | ||
9353 | pshufd $0x93,%xmm14,%xmm3 | ||
9354 | |||
9355 | # qhasm: xmm4 = shuffle dwords of xmm11 by 0x93 | ||
9356 | # asm 1: pshufd $0x93,<xmm11=int6464#12,>xmm4=int6464#5 | ||
9357 | # asm 2: pshufd $0x93,<xmm11=%xmm11,>xmm4=%xmm4 | ||
9358 | pshufd $0x93,%xmm11,%xmm4 | ||
9359 | |||
9360 | # qhasm: xmm5 = shuffle dwords of xmm15 by 0x93 | ||
9361 | # asm 1: pshufd $0x93,<xmm15=int6464#16,>xmm5=int6464#6 | ||
9362 | # asm 2: pshufd $0x93,<xmm15=%xmm15,>xmm5=%xmm5 | ||
9363 | pshufd $0x93,%xmm15,%xmm5 | ||
9364 | |||
9365 | # qhasm: xmm6 = shuffle dwords of xmm10 by 0x93 | ||
9366 | # asm 1: pshufd $0x93,<xmm10=int6464#11,>xmm6=int6464#7 | ||
9367 | # asm 2: pshufd $0x93,<xmm10=%xmm10,>xmm6=%xmm6 | ||
9368 | pshufd $0x93,%xmm10,%xmm6 | ||
9369 | |||
9370 | # qhasm: xmm7 = shuffle dwords of xmm13 by 0x93 | ||
9371 | # asm 1: pshufd $0x93,<xmm13=int6464#14,>xmm7=int6464#8 | ||
9372 | # asm 2: pshufd $0x93,<xmm13=%xmm13,>xmm7=%xmm7 | ||
9373 | pshufd $0x93,%xmm13,%xmm7 | ||
9374 | |||
9375 | # qhasm: xmm8 ^= xmm0 | ||
9376 | # asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9 | ||
9377 | # asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8 | ||
9378 | pxor %xmm0,%xmm8 | ||
9379 | |||
9380 | # qhasm: xmm9 ^= xmm1 | ||
9381 | # asm 1: pxor <xmm1=int6464#2,<xmm9=int6464#10 | ||
9382 | # asm 2: pxor <xmm1=%xmm1,<xmm9=%xmm9 | ||
9383 | pxor %xmm1,%xmm9 | ||
9384 | |||
9385 | # qhasm: xmm12 ^= xmm2 | ||
9386 | # asm 1: pxor <xmm2=int6464#3,<xmm12=int6464#13 | ||
9387 | # asm 2: pxor <xmm2=%xmm2,<xmm12=%xmm12 | ||
9388 | pxor %xmm2,%xmm12 | ||
9389 | |||
9390 | # qhasm: xmm14 ^= xmm3 | ||
9391 | # asm 1: pxor <xmm3=int6464#4,<xmm14=int6464#15 | ||
9392 | # asm 2: pxor <xmm3=%xmm3,<xmm14=%xmm14 | ||
9393 | pxor %xmm3,%xmm14 | ||
9394 | |||
9395 | # qhasm: xmm11 ^= xmm4 | ||
9396 | # asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#12 | ||
9397 | # asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm11 | ||
9398 | pxor %xmm4,%xmm11 | ||
9399 | |||
9400 | # qhasm: xmm15 ^= xmm5 | ||
9401 | # asm 1: pxor <xmm5=int6464#6,<xmm15=int6464#16 | ||
9402 | # asm 2: pxor <xmm5=%xmm5,<xmm15=%xmm15 | ||
9403 | pxor %xmm5,%xmm15 | ||
9404 | |||
9405 | # qhasm: xmm10 ^= xmm6 | ||
9406 | # asm 1: pxor <xmm6=int6464#7,<xmm10=int6464#11 | ||
9407 | # asm 2: pxor <xmm6=%xmm6,<xmm10=%xmm10 | ||
9408 | pxor %xmm6,%xmm10 | ||
9409 | |||
9410 | # qhasm: xmm13 ^= xmm7 | ||
9411 | # asm 1: pxor <xmm7=int6464#8,<xmm13=int6464#14 | ||
9412 | # asm 2: pxor <xmm7=%xmm7,<xmm13=%xmm13 | ||
9413 | pxor %xmm7,%xmm13 | ||
9414 | |||
9415 | # qhasm: xmm0 ^= xmm13 | ||
9416 | # asm 1: pxor <xmm13=int6464#14,<xmm0=int6464#1 | ||
9417 | # asm 2: pxor <xmm13=%xmm13,<xmm0=%xmm0 | ||
9418 | pxor %xmm13,%xmm0 | ||
9419 | |||
9420 | # qhasm: xmm1 ^= xmm8 | ||
9421 | # asm 1: pxor <xmm8=int6464#9,<xmm1=int6464#2 | ||
9422 | # asm 2: pxor <xmm8=%xmm8,<xmm1=%xmm1 | ||
9423 | pxor %xmm8,%xmm1 | ||
9424 | |||
9425 | # qhasm: xmm2 ^= xmm9 | ||
9426 | # asm 1: pxor <xmm9=int6464#10,<xmm2=int6464#3 | ||
9427 | # asm 2: pxor <xmm9=%xmm9,<xmm2=%xmm2 | ||
9428 | pxor %xmm9,%xmm2 | ||
9429 | |||
9430 | # qhasm: xmm1 ^= xmm13 | ||
9431 | # asm 1: pxor <xmm13=int6464#14,<xmm1=int6464#2 | ||
9432 | # asm 2: pxor <xmm13=%xmm13,<xmm1=%xmm1 | ||
9433 | pxor %xmm13,%xmm1 | ||
9434 | |||
9435 | # qhasm: xmm3 ^= xmm12 | ||
9436 | # asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4 | ||
9437 | # asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3 | ||
9438 | pxor %xmm12,%xmm3 | ||
9439 | |||
9440 | # qhasm: xmm4 ^= xmm14 | ||
9441 | # asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5 | ||
9442 | # asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4 | ||
9443 | pxor %xmm14,%xmm4 | ||
9444 | |||
9445 | # qhasm: xmm5 ^= xmm11 | ||
9446 | # asm 1: pxor <xmm11=int6464#12,<xmm5=int6464#6 | ||
9447 | # asm 2: pxor <xmm11=%xmm11,<xmm5=%xmm5 | ||
9448 | pxor %xmm11,%xmm5 | ||
9449 | |||
9450 | # qhasm: xmm3 ^= xmm13 | ||
9451 | # asm 1: pxor <xmm13=int6464#14,<xmm3=int6464#4 | ||
9452 | # asm 2: pxor <xmm13=%xmm13,<xmm3=%xmm3 | ||
9453 | pxor %xmm13,%xmm3 | ||
9454 | |||
9455 | # qhasm: xmm6 ^= xmm15 | ||
9456 | # asm 1: pxor <xmm15=int6464#16,<xmm6=int6464#7 | ||
9457 | # asm 2: pxor <xmm15=%xmm15,<xmm6=%xmm6 | ||
9458 | pxor %xmm15,%xmm6 | ||
9459 | |||
9460 | # qhasm: xmm7 ^= xmm10 | ||
9461 | # asm 1: pxor <xmm10=int6464#11,<xmm7=int6464#8 | ||
9462 | # asm 2: pxor <xmm10=%xmm10,<xmm7=%xmm7 | ||
9463 | pxor %xmm10,%xmm7 | ||
9464 | |||
9465 | # qhasm: xmm4 ^= xmm13 | ||
9466 | # asm 1: pxor <xmm13=int6464#14,<xmm4=int6464#5 | ||
9467 | # asm 2: pxor <xmm13=%xmm13,<xmm4=%xmm4 | ||
9468 | pxor %xmm13,%xmm4 | ||
9469 | |||
9470 | # qhasm: xmm8 = shuffle dwords of xmm8 by 0x4E | ||
9471 | # asm 1: pshufd $0x4E,<xmm8=int6464#9,>xmm8=int6464#9 | ||
9472 | # asm 2: pshufd $0x4E,<xmm8=%xmm8,>xmm8=%xmm8 | ||
9473 | pshufd $0x4E,%xmm8,%xmm8 | ||
9474 | |||
9475 | # qhasm: xmm9 = shuffle dwords of xmm9 by 0x4E | ||
9476 | # asm 1: pshufd $0x4E,<xmm9=int6464#10,>xmm9=int6464#10 | ||
9477 | # asm 2: pshufd $0x4E,<xmm9=%xmm9,>xmm9=%xmm9 | ||
9478 | pshufd $0x4E,%xmm9,%xmm9 | ||
9479 | |||
9480 | # qhasm: xmm12 = shuffle dwords of xmm12 by 0x4E | ||
9481 | # asm 1: pshufd $0x4E,<xmm12=int6464#13,>xmm12=int6464#13 | ||
9482 | # asm 2: pshufd $0x4E,<xmm12=%xmm12,>xmm12=%xmm12 | ||
9483 | pshufd $0x4E,%xmm12,%xmm12 | ||
9484 | |||
9485 | # qhasm: xmm14 = shuffle dwords of xmm14 by 0x4E | ||
9486 | # asm 1: pshufd $0x4E,<xmm14=int6464#15,>xmm14=int6464#15 | ||
9487 | # asm 2: pshufd $0x4E,<xmm14=%xmm14,>xmm14=%xmm14 | ||
9488 | pshufd $0x4E,%xmm14,%xmm14 | ||
9489 | |||
9490 | # qhasm: xmm11 = shuffle dwords of xmm11 by 0x4E | ||
9491 | # asm 1: pshufd $0x4E,<xmm11=int6464#12,>xmm11=int6464#12 | ||
9492 | # asm 2: pshufd $0x4E,<xmm11=%xmm11,>xmm11=%xmm11 | ||
9493 | pshufd $0x4E,%xmm11,%xmm11 | ||
9494 | |||
9495 | # qhasm: xmm15 = shuffle dwords of xmm15 by 0x4E | ||
9496 | # asm 1: pshufd $0x4E,<xmm15=int6464#16,>xmm15=int6464#16 | ||
9497 | # asm 2: pshufd $0x4E,<xmm15=%xmm15,>xmm15=%xmm15 | ||
9498 | pshufd $0x4E,%xmm15,%xmm15 | ||
9499 | |||
9500 | # qhasm: xmm10 = shuffle dwords of xmm10 by 0x4E | ||
9501 | # asm 1: pshufd $0x4E,<xmm10=int6464#11,>xmm10=int6464#11 | ||
9502 | # asm 2: pshufd $0x4E,<xmm10=%xmm10,>xmm10=%xmm10 | ||
9503 | pshufd $0x4E,%xmm10,%xmm10 | ||
9504 | |||
9505 | # qhasm: xmm13 = shuffle dwords of xmm13 by 0x4E | ||
9506 | # asm 1: pshufd $0x4E,<xmm13=int6464#14,>xmm13=int6464#14 | ||
9507 | # asm 2: pshufd $0x4E,<xmm13=%xmm13,>xmm13=%xmm13 | ||
9508 | pshufd $0x4E,%xmm13,%xmm13 | ||
9509 | |||
9510 | # qhasm: xmm0 ^= xmm8 | ||
9511 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
9512 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
9513 | pxor %xmm8,%xmm0 | ||
9514 | |||
9515 | # qhasm: xmm1 ^= xmm9 | ||
9516 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
9517 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
9518 | pxor %xmm9,%xmm1 | ||
9519 | |||
9520 | # qhasm: xmm2 ^= xmm12 | ||
9521 | # asm 1: pxor <xmm12=int6464#13,<xmm2=int6464#3 | ||
9522 | # asm 2: pxor <xmm12=%xmm12,<xmm2=%xmm2 | ||
9523 | pxor %xmm12,%xmm2 | ||
9524 | |||
9525 | # qhasm: xmm3 ^= xmm14 | ||
9526 | # asm 1: pxor <xmm14=int6464#15,<xmm3=int6464#4 | ||
9527 | # asm 2: pxor <xmm14=%xmm14,<xmm3=%xmm3 | ||
9528 | pxor %xmm14,%xmm3 | ||
9529 | |||
9530 | # qhasm: xmm4 ^= xmm11 | ||
9531 | # asm 1: pxor <xmm11=int6464#12,<xmm4=int6464#5 | ||
9532 | # asm 2: pxor <xmm11=%xmm11,<xmm4=%xmm4 | ||
9533 | pxor %xmm11,%xmm4 | ||
9534 | |||
9535 | # qhasm: xmm5 ^= xmm15 | ||
9536 | # asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6 | ||
9537 | # asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5 | ||
9538 | pxor %xmm15,%xmm5 | ||
9539 | |||
9540 | # qhasm: xmm6 ^= xmm10 | ||
9541 | # asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7 | ||
9542 | # asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6 | ||
9543 | pxor %xmm10,%xmm6 | ||
9544 | |||
9545 | # qhasm: xmm7 ^= xmm13 | ||
9546 | # asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8 | ||
9547 | # asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7 | ||
9548 | pxor %xmm13,%xmm7 | ||
9549 | |||
9550 | # qhasm: xmm0 ^= *(int128 *)(c + 1024) | ||
9551 | # asm 1: pxor 1024(<c=int64#4),<xmm0=int6464#1 | ||
9552 | # asm 2: pxor 1024(<c=%rcx),<xmm0=%xmm0 | ||
9553 | pxor 1024(%rcx),%xmm0 | ||
9554 | |||
9555 | # qhasm: shuffle bytes of xmm0 by SR | ||
9556 | # asm 1: pshufb SR,<xmm0=int6464#1 | ||
9557 | # asm 2: pshufb SR,<xmm0=%xmm0 | ||
9558 | pshufb SR,%xmm0 | ||
9559 | |||
9560 | # qhasm: xmm1 ^= *(int128 *)(c + 1040) | ||
9561 | # asm 1: pxor 1040(<c=int64#4),<xmm1=int6464#2 | ||
9562 | # asm 2: pxor 1040(<c=%rcx),<xmm1=%xmm1 | ||
9563 | pxor 1040(%rcx),%xmm1 | ||
9564 | |||
9565 | # qhasm: shuffle bytes of xmm1 by SR | ||
9566 | # asm 1: pshufb SR,<xmm1=int6464#2 | ||
9567 | # asm 2: pshufb SR,<xmm1=%xmm1 | ||
9568 | pshufb SR,%xmm1 | ||
9569 | |||
9570 | # qhasm: xmm2 ^= *(int128 *)(c + 1056) | ||
9571 | # asm 1: pxor 1056(<c=int64#4),<xmm2=int6464#3 | ||
9572 | # asm 2: pxor 1056(<c=%rcx),<xmm2=%xmm2 | ||
9573 | pxor 1056(%rcx),%xmm2 | ||
9574 | |||
9575 | # qhasm: shuffle bytes of xmm2 by SR | ||
9576 | # asm 1: pshufb SR,<xmm2=int6464#3 | ||
9577 | # asm 2: pshufb SR,<xmm2=%xmm2 | ||
9578 | pshufb SR,%xmm2 | ||
9579 | |||
9580 | # qhasm: xmm3 ^= *(int128 *)(c + 1072) | ||
9581 | # asm 1: pxor 1072(<c=int64#4),<xmm3=int6464#4 | ||
9582 | # asm 2: pxor 1072(<c=%rcx),<xmm3=%xmm3 | ||
9583 | pxor 1072(%rcx),%xmm3 | ||
9584 | |||
9585 | # qhasm: shuffle bytes of xmm3 by SR | ||
9586 | # asm 1: pshufb SR,<xmm3=int6464#4 | ||
9587 | # asm 2: pshufb SR,<xmm3=%xmm3 | ||
9588 | pshufb SR,%xmm3 | ||
9589 | |||
9590 | # qhasm: xmm4 ^= *(int128 *)(c + 1088) | ||
9591 | # asm 1: pxor 1088(<c=int64#4),<xmm4=int6464#5 | ||
9592 | # asm 2: pxor 1088(<c=%rcx),<xmm4=%xmm4 | ||
9593 | pxor 1088(%rcx),%xmm4 | ||
9594 | |||
9595 | # qhasm: shuffle bytes of xmm4 by SR | ||
9596 | # asm 1: pshufb SR,<xmm4=int6464#5 | ||
9597 | # asm 2: pshufb SR,<xmm4=%xmm4 | ||
9598 | pshufb SR,%xmm4 | ||
9599 | |||
9600 | # qhasm: xmm5 ^= *(int128 *)(c + 1104) | ||
9601 | # asm 1: pxor 1104(<c=int64#4),<xmm5=int6464#6 | ||
9602 | # asm 2: pxor 1104(<c=%rcx),<xmm5=%xmm5 | ||
9603 | pxor 1104(%rcx),%xmm5 | ||
9604 | |||
9605 | # qhasm: shuffle bytes of xmm5 by SR | ||
9606 | # asm 1: pshufb SR,<xmm5=int6464#6 | ||
9607 | # asm 2: pshufb SR,<xmm5=%xmm5 | ||
9608 | pshufb SR,%xmm5 | ||
9609 | |||
9610 | # qhasm: xmm6 ^= *(int128 *)(c + 1120) | ||
9611 | # asm 1: pxor 1120(<c=int64#4),<xmm6=int6464#7 | ||
9612 | # asm 2: pxor 1120(<c=%rcx),<xmm6=%xmm6 | ||
9613 | pxor 1120(%rcx),%xmm6 | ||
9614 | |||
9615 | # qhasm: shuffle bytes of xmm6 by SR | ||
9616 | # asm 1: pshufb SR,<xmm6=int6464#7 | ||
9617 | # asm 2: pshufb SR,<xmm6=%xmm6 | ||
9618 | pshufb SR,%xmm6 | ||
9619 | |||
9620 | # qhasm: xmm7 ^= *(int128 *)(c + 1136) | ||
9621 | # asm 1: pxor 1136(<c=int64#4),<xmm7=int6464#8 | ||
9622 | # asm 2: pxor 1136(<c=%rcx),<xmm7=%xmm7 | ||
9623 | pxor 1136(%rcx),%xmm7 | ||
9624 | |||
9625 | # qhasm: shuffle bytes of xmm7 by SR | ||
9626 | # asm 1: pshufb SR,<xmm7=int6464#8 | ||
9627 | # asm 2: pshufb SR,<xmm7=%xmm7 | ||
9628 | pshufb SR,%xmm7 | ||
9629 | |||
9630 | # qhasm: xmm5 ^= xmm6 | ||
9631 | # asm 1: pxor <xmm6=int6464#7,<xmm5=int6464#6 | ||
9632 | # asm 2: pxor <xmm6=%xmm6,<xmm5=%xmm5 | ||
9633 | pxor %xmm6,%xmm5 | ||
9634 | |||
9635 | # qhasm: xmm2 ^= xmm1 | ||
9636 | # asm 1: pxor <xmm1=int6464#2,<xmm2=int6464#3 | ||
9637 | # asm 2: pxor <xmm1=%xmm1,<xmm2=%xmm2 | ||
9638 | pxor %xmm1,%xmm2 | ||
9639 | |||
9640 | # qhasm: xmm5 ^= xmm0 | ||
9641 | # asm 1: pxor <xmm0=int6464#1,<xmm5=int6464#6 | ||
9642 | # asm 2: pxor <xmm0=%xmm0,<xmm5=%xmm5 | ||
9643 | pxor %xmm0,%xmm5 | ||
9644 | |||
9645 | # qhasm: xmm6 ^= xmm2 | ||
9646 | # asm 1: pxor <xmm2=int6464#3,<xmm6=int6464#7 | ||
9647 | # asm 2: pxor <xmm2=%xmm2,<xmm6=%xmm6 | ||
9648 | pxor %xmm2,%xmm6 | ||
9649 | |||
9650 | # qhasm: xmm3 ^= xmm0 | ||
9651 | # asm 1: pxor <xmm0=int6464#1,<xmm3=int6464#4 | ||
9652 | # asm 2: pxor <xmm0=%xmm0,<xmm3=%xmm3 | ||
9653 | pxor %xmm0,%xmm3 | ||
9654 | |||
9655 | # qhasm: xmm6 ^= xmm3 | ||
9656 | # asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7 | ||
9657 | # asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6 | ||
9658 | pxor %xmm3,%xmm6 | ||
9659 | |||
9660 | # qhasm: xmm3 ^= xmm7 | ||
9661 | # asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4 | ||
9662 | # asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3 | ||
9663 | pxor %xmm7,%xmm3 | ||
9664 | |||
9665 | # qhasm: xmm3 ^= xmm4 | ||
9666 | # asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4 | ||
9667 | # asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3 | ||
9668 | pxor %xmm4,%xmm3 | ||
9669 | |||
9670 | # qhasm: xmm7 ^= xmm5 | ||
9671 | # asm 1: pxor <xmm5=int6464#6,<xmm7=int6464#8 | ||
9672 | # asm 2: pxor <xmm5=%xmm5,<xmm7=%xmm7 | ||
9673 | pxor %xmm5,%xmm7 | ||
9674 | |||
9675 | # qhasm: xmm3 ^= xmm1 | ||
9676 | # asm 1: pxor <xmm1=int6464#2,<xmm3=int6464#4 | ||
9677 | # asm 2: pxor <xmm1=%xmm1,<xmm3=%xmm3 | ||
9678 | pxor %xmm1,%xmm3 | ||
9679 | |||
9680 | # qhasm: xmm4 ^= xmm5 | ||
9681 | # asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5 | ||
9682 | # asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4 | ||
9683 | pxor %xmm5,%xmm4 | ||
9684 | |||
9685 | # qhasm: xmm2 ^= xmm7 | ||
9686 | # asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3 | ||
9687 | # asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2 | ||
9688 | pxor %xmm7,%xmm2 | ||
9689 | |||
9690 | # qhasm: xmm1 ^= xmm5 | ||
9691 | # asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2 | ||
9692 | # asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1 | ||
9693 | pxor %xmm5,%xmm1 | ||
9694 | |||
9695 | # qhasm: xmm11 = xmm7 | ||
9696 | # asm 1: movdqa <xmm7=int6464#8,>xmm11=int6464#9 | ||
9697 | # asm 2: movdqa <xmm7=%xmm7,>xmm11=%xmm8 | ||
9698 | movdqa %xmm7,%xmm8 | ||
9699 | |||
9700 | # qhasm: xmm10 = xmm1 | ||
9701 | # asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10 | ||
9702 | # asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9 | ||
9703 | movdqa %xmm1,%xmm9 | ||
9704 | |||
9705 | # qhasm: xmm9 = xmm5 | ||
9706 | # asm 1: movdqa <xmm5=int6464#6,>xmm9=int6464#11 | ||
9707 | # asm 2: movdqa <xmm5=%xmm5,>xmm9=%xmm10 | ||
9708 | movdqa %xmm5,%xmm10 | ||
9709 | |||
9710 | # qhasm: xmm13 = xmm2 | ||
9711 | # asm 1: movdqa <xmm2=int6464#3,>xmm13=int6464#12 | ||
9712 | # asm 2: movdqa <xmm2=%xmm2,>xmm13=%xmm11 | ||
9713 | movdqa %xmm2,%xmm11 | ||
9714 | |||
9715 | # qhasm: xmm12 = xmm6 | ||
9716 | # asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#13 | ||
9717 | # asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm12 | ||
9718 | movdqa %xmm6,%xmm12 | ||
9719 | |||
9720 | # qhasm: xmm11 ^= xmm4 | ||
9721 | # asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#9 | ||
9722 | # asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm8 | ||
9723 | pxor %xmm4,%xmm8 | ||
9724 | |||
9725 | # qhasm: xmm10 ^= xmm2 | ||
9726 | # asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#10 | ||
9727 | # asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm9 | ||
9728 | pxor %xmm2,%xmm9 | ||
9729 | |||
9730 | # qhasm: xmm9 ^= xmm3 | ||
9731 | # asm 1: pxor <xmm3=int6464#4,<xmm9=int6464#11 | ||
9732 | # asm 2: pxor <xmm3=%xmm3,<xmm9=%xmm10 | ||
9733 | pxor %xmm3,%xmm10 | ||
9734 | |||
9735 | # qhasm: xmm13 ^= xmm4 | ||
9736 | # asm 1: pxor <xmm4=int6464#5,<xmm13=int6464#12 | ||
9737 | # asm 2: pxor <xmm4=%xmm4,<xmm13=%xmm11 | ||
9738 | pxor %xmm4,%xmm11 | ||
9739 | |||
9740 | # qhasm: xmm12 ^= xmm0 | ||
9741 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13 | ||
9742 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12 | ||
9743 | pxor %xmm0,%xmm12 | ||
9744 | |||
9745 | # qhasm: xmm14 = xmm11 | ||
9746 | # asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14 | ||
9747 | # asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13 | ||
9748 | movdqa %xmm8,%xmm13 | ||
9749 | |||
9750 | # qhasm: xmm8 = xmm10 | ||
9751 | # asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15 | ||
9752 | # asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14 | ||
9753 | movdqa %xmm9,%xmm14 | ||
9754 | |||
9755 | # qhasm: xmm15 = xmm11 | ||
9756 | # asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16 | ||
9757 | # asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15 | ||
9758 | movdqa %xmm8,%xmm15 | ||
9759 | |||
9760 | # qhasm: xmm10 |= xmm9 | ||
9761 | # asm 1: por <xmm9=int6464#11,<xmm10=int6464#10 | ||
9762 | # asm 2: por <xmm9=%xmm10,<xmm10=%xmm9 | ||
9763 | por %xmm10,%xmm9 | ||
9764 | |||
9765 | # qhasm: xmm11 |= xmm12 | ||
9766 | # asm 1: por <xmm12=int6464#13,<xmm11=int6464#9 | ||
9767 | # asm 2: por <xmm12=%xmm12,<xmm11=%xmm8 | ||
9768 | por %xmm12,%xmm8 | ||
9769 | |||
9770 | # qhasm: xmm15 ^= xmm8 | ||
9771 | # asm 1: pxor <xmm8=int6464#15,<xmm15=int6464#16 | ||
9772 | # asm 2: pxor <xmm8=%xmm14,<xmm15=%xmm15 | ||
9773 | pxor %xmm14,%xmm15 | ||
9774 | |||
9775 | # qhasm: xmm14 &= xmm12 | ||
9776 | # asm 1: pand <xmm12=int6464#13,<xmm14=int6464#14 | ||
9777 | # asm 2: pand <xmm12=%xmm12,<xmm14=%xmm13 | ||
9778 | pand %xmm12,%xmm13 | ||
9779 | |||
9780 | # qhasm: xmm8 &= xmm9 | ||
9781 | # asm 1: pand <xmm9=int6464#11,<xmm8=int6464#15 | ||
9782 | # asm 2: pand <xmm9=%xmm10,<xmm8=%xmm14 | ||
9783 | pand %xmm10,%xmm14 | ||
9784 | |||
9785 | # qhasm: xmm12 ^= xmm9 | ||
9786 | # asm 1: pxor <xmm9=int6464#11,<xmm12=int6464#13 | ||
9787 | # asm 2: pxor <xmm9=%xmm10,<xmm12=%xmm12 | ||
9788 | pxor %xmm10,%xmm12 | ||
9789 | |||
9790 | # qhasm: xmm15 &= xmm12 | ||
9791 | # asm 1: pand <xmm12=int6464#13,<xmm15=int6464#16 | ||
9792 | # asm 2: pand <xmm12=%xmm12,<xmm15=%xmm15 | ||
9793 | pand %xmm12,%xmm15 | ||
9794 | |||
9795 | # qhasm: xmm12 = xmm3 | ||
9796 | # asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#11 | ||
9797 | # asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm10 | ||
9798 | movdqa %xmm3,%xmm10 | ||
9799 | |||
9800 | # qhasm: xmm12 ^= xmm0 | ||
9801 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#11 | ||
9802 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm10 | ||
9803 | pxor %xmm0,%xmm10 | ||
9804 | |||
9805 | # qhasm: xmm13 &= xmm12 | ||
9806 | # asm 1: pand <xmm12=int6464#11,<xmm13=int6464#12 | ||
9807 | # asm 2: pand <xmm12=%xmm10,<xmm13=%xmm11 | ||
9808 | pand %xmm10,%xmm11 | ||
9809 | |||
9810 | # qhasm: xmm11 ^= xmm13 | ||
9811 | # asm 1: pxor <xmm13=int6464#12,<xmm11=int6464#9 | ||
9812 | # asm 2: pxor <xmm13=%xmm11,<xmm11=%xmm8 | ||
9813 | pxor %xmm11,%xmm8 | ||
9814 | |||
9815 | # qhasm: xmm10 ^= xmm13 | ||
9816 | # asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10 | ||
9817 | # asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9 | ||
9818 | pxor %xmm11,%xmm9 | ||
9819 | |||
9820 | # qhasm: xmm13 = xmm7 | ||
9821 | # asm 1: movdqa <xmm7=int6464#8,>xmm13=int6464#11 | ||
9822 | # asm 2: movdqa <xmm7=%xmm7,>xmm13=%xmm10 | ||
9823 | movdqa %xmm7,%xmm10 | ||
9824 | |||
9825 | # qhasm: xmm13 ^= xmm1 | ||
9826 | # asm 1: pxor <xmm1=int6464#2,<xmm13=int6464#11 | ||
9827 | # asm 2: pxor <xmm1=%xmm1,<xmm13=%xmm10 | ||
9828 | pxor %xmm1,%xmm10 | ||
9829 | |||
9830 | # qhasm: xmm12 = xmm5 | ||
9831 | # asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#12 | ||
9832 | # asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm11 | ||
9833 | movdqa %xmm5,%xmm11 | ||
9834 | |||
9835 | # qhasm: xmm9 = xmm13 | ||
9836 | # asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13 | ||
9837 | # asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12 | ||
9838 | movdqa %xmm10,%xmm12 | ||
9839 | |||
9840 | # qhasm: xmm12 ^= xmm6 | ||
9841 | # asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#12 | ||
9842 | # asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm11 | ||
9843 | pxor %xmm6,%xmm11 | ||
9844 | |||
9845 | # qhasm: xmm9 |= xmm12 | ||
9846 | # asm 1: por <xmm12=int6464#12,<xmm9=int6464#13 | ||
9847 | # asm 2: por <xmm12=%xmm11,<xmm9=%xmm12 | ||
9848 | por %xmm11,%xmm12 | ||
9849 | |||
9850 | # qhasm: xmm13 &= xmm12 | ||
9851 | # asm 1: pand <xmm12=int6464#12,<xmm13=int6464#11 | ||
9852 | # asm 2: pand <xmm12=%xmm11,<xmm13=%xmm10 | ||
9853 | pand %xmm11,%xmm10 | ||
9854 | |||
9855 | # qhasm: xmm8 ^= xmm13 | ||
9856 | # asm 1: pxor <xmm13=int6464#11,<xmm8=int6464#15 | ||
9857 | # asm 2: pxor <xmm13=%xmm10,<xmm8=%xmm14 | ||
9858 | pxor %xmm10,%xmm14 | ||
9859 | |||
9860 | # qhasm: xmm11 ^= xmm15 | ||
9861 | # asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#9 | ||
9862 | # asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm8 | ||
9863 | pxor %xmm15,%xmm8 | ||
9864 | |||
9865 | # qhasm: xmm10 ^= xmm14 | ||
9866 | # asm 1: pxor <xmm14=int6464#14,<xmm10=int6464#10 | ||
9867 | # asm 2: pxor <xmm14=%xmm13,<xmm10=%xmm9 | ||
9868 | pxor %xmm13,%xmm9 | ||
9869 | |||
9870 | # qhasm: xmm9 ^= xmm15 | ||
9871 | # asm 1: pxor <xmm15=int6464#16,<xmm9=int6464#13 | ||
9872 | # asm 2: pxor <xmm15=%xmm15,<xmm9=%xmm12 | ||
9873 | pxor %xmm15,%xmm12 | ||
9874 | |||
9875 | # qhasm: xmm8 ^= xmm14 | ||
9876 | # asm 1: pxor <xmm14=int6464#14,<xmm8=int6464#15 | ||
9877 | # asm 2: pxor <xmm14=%xmm13,<xmm8=%xmm14 | ||
9878 | pxor %xmm13,%xmm14 | ||
9879 | |||
9880 | # qhasm: xmm9 ^= xmm14 | ||
9881 | # asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13 | ||
9882 | # asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12 | ||
9883 | pxor %xmm13,%xmm12 | ||
9884 | |||
9885 | # qhasm: xmm12 = xmm2 | ||
9886 | # asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#11 | ||
9887 | # asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm10 | ||
9888 | movdqa %xmm2,%xmm10 | ||
9889 | |||
9890 | # qhasm: xmm13 = xmm4 | ||
9891 | # asm 1: movdqa <xmm4=int6464#5,>xmm13=int6464#12 | ||
9892 | # asm 2: movdqa <xmm4=%xmm4,>xmm13=%xmm11 | ||
9893 | movdqa %xmm4,%xmm11 | ||
9894 | |||
9895 | # qhasm: xmm14 = xmm1 | ||
9896 | # asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14 | ||
9897 | # asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13 | ||
9898 | movdqa %xmm1,%xmm13 | ||
9899 | |||
9900 | # qhasm: xmm15 = xmm7 | ||
9901 | # asm 1: movdqa <xmm7=int6464#8,>xmm15=int6464#16 | ||
9902 | # asm 2: movdqa <xmm7=%xmm7,>xmm15=%xmm15 | ||
9903 | movdqa %xmm7,%xmm15 | ||
9904 | |||
9905 | # qhasm: xmm12 &= xmm3 | ||
9906 | # asm 1: pand <xmm3=int6464#4,<xmm12=int6464#11 | ||
9907 | # asm 2: pand <xmm3=%xmm3,<xmm12=%xmm10 | ||
9908 | pand %xmm3,%xmm10 | ||
9909 | |||
9910 | # qhasm: xmm13 &= xmm0 | ||
9911 | # asm 1: pand <xmm0=int6464#1,<xmm13=int6464#12 | ||
9912 | # asm 2: pand <xmm0=%xmm0,<xmm13=%xmm11 | ||
9913 | pand %xmm0,%xmm11 | ||
9914 | |||
9915 | # qhasm: xmm14 &= xmm5 | ||
9916 | # asm 1: pand <xmm5=int6464#6,<xmm14=int6464#14 | ||
9917 | # asm 2: pand <xmm5=%xmm5,<xmm14=%xmm13 | ||
9918 | pand %xmm5,%xmm13 | ||
9919 | |||
9920 | # qhasm: xmm15 |= xmm6 | ||
9921 | # asm 1: por <xmm6=int6464#7,<xmm15=int6464#16 | ||
9922 | # asm 2: por <xmm6=%xmm6,<xmm15=%xmm15 | ||
9923 | por %xmm6,%xmm15 | ||
9924 | |||
9925 | # qhasm: xmm11 ^= xmm12 | ||
9926 | # asm 1: pxor <xmm12=int6464#11,<xmm11=int6464#9 | ||
9927 | # asm 2: pxor <xmm12=%xmm10,<xmm11=%xmm8 | ||
9928 | pxor %xmm10,%xmm8 | ||
9929 | |||
9930 | # qhasm: xmm10 ^= xmm13 | ||
9931 | # asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10 | ||
9932 | # asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9 | ||
9933 | pxor %xmm11,%xmm9 | ||
9934 | |||
9935 | # qhasm: xmm9 ^= xmm14 | ||
9936 | # asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13 | ||
9937 | # asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12 | ||
9938 | pxor %xmm13,%xmm12 | ||
9939 | |||
9940 | # qhasm: xmm8 ^= xmm15 | ||
9941 | # asm 1: pxor <xmm15=int6464#16,<xmm8=int6464#15 | ||
9942 | # asm 2: pxor <xmm15=%xmm15,<xmm8=%xmm14 | ||
9943 | pxor %xmm15,%xmm14 | ||
9944 | |||
9945 | # qhasm: xmm12 = xmm11 | ||
9946 | # asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11 | ||
9947 | # asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10 | ||
9948 | movdqa %xmm8,%xmm10 | ||
9949 | |||
9950 | # qhasm: xmm12 ^= xmm10 | ||
9951 | # asm 1: pxor <xmm10=int6464#10,<xmm12=int6464#11 | ||
9952 | # asm 2: pxor <xmm10=%xmm9,<xmm12=%xmm10 | ||
9953 | pxor %xmm9,%xmm10 | ||
9954 | |||
9955 | # qhasm: xmm11 &= xmm9 | ||
9956 | # asm 1: pand <xmm9=int6464#13,<xmm11=int6464#9 | ||
9957 | # asm 2: pand <xmm9=%xmm12,<xmm11=%xmm8 | ||
9958 | pand %xmm12,%xmm8 | ||
9959 | |||
9960 | # qhasm: xmm14 = xmm8 | ||
9961 | # asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12 | ||
9962 | # asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11 | ||
9963 | movdqa %xmm14,%xmm11 | ||
9964 | |||
9965 | # qhasm: xmm14 ^= xmm11 | ||
9966 | # asm 1: pxor <xmm11=int6464#9,<xmm14=int6464#12 | ||
9967 | # asm 2: pxor <xmm11=%xmm8,<xmm14=%xmm11 | ||
9968 | pxor %xmm8,%xmm11 | ||
9969 | |||
9970 | # qhasm: xmm15 = xmm12 | ||
9971 | # asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14 | ||
9972 | # asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13 | ||
9973 | movdqa %xmm10,%xmm13 | ||
9974 | |||
9975 | # qhasm: xmm15 &= xmm14 | ||
9976 | # asm 1: pand <xmm14=int6464#12,<xmm15=int6464#14 | ||
9977 | # asm 2: pand <xmm14=%xmm11,<xmm15=%xmm13 | ||
9978 | pand %xmm11,%xmm13 | ||
9979 | |||
9980 | # qhasm: xmm15 ^= xmm10 | ||
9981 | # asm 1: pxor <xmm10=int6464#10,<xmm15=int6464#14 | ||
9982 | # asm 2: pxor <xmm10=%xmm9,<xmm15=%xmm13 | ||
9983 | pxor %xmm9,%xmm13 | ||
9984 | |||
9985 | # qhasm: xmm13 = xmm9 | ||
9986 | # asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16 | ||
9987 | # asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15 | ||
9988 | movdqa %xmm12,%xmm15 | ||
9989 | |||
9990 | # qhasm: xmm13 ^= xmm8 | ||
9991 | # asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16 | ||
9992 | # asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15 | ||
9993 | pxor %xmm14,%xmm15 | ||
9994 | |||
9995 | # qhasm: xmm11 ^= xmm10 | ||
9996 | # asm 1: pxor <xmm10=int6464#10,<xmm11=int6464#9 | ||
9997 | # asm 2: pxor <xmm10=%xmm9,<xmm11=%xmm8 | ||
9998 | pxor %xmm9,%xmm8 | ||
9999 | |||
10000 | # qhasm: xmm13 &= xmm11 | ||
10001 | # asm 1: pand <xmm11=int6464#9,<xmm13=int6464#16 | ||
10002 | # asm 2: pand <xmm11=%xmm8,<xmm13=%xmm15 | ||
10003 | pand %xmm8,%xmm15 | ||
10004 | |||
10005 | # qhasm: xmm13 ^= xmm8 | ||
10006 | # asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16 | ||
10007 | # asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15 | ||
10008 | pxor %xmm14,%xmm15 | ||
10009 | |||
10010 | # qhasm: xmm9 ^= xmm13 | ||
10011 | # asm 1: pxor <xmm13=int6464#16,<xmm9=int6464#13 | ||
10012 | # asm 2: pxor <xmm13=%xmm15,<xmm9=%xmm12 | ||
10013 | pxor %xmm15,%xmm12 | ||
10014 | |||
10015 | # qhasm: xmm10 = xmm14 | ||
10016 | # asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9 | ||
10017 | # asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8 | ||
10018 | movdqa %xmm11,%xmm8 | ||
10019 | |||
10020 | # qhasm: xmm10 ^= xmm13 | ||
10021 | # asm 1: pxor <xmm13=int6464#16,<xmm10=int6464#9 | ||
10022 | # asm 2: pxor <xmm13=%xmm15,<xmm10=%xmm8 | ||
10023 | pxor %xmm15,%xmm8 | ||
10024 | |||
10025 | # qhasm: xmm10 &= xmm8 | ||
10026 | # asm 1: pand <xmm8=int6464#15,<xmm10=int6464#9 | ||
10027 | # asm 2: pand <xmm8=%xmm14,<xmm10=%xmm8 | ||
10028 | pand %xmm14,%xmm8 | ||
10029 | |||
10030 | # qhasm: xmm9 ^= xmm10 | ||
10031 | # asm 1: pxor <xmm10=int6464#9,<xmm9=int6464#13 | ||
10032 | # asm 2: pxor <xmm10=%xmm8,<xmm9=%xmm12 | ||
10033 | pxor %xmm8,%xmm12 | ||
10034 | |||
10035 | # qhasm: xmm14 ^= xmm10 | ||
10036 | # asm 1: pxor <xmm10=int6464#9,<xmm14=int6464#12 | ||
10037 | # asm 2: pxor <xmm10=%xmm8,<xmm14=%xmm11 | ||
10038 | pxor %xmm8,%xmm11 | ||
10039 | |||
10040 | # qhasm: xmm14 &= xmm15 | ||
10041 | # asm 1: pand <xmm15=int6464#14,<xmm14=int6464#12 | ||
10042 | # asm 2: pand <xmm15=%xmm13,<xmm14=%xmm11 | ||
10043 | pand %xmm13,%xmm11 | ||
10044 | |||
10045 | # qhasm: xmm14 ^= xmm12 | ||
10046 | # asm 1: pxor <xmm12=int6464#11,<xmm14=int6464#12 | ||
10047 | # asm 2: pxor <xmm12=%xmm10,<xmm14=%xmm11 | ||
10048 | pxor %xmm10,%xmm11 | ||
10049 | |||
10050 | # qhasm: xmm12 = xmm6 | ||
10051 | # asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#9 | ||
10052 | # asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm8 | ||
10053 | movdqa %xmm6,%xmm8 | ||
10054 | |||
10055 | # qhasm: xmm8 = xmm5 | ||
10056 | # asm 1: movdqa <xmm5=int6464#6,>xmm8=int6464#10 | ||
10057 | # asm 2: movdqa <xmm5=%xmm5,>xmm8=%xmm9 | ||
10058 | movdqa %xmm5,%xmm9 | ||
10059 | |||
10060 | # qhasm: xmm10 = xmm15 | ||
10061 | # asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11 | ||
10062 | # asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10 | ||
10063 | movdqa %xmm13,%xmm10 | ||
10064 | |||
10065 | # qhasm: xmm10 ^= xmm14 | ||
10066 | # asm 1: pxor <xmm14=int6464#12,<xmm10=int6464#11 | ||
10067 | # asm 2: pxor <xmm14=%xmm11,<xmm10=%xmm10 | ||
10068 | pxor %xmm11,%xmm10 | ||
10069 | |||
10070 | # qhasm: xmm10 &= xmm6 | ||
10071 | # asm 1: pand <xmm6=int6464#7,<xmm10=int6464#11 | ||
10072 | # asm 2: pand <xmm6=%xmm6,<xmm10=%xmm10 | ||
10073 | pand %xmm6,%xmm10 | ||
10074 | |||
10075 | # qhasm: xmm6 ^= xmm5 | ||
10076 | # asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7 | ||
10077 | # asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6 | ||
10078 | pxor %xmm5,%xmm6 | ||
10079 | |||
10080 | # qhasm: xmm6 &= xmm14 | ||
10081 | # asm 1: pand <xmm14=int6464#12,<xmm6=int6464#7 | ||
10082 | # asm 2: pand <xmm14=%xmm11,<xmm6=%xmm6 | ||
10083 | pand %xmm11,%xmm6 | ||
10084 | |||
10085 | # qhasm: xmm5 &= xmm15 | ||
10086 | # asm 1: pand <xmm15=int6464#14,<xmm5=int6464#6 | ||
10087 | # asm 2: pand <xmm15=%xmm13,<xmm5=%xmm5 | ||
10088 | pand %xmm13,%xmm5 | ||
10089 | |||
10090 | # qhasm: xmm6 ^= xmm5 | ||
10091 | # asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7 | ||
10092 | # asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6 | ||
10093 | pxor %xmm5,%xmm6 | ||
10094 | |||
10095 | # qhasm: xmm5 ^= xmm10 | ||
10096 | # asm 1: pxor <xmm10=int6464#11,<xmm5=int6464#6 | ||
10097 | # asm 2: pxor <xmm10=%xmm10,<xmm5=%xmm5 | ||
10098 | pxor %xmm10,%xmm5 | ||
10099 | |||
10100 | # qhasm: xmm12 ^= xmm0 | ||
10101 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#9 | ||
10102 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm8 | ||
10103 | pxor %xmm0,%xmm8 | ||
10104 | |||
10105 | # qhasm: xmm8 ^= xmm3 | ||
10106 | # asm 1: pxor <xmm3=int6464#4,<xmm8=int6464#10 | ||
10107 | # asm 2: pxor <xmm3=%xmm3,<xmm8=%xmm9 | ||
10108 | pxor %xmm3,%xmm9 | ||
10109 | |||
10110 | # qhasm: xmm15 ^= xmm13 | ||
10111 | # asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14 | ||
10112 | # asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13 | ||
10113 | pxor %xmm15,%xmm13 | ||
10114 | |||
10115 | # qhasm: xmm14 ^= xmm9 | ||
10116 | # asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12 | ||
10117 | # asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11 | ||
10118 | pxor %xmm12,%xmm11 | ||
10119 | |||
10120 | # qhasm: xmm11 = xmm15 | ||
10121 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
10122 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
10123 | movdqa %xmm13,%xmm10 | ||
10124 | |||
10125 | # qhasm: xmm11 ^= xmm14 | ||
10126 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
10127 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
10128 | pxor %xmm11,%xmm10 | ||
10129 | |||
10130 | # qhasm: xmm11 &= xmm12 | ||
10131 | # asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11 | ||
10132 | # asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10 | ||
10133 | pand %xmm8,%xmm10 | ||
10134 | |||
10135 | # qhasm: xmm12 ^= xmm8 | ||
10136 | # asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9 | ||
10137 | # asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8 | ||
10138 | pxor %xmm9,%xmm8 | ||
10139 | |||
10140 | # qhasm: xmm12 &= xmm14 | ||
10141 | # asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9 | ||
10142 | # asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8 | ||
10143 | pand %xmm11,%xmm8 | ||
10144 | |||
10145 | # qhasm: xmm8 &= xmm15 | ||
10146 | # asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10 | ||
10147 | # asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9 | ||
10148 | pand %xmm13,%xmm9 | ||
10149 | |||
10150 | # qhasm: xmm8 ^= xmm12 | ||
10151 | # asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10 | ||
10152 | # asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9 | ||
10153 | pxor %xmm8,%xmm9 | ||
10154 | |||
10155 | # qhasm: xmm12 ^= xmm11 | ||
10156 | # asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9 | ||
10157 | # asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8 | ||
10158 | pxor %xmm10,%xmm8 | ||
10159 | |||
10160 | # qhasm: xmm10 = xmm13 | ||
10161 | # asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11 | ||
10162 | # asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10 | ||
10163 | movdqa %xmm15,%xmm10 | ||
10164 | |||
10165 | # qhasm: xmm10 ^= xmm9 | ||
10166 | # asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11 | ||
10167 | # asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10 | ||
10168 | pxor %xmm12,%xmm10 | ||
10169 | |||
10170 | # qhasm: xmm10 &= xmm0 | ||
10171 | # asm 1: pand <xmm0=int6464#1,<xmm10=int6464#11 | ||
10172 | # asm 2: pand <xmm0=%xmm0,<xmm10=%xmm10 | ||
10173 | pand %xmm0,%xmm10 | ||
10174 | |||
10175 | # qhasm: xmm0 ^= xmm3 | ||
10176 | # asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1 | ||
10177 | # asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0 | ||
10178 | pxor %xmm3,%xmm0 | ||
10179 | |||
10180 | # qhasm: xmm0 &= xmm9 | ||
10181 | # asm 1: pand <xmm9=int6464#13,<xmm0=int6464#1 | ||
10182 | # asm 2: pand <xmm9=%xmm12,<xmm0=%xmm0 | ||
10183 | pand %xmm12,%xmm0 | ||
10184 | |||
10185 | # qhasm: xmm3 &= xmm13 | ||
10186 | # asm 1: pand <xmm13=int6464#16,<xmm3=int6464#4 | ||
10187 | # asm 2: pand <xmm13=%xmm15,<xmm3=%xmm3 | ||
10188 | pand %xmm15,%xmm3 | ||
10189 | |||
10190 | # qhasm: xmm0 ^= xmm3 | ||
10191 | # asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1 | ||
10192 | # asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0 | ||
10193 | pxor %xmm3,%xmm0 | ||
10194 | |||
10195 | # qhasm: xmm3 ^= xmm10 | ||
10196 | # asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4 | ||
10197 | # asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3 | ||
10198 | pxor %xmm10,%xmm3 | ||
10199 | |||
10200 | # qhasm: xmm6 ^= xmm12 | ||
10201 | # asm 1: pxor <xmm12=int6464#9,<xmm6=int6464#7 | ||
10202 | # asm 2: pxor <xmm12=%xmm8,<xmm6=%xmm6 | ||
10203 | pxor %xmm8,%xmm6 | ||
10204 | |||
10205 | # qhasm: xmm0 ^= xmm12 | ||
10206 | # asm 1: pxor <xmm12=int6464#9,<xmm0=int6464#1 | ||
10207 | # asm 2: pxor <xmm12=%xmm8,<xmm0=%xmm0 | ||
10208 | pxor %xmm8,%xmm0 | ||
10209 | |||
10210 | # qhasm: xmm5 ^= xmm8 | ||
10211 | # asm 1: pxor <xmm8=int6464#10,<xmm5=int6464#6 | ||
10212 | # asm 2: pxor <xmm8=%xmm9,<xmm5=%xmm5 | ||
10213 | pxor %xmm9,%xmm5 | ||
10214 | |||
10215 | # qhasm: xmm3 ^= xmm8 | ||
10216 | # asm 1: pxor <xmm8=int6464#10,<xmm3=int6464#4 | ||
10217 | # asm 2: pxor <xmm8=%xmm9,<xmm3=%xmm3 | ||
10218 | pxor %xmm9,%xmm3 | ||
10219 | |||
10220 | # qhasm: xmm12 = xmm7 | ||
10221 | # asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#9 | ||
10222 | # asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm8 | ||
10223 | movdqa %xmm7,%xmm8 | ||
10224 | |||
10225 | # qhasm: xmm8 = xmm1 | ||
10226 | # asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10 | ||
10227 | # asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9 | ||
10228 | movdqa %xmm1,%xmm9 | ||
10229 | |||
10230 | # qhasm: xmm12 ^= xmm4 | ||
10231 | # asm 1: pxor <xmm4=int6464#5,<xmm12=int6464#9 | ||
10232 | # asm 2: pxor <xmm4=%xmm4,<xmm12=%xmm8 | ||
10233 | pxor %xmm4,%xmm8 | ||
10234 | |||
10235 | # qhasm: xmm8 ^= xmm2 | ||
10236 | # asm 1: pxor <xmm2=int6464#3,<xmm8=int6464#10 | ||
10237 | # asm 2: pxor <xmm2=%xmm2,<xmm8=%xmm9 | ||
10238 | pxor %xmm2,%xmm9 | ||
10239 | |||
10240 | # qhasm: xmm11 = xmm15 | ||
10241 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
10242 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
10243 | movdqa %xmm13,%xmm10 | ||
10244 | |||
10245 | # qhasm: xmm11 ^= xmm14 | ||
10246 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
10247 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
10248 | pxor %xmm11,%xmm10 | ||
10249 | |||
10250 | # qhasm: xmm11 &= xmm12 | ||
10251 | # asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11 | ||
10252 | # asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10 | ||
10253 | pand %xmm8,%xmm10 | ||
10254 | |||
10255 | # qhasm: xmm12 ^= xmm8 | ||
10256 | # asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9 | ||
10257 | # asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8 | ||
10258 | pxor %xmm9,%xmm8 | ||
10259 | |||
10260 | # qhasm: xmm12 &= xmm14 | ||
10261 | # asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9 | ||
10262 | # asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8 | ||
10263 | pand %xmm11,%xmm8 | ||
10264 | |||
10265 | # qhasm: xmm8 &= xmm15 | ||
10266 | # asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10 | ||
10267 | # asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9 | ||
10268 | pand %xmm13,%xmm9 | ||
10269 | |||
10270 | # qhasm: xmm8 ^= xmm12 | ||
10271 | # asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10 | ||
10272 | # asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9 | ||
10273 | pxor %xmm8,%xmm9 | ||
10274 | |||
10275 | # qhasm: xmm12 ^= xmm11 | ||
10276 | # asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9 | ||
10277 | # asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8 | ||
10278 | pxor %xmm10,%xmm8 | ||
10279 | |||
10280 | # qhasm: xmm10 = xmm13 | ||
10281 | # asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11 | ||
10282 | # asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10 | ||
10283 | movdqa %xmm15,%xmm10 | ||
10284 | |||
10285 | # qhasm: xmm10 ^= xmm9 | ||
10286 | # asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11 | ||
10287 | # asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10 | ||
10288 | pxor %xmm12,%xmm10 | ||
10289 | |||
10290 | # qhasm: xmm10 &= xmm4 | ||
10291 | # asm 1: pand <xmm4=int6464#5,<xmm10=int6464#11 | ||
10292 | # asm 2: pand <xmm4=%xmm4,<xmm10=%xmm10 | ||
10293 | pand %xmm4,%xmm10 | ||
10294 | |||
10295 | # qhasm: xmm4 ^= xmm2 | ||
10296 | # asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5 | ||
10297 | # asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4 | ||
10298 | pxor %xmm2,%xmm4 | ||
10299 | |||
10300 | # qhasm: xmm4 &= xmm9 | ||
10301 | # asm 1: pand <xmm9=int6464#13,<xmm4=int6464#5 | ||
10302 | # asm 2: pand <xmm9=%xmm12,<xmm4=%xmm4 | ||
10303 | pand %xmm12,%xmm4 | ||
10304 | |||
10305 | # qhasm: xmm2 &= xmm13 | ||
10306 | # asm 1: pand <xmm13=int6464#16,<xmm2=int6464#3 | ||
10307 | # asm 2: pand <xmm13=%xmm15,<xmm2=%xmm2 | ||
10308 | pand %xmm15,%xmm2 | ||
10309 | |||
10310 | # qhasm: xmm4 ^= xmm2 | ||
10311 | # asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5 | ||
10312 | # asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4 | ||
10313 | pxor %xmm2,%xmm4 | ||
10314 | |||
10315 | # qhasm: xmm2 ^= xmm10 | ||
10316 | # asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3 | ||
10317 | # asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2 | ||
10318 | pxor %xmm10,%xmm2 | ||
10319 | |||
10320 | # qhasm: xmm15 ^= xmm13 | ||
10321 | # asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14 | ||
10322 | # asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13 | ||
10323 | pxor %xmm15,%xmm13 | ||
10324 | |||
10325 | # qhasm: xmm14 ^= xmm9 | ||
10326 | # asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12 | ||
10327 | # asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11 | ||
10328 | pxor %xmm12,%xmm11 | ||
10329 | |||
10330 | # qhasm: xmm11 = xmm15 | ||
10331 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
10332 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
10333 | movdqa %xmm13,%xmm10 | ||
10334 | |||
10335 | # qhasm: xmm11 ^= xmm14 | ||
10336 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
10337 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
10338 | pxor %xmm11,%xmm10 | ||
10339 | |||
10340 | # qhasm: xmm11 &= xmm7 | ||
10341 | # asm 1: pand <xmm7=int6464#8,<xmm11=int6464#11 | ||
10342 | # asm 2: pand <xmm7=%xmm7,<xmm11=%xmm10 | ||
10343 | pand %xmm7,%xmm10 | ||
10344 | |||
10345 | # qhasm: xmm7 ^= xmm1 | ||
10346 | # asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8 | ||
10347 | # asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7 | ||
10348 | pxor %xmm1,%xmm7 | ||
10349 | |||
10350 | # qhasm: xmm7 &= xmm14 | ||
10351 | # asm 1: pand <xmm14=int6464#12,<xmm7=int6464#8 | ||
10352 | # asm 2: pand <xmm14=%xmm11,<xmm7=%xmm7 | ||
10353 | pand %xmm11,%xmm7 | ||
10354 | |||
10355 | # qhasm: xmm1 &= xmm15 | ||
10356 | # asm 1: pand <xmm15=int6464#14,<xmm1=int6464#2 | ||
10357 | # asm 2: pand <xmm15=%xmm13,<xmm1=%xmm1 | ||
10358 | pand %xmm13,%xmm1 | ||
10359 | |||
10360 | # qhasm: xmm7 ^= xmm1 | ||
10361 | # asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8 | ||
10362 | # asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7 | ||
10363 | pxor %xmm1,%xmm7 | ||
10364 | |||
10365 | # qhasm: xmm1 ^= xmm11 | ||
10366 | # asm 1: pxor <xmm11=int6464#11,<xmm1=int6464#2 | ||
10367 | # asm 2: pxor <xmm11=%xmm10,<xmm1=%xmm1 | ||
10368 | pxor %xmm10,%xmm1 | ||
10369 | |||
10370 | # qhasm: xmm7 ^= xmm12 | ||
10371 | # asm 1: pxor <xmm12=int6464#9,<xmm7=int6464#8 | ||
10372 | # asm 2: pxor <xmm12=%xmm8,<xmm7=%xmm7 | ||
10373 | pxor %xmm8,%xmm7 | ||
10374 | |||
10375 | # qhasm: xmm4 ^= xmm12 | ||
10376 | # asm 1: pxor <xmm12=int6464#9,<xmm4=int6464#5 | ||
10377 | # asm 2: pxor <xmm12=%xmm8,<xmm4=%xmm4 | ||
10378 | pxor %xmm8,%xmm4 | ||
10379 | |||
10380 | # qhasm: xmm1 ^= xmm8 | ||
10381 | # asm 1: pxor <xmm8=int6464#10,<xmm1=int6464#2 | ||
10382 | # asm 2: pxor <xmm8=%xmm9,<xmm1=%xmm1 | ||
10383 | pxor %xmm9,%xmm1 | ||
10384 | |||
10385 | # qhasm: xmm2 ^= xmm8 | ||
10386 | # asm 1: pxor <xmm8=int6464#10,<xmm2=int6464#3 | ||
10387 | # asm 2: pxor <xmm8=%xmm9,<xmm2=%xmm2 | ||
10388 | pxor %xmm9,%xmm2 | ||
10389 | |||
10390 | # qhasm: xmm7 ^= xmm0 | ||
10391 | # asm 1: pxor <xmm0=int6464#1,<xmm7=int6464#8 | ||
10392 | # asm 2: pxor <xmm0=%xmm0,<xmm7=%xmm7 | ||
10393 | pxor %xmm0,%xmm7 | ||
10394 | |||
10395 | # qhasm: xmm1 ^= xmm6 | ||
10396 | # asm 1: pxor <xmm6=int6464#7,<xmm1=int6464#2 | ||
10397 | # asm 2: pxor <xmm6=%xmm6,<xmm1=%xmm1 | ||
10398 | pxor %xmm6,%xmm1 | ||
10399 | |||
10400 | # qhasm: xmm4 ^= xmm7 | ||
10401 | # asm 1: pxor <xmm7=int6464#8,<xmm4=int6464#5 | ||
10402 | # asm 2: pxor <xmm7=%xmm7,<xmm4=%xmm4 | ||
10403 | pxor %xmm7,%xmm4 | ||
10404 | |||
10405 | # qhasm: xmm6 ^= xmm0 | ||
10406 | # asm 1: pxor <xmm0=int6464#1,<xmm6=int6464#7 | ||
10407 | # asm 2: pxor <xmm0=%xmm0,<xmm6=%xmm6 | ||
10408 | pxor %xmm0,%xmm6 | ||
10409 | |||
10410 | # qhasm: xmm0 ^= xmm1 | ||
10411 | # asm 1: pxor <xmm1=int6464#2,<xmm0=int6464#1 | ||
10412 | # asm 2: pxor <xmm1=%xmm1,<xmm0=%xmm0 | ||
10413 | pxor %xmm1,%xmm0 | ||
10414 | |||
10415 | # qhasm: xmm1 ^= xmm5 | ||
10416 | # asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2 | ||
10417 | # asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1 | ||
10418 | pxor %xmm5,%xmm1 | ||
10419 | |||
10420 | # qhasm: xmm5 ^= xmm2 | ||
10421 | # asm 1: pxor <xmm2=int6464#3,<xmm5=int6464#6 | ||
10422 | # asm 2: pxor <xmm2=%xmm2,<xmm5=%xmm5 | ||
10423 | pxor %xmm2,%xmm5 | ||
10424 | |||
10425 | # qhasm: xmm4 ^= xmm5 | ||
10426 | # asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5 | ||
10427 | # asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4 | ||
10428 | pxor %xmm5,%xmm4 | ||
10429 | |||
10430 | # qhasm: xmm2 ^= xmm3 | ||
10431 | # asm 1: pxor <xmm3=int6464#4,<xmm2=int6464#3 | ||
10432 | # asm 2: pxor <xmm3=%xmm3,<xmm2=%xmm2 | ||
10433 | pxor %xmm3,%xmm2 | ||
10434 | |||
10435 | # qhasm: xmm3 ^= xmm5 | ||
10436 | # asm 1: pxor <xmm5=int6464#6,<xmm3=int6464#4 | ||
10437 | # asm 2: pxor <xmm5=%xmm5,<xmm3=%xmm3 | ||
10438 | pxor %xmm5,%xmm3 | ||
10439 | |||
10440 | # qhasm: xmm6 ^= xmm3 | ||
10441 | # asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7 | ||
10442 | # asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6 | ||
10443 | pxor %xmm3,%xmm6 | ||
10444 | |||
10445 | # qhasm: xmm8 = shuffle dwords of xmm0 by 0x93 | ||
10446 | # asm 1: pshufd $0x93,<xmm0=int6464#1,>xmm8=int6464#9 | ||
10447 | # asm 2: pshufd $0x93,<xmm0=%xmm0,>xmm8=%xmm8 | ||
10448 | pshufd $0x93,%xmm0,%xmm8 | ||
10449 | |||
10450 | # qhasm: xmm9 = shuffle dwords of xmm1 by 0x93 | ||
10451 | # asm 1: pshufd $0x93,<xmm1=int6464#2,>xmm9=int6464#10 | ||
10452 | # asm 2: pshufd $0x93,<xmm1=%xmm1,>xmm9=%xmm9 | ||
10453 | pshufd $0x93,%xmm1,%xmm9 | ||
10454 | |||
10455 | # qhasm: xmm10 = shuffle dwords of xmm4 by 0x93 | ||
10456 | # asm 1: pshufd $0x93,<xmm4=int6464#5,>xmm10=int6464#11 | ||
10457 | # asm 2: pshufd $0x93,<xmm4=%xmm4,>xmm10=%xmm10 | ||
10458 | pshufd $0x93,%xmm4,%xmm10 | ||
10459 | |||
10460 | # qhasm: xmm11 = shuffle dwords of xmm6 by 0x93 | ||
10461 | # asm 1: pshufd $0x93,<xmm6=int6464#7,>xmm11=int6464#12 | ||
10462 | # asm 2: pshufd $0x93,<xmm6=%xmm6,>xmm11=%xmm11 | ||
10463 | pshufd $0x93,%xmm6,%xmm11 | ||
10464 | |||
10465 | # qhasm: xmm12 = shuffle dwords of xmm3 by 0x93 | ||
10466 | # asm 1: pshufd $0x93,<xmm3=int6464#4,>xmm12=int6464#13 | ||
10467 | # asm 2: pshufd $0x93,<xmm3=%xmm3,>xmm12=%xmm12 | ||
10468 | pshufd $0x93,%xmm3,%xmm12 | ||
10469 | |||
10470 | # qhasm: xmm13 = shuffle dwords of xmm7 by 0x93 | ||
10471 | # asm 1: pshufd $0x93,<xmm7=int6464#8,>xmm13=int6464#14 | ||
10472 | # asm 2: pshufd $0x93,<xmm7=%xmm7,>xmm13=%xmm13 | ||
10473 | pshufd $0x93,%xmm7,%xmm13 | ||
10474 | |||
10475 | # qhasm: xmm14 = shuffle dwords of xmm2 by 0x93 | ||
10476 | # asm 1: pshufd $0x93,<xmm2=int6464#3,>xmm14=int6464#15 | ||
10477 | # asm 2: pshufd $0x93,<xmm2=%xmm2,>xmm14=%xmm14 | ||
10478 | pshufd $0x93,%xmm2,%xmm14 | ||
10479 | |||
10480 | # qhasm: xmm15 = shuffle dwords of xmm5 by 0x93 | ||
10481 | # asm 1: pshufd $0x93,<xmm5=int6464#6,>xmm15=int6464#16 | ||
10482 | # asm 2: pshufd $0x93,<xmm5=%xmm5,>xmm15=%xmm15 | ||
10483 | pshufd $0x93,%xmm5,%xmm15 | ||
10484 | |||
10485 | # qhasm: xmm0 ^= xmm8 | ||
10486 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
10487 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
10488 | pxor %xmm8,%xmm0 | ||
10489 | |||
10490 | # qhasm: xmm1 ^= xmm9 | ||
10491 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
10492 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
10493 | pxor %xmm9,%xmm1 | ||
10494 | |||
10495 | # qhasm: xmm4 ^= xmm10 | ||
10496 | # asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5 | ||
10497 | # asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4 | ||
10498 | pxor %xmm10,%xmm4 | ||
10499 | |||
10500 | # qhasm: xmm6 ^= xmm11 | ||
10501 | # asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7 | ||
10502 | # asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6 | ||
10503 | pxor %xmm11,%xmm6 | ||
10504 | |||
10505 | # qhasm: xmm3 ^= xmm12 | ||
10506 | # asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4 | ||
10507 | # asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3 | ||
10508 | pxor %xmm12,%xmm3 | ||
10509 | |||
10510 | # qhasm: xmm7 ^= xmm13 | ||
10511 | # asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8 | ||
10512 | # asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7 | ||
10513 | pxor %xmm13,%xmm7 | ||
10514 | |||
10515 | # qhasm: xmm2 ^= xmm14 | ||
10516 | # asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3 | ||
10517 | # asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2 | ||
10518 | pxor %xmm14,%xmm2 | ||
10519 | |||
10520 | # qhasm: xmm5 ^= xmm15 | ||
10521 | # asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6 | ||
10522 | # asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5 | ||
10523 | pxor %xmm15,%xmm5 | ||
10524 | |||
10525 | # qhasm: xmm8 ^= xmm5 | ||
10526 | # asm 1: pxor <xmm5=int6464#6,<xmm8=int6464#9 | ||
10527 | # asm 2: pxor <xmm5=%xmm5,<xmm8=%xmm8 | ||
10528 | pxor %xmm5,%xmm8 | ||
10529 | |||
10530 | # qhasm: xmm9 ^= xmm0 | ||
10531 | # asm 1: pxor <xmm0=int6464#1,<xmm9=int6464#10 | ||
10532 | # asm 2: pxor <xmm0=%xmm0,<xmm9=%xmm9 | ||
10533 | pxor %xmm0,%xmm9 | ||
10534 | |||
10535 | # qhasm: xmm10 ^= xmm1 | ||
10536 | # asm 1: pxor <xmm1=int6464#2,<xmm10=int6464#11 | ||
10537 | # asm 2: pxor <xmm1=%xmm1,<xmm10=%xmm10 | ||
10538 | pxor %xmm1,%xmm10 | ||
10539 | |||
10540 | # qhasm: xmm9 ^= xmm5 | ||
10541 | # asm 1: pxor <xmm5=int6464#6,<xmm9=int6464#10 | ||
10542 | # asm 2: pxor <xmm5=%xmm5,<xmm9=%xmm9 | ||
10543 | pxor %xmm5,%xmm9 | ||
10544 | |||
10545 | # qhasm: xmm11 ^= xmm4 | ||
10546 | # asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#12 | ||
10547 | # asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm11 | ||
10548 | pxor %xmm4,%xmm11 | ||
10549 | |||
10550 | # qhasm: xmm12 ^= xmm6 | ||
10551 | # asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#13 | ||
10552 | # asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm12 | ||
10553 | pxor %xmm6,%xmm12 | ||
10554 | |||
10555 | # qhasm: xmm13 ^= xmm3 | ||
10556 | # asm 1: pxor <xmm3=int6464#4,<xmm13=int6464#14 | ||
10557 | # asm 2: pxor <xmm3=%xmm3,<xmm13=%xmm13 | ||
10558 | pxor %xmm3,%xmm13 | ||
10559 | |||
10560 | # qhasm: xmm11 ^= xmm5 | ||
10561 | # asm 1: pxor <xmm5=int6464#6,<xmm11=int6464#12 | ||
10562 | # asm 2: pxor <xmm5=%xmm5,<xmm11=%xmm11 | ||
10563 | pxor %xmm5,%xmm11 | ||
10564 | |||
10565 | # qhasm: xmm14 ^= xmm7 | ||
10566 | # asm 1: pxor <xmm7=int6464#8,<xmm14=int6464#15 | ||
10567 | # asm 2: pxor <xmm7=%xmm7,<xmm14=%xmm14 | ||
10568 | pxor %xmm7,%xmm14 | ||
10569 | |||
10570 | # qhasm: xmm15 ^= xmm2 | ||
10571 | # asm 1: pxor <xmm2=int6464#3,<xmm15=int6464#16 | ||
10572 | # asm 2: pxor <xmm2=%xmm2,<xmm15=%xmm15 | ||
10573 | pxor %xmm2,%xmm15 | ||
10574 | |||
10575 | # qhasm: xmm12 ^= xmm5 | ||
10576 | # asm 1: pxor <xmm5=int6464#6,<xmm12=int6464#13 | ||
10577 | # asm 2: pxor <xmm5=%xmm5,<xmm12=%xmm12 | ||
10578 | pxor %xmm5,%xmm12 | ||
10579 | |||
10580 | # qhasm: xmm0 = shuffle dwords of xmm0 by 0x4E | ||
10581 | # asm 1: pshufd $0x4E,<xmm0=int6464#1,>xmm0=int6464#1 | ||
10582 | # asm 2: pshufd $0x4E,<xmm0=%xmm0,>xmm0=%xmm0 | ||
10583 | pshufd $0x4E,%xmm0,%xmm0 | ||
10584 | |||
10585 | # qhasm: xmm1 = shuffle dwords of xmm1 by 0x4E | ||
10586 | # asm 1: pshufd $0x4E,<xmm1=int6464#2,>xmm1=int6464#2 | ||
10587 | # asm 2: pshufd $0x4E,<xmm1=%xmm1,>xmm1=%xmm1 | ||
10588 | pshufd $0x4E,%xmm1,%xmm1 | ||
10589 | |||
10590 | # qhasm: xmm4 = shuffle dwords of xmm4 by 0x4E | ||
10591 | # asm 1: pshufd $0x4E,<xmm4=int6464#5,>xmm4=int6464#5 | ||
10592 | # asm 2: pshufd $0x4E,<xmm4=%xmm4,>xmm4=%xmm4 | ||
10593 | pshufd $0x4E,%xmm4,%xmm4 | ||
10594 | |||
10595 | # qhasm: xmm6 = shuffle dwords of xmm6 by 0x4E | ||
10596 | # asm 1: pshufd $0x4E,<xmm6=int6464#7,>xmm6=int6464#7 | ||
10597 | # asm 2: pshufd $0x4E,<xmm6=%xmm6,>xmm6=%xmm6 | ||
10598 | pshufd $0x4E,%xmm6,%xmm6 | ||
10599 | |||
10600 | # qhasm: xmm3 = shuffle dwords of xmm3 by 0x4E | ||
10601 | # asm 1: pshufd $0x4E,<xmm3=int6464#4,>xmm3=int6464#4 | ||
10602 | # asm 2: pshufd $0x4E,<xmm3=%xmm3,>xmm3=%xmm3 | ||
10603 | pshufd $0x4E,%xmm3,%xmm3 | ||
10604 | |||
10605 | # qhasm: xmm7 = shuffle dwords of xmm7 by 0x4E | ||
10606 | # asm 1: pshufd $0x4E,<xmm7=int6464#8,>xmm7=int6464#8 | ||
10607 | # asm 2: pshufd $0x4E,<xmm7=%xmm7,>xmm7=%xmm7 | ||
10608 | pshufd $0x4E,%xmm7,%xmm7 | ||
10609 | |||
10610 | # qhasm: xmm2 = shuffle dwords of xmm2 by 0x4E | ||
10611 | # asm 1: pshufd $0x4E,<xmm2=int6464#3,>xmm2=int6464#3 | ||
10612 | # asm 2: pshufd $0x4E,<xmm2=%xmm2,>xmm2=%xmm2 | ||
10613 | pshufd $0x4E,%xmm2,%xmm2 | ||
10614 | |||
10615 | # qhasm: xmm5 = shuffle dwords of xmm5 by 0x4E | ||
10616 | # asm 1: pshufd $0x4E,<xmm5=int6464#6,>xmm5=int6464#6 | ||
10617 | # asm 2: pshufd $0x4E,<xmm5=%xmm5,>xmm5=%xmm5 | ||
10618 | pshufd $0x4E,%xmm5,%xmm5 | ||
10619 | |||
10620 | # qhasm: xmm8 ^= xmm0 | ||
10621 | # asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9 | ||
10622 | # asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8 | ||
10623 | pxor %xmm0,%xmm8 | ||
10624 | |||
10625 | # qhasm: xmm9 ^= xmm1 | ||
10626 | # asm 1: pxor <xmm1=int6464#2,<xmm9=int6464#10 | ||
10627 | # asm 2: pxor <xmm1=%xmm1,<xmm9=%xmm9 | ||
10628 | pxor %xmm1,%xmm9 | ||
10629 | |||
10630 | # qhasm: xmm10 ^= xmm4 | ||
10631 | # asm 1: pxor <xmm4=int6464#5,<xmm10=int6464#11 | ||
10632 | # asm 2: pxor <xmm4=%xmm4,<xmm10=%xmm10 | ||
10633 | pxor %xmm4,%xmm10 | ||
10634 | |||
10635 | # qhasm: xmm11 ^= xmm6 | ||
10636 | # asm 1: pxor <xmm6=int6464#7,<xmm11=int6464#12 | ||
10637 | # asm 2: pxor <xmm6=%xmm6,<xmm11=%xmm11 | ||
10638 | pxor %xmm6,%xmm11 | ||
10639 | |||
10640 | # qhasm: xmm12 ^= xmm3 | ||
10641 | # asm 1: pxor <xmm3=int6464#4,<xmm12=int6464#13 | ||
10642 | # asm 2: pxor <xmm3=%xmm3,<xmm12=%xmm12 | ||
10643 | pxor %xmm3,%xmm12 | ||
10644 | |||
10645 | # qhasm: xmm13 ^= xmm7 | ||
10646 | # asm 1: pxor <xmm7=int6464#8,<xmm13=int6464#14 | ||
10647 | # asm 2: pxor <xmm7=%xmm7,<xmm13=%xmm13 | ||
10648 | pxor %xmm7,%xmm13 | ||
10649 | |||
10650 | # qhasm: xmm14 ^= xmm2 | ||
10651 | # asm 1: pxor <xmm2=int6464#3,<xmm14=int6464#15 | ||
10652 | # asm 2: pxor <xmm2=%xmm2,<xmm14=%xmm14 | ||
10653 | pxor %xmm2,%xmm14 | ||
10654 | |||
10655 | # qhasm: xmm15 ^= xmm5 | ||
10656 | # asm 1: pxor <xmm5=int6464#6,<xmm15=int6464#16 | ||
10657 | # asm 2: pxor <xmm5=%xmm5,<xmm15=%xmm15 | ||
10658 | pxor %xmm5,%xmm15 | ||
10659 | |||
10660 | # qhasm: xmm8 ^= *(int128 *)(c + 1152) | ||
10661 | # asm 1: pxor 1152(<c=int64#4),<xmm8=int6464#9 | ||
10662 | # asm 2: pxor 1152(<c=%rcx),<xmm8=%xmm8 | ||
10663 | pxor 1152(%rcx),%xmm8 | ||
10664 | |||
10665 | # qhasm: shuffle bytes of xmm8 by SRM0 | ||
10666 | # asm 1: pshufb SRM0,<xmm8=int6464#9 | ||
10667 | # asm 2: pshufb SRM0,<xmm8=%xmm8 | ||
10668 | pshufb SRM0,%xmm8 | ||
10669 | |||
10670 | # qhasm: xmm9 ^= *(int128 *)(c + 1168) | ||
10671 | # asm 1: pxor 1168(<c=int64#4),<xmm9=int6464#10 | ||
10672 | # asm 2: pxor 1168(<c=%rcx),<xmm9=%xmm9 | ||
10673 | pxor 1168(%rcx),%xmm9 | ||
10674 | |||
10675 | # qhasm: shuffle bytes of xmm9 by SRM0 | ||
10676 | # asm 1: pshufb SRM0,<xmm9=int6464#10 | ||
10677 | # asm 2: pshufb SRM0,<xmm9=%xmm9 | ||
10678 | pshufb SRM0,%xmm9 | ||
10679 | |||
10680 | # qhasm: xmm10 ^= *(int128 *)(c + 1184) | ||
10681 | # asm 1: pxor 1184(<c=int64#4),<xmm10=int6464#11 | ||
10682 | # asm 2: pxor 1184(<c=%rcx),<xmm10=%xmm10 | ||
10683 | pxor 1184(%rcx),%xmm10 | ||
10684 | |||
10685 | # qhasm: shuffle bytes of xmm10 by SRM0 | ||
10686 | # asm 1: pshufb SRM0,<xmm10=int6464#11 | ||
10687 | # asm 2: pshufb SRM0,<xmm10=%xmm10 | ||
10688 | pshufb SRM0,%xmm10 | ||
10689 | |||
10690 | # qhasm: xmm11 ^= *(int128 *)(c + 1200) | ||
10691 | # asm 1: pxor 1200(<c=int64#4),<xmm11=int6464#12 | ||
10692 | # asm 2: pxor 1200(<c=%rcx),<xmm11=%xmm11 | ||
10693 | pxor 1200(%rcx),%xmm11 | ||
10694 | |||
10695 | # qhasm: shuffle bytes of xmm11 by SRM0 | ||
10696 | # asm 1: pshufb SRM0,<xmm11=int6464#12 | ||
10697 | # asm 2: pshufb SRM0,<xmm11=%xmm11 | ||
10698 | pshufb SRM0,%xmm11 | ||
10699 | |||
10700 | # qhasm: xmm12 ^= *(int128 *)(c + 1216) | ||
10701 | # asm 1: pxor 1216(<c=int64#4),<xmm12=int6464#13 | ||
10702 | # asm 2: pxor 1216(<c=%rcx),<xmm12=%xmm12 | ||
10703 | pxor 1216(%rcx),%xmm12 | ||
10704 | |||
10705 | # qhasm: shuffle bytes of xmm12 by SRM0 | ||
10706 | # asm 1: pshufb SRM0,<xmm12=int6464#13 | ||
10707 | # asm 2: pshufb SRM0,<xmm12=%xmm12 | ||
10708 | pshufb SRM0,%xmm12 | ||
10709 | |||
10710 | # qhasm: xmm13 ^= *(int128 *)(c + 1232) | ||
10711 | # asm 1: pxor 1232(<c=int64#4),<xmm13=int6464#14 | ||
10712 | # asm 2: pxor 1232(<c=%rcx),<xmm13=%xmm13 | ||
10713 | pxor 1232(%rcx),%xmm13 | ||
10714 | |||
10715 | # qhasm: shuffle bytes of xmm13 by SRM0 | ||
10716 | # asm 1: pshufb SRM0,<xmm13=int6464#14 | ||
10717 | # asm 2: pshufb SRM0,<xmm13=%xmm13 | ||
10718 | pshufb SRM0,%xmm13 | ||
10719 | |||
10720 | # qhasm: xmm14 ^= *(int128 *)(c + 1248) | ||
10721 | # asm 1: pxor 1248(<c=int64#4),<xmm14=int6464#15 | ||
10722 | # asm 2: pxor 1248(<c=%rcx),<xmm14=%xmm14 | ||
10723 | pxor 1248(%rcx),%xmm14 | ||
10724 | |||
10725 | # qhasm: shuffle bytes of xmm14 by SRM0 | ||
10726 | # asm 1: pshufb SRM0,<xmm14=int6464#15 | ||
10727 | # asm 2: pshufb SRM0,<xmm14=%xmm14 | ||
10728 | pshufb SRM0,%xmm14 | ||
10729 | |||
10730 | # qhasm: xmm15 ^= *(int128 *)(c + 1264) | ||
10731 | # asm 1: pxor 1264(<c=int64#4),<xmm15=int6464#16 | ||
10732 | # asm 2: pxor 1264(<c=%rcx),<xmm15=%xmm15 | ||
10733 | pxor 1264(%rcx),%xmm15 | ||
10734 | |||
10735 | # qhasm: shuffle bytes of xmm15 by SRM0 | ||
10736 | # asm 1: pshufb SRM0,<xmm15=int6464#16 | ||
10737 | # asm 2: pshufb SRM0,<xmm15=%xmm15 | ||
10738 | pshufb SRM0,%xmm15 | ||
10739 | |||
10740 | # qhasm: xmm13 ^= xmm14 | ||
10741 | # asm 1: pxor <xmm14=int6464#15,<xmm13=int6464#14 | ||
10742 | # asm 2: pxor <xmm14=%xmm14,<xmm13=%xmm13 | ||
10743 | pxor %xmm14,%xmm13 | ||
10744 | |||
10745 | # qhasm: xmm10 ^= xmm9 | ||
10746 | # asm 1: pxor <xmm9=int6464#10,<xmm10=int6464#11 | ||
10747 | # asm 2: pxor <xmm9=%xmm9,<xmm10=%xmm10 | ||
10748 | pxor %xmm9,%xmm10 | ||
10749 | |||
10750 | # qhasm: xmm13 ^= xmm8 | ||
10751 | # asm 1: pxor <xmm8=int6464#9,<xmm13=int6464#14 | ||
10752 | # asm 2: pxor <xmm8=%xmm8,<xmm13=%xmm13 | ||
10753 | pxor %xmm8,%xmm13 | ||
10754 | |||
10755 | # qhasm: xmm14 ^= xmm10 | ||
10756 | # asm 1: pxor <xmm10=int6464#11,<xmm14=int6464#15 | ||
10757 | # asm 2: pxor <xmm10=%xmm10,<xmm14=%xmm14 | ||
10758 | pxor %xmm10,%xmm14 | ||
10759 | |||
10760 | # qhasm: xmm11 ^= xmm8 | ||
10761 | # asm 1: pxor <xmm8=int6464#9,<xmm11=int6464#12 | ||
10762 | # asm 2: pxor <xmm8=%xmm8,<xmm11=%xmm11 | ||
10763 | pxor %xmm8,%xmm11 | ||
10764 | |||
10765 | # qhasm: xmm14 ^= xmm11 | ||
10766 | # asm 1: pxor <xmm11=int6464#12,<xmm14=int6464#15 | ||
10767 | # asm 2: pxor <xmm11=%xmm11,<xmm14=%xmm14 | ||
10768 | pxor %xmm11,%xmm14 | ||
10769 | |||
10770 | # qhasm: xmm11 ^= xmm15 | ||
10771 | # asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#12 | ||
10772 | # asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm11 | ||
10773 | pxor %xmm15,%xmm11 | ||
10774 | |||
10775 | # qhasm: xmm11 ^= xmm12 | ||
10776 | # asm 1: pxor <xmm12=int6464#13,<xmm11=int6464#12 | ||
10777 | # asm 2: pxor <xmm12=%xmm12,<xmm11=%xmm11 | ||
10778 | pxor %xmm12,%xmm11 | ||
10779 | |||
10780 | # qhasm: xmm15 ^= xmm13 | ||
10781 | # asm 1: pxor <xmm13=int6464#14,<xmm15=int6464#16 | ||
10782 | # asm 2: pxor <xmm13=%xmm13,<xmm15=%xmm15 | ||
10783 | pxor %xmm13,%xmm15 | ||
10784 | |||
10785 | # qhasm: xmm11 ^= xmm9 | ||
10786 | # asm 1: pxor <xmm9=int6464#10,<xmm11=int6464#12 | ||
10787 | # asm 2: pxor <xmm9=%xmm9,<xmm11=%xmm11 | ||
10788 | pxor %xmm9,%xmm11 | ||
10789 | |||
10790 | # qhasm: xmm12 ^= xmm13 | ||
10791 | # asm 1: pxor <xmm13=int6464#14,<xmm12=int6464#13 | ||
10792 | # asm 2: pxor <xmm13=%xmm13,<xmm12=%xmm12 | ||
10793 | pxor %xmm13,%xmm12 | ||
10794 | |||
10795 | # qhasm: xmm10 ^= xmm15 | ||
10796 | # asm 1: pxor <xmm15=int6464#16,<xmm10=int6464#11 | ||
10797 | # asm 2: pxor <xmm15=%xmm15,<xmm10=%xmm10 | ||
10798 | pxor %xmm15,%xmm10 | ||
10799 | |||
10800 | # qhasm: xmm9 ^= xmm13 | ||
10801 | # asm 1: pxor <xmm13=int6464#14,<xmm9=int6464#10 | ||
10802 | # asm 2: pxor <xmm13=%xmm13,<xmm9=%xmm9 | ||
10803 | pxor %xmm13,%xmm9 | ||
10804 | |||
10805 | # qhasm: xmm3 = xmm15 | ||
10806 | # asm 1: movdqa <xmm15=int6464#16,>xmm3=int6464#1 | ||
10807 | # asm 2: movdqa <xmm15=%xmm15,>xmm3=%xmm0 | ||
10808 | movdqa %xmm15,%xmm0 | ||
10809 | |||
10810 | # qhasm: xmm2 = xmm9 | ||
10811 | # asm 1: movdqa <xmm9=int6464#10,>xmm2=int6464#2 | ||
10812 | # asm 2: movdqa <xmm9=%xmm9,>xmm2=%xmm1 | ||
10813 | movdqa %xmm9,%xmm1 | ||
10814 | |||
10815 | # qhasm: xmm1 = xmm13 | ||
10816 | # asm 1: movdqa <xmm13=int6464#14,>xmm1=int6464#3 | ||
10817 | # asm 2: movdqa <xmm13=%xmm13,>xmm1=%xmm2 | ||
10818 | movdqa %xmm13,%xmm2 | ||
10819 | |||
10820 | # qhasm: xmm5 = xmm10 | ||
10821 | # asm 1: movdqa <xmm10=int6464#11,>xmm5=int6464#4 | ||
10822 | # asm 2: movdqa <xmm10=%xmm10,>xmm5=%xmm3 | ||
10823 | movdqa %xmm10,%xmm3 | ||
10824 | |||
10825 | # qhasm: xmm4 = xmm14 | ||
10826 | # asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#5 | ||
10827 | # asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm4 | ||
10828 | movdqa %xmm14,%xmm4 | ||
10829 | |||
10830 | # qhasm: xmm3 ^= xmm12 | ||
10831 | # asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#1 | ||
10832 | # asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm0 | ||
10833 | pxor %xmm12,%xmm0 | ||
10834 | |||
10835 | # qhasm: xmm2 ^= xmm10 | ||
10836 | # asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#2 | ||
10837 | # asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm1 | ||
10838 | pxor %xmm10,%xmm1 | ||
10839 | |||
10840 | # qhasm: xmm1 ^= xmm11 | ||
10841 | # asm 1: pxor <xmm11=int6464#12,<xmm1=int6464#3 | ||
10842 | # asm 2: pxor <xmm11=%xmm11,<xmm1=%xmm2 | ||
10843 | pxor %xmm11,%xmm2 | ||
10844 | |||
10845 | # qhasm: xmm5 ^= xmm12 | ||
10846 | # asm 1: pxor <xmm12=int6464#13,<xmm5=int6464#4 | ||
10847 | # asm 2: pxor <xmm12=%xmm12,<xmm5=%xmm3 | ||
10848 | pxor %xmm12,%xmm3 | ||
10849 | |||
10850 | # qhasm: xmm4 ^= xmm8 | ||
10851 | # asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#5 | ||
10852 | # asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm4 | ||
10853 | pxor %xmm8,%xmm4 | ||
10854 | |||
10855 | # qhasm: xmm6 = xmm3 | ||
10856 | # asm 1: movdqa <xmm3=int6464#1,>xmm6=int6464#6 | ||
10857 | # asm 2: movdqa <xmm3=%xmm0,>xmm6=%xmm5 | ||
10858 | movdqa %xmm0,%xmm5 | ||
10859 | |||
10860 | # qhasm: xmm0 = xmm2 | ||
10861 | # asm 1: movdqa <xmm2=int6464#2,>xmm0=int6464#7 | ||
10862 | # asm 2: movdqa <xmm2=%xmm1,>xmm0=%xmm6 | ||
10863 | movdqa %xmm1,%xmm6 | ||
10864 | |||
10865 | # qhasm: xmm7 = xmm3 | ||
10866 | # asm 1: movdqa <xmm3=int6464#1,>xmm7=int6464#8 | ||
10867 | # asm 2: movdqa <xmm3=%xmm0,>xmm7=%xmm7 | ||
10868 | movdqa %xmm0,%xmm7 | ||
10869 | |||
10870 | # qhasm: xmm2 |= xmm1 | ||
10871 | # asm 1: por <xmm1=int6464#3,<xmm2=int6464#2 | ||
10872 | # asm 2: por <xmm1=%xmm2,<xmm2=%xmm1 | ||
10873 | por %xmm2,%xmm1 | ||
10874 | |||
10875 | # qhasm: xmm3 |= xmm4 | ||
10876 | # asm 1: por <xmm4=int6464#5,<xmm3=int6464#1 | ||
10877 | # asm 2: por <xmm4=%xmm4,<xmm3=%xmm0 | ||
10878 | por %xmm4,%xmm0 | ||
10879 | |||
10880 | # qhasm: xmm7 ^= xmm0 | ||
10881 | # asm 1: pxor <xmm0=int6464#7,<xmm7=int6464#8 | ||
10882 | # asm 2: pxor <xmm0=%xmm6,<xmm7=%xmm7 | ||
10883 | pxor %xmm6,%xmm7 | ||
10884 | |||
10885 | # qhasm: xmm6 &= xmm4 | ||
10886 | # asm 1: pand <xmm4=int6464#5,<xmm6=int6464#6 | ||
10887 | # asm 2: pand <xmm4=%xmm4,<xmm6=%xmm5 | ||
10888 | pand %xmm4,%xmm5 | ||
10889 | |||
10890 | # qhasm: xmm0 &= xmm1 | ||
10891 | # asm 1: pand <xmm1=int6464#3,<xmm0=int6464#7 | ||
10892 | # asm 2: pand <xmm1=%xmm2,<xmm0=%xmm6 | ||
10893 | pand %xmm2,%xmm6 | ||
10894 | |||
10895 | # qhasm: xmm4 ^= xmm1 | ||
10896 | # asm 1: pxor <xmm1=int6464#3,<xmm4=int6464#5 | ||
10897 | # asm 2: pxor <xmm1=%xmm2,<xmm4=%xmm4 | ||
10898 | pxor %xmm2,%xmm4 | ||
10899 | |||
10900 | # qhasm: xmm7 &= xmm4 | ||
10901 | # asm 1: pand <xmm4=int6464#5,<xmm7=int6464#8 | ||
10902 | # asm 2: pand <xmm4=%xmm4,<xmm7=%xmm7 | ||
10903 | pand %xmm4,%xmm7 | ||
10904 | |||
10905 | # qhasm: xmm4 = xmm11 | ||
10906 | # asm 1: movdqa <xmm11=int6464#12,>xmm4=int6464#3 | ||
10907 | # asm 2: movdqa <xmm11=%xmm11,>xmm4=%xmm2 | ||
10908 | movdqa %xmm11,%xmm2 | ||
10909 | |||
10910 | # qhasm: xmm4 ^= xmm8 | ||
10911 | # asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#3 | ||
10912 | # asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm2 | ||
10913 | pxor %xmm8,%xmm2 | ||
10914 | |||
10915 | # qhasm: xmm5 &= xmm4 | ||
10916 | # asm 1: pand <xmm4=int6464#3,<xmm5=int6464#4 | ||
10917 | # asm 2: pand <xmm4=%xmm2,<xmm5=%xmm3 | ||
10918 | pand %xmm2,%xmm3 | ||
10919 | |||
10920 | # qhasm: xmm3 ^= xmm5 | ||
10921 | # asm 1: pxor <xmm5=int6464#4,<xmm3=int6464#1 | ||
10922 | # asm 2: pxor <xmm5=%xmm3,<xmm3=%xmm0 | ||
10923 | pxor %xmm3,%xmm0 | ||
10924 | |||
10925 | # qhasm: xmm2 ^= xmm5 | ||
10926 | # asm 1: pxor <xmm5=int6464#4,<xmm2=int6464#2 | ||
10927 | # asm 2: pxor <xmm5=%xmm3,<xmm2=%xmm1 | ||
10928 | pxor %xmm3,%xmm1 | ||
10929 | |||
10930 | # qhasm: xmm5 = xmm15 | ||
10931 | # asm 1: movdqa <xmm15=int6464#16,>xmm5=int6464#3 | ||
10932 | # asm 2: movdqa <xmm15=%xmm15,>xmm5=%xmm2 | ||
10933 | movdqa %xmm15,%xmm2 | ||
10934 | |||
10935 | # qhasm: xmm5 ^= xmm9 | ||
10936 | # asm 1: pxor <xmm9=int6464#10,<xmm5=int6464#3 | ||
10937 | # asm 2: pxor <xmm9=%xmm9,<xmm5=%xmm2 | ||
10938 | pxor %xmm9,%xmm2 | ||
10939 | |||
10940 | # qhasm: xmm4 = xmm13 | ||
10941 | # asm 1: movdqa <xmm13=int6464#14,>xmm4=int6464#4 | ||
10942 | # asm 2: movdqa <xmm13=%xmm13,>xmm4=%xmm3 | ||
10943 | movdqa %xmm13,%xmm3 | ||
10944 | |||
10945 | # qhasm: xmm1 = xmm5 | ||
10946 | # asm 1: movdqa <xmm5=int6464#3,>xmm1=int6464#5 | ||
10947 | # asm 2: movdqa <xmm5=%xmm2,>xmm1=%xmm4 | ||
10948 | movdqa %xmm2,%xmm4 | ||
10949 | |||
10950 | # qhasm: xmm4 ^= xmm14 | ||
10951 | # asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#4 | ||
10952 | # asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm3 | ||
10953 | pxor %xmm14,%xmm3 | ||
10954 | |||
10955 | # qhasm: xmm1 |= xmm4 | ||
10956 | # asm 1: por <xmm4=int6464#4,<xmm1=int6464#5 | ||
10957 | # asm 2: por <xmm4=%xmm3,<xmm1=%xmm4 | ||
10958 | por %xmm3,%xmm4 | ||
10959 | |||
10960 | # qhasm: xmm5 &= xmm4 | ||
10961 | # asm 1: pand <xmm4=int6464#4,<xmm5=int6464#3 | ||
10962 | # asm 2: pand <xmm4=%xmm3,<xmm5=%xmm2 | ||
10963 | pand %xmm3,%xmm2 | ||
10964 | |||
10965 | # qhasm: xmm0 ^= xmm5 | ||
10966 | # asm 1: pxor <xmm5=int6464#3,<xmm0=int6464#7 | ||
10967 | # asm 2: pxor <xmm5=%xmm2,<xmm0=%xmm6 | ||
10968 | pxor %xmm2,%xmm6 | ||
10969 | |||
10970 | # qhasm: xmm3 ^= xmm7 | ||
10971 | # asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#1 | ||
10972 | # asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm0 | ||
10973 | pxor %xmm7,%xmm0 | ||
10974 | |||
10975 | # qhasm: xmm2 ^= xmm6 | ||
10976 | # asm 1: pxor <xmm6=int6464#6,<xmm2=int6464#2 | ||
10977 | # asm 2: pxor <xmm6=%xmm5,<xmm2=%xmm1 | ||
10978 | pxor %xmm5,%xmm1 | ||
10979 | |||
10980 | # qhasm: xmm1 ^= xmm7 | ||
10981 | # asm 1: pxor <xmm7=int6464#8,<xmm1=int6464#5 | ||
10982 | # asm 2: pxor <xmm7=%xmm7,<xmm1=%xmm4 | ||
10983 | pxor %xmm7,%xmm4 | ||
10984 | |||
10985 | # qhasm: xmm0 ^= xmm6 | ||
10986 | # asm 1: pxor <xmm6=int6464#6,<xmm0=int6464#7 | ||
10987 | # asm 2: pxor <xmm6=%xmm5,<xmm0=%xmm6 | ||
10988 | pxor %xmm5,%xmm6 | ||
10989 | |||
10990 | # qhasm: xmm1 ^= xmm6 | ||
10991 | # asm 1: pxor <xmm6=int6464#6,<xmm1=int6464#5 | ||
10992 | # asm 2: pxor <xmm6=%xmm5,<xmm1=%xmm4 | ||
10993 | pxor %xmm5,%xmm4 | ||
10994 | |||
10995 | # qhasm: xmm4 = xmm10 | ||
10996 | # asm 1: movdqa <xmm10=int6464#11,>xmm4=int6464#3 | ||
10997 | # asm 2: movdqa <xmm10=%xmm10,>xmm4=%xmm2 | ||
10998 | movdqa %xmm10,%xmm2 | ||
10999 | |||
11000 | # qhasm: xmm5 = xmm12 | ||
11001 | # asm 1: movdqa <xmm12=int6464#13,>xmm5=int6464#4 | ||
11002 | # asm 2: movdqa <xmm12=%xmm12,>xmm5=%xmm3 | ||
11003 | movdqa %xmm12,%xmm3 | ||
11004 | |||
11005 | # qhasm: xmm6 = xmm9 | ||
11006 | # asm 1: movdqa <xmm9=int6464#10,>xmm6=int6464#6 | ||
11007 | # asm 2: movdqa <xmm9=%xmm9,>xmm6=%xmm5 | ||
11008 | movdqa %xmm9,%xmm5 | ||
11009 | |||
11010 | # qhasm: xmm7 = xmm15 | ||
11011 | # asm 1: movdqa <xmm15=int6464#16,>xmm7=int6464#8 | ||
11012 | # asm 2: movdqa <xmm15=%xmm15,>xmm7=%xmm7 | ||
11013 | movdqa %xmm15,%xmm7 | ||
11014 | |||
11015 | # qhasm: xmm4 &= xmm11 | ||
11016 | # asm 1: pand <xmm11=int6464#12,<xmm4=int6464#3 | ||
11017 | # asm 2: pand <xmm11=%xmm11,<xmm4=%xmm2 | ||
11018 | pand %xmm11,%xmm2 | ||
11019 | |||
11020 | # qhasm: xmm5 &= xmm8 | ||
11021 | # asm 1: pand <xmm8=int6464#9,<xmm5=int6464#4 | ||
11022 | # asm 2: pand <xmm8=%xmm8,<xmm5=%xmm3 | ||
11023 | pand %xmm8,%xmm3 | ||
11024 | |||
11025 | # qhasm: xmm6 &= xmm13 | ||
11026 | # asm 1: pand <xmm13=int6464#14,<xmm6=int6464#6 | ||
11027 | # asm 2: pand <xmm13=%xmm13,<xmm6=%xmm5 | ||
11028 | pand %xmm13,%xmm5 | ||
11029 | |||
11030 | # qhasm: xmm7 |= xmm14 | ||
11031 | # asm 1: por <xmm14=int6464#15,<xmm7=int6464#8 | ||
11032 | # asm 2: por <xmm14=%xmm14,<xmm7=%xmm7 | ||
11033 | por %xmm14,%xmm7 | ||
11034 | |||
11035 | # qhasm: xmm3 ^= xmm4 | ||
11036 | # asm 1: pxor <xmm4=int6464#3,<xmm3=int6464#1 | ||
11037 | # asm 2: pxor <xmm4=%xmm2,<xmm3=%xmm0 | ||
11038 | pxor %xmm2,%xmm0 | ||
11039 | |||
11040 | # qhasm: xmm2 ^= xmm5 | ||
11041 | # asm 1: pxor <xmm5=int6464#4,<xmm2=int6464#2 | ||
11042 | # asm 2: pxor <xmm5=%xmm3,<xmm2=%xmm1 | ||
11043 | pxor %xmm3,%xmm1 | ||
11044 | |||
11045 | # qhasm: xmm1 ^= xmm6 | ||
11046 | # asm 1: pxor <xmm6=int6464#6,<xmm1=int6464#5 | ||
11047 | # asm 2: pxor <xmm6=%xmm5,<xmm1=%xmm4 | ||
11048 | pxor %xmm5,%xmm4 | ||
11049 | |||
11050 | # qhasm: xmm0 ^= xmm7 | ||
11051 | # asm 1: pxor <xmm7=int6464#8,<xmm0=int6464#7 | ||
11052 | # asm 2: pxor <xmm7=%xmm7,<xmm0=%xmm6 | ||
11053 | pxor %xmm7,%xmm6 | ||
11054 | |||
11055 | # qhasm: xmm4 = xmm3 | ||
11056 | # asm 1: movdqa <xmm3=int6464#1,>xmm4=int6464#3 | ||
11057 | # asm 2: movdqa <xmm3=%xmm0,>xmm4=%xmm2 | ||
11058 | movdqa %xmm0,%xmm2 | ||
11059 | |||
11060 | # qhasm: xmm4 ^= xmm2 | ||
11061 | # asm 1: pxor <xmm2=int6464#2,<xmm4=int6464#3 | ||
11062 | # asm 2: pxor <xmm2=%xmm1,<xmm4=%xmm2 | ||
11063 | pxor %xmm1,%xmm2 | ||
11064 | |||
11065 | # qhasm: xmm3 &= xmm1 | ||
11066 | # asm 1: pand <xmm1=int6464#5,<xmm3=int6464#1 | ||
11067 | # asm 2: pand <xmm1=%xmm4,<xmm3=%xmm0 | ||
11068 | pand %xmm4,%xmm0 | ||
11069 | |||
11070 | # qhasm: xmm6 = xmm0 | ||
11071 | # asm 1: movdqa <xmm0=int6464#7,>xmm6=int6464#4 | ||
11072 | # asm 2: movdqa <xmm0=%xmm6,>xmm6=%xmm3 | ||
11073 | movdqa %xmm6,%xmm3 | ||
11074 | |||
11075 | # qhasm: xmm6 ^= xmm3 | ||
11076 | # asm 1: pxor <xmm3=int6464#1,<xmm6=int6464#4 | ||
11077 | # asm 2: pxor <xmm3=%xmm0,<xmm6=%xmm3 | ||
11078 | pxor %xmm0,%xmm3 | ||
11079 | |||
11080 | # qhasm: xmm7 = xmm4 | ||
11081 | # asm 1: movdqa <xmm4=int6464#3,>xmm7=int6464#6 | ||
11082 | # asm 2: movdqa <xmm4=%xmm2,>xmm7=%xmm5 | ||
11083 | movdqa %xmm2,%xmm5 | ||
11084 | |||
11085 | # qhasm: xmm7 &= xmm6 | ||
11086 | # asm 1: pand <xmm6=int6464#4,<xmm7=int6464#6 | ||
11087 | # asm 2: pand <xmm6=%xmm3,<xmm7=%xmm5 | ||
11088 | pand %xmm3,%xmm5 | ||
11089 | |||
11090 | # qhasm: xmm7 ^= xmm2 | ||
11091 | # asm 1: pxor <xmm2=int6464#2,<xmm7=int6464#6 | ||
11092 | # asm 2: pxor <xmm2=%xmm1,<xmm7=%xmm5 | ||
11093 | pxor %xmm1,%xmm5 | ||
11094 | |||
11095 | # qhasm: xmm5 = xmm1 | ||
11096 | # asm 1: movdqa <xmm1=int6464#5,>xmm5=int6464#8 | ||
11097 | # asm 2: movdqa <xmm1=%xmm4,>xmm5=%xmm7 | ||
11098 | movdqa %xmm4,%xmm7 | ||
11099 | |||
11100 | # qhasm: xmm5 ^= xmm0 | ||
11101 | # asm 1: pxor <xmm0=int6464#7,<xmm5=int6464#8 | ||
11102 | # asm 2: pxor <xmm0=%xmm6,<xmm5=%xmm7 | ||
11103 | pxor %xmm6,%xmm7 | ||
11104 | |||
11105 | # qhasm: xmm3 ^= xmm2 | ||
11106 | # asm 1: pxor <xmm2=int6464#2,<xmm3=int6464#1 | ||
11107 | # asm 2: pxor <xmm2=%xmm1,<xmm3=%xmm0 | ||
11108 | pxor %xmm1,%xmm0 | ||
11109 | |||
11110 | # qhasm: xmm5 &= xmm3 | ||
11111 | # asm 1: pand <xmm3=int6464#1,<xmm5=int6464#8 | ||
11112 | # asm 2: pand <xmm3=%xmm0,<xmm5=%xmm7 | ||
11113 | pand %xmm0,%xmm7 | ||
11114 | |||
11115 | # qhasm: xmm5 ^= xmm0 | ||
11116 | # asm 1: pxor <xmm0=int6464#7,<xmm5=int6464#8 | ||
11117 | # asm 2: pxor <xmm0=%xmm6,<xmm5=%xmm7 | ||
11118 | pxor %xmm6,%xmm7 | ||
11119 | |||
11120 | # qhasm: xmm1 ^= xmm5 | ||
11121 | # asm 1: pxor <xmm5=int6464#8,<xmm1=int6464#5 | ||
11122 | # asm 2: pxor <xmm5=%xmm7,<xmm1=%xmm4 | ||
11123 | pxor %xmm7,%xmm4 | ||
11124 | |||
11125 | # qhasm: xmm2 = xmm6 | ||
11126 | # asm 1: movdqa <xmm6=int6464#4,>xmm2=int6464#1 | ||
11127 | # asm 2: movdqa <xmm6=%xmm3,>xmm2=%xmm0 | ||
11128 | movdqa %xmm3,%xmm0 | ||
11129 | |||
11130 | # qhasm: xmm2 ^= xmm5 | ||
11131 | # asm 1: pxor <xmm5=int6464#8,<xmm2=int6464#1 | ||
11132 | # asm 2: pxor <xmm5=%xmm7,<xmm2=%xmm0 | ||
11133 | pxor %xmm7,%xmm0 | ||
11134 | |||
11135 | # qhasm: xmm2 &= xmm0 | ||
11136 | # asm 1: pand <xmm0=int6464#7,<xmm2=int6464#1 | ||
11137 | # asm 2: pand <xmm0=%xmm6,<xmm2=%xmm0 | ||
11138 | pand %xmm6,%xmm0 | ||
11139 | |||
11140 | # qhasm: xmm1 ^= xmm2 | ||
11141 | # asm 1: pxor <xmm2=int6464#1,<xmm1=int6464#5 | ||
11142 | # asm 2: pxor <xmm2=%xmm0,<xmm1=%xmm4 | ||
11143 | pxor %xmm0,%xmm4 | ||
11144 | |||
11145 | # qhasm: xmm6 ^= xmm2 | ||
11146 | # asm 1: pxor <xmm2=int6464#1,<xmm6=int6464#4 | ||
11147 | # asm 2: pxor <xmm2=%xmm0,<xmm6=%xmm3 | ||
11148 | pxor %xmm0,%xmm3 | ||
11149 | |||
11150 | # qhasm: xmm6 &= xmm7 | ||
11151 | # asm 1: pand <xmm7=int6464#6,<xmm6=int6464#4 | ||
11152 | # asm 2: pand <xmm7=%xmm5,<xmm6=%xmm3 | ||
11153 | pand %xmm5,%xmm3 | ||
11154 | |||
11155 | # qhasm: xmm6 ^= xmm4 | ||
11156 | # asm 1: pxor <xmm4=int6464#3,<xmm6=int6464#4 | ||
11157 | # asm 2: pxor <xmm4=%xmm2,<xmm6=%xmm3 | ||
11158 | pxor %xmm2,%xmm3 | ||
11159 | |||
11160 | # qhasm: xmm4 = xmm14 | ||
11161 | # asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#1 | ||
11162 | # asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm0 | ||
11163 | movdqa %xmm14,%xmm0 | ||
11164 | |||
11165 | # qhasm: xmm0 = xmm13 | ||
11166 | # asm 1: movdqa <xmm13=int6464#14,>xmm0=int6464#2 | ||
11167 | # asm 2: movdqa <xmm13=%xmm13,>xmm0=%xmm1 | ||
11168 | movdqa %xmm13,%xmm1 | ||
11169 | |||
11170 | # qhasm: xmm2 = xmm7 | ||
11171 | # asm 1: movdqa <xmm7=int6464#6,>xmm2=int6464#3 | ||
11172 | # asm 2: movdqa <xmm7=%xmm5,>xmm2=%xmm2 | ||
11173 | movdqa %xmm5,%xmm2 | ||
11174 | |||
11175 | # qhasm: xmm2 ^= xmm6 | ||
11176 | # asm 1: pxor <xmm6=int6464#4,<xmm2=int6464#3 | ||
11177 | # asm 2: pxor <xmm6=%xmm3,<xmm2=%xmm2 | ||
11178 | pxor %xmm3,%xmm2 | ||
11179 | |||
11180 | # qhasm: xmm2 &= xmm14 | ||
11181 | # asm 1: pand <xmm14=int6464#15,<xmm2=int6464#3 | ||
11182 | # asm 2: pand <xmm14=%xmm14,<xmm2=%xmm2 | ||
11183 | pand %xmm14,%xmm2 | ||
11184 | |||
11185 | # qhasm: xmm14 ^= xmm13 | ||
11186 | # asm 1: pxor <xmm13=int6464#14,<xmm14=int6464#15 | ||
11187 | # asm 2: pxor <xmm13=%xmm13,<xmm14=%xmm14 | ||
11188 | pxor %xmm13,%xmm14 | ||
11189 | |||
11190 | # qhasm: xmm14 &= xmm6 | ||
11191 | # asm 1: pand <xmm6=int6464#4,<xmm14=int6464#15 | ||
11192 | # asm 2: pand <xmm6=%xmm3,<xmm14=%xmm14 | ||
11193 | pand %xmm3,%xmm14 | ||
11194 | |||
11195 | # qhasm: xmm13 &= xmm7 | ||
11196 | # asm 1: pand <xmm7=int6464#6,<xmm13=int6464#14 | ||
11197 | # asm 2: pand <xmm7=%xmm5,<xmm13=%xmm13 | ||
11198 | pand %xmm5,%xmm13 | ||
11199 | |||
11200 | # qhasm: xmm14 ^= xmm13 | ||
11201 | # asm 1: pxor <xmm13=int6464#14,<xmm14=int6464#15 | ||
11202 | # asm 2: pxor <xmm13=%xmm13,<xmm14=%xmm14 | ||
11203 | pxor %xmm13,%xmm14 | ||
11204 | |||
11205 | # qhasm: xmm13 ^= xmm2 | ||
11206 | # asm 1: pxor <xmm2=int6464#3,<xmm13=int6464#14 | ||
11207 | # asm 2: pxor <xmm2=%xmm2,<xmm13=%xmm13 | ||
11208 | pxor %xmm2,%xmm13 | ||
11209 | |||
11210 | # qhasm: xmm4 ^= xmm8 | ||
11211 | # asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#1 | ||
11212 | # asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm0 | ||
11213 | pxor %xmm8,%xmm0 | ||
11214 | |||
11215 | # qhasm: xmm0 ^= xmm11 | ||
11216 | # asm 1: pxor <xmm11=int6464#12,<xmm0=int6464#2 | ||
11217 | # asm 2: pxor <xmm11=%xmm11,<xmm0=%xmm1 | ||
11218 | pxor %xmm11,%xmm1 | ||
11219 | |||
11220 | # qhasm: xmm7 ^= xmm5 | ||
11221 | # asm 1: pxor <xmm5=int6464#8,<xmm7=int6464#6 | ||
11222 | # asm 2: pxor <xmm5=%xmm7,<xmm7=%xmm5 | ||
11223 | pxor %xmm7,%xmm5 | ||
11224 | |||
11225 | # qhasm: xmm6 ^= xmm1 | ||
11226 | # asm 1: pxor <xmm1=int6464#5,<xmm6=int6464#4 | ||
11227 | # asm 2: pxor <xmm1=%xmm4,<xmm6=%xmm3 | ||
11228 | pxor %xmm4,%xmm3 | ||
11229 | |||
11230 | # qhasm: xmm3 = xmm7 | ||
11231 | # asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3 | ||
11232 | # asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2 | ||
11233 | movdqa %xmm5,%xmm2 | ||
11234 | |||
11235 | # qhasm: xmm3 ^= xmm6 | ||
11236 | # asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3 | ||
11237 | # asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2 | ||
11238 | pxor %xmm3,%xmm2 | ||
11239 | |||
11240 | # qhasm: xmm3 &= xmm4 | ||
11241 | # asm 1: pand <xmm4=int6464#1,<xmm3=int6464#3 | ||
11242 | # asm 2: pand <xmm4=%xmm0,<xmm3=%xmm2 | ||
11243 | pand %xmm0,%xmm2 | ||
11244 | |||
11245 | # qhasm: xmm4 ^= xmm0 | ||
11246 | # asm 1: pxor <xmm0=int6464#2,<xmm4=int6464#1 | ||
11247 | # asm 2: pxor <xmm0=%xmm1,<xmm4=%xmm0 | ||
11248 | pxor %xmm1,%xmm0 | ||
11249 | |||
11250 | # qhasm: xmm4 &= xmm6 | ||
11251 | # asm 1: pand <xmm6=int6464#4,<xmm4=int6464#1 | ||
11252 | # asm 2: pand <xmm6=%xmm3,<xmm4=%xmm0 | ||
11253 | pand %xmm3,%xmm0 | ||
11254 | |||
11255 | # qhasm: xmm0 &= xmm7 | ||
11256 | # asm 1: pand <xmm7=int6464#6,<xmm0=int6464#2 | ||
11257 | # asm 2: pand <xmm7=%xmm5,<xmm0=%xmm1 | ||
11258 | pand %xmm5,%xmm1 | ||
11259 | |||
11260 | # qhasm: xmm0 ^= xmm4 | ||
11261 | # asm 1: pxor <xmm4=int6464#1,<xmm0=int6464#2 | ||
11262 | # asm 2: pxor <xmm4=%xmm0,<xmm0=%xmm1 | ||
11263 | pxor %xmm0,%xmm1 | ||
11264 | |||
11265 | # qhasm: xmm4 ^= xmm3 | ||
11266 | # asm 1: pxor <xmm3=int6464#3,<xmm4=int6464#1 | ||
11267 | # asm 2: pxor <xmm3=%xmm2,<xmm4=%xmm0 | ||
11268 | pxor %xmm2,%xmm0 | ||
11269 | |||
11270 | # qhasm: xmm2 = xmm5 | ||
11271 | # asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3 | ||
11272 | # asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2 | ||
11273 | movdqa %xmm7,%xmm2 | ||
11274 | |||
11275 | # qhasm: xmm2 ^= xmm1 | ||
11276 | # asm 1: pxor <xmm1=int6464#5,<xmm2=int6464#3 | ||
11277 | # asm 2: pxor <xmm1=%xmm4,<xmm2=%xmm2 | ||
11278 | pxor %xmm4,%xmm2 | ||
11279 | |||
11280 | # qhasm: xmm2 &= xmm8 | ||
11281 | # asm 1: pand <xmm8=int6464#9,<xmm2=int6464#3 | ||
11282 | # asm 2: pand <xmm8=%xmm8,<xmm2=%xmm2 | ||
11283 | pand %xmm8,%xmm2 | ||
11284 | |||
11285 | # qhasm: xmm8 ^= xmm11 | ||
11286 | # asm 1: pxor <xmm11=int6464#12,<xmm8=int6464#9 | ||
11287 | # asm 2: pxor <xmm11=%xmm11,<xmm8=%xmm8 | ||
11288 | pxor %xmm11,%xmm8 | ||
11289 | |||
11290 | # qhasm: xmm8 &= xmm1 | ||
11291 | # asm 1: pand <xmm1=int6464#5,<xmm8=int6464#9 | ||
11292 | # asm 2: pand <xmm1=%xmm4,<xmm8=%xmm8 | ||
11293 | pand %xmm4,%xmm8 | ||
11294 | |||
11295 | # qhasm: xmm11 &= xmm5 | ||
11296 | # asm 1: pand <xmm5=int6464#8,<xmm11=int6464#12 | ||
11297 | # asm 2: pand <xmm5=%xmm7,<xmm11=%xmm11 | ||
11298 | pand %xmm7,%xmm11 | ||
11299 | |||
11300 | # qhasm: xmm8 ^= xmm11 | ||
11301 | # asm 1: pxor <xmm11=int6464#12,<xmm8=int6464#9 | ||
11302 | # asm 2: pxor <xmm11=%xmm11,<xmm8=%xmm8 | ||
11303 | pxor %xmm11,%xmm8 | ||
11304 | |||
11305 | # qhasm: xmm11 ^= xmm2 | ||
11306 | # asm 1: pxor <xmm2=int6464#3,<xmm11=int6464#12 | ||
11307 | # asm 2: pxor <xmm2=%xmm2,<xmm11=%xmm11 | ||
11308 | pxor %xmm2,%xmm11 | ||
11309 | |||
11310 | # qhasm: xmm14 ^= xmm4 | ||
11311 | # asm 1: pxor <xmm4=int6464#1,<xmm14=int6464#15 | ||
11312 | # asm 2: pxor <xmm4=%xmm0,<xmm14=%xmm14 | ||
11313 | pxor %xmm0,%xmm14 | ||
11314 | |||
11315 | # qhasm: xmm8 ^= xmm4 | ||
11316 | # asm 1: pxor <xmm4=int6464#1,<xmm8=int6464#9 | ||
11317 | # asm 2: pxor <xmm4=%xmm0,<xmm8=%xmm8 | ||
11318 | pxor %xmm0,%xmm8 | ||
11319 | |||
11320 | # qhasm: xmm13 ^= xmm0 | ||
11321 | # asm 1: pxor <xmm0=int6464#2,<xmm13=int6464#14 | ||
11322 | # asm 2: pxor <xmm0=%xmm1,<xmm13=%xmm13 | ||
11323 | pxor %xmm1,%xmm13 | ||
11324 | |||
11325 | # qhasm: xmm11 ^= xmm0 | ||
11326 | # asm 1: pxor <xmm0=int6464#2,<xmm11=int6464#12 | ||
11327 | # asm 2: pxor <xmm0=%xmm1,<xmm11=%xmm11 | ||
11328 | pxor %xmm1,%xmm11 | ||
11329 | |||
11330 | # qhasm: xmm4 = xmm15 | ||
11331 | # asm 1: movdqa <xmm15=int6464#16,>xmm4=int6464#1 | ||
11332 | # asm 2: movdqa <xmm15=%xmm15,>xmm4=%xmm0 | ||
11333 | movdqa %xmm15,%xmm0 | ||
11334 | |||
11335 | # qhasm: xmm0 = xmm9 | ||
11336 | # asm 1: movdqa <xmm9=int6464#10,>xmm0=int6464#2 | ||
11337 | # asm 2: movdqa <xmm9=%xmm9,>xmm0=%xmm1 | ||
11338 | movdqa %xmm9,%xmm1 | ||
11339 | |||
11340 | # qhasm: xmm4 ^= xmm12 | ||
11341 | # asm 1: pxor <xmm12=int6464#13,<xmm4=int6464#1 | ||
11342 | # asm 2: pxor <xmm12=%xmm12,<xmm4=%xmm0 | ||
11343 | pxor %xmm12,%xmm0 | ||
11344 | |||
11345 | # qhasm: xmm0 ^= xmm10 | ||
11346 | # asm 1: pxor <xmm10=int6464#11,<xmm0=int6464#2 | ||
11347 | # asm 2: pxor <xmm10=%xmm10,<xmm0=%xmm1 | ||
11348 | pxor %xmm10,%xmm1 | ||
11349 | |||
11350 | # qhasm: xmm3 = xmm7 | ||
11351 | # asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3 | ||
11352 | # asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2 | ||
11353 | movdqa %xmm5,%xmm2 | ||
11354 | |||
11355 | # qhasm: xmm3 ^= xmm6 | ||
11356 | # asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3 | ||
11357 | # asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2 | ||
11358 | pxor %xmm3,%xmm2 | ||
11359 | |||
11360 | # qhasm: xmm3 &= xmm4 | ||
11361 | # asm 1: pand <xmm4=int6464#1,<xmm3=int6464#3 | ||
11362 | # asm 2: pand <xmm4=%xmm0,<xmm3=%xmm2 | ||
11363 | pand %xmm0,%xmm2 | ||
11364 | |||
11365 | # qhasm: xmm4 ^= xmm0 | ||
11366 | # asm 1: pxor <xmm0=int6464#2,<xmm4=int6464#1 | ||
11367 | # asm 2: pxor <xmm0=%xmm1,<xmm4=%xmm0 | ||
11368 | pxor %xmm1,%xmm0 | ||
11369 | |||
11370 | # qhasm: xmm4 &= xmm6 | ||
11371 | # asm 1: pand <xmm6=int6464#4,<xmm4=int6464#1 | ||
11372 | # asm 2: pand <xmm6=%xmm3,<xmm4=%xmm0 | ||
11373 | pand %xmm3,%xmm0 | ||
11374 | |||
11375 | # qhasm: xmm0 &= xmm7 | ||
11376 | # asm 1: pand <xmm7=int6464#6,<xmm0=int6464#2 | ||
11377 | # asm 2: pand <xmm7=%xmm5,<xmm0=%xmm1 | ||
11378 | pand %xmm5,%xmm1 | ||
11379 | |||
11380 | # qhasm: xmm0 ^= xmm4 | ||
11381 | # asm 1: pxor <xmm4=int6464#1,<xmm0=int6464#2 | ||
11382 | # asm 2: pxor <xmm4=%xmm0,<xmm0=%xmm1 | ||
11383 | pxor %xmm0,%xmm1 | ||
11384 | |||
11385 | # qhasm: xmm4 ^= xmm3 | ||
11386 | # asm 1: pxor <xmm3=int6464#3,<xmm4=int6464#1 | ||
11387 | # asm 2: pxor <xmm3=%xmm2,<xmm4=%xmm0 | ||
11388 | pxor %xmm2,%xmm0 | ||
11389 | |||
11390 | # qhasm: xmm2 = xmm5 | ||
11391 | # asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3 | ||
11392 | # asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2 | ||
11393 | movdqa %xmm7,%xmm2 | ||
11394 | |||
11395 | # qhasm: xmm2 ^= xmm1 | ||
11396 | # asm 1: pxor <xmm1=int6464#5,<xmm2=int6464#3 | ||
11397 | # asm 2: pxor <xmm1=%xmm4,<xmm2=%xmm2 | ||
11398 | pxor %xmm4,%xmm2 | ||
11399 | |||
11400 | # qhasm: xmm2 &= xmm12 | ||
11401 | # asm 1: pand <xmm12=int6464#13,<xmm2=int6464#3 | ||
11402 | # asm 2: pand <xmm12=%xmm12,<xmm2=%xmm2 | ||
11403 | pand %xmm12,%xmm2 | ||
11404 | |||
11405 | # qhasm: xmm12 ^= xmm10 | ||
11406 | # asm 1: pxor <xmm10=int6464#11,<xmm12=int6464#13 | ||
11407 | # asm 2: pxor <xmm10=%xmm10,<xmm12=%xmm12 | ||
11408 | pxor %xmm10,%xmm12 | ||
11409 | |||
11410 | # qhasm: xmm12 &= xmm1 | ||
11411 | # asm 1: pand <xmm1=int6464#5,<xmm12=int6464#13 | ||
11412 | # asm 2: pand <xmm1=%xmm4,<xmm12=%xmm12 | ||
11413 | pand %xmm4,%xmm12 | ||
11414 | |||
11415 | # qhasm: xmm10 &= xmm5 | ||
11416 | # asm 1: pand <xmm5=int6464#8,<xmm10=int6464#11 | ||
11417 | # asm 2: pand <xmm5=%xmm7,<xmm10=%xmm10 | ||
11418 | pand %xmm7,%xmm10 | ||
11419 | |||
11420 | # qhasm: xmm12 ^= xmm10 | ||
11421 | # asm 1: pxor <xmm10=int6464#11,<xmm12=int6464#13 | ||
11422 | # asm 2: pxor <xmm10=%xmm10,<xmm12=%xmm12 | ||
11423 | pxor %xmm10,%xmm12 | ||
11424 | |||
11425 | # qhasm: xmm10 ^= xmm2 | ||
11426 | # asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#11 | ||
11427 | # asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm10 | ||
11428 | pxor %xmm2,%xmm10 | ||
11429 | |||
11430 | # qhasm: xmm7 ^= xmm5 | ||
11431 | # asm 1: pxor <xmm5=int6464#8,<xmm7=int6464#6 | ||
11432 | # asm 2: pxor <xmm5=%xmm7,<xmm7=%xmm5 | ||
11433 | pxor %xmm7,%xmm5 | ||
11434 | |||
11435 | # qhasm: xmm6 ^= xmm1 | ||
11436 | # asm 1: pxor <xmm1=int6464#5,<xmm6=int6464#4 | ||
11437 | # asm 2: pxor <xmm1=%xmm4,<xmm6=%xmm3 | ||
11438 | pxor %xmm4,%xmm3 | ||
11439 | |||
11440 | # qhasm: xmm3 = xmm7 | ||
11441 | # asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3 | ||
11442 | # asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2 | ||
11443 | movdqa %xmm5,%xmm2 | ||
11444 | |||
11445 | # qhasm: xmm3 ^= xmm6 | ||
11446 | # asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3 | ||
11447 | # asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2 | ||
11448 | pxor %xmm3,%xmm2 | ||
11449 | |||
11450 | # qhasm: xmm3 &= xmm15 | ||
11451 | # asm 1: pand <xmm15=int6464#16,<xmm3=int6464#3 | ||
11452 | # asm 2: pand <xmm15=%xmm15,<xmm3=%xmm2 | ||
11453 | pand %xmm15,%xmm2 | ||
11454 | |||
11455 | # qhasm: xmm15 ^= xmm9 | ||
11456 | # asm 1: pxor <xmm9=int6464#10,<xmm15=int6464#16 | ||
11457 | # asm 2: pxor <xmm9=%xmm9,<xmm15=%xmm15 | ||
11458 | pxor %xmm9,%xmm15 | ||
11459 | |||
11460 | # qhasm: xmm15 &= xmm6 | ||
11461 | # asm 1: pand <xmm6=int6464#4,<xmm15=int6464#16 | ||
11462 | # asm 2: pand <xmm6=%xmm3,<xmm15=%xmm15 | ||
11463 | pand %xmm3,%xmm15 | ||
11464 | |||
11465 | # qhasm: xmm9 &= xmm7 | ||
11466 | # asm 1: pand <xmm7=int6464#6,<xmm9=int6464#10 | ||
11467 | # asm 2: pand <xmm7=%xmm5,<xmm9=%xmm9 | ||
11468 | pand %xmm5,%xmm9 | ||
11469 | |||
11470 | # qhasm: xmm15 ^= xmm9 | ||
11471 | # asm 1: pxor <xmm9=int6464#10,<xmm15=int6464#16 | ||
11472 | # asm 2: pxor <xmm9=%xmm9,<xmm15=%xmm15 | ||
11473 | pxor %xmm9,%xmm15 | ||
11474 | |||
11475 | # qhasm: xmm9 ^= xmm3 | ||
11476 | # asm 1: pxor <xmm3=int6464#3,<xmm9=int6464#10 | ||
11477 | # asm 2: pxor <xmm3=%xmm2,<xmm9=%xmm9 | ||
11478 | pxor %xmm2,%xmm9 | ||
11479 | |||
11480 | # qhasm: xmm15 ^= xmm4 | ||
11481 | # asm 1: pxor <xmm4=int6464#1,<xmm15=int6464#16 | ||
11482 | # asm 2: pxor <xmm4=%xmm0,<xmm15=%xmm15 | ||
11483 | pxor %xmm0,%xmm15 | ||
11484 | |||
11485 | # qhasm: xmm12 ^= xmm4 | ||
11486 | # asm 1: pxor <xmm4=int6464#1,<xmm12=int6464#13 | ||
11487 | # asm 2: pxor <xmm4=%xmm0,<xmm12=%xmm12 | ||
11488 | pxor %xmm0,%xmm12 | ||
11489 | |||
11490 | # qhasm: xmm9 ^= xmm0 | ||
11491 | # asm 1: pxor <xmm0=int6464#2,<xmm9=int6464#10 | ||
11492 | # asm 2: pxor <xmm0=%xmm1,<xmm9=%xmm9 | ||
11493 | pxor %xmm1,%xmm9 | ||
11494 | |||
11495 | # qhasm: xmm10 ^= xmm0 | ||
11496 | # asm 1: pxor <xmm0=int6464#2,<xmm10=int6464#11 | ||
11497 | # asm 2: pxor <xmm0=%xmm1,<xmm10=%xmm10 | ||
11498 | pxor %xmm1,%xmm10 | ||
11499 | |||
11500 | # qhasm: xmm15 ^= xmm8 | ||
11501 | # asm 1: pxor <xmm8=int6464#9,<xmm15=int6464#16 | ||
11502 | # asm 2: pxor <xmm8=%xmm8,<xmm15=%xmm15 | ||
11503 | pxor %xmm8,%xmm15 | ||
11504 | |||
11505 | # qhasm: xmm9 ^= xmm14 | ||
11506 | # asm 1: pxor <xmm14=int6464#15,<xmm9=int6464#10 | ||
11507 | # asm 2: pxor <xmm14=%xmm14,<xmm9=%xmm9 | ||
11508 | pxor %xmm14,%xmm9 | ||
11509 | |||
11510 | # qhasm: xmm12 ^= xmm15 | ||
11511 | # asm 1: pxor <xmm15=int6464#16,<xmm12=int6464#13 | ||
11512 | # asm 2: pxor <xmm15=%xmm15,<xmm12=%xmm12 | ||
11513 | pxor %xmm15,%xmm12 | ||
11514 | |||
11515 | # qhasm: xmm14 ^= xmm8 | ||
11516 | # asm 1: pxor <xmm8=int6464#9,<xmm14=int6464#15 | ||
11517 | # asm 2: pxor <xmm8=%xmm8,<xmm14=%xmm14 | ||
11518 | pxor %xmm8,%xmm14 | ||
11519 | |||
11520 | # qhasm: xmm8 ^= xmm9 | ||
11521 | # asm 1: pxor <xmm9=int6464#10,<xmm8=int6464#9 | ||
11522 | # asm 2: pxor <xmm9=%xmm9,<xmm8=%xmm8 | ||
11523 | pxor %xmm9,%xmm8 | ||
11524 | |||
11525 | # qhasm: xmm9 ^= xmm13 | ||
11526 | # asm 1: pxor <xmm13=int6464#14,<xmm9=int6464#10 | ||
11527 | # asm 2: pxor <xmm13=%xmm13,<xmm9=%xmm9 | ||
11528 | pxor %xmm13,%xmm9 | ||
11529 | |||
11530 | # qhasm: xmm13 ^= xmm10 | ||
11531 | # asm 1: pxor <xmm10=int6464#11,<xmm13=int6464#14 | ||
11532 | # asm 2: pxor <xmm10=%xmm10,<xmm13=%xmm13 | ||
11533 | pxor %xmm10,%xmm13 | ||
11534 | |||
11535 | # qhasm: xmm12 ^= xmm13 | ||
11536 | # asm 1: pxor <xmm13=int6464#14,<xmm12=int6464#13 | ||
11537 | # asm 2: pxor <xmm13=%xmm13,<xmm12=%xmm12 | ||
11538 | pxor %xmm13,%xmm12 | ||
11539 | |||
11540 | # qhasm: xmm10 ^= xmm11 | ||
11541 | # asm 1: pxor <xmm11=int6464#12,<xmm10=int6464#11 | ||
11542 | # asm 2: pxor <xmm11=%xmm11,<xmm10=%xmm10 | ||
11543 | pxor %xmm11,%xmm10 | ||
11544 | |||
11545 | # qhasm: xmm11 ^= xmm13 | ||
11546 | # asm 1: pxor <xmm13=int6464#14,<xmm11=int6464#12 | ||
11547 | # asm 2: pxor <xmm13=%xmm13,<xmm11=%xmm11 | ||
11548 | pxor %xmm13,%xmm11 | ||
11549 | |||
11550 | # qhasm: xmm14 ^= xmm11 | ||
11551 | # asm 1: pxor <xmm11=int6464#12,<xmm14=int6464#15 | ||
11552 | # asm 2: pxor <xmm11=%xmm11,<xmm14=%xmm14 | ||
11553 | pxor %xmm11,%xmm14 | ||
11554 | |||
11555 | # qhasm: xmm8 ^= *(int128 *)(c + 1280) | ||
11556 | # asm 1: pxor 1280(<c=int64#4),<xmm8=int6464#9 | ||
11557 | # asm 2: pxor 1280(<c=%rcx),<xmm8=%xmm8 | ||
11558 | pxor 1280(%rcx),%xmm8 | ||
11559 | |||
11560 | # qhasm: xmm9 ^= *(int128 *)(c + 1296) | ||
11561 | # asm 1: pxor 1296(<c=int64#4),<xmm9=int6464#10 | ||
11562 | # asm 2: pxor 1296(<c=%rcx),<xmm9=%xmm9 | ||
11563 | pxor 1296(%rcx),%xmm9 | ||
11564 | |||
11565 | # qhasm: xmm12 ^= *(int128 *)(c + 1312) | ||
11566 | # asm 1: pxor 1312(<c=int64#4),<xmm12=int6464#13 | ||
11567 | # asm 2: pxor 1312(<c=%rcx),<xmm12=%xmm12 | ||
11568 | pxor 1312(%rcx),%xmm12 | ||
11569 | |||
11570 | # qhasm: xmm14 ^= *(int128 *)(c + 1328) | ||
11571 | # asm 1: pxor 1328(<c=int64#4),<xmm14=int6464#15 | ||
11572 | # asm 2: pxor 1328(<c=%rcx),<xmm14=%xmm14 | ||
11573 | pxor 1328(%rcx),%xmm14 | ||
11574 | |||
11575 | # qhasm: xmm11 ^= *(int128 *)(c + 1344) | ||
11576 | # asm 1: pxor 1344(<c=int64#4),<xmm11=int6464#12 | ||
11577 | # asm 2: pxor 1344(<c=%rcx),<xmm11=%xmm11 | ||
11578 | pxor 1344(%rcx),%xmm11 | ||
11579 | |||
11580 | # qhasm: xmm15 ^= *(int128 *)(c + 1360) | ||
11581 | # asm 1: pxor 1360(<c=int64#4),<xmm15=int6464#16 | ||
11582 | # asm 2: pxor 1360(<c=%rcx),<xmm15=%xmm15 | ||
11583 | pxor 1360(%rcx),%xmm15 | ||
11584 | |||
11585 | # qhasm: xmm10 ^= *(int128 *)(c + 1376) | ||
11586 | # asm 1: pxor 1376(<c=int64#4),<xmm10=int6464#11 | ||
11587 | # asm 2: pxor 1376(<c=%rcx),<xmm10=%xmm10 | ||
11588 | pxor 1376(%rcx),%xmm10 | ||
11589 | |||
11590 | # qhasm: xmm13 ^= *(int128 *)(c + 1392) | ||
11591 | # asm 1: pxor 1392(<c=int64#4),<xmm13=int6464#14 | ||
11592 | # asm 2: pxor 1392(<c=%rcx),<xmm13=%xmm13 | ||
11593 | pxor 1392(%rcx),%xmm13 | ||
11594 | |||
11595 | # qhasm: xmm0 = xmm10 | ||
11596 | # asm 1: movdqa <xmm10=int6464#11,>xmm0=int6464#1 | ||
11597 | # asm 2: movdqa <xmm10=%xmm10,>xmm0=%xmm0 | ||
11598 | movdqa %xmm10,%xmm0 | ||
11599 | |||
11600 | # qhasm: uint6464 xmm0 >>= 1 | ||
11601 | # asm 1: psrlq $1,<xmm0=int6464#1 | ||
11602 | # asm 2: psrlq $1,<xmm0=%xmm0 | ||
11603 | psrlq $1,%xmm0 | ||
11604 | |||
11605 | # qhasm: xmm0 ^= xmm13 | ||
11606 | # asm 1: pxor <xmm13=int6464#14,<xmm0=int6464#1 | ||
11607 | # asm 2: pxor <xmm13=%xmm13,<xmm0=%xmm0 | ||
11608 | pxor %xmm13,%xmm0 | ||
11609 | |||
11610 | # qhasm: xmm0 &= BS0 | ||
11611 | # asm 1: pand BS0,<xmm0=int6464#1 | ||
11612 | # asm 2: pand BS0,<xmm0=%xmm0 | ||
11613 | pand BS0,%xmm0 | ||
11614 | |||
11615 | # qhasm: xmm13 ^= xmm0 | ||
11616 | # asm 1: pxor <xmm0=int6464#1,<xmm13=int6464#14 | ||
11617 | # asm 2: pxor <xmm0=%xmm0,<xmm13=%xmm13 | ||
11618 | pxor %xmm0,%xmm13 | ||
11619 | |||
11620 | # qhasm: uint6464 xmm0 <<= 1 | ||
11621 | # asm 1: psllq $1,<xmm0=int6464#1 | ||
11622 | # asm 2: psllq $1,<xmm0=%xmm0 | ||
11623 | psllq $1,%xmm0 | ||
11624 | |||
11625 | # qhasm: xmm10 ^= xmm0 | ||
11626 | # asm 1: pxor <xmm0=int6464#1,<xmm10=int6464#11 | ||
11627 | # asm 2: pxor <xmm0=%xmm0,<xmm10=%xmm10 | ||
11628 | pxor %xmm0,%xmm10 | ||
11629 | |||
11630 | # qhasm: xmm0 = xmm11 | ||
11631 | # asm 1: movdqa <xmm11=int6464#12,>xmm0=int6464#1 | ||
11632 | # asm 2: movdqa <xmm11=%xmm11,>xmm0=%xmm0 | ||
11633 | movdqa %xmm11,%xmm0 | ||
11634 | |||
11635 | # qhasm: uint6464 xmm0 >>= 1 | ||
11636 | # asm 1: psrlq $1,<xmm0=int6464#1 | ||
11637 | # asm 2: psrlq $1,<xmm0=%xmm0 | ||
11638 | psrlq $1,%xmm0 | ||
11639 | |||
11640 | # qhasm: xmm0 ^= xmm15 | ||
11641 | # asm 1: pxor <xmm15=int6464#16,<xmm0=int6464#1 | ||
11642 | # asm 2: pxor <xmm15=%xmm15,<xmm0=%xmm0 | ||
11643 | pxor %xmm15,%xmm0 | ||
11644 | |||
11645 | # qhasm: xmm0 &= BS0 | ||
11646 | # asm 1: pand BS0,<xmm0=int6464#1 | ||
11647 | # asm 2: pand BS0,<xmm0=%xmm0 | ||
11648 | pand BS0,%xmm0 | ||
11649 | |||
11650 | # qhasm: xmm15 ^= xmm0 | ||
11651 | # asm 1: pxor <xmm0=int6464#1,<xmm15=int6464#16 | ||
11652 | # asm 2: pxor <xmm0=%xmm0,<xmm15=%xmm15 | ||
11653 | pxor %xmm0,%xmm15 | ||
11654 | |||
11655 | # qhasm: uint6464 xmm0 <<= 1 | ||
11656 | # asm 1: psllq $1,<xmm0=int6464#1 | ||
11657 | # asm 2: psllq $1,<xmm0=%xmm0 | ||
11658 | psllq $1,%xmm0 | ||
11659 | |||
11660 | # qhasm: xmm11 ^= xmm0 | ||
11661 | # asm 1: pxor <xmm0=int6464#1,<xmm11=int6464#12 | ||
11662 | # asm 2: pxor <xmm0=%xmm0,<xmm11=%xmm11 | ||
11663 | pxor %xmm0,%xmm11 | ||
11664 | |||
11665 | # qhasm: xmm0 = xmm12 | ||
11666 | # asm 1: movdqa <xmm12=int6464#13,>xmm0=int6464#1 | ||
11667 | # asm 2: movdqa <xmm12=%xmm12,>xmm0=%xmm0 | ||
11668 | movdqa %xmm12,%xmm0 | ||
11669 | |||
11670 | # qhasm: uint6464 xmm0 >>= 1 | ||
11671 | # asm 1: psrlq $1,<xmm0=int6464#1 | ||
11672 | # asm 2: psrlq $1,<xmm0=%xmm0 | ||
11673 | psrlq $1,%xmm0 | ||
11674 | |||
11675 | # qhasm: xmm0 ^= xmm14 | ||
11676 | # asm 1: pxor <xmm14=int6464#15,<xmm0=int6464#1 | ||
11677 | # asm 2: pxor <xmm14=%xmm14,<xmm0=%xmm0 | ||
11678 | pxor %xmm14,%xmm0 | ||
11679 | |||
11680 | # qhasm: xmm0 &= BS0 | ||
11681 | # asm 1: pand BS0,<xmm0=int6464#1 | ||
11682 | # asm 2: pand BS0,<xmm0=%xmm0 | ||
11683 | pand BS0,%xmm0 | ||
11684 | |||
11685 | # qhasm: xmm14 ^= xmm0 | ||
11686 | # asm 1: pxor <xmm0=int6464#1,<xmm14=int6464#15 | ||
11687 | # asm 2: pxor <xmm0=%xmm0,<xmm14=%xmm14 | ||
11688 | pxor %xmm0,%xmm14 | ||
11689 | |||
11690 | # qhasm: uint6464 xmm0 <<= 1 | ||
11691 | # asm 1: psllq $1,<xmm0=int6464#1 | ||
11692 | # asm 2: psllq $1,<xmm0=%xmm0 | ||
11693 | psllq $1,%xmm0 | ||
11694 | |||
11695 | # qhasm: xmm12 ^= xmm0 | ||
11696 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13 | ||
11697 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12 | ||
11698 | pxor %xmm0,%xmm12 | ||
11699 | |||
11700 | # qhasm: xmm0 = xmm8 | ||
11701 | # asm 1: movdqa <xmm8=int6464#9,>xmm0=int6464#1 | ||
11702 | # asm 2: movdqa <xmm8=%xmm8,>xmm0=%xmm0 | ||
11703 | movdqa %xmm8,%xmm0 | ||
11704 | |||
11705 | # qhasm: uint6464 xmm0 >>= 1 | ||
11706 | # asm 1: psrlq $1,<xmm0=int6464#1 | ||
11707 | # asm 2: psrlq $1,<xmm0=%xmm0 | ||
11708 | psrlq $1,%xmm0 | ||
11709 | |||
11710 | # qhasm: xmm0 ^= xmm9 | ||
11711 | # asm 1: pxor <xmm9=int6464#10,<xmm0=int6464#1 | ||
11712 | # asm 2: pxor <xmm9=%xmm9,<xmm0=%xmm0 | ||
11713 | pxor %xmm9,%xmm0 | ||
11714 | |||
11715 | # qhasm: xmm0 &= BS0 | ||
11716 | # asm 1: pand BS0,<xmm0=int6464#1 | ||
11717 | # asm 2: pand BS0,<xmm0=%xmm0 | ||
11718 | pand BS0,%xmm0 | ||
11719 | |||
11720 | # qhasm: xmm9 ^= xmm0 | ||
11721 | # asm 1: pxor <xmm0=int6464#1,<xmm9=int6464#10 | ||
11722 | # asm 2: pxor <xmm0=%xmm0,<xmm9=%xmm9 | ||
11723 | pxor %xmm0,%xmm9 | ||
11724 | |||
11725 | # qhasm: uint6464 xmm0 <<= 1 | ||
11726 | # asm 1: psllq $1,<xmm0=int6464#1 | ||
11727 | # asm 2: psllq $1,<xmm0=%xmm0 | ||
11728 | psllq $1,%xmm0 | ||
11729 | |||
11730 | # qhasm: xmm8 ^= xmm0 | ||
11731 | # asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9 | ||
11732 | # asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8 | ||
11733 | pxor %xmm0,%xmm8 | ||
11734 | |||
11735 | # qhasm: xmm0 = xmm15 | ||
11736 | # asm 1: movdqa <xmm15=int6464#16,>xmm0=int6464#1 | ||
11737 | # asm 2: movdqa <xmm15=%xmm15,>xmm0=%xmm0 | ||
11738 | movdqa %xmm15,%xmm0 | ||
11739 | |||
11740 | # qhasm: uint6464 xmm0 >>= 2 | ||
11741 | # asm 1: psrlq $2,<xmm0=int6464#1 | ||
11742 | # asm 2: psrlq $2,<xmm0=%xmm0 | ||
11743 | psrlq $2,%xmm0 | ||
11744 | |||
11745 | # qhasm: xmm0 ^= xmm13 | ||
11746 | # asm 1: pxor <xmm13=int6464#14,<xmm0=int6464#1 | ||
11747 | # asm 2: pxor <xmm13=%xmm13,<xmm0=%xmm0 | ||
11748 | pxor %xmm13,%xmm0 | ||
11749 | |||
11750 | # qhasm: xmm0 &= BS1 | ||
11751 | # asm 1: pand BS1,<xmm0=int6464#1 | ||
11752 | # asm 2: pand BS1,<xmm0=%xmm0 | ||
11753 | pand BS1,%xmm0 | ||
11754 | |||
11755 | # qhasm: xmm13 ^= xmm0 | ||
11756 | # asm 1: pxor <xmm0=int6464#1,<xmm13=int6464#14 | ||
11757 | # asm 2: pxor <xmm0=%xmm0,<xmm13=%xmm13 | ||
11758 | pxor %xmm0,%xmm13 | ||
11759 | |||
11760 | # qhasm: uint6464 xmm0 <<= 2 | ||
11761 | # asm 1: psllq $2,<xmm0=int6464#1 | ||
11762 | # asm 2: psllq $2,<xmm0=%xmm0 | ||
11763 | psllq $2,%xmm0 | ||
11764 | |||
11765 | # qhasm: xmm15 ^= xmm0 | ||
11766 | # asm 1: pxor <xmm0=int6464#1,<xmm15=int6464#16 | ||
11767 | # asm 2: pxor <xmm0=%xmm0,<xmm15=%xmm15 | ||
11768 | pxor %xmm0,%xmm15 | ||
11769 | |||
11770 | # qhasm: xmm0 = xmm11 | ||
11771 | # asm 1: movdqa <xmm11=int6464#12,>xmm0=int6464#1 | ||
11772 | # asm 2: movdqa <xmm11=%xmm11,>xmm0=%xmm0 | ||
11773 | movdqa %xmm11,%xmm0 | ||
11774 | |||
11775 | # qhasm: uint6464 xmm0 >>= 2 | ||
11776 | # asm 1: psrlq $2,<xmm0=int6464#1 | ||
11777 | # asm 2: psrlq $2,<xmm0=%xmm0 | ||
11778 | psrlq $2,%xmm0 | ||
11779 | |||
11780 | # qhasm: xmm0 ^= xmm10 | ||
11781 | # asm 1: pxor <xmm10=int6464#11,<xmm0=int6464#1 | ||
11782 | # asm 2: pxor <xmm10=%xmm10,<xmm0=%xmm0 | ||
11783 | pxor %xmm10,%xmm0 | ||
11784 | |||
11785 | # qhasm: xmm0 &= BS1 | ||
11786 | # asm 1: pand BS1,<xmm0=int6464#1 | ||
11787 | # asm 2: pand BS1,<xmm0=%xmm0 | ||
11788 | pand BS1,%xmm0 | ||
11789 | |||
11790 | # qhasm: xmm10 ^= xmm0 | ||
11791 | # asm 1: pxor <xmm0=int6464#1,<xmm10=int6464#11 | ||
11792 | # asm 2: pxor <xmm0=%xmm0,<xmm10=%xmm10 | ||
11793 | pxor %xmm0,%xmm10 | ||
11794 | |||
11795 | # qhasm: uint6464 xmm0 <<= 2 | ||
11796 | # asm 1: psllq $2,<xmm0=int6464#1 | ||
11797 | # asm 2: psllq $2,<xmm0=%xmm0 | ||
11798 | psllq $2,%xmm0 | ||
11799 | |||
11800 | # qhasm: xmm11 ^= xmm0 | ||
11801 | # asm 1: pxor <xmm0=int6464#1,<xmm11=int6464#12 | ||
11802 | # asm 2: pxor <xmm0=%xmm0,<xmm11=%xmm11 | ||
11803 | pxor %xmm0,%xmm11 | ||
11804 | |||
11805 | # qhasm: xmm0 = xmm9 | ||
11806 | # asm 1: movdqa <xmm9=int6464#10,>xmm0=int6464#1 | ||
11807 | # asm 2: movdqa <xmm9=%xmm9,>xmm0=%xmm0 | ||
11808 | movdqa %xmm9,%xmm0 | ||
11809 | |||
11810 | # qhasm: uint6464 xmm0 >>= 2 | ||
11811 | # asm 1: psrlq $2,<xmm0=int6464#1 | ||
11812 | # asm 2: psrlq $2,<xmm0=%xmm0 | ||
11813 | psrlq $2,%xmm0 | ||
11814 | |||
11815 | # qhasm: xmm0 ^= xmm14 | ||
11816 | # asm 1: pxor <xmm14=int6464#15,<xmm0=int6464#1 | ||
11817 | # asm 2: pxor <xmm14=%xmm14,<xmm0=%xmm0 | ||
11818 | pxor %xmm14,%xmm0 | ||
11819 | |||
11820 | # qhasm: xmm0 &= BS1 | ||
11821 | # asm 1: pand BS1,<xmm0=int6464#1 | ||
11822 | # asm 2: pand BS1,<xmm0=%xmm0 | ||
11823 | pand BS1,%xmm0 | ||
11824 | |||
11825 | # qhasm: xmm14 ^= xmm0 | ||
11826 | # asm 1: pxor <xmm0=int6464#1,<xmm14=int6464#15 | ||
11827 | # asm 2: pxor <xmm0=%xmm0,<xmm14=%xmm14 | ||
11828 | pxor %xmm0,%xmm14 | ||
11829 | |||
11830 | # qhasm: uint6464 xmm0 <<= 2 | ||
11831 | # asm 1: psllq $2,<xmm0=int6464#1 | ||
11832 | # asm 2: psllq $2,<xmm0=%xmm0 | ||
11833 | psllq $2,%xmm0 | ||
11834 | |||
11835 | # qhasm: xmm9 ^= xmm0 | ||
11836 | # asm 1: pxor <xmm0=int6464#1,<xmm9=int6464#10 | ||
11837 | # asm 2: pxor <xmm0=%xmm0,<xmm9=%xmm9 | ||
11838 | pxor %xmm0,%xmm9 | ||
11839 | |||
11840 | # qhasm: xmm0 = xmm8 | ||
11841 | # asm 1: movdqa <xmm8=int6464#9,>xmm0=int6464#1 | ||
11842 | # asm 2: movdqa <xmm8=%xmm8,>xmm0=%xmm0 | ||
11843 | movdqa %xmm8,%xmm0 | ||
11844 | |||
11845 | # qhasm: uint6464 xmm0 >>= 2 | ||
11846 | # asm 1: psrlq $2,<xmm0=int6464#1 | ||
11847 | # asm 2: psrlq $2,<xmm0=%xmm0 | ||
11848 | psrlq $2,%xmm0 | ||
11849 | |||
11850 | # qhasm: xmm0 ^= xmm12 | ||
11851 | # asm 1: pxor <xmm12=int6464#13,<xmm0=int6464#1 | ||
11852 | # asm 2: pxor <xmm12=%xmm12,<xmm0=%xmm0 | ||
11853 | pxor %xmm12,%xmm0 | ||
11854 | |||
11855 | # qhasm: xmm0 &= BS1 | ||
11856 | # asm 1: pand BS1,<xmm0=int6464#1 | ||
11857 | # asm 2: pand BS1,<xmm0=%xmm0 | ||
11858 | pand BS1,%xmm0 | ||
11859 | |||
11860 | # qhasm: xmm12 ^= xmm0 | ||
11861 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13 | ||
11862 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12 | ||
11863 | pxor %xmm0,%xmm12 | ||
11864 | |||
11865 | # qhasm: uint6464 xmm0 <<= 2 | ||
11866 | # asm 1: psllq $2,<xmm0=int6464#1 | ||
11867 | # asm 2: psllq $2,<xmm0=%xmm0 | ||
11868 | psllq $2,%xmm0 | ||
11869 | |||
11870 | # qhasm: xmm8 ^= xmm0 | ||
11871 | # asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9 | ||
11872 | # asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8 | ||
11873 | pxor %xmm0,%xmm8 | ||
11874 | |||
11875 | # qhasm: xmm0 = xmm14 | ||
11876 | # asm 1: movdqa <xmm14=int6464#15,>xmm0=int6464#1 | ||
11877 | # asm 2: movdqa <xmm14=%xmm14,>xmm0=%xmm0 | ||
11878 | movdqa %xmm14,%xmm0 | ||
11879 | |||
11880 | # qhasm: uint6464 xmm0 >>= 4 | ||
11881 | # asm 1: psrlq $4,<xmm0=int6464#1 | ||
11882 | # asm 2: psrlq $4,<xmm0=%xmm0 | ||
11883 | psrlq $4,%xmm0 | ||
11884 | |||
11885 | # qhasm: xmm0 ^= xmm13 | ||
11886 | # asm 1: pxor <xmm13=int6464#14,<xmm0=int6464#1 | ||
11887 | # asm 2: pxor <xmm13=%xmm13,<xmm0=%xmm0 | ||
11888 | pxor %xmm13,%xmm0 | ||
11889 | |||
11890 | # qhasm: xmm0 &= BS2 | ||
11891 | # asm 1: pand BS2,<xmm0=int6464#1 | ||
11892 | # asm 2: pand BS2,<xmm0=%xmm0 | ||
11893 | pand BS2,%xmm0 | ||
11894 | |||
11895 | # qhasm: xmm13 ^= xmm0 | ||
11896 | # asm 1: pxor <xmm0=int6464#1,<xmm13=int6464#14 | ||
11897 | # asm 2: pxor <xmm0=%xmm0,<xmm13=%xmm13 | ||
11898 | pxor %xmm0,%xmm13 | ||
11899 | |||
11900 | # qhasm: uint6464 xmm0 <<= 4 | ||
11901 | # asm 1: psllq $4,<xmm0=int6464#1 | ||
11902 | # asm 2: psllq $4,<xmm0=%xmm0 | ||
11903 | psllq $4,%xmm0 | ||
11904 | |||
11905 | # qhasm: xmm14 ^= xmm0 | ||
11906 | # asm 1: pxor <xmm0=int6464#1,<xmm14=int6464#15 | ||
11907 | # asm 2: pxor <xmm0=%xmm0,<xmm14=%xmm14 | ||
11908 | pxor %xmm0,%xmm14 | ||
11909 | |||
11910 | # qhasm: xmm0 = xmm12 | ||
11911 | # asm 1: movdqa <xmm12=int6464#13,>xmm0=int6464#1 | ||
11912 | # asm 2: movdqa <xmm12=%xmm12,>xmm0=%xmm0 | ||
11913 | movdqa %xmm12,%xmm0 | ||
11914 | |||
11915 | # qhasm: uint6464 xmm0 >>= 4 | ||
11916 | # asm 1: psrlq $4,<xmm0=int6464#1 | ||
11917 | # asm 2: psrlq $4,<xmm0=%xmm0 | ||
11918 | psrlq $4,%xmm0 | ||
11919 | |||
11920 | # qhasm: xmm0 ^= xmm10 | ||
11921 | # asm 1: pxor <xmm10=int6464#11,<xmm0=int6464#1 | ||
11922 | # asm 2: pxor <xmm10=%xmm10,<xmm0=%xmm0 | ||
11923 | pxor %xmm10,%xmm0 | ||
11924 | |||
11925 | # qhasm: xmm0 &= BS2 | ||
11926 | # asm 1: pand BS2,<xmm0=int6464#1 | ||
11927 | # asm 2: pand BS2,<xmm0=%xmm0 | ||
11928 | pand BS2,%xmm0 | ||
11929 | |||
11930 | # qhasm: xmm10 ^= xmm0 | ||
11931 | # asm 1: pxor <xmm0=int6464#1,<xmm10=int6464#11 | ||
11932 | # asm 2: pxor <xmm0=%xmm0,<xmm10=%xmm10 | ||
11933 | pxor %xmm0,%xmm10 | ||
11934 | |||
11935 | # qhasm: uint6464 xmm0 <<= 4 | ||
11936 | # asm 1: psllq $4,<xmm0=int6464#1 | ||
11937 | # asm 2: psllq $4,<xmm0=%xmm0 | ||
11938 | psllq $4,%xmm0 | ||
11939 | |||
11940 | # qhasm: xmm12 ^= xmm0 | ||
11941 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13 | ||
11942 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12 | ||
11943 | pxor %xmm0,%xmm12 | ||
11944 | |||
11945 | # qhasm: xmm0 = xmm9 | ||
11946 | # asm 1: movdqa <xmm9=int6464#10,>xmm0=int6464#1 | ||
11947 | # asm 2: movdqa <xmm9=%xmm9,>xmm0=%xmm0 | ||
11948 | movdqa %xmm9,%xmm0 | ||
11949 | |||
11950 | # qhasm: uint6464 xmm0 >>= 4 | ||
11951 | # asm 1: psrlq $4,<xmm0=int6464#1 | ||
11952 | # asm 2: psrlq $4,<xmm0=%xmm0 | ||
11953 | psrlq $4,%xmm0 | ||
11954 | |||
11955 | # qhasm: xmm0 ^= xmm15 | ||
11956 | # asm 1: pxor <xmm15=int6464#16,<xmm0=int6464#1 | ||
11957 | # asm 2: pxor <xmm15=%xmm15,<xmm0=%xmm0 | ||
11958 | pxor %xmm15,%xmm0 | ||
11959 | |||
11960 | # qhasm: xmm0 &= BS2 | ||
11961 | # asm 1: pand BS2,<xmm0=int6464#1 | ||
11962 | # asm 2: pand BS2,<xmm0=%xmm0 | ||
11963 | pand BS2,%xmm0 | ||
11964 | |||
11965 | # qhasm: xmm15 ^= xmm0 | ||
11966 | # asm 1: pxor <xmm0=int6464#1,<xmm15=int6464#16 | ||
11967 | # asm 2: pxor <xmm0=%xmm0,<xmm15=%xmm15 | ||
11968 | pxor %xmm0,%xmm15 | ||
11969 | |||
11970 | # qhasm: uint6464 xmm0 <<= 4 | ||
11971 | # asm 1: psllq $4,<xmm0=int6464#1 | ||
11972 | # asm 2: psllq $4,<xmm0=%xmm0 | ||
11973 | psllq $4,%xmm0 | ||
11974 | |||
11975 | # qhasm: xmm9 ^= xmm0 | ||
11976 | # asm 1: pxor <xmm0=int6464#1,<xmm9=int6464#10 | ||
11977 | # asm 2: pxor <xmm0=%xmm0,<xmm9=%xmm9 | ||
11978 | pxor %xmm0,%xmm9 | ||
11979 | |||
11980 | # qhasm: xmm0 = xmm8 | ||
11981 | # asm 1: movdqa <xmm8=int6464#9,>xmm0=int6464#1 | ||
11982 | # asm 2: movdqa <xmm8=%xmm8,>xmm0=%xmm0 | ||
11983 | movdqa %xmm8,%xmm0 | ||
11984 | |||
11985 | # qhasm: uint6464 xmm0 >>= 4 | ||
11986 | # asm 1: psrlq $4,<xmm0=int6464#1 | ||
11987 | # asm 2: psrlq $4,<xmm0=%xmm0 | ||
11988 | psrlq $4,%xmm0 | ||
11989 | |||
11990 | # qhasm: xmm0 ^= xmm11 | ||
11991 | # asm 1: pxor <xmm11=int6464#12,<xmm0=int6464#1 | ||
11992 | # asm 2: pxor <xmm11=%xmm11,<xmm0=%xmm0 | ||
11993 | pxor %xmm11,%xmm0 | ||
11994 | |||
11995 | # qhasm: xmm0 &= BS2 | ||
11996 | # asm 1: pand BS2,<xmm0=int6464#1 | ||
11997 | # asm 2: pand BS2,<xmm0=%xmm0 | ||
11998 | pand BS2,%xmm0 | ||
11999 | |||
12000 | # qhasm: xmm11 ^= xmm0 | ||
12001 | # asm 1: pxor <xmm0=int6464#1,<xmm11=int6464#12 | ||
12002 | # asm 2: pxor <xmm0=%xmm0,<xmm11=%xmm11 | ||
12003 | pxor %xmm0,%xmm11 | ||
12004 | |||
12005 | # qhasm: uint6464 xmm0 <<= 4 | ||
12006 | # asm 1: psllq $4,<xmm0=int6464#1 | ||
12007 | # asm 2: psllq $4,<xmm0=%xmm0 | ||
12008 | psllq $4,%xmm0 | ||
12009 | |||
12010 | # qhasm: xmm8 ^= xmm0 | ||
12011 | # asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9 | ||
12012 | # asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8 | ||
12013 | pxor %xmm0,%xmm8 | ||
12014 | |||
12015 | # qhasm: unsigned<? =? len-128 | ||
12016 | # asm 1: cmp $128,<len=int64#2 | ||
12017 | # asm 2: cmp $128,<len=%rsi | ||
12018 | cmp $128,%rsi | ||
12019 | # comment:fp stack unchanged by jump | ||
12020 | |||
12021 | # qhasm: goto partial if unsigned< | ||
12022 | jb ._partial | ||
12023 | # comment:fp stack unchanged by jump | ||
12024 | |||
12025 | # qhasm: goto full if = | ||
12026 | je ._full | ||
12027 | |||
12028 | # qhasm: tmp = *(uint32 *)(np + 12) | ||
12029 | # asm 1: movl 12(<np=int64#3),>tmp=int64#5d | ||
12030 | # asm 2: movl 12(<np=%rdx),>tmp=%r8d | ||
12031 | movl 12(%rdx),%r8d | ||
12032 | |||
12033 | # qhasm: (uint32) bswap tmp | ||
12034 | # asm 1: bswap <tmp=int64#5d | ||
12035 | # asm 2: bswap <tmp=%r8d | ||
12036 | bswap %r8d | ||
12037 | |||
12038 | # qhasm: tmp += 8 | ||
12039 | # asm 1: add $8,<tmp=int64#5 | ||
12040 | # asm 2: add $8,<tmp=%r8 | ||
12041 | add $8,%r8 | ||
12042 | |||
12043 | # qhasm: (uint32) bswap tmp | ||
12044 | # asm 1: bswap <tmp=int64#5d | ||
12045 | # asm 2: bswap <tmp=%r8d | ||
12046 | bswap %r8d | ||
12047 | |||
12048 | # qhasm: *(uint32 *)(np + 12) = tmp | ||
12049 | # asm 1: movl <tmp=int64#5d,12(<np=int64#3) | ||
12050 | # asm 2: movl <tmp=%r8d,12(<np=%rdx) | ||
12051 | movl %r8d,12(%rdx) | ||
12052 | |||
12053 | # qhasm: *(int128 *) (outp + 0) = xmm8 | ||
12054 | # asm 1: movdqa <xmm8=int6464#9,0(<outp=int64#1) | ||
12055 | # asm 2: movdqa <xmm8=%xmm8,0(<outp=%rdi) | ||
12056 | movdqa %xmm8,0(%rdi) | ||
12057 | |||
12058 | # qhasm: *(int128 *) (outp + 16) = xmm9 | ||
12059 | # asm 1: movdqa <xmm9=int6464#10,16(<outp=int64#1) | ||
12060 | # asm 2: movdqa <xmm9=%xmm9,16(<outp=%rdi) | ||
12061 | movdqa %xmm9,16(%rdi) | ||
12062 | |||
12063 | # qhasm: *(int128 *) (outp + 32) = xmm12 | ||
12064 | # asm 1: movdqa <xmm12=int6464#13,32(<outp=int64#1) | ||
12065 | # asm 2: movdqa <xmm12=%xmm12,32(<outp=%rdi) | ||
12066 | movdqa %xmm12,32(%rdi) | ||
12067 | |||
12068 | # qhasm: *(int128 *) (outp + 48) = xmm14 | ||
12069 | # asm 1: movdqa <xmm14=int6464#15,48(<outp=int64#1) | ||
12070 | # asm 2: movdqa <xmm14=%xmm14,48(<outp=%rdi) | ||
12071 | movdqa %xmm14,48(%rdi) | ||
12072 | |||
12073 | # qhasm: *(int128 *) (outp + 64) = xmm11 | ||
12074 | # asm 1: movdqa <xmm11=int6464#12,64(<outp=int64#1) | ||
12075 | # asm 2: movdqa <xmm11=%xmm11,64(<outp=%rdi) | ||
12076 | movdqa %xmm11,64(%rdi) | ||
12077 | |||
12078 | # qhasm: *(int128 *) (outp + 80) = xmm15 | ||
12079 | # asm 1: movdqa <xmm15=int6464#16,80(<outp=int64#1) | ||
12080 | # asm 2: movdqa <xmm15=%xmm15,80(<outp=%rdi) | ||
12081 | movdqa %xmm15,80(%rdi) | ||
12082 | |||
12083 | # qhasm: *(int128 *) (outp + 96) = xmm10 | ||
12084 | # asm 1: movdqa <xmm10=int6464#11,96(<outp=int64#1) | ||
12085 | # asm 2: movdqa <xmm10=%xmm10,96(<outp=%rdi) | ||
12086 | movdqa %xmm10,96(%rdi) | ||
12087 | |||
12088 | # qhasm: *(int128 *) (outp + 112) = xmm13 | ||
12089 | # asm 1: movdqa <xmm13=int6464#14,112(<outp=int64#1) | ||
12090 | # asm 2: movdqa <xmm13=%xmm13,112(<outp=%rdi) | ||
12091 | movdqa %xmm13,112(%rdi) | ||
12092 | |||
12093 | # qhasm: len -= 128 | ||
12094 | # asm 1: sub $128,<len=int64#2 | ||
12095 | # asm 2: sub $128,<len=%rsi | ||
12096 | sub $128,%rsi | ||
12097 | |||
12098 | # qhasm: outp += 128 | ||
12099 | # asm 1: add $128,<outp=int64#1 | ||
12100 | # asm 2: add $128,<outp=%rdi | ||
12101 | add $128,%rdi | ||
12102 | # comment:fp stack unchanged by jump | ||
12103 | |||
12104 | # qhasm: goto enc_block | ||
12105 | jmp ._enc_block | ||
12106 | |||
12107 | # qhasm: partial: | ||
12108 | ._partial: | ||
12109 | |||
12110 | # qhasm: lensav = len | ||
12111 | # asm 1: mov <len=int64#2,>lensav=int64#4 | ||
12112 | # asm 2: mov <len=%rsi,>lensav=%rcx | ||
12113 | mov %rsi,%rcx | ||
12114 | |||
12115 | # qhasm: (uint32) len >>= 4 | ||
12116 | # asm 1: shr $4,<len=int64#2d | ||
12117 | # asm 2: shr $4,<len=%esi | ||
12118 | shr $4,%esi | ||
12119 | |||
12120 | # qhasm: tmp = *(uint32 *)(np + 12) | ||
12121 | # asm 1: movl 12(<np=int64#3),>tmp=int64#5d | ||
12122 | # asm 2: movl 12(<np=%rdx),>tmp=%r8d | ||
12123 | movl 12(%rdx),%r8d | ||
12124 | |||
12125 | # qhasm: (uint32) bswap tmp | ||
12126 | # asm 1: bswap <tmp=int64#5d | ||
12127 | # asm 2: bswap <tmp=%r8d | ||
12128 | bswap %r8d | ||
12129 | |||
12130 | # qhasm: tmp += len | ||
12131 | # asm 1: add <len=int64#2,<tmp=int64#5 | ||
12132 | # asm 2: add <len=%rsi,<tmp=%r8 | ||
12133 | add %rsi,%r8 | ||
12134 | |||
12135 | # qhasm: (uint32) bswap tmp | ||
12136 | # asm 1: bswap <tmp=int64#5d | ||
12137 | # asm 2: bswap <tmp=%r8d | ||
12138 | bswap %r8d | ||
12139 | |||
12140 | # qhasm: *(uint32 *)(np + 12) = tmp | ||
12141 | # asm 1: movl <tmp=int64#5d,12(<np=int64#3) | ||
12142 | # asm 2: movl <tmp=%r8d,12(<np=%rdx) | ||
12143 | movl %r8d,12(%rdx) | ||
12144 | |||
12145 | # qhasm: blp = &bl | ||
12146 | # asm 1: leaq <bl=stack1024#1,>blp=int64#2 | ||
12147 | # asm 2: leaq <bl=32(%rsp),>blp=%rsi | ||
12148 | leaq 32(%rsp),%rsi | ||
12149 | |||
12150 | # qhasm: *(int128 *)(blp + 0) = xmm8 | ||
12151 | # asm 1: movdqa <xmm8=int6464#9,0(<blp=int64#2) | ||
12152 | # asm 2: movdqa <xmm8=%xmm8,0(<blp=%rsi) | ||
12153 | movdqa %xmm8,0(%rsi) | ||
12154 | |||
12155 | # qhasm: *(int128 *)(blp + 16) = xmm9 | ||
12156 | # asm 1: movdqa <xmm9=int6464#10,16(<blp=int64#2) | ||
12157 | # asm 2: movdqa <xmm9=%xmm9,16(<blp=%rsi) | ||
12158 | movdqa %xmm9,16(%rsi) | ||
12159 | |||
12160 | # qhasm: *(int128 *)(blp + 32) = xmm12 | ||
12161 | # asm 1: movdqa <xmm12=int6464#13,32(<blp=int64#2) | ||
12162 | # asm 2: movdqa <xmm12=%xmm12,32(<blp=%rsi) | ||
12163 | movdqa %xmm12,32(%rsi) | ||
12164 | |||
12165 | # qhasm: *(int128 *)(blp + 48) = xmm14 | ||
12166 | # asm 1: movdqa <xmm14=int6464#15,48(<blp=int64#2) | ||
12167 | # asm 2: movdqa <xmm14=%xmm14,48(<blp=%rsi) | ||
12168 | movdqa %xmm14,48(%rsi) | ||
12169 | |||
12170 | # qhasm: *(int128 *)(blp + 64) = xmm11 | ||
12171 | # asm 1: movdqa <xmm11=int6464#12,64(<blp=int64#2) | ||
12172 | # asm 2: movdqa <xmm11=%xmm11,64(<blp=%rsi) | ||
12173 | movdqa %xmm11,64(%rsi) | ||
12174 | |||
12175 | # qhasm: *(int128 *)(blp + 80) = xmm15 | ||
12176 | # asm 1: movdqa <xmm15=int6464#16,80(<blp=int64#2) | ||
12177 | # asm 2: movdqa <xmm15=%xmm15,80(<blp=%rsi) | ||
12178 | movdqa %xmm15,80(%rsi) | ||
12179 | |||
12180 | # qhasm: *(int128 *)(blp + 96) = xmm10 | ||
12181 | # asm 1: movdqa <xmm10=int6464#11,96(<blp=int64#2) | ||
12182 | # asm 2: movdqa <xmm10=%xmm10,96(<blp=%rsi) | ||
12183 | movdqa %xmm10,96(%rsi) | ||
12184 | |||
12185 | # qhasm: *(int128 *)(blp + 112) = xmm13 | ||
12186 | # asm 1: movdqa <xmm13=int6464#14,112(<blp=int64#2) | ||
12187 | # asm 2: movdqa <xmm13=%xmm13,112(<blp=%rsi) | ||
12188 | movdqa %xmm13,112(%rsi) | ||
12189 | |||
12190 | # qhasm: bytes: | ||
12191 | ._bytes: | ||
12192 | |||
12193 | # qhasm: =? lensav-0 | ||
12194 | # asm 1: cmp $0,<lensav=int64#4 | ||
12195 | # asm 2: cmp $0,<lensav=%rcx | ||
12196 | cmp $0,%rcx | ||
12197 | # comment:fp stack unchanged by jump | ||
12198 | |||
12199 | # qhasm: goto end if = | ||
12200 | je ._end | ||
12201 | |||
12202 | # qhasm: b = *(uint8 *)(blp + 0) | ||
12203 | # asm 1: movzbq 0(<blp=int64#2),>b=int64#3 | ||
12204 | # asm 2: movzbq 0(<blp=%rsi),>b=%rdx | ||
12205 | movzbq 0(%rsi),%rdx | ||
12206 | |||
12207 | # qhasm: *(uint8 *)(outp + 0) = b | ||
12208 | # asm 1: movb <b=int64#3b,0(<outp=int64#1) | ||
12209 | # asm 2: movb <b=%dl,0(<outp=%rdi) | ||
12210 | movb %dl,0(%rdi) | ||
12211 | |||
12212 | # qhasm: blp += 1 | ||
12213 | # asm 1: add $1,<blp=int64#2 | ||
12214 | # asm 2: add $1,<blp=%rsi | ||
12215 | add $1,%rsi | ||
12216 | |||
12217 | # qhasm: outp +=1 | ||
12218 | # asm 1: add $1,<outp=int64#1 | ||
12219 | # asm 2: add $1,<outp=%rdi | ||
12220 | add $1,%rdi | ||
12221 | |||
12222 | # qhasm: lensav -= 1 | ||
12223 | # asm 1: sub $1,<lensav=int64#4 | ||
12224 | # asm 2: sub $1,<lensav=%rcx | ||
12225 | sub $1,%rcx | ||
12226 | # comment:fp stack unchanged by jump | ||
12227 | |||
12228 | # qhasm: goto bytes | ||
12229 | jmp ._bytes | ||
12230 | |||
12231 | # qhasm: full: | ||
12232 | ._full: | ||
12233 | |||
12234 | # qhasm: tmp = *(uint32 *)(np + 12) | ||
12235 | # asm 1: movl 12(<np=int64#3),>tmp=int64#4d | ||
12236 | # asm 2: movl 12(<np=%rdx),>tmp=%ecx | ||
12237 | movl 12(%rdx),%ecx | ||
12238 | |||
12239 | # qhasm: (uint32) bswap tmp | ||
12240 | # asm 1: bswap <tmp=int64#4d | ||
12241 | # asm 2: bswap <tmp=%ecx | ||
12242 | bswap %ecx | ||
12243 | |||
12244 | # qhasm: tmp += len | ||
12245 | # asm 1: add <len=int64#2,<tmp=int64#4 | ||
12246 | # asm 2: add <len=%rsi,<tmp=%rcx | ||
12247 | add %rsi,%rcx | ||
12248 | |||
12249 | # qhasm: (uint32) bswap tmp | ||
12250 | # asm 1: bswap <tmp=int64#4d | ||
12251 | # asm 2: bswap <tmp=%ecx | ||
12252 | bswap %ecx | ||
12253 | |||
12254 | # qhasm: *(uint32 *)(np + 12) = tmp | ||
12255 | # asm 1: movl <tmp=int64#4d,12(<np=int64#3) | ||
12256 | # asm 2: movl <tmp=%ecx,12(<np=%rdx) | ||
12257 | movl %ecx,12(%rdx) | ||
12258 | |||
12259 | # qhasm: *(int128 *) (outp + 0) = xmm8 | ||
12260 | # asm 1: movdqa <xmm8=int6464#9,0(<outp=int64#1) | ||
12261 | # asm 2: movdqa <xmm8=%xmm8,0(<outp=%rdi) | ||
12262 | movdqa %xmm8,0(%rdi) | ||
12263 | |||
12264 | # qhasm: *(int128 *) (outp + 16) = xmm9 | ||
12265 | # asm 1: movdqa <xmm9=int6464#10,16(<outp=int64#1) | ||
12266 | # asm 2: movdqa <xmm9=%xmm9,16(<outp=%rdi) | ||
12267 | movdqa %xmm9,16(%rdi) | ||
12268 | |||
12269 | # qhasm: *(int128 *) (outp + 32) = xmm12 | ||
12270 | # asm 1: movdqa <xmm12=int6464#13,32(<outp=int64#1) | ||
12271 | # asm 2: movdqa <xmm12=%xmm12,32(<outp=%rdi) | ||
12272 | movdqa %xmm12,32(%rdi) | ||
12273 | |||
12274 | # qhasm: *(int128 *) (outp + 48) = xmm14 | ||
12275 | # asm 1: movdqa <xmm14=int6464#15,48(<outp=int64#1) | ||
12276 | # asm 2: movdqa <xmm14=%xmm14,48(<outp=%rdi) | ||
12277 | movdqa %xmm14,48(%rdi) | ||
12278 | |||
12279 | # qhasm: *(int128 *) (outp + 64) = xmm11 | ||
12280 | # asm 1: movdqa <xmm11=int6464#12,64(<outp=int64#1) | ||
12281 | # asm 2: movdqa <xmm11=%xmm11,64(<outp=%rdi) | ||
12282 | movdqa %xmm11,64(%rdi) | ||
12283 | |||
12284 | # qhasm: *(int128 *) (outp + 80) = xmm15 | ||
12285 | # asm 1: movdqa <xmm15=int6464#16,80(<outp=int64#1) | ||
12286 | # asm 2: movdqa <xmm15=%xmm15,80(<outp=%rdi) | ||
12287 | movdqa %xmm15,80(%rdi) | ||
12288 | |||
12289 | # qhasm: *(int128 *) (outp + 96) = xmm10 | ||
12290 | # asm 1: movdqa <xmm10=int6464#11,96(<outp=int64#1) | ||
12291 | # asm 2: movdqa <xmm10=%xmm10,96(<outp=%rdi) | ||
12292 | movdqa %xmm10,96(%rdi) | ||
12293 | |||
12294 | # qhasm: *(int128 *) (outp + 112) = xmm13 | ||
12295 | # asm 1: movdqa <xmm13=int6464#14,112(<outp=int64#1) | ||
12296 | # asm 2: movdqa <xmm13=%xmm13,112(<outp=%rdi) | ||
12297 | movdqa %xmm13,112(%rdi) | ||
12298 | # comment:fp stack unchanged by fallthrough | ||
12299 | |||
12300 | # qhasm: end: | ||
12301 | ._end: | ||
12302 | |||
12303 | # qhasm: leave | ||
12304 | add %r11,%rsp | ||
12305 | mov %rdi,%rax | ||
12306 | mov %rsi,%rdx | ||
12307 | xor %rax,%rax | ||
12308 | ret | ||
diff --git a/nacl/crypto_stream/aes128ctr/core2/api.h b/nacl/crypto_stream/aes128ctr/core2/api.h new file mode 100644 index 00000000..62fc8d88 --- /dev/null +++ b/nacl/crypto_stream/aes128ctr/core2/api.h | |||
@@ -0,0 +1,3 @@ | |||
1 | #define CRYPTO_KEYBYTES 16 /* AES-128 key length in bytes */ | ||
2 | #define CRYPTO_NONCEBYTES 16 /* nonce/counter block length in bytes (one 128-bit AES block) */ | ||
3 | #define CRYPTO_BEFORENMBYTES 1408 /* size of the precomputed key from beforenm; 1408 = 11 round keys x 128 bytes in bitsliced form -- TODO confirm against beforenm.s */ | ||
diff --git a/nacl/crypto_stream/aes128ctr/core2/beforenm.s b/nacl/crypto_stream/aes128ctr/core2/beforenm.s new file mode 100644 index 00000000..689ad8c3 --- /dev/null +++ b/nacl/crypto_stream/aes128ctr/core2/beforenm.s | |||
@@ -0,0 +1,13694 @@ | |||
1 | # Author: Emilia Käsper and Peter Schwabe | ||
2 | # Date: 2009-03-19 | ||
3 | # +2010.01.31: minor namespace modifications | ||
4 | # Public domain | ||
5 | |||
6 | .data | ||
7 | .p2align 6 | ||
8 | |||
9 | RCON: .int 0x00000000, 0x00000000, 0x00000000, 0xffffffff # round-constant mask for the AES key schedule -- presumably; verify against the key-expansion code below | ||
10 | ROTB: .int 0x0c000000, 0x00000000, 0x04000000, 0x08000000 # byte-rotation shuffle constant (pshufb mask) for key expansion -- TODO confirm | ||
11 | EXPB0: .int 0x03030303, 0x07070707, 0x0b0b0b0b, 0x0f0f0f0f # pshufb mask broadcasting byte 3/7/11/15 within each 32-bit word | ||
12 | CTRINC1: .int 0x00000001, 0x00000000, 0x00000000, 0x00000000 # CTRINC1..7: counter-increment constants 1..7, value in the first 32-bit word | ||
13 | CTRINC2: .int 0x00000002, 0x00000000, 0x00000000, 0x00000000 | ||
14 | CTRINC3: .int 0x00000003, 0x00000000, 0x00000000, 0x00000000 | ||
15 | CTRINC4: .int 0x00000004, 0x00000000, 0x00000000, 0x00000000 | ||
16 | CTRINC5: .int 0x00000005, 0x00000000, 0x00000000, 0x00000000 | ||
17 | CTRINC6: .int 0x00000006, 0x00000000, 0x00000000, 0x00000000 | ||
18 | CTRINC7: .int 0x00000007, 0x00000000, 0x00000000, 0x00000000 | ||
19 | RCTRINC1: .int 0x00000000, 0x00000000, 0x00000000, 0x00000001 # RCTRINC1..7: same increments with the value in the last 32-bit word (reversed layout) | ||
20 | RCTRINC2: .int 0x00000000, 0x00000000, 0x00000000, 0x00000002 | ||
21 | RCTRINC3: .int 0x00000000, 0x00000000, 0x00000000, 0x00000003 | ||
22 | RCTRINC4: .int 0x00000000, 0x00000000, 0x00000000, 0x00000004 | ||
23 | RCTRINC5: .int 0x00000000, 0x00000000, 0x00000000, 0x00000005 | ||
24 | RCTRINC6: .int 0x00000000, 0x00000000, 0x00000000, 0x00000006 | ||
25 | RCTRINC7: .int 0x00000000, 0x00000000, 0x00000000, 0x00000007 | ||
26 | |||
27 | SWAP32: .int 0x00010203, 0x04050607, 0x08090a0b, 0x0c0d0e0f # pshufb mask reversing the bytes of each 32-bit word (endianness swap) -- presumably | ||
28 | M0SWAP: .quad 0x0105090d0004080c , 0x03070b0f02060a0e # input byte permutation combined with a byte swap -- TODO confirm exact role | ||
29 | |||
30 | BS0: .quad 0x5555555555555555, 0x5555555555555555 # alternating-bit mask; paired with psrlq/psllq $1 in the bit-matrix transpose (bitslice conversion) | ||
31 | BS1: .quad 0x3333333333333333, 0x3333333333333333 # alternating-2-bit mask; paired with 2-bit shifts in the same transpose | ||
32 | BS2: .quad 0x0f0f0f0f0f0f0f0f, 0x0f0f0f0f0f0f0f0f # alternating-nibble mask; paired with 4-bit shifts in the same transpose | ||
33 | ONE: .quad 0xffffffffffffffff, 0xffffffffffffffff # all-ones constant | ||
34 | M0: .quad 0x02060a0e03070b0f, 0x0004080c0105090d # byte permutation (pshufb mask) into the bitsliced data layout -- TODO confirm | ||
35 | SRM0: .quad 0x0304090e00050a0f, 0x01060b0c0207080d # combined ShiftRows + M0 byte permutation -- presumably; verify | ||
36 | SR: .quad 0x0504070600030201, 0x0f0e0d0c0a09080b # ShiftRows byte permutation (pshufb mask) -- presumably; verify | ||
37 | |||
38 | # qhasm: int64 arg1 | ||
39 | |||
40 | # qhasm: int64 arg2 | ||
41 | |||
42 | # qhasm: input arg1 | ||
43 | |||
44 | # qhasm: input arg2 | ||
45 | |||
46 | # qhasm: int64 r11_caller | ||
47 | |||
48 | # qhasm: int64 r12_caller | ||
49 | |||
50 | # qhasm: int64 r13_caller | ||
51 | |||
52 | # qhasm: int64 r14_caller | ||
53 | |||
54 | # qhasm: int64 r15_caller | ||
55 | |||
56 | # qhasm: int64 rbx_caller | ||
57 | |||
58 | # qhasm: int64 rbp_caller | ||
59 | |||
60 | # qhasm: caller r11_caller | ||
61 | |||
62 | # qhasm: caller r12_caller | ||
63 | |||
64 | # qhasm: caller r13_caller | ||
65 | |||
66 | # qhasm: caller r14_caller | ||
67 | |||
68 | # qhasm: caller r15_caller | ||
69 | |||
70 | # qhasm: caller rbx_caller | ||
71 | |||
72 | # qhasm: caller rbp_caller | ||
73 | |||
74 | # qhasm: int64 sboxp | ||
75 | |||
76 | # qhasm: int64 c | ||
77 | |||
78 | # qhasm: int64 k | ||
79 | |||
80 | # qhasm: int64 x0 | ||
81 | |||
82 | # qhasm: int64 x1 | ||
83 | |||
84 | # qhasm: int64 x2 | ||
85 | |||
86 | # qhasm: int64 x3 | ||
87 | |||
88 | # qhasm: int64 e | ||
89 | |||
90 | # qhasm: int64 q0 | ||
91 | |||
92 | # qhasm: int64 q1 | ||
93 | |||
94 | # qhasm: int64 q2 | ||
95 | |||
96 | # qhasm: int64 q3 | ||
97 | |||
98 | # qhasm: int6464 xmm0 | ||
99 | |||
100 | # qhasm: int6464 xmm1 | ||
101 | |||
102 | # qhasm: int6464 xmm2 | ||
103 | |||
104 | # qhasm: int6464 xmm3 | ||
105 | |||
106 | # qhasm: int6464 xmm4 | ||
107 | |||
108 | # qhasm: int6464 xmm5 | ||
109 | |||
110 | # qhasm: int6464 xmm6 | ||
111 | |||
112 | # qhasm: int6464 xmm7 | ||
113 | |||
114 | # qhasm: int6464 xmm8 | ||
115 | |||
116 | # qhasm: int6464 xmm9 | ||
117 | |||
118 | # qhasm: int6464 xmm10 | ||
119 | |||
120 | # qhasm: int6464 xmm11 | ||
121 | |||
122 | # qhasm: int6464 xmm12 | ||
123 | |||
124 | # qhasm: int6464 xmm13 | ||
125 | |||
126 | # qhasm: int6464 xmm14 | ||
127 | |||
128 | # qhasm: int6464 xmm15 | ||
129 | |||
130 | # qhasm: int6464 t | ||
131 | |||
132 | # qhasm: enter crypto_stream_aes128ctr_core2_beforenm | ||
133 | .text | ||
134 | .p2align 5 | ||
135 | .globl _crypto_stream_aes128ctr_core2_beforenm | ||
136 | .globl crypto_stream_aes128ctr_core2_beforenm | ||
137 | _crypto_stream_aes128ctr_core2_beforenm: | ||
138 | crypto_stream_aes128ctr_core2_beforenm: | ||
139 | mov %rsp,%r11 | ||
140 | and $31,%r11 | ||
141 | add $0,%r11 | ||
142 | sub %r11,%rsp | ||
143 | |||
144 | # qhasm: c = arg1 | ||
145 | # asm 1: mov <arg1=int64#1,>c=int64#1 | ||
146 | # asm 2: mov <arg1=%rdi,>c=%rdi | ||
147 | mov %rdi,%rdi | ||
148 | |||
149 | # qhasm: k = arg2 | ||
150 | # asm 1: mov <arg2=int64#2,>k=int64#2 | ||
151 | # asm 2: mov <arg2=%rsi,>k=%rsi | ||
152 | mov %rsi,%rsi | ||
153 | |||
154 | # qhasm: xmm0 = *(int128 *) (k + 0) | ||
155 | # asm 1: movdqa 0(<k=int64#2),>xmm0=int6464#1 | ||
156 | # asm 2: movdqa 0(<k=%rsi),>xmm0=%xmm0 | ||
157 | movdqa 0(%rsi),%xmm0 | ||
158 | |||
159 | # qhasm: shuffle bytes of xmm0 by M0 | ||
160 | # asm 1: pshufb M0,<xmm0=int6464#1 | ||
161 | # asm 2: pshufb M0,<xmm0=%xmm0 | ||
162 | pshufb M0,%xmm0 | ||
163 | |||
164 | # qhasm: xmm1 = xmm0 | ||
165 | # asm 1: movdqa <xmm0=int6464#1,>xmm1=int6464#2 | ||
166 | # asm 2: movdqa <xmm0=%xmm0,>xmm1=%xmm1 | ||
167 | movdqa %xmm0,%xmm1 | ||
168 | |||
169 | # qhasm: xmm2 = xmm0 | ||
170 | # asm 1: movdqa <xmm0=int6464#1,>xmm2=int6464#3 | ||
171 | # asm 2: movdqa <xmm0=%xmm0,>xmm2=%xmm2 | ||
172 | movdqa %xmm0,%xmm2 | ||
173 | |||
174 | # qhasm: xmm3 = xmm0 | ||
175 | # asm 1: movdqa <xmm0=int6464#1,>xmm3=int6464#4 | ||
176 | # asm 2: movdqa <xmm0=%xmm0,>xmm3=%xmm3 | ||
177 | movdqa %xmm0,%xmm3 | ||
178 | |||
179 | # qhasm: xmm4 = xmm0 | ||
180 | # asm 1: movdqa <xmm0=int6464#1,>xmm4=int6464#5 | ||
181 | # asm 2: movdqa <xmm0=%xmm0,>xmm4=%xmm4 | ||
182 | movdqa %xmm0,%xmm4 | ||
183 | |||
184 | # qhasm: xmm5 = xmm0 | ||
185 | # asm 1: movdqa <xmm0=int6464#1,>xmm5=int6464#6 | ||
186 | # asm 2: movdqa <xmm0=%xmm0,>xmm5=%xmm5 | ||
187 | movdqa %xmm0,%xmm5 | ||
188 | |||
189 | # qhasm: xmm6 = xmm0 | ||
190 | # asm 1: movdqa <xmm0=int6464#1,>xmm6=int6464#7 | ||
191 | # asm 2: movdqa <xmm0=%xmm0,>xmm6=%xmm6 | ||
192 | movdqa %xmm0,%xmm6 | ||
193 | |||
194 | # qhasm: xmm7 = xmm0 | ||
195 | # asm 1: movdqa <xmm0=int6464#1,>xmm7=int6464#8 | ||
196 | # asm 2: movdqa <xmm0=%xmm0,>xmm7=%xmm7 | ||
197 | movdqa %xmm0,%xmm7 | ||
198 | |||
199 | # qhasm: t = xmm6 | ||
200 | # asm 1: movdqa <xmm6=int6464#7,>t=int6464#9 | ||
201 | # asm 2: movdqa <xmm6=%xmm6,>t=%xmm8 | ||
202 | movdqa %xmm6,%xmm8 | ||
203 | |||
204 | # qhasm: uint6464 t >>= 1 | ||
205 | # asm 1: psrlq $1,<t=int6464#9 | ||
206 | # asm 2: psrlq $1,<t=%xmm8 | ||
207 | psrlq $1,%xmm8 | ||
208 | |||
209 | # qhasm: t ^= xmm7 | ||
210 | # asm 1: pxor <xmm7=int6464#8,<t=int6464#9 | ||
211 | # asm 2: pxor <xmm7=%xmm7,<t=%xmm8 | ||
212 | pxor %xmm7,%xmm8 | ||
213 | |||
214 | # qhasm: t &= BS0 | ||
215 | # asm 1: pand BS0,<t=int6464#9 | ||
216 | # asm 2: pand BS0,<t=%xmm8 | ||
217 | pand BS0,%xmm8 | ||
218 | |||
219 | # qhasm: xmm7 ^= t | ||
220 | # asm 1: pxor <t=int6464#9,<xmm7=int6464#8 | ||
221 | # asm 2: pxor <t=%xmm8,<xmm7=%xmm7 | ||
222 | pxor %xmm8,%xmm7 | ||
223 | |||
224 | # qhasm: uint6464 t <<= 1 | ||
225 | # asm 1: psllq $1,<t=int6464#9 | ||
226 | # asm 2: psllq $1,<t=%xmm8 | ||
227 | psllq $1,%xmm8 | ||
228 | |||
229 | # qhasm: xmm6 ^= t | ||
230 | # asm 1: pxor <t=int6464#9,<xmm6=int6464#7 | ||
231 | # asm 2: pxor <t=%xmm8,<xmm6=%xmm6 | ||
232 | pxor %xmm8,%xmm6 | ||
233 | |||
234 | # qhasm: t = xmm4 | ||
235 | # asm 1: movdqa <xmm4=int6464#5,>t=int6464#9 | ||
236 | # asm 2: movdqa <xmm4=%xmm4,>t=%xmm8 | ||
237 | movdqa %xmm4,%xmm8 | ||
238 | |||
239 | # qhasm: uint6464 t >>= 1 | ||
240 | # asm 1: psrlq $1,<t=int6464#9 | ||
241 | # asm 2: psrlq $1,<t=%xmm8 | ||
242 | psrlq $1,%xmm8 | ||
243 | |||
244 | # qhasm: t ^= xmm5 | ||
245 | # asm 1: pxor <xmm5=int6464#6,<t=int6464#9 | ||
246 | # asm 2: pxor <xmm5=%xmm5,<t=%xmm8 | ||
247 | pxor %xmm5,%xmm8 | ||
248 | |||
249 | # qhasm: t &= BS0 | ||
250 | # asm 1: pand BS0,<t=int6464#9 | ||
251 | # asm 2: pand BS0,<t=%xmm8 | ||
252 | pand BS0,%xmm8 | ||
253 | |||
254 | # qhasm: xmm5 ^= t | ||
255 | # asm 1: pxor <t=int6464#9,<xmm5=int6464#6 | ||
256 | # asm 2: pxor <t=%xmm8,<xmm5=%xmm5 | ||
257 | pxor %xmm8,%xmm5 | ||
258 | |||
259 | # qhasm: uint6464 t <<= 1 | ||
260 | # asm 1: psllq $1,<t=int6464#9 | ||
261 | # asm 2: psllq $1,<t=%xmm8 | ||
262 | psllq $1,%xmm8 | ||
263 | |||
264 | # qhasm: xmm4 ^= t | ||
265 | # asm 1: pxor <t=int6464#9,<xmm4=int6464#5 | ||
266 | # asm 2: pxor <t=%xmm8,<xmm4=%xmm4 | ||
267 | pxor %xmm8,%xmm4 | ||
268 | |||
269 | # qhasm: t = xmm2 | ||
270 | # asm 1: movdqa <xmm2=int6464#3,>t=int6464#9 | ||
271 | # asm 2: movdqa <xmm2=%xmm2,>t=%xmm8 | ||
272 | movdqa %xmm2,%xmm8 | ||
273 | |||
274 | # qhasm: uint6464 t >>= 1 | ||
275 | # asm 1: psrlq $1,<t=int6464#9 | ||
276 | # asm 2: psrlq $1,<t=%xmm8 | ||
277 | psrlq $1,%xmm8 | ||
278 | |||
279 | # qhasm: t ^= xmm3 | ||
280 | # asm 1: pxor <xmm3=int6464#4,<t=int6464#9 | ||
281 | # asm 2: pxor <xmm3=%xmm3,<t=%xmm8 | ||
282 | pxor %xmm3,%xmm8 | ||
283 | |||
284 | # qhasm: t &= BS0 | ||
285 | # asm 1: pand BS0,<t=int6464#9 | ||
286 | # asm 2: pand BS0,<t=%xmm8 | ||
287 | pand BS0,%xmm8 | ||
288 | |||
289 | # qhasm: xmm3 ^= t | ||
290 | # asm 1: pxor <t=int6464#9,<xmm3=int6464#4 | ||
291 | # asm 2: pxor <t=%xmm8,<xmm3=%xmm3 | ||
292 | pxor %xmm8,%xmm3 | ||
293 | |||
294 | # qhasm: uint6464 t <<= 1 | ||
295 | # asm 1: psllq $1,<t=int6464#9 | ||
296 | # asm 2: psllq $1,<t=%xmm8 | ||
297 | psllq $1,%xmm8 | ||
298 | |||
299 | # qhasm: xmm2 ^= t | ||
300 | # asm 1: pxor <t=int6464#9,<xmm2=int6464#3 | ||
301 | # asm 2: pxor <t=%xmm8,<xmm2=%xmm2 | ||
302 | pxor %xmm8,%xmm2 | ||
303 | |||
304 | # qhasm: t = xmm0 | ||
305 | # asm 1: movdqa <xmm0=int6464#1,>t=int6464#9 | ||
306 | # asm 2: movdqa <xmm0=%xmm0,>t=%xmm8 | ||
307 | movdqa %xmm0,%xmm8 | ||
308 | |||
309 | # qhasm: uint6464 t >>= 1 | ||
310 | # asm 1: psrlq $1,<t=int6464#9 | ||
311 | # asm 2: psrlq $1,<t=%xmm8 | ||
312 | psrlq $1,%xmm8 | ||
313 | |||
314 | # qhasm: t ^= xmm1 | ||
315 | # asm 1: pxor <xmm1=int6464#2,<t=int6464#9 | ||
316 | # asm 2: pxor <xmm1=%xmm1,<t=%xmm8 | ||
317 | pxor %xmm1,%xmm8 | ||
318 | |||
319 | # qhasm: t &= BS0 | ||
320 | # asm 1: pand BS0,<t=int6464#9 | ||
321 | # asm 2: pand BS0,<t=%xmm8 | ||
322 | pand BS0,%xmm8 | ||
323 | |||
324 | # qhasm: xmm1 ^= t | ||
325 | # asm 1: pxor <t=int6464#9,<xmm1=int6464#2 | ||
326 | # asm 2: pxor <t=%xmm8,<xmm1=%xmm1 | ||
327 | pxor %xmm8,%xmm1 | ||
328 | |||
329 | # qhasm: uint6464 t <<= 1 | ||
330 | # asm 1: psllq $1,<t=int6464#9 | ||
331 | # asm 2: psllq $1,<t=%xmm8 | ||
332 | psllq $1,%xmm8 | ||
333 | |||
334 | # qhasm: xmm0 ^= t | ||
335 | # asm 1: pxor <t=int6464#9,<xmm0=int6464#1 | ||
336 | # asm 2: pxor <t=%xmm8,<xmm0=%xmm0 | ||
337 | pxor %xmm8,%xmm0 | ||
338 | |||
339 | # qhasm: t = xmm5 | ||
340 | # asm 1: movdqa <xmm5=int6464#6,>t=int6464#9 | ||
341 | # asm 2: movdqa <xmm5=%xmm5,>t=%xmm8 | ||
342 | movdqa %xmm5,%xmm8 | ||
343 | |||
344 | # qhasm: uint6464 t >>= 2 | ||
345 | # asm 1: psrlq $2,<t=int6464#9 | ||
346 | # asm 2: psrlq $2,<t=%xmm8 | ||
347 | psrlq $2,%xmm8 | ||
348 | |||
349 | # qhasm: t ^= xmm7 | ||
350 | # asm 1: pxor <xmm7=int6464#8,<t=int6464#9 | ||
351 | # asm 2: pxor <xmm7=%xmm7,<t=%xmm8 | ||
352 | pxor %xmm7,%xmm8 | ||
353 | |||
354 | # qhasm: t &= BS1 | ||
355 | # asm 1: pand BS1,<t=int6464#9 | ||
356 | # asm 2: pand BS1,<t=%xmm8 | ||
357 | pand BS1,%xmm8 | ||
358 | |||
359 | # qhasm: xmm7 ^= t | ||
360 | # asm 1: pxor <t=int6464#9,<xmm7=int6464#8 | ||
361 | # asm 2: pxor <t=%xmm8,<xmm7=%xmm7 | ||
362 | pxor %xmm8,%xmm7 | ||
363 | |||
364 | # qhasm: uint6464 t <<= 2 | ||
365 | # asm 1: psllq $2,<t=int6464#9 | ||
366 | # asm 2: psllq $2,<t=%xmm8 | ||
367 | psllq $2,%xmm8 | ||
368 | |||
369 | # qhasm: xmm5 ^= t | ||
370 | # asm 1: pxor <t=int6464#9,<xmm5=int6464#6 | ||
371 | # asm 2: pxor <t=%xmm8,<xmm5=%xmm5 | ||
372 | pxor %xmm8,%xmm5 | ||
373 | |||
374 | # qhasm: t = xmm4 | ||
375 | # asm 1: movdqa <xmm4=int6464#5,>t=int6464#9 | ||
376 | # asm 2: movdqa <xmm4=%xmm4,>t=%xmm8 | ||
377 | movdqa %xmm4,%xmm8 | ||
378 | |||
379 | # qhasm: uint6464 t >>= 2 | ||
380 | # asm 1: psrlq $2,<t=int6464#9 | ||
381 | # asm 2: psrlq $2,<t=%xmm8 | ||
382 | psrlq $2,%xmm8 | ||
383 | |||
384 | # qhasm: t ^= xmm6 | ||
385 | # asm 1: pxor <xmm6=int6464#7,<t=int6464#9 | ||
386 | # asm 2: pxor <xmm6=%xmm6,<t=%xmm8 | ||
387 | pxor %xmm6,%xmm8 | ||
388 | |||
389 | # qhasm: t &= BS1 | ||
390 | # asm 1: pand BS1,<t=int6464#9 | ||
391 | # asm 2: pand BS1,<t=%xmm8 | ||
392 | pand BS1,%xmm8 | ||
393 | |||
394 | # qhasm: xmm6 ^= t | ||
395 | # asm 1: pxor <t=int6464#9,<xmm6=int6464#7 | ||
396 | # asm 2: pxor <t=%xmm8,<xmm6=%xmm6 | ||
397 | pxor %xmm8,%xmm6 | ||
398 | |||
399 | # qhasm: uint6464 t <<= 2 | ||
400 | # asm 1: psllq $2,<t=int6464#9 | ||
401 | # asm 2: psllq $2,<t=%xmm8 | ||
402 | psllq $2,%xmm8 | ||
403 | |||
404 | # qhasm: xmm4 ^= t | ||
405 | # asm 1: pxor <t=int6464#9,<xmm4=int6464#5 | ||
406 | # asm 2: pxor <t=%xmm8,<xmm4=%xmm4 | ||
407 | pxor %xmm8,%xmm4 | ||
408 | |||
409 | # qhasm: t = xmm1 | ||
410 | # asm 1: movdqa <xmm1=int6464#2,>t=int6464#9 | ||
411 | # asm 2: movdqa <xmm1=%xmm1,>t=%xmm8 | ||
412 | movdqa %xmm1,%xmm8 | ||
413 | |||
414 | # qhasm: uint6464 t >>= 2 | ||
415 | # asm 1: psrlq $2,<t=int6464#9 | ||
416 | # asm 2: psrlq $2,<t=%xmm8 | ||
417 | psrlq $2,%xmm8 | ||
418 | |||
419 | # qhasm: t ^= xmm3 | ||
420 | # asm 1: pxor <xmm3=int6464#4,<t=int6464#9 | ||
421 | # asm 2: pxor <xmm3=%xmm3,<t=%xmm8 | ||
422 | pxor %xmm3,%xmm8 | ||
423 | |||
424 | # qhasm: t &= BS1 | ||
425 | # asm 1: pand BS1,<t=int6464#9 | ||
426 | # asm 2: pand BS1,<t=%xmm8 | ||
427 | pand BS1,%xmm8 | ||
428 | |||
429 | # qhasm: xmm3 ^= t | ||
430 | # asm 1: pxor <t=int6464#9,<xmm3=int6464#4 | ||
431 | # asm 2: pxor <t=%xmm8,<xmm3=%xmm3 | ||
432 | pxor %xmm8,%xmm3 | ||
433 | |||
434 | # qhasm: uint6464 t <<= 2 | ||
435 | # asm 1: psllq $2,<t=int6464#9 | ||
436 | # asm 2: psllq $2,<t=%xmm8 | ||
437 | psllq $2,%xmm8 | ||
438 | |||
439 | # qhasm: xmm1 ^= t | ||
440 | # asm 1: pxor <t=int6464#9,<xmm1=int6464#2 | ||
441 | # asm 2: pxor <t=%xmm8,<xmm1=%xmm1 | ||
442 | pxor %xmm8,%xmm1 | ||
443 | |||
444 | # qhasm: t = xmm0 | ||
445 | # asm 1: movdqa <xmm0=int6464#1,>t=int6464#9 | ||
446 | # asm 2: movdqa <xmm0=%xmm0,>t=%xmm8 | ||
447 | movdqa %xmm0,%xmm8 | ||
448 | |||
449 | # qhasm: uint6464 t >>= 2 | ||
450 | # asm 1: psrlq $2,<t=int6464#9 | ||
451 | # asm 2: psrlq $2,<t=%xmm8 | ||
452 | psrlq $2,%xmm8 | ||
453 | |||
454 | # qhasm: t ^= xmm2 | ||
455 | # asm 1: pxor <xmm2=int6464#3,<t=int6464#9 | ||
456 | # asm 2: pxor <xmm2=%xmm2,<t=%xmm8 | ||
457 | pxor %xmm2,%xmm8 | ||
458 | |||
459 | # qhasm: t &= BS1 | ||
460 | # asm 1: pand BS1,<t=int6464#9 | ||
461 | # asm 2: pand BS1,<t=%xmm8 | ||
462 | pand BS1,%xmm8 | ||
463 | |||
464 | # qhasm: xmm2 ^= t | ||
465 | # asm 1: pxor <t=int6464#9,<xmm2=int6464#3 | ||
466 | # asm 2: pxor <t=%xmm8,<xmm2=%xmm2 | ||
467 | pxor %xmm8,%xmm2 | ||
468 | |||
469 | # qhasm: uint6464 t <<= 2 | ||
470 | # asm 1: psllq $2,<t=int6464#9 | ||
471 | # asm 2: psllq $2,<t=%xmm8 | ||
472 | psllq $2,%xmm8 | ||
473 | |||
474 | # qhasm: xmm0 ^= t | ||
475 | # asm 1: pxor <t=int6464#9,<xmm0=int6464#1 | ||
476 | # asm 2: pxor <t=%xmm8,<xmm0=%xmm0 | ||
477 | pxor %xmm8,%xmm0 | ||
478 | |||
479 | # qhasm: t = xmm3 | ||
480 | # asm 1: movdqa <xmm3=int6464#4,>t=int6464#9 | ||
481 | # asm 2: movdqa <xmm3=%xmm3,>t=%xmm8 | ||
482 | movdqa %xmm3,%xmm8 | ||
483 | |||
484 | # qhasm: uint6464 t >>= 4 | ||
485 | # asm 1: psrlq $4,<t=int6464#9 | ||
486 | # asm 2: psrlq $4,<t=%xmm8 | ||
487 | psrlq $4,%xmm8 | ||
488 | |||
489 | # qhasm: t ^= xmm7 | ||
490 | # asm 1: pxor <xmm7=int6464#8,<t=int6464#9 | ||
491 | # asm 2: pxor <xmm7=%xmm7,<t=%xmm8 | ||
492 | pxor %xmm7,%xmm8 | ||
493 | |||
494 | # qhasm: t &= BS2 | ||
495 | # asm 1: pand BS2,<t=int6464#9 | ||
496 | # asm 2: pand BS2,<t=%xmm8 | ||
497 | pand BS2,%xmm8 | ||
498 | |||
499 | # qhasm: xmm7 ^= t | ||
500 | # asm 1: pxor <t=int6464#9,<xmm7=int6464#8 | ||
501 | # asm 2: pxor <t=%xmm8,<xmm7=%xmm7 | ||
502 | pxor %xmm8,%xmm7 | ||
503 | |||
504 | # qhasm: uint6464 t <<= 4 | ||
505 | # asm 1: psllq $4,<t=int6464#9 | ||
506 | # asm 2: psllq $4,<t=%xmm8 | ||
507 | psllq $4,%xmm8 | ||
508 | |||
509 | # qhasm: xmm3 ^= t | ||
510 | # asm 1: pxor <t=int6464#9,<xmm3=int6464#4 | ||
511 | # asm 2: pxor <t=%xmm8,<xmm3=%xmm3 | ||
512 | pxor %xmm8,%xmm3 | ||
513 | |||
514 | # qhasm: t = xmm2 | ||
515 | # asm 1: movdqa <xmm2=int6464#3,>t=int6464#9 | ||
516 | # asm 2: movdqa <xmm2=%xmm2,>t=%xmm8 | ||
517 | movdqa %xmm2,%xmm8 | ||
518 | |||
519 | # qhasm: uint6464 t >>= 4 | ||
520 | # asm 1: psrlq $4,<t=int6464#9 | ||
521 | # asm 2: psrlq $4,<t=%xmm8 | ||
522 | psrlq $4,%xmm8 | ||
523 | |||
524 | # qhasm: t ^= xmm6 | ||
525 | # asm 1: pxor <xmm6=int6464#7,<t=int6464#9 | ||
526 | # asm 2: pxor <xmm6=%xmm6,<t=%xmm8 | ||
527 | pxor %xmm6,%xmm8 | ||
528 | |||
529 | # qhasm: t &= BS2 | ||
530 | # asm 1: pand BS2,<t=int6464#9 | ||
531 | # asm 2: pand BS2,<t=%xmm8 | ||
532 | pand BS2,%xmm8 | ||
533 | |||
534 | # qhasm: xmm6 ^= t | ||
535 | # asm 1: pxor <t=int6464#9,<xmm6=int6464#7 | ||
536 | # asm 2: pxor <t=%xmm8,<xmm6=%xmm6 | ||
537 | pxor %xmm8,%xmm6 | ||
538 | |||
539 | # qhasm: uint6464 t <<= 4 | ||
540 | # asm 1: psllq $4,<t=int6464#9 | ||
541 | # asm 2: psllq $4,<t=%xmm8 | ||
542 | psllq $4,%xmm8 | ||
543 | |||
544 | # qhasm: xmm2 ^= t | ||
545 | # asm 1: pxor <t=int6464#9,<xmm2=int6464#3 | ||
546 | # asm 2: pxor <t=%xmm8,<xmm2=%xmm2 | ||
547 | pxor %xmm8,%xmm2 | ||
548 | |||
549 | # qhasm: t = xmm1 | ||
550 | # asm 1: movdqa <xmm1=int6464#2,>t=int6464#9 | ||
551 | # asm 2: movdqa <xmm1=%xmm1,>t=%xmm8 | ||
552 | movdqa %xmm1,%xmm8 | ||
553 | |||
554 | # qhasm: uint6464 t >>= 4 | ||
555 | # asm 1: psrlq $4,<t=int6464#9 | ||
556 | # asm 2: psrlq $4,<t=%xmm8 | ||
557 | psrlq $4,%xmm8 | ||
558 | |||
559 | # qhasm: t ^= xmm5 | ||
560 | # asm 1: pxor <xmm5=int6464#6,<t=int6464#9 | ||
561 | # asm 2: pxor <xmm5=%xmm5,<t=%xmm8 | ||
562 | pxor %xmm5,%xmm8 | ||
563 | |||
564 | # qhasm: t &= BS2 | ||
565 | # asm 1: pand BS2,<t=int6464#9 | ||
566 | # asm 2: pand BS2,<t=%xmm8 | ||
567 | pand BS2,%xmm8 | ||
568 | |||
569 | # qhasm: xmm5 ^= t | ||
570 | # asm 1: pxor <t=int6464#9,<xmm5=int6464#6 | ||
571 | # asm 2: pxor <t=%xmm8,<xmm5=%xmm5 | ||
572 | pxor %xmm8,%xmm5 | ||
573 | |||
574 | # qhasm: uint6464 t <<= 4 | ||
575 | # asm 1: psllq $4,<t=int6464#9 | ||
576 | # asm 2: psllq $4,<t=%xmm8 | ||
577 | psllq $4,%xmm8 | ||
578 | |||
579 | # qhasm: xmm1 ^= t | ||
580 | # asm 1: pxor <t=int6464#9,<xmm1=int6464#2 | ||
581 | # asm 2: pxor <t=%xmm8,<xmm1=%xmm1 | ||
582 | pxor %xmm8,%xmm1 | ||
583 | |||
584 | # qhasm: t = xmm0 | ||
585 | # asm 1: movdqa <xmm0=int6464#1,>t=int6464#9 | ||
586 | # asm 2: movdqa <xmm0=%xmm0,>t=%xmm8 | ||
587 | movdqa %xmm0,%xmm8 | ||
588 | |||
589 | # qhasm: uint6464 t >>= 4 | ||
590 | # asm 1: psrlq $4,<t=int6464#9 | ||
591 | # asm 2: psrlq $4,<t=%xmm8 | ||
592 | psrlq $4,%xmm8 | ||
593 | |||
594 | # qhasm: t ^= xmm4 | ||
595 | # asm 1: pxor <xmm4=int6464#5,<t=int6464#9 | ||
596 | # asm 2: pxor <xmm4=%xmm4,<t=%xmm8 | ||
597 | pxor %xmm4,%xmm8 | ||
598 | |||
599 | # qhasm: t &= BS2 | ||
600 | # asm 1: pand BS2,<t=int6464#9 | ||
601 | # asm 2: pand BS2,<t=%xmm8 | ||
602 | pand BS2,%xmm8 | ||
603 | |||
604 | # qhasm: xmm4 ^= t | ||
605 | # asm 1: pxor <t=int6464#9,<xmm4=int6464#5 | ||
606 | # asm 2: pxor <t=%xmm8,<xmm4=%xmm4 | ||
607 | pxor %xmm8,%xmm4 | ||
608 | |||
609 | # qhasm: uint6464 t <<= 4 | ||
610 | # asm 1: psllq $4,<t=int6464#9 | ||
611 | # asm 2: psllq $4,<t=%xmm8 | ||
612 | psllq $4,%xmm8 | ||
613 | |||
614 | # qhasm: xmm0 ^= t | ||
615 | # asm 1: pxor <t=int6464#9,<xmm0=int6464#1 | ||
616 | # asm 2: pxor <t=%xmm8,<xmm0=%xmm0 | ||
617 | pxor %xmm8,%xmm0 | ||
618 | |||
619 | # qhasm: *(int128 *) (c + 0) = xmm0 | ||
620 | # asm 1: movdqa <xmm0=int6464#1,0(<c=int64#1) | ||
621 | # asm 2: movdqa <xmm0=%xmm0,0(<c=%rdi) | ||
622 | movdqa %xmm0,0(%rdi) | ||
623 | |||
624 | # qhasm: *(int128 *) (c + 16) = xmm1 | ||
625 | # asm 1: movdqa <xmm1=int6464#2,16(<c=int64#1) | ||
626 | # asm 2: movdqa <xmm1=%xmm1,16(<c=%rdi) | ||
627 | movdqa %xmm1,16(%rdi) | ||
628 | |||
629 | # qhasm: *(int128 *) (c + 32) = xmm2 | ||
630 | # asm 1: movdqa <xmm2=int6464#3,32(<c=int64#1) | ||
631 | # asm 2: movdqa <xmm2=%xmm2,32(<c=%rdi) | ||
632 | movdqa %xmm2,32(%rdi) | ||
633 | |||
634 | # qhasm: *(int128 *) (c + 48) = xmm3 | ||
635 | # asm 1: movdqa <xmm3=int6464#4,48(<c=int64#1) | ||
636 | # asm 2: movdqa <xmm3=%xmm3,48(<c=%rdi) | ||
637 | movdqa %xmm3,48(%rdi) | ||
638 | |||
639 | # qhasm: *(int128 *) (c + 64) = xmm4 | ||
640 | # asm 1: movdqa <xmm4=int6464#5,64(<c=int64#1) | ||
641 | # asm 2: movdqa <xmm4=%xmm4,64(<c=%rdi) | ||
642 | movdqa %xmm4,64(%rdi) | ||
643 | |||
644 | # qhasm: *(int128 *) (c + 80) = xmm5 | ||
645 | # asm 1: movdqa <xmm5=int6464#6,80(<c=int64#1) | ||
646 | # asm 2: movdqa <xmm5=%xmm5,80(<c=%rdi) | ||
647 | movdqa %xmm5,80(%rdi) | ||
648 | |||
649 | # qhasm: *(int128 *) (c + 96) = xmm6 | ||
650 | # asm 1: movdqa <xmm6=int6464#7,96(<c=int64#1) | ||
651 | # asm 2: movdqa <xmm6=%xmm6,96(<c=%rdi) | ||
652 | movdqa %xmm6,96(%rdi) | ||
653 | |||
654 | # qhasm: *(int128 *) (c + 112) = xmm7 | ||
655 | # asm 1: movdqa <xmm7=int6464#8,112(<c=int64#1) | ||
656 | # asm 2: movdqa <xmm7=%xmm7,112(<c=%rdi) | ||
657 | movdqa %xmm7,112(%rdi) | ||
658 | |||
659 | # qhasm: shuffle bytes of xmm0 by ROTB | ||
660 | # asm 1: pshufb ROTB,<xmm0=int6464#1 | ||
661 | # asm 2: pshufb ROTB,<xmm0=%xmm0 | ||
662 | pshufb ROTB,%xmm0 | ||
663 | |||
664 | # qhasm: shuffle bytes of xmm1 by ROTB | ||
665 | # asm 1: pshufb ROTB,<xmm1=int6464#2 | ||
666 | # asm 2: pshufb ROTB,<xmm1=%xmm1 | ||
667 | pshufb ROTB,%xmm1 | ||
668 | |||
669 | # qhasm: shuffle bytes of xmm2 by ROTB | ||
670 | # asm 1: pshufb ROTB,<xmm2=int6464#3 | ||
671 | # asm 2: pshufb ROTB,<xmm2=%xmm2 | ||
672 | pshufb ROTB,%xmm2 | ||
673 | |||
674 | # qhasm: shuffle bytes of xmm3 by ROTB | ||
675 | # asm 1: pshufb ROTB,<xmm3=int6464#4 | ||
676 | # asm 2: pshufb ROTB,<xmm3=%xmm3 | ||
677 | pshufb ROTB,%xmm3 | ||
678 | |||
679 | # qhasm: shuffle bytes of xmm4 by ROTB | ||
680 | # asm 1: pshufb ROTB,<xmm4=int6464#5 | ||
681 | # asm 2: pshufb ROTB,<xmm4=%xmm4 | ||
682 | pshufb ROTB,%xmm4 | ||
683 | |||
684 | # qhasm: shuffle bytes of xmm5 by ROTB | ||
685 | # asm 1: pshufb ROTB,<xmm5=int6464#6 | ||
686 | # asm 2: pshufb ROTB,<xmm5=%xmm5 | ||
687 | pshufb ROTB,%xmm5 | ||
688 | |||
689 | # qhasm: shuffle bytes of xmm6 by ROTB | ||
690 | # asm 1: pshufb ROTB,<xmm6=int6464#7 | ||
691 | # asm 2: pshufb ROTB,<xmm6=%xmm6 | ||
692 | pshufb ROTB,%xmm6 | ||
693 | |||
694 | # qhasm: shuffle bytes of xmm7 by ROTB | ||
695 | # asm 1: pshufb ROTB,<xmm7=int6464#8 | ||
696 | # asm 2: pshufb ROTB,<xmm7=%xmm7 | ||
697 | pshufb ROTB,%xmm7 | ||
698 | |||
699 | # qhasm: xmm5 ^= xmm6 | ||
700 | # asm 1: pxor <xmm6=int6464#7,<xmm5=int6464#6 | ||
701 | # asm 2: pxor <xmm6=%xmm6,<xmm5=%xmm5 | ||
702 | pxor %xmm6,%xmm5 | ||
703 | |||
704 | # qhasm: xmm2 ^= xmm1 | ||
705 | # asm 1: pxor <xmm1=int6464#2,<xmm2=int6464#3 | ||
706 | # asm 2: pxor <xmm1=%xmm1,<xmm2=%xmm2 | ||
707 | pxor %xmm1,%xmm2 | ||
708 | |||
709 | # qhasm: xmm5 ^= xmm0 | ||
710 | # asm 1: pxor <xmm0=int6464#1,<xmm5=int6464#6 | ||
711 | # asm 2: pxor <xmm0=%xmm0,<xmm5=%xmm5 | ||
712 | pxor %xmm0,%xmm5 | ||
713 | |||
714 | # qhasm: xmm6 ^= xmm2 | ||
715 | # asm 1: pxor <xmm2=int6464#3,<xmm6=int6464#7 | ||
716 | # asm 2: pxor <xmm2=%xmm2,<xmm6=%xmm6 | ||
717 | pxor %xmm2,%xmm6 | ||
718 | |||
719 | # qhasm: xmm3 ^= xmm0 | ||
720 | # asm 1: pxor <xmm0=int6464#1,<xmm3=int6464#4 | ||
721 | # asm 2: pxor <xmm0=%xmm0,<xmm3=%xmm3 | ||
722 | pxor %xmm0,%xmm3 | ||
723 | |||
724 | # qhasm: xmm6 ^= xmm3 | ||
725 | # asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7 | ||
726 | # asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6 | ||
727 | pxor %xmm3,%xmm6 | ||
728 | |||
729 | # qhasm: xmm3 ^= xmm7 | ||
730 | # asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4 | ||
731 | # asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3 | ||
732 | pxor %xmm7,%xmm3 | ||
733 | |||
734 | # qhasm: xmm3 ^= xmm4 | ||
735 | # asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4 | ||
736 | # asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3 | ||
737 | pxor %xmm4,%xmm3 | ||
738 | |||
739 | # qhasm: xmm7 ^= xmm5 | ||
740 | # asm 1: pxor <xmm5=int6464#6,<xmm7=int6464#8 | ||
741 | # asm 2: pxor <xmm5=%xmm5,<xmm7=%xmm7 | ||
742 | pxor %xmm5,%xmm7 | ||
743 | |||
744 | # qhasm: xmm3 ^= xmm1 | ||
745 | # asm 1: pxor <xmm1=int6464#2,<xmm3=int6464#4 | ||
746 | # asm 2: pxor <xmm1=%xmm1,<xmm3=%xmm3 | ||
747 | pxor %xmm1,%xmm3 | ||
748 | |||
749 | # qhasm: xmm4 ^= xmm5 | ||
750 | # asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5 | ||
751 | # asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4 | ||
752 | pxor %xmm5,%xmm4 | ||
753 | |||
754 | # qhasm: xmm2 ^= xmm7 | ||
755 | # asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3 | ||
756 | # asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2 | ||
757 | pxor %xmm7,%xmm2 | ||
758 | |||
759 | # qhasm: xmm1 ^= xmm5 | ||
760 | # asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2 | ||
761 | # asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1 | ||
762 | pxor %xmm5,%xmm1 | ||
763 | |||
764 | # qhasm: xmm11 = xmm7 | ||
765 | # asm 1: movdqa <xmm7=int6464#8,>xmm11=int6464#9 | ||
766 | # asm 2: movdqa <xmm7=%xmm7,>xmm11=%xmm8 | ||
767 | movdqa %xmm7,%xmm8 | ||
768 | |||
769 | # qhasm: xmm10 = xmm1 | ||
770 | # asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10 | ||
771 | # asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9 | ||
772 | movdqa %xmm1,%xmm9 | ||
773 | |||
774 | # qhasm: xmm9 = xmm5 | ||
775 | # asm 1: movdqa <xmm5=int6464#6,>xmm9=int6464#11 | ||
776 | # asm 2: movdqa <xmm5=%xmm5,>xmm9=%xmm10 | ||
777 | movdqa %xmm5,%xmm10 | ||
778 | |||
779 | # qhasm: xmm13 = xmm2 | ||
780 | # asm 1: movdqa <xmm2=int6464#3,>xmm13=int6464#12 | ||
781 | # asm 2: movdqa <xmm2=%xmm2,>xmm13=%xmm11 | ||
782 | movdqa %xmm2,%xmm11 | ||
783 | |||
784 | # qhasm: xmm12 = xmm6 | ||
785 | # asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#13 | ||
786 | # asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm12 | ||
787 | movdqa %xmm6,%xmm12 | ||
788 | |||
789 | # qhasm: xmm11 ^= xmm4 | ||
790 | # asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#9 | ||
791 | # asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm8 | ||
792 | pxor %xmm4,%xmm8 | ||
793 | |||
794 | # qhasm: xmm10 ^= xmm2 | ||
795 | # asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#10 | ||
796 | # asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm9 | ||
797 | pxor %xmm2,%xmm9 | ||
798 | |||
799 | # qhasm: xmm9 ^= xmm3 | ||
800 | # asm 1: pxor <xmm3=int6464#4,<xmm9=int6464#11 | ||
801 | # asm 2: pxor <xmm3=%xmm3,<xmm9=%xmm10 | ||
802 | pxor %xmm3,%xmm10 | ||
803 | |||
804 | # qhasm: xmm13 ^= xmm4 | ||
805 | # asm 1: pxor <xmm4=int6464#5,<xmm13=int6464#12 | ||
806 | # asm 2: pxor <xmm4=%xmm4,<xmm13=%xmm11 | ||
807 | pxor %xmm4,%xmm11 | ||
808 | |||
809 | # qhasm: xmm12 ^= xmm0 | ||
810 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13 | ||
811 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12 | ||
812 | pxor %xmm0,%xmm12 | ||
813 | |||
814 | # qhasm: xmm14 = xmm11 | ||
815 | # asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14 | ||
816 | # asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13 | ||
817 | movdqa %xmm8,%xmm13 | ||
818 | |||
819 | # qhasm: xmm8 = xmm10 | ||
820 | # asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15 | ||
821 | # asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14 | ||
822 | movdqa %xmm9,%xmm14 | ||
823 | |||
824 | # qhasm: xmm15 = xmm11 | ||
825 | # asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16 | ||
826 | # asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15 | ||
827 | movdqa %xmm8,%xmm15 | ||
828 | |||
829 | # qhasm: xmm10 |= xmm9 | ||
830 | # asm 1: por <xmm9=int6464#11,<xmm10=int6464#10 | ||
831 | # asm 2: por <xmm9=%xmm10,<xmm10=%xmm9 | ||
832 | por %xmm10,%xmm9 | ||
833 | |||
834 | # qhasm: xmm11 |= xmm12 | ||
835 | # asm 1: por <xmm12=int6464#13,<xmm11=int6464#9 | ||
836 | # asm 2: por <xmm12=%xmm12,<xmm11=%xmm8 | ||
837 | por %xmm12,%xmm8 | ||
838 | |||
839 | # qhasm: xmm15 ^= xmm8 | ||
840 | # asm 1: pxor <xmm8=int6464#15,<xmm15=int6464#16 | ||
841 | # asm 2: pxor <xmm8=%xmm14,<xmm15=%xmm15 | ||
842 | pxor %xmm14,%xmm15 | ||
843 | |||
844 | # qhasm: xmm14 &= xmm12 | ||
845 | # asm 1: pand <xmm12=int6464#13,<xmm14=int6464#14 | ||
846 | # asm 2: pand <xmm12=%xmm12,<xmm14=%xmm13 | ||
847 | pand %xmm12,%xmm13 | ||
848 | |||
849 | # qhasm: xmm8 &= xmm9 | ||
850 | # asm 1: pand <xmm9=int6464#11,<xmm8=int6464#15 | ||
851 | # asm 2: pand <xmm9=%xmm10,<xmm8=%xmm14 | ||
852 | pand %xmm10,%xmm14 | ||
853 | |||
854 | # qhasm: xmm12 ^= xmm9 | ||
855 | # asm 1: pxor <xmm9=int6464#11,<xmm12=int6464#13 | ||
856 | # asm 2: pxor <xmm9=%xmm10,<xmm12=%xmm12 | ||
857 | pxor %xmm10,%xmm12 | ||
858 | |||
859 | # qhasm: xmm15 &= xmm12 | ||
860 | # asm 1: pand <xmm12=int6464#13,<xmm15=int6464#16 | ||
861 | # asm 2: pand <xmm12=%xmm12,<xmm15=%xmm15 | ||
862 | pand %xmm12,%xmm15 | ||
863 | |||
864 | # qhasm: xmm12 = xmm3 | ||
865 | # asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#11 | ||
866 | # asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm10 | ||
867 | movdqa %xmm3,%xmm10 | ||
868 | |||
869 | # qhasm: xmm12 ^= xmm0 | ||
870 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#11 | ||
871 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm10 | ||
872 | pxor %xmm0,%xmm10 | ||
873 | |||
874 | # qhasm: xmm13 &= xmm12 | ||
875 | # asm 1: pand <xmm12=int6464#11,<xmm13=int6464#12 | ||
876 | # asm 2: pand <xmm12=%xmm10,<xmm13=%xmm11 | ||
877 | pand %xmm10,%xmm11 | ||
878 | |||
879 | # qhasm: xmm11 ^= xmm13 | ||
880 | # asm 1: pxor <xmm13=int6464#12,<xmm11=int6464#9 | ||
881 | # asm 2: pxor <xmm13=%xmm11,<xmm11=%xmm8 | ||
882 | pxor %xmm11,%xmm8 | ||
883 | |||
884 | # qhasm: xmm10 ^= xmm13 | ||
885 | # asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10 | ||
886 | # asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9 | ||
887 | pxor %xmm11,%xmm9 | ||
888 | |||
889 | # qhasm: xmm13 = xmm7 | ||
890 | # asm 1: movdqa <xmm7=int6464#8,>xmm13=int6464#11 | ||
891 | # asm 2: movdqa <xmm7=%xmm7,>xmm13=%xmm10 | ||
892 | movdqa %xmm7,%xmm10 | ||
893 | |||
894 | # qhasm: xmm13 ^= xmm1 | ||
895 | # asm 1: pxor <xmm1=int6464#2,<xmm13=int6464#11 | ||
896 | # asm 2: pxor <xmm1=%xmm1,<xmm13=%xmm10 | ||
897 | pxor %xmm1,%xmm10 | ||
898 | |||
899 | # qhasm: xmm12 = xmm5 | ||
900 | # asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#12 | ||
901 | # asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm11 | ||
902 | movdqa %xmm5,%xmm11 | ||
903 | |||
904 | # qhasm: xmm9 = xmm13 | ||
905 | # asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13 | ||
906 | # asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12 | ||
907 | movdqa %xmm10,%xmm12 | ||
908 | |||
909 | # qhasm: xmm12 ^= xmm6 | ||
910 | # asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#12 | ||
911 | # asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm11 | ||
912 | pxor %xmm6,%xmm11 | ||
913 | |||
914 | # qhasm: xmm9 |= xmm12 | ||
915 | # asm 1: por <xmm12=int6464#12,<xmm9=int6464#13 | ||
916 | # asm 2: por <xmm12=%xmm11,<xmm9=%xmm12 | ||
917 | por %xmm11,%xmm12 | ||
918 | |||
919 | # qhasm: xmm13 &= xmm12 | ||
920 | # asm 1: pand <xmm12=int6464#12,<xmm13=int6464#11 | ||
921 | # asm 2: pand <xmm12=%xmm11,<xmm13=%xmm10 | ||
922 | pand %xmm11,%xmm10 | ||
923 | |||
924 | # qhasm: xmm8 ^= xmm13 | ||
925 | # asm 1: pxor <xmm13=int6464#11,<xmm8=int6464#15 | ||
926 | # asm 2: pxor <xmm13=%xmm10,<xmm8=%xmm14 | ||
927 | pxor %xmm10,%xmm14 | ||
928 | |||
929 | # qhasm: xmm11 ^= xmm15 | ||
930 | # asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#9 | ||
931 | # asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm8 | ||
932 | pxor %xmm15,%xmm8 | ||
933 | |||
934 | # qhasm: xmm10 ^= xmm14 | ||
935 | # asm 1: pxor <xmm14=int6464#14,<xmm10=int6464#10 | ||
936 | # asm 2: pxor <xmm14=%xmm13,<xmm10=%xmm9 | ||
937 | pxor %xmm13,%xmm9 | ||
938 | |||
939 | # qhasm: xmm9 ^= xmm15 | ||
940 | # asm 1: pxor <xmm15=int6464#16,<xmm9=int6464#13 | ||
941 | # asm 2: pxor <xmm15=%xmm15,<xmm9=%xmm12 | ||
942 | pxor %xmm15,%xmm12 | ||
943 | |||
944 | # qhasm: xmm8 ^= xmm14 | ||
945 | # asm 1: pxor <xmm14=int6464#14,<xmm8=int6464#15 | ||
946 | # asm 2: pxor <xmm14=%xmm13,<xmm8=%xmm14 | ||
947 | pxor %xmm13,%xmm14 | ||
948 | |||
949 | # qhasm: xmm9 ^= xmm14 | ||
950 | # asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13 | ||
951 | # asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12 | ||
952 | pxor %xmm13,%xmm12 | ||
953 | |||
954 | # qhasm: xmm12 = xmm2 | ||
955 | # asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#11 | ||
956 | # asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm10 | ||
957 | movdqa %xmm2,%xmm10 | ||
958 | |||
959 | # qhasm: xmm13 = xmm4 | ||
960 | # asm 1: movdqa <xmm4=int6464#5,>xmm13=int6464#12 | ||
961 | # asm 2: movdqa <xmm4=%xmm4,>xmm13=%xmm11 | ||
962 | movdqa %xmm4,%xmm11 | ||
963 | |||
964 | # qhasm: xmm14 = xmm1 | ||
965 | # asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14 | ||
966 | # asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13 | ||
967 | movdqa %xmm1,%xmm13 | ||
968 | |||
969 | # qhasm: xmm15 = xmm7 | ||
970 | # asm 1: movdqa <xmm7=int6464#8,>xmm15=int6464#16 | ||
971 | # asm 2: movdqa <xmm7=%xmm7,>xmm15=%xmm15 | ||
972 | movdqa %xmm7,%xmm15 | ||
973 | |||
974 | # qhasm: xmm12 &= xmm3 | ||
975 | # asm 1: pand <xmm3=int6464#4,<xmm12=int6464#11 | ||
976 | # asm 2: pand <xmm3=%xmm3,<xmm12=%xmm10 | ||
977 | pand %xmm3,%xmm10 | ||
978 | |||
979 | # qhasm: xmm13 &= xmm0 | ||
980 | # asm 1: pand <xmm0=int6464#1,<xmm13=int6464#12 | ||
981 | # asm 2: pand <xmm0=%xmm0,<xmm13=%xmm11 | ||
982 | pand %xmm0,%xmm11 | ||
983 | |||
984 | # qhasm: xmm14 &= xmm5 | ||
985 | # asm 1: pand <xmm5=int6464#6,<xmm14=int6464#14 | ||
986 | # asm 2: pand <xmm5=%xmm5,<xmm14=%xmm13 | ||
987 | pand %xmm5,%xmm13 | ||
988 | |||
989 | # qhasm: xmm15 |= xmm6 | ||
990 | # asm 1: por <xmm6=int6464#7,<xmm15=int6464#16 | ||
991 | # asm 2: por <xmm6=%xmm6,<xmm15=%xmm15 | ||
992 | por %xmm6,%xmm15 | ||
993 | |||
994 | # qhasm: xmm11 ^= xmm12 | ||
995 | # asm 1: pxor <xmm12=int6464#11,<xmm11=int6464#9 | ||
996 | # asm 2: pxor <xmm12=%xmm10,<xmm11=%xmm8 | ||
997 | pxor %xmm10,%xmm8 | ||
998 | |||
999 | # qhasm: xmm10 ^= xmm13 | ||
1000 | # asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10 | ||
1001 | # asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9 | ||
1002 | pxor %xmm11,%xmm9 | ||
1003 | |||
1004 | # qhasm: xmm9 ^= xmm14 | ||
1005 | # asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13 | ||
1006 | # asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12 | ||
1007 | pxor %xmm13,%xmm12 | ||
1008 | |||
1009 | # qhasm: xmm8 ^= xmm15 | ||
1010 | # asm 1: pxor <xmm15=int6464#16,<xmm8=int6464#15 | ||
1011 | # asm 2: pxor <xmm15=%xmm15,<xmm8=%xmm14 | ||
1012 | pxor %xmm15,%xmm14 | ||
1013 | |||
1014 | # qhasm: xmm12 = xmm11 | ||
1015 | # asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11 | ||
1016 | # asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10 | ||
1017 | movdqa %xmm8,%xmm10 | ||
1018 | |||
1019 | # qhasm: xmm12 ^= xmm10 | ||
1020 | # asm 1: pxor <xmm10=int6464#10,<xmm12=int6464#11 | ||
1021 | # asm 2: pxor <xmm10=%xmm9,<xmm12=%xmm10 | ||
1022 | pxor %xmm9,%xmm10 | ||
1023 | |||
1024 | # qhasm: xmm11 &= xmm9 | ||
1025 | # asm 1: pand <xmm9=int6464#13,<xmm11=int6464#9 | ||
1026 | # asm 2: pand <xmm9=%xmm12,<xmm11=%xmm8 | ||
1027 | pand %xmm12,%xmm8 | ||
1028 | |||
1029 | # qhasm: xmm14 = xmm8 | ||
1030 | # asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12 | ||
1031 | # asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11 | ||
1032 | movdqa %xmm14,%xmm11 | ||
1033 | |||
1034 | # qhasm: xmm14 ^= xmm11 | ||
1035 | # asm 1: pxor <xmm11=int6464#9,<xmm14=int6464#12 | ||
1036 | # asm 2: pxor <xmm11=%xmm8,<xmm14=%xmm11 | ||
1037 | pxor %xmm8,%xmm11 | ||
1038 | |||
1039 | # qhasm: xmm15 = xmm12 | ||
1040 | # asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14 | ||
1041 | # asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13 | ||
1042 | movdqa %xmm10,%xmm13 | ||
1043 | |||
1044 | # qhasm: xmm15 &= xmm14 | ||
1045 | # asm 1: pand <xmm14=int6464#12,<xmm15=int6464#14 | ||
1046 | # asm 2: pand <xmm14=%xmm11,<xmm15=%xmm13 | ||
1047 | pand %xmm11,%xmm13 | ||
1048 | |||
1049 | # qhasm: xmm15 ^= xmm10 | ||
1050 | # asm 1: pxor <xmm10=int6464#10,<xmm15=int6464#14 | ||
1051 | # asm 2: pxor <xmm10=%xmm9,<xmm15=%xmm13 | ||
1052 | pxor %xmm9,%xmm13 | ||
1053 | |||
1054 | # qhasm: xmm13 = xmm9 | ||
1055 | # asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16 | ||
1056 | # asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15 | ||
1057 | movdqa %xmm12,%xmm15 | ||
1058 | |||
1059 | # qhasm: xmm13 ^= xmm8 | ||
1060 | # asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16 | ||
1061 | # asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15 | ||
1062 | pxor %xmm14,%xmm15 | ||
1063 | |||
1064 | # qhasm: xmm11 ^= xmm10 | ||
1065 | # asm 1: pxor <xmm10=int6464#10,<xmm11=int6464#9 | ||
1066 | # asm 2: pxor <xmm10=%xmm9,<xmm11=%xmm8 | ||
1067 | pxor %xmm9,%xmm8 | ||
1068 | |||
1069 | # qhasm: xmm13 &= xmm11 | ||
1070 | # asm 1: pand <xmm11=int6464#9,<xmm13=int6464#16 | ||
1071 | # asm 2: pand <xmm11=%xmm8,<xmm13=%xmm15 | ||
1072 | pand %xmm8,%xmm15 | ||
1073 | |||
1074 | # qhasm: xmm13 ^= xmm8 | ||
1075 | # asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16 | ||
1076 | # asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15 | ||
1077 | pxor %xmm14,%xmm15 | ||
1078 | |||
1079 | # qhasm: xmm9 ^= xmm13 | ||
1080 | # asm 1: pxor <xmm13=int6464#16,<xmm9=int6464#13 | ||
1081 | # asm 2: pxor <xmm13=%xmm15,<xmm9=%xmm12 | ||
1082 | pxor %xmm15,%xmm12 | ||
1083 | |||
1084 | # qhasm: xmm10 = xmm14 | ||
1085 | # asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9 | ||
1086 | # asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8 | ||
1087 | movdqa %xmm11,%xmm8 | ||
1088 | |||
1089 | # qhasm: xmm10 ^= xmm13 | ||
1090 | # asm 1: pxor <xmm13=int6464#16,<xmm10=int6464#9 | ||
1091 | # asm 2: pxor <xmm13=%xmm15,<xmm10=%xmm8 | ||
1092 | pxor %xmm15,%xmm8 | ||
1093 | |||
1094 | # qhasm: xmm10 &= xmm8 | ||
1095 | # asm 1: pand <xmm8=int6464#15,<xmm10=int6464#9 | ||
1096 | # asm 2: pand <xmm8=%xmm14,<xmm10=%xmm8 | ||
1097 | pand %xmm14,%xmm8 | ||
1098 | |||
1099 | # qhasm: xmm9 ^= xmm10 | ||
1100 | # asm 1: pxor <xmm10=int6464#9,<xmm9=int6464#13 | ||
1101 | # asm 2: pxor <xmm10=%xmm8,<xmm9=%xmm12 | ||
1102 | pxor %xmm8,%xmm12 | ||
1103 | |||
1104 | # qhasm: xmm14 ^= xmm10 | ||
1105 | # asm 1: pxor <xmm10=int6464#9,<xmm14=int6464#12 | ||
1106 | # asm 2: pxor <xmm10=%xmm8,<xmm14=%xmm11 | ||
1107 | pxor %xmm8,%xmm11 | ||
1108 | |||
1109 | # qhasm: xmm14 &= xmm15 | ||
1110 | # asm 1: pand <xmm15=int6464#14,<xmm14=int6464#12 | ||
1111 | # asm 2: pand <xmm15=%xmm13,<xmm14=%xmm11 | ||
1112 | pand %xmm13,%xmm11 | ||
1113 | |||
1114 | # qhasm: xmm14 ^= xmm12 | ||
1115 | # asm 1: pxor <xmm12=int6464#11,<xmm14=int6464#12 | ||
1116 | # asm 2: pxor <xmm12=%xmm10,<xmm14=%xmm11 | ||
1117 | pxor %xmm10,%xmm11 | ||
1118 | |||
1119 | # qhasm: xmm12 = xmm6 | ||
1120 | # asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#9 | ||
1121 | # asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm8 | ||
1122 | movdqa %xmm6,%xmm8 | ||
1123 | |||
1124 | # qhasm: xmm8 = xmm5 | ||
1125 | # asm 1: movdqa <xmm5=int6464#6,>xmm8=int6464#10 | ||
1126 | # asm 2: movdqa <xmm5=%xmm5,>xmm8=%xmm9 | ||
1127 | movdqa %xmm5,%xmm9 | ||
1128 | |||
1129 | # qhasm: xmm10 = xmm15 | ||
1130 | # asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11 | ||
1131 | # asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10 | ||
1132 | movdqa %xmm13,%xmm10 | ||
1133 | |||
1134 | # qhasm: xmm10 ^= xmm14 | ||
1135 | # asm 1: pxor <xmm14=int6464#12,<xmm10=int6464#11 | ||
1136 | # asm 2: pxor <xmm14=%xmm11,<xmm10=%xmm10 | ||
1137 | pxor %xmm11,%xmm10 | ||
1138 | |||
1139 | # qhasm: xmm10 &= xmm6 | ||
1140 | # asm 1: pand <xmm6=int6464#7,<xmm10=int6464#11 | ||
1141 | # asm 2: pand <xmm6=%xmm6,<xmm10=%xmm10 | ||
1142 | pand %xmm6,%xmm10 | ||
1143 | |||
1144 | # qhasm: xmm6 ^= xmm5 | ||
1145 | # asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7 | ||
1146 | # asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6 | ||
1147 | pxor %xmm5,%xmm6 | ||
1148 | |||
1149 | # qhasm: xmm6 &= xmm14 | ||
1150 | # asm 1: pand <xmm14=int6464#12,<xmm6=int6464#7 | ||
1151 | # asm 2: pand <xmm14=%xmm11,<xmm6=%xmm6 | ||
1152 | pand %xmm11,%xmm6 | ||
1153 | |||
1154 | # qhasm: xmm5 &= xmm15 | ||
1155 | # asm 1: pand <xmm15=int6464#14,<xmm5=int6464#6 | ||
1156 | # asm 2: pand <xmm15=%xmm13,<xmm5=%xmm5 | ||
1157 | pand %xmm13,%xmm5 | ||
1158 | |||
1159 | # qhasm: xmm6 ^= xmm5 | ||
1160 | # asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7 | ||
1161 | # asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6 | ||
1162 | pxor %xmm5,%xmm6 | ||
1163 | |||
1164 | # qhasm: xmm5 ^= xmm10 | ||
1165 | # asm 1: pxor <xmm10=int6464#11,<xmm5=int6464#6 | ||
1166 | # asm 2: pxor <xmm10=%xmm10,<xmm5=%xmm5 | ||
1167 | pxor %xmm10,%xmm5 | ||
1168 | |||
1169 | # qhasm: xmm12 ^= xmm0 | ||
1170 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#9 | ||
1171 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm8 | ||
1172 | pxor %xmm0,%xmm8 | ||
1173 | |||
1174 | # qhasm: xmm8 ^= xmm3 | ||
1175 | # asm 1: pxor <xmm3=int6464#4,<xmm8=int6464#10 | ||
1176 | # asm 2: pxor <xmm3=%xmm3,<xmm8=%xmm9 | ||
1177 | pxor %xmm3,%xmm9 | ||
1178 | |||
1179 | # qhasm: xmm15 ^= xmm13 | ||
1180 | # asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14 | ||
1181 | # asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13 | ||
1182 | pxor %xmm15,%xmm13 | ||
1183 | |||
1184 | # qhasm: xmm14 ^= xmm9 | ||
1185 | # asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12 | ||
1186 | # asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11 | ||
1187 | pxor %xmm12,%xmm11 | ||
1188 | |||
1189 | # qhasm: xmm11 = xmm15 | ||
1190 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
1191 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
1192 | movdqa %xmm13,%xmm10 | ||
1193 | |||
1194 | # qhasm: xmm11 ^= xmm14 | ||
1195 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
1196 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
1197 | pxor %xmm11,%xmm10 | ||
1198 | |||
1199 | # qhasm: xmm11 &= xmm12 | ||
1200 | # asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11 | ||
1201 | # asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10 | ||
1202 | pand %xmm8,%xmm10 | ||
1203 | |||
1204 | # qhasm: xmm12 ^= xmm8 | ||
1205 | # asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9 | ||
1206 | # asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8 | ||
1207 | pxor %xmm9,%xmm8 | ||
1208 | |||
1209 | # qhasm: xmm12 &= xmm14 | ||
1210 | # asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9 | ||
1211 | # asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8 | ||
1212 | pand %xmm11,%xmm8 | ||
1213 | |||
1214 | # qhasm: xmm8 &= xmm15 | ||
1215 | # asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10 | ||
1216 | # asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9 | ||
1217 | pand %xmm13,%xmm9 | ||
1218 | |||
1219 | # qhasm: xmm8 ^= xmm12 | ||
1220 | # asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10 | ||
1221 | # asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9 | ||
1222 | pxor %xmm8,%xmm9 | ||
1223 | |||
1224 | # qhasm: xmm12 ^= xmm11 | ||
1225 | # asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9 | ||
1226 | # asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8 | ||
1227 | pxor %xmm10,%xmm8 | ||
1228 | |||
1229 | # qhasm: xmm10 = xmm13 | ||
1230 | # asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11 | ||
1231 | # asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10 | ||
1232 | movdqa %xmm15,%xmm10 | ||
1233 | |||
1234 | # qhasm: xmm10 ^= xmm9 | ||
1235 | # asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11 | ||
1236 | # asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10 | ||
1237 | pxor %xmm12,%xmm10 | ||
1238 | |||
1239 | # qhasm: xmm10 &= xmm0 | ||
1240 | # asm 1: pand <xmm0=int6464#1,<xmm10=int6464#11 | ||
1241 | # asm 2: pand <xmm0=%xmm0,<xmm10=%xmm10 | ||
1242 | pand %xmm0,%xmm10 | ||
1243 | |||
1244 | # qhasm: xmm0 ^= xmm3 | ||
1245 | # asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1 | ||
1246 | # asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0 | ||
1247 | pxor %xmm3,%xmm0 | ||
1248 | |||
1249 | # qhasm: xmm0 &= xmm9 | ||
1250 | # asm 1: pand <xmm9=int6464#13,<xmm0=int6464#1 | ||
1251 | # asm 2: pand <xmm9=%xmm12,<xmm0=%xmm0 | ||
1252 | pand %xmm12,%xmm0 | ||
1253 | |||
1254 | # qhasm: xmm3 &= xmm13 | ||
1255 | # asm 1: pand <xmm13=int6464#16,<xmm3=int6464#4 | ||
1256 | # asm 2: pand <xmm13=%xmm15,<xmm3=%xmm3 | ||
1257 | pand %xmm15,%xmm3 | ||
1258 | |||
1259 | # qhasm: xmm0 ^= xmm3 | ||
1260 | # asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1 | ||
1261 | # asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0 | ||
1262 | pxor %xmm3,%xmm0 | ||
1263 | |||
1264 | # qhasm: xmm3 ^= xmm10 | ||
1265 | # asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4 | ||
1266 | # asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3 | ||
1267 | pxor %xmm10,%xmm3 | ||
1268 | |||
1269 | # qhasm: xmm6 ^= xmm12 | ||
1270 | # asm 1: pxor <xmm12=int6464#9,<xmm6=int6464#7 | ||
1271 | # asm 2: pxor <xmm12=%xmm8,<xmm6=%xmm6 | ||
1272 | pxor %xmm8,%xmm6 | ||
1273 | |||
1274 | # qhasm: xmm0 ^= xmm12 | ||
1275 | # asm 1: pxor <xmm12=int6464#9,<xmm0=int6464#1 | ||
1276 | # asm 2: pxor <xmm12=%xmm8,<xmm0=%xmm0 | ||
1277 | pxor %xmm8,%xmm0 | ||
1278 | |||
1279 | # qhasm: xmm5 ^= xmm8 | ||
1280 | # asm 1: pxor <xmm8=int6464#10,<xmm5=int6464#6 | ||
1281 | # asm 2: pxor <xmm8=%xmm9,<xmm5=%xmm5 | ||
1282 | pxor %xmm9,%xmm5 | ||
1283 | |||
1284 | # qhasm: xmm3 ^= xmm8 | ||
1285 | # asm 1: pxor <xmm8=int6464#10,<xmm3=int6464#4 | ||
1286 | # asm 2: pxor <xmm8=%xmm9,<xmm3=%xmm3 | ||
1287 | pxor %xmm9,%xmm3 | ||
1288 | |||
1289 | # qhasm: xmm12 = xmm7 | ||
1290 | # asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#9 | ||
1291 | # asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm8 | ||
1292 | movdqa %xmm7,%xmm8 | ||
1293 | |||
1294 | # qhasm: xmm8 = xmm1 | ||
1295 | # asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10 | ||
1296 | # asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9 | ||
1297 | movdqa %xmm1,%xmm9 | ||
1298 | |||
1299 | # qhasm: xmm12 ^= xmm4 | ||
1300 | # asm 1: pxor <xmm4=int6464#5,<xmm12=int6464#9 | ||
1301 | # asm 2: pxor <xmm4=%xmm4,<xmm12=%xmm8 | ||
1302 | pxor %xmm4,%xmm8 | ||
1303 | |||
1304 | # qhasm: xmm8 ^= xmm2 | ||
1305 | # asm 1: pxor <xmm2=int6464#3,<xmm8=int6464#10 | ||
1306 | # asm 2: pxor <xmm2=%xmm2,<xmm8=%xmm9 | ||
1307 | pxor %xmm2,%xmm9 | ||
1308 | |||
1309 | # qhasm: xmm11 = xmm15 | ||
1310 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
1311 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
1312 | movdqa %xmm13,%xmm10 | ||
1313 | |||
1314 | # qhasm: xmm11 ^= xmm14 | ||
1315 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
1316 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
1317 | pxor %xmm11,%xmm10 | ||
1318 | |||
1319 | # qhasm: xmm11 &= xmm12 | ||
1320 | # asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11 | ||
1321 | # asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10 | ||
1322 | pand %xmm8,%xmm10 | ||
1323 | |||
1324 | # qhasm: xmm12 ^= xmm8 | ||
1325 | # asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9 | ||
1326 | # asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8 | ||
1327 | pxor %xmm9,%xmm8 | ||
1328 | |||
1329 | # qhasm: xmm12 &= xmm14 | ||
1330 | # asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9 | ||
1331 | # asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8 | ||
1332 | pand %xmm11,%xmm8 | ||
1333 | |||
1334 | # qhasm: xmm8 &= xmm15 | ||
1335 | # asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10 | ||
1336 | # asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9 | ||
1337 | pand %xmm13,%xmm9 | ||
1338 | |||
1339 | # qhasm: xmm8 ^= xmm12 | ||
1340 | # asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10 | ||
1341 | # asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9 | ||
1342 | pxor %xmm8,%xmm9 | ||
1343 | |||
1344 | # qhasm: xmm12 ^= xmm11 | ||
1345 | # asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9 | ||
1346 | # asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8 | ||
1347 | pxor %xmm10,%xmm8 | ||
1348 | |||
1349 | # qhasm: xmm10 = xmm13 | ||
1350 | # asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11 | ||
1351 | # asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10 | ||
1352 | movdqa %xmm15,%xmm10 | ||
1353 | |||
1354 | # qhasm: xmm10 ^= xmm9 | ||
1355 | # asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11 | ||
1356 | # asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10 | ||
1357 | pxor %xmm12,%xmm10 | ||
1358 | |||
1359 | # qhasm: xmm10 &= xmm4 | ||
1360 | # asm 1: pand <xmm4=int6464#5,<xmm10=int6464#11 | ||
1361 | # asm 2: pand <xmm4=%xmm4,<xmm10=%xmm10 | ||
1362 | pand %xmm4,%xmm10 | ||
1363 | |||
1364 | # qhasm: xmm4 ^= xmm2 | ||
1365 | # asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5 | ||
1366 | # asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4 | ||
1367 | pxor %xmm2,%xmm4 | ||
1368 | |||
1369 | # qhasm: xmm4 &= xmm9 | ||
1370 | # asm 1: pand <xmm9=int6464#13,<xmm4=int6464#5 | ||
1371 | # asm 2: pand <xmm9=%xmm12,<xmm4=%xmm4 | ||
1372 | pand %xmm12,%xmm4 | ||
1373 | |||
1374 | # qhasm: xmm2 &= xmm13 | ||
1375 | # asm 1: pand <xmm13=int6464#16,<xmm2=int6464#3 | ||
1376 | # asm 2: pand <xmm13=%xmm15,<xmm2=%xmm2 | ||
1377 | pand %xmm15,%xmm2 | ||
1378 | |||
1379 | # qhasm: xmm4 ^= xmm2 | ||
1380 | # asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5 | ||
1381 | # asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4 | ||
1382 | pxor %xmm2,%xmm4 | ||
1383 | |||
1384 | # qhasm: xmm2 ^= xmm10 | ||
1385 | # asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3 | ||
1386 | # asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2 | ||
1387 | pxor %xmm10,%xmm2 | ||
1388 | |||
1389 | # qhasm: xmm15 ^= xmm13 | ||
1390 | # asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14 | ||
1391 | # asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13 | ||
1392 | pxor %xmm15,%xmm13 | ||
1393 | |||
1394 | # qhasm: xmm14 ^= xmm9 | ||
1395 | # asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12 | ||
1396 | # asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11 | ||
1397 | pxor %xmm12,%xmm11 | ||
1398 | |||
1399 | # qhasm: xmm11 = xmm15 | ||
1400 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
1401 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
1402 | movdqa %xmm13,%xmm10 | ||
1403 | |||
1404 | # qhasm: xmm11 ^= xmm14 | ||
1405 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
1406 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
1407 | pxor %xmm11,%xmm10 | ||
1408 | |||
1409 | # qhasm: xmm11 &= xmm7 | ||
1410 | # asm 1: pand <xmm7=int6464#8,<xmm11=int6464#11 | ||
1411 | # asm 2: pand <xmm7=%xmm7,<xmm11=%xmm10 | ||
1412 | pand %xmm7,%xmm10 | ||
1413 | |||
1414 | # qhasm: xmm7 ^= xmm1 | ||
1415 | # asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8 | ||
1416 | # asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7 | ||
1417 | pxor %xmm1,%xmm7 | ||
1418 | |||
1419 | # qhasm: xmm7 &= xmm14 | ||
1420 | # asm 1: pand <xmm14=int6464#12,<xmm7=int6464#8 | ||
1421 | # asm 2: pand <xmm14=%xmm11,<xmm7=%xmm7 | ||
1422 | pand %xmm11,%xmm7 | ||
1423 | |||
1424 | # qhasm: xmm1 &= xmm15 | ||
1425 | # asm 1: pand <xmm15=int6464#14,<xmm1=int6464#2 | ||
1426 | # asm 2: pand <xmm15=%xmm13,<xmm1=%xmm1 | ||
1427 | pand %xmm13,%xmm1 | ||
1428 | |||
1429 | # qhasm: xmm7 ^= xmm1 | ||
1430 | # asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8 | ||
1431 | # asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7 | ||
1432 | pxor %xmm1,%xmm7 | ||
1433 | |||
1434 | # qhasm: xmm1 ^= xmm11 | ||
1435 | # asm 1: pxor <xmm11=int6464#11,<xmm1=int6464#2 | ||
1436 | # asm 2: pxor <xmm11=%xmm10,<xmm1=%xmm1 | ||
1437 | pxor %xmm10,%xmm1 | ||
1438 | |||
1439 | # qhasm: xmm7 ^= xmm12 | ||
1440 | # asm 1: pxor <xmm12=int6464#9,<xmm7=int6464#8 | ||
1441 | # asm 2: pxor <xmm12=%xmm8,<xmm7=%xmm7 | ||
1442 | pxor %xmm8,%xmm7 | ||
1443 | |||
1444 | # qhasm: xmm4 ^= xmm12 | ||
1445 | # asm 1: pxor <xmm12=int6464#9,<xmm4=int6464#5 | ||
1446 | # asm 2: pxor <xmm12=%xmm8,<xmm4=%xmm4 | ||
1447 | pxor %xmm8,%xmm4 | ||
1448 | |||
1449 | # qhasm: xmm1 ^= xmm8 | ||
1450 | # asm 1: pxor <xmm8=int6464#10,<xmm1=int6464#2 | ||
1451 | # asm 2: pxor <xmm8=%xmm9,<xmm1=%xmm1 | ||
1452 | pxor %xmm9,%xmm1 | ||
1453 | |||
1454 | # qhasm: xmm2 ^= xmm8 | ||
1455 | # asm 1: pxor <xmm8=int6464#10,<xmm2=int6464#3 | ||
1456 | # asm 2: pxor <xmm8=%xmm9,<xmm2=%xmm2 | ||
1457 | pxor %xmm9,%xmm2 | ||
1458 | |||
1459 | # qhasm: xmm7 ^= xmm0 | ||
1460 | # asm 1: pxor <xmm0=int6464#1,<xmm7=int6464#8 | ||
1461 | # asm 2: pxor <xmm0=%xmm0,<xmm7=%xmm7 | ||
1462 | pxor %xmm0,%xmm7 | ||
1463 | |||
1464 | # qhasm: xmm1 ^= xmm6 | ||
1465 | # asm 1: pxor <xmm6=int6464#7,<xmm1=int6464#2 | ||
1466 | # asm 2: pxor <xmm6=%xmm6,<xmm1=%xmm1 | ||
1467 | pxor %xmm6,%xmm1 | ||
1468 | |||
1469 | # qhasm: xmm4 ^= xmm7 | ||
1470 | # asm 1: pxor <xmm7=int6464#8,<xmm4=int6464#5 | ||
1471 | # asm 2: pxor <xmm7=%xmm7,<xmm4=%xmm4 | ||
1472 | pxor %xmm7,%xmm4 | ||
1473 | |||
1474 | # qhasm: xmm6 ^= xmm0 | ||
1475 | # asm 1: pxor <xmm0=int6464#1,<xmm6=int6464#7 | ||
1476 | # asm 2: pxor <xmm0=%xmm0,<xmm6=%xmm6 | ||
1477 | pxor %xmm0,%xmm6 | ||
1478 | |||
1479 | # qhasm: xmm0 ^= xmm1 | ||
1480 | # asm 1: pxor <xmm1=int6464#2,<xmm0=int6464#1 | ||
1481 | # asm 2: pxor <xmm1=%xmm1,<xmm0=%xmm0 | ||
1482 | pxor %xmm1,%xmm0 | ||
1483 | |||
1484 | # qhasm: xmm1 ^= xmm5 | ||
1485 | # asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2 | ||
1486 | # asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1 | ||
1487 | pxor %xmm5,%xmm1 | ||
1488 | |||
1489 | # qhasm: xmm5 ^= xmm2 | ||
1490 | # asm 1: pxor <xmm2=int6464#3,<xmm5=int6464#6 | ||
1491 | # asm 2: pxor <xmm2=%xmm2,<xmm5=%xmm5 | ||
1492 | pxor %xmm2,%xmm5 | ||
1493 | |||
1494 | # qhasm: xmm4 ^= xmm5 | ||
1495 | # asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5 | ||
1496 | # asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4 | ||
1497 | pxor %xmm5,%xmm4 | ||
1498 | |||
1499 | # qhasm: xmm2 ^= xmm3 | ||
1500 | # asm 1: pxor <xmm3=int6464#4,<xmm2=int6464#3 | ||
1501 | # asm 2: pxor <xmm3=%xmm3,<xmm2=%xmm2 | ||
1502 | pxor %xmm3,%xmm2 | ||
1503 | |||
1504 | # qhasm: xmm3 ^= xmm5 | ||
1505 | # asm 1: pxor <xmm5=int6464#6,<xmm3=int6464#4 | ||
1506 | # asm 2: pxor <xmm5=%xmm5,<xmm3=%xmm3 | ||
1507 | pxor %xmm5,%xmm3 | ||
1508 | |||
1509 | # qhasm: xmm6 ^= xmm3 | ||
1510 | # asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7 | ||
1511 | # asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6 | ||
1512 | pxor %xmm3,%xmm6 | ||
1513 | |||
1514 | # qhasm: xmm0 ^= RCON | ||
1515 | # asm 1: pxor RCON,<xmm0=int6464#1 | ||
1516 | # asm 2: pxor RCON,<xmm0=%xmm0 | ||
1517 | pxor RCON,%xmm0 | ||
1518 | |||
1519 | # qhasm: shuffle bytes of xmm0 by EXPB0 | ||
1520 | # asm 1: pshufb EXPB0,<xmm0=int6464#1 | ||
1521 | # asm 2: pshufb EXPB0,<xmm0=%xmm0 | ||
1522 | pshufb EXPB0,%xmm0 | ||
1523 | |||
1524 | # qhasm: shuffle bytes of xmm1 by EXPB0 | ||
1525 | # asm 1: pshufb EXPB0,<xmm1=int6464#2 | ||
1526 | # asm 2: pshufb EXPB0,<xmm1=%xmm1 | ||
1527 | pshufb EXPB0,%xmm1 | ||
1528 | |||
1529 | # qhasm: shuffle bytes of xmm4 by EXPB0 | ||
1530 | # asm 1: pshufb EXPB0,<xmm4=int6464#5 | ||
1531 | # asm 2: pshufb EXPB0,<xmm4=%xmm4 | ||
1532 | pshufb EXPB0,%xmm4 | ||
1533 | |||
1534 | # qhasm: shuffle bytes of xmm6 by EXPB0 | ||
1535 | # asm 1: pshufb EXPB0,<xmm6=int6464#7 | ||
1536 | # asm 2: pshufb EXPB0,<xmm6=%xmm6 | ||
1537 | pshufb EXPB0,%xmm6 | ||
1538 | |||
1539 | # qhasm: shuffle bytes of xmm3 by EXPB0 | ||
1540 | # asm 1: pshufb EXPB0,<xmm3=int6464#4 | ||
1541 | # asm 2: pshufb EXPB0,<xmm3=%xmm3 | ||
1542 | pshufb EXPB0,%xmm3 | ||
1543 | |||
1544 | # qhasm: shuffle bytes of xmm7 by EXPB0 | ||
1545 | # asm 1: pshufb EXPB0,<xmm7=int6464#8 | ||
1546 | # asm 2: pshufb EXPB0,<xmm7=%xmm7 | ||
1547 | pshufb EXPB0,%xmm7 | ||
1548 | |||
1549 | # qhasm: shuffle bytes of xmm2 by EXPB0 | ||
1550 | # asm 1: pshufb EXPB0,<xmm2=int6464#3 | ||
1551 | # asm 2: pshufb EXPB0,<xmm2=%xmm2 | ||
1552 | pshufb EXPB0,%xmm2 | ||
1553 | |||
1554 | # qhasm: shuffle bytes of xmm5 by EXPB0 | ||
1555 | # asm 1: pshufb EXPB0,<xmm5=int6464#6 | ||
1556 | # asm 2: pshufb EXPB0,<xmm5=%xmm5 | ||
1557 | pshufb EXPB0,%xmm5 | ||
1558 | |||
1559 | # qhasm: xmm8 = *(int128 *)(c + 0) | ||
1560 | # asm 1: movdqa 0(<c=int64#1),>xmm8=int6464#9 | ||
1561 | # asm 2: movdqa 0(<c=%rdi),>xmm8=%xmm8 | ||
1562 | movdqa 0(%rdi),%xmm8 | ||
1563 | |||
1564 | # qhasm: xmm9 = *(int128 *)(c + 16) | ||
1565 | # asm 1: movdqa 16(<c=int64#1),>xmm9=int6464#10 | ||
1566 | # asm 2: movdqa 16(<c=%rdi),>xmm9=%xmm9 | ||
1567 | movdqa 16(%rdi),%xmm9 | ||
1568 | |||
1569 | # qhasm: xmm10 = *(int128 *)(c + 32) | ||
1570 | # asm 1: movdqa 32(<c=int64#1),>xmm10=int6464#11 | ||
1571 | # asm 2: movdqa 32(<c=%rdi),>xmm10=%xmm10 | ||
1572 | movdqa 32(%rdi),%xmm10 | ||
1573 | |||
1574 | # qhasm: xmm11 = *(int128 *)(c + 48) | ||
1575 | # asm 1: movdqa 48(<c=int64#1),>xmm11=int6464#12 | ||
1576 | # asm 2: movdqa 48(<c=%rdi),>xmm11=%xmm11 | ||
1577 | movdqa 48(%rdi),%xmm11 | ||
1578 | |||
1579 | # qhasm: xmm12 = *(int128 *)(c + 64) | ||
1580 | # asm 1: movdqa 64(<c=int64#1),>xmm12=int6464#13 | ||
1581 | # asm 2: movdqa 64(<c=%rdi),>xmm12=%xmm12 | ||
1582 | movdqa 64(%rdi),%xmm12 | ||
1583 | |||
1584 | # qhasm: xmm13 = *(int128 *)(c + 80) | ||
1585 | # asm 1: movdqa 80(<c=int64#1),>xmm13=int6464#14 | ||
1586 | # asm 2: movdqa 80(<c=%rdi),>xmm13=%xmm13 | ||
1587 | movdqa 80(%rdi),%xmm13 | ||
1588 | |||
1589 | # qhasm: xmm14 = *(int128 *)(c + 96) | ||
1590 | # asm 1: movdqa 96(<c=int64#1),>xmm14=int6464#15 | ||
1591 | # asm 2: movdqa 96(<c=%rdi),>xmm14=%xmm14 | ||
1592 | movdqa 96(%rdi),%xmm14 | ||
1593 | |||
1594 | # qhasm: xmm15 = *(int128 *)(c + 112) | ||
1595 | # asm 1: movdqa 112(<c=int64#1),>xmm15=int6464#16 | ||
1596 | # asm 2: movdqa 112(<c=%rdi),>xmm15=%xmm15 | ||
1597 | movdqa 112(%rdi),%xmm15 | ||
1598 | |||
1599 | # qhasm: xmm0 ^= xmm8 | ||
1600 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
1601 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
1602 | pxor %xmm8,%xmm0 | ||
1603 | |||
1604 | # qhasm: xmm1 ^= xmm9 | ||
1605 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
1606 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
1607 | pxor %xmm9,%xmm1 | ||
1608 | |||
1609 | # qhasm: xmm4 ^= xmm10 | ||
1610 | # asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5 | ||
1611 | # asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4 | ||
1612 | pxor %xmm10,%xmm4 | ||
1613 | |||
1614 | # qhasm: xmm6 ^= xmm11 | ||
1615 | # asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7 | ||
1616 | # asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6 | ||
1617 | pxor %xmm11,%xmm6 | ||
1618 | |||
1619 | # qhasm: xmm3 ^= xmm12 | ||
1620 | # asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4 | ||
1621 | # asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3 | ||
1622 | pxor %xmm12,%xmm3 | ||
1623 | |||
1624 | # qhasm: xmm7 ^= xmm13 | ||
1625 | # asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8 | ||
1626 | # asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7 | ||
1627 | pxor %xmm13,%xmm7 | ||
1628 | |||
1629 | # qhasm: xmm2 ^= xmm14 | ||
1630 | # asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3 | ||
1631 | # asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2 | ||
1632 | pxor %xmm14,%xmm2 | ||
1633 | |||
1634 | # qhasm: xmm5 ^= xmm15 | ||
1635 | # asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6 | ||
1636 | # asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5 | ||
1637 | pxor %xmm15,%xmm5 | ||
1638 | |||
1639 | # qhasm: uint32323232 xmm8 >>= 8 | ||
1640 | # asm 1: psrld $8,<xmm8=int6464#9 | ||
1641 | # asm 2: psrld $8,<xmm8=%xmm8 | ||
1642 | psrld $8,%xmm8 | ||
1643 | |||
1644 | # qhasm: uint32323232 xmm9 >>= 8 | ||
1645 | # asm 1: psrld $8,<xmm9=int6464#10 | ||
1646 | # asm 2: psrld $8,<xmm9=%xmm9 | ||
1647 | psrld $8,%xmm9 | ||
1648 | |||
1649 | # qhasm: uint32323232 xmm10 >>= 8 | ||
1650 | # asm 1: psrld $8,<xmm10=int6464#11 | ||
1651 | # asm 2: psrld $8,<xmm10=%xmm10 | ||
1652 | psrld $8,%xmm10 | ||
1653 | |||
1654 | # qhasm: uint32323232 xmm11 >>= 8 | ||
1655 | # asm 1: psrld $8,<xmm11=int6464#12 | ||
1656 | # asm 2: psrld $8,<xmm11=%xmm11 | ||
1657 | psrld $8,%xmm11 | ||
1658 | |||
1659 | # qhasm: uint32323232 xmm12 >>= 8 | ||
1660 | # asm 1: psrld $8,<xmm12=int6464#13 | ||
1661 | # asm 2: psrld $8,<xmm12=%xmm12 | ||
1662 | psrld $8,%xmm12 | ||
1663 | |||
1664 | # qhasm: uint32323232 xmm13 >>= 8 | ||
1665 | # asm 1: psrld $8,<xmm13=int6464#14 | ||
1666 | # asm 2: psrld $8,<xmm13=%xmm13 | ||
1667 | psrld $8,%xmm13 | ||
1668 | |||
1669 | # qhasm: uint32323232 xmm14 >>= 8 | ||
1670 | # asm 1: psrld $8,<xmm14=int6464#15 | ||
1671 | # asm 2: psrld $8,<xmm14=%xmm14 | ||
1672 | psrld $8,%xmm14 | ||
1673 | |||
1674 | # qhasm: uint32323232 xmm15 >>= 8 | ||
1675 | # asm 1: psrld $8,<xmm15=int6464#16 | ||
1676 | # asm 2: psrld $8,<xmm15=%xmm15 | ||
1677 | psrld $8,%xmm15 | ||
1678 | |||
1679 | # qhasm: xmm0 ^= xmm8 | ||
1680 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
1681 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
1682 | pxor %xmm8,%xmm0 | ||
1683 | |||
1684 | # qhasm: xmm1 ^= xmm9 | ||
1685 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
1686 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
1687 | pxor %xmm9,%xmm1 | ||
1688 | |||
1689 | # qhasm: xmm4 ^= xmm10 | ||
1690 | # asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5 | ||
1691 | # asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4 | ||
1692 | pxor %xmm10,%xmm4 | ||
1693 | |||
1694 | # qhasm: xmm6 ^= xmm11 | ||
1695 | # asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7 | ||
1696 | # asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6 | ||
1697 | pxor %xmm11,%xmm6 | ||
1698 | |||
1699 | # qhasm: xmm3 ^= xmm12 | ||
1700 | # asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4 | ||
1701 | # asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3 | ||
1702 | pxor %xmm12,%xmm3 | ||
1703 | |||
1704 | # qhasm: xmm7 ^= xmm13 | ||
1705 | # asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8 | ||
1706 | # asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7 | ||
1707 | pxor %xmm13,%xmm7 | ||
1708 | |||
1709 | # qhasm: xmm2 ^= xmm14 | ||
1710 | # asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3 | ||
1711 | # asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2 | ||
1712 | pxor %xmm14,%xmm2 | ||
1713 | |||
1714 | # qhasm: xmm5 ^= xmm15 | ||
1715 | # asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6 | ||
1716 | # asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5 | ||
1717 | pxor %xmm15,%xmm5 | ||
1718 | |||
1719 | # qhasm: uint32323232 xmm8 >>= 8 | ||
1720 | # asm 1: psrld $8,<xmm8=int6464#9 | ||
1721 | # asm 2: psrld $8,<xmm8=%xmm8 | ||
1722 | psrld $8,%xmm8 | ||
1723 | |||
1724 | # qhasm: uint32323232 xmm9 >>= 8 | ||
1725 | # asm 1: psrld $8,<xmm9=int6464#10 | ||
1726 | # asm 2: psrld $8,<xmm9=%xmm9 | ||
1727 | psrld $8,%xmm9 | ||
1728 | |||
1729 | # qhasm: uint32323232 xmm10 >>= 8 | ||
1730 | # asm 1: psrld $8,<xmm10=int6464#11 | ||
1731 | # asm 2: psrld $8,<xmm10=%xmm10 | ||
1732 | psrld $8,%xmm10 | ||
1733 | |||
1734 | # qhasm: uint32323232 xmm11 >>= 8 | ||
1735 | # asm 1: psrld $8,<xmm11=int6464#12 | ||
1736 | # asm 2: psrld $8,<xmm11=%xmm11 | ||
1737 | psrld $8,%xmm11 | ||
1738 | |||
1739 | # qhasm: uint32323232 xmm12 >>= 8 | ||
1740 | # asm 1: psrld $8,<xmm12=int6464#13 | ||
1741 | # asm 2: psrld $8,<xmm12=%xmm12 | ||
1742 | psrld $8,%xmm12 | ||
1743 | |||
1744 | # qhasm: uint32323232 xmm13 >>= 8 | ||
1745 | # asm 1: psrld $8,<xmm13=int6464#14 | ||
1746 | # asm 2: psrld $8,<xmm13=%xmm13 | ||
1747 | psrld $8,%xmm13 | ||
1748 | |||
1749 | # qhasm: uint32323232 xmm14 >>= 8 | ||
1750 | # asm 1: psrld $8,<xmm14=int6464#15 | ||
1751 | # asm 2: psrld $8,<xmm14=%xmm14 | ||
1752 | psrld $8,%xmm14 | ||
1753 | |||
1754 | # qhasm: uint32323232 xmm15 >>= 8 | ||
1755 | # asm 1: psrld $8,<xmm15=int6464#16 | ||
1756 | # asm 2: psrld $8,<xmm15=%xmm15 | ||
1757 | psrld $8,%xmm15 | ||
1758 | |||
1759 | # qhasm: xmm0 ^= xmm8 | ||
1760 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
1761 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
1762 | pxor %xmm8,%xmm0 | ||
1763 | |||
1764 | # qhasm: xmm1 ^= xmm9 | ||
1765 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
1766 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
1767 | pxor %xmm9,%xmm1 | ||
1768 | |||
1769 | # qhasm: xmm4 ^= xmm10 | ||
1770 | # asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5 | ||
1771 | # asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4 | ||
1772 | pxor %xmm10,%xmm4 | ||
1773 | |||
1774 | # qhasm: xmm6 ^= xmm11 | ||
1775 | # asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7 | ||
1776 | # asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6 | ||
1777 | pxor %xmm11,%xmm6 | ||
1778 | |||
1779 | # qhasm: xmm3 ^= xmm12 | ||
1780 | # asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4 | ||
1781 | # asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3 | ||
1782 | pxor %xmm12,%xmm3 | ||
1783 | |||
1784 | # qhasm: xmm7 ^= xmm13 | ||
1785 | # asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8 | ||
1786 | # asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7 | ||
1787 | pxor %xmm13,%xmm7 | ||
1788 | |||
1789 | # qhasm: xmm2 ^= xmm14 | ||
1790 | # asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3 | ||
1791 | # asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2 | ||
1792 | pxor %xmm14,%xmm2 | ||
1793 | |||
1794 | # qhasm: xmm5 ^= xmm15 | ||
1795 | # asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6 | ||
1796 | # asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5 | ||
1797 | pxor %xmm15,%xmm5 | ||
1798 | |||
1799 | # qhasm: uint32323232 xmm8 >>= 8 | ||
1800 | # asm 1: psrld $8,<xmm8=int6464#9 | ||
1801 | # asm 2: psrld $8,<xmm8=%xmm8 | ||
1802 | psrld $8,%xmm8 | ||
1803 | |||
1804 | # qhasm: uint32323232 xmm9 >>= 8 | ||
1805 | # asm 1: psrld $8,<xmm9=int6464#10 | ||
1806 | # asm 2: psrld $8,<xmm9=%xmm9 | ||
1807 | psrld $8,%xmm9 | ||
1808 | |||
1809 | # qhasm: uint32323232 xmm10 >>= 8 | ||
1810 | # asm 1: psrld $8,<xmm10=int6464#11 | ||
1811 | # asm 2: psrld $8,<xmm10=%xmm10 | ||
1812 | psrld $8,%xmm10 | ||
1813 | |||
1814 | # qhasm: uint32323232 xmm11 >>= 8 | ||
1815 | # asm 1: psrld $8,<xmm11=int6464#12 | ||
1816 | # asm 2: psrld $8,<xmm11=%xmm11 | ||
1817 | psrld $8,%xmm11 | ||
1818 | |||
1819 | # qhasm: uint32323232 xmm12 >>= 8 | ||
1820 | # asm 1: psrld $8,<xmm12=int6464#13 | ||
1821 | # asm 2: psrld $8,<xmm12=%xmm12 | ||
1822 | psrld $8,%xmm12 | ||
1823 | |||
1824 | # qhasm: uint32323232 xmm13 >>= 8 | ||
1825 | # asm 1: psrld $8,<xmm13=int6464#14 | ||
1826 | # asm 2: psrld $8,<xmm13=%xmm13 | ||
1827 | psrld $8,%xmm13 | ||
1828 | |||
1829 | # qhasm: uint32323232 xmm14 >>= 8 | ||
1830 | # asm 1: psrld $8,<xmm14=int6464#15 | ||
1831 | # asm 2: psrld $8,<xmm14=%xmm14 | ||
1832 | psrld $8,%xmm14 | ||
1833 | |||
1834 | # qhasm: uint32323232 xmm15 >>= 8 | ||
1835 | # asm 1: psrld $8,<xmm15=int6464#16 | ||
1836 | # asm 2: psrld $8,<xmm15=%xmm15 | ||
1837 | psrld $8,%xmm15 | ||
1838 | |||
1839 | # qhasm: xmm0 ^= xmm8 | ||
1840 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
1841 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
1842 | pxor %xmm8,%xmm0 | ||
1843 | |||
1844 | # qhasm: xmm1 ^= xmm9 | ||
1845 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
1846 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
1847 | pxor %xmm9,%xmm1 | ||
1848 | |||
1849 | # qhasm: xmm4 ^= xmm10 | ||
1850 | # asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5 | ||
1851 | # asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4 | ||
1852 | pxor %xmm10,%xmm4 | ||
1853 | |||
1854 | # qhasm: xmm6 ^= xmm11 | ||
1855 | # asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7 | ||
1856 | # asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6 | ||
1857 | pxor %xmm11,%xmm6 | ||
1858 | |||
1859 | # qhasm: xmm3 ^= xmm12 | ||
1860 | # asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4 | ||
1861 | # asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3 | ||
1862 | pxor %xmm12,%xmm3 | ||
1863 | |||
1864 | # qhasm: xmm7 ^= xmm13 | ||
1865 | # asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8 | ||
1866 | # asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7 | ||
1867 | pxor %xmm13,%xmm7 | ||
1868 | |||
1869 | # qhasm: xmm2 ^= xmm14 | ||
1870 | # asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3 | ||
1871 | # asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2 | ||
1872 | pxor %xmm14,%xmm2 | ||
1873 | |||
1874 | # qhasm: xmm5 ^= xmm15 | ||
1875 | # asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6 | ||
1876 | # asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5 | ||
1877 | pxor %xmm15,%xmm5 | ||
1878 | |||
1879 | # qhasm: *(int128 *)(c + 128) = xmm0 | ||
1880 | # asm 1: movdqa <xmm0=int6464#1,128(<c=int64#1) | ||
1881 | # asm 2: movdqa <xmm0=%xmm0,128(<c=%rdi) | ||
1882 | movdqa %xmm0,128(%rdi) | ||
1883 | |||
1884 | # qhasm: *(int128 *)(c + 144) = xmm1 | ||
1885 | # asm 1: movdqa <xmm1=int6464#2,144(<c=int64#1) | ||
1886 | # asm 2: movdqa <xmm1=%xmm1,144(<c=%rdi) | ||
1887 | movdqa %xmm1,144(%rdi) | ||
1888 | |||
1889 | # qhasm: *(int128 *)(c + 160) = xmm4 | ||
1890 | # asm 1: movdqa <xmm4=int6464#5,160(<c=int64#1) | ||
1891 | # asm 2: movdqa <xmm4=%xmm4,160(<c=%rdi) | ||
1892 | movdqa %xmm4,160(%rdi) | ||
1893 | |||
1894 | # qhasm: *(int128 *)(c + 176) = xmm6 | ||
1895 | # asm 1: movdqa <xmm6=int6464#7,176(<c=int64#1) | ||
1896 | # asm 2: movdqa <xmm6=%xmm6,176(<c=%rdi) | ||
1897 | movdqa %xmm6,176(%rdi) | ||
1898 | |||
1899 | # qhasm: *(int128 *)(c + 192) = xmm3 | ||
1900 | # asm 1: movdqa <xmm3=int6464#4,192(<c=int64#1) | ||
1901 | # asm 2: movdqa <xmm3=%xmm3,192(<c=%rdi) | ||
1902 | movdqa %xmm3,192(%rdi) | ||
1903 | |||
1904 | # qhasm: *(int128 *)(c + 208) = xmm7 | ||
1905 | # asm 1: movdqa <xmm7=int6464#8,208(<c=int64#1) | ||
1906 | # asm 2: movdqa <xmm7=%xmm7,208(<c=%rdi) | ||
1907 | movdqa %xmm7,208(%rdi) | ||
1908 | |||
1909 | # qhasm: *(int128 *)(c + 224) = xmm2 | ||
1910 | # asm 1: movdqa <xmm2=int6464#3,224(<c=int64#1) | ||
1911 | # asm 2: movdqa <xmm2=%xmm2,224(<c=%rdi) | ||
1912 | movdqa %xmm2,224(%rdi) | ||
1913 | |||
1914 | # qhasm: *(int128 *)(c + 240) = xmm5 | ||
1915 | # asm 1: movdqa <xmm5=int6464#6,240(<c=int64#1) | ||
1916 | # asm 2: movdqa <xmm5=%xmm5,240(<c=%rdi) | ||
1917 | movdqa %xmm5,240(%rdi) | ||
1918 | |||
1919 | # qhasm: xmm0 ^= ONE | ||
1920 | # asm 1: pxor ONE,<xmm0=int6464#1 | ||
1921 | # asm 2: pxor ONE,<xmm0=%xmm0 | ||
1922 | pxor ONE,%xmm0 | ||
1923 | |||
1924 | # qhasm: xmm1 ^= ONE | ||
1925 | # asm 1: pxor ONE,<xmm1=int6464#2 | ||
1926 | # asm 2: pxor ONE,<xmm1=%xmm1 | ||
1927 | pxor ONE,%xmm1 | ||
1928 | |||
1929 | # qhasm: xmm7 ^= ONE | ||
1930 | # asm 1: pxor ONE,<xmm7=int6464#8 | ||
1931 | # asm 2: pxor ONE,<xmm7=%xmm7 | ||
1932 | pxor ONE,%xmm7 | ||
1933 | |||
1934 | # qhasm: xmm2 ^= ONE | ||
1935 | # asm 1: pxor ONE,<xmm2=int6464#3 | ||
1936 | # asm 2: pxor ONE,<xmm2=%xmm2 | ||
1937 | pxor ONE,%xmm2 | ||
1938 | |||
1939 | # qhasm: shuffle bytes of xmm0 by ROTB | ||
1940 | # asm 1: pshufb ROTB,<xmm0=int6464#1 | ||
1941 | # asm 2: pshufb ROTB,<xmm0=%xmm0 | ||
1942 | pshufb ROTB,%xmm0 | ||
1943 | |||
1944 | # qhasm: shuffle bytes of xmm1 by ROTB | ||
1945 | # asm 1: pshufb ROTB,<xmm1=int6464#2 | ||
1946 | # asm 2: pshufb ROTB,<xmm1=%xmm1 | ||
1947 | pshufb ROTB,%xmm1 | ||
1948 | |||
1949 | # qhasm: shuffle bytes of xmm4 by ROTB | ||
1950 | # asm 1: pshufb ROTB,<xmm4=int6464#5 | ||
1951 | # asm 2: pshufb ROTB,<xmm4=%xmm4 | ||
1952 | pshufb ROTB,%xmm4 | ||
1953 | |||
1954 | # qhasm: shuffle bytes of xmm6 by ROTB | ||
1955 | # asm 1: pshufb ROTB,<xmm6=int6464#7 | ||
1956 | # asm 2: pshufb ROTB,<xmm6=%xmm6 | ||
1957 | pshufb ROTB,%xmm6 | ||
1958 | |||
1959 | # qhasm: shuffle bytes of xmm3 by ROTB | ||
1960 | # asm 1: pshufb ROTB,<xmm3=int6464#4 | ||
1961 | # asm 2: pshufb ROTB,<xmm3=%xmm3 | ||
1962 | pshufb ROTB,%xmm3 | ||
1963 | |||
1964 | # qhasm: shuffle bytes of xmm7 by ROTB | ||
1965 | # asm 1: pshufb ROTB,<xmm7=int6464#8 | ||
1966 | # asm 2: pshufb ROTB,<xmm7=%xmm7 | ||
1967 | pshufb ROTB,%xmm7 | ||
1968 | |||
1969 | # qhasm: shuffle bytes of xmm2 by ROTB | ||
1970 | # asm 1: pshufb ROTB,<xmm2=int6464#3 | ||
1971 | # asm 2: pshufb ROTB,<xmm2=%xmm2 | ||
1972 | pshufb ROTB,%xmm2 | ||
1973 | |||
1974 | # qhasm: shuffle bytes of xmm5 by ROTB | ||
1975 | # asm 1: pshufb ROTB,<xmm5=int6464#6 | ||
1976 | # asm 2: pshufb ROTB,<xmm5=%xmm5 | ||
1977 | pshufb ROTB,%xmm5 | ||
1978 | |||
1979 | # qhasm: xmm7 ^= xmm2 | ||
1980 | # asm 1: pxor <xmm2=int6464#3,<xmm7=int6464#8 | ||
1981 | # asm 2: pxor <xmm2=%xmm2,<xmm7=%xmm7 | ||
1982 | pxor %xmm2,%xmm7 | ||
1983 | |||
1984 | # qhasm: xmm4 ^= xmm1 | ||
1985 | # asm 1: pxor <xmm1=int6464#2,<xmm4=int6464#5 | ||
1986 | # asm 2: pxor <xmm1=%xmm1,<xmm4=%xmm4 | ||
1987 | pxor %xmm1,%xmm4 | ||
1988 | |||
1989 | # qhasm: xmm7 ^= xmm0 | ||
1990 | # asm 1: pxor <xmm0=int6464#1,<xmm7=int6464#8 | ||
1991 | # asm 2: pxor <xmm0=%xmm0,<xmm7=%xmm7 | ||
1992 | pxor %xmm0,%xmm7 | ||
1993 | |||
1994 | # qhasm: xmm2 ^= xmm4 | ||
1995 | # asm 1: pxor <xmm4=int6464#5,<xmm2=int6464#3 | ||
1996 | # asm 2: pxor <xmm4=%xmm4,<xmm2=%xmm2 | ||
1997 | pxor %xmm4,%xmm2 | ||
1998 | |||
1999 | # qhasm: xmm6 ^= xmm0 | ||
2000 | # asm 1: pxor <xmm0=int6464#1,<xmm6=int6464#7 | ||
2001 | # asm 2: pxor <xmm0=%xmm0,<xmm6=%xmm6 | ||
2002 | pxor %xmm0,%xmm6 | ||
2003 | |||
2004 | # qhasm: xmm2 ^= xmm6 | ||
2005 | # asm 1: pxor <xmm6=int6464#7,<xmm2=int6464#3 | ||
2006 | # asm 2: pxor <xmm6=%xmm6,<xmm2=%xmm2 | ||
2007 | pxor %xmm6,%xmm2 | ||
2008 | |||
2009 | # qhasm: xmm6 ^= xmm5 | ||
2010 | # asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7 | ||
2011 | # asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6 | ||
2012 | pxor %xmm5,%xmm6 | ||
2013 | |||
2014 | # qhasm: xmm6 ^= xmm3 | ||
2015 | # asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7 | ||
2016 | # asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6 | ||
2017 | pxor %xmm3,%xmm6 | ||
2018 | |||
2019 | # qhasm: xmm5 ^= xmm7 | ||
2020 | # asm 1: pxor <xmm7=int6464#8,<xmm5=int6464#6 | ||
2021 | # asm 2: pxor <xmm7=%xmm7,<xmm5=%xmm5 | ||
2022 | pxor %xmm7,%xmm5 | ||
2023 | |||
2024 | # qhasm: xmm6 ^= xmm1 | ||
2025 | # asm 1: pxor <xmm1=int6464#2,<xmm6=int6464#7 | ||
2026 | # asm 2: pxor <xmm1=%xmm1,<xmm6=%xmm6 | ||
2027 | pxor %xmm1,%xmm6 | ||
2028 | |||
2029 | # qhasm: xmm3 ^= xmm7 | ||
2030 | # asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4 | ||
2031 | # asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3 | ||
2032 | pxor %xmm7,%xmm3 | ||
2033 | |||
2034 | # qhasm: xmm4 ^= xmm5 | ||
2035 | # asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5 | ||
2036 | # asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4 | ||
2037 | pxor %xmm5,%xmm4 | ||
2038 | |||
2039 | # qhasm: xmm1 ^= xmm7 | ||
2040 | # asm 1: pxor <xmm7=int6464#8,<xmm1=int6464#2 | ||
2041 | # asm 2: pxor <xmm7=%xmm7,<xmm1=%xmm1 | ||
2042 | pxor %xmm7,%xmm1 | ||
2043 | |||
2044 | # qhasm: xmm11 = xmm5 | ||
2045 | # asm 1: movdqa <xmm5=int6464#6,>xmm11=int6464#9 | ||
2046 | # asm 2: movdqa <xmm5=%xmm5,>xmm11=%xmm8 | ||
2047 | movdqa %xmm5,%xmm8 | ||
2048 | |||
2049 | # qhasm: xmm10 = xmm1 | ||
2050 | # asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10 | ||
2051 | # asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9 | ||
2052 | movdqa %xmm1,%xmm9 | ||
2053 | |||
2054 | # qhasm: xmm9 = xmm7 | ||
2055 | # asm 1: movdqa <xmm7=int6464#8,>xmm9=int6464#11 | ||
2056 | # asm 2: movdqa <xmm7=%xmm7,>xmm9=%xmm10 | ||
2057 | movdqa %xmm7,%xmm10 | ||
2058 | |||
2059 | # qhasm: xmm13 = xmm4 | ||
2060 | # asm 1: movdqa <xmm4=int6464#5,>xmm13=int6464#12 | ||
2061 | # asm 2: movdqa <xmm4=%xmm4,>xmm13=%xmm11 | ||
2062 | movdqa %xmm4,%xmm11 | ||
2063 | |||
2064 | # qhasm: xmm12 = xmm2 | ||
2065 | # asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#13 | ||
2066 | # asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm12 | ||
2067 | movdqa %xmm2,%xmm12 | ||
2068 | |||
2069 | # qhasm: xmm11 ^= xmm3 | ||
2070 | # asm 1: pxor <xmm3=int6464#4,<xmm11=int6464#9 | ||
2071 | # asm 2: pxor <xmm3=%xmm3,<xmm11=%xmm8 | ||
2072 | pxor %xmm3,%xmm8 | ||
2073 | |||
2074 | # qhasm: xmm10 ^= xmm4 | ||
2075 | # asm 1: pxor <xmm4=int6464#5,<xmm10=int6464#10 | ||
2076 | # asm 2: pxor <xmm4=%xmm4,<xmm10=%xmm9 | ||
2077 | pxor %xmm4,%xmm9 | ||
2078 | |||
2079 | # qhasm: xmm9 ^= xmm6 | ||
2080 | # asm 1: pxor <xmm6=int6464#7,<xmm9=int6464#11 | ||
2081 | # asm 2: pxor <xmm6=%xmm6,<xmm9=%xmm10 | ||
2082 | pxor %xmm6,%xmm10 | ||
2083 | |||
2084 | # qhasm: xmm13 ^= xmm3 | ||
2085 | # asm 1: pxor <xmm3=int6464#4,<xmm13=int6464#12 | ||
2086 | # asm 2: pxor <xmm3=%xmm3,<xmm13=%xmm11 | ||
2087 | pxor %xmm3,%xmm11 | ||
2088 | |||
2089 | # qhasm: xmm12 ^= xmm0 | ||
2090 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13 | ||
2091 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12 | ||
2092 | pxor %xmm0,%xmm12 | ||
2093 | |||
2094 | # qhasm: xmm14 = xmm11 | ||
2095 | # asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14 | ||
2096 | # asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13 | ||
2097 | movdqa %xmm8,%xmm13 | ||
2098 | |||
2099 | # qhasm: xmm8 = xmm10 | ||
2100 | # asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15 | ||
2101 | # asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14 | ||
2102 | movdqa %xmm9,%xmm14 | ||
2103 | |||
2104 | # qhasm: xmm15 = xmm11 | ||
2105 | # asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16 | ||
2106 | # asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15 | ||
2107 | movdqa %xmm8,%xmm15 | ||
2108 | |||
2109 | # qhasm: xmm10 |= xmm9 | ||
2110 | # asm 1: por <xmm9=int6464#11,<xmm10=int6464#10 | ||
2111 | # asm 2: por <xmm9=%xmm10,<xmm10=%xmm9 | ||
2112 | por %xmm10,%xmm9 | ||
2113 | |||
2114 | # qhasm: xmm11 |= xmm12 | ||
2115 | # asm 1: por <xmm12=int6464#13,<xmm11=int6464#9 | ||
2116 | # asm 2: por <xmm12=%xmm12,<xmm11=%xmm8 | ||
2117 | por %xmm12,%xmm8 | ||
2118 | |||
2119 | # qhasm: xmm15 ^= xmm8 | ||
2120 | # asm 1: pxor <xmm8=int6464#15,<xmm15=int6464#16 | ||
2121 | # asm 2: pxor <xmm8=%xmm14,<xmm15=%xmm15 | ||
2122 | pxor %xmm14,%xmm15 | ||
2123 | |||
2124 | # qhasm: xmm14 &= xmm12 | ||
2125 | # asm 1: pand <xmm12=int6464#13,<xmm14=int6464#14 | ||
2126 | # asm 2: pand <xmm12=%xmm12,<xmm14=%xmm13 | ||
2127 | pand %xmm12,%xmm13 | ||
2128 | |||
2129 | # qhasm: xmm8 &= xmm9 | ||
2130 | # asm 1: pand <xmm9=int6464#11,<xmm8=int6464#15 | ||
2131 | # asm 2: pand <xmm9=%xmm10,<xmm8=%xmm14 | ||
2132 | pand %xmm10,%xmm14 | ||
2133 | |||
2134 | # qhasm: xmm12 ^= xmm9 | ||
2135 | # asm 1: pxor <xmm9=int6464#11,<xmm12=int6464#13 | ||
2136 | # asm 2: pxor <xmm9=%xmm10,<xmm12=%xmm12 | ||
2137 | pxor %xmm10,%xmm12 | ||
2138 | |||
2139 | # qhasm: xmm15 &= xmm12 | ||
2140 | # asm 1: pand <xmm12=int6464#13,<xmm15=int6464#16 | ||
2141 | # asm 2: pand <xmm12=%xmm12,<xmm15=%xmm15 | ||
2142 | pand %xmm12,%xmm15 | ||
2143 | |||
2144 | # qhasm: xmm12 = xmm6 | ||
2145 | # asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#11 | ||
2146 | # asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm10 | ||
2147 | movdqa %xmm6,%xmm10 | ||
2148 | |||
2149 | # qhasm: xmm12 ^= xmm0 | ||
2150 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#11 | ||
2151 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm10 | ||
2152 | pxor %xmm0,%xmm10 | ||
2153 | |||
2154 | # qhasm: xmm13 &= xmm12 | ||
2155 | # asm 1: pand <xmm12=int6464#11,<xmm13=int6464#12 | ||
2156 | # asm 2: pand <xmm12=%xmm10,<xmm13=%xmm11 | ||
2157 | pand %xmm10,%xmm11 | ||
2158 | |||
2159 | # qhasm: xmm11 ^= xmm13 | ||
2160 | # asm 1: pxor <xmm13=int6464#12,<xmm11=int6464#9 | ||
2161 | # asm 2: pxor <xmm13=%xmm11,<xmm11=%xmm8 | ||
2162 | pxor %xmm11,%xmm8 | ||
2163 | |||
2164 | # qhasm: xmm10 ^= xmm13 | ||
2165 | # asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10 | ||
2166 | # asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9 | ||
2167 | pxor %xmm11,%xmm9 | ||
2168 | |||
2169 | # qhasm: xmm13 = xmm5 | ||
2170 | # asm 1: movdqa <xmm5=int6464#6,>xmm13=int6464#11 | ||
2171 | # asm 2: movdqa <xmm5=%xmm5,>xmm13=%xmm10 | ||
2172 | movdqa %xmm5,%xmm10 | ||
2173 | |||
2174 | # qhasm: xmm13 ^= xmm1 | ||
2175 | # asm 1: pxor <xmm1=int6464#2,<xmm13=int6464#11 | ||
2176 | # asm 2: pxor <xmm1=%xmm1,<xmm13=%xmm10 | ||
2177 | pxor %xmm1,%xmm10 | ||
2178 | |||
2179 | # qhasm: xmm12 = xmm7 | ||
2180 | # asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#12 | ||
2181 | # asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm11 | ||
2182 | movdqa %xmm7,%xmm11 | ||
2183 | |||
2184 | # qhasm: xmm9 = xmm13 | ||
2185 | # asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13 | ||
2186 | # asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12 | ||
2187 | movdqa %xmm10,%xmm12 | ||
2188 | |||
2189 | # qhasm: xmm12 ^= xmm2 | ||
2190 | # asm 1: pxor <xmm2=int6464#3,<xmm12=int6464#12 | ||
2191 | # asm 2: pxor <xmm2=%xmm2,<xmm12=%xmm11 | ||
2192 | pxor %xmm2,%xmm11 | ||
2193 | |||
2194 | # qhasm: xmm9 |= xmm12 | ||
2195 | # asm 1: por <xmm12=int6464#12,<xmm9=int6464#13 | ||
2196 | # asm 2: por <xmm12=%xmm11,<xmm9=%xmm12 | ||
2197 | por %xmm11,%xmm12 | ||
2198 | |||
2199 | # qhasm: xmm13 &= xmm12 | ||
2200 | # asm 1: pand <xmm12=int6464#12,<xmm13=int6464#11 | ||
2201 | # asm 2: pand <xmm12=%xmm11,<xmm13=%xmm10 | ||
2202 | pand %xmm11,%xmm10 | ||
2203 | |||
2204 | # qhasm: xmm8 ^= xmm13 | ||
2205 | # asm 1: pxor <xmm13=int6464#11,<xmm8=int6464#15 | ||
2206 | # asm 2: pxor <xmm13=%xmm10,<xmm8=%xmm14 | ||
2207 | pxor %xmm10,%xmm14 | ||
2208 | |||
2209 | # qhasm: xmm11 ^= xmm15 | ||
2210 | # asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#9 | ||
2211 | # asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm8 | ||
2212 | pxor %xmm15,%xmm8 | ||
2213 | |||
2214 | # qhasm: xmm10 ^= xmm14 | ||
2215 | # asm 1: pxor <xmm14=int6464#14,<xmm10=int6464#10 | ||
2216 | # asm 2: pxor <xmm14=%xmm13,<xmm10=%xmm9 | ||
2217 | pxor %xmm13,%xmm9 | ||
2218 | |||
2219 | # qhasm: xmm9 ^= xmm15 | ||
2220 | # asm 1: pxor <xmm15=int6464#16,<xmm9=int6464#13 | ||
2221 | # asm 2: pxor <xmm15=%xmm15,<xmm9=%xmm12 | ||
2222 | pxor %xmm15,%xmm12 | ||
2223 | |||
2224 | # qhasm: xmm8 ^= xmm14 | ||
2225 | # asm 1: pxor <xmm14=int6464#14,<xmm8=int6464#15 | ||
2226 | # asm 2: pxor <xmm14=%xmm13,<xmm8=%xmm14 | ||
2227 | pxor %xmm13,%xmm14 | ||
2228 | |||
2229 | # qhasm: xmm9 ^= xmm14 | ||
2230 | # asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13 | ||
2231 | # asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12 | ||
2232 | pxor %xmm13,%xmm12 | ||
2233 | |||
2234 | # qhasm: xmm12 = xmm4 | ||
2235 | # asm 1: movdqa <xmm4=int6464#5,>xmm12=int6464#11 | ||
2236 | # asm 2: movdqa <xmm4=%xmm4,>xmm12=%xmm10 | ||
2237 | movdqa %xmm4,%xmm10 | ||
2238 | |||
2239 | # qhasm: xmm13 = xmm3 | ||
2240 | # asm 1: movdqa <xmm3=int6464#4,>xmm13=int6464#12 | ||
2241 | # asm 2: movdqa <xmm3=%xmm3,>xmm13=%xmm11 | ||
2242 | movdqa %xmm3,%xmm11 | ||
2243 | |||
2244 | # qhasm: xmm14 = xmm1 | ||
2245 | # asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14 | ||
2246 | # asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13 | ||
2247 | movdqa %xmm1,%xmm13 | ||
2248 | |||
2249 | # qhasm: xmm15 = xmm5 | ||
2250 | # asm 1: movdqa <xmm5=int6464#6,>xmm15=int6464#16 | ||
2251 | # asm 2: movdqa <xmm5=%xmm5,>xmm15=%xmm15 | ||
2252 | movdqa %xmm5,%xmm15 | ||
2253 | |||
2254 | # qhasm: xmm12 &= xmm6 | ||
2255 | # asm 1: pand <xmm6=int6464#7,<xmm12=int6464#11 | ||
2256 | # asm 2: pand <xmm6=%xmm6,<xmm12=%xmm10 | ||
2257 | pand %xmm6,%xmm10 | ||
2258 | |||
2259 | # qhasm: xmm13 &= xmm0 | ||
2260 | # asm 1: pand <xmm0=int6464#1,<xmm13=int6464#12 | ||
2261 | # asm 2: pand <xmm0=%xmm0,<xmm13=%xmm11 | ||
2262 | pand %xmm0,%xmm11 | ||
2263 | |||
2264 | # qhasm: xmm14 &= xmm7 | ||
2265 | # asm 1: pand <xmm7=int6464#8,<xmm14=int6464#14 | ||
2266 | # asm 2: pand <xmm7=%xmm7,<xmm14=%xmm13 | ||
2267 | pand %xmm7,%xmm13 | ||
2268 | |||
2269 | # qhasm: xmm15 |= xmm2 | ||
2270 | # asm 1: por <xmm2=int6464#3,<xmm15=int6464#16 | ||
2271 | # asm 2: por <xmm2=%xmm2,<xmm15=%xmm15 | ||
2272 | por %xmm2,%xmm15 | ||
2273 | |||
2274 | # qhasm: xmm11 ^= xmm12 | ||
2275 | # asm 1: pxor <xmm12=int6464#11,<xmm11=int6464#9 | ||
2276 | # asm 2: pxor <xmm12=%xmm10,<xmm11=%xmm8 | ||
2277 | pxor %xmm10,%xmm8 | ||
2278 | |||
2279 | # qhasm: xmm10 ^= xmm13 | ||
2280 | # asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10 | ||
2281 | # asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9 | ||
2282 | pxor %xmm11,%xmm9 | ||
2283 | |||
2284 | # qhasm: xmm9 ^= xmm14 | ||
2285 | # asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13 | ||
2286 | # asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12 | ||
2287 | pxor %xmm13,%xmm12 | ||
2288 | |||
2289 | # qhasm: xmm8 ^= xmm15 | ||
2290 | # asm 1: pxor <xmm15=int6464#16,<xmm8=int6464#15 | ||
2291 | # asm 2: pxor <xmm15=%xmm15,<xmm8=%xmm14 | ||
2292 | pxor %xmm15,%xmm14 | ||
2293 | |||
2294 | # qhasm: xmm12 = xmm11 | ||
2295 | # asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11 | ||
2296 | # asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10 | ||
2297 | movdqa %xmm8,%xmm10 | ||
2298 | |||
2299 | # qhasm: xmm12 ^= xmm10 | ||
2300 | # asm 1: pxor <xmm10=int6464#10,<xmm12=int6464#11 | ||
2301 | # asm 2: pxor <xmm10=%xmm9,<xmm12=%xmm10 | ||
2302 | pxor %xmm9,%xmm10 | ||
2303 | |||
2304 | # qhasm: xmm11 &= xmm9 | ||
2305 | # asm 1: pand <xmm9=int6464#13,<xmm11=int6464#9 | ||
2306 | # asm 2: pand <xmm9=%xmm12,<xmm11=%xmm8 | ||
2307 | pand %xmm12,%xmm8 | ||
2308 | |||
2309 | # qhasm: xmm14 = xmm8 | ||
2310 | # asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12 | ||
2311 | # asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11 | ||
2312 | movdqa %xmm14,%xmm11 | ||
2313 | |||
2314 | # qhasm: xmm14 ^= xmm11 | ||
2315 | # asm 1: pxor <xmm11=int6464#9,<xmm14=int6464#12 | ||
2316 | # asm 2: pxor <xmm11=%xmm8,<xmm14=%xmm11 | ||
2317 | pxor %xmm8,%xmm11 | ||
2318 | |||
2319 | # qhasm: xmm15 = xmm12 | ||
2320 | # asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14 | ||
2321 | # asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13 | ||
2322 | movdqa %xmm10,%xmm13 | ||
2323 | |||
2324 | # qhasm: xmm15 &= xmm14 | ||
2325 | # asm 1: pand <xmm14=int6464#12,<xmm15=int6464#14 | ||
2326 | # asm 2: pand <xmm14=%xmm11,<xmm15=%xmm13 | ||
2327 | pand %xmm11,%xmm13 | ||
2328 | |||
2329 | # qhasm: xmm15 ^= xmm10 | ||
2330 | # asm 1: pxor <xmm10=int6464#10,<xmm15=int6464#14 | ||
2331 | # asm 2: pxor <xmm10=%xmm9,<xmm15=%xmm13 | ||
2332 | pxor %xmm9,%xmm13 | ||
2333 | |||
2334 | # qhasm: xmm13 = xmm9 | ||
2335 | # asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16 | ||
2336 | # asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15 | ||
2337 | movdqa %xmm12,%xmm15 | ||
2338 | |||
2339 | # qhasm: xmm13 ^= xmm8 | ||
2340 | # asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16 | ||
2341 | # asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15 | ||
2342 | pxor %xmm14,%xmm15 | ||
2343 | |||
2344 | # qhasm: xmm11 ^= xmm10 | ||
2345 | # asm 1: pxor <xmm10=int6464#10,<xmm11=int6464#9 | ||
2346 | # asm 2: pxor <xmm10=%xmm9,<xmm11=%xmm8 | ||
2347 | pxor %xmm9,%xmm8 | ||
2348 | |||
2349 | # qhasm: xmm13 &= xmm11 | ||
2350 | # asm 1: pand <xmm11=int6464#9,<xmm13=int6464#16 | ||
2351 | # asm 2: pand <xmm11=%xmm8,<xmm13=%xmm15 | ||
2352 | pand %xmm8,%xmm15 | ||
2353 | |||
2354 | # qhasm: xmm13 ^= xmm8 | ||
2355 | # asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16 | ||
2356 | # asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15 | ||
2357 | pxor %xmm14,%xmm15 | ||
2358 | |||
2359 | # qhasm: xmm9 ^= xmm13 | ||
2360 | # asm 1: pxor <xmm13=int6464#16,<xmm9=int6464#13 | ||
2361 | # asm 2: pxor <xmm13=%xmm15,<xmm9=%xmm12 | ||
2362 | pxor %xmm15,%xmm12 | ||
2363 | |||
2364 | # qhasm: xmm10 = xmm14 | ||
2365 | # asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9 | ||
2366 | # asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8 | ||
2367 | movdqa %xmm11,%xmm8 | ||
2368 | |||
2369 | # qhasm: xmm10 ^= xmm13 | ||
2370 | # asm 1: pxor <xmm13=int6464#16,<xmm10=int6464#9 | ||
2371 | # asm 2: pxor <xmm13=%xmm15,<xmm10=%xmm8 | ||
2372 | pxor %xmm15,%xmm8 | ||
2373 | |||
2374 | # qhasm: xmm10 &= xmm8 | ||
2375 | # asm 1: pand <xmm8=int6464#15,<xmm10=int6464#9 | ||
2376 | # asm 2: pand <xmm8=%xmm14,<xmm10=%xmm8 | ||
2377 | pand %xmm14,%xmm8 | ||
2378 | |||
2379 | # qhasm: xmm9 ^= xmm10 | ||
2380 | # asm 1: pxor <xmm10=int6464#9,<xmm9=int6464#13 | ||
2381 | # asm 2: pxor <xmm10=%xmm8,<xmm9=%xmm12 | ||
2382 | pxor %xmm8,%xmm12 | ||
2383 | |||
2384 | # qhasm: xmm14 ^= xmm10 | ||
2385 | # asm 1: pxor <xmm10=int6464#9,<xmm14=int6464#12 | ||
2386 | # asm 2: pxor <xmm10=%xmm8,<xmm14=%xmm11 | ||
2387 | pxor %xmm8,%xmm11 | ||
2388 | |||
2389 | # qhasm: xmm14 &= xmm15 | ||
2390 | # asm 1: pand <xmm15=int6464#14,<xmm14=int6464#12 | ||
2391 | # asm 2: pand <xmm15=%xmm13,<xmm14=%xmm11 | ||
2392 | pand %xmm13,%xmm11 | ||
2393 | |||
2394 | # qhasm: xmm14 ^= xmm12 | ||
2395 | # asm 1: pxor <xmm12=int6464#11,<xmm14=int6464#12 | ||
2396 | # asm 2: pxor <xmm12=%xmm10,<xmm14=%xmm11 | ||
2397 | pxor %xmm10,%xmm11 | ||
2398 | |||
2399 | # qhasm: xmm12 = xmm2 | ||
2400 | # asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#9 | ||
2401 | # asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm8 | ||
2402 | movdqa %xmm2,%xmm8 | ||
2403 | |||
2404 | # qhasm: xmm8 = xmm7 | ||
2405 | # asm 1: movdqa <xmm7=int6464#8,>xmm8=int6464#10 | ||
2406 | # asm 2: movdqa <xmm7=%xmm7,>xmm8=%xmm9 | ||
2407 | movdqa %xmm7,%xmm9 | ||
2408 | |||
2409 | # qhasm: xmm10 = xmm15 | ||
2410 | # asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11 | ||
2411 | # asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10 | ||
2412 | movdqa %xmm13,%xmm10 | ||
2413 | |||
2414 | # qhasm: xmm10 ^= xmm14 | ||
2415 | # asm 1: pxor <xmm14=int6464#12,<xmm10=int6464#11 | ||
2416 | # asm 2: pxor <xmm14=%xmm11,<xmm10=%xmm10 | ||
2417 | pxor %xmm11,%xmm10 | ||
2418 | |||
2419 | # qhasm: xmm10 &= xmm2 | ||
2420 | # asm 1: pand <xmm2=int6464#3,<xmm10=int6464#11 | ||
2421 | # asm 2: pand <xmm2=%xmm2,<xmm10=%xmm10 | ||
2422 | pand %xmm2,%xmm10 | ||
2423 | |||
2424 | # qhasm: xmm2 ^= xmm7 | ||
2425 | # asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3 | ||
2426 | # asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2 | ||
2427 | pxor %xmm7,%xmm2 | ||
2428 | |||
2429 | # qhasm: xmm2 &= xmm14 | ||
2430 | # asm 1: pand <xmm14=int6464#12,<xmm2=int6464#3 | ||
2431 | # asm 2: pand <xmm14=%xmm11,<xmm2=%xmm2 | ||
2432 | pand %xmm11,%xmm2 | ||
2433 | |||
2434 | # qhasm: xmm7 &= xmm15 | ||
2435 | # asm 1: pand <xmm15=int6464#14,<xmm7=int6464#8 | ||
2436 | # asm 2: pand <xmm15=%xmm13,<xmm7=%xmm7 | ||
2437 | pand %xmm13,%xmm7 | ||
2438 | |||
2439 | # qhasm: xmm2 ^= xmm7 | ||
2440 | # asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3 | ||
2441 | # asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2 | ||
2442 | pxor %xmm7,%xmm2 | ||
2443 | |||
2444 | # qhasm: xmm7 ^= xmm10 | ||
2445 | # asm 1: pxor <xmm10=int6464#11,<xmm7=int6464#8 | ||
2446 | # asm 2: pxor <xmm10=%xmm10,<xmm7=%xmm7 | ||
2447 | pxor %xmm10,%xmm7 | ||
2448 | |||
2449 | # qhasm: xmm12 ^= xmm0 | ||
2450 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#9 | ||
2451 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm8 | ||
2452 | pxor %xmm0,%xmm8 | ||
2453 | |||
2454 | # qhasm: xmm8 ^= xmm6 | ||
2455 | # asm 1: pxor <xmm6=int6464#7,<xmm8=int6464#10 | ||
2456 | # asm 2: pxor <xmm6=%xmm6,<xmm8=%xmm9 | ||
2457 | pxor %xmm6,%xmm9 | ||
2458 | |||
2459 | # qhasm: xmm15 ^= xmm13 | ||
2460 | # asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14 | ||
2461 | # asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13 | ||
2462 | pxor %xmm15,%xmm13 | ||
2463 | |||
2464 | # qhasm: xmm14 ^= xmm9 | ||
2465 | # asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12 | ||
2466 | # asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11 | ||
2467 | pxor %xmm12,%xmm11 | ||
2468 | |||
2469 | # qhasm: xmm11 = xmm15 | ||
2470 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
2471 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
2472 | movdqa %xmm13,%xmm10 | ||
2473 | |||
2474 | # qhasm: xmm11 ^= xmm14 | ||
2475 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
2476 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
2477 | pxor %xmm11,%xmm10 | ||
2478 | |||
2479 | # qhasm: xmm11 &= xmm12 | ||
2480 | # asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11 | ||
2481 | # asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10 | ||
2482 | pand %xmm8,%xmm10 | ||
2483 | |||
2484 | # qhasm: xmm12 ^= xmm8 | ||
2485 | # asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9 | ||
2486 | # asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8 | ||
2487 | pxor %xmm9,%xmm8 | ||
2488 | |||
2489 | # qhasm: xmm12 &= xmm14 | ||
2490 | # asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9 | ||
2491 | # asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8 | ||
2492 | pand %xmm11,%xmm8 | ||
2493 | |||
2494 | # qhasm: xmm8 &= xmm15 | ||
2495 | # asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10 | ||
2496 | # asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9 | ||
2497 | pand %xmm13,%xmm9 | ||
2498 | |||
2499 | # qhasm: xmm8 ^= xmm12 | ||
2500 | # asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10 | ||
2501 | # asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9 | ||
2502 | pxor %xmm8,%xmm9 | ||
2503 | |||
2504 | # qhasm: xmm12 ^= xmm11 | ||
2505 | # asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9 | ||
2506 | # asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8 | ||
2507 | pxor %xmm10,%xmm8 | ||
2508 | |||
2509 | # qhasm: xmm10 = xmm13 | ||
2510 | # asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11 | ||
2511 | # asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10 | ||
2512 | movdqa %xmm15,%xmm10 | ||
2513 | |||
2514 | # qhasm: xmm10 ^= xmm9 | ||
2515 | # asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11 | ||
2516 | # asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10 | ||
2517 | pxor %xmm12,%xmm10 | ||
2518 | |||
2519 | # qhasm: xmm10 &= xmm0 | ||
2520 | # asm 1: pand <xmm0=int6464#1,<xmm10=int6464#11 | ||
2521 | # asm 2: pand <xmm0=%xmm0,<xmm10=%xmm10 | ||
2522 | pand %xmm0,%xmm10 | ||
2523 | |||
2524 | # qhasm: xmm0 ^= xmm6 | ||
2525 | # asm 1: pxor <xmm6=int6464#7,<xmm0=int6464#1 | ||
2526 | # asm 2: pxor <xmm6=%xmm6,<xmm0=%xmm0 | ||
2527 | pxor %xmm6,%xmm0 | ||
2528 | |||
2529 | # qhasm: xmm0 &= xmm9 | ||
2530 | # asm 1: pand <xmm9=int6464#13,<xmm0=int6464#1 | ||
2531 | # asm 2: pand <xmm9=%xmm12,<xmm0=%xmm0 | ||
2532 | pand %xmm12,%xmm0 | ||
2533 | |||
2534 | # qhasm: xmm6 &= xmm13 | ||
2535 | # asm 1: pand <xmm13=int6464#16,<xmm6=int6464#7 | ||
2536 | # asm 2: pand <xmm13=%xmm15,<xmm6=%xmm6 | ||
2537 | pand %xmm15,%xmm6 | ||
2538 | |||
2539 | # qhasm: xmm0 ^= xmm6 | ||
2540 | # asm 1: pxor <xmm6=int6464#7,<xmm0=int6464#1 | ||
2541 | # asm 2: pxor <xmm6=%xmm6,<xmm0=%xmm0 | ||
2542 | pxor %xmm6,%xmm0 | ||
2543 | |||
2544 | # qhasm: xmm6 ^= xmm10 | ||
2545 | # asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7 | ||
2546 | # asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6 | ||
2547 | pxor %xmm10,%xmm6 | ||
2548 | |||
2549 | # qhasm: xmm2 ^= xmm12 | ||
2550 | # asm 1: pxor <xmm12=int6464#9,<xmm2=int6464#3 | ||
2551 | # asm 2: pxor <xmm12=%xmm8,<xmm2=%xmm2 | ||
2552 | pxor %xmm8,%xmm2 | ||
2553 | |||
2554 | # qhasm: xmm0 ^= xmm12 | ||
2555 | # asm 1: pxor <xmm12=int6464#9,<xmm0=int6464#1 | ||
2556 | # asm 2: pxor <xmm12=%xmm8,<xmm0=%xmm0 | ||
2557 | pxor %xmm8,%xmm0 | ||
2558 | |||
2559 | # qhasm: xmm7 ^= xmm8 | ||
2560 | # asm 1: pxor <xmm8=int6464#10,<xmm7=int6464#8 | ||
2561 | # asm 2: pxor <xmm8=%xmm9,<xmm7=%xmm7 | ||
2562 | pxor %xmm9,%xmm7 | ||
2563 | |||
2564 | # qhasm: xmm6 ^= xmm8 | ||
2565 | # asm 1: pxor <xmm8=int6464#10,<xmm6=int6464#7 | ||
2566 | # asm 2: pxor <xmm8=%xmm9,<xmm6=%xmm6 | ||
2567 | pxor %xmm9,%xmm6 | ||
2568 | |||
2569 | # qhasm: xmm12 = xmm5 | ||
2570 | # asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#9 | ||
2571 | # asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm8 | ||
2572 | movdqa %xmm5,%xmm8 | ||
2573 | |||
2574 | # qhasm: xmm8 = xmm1 | ||
2575 | # asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10 | ||
2576 | # asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9 | ||
2577 | movdqa %xmm1,%xmm9 | ||
2578 | |||
2579 | # qhasm: xmm12 ^= xmm3 | ||
2580 | # asm 1: pxor <xmm3=int6464#4,<xmm12=int6464#9 | ||
2581 | # asm 2: pxor <xmm3=%xmm3,<xmm12=%xmm8 | ||
2582 | pxor %xmm3,%xmm8 | ||
2583 | |||
2584 | # qhasm: xmm8 ^= xmm4 | ||
2585 | # asm 1: pxor <xmm4=int6464#5,<xmm8=int6464#10 | ||
2586 | # asm 2: pxor <xmm4=%xmm4,<xmm8=%xmm9 | ||
2587 | pxor %xmm4,%xmm9 | ||
2588 | |||
2589 | # qhasm: xmm11 = xmm15 | ||
2590 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
2591 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
2592 | movdqa %xmm13,%xmm10 | ||
2593 | |||
2594 | # qhasm: xmm11 ^= xmm14 | ||
2595 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
2596 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
2597 | pxor %xmm11,%xmm10 | ||
2598 | |||
2599 | # qhasm: xmm11 &= xmm12 | ||
2600 | # asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11 | ||
2601 | # asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10 | ||
2602 | pand %xmm8,%xmm10 | ||
2603 | |||
2604 | # qhasm: xmm12 ^= xmm8 | ||
2605 | # asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9 | ||
2606 | # asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8 | ||
2607 | pxor %xmm9,%xmm8 | ||
2608 | |||
2609 | # qhasm: xmm12 &= xmm14 | ||
2610 | # asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9 | ||
2611 | # asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8 | ||
2612 | pand %xmm11,%xmm8 | ||
2613 | |||
2614 | # qhasm: xmm8 &= xmm15 | ||
2615 | # asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10 | ||
2616 | # asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9 | ||
2617 | pand %xmm13,%xmm9 | ||
2618 | |||
2619 | # qhasm: xmm8 ^= xmm12 | ||
2620 | # asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10 | ||
2621 | # asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9 | ||
2622 | pxor %xmm8,%xmm9 | ||
2623 | |||
2624 | # qhasm: xmm12 ^= xmm11 | ||
2625 | # asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9 | ||
2626 | # asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8 | ||
2627 | pxor %xmm10,%xmm8 | ||
2628 | |||
2629 | # qhasm: xmm10 = xmm13 | ||
2630 | # asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11 | ||
2631 | # asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10 | ||
2632 | movdqa %xmm15,%xmm10 | ||
2633 | |||
2634 | # qhasm: xmm10 ^= xmm9 | ||
2635 | # asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11 | ||
2636 | # asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10 | ||
2637 | pxor %xmm12,%xmm10 | ||
2638 | |||
2639 | # qhasm: xmm10 &= xmm3 | ||
2640 | # asm 1: pand <xmm3=int6464#4,<xmm10=int6464#11 | ||
2641 | # asm 2: pand <xmm3=%xmm3,<xmm10=%xmm10 | ||
2642 | pand %xmm3,%xmm10 | ||
2643 | |||
2644 | # qhasm: xmm3 ^= xmm4 | ||
2645 | # asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4 | ||
2646 | # asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3 | ||
2647 | pxor %xmm4,%xmm3 | ||
2648 | |||
2649 | # qhasm: xmm3 &= xmm9 | ||
2650 | # asm 1: pand <xmm9=int6464#13,<xmm3=int6464#4 | ||
2651 | # asm 2: pand <xmm9=%xmm12,<xmm3=%xmm3 | ||
2652 | pand %xmm12,%xmm3 | ||
2653 | |||
2654 | # qhasm: xmm4 &= xmm13 | ||
2655 | # asm 1: pand <xmm13=int6464#16,<xmm4=int6464#5 | ||
2656 | # asm 2: pand <xmm13=%xmm15,<xmm4=%xmm4 | ||
2657 | pand %xmm15,%xmm4 | ||
2658 | |||
2659 | # qhasm: xmm3 ^= xmm4 | ||
2660 | # asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4 | ||
2661 | # asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3 | ||
2662 | pxor %xmm4,%xmm3 | ||
2663 | |||
2664 | # qhasm: xmm4 ^= xmm10 | ||
2665 | # asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5 | ||
2666 | # asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4 | ||
2667 | pxor %xmm10,%xmm4 | ||
2668 | |||
2669 | # qhasm: xmm15 ^= xmm13 | ||
2670 | # asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14 | ||
2671 | # asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13 | ||
2672 | pxor %xmm15,%xmm13 | ||
2673 | |||
2674 | # qhasm: xmm14 ^= xmm9 | ||
2675 | # asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12 | ||
2676 | # asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11 | ||
2677 | pxor %xmm12,%xmm11 | ||
2678 | |||
2679 | # qhasm: xmm11 = xmm15 | ||
2680 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
2681 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
2682 | movdqa %xmm13,%xmm10 | ||
2683 | |||
2684 | # qhasm: xmm11 ^= xmm14 | ||
2685 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
2686 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
2687 | pxor %xmm11,%xmm10 | ||
2688 | |||
2689 | # qhasm: xmm11 &= xmm5 | ||
2690 | # asm 1: pand <xmm5=int6464#6,<xmm11=int6464#11 | ||
2691 | # asm 2: pand <xmm5=%xmm5,<xmm11=%xmm10 | ||
2692 | pand %xmm5,%xmm10 | ||
2693 | |||
2694 | # qhasm: xmm5 ^= xmm1 | ||
2695 | # asm 1: pxor <xmm1=int6464#2,<xmm5=int6464#6 | ||
2696 | # asm 2: pxor <xmm1=%xmm1,<xmm5=%xmm5 | ||
2697 | pxor %xmm1,%xmm5 | ||
2698 | |||
2699 | # qhasm: xmm5 &= xmm14 | ||
2700 | # asm 1: pand <xmm14=int6464#12,<xmm5=int6464#6 | ||
2701 | # asm 2: pand <xmm14=%xmm11,<xmm5=%xmm5 | ||
2702 | pand %xmm11,%xmm5 | ||
2703 | |||
2704 | # qhasm: xmm1 &= xmm15 | ||
2705 | # asm 1: pand <xmm15=int6464#14,<xmm1=int6464#2 | ||
2706 | # asm 2: pand <xmm15=%xmm13,<xmm1=%xmm1 | ||
2707 | pand %xmm13,%xmm1 | ||
2708 | |||
2709 | # qhasm: xmm5 ^= xmm1 | ||
2710 | # asm 1: pxor <xmm1=int6464#2,<xmm5=int6464#6 | ||
2711 | # asm 2: pxor <xmm1=%xmm1,<xmm5=%xmm5 | ||
2712 | pxor %xmm1,%xmm5 | ||
2713 | |||
2714 | # qhasm: xmm1 ^= xmm11 | ||
2715 | # asm 1: pxor <xmm11=int6464#11,<xmm1=int6464#2 | ||
2716 | # asm 2: pxor <xmm11=%xmm10,<xmm1=%xmm1 | ||
2717 | pxor %xmm10,%xmm1 | ||
2718 | |||
2719 | # qhasm: xmm5 ^= xmm12 | ||
2720 | # asm 1: pxor <xmm12=int6464#9,<xmm5=int6464#6 | ||
2721 | # asm 2: pxor <xmm12=%xmm8,<xmm5=%xmm5 | ||
2722 | pxor %xmm8,%xmm5 | ||
2723 | |||
2724 | # qhasm: xmm3 ^= xmm12 | ||
2725 | # asm 1: pxor <xmm12=int6464#9,<xmm3=int6464#4 | ||
2726 | # asm 2: pxor <xmm12=%xmm8,<xmm3=%xmm3 | ||
2727 | pxor %xmm8,%xmm3 | ||
2728 | |||
2729 | # qhasm: xmm1 ^= xmm8 | ||
2730 | # asm 1: pxor <xmm8=int6464#10,<xmm1=int6464#2 | ||
2731 | # asm 2: pxor <xmm8=%xmm9,<xmm1=%xmm1 | ||
2732 | pxor %xmm9,%xmm1 | ||
2733 | |||
2734 | # qhasm: xmm4 ^= xmm8 | ||
2735 | # asm 1: pxor <xmm8=int6464#10,<xmm4=int6464#5 | ||
2736 | # asm 2: pxor <xmm8=%xmm9,<xmm4=%xmm4 | ||
2737 | pxor %xmm9,%xmm4 | ||
2738 | |||
2739 | # qhasm: xmm5 ^= xmm0 | ||
2740 | # asm 1: pxor <xmm0=int6464#1,<xmm5=int6464#6 | ||
2741 | # asm 2: pxor <xmm0=%xmm0,<xmm5=%xmm5 | ||
2742 | pxor %xmm0,%xmm5 | ||
2743 | |||
2744 | # qhasm: xmm1 ^= xmm2 | ||
2745 | # asm 1: pxor <xmm2=int6464#3,<xmm1=int6464#2 | ||
2746 | # asm 2: pxor <xmm2=%xmm2,<xmm1=%xmm1 | ||
2747 | pxor %xmm2,%xmm1 | ||
2748 | |||
2749 | # qhasm: xmm3 ^= xmm5 | ||
2750 | # asm 1: pxor <xmm5=int6464#6,<xmm3=int6464#4 | ||
2751 | # asm 2: pxor <xmm5=%xmm5,<xmm3=%xmm3 | ||
2752 | pxor %xmm5,%xmm3 | ||
2753 | |||
2754 | # qhasm: xmm2 ^= xmm0 | ||
2755 | # asm 1: pxor <xmm0=int6464#1,<xmm2=int6464#3 | ||
2756 | # asm 2: pxor <xmm0=%xmm0,<xmm2=%xmm2 | ||
2757 | pxor %xmm0,%xmm2 | ||
2758 | |||
2759 | # qhasm: xmm0 ^= xmm1 | ||
2760 | # asm 1: pxor <xmm1=int6464#2,<xmm0=int6464#1 | ||
2761 | # asm 2: pxor <xmm1=%xmm1,<xmm0=%xmm0 | ||
2762 | pxor %xmm1,%xmm0 | ||
2763 | |||
2764 | # qhasm: xmm1 ^= xmm7 | ||
2765 | # asm 1: pxor <xmm7=int6464#8,<xmm1=int6464#2 | ||
2766 | # asm 2: pxor <xmm7=%xmm7,<xmm1=%xmm1 | ||
2767 | pxor %xmm7,%xmm1 | ||
2768 | |||
2769 | # qhasm: xmm7 ^= xmm4 | ||
2770 | # asm 1: pxor <xmm4=int6464#5,<xmm7=int6464#8 | ||
2771 | # asm 2: pxor <xmm4=%xmm4,<xmm7=%xmm7 | ||
2772 | pxor %xmm4,%xmm7 | ||
2773 | |||
2774 | # qhasm: xmm3 ^= xmm7 | ||
2775 | # asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4 | ||
2776 | # asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3 | ||
2777 | pxor %xmm7,%xmm3 | ||
2778 | |||
2779 | # qhasm: xmm4 ^= xmm6 | ||
2780 | # asm 1: pxor <xmm6=int6464#7,<xmm4=int6464#5 | ||
2781 | # asm 2: pxor <xmm6=%xmm6,<xmm4=%xmm4 | ||
2782 | pxor %xmm6,%xmm4 | ||
2783 | |||
2784 | # qhasm: xmm6 ^= xmm7 | ||
2785 | # asm 1: pxor <xmm7=int6464#8,<xmm6=int6464#7 | ||
2786 | # asm 2: pxor <xmm7=%xmm7,<xmm6=%xmm6 | ||
2787 | pxor %xmm7,%xmm6 | ||
2788 | |||
2789 | # qhasm: xmm2 ^= xmm6 | ||
2790 | # asm 1: pxor <xmm6=int6464#7,<xmm2=int6464#3 | ||
2791 | # asm 2: pxor <xmm6=%xmm6,<xmm2=%xmm2 | ||
2792 | pxor %xmm6,%xmm2 | ||
2793 | |||
2794 | # qhasm: xmm1 ^= RCON | ||
2795 | # asm 1: pxor RCON,<xmm1=int6464#2 | ||
2796 | # asm 2: pxor RCON,<xmm1=%xmm1 | ||
2797 | pxor RCON,%xmm1 | ||
2798 | |||
2799 | # qhasm: shuffle bytes of xmm0 by EXPB0 | ||
2800 | # asm 1: pshufb EXPB0,<xmm0=int6464#1 | ||
2801 | # asm 2: pshufb EXPB0,<xmm0=%xmm0 | ||
2802 | pshufb EXPB0,%xmm0 | ||
2803 | |||
2804 | # qhasm: shuffle bytes of xmm1 by EXPB0 | ||
2805 | # asm 1: pshufb EXPB0,<xmm1=int6464#2 | ||
2806 | # asm 2: pshufb EXPB0,<xmm1=%xmm1 | ||
2807 | pshufb EXPB0,%xmm1 | ||
2808 | |||
2809 | # qhasm: shuffle bytes of xmm3 by EXPB0 | ||
2810 | # asm 1: pshufb EXPB0,<xmm3=int6464#4 | ||
2811 | # asm 2: pshufb EXPB0,<xmm3=%xmm3 | ||
2812 | pshufb EXPB0,%xmm3 | ||
2813 | |||
2814 | # qhasm: shuffle bytes of xmm2 by EXPB0 | ||
2815 | # asm 1: pshufb EXPB0,<xmm2=int6464#3 | ||
2816 | # asm 2: pshufb EXPB0,<xmm2=%xmm2 | ||
2817 | pshufb EXPB0,%xmm2 | ||
2818 | |||
2819 | # qhasm: shuffle bytes of xmm6 by EXPB0 | ||
2820 | # asm 1: pshufb EXPB0,<xmm6=int6464#7 | ||
2821 | # asm 2: pshufb EXPB0,<xmm6=%xmm6 | ||
2822 | pshufb EXPB0,%xmm6 | ||
2823 | |||
2824 | # qhasm: shuffle bytes of xmm5 by EXPB0 | ||
2825 | # asm 1: pshufb EXPB0,<xmm5=int6464#6 | ||
2826 | # asm 2: pshufb EXPB0,<xmm5=%xmm5 | ||
2827 | pshufb EXPB0,%xmm5 | ||
2828 | |||
2829 | # qhasm: shuffle bytes of xmm4 by EXPB0 | ||
2830 | # asm 1: pshufb EXPB0,<xmm4=int6464#5 | ||
2831 | # asm 2: pshufb EXPB0,<xmm4=%xmm4 | ||
2832 | pshufb EXPB0,%xmm4 | ||
2833 | |||
2834 | # qhasm: shuffle bytes of xmm7 by EXPB0 | ||
2835 | # asm 1: pshufb EXPB0,<xmm7=int6464#8 | ||
2836 | # asm 2: pshufb EXPB0,<xmm7=%xmm7 | ||
2837 | pshufb EXPB0,%xmm7 | ||
2838 | |||
2839 | # qhasm: xmm8 = *(int128 *)(c + 128) | ||
2840 | # asm 1: movdqa 128(<c=int64#1),>xmm8=int6464#9 | ||
2841 | # asm 2: movdqa 128(<c=%rdi),>xmm8=%xmm8 | ||
2842 | movdqa 128(%rdi),%xmm8 | ||
2843 | |||
2844 | # qhasm: xmm9 = *(int128 *)(c + 144) | ||
2845 | # asm 1: movdqa 144(<c=int64#1),>xmm9=int6464#10 | ||
2846 | # asm 2: movdqa 144(<c=%rdi),>xmm9=%xmm9 | ||
2847 | movdqa 144(%rdi),%xmm9 | ||
2848 | |||
2849 | # qhasm: xmm10 = *(int128 *)(c + 160) | ||
2850 | # asm 1: movdqa 160(<c=int64#1),>xmm10=int6464#11 | ||
2851 | # asm 2: movdqa 160(<c=%rdi),>xmm10=%xmm10 | ||
2852 | movdqa 160(%rdi),%xmm10 | ||
2853 | |||
2854 | # qhasm: xmm11 = *(int128 *)(c + 176) | ||
2855 | # asm 1: movdqa 176(<c=int64#1),>xmm11=int6464#12 | ||
2856 | # asm 2: movdqa 176(<c=%rdi),>xmm11=%xmm11 | ||
2857 | movdqa 176(%rdi),%xmm11 | ||
2858 | |||
2859 | # qhasm: xmm12 = *(int128 *)(c + 192) | ||
2860 | # asm 1: movdqa 192(<c=int64#1),>xmm12=int6464#13 | ||
2861 | # asm 2: movdqa 192(<c=%rdi),>xmm12=%xmm12 | ||
2862 | movdqa 192(%rdi),%xmm12 | ||
2863 | |||
2864 | # qhasm: xmm13 = *(int128 *)(c + 208) | ||
2865 | # asm 1: movdqa 208(<c=int64#1),>xmm13=int6464#14 | ||
2866 | # asm 2: movdqa 208(<c=%rdi),>xmm13=%xmm13 | ||
2867 | movdqa 208(%rdi),%xmm13 | ||
2868 | |||
2869 | # qhasm: xmm14 = *(int128 *)(c + 224) | ||
2870 | # asm 1: movdqa 224(<c=int64#1),>xmm14=int6464#15 | ||
2871 | # asm 2: movdqa 224(<c=%rdi),>xmm14=%xmm14 | ||
2872 | movdqa 224(%rdi),%xmm14 | ||
2873 | |||
2874 | # qhasm: xmm15 = *(int128 *)(c + 240) | ||
2875 | # asm 1: movdqa 240(<c=int64#1),>xmm15=int6464#16 | ||
2876 | # asm 2: movdqa 240(<c=%rdi),>xmm15=%xmm15 | ||
2877 | movdqa 240(%rdi),%xmm15 | ||
2878 | |||
2879 | # qhasm: xmm8 ^= ONE | ||
2880 | # asm 1: pxor ONE,<xmm8=int6464#9 | ||
2881 | # asm 2: pxor ONE,<xmm8=%xmm8 | ||
2882 | pxor ONE,%xmm8 | ||
2883 | |||
2884 | # qhasm: xmm9 ^= ONE | ||
2885 | # asm 1: pxor ONE,<xmm9=int6464#10 | ||
2886 | # asm 2: pxor ONE,<xmm9=%xmm9 | ||
2887 | pxor ONE,%xmm9 | ||
2888 | |||
2889 | # qhasm: xmm13 ^= ONE | ||
2890 | # asm 1: pxor ONE,<xmm13=int6464#14 | ||
2891 | # asm 2: pxor ONE,<xmm13=%xmm13 | ||
2892 | pxor ONE,%xmm13 | ||
2893 | |||
2894 | # qhasm: xmm14 ^= ONE | ||
2895 | # asm 1: pxor ONE,<xmm14=int6464#15 | ||
2896 | # asm 2: pxor ONE,<xmm14=%xmm14 | ||
2897 | pxor ONE,%xmm14 | ||
2898 | |||
2899 | # qhasm: xmm0 ^= xmm8 | ||
2900 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
2901 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
2902 | pxor %xmm8,%xmm0 | ||
2903 | |||
2904 | # qhasm: xmm1 ^= xmm9 | ||
2905 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
2906 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
2907 | pxor %xmm9,%xmm1 | ||
2908 | |||
2909 | # qhasm: xmm3 ^= xmm10 | ||
2910 | # asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4 | ||
2911 | # asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3 | ||
2912 | pxor %xmm10,%xmm3 | ||
2913 | |||
2914 | # qhasm: xmm2 ^= xmm11 | ||
2915 | # asm 1: pxor <xmm11=int6464#12,<xmm2=int6464#3 | ||
2916 | # asm 2: pxor <xmm11=%xmm11,<xmm2=%xmm2 | ||
2917 | pxor %xmm11,%xmm2 | ||
2918 | |||
2919 | # qhasm: xmm6 ^= xmm12 | ||
2920 | # asm 1: pxor <xmm12=int6464#13,<xmm6=int6464#7 | ||
2921 | # asm 2: pxor <xmm12=%xmm12,<xmm6=%xmm6 | ||
2922 | pxor %xmm12,%xmm6 | ||
2923 | |||
2924 | # qhasm: xmm5 ^= xmm13 | ||
2925 | # asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6 | ||
2926 | # asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5 | ||
2927 | pxor %xmm13,%xmm5 | ||
2928 | |||
2929 | # qhasm: xmm4 ^= xmm14 | ||
2930 | # asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5 | ||
2931 | # asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4 | ||
2932 | pxor %xmm14,%xmm4 | ||
2933 | |||
2934 | # qhasm: xmm7 ^= xmm15 | ||
2935 | # asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8 | ||
2936 | # asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7 | ||
2937 | pxor %xmm15,%xmm7 | ||
2938 | |||
2939 | # qhasm: uint32323232 xmm8 >>= 8 | ||
2940 | # asm 1: psrld $8,<xmm8=int6464#9 | ||
2941 | # asm 2: psrld $8,<xmm8=%xmm8 | ||
2942 | psrld $8,%xmm8 | ||
2943 | |||
2944 | # qhasm: uint32323232 xmm9 >>= 8 | ||
2945 | # asm 1: psrld $8,<xmm9=int6464#10 | ||
2946 | # asm 2: psrld $8,<xmm9=%xmm9 | ||
2947 | psrld $8,%xmm9 | ||
2948 | |||
2949 | # qhasm: uint32323232 xmm10 >>= 8 | ||
2950 | # asm 1: psrld $8,<xmm10=int6464#11 | ||
2951 | # asm 2: psrld $8,<xmm10=%xmm10 | ||
2952 | psrld $8,%xmm10 | ||
2953 | |||
2954 | # qhasm: uint32323232 xmm11 >>= 8 | ||
2955 | # asm 1: psrld $8,<xmm11=int6464#12 | ||
2956 | # asm 2: psrld $8,<xmm11=%xmm11 | ||
2957 | psrld $8,%xmm11 | ||
2958 | |||
2959 | # qhasm: uint32323232 xmm12 >>= 8 | ||
2960 | # asm 1: psrld $8,<xmm12=int6464#13 | ||
2961 | # asm 2: psrld $8,<xmm12=%xmm12 | ||
2962 | psrld $8,%xmm12 | ||
2963 | |||
2964 | # qhasm: uint32323232 xmm13 >>= 8 | ||
2965 | # asm 1: psrld $8,<xmm13=int6464#14 | ||
2966 | # asm 2: psrld $8,<xmm13=%xmm13 | ||
2967 | psrld $8,%xmm13 | ||
2968 | |||
2969 | # qhasm: uint32323232 xmm14 >>= 8 | ||
2970 | # asm 1: psrld $8,<xmm14=int6464#15 | ||
2971 | # asm 2: psrld $8,<xmm14=%xmm14 | ||
2972 | psrld $8,%xmm14 | ||
2973 | |||
2974 | # qhasm: uint32323232 xmm15 >>= 8 | ||
2975 | # asm 1: psrld $8,<xmm15=int6464#16 | ||
2976 | # asm 2: psrld $8,<xmm15=%xmm15 | ||
2977 | psrld $8,%xmm15 | ||
2978 | |||
2979 | # qhasm: xmm0 ^= xmm8 | ||
2980 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
2981 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
2982 | pxor %xmm8,%xmm0 | ||
2983 | |||
2984 | # qhasm: xmm1 ^= xmm9 | ||
2985 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
2986 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
2987 | pxor %xmm9,%xmm1 | ||
2988 | |||
2989 | # qhasm: xmm3 ^= xmm10 | ||
2990 | # asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4 | ||
2991 | # asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3 | ||
2992 | pxor %xmm10,%xmm3 | ||
2993 | |||
2994 | # qhasm: xmm2 ^= xmm11 | ||
2995 | # asm 1: pxor <xmm11=int6464#12,<xmm2=int6464#3 | ||
2996 | # asm 2: pxor <xmm11=%xmm11,<xmm2=%xmm2 | ||
2997 | pxor %xmm11,%xmm2 | ||
2998 | |||
2999 | # qhasm: xmm6 ^= xmm12 | ||
3000 | # asm 1: pxor <xmm12=int6464#13,<xmm6=int6464#7 | ||
3001 | # asm 2: pxor <xmm12=%xmm12,<xmm6=%xmm6 | ||
3002 | pxor %xmm12,%xmm6 | ||
3003 | |||
3004 | # qhasm: xmm5 ^= xmm13 | ||
3005 | # asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6 | ||
3006 | # asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5 | ||
3007 | pxor %xmm13,%xmm5 | ||
3008 | |||
3009 | # qhasm: xmm4 ^= xmm14 | ||
3010 | # asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5 | ||
3011 | # asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4 | ||
3012 | pxor %xmm14,%xmm4 | ||
3013 | |||
3014 | # qhasm: xmm7 ^= xmm15 | ||
3015 | # asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8 | ||
3016 | # asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7 | ||
3017 | pxor %xmm15,%xmm7 | ||
3018 | |||
3019 | # qhasm: uint32323232 xmm8 >>= 8 | ||
3020 | # asm 1: psrld $8,<xmm8=int6464#9 | ||
3021 | # asm 2: psrld $8,<xmm8=%xmm8 | ||
3022 | psrld $8,%xmm8 | ||
3023 | |||
3024 | # qhasm: uint32323232 xmm9 >>= 8 | ||
3025 | # asm 1: psrld $8,<xmm9=int6464#10 | ||
3026 | # asm 2: psrld $8,<xmm9=%xmm9 | ||
3027 | psrld $8,%xmm9 | ||
3028 | |||
3029 | # qhasm: uint32323232 xmm10 >>= 8 | ||
3030 | # asm 1: psrld $8,<xmm10=int6464#11 | ||
3031 | # asm 2: psrld $8,<xmm10=%xmm10 | ||
3032 | psrld $8,%xmm10 | ||
3033 | |||
3034 | # qhasm: uint32323232 xmm11 >>= 8 | ||
3035 | # asm 1: psrld $8,<xmm11=int6464#12 | ||
3036 | # asm 2: psrld $8,<xmm11=%xmm11 | ||
3037 | psrld $8,%xmm11 | ||
3038 | |||
3039 | # qhasm: uint32323232 xmm12 >>= 8 | ||
3040 | # asm 1: psrld $8,<xmm12=int6464#13 | ||
3041 | # asm 2: psrld $8,<xmm12=%xmm12 | ||
3042 | psrld $8,%xmm12 | ||
3043 | |||
3044 | # qhasm: uint32323232 xmm13 >>= 8 | ||
3045 | # asm 1: psrld $8,<xmm13=int6464#14 | ||
3046 | # asm 2: psrld $8,<xmm13=%xmm13 | ||
3047 | psrld $8,%xmm13 | ||
3048 | |||
3049 | # qhasm: uint32323232 xmm14 >>= 8 | ||
3050 | # asm 1: psrld $8,<xmm14=int6464#15 | ||
3051 | # asm 2: psrld $8,<xmm14=%xmm14 | ||
3052 | psrld $8,%xmm14 | ||
3053 | |||
3054 | # qhasm: uint32323232 xmm15 >>= 8 | ||
3055 | # asm 1: psrld $8,<xmm15=int6464#16 | ||
3056 | # asm 2: psrld $8,<xmm15=%xmm15 | ||
3057 | psrld $8,%xmm15 | ||
3058 | |||
3059 | # qhasm: xmm0 ^= xmm8 | ||
3060 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
3061 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
3062 | pxor %xmm8,%xmm0 | ||
3063 | |||
3064 | # qhasm: xmm1 ^= xmm9 | ||
3065 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
3066 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
3067 | pxor %xmm9,%xmm1 | ||
3068 | |||
3069 | # qhasm: xmm3 ^= xmm10 | ||
3070 | # asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4 | ||
3071 | # asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3 | ||
3072 | pxor %xmm10,%xmm3 | ||
3073 | |||
3074 | # qhasm: xmm2 ^= xmm11 | ||
3075 | # asm 1: pxor <xmm11=int6464#12,<xmm2=int6464#3 | ||
3076 | # asm 2: pxor <xmm11=%xmm11,<xmm2=%xmm2 | ||
3077 | pxor %xmm11,%xmm2 | ||
3078 | |||
3079 | # qhasm: xmm6 ^= xmm12 | ||
3080 | # asm 1: pxor <xmm12=int6464#13,<xmm6=int6464#7 | ||
3081 | # asm 2: pxor <xmm12=%xmm12,<xmm6=%xmm6 | ||
3082 | pxor %xmm12,%xmm6 | ||
3083 | |||
3084 | # qhasm: xmm5 ^= xmm13 | ||
3085 | # asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6 | ||
3086 | # asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5 | ||
3087 | pxor %xmm13,%xmm5 | ||
3088 | |||
3089 | # qhasm: xmm4 ^= xmm14 | ||
3090 | # asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5 | ||
3091 | # asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4 | ||
3092 | pxor %xmm14,%xmm4 | ||
3093 | |||
3094 | # qhasm: xmm7 ^= xmm15 | ||
3095 | # asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8 | ||
3096 | # asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7 | ||
3097 | pxor %xmm15,%xmm7 | ||
3098 | |||
3099 | # qhasm: uint32323232 xmm8 >>= 8 | ||
3100 | # asm 1: psrld $8,<xmm8=int6464#9 | ||
3101 | # asm 2: psrld $8,<xmm8=%xmm8 | ||
3102 | psrld $8,%xmm8 | ||
3103 | |||
3104 | # qhasm: uint32323232 xmm9 >>= 8 | ||
3105 | # asm 1: psrld $8,<xmm9=int6464#10 | ||
3106 | # asm 2: psrld $8,<xmm9=%xmm9 | ||
3107 | psrld $8,%xmm9 | ||
3108 | |||
3109 | # qhasm: uint32323232 xmm10 >>= 8 | ||
3110 | # asm 1: psrld $8,<xmm10=int6464#11 | ||
3111 | # asm 2: psrld $8,<xmm10=%xmm10 | ||
3112 | psrld $8,%xmm10 | ||
3113 | |||
3114 | # qhasm: uint32323232 xmm11 >>= 8 | ||
3115 | # asm 1: psrld $8,<xmm11=int6464#12 | ||
3116 | # asm 2: psrld $8,<xmm11=%xmm11 | ||
3117 | psrld $8,%xmm11 | ||
3118 | |||
3119 | # qhasm: uint32323232 xmm12 >>= 8 | ||
3120 | # asm 1: psrld $8,<xmm12=int6464#13 | ||
3121 | # asm 2: psrld $8,<xmm12=%xmm12 | ||
3122 | psrld $8,%xmm12 | ||
3123 | |||
3124 | # qhasm: uint32323232 xmm13 >>= 8 | ||
3125 | # asm 1: psrld $8,<xmm13=int6464#14 | ||
3126 | # asm 2: psrld $8,<xmm13=%xmm13 | ||
3127 | psrld $8,%xmm13 | ||
3128 | |||
3129 | # qhasm: uint32323232 xmm14 >>= 8 | ||
3130 | # asm 1: psrld $8,<xmm14=int6464#15 | ||
3131 | # asm 2: psrld $8,<xmm14=%xmm14 | ||
3132 | psrld $8,%xmm14 | ||
3133 | |||
3134 | # qhasm: uint32323232 xmm15 >>= 8 | ||
3135 | # asm 1: psrld $8,<xmm15=int6464#16 | ||
3136 | # asm 2: psrld $8,<xmm15=%xmm15 | ||
3137 | psrld $8,%xmm15 | ||
3138 | |||
3139 | # qhasm: xmm0 ^= xmm8 | ||
3140 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
3141 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
3142 | pxor %xmm8,%xmm0 | ||
3143 | |||
3144 | # qhasm: xmm1 ^= xmm9 | ||
3145 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
3146 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
3147 | pxor %xmm9,%xmm1 | ||
3148 | |||
3149 | # qhasm: xmm3 ^= xmm10 | ||
3150 | # asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4 | ||
3151 | # asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3 | ||
3152 | pxor %xmm10,%xmm3 | ||
3153 | |||
3154 | # qhasm: xmm2 ^= xmm11 | ||
3155 | # asm 1: pxor <xmm11=int6464#12,<xmm2=int6464#3 | ||
3156 | # asm 2: pxor <xmm11=%xmm11,<xmm2=%xmm2 | ||
3157 | pxor %xmm11,%xmm2 | ||
3158 | |||
3159 | # qhasm: xmm6 ^= xmm12 | ||
3160 | # asm 1: pxor <xmm12=int6464#13,<xmm6=int6464#7 | ||
3161 | # asm 2: pxor <xmm12=%xmm12,<xmm6=%xmm6 | ||
3162 | pxor %xmm12,%xmm6 | ||
3163 | |||
3164 | # qhasm: xmm5 ^= xmm13 | ||
3165 | # asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6 | ||
3166 | # asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5 | ||
3167 | pxor %xmm13,%xmm5 | ||
3168 | |||
3169 | # qhasm: xmm4 ^= xmm14 | ||
3170 | # asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5 | ||
3171 | # asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4 | ||
3172 | pxor %xmm14,%xmm4 | ||
3173 | |||
3174 | # qhasm: xmm7 ^= xmm15 | ||
3175 | # asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8 | ||
3176 | # asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7 | ||
3177 | pxor %xmm15,%xmm7 | ||
3178 | |||
3179 | # qhasm: *(int128 *)(c + 256) = xmm0 | ||
3180 | # asm 1: movdqa <xmm0=int6464#1,256(<c=int64#1) | ||
3181 | # asm 2: movdqa <xmm0=%xmm0,256(<c=%rdi) | ||
3182 | movdqa %xmm0,256(%rdi) | ||
3183 | |||
3184 | # qhasm: *(int128 *)(c + 272) = xmm1 | ||
3185 | # asm 1: movdqa <xmm1=int6464#2,272(<c=int64#1) | ||
3186 | # asm 2: movdqa <xmm1=%xmm1,272(<c=%rdi) | ||
3187 | movdqa %xmm1,272(%rdi) | ||
3188 | |||
3189 | # qhasm: *(int128 *)(c + 288) = xmm3 | ||
3190 | # asm 1: movdqa <xmm3=int6464#4,288(<c=int64#1) | ||
3191 | # asm 2: movdqa <xmm3=%xmm3,288(<c=%rdi) | ||
3192 | movdqa %xmm3,288(%rdi) | ||
3193 | |||
3194 | # qhasm: *(int128 *)(c + 304) = xmm2 | ||
3195 | # asm 1: movdqa <xmm2=int6464#3,304(<c=int64#1) | ||
3196 | # asm 2: movdqa <xmm2=%xmm2,304(<c=%rdi) | ||
3197 | movdqa %xmm2,304(%rdi) | ||
3198 | |||
3199 | # qhasm: *(int128 *)(c + 320) = xmm6 | ||
3200 | # asm 1: movdqa <xmm6=int6464#7,320(<c=int64#1) | ||
3201 | # asm 2: movdqa <xmm6=%xmm6,320(<c=%rdi) | ||
3202 | movdqa %xmm6,320(%rdi) | ||
3203 | |||
3204 | # qhasm: *(int128 *)(c + 336) = xmm5 | ||
3205 | # asm 1: movdqa <xmm5=int6464#6,336(<c=int64#1) | ||
3206 | # asm 2: movdqa <xmm5=%xmm5,336(<c=%rdi) | ||
3207 | movdqa %xmm5,336(%rdi) | ||
3208 | |||
3209 | # qhasm: *(int128 *)(c + 352) = xmm4 | ||
3210 | # asm 1: movdqa <xmm4=int6464#5,352(<c=int64#1) | ||
3211 | # asm 2: movdqa <xmm4=%xmm4,352(<c=%rdi) | ||
3212 | movdqa %xmm4,352(%rdi) | ||
3213 | |||
3214 | # qhasm: *(int128 *)(c + 368) = xmm7 | ||
3215 | # asm 1: movdqa <xmm7=int6464#8,368(<c=int64#1) | ||
3216 | # asm 2: movdqa <xmm7=%xmm7,368(<c=%rdi) | ||
3217 | movdqa %xmm7,368(%rdi) | ||
3218 | |||
3219 | # qhasm: xmm0 ^= ONE | ||
3220 | # asm 1: pxor ONE,<xmm0=int6464#1 | ||
3221 | # asm 2: pxor ONE,<xmm0=%xmm0 | ||
3222 | pxor ONE,%xmm0 | ||
3223 | |||
3224 | # qhasm: xmm1 ^= ONE | ||
3225 | # asm 1: pxor ONE,<xmm1=int6464#2 | ||
3226 | # asm 2: pxor ONE,<xmm1=%xmm1 | ||
3227 | pxor ONE,%xmm1 | ||
3228 | |||
3229 | # qhasm: xmm5 ^= ONE | ||
3230 | # asm 1: pxor ONE,<xmm5=int6464#6 | ||
3231 | # asm 2: pxor ONE,<xmm5=%xmm5 | ||
3232 | pxor ONE,%xmm5 | ||
3233 | |||
3234 | # qhasm: xmm4 ^= ONE | ||
3235 | # asm 1: pxor ONE,<xmm4=int6464#5 | ||
3236 | # asm 2: pxor ONE,<xmm4=%xmm4 | ||
3237 | pxor ONE,%xmm4 | ||
3238 | |||
3239 | # qhasm: shuffle bytes of xmm0 by ROTB | ||
3240 | # asm 1: pshufb ROTB,<xmm0=int6464#1 | ||
3241 | # asm 2: pshufb ROTB,<xmm0=%xmm0 | ||
3242 | pshufb ROTB,%xmm0 | ||
3243 | |||
3244 | # qhasm: shuffle bytes of xmm1 by ROTB | ||
3245 | # asm 1: pshufb ROTB,<xmm1=int6464#2 | ||
3246 | # asm 2: pshufb ROTB,<xmm1=%xmm1 | ||
3247 | pshufb ROTB,%xmm1 | ||
3248 | |||
3249 | # qhasm: shuffle bytes of xmm3 by ROTB | ||
3250 | # asm 1: pshufb ROTB,<xmm3=int6464#4 | ||
3251 | # asm 2: pshufb ROTB,<xmm3=%xmm3 | ||
3252 | pshufb ROTB,%xmm3 | ||
3253 | |||
3254 | # qhasm: shuffle bytes of xmm2 by ROTB | ||
3255 | # asm 1: pshufb ROTB,<xmm2=int6464#3 | ||
3256 | # asm 2: pshufb ROTB,<xmm2=%xmm2 | ||
3257 | pshufb ROTB,%xmm2 | ||
3258 | |||
3259 | # qhasm: shuffle bytes of xmm6 by ROTB | ||
3260 | # asm 1: pshufb ROTB,<xmm6=int6464#7 | ||
3261 | # asm 2: pshufb ROTB,<xmm6=%xmm6 | ||
3262 | pshufb ROTB,%xmm6 | ||
3263 | |||
3264 | # qhasm: shuffle bytes of xmm5 by ROTB | ||
3265 | # asm 1: pshufb ROTB,<xmm5=int6464#6 | ||
3266 | # asm 2: pshufb ROTB,<xmm5=%xmm5 | ||
3267 | pshufb ROTB,%xmm5 | ||
3268 | |||
3269 | # qhasm: shuffle bytes of xmm4 by ROTB | ||
3270 | # asm 1: pshufb ROTB,<xmm4=int6464#5 | ||
3271 | # asm 2: pshufb ROTB,<xmm4=%xmm4 | ||
3272 | pshufb ROTB,%xmm4 | ||
3273 | |||
3274 | # qhasm: shuffle bytes of xmm7 by ROTB | ||
3275 | # asm 1: pshufb ROTB,<xmm7=int6464#8 | ||
3276 | # asm 2: pshufb ROTB,<xmm7=%xmm7 | ||
3277 | pshufb ROTB,%xmm7 | ||
3278 | |||
3279 | # qhasm: xmm5 ^= xmm4 | ||
3280 | # asm 1: pxor <xmm4=int6464#5,<xmm5=int6464#6 | ||
3281 | # asm 2: pxor <xmm4=%xmm4,<xmm5=%xmm5 | ||
3282 | pxor %xmm4,%xmm5 | ||
3283 | |||
3284 | # qhasm: xmm3 ^= xmm1 | ||
3285 | # asm 1: pxor <xmm1=int6464#2,<xmm3=int6464#4 | ||
3286 | # asm 2: pxor <xmm1=%xmm1,<xmm3=%xmm3 | ||
3287 | pxor %xmm1,%xmm3 | ||
3288 | |||
3289 | # qhasm: xmm5 ^= xmm0 | ||
3290 | # asm 1: pxor <xmm0=int6464#1,<xmm5=int6464#6 | ||
3291 | # asm 2: pxor <xmm0=%xmm0,<xmm5=%xmm5 | ||
3292 | pxor %xmm0,%xmm5 | ||
3293 | |||
3294 | # qhasm: xmm4 ^= xmm3 | ||
3295 | # asm 1: pxor <xmm3=int6464#4,<xmm4=int6464#5 | ||
3296 | # asm 2: pxor <xmm3=%xmm3,<xmm4=%xmm4 | ||
3297 | pxor %xmm3,%xmm4 | ||
3298 | |||
3299 | # qhasm: xmm2 ^= xmm0 | ||
3300 | # asm 1: pxor <xmm0=int6464#1,<xmm2=int6464#3 | ||
3301 | # asm 2: pxor <xmm0=%xmm0,<xmm2=%xmm2 | ||
3302 | pxor %xmm0,%xmm2 | ||
3303 | |||
3304 | # qhasm: xmm4 ^= xmm2 | ||
3305 | # asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5 | ||
3306 | # asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4 | ||
3307 | pxor %xmm2,%xmm4 | ||
3308 | |||
3309 | # qhasm: xmm2 ^= xmm7 | ||
3310 | # asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3 | ||
3311 | # asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2 | ||
3312 | pxor %xmm7,%xmm2 | ||
3313 | |||
3314 | # qhasm: xmm2 ^= xmm6 | ||
3315 | # asm 1: pxor <xmm6=int6464#7,<xmm2=int6464#3 | ||
3316 | # asm 2: pxor <xmm6=%xmm6,<xmm2=%xmm2 | ||
3317 | pxor %xmm6,%xmm2 | ||
3318 | |||
3319 | # qhasm: xmm7 ^= xmm5 | ||
3320 | # asm 1: pxor <xmm5=int6464#6,<xmm7=int6464#8 | ||
3321 | # asm 2: pxor <xmm5=%xmm5,<xmm7=%xmm7 | ||
3322 | pxor %xmm5,%xmm7 | ||
3323 | |||
3324 | # qhasm: xmm2 ^= xmm1 | ||
3325 | # asm 1: pxor <xmm1=int6464#2,<xmm2=int6464#3 | ||
3326 | # asm 2: pxor <xmm1=%xmm1,<xmm2=%xmm2 | ||
3327 | pxor %xmm1,%xmm2 | ||
3328 | |||
3329 | # qhasm: xmm6 ^= xmm5 | ||
3330 | # asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7 | ||
3331 | # asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6 | ||
3332 | pxor %xmm5,%xmm6 | ||
3333 | |||
3334 | # qhasm: xmm3 ^= xmm7 | ||
3335 | # asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4 | ||
3336 | # asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3 | ||
3337 | pxor %xmm7,%xmm3 | ||
3338 | |||
3339 | # qhasm: xmm1 ^= xmm5 | ||
3340 | # asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2 | ||
3341 | # asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1 | ||
3342 | pxor %xmm5,%xmm1 | ||
3343 | |||
3344 | # qhasm: xmm11 = xmm7 | ||
3345 | # asm 1: movdqa <xmm7=int6464#8,>xmm11=int6464#9 | ||
3346 | # asm 2: movdqa <xmm7=%xmm7,>xmm11=%xmm8 | ||
3347 | movdqa %xmm7,%xmm8 | ||
3348 | |||
3349 | # qhasm: xmm10 = xmm1 | ||
3350 | # asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10 | ||
3351 | # asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9 | ||
3352 | movdqa %xmm1,%xmm9 | ||
3353 | |||
3354 | # qhasm: xmm9 = xmm5 | ||
3355 | # asm 1: movdqa <xmm5=int6464#6,>xmm9=int6464#11 | ||
3356 | # asm 2: movdqa <xmm5=%xmm5,>xmm9=%xmm10 | ||
3357 | movdqa %xmm5,%xmm10 | ||
3358 | |||
3359 | # qhasm: xmm13 = xmm3 | ||
3360 | # asm 1: movdqa <xmm3=int6464#4,>xmm13=int6464#12 | ||
3361 | # asm 2: movdqa <xmm3=%xmm3,>xmm13=%xmm11 | ||
3362 | movdqa %xmm3,%xmm11 | ||
3363 | |||
3364 | # qhasm: xmm12 = xmm4 | ||
3365 | # asm 1: movdqa <xmm4=int6464#5,>xmm12=int6464#13 | ||
3366 | # asm 2: movdqa <xmm4=%xmm4,>xmm12=%xmm12 | ||
3367 | movdqa %xmm4,%xmm12 | ||
3368 | |||
3369 | # qhasm: xmm11 ^= xmm6 | ||
3370 | # asm 1: pxor <xmm6=int6464#7,<xmm11=int6464#9 | ||
3371 | # asm 2: pxor <xmm6=%xmm6,<xmm11=%xmm8 | ||
3372 | pxor %xmm6,%xmm8 | ||
3373 | |||
3374 | # qhasm: xmm10 ^= xmm3 | ||
3375 | # asm 1: pxor <xmm3=int6464#4,<xmm10=int6464#10 | ||
3376 | # asm 2: pxor <xmm3=%xmm3,<xmm10=%xmm9 | ||
3377 | pxor %xmm3,%xmm9 | ||
3378 | |||
3379 | # qhasm: xmm9 ^= xmm2 | ||
3380 | # asm 1: pxor <xmm2=int6464#3,<xmm9=int6464#11 | ||
3381 | # asm 2: pxor <xmm2=%xmm2,<xmm9=%xmm10 | ||
3382 | pxor %xmm2,%xmm10 | ||
3383 | |||
3384 | # qhasm: xmm13 ^= xmm6 | ||
3385 | # asm 1: pxor <xmm6=int6464#7,<xmm13=int6464#12 | ||
3386 | # asm 2: pxor <xmm6=%xmm6,<xmm13=%xmm11 | ||
3387 | pxor %xmm6,%xmm11 | ||
3388 | |||
3389 | # qhasm: xmm12 ^= xmm0 | ||
3390 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13 | ||
3391 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12 | ||
3392 | pxor %xmm0,%xmm12 | ||
3393 | |||
3394 | # qhasm: xmm14 = xmm11 | ||
3395 | # asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14 | ||
3396 | # asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13 | ||
3397 | movdqa %xmm8,%xmm13 | ||
3398 | |||
3399 | # qhasm: xmm8 = xmm10 | ||
3400 | # asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15 | ||
3401 | # asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14 | ||
3402 | movdqa %xmm9,%xmm14 | ||
3403 | |||
3404 | # qhasm: xmm15 = xmm11 | ||
3405 | # asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16 | ||
3406 | # asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15 | ||
3407 | movdqa %xmm8,%xmm15 | ||
3408 | |||
3409 | # qhasm: xmm10 |= xmm9 | ||
3410 | # asm 1: por <xmm9=int6464#11,<xmm10=int6464#10 | ||
3411 | # asm 2: por <xmm9=%xmm10,<xmm10=%xmm9 | ||
3412 | por %xmm10,%xmm9 | ||
3413 | |||
3414 | # qhasm: xmm11 |= xmm12 | ||
3415 | # asm 1: por <xmm12=int6464#13,<xmm11=int6464#9 | ||
3416 | # asm 2: por <xmm12=%xmm12,<xmm11=%xmm8 | ||
3417 | por %xmm12,%xmm8 | ||
3418 | |||
3419 | # qhasm: xmm15 ^= xmm8 | ||
3420 | # asm 1: pxor <xmm8=int6464#15,<xmm15=int6464#16 | ||
3421 | # asm 2: pxor <xmm8=%xmm14,<xmm15=%xmm15 | ||
3422 | pxor %xmm14,%xmm15 | ||
3423 | |||
3424 | # qhasm: xmm14 &= xmm12 | ||
3425 | # asm 1: pand <xmm12=int6464#13,<xmm14=int6464#14 | ||
3426 | # asm 2: pand <xmm12=%xmm12,<xmm14=%xmm13 | ||
3427 | pand %xmm12,%xmm13 | ||
3428 | |||
3429 | # qhasm: xmm8 &= xmm9 | ||
3430 | # asm 1: pand <xmm9=int6464#11,<xmm8=int6464#15 | ||
3431 | # asm 2: pand <xmm9=%xmm10,<xmm8=%xmm14 | ||
3432 | pand %xmm10,%xmm14 | ||
3433 | |||
3434 | # qhasm: xmm12 ^= xmm9 | ||
3435 | # asm 1: pxor <xmm9=int6464#11,<xmm12=int6464#13 | ||
3436 | # asm 2: pxor <xmm9=%xmm10,<xmm12=%xmm12 | ||
3437 | pxor %xmm10,%xmm12 | ||
3438 | |||
3439 | # qhasm: xmm15 &= xmm12 | ||
3440 | # asm 1: pand <xmm12=int6464#13,<xmm15=int6464#16 | ||
3441 | # asm 2: pand <xmm12=%xmm12,<xmm15=%xmm15 | ||
3442 | pand %xmm12,%xmm15 | ||
3443 | |||
3444 | # qhasm: xmm12 = xmm2 | ||
3445 | # asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#11 | ||
3446 | # asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm10 | ||
3447 | movdqa %xmm2,%xmm10 | ||
3448 | |||
3449 | # qhasm: xmm12 ^= xmm0 | ||
3450 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#11 | ||
3451 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm10 | ||
3452 | pxor %xmm0,%xmm10 | ||
3453 | |||
3454 | # qhasm: xmm13 &= xmm12 | ||
3455 | # asm 1: pand <xmm12=int6464#11,<xmm13=int6464#12 | ||
3456 | # asm 2: pand <xmm12=%xmm10,<xmm13=%xmm11 | ||
3457 | pand %xmm10,%xmm11 | ||
3458 | |||
3459 | # qhasm: xmm11 ^= xmm13 | ||
3460 | # asm 1: pxor <xmm13=int6464#12,<xmm11=int6464#9 | ||
3461 | # asm 2: pxor <xmm13=%xmm11,<xmm11=%xmm8 | ||
3462 | pxor %xmm11,%xmm8 | ||
3463 | |||
3464 | # qhasm: xmm10 ^= xmm13 | ||
3465 | # asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10 | ||
3466 | # asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9 | ||
3467 | pxor %xmm11,%xmm9 | ||
3468 | |||
3469 | # qhasm: xmm13 = xmm7 | ||
3470 | # asm 1: movdqa <xmm7=int6464#8,>xmm13=int6464#11 | ||
3471 | # asm 2: movdqa <xmm7=%xmm7,>xmm13=%xmm10 | ||
3472 | movdqa %xmm7,%xmm10 | ||
3473 | |||
3474 | # qhasm: xmm13 ^= xmm1 | ||
3475 | # asm 1: pxor <xmm1=int6464#2,<xmm13=int6464#11 | ||
3476 | # asm 2: pxor <xmm1=%xmm1,<xmm13=%xmm10 | ||
3477 | pxor %xmm1,%xmm10 | ||
3478 | |||
3479 | # qhasm: xmm12 = xmm5 | ||
3480 | # asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#12 | ||
3481 | # asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm11 | ||
3482 | movdqa %xmm5,%xmm11 | ||
3483 | |||
3484 | # qhasm: xmm9 = xmm13 | ||
3485 | # asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13 | ||
3486 | # asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12 | ||
3487 | movdqa %xmm10,%xmm12 | ||
3488 | |||
3489 | # qhasm: xmm12 ^= xmm4 | ||
3490 | # asm 1: pxor <xmm4=int6464#5,<xmm12=int6464#12 | ||
3491 | # asm 2: pxor <xmm4=%xmm4,<xmm12=%xmm11 | ||
3492 | pxor %xmm4,%xmm11 | ||
3493 | |||
3494 | # qhasm: xmm9 |= xmm12 | ||
3495 | # asm 1: por <xmm12=int6464#12,<xmm9=int6464#13 | ||
3496 | # asm 2: por <xmm12=%xmm11,<xmm9=%xmm12 | ||
3497 | por %xmm11,%xmm12 | ||
3498 | |||
3499 | # qhasm: xmm13 &= xmm12 | ||
3500 | # asm 1: pand <xmm12=int6464#12,<xmm13=int6464#11 | ||
3501 | # asm 2: pand <xmm12=%xmm11,<xmm13=%xmm10 | ||
3502 | pand %xmm11,%xmm10 | ||
3503 | |||
3504 | # qhasm: xmm8 ^= xmm13 | ||
3505 | # asm 1: pxor <xmm13=int6464#11,<xmm8=int6464#15 | ||
3506 | # asm 2: pxor <xmm13=%xmm10,<xmm8=%xmm14 | ||
3507 | pxor %xmm10,%xmm14 | ||
3508 | |||
3509 | # qhasm: xmm11 ^= xmm15 | ||
3510 | # asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#9 | ||
3511 | # asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm8 | ||
3512 | pxor %xmm15,%xmm8 | ||
3513 | |||
3514 | # qhasm: xmm10 ^= xmm14 | ||
3515 | # asm 1: pxor <xmm14=int6464#14,<xmm10=int6464#10 | ||
3516 | # asm 2: pxor <xmm14=%xmm13,<xmm10=%xmm9 | ||
3517 | pxor %xmm13,%xmm9 | ||
3518 | |||
3519 | # qhasm: xmm9 ^= xmm15 | ||
3520 | # asm 1: pxor <xmm15=int6464#16,<xmm9=int6464#13 | ||
3521 | # asm 2: pxor <xmm15=%xmm15,<xmm9=%xmm12 | ||
3522 | pxor %xmm15,%xmm12 | ||
3523 | |||
3524 | # qhasm: xmm8 ^= xmm14 | ||
3525 | # asm 1: pxor <xmm14=int6464#14,<xmm8=int6464#15 | ||
3526 | # asm 2: pxor <xmm14=%xmm13,<xmm8=%xmm14 | ||
3527 | pxor %xmm13,%xmm14 | ||
3528 | |||
3529 | # qhasm: xmm9 ^= xmm14 | ||
3530 | # asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13 | ||
3531 | # asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12 | ||
3532 | pxor %xmm13,%xmm12 | ||
3533 | |||
3534 | # qhasm: xmm12 = xmm3 | ||
3535 | # asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#11 | ||
3536 | # asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm10 | ||
3537 | movdqa %xmm3,%xmm10 | ||
3538 | |||
3539 | # qhasm: xmm13 = xmm6 | ||
3540 | # asm 1: movdqa <xmm6=int6464#7,>xmm13=int6464#12 | ||
3541 | # asm 2: movdqa <xmm6=%xmm6,>xmm13=%xmm11 | ||
3542 | movdqa %xmm6,%xmm11 | ||
3543 | |||
3544 | # qhasm: xmm14 = xmm1 | ||
3545 | # asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14 | ||
3546 | # asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13 | ||
3547 | movdqa %xmm1,%xmm13 | ||
3548 | |||
3549 | # qhasm: xmm15 = xmm7 | ||
3550 | # asm 1: movdqa <xmm7=int6464#8,>xmm15=int6464#16 | ||
3551 | # asm 2: movdqa <xmm7=%xmm7,>xmm15=%xmm15 | ||
3552 | movdqa %xmm7,%xmm15 | ||
3553 | |||
3554 | # qhasm: xmm12 &= xmm2 | ||
3555 | # asm 1: pand <xmm2=int6464#3,<xmm12=int6464#11 | ||
3556 | # asm 2: pand <xmm2=%xmm2,<xmm12=%xmm10 | ||
3557 | pand %xmm2,%xmm10 | ||
3558 | |||
3559 | # qhasm: xmm13 &= xmm0 | ||
3560 | # asm 1: pand <xmm0=int6464#1,<xmm13=int6464#12 | ||
3561 | # asm 2: pand <xmm0=%xmm0,<xmm13=%xmm11 | ||
3562 | pand %xmm0,%xmm11 | ||
3563 | |||
3564 | # qhasm: xmm14 &= xmm5 | ||
3565 | # asm 1: pand <xmm5=int6464#6,<xmm14=int6464#14 | ||
3566 | # asm 2: pand <xmm5=%xmm5,<xmm14=%xmm13 | ||
3567 | pand %xmm5,%xmm13 | ||
3568 | |||
3569 | # qhasm: xmm15 |= xmm4 | ||
3570 | # asm 1: por <xmm4=int6464#5,<xmm15=int6464#16 | ||
3571 | # asm 2: por <xmm4=%xmm4,<xmm15=%xmm15 | ||
3572 | por %xmm4,%xmm15 | ||
3573 | |||
3574 | # qhasm: xmm11 ^= xmm12 | ||
3575 | # asm 1: pxor <xmm12=int6464#11,<xmm11=int6464#9 | ||
3576 | # asm 2: pxor <xmm12=%xmm10,<xmm11=%xmm8 | ||
3577 | pxor %xmm10,%xmm8 | ||
3578 | |||
3579 | # qhasm: xmm10 ^= xmm13 | ||
3580 | # asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10 | ||
3581 | # asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9 | ||
3582 | pxor %xmm11,%xmm9 | ||
3583 | |||
3584 | # qhasm: xmm9 ^= xmm14 | ||
3585 | # asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13 | ||
3586 | # asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12 | ||
3587 | pxor %xmm13,%xmm12 | ||
3588 | |||
3589 | # qhasm: xmm8 ^= xmm15 | ||
3590 | # asm 1: pxor <xmm15=int6464#16,<xmm8=int6464#15 | ||
3591 | # asm 2: pxor <xmm15=%xmm15,<xmm8=%xmm14 | ||
3592 | pxor %xmm15,%xmm14 | ||
3593 | |||
3594 | # qhasm: xmm12 = xmm11 | ||
3595 | # asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11 | ||
3596 | # asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10 | ||
3597 | movdqa %xmm8,%xmm10 | ||
3598 | |||
3599 | # qhasm: xmm12 ^= xmm10 | ||
3600 | # asm 1: pxor <xmm10=int6464#10,<xmm12=int6464#11 | ||
3601 | # asm 2: pxor <xmm10=%xmm9,<xmm12=%xmm10 | ||
3602 | pxor %xmm9,%xmm10 | ||
3603 | |||
3604 | # qhasm: xmm11 &= xmm9 | ||
3605 | # asm 1: pand <xmm9=int6464#13,<xmm11=int6464#9 | ||
3606 | # asm 2: pand <xmm9=%xmm12,<xmm11=%xmm8 | ||
3607 | pand %xmm12,%xmm8 | ||
3608 | |||
3609 | # qhasm: xmm14 = xmm8 | ||
3610 | # asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12 | ||
3611 | # asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11 | ||
3612 | movdqa %xmm14,%xmm11 | ||
3613 | |||
3614 | # qhasm: xmm14 ^= xmm11 | ||
3615 | # asm 1: pxor <xmm11=int6464#9,<xmm14=int6464#12 | ||
3616 | # asm 2: pxor <xmm11=%xmm8,<xmm14=%xmm11 | ||
3617 | pxor %xmm8,%xmm11 | ||
3618 | |||
3619 | # qhasm: xmm15 = xmm12 | ||
3620 | # asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14 | ||
3621 | # asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13 | ||
3622 | movdqa %xmm10,%xmm13 | ||
3623 | |||
3624 | # qhasm: xmm15 &= xmm14 | ||
3625 | # asm 1: pand <xmm14=int6464#12,<xmm15=int6464#14 | ||
3626 | # asm 2: pand <xmm14=%xmm11,<xmm15=%xmm13 | ||
3627 | pand %xmm11,%xmm13 | ||
3628 | |||
3629 | # qhasm: xmm15 ^= xmm10 | ||
3630 | # asm 1: pxor <xmm10=int6464#10,<xmm15=int6464#14 | ||
3631 | # asm 2: pxor <xmm10=%xmm9,<xmm15=%xmm13 | ||
3632 | pxor %xmm9,%xmm13 | ||
3633 | |||
3634 | # qhasm: xmm13 = xmm9 | ||
3635 | # asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16 | ||
3636 | # asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15 | ||
3637 | movdqa %xmm12,%xmm15 | ||
3638 | |||
3639 | # qhasm: xmm13 ^= xmm8 | ||
3640 | # asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16 | ||
3641 | # asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15 | ||
3642 | pxor %xmm14,%xmm15 | ||
3643 | |||
3644 | # qhasm: xmm11 ^= xmm10 | ||
3645 | # asm 1: pxor <xmm10=int6464#10,<xmm11=int6464#9 | ||
3646 | # asm 2: pxor <xmm10=%xmm9,<xmm11=%xmm8 | ||
3647 | pxor %xmm9,%xmm8 | ||
3648 | |||
3649 | # qhasm: xmm13 &= xmm11 | ||
3650 | # asm 1: pand <xmm11=int6464#9,<xmm13=int6464#16 | ||
3651 | # asm 2: pand <xmm11=%xmm8,<xmm13=%xmm15 | ||
3652 | pand %xmm8,%xmm15 | ||
3653 | |||
3654 | # qhasm: xmm13 ^= xmm8 | ||
3655 | # asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16 | ||
3656 | # asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15 | ||
3657 | pxor %xmm14,%xmm15 | ||
3658 | |||
3659 | # qhasm: xmm9 ^= xmm13 | ||
3660 | # asm 1: pxor <xmm13=int6464#16,<xmm9=int6464#13 | ||
3661 | # asm 2: pxor <xmm13=%xmm15,<xmm9=%xmm12 | ||
3662 | pxor %xmm15,%xmm12 | ||
3663 | |||
3664 | # qhasm: xmm10 = xmm14 | ||
3665 | # asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9 | ||
3666 | # asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8 | ||
3667 | movdqa %xmm11,%xmm8 | ||
3668 | |||
3669 | # qhasm: xmm10 ^= xmm13 | ||
3670 | # asm 1: pxor <xmm13=int6464#16,<xmm10=int6464#9 | ||
3671 | # asm 2: pxor <xmm13=%xmm15,<xmm10=%xmm8 | ||
3672 | pxor %xmm15,%xmm8 | ||
3673 | |||
3674 | # qhasm: xmm10 &= xmm8 | ||
3675 | # asm 1: pand <xmm8=int6464#15,<xmm10=int6464#9 | ||
3676 | # asm 2: pand <xmm8=%xmm14,<xmm10=%xmm8 | ||
3677 | pand %xmm14,%xmm8 | ||
3678 | |||
3679 | # qhasm: xmm9 ^= xmm10 | ||
3680 | # asm 1: pxor <xmm10=int6464#9,<xmm9=int6464#13 | ||
3681 | # asm 2: pxor <xmm10=%xmm8,<xmm9=%xmm12 | ||
3682 | pxor %xmm8,%xmm12 | ||
3683 | |||
3684 | # qhasm: xmm14 ^= xmm10 | ||
3685 | # asm 1: pxor <xmm10=int6464#9,<xmm14=int6464#12 | ||
3686 | # asm 2: pxor <xmm10=%xmm8,<xmm14=%xmm11 | ||
3687 | pxor %xmm8,%xmm11 | ||
3688 | |||
3689 | # qhasm: xmm14 &= xmm15 | ||
3690 | # asm 1: pand <xmm15=int6464#14,<xmm14=int6464#12 | ||
3691 | # asm 2: pand <xmm15=%xmm13,<xmm14=%xmm11 | ||
3692 | pand %xmm13,%xmm11 | ||
3693 | |||
3694 | # qhasm: xmm14 ^= xmm12 | ||
3695 | # asm 1: pxor <xmm12=int6464#11,<xmm14=int6464#12 | ||
3696 | # asm 2: pxor <xmm12=%xmm10,<xmm14=%xmm11 | ||
3697 | pxor %xmm10,%xmm11 | ||
3698 | |||
3699 | # qhasm: xmm12 = xmm4 | ||
3700 | # asm 1: movdqa <xmm4=int6464#5,>xmm12=int6464#9 | ||
3701 | # asm 2: movdqa <xmm4=%xmm4,>xmm12=%xmm8 | ||
3702 | movdqa %xmm4,%xmm8 | ||
3703 | |||
3704 | # qhasm: xmm8 = xmm5 | ||
3705 | # asm 1: movdqa <xmm5=int6464#6,>xmm8=int6464#10 | ||
3706 | # asm 2: movdqa <xmm5=%xmm5,>xmm8=%xmm9 | ||
3707 | movdqa %xmm5,%xmm9 | ||
3708 | |||
3709 | # qhasm: xmm10 = xmm15 | ||
3710 | # asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11 | ||
3711 | # asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10 | ||
3712 | movdqa %xmm13,%xmm10 | ||
3713 | |||
3714 | # qhasm: xmm10 ^= xmm14 | ||
3715 | # asm 1: pxor <xmm14=int6464#12,<xmm10=int6464#11 | ||
3716 | # asm 2: pxor <xmm14=%xmm11,<xmm10=%xmm10 | ||
3717 | pxor %xmm11,%xmm10 | ||
3718 | |||
3719 | # qhasm: xmm10 &= xmm4 | ||
3720 | # asm 1: pand <xmm4=int6464#5,<xmm10=int6464#11 | ||
3721 | # asm 2: pand <xmm4=%xmm4,<xmm10=%xmm10 | ||
3722 | pand %xmm4,%xmm10 | ||
3723 | |||
3724 | # qhasm: xmm4 ^= xmm5 | ||
3725 | # asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5 | ||
3726 | # asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4 | ||
3727 | pxor %xmm5,%xmm4 | ||
3728 | |||
3729 | # qhasm: xmm4 &= xmm14 | ||
3730 | # asm 1: pand <xmm14=int6464#12,<xmm4=int6464#5 | ||
3731 | # asm 2: pand <xmm14=%xmm11,<xmm4=%xmm4 | ||
3732 | pand %xmm11,%xmm4 | ||
3733 | |||
3734 | # qhasm: xmm5 &= xmm15 | ||
3735 | # asm 1: pand <xmm15=int6464#14,<xmm5=int6464#6 | ||
3736 | # asm 2: pand <xmm15=%xmm13,<xmm5=%xmm5 | ||
3737 | pand %xmm13,%xmm5 | ||
3738 | |||
3739 | # qhasm: xmm4 ^= xmm5 | ||
3740 | # asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5 | ||
3741 | # asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4 | ||
3742 | pxor %xmm5,%xmm4 | ||
3743 | |||
3744 | # qhasm: xmm5 ^= xmm10 | ||
3745 | # asm 1: pxor <xmm10=int6464#11,<xmm5=int6464#6 | ||
3746 | # asm 2: pxor <xmm10=%xmm10,<xmm5=%xmm5 | ||
3747 | pxor %xmm10,%xmm5 | ||
3748 | |||
3749 | # qhasm: xmm12 ^= xmm0 | ||
3750 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#9 | ||
3751 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm8 | ||
3752 | pxor %xmm0,%xmm8 | ||
3753 | |||
3754 | # qhasm: xmm8 ^= xmm2 | ||
3755 | # asm 1: pxor <xmm2=int6464#3,<xmm8=int6464#10 | ||
3756 | # asm 2: pxor <xmm2=%xmm2,<xmm8=%xmm9 | ||
3757 | pxor %xmm2,%xmm9 | ||
3758 | |||
3759 | # qhasm: xmm15 ^= xmm13 | ||
3760 | # asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14 | ||
3761 | # asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13 | ||
3762 | pxor %xmm15,%xmm13 | ||
3763 | |||
3764 | # qhasm: xmm14 ^= xmm9 | ||
3765 | # asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12 | ||
3766 | # asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11 | ||
3767 | pxor %xmm12,%xmm11 | ||
3768 | |||
3769 | # qhasm: xmm11 = xmm15 | ||
3770 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
3771 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
3772 | movdqa %xmm13,%xmm10 | ||
3773 | |||
3774 | # qhasm: xmm11 ^= xmm14 | ||
3775 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
3776 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
3777 | pxor %xmm11,%xmm10 | ||
3778 | |||
3779 | # qhasm: xmm11 &= xmm12 | ||
3780 | # asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11 | ||
3781 | # asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10 | ||
3782 | pand %xmm8,%xmm10 | ||
3783 | |||
3784 | # qhasm: xmm12 ^= xmm8 | ||
3785 | # asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9 | ||
3786 | # asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8 | ||
3787 | pxor %xmm9,%xmm8 | ||
3788 | |||
3789 | # qhasm: xmm12 &= xmm14 | ||
3790 | # asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9 | ||
3791 | # asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8 | ||
3792 | pand %xmm11,%xmm8 | ||
3793 | |||
3794 | # qhasm: xmm8 &= xmm15 | ||
3795 | # asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10 | ||
3796 | # asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9 | ||
3797 | pand %xmm13,%xmm9 | ||
3798 | |||
3799 | # qhasm: xmm8 ^= xmm12 | ||
3800 | # asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10 | ||
3801 | # asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9 | ||
3802 | pxor %xmm8,%xmm9 | ||
3803 | |||
3804 | # qhasm: xmm12 ^= xmm11 | ||
3805 | # asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9 | ||
3806 | # asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8 | ||
3807 | pxor %xmm10,%xmm8 | ||
3808 | |||
3809 | # qhasm: xmm10 = xmm13 | ||
3810 | # asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11 | ||
3811 | # asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10 | ||
3812 | movdqa %xmm15,%xmm10 | ||
3813 | |||
3814 | # qhasm: xmm10 ^= xmm9 | ||
3815 | # asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11 | ||
3816 | # asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10 | ||
3817 | pxor %xmm12,%xmm10 | ||
3818 | |||
3819 | # qhasm: xmm10 &= xmm0 | ||
3820 | # asm 1: pand <xmm0=int6464#1,<xmm10=int6464#11 | ||
3821 | # asm 2: pand <xmm0=%xmm0,<xmm10=%xmm10 | ||
3822 | pand %xmm0,%xmm10 | ||
3823 | |||
3824 | # qhasm: xmm0 ^= xmm2 | ||
3825 | # asm 1: pxor <xmm2=int6464#3,<xmm0=int6464#1 | ||
3826 | # asm 2: pxor <xmm2=%xmm2,<xmm0=%xmm0 | ||
3827 | pxor %xmm2,%xmm0 | ||
3828 | |||
3829 | # qhasm: xmm0 &= xmm9 | ||
3830 | # asm 1: pand <xmm9=int6464#13,<xmm0=int6464#1 | ||
3831 | # asm 2: pand <xmm9=%xmm12,<xmm0=%xmm0 | ||
3832 | pand %xmm12,%xmm0 | ||
3833 | |||
3834 | # qhasm: xmm2 &= xmm13 | ||
3835 | # asm 1: pand <xmm13=int6464#16,<xmm2=int6464#3 | ||
3836 | # asm 2: pand <xmm13=%xmm15,<xmm2=%xmm2 | ||
3837 | pand %xmm15,%xmm2 | ||
3838 | |||
3839 | # qhasm: xmm0 ^= xmm2 | ||
3840 | # asm 1: pxor <xmm2=int6464#3,<xmm0=int6464#1 | ||
3841 | # asm 2: pxor <xmm2=%xmm2,<xmm0=%xmm0 | ||
3842 | pxor %xmm2,%xmm0 | ||
3843 | |||
3844 | # qhasm: xmm2 ^= xmm10 | ||
3845 | # asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3 | ||
3846 | # asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2 | ||
3847 | pxor %xmm10,%xmm2 | ||
3848 | |||
3849 | # qhasm: xmm4 ^= xmm12 | ||
3850 | # asm 1: pxor <xmm12=int6464#9,<xmm4=int6464#5 | ||
3851 | # asm 2: pxor <xmm12=%xmm8,<xmm4=%xmm4 | ||
3852 | pxor %xmm8,%xmm4 | ||
3853 | |||
3854 | # qhasm: xmm0 ^= xmm12 | ||
3855 | # asm 1: pxor <xmm12=int6464#9,<xmm0=int6464#1 | ||
3856 | # asm 2: pxor <xmm12=%xmm8,<xmm0=%xmm0 | ||
3857 | pxor %xmm8,%xmm0 | ||
3858 | |||
3859 | # qhasm: xmm5 ^= xmm8 | ||
3860 | # asm 1: pxor <xmm8=int6464#10,<xmm5=int6464#6 | ||
3861 | # asm 2: pxor <xmm8=%xmm9,<xmm5=%xmm5 | ||
3862 | pxor %xmm9,%xmm5 | ||
3863 | |||
3864 | # qhasm: xmm2 ^= xmm8 | ||
3865 | # asm 1: pxor <xmm8=int6464#10,<xmm2=int6464#3 | ||
3866 | # asm 2: pxor <xmm8=%xmm9,<xmm2=%xmm2 | ||
3867 | pxor %xmm9,%xmm2 | ||
3868 | |||
3869 | # qhasm: xmm12 = xmm7 | ||
3870 | # asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#9 | ||
3871 | # asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm8 | ||
3872 | movdqa %xmm7,%xmm8 | ||
3873 | |||
3874 | # qhasm: xmm8 = xmm1 | ||
3875 | # asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10 | ||
3876 | # asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9 | ||
3877 | movdqa %xmm1,%xmm9 | ||
3878 | |||
3879 | # qhasm: xmm12 ^= xmm6 | ||
3880 | # asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#9 | ||
3881 | # asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm8 | ||
3882 | pxor %xmm6,%xmm8 | ||
3883 | |||
3884 | # qhasm: xmm8 ^= xmm3 | ||
3885 | # asm 1: pxor <xmm3=int6464#4,<xmm8=int6464#10 | ||
3886 | # asm 2: pxor <xmm3=%xmm3,<xmm8=%xmm9 | ||
3887 | pxor %xmm3,%xmm9 | ||
3888 | |||
3889 | # qhasm: xmm11 = xmm15 | ||
3890 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
3891 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
3892 | movdqa %xmm13,%xmm10 | ||
3893 | |||
3894 | # qhasm: xmm11 ^= xmm14 | ||
3895 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
3896 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
3897 | pxor %xmm11,%xmm10 | ||
3898 | |||
3899 | # qhasm: xmm11 &= xmm12 | ||
3900 | # asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11 | ||
3901 | # asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10 | ||
3902 | pand %xmm8,%xmm10 | ||
3903 | |||
3904 | # qhasm: xmm12 ^= xmm8 | ||
3905 | # asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9 | ||
3906 | # asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8 | ||
3907 | pxor %xmm9,%xmm8 | ||
3908 | |||
3909 | # qhasm: xmm12 &= xmm14 | ||
3910 | # asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9 | ||
3911 | # asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8 | ||
3912 | pand %xmm11,%xmm8 | ||
3913 | |||
3914 | # qhasm: xmm8 &= xmm15 | ||
3915 | # asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10 | ||
3916 | # asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9 | ||
3917 | pand %xmm13,%xmm9 | ||
3918 | |||
3919 | # qhasm: xmm8 ^= xmm12 | ||
3920 | # asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10 | ||
3921 | # asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9 | ||
3922 | pxor %xmm8,%xmm9 | ||
3923 | |||
3924 | # qhasm: xmm12 ^= xmm11 | ||
3925 | # asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9 | ||
3926 | # asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8 | ||
3927 | pxor %xmm10,%xmm8 | ||
3928 | |||
3929 | # qhasm: xmm10 = xmm13 | ||
3930 | # asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11 | ||
3931 | # asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10 | ||
3932 | movdqa %xmm15,%xmm10 | ||
3933 | |||
3934 | # qhasm: xmm10 ^= xmm9 | ||
3935 | # asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11 | ||
3936 | # asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10 | ||
3937 | pxor %xmm12,%xmm10 | ||
3938 | |||
3939 | # qhasm: xmm10 &= xmm6 | ||
3940 | # asm 1: pand <xmm6=int6464#7,<xmm10=int6464#11 | ||
3941 | # asm 2: pand <xmm6=%xmm6,<xmm10=%xmm10 | ||
3942 | pand %xmm6,%xmm10 | ||
3943 | |||
3944 | # qhasm: xmm6 ^= xmm3 | ||
3945 | # asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7 | ||
3946 | # asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6 | ||
3947 | pxor %xmm3,%xmm6 | ||
3948 | |||
3949 | # qhasm: xmm6 &= xmm9 | ||
3950 | # asm 1: pand <xmm9=int6464#13,<xmm6=int6464#7 | ||
3951 | # asm 2: pand <xmm9=%xmm12,<xmm6=%xmm6 | ||
3952 | pand %xmm12,%xmm6 | ||
3953 | |||
3954 | # qhasm: xmm3 &= xmm13 | ||
3955 | # asm 1: pand <xmm13=int6464#16,<xmm3=int6464#4 | ||
3956 | # asm 2: pand <xmm13=%xmm15,<xmm3=%xmm3 | ||
3957 | pand %xmm15,%xmm3 | ||
3958 | |||
3959 | # qhasm: xmm6 ^= xmm3 | ||
3960 | # asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7 | ||
3961 | # asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6 | ||
3962 | pxor %xmm3,%xmm6 | ||
3963 | |||
3964 | # qhasm: xmm3 ^= xmm10 | ||
3965 | # asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4 | ||
3966 | # asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3 | ||
3967 | pxor %xmm10,%xmm3 | ||
3968 | |||
3969 | # qhasm: xmm15 ^= xmm13 | ||
3970 | # asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14 | ||
3971 | # asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13 | ||
3972 | pxor %xmm15,%xmm13 | ||
3973 | |||
3974 | # qhasm: xmm14 ^= xmm9 | ||
3975 | # asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12 | ||
3976 | # asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11 | ||
3977 | pxor %xmm12,%xmm11 | ||
3978 | |||
3979 | # qhasm: xmm11 = xmm15 | ||
3980 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
3981 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
3982 | movdqa %xmm13,%xmm10 | ||
3983 | |||
3984 | # qhasm: xmm11 ^= xmm14 | ||
3985 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
3986 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
3987 | pxor %xmm11,%xmm10 | ||
3988 | |||
3989 | # qhasm: xmm11 &= xmm7 | ||
3990 | # asm 1: pand <xmm7=int6464#8,<xmm11=int6464#11 | ||
3991 | # asm 2: pand <xmm7=%xmm7,<xmm11=%xmm10 | ||
3992 | pand %xmm7,%xmm10 | ||
3993 | |||
3994 | # qhasm: xmm7 ^= xmm1 | ||
3995 | # asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8 | ||
3996 | # asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7 | ||
3997 | pxor %xmm1,%xmm7 | ||
3998 | |||
3999 | # qhasm: xmm7 &= xmm14 | ||
4000 | # asm 1: pand <xmm14=int6464#12,<xmm7=int6464#8 | ||
4001 | # asm 2: pand <xmm14=%xmm11,<xmm7=%xmm7 | ||
4002 | pand %xmm11,%xmm7 | ||
4003 | |||
4004 | # qhasm: xmm1 &= xmm15 | ||
4005 | # asm 1: pand <xmm15=int6464#14,<xmm1=int6464#2 | ||
4006 | # asm 2: pand <xmm15=%xmm13,<xmm1=%xmm1 | ||
4007 | pand %xmm13,%xmm1 | ||
4008 | |||
4009 | # qhasm: xmm7 ^= xmm1 | ||
4010 | # asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8 | ||
4011 | # asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7 | ||
4012 | pxor %xmm1,%xmm7 | ||
4013 | |||
4014 | # qhasm: xmm1 ^= xmm11 | ||
4015 | # asm 1: pxor <xmm11=int6464#11,<xmm1=int6464#2 | ||
4016 | # asm 2: pxor <xmm11=%xmm10,<xmm1=%xmm1 | ||
4017 | pxor %xmm10,%xmm1 | ||
4018 | |||
4019 | # qhasm: xmm7 ^= xmm12 | ||
4020 | # asm 1: pxor <xmm12=int6464#9,<xmm7=int6464#8 | ||
4021 | # asm 2: pxor <xmm12=%xmm8,<xmm7=%xmm7 | ||
4022 | pxor %xmm8,%xmm7 | ||
4023 | |||
4024 | # qhasm: xmm6 ^= xmm12 | ||
4025 | # asm 1: pxor <xmm12=int6464#9,<xmm6=int6464#7 | ||
4026 | # asm 2: pxor <xmm12=%xmm8,<xmm6=%xmm6 | ||
4027 | pxor %xmm8,%xmm6 | ||
4028 | |||
4029 | # qhasm: xmm1 ^= xmm8 | ||
4030 | # asm 1: pxor <xmm8=int6464#10,<xmm1=int6464#2 | ||
4031 | # asm 2: pxor <xmm8=%xmm9,<xmm1=%xmm1 | ||
4032 | pxor %xmm9,%xmm1 | ||
4033 | |||
4034 | # qhasm: xmm3 ^= xmm8 | ||
4035 | # asm 1: pxor <xmm8=int6464#10,<xmm3=int6464#4 | ||
4036 | # asm 2: pxor <xmm8=%xmm9,<xmm3=%xmm3 | ||
4037 | pxor %xmm9,%xmm3 | ||
4038 | |||
4039 | # qhasm: xmm7 ^= xmm0 | ||
4040 | # asm 1: pxor <xmm0=int6464#1,<xmm7=int6464#8 | ||
4041 | # asm 2: pxor <xmm0=%xmm0,<xmm7=%xmm7 | ||
4042 | pxor %xmm0,%xmm7 | ||
4043 | |||
4044 | # qhasm: xmm1 ^= xmm4 | ||
4045 | # asm 1: pxor <xmm4=int6464#5,<xmm1=int6464#2 | ||
4046 | # asm 2: pxor <xmm4=%xmm4,<xmm1=%xmm1 | ||
4047 | pxor %xmm4,%xmm1 | ||
4048 | |||
4049 | # qhasm: xmm6 ^= xmm7 | ||
4050 | # asm 1: pxor <xmm7=int6464#8,<xmm6=int6464#7 | ||
4051 | # asm 2: pxor <xmm7=%xmm7,<xmm6=%xmm6 | ||
4052 | pxor %xmm7,%xmm6 | ||
4053 | |||
4054 | # qhasm: xmm4 ^= xmm0 | ||
4055 | # asm 1: pxor <xmm0=int6464#1,<xmm4=int6464#5 | ||
4056 | # asm 2: pxor <xmm0=%xmm0,<xmm4=%xmm4 | ||
4057 | pxor %xmm0,%xmm4 | ||
4058 | |||
4059 | # qhasm: xmm0 ^= xmm1 | ||
4060 | # asm 1: pxor <xmm1=int6464#2,<xmm0=int6464#1 | ||
4061 | # asm 2: pxor <xmm1=%xmm1,<xmm0=%xmm0 | ||
4062 | pxor %xmm1,%xmm0 | ||
4063 | |||
4064 | # qhasm: xmm1 ^= xmm5 | ||
4065 | # asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2 | ||
4066 | # asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1 | ||
4067 | pxor %xmm5,%xmm1 | ||
4068 | |||
4069 | # qhasm: xmm5 ^= xmm3 | ||
4070 | # asm 1: pxor <xmm3=int6464#4,<xmm5=int6464#6 | ||
4071 | # asm 2: pxor <xmm3=%xmm3,<xmm5=%xmm5 | ||
4072 | pxor %xmm3,%xmm5 | ||
4073 | |||
4074 | # qhasm: xmm6 ^= xmm5 | ||
4075 | # asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7 | ||
4076 | # asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6 | ||
4077 | pxor %xmm5,%xmm6 | ||
4078 | |||
4079 | # qhasm: xmm3 ^= xmm2 | ||
4080 | # asm 1: pxor <xmm2=int6464#3,<xmm3=int6464#4 | ||
4081 | # asm 2: pxor <xmm2=%xmm2,<xmm3=%xmm3 | ||
4082 | pxor %xmm2,%xmm3 | ||
4083 | |||
4084 | # qhasm: xmm2 ^= xmm5 | ||
4085 | # asm 1: pxor <xmm5=int6464#6,<xmm2=int6464#3 | ||
4086 | # asm 2: pxor <xmm5=%xmm5,<xmm2=%xmm2 | ||
4087 | pxor %xmm5,%xmm2 | ||
4088 | |||
4089 | # qhasm: xmm4 ^= xmm2 | ||
4090 | # asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5 | ||
4091 | # asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4 | ||
4092 | pxor %xmm2,%xmm4 | ||
4093 | |||
4094 | # qhasm: xmm6 ^= RCON | ||
4095 | # asm 1: pxor RCON,<xmm6=int6464#7 | ||
4096 | # asm 2: pxor RCON,<xmm6=%xmm6 | ||
4097 | pxor RCON,%xmm6 | ||
4098 | |||
4099 | # qhasm: shuffle bytes of xmm0 by EXPB0 | ||
4100 | # asm 1: pshufb EXPB0,<xmm0=int6464#1 | ||
4101 | # asm 2: pshufb EXPB0,<xmm0=%xmm0 | ||
4102 | pshufb EXPB0,%xmm0 | ||
4103 | |||
4104 | # qhasm: shuffle bytes of xmm1 by EXPB0 | ||
4105 | # asm 1: pshufb EXPB0,<xmm1=int6464#2 | ||
4106 | # asm 2: pshufb EXPB0,<xmm1=%xmm1 | ||
4107 | pshufb EXPB0,%xmm1 | ||
4108 | |||
4109 | # qhasm: shuffle bytes of xmm6 by EXPB0 | ||
4110 | # asm 1: pshufb EXPB0,<xmm6=int6464#7 | ||
4111 | # asm 2: pshufb EXPB0,<xmm6=%xmm6 | ||
4112 | pshufb EXPB0,%xmm6 | ||
4113 | |||
4114 | # qhasm: shuffle bytes of xmm4 by EXPB0 | ||
4115 | # asm 1: pshufb EXPB0,<xmm4=int6464#5 | ||
4116 | # asm 2: pshufb EXPB0,<xmm4=%xmm4 | ||
4117 | pshufb EXPB0,%xmm4 | ||
4118 | |||
4119 | # qhasm: shuffle bytes of xmm2 by EXPB0 | ||
4120 | # asm 1: pshufb EXPB0,<xmm2=int6464#3 | ||
4121 | # asm 2: pshufb EXPB0,<xmm2=%xmm2 | ||
4122 | pshufb EXPB0,%xmm2 | ||
4123 | |||
4124 | # qhasm: shuffle bytes of xmm7 by EXPB0 | ||
4125 | # asm 1: pshufb EXPB0,<xmm7=int6464#8 | ||
4126 | # asm 2: pshufb EXPB0,<xmm7=%xmm7 | ||
4127 | pshufb EXPB0,%xmm7 | ||
4128 | |||
4129 | # qhasm: shuffle bytes of xmm3 by EXPB0 | ||
4130 | # asm 1: pshufb EXPB0,<xmm3=int6464#4 | ||
4131 | # asm 2: pshufb EXPB0,<xmm3=%xmm3 | ||
4132 | pshufb EXPB0,%xmm3 | ||
4133 | |||
4134 | # qhasm: shuffle bytes of xmm5 by EXPB0 | ||
4135 | # asm 1: pshufb EXPB0,<xmm5=int6464#6 | ||
4136 | # asm 2: pshufb EXPB0,<xmm5=%xmm5 | ||
4137 | pshufb EXPB0,%xmm5 | ||
4138 | |||
4139 | # qhasm: xmm8 = *(int128 *)(c + 256) | ||
4140 | # asm 1: movdqa 256(<c=int64#1),>xmm8=int6464#9 | ||
4141 | # asm 2: movdqa 256(<c=%rdi),>xmm8=%xmm8 | ||
4142 | movdqa 256(%rdi),%xmm8 | ||
4143 | |||
4144 | # qhasm: xmm9 = *(int128 *)(c + 272) | ||
4145 | # asm 1: movdqa 272(<c=int64#1),>xmm9=int6464#10 | ||
4146 | # asm 2: movdqa 272(<c=%rdi),>xmm9=%xmm9 | ||
4147 | movdqa 272(%rdi),%xmm9 | ||
4148 | |||
4149 | # qhasm: xmm10 = *(int128 *)(c + 288) | ||
4150 | # asm 1: movdqa 288(<c=int64#1),>xmm10=int6464#11 | ||
4151 | # asm 2: movdqa 288(<c=%rdi),>xmm10=%xmm10 | ||
4152 | movdqa 288(%rdi),%xmm10 | ||
4153 | |||
4154 | # qhasm: xmm11 = *(int128 *)(c + 304) | ||
4155 | # asm 1: movdqa 304(<c=int64#1),>xmm11=int6464#12 | ||
4156 | # asm 2: movdqa 304(<c=%rdi),>xmm11=%xmm11 | ||
4157 | movdqa 304(%rdi),%xmm11 | ||
4158 | |||
4159 | # qhasm: xmm12 = *(int128 *)(c + 320) | ||
4160 | # asm 1: movdqa 320(<c=int64#1),>xmm12=int6464#13 | ||
4161 | # asm 2: movdqa 320(<c=%rdi),>xmm12=%xmm12 | ||
4162 | movdqa 320(%rdi),%xmm12 | ||
4163 | |||
4164 | # qhasm: xmm13 = *(int128 *)(c + 336) | ||
4165 | # asm 1: movdqa 336(<c=int64#1),>xmm13=int6464#14 | ||
4166 | # asm 2: movdqa 336(<c=%rdi),>xmm13=%xmm13 | ||
4167 | movdqa 336(%rdi),%xmm13 | ||
4168 | |||
4169 | # qhasm: xmm14 = *(int128 *)(c + 352) | ||
4170 | # asm 1: movdqa 352(<c=int64#1),>xmm14=int6464#15 | ||
4171 | # asm 2: movdqa 352(<c=%rdi),>xmm14=%xmm14 | ||
4172 | movdqa 352(%rdi),%xmm14 | ||
4173 | |||
4174 | # qhasm: xmm15 = *(int128 *)(c + 368) | ||
4175 | # asm 1: movdqa 368(<c=int64#1),>xmm15=int6464#16 | ||
4176 | # asm 2: movdqa 368(<c=%rdi),>xmm15=%xmm15 | ||
4177 | movdqa 368(%rdi),%xmm15 | ||
4178 | |||
4179 | # qhasm: xmm8 ^= ONE | ||
4180 | # asm 1: pxor ONE,<xmm8=int6464#9 | ||
4181 | # asm 2: pxor ONE,<xmm8=%xmm8 | ||
4182 | pxor ONE,%xmm8 | ||
4183 | |||
4184 | # qhasm: xmm9 ^= ONE | ||
4185 | # asm 1: pxor ONE,<xmm9=int6464#10 | ||
4186 | # asm 2: pxor ONE,<xmm9=%xmm9 | ||
4187 | pxor ONE,%xmm9 | ||
4188 | |||
4189 | # qhasm: xmm13 ^= ONE | ||
4190 | # asm 1: pxor ONE,<xmm13=int6464#14 | ||
4191 | # asm 2: pxor ONE,<xmm13=%xmm13 | ||
4192 | pxor ONE,%xmm13 | ||
4193 | |||
4194 | # qhasm: xmm14 ^= ONE | ||
4195 | # asm 1: pxor ONE,<xmm14=int6464#15 | ||
4196 | # asm 2: pxor ONE,<xmm14=%xmm14 | ||
4197 | pxor ONE,%xmm14 | ||
4198 | |||
4199 | # qhasm: xmm0 ^= xmm8 | ||
4200 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
4201 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
4202 | pxor %xmm8,%xmm0 | ||
4203 | |||
4204 | # qhasm: xmm1 ^= xmm9 | ||
4205 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
4206 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
4207 | pxor %xmm9,%xmm1 | ||
4208 | |||
4209 | # qhasm: xmm6 ^= xmm10 | ||
4210 | # asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7 | ||
4211 | # asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6 | ||
4212 | pxor %xmm10,%xmm6 | ||
4213 | |||
4214 | # qhasm: xmm4 ^= xmm11 | ||
4215 | # asm 1: pxor <xmm11=int6464#12,<xmm4=int6464#5 | ||
4216 | # asm 2: pxor <xmm11=%xmm11,<xmm4=%xmm4 | ||
4217 | pxor %xmm11,%xmm4 | ||
4218 | |||
4219 | # qhasm: xmm2 ^= xmm12 | ||
4220 | # asm 1: pxor <xmm12=int6464#13,<xmm2=int6464#3 | ||
4221 | # asm 2: pxor <xmm12=%xmm12,<xmm2=%xmm2 | ||
4222 | pxor %xmm12,%xmm2 | ||
4223 | |||
4224 | # qhasm: xmm7 ^= xmm13 | ||
4225 | # asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8 | ||
4226 | # asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7 | ||
4227 | pxor %xmm13,%xmm7 | ||
4228 | |||
4229 | # qhasm: xmm3 ^= xmm14 | ||
4230 | # asm 1: pxor <xmm14=int6464#15,<xmm3=int6464#4 | ||
4231 | # asm 2: pxor <xmm14=%xmm14,<xmm3=%xmm3 | ||
4232 | pxor %xmm14,%xmm3 | ||
4233 | |||
4234 | # qhasm: xmm5 ^= xmm15 | ||
4235 | # asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6 | ||
4236 | # asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5 | ||
4237 | pxor %xmm15,%xmm5 | ||
4238 | |||
4239 | # qhasm: uint32323232 xmm8 >>= 8 | ||
4240 | # asm 1: psrld $8,<xmm8=int6464#9 | ||
4241 | # asm 2: psrld $8,<xmm8=%xmm8 | ||
4242 | psrld $8,%xmm8 | ||
4243 | |||
4244 | # qhasm: uint32323232 xmm9 >>= 8 | ||
4245 | # asm 1: psrld $8,<xmm9=int6464#10 | ||
4246 | # asm 2: psrld $8,<xmm9=%xmm9 | ||
4247 | psrld $8,%xmm9 | ||
4248 | |||
4249 | # qhasm: uint32323232 xmm10 >>= 8 | ||
4250 | # asm 1: psrld $8,<xmm10=int6464#11 | ||
4251 | # asm 2: psrld $8,<xmm10=%xmm10 | ||
4252 | psrld $8,%xmm10 | ||
4253 | |||
4254 | # qhasm: uint32323232 xmm11 >>= 8 | ||
4255 | # asm 1: psrld $8,<xmm11=int6464#12 | ||
4256 | # asm 2: psrld $8,<xmm11=%xmm11 | ||
4257 | psrld $8,%xmm11 | ||
4258 | |||
4259 | # qhasm: uint32323232 xmm12 >>= 8 | ||
4260 | # asm 1: psrld $8,<xmm12=int6464#13 | ||
4261 | # asm 2: psrld $8,<xmm12=%xmm12 | ||
4262 | psrld $8,%xmm12 | ||
4263 | |||
4264 | # qhasm: uint32323232 xmm13 >>= 8 | ||
4265 | # asm 1: psrld $8,<xmm13=int6464#14 | ||
4266 | # asm 2: psrld $8,<xmm13=%xmm13 | ||
4267 | psrld $8,%xmm13 | ||
4268 | |||
4269 | # qhasm: uint32323232 xmm14 >>= 8 | ||
4270 | # asm 1: psrld $8,<xmm14=int6464#15 | ||
4271 | # asm 2: psrld $8,<xmm14=%xmm14 | ||
4272 | psrld $8,%xmm14 | ||
4273 | |||
4274 | # qhasm: uint32323232 xmm15 >>= 8 | ||
4275 | # asm 1: psrld $8,<xmm15=int6464#16 | ||
4276 | # asm 2: psrld $8,<xmm15=%xmm15 | ||
4277 | psrld $8,%xmm15 | ||
4278 | |||
4279 | # qhasm: xmm0 ^= xmm8 | ||
4280 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
4281 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
4282 | pxor %xmm8,%xmm0 | ||
4283 | |||
4284 | # qhasm: xmm1 ^= xmm9 | ||
4285 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
4286 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
4287 | pxor %xmm9,%xmm1 | ||
4288 | |||
4289 | # qhasm: xmm6 ^= xmm10 | ||
4290 | # asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7 | ||
4291 | # asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6 | ||
4292 | pxor %xmm10,%xmm6 | ||
4293 | |||
4294 | # qhasm: xmm4 ^= xmm11 | ||
4295 | # asm 1: pxor <xmm11=int6464#12,<xmm4=int6464#5 | ||
4296 | # asm 2: pxor <xmm11=%xmm11,<xmm4=%xmm4 | ||
4297 | pxor %xmm11,%xmm4 | ||
4298 | |||
4299 | # qhasm: xmm2 ^= xmm12 | ||
4300 | # asm 1: pxor <xmm12=int6464#13,<xmm2=int6464#3 | ||
4301 | # asm 2: pxor <xmm12=%xmm12,<xmm2=%xmm2 | ||
4302 | pxor %xmm12,%xmm2 | ||
4303 | |||
4304 | # qhasm: xmm7 ^= xmm13 | ||
4305 | # asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8 | ||
4306 | # asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7 | ||
4307 | pxor %xmm13,%xmm7 | ||
4308 | |||
4309 | # qhasm: xmm3 ^= xmm14 | ||
4310 | # asm 1: pxor <xmm14=int6464#15,<xmm3=int6464#4 | ||
4311 | # asm 2: pxor <xmm14=%xmm14,<xmm3=%xmm3 | ||
4312 | pxor %xmm14,%xmm3 | ||
4313 | |||
4314 | # qhasm: xmm5 ^= xmm15 | ||
4315 | # asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6 | ||
4316 | # asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5 | ||
4317 | pxor %xmm15,%xmm5 | ||
4318 | |||
4319 | # qhasm: uint32323232 xmm8 >>= 8 | ||
4320 | # asm 1: psrld $8,<xmm8=int6464#9 | ||
4321 | # asm 2: psrld $8,<xmm8=%xmm8 | ||
4322 | psrld $8,%xmm8 | ||
4323 | |||
4324 | # qhasm: uint32323232 xmm9 >>= 8 | ||
4325 | # asm 1: psrld $8,<xmm9=int6464#10 | ||
4326 | # asm 2: psrld $8,<xmm9=%xmm9 | ||
4327 | psrld $8,%xmm9 | ||
4328 | |||
4329 | # qhasm: uint32323232 xmm10 >>= 8 | ||
4330 | # asm 1: psrld $8,<xmm10=int6464#11 | ||
4331 | # asm 2: psrld $8,<xmm10=%xmm10 | ||
4332 | psrld $8,%xmm10 | ||
4333 | |||
4334 | # qhasm: uint32323232 xmm11 >>= 8 | ||
4335 | # asm 1: psrld $8,<xmm11=int6464#12 | ||
4336 | # asm 2: psrld $8,<xmm11=%xmm11 | ||
4337 | psrld $8,%xmm11 | ||
4338 | |||
4339 | # qhasm: uint32323232 xmm12 >>= 8 | ||
4340 | # asm 1: psrld $8,<xmm12=int6464#13 | ||
4341 | # asm 2: psrld $8,<xmm12=%xmm12 | ||
4342 | psrld $8,%xmm12 | ||
4343 | |||
4344 | # qhasm: uint32323232 xmm13 >>= 8 | ||
4345 | # asm 1: psrld $8,<xmm13=int6464#14 | ||
4346 | # asm 2: psrld $8,<xmm13=%xmm13 | ||
4347 | psrld $8,%xmm13 | ||
4348 | |||
4349 | # qhasm: uint32323232 xmm14 >>= 8 | ||
4350 | # asm 1: psrld $8,<xmm14=int6464#15 | ||
4351 | # asm 2: psrld $8,<xmm14=%xmm14 | ||
4352 | psrld $8,%xmm14 | ||
4353 | |||
4354 | # qhasm: uint32323232 xmm15 >>= 8 | ||
4355 | # asm 1: psrld $8,<xmm15=int6464#16 | ||
4356 | # asm 2: psrld $8,<xmm15=%xmm15 | ||
4357 | psrld $8,%xmm15 | ||
4358 | |||
4359 | # qhasm: xmm0 ^= xmm8 | ||
4360 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
4361 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
4362 | pxor %xmm8,%xmm0 | ||
4363 | |||
4364 | # qhasm: xmm1 ^= xmm9 | ||
4365 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
4366 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
4367 | pxor %xmm9,%xmm1 | ||
4368 | |||
4369 | # qhasm: xmm6 ^= xmm10 | ||
4370 | # asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7 | ||
4371 | # asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6 | ||
4372 | pxor %xmm10,%xmm6 | ||
4373 | |||
4374 | # qhasm: xmm4 ^= xmm11 | ||
4375 | # asm 1: pxor <xmm11=int6464#12,<xmm4=int6464#5 | ||
4376 | # asm 2: pxor <xmm11=%xmm11,<xmm4=%xmm4 | ||
4377 | pxor %xmm11,%xmm4 | ||
4378 | |||
4379 | # qhasm: xmm2 ^= xmm12 | ||
4380 | # asm 1: pxor <xmm12=int6464#13,<xmm2=int6464#3 | ||
4381 | # asm 2: pxor <xmm12=%xmm12,<xmm2=%xmm2 | ||
4382 | pxor %xmm12,%xmm2 | ||
4383 | |||
4384 | # qhasm: xmm7 ^= xmm13 | ||
4385 | # asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8 | ||
4386 | # asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7 | ||
4387 | pxor %xmm13,%xmm7 | ||
4388 | |||
4389 | # qhasm: xmm3 ^= xmm14 | ||
4390 | # asm 1: pxor <xmm14=int6464#15,<xmm3=int6464#4 | ||
4391 | # asm 2: pxor <xmm14=%xmm14,<xmm3=%xmm3 | ||
4392 | pxor %xmm14,%xmm3 | ||
4393 | |||
4394 | # qhasm: xmm5 ^= xmm15 | ||
4395 | # asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6 | ||
4396 | # asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5 | ||
4397 | pxor %xmm15,%xmm5 | ||
4398 | |||
4399 | # qhasm: uint32323232 xmm8 >>= 8 | ||
4400 | # asm 1: psrld $8,<xmm8=int6464#9 | ||
4401 | # asm 2: psrld $8,<xmm8=%xmm8 | ||
4402 | psrld $8,%xmm8 | ||
4403 | |||
4404 | # qhasm: uint32323232 xmm9 >>= 8 | ||
4405 | # asm 1: psrld $8,<xmm9=int6464#10 | ||
4406 | # asm 2: psrld $8,<xmm9=%xmm9 | ||
4407 | psrld $8,%xmm9 | ||
4408 | |||
4409 | # qhasm: uint32323232 xmm10 >>= 8 | ||
4410 | # asm 1: psrld $8,<xmm10=int6464#11 | ||
4411 | # asm 2: psrld $8,<xmm10=%xmm10 | ||
4412 | psrld $8,%xmm10 | ||
4413 | |||
4414 | # qhasm: uint32323232 xmm11 >>= 8 | ||
4415 | # asm 1: psrld $8,<xmm11=int6464#12 | ||
4416 | # asm 2: psrld $8,<xmm11=%xmm11 | ||
4417 | psrld $8,%xmm11 | ||
4418 | |||
4419 | # qhasm: uint32323232 xmm12 >>= 8 | ||
4420 | # asm 1: psrld $8,<xmm12=int6464#13 | ||
4421 | # asm 2: psrld $8,<xmm12=%xmm12 | ||
4422 | psrld $8,%xmm12 | ||
4423 | |||
4424 | # qhasm: uint32323232 xmm13 >>= 8 | ||
4425 | # asm 1: psrld $8,<xmm13=int6464#14 | ||
4426 | # asm 2: psrld $8,<xmm13=%xmm13 | ||
4427 | psrld $8,%xmm13 | ||
4428 | |||
4429 | # qhasm: uint32323232 xmm14 >>= 8 | ||
4430 | # asm 1: psrld $8,<xmm14=int6464#15 | ||
4431 | # asm 2: psrld $8,<xmm14=%xmm14 | ||
4432 | psrld $8,%xmm14 | ||
4433 | |||
4434 | # qhasm: uint32323232 xmm15 >>= 8 | ||
4435 | # asm 1: psrld $8,<xmm15=int6464#16 | ||
4436 | # asm 2: psrld $8,<xmm15=%xmm15 | ||
4437 | psrld $8,%xmm15 | ||
4438 | |||
4439 | # qhasm: xmm0 ^= xmm8 | ||
4440 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
4441 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
4442 | pxor %xmm8,%xmm0 | ||
4443 | |||
4444 | # qhasm: xmm1 ^= xmm9 | ||
4445 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
4446 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
4447 | pxor %xmm9,%xmm1 | ||
4448 | |||
4449 | # qhasm: xmm6 ^= xmm10 | ||
4450 | # asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7 | ||
4451 | # asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6 | ||
4452 | pxor %xmm10,%xmm6 | ||
4453 | |||
4454 | # qhasm: xmm4 ^= xmm11 | ||
4455 | # asm 1: pxor <xmm11=int6464#12,<xmm4=int6464#5 | ||
4456 | # asm 2: pxor <xmm11=%xmm11,<xmm4=%xmm4 | ||
4457 | pxor %xmm11,%xmm4 | ||
4458 | |||
4459 | # qhasm: xmm2 ^= xmm12 | ||
4460 | # asm 1: pxor <xmm12=int6464#13,<xmm2=int6464#3 | ||
4461 | # asm 2: pxor <xmm12=%xmm12,<xmm2=%xmm2 | ||
4462 | pxor %xmm12,%xmm2 | ||
4463 | |||
4464 | # qhasm: xmm7 ^= xmm13 | ||
4465 | # asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8 | ||
4466 | # asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7 | ||
4467 | pxor %xmm13,%xmm7 | ||
4468 | |||
4469 | # qhasm: xmm3 ^= xmm14 | ||
4470 | # asm 1: pxor <xmm14=int6464#15,<xmm3=int6464#4 | ||
4471 | # asm 2: pxor <xmm14=%xmm14,<xmm3=%xmm3 | ||
4472 | pxor %xmm14,%xmm3 | ||
4473 | |||
4474 | # qhasm: xmm5 ^= xmm15 | ||
4475 | # asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6 | ||
4476 | # asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5 | ||
4477 | pxor %xmm15,%xmm5 | ||
4478 | |||
4479 | # qhasm: *(int128 *)(c + 384) = xmm0 | ||
4480 | # asm 1: movdqa <xmm0=int6464#1,384(<c=int64#1) | ||
4481 | # asm 2: movdqa <xmm0=%xmm0,384(<c=%rdi) | ||
4482 | movdqa %xmm0,384(%rdi) | ||
4483 | |||
4484 | # qhasm: *(int128 *)(c + 400) = xmm1 | ||
4485 | # asm 1: movdqa <xmm1=int6464#2,400(<c=int64#1) | ||
4486 | # asm 2: movdqa <xmm1=%xmm1,400(<c=%rdi) | ||
4487 | movdqa %xmm1,400(%rdi) | ||
4488 | |||
4489 | # qhasm: *(int128 *)(c + 416) = xmm6 | ||
4490 | # asm 1: movdqa <xmm6=int6464#7,416(<c=int64#1) | ||
4491 | # asm 2: movdqa <xmm6=%xmm6,416(<c=%rdi) | ||
4492 | movdqa %xmm6,416(%rdi) | ||
4493 | |||
4494 | # qhasm: *(int128 *)(c + 432) = xmm4 | ||
4495 | # asm 1: movdqa <xmm4=int6464#5,432(<c=int64#1) | ||
4496 | # asm 2: movdqa <xmm4=%xmm4,432(<c=%rdi) | ||
4497 | movdqa %xmm4,432(%rdi) | ||
4498 | |||
4499 | # qhasm: *(int128 *)(c + 448) = xmm2 | ||
4500 | # asm 1: movdqa <xmm2=int6464#3,448(<c=int64#1) | ||
4501 | # asm 2: movdqa <xmm2=%xmm2,448(<c=%rdi) | ||
4502 | movdqa %xmm2,448(%rdi) | ||
4503 | |||
4504 | # qhasm: *(int128 *)(c + 464) = xmm7 | ||
4505 | # asm 1: movdqa <xmm7=int6464#8,464(<c=int64#1) | ||
4506 | # asm 2: movdqa <xmm7=%xmm7,464(<c=%rdi) | ||
4507 | movdqa %xmm7,464(%rdi) | ||
4508 | |||
4509 | # qhasm: *(int128 *)(c + 480) = xmm3 | ||
4510 | # asm 1: movdqa <xmm3=int6464#4,480(<c=int64#1) | ||
4511 | # asm 2: movdqa <xmm3=%xmm3,480(<c=%rdi) | ||
4512 | movdqa %xmm3,480(%rdi) | ||
4513 | |||
4514 | # qhasm: *(int128 *)(c + 496) = xmm5 | ||
4515 | # asm 1: movdqa <xmm5=int6464#6,496(<c=int64#1) | ||
4516 | # asm 2: movdqa <xmm5=%xmm5,496(<c=%rdi) | ||
4517 | movdqa %xmm5,496(%rdi) | ||
4518 | |||
4519 | # qhasm: xmm0 ^= ONE | ||
4520 | # asm 1: pxor ONE,<xmm0=int6464#1 | ||
4521 | # asm 2: pxor ONE,<xmm0=%xmm0 | ||
4522 | pxor ONE,%xmm0 | ||
4523 | |||
4524 | # qhasm: xmm1 ^= ONE | ||
4525 | # asm 1: pxor ONE,<xmm1=int6464#2 | ||
4526 | # asm 2: pxor ONE,<xmm1=%xmm1 | ||
4527 | pxor ONE,%xmm1 | ||
4528 | |||
4529 | # qhasm: xmm7 ^= ONE | ||
4530 | # asm 1: pxor ONE,<xmm7=int6464#8 | ||
4531 | # asm 2: pxor ONE,<xmm7=%xmm7 | ||
4532 | pxor ONE,%xmm7 | ||
4533 | |||
4534 | # qhasm: xmm3 ^= ONE | ||
4535 | # asm 1: pxor ONE,<xmm3=int6464#4 | ||
4536 | # asm 2: pxor ONE,<xmm3=%xmm3 | ||
4537 | pxor ONE,%xmm3 | ||
4538 | |||
4539 | # qhasm: shuffle bytes of xmm0 by ROTB | ||
4540 | # asm 1: pshufb ROTB,<xmm0=int6464#1 | ||
4541 | # asm 2: pshufb ROTB,<xmm0=%xmm0 | ||
4542 | pshufb ROTB,%xmm0 | ||
4543 | |||
4544 | # qhasm: shuffle bytes of xmm1 by ROTB | ||
4545 | # asm 1: pshufb ROTB,<xmm1=int6464#2 | ||
4546 | # asm 2: pshufb ROTB,<xmm1=%xmm1 | ||
4547 | pshufb ROTB,%xmm1 | ||
4548 | |||
4549 | # qhasm: shuffle bytes of xmm6 by ROTB | ||
4550 | # asm 1: pshufb ROTB,<xmm6=int6464#7 | ||
4551 | # asm 2: pshufb ROTB,<xmm6=%xmm6 | ||
4552 | pshufb ROTB,%xmm6 | ||
4553 | |||
4554 | # qhasm: shuffle bytes of xmm4 by ROTB | ||
4555 | # asm 1: pshufb ROTB,<xmm4=int6464#5 | ||
4556 | # asm 2: pshufb ROTB,<xmm4=%xmm4 | ||
4557 | pshufb ROTB,%xmm4 | ||
4558 | |||
4559 | # qhasm: shuffle bytes of xmm2 by ROTB | ||
4560 | # asm 1: pshufb ROTB,<xmm2=int6464#3 | ||
4561 | # asm 2: pshufb ROTB,<xmm2=%xmm2 | ||
4562 | pshufb ROTB,%xmm2 | ||
4563 | |||
4564 | # qhasm: shuffle bytes of xmm7 by ROTB | ||
4565 | # asm 1: pshufb ROTB,<xmm7=int6464#8 | ||
4566 | # asm 2: pshufb ROTB,<xmm7=%xmm7 | ||
4567 | pshufb ROTB,%xmm7 | ||
4568 | |||
4569 | # qhasm: shuffle bytes of xmm3 by ROTB | ||
4570 | # asm 1: pshufb ROTB,<xmm3=int6464#4 | ||
4571 | # asm 2: pshufb ROTB,<xmm3=%xmm3 | ||
4572 | pshufb ROTB,%xmm3 | ||
4573 | |||
4574 | # qhasm: shuffle bytes of xmm5 by ROTB | ||
4575 | # asm 1: pshufb ROTB,<xmm5=int6464#6 | ||
4576 | # asm 2: pshufb ROTB,<xmm5=%xmm5 | ||
4577 | pshufb ROTB,%xmm5 | ||
4578 | |||
4579 | # qhasm: xmm7 ^= xmm3 | ||
4580 | # asm 1: pxor <xmm3=int6464#4,<xmm7=int6464#8 | ||
4581 | # asm 2: pxor <xmm3=%xmm3,<xmm7=%xmm7 | ||
4582 | pxor %xmm3,%xmm7 | ||
4583 | |||
4584 | # qhasm: xmm6 ^= xmm1 | ||
4585 | # asm 1: pxor <xmm1=int6464#2,<xmm6=int6464#7 | ||
4586 | # asm 2: pxor <xmm1=%xmm1,<xmm6=%xmm6 | ||
4587 | pxor %xmm1,%xmm6 | ||
4588 | |||
4589 | # qhasm: xmm7 ^= xmm0 | ||
4590 | # asm 1: pxor <xmm0=int6464#1,<xmm7=int6464#8 | ||
4591 | # asm 2: pxor <xmm0=%xmm0,<xmm7=%xmm7 | ||
4592 | pxor %xmm0,%xmm7 | ||
4593 | |||
4594 | # qhasm: xmm3 ^= xmm6 | ||
4595 | # asm 1: pxor <xmm6=int6464#7,<xmm3=int6464#4 | ||
4596 | # asm 2: pxor <xmm6=%xmm6,<xmm3=%xmm3 | ||
4597 | pxor %xmm6,%xmm3 | ||
4598 | |||
4599 | # qhasm: xmm4 ^= xmm0 | ||
4600 | # asm 1: pxor <xmm0=int6464#1,<xmm4=int6464#5 | ||
4601 | # asm 2: pxor <xmm0=%xmm0,<xmm4=%xmm4 | ||
4602 | pxor %xmm0,%xmm4 | ||
4603 | |||
4604 | # qhasm: xmm3 ^= xmm4 | ||
4605 | # asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4 | ||
4606 | # asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3 | ||
4607 | pxor %xmm4,%xmm3 | ||
4608 | |||
4609 | # qhasm: xmm4 ^= xmm5 | ||
4610 | # asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5 | ||
4611 | # asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4 | ||
4612 | pxor %xmm5,%xmm4 | ||
4613 | |||
4614 | # qhasm: xmm4 ^= xmm2 | ||
4615 | # asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5 | ||
4616 | # asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4 | ||
4617 | pxor %xmm2,%xmm4 | ||
4618 | |||
4619 | # qhasm: xmm5 ^= xmm7 | ||
4620 | # asm 1: pxor <xmm7=int6464#8,<xmm5=int6464#6 | ||
4621 | # asm 2: pxor <xmm7=%xmm7,<xmm5=%xmm5 | ||
4622 | pxor %xmm7,%xmm5 | ||
4623 | |||
4624 | # qhasm: xmm4 ^= xmm1 | ||
4625 | # asm 1: pxor <xmm1=int6464#2,<xmm4=int6464#5 | ||
4626 | # asm 2: pxor <xmm1=%xmm1,<xmm4=%xmm4 | ||
4627 | pxor %xmm1,%xmm4 | ||
4628 | |||
4629 | # qhasm: xmm2 ^= xmm7 | ||
4630 | # asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3 | ||
4631 | # asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2 | ||
4632 | pxor %xmm7,%xmm2 | ||
4633 | |||
4634 | # qhasm: xmm6 ^= xmm5 | ||
4635 | # asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7 | ||
4636 | # asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6 | ||
4637 | pxor %xmm5,%xmm6 | ||
4638 | |||
4639 | # qhasm: xmm1 ^= xmm7 | ||
4640 | # asm 1: pxor <xmm7=int6464#8,<xmm1=int6464#2 | ||
4641 | # asm 2: pxor <xmm7=%xmm7,<xmm1=%xmm1 | ||
4642 | pxor %xmm7,%xmm1 | ||
4643 | |||
4644 | # qhasm: xmm11 = xmm5 | ||
4645 | # asm 1: movdqa <xmm5=int6464#6,>xmm11=int6464#9 | ||
4646 | # asm 2: movdqa <xmm5=%xmm5,>xmm11=%xmm8 | ||
4647 | movdqa %xmm5,%xmm8 | ||
4648 | |||
4649 | # qhasm: xmm10 = xmm1 | ||
4650 | # asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10 | ||
4651 | # asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9 | ||
4652 | movdqa %xmm1,%xmm9 | ||
4653 | |||
4654 | # qhasm: xmm9 = xmm7 | ||
4655 | # asm 1: movdqa <xmm7=int6464#8,>xmm9=int6464#11 | ||
4656 | # asm 2: movdqa <xmm7=%xmm7,>xmm9=%xmm10 | ||
4657 | movdqa %xmm7,%xmm10 | ||
4658 | |||
4659 | # qhasm: xmm13 = xmm6 | ||
4660 | # asm 1: movdqa <xmm6=int6464#7,>xmm13=int6464#12 | ||
4661 | # asm 2: movdqa <xmm6=%xmm6,>xmm13=%xmm11 | ||
4662 | movdqa %xmm6,%xmm11 | ||
4663 | |||
4664 | # qhasm: xmm12 = xmm3 | ||
4665 | # asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#13 | ||
4666 | # asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm12 | ||
4667 | movdqa %xmm3,%xmm12 | ||
4668 | |||
4669 | # qhasm: xmm11 ^= xmm2 | ||
4670 | # asm 1: pxor <xmm2=int6464#3,<xmm11=int6464#9 | ||
4671 | # asm 2: pxor <xmm2=%xmm2,<xmm11=%xmm8 | ||
4672 | pxor %xmm2,%xmm8 | ||
4673 | |||
4674 | # qhasm: xmm10 ^= xmm6 | ||
4675 | # asm 1: pxor <xmm6=int6464#7,<xmm10=int6464#10 | ||
4676 | # asm 2: pxor <xmm6=%xmm6,<xmm10=%xmm9 | ||
4677 | pxor %xmm6,%xmm9 | ||
4678 | |||
4679 | # qhasm: xmm9 ^= xmm4 | ||
4680 | # asm 1: pxor <xmm4=int6464#5,<xmm9=int6464#11 | ||
4681 | # asm 2: pxor <xmm4=%xmm4,<xmm9=%xmm10 | ||
4682 | pxor %xmm4,%xmm10 | ||
4683 | |||
4684 | # qhasm: xmm13 ^= xmm2 | ||
4685 | # asm 1: pxor <xmm2=int6464#3,<xmm13=int6464#12 | ||
4686 | # asm 2: pxor <xmm2=%xmm2,<xmm13=%xmm11 | ||
4687 | pxor %xmm2,%xmm11 | ||
4688 | |||
4689 | # qhasm: xmm12 ^= xmm0 | ||
4690 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13 | ||
4691 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12 | ||
4692 | pxor %xmm0,%xmm12 | ||
4693 | |||
4694 | # qhasm: xmm14 = xmm11 | ||
4695 | # asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14 | ||
4696 | # asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13 | ||
4697 | movdqa %xmm8,%xmm13 | ||
4698 | |||
4699 | # qhasm: xmm8 = xmm10 | ||
4700 | # asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15 | ||
4701 | # asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14 | ||
4702 | movdqa %xmm9,%xmm14 | ||
4703 | |||
4704 | # qhasm: xmm15 = xmm11 | ||
4705 | # asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16 | ||
4706 | # asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15 | ||
4707 | movdqa %xmm8,%xmm15 | ||
4708 | |||
4709 | # qhasm: xmm10 |= xmm9 | ||
4710 | # asm 1: por <xmm9=int6464#11,<xmm10=int6464#10 | ||
4711 | # asm 2: por <xmm9=%xmm10,<xmm10=%xmm9 | ||
4712 | por %xmm10,%xmm9 | ||
4713 | |||
4714 | # qhasm: xmm11 |= xmm12 | ||
4715 | # asm 1: por <xmm12=int6464#13,<xmm11=int6464#9 | ||
4716 | # asm 2: por <xmm12=%xmm12,<xmm11=%xmm8 | ||
4717 | por %xmm12,%xmm8 | ||
4718 | |||
4719 | # qhasm: xmm15 ^= xmm8 | ||
4720 | # asm 1: pxor <xmm8=int6464#15,<xmm15=int6464#16 | ||
4721 | # asm 2: pxor <xmm8=%xmm14,<xmm15=%xmm15 | ||
4722 | pxor %xmm14,%xmm15 | ||
4723 | |||
4724 | # qhasm: xmm14 &= xmm12 | ||
4725 | # asm 1: pand <xmm12=int6464#13,<xmm14=int6464#14 | ||
4726 | # asm 2: pand <xmm12=%xmm12,<xmm14=%xmm13 | ||
4727 | pand %xmm12,%xmm13 | ||
4728 | |||
4729 | # qhasm: xmm8 &= xmm9 | ||
4730 | # asm 1: pand <xmm9=int6464#11,<xmm8=int6464#15 | ||
4731 | # asm 2: pand <xmm9=%xmm10,<xmm8=%xmm14 | ||
4732 | pand %xmm10,%xmm14 | ||
4733 | |||
4734 | # qhasm: xmm12 ^= xmm9 | ||
4735 | # asm 1: pxor <xmm9=int6464#11,<xmm12=int6464#13 | ||
4736 | # asm 2: pxor <xmm9=%xmm10,<xmm12=%xmm12 | ||
4737 | pxor %xmm10,%xmm12 | ||
4738 | |||
4739 | # qhasm: xmm15 &= xmm12 | ||
4740 | # asm 1: pand <xmm12=int6464#13,<xmm15=int6464#16 | ||
4741 | # asm 2: pand <xmm12=%xmm12,<xmm15=%xmm15 | ||
4742 | pand %xmm12,%xmm15 | ||
4743 | |||
4744 | # qhasm: xmm12 = xmm4 | ||
4745 | # asm 1: movdqa <xmm4=int6464#5,>xmm12=int6464#11 | ||
4746 | # asm 2: movdqa <xmm4=%xmm4,>xmm12=%xmm10 | ||
4747 | movdqa %xmm4,%xmm10 | ||
4748 | |||
4749 | # qhasm: xmm12 ^= xmm0 | ||
4750 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#11 | ||
4751 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm10 | ||
4752 | pxor %xmm0,%xmm10 | ||
4753 | |||
4754 | # qhasm: xmm13 &= xmm12 | ||
4755 | # asm 1: pand <xmm12=int6464#11,<xmm13=int6464#12 | ||
4756 | # asm 2: pand <xmm12=%xmm10,<xmm13=%xmm11 | ||
4757 | pand %xmm10,%xmm11 | ||
4758 | |||
4759 | # qhasm: xmm11 ^= xmm13 | ||
4760 | # asm 1: pxor <xmm13=int6464#12,<xmm11=int6464#9 | ||
4761 | # asm 2: pxor <xmm13=%xmm11,<xmm11=%xmm8 | ||
4762 | pxor %xmm11,%xmm8 | ||
4763 | |||
4764 | # qhasm: xmm10 ^= xmm13 | ||
4765 | # asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10 | ||
4766 | # asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9 | ||
4767 | pxor %xmm11,%xmm9 | ||
4768 | |||
4769 | # qhasm: xmm13 = xmm5 | ||
4770 | # asm 1: movdqa <xmm5=int6464#6,>xmm13=int6464#11 | ||
4771 | # asm 2: movdqa <xmm5=%xmm5,>xmm13=%xmm10 | ||
4772 | movdqa %xmm5,%xmm10 | ||
4773 | |||
4774 | # qhasm: xmm13 ^= xmm1 | ||
4775 | # asm 1: pxor <xmm1=int6464#2,<xmm13=int6464#11 | ||
4776 | # asm 2: pxor <xmm1=%xmm1,<xmm13=%xmm10 | ||
4777 | pxor %xmm1,%xmm10 | ||
4778 | |||
4779 | # qhasm: xmm12 = xmm7 | ||
4780 | # asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#12 | ||
4781 | # asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm11 | ||
4782 | movdqa %xmm7,%xmm11 | ||
4783 | |||
4784 | # qhasm: xmm9 = xmm13 | ||
4785 | # asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13 | ||
4786 | # asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12 | ||
4787 | movdqa %xmm10,%xmm12 | ||
4788 | |||
4789 | # qhasm: xmm12 ^= xmm3 | ||
4790 | # asm 1: pxor <xmm3=int6464#4,<xmm12=int6464#12 | ||
4791 | # asm 2: pxor <xmm3=%xmm3,<xmm12=%xmm11 | ||
4792 | pxor %xmm3,%xmm11 | ||
4793 | |||
4794 | # qhasm: xmm9 |= xmm12 | ||
4795 | # asm 1: por <xmm12=int6464#12,<xmm9=int6464#13 | ||
4796 | # asm 2: por <xmm12=%xmm11,<xmm9=%xmm12 | ||
4797 | por %xmm11,%xmm12 | ||
4798 | |||
4799 | # qhasm: xmm13 &= xmm12 | ||
4800 | # asm 1: pand <xmm12=int6464#12,<xmm13=int6464#11 | ||
4801 | # asm 2: pand <xmm12=%xmm11,<xmm13=%xmm10 | ||
4802 | pand %xmm11,%xmm10 | ||
4803 | |||
4804 | # qhasm: xmm8 ^= xmm13 | ||
4805 | # asm 1: pxor <xmm13=int6464#11,<xmm8=int6464#15 | ||
4806 | # asm 2: pxor <xmm13=%xmm10,<xmm8=%xmm14 | ||
4807 | pxor %xmm10,%xmm14 | ||
4808 | |||
4809 | # qhasm: xmm11 ^= xmm15 | ||
4810 | # asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#9 | ||
4811 | # asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm8 | ||
4812 | pxor %xmm15,%xmm8 | ||
4813 | |||
4814 | # qhasm: xmm10 ^= xmm14 | ||
4815 | # asm 1: pxor <xmm14=int6464#14,<xmm10=int6464#10 | ||
4816 | # asm 2: pxor <xmm14=%xmm13,<xmm10=%xmm9 | ||
4817 | pxor %xmm13,%xmm9 | ||
4818 | |||
4819 | # qhasm: xmm9 ^= xmm15 | ||
4820 | # asm 1: pxor <xmm15=int6464#16,<xmm9=int6464#13 | ||
4821 | # asm 2: pxor <xmm15=%xmm15,<xmm9=%xmm12 | ||
4822 | pxor %xmm15,%xmm12 | ||
4823 | |||
4824 | # qhasm: xmm8 ^= xmm14 | ||
4825 | # asm 1: pxor <xmm14=int6464#14,<xmm8=int6464#15 | ||
4826 | # asm 2: pxor <xmm14=%xmm13,<xmm8=%xmm14 | ||
4827 | pxor %xmm13,%xmm14 | ||
4828 | |||
4829 | # qhasm: xmm9 ^= xmm14 | ||
4830 | # asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13 | ||
4831 | # asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12 | ||
4832 | pxor %xmm13,%xmm12 | ||
4833 | |||
4834 | # qhasm: xmm12 = xmm6 | ||
4835 | # asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#11 | ||
4836 | # asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm10 | ||
4837 | movdqa %xmm6,%xmm10 | ||
4838 | |||
4839 | # qhasm: xmm13 = xmm2 | ||
4840 | # asm 1: movdqa <xmm2=int6464#3,>xmm13=int6464#12 | ||
4841 | # asm 2: movdqa <xmm2=%xmm2,>xmm13=%xmm11 | ||
4842 | movdqa %xmm2,%xmm11 | ||
4843 | |||
4844 | # qhasm: xmm14 = xmm1 | ||
4845 | # asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14 | ||
4846 | # asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13 | ||
4847 | movdqa %xmm1,%xmm13 | ||
4848 | |||
4849 | # qhasm: xmm15 = xmm5 | ||
4850 | # asm 1: movdqa <xmm5=int6464#6,>xmm15=int6464#16 | ||
4851 | # asm 2: movdqa <xmm5=%xmm5,>xmm15=%xmm15 | ||
4852 | movdqa %xmm5,%xmm15 | ||
4853 | |||
4854 | # qhasm: xmm12 &= xmm4 | ||
4855 | # asm 1: pand <xmm4=int6464#5,<xmm12=int6464#11 | ||
4856 | # asm 2: pand <xmm4=%xmm4,<xmm12=%xmm10 | ||
4857 | pand %xmm4,%xmm10 | ||
4858 | |||
4859 | # qhasm: xmm13 &= xmm0 | ||
4860 | # asm 1: pand <xmm0=int6464#1,<xmm13=int6464#12 | ||
4861 | # asm 2: pand <xmm0=%xmm0,<xmm13=%xmm11 | ||
4862 | pand %xmm0,%xmm11 | ||
4863 | |||
4864 | # qhasm: xmm14 &= xmm7 | ||
4865 | # asm 1: pand <xmm7=int6464#8,<xmm14=int6464#14 | ||
4866 | # asm 2: pand <xmm7=%xmm7,<xmm14=%xmm13 | ||
4867 | pand %xmm7,%xmm13 | ||
4868 | |||
4869 | # qhasm: xmm15 |= xmm3 | ||
4870 | # asm 1: por <xmm3=int6464#4,<xmm15=int6464#16 | ||
4871 | # asm 2: por <xmm3=%xmm3,<xmm15=%xmm15 | ||
4872 | por %xmm3,%xmm15 | ||
4873 | |||
4874 | # qhasm: xmm11 ^= xmm12 | ||
4875 | # asm 1: pxor <xmm12=int6464#11,<xmm11=int6464#9 | ||
4876 | # asm 2: pxor <xmm12=%xmm10,<xmm11=%xmm8 | ||
4877 | pxor %xmm10,%xmm8 | ||
4878 | |||
4879 | # qhasm: xmm10 ^= xmm13 | ||
4880 | # asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10 | ||
4881 | # asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9 | ||
4882 | pxor %xmm11,%xmm9 | ||
4883 | |||
4884 | # qhasm: xmm9 ^= xmm14 | ||
4885 | # asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13 | ||
4886 | # asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12 | ||
4887 | pxor %xmm13,%xmm12 | ||
4888 | |||
4889 | # qhasm: xmm8 ^= xmm15 | ||
4890 | # asm 1: pxor <xmm15=int6464#16,<xmm8=int6464#15 | ||
4891 | # asm 2: pxor <xmm15=%xmm15,<xmm8=%xmm14 | ||
4892 | pxor %xmm15,%xmm14 | ||
4893 | |||
4894 | # qhasm: xmm12 = xmm11 | ||
4895 | # asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11 | ||
4896 | # asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10 | ||
4897 | movdqa %xmm8,%xmm10 | ||
4898 | |||
4899 | # qhasm: xmm12 ^= xmm10 | ||
4900 | # asm 1: pxor <xmm10=int6464#10,<xmm12=int6464#11 | ||
4901 | # asm 2: pxor <xmm10=%xmm9,<xmm12=%xmm10 | ||
4902 | pxor %xmm9,%xmm10 | ||
4903 | |||
4904 | # qhasm: xmm11 &= xmm9 | ||
4905 | # asm 1: pand <xmm9=int6464#13,<xmm11=int6464#9 | ||
4906 | # asm 2: pand <xmm9=%xmm12,<xmm11=%xmm8 | ||
4907 | pand %xmm12,%xmm8 | ||
4908 | |||
4909 | # qhasm: xmm14 = xmm8 | ||
4910 | # asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12 | ||
4911 | # asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11 | ||
4912 | movdqa %xmm14,%xmm11 | ||
4913 | |||
4914 | # qhasm: xmm14 ^= xmm11 | ||
4915 | # asm 1: pxor <xmm11=int6464#9,<xmm14=int6464#12 | ||
4916 | # asm 2: pxor <xmm11=%xmm8,<xmm14=%xmm11 | ||
4917 | pxor %xmm8,%xmm11 | ||
4918 | |||
4919 | # qhasm: xmm15 = xmm12 | ||
4920 | # asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14 | ||
4921 | # asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13 | ||
4922 | movdqa %xmm10,%xmm13 | ||
4923 | |||
4924 | # qhasm: xmm15 &= xmm14 | ||
4925 | # asm 1: pand <xmm14=int6464#12,<xmm15=int6464#14 | ||
4926 | # asm 2: pand <xmm14=%xmm11,<xmm15=%xmm13 | ||
4927 | pand %xmm11,%xmm13 | ||
4928 | |||
4929 | # qhasm: xmm15 ^= xmm10 | ||
4930 | # asm 1: pxor <xmm10=int6464#10,<xmm15=int6464#14 | ||
4931 | # asm 2: pxor <xmm10=%xmm9,<xmm15=%xmm13 | ||
4932 | pxor %xmm9,%xmm13 | ||
4933 | |||
4934 | # qhasm: xmm13 = xmm9 | ||
4935 | # asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16 | ||
4936 | # asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15 | ||
4937 | movdqa %xmm12,%xmm15 | ||
4938 | |||
4939 | # qhasm: xmm13 ^= xmm8 | ||
4940 | # asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16 | ||
4941 | # asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15 | ||
4942 | pxor %xmm14,%xmm15 | ||
4943 | |||
4944 | # qhasm: xmm11 ^= xmm10 | ||
4945 | # asm 1: pxor <xmm10=int6464#10,<xmm11=int6464#9 | ||
4946 | # asm 2: pxor <xmm10=%xmm9,<xmm11=%xmm8 | ||
4947 | pxor %xmm9,%xmm8 | ||
4948 | |||
4949 | # qhasm: xmm13 &= xmm11 | ||
4950 | # asm 1: pand <xmm11=int6464#9,<xmm13=int6464#16 | ||
4951 | # asm 2: pand <xmm11=%xmm8,<xmm13=%xmm15 | ||
4952 | pand %xmm8,%xmm15 | ||
4953 | |||
4954 | # qhasm: xmm13 ^= xmm8 | ||
4955 | # asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16 | ||
4956 | # asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15 | ||
4957 | pxor %xmm14,%xmm15 | ||
4958 | |||
4959 | # qhasm: xmm9 ^= xmm13 | ||
4960 | # asm 1: pxor <xmm13=int6464#16,<xmm9=int6464#13 | ||
4961 | # asm 2: pxor <xmm13=%xmm15,<xmm9=%xmm12 | ||
4962 | pxor %xmm15,%xmm12 | ||
4963 | |||
4964 | # qhasm: xmm10 = xmm14 | ||
4965 | # asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9 | ||
4966 | # asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8 | ||
4967 | movdqa %xmm11,%xmm8 | ||
4968 | |||
4969 | # qhasm: xmm10 ^= xmm13 | ||
4970 | # asm 1: pxor <xmm13=int6464#16,<xmm10=int6464#9 | ||
4971 | # asm 2: pxor <xmm13=%xmm15,<xmm10=%xmm8 | ||
4972 | pxor %xmm15,%xmm8 | ||
4973 | |||
4974 | # qhasm: xmm10 &= xmm8 | ||
4975 | # asm 1: pand <xmm8=int6464#15,<xmm10=int6464#9 | ||
4976 | # asm 2: pand <xmm8=%xmm14,<xmm10=%xmm8 | ||
4977 | pand %xmm14,%xmm8 | ||
4978 | |||
4979 | # qhasm: xmm9 ^= xmm10 | ||
4980 | # asm 1: pxor <xmm10=int6464#9,<xmm9=int6464#13 | ||
4981 | # asm 2: pxor <xmm10=%xmm8,<xmm9=%xmm12 | ||
4982 | pxor %xmm8,%xmm12 | ||
4983 | |||
4984 | # qhasm: xmm14 ^= xmm10 | ||
4985 | # asm 1: pxor <xmm10=int6464#9,<xmm14=int6464#12 | ||
4986 | # asm 2: pxor <xmm10=%xmm8,<xmm14=%xmm11 | ||
4987 | pxor %xmm8,%xmm11 | ||
4988 | |||
4989 | # qhasm: xmm14 &= xmm15 | ||
4990 | # asm 1: pand <xmm15=int6464#14,<xmm14=int6464#12 | ||
4991 | # asm 2: pand <xmm15=%xmm13,<xmm14=%xmm11 | ||
4992 | pand %xmm13,%xmm11 | ||
4993 | |||
4994 | # qhasm: xmm14 ^= xmm12 | ||
4995 | # asm 1: pxor <xmm12=int6464#11,<xmm14=int6464#12 | ||
4996 | # asm 2: pxor <xmm12=%xmm10,<xmm14=%xmm11 | ||
4997 | pxor %xmm10,%xmm11 | ||
4998 | |||
4999 | # qhasm: xmm12 = xmm3 | ||
5000 | # asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#9 | ||
5001 | # asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm8 | ||
5002 | movdqa %xmm3,%xmm8 | ||
5003 | |||
5004 | # qhasm: xmm8 = xmm7 | ||
5005 | # asm 1: movdqa <xmm7=int6464#8,>xmm8=int6464#10 | ||
5006 | # asm 2: movdqa <xmm7=%xmm7,>xmm8=%xmm9 | ||
5007 | movdqa %xmm7,%xmm9 | ||
5008 | |||
5009 | # qhasm: xmm10 = xmm15 | ||
5010 | # asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11 | ||
5011 | # asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10 | ||
5012 | movdqa %xmm13,%xmm10 | ||
5013 | |||
5014 | # qhasm: xmm10 ^= xmm14 | ||
5015 | # asm 1: pxor <xmm14=int6464#12,<xmm10=int6464#11 | ||
5016 | # asm 2: pxor <xmm14=%xmm11,<xmm10=%xmm10 | ||
5017 | pxor %xmm11,%xmm10 | ||
5018 | |||
5019 | # qhasm: xmm10 &= xmm3 | ||
5020 | # asm 1: pand <xmm3=int6464#4,<xmm10=int6464#11 | ||
5021 | # asm 2: pand <xmm3=%xmm3,<xmm10=%xmm10 | ||
5022 | pand %xmm3,%xmm10 | ||
5023 | |||
5024 | # qhasm: xmm3 ^= xmm7 | ||
5025 | # asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4 | ||
5026 | # asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3 | ||
5027 | pxor %xmm7,%xmm3 | ||
5028 | |||
5029 | # qhasm: xmm3 &= xmm14 | ||
5030 | # asm 1: pand <xmm14=int6464#12,<xmm3=int6464#4 | ||
5031 | # asm 2: pand <xmm14=%xmm11,<xmm3=%xmm3 | ||
5032 | pand %xmm11,%xmm3 | ||
5033 | |||
5034 | # qhasm: xmm7 &= xmm15 | ||
5035 | # asm 1: pand <xmm15=int6464#14,<xmm7=int6464#8 | ||
5036 | # asm 2: pand <xmm15=%xmm13,<xmm7=%xmm7 | ||
5037 | pand %xmm13,%xmm7 | ||
5038 | |||
5039 | # qhasm: xmm3 ^= xmm7 | ||
5040 | # asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4 | ||
5041 | # asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3 | ||
5042 | pxor %xmm7,%xmm3 | ||
5043 | |||
5044 | # qhasm: xmm7 ^= xmm10 | ||
5045 | # asm 1: pxor <xmm10=int6464#11,<xmm7=int6464#8 | ||
5046 | # asm 2: pxor <xmm10=%xmm10,<xmm7=%xmm7 | ||
5047 | pxor %xmm10,%xmm7 | ||
5048 | |||
5049 | # qhasm: xmm12 ^= xmm0 | ||
5050 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#9 | ||
5051 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm8 | ||
5052 | pxor %xmm0,%xmm8 | ||
5053 | |||
5054 | # qhasm: xmm8 ^= xmm4 | ||
5055 | # asm 1: pxor <xmm4=int6464#5,<xmm8=int6464#10 | ||
5056 | # asm 2: pxor <xmm4=%xmm4,<xmm8=%xmm9 | ||
5057 | pxor %xmm4,%xmm9 | ||
5058 | |||
5059 | # qhasm: xmm15 ^= xmm13 | ||
5060 | # asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14 | ||
5061 | # asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13 | ||
5062 | pxor %xmm15,%xmm13 | ||
5063 | |||
5064 | # qhasm: xmm14 ^= xmm9 | ||
5065 | # asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12 | ||
5066 | # asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11 | ||
5067 | pxor %xmm12,%xmm11 | ||
5068 | |||
5069 | # qhasm: xmm11 = xmm15 | ||
5070 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
5071 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
5072 | movdqa %xmm13,%xmm10 | ||
5073 | |||
5074 | # qhasm: xmm11 ^= xmm14 | ||
5075 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
5076 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
5077 | pxor %xmm11,%xmm10 | ||
5078 | |||
5079 | # qhasm: xmm11 &= xmm12 | ||
5080 | # asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11 | ||
5081 | # asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10 | ||
5082 | pand %xmm8,%xmm10 | ||
5083 | |||
5084 | # qhasm: xmm12 ^= xmm8 | ||
5085 | # asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9 | ||
5086 | # asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8 | ||
5087 | pxor %xmm9,%xmm8 | ||
5088 | |||
5089 | # qhasm: xmm12 &= xmm14 | ||
5090 | # asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9 | ||
5091 | # asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8 | ||
5092 | pand %xmm11,%xmm8 | ||
5093 | |||
5094 | # qhasm: xmm8 &= xmm15 | ||
5095 | # asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10 | ||
5096 | # asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9 | ||
5097 | pand %xmm13,%xmm9 | ||
5098 | |||
5099 | # qhasm: xmm8 ^= xmm12 | ||
5100 | # asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10 | ||
5101 | # asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9 | ||
5102 | pxor %xmm8,%xmm9 | ||
5103 | |||
5104 | # qhasm: xmm12 ^= xmm11 | ||
5105 | # asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9 | ||
5106 | # asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8 | ||
5107 | pxor %xmm10,%xmm8 | ||
5108 | |||
5109 | # qhasm: xmm10 = xmm13 | ||
5110 | # asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11 | ||
5111 | # asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10 | ||
5112 | movdqa %xmm15,%xmm10 | ||
5113 | |||
5114 | # qhasm: xmm10 ^= xmm9 | ||
5115 | # asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11 | ||
5116 | # asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10 | ||
5117 | pxor %xmm12,%xmm10 | ||
5118 | |||
5119 | # qhasm: xmm10 &= xmm0 | ||
5120 | # asm 1: pand <xmm0=int6464#1,<xmm10=int6464#11 | ||
5121 | # asm 2: pand <xmm0=%xmm0,<xmm10=%xmm10 | ||
5122 | pand %xmm0,%xmm10 | ||
5123 | |||
5124 | # qhasm: xmm0 ^= xmm4 | ||
5125 | # asm 1: pxor <xmm4=int6464#5,<xmm0=int6464#1 | ||
5126 | # asm 2: pxor <xmm4=%xmm4,<xmm0=%xmm0 | ||
5127 | pxor %xmm4,%xmm0 | ||
5128 | |||
5129 | # qhasm: xmm0 &= xmm9 | ||
5130 | # asm 1: pand <xmm9=int6464#13,<xmm0=int6464#1 | ||
5131 | # asm 2: pand <xmm9=%xmm12,<xmm0=%xmm0 | ||
5132 | pand %xmm12,%xmm0 | ||
5133 | |||
5134 | # qhasm: xmm4 &= xmm13 | ||
5135 | # asm 1: pand <xmm13=int6464#16,<xmm4=int6464#5 | ||
5136 | # asm 2: pand <xmm13=%xmm15,<xmm4=%xmm4 | ||
5137 | pand %xmm15,%xmm4 | ||
5138 | |||
5139 | # qhasm: xmm0 ^= xmm4 | ||
5140 | # asm 1: pxor <xmm4=int6464#5,<xmm0=int6464#1 | ||
5141 | # asm 2: pxor <xmm4=%xmm4,<xmm0=%xmm0 | ||
5142 | pxor %xmm4,%xmm0 | ||
5143 | |||
5144 | # qhasm: xmm4 ^= xmm10 | ||
5145 | # asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5 | ||
5146 | # asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4 | ||
5147 | pxor %xmm10,%xmm4 | ||
5148 | |||
5149 | # qhasm: xmm3 ^= xmm12 | ||
5150 | # asm 1: pxor <xmm12=int6464#9,<xmm3=int6464#4 | ||
5151 | # asm 2: pxor <xmm12=%xmm8,<xmm3=%xmm3 | ||
5152 | pxor %xmm8,%xmm3 | ||
5153 | |||
5154 | # qhasm: xmm0 ^= xmm12 | ||
5155 | # asm 1: pxor <xmm12=int6464#9,<xmm0=int6464#1 | ||
5156 | # asm 2: pxor <xmm12=%xmm8,<xmm0=%xmm0 | ||
5157 | pxor %xmm8,%xmm0 | ||
5158 | |||
5159 | # qhasm: xmm7 ^= xmm8 | ||
5160 | # asm 1: pxor <xmm8=int6464#10,<xmm7=int6464#8 | ||
5161 | # asm 2: pxor <xmm8=%xmm9,<xmm7=%xmm7 | ||
5162 | pxor %xmm9,%xmm7 | ||
5163 | |||
5164 | # qhasm: xmm4 ^= xmm8 | ||
5165 | # asm 1: pxor <xmm8=int6464#10,<xmm4=int6464#5 | ||
5166 | # asm 2: pxor <xmm8=%xmm9,<xmm4=%xmm4 | ||
5167 | pxor %xmm9,%xmm4 | ||
5168 | |||
5169 | # qhasm: xmm12 = xmm5 | ||
5170 | # asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#9 | ||
5171 | # asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm8 | ||
5172 | movdqa %xmm5,%xmm8 | ||
5173 | |||
5174 | # qhasm: xmm8 = xmm1 | ||
5175 | # asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10 | ||
5176 | # asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9 | ||
5177 | movdqa %xmm1,%xmm9 | ||
5178 | |||
5179 | # qhasm: xmm12 ^= xmm2 | ||
5180 | # asm 1: pxor <xmm2=int6464#3,<xmm12=int6464#9 | ||
5181 | # asm 2: pxor <xmm2=%xmm2,<xmm12=%xmm8 | ||
5182 | pxor %xmm2,%xmm8 | ||
5183 | |||
5184 | # qhasm: xmm8 ^= xmm6 | ||
5185 | # asm 1: pxor <xmm6=int6464#7,<xmm8=int6464#10 | ||
5186 | # asm 2: pxor <xmm6=%xmm6,<xmm8=%xmm9 | ||
5187 | pxor %xmm6,%xmm9 | ||
5188 | |||
5189 | # qhasm: xmm11 = xmm15 | ||
5190 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
5191 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
5192 | movdqa %xmm13,%xmm10 | ||
5193 | |||
5194 | # qhasm: xmm11 ^= xmm14 | ||
5195 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
5196 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
5197 | pxor %xmm11,%xmm10 | ||
5198 | |||
5199 | # qhasm: xmm11 &= xmm12 | ||
5200 | # asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11 | ||
5201 | # asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10 | ||
5202 | pand %xmm8,%xmm10 | ||
5203 | |||
5204 | # qhasm: xmm12 ^= xmm8 | ||
5205 | # asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9 | ||
5206 | # asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8 | ||
5207 | pxor %xmm9,%xmm8 | ||
5208 | |||
5209 | # qhasm: xmm12 &= xmm14 | ||
5210 | # asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9 | ||
5211 | # asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8 | ||
5212 | pand %xmm11,%xmm8 | ||
5213 | |||
5214 | # qhasm: xmm8 &= xmm15 | ||
5215 | # asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10 | ||
5216 | # asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9 | ||
5217 | pand %xmm13,%xmm9 | ||
5218 | |||
5219 | # qhasm: xmm8 ^= xmm12 | ||
5220 | # asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10 | ||
5221 | # asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9 | ||
5222 | pxor %xmm8,%xmm9 | ||
5223 | |||
5224 | # qhasm: xmm12 ^= xmm11 | ||
5225 | # asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9 | ||
5226 | # asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8 | ||
5227 | pxor %xmm10,%xmm8 | ||
5228 | |||
5229 | # qhasm: xmm10 = xmm13 | ||
5230 | # asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11 | ||
5231 | # asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10 | ||
5232 | movdqa %xmm15,%xmm10 | ||
5233 | |||
5234 | # qhasm: xmm10 ^= xmm9 | ||
5235 | # asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11 | ||
5236 | # asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10 | ||
5237 | pxor %xmm12,%xmm10 | ||
5238 | |||
5239 | # qhasm: xmm10 &= xmm2 | ||
5240 | # asm 1: pand <xmm2=int6464#3,<xmm10=int6464#11 | ||
5241 | # asm 2: pand <xmm2=%xmm2,<xmm10=%xmm10 | ||
5242 | pand %xmm2,%xmm10 | ||
5243 | |||
5244 | # qhasm: xmm2 ^= xmm6 | ||
5245 | # asm 1: pxor <xmm6=int6464#7,<xmm2=int6464#3 | ||
5246 | # asm 2: pxor <xmm6=%xmm6,<xmm2=%xmm2 | ||
5247 | pxor %xmm6,%xmm2 | ||
5248 | |||
5249 | # qhasm: xmm2 &= xmm9 | ||
5250 | # asm 1: pand <xmm9=int6464#13,<xmm2=int6464#3 | ||
5251 | # asm 2: pand <xmm9=%xmm12,<xmm2=%xmm2 | ||
5252 | pand %xmm12,%xmm2 | ||
5253 | |||
5254 | # qhasm: xmm6 &= xmm13 | ||
5255 | # asm 1: pand <xmm13=int6464#16,<xmm6=int6464#7 | ||
5256 | # asm 2: pand <xmm13=%xmm15,<xmm6=%xmm6 | ||
5257 | pand %xmm15,%xmm6 | ||
5258 | |||
5259 | # qhasm: xmm2 ^= xmm6 | ||
5260 | # asm 1: pxor <xmm6=int6464#7,<xmm2=int6464#3 | ||
5261 | # asm 2: pxor <xmm6=%xmm6,<xmm2=%xmm2 | ||
5262 | pxor %xmm6,%xmm2 | ||
5263 | |||
5264 | # qhasm: xmm6 ^= xmm10 | ||
5265 | # asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7 | ||
5266 | # asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6 | ||
5267 | pxor %xmm10,%xmm6 | ||
5268 | |||
5269 | # qhasm: xmm15 ^= xmm13 | ||
5270 | # asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14 | ||
5271 | # asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13 | ||
5272 | pxor %xmm15,%xmm13 | ||
5273 | |||
5274 | # qhasm: xmm14 ^= xmm9 | ||
5275 | # asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12 | ||
5276 | # asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11 | ||
5277 | pxor %xmm12,%xmm11 | ||
5278 | |||
5279 | # qhasm: xmm11 = xmm15 | ||
5280 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
5281 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
5282 | movdqa %xmm13,%xmm10 | ||
5283 | |||
5284 | # qhasm: xmm11 ^= xmm14 | ||
5285 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
5286 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
5287 | pxor %xmm11,%xmm10 | ||
5288 | |||
5289 | # qhasm: xmm11 &= xmm5 | ||
5290 | # asm 1: pand <xmm5=int6464#6,<xmm11=int6464#11 | ||
5291 | # asm 2: pand <xmm5=%xmm5,<xmm11=%xmm10 | ||
5292 | pand %xmm5,%xmm10 | ||
5293 | |||
5294 | # qhasm: xmm5 ^= xmm1 | ||
5295 | # asm 1: pxor <xmm1=int6464#2,<xmm5=int6464#6 | ||
5296 | # asm 2: pxor <xmm1=%xmm1,<xmm5=%xmm5 | ||
5297 | pxor %xmm1,%xmm5 | ||
5298 | |||
5299 | # qhasm: xmm5 &= xmm14 | ||
5300 | # asm 1: pand <xmm14=int6464#12,<xmm5=int6464#6 | ||
5301 | # asm 2: pand <xmm14=%xmm11,<xmm5=%xmm5 | ||
5302 | pand %xmm11,%xmm5 | ||
5303 | |||
5304 | # qhasm: xmm1 &= xmm15 | ||
5305 | # asm 1: pand <xmm15=int6464#14,<xmm1=int6464#2 | ||
5306 | # asm 2: pand <xmm15=%xmm13,<xmm1=%xmm1 | ||
5307 | pand %xmm13,%xmm1 | ||
5308 | |||
5309 | # qhasm: xmm5 ^= xmm1 | ||
5310 | # asm 1: pxor <xmm1=int6464#2,<xmm5=int6464#6 | ||
5311 | # asm 2: pxor <xmm1=%xmm1,<xmm5=%xmm5 | ||
5312 | pxor %xmm1,%xmm5 | ||
5313 | |||
5314 | # qhasm: xmm1 ^= xmm11 | ||
5315 | # asm 1: pxor <xmm11=int6464#11,<xmm1=int6464#2 | ||
5316 | # asm 2: pxor <xmm11=%xmm10,<xmm1=%xmm1 | ||
5317 | pxor %xmm10,%xmm1 | ||
5318 | |||
5319 | # qhasm: xmm5 ^= xmm12 | ||
5320 | # asm 1: pxor <xmm12=int6464#9,<xmm5=int6464#6 | ||
5321 | # asm 2: pxor <xmm12=%xmm8,<xmm5=%xmm5 | ||
5322 | pxor %xmm8,%xmm5 | ||
5323 | |||
5324 | # qhasm: xmm2 ^= xmm12 | ||
5325 | # asm 1: pxor <xmm12=int6464#9,<xmm2=int6464#3 | ||
5326 | # asm 2: pxor <xmm12=%xmm8,<xmm2=%xmm2 | ||
5327 | pxor %xmm8,%xmm2 | ||
5328 | |||
5329 | # qhasm: xmm1 ^= xmm8 | ||
5330 | # asm 1: pxor <xmm8=int6464#10,<xmm1=int6464#2 | ||
5331 | # asm 2: pxor <xmm8=%xmm9,<xmm1=%xmm1 | ||
5332 | pxor %xmm9,%xmm1 | ||
5333 | |||
5334 | # qhasm: xmm6 ^= xmm8 | ||
5335 | # asm 1: pxor <xmm8=int6464#10,<xmm6=int6464#7 | ||
5336 | # asm 2: pxor <xmm8=%xmm9,<xmm6=%xmm6 | ||
5337 | pxor %xmm9,%xmm6 | ||
5338 | |||
5339 | # qhasm: xmm5 ^= xmm0 | ||
5340 | # asm 1: pxor <xmm0=int6464#1,<xmm5=int6464#6 | ||
5341 | # asm 2: pxor <xmm0=%xmm0,<xmm5=%xmm5 | ||
5342 | pxor %xmm0,%xmm5 | ||
5343 | |||
5344 | # qhasm: xmm1 ^= xmm3 | ||
5345 | # asm 1: pxor <xmm3=int6464#4,<xmm1=int6464#2 | ||
5346 | # asm 2: pxor <xmm3=%xmm3,<xmm1=%xmm1 | ||
5347 | pxor %xmm3,%xmm1 | ||
5348 | |||
5349 | # qhasm: xmm2 ^= xmm5 | ||
5350 | # asm 1: pxor <xmm5=int6464#6,<xmm2=int6464#3 | ||
5351 | # asm 2: pxor <xmm5=%xmm5,<xmm2=%xmm2 | ||
5352 | pxor %xmm5,%xmm2 | ||
5353 | |||
5354 | # qhasm: xmm3 ^= xmm0 | ||
5355 | # asm 1: pxor <xmm0=int6464#1,<xmm3=int6464#4 | ||
5356 | # asm 2: pxor <xmm0=%xmm0,<xmm3=%xmm3 | ||
5357 | pxor %xmm0,%xmm3 | ||
5358 | |||
5359 | # qhasm: xmm0 ^= xmm1 | ||
5360 | # asm 1: pxor <xmm1=int6464#2,<xmm0=int6464#1 | ||
5361 | # asm 2: pxor <xmm1=%xmm1,<xmm0=%xmm0 | ||
5362 | pxor %xmm1,%xmm0 | ||
5363 | |||
5364 | # qhasm: xmm1 ^= xmm7 | ||
5365 | # asm 1: pxor <xmm7=int6464#8,<xmm1=int6464#2 | ||
5366 | # asm 2: pxor <xmm7=%xmm7,<xmm1=%xmm1 | ||
5367 | pxor %xmm7,%xmm1 | ||
5368 | |||
5369 | # qhasm: xmm7 ^= xmm6 | ||
5370 | # asm 1: pxor <xmm6=int6464#7,<xmm7=int6464#8 | ||
5371 | # asm 2: pxor <xmm6=%xmm6,<xmm7=%xmm7 | ||
5372 | pxor %xmm6,%xmm7 | ||
5373 | |||
5374 | # qhasm: xmm2 ^= xmm7 | ||
5375 | # asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3 | ||
5376 | # asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2 | ||
5377 | pxor %xmm7,%xmm2 | ||
5378 | |||
5379 | # qhasm: xmm6 ^= xmm4 | ||
5380 | # asm 1: pxor <xmm4=int6464#5,<xmm6=int6464#7 | ||
5381 | # asm 2: pxor <xmm4=%xmm4,<xmm6=%xmm6 | ||
5382 | pxor %xmm4,%xmm6 | ||
5383 | |||
5384 | # qhasm: xmm4 ^= xmm7 | ||
5385 | # asm 1: pxor <xmm7=int6464#8,<xmm4=int6464#5 | ||
5386 | # asm 2: pxor <xmm7=%xmm7,<xmm4=%xmm4 | ||
5387 | pxor %xmm7,%xmm4 | ||
5388 | |||
5389 | # qhasm: xmm3 ^= xmm4 | ||
5390 | # asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4 | ||
5391 | # asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3 | ||
5392 | pxor %xmm4,%xmm3 | ||
5393 | |||
5394 | # qhasm: xmm3 ^= RCON | ||
5395 | # asm 1: pxor RCON,<xmm3=int6464#4 | ||
5396 | # asm 2: pxor RCON,<xmm3=%xmm3 | ||
5397 | pxor RCON,%xmm3 | ||
5398 | |||
5399 | # qhasm: shuffle bytes of xmm0 by EXPB0 | ||
5400 | # asm 1: pshufb EXPB0,<xmm0=int6464#1 | ||
5401 | # asm 2: pshufb EXPB0,<xmm0=%xmm0 | ||
5402 | pshufb EXPB0,%xmm0 | ||
5403 | |||
5404 | # qhasm: shuffle bytes of xmm1 by EXPB0 | ||
5405 | # asm 1: pshufb EXPB0,<xmm1=int6464#2 | ||
5406 | # asm 2: pshufb EXPB0,<xmm1=%xmm1 | ||
5407 | pshufb EXPB0,%xmm1 | ||
5408 | |||
5409 | # qhasm: shuffle bytes of xmm2 by EXPB0 | ||
5410 | # asm 1: pshufb EXPB0,<xmm2=int6464#3 | ||
5411 | # asm 2: pshufb EXPB0,<xmm2=%xmm2 | ||
5412 | pshufb EXPB0,%xmm2 | ||
5413 | |||
5414 | # qhasm: shuffle bytes of xmm3 by EXPB0 | ||
5415 | # asm 1: pshufb EXPB0,<xmm3=int6464#4 | ||
5416 | # asm 2: pshufb EXPB0,<xmm3=%xmm3 | ||
5417 | pshufb EXPB0,%xmm3 | ||
5418 | |||
5419 | # qhasm: shuffle bytes of xmm4 by EXPB0 | ||
5420 | # asm 1: pshufb EXPB0,<xmm4=int6464#5 | ||
5421 | # asm 2: pshufb EXPB0,<xmm4=%xmm4 | ||
5422 | pshufb EXPB0,%xmm4 | ||
5423 | |||
5424 | # qhasm: shuffle bytes of xmm5 by EXPB0 | ||
5425 | # asm 1: pshufb EXPB0,<xmm5=int6464#6 | ||
5426 | # asm 2: pshufb EXPB0,<xmm5=%xmm5 | ||
5427 | pshufb EXPB0,%xmm5 | ||
5428 | |||
5429 | # qhasm: shuffle bytes of xmm6 by EXPB0 | ||
5430 | # asm 1: pshufb EXPB0,<xmm6=int6464#7 | ||
5431 | # asm 2: pshufb EXPB0,<xmm6=%xmm6 | ||
5432 | pshufb EXPB0,%xmm6 | ||
5433 | |||
5434 | # qhasm: shuffle bytes of xmm7 by EXPB0 | ||
5435 | # asm 1: pshufb EXPB0,<xmm7=int6464#8 | ||
5436 | # asm 2: pshufb EXPB0,<xmm7=%xmm7 | ||
5437 | pshufb EXPB0,%xmm7 | ||
5438 | |||
5439 | # qhasm: xmm8 = *(int128 *)(c + 384) | ||
5440 | # asm 1: movdqa 384(<c=int64#1),>xmm8=int6464#9 | ||
5441 | # asm 2: movdqa 384(<c=%rdi),>xmm8=%xmm8 | ||
5442 | movdqa 384(%rdi),%xmm8 | ||
5443 | |||
5444 | # qhasm: xmm9 = *(int128 *)(c + 400) | ||
5445 | # asm 1: movdqa 400(<c=int64#1),>xmm9=int6464#10 | ||
5446 | # asm 2: movdqa 400(<c=%rdi),>xmm9=%xmm9 | ||
5447 | movdqa 400(%rdi),%xmm9 | ||
5448 | |||
5449 | # qhasm: xmm10 = *(int128 *)(c + 416) | ||
5450 | # asm 1: movdqa 416(<c=int64#1),>xmm10=int6464#11 | ||
5451 | # asm 2: movdqa 416(<c=%rdi),>xmm10=%xmm10 | ||
5452 | movdqa 416(%rdi),%xmm10 | ||
5453 | |||
5454 | # qhasm: xmm11 = *(int128 *)(c + 432) | ||
5455 | # asm 1: movdqa 432(<c=int64#1),>xmm11=int6464#12 | ||
5456 | # asm 2: movdqa 432(<c=%rdi),>xmm11=%xmm11 | ||
5457 | movdqa 432(%rdi),%xmm11 | ||
5458 | |||
5459 | # qhasm: xmm12 = *(int128 *)(c + 448) | ||
5460 | # asm 1: movdqa 448(<c=int64#1),>xmm12=int6464#13 | ||
5461 | # asm 2: movdqa 448(<c=%rdi),>xmm12=%xmm12 | ||
5462 | movdqa 448(%rdi),%xmm12 | ||
5463 | |||
5464 | # qhasm: xmm13 = *(int128 *)(c + 464) | ||
5465 | # asm 1: movdqa 464(<c=int64#1),>xmm13=int6464#14 | ||
5466 | # asm 2: movdqa 464(<c=%rdi),>xmm13=%xmm13 | ||
5467 | movdqa 464(%rdi),%xmm13 | ||
5468 | |||
5469 | # qhasm: xmm14 = *(int128 *)(c + 480) | ||
5470 | # asm 1: movdqa 480(<c=int64#1),>xmm14=int6464#15 | ||
5471 | # asm 2: movdqa 480(<c=%rdi),>xmm14=%xmm14 | ||
5472 | movdqa 480(%rdi),%xmm14 | ||
5473 | |||
5474 | # qhasm: xmm15 = *(int128 *)(c + 496) | ||
5475 | # asm 1: movdqa 496(<c=int64#1),>xmm15=int6464#16 | ||
5476 | # asm 2: movdqa 496(<c=%rdi),>xmm15=%xmm15 | ||
5477 | movdqa 496(%rdi),%xmm15 | ||
5478 | |||
5479 | # qhasm: xmm8 ^= ONE | ||
5480 | # asm 1: pxor ONE,<xmm8=int6464#9 | ||
5481 | # asm 2: pxor ONE,<xmm8=%xmm8 | ||
5482 | pxor ONE,%xmm8 | ||
5483 | |||
5484 | # qhasm: xmm9 ^= ONE | ||
5485 | # asm 1: pxor ONE,<xmm9=int6464#10 | ||
5486 | # asm 2: pxor ONE,<xmm9=%xmm9 | ||
5487 | pxor ONE,%xmm9 | ||
5488 | |||
5489 | # qhasm: xmm13 ^= ONE | ||
5490 | # asm 1: pxor ONE,<xmm13=int6464#14 | ||
5491 | # asm 2: pxor ONE,<xmm13=%xmm13 | ||
5492 | pxor ONE,%xmm13 | ||
5493 | |||
5494 | # qhasm: xmm14 ^= ONE | ||
5495 | # asm 1: pxor ONE,<xmm14=int6464#15 | ||
5496 | # asm 2: pxor ONE,<xmm14=%xmm14 | ||
5497 | pxor ONE,%xmm14 | ||
5498 | |||
5499 | # qhasm: xmm0 ^= xmm8 | ||
5500 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
5501 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
5502 | pxor %xmm8,%xmm0 | ||
5503 | |||
5504 | # qhasm: xmm1 ^= xmm9 | ||
5505 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
5506 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
5507 | pxor %xmm9,%xmm1 | ||
5508 | |||
5509 | # qhasm: xmm2 ^= xmm10 | ||
5510 | # asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3 | ||
5511 | # asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2 | ||
5512 | pxor %xmm10,%xmm2 | ||
5513 | |||
5514 | # qhasm: xmm3 ^= xmm11 | ||
5515 | # asm 1: pxor <xmm11=int6464#12,<xmm3=int6464#4 | ||
5516 | # asm 2: pxor <xmm11=%xmm11,<xmm3=%xmm3 | ||
5517 | pxor %xmm11,%xmm3 | ||
5518 | |||
5519 | # qhasm: xmm4 ^= xmm12 | ||
5520 | # asm 1: pxor <xmm12=int6464#13,<xmm4=int6464#5 | ||
5521 | # asm 2: pxor <xmm12=%xmm12,<xmm4=%xmm4 | ||
5522 | pxor %xmm12,%xmm4 | ||
5523 | |||
5524 | # qhasm: xmm5 ^= xmm13 | ||
5525 | # asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6 | ||
5526 | # asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5 | ||
5527 | pxor %xmm13,%xmm5 | ||
5528 | |||
5529 | # qhasm: xmm6 ^= xmm14 | ||
5530 | # asm 1: pxor <xmm14=int6464#15,<xmm6=int6464#7 | ||
5531 | # asm 2: pxor <xmm14=%xmm14,<xmm6=%xmm6 | ||
5532 | pxor %xmm14,%xmm6 | ||
5533 | |||
5534 | # qhasm: xmm7 ^= xmm15 | ||
5535 | # asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8 | ||
5536 | # asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7 | ||
5537 | pxor %xmm15,%xmm7 | ||
5538 | |||
5539 | # qhasm: uint32323232 xmm8 >>= 8 | ||
5540 | # asm 1: psrld $8,<xmm8=int6464#9 | ||
5541 | # asm 2: psrld $8,<xmm8=%xmm8 | ||
5542 | psrld $8,%xmm8 | ||
5543 | |||
5544 | # qhasm: uint32323232 xmm9 >>= 8 | ||
5545 | # asm 1: psrld $8,<xmm9=int6464#10 | ||
5546 | # asm 2: psrld $8,<xmm9=%xmm9 | ||
5547 | psrld $8,%xmm9 | ||
5548 | |||
5549 | # qhasm: uint32323232 xmm10 >>= 8 | ||
5550 | # asm 1: psrld $8,<xmm10=int6464#11 | ||
5551 | # asm 2: psrld $8,<xmm10=%xmm10 | ||
5552 | psrld $8,%xmm10 | ||
5553 | |||
5554 | # qhasm: uint32323232 xmm11 >>= 8 | ||
5555 | # asm 1: psrld $8,<xmm11=int6464#12 | ||
5556 | # asm 2: psrld $8,<xmm11=%xmm11 | ||
5557 | psrld $8,%xmm11 | ||
5558 | |||
5559 | # qhasm: uint32323232 xmm12 >>= 8 | ||
5560 | # asm 1: psrld $8,<xmm12=int6464#13 | ||
5561 | # asm 2: psrld $8,<xmm12=%xmm12 | ||
5562 | psrld $8,%xmm12 | ||
5563 | |||
5564 | # qhasm: uint32323232 xmm13 >>= 8 | ||
5565 | # asm 1: psrld $8,<xmm13=int6464#14 | ||
5566 | # asm 2: psrld $8,<xmm13=%xmm13 | ||
5567 | psrld $8,%xmm13 | ||
5568 | |||
5569 | # qhasm: uint32323232 xmm14 >>= 8 | ||
5570 | # asm 1: psrld $8,<xmm14=int6464#15 | ||
5571 | # asm 2: psrld $8,<xmm14=%xmm14 | ||
5572 | psrld $8,%xmm14 | ||
5573 | |||
5574 | # qhasm: uint32323232 xmm15 >>= 8 | ||
5575 | # asm 1: psrld $8,<xmm15=int6464#16 | ||
5576 | # asm 2: psrld $8,<xmm15=%xmm15 | ||
5577 | psrld $8,%xmm15 | ||
5578 | |||
5579 | # qhasm: xmm0 ^= xmm8 | ||
5580 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
5581 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
5582 | pxor %xmm8,%xmm0 | ||
5583 | |||
5584 | # qhasm: xmm1 ^= xmm9 | ||
5585 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
5586 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
5587 | pxor %xmm9,%xmm1 | ||
5588 | |||
5589 | # qhasm: xmm2 ^= xmm10 | ||
5590 | # asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3 | ||
5591 | # asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2 | ||
5592 | pxor %xmm10,%xmm2 | ||
5593 | |||
5594 | # qhasm: xmm3 ^= xmm11 | ||
5595 | # asm 1: pxor <xmm11=int6464#12,<xmm3=int6464#4 | ||
5596 | # asm 2: pxor <xmm11=%xmm11,<xmm3=%xmm3 | ||
5597 | pxor %xmm11,%xmm3 | ||
5598 | |||
5599 | # qhasm: xmm4 ^= xmm12 | ||
5600 | # asm 1: pxor <xmm12=int6464#13,<xmm4=int6464#5 | ||
5601 | # asm 2: pxor <xmm12=%xmm12,<xmm4=%xmm4 | ||
5602 | pxor %xmm12,%xmm4 | ||
5603 | |||
5604 | # qhasm: xmm5 ^= xmm13 | ||
5605 | # asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6 | ||
5606 | # asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5 | ||
5607 | pxor %xmm13,%xmm5 | ||
5608 | |||
5609 | # qhasm: xmm6 ^= xmm14 | ||
5610 | # asm 1: pxor <xmm14=int6464#15,<xmm6=int6464#7 | ||
5611 | # asm 2: pxor <xmm14=%xmm14,<xmm6=%xmm6 | ||
5612 | pxor %xmm14,%xmm6 | ||
5613 | |||
5614 | # qhasm: xmm7 ^= xmm15 | ||
5615 | # asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8 | ||
5616 | # asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7 | ||
5617 | pxor %xmm15,%xmm7 | ||
5618 | |||
5619 | # qhasm: uint32323232 xmm8 >>= 8 | ||
5620 | # asm 1: psrld $8,<xmm8=int6464#9 | ||
5621 | # asm 2: psrld $8,<xmm8=%xmm8 | ||
5622 | psrld $8,%xmm8 | ||
5623 | |||
5624 | # qhasm: uint32323232 xmm9 >>= 8 | ||
5625 | # asm 1: psrld $8,<xmm9=int6464#10 | ||
5626 | # asm 2: psrld $8,<xmm9=%xmm9 | ||
5627 | psrld $8,%xmm9 | ||
5628 | |||
5629 | # qhasm: uint32323232 xmm10 >>= 8 | ||
5630 | # asm 1: psrld $8,<xmm10=int6464#11 | ||
5631 | # asm 2: psrld $8,<xmm10=%xmm10 | ||
5632 | psrld $8,%xmm10 | ||
5633 | |||
5634 | # qhasm: uint32323232 xmm11 >>= 8 | ||
5635 | # asm 1: psrld $8,<xmm11=int6464#12 | ||
5636 | # asm 2: psrld $8,<xmm11=%xmm11 | ||
5637 | psrld $8,%xmm11 | ||
5638 | |||
5639 | # qhasm: uint32323232 xmm12 >>= 8 | ||
5640 | # asm 1: psrld $8,<xmm12=int6464#13 | ||
5641 | # asm 2: psrld $8,<xmm12=%xmm12 | ||
5642 | psrld $8,%xmm12 | ||
5643 | |||
5644 | # qhasm: uint32323232 xmm13 >>= 8 | ||
5645 | # asm 1: psrld $8,<xmm13=int6464#14 | ||
5646 | # asm 2: psrld $8,<xmm13=%xmm13 | ||
5647 | psrld $8,%xmm13 | ||
5648 | |||
5649 | # qhasm: uint32323232 xmm14 >>= 8 | ||
5650 | # asm 1: psrld $8,<xmm14=int6464#15 | ||
5651 | # asm 2: psrld $8,<xmm14=%xmm14 | ||
5652 | psrld $8,%xmm14 | ||
5653 | |||
5654 | # qhasm: uint32323232 xmm15 >>= 8 | ||
5655 | # asm 1: psrld $8,<xmm15=int6464#16 | ||
5656 | # asm 2: psrld $8,<xmm15=%xmm15 | ||
5657 | psrld $8,%xmm15 | ||
5658 | |||
5659 | # qhasm: xmm0 ^= xmm8 | ||
5660 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
5661 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
5662 | pxor %xmm8,%xmm0 | ||
5663 | |||
5664 | # qhasm: xmm1 ^= xmm9 | ||
5665 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
5666 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
5667 | pxor %xmm9,%xmm1 | ||
5668 | |||
5669 | # qhasm: xmm2 ^= xmm10 | ||
5670 | # asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3 | ||
5671 | # asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2 | ||
5672 | pxor %xmm10,%xmm2 | ||
5673 | |||
5674 | # qhasm: xmm3 ^= xmm11 | ||
5675 | # asm 1: pxor <xmm11=int6464#12,<xmm3=int6464#4 | ||
5676 | # asm 2: pxor <xmm11=%xmm11,<xmm3=%xmm3 | ||
5677 | pxor %xmm11,%xmm3 | ||
5678 | |||
5679 | # qhasm: xmm4 ^= xmm12 | ||
5680 | # asm 1: pxor <xmm12=int6464#13,<xmm4=int6464#5 | ||
5681 | # asm 2: pxor <xmm12=%xmm12,<xmm4=%xmm4 | ||
5682 | pxor %xmm12,%xmm4 | ||
5683 | |||
5684 | # qhasm: xmm5 ^= xmm13 | ||
5685 | # asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6 | ||
5686 | # asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5 | ||
5687 | pxor %xmm13,%xmm5 | ||
5688 | |||
5689 | # qhasm: xmm6 ^= xmm14 | ||
5690 | # asm 1: pxor <xmm14=int6464#15,<xmm6=int6464#7 | ||
5691 | # asm 2: pxor <xmm14=%xmm14,<xmm6=%xmm6 | ||
5692 | pxor %xmm14,%xmm6 | ||
5693 | |||
5694 | # qhasm: xmm7 ^= xmm15 | ||
5695 | # asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8 | ||
5696 | # asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7 | ||
5697 | pxor %xmm15,%xmm7 | ||
5698 | |||
5699 | # qhasm: uint32323232 xmm8 >>= 8 | ||
5700 | # asm 1: psrld $8,<xmm8=int6464#9 | ||
5701 | # asm 2: psrld $8,<xmm8=%xmm8 | ||
5702 | psrld $8,%xmm8 | ||
5703 | |||
5704 | # qhasm: uint32323232 xmm9 >>= 8 | ||
5705 | # asm 1: psrld $8,<xmm9=int6464#10 | ||
5706 | # asm 2: psrld $8,<xmm9=%xmm9 | ||
5707 | psrld $8,%xmm9 | ||
5708 | |||
5709 | # qhasm: uint32323232 xmm10 >>= 8 | ||
5710 | # asm 1: psrld $8,<xmm10=int6464#11 | ||
5711 | # asm 2: psrld $8,<xmm10=%xmm10 | ||
5712 | psrld $8,%xmm10 | ||
5713 | |||
5714 | # qhasm: uint32323232 xmm11 >>= 8 | ||
5715 | # asm 1: psrld $8,<xmm11=int6464#12 | ||
5716 | # asm 2: psrld $8,<xmm11=%xmm11 | ||
5717 | psrld $8,%xmm11 | ||
5718 | |||
5719 | # qhasm: uint32323232 xmm12 >>= 8 | ||
5720 | # asm 1: psrld $8,<xmm12=int6464#13 | ||
5721 | # asm 2: psrld $8,<xmm12=%xmm12 | ||
5722 | psrld $8,%xmm12 | ||
5723 | |||
5724 | # qhasm: uint32323232 xmm13 >>= 8 | ||
5725 | # asm 1: psrld $8,<xmm13=int6464#14 | ||
5726 | # asm 2: psrld $8,<xmm13=%xmm13 | ||
5727 | psrld $8,%xmm13 | ||
5728 | |||
5729 | # qhasm: uint32323232 xmm14 >>= 8 | ||
5730 | # asm 1: psrld $8,<xmm14=int6464#15 | ||
5731 | # asm 2: psrld $8,<xmm14=%xmm14 | ||
5732 | psrld $8,%xmm14 | ||
5733 | |||
5734 | # qhasm: uint32323232 xmm15 >>= 8 | ||
5735 | # asm 1: psrld $8,<xmm15=int6464#16 | ||
5736 | # asm 2: psrld $8,<xmm15=%xmm15 | ||
5737 | psrld $8,%xmm15 | ||
5738 | |||
5739 | # qhasm: xmm0 ^= xmm8 | ||
5740 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
5741 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
5742 | pxor %xmm8,%xmm0 | ||
5743 | |||
5744 | # qhasm: xmm1 ^= xmm9 | ||
5745 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
5746 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
5747 | pxor %xmm9,%xmm1 | ||
5748 | |||
5749 | # qhasm: xmm2 ^= xmm10 | ||
5750 | # asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3 | ||
5751 | # asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2 | ||
5752 | pxor %xmm10,%xmm2 | ||
5753 | |||
5754 | # qhasm: xmm3 ^= xmm11 | ||
5755 | # asm 1: pxor <xmm11=int6464#12,<xmm3=int6464#4 | ||
5756 | # asm 2: pxor <xmm11=%xmm11,<xmm3=%xmm3 | ||
5757 | pxor %xmm11,%xmm3 | ||
5758 | |||
5759 | # qhasm: xmm4 ^= xmm12 | ||
5760 | # asm 1: pxor <xmm12=int6464#13,<xmm4=int6464#5 | ||
5761 | # asm 2: pxor <xmm12=%xmm12,<xmm4=%xmm4 | ||
5762 | pxor %xmm12,%xmm4 | ||
5763 | |||
5764 | # qhasm: xmm5 ^= xmm13 | ||
5765 | # asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6 | ||
5766 | # asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5 | ||
5767 | pxor %xmm13,%xmm5 | ||
5768 | |||
5769 | # qhasm: xmm6 ^= xmm14 | ||
5770 | # asm 1: pxor <xmm14=int6464#15,<xmm6=int6464#7 | ||
5771 | # asm 2: pxor <xmm14=%xmm14,<xmm6=%xmm6 | ||
5772 | pxor %xmm14,%xmm6 | ||
5773 | |||
5774 | # qhasm: xmm7 ^= xmm15 | ||
5775 | # asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8 | ||
5776 | # asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7 | ||
5777 | pxor %xmm15,%xmm7 | ||
5778 | |||
5779 | # qhasm: *(int128 *)(c + 512) = xmm0 | ||
5780 | # asm 1: movdqa <xmm0=int6464#1,512(<c=int64#1) | ||
5781 | # asm 2: movdqa <xmm0=%xmm0,512(<c=%rdi) | ||
5782 | movdqa %xmm0,512(%rdi) | ||
5783 | |||
5784 | # qhasm: *(int128 *)(c + 528) = xmm1 | ||
5785 | # asm 1: movdqa <xmm1=int6464#2,528(<c=int64#1) | ||
5786 | # asm 2: movdqa <xmm1=%xmm1,528(<c=%rdi) | ||
5787 | movdqa %xmm1,528(%rdi) | ||
5788 | |||
5789 | # qhasm: *(int128 *)(c + 544) = xmm2 | ||
5790 | # asm 1: movdqa <xmm2=int6464#3,544(<c=int64#1) | ||
5791 | # asm 2: movdqa <xmm2=%xmm2,544(<c=%rdi) | ||
5792 | movdqa %xmm2,544(%rdi) | ||
5793 | |||
5794 | # qhasm: *(int128 *)(c + 560) = xmm3 | ||
5795 | # asm 1: movdqa <xmm3=int6464#4,560(<c=int64#1) | ||
5796 | # asm 2: movdqa <xmm3=%xmm3,560(<c=%rdi) | ||
5797 | movdqa %xmm3,560(%rdi) | ||
5798 | |||
5799 | # qhasm: *(int128 *)(c + 576) = xmm4 | ||
5800 | # asm 1: movdqa <xmm4=int6464#5,576(<c=int64#1) | ||
5801 | # asm 2: movdqa <xmm4=%xmm4,576(<c=%rdi) | ||
5802 | movdqa %xmm4,576(%rdi) | ||
5803 | |||
5804 | # qhasm: *(int128 *)(c + 592) = xmm5 | ||
5805 | # asm 1: movdqa <xmm5=int6464#6,592(<c=int64#1) | ||
5806 | # asm 2: movdqa <xmm5=%xmm5,592(<c=%rdi) | ||
5807 | movdqa %xmm5,592(%rdi) | ||
5808 | |||
5809 | # qhasm: *(int128 *)(c + 608) = xmm6 | ||
5810 | # asm 1: movdqa <xmm6=int6464#7,608(<c=int64#1) | ||
5811 | # asm 2: movdqa <xmm6=%xmm6,608(<c=%rdi) | ||
5812 | movdqa %xmm6,608(%rdi) | ||
5813 | |||
5814 | # qhasm: *(int128 *)(c + 624) = xmm7 | ||
5815 | # asm 1: movdqa <xmm7=int6464#8,624(<c=int64#1) | ||
5816 | # asm 2: movdqa <xmm7=%xmm7,624(<c=%rdi) | ||
5817 | movdqa %xmm7,624(%rdi) | ||
5818 | |||
5819 | # qhasm: xmm0 ^= ONE | ||
5820 | # asm 1: pxor ONE,<xmm0=int6464#1 | ||
5821 | # asm 2: pxor ONE,<xmm0=%xmm0 | ||
5822 | pxor ONE,%xmm0 | ||
5823 | |||
5824 | # qhasm: xmm1 ^= ONE | ||
5825 | # asm 1: pxor ONE,<xmm1=int6464#2 | ||
5826 | # asm 2: pxor ONE,<xmm1=%xmm1 | ||
5827 | pxor ONE,%xmm1 | ||
5828 | |||
5829 | # qhasm: xmm5 ^= ONE | ||
5830 | # asm 1: pxor ONE,<xmm5=int6464#6 | ||
5831 | # asm 2: pxor ONE,<xmm5=%xmm5 | ||
5832 | pxor ONE,%xmm5 | ||
5833 | |||
5834 | # qhasm: xmm6 ^= ONE | ||
5835 | # asm 1: pxor ONE,<xmm6=int6464#7 | ||
5836 | # asm 2: pxor ONE,<xmm6=%xmm6 | ||
5837 | pxor ONE,%xmm6 | ||
5838 | |||
5839 | # qhasm: shuffle bytes of xmm0 by ROTB | ||
5840 | # asm 1: pshufb ROTB,<xmm0=int6464#1 | ||
5841 | # asm 2: pshufb ROTB,<xmm0=%xmm0 | ||
5842 | pshufb ROTB,%xmm0 | ||
5843 | |||
5844 | # qhasm: shuffle bytes of xmm1 by ROTB | ||
5845 | # asm 1: pshufb ROTB,<xmm1=int6464#2 | ||
5846 | # asm 2: pshufb ROTB,<xmm1=%xmm1 | ||
5847 | pshufb ROTB,%xmm1 | ||
5848 | |||
5849 | # qhasm: shuffle bytes of xmm2 by ROTB | ||
5850 | # asm 1: pshufb ROTB,<xmm2=int6464#3 | ||
5851 | # asm 2: pshufb ROTB,<xmm2=%xmm2 | ||
5852 | pshufb ROTB,%xmm2 | ||
5853 | |||
5854 | # qhasm: shuffle bytes of xmm3 by ROTB | ||
5855 | # asm 1: pshufb ROTB,<xmm3=int6464#4 | ||
5856 | # asm 2: pshufb ROTB,<xmm3=%xmm3 | ||
5857 | pshufb ROTB,%xmm3 | ||
5858 | |||
5859 | # qhasm: shuffle bytes of xmm4 by ROTB | ||
5860 | # asm 1: pshufb ROTB,<xmm4=int6464#5 | ||
5861 | # asm 2: pshufb ROTB,<xmm4=%xmm4 | ||
5862 | pshufb ROTB,%xmm4 | ||
5863 | |||
5864 | # qhasm: shuffle bytes of xmm5 by ROTB | ||
5865 | # asm 1: pshufb ROTB,<xmm5=int6464#6 | ||
5866 | # asm 2: pshufb ROTB,<xmm5=%xmm5 | ||
5867 | pshufb ROTB,%xmm5 | ||
5868 | |||
5869 | # qhasm: shuffle bytes of xmm6 by ROTB | ||
5870 | # asm 1: pshufb ROTB,<xmm6=int6464#7 | ||
5871 | # asm 2: pshufb ROTB,<xmm6=%xmm6 | ||
5872 | pshufb ROTB,%xmm6 | ||
5873 | |||
5874 | # qhasm: shuffle bytes of xmm7 by ROTB | ||
5875 | # asm 1: pshufb ROTB,<xmm7=int6464#8 | ||
5876 | # asm 2: pshufb ROTB,<xmm7=%xmm7 | ||
5877 | pshufb ROTB,%xmm7 | ||
5878 | |||
5879 | # qhasm: xmm5 ^= xmm6 | ||
5880 | # asm 1: pxor <xmm6=int6464#7,<xmm5=int6464#6 | ||
5881 | # asm 2: pxor <xmm6=%xmm6,<xmm5=%xmm5 | ||
5882 | pxor %xmm6,%xmm5 | ||
5883 | |||
5884 | # qhasm: xmm2 ^= xmm1 | ||
5885 | # asm 1: pxor <xmm1=int6464#2,<xmm2=int6464#3 | ||
5886 | # asm 2: pxor <xmm1=%xmm1,<xmm2=%xmm2 | ||
5887 | pxor %xmm1,%xmm2 | ||
5888 | |||
5889 | # qhasm: xmm5 ^= xmm0 | ||
5890 | # asm 1: pxor <xmm0=int6464#1,<xmm5=int6464#6 | ||
5891 | # asm 2: pxor <xmm0=%xmm0,<xmm5=%xmm5 | ||
5892 | pxor %xmm0,%xmm5 | ||
5893 | |||
5894 | # qhasm: xmm6 ^= xmm2 | ||
5895 | # asm 1: pxor <xmm2=int6464#3,<xmm6=int6464#7 | ||
5896 | # asm 2: pxor <xmm2=%xmm2,<xmm6=%xmm6 | ||
5897 | pxor %xmm2,%xmm6 | ||
5898 | |||
5899 | # qhasm: xmm3 ^= xmm0 | ||
5900 | # asm 1: pxor <xmm0=int6464#1,<xmm3=int6464#4 | ||
5901 | # asm 2: pxor <xmm0=%xmm0,<xmm3=%xmm3 | ||
5902 | pxor %xmm0,%xmm3 | ||
5903 | |||
5904 | # qhasm: xmm6 ^= xmm3 | ||
5905 | # asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7 | ||
5906 | # asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6 | ||
5907 | pxor %xmm3,%xmm6 | ||
5908 | |||
5909 | # qhasm: xmm3 ^= xmm7 | ||
5910 | # asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4 | ||
5911 | # asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3 | ||
5912 | pxor %xmm7,%xmm3 | ||
5913 | |||
5914 | # qhasm: xmm3 ^= xmm4 | ||
5915 | # asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4 | ||
5916 | # asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3 | ||
5917 | pxor %xmm4,%xmm3 | ||
5918 | |||
5919 | # qhasm: xmm7 ^= xmm5 | ||
5920 | # asm 1: pxor <xmm5=int6464#6,<xmm7=int6464#8 | ||
5921 | # asm 2: pxor <xmm5=%xmm5,<xmm7=%xmm7 | ||
5922 | pxor %xmm5,%xmm7 | ||
5923 | |||
5924 | # qhasm: xmm3 ^= xmm1 | ||
5925 | # asm 1: pxor <xmm1=int6464#2,<xmm3=int6464#4 | ||
5926 | # asm 2: pxor <xmm1=%xmm1,<xmm3=%xmm3 | ||
5927 | pxor %xmm1,%xmm3 | ||
5928 | |||
5929 | # qhasm: xmm4 ^= xmm5 | ||
5930 | # asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5 | ||
5931 | # asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4 | ||
5932 | pxor %xmm5,%xmm4 | ||
5933 | |||
5934 | # qhasm: xmm2 ^= xmm7 | ||
5935 | # asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3 | ||
5936 | # asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2 | ||
5937 | pxor %xmm7,%xmm2 | ||
5938 | |||
5939 | # qhasm: xmm1 ^= xmm5 | ||
5940 | # asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2 | ||
5941 | # asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1 | ||
5942 | pxor %xmm5,%xmm1 | ||
5943 | |||
5944 | # qhasm: xmm11 = xmm7 | ||
5945 | # asm 1: movdqa <xmm7=int6464#8,>xmm11=int6464#9 | ||
5946 | # asm 2: movdqa <xmm7=%xmm7,>xmm11=%xmm8 | ||
5947 | movdqa %xmm7,%xmm8 | ||
5948 | |||
5949 | # qhasm: xmm10 = xmm1 | ||
5950 | # asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10 | ||
5951 | # asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9 | ||
5952 | movdqa %xmm1,%xmm9 | ||
5953 | |||
5954 | # qhasm: xmm9 = xmm5 | ||
5955 | # asm 1: movdqa <xmm5=int6464#6,>xmm9=int6464#11 | ||
5956 | # asm 2: movdqa <xmm5=%xmm5,>xmm9=%xmm10 | ||
5957 | movdqa %xmm5,%xmm10 | ||
5958 | |||
5959 | # qhasm: xmm13 = xmm2 | ||
5960 | # asm 1: movdqa <xmm2=int6464#3,>xmm13=int6464#12 | ||
5961 | # asm 2: movdqa <xmm2=%xmm2,>xmm13=%xmm11 | ||
5962 | movdqa %xmm2,%xmm11 | ||
5963 | |||
5964 | # qhasm: xmm12 = xmm6 | ||
5965 | # asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#13 | ||
5966 | # asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm12 | ||
5967 | movdqa %xmm6,%xmm12 | ||
5968 | |||
5969 | # qhasm: xmm11 ^= xmm4 | ||
5970 | # asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#9 | ||
5971 | # asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm8 | ||
5972 | pxor %xmm4,%xmm8 | ||
5973 | |||
5974 | # qhasm: xmm10 ^= xmm2 | ||
5975 | # asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#10 | ||
5976 | # asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm9 | ||
5977 | pxor %xmm2,%xmm9 | ||
5978 | |||
5979 | # qhasm: xmm9 ^= xmm3 | ||
5980 | # asm 1: pxor <xmm3=int6464#4,<xmm9=int6464#11 | ||
5981 | # asm 2: pxor <xmm3=%xmm3,<xmm9=%xmm10 | ||
5982 | pxor %xmm3,%xmm10 | ||
5983 | |||
5984 | # qhasm: xmm13 ^= xmm4 | ||
5985 | # asm 1: pxor <xmm4=int6464#5,<xmm13=int6464#12 | ||
5986 | # asm 2: pxor <xmm4=%xmm4,<xmm13=%xmm11 | ||
5987 | pxor %xmm4,%xmm11 | ||
5988 | |||
5989 | # qhasm: xmm12 ^= xmm0 | ||
5990 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13 | ||
5991 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12 | ||
5992 | pxor %xmm0,%xmm12 | ||
5993 | |||
5994 | # qhasm: xmm14 = xmm11 | ||
5995 | # asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14 | ||
5996 | # asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13 | ||
5997 | movdqa %xmm8,%xmm13 | ||
5998 | |||
5999 | # qhasm: xmm8 = xmm10 | ||
6000 | # asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15 | ||
6001 | # asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14 | ||
6002 | movdqa %xmm9,%xmm14 | ||
6003 | |||
6004 | # qhasm: xmm15 = xmm11 | ||
6005 | # asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16 | ||
6006 | # asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15 | ||
6007 | movdqa %xmm8,%xmm15 | ||
6008 | |||
6009 | # qhasm: xmm10 |= xmm9 | ||
6010 | # asm 1: por <xmm9=int6464#11,<xmm10=int6464#10 | ||
6011 | # asm 2: por <xmm9=%xmm10,<xmm10=%xmm9 | ||
6012 | por %xmm10,%xmm9 | ||
6013 | |||
6014 | # qhasm: xmm11 |= xmm12 | ||
6015 | # asm 1: por <xmm12=int6464#13,<xmm11=int6464#9 | ||
6016 | # asm 2: por <xmm12=%xmm12,<xmm11=%xmm8 | ||
6017 | por %xmm12,%xmm8 | ||
6018 | |||
6019 | # qhasm: xmm15 ^= xmm8 | ||
6020 | # asm 1: pxor <xmm8=int6464#15,<xmm15=int6464#16 | ||
6021 | # asm 2: pxor <xmm8=%xmm14,<xmm15=%xmm15 | ||
6022 | pxor %xmm14,%xmm15 | ||
6023 | |||
6024 | # qhasm: xmm14 &= xmm12 | ||
6025 | # asm 1: pand <xmm12=int6464#13,<xmm14=int6464#14 | ||
6026 | # asm 2: pand <xmm12=%xmm12,<xmm14=%xmm13 | ||
6027 | pand %xmm12,%xmm13 | ||
6028 | |||
6029 | # qhasm: xmm8 &= xmm9 | ||
6030 | # asm 1: pand <xmm9=int6464#11,<xmm8=int6464#15 | ||
6031 | # asm 2: pand <xmm9=%xmm10,<xmm8=%xmm14 | ||
6032 | pand %xmm10,%xmm14 | ||
6033 | |||
6034 | # qhasm: xmm12 ^= xmm9 | ||
6035 | # asm 1: pxor <xmm9=int6464#11,<xmm12=int6464#13 | ||
6036 | # asm 2: pxor <xmm9=%xmm10,<xmm12=%xmm12 | ||
6037 | pxor %xmm10,%xmm12 | ||
6038 | |||
6039 | # qhasm: xmm15 &= xmm12 | ||
6040 | # asm 1: pand <xmm12=int6464#13,<xmm15=int6464#16 | ||
6041 | # asm 2: pand <xmm12=%xmm12,<xmm15=%xmm15 | ||
6042 | pand %xmm12,%xmm15 | ||
6043 | |||
6044 | # qhasm: xmm12 = xmm3 | ||
6045 | # asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#11 | ||
6046 | # asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm10 | ||
6047 | movdqa %xmm3,%xmm10 | ||
6048 | |||
6049 | # qhasm: xmm12 ^= xmm0 | ||
6050 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#11 | ||
6051 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm10 | ||
6052 | pxor %xmm0,%xmm10 | ||
6053 | |||
6054 | # qhasm: xmm13 &= xmm12 | ||
6055 | # asm 1: pand <xmm12=int6464#11,<xmm13=int6464#12 | ||
6056 | # asm 2: pand <xmm12=%xmm10,<xmm13=%xmm11 | ||
6057 | pand %xmm10,%xmm11 | ||
6058 | |||
6059 | # qhasm: xmm11 ^= xmm13 | ||
6060 | # asm 1: pxor <xmm13=int6464#12,<xmm11=int6464#9 | ||
6061 | # asm 2: pxor <xmm13=%xmm11,<xmm11=%xmm8 | ||
6062 | pxor %xmm11,%xmm8 | ||
6063 | |||
6064 | # qhasm: xmm10 ^= xmm13 | ||
6065 | # asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10 | ||
6066 | # asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9 | ||
6067 | pxor %xmm11,%xmm9 | ||
6068 | |||
6069 | # qhasm: xmm13 = xmm7 | ||
6070 | # asm 1: movdqa <xmm7=int6464#8,>xmm13=int6464#11 | ||
6071 | # asm 2: movdqa <xmm7=%xmm7,>xmm13=%xmm10 | ||
6072 | movdqa %xmm7,%xmm10 | ||
6073 | |||
6074 | # qhasm: xmm13 ^= xmm1 | ||
6075 | # asm 1: pxor <xmm1=int6464#2,<xmm13=int6464#11 | ||
6076 | # asm 2: pxor <xmm1=%xmm1,<xmm13=%xmm10 | ||
6077 | pxor %xmm1,%xmm10 | ||
6078 | |||
6079 | # qhasm: xmm12 = xmm5 | ||
6080 | # asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#12 | ||
6081 | # asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm11 | ||
6082 | movdqa %xmm5,%xmm11 | ||
6083 | |||
6084 | # qhasm: xmm9 = xmm13 | ||
6085 | # asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13 | ||
6086 | # asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12 | ||
6087 | movdqa %xmm10,%xmm12 | ||
6088 | |||
6089 | # qhasm: xmm12 ^= xmm6 | ||
6090 | # asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#12 | ||
6091 | # asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm11 | ||
6092 | pxor %xmm6,%xmm11 | ||
6093 | |||
6094 | # qhasm: xmm9 |= xmm12 | ||
6095 | # asm 1: por <xmm12=int6464#12,<xmm9=int6464#13 | ||
6096 | # asm 2: por <xmm12=%xmm11,<xmm9=%xmm12 | ||
6097 | por %xmm11,%xmm12 | ||
6098 | |||
6099 | # qhasm: xmm13 &= xmm12 | ||
6100 | # asm 1: pand <xmm12=int6464#12,<xmm13=int6464#11 | ||
6101 | # asm 2: pand <xmm12=%xmm11,<xmm13=%xmm10 | ||
6102 | pand %xmm11,%xmm10 | ||
6103 | |||
6104 | # qhasm: xmm8 ^= xmm13 | ||
6105 | # asm 1: pxor <xmm13=int6464#11,<xmm8=int6464#15 | ||
6106 | # asm 2: pxor <xmm13=%xmm10,<xmm8=%xmm14 | ||
6107 | pxor %xmm10,%xmm14 | ||
6108 | |||
6109 | # qhasm: xmm11 ^= xmm15 | ||
6110 | # asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#9 | ||
6111 | # asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm8 | ||
6112 | pxor %xmm15,%xmm8 | ||
6113 | |||
6114 | # qhasm: xmm10 ^= xmm14 | ||
6115 | # asm 1: pxor <xmm14=int6464#14,<xmm10=int6464#10 | ||
6116 | # asm 2: pxor <xmm14=%xmm13,<xmm10=%xmm9 | ||
6117 | pxor %xmm13,%xmm9 | ||
6118 | |||
6119 | # qhasm: xmm9 ^= xmm15 | ||
6120 | # asm 1: pxor <xmm15=int6464#16,<xmm9=int6464#13 | ||
6121 | # asm 2: pxor <xmm15=%xmm15,<xmm9=%xmm12 | ||
6122 | pxor %xmm15,%xmm12 | ||
6123 | |||
6124 | # qhasm: xmm8 ^= xmm14 | ||
6125 | # asm 1: pxor <xmm14=int6464#14,<xmm8=int6464#15 | ||
6126 | # asm 2: pxor <xmm14=%xmm13,<xmm8=%xmm14 | ||
6127 | pxor %xmm13,%xmm14 | ||
6128 | |||
6129 | # qhasm: xmm9 ^= xmm14 | ||
6130 | # asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13 | ||
6131 | # asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12 | ||
6132 | pxor %xmm13,%xmm12 | ||
6133 | |||
6134 | # qhasm: xmm12 = xmm2 | ||
6135 | # asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#11 | ||
6136 | # asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm10 | ||
6137 | movdqa %xmm2,%xmm10 | ||
6138 | |||
6139 | # qhasm: xmm13 = xmm4 | ||
6140 | # asm 1: movdqa <xmm4=int6464#5,>xmm13=int6464#12 | ||
6141 | # asm 2: movdqa <xmm4=%xmm4,>xmm13=%xmm11 | ||
6142 | movdqa %xmm4,%xmm11 | ||
6143 | |||
6144 | # qhasm: xmm14 = xmm1 | ||
6145 | # asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14 | ||
6146 | # asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13 | ||
6147 | movdqa %xmm1,%xmm13 | ||
6148 | |||
6149 | # qhasm: xmm15 = xmm7 | ||
6150 | # asm 1: movdqa <xmm7=int6464#8,>xmm15=int6464#16 | ||
6151 | # asm 2: movdqa <xmm7=%xmm7,>xmm15=%xmm15 | ||
6152 | movdqa %xmm7,%xmm15 | ||
6153 | |||
6154 | # qhasm: xmm12 &= xmm3 | ||
6155 | # asm 1: pand <xmm3=int6464#4,<xmm12=int6464#11 | ||
6156 | # asm 2: pand <xmm3=%xmm3,<xmm12=%xmm10 | ||
6157 | pand %xmm3,%xmm10 | ||
6158 | |||
6159 | # qhasm: xmm13 &= xmm0 | ||
6160 | # asm 1: pand <xmm0=int6464#1,<xmm13=int6464#12 | ||
6161 | # asm 2: pand <xmm0=%xmm0,<xmm13=%xmm11 | ||
6162 | pand %xmm0,%xmm11 | ||
6163 | |||
6164 | # qhasm: xmm14 &= xmm5 | ||
6165 | # asm 1: pand <xmm5=int6464#6,<xmm14=int6464#14 | ||
6166 | # asm 2: pand <xmm5=%xmm5,<xmm14=%xmm13 | ||
6167 | pand %xmm5,%xmm13 | ||
6168 | |||
6169 | # qhasm: xmm15 |= xmm6 | ||
6170 | # asm 1: por <xmm6=int6464#7,<xmm15=int6464#16 | ||
6171 | # asm 2: por <xmm6=%xmm6,<xmm15=%xmm15 | ||
6172 | por %xmm6,%xmm15 | ||
6173 | |||
6174 | # qhasm: xmm11 ^= xmm12 | ||
6175 | # asm 1: pxor <xmm12=int6464#11,<xmm11=int6464#9 | ||
6176 | # asm 2: pxor <xmm12=%xmm10,<xmm11=%xmm8 | ||
6177 | pxor %xmm10,%xmm8 | ||
6178 | |||
6179 | # qhasm: xmm10 ^= xmm13 | ||
6180 | # asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10 | ||
6181 | # asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9 | ||
6182 | pxor %xmm11,%xmm9 | ||
6183 | |||
6184 | # qhasm: xmm9 ^= xmm14 | ||
6185 | # asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13 | ||
6186 | # asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12 | ||
6187 | pxor %xmm13,%xmm12 | ||
6188 | |||
6189 | # qhasm: xmm8 ^= xmm15 | ||
6190 | # asm 1: pxor <xmm15=int6464#16,<xmm8=int6464#15 | ||
6191 | # asm 2: pxor <xmm15=%xmm15,<xmm8=%xmm14 | ||
6192 | pxor %xmm15,%xmm14 | ||
6193 | |||
6194 | # qhasm: xmm12 = xmm11 | ||
6195 | # asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11 | ||
6196 | # asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10 | ||
6197 | movdqa %xmm8,%xmm10 | ||
6198 | |||
6199 | # qhasm: xmm12 ^= xmm10 | ||
6200 | # asm 1: pxor <xmm10=int6464#10,<xmm12=int6464#11 | ||
6201 | # asm 2: pxor <xmm10=%xmm9,<xmm12=%xmm10 | ||
6202 | pxor %xmm9,%xmm10 | ||
6203 | |||
6204 | # qhasm: xmm11 &= xmm9 | ||
6205 | # asm 1: pand <xmm9=int6464#13,<xmm11=int6464#9 | ||
6206 | # asm 2: pand <xmm9=%xmm12,<xmm11=%xmm8 | ||
6207 | pand %xmm12,%xmm8 | ||
6208 | |||
6209 | # qhasm: xmm14 = xmm8 | ||
6210 | # asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12 | ||
6211 | # asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11 | ||
6212 | movdqa %xmm14,%xmm11 | ||
6213 | |||
6214 | # qhasm: xmm14 ^= xmm11 | ||
6215 | # asm 1: pxor <xmm11=int6464#9,<xmm14=int6464#12 | ||
6216 | # asm 2: pxor <xmm11=%xmm8,<xmm14=%xmm11 | ||
6217 | pxor %xmm8,%xmm11 | ||
6218 | |||
6219 | # qhasm: xmm15 = xmm12 | ||
6220 | # asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14 | ||
6221 | # asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13 | ||
6222 | movdqa %xmm10,%xmm13 | ||
6223 | |||
6224 | # qhasm: xmm15 &= xmm14 | ||
6225 | # asm 1: pand <xmm14=int6464#12,<xmm15=int6464#14 | ||
6226 | # asm 2: pand <xmm14=%xmm11,<xmm15=%xmm13 | ||
6227 | pand %xmm11,%xmm13 | ||
6228 | |||
6229 | # qhasm: xmm15 ^= xmm10 | ||
6230 | # asm 1: pxor <xmm10=int6464#10,<xmm15=int6464#14 | ||
6231 | # asm 2: pxor <xmm10=%xmm9,<xmm15=%xmm13 | ||
6232 | pxor %xmm9,%xmm13 | ||
6233 | |||
6234 | # qhasm: xmm13 = xmm9 | ||
6235 | # asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16 | ||
6236 | # asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15 | ||
6237 | movdqa %xmm12,%xmm15 | ||
6238 | |||
6239 | # qhasm: xmm13 ^= xmm8 | ||
6240 | # asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16 | ||
6241 | # asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15 | ||
6242 | pxor %xmm14,%xmm15 | ||
6243 | |||
6244 | # qhasm: xmm11 ^= xmm10 | ||
6245 | # asm 1: pxor <xmm10=int6464#10,<xmm11=int6464#9 | ||
6246 | # asm 2: pxor <xmm10=%xmm9,<xmm11=%xmm8 | ||
6247 | pxor %xmm9,%xmm8 | ||
6248 | |||
6249 | # qhasm: xmm13 &= xmm11 | ||
6250 | # asm 1: pand <xmm11=int6464#9,<xmm13=int6464#16 | ||
6251 | # asm 2: pand <xmm11=%xmm8,<xmm13=%xmm15 | ||
6252 | pand %xmm8,%xmm15 | ||
6253 | |||
6254 | # qhasm: xmm13 ^= xmm8 | ||
6255 | # asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16 | ||
6256 | # asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15 | ||
6257 | pxor %xmm14,%xmm15 | ||
6258 | |||
6259 | # qhasm: xmm9 ^= xmm13 | ||
6260 | # asm 1: pxor <xmm13=int6464#16,<xmm9=int6464#13 | ||
6261 | # asm 2: pxor <xmm13=%xmm15,<xmm9=%xmm12 | ||
6262 | pxor %xmm15,%xmm12 | ||
6263 | |||
6264 | # qhasm: xmm10 = xmm14 | ||
6265 | # asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9 | ||
6266 | # asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8 | ||
6267 | movdqa %xmm11,%xmm8 | ||
6268 | |||
6269 | # qhasm: xmm10 ^= xmm13 | ||
6270 | # asm 1: pxor <xmm13=int6464#16,<xmm10=int6464#9 | ||
6271 | # asm 2: pxor <xmm13=%xmm15,<xmm10=%xmm8 | ||
6272 | pxor %xmm15,%xmm8 | ||
6273 | |||
6274 | # qhasm: xmm10 &= xmm8 | ||
6275 | # asm 1: pand <xmm8=int6464#15,<xmm10=int6464#9 | ||
6276 | # asm 2: pand <xmm8=%xmm14,<xmm10=%xmm8 | ||
6277 | pand %xmm14,%xmm8 | ||
6278 | |||
6279 | # qhasm: xmm9 ^= xmm10 | ||
6280 | # asm 1: pxor <xmm10=int6464#9,<xmm9=int6464#13 | ||
6281 | # asm 2: pxor <xmm10=%xmm8,<xmm9=%xmm12 | ||
6282 | pxor %xmm8,%xmm12 | ||
6283 | |||
6284 | # qhasm: xmm14 ^= xmm10 | ||
6285 | # asm 1: pxor <xmm10=int6464#9,<xmm14=int6464#12 | ||
6286 | # asm 2: pxor <xmm10=%xmm8,<xmm14=%xmm11 | ||
6287 | pxor %xmm8,%xmm11 | ||
6288 | |||
6289 | # qhasm: xmm14 &= xmm15 | ||
6290 | # asm 1: pand <xmm15=int6464#14,<xmm14=int6464#12 | ||
6291 | # asm 2: pand <xmm15=%xmm13,<xmm14=%xmm11 | ||
6292 | pand %xmm13,%xmm11 | ||
6293 | |||
6294 | # qhasm: xmm14 ^= xmm12 | ||
6295 | # asm 1: pxor <xmm12=int6464#11,<xmm14=int6464#12 | ||
6296 | # asm 2: pxor <xmm12=%xmm10,<xmm14=%xmm11 | ||
6297 | pxor %xmm10,%xmm11 | ||
6298 | |||
6299 | # qhasm: xmm12 = xmm6 | ||
6300 | # asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#9 | ||
6301 | # asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm8 | ||
6302 | movdqa %xmm6,%xmm8 | ||
6303 | |||
6304 | # qhasm: xmm8 = xmm5 | ||
6305 | # asm 1: movdqa <xmm5=int6464#6,>xmm8=int6464#10 | ||
6306 | # asm 2: movdqa <xmm5=%xmm5,>xmm8=%xmm9 | ||
6307 | movdqa %xmm5,%xmm9 | ||
6308 | |||
6309 | # qhasm: xmm10 = xmm15 | ||
6310 | # asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11 | ||
6311 | # asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10 | ||
6312 | movdqa %xmm13,%xmm10 | ||
6313 | |||
6314 | # qhasm: xmm10 ^= xmm14 | ||
6315 | # asm 1: pxor <xmm14=int6464#12,<xmm10=int6464#11 | ||
6316 | # asm 2: pxor <xmm14=%xmm11,<xmm10=%xmm10 | ||
6317 | pxor %xmm11,%xmm10 | ||
6318 | |||
6319 | # qhasm: xmm10 &= xmm6 | ||
6320 | # asm 1: pand <xmm6=int6464#7,<xmm10=int6464#11 | ||
6321 | # asm 2: pand <xmm6=%xmm6,<xmm10=%xmm10 | ||
6322 | pand %xmm6,%xmm10 | ||
6323 | |||
6324 | # qhasm: xmm6 ^= xmm5 | ||
6325 | # asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7 | ||
6326 | # asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6 | ||
6327 | pxor %xmm5,%xmm6 | ||
6328 | |||
6329 | # qhasm: xmm6 &= xmm14 | ||
6330 | # asm 1: pand <xmm14=int6464#12,<xmm6=int6464#7 | ||
6331 | # asm 2: pand <xmm14=%xmm11,<xmm6=%xmm6 | ||
6332 | pand %xmm11,%xmm6 | ||
6333 | |||
6334 | # qhasm: xmm5 &= xmm15 | ||
6335 | # asm 1: pand <xmm15=int6464#14,<xmm5=int6464#6 | ||
6336 | # asm 2: pand <xmm15=%xmm13,<xmm5=%xmm5 | ||
6337 | pand %xmm13,%xmm5 | ||
6338 | |||
6339 | # qhasm: xmm6 ^= xmm5 | ||
6340 | # asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7 | ||
6341 | # asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6 | ||
6342 | pxor %xmm5,%xmm6 | ||
6343 | |||
6344 | # qhasm: xmm5 ^= xmm10 | ||
6345 | # asm 1: pxor <xmm10=int6464#11,<xmm5=int6464#6 | ||
6346 | # asm 2: pxor <xmm10=%xmm10,<xmm5=%xmm5 | ||
6347 | pxor %xmm10,%xmm5 | ||
6348 | |||
6349 | # qhasm: xmm12 ^= xmm0 | ||
6350 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#9 | ||
6351 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm8 | ||
6352 | pxor %xmm0,%xmm8 | ||
6353 | |||
6354 | # qhasm: xmm8 ^= xmm3 | ||
6355 | # asm 1: pxor <xmm3=int6464#4,<xmm8=int6464#10 | ||
6356 | # asm 2: pxor <xmm3=%xmm3,<xmm8=%xmm9 | ||
6357 | pxor %xmm3,%xmm9 | ||
6358 | |||
6359 | # qhasm: xmm15 ^= xmm13 | ||
6360 | # asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14 | ||
6361 | # asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13 | ||
6362 | pxor %xmm15,%xmm13 | ||
6363 | |||
6364 | # qhasm: xmm14 ^= xmm9 | ||
6365 | # asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12 | ||
6366 | # asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11 | ||
6367 | pxor %xmm12,%xmm11 | ||
6368 | |||
6369 | # qhasm: xmm11 = xmm15 | ||
6370 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
6371 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
6372 | movdqa %xmm13,%xmm10 | ||
6373 | |||
6374 | # qhasm: xmm11 ^= xmm14 | ||
6375 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
6376 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
6377 | pxor %xmm11,%xmm10 | ||
6378 | |||
6379 | # qhasm: xmm11 &= xmm12 | ||
6380 | # asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11 | ||
6381 | # asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10 | ||
6382 | pand %xmm8,%xmm10 | ||
6383 | |||
6384 | # qhasm: xmm12 ^= xmm8 | ||
6385 | # asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9 | ||
6386 | # asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8 | ||
6387 | pxor %xmm9,%xmm8 | ||
6388 | |||
6389 | # qhasm: xmm12 &= xmm14 | ||
6390 | # asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9 | ||
6391 | # asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8 | ||
6392 | pand %xmm11,%xmm8 | ||
6393 | |||
6394 | # qhasm: xmm8 &= xmm15 | ||
6395 | # asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10 | ||
6396 | # asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9 | ||
6397 | pand %xmm13,%xmm9 | ||
6398 | |||
6399 | # qhasm: xmm8 ^= xmm12 | ||
6400 | # asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10 | ||
6401 | # asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9 | ||
6402 | pxor %xmm8,%xmm9 | ||
6403 | |||
6404 | # qhasm: xmm12 ^= xmm11 | ||
6405 | # asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9 | ||
6406 | # asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8 | ||
6407 | pxor %xmm10,%xmm8 | ||
6408 | |||
6409 | # qhasm: xmm10 = xmm13 | ||
6410 | # asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11 | ||
6411 | # asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10 | ||
6412 | movdqa %xmm15,%xmm10 | ||
6413 | |||
6414 | # qhasm: xmm10 ^= xmm9 | ||
6415 | # asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11 | ||
6416 | # asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10 | ||
6417 | pxor %xmm12,%xmm10 | ||
6418 | |||
6419 | # qhasm: xmm10 &= xmm0 | ||
6420 | # asm 1: pand <xmm0=int6464#1,<xmm10=int6464#11 | ||
6421 | # asm 2: pand <xmm0=%xmm0,<xmm10=%xmm10 | ||
6422 | pand %xmm0,%xmm10 | ||
6423 | |||
6424 | # qhasm: xmm0 ^= xmm3 | ||
6425 | # asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1 | ||
6426 | # asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0 | ||
6427 | pxor %xmm3,%xmm0 | ||
6428 | |||
6429 | # qhasm: xmm0 &= xmm9 | ||
6430 | # asm 1: pand <xmm9=int6464#13,<xmm0=int6464#1 | ||
6431 | # asm 2: pand <xmm9=%xmm12,<xmm0=%xmm0 | ||
6432 | pand %xmm12,%xmm0 | ||
6433 | |||
6434 | # qhasm: xmm3 &= xmm13 | ||
6435 | # asm 1: pand <xmm13=int6464#16,<xmm3=int6464#4 | ||
6436 | # asm 2: pand <xmm13=%xmm15,<xmm3=%xmm3 | ||
6437 | pand %xmm15,%xmm3 | ||
6438 | |||
6439 | # qhasm: xmm0 ^= xmm3 | ||
6440 | # asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1 | ||
6441 | # asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0 | ||
6442 | pxor %xmm3,%xmm0 | ||
6443 | |||
6444 | # qhasm: xmm3 ^= xmm10 | ||
6445 | # asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4 | ||
6446 | # asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3 | ||
6447 | pxor %xmm10,%xmm3 | ||
6448 | |||
6449 | # qhasm: xmm6 ^= xmm12 | ||
6450 | # asm 1: pxor <xmm12=int6464#9,<xmm6=int6464#7 | ||
6451 | # asm 2: pxor <xmm12=%xmm8,<xmm6=%xmm6 | ||
6452 | pxor %xmm8,%xmm6 | ||
6453 | |||
6454 | # qhasm: xmm0 ^= xmm12 | ||
6455 | # asm 1: pxor <xmm12=int6464#9,<xmm0=int6464#1 | ||
6456 | # asm 2: pxor <xmm12=%xmm8,<xmm0=%xmm0 | ||
6457 | pxor %xmm8,%xmm0 | ||
6458 | |||
6459 | # qhasm: xmm5 ^= xmm8 | ||
6460 | # asm 1: pxor <xmm8=int6464#10,<xmm5=int6464#6 | ||
6461 | # asm 2: pxor <xmm8=%xmm9,<xmm5=%xmm5 | ||
6462 | pxor %xmm9,%xmm5 | ||
6463 | |||
6464 | # qhasm: xmm3 ^= xmm8 | ||
6465 | # asm 1: pxor <xmm8=int6464#10,<xmm3=int6464#4 | ||
6466 | # asm 2: pxor <xmm8=%xmm9,<xmm3=%xmm3 | ||
6467 | pxor %xmm9,%xmm3 | ||
6468 | |||
6469 | # qhasm: xmm12 = xmm7 | ||
6470 | # asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#9 | ||
6471 | # asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm8 | ||
6472 | movdqa %xmm7,%xmm8 | ||
6473 | |||
6474 | # qhasm: xmm8 = xmm1 | ||
6475 | # asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10 | ||
6476 | # asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9 | ||
6477 | movdqa %xmm1,%xmm9 | ||
6478 | |||
6479 | # qhasm: xmm12 ^= xmm4 | ||
6480 | # asm 1: pxor <xmm4=int6464#5,<xmm12=int6464#9 | ||
6481 | # asm 2: pxor <xmm4=%xmm4,<xmm12=%xmm8 | ||
6482 | pxor %xmm4,%xmm8 | ||
6483 | |||
6484 | # qhasm: xmm8 ^= xmm2 | ||
6485 | # asm 1: pxor <xmm2=int6464#3,<xmm8=int6464#10 | ||
6486 | # asm 2: pxor <xmm2=%xmm2,<xmm8=%xmm9 | ||
6487 | pxor %xmm2,%xmm9 | ||
6488 | |||
6489 | # qhasm: xmm11 = xmm15 | ||
6490 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
6491 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
6492 | movdqa %xmm13,%xmm10 | ||
6493 | |||
6494 | # qhasm: xmm11 ^= xmm14 | ||
6495 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
6496 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
6497 | pxor %xmm11,%xmm10 | ||
6498 | |||
6499 | # qhasm: xmm11 &= xmm12 | ||
6500 | # asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11 | ||
6501 | # asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10 | ||
6502 | pand %xmm8,%xmm10 | ||
6503 | |||
6504 | # qhasm: xmm12 ^= xmm8 | ||
6505 | # asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9 | ||
6506 | # asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8 | ||
6507 | pxor %xmm9,%xmm8 | ||
6508 | |||
6509 | # qhasm: xmm12 &= xmm14 | ||
6510 | # asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9 | ||
6511 | # asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8 | ||
6512 | pand %xmm11,%xmm8 | ||
6513 | |||
6514 | # qhasm: xmm8 &= xmm15 | ||
6515 | # asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10 | ||
6516 | # asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9 | ||
6517 | pand %xmm13,%xmm9 | ||
6518 | |||
6519 | # qhasm: xmm8 ^= xmm12 | ||
6520 | # asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10 | ||
6521 | # asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9 | ||
6522 | pxor %xmm8,%xmm9 | ||
6523 | |||
6524 | # qhasm: xmm12 ^= xmm11 | ||
6525 | # asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9 | ||
6526 | # asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8 | ||
6527 | pxor %xmm10,%xmm8 | ||
6528 | |||
6529 | # qhasm: xmm10 = xmm13 | ||
6530 | # asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11 | ||
6531 | # asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10 | ||
6532 | movdqa %xmm15,%xmm10 | ||
6533 | |||
6534 | # qhasm: xmm10 ^= xmm9 | ||
6535 | # asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11 | ||
6536 | # asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10 | ||
6537 | pxor %xmm12,%xmm10 | ||
6538 | |||
6539 | # qhasm: xmm10 &= xmm4 | ||
6540 | # asm 1: pand <xmm4=int6464#5,<xmm10=int6464#11 | ||
6541 | # asm 2: pand <xmm4=%xmm4,<xmm10=%xmm10 | ||
6542 | pand %xmm4,%xmm10 | ||
6543 | |||
6544 | # qhasm: xmm4 ^= xmm2 | ||
6545 | # asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5 | ||
6546 | # asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4 | ||
6547 | pxor %xmm2,%xmm4 | ||
6548 | |||
6549 | # qhasm: xmm4 &= xmm9 | ||
6550 | # asm 1: pand <xmm9=int6464#13,<xmm4=int6464#5 | ||
6551 | # asm 2: pand <xmm9=%xmm12,<xmm4=%xmm4 | ||
6552 | pand %xmm12,%xmm4 | ||
6553 | |||
6554 | # qhasm: xmm2 &= xmm13 | ||
6555 | # asm 1: pand <xmm13=int6464#16,<xmm2=int6464#3 | ||
6556 | # asm 2: pand <xmm13=%xmm15,<xmm2=%xmm2 | ||
6557 | pand %xmm15,%xmm2 | ||
6558 | |||
6559 | # qhasm: xmm4 ^= xmm2 | ||
6560 | # asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5 | ||
6561 | # asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4 | ||
6562 | pxor %xmm2,%xmm4 | ||
6563 | |||
6564 | # qhasm: xmm2 ^= xmm10 | ||
6565 | # asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3 | ||
6566 | # asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2 | ||
6567 | pxor %xmm10,%xmm2 | ||
6568 | |||
6569 | # qhasm: xmm15 ^= xmm13 | ||
6570 | # asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14 | ||
6571 | # asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13 | ||
6572 | pxor %xmm15,%xmm13 | ||
6573 | |||
6574 | # qhasm: xmm14 ^= xmm9 | ||
6575 | # asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12 | ||
6576 | # asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11 | ||
6577 | pxor %xmm12,%xmm11 | ||
6578 | |||
6579 | # qhasm: xmm11 = xmm15 | ||
6580 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
6581 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
6582 | movdqa %xmm13,%xmm10 | ||
6583 | |||
6584 | # qhasm: xmm11 ^= xmm14 | ||
6585 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
6586 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
6587 | pxor %xmm11,%xmm10 | ||
6588 | |||
6589 | # qhasm: xmm11 &= xmm7 | ||
6590 | # asm 1: pand <xmm7=int6464#8,<xmm11=int6464#11 | ||
6591 | # asm 2: pand <xmm7=%xmm7,<xmm11=%xmm10 | ||
6592 | pand %xmm7,%xmm10 | ||
6593 | |||
6594 | # qhasm: xmm7 ^= xmm1 | ||
6595 | # asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8 | ||
6596 | # asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7 | ||
6597 | pxor %xmm1,%xmm7 | ||
6598 | |||
6599 | # qhasm: xmm7 &= xmm14 | ||
6600 | # asm 1: pand <xmm14=int6464#12,<xmm7=int6464#8 | ||
6601 | # asm 2: pand <xmm14=%xmm11,<xmm7=%xmm7 | ||
6602 | pand %xmm11,%xmm7 | ||
6603 | |||
6604 | # qhasm: xmm1 &= xmm15 | ||
6605 | # asm 1: pand <xmm15=int6464#14,<xmm1=int6464#2 | ||
6606 | # asm 2: pand <xmm15=%xmm13,<xmm1=%xmm1 | ||
6607 | pand %xmm13,%xmm1 | ||
6608 | |||
6609 | # qhasm: xmm7 ^= xmm1 | ||
6610 | # asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8 | ||
6611 | # asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7 | ||
6612 | pxor %xmm1,%xmm7 | ||
6613 | |||
6614 | # qhasm: xmm1 ^= xmm11 | ||
6615 | # asm 1: pxor <xmm11=int6464#11,<xmm1=int6464#2 | ||
6616 | # asm 2: pxor <xmm11=%xmm10,<xmm1=%xmm1 | ||
6617 | pxor %xmm10,%xmm1 | ||
6618 | |||
6619 | # qhasm: xmm7 ^= xmm12 | ||
6620 | # asm 1: pxor <xmm12=int6464#9,<xmm7=int6464#8 | ||
6621 | # asm 2: pxor <xmm12=%xmm8,<xmm7=%xmm7 | ||
6622 | pxor %xmm8,%xmm7 | ||
6623 | |||
6624 | # qhasm: xmm4 ^= xmm12 | ||
6625 | # asm 1: pxor <xmm12=int6464#9,<xmm4=int6464#5 | ||
6626 | # asm 2: pxor <xmm12=%xmm8,<xmm4=%xmm4 | ||
6627 | pxor %xmm8,%xmm4 | ||
6628 | |||
6629 | # qhasm: xmm1 ^= xmm8 | ||
6630 | # asm 1: pxor <xmm8=int6464#10,<xmm1=int6464#2 | ||
6631 | # asm 2: pxor <xmm8=%xmm9,<xmm1=%xmm1 | ||
6632 | pxor %xmm9,%xmm1 | ||
6633 | |||
6634 | # qhasm: xmm2 ^= xmm8 | ||
6635 | # asm 1: pxor <xmm8=int6464#10,<xmm2=int6464#3 | ||
6636 | # asm 2: pxor <xmm8=%xmm9,<xmm2=%xmm2 | ||
6637 | pxor %xmm9,%xmm2 | ||
6638 | |||
6639 | # qhasm: xmm7 ^= xmm0 | ||
6640 | # asm 1: pxor <xmm0=int6464#1,<xmm7=int6464#8 | ||
6641 | # asm 2: pxor <xmm0=%xmm0,<xmm7=%xmm7 | ||
6642 | pxor %xmm0,%xmm7 | ||
6643 | |||
6644 | # qhasm: xmm1 ^= xmm6 | ||
6645 | # asm 1: pxor <xmm6=int6464#7,<xmm1=int6464#2 | ||
6646 | # asm 2: pxor <xmm6=%xmm6,<xmm1=%xmm1 | ||
6647 | pxor %xmm6,%xmm1 | ||
6648 | |||
6649 | # qhasm: xmm4 ^= xmm7 | ||
6650 | # asm 1: pxor <xmm7=int6464#8,<xmm4=int6464#5 | ||
6651 | # asm 2: pxor <xmm7=%xmm7,<xmm4=%xmm4 | ||
6652 | pxor %xmm7,%xmm4 | ||
6653 | |||
6654 | # qhasm: xmm6 ^= xmm0 | ||
6655 | # asm 1: pxor <xmm0=int6464#1,<xmm6=int6464#7 | ||
6656 | # asm 2: pxor <xmm0=%xmm0,<xmm6=%xmm6 | ||
6657 | pxor %xmm0,%xmm6 | ||
6658 | |||
6659 | # qhasm: xmm0 ^= xmm1 | ||
6660 | # asm 1: pxor <xmm1=int6464#2,<xmm0=int6464#1 | ||
6661 | # asm 2: pxor <xmm1=%xmm1,<xmm0=%xmm0 | ||
6662 | pxor %xmm1,%xmm0 | ||
6663 | |||
6664 | # qhasm: xmm1 ^= xmm5 | ||
6665 | # asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2 | ||
6666 | # asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1 | ||
6667 | pxor %xmm5,%xmm1 | ||
6668 | |||
6669 | # qhasm: xmm5 ^= xmm2 | ||
6670 | # asm 1: pxor <xmm2=int6464#3,<xmm5=int6464#6 | ||
6671 | # asm 2: pxor <xmm2=%xmm2,<xmm5=%xmm5 | ||
6672 | pxor %xmm2,%xmm5 | ||
6673 | |||
6674 | # qhasm: xmm4 ^= xmm5 | ||
6675 | # asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5 | ||
6676 | # asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4 | ||
6677 | pxor %xmm5,%xmm4 | ||
6678 | |||
6679 | # qhasm: xmm2 ^= xmm3 | ||
6680 | # asm 1: pxor <xmm3=int6464#4,<xmm2=int6464#3 | ||
6681 | # asm 2: pxor <xmm3=%xmm3,<xmm2=%xmm2 | ||
6682 | pxor %xmm3,%xmm2 | ||
6683 | |||
6684 | # qhasm: xmm3 ^= xmm5 | ||
6685 | # asm 1: pxor <xmm5=int6464#6,<xmm3=int6464#4 | ||
6686 | # asm 2: pxor <xmm5=%xmm5,<xmm3=%xmm3 | ||
6687 | pxor %xmm5,%xmm3 | ||
6688 | |||
6689 | # qhasm: xmm6 ^= xmm3 | ||
6690 | # asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7 | ||
6691 | # asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6 | ||
6692 | pxor %xmm3,%xmm6 | ||
6693 | |||
6694 | # qhasm: xmm3 ^= RCON | ||
6695 | # asm 1: pxor RCON,<xmm3=int6464#4 | ||
6696 | # asm 2: pxor RCON,<xmm3=%xmm3 | ||
6697 | pxor RCON,%xmm3 | ||
6698 | |||
6699 | # qhasm: shuffle bytes of xmm0 by EXPB0 | ||
6700 | # asm 1: pshufb EXPB0,<xmm0=int6464#1 | ||
6701 | # asm 2: pshufb EXPB0,<xmm0=%xmm0 | ||
6702 | pshufb EXPB0,%xmm0 | ||
6703 | |||
6704 | # qhasm: shuffle bytes of xmm1 by EXPB0 | ||
6705 | # asm 1: pshufb EXPB0,<xmm1=int6464#2 | ||
6706 | # asm 2: pshufb EXPB0,<xmm1=%xmm1 | ||
6707 | pshufb EXPB0,%xmm1 | ||
6708 | |||
6709 | # qhasm: shuffle bytes of xmm4 by EXPB0 | ||
6710 | # asm 1: pshufb EXPB0,<xmm4=int6464#5 | ||
6711 | # asm 2: pshufb EXPB0,<xmm4=%xmm4 | ||
6712 | pshufb EXPB0,%xmm4 | ||
6713 | |||
6714 | # qhasm: shuffle bytes of xmm6 by EXPB0 | ||
6715 | # asm 1: pshufb EXPB0,<xmm6=int6464#7 | ||
6716 | # asm 2: pshufb EXPB0,<xmm6=%xmm6 | ||
6717 | pshufb EXPB0,%xmm6 | ||
6718 | |||
6719 | # qhasm: shuffle bytes of xmm3 by EXPB0 | ||
6720 | # asm 1: pshufb EXPB0,<xmm3=int6464#4 | ||
6721 | # asm 2: pshufb EXPB0,<xmm3=%xmm3 | ||
6722 | pshufb EXPB0,%xmm3 | ||
6723 | |||
6724 | # qhasm: shuffle bytes of xmm7 by EXPB0 | ||
6725 | # asm 1: pshufb EXPB0,<xmm7=int6464#8 | ||
6726 | # asm 2: pshufb EXPB0,<xmm7=%xmm7 | ||
6727 | pshufb EXPB0,%xmm7 | ||
6728 | |||
6729 | # qhasm: shuffle bytes of xmm2 by EXPB0 | ||
6730 | # asm 1: pshufb EXPB0,<xmm2=int6464#3 | ||
6731 | # asm 2: pshufb EXPB0,<xmm2=%xmm2 | ||
6732 | pshufb EXPB0,%xmm2 | ||
6733 | |||
6734 | # qhasm: shuffle bytes of xmm5 by EXPB0 | ||
6735 | # asm 1: pshufb EXPB0,<xmm5=int6464#6 | ||
6736 | # asm 2: pshufb EXPB0,<xmm5=%xmm5 | ||
6737 | pshufb EXPB0,%xmm5 | ||
6738 | |||
6739 | # qhasm: xmm8 = *(int128 *)(c + 512) | ||
6740 | # asm 1: movdqa 512(<c=int64#1),>xmm8=int6464#9 | ||
6741 | # asm 2: movdqa 512(<c=%rdi),>xmm8=%xmm8 | ||
6742 | movdqa 512(%rdi),%xmm8 | ||
6743 | |||
6744 | # qhasm: xmm9 = *(int128 *)(c + 528) | ||
6745 | # asm 1: movdqa 528(<c=int64#1),>xmm9=int6464#10 | ||
6746 | # asm 2: movdqa 528(<c=%rdi),>xmm9=%xmm9 | ||
6747 | movdqa 528(%rdi),%xmm9 | ||
6748 | |||
6749 | # qhasm: xmm10 = *(int128 *)(c + 544) | ||
6750 | # asm 1: movdqa 544(<c=int64#1),>xmm10=int6464#11 | ||
6751 | # asm 2: movdqa 544(<c=%rdi),>xmm10=%xmm10 | ||
6752 | movdqa 544(%rdi),%xmm10 | ||
6753 | |||
6754 | # qhasm: xmm11 = *(int128 *)(c + 560) | ||
6755 | # asm 1: movdqa 560(<c=int64#1),>xmm11=int6464#12 | ||
6756 | # asm 2: movdqa 560(<c=%rdi),>xmm11=%xmm11 | ||
6757 | movdqa 560(%rdi),%xmm11 | ||
6758 | |||
6759 | # qhasm: xmm12 = *(int128 *)(c + 576) | ||
6760 | # asm 1: movdqa 576(<c=int64#1),>xmm12=int6464#13 | ||
6761 | # asm 2: movdqa 576(<c=%rdi),>xmm12=%xmm12 | ||
6762 | movdqa 576(%rdi),%xmm12 | ||
6763 | |||
6764 | # qhasm: xmm13 = *(int128 *)(c + 592) | ||
6765 | # asm 1: movdqa 592(<c=int64#1),>xmm13=int6464#14 | ||
6766 | # asm 2: movdqa 592(<c=%rdi),>xmm13=%xmm13 | ||
6767 | movdqa 592(%rdi),%xmm13 | ||
6768 | |||
6769 | # qhasm: xmm14 = *(int128 *)(c + 608) | ||
6770 | # asm 1: movdqa 608(<c=int64#1),>xmm14=int6464#15 | ||
6771 | # asm 2: movdqa 608(<c=%rdi),>xmm14=%xmm14 | ||
6772 | movdqa 608(%rdi),%xmm14 | ||
6773 | |||
6774 | # qhasm: xmm15 = *(int128 *)(c + 624) | ||
6775 | # asm 1: movdqa 624(<c=int64#1),>xmm15=int6464#16 | ||
6776 | # asm 2: movdqa 624(<c=%rdi),>xmm15=%xmm15 | ||
6777 | movdqa 624(%rdi),%xmm15 | ||
6778 | |||
6779 | # qhasm: xmm8 ^= ONE | ||
6780 | # asm 1: pxor ONE,<xmm8=int6464#9 | ||
6781 | # asm 2: pxor ONE,<xmm8=%xmm8 | ||
6782 | pxor ONE,%xmm8 | ||
6783 | |||
6784 | # qhasm: xmm9 ^= ONE | ||
6785 | # asm 1: pxor ONE,<xmm9=int6464#10 | ||
6786 | # asm 2: pxor ONE,<xmm9=%xmm9 | ||
6787 | pxor ONE,%xmm9 | ||
6788 | |||
6789 | # qhasm: xmm13 ^= ONE | ||
6790 | # asm 1: pxor ONE,<xmm13=int6464#14 | ||
6791 | # asm 2: pxor ONE,<xmm13=%xmm13 | ||
6792 | pxor ONE,%xmm13 | ||
6793 | |||
6794 | # qhasm: xmm14 ^= ONE | ||
6795 | # asm 1: pxor ONE,<xmm14=int6464#15 | ||
6796 | # asm 2: pxor ONE,<xmm14=%xmm14 | ||
6797 | pxor ONE,%xmm14 | ||
6798 | |||
6799 | # qhasm: xmm0 ^= xmm8 | ||
6800 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
6801 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
6802 | pxor %xmm8,%xmm0 | ||
6803 | |||
6804 | # qhasm: xmm1 ^= xmm9 | ||
6805 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
6806 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
6807 | pxor %xmm9,%xmm1 | ||
6808 | |||
6809 | # qhasm: xmm4 ^= xmm10 | ||
6810 | # asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5 | ||
6811 | # asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4 | ||
6812 | pxor %xmm10,%xmm4 | ||
6813 | |||
6814 | # qhasm: xmm6 ^= xmm11 | ||
6815 | # asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7 | ||
6816 | # asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6 | ||
6817 | pxor %xmm11,%xmm6 | ||
6818 | |||
6819 | # qhasm: xmm3 ^= xmm12 | ||
6820 | # asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4 | ||
6821 | # asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3 | ||
6822 | pxor %xmm12,%xmm3 | ||
6823 | |||
6824 | # qhasm: xmm7 ^= xmm13 | ||
6825 | # asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8 | ||
6826 | # asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7 | ||
6827 | pxor %xmm13,%xmm7 | ||
6828 | |||
6829 | # qhasm: xmm2 ^= xmm14 | ||
6830 | # asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3 | ||
6831 | # asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2 | ||
6832 | pxor %xmm14,%xmm2 | ||
6833 | |||
6834 | # qhasm: xmm5 ^= xmm15 | ||
6835 | # asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6 | ||
6836 | # asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5 | ||
6837 | pxor %xmm15,%xmm5 | ||
6838 | |||
6839 | # qhasm: uint32323232 xmm8 >>= 8 | ||
6840 | # asm 1: psrld $8,<xmm8=int6464#9 | ||
6841 | # asm 2: psrld $8,<xmm8=%xmm8 | ||
6842 | psrld $8,%xmm8 | ||
6843 | |||
6844 | # qhasm: uint32323232 xmm9 >>= 8 | ||
6845 | # asm 1: psrld $8,<xmm9=int6464#10 | ||
6846 | # asm 2: psrld $8,<xmm9=%xmm9 | ||
6847 | psrld $8,%xmm9 | ||
6848 | |||
6849 | # qhasm: uint32323232 xmm10 >>= 8 | ||
6850 | # asm 1: psrld $8,<xmm10=int6464#11 | ||
6851 | # asm 2: psrld $8,<xmm10=%xmm10 | ||
6852 | psrld $8,%xmm10 | ||
6853 | |||
6854 | # qhasm: uint32323232 xmm11 >>= 8 | ||
6855 | # asm 1: psrld $8,<xmm11=int6464#12 | ||
6856 | # asm 2: psrld $8,<xmm11=%xmm11 | ||
6857 | psrld $8,%xmm11 | ||
6858 | |||
6859 | # qhasm: uint32323232 xmm12 >>= 8 | ||
6860 | # asm 1: psrld $8,<xmm12=int6464#13 | ||
6861 | # asm 2: psrld $8,<xmm12=%xmm12 | ||
6862 | psrld $8,%xmm12 | ||
6863 | |||
6864 | # qhasm: uint32323232 xmm13 >>= 8 | ||
6865 | # asm 1: psrld $8,<xmm13=int6464#14 | ||
6866 | # asm 2: psrld $8,<xmm13=%xmm13 | ||
6867 | psrld $8,%xmm13 | ||
6868 | |||
6869 | # qhasm: uint32323232 xmm14 >>= 8 | ||
6870 | # asm 1: psrld $8,<xmm14=int6464#15 | ||
6871 | # asm 2: psrld $8,<xmm14=%xmm14 | ||
6872 | psrld $8,%xmm14 | ||
6873 | |||
6874 | # qhasm: uint32323232 xmm15 >>= 8 | ||
6875 | # asm 1: psrld $8,<xmm15=int6464#16 | ||
6876 | # asm 2: psrld $8,<xmm15=%xmm15 | ||
6877 | psrld $8,%xmm15 | ||
6878 | |||
6879 | # qhasm: xmm0 ^= xmm8 | ||
6880 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
6881 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
6882 | pxor %xmm8,%xmm0 | ||
6883 | |||
6884 | # qhasm: xmm1 ^= xmm9 | ||
6885 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
6886 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
6887 | pxor %xmm9,%xmm1 | ||
6888 | |||
6889 | # qhasm: xmm4 ^= xmm10 | ||
6890 | # asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5 | ||
6891 | # asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4 | ||
6892 | pxor %xmm10,%xmm4 | ||
6893 | |||
6894 | # qhasm: xmm6 ^= xmm11 | ||
6895 | # asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7 | ||
6896 | # asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6 | ||
6897 | pxor %xmm11,%xmm6 | ||
6898 | |||
6899 | # qhasm: xmm3 ^= xmm12 | ||
6900 | # asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4 | ||
6901 | # asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3 | ||
6902 | pxor %xmm12,%xmm3 | ||
6903 | |||
6904 | # qhasm: xmm7 ^= xmm13 | ||
6905 | # asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8 | ||
6906 | # asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7 | ||
6907 | pxor %xmm13,%xmm7 | ||
6908 | |||
6909 | # qhasm: xmm2 ^= xmm14 | ||
6910 | # asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3 | ||
6911 | # asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2 | ||
6912 | pxor %xmm14,%xmm2 | ||
6913 | |||
6914 | # qhasm: xmm5 ^= xmm15 | ||
6915 | # asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6 | ||
6916 | # asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5 | ||
6917 | pxor %xmm15,%xmm5 | ||
6918 | |||
6919 | # qhasm: uint32323232 xmm8 >>= 8 | ||
6920 | # asm 1: psrld $8,<xmm8=int6464#9 | ||
6921 | # asm 2: psrld $8,<xmm8=%xmm8 | ||
6922 | psrld $8,%xmm8 | ||
6923 | |||
6924 | # qhasm: uint32323232 xmm9 >>= 8 | ||
6925 | # asm 1: psrld $8,<xmm9=int6464#10 | ||
6926 | # asm 2: psrld $8,<xmm9=%xmm9 | ||
6927 | psrld $8,%xmm9 | ||
6928 | |||
6929 | # qhasm: uint32323232 xmm10 >>= 8 | ||
6930 | # asm 1: psrld $8,<xmm10=int6464#11 | ||
6931 | # asm 2: psrld $8,<xmm10=%xmm10 | ||
6932 | psrld $8,%xmm10 | ||
6933 | |||
6934 | # qhasm: uint32323232 xmm11 >>= 8 | ||
6935 | # asm 1: psrld $8,<xmm11=int6464#12 | ||
6936 | # asm 2: psrld $8,<xmm11=%xmm11 | ||
6937 | psrld $8,%xmm11 | ||
6938 | |||
6939 | # qhasm: uint32323232 xmm12 >>= 8 | ||
6940 | # asm 1: psrld $8,<xmm12=int6464#13 | ||
6941 | # asm 2: psrld $8,<xmm12=%xmm12 | ||
6942 | psrld $8,%xmm12 | ||
6943 | |||
6944 | # qhasm: uint32323232 xmm13 >>= 8 | ||
6945 | # asm 1: psrld $8,<xmm13=int6464#14 | ||
6946 | # asm 2: psrld $8,<xmm13=%xmm13 | ||
6947 | psrld $8,%xmm13 | ||
6948 | |||
6949 | # qhasm: uint32323232 xmm14 >>= 8 | ||
6950 | # asm 1: psrld $8,<xmm14=int6464#15 | ||
6951 | # asm 2: psrld $8,<xmm14=%xmm14 | ||
6952 | psrld $8,%xmm14 | ||
6953 | |||
6954 | # qhasm: uint32323232 xmm15 >>= 8 | ||
6955 | # asm 1: psrld $8,<xmm15=int6464#16 | ||
6956 | # asm 2: psrld $8,<xmm15=%xmm15 | ||
6957 | psrld $8,%xmm15 | ||
6958 | |||
6959 | # qhasm: xmm0 ^= xmm8 | ||
6960 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
6961 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
6962 | pxor %xmm8,%xmm0 | ||
6963 | |||
6964 | # qhasm: xmm1 ^= xmm9 | ||
6965 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
6966 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
6967 | pxor %xmm9,%xmm1 | ||
6968 | |||
6969 | # qhasm: xmm4 ^= xmm10 | ||
6970 | # asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5 | ||
6971 | # asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4 | ||
6972 | pxor %xmm10,%xmm4 | ||
6973 | |||
6974 | # qhasm: xmm6 ^= xmm11 | ||
6975 | # asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7 | ||
6976 | # asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6 | ||
6977 | pxor %xmm11,%xmm6 | ||
6978 | |||
6979 | # qhasm: xmm3 ^= xmm12 | ||
6980 | # asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4 | ||
6981 | # asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3 | ||
6982 | pxor %xmm12,%xmm3 | ||
6983 | |||
6984 | # qhasm: xmm7 ^= xmm13 | ||
6985 | # asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8 | ||
6986 | # asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7 | ||
6987 | pxor %xmm13,%xmm7 | ||
6988 | |||
6989 | # qhasm: xmm2 ^= xmm14 | ||
6990 | # asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3 | ||
6991 | # asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2 | ||
6992 | pxor %xmm14,%xmm2 | ||
6993 | |||
6994 | # qhasm: xmm5 ^= xmm15 | ||
6995 | # asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6 | ||
6996 | # asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5 | ||
6997 | pxor %xmm15,%xmm5 | ||
6998 | |||
6999 | # qhasm: uint32323232 xmm8 >>= 8 | ||
7000 | # asm 1: psrld $8,<xmm8=int6464#9 | ||
7001 | # asm 2: psrld $8,<xmm8=%xmm8 | ||
7002 | psrld $8,%xmm8 | ||
7003 | |||
7004 | # qhasm: uint32323232 xmm9 >>= 8 | ||
7005 | # asm 1: psrld $8,<xmm9=int6464#10 | ||
7006 | # asm 2: psrld $8,<xmm9=%xmm9 | ||
7007 | psrld $8,%xmm9 | ||
7008 | |||
7009 | # qhasm: uint32323232 xmm10 >>= 8 | ||
7010 | # asm 1: psrld $8,<xmm10=int6464#11 | ||
7011 | # asm 2: psrld $8,<xmm10=%xmm10 | ||
7012 | psrld $8,%xmm10 | ||
7013 | |||
7014 | # qhasm: uint32323232 xmm11 >>= 8 | ||
7015 | # asm 1: psrld $8,<xmm11=int6464#12 | ||
7016 | # asm 2: psrld $8,<xmm11=%xmm11 | ||
7017 | psrld $8,%xmm11 | ||
7018 | |||
7019 | # qhasm: uint32323232 xmm12 >>= 8 | ||
7020 | # asm 1: psrld $8,<xmm12=int6464#13 | ||
7021 | # asm 2: psrld $8,<xmm12=%xmm12 | ||
7022 | psrld $8,%xmm12 | ||
7023 | |||
7024 | # qhasm: uint32323232 xmm13 >>= 8 | ||
7025 | # asm 1: psrld $8,<xmm13=int6464#14 | ||
7026 | # asm 2: psrld $8,<xmm13=%xmm13 | ||
7027 | psrld $8,%xmm13 | ||
7028 | |||
7029 | # qhasm: uint32323232 xmm14 >>= 8 | ||
7030 | # asm 1: psrld $8,<xmm14=int6464#15 | ||
7031 | # asm 2: psrld $8,<xmm14=%xmm14 | ||
7032 | psrld $8,%xmm14 | ||
7033 | |||
7034 | # qhasm: uint32323232 xmm15 >>= 8 | ||
7035 | # asm 1: psrld $8,<xmm15=int6464#16 | ||
7036 | # asm 2: psrld $8,<xmm15=%xmm15 | ||
7037 | psrld $8,%xmm15 | ||
7038 | |||
7039 | # qhasm: xmm0 ^= xmm8 | ||
7040 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
7041 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
7042 | pxor %xmm8,%xmm0 | ||
7043 | |||
7044 | # qhasm: xmm1 ^= xmm9 | ||
7045 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
7046 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
7047 | pxor %xmm9,%xmm1 | ||
7048 | |||
7049 | # qhasm: xmm4 ^= xmm10 | ||
7050 | # asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5 | ||
7051 | # asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4 | ||
7052 | pxor %xmm10,%xmm4 | ||
7053 | |||
7054 | # qhasm: xmm6 ^= xmm11 | ||
7055 | # asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7 | ||
7056 | # asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6 | ||
7057 | pxor %xmm11,%xmm6 | ||
7058 | |||
7059 | # qhasm: xmm3 ^= xmm12 | ||
7060 | # asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4 | ||
7061 | # asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3 | ||
7062 | pxor %xmm12,%xmm3 | ||
7063 | |||
7064 | # qhasm: xmm7 ^= xmm13 | ||
7065 | # asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8 | ||
7066 | # asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7 | ||
7067 | pxor %xmm13,%xmm7 | ||
7068 | |||
7069 | # qhasm: xmm2 ^= xmm14 | ||
7070 | # asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3 | ||
7071 | # asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2 | ||
7072 | pxor %xmm14,%xmm2 | ||
7073 | |||
7074 | # qhasm: xmm5 ^= xmm15 | ||
7075 | # asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6 | ||
7076 | # asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5 | ||
7077 | pxor %xmm15,%xmm5 | ||
7078 | |||
7079 | # qhasm: *(int128 *)(c + 640) = xmm0 | ||
7080 | # asm 1: movdqa <xmm0=int6464#1,640(<c=int64#1) | ||
7081 | # asm 2: movdqa <xmm0=%xmm0,640(<c=%rdi) | ||
7082 | movdqa %xmm0,640(%rdi) | ||
7083 | |||
7084 | # qhasm: *(int128 *)(c + 656) = xmm1 | ||
7085 | # asm 1: movdqa <xmm1=int6464#2,656(<c=int64#1) | ||
7086 | # asm 2: movdqa <xmm1=%xmm1,656(<c=%rdi) | ||
7087 | movdqa %xmm1,656(%rdi) | ||
7088 | |||
7089 | # qhasm: *(int128 *)(c + 672) = xmm4 | ||
7090 | # asm 1: movdqa <xmm4=int6464#5,672(<c=int64#1) | ||
7091 | # asm 2: movdqa <xmm4=%xmm4,672(<c=%rdi) | ||
7092 | movdqa %xmm4,672(%rdi) | ||
7093 | |||
7094 | # qhasm: *(int128 *)(c + 688) = xmm6 | ||
7095 | # asm 1: movdqa <xmm6=int6464#7,688(<c=int64#1) | ||
7096 | # asm 2: movdqa <xmm6=%xmm6,688(<c=%rdi) | ||
7097 | movdqa %xmm6,688(%rdi) | ||
7098 | |||
7099 | # qhasm: *(int128 *)(c + 704) = xmm3 | ||
7100 | # asm 1: movdqa <xmm3=int6464#4,704(<c=int64#1) | ||
7101 | # asm 2: movdqa <xmm3=%xmm3,704(<c=%rdi) | ||
7102 | movdqa %xmm3,704(%rdi) | ||
7103 | |||
7104 | # qhasm: *(int128 *)(c + 720) = xmm7 | ||
7105 | # asm 1: movdqa <xmm7=int6464#8,720(<c=int64#1) | ||
7106 | # asm 2: movdqa <xmm7=%xmm7,720(<c=%rdi) | ||
7107 | movdqa %xmm7,720(%rdi) | ||
7108 | |||
7109 | # qhasm: *(int128 *)(c + 736) = xmm2 | ||
7110 | # asm 1: movdqa <xmm2=int6464#3,736(<c=int64#1) | ||
7111 | # asm 2: movdqa <xmm2=%xmm2,736(<c=%rdi) | ||
7112 | movdqa %xmm2,736(%rdi) | ||
7113 | |||
7114 | # qhasm: *(int128 *)(c + 752) = xmm5 | ||
7115 | # asm 1: movdqa <xmm5=int6464#6,752(<c=int64#1) | ||
7116 | # asm 2: movdqa <xmm5=%xmm5,752(<c=%rdi) | ||
7117 | movdqa %xmm5,752(%rdi) | ||
7118 | |||
7119 | # qhasm: xmm0 ^= ONE | ||
7120 | # asm 1: pxor ONE,<xmm0=int6464#1 | ||
7121 | # asm 2: pxor ONE,<xmm0=%xmm0 | ||
7122 | pxor ONE,%xmm0 | ||
7123 | |||
7124 | # qhasm: xmm1 ^= ONE | ||
7125 | # asm 1: pxor ONE,<xmm1=int6464#2 | ||
7126 | # asm 2: pxor ONE,<xmm1=%xmm1 | ||
7127 | pxor ONE,%xmm1 | ||
7128 | |||
7129 | # qhasm: xmm7 ^= ONE | ||
7130 | # asm 1: pxor ONE,<xmm7=int6464#8 | ||
7131 | # asm 2: pxor ONE,<xmm7=%xmm7 | ||
7132 | pxor ONE,%xmm7 | ||
7133 | |||
7134 | # qhasm: xmm2 ^= ONE | ||
7135 | # asm 1: pxor ONE,<xmm2=int6464#3 | ||
7136 | # asm 2: pxor ONE,<xmm2=%xmm2 | ||
7137 | pxor ONE,%xmm2 | ||
7138 | |||
7139 | # qhasm: shuffle bytes of xmm0 by ROTB | ||
7140 | # asm 1: pshufb ROTB,<xmm0=int6464#1 | ||
7141 | # asm 2: pshufb ROTB,<xmm0=%xmm0 | ||
7142 | pshufb ROTB,%xmm0 | ||
7143 | |||
7144 | # qhasm: shuffle bytes of xmm1 by ROTB | ||
7145 | # asm 1: pshufb ROTB,<xmm1=int6464#2 | ||
7146 | # asm 2: pshufb ROTB,<xmm1=%xmm1 | ||
7147 | pshufb ROTB,%xmm1 | ||
7148 | |||
7149 | # qhasm: shuffle bytes of xmm4 by ROTB | ||
7150 | # asm 1: pshufb ROTB,<xmm4=int6464#5 | ||
7151 | # asm 2: pshufb ROTB,<xmm4=%xmm4 | ||
7152 | pshufb ROTB,%xmm4 | ||
7153 | |||
7154 | # qhasm: shuffle bytes of xmm6 by ROTB | ||
7155 | # asm 1: pshufb ROTB,<xmm6=int6464#7 | ||
7156 | # asm 2: pshufb ROTB,<xmm6=%xmm6 | ||
7157 | pshufb ROTB,%xmm6 | ||
7158 | |||
7159 | # qhasm: shuffle bytes of xmm3 by ROTB | ||
7160 | # asm 1: pshufb ROTB,<xmm3=int6464#4 | ||
7161 | # asm 2: pshufb ROTB,<xmm3=%xmm3 | ||
7162 | pshufb ROTB,%xmm3 | ||
7163 | |||
7164 | # qhasm: shuffle bytes of xmm7 by ROTB | ||
7165 | # asm 1: pshufb ROTB,<xmm7=int6464#8 | ||
7166 | # asm 2: pshufb ROTB,<xmm7=%xmm7 | ||
7167 | pshufb ROTB,%xmm7 | ||
7168 | |||
7169 | # qhasm: shuffle bytes of xmm2 by ROTB | ||
7170 | # asm 1: pshufb ROTB,<xmm2=int6464#3 | ||
7171 | # asm 2: pshufb ROTB,<xmm2=%xmm2 | ||
7172 | pshufb ROTB,%xmm2 | ||
7173 | |||
7174 | # qhasm: shuffle bytes of xmm5 by ROTB | ||
7175 | # asm 1: pshufb ROTB,<xmm5=int6464#6 | ||
7176 | # asm 2: pshufb ROTB,<xmm5=%xmm5 | ||
7177 | pshufb ROTB,%xmm5 | ||
7178 | |||
7179 | # qhasm: xmm7 ^= xmm2 | ||
7180 | # asm 1: pxor <xmm2=int6464#3,<xmm7=int6464#8 | ||
7181 | # asm 2: pxor <xmm2=%xmm2,<xmm7=%xmm7 | ||
7182 | pxor %xmm2,%xmm7 | ||
7183 | |||
7184 | # qhasm: xmm4 ^= xmm1 | ||
7185 | # asm 1: pxor <xmm1=int6464#2,<xmm4=int6464#5 | ||
7186 | # asm 2: pxor <xmm1=%xmm1,<xmm4=%xmm4 | ||
7187 | pxor %xmm1,%xmm4 | ||
7188 | |||
7189 | # qhasm: xmm7 ^= xmm0 | ||
7190 | # asm 1: pxor <xmm0=int6464#1,<xmm7=int6464#8 | ||
7191 | # asm 2: pxor <xmm0=%xmm0,<xmm7=%xmm7 | ||
7192 | pxor %xmm0,%xmm7 | ||
7193 | |||
7194 | # qhasm: xmm2 ^= xmm4 | ||
7195 | # asm 1: pxor <xmm4=int6464#5,<xmm2=int6464#3 | ||
7196 | # asm 2: pxor <xmm4=%xmm4,<xmm2=%xmm2 | ||
7197 | pxor %xmm4,%xmm2 | ||
7198 | |||
7199 | # qhasm: xmm6 ^= xmm0 | ||
7200 | # asm 1: pxor <xmm0=int6464#1,<xmm6=int6464#7 | ||
7201 | # asm 2: pxor <xmm0=%xmm0,<xmm6=%xmm6 | ||
7202 | pxor %xmm0,%xmm6 | ||
7203 | |||
7204 | # qhasm: xmm2 ^= xmm6 | ||
7205 | # asm 1: pxor <xmm6=int6464#7,<xmm2=int6464#3 | ||
7206 | # asm 2: pxor <xmm6=%xmm6,<xmm2=%xmm2 | ||
7207 | pxor %xmm6,%xmm2 | ||
7208 | |||
7209 | # qhasm: xmm6 ^= xmm5 | ||
7210 | # asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7 | ||
7211 | # asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6 | ||
7212 | pxor %xmm5,%xmm6 | ||
7213 | |||
7214 | # qhasm: xmm6 ^= xmm3 | ||
7215 | # asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7 | ||
7216 | # asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6 | ||
7217 | pxor %xmm3,%xmm6 | ||
7218 | |||
7219 | # qhasm: xmm5 ^= xmm7 | ||
7220 | # asm 1: pxor <xmm7=int6464#8,<xmm5=int6464#6 | ||
7221 | # asm 2: pxor <xmm7=%xmm7,<xmm5=%xmm5 | ||
7222 | pxor %xmm7,%xmm5 | ||
7223 | |||
7224 | # qhasm: xmm6 ^= xmm1 | ||
7225 | # asm 1: pxor <xmm1=int6464#2,<xmm6=int6464#7 | ||
7226 | # asm 2: pxor <xmm1=%xmm1,<xmm6=%xmm6 | ||
7227 | pxor %xmm1,%xmm6 | ||
7228 | |||
7229 | # qhasm: xmm3 ^= xmm7 | ||
7230 | # asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4 | ||
7231 | # asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3 | ||
7232 | pxor %xmm7,%xmm3 | ||
7233 | |||
7234 | # qhasm: xmm4 ^= xmm5 | ||
7235 | # asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5 | ||
7236 | # asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4 | ||
7237 | pxor %xmm5,%xmm4 | ||
7238 | |||
7239 | # qhasm: xmm1 ^= xmm7 | ||
7240 | # asm 1: pxor <xmm7=int6464#8,<xmm1=int6464#2 | ||
7241 | # asm 2: pxor <xmm7=%xmm7,<xmm1=%xmm1 | ||
7242 | pxor %xmm7,%xmm1 | ||
7243 | |||
7244 | # qhasm: xmm11 = xmm5 | ||
7245 | # asm 1: movdqa <xmm5=int6464#6,>xmm11=int6464#9 | ||
7246 | # asm 2: movdqa <xmm5=%xmm5,>xmm11=%xmm8 | ||
7247 | movdqa %xmm5,%xmm8 | ||
7248 | |||
7249 | # qhasm: xmm10 = xmm1 | ||
7250 | # asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10 | ||
7251 | # asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9 | ||
7252 | movdqa %xmm1,%xmm9 | ||
7253 | |||
7254 | # qhasm: xmm9 = xmm7 | ||
7255 | # asm 1: movdqa <xmm7=int6464#8,>xmm9=int6464#11 | ||
7256 | # asm 2: movdqa <xmm7=%xmm7,>xmm9=%xmm10 | ||
7257 | movdqa %xmm7,%xmm10 | ||
7258 | |||
7259 | # qhasm: xmm13 = xmm4 | ||
7260 | # asm 1: movdqa <xmm4=int6464#5,>xmm13=int6464#12 | ||
7261 | # asm 2: movdqa <xmm4=%xmm4,>xmm13=%xmm11 | ||
7262 | movdqa %xmm4,%xmm11 | ||
7263 | |||
7264 | # qhasm: xmm12 = xmm2 | ||
7265 | # asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#13 | ||
7266 | # asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm12 | ||
7267 | movdqa %xmm2,%xmm12 | ||
7268 | |||
7269 | # qhasm: xmm11 ^= xmm3 | ||
7270 | # asm 1: pxor <xmm3=int6464#4,<xmm11=int6464#9 | ||
7271 | # asm 2: pxor <xmm3=%xmm3,<xmm11=%xmm8 | ||
7272 | pxor %xmm3,%xmm8 | ||
7273 | |||
7274 | # qhasm: xmm10 ^= xmm4 | ||
7275 | # asm 1: pxor <xmm4=int6464#5,<xmm10=int6464#10 | ||
7276 | # asm 2: pxor <xmm4=%xmm4,<xmm10=%xmm9 | ||
7277 | pxor %xmm4,%xmm9 | ||
7278 | |||
7279 | # qhasm: xmm9 ^= xmm6 | ||
7280 | # asm 1: pxor <xmm6=int6464#7,<xmm9=int6464#11 | ||
7281 | # asm 2: pxor <xmm6=%xmm6,<xmm9=%xmm10 | ||
7282 | pxor %xmm6,%xmm10 | ||
7283 | |||
7284 | # qhasm: xmm13 ^= xmm3 | ||
7285 | # asm 1: pxor <xmm3=int6464#4,<xmm13=int6464#12 | ||
7286 | # asm 2: pxor <xmm3=%xmm3,<xmm13=%xmm11 | ||
7287 | pxor %xmm3,%xmm11 | ||
7288 | |||
7289 | # qhasm: xmm12 ^= xmm0 | ||
7290 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13 | ||
7291 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12 | ||
7292 | pxor %xmm0,%xmm12 | ||
7293 | |||
7294 | # qhasm: xmm14 = xmm11 | ||
7295 | # asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14 | ||
7296 | # asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13 | ||
7297 | movdqa %xmm8,%xmm13 | ||
7298 | |||
7299 | # qhasm: xmm8 = xmm10 | ||
7300 | # asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15 | ||
7301 | # asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14 | ||
7302 | movdqa %xmm9,%xmm14 | ||
7303 | |||
7304 | # qhasm: xmm15 = xmm11 | ||
7305 | # asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16 | ||
7306 | # asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15 | ||
7307 | movdqa %xmm8,%xmm15 | ||
7308 | |||
7309 | # qhasm: xmm10 |= xmm9 | ||
7310 | # asm 1: por <xmm9=int6464#11,<xmm10=int6464#10 | ||
7311 | # asm 2: por <xmm9=%xmm10,<xmm10=%xmm9 | ||
7312 | por %xmm10,%xmm9 | ||
7313 | |||
7314 | # qhasm: xmm11 |= xmm12 | ||
7315 | # asm 1: por <xmm12=int6464#13,<xmm11=int6464#9 | ||
7316 | # asm 2: por <xmm12=%xmm12,<xmm11=%xmm8 | ||
7317 | por %xmm12,%xmm8 | ||
7318 | |||
7319 | # qhasm: xmm15 ^= xmm8 | ||
7320 | # asm 1: pxor <xmm8=int6464#15,<xmm15=int6464#16 | ||
7321 | # asm 2: pxor <xmm8=%xmm14,<xmm15=%xmm15 | ||
7322 | pxor %xmm14,%xmm15 | ||
7323 | |||
7324 | # qhasm: xmm14 &= xmm12 | ||
7325 | # asm 1: pand <xmm12=int6464#13,<xmm14=int6464#14 | ||
7326 | # asm 2: pand <xmm12=%xmm12,<xmm14=%xmm13 | ||
7327 | pand %xmm12,%xmm13 | ||
7328 | |||
7329 | # qhasm: xmm8 &= xmm9 | ||
7330 | # asm 1: pand <xmm9=int6464#11,<xmm8=int6464#15 | ||
7331 | # asm 2: pand <xmm9=%xmm10,<xmm8=%xmm14 | ||
7332 | pand %xmm10,%xmm14 | ||
7333 | |||
7334 | # qhasm: xmm12 ^= xmm9 | ||
7335 | # asm 1: pxor <xmm9=int6464#11,<xmm12=int6464#13 | ||
7336 | # asm 2: pxor <xmm9=%xmm10,<xmm12=%xmm12 | ||
7337 | pxor %xmm10,%xmm12 | ||
7338 | |||
7339 | # qhasm: xmm15 &= xmm12 | ||
7340 | # asm 1: pand <xmm12=int6464#13,<xmm15=int6464#16 | ||
7341 | # asm 2: pand <xmm12=%xmm12,<xmm15=%xmm15 | ||
7342 | pand %xmm12,%xmm15 | ||
7343 | |||
7344 | # qhasm: xmm12 = xmm6 | ||
7345 | # asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#11 | ||
7346 | # asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm10 | ||
7347 | movdqa %xmm6,%xmm10 | ||
7348 | |||
7349 | # qhasm: xmm12 ^= xmm0 | ||
7350 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#11 | ||
7351 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm10 | ||
7352 | pxor %xmm0,%xmm10 | ||
7353 | |||
7354 | # qhasm: xmm13 &= xmm12 | ||
7355 | # asm 1: pand <xmm12=int6464#11,<xmm13=int6464#12 | ||
7356 | # asm 2: pand <xmm12=%xmm10,<xmm13=%xmm11 | ||
7357 | pand %xmm10,%xmm11 | ||
7358 | |||
7359 | # qhasm: xmm11 ^= xmm13 | ||
7360 | # asm 1: pxor <xmm13=int6464#12,<xmm11=int6464#9 | ||
7361 | # asm 2: pxor <xmm13=%xmm11,<xmm11=%xmm8 | ||
7362 | pxor %xmm11,%xmm8 | ||
7363 | |||
7364 | # qhasm: xmm10 ^= xmm13 | ||
7365 | # asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10 | ||
7366 | # asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9 | ||
7367 | pxor %xmm11,%xmm9 | ||
7368 | |||
7369 | # qhasm: xmm13 = xmm5 | ||
7370 | # asm 1: movdqa <xmm5=int6464#6,>xmm13=int6464#11 | ||
7371 | # asm 2: movdqa <xmm5=%xmm5,>xmm13=%xmm10 | ||
7372 | movdqa %xmm5,%xmm10 | ||
7373 | |||
7374 | # qhasm: xmm13 ^= xmm1 | ||
7375 | # asm 1: pxor <xmm1=int6464#2,<xmm13=int6464#11 | ||
7376 | # asm 2: pxor <xmm1=%xmm1,<xmm13=%xmm10 | ||
7377 | pxor %xmm1,%xmm10 | ||
7378 | |||
7379 | # qhasm: xmm12 = xmm7 | ||
7380 | # asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#12 | ||
7381 | # asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm11 | ||
7382 | movdqa %xmm7,%xmm11 | ||
7383 | |||
7384 | # qhasm: xmm9 = xmm13 | ||
7385 | # asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13 | ||
7386 | # asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12 | ||
7387 | movdqa %xmm10,%xmm12 | ||
7388 | |||
7389 | # qhasm: xmm12 ^= xmm2 | ||
7390 | # asm 1: pxor <xmm2=int6464#3,<xmm12=int6464#12 | ||
7391 | # asm 2: pxor <xmm2=%xmm2,<xmm12=%xmm11 | ||
7392 | pxor %xmm2,%xmm11 | ||
7393 | |||
7394 | # qhasm: xmm9 |= xmm12 | ||
7395 | # asm 1: por <xmm12=int6464#12,<xmm9=int6464#13 | ||
7396 | # asm 2: por <xmm12=%xmm11,<xmm9=%xmm12 | ||
7397 | por %xmm11,%xmm12 | ||
7398 | |||
7399 | # qhasm: xmm13 &= xmm12 | ||
7400 | # asm 1: pand <xmm12=int6464#12,<xmm13=int6464#11 | ||
7401 | # asm 2: pand <xmm12=%xmm11,<xmm13=%xmm10 | ||
7402 | pand %xmm11,%xmm10 | ||
7403 | |||
7404 | # qhasm: xmm8 ^= xmm13 | ||
7405 | # asm 1: pxor <xmm13=int6464#11,<xmm8=int6464#15 | ||
7406 | # asm 2: pxor <xmm13=%xmm10,<xmm8=%xmm14 | ||
7407 | pxor %xmm10,%xmm14 | ||
7408 | |||
7409 | # qhasm: xmm11 ^= xmm15 | ||
7410 | # asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#9 | ||
7411 | # asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm8 | ||
7412 | pxor %xmm15,%xmm8 | ||
7413 | |||
7414 | # qhasm: xmm10 ^= xmm14 | ||
7415 | # asm 1: pxor <xmm14=int6464#14,<xmm10=int6464#10 | ||
7416 | # asm 2: pxor <xmm14=%xmm13,<xmm10=%xmm9 | ||
7417 | pxor %xmm13,%xmm9 | ||
7418 | |||
7419 | # qhasm: xmm9 ^= xmm15 | ||
7420 | # asm 1: pxor <xmm15=int6464#16,<xmm9=int6464#13 | ||
7421 | # asm 2: pxor <xmm15=%xmm15,<xmm9=%xmm12 | ||
7422 | pxor %xmm15,%xmm12 | ||
7423 | |||
7424 | # qhasm: xmm8 ^= xmm14 | ||
7425 | # asm 1: pxor <xmm14=int6464#14,<xmm8=int6464#15 | ||
7426 | # asm 2: pxor <xmm14=%xmm13,<xmm8=%xmm14 | ||
7427 | pxor %xmm13,%xmm14 | ||
7428 | |||
7429 | # qhasm: xmm9 ^= xmm14 | ||
7430 | # asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13 | ||
7431 | # asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12 | ||
7432 | pxor %xmm13,%xmm12 | ||
7433 | |||
7434 | # qhasm: xmm12 = xmm4 | ||
7435 | # asm 1: movdqa <xmm4=int6464#5,>xmm12=int6464#11 | ||
7436 | # asm 2: movdqa <xmm4=%xmm4,>xmm12=%xmm10 | ||
7437 | movdqa %xmm4,%xmm10 | ||
7438 | |||
7439 | # qhasm: xmm13 = xmm3 | ||
7440 | # asm 1: movdqa <xmm3=int6464#4,>xmm13=int6464#12 | ||
7441 | # asm 2: movdqa <xmm3=%xmm3,>xmm13=%xmm11 | ||
7442 | movdqa %xmm3,%xmm11 | ||
7443 | |||
7444 | # qhasm: xmm14 = xmm1 | ||
7445 | # asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14 | ||
7446 | # asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13 | ||
7447 | movdqa %xmm1,%xmm13 | ||
7448 | |||
7449 | # qhasm: xmm15 = xmm5 | ||
7450 | # asm 1: movdqa <xmm5=int6464#6,>xmm15=int6464#16 | ||
7451 | # asm 2: movdqa <xmm5=%xmm5,>xmm15=%xmm15 | ||
7452 | movdqa %xmm5,%xmm15 | ||
7453 | |||
7454 | # qhasm: xmm12 &= xmm6 | ||
7455 | # asm 1: pand <xmm6=int6464#7,<xmm12=int6464#11 | ||
7456 | # asm 2: pand <xmm6=%xmm6,<xmm12=%xmm10 | ||
7457 | pand %xmm6,%xmm10 | ||
7458 | |||
7459 | # qhasm: xmm13 &= xmm0 | ||
7460 | # asm 1: pand <xmm0=int6464#1,<xmm13=int6464#12 | ||
7461 | # asm 2: pand <xmm0=%xmm0,<xmm13=%xmm11 | ||
7462 | pand %xmm0,%xmm11 | ||
7463 | |||
7464 | # qhasm: xmm14 &= xmm7 | ||
7465 | # asm 1: pand <xmm7=int6464#8,<xmm14=int6464#14 | ||
7466 | # asm 2: pand <xmm7=%xmm7,<xmm14=%xmm13 | ||
7467 | pand %xmm7,%xmm13 | ||
7468 | |||
7469 | # qhasm: xmm15 |= xmm2 | ||
7470 | # asm 1: por <xmm2=int6464#3,<xmm15=int6464#16 | ||
7471 | # asm 2: por <xmm2=%xmm2,<xmm15=%xmm15 | ||
7472 | por %xmm2,%xmm15 | ||
7473 | |||
7474 | # qhasm: xmm11 ^= xmm12 | ||
7475 | # asm 1: pxor <xmm12=int6464#11,<xmm11=int6464#9 | ||
7476 | # asm 2: pxor <xmm12=%xmm10,<xmm11=%xmm8 | ||
7477 | pxor %xmm10,%xmm8 | ||
7478 | |||
7479 | # qhasm: xmm10 ^= xmm13 | ||
7480 | # asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10 | ||
7481 | # asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9 | ||
7482 | pxor %xmm11,%xmm9 | ||
7483 | |||
7484 | # qhasm: xmm9 ^= xmm14 | ||
7485 | # asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13 | ||
7486 | # asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12 | ||
7487 | pxor %xmm13,%xmm12 | ||
7488 | |||
7489 | # qhasm: xmm8 ^= xmm15 | ||
7490 | # asm 1: pxor <xmm15=int6464#16,<xmm8=int6464#15 | ||
7491 | # asm 2: pxor <xmm15=%xmm15,<xmm8=%xmm14 | ||
7492 | pxor %xmm15,%xmm14 | ||
7493 | |||
7494 | # qhasm: xmm12 = xmm11 | ||
7495 | # asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11 | ||
7496 | # asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10 | ||
7497 | movdqa %xmm8,%xmm10 | ||
7498 | |||
7499 | # qhasm: xmm12 ^= xmm10 | ||
7500 | # asm 1: pxor <xmm10=int6464#10,<xmm12=int6464#11 | ||
7501 | # asm 2: pxor <xmm10=%xmm9,<xmm12=%xmm10 | ||
7502 | pxor %xmm9,%xmm10 | ||
7503 | |||
7504 | # qhasm: xmm11 &= xmm9 | ||
7505 | # asm 1: pand <xmm9=int6464#13,<xmm11=int6464#9 | ||
7506 | # asm 2: pand <xmm9=%xmm12,<xmm11=%xmm8 | ||
7507 | pand %xmm12,%xmm8 | ||
7508 | |||
7509 | # qhasm: xmm14 = xmm8 | ||
7510 | # asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12 | ||
7511 | # asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11 | ||
7512 | movdqa %xmm14,%xmm11 | ||
7513 | |||
7514 | # qhasm: xmm14 ^= xmm11 | ||
7515 | # asm 1: pxor <xmm11=int6464#9,<xmm14=int6464#12 | ||
7516 | # asm 2: pxor <xmm11=%xmm8,<xmm14=%xmm11 | ||
7517 | pxor %xmm8,%xmm11 | ||
7518 | |||
7519 | # qhasm: xmm15 = xmm12 | ||
7520 | # asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14 | ||
7521 | # asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13 | ||
7522 | movdqa %xmm10,%xmm13 | ||
7523 | |||
7524 | # qhasm: xmm15 &= xmm14 | ||
7525 | # asm 1: pand <xmm14=int6464#12,<xmm15=int6464#14 | ||
7526 | # asm 2: pand <xmm14=%xmm11,<xmm15=%xmm13 | ||
7527 | pand %xmm11,%xmm13 | ||
7528 | |||
7529 | # qhasm: xmm15 ^= xmm10 | ||
7530 | # asm 1: pxor <xmm10=int6464#10,<xmm15=int6464#14 | ||
7531 | # asm 2: pxor <xmm10=%xmm9,<xmm15=%xmm13 | ||
7532 | pxor %xmm9,%xmm13 | ||
7533 | |||
7534 | # qhasm: xmm13 = xmm9 | ||
7535 | # asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16 | ||
7536 | # asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15 | ||
7537 | movdqa %xmm12,%xmm15 | ||
7538 | |||
7539 | # qhasm: xmm13 ^= xmm8 | ||
7540 | # asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16 | ||
7541 | # asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15 | ||
7542 | pxor %xmm14,%xmm15 | ||
7543 | |||
7544 | # qhasm: xmm11 ^= xmm10 | ||
7545 | # asm 1: pxor <xmm10=int6464#10,<xmm11=int6464#9 | ||
7546 | # asm 2: pxor <xmm10=%xmm9,<xmm11=%xmm8 | ||
7547 | pxor %xmm9,%xmm8 | ||
7548 | |||
7549 | # qhasm: xmm13 &= xmm11 | ||
7550 | # asm 1: pand <xmm11=int6464#9,<xmm13=int6464#16 | ||
7551 | # asm 2: pand <xmm11=%xmm8,<xmm13=%xmm15 | ||
7552 | pand %xmm8,%xmm15 | ||
7553 | |||
7554 | # qhasm: xmm13 ^= xmm8 | ||
7555 | # asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16 | ||
7556 | # asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15 | ||
7557 | pxor %xmm14,%xmm15 | ||
7558 | |||
7559 | # qhasm: xmm9 ^= xmm13 | ||
7560 | # asm 1: pxor <xmm13=int6464#16,<xmm9=int6464#13 | ||
7561 | # asm 2: pxor <xmm13=%xmm15,<xmm9=%xmm12 | ||
7562 | pxor %xmm15,%xmm12 | ||
7563 | |||
7564 | # qhasm: xmm10 = xmm14 | ||
7565 | # asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9 | ||
7566 | # asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8 | ||
7567 | movdqa %xmm11,%xmm8 | ||
7568 | |||
7569 | # qhasm: xmm10 ^= xmm13 | ||
7570 | # asm 1: pxor <xmm13=int6464#16,<xmm10=int6464#9 | ||
7571 | # asm 2: pxor <xmm13=%xmm15,<xmm10=%xmm8 | ||
7572 | pxor %xmm15,%xmm8 | ||
7573 | |||
7574 | # qhasm: xmm10 &= xmm8 | ||
7575 | # asm 1: pand <xmm8=int6464#15,<xmm10=int6464#9 | ||
7576 | # asm 2: pand <xmm8=%xmm14,<xmm10=%xmm8 | ||
7577 | pand %xmm14,%xmm8 | ||
7578 | |||
7579 | # qhasm: xmm9 ^= xmm10 | ||
7580 | # asm 1: pxor <xmm10=int6464#9,<xmm9=int6464#13 | ||
7581 | # asm 2: pxor <xmm10=%xmm8,<xmm9=%xmm12 | ||
7582 | pxor %xmm8,%xmm12 | ||
7583 | |||
7584 | # qhasm: xmm14 ^= xmm10 | ||
7585 | # asm 1: pxor <xmm10=int6464#9,<xmm14=int6464#12 | ||
7586 | # asm 2: pxor <xmm10=%xmm8,<xmm14=%xmm11 | ||
7587 | pxor %xmm8,%xmm11 | ||
7588 | |||
7589 | # qhasm: xmm14 &= xmm15 | ||
7590 | # asm 1: pand <xmm15=int6464#14,<xmm14=int6464#12 | ||
7591 | # asm 2: pand <xmm15=%xmm13,<xmm14=%xmm11 | ||
7592 | pand %xmm13,%xmm11 | ||
7593 | |||
7594 | # qhasm: xmm14 ^= xmm12 | ||
7595 | # asm 1: pxor <xmm12=int6464#11,<xmm14=int6464#12 | ||
7596 | # asm 2: pxor <xmm12=%xmm10,<xmm14=%xmm11 | ||
7597 | pxor %xmm10,%xmm11 | ||
7598 | |||
7599 | # qhasm: xmm12 = xmm2 | ||
7600 | # asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#9 | ||
7601 | # asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm8 | ||
7602 | movdqa %xmm2,%xmm8 | ||
7603 | |||
7604 | # qhasm: xmm8 = xmm7 | ||
7605 | # asm 1: movdqa <xmm7=int6464#8,>xmm8=int6464#10 | ||
7606 | # asm 2: movdqa <xmm7=%xmm7,>xmm8=%xmm9 | ||
7607 | movdqa %xmm7,%xmm9 | ||
7608 | |||
7609 | # qhasm: xmm10 = xmm15 | ||
7610 | # asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11 | ||
7611 | # asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10 | ||
7612 | movdqa %xmm13,%xmm10 | ||
7613 | |||
7614 | # qhasm: xmm10 ^= xmm14 | ||
7615 | # asm 1: pxor <xmm14=int6464#12,<xmm10=int6464#11 | ||
7616 | # asm 2: pxor <xmm14=%xmm11,<xmm10=%xmm10 | ||
7617 | pxor %xmm11,%xmm10 | ||
7618 | |||
7619 | # qhasm: xmm10 &= xmm2 | ||
7620 | # asm 1: pand <xmm2=int6464#3,<xmm10=int6464#11 | ||
7621 | # asm 2: pand <xmm2=%xmm2,<xmm10=%xmm10 | ||
7622 | pand %xmm2,%xmm10 | ||
7623 | |||
7624 | # qhasm: xmm2 ^= xmm7 | ||
7625 | # asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3 | ||
7626 | # asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2 | ||
7627 | pxor %xmm7,%xmm2 | ||
7628 | |||
7629 | # qhasm: xmm2 &= xmm14 | ||
7630 | # asm 1: pand <xmm14=int6464#12,<xmm2=int6464#3 | ||
7631 | # asm 2: pand <xmm14=%xmm11,<xmm2=%xmm2 | ||
7632 | pand %xmm11,%xmm2 | ||
7633 | |||
7634 | # qhasm: xmm7 &= xmm15 | ||
7635 | # asm 1: pand <xmm15=int6464#14,<xmm7=int6464#8 | ||
7636 | # asm 2: pand <xmm15=%xmm13,<xmm7=%xmm7 | ||
7637 | pand %xmm13,%xmm7 | ||
7638 | |||
7639 | # qhasm: xmm2 ^= xmm7 | ||
7640 | # asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3 | ||
7641 | # asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2 | ||
7642 | pxor %xmm7,%xmm2 | ||
7643 | |||
7644 | # qhasm: xmm7 ^= xmm10 | ||
7645 | # asm 1: pxor <xmm10=int6464#11,<xmm7=int6464#8 | ||
7646 | # asm 2: pxor <xmm10=%xmm10,<xmm7=%xmm7 | ||
7647 | pxor %xmm10,%xmm7 | ||
7648 | |||
7649 | # qhasm: xmm12 ^= xmm0 | ||
7650 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#9 | ||
7651 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm8 | ||
7652 | pxor %xmm0,%xmm8 | ||
7653 | |||
7654 | # qhasm: xmm8 ^= xmm6 | ||
7655 | # asm 1: pxor <xmm6=int6464#7,<xmm8=int6464#10 | ||
7656 | # asm 2: pxor <xmm6=%xmm6,<xmm8=%xmm9 | ||
7657 | pxor %xmm6,%xmm9 | ||
7658 | |||
7659 | # qhasm: xmm15 ^= xmm13 | ||
7660 | # asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14 | ||
7661 | # asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13 | ||
7662 | pxor %xmm15,%xmm13 | ||
7663 | |||
7664 | # qhasm: xmm14 ^= xmm9 | ||
7665 | # asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12 | ||
7666 | # asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11 | ||
7667 | pxor %xmm12,%xmm11 | ||
7668 | |||
7669 | # qhasm: xmm11 = xmm15 | ||
7670 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
7671 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
7672 | movdqa %xmm13,%xmm10 | ||
7673 | |||
7674 | # qhasm: xmm11 ^= xmm14 | ||
7675 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
7676 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
7677 | pxor %xmm11,%xmm10 | ||
7678 | |||
7679 | # qhasm: xmm11 &= xmm12 | ||
7680 | # asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11 | ||
7681 | # asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10 | ||
7682 | pand %xmm8,%xmm10 | ||
7683 | |||
7684 | # qhasm: xmm12 ^= xmm8 | ||
7685 | # asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9 | ||
7686 | # asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8 | ||
7687 | pxor %xmm9,%xmm8 | ||
7688 | |||
7689 | # qhasm: xmm12 &= xmm14 | ||
7690 | # asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9 | ||
7691 | # asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8 | ||
7692 | pand %xmm11,%xmm8 | ||
7693 | |||
7694 | # qhasm: xmm8 &= xmm15 | ||
7695 | # asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10 | ||
7696 | # asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9 | ||
7697 | pand %xmm13,%xmm9 | ||
7698 | |||
7699 | # qhasm: xmm8 ^= xmm12 | ||
7700 | # asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10 | ||
7701 | # asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9 | ||
7702 | pxor %xmm8,%xmm9 | ||
7703 | |||
7704 | # qhasm: xmm12 ^= xmm11 | ||
7705 | # asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9 | ||
7706 | # asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8 | ||
7707 | pxor %xmm10,%xmm8 | ||
7708 | |||
7709 | # qhasm: xmm10 = xmm13 | ||
7710 | # asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11 | ||
7711 | # asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10 | ||
7712 | movdqa %xmm15,%xmm10 | ||
7713 | |||
7714 | # qhasm: xmm10 ^= xmm9 | ||
7715 | # asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11 | ||
7716 | # asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10 | ||
7717 | pxor %xmm12,%xmm10 | ||
7718 | |||
7719 | # qhasm: xmm10 &= xmm0 | ||
7720 | # asm 1: pand <xmm0=int6464#1,<xmm10=int6464#11 | ||
7721 | # asm 2: pand <xmm0=%xmm0,<xmm10=%xmm10 | ||
7722 | pand %xmm0,%xmm10 | ||
7723 | |||
7724 | # qhasm: xmm0 ^= xmm6 | ||
7725 | # asm 1: pxor <xmm6=int6464#7,<xmm0=int6464#1 | ||
7726 | # asm 2: pxor <xmm6=%xmm6,<xmm0=%xmm0 | ||
7727 | pxor %xmm6,%xmm0 | ||
7728 | |||
7729 | # qhasm: xmm0 &= xmm9 | ||
7730 | # asm 1: pand <xmm9=int6464#13,<xmm0=int6464#1 | ||
7731 | # asm 2: pand <xmm9=%xmm12,<xmm0=%xmm0 | ||
7732 | pand %xmm12,%xmm0 | ||
7733 | |||
7734 | # qhasm: xmm6 &= xmm13 | ||
7735 | # asm 1: pand <xmm13=int6464#16,<xmm6=int6464#7 | ||
7736 | # asm 2: pand <xmm13=%xmm15,<xmm6=%xmm6 | ||
7737 | pand %xmm15,%xmm6 | ||
7738 | |||
7739 | # qhasm: xmm0 ^= xmm6 | ||
7740 | # asm 1: pxor <xmm6=int6464#7,<xmm0=int6464#1 | ||
7741 | # asm 2: pxor <xmm6=%xmm6,<xmm0=%xmm0 | ||
7742 | pxor %xmm6,%xmm0 | ||
7743 | |||
7744 | # qhasm: xmm6 ^= xmm10 | ||
7745 | # asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7 | ||
7746 | # asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6 | ||
7747 | pxor %xmm10,%xmm6 | ||
7748 | |||
7749 | # qhasm: xmm2 ^= xmm12 | ||
7750 | # asm 1: pxor <xmm12=int6464#9,<xmm2=int6464#3 | ||
7751 | # asm 2: pxor <xmm12=%xmm8,<xmm2=%xmm2 | ||
7752 | pxor %xmm8,%xmm2 | ||
7753 | |||
7754 | # qhasm: xmm0 ^= xmm12 | ||
7755 | # asm 1: pxor <xmm12=int6464#9,<xmm0=int6464#1 | ||
7756 | # asm 2: pxor <xmm12=%xmm8,<xmm0=%xmm0 | ||
7757 | pxor %xmm8,%xmm0 | ||
7758 | |||
7759 | # qhasm: xmm7 ^= xmm8 | ||
7760 | # asm 1: pxor <xmm8=int6464#10,<xmm7=int6464#8 | ||
7761 | # asm 2: pxor <xmm8=%xmm9,<xmm7=%xmm7 | ||
7762 | pxor %xmm9,%xmm7 | ||
7763 | |||
7764 | # qhasm: xmm6 ^= xmm8 | ||
7765 | # asm 1: pxor <xmm8=int6464#10,<xmm6=int6464#7 | ||
7766 | # asm 2: pxor <xmm8=%xmm9,<xmm6=%xmm6 | ||
7767 | pxor %xmm9,%xmm6 | ||
7768 | |||
7769 | # qhasm: xmm12 = xmm5 | ||
7770 | # asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#9 | ||
7771 | # asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm8 | ||
7772 | movdqa %xmm5,%xmm8 | ||
7773 | |||
7774 | # qhasm: xmm8 = xmm1 | ||
7775 | # asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10 | ||
7776 | # asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9 | ||
7777 | movdqa %xmm1,%xmm9 | ||
7778 | |||
7779 | # qhasm: xmm12 ^= xmm3 | ||
7780 | # asm 1: pxor <xmm3=int6464#4,<xmm12=int6464#9 | ||
7781 | # asm 2: pxor <xmm3=%xmm3,<xmm12=%xmm8 | ||
7782 | pxor %xmm3,%xmm8 | ||
7783 | |||
7784 | # qhasm: xmm8 ^= xmm4 | ||
7785 | # asm 1: pxor <xmm4=int6464#5,<xmm8=int6464#10 | ||
7786 | # asm 2: pxor <xmm4=%xmm4,<xmm8=%xmm9 | ||
7787 | pxor %xmm4,%xmm9 | ||
7788 | |||
7789 | # qhasm: xmm11 = xmm15 | ||
7790 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
7791 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
7792 | movdqa %xmm13,%xmm10 | ||
7793 | |||
7794 | # qhasm: xmm11 ^= xmm14 | ||
7795 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
7796 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
7797 | pxor %xmm11,%xmm10 | ||
7798 | |||
7799 | # qhasm: xmm11 &= xmm12 | ||
7800 | # asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11 | ||
7801 | # asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10 | ||
7802 | pand %xmm8,%xmm10 | ||
7803 | |||
7804 | # qhasm: xmm12 ^= xmm8 | ||
7805 | # asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9 | ||
7806 | # asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8 | ||
7807 | pxor %xmm9,%xmm8 | ||
7808 | |||
7809 | # qhasm: xmm12 &= xmm14 | ||
7810 | # asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9 | ||
7811 | # asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8 | ||
7812 | pand %xmm11,%xmm8 | ||
7813 | |||
7814 | # qhasm: xmm8 &= xmm15 | ||
7815 | # asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10 | ||
7816 | # asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9 | ||
7817 | pand %xmm13,%xmm9 | ||
7818 | |||
7819 | # qhasm: xmm8 ^= xmm12 | ||
7820 | # asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10 | ||
7821 | # asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9 | ||
7822 | pxor %xmm8,%xmm9 | ||
7823 | |||
7824 | # qhasm: xmm12 ^= xmm11 | ||
7825 | # asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9 | ||
7826 | # asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8 | ||
7827 | pxor %xmm10,%xmm8 | ||
7828 | |||
7829 | # qhasm: xmm10 = xmm13 | ||
7830 | # asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11 | ||
7831 | # asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10 | ||
7832 | movdqa %xmm15,%xmm10 | ||
7833 | |||
7834 | # qhasm: xmm10 ^= xmm9 | ||
7835 | # asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11 | ||
7836 | # asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10 | ||
7837 | pxor %xmm12,%xmm10 | ||
7838 | |||
7839 | # qhasm: xmm10 &= xmm3 | ||
7840 | # asm 1: pand <xmm3=int6464#4,<xmm10=int6464#11 | ||
7841 | # asm 2: pand <xmm3=%xmm3,<xmm10=%xmm10 | ||
7842 | pand %xmm3,%xmm10 | ||
7843 | |||
7844 | # qhasm: xmm3 ^= xmm4 | ||
7845 | # asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4 | ||
7846 | # asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3 | ||
7847 | pxor %xmm4,%xmm3 | ||
7848 | |||
7849 | # qhasm: xmm3 &= xmm9 | ||
7850 | # asm 1: pand <xmm9=int6464#13,<xmm3=int6464#4 | ||
7851 | # asm 2: pand <xmm9=%xmm12,<xmm3=%xmm3 | ||
7852 | pand %xmm12,%xmm3 | ||
7853 | |||
7854 | # qhasm: xmm4 &= xmm13 | ||
7855 | # asm 1: pand <xmm13=int6464#16,<xmm4=int6464#5 | ||
7856 | # asm 2: pand <xmm13=%xmm15,<xmm4=%xmm4 | ||
7857 | pand %xmm15,%xmm4 | ||
7858 | |||
7859 | # qhasm: xmm3 ^= xmm4 | ||
7860 | # asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4 | ||
7861 | # asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3 | ||
7862 | pxor %xmm4,%xmm3 | ||
7863 | |||
7864 | # qhasm: xmm4 ^= xmm10 | ||
7865 | # asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5 | ||
7866 | # asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4 | ||
7867 | pxor %xmm10,%xmm4 | ||
7868 | |||
7869 | # qhasm: xmm15 ^= xmm13 | ||
7870 | # asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14 | ||
7871 | # asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13 | ||
7872 | pxor %xmm15,%xmm13 | ||
7873 | |||
7874 | # qhasm: xmm14 ^= xmm9 | ||
7875 | # asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12 | ||
7876 | # asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11 | ||
7877 | pxor %xmm12,%xmm11 | ||
7878 | |||
7879 | # qhasm: xmm11 = xmm15 | ||
7880 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
7881 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
7882 | movdqa %xmm13,%xmm10 | ||
7883 | |||
7884 | # qhasm: xmm11 ^= xmm14 | ||
7885 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
7886 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
7887 | pxor %xmm11,%xmm10 | ||
7888 | |||
7889 | # qhasm: xmm11 &= xmm5 | ||
7890 | # asm 1: pand <xmm5=int6464#6,<xmm11=int6464#11 | ||
7891 | # asm 2: pand <xmm5=%xmm5,<xmm11=%xmm10 | ||
7892 | pand %xmm5,%xmm10 | ||
7893 | |||
7894 | # qhasm: xmm5 ^= xmm1 | ||
7895 | # asm 1: pxor <xmm1=int6464#2,<xmm5=int6464#6 | ||
7896 | # asm 2: pxor <xmm1=%xmm1,<xmm5=%xmm5 | ||
7897 | pxor %xmm1,%xmm5 | ||
7898 | |||
7899 | # qhasm: xmm5 &= xmm14 | ||
7900 | # asm 1: pand <xmm14=int6464#12,<xmm5=int6464#6 | ||
7901 | # asm 2: pand <xmm14=%xmm11,<xmm5=%xmm5 | ||
7902 | pand %xmm11,%xmm5 | ||
7903 | |||
7904 | # qhasm: xmm1 &= xmm15 | ||
7905 | # asm 1: pand <xmm15=int6464#14,<xmm1=int6464#2 | ||
7906 | # asm 2: pand <xmm15=%xmm13,<xmm1=%xmm1 | ||
7907 | pand %xmm13,%xmm1 | ||
7908 | |||
7909 | # qhasm: xmm5 ^= xmm1 | ||
7910 | # asm 1: pxor <xmm1=int6464#2,<xmm5=int6464#6 | ||
7911 | # asm 2: pxor <xmm1=%xmm1,<xmm5=%xmm5 | ||
7912 | pxor %xmm1,%xmm5 | ||
7913 | |||
7914 | # qhasm: xmm1 ^= xmm11 | ||
7915 | # asm 1: pxor <xmm11=int6464#11,<xmm1=int6464#2 | ||
7916 | # asm 2: pxor <xmm11=%xmm10,<xmm1=%xmm1 | ||
7917 | pxor %xmm10,%xmm1 | ||
7918 | |||
7919 | # qhasm: xmm5 ^= xmm12 | ||
7920 | # asm 1: pxor <xmm12=int6464#9,<xmm5=int6464#6 | ||
7921 | # asm 2: pxor <xmm12=%xmm8,<xmm5=%xmm5 | ||
7922 | pxor %xmm8,%xmm5 | ||
7923 | |||
7924 | # qhasm: xmm3 ^= xmm12 | ||
7925 | # asm 1: pxor <xmm12=int6464#9,<xmm3=int6464#4 | ||
7926 | # asm 2: pxor <xmm12=%xmm8,<xmm3=%xmm3 | ||
7927 | pxor %xmm8,%xmm3 | ||
7928 | |||
7929 | # qhasm: xmm1 ^= xmm8 | ||
7930 | # asm 1: pxor <xmm8=int6464#10,<xmm1=int6464#2 | ||
7931 | # asm 2: pxor <xmm8=%xmm9,<xmm1=%xmm1 | ||
7932 | pxor %xmm9,%xmm1 | ||
7933 | |||
7934 | # qhasm: xmm4 ^= xmm8 | ||
7935 | # asm 1: pxor <xmm8=int6464#10,<xmm4=int6464#5 | ||
7936 | # asm 2: pxor <xmm8=%xmm9,<xmm4=%xmm4 | ||
7937 | pxor %xmm9,%xmm4 | ||
7938 | |||
7939 | # qhasm: xmm5 ^= xmm0 | ||
7940 | # asm 1: pxor <xmm0=int6464#1,<xmm5=int6464#6 | ||
7941 | # asm 2: pxor <xmm0=%xmm0,<xmm5=%xmm5 | ||
7942 | pxor %xmm0,%xmm5 | ||
7943 | |||
7944 | # qhasm: xmm1 ^= xmm2 | ||
7945 | # asm 1: pxor <xmm2=int6464#3,<xmm1=int6464#2 | ||
7946 | # asm 2: pxor <xmm2=%xmm2,<xmm1=%xmm1 | ||
7947 | pxor %xmm2,%xmm1 | ||
7948 | |||
7949 | # qhasm: xmm3 ^= xmm5 | ||
7950 | # asm 1: pxor <xmm5=int6464#6,<xmm3=int6464#4 | ||
7951 | # asm 2: pxor <xmm5=%xmm5,<xmm3=%xmm3 | ||
7952 | pxor %xmm5,%xmm3 | ||
7953 | |||
7954 | # qhasm: xmm2 ^= xmm0 | ||
7955 | # asm 1: pxor <xmm0=int6464#1,<xmm2=int6464#3 | ||
7956 | # asm 2: pxor <xmm0=%xmm0,<xmm2=%xmm2 | ||
7957 | pxor %xmm0,%xmm2 | ||
7958 | |||
7959 | # qhasm: xmm0 ^= xmm1 | ||
7960 | # asm 1: pxor <xmm1=int6464#2,<xmm0=int6464#1 | ||
7961 | # asm 2: pxor <xmm1=%xmm1,<xmm0=%xmm0 | ||
7962 | pxor %xmm1,%xmm0 | ||
7963 | |||
7964 | # qhasm: xmm1 ^= xmm7 | ||
7965 | # asm 1: pxor <xmm7=int6464#8,<xmm1=int6464#2 | ||
7966 | # asm 2: pxor <xmm7=%xmm7,<xmm1=%xmm1 | ||
7967 | pxor %xmm7,%xmm1 | ||
7968 | |||
7969 | # qhasm: xmm7 ^= xmm4 | ||
7970 | # asm 1: pxor <xmm4=int6464#5,<xmm7=int6464#8 | ||
7971 | # asm 2: pxor <xmm4=%xmm4,<xmm7=%xmm7 | ||
7972 | pxor %xmm4,%xmm7 | ||
7973 | |||
7974 | # qhasm: xmm3 ^= xmm7 | ||
7975 | # asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4 | ||
7976 | # asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3 | ||
7977 | pxor %xmm7,%xmm3 | ||
7978 | |||
7979 | # qhasm: xmm4 ^= xmm6 | ||
7980 | # asm 1: pxor <xmm6=int6464#7,<xmm4=int6464#5 | ||
7981 | # asm 2: pxor <xmm6=%xmm6,<xmm4=%xmm4 | ||
7982 | pxor %xmm6,%xmm4 | ||
7983 | |||
7984 | # qhasm: xmm6 ^= xmm7 | ||
7985 | # asm 1: pxor <xmm7=int6464#8,<xmm6=int6464#7 | ||
7986 | # asm 2: pxor <xmm7=%xmm7,<xmm6=%xmm6 | ||
7987 | pxor %xmm7,%xmm6 | ||
7988 | |||
7989 | # qhasm: xmm2 ^= xmm6 | ||
7990 | # asm 1: pxor <xmm6=int6464#7,<xmm2=int6464#3 | ||
7991 | # asm 2: pxor <xmm6=%xmm6,<xmm2=%xmm2 | ||
7992 | pxor %xmm6,%xmm2 | ||
7993 | |||
7994 | # qhasm: xmm5 ^= RCON | ||
7995 | # asm 1: pxor RCON,<xmm5=int6464#6 | ||
7996 | # asm 2: pxor RCON,<xmm5=%xmm5 | ||
7997 | pxor RCON,%xmm5 | ||
7998 | |||
7999 | # qhasm: shuffle bytes of xmm0 by EXPB0 | ||
8000 | # asm 1: pshufb EXPB0,<xmm0=int6464#1 | ||
8001 | # asm 2: pshufb EXPB0,<xmm0=%xmm0 | ||
8002 | pshufb EXPB0,%xmm0 | ||
8003 | |||
8004 | # qhasm: shuffle bytes of xmm1 by EXPB0 | ||
8005 | # asm 1: pshufb EXPB0,<xmm1=int6464#2 | ||
8006 | # asm 2: pshufb EXPB0,<xmm1=%xmm1 | ||
8007 | pshufb EXPB0,%xmm1 | ||
8008 | |||
8009 | # qhasm: shuffle bytes of xmm3 by EXPB0 | ||
8010 | # asm 1: pshufb EXPB0,<xmm3=int6464#4 | ||
8011 | # asm 2: pshufb EXPB0,<xmm3=%xmm3 | ||
8012 | pshufb EXPB0,%xmm3 | ||
8013 | |||
8014 | # qhasm: shuffle bytes of xmm2 by EXPB0 | ||
8015 | # asm 1: pshufb EXPB0,<xmm2=int6464#3 | ||
8016 | # asm 2: pshufb EXPB0,<xmm2=%xmm2 | ||
8017 | pshufb EXPB0,%xmm2 | ||
8018 | |||
8019 | # qhasm: shuffle bytes of xmm6 by EXPB0 | ||
8020 | # asm 1: pshufb EXPB0,<xmm6=int6464#7 | ||
8021 | # asm 2: pshufb EXPB0,<xmm6=%xmm6 | ||
8022 | pshufb EXPB0,%xmm6 | ||
8023 | |||
8024 | # qhasm: shuffle bytes of xmm5 by EXPB0 | ||
8025 | # asm 1: pshufb EXPB0,<xmm5=int6464#6 | ||
8026 | # asm 2: pshufb EXPB0,<xmm5=%xmm5 | ||
8027 | pshufb EXPB0,%xmm5 | ||
8028 | |||
8029 | # qhasm: shuffle bytes of xmm4 by EXPB0 | ||
8030 | # asm 1: pshufb EXPB0,<xmm4=int6464#5 | ||
8031 | # asm 2: pshufb EXPB0,<xmm4=%xmm4 | ||
8032 | pshufb EXPB0,%xmm4 | ||
8033 | |||
8034 | # qhasm: shuffle bytes of xmm7 by EXPB0 | ||
8035 | # asm 1: pshufb EXPB0,<xmm7=int6464#8 | ||
8036 | # asm 2: pshufb EXPB0,<xmm7=%xmm7 | ||
8037 | pshufb EXPB0,%xmm7 | ||
8038 | |||
8039 | # qhasm: xmm8 = *(int128 *)(c + 640) | ||
8040 | # asm 1: movdqa 640(<c=int64#1),>xmm8=int6464#9 | ||
8041 | # asm 2: movdqa 640(<c=%rdi),>xmm8=%xmm8 | ||
8042 | movdqa 640(%rdi),%xmm8 | ||
8043 | |||
8044 | # qhasm: xmm9 = *(int128 *)(c + 656) | ||
8045 | # asm 1: movdqa 656(<c=int64#1),>xmm9=int6464#10 | ||
8046 | # asm 2: movdqa 656(<c=%rdi),>xmm9=%xmm9 | ||
8047 | movdqa 656(%rdi),%xmm9 | ||
8048 | |||
8049 | # qhasm: xmm10 = *(int128 *)(c + 672) | ||
8050 | # asm 1: movdqa 672(<c=int64#1),>xmm10=int6464#11 | ||
8051 | # asm 2: movdqa 672(<c=%rdi),>xmm10=%xmm10 | ||
8052 | movdqa 672(%rdi),%xmm10 | ||
8053 | |||
8054 | # qhasm: xmm11 = *(int128 *)(c + 688) | ||
8055 | # asm 1: movdqa 688(<c=int64#1),>xmm11=int6464#12 | ||
8056 | # asm 2: movdqa 688(<c=%rdi),>xmm11=%xmm11 | ||
8057 | movdqa 688(%rdi),%xmm11 | ||
8058 | |||
8059 | # qhasm: xmm12 = *(int128 *)(c + 704) | ||
8060 | # asm 1: movdqa 704(<c=int64#1),>xmm12=int6464#13 | ||
8061 | # asm 2: movdqa 704(<c=%rdi),>xmm12=%xmm12 | ||
8062 | movdqa 704(%rdi),%xmm12 | ||
8063 | |||
8064 | # qhasm: xmm13 = *(int128 *)(c + 720) | ||
8065 | # asm 1: movdqa 720(<c=int64#1),>xmm13=int6464#14 | ||
8066 | # asm 2: movdqa 720(<c=%rdi),>xmm13=%xmm13 | ||
8067 | movdqa 720(%rdi),%xmm13 | ||
8068 | |||
8069 | # qhasm: xmm14 = *(int128 *)(c + 736) | ||
8070 | # asm 1: movdqa 736(<c=int64#1),>xmm14=int6464#15 | ||
8071 | # asm 2: movdqa 736(<c=%rdi),>xmm14=%xmm14 | ||
8072 | movdqa 736(%rdi),%xmm14 | ||
8073 | |||
8074 | # qhasm: xmm15 = *(int128 *)(c + 752) | ||
8075 | # asm 1: movdqa 752(<c=int64#1),>xmm15=int6464#16 | ||
8076 | # asm 2: movdqa 752(<c=%rdi),>xmm15=%xmm15 | ||
8077 | movdqa 752(%rdi),%xmm15 | ||
8078 | |||
8079 | # qhasm: xmm8 ^= ONE | ||
8080 | # asm 1: pxor ONE,<xmm8=int6464#9 | ||
8081 | # asm 2: pxor ONE,<xmm8=%xmm8 | ||
8082 | pxor ONE,%xmm8 | ||
8083 | |||
8084 | # qhasm: xmm9 ^= ONE | ||
8085 | # asm 1: pxor ONE,<xmm9=int6464#10 | ||
8086 | # asm 2: pxor ONE,<xmm9=%xmm9 | ||
8087 | pxor ONE,%xmm9 | ||
8088 | |||
8089 | # qhasm: xmm13 ^= ONE | ||
8090 | # asm 1: pxor ONE,<xmm13=int6464#14 | ||
8091 | # asm 2: pxor ONE,<xmm13=%xmm13 | ||
8092 | pxor ONE,%xmm13 | ||
8093 | |||
8094 | # qhasm: xmm14 ^= ONE | ||
8095 | # asm 1: pxor ONE,<xmm14=int6464#15 | ||
8096 | # asm 2: pxor ONE,<xmm14=%xmm14 | ||
8097 | pxor ONE,%xmm14 | ||
8098 | |||
8099 | # qhasm: xmm0 ^= xmm8 | ||
8100 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
8101 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
8102 | pxor %xmm8,%xmm0 | ||
8103 | |||
8104 | # qhasm: xmm1 ^= xmm9 | ||
8105 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
8106 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
8107 | pxor %xmm9,%xmm1 | ||
8108 | |||
8109 | # qhasm: xmm3 ^= xmm10 | ||
8110 | # asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4 | ||
8111 | # asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3 | ||
8112 | pxor %xmm10,%xmm3 | ||
8113 | |||
8114 | # qhasm: xmm2 ^= xmm11 | ||
8115 | # asm 1: pxor <xmm11=int6464#12,<xmm2=int6464#3 | ||
8116 | # asm 2: pxor <xmm11=%xmm11,<xmm2=%xmm2 | ||
8117 | pxor %xmm11,%xmm2 | ||
8118 | |||
8119 | # qhasm: xmm6 ^= xmm12 | ||
8120 | # asm 1: pxor <xmm12=int6464#13,<xmm6=int6464#7 | ||
8121 | # asm 2: pxor <xmm12=%xmm12,<xmm6=%xmm6 | ||
8122 | pxor %xmm12,%xmm6 | ||
8123 | |||
8124 | # qhasm: xmm5 ^= xmm13 | ||
8125 | # asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6 | ||
8126 | # asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5 | ||
8127 | pxor %xmm13,%xmm5 | ||
8128 | |||
8129 | # qhasm: xmm4 ^= xmm14 | ||
8130 | # asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5 | ||
8131 | # asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4 | ||
8132 | pxor %xmm14,%xmm4 | ||
8133 | |||
8134 | # qhasm: xmm7 ^= xmm15 | ||
8135 | # asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8 | ||
8136 | # asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7 | ||
8137 | pxor %xmm15,%xmm7 | ||
8138 | |||
8139 | # qhasm: uint32323232 xmm8 >>= 8 | ||
8140 | # asm 1: psrld $8,<xmm8=int6464#9 | ||
8141 | # asm 2: psrld $8,<xmm8=%xmm8 | ||
8142 | psrld $8,%xmm8 | ||
8143 | |||
8144 | # qhasm: uint32323232 xmm9 >>= 8 | ||
8145 | # asm 1: psrld $8,<xmm9=int6464#10 | ||
8146 | # asm 2: psrld $8,<xmm9=%xmm9 | ||
8147 | psrld $8,%xmm9 | ||
8148 | |||
8149 | # qhasm: uint32323232 xmm10 >>= 8 | ||
8150 | # asm 1: psrld $8,<xmm10=int6464#11 | ||
8151 | # asm 2: psrld $8,<xmm10=%xmm10 | ||
8152 | psrld $8,%xmm10 | ||
8153 | |||
8154 | # qhasm: uint32323232 xmm11 >>= 8 | ||
8155 | # asm 1: psrld $8,<xmm11=int6464#12 | ||
8156 | # asm 2: psrld $8,<xmm11=%xmm11 | ||
8157 | psrld $8,%xmm11 | ||
8158 | |||
8159 | # qhasm: uint32323232 xmm12 >>= 8 | ||
8160 | # asm 1: psrld $8,<xmm12=int6464#13 | ||
8161 | # asm 2: psrld $8,<xmm12=%xmm12 | ||
8162 | psrld $8,%xmm12 | ||
8163 | |||
8164 | # qhasm: uint32323232 xmm13 >>= 8 | ||
8165 | # asm 1: psrld $8,<xmm13=int6464#14 | ||
8166 | # asm 2: psrld $8,<xmm13=%xmm13 | ||
8167 | psrld $8,%xmm13 | ||
8168 | |||
8169 | # qhasm: uint32323232 xmm14 >>= 8 | ||
8170 | # asm 1: psrld $8,<xmm14=int6464#15 | ||
8171 | # asm 2: psrld $8,<xmm14=%xmm14 | ||
8172 | psrld $8,%xmm14 | ||
8173 | |||
8174 | # qhasm: uint32323232 xmm15 >>= 8 | ||
8175 | # asm 1: psrld $8,<xmm15=int6464#16 | ||
8176 | # asm 2: psrld $8,<xmm15=%xmm15 | ||
8177 | psrld $8,%xmm15 | ||
8178 | |||
8179 | # qhasm: xmm0 ^= xmm8 | ||
8180 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
8181 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
8182 | pxor %xmm8,%xmm0 | ||
8183 | |||
8184 | # qhasm: xmm1 ^= xmm9 | ||
8185 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
8186 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
8187 | pxor %xmm9,%xmm1 | ||
8188 | |||
8189 | # qhasm: xmm3 ^= xmm10 | ||
8190 | # asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4 | ||
8191 | # asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3 | ||
8192 | pxor %xmm10,%xmm3 | ||
8193 | |||
8194 | # qhasm: xmm2 ^= xmm11 | ||
8195 | # asm 1: pxor <xmm11=int6464#12,<xmm2=int6464#3 | ||
8196 | # asm 2: pxor <xmm11=%xmm11,<xmm2=%xmm2 | ||
8197 | pxor %xmm11,%xmm2 | ||
8198 | |||
8199 | # qhasm: xmm6 ^= xmm12 | ||
8200 | # asm 1: pxor <xmm12=int6464#13,<xmm6=int6464#7 | ||
8201 | # asm 2: pxor <xmm12=%xmm12,<xmm6=%xmm6 | ||
8202 | pxor %xmm12,%xmm6 | ||
8203 | |||
8204 | # qhasm: xmm5 ^= xmm13 | ||
8205 | # asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6 | ||
8206 | # asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5 | ||
8207 | pxor %xmm13,%xmm5 | ||
8208 | |||
8209 | # qhasm: xmm4 ^= xmm14 | ||
8210 | # asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5 | ||
8211 | # asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4 | ||
8212 | pxor %xmm14,%xmm4 | ||
8213 | |||
8214 | # qhasm: xmm7 ^= xmm15 | ||
8215 | # asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8 | ||
8216 | # asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7 | ||
8217 | pxor %xmm15,%xmm7 | ||
8218 | |||
8219 | # qhasm: uint32323232 xmm8 >>= 8 | ||
8220 | # asm 1: psrld $8,<xmm8=int6464#9 | ||
8221 | # asm 2: psrld $8,<xmm8=%xmm8 | ||
8222 | psrld $8,%xmm8 | ||
8223 | |||
8224 | # qhasm: uint32323232 xmm9 >>= 8 | ||
8225 | # asm 1: psrld $8,<xmm9=int6464#10 | ||
8226 | # asm 2: psrld $8,<xmm9=%xmm9 | ||
8227 | psrld $8,%xmm9 | ||
8228 | |||
8229 | # qhasm: uint32323232 xmm10 >>= 8 | ||
8230 | # asm 1: psrld $8,<xmm10=int6464#11 | ||
8231 | # asm 2: psrld $8,<xmm10=%xmm10 | ||
8232 | psrld $8,%xmm10 | ||
8233 | |||
8234 | # qhasm: uint32323232 xmm11 >>= 8 | ||
8235 | # asm 1: psrld $8,<xmm11=int6464#12 | ||
8236 | # asm 2: psrld $8,<xmm11=%xmm11 | ||
8237 | psrld $8,%xmm11 | ||
8238 | |||
8239 | # qhasm: uint32323232 xmm12 >>= 8 | ||
8240 | # asm 1: psrld $8,<xmm12=int6464#13 | ||
8241 | # asm 2: psrld $8,<xmm12=%xmm12 | ||
8242 | psrld $8,%xmm12 | ||
8243 | |||
8244 | # qhasm: uint32323232 xmm13 >>= 8 | ||
8245 | # asm 1: psrld $8,<xmm13=int6464#14 | ||
8246 | # asm 2: psrld $8,<xmm13=%xmm13 | ||
8247 | psrld $8,%xmm13 | ||
8248 | |||
8249 | # qhasm: uint32323232 xmm14 >>= 8 | ||
8250 | # asm 1: psrld $8,<xmm14=int6464#15 | ||
8251 | # asm 2: psrld $8,<xmm14=%xmm14 | ||
8252 | psrld $8,%xmm14 | ||
8253 | |||
8254 | # qhasm: uint32323232 xmm15 >>= 8 | ||
8255 | # asm 1: psrld $8,<xmm15=int6464#16 | ||
8256 | # asm 2: psrld $8,<xmm15=%xmm15 | ||
8257 | psrld $8,%xmm15 | ||
8258 | |||
8259 | # qhasm: xmm0 ^= xmm8 | ||
8260 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
8261 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
8262 | pxor %xmm8,%xmm0 | ||
8263 | |||
8264 | # qhasm: xmm1 ^= xmm9 | ||
8265 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
8266 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
8267 | pxor %xmm9,%xmm1 | ||
8268 | |||
8269 | # qhasm: xmm3 ^= xmm10 | ||
8270 | # asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4 | ||
8271 | # asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3 | ||
8272 | pxor %xmm10,%xmm3 | ||
8273 | |||
8274 | # qhasm: xmm2 ^= xmm11 | ||
8275 | # asm 1: pxor <xmm11=int6464#12,<xmm2=int6464#3 | ||
8276 | # asm 2: pxor <xmm11=%xmm11,<xmm2=%xmm2 | ||
8277 | pxor %xmm11,%xmm2 | ||
8278 | |||
8279 | # qhasm: xmm6 ^= xmm12 | ||
8280 | # asm 1: pxor <xmm12=int6464#13,<xmm6=int6464#7 | ||
8281 | # asm 2: pxor <xmm12=%xmm12,<xmm6=%xmm6 | ||
8282 | pxor %xmm12,%xmm6 | ||
8283 | |||
8284 | # qhasm: xmm5 ^= xmm13 | ||
8285 | # asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6 | ||
8286 | # asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5 | ||
8287 | pxor %xmm13,%xmm5 | ||
8288 | |||
8289 | # qhasm: xmm4 ^= xmm14 | ||
8290 | # asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5 | ||
8291 | # asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4 | ||
8292 | pxor %xmm14,%xmm4 | ||
8293 | |||
8294 | # qhasm: xmm7 ^= xmm15 | ||
8295 | # asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8 | ||
8296 | # asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7 | ||
8297 | pxor %xmm15,%xmm7 | ||
8298 | |||
8299 | # qhasm: uint32323232 xmm8 >>= 8 | ||
8300 | # asm 1: psrld $8,<xmm8=int6464#9 | ||
8301 | # asm 2: psrld $8,<xmm8=%xmm8 | ||
8302 | psrld $8,%xmm8 | ||
8303 | |||
8304 | # qhasm: uint32323232 xmm9 >>= 8 | ||
8305 | # asm 1: psrld $8,<xmm9=int6464#10 | ||
8306 | # asm 2: psrld $8,<xmm9=%xmm9 | ||
8307 | psrld $8,%xmm9 | ||
8308 | |||
8309 | # qhasm: uint32323232 xmm10 >>= 8 | ||
8310 | # asm 1: psrld $8,<xmm10=int6464#11 | ||
8311 | # asm 2: psrld $8,<xmm10=%xmm10 | ||
8312 | psrld $8,%xmm10 | ||
8313 | |||
8314 | # qhasm: uint32323232 xmm11 >>= 8 | ||
8315 | # asm 1: psrld $8,<xmm11=int6464#12 | ||
8316 | # asm 2: psrld $8,<xmm11=%xmm11 | ||
8317 | psrld $8,%xmm11 | ||
8318 | |||
8319 | # qhasm: uint32323232 xmm12 >>= 8 | ||
8320 | # asm 1: psrld $8,<xmm12=int6464#13 | ||
8321 | # asm 2: psrld $8,<xmm12=%xmm12 | ||
8322 | psrld $8,%xmm12 | ||
8323 | |||
8324 | # qhasm: uint32323232 xmm13 >>= 8 | ||
8325 | # asm 1: psrld $8,<xmm13=int6464#14 | ||
8326 | # asm 2: psrld $8,<xmm13=%xmm13 | ||
8327 | psrld $8,%xmm13 | ||
8328 | |||
8329 | # qhasm: uint32323232 xmm14 >>= 8 | ||
8330 | # asm 1: psrld $8,<xmm14=int6464#15 | ||
8331 | # asm 2: psrld $8,<xmm14=%xmm14 | ||
8332 | psrld $8,%xmm14 | ||
8333 | |||
8334 | # qhasm: uint32323232 xmm15 >>= 8 | ||
8335 | # asm 1: psrld $8,<xmm15=int6464#16 | ||
8336 | # asm 2: psrld $8,<xmm15=%xmm15 | ||
8337 | psrld $8,%xmm15 | ||
8338 | |||
8339 | # qhasm: xmm0 ^= xmm8 | ||
8340 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
8341 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
8342 | pxor %xmm8,%xmm0 | ||
8343 | |||
8344 | # qhasm: xmm1 ^= xmm9 | ||
8345 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
8346 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
8347 | pxor %xmm9,%xmm1 | ||
8348 | |||
8349 | # qhasm: xmm3 ^= xmm10 | ||
8350 | # asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4 | ||
8351 | # asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3 | ||
8352 | pxor %xmm10,%xmm3 | ||
8353 | |||
8354 | # qhasm: xmm2 ^= xmm11 | ||
8355 | # asm 1: pxor <xmm11=int6464#12,<xmm2=int6464#3 | ||
8356 | # asm 2: pxor <xmm11=%xmm11,<xmm2=%xmm2 | ||
8357 | pxor %xmm11,%xmm2 | ||
8358 | |||
8359 | # qhasm: xmm6 ^= xmm12 | ||
8360 | # asm 1: pxor <xmm12=int6464#13,<xmm6=int6464#7 | ||
8361 | # asm 2: pxor <xmm12=%xmm12,<xmm6=%xmm6 | ||
8362 | pxor %xmm12,%xmm6 | ||
8363 | |||
8364 | # qhasm: xmm5 ^= xmm13 | ||
8365 | # asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6 | ||
8366 | # asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5 | ||
8367 | pxor %xmm13,%xmm5 | ||
8368 | |||
8369 | # qhasm: xmm4 ^= xmm14 | ||
8370 | # asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5 | ||
8371 | # asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4 | ||
8372 | pxor %xmm14,%xmm4 | ||
8373 | |||
8374 | # qhasm: xmm7 ^= xmm15 | ||
8375 | # asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8 | ||
8376 | # asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7 | ||
8377 | pxor %xmm15,%xmm7 | ||
8378 | |||
8379 | # qhasm: *(int128 *)(c + 768) = xmm0 | ||
8380 | # asm 1: movdqa <xmm0=int6464#1,768(<c=int64#1) | ||
8381 | # asm 2: movdqa <xmm0=%xmm0,768(<c=%rdi) | ||
8382 | movdqa %xmm0,768(%rdi) | ||
8383 | |||
8384 | # qhasm: *(int128 *)(c + 784) = xmm1 | ||
8385 | # asm 1: movdqa <xmm1=int6464#2,784(<c=int64#1) | ||
8386 | # asm 2: movdqa <xmm1=%xmm1,784(<c=%rdi) | ||
8387 | movdqa %xmm1,784(%rdi) | ||
8388 | |||
8389 | # qhasm: *(int128 *)(c + 800) = xmm3 | ||
8390 | # asm 1: movdqa <xmm3=int6464#4,800(<c=int64#1) | ||
8391 | # asm 2: movdqa <xmm3=%xmm3,800(<c=%rdi) | ||
8392 | movdqa %xmm3,800(%rdi) | ||
8393 | |||
8394 | # qhasm: *(int128 *)(c + 816) = xmm2 | ||
8395 | # asm 1: movdqa <xmm2=int6464#3,816(<c=int64#1) | ||
8396 | # asm 2: movdqa <xmm2=%xmm2,816(<c=%rdi) | ||
8397 | movdqa %xmm2,816(%rdi) | ||
8398 | |||
8399 | # qhasm: *(int128 *)(c + 832) = xmm6 | ||
8400 | # asm 1: movdqa <xmm6=int6464#7,832(<c=int64#1) | ||
8401 | # asm 2: movdqa <xmm6=%xmm6,832(<c=%rdi) | ||
8402 | movdqa %xmm6,832(%rdi) | ||
8403 | |||
8404 | # qhasm: *(int128 *)(c + 848) = xmm5 | ||
8405 | # asm 1: movdqa <xmm5=int6464#6,848(<c=int64#1) | ||
8406 | # asm 2: movdqa <xmm5=%xmm5,848(<c=%rdi) | ||
8407 | movdqa %xmm5,848(%rdi) | ||
8408 | |||
8409 | # qhasm: *(int128 *)(c + 864) = xmm4 | ||
8410 | # asm 1: movdqa <xmm4=int6464#5,864(<c=int64#1) | ||
8411 | # asm 2: movdqa <xmm4=%xmm4,864(<c=%rdi) | ||
8412 | movdqa %xmm4,864(%rdi) | ||
8413 | |||
8414 | # qhasm: *(int128 *)(c + 880) = xmm7 | ||
8415 | # asm 1: movdqa <xmm7=int6464#8,880(<c=int64#1) | ||
8416 | # asm 2: movdqa <xmm7=%xmm7,880(<c=%rdi) | ||
8417 | movdqa %xmm7,880(%rdi) | ||
8418 | |||
8419 | # qhasm: xmm0 ^= ONE | ||
8420 | # asm 1: pxor ONE,<xmm0=int6464#1 | ||
8421 | # asm 2: pxor ONE,<xmm0=%xmm0 | ||
8422 | pxor ONE,%xmm0 | ||
8423 | |||
8424 | # qhasm: xmm1 ^= ONE | ||
8425 | # asm 1: pxor ONE,<xmm1=int6464#2 | ||
8426 | # asm 2: pxor ONE,<xmm1=%xmm1 | ||
8427 | pxor ONE,%xmm1 | ||
8428 | |||
8429 | # qhasm: xmm5 ^= ONE | ||
8430 | # asm 1: pxor ONE,<xmm5=int6464#6 | ||
8431 | # asm 2: pxor ONE,<xmm5=%xmm5 | ||
8432 | pxor ONE,%xmm5 | ||
8433 | |||
8434 | # qhasm: xmm4 ^= ONE | ||
8435 | # asm 1: pxor ONE,<xmm4=int6464#5 | ||
8436 | # asm 2: pxor ONE,<xmm4=%xmm4 | ||
8437 | pxor ONE,%xmm4 | ||
8438 | |||
8439 | # qhasm: shuffle bytes of xmm0 by ROTB | ||
8440 | # asm 1: pshufb ROTB,<xmm0=int6464#1 | ||
8441 | # asm 2: pshufb ROTB,<xmm0=%xmm0 | ||
8442 | pshufb ROTB,%xmm0 | ||
8443 | |||
8444 | # qhasm: shuffle bytes of xmm1 by ROTB | ||
8445 | # asm 1: pshufb ROTB,<xmm1=int6464#2 | ||
8446 | # asm 2: pshufb ROTB,<xmm1=%xmm1 | ||
8447 | pshufb ROTB,%xmm1 | ||
8448 | |||
8449 | # qhasm: shuffle bytes of xmm3 by ROTB | ||
8450 | # asm 1: pshufb ROTB,<xmm3=int6464#4 | ||
8451 | # asm 2: pshufb ROTB,<xmm3=%xmm3 | ||
8452 | pshufb ROTB,%xmm3 | ||
8453 | |||
8454 | # qhasm: shuffle bytes of xmm2 by ROTB | ||
8455 | # asm 1: pshufb ROTB,<xmm2=int6464#3 | ||
8456 | # asm 2: pshufb ROTB,<xmm2=%xmm2 | ||
8457 | pshufb ROTB,%xmm2 | ||
8458 | |||
8459 | # qhasm: shuffle bytes of xmm6 by ROTB | ||
8460 | # asm 1: pshufb ROTB,<xmm6=int6464#7 | ||
8461 | # asm 2: pshufb ROTB,<xmm6=%xmm6 | ||
8462 | pshufb ROTB,%xmm6 | ||
8463 | |||
8464 | # qhasm: shuffle bytes of xmm5 by ROTB | ||
8465 | # asm 1: pshufb ROTB,<xmm5=int6464#6 | ||
8466 | # asm 2: pshufb ROTB,<xmm5=%xmm5 | ||
8467 | pshufb ROTB,%xmm5 | ||
8468 | |||
8469 | # qhasm: shuffle bytes of xmm4 by ROTB | ||
8470 | # asm 1: pshufb ROTB,<xmm4=int6464#5 | ||
8471 | # asm 2: pshufb ROTB,<xmm4=%xmm4 | ||
8472 | pshufb ROTB,%xmm4 | ||
8473 | |||
8474 | # qhasm: shuffle bytes of xmm7 by ROTB | ||
8475 | # asm 1: pshufb ROTB,<xmm7=int6464#8 | ||
8476 | # asm 2: pshufb ROTB,<xmm7=%xmm7 | ||
8477 | pshufb ROTB,%xmm7 | ||
8478 | |||
8479 | # qhasm: xmm5 ^= xmm4 | ||
8480 | # asm 1: pxor <xmm4=int6464#5,<xmm5=int6464#6 | ||
8481 | # asm 2: pxor <xmm4=%xmm4,<xmm5=%xmm5 | ||
8482 | pxor %xmm4,%xmm5 | ||
8483 | |||
8484 | # qhasm: xmm3 ^= xmm1 | ||
8485 | # asm 1: pxor <xmm1=int6464#2,<xmm3=int6464#4 | ||
8486 | # asm 2: pxor <xmm1=%xmm1,<xmm3=%xmm3 | ||
8487 | pxor %xmm1,%xmm3 | ||
8488 | |||
8489 | # qhasm: xmm5 ^= xmm0 | ||
8490 | # asm 1: pxor <xmm0=int6464#1,<xmm5=int6464#6 | ||
8491 | # asm 2: pxor <xmm0=%xmm0,<xmm5=%xmm5 | ||
8492 | pxor %xmm0,%xmm5 | ||
8493 | |||
8494 | # qhasm: xmm4 ^= xmm3 | ||
8495 | # asm 1: pxor <xmm3=int6464#4,<xmm4=int6464#5 | ||
8496 | # asm 2: pxor <xmm3=%xmm3,<xmm4=%xmm4 | ||
8497 | pxor %xmm3,%xmm4 | ||
8498 | |||
8499 | # qhasm: xmm2 ^= xmm0 | ||
8500 | # asm 1: pxor <xmm0=int6464#1,<xmm2=int6464#3 | ||
8501 | # asm 2: pxor <xmm0=%xmm0,<xmm2=%xmm2 | ||
8502 | pxor %xmm0,%xmm2 | ||
8503 | |||
8504 | # qhasm: xmm4 ^= xmm2 | ||
8505 | # asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5 | ||
8506 | # asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4 | ||
8507 | pxor %xmm2,%xmm4 | ||
8508 | |||
8509 | # qhasm: xmm2 ^= xmm7 | ||
8510 | # asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3 | ||
8511 | # asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2 | ||
8512 | pxor %xmm7,%xmm2 | ||
8513 | |||
8514 | # qhasm: xmm2 ^= xmm6 | ||
8515 | # asm 1: pxor <xmm6=int6464#7,<xmm2=int6464#3 | ||
8516 | # asm 2: pxor <xmm6=%xmm6,<xmm2=%xmm2 | ||
8517 | pxor %xmm6,%xmm2 | ||
8518 | |||
8519 | # qhasm: xmm7 ^= xmm5 | ||
8520 | # asm 1: pxor <xmm5=int6464#6,<xmm7=int6464#8 | ||
8521 | # asm 2: pxor <xmm5=%xmm5,<xmm7=%xmm7 | ||
8522 | pxor %xmm5,%xmm7 | ||
8523 | |||
8524 | # qhasm: xmm2 ^= xmm1 | ||
8525 | # asm 1: pxor <xmm1=int6464#2,<xmm2=int6464#3 | ||
8526 | # asm 2: pxor <xmm1=%xmm1,<xmm2=%xmm2 | ||
8527 | pxor %xmm1,%xmm2 | ||
8528 | |||
8529 | # qhasm: xmm6 ^= xmm5 | ||
8530 | # asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7 | ||
8531 | # asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6 | ||
8532 | pxor %xmm5,%xmm6 | ||
8533 | |||
8534 | # qhasm: xmm3 ^= xmm7 | ||
8535 | # asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4 | ||
8536 | # asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3 | ||
8537 | pxor %xmm7,%xmm3 | ||
8538 | |||
8539 | # qhasm: xmm1 ^= xmm5 | ||
8540 | # asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2 | ||
8541 | # asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1 | ||
8542 | pxor %xmm5,%xmm1 | ||
8543 | |||
8544 | # qhasm: xmm11 = xmm7 | ||
8545 | # asm 1: movdqa <xmm7=int6464#8,>xmm11=int6464#9 | ||
8546 | # asm 2: movdqa <xmm7=%xmm7,>xmm11=%xmm8 | ||
8547 | movdqa %xmm7,%xmm8 | ||
8548 | |||
8549 | # qhasm: xmm10 = xmm1 | ||
8550 | # asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10 | ||
8551 | # asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9 | ||
8552 | movdqa %xmm1,%xmm9 | ||
8553 | |||
8554 | # qhasm: xmm9 = xmm5 | ||
8555 | # asm 1: movdqa <xmm5=int6464#6,>xmm9=int6464#11 | ||
8556 | # asm 2: movdqa <xmm5=%xmm5,>xmm9=%xmm10 | ||
8557 | movdqa %xmm5,%xmm10 | ||
8558 | |||
8559 | # qhasm: xmm13 = xmm3 | ||
8560 | # asm 1: movdqa <xmm3=int6464#4,>xmm13=int6464#12 | ||
8561 | # asm 2: movdqa <xmm3=%xmm3,>xmm13=%xmm11 | ||
8562 | movdqa %xmm3,%xmm11 | ||
8563 | |||
8564 | # qhasm: xmm12 = xmm4 | ||
8565 | # asm 1: movdqa <xmm4=int6464#5,>xmm12=int6464#13 | ||
8566 | # asm 2: movdqa <xmm4=%xmm4,>xmm12=%xmm12 | ||
8567 | movdqa %xmm4,%xmm12 | ||
8568 | |||
8569 | # qhasm: xmm11 ^= xmm6 | ||
8570 | # asm 1: pxor <xmm6=int6464#7,<xmm11=int6464#9 | ||
8571 | # asm 2: pxor <xmm6=%xmm6,<xmm11=%xmm8 | ||
8572 | pxor %xmm6,%xmm8 | ||
8573 | |||
8574 | # qhasm: xmm10 ^= xmm3 | ||
8575 | # asm 1: pxor <xmm3=int6464#4,<xmm10=int6464#10 | ||
8576 | # asm 2: pxor <xmm3=%xmm3,<xmm10=%xmm9 | ||
8577 | pxor %xmm3,%xmm9 | ||
8578 | |||
8579 | # qhasm: xmm9 ^= xmm2 | ||
8580 | # asm 1: pxor <xmm2=int6464#3,<xmm9=int6464#11 | ||
8581 | # asm 2: pxor <xmm2=%xmm2,<xmm9=%xmm10 | ||
8582 | pxor %xmm2,%xmm10 | ||
8583 | |||
8584 | # qhasm: xmm13 ^= xmm6 | ||
8585 | # asm 1: pxor <xmm6=int6464#7,<xmm13=int6464#12 | ||
8586 | # asm 2: pxor <xmm6=%xmm6,<xmm13=%xmm11 | ||
8587 | pxor %xmm6,%xmm11 | ||
8588 | |||
8589 | # qhasm: xmm12 ^= xmm0 | ||
8590 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13 | ||
8591 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12 | ||
8592 | pxor %xmm0,%xmm12 | ||
8593 | |||
8594 | # qhasm: xmm14 = xmm11 | ||
8595 | # asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14 | ||
8596 | # asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13 | ||
8597 | movdqa %xmm8,%xmm13 | ||
8598 | |||
8599 | # qhasm: xmm8 = xmm10 | ||
8600 | # asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15 | ||
8601 | # asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14 | ||
8602 | movdqa %xmm9,%xmm14 | ||
8603 | |||
8604 | # qhasm: xmm15 = xmm11 | ||
8605 | # asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16 | ||
8606 | # asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15 | ||
8607 | movdqa %xmm8,%xmm15 | ||
8608 | |||
8609 | # qhasm: xmm10 |= xmm9 | ||
8610 | # asm 1: por <xmm9=int6464#11,<xmm10=int6464#10 | ||
8611 | # asm 2: por <xmm9=%xmm10,<xmm10=%xmm9 | ||
8612 | por %xmm10,%xmm9 | ||
8613 | |||
8614 | # qhasm: xmm11 |= xmm12 | ||
8615 | # asm 1: por <xmm12=int6464#13,<xmm11=int6464#9 | ||
8616 | # asm 2: por <xmm12=%xmm12,<xmm11=%xmm8 | ||
8617 | por %xmm12,%xmm8 | ||
8618 | |||
8619 | # qhasm: xmm15 ^= xmm8 | ||
8620 | # asm 1: pxor <xmm8=int6464#15,<xmm15=int6464#16 | ||
8621 | # asm 2: pxor <xmm8=%xmm14,<xmm15=%xmm15 | ||
8622 | pxor %xmm14,%xmm15 | ||
8623 | |||
8624 | # qhasm: xmm14 &= xmm12 | ||
8625 | # asm 1: pand <xmm12=int6464#13,<xmm14=int6464#14 | ||
8626 | # asm 2: pand <xmm12=%xmm12,<xmm14=%xmm13 | ||
8627 | pand %xmm12,%xmm13 | ||
8628 | |||
8629 | # qhasm: xmm8 &= xmm9 | ||
8630 | # asm 1: pand <xmm9=int6464#11,<xmm8=int6464#15 | ||
8631 | # asm 2: pand <xmm9=%xmm10,<xmm8=%xmm14 | ||
8632 | pand %xmm10,%xmm14 | ||
8633 | |||
8634 | # qhasm: xmm12 ^= xmm9 | ||
8635 | # asm 1: pxor <xmm9=int6464#11,<xmm12=int6464#13 | ||
8636 | # asm 2: pxor <xmm9=%xmm10,<xmm12=%xmm12 | ||
8637 | pxor %xmm10,%xmm12 | ||
8638 | |||
8639 | # qhasm: xmm15 &= xmm12 | ||
8640 | # asm 1: pand <xmm12=int6464#13,<xmm15=int6464#16 | ||
8641 | # asm 2: pand <xmm12=%xmm12,<xmm15=%xmm15 | ||
8642 | pand %xmm12,%xmm15 | ||
8643 | |||
8644 | # qhasm: xmm12 = xmm2 | ||
8645 | # asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#11 | ||
8646 | # asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm10 | ||
8647 | movdqa %xmm2,%xmm10 | ||
8648 | |||
8649 | # qhasm: xmm12 ^= xmm0 | ||
8650 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#11 | ||
8651 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm10 | ||
8652 | pxor %xmm0,%xmm10 | ||
8653 | |||
8654 | # qhasm: xmm13 &= xmm12 | ||
8655 | # asm 1: pand <xmm12=int6464#11,<xmm13=int6464#12 | ||
8656 | # asm 2: pand <xmm12=%xmm10,<xmm13=%xmm11 | ||
8657 | pand %xmm10,%xmm11 | ||
8658 | |||
8659 | # qhasm: xmm11 ^= xmm13 | ||
8660 | # asm 1: pxor <xmm13=int6464#12,<xmm11=int6464#9 | ||
8661 | # asm 2: pxor <xmm13=%xmm11,<xmm11=%xmm8 | ||
8662 | pxor %xmm11,%xmm8 | ||
8663 | |||
8664 | # qhasm: xmm10 ^= xmm13 | ||
8665 | # asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10 | ||
8666 | # asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9 | ||
8667 | pxor %xmm11,%xmm9 | ||
8668 | |||
8669 | # qhasm: xmm13 = xmm7 | ||
8670 | # asm 1: movdqa <xmm7=int6464#8,>xmm13=int6464#11 | ||
8671 | # asm 2: movdqa <xmm7=%xmm7,>xmm13=%xmm10 | ||
8672 | movdqa %xmm7,%xmm10 | ||
8673 | |||
8674 | # qhasm: xmm13 ^= xmm1 | ||
8675 | # asm 1: pxor <xmm1=int6464#2,<xmm13=int6464#11 | ||
8676 | # asm 2: pxor <xmm1=%xmm1,<xmm13=%xmm10 | ||
8677 | pxor %xmm1,%xmm10 | ||
8678 | |||
8679 | # qhasm: xmm12 = xmm5 | ||
8680 | # asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#12 | ||
8681 | # asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm11 | ||
8682 | movdqa %xmm5,%xmm11 | ||
8683 | |||
8684 | # qhasm: xmm9 = xmm13 | ||
8685 | # asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13 | ||
8686 | # asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12 | ||
8687 | movdqa %xmm10,%xmm12 | ||
8688 | |||
8689 | # qhasm: xmm12 ^= xmm4 | ||
8690 | # asm 1: pxor <xmm4=int6464#5,<xmm12=int6464#12 | ||
8691 | # asm 2: pxor <xmm4=%xmm4,<xmm12=%xmm11 | ||
8692 | pxor %xmm4,%xmm11 | ||
8693 | |||
8694 | # qhasm: xmm9 |= xmm12 | ||
8695 | # asm 1: por <xmm12=int6464#12,<xmm9=int6464#13 | ||
8696 | # asm 2: por <xmm12=%xmm11,<xmm9=%xmm12 | ||
8697 | por %xmm11,%xmm12 | ||
8698 | |||
8699 | # qhasm: xmm13 &= xmm12 | ||
8700 | # asm 1: pand <xmm12=int6464#12,<xmm13=int6464#11 | ||
8701 | # asm 2: pand <xmm12=%xmm11,<xmm13=%xmm10 | ||
8702 | pand %xmm11,%xmm10 | ||
8703 | |||
8704 | # qhasm: xmm8 ^= xmm13 | ||
8705 | # asm 1: pxor <xmm13=int6464#11,<xmm8=int6464#15 | ||
8706 | # asm 2: pxor <xmm13=%xmm10,<xmm8=%xmm14 | ||
8707 | pxor %xmm10,%xmm14 | ||
8708 | |||
8709 | # qhasm: xmm11 ^= xmm15 | ||
8710 | # asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#9 | ||
8711 | # asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm8 | ||
8712 | pxor %xmm15,%xmm8 | ||
8713 | |||
8714 | # qhasm: xmm10 ^= xmm14 | ||
8715 | # asm 1: pxor <xmm14=int6464#14,<xmm10=int6464#10 | ||
8716 | # asm 2: pxor <xmm14=%xmm13,<xmm10=%xmm9 | ||
8717 | pxor %xmm13,%xmm9 | ||
8718 | |||
8719 | # qhasm: xmm9 ^= xmm15 | ||
8720 | # asm 1: pxor <xmm15=int6464#16,<xmm9=int6464#13 | ||
8721 | # asm 2: pxor <xmm15=%xmm15,<xmm9=%xmm12 | ||
8722 | pxor %xmm15,%xmm12 | ||
8723 | |||
8724 | # qhasm: xmm8 ^= xmm14 | ||
8725 | # asm 1: pxor <xmm14=int6464#14,<xmm8=int6464#15 | ||
8726 | # asm 2: pxor <xmm14=%xmm13,<xmm8=%xmm14 | ||
8727 | pxor %xmm13,%xmm14 | ||
8728 | |||
8729 | # qhasm: xmm9 ^= xmm14 | ||
8730 | # asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13 | ||
8731 | # asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12 | ||
8732 | pxor %xmm13,%xmm12 | ||
8733 | |||
8734 | # qhasm: xmm12 = xmm3 | ||
8735 | # asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#11 | ||
8736 | # asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm10 | ||
8737 | movdqa %xmm3,%xmm10 | ||
8738 | |||
8739 | # qhasm: xmm13 = xmm6 | ||
8740 | # asm 1: movdqa <xmm6=int6464#7,>xmm13=int6464#12 | ||
8741 | # asm 2: movdqa <xmm6=%xmm6,>xmm13=%xmm11 | ||
8742 | movdqa %xmm6,%xmm11 | ||
8743 | |||
8744 | # qhasm: xmm14 = xmm1 | ||
8745 | # asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14 | ||
8746 | # asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13 | ||
8747 | movdqa %xmm1,%xmm13 | ||
8748 | |||
8749 | # qhasm: xmm15 = xmm7 | ||
8750 | # asm 1: movdqa <xmm7=int6464#8,>xmm15=int6464#16 | ||
8751 | # asm 2: movdqa <xmm7=%xmm7,>xmm15=%xmm15 | ||
8752 | movdqa %xmm7,%xmm15 | ||
8753 | |||
8754 | # qhasm: xmm12 &= xmm2 | ||
8755 | # asm 1: pand <xmm2=int6464#3,<xmm12=int6464#11 | ||
8756 | # asm 2: pand <xmm2=%xmm2,<xmm12=%xmm10 | ||
8757 | pand %xmm2,%xmm10 | ||
8758 | |||
8759 | # qhasm: xmm13 &= xmm0 | ||
8760 | # asm 1: pand <xmm0=int6464#1,<xmm13=int6464#12 | ||
8761 | # asm 2: pand <xmm0=%xmm0,<xmm13=%xmm11 | ||
8762 | pand %xmm0,%xmm11 | ||
8763 | |||
8764 | # qhasm: xmm14 &= xmm5 | ||
8765 | # asm 1: pand <xmm5=int6464#6,<xmm14=int6464#14 | ||
8766 | # asm 2: pand <xmm5=%xmm5,<xmm14=%xmm13 | ||
8767 | pand %xmm5,%xmm13 | ||
8768 | |||
8769 | # qhasm: xmm15 |= xmm4 | ||
8770 | # asm 1: por <xmm4=int6464#5,<xmm15=int6464#16 | ||
8771 | # asm 2: por <xmm4=%xmm4,<xmm15=%xmm15 | ||
8772 | por %xmm4,%xmm15 | ||
8773 | |||
8774 | # qhasm: xmm11 ^= xmm12 | ||
8775 | # asm 1: pxor <xmm12=int6464#11,<xmm11=int6464#9 | ||
8776 | # asm 2: pxor <xmm12=%xmm10,<xmm11=%xmm8 | ||
8777 | pxor %xmm10,%xmm8 | ||
8778 | |||
8779 | # qhasm: xmm10 ^= xmm13 | ||
8780 | # asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10 | ||
8781 | # asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9 | ||
8782 | pxor %xmm11,%xmm9 | ||
8783 | |||
8784 | # qhasm: xmm9 ^= xmm14 | ||
8785 | # asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13 | ||
8786 | # asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12 | ||
8787 | pxor %xmm13,%xmm12 | ||
8788 | |||
8789 | # qhasm: xmm8 ^= xmm15 | ||
8790 | # asm 1: pxor <xmm15=int6464#16,<xmm8=int6464#15 | ||
8791 | # asm 2: pxor <xmm15=%xmm15,<xmm8=%xmm14 | ||
8792 | pxor %xmm15,%xmm14 | ||
8793 | |||
8794 | # qhasm: xmm12 = xmm11 | ||
8795 | # asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11 | ||
8796 | # asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10 | ||
8797 | movdqa %xmm8,%xmm10 | ||
8798 | |||
8799 | # qhasm: xmm12 ^= xmm10 | ||
8800 | # asm 1: pxor <xmm10=int6464#10,<xmm12=int6464#11 | ||
8801 | # asm 2: pxor <xmm10=%xmm9,<xmm12=%xmm10 | ||
8802 | pxor %xmm9,%xmm10 | ||
8803 | |||
8804 | # qhasm: xmm11 &= xmm9 | ||
8805 | # asm 1: pand <xmm9=int6464#13,<xmm11=int6464#9 | ||
8806 | # asm 2: pand <xmm9=%xmm12,<xmm11=%xmm8 | ||
8807 | pand %xmm12,%xmm8 | ||
8808 | |||
8809 | # qhasm: xmm14 = xmm8 | ||
8810 | # asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12 | ||
8811 | # asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11 | ||
8812 | movdqa %xmm14,%xmm11 | ||
8813 | |||
8814 | # qhasm: xmm14 ^= xmm11 | ||
8815 | # asm 1: pxor <xmm11=int6464#9,<xmm14=int6464#12 | ||
8816 | # asm 2: pxor <xmm11=%xmm8,<xmm14=%xmm11 | ||
8817 | pxor %xmm8,%xmm11 | ||
8818 | |||
8819 | # qhasm: xmm15 = xmm12 | ||
8820 | # asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14 | ||
8821 | # asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13 | ||
8822 | movdqa %xmm10,%xmm13 | ||
8823 | |||
8824 | # qhasm: xmm15 &= xmm14 | ||
8825 | # asm 1: pand <xmm14=int6464#12,<xmm15=int6464#14 | ||
8826 | # asm 2: pand <xmm14=%xmm11,<xmm15=%xmm13 | ||
8827 | pand %xmm11,%xmm13 | ||
8828 | |||
8829 | # qhasm: xmm15 ^= xmm10 | ||
8830 | # asm 1: pxor <xmm10=int6464#10,<xmm15=int6464#14 | ||
8831 | # asm 2: pxor <xmm10=%xmm9,<xmm15=%xmm13 | ||
8832 | pxor %xmm9,%xmm13 | ||
8833 | |||
8834 | # qhasm: xmm13 = xmm9 | ||
8835 | # asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16 | ||
8836 | # asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15 | ||
8837 | movdqa %xmm12,%xmm15 | ||
8838 | |||
8839 | # qhasm: xmm13 ^= xmm8 | ||
8840 | # asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16 | ||
8841 | # asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15 | ||
8842 | pxor %xmm14,%xmm15 | ||
8843 | |||
8844 | # qhasm: xmm11 ^= xmm10 | ||
8845 | # asm 1: pxor <xmm10=int6464#10,<xmm11=int6464#9 | ||
8846 | # asm 2: pxor <xmm10=%xmm9,<xmm11=%xmm8 | ||
8847 | pxor %xmm9,%xmm8 | ||
8848 | |||
8849 | # qhasm: xmm13 &= xmm11 | ||
8850 | # asm 1: pand <xmm11=int6464#9,<xmm13=int6464#16 | ||
8851 | # asm 2: pand <xmm11=%xmm8,<xmm13=%xmm15 | ||
8852 | pand %xmm8,%xmm15 | ||
8853 | |||
8854 | # qhasm: xmm13 ^= xmm8 | ||
8855 | # asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16 | ||
8856 | # asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15 | ||
8857 | pxor %xmm14,%xmm15 | ||
8858 | |||
8859 | # qhasm: xmm9 ^= xmm13 | ||
8860 | # asm 1: pxor <xmm13=int6464#16,<xmm9=int6464#13 | ||
8861 | # asm 2: pxor <xmm13=%xmm15,<xmm9=%xmm12 | ||
8862 | pxor %xmm15,%xmm12 | ||
8863 | |||
8864 | # qhasm: xmm10 = xmm14 | ||
8865 | # asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9 | ||
8866 | # asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8 | ||
8867 | movdqa %xmm11,%xmm8 | ||
8868 | |||
8869 | # qhasm: xmm10 ^= xmm13 | ||
8870 | # asm 1: pxor <xmm13=int6464#16,<xmm10=int6464#9 | ||
8871 | # asm 2: pxor <xmm13=%xmm15,<xmm10=%xmm8 | ||
8872 | pxor %xmm15,%xmm8 | ||
8873 | |||
8874 | # qhasm: xmm10 &= xmm8 | ||
8875 | # asm 1: pand <xmm8=int6464#15,<xmm10=int6464#9 | ||
8876 | # asm 2: pand <xmm8=%xmm14,<xmm10=%xmm8 | ||
8877 | pand %xmm14,%xmm8 | ||
8878 | |||
8879 | # qhasm: xmm9 ^= xmm10 | ||
8880 | # asm 1: pxor <xmm10=int6464#9,<xmm9=int6464#13 | ||
8881 | # asm 2: pxor <xmm10=%xmm8,<xmm9=%xmm12 | ||
8882 | pxor %xmm8,%xmm12 | ||
8883 | |||
8884 | # qhasm: xmm14 ^= xmm10 | ||
8885 | # asm 1: pxor <xmm10=int6464#9,<xmm14=int6464#12 | ||
8886 | # asm 2: pxor <xmm10=%xmm8,<xmm14=%xmm11 | ||
8887 | pxor %xmm8,%xmm11 | ||
8888 | |||
8889 | # qhasm: xmm14 &= xmm15 | ||
8890 | # asm 1: pand <xmm15=int6464#14,<xmm14=int6464#12 | ||
8891 | # asm 2: pand <xmm15=%xmm13,<xmm14=%xmm11 | ||
8892 | pand %xmm13,%xmm11 | ||
8893 | |||
8894 | # qhasm: xmm14 ^= xmm12 | ||
8895 | # asm 1: pxor <xmm12=int6464#11,<xmm14=int6464#12 | ||
8896 | # asm 2: pxor <xmm12=%xmm10,<xmm14=%xmm11 | ||
8897 | pxor %xmm10,%xmm11 | ||
8898 | |||
8899 | # qhasm: xmm12 = xmm4 | ||
8900 | # asm 1: movdqa <xmm4=int6464#5,>xmm12=int6464#9 | ||
8901 | # asm 2: movdqa <xmm4=%xmm4,>xmm12=%xmm8 | ||
8902 | movdqa %xmm4,%xmm8 | ||
8903 | |||
8904 | # qhasm: xmm8 = xmm5 | ||
8905 | # asm 1: movdqa <xmm5=int6464#6,>xmm8=int6464#10 | ||
8906 | # asm 2: movdqa <xmm5=%xmm5,>xmm8=%xmm9 | ||
8907 | movdqa %xmm5,%xmm9 | ||
8908 | |||
8909 | # qhasm: xmm10 = xmm15 | ||
8910 | # asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11 | ||
8911 | # asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10 | ||
8912 | movdqa %xmm13,%xmm10 | ||
8913 | |||
8914 | # qhasm: xmm10 ^= xmm14 | ||
8915 | # asm 1: pxor <xmm14=int6464#12,<xmm10=int6464#11 | ||
8916 | # asm 2: pxor <xmm14=%xmm11,<xmm10=%xmm10 | ||
8917 | pxor %xmm11,%xmm10 | ||
8918 | |||
8919 | # qhasm: xmm10 &= xmm4 | ||
8920 | # asm 1: pand <xmm4=int6464#5,<xmm10=int6464#11 | ||
8921 | # asm 2: pand <xmm4=%xmm4,<xmm10=%xmm10 | ||
8922 | pand %xmm4,%xmm10 | ||
8923 | |||
8924 | # qhasm: xmm4 ^= xmm5 | ||
8925 | # asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5 | ||
8926 | # asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4 | ||
8927 | pxor %xmm5,%xmm4 | ||
8928 | |||
8929 | # qhasm: xmm4 &= xmm14 | ||
8930 | # asm 1: pand <xmm14=int6464#12,<xmm4=int6464#5 | ||
8931 | # asm 2: pand <xmm14=%xmm11,<xmm4=%xmm4 | ||
8932 | pand %xmm11,%xmm4 | ||
8933 | |||
8934 | # qhasm: xmm5 &= xmm15 | ||
8935 | # asm 1: pand <xmm15=int6464#14,<xmm5=int6464#6 | ||
8936 | # asm 2: pand <xmm15=%xmm13,<xmm5=%xmm5 | ||
8937 | pand %xmm13,%xmm5 | ||
8938 | |||
8939 | # qhasm: xmm4 ^= xmm5 | ||
8940 | # asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5 | ||
8941 | # asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4 | ||
8942 | pxor %xmm5,%xmm4 | ||
8943 | |||
8944 | # qhasm: xmm5 ^= xmm10 | ||
8945 | # asm 1: pxor <xmm10=int6464#11,<xmm5=int6464#6 | ||
8946 | # asm 2: pxor <xmm10=%xmm10,<xmm5=%xmm5 | ||
8947 | pxor %xmm10,%xmm5 | ||
8948 | |||
8949 | # qhasm: xmm12 ^= xmm0 | ||
8950 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#9 | ||
8951 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm8 | ||
8952 | pxor %xmm0,%xmm8 | ||
8953 | |||
8954 | # qhasm: xmm8 ^= xmm2 | ||
8955 | # asm 1: pxor <xmm2=int6464#3,<xmm8=int6464#10 | ||
8956 | # asm 2: pxor <xmm2=%xmm2,<xmm8=%xmm9 | ||
8957 | pxor %xmm2,%xmm9 | ||
8958 | |||
8959 | # qhasm: xmm15 ^= xmm13 | ||
8960 | # asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14 | ||
8961 | # asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13 | ||
8962 | pxor %xmm15,%xmm13 | ||
8963 | |||
8964 | # qhasm: xmm14 ^= xmm9 | ||
8965 | # asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12 | ||
8966 | # asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11 | ||
8967 | pxor %xmm12,%xmm11 | ||
8968 | |||
8969 | # qhasm: xmm11 = xmm15 | ||
8970 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
8971 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
8972 | movdqa %xmm13,%xmm10 | ||
8973 | |||
8974 | # qhasm: xmm11 ^= xmm14 | ||
8975 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
8976 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
8977 | pxor %xmm11,%xmm10 | ||
8978 | |||
8979 | # qhasm: xmm11 &= xmm12 | ||
8980 | # asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11 | ||
8981 | # asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10 | ||
8982 | pand %xmm8,%xmm10 | ||
8983 | |||
8984 | # qhasm: xmm12 ^= xmm8 | ||
8985 | # asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9 | ||
8986 | # asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8 | ||
8987 | pxor %xmm9,%xmm8 | ||
8988 | |||
8989 | # qhasm: xmm12 &= xmm14 | ||
8990 | # asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9 | ||
8991 | # asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8 | ||
8992 | pand %xmm11,%xmm8 | ||
8993 | |||
8994 | # qhasm: xmm8 &= xmm15 | ||
8995 | # asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10 | ||
8996 | # asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9 | ||
8997 | pand %xmm13,%xmm9 | ||
8998 | |||
8999 | # qhasm: xmm8 ^= xmm12 | ||
9000 | # asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10 | ||
9001 | # asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9 | ||
9002 | pxor %xmm8,%xmm9 | ||
9003 | |||
9004 | # qhasm: xmm12 ^= xmm11 | ||
9005 | # asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9 | ||
9006 | # asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8 | ||
9007 | pxor %xmm10,%xmm8 | ||
9008 | |||
9009 | # qhasm: xmm10 = xmm13 | ||
9010 | # asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11 | ||
9011 | # asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10 | ||
9012 | movdqa %xmm15,%xmm10 | ||
9013 | |||
9014 | # qhasm: xmm10 ^= xmm9 | ||
9015 | # asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11 | ||
9016 | # asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10 | ||
9017 | pxor %xmm12,%xmm10 | ||
9018 | |||
9019 | # qhasm: xmm10 &= xmm0 | ||
9020 | # asm 1: pand <xmm0=int6464#1,<xmm10=int6464#11 | ||
9021 | # asm 2: pand <xmm0=%xmm0,<xmm10=%xmm10 | ||
9022 | pand %xmm0,%xmm10 | ||
9023 | |||
9024 | # qhasm: xmm0 ^= xmm2 | ||
9025 | # asm 1: pxor <xmm2=int6464#3,<xmm0=int6464#1 | ||
9026 | # asm 2: pxor <xmm2=%xmm2,<xmm0=%xmm0 | ||
9027 | pxor %xmm2,%xmm0 | ||
9028 | |||
9029 | # qhasm: xmm0 &= xmm9 | ||
9030 | # asm 1: pand <xmm9=int6464#13,<xmm0=int6464#1 | ||
9031 | # asm 2: pand <xmm9=%xmm12,<xmm0=%xmm0 | ||
9032 | pand %xmm12,%xmm0 | ||
9033 | |||
9034 | # qhasm: xmm2 &= xmm13 | ||
9035 | # asm 1: pand <xmm13=int6464#16,<xmm2=int6464#3 | ||
9036 | # asm 2: pand <xmm13=%xmm15,<xmm2=%xmm2 | ||
9037 | pand %xmm15,%xmm2 | ||
9038 | |||
9039 | # qhasm: xmm0 ^= xmm2 | ||
9040 | # asm 1: pxor <xmm2=int6464#3,<xmm0=int6464#1 | ||
9041 | # asm 2: pxor <xmm2=%xmm2,<xmm0=%xmm0 | ||
9042 | pxor %xmm2,%xmm0 | ||
9043 | |||
9044 | # qhasm: xmm2 ^= xmm10 | ||
9045 | # asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3 | ||
9046 | # asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2 | ||
9047 | pxor %xmm10,%xmm2 | ||
9048 | |||
9049 | # qhasm: xmm4 ^= xmm12 | ||
9050 | # asm 1: pxor <xmm12=int6464#9,<xmm4=int6464#5 | ||
9051 | # asm 2: pxor <xmm12=%xmm8,<xmm4=%xmm4 | ||
9052 | pxor %xmm8,%xmm4 | ||
9053 | |||
9054 | # qhasm: xmm0 ^= xmm12 | ||
9055 | # asm 1: pxor <xmm12=int6464#9,<xmm0=int6464#1 | ||
9056 | # asm 2: pxor <xmm12=%xmm8,<xmm0=%xmm0 | ||
9057 | pxor %xmm8,%xmm0 | ||
9058 | |||
9059 | # qhasm: xmm5 ^= xmm8 | ||
9060 | # asm 1: pxor <xmm8=int6464#10,<xmm5=int6464#6 | ||
9061 | # asm 2: pxor <xmm8=%xmm9,<xmm5=%xmm5 | ||
9062 | pxor %xmm9,%xmm5 | ||
9063 | |||
9064 | # qhasm: xmm2 ^= xmm8 | ||
9065 | # asm 1: pxor <xmm8=int6464#10,<xmm2=int6464#3 | ||
9066 | # asm 2: pxor <xmm8=%xmm9,<xmm2=%xmm2 | ||
9067 | pxor %xmm9,%xmm2 | ||
9068 | |||
9069 | # qhasm: xmm12 = xmm7 | ||
9070 | # asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#9 | ||
9071 | # asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm8 | ||
9072 | movdqa %xmm7,%xmm8 | ||
9073 | |||
9074 | # qhasm: xmm8 = xmm1 | ||
9075 | # asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10 | ||
9076 | # asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9 | ||
9077 | movdqa %xmm1,%xmm9 | ||
9078 | |||
9079 | # qhasm: xmm12 ^= xmm6 | ||
9080 | # asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#9 | ||
9081 | # asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm8 | ||
9082 | pxor %xmm6,%xmm8 | ||
9083 | |||
9084 | # qhasm: xmm8 ^= xmm3 | ||
9085 | # asm 1: pxor <xmm3=int6464#4,<xmm8=int6464#10 | ||
9086 | # asm 2: pxor <xmm3=%xmm3,<xmm8=%xmm9 | ||
9087 | pxor %xmm3,%xmm9 | ||
9088 | |||
9089 | # qhasm: xmm11 = xmm15 | ||
9090 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
9091 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
9092 | movdqa %xmm13,%xmm10 | ||
9093 | |||
9094 | # qhasm: xmm11 ^= xmm14 | ||
9095 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
9096 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
9097 | pxor %xmm11,%xmm10 | ||
9098 | |||
9099 | # qhasm: xmm11 &= xmm12 | ||
9100 | # asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11 | ||
9101 | # asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10 | ||
9102 | pand %xmm8,%xmm10 | ||
9103 | |||
9104 | # qhasm: xmm12 ^= xmm8 | ||
9105 | # asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9 | ||
9106 | # asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8 | ||
9107 | pxor %xmm9,%xmm8 | ||
9108 | |||
9109 | # qhasm: xmm12 &= xmm14 | ||
9110 | # asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9 | ||
9111 | # asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8 | ||
9112 | pand %xmm11,%xmm8 | ||
9113 | |||
9114 | # qhasm: xmm8 &= xmm15 | ||
9115 | # asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10 | ||
9116 | # asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9 | ||
9117 | pand %xmm13,%xmm9 | ||
9118 | |||
9119 | # qhasm: xmm8 ^= xmm12 | ||
9120 | # asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10 | ||
9121 | # asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9 | ||
9122 | pxor %xmm8,%xmm9 | ||
9123 | |||
9124 | # qhasm: xmm12 ^= xmm11 | ||
9125 | # asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9 | ||
9126 | # asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8 | ||
9127 | pxor %xmm10,%xmm8 | ||
9128 | |||
9129 | # qhasm: xmm10 = xmm13 | ||
9130 | # asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11 | ||
9131 | # asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10 | ||
9132 | movdqa %xmm15,%xmm10 | ||
9133 | |||
9134 | # qhasm: xmm10 ^= xmm9 | ||
9135 | # asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11 | ||
9136 | # asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10 | ||
9137 | pxor %xmm12,%xmm10 | ||
9138 | |||
9139 | # qhasm: xmm10 &= xmm6 | ||
9140 | # asm 1: pand <xmm6=int6464#7,<xmm10=int6464#11 | ||
9141 | # asm 2: pand <xmm6=%xmm6,<xmm10=%xmm10 | ||
9142 | pand %xmm6,%xmm10 | ||
9143 | |||
9144 | # qhasm: xmm6 ^= xmm3 | ||
9145 | # asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7 | ||
9146 | # asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6 | ||
9147 | pxor %xmm3,%xmm6 | ||
9148 | |||
9149 | # qhasm: xmm6 &= xmm9 | ||
9150 | # asm 1: pand <xmm9=int6464#13,<xmm6=int6464#7 | ||
9151 | # asm 2: pand <xmm9=%xmm12,<xmm6=%xmm6 | ||
9152 | pand %xmm12,%xmm6 | ||
9153 | |||
9154 | # qhasm: xmm3 &= xmm13 | ||
9155 | # asm 1: pand <xmm13=int6464#16,<xmm3=int6464#4 | ||
9156 | # asm 2: pand <xmm13=%xmm15,<xmm3=%xmm3 | ||
9157 | pand %xmm15,%xmm3 | ||
9158 | |||
9159 | # qhasm: xmm6 ^= xmm3 | ||
9160 | # asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7 | ||
9161 | # asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6 | ||
9162 | pxor %xmm3,%xmm6 | ||
9163 | |||
9164 | # qhasm: xmm3 ^= xmm10 | ||
9165 | # asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4 | ||
9166 | # asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3 | ||
9167 | pxor %xmm10,%xmm3 | ||
9168 | |||
9169 | # qhasm: xmm15 ^= xmm13 | ||
9170 | # asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14 | ||
9171 | # asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13 | ||
9172 | pxor %xmm15,%xmm13 | ||
9173 | |||
9174 | # qhasm: xmm14 ^= xmm9 | ||
9175 | # asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12 | ||
9176 | # asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11 | ||
9177 | pxor %xmm12,%xmm11 | ||
9178 | |||
9179 | # qhasm: xmm11 = xmm15 | ||
9180 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
9181 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
9182 | movdqa %xmm13,%xmm10 | ||
9183 | |||
9184 | # qhasm: xmm11 ^= xmm14 | ||
9185 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
9186 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
9187 | pxor %xmm11,%xmm10 | ||
9188 | |||
9189 | # qhasm: xmm11 &= xmm7 | ||
9190 | # asm 1: pand <xmm7=int6464#8,<xmm11=int6464#11 | ||
9191 | # asm 2: pand <xmm7=%xmm7,<xmm11=%xmm10 | ||
9192 | pand %xmm7,%xmm10 | ||
9193 | |||
9194 | # qhasm: xmm7 ^= xmm1 | ||
9195 | # asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8 | ||
9196 | # asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7 | ||
9197 | pxor %xmm1,%xmm7 | ||
9198 | |||
9199 | # qhasm: xmm7 &= xmm14 | ||
9200 | # asm 1: pand <xmm14=int6464#12,<xmm7=int6464#8 | ||
9201 | # asm 2: pand <xmm14=%xmm11,<xmm7=%xmm7 | ||
9202 | pand %xmm11,%xmm7 | ||
9203 | |||
9204 | # qhasm: xmm1 &= xmm15 | ||
9205 | # asm 1: pand <xmm15=int6464#14,<xmm1=int6464#2 | ||
9206 | # asm 2: pand <xmm15=%xmm13,<xmm1=%xmm1 | ||
9207 | pand %xmm13,%xmm1 | ||
9208 | |||
9209 | # qhasm: xmm7 ^= xmm1 | ||
9210 | # asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8 | ||
9211 | # asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7 | ||
9212 | pxor %xmm1,%xmm7 | ||
9213 | |||
9214 | # qhasm: xmm1 ^= xmm11 | ||
9215 | # asm 1: pxor <xmm11=int6464#11,<xmm1=int6464#2 | ||
9216 | # asm 2: pxor <xmm11=%xmm10,<xmm1=%xmm1 | ||
9217 | pxor %xmm10,%xmm1 | ||
9218 | |||
9219 | # qhasm: xmm7 ^= xmm12 | ||
9220 | # asm 1: pxor <xmm12=int6464#9,<xmm7=int6464#8 | ||
9221 | # asm 2: pxor <xmm12=%xmm8,<xmm7=%xmm7 | ||
9222 | pxor %xmm8,%xmm7 | ||
9223 | |||
9224 | # qhasm: xmm6 ^= xmm12 | ||
9225 | # asm 1: pxor <xmm12=int6464#9,<xmm6=int6464#7 | ||
9226 | # asm 2: pxor <xmm12=%xmm8,<xmm6=%xmm6 | ||
9227 | pxor %xmm8,%xmm6 | ||
9228 | |||
9229 | # qhasm: xmm1 ^= xmm8 | ||
9230 | # asm 1: pxor <xmm8=int6464#10,<xmm1=int6464#2 | ||
9231 | # asm 2: pxor <xmm8=%xmm9,<xmm1=%xmm1 | ||
9232 | pxor %xmm9,%xmm1 | ||
9233 | |||
9234 | # qhasm: xmm3 ^= xmm8 | ||
9235 | # asm 1: pxor <xmm8=int6464#10,<xmm3=int6464#4 | ||
9236 | # asm 2: pxor <xmm8=%xmm9,<xmm3=%xmm3 | ||
9237 | pxor %xmm9,%xmm3 | ||
9238 | |||
9239 | # qhasm: xmm7 ^= xmm0 | ||
9240 | # asm 1: pxor <xmm0=int6464#1,<xmm7=int6464#8 | ||
9241 | # asm 2: pxor <xmm0=%xmm0,<xmm7=%xmm7 | ||
9242 | pxor %xmm0,%xmm7 | ||
9243 | |||
9244 | # qhasm: xmm1 ^= xmm4 | ||
9245 | # asm 1: pxor <xmm4=int6464#5,<xmm1=int6464#2 | ||
9246 | # asm 2: pxor <xmm4=%xmm4,<xmm1=%xmm1 | ||
9247 | pxor %xmm4,%xmm1 | ||
9248 | |||
9249 | # qhasm: xmm6 ^= xmm7 | ||
9250 | # asm 1: pxor <xmm7=int6464#8,<xmm6=int6464#7 | ||
9251 | # asm 2: pxor <xmm7=%xmm7,<xmm6=%xmm6 | ||
9252 | pxor %xmm7,%xmm6 | ||
9253 | |||
9254 | # qhasm: xmm4 ^= xmm0 | ||
9255 | # asm 1: pxor <xmm0=int6464#1,<xmm4=int6464#5 | ||
9256 | # asm 2: pxor <xmm0=%xmm0,<xmm4=%xmm4 | ||
9257 | pxor %xmm0,%xmm4 | ||
9258 | |||
9259 | # qhasm: xmm0 ^= xmm1 | ||
9260 | # asm 1: pxor <xmm1=int6464#2,<xmm0=int6464#1 | ||
9261 | # asm 2: pxor <xmm1=%xmm1,<xmm0=%xmm0 | ||
9262 | pxor %xmm1,%xmm0 | ||
9263 | |||
9264 | # qhasm: xmm1 ^= xmm5 | ||
9265 | # asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2 | ||
9266 | # asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1 | ||
9267 | pxor %xmm5,%xmm1 | ||
9268 | |||
9269 | # qhasm: xmm5 ^= xmm3 | ||
9270 | # asm 1: pxor <xmm3=int6464#4,<xmm5=int6464#6 | ||
9271 | # asm 2: pxor <xmm3=%xmm3,<xmm5=%xmm5 | ||
9272 | pxor %xmm3,%xmm5 | ||
9273 | |||
9274 | # qhasm: xmm6 ^= xmm5 | ||
9275 | # asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7 | ||
9276 | # asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6 | ||
9277 | pxor %xmm5,%xmm6 | ||
9278 | |||
9279 | # qhasm: xmm3 ^= xmm2 | ||
9280 | # asm 1: pxor <xmm2=int6464#3,<xmm3=int6464#4 | ||
9281 | # asm 2: pxor <xmm2=%xmm2,<xmm3=%xmm3 | ||
9282 | pxor %xmm2,%xmm3 | ||
9283 | |||
9284 | # qhasm: xmm2 ^= xmm5 | ||
9285 | # asm 1: pxor <xmm5=int6464#6,<xmm2=int6464#3 | ||
9286 | # asm 2: pxor <xmm5=%xmm5,<xmm2=%xmm2 | ||
9287 | pxor %xmm5,%xmm2 | ||
9288 | |||
9289 | # qhasm: xmm4 ^= xmm2 | ||
9290 | # asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5 | ||
9291 | # asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4 | ||
9292 | pxor %xmm2,%xmm4 | ||
9293 | |||
9294 | # qhasm: xmm3 ^= RCON | ||
9295 | # asm 1: pxor RCON,<xmm3=int6464#4 | ||
9296 | # asm 2: pxor RCON,<xmm3=%xmm3 | ||
9297 | pxor RCON,%xmm3 | ||
9298 | |||
9299 | # qhasm: shuffle bytes of xmm0 by EXPB0 | ||
9300 | # asm 1: pshufb EXPB0,<xmm0=int6464#1 | ||
9301 | # asm 2: pshufb EXPB0,<xmm0=%xmm0 | ||
9302 | pshufb EXPB0,%xmm0 | ||
9303 | |||
9304 | # qhasm: shuffle bytes of xmm1 by EXPB0 | ||
9305 | # asm 1: pshufb EXPB0,<xmm1=int6464#2 | ||
9306 | # asm 2: pshufb EXPB0,<xmm1=%xmm1 | ||
9307 | pshufb EXPB0,%xmm1 | ||
9308 | |||
9309 | # qhasm: shuffle bytes of xmm6 by EXPB0 | ||
9310 | # asm 1: pshufb EXPB0,<xmm6=int6464#7 | ||
9311 | # asm 2: pshufb EXPB0,<xmm6=%xmm6 | ||
9312 | pshufb EXPB0,%xmm6 | ||
9313 | |||
9314 | # qhasm: shuffle bytes of xmm4 by EXPB0 | ||
9315 | # asm 1: pshufb EXPB0,<xmm4=int6464#5 | ||
9316 | # asm 2: pshufb EXPB0,<xmm4=%xmm4 | ||
9317 | pshufb EXPB0,%xmm4 | ||
9318 | |||
9319 | # qhasm: shuffle bytes of xmm2 by EXPB0 | ||
9320 | # asm 1: pshufb EXPB0,<xmm2=int6464#3 | ||
9321 | # asm 2: pshufb EXPB0,<xmm2=%xmm2 | ||
9322 | pshufb EXPB0,%xmm2 | ||
9323 | |||
9324 | # qhasm: shuffle bytes of xmm7 by EXPB0 | ||
9325 | # asm 1: pshufb EXPB0,<xmm7=int6464#8 | ||
9326 | # asm 2: pshufb EXPB0,<xmm7=%xmm7 | ||
9327 | pshufb EXPB0,%xmm7 | ||
9328 | |||
9329 | # qhasm: shuffle bytes of xmm3 by EXPB0 | ||
9330 | # asm 1: pshufb EXPB0,<xmm3=int6464#4 | ||
9331 | # asm 2: pshufb EXPB0,<xmm3=%xmm3 | ||
9332 | pshufb EXPB0,%xmm3 | ||
9333 | |||
9334 | # qhasm: shuffle bytes of xmm5 by EXPB0 | ||
9335 | # asm 1: pshufb EXPB0,<xmm5=int6464#6 | ||
9336 | # asm 2: pshufb EXPB0,<xmm5=%xmm5 | ||
9337 | pshufb EXPB0,%xmm5 | ||
9338 | |||
9339 | # qhasm: xmm8 = *(int128 *)(c + 768) | ||
9340 | # asm 1: movdqa 768(<c=int64#1),>xmm8=int6464#9 | ||
9341 | # asm 2: movdqa 768(<c=%rdi),>xmm8=%xmm8 | ||
9342 | movdqa 768(%rdi),%xmm8 | ||
9343 | |||
9344 | # qhasm: xmm9 = *(int128 *)(c + 784) | ||
9345 | # asm 1: movdqa 784(<c=int64#1),>xmm9=int6464#10 | ||
9346 | # asm 2: movdqa 784(<c=%rdi),>xmm9=%xmm9 | ||
9347 | movdqa 784(%rdi),%xmm9 | ||
9348 | |||
9349 | # qhasm: xmm10 = *(int128 *)(c + 800) | ||
9350 | # asm 1: movdqa 800(<c=int64#1),>xmm10=int6464#11 | ||
9351 | # asm 2: movdqa 800(<c=%rdi),>xmm10=%xmm10 | ||
9352 | movdqa 800(%rdi),%xmm10 | ||
9353 | |||
9354 | # qhasm: xmm11 = *(int128 *)(c + 816) | ||
9355 | # asm 1: movdqa 816(<c=int64#1),>xmm11=int6464#12 | ||
9356 | # asm 2: movdqa 816(<c=%rdi),>xmm11=%xmm11 | ||
9357 | movdqa 816(%rdi),%xmm11 | ||
9358 | |||
9359 | # qhasm: xmm12 = *(int128 *)(c + 832) | ||
9360 | # asm 1: movdqa 832(<c=int64#1),>xmm12=int6464#13 | ||
9361 | # asm 2: movdqa 832(<c=%rdi),>xmm12=%xmm12 | ||
9362 | movdqa 832(%rdi),%xmm12 | ||
9363 | |||
9364 | # qhasm: xmm13 = *(int128 *)(c + 848) | ||
9365 | # asm 1: movdqa 848(<c=int64#1),>xmm13=int6464#14 | ||
9366 | # asm 2: movdqa 848(<c=%rdi),>xmm13=%xmm13 | ||
9367 | movdqa 848(%rdi),%xmm13 | ||
9368 | |||
9369 | # qhasm: xmm14 = *(int128 *)(c + 864) | ||
9370 | # asm 1: movdqa 864(<c=int64#1),>xmm14=int6464#15 | ||
9371 | # asm 2: movdqa 864(<c=%rdi),>xmm14=%xmm14 | ||
9372 | movdqa 864(%rdi),%xmm14 | ||
9373 | |||
9374 | # qhasm: xmm15 = *(int128 *)(c + 880) | ||
9375 | # asm 1: movdqa 880(<c=int64#1),>xmm15=int6464#16 | ||
9376 | # asm 2: movdqa 880(<c=%rdi),>xmm15=%xmm15 | ||
9377 | movdqa 880(%rdi),%xmm15 | ||
9378 | |||
9379 | # qhasm: xmm8 ^= ONE | ||
9380 | # asm 1: pxor ONE,<xmm8=int6464#9 | ||
9381 | # asm 2: pxor ONE,<xmm8=%xmm8 | ||
9382 | pxor ONE,%xmm8 | ||
9383 | |||
9384 | # qhasm: xmm9 ^= ONE | ||
9385 | # asm 1: pxor ONE,<xmm9=int6464#10 | ||
9386 | # asm 2: pxor ONE,<xmm9=%xmm9 | ||
9387 | pxor ONE,%xmm9 | ||
9388 | |||
9389 | # qhasm: xmm13 ^= ONE | ||
9390 | # asm 1: pxor ONE,<xmm13=int6464#14 | ||
9391 | # asm 2: pxor ONE,<xmm13=%xmm13 | ||
9392 | pxor ONE,%xmm13 | ||
9393 | |||
9394 | # qhasm: xmm14 ^= ONE | ||
9395 | # asm 1: pxor ONE,<xmm14=int6464#15 | ||
9396 | # asm 2: pxor ONE,<xmm14=%xmm14 | ||
9397 | pxor ONE,%xmm14 | ||
9398 | |||
9399 | # qhasm: xmm0 ^= xmm8 | ||
9400 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
9401 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
9402 | pxor %xmm8,%xmm0 | ||
9403 | |||
9404 | # qhasm: xmm1 ^= xmm9 | ||
9405 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
9406 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
9407 | pxor %xmm9,%xmm1 | ||
9408 | |||
9409 | # qhasm: xmm6 ^= xmm10 | ||
9410 | # asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7 | ||
9411 | # asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6 | ||
9412 | pxor %xmm10,%xmm6 | ||
9413 | |||
9414 | # qhasm: xmm4 ^= xmm11 | ||
9415 | # asm 1: pxor <xmm11=int6464#12,<xmm4=int6464#5 | ||
9416 | # asm 2: pxor <xmm11=%xmm11,<xmm4=%xmm4 | ||
9417 | pxor %xmm11,%xmm4 | ||
9418 | |||
9419 | # qhasm: xmm2 ^= xmm12 | ||
9420 | # asm 1: pxor <xmm12=int6464#13,<xmm2=int6464#3 | ||
9421 | # asm 2: pxor <xmm12=%xmm12,<xmm2=%xmm2 | ||
9422 | pxor %xmm12,%xmm2 | ||
9423 | |||
9424 | # qhasm: xmm7 ^= xmm13 | ||
9425 | # asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8 | ||
9426 | # asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7 | ||
9427 | pxor %xmm13,%xmm7 | ||
9428 | |||
9429 | # qhasm: xmm3 ^= xmm14 | ||
9430 | # asm 1: pxor <xmm14=int6464#15,<xmm3=int6464#4 | ||
9431 | # asm 2: pxor <xmm14=%xmm14,<xmm3=%xmm3 | ||
9432 | pxor %xmm14,%xmm3 | ||
9433 | |||
9434 | # qhasm: xmm5 ^= xmm15 | ||
9435 | # asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6 | ||
9436 | # asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5 | ||
9437 | pxor %xmm15,%xmm5 | ||
9438 | |||
9439 | # qhasm: uint32323232 xmm8 >>= 8 | ||
9440 | # asm 1: psrld $8,<xmm8=int6464#9 | ||
9441 | # asm 2: psrld $8,<xmm8=%xmm8 | ||
9442 | psrld $8,%xmm8 | ||
9443 | |||
9444 | # qhasm: uint32323232 xmm9 >>= 8 | ||
9445 | # asm 1: psrld $8,<xmm9=int6464#10 | ||
9446 | # asm 2: psrld $8,<xmm9=%xmm9 | ||
9447 | psrld $8,%xmm9 | ||
9448 | |||
9449 | # qhasm: uint32323232 xmm10 >>= 8 | ||
9450 | # asm 1: psrld $8,<xmm10=int6464#11 | ||
9451 | # asm 2: psrld $8,<xmm10=%xmm10 | ||
9452 | psrld $8,%xmm10 | ||
9453 | |||
9454 | # qhasm: uint32323232 xmm11 >>= 8 | ||
9455 | # asm 1: psrld $8,<xmm11=int6464#12 | ||
9456 | # asm 2: psrld $8,<xmm11=%xmm11 | ||
9457 | psrld $8,%xmm11 | ||
9458 | |||
9459 | # qhasm: uint32323232 xmm12 >>= 8 | ||
9460 | # asm 1: psrld $8,<xmm12=int6464#13 | ||
9461 | # asm 2: psrld $8,<xmm12=%xmm12 | ||
9462 | psrld $8,%xmm12 | ||
9463 | |||
9464 | # qhasm: uint32323232 xmm13 >>= 8 | ||
9465 | # asm 1: psrld $8,<xmm13=int6464#14 | ||
9466 | # asm 2: psrld $8,<xmm13=%xmm13 | ||
9467 | psrld $8,%xmm13 | ||
9468 | |||
9469 | # qhasm: uint32323232 xmm14 >>= 8 | ||
9470 | # asm 1: psrld $8,<xmm14=int6464#15 | ||
9471 | # asm 2: psrld $8,<xmm14=%xmm14 | ||
9472 | psrld $8,%xmm14 | ||
9473 | |||
9474 | # qhasm: uint32323232 xmm15 >>= 8 | ||
9475 | # asm 1: psrld $8,<xmm15=int6464#16 | ||
9476 | # asm 2: psrld $8,<xmm15=%xmm15 | ||
9477 | psrld $8,%xmm15 | ||
9478 | |||
9479 | # qhasm: xmm0 ^= xmm8 | ||
9480 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
9481 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
9482 | pxor %xmm8,%xmm0 | ||
9483 | |||
9484 | # qhasm: xmm1 ^= xmm9 | ||
9485 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
9486 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
9487 | pxor %xmm9,%xmm1 | ||
9488 | |||
9489 | # qhasm: xmm6 ^= xmm10 | ||
9490 | # asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7 | ||
9491 | # asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6 | ||
9492 | pxor %xmm10,%xmm6 | ||
9493 | |||
9494 | # qhasm: xmm4 ^= xmm11 | ||
9495 | # asm 1: pxor <xmm11=int6464#12,<xmm4=int6464#5 | ||
9496 | # asm 2: pxor <xmm11=%xmm11,<xmm4=%xmm4 | ||
9497 | pxor %xmm11,%xmm4 | ||
9498 | |||
9499 | # qhasm: xmm2 ^= xmm12 | ||
9500 | # asm 1: pxor <xmm12=int6464#13,<xmm2=int6464#3 | ||
9501 | # asm 2: pxor <xmm12=%xmm12,<xmm2=%xmm2 | ||
9502 | pxor %xmm12,%xmm2 | ||
9503 | |||
9504 | # qhasm: xmm7 ^= xmm13 | ||
9505 | # asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8 | ||
9506 | # asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7 | ||
9507 | pxor %xmm13,%xmm7 | ||
9508 | |||
9509 | # qhasm: xmm3 ^= xmm14 | ||
9510 | # asm 1: pxor <xmm14=int6464#15,<xmm3=int6464#4 | ||
9511 | # asm 2: pxor <xmm14=%xmm14,<xmm3=%xmm3 | ||
9512 | pxor %xmm14,%xmm3 | ||
9513 | |||
9514 | # qhasm: xmm5 ^= xmm15 | ||
9515 | # asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6 | ||
9516 | # asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5 | ||
9517 | pxor %xmm15,%xmm5 | ||
9518 | |||
9519 | # qhasm: uint32323232 xmm8 >>= 8 | ||
9520 | # asm 1: psrld $8,<xmm8=int6464#9 | ||
9521 | # asm 2: psrld $8,<xmm8=%xmm8 | ||
9522 | psrld $8,%xmm8 | ||
9523 | |||
9524 | # qhasm: uint32323232 xmm9 >>= 8 | ||
9525 | # asm 1: psrld $8,<xmm9=int6464#10 | ||
9526 | # asm 2: psrld $8,<xmm9=%xmm9 | ||
9527 | psrld $8,%xmm9 | ||
9528 | |||
9529 | # qhasm: uint32323232 xmm10 >>= 8 | ||
9530 | # asm 1: psrld $8,<xmm10=int6464#11 | ||
9531 | # asm 2: psrld $8,<xmm10=%xmm10 | ||
9532 | psrld $8,%xmm10 | ||
9533 | |||
9534 | # qhasm: uint32323232 xmm11 >>= 8 | ||
9535 | # asm 1: psrld $8,<xmm11=int6464#12 | ||
9536 | # asm 2: psrld $8,<xmm11=%xmm11 | ||
9537 | psrld $8,%xmm11 | ||
9538 | |||
9539 | # qhasm: uint32323232 xmm12 >>= 8 | ||
9540 | # asm 1: psrld $8,<xmm12=int6464#13 | ||
9541 | # asm 2: psrld $8,<xmm12=%xmm12 | ||
9542 | psrld $8,%xmm12 | ||
9543 | |||
9544 | # qhasm: uint32323232 xmm13 >>= 8 | ||
9545 | # asm 1: psrld $8,<xmm13=int6464#14 | ||
9546 | # asm 2: psrld $8,<xmm13=%xmm13 | ||
9547 | psrld $8,%xmm13 | ||
9548 | |||
9549 | # qhasm: uint32323232 xmm14 >>= 8 | ||
9550 | # asm 1: psrld $8,<xmm14=int6464#15 | ||
9551 | # asm 2: psrld $8,<xmm14=%xmm14 | ||
9552 | psrld $8,%xmm14 | ||
9553 | |||
9554 | # qhasm: uint32323232 xmm15 >>= 8 | ||
9555 | # asm 1: psrld $8,<xmm15=int6464#16 | ||
9556 | # asm 2: psrld $8,<xmm15=%xmm15 | ||
9557 | psrld $8,%xmm15 | ||
9558 | |||
9559 | # qhasm: xmm0 ^= xmm8 | ||
9560 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
9561 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
9562 | pxor %xmm8,%xmm0 | ||
9563 | |||
9564 | # qhasm: xmm1 ^= xmm9 | ||
9565 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
9566 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
9567 | pxor %xmm9,%xmm1 | ||
9568 | |||
9569 | # qhasm: xmm6 ^= xmm10 | ||
9570 | # asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7 | ||
9571 | # asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6 | ||
9572 | pxor %xmm10,%xmm6 | ||
9573 | |||
9574 | # qhasm: xmm4 ^= xmm11 | ||
9575 | # asm 1: pxor <xmm11=int6464#12,<xmm4=int6464#5 | ||
9576 | # asm 2: pxor <xmm11=%xmm11,<xmm4=%xmm4 | ||
9577 | pxor %xmm11,%xmm4 | ||
9578 | |||
9579 | # qhasm: xmm2 ^= xmm12 | ||
9580 | # asm 1: pxor <xmm12=int6464#13,<xmm2=int6464#3 | ||
9581 | # asm 2: pxor <xmm12=%xmm12,<xmm2=%xmm2 | ||
9582 | pxor %xmm12,%xmm2 | ||
9583 | |||
9584 | # qhasm: xmm7 ^= xmm13 | ||
9585 | # asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8 | ||
9586 | # asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7 | ||
9587 | pxor %xmm13,%xmm7 | ||
9588 | |||
9589 | # qhasm: xmm3 ^= xmm14 | ||
9590 | # asm 1: pxor <xmm14=int6464#15,<xmm3=int6464#4 | ||
9591 | # asm 2: pxor <xmm14=%xmm14,<xmm3=%xmm3 | ||
9592 | pxor %xmm14,%xmm3 | ||
9593 | |||
9594 | # qhasm: xmm5 ^= xmm15 | ||
9595 | # asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6 | ||
9596 | # asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5 | ||
9597 | pxor %xmm15,%xmm5 | ||
9598 | |||
9599 | # qhasm: uint32323232 xmm8 >>= 8 | ||
9600 | # asm 1: psrld $8,<xmm8=int6464#9 | ||
9601 | # asm 2: psrld $8,<xmm8=%xmm8 | ||
9602 | psrld $8,%xmm8 | ||
9603 | |||
9604 | # qhasm: uint32323232 xmm9 >>= 8 | ||
9605 | # asm 1: psrld $8,<xmm9=int6464#10 | ||
9606 | # asm 2: psrld $8,<xmm9=%xmm9 | ||
9607 | psrld $8,%xmm9 | ||
9608 | |||
9609 | # qhasm: uint32323232 xmm10 >>= 8 | ||
9610 | # asm 1: psrld $8,<xmm10=int6464#11 | ||
9611 | # asm 2: psrld $8,<xmm10=%xmm10 | ||
9612 | psrld $8,%xmm10 | ||
9613 | |||
9614 | # qhasm: uint32323232 xmm11 >>= 8 | ||
9615 | # asm 1: psrld $8,<xmm11=int6464#12 | ||
9616 | # asm 2: psrld $8,<xmm11=%xmm11 | ||
9617 | psrld $8,%xmm11 | ||
9618 | |||
9619 | # qhasm: uint32323232 xmm12 >>= 8 | ||
9620 | # asm 1: psrld $8,<xmm12=int6464#13 | ||
9621 | # asm 2: psrld $8,<xmm12=%xmm12 | ||
9622 | psrld $8,%xmm12 | ||
9623 | |||
9624 | # qhasm: uint32323232 xmm13 >>= 8 | ||
9625 | # asm 1: psrld $8,<xmm13=int6464#14 | ||
9626 | # asm 2: psrld $8,<xmm13=%xmm13 | ||
9627 | psrld $8,%xmm13 | ||
9628 | |||
9629 | # qhasm: uint32323232 xmm14 >>= 8 | ||
9630 | # asm 1: psrld $8,<xmm14=int6464#15 | ||
9631 | # asm 2: psrld $8,<xmm14=%xmm14 | ||
9632 | psrld $8,%xmm14 | ||
9633 | |||
9634 | # qhasm: uint32323232 xmm15 >>= 8 | ||
9635 | # asm 1: psrld $8,<xmm15=int6464#16 | ||
9636 | # asm 2: psrld $8,<xmm15=%xmm15 | ||
9637 | psrld $8,%xmm15 | ||
9638 | |||
9639 | # qhasm: xmm0 ^= xmm8 | ||
9640 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
9641 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
9642 | pxor %xmm8,%xmm0 | ||
9643 | |||
9644 | # qhasm: xmm1 ^= xmm9 | ||
9645 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
9646 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
9647 | pxor %xmm9,%xmm1 | ||
9648 | |||
9649 | # qhasm: xmm6 ^= xmm10 | ||
9650 | # asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7 | ||
9651 | # asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6 | ||
9652 | pxor %xmm10,%xmm6 | ||
9653 | |||
9654 | # qhasm: xmm4 ^= xmm11 | ||
9655 | # asm 1: pxor <xmm11=int6464#12,<xmm4=int6464#5 | ||
9656 | # asm 2: pxor <xmm11=%xmm11,<xmm4=%xmm4 | ||
9657 | pxor %xmm11,%xmm4 | ||
9658 | |||
9659 | # qhasm: xmm2 ^= xmm12 | ||
9660 | # asm 1: pxor <xmm12=int6464#13,<xmm2=int6464#3 | ||
9661 | # asm 2: pxor <xmm12=%xmm12,<xmm2=%xmm2 | ||
9662 | pxor %xmm12,%xmm2 | ||
9663 | |||
9664 | # qhasm: xmm7 ^= xmm13 | ||
9665 | # asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8 | ||
9666 | # asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7 | ||
9667 | pxor %xmm13,%xmm7 | ||
9668 | |||
9669 | # qhasm: xmm3 ^= xmm14 | ||
9670 | # asm 1: pxor <xmm14=int6464#15,<xmm3=int6464#4 | ||
9671 | # asm 2: pxor <xmm14=%xmm14,<xmm3=%xmm3 | ||
9672 | pxor %xmm14,%xmm3 | ||
9673 | |||
9674 | # qhasm: xmm5 ^= xmm15 | ||
9675 | # asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6 | ||
9676 | # asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5 | ||
9677 | pxor %xmm15,%xmm5 | ||
9678 | |||
9679 | # qhasm: *(int128 *)(c + 896) = xmm0 | ||
9680 | # asm 1: movdqa <xmm0=int6464#1,896(<c=int64#1) | ||
9681 | # asm 2: movdqa <xmm0=%xmm0,896(<c=%rdi) | ||
9682 | movdqa %xmm0,896(%rdi) | ||
9683 | |||
9684 | # qhasm: *(int128 *)(c + 912) = xmm1 | ||
9685 | # asm 1: movdqa <xmm1=int6464#2,912(<c=int64#1) | ||
9686 | # asm 2: movdqa <xmm1=%xmm1,912(<c=%rdi) | ||
9687 | movdqa %xmm1,912(%rdi) | ||
9688 | |||
9689 | # qhasm: *(int128 *)(c + 928) = xmm6 | ||
9690 | # asm 1: movdqa <xmm6=int6464#7,928(<c=int64#1) | ||
9691 | # asm 2: movdqa <xmm6=%xmm6,928(<c=%rdi) | ||
9692 | movdqa %xmm6,928(%rdi) | ||
9693 | |||
9694 | # qhasm: *(int128 *)(c + 944) = xmm4 | ||
9695 | # asm 1: movdqa <xmm4=int6464#5,944(<c=int64#1) | ||
9696 | # asm 2: movdqa <xmm4=%xmm4,944(<c=%rdi) | ||
9697 | movdqa %xmm4,944(%rdi) | ||
9698 | |||
9699 | # qhasm: *(int128 *)(c + 960) = xmm2 | ||
9700 | # asm 1: movdqa <xmm2=int6464#3,960(<c=int64#1) | ||
9701 | # asm 2: movdqa <xmm2=%xmm2,960(<c=%rdi) | ||
9702 | movdqa %xmm2,960(%rdi) | ||
9703 | |||
9704 | # qhasm: *(int128 *)(c + 976) = xmm7 | ||
9705 | # asm 1: movdqa <xmm7=int6464#8,976(<c=int64#1) | ||
9706 | # asm 2: movdqa <xmm7=%xmm7,976(<c=%rdi) | ||
9707 | movdqa %xmm7,976(%rdi) | ||
9708 | |||
9709 | # qhasm: *(int128 *)(c + 992) = xmm3 | ||
9710 | # asm 1: movdqa <xmm3=int6464#4,992(<c=int64#1) | ||
9711 | # asm 2: movdqa <xmm3=%xmm3,992(<c=%rdi) | ||
9712 | movdqa %xmm3,992(%rdi) | ||
9713 | |||
9714 | # qhasm: *(int128 *)(c + 1008) = xmm5 | ||
9715 | # asm 1: movdqa <xmm5=int6464#6,1008(<c=int64#1) | ||
9716 | # asm 2: movdqa <xmm5=%xmm5,1008(<c=%rdi) | ||
9717 | movdqa %xmm5,1008(%rdi) | ||
9718 | |||
9719 | # qhasm: xmm0 ^= ONE | ||
9720 | # asm 1: pxor ONE,<xmm0=int6464#1 | ||
9721 | # asm 2: pxor ONE,<xmm0=%xmm0 | ||
9722 | pxor ONE,%xmm0 | ||
9723 | |||
9724 | # qhasm: xmm1 ^= ONE | ||
9725 | # asm 1: pxor ONE,<xmm1=int6464#2 | ||
9726 | # asm 2: pxor ONE,<xmm1=%xmm1 | ||
9727 | pxor ONE,%xmm1 | ||
9728 | |||
9729 | # qhasm: xmm7 ^= ONE | ||
9730 | # asm 1: pxor ONE,<xmm7=int6464#8 | ||
9731 | # asm 2: pxor ONE,<xmm7=%xmm7 | ||
9732 | pxor ONE,%xmm7 | ||
9733 | |||
9734 | # qhasm: xmm3 ^= ONE | ||
9735 | # asm 1: pxor ONE,<xmm3=int6464#4 | ||
9736 | # asm 2: pxor ONE,<xmm3=%xmm3 | ||
9737 | pxor ONE,%xmm3 | ||
9738 | |||
9739 | # qhasm: shuffle bytes of xmm0 by ROTB | ||
9740 | # asm 1: pshufb ROTB,<xmm0=int6464#1 | ||
9741 | # asm 2: pshufb ROTB,<xmm0=%xmm0 | ||
9742 | pshufb ROTB,%xmm0 | ||
9743 | |||
9744 | # qhasm: shuffle bytes of xmm1 by ROTB | ||
9745 | # asm 1: pshufb ROTB,<xmm1=int6464#2 | ||
9746 | # asm 2: pshufb ROTB,<xmm1=%xmm1 | ||
9747 | pshufb ROTB,%xmm1 | ||
9748 | |||
9749 | # qhasm: shuffle bytes of xmm6 by ROTB | ||
9750 | # asm 1: pshufb ROTB,<xmm6=int6464#7 | ||
9751 | # asm 2: pshufb ROTB,<xmm6=%xmm6 | ||
9752 | pshufb ROTB,%xmm6 | ||
9753 | |||
9754 | # qhasm: shuffle bytes of xmm4 by ROTB | ||
9755 | # asm 1: pshufb ROTB,<xmm4=int6464#5 | ||
9756 | # asm 2: pshufb ROTB,<xmm4=%xmm4 | ||
9757 | pshufb ROTB,%xmm4 | ||
9758 | |||
9759 | # qhasm: shuffle bytes of xmm2 by ROTB | ||
9760 | # asm 1: pshufb ROTB,<xmm2=int6464#3 | ||
9761 | # asm 2: pshufb ROTB,<xmm2=%xmm2 | ||
9762 | pshufb ROTB,%xmm2 | ||
9763 | |||
9764 | # qhasm: shuffle bytes of xmm7 by ROTB | ||
9765 | # asm 1: pshufb ROTB,<xmm7=int6464#8 | ||
9766 | # asm 2: pshufb ROTB,<xmm7=%xmm7 | ||
9767 | pshufb ROTB,%xmm7 | ||
9768 | |||
9769 | # qhasm: shuffle bytes of xmm3 by ROTB | ||
9770 | # asm 1: pshufb ROTB,<xmm3=int6464#4 | ||
9771 | # asm 2: pshufb ROTB,<xmm3=%xmm3 | ||
9772 | pshufb ROTB,%xmm3 | ||
9773 | |||
9774 | # qhasm: shuffle bytes of xmm5 by ROTB | ||
9775 | # asm 1: pshufb ROTB,<xmm5=int6464#6 | ||
9776 | # asm 2: pshufb ROTB,<xmm5=%xmm5 | ||
9777 | pshufb ROTB,%xmm5 | ||
9778 | |||
9779 | # qhasm: xmm7 ^= xmm3 | ||
9780 | # asm 1: pxor <xmm3=int6464#4,<xmm7=int6464#8 | ||
9781 | # asm 2: pxor <xmm3=%xmm3,<xmm7=%xmm7 | ||
9782 | pxor %xmm3,%xmm7 | ||
9783 | |||
9784 | # qhasm: xmm6 ^= xmm1 | ||
9785 | # asm 1: pxor <xmm1=int6464#2,<xmm6=int6464#7 | ||
9786 | # asm 2: pxor <xmm1=%xmm1,<xmm6=%xmm6 | ||
9787 | pxor %xmm1,%xmm6 | ||
9788 | |||
9789 | # qhasm: xmm7 ^= xmm0 | ||
9790 | # asm 1: pxor <xmm0=int6464#1,<xmm7=int6464#8 | ||
9791 | # asm 2: pxor <xmm0=%xmm0,<xmm7=%xmm7 | ||
9792 | pxor %xmm0,%xmm7 | ||
9793 | |||
9794 | # qhasm: xmm3 ^= xmm6 | ||
9795 | # asm 1: pxor <xmm6=int6464#7,<xmm3=int6464#4 | ||
9796 | # asm 2: pxor <xmm6=%xmm6,<xmm3=%xmm3 | ||
9797 | pxor %xmm6,%xmm3 | ||
9798 | |||
9799 | # qhasm: xmm4 ^= xmm0 | ||
9800 | # asm 1: pxor <xmm0=int6464#1,<xmm4=int6464#5 | ||
9801 | # asm 2: pxor <xmm0=%xmm0,<xmm4=%xmm4 | ||
9802 | pxor %xmm0,%xmm4 | ||
9803 | |||
9804 | # qhasm: xmm3 ^= xmm4 | ||
9805 | # asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4 | ||
9806 | # asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3 | ||
9807 | pxor %xmm4,%xmm3 | ||
9808 | |||
9809 | # qhasm: xmm4 ^= xmm5 | ||
9810 | # asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5 | ||
9811 | # asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4 | ||
9812 | pxor %xmm5,%xmm4 | ||
9813 | |||
9814 | # qhasm: xmm4 ^= xmm2 | ||
9815 | # asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5 | ||
9816 | # asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4 | ||
9817 | pxor %xmm2,%xmm4 | ||
9818 | |||
9819 | # qhasm: xmm5 ^= xmm7 | ||
9820 | # asm 1: pxor <xmm7=int6464#8,<xmm5=int6464#6 | ||
9821 | # asm 2: pxor <xmm7=%xmm7,<xmm5=%xmm5 | ||
9822 | pxor %xmm7,%xmm5 | ||
9823 | |||
9824 | # qhasm: xmm4 ^= xmm1 | ||
9825 | # asm 1: pxor <xmm1=int6464#2,<xmm4=int6464#5 | ||
9826 | # asm 2: pxor <xmm1=%xmm1,<xmm4=%xmm4 | ||
9827 | pxor %xmm1,%xmm4 | ||
9828 | |||
9829 | # qhasm: xmm2 ^= xmm7 | ||
9830 | # asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3 | ||
9831 | # asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2 | ||
9832 | pxor %xmm7,%xmm2 | ||
9833 | |||
9834 | # qhasm: xmm6 ^= xmm5 | ||
9835 | # asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7 | ||
9836 | # asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6 | ||
9837 | pxor %xmm5,%xmm6 | ||
9838 | |||
9839 | # qhasm: xmm1 ^= xmm7 | ||
9840 | # asm 1: pxor <xmm7=int6464#8,<xmm1=int6464#2 | ||
9841 | # asm 2: pxor <xmm7=%xmm7,<xmm1=%xmm1 | ||
9842 | pxor %xmm7,%xmm1 | ||
9843 | |||
9844 | # qhasm: xmm11 = xmm5 | ||
9845 | # asm 1: movdqa <xmm5=int6464#6,>xmm11=int6464#9 | ||
9846 | # asm 2: movdqa <xmm5=%xmm5,>xmm11=%xmm8 | ||
9847 | movdqa %xmm5,%xmm8 | ||
9848 | |||
9849 | # qhasm: xmm10 = xmm1 | ||
9850 | # asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10 | ||
9851 | # asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9 | ||
9852 | movdqa %xmm1,%xmm9 | ||
9853 | |||
9854 | # qhasm: xmm9 = xmm7 | ||
9855 | # asm 1: movdqa <xmm7=int6464#8,>xmm9=int6464#11 | ||
9856 | # asm 2: movdqa <xmm7=%xmm7,>xmm9=%xmm10 | ||
9857 | movdqa %xmm7,%xmm10 | ||
9858 | |||
9859 | # qhasm: xmm13 = xmm6 | ||
9860 | # asm 1: movdqa <xmm6=int6464#7,>xmm13=int6464#12 | ||
9861 | # asm 2: movdqa <xmm6=%xmm6,>xmm13=%xmm11 | ||
9862 | movdqa %xmm6,%xmm11 | ||
9863 | |||
9864 | # qhasm: xmm12 = xmm3 | ||
9865 | # asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#13 | ||
9866 | # asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm12 | ||
9867 | movdqa %xmm3,%xmm12 | ||
9868 | |||
9869 | # qhasm: xmm11 ^= xmm2 | ||
9870 | # asm 1: pxor <xmm2=int6464#3,<xmm11=int6464#9 | ||
9871 | # asm 2: pxor <xmm2=%xmm2,<xmm11=%xmm8 | ||
9872 | pxor %xmm2,%xmm8 | ||
9873 | |||
9874 | # qhasm: xmm10 ^= xmm6 | ||
9875 | # asm 1: pxor <xmm6=int6464#7,<xmm10=int6464#10 | ||
9876 | # asm 2: pxor <xmm6=%xmm6,<xmm10=%xmm9 | ||
9877 | pxor %xmm6,%xmm9 | ||
9878 | |||
9879 | # qhasm: xmm9 ^= xmm4 | ||
9880 | # asm 1: pxor <xmm4=int6464#5,<xmm9=int6464#11 | ||
9881 | # asm 2: pxor <xmm4=%xmm4,<xmm9=%xmm10 | ||
9882 | pxor %xmm4,%xmm10 | ||
9883 | |||
9884 | # qhasm: xmm13 ^= xmm2 | ||
9885 | # asm 1: pxor <xmm2=int6464#3,<xmm13=int6464#12 | ||
9886 | # asm 2: pxor <xmm2=%xmm2,<xmm13=%xmm11 | ||
9887 | pxor %xmm2,%xmm11 | ||
9888 | |||
9889 | # qhasm: xmm12 ^= xmm0 | ||
9890 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13 | ||
9891 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12 | ||
9892 | pxor %xmm0,%xmm12 | ||
9893 | |||
9894 | # qhasm: xmm14 = xmm11 | ||
9895 | # asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14 | ||
9896 | # asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13 | ||
9897 | movdqa %xmm8,%xmm13 | ||
9898 | |||
9899 | # qhasm: xmm8 = xmm10 | ||
9900 | # asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15 | ||
9901 | # asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14 | ||
9902 | movdqa %xmm9,%xmm14 | ||
9903 | |||
9904 | # qhasm: xmm15 = xmm11 | ||
9905 | # asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16 | ||
9906 | # asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15 | ||
9907 | movdqa %xmm8,%xmm15 | ||
9908 | |||
9909 | # qhasm: xmm10 |= xmm9 | ||
9910 | # asm 1: por <xmm9=int6464#11,<xmm10=int6464#10 | ||
9911 | # asm 2: por <xmm9=%xmm10,<xmm10=%xmm9 | ||
9912 | por %xmm10,%xmm9 | ||
9913 | |||
9914 | # qhasm: xmm11 |= xmm12 | ||
9915 | # asm 1: por <xmm12=int6464#13,<xmm11=int6464#9 | ||
9916 | # asm 2: por <xmm12=%xmm12,<xmm11=%xmm8 | ||
9917 | por %xmm12,%xmm8 | ||
9918 | |||
9919 | # qhasm: xmm15 ^= xmm8 | ||
9920 | # asm 1: pxor <xmm8=int6464#15,<xmm15=int6464#16 | ||
9921 | # asm 2: pxor <xmm8=%xmm14,<xmm15=%xmm15 | ||
9922 | pxor %xmm14,%xmm15 | ||
9923 | |||
9924 | # qhasm: xmm14 &= xmm12 | ||
9925 | # asm 1: pand <xmm12=int6464#13,<xmm14=int6464#14 | ||
9926 | # asm 2: pand <xmm12=%xmm12,<xmm14=%xmm13 | ||
9927 | pand %xmm12,%xmm13 | ||
9928 | |||
9929 | # qhasm: xmm8 &= xmm9 | ||
9930 | # asm 1: pand <xmm9=int6464#11,<xmm8=int6464#15 | ||
9931 | # asm 2: pand <xmm9=%xmm10,<xmm8=%xmm14 | ||
9932 | pand %xmm10,%xmm14 | ||
9933 | |||
9934 | # qhasm: xmm12 ^= xmm9 | ||
9935 | # asm 1: pxor <xmm9=int6464#11,<xmm12=int6464#13 | ||
9936 | # asm 2: pxor <xmm9=%xmm10,<xmm12=%xmm12 | ||
9937 | pxor %xmm10,%xmm12 | ||
9938 | |||
9939 | # qhasm: xmm15 &= xmm12 | ||
9940 | # asm 1: pand <xmm12=int6464#13,<xmm15=int6464#16 | ||
9941 | # asm 2: pand <xmm12=%xmm12,<xmm15=%xmm15 | ||
9942 | pand %xmm12,%xmm15 | ||
9943 | |||
9944 | # qhasm: xmm12 = xmm4 | ||
9945 | # asm 1: movdqa <xmm4=int6464#5,>xmm12=int6464#11 | ||
9946 | # asm 2: movdqa <xmm4=%xmm4,>xmm12=%xmm10 | ||
9947 | movdqa %xmm4,%xmm10 | ||
9948 | |||
9949 | # qhasm: xmm12 ^= xmm0 | ||
9950 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#11 | ||
9951 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm10 | ||
9952 | pxor %xmm0,%xmm10 | ||
9953 | |||
9954 | # qhasm: xmm13 &= xmm12 | ||
9955 | # asm 1: pand <xmm12=int6464#11,<xmm13=int6464#12 | ||
9956 | # asm 2: pand <xmm12=%xmm10,<xmm13=%xmm11 | ||
9957 | pand %xmm10,%xmm11 | ||
9958 | |||
9959 | # qhasm: xmm11 ^= xmm13 | ||
9960 | # asm 1: pxor <xmm13=int6464#12,<xmm11=int6464#9 | ||
9961 | # asm 2: pxor <xmm13=%xmm11,<xmm11=%xmm8 | ||
9962 | pxor %xmm11,%xmm8 | ||
9963 | |||
9964 | # qhasm: xmm10 ^= xmm13 | ||
9965 | # asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10 | ||
9966 | # asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9 | ||
9967 | pxor %xmm11,%xmm9 | ||
9968 | |||
9969 | # qhasm: xmm13 = xmm5 | ||
9970 | # asm 1: movdqa <xmm5=int6464#6,>xmm13=int6464#11 | ||
9971 | # asm 2: movdqa <xmm5=%xmm5,>xmm13=%xmm10 | ||
9972 | movdqa %xmm5,%xmm10 | ||
9973 | |||
9974 | # qhasm: xmm13 ^= xmm1 | ||
9975 | # asm 1: pxor <xmm1=int6464#2,<xmm13=int6464#11 | ||
9976 | # asm 2: pxor <xmm1=%xmm1,<xmm13=%xmm10 | ||
9977 | pxor %xmm1,%xmm10 | ||
9978 | |||
9979 | # qhasm: xmm12 = xmm7 | ||
9980 | # asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#12 | ||
9981 | # asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm11 | ||
9982 | movdqa %xmm7,%xmm11 | ||
9983 | |||
9984 | # qhasm: xmm9 = xmm13 | ||
9985 | # asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13 | ||
9986 | # asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12 | ||
9987 | movdqa %xmm10,%xmm12 | ||
9988 | |||
9989 | # qhasm: xmm12 ^= xmm3 | ||
9990 | # asm 1: pxor <xmm3=int6464#4,<xmm12=int6464#12 | ||
9991 | # asm 2: pxor <xmm3=%xmm3,<xmm12=%xmm11 | ||
9992 | pxor %xmm3,%xmm11 | ||
9993 | |||
9994 | # qhasm: xmm9 |= xmm12 | ||
9995 | # asm 1: por <xmm12=int6464#12,<xmm9=int6464#13 | ||
9996 | # asm 2: por <xmm12=%xmm11,<xmm9=%xmm12 | ||
9997 | por %xmm11,%xmm12 | ||
9998 | |||
9999 | # qhasm: xmm13 &= xmm12 | ||
10000 | # asm 1: pand <xmm12=int6464#12,<xmm13=int6464#11 | ||
10001 | # asm 2: pand <xmm12=%xmm11,<xmm13=%xmm10 | ||
10002 | pand %xmm11,%xmm10 | ||
10003 | |||
10004 | # qhasm: xmm8 ^= xmm13 | ||
10005 | # asm 1: pxor <xmm13=int6464#11,<xmm8=int6464#15 | ||
10006 | # asm 2: pxor <xmm13=%xmm10,<xmm8=%xmm14 | ||
10007 | pxor %xmm10,%xmm14 | ||
10008 | |||
10009 | # qhasm: xmm11 ^= xmm15 | ||
10010 | # asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#9 | ||
10011 | # asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm8 | ||
10012 | pxor %xmm15,%xmm8 | ||
10013 | |||
10014 | # qhasm: xmm10 ^= xmm14 | ||
10015 | # asm 1: pxor <xmm14=int6464#14,<xmm10=int6464#10 | ||
10016 | # asm 2: pxor <xmm14=%xmm13,<xmm10=%xmm9 | ||
10017 | pxor %xmm13,%xmm9 | ||
10018 | |||
10019 | # qhasm: xmm9 ^= xmm15 | ||
10020 | # asm 1: pxor <xmm15=int6464#16,<xmm9=int6464#13 | ||
10021 | # asm 2: pxor <xmm15=%xmm15,<xmm9=%xmm12 | ||
10022 | pxor %xmm15,%xmm12 | ||
10023 | |||
10024 | # qhasm: xmm8 ^= xmm14 | ||
10025 | # asm 1: pxor <xmm14=int6464#14,<xmm8=int6464#15 | ||
10026 | # asm 2: pxor <xmm14=%xmm13,<xmm8=%xmm14 | ||
10027 | pxor %xmm13,%xmm14 | ||
10028 | |||
10029 | # qhasm: xmm9 ^= xmm14 | ||
10030 | # asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13 | ||
10031 | # asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12 | ||
10032 | pxor %xmm13,%xmm12 | ||
10033 | |||
10034 | # qhasm: xmm12 = xmm6 | ||
10035 | # asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#11 | ||
10036 | # asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm10 | ||
10037 | movdqa %xmm6,%xmm10 | ||
10038 | |||
10039 | # qhasm: xmm13 = xmm2 | ||
10040 | # asm 1: movdqa <xmm2=int6464#3,>xmm13=int6464#12 | ||
10041 | # asm 2: movdqa <xmm2=%xmm2,>xmm13=%xmm11 | ||
10042 | movdqa %xmm2,%xmm11 | ||
10043 | |||
10044 | # qhasm: xmm14 = xmm1 | ||
10045 | # asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14 | ||
10046 | # asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13 | ||
10047 | movdqa %xmm1,%xmm13 | ||
10048 | |||
10049 | # qhasm: xmm15 = xmm5 | ||
10050 | # asm 1: movdqa <xmm5=int6464#6,>xmm15=int6464#16 | ||
10051 | # asm 2: movdqa <xmm5=%xmm5,>xmm15=%xmm15 | ||
10052 | movdqa %xmm5,%xmm15 | ||
10053 | |||
10054 | # qhasm: xmm12 &= xmm4 | ||
10055 | # asm 1: pand <xmm4=int6464#5,<xmm12=int6464#11 | ||
10056 | # asm 2: pand <xmm4=%xmm4,<xmm12=%xmm10 | ||
10057 | pand %xmm4,%xmm10 | ||
10058 | |||
10059 | # qhasm: xmm13 &= xmm0 | ||
10060 | # asm 1: pand <xmm0=int6464#1,<xmm13=int6464#12 | ||
10061 | # asm 2: pand <xmm0=%xmm0,<xmm13=%xmm11 | ||
10062 | pand %xmm0,%xmm11 | ||
10063 | |||
10064 | # qhasm: xmm14 &= xmm7 | ||
10065 | # asm 1: pand <xmm7=int6464#8,<xmm14=int6464#14 | ||
10066 | # asm 2: pand <xmm7=%xmm7,<xmm14=%xmm13 | ||
10067 | pand %xmm7,%xmm13 | ||
10068 | |||
10069 | # qhasm: xmm15 |= xmm3 | ||
10070 | # asm 1: por <xmm3=int6464#4,<xmm15=int6464#16 | ||
10071 | # asm 2: por <xmm3=%xmm3,<xmm15=%xmm15 | ||
10072 | por %xmm3,%xmm15 | ||
10073 | |||
10074 | # qhasm: xmm11 ^= xmm12 | ||
10075 | # asm 1: pxor <xmm12=int6464#11,<xmm11=int6464#9 | ||
10076 | # asm 2: pxor <xmm12=%xmm10,<xmm11=%xmm8 | ||
10077 | pxor %xmm10,%xmm8 | ||
10078 | |||
10079 | # qhasm: xmm10 ^= xmm13 | ||
10080 | # asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10 | ||
10081 | # asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9 | ||
10082 | pxor %xmm11,%xmm9 | ||
10083 | |||
10084 | # qhasm: xmm9 ^= xmm14 | ||
10085 | # asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13 | ||
10086 | # asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12 | ||
10087 | pxor %xmm13,%xmm12 | ||
10088 | |||
10089 | # qhasm: xmm8 ^= xmm15 | ||
10090 | # asm 1: pxor <xmm15=int6464#16,<xmm8=int6464#15 | ||
10091 | # asm 2: pxor <xmm15=%xmm15,<xmm8=%xmm14 | ||
10092 | pxor %xmm15,%xmm14 | ||
10093 | |||
10094 | # qhasm: xmm12 = xmm11 | ||
10095 | # asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11 | ||
10096 | # asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10 | ||
10097 | movdqa %xmm8,%xmm10 | ||
10098 | |||
10099 | # qhasm: xmm12 ^= xmm10 | ||
10100 | # asm 1: pxor <xmm10=int6464#10,<xmm12=int6464#11 | ||
10101 | # asm 2: pxor <xmm10=%xmm9,<xmm12=%xmm10 | ||
10102 | pxor %xmm9,%xmm10 | ||
10103 | |||
10104 | # qhasm: xmm11 &= xmm9 | ||
10105 | # asm 1: pand <xmm9=int6464#13,<xmm11=int6464#9 | ||
10106 | # asm 2: pand <xmm9=%xmm12,<xmm11=%xmm8 | ||
10107 | pand %xmm12,%xmm8 | ||
10108 | |||
10109 | # qhasm: xmm14 = xmm8 | ||
10110 | # asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12 | ||
10111 | # asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11 | ||
10112 | movdqa %xmm14,%xmm11 | ||
10113 | |||
10114 | # qhasm: xmm14 ^= xmm11 | ||
10115 | # asm 1: pxor <xmm11=int6464#9,<xmm14=int6464#12 | ||
10116 | # asm 2: pxor <xmm11=%xmm8,<xmm14=%xmm11 | ||
10117 | pxor %xmm8,%xmm11 | ||
10118 | |||
10119 | # qhasm: xmm15 = xmm12 | ||
10120 | # asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14 | ||
10121 | # asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13 | ||
10122 | movdqa %xmm10,%xmm13 | ||
10123 | |||
10124 | # qhasm: xmm15 &= xmm14 | ||
10125 | # asm 1: pand <xmm14=int6464#12,<xmm15=int6464#14 | ||
10126 | # asm 2: pand <xmm14=%xmm11,<xmm15=%xmm13 | ||
10127 | pand %xmm11,%xmm13 | ||
10128 | |||
10129 | # qhasm: xmm15 ^= xmm10 | ||
10130 | # asm 1: pxor <xmm10=int6464#10,<xmm15=int6464#14 | ||
10131 | # asm 2: pxor <xmm10=%xmm9,<xmm15=%xmm13 | ||
10132 | pxor %xmm9,%xmm13 | ||
10133 | |||
10134 | # qhasm: xmm13 = xmm9 | ||
10135 | # asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16 | ||
10136 | # asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15 | ||
10137 | movdqa %xmm12,%xmm15 | ||
10138 | |||
10139 | # qhasm: xmm13 ^= xmm8 | ||
10140 | # asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16 | ||
10141 | # asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15 | ||
10142 | pxor %xmm14,%xmm15 | ||
10143 | |||
10144 | # qhasm: xmm11 ^= xmm10 | ||
10145 | # asm 1: pxor <xmm10=int6464#10,<xmm11=int6464#9 | ||
10146 | # asm 2: pxor <xmm10=%xmm9,<xmm11=%xmm8 | ||
10147 | pxor %xmm9,%xmm8 | ||
10148 | |||
10149 | # qhasm: xmm13 &= xmm11 | ||
10150 | # asm 1: pand <xmm11=int6464#9,<xmm13=int6464#16 | ||
10151 | # asm 2: pand <xmm11=%xmm8,<xmm13=%xmm15 | ||
10152 | pand %xmm8,%xmm15 | ||
10153 | |||
10154 | # qhasm: xmm13 ^= xmm8 | ||
10155 | # asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16 | ||
10156 | # asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15 | ||
10157 | pxor %xmm14,%xmm15 | ||
10158 | |||
10159 | # qhasm: xmm9 ^= xmm13 | ||
10160 | # asm 1: pxor <xmm13=int6464#16,<xmm9=int6464#13 | ||
10161 | # asm 2: pxor <xmm13=%xmm15,<xmm9=%xmm12 | ||
10162 | pxor %xmm15,%xmm12 | ||
10163 | |||
10164 | # qhasm: xmm10 = xmm14 | ||
10165 | # asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9 | ||
10166 | # asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8 | ||
10167 | movdqa %xmm11,%xmm8 | ||
10168 | |||
10169 | # qhasm: xmm10 ^= xmm13 | ||
10170 | # asm 1: pxor <xmm13=int6464#16,<xmm10=int6464#9 | ||
10171 | # asm 2: pxor <xmm13=%xmm15,<xmm10=%xmm8 | ||
10172 | pxor %xmm15,%xmm8 | ||
10173 | |||
10174 | # qhasm: xmm10 &= xmm8 | ||
10175 | # asm 1: pand <xmm8=int6464#15,<xmm10=int6464#9 | ||
10176 | # asm 2: pand <xmm8=%xmm14,<xmm10=%xmm8 | ||
10177 | pand %xmm14,%xmm8 | ||
10178 | |||
10179 | # qhasm: xmm9 ^= xmm10 | ||
10180 | # asm 1: pxor <xmm10=int6464#9,<xmm9=int6464#13 | ||
10181 | # asm 2: pxor <xmm10=%xmm8,<xmm9=%xmm12 | ||
10182 | pxor %xmm8,%xmm12 | ||
10183 | |||
10184 | # qhasm: xmm14 ^= xmm10 | ||
10185 | # asm 1: pxor <xmm10=int6464#9,<xmm14=int6464#12 | ||
10186 | # asm 2: pxor <xmm10=%xmm8,<xmm14=%xmm11 | ||
10187 | pxor %xmm8,%xmm11 | ||
10188 | |||
10189 | # qhasm: xmm14 &= xmm15 | ||
10190 | # asm 1: pand <xmm15=int6464#14,<xmm14=int6464#12 | ||
10191 | # asm 2: pand <xmm15=%xmm13,<xmm14=%xmm11 | ||
10192 | pand %xmm13,%xmm11 | ||
10193 | |||
10194 | # qhasm: xmm14 ^= xmm12 | ||
10195 | # asm 1: pxor <xmm12=int6464#11,<xmm14=int6464#12 | ||
10196 | # asm 2: pxor <xmm12=%xmm10,<xmm14=%xmm11 | ||
10197 | pxor %xmm10,%xmm11 | ||
10198 | |||
10199 | # qhasm: xmm12 = xmm3 | ||
10200 | # asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#9 | ||
10201 | # asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm8 | ||
10202 | movdqa %xmm3,%xmm8 | ||
10203 | |||
10204 | # qhasm: xmm8 = xmm7 | ||
10205 | # asm 1: movdqa <xmm7=int6464#8,>xmm8=int6464#10 | ||
10206 | # asm 2: movdqa <xmm7=%xmm7,>xmm8=%xmm9 | ||
10207 | movdqa %xmm7,%xmm9 | ||
10208 | |||
10209 | # qhasm: xmm10 = xmm15 | ||
10210 | # asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11 | ||
10211 | # asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10 | ||
10212 | movdqa %xmm13,%xmm10 | ||
10213 | |||
10214 | # qhasm: xmm10 ^= xmm14 | ||
10215 | # asm 1: pxor <xmm14=int6464#12,<xmm10=int6464#11 | ||
10216 | # asm 2: pxor <xmm14=%xmm11,<xmm10=%xmm10 | ||
10217 | pxor %xmm11,%xmm10 | ||
10218 | |||
10219 | # qhasm: xmm10 &= xmm3 | ||
10220 | # asm 1: pand <xmm3=int6464#4,<xmm10=int6464#11 | ||
10221 | # asm 2: pand <xmm3=%xmm3,<xmm10=%xmm10 | ||
10222 | pand %xmm3,%xmm10 | ||
10223 | |||
10224 | # qhasm: xmm3 ^= xmm7 | ||
10225 | # asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4 | ||
10226 | # asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3 | ||
10227 | pxor %xmm7,%xmm3 | ||
10228 | |||
10229 | # qhasm: xmm3 &= xmm14 | ||
10230 | # asm 1: pand <xmm14=int6464#12,<xmm3=int6464#4 | ||
10231 | # asm 2: pand <xmm14=%xmm11,<xmm3=%xmm3 | ||
10232 | pand %xmm11,%xmm3 | ||
10233 | |||
10234 | # qhasm: xmm7 &= xmm15 | ||
10235 | # asm 1: pand <xmm15=int6464#14,<xmm7=int6464#8 | ||
10236 | # asm 2: pand <xmm15=%xmm13,<xmm7=%xmm7 | ||
10237 | pand %xmm13,%xmm7 | ||
10238 | |||
10239 | # qhasm: xmm3 ^= xmm7 | ||
10240 | # asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4 | ||
10241 | # asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3 | ||
10242 | pxor %xmm7,%xmm3 | ||
10243 | |||
10244 | # qhasm: xmm7 ^= xmm10 | ||
10245 | # asm 1: pxor <xmm10=int6464#11,<xmm7=int6464#8 | ||
10246 | # asm 2: pxor <xmm10=%xmm10,<xmm7=%xmm7 | ||
10247 | pxor %xmm10,%xmm7 | ||
10248 | |||
10249 | # qhasm: xmm12 ^= xmm0 | ||
10250 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#9 | ||
10251 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm8 | ||
10252 | pxor %xmm0,%xmm8 | ||
10253 | |||
10254 | # qhasm: xmm8 ^= xmm4 | ||
10255 | # asm 1: pxor <xmm4=int6464#5,<xmm8=int6464#10 | ||
10256 | # asm 2: pxor <xmm4=%xmm4,<xmm8=%xmm9 | ||
10257 | pxor %xmm4,%xmm9 | ||
10258 | |||
10259 | # qhasm: xmm15 ^= xmm13 | ||
10260 | # asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14 | ||
10261 | # asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13 | ||
10262 | pxor %xmm15,%xmm13 | ||
10263 | |||
10264 | # qhasm: xmm14 ^= xmm9 | ||
10265 | # asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12 | ||
10266 | # asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11 | ||
10267 | pxor %xmm12,%xmm11 | ||
10268 | |||
10269 | # qhasm: xmm11 = xmm15 | ||
10270 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
10271 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
10272 | movdqa %xmm13,%xmm10 | ||
10273 | |||
10274 | # qhasm: xmm11 ^= xmm14 | ||
10275 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
10276 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
10277 | pxor %xmm11,%xmm10 | ||
10278 | |||
10279 | # qhasm: xmm11 &= xmm12 | ||
10280 | # asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11 | ||
10281 | # asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10 | ||
10282 | pand %xmm8,%xmm10 | ||
10283 | |||
10284 | # qhasm: xmm12 ^= xmm8 | ||
10285 | # asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9 | ||
10286 | # asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8 | ||
10287 | pxor %xmm9,%xmm8 | ||
10288 | |||
10289 | # qhasm: xmm12 &= xmm14 | ||
10290 | # asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9 | ||
10291 | # asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8 | ||
10292 | pand %xmm11,%xmm8 | ||
10293 | |||
10294 | # qhasm: xmm8 &= xmm15 | ||
10295 | # asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10 | ||
10296 | # asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9 | ||
10297 | pand %xmm13,%xmm9 | ||
10298 | |||
10299 | # qhasm: xmm8 ^= xmm12 | ||
10300 | # asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10 | ||
10301 | # asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9 | ||
10302 | pxor %xmm8,%xmm9 | ||
10303 | |||
10304 | # qhasm: xmm12 ^= xmm11 | ||
10305 | # asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9 | ||
10306 | # asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8 | ||
10307 | pxor %xmm10,%xmm8 | ||
10308 | |||
10309 | # qhasm: xmm10 = xmm13 | ||
10310 | # asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11 | ||
10311 | # asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10 | ||
10312 | movdqa %xmm15,%xmm10 | ||
10313 | |||
10314 | # qhasm: xmm10 ^= xmm9 | ||
10315 | # asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11 | ||
10316 | # asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10 | ||
10317 | pxor %xmm12,%xmm10 | ||
10318 | |||
10319 | # qhasm: xmm10 &= xmm0 | ||
10320 | # asm 1: pand <xmm0=int6464#1,<xmm10=int6464#11 | ||
10321 | # asm 2: pand <xmm0=%xmm0,<xmm10=%xmm10 | ||
10322 | pand %xmm0,%xmm10 | ||
10323 | |||
10324 | # qhasm: xmm0 ^= xmm4 | ||
10325 | # asm 1: pxor <xmm4=int6464#5,<xmm0=int6464#1 | ||
10326 | # asm 2: pxor <xmm4=%xmm4,<xmm0=%xmm0 | ||
10327 | pxor %xmm4,%xmm0 | ||
10328 | |||
10329 | # qhasm: xmm0 &= xmm9 | ||
10330 | # asm 1: pand <xmm9=int6464#13,<xmm0=int6464#1 | ||
10331 | # asm 2: pand <xmm9=%xmm12,<xmm0=%xmm0 | ||
10332 | pand %xmm12,%xmm0 | ||
10333 | |||
10334 | # qhasm: xmm4 &= xmm13 | ||
10335 | # asm 1: pand <xmm13=int6464#16,<xmm4=int6464#5 | ||
10336 | # asm 2: pand <xmm13=%xmm15,<xmm4=%xmm4 | ||
10337 | pand %xmm15,%xmm4 | ||
10338 | |||
10339 | # qhasm: xmm0 ^= xmm4 | ||
10340 | # asm 1: pxor <xmm4=int6464#5,<xmm0=int6464#1 | ||
10341 | # asm 2: pxor <xmm4=%xmm4,<xmm0=%xmm0 | ||
10342 | pxor %xmm4,%xmm0 | ||
10343 | |||
10344 | # qhasm: xmm4 ^= xmm10 | ||
10345 | # asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5 | ||
10346 | # asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4 | ||
10347 | pxor %xmm10,%xmm4 | ||
10348 | |||
10349 | # qhasm: xmm3 ^= xmm12 | ||
10350 | # asm 1: pxor <xmm12=int6464#9,<xmm3=int6464#4 | ||
10351 | # asm 2: pxor <xmm12=%xmm8,<xmm3=%xmm3 | ||
10352 | pxor %xmm8,%xmm3 | ||
10353 | |||
10354 | # qhasm: xmm0 ^= xmm12 | ||
10355 | # asm 1: pxor <xmm12=int6464#9,<xmm0=int6464#1 | ||
10356 | # asm 2: pxor <xmm12=%xmm8,<xmm0=%xmm0 | ||
10357 | pxor %xmm8,%xmm0 | ||
10358 | |||
10359 | # qhasm: xmm7 ^= xmm8 | ||
10360 | # asm 1: pxor <xmm8=int6464#10,<xmm7=int6464#8 | ||
10361 | # asm 2: pxor <xmm8=%xmm9,<xmm7=%xmm7 | ||
10362 | pxor %xmm9,%xmm7 | ||
10363 | |||
10364 | # qhasm: xmm4 ^= xmm8 | ||
10365 | # asm 1: pxor <xmm8=int6464#10,<xmm4=int6464#5 | ||
10366 | # asm 2: pxor <xmm8=%xmm9,<xmm4=%xmm4 | ||
10367 | pxor %xmm9,%xmm4 | ||
10368 | |||
10369 | # qhasm: xmm12 = xmm5 | ||
10370 | # asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#9 | ||
10371 | # asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm8 | ||
10372 | movdqa %xmm5,%xmm8 | ||
10373 | |||
10374 | # qhasm: xmm8 = xmm1 | ||
10375 | # asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10 | ||
10376 | # asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9 | ||
10377 | movdqa %xmm1,%xmm9 | ||
10378 | |||
10379 | # qhasm: xmm12 ^= xmm2 | ||
10380 | # asm 1: pxor <xmm2=int6464#3,<xmm12=int6464#9 | ||
10381 | # asm 2: pxor <xmm2=%xmm2,<xmm12=%xmm8 | ||
10382 | pxor %xmm2,%xmm8 | ||
10383 | |||
10384 | # qhasm: xmm8 ^= xmm6 | ||
10385 | # asm 1: pxor <xmm6=int6464#7,<xmm8=int6464#10 | ||
10386 | # asm 2: pxor <xmm6=%xmm6,<xmm8=%xmm9 | ||
10387 | pxor %xmm6,%xmm9 | ||
10388 | |||
10389 | # qhasm: xmm11 = xmm15 | ||
10390 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
10391 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
10392 | movdqa %xmm13,%xmm10 | ||
10393 | |||
10394 | # qhasm: xmm11 ^= xmm14 | ||
10395 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
10396 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
10397 | pxor %xmm11,%xmm10 | ||
10398 | |||
10399 | # qhasm: xmm11 &= xmm12 | ||
10400 | # asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11 | ||
10401 | # asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10 | ||
10402 | pand %xmm8,%xmm10 | ||
10403 | |||
10404 | # qhasm: xmm12 ^= xmm8 | ||
10405 | # asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9 | ||
10406 | # asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8 | ||
10407 | pxor %xmm9,%xmm8 | ||
10408 | |||
10409 | # qhasm: xmm12 &= xmm14 | ||
10410 | # asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9 | ||
10411 | # asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8 | ||
10412 | pand %xmm11,%xmm8 | ||
10413 | |||
10414 | # qhasm: xmm8 &= xmm15 | ||
10415 | # asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10 | ||
10416 | # asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9 | ||
10417 | pand %xmm13,%xmm9 | ||
10418 | |||
10419 | # qhasm: xmm8 ^= xmm12 | ||
10420 | # asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10 | ||
10421 | # asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9 | ||
10422 | pxor %xmm8,%xmm9 | ||
10423 | |||
10424 | # qhasm: xmm12 ^= xmm11 | ||
10425 | # asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9 | ||
10426 | # asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8 | ||
10427 | pxor %xmm10,%xmm8 | ||
10428 | |||
10429 | # qhasm: xmm10 = xmm13 | ||
10430 | # asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11 | ||
10431 | # asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10 | ||
10432 | movdqa %xmm15,%xmm10 | ||
10433 | |||
10434 | # qhasm: xmm10 ^= xmm9 | ||
10435 | # asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11 | ||
10436 | # asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10 | ||
10437 | pxor %xmm12,%xmm10 | ||
10438 | |||
10439 | # qhasm: xmm10 &= xmm2 | ||
10440 | # asm 1: pand <xmm2=int6464#3,<xmm10=int6464#11 | ||
10441 | # asm 2: pand <xmm2=%xmm2,<xmm10=%xmm10 | ||
10442 | pand %xmm2,%xmm10 | ||
10443 | |||
10444 | # qhasm: xmm2 ^= xmm6 | ||
10445 | # asm 1: pxor <xmm6=int6464#7,<xmm2=int6464#3 | ||
10446 | # asm 2: pxor <xmm6=%xmm6,<xmm2=%xmm2 | ||
10447 | pxor %xmm6,%xmm2 | ||
10448 | |||
10449 | # qhasm: xmm2 &= xmm9 | ||
10450 | # asm 1: pand <xmm9=int6464#13,<xmm2=int6464#3 | ||
10451 | # asm 2: pand <xmm9=%xmm12,<xmm2=%xmm2 | ||
10452 | pand %xmm12,%xmm2 | ||
10453 | |||
10454 | # qhasm: xmm6 &= xmm13 | ||
10455 | # asm 1: pand <xmm13=int6464#16,<xmm6=int6464#7 | ||
10456 | # asm 2: pand <xmm13=%xmm15,<xmm6=%xmm6 | ||
10457 | pand %xmm15,%xmm6 | ||
10458 | |||
10459 | # qhasm: xmm2 ^= xmm6 | ||
10460 | # asm 1: pxor <xmm6=int6464#7,<xmm2=int6464#3 | ||
10461 | # asm 2: pxor <xmm6=%xmm6,<xmm2=%xmm2 | ||
10462 | pxor %xmm6,%xmm2 | ||
10463 | |||
10464 | # qhasm: xmm6 ^= xmm10 | ||
10465 | # asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7 | ||
10466 | # asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6 | ||
10467 | pxor %xmm10,%xmm6 | ||
10468 | |||
10469 | # qhasm: xmm15 ^= xmm13 | ||
10470 | # asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14 | ||
10471 | # asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13 | ||
10472 | pxor %xmm15,%xmm13 | ||
10473 | |||
10474 | # qhasm: xmm14 ^= xmm9 | ||
10475 | # asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12 | ||
10476 | # asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11 | ||
10477 | pxor %xmm12,%xmm11 | ||
10478 | |||
10479 | # qhasm: xmm11 = xmm15 | ||
10480 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
10481 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
10482 | movdqa %xmm13,%xmm10 | ||
10483 | |||
10484 | # qhasm: xmm11 ^= xmm14 | ||
10485 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
10486 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
10487 | pxor %xmm11,%xmm10 | ||
10488 | |||
10489 | # qhasm: xmm11 &= xmm5 | ||
10490 | # asm 1: pand <xmm5=int6464#6,<xmm11=int6464#11 | ||
10491 | # asm 2: pand <xmm5=%xmm5,<xmm11=%xmm10 | ||
10492 | pand %xmm5,%xmm10 | ||
10493 | |||
10494 | # qhasm: xmm5 ^= xmm1 | ||
10495 | # asm 1: pxor <xmm1=int6464#2,<xmm5=int6464#6 | ||
10496 | # asm 2: pxor <xmm1=%xmm1,<xmm5=%xmm5 | ||
10497 | pxor %xmm1,%xmm5 | ||
10498 | |||
10499 | # qhasm: xmm5 &= xmm14 | ||
10500 | # asm 1: pand <xmm14=int6464#12,<xmm5=int6464#6 | ||
10501 | # asm 2: pand <xmm14=%xmm11,<xmm5=%xmm5 | ||
10502 | pand %xmm11,%xmm5 | ||
10503 | |||
10504 | # qhasm: xmm1 &= xmm15 | ||
10505 | # asm 1: pand <xmm15=int6464#14,<xmm1=int6464#2 | ||
10506 | # asm 2: pand <xmm15=%xmm13,<xmm1=%xmm1 | ||
10507 | pand %xmm13,%xmm1 | ||
10508 | |||
10509 | # qhasm: xmm5 ^= xmm1 | ||
10510 | # asm 1: pxor <xmm1=int6464#2,<xmm5=int6464#6 | ||
10511 | # asm 2: pxor <xmm1=%xmm1,<xmm5=%xmm5 | ||
10512 | pxor %xmm1,%xmm5 | ||
10513 | |||
10514 | # qhasm: xmm1 ^= xmm11 | ||
10515 | # asm 1: pxor <xmm11=int6464#11,<xmm1=int6464#2 | ||
10516 | # asm 2: pxor <xmm11=%xmm10,<xmm1=%xmm1 | ||
10517 | pxor %xmm10,%xmm1 | ||
10518 | |||
10519 | # qhasm: xmm5 ^= xmm12 | ||
10520 | # asm 1: pxor <xmm12=int6464#9,<xmm5=int6464#6 | ||
10521 | # asm 2: pxor <xmm12=%xmm8,<xmm5=%xmm5 | ||
10522 | pxor %xmm8,%xmm5 | ||
10523 | |||
10524 | # qhasm: xmm2 ^= xmm12 | ||
10525 | # asm 1: pxor <xmm12=int6464#9,<xmm2=int6464#3 | ||
10526 | # asm 2: pxor <xmm12=%xmm8,<xmm2=%xmm2 | ||
10527 | pxor %xmm8,%xmm2 | ||
10528 | |||
10529 | # qhasm: xmm1 ^= xmm8 | ||
10530 | # asm 1: pxor <xmm8=int6464#10,<xmm1=int6464#2 | ||
10531 | # asm 2: pxor <xmm8=%xmm9,<xmm1=%xmm1 | ||
10532 | pxor %xmm9,%xmm1 | ||
10533 | |||
10534 | # qhasm: xmm6 ^= xmm8 | ||
10535 | # asm 1: pxor <xmm8=int6464#10,<xmm6=int6464#7 | ||
10536 | # asm 2: pxor <xmm8=%xmm9,<xmm6=%xmm6 | ||
10537 | pxor %xmm9,%xmm6 | ||
10538 | |||
10539 | # qhasm: xmm5 ^= xmm0 | ||
10540 | # asm 1: pxor <xmm0=int6464#1,<xmm5=int6464#6 | ||
10541 | # asm 2: pxor <xmm0=%xmm0,<xmm5=%xmm5 | ||
10542 | pxor %xmm0,%xmm5 | ||
10543 | |||
10544 | # qhasm: xmm1 ^= xmm3 | ||
10545 | # asm 1: pxor <xmm3=int6464#4,<xmm1=int6464#2 | ||
10546 | # asm 2: pxor <xmm3=%xmm3,<xmm1=%xmm1 | ||
10547 | pxor %xmm3,%xmm1 | ||
10548 | |||
10549 | # qhasm: xmm2 ^= xmm5 | ||
10550 | # asm 1: pxor <xmm5=int6464#6,<xmm2=int6464#3 | ||
10551 | # asm 2: pxor <xmm5=%xmm5,<xmm2=%xmm2 | ||
10552 | pxor %xmm5,%xmm2 | ||
10553 | |||
10554 | # qhasm: xmm3 ^= xmm0 | ||
10555 | # asm 1: pxor <xmm0=int6464#1,<xmm3=int6464#4 | ||
10556 | # asm 2: pxor <xmm0=%xmm0,<xmm3=%xmm3 | ||
10557 | pxor %xmm0,%xmm3 | ||
10558 | |||
10559 | # qhasm: xmm0 ^= xmm1 | ||
10560 | # asm 1: pxor <xmm1=int6464#2,<xmm0=int6464#1 | ||
10561 | # asm 2: pxor <xmm1=%xmm1,<xmm0=%xmm0 | ||
10562 | pxor %xmm1,%xmm0 | ||
10563 | |||
10564 | # qhasm: xmm1 ^= xmm7 | ||
10565 | # asm 1: pxor <xmm7=int6464#8,<xmm1=int6464#2 | ||
10566 | # asm 2: pxor <xmm7=%xmm7,<xmm1=%xmm1 | ||
10567 | pxor %xmm7,%xmm1 | ||
10568 | |||
10569 | # qhasm: xmm7 ^= xmm6 | ||
10570 | # asm 1: pxor <xmm6=int6464#7,<xmm7=int6464#8 | ||
10571 | # asm 2: pxor <xmm6=%xmm6,<xmm7=%xmm7 | ||
10572 | pxor %xmm6,%xmm7 | ||
10573 | |||
10574 | # qhasm: xmm2 ^= xmm7 | ||
10575 | # asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3 | ||
10576 | # asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2 | ||
10577 | pxor %xmm7,%xmm2 | ||
10578 | |||
10579 | # qhasm: xmm6 ^= xmm4 | ||
10580 | # asm 1: pxor <xmm4=int6464#5,<xmm6=int6464#7 | ||
10581 | # asm 2: pxor <xmm4=%xmm4,<xmm6=%xmm6 | ||
10582 | pxor %xmm4,%xmm6 | ||
10583 | |||
10584 | # qhasm: xmm4 ^= xmm7 | ||
10585 | # asm 1: pxor <xmm7=int6464#8,<xmm4=int6464#5 | ||
10586 | # asm 2: pxor <xmm7=%xmm7,<xmm4=%xmm4 | ||
10587 | pxor %xmm7,%xmm4 | ||
10588 | |||
10589 | # qhasm: xmm3 ^= xmm4 | ||
10590 | # asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4 | ||
10591 | # asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3 | ||
10592 | pxor %xmm4,%xmm3 | ||
10593 | |||
10594 | # qhasm: xmm7 ^= RCON | ||
10595 | # asm 1: pxor RCON,<xmm7=int6464#8 | ||
10596 | # asm 2: pxor RCON,<xmm7=%xmm7 | ||
10597 | pxor RCON,%xmm7 | ||
10598 | |||
10599 | # qhasm: shuffle bytes of xmm0 by EXPB0 | ||
10600 | # asm 1: pshufb EXPB0,<xmm0=int6464#1 | ||
10601 | # asm 2: pshufb EXPB0,<xmm0=%xmm0 | ||
10602 | pshufb EXPB0,%xmm0 | ||
10603 | |||
10604 | # qhasm: shuffle bytes of xmm1 by EXPB0 | ||
10605 | # asm 1: pshufb EXPB0,<xmm1=int6464#2 | ||
10606 | # asm 2: pshufb EXPB0,<xmm1=%xmm1 | ||
10607 | pshufb EXPB0,%xmm1 | ||
10608 | |||
10609 | # qhasm: shuffle bytes of xmm2 by EXPB0 | ||
10610 | # asm 1: pshufb EXPB0,<xmm2=int6464#3 | ||
10611 | # asm 2: pshufb EXPB0,<xmm2=%xmm2 | ||
10612 | pshufb EXPB0,%xmm2 | ||
10613 | |||
10614 | # qhasm: shuffle bytes of xmm3 by EXPB0 | ||
10615 | # asm 1: pshufb EXPB0,<xmm3=int6464#4 | ||
10616 | # asm 2: pshufb EXPB0,<xmm3=%xmm3 | ||
10617 | pshufb EXPB0,%xmm3 | ||
10618 | |||
10619 | # qhasm: shuffle bytes of xmm4 by EXPB0 | ||
10620 | # asm 1: pshufb EXPB0,<xmm4=int6464#5 | ||
10621 | # asm 2: pshufb EXPB0,<xmm4=%xmm4 | ||
10622 | pshufb EXPB0,%xmm4 | ||
10623 | |||
10624 | # qhasm: shuffle bytes of xmm5 by EXPB0 | ||
10625 | # asm 1: pshufb EXPB0,<xmm5=int6464#6 | ||
10626 | # asm 2: pshufb EXPB0,<xmm5=%xmm5 | ||
10627 | pshufb EXPB0,%xmm5 | ||
10628 | |||
10629 | # qhasm: shuffle bytes of xmm6 by EXPB0 | ||
10630 | # asm 1: pshufb EXPB0,<xmm6=int6464#7 | ||
10631 | # asm 2: pshufb EXPB0,<xmm6=%xmm6 | ||
10632 | pshufb EXPB0,%xmm6 | ||
10633 | |||
10634 | # qhasm: shuffle bytes of xmm7 by EXPB0 | ||
10635 | # asm 1: pshufb EXPB0,<xmm7=int6464#8 | ||
10636 | # asm 2: pshufb EXPB0,<xmm7=%xmm7 | ||
10637 | pshufb EXPB0,%xmm7 | ||
10638 | |||
10639 | # qhasm: xmm8 = *(int128 *)(c + 896) | ||
10640 | # asm 1: movdqa 896(<c=int64#1),>xmm8=int6464#9 | ||
10641 | # asm 2: movdqa 896(<c=%rdi),>xmm8=%xmm8 | ||
10642 | movdqa 896(%rdi),%xmm8 | ||
10643 | |||
10644 | # qhasm: xmm9 = *(int128 *)(c + 912) | ||
10645 | # asm 1: movdqa 912(<c=int64#1),>xmm9=int6464#10 | ||
10646 | # asm 2: movdqa 912(<c=%rdi),>xmm9=%xmm9 | ||
10647 | movdqa 912(%rdi),%xmm9 | ||
10648 | |||
10649 | # qhasm: xmm10 = *(int128 *)(c + 928) | ||
10650 | # asm 1: movdqa 928(<c=int64#1),>xmm10=int6464#11 | ||
10651 | # asm 2: movdqa 928(<c=%rdi),>xmm10=%xmm10 | ||
10652 | movdqa 928(%rdi),%xmm10 | ||
10653 | |||
10654 | # qhasm: xmm11 = *(int128 *)(c + 944) | ||
10655 | # asm 1: movdqa 944(<c=int64#1),>xmm11=int6464#12 | ||
10656 | # asm 2: movdqa 944(<c=%rdi),>xmm11=%xmm11 | ||
10657 | movdqa 944(%rdi),%xmm11 | ||
10658 | |||
10659 | # qhasm: xmm12 = *(int128 *)(c + 960) | ||
10660 | # asm 1: movdqa 960(<c=int64#1),>xmm12=int6464#13 | ||
10661 | # asm 2: movdqa 960(<c=%rdi),>xmm12=%xmm12 | ||
10662 | movdqa 960(%rdi),%xmm12 | ||
10663 | |||
10664 | # qhasm: xmm13 = *(int128 *)(c + 976) | ||
10665 | # asm 1: movdqa 976(<c=int64#1),>xmm13=int6464#14 | ||
10666 | # asm 2: movdqa 976(<c=%rdi),>xmm13=%xmm13 | ||
10667 | movdqa 976(%rdi),%xmm13 | ||
10668 | |||
10669 | # qhasm: xmm14 = *(int128 *)(c + 992) | ||
10670 | # asm 1: movdqa 992(<c=int64#1),>xmm14=int6464#15 | ||
10671 | # asm 2: movdqa 992(<c=%rdi),>xmm14=%xmm14 | ||
10672 | movdqa 992(%rdi),%xmm14 | ||
10673 | |||
10674 | # qhasm: xmm15 = *(int128 *)(c + 1008) | ||
10675 | # asm 1: movdqa 1008(<c=int64#1),>xmm15=int6464#16 | ||
10676 | # asm 2: movdqa 1008(<c=%rdi),>xmm15=%xmm15 | ||
10677 | movdqa 1008(%rdi),%xmm15 | ||
10678 | |||
10679 | # qhasm: xmm8 ^= ONE | ||
10680 | # asm 1: pxor ONE,<xmm8=int6464#9 | ||
10681 | # asm 2: pxor ONE,<xmm8=%xmm8 | ||
10682 | pxor ONE,%xmm8 | ||
10683 | |||
10684 | # qhasm: xmm9 ^= ONE | ||
10685 | # asm 1: pxor ONE,<xmm9=int6464#10 | ||
10686 | # asm 2: pxor ONE,<xmm9=%xmm9 | ||
10687 | pxor ONE,%xmm9 | ||
10688 | |||
10689 | # qhasm: xmm13 ^= ONE | ||
10690 | # asm 1: pxor ONE,<xmm13=int6464#14 | ||
10691 | # asm 2: pxor ONE,<xmm13=%xmm13 | ||
10692 | pxor ONE,%xmm13 | ||
10693 | |||
10694 | # qhasm: xmm14 ^= ONE | ||
10695 | # asm 1: pxor ONE,<xmm14=int6464#15 | ||
10696 | # asm 2: pxor ONE,<xmm14=%xmm14 | ||
10697 | pxor ONE,%xmm14 | ||
10698 | |||
10699 | # qhasm: xmm0 ^= xmm8 | ||
10700 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
10701 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
10702 | pxor %xmm8,%xmm0 | ||
10703 | |||
10704 | # qhasm: xmm1 ^= xmm9 | ||
10705 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
10706 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
10707 | pxor %xmm9,%xmm1 | ||
10708 | |||
10709 | # qhasm: xmm2 ^= xmm10 | ||
10710 | # asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3 | ||
10711 | # asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2 | ||
10712 | pxor %xmm10,%xmm2 | ||
10713 | |||
10714 | # qhasm: xmm3 ^= xmm11 | ||
10715 | # asm 1: pxor <xmm11=int6464#12,<xmm3=int6464#4 | ||
10716 | # asm 2: pxor <xmm11=%xmm11,<xmm3=%xmm3 | ||
10717 | pxor %xmm11,%xmm3 | ||
10718 | |||
10719 | # qhasm: xmm4 ^= xmm12 | ||
10720 | # asm 1: pxor <xmm12=int6464#13,<xmm4=int6464#5 | ||
10721 | # asm 2: pxor <xmm12=%xmm12,<xmm4=%xmm4 | ||
10722 | pxor %xmm12,%xmm4 | ||
10723 | |||
10724 | # qhasm: xmm5 ^= xmm13 | ||
10725 | # asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6 | ||
10726 | # asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5 | ||
10727 | pxor %xmm13,%xmm5 | ||
10728 | |||
10729 | # qhasm: xmm6 ^= xmm14 | ||
10730 | # asm 1: pxor <xmm14=int6464#15,<xmm6=int6464#7 | ||
10731 | # asm 2: pxor <xmm14=%xmm14,<xmm6=%xmm6 | ||
10732 | pxor %xmm14,%xmm6 | ||
10733 | |||
10734 | # qhasm: xmm7 ^= xmm15 | ||
10735 | # asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8 | ||
10736 | # asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7 | ||
10737 | pxor %xmm15,%xmm7 | ||
10738 | |||
10739 | # qhasm: uint32323232 xmm8 >>= 8 | ||
10740 | # asm 1: psrld $8,<xmm8=int6464#9 | ||
10741 | # asm 2: psrld $8,<xmm8=%xmm8 | ||
10742 | psrld $8,%xmm8 | ||
10743 | |||
10744 | # qhasm: uint32323232 xmm9 >>= 8 | ||
10745 | # asm 1: psrld $8,<xmm9=int6464#10 | ||
10746 | # asm 2: psrld $8,<xmm9=%xmm9 | ||
10747 | psrld $8,%xmm9 | ||
10748 | |||
10749 | # qhasm: uint32323232 xmm10 >>= 8 | ||
10750 | # asm 1: psrld $8,<xmm10=int6464#11 | ||
10751 | # asm 2: psrld $8,<xmm10=%xmm10 | ||
10752 | psrld $8,%xmm10 | ||
10753 | |||
10754 | # qhasm: uint32323232 xmm11 >>= 8 | ||
10755 | # asm 1: psrld $8,<xmm11=int6464#12 | ||
10756 | # asm 2: psrld $8,<xmm11=%xmm11 | ||
10757 | psrld $8,%xmm11 | ||
10758 | |||
10759 | # qhasm: uint32323232 xmm12 >>= 8 | ||
10760 | # asm 1: psrld $8,<xmm12=int6464#13 | ||
10761 | # asm 2: psrld $8,<xmm12=%xmm12 | ||
10762 | psrld $8,%xmm12 | ||
10763 | |||
10764 | # qhasm: uint32323232 xmm13 >>= 8 | ||
10765 | # asm 1: psrld $8,<xmm13=int6464#14 | ||
10766 | # asm 2: psrld $8,<xmm13=%xmm13 | ||
10767 | psrld $8,%xmm13 | ||
10768 | |||
10769 | # qhasm: uint32323232 xmm14 >>= 8 | ||
10770 | # asm 1: psrld $8,<xmm14=int6464#15 | ||
10771 | # asm 2: psrld $8,<xmm14=%xmm14 | ||
10772 | psrld $8,%xmm14 | ||
10773 | |||
10774 | # qhasm: uint32323232 xmm15 >>= 8 | ||
10775 | # asm 1: psrld $8,<xmm15=int6464#16 | ||
10776 | # asm 2: psrld $8,<xmm15=%xmm15 | ||
10777 | psrld $8,%xmm15 | ||
10778 | |||
10779 | # qhasm: xmm0 ^= xmm8 | ||
10780 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
10781 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
10782 | pxor %xmm8,%xmm0 | ||
10783 | |||
10784 | # qhasm: xmm1 ^= xmm9 | ||
10785 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
10786 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
10787 | pxor %xmm9,%xmm1 | ||
10788 | |||
10789 | # qhasm: xmm2 ^= xmm10 | ||
10790 | # asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3 | ||
10791 | # asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2 | ||
10792 | pxor %xmm10,%xmm2 | ||
10793 | |||
10794 | # qhasm: xmm3 ^= xmm11 | ||
10795 | # asm 1: pxor <xmm11=int6464#12,<xmm3=int6464#4 | ||
10796 | # asm 2: pxor <xmm11=%xmm11,<xmm3=%xmm3 | ||
10797 | pxor %xmm11,%xmm3 | ||
10798 | |||
10799 | # qhasm: xmm4 ^= xmm12 | ||
10800 | # asm 1: pxor <xmm12=int6464#13,<xmm4=int6464#5 | ||
10801 | # asm 2: pxor <xmm12=%xmm12,<xmm4=%xmm4 | ||
10802 | pxor %xmm12,%xmm4 | ||
10803 | |||
10804 | # qhasm: xmm5 ^= xmm13 | ||
10805 | # asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6 | ||
10806 | # asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5 | ||
10807 | pxor %xmm13,%xmm5 | ||
10808 | |||
10809 | # qhasm: xmm6 ^= xmm14 | ||
10810 | # asm 1: pxor <xmm14=int6464#15,<xmm6=int6464#7 | ||
10811 | # asm 2: pxor <xmm14=%xmm14,<xmm6=%xmm6 | ||
10812 | pxor %xmm14,%xmm6 | ||
10813 | |||
10814 | # qhasm: xmm7 ^= xmm15 | ||
10815 | # asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8 | ||
10816 | # asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7 | ||
10817 | pxor %xmm15,%xmm7 | ||
10818 | |||
10819 | # qhasm: uint32323232 xmm8 >>= 8 | ||
10820 | # asm 1: psrld $8,<xmm8=int6464#9 | ||
10821 | # asm 2: psrld $8,<xmm8=%xmm8 | ||
10822 | psrld $8,%xmm8 | ||
10823 | |||
10824 | # qhasm: uint32323232 xmm9 >>= 8 | ||
10825 | # asm 1: psrld $8,<xmm9=int6464#10 | ||
10826 | # asm 2: psrld $8,<xmm9=%xmm9 | ||
10827 | psrld $8,%xmm9 | ||
10828 | |||
10829 | # qhasm: uint32323232 xmm10 >>= 8 | ||
10830 | # asm 1: psrld $8,<xmm10=int6464#11 | ||
10831 | # asm 2: psrld $8,<xmm10=%xmm10 | ||
10832 | psrld $8,%xmm10 | ||
10833 | |||
10834 | # qhasm: uint32323232 xmm11 >>= 8 | ||
10835 | # asm 1: psrld $8,<xmm11=int6464#12 | ||
10836 | # asm 2: psrld $8,<xmm11=%xmm11 | ||
10837 | psrld $8,%xmm11 | ||
10838 | |||
10839 | # qhasm: uint32323232 xmm12 >>= 8 | ||
10840 | # asm 1: psrld $8,<xmm12=int6464#13 | ||
10841 | # asm 2: psrld $8,<xmm12=%xmm12 | ||
10842 | psrld $8,%xmm12 | ||
10843 | |||
10844 | # qhasm: uint32323232 xmm13 >>= 8 | ||
10845 | # asm 1: psrld $8,<xmm13=int6464#14 | ||
10846 | # asm 2: psrld $8,<xmm13=%xmm13 | ||
10847 | psrld $8,%xmm13 | ||
10848 | |||
10849 | # qhasm: uint32323232 xmm14 >>= 8 | ||
10850 | # asm 1: psrld $8,<xmm14=int6464#15 | ||
10851 | # asm 2: psrld $8,<xmm14=%xmm14 | ||
10852 | psrld $8,%xmm14 | ||
10853 | |||
10854 | # qhasm: uint32323232 xmm15 >>= 8 | ||
10855 | # asm 1: psrld $8,<xmm15=int6464#16 | ||
10856 | # asm 2: psrld $8,<xmm15=%xmm15 | ||
10857 | psrld $8,%xmm15 | ||
10858 | |||
10859 | # qhasm: xmm0 ^= xmm8 | ||
10860 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
10861 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
10862 | pxor %xmm8,%xmm0 | ||
10863 | |||
10864 | # qhasm: xmm1 ^= xmm9 | ||
10865 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
10866 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
10867 | pxor %xmm9,%xmm1 | ||
10868 | |||
10869 | # qhasm: xmm2 ^= xmm10 | ||
10870 | # asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3 | ||
10871 | # asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2 | ||
10872 | pxor %xmm10,%xmm2 | ||
10873 | |||
10874 | # qhasm: xmm3 ^= xmm11 | ||
10875 | # asm 1: pxor <xmm11=int6464#12,<xmm3=int6464#4 | ||
10876 | # asm 2: pxor <xmm11=%xmm11,<xmm3=%xmm3 | ||
10877 | pxor %xmm11,%xmm3 | ||
10878 | |||
10879 | # qhasm: xmm4 ^= xmm12 | ||
10880 | # asm 1: pxor <xmm12=int6464#13,<xmm4=int6464#5 | ||
10881 | # asm 2: pxor <xmm12=%xmm12,<xmm4=%xmm4 | ||
10882 | pxor %xmm12,%xmm4 | ||
10883 | |||
10884 | # qhasm: xmm5 ^= xmm13 | ||
10885 | # asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6 | ||
10886 | # asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5 | ||
10887 | pxor %xmm13,%xmm5 | ||
10888 | |||
10889 | # qhasm: xmm6 ^= xmm14 | ||
10890 | # asm 1: pxor <xmm14=int6464#15,<xmm6=int6464#7 | ||
10891 | # asm 2: pxor <xmm14=%xmm14,<xmm6=%xmm6 | ||
10892 | pxor %xmm14,%xmm6 | ||
10893 | |||
10894 | # qhasm: xmm7 ^= xmm15 | ||
10895 | # asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8 | ||
10896 | # asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7 | ||
10897 | pxor %xmm15,%xmm7 | ||
10898 | |||
10899 | # qhasm: uint32323232 xmm8 >>= 8 | ||
10900 | # asm 1: psrld $8,<xmm8=int6464#9 | ||
10901 | # asm 2: psrld $8,<xmm8=%xmm8 | ||
10902 | psrld $8,%xmm8 | ||
10903 | |||
10904 | # qhasm: uint32323232 xmm9 >>= 8 | ||
10905 | # asm 1: psrld $8,<xmm9=int6464#10 | ||
10906 | # asm 2: psrld $8,<xmm9=%xmm9 | ||
10907 | psrld $8,%xmm9 | ||
10908 | |||
10909 | # qhasm: uint32323232 xmm10 >>= 8 | ||
10910 | # asm 1: psrld $8,<xmm10=int6464#11 | ||
10911 | # asm 2: psrld $8,<xmm10=%xmm10 | ||
10912 | psrld $8,%xmm10 | ||
10913 | |||
10914 | # qhasm: uint32323232 xmm11 >>= 8 | ||
10915 | # asm 1: psrld $8,<xmm11=int6464#12 | ||
10916 | # asm 2: psrld $8,<xmm11=%xmm11 | ||
10917 | psrld $8,%xmm11 | ||
10918 | |||
10919 | # qhasm: uint32323232 xmm12 >>= 8 | ||
10920 | # asm 1: psrld $8,<xmm12=int6464#13 | ||
10921 | # asm 2: psrld $8,<xmm12=%xmm12 | ||
10922 | psrld $8,%xmm12 | ||
10923 | |||
10924 | # qhasm: uint32323232 xmm13 >>= 8 | ||
10925 | # asm 1: psrld $8,<xmm13=int6464#14 | ||
10926 | # asm 2: psrld $8,<xmm13=%xmm13 | ||
10927 | psrld $8,%xmm13 | ||
10928 | |||
10929 | # qhasm: uint32323232 xmm14 >>= 8 | ||
10930 | # asm 1: psrld $8,<xmm14=int6464#15 | ||
10931 | # asm 2: psrld $8,<xmm14=%xmm14 | ||
10932 | psrld $8,%xmm14 | ||
10933 | |||
10934 | # qhasm: uint32323232 xmm15 >>= 8 | ||
10935 | # asm 1: psrld $8,<xmm15=int6464#16 | ||
10936 | # asm 2: psrld $8,<xmm15=%xmm15 | ||
10937 | psrld $8,%xmm15 | ||
10938 | |||
10939 | # qhasm: xmm0 ^= xmm8 | ||
10940 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
10941 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
10942 | pxor %xmm8,%xmm0 | ||
10943 | |||
10944 | # qhasm: xmm1 ^= xmm9 | ||
10945 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
10946 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
10947 | pxor %xmm9,%xmm1 | ||
10948 | |||
10949 | # qhasm: xmm2 ^= xmm10 | ||
10950 | # asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3 | ||
10951 | # asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2 | ||
10952 | pxor %xmm10,%xmm2 | ||
10953 | |||
10954 | # qhasm: xmm3 ^= xmm11 | ||
10955 | # asm 1: pxor <xmm11=int6464#12,<xmm3=int6464#4 | ||
10956 | # asm 2: pxor <xmm11=%xmm11,<xmm3=%xmm3 | ||
10957 | pxor %xmm11,%xmm3 | ||
10958 | |||
10959 | # qhasm: xmm4 ^= xmm12 | ||
10960 | # asm 1: pxor <xmm12=int6464#13,<xmm4=int6464#5 | ||
10961 | # asm 2: pxor <xmm12=%xmm12,<xmm4=%xmm4 | ||
10962 | pxor %xmm12,%xmm4 | ||
10963 | |||
10964 | # qhasm: xmm5 ^= xmm13 | ||
10965 | # asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6 | ||
10966 | # asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5 | ||
10967 | pxor %xmm13,%xmm5 | ||
10968 | |||
10969 | # qhasm: xmm6 ^= xmm14 | ||
10970 | # asm 1: pxor <xmm14=int6464#15,<xmm6=int6464#7 | ||
10971 | # asm 2: pxor <xmm14=%xmm14,<xmm6=%xmm6 | ||
10972 | pxor %xmm14,%xmm6 | ||
10973 | |||
10974 | # qhasm: xmm7 ^= xmm15 | ||
10975 | # asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8 | ||
10976 | # asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7 | ||
10977 | pxor %xmm15,%xmm7 | ||
10978 | |||
10979 | # qhasm: *(int128 *)(c + 1024) = xmm0 | ||
10980 | # asm 1: movdqa <xmm0=int6464#1,1024(<c=int64#1) | ||
10981 | # asm 2: movdqa <xmm0=%xmm0,1024(<c=%rdi) | ||
10982 | movdqa %xmm0,1024(%rdi) | ||
10983 | |||
10984 | # qhasm: *(int128 *)(c + 1040) = xmm1 | ||
10985 | # asm 1: movdqa <xmm1=int6464#2,1040(<c=int64#1) | ||
10986 | # asm 2: movdqa <xmm1=%xmm1,1040(<c=%rdi) | ||
10987 | movdqa %xmm1,1040(%rdi) | ||
10988 | |||
10989 | # qhasm: *(int128 *)(c + 1056) = xmm2 | ||
10990 | # asm 1: movdqa <xmm2=int6464#3,1056(<c=int64#1) | ||
10991 | # asm 2: movdqa <xmm2=%xmm2,1056(<c=%rdi) | ||
10992 | movdqa %xmm2,1056(%rdi) | ||
10993 | |||
10994 | # qhasm: *(int128 *)(c + 1072) = xmm3 | ||
10995 | # asm 1: movdqa <xmm3=int6464#4,1072(<c=int64#1) | ||
10996 | # asm 2: movdqa <xmm3=%xmm3,1072(<c=%rdi) | ||
10997 | movdqa %xmm3,1072(%rdi) | ||
10998 | |||
10999 | # qhasm: *(int128 *)(c + 1088) = xmm4 | ||
11000 | # asm 1: movdqa <xmm4=int6464#5,1088(<c=int64#1) | ||
11001 | # asm 2: movdqa <xmm4=%xmm4,1088(<c=%rdi) | ||
11002 | movdqa %xmm4,1088(%rdi) | ||
11003 | |||
11004 | # qhasm: *(int128 *)(c + 1104) = xmm5 | ||
11005 | # asm 1: movdqa <xmm5=int6464#6,1104(<c=int64#1) | ||
11006 | # asm 2: movdqa <xmm5=%xmm5,1104(<c=%rdi) | ||
11007 | movdqa %xmm5,1104(%rdi) | ||
11008 | |||
11009 | # qhasm: *(int128 *)(c + 1120) = xmm6 | ||
11010 | # asm 1: movdqa <xmm6=int6464#7,1120(<c=int64#1) | ||
11011 | # asm 2: movdqa <xmm6=%xmm6,1120(<c=%rdi) | ||
11012 | movdqa %xmm6,1120(%rdi) | ||
11013 | |||
11014 | # qhasm: *(int128 *)(c + 1136) = xmm7 | ||
11015 | # asm 1: movdqa <xmm7=int6464#8,1136(<c=int64#1) | ||
11016 | # asm 2: movdqa <xmm7=%xmm7,1136(<c=%rdi) | ||
11017 | movdqa %xmm7,1136(%rdi) | ||
11018 | |||
11019 | # qhasm: xmm0 ^= ONE | ||
11020 | # asm 1: pxor ONE,<xmm0=int6464#1 | ||
11021 | # asm 2: pxor ONE,<xmm0=%xmm0 | ||
11022 | pxor ONE,%xmm0 | ||
11023 | |||
11024 | # qhasm: xmm1 ^= ONE | ||
11025 | # asm 1: pxor ONE,<xmm1=int6464#2 | ||
11026 | # asm 2: pxor ONE,<xmm1=%xmm1 | ||
11027 | pxor ONE,%xmm1 | ||
11028 | |||
11029 | # qhasm: xmm5 ^= ONE | ||
11030 | # asm 1: pxor ONE,<xmm5=int6464#6 | ||
11031 | # asm 2: pxor ONE,<xmm5=%xmm5 | ||
11032 | pxor ONE,%xmm5 | ||
11033 | |||
11034 | # qhasm: xmm6 ^= ONE | ||
11035 | # asm 1: pxor ONE,<xmm6=int6464#7 | ||
11036 | # asm 2: pxor ONE,<xmm6=%xmm6 | ||
11037 | pxor ONE,%xmm6 | ||
11038 | |||
11039 | # qhasm: shuffle bytes of xmm0 by ROTB | ||
11040 | # asm 1: pshufb ROTB,<xmm0=int6464#1 | ||
11041 | # asm 2: pshufb ROTB,<xmm0=%xmm0 | ||
11042 | pshufb ROTB,%xmm0 | ||
11043 | |||
11044 | # qhasm: shuffle bytes of xmm1 by ROTB | ||
11045 | # asm 1: pshufb ROTB,<xmm1=int6464#2 | ||
11046 | # asm 2: pshufb ROTB,<xmm1=%xmm1 | ||
11047 | pshufb ROTB,%xmm1 | ||
11048 | |||
11049 | # qhasm: shuffle bytes of xmm2 by ROTB | ||
11050 | # asm 1: pshufb ROTB,<xmm2=int6464#3 | ||
11051 | # asm 2: pshufb ROTB,<xmm2=%xmm2 | ||
11052 | pshufb ROTB,%xmm2 | ||
11053 | |||
11054 | # qhasm: shuffle bytes of xmm3 by ROTB | ||
11055 | # asm 1: pshufb ROTB,<xmm3=int6464#4 | ||
11056 | # asm 2: pshufb ROTB,<xmm3=%xmm3 | ||
11057 | pshufb ROTB,%xmm3 | ||
11058 | |||
11059 | # qhasm: shuffle bytes of xmm4 by ROTB | ||
11060 | # asm 1: pshufb ROTB,<xmm4=int6464#5 | ||
11061 | # asm 2: pshufb ROTB,<xmm4=%xmm4 | ||
11062 | pshufb ROTB,%xmm4 | ||
11063 | |||
11064 | # qhasm: shuffle bytes of xmm5 by ROTB | ||
11065 | # asm 1: pshufb ROTB,<xmm5=int6464#6 | ||
11066 | # asm 2: pshufb ROTB,<xmm5=%xmm5 | ||
11067 | pshufb ROTB,%xmm5 | ||
11068 | |||
11069 | # qhasm: shuffle bytes of xmm6 by ROTB | ||
11070 | # asm 1: pshufb ROTB,<xmm6=int6464#7 | ||
11071 | # asm 2: pshufb ROTB,<xmm6=%xmm6 | ||
11072 | pshufb ROTB,%xmm6 | ||
11073 | |||
11074 | # qhasm: shuffle bytes of xmm7 by ROTB | ||
11075 | # asm 1: pshufb ROTB,<xmm7=int6464#8 | ||
11076 | # asm 2: pshufb ROTB,<xmm7=%xmm7 | ||
11077 | pshufb ROTB,%xmm7 | ||
11078 | |||
11079 | # qhasm: xmm5 ^= xmm6 | ||
11080 | # asm 1: pxor <xmm6=int6464#7,<xmm5=int6464#6 | ||
11081 | # asm 2: pxor <xmm6=%xmm6,<xmm5=%xmm5 | ||
11082 | pxor %xmm6,%xmm5 | ||
11083 | |||
11084 | # qhasm: xmm2 ^= xmm1 | ||
11085 | # asm 1: pxor <xmm1=int6464#2,<xmm2=int6464#3 | ||
11086 | # asm 2: pxor <xmm1=%xmm1,<xmm2=%xmm2 | ||
11087 | pxor %xmm1,%xmm2 | ||
11088 | |||
11089 | # qhasm: xmm5 ^= xmm0 | ||
11090 | # asm 1: pxor <xmm0=int6464#1,<xmm5=int6464#6 | ||
11091 | # asm 2: pxor <xmm0=%xmm0,<xmm5=%xmm5 | ||
11092 | pxor %xmm0,%xmm5 | ||
11093 | |||
11094 | # qhasm: xmm6 ^= xmm2 | ||
11095 | # asm 1: pxor <xmm2=int6464#3,<xmm6=int6464#7 | ||
11096 | # asm 2: pxor <xmm2=%xmm2,<xmm6=%xmm6 | ||
11097 | pxor %xmm2,%xmm6 | ||
11098 | |||
11099 | # qhasm: xmm3 ^= xmm0 | ||
11100 | # asm 1: pxor <xmm0=int6464#1,<xmm3=int6464#4 | ||
11101 | # asm 2: pxor <xmm0=%xmm0,<xmm3=%xmm3 | ||
11102 | pxor %xmm0,%xmm3 | ||
11103 | |||
11104 | # qhasm: xmm6 ^= xmm3 | ||
11105 | # asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7 | ||
11106 | # asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6 | ||
11107 | pxor %xmm3,%xmm6 | ||
11108 | |||
11109 | # qhasm: xmm3 ^= xmm7 | ||
11110 | # asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4 | ||
11111 | # asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3 | ||
11112 | pxor %xmm7,%xmm3 | ||
11113 | |||
11114 | # qhasm: xmm3 ^= xmm4 | ||
11115 | # asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4 | ||
11116 | # asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3 | ||
11117 | pxor %xmm4,%xmm3 | ||
11118 | |||
11119 | # qhasm: xmm7 ^= xmm5 | ||
11120 | # asm 1: pxor <xmm5=int6464#6,<xmm7=int6464#8 | ||
11121 | # asm 2: pxor <xmm5=%xmm5,<xmm7=%xmm7 | ||
11122 | pxor %xmm5,%xmm7 | ||
11123 | |||
11124 | # qhasm: xmm3 ^= xmm1 | ||
11125 | # asm 1: pxor <xmm1=int6464#2,<xmm3=int6464#4 | ||
11126 | # asm 2: pxor <xmm1=%xmm1,<xmm3=%xmm3 | ||
11127 | pxor %xmm1,%xmm3 | ||
11128 | |||
11129 | # qhasm: xmm4 ^= xmm5 | ||
11130 | # asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5 | ||
11131 | # asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4 | ||
11132 | pxor %xmm5,%xmm4 | ||
11133 | |||
11134 | # qhasm: xmm2 ^= xmm7 | ||
11135 | # asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3 | ||
11136 | # asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2 | ||
11137 | pxor %xmm7,%xmm2 | ||
11138 | |||
11139 | # qhasm: xmm1 ^= xmm5 | ||
11140 | # asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2 | ||
11141 | # asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1 | ||
11142 | pxor %xmm5,%xmm1 | ||
11143 | |||
11144 | # qhasm: xmm11 = xmm7 | ||
11145 | # asm 1: movdqa <xmm7=int6464#8,>xmm11=int6464#9 | ||
11146 | # asm 2: movdqa <xmm7=%xmm7,>xmm11=%xmm8 | ||
11147 | movdqa %xmm7,%xmm8 | ||
11148 | |||
11149 | # qhasm: xmm10 = xmm1 | ||
11150 | # asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10 | ||
11151 | # asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9 | ||
11152 | movdqa %xmm1,%xmm9 | ||
11153 | |||
11154 | # qhasm: xmm9 = xmm5 | ||
11155 | # asm 1: movdqa <xmm5=int6464#6,>xmm9=int6464#11 | ||
11156 | # asm 2: movdqa <xmm5=%xmm5,>xmm9=%xmm10 | ||
11157 | movdqa %xmm5,%xmm10 | ||
11158 | |||
11159 | # qhasm: xmm13 = xmm2 | ||
11160 | # asm 1: movdqa <xmm2=int6464#3,>xmm13=int6464#12 | ||
11161 | # asm 2: movdqa <xmm2=%xmm2,>xmm13=%xmm11 | ||
11162 | movdqa %xmm2,%xmm11 | ||
11163 | |||
11164 | # qhasm: xmm12 = xmm6 | ||
11165 | # asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#13 | ||
11166 | # asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm12 | ||
11167 | movdqa %xmm6,%xmm12 | ||
11168 | |||
11169 | # qhasm: xmm11 ^= xmm4 | ||
11170 | # asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#9 | ||
11171 | # asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm8 | ||
11172 | pxor %xmm4,%xmm8 | ||
11173 | |||
11174 | # qhasm: xmm10 ^= xmm2 | ||
11175 | # asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#10 | ||
11176 | # asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm9 | ||
11177 | pxor %xmm2,%xmm9 | ||
11178 | |||
11179 | # qhasm: xmm9 ^= xmm3 | ||
11180 | # asm 1: pxor <xmm3=int6464#4,<xmm9=int6464#11 | ||
11181 | # asm 2: pxor <xmm3=%xmm3,<xmm9=%xmm10 | ||
11182 | pxor %xmm3,%xmm10 | ||
11183 | |||
11184 | # qhasm: xmm13 ^= xmm4 | ||
11185 | # asm 1: pxor <xmm4=int6464#5,<xmm13=int6464#12 | ||
11186 | # asm 2: pxor <xmm4=%xmm4,<xmm13=%xmm11 | ||
11187 | pxor %xmm4,%xmm11 | ||
11188 | |||
11189 | # qhasm: xmm12 ^= xmm0 | ||
11190 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13 | ||
11191 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12 | ||
11192 | pxor %xmm0,%xmm12 | ||
11193 | |||
11194 | # qhasm: xmm14 = xmm11 | ||
11195 | # asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14 | ||
11196 | # asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13 | ||
11197 | movdqa %xmm8,%xmm13 | ||
11198 | |||
11199 | # qhasm: xmm8 = xmm10 | ||
11200 | # asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15 | ||
11201 | # asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14 | ||
11202 | movdqa %xmm9,%xmm14 | ||
11203 | |||
11204 | # qhasm: xmm15 = xmm11 | ||
11205 | # asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16 | ||
11206 | # asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15 | ||
11207 | movdqa %xmm8,%xmm15 | ||
11208 | |||
11209 | # qhasm: xmm10 |= xmm9 | ||
11210 | # asm 1: por <xmm9=int6464#11,<xmm10=int6464#10 | ||
11211 | # asm 2: por <xmm9=%xmm10,<xmm10=%xmm9 | ||
11212 | por %xmm10,%xmm9 | ||
11213 | |||
11214 | # qhasm: xmm11 |= xmm12 | ||
11215 | # asm 1: por <xmm12=int6464#13,<xmm11=int6464#9 | ||
11216 | # asm 2: por <xmm12=%xmm12,<xmm11=%xmm8 | ||
11217 | por %xmm12,%xmm8 | ||
11218 | |||
11219 | # qhasm: xmm15 ^= xmm8 | ||
11220 | # asm 1: pxor <xmm8=int6464#15,<xmm15=int6464#16 | ||
11221 | # asm 2: pxor <xmm8=%xmm14,<xmm15=%xmm15 | ||
11222 | pxor %xmm14,%xmm15 | ||
11223 | |||
11224 | # qhasm: xmm14 &= xmm12 | ||
11225 | # asm 1: pand <xmm12=int6464#13,<xmm14=int6464#14 | ||
11226 | # asm 2: pand <xmm12=%xmm12,<xmm14=%xmm13 | ||
11227 | pand %xmm12,%xmm13 | ||
11228 | |||
11229 | # qhasm: xmm8 &= xmm9 | ||
11230 | # asm 1: pand <xmm9=int6464#11,<xmm8=int6464#15 | ||
11231 | # asm 2: pand <xmm9=%xmm10,<xmm8=%xmm14 | ||
11232 | pand %xmm10,%xmm14 | ||
11233 | |||
11234 | # qhasm: xmm12 ^= xmm9 | ||
11235 | # asm 1: pxor <xmm9=int6464#11,<xmm12=int6464#13 | ||
11236 | # asm 2: pxor <xmm9=%xmm10,<xmm12=%xmm12 | ||
11237 | pxor %xmm10,%xmm12 | ||
11238 | |||
11239 | # qhasm: xmm15 &= xmm12 | ||
11240 | # asm 1: pand <xmm12=int6464#13,<xmm15=int6464#16 | ||
11241 | # asm 2: pand <xmm12=%xmm12,<xmm15=%xmm15 | ||
11242 | pand %xmm12,%xmm15 | ||
11243 | |||
11244 | # qhasm: xmm12 = xmm3 | ||
11245 | # asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#11 | ||
11246 | # asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm10 | ||
11247 | movdqa %xmm3,%xmm10 | ||
11248 | |||
11249 | # qhasm: xmm12 ^= xmm0 | ||
11250 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#11 | ||
11251 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm10 | ||
11252 | pxor %xmm0,%xmm10 | ||
11253 | |||
11254 | # qhasm: xmm13 &= xmm12 | ||
11255 | # asm 1: pand <xmm12=int6464#11,<xmm13=int6464#12 | ||
11256 | # asm 2: pand <xmm12=%xmm10,<xmm13=%xmm11 | ||
11257 | pand %xmm10,%xmm11 | ||
11258 | |||
11259 | # qhasm: xmm11 ^= xmm13 | ||
11260 | # asm 1: pxor <xmm13=int6464#12,<xmm11=int6464#9 | ||
11261 | # asm 2: pxor <xmm13=%xmm11,<xmm11=%xmm8 | ||
11262 | pxor %xmm11,%xmm8 | ||
11263 | |||
11264 | # qhasm: xmm10 ^= xmm13 | ||
11265 | # asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10 | ||
11266 | # asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9 | ||
11267 | pxor %xmm11,%xmm9 | ||
11268 | |||
11269 | # qhasm: xmm13 = xmm7 | ||
11270 | # asm 1: movdqa <xmm7=int6464#8,>xmm13=int6464#11 | ||
11271 | # asm 2: movdqa <xmm7=%xmm7,>xmm13=%xmm10 | ||
11272 | movdqa %xmm7,%xmm10 | ||
11273 | |||
11274 | # qhasm: xmm13 ^= xmm1 | ||
11275 | # asm 1: pxor <xmm1=int6464#2,<xmm13=int6464#11 | ||
11276 | # asm 2: pxor <xmm1=%xmm1,<xmm13=%xmm10 | ||
11277 | pxor %xmm1,%xmm10 | ||
11278 | |||
11279 | # qhasm: xmm12 = xmm5 | ||
11280 | # asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#12 | ||
11281 | # asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm11 | ||
11282 | movdqa %xmm5,%xmm11 | ||
11283 | |||
11284 | # qhasm: xmm9 = xmm13 | ||
11285 | # asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13 | ||
11286 | # asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12 | ||
11287 | movdqa %xmm10,%xmm12 | ||
11288 | |||
11289 | # qhasm: xmm12 ^= xmm6 | ||
11290 | # asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#12 | ||
11291 | # asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm11 | ||
11292 | pxor %xmm6,%xmm11 | ||
11293 | |||
11294 | # qhasm: xmm9 |= xmm12 | ||
11295 | # asm 1: por <xmm12=int6464#12,<xmm9=int6464#13 | ||
11296 | # asm 2: por <xmm12=%xmm11,<xmm9=%xmm12 | ||
11297 | por %xmm11,%xmm12 | ||
11298 | |||
11299 | # qhasm: xmm13 &= xmm12 | ||
11300 | # asm 1: pand <xmm12=int6464#12,<xmm13=int6464#11 | ||
11301 | # asm 2: pand <xmm12=%xmm11,<xmm13=%xmm10 | ||
11302 | pand %xmm11,%xmm10 | ||
11303 | |||
11304 | # qhasm: xmm8 ^= xmm13 | ||
11305 | # asm 1: pxor <xmm13=int6464#11,<xmm8=int6464#15 | ||
11306 | # asm 2: pxor <xmm13=%xmm10,<xmm8=%xmm14 | ||
11307 | pxor %xmm10,%xmm14 | ||
11308 | |||
11309 | # qhasm: xmm11 ^= xmm15 | ||
11310 | # asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#9 | ||
11311 | # asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm8 | ||
11312 | pxor %xmm15,%xmm8 | ||
11313 | |||
11314 | # qhasm: xmm10 ^= xmm14 | ||
11315 | # asm 1: pxor <xmm14=int6464#14,<xmm10=int6464#10 | ||
11316 | # asm 2: pxor <xmm14=%xmm13,<xmm10=%xmm9 | ||
11317 | pxor %xmm13,%xmm9 | ||
11318 | |||
11319 | # qhasm: xmm9 ^= xmm15 | ||
11320 | # asm 1: pxor <xmm15=int6464#16,<xmm9=int6464#13 | ||
11321 | # asm 2: pxor <xmm15=%xmm15,<xmm9=%xmm12 | ||
11322 | pxor %xmm15,%xmm12 | ||
11323 | |||
11324 | # qhasm: xmm8 ^= xmm14 | ||
11325 | # asm 1: pxor <xmm14=int6464#14,<xmm8=int6464#15 | ||
11326 | # asm 2: pxor <xmm14=%xmm13,<xmm8=%xmm14 | ||
11327 | pxor %xmm13,%xmm14 | ||
11328 | |||
11329 | # qhasm: xmm9 ^= xmm14 | ||
11330 | # asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13 | ||
11331 | # asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12 | ||
11332 | pxor %xmm13,%xmm12 | ||
11333 | |||
11334 | # qhasm: xmm12 = xmm2 | ||
11335 | # asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#11 | ||
11336 | # asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm10 | ||
11337 | movdqa %xmm2,%xmm10 | ||
11338 | |||
11339 | # qhasm: xmm13 = xmm4 | ||
11340 | # asm 1: movdqa <xmm4=int6464#5,>xmm13=int6464#12 | ||
11341 | # asm 2: movdqa <xmm4=%xmm4,>xmm13=%xmm11 | ||
11342 | movdqa %xmm4,%xmm11 | ||
11343 | |||
11344 | # qhasm: xmm14 = xmm1 | ||
11345 | # asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14 | ||
11346 | # asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13 | ||
11347 | movdqa %xmm1,%xmm13 | ||
11348 | |||
11349 | # qhasm: xmm15 = xmm7 | ||
11350 | # asm 1: movdqa <xmm7=int6464#8,>xmm15=int6464#16 | ||
11351 | # asm 2: movdqa <xmm7=%xmm7,>xmm15=%xmm15 | ||
11352 | movdqa %xmm7,%xmm15 | ||
11353 | |||
11354 | # qhasm: xmm12 &= xmm3 | ||
11355 | # asm 1: pand <xmm3=int6464#4,<xmm12=int6464#11 | ||
11356 | # asm 2: pand <xmm3=%xmm3,<xmm12=%xmm10 | ||
11357 | pand %xmm3,%xmm10 | ||
11358 | |||
11359 | # qhasm: xmm13 &= xmm0 | ||
11360 | # asm 1: pand <xmm0=int6464#1,<xmm13=int6464#12 | ||
11361 | # asm 2: pand <xmm0=%xmm0,<xmm13=%xmm11 | ||
11362 | pand %xmm0,%xmm11 | ||
11363 | |||
11364 | # qhasm: xmm14 &= xmm5 | ||
11365 | # asm 1: pand <xmm5=int6464#6,<xmm14=int6464#14 | ||
11366 | # asm 2: pand <xmm5=%xmm5,<xmm14=%xmm13 | ||
11367 | pand %xmm5,%xmm13 | ||
11368 | |||
11369 | # qhasm: xmm15 |= xmm6 | ||
11370 | # asm 1: por <xmm6=int6464#7,<xmm15=int6464#16 | ||
11371 | # asm 2: por <xmm6=%xmm6,<xmm15=%xmm15 | ||
11372 | por %xmm6,%xmm15 | ||
11373 | |||
11374 | # qhasm: xmm11 ^= xmm12 | ||
11375 | # asm 1: pxor <xmm12=int6464#11,<xmm11=int6464#9 | ||
11376 | # asm 2: pxor <xmm12=%xmm10,<xmm11=%xmm8 | ||
11377 | pxor %xmm10,%xmm8 | ||
11378 | |||
11379 | # qhasm: xmm10 ^= xmm13 | ||
11380 | # asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10 | ||
11381 | # asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9 | ||
11382 | pxor %xmm11,%xmm9 | ||
11383 | |||
11384 | # qhasm: xmm9 ^= xmm14 | ||
11385 | # asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13 | ||
11386 | # asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12 | ||
11387 | pxor %xmm13,%xmm12 | ||
11388 | |||
11389 | # qhasm: xmm8 ^= xmm15 | ||
11390 | # asm 1: pxor <xmm15=int6464#16,<xmm8=int6464#15 | ||
11391 | # asm 2: pxor <xmm15=%xmm15,<xmm8=%xmm14 | ||
11392 | pxor %xmm15,%xmm14 | ||
11393 | |||
11394 | # qhasm: xmm12 = xmm11 | ||
11395 | # asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11 | ||
11396 | # asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10 | ||
11397 | movdqa %xmm8,%xmm10 | ||
11398 | |||
11399 | # qhasm: xmm12 ^= xmm10 | ||
11400 | # asm 1: pxor <xmm10=int6464#10,<xmm12=int6464#11 | ||
11401 | # asm 2: pxor <xmm10=%xmm9,<xmm12=%xmm10 | ||
11402 | pxor %xmm9,%xmm10 | ||
11403 | |||
11404 | # qhasm: xmm11 &= xmm9 | ||
11405 | # asm 1: pand <xmm9=int6464#13,<xmm11=int6464#9 | ||
11406 | # asm 2: pand <xmm9=%xmm12,<xmm11=%xmm8 | ||
11407 | pand %xmm12,%xmm8 | ||
11408 | |||
11409 | # qhasm: xmm14 = xmm8 | ||
11410 | # asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12 | ||
11411 | # asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11 | ||
11412 | movdqa %xmm14,%xmm11 | ||
11413 | |||
11414 | # qhasm: xmm14 ^= xmm11 | ||
11415 | # asm 1: pxor <xmm11=int6464#9,<xmm14=int6464#12 | ||
11416 | # asm 2: pxor <xmm11=%xmm8,<xmm14=%xmm11 | ||
11417 | pxor %xmm8,%xmm11 | ||
11418 | |||
11419 | # qhasm: xmm15 = xmm12 | ||
11420 | # asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14 | ||
11421 | # asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13 | ||
11422 | movdqa %xmm10,%xmm13 | ||
11423 | |||
11424 | # qhasm: xmm15 &= xmm14 | ||
11425 | # asm 1: pand <xmm14=int6464#12,<xmm15=int6464#14 | ||
11426 | # asm 2: pand <xmm14=%xmm11,<xmm15=%xmm13 | ||
11427 | pand %xmm11,%xmm13 | ||
11428 | |||
11429 | # qhasm: xmm15 ^= xmm10 | ||
11430 | # asm 1: pxor <xmm10=int6464#10,<xmm15=int6464#14 | ||
11431 | # asm 2: pxor <xmm10=%xmm9,<xmm15=%xmm13 | ||
11432 | pxor %xmm9,%xmm13 | ||
11433 | |||
11434 | # qhasm: xmm13 = xmm9 | ||
11435 | # asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16 | ||
11436 | # asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15 | ||
11437 | movdqa %xmm12,%xmm15 | ||
11438 | |||
11439 | # qhasm: xmm13 ^= xmm8 | ||
11440 | # asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16 | ||
11441 | # asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15 | ||
11442 | pxor %xmm14,%xmm15 | ||
11443 | |||
11444 | # qhasm: xmm11 ^= xmm10 | ||
11445 | # asm 1: pxor <xmm10=int6464#10,<xmm11=int6464#9 | ||
11446 | # asm 2: pxor <xmm10=%xmm9,<xmm11=%xmm8 | ||
11447 | pxor %xmm9,%xmm8 | ||
11448 | |||
11449 | # qhasm: xmm13 &= xmm11 | ||
11450 | # asm 1: pand <xmm11=int6464#9,<xmm13=int6464#16 | ||
11451 | # asm 2: pand <xmm11=%xmm8,<xmm13=%xmm15 | ||
11452 | pand %xmm8,%xmm15 | ||
11453 | |||
11454 | # qhasm: xmm13 ^= xmm8 | ||
11455 | # asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16 | ||
11456 | # asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15 | ||
11457 | pxor %xmm14,%xmm15 | ||
11458 | |||
11459 | # qhasm: xmm9 ^= xmm13 | ||
11460 | # asm 1: pxor <xmm13=int6464#16,<xmm9=int6464#13 | ||
11461 | # asm 2: pxor <xmm13=%xmm15,<xmm9=%xmm12 | ||
11462 | pxor %xmm15,%xmm12 | ||
11463 | |||
11464 | # qhasm: xmm10 = xmm14 | ||
11465 | # asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9 | ||
11466 | # asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8 | ||
11467 | movdqa %xmm11,%xmm8 | ||
11468 | |||
11469 | # qhasm: xmm10 ^= xmm13 | ||
11470 | # asm 1: pxor <xmm13=int6464#16,<xmm10=int6464#9 | ||
11471 | # asm 2: pxor <xmm13=%xmm15,<xmm10=%xmm8 | ||
11472 | pxor %xmm15,%xmm8 | ||
11473 | |||
11474 | # qhasm: xmm10 &= xmm8 | ||
11475 | # asm 1: pand <xmm8=int6464#15,<xmm10=int6464#9 | ||
11476 | # asm 2: pand <xmm8=%xmm14,<xmm10=%xmm8 | ||
11477 | pand %xmm14,%xmm8 | ||
11478 | |||
11479 | # qhasm: xmm9 ^= xmm10 | ||
11480 | # asm 1: pxor <xmm10=int6464#9,<xmm9=int6464#13 | ||
11481 | # asm 2: pxor <xmm10=%xmm8,<xmm9=%xmm12 | ||
11482 | pxor %xmm8,%xmm12 | ||
11483 | |||
11484 | # qhasm: xmm14 ^= xmm10 | ||
11485 | # asm 1: pxor <xmm10=int6464#9,<xmm14=int6464#12 | ||
11486 | # asm 2: pxor <xmm10=%xmm8,<xmm14=%xmm11 | ||
11487 | pxor %xmm8,%xmm11 | ||
11488 | |||
11489 | # qhasm: xmm14 &= xmm15 | ||
11490 | # asm 1: pand <xmm15=int6464#14,<xmm14=int6464#12 | ||
11491 | # asm 2: pand <xmm15=%xmm13,<xmm14=%xmm11 | ||
11492 | pand %xmm13,%xmm11 | ||
11493 | |||
11494 | # qhasm: xmm14 ^= xmm12 | ||
11495 | # asm 1: pxor <xmm12=int6464#11,<xmm14=int6464#12 | ||
11496 | # asm 2: pxor <xmm12=%xmm10,<xmm14=%xmm11 | ||
11497 | pxor %xmm10,%xmm11 | ||
11498 | |||
11499 | # qhasm: xmm12 = xmm6 | ||
11500 | # asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#9 | ||
11501 | # asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm8 | ||
11502 | movdqa %xmm6,%xmm8 | ||
11503 | |||
11504 | # qhasm: xmm8 = xmm5 | ||
11505 | # asm 1: movdqa <xmm5=int6464#6,>xmm8=int6464#10 | ||
11506 | # asm 2: movdqa <xmm5=%xmm5,>xmm8=%xmm9 | ||
11507 | movdqa %xmm5,%xmm9 | ||
11508 | |||
11509 | # qhasm: xmm10 = xmm15 | ||
11510 | # asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11 | ||
11511 | # asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10 | ||
11512 | movdqa %xmm13,%xmm10 | ||
11513 | |||
11514 | # qhasm: xmm10 ^= xmm14 | ||
11515 | # asm 1: pxor <xmm14=int6464#12,<xmm10=int6464#11 | ||
11516 | # asm 2: pxor <xmm14=%xmm11,<xmm10=%xmm10 | ||
11517 | pxor %xmm11,%xmm10 | ||
11518 | |||
11519 | # qhasm: xmm10 &= xmm6 | ||
11520 | # asm 1: pand <xmm6=int6464#7,<xmm10=int6464#11 | ||
11521 | # asm 2: pand <xmm6=%xmm6,<xmm10=%xmm10 | ||
11522 | pand %xmm6,%xmm10 | ||
11523 | |||
11524 | # qhasm: xmm6 ^= xmm5 | ||
11525 | # asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7 | ||
11526 | # asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6 | ||
11527 | pxor %xmm5,%xmm6 | ||
11528 | |||
11529 | # qhasm: xmm6 &= xmm14 | ||
11530 | # asm 1: pand <xmm14=int6464#12,<xmm6=int6464#7 | ||
11531 | # asm 2: pand <xmm14=%xmm11,<xmm6=%xmm6 | ||
11532 | pand %xmm11,%xmm6 | ||
11533 | |||
11534 | # qhasm: xmm5 &= xmm15 | ||
11535 | # asm 1: pand <xmm15=int6464#14,<xmm5=int6464#6 | ||
11536 | # asm 2: pand <xmm15=%xmm13,<xmm5=%xmm5 | ||
11537 | pand %xmm13,%xmm5 | ||
11538 | |||
11539 | # qhasm: xmm6 ^= xmm5 | ||
11540 | # asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7 | ||
11541 | # asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6 | ||
11542 | pxor %xmm5,%xmm6 | ||
11543 | |||
11544 | # qhasm: xmm5 ^= xmm10 | ||
11545 | # asm 1: pxor <xmm10=int6464#11,<xmm5=int6464#6 | ||
11546 | # asm 2: pxor <xmm10=%xmm10,<xmm5=%xmm5 | ||
11547 | pxor %xmm10,%xmm5 | ||
11548 | |||
11549 | # qhasm: xmm12 ^= xmm0 | ||
11550 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#9 | ||
11551 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm8 | ||
11552 | pxor %xmm0,%xmm8 | ||
11553 | |||
11554 | # qhasm: xmm8 ^= xmm3 | ||
11555 | # asm 1: pxor <xmm3=int6464#4,<xmm8=int6464#10 | ||
11556 | # asm 2: pxor <xmm3=%xmm3,<xmm8=%xmm9 | ||
11557 | pxor %xmm3,%xmm9 | ||
11558 | |||
11559 | # qhasm: xmm15 ^= xmm13 | ||
11560 | # asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14 | ||
11561 | # asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13 | ||
11562 | pxor %xmm15,%xmm13 | ||
11563 | |||
11564 | # qhasm: xmm14 ^= xmm9 | ||
11565 | # asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12 | ||
11566 | # asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11 | ||
11567 | pxor %xmm12,%xmm11 | ||
11568 | |||
11569 | # qhasm: xmm11 = xmm15 | ||
11570 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
11571 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
11572 | movdqa %xmm13,%xmm10 | ||
11573 | |||
11574 | # qhasm: xmm11 ^= xmm14 | ||
11575 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
11576 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
11577 | pxor %xmm11,%xmm10 | ||
11578 | |||
11579 | # qhasm: xmm11 &= xmm12 | ||
11580 | # asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11 | ||
11581 | # asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10 | ||
11582 | pand %xmm8,%xmm10 | ||
11583 | |||
11584 | # qhasm: xmm12 ^= xmm8 | ||
11585 | # asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9 | ||
11586 | # asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8 | ||
11587 | pxor %xmm9,%xmm8 | ||
11588 | |||
11589 | # qhasm: xmm12 &= xmm14 | ||
11590 | # asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9 | ||
11591 | # asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8 | ||
11592 | pand %xmm11,%xmm8 | ||
11593 | |||
11594 | # qhasm: xmm8 &= xmm15 | ||
11595 | # asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10 | ||
11596 | # asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9 | ||
11597 | pand %xmm13,%xmm9 | ||
11598 | |||
11599 | # qhasm: xmm8 ^= xmm12 | ||
11600 | # asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10 | ||
11601 | # asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9 | ||
11602 | pxor %xmm8,%xmm9 | ||
11603 | |||
11604 | # qhasm: xmm12 ^= xmm11 | ||
11605 | # asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9 | ||
11606 | # asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8 | ||
11607 | pxor %xmm10,%xmm8 | ||
11608 | |||
11609 | # qhasm: xmm10 = xmm13 | ||
11610 | # asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11 | ||
11611 | # asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10 | ||
11612 | movdqa %xmm15,%xmm10 | ||
11613 | |||
11614 | # qhasm: xmm10 ^= xmm9 | ||
11615 | # asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11 | ||
11616 | # asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10 | ||
11617 | pxor %xmm12,%xmm10 | ||
11618 | |||
11619 | # qhasm: xmm10 &= xmm0 | ||
11620 | # asm 1: pand <xmm0=int6464#1,<xmm10=int6464#11 | ||
11621 | # asm 2: pand <xmm0=%xmm0,<xmm10=%xmm10 | ||
11622 | pand %xmm0,%xmm10 | ||
11623 | |||
11624 | # qhasm: xmm0 ^= xmm3 | ||
11625 | # asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1 | ||
11626 | # asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0 | ||
11627 | pxor %xmm3,%xmm0 | ||
11628 | |||
11629 | # qhasm: xmm0 &= xmm9 | ||
11630 | # asm 1: pand <xmm9=int6464#13,<xmm0=int6464#1 | ||
11631 | # asm 2: pand <xmm9=%xmm12,<xmm0=%xmm0 | ||
11632 | pand %xmm12,%xmm0 | ||
11633 | |||
11634 | # qhasm: xmm3 &= xmm13 | ||
11635 | # asm 1: pand <xmm13=int6464#16,<xmm3=int6464#4 | ||
11636 | # asm 2: pand <xmm13=%xmm15,<xmm3=%xmm3 | ||
11637 | pand %xmm15,%xmm3 | ||
11638 | |||
11639 | # qhasm: xmm0 ^= xmm3 | ||
11640 | # asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1 | ||
11641 | # asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0 | ||
11642 | pxor %xmm3,%xmm0 | ||
11643 | |||
11644 | # qhasm: xmm3 ^= xmm10 | ||
11645 | # asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4 | ||
11646 | # asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3 | ||
11647 | pxor %xmm10,%xmm3 | ||
11648 | |||
11649 | # qhasm: xmm6 ^= xmm12 | ||
11650 | # asm 1: pxor <xmm12=int6464#9,<xmm6=int6464#7 | ||
11651 | # asm 2: pxor <xmm12=%xmm8,<xmm6=%xmm6 | ||
11652 | pxor %xmm8,%xmm6 | ||
11653 | |||
11654 | # qhasm: xmm0 ^= xmm12 | ||
11655 | # asm 1: pxor <xmm12=int6464#9,<xmm0=int6464#1 | ||
11656 | # asm 2: pxor <xmm12=%xmm8,<xmm0=%xmm0 | ||
11657 | pxor %xmm8,%xmm0 | ||
11658 | |||
11659 | # qhasm: xmm5 ^= xmm8 | ||
11660 | # asm 1: pxor <xmm8=int6464#10,<xmm5=int6464#6 | ||
11661 | # asm 2: pxor <xmm8=%xmm9,<xmm5=%xmm5 | ||
11662 | pxor %xmm9,%xmm5 | ||
11663 | |||
11664 | # qhasm: xmm3 ^= xmm8 | ||
11665 | # asm 1: pxor <xmm8=int6464#10,<xmm3=int6464#4 | ||
11666 | # asm 2: pxor <xmm8=%xmm9,<xmm3=%xmm3 | ||
11667 | pxor %xmm9,%xmm3 | ||
11668 | |||
11669 | # qhasm: xmm12 = xmm7 | ||
11670 | # asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#9 | ||
11671 | # asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm8 | ||
11672 | movdqa %xmm7,%xmm8 | ||
11673 | |||
11674 | # qhasm: xmm8 = xmm1 | ||
11675 | # asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10 | ||
11676 | # asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9 | ||
11677 | movdqa %xmm1,%xmm9 | ||
11678 | |||
11679 | # qhasm: xmm12 ^= xmm4 | ||
11680 | # asm 1: pxor <xmm4=int6464#5,<xmm12=int6464#9 | ||
11681 | # asm 2: pxor <xmm4=%xmm4,<xmm12=%xmm8 | ||
11682 | pxor %xmm4,%xmm8 | ||
11683 | |||
11684 | # qhasm: xmm8 ^= xmm2 | ||
11685 | # asm 1: pxor <xmm2=int6464#3,<xmm8=int6464#10 | ||
11686 | # asm 2: pxor <xmm2=%xmm2,<xmm8=%xmm9 | ||
11687 | pxor %xmm2,%xmm9 | ||
11688 | |||
11689 | # qhasm: xmm11 = xmm15 | ||
11690 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
11691 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
11692 | movdqa %xmm13,%xmm10 | ||
11693 | |||
11694 | # qhasm: xmm11 ^= xmm14 | ||
11695 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
11696 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
11697 | pxor %xmm11,%xmm10 | ||
11698 | |||
11699 | # qhasm: xmm11 &= xmm12 | ||
11700 | # asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11 | ||
11701 | # asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10 | ||
11702 | pand %xmm8,%xmm10 | ||
11703 | |||
11704 | # qhasm: xmm12 ^= xmm8 | ||
11705 | # asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9 | ||
11706 | # asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8 | ||
11707 | pxor %xmm9,%xmm8 | ||
11708 | |||
11709 | # qhasm: xmm12 &= xmm14 | ||
11710 | # asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9 | ||
11711 | # asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8 | ||
11712 | pand %xmm11,%xmm8 | ||
11713 | |||
11714 | # qhasm: xmm8 &= xmm15 | ||
11715 | # asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10 | ||
11716 | # asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9 | ||
11717 | pand %xmm13,%xmm9 | ||
11718 | |||
11719 | # qhasm: xmm8 ^= xmm12 | ||
11720 | # asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10 | ||
11721 | # asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9 | ||
11722 | pxor %xmm8,%xmm9 | ||
11723 | |||
11724 | # qhasm: xmm12 ^= xmm11 | ||
11725 | # asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9 | ||
11726 | # asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8 | ||
11727 | pxor %xmm10,%xmm8 | ||
11728 | |||
11729 | # qhasm: xmm10 = xmm13 | ||
11730 | # asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11 | ||
11731 | # asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10 | ||
11732 | movdqa %xmm15,%xmm10 | ||
11733 | |||
11734 | # qhasm: xmm10 ^= xmm9 | ||
11735 | # asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11 | ||
11736 | # asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10 | ||
11737 | pxor %xmm12,%xmm10 | ||
11738 | |||
11739 | # qhasm: xmm10 &= xmm4 | ||
11740 | # asm 1: pand <xmm4=int6464#5,<xmm10=int6464#11 | ||
11741 | # asm 2: pand <xmm4=%xmm4,<xmm10=%xmm10 | ||
11742 | pand %xmm4,%xmm10 | ||
11743 | |||
11744 | # qhasm: xmm4 ^= xmm2 | ||
11745 | # asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5 | ||
11746 | # asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4 | ||
11747 | pxor %xmm2,%xmm4 | ||
11748 | |||
11749 | # qhasm: xmm4 &= xmm9 | ||
11750 | # asm 1: pand <xmm9=int6464#13,<xmm4=int6464#5 | ||
11751 | # asm 2: pand <xmm9=%xmm12,<xmm4=%xmm4 | ||
11752 | pand %xmm12,%xmm4 | ||
11753 | |||
11754 | # qhasm: xmm2 &= xmm13 | ||
11755 | # asm 1: pand <xmm13=int6464#16,<xmm2=int6464#3 | ||
11756 | # asm 2: pand <xmm13=%xmm15,<xmm2=%xmm2 | ||
11757 | pand %xmm15,%xmm2 | ||
11758 | |||
11759 | # qhasm: xmm4 ^= xmm2 | ||
11760 | # asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5 | ||
11761 | # asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4 | ||
11762 | pxor %xmm2,%xmm4 | ||
11763 | |||
11764 | # qhasm: xmm2 ^= xmm10 | ||
11765 | # asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3 | ||
11766 | # asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2 | ||
11767 | pxor %xmm10,%xmm2 | ||
11768 | |||
11769 | # qhasm: xmm15 ^= xmm13 | ||
11770 | # asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14 | ||
11771 | # asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13 | ||
11772 | pxor %xmm15,%xmm13 | ||
11773 | |||
11774 | # qhasm: xmm14 ^= xmm9 | ||
11775 | # asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12 | ||
11776 | # asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11 | ||
11777 | pxor %xmm12,%xmm11 | ||
11778 | |||
11779 | # qhasm: xmm11 = xmm15 | ||
11780 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
11781 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
11782 | movdqa %xmm13,%xmm10 | ||
11783 | |||
11784 | # qhasm: xmm11 ^= xmm14 | ||
11785 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
11786 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
11787 | pxor %xmm11,%xmm10 | ||
11788 | |||
11789 | # qhasm: xmm11 &= xmm7 | ||
11790 | # asm 1: pand <xmm7=int6464#8,<xmm11=int6464#11 | ||
11791 | # asm 2: pand <xmm7=%xmm7,<xmm11=%xmm10 | ||
11792 | pand %xmm7,%xmm10 | ||
11793 | |||
11794 | # qhasm: xmm7 ^= xmm1 | ||
11795 | # asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8 | ||
11796 | # asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7 | ||
11797 | pxor %xmm1,%xmm7 | ||
11798 | |||
11799 | # qhasm: xmm7 &= xmm14 | ||
11800 | # asm 1: pand <xmm14=int6464#12,<xmm7=int6464#8 | ||
11801 | # asm 2: pand <xmm14=%xmm11,<xmm7=%xmm7 | ||
11802 | pand %xmm11,%xmm7 | ||
11803 | |||
11804 | # qhasm: xmm1 &= xmm15 | ||
11805 | # asm 1: pand <xmm15=int6464#14,<xmm1=int6464#2 | ||
11806 | # asm 2: pand <xmm15=%xmm13,<xmm1=%xmm1 | ||
11807 | pand %xmm13,%xmm1 | ||
11808 | |||
11809 | # qhasm: xmm7 ^= xmm1 | ||
11810 | # asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8 | ||
11811 | # asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7 | ||
11812 | pxor %xmm1,%xmm7 | ||
11813 | |||
11814 | # qhasm: xmm1 ^= xmm11 | ||
11815 | # asm 1: pxor <xmm11=int6464#11,<xmm1=int6464#2 | ||
11816 | # asm 2: pxor <xmm11=%xmm10,<xmm1=%xmm1 | ||
11817 | pxor %xmm10,%xmm1 | ||
11818 | |||
11819 | # qhasm: xmm7 ^= xmm12 | ||
11820 | # asm 1: pxor <xmm12=int6464#9,<xmm7=int6464#8 | ||
11821 | # asm 2: pxor <xmm12=%xmm8,<xmm7=%xmm7 | ||
11822 | pxor %xmm8,%xmm7 | ||
11823 | |||
11824 | # qhasm: xmm4 ^= xmm12 | ||
11825 | # asm 1: pxor <xmm12=int6464#9,<xmm4=int6464#5 | ||
11826 | # asm 2: pxor <xmm12=%xmm8,<xmm4=%xmm4 | ||
11827 | pxor %xmm8,%xmm4 | ||
11828 | |||
11829 | # qhasm: xmm1 ^= xmm8 | ||
11830 | # asm 1: pxor <xmm8=int6464#10,<xmm1=int6464#2 | ||
11831 | # asm 2: pxor <xmm8=%xmm9,<xmm1=%xmm1 | ||
11832 | pxor %xmm9,%xmm1 | ||
11833 | |||
11834 | # qhasm: xmm2 ^= xmm8 | ||
11835 | # asm 1: pxor <xmm8=int6464#10,<xmm2=int6464#3 | ||
11836 | # asm 2: pxor <xmm8=%xmm9,<xmm2=%xmm2 | ||
11837 | pxor %xmm9,%xmm2 | ||
11838 | |||
11839 | # qhasm: xmm7 ^= xmm0 | ||
11840 | # asm 1: pxor <xmm0=int6464#1,<xmm7=int6464#8 | ||
11841 | # asm 2: pxor <xmm0=%xmm0,<xmm7=%xmm7 | ||
11842 | pxor %xmm0,%xmm7 | ||
11843 | |||
11844 | # qhasm: xmm1 ^= xmm6 | ||
11845 | # asm 1: pxor <xmm6=int6464#7,<xmm1=int6464#2 | ||
11846 | # asm 2: pxor <xmm6=%xmm6,<xmm1=%xmm1 | ||
11847 | pxor %xmm6,%xmm1 | ||
11848 | |||
11849 | # qhasm: xmm4 ^= xmm7 | ||
11850 | # asm 1: pxor <xmm7=int6464#8,<xmm4=int6464#5 | ||
11851 | # asm 2: pxor <xmm7=%xmm7,<xmm4=%xmm4 | ||
11852 | pxor %xmm7,%xmm4 | ||
11853 | |||
11854 | # qhasm: xmm6 ^= xmm0 | ||
11855 | # asm 1: pxor <xmm0=int6464#1,<xmm6=int6464#7 | ||
11856 | # asm 2: pxor <xmm0=%xmm0,<xmm6=%xmm6 | ||
11857 | pxor %xmm0,%xmm6 | ||
11858 | |||
11859 | # qhasm: xmm0 ^= xmm1 | ||
11860 | # asm 1: pxor <xmm1=int6464#2,<xmm0=int6464#1 | ||
11861 | # asm 2: pxor <xmm1=%xmm1,<xmm0=%xmm0 | ||
11862 | pxor %xmm1,%xmm0 | ||
11863 | |||
11864 | # qhasm: xmm1 ^= xmm5 | ||
11865 | # asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2 | ||
11866 | # asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1 | ||
11867 | pxor %xmm5,%xmm1 | ||
11868 | |||
11869 | # qhasm: xmm5 ^= xmm2 | ||
11870 | # asm 1: pxor <xmm2=int6464#3,<xmm5=int6464#6 | ||
11871 | # asm 2: pxor <xmm2=%xmm2,<xmm5=%xmm5 | ||
11872 | pxor %xmm2,%xmm5 | ||
11873 | |||
11874 | # qhasm: xmm4 ^= xmm5 | ||
11875 | # asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5 | ||
11876 | # asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4 | ||
11877 | pxor %xmm5,%xmm4 | ||
11878 | |||
11879 | # qhasm: xmm2 ^= xmm3 | ||
11880 | # asm 1: pxor <xmm3=int6464#4,<xmm2=int6464#3 | ||
11881 | # asm 2: pxor <xmm3=%xmm3,<xmm2=%xmm2 | ||
11882 | pxor %xmm3,%xmm2 | ||
11883 | |||
11884 | # qhasm: xmm3 ^= xmm5 | ||
11885 | # asm 1: pxor <xmm5=int6464#6,<xmm3=int6464#4 | ||
11886 | # asm 2: pxor <xmm5=%xmm5,<xmm3=%xmm3 | ||
11887 | pxor %xmm5,%xmm3 | ||
11888 | |||
11889 | # qhasm: xmm6 ^= xmm3 | ||
11890 | # asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7 | ||
11891 | # asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6 | ||
11892 | pxor %xmm3,%xmm6 | ||
11893 | |||
11894 | # qhasm: xmm0 ^= RCON | ||
11895 | # asm 1: pxor RCON,<xmm0=int6464#1 | ||
11896 | # asm 2: pxor RCON,<xmm0=%xmm0 | ||
11897 | pxor RCON,%xmm0 | ||
11898 | |||
11899 | # qhasm: xmm1 ^= RCON | ||
11900 | # asm 1: pxor RCON,<xmm1=int6464#2 | ||
11901 | # asm 2: pxor RCON,<xmm1=%xmm1 | ||
11902 | pxor RCON,%xmm1 | ||
11903 | |||
11904 | # qhasm: xmm6 ^= RCON | ||
11905 | # asm 1: pxor RCON,<xmm6=int6464#7 | ||
11906 | # asm 2: pxor RCON,<xmm6=%xmm6 | ||
11907 | pxor RCON,%xmm6 | ||
11908 | |||
11909 | # qhasm: xmm3 ^= RCON | ||
11910 | # asm 1: pxor RCON,<xmm3=int6464#4 | ||
11911 | # asm 2: pxor RCON,<xmm3=%xmm3 | ||
11912 | pxor RCON,%xmm3 | ||
11913 | |||
11914 | # qhasm: shuffle bytes of xmm0 by EXPB0 | ||
11915 | # asm 1: pshufb EXPB0,<xmm0=int6464#1 | ||
11916 | # asm 2: pshufb EXPB0,<xmm0=%xmm0 | ||
11917 | pshufb EXPB0,%xmm0 | ||
11918 | |||
11919 | # qhasm: shuffle bytes of xmm1 by EXPB0 | ||
11920 | # asm 1: pshufb EXPB0,<xmm1=int6464#2 | ||
11921 | # asm 2: pshufb EXPB0,<xmm1=%xmm1 | ||
11922 | pshufb EXPB0,%xmm1 | ||
11923 | |||
11924 | # qhasm: shuffle bytes of xmm4 by EXPB0 | ||
11925 | # asm 1: pshufb EXPB0,<xmm4=int6464#5 | ||
11926 | # asm 2: pshufb EXPB0,<xmm4=%xmm4 | ||
11927 | pshufb EXPB0,%xmm4 | ||
11928 | |||
11929 | # qhasm: shuffle bytes of xmm6 by EXPB0 | ||
11930 | # asm 1: pshufb EXPB0,<xmm6=int6464#7 | ||
11931 | # asm 2: pshufb EXPB0,<xmm6=%xmm6 | ||
11932 | pshufb EXPB0,%xmm6 | ||
11933 | |||
11934 | # qhasm: shuffle bytes of xmm3 by EXPB0 | ||
11935 | # asm 1: pshufb EXPB0,<xmm3=int6464#4 | ||
11936 | # asm 2: pshufb EXPB0,<xmm3=%xmm3 | ||
11937 | pshufb EXPB0,%xmm3 | ||
11938 | |||
11939 | # qhasm: shuffle bytes of xmm7 by EXPB0 | ||
11940 | # asm 1: pshufb EXPB0,<xmm7=int6464#8 | ||
11941 | # asm 2: pshufb EXPB0,<xmm7=%xmm7 | ||
11942 | pshufb EXPB0,%xmm7 | ||
11943 | |||
11944 | # qhasm: shuffle bytes of xmm2 by EXPB0 | ||
11945 | # asm 1: pshufb EXPB0,<xmm2=int6464#3 | ||
11946 | # asm 2: pshufb EXPB0,<xmm2=%xmm2 | ||
11947 | pshufb EXPB0,%xmm2 | ||
11948 | |||
11949 | # qhasm: shuffle bytes of xmm5 by EXPB0 | ||
11950 | # asm 1: pshufb EXPB0,<xmm5=int6464#6 | ||
11951 | # asm 2: pshufb EXPB0,<xmm5=%xmm5 | ||
11952 | pshufb EXPB0,%xmm5 | ||
11953 | |||
11954 | # qhasm: xmm8 = *(int128 *)(c + 1024) | ||
11955 | # asm 1: movdqa 1024(<c=int64#1),>xmm8=int6464#9 | ||
11956 | # asm 2: movdqa 1024(<c=%rdi),>xmm8=%xmm8 | ||
11957 | movdqa 1024(%rdi),%xmm8 | ||
11958 | |||
11959 | # qhasm: xmm9 = *(int128 *)(c + 1040) | ||
11960 | # asm 1: movdqa 1040(<c=int64#1),>xmm9=int6464#10 | ||
11961 | # asm 2: movdqa 1040(<c=%rdi),>xmm9=%xmm9 | ||
11962 | movdqa 1040(%rdi),%xmm9 | ||
11963 | |||
11964 | # qhasm: xmm10 = *(int128 *)(c + 1056) | ||
11965 | # asm 1: movdqa 1056(<c=int64#1),>xmm10=int6464#11 | ||
11966 | # asm 2: movdqa 1056(<c=%rdi),>xmm10=%xmm10 | ||
11967 | movdqa 1056(%rdi),%xmm10 | ||
11968 | |||
11969 | # qhasm: xmm11 = *(int128 *)(c + 1072) | ||
11970 | # asm 1: movdqa 1072(<c=int64#1),>xmm11=int6464#12 | ||
11971 | # asm 2: movdqa 1072(<c=%rdi),>xmm11=%xmm11 | ||
11972 | movdqa 1072(%rdi),%xmm11 | ||
11973 | |||
11974 | # qhasm: xmm12 = *(int128 *)(c + 1088) | ||
11975 | # asm 1: movdqa 1088(<c=int64#1),>xmm12=int6464#13 | ||
11976 | # asm 2: movdqa 1088(<c=%rdi),>xmm12=%xmm12 | ||
11977 | movdqa 1088(%rdi),%xmm12 | ||
11978 | |||
11979 | # qhasm: xmm13 = *(int128 *)(c + 1104) | ||
11980 | # asm 1: movdqa 1104(<c=int64#1),>xmm13=int6464#14 | ||
11981 | # asm 2: movdqa 1104(<c=%rdi),>xmm13=%xmm13 | ||
11982 | movdqa 1104(%rdi),%xmm13 | ||
11983 | |||
11984 | # qhasm: xmm14 = *(int128 *)(c + 1120) | ||
11985 | # asm 1: movdqa 1120(<c=int64#1),>xmm14=int6464#15 | ||
11986 | # asm 2: movdqa 1120(<c=%rdi),>xmm14=%xmm14 | ||
11987 | movdqa 1120(%rdi),%xmm14 | ||
11988 | |||
11989 | # qhasm: xmm15 = *(int128 *)(c + 1136) | ||
11990 | # asm 1: movdqa 1136(<c=int64#1),>xmm15=int6464#16 | ||
11991 | # asm 2: movdqa 1136(<c=%rdi),>xmm15=%xmm15 | ||
11992 | movdqa 1136(%rdi),%xmm15 | ||
11993 | |||
11994 | # qhasm: xmm8 ^= ONE | ||
11995 | # asm 1: pxor ONE,<xmm8=int6464#9 | ||
11996 | # asm 2: pxor ONE,<xmm8=%xmm8 | ||
11997 | pxor ONE,%xmm8 | ||
11998 | |||
11999 | # qhasm: xmm9 ^= ONE | ||
12000 | # asm 1: pxor ONE,<xmm9=int6464#10 | ||
12001 | # asm 2: pxor ONE,<xmm9=%xmm9 | ||
12002 | pxor ONE,%xmm9 | ||
12003 | |||
12004 | # qhasm: xmm13 ^= ONE | ||
12005 | # asm 1: pxor ONE,<xmm13=int6464#14 | ||
12006 | # asm 2: pxor ONE,<xmm13=%xmm13 | ||
12007 | pxor ONE,%xmm13 | ||
12008 | |||
12009 | # qhasm: xmm14 ^= ONE | ||
12010 | # asm 1: pxor ONE,<xmm14=int6464#15 | ||
12011 | # asm 2: pxor ONE,<xmm14=%xmm14 | ||
12012 | pxor ONE,%xmm14 | ||
12013 | |||
12014 | # qhasm: xmm0 ^= xmm8 | ||
12015 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
12016 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
12017 | pxor %xmm8,%xmm0 | ||
12018 | |||
12019 | # qhasm: xmm1 ^= xmm9 | ||
12020 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
12021 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
12022 | pxor %xmm9,%xmm1 | ||
12023 | |||
12024 | # qhasm: xmm4 ^= xmm10 | ||
12025 | # asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5 | ||
12026 | # asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4 | ||
12027 | pxor %xmm10,%xmm4 | ||
12028 | |||
12029 | # qhasm: xmm6 ^= xmm11 | ||
12030 | # asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7 | ||
12031 | # asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6 | ||
12032 | pxor %xmm11,%xmm6 | ||
12033 | |||
12034 | # qhasm: xmm3 ^= xmm12 | ||
12035 | # asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4 | ||
12036 | # asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3 | ||
12037 | pxor %xmm12,%xmm3 | ||
12038 | |||
12039 | # qhasm: xmm7 ^= xmm13 | ||
12040 | # asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8 | ||
12041 | # asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7 | ||
12042 | pxor %xmm13,%xmm7 | ||
12043 | |||
12044 | # qhasm: xmm2 ^= xmm14 | ||
12045 | # asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3 | ||
12046 | # asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2 | ||
12047 | pxor %xmm14,%xmm2 | ||
12048 | |||
12049 | # qhasm: xmm5 ^= xmm15 | ||
12050 | # asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6 | ||
12051 | # asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5 | ||
12052 | pxor %xmm15,%xmm5 | ||
12053 | |||
12054 | # qhasm: uint32323232 xmm8 >>= 8 | ||
12055 | # asm 1: psrld $8,<xmm8=int6464#9 | ||
12056 | # asm 2: psrld $8,<xmm8=%xmm8 | ||
12057 | psrld $8,%xmm8 | ||
12058 | |||
12059 | # qhasm: uint32323232 xmm9 >>= 8 | ||
12060 | # asm 1: psrld $8,<xmm9=int6464#10 | ||
12061 | # asm 2: psrld $8,<xmm9=%xmm9 | ||
12062 | psrld $8,%xmm9 | ||
12063 | |||
12064 | # qhasm: uint32323232 xmm10 >>= 8 | ||
12065 | # asm 1: psrld $8,<xmm10=int6464#11 | ||
12066 | # asm 2: psrld $8,<xmm10=%xmm10 | ||
12067 | psrld $8,%xmm10 | ||
12068 | |||
12069 | # qhasm: uint32323232 xmm11 >>= 8 | ||
12070 | # asm 1: psrld $8,<xmm11=int6464#12 | ||
12071 | # asm 2: psrld $8,<xmm11=%xmm11 | ||
12072 | psrld $8,%xmm11 | ||
12073 | |||
12074 | # qhasm: uint32323232 xmm12 >>= 8 | ||
12075 | # asm 1: psrld $8,<xmm12=int6464#13 | ||
12076 | # asm 2: psrld $8,<xmm12=%xmm12 | ||
12077 | psrld $8,%xmm12 | ||
12078 | |||
12079 | # qhasm: uint32323232 xmm13 >>= 8 | ||
12080 | # asm 1: psrld $8,<xmm13=int6464#14 | ||
12081 | # asm 2: psrld $8,<xmm13=%xmm13 | ||
12082 | psrld $8,%xmm13 | ||
12083 | |||
12084 | # qhasm: uint32323232 xmm14 >>= 8 | ||
12085 | # asm 1: psrld $8,<xmm14=int6464#15 | ||
12086 | # asm 2: psrld $8,<xmm14=%xmm14 | ||
12087 | psrld $8,%xmm14 | ||
12088 | |||
12089 | # qhasm: uint32323232 xmm15 >>= 8 | ||
12090 | # asm 1: psrld $8,<xmm15=int6464#16 | ||
12091 | # asm 2: psrld $8,<xmm15=%xmm15 | ||
12092 | psrld $8,%xmm15 | ||
12093 | |||
12094 | # qhasm: xmm0 ^= xmm8 | ||
12095 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
12096 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
12097 | pxor %xmm8,%xmm0 | ||
12098 | |||
12099 | # qhasm: xmm1 ^= xmm9 | ||
12100 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
12101 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
12102 | pxor %xmm9,%xmm1 | ||
12103 | |||
12104 | # qhasm: xmm4 ^= xmm10 | ||
12105 | # asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5 | ||
12106 | # asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4 | ||
12107 | pxor %xmm10,%xmm4 | ||
12108 | |||
12109 | # qhasm: xmm6 ^= xmm11 | ||
12110 | # asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7 | ||
12111 | # asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6 | ||
12112 | pxor %xmm11,%xmm6 | ||
12113 | |||
12114 | # qhasm: xmm3 ^= xmm12 | ||
12115 | # asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4 | ||
12116 | # asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3 | ||
12117 | pxor %xmm12,%xmm3 | ||
12118 | |||
12119 | # qhasm: xmm7 ^= xmm13 | ||
12120 | # asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8 | ||
12121 | # asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7 | ||
12122 | pxor %xmm13,%xmm7 | ||
12123 | |||
12124 | # qhasm: xmm2 ^= xmm14 | ||
12125 | # asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3 | ||
12126 | # asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2 | ||
12127 | pxor %xmm14,%xmm2 | ||
12128 | |||
12129 | # qhasm: xmm5 ^= xmm15 | ||
12130 | # asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6 | ||
12131 | # asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5 | ||
12132 | pxor %xmm15,%xmm5 | ||
12133 | |||
12134 | # qhasm: uint32323232 xmm8 >>= 8 | ||
12135 | # asm 1: psrld $8,<xmm8=int6464#9 | ||
12136 | # asm 2: psrld $8,<xmm8=%xmm8 | ||
12137 | psrld $8,%xmm8 | ||
12138 | |||
12139 | # qhasm: uint32323232 xmm9 >>= 8 | ||
12140 | # asm 1: psrld $8,<xmm9=int6464#10 | ||
12141 | # asm 2: psrld $8,<xmm9=%xmm9 | ||
12142 | psrld $8,%xmm9 | ||
12143 | |||
12144 | # qhasm: uint32323232 xmm10 >>= 8 | ||
12145 | # asm 1: psrld $8,<xmm10=int6464#11 | ||
12146 | # asm 2: psrld $8,<xmm10=%xmm10 | ||
12147 | psrld $8,%xmm10 | ||
12148 | |||
12149 | # qhasm: uint32323232 xmm11 >>= 8 | ||
12150 | # asm 1: psrld $8,<xmm11=int6464#12 | ||
12151 | # asm 2: psrld $8,<xmm11=%xmm11 | ||
12152 | psrld $8,%xmm11 | ||
12153 | |||
12154 | # qhasm: uint32323232 xmm12 >>= 8 | ||
12155 | # asm 1: psrld $8,<xmm12=int6464#13 | ||
12156 | # asm 2: psrld $8,<xmm12=%xmm12 | ||
12157 | psrld $8,%xmm12 | ||
12158 | |||
12159 | # qhasm: uint32323232 xmm13 >>= 8 | ||
12160 | # asm 1: psrld $8,<xmm13=int6464#14 | ||
12161 | # asm 2: psrld $8,<xmm13=%xmm13 | ||
12162 | psrld $8,%xmm13 | ||
12163 | |||
12164 | # qhasm: uint32323232 xmm14 >>= 8 | ||
12165 | # asm 1: psrld $8,<xmm14=int6464#15 | ||
12166 | # asm 2: psrld $8,<xmm14=%xmm14 | ||
12167 | psrld $8,%xmm14 | ||
12168 | |||
12169 | # qhasm: uint32323232 xmm15 >>= 8 | ||
12170 | # asm 1: psrld $8,<xmm15=int6464#16 | ||
12171 | # asm 2: psrld $8,<xmm15=%xmm15 | ||
12172 | psrld $8,%xmm15 | ||
12173 | |||
12174 | # qhasm: xmm0 ^= xmm8 | ||
12175 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
12176 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
12177 | pxor %xmm8,%xmm0 | ||
12178 | |||
12179 | # qhasm: xmm1 ^= xmm9 | ||
12180 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
12181 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
12182 | pxor %xmm9,%xmm1 | ||
12183 | |||
12184 | # qhasm: xmm4 ^= xmm10 | ||
12185 | # asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5 | ||
12186 | # asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4 | ||
12187 | pxor %xmm10,%xmm4 | ||
12188 | |||
12189 | # qhasm: xmm6 ^= xmm11 | ||
12190 | # asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7 | ||
12191 | # asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6 | ||
12192 | pxor %xmm11,%xmm6 | ||
12193 | |||
12194 | # qhasm: xmm3 ^= xmm12 | ||
12195 | # asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4 | ||
12196 | # asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3 | ||
12197 | pxor %xmm12,%xmm3 | ||
12198 | |||
12199 | # qhasm: xmm7 ^= xmm13 | ||
12200 | # asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8 | ||
12201 | # asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7 | ||
12202 | pxor %xmm13,%xmm7 | ||
12203 | |||
12204 | # qhasm: xmm2 ^= xmm14 | ||
12205 | # asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3 | ||
12206 | # asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2 | ||
12207 | pxor %xmm14,%xmm2 | ||
12208 | |||
12209 | # qhasm: xmm5 ^= xmm15 | ||
12210 | # asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6 | ||
12211 | # asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5 | ||
12212 | pxor %xmm15,%xmm5 | ||
12213 | |||
12214 | # qhasm: uint32323232 xmm8 >>= 8 | ||
12215 | # asm 1: psrld $8,<xmm8=int6464#9 | ||
12216 | # asm 2: psrld $8,<xmm8=%xmm8 | ||
12217 | psrld $8,%xmm8 | ||
12218 | |||
12219 | # qhasm: uint32323232 xmm9 >>= 8 | ||
12220 | # asm 1: psrld $8,<xmm9=int6464#10 | ||
12221 | # asm 2: psrld $8,<xmm9=%xmm9 | ||
12222 | psrld $8,%xmm9 | ||
12223 | |||
12224 | # qhasm: uint32323232 xmm10 >>= 8 | ||
12225 | # asm 1: psrld $8,<xmm10=int6464#11 | ||
12226 | # asm 2: psrld $8,<xmm10=%xmm10 | ||
12227 | psrld $8,%xmm10 | ||
12228 | |||
12229 | # qhasm: uint32323232 xmm11 >>= 8 | ||
12230 | # asm 1: psrld $8,<xmm11=int6464#12 | ||
12231 | # asm 2: psrld $8,<xmm11=%xmm11 | ||
12232 | psrld $8,%xmm11 | ||
12233 | |||
12234 | # qhasm: uint32323232 xmm12 >>= 8 | ||
12235 | # asm 1: psrld $8,<xmm12=int6464#13 | ||
12236 | # asm 2: psrld $8,<xmm12=%xmm12 | ||
12237 | psrld $8,%xmm12 | ||
12238 | |||
12239 | # qhasm: uint32323232 xmm13 >>= 8 | ||
12240 | # asm 1: psrld $8,<xmm13=int6464#14 | ||
12241 | # asm 2: psrld $8,<xmm13=%xmm13 | ||
12242 | psrld $8,%xmm13 | ||
12243 | |||
12244 | # qhasm: uint32323232 xmm14 >>= 8 | ||
12245 | # asm 1: psrld $8,<xmm14=int6464#15 | ||
12246 | # asm 2: psrld $8,<xmm14=%xmm14 | ||
12247 | psrld $8,%xmm14 | ||
12248 | |||
12249 | # qhasm: uint32323232 xmm15 >>= 8 | ||
12250 | # asm 1: psrld $8,<xmm15=int6464#16 | ||
12251 | # asm 2: psrld $8,<xmm15=%xmm15 | ||
12252 | psrld $8,%xmm15 | ||
12253 | |||
12254 | # qhasm: xmm0 ^= xmm8 | ||
12255 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
12256 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
12257 | pxor %xmm8,%xmm0 | ||
12258 | |||
12259 | # qhasm: xmm1 ^= xmm9 | ||
12260 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
12261 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
12262 | pxor %xmm9,%xmm1 | ||
12263 | |||
12264 | # qhasm: xmm4 ^= xmm10 | ||
12265 | # asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5 | ||
12266 | # asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4 | ||
12267 | pxor %xmm10,%xmm4 | ||
12268 | |||
12269 | # qhasm: xmm6 ^= xmm11 | ||
12270 | # asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7 | ||
12271 | # asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6 | ||
12272 | pxor %xmm11,%xmm6 | ||
12273 | |||
12274 | # qhasm: xmm3 ^= xmm12 | ||
12275 | # asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4 | ||
12276 | # asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3 | ||
12277 | pxor %xmm12,%xmm3 | ||
12278 | |||
12279 | # qhasm: xmm7 ^= xmm13 | ||
12280 | # asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8 | ||
12281 | # asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7 | ||
12282 | pxor %xmm13,%xmm7 | ||
12283 | |||
12284 | # qhasm: xmm2 ^= xmm14 | ||
12285 | # asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3 | ||
12286 | # asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2 | ||
12287 | pxor %xmm14,%xmm2 | ||
12288 | |||
12289 | # qhasm: xmm5 ^= xmm15 | ||
12290 | # asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6 | ||
12291 | # asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5 | ||
12292 | pxor %xmm15,%xmm5 | ||
12293 | |||
12294 | # qhasm: *(int128 *)(c + 1152) = xmm0 | ||
12295 | # asm 1: movdqa <xmm0=int6464#1,1152(<c=int64#1) | ||
12296 | # asm 2: movdqa <xmm0=%xmm0,1152(<c=%rdi) | ||
12297 | movdqa %xmm0,1152(%rdi) | ||
12298 | |||
12299 | # qhasm: *(int128 *)(c + 1168) = xmm1 | ||
12300 | # asm 1: movdqa <xmm1=int6464#2,1168(<c=int64#1) | ||
12301 | # asm 2: movdqa <xmm1=%xmm1,1168(<c=%rdi) | ||
12302 | movdqa %xmm1,1168(%rdi) | ||
12303 | |||
12304 | # qhasm: *(int128 *)(c + 1184) = xmm4 | ||
12305 | # asm 1: movdqa <xmm4=int6464#5,1184(<c=int64#1) | ||
12306 | # asm 2: movdqa <xmm4=%xmm4,1184(<c=%rdi) | ||
12307 | movdqa %xmm4,1184(%rdi) | ||
12308 | |||
12309 | # qhasm: *(int128 *)(c + 1200) = xmm6 | ||
12310 | # asm 1: movdqa <xmm6=int6464#7,1200(<c=int64#1) | ||
12311 | # asm 2: movdqa <xmm6=%xmm6,1200(<c=%rdi) | ||
12312 | movdqa %xmm6,1200(%rdi) | ||
12313 | |||
12314 | # qhasm: *(int128 *)(c + 1216) = xmm3 | ||
12315 | # asm 1: movdqa <xmm3=int6464#4,1216(<c=int64#1) | ||
12316 | # asm 2: movdqa <xmm3=%xmm3,1216(<c=%rdi) | ||
12317 | movdqa %xmm3,1216(%rdi) | ||
12318 | |||
12319 | # qhasm: *(int128 *)(c + 1232) = xmm7 | ||
12320 | # asm 1: movdqa <xmm7=int6464#8,1232(<c=int64#1) | ||
12321 | # asm 2: movdqa <xmm7=%xmm7,1232(<c=%rdi) | ||
12322 | movdqa %xmm7,1232(%rdi) | ||
12323 | |||
12324 | # qhasm: *(int128 *)(c + 1248) = xmm2 | ||
12325 | # asm 1: movdqa <xmm2=int6464#3,1248(<c=int64#1) | ||
12326 | # asm 2: movdqa <xmm2=%xmm2,1248(<c=%rdi) | ||
12327 | movdqa %xmm2,1248(%rdi) | ||
12328 | |||
12329 | # qhasm: *(int128 *)(c + 1264) = xmm5 | ||
12330 | # asm 1: movdqa <xmm5=int6464#6,1264(<c=int64#1) | ||
12331 | # asm 2: movdqa <xmm5=%xmm5,1264(<c=%rdi) | ||
12332 | movdqa %xmm5,1264(%rdi) | ||
12333 | |||
12334 | # qhasm: xmm0 ^= ONE | ||
12335 | # asm 1: pxor ONE,<xmm0=int6464#1 | ||
12336 | # asm 2: pxor ONE,<xmm0=%xmm0 | ||
12337 | pxor ONE,%xmm0 | ||
12338 | |||
12339 | # qhasm: xmm1 ^= ONE | ||
12340 | # asm 1: pxor ONE,<xmm1=int6464#2 | ||
12341 | # asm 2: pxor ONE,<xmm1=%xmm1 | ||
12342 | pxor ONE,%xmm1 | ||
12343 | |||
12344 | # qhasm: xmm7 ^= ONE | ||
12345 | # asm 1: pxor ONE,<xmm7=int6464#8 | ||
12346 | # asm 2: pxor ONE,<xmm7=%xmm7 | ||
12347 | pxor ONE,%xmm7 | ||
12348 | |||
12349 | # qhasm: xmm2 ^= ONE | ||
12350 | # asm 1: pxor ONE,<xmm2=int6464#3 | ||
12351 | # asm 2: pxor ONE,<xmm2=%xmm2 | ||
12352 | pxor ONE,%xmm2 | ||
12353 | |||
12354 | # qhasm: shuffle bytes of xmm0 by ROTB | ||
12355 | # asm 1: pshufb ROTB,<xmm0=int6464#1 | ||
12356 | # asm 2: pshufb ROTB,<xmm0=%xmm0 | ||
12357 | pshufb ROTB,%xmm0 | ||
12358 | |||
12359 | # qhasm: shuffle bytes of xmm1 by ROTB | ||
12360 | # asm 1: pshufb ROTB,<xmm1=int6464#2 | ||
12361 | # asm 2: pshufb ROTB,<xmm1=%xmm1 | ||
12362 | pshufb ROTB,%xmm1 | ||
12363 | |||
12364 | # qhasm: shuffle bytes of xmm4 by ROTB | ||
12365 | # asm 1: pshufb ROTB,<xmm4=int6464#5 | ||
12366 | # asm 2: pshufb ROTB,<xmm4=%xmm4 | ||
12367 | pshufb ROTB,%xmm4 | ||
12368 | |||
12369 | # qhasm: shuffle bytes of xmm6 by ROTB | ||
12370 | # asm 1: pshufb ROTB,<xmm6=int6464#7 | ||
12371 | # asm 2: pshufb ROTB,<xmm6=%xmm6 | ||
12372 | pshufb ROTB,%xmm6 | ||
12373 | |||
12374 | # qhasm: shuffle bytes of xmm3 by ROTB | ||
12375 | # asm 1: pshufb ROTB,<xmm3=int6464#4 | ||
12376 | # asm 2: pshufb ROTB,<xmm3=%xmm3 | ||
12377 | pshufb ROTB,%xmm3 | ||
12378 | |||
12379 | # qhasm: shuffle bytes of xmm7 by ROTB | ||
12380 | # asm 1: pshufb ROTB,<xmm7=int6464#8 | ||
12381 | # asm 2: pshufb ROTB,<xmm7=%xmm7 | ||
12382 | pshufb ROTB,%xmm7 | ||
12383 | |||
12384 | # qhasm: shuffle bytes of xmm2 by ROTB | ||
12385 | # asm 1: pshufb ROTB,<xmm2=int6464#3 | ||
12386 | # asm 2: pshufb ROTB,<xmm2=%xmm2 | ||
12387 | pshufb ROTB,%xmm2 | ||
12388 | |||
12389 | # qhasm: shuffle bytes of xmm5 by ROTB | ||
12390 | # asm 1: pshufb ROTB,<xmm5=int6464#6 | ||
12391 | # asm 2: pshufb ROTB,<xmm5=%xmm5 | ||
12392 | pshufb ROTB,%xmm5 | ||
12393 | |||
12394 | # qhasm: xmm7 ^= xmm2 | ||
12395 | # asm 1: pxor <xmm2=int6464#3,<xmm7=int6464#8 | ||
12396 | # asm 2: pxor <xmm2=%xmm2,<xmm7=%xmm7 | ||
12397 | pxor %xmm2,%xmm7 | ||
12398 | |||
12399 | # qhasm: xmm4 ^= xmm1 | ||
12400 | # asm 1: pxor <xmm1=int6464#2,<xmm4=int6464#5 | ||
12401 | # asm 2: pxor <xmm1=%xmm1,<xmm4=%xmm4 | ||
12402 | pxor %xmm1,%xmm4 | ||
12403 | |||
12404 | # qhasm: xmm7 ^= xmm0 | ||
12405 | # asm 1: pxor <xmm0=int6464#1,<xmm7=int6464#8 | ||
12406 | # asm 2: pxor <xmm0=%xmm0,<xmm7=%xmm7 | ||
12407 | pxor %xmm0,%xmm7 | ||
12408 | |||
12409 | # qhasm: xmm2 ^= xmm4 | ||
12410 | # asm 1: pxor <xmm4=int6464#5,<xmm2=int6464#3 | ||
12411 | # asm 2: pxor <xmm4=%xmm4,<xmm2=%xmm2 | ||
12412 | pxor %xmm4,%xmm2 | ||
12413 | |||
12414 | # qhasm: xmm6 ^= xmm0 | ||
12415 | # asm 1: pxor <xmm0=int6464#1,<xmm6=int6464#7 | ||
12416 | # asm 2: pxor <xmm0=%xmm0,<xmm6=%xmm6 | ||
12417 | pxor %xmm0,%xmm6 | ||
12418 | |||
12419 | # qhasm: xmm2 ^= xmm6 | ||
12420 | # asm 1: pxor <xmm6=int6464#7,<xmm2=int6464#3 | ||
12421 | # asm 2: pxor <xmm6=%xmm6,<xmm2=%xmm2 | ||
12422 | pxor %xmm6,%xmm2 | ||
12423 | |||
12424 | # qhasm: xmm6 ^= xmm5 | ||
12425 | # asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7 | ||
12426 | # asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6 | ||
12427 | pxor %xmm5,%xmm6 | ||
12428 | |||
12429 | # qhasm: xmm6 ^= xmm3 | ||
12430 | # asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7 | ||
12431 | # asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6 | ||
12432 | pxor %xmm3,%xmm6 | ||
12433 | |||
12434 | # qhasm: xmm5 ^= xmm7 | ||
12435 | # asm 1: pxor <xmm7=int6464#8,<xmm5=int6464#6 | ||
12436 | # asm 2: pxor <xmm7=%xmm7,<xmm5=%xmm5 | ||
12437 | pxor %xmm7,%xmm5 | ||
12438 | |||
12439 | # qhasm: xmm6 ^= xmm1 | ||
12440 | # asm 1: pxor <xmm1=int6464#2,<xmm6=int6464#7 | ||
12441 | # asm 2: pxor <xmm1=%xmm1,<xmm6=%xmm6 | ||
12442 | pxor %xmm1,%xmm6 | ||
12443 | |||
12444 | # qhasm: xmm3 ^= xmm7 | ||
12445 | # asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4 | ||
12446 | # asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3 | ||
12447 | pxor %xmm7,%xmm3 | ||
12448 | |||
12449 | # qhasm: xmm4 ^= xmm5 | ||
12450 | # asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5 | ||
12451 | # asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4 | ||
12452 | pxor %xmm5,%xmm4 | ||
12453 | |||
12454 | # qhasm: xmm1 ^= xmm7 | ||
12455 | # asm 1: pxor <xmm7=int6464#8,<xmm1=int6464#2 | ||
12456 | # asm 2: pxor <xmm7=%xmm7,<xmm1=%xmm1 | ||
12457 | pxor %xmm7,%xmm1 | ||
12458 | |||
12459 | # qhasm: xmm11 = xmm5 | ||
12460 | # asm 1: movdqa <xmm5=int6464#6,>xmm11=int6464#9 | ||
12461 | # asm 2: movdqa <xmm5=%xmm5,>xmm11=%xmm8 | ||
12462 | movdqa %xmm5,%xmm8 | ||
12463 | |||
12464 | # qhasm: xmm10 = xmm1 | ||
12465 | # asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10 | ||
12466 | # asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9 | ||
12467 | movdqa %xmm1,%xmm9 | ||
12468 | |||
12469 | # qhasm: xmm9 = xmm7 | ||
12470 | # asm 1: movdqa <xmm7=int6464#8,>xmm9=int6464#11 | ||
12471 | # asm 2: movdqa <xmm7=%xmm7,>xmm9=%xmm10 | ||
12472 | movdqa %xmm7,%xmm10 | ||
12473 | |||
12474 | # qhasm: xmm13 = xmm4 | ||
12475 | # asm 1: movdqa <xmm4=int6464#5,>xmm13=int6464#12 | ||
12476 | # asm 2: movdqa <xmm4=%xmm4,>xmm13=%xmm11 | ||
12477 | movdqa %xmm4,%xmm11 | ||
12478 | |||
12479 | # qhasm: xmm12 = xmm2 | ||
12480 | # asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#13 | ||
12481 | # asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm12 | ||
12482 | movdqa %xmm2,%xmm12 | ||
12483 | |||
12484 | # qhasm: xmm11 ^= xmm3 | ||
12485 | # asm 1: pxor <xmm3=int6464#4,<xmm11=int6464#9 | ||
12486 | # asm 2: pxor <xmm3=%xmm3,<xmm11=%xmm8 | ||
12487 | pxor %xmm3,%xmm8 | ||
12488 | |||
12489 | # qhasm: xmm10 ^= xmm4 | ||
12490 | # asm 1: pxor <xmm4=int6464#5,<xmm10=int6464#10 | ||
12491 | # asm 2: pxor <xmm4=%xmm4,<xmm10=%xmm9 | ||
12492 | pxor %xmm4,%xmm9 | ||
12493 | |||
12494 | # qhasm: xmm9 ^= xmm6 | ||
12495 | # asm 1: pxor <xmm6=int6464#7,<xmm9=int6464#11 | ||
12496 | # asm 2: pxor <xmm6=%xmm6,<xmm9=%xmm10 | ||
12497 | pxor %xmm6,%xmm10 | ||
12498 | |||
12499 | # qhasm: xmm13 ^= xmm3 | ||
12500 | # asm 1: pxor <xmm3=int6464#4,<xmm13=int6464#12 | ||
12501 | # asm 2: pxor <xmm3=%xmm3,<xmm13=%xmm11 | ||
12502 | pxor %xmm3,%xmm11 | ||
12503 | |||
12504 | # qhasm: xmm12 ^= xmm0 | ||
12505 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13 | ||
12506 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12 | ||
12507 | pxor %xmm0,%xmm12 | ||
12508 | |||
12509 | # qhasm: xmm14 = xmm11 | ||
12510 | # asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14 | ||
12511 | # asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13 | ||
12512 | movdqa %xmm8,%xmm13 | ||
12513 | |||
12514 | # qhasm: xmm8 = xmm10 | ||
12515 | # asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15 | ||
12516 | # asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14 | ||
12517 | movdqa %xmm9,%xmm14 | ||
12518 | |||
12519 | # qhasm: xmm15 = xmm11 | ||
12520 | # asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16 | ||
12521 | # asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15 | ||
12522 | movdqa %xmm8,%xmm15 | ||
12523 | |||
12524 | # qhasm: xmm10 |= xmm9 | ||
12525 | # asm 1: por <xmm9=int6464#11,<xmm10=int6464#10 | ||
12526 | # asm 2: por <xmm9=%xmm10,<xmm10=%xmm9 | ||
12527 | por %xmm10,%xmm9 | ||
12528 | |||
12529 | # qhasm: xmm11 |= xmm12 | ||
12530 | # asm 1: por <xmm12=int6464#13,<xmm11=int6464#9 | ||
12531 | # asm 2: por <xmm12=%xmm12,<xmm11=%xmm8 | ||
12532 | por %xmm12,%xmm8 | ||
12533 | |||
12534 | # qhasm: xmm15 ^= xmm8 | ||
12535 | # asm 1: pxor <xmm8=int6464#15,<xmm15=int6464#16 | ||
12536 | # asm 2: pxor <xmm8=%xmm14,<xmm15=%xmm15 | ||
12537 | pxor %xmm14,%xmm15 | ||
12538 | |||
12539 | # qhasm: xmm14 &= xmm12 | ||
12540 | # asm 1: pand <xmm12=int6464#13,<xmm14=int6464#14 | ||
12541 | # asm 2: pand <xmm12=%xmm12,<xmm14=%xmm13 | ||
12542 | pand %xmm12,%xmm13 | ||
12543 | |||
12544 | # qhasm: xmm8 &= xmm9 | ||
12545 | # asm 1: pand <xmm9=int6464#11,<xmm8=int6464#15 | ||
12546 | # asm 2: pand <xmm9=%xmm10,<xmm8=%xmm14 | ||
12547 | pand %xmm10,%xmm14 | ||
12548 | |||
12549 | # qhasm: xmm12 ^= xmm9 | ||
12550 | # asm 1: pxor <xmm9=int6464#11,<xmm12=int6464#13 | ||
12551 | # asm 2: pxor <xmm9=%xmm10,<xmm12=%xmm12 | ||
12552 | pxor %xmm10,%xmm12 | ||
12553 | |||
12554 | # qhasm: xmm15 &= xmm12 | ||
12555 | # asm 1: pand <xmm12=int6464#13,<xmm15=int6464#16 | ||
12556 | # asm 2: pand <xmm12=%xmm12,<xmm15=%xmm15 | ||
12557 | pand %xmm12,%xmm15 | ||
12558 | |||
12559 | # qhasm: xmm12 = xmm6 | ||
12560 | # asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#11 | ||
12561 | # asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm10 | ||
12562 | movdqa %xmm6,%xmm10 | ||
12563 | |||
12564 | # qhasm: xmm12 ^= xmm0 | ||
12565 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#11 | ||
12566 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm10 | ||
12567 | pxor %xmm0,%xmm10 | ||
12568 | |||
12569 | # qhasm: xmm13 &= xmm12 | ||
12570 | # asm 1: pand <xmm12=int6464#11,<xmm13=int6464#12 | ||
12571 | # asm 2: pand <xmm12=%xmm10,<xmm13=%xmm11 | ||
12572 | pand %xmm10,%xmm11 | ||
12573 | |||
12574 | # qhasm: xmm11 ^= xmm13 | ||
12575 | # asm 1: pxor <xmm13=int6464#12,<xmm11=int6464#9 | ||
12576 | # asm 2: pxor <xmm13=%xmm11,<xmm11=%xmm8 | ||
12577 | pxor %xmm11,%xmm8 | ||
12578 | |||
12579 | # qhasm: xmm10 ^= xmm13 | ||
12580 | # asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10 | ||
12581 | # asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9 | ||
12582 | pxor %xmm11,%xmm9 | ||
12583 | |||
12584 | # qhasm: xmm13 = xmm5 | ||
12585 | # asm 1: movdqa <xmm5=int6464#6,>xmm13=int6464#11 | ||
12586 | # asm 2: movdqa <xmm5=%xmm5,>xmm13=%xmm10 | ||
12587 | movdqa %xmm5,%xmm10 | ||
12588 | |||
12589 | # qhasm: xmm13 ^= xmm1 | ||
12590 | # asm 1: pxor <xmm1=int6464#2,<xmm13=int6464#11 | ||
12591 | # asm 2: pxor <xmm1=%xmm1,<xmm13=%xmm10 | ||
12592 | pxor %xmm1,%xmm10 | ||
12593 | |||
12594 | # qhasm: xmm12 = xmm7 | ||
12595 | # asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#12 | ||
12596 | # asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm11 | ||
12597 | movdqa %xmm7,%xmm11 | ||
12598 | |||
12599 | # qhasm: xmm9 = xmm13 | ||
12600 | # asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13 | ||
12601 | # asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12 | ||
12602 | movdqa %xmm10,%xmm12 | ||
12603 | |||
12604 | # qhasm: xmm12 ^= xmm2 | ||
12605 | # asm 1: pxor <xmm2=int6464#3,<xmm12=int6464#12 | ||
12606 | # asm 2: pxor <xmm2=%xmm2,<xmm12=%xmm11 | ||
12607 | pxor %xmm2,%xmm11 | ||
12608 | |||
12609 | # qhasm: xmm9 |= xmm12 | ||
12610 | # asm 1: por <xmm12=int6464#12,<xmm9=int6464#13 | ||
12611 | # asm 2: por <xmm12=%xmm11,<xmm9=%xmm12 | ||
12612 | por %xmm11,%xmm12 | ||
12613 | |||
12614 | # qhasm: xmm13 &= xmm12 | ||
12615 | # asm 1: pand <xmm12=int6464#12,<xmm13=int6464#11 | ||
12616 | # asm 2: pand <xmm12=%xmm11,<xmm13=%xmm10 | ||
12617 | pand %xmm11,%xmm10 | ||
12618 | |||
12619 | # qhasm: xmm8 ^= xmm13 | ||
12620 | # asm 1: pxor <xmm13=int6464#11,<xmm8=int6464#15 | ||
12621 | # asm 2: pxor <xmm13=%xmm10,<xmm8=%xmm14 | ||
12622 | pxor %xmm10,%xmm14 | ||
12623 | |||
12624 | # qhasm: xmm11 ^= xmm15 | ||
12625 | # asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#9 | ||
12626 | # asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm8 | ||
12627 | pxor %xmm15,%xmm8 | ||
12628 | |||
12629 | # qhasm: xmm10 ^= xmm14 | ||
12630 | # asm 1: pxor <xmm14=int6464#14,<xmm10=int6464#10 | ||
12631 | # asm 2: pxor <xmm14=%xmm13,<xmm10=%xmm9 | ||
12632 | pxor %xmm13,%xmm9 | ||
12633 | |||
12634 | # qhasm: xmm9 ^= xmm15 | ||
12635 | # asm 1: pxor <xmm15=int6464#16,<xmm9=int6464#13 | ||
12636 | # asm 2: pxor <xmm15=%xmm15,<xmm9=%xmm12 | ||
12637 | pxor %xmm15,%xmm12 | ||
12638 | |||
12639 | # qhasm: xmm8 ^= xmm14 | ||
12640 | # asm 1: pxor <xmm14=int6464#14,<xmm8=int6464#15 | ||
12641 | # asm 2: pxor <xmm14=%xmm13,<xmm8=%xmm14 | ||
12642 | pxor %xmm13,%xmm14 | ||
12643 | |||
12644 | # qhasm: xmm9 ^= xmm14 | ||
12645 | # asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13 | ||
12646 | # asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12 | ||
12647 | pxor %xmm13,%xmm12 | ||
12648 | |||
12649 | # qhasm: xmm12 = xmm4 | ||
12650 | # asm 1: movdqa <xmm4=int6464#5,>xmm12=int6464#11 | ||
12651 | # asm 2: movdqa <xmm4=%xmm4,>xmm12=%xmm10 | ||
12652 | movdqa %xmm4,%xmm10 | ||
12653 | |||
12654 | # qhasm: xmm13 = xmm3 | ||
12655 | # asm 1: movdqa <xmm3=int6464#4,>xmm13=int6464#12 | ||
12656 | # asm 2: movdqa <xmm3=%xmm3,>xmm13=%xmm11 | ||
12657 | movdqa %xmm3,%xmm11 | ||
12658 | |||
12659 | # qhasm: xmm14 = xmm1 | ||
12660 | # asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14 | ||
12661 | # asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13 | ||
12662 | movdqa %xmm1,%xmm13 | ||
12663 | |||
12664 | # qhasm: xmm15 = xmm5 | ||
12665 | # asm 1: movdqa <xmm5=int6464#6,>xmm15=int6464#16 | ||
12666 | # asm 2: movdqa <xmm5=%xmm5,>xmm15=%xmm15 | ||
12667 | movdqa %xmm5,%xmm15 | ||
12668 | |||
12669 | # qhasm: xmm12 &= xmm6 | ||
12670 | # asm 1: pand <xmm6=int6464#7,<xmm12=int6464#11 | ||
12671 | # asm 2: pand <xmm6=%xmm6,<xmm12=%xmm10 | ||
12672 | pand %xmm6,%xmm10 | ||
12673 | |||
12674 | # qhasm: xmm13 &= xmm0 | ||
12675 | # asm 1: pand <xmm0=int6464#1,<xmm13=int6464#12 | ||
12676 | # asm 2: pand <xmm0=%xmm0,<xmm13=%xmm11 | ||
12677 | pand %xmm0,%xmm11 | ||
12678 | |||
12679 | # qhasm: xmm14 &= xmm7 | ||
12680 | # asm 1: pand <xmm7=int6464#8,<xmm14=int6464#14 | ||
12681 | # asm 2: pand <xmm7=%xmm7,<xmm14=%xmm13 | ||
12682 | pand %xmm7,%xmm13 | ||
12683 | |||
12684 | # qhasm: xmm15 |= xmm2 | ||
12685 | # asm 1: por <xmm2=int6464#3,<xmm15=int6464#16 | ||
12686 | # asm 2: por <xmm2=%xmm2,<xmm15=%xmm15 | ||
12687 | por %xmm2,%xmm15 | ||
12688 | |||
12689 | # qhasm: xmm11 ^= xmm12 | ||
12690 | # asm 1: pxor <xmm12=int6464#11,<xmm11=int6464#9 | ||
12691 | # asm 2: pxor <xmm12=%xmm10,<xmm11=%xmm8 | ||
12692 | pxor %xmm10,%xmm8 | ||
12693 | |||
12694 | # qhasm: xmm10 ^= xmm13 | ||
12695 | # asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10 | ||
12696 | # asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9 | ||
12697 | pxor %xmm11,%xmm9 | ||
12698 | |||
12699 | # qhasm: xmm9 ^= xmm14 | ||
12700 | # asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13 | ||
12701 | # asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12 | ||
12702 | pxor %xmm13,%xmm12 | ||
12703 | |||
12704 | # qhasm: xmm8 ^= xmm15 | ||
12705 | # asm 1: pxor <xmm15=int6464#16,<xmm8=int6464#15 | ||
12706 | # asm 2: pxor <xmm15=%xmm15,<xmm8=%xmm14 | ||
12707 | pxor %xmm15,%xmm14 | ||
12708 | |||
12709 | # qhasm: xmm12 = xmm11 | ||
12710 | # asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11 | ||
12711 | # asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10 | ||
12712 | movdqa %xmm8,%xmm10 | ||
12713 | |||
12714 | # qhasm: xmm12 ^= xmm10 | ||
12715 | # asm 1: pxor <xmm10=int6464#10,<xmm12=int6464#11 | ||
12716 | # asm 2: pxor <xmm10=%xmm9,<xmm12=%xmm10 | ||
12717 | pxor %xmm9,%xmm10 | ||
12718 | |||
12719 | # qhasm: xmm11 &= xmm9 | ||
12720 | # asm 1: pand <xmm9=int6464#13,<xmm11=int6464#9 | ||
12721 | # asm 2: pand <xmm9=%xmm12,<xmm11=%xmm8 | ||
12722 | pand %xmm12,%xmm8 | ||
12723 | |||
12724 | # qhasm: xmm14 = xmm8 | ||
12725 | # asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12 | ||
12726 | # asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11 | ||
12727 | movdqa %xmm14,%xmm11 | ||
12728 | |||
12729 | # qhasm: xmm14 ^= xmm11 | ||
12730 | # asm 1: pxor <xmm11=int6464#9,<xmm14=int6464#12 | ||
12731 | # asm 2: pxor <xmm11=%xmm8,<xmm14=%xmm11 | ||
12732 | pxor %xmm8,%xmm11 | ||
12733 | |||
12734 | # qhasm: xmm15 = xmm12 | ||
12735 | # asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14 | ||
12736 | # asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13 | ||
12737 | movdqa %xmm10,%xmm13 | ||
12738 | |||
12739 | # qhasm: xmm15 &= xmm14 | ||
12740 | # asm 1: pand <xmm14=int6464#12,<xmm15=int6464#14 | ||
12741 | # asm 2: pand <xmm14=%xmm11,<xmm15=%xmm13 | ||
12742 | pand %xmm11,%xmm13 | ||
12743 | |||
12744 | # qhasm: xmm15 ^= xmm10 | ||
12745 | # asm 1: pxor <xmm10=int6464#10,<xmm15=int6464#14 | ||
12746 | # asm 2: pxor <xmm10=%xmm9,<xmm15=%xmm13 | ||
12747 | pxor %xmm9,%xmm13 | ||
12748 | |||
12749 | # qhasm: xmm13 = xmm9 | ||
12750 | # asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16 | ||
12751 | # asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15 | ||
12752 | movdqa %xmm12,%xmm15 | ||
12753 | |||
12754 | # qhasm: xmm13 ^= xmm8 | ||
12755 | # asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16 | ||
12756 | # asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15 | ||
12757 | pxor %xmm14,%xmm15 | ||
12758 | |||
12759 | # qhasm: xmm11 ^= xmm10 | ||
12760 | # asm 1: pxor <xmm10=int6464#10,<xmm11=int6464#9 | ||
12761 | # asm 2: pxor <xmm10=%xmm9,<xmm11=%xmm8 | ||
12762 | pxor %xmm9,%xmm8 | ||
12763 | |||
12764 | # qhasm: xmm13 &= xmm11 | ||
12765 | # asm 1: pand <xmm11=int6464#9,<xmm13=int6464#16 | ||
12766 | # asm 2: pand <xmm11=%xmm8,<xmm13=%xmm15 | ||
12767 | pand %xmm8,%xmm15 | ||
12768 | |||
12769 | # qhasm: xmm13 ^= xmm8 | ||
12770 | # asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16 | ||
12771 | # asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15 | ||
12772 | pxor %xmm14,%xmm15 | ||
12773 | |||
12774 | # qhasm: xmm9 ^= xmm13 | ||
12775 | # asm 1: pxor <xmm13=int6464#16,<xmm9=int6464#13 | ||
12776 | # asm 2: pxor <xmm13=%xmm15,<xmm9=%xmm12 | ||
12777 | pxor %xmm15,%xmm12 | ||
12778 | |||
12779 | # qhasm: xmm10 = xmm14 | ||
12780 | # asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9 | ||
12781 | # asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8 | ||
12782 | movdqa %xmm11,%xmm8 | ||
12783 | |||
12784 | # qhasm: xmm10 ^= xmm13 | ||
12785 | # asm 1: pxor <xmm13=int6464#16,<xmm10=int6464#9 | ||
12786 | # asm 2: pxor <xmm13=%xmm15,<xmm10=%xmm8 | ||
12787 | pxor %xmm15,%xmm8 | ||
12788 | |||
12789 | # qhasm: xmm10 &= xmm8 | ||
12790 | # asm 1: pand <xmm8=int6464#15,<xmm10=int6464#9 | ||
12791 | # asm 2: pand <xmm8=%xmm14,<xmm10=%xmm8 | ||
12792 | pand %xmm14,%xmm8 | ||
12793 | |||
12794 | # qhasm: xmm9 ^= xmm10 | ||
12795 | # asm 1: pxor <xmm10=int6464#9,<xmm9=int6464#13 | ||
12796 | # asm 2: pxor <xmm10=%xmm8,<xmm9=%xmm12 | ||
12797 | pxor %xmm8,%xmm12 | ||
12798 | |||
12799 | # qhasm: xmm14 ^= xmm10 | ||
12800 | # asm 1: pxor <xmm10=int6464#9,<xmm14=int6464#12 | ||
12801 | # asm 2: pxor <xmm10=%xmm8,<xmm14=%xmm11 | ||
12802 | pxor %xmm8,%xmm11 | ||
12803 | |||
12804 | # qhasm: xmm14 &= xmm15 | ||
12805 | # asm 1: pand <xmm15=int6464#14,<xmm14=int6464#12 | ||
12806 | # asm 2: pand <xmm15=%xmm13,<xmm14=%xmm11 | ||
12807 | pand %xmm13,%xmm11 | ||
12808 | |||
12809 | # qhasm: xmm14 ^= xmm12 | ||
12810 | # asm 1: pxor <xmm12=int6464#11,<xmm14=int6464#12 | ||
12811 | # asm 2: pxor <xmm12=%xmm10,<xmm14=%xmm11 | ||
12812 | pxor %xmm10,%xmm11 | ||
12813 | |||
12814 | # qhasm: xmm12 = xmm2 | ||
12815 | # asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#9 | ||
12816 | # asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm8 | ||
12817 | movdqa %xmm2,%xmm8 | ||
12818 | |||
12819 | # qhasm: xmm8 = xmm7 | ||
12820 | # asm 1: movdqa <xmm7=int6464#8,>xmm8=int6464#10 | ||
12821 | # asm 2: movdqa <xmm7=%xmm7,>xmm8=%xmm9 | ||
12822 | movdqa %xmm7,%xmm9 | ||
12823 | |||
12824 | # qhasm: xmm10 = xmm15 | ||
12825 | # asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11 | ||
12826 | # asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10 | ||
12827 | movdqa %xmm13,%xmm10 | ||
12828 | |||
12829 | # qhasm: xmm10 ^= xmm14 | ||
12830 | # asm 1: pxor <xmm14=int6464#12,<xmm10=int6464#11 | ||
12831 | # asm 2: pxor <xmm14=%xmm11,<xmm10=%xmm10 | ||
12832 | pxor %xmm11,%xmm10 | ||
12833 | |||
12834 | # qhasm: xmm10 &= xmm2 | ||
12835 | # asm 1: pand <xmm2=int6464#3,<xmm10=int6464#11 | ||
12836 | # asm 2: pand <xmm2=%xmm2,<xmm10=%xmm10 | ||
12837 | pand %xmm2,%xmm10 | ||
12838 | |||
12839 | # qhasm: xmm2 ^= xmm7 | ||
12840 | # asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3 | ||
12841 | # asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2 | ||
12842 | pxor %xmm7,%xmm2 | ||
12843 | |||
12844 | # qhasm: xmm2 &= xmm14 | ||
12845 | # asm 1: pand <xmm14=int6464#12,<xmm2=int6464#3 | ||
12846 | # asm 2: pand <xmm14=%xmm11,<xmm2=%xmm2 | ||
12847 | pand %xmm11,%xmm2 | ||
12848 | |||
12849 | # qhasm: xmm7 &= xmm15 | ||
12850 | # asm 1: pand <xmm15=int6464#14,<xmm7=int6464#8 | ||
12851 | # asm 2: pand <xmm15=%xmm13,<xmm7=%xmm7 | ||
12852 | pand %xmm13,%xmm7 | ||
12853 | |||
12854 | # qhasm: xmm2 ^= xmm7 | ||
12855 | # asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3 | ||
12856 | # asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2 | ||
12857 | pxor %xmm7,%xmm2 | ||
12858 | |||
12859 | # qhasm: xmm7 ^= xmm10 | ||
12860 | # asm 1: pxor <xmm10=int6464#11,<xmm7=int6464#8 | ||
12861 | # asm 2: pxor <xmm10=%xmm10,<xmm7=%xmm7 | ||
12862 | pxor %xmm10,%xmm7 | ||
12863 | |||
12864 | # qhasm: xmm12 ^= xmm0 | ||
12865 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#9 | ||
12866 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm8 | ||
12867 | pxor %xmm0,%xmm8 | ||
12868 | |||
12869 | # qhasm: xmm8 ^= xmm6 | ||
12870 | # asm 1: pxor <xmm6=int6464#7,<xmm8=int6464#10 | ||
12871 | # asm 2: pxor <xmm6=%xmm6,<xmm8=%xmm9 | ||
12872 | pxor %xmm6,%xmm9 | ||
12873 | |||
12874 | # qhasm: xmm15 ^= xmm13 | ||
12875 | # asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14 | ||
12876 | # asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13 | ||
12877 | pxor %xmm15,%xmm13 | ||
12878 | |||
12879 | # qhasm: xmm14 ^= xmm9 | ||
12880 | # asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12 | ||
12881 | # asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11 | ||
12882 | pxor %xmm12,%xmm11 | ||
12883 | |||
12884 | # qhasm: xmm11 = xmm15 | ||
12885 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
12886 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
12887 | movdqa %xmm13,%xmm10 | ||
12888 | |||
12889 | # qhasm: xmm11 ^= xmm14 | ||
12890 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
12891 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
12892 | pxor %xmm11,%xmm10 | ||
12893 | |||
12894 | # qhasm: xmm11 &= xmm12 | ||
12895 | # asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11 | ||
12896 | # asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10 | ||
12897 | pand %xmm8,%xmm10 | ||
12898 | |||
12899 | # qhasm: xmm12 ^= xmm8 | ||
12900 | # asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9 | ||
12901 | # asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8 | ||
12902 | pxor %xmm9,%xmm8 | ||
12903 | |||
12904 | # qhasm: xmm12 &= xmm14 | ||
12905 | # asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9 | ||
12906 | # asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8 | ||
12907 | pand %xmm11,%xmm8 | ||
12908 | |||
12909 | # qhasm: xmm8 &= xmm15 | ||
12910 | # asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10 | ||
12911 | # asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9 | ||
12912 | pand %xmm13,%xmm9 | ||
12913 | |||
12914 | # qhasm: xmm8 ^= xmm12 | ||
12915 | # asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10 | ||
12916 | # asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9 | ||
12917 | pxor %xmm8,%xmm9 | ||
12918 | |||
12919 | # qhasm: xmm12 ^= xmm11 | ||
12920 | # asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9 | ||
12921 | # asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8 | ||
12922 | pxor %xmm10,%xmm8 | ||
12923 | |||
12924 | # qhasm: xmm10 = xmm13 | ||
12925 | # asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11 | ||
12926 | # asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10 | ||
12927 | movdqa %xmm15,%xmm10 | ||
12928 | |||
12929 | # qhasm: xmm10 ^= xmm9 | ||
12930 | # asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11 | ||
12931 | # asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10 | ||
12932 | pxor %xmm12,%xmm10 | ||
12933 | |||
12934 | # qhasm: xmm10 &= xmm0 | ||
12935 | # asm 1: pand <xmm0=int6464#1,<xmm10=int6464#11 | ||
12936 | # asm 2: pand <xmm0=%xmm0,<xmm10=%xmm10 | ||
12937 | pand %xmm0,%xmm10 | ||
12938 | |||
12939 | # qhasm: xmm0 ^= xmm6 | ||
12940 | # asm 1: pxor <xmm6=int6464#7,<xmm0=int6464#1 | ||
12941 | # asm 2: pxor <xmm6=%xmm6,<xmm0=%xmm0 | ||
12942 | pxor %xmm6,%xmm0 | ||
12943 | |||
12944 | # qhasm: xmm0 &= xmm9 | ||
12945 | # asm 1: pand <xmm9=int6464#13,<xmm0=int6464#1 | ||
12946 | # asm 2: pand <xmm9=%xmm12,<xmm0=%xmm0 | ||
12947 | pand %xmm12,%xmm0 | ||
12948 | |||
12949 | # qhasm: xmm6 &= xmm13 | ||
12950 | # asm 1: pand <xmm13=int6464#16,<xmm6=int6464#7 | ||
12951 | # asm 2: pand <xmm13=%xmm15,<xmm6=%xmm6 | ||
12952 | pand %xmm15,%xmm6 | ||
12953 | |||
12954 | # qhasm: xmm0 ^= xmm6 | ||
12955 | # asm 1: pxor <xmm6=int6464#7,<xmm0=int6464#1 | ||
12956 | # asm 2: pxor <xmm6=%xmm6,<xmm0=%xmm0 | ||
12957 | pxor %xmm6,%xmm0 | ||
12958 | |||
12959 | # qhasm: xmm6 ^= xmm10 | ||
12960 | # asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7 | ||
12961 | # asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6 | ||
12962 | pxor %xmm10,%xmm6 | ||
12963 | |||
12964 | # qhasm: xmm2 ^= xmm12 | ||
12965 | # asm 1: pxor <xmm12=int6464#9,<xmm2=int6464#3 | ||
12966 | # asm 2: pxor <xmm12=%xmm8,<xmm2=%xmm2 | ||
12967 | pxor %xmm8,%xmm2 | ||
12968 | |||
12969 | # qhasm: xmm0 ^= xmm12 | ||
12970 | # asm 1: pxor <xmm12=int6464#9,<xmm0=int6464#1 | ||
12971 | # asm 2: pxor <xmm12=%xmm8,<xmm0=%xmm0 | ||
12972 | pxor %xmm8,%xmm0 | ||
12973 | |||
12974 | # qhasm: xmm7 ^= xmm8 | ||
12975 | # asm 1: pxor <xmm8=int6464#10,<xmm7=int6464#8 | ||
12976 | # asm 2: pxor <xmm8=%xmm9,<xmm7=%xmm7 | ||
12977 | pxor %xmm9,%xmm7 | ||
12978 | |||
12979 | # qhasm: xmm6 ^= xmm8 | ||
12980 | # asm 1: pxor <xmm8=int6464#10,<xmm6=int6464#7 | ||
12981 | # asm 2: pxor <xmm8=%xmm9,<xmm6=%xmm6 | ||
12982 | pxor %xmm9,%xmm6 | ||
12983 | |||
12984 | # qhasm: xmm12 = xmm5 | ||
12985 | # asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#9 | ||
12986 | # asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm8 | ||
12987 | movdqa %xmm5,%xmm8 | ||
12988 | |||
12989 | # qhasm: xmm8 = xmm1 | ||
12990 | # asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10 | ||
12991 | # asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9 | ||
12992 | movdqa %xmm1,%xmm9 | ||
12993 | |||
12994 | # qhasm: xmm12 ^= xmm3 | ||
12995 | # asm 1: pxor <xmm3=int6464#4,<xmm12=int6464#9 | ||
12996 | # asm 2: pxor <xmm3=%xmm3,<xmm12=%xmm8 | ||
12997 | pxor %xmm3,%xmm8 | ||
12998 | |||
12999 | # qhasm: xmm8 ^= xmm4 | ||
13000 | # asm 1: pxor <xmm4=int6464#5,<xmm8=int6464#10 | ||
13001 | # asm 2: pxor <xmm4=%xmm4,<xmm8=%xmm9 | ||
13002 | pxor %xmm4,%xmm9 | ||
13003 | |||
13004 | # qhasm: xmm11 = xmm15 | ||
13005 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
13006 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
13007 | movdqa %xmm13,%xmm10 | ||
13008 | |||
13009 | # qhasm: xmm11 ^= xmm14 | ||
13010 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
13011 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
13012 | pxor %xmm11,%xmm10 | ||
13013 | |||
13014 | # qhasm: xmm11 &= xmm12 | ||
13015 | # asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11 | ||
13016 | # asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10 | ||
13017 | pand %xmm8,%xmm10 | ||
13018 | |||
13019 | # qhasm: xmm12 ^= xmm8 | ||
13020 | # asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9 | ||
13021 | # asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8 | ||
13022 | pxor %xmm9,%xmm8 | ||
13023 | |||
13024 | # qhasm: xmm12 &= xmm14 | ||
13025 | # asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9 | ||
13026 | # asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8 | ||
13027 | pand %xmm11,%xmm8 | ||
13028 | |||
13029 | # qhasm: xmm8 &= xmm15 | ||
13030 | # asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10 | ||
13031 | # asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9 | ||
13032 | pand %xmm13,%xmm9 | ||
13033 | |||
13034 | # qhasm: xmm8 ^= xmm12 | ||
13035 | # asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10 | ||
13036 | # asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9 | ||
13037 | pxor %xmm8,%xmm9 | ||
13038 | |||
13039 | # qhasm: xmm12 ^= xmm11 | ||
13040 | # asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9 | ||
13041 | # asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8 | ||
13042 | pxor %xmm10,%xmm8 | ||
13043 | |||
13044 | # qhasm: xmm10 = xmm13 | ||
13045 | # asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11 | ||
13046 | # asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10 | ||
13047 | movdqa %xmm15,%xmm10 | ||
13048 | |||
13049 | # qhasm: xmm10 ^= xmm9 | ||
13050 | # asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11 | ||
13051 | # asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10 | ||
13052 | pxor %xmm12,%xmm10 | ||
13053 | |||
13054 | # qhasm: xmm10 &= xmm3 | ||
13055 | # asm 1: pand <xmm3=int6464#4,<xmm10=int6464#11 | ||
13056 | # asm 2: pand <xmm3=%xmm3,<xmm10=%xmm10 | ||
13057 | pand %xmm3,%xmm10 | ||
13058 | |||
13059 | # qhasm: xmm3 ^= xmm4 | ||
13060 | # asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4 | ||
13061 | # asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3 | ||
13062 | pxor %xmm4,%xmm3 | ||
13063 | |||
13064 | # qhasm: xmm3 &= xmm9 | ||
13065 | # asm 1: pand <xmm9=int6464#13,<xmm3=int6464#4 | ||
13066 | # asm 2: pand <xmm9=%xmm12,<xmm3=%xmm3 | ||
13067 | pand %xmm12,%xmm3 | ||
13068 | |||
13069 | # qhasm: xmm4 &= xmm13 | ||
13070 | # asm 1: pand <xmm13=int6464#16,<xmm4=int6464#5 | ||
13071 | # asm 2: pand <xmm13=%xmm15,<xmm4=%xmm4 | ||
13072 | pand %xmm15,%xmm4 | ||
13073 | |||
13074 | # qhasm: xmm3 ^= xmm4 | ||
13075 | # asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4 | ||
13076 | # asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3 | ||
13077 | pxor %xmm4,%xmm3 | ||
13078 | |||
13079 | # qhasm: xmm4 ^= xmm10 | ||
13080 | # asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5 | ||
13081 | # asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4 | ||
13082 | pxor %xmm10,%xmm4 | ||
13083 | |||
13084 | # qhasm: xmm15 ^= xmm13 | ||
13085 | # asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14 | ||
13086 | # asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13 | ||
13087 | pxor %xmm15,%xmm13 | ||
13088 | |||
13089 | # qhasm: xmm14 ^= xmm9 | ||
13090 | # asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12 | ||
13091 | # asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11 | ||
13092 | pxor %xmm12,%xmm11 | ||
13093 | |||
13094 | # qhasm: xmm11 = xmm15 | ||
13095 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
13096 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
13097 | movdqa %xmm13,%xmm10 | ||
13098 | |||
13099 | # qhasm: xmm11 ^= xmm14 | ||
13100 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
13101 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
13102 | pxor %xmm11,%xmm10 | ||
13103 | |||
13104 | # qhasm: xmm11 &= xmm5 | ||
13105 | # asm 1: pand <xmm5=int6464#6,<xmm11=int6464#11 | ||
13106 | # asm 2: pand <xmm5=%xmm5,<xmm11=%xmm10 | ||
13107 | pand %xmm5,%xmm10 | ||
13108 | |||
13109 | # qhasm: xmm5 ^= xmm1 | ||
13110 | # asm 1: pxor <xmm1=int6464#2,<xmm5=int6464#6 | ||
13111 | # asm 2: pxor <xmm1=%xmm1,<xmm5=%xmm5 | ||
13112 | pxor %xmm1,%xmm5 | ||
13113 | |||
13114 | # qhasm: xmm5 &= xmm14 | ||
13115 | # asm 1: pand <xmm14=int6464#12,<xmm5=int6464#6 | ||
13116 | # asm 2: pand <xmm14=%xmm11,<xmm5=%xmm5 | ||
13117 | pand %xmm11,%xmm5 | ||
13118 | |||
13119 | # qhasm: xmm1 &= xmm15 | ||
13120 | # asm 1: pand <xmm15=int6464#14,<xmm1=int6464#2 | ||
13121 | # asm 2: pand <xmm15=%xmm13,<xmm1=%xmm1 | ||
13122 | pand %xmm13,%xmm1 | ||
13123 | |||
13124 | # qhasm: xmm5 ^= xmm1 | ||
13125 | # asm 1: pxor <xmm1=int6464#2,<xmm5=int6464#6 | ||
13126 | # asm 2: pxor <xmm1=%xmm1,<xmm5=%xmm5 | ||
13127 | pxor %xmm1,%xmm5 | ||
13128 | |||
13129 | # qhasm: xmm1 ^= xmm11 | ||
13130 | # asm 1: pxor <xmm11=int6464#11,<xmm1=int6464#2 | ||
13131 | # asm 2: pxor <xmm11=%xmm10,<xmm1=%xmm1 | ||
13132 | pxor %xmm10,%xmm1 | ||
13133 | |||
13134 | # qhasm: xmm5 ^= xmm12 | ||
13135 | # asm 1: pxor <xmm12=int6464#9,<xmm5=int6464#6 | ||
13136 | # asm 2: pxor <xmm12=%xmm8,<xmm5=%xmm5 | ||
13137 | pxor %xmm8,%xmm5 | ||
13138 | |||
13139 | # qhasm: xmm3 ^= xmm12 | ||
13140 | # asm 1: pxor <xmm12=int6464#9,<xmm3=int6464#4 | ||
13141 | # asm 2: pxor <xmm12=%xmm8,<xmm3=%xmm3 | ||
13142 | pxor %xmm8,%xmm3 | ||
13143 | |||
13144 | # qhasm: xmm1 ^= xmm8 | ||
13145 | # asm 1: pxor <xmm8=int6464#10,<xmm1=int6464#2 | ||
13146 | # asm 2: pxor <xmm8=%xmm9,<xmm1=%xmm1 | ||
13147 | pxor %xmm9,%xmm1 | ||
13148 | |||
13149 | # qhasm: xmm4 ^= xmm8 | ||
13150 | # asm 1: pxor <xmm8=int6464#10,<xmm4=int6464#5 | ||
13151 | # asm 2: pxor <xmm8=%xmm9,<xmm4=%xmm4 | ||
13152 | pxor %xmm9,%xmm4 | ||
13153 | |||
13154 | # qhasm: xmm5 ^= xmm0 | ||
13155 | # asm 1: pxor <xmm0=int6464#1,<xmm5=int6464#6 | ||
13156 | # asm 2: pxor <xmm0=%xmm0,<xmm5=%xmm5 | ||
13157 | pxor %xmm0,%xmm5 | ||
13158 | |||
13159 | # qhasm: xmm1 ^= xmm2 | ||
13160 | # asm 1: pxor <xmm2=int6464#3,<xmm1=int6464#2 | ||
13161 | # asm 2: pxor <xmm2=%xmm2,<xmm1=%xmm1 | ||
13162 | pxor %xmm2,%xmm1 | ||
13163 | |||
13164 | # qhasm: xmm3 ^= xmm5 | ||
13165 | # asm 1: pxor <xmm5=int6464#6,<xmm3=int6464#4 | ||
13166 | # asm 2: pxor <xmm5=%xmm5,<xmm3=%xmm3 | ||
13167 | pxor %xmm5,%xmm3 | ||
13168 | |||
13169 | # qhasm: xmm2 ^= xmm0 | ||
13170 | # asm 1: pxor <xmm0=int6464#1,<xmm2=int6464#3 | ||
13171 | # asm 2: pxor <xmm0=%xmm0,<xmm2=%xmm2 | ||
13172 | pxor %xmm0,%xmm2 | ||
13173 | |||
13174 | # qhasm: xmm0 ^= xmm1 | ||
13175 | # asm 1: pxor <xmm1=int6464#2,<xmm0=int6464#1 | ||
13176 | # asm 2: pxor <xmm1=%xmm1,<xmm0=%xmm0 | ||
13177 | pxor %xmm1,%xmm0 | ||
13178 | |||
13179 | # qhasm: xmm1 ^= xmm7 | ||
13180 | # asm 1: pxor <xmm7=int6464#8,<xmm1=int6464#2 | ||
13181 | # asm 2: pxor <xmm7=%xmm7,<xmm1=%xmm1 | ||
13182 | pxor %xmm7,%xmm1 | ||
13183 | |||
13184 | # qhasm: xmm7 ^= xmm4 | ||
13185 | # asm 1: pxor <xmm4=int6464#5,<xmm7=int6464#8 | ||
13186 | # asm 2: pxor <xmm4=%xmm4,<xmm7=%xmm7 | ||
13187 | pxor %xmm4,%xmm7 | ||
13188 | |||
13189 | # qhasm: xmm3 ^= xmm7 | ||
13190 | # asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4 | ||
13191 | # asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3 | ||
13192 | pxor %xmm7,%xmm3 | ||
13193 | |||
13194 | # qhasm: xmm4 ^= xmm6 | ||
13195 | # asm 1: pxor <xmm6=int6464#7,<xmm4=int6464#5 | ||
13196 | # asm 2: pxor <xmm6=%xmm6,<xmm4=%xmm4 | ||
13197 | pxor %xmm6,%xmm4 | ||
13198 | |||
13199 | # qhasm: xmm6 ^= xmm7 | ||
13200 | # asm 1: pxor <xmm7=int6464#8,<xmm6=int6464#7 | ||
13201 | # asm 2: pxor <xmm7=%xmm7,<xmm6=%xmm6 | ||
13202 | pxor %xmm7,%xmm6 | ||
13203 | |||
13204 | # qhasm: xmm2 ^= xmm6 | ||
13205 | # asm 1: pxor <xmm6=int6464#7,<xmm2=int6464#3 | ||
13206 | # asm 2: pxor <xmm6=%xmm6,<xmm2=%xmm2 | ||
13207 | pxor %xmm6,%xmm2 | ||
13208 | |||
13209 | # qhasm: xmm1 ^= RCON | ||
13210 | # asm 1: pxor RCON,<xmm1=int6464#2 | ||
13211 | # asm 2: pxor RCON,<xmm1=%xmm1 | ||
13212 | pxor RCON,%xmm1 | ||
13213 | |||
13214 | # qhasm: xmm3 ^= RCON | ||
13215 | # asm 1: pxor RCON,<xmm3=int6464#4 | ||
13216 | # asm 2: pxor RCON,<xmm3=%xmm3 | ||
13217 | pxor RCON,%xmm3 | ||
13218 | |||
13219 | # qhasm: xmm6 ^= RCON | ||
13220 | # asm 1: pxor RCON,<xmm6=int6464#7 | ||
13221 | # asm 2: pxor RCON,<xmm6=%xmm6 | ||
13222 | pxor RCON,%xmm6 | ||
13223 | |||
13224 | # qhasm: xmm5 ^= RCON | ||
13225 | # asm 1: pxor RCON,<xmm5=int6464#6 | ||
13226 | # asm 2: pxor RCON,<xmm5=%xmm5 | ||
13227 | pxor RCON,%xmm5 | ||
13228 | |||
13229 | # qhasm: shuffle bytes of xmm0 by EXPB0 | ||
13230 | # asm 1: pshufb EXPB0,<xmm0=int6464#1 | ||
13231 | # asm 2: pshufb EXPB0,<xmm0=%xmm0 | ||
13232 | pshufb EXPB0,%xmm0 | ||
13233 | |||
13234 | # qhasm: shuffle bytes of xmm1 by EXPB0 | ||
13235 | # asm 1: pshufb EXPB0,<xmm1=int6464#2 | ||
13236 | # asm 2: pshufb EXPB0,<xmm1=%xmm1 | ||
13237 | pshufb EXPB0,%xmm1 | ||
13238 | |||
13239 | # qhasm: shuffle bytes of xmm3 by EXPB0 | ||
13240 | # asm 1: pshufb EXPB0,<xmm3=int6464#4 | ||
13241 | # asm 2: pshufb EXPB0,<xmm3=%xmm3 | ||
13242 | pshufb EXPB0,%xmm3 | ||
13243 | |||
13244 | # qhasm: shuffle bytes of xmm2 by EXPB0 | ||
13245 | # asm 1: pshufb EXPB0,<xmm2=int6464#3 | ||
13246 | # asm 2: pshufb EXPB0,<xmm2=%xmm2 | ||
13247 | pshufb EXPB0,%xmm2 | ||
13248 | |||
13249 | # qhasm: shuffle bytes of xmm6 by EXPB0 | ||
13250 | # asm 1: pshufb EXPB0,<xmm6=int6464#7 | ||
13251 | # asm 2: pshufb EXPB0,<xmm6=%xmm6 | ||
13252 | pshufb EXPB0,%xmm6 | ||
13253 | |||
13254 | # qhasm: shuffle bytes of xmm5 by EXPB0 | ||
13255 | # asm 1: pshufb EXPB0,<xmm5=int6464#6 | ||
13256 | # asm 2: pshufb EXPB0,<xmm5=%xmm5 | ||
13257 | pshufb EXPB0,%xmm5 | ||
13258 | |||
13259 | # qhasm: shuffle bytes of xmm4 by EXPB0 | ||
13260 | # asm 1: pshufb EXPB0,<xmm4=int6464#5 | ||
13261 | # asm 2: pshufb EXPB0,<xmm4=%xmm4 | ||
13262 | pshufb EXPB0,%xmm4 | ||
13263 | |||
13264 | # qhasm: shuffle bytes of xmm7 by EXPB0 | ||
13265 | # asm 1: pshufb EXPB0,<xmm7=int6464#8 | ||
13266 | # asm 2: pshufb EXPB0,<xmm7=%xmm7 | ||
13267 | pshufb EXPB0,%xmm7 | ||
13268 | |||
13269 | # qhasm: xmm8 = *(int128 *)(c + 1152) | ||
13270 | # asm 1: movdqa 1152(<c=int64#1),>xmm8=int6464#9 | ||
13271 | # asm 2: movdqa 1152(<c=%rdi),>xmm8=%xmm8 | ||
13272 | movdqa 1152(%rdi),%xmm8 | ||
13273 | |||
13274 | # qhasm: xmm9 = *(int128 *)(c + 1168) | ||
13275 | # asm 1: movdqa 1168(<c=int64#1),>xmm9=int6464#10 | ||
13276 | # asm 2: movdqa 1168(<c=%rdi),>xmm9=%xmm9 | ||
13277 | movdqa 1168(%rdi),%xmm9 | ||
13278 | |||
13279 | # qhasm: xmm10 = *(int128 *)(c + 1184) | ||
13280 | # asm 1: movdqa 1184(<c=int64#1),>xmm10=int6464#11 | ||
13281 | # asm 2: movdqa 1184(<c=%rdi),>xmm10=%xmm10 | ||
13282 | movdqa 1184(%rdi),%xmm10 | ||
13283 | |||
13284 | # qhasm: xmm11 = *(int128 *)(c + 1200) | ||
13285 | # asm 1: movdqa 1200(<c=int64#1),>xmm11=int6464#12 | ||
13286 | # asm 2: movdqa 1200(<c=%rdi),>xmm11=%xmm11 | ||
13287 | movdqa 1200(%rdi),%xmm11 | ||
13288 | |||
13289 | # qhasm: xmm12 = *(int128 *)(c + 1216) | ||
13290 | # asm 1: movdqa 1216(<c=int64#1),>xmm12=int6464#13 | ||
13291 | # asm 2: movdqa 1216(<c=%rdi),>xmm12=%xmm12 | ||
13292 | movdqa 1216(%rdi),%xmm12 | ||
13293 | |||
13294 | # qhasm: xmm13 = *(int128 *)(c + 1232) | ||
13295 | # asm 1: movdqa 1232(<c=int64#1),>xmm13=int6464#14 | ||
13296 | # asm 2: movdqa 1232(<c=%rdi),>xmm13=%xmm13 | ||
13297 | movdqa 1232(%rdi),%xmm13 | ||
13298 | |||
13299 | # qhasm: xmm14 = *(int128 *)(c + 1248) | ||
13300 | # asm 1: movdqa 1248(<c=int64#1),>xmm14=int6464#15 | ||
13301 | # asm 2: movdqa 1248(<c=%rdi),>xmm14=%xmm14 | ||
13302 | movdqa 1248(%rdi),%xmm14 | ||
13303 | |||
13304 | # qhasm: xmm15 = *(int128 *)(c + 1264) | ||
13305 | # asm 1: movdqa 1264(<c=int64#1),>xmm15=int6464#16 | ||
13306 | # asm 2: movdqa 1264(<c=%rdi),>xmm15=%xmm15 | ||
13307 | movdqa 1264(%rdi),%xmm15 | ||
13308 | |||
13309 | # qhasm: xmm8 ^= ONE | ||
13310 | # asm 1: pxor ONE,<xmm8=int6464#9 | ||
13311 | # asm 2: pxor ONE,<xmm8=%xmm8 | ||
13312 | pxor ONE,%xmm8 | ||
13313 | |||
13314 | # qhasm: xmm9 ^= ONE | ||
13315 | # asm 1: pxor ONE,<xmm9=int6464#10 | ||
13316 | # asm 2: pxor ONE,<xmm9=%xmm9 | ||
13317 | pxor ONE,%xmm9 | ||
13318 | |||
13319 | # qhasm: xmm13 ^= ONE | ||
13320 | # asm 1: pxor ONE,<xmm13=int6464#14 | ||
13321 | # asm 2: pxor ONE,<xmm13=%xmm13 | ||
13322 | pxor ONE,%xmm13 | ||
13323 | |||
13324 | # qhasm: xmm14 ^= ONE | ||
13325 | # asm 1: pxor ONE,<xmm14=int6464#15 | ||
13326 | # asm 2: pxor ONE,<xmm14=%xmm14 | ||
13327 | pxor ONE,%xmm14 | ||
13328 | |||
13329 | # qhasm: xmm0 ^= xmm8 | ||
13330 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
13331 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
13332 | pxor %xmm8,%xmm0 | ||
13333 | |||
13334 | # qhasm: xmm1 ^= xmm9 | ||
13335 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
13336 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
13337 | pxor %xmm9,%xmm1 | ||
13338 | |||
13339 | # qhasm: xmm3 ^= xmm10 | ||
13340 | # asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4 | ||
13341 | # asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3 | ||
13342 | pxor %xmm10,%xmm3 | ||
13343 | |||
13344 | # qhasm: xmm2 ^= xmm11 | ||
13345 | # asm 1: pxor <xmm11=int6464#12,<xmm2=int6464#3 | ||
13346 | # asm 2: pxor <xmm11=%xmm11,<xmm2=%xmm2 | ||
13347 | pxor %xmm11,%xmm2 | ||
13348 | |||
13349 | # qhasm: xmm6 ^= xmm12 | ||
13350 | # asm 1: pxor <xmm12=int6464#13,<xmm6=int6464#7 | ||
13351 | # asm 2: pxor <xmm12=%xmm12,<xmm6=%xmm6 | ||
13352 | pxor %xmm12,%xmm6 | ||
13353 | |||
13354 | # qhasm: xmm5 ^= xmm13 | ||
13355 | # asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6 | ||
13356 | # asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5 | ||
13357 | pxor %xmm13,%xmm5 | ||
13358 | |||
13359 | # qhasm: xmm4 ^= xmm14 | ||
13360 | # asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5 | ||
13361 | # asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4 | ||
13362 | pxor %xmm14,%xmm4 | ||
13363 | |||
13364 | # qhasm: xmm7 ^= xmm15 | ||
13365 | # asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8 | ||
13366 | # asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7 | ||
13367 | pxor %xmm15,%xmm7 | ||
13368 | |||
13369 | # qhasm: uint32323232 xmm8 >>= 8 | ||
13370 | # asm 1: psrld $8,<xmm8=int6464#9 | ||
13371 | # asm 2: psrld $8,<xmm8=%xmm8 | ||
13372 | psrld $8,%xmm8 | ||
13373 | |||
13374 | # qhasm: uint32323232 xmm9 >>= 8 | ||
13375 | # asm 1: psrld $8,<xmm9=int6464#10 | ||
13376 | # asm 2: psrld $8,<xmm9=%xmm9 | ||
13377 | psrld $8,%xmm9 | ||
13378 | |||
13379 | # qhasm: uint32323232 xmm10 >>= 8 | ||
13380 | # asm 1: psrld $8,<xmm10=int6464#11 | ||
13381 | # asm 2: psrld $8,<xmm10=%xmm10 | ||
13382 | psrld $8,%xmm10 | ||
13383 | |||
13384 | # qhasm: uint32323232 xmm11 >>= 8 | ||
13385 | # asm 1: psrld $8,<xmm11=int6464#12 | ||
13386 | # asm 2: psrld $8,<xmm11=%xmm11 | ||
13387 | psrld $8,%xmm11 | ||
13388 | |||
13389 | # qhasm: uint32323232 xmm12 >>= 8 | ||
13390 | # asm 1: psrld $8,<xmm12=int6464#13 | ||
13391 | # asm 2: psrld $8,<xmm12=%xmm12 | ||
13392 | psrld $8,%xmm12 | ||
13393 | |||
13394 | # qhasm: uint32323232 xmm13 >>= 8 | ||
13395 | # asm 1: psrld $8,<xmm13=int6464#14 | ||
13396 | # asm 2: psrld $8,<xmm13=%xmm13 | ||
13397 | psrld $8,%xmm13 | ||
13398 | |||
13399 | # qhasm: uint32323232 xmm14 >>= 8 | ||
13400 | # asm 1: psrld $8,<xmm14=int6464#15 | ||
13401 | # asm 2: psrld $8,<xmm14=%xmm14 | ||
13402 | psrld $8,%xmm14 | ||
13403 | |||
13404 | # qhasm: uint32323232 xmm15 >>= 8 | ||
13405 | # asm 1: psrld $8,<xmm15=int6464#16 | ||
13406 | # asm 2: psrld $8,<xmm15=%xmm15 | ||
13407 | psrld $8,%xmm15 | ||
13408 | |||
13409 | # qhasm: xmm0 ^= xmm8 | ||
13410 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
13411 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
13412 | pxor %xmm8,%xmm0 | ||
13413 | |||
13414 | # qhasm: xmm1 ^= xmm9 | ||
13415 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
13416 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
13417 | pxor %xmm9,%xmm1 | ||
13418 | |||
13419 | # qhasm: xmm3 ^= xmm10 | ||
13420 | # asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4 | ||
13421 | # asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3 | ||
13422 | pxor %xmm10,%xmm3 | ||
13423 | |||
13424 | # qhasm: xmm2 ^= xmm11 | ||
13425 | # asm 1: pxor <xmm11=int6464#12,<xmm2=int6464#3 | ||
13426 | # asm 2: pxor <xmm11=%xmm11,<xmm2=%xmm2 | ||
13427 | pxor %xmm11,%xmm2 | ||
13428 | |||
13429 | # qhasm: xmm6 ^= xmm12 | ||
13430 | # asm 1: pxor <xmm12=int6464#13,<xmm6=int6464#7 | ||
13431 | # asm 2: pxor <xmm12=%xmm12,<xmm6=%xmm6 | ||
13432 | pxor %xmm12,%xmm6 | ||
13433 | |||
13434 | # qhasm: xmm5 ^= xmm13 | ||
13435 | # asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6 | ||
13436 | # asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5 | ||
13437 | pxor %xmm13,%xmm5 | ||
13438 | |||
13439 | # qhasm: xmm4 ^= xmm14 | ||
13440 | # asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5 | ||
13441 | # asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4 | ||
13442 | pxor %xmm14,%xmm4 | ||
13443 | |||
13444 | # qhasm: xmm7 ^= xmm15 | ||
13445 | # asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8 | ||
13446 | # asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7 | ||
13447 | pxor %xmm15,%xmm7 | ||
13448 | |||
13449 | # qhasm: uint32323232 xmm8 >>= 8 | ||
13450 | # asm 1: psrld $8,<xmm8=int6464#9 | ||
13451 | # asm 2: psrld $8,<xmm8=%xmm8 | ||
13452 | psrld $8,%xmm8 | ||
13453 | |||
13454 | # qhasm: uint32323232 xmm9 >>= 8 | ||
13455 | # asm 1: psrld $8,<xmm9=int6464#10 | ||
13456 | # asm 2: psrld $8,<xmm9=%xmm9 | ||
13457 | psrld $8,%xmm9 | ||
13458 | |||
13459 | # qhasm: uint32323232 xmm10 >>= 8 | ||
13460 | # asm 1: psrld $8,<xmm10=int6464#11 | ||
13461 | # asm 2: psrld $8,<xmm10=%xmm10 | ||
13462 | psrld $8,%xmm10 | ||
13463 | |||
13464 | # qhasm: uint32323232 xmm11 >>= 8 | ||
13465 | # asm 1: psrld $8,<xmm11=int6464#12 | ||
13466 | # asm 2: psrld $8,<xmm11=%xmm11 | ||
13467 | psrld $8,%xmm11 | ||
13468 | |||
13469 | # qhasm: uint32323232 xmm12 >>= 8 | ||
13470 | # asm 1: psrld $8,<xmm12=int6464#13 | ||
13471 | # asm 2: psrld $8,<xmm12=%xmm12 | ||
13472 | psrld $8,%xmm12 | ||
13473 | |||
13474 | # qhasm: uint32323232 xmm13 >>= 8 | ||
13475 | # asm 1: psrld $8,<xmm13=int6464#14 | ||
13476 | # asm 2: psrld $8,<xmm13=%xmm13 | ||
13477 | psrld $8,%xmm13 | ||
13478 | |||
13479 | # qhasm: uint32323232 xmm14 >>= 8 | ||
13480 | # asm 1: psrld $8,<xmm14=int6464#15 | ||
13481 | # asm 2: psrld $8,<xmm14=%xmm14 | ||
13482 | psrld $8,%xmm14 | ||
13483 | |||
13484 | # qhasm: uint32323232 xmm15 >>= 8 | ||
13485 | # asm 1: psrld $8,<xmm15=int6464#16 | ||
13486 | # asm 2: psrld $8,<xmm15=%xmm15 | ||
13487 | psrld $8,%xmm15 | ||
13488 | |||
13489 | # qhasm: xmm0 ^= xmm8 | ||
13490 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
13491 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
13492 | pxor %xmm8,%xmm0 | ||
13493 | |||
13494 | # qhasm: xmm1 ^= xmm9 | ||
13495 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
13496 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
13497 | pxor %xmm9,%xmm1 | ||
13498 | |||
13499 | # qhasm: xmm3 ^= xmm10 | ||
13500 | # asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4 | ||
13501 | # asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3 | ||
13502 | pxor %xmm10,%xmm3 | ||
13503 | |||
13504 | # qhasm: xmm2 ^= xmm11 | ||
13505 | # asm 1: pxor <xmm11=int6464#12,<xmm2=int6464#3 | ||
13506 | # asm 2: pxor <xmm11=%xmm11,<xmm2=%xmm2 | ||
13507 | pxor %xmm11,%xmm2 | ||
13508 | |||
13509 | # qhasm: xmm6 ^= xmm12 | ||
13510 | # asm 1: pxor <xmm12=int6464#13,<xmm6=int6464#7 | ||
13511 | # asm 2: pxor <xmm12=%xmm12,<xmm6=%xmm6 | ||
13512 | pxor %xmm12,%xmm6 | ||
13513 | |||
13514 | # qhasm: xmm5 ^= xmm13 | ||
13515 | # asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6 | ||
13516 | # asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5 | ||
13517 | pxor %xmm13,%xmm5 | ||
13518 | |||
13519 | # qhasm: xmm4 ^= xmm14 | ||
13520 | # asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5 | ||
13521 | # asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4 | ||
13522 | pxor %xmm14,%xmm4 | ||
13523 | |||
13524 | # qhasm: xmm7 ^= xmm15 | ||
13525 | # asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8 | ||
13526 | # asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7 | ||
13527 | pxor %xmm15,%xmm7 | ||
13528 | |||
13529 | # qhasm: uint32323232 xmm8 >>= 8 | ||
13530 | # asm 1: psrld $8,<xmm8=int6464#9 | ||
13531 | # asm 2: psrld $8,<xmm8=%xmm8 | ||
13532 | psrld $8,%xmm8 | ||
13533 | |||
13534 | # qhasm: uint32323232 xmm9 >>= 8 | ||
13535 | # asm 1: psrld $8,<xmm9=int6464#10 | ||
13536 | # asm 2: psrld $8,<xmm9=%xmm9 | ||
13537 | psrld $8,%xmm9 | ||
13538 | |||
13539 | # qhasm: uint32323232 xmm10 >>= 8 | ||
13540 | # asm 1: psrld $8,<xmm10=int6464#11 | ||
13541 | # asm 2: psrld $8,<xmm10=%xmm10 | ||
13542 | psrld $8,%xmm10 | ||
13543 | |||
13544 | # qhasm: uint32323232 xmm11 >>= 8 | ||
13545 | # asm 1: psrld $8,<xmm11=int6464#12 | ||
13546 | # asm 2: psrld $8,<xmm11=%xmm11 | ||
13547 | psrld $8,%xmm11 | ||
13548 | |||
13549 | # qhasm: uint32323232 xmm12 >>= 8 | ||
13550 | # asm 1: psrld $8,<xmm12=int6464#13 | ||
13551 | # asm 2: psrld $8,<xmm12=%xmm12 | ||
13552 | psrld $8,%xmm12 | ||
13553 | |||
13554 | # qhasm: uint32323232 xmm13 >>= 8 | ||
13555 | # asm 1: psrld $8,<xmm13=int6464#14 | ||
13556 | # asm 2: psrld $8,<xmm13=%xmm13 | ||
13557 | psrld $8,%xmm13 | ||
13558 | |||
13559 | # qhasm: uint32323232 xmm14 >>= 8 | ||
13560 | # asm 1: psrld $8,<xmm14=int6464#15 | ||
13561 | # asm 2: psrld $8,<xmm14=%xmm14 | ||
13562 | psrld $8,%xmm14 | ||
13563 | |||
13564 | # qhasm: uint32323232 xmm15 >>= 8 | ||
13565 | # asm 1: psrld $8,<xmm15=int6464#16 | ||
13566 | # asm 2: psrld $8,<xmm15=%xmm15 | ||
13567 | psrld $8,%xmm15 | ||
13568 | |||
13569 | # qhasm: xmm0 ^= xmm8 | ||
13570 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
13571 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
13572 | pxor %xmm8,%xmm0 | ||
13573 | |||
13574 | # qhasm: xmm1 ^= xmm9 | ||
13575 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
13576 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
13577 | pxor %xmm9,%xmm1 | ||
13578 | |||
13579 | # qhasm: xmm3 ^= xmm10 | ||
13580 | # asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4 | ||
13581 | # asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3 | ||
13582 | pxor %xmm10,%xmm3 | ||
13583 | |||
13584 | # qhasm: xmm2 ^= xmm11 | ||
13585 | # asm 1: pxor <xmm11=int6464#12,<xmm2=int6464#3 | ||
13586 | # asm 2: pxor <xmm11=%xmm11,<xmm2=%xmm2 | ||
13587 | pxor %xmm11,%xmm2 | ||
13588 | |||
13589 | # qhasm: xmm6 ^= xmm12 | ||
13590 | # asm 1: pxor <xmm12=int6464#13,<xmm6=int6464#7 | ||
13591 | # asm 2: pxor <xmm12=%xmm12,<xmm6=%xmm6 | ||
13592 | pxor %xmm12,%xmm6 | ||
13593 | |||
13594 | # qhasm: xmm5 ^= xmm13 | ||
13595 | # asm 1: pxor <xmm13=int6464#14,<xmm5=int6464#6 | ||
13596 | # asm 2: pxor <xmm13=%xmm13,<xmm5=%xmm5 | ||
13597 | pxor %xmm13,%xmm5 | ||
13598 | |||
13599 | # qhasm: xmm4 ^= xmm14 | ||
13600 | # asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5 | ||
13601 | # asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4 | ||
13602 | pxor %xmm14,%xmm4 | ||
13603 | |||
13604 | # qhasm: xmm7 ^= xmm15 | ||
13605 | # asm 1: pxor <xmm15=int6464#16,<xmm7=int6464#8 | ||
13606 | # asm 2: pxor <xmm15=%xmm15,<xmm7=%xmm7 | ||
13607 | pxor %xmm15,%xmm7 | ||
13608 | |||
13609 | # qhasm: shuffle bytes of xmm0 by M0 | ||
13610 | # asm 1: pshufb M0,<xmm0=int6464#1 | ||
13611 | # asm 2: pshufb M0,<xmm0=%xmm0 | ||
13612 | pshufb M0,%xmm0 | ||
13613 | |||
13614 | # qhasm: shuffle bytes of xmm1 by M0 | ||
13615 | # asm 1: pshufb M0,<xmm1=int6464#2 | ||
13616 | # asm 2: pshufb M0,<xmm1=%xmm1 | ||
13617 | pshufb M0,%xmm1 | ||
13618 | |||
13619 | # qhasm: shuffle bytes of xmm4 by M0 | ||
13620 | # asm 1: pshufb M0,<xmm4=int6464#5 | ||
13621 | # asm 2: pshufb M0,<xmm4=%xmm4 | ||
13622 | pshufb M0,%xmm4 | ||
13623 | |||
13624 | # qhasm: shuffle bytes of xmm6 by M0 | ||
13625 | # asm 1: pshufb M0,<xmm6=int6464#7 | ||
13626 | # asm 2: pshufb M0,<xmm6=%xmm6 | ||
13627 | pshufb M0,%xmm6 | ||
13628 | |||
13629 | # qhasm: shuffle bytes of xmm3 by M0 | ||
13630 | # asm 1: pshufb M0,<xmm3=int6464#4 | ||
13631 | # asm 2: pshufb M0,<xmm3=%xmm3 | ||
13632 | pshufb M0,%xmm3 | ||
13633 | |||
13634 | # qhasm: shuffle bytes of xmm7 by M0 | ||
13635 | # asm 1: pshufb M0,<xmm7=int6464#8 | ||
13636 | # asm 2: pshufb M0,<xmm7=%xmm7 | ||
13637 | pshufb M0,%xmm7 | ||
13638 | |||
13639 | # qhasm: shuffle bytes of xmm2 by M0 | ||
13640 | # asm 1: pshufb M0,<xmm2=int6464#3 | ||
13641 | # asm 2: pshufb M0,<xmm2=%xmm2 | ||
13642 | pshufb M0,%xmm2 | ||
13643 | |||
13644 | # qhasm: shuffle bytes of xmm5 by M0 | ||
13645 | # asm 1: pshufb M0,<xmm5=int6464#6 | ||
13646 | # asm 2: pshufb M0,<xmm5=%xmm5 | ||
13647 | pshufb M0,%xmm5 | ||
13648 | |||
13649 | # qhasm: *(int128 *)(c + 1280) = xmm0 | ||
13650 | # asm 1: movdqa <xmm0=int6464#1,1280(<c=int64#1) | ||
13651 | # asm 2: movdqa <xmm0=%xmm0,1280(<c=%rdi) | ||
13652 | movdqa %xmm0,1280(%rdi) | ||
13653 | |||
13654 | # qhasm: *(int128 *)(c + 1296) = xmm1 | ||
13655 | # asm 1: movdqa <xmm1=int6464#2,1296(<c=int64#1) | ||
13656 | # asm 2: movdqa <xmm1=%xmm1,1296(<c=%rdi) | ||
13657 | movdqa %xmm1,1296(%rdi) | ||
13658 | |||
13659 | # qhasm: *(int128 *)(c + 1312) = xmm3 | ||
13660 | # asm 1: movdqa <xmm3=int6464#4,1312(<c=int64#1) | ||
13661 | # asm 2: movdqa <xmm3=%xmm3,1312(<c=%rdi) | ||
13662 | movdqa %xmm3,1312(%rdi) | ||
13663 | |||
13664 | # qhasm: *(int128 *)(c + 1328) = xmm2 | ||
13665 | # asm 1: movdqa <xmm2=int6464#3,1328(<c=int64#1) | ||
13666 | # asm 2: movdqa <xmm2=%xmm2,1328(<c=%rdi) | ||
13667 | movdqa %xmm2,1328(%rdi) | ||
13668 | |||
13669 | # qhasm: *(int128 *)(c + 1344) = xmm6 | ||
13670 | # asm 1: movdqa <xmm6=int6464#7,1344(<c=int64#1) | ||
13671 | # asm 2: movdqa <xmm6=%xmm6,1344(<c=%rdi) | ||
13672 | movdqa %xmm6,1344(%rdi) | ||
13673 | |||
13674 | # qhasm: *(int128 *)(c + 1360) = xmm5 | ||
13675 | # asm 1: movdqa <xmm5=int6464#6,1360(<c=int64#1) | ||
13676 | # asm 2: movdqa <xmm5=%xmm5,1360(<c=%rdi) | ||
13677 | movdqa %xmm5,1360(%rdi) | ||
13678 | |||
13679 | # qhasm: *(int128 *)(c + 1376) = xmm4 | ||
13680 | # asm 1: movdqa <xmm4=int6464#5,1376(<c=int64#1) | ||
13681 | # asm 2: movdqa <xmm4=%xmm4,1376(<c=%rdi) | ||
13682 | movdqa %xmm4,1376(%rdi) | ||
13683 | |||
13684 | # qhasm: *(int128 *)(c + 1392) = xmm7 | ||
13685 | # asm 1: movdqa <xmm7=int6464#8,1392(<c=int64#1) | ||
13686 | # asm 2: movdqa <xmm7=%xmm7,1392(<c=%rdi) | ||
13687 | movdqa %xmm7,1392(%rdi) | ||
13688 | |||
13689 | # qhasm: leave | ||
13690 | add %r11,%rsp | ||
13691 | mov %rdi,%rax | ||
13692 | mov %rsi,%rdx | ||
13693 | xor %rax,%rax | ||
13694 | ret | ||
diff --git a/nacl/crypto_stream/aes128ctr/core2/stream.c b/nacl/crypto_stream/aes128ctr/core2/stream.c new file mode 100644 index 00000000..53524a62 --- /dev/null +++ b/nacl/crypto_stream/aes128ctr/core2/stream.c | |||
@@ -0,0 +1,14 @@ | |||
1 | #include "crypto_stream.h" | ||
2 | |||
3 | int crypto_stream( | ||
4 | unsigned char *out, | ||
5 | unsigned long long outlen, | ||
6 | const unsigned char *n, | ||
7 | const unsigned char *k | ||
8 | ) | ||
9 | { | ||
10 | unsigned char d[crypto_stream_BEFORENMBYTES]; | ||
11 | crypto_stream_beforenm(d, k); | ||
12 | crypto_stream_afternm(out, outlen, n, d); | ||
13 | return 0; | ||
14 | } | ||
diff --git a/nacl/crypto_stream/aes128ctr/core2/xor.c b/nacl/crypto_stream/aes128ctr/core2/xor.c new file mode 100644 index 00000000..825088cc --- /dev/null +++ b/nacl/crypto_stream/aes128ctr/core2/xor.c | |||
@@ -0,0 +1,15 @@ | |||
1 | #include "crypto_stream.h" | ||
2 | |||
3 | int crypto_stream_xor( | ||
4 | unsigned char *out, | ||
5 | const unsigned char *in, | ||
6 | unsigned long long inlen, | ||
7 | const unsigned char *n, | ||
8 | const unsigned char *k | ||
9 | ) | ||
10 | { | ||
11 | unsigned char d[crypto_stream_BEFORENMBYTES]; | ||
12 | crypto_stream_beforenm(d, k); | ||
13 | crypto_stream_xor_afternm(out, in, inlen, n, d); | ||
14 | return 0; | ||
15 | } | ||
diff --git a/nacl/crypto_stream/aes128ctr/core2/xor_afternm.s b/nacl/crypto_stream/aes128ctr/core2/xor_afternm.s new file mode 100644 index 00000000..022691a2 --- /dev/null +++ b/nacl/crypto_stream/aes128ctr/core2/xor_afternm.s | |||
@@ -0,0 +1,12407 @@ | |||
1 | # Author: Emilia Käsper and Peter Schwabe | ||
2 | # Date: 2009-03-19 | ||
3 | # +2010.01.31: minor namespace modifications | ||
4 | # Public domain | ||
5 | |||
6 | .data | ||
7 | .p2align 6 | ||
8 | |||
9 | RCON: .int 0x00000000, 0x00000000, 0x00000000, 0xffffffff | ||
10 | ROTB: .int 0x0c000000, 0x00000000, 0x04000000, 0x08000000 | ||
11 | EXPB0: .int 0x03030303, 0x07070707, 0x0b0b0b0b, 0x0f0f0f0f | ||
12 | CTRINC1: .int 0x00000001, 0x00000000, 0x00000000, 0x00000000 | ||
13 | CTRINC2: .int 0x00000002, 0x00000000, 0x00000000, 0x00000000 | ||
14 | CTRINC3: .int 0x00000003, 0x00000000, 0x00000000, 0x00000000 | ||
15 | CTRINC4: .int 0x00000004, 0x00000000, 0x00000000, 0x00000000 | ||
16 | CTRINC5: .int 0x00000005, 0x00000000, 0x00000000, 0x00000000 | ||
17 | CTRINC6: .int 0x00000006, 0x00000000, 0x00000000, 0x00000000 | ||
18 | CTRINC7: .int 0x00000007, 0x00000000, 0x00000000, 0x00000000 | ||
19 | RCTRINC1: .int 0x00000000, 0x00000000, 0x00000000, 0x00000001 | ||
20 | RCTRINC2: .int 0x00000000, 0x00000000, 0x00000000, 0x00000002 | ||
21 | RCTRINC3: .int 0x00000000, 0x00000000, 0x00000000, 0x00000003 | ||
22 | RCTRINC4: .int 0x00000000, 0x00000000, 0x00000000, 0x00000004 | ||
23 | RCTRINC5: .int 0x00000000, 0x00000000, 0x00000000, 0x00000005 | ||
24 | RCTRINC6: .int 0x00000000, 0x00000000, 0x00000000, 0x00000006 | ||
25 | RCTRINC7: .int 0x00000000, 0x00000000, 0x00000000, 0x00000007 | ||
26 | |||
27 | SWAP32: .int 0x00010203, 0x04050607, 0x08090a0b, 0x0c0d0e0f | ||
28 | M0SWAP: .quad 0x0105090d0004080c , 0x03070b0f02060a0e | ||
29 | |||
30 | BS0: .quad 0x5555555555555555, 0x5555555555555555 | ||
31 | BS1: .quad 0x3333333333333333, 0x3333333333333333 | ||
32 | BS2: .quad 0x0f0f0f0f0f0f0f0f, 0x0f0f0f0f0f0f0f0f | ||
33 | ONE: .quad 0xffffffffffffffff, 0xffffffffffffffff | ||
34 | M0: .quad 0x02060a0e03070b0f, 0x0004080c0105090d | ||
35 | SRM0: .quad 0x0304090e00050a0f, 0x01060b0c0207080d | ||
36 | SR: .quad 0x0504070600030201, 0x0f0e0d0c0a09080b | ||
37 | |||
38 | # qhasm: int64 outp | ||
39 | |||
40 | # qhasm: int64 inp | ||
41 | |||
42 | # qhasm: int64 len | ||
43 | |||
44 | # qhasm: int64 np | ||
45 | |||
46 | # qhasm: int64 c | ||
47 | |||
48 | # qhasm: input outp | ||
49 | |||
50 | # qhasm: input inp | ||
51 | |||
52 | # qhasm: input len | ||
53 | |||
54 | # qhasm: input np | ||
55 | |||
56 | # qhasm: input c | ||
57 | |||
58 | # qhasm: int64 lensav | ||
59 | |||
60 | # qhasm: int64 tmp | ||
61 | |||
62 | # qhasm: int6464 xmm0 | ||
63 | |||
64 | # qhasm: int6464 xmm1 | ||
65 | |||
66 | # qhasm: int6464 xmm2 | ||
67 | |||
68 | # qhasm: int6464 xmm3 | ||
69 | |||
70 | # qhasm: int6464 xmm4 | ||
71 | |||
72 | # qhasm: int6464 xmm5 | ||
73 | |||
74 | # qhasm: int6464 xmm6 | ||
75 | |||
76 | # qhasm: int6464 xmm7 | ||
77 | |||
78 | # qhasm: int6464 xmm8 | ||
79 | |||
80 | # qhasm: int6464 xmm9 | ||
81 | |||
82 | # qhasm: int6464 xmm10 | ||
83 | |||
84 | # qhasm: int6464 xmm11 | ||
85 | |||
86 | # qhasm: int6464 xmm12 | ||
87 | |||
88 | # qhasm: int6464 xmm13 | ||
89 | |||
90 | # qhasm: int6464 xmm14 | ||
91 | |||
92 | # qhasm: int6464 xmm15 | ||
93 | |||
94 | # qhasm: int6464 t | ||
95 | |||
96 | # qhasm: stack1024 bl | ||
97 | |||
98 | # qhasm: stack128 nonce_stack | ||
99 | |||
100 | # qhasm: int64 blp | ||
101 | |||
102 | # qhasm: int64 b | ||
103 | |||
104 | # qhasm: enter crypto_stream_aes128ctr_core2_xor_afternm | ||
105 | .text | ||
106 | .p2align 5 | ||
107 | .globl _crypto_stream_aes128ctr_core2_xor_afternm | ||
108 | .globl crypto_stream_aes128ctr_core2_xor_afternm | ||
109 | _crypto_stream_aes128ctr_core2_xor_afternm: | ||
110 | crypto_stream_aes128ctr_core2_xor_afternm: | ||
111 | mov %rsp,%r11 | ||
112 | and $31,%r11 | ||
113 | add $160,%r11 | ||
114 | sub %r11,%rsp | ||
115 | |||
116 | # qhasm: xmm0 = *(int128 *) (np + 0) | ||
117 | # asm 1: movdqa 0(<np=int64#4),>xmm0=int6464#1 | ||
118 | # asm 2: movdqa 0(<np=%rcx),>xmm0=%xmm0 | ||
119 | movdqa 0(%rcx),%xmm0 | ||
120 | |||
121 | # qhasm: nonce_stack = xmm0 | ||
122 | # asm 1: movdqa <xmm0=int6464#1,>nonce_stack=stack128#1 | ||
123 | # asm 2: movdqa <xmm0=%xmm0,>nonce_stack=0(%rsp) | ||
124 | movdqa %xmm0,0(%rsp) | ||
125 | |||
126 | # qhasm: np = &nonce_stack | ||
127 | # asm 1: leaq <nonce_stack=stack128#1,>np=int64#4 | ||
128 | # asm 2: leaq <nonce_stack=0(%rsp),>np=%rcx | ||
129 | leaq 0(%rsp),%rcx | ||
130 | |||
131 | # qhasm: enc_block: | ||
132 | ._enc_block: | ||
133 | |||
134 | # qhasm: xmm0 = *(int128 *) (np + 0) | ||
135 | # asm 1: movdqa 0(<np=int64#4),>xmm0=int6464#1 | ||
136 | # asm 2: movdqa 0(<np=%rcx),>xmm0=%xmm0 | ||
137 | movdqa 0(%rcx),%xmm0 | ||
138 | |||
139 | # qhasm: xmm1 = xmm0 | ||
140 | # asm 1: movdqa <xmm0=int6464#1,>xmm1=int6464#2 | ||
141 | # asm 2: movdqa <xmm0=%xmm0,>xmm1=%xmm1 | ||
142 | movdqa %xmm0,%xmm1 | ||
143 | |||
144 | # qhasm: shuffle bytes of xmm1 by SWAP32 | ||
145 | # asm 1: pshufb SWAP32,<xmm1=int6464#2 | ||
146 | # asm 2: pshufb SWAP32,<xmm1=%xmm1 | ||
147 | pshufb SWAP32,%xmm1 | ||
148 | |||
149 | # qhasm: xmm2 = xmm1 | ||
150 | # asm 1: movdqa <xmm1=int6464#2,>xmm2=int6464#3 | ||
151 | # asm 2: movdqa <xmm1=%xmm1,>xmm2=%xmm2 | ||
152 | movdqa %xmm1,%xmm2 | ||
153 | |||
154 | # qhasm: xmm3 = xmm1 | ||
155 | # asm 1: movdqa <xmm1=int6464#2,>xmm3=int6464#4 | ||
156 | # asm 2: movdqa <xmm1=%xmm1,>xmm3=%xmm3 | ||
157 | movdqa %xmm1,%xmm3 | ||
158 | |||
159 | # qhasm: xmm4 = xmm1 | ||
160 | # asm 1: movdqa <xmm1=int6464#2,>xmm4=int6464#5 | ||
161 | # asm 2: movdqa <xmm1=%xmm1,>xmm4=%xmm4 | ||
162 | movdqa %xmm1,%xmm4 | ||
163 | |||
164 | # qhasm: xmm5 = xmm1 | ||
165 | # asm 1: movdqa <xmm1=int6464#2,>xmm5=int6464#6 | ||
166 | # asm 2: movdqa <xmm1=%xmm1,>xmm5=%xmm5 | ||
167 | movdqa %xmm1,%xmm5 | ||
168 | |||
169 | # qhasm: xmm6 = xmm1 | ||
170 | # asm 1: movdqa <xmm1=int6464#2,>xmm6=int6464#7 | ||
171 | # asm 2: movdqa <xmm1=%xmm1,>xmm6=%xmm6 | ||
172 | movdqa %xmm1,%xmm6 | ||
173 | |||
174 | # qhasm: xmm7 = xmm1 | ||
175 | # asm 1: movdqa <xmm1=int6464#2,>xmm7=int6464#8 | ||
176 | # asm 2: movdqa <xmm1=%xmm1,>xmm7=%xmm7 | ||
177 | movdqa %xmm1,%xmm7 | ||
178 | |||
179 | # qhasm: int32323232 xmm1 += RCTRINC1 | ||
180 | # asm 1: paddd RCTRINC1,<xmm1=int6464#2 | ||
181 | # asm 2: paddd RCTRINC1,<xmm1=%xmm1 | ||
182 | paddd RCTRINC1,%xmm1 | ||
183 | |||
184 | # qhasm: int32323232 xmm2 += RCTRINC2 | ||
185 | # asm 1: paddd RCTRINC2,<xmm2=int6464#3 | ||
186 | # asm 2: paddd RCTRINC2,<xmm2=%xmm2 | ||
187 | paddd RCTRINC2,%xmm2 | ||
188 | |||
189 | # qhasm: int32323232 xmm3 += RCTRINC3 | ||
190 | # asm 1: paddd RCTRINC3,<xmm3=int6464#4 | ||
191 | # asm 2: paddd RCTRINC3,<xmm3=%xmm3 | ||
192 | paddd RCTRINC3,%xmm3 | ||
193 | |||
194 | # qhasm: int32323232 xmm4 += RCTRINC4 | ||
195 | # asm 1: paddd RCTRINC4,<xmm4=int6464#5 | ||
196 | # asm 2: paddd RCTRINC4,<xmm4=%xmm4 | ||
197 | paddd RCTRINC4,%xmm4 | ||
198 | |||
199 | # qhasm: int32323232 xmm5 += RCTRINC5 | ||
200 | # asm 1: paddd RCTRINC5,<xmm5=int6464#6 | ||
201 | # asm 2: paddd RCTRINC5,<xmm5=%xmm5 | ||
202 | paddd RCTRINC5,%xmm5 | ||
203 | |||
204 | # qhasm: int32323232 xmm6 += RCTRINC6 | ||
205 | # asm 1: paddd RCTRINC6,<xmm6=int6464#7 | ||
206 | # asm 2: paddd RCTRINC6,<xmm6=%xmm6 | ||
207 | paddd RCTRINC6,%xmm6 | ||
208 | |||
209 | # qhasm: int32323232 xmm7 += RCTRINC7 | ||
210 | # asm 1: paddd RCTRINC7,<xmm7=int6464#8 | ||
211 | # asm 2: paddd RCTRINC7,<xmm7=%xmm7 | ||
212 | paddd RCTRINC7,%xmm7 | ||
213 | |||
214 | # qhasm: shuffle bytes of xmm0 by M0 | ||
215 | # asm 1: pshufb M0,<xmm0=int6464#1 | ||
216 | # asm 2: pshufb M0,<xmm0=%xmm0 | ||
217 | pshufb M0,%xmm0 | ||
218 | |||
219 | # qhasm: shuffle bytes of xmm1 by M0SWAP | ||
220 | # asm 1: pshufb M0SWAP,<xmm1=int6464#2 | ||
221 | # asm 2: pshufb M0SWAP,<xmm1=%xmm1 | ||
222 | pshufb M0SWAP,%xmm1 | ||
223 | |||
224 | # qhasm: shuffle bytes of xmm2 by M0SWAP | ||
225 | # asm 1: pshufb M0SWAP,<xmm2=int6464#3 | ||
226 | # asm 2: pshufb M0SWAP,<xmm2=%xmm2 | ||
227 | pshufb M0SWAP,%xmm2 | ||
228 | |||
229 | # qhasm: shuffle bytes of xmm3 by M0SWAP | ||
230 | # asm 1: pshufb M0SWAP,<xmm3=int6464#4 | ||
231 | # asm 2: pshufb M0SWAP,<xmm3=%xmm3 | ||
232 | pshufb M0SWAP,%xmm3 | ||
233 | |||
234 | # qhasm: shuffle bytes of xmm4 by M0SWAP | ||
235 | # asm 1: pshufb M0SWAP,<xmm4=int6464#5 | ||
236 | # asm 2: pshufb M0SWAP,<xmm4=%xmm4 | ||
237 | pshufb M0SWAP,%xmm4 | ||
238 | |||
239 | # qhasm: shuffle bytes of xmm5 by M0SWAP | ||
240 | # asm 1: pshufb M0SWAP,<xmm5=int6464#6 | ||
241 | # asm 2: pshufb M0SWAP,<xmm5=%xmm5 | ||
242 | pshufb M0SWAP,%xmm5 | ||
243 | |||
244 | # qhasm: shuffle bytes of xmm6 by M0SWAP | ||
245 | # asm 1: pshufb M0SWAP,<xmm6=int6464#7 | ||
246 | # asm 2: pshufb M0SWAP,<xmm6=%xmm6 | ||
247 | pshufb M0SWAP,%xmm6 | ||
248 | |||
249 | # qhasm: shuffle bytes of xmm7 by M0SWAP | ||
250 | # asm 1: pshufb M0SWAP,<xmm7=int6464#8 | ||
251 | # asm 2: pshufb M0SWAP,<xmm7=%xmm7 | ||
252 | pshufb M0SWAP,%xmm7 | ||
253 | |||
254 | # qhasm: xmm8 = xmm6 | ||
255 | # asm 1: movdqa <xmm6=int6464#7,>xmm8=int6464#9 | ||
256 | # asm 2: movdqa <xmm6=%xmm6,>xmm8=%xmm8 | ||
257 | movdqa %xmm6,%xmm8 | ||
258 | |||
259 | # qhasm: uint6464 xmm8 >>= 1 | ||
260 | # asm 1: psrlq $1,<xmm8=int6464#9 | ||
261 | # asm 2: psrlq $1,<xmm8=%xmm8 | ||
262 | psrlq $1,%xmm8 | ||
263 | |||
264 | # qhasm: xmm8 ^= xmm7 | ||
265 | # asm 1: pxor <xmm7=int6464#8,<xmm8=int6464#9 | ||
266 | # asm 2: pxor <xmm7=%xmm7,<xmm8=%xmm8 | ||
267 | pxor %xmm7,%xmm8 | ||
268 | |||
269 | # qhasm: xmm8 &= BS0 | ||
270 | # asm 1: pand BS0,<xmm8=int6464#9 | ||
271 | # asm 2: pand BS0,<xmm8=%xmm8 | ||
272 | pand BS0,%xmm8 | ||
273 | |||
274 | # qhasm: xmm7 ^= xmm8 | ||
275 | # asm 1: pxor <xmm8=int6464#9,<xmm7=int6464#8 | ||
276 | # asm 2: pxor <xmm8=%xmm8,<xmm7=%xmm7 | ||
277 | pxor %xmm8,%xmm7 | ||
278 | |||
279 | # qhasm: uint6464 xmm8 <<= 1 | ||
280 | # asm 1: psllq $1,<xmm8=int6464#9 | ||
281 | # asm 2: psllq $1,<xmm8=%xmm8 | ||
282 | psllq $1,%xmm8 | ||
283 | |||
284 | # qhasm: xmm6 ^= xmm8 | ||
285 | # asm 1: pxor <xmm8=int6464#9,<xmm6=int6464#7 | ||
286 | # asm 2: pxor <xmm8=%xmm8,<xmm6=%xmm6 | ||
287 | pxor %xmm8,%xmm6 | ||
288 | |||
289 | # qhasm: xmm8 = xmm4 | ||
290 | # asm 1: movdqa <xmm4=int6464#5,>xmm8=int6464#9 | ||
291 | # asm 2: movdqa <xmm4=%xmm4,>xmm8=%xmm8 | ||
292 | movdqa %xmm4,%xmm8 | ||
293 | |||
294 | # qhasm: uint6464 xmm8 >>= 1 | ||
295 | # asm 1: psrlq $1,<xmm8=int6464#9 | ||
296 | # asm 2: psrlq $1,<xmm8=%xmm8 | ||
297 | psrlq $1,%xmm8 | ||
298 | |||
299 | # qhasm: xmm8 ^= xmm5 | ||
300 | # asm 1: pxor <xmm5=int6464#6,<xmm8=int6464#9 | ||
301 | # asm 2: pxor <xmm5=%xmm5,<xmm8=%xmm8 | ||
302 | pxor %xmm5,%xmm8 | ||
303 | |||
304 | # qhasm: xmm8 &= BS0 | ||
305 | # asm 1: pand BS0,<xmm8=int6464#9 | ||
306 | # asm 2: pand BS0,<xmm8=%xmm8 | ||
307 | pand BS0,%xmm8 | ||
308 | |||
309 | # qhasm: xmm5 ^= xmm8 | ||
310 | # asm 1: pxor <xmm8=int6464#9,<xmm5=int6464#6 | ||
311 | # asm 2: pxor <xmm8=%xmm8,<xmm5=%xmm5 | ||
312 | pxor %xmm8,%xmm5 | ||
313 | |||
314 | # qhasm: uint6464 xmm8 <<= 1 | ||
315 | # asm 1: psllq $1,<xmm8=int6464#9 | ||
316 | # asm 2: psllq $1,<xmm8=%xmm8 | ||
317 | psllq $1,%xmm8 | ||
318 | |||
319 | # qhasm: xmm4 ^= xmm8 | ||
320 | # asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#5 | ||
321 | # asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm4 | ||
322 | pxor %xmm8,%xmm4 | ||
323 | |||
324 | # qhasm: xmm8 = xmm2 | ||
325 | # asm 1: movdqa <xmm2=int6464#3,>xmm8=int6464#9 | ||
326 | # asm 2: movdqa <xmm2=%xmm2,>xmm8=%xmm8 | ||
327 | movdqa %xmm2,%xmm8 | ||
328 | |||
329 | # qhasm: uint6464 xmm8 >>= 1 | ||
330 | # asm 1: psrlq $1,<xmm8=int6464#9 | ||
331 | # asm 2: psrlq $1,<xmm8=%xmm8 | ||
332 | psrlq $1,%xmm8 | ||
333 | |||
334 | # qhasm: xmm8 ^= xmm3 | ||
335 | # asm 1: pxor <xmm3=int6464#4,<xmm8=int6464#9 | ||
336 | # asm 2: pxor <xmm3=%xmm3,<xmm8=%xmm8 | ||
337 | pxor %xmm3,%xmm8 | ||
338 | |||
339 | # qhasm: xmm8 &= BS0 | ||
340 | # asm 1: pand BS0,<xmm8=int6464#9 | ||
341 | # asm 2: pand BS0,<xmm8=%xmm8 | ||
342 | pand BS0,%xmm8 | ||
343 | |||
344 | # qhasm: xmm3 ^= xmm8 | ||
345 | # asm 1: pxor <xmm8=int6464#9,<xmm3=int6464#4 | ||
346 | # asm 2: pxor <xmm8=%xmm8,<xmm3=%xmm3 | ||
347 | pxor %xmm8,%xmm3 | ||
348 | |||
349 | # qhasm: uint6464 xmm8 <<= 1 | ||
350 | # asm 1: psllq $1,<xmm8=int6464#9 | ||
351 | # asm 2: psllq $1,<xmm8=%xmm8 | ||
352 | psllq $1,%xmm8 | ||
353 | |||
354 | # qhasm: xmm2 ^= xmm8 | ||
355 | # asm 1: pxor <xmm8=int6464#9,<xmm2=int6464#3 | ||
356 | # asm 2: pxor <xmm8=%xmm8,<xmm2=%xmm2 | ||
357 | pxor %xmm8,%xmm2 | ||
358 | |||
359 | # qhasm: xmm8 = xmm0 | ||
360 | # asm 1: movdqa <xmm0=int6464#1,>xmm8=int6464#9 | ||
361 | # asm 2: movdqa <xmm0=%xmm0,>xmm8=%xmm8 | ||
362 | movdqa %xmm0,%xmm8 | ||
363 | |||
364 | # qhasm: uint6464 xmm8 >>= 1 | ||
365 | # asm 1: psrlq $1,<xmm8=int6464#9 | ||
366 | # asm 2: psrlq $1,<xmm8=%xmm8 | ||
367 | psrlq $1,%xmm8 | ||
368 | |||
369 | # qhasm: xmm8 ^= xmm1 | ||
370 | # asm 1: pxor <xmm1=int6464#2,<xmm8=int6464#9 | ||
371 | # asm 2: pxor <xmm1=%xmm1,<xmm8=%xmm8 | ||
372 | pxor %xmm1,%xmm8 | ||
373 | |||
374 | # qhasm: xmm8 &= BS0 | ||
375 | # asm 1: pand BS0,<xmm8=int6464#9 | ||
376 | # asm 2: pand BS0,<xmm8=%xmm8 | ||
377 | pand BS0,%xmm8 | ||
378 | |||
379 | # qhasm: xmm1 ^= xmm8 | ||
380 | # asm 1: pxor <xmm8=int6464#9,<xmm1=int6464#2 | ||
381 | # asm 2: pxor <xmm8=%xmm8,<xmm1=%xmm1 | ||
382 | pxor %xmm8,%xmm1 | ||
383 | |||
384 | # qhasm: uint6464 xmm8 <<= 1 | ||
385 | # asm 1: psllq $1,<xmm8=int6464#9 | ||
386 | # asm 2: psllq $1,<xmm8=%xmm8 | ||
387 | psllq $1,%xmm8 | ||
388 | |||
389 | # qhasm: xmm0 ^= xmm8 | ||
390 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
391 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
392 | pxor %xmm8,%xmm0 | ||
393 | |||
394 | # qhasm: xmm8 = xmm5 | ||
395 | # asm 1: movdqa <xmm5=int6464#6,>xmm8=int6464#9 | ||
396 | # asm 2: movdqa <xmm5=%xmm5,>xmm8=%xmm8 | ||
397 | movdqa %xmm5,%xmm8 | ||
398 | |||
399 | # qhasm: uint6464 xmm8 >>= 2 | ||
400 | # asm 1: psrlq $2,<xmm8=int6464#9 | ||
401 | # asm 2: psrlq $2,<xmm8=%xmm8 | ||
402 | psrlq $2,%xmm8 | ||
403 | |||
404 | # qhasm: xmm8 ^= xmm7 | ||
405 | # asm 1: pxor <xmm7=int6464#8,<xmm8=int6464#9 | ||
406 | # asm 2: pxor <xmm7=%xmm7,<xmm8=%xmm8 | ||
407 | pxor %xmm7,%xmm8 | ||
408 | |||
409 | # qhasm: xmm8 &= BS1 | ||
410 | # asm 1: pand BS1,<xmm8=int6464#9 | ||
411 | # asm 2: pand BS1,<xmm8=%xmm8 | ||
412 | pand BS1,%xmm8 | ||
413 | |||
414 | # qhasm: xmm7 ^= xmm8 | ||
415 | # asm 1: pxor <xmm8=int6464#9,<xmm7=int6464#8 | ||
416 | # asm 2: pxor <xmm8=%xmm8,<xmm7=%xmm7 | ||
417 | pxor %xmm8,%xmm7 | ||
418 | |||
419 | # qhasm: uint6464 xmm8 <<= 2 | ||
420 | # asm 1: psllq $2,<xmm8=int6464#9 | ||
421 | # asm 2: psllq $2,<xmm8=%xmm8 | ||
422 | psllq $2,%xmm8 | ||
423 | |||
424 | # qhasm: xmm5 ^= xmm8 | ||
425 | # asm 1: pxor <xmm8=int6464#9,<xmm5=int6464#6 | ||
426 | # asm 2: pxor <xmm8=%xmm8,<xmm5=%xmm5 | ||
427 | pxor %xmm8,%xmm5 | ||
428 | |||
429 | # qhasm: xmm8 = xmm4 | ||
430 | # asm 1: movdqa <xmm4=int6464#5,>xmm8=int6464#9 | ||
431 | # asm 2: movdqa <xmm4=%xmm4,>xmm8=%xmm8 | ||
432 | movdqa %xmm4,%xmm8 | ||
433 | |||
434 | # qhasm: uint6464 xmm8 >>= 2 | ||
435 | # asm 1: psrlq $2,<xmm8=int6464#9 | ||
436 | # asm 2: psrlq $2,<xmm8=%xmm8 | ||
437 | psrlq $2,%xmm8 | ||
438 | |||
439 | # qhasm: xmm8 ^= xmm6 | ||
440 | # asm 1: pxor <xmm6=int6464#7,<xmm8=int6464#9 | ||
441 | # asm 2: pxor <xmm6=%xmm6,<xmm8=%xmm8 | ||
442 | pxor %xmm6,%xmm8 | ||
443 | |||
444 | # qhasm: xmm8 &= BS1 | ||
445 | # asm 1: pand BS1,<xmm8=int6464#9 | ||
446 | # asm 2: pand BS1,<xmm8=%xmm8 | ||
447 | pand BS1,%xmm8 | ||
448 | |||
449 | # qhasm: xmm6 ^= xmm8 | ||
450 | # asm 1: pxor <xmm8=int6464#9,<xmm6=int6464#7 | ||
451 | # asm 2: pxor <xmm8=%xmm8,<xmm6=%xmm6 | ||
452 | pxor %xmm8,%xmm6 | ||
453 | |||
454 | # qhasm: uint6464 xmm8 <<= 2 | ||
455 | # asm 1: psllq $2,<xmm8=int6464#9 | ||
456 | # asm 2: psllq $2,<xmm8=%xmm8 | ||
457 | psllq $2,%xmm8 | ||
458 | |||
459 | # qhasm: xmm4 ^= xmm8 | ||
460 | # asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#5 | ||
461 | # asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm4 | ||
462 | pxor %xmm8,%xmm4 | ||
463 | |||
464 | # qhasm: xmm8 = xmm1 | ||
465 | # asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#9 | ||
466 | # asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm8 | ||
467 | movdqa %xmm1,%xmm8 | ||
468 | |||
469 | # qhasm: uint6464 xmm8 >>= 2 | ||
470 | # asm 1: psrlq $2,<xmm8=int6464#9 | ||
471 | # asm 2: psrlq $2,<xmm8=%xmm8 | ||
472 | psrlq $2,%xmm8 | ||
473 | |||
474 | # qhasm: xmm8 ^= xmm3 | ||
475 | # asm 1: pxor <xmm3=int6464#4,<xmm8=int6464#9 | ||
476 | # asm 2: pxor <xmm3=%xmm3,<xmm8=%xmm8 | ||
477 | pxor %xmm3,%xmm8 | ||
478 | |||
479 | # qhasm: xmm8 &= BS1 | ||
480 | # asm 1: pand BS1,<xmm8=int6464#9 | ||
481 | # asm 2: pand BS1,<xmm8=%xmm8 | ||
482 | pand BS1,%xmm8 | ||
483 | |||
484 | # qhasm: xmm3 ^= xmm8 | ||
485 | # asm 1: pxor <xmm8=int6464#9,<xmm3=int6464#4 | ||
486 | # asm 2: pxor <xmm8=%xmm8,<xmm3=%xmm3 | ||
487 | pxor %xmm8,%xmm3 | ||
488 | |||
489 | # qhasm: uint6464 xmm8 <<= 2 | ||
490 | # asm 1: psllq $2,<xmm8=int6464#9 | ||
491 | # asm 2: psllq $2,<xmm8=%xmm8 | ||
492 | psllq $2,%xmm8 | ||
493 | |||
494 | # qhasm: xmm1 ^= xmm8 | ||
495 | # asm 1: pxor <xmm8=int6464#9,<xmm1=int6464#2 | ||
496 | # asm 2: pxor <xmm8=%xmm8,<xmm1=%xmm1 | ||
497 | pxor %xmm8,%xmm1 | ||
498 | |||
499 | # qhasm: xmm8 = xmm0 | ||
500 | # asm 1: movdqa <xmm0=int6464#1,>xmm8=int6464#9 | ||
501 | # asm 2: movdqa <xmm0=%xmm0,>xmm8=%xmm8 | ||
502 | movdqa %xmm0,%xmm8 | ||
503 | |||
504 | # qhasm: uint6464 xmm8 >>= 2 | ||
505 | # asm 1: psrlq $2,<xmm8=int6464#9 | ||
506 | # asm 2: psrlq $2,<xmm8=%xmm8 | ||
507 | psrlq $2,%xmm8 | ||
508 | |||
509 | # qhasm: xmm8 ^= xmm2 | ||
510 | # asm 1: pxor <xmm2=int6464#3,<xmm8=int6464#9 | ||
511 | # asm 2: pxor <xmm2=%xmm2,<xmm8=%xmm8 | ||
512 | pxor %xmm2,%xmm8 | ||
513 | |||
514 | # qhasm: xmm8 &= BS1 | ||
515 | # asm 1: pand BS1,<xmm8=int6464#9 | ||
516 | # asm 2: pand BS1,<xmm8=%xmm8 | ||
517 | pand BS1,%xmm8 | ||
518 | |||
519 | # qhasm: xmm2 ^= xmm8 | ||
520 | # asm 1: pxor <xmm8=int6464#9,<xmm2=int6464#3 | ||
521 | # asm 2: pxor <xmm8=%xmm8,<xmm2=%xmm2 | ||
522 | pxor %xmm8,%xmm2 | ||
523 | |||
524 | # qhasm: uint6464 xmm8 <<= 2 | ||
525 | # asm 1: psllq $2,<xmm8=int6464#9 | ||
526 | # asm 2: psllq $2,<xmm8=%xmm8 | ||
527 | psllq $2,%xmm8 | ||
528 | |||
529 | # qhasm: xmm0 ^= xmm8 | ||
530 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
531 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
532 | pxor %xmm8,%xmm0 | ||
533 | |||
534 | # qhasm: xmm8 = xmm3 | ||
535 | # asm 1: movdqa <xmm3=int6464#4,>xmm8=int6464#9 | ||
536 | # asm 2: movdqa <xmm3=%xmm3,>xmm8=%xmm8 | ||
537 | movdqa %xmm3,%xmm8 | ||
538 | |||
539 | # qhasm: uint6464 xmm8 >>= 4 | ||
540 | # asm 1: psrlq $4,<xmm8=int6464#9 | ||
541 | # asm 2: psrlq $4,<xmm8=%xmm8 | ||
542 | psrlq $4,%xmm8 | ||
543 | |||
544 | # qhasm: xmm8 ^= xmm7 | ||
545 | # asm 1: pxor <xmm7=int6464#8,<xmm8=int6464#9 | ||
546 | # asm 2: pxor <xmm7=%xmm7,<xmm8=%xmm8 | ||
547 | pxor %xmm7,%xmm8 | ||
548 | |||
549 | # qhasm: xmm8 &= BS2 | ||
550 | # asm 1: pand BS2,<xmm8=int6464#9 | ||
551 | # asm 2: pand BS2,<xmm8=%xmm8 | ||
552 | pand BS2,%xmm8 | ||
553 | |||
554 | # qhasm: xmm7 ^= xmm8 | ||
555 | # asm 1: pxor <xmm8=int6464#9,<xmm7=int6464#8 | ||
556 | # asm 2: pxor <xmm8=%xmm8,<xmm7=%xmm7 | ||
557 | pxor %xmm8,%xmm7 | ||
558 | |||
559 | # qhasm: uint6464 xmm8 <<= 4 | ||
560 | # asm 1: psllq $4,<xmm8=int6464#9 | ||
561 | # asm 2: psllq $4,<xmm8=%xmm8 | ||
562 | psllq $4,%xmm8 | ||
563 | |||
564 | # qhasm: xmm3 ^= xmm8 | ||
565 | # asm 1: pxor <xmm8=int6464#9,<xmm3=int6464#4 | ||
566 | # asm 2: pxor <xmm8=%xmm8,<xmm3=%xmm3 | ||
567 | pxor %xmm8,%xmm3 | ||
568 | |||
569 | # qhasm: xmm8 = xmm2 | ||
570 | # asm 1: movdqa <xmm2=int6464#3,>xmm8=int6464#9 | ||
571 | # asm 2: movdqa <xmm2=%xmm2,>xmm8=%xmm8 | ||
572 | movdqa %xmm2,%xmm8 | ||
573 | |||
574 | # qhasm: uint6464 xmm8 >>= 4 | ||
575 | # asm 1: psrlq $4,<xmm8=int6464#9 | ||
576 | # asm 2: psrlq $4,<xmm8=%xmm8 | ||
577 | psrlq $4,%xmm8 | ||
578 | |||
579 | # qhasm: xmm8 ^= xmm6 | ||
580 | # asm 1: pxor <xmm6=int6464#7,<xmm8=int6464#9 | ||
581 | # asm 2: pxor <xmm6=%xmm6,<xmm8=%xmm8 | ||
582 | pxor %xmm6,%xmm8 | ||
583 | |||
584 | # qhasm: xmm8 &= BS2 | ||
585 | # asm 1: pand BS2,<xmm8=int6464#9 | ||
586 | # asm 2: pand BS2,<xmm8=%xmm8 | ||
587 | pand BS2,%xmm8 | ||
588 | |||
589 | # qhasm: xmm6 ^= xmm8 | ||
590 | # asm 1: pxor <xmm8=int6464#9,<xmm6=int6464#7 | ||
591 | # asm 2: pxor <xmm8=%xmm8,<xmm6=%xmm6 | ||
592 | pxor %xmm8,%xmm6 | ||
593 | |||
594 | # qhasm: uint6464 xmm8 <<= 4 | ||
595 | # asm 1: psllq $4,<xmm8=int6464#9 | ||
596 | # asm 2: psllq $4,<xmm8=%xmm8 | ||
597 | psllq $4,%xmm8 | ||
598 | |||
599 | # qhasm: xmm2 ^= xmm8 | ||
600 | # asm 1: pxor <xmm8=int6464#9,<xmm2=int6464#3 | ||
601 | # asm 2: pxor <xmm8=%xmm8,<xmm2=%xmm2 | ||
602 | pxor %xmm8,%xmm2 | ||
603 | |||
604 | # qhasm: xmm8 = xmm1 | ||
605 | # asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#9 | ||
606 | # asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm8 | ||
607 | movdqa %xmm1,%xmm8 | ||
608 | |||
609 | # qhasm: uint6464 xmm8 >>= 4 | ||
610 | # asm 1: psrlq $4,<xmm8=int6464#9 | ||
611 | # asm 2: psrlq $4,<xmm8=%xmm8 | ||
612 | psrlq $4,%xmm8 | ||
613 | |||
614 | # qhasm: xmm8 ^= xmm5 | ||
615 | # asm 1: pxor <xmm5=int6464#6,<xmm8=int6464#9 | ||
616 | # asm 2: pxor <xmm5=%xmm5,<xmm8=%xmm8 | ||
617 | pxor %xmm5,%xmm8 | ||
618 | |||
619 | # qhasm: xmm8 &= BS2 | ||
620 | # asm 1: pand BS2,<xmm8=int6464#9 | ||
621 | # asm 2: pand BS2,<xmm8=%xmm8 | ||
622 | pand BS2,%xmm8 | ||
623 | |||
624 | # qhasm: xmm5 ^= xmm8 | ||
625 | # asm 1: pxor <xmm8=int6464#9,<xmm5=int6464#6 | ||
626 | # asm 2: pxor <xmm8=%xmm8,<xmm5=%xmm5 | ||
627 | pxor %xmm8,%xmm5 | ||
628 | |||
629 | # qhasm: uint6464 xmm8 <<= 4 | ||
630 | # asm 1: psllq $4,<xmm8=int6464#9 | ||
631 | # asm 2: psllq $4,<xmm8=%xmm8 | ||
632 | psllq $4,%xmm8 | ||
633 | |||
634 | # qhasm: xmm1 ^= xmm8 | ||
635 | # asm 1: pxor <xmm8=int6464#9,<xmm1=int6464#2 | ||
636 | # asm 2: pxor <xmm8=%xmm8,<xmm1=%xmm1 | ||
637 | pxor %xmm8,%xmm1 | ||
638 | |||
639 | # qhasm: xmm8 = xmm0 | ||
640 | # asm 1: movdqa <xmm0=int6464#1,>xmm8=int6464#9 | ||
641 | # asm 2: movdqa <xmm0=%xmm0,>xmm8=%xmm8 | ||
642 | movdqa %xmm0,%xmm8 | ||
643 | |||
644 | # qhasm: uint6464 xmm8 >>= 4 | ||
645 | # asm 1: psrlq $4,<xmm8=int6464#9 | ||
646 | # asm 2: psrlq $4,<xmm8=%xmm8 | ||
647 | psrlq $4,%xmm8 | ||
648 | |||
649 | # qhasm: xmm8 ^= xmm4 | ||
650 | # asm 1: pxor <xmm4=int6464#5,<xmm8=int6464#9 | ||
651 | # asm 2: pxor <xmm4=%xmm4,<xmm8=%xmm8 | ||
652 | pxor %xmm4,%xmm8 | ||
653 | |||
654 | # qhasm: xmm8 &= BS2 | ||
655 | # asm 1: pand BS2,<xmm8=int6464#9 | ||
656 | # asm 2: pand BS2,<xmm8=%xmm8 | ||
657 | pand BS2,%xmm8 | ||
658 | |||
659 | # qhasm: xmm4 ^= xmm8 | ||
660 | # asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#5 | ||
661 | # asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm4 | ||
662 | pxor %xmm8,%xmm4 | ||
663 | |||
664 | # qhasm: uint6464 xmm8 <<= 4 | ||
665 | # asm 1: psllq $4,<xmm8=int6464#9 | ||
666 | # asm 2: psllq $4,<xmm8=%xmm8 | ||
667 | psllq $4,%xmm8 | ||
668 | |||
669 | # qhasm: xmm0 ^= xmm8 | ||
670 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
671 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
672 | pxor %xmm8,%xmm0 | ||
673 | |||
674 | # qhasm: xmm0 ^= *(int128 *)(c + 0) | ||
675 | # asm 1: pxor 0(<c=int64#5),<xmm0=int6464#1 | ||
676 | # asm 2: pxor 0(<c=%r8),<xmm0=%xmm0 | ||
677 | pxor 0(%r8),%xmm0 | ||
678 | |||
679 | # qhasm: shuffle bytes of xmm0 by SR | ||
680 | # asm 1: pshufb SR,<xmm0=int6464#1 | ||
681 | # asm 2: pshufb SR,<xmm0=%xmm0 | ||
682 | pshufb SR,%xmm0 | ||
683 | |||
684 | # qhasm: xmm1 ^= *(int128 *)(c + 16) | ||
685 | # asm 1: pxor 16(<c=int64#5),<xmm1=int6464#2 | ||
686 | # asm 2: pxor 16(<c=%r8),<xmm1=%xmm1 | ||
687 | pxor 16(%r8),%xmm1 | ||
688 | |||
689 | # qhasm: shuffle bytes of xmm1 by SR | ||
690 | # asm 1: pshufb SR,<xmm1=int6464#2 | ||
691 | # asm 2: pshufb SR,<xmm1=%xmm1 | ||
692 | pshufb SR,%xmm1 | ||
693 | |||
694 | # qhasm: xmm2 ^= *(int128 *)(c + 32) | ||
695 | # asm 1: pxor 32(<c=int64#5),<xmm2=int6464#3 | ||
696 | # asm 2: pxor 32(<c=%r8),<xmm2=%xmm2 | ||
697 | pxor 32(%r8),%xmm2 | ||
698 | |||
699 | # qhasm: shuffle bytes of xmm2 by SR | ||
700 | # asm 1: pshufb SR,<xmm2=int6464#3 | ||
701 | # asm 2: pshufb SR,<xmm2=%xmm2 | ||
702 | pshufb SR,%xmm2 | ||
703 | |||
704 | # qhasm: xmm3 ^= *(int128 *)(c + 48) | ||
705 | # asm 1: pxor 48(<c=int64#5),<xmm3=int6464#4 | ||
706 | # asm 2: pxor 48(<c=%r8),<xmm3=%xmm3 | ||
707 | pxor 48(%r8),%xmm3 | ||
708 | |||
709 | # qhasm: shuffle bytes of xmm3 by SR | ||
710 | # asm 1: pshufb SR,<xmm3=int6464#4 | ||
711 | # asm 2: pshufb SR,<xmm3=%xmm3 | ||
712 | pshufb SR,%xmm3 | ||
713 | |||
714 | # qhasm: xmm4 ^= *(int128 *)(c + 64) | ||
715 | # asm 1: pxor 64(<c=int64#5),<xmm4=int6464#5 | ||
716 | # asm 2: pxor 64(<c=%r8),<xmm4=%xmm4 | ||
717 | pxor 64(%r8),%xmm4 | ||
718 | |||
719 | # qhasm: shuffle bytes of xmm4 by SR | ||
720 | # asm 1: pshufb SR,<xmm4=int6464#5 | ||
721 | # asm 2: pshufb SR,<xmm4=%xmm4 | ||
722 | pshufb SR,%xmm4 | ||
723 | |||
724 | # qhasm: xmm5 ^= *(int128 *)(c + 80) | ||
725 | # asm 1: pxor 80(<c=int64#5),<xmm5=int6464#6 | ||
726 | # asm 2: pxor 80(<c=%r8),<xmm5=%xmm5 | ||
727 | pxor 80(%r8),%xmm5 | ||
728 | |||
729 | # qhasm: shuffle bytes of xmm5 by SR | ||
730 | # asm 1: pshufb SR,<xmm5=int6464#6 | ||
731 | # asm 2: pshufb SR,<xmm5=%xmm5 | ||
732 | pshufb SR,%xmm5 | ||
733 | |||
734 | # qhasm: xmm6 ^= *(int128 *)(c + 96) | ||
735 | # asm 1: pxor 96(<c=int64#5),<xmm6=int6464#7 | ||
736 | # asm 2: pxor 96(<c=%r8),<xmm6=%xmm6 | ||
737 | pxor 96(%r8),%xmm6 | ||
738 | |||
739 | # qhasm: shuffle bytes of xmm6 by SR | ||
740 | # asm 1: pshufb SR,<xmm6=int6464#7 | ||
741 | # asm 2: pshufb SR,<xmm6=%xmm6 | ||
742 | pshufb SR,%xmm6 | ||
743 | |||
744 | # qhasm: xmm7 ^= *(int128 *)(c + 112) | ||
745 | # asm 1: pxor 112(<c=int64#5),<xmm7=int6464#8 | ||
746 | # asm 2: pxor 112(<c=%r8),<xmm7=%xmm7 | ||
747 | pxor 112(%r8),%xmm7 | ||
748 | |||
749 | # qhasm: shuffle bytes of xmm7 by SR | ||
750 | # asm 1: pshufb SR,<xmm7=int6464#8 | ||
751 | # asm 2: pshufb SR,<xmm7=%xmm7 | ||
752 | pshufb SR,%xmm7 | ||
753 | |||
754 | # qhasm: xmm5 ^= xmm6 | ||
755 | # asm 1: pxor <xmm6=int6464#7,<xmm5=int6464#6 | ||
756 | # asm 2: pxor <xmm6=%xmm6,<xmm5=%xmm5 | ||
757 | pxor %xmm6,%xmm5 | ||
758 | |||
759 | # qhasm: xmm2 ^= xmm1 | ||
760 | # asm 1: pxor <xmm1=int6464#2,<xmm2=int6464#3 | ||
761 | # asm 2: pxor <xmm1=%xmm1,<xmm2=%xmm2 | ||
762 | pxor %xmm1,%xmm2 | ||
763 | |||
764 | # qhasm: xmm5 ^= xmm0 | ||
765 | # asm 1: pxor <xmm0=int6464#1,<xmm5=int6464#6 | ||
766 | # asm 2: pxor <xmm0=%xmm0,<xmm5=%xmm5 | ||
767 | pxor %xmm0,%xmm5 | ||
768 | |||
769 | # qhasm: xmm6 ^= xmm2 | ||
770 | # asm 1: pxor <xmm2=int6464#3,<xmm6=int6464#7 | ||
771 | # asm 2: pxor <xmm2=%xmm2,<xmm6=%xmm6 | ||
772 | pxor %xmm2,%xmm6 | ||
773 | |||
774 | # qhasm: xmm3 ^= xmm0 | ||
775 | # asm 1: pxor <xmm0=int6464#1,<xmm3=int6464#4 | ||
776 | # asm 2: pxor <xmm0=%xmm0,<xmm3=%xmm3 | ||
777 | pxor %xmm0,%xmm3 | ||
778 | |||
779 | # qhasm: xmm6 ^= xmm3 | ||
780 | # asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7 | ||
781 | # asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6 | ||
782 | pxor %xmm3,%xmm6 | ||
783 | |||
784 | # qhasm: xmm3 ^= xmm7 | ||
785 | # asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4 | ||
786 | # asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3 | ||
787 | pxor %xmm7,%xmm3 | ||
788 | |||
789 | # qhasm: xmm3 ^= xmm4 | ||
790 | # asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4 | ||
791 | # asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3 | ||
792 | pxor %xmm4,%xmm3 | ||
793 | |||
794 | # qhasm: xmm7 ^= xmm5 | ||
795 | # asm 1: pxor <xmm5=int6464#6,<xmm7=int6464#8 | ||
796 | # asm 2: pxor <xmm5=%xmm5,<xmm7=%xmm7 | ||
797 | pxor %xmm5,%xmm7 | ||
798 | |||
799 | # qhasm: xmm3 ^= xmm1 | ||
800 | # asm 1: pxor <xmm1=int6464#2,<xmm3=int6464#4 | ||
801 | # asm 2: pxor <xmm1=%xmm1,<xmm3=%xmm3 | ||
802 | pxor %xmm1,%xmm3 | ||
803 | |||
804 | # qhasm: xmm4 ^= xmm5 | ||
805 | # asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5 | ||
806 | # asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4 | ||
807 | pxor %xmm5,%xmm4 | ||
808 | |||
809 | # qhasm: xmm2 ^= xmm7 | ||
810 | # asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3 | ||
811 | # asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2 | ||
812 | pxor %xmm7,%xmm2 | ||
813 | |||
814 | # qhasm: xmm1 ^= xmm5 | ||
815 | # asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2 | ||
816 | # asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1 | ||
817 | pxor %xmm5,%xmm1 | ||
818 | |||
819 | # qhasm: xmm11 = xmm7 | ||
820 | # asm 1: movdqa <xmm7=int6464#8,>xmm11=int6464#9 | ||
821 | # asm 2: movdqa <xmm7=%xmm7,>xmm11=%xmm8 | ||
822 | movdqa %xmm7,%xmm8 | ||
823 | |||
824 | # qhasm: xmm10 = xmm1 | ||
825 | # asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10 | ||
826 | # asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9 | ||
827 | movdqa %xmm1,%xmm9 | ||
828 | |||
829 | # qhasm: xmm9 = xmm5 | ||
830 | # asm 1: movdqa <xmm5=int6464#6,>xmm9=int6464#11 | ||
831 | # asm 2: movdqa <xmm5=%xmm5,>xmm9=%xmm10 | ||
832 | movdqa %xmm5,%xmm10 | ||
833 | |||
834 | # qhasm: xmm13 = xmm2 | ||
835 | # asm 1: movdqa <xmm2=int6464#3,>xmm13=int6464#12 | ||
836 | # asm 2: movdqa <xmm2=%xmm2,>xmm13=%xmm11 | ||
837 | movdqa %xmm2,%xmm11 | ||
838 | |||
839 | # qhasm: xmm12 = xmm6 | ||
840 | # asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#13 | ||
841 | # asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm12 | ||
842 | movdqa %xmm6,%xmm12 | ||
843 | |||
844 | # qhasm: xmm11 ^= xmm4 | ||
845 | # asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#9 | ||
846 | # asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm8 | ||
847 | pxor %xmm4,%xmm8 | ||
848 | |||
849 | # qhasm: xmm10 ^= xmm2 | ||
850 | # asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#10 | ||
851 | # asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm9 | ||
852 | pxor %xmm2,%xmm9 | ||
853 | |||
854 | # qhasm: xmm9 ^= xmm3 | ||
855 | # asm 1: pxor <xmm3=int6464#4,<xmm9=int6464#11 | ||
856 | # asm 2: pxor <xmm3=%xmm3,<xmm9=%xmm10 | ||
857 | pxor %xmm3,%xmm10 | ||
858 | |||
859 | # qhasm: xmm13 ^= xmm4 | ||
860 | # asm 1: pxor <xmm4=int6464#5,<xmm13=int6464#12 | ||
861 | # asm 2: pxor <xmm4=%xmm4,<xmm13=%xmm11 | ||
862 | pxor %xmm4,%xmm11 | ||
863 | |||
864 | # qhasm: xmm12 ^= xmm0 | ||
865 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13 | ||
866 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12 | ||
867 | pxor %xmm0,%xmm12 | ||
868 | |||
869 | # qhasm: xmm14 = xmm11 | ||
870 | # asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14 | ||
871 | # asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13 | ||
872 | movdqa %xmm8,%xmm13 | ||
873 | |||
874 | # qhasm: xmm8 = xmm10 | ||
875 | # asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15 | ||
876 | # asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14 | ||
877 | movdqa %xmm9,%xmm14 | ||
878 | |||
879 | # qhasm: xmm15 = xmm11 | ||
880 | # asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16 | ||
881 | # asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15 | ||
882 | movdqa %xmm8,%xmm15 | ||
883 | |||
884 | # qhasm: xmm10 |= xmm9 | ||
885 | # asm 1: por <xmm9=int6464#11,<xmm10=int6464#10 | ||
886 | # asm 2: por <xmm9=%xmm10,<xmm10=%xmm9 | ||
887 | por %xmm10,%xmm9 | ||
888 | |||
889 | # qhasm: xmm11 |= xmm12 | ||
890 | # asm 1: por <xmm12=int6464#13,<xmm11=int6464#9 | ||
891 | # asm 2: por <xmm12=%xmm12,<xmm11=%xmm8 | ||
892 | por %xmm12,%xmm8 | ||
893 | |||
894 | # qhasm: xmm15 ^= xmm8 | ||
895 | # asm 1: pxor <xmm8=int6464#15,<xmm15=int6464#16 | ||
896 | # asm 2: pxor <xmm8=%xmm14,<xmm15=%xmm15 | ||
897 | pxor %xmm14,%xmm15 | ||
898 | |||
899 | # qhasm: xmm14 &= xmm12 | ||
900 | # asm 1: pand <xmm12=int6464#13,<xmm14=int6464#14 | ||
901 | # asm 2: pand <xmm12=%xmm12,<xmm14=%xmm13 | ||
902 | pand %xmm12,%xmm13 | ||
903 | |||
904 | # qhasm: xmm8 &= xmm9 | ||
905 | # asm 1: pand <xmm9=int6464#11,<xmm8=int6464#15 | ||
906 | # asm 2: pand <xmm9=%xmm10,<xmm8=%xmm14 | ||
907 | pand %xmm10,%xmm14 | ||
908 | |||
909 | # qhasm: xmm12 ^= xmm9 | ||
910 | # asm 1: pxor <xmm9=int6464#11,<xmm12=int6464#13 | ||
911 | # asm 2: pxor <xmm9=%xmm10,<xmm12=%xmm12 | ||
912 | pxor %xmm10,%xmm12 | ||
913 | |||
914 | # qhasm: xmm15 &= xmm12 | ||
915 | # asm 1: pand <xmm12=int6464#13,<xmm15=int6464#16 | ||
916 | # asm 2: pand <xmm12=%xmm12,<xmm15=%xmm15 | ||
917 | pand %xmm12,%xmm15 | ||
918 | |||
919 | # qhasm: xmm12 = xmm3 | ||
920 | # asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#11 | ||
921 | # asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm10 | ||
922 | movdqa %xmm3,%xmm10 | ||
923 | |||
924 | # qhasm: xmm12 ^= xmm0 | ||
925 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#11 | ||
926 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm10 | ||
927 | pxor %xmm0,%xmm10 | ||
928 | |||
929 | # qhasm: xmm13 &= xmm12 | ||
930 | # asm 1: pand <xmm12=int6464#11,<xmm13=int6464#12 | ||
931 | # asm 2: pand <xmm12=%xmm10,<xmm13=%xmm11 | ||
932 | pand %xmm10,%xmm11 | ||
933 | |||
934 | # qhasm: xmm11 ^= xmm13 | ||
935 | # asm 1: pxor <xmm13=int6464#12,<xmm11=int6464#9 | ||
936 | # asm 2: pxor <xmm13=%xmm11,<xmm11=%xmm8 | ||
937 | pxor %xmm11,%xmm8 | ||
938 | |||
939 | # qhasm: xmm10 ^= xmm13 | ||
940 | # asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10 | ||
941 | # asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9 | ||
942 | pxor %xmm11,%xmm9 | ||
943 | |||
944 | # qhasm: xmm13 = xmm7 | ||
945 | # asm 1: movdqa <xmm7=int6464#8,>xmm13=int6464#11 | ||
946 | # asm 2: movdqa <xmm7=%xmm7,>xmm13=%xmm10 | ||
947 | movdqa %xmm7,%xmm10 | ||
948 | |||
949 | # qhasm: xmm13 ^= xmm1 | ||
950 | # asm 1: pxor <xmm1=int6464#2,<xmm13=int6464#11 | ||
951 | # asm 2: pxor <xmm1=%xmm1,<xmm13=%xmm10 | ||
952 | pxor %xmm1,%xmm10 | ||
953 | |||
954 | # qhasm: xmm12 = xmm5 | ||
955 | # asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#12 | ||
956 | # asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm11 | ||
957 | movdqa %xmm5,%xmm11 | ||
958 | |||
959 | # qhasm: xmm9 = xmm13 | ||
960 | # asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13 | ||
961 | # asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12 | ||
962 | movdqa %xmm10,%xmm12 | ||
963 | |||
964 | # qhasm: xmm12 ^= xmm6 | ||
965 | # asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#12 | ||
966 | # asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm11 | ||
967 | pxor %xmm6,%xmm11 | ||
968 | |||
969 | # qhasm: xmm9 |= xmm12 | ||
970 | # asm 1: por <xmm12=int6464#12,<xmm9=int6464#13 | ||
971 | # asm 2: por <xmm12=%xmm11,<xmm9=%xmm12 | ||
972 | por %xmm11,%xmm12 | ||
973 | |||
974 | # qhasm: xmm13 &= xmm12 | ||
975 | # asm 1: pand <xmm12=int6464#12,<xmm13=int6464#11 | ||
976 | # asm 2: pand <xmm12=%xmm11,<xmm13=%xmm10 | ||
977 | pand %xmm11,%xmm10 | ||
978 | |||
979 | # qhasm: xmm8 ^= xmm13 | ||
980 | # asm 1: pxor <xmm13=int6464#11,<xmm8=int6464#15 | ||
981 | # asm 2: pxor <xmm13=%xmm10,<xmm8=%xmm14 | ||
982 | pxor %xmm10,%xmm14 | ||
983 | |||
984 | # qhasm: xmm11 ^= xmm15 | ||
985 | # asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#9 | ||
986 | # asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm8 | ||
987 | pxor %xmm15,%xmm8 | ||
988 | |||
989 | # qhasm: xmm10 ^= xmm14 | ||
990 | # asm 1: pxor <xmm14=int6464#14,<xmm10=int6464#10 | ||
991 | # asm 2: pxor <xmm14=%xmm13,<xmm10=%xmm9 | ||
992 | pxor %xmm13,%xmm9 | ||
993 | |||
994 | # qhasm: xmm9 ^= xmm15 | ||
995 | # asm 1: pxor <xmm15=int6464#16,<xmm9=int6464#13 | ||
996 | # asm 2: pxor <xmm15=%xmm15,<xmm9=%xmm12 | ||
997 | pxor %xmm15,%xmm12 | ||
998 | |||
999 | # qhasm: xmm8 ^= xmm14 | ||
1000 | # asm 1: pxor <xmm14=int6464#14,<xmm8=int6464#15 | ||
1001 | # asm 2: pxor <xmm14=%xmm13,<xmm8=%xmm14 | ||
1002 | pxor %xmm13,%xmm14 | ||
1003 | |||
1004 | # qhasm: xmm9 ^= xmm14 | ||
1005 | # asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13 | ||
1006 | # asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12 | ||
1007 | pxor %xmm13,%xmm12 | ||
1008 | |||
1009 | # qhasm: xmm12 = xmm2 | ||
1010 | # asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#11 | ||
1011 | # asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm10 | ||
1012 | movdqa %xmm2,%xmm10 | ||
1013 | |||
1014 | # qhasm: xmm13 = xmm4 | ||
1015 | # asm 1: movdqa <xmm4=int6464#5,>xmm13=int6464#12 | ||
1016 | # asm 2: movdqa <xmm4=%xmm4,>xmm13=%xmm11 | ||
1017 | movdqa %xmm4,%xmm11 | ||
1018 | |||
1019 | # qhasm: xmm14 = xmm1 | ||
1020 | # asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14 | ||
1021 | # asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13 | ||
1022 | movdqa %xmm1,%xmm13 | ||
1023 | |||
1024 | # qhasm: xmm15 = xmm7 | ||
1025 | # asm 1: movdqa <xmm7=int6464#8,>xmm15=int6464#16 | ||
1026 | # asm 2: movdqa <xmm7=%xmm7,>xmm15=%xmm15 | ||
1027 | movdqa %xmm7,%xmm15 | ||
1028 | |||
1029 | # qhasm: xmm12 &= xmm3 | ||
1030 | # asm 1: pand <xmm3=int6464#4,<xmm12=int6464#11 | ||
1031 | # asm 2: pand <xmm3=%xmm3,<xmm12=%xmm10 | ||
1032 | pand %xmm3,%xmm10 | ||
1033 | |||
1034 | # qhasm: xmm13 &= xmm0 | ||
1035 | # asm 1: pand <xmm0=int6464#1,<xmm13=int6464#12 | ||
1036 | # asm 2: pand <xmm0=%xmm0,<xmm13=%xmm11 | ||
1037 | pand %xmm0,%xmm11 | ||
1038 | |||
1039 | # qhasm: xmm14 &= xmm5 | ||
1040 | # asm 1: pand <xmm5=int6464#6,<xmm14=int6464#14 | ||
1041 | # asm 2: pand <xmm5=%xmm5,<xmm14=%xmm13 | ||
1042 | pand %xmm5,%xmm13 | ||
1043 | |||
1044 | # qhasm: xmm15 |= xmm6 | ||
1045 | # asm 1: por <xmm6=int6464#7,<xmm15=int6464#16 | ||
1046 | # asm 2: por <xmm6=%xmm6,<xmm15=%xmm15 | ||
1047 | por %xmm6,%xmm15 | ||
1048 | |||
1049 | # qhasm: xmm11 ^= xmm12 | ||
1050 | # asm 1: pxor <xmm12=int6464#11,<xmm11=int6464#9 | ||
1051 | # asm 2: pxor <xmm12=%xmm10,<xmm11=%xmm8 | ||
1052 | pxor %xmm10,%xmm8 | ||
1053 | |||
1054 | # qhasm: xmm10 ^= xmm13 | ||
1055 | # asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10 | ||
1056 | # asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9 | ||
1057 | pxor %xmm11,%xmm9 | ||
1058 | |||
1059 | # qhasm: xmm9 ^= xmm14 | ||
1060 | # asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13 | ||
1061 | # asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12 | ||
1062 | pxor %xmm13,%xmm12 | ||
1063 | |||
1064 | # qhasm: xmm8 ^= xmm15 | ||
1065 | # asm 1: pxor <xmm15=int6464#16,<xmm8=int6464#15 | ||
1066 | # asm 2: pxor <xmm15=%xmm15,<xmm8=%xmm14 | ||
1067 | pxor %xmm15,%xmm14 | ||
1068 | |||
1069 | # qhasm: xmm12 = xmm11 | ||
1070 | # asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11 | ||
1071 | # asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10 | ||
1072 | movdqa %xmm8,%xmm10 | ||
1073 | |||
1074 | # qhasm: xmm12 ^= xmm10 | ||
1075 | # asm 1: pxor <xmm10=int6464#10,<xmm12=int6464#11 | ||
1076 | # asm 2: pxor <xmm10=%xmm9,<xmm12=%xmm10 | ||
1077 | pxor %xmm9,%xmm10 | ||
1078 | |||
1079 | # qhasm: xmm11 &= xmm9 | ||
1080 | # asm 1: pand <xmm9=int6464#13,<xmm11=int6464#9 | ||
1081 | # asm 2: pand <xmm9=%xmm12,<xmm11=%xmm8 | ||
1082 | pand %xmm12,%xmm8 | ||
1083 | |||
1084 | # qhasm: xmm14 = xmm8 | ||
1085 | # asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12 | ||
1086 | # asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11 | ||
1087 | movdqa %xmm14,%xmm11 | ||
1088 | |||
1089 | # qhasm: xmm14 ^= xmm11 | ||
1090 | # asm 1: pxor <xmm11=int6464#9,<xmm14=int6464#12 | ||
1091 | # asm 2: pxor <xmm11=%xmm8,<xmm14=%xmm11 | ||
1092 | pxor %xmm8,%xmm11 | ||
1093 | |||
1094 | # qhasm: xmm15 = xmm12 | ||
1095 | # asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14 | ||
1096 | # asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13 | ||
1097 | movdqa %xmm10,%xmm13 | ||
1098 | |||
1099 | # qhasm: xmm15 &= xmm14 | ||
1100 | # asm 1: pand <xmm14=int6464#12,<xmm15=int6464#14 | ||
1101 | # asm 2: pand <xmm14=%xmm11,<xmm15=%xmm13 | ||
1102 | pand %xmm11,%xmm13 | ||
1103 | |||
1104 | # qhasm: xmm15 ^= xmm10 | ||
1105 | # asm 1: pxor <xmm10=int6464#10,<xmm15=int6464#14 | ||
1106 | # asm 2: pxor <xmm10=%xmm9,<xmm15=%xmm13 | ||
1107 | pxor %xmm9,%xmm13 | ||
1108 | |||
1109 | # qhasm: xmm13 = xmm9 | ||
1110 | # asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16 | ||
1111 | # asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15 | ||
1112 | movdqa %xmm12,%xmm15 | ||
1113 | |||
1114 | # qhasm: xmm13 ^= xmm8 | ||
1115 | # asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16 | ||
1116 | # asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15 | ||
1117 | pxor %xmm14,%xmm15 | ||
1118 | |||
1119 | # qhasm: xmm11 ^= xmm10 | ||
1120 | # asm 1: pxor <xmm10=int6464#10,<xmm11=int6464#9 | ||
1121 | # asm 2: pxor <xmm10=%xmm9,<xmm11=%xmm8 | ||
1122 | pxor %xmm9,%xmm8 | ||
1123 | |||
1124 | # qhasm: xmm13 &= xmm11 | ||
1125 | # asm 1: pand <xmm11=int6464#9,<xmm13=int6464#16 | ||
1126 | # asm 2: pand <xmm11=%xmm8,<xmm13=%xmm15 | ||
1127 | pand %xmm8,%xmm15 | ||
1128 | |||
1129 | # qhasm: xmm13 ^= xmm8 | ||
1130 | # asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16 | ||
1131 | # asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15 | ||
1132 | pxor %xmm14,%xmm15 | ||
1133 | |||
1134 | # qhasm: xmm9 ^= xmm13 | ||
1135 | # asm 1: pxor <xmm13=int6464#16,<xmm9=int6464#13 | ||
1136 | # asm 2: pxor <xmm13=%xmm15,<xmm9=%xmm12 | ||
1137 | pxor %xmm15,%xmm12 | ||
1138 | |||
1139 | # qhasm: xmm10 = xmm14 | ||
1140 | # asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9 | ||
1141 | # asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8 | ||
1142 | movdqa %xmm11,%xmm8 | ||
1143 | |||
1144 | # qhasm: xmm10 ^= xmm13 | ||
1145 | # asm 1: pxor <xmm13=int6464#16,<xmm10=int6464#9 | ||
1146 | # asm 2: pxor <xmm13=%xmm15,<xmm10=%xmm8 | ||
1147 | pxor %xmm15,%xmm8 | ||
1148 | |||
1149 | # qhasm: xmm10 &= xmm8 | ||
1150 | # asm 1: pand <xmm8=int6464#15,<xmm10=int6464#9 | ||
1151 | # asm 2: pand <xmm8=%xmm14,<xmm10=%xmm8 | ||
1152 | pand %xmm14,%xmm8 | ||
1153 | |||
1154 | # qhasm: xmm9 ^= xmm10 | ||
1155 | # asm 1: pxor <xmm10=int6464#9,<xmm9=int6464#13 | ||
1156 | # asm 2: pxor <xmm10=%xmm8,<xmm9=%xmm12 | ||
1157 | pxor %xmm8,%xmm12 | ||
1158 | |||
1159 | # qhasm: xmm14 ^= xmm10 | ||
1160 | # asm 1: pxor <xmm10=int6464#9,<xmm14=int6464#12 | ||
1161 | # asm 2: pxor <xmm10=%xmm8,<xmm14=%xmm11 | ||
1162 | pxor %xmm8,%xmm11 | ||
1163 | |||
1164 | # qhasm: xmm14 &= xmm15 | ||
1165 | # asm 1: pand <xmm15=int6464#14,<xmm14=int6464#12 | ||
1166 | # asm 2: pand <xmm15=%xmm13,<xmm14=%xmm11 | ||
1167 | pand %xmm13,%xmm11 | ||
1168 | |||
1169 | # qhasm: xmm14 ^= xmm12 | ||
1170 | # asm 1: pxor <xmm12=int6464#11,<xmm14=int6464#12 | ||
1171 | # asm 2: pxor <xmm12=%xmm10,<xmm14=%xmm11 | ||
1172 | pxor %xmm10,%xmm11 | ||
1173 | |||
1174 | # qhasm: xmm12 = xmm6 | ||
1175 | # asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#9 | ||
1176 | # asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm8 | ||
1177 | movdqa %xmm6,%xmm8 | ||
1178 | |||
1179 | # qhasm: xmm8 = xmm5 | ||
1180 | # asm 1: movdqa <xmm5=int6464#6,>xmm8=int6464#10 | ||
1181 | # asm 2: movdqa <xmm5=%xmm5,>xmm8=%xmm9 | ||
1182 | movdqa %xmm5,%xmm9 | ||
1183 | |||
1184 | # qhasm: xmm10 = xmm15 | ||
1185 | # asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11 | ||
1186 | # asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10 | ||
1187 | movdqa %xmm13,%xmm10 | ||
1188 | |||
1189 | # qhasm: xmm10 ^= xmm14 | ||
1190 | # asm 1: pxor <xmm14=int6464#12,<xmm10=int6464#11 | ||
1191 | # asm 2: pxor <xmm14=%xmm11,<xmm10=%xmm10 | ||
1192 | pxor %xmm11,%xmm10 | ||
1193 | |||
1194 | # qhasm: xmm10 &= xmm6 | ||
1195 | # asm 1: pand <xmm6=int6464#7,<xmm10=int6464#11 | ||
1196 | # asm 2: pand <xmm6=%xmm6,<xmm10=%xmm10 | ||
1197 | pand %xmm6,%xmm10 | ||
1198 | |||
1199 | # qhasm: xmm6 ^= xmm5 | ||
1200 | # asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7 | ||
1201 | # asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6 | ||
1202 | pxor %xmm5,%xmm6 | ||
1203 | |||
1204 | # qhasm: xmm6 &= xmm14 | ||
1205 | # asm 1: pand <xmm14=int6464#12,<xmm6=int6464#7 | ||
1206 | # asm 2: pand <xmm14=%xmm11,<xmm6=%xmm6 | ||
1207 | pand %xmm11,%xmm6 | ||
1208 | |||
1209 | # qhasm: xmm5 &= xmm15 | ||
1210 | # asm 1: pand <xmm15=int6464#14,<xmm5=int6464#6 | ||
1211 | # asm 2: pand <xmm15=%xmm13,<xmm5=%xmm5 | ||
1212 | pand %xmm13,%xmm5 | ||
1213 | |||
1214 | # qhasm: xmm6 ^= xmm5 | ||
1215 | # asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7 | ||
1216 | # asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6 | ||
1217 | pxor %xmm5,%xmm6 | ||
1218 | |||
1219 | # qhasm: xmm5 ^= xmm10 | ||
1220 | # asm 1: pxor <xmm10=int6464#11,<xmm5=int6464#6 | ||
1221 | # asm 2: pxor <xmm10=%xmm10,<xmm5=%xmm5 | ||
1222 | pxor %xmm10,%xmm5 | ||
1223 | |||
1224 | # qhasm: xmm12 ^= xmm0 | ||
1225 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#9 | ||
1226 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm8 | ||
1227 | pxor %xmm0,%xmm8 | ||
1228 | |||
1229 | # qhasm: xmm8 ^= xmm3 | ||
1230 | # asm 1: pxor <xmm3=int6464#4,<xmm8=int6464#10 | ||
1231 | # asm 2: pxor <xmm3=%xmm3,<xmm8=%xmm9 | ||
1232 | pxor %xmm3,%xmm9 | ||
1233 | |||
1234 | # qhasm: xmm15 ^= xmm13 | ||
1235 | # asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14 | ||
1236 | # asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13 | ||
1237 | pxor %xmm15,%xmm13 | ||
1238 | |||
1239 | # qhasm: xmm14 ^= xmm9 | ||
1240 | # asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12 | ||
1241 | # asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11 | ||
1242 | pxor %xmm12,%xmm11 | ||
1243 | |||
1244 | # qhasm: xmm11 = xmm15 | ||
1245 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
1246 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
1247 | movdqa %xmm13,%xmm10 | ||
1248 | |||
1249 | # qhasm: xmm11 ^= xmm14 | ||
1250 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
1251 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
1252 | pxor %xmm11,%xmm10 | ||
1253 | |||
1254 | # qhasm: xmm11 &= xmm12 | ||
1255 | # asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11 | ||
1256 | # asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10 | ||
1257 | pand %xmm8,%xmm10 | ||
1258 | |||
1259 | # qhasm: xmm12 ^= xmm8 | ||
1260 | # asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9 | ||
1261 | # asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8 | ||
1262 | pxor %xmm9,%xmm8 | ||
1263 | |||
1264 | # qhasm: xmm12 &= xmm14 | ||
1265 | # asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9 | ||
1266 | # asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8 | ||
1267 | pand %xmm11,%xmm8 | ||
1268 | |||
1269 | # qhasm: xmm8 &= xmm15 | ||
1270 | # asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10 | ||
1271 | # asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9 | ||
1272 | pand %xmm13,%xmm9 | ||
1273 | |||
1274 | # qhasm: xmm8 ^= xmm12 | ||
1275 | # asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10 | ||
1276 | # asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9 | ||
1277 | pxor %xmm8,%xmm9 | ||
1278 | |||
1279 | # qhasm: xmm12 ^= xmm11 | ||
1280 | # asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9 | ||
1281 | # asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8 | ||
1282 | pxor %xmm10,%xmm8 | ||
1283 | |||
1284 | # qhasm: xmm10 = xmm13 | ||
1285 | # asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11 | ||
1286 | # asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10 | ||
1287 | movdqa %xmm15,%xmm10 | ||
1288 | |||
1289 | # qhasm: xmm10 ^= xmm9 | ||
1290 | # asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11 | ||
1291 | # asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10 | ||
1292 | pxor %xmm12,%xmm10 | ||
1293 | |||
1294 | # qhasm: xmm10 &= xmm0 | ||
1295 | # asm 1: pand <xmm0=int6464#1,<xmm10=int6464#11 | ||
1296 | # asm 2: pand <xmm0=%xmm0,<xmm10=%xmm10 | ||
1297 | pand %xmm0,%xmm10 | ||
1298 | |||
1299 | # qhasm: xmm0 ^= xmm3 | ||
1300 | # asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1 | ||
1301 | # asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0 | ||
1302 | pxor %xmm3,%xmm0 | ||
1303 | |||
1304 | # qhasm: xmm0 &= xmm9 | ||
1305 | # asm 1: pand <xmm9=int6464#13,<xmm0=int6464#1 | ||
1306 | # asm 2: pand <xmm9=%xmm12,<xmm0=%xmm0 | ||
1307 | pand %xmm12,%xmm0 | ||
1308 | |||
1309 | # qhasm: xmm3 &= xmm13 | ||
1310 | # asm 1: pand <xmm13=int6464#16,<xmm3=int6464#4 | ||
1311 | # asm 2: pand <xmm13=%xmm15,<xmm3=%xmm3 | ||
1312 | pand %xmm15,%xmm3 | ||
1313 | |||
1314 | # qhasm: xmm0 ^= xmm3 | ||
1315 | # asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1 | ||
1316 | # asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0 | ||
1317 | pxor %xmm3,%xmm0 | ||
1318 | |||
1319 | # qhasm: xmm3 ^= xmm10 | ||
1320 | # asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4 | ||
1321 | # asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3 | ||
1322 | pxor %xmm10,%xmm3 | ||
1323 | |||
1324 | # qhasm: xmm6 ^= xmm12 | ||
1325 | # asm 1: pxor <xmm12=int6464#9,<xmm6=int6464#7 | ||
1326 | # asm 2: pxor <xmm12=%xmm8,<xmm6=%xmm6 | ||
1327 | pxor %xmm8,%xmm6 | ||
1328 | |||
1329 | # qhasm: xmm0 ^= xmm12 | ||
1330 | # asm 1: pxor <xmm12=int6464#9,<xmm0=int6464#1 | ||
1331 | # asm 2: pxor <xmm12=%xmm8,<xmm0=%xmm0 | ||
1332 | pxor %xmm8,%xmm0 | ||
1333 | |||
1334 | # qhasm: xmm5 ^= xmm8 | ||
1335 | # asm 1: pxor <xmm8=int6464#10,<xmm5=int6464#6 | ||
1336 | # asm 2: pxor <xmm8=%xmm9,<xmm5=%xmm5 | ||
1337 | pxor %xmm9,%xmm5 | ||
1338 | |||
1339 | # qhasm: xmm3 ^= xmm8 | ||
1340 | # asm 1: pxor <xmm8=int6464#10,<xmm3=int6464#4 | ||
1341 | # asm 2: pxor <xmm8=%xmm9,<xmm3=%xmm3 | ||
1342 | pxor %xmm9,%xmm3 | ||
1343 | |||
1344 | # qhasm: xmm12 = xmm7 | ||
1345 | # asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#9 | ||
1346 | # asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm8 | ||
1347 | movdqa %xmm7,%xmm8 | ||
1348 | |||
1349 | # qhasm: xmm8 = xmm1 | ||
1350 | # asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10 | ||
1351 | # asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9 | ||
1352 | movdqa %xmm1,%xmm9 | ||
1353 | |||
1354 | # qhasm: xmm12 ^= xmm4 | ||
1355 | # asm 1: pxor <xmm4=int6464#5,<xmm12=int6464#9 | ||
1356 | # asm 2: pxor <xmm4=%xmm4,<xmm12=%xmm8 | ||
1357 | pxor %xmm4,%xmm8 | ||
1358 | |||
1359 | # qhasm: xmm8 ^= xmm2 | ||
1360 | # asm 1: pxor <xmm2=int6464#3,<xmm8=int6464#10 | ||
1361 | # asm 2: pxor <xmm2=%xmm2,<xmm8=%xmm9 | ||
1362 | pxor %xmm2,%xmm9 | ||
1363 | |||
1364 | # qhasm: xmm11 = xmm15 | ||
1365 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
1366 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
1367 | movdqa %xmm13,%xmm10 | ||
1368 | |||
1369 | # qhasm: xmm11 ^= xmm14 | ||
1370 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
1371 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
1372 | pxor %xmm11,%xmm10 | ||
1373 | |||
1374 | # qhasm: xmm11 &= xmm12 | ||
1375 | # asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11 | ||
1376 | # asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10 | ||
1377 | pand %xmm8,%xmm10 | ||
1378 | |||
1379 | # qhasm: xmm12 ^= xmm8 | ||
1380 | # asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9 | ||
1381 | # asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8 | ||
1382 | pxor %xmm9,%xmm8 | ||
1383 | |||
1384 | # qhasm: xmm12 &= xmm14 | ||
1385 | # asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9 | ||
1386 | # asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8 | ||
1387 | pand %xmm11,%xmm8 | ||
1388 | |||
1389 | # qhasm: xmm8 &= xmm15 | ||
1390 | # asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10 | ||
1391 | # asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9 | ||
1392 | pand %xmm13,%xmm9 | ||
1393 | |||
1394 | # qhasm: xmm8 ^= xmm12 | ||
1395 | # asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10 | ||
1396 | # asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9 | ||
1397 | pxor %xmm8,%xmm9 | ||
1398 | |||
1399 | # qhasm: xmm12 ^= xmm11 | ||
1400 | # asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9 | ||
1401 | # asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8 | ||
1402 | pxor %xmm10,%xmm8 | ||
1403 | |||
1404 | # qhasm: xmm10 = xmm13 | ||
1405 | # asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11 | ||
1406 | # asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10 | ||
1407 | movdqa %xmm15,%xmm10 | ||
1408 | |||
1409 | # qhasm: xmm10 ^= xmm9 | ||
1410 | # asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11 | ||
1411 | # asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10 | ||
1412 | pxor %xmm12,%xmm10 | ||
1413 | |||
1414 | # qhasm: xmm10 &= xmm4 | ||
1415 | # asm 1: pand <xmm4=int6464#5,<xmm10=int6464#11 | ||
1416 | # asm 2: pand <xmm4=%xmm4,<xmm10=%xmm10 | ||
1417 | pand %xmm4,%xmm10 | ||
1418 | |||
1419 | # qhasm: xmm4 ^= xmm2 | ||
1420 | # asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5 | ||
1421 | # asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4 | ||
1422 | pxor %xmm2,%xmm4 | ||
1423 | |||
1424 | # qhasm: xmm4 &= xmm9 | ||
1425 | # asm 1: pand <xmm9=int6464#13,<xmm4=int6464#5 | ||
1426 | # asm 2: pand <xmm9=%xmm12,<xmm4=%xmm4 | ||
1427 | pand %xmm12,%xmm4 | ||
1428 | |||
1429 | # qhasm: xmm2 &= xmm13 | ||
1430 | # asm 1: pand <xmm13=int6464#16,<xmm2=int6464#3 | ||
1431 | # asm 2: pand <xmm13=%xmm15,<xmm2=%xmm2 | ||
1432 | pand %xmm15,%xmm2 | ||
1433 | |||
1434 | # qhasm: xmm4 ^= xmm2 | ||
1435 | # asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5 | ||
1436 | # asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4 | ||
1437 | pxor %xmm2,%xmm4 | ||
1438 | |||
1439 | # qhasm: xmm2 ^= xmm10 | ||
1440 | # asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3 | ||
1441 | # asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2 | ||
1442 | pxor %xmm10,%xmm2 | ||
1443 | |||
1444 | # qhasm: xmm15 ^= xmm13 | ||
1445 | # asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14 | ||
1446 | # asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13 | ||
1447 | pxor %xmm15,%xmm13 | ||
1448 | |||
1449 | # qhasm: xmm14 ^= xmm9 | ||
1450 | # asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12 | ||
1451 | # asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11 | ||
1452 | pxor %xmm12,%xmm11 | ||
1453 | |||
1454 | # qhasm: xmm11 = xmm15 | ||
1455 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
1456 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
1457 | movdqa %xmm13,%xmm10 | ||
1458 | |||
1459 | # qhasm: xmm11 ^= xmm14 | ||
1460 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
1461 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
1462 | pxor %xmm11,%xmm10 | ||
1463 | |||
1464 | # qhasm: xmm11 &= xmm7 | ||
1465 | # asm 1: pand <xmm7=int6464#8,<xmm11=int6464#11 | ||
1466 | # asm 2: pand <xmm7=%xmm7,<xmm11=%xmm10 | ||
1467 | pand %xmm7,%xmm10 | ||
1468 | |||
1469 | # qhasm: xmm7 ^= xmm1 | ||
1470 | # asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8 | ||
1471 | # asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7 | ||
1472 | pxor %xmm1,%xmm7 | ||
1473 | |||
1474 | # qhasm: xmm7 &= xmm14 | ||
1475 | # asm 1: pand <xmm14=int6464#12,<xmm7=int6464#8 | ||
1476 | # asm 2: pand <xmm14=%xmm11,<xmm7=%xmm7 | ||
1477 | pand %xmm11,%xmm7 | ||
1478 | |||
1479 | # qhasm: xmm1 &= xmm15 | ||
1480 | # asm 1: pand <xmm15=int6464#14,<xmm1=int6464#2 | ||
1481 | # asm 2: pand <xmm15=%xmm13,<xmm1=%xmm1 | ||
1482 | pand %xmm13,%xmm1 | ||
1483 | |||
1484 | # qhasm: xmm7 ^= xmm1 | ||
1485 | # asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8 | ||
1486 | # asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7 | ||
1487 | pxor %xmm1,%xmm7 | ||
1488 | |||
1489 | # qhasm: xmm1 ^= xmm11 | ||
1490 | # asm 1: pxor <xmm11=int6464#11,<xmm1=int6464#2 | ||
1491 | # asm 2: pxor <xmm11=%xmm10,<xmm1=%xmm1 | ||
1492 | pxor %xmm10,%xmm1 | ||
1493 | |||
1494 | # qhasm: xmm7 ^= xmm12 | ||
1495 | # asm 1: pxor <xmm12=int6464#9,<xmm7=int6464#8 | ||
1496 | # asm 2: pxor <xmm12=%xmm8,<xmm7=%xmm7 | ||
1497 | pxor %xmm8,%xmm7 | ||
1498 | |||
1499 | # qhasm: xmm4 ^= xmm12 | ||
1500 | # asm 1: pxor <xmm12=int6464#9,<xmm4=int6464#5 | ||
1501 | # asm 2: pxor <xmm12=%xmm8,<xmm4=%xmm4 | ||
1502 | pxor %xmm8,%xmm4 | ||
1503 | |||
1504 | # qhasm: xmm1 ^= xmm8 | ||
1505 | # asm 1: pxor <xmm8=int6464#10,<xmm1=int6464#2 | ||
1506 | # asm 2: pxor <xmm8=%xmm9,<xmm1=%xmm1 | ||
1507 | pxor %xmm9,%xmm1 | ||
1508 | |||
1509 | # qhasm: xmm2 ^= xmm8 | ||
1510 | # asm 1: pxor <xmm8=int6464#10,<xmm2=int6464#3 | ||
1511 | # asm 2: pxor <xmm8=%xmm9,<xmm2=%xmm2 | ||
1512 | pxor %xmm9,%xmm2 | ||
1513 | |||
1514 | # qhasm: xmm7 ^= xmm0 | ||
1515 | # asm 1: pxor <xmm0=int6464#1,<xmm7=int6464#8 | ||
1516 | # asm 2: pxor <xmm0=%xmm0,<xmm7=%xmm7 | ||
1517 | pxor %xmm0,%xmm7 | ||
1518 | |||
1519 | # qhasm: xmm1 ^= xmm6 | ||
1520 | # asm 1: pxor <xmm6=int6464#7,<xmm1=int6464#2 | ||
1521 | # asm 2: pxor <xmm6=%xmm6,<xmm1=%xmm1 | ||
1522 | pxor %xmm6,%xmm1 | ||
1523 | |||
1524 | # qhasm: xmm4 ^= xmm7 | ||
1525 | # asm 1: pxor <xmm7=int6464#8,<xmm4=int6464#5 | ||
1526 | # asm 2: pxor <xmm7=%xmm7,<xmm4=%xmm4 | ||
1527 | pxor %xmm7,%xmm4 | ||
1528 | |||
1529 | # qhasm: xmm6 ^= xmm0 | ||
1530 | # asm 1: pxor <xmm0=int6464#1,<xmm6=int6464#7 | ||
1531 | # asm 2: pxor <xmm0=%xmm0,<xmm6=%xmm6 | ||
1532 | pxor %xmm0,%xmm6 | ||
1533 | |||
1534 | # qhasm: xmm0 ^= xmm1 | ||
1535 | # asm 1: pxor <xmm1=int6464#2,<xmm0=int6464#1 | ||
1536 | # asm 2: pxor <xmm1=%xmm1,<xmm0=%xmm0 | ||
1537 | pxor %xmm1,%xmm0 | ||
1538 | |||
1539 | # qhasm: xmm1 ^= xmm5 | ||
1540 | # asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2 | ||
1541 | # asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1 | ||
1542 | pxor %xmm5,%xmm1 | ||
1543 | |||
1544 | # qhasm: xmm5 ^= xmm2 | ||
1545 | # asm 1: pxor <xmm2=int6464#3,<xmm5=int6464#6 | ||
1546 | # asm 2: pxor <xmm2=%xmm2,<xmm5=%xmm5 | ||
1547 | pxor %xmm2,%xmm5 | ||
1548 | |||
1549 | # qhasm: xmm4 ^= xmm5 | ||
1550 | # asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5 | ||
1551 | # asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4 | ||
1552 | pxor %xmm5,%xmm4 | ||
1553 | |||
1554 | # qhasm: xmm2 ^= xmm3 | ||
1555 | # asm 1: pxor <xmm3=int6464#4,<xmm2=int6464#3 | ||
1556 | # asm 2: pxor <xmm3=%xmm3,<xmm2=%xmm2 | ||
1557 | pxor %xmm3,%xmm2 | ||
1558 | |||
1559 | # qhasm: xmm3 ^= xmm5 | ||
1560 | # asm 1: pxor <xmm5=int6464#6,<xmm3=int6464#4 | ||
1561 | # asm 2: pxor <xmm5=%xmm5,<xmm3=%xmm3 | ||
1562 | pxor %xmm5,%xmm3 | ||
1563 | |||
1564 | # qhasm: xmm6 ^= xmm3 | ||
1565 | # asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7 | ||
1566 | # asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6 | ||
1567 | pxor %xmm3,%xmm6 | ||
1568 | |||
1569 | # qhasm: xmm8 = shuffle dwords of xmm0 by 0x93 | ||
1570 | # asm 1: pshufd $0x93,<xmm0=int6464#1,>xmm8=int6464#9 | ||
1571 | # asm 2: pshufd $0x93,<xmm0=%xmm0,>xmm8=%xmm8 | ||
1572 | pshufd $0x93,%xmm0,%xmm8 | ||
1573 | |||
1574 | # qhasm: xmm9 = shuffle dwords of xmm1 by 0x93 | ||
1575 | # asm 1: pshufd $0x93,<xmm1=int6464#2,>xmm9=int6464#10 | ||
1576 | # asm 2: pshufd $0x93,<xmm1=%xmm1,>xmm9=%xmm9 | ||
1577 | pshufd $0x93,%xmm1,%xmm9 | ||
1578 | |||
1579 | # qhasm: xmm10 = shuffle dwords of xmm4 by 0x93 | ||
1580 | # asm 1: pshufd $0x93,<xmm4=int6464#5,>xmm10=int6464#11 | ||
1581 | # asm 2: pshufd $0x93,<xmm4=%xmm4,>xmm10=%xmm10 | ||
1582 | pshufd $0x93,%xmm4,%xmm10 | ||
1583 | |||
1584 | # qhasm: xmm11 = shuffle dwords of xmm6 by 0x93 | ||
1585 | # asm 1: pshufd $0x93,<xmm6=int6464#7,>xmm11=int6464#12 | ||
1586 | # asm 2: pshufd $0x93,<xmm6=%xmm6,>xmm11=%xmm11 | ||
1587 | pshufd $0x93,%xmm6,%xmm11 | ||
1588 | |||
1589 | # qhasm: xmm12 = shuffle dwords of xmm3 by 0x93 | ||
1590 | # asm 1: pshufd $0x93,<xmm3=int6464#4,>xmm12=int6464#13 | ||
1591 | # asm 2: pshufd $0x93,<xmm3=%xmm3,>xmm12=%xmm12 | ||
1592 | pshufd $0x93,%xmm3,%xmm12 | ||
1593 | |||
1594 | # qhasm: xmm13 = shuffle dwords of xmm7 by 0x93 | ||
1595 | # asm 1: pshufd $0x93,<xmm7=int6464#8,>xmm13=int6464#14 | ||
1596 | # asm 2: pshufd $0x93,<xmm7=%xmm7,>xmm13=%xmm13 | ||
1597 | pshufd $0x93,%xmm7,%xmm13 | ||
1598 | |||
1599 | # qhasm: xmm14 = shuffle dwords of xmm2 by 0x93 | ||
1600 | # asm 1: pshufd $0x93,<xmm2=int6464#3,>xmm14=int6464#15 | ||
1601 | # asm 2: pshufd $0x93,<xmm2=%xmm2,>xmm14=%xmm14 | ||
1602 | pshufd $0x93,%xmm2,%xmm14 | ||
1603 | |||
1604 | # qhasm: xmm15 = shuffle dwords of xmm5 by 0x93 | ||
1605 | # asm 1: pshufd $0x93,<xmm5=int6464#6,>xmm15=int6464#16 | ||
1606 | # asm 2: pshufd $0x93,<xmm5=%xmm5,>xmm15=%xmm15 | ||
1607 | pshufd $0x93,%xmm5,%xmm15 | ||
1608 | |||
1609 | # qhasm: xmm0 ^= xmm8 | ||
1610 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
1611 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
1612 | pxor %xmm8,%xmm0 | ||
1613 | |||
1614 | # qhasm: xmm1 ^= xmm9 | ||
1615 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
1616 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
1617 | pxor %xmm9,%xmm1 | ||
1618 | |||
1619 | # qhasm: xmm4 ^= xmm10 | ||
1620 | # asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5 | ||
1621 | # asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4 | ||
1622 | pxor %xmm10,%xmm4 | ||
1623 | |||
1624 | # qhasm: xmm6 ^= xmm11 | ||
1625 | # asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7 | ||
1626 | # asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6 | ||
1627 | pxor %xmm11,%xmm6 | ||
1628 | |||
1629 | # qhasm: xmm3 ^= xmm12 | ||
1630 | # asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4 | ||
1631 | # asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3 | ||
1632 | pxor %xmm12,%xmm3 | ||
1633 | |||
1634 | # qhasm: xmm7 ^= xmm13 | ||
1635 | # asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8 | ||
1636 | # asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7 | ||
1637 | pxor %xmm13,%xmm7 | ||
1638 | |||
1639 | # qhasm: xmm2 ^= xmm14 | ||
1640 | # asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3 | ||
1641 | # asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2 | ||
1642 | pxor %xmm14,%xmm2 | ||
1643 | |||
1644 | # qhasm: xmm5 ^= xmm15 | ||
1645 | # asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6 | ||
1646 | # asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5 | ||
1647 | pxor %xmm15,%xmm5 | ||
1648 | |||
1649 | # qhasm: xmm8 ^= xmm5 | ||
1650 | # asm 1: pxor <xmm5=int6464#6,<xmm8=int6464#9 | ||
1651 | # asm 2: pxor <xmm5=%xmm5,<xmm8=%xmm8 | ||
1652 | pxor %xmm5,%xmm8 | ||
1653 | |||
1654 | # qhasm: xmm9 ^= xmm0 | ||
1655 | # asm 1: pxor <xmm0=int6464#1,<xmm9=int6464#10 | ||
1656 | # asm 2: pxor <xmm0=%xmm0,<xmm9=%xmm9 | ||
1657 | pxor %xmm0,%xmm9 | ||
1658 | |||
1659 | # qhasm: xmm10 ^= xmm1 | ||
1660 | # asm 1: pxor <xmm1=int6464#2,<xmm10=int6464#11 | ||
1661 | # asm 2: pxor <xmm1=%xmm1,<xmm10=%xmm10 | ||
1662 | pxor %xmm1,%xmm10 | ||
1663 | |||
1664 | # qhasm: xmm9 ^= xmm5 | ||
1665 | # asm 1: pxor <xmm5=int6464#6,<xmm9=int6464#10 | ||
1666 | # asm 2: pxor <xmm5=%xmm5,<xmm9=%xmm9 | ||
1667 | pxor %xmm5,%xmm9 | ||
1668 | |||
1669 | # qhasm: xmm11 ^= xmm4 | ||
1670 | # asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#12 | ||
1671 | # asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm11 | ||
1672 | pxor %xmm4,%xmm11 | ||
1673 | |||
1674 | # qhasm: xmm12 ^= xmm6 | ||
1675 | # asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#13 | ||
1676 | # asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm12 | ||
1677 | pxor %xmm6,%xmm12 | ||
1678 | |||
1679 | # qhasm: xmm13 ^= xmm3 | ||
1680 | # asm 1: pxor <xmm3=int6464#4,<xmm13=int6464#14 | ||
1681 | # asm 2: pxor <xmm3=%xmm3,<xmm13=%xmm13 | ||
1682 | pxor %xmm3,%xmm13 | ||
1683 | |||
1684 | # qhasm: xmm11 ^= xmm5 | ||
1685 | # asm 1: pxor <xmm5=int6464#6,<xmm11=int6464#12 | ||
1686 | # asm 2: pxor <xmm5=%xmm5,<xmm11=%xmm11 | ||
1687 | pxor %xmm5,%xmm11 | ||
1688 | |||
1689 | # qhasm: xmm14 ^= xmm7 | ||
1690 | # asm 1: pxor <xmm7=int6464#8,<xmm14=int6464#15 | ||
1691 | # asm 2: pxor <xmm7=%xmm7,<xmm14=%xmm14 | ||
1692 | pxor %xmm7,%xmm14 | ||
1693 | |||
1694 | # qhasm: xmm15 ^= xmm2 | ||
1695 | # asm 1: pxor <xmm2=int6464#3,<xmm15=int6464#16 | ||
1696 | # asm 2: pxor <xmm2=%xmm2,<xmm15=%xmm15 | ||
1697 | pxor %xmm2,%xmm15 | ||
1698 | |||
1699 | # qhasm: xmm12 ^= xmm5 | ||
1700 | # asm 1: pxor <xmm5=int6464#6,<xmm12=int6464#13 | ||
1701 | # asm 2: pxor <xmm5=%xmm5,<xmm12=%xmm12 | ||
1702 | pxor %xmm5,%xmm12 | ||
1703 | |||
1704 | # qhasm: xmm0 = shuffle dwords of xmm0 by 0x4E | ||
1705 | # asm 1: pshufd $0x4E,<xmm0=int6464#1,>xmm0=int6464#1 | ||
1706 | # asm 2: pshufd $0x4E,<xmm0=%xmm0,>xmm0=%xmm0 | ||
1707 | pshufd $0x4E,%xmm0,%xmm0 | ||
1708 | |||
1709 | # qhasm: xmm1 = shuffle dwords of xmm1 by 0x4E | ||
1710 | # asm 1: pshufd $0x4E,<xmm1=int6464#2,>xmm1=int6464#2 | ||
1711 | # asm 2: pshufd $0x4E,<xmm1=%xmm1,>xmm1=%xmm1 | ||
1712 | pshufd $0x4E,%xmm1,%xmm1 | ||
1713 | |||
1714 | # qhasm: xmm4 = shuffle dwords of xmm4 by 0x4E | ||
1715 | # asm 1: pshufd $0x4E,<xmm4=int6464#5,>xmm4=int6464#5 | ||
1716 | # asm 2: pshufd $0x4E,<xmm4=%xmm4,>xmm4=%xmm4 | ||
1717 | pshufd $0x4E,%xmm4,%xmm4 | ||
1718 | |||
1719 | # qhasm: xmm6 = shuffle dwords of xmm6 by 0x4E | ||
1720 | # asm 1: pshufd $0x4E,<xmm6=int6464#7,>xmm6=int6464#7 | ||
1721 | # asm 2: pshufd $0x4E,<xmm6=%xmm6,>xmm6=%xmm6 | ||
1722 | pshufd $0x4E,%xmm6,%xmm6 | ||
1723 | |||
1724 | # qhasm: xmm3 = shuffle dwords of xmm3 by 0x4E | ||
1725 | # asm 1: pshufd $0x4E,<xmm3=int6464#4,>xmm3=int6464#4 | ||
1726 | # asm 2: pshufd $0x4E,<xmm3=%xmm3,>xmm3=%xmm3 | ||
1727 | pshufd $0x4E,%xmm3,%xmm3 | ||
1728 | |||
1729 | # qhasm: xmm7 = shuffle dwords of xmm7 by 0x4E | ||
1730 | # asm 1: pshufd $0x4E,<xmm7=int6464#8,>xmm7=int6464#8 | ||
1731 | # asm 2: pshufd $0x4E,<xmm7=%xmm7,>xmm7=%xmm7 | ||
1732 | pshufd $0x4E,%xmm7,%xmm7 | ||
1733 | |||
1734 | # qhasm: xmm2 = shuffle dwords of xmm2 by 0x4E | ||
1735 | # asm 1: pshufd $0x4E,<xmm2=int6464#3,>xmm2=int6464#3 | ||
1736 | # asm 2: pshufd $0x4E,<xmm2=%xmm2,>xmm2=%xmm2 | ||
1737 | pshufd $0x4E,%xmm2,%xmm2 | ||
1738 | |||
1739 | # qhasm: xmm5 = shuffle dwords of xmm5 by 0x4E | ||
1740 | # asm 1: pshufd $0x4E,<xmm5=int6464#6,>xmm5=int6464#6 | ||
1741 | # asm 2: pshufd $0x4E,<xmm5=%xmm5,>xmm5=%xmm5 | ||
1742 | pshufd $0x4E,%xmm5,%xmm5 | ||
1743 | |||
1744 | # qhasm: xmm8 ^= xmm0 | ||
1745 | # asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9 | ||
1746 | # asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8 | ||
1747 | pxor %xmm0,%xmm8 | ||
1748 | |||
1749 | # qhasm: xmm9 ^= xmm1 | ||
1750 | # asm 1: pxor <xmm1=int6464#2,<xmm9=int6464#10 | ||
1751 | # asm 2: pxor <xmm1=%xmm1,<xmm9=%xmm9 | ||
1752 | pxor %xmm1,%xmm9 | ||
1753 | |||
1754 | # qhasm: xmm10 ^= xmm4 | ||
1755 | # asm 1: pxor <xmm4=int6464#5,<xmm10=int6464#11 | ||
1756 | # asm 2: pxor <xmm4=%xmm4,<xmm10=%xmm10 | ||
1757 | pxor %xmm4,%xmm10 | ||
1758 | |||
1759 | # qhasm: xmm11 ^= xmm6 | ||
1760 | # asm 1: pxor <xmm6=int6464#7,<xmm11=int6464#12 | ||
1761 | # asm 2: pxor <xmm6=%xmm6,<xmm11=%xmm11 | ||
1762 | pxor %xmm6,%xmm11 | ||
1763 | |||
1764 | # qhasm: xmm12 ^= xmm3 | ||
1765 | # asm 1: pxor <xmm3=int6464#4,<xmm12=int6464#13 | ||
1766 | # asm 2: pxor <xmm3=%xmm3,<xmm12=%xmm12 | ||
1767 | pxor %xmm3,%xmm12 | ||
1768 | |||
1769 | # qhasm: xmm13 ^= xmm7 | ||
1770 | # asm 1: pxor <xmm7=int6464#8,<xmm13=int6464#14 | ||
1771 | # asm 2: pxor <xmm7=%xmm7,<xmm13=%xmm13 | ||
1772 | pxor %xmm7,%xmm13 | ||
1773 | |||
1774 | # qhasm: xmm14 ^= xmm2 | ||
1775 | # asm 1: pxor <xmm2=int6464#3,<xmm14=int6464#15 | ||
1776 | # asm 2: pxor <xmm2=%xmm2,<xmm14=%xmm14 | ||
1777 | pxor %xmm2,%xmm14 | ||
1778 | |||
1779 | # qhasm: xmm15 ^= xmm5 | ||
1780 | # asm 1: pxor <xmm5=int6464#6,<xmm15=int6464#16 | ||
1781 | # asm 2: pxor <xmm5=%xmm5,<xmm15=%xmm15 | ||
1782 | pxor %xmm5,%xmm15 | ||
1783 | |||
1784 | # qhasm: xmm8 ^= *(int128 *)(c + 128) | ||
1785 | # asm 1: pxor 128(<c=int64#5),<xmm8=int6464#9 | ||
1786 | # asm 2: pxor 128(<c=%r8),<xmm8=%xmm8 | ||
1787 | pxor 128(%r8),%xmm8 | ||
1788 | |||
1789 | # qhasm: shuffle bytes of xmm8 by SR | ||
1790 | # asm 1: pshufb SR,<xmm8=int6464#9 | ||
1791 | # asm 2: pshufb SR,<xmm8=%xmm8 | ||
1792 | pshufb SR,%xmm8 | ||
1793 | |||
1794 | # qhasm: xmm9 ^= *(int128 *)(c + 144) | ||
1795 | # asm 1: pxor 144(<c=int64#5),<xmm9=int6464#10 | ||
1796 | # asm 2: pxor 144(<c=%r8),<xmm9=%xmm9 | ||
1797 | pxor 144(%r8),%xmm9 | ||
1798 | |||
1799 | # qhasm: shuffle bytes of xmm9 by SR | ||
1800 | # asm 1: pshufb SR,<xmm9=int6464#10 | ||
1801 | # asm 2: pshufb SR,<xmm9=%xmm9 | ||
1802 | pshufb SR,%xmm9 | ||
1803 | |||
1804 | # qhasm: xmm10 ^= *(int128 *)(c + 160) | ||
1805 | # asm 1: pxor 160(<c=int64#5),<xmm10=int6464#11 | ||
1806 | # asm 2: pxor 160(<c=%r8),<xmm10=%xmm10 | ||
1807 | pxor 160(%r8),%xmm10 | ||
1808 | |||
1809 | # qhasm: shuffle bytes of xmm10 by SR | ||
1810 | # asm 1: pshufb SR,<xmm10=int6464#11 | ||
1811 | # asm 2: pshufb SR,<xmm10=%xmm10 | ||
1812 | pshufb SR,%xmm10 | ||
1813 | |||
1814 | # qhasm: xmm11 ^= *(int128 *)(c + 176) | ||
1815 | # asm 1: pxor 176(<c=int64#5),<xmm11=int6464#12 | ||
1816 | # asm 2: pxor 176(<c=%r8),<xmm11=%xmm11 | ||
1817 | pxor 176(%r8),%xmm11 | ||
1818 | |||
1819 | # qhasm: shuffle bytes of xmm11 by SR | ||
1820 | # asm 1: pshufb SR,<xmm11=int6464#12 | ||
1821 | # asm 2: pshufb SR,<xmm11=%xmm11 | ||
1822 | pshufb SR,%xmm11 | ||
1823 | |||
1824 | # qhasm: xmm12 ^= *(int128 *)(c + 192) | ||
1825 | # asm 1: pxor 192(<c=int64#5),<xmm12=int6464#13 | ||
1826 | # asm 2: pxor 192(<c=%r8),<xmm12=%xmm12 | ||
1827 | pxor 192(%r8),%xmm12 | ||
1828 | |||
1829 | # qhasm: shuffle bytes of xmm12 by SR | ||
1830 | # asm 1: pshufb SR,<xmm12=int6464#13 | ||
1831 | # asm 2: pshufb SR,<xmm12=%xmm12 | ||
1832 | pshufb SR,%xmm12 | ||
1833 | |||
1834 | # qhasm: xmm13 ^= *(int128 *)(c + 208) | ||
1835 | # asm 1: pxor 208(<c=int64#5),<xmm13=int6464#14 | ||
1836 | # asm 2: pxor 208(<c=%r8),<xmm13=%xmm13 | ||
1837 | pxor 208(%r8),%xmm13 | ||
1838 | |||
1839 | # qhasm: shuffle bytes of xmm13 by SR | ||
1840 | # asm 1: pshufb SR,<xmm13=int6464#14 | ||
1841 | # asm 2: pshufb SR,<xmm13=%xmm13 | ||
1842 | pshufb SR,%xmm13 | ||
1843 | |||
1844 | # qhasm: xmm14 ^= *(int128 *)(c + 224) | ||
1845 | # asm 1: pxor 224(<c=int64#5),<xmm14=int6464#15 | ||
1846 | # asm 2: pxor 224(<c=%r8),<xmm14=%xmm14 | ||
1847 | pxor 224(%r8),%xmm14 | ||
1848 | |||
1849 | # qhasm: shuffle bytes of xmm14 by SR | ||
1850 | # asm 1: pshufb SR,<xmm14=int6464#15 | ||
1851 | # asm 2: pshufb SR,<xmm14=%xmm14 | ||
1852 | pshufb SR,%xmm14 | ||
1853 | |||
1854 | # qhasm: xmm15 ^= *(int128 *)(c + 240) | ||
1855 | # asm 1: pxor 240(<c=int64#5),<xmm15=int6464#16 | ||
1856 | # asm 2: pxor 240(<c=%r8),<xmm15=%xmm15 | ||
1857 | pxor 240(%r8),%xmm15 | ||
1858 | |||
1859 | # qhasm: shuffle bytes of xmm15 by SR | ||
1860 | # asm 1: pshufb SR,<xmm15=int6464#16 | ||
1861 | # asm 2: pshufb SR,<xmm15=%xmm15 | ||
1862 | pshufb SR,%xmm15 | ||
1863 | |||
1864 | # qhasm: xmm13 ^= xmm14 | ||
1865 | # asm 1: pxor <xmm14=int6464#15,<xmm13=int6464#14 | ||
1866 | # asm 2: pxor <xmm14=%xmm14,<xmm13=%xmm13 | ||
1867 | pxor %xmm14,%xmm13 | ||
1868 | |||
1869 | # qhasm: xmm10 ^= xmm9 | ||
1870 | # asm 1: pxor <xmm9=int6464#10,<xmm10=int6464#11 | ||
1871 | # asm 2: pxor <xmm9=%xmm9,<xmm10=%xmm10 | ||
1872 | pxor %xmm9,%xmm10 | ||
1873 | |||
1874 | # qhasm: xmm13 ^= xmm8 | ||
1875 | # asm 1: pxor <xmm8=int6464#9,<xmm13=int6464#14 | ||
1876 | # asm 2: pxor <xmm8=%xmm8,<xmm13=%xmm13 | ||
1877 | pxor %xmm8,%xmm13 | ||
1878 | |||
1879 | # qhasm: xmm14 ^= xmm10 | ||
1880 | # asm 1: pxor <xmm10=int6464#11,<xmm14=int6464#15 | ||
1881 | # asm 2: pxor <xmm10=%xmm10,<xmm14=%xmm14 | ||
1882 | pxor %xmm10,%xmm14 | ||
1883 | |||
1884 | # qhasm: xmm11 ^= xmm8 | ||
1885 | # asm 1: pxor <xmm8=int6464#9,<xmm11=int6464#12 | ||
1886 | # asm 2: pxor <xmm8=%xmm8,<xmm11=%xmm11 | ||
1887 | pxor %xmm8,%xmm11 | ||
1888 | |||
1889 | # qhasm: xmm14 ^= xmm11 | ||
1890 | # asm 1: pxor <xmm11=int6464#12,<xmm14=int6464#15 | ||
1891 | # asm 2: pxor <xmm11=%xmm11,<xmm14=%xmm14 | ||
1892 | pxor %xmm11,%xmm14 | ||
1893 | |||
1894 | # qhasm: xmm11 ^= xmm15 | ||
1895 | # asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#12 | ||
1896 | # asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm11 | ||
1897 | pxor %xmm15,%xmm11 | ||
1898 | |||
1899 | # qhasm: xmm11 ^= xmm12 | ||
1900 | # asm 1: pxor <xmm12=int6464#13,<xmm11=int6464#12 | ||
1901 | # asm 2: pxor <xmm12=%xmm12,<xmm11=%xmm11 | ||
1902 | pxor %xmm12,%xmm11 | ||
1903 | |||
1904 | # qhasm: xmm15 ^= xmm13 | ||
1905 | # asm 1: pxor <xmm13=int6464#14,<xmm15=int6464#16 | ||
1906 | # asm 2: pxor <xmm13=%xmm13,<xmm15=%xmm15 | ||
1907 | pxor %xmm13,%xmm15 | ||
1908 | |||
1909 | # qhasm: xmm11 ^= xmm9 | ||
1910 | # asm 1: pxor <xmm9=int6464#10,<xmm11=int6464#12 | ||
1911 | # asm 2: pxor <xmm9=%xmm9,<xmm11=%xmm11 | ||
1912 | pxor %xmm9,%xmm11 | ||
1913 | |||
1914 | # qhasm: xmm12 ^= xmm13 | ||
1915 | # asm 1: pxor <xmm13=int6464#14,<xmm12=int6464#13 | ||
1916 | # asm 2: pxor <xmm13=%xmm13,<xmm12=%xmm12 | ||
1917 | pxor %xmm13,%xmm12 | ||
1918 | |||
1919 | # qhasm: xmm10 ^= xmm15 | ||
1920 | # asm 1: pxor <xmm15=int6464#16,<xmm10=int6464#11 | ||
1921 | # asm 2: pxor <xmm15=%xmm15,<xmm10=%xmm10 | ||
1922 | pxor %xmm15,%xmm10 | ||
1923 | |||
1924 | # qhasm: xmm9 ^= xmm13 | ||
1925 | # asm 1: pxor <xmm13=int6464#14,<xmm9=int6464#10 | ||
1926 | # asm 2: pxor <xmm13=%xmm13,<xmm9=%xmm9 | ||
1927 | pxor %xmm13,%xmm9 | ||
1928 | |||
1929 | # qhasm: xmm3 = xmm15 | ||
1930 | # asm 1: movdqa <xmm15=int6464#16,>xmm3=int6464#1 | ||
1931 | # asm 2: movdqa <xmm15=%xmm15,>xmm3=%xmm0 | ||
1932 | movdqa %xmm15,%xmm0 | ||
1933 | |||
1934 | # qhasm: xmm2 = xmm9 | ||
1935 | # asm 1: movdqa <xmm9=int6464#10,>xmm2=int6464#2 | ||
1936 | # asm 2: movdqa <xmm9=%xmm9,>xmm2=%xmm1 | ||
1937 | movdqa %xmm9,%xmm1 | ||
1938 | |||
1939 | # qhasm: xmm1 = xmm13 | ||
1940 | # asm 1: movdqa <xmm13=int6464#14,>xmm1=int6464#3 | ||
1941 | # asm 2: movdqa <xmm13=%xmm13,>xmm1=%xmm2 | ||
1942 | movdqa %xmm13,%xmm2 | ||
1943 | |||
1944 | # qhasm: xmm5 = xmm10 | ||
1945 | # asm 1: movdqa <xmm10=int6464#11,>xmm5=int6464#4 | ||
1946 | # asm 2: movdqa <xmm10=%xmm10,>xmm5=%xmm3 | ||
1947 | movdqa %xmm10,%xmm3 | ||
1948 | |||
1949 | # qhasm: xmm4 = xmm14 | ||
1950 | # asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#5 | ||
1951 | # asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm4 | ||
1952 | movdqa %xmm14,%xmm4 | ||
1953 | |||
1954 | # qhasm: xmm3 ^= xmm12 | ||
1955 | # asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#1 | ||
1956 | # asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm0 | ||
1957 | pxor %xmm12,%xmm0 | ||
1958 | |||
1959 | # qhasm: xmm2 ^= xmm10 | ||
1960 | # asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#2 | ||
1961 | # asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm1 | ||
1962 | pxor %xmm10,%xmm1 | ||
1963 | |||
1964 | # qhasm: xmm1 ^= xmm11 | ||
1965 | # asm 1: pxor <xmm11=int6464#12,<xmm1=int6464#3 | ||
1966 | # asm 2: pxor <xmm11=%xmm11,<xmm1=%xmm2 | ||
1967 | pxor %xmm11,%xmm2 | ||
1968 | |||
1969 | # qhasm: xmm5 ^= xmm12 | ||
1970 | # asm 1: pxor <xmm12=int6464#13,<xmm5=int6464#4 | ||
1971 | # asm 2: pxor <xmm12=%xmm12,<xmm5=%xmm3 | ||
1972 | pxor %xmm12,%xmm3 | ||
1973 | |||
1974 | # qhasm: xmm4 ^= xmm8 | ||
1975 | # asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#5 | ||
1976 | # asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm4 | ||
1977 | pxor %xmm8,%xmm4 | ||
1978 | |||
1979 | # qhasm: xmm6 = xmm3 | ||
1980 | # asm 1: movdqa <xmm3=int6464#1,>xmm6=int6464#6 | ||
1981 | # asm 2: movdqa <xmm3=%xmm0,>xmm6=%xmm5 | ||
1982 | movdqa %xmm0,%xmm5 | ||
1983 | |||
1984 | # qhasm: xmm0 = xmm2 | ||
1985 | # asm 1: movdqa <xmm2=int6464#2,>xmm0=int6464#7 | ||
1986 | # asm 2: movdqa <xmm2=%xmm1,>xmm0=%xmm6 | ||
1987 | movdqa %xmm1,%xmm6 | ||
1988 | |||
1989 | # qhasm: xmm7 = xmm3 | ||
1990 | # asm 1: movdqa <xmm3=int6464#1,>xmm7=int6464#8 | ||
1991 | # asm 2: movdqa <xmm3=%xmm0,>xmm7=%xmm7 | ||
1992 | movdqa %xmm0,%xmm7 | ||
1993 | |||
1994 | # qhasm: xmm2 |= xmm1 | ||
1995 | # asm 1: por <xmm1=int6464#3,<xmm2=int6464#2 | ||
1996 | # asm 2: por <xmm1=%xmm2,<xmm2=%xmm1 | ||
1997 | por %xmm2,%xmm1 | ||
1998 | |||
1999 | # qhasm: xmm3 |= xmm4 | ||
2000 | # asm 1: por <xmm4=int6464#5,<xmm3=int6464#1 | ||
2001 | # asm 2: por <xmm4=%xmm4,<xmm3=%xmm0 | ||
2002 | por %xmm4,%xmm0 | ||
2003 | |||
2004 | # qhasm: xmm7 ^= xmm0 | ||
2005 | # asm 1: pxor <xmm0=int6464#7,<xmm7=int6464#8 | ||
2006 | # asm 2: pxor <xmm0=%xmm6,<xmm7=%xmm7 | ||
2007 | pxor %xmm6,%xmm7 | ||
2008 | |||
2009 | # qhasm: xmm6 &= xmm4 | ||
2010 | # asm 1: pand <xmm4=int6464#5,<xmm6=int6464#6 | ||
2011 | # asm 2: pand <xmm4=%xmm4,<xmm6=%xmm5 | ||
2012 | pand %xmm4,%xmm5 | ||
2013 | |||
2014 | # qhasm: xmm0 &= xmm1 | ||
2015 | # asm 1: pand <xmm1=int6464#3,<xmm0=int6464#7 | ||
2016 | # asm 2: pand <xmm1=%xmm2,<xmm0=%xmm6 | ||
2017 | pand %xmm2,%xmm6 | ||
2018 | |||
2019 | # qhasm: xmm4 ^= xmm1 | ||
2020 | # asm 1: pxor <xmm1=int6464#3,<xmm4=int6464#5 | ||
2021 | # asm 2: pxor <xmm1=%xmm2,<xmm4=%xmm4 | ||
2022 | pxor %xmm2,%xmm4 | ||
2023 | |||
2024 | # qhasm: xmm7 &= xmm4 | ||
2025 | # asm 1: pand <xmm4=int6464#5,<xmm7=int6464#8 | ||
2026 | # asm 2: pand <xmm4=%xmm4,<xmm7=%xmm7 | ||
2027 | pand %xmm4,%xmm7 | ||
2028 | |||
2029 | # qhasm: xmm4 = xmm11 | ||
2030 | # asm 1: movdqa <xmm11=int6464#12,>xmm4=int6464#3 | ||
2031 | # asm 2: movdqa <xmm11=%xmm11,>xmm4=%xmm2 | ||
2032 | movdqa %xmm11,%xmm2 | ||
2033 | |||
2034 | # qhasm: xmm4 ^= xmm8 | ||
2035 | # asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#3 | ||
2036 | # asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm2 | ||
2037 | pxor %xmm8,%xmm2 | ||
2038 | |||
2039 | # qhasm: xmm5 &= xmm4 | ||
2040 | # asm 1: pand <xmm4=int6464#3,<xmm5=int6464#4 | ||
2041 | # asm 2: pand <xmm4=%xmm2,<xmm5=%xmm3 | ||
2042 | pand %xmm2,%xmm3 | ||
2043 | |||
2044 | # qhasm: xmm3 ^= xmm5 | ||
2045 | # asm 1: pxor <xmm5=int6464#4,<xmm3=int6464#1 | ||
2046 | # asm 2: pxor <xmm5=%xmm3,<xmm3=%xmm0 | ||
2047 | pxor %xmm3,%xmm0 | ||
2048 | |||
2049 | # qhasm: xmm2 ^= xmm5 | ||
2050 | # asm 1: pxor <xmm5=int6464#4,<xmm2=int6464#2 | ||
2051 | # asm 2: pxor <xmm5=%xmm3,<xmm2=%xmm1 | ||
2052 | pxor %xmm3,%xmm1 | ||
2053 | |||
2054 | # qhasm: xmm5 = xmm15 | ||
2055 | # asm 1: movdqa <xmm15=int6464#16,>xmm5=int6464#3 | ||
2056 | # asm 2: movdqa <xmm15=%xmm15,>xmm5=%xmm2 | ||
2057 | movdqa %xmm15,%xmm2 | ||
2058 | |||
2059 | # qhasm: xmm5 ^= xmm9 | ||
2060 | # asm 1: pxor <xmm9=int6464#10,<xmm5=int6464#3 | ||
2061 | # asm 2: pxor <xmm9=%xmm9,<xmm5=%xmm2 | ||
2062 | pxor %xmm9,%xmm2 | ||
2063 | |||
2064 | # qhasm: xmm4 = xmm13 | ||
2065 | # asm 1: movdqa <xmm13=int6464#14,>xmm4=int6464#4 | ||
2066 | # asm 2: movdqa <xmm13=%xmm13,>xmm4=%xmm3 | ||
2067 | movdqa %xmm13,%xmm3 | ||
2068 | |||
2069 | # qhasm: xmm1 = xmm5 | ||
2070 | # asm 1: movdqa <xmm5=int6464#3,>xmm1=int6464#5 | ||
2071 | # asm 2: movdqa <xmm5=%xmm2,>xmm1=%xmm4 | ||
2072 | movdqa %xmm2,%xmm4 | ||
2073 | |||
2074 | # qhasm: xmm4 ^= xmm14 | ||
2075 | # asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#4 | ||
2076 | # asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm3 | ||
2077 | pxor %xmm14,%xmm3 | ||
2078 | |||
2079 | # qhasm: xmm1 |= xmm4 | ||
2080 | # asm 1: por <xmm4=int6464#4,<xmm1=int6464#5 | ||
2081 | # asm 2: por <xmm4=%xmm3,<xmm1=%xmm4 | ||
2082 | por %xmm3,%xmm4 | ||
2083 | |||
2084 | # qhasm: xmm5 &= xmm4 | ||
2085 | # asm 1: pand <xmm4=int6464#4,<xmm5=int6464#3 | ||
2086 | # asm 2: pand <xmm4=%xmm3,<xmm5=%xmm2 | ||
2087 | pand %xmm3,%xmm2 | ||
2088 | |||
2089 | # qhasm: xmm0 ^= xmm5 | ||
2090 | # asm 1: pxor <xmm5=int6464#3,<xmm0=int6464#7 | ||
2091 | # asm 2: pxor <xmm5=%xmm2,<xmm0=%xmm6 | ||
2092 | pxor %xmm2,%xmm6 | ||
2093 | |||
2094 | # qhasm: xmm3 ^= xmm7 | ||
2095 | # asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#1 | ||
2096 | # asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm0 | ||
2097 | pxor %xmm7,%xmm0 | ||
2098 | |||
2099 | # qhasm: xmm2 ^= xmm6 | ||
2100 | # asm 1: pxor <xmm6=int6464#6,<xmm2=int6464#2 | ||
2101 | # asm 2: pxor <xmm6=%xmm5,<xmm2=%xmm1 | ||
2102 | pxor %xmm5,%xmm1 | ||
2103 | |||
2104 | # qhasm: xmm1 ^= xmm7 | ||
2105 | # asm 1: pxor <xmm7=int6464#8,<xmm1=int6464#5 | ||
2106 | # asm 2: pxor <xmm7=%xmm7,<xmm1=%xmm4 | ||
2107 | pxor %xmm7,%xmm4 | ||
2108 | |||
2109 | # qhasm: xmm0 ^= xmm6 | ||
2110 | # asm 1: pxor <xmm6=int6464#6,<xmm0=int6464#7 | ||
2111 | # asm 2: pxor <xmm6=%xmm5,<xmm0=%xmm6 | ||
2112 | pxor %xmm5,%xmm6 | ||
2113 | |||
2114 | # qhasm: xmm1 ^= xmm6 | ||
2115 | # asm 1: pxor <xmm6=int6464#6,<xmm1=int6464#5 | ||
2116 | # asm 2: pxor <xmm6=%xmm5,<xmm1=%xmm4 | ||
2117 | pxor %xmm5,%xmm4 | ||
2118 | |||
2119 | # qhasm: xmm4 = xmm10 | ||
2120 | # asm 1: movdqa <xmm10=int6464#11,>xmm4=int6464#3 | ||
2121 | # asm 2: movdqa <xmm10=%xmm10,>xmm4=%xmm2 | ||
2122 | movdqa %xmm10,%xmm2 | ||
2123 | |||
2124 | # qhasm: xmm5 = xmm12 | ||
2125 | # asm 1: movdqa <xmm12=int6464#13,>xmm5=int6464#4 | ||
2126 | # asm 2: movdqa <xmm12=%xmm12,>xmm5=%xmm3 | ||
2127 | movdqa %xmm12,%xmm3 | ||
2128 | |||
2129 | # qhasm: xmm6 = xmm9 | ||
2130 | # asm 1: movdqa <xmm9=int6464#10,>xmm6=int6464#6 | ||
2131 | # asm 2: movdqa <xmm9=%xmm9,>xmm6=%xmm5 | ||
2132 | movdqa %xmm9,%xmm5 | ||
2133 | |||
2134 | # qhasm: xmm7 = xmm15 | ||
2135 | # asm 1: movdqa <xmm15=int6464#16,>xmm7=int6464#8 | ||
2136 | # asm 2: movdqa <xmm15=%xmm15,>xmm7=%xmm7 | ||
2137 | movdqa %xmm15,%xmm7 | ||
2138 | |||
2139 | # qhasm: xmm4 &= xmm11 | ||
2140 | # asm 1: pand <xmm11=int6464#12,<xmm4=int6464#3 | ||
2141 | # asm 2: pand <xmm11=%xmm11,<xmm4=%xmm2 | ||
2142 | pand %xmm11,%xmm2 | ||
2143 | |||
2144 | # qhasm: xmm5 &= xmm8 | ||
2145 | # asm 1: pand <xmm8=int6464#9,<xmm5=int6464#4 | ||
2146 | # asm 2: pand <xmm8=%xmm8,<xmm5=%xmm3 | ||
2147 | pand %xmm8,%xmm3 | ||
2148 | |||
2149 | # qhasm: xmm6 &= xmm13 | ||
2150 | # asm 1: pand <xmm13=int6464#14,<xmm6=int6464#6 | ||
2151 | # asm 2: pand <xmm13=%xmm13,<xmm6=%xmm5 | ||
2152 | pand %xmm13,%xmm5 | ||
2153 | |||
2154 | # qhasm: xmm7 |= xmm14 | ||
2155 | # asm 1: por <xmm14=int6464#15,<xmm7=int6464#8 | ||
2156 | # asm 2: por <xmm14=%xmm14,<xmm7=%xmm7 | ||
2157 | por %xmm14,%xmm7 | ||
2158 | |||
2159 | # qhasm: xmm3 ^= xmm4 | ||
2160 | # asm 1: pxor <xmm4=int6464#3,<xmm3=int6464#1 | ||
2161 | # asm 2: pxor <xmm4=%xmm2,<xmm3=%xmm0 | ||
2162 | pxor %xmm2,%xmm0 | ||
2163 | |||
2164 | # qhasm: xmm2 ^= xmm5 | ||
2165 | # asm 1: pxor <xmm5=int6464#4,<xmm2=int6464#2 | ||
2166 | # asm 2: pxor <xmm5=%xmm3,<xmm2=%xmm1 | ||
2167 | pxor %xmm3,%xmm1 | ||
2168 | |||
2169 | # qhasm: xmm1 ^= xmm6 | ||
2170 | # asm 1: pxor <xmm6=int6464#6,<xmm1=int6464#5 | ||
2171 | # asm 2: pxor <xmm6=%xmm5,<xmm1=%xmm4 | ||
2172 | pxor %xmm5,%xmm4 | ||
2173 | |||
2174 | # qhasm: xmm0 ^= xmm7 | ||
2175 | # asm 1: pxor <xmm7=int6464#8,<xmm0=int6464#7 | ||
2176 | # asm 2: pxor <xmm7=%xmm7,<xmm0=%xmm6 | ||
2177 | pxor %xmm7,%xmm6 | ||
2178 | |||
2179 | # qhasm: xmm4 = xmm3 | ||
2180 | # asm 1: movdqa <xmm3=int6464#1,>xmm4=int6464#3 | ||
2181 | # asm 2: movdqa <xmm3=%xmm0,>xmm4=%xmm2 | ||
2182 | movdqa %xmm0,%xmm2 | ||
2183 | |||
2184 | # qhasm: xmm4 ^= xmm2 | ||
2185 | # asm 1: pxor <xmm2=int6464#2,<xmm4=int6464#3 | ||
2186 | # asm 2: pxor <xmm2=%xmm1,<xmm4=%xmm2 | ||
2187 | pxor %xmm1,%xmm2 | ||
2188 | |||
2189 | # qhasm: xmm3 &= xmm1 | ||
2190 | # asm 1: pand <xmm1=int6464#5,<xmm3=int6464#1 | ||
2191 | # asm 2: pand <xmm1=%xmm4,<xmm3=%xmm0 | ||
2192 | pand %xmm4,%xmm0 | ||
2193 | |||
2194 | # qhasm: xmm6 = xmm0 | ||
2195 | # asm 1: movdqa <xmm0=int6464#7,>xmm6=int6464#4 | ||
2196 | # asm 2: movdqa <xmm0=%xmm6,>xmm6=%xmm3 | ||
2197 | movdqa %xmm6,%xmm3 | ||
2198 | |||
2199 | # qhasm: xmm6 ^= xmm3 | ||
2200 | # asm 1: pxor <xmm3=int6464#1,<xmm6=int6464#4 | ||
2201 | # asm 2: pxor <xmm3=%xmm0,<xmm6=%xmm3 | ||
2202 | pxor %xmm0,%xmm3 | ||
2203 | |||
2204 | # qhasm: xmm7 = xmm4 | ||
2205 | # asm 1: movdqa <xmm4=int6464#3,>xmm7=int6464#6 | ||
2206 | # asm 2: movdqa <xmm4=%xmm2,>xmm7=%xmm5 | ||
2207 | movdqa %xmm2,%xmm5 | ||
2208 | |||
2209 | # qhasm: xmm7 &= xmm6 | ||
2210 | # asm 1: pand <xmm6=int6464#4,<xmm7=int6464#6 | ||
2211 | # asm 2: pand <xmm6=%xmm3,<xmm7=%xmm5 | ||
2212 | pand %xmm3,%xmm5 | ||
2213 | |||
2214 | # qhasm: xmm7 ^= xmm2 | ||
2215 | # asm 1: pxor <xmm2=int6464#2,<xmm7=int6464#6 | ||
2216 | # asm 2: pxor <xmm2=%xmm1,<xmm7=%xmm5 | ||
2217 | pxor %xmm1,%xmm5 | ||
2218 | |||
2219 | # qhasm: xmm5 = xmm1 | ||
2220 | # asm 1: movdqa <xmm1=int6464#5,>xmm5=int6464#8 | ||
2221 | # asm 2: movdqa <xmm1=%xmm4,>xmm5=%xmm7 | ||
2222 | movdqa %xmm4,%xmm7 | ||
2223 | |||
2224 | # qhasm: xmm5 ^= xmm0 | ||
2225 | # asm 1: pxor <xmm0=int6464#7,<xmm5=int6464#8 | ||
2226 | # asm 2: pxor <xmm0=%xmm6,<xmm5=%xmm7 | ||
2227 | pxor %xmm6,%xmm7 | ||
2228 | |||
2229 | # qhasm: xmm3 ^= xmm2 | ||
2230 | # asm 1: pxor <xmm2=int6464#2,<xmm3=int6464#1 | ||
2231 | # asm 2: pxor <xmm2=%xmm1,<xmm3=%xmm0 | ||
2232 | pxor %xmm1,%xmm0 | ||
2233 | |||
2234 | # qhasm: xmm5 &= xmm3 | ||
2235 | # asm 1: pand <xmm3=int6464#1,<xmm5=int6464#8 | ||
2236 | # asm 2: pand <xmm3=%xmm0,<xmm5=%xmm7 | ||
2237 | pand %xmm0,%xmm7 | ||
2238 | |||
2239 | # qhasm: xmm5 ^= xmm0 | ||
2240 | # asm 1: pxor <xmm0=int6464#7,<xmm5=int6464#8 | ||
2241 | # asm 2: pxor <xmm0=%xmm6,<xmm5=%xmm7 | ||
2242 | pxor %xmm6,%xmm7 | ||
2243 | |||
2244 | # qhasm: xmm1 ^= xmm5 | ||
2245 | # asm 1: pxor <xmm5=int6464#8,<xmm1=int6464#5 | ||
2246 | # asm 2: pxor <xmm5=%xmm7,<xmm1=%xmm4 | ||
2247 | pxor %xmm7,%xmm4 | ||
2248 | |||
2249 | # qhasm: xmm2 = xmm6 | ||
2250 | # asm 1: movdqa <xmm6=int6464#4,>xmm2=int6464#1 | ||
2251 | # asm 2: movdqa <xmm6=%xmm3,>xmm2=%xmm0 | ||
2252 | movdqa %xmm3,%xmm0 | ||
2253 | |||
2254 | # qhasm: xmm2 ^= xmm5 | ||
2255 | # asm 1: pxor <xmm5=int6464#8,<xmm2=int6464#1 | ||
2256 | # asm 2: pxor <xmm5=%xmm7,<xmm2=%xmm0 | ||
2257 | pxor %xmm7,%xmm0 | ||
2258 | |||
2259 | # qhasm: xmm2 &= xmm0 | ||
2260 | # asm 1: pand <xmm0=int6464#7,<xmm2=int6464#1 | ||
2261 | # asm 2: pand <xmm0=%xmm6,<xmm2=%xmm0 | ||
2262 | pand %xmm6,%xmm0 | ||
2263 | |||
2264 | # qhasm: xmm1 ^= xmm2 | ||
2265 | # asm 1: pxor <xmm2=int6464#1,<xmm1=int6464#5 | ||
2266 | # asm 2: pxor <xmm2=%xmm0,<xmm1=%xmm4 | ||
2267 | pxor %xmm0,%xmm4 | ||
2268 | |||
2269 | # qhasm: xmm6 ^= xmm2 | ||
2270 | # asm 1: pxor <xmm2=int6464#1,<xmm6=int6464#4 | ||
2271 | # asm 2: pxor <xmm2=%xmm0,<xmm6=%xmm3 | ||
2272 | pxor %xmm0,%xmm3 | ||
2273 | |||
2274 | # qhasm: xmm6 &= xmm7 | ||
2275 | # asm 1: pand <xmm7=int6464#6,<xmm6=int6464#4 | ||
2276 | # asm 2: pand <xmm7=%xmm5,<xmm6=%xmm3 | ||
2277 | pand %xmm5,%xmm3 | ||
2278 | |||
2279 | # qhasm: xmm6 ^= xmm4 | ||
2280 | # asm 1: pxor <xmm4=int6464#3,<xmm6=int6464#4 | ||
2281 | # asm 2: pxor <xmm4=%xmm2,<xmm6=%xmm3 | ||
2282 | pxor %xmm2,%xmm3 | ||
2283 | |||
2284 | # qhasm: xmm4 = xmm14 | ||
2285 | # asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#1 | ||
2286 | # asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm0 | ||
2287 | movdqa %xmm14,%xmm0 | ||
2288 | |||
2289 | # qhasm: xmm0 = xmm13 | ||
2290 | # asm 1: movdqa <xmm13=int6464#14,>xmm0=int6464#2 | ||
2291 | # asm 2: movdqa <xmm13=%xmm13,>xmm0=%xmm1 | ||
2292 | movdqa %xmm13,%xmm1 | ||
2293 | |||
2294 | # qhasm: xmm2 = xmm7 | ||
2295 | # asm 1: movdqa <xmm7=int6464#6,>xmm2=int6464#3 | ||
2296 | # asm 2: movdqa <xmm7=%xmm5,>xmm2=%xmm2 | ||
2297 | movdqa %xmm5,%xmm2 | ||
2298 | |||
2299 | # qhasm: xmm2 ^= xmm6 | ||
2300 | # asm 1: pxor <xmm6=int6464#4,<xmm2=int6464#3 | ||
2301 | # asm 2: pxor <xmm6=%xmm3,<xmm2=%xmm2 | ||
2302 | pxor %xmm3,%xmm2 | ||
2303 | |||
2304 | # qhasm: xmm2 &= xmm14 | ||
2305 | # asm 1: pand <xmm14=int6464#15,<xmm2=int6464#3 | ||
2306 | # asm 2: pand <xmm14=%xmm14,<xmm2=%xmm2 | ||
2307 | pand %xmm14,%xmm2 | ||
2308 | |||
2309 | # qhasm: xmm14 ^= xmm13 | ||
2310 | # asm 1: pxor <xmm13=int6464#14,<xmm14=int6464#15 | ||
2311 | # asm 2: pxor <xmm13=%xmm13,<xmm14=%xmm14 | ||
2312 | pxor %xmm13,%xmm14 | ||
2313 | |||
2314 | # qhasm: xmm14 &= xmm6 | ||
2315 | # asm 1: pand <xmm6=int6464#4,<xmm14=int6464#15 | ||
2316 | # asm 2: pand <xmm6=%xmm3,<xmm14=%xmm14 | ||
2317 | pand %xmm3,%xmm14 | ||
2318 | |||
2319 | # qhasm: xmm13 &= xmm7 | ||
2320 | # asm 1: pand <xmm7=int6464#6,<xmm13=int6464#14 | ||
2321 | # asm 2: pand <xmm7=%xmm5,<xmm13=%xmm13 | ||
2322 | pand %xmm5,%xmm13 | ||
2323 | |||
2324 | # qhasm: xmm14 ^= xmm13 | ||
2325 | # asm 1: pxor <xmm13=int6464#14,<xmm14=int6464#15 | ||
2326 | # asm 2: pxor <xmm13=%xmm13,<xmm14=%xmm14 | ||
2327 | pxor %xmm13,%xmm14 | ||
2328 | |||
2329 | # qhasm: xmm13 ^= xmm2 | ||
2330 | # asm 1: pxor <xmm2=int6464#3,<xmm13=int6464#14 | ||
2331 | # asm 2: pxor <xmm2=%xmm2,<xmm13=%xmm13 | ||
2332 | pxor %xmm2,%xmm13 | ||
2333 | |||
2334 | # qhasm: xmm4 ^= xmm8 | ||
2335 | # asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#1 | ||
2336 | # asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm0 | ||
2337 | pxor %xmm8,%xmm0 | ||
2338 | |||
2339 | # qhasm: xmm0 ^= xmm11 | ||
2340 | # asm 1: pxor <xmm11=int6464#12,<xmm0=int6464#2 | ||
2341 | # asm 2: pxor <xmm11=%xmm11,<xmm0=%xmm1 | ||
2342 | pxor %xmm11,%xmm1 | ||
2343 | |||
2344 | # qhasm: xmm7 ^= xmm5 | ||
2345 | # asm 1: pxor <xmm5=int6464#8,<xmm7=int6464#6 | ||
2346 | # asm 2: pxor <xmm5=%xmm7,<xmm7=%xmm5 | ||
2347 | pxor %xmm7,%xmm5 | ||
2348 | |||
2349 | # qhasm: xmm6 ^= xmm1 | ||
2350 | # asm 1: pxor <xmm1=int6464#5,<xmm6=int6464#4 | ||
2351 | # asm 2: pxor <xmm1=%xmm4,<xmm6=%xmm3 | ||
2352 | pxor %xmm4,%xmm3 | ||
2353 | |||
2354 | # qhasm: xmm3 = xmm7 | ||
2355 | # asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3 | ||
2356 | # asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2 | ||
2357 | movdqa %xmm5,%xmm2 | ||
2358 | |||
2359 | # qhasm: xmm3 ^= xmm6 | ||
2360 | # asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3 | ||
2361 | # asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2 | ||
2362 | pxor %xmm3,%xmm2 | ||
2363 | |||
2364 | # qhasm: xmm3 &= xmm4 | ||
2365 | # asm 1: pand <xmm4=int6464#1,<xmm3=int6464#3 | ||
2366 | # asm 2: pand <xmm4=%xmm0,<xmm3=%xmm2 | ||
2367 | pand %xmm0,%xmm2 | ||
2368 | |||
2369 | # qhasm: xmm4 ^= xmm0 | ||
2370 | # asm 1: pxor <xmm0=int6464#2,<xmm4=int6464#1 | ||
2371 | # asm 2: pxor <xmm0=%xmm1,<xmm4=%xmm0 | ||
2372 | pxor %xmm1,%xmm0 | ||
2373 | |||
2374 | # qhasm: xmm4 &= xmm6 | ||
2375 | # asm 1: pand <xmm6=int6464#4,<xmm4=int6464#1 | ||
2376 | # asm 2: pand <xmm6=%xmm3,<xmm4=%xmm0 | ||
2377 | pand %xmm3,%xmm0 | ||
2378 | |||
2379 | # qhasm: xmm0 &= xmm7 | ||
2380 | # asm 1: pand <xmm7=int6464#6,<xmm0=int6464#2 | ||
2381 | # asm 2: pand <xmm7=%xmm5,<xmm0=%xmm1 | ||
2382 | pand %xmm5,%xmm1 | ||
2383 | |||
2384 | # qhasm: xmm0 ^= xmm4 | ||
2385 | # asm 1: pxor <xmm4=int6464#1,<xmm0=int6464#2 | ||
2386 | # asm 2: pxor <xmm4=%xmm0,<xmm0=%xmm1 | ||
2387 | pxor %xmm0,%xmm1 | ||
2388 | |||
2389 | # qhasm: xmm4 ^= xmm3 | ||
2390 | # asm 1: pxor <xmm3=int6464#3,<xmm4=int6464#1 | ||
2391 | # asm 2: pxor <xmm3=%xmm2,<xmm4=%xmm0 | ||
2392 | pxor %xmm2,%xmm0 | ||
2393 | |||
2394 | # qhasm: xmm2 = xmm5 | ||
2395 | # asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3 | ||
2396 | # asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2 | ||
2397 | movdqa %xmm7,%xmm2 | ||
2398 | |||
2399 | # qhasm: xmm2 ^= xmm1 | ||
2400 | # asm 1: pxor <xmm1=int6464#5,<xmm2=int6464#3 | ||
2401 | # asm 2: pxor <xmm1=%xmm4,<xmm2=%xmm2 | ||
2402 | pxor %xmm4,%xmm2 | ||
2403 | |||
2404 | # qhasm: xmm2 &= xmm8 | ||
2405 | # asm 1: pand <xmm8=int6464#9,<xmm2=int6464#3 | ||
2406 | # asm 2: pand <xmm8=%xmm8,<xmm2=%xmm2 | ||
2407 | pand %xmm8,%xmm2 | ||
2408 | |||
2409 | # qhasm: xmm8 ^= xmm11 | ||
2410 | # asm 1: pxor <xmm11=int6464#12,<xmm8=int6464#9 | ||
2411 | # asm 2: pxor <xmm11=%xmm11,<xmm8=%xmm8 | ||
2412 | pxor %xmm11,%xmm8 | ||
2413 | |||
2414 | # qhasm: xmm8 &= xmm1 | ||
2415 | # asm 1: pand <xmm1=int6464#5,<xmm8=int6464#9 | ||
2416 | # asm 2: pand <xmm1=%xmm4,<xmm8=%xmm8 | ||
2417 | pand %xmm4,%xmm8 | ||
2418 | |||
2419 | # qhasm: xmm11 &= xmm5 | ||
2420 | # asm 1: pand <xmm5=int6464#8,<xmm11=int6464#12 | ||
2421 | # asm 2: pand <xmm5=%xmm7,<xmm11=%xmm11 | ||
2422 | pand %xmm7,%xmm11 | ||
2423 | |||
2424 | # qhasm: xmm8 ^= xmm11 | ||
2425 | # asm 1: pxor <xmm11=int6464#12,<xmm8=int6464#9 | ||
2426 | # asm 2: pxor <xmm11=%xmm11,<xmm8=%xmm8 | ||
2427 | pxor %xmm11,%xmm8 | ||
2428 | |||
2429 | # qhasm: xmm11 ^= xmm2 | ||
2430 | # asm 1: pxor <xmm2=int6464#3,<xmm11=int6464#12 | ||
2431 | # asm 2: pxor <xmm2=%xmm2,<xmm11=%xmm11 | ||
2432 | pxor %xmm2,%xmm11 | ||
2433 | |||
2434 | # qhasm: xmm14 ^= xmm4 | ||
2435 | # asm 1: pxor <xmm4=int6464#1,<xmm14=int6464#15 | ||
2436 | # asm 2: pxor <xmm4=%xmm0,<xmm14=%xmm14 | ||
2437 | pxor %xmm0,%xmm14 | ||
2438 | |||
2439 | # qhasm: xmm8 ^= xmm4 | ||
2440 | # asm 1: pxor <xmm4=int6464#1,<xmm8=int6464#9 | ||
2441 | # asm 2: pxor <xmm4=%xmm0,<xmm8=%xmm8 | ||
2442 | pxor %xmm0,%xmm8 | ||
2443 | |||
2444 | # qhasm: xmm13 ^= xmm0 | ||
2445 | # asm 1: pxor <xmm0=int6464#2,<xmm13=int6464#14 | ||
2446 | # asm 2: pxor <xmm0=%xmm1,<xmm13=%xmm13 | ||
2447 | pxor %xmm1,%xmm13 | ||
2448 | |||
2449 | # qhasm: xmm11 ^= xmm0 | ||
2450 | # asm 1: pxor <xmm0=int6464#2,<xmm11=int6464#12 | ||
2451 | # asm 2: pxor <xmm0=%xmm1,<xmm11=%xmm11 | ||
2452 | pxor %xmm1,%xmm11 | ||
2453 | |||
2454 | # qhasm: xmm4 = xmm15 | ||
2455 | # asm 1: movdqa <xmm15=int6464#16,>xmm4=int6464#1 | ||
2456 | # asm 2: movdqa <xmm15=%xmm15,>xmm4=%xmm0 | ||
2457 | movdqa %xmm15,%xmm0 | ||
2458 | |||
2459 | # qhasm: xmm0 = xmm9 | ||
2460 | # asm 1: movdqa <xmm9=int6464#10,>xmm0=int6464#2 | ||
2461 | # asm 2: movdqa <xmm9=%xmm9,>xmm0=%xmm1 | ||
2462 | movdqa %xmm9,%xmm1 | ||
2463 | |||
2464 | # qhasm: xmm4 ^= xmm12 | ||
2465 | # asm 1: pxor <xmm12=int6464#13,<xmm4=int6464#1 | ||
2466 | # asm 2: pxor <xmm12=%xmm12,<xmm4=%xmm0 | ||
2467 | pxor %xmm12,%xmm0 | ||
2468 | |||
2469 | # qhasm: xmm0 ^= xmm10 | ||
2470 | # asm 1: pxor <xmm10=int6464#11,<xmm0=int6464#2 | ||
2471 | # asm 2: pxor <xmm10=%xmm10,<xmm0=%xmm1 | ||
2472 | pxor %xmm10,%xmm1 | ||
2473 | |||
2474 | # qhasm: xmm3 = xmm7 | ||
2475 | # asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3 | ||
2476 | # asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2 | ||
2477 | movdqa %xmm5,%xmm2 | ||
2478 | |||
2479 | # qhasm: xmm3 ^= xmm6 | ||
2480 | # asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3 | ||
2481 | # asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2 | ||
2482 | pxor %xmm3,%xmm2 | ||
2483 | |||
2484 | # qhasm: xmm3 &= xmm4 | ||
2485 | # asm 1: pand <xmm4=int6464#1,<xmm3=int6464#3 | ||
2486 | # asm 2: pand <xmm4=%xmm0,<xmm3=%xmm2 | ||
2487 | pand %xmm0,%xmm2 | ||
2488 | |||
2489 | # qhasm: xmm4 ^= xmm0 | ||
2490 | # asm 1: pxor <xmm0=int6464#2,<xmm4=int6464#1 | ||
2491 | # asm 2: pxor <xmm0=%xmm1,<xmm4=%xmm0 | ||
2492 | pxor %xmm1,%xmm0 | ||
2493 | |||
2494 | # qhasm: xmm4 &= xmm6 | ||
2495 | # asm 1: pand <xmm6=int6464#4,<xmm4=int6464#1 | ||
2496 | # asm 2: pand <xmm6=%xmm3,<xmm4=%xmm0 | ||
2497 | pand %xmm3,%xmm0 | ||
2498 | |||
2499 | # qhasm: xmm0 &= xmm7 | ||
2500 | # asm 1: pand <xmm7=int6464#6,<xmm0=int6464#2 | ||
2501 | # asm 2: pand <xmm7=%xmm5,<xmm0=%xmm1 | ||
2502 | pand %xmm5,%xmm1 | ||
2503 | |||
2504 | # qhasm: xmm0 ^= xmm4 | ||
2505 | # asm 1: pxor <xmm4=int6464#1,<xmm0=int6464#2 | ||
2506 | # asm 2: pxor <xmm4=%xmm0,<xmm0=%xmm1 | ||
2507 | pxor %xmm0,%xmm1 | ||
2508 | |||
2509 | # qhasm: xmm4 ^= xmm3 | ||
2510 | # asm 1: pxor <xmm3=int6464#3,<xmm4=int6464#1 | ||
2511 | # asm 2: pxor <xmm3=%xmm2,<xmm4=%xmm0 | ||
2512 | pxor %xmm2,%xmm0 | ||
2513 | |||
2514 | # qhasm: xmm2 = xmm5 | ||
2515 | # asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3 | ||
2516 | # asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2 | ||
2517 | movdqa %xmm7,%xmm2 | ||
2518 | |||
2519 | # qhasm: xmm2 ^= xmm1 | ||
2520 | # asm 1: pxor <xmm1=int6464#5,<xmm2=int6464#3 | ||
2521 | # asm 2: pxor <xmm1=%xmm4,<xmm2=%xmm2 | ||
2522 | pxor %xmm4,%xmm2 | ||
2523 | |||
2524 | # qhasm: xmm2 &= xmm12 | ||
2525 | # asm 1: pand <xmm12=int6464#13,<xmm2=int6464#3 | ||
2526 | # asm 2: pand <xmm12=%xmm12,<xmm2=%xmm2 | ||
2527 | pand %xmm12,%xmm2 | ||
2528 | |||
2529 | # qhasm: xmm12 ^= xmm10 | ||
2530 | # asm 1: pxor <xmm10=int6464#11,<xmm12=int6464#13 | ||
2531 | # asm 2: pxor <xmm10=%xmm10,<xmm12=%xmm12 | ||
2532 | pxor %xmm10,%xmm12 | ||
2533 | |||
2534 | # qhasm: xmm12 &= xmm1 | ||
2535 | # asm 1: pand <xmm1=int6464#5,<xmm12=int6464#13 | ||
2536 | # asm 2: pand <xmm1=%xmm4,<xmm12=%xmm12 | ||
2537 | pand %xmm4,%xmm12 | ||
2538 | |||
2539 | # qhasm: xmm10 &= xmm5 | ||
2540 | # asm 1: pand <xmm5=int6464#8,<xmm10=int6464#11 | ||
2541 | # asm 2: pand <xmm5=%xmm7,<xmm10=%xmm10 | ||
2542 | pand %xmm7,%xmm10 | ||
2543 | |||
2544 | # qhasm: xmm12 ^= xmm10 | ||
2545 | # asm 1: pxor <xmm10=int6464#11,<xmm12=int6464#13 | ||
2546 | # asm 2: pxor <xmm10=%xmm10,<xmm12=%xmm12 | ||
2547 | pxor %xmm10,%xmm12 | ||
2548 | |||
2549 | # qhasm: xmm10 ^= xmm2 | ||
2550 | # asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#11 | ||
2551 | # asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm10 | ||
2552 | pxor %xmm2,%xmm10 | ||
2553 | |||
2554 | # qhasm: xmm7 ^= xmm5 | ||
2555 | # asm 1: pxor <xmm5=int6464#8,<xmm7=int6464#6 | ||
2556 | # asm 2: pxor <xmm5=%xmm7,<xmm7=%xmm5 | ||
2557 | pxor %xmm7,%xmm5 | ||
2558 | |||
2559 | # qhasm: xmm6 ^= xmm1 | ||
2560 | # asm 1: pxor <xmm1=int6464#5,<xmm6=int6464#4 | ||
2561 | # asm 2: pxor <xmm1=%xmm4,<xmm6=%xmm3 | ||
2562 | pxor %xmm4,%xmm3 | ||
2563 | |||
2564 | # qhasm: xmm3 = xmm7 | ||
2565 | # asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3 | ||
2566 | # asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2 | ||
2567 | movdqa %xmm5,%xmm2 | ||
2568 | |||
2569 | # qhasm: xmm3 ^= xmm6 | ||
2570 | # asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3 | ||
2571 | # asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2 | ||
2572 | pxor %xmm3,%xmm2 | ||
2573 | |||
2574 | # qhasm: xmm3 &= xmm15 | ||
2575 | # asm 1: pand <xmm15=int6464#16,<xmm3=int6464#3 | ||
2576 | # asm 2: pand <xmm15=%xmm15,<xmm3=%xmm2 | ||
2577 | pand %xmm15,%xmm2 | ||
2578 | |||
2579 | # qhasm: xmm15 ^= xmm9 | ||
2580 | # asm 1: pxor <xmm9=int6464#10,<xmm15=int6464#16 | ||
2581 | # asm 2: pxor <xmm9=%xmm9,<xmm15=%xmm15 | ||
2582 | pxor %xmm9,%xmm15 | ||
2583 | |||
2584 | # qhasm: xmm15 &= xmm6 | ||
2585 | # asm 1: pand <xmm6=int6464#4,<xmm15=int6464#16 | ||
2586 | # asm 2: pand <xmm6=%xmm3,<xmm15=%xmm15 | ||
2587 | pand %xmm3,%xmm15 | ||
2588 | |||
2589 | # qhasm: xmm9 &= xmm7 | ||
2590 | # asm 1: pand <xmm7=int6464#6,<xmm9=int6464#10 | ||
2591 | # asm 2: pand <xmm7=%xmm5,<xmm9=%xmm9 | ||
2592 | pand %xmm5,%xmm9 | ||
2593 | |||
2594 | # qhasm: xmm15 ^= xmm9 | ||
2595 | # asm 1: pxor <xmm9=int6464#10,<xmm15=int6464#16 | ||
2596 | # asm 2: pxor <xmm9=%xmm9,<xmm15=%xmm15 | ||
2597 | pxor %xmm9,%xmm15 | ||
2598 | |||
2599 | # qhasm: xmm9 ^= xmm3 | ||
2600 | # asm 1: pxor <xmm3=int6464#3,<xmm9=int6464#10 | ||
2601 | # asm 2: pxor <xmm3=%xmm2,<xmm9=%xmm9 | ||
2602 | pxor %xmm2,%xmm9 | ||
2603 | |||
2604 | # qhasm: xmm15 ^= xmm4 | ||
2605 | # asm 1: pxor <xmm4=int6464#1,<xmm15=int6464#16 | ||
2606 | # asm 2: pxor <xmm4=%xmm0,<xmm15=%xmm15 | ||
2607 | pxor %xmm0,%xmm15 | ||
2608 | |||
2609 | # qhasm: xmm12 ^= xmm4 | ||
2610 | # asm 1: pxor <xmm4=int6464#1,<xmm12=int6464#13 | ||
2611 | # asm 2: pxor <xmm4=%xmm0,<xmm12=%xmm12 | ||
2612 | pxor %xmm0,%xmm12 | ||
2613 | |||
2614 | # qhasm: xmm9 ^= xmm0 | ||
2615 | # asm 1: pxor <xmm0=int6464#2,<xmm9=int6464#10 | ||
2616 | # asm 2: pxor <xmm0=%xmm1,<xmm9=%xmm9 | ||
2617 | pxor %xmm1,%xmm9 | ||
2618 | |||
2619 | # qhasm: xmm10 ^= xmm0 | ||
2620 | # asm 1: pxor <xmm0=int6464#2,<xmm10=int6464#11 | ||
2621 | # asm 2: pxor <xmm0=%xmm1,<xmm10=%xmm10 | ||
2622 | pxor %xmm1,%xmm10 | ||
2623 | |||
2624 | # qhasm: xmm15 ^= xmm8 | ||
2625 | # asm 1: pxor <xmm8=int6464#9,<xmm15=int6464#16 | ||
2626 | # asm 2: pxor <xmm8=%xmm8,<xmm15=%xmm15 | ||
2627 | pxor %xmm8,%xmm15 | ||
2628 | |||
2629 | # qhasm: xmm9 ^= xmm14 | ||
2630 | # asm 1: pxor <xmm14=int6464#15,<xmm9=int6464#10 | ||
2631 | # asm 2: pxor <xmm14=%xmm14,<xmm9=%xmm9 | ||
2632 | pxor %xmm14,%xmm9 | ||
2633 | |||
2634 | # qhasm: xmm12 ^= xmm15 | ||
2635 | # asm 1: pxor <xmm15=int6464#16,<xmm12=int6464#13 | ||
2636 | # asm 2: pxor <xmm15=%xmm15,<xmm12=%xmm12 | ||
2637 | pxor %xmm15,%xmm12 | ||
2638 | |||
2639 | # qhasm: xmm14 ^= xmm8 | ||
2640 | # asm 1: pxor <xmm8=int6464#9,<xmm14=int6464#15 | ||
2641 | # asm 2: pxor <xmm8=%xmm8,<xmm14=%xmm14 | ||
2642 | pxor %xmm8,%xmm14 | ||
2643 | |||
2644 | # qhasm: xmm8 ^= xmm9 | ||
2645 | # asm 1: pxor <xmm9=int6464#10,<xmm8=int6464#9 | ||
2646 | # asm 2: pxor <xmm9=%xmm9,<xmm8=%xmm8 | ||
2647 | pxor %xmm9,%xmm8 | ||
2648 | |||
2649 | # qhasm: xmm9 ^= xmm13 | ||
2650 | # asm 1: pxor <xmm13=int6464#14,<xmm9=int6464#10 | ||
2651 | # asm 2: pxor <xmm13=%xmm13,<xmm9=%xmm9 | ||
2652 | pxor %xmm13,%xmm9 | ||
2653 | |||
2654 | # qhasm: xmm13 ^= xmm10 | ||
2655 | # asm 1: pxor <xmm10=int6464#11,<xmm13=int6464#14 | ||
2656 | # asm 2: pxor <xmm10=%xmm10,<xmm13=%xmm13 | ||
2657 | pxor %xmm10,%xmm13 | ||
2658 | |||
2659 | # qhasm: xmm12 ^= xmm13 | ||
2660 | # asm 1: pxor <xmm13=int6464#14,<xmm12=int6464#13 | ||
2661 | # asm 2: pxor <xmm13=%xmm13,<xmm12=%xmm12 | ||
2662 | pxor %xmm13,%xmm12 | ||
2663 | |||
2664 | # qhasm: xmm10 ^= xmm11 | ||
2665 | # asm 1: pxor <xmm11=int6464#12,<xmm10=int6464#11 | ||
2666 | # asm 2: pxor <xmm11=%xmm11,<xmm10=%xmm10 | ||
2667 | pxor %xmm11,%xmm10 | ||
2668 | |||
2669 | # qhasm: xmm11 ^= xmm13 | ||
2670 | # asm 1: pxor <xmm13=int6464#14,<xmm11=int6464#12 | ||
2671 | # asm 2: pxor <xmm13=%xmm13,<xmm11=%xmm11 | ||
2672 | pxor %xmm13,%xmm11 | ||
2673 | |||
2674 | # qhasm: xmm14 ^= xmm11 | ||
2675 | # asm 1: pxor <xmm11=int6464#12,<xmm14=int6464#15 | ||
2676 | # asm 2: pxor <xmm11=%xmm11,<xmm14=%xmm14 | ||
2677 | pxor %xmm11,%xmm14 | ||
2678 | |||
2679 | # qhasm: xmm0 = shuffle dwords of xmm8 by 0x93 | ||
2680 | # asm 1: pshufd $0x93,<xmm8=int6464#9,>xmm0=int6464#1 | ||
2681 | # asm 2: pshufd $0x93,<xmm8=%xmm8,>xmm0=%xmm0 | ||
2682 | pshufd $0x93,%xmm8,%xmm0 | ||
2683 | |||
2684 | # qhasm: xmm1 = shuffle dwords of xmm9 by 0x93 | ||
2685 | # asm 1: pshufd $0x93,<xmm9=int6464#10,>xmm1=int6464#2 | ||
2686 | # asm 2: pshufd $0x93,<xmm9=%xmm9,>xmm1=%xmm1 | ||
2687 | pshufd $0x93,%xmm9,%xmm1 | ||
2688 | |||
2689 | # qhasm: xmm2 = shuffle dwords of xmm12 by 0x93 | ||
2690 | # asm 1: pshufd $0x93,<xmm12=int6464#13,>xmm2=int6464#3 | ||
2691 | # asm 2: pshufd $0x93,<xmm12=%xmm12,>xmm2=%xmm2 | ||
2692 | pshufd $0x93,%xmm12,%xmm2 | ||
2693 | |||
2694 | # qhasm: xmm3 = shuffle dwords of xmm14 by 0x93 | ||
2695 | # asm 1: pshufd $0x93,<xmm14=int6464#15,>xmm3=int6464#4 | ||
2696 | # asm 2: pshufd $0x93,<xmm14=%xmm14,>xmm3=%xmm3 | ||
2697 | pshufd $0x93,%xmm14,%xmm3 | ||
2698 | |||
2699 | # qhasm: xmm4 = shuffle dwords of xmm11 by 0x93 | ||
2700 | # asm 1: pshufd $0x93,<xmm11=int6464#12,>xmm4=int6464#5 | ||
2701 | # asm 2: pshufd $0x93,<xmm11=%xmm11,>xmm4=%xmm4 | ||
2702 | pshufd $0x93,%xmm11,%xmm4 | ||
2703 | |||
2704 | # qhasm: xmm5 = shuffle dwords of xmm15 by 0x93 | ||
2705 | # asm 1: pshufd $0x93,<xmm15=int6464#16,>xmm5=int6464#6 | ||
2706 | # asm 2: pshufd $0x93,<xmm15=%xmm15,>xmm5=%xmm5 | ||
2707 | pshufd $0x93,%xmm15,%xmm5 | ||
2708 | |||
2709 | # qhasm: xmm6 = shuffle dwords of xmm10 by 0x93 | ||
2710 | # asm 1: pshufd $0x93,<xmm10=int6464#11,>xmm6=int6464#7 | ||
2711 | # asm 2: pshufd $0x93,<xmm10=%xmm10,>xmm6=%xmm6 | ||
2712 | pshufd $0x93,%xmm10,%xmm6 | ||
2713 | |||
2714 | # qhasm: xmm7 = shuffle dwords of xmm13 by 0x93 | ||
2715 | # asm 1: pshufd $0x93,<xmm13=int6464#14,>xmm7=int6464#8 | ||
2716 | # asm 2: pshufd $0x93,<xmm13=%xmm13,>xmm7=%xmm7 | ||
2717 | pshufd $0x93,%xmm13,%xmm7 | ||
2718 | |||
2719 | # qhasm: xmm8 ^= xmm0 | ||
2720 | # asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9 | ||
2721 | # asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8 | ||
2722 | pxor %xmm0,%xmm8 | ||
2723 | |||
2724 | # qhasm: xmm9 ^= xmm1 | ||
2725 | # asm 1: pxor <xmm1=int6464#2,<xmm9=int6464#10 | ||
2726 | # asm 2: pxor <xmm1=%xmm1,<xmm9=%xmm9 | ||
2727 | pxor %xmm1,%xmm9 | ||
2728 | |||
2729 | # qhasm: xmm12 ^= xmm2 | ||
2730 | # asm 1: pxor <xmm2=int6464#3,<xmm12=int6464#13 | ||
2731 | # asm 2: pxor <xmm2=%xmm2,<xmm12=%xmm12 | ||
2732 | pxor %xmm2,%xmm12 | ||
2733 | |||
2734 | # qhasm: xmm14 ^= xmm3 | ||
2735 | # asm 1: pxor <xmm3=int6464#4,<xmm14=int6464#15 | ||
2736 | # asm 2: pxor <xmm3=%xmm3,<xmm14=%xmm14 | ||
2737 | pxor %xmm3,%xmm14 | ||
2738 | |||
2739 | # qhasm: xmm11 ^= xmm4 | ||
2740 | # asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#12 | ||
2741 | # asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm11 | ||
2742 | pxor %xmm4,%xmm11 | ||
2743 | |||
2744 | # qhasm: xmm15 ^= xmm5 | ||
2745 | # asm 1: pxor <xmm5=int6464#6,<xmm15=int6464#16 | ||
2746 | # asm 2: pxor <xmm5=%xmm5,<xmm15=%xmm15 | ||
2747 | pxor %xmm5,%xmm15 | ||
2748 | |||
2749 | # qhasm: xmm10 ^= xmm6 | ||
2750 | # asm 1: pxor <xmm6=int6464#7,<xmm10=int6464#11 | ||
2751 | # asm 2: pxor <xmm6=%xmm6,<xmm10=%xmm10 | ||
2752 | pxor %xmm6,%xmm10 | ||
2753 | |||
2754 | # qhasm: xmm13 ^= xmm7 | ||
2755 | # asm 1: pxor <xmm7=int6464#8,<xmm13=int6464#14 | ||
2756 | # asm 2: pxor <xmm7=%xmm7,<xmm13=%xmm13 | ||
2757 | pxor %xmm7,%xmm13 | ||
2758 | |||
2759 | # qhasm: xmm0 ^= xmm13 | ||
2760 | # asm 1: pxor <xmm13=int6464#14,<xmm0=int6464#1 | ||
2761 | # asm 2: pxor <xmm13=%xmm13,<xmm0=%xmm0 | ||
2762 | pxor %xmm13,%xmm0 | ||
2763 | |||
2764 | # qhasm: xmm1 ^= xmm8 | ||
2765 | # asm 1: pxor <xmm8=int6464#9,<xmm1=int6464#2 | ||
2766 | # asm 2: pxor <xmm8=%xmm8,<xmm1=%xmm1 | ||
2767 | pxor %xmm8,%xmm1 | ||
2768 | |||
2769 | # qhasm: xmm2 ^= xmm9 | ||
2770 | # asm 1: pxor <xmm9=int6464#10,<xmm2=int6464#3 | ||
2771 | # asm 2: pxor <xmm9=%xmm9,<xmm2=%xmm2 | ||
2772 | pxor %xmm9,%xmm2 | ||
2773 | |||
2774 | # qhasm: xmm1 ^= xmm13 | ||
2775 | # asm 1: pxor <xmm13=int6464#14,<xmm1=int6464#2 | ||
2776 | # asm 2: pxor <xmm13=%xmm13,<xmm1=%xmm1 | ||
2777 | pxor %xmm13,%xmm1 | ||
2778 | |||
2779 | # qhasm: xmm3 ^= xmm12 | ||
2780 | # asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4 | ||
2781 | # asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3 | ||
2782 | pxor %xmm12,%xmm3 | ||
2783 | |||
2784 | # qhasm: xmm4 ^= xmm14 | ||
2785 | # asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5 | ||
2786 | # asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4 | ||
2787 | pxor %xmm14,%xmm4 | ||
2788 | |||
2789 | # qhasm: xmm5 ^= xmm11 | ||
2790 | # asm 1: pxor <xmm11=int6464#12,<xmm5=int6464#6 | ||
2791 | # asm 2: pxor <xmm11=%xmm11,<xmm5=%xmm5 | ||
2792 | pxor %xmm11,%xmm5 | ||
2793 | |||
2794 | # qhasm: xmm3 ^= xmm13 | ||
2795 | # asm 1: pxor <xmm13=int6464#14,<xmm3=int6464#4 | ||
2796 | # asm 2: pxor <xmm13=%xmm13,<xmm3=%xmm3 | ||
2797 | pxor %xmm13,%xmm3 | ||
2798 | |||
2799 | # qhasm: xmm6 ^= xmm15 | ||
2800 | # asm 1: pxor <xmm15=int6464#16,<xmm6=int6464#7 | ||
2801 | # asm 2: pxor <xmm15=%xmm15,<xmm6=%xmm6 | ||
2802 | pxor %xmm15,%xmm6 | ||
2803 | |||
2804 | # qhasm: xmm7 ^= xmm10 | ||
2805 | # asm 1: pxor <xmm10=int6464#11,<xmm7=int6464#8 | ||
2806 | # asm 2: pxor <xmm10=%xmm10,<xmm7=%xmm7 | ||
2807 | pxor %xmm10,%xmm7 | ||
2808 | |||
2809 | # qhasm: xmm4 ^= xmm13 | ||
2810 | # asm 1: pxor <xmm13=int6464#14,<xmm4=int6464#5 | ||
2811 | # asm 2: pxor <xmm13=%xmm13,<xmm4=%xmm4 | ||
2812 | pxor %xmm13,%xmm4 | ||
2813 | |||
2814 | # qhasm: xmm8 = shuffle dwords of xmm8 by 0x4E | ||
2815 | # asm 1: pshufd $0x4E,<xmm8=int6464#9,>xmm8=int6464#9 | ||
2816 | # asm 2: pshufd $0x4E,<xmm8=%xmm8,>xmm8=%xmm8 | ||
2817 | pshufd $0x4E,%xmm8,%xmm8 | ||
2818 | |||
2819 | # qhasm: xmm9 = shuffle dwords of xmm9 by 0x4E | ||
2820 | # asm 1: pshufd $0x4E,<xmm9=int6464#10,>xmm9=int6464#10 | ||
2821 | # asm 2: pshufd $0x4E,<xmm9=%xmm9,>xmm9=%xmm9 | ||
2822 | pshufd $0x4E,%xmm9,%xmm9 | ||
2823 | |||
2824 | # qhasm: xmm12 = shuffle dwords of xmm12 by 0x4E | ||
2825 | # asm 1: pshufd $0x4E,<xmm12=int6464#13,>xmm12=int6464#13 | ||
2826 | # asm 2: pshufd $0x4E,<xmm12=%xmm12,>xmm12=%xmm12 | ||
2827 | pshufd $0x4E,%xmm12,%xmm12 | ||
2828 | |||
2829 | # qhasm: xmm14 = shuffle dwords of xmm14 by 0x4E | ||
2830 | # asm 1: pshufd $0x4E,<xmm14=int6464#15,>xmm14=int6464#15 | ||
2831 | # asm 2: pshufd $0x4E,<xmm14=%xmm14,>xmm14=%xmm14 | ||
2832 | pshufd $0x4E,%xmm14,%xmm14 | ||
2833 | |||
2834 | # qhasm: xmm11 = shuffle dwords of xmm11 by 0x4E | ||
2835 | # asm 1: pshufd $0x4E,<xmm11=int6464#12,>xmm11=int6464#12 | ||
2836 | # asm 2: pshufd $0x4E,<xmm11=%xmm11,>xmm11=%xmm11 | ||
2837 | pshufd $0x4E,%xmm11,%xmm11 | ||
2838 | |||
2839 | # qhasm: xmm15 = shuffle dwords of xmm15 by 0x4E | ||
2840 | # asm 1: pshufd $0x4E,<xmm15=int6464#16,>xmm15=int6464#16 | ||
2841 | # asm 2: pshufd $0x4E,<xmm15=%xmm15,>xmm15=%xmm15 | ||
2842 | pshufd $0x4E,%xmm15,%xmm15 | ||
2843 | |||
2844 | # qhasm: xmm10 = shuffle dwords of xmm10 by 0x4E | ||
2845 | # asm 1: pshufd $0x4E,<xmm10=int6464#11,>xmm10=int6464#11 | ||
2846 | # asm 2: pshufd $0x4E,<xmm10=%xmm10,>xmm10=%xmm10 | ||
2847 | pshufd $0x4E,%xmm10,%xmm10 | ||
2848 | |||
2849 | # qhasm: xmm13 = shuffle dwords of xmm13 by 0x4E | ||
2850 | # asm 1: pshufd $0x4E,<xmm13=int6464#14,>xmm13=int6464#14 | ||
2851 | # asm 2: pshufd $0x4E,<xmm13=%xmm13,>xmm13=%xmm13 | ||
2852 | pshufd $0x4E,%xmm13,%xmm13 | ||
2853 | |||
2854 | # qhasm: xmm0 ^= xmm8 | ||
2855 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
2856 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
2857 | pxor %xmm8,%xmm0 | ||
2858 | |||
2859 | # qhasm: xmm1 ^= xmm9 | ||
2860 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
2861 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
2862 | pxor %xmm9,%xmm1 | ||
2863 | |||
2864 | # qhasm: xmm2 ^= xmm12 | ||
2865 | # asm 1: pxor <xmm12=int6464#13,<xmm2=int6464#3 | ||
2866 | # asm 2: pxor <xmm12=%xmm12,<xmm2=%xmm2 | ||
2867 | pxor %xmm12,%xmm2 | ||
2868 | |||
2869 | # qhasm: xmm3 ^= xmm14 | ||
2870 | # asm 1: pxor <xmm14=int6464#15,<xmm3=int6464#4 | ||
2871 | # asm 2: pxor <xmm14=%xmm14,<xmm3=%xmm3 | ||
2872 | pxor %xmm14,%xmm3 | ||
2873 | |||
2874 | # qhasm: xmm4 ^= xmm11 | ||
2875 | # asm 1: pxor <xmm11=int6464#12,<xmm4=int6464#5 | ||
2876 | # asm 2: pxor <xmm11=%xmm11,<xmm4=%xmm4 | ||
2877 | pxor %xmm11,%xmm4 | ||
2878 | |||
2879 | # qhasm: xmm5 ^= xmm15 | ||
2880 | # asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6 | ||
2881 | # asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5 | ||
2882 | pxor %xmm15,%xmm5 | ||
2883 | |||
2884 | # qhasm: xmm6 ^= xmm10 | ||
2885 | # asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7 | ||
2886 | # asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6 | ||
2887 | pxor %xmm10,%xmm6 | ||
2888 | |||
2889 | # qhasm: xmm7 ^= xmm13 | ||
2890 | # asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8 | ||
2891 | # asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7 | ||
2892 | pxor %xmm13,%xmm7 | ||
2893 | |||
2894 | # qhasm: xmm0 ^= *(int128 *)(c + 256) | ||
2895 | # asm 1: pxor 256(<c=int64#5),<xmm0=int6464#1 | ||
2896 | # asm 2: pxor 256(<c=%r8),<xmm0=%xmm0 | ||
2897 | pxor 256(%r8),%xmm0 | ||
2898 | |||
2899 | # qhasm: shuffle bytes of xmm0 by SR | ||
2900 | # asm 1: pshufb SR,<xmm0=int6464#1 | ||
2901 | # asm 2: pshufb SR,<xmm0=%xmm0 | ||
2902 | pshufb SR,%xmm0 | ||
2903 | |||
2904 | # qhasm: xmm1 ^= *(int128 *)(c + 272) | ||
2905 | # asm 1: pxor 272(<c=int64#5),<xmm1=int6464#2 | ||
2906 | # asm 2: pxor 272(<c=%r8),<xmm1=%xmm1 | ||
2907 | pxor 272(%r8),%xmm1 | ||
2908 | |||
2909 | # qhasm: shuffle bytes of xmm1 by SR | ||
2910 | # asm 1: pshufb SR,<xmm1=int6464#2 | ||
2911 | # asm 2: pshufb SR,<xmm1=%xmm1 | ||
2912 | pshufb SR,%xmm1 | ||
2913 | |||
2914 | # qhasm: xmm2 ^= *(int128 *)(c + 288) | ||
2915 | # asm 1: pxor 288(<c=int64#5),<xmm2=int6464#3 | ||
2916 | # asm 2: pxor 288(<c=%r8),<xmm2=%xmm2 | ||
2917 | pxor 288(%r8),%xmm2 | ||
2918 | |||
2919 | # qhasm: shuffle bytes of xmm2 by SR | ||
2920 | # asm 1: pshufb SR,<xmm2=int6464#3 | ||
2921 | # asm 2: pshufb SR,<xmm2=%xmm2 | ||
2922 | pshufb SR,%xmm2 | ||
2923 | |||
2924 | # qhasm: xmm3 ^= *(int128 *)(c + 304) | ||
2925 | # asm 1: pxor 304(<c=int64#5),<xmm3=int6464#4 | ||
2926 | # asm 2: pxor 304(<c=%r8),<xmm3=%xmm3 | ||
2927 | pxor 304(%r8),%xmm3 | ||
2928 | |||
2929 | # qhasm: shuffle bytes of xmm3 by SR | ||
2930 | # asm 1: pshufb SR,<xmm3=int6464#4 | ||
2931 | # asm 2: pshufb SR,<xmm3=%xmm3 | ||
2932 | pshufb SR,%xmm3 | ||
2933 | |||
2934 | # qhasm: xmm4 ^= *(int128 *)(c + 320) | ||
2935 | # asm 1: pxor 320(<c=int64#5),<xmm4=int6464#5 | ||
2936 | # asm 2: pxor 320(<c=%r8),<xmm4=%xmm4 | ||
2937 | pxor 320(%r8),%xmm4 | ||
2938 | |||
2939 | # qhasm: shuffle bytes of xmm4 by SR | ||
2940 | # asm 1: pshufb SR,<xmm4=int6464#5 | ||
2941 | # asm 2: pshufb SR,<xmm4=%xmm4 | ||
2942 | pshufb SR,%xmm4 | ||
2943 | |||
2944 | # qhasm: xmm5 ^= *(int128 *)(c + 336) | ||
2945 | # asm 1: pxor 336(<c=int64#5),<xmm5=int6464#6 | ||
2946 | # asm 2: pxor 336(<c=%r8),<xmm5=%xmm5 | ||
2947 | pxor 336(%r8),%xmm5 | ||
2948 | |||
2949 | # qhasm: shuffle bytes of xmm5 by SR | ||
2950 | # asm 1: pshufb SR,<xmm5=int6464#6 | ||
2951 | # asm 2: pshufb SR,<xmm5=%xmm5 | ||
2952 | pshufb SR,%xmm5 | ||
2953 | |||
2954 | # qhasm: xmm6 ^= *(int128 *)(c + 352) | ||
2955 | # asm 1: pxor 352(<c=int64#5),<xmm6=int6464#7 | ||
2956 | # asm 2: pxor 352(<c=%r8),<xmm6=%xmm6 | ||
2957 | pxor 352(%r8),%xmm6 | ||
2958 | |||
2959 | # qhasm: shuffle bytes of xmm6 by SR | ||
2960 | # asm 1: pshufb SR,<xmm6=int6464#7 | ||
2961 | # asm 2: pshufb SR,<xmm6=%xmm6 | ||
2962 | pshufb SR,%xmm6 | ||
2963 | |||
2964 | # qhasm: xmm7 ^= *(int128 *)(c + 368) | ||
2965 | # asm 1: pxor 368(<c=int64#5),<xmm7=int6464#8 | ||
2966 | # asm 2: pxor 368(<c=%r8),<xmm7=%xmm7 | ||
2967 | pxor 368(%r8),%xmm7 | ||
2968 | |||
2969 | # qhasm: shuffle bytes of xmm7 by SR | ||
2970 | # asm 1: pshufb SR,<xmm7=int6464#8 | ||
2971 | # asm 2: pshufb SR,<xmm7=%xmm7 | ||
2972 | pshufb SR,%xmm7 | ||
2973 | |||
2974 | # qhasm: xmm5 ^= xmm6 | ||
2975 | # asm 1: pxor <xmm6=int6464#7,<xmm5=int6464#6 | ||
2976 | # asm 2: pxor <xmm6=%xmm6,<xmm5=%xmm5 | ||
2977 | pxor %xmm6,%xmm5 | ||
2978 | |||
2979 | # qhasm: xmm2 ^= xmm1 | ||
2980 | # asm 1: pxor <xmm1=int6464#2,<xmm2=int6464#3 | ||
2981 | # asm 2: pxor <xmm1=%xmm1,<xmm2=%xmm2 | ||
2982 | pxor %xmm1,%xmm2 | ||
2983 | |||
2984 | # qhasm: xmm5 ^= xmm0 | ||
2985 | # asm 1: pxor <xmm0=int6464#1,<xmm5=int6464#6 | ||
2986 | # asm 2: pxor <xmm0=%xmm0,<xmm5=%xmm5 | ||
2987 | pxor %xmm0,%xmm5 | ||
2988 | |||
2989 | # qhasm: xmm6 ^= xmm2 | ||
2990 | # asm 1: pxor <xmm2=int6464#3,<xmm6=int6464#7 | ||
2991 | # asm 2: pxor <xmm2=%xmm2,<xmm6=%xmm6 | ||
2992 | pxor %xmm2,%xmm6 | ||
2993 | |||
2994 | # qhasm: xmm3 ^= xmm0 | ||
2995 | # asm 1: pxor <xmm0=int6464#1,<xmm3=int6464#4 | ||
2996 | # asm 2: pxor <xmm0=%xmm0,<xmm3=%xmm3 | ||
2997 | pxor %xmm0,%xmm3 | ||
2998 | |||
2999 | # qhasm: xmm6 ^= xmm3 | ||
3000 | # asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7 | ||
3001 | # asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6 | ||
3002 | pxor %xmm3,%xmm6 | ||
3003 | |||
3004 | # qhasm: xmm3 ^= xmm7 | ||
3005 | # asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4 | ||
3006 | # asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3 | ||
3007 | pxor %xmm7,%xmm3 | ||
3008 | |||
3009 | # qhasm: xmm3 ^= xmm4 | ||
3010 | # asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4 | ||
3011 | # asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3 | ||
3012 | pxor %xmm4,%xmm3 | ||
3013 | |||
3014 | # qhasm: xmm7 ^= xmm5 | ||
3015 | # asm 1: pxor <xmm5=int6464#6,<xmm7=int6464#8 | ||
3016 | # asm 2: pxor <xmm5=%xmm5,<xmm7=%xmm7 | ||
3017 | pxor %xmm5,%xmm7 | ||
3018 | |||
3019 | # qhasm: xmm3 ^= xmm1 | ||
3020 | # asm 1: pxor <xmm1=int6464#2,<xmm3=int6464#4 | ||
3021 | # asm 2: pxor <xmm1=%xmm1,<xmm3=%xmm3 | ||
3022 | pxor %xmm1,%xmm3 | ||
3023 | |||
3024 | # qhasm: xmm4 ^= xmm5 | ||
3025 | # asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5 | ||
3026 | # asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4 | ||
3027 | pxor %xmm5,%xmm4 | ||
3028 | |||
3029 | # qhasm: xmm2 ^= xmm7 | ||
3030 | # asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3 | ||
3031 | # asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2 | ||
3032 | pxor %xmm7,%xmm2 | ||
3033 | |||
3034 | # qhasm: xmm1 ^= xmm5 | ||
3035 | # asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2 | ||
3036 | # asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1 | ||
3037 | pxor %xmm5,%xmm1 | ||
3038 | |||
3039 | # qhasm: xmm11 = xmm7 | ||
3040 | # asm 1: movdqa <xmm7=int6464#8,>xmm11=int6464#9 | ||
3041 | # asm 2: movdqa <xmm7=%xmm7,>xmm11=%xmm8 | ||
3042 | movdqa %xmm7,%xmm8 | ||
3043 | |||
3044 | # qhasm: xmm10 = xmm1 | ||
3045 | # asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10 | ||
3046 | # asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9 | ||
3047 | movdqa %xmm1,%xmm9 | ||
3048 | |||
3049 | # qhasm: xmm9 = xmm5 | ||
3050 | # asm 1: movdqa <xmm5=int6464#6,>xmm9=int6464#11 | ||
3051 | # asm 2: movdqa <xmm5=%xmm5,>xmm9=%xmm10 | ||
3052 | movdqa %xmm5,%xmm10 | ||
3053 | |||
3054 | # qhasm: xmm13 = xmm2 | ||
3055 | # asm 1: movdqa <xmm2=int6464#3,>xmm13=int6464#12 | ||
3056 | # asm 2: movdqa <xmm2=%xmm2,>xmm13=%xmm11 | ||
3057 | movdqa %xmm2,%xmm11 | ||
3058 | |||
3059 | # qhasm: xmm12 = xmm6 | ||
3060 | # asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#13 | ||
3061 | # asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm12 | ||
3062 | movdqa %xmm6,%xmm12 | ||
3063 | |||
3064 | # qhasm: xmm11 ^= xmm4 | ||
3065 | # asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#9 | ||
3066 | # asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm8 | ||
3067 | pxor %xmm4,%xmm8 | ||
3068 | |||
3069 | # qhasm: xmm10 ^= xmm2 | ||
3070 | # asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#10 | ||
3071 | # asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm9 | ||
3072 | pxor %xmm2,%xmm9 | ||
3073 | |||
3074 | # qhasm: xmm9 ^= xmm3 | ||
3075 | # asm 1: pxor <xmm3=int6464#4,<xmm9=int6464#11 | ||
3076 | # asm 2: pxor <xmm3=%xmm3,<xmm9=%xmm10 | ||
3077 | pxor %xmm3,%xmm10 | ||
3078 | |||
3079 | # qhasm: xmm13 ^= xmm4 | ||
3080 | # asm 1: pxor <xmm4=int6464#5,<xmm13=int6464#12 | ||
3081 | # asm 2: pxor <xmm4=%xmm4,<xmm13=%xmm11 | ||
3082 | pxor %xmm4,%xmm11 | ||
3083 | |||
3084 | # qhasm: xmm12 ^= xmm0 | ||
3085 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13 | ||
3086 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12 | ||
3087 | pxor %xmm0,%xmm12 | ||
3088 | |||
3089 | # qhasm: xmm14 = xmm11 | ||
3090 | # asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14 | ||
3091 | # asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13 | ||
3092 | movdqa %xmm8,%xmm13 | ||
3093 | |||
3094 | # qhasm: xmm8 = xmm10 | ||
3095 | # asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15 | ||
3096 | # asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14 | ||
3097 | movdqa %xmm9,%xmm14 | ||
3098 | |||
3099 | # qhasm: xmm15 = xmm11 | ||
3100 | # asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16 | ||
3101 | # asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15 | ||
3102 | movdqa %xmm8,%xmm15 | ||
3103 | |||
3104 | # qhasm: xmm10 |= xmm9 | ||
3105 | # asm 1: por <xmm9=int6464#11,<xmm10=int6464#10 | ||
3106 | # asm 2: por <xmm9=%xmm10,<xmm10=%xmm9 | ||
3107 | por %xmm10,%xmm9 | ||
3108 | |||
3109 | # qhasm: xmm11 |= xmm12 | ||
3110 | # asm 1: por <xmm12=int6464#13,<xmm11=int6464#9 | ||
3111 | # asm 2: por <xmm12=%xmm12,<xmm11=%xmm8 | ||
3112 | por %xmm12,%xmm8 | ||
3113 | |||
3114 | # qhasm: xmm15 ^= xmm8 | ||
3115 | # asm 1: pxor <xmm8=int6464#15,<xmm15=int6464#16 | ||
3116 | # asm 2: pxor <xmm8=%xmm14,<xmm15=%xmm15 | ||
3117 | pxor %xmm14,%xmm15 | ||
3118 | |||
3119 | # qhasm: xmm14 &= xmm12 | ||
3120 | # asm 1: pand <xmm12=int6464#13,<xmm14=int6464#14 | ||
3121 | # asm 2: pand <xmm12=%xmm12,<xmm14=%xmm13 | ||
3122 | pand %xmm12,%xmm13 | ||
3123 | |||
3124 | # qhasm: xmm8 &= xmm9 | ||
3125 | # asm 1: pand <xmm9=int6464#11,<xmm8=int6464#15 | ||
3126 | # asm 2: pand <xmm9=%xmm10,<xmm8=%xmm14 | ||
3127 | pand %xmm10,%xmm14 | ||
3128 | |||
3129 | # qhasm: xmm12 ^= xmm9 | ||
3130 | # asm 1: pxor <xmm9=int6464#11,<xmm12=int6464#13 | ||
3131 | # asm 2: pxor <xmm9=%xmm10,<xmm12=%xmm12 | ||
3132 | pxor %xmm10,%xmm12 | ||
3133 | |||
3134 | # qhasm: xmm15 &= xmm12 | ||
3135 | # asm 1: pand <xmm12=int6464#13,<xmm15=int6464#16 | ||
3136 | # asm 2: pand <xmm12=%xmm12,<xmm15=%xmm15 | ||
3137 | pand %xmm12,%xmm15 | ||
3138 | |||
3139 | # qhasm: xmm12 = xmm3 | ||
3140 | # asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#11 | ||
3141 | # asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm10 | ||
3142 | movdqa %xmm3,%xmm10 | ||
3143 | |||
3144 | # qhasm: xmm12 ^= xmm0 | ||
3145 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#11 | ||
3146 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm10 | ||
3147 | pxor %xmm0,%xmm10 | ||
3148 | |||
3149 | # qhasm: xmm13 &= xmm12 | ||
3150 | # asm 1: pand <xmm12=int6464#11,<xmm13=int6464#12 | ||
3151 | # asm 2: pand <xmm12=%xmm10,<xmm13=%xmm11 | ||
3152 | pand %xmm10,%xmm11 | ||
3153 | |||
3154 | # qhasm: xmm11 ^= xmm13 | ||
3155 | # asm 1: pxor <xmm13=int6464#12,<xmm11=int6464#9 | ||
3156 | # asm 2: pxor <xmm13=%xmm11,<xmm11=%xmm8 | ||
3157 | pxor %xmm11,%xmm8 | ||
3158 | |||
3159 | # qhasm: xmm10 ^= xmm13 | ||
3160 | # asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10 | ||
3161 | # asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9 | ||
3162 | pxor %xmm11,%xmm9 | ||
3163 | |||
3164 | # qhasm: xmm13 = xmm7 | ||
3165 | # asm 1: movdqa <xmm7=int6464#8,>xmm13=int6464#11 | ||
3166 | # asm 2: movdqa <xmm7=%xmm7,>xmm13=%xmm10 | ||
3167 | movdqa %xmm7,%xmm10 | ||
3168 | |||
3169 | # qhasm: xmm13 ^= xmm1 | ||
3170 | # asm 1: pxor <xmm1=int6464#2,<xmm13=int6464#11 | ||
3171 | # asm 2: pxor <xmm1=%xmm1,<xmm13=%xmm10 | ||
3172 | pxor %xmm1,%xmm10 | ||
3173 | |||
3174 | # qhasm: xmm12 = xmm5 | ||
3175 | # asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#12 | ||
3176 | # asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm11 | ||
3177 | movdqa %xmm5,%xmm11 | ||
3178 | |||
3179 | # qhasm: xmm9 = xmm13 | ||
3180 | # asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13 | ||
3181 | # asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12 | ||
3182 | movdqa %xmm10,%xmm12 | ||
3183 | |||
3184 | # qhasm: xmm12 ^= xmm6 | ||
3185 | # asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#12 | ||
3186 | # asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm11 | ||
3187 | pxor %xmm6,%xmm11 | ||
3188 | |||
3189 | # qhasm: xmm9 |= xmm12 | ||
3190 | # asm 1: por <xmm12=int6464#12,<xmm9=int6464#13 | ||
3191 | # asm 2: por <xmm12=%xmm11,<xmm9=%xmm12 | ||
3192 | por %xmm11,%xmm12 | ||
3193 | |||
3194 | # qhasm: xmm13 &= xmm12 | ||
3195 | # asm 1: pand <xmm12=int6464#12,<xmm13=int6464#11 | ||
3196 | # asm 2: pand <xmm12=%xmm11,<xmm13=%xmm10 | ||
3197 | pand %xmm11,%xmm10 | ||
3198 | |||
3199 | # qhasm: xmm8 ^= xmm13 | ||
3200 | # asm 1: pxor <xmm13=int6464#11,<xmm8=int6464#15 | ||
3201 | # asm 2: pxor <xmm13=%xmm10,<xmm8=%xmm14 | ||
3202 | pxor %xmm10,%xmm14 | ||
3203 | |||
3204 | # qhasm: xmm11 ^= xmm15 | ||
3205 | # asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#9 | ||
3206 | # asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm8 | ||
3207 | pxor %xmm15,%xmm8 | ||
3208 | |||
3209 | # qhasm: xmm10 ^= xmm14 | ||
3210 | # asm 1: pxor <xmm14=int6464#14,<xmm10=int6464#10 | ||
3211 | # asm 2: pxor <xmm14=%xmm13,<xmm10=%xmm9 | ||
3212 | pxor %xmm13,%xmm9 | ||
3213 | |||
3214 | # qhasm: xmm9 ^= xmm15 | ||
3215 | # asm 1: pxor <xmm15=int6464#16,<xmm9=int6464#13 | ||
3216 | # asm 2: pxor <xmm15=%xmm15,<xmm9=%xmm12 | ||
3217 | pxor %xmm15,%xmm12 | ||
3218 | |||
3219 | # qhasm: xmm8 ^= xmm14 | ||
3220 | # asm 1: pxor <xmm14=int6464#14,<xmm8=int6464#15 | ||
3221 | # asm 2: pxor <xmm14=%xmm13,<xmm8=%xmm14 | ||
3222 | pxor %xmm13,%xmm14 | ||
3223 | |||
3224 | # qhasm: xmm9 ^= xmm14 | ||
3225 | # asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13 | ||
3226 | # asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12 | ||
3227 | pxor %xmm13,%xmm12 | ||
3228 | |||
3229 | # qhasm: xmm12 = xmm2 | ||
3230 | # asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#11 | ||
3231 | # asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm10 | ||
3232 | movdqa %xmm2,%xmm10 | ||
3233 | |||
3234 | # qhasm: xmm13 = xmm4 | ||
3235 | # asm 1: movdqa <xmm4=int6464#5,>xmm13=int6464#12 | ||
3236 | # asm 2: movdqa <xmm4=%xmm4,>xmm13=%xmm11 | ||
3237 | movdqa %xmm4,%xmm11 | ||
3238 | |||
3239 | # qhasm: xmm14 = xmm1 | ||
3240 | # asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14 | ||
3241 | # asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13 | ||
3242 | movdqa %xmm1,%xmm13 | ||
3243 | |||
3244 | # qhasm: xmm15 = xmm7 | ||
3245 | # asm 1: movdqa <xmm7=int6464#8,>xmm15=int6464#16 | ||
3246 | # asm 2: movdqa <xmm7=%xmm7,>xmm15=%xmm15 | ||
3247 | movdqa %xmm7,%xmm15 | ||
3248 | |||
3249 | # qhasm: xmm12 &= xmm3 | ||
3250 | # asm 1: pand <xmm3=int6464#4,<xmm12=int6464#11 | ||
3251 | # asm 2: pand <xmm3=%xmm3,<xmm12=%xmm10 | ||
3252 | pand %xmm3,%xmm10 | ||
3253 | |||
3254 | # qhasm: xmm13 &= xmm0 | ||
3255 | # asm 1: pand <xmm0=int6464#1,<xmm13=int6464#12 | ||
3256 | # asm 2: pand <xmm0=%xmm0,<xmm13=%xmm11 | ||
3257 | pand %xmm0,%xmm11 | ||
3258 | |||
3259 | # qhasm: xmm14 &= xmm5 | ||
3260 | # asm 1: pand <xmm5=int6464#6,<xmm14=int6464#14 | ||
3261 | # asm 2: pand <xmm5=%xmm5,<xmm14=%xmm13 | ||
3262 | pand %xmm5,%xmm13 | ||
3263 | |||
3264 | # qhasm: xmm15 |= xmm6 | ||
3265 | # asm 1: por <xmm6=int6464#7,<xmm15=int6464#16 | ||
3266 | # asm 2: por <xmm6=%xmm6,<xmm15=%xmm15 | ||
3267 | por %xmm6,%xmm15 | ||
3268 | |||
3269 | # qhasm: xmm11 ^= xmm12 | ||
3270 | # asm 1: pxor <xmm12=int6464#11,<xmm11=int6464#9 | ||
3271 | # asm 2: pxor <xmm12=%xmm10,<xmm11=%xmm8 | ||
3272 | pxor %xmm10,%xmm8 | ||
3273 | |||
3274 | # qhasm: xmm10 ^= xmm13 | ||
3275 | # asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10 | ||
3276 | # asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9 | ||
3277 | pxor %xmm11,%xmm9 | ||
3278 | |||
3279 | # qhasm: xmm9 ^= xmm14 | ||
3280 | # asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13 | ||
3281 | # asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12 | ||
3282 | pxor %xmm13,%xmm12 | ||
3283 | |||
3284 | # qhasm: xmm8 ^= xmm15 | ||
3285 | # asm 1: pxor <xmm15=int6464#16,<xmm8=int6464#15 | ||
3286 | # asm 2: pxor <xmm15=%xmm15,<xmm8=%xmm14 | ||
3287 | pxor %xmm15,%xmm14 | ||
3288 | |||
3289 | # qhasm: xmm12 = xmm11 | ||
3290 | # asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11 | ||
3291 | # asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10 | ||
3292 | movdqa %xmm8,%xmm10 | ||
3293 | |||
3294 | # qhasm: xmm12 ^= xmm10 | ||
3295 | # asm 1: pxor <xmm10=int6464#10,<xmm12=int6464#11 | ||
3296 | # asm 2: pxor <xmm10=%xmm9,<xmm12=%xmm10 | ||
3297 | pxor %xmm9,%xmm10 | ||
3298 | |||
3299 | # qhasm: xmm11 &= xmm9 | ||
3300 | # asm 1: pand <xmm9=int6464#13,<xmm11=int6464#9 | ||
3301 | # asm 2: pand <xmm9=%xmm12,<xmm11=%xmm8 | ||
3302 | pand %xmm12,%xmm8 | ||
3303 | |||
3304 | # qhasm: xmm14 = xmm8 | ||
3305 | # asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12 | ||
3306 | # asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11 | ||
3307 | movdqa %xmm14,%xmm11 | ||
3308 | |||
3309 | # qhasm: xmm14 ^= xmm11 | ||
3310 | # asm 1: pxor <xmm11=int6464#9,<xmm14=int6464#12 | ||
3311 | # asm 2: pxor <xmm11=%xmm8,<xmm14=%xmm11 | ||
3312 | pxor %xmm8,%xmm11 | ||
3313 | |||
3314 | # qhasm: xmm15 = xmm12 | ||
3315 | # asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14 | ||
3316 | # asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13 | ||
3317 | movdqa %xmm10,%xmm13 | ||
3318 | |||
3319 | # qhasm: xmm15 &= xmm14 | ||
3320 | # asm 1: pand <xmm14=int6464#12,<xmm15=int6464#14 | ||
3321 | # asm 2: pand <xmm14=%xmm11,<xmm15=%xmm13 | ||
3322 | pand %xmm11,%xmm13 | ||
3323 | |||
3324 | # qhasm: xmm15 ^= xmm10 | ||
3325 | # asm 1: pxor <xmm10=int6464#10,<xmm15=int6464#14 | ||
3326 | # asm 2: pxor <xmm10=%xmm9,<xmm15=%xmm13 | ||
3327 | pxor %xmm9,%xmm13 | ||
3328 | |||
3329 | # qhasm: xmm13 = xmm9 | ||
3330 | # asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16 | ||
3331 | # asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15 | ||
3332 | movdqa %xmm12,%xmm15 | ||
3333 | |||
3334 | # qhasm: xmm13 ^= xmm8 | ||
3335 | # asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16 | ||
3336 | # asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15 | ||
3337 | pxor %xmm14,%xmm15 | ||
3338 | |||
3339 | # qhasm: xmm11 ^= xmm10 | ||
3340 | # asm 1: pxor <xmm10=int6464#10,<xmm11=int6464#9 | ||
3341 | # asm 2: pxor <xmm10=%xmm9,<xmm11=%xmm8 | ||
3342 | pxor %xmm9,%xmm8 | ||
3343 | |||
3344 | # qhasm: xmm13 &= xmm11 | ||
3345 | # asm 1: pand <xmm11=int6464#9,<xmm13=int6464#16 | ||
3346 | # asm 2: pand <xmm11=%xmm8,<xmm13=%xmm15 | ||
3347 | pand %xmm8,%xmm15 | ||
3348 | |||
3349 | # qhasm: xmm13 ^= xmm8 | ||
3350 | # asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16 | ||
3351 | # asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15 | ||
3352 | pxor %xmm14,%xmm15 | ||
3353 | |||
3354 | # qhasm: xmm9 ^= xmm13 | ||
3355 | # asm 1: pxor <xmm13=int6464#16,<xmm9=int6464#13 | ||
3356 | # asm 2: pxor <xmm13=%xmm15,<xmm9=%xmm12 | ||
3357 | pxor %xmm15,%xmm12 | ||
3358 | |||
3359 | # qhasm: xmm10 = xmm14 | ||
3360 | # asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9 | ||
3361 | # asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8 | ||
3362 | movdqa %xmm11,%xmm8 | ||
3363 | |||
3364 | # qhasm: xmm10 ^= xmm13 | ||
3365 | # asm 1: pxor <xmm13=int6464#16,<xmm10=int6464#9 | ||
3366 | # asm 2: pxor <xmm13=%xmm15,<xmm10=%xmm8 | ||
3367 | pxor %xmm15,%xmm8 | ||
3368 | |||
3369 | # qhasm: xmm10 &= xmm8 | ||
3370 | # asm 1: pand <xmm8=int6464#15,<xmm10=int6464#9 | ||
3371 | # asm 2: pand <xmm8=%xmm14,<xmm10=%xmm8 | ||
3372 | pand %xmm14,%xmm8 | ||
3373 | |||
3374 | # qhasm: xmm9 ^= xmm10 | ||
3375 | # asm 1: pxor <xmm10=int6464#9,<xmm9=int6464#13 | ||
3376 | # asm 2: pxor <xmm10=%xmm8,<xmm9=%xmm12 | ||
3377 | pxor %xmm8,%xmm12 | ||
3378 | |||
3379 | # qhasm: xmm14 ^= xmm10 | ||
3380 | # asm 1: pxor <xmm10=int6464#9,<xmm14=int6464#12 | ||
3381 | # asm 2: pxor <xmm10=%xmm8,<xmm14=%xmm11 | ||
3382 | pxor %xmm8,%xmm11 | ||
3383 | |||
3384 | # qhasm: xmm14 &= xmm15 | ||
3385 | # asm 1: pand <xmm15=int6464#14,<xmm14=int6464#12 | ||
3386 | # asm 2: pand <xmm15=%xmm13,<xmm14=%xmm11 | ||
3387 | pand %xmm13,%xmm11 | ||
3388 | |||
3389 | # qhasm: xmm14 ^= xmm12 | ||
3390 | # asm 1: pxor <xmm12=int6464#11,<xmm14=int6464#12 | ||
3391 | # asm 2: pxor <xmm12=%xmm10,<xmm14=%xmm11 | ||
3392 | pxor %xmm10,%xmm11 | ||
3393 | |||
3394 | # qhasm: xmm12 = xmm6 | ||
3395 | # asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#9 | ||
3396 | # asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm8 | ||
3397 | movdqa %xmm6,%xmm8 | ||
3398 | |||
3399 | # qhasm: xmm8 = xmm5 | ||
3400 | # asm 1: movdqa <xmm5=int6464#6,>xmm8=int6464#10 | ||
3401 | # asm 2: movdqa <xmm5=%xmm5,>xmm8=%xmm9 | ||
3402 | movdqa %xmm5,%xmm9 | ||
3403 | |||
3404 | # qhasm: xmm10 = xmm15 | ||
3405 | # asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11 | ||
3406 | # asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10 | ||
3407 | movdqa %xmm13,%xmm10 | ||
3408 | |||
3409 | # qhasm: xmm10 ^= xmm14 | ||
3410 | # asm 1: pxor <xmm14=int6464#12,<xmm10=int6464#11 | ||
3411 | # asm 2: pxor <xmm14=%xmm11,<xmm10=%xmm10 | ||
3412 | pxor %xmm11,%xmm10 | ||
3413 | |||
3414 | # qhasm: xmm10 &= xmm6 | ||
3415 | # asm 1: pand <xmm6=int6464#7,<xmm10=int6464#11 | ||
3416 | # asm 2: pand <xmm6=%xmm6,<xmm10=%xmm10 | ||
3417 | pand %xmm6,%xmm10 | ||
3418 | |||
3419 | # qhasm: xmm6 ^= xmm5 | ||
3420 | # asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7 | ||
3421 | # asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6 | ||
3422 | pxor %xmm5,%xmm6 | ||
3423 | |||
3424 | # qhasm: xmm6 &= xmm14 | ||
3425 | # asm 1: pand <xmm14=int6464#12,<xmm6=int6464#7 | ||
3426 | # asm 2: pand <xmm14=%xmm11,<xmm6=%xmm6 | ||
3427 | pand %xmm11,%xmm6 | ||
3428 | |||
3429 | # qhasm: xmm5 &= xmm15 | ||
3430 | # asm 1: pand <xmm15=int6464#14,<xmm5=int6464#6 | ||
3431 | # asm 2: pand <xmm15=%xmm13,<xmm5=%xmm5 | ||
3432 | pand %xmm13,%xmm5 | ||
3433 | |||
3434 | # qhasm: xmm6 ^= xmm5 | ||
3435 | # asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7 | ||
3436 | # asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6 | ||
3437 | pxor %xmm5,%xmm6 | ||
3438 | |||
3439 | # qhasm: xmm5 ^= xmm10 | ||
3440 | # asm 1: pxor <xmm10=int6464#11,<xmm5=int6464#6 | ||
3441 | # asm 2: pxor <xmm10=%xmm10,<xmm5=%xmm5 | ||
3442 | pxor %xmm10,%xmm5 | ||
3443 | |||
3444 | # qhasm: xmm12 ^= xmm0 | ||
3445 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#9 | ||
3446 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm8 | ||
3447 | pxor %xmm0,%xmm8 | ||
3448 | |||
3449 | # qhasm: xmm8 ^= xmm3 | ||
3450 | # asm 1: pxor <xmm3=int6464#4,<xmm8=int6464#10 | ||
3451 | # asm 2: pxor <xmm3=%xmm3,<xmm8=%xmm9 | ||
3452 | pxor %xmm3,%xmm9 | ||
3453 | |||
3454 | # qhasm: xmm15 ^= xmm13 | ||
3455 | # asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14 | ||
3456 | # asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13 | ||
3457 | pxor %xmm15,%xmm13 | ||
3458 | |||
3459 | # qhasm: xmm14 ^= xmm9 | ||
3460 | # asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12 | ||
3461 | # asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11 | ||
3462 | pxor %xmm12,%xmm11 | ||
3463 | |||
3464 | # qhasm: xmm11 = xmm15 | ||
3465 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
3466 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
3467 | movdqa %xmm13,%xmm10 | ||
3468 | |||
3469 | # qhasm: xmm11 ^= xmm14 | ||
3470 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
3471 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
3472 | pxor %xmm11,%xmm10 | ||
3473 | |||
3474 | # qhasm: xmm11 &= xmm12 | ||
3475 | # asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11 | ||
3476 | # asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10 | ||
3477 | pand %xmm8,%xmm10 | ||
3478 | |||
3479 | # qhasm: xmm12 ^= xmm8 | ||
3480 | # asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9 | ||
3481 | # asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8 | ||
3482 | pxor %xmm9,%xmm8 | ||
3483 | |||
3484 | # qhasm: xmm12 &= xmm14 | ||
3485 | # asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9 | ||
3486 | # asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8 | ||
3487 | pand %xmm11,%xmm8 | ||
3488 | |||
3489 | # qhasm: xmm8 &= xmm15 | ||
3490 | # asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10 | ||
3491 | # asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9 | ||
3492 | pand %xmm13,%xmm9 | ||
3493 | |||
3494 | # qhasm: xmm8 ^= xmm12 | ||
3495 | # asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10 | ||
3496 | # asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9 | ||
3497 | pxor %xmm8,%xmm9 | ||
3498 | |||
3499 | # qhasm: xmm12 ^= xmm11 | ||
3500 | # asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9 | ||
3501 | # asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8 | ||
3502 | pxor %xmm10,%xmm8 | ||
3503 | |||
3504 | # qhasm: xmm10 = xmm13 | ||
3505 | # asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11 | ||
3506 | # asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10 | ||
3507 | movdqa %xmm15,%xmm10 | ||
3508 | |||
3509 | # qhasm: xmm10 ^= xmm9 | ||
3510 | # asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11 | ||
3511 | # asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10 | ||
3512 | pxor %xmm12,%xmm10 | ||
3513 | |||
3514 | # qhasm: xmm10 &= xmm0 | ||
3515 | # asm 1: pand <xmm0=int6464#1,<xmm10=int6464#11 | ||
3516 | # asm 2: pand <xmm0=%xmm0,<xmm10=%xmm10 | ||
3517 | pand %xmm0,%xmm10 | ||
3518 | |||
3519 | # qhasm: xmm0 ^= xmm3 | ||
3520 | # asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1 | ||
3521 | # asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0 | ||
3522 | pxor %xmm3,%xmm0 | ||
3523 | |||
3524 | # qhasm: xmm0 &= xmm9 | ||
3525 | # asm 1: pand <xmm9=int6464#13,<xmm0=int6464#1 | ||
3526 | # asm 2: pand <xmm9=%xmm12,<xmm0=%xmm0 | ||
3527 | pand %xmm12,%xmm0 | ||
3528 | |||
3529 | # qhasm: xmm3 &= xmm13 | ||
3530 | # asm 1: pand <xmm13=int6464#16,<xmm3=int6464#4 | ||
3531 | # asm 2: pand <xmm13=%xmm15,<xmm3=%xmm3 | ||
3532 | pand %xmm15,%xmm3 | ||
3533 | |||
3534 | # qhasm: xmm0 ^= xmm3 | ||
3535 | # asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1 | ||
3536 | # asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0 | ||
3537 | pxor %xmm3,%xmm0 | ||
3538 | |||
3539 | # qhasm: xmm3 ^= xmm10 | ||
3540 | # asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4 | ||
3541 | # asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3 | ||
3542 | pxor %xmm10,%xmm3 | ||
3543 | |||
3544 | # qhasm: xmm6 ^= xmm12 | ||
3545 | # asm 1: pxor <xmm12=int6464#9,<xmm6=int6464#7 | ||
3546 | # asm 2: pxor <xmm12=%xmm8,<xmm6=%xmm6 | ||
3547 | pxor %xmm8,%xmm6 | ||
3548 | |||
3549 | # qhasm: xmm0 ^= xmm12 | ||
3550 | # asm 1: pxor <xmm12=int6464#9,<xmm0=int6464#1 | ||
3551 | # asm 2: pxor <xmm12=%xmm8,<xmm0=%xmm0 | ||
3552 | pxor %xmm8,%xmm0 | ||
3553 | |||
3554 | # qhasm: xmm5 ^= xmm8 | ||
3555 | # asm 1: pxor <xmm8=int6464#10,<xmm5=int6464#6 | ||
3556 | # asm 2: pxor <xmm8=%xmm9,<xmm5=%xmm5 | ||
3557 | pxor %xmm9,%xmm5 | ||
3558 | |||
3559 | # qhasm: xmm3 ^= xmm8 | ||
3560 | # asm 1: pxor <xmm8=int6464#10,<xmm3=int6464#4 | ||
3561 | # asm 2: pxor <xmm8=%xmm9,<xmm3=%xmm3 | ||
3562 | pxor %xmm9,%xmm3 | ||
3563 | |||
3564 | # qhasm: xmm12 = xmm7 | ||
3565 | # asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#9 | ||
3566 | # asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm8 | ||
3567 | movdqa %xmm7,%xmm8 | ||
3568 | |||
3569 | # qhasm: xmm8 = xmm1 | ||
3570 | # asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10 | ||
3571 | # asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9 | ||
3572 | movdqa %xmm1,%xmm9 | ||
3573 | |||
3574 | # qhasm: xmm12 ^= xmm4 | ||
3575 | # asm 1: pxor <xmm4=int6464#5,<xmm12=int6464#9 | ||
3576 | # asm 2: pxor <xmm4=%xmm4,<xmm12=%xmm8 | ||
3577 | pxor %xmm4,%xmm8 | ||
3578 | |||
3579 | # qhasm: xmm8 ^= xmm2 | ||
3580 | # asm 1: pxor <xmm2=int6464#3,<xmm8=int6464#10 | ||
3581 | # asm 2: pxor <xmm2=%xmm2,<xmm8=%xmm9 | ||
3582 | pxor %xmm2,%xmm9 | ||
3583 | |||
3584 | # qhasm: xmm11 = xmm15 | ||
3585 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
3586 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
3587 | movdqa %xmm13,%xmm10 | ||
3588 | |||
3589 | # qhasm: xmm11 ^= xmm14 | ||
3590 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
3591 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
3592 | pxor %xmm11,%xmm10 | ||
3593 | |||
3594 | # qhasm: xmm11 &= xmm12 | ||
3595 | # asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11 | ||
3596 | # asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10 | ||
3597 | pand %xmm8,%xmm10 | ||
3598 | |||
3599 | # qhasm: xmm12 ^= xmm8 | ||
3600 | # asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9 | ||
3601 | # asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8 | ||
3602 | pxor %xmm9,%xmm8 | ||
3603 | |||
3604 | # qhasm: xmm12 &= xmm14 | ||
3605 | # asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9 | ||
3606 | # asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8 | ||
3607 | pand %xmm11,%xmm8 | ||
3608 | |||
3609 | # qhasm: xmm8 &= xmm15 | ||
3610 | # asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10 | ||
3611 | # asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9 | ||
3612 | pand %xmm13,%xmm9 | ||
3613 | |||
3614 | # qhasm: xmm8 ^= xmm12 | ||
3615 | # asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10 | ||
3616 | # asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9 | ||
3617 | pxor %xmm8,%xmm9 | ||
3618 | |||
3619 | # qhasm: xmm12 ^= xmm11 | ||
3620 | # asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9 | ||
3621 | # asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8 | ||
3622 | pxor %xmm10,%xmm8 | ||
3623 | |||
3624 | # qhasm: xmm10 = xmm13 | ||
3625 | # asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11 | ||
3626 | # asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10 | ||
3627 | movdqa %xmm15,%xmm10 | ||
3628 | |||
3629 | # qhasm: xmm10 ^= xmm9 | ||
3630 | # asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11 | ||
3631 | # asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10 | ||
3632 | pxor %xmm12,%xmm10 | ||
3633 | |||
3634 | # qhasm: xmm10 &= xmm4 | ||
3635 | # asm 1: pand <xmm4=int6464#5,<xmm10=int6464#11 | ||
3636 | # asm 2: pand <xmm4=%xmm4,<xmm10=%xmm10 | ||
3637 | pand %xmm4,%xmm10 | ||
3638 | |||
3639 | # qhasm: xmm4 ^= xmm2 | ||
3640 | # asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5 | ||
3641 | # asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4 | ||
3642 | pxor %xmm2,%xmm4 | ||
3643 | |||
3644 | # qhasm: xmm4 &= xmm9 | ||
3645 | # asm 1: pand <xmm9=int6464#13,<xmm4=int6464#5 | ||
3646 | # asm 2: pand <xmm9=%xmm12,<xmm4=%xmm4 | ||
3647 | pand %xmm12,%xmm4 | ||
3648 | |||
3649 | # qhasm: xmm2 &= xmm13 | ||
3650 | # asm 1: pand <xmm13=int6464#16,<xmm2=int6464#3 | ||
3651 | # asm 2: pand <xmm13=%xmm15,<xmm2=%xmm2 | ||
3652 | pand %xmm15,%xmm2 | ||
3653 | |||
3654 | # qhasm: xmm4 ^= xmm2 | ||
3655 | # asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5 | ||
3656 | # asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4 | ||
3657 | pxor %xmm2,%xmm4 | ||
3658 | |||
3659 | # qhasm: xmm2 ^= xmm10 | ||
3660 | # asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3 | ||
3661 | # asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2 | ||
3662 | pxor %xmm10,%xmm2 | ||
3663 | |||
3664 | # qhasm: xmm15 ^= xmm13 | ||
3665 | # asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14 | ||
3666 | # asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13 | ||
3667 | pxor %xmm15,%xmm13 | ||
3668 | |||
3669 | # qhasm: xmm14 ^= xmm9 | ||
3670 | # asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12 | ||
3671 | # asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11 | ||
3672 | pxor %xmm12,%xmm11 | ||
3673 | |||
3674 | # qhasm: xmm11 = xmm15 | ||
3675 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
3676 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
3677 | movdqa %xmm13,%xmm10 | ||
3678 | |||
3679 | # qhasm: xmm11 ^= xmm14 | ||
3680 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
3681 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
3682 | pxor %xmm11,%xmm10 | ||
3683 | |||
3684 | # qhasm: xmm11 &= xmm7 | ||
3685 | # asm 1: pand <xmm7=int6464#8,<xmm11=int6464#11 | ||
3686 | # asm 2: pand <xmm7=%xmm7,<xmm11=%xmm10 | ||
3687 | pand %xmm7,%xmm10 | ||
3688 | |||
3689 | # qhasm: xmm7 ^= xmm1 | ||
3690 | # asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8 | ||
3691 | # asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7 | ||
3692 | pxor %xmm1,%xmm7 | ||
3693 | |||
3694 | # qhasm: xmm7 &= xmm14 | ||
3695 | # asm 1: pand <xmm14=int6464#12,<xmm7=int6464#8 | ||
3696 | # asm 2: pand <xmm14=%xmm11,<xmm7=%xmm7 | ||
3697 | pand %xmm11,%xmm7 | ||
3698 | |||
3699 | # qhasm: xmm1 &= xmm15 | ||
3700 | # asm 1: pand <xmm15=int6464#14,<xmm1=int6464#2 | ||
3701 | # asm 2: pand <xmm15=%xmm13,<xmm1=%xmm1 | ||
3702 | pand %xmm13,%xmm1 | ||
3703 | |||
3704 | # qhasm: xmm7 ^= xmm1 | ||
3705 | # asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8 | ||
3706 | # asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7 | ||
3707 | pxor %xmm1,%xmm7 | ||
3708 | |||
3709 | # qhasm: xmm1 ^= xmm11 | ||
3710 | # asm 1: pxor <xmm11=int6464#11,<xmm1=int6464#2 | ||
3711 | # asm 2: pxor <xmm11=%xmm10,<xmm1=%xmm1 | ||
3712 | pxor %xmm10,%xmm1 | ||
3713 | |||
3714 | # qhasm: xmm7 ^= xmm12 | ||
3715 | # asm 1: pxor <xmm12=int6464#9,<xmm7=int6464#8 | ||
3716 | # asm 2: pxor <xmm12=%xmm8,<xmm7=%xmm7 | ||
3717 | pxor %xmm8,%xmm7 | ||
3718 | |||
3719 | # qhasm: xmm4 ^= xmm12 | ||
3720 | # asm 1: pxor <xmm12=int6464#9,<xmm4=int6464#5 | ||
3721 | # asm 2: pxor <xmm12=%xmm8,<xmm4=%xmm4 | ||
3722 | pxor %xmm8,%xmm4 | ||
3723 | |||
3724 | # qhasm: xmm1 ^= xmm8 | ||
3725 | # asm 1: pxor <xmm8=int6464#10,<xmm1=int6464#2 | ||
3726 | # asm 2: pxor <xmm8=%xmm9,<xmm1=%xmm1 | ||
3727 | pxor %xmm9,%xmm1 | ||
3728 | |||
3729 | # qhasm: xmm2 ^= xmm8 | ||
3730 | # asm 1: pxor <xmm8=int6464#10,<xmm2=int6464#3 | ||
3731 | # asm 2: pxor <xmm8=%xmm9,<xmm2=%xmm2 | ||
3732 | pxor %xmm9,%xmm2 | ||
3733 | |||
3734 | # qhasm: xmm7 ^= xmm0 | ||
3735 | # asm 1: pxor <xmm0=int6464#1,<xmm7=int6464#8 | ||
3736 | # asm 2: pxor <xmm0=%xmm0,<xmm7=%xmm7 | ||
3737 | pxor %xmm0,%xmm7 | ||
3738 | |||
3739 | # qhasm: xmm1 ^= xmm6 | ||
3740 | # asm 1: pxor <xmm6=int6464#7,<xmm1=int6464#2 | ||
3741 | # asm 2: pxor <xmm6=%xmm6,<xmm1=%xmm1 | ||
3742 | pxor %xmm6,%xmm1 | ||
3743 | |||
3744 | # qhasm: xmm4 ^= xmm7 | ||
3745 | # asm 1: pxor <xmm7=int6464#8,<xmm4=int6464#5 | ||
3746 | # asm 2: pxor <xmm7=%xmm7,<xmm4=%xmm4 | ||
3747 | pxor %xmm7,%xmm4 | ||
3748 | |||
3749 | # qhasm: xmm6 ^= xmm0 | ||
3750 | # asm 1: pxor <xmm0=int6464#1,<xmm6=int6464#7 | ||
3751 | # asm 2: pxor <xmm0=%xmm0,<xmm6=%xmm6 | ||
3752 | pxor %xmm0,%xmm6 | ||
3753 | |||
3754 | # qhasm: xmm0 ^= xmm1 | ||
3755 | # asm 1: pxor <xmm1=int6464#2,<xmm0=int6464#1 | ||
3756 | # asm 2: pxor <xmm1=%xmm1,<xmm0=%xmm0 | ||
3757 | pxor %xmm1,%xmm0 | ||
3758 | |||
3759 | # qhasm: xmm1 ^= xmm5 | ||
3760 | # asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2 | ||
3761 | # asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1 | ||
3762 | pxor %xmm5,%xmm1 | ||
3763 | |||
3764 | # qhasm: xmm5 ^= xmm2 | ||
3765 | # asm 1: pxor <xmm2=int6464#3,<xmm5=int6464#6 | ||
3766 | # asm 2: pxor <xmm2=%xmm2,<xmm5=%xmm5 | ||
3767 | pxor %xmm2,%xmm5 | ||
3768 | |||
3769 | # qhasm: xmm4 ^= xmm5 | ||
3770 | # asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5 | ||
3771 | # asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4 | ||
3772 | pxor %xmm5,%xmm4 | ||
3773 | |||
3774 | # qhasm: xmm2 ^= xmm3 | ||
3775 | # asm 1: pxor <xmm3=int6464#4,<xmm2=int6464#3 | ||
3776 | # asm 2: pxor <xmm3=%xmm3,<xmm2=%xmm2 | ||
3777 | pxor %xmm3,%xmm2 | ||
3778 | |||
3779 | # qhasm: xmm3 ^= xmm5 | ||
3780 | # asm 1: pxor <xmm5=int6464#6,<xmm3=int6464#4 | ||
3781 | # asm 2: pxor <xmm5=%xmm5,<xmm3=%xmm3 | ||
3782 | pxor %xmm5,%xmm3 | ||
3783 | |||
3784 | # qhasm: xmm6 ^= xmm3 | ||
3785 | # asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7 | ||
3786 | # asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6 | ||
3787 | pxor %xmm3,%xmm6 | ||
3788 | |||
3789 | # qhasm: xmm8 = shuffle dwords of xmm0 by 0x93 | ||
3790 | # asm 1: pshufd $0x93,<xmm0=int6464#1,>xmm8=int6464#9 | ||
3791 | # asm 2: pshufd $0x93,<xmm0=%xmm0,>xmm8=%xmm8 | ||
3792 | pshufd $0x93,%xmm0,%xmm8 | ||
3793 | |||
3794 | # qhasm: xmm9 = shuffle dwords of xmm1 by 0x93 | ||
3795 | # asm 1: pshufd $0x93,<xmm1=int6464#2,>xmm9=int6464#10 | ||
3796 | # asm 2: pshufd $0x93,<xmm1=%xmm1,>xmm9=%xmm9 | ||
3797 | pshufd $0x93,%xmm1,%xmm9 | ||
3798 | |||
3799 | # qhasm: xmm10 = shuffle dwords of xmm4 by 0x93 | ||
3800 | # asm 1: pshufd $0x93,<xmm4=int6464#5,>xmm10=int6464#11 | ||
3801 | # asm 2: pshufd $0x93,<xmm4=%xmm4,>xmm10=%xmm10 | ||
3802 | pshufd $0x93,%xmm4,%xmm10 | ||
3803 | |||
3804 | # qhasm: xmm11 = shuffle dwords of xmm6 by 0x93 | ||
3805 | # asm 1: pshufd $0x93,<xmm6=int6464#7,>xmm11=int6464#12 | ||
3806 | # asm 2: pshufd $0x93,<xmm6=%xmm6,>xmm11=%xmm11 | ||
3807 | pshufd $0x93,%xmm6,%xmm11 | ||
3808 | |||
3809 | # qhasm: xmm12 = shuffle dwords of xmm3 by 0x93 | ||
3810 | # asm 1: pshufd $0x93,<xmm3=int6464#4,>xmm12=int6464#13 | ||
3811 | # asm 2: pshufd $0x93,<xmm3=%xmm3,>xmm12=%xmm12 | ||
3812 | pshufd $0x93,%xmm3,%xmm12 | ||
3813 | |||
3814 | # qhasm: xmm13 = shuffle dwords of xmm7 by 0x93 | ||
3815 | # asm 1: pshufd $0x93,<xmm7=int6464#8,>xmm13=int6464#14 | ||
3816 | # asm 2: pshufd $0x93,<xmm7=%xmm7,>xmm13=%xmm13 | ||
3817 | pshufd $0x93,%xmm7,%xmm13 | ||
3818 | |||
3819 | # qhasm: xmm14 = shuffle dwords of xmm2 by 0x93 | ||
3820 | # asm 1: pshufd $0x93,<xmm2=int6464#3,>xmm14=int6464#15 | ||
3821 | # asm 2: pshufd $0x93,<xmm2=%xmm2,>xmm14=%xmm14 | ||
3822 | pshufd $0x93,%xmm2,%xmm14 | ||
3823 | |||
3824 | # qhasm: xmm15 = shuffle dwords of xmm5 by 0x93 | ||
3825 | # asm 1: pshufd $0x93,<xmm5=int6464#6,>xmm15=int6464#16 | ||
3826 | # asm 2: pshufd $0x93,<xmm5=%xmm5,>xmm15=%xmm15 | ||
3827 | pshufd $0x93,%xmm5,%xmm15 | ||
3828 | |||
3829 | # qhasm: xmm0 ^= xmm8 | ||
3830 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
3831 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
3832 | pxor %xmm8,%xmm0 | ||
3833 | |||
3834 | # qhasm: xmm1 ^= xmm9 | ||
3835 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
3836 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
3837 | pxor %xmm9,%xmm1 | ||
3838 | |||
3839 | # qhasm: xmm4 ^= xmm10 | ||
3840 | # asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5 | ||
3841 | # asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4 | ||
3842 | pxor %xmm10,%xmm4 | ||
3843 | |||
3844 | # qhasm: xmm6 ^= xmm11 | ||
3845 | # asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7 | ||
3846 | # asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6 | ||
3847 | pxor %xmm11,%xmm6 | ||
3848 | |||
3849 | # qhasm: xmm3 ^= xmm12 | ||
3850 | # asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4 | ||
3851 | # asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3 | ||
3852 | pxor %xmm12,%xmm3 | ||
3853 | |||
3854 | # qhasm: xmm7 ^= xmm13 | ||
3855 | # asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8 | ||
3856 | # asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7 | ||
3857 | pxor %xmm13,%xmm7 | ||
3858 | |||
3859 | # qhasm: xmm2 ^= xmm14 | ||
3860 | # asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3 | ||
3861 | # asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2 | ||
3862 | pxor %xmm14,%xmm2 | ||
3863 | |||
3864 | # qhasm: xmm5 ^= xmm15 | ||
3865 | # asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6 | ||
3866 | # asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5 | ||
3867 | pxor %xmm15,%xmm5 | ||
3868 | |||
3869 | # qhasm: xmm8 ^= xmm5 | ||
3870 | # asm 1: pxor <xmm5=int6464#6,<xmm8=int6464#9 | ||
3871 | # asm 2: pxor <xmm5=%xmm5,<xmm8=%xmm8 | ||
3872 | pxor %xmm5,%xmm8 | ||
3873 | |||
3874 | # qhasm: xmm9 ^= xmm0 | ||
3875 | # asm 1: pxor <xmm0=int6464#1,<xmm9=int6464#10 | ||
3876 | # asm 2: pxor <xmm0=%xmm0,<xmm9=%xmm9 | ||
3877 | pxor %xmm0,%xmm9 | ||
3878 | |||
3879 | # qhasm: xmm10 ^= xmm1 | ||
3880 | # asm 1: pxor <xmm1=int6464#2,<xmm10=int6464#11 | ||
3881 | # asm 2: pxor <xmm1=%xmm1,<xmm10=%xmm10 | ||
3882 | pxor %xmm1,%xmm10 | ||
3883 | |||
3884 | # qhasm: xmm9 ^= xmm5 | ||
3885 | # asm 1: pxor <xmm5=int6464#6,<xmm9=int6464#10 | ||
3886 | # asm 2: pxor <xmm5=%xmm5,<xmm9=%xmm9 | ||
3887 | pxor %xmm5,%xmm9 | ||
3888 | |||
3889 | # qhasm: xmm11 ^= xmm4 | ||
3890 | # asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#12 | ||
3891 | # asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm11 | ||
3892 | pxor %xmm4,%xmm11 | ||
3893 | |||
3894 | # qhasm: xmm12 ^= xmm6 | ||
3895 | # asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#13 | ||
3896 | # asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm12 | ||
3897 | pxor %xmm6,%xmm12 | ||
3898 | |||
3899 | # qhasm: xmm13 ^= xmm3 | ||
3900 | # asm 1: pxor <xmm3=int6464#4,<xmm13=int6464#14 | ||
3901 | # asm 2: pxor <xmm3=%xmm3,<xmm13=%xmm13 | ||
3902 | pxor %xmm3,%xmm13 | ||
3903 | |||
3904 | # qhasm: xmm11 ^= xmm5 | ||
3905 | # asm 1: pxor <xmm5=int6464#6,<xmm11=int6464#12 | ||
3906 | # asm 2: pxor <xmm5=%xmm5,<xmm11=%xmm11 | ||
3907 | pxor %xmm5,%xmm11 | ||
3908 | |||
3909 | # qhasm: xmm14 ^= xmm7 | ||
3910 | # asm 1: pxor <xmm7=int6464#8,<xmm14=int6464#15 | ||
3911 | # asm 2: pxor <xmm7=%xmm7,<xmm14=%xmm14 | ||
3912 | pxor %xmm7,%xmm14 | ||
3913 | |||
3914 | # qhasm: xmm15 ^= xmm2 | ||
3915 | # asm 1: pxor <xmm2=int6464#3,<xmm15=int6464#16 | ||
3916 | # asm 2: pxor <xmm2=%xmm2,<xmm15=%xmm15 | ||
3917 | pxor %xmm2,%xmm15 | ||
3918 | |||
3919 | # qhasm: xmm12 ^= xmm5 | ||
3920 | # asm 1: pxor <xmm5=int6464#6,<xmm12=int6464#13 | ||
3921 | # asm 2: pxor <xmm5=%xmm5,<xmm12=%xmm12 | ||
3922 | pxor %xmm5,%xmm12 | ||
3923 | |||
3924 | # qhasm: xmm0 = shuffle dwords of xmm0 by 0x4E | ||
3925 | # asm 1: pshufd $0x4E,<xmm0=int6464#1,>xmm0=int6464#1 | ||
3926 | # asm 2: pshufd $0x4E,<xmm0=%xmm0,>xmm0=%xmm0 | ||
3927 | pshufd $0x4E,%xmm0,%xmm0 | ||
3928 | |||
3929 | # qhasm: xmm1 = shuffle dwords of xmm1 by 0x4E | ||
3930 | # asm 1: pshufd $0x4E,<xmm1=int6464#2,>xmm1=int6464#2 | ||
3931 | # asm 2: pshufd $0x4E,<xmm1=%xmm1,>xmm1=%xmm1 | ||
3932 | pshufd $0x4E,%xmm1,%xmm1 | ||
3933 | |||
3934 | # qhasm: xmm4 = shuffle dwords of xmm4 by 0x4E | ||
3935 | # asm 1: pshufd $0x4E,<xmm4=int6464#5,>xmm4=int6464#5 | ||
3936 | # asm 2: pshufd $0x4E,<xmm4=%xmm4,>xmm4=%xmm4 | ||
3937 | pshufd $0x4E,%xmm4,%xmm4 | ||
3938 | |||
3939 | # qhasm: xmm6 = shuffle dwords of xmm6 by 0x4E | ||
3940 | # asm 1: pshufd $0x4E,<xmm6=int6464#7,>xmm6=int6464#7 | ||
3941 | # asm 2: pshufd $0x4E,<xmm6=%xmm6,>xmm6=%xmm6 | ||
3942 | pshufd $0x4E,%xmm6,%xmm6 | ||
3943 | |||
3944 | # qhasm: xmm3 = shuffle dwords of xmm3 by 0x4E | ||
3945 | # asm 1: pshufd $0x4E,<xmm3=int6464#4,>xmm3=int6464#4 | ||
3946 | # asm 2: pshufd $0x4E,<xmm3=%xmm3,>xmm3=%xmm3 | ||
3947 | pshufd $0x4E,%xmm3,%xmm3 | ||
3948 | |||
3949 | # qhasm: xmm7 = shuffle dwords of xmm7 by 0x4E | ||
3950 | # asm 1: pshufd $0x4E,<xmm7=int6464#8,>xmm7=int6464#8 | ||
3951 | # asm 2: pshufd $0x4E,<xmm7=%xmm7,>xmm7=%xmm7 | ||
3952 | pshufd $0x4E,%xmm7,%xmm7 | ||
3953 | |||
3954 | # qhasm: xmm2 = shuffle dwords of xmm2 by 0x4E | ||
3955 | # asm 1: pshufd $0x4E,<xmm2=int6464#3,>xmm2=int6464#3 | ||
3956 | # asm 2: pshufd $0x4E,<xmm2=%xmm2,>xmm2=%xmm2 | ||
3957 | pshufd $0x4E,%xmm2,%xmm2 | ||
3958 | |||
3959 | # qhasm: xmm5 = shuffle dwords of xmm5 by 0x4E | ||
3960 | # asm 1: pshufd $0x4E,<xmm5=int6464#6,>xmm5=int6464#6 | ||
3961 | # asm 2: pshufd $0x4E,<xmm5=%xmm5,>xmm5=%xmm5 | ||
3962 | pshufd $0x4E,%xmm5,%xmm5 | ||
3963 | |||
3964 | # qhasm: xmm8 ^= xmm0 | ||
3965 | # asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9 | ||
3966 | # asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8 | ||
3967 | pxor %xmm0,%xmm8 | ||
3968 | |||
3969 | # qhasm: xmm9 ^= xmm1 | ||
3970 | # asm 1: pxor <xmm1=int6464#2,<xmm9=int6464#10 | ||
3971 | # asm 2: pxor <xmm1=%xmm1,<xmm9=%xmm9 | ||
3972 | pxor %xmm1,%xmm9 | ||
3973 | |||
3974 | # qhasm: xmm10 ^= xmm4 | ||
3975 | # asm 1: pxor <xmm4=int6464#5,<xmm10=int6464#11 | ||
3976 | # asm 2: pxor <xmm4=%xmm4,<xmm10=%xmm10 | ||
3977 | pxor %xmm4,%xmm10 | ||
3978 | |||
3979 | # qhasm: xmm11 ^= xmm6 | ||
3980 | # asm 1: pxor <xmm6=int6464#7,<xmm11=int6464#12 | ||
3981 | # asm 2: pxor <xmm6=%xmm6,<xmm11=%xmm11 | ||
3982 | pxor %xmm6,%xmm11 | ||
3983 | |||
3984 | # qhasm: xmm12 ^= xmm3 | ||
3985 | # asm 1: pxor <xmm3=int6464#4,<xmm12=int6464#13 | ||
3986 | # asm 2: pxor <xmm3=%xmm3,<xmm12=%xmm12 | ||
3987 | pxor %xmm3,%xmm12 | ||
3988 | |||
3989 | # qhasm: xmm13 ^= xmm7 | ||
3990 | # asm 1: pxor <xmm7=int6464#8,<xmm13=int6464#14 | ||
3991 | # asm 2: pxor <xmm7=%xmm7,<xmm13=%xmm13 | ||
3992 | pxor %xmm7,%xmm13 | ||
3993 | |||
3994 | # qhasm: xmm14 ^= xmm2 | ||
3995 | # asm 1: pxor <xmm2=int6464#3,<xmm14=int6464#15 | ||
3996 | # asm 2: pxor <xmm2=%xmm2,<xmm14=%xmm14 | ||
3997 | pxor %xmm2,%xmm14 | ||
3998 | |||
3999 | # qhasm: xmm15 ^= xmm5 | ||
4000 | # asm 1: pxor <xmm5=int6464#6,<xmm15=int6464#16 | ||
4001 | # asm 2: pxor <xmm5=%xmm5,<xmm15=%xmm15 | ||
4002 | pxor %xmm5,%xmm15 | ||
4003 | |||
4004 | # qhasm: xmm8 ^= *(int128 *)(c + 384) | ||
4005 | # asm 1: pxor 384(<c=int64#5),<xmm8=int6464#9 | ||
4006 | # asm 2: pxor 384(<c=%r8),<xmm8=%xmm8 | ||
4007 | pxor 384(%r8),%xmm8 | ||
4008 | |||
4009 | # qhasm: shuffle bytes of xmm8 by SR | ||
4010 | # asm 1: pshufb SR,<xmm8=int6464#9 | ||
4011 | # asm 2: pshufb SR,<xmm8=%xmm8 | ||
4012 | pshufb SR,%xmm8 | ||
4013 | |||
4014 | # qhasm: xmm9 ^= *(int128 *)(c + 400) | ||
4015 | # asm 1: pxor 400(<c=int64#5),<xmm9=int6464#10 | ||
4016 | # asm 2: pxor 400(<c=%r8),<xmm9=%xmm9 | ||
4017 | pxor 400(%r8),%xmm9 | ||
4018 | |||
4019 | # qhasm: shuffle bytes of xmm9 by SR | ||
4020 | # asm 1: pshufb SR,<xmm9=int6464#10 | ||
4021 | # asm 2: pshufb SR,<xmm9=%xmm9 | ||
4022 | pshufb SR,%xmm9 | ||
4023 | |||
4024 | # qhasm: xmm10 ^= *(int128 *)(c + 416) | ||
4025 | # asm 1: pxor 416(<c=int64#5),<xmm10=int6464#11 | ||
4026 | # asm 2: pxor 416(<c=%r8),<xmm10=%xmm10 | ||
4027 | pxor 416(%r8),%xmm10 | ||
4028 | |||
4029 | # qhasm: shuffle bytes of xmm10 by SR | ||
4030 | # asm 1: pshufb SR,<xmm10=int6464#11 | ||
4031 | # asm 2: pshufb SR,<xmm10=%xmm10 | ||
4032 | pshufb SR,%xmm10 | ||
4033 | |||
4034 | # qhasm: xmm11 ^= *(int128 *)(c + 432) | ||
4035 | # asm 1: pxor 432(<c=int64#5),<xmm11=int6464#12 | ||
4036 | # asm 2: pxor 432(<c=%r8),<xmm11=%xmm11 | ||
4037 | pxor 432(%r8),%xmm11 | ||
4038 | |||
4039 | # qhasm: shuffle bytes of xmm11 by SR | ||
4040 | # asm 1: pshufb SR,<xmm11=int6464#12 | ||
4041 | # asm 2: pshufb SR,<xmm11=%xmm11 | ||
4042 | pshufb SR,%xmm11 | ||
4043 | |||
4044 | # qhasm: xmm12 ^= *(int128 *)(c + 448) | ||
4045 | # asm 1: pxor 448(<c=int64#5),<xmm12=int6464#13 | ||
4046 | # asm 2: pxor 448(<c=%r8),<xmm12=%xmm12 | ||
4047 | pxor 448(%r8),%xmm12 | ||
4048 | |||
4049 | # qhasm: shuffle bytes of xmm12 by SR | ||
4050 | # asm 1: pshufb SR,<xmm12=int6464#13 | ||
4051 | # asm 2: pshufb SR,<xmm12=%xmm12 | ||
4052 | pshufb SR,%xmm12 | ||
4053 | |||
4054 | # qhasm: xmm13 ^= *(int128 *)(c + 464) | ||
4055 | # asm 1: pxor 464(<c=int64#5),<xmm13=int6464#14 | ||
4056 | # asm 2: pxor 464(<c=%r8),<xmm13=%xmm13 | ||
4057 | pxor 464(%r8),%xmm13 | ||
4058 | |||
4059 | # qhasm: shuffle bytes of xmm13 by SR | ||
4060 | # asm 1: pshufb SR,<xmm13=int6464#14 | ||
4061 | # asm 2: pshufb SR,<xmm13=%xmm13 | ||
4062 | pshufb SR,%xmm13 | ||
4063 | |||
4064 | # qhasm: xmm14 ^= *(int128 *)(c + 480) | ||
4065 | # asm 1: pxor 480(<c=int64#5),<xmm14=int6464#15 | ||
4066 | # asm 2: pxor 480(<c=%r8),<xmm14=%xmm14 | ||
4067 | pxor 480(%r8),%xmm14 | ||
4068 | |||
4069 | # qhasm: shuffle bytes of xmm14 by SR | ||
4070 | # asm 1: pshufb SR,<xmm14=int6464#15 | ||
4071 | # asm 2: pshufb SR,<xmm14=%xmm14 | ||
4072 | pshufb SR,%xmm14 | ||
4073 | |||
4074 | # qhasm: xmm15 ^= *(int128 *)(c + 496) | ||
4075 | # asm 1: pxor 496(<c=int64#5),<xmm15=int6464#16 | ||
4076 | # asm 2: pxor 496(<c=%r8),<xmm15=%xmm15 | ||
4077 | pxor 496(%r8),%xmm15 | ||
4078 | |||
4079 | # qhasm: shuffle bytes of xmm15 by SR | ||
4080 | # asm 1: pshufb SR,<xmm15=int6464#16 | ||
4081 | # asm 2: pshufb SR,<xmm15=%xmm15 | ||
4082 | pshufb SR,%xmm15 | ||
4083 | |||
4084 | # qhasm: xmm13 ^= xmm14 | ||
4085 | # asm 1: pxor <xmm14=int6464#15,<xmm13=int6464#14 | ||
4086 | # asm 2: pxor <xmm14=%xmm14,<xmm13=%xmm13 | ||
4087 | pxor %xmm14,%xmm13 | ||
4088 | |||
4089 | # qhasm: xmm10 ^= xmm9 | ||
4090 | # asm 1: pxor <xmm9=int6464#10,<xmm10=int6464#11 | ||
4091 | # asm 2: pxor <xmm9=%xmm9,<xmm10=%xmm10 | ||
4092 | pxor %xmm9,%xmm10 | ||
4093 | |||
4094 | # qhasm: xmm13 ^= xmm8 | ||
4095 | # asm 1: pxor <xmm8=int6464#9,<xmm13=int6464#14 | ||
4096 | # asm 2: pxor <xmm8=%xmm8,<xmm13=%xmm13 | ||
4097 | pxor %xmm8,%xmm13 | ||
4098 | |||
4099 | # qhasm: xmm14 ^= xmm10 | ||
4100 | # asm 1: pxor <xmm10=int6464#11,<xmm14=int6464#15 | ||
4101 | # asm 2: pxor <xmm10=%xmm10,<xmm14=%xmm14 | ||
4102 | pxor %xmm10,%xmm14 | ||
4103 | |||
4104 | # qhasm: xmm11 ^= xmm8 | ||
4105 | # asm 1: pxor <xmm8=int6464#9,<xmm11=int6464#12 | ||
4106 | # asm 2: pxor <xmm8=%xmm8,<xmm11=%xmm11 | ||
4107 | pxor %xmm8,%xmm11 | ||
4108 | |||
4109 | # qhasm: xmm14 ^= xmm11 | ||
4110 | # asm 1: pxor <xmm11=int6464#12,<xmm14=int6464#15 | ||
4111 | # asm 2: pxor <xmm11=%xmm11,<xmm14=%xmm14 | ||
4112 | pxor %xmm11,%xmm14 | ||
4113 | |||
4114 | # qhasm: xmm11 ^= xmm15 | ||
4115 | # asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#12 | ||
4116 | # asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm11 | ||
4117 | pxor %xmm15,%xmm11 | ||
4118 | |||
4119 | # qhasm: xmm11 ^= xmm12 | ||
4120 | # asm 1: pxor <xmm12=int6464#13,<xmm11=int6464#12 | ||
4121 | # asm 2: pxor <xmm12=%xmm12,<xmm11=%xmm11 | ||
4122 | pxor %xmm12,%xmm11 | ||
4123 | |||
4124 | # qhasm: xmm15 ^= xmm13 | ||
4125 | # asm 1: pxor <xmm13=int6464#14,<xmm15=int6464#16 | ||
4126 | # asm 2: pxor <xmm13=%xmm13,<xmm15=%xmm15 | ||
4127 | pxor %xmm13,%xmm15 | ||
4128 | |||
4129 | # qhasm: xmm11 ^= xmm9 | ||
4130 | # asm 1: pxor <xmm9=int6464#10,<xmm11=int6464#12 | ||
4131 | # asm 2: pxor <xmm9=%xmm9,<xmm11=%xmm11 | ||
4132 | pxor %xmm9,%xmm11 | ||
4133 | |||
4134 | # qhasm: xmm12 ^= xmm13 | ||
4135 | # asm 1: pxor <xmm13=int6464#14,<xmm12=int6464#13 | ||
4136 | # asm 2: pxor <xmm13=%xmm13,<xmm12=%xmm12 | ||
4137 | pxor %xmm13,%xmm12 | ||
4138 | |||
4139 | # qhasm: xmm10 ^= xmm15 | ||
4140 | # asm 1: pxor <xmm15=int6464#16,<xmm10=int6464#11 | ||
4141 | # asm 2: pxor <xmm15=%xmm15,<xmm10=%xmm10 | ||
4142 | pxor %xmm15,%xmm10 | ||
4143 | |||
4144 | # qhasm: xmm9 ^= xmm13 | ||
4145 | # asm 1: pxor <xmm13=int6464#14,<xmm9=int6464#10 | ||
4146 | # asm 2: pxor <xmm13=%xmm13,<xmm9=%xmm9 | ||
4147 | pxor %xmm13,%xmm9 | ||
4148 | |||
4149 | # qhasm: xmm3 = xmm15 | ||
4150 | # asm 1: movdqa <xmm15=int6464#16,>xmm3=int6464#1 | ||
4151 | # asm 2: movdqa <xmm15=%xmm15,>xmm3=%xmm0 | ||
4152 | movdqa %xmm15,%xmm0 | ||
4153 | |||
4154 | # qhasm: xmm2 = xmm9 | ||
4155 | # asm 1: movdqa <xmm9=int6464#10,>xmm2=int6464#2 | ||
4156 | # asm 2: movdqa <xmm9=%xmm9,>xmm2=%xmm1 | ||
4157 | movdqa %xmm9,%xmm1 | ||
4158 | |||
4159 | # qhasm: xmm1 = xmm13 | ||
4160 | # asm 1: movdqa <xmm13=int6464#14,>xmm1=int6464#3 | ||
4161 | # asm 2: movdqa <xmm13=%xmm13,>xmm1=%xmm2 | ||
4162 | movdqa %xmm13,%xmm2 | ||
4163 | |||
4164 | # qhasm: xmm5 = xmm10 | ||
4165 | # asm 1: movdqa <xmm10=int6464#11,>xmm5=int6464#4 | ||
4166 | # asm 2: movdqa <xmm10=%xmm10,>xmm5=%xmm3 | ||
4167 | movdqa %xmm10,%xmm3 | ||
4168 | |||
4169 | # qhasm: xmm4 = xmm14 | ||
4170 | # asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#5 | ||
4171 | # asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm4 | ||
4172 | movdqa %xmm14,%xmm4 | ||
4173 | |||
4174 | # qhasm: xmm3 ^= xmm12 | ||
4175 | # asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#1 | ||
4176 | # asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm0 | ||
4177 | pxor %xmm12,%xmm0 | ||
4178 | |||
4179 | # qhasm: xmm2 ^= xmm10 | ||
4180 | # asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#2 | ||
4181 | # asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm1 | ||
4182 | pxor %xmm10,%xmm1 | ||
4183 | |||
4184 | # qhasm: xmm1 ^= xmm11 | ||
4185 | # asm 1: pxor <xmm11=int6464#12,<xmm1=int6464#3 | ||
4186 | # asm 2: pxor <xmm11=%xmm11,<xmm1=%xmm2 | ||
4187 | pxor %xmm11,%xmm2 | ||
4188 | |||
4189 | # qhasm: xmm5 ^= xmm12 | ||
4190 | # asm 1: pxor <xmm12=int6464#13,<xmm5=int6464#4 | ||
4191 | # asm 2: pxor <xmm12=%xmm12,<xmm5=%xmm3 | ||
4192 | pxor %xmm12,%xmm3 | ||
4193 | |||
4194 | # qhasm: xmm4 ^= xmm8 | ||
4195 | # asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#5 | ||
4196 | # asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm4 | ||
4197 | pxor %xmm8,%xmm4 | ||
4198 | |||
4199 | # qhasm: xmm6 = xmm3 | ||
4200 | # asm 1: movdqa <xmm3=int6464#1,>xmm6=int6464#6 | ||
4201 | # asm 2: movdqa <xmm3=%xmm0,>xmm6=%xmm5 | ||
4202 | movdqa %xmm0,%xmm5 | ||
4203 | |||
4204 | # qhasm: xmm0 = xmm2 | ||
4205 | # asm 1: movdqa <xmm2=int6464#2,>xmm0=int6464#7 | ||
4206 | # asm 2: movdqa <xmm2=%xmm1,>xmm0=%xmm6 | ||
4207 | movdqa %xmm1,%xmm6 | ||
4208 | |||
4209 | # qhasm: xmm7 = xmm3 | ||
4210 | # asm 1: movdqa <xmm3=int6464#1,>xmm7=int6464#8 | ||
4211 | # asm 2: movdqa <xmm3=%xmm0,>xmm7=%xmm7 | ||
4212 | movdqa %xmm0,%xmm7 | ||
4213 | |||
4214 | # qhasm: xmm2 |= xmm1 | ||
4215 | # asm 1: por <xmm1=int6464#3,<xmm2=int6464#2 | ||
4216 | # asm 2: por <xmm1=%xmm2,<xmm2=%xmm1 | ||
4217 | por %xmm2,%xmm1 | ||
4218 | |||
4219 | # qhasm: xmm3 |= xmm4 | ||
4220 | # asm 1: por <xmm4=int6464#5,<xmm3=int6464#1 | ||
4221 | # asm 2: por <xmm4=%xmm4,<xmm3=%xmm0 | ||
4222 | por %xmm4,%xmm0 | ||
4223 | |||
4224 | # qhasm: xmm7 ^= xmm0 | ||
4225 | # asm 1: pxor <xmm0=int6464#7,<xmm7=int6464#8 | ||
4226 | # asm 2: pxor <xmm0=%xmm6,<xmm7=%xmm7 | ||
4227 | pxor %xmm6,%xmm7 | ||
4228 | |||
4229 | # qhasm: xmm6 &= xmm4 | ||
4230 | # asm 1: pand <xmm4=int6464#5,<xmm6=int6464#6 | ||
4231 | # asm 2: pand <xmm4=%xmm4,<xmm6=%xmm5 | ||
4232 | pand %xmm4,%xmm5 | ||
4233 | |||
4234 | # qhasm: xmm0 &= xmm1 | ||
4235 | # asm 1: pand <xmm1=int6464#3,<xmm0=int6464#7 | ||
4236 | # asm 2: pand <xmm1=%xmm2,<xmm0=%xmm6 | ||
4237 | pand %xmm2,%xmm6 | ||
4238 | |||
4239 | # qhasm: xmm4 ^= xmm1 | ||
4240 | # asm 1: pxor <xmm1=int6464#3,<xmm4=int6464#5 | ||
4241 | # asm 2: pxor <xmm1=%xmm2,<xmm4=%xmm4 | ||
4242 | pxor %xmm2,%xmm4 | ||
4243 | |||
4244 | # qhasm: xmm7 &= xmm4 | ||
4245 | # asm 1: pand <xmm4=int6464#5,<xmm7=int6464#8 | ||
4246 | # asm 2: pand <xmm4=%xmm4,<xmm7=%xmm7 | ||
4247 | pand %xmm4,%xmm7 | ||
4248 | |||
4249 | # qhasm: xmm4 = xmm11 | ||
4250 | # asm 1: movdqa <xmm11=int6464#12,>xmm4=int6464#3 | ||
4251 | # asm 2: movdqa <xmm11=%xmm11,>xmm4=%xmm2 | ||
4252 | movdqa %xmm11,%xmm2 | ||
4253 | |||
4254 | # qhasm: xmm4 ^= xmm8 | ||
4255 | # asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#3 | ||
4256 | # asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm2 | ||
4257 | pxor %xmm8,%xmm2 | ||
4258 | |||
4259 | # qhasm: xmm5 &= xmm4 | ||
4260 | # asm 1: pand <xmm4=int6464#3,<xmm5=int6464#4 | ||
4261 | # asm 2: pand <xmm4=%xmm2,<xmm5=%xmm3 | ||
4262 | pand %xmm2,%xmm3 | ||
4263 | |||
4264 | # qhasm: xmm3 ^= xmm5 | ||
4265 | # asm 1: pxor <xmm5=int6464#4,<xmm3=int6464#1 | ||
4266 | # asm 2: pxor <xmm5=%xmm3,<xmm3=%xmm0 | ||
4267 | pxor %xmm3,%xmm0 | ||
4268 | |||
4269 | # qhasm: xmm2 ^= xmm5 | ||
4270 | # asm 1: pxor <xmm5=int6464#4,<xmm2=int6464#2 | ||
4271 | # asm 2: pxor <xmm5=%xmm3,<xmm2=%xmm1 | ||
4272 | pxor %xmm3,%xmm1 | ||
4273 | |||
4274 | # qhasm: xmm5 = xmm15 | ||
4275 | # asm 1: movdqa <xmm15=int6464#16,>xmm5=int6464#3 | ||
4276 | # asm 2: movdqa <xmm15=%xmm15,>xmm5=%xmm2 | ||
4277 | movdqa %xmm15,%xmm2 | ||
4278 | |||
4279 | # qhasm: xmm5 ^= xmm9 | ||
4280 | # asm 1: pxor <xmm9=int6464#10,<xmm5=int6464#3 | ||
4281 | # asm 2: pxor <xmm9=%xmm9,<xmm5=%xmm2 | ||
4282 | pxor %xmm9,%xmm2 | ||
4283 | |||
4284 | # qhasm: xmm4 = xmm13 | ||
4285 | # asm 1: movdqa <xmm13=int6464#14,>xmm4=int6464#4 | ||
4286 | # asm 2: movdqa <xmm13=%xmm13,>xmm4=%xmm3 | ||
4287 | movdqa %xmm13,%xmm3 | ||
4288 | |||
4289 | # qhasm: xmm1 = xmm5 | ||
4290 | # asm 1: movdqa <xmm5=int6464#3,>xmm1=int6464#5 | ||
4291 | # asm 2: movdqa <xmm5=%xmm2,>xmm1=%xmm4 | ||
4292 | movdqa %xmm2,%xmm4 | ||
4293 | |||
4294 | # qhasm: xmm4 ^= xmm14 | ||
4295 | # asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#4 | ||
4296 | # asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm3 | ||
4297 | pxor %xmm14,%xmm3 | ||
4298 | |||
4299 | # qhasm: xmm1 |= xmm4 | ||
4300 | # asm 1: por <xmm4=int6464#4,<xmm1=int6464#5 | ||
4301 | # asm 2: por <xmm4=%xmm3,<xmm1=%xmm4 | ||
4302 | por %xmm3,%xmm4 | ||
4303 | |||
4304 | # qhasm: xmm5 &= xmm4 | ||
4305 | # asm 1: pand <xmm4=int6464#4,<xmm5=int6464#3 | ||
4306 | # asm 2: pand <xmm4=%xmm3,<xmm5=%xmm2 | ||
4307 | pand %xmm3,%xmm2 | ||
4308 | |||
4309 | # qhasm: xmm0 ^= xmm5 | ||
4310 | # asm 1: pxor <xmm5=int6464#3,<xmm0=int6464#7 | ||
4311 | # asm 2: pxor <xmm5=%xmm2,<xmm0=%xmm6 | ||
4312 | pxor %xmm2,%xmm6 | ||
4313 | |||
4314 | # qhasm: xmm3 ^= xmm7 | ||
4315 | # asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#1 | ||
4316 | # asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm0 | ||
4317 | pxor %xmm7,%xmm0 | ||
4318 | |||
4319 | # qhasm: xmm2 ^= xmm6 | ||
4320 | # asm 1: pxor <xmm6=int6464#6,<xmm2=int6464#2 | ||
4321 | # asm 2: pxor <xmm6=%xmm5,<xmm2=%xmm1 | ||
4322 | pxor %xmm5,%xmm1 | ||
4323 | |||
4324 | # qhasm: xmm1 ^= xmm7 | ||
4325 | # asm 1: pxor <xmm7=int6464#8,<xmm1=int6464#5 | ||
4326 | # asm 2: pxor <xmm7=%xmm7,<xmm1=%xmm4 | ||
4327 | pxor %xmm7,%xmm4 | ||
4328 | |||
4329 | # qhasm: xmm0 ^= xmm6 | ||
4330 | # asm 1: pxor <xmm6=int6464#6,<xmm0=int6464#7 | ||
4331 | # asm 2: pxor <xmm6=%xmm5,<xmm0=%xmm6 | ||
4332 | pxor %xmm5,%xmm6 | ||
4333 | |||
4334 | # qhasm: xmm1 ^= xmm6 | ||
4335 | # asm 1: pxor <xmm6=int6464#6,<xmm1=int6464#5 | ||
4336 | # asm 2: pxor <xmm6=%xmm5,<xmm1=%xmm4 | ||
4337 | pxor %xmm5,%xmm4 | ||
4338 | |||
4339 | # qhasm: xmm4 = xmm10 | ||
4340 | # asm 1: movdqa <xmm10=int6464#11,>xmm4=int6464#3 | ||
4341 | # asm 2: movdqa <xmm10=%xmm10,>xmm4=%xmm2 | ||
4342 | movdqa %xmm10,%xmm2 | ||
4343 | |||
4344 | # qhasm: xmm5 = xmm12 | ||
4345 | # asm 1: movdqa <xmm12=int6464#13,>xmm5=int6464#4 | ||
4346 | # asm 2: movdqa <xmm12=%xmm12,>xmm5=%xmm3 | ||
4347 | movdqa %xmm12,%xmm3 | ||
4348 | |||
4349 | # qhasm: xmm6 = xmm9 | ||
4350 | # asm 1: movdqa <xmm9=int6464#10,>xmm6=int6464#6 | ||
4351 | # asm 2: movdqa <xmm9=%xmm9,>xmm6=%xmm5 | ||
4352 | movdqa %xmm9,%xmm5 | ||
4353 | |||
4354 | # qhasm: xmm7 = xmm15 | ||
4355 | # asm 1: movdqa <xmm15=int6464#16,>xmm7=int6464#8 | ||
4356 | # asm 2: movdqa <xmm15=%xmm15,>xmm7=%xmm7 | ||
4357 | movdqa %xmm15,%xmm7 | ||
4358 | |||
4359 | # qhasm: xmm4 &= xmm11 | ||
4360 | # asm 1: pand <xmm11=int6464#12,<xmm4=int6464#3 | ||
4361 | # asm 2: pand <xmm11=%xmm11,<xmm4=%xmm2 | ||
4362 | pand %xmm11,%xmm2 | ||
4363 | |||
4364 | # qhasm: xmm5 &= xmm8 | ||
4365 | # asm 1: pand <xmm8=int6464#9,<xmm5=int6464#4 | ||
4366 | # asm 2: pand <xmm8=%xmm8,<xmm5=%xmm3 | ||
4367 | pand %xmm8,%xmm3 | ||
4368 | |||
4369 | # qhasm: xmm6 &= xmm13 | ||
4370 | # asm 1: pand <xmm13=int6464#14,<xmm6=int6464#6 | ||
4371 | # asm 2: pand <xmm13=%xmm13,<xmm6=%xmm5 | ||
4372 | pand %xmm13,%xmm5 | ||
4373 | |||
4374 | # qhasm: xmm7 |= xmm14 | ||
4375 | # asm 1: por <xmm14=int6464#15,<xmm7=int6464#8 | ||
4376 | # asm 2: por <xmm14=%xmm14,<xmm7=%xmm7 | ||
4377 | por %xmm14,%xmm7 | ||
4378 | |||
4379 | # qhasm: xmm3 ^= xmm4 | ||
4380 | # asm 1: pxor <xmm4=int6464#3,<xmm3=int6464#1 | ||
4381 | # asm 2: pxor <xmm4=%xmm2,<xmm3=%xmm0 | ||
4382 | pxor %xmm2,%xmm0 | ||
4383 | |||
4384 | # qhasm: xmm2 ^= xmm5 | ||
4385 | # asm 1: pxor <xmm5=int6464#4,<xmm2=int6464#2 | ||
4386 | # asm 2: pxor <xmm5=%xmm3,<xmm2=%xmm1 | ||
4387 | pxor %xmm3,%xmm1 | ||
4388 | |||
4389 | # qhasm: xmm1 ^= xmm6 | ||
4390 | # asm 1: pxor <xmm6=int6464#6,<xmm1=int6464#5 | ||
4391 | # asm 2: pxor <xmm6=%xmm5,<xmm1=%xmm4 | ||
4392 | pxor %xmm5,%xmm4 | ||
4393 | |||
4394 | # qhasm: xmm0 ^= xmm7 | ||
4395 | # asm 1: pxor <xmm7=int6464#8,<xmm0=int6464#7 | ||
4396 | # asm 2: pxor <xmm7=%xmm7,<xmm0=%xmm6 | ||
4397 | pxor %xmm7,%xmm6 | ||
4398 | |||
4399 | # qhasm: xmm4 = xmm3 | ||
4400 | # asm 1: movdqa <xmm3=int6464#1,>xmm4=int6464#3 | ||
4401 | # asm 2: movdqa <xmm3=%xmm0,>xmm4=%xmm2 | ||
4402 | movdqa %xmm0,%xmm2 | ||
4403 | |||
4404 | # qhasm: xmm4 ^= xmm2 | ||
4405 | # asm 1: pxor <xmm2=int6464#2,<xmm4=int6464#3 | ||
4406 | # asm 2: pxor <xmm2=%xmm1,<xmm4=%xmm2 | ||
4407 | pxor %xmm1,%xmm2 | ||
4408 | |||
4409 | # qhasm: xmm3 &= xmm1 | ||
4410 | # asm 1: pand <xmm1=int6464#5,<xmm3=int6464#1 | ||
4411 | # asm 2: pand <xmm1=%xmm4,<xmm3=%xmm0 | ||
4412 | pand %xmm4,%xmm0 | ||
4413 | |||
4414 | # qhasm: xmm6 = xmm0 | ||
4415 | # asm 1: movdqa <xmm0=int6464#7,>xmm6=int6464#4 | ||
4416 | # asm 2: movdqa <xmm0=%xmm6,>xmm6=%xmm3 | ||
4417 | movdqa %xmm6,%xmm3 | ||
4418 | |||
4419 | # qhasm: xmm6 ^= xmm3 | ||
4420 | # asm 1: pxor <xmm3=int6464#1,<xmm6=int6464#4 | ||
4421 | # asm 2: pxor <xmm3=%xmm0,<xmm6=%xmm3 | ||
4422 | pxor %xmm0,%xmm3 | ||
4423 | |||
4424 | # qhasm: xmm7 = xmm4 | ||
4425 | # asm 1: movdqa <xmm4=int6464#3,>xmm7=int6464#6 | ||
4426 | # asm 2: movdqa <xmm4=%xmm2,>xmm7=%xmm5 | ||
4427 | movdqa %xmm2,%xmm5 | ||
4428 | |||
4429 | # qhasm: xmm7 &= xmm6 | ||
4430 | # asm 1: pand <xmm6=int6464#4,<xmm7=int6464#6 | ||
4431 | # asm 2: pand <xmm6=%xmm3,<xmm7=%xmm5 | ||
4432 | pand %xmm3,%xmm5 | ||
4433 | |||
4434 | # qhasm: xmm7 ^= xmm2 | ||
4435 | # asm 1: pxor <xmm2=int6464#2,<xmm7=int6464#6 | ||
4436 | # asm 2: pxor <xmm2=%xmm1,<xmm7=%xmm5 | ||
4437 | pxor %xmm1,%xmm5 | ||
4438 | |||
4439 | # qhasm: xmm5 = xmm1 | ||
4440 | # asm 1: movdqa <xmm1=int6464#5,>xmm5=int6464#8 | ||
4441 | # asm 2: movdqa <xmm1=%xmm4,>xmm5=%xmm7 | ||
4442 | movdqa %xmm4,%xmm7 | ||
4443 | |||
4444 | # qhasm: xmm5 ^= xmm0 | ||
4445 | # asm 1: pxor <xmm0=int6464#7,<xmm5=int6464#8 | ||
4446 | # asm 2: pxor <xmm0=%xmm6,<xmm5=%xmm7 | ||
4447 | pxor %xmm6,%xmm7 | ||
4448 | |||
4449 | # qhasm: xmm3 ^= xmm2 | ||
4450 | # asm 1: pxor <xmm2=int6464#2,<xmm3=int6464#1 | ||
4451 | # asm 2: pxor <xmm2=%xmm1,<xmm3=%xmm0 | ||
4452 | pxor %xmm1,%xmm0 | ||
4453 | |||
4454 | # qhasm: xmm5 &= xmm3 | ||
4455 | # asm 1: pand <xmm3=int6464#1,<xmm5=int6464#8 | ||
4456 | # asm 2: pand <xmm3=%xmm0,<xmm5=%xmm7 | ||
4457 | pand %xmm0,%xmm7 | ||
4458 | |||
4459 | # qhasm: xmm5 ^= xmm0 | ||
4460 | # asm 1: pxor <xmm0=int6464#7,<xmm5=int6464#8 | ||
4461 | # asm 2: pxor <xmm0=%xmm6,<xmm5=%xmm7 | ||
4462 | pxor %xmm6,%xmm7 | ||
4463 | |||
4464 | # qhasm: xmm1 ^= xmm5 | ||
4465 | # asm 1: pxor <xmm5=int6464#8,<xmm1=int6464#5 | ||
4466 | # asm 2: pxor <xmm5=%xmm7,<xmm1=%xmm4 | ||
4467 | pxor %xmm7,%xmm4 | ||
4468 | |||
4469 | # qhasm: xmm2 = xmm6 | ||
4470 | # asm 1: movdqa <xmm6=int6464#4,>xmm2=int6464#1 | ||
4471 | # asm 2: movdqa <xmm6=%xmm3,>xmm2=%xmm0 | ||
4472 | movdqa %xmm3,%xmm0 | ||
4473 | |||
4474 | # qhasm: xmm2 ^= xmm5 | ||
4475 | # asm 1: pxor <xmm5=int6464#8,<xmm2=int6464#1 | ||
4476 | # asm 2: pxor <xmm5=%xmm7,<xmm2=%xmm0 | ||
4477 | pxor %xmm7,%xmm0 | ||
4478 | |||
4479 | # qhasm: xmm2 &= xmm0 | ||
4480 | # asm 1: pand <xmm0=int6464#7,<xmm2=int6464#1 | ||
4481 | # asm 2: pand <xmm0=%xmm6,<xmm2=%xmm0 | ||
4482 | pand %xmm6,%xmm0 | ||
4483 | |||
4484 | # qhasm: xmm1 ^= xmm2 | ||
4485 | # asm 1: pxor <xmm2=int6464#1,<xmm1=int6464#5 | ||
4486 | # asm 2: pxor <xmm2=%xmm0,<xmm1=%xmm4 | ||
4487 | pxor %xmm0,%xmm4 | ||
4488 | |||
4489 | # qhasm: xmm6 ^= xmm2 | ||
4490 | # asm 1: pxor <xmm2=int6464#1,<xmm6=int6464#4 | ||
4491 | # asm 2: pxor <xmm2=%xmm0,<xmm6=%xmm3 | ||
4492 | pxor %xmm0,%xmm3 | ||
4493 | |||
4494 | # qhasm: xmm6 &= xmm7 | ||
4495 | # asm 1: pand <xmm7=int6464#6,<xmm6=int6464#4 | ||
4496 | # asm 2: pand <xmm7=%xmm5,<xmm6=%xmm3 | ||
4497 | pand %xmm5,%xmm3 | ||
4498 | |||
4499 | # qhasm: xmm6 ^= xmm4 | ||
4500 | # asm 1: pxor <xmm4=int6464#3,<xmm6=int6464#4 | ||
4501 | # asm 2: pxor <xmm4=%xmm2,<xmm6=%xmm3 | ||
4502 | pxor %xmm2,%xmm3 | ||
4503 | |||
4504 | # qhasm: xmm4 = xmm14 | ||
4505 | # asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#1 | ||
4506 | # asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm0 | ||
4507 | movdqa %xmm14,%xmm0 | ||
4508 | |||
4509 | # qhasm: xmm0 = xmm13 | ||
4510 | # asm 1: movdqa <xmm13=int6464#14,>xmm0=int6464#2 | ||
4511 | # asm 2: movdqa <xmm13=%xmm13,>xmm0=%xmm1 | ||
4512 | movdqa %xmm13,%xmm1 | ||
4513 | |||
4514 | # qhasm: xmm2 = xmm7 | ||
4515 | # asm 1: movdqa <xmm7=int6464#6,>xmm2=int6464#3 | ||
4516 | # asm 2: movdqa <xmm7=%xmm5,>xmm2=%xmm2 | ||
4517 | movdqa %xmm5,%xmm2 | ||
4518 | |||
4519 | # qhasm: xmm2 ^= xmm6 | ||
4520 | # asm 1: pxor <xmm6=int6464#4,<xmm2=int6464#3 | ||
4521 | # asm 2: pxor <xmm6=%xmm3,<xmm2=%xmm2 | ||
4522 | pxor %xmm3,%xmm2 | ||
4523 | |||
4524 | # qhasm: xmm2 &= xmm14 | ||
4525 | # asm 1: pand <xmm14=int6464#15,<xmm2=int6464#3 | ||
4526 | # asm 2: pand <xmm14=%xmm14,<xmm2=%xmm2 | ||
4527 | pand %xmm14,%xmm2 | ||
4528 | |||
4529 | # qhasm: xmm14 ^= xmm13 | ||
4530 | # asm 1: pxor <xmm13=int6464#14,<xmm14=int6464#15 | ||
4531 | # asm 2: pxor <xmm13=%xmm13,<xmm14=%xmm14 | ||
4532 | pxor %xmm13,%xmm14 | ||
4533 | |||
4534 | # qhasm: xmm14 &= xmm6 | ||
4535 | # asm 1: pand <xmm6=int6464#4,<xmm14=int6464#15 | ||
4536 | # asm 2: pand <xmm6=%xmm3,<xmm14=%xmm14 | ||
4537 | pand %xmm3,%xmm14 | ||
4538 | |||
4539 | # qhasm: xmm13 &= xmm7 | ||
4540 | # asm 1: pand <xmm7=int6464#6,<xmm13=int6464#14 | ||
4541 | # asm 2: pand <xmm7=%xmm5,<xmm13=%xmm13 | ||
4542 | pand %xmm5,%xmm13 | ||
4543 | |||
4544 | # qhasm: xmm14 ^= xmm13 | ||
4545 | # asm 1: pxor <xmm13=int6464#14,<xmm14=int6464#15 | ||
4546 | # asm 2: pxor <xmm13=%xmm13,<xmm14=%xmm14 | ||
4547 | pxor %xmm13,%xmm14 | ||
4548 | |||
4549 | # qhasm: xmm13 ^= xmm2 | ||
4550 | # asm 1: pxor <xmm2=int6464#3,<xmm13=int6464#14 | ||
4551 | # asm 2: pxor <xmm2=%xmm2,<xmm13=%xmm13 | ||
4552 | pxor %xmm2,%xmm13 | ||
4553 | |||
4554 | # qhasm: xmm4 ^= xmm8 | ||
4555 | # asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#1 | ||
4556 | # asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm0 | ||
4557 | pxor %xmm8,%xmm0 | ||
4558 | |||
4559 | # qhasm: xmm0 ^= xmm11 | ||
4560 | # asm 1: pxor <xmm11=int6464#12,<xmm0=int6464#2 | ||
4561 | # asm 2: pxor <xmm11=%xmm11,<xmm0=%xmm1 | ||
4562 | pxor %xmm11,%xmm1 | ||
4563 | |||
4564 | # qhasm: xmm7 ^= xmm5 | ||
4565 | # asm 1: pxor <xmm5=int6464#8,<xmm7=int6464#6 | ||
4566 | # asm 2: pxor <xmm5=%xmm7,<xmm7=%xmm5 | ||
4567 | pxor %xmm7,%xmm5 | ||
4568 | |||
4569 | # qhasm: xmm6 ^= xmm1 | ||
4570 | # asm 1: pxor <xmm1=int6464#5,<xmm6=int6464#4 | ||
4571 | # asm 2: pxor <xmm1=%xmm4,<xmm6=%xmm3 | ||
4572 | pxor %xmm4,%xmm3 | ||
4573 | |||
4574 | # qhasm: xmm3 = xmm7 | ||
4575 | # asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3 | ||
4576 | # asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2 | ||
4577 | movdqa %xmm5,%xmm2 | ||
4578 | |||
4579 | # qhasm: xmm3 ^= xmm6 | ||
4580 | # asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3 | ||
4581 | # asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2 | ||
4582 | pxor %xmm3,%xmm2 | ||
4583 | |||
4584 | # qhasm: xmm3 &= xmm4 | ||
4585 | # asm 1: pand <xmm4=int6464#1,<xmm3=int6464#3 | ||
4586 | # asm 2: pand <xmm4=%xmm0,<xmm3=%xmm2 | ||
4587 | pand %xmm0,%xmm2 | ||
4588 | |||
4589 | # qhasm: xmm4 ^= xmm0 | ||
4590 | # asm 1: pxor <xmm0=int6464#2,<xmm4=int6464#1 | ||
4591 | # asm 2: pxor <xmm0=%xmm1,<xmm4=%xmm0 | ||
4592 | pxor %xmm1,%xmm0 | ||
4593 | |||
4594 | # qhasm: xmm4 &= xmm6 | ||
4595 | # asm 1: pand <xmm6=int6464#4,<xmm4=int6464#1 | ||
4596 | # asm 2: pand <xmm6=%xmm3,<xmm4=%xmm0 | ||
4597 | pand %xmm3,%xmm0 | ||
4598 | |||
4599 | # qhasm: xmm0 &= xmm7 | ||
4600 | # asm 1: pand <xmm7=int6464#6,<xmm0=int6464#2 | ||
4601 | # asm 2: pand <xmm7=%xmm5,<xmm0=%xmm1 | ||
4602 | pand %xmm5,%xmm1 | ||
4603 | |||
4604 | # qhasm: xmm0 ^= xmm4 | ||
4605 | # asm 1: pxor <xmm4=int6464#1,<xmm0=int6464#2 | ||
4606 | # asm 2: pxor <xmm4=%xmm0,<xmm0=%xmm1 | ||
4607 | pxor %xmm0,%xmm1 | ||
4608 | |||
4609 | # qhasm: xmm4 ^= xmm3 | ||
4610 | # asm 1: pxor <xmm3=int6464#3,<xmm4=int6464#1 | ||
4611 | # asm 2: pxor <xmm3=%xmm2,<xmm4=%xmm0 | ||
4612 | pxor %xmm2,%xmm0 | ||
4613 | |||
4614 | # qhasm: xmm2 = xmm5 | ||
4615 | # asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3 | ||
4616 | # asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2 | ||
4617 | movdqa %xmm7,%xmm2 | ||
4618 | |||
4619 | # qhasm: xmm2 ^= xmm1 | ||
4620 | # asm 1: pxor <xmm1=int6464#5,<xmm2=int6464#3 | ||
4621 | # asm 2: pxor <xmm1=%xmm4,<xmm2=%xmm2 | ||
4622 | pxor %xmm4,%xmm2 | ||
4623 | |||
4624 | # qhasm: xmm2 &= xmm8 | ||
4625 | # asm 1: pand <xmm8=int6464#9,<xmm2=int6464#3 | ||
4626 | # asm 2: pand <xmm8=%xmm8,<xmm2=%xmm2 | ||
4627 | pand %xmm8,%xmm2 | ||
4628 | |||
4629 | # qhasm: xmm8 ^= xmm11 | ||
4630 | # asm 1: pxor <xmm11=int6464#12,<xmm8=int6464#9 | ||
4631 | # asm 2: pxor <xmm11=%xmm11,<xmm8=%xmm8 | ||
4632 | pxor %xmm11,%xmm8 | ||
4633 | |||
4634 | # qhasm: xmm8 &= xmm1 | ||
4635 | # asm 1: pand <xmm1=int6464#5,<xmm8=int6464#9 | ||
4636 | # asm 2: pand <xmm1=%xmm4,<xmm8=%xmm8 | ||
4637 | pand %xmm4,%xmm8 | ||
4638 | |||
4639 | # qhasm: xmm11 &= xmm5 | ||
4640 | # asm 1: pand <xmm5=int6464#8,<xmm11=int6464#12 | ||
4641 | # asm 2: pand <xmm5=%xmm7,<xmm11=%xmm11 | ||
4642 | pand %xmm7,%xmm11 | ||
4643 | |||
4644 | # qhasm: xmm8 ^= xmm11 | ||
4645 | # asm 1: pxor <xmm11=int6464#12,<xmm8=int6464#9 | ||
4646 | # asm 2: pxor <xmm11=%xmm11,<xmm8=%xmm8 | ||
4647 | pxor %xmm11,%xmm8 | ||
4648 | |||
4649 | # qhasm: xmm11 ^= xmm2 | ||
4650 | # asm 1: pxor <xmm2=int6464#3,<xmm11=int6464#12 | ||
4651 | # asm 2: pxor <xmm2=%xmm2,<xmm11=%xmm11 | ||
4652 | pxor %xmm2,%xmm11 | ||
4653 | |||
4654 | # qhasm: xmm14 ^= xmm4 | ||
4655 | # asm 1: pxor <xmm4=int6464#1,<xmm14=int6464#15 | ||
4656 | # asm 2: pxor <xmm4=%xmm0,<xmm14=%xmm14 | ||
4657 | pxor %xmm0,%xmm14 | ||
4658 | |||
4659 | # qhasm: xmm8 ^= xmm4 | ||
4660 | # asm 1: pxor <xmm4=int6464#1,<xmm8=int6464#9 | ||
4661 | # asm 2: pxor <xmm4=%xmm0,<xmm8=%xmm8 | ||
4662 | pxor %xmm0,%xmm8 | ||
4663 | |||
4664 | # qhasm: xmm13 ^= xmm0 | ||
4665 | # asm 1: pxor <xmm0=int6464#2,<xmm13=int6464#14 | ||
4666 | # asm 2: pxor <xmm0=%xmm1,<xmm13=%xmm13 | ||
4667 | pxor %xmm1,%xmm13 | ||
4668 | |||
4669 | # qhasm: xmm11 ^= xmm0 | ||
4670 | # asm 1: pxor <xmm0=int6464#2,<xmm11=int6464#12 | ||
4671 | # asm 2: pxor <xmm0=%xmm1,<xmm11=%xmm11 | ||
4672 | pxor %xmm1,%xmm11 | ||
4673 | |||
4674 | # qhasm: xmm4 = xmm15 | ||
4675 | # asm 1: movdqa <xmm15=int6464#16,>xmm4=int6464#1 | ||
4676 | # asm 2: movdqa <xmm15=%xmm15,>xmm4=%xmm0 | ||
4677 | movdqa %xmm15,%xmm0 | ||
4678 | |||
4679 | # qhasm: xmm0 = xmm9 | ||
4680 | # asm 1: movdqa <xmm9=int6464#10,>xmm0=int6464#2 | ||
4681 | # asm 2: movdqa <xmm9=%xmm9,>xmm0=%xmm1 | ||
4682 | movdqa %xmm9,%xmm1 | ||
4683 | |||
4684 | # qhasm: xmm4 ^= xmm12 | ||
4685 | # asm 1: pxor <xmm12=int6464#13,<xmm4=int6464#1 | ||
4686 | # asm 2: pxor <xmm12=%xmm12,<xmm4=%xmm0 | ||
4687 | pxor %xmm12,%xmm0 | ||
4688 | |||
4689 | # qhasm: xmm0 ^= xmm10 | ||
4690 | # asm 1: pxor <xmm10=int6464#11,<xmm0=int6464#2 | ||
4691 | # asm 2: pxor <xmm10=%xmm10,<xmm0=%xmm1 | ||
4692 | pxor %xmm10,%xmm1 | ||
4693 | |||
4694 | # qhasm: xmm3 = xmm7 | ||
4695 | # asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3 | ||
4696 | # asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2 | ||
4697 | movdqa %xmm5,%xmm2 | ||
4698 | |||
4699 | # qhasm: xmm3 ^= xmm6 | ||
4700 | # asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3 | ||
4701 | # asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2 | ||
4702 | pxor %xmm3,%xmm2 | ||
4703 | |||
4704 | # qhasm: xmm3 &= xmm4 | ||
4705 | # asm 1: pand <xmm4=int6464#1,<xmm3=int6464#3 | ||
4706 | # asm 2: pand <xmm4=%xmm0,<xmm3=%xmm2 | ||
4707 | pand %xmm0,%xmm2 | ||
4708 | |||
4709 | # qhasm: xmm4 ^= xmm0 | ||
4710 | # asm 1: pxor <xmm0=int6464#2,<xmm4=int6464#1 | ||
4711 | # asm 2: pxor <xmm0=%xmm1,<xmm4=%xmm0 | ||
4712 | pxor %xmm1,%xmm0 | ||
4713 | |||
4714 | # qhasm: xmm4 &= xmm6 | ||
4715 | # asm 1: pand <xmm6=int6464#4,<xmm4=int6464#1 | ||
4716 | # asm 2: pand <xmm6=%xmm3,<xmm4=%xmm0 | ||
4717 | pand %xmm3,%xmm0 | ||
4718 | |||
4719 | # qhasm: xmm0 &= xmm7 | ||
4720 | # asm 1: pand <xmm7=int6464#6,<xmm0=int6464#2 | ||
4721 | # asm 2: pand <xmm7=%xmm5,<xmm0=%xmm1 | ||
4722 | pand %xmm5,%xmm1 | ||
4723 | |||
4724 | # qhasm: xmm0 ^= xmm4 | ||
4725 | # asm 1: pxor <xmm4=int6464#1,<xmm0=int6464#2 | ||
4726 | # asm 2: pxor <xmm4=%xmm0,<xmm0=%xmm1 | ||
4727 | pxor %xmm0,%xmm1 | ||
4728 | |||
4729 | # qhasm: xmm4 ^= xmm3 | ||
4730 | # asm 1: pxor <xmm3=int6464#3,<xmm4=int6464#1 | ||
4731 | # asm 2: pxor <xmm3=%xmm2,<xmm4=%xmm0 | ||
4732 | pxor %xmm2,%xmm0 | ||
4733 | |||
4734 | # qhasm: xmm2 = xmm5 | ||
4735 | # asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3 | ||
4736 | # asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2 | ||
4737 | movdqa %xmm7,%xmm2 | ||
4738 | |||
4739 | # qhasm: xmm2 ^= xmm1 | ||
4740 | # asm 1: pxor <xmm1=int6464#5,<xmm2=int6464#3 | ||
4741 | # asm 2: pxor <xmm1=%xmm4,<xmm2=%xmm2 | ||
4742 | pxor %xmm4,%xmm2 | ||
4743 | |||
4744 | # qhasm: xmm2 &= xmm12 | ||
4745 | # asm 1: pand <xmm12=int6464#13,<xmm2=int6464#3 | ||
4746 | # asm 2: pand <xmm12=%xmm12,<xmm2=%xmm2 | ||
4747 | pand %xmm12,%xmm2 | ||
4748 | |||
4749 | # qhasm: xmm12 ^= xmm10 | ||
4750 | # asm 1: pxor <xmm10=int6464#11,<xmm12=int6464#13 | ||
4751 | # asm 2: pxor <xmm10=%xmm10,<xmm12=%xmm12 | ||
4752 | pxor %xmm10,%xmm12 | ||
4753 | |||
4754 | # qhasm: xmm12 &= xmm1 | ||
4755 | # asm 1: pand <xmm1=int6464#5,<xmm12=int6464#13 | ||
4756 | # asm 2: pand <xmm1=%xmm4,<xmm12=%xmm12 | ||
4757 | pand %xmm4,%xmm12 | ||
4758 | |||
4759 | # qhasm: xmm10 &= xmm5 | ||
4760 | # asm 1: pand <xmm5=int6464#8,<xmm10=int6464#11 | ||
4761 | # asm 2: pand <xmm5=%xmm7,<xmm10=%xmm10 | ||
4762 | pand %xmm7,%xmm10 | ||
4763 | |||
4764 | # qhasm: xmm12 ^= xmm10 | ||
4765 | # asm 1: pxor <xmm10=int6464#11,<xmm12=int6464#13 | ||
4766 | # asm 2: pxor <xmm10=%xmm10,<xmm12=%xmm12 | ||
4767 | pxor %xmm10,%xmm12 | ||
4768 | |||
4769 | # qhasm: xmm10 ^= xmm2 | ||
4770 | # asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#11 | ||
4771 | # asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm10 | ||
4772 | pxor %xmm2,%xmm10 | ||
4773 | |||
4774 | # qhasm: xmm7 ^= xmm5 | ||
4775 | # asm 1: pxor <xmm5=int6464#8,<xmm7=int6464#6 | ||
4776 | # asm 2: pxor <xmm5=%xmm7,<xmm7=%xmm5 | ||
4777 | pxor %xmm7,%xmm5 | ||
4778 | |||
4779 | # qhasm: xmm6 ^= xmm1 | ||
4780 | # asm 1: pxor <xmm1=int6464#5,<xmm6=int6464#4 | ||
4781 | # asm 2: pxor <xmm1=%xmm4,<xmm6=%xmm3 | ||
4782 | pxor %xmm4,%xmm3 | ||
4783 | |||
4784 | # qhasm: xmm3 = xmm7 | ||
4785 | # asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3 | ||
4786 | # asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2 | ||
4787 | movdqa %xmm5,%xmm2 | ||
4788 | |||
4789 | # qhasm: xmm3 ^= xmm6 | ||
4790 | # asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3 | ||
4791 | # asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2 | ||
4792 | pxor %xmm3,%xmm2 | ||
4793 | |||
4794 | # qhasm: xmm3 &= xmm15 | ||
4795 | # asm 1: pand <xmm15=int6464#16,<xmm3=int6464#3 | ||
4796 | # asm 2: pand <xmm15=%xmm15,<xmm3=%xmm2 | ||
4797 | pand %xmm15,%xmm2 | ||
4798 | |||
4799 | # qhasm: xmm15 ^= xmm9 | ||
4800 | # asm 1: pxor <xmm9=int6464#10,<xmm15=int6464#16 | ||
4801 | # asm 2: pxor <xmm9=%xmm9,<xmm15=%xmm15 | ||
4802 | pxor %xmm9,%xmm15 | ||
4803 | |||
4804 | # qhasm: xmm15 &= xmm6 | ||
4805 | # asm 1: pand <xmm6=int6464#4,<xmm15=int6464#16 | ||
4806 | # asm 2: pand <xmm6=%xmm3,<xmm15=%xmm15 | ||
4807 | pand %xmm3,%xmm15 | ||
4808 | |||
4809 | # qhasm: xmm9 &= xmm7 | ||
4810 | # asm 1: pand <xmm7=int6464#6,<xmm9=int6464#10 | ||
4811 | # asm 2: pand <xmm7=%xmm5,<xmm9=%xmm9 | ||
4812 | pand %xmm5,%xmm9 | ||
4813 | |||
4814 | # qhasm: xmm15 ^= xmm9 | ||
4815 | # asm 1: pxor <xmm9=int6464#10,<xmm15=int6464#16 | ||
4816 | # asm 2: pxor <xmm9=%xmm9,<xmm15=%xmm15 | ||
4817 | pxor %xmm9,%xmm15 | ||
4818 | |||
4819 | # qhasm: xmm9 ^= xmm3 | ||
4820 | # asm 1: pxor <xmm3=int6464#3,<xmm9=int6464#10 | ||
4821 | # asm 2: pxor <xmm3=%xmm2,<xmm9=%xmm9 | ||
4822 | pxor %xmm2,%xmm9 | ||
4823 | |||
4824 | # qhasm: xmm15 ^= xmm4 | ||
4825 | # asm 1: pxor <xmm4=int6464#1,<xmm15=int6464#16 | ||
4826 | # asm 2: pxor <xmm4=%xmm0,<xmm15=%xmm15 | ||
4827 | pxor %xmm0,%xmm15 | ||
4828 | |||
4829 | # qhasm: xmm12 ^= xmm4 | ||
4830 | # asm 1: pxor <xmm4=int6464#1,<xmm12=int6464#13 | ||
4831 | # asm 2: pxor <xmm4=%xmm0,<xmm12=%xmm12 | ||
4832 | pxor %xmm0,%xmm12 | ||
4833 | |||
4834 | # qhasm: xmm9 ^= xmm0 | ||
4835 | # asm 1: pxor <xmm0=int6464#2,<xmm9=int6464#10 | ||
4836 | # asm 2: pxor <xmm0=%xmm1,<xmm9=%xmm9 | ||
4837 | pxor %xmm1,%xmm9 | ||
4838 | |||
4839 | # qhasm: xmm10 ^= xmm0 | ||
4840 | # asm 1: pxor <xmm0=int6464#2,<xmm10=int6464#11 | ||
4841 | # asm 2: pxor <xmm0=%xmm1,<xmm10=%xmm10 | ||
4842 | pxor %xmm1,%xmm10 | ||
4843 | |||
4844 | # qhasm: xmm15 ^= xmm8 | ||
4845 | # asm 1: pxor <xmm8=int6464#9,<xmm15=int6464#16 | ||
4846 | # asm 2: pxor <xmm8=%xmm8,<xmm15=%xmm15 | ||
4847 | pxor %xmm8,%xmm15 | ||
4848 | |||
4849 | # qhasm: xmm9 ^= xmm14 | ||
4850 | # asm 1: pxor <xmm14=int6464#15,<xmm9=int6464#10 | ||
4851 | # asm 2: pxor <xmm14=%xmm14,<xmm9=%xmm9 | ||
4852 | pxor %xmm14,%xmm9 | ||
4853 | |||
4854 | # qhasm: xmm12 ^= xmm15 | ||
4855 | # asm 1: pxor <xmm15=int6464#16,<xmm12=int6464#13 | ||
4856 | # asm 2: pxor <xmm15=%xmm15,<xmm12=%xmm12 | ||
4857 | pxor %xmm15,%xmm12 | ||
4858 | |||
4859 | # qhasm: xmm14 ^= xmm8 | ||
4860 | # asm 1: pxor <xmm8=int6464#9,<xmm14=int6464#15 | ||
4861 | # asm 2: pxor <xmm8=%xmm8,<xmm14=%xmm14 | ||
4862 | pxor %xmm8,%xmm14 | ||
4863 | |||
4864 | # qhasm: xmm8 ^= xmm9 | ||
4865 | # asm 1: pxor <xmm9=int6464#10,<xmm8=int6464#9 | ||
4866 | # asm 2: pxor <xmm9=%xmm9,<xmm8=%xmm8 | ||
4867 | pxor %xmm9,%xmm8 | ||
4868 | |||
4869 | # qhasm: xmm9 ^= xmm13 | ||
4870 | # asm 1: pxor <xmm13=int6464#14,<xmm9=int6464#10 | ||
4871 | # asm 2: pxor <xmm13=%xmm13,<xmm9=%xmm9 | ||
4872 | pxor %xmm13,%xmm9 | ||
4873 | |||
4874 | # qhasm: xmm13 ^= xmm10 | ||
4875 | # asm 1: pxor <xmm10=int6464#11,<xmm13=int6464#14 | ||
4876 | # asm 2: pxor <xmm10=%xmm10,<xmm13=%xmm13 | ||
4877 | pxor %xmm10,%xmm13 | ||
4878 | |||
4879 | # qhasm: xmm12 ^= xmm13 | ||
4880 | # asm 1: pxor <xmm13=int6464#14,<xmm12=int6464#13 | ||
4881 | # asm 2: pxor <xmm13=%xmm13,<xmm12=%xmm12 | ||
4882 | pxor %xmm13,%xmm12 | ||
4883 | |||
4884 | # qhasm: xmm10 ^= xmm11 | ||
4885 | # asm 1: pxor <xmm11=int6464#12,<xmm10=int6464#11 | ||
4886 | # asm 2: pxor <xmm11=%xmm11,<xmm10=%xmm10 | ||
4887 | pxor %xmm11,%xmm10 | ||
4888 | |||
4889 | # qhasm: xmm11 ^= xmm13 | ||
4890 | # asm 1: pxor <xmm13=int6464#14,<xmm11=int6464#12 | ||
4891 | # asm 2: pxor <xmm13=%xmm13,<xmm11=%xmm11 | ||
4892 | pxor %xmm13,%xmm11 | ||
4893 | |||
4894 | # qhasm: xmm14 ^= xmm11 | ||
4895 | # asm 1: pxor <xmm11=int6464#12,<xmm14=int6464#15 | ||
4896 | # asm 2: pxor <xmm11=%xmm11,<xmm14=%xmm14 | ||
4897 | pxor %xmm11,%xmm14 | ||
4898 | |||
4899 | # qhasm: xmm0 = shuffle dwords of xmm8 by 0x93 | ||
4900 | # asm 1: pshufd $0x93,<xmm8=int6464#9,>xmm0=int6464#1 | ||
4901 | # asm 2: pshufd $0x93,<xmm8=%xmm8,>xmm0=%xmm0 | ||
4902 | pshufd $0x93,%xmm8,%xmm0 | ||
4903 | |||
4904 | # qhasm: xmm1 = shuffle dwords of xmm9 by 0x93 | ||
4905 | # asm 1: pshufd $0x93,<xmm9=int6464#10,>xmm1=int6464#2 | ||
4906 | # asm 2: pshufd $0x93,<xmm9=%xmm9,>xmm1=%xmm1 | ||
4907 | pshufd $0x93,%xmm9,%xmm1 | ||
4908 | |||
4909 | # qhasm: xmm2 = shuffle dwords of xmm12 by 0x93 | ||
4910 | # asm 1: pshufd $0x93,<xmm12=int6464#13,>xmm2=int6464#3 | ||
4911 | # asm 2: pshufd $0x93,<xmm12=%xmm12,>xmm2=%xmm2 | ||
4912 | pshufd $0x93,%xmm12,%xmm2 | ||
4913 | |||
4914 | # qhasm: xmm3 = shuffle dwords of xmm14 by 0x93 | ||
4915 | # asm 1: pshufd $0x93,<xmm14=int6464#15,>xmm3=int6464#4 | ||
4916 | # asm 2: pshufd $0x93,<xmm14=%xmm14,>xmm3=%xmm3 | ||
4917 | pshufd $0x93,%xmm14,%xmm3 | ||
4918 | |||
4919 | # qhasm: xmm4 = shuffle dwords of xmm11 by 0x93 | ||
4920 | # asm 1: pshufd $0x93,<xmm11=int6464#12,>xmm4=int6464#5 | ||
4921 | # asm 2: pshufd $0x93,<xmm11=%xmm11,>xmm4=%xmm4 | ||
4922 | pshufd $0x93,%xmm11,%xmm4 | ||
4923 | |||
4924 | # qhasm: xmm5 = shuffle dwords of xmm15 by 0x93 | ||
4925 | # asm 1: pshufd $0x93,<xmm15=int6464#16,>xmm5=int6464#6 | ||
4926 | # asm 2: pshufd $0x93,<xmm15=%xmm15,>xmm5=%xmm5 | ||
4927 | pshufd $0x93,%xmm15,%xmm5 | ||
4928 | |||
4929 | # qhasm: xmm6 = shuffle dwords of xmm10 by 0x93 | ||
4930 | # asm 1: pshufd $0x93,<xmm10=int6464#11,>xmm6=int6464#7 | ||
4931 | # asm 2: pshufd $0x93,<xmm10=%xmm10,>xmm6=%xmm6 | ||
4932 | pshufd $0x93,%xmm10,%xmm6 | ||
4933 | |||
4934 | # qhasm: xmm7 = shuffle dwords of xmm13 by 0x93 | ||
4935 | # asm 1: pshufd $0x93,<xmm13=int6464#14,>xmm7=int6464#8 | ||
4936 | # asm 2: pshufd $0x93,<xmm13=%xmm13,>xmm7=%xmm7 | ||
4937 | pshufd $0x93,%xmm13,%xmm7 | ||
4938 | |||
4939 | # qhasm: xmm8 ^= xmm0 | ||
4940 | # asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9 | ||
4941 | # asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8 | ||
4942 | pxor %xmm0,%xmm8 | ||
4943 | |||
4944 | # qhasm: xmm9 ^= xmm1 | ||
4945 | # asm 1: pxor <xmm1=int6464#2,<xmm9=int6464#10 | ||
4946 | # asm 2: pxor <xmm1=%xmm1,<xmm9=%xmm9 | ||
4947 | pxor %xmm1,%xmm9 | ||
4948 | |||
4949 | # qhasm: xmm12 ^= xmm2 | ||
4950 | # asm 1: pxor <xmm2=int6464#3,<xmm12=int6464#13 | ||
4951 | # asm 2: pxor <xmm2=%xmm2,<xmm12=%xmm12 | ||
4952 | pxor %xmm2,%xmm12 | ||
4953 | |||
4954 | # qhasm: xmm14 ^= xmm3 | ||
4955 | # asm 1: pxor <xmm3=int6464#4,<xmm14=int6464#15 | ||
4956 | # asm 2: pxor <xmm3=%xmm3,<xmm14=%xmm14 | ||
4957 | pxor %xmm3,%xmm14 | ||
4958 | |||
4959 | # qhasm: xmm11 ^= xmm4 | ||
4960 | # asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#12 | ||
4961 | # asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm11 | ||
4962 | pxor %xmm4,%xmm11 | ||
4963 | |||
4964 | # qhasm: xmm15 ^= xmm5 | ||
4965 | # asm 1: pxor <xmm5=int6464#6,<xmm15=int6464#16 | ||
4966 | # asm 2: pxor <xmm5=%xmm5,<xmm15=%xmm15 | ||
4967 | pxor %xmm5,%xmm15 | ||
4968 | |||
4969 | # qhasm: xmm10 ^= xmm6 | ||
4970 | # asm 1: pxor <xmm6=int6464#7,<xmm10=int6464#11 | ||
4971 | # asm 2: pxor <xmm6=%xmm6,<xmm10=%xmm10 | ||
4972 | pxor %xmm6,%xmm10 | ||
4973 | |||
4974 | # qhasm: xmm13 ^= xmm7 | ||
4975 | # asm 1: pxor <xmm7=int6464#8,<xmm13=int6464#14 | ||
4976 | # asm 2: pxor <xmm7=%xmm7,<xmm13=%xmm13 | ||
4977 | pxor %xmm7,%xmm13 | ||
4978 | |||
4979 | # qhasm: xmm0 ^= xmm13 | ||
4980 | # asm 1: pxor <xmm13=int6464#14,<xmm0=int6464#1 | ||
4981 | # asm 2: pxor <xmm13=%xmm13,<xmm0=%xmm0 | ||
4982 | pxor %xmm13,%xmm0 | ||
4983 | |||
4984 | # qhasm: xmm1 ^= xmm8 | ||
4985 | # asm 1: pxor <xmm8=int6464#9,<xmm1=int6464#2 | ||
4986 | # asm 2: pxor <xmm8=%xmm8,<xmm1=%xmm1 | ||
4987 | pxor %xmm8,%xmm1 | ||
4988 | |||
4989 | # qhasm: xmm2 ^= xmm9 | ||
4990 | # asm 1: pxor <xmm9=int6464#10,<xmm2=int6464#3 | ||
4991 | # asm 2: pxor <xmm9=%xmm9,<xmm2=%xmm2 | ||
4992 | pxor %xmm9,%xmm2 | ||
4993 | |||
4994 | # qhasm: xmm1 ^= xmm13 | ||
4995 | # asm 1: pxor <xmm13=int6464#14,<xmm1=int6464#2 | ||
4996 | # asm 2: pxor <xmm13=%xmm13,<xmm1=%xmm1 | ||
4997 | pxor %xmm13,%xmm1 | ||
4998 | |||
4999 | # qhasm: xmm3 ^= xmm12 | ||
5000 | # asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4 | ||
5001 | # asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3 | ||
5002 | pxor %xmm12,%xmm3 | ||
5003 | |||
5004 | # qhasm: xmm4 ^= xmm14 | ||
5005 | # asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5 | ||
5006 | # asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4 | ||
5007 | pxor %xmm14,%xmm4 | ||
5008 | |||
5009 | # qhasm: xmm5 ^= xmm11 | ||
5010 | # asm 1: pxor <xmm11=int6464#12,<xmm5=int6464#6 | ||
5011 | # asm 2: pxor <xmm11=%xmm11,<xmm5=%xmm5 | ||
5012 | pxor %xmm11,%xmm5 | ||
5013 | |||
5014 | # qhasm: xmm3 ^= xmm13 | ||
5015 | # asm 1: pxor <xmm13=int6464#14,<xmm3=int6464#4 | ||
5016 | # asm 2: pxor <xmm13=%xmm13,<xmm3=%xmm3 | ||
5017 | pxor %xmm13,%xmm3 | ||
5018 | |||
5019 | # qhasm: xmm6 ^= xmm15 | ||
5020 | # asm 1: pxor <xmm15=int6464#16,<xmm6=int6464#7 | ||
5021 | # asm 2: pxor <xmm15=%xmm15,<xmm6=%xmm6 | ||
5022 | pxor %xmm15,%xmm6 | ||
5023 | |||
5024 | # qhasm: xmm7 ^= xmm10 | ||
5025 | # asm 1: pxor <xmm10=int6464#11,<xmm7=int6464#8 | ||
5026 | # asm 2: pxor <xmm10=%xmm10,<xmm7=%xmm7 | ||
5027 | pxor %xmm10,%xmm7 | ||
5028 | |||
5029 | # qhasm: xmm4 ^= xmm13 | ||
5030 | # asm 1: pxor <xmm13=int6464#14,<xmm4=int6464#5 | ||
5031 | # asm 2: pxor <xmm13=%xmm13,<xmm4=%xmm4 | ||
5032 | pxor %xmm13,%xmm4 | ||
5033 | |||
5034 | # qhasm: xmm8 = shuffle dwords of xmm8 by 0x4E | ||
5035 | # asm 1: pshufd $0x4E,<xmm8=int6464#9,>xmm8=int6464#9 | ||
5036 | # asm 2: pshufd $0x4E,<xmm8=%xmm8,>xmm8=%xmm8 | ||
5037 | pshufd $0x4E,%xmm8,%xmm8 | ||
5038 | |||
5039 | # qhasm: xmm9 = shuffle dwords of xmm9 by 0x4E | ||
5040 | # asm 1: pshufd $0x4E,<xmm9=int6464#10,>xmm9=int6464#10 | ||
5041 | # asm 2: pshufd $0x4E,<xmm9=%xmm9,>xmm9=%xmm9 | ||
5042 | pshufd $0x4E,%xmm9,%xmm9 | ||
5043 | |||
5044 | # qhasm: xmm12 = shuffle dwords of xmm12 by 0x4E | ||
5045 | # asm 1: pshufd $0x4E,<xmm12=int6464#13,>xmm12=int6464#13 | ||
5046 | # asm 2: pshufd $0x4E,<xmm12=%xmm12,>xmm12=%xmm12 | ||
5047 | pshufd $0x4E,%xmm12,%xmm12 | ||
5048 | |||
5049 | # qhasm: xmm14 = shuffle dwords of xmm14 by 0x4E | ||
5050 | # asm 1: pshufd $0x4E,<xmm14=int6464#15,>xmm14=int6464#15 | ||
5051 | # asm 2: pshufd $0x4E,<xmm14=%xmm14,>xmm14=%xmm14 | ||
5052 | pshufd $0x4E,%xmm14,%xmm14 | ||
5053 | |||
5054 | # qhasm: xmm11 = shuffle dwords of xmm11 by 0x4E | ||
5055 | # asm 1: pshufd $0x4E,<xmm11=int6464#12,>xmm11=int6464#12 | ||
5056 | # asm 2: pshufd $0x4E,<xmm11=%xmm11,>xmm11=%xmm11 | ||
5057 | pshufd $0x4E,%xmm11,%xmm11 | ||
5058 | |||
5059 | # qhasm: xmm15 = shuffle dwords of xmm15 by 0x4E | ||
5060 | # asm 1: pshufd $0x4E,<xmm15=int6464#16,>xmm15=int6464#16 | ||
5061 | # asm 2: pshufd $0x4E,<xmm15=%xmm15,>xmm15=%xmm15 | ||
5062 | pshufd $0x4E,%xmm15,%xmm15 | ||
5063 | |||
5064 | # qhasm: xmm10 = shuffle dwords of xmm10 by 0x4E | ||
5065 | # asm 1: pshufd $0x4E,<xmm10=int6464#11,>xmm10=int6464#11 | ||
5066 | # asm 2: pshufd $0x4E,<xmm10=%xmm10,>xmm10=%xmm10 | ||
5067 | pshufd $0x4E,%xmm10,%xmm10 | ||
5068 | |||
5069 | # qhasm: xmm13 = shuffle dwords of xmm13 by 0x4E | ||
5070 | # asm 1: pshufd $0x4E,<xmm13=int6464#14,>xmm13=int6464#14 | ||
5071 | # asm 2: pshufd $0x4E,<xmm13=%xmm13,>xmm13=%xmm13 | ||
5072 | pshufd $0x4E,%xmm13,%xmm13 | ||
5073 | |||
5074 | # qhasm: xmm0 ^= xmm8 | ||
5075 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
5076 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
5077 | pxor %xmm8,%xmm0 | ||
5078 | |||
5079 | # qhasm: xmm1 ^= xmm9 | ||
5080 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
5081 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
5082 | pxor %xmm9,%xmm1 | ||
5083 | |||
5084 | # qhasm: xmm2 ^= xmm12 | ||
5085 | # asm 1: pxor <xmm12=int6464#13,<xmm2=int6464#3 | ||
5086 | # asm 2: pxor <xmm12=%xmm12,<xmm2=%xmm2 | ||
5087 | pxor %xmm12,%xmm2 | ||
5088 | |||
5089 | # qhasm: xmm3 ^= xmm14 | ||
5090 | # asm 1: pxor <xmm14=int6464#15,<xmm3=int6464#4 | ||
5091 | # asm 2: pxor <xmm14=%xmm14,<xmm3=%xmm3 | ||
5092 | pxor %xmm14,%xmm3 | ||
5093 | |||
5094 | # qhasm: xmm4 ^= xmm11 | ||
5095 | # asm 1: pxor <xmm11=int6464#12,<xmm4=int6464#5 | ||
5096 | # asm 2: pxor <xmm11=%xmm11,<xmm4=%xmm4 | ||
5097 | pxor %xmm11,%xmm4 | ||
5098 | |||
5099 | # qhasm: xmm5 ^= xmm15 | ||
5100 | # asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6 | ||
5101 | # asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5 | ||
5102 | pxor %xmm15,%xmm5 | ||
5103 | |||
5104 | # qhasm: xmm6 ^= xmm10 | ||
5105 | # asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7 | ||
5106 | # asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6 | ||
5107 | pxor %xmm10,%xmm6 | ||
5108 | |||
5109 | # qhasm: xmm7 ^= xmm13 | ||
5110 | # asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8 | ||
5111 | # asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7 | ||
5112 | pxor %xmm13,%xmm7 | ||
5113 | |||
5114 | # qhasm: xmm0 ^= *(int128 *)(c + 512) | ||
5115 | # asm 1: pxor 512(<c=int64#5),<xmm0=int6464#1 | ||
5116 | # asm 2: pxor 512(<c=%r8),<xmm0=%xmm0 | ||
5117 | pxor 512(%r8),%xmm0 | ||
5118 | |||
5119 | # qhasm: shuffle bytes of xmm0 by SR | ||
5120 | # asm 1: pshufb SR,<xmm0=int6464#1 | ||
5121 | # asm 2: pshufb SR,<xmm0=%xmm0 | ||
5122 | pshufb SR,%xmm0 | ||
5123 | |||
5124 | # qhasm: xmm1 ^= *(int128 *)(c + 528) | ||
5125 | # asm 1: pxor 528(<c=int64#5),<xmm1=int6464#2 | ||
5126 | # asm 2: pxor 528(<c=%r8),<xmm1=%xmm1 | ||
5127 | pxor 528(%r8),%xmm1 | ||
5128 | |||
5129 | # qhasm: shuffle bytes of xmm1 by SR | ||
5130 | # asm 1: pshufb SR,<xmm1=int6464#2 | ||
5131 | # asm 2: pshufb SR,<xmm1=%xmm1 | ||
5132 | pshufb SR,%xmm1 | ||
5133 | |||
5134 | # qhasm: xmm2 ^= *(int128 *)(c + 544) | ||
5135 | # asm 1: pxor 544(<c=int64#5),<xmm2=int6464#3 | ||
5136 | # asm 2: pxor 544(<c=%r8),<xmm2=%xmm2 | ||
5137 | pxor 544(%r8),%xmm2 | ||
5138 | |||
5139 | # qhasm: shuffle bytes of xmm2 by SR | ||
5140 | # asm 1: pshufb SR,<xmm2=int6464#3 | ||
5141 | # asm 2: pshufb SR,<xmm2=%xmm2 | ||
5142 | pshufb SR,%xmm2 | ||
5143 | |||
5144 | # qhasm: xmm3 ^= *(int128 *)(c + 560) | ||
5145 | # asm 1: pxor 560(<c=int64#5),<xmm3=int6464#4 | ||
5146 | # asm 2: pxor 560(<c=%r8),<xmm3=%xmm3 | ||
5147 | pxor 560(%r8),%xmm3 | ||
5148 | |||
5149 | # qhasm: shuffle bytes of xmm3 by SR | ||
5150 | # asm 1: pshufb SR,<xmm3=int6464#4 | ||
5151 | # asm 2: pshufb SR,<xmm3=%xmm3 | ||
5152 | pshufb SR,%xmm3 | ||
5153 | |||
5154 | # qhasm: xmm4 ^= *(int128 *)(c + 576) | ||
5155 | # asm 1: pxor 576(<c=int64#5),<xmm4=int6464#5 | ||
5156 | # asm 2: pxor 576(<c=%r8),<xmm4=%xmm4 | ||
5157 | pxor 576(%r8),%xmm4 | ||
5158 | |||
5159 | # qhasm: shuffle bytes of xmm4 by SR | ||
5160 | # asm 1: pshufb SR,<xmm4=int6464#5 | ||
5161 | # asm 2: pshufb SR,<xmm4=%xmm4 | ||
5162 | pshufb SR,%xmm4 | ||
5163 | |||
5164 | # qhasm: xmm5 ^= *(int128 *)(c + 592) | ||
5165 | # asm 1: pxor 592(<c=int64#5),<xmm5=int6464#6 | ||
5166 | # asm 2: pxor 592(<c=%r8),<xmm5=%xmm5 | ||
5167 | pxor 592(%r8),%xmm5 | ||
5168 | |||
5169 | # qhasm: shuffle bytes of xmm5 by SR | ||
5170 | # asm 1: pshufb SR,<xmm5=int6464#6 | ||
5171 | # asm 2: pshufb SR,<xmm5=%xmm5 | ||
5172 | pshufb SR,%xmm5 | ||
5173 | |||
5174 | # qhasm: xmm6 ^= *(int128 *)(c + 608) | ||
5175 | # asm 1: pxor 608(<c=int64#5),<xmm6=int6464#7 | ||
5176 | # asm 2: pxor 608(<c=%r8),<xmm6=%xmm6 | ||
5177 | pxor 608(%r8),%xmm6 | ||
5178 | |||
5179 | # qhasm: shuffle bytes of xmm6 by SR | ||
5180 | # asm 1: pshufb SR,<xmm6=int6464#7 | ||
5181 | # asm 2: pshufb SR,<xmm6=%xmm6 | ||
5182 | pshufb SR,%xmm6 | ||
5183 | |||
5184 | # qhasm: xmm7 ^= *(int128 *)(c + 624) | ||
5185 | # asm 1: pxor 624(<c=int64#5),<xmm7=int6464#8 | ||
5186 | # asm 2: pxor 624(<c=%r8),<xmm7=%xmm7 | ||
5187 | pxor 624(%r8),%xmm7 | ||
5188 | |||
5189 | # qhasm: shuffle bytes of xmm7 by SR | ||
5190 | # asm 1: pshufb SR,<xmm7=int6464#8 | ||
5191 | # asm 2: pshufb SR,<xmm7=%xmm7 | ||
5192 | pshufb SR,%xmm7 | ||
5193 | |||
5194 | # qhasm: xmm5 ^= xmm6 | ||
5195 | # asm 1: pxor <xmm6=int6464#7,<xmm5=int6464#6 | ||
5196 | # asm 2: pxor <xmm6=%xmm6,<xmm5=%xmm5 | ||
5197 | pxor %xmm6,%xmm5 | ||
5198 | |||
5199 | # qhasm: xmm2 ^= xmm1 | ||
5200 | # asm 1: pxor <xmm1=int6464#2,<xmm2=int6464#3 | ||
5201 | # asm 2: pxor <xmm1=%xmm1,<xmm2=%xmm2 | ||
5202 | pxor %xmm1,%xmm2 | ||
5203 | |||
5204 | # qhasm: xmm5 ^= xmm0 | ||
5205 | # asm 1: pxor <xmm0=int6464#1,<xmm5=int6464#6 | ||
5206 | # asm 2: pxor <xmm0=%xmm0,<xmm5=%xmm5 | ||
5207 | pxor %xmm0,%xmm5 | ||
5208 | |||
5209 | # qhasm: xmm6 ^= xmm2 | ||
5210 | # asm 1: pxor <xmm2=int6464#3,<xmm6=int6464#7 | ||
5211 | # asm 2: pxor <xmm2=%xmm2,<xmm6=%xmm6 | ||
5212 | pxor %xmm2,%xmm6 | ||
5213 | |||
5214 | # qhasm: xmm3 ^= xmm0 | ||
5215 | # asm 1: pxor <xmm0=int6464#1,<xmm3=int6464#4 | ||
5216 | # asm 2: pxor <xmm0=%xmm0,<xmm3=%xmm3 | ||
5217 | pxor %xmm0,%xmm3 | ||
5218 | |||
5219 | # qhasm: xmm6 ^= xmm3 | ||
5220 | # asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7 | ||
5221 | # asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6 | ||
5222 | pxor %xmm3,%xmm6 | ||
5223 | |||
5224 | # qhasm: xmm3 ^= xmm7 | ||
5225 | # asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4 | ||
5226 | # asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3 | ||
5227 | pxor %xmm7,%xmm3 | ||
5228 | |||
5229 | # qhasm: xmm3 ^= xmm4 | ||
5230 | # asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4 | ||
5231 | # asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3 | ||
5232 | pxor %xmm4,%xmm3 | ||
5233 | |||
5234 | # qhasm: xmm7 ^= xmm5 | ||
5235 | # asm 1: pxor <xmm5=int6464#6,<xmm7=int6464#8 | ||
5236 | # asm 2: pxor <xmm5=%xmm5,<xmm7=%xmm7 | ||
5237 | pxor %xmm5,%xmm7 | ||
5238 | |||
5239 | # qhasm: xmm3 ^= xmm1 | ||
5240 | # asm 1: pxor <xmm1=int6464#2,<xmm3=int6464#4 | ||
5241 | # asm 2: pxor <xmm1=%xmm1,<xmm3=%xmm3 | ||
5242 | pxor %xmm1,%xmm3 | ||
5243 | |||
5244 | # qhasm: xmm4 ^= xmm5 | ||
5245 | # asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5 | ||
5246 | # asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4 | ||
5247 | pxor %xmm5,%xmm4 | ||
5248 | |||
5249 | # qhasm: xmm2 ^= xmm7 | ||
5250 | # asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3 | ||
5251 | # asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2 | ||
5252 | pxor %xmm7,%xmm2 | ||
5253 | |||
5254 | # qhasm: xmm1 ^= xmm5 | ||
5255 | # asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2 | ||
5256 | # asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1 | ||
5257 | pxor %xmm5,%xmm1 | ||
5258 | |||
5259 | # qhasm: xmm11 = xmm7 | ||
5260 | # asm 1: movdqa <xmm7=int6464#8,>xmm11=int6464#9 | ||
5261 | # asm 2: movdqa <xmm7=%xmm7,>xmm11=%xmm8 | ||
5262 | movdqa %xmm7,%xmm8 | ||
5263 | |||
5264 | # qhasm: xmm10 = xmm1 | ||
5265 | # asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10 | ||
5266 | # asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9 | ||
5267 | movdqa %xmm1,%xmm9 | ||
5268 | |||
5269 | # qhasm: xmm9 = xmm5 | ||
5270 | # asm 1: movdqa <xmm5=int6464#6,>xmm9=int6464#11 | ||
5271 | # asm 2: movdqa <xmm5=%xmm5,>xmm9=%xmm10 | ||
5272 | movdqa %xmm5,%xmm10 | ||
5273 | |||
5274 | # qhasm: xmm13 = xmm2 | ||
5275 | # asm 1: movdqa <xmm2=int6464#3,>xmm13=int6464#12 | ||
5276 | # asm 2: movdqa <xmm2=%xmm2,>xmm13=%xmm11 | ||
5277 | movdqa %xmm2,%xmm11 | ||
5278 | |||
5279 | # qhasm: xmm12 = xmm6 | ||
5280 | # asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#13 | ||
5281 | # asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm12 | ||
5282 | movdqa %xmm6,%xmm12 | ||
5283 | |||
5284 | # qhasm: xmm11 ^= xmm4 | ||
5285 | # asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#9 | ||
5286 | # asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm8 | ||
5287 | pxor %xmm4,%xmm8 | ||
5288 | |||
5289 | # qhasm: xmm10 ^= xmm2 | ||
5290 | # asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#10 | ||
5291 | # asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm9 | ||
5292 | pxor %xmm2,%xmm9 | ||
5293 | |||
5294 | # qhasm: xmm9 ^= xmm3 | ||
5295 | # asm 1: pxor <xmm3=int6464#4,<xmm9=int6464#11 | ||
5296 | # asm 2: pxor <xmm3=%xmm3,<xmm9=%xmm10 | ||
5297 | pxor %xmm3,%xmm10 | ||
5298 | |||
5299 | # qhasm: xmm13 ^= xmm4 | ||
5300 | # asm 1: pxor <xmm4=int6464#5,<xmm13=int6464#12 | ||
5301 | # asm 2: pxor <xmm4=%xmm4,<xmm13=%xmm11 | ||
5302 | pxor %xmm4,%xmm11 | ||
5303 | |||
5304 | # qhasm: xmm12 ^= xmm0 | ||
5305 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13 | ||
5306 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12 | ||
5307 | pxor %xmm0,%xmm12 | ||
5308 | |||
5309 | # qhasm: xmm14 = xmm11 | ||
5310 | # asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14 | ||
5311 | # asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13 | ||
5312 | movdqa %xmm8,%xmm13 | ||
5313 | |||
5314 | # qhasm: xmm8 = xmm10 | ||
5315 | # asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15 | ||
5316 | # asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14 | ||
5317 | movdqa %xmm9,%xmm14 | ||
5318 | |||
5319 | # qhasm: xmm15 = xmm11 | ||
5320 | # asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16 | ||
5321 | # asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15 | ||
5322 | movdqa %xmm8,%xmm15 | ||
5323 | |||
5324 | # qhasm: xmm10 |= xmm9 | ||
5325 | # asm 1: por <xmm9=int6464#11,<xmm10=int6464#10 | ||
5326 | # asm 2: por <xmm9=%xmm10,<xmm10=%xmm9 | ||
5327 | por %xmm10,%xmm9 | ||
5328 | |||
5329 | # qhasm: xmm11 |= xmm12 | ||
5330 | # asm 1: por <xmm12=int6464#13,<xmm11=int6464#9 | ||
5331 | # asm 2: por <xmm12=%xmm12,<xmm11=%xmm8 | ||
5332 | por %xmm12,%xmm8 | ||
5333 | |||
5334 | # qhasm: xmm15 ^= xmm8 | ||
5335 | # asm 1: pxor <xmm8=int6464#15,<xmm15=int6464#16 | ||
5336 | # asm 2: pxor <xmm8=%xmm14,<xmm15=%xmm15 | ||
5337 | pxor %xmm14,%xmm15 | ||
5338 | |||
5339 | # qhasm: xmm14 &= xmm12 | ||
5340 | # asm 1: pand <xmm12=int6464#13,<xmm14=int6464#14 | ||
5341 | # asm 2: pand <xmm12=%xmm12,<xmm14=%xmm13 | ||
5342 | pand %xmm12,%xmm13 | ||
5343 | |||
5344 | # qhasm: xmm8 &= xmm9 | ||
5345 | # asm 1: pand <xmm9=int6464#11,<xmm8=int6464#15 | ||
5346 | # asm 2: pand <xmm9=%xmm10,<xmm8=%xmm14 | ||
5347 | pand %xmm10,%xmm14 | ||
5348 | |||
5349 | # qhasm: xmm12 ^= xmm9 | ||
5350 | # asm 1: pxor <xmm9=int6464#11,<xmm12=int6464#13 | ||
5351 | # asm 2: pxor <xmm9=%xmm10,<xmm12=%xmm12 | ||
5352 | pxor %xmm10,%xmm12 | ||
5353 | |||
5354 | # qhasm: xmm15 &= xmm12 | ||
5355 | # asm 1: pand <xmm12=int6464#13,<xmm15=int6464#16 | ||
5356 | # asm 2: pand <xmm12=%xmm12,<xmm15=%xmm15 | ||
5357 | pand %xmm12,%xmm15 | ||
5358 | |||
5359 | # qhasm: xmm12 = xmm3 | ||
5360 | # asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#11 | ||
5361 | # asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm10 | ||
5362 | movdqa %xmm3,%xmm10 | ||
5363 | |||
5364 | # qhasm: xmm12 ^= xmm0 | ||
5365 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#11 | ||
5366 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm10 | ||
5367 | pxor %xmm0,%xmm10 | ||
5368 | |||
5369 | # qhasm: xmm13 &= xmm12 | ||
5370 | # asm 1: pand <xmm12=int6464#11,<xmm13=int6464#12 | ||
5371 | # asm 2: pand <xmm12=%xmm10,<xmm13=%xmm11 | ||
5372 | pand %xmm10,%xmm11 | ||
5373 | |||
5374 | # qhasm: xmm11 ^= xmm13 | ||
5375 | # asm 1: pxor <xmm13=int6464#12,<xmm11=int6464#9 | ||
5376 | # asm 2: pxor <xmm13=%xmm11,<xmm11=%xmm8 | ||
5377 | pxor %xmm11,%xmm8 | ||
5378 | |||
5379 | # qhasm: xmm10 ^= xmm13 | ||
5380 | # asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10 | ||
5381 | # asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9 | ||
5382 | pxor %xmm11,%xmm9 | ||
5383 | |||
5384 | # qhasm: xmm13 = xmm7 | ||
5385 | # asm 1: movdqa <xmm7=int6464#8,>xmm13=int6464#11 | ||
5386 | # asm 2: movdqa <xmm7=%xmm7,>xmm13=%xmm10 | ||
5387 | movdqa %xmm7,%xmm10 | ||
5388 | |||
5389 | # qhasm: xmm13 ^= xmm1 | ||
5390 | # asm 1: pxor <xmm1=int6464#2,<xmm13=int6464#11 | ||
5391 | # asm 2: pxor <xmm1=%xmm1,<xmm13=%xmm10 | ||
5392 | pxor %xmm1,%xmm10 | ||
5393 | |||
5394 | # qhasm: xmm12 = xmm5 | ||
5395 | # asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#12 | ||
5396 | # asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm11 | ||
5397 | movdqa %xmm5,%xmm11 | ||
5398 | |||
5399 | # qhasm: xmm9 = xmm13 | ||
5400 | # asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13 | ||
5401 | # asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12 | ||
5402 | movdqa %xmm10,%xmm12 | ||
5403 | |||
5404 | # qhasm: xmm12 ^= xmm6 | ||
5405 | # asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#12 | ||
5406 | # asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm11 | ||
5407 | pxor %xmm6,%xmm11 | ||
5408 | |||
5409 | # qhasm: xmm9 |= xmm12 | ||
5410 | # asm 1: por <xmm12=int6464#12,<xmm9=int6464#13 | ||
5411 | # asm 2: por <xmm12=%xmm11,<xmm9=%xmm12 | ||
5412 | por %xmm11,%xmm12 | ||
5413 | |||
5414 | # qhasm: xmm13 &= xmm12 | ||
5415 | # asm 1: pand <xmm12=int6464#12,<xmm13=int6464#11 | ||
5416 | # asm 2: pand <xmm12=%xmm11,<xmm13=%xmm10 | ||
5417 | pand %xmm11,%xmm10 | ||
5418 | |||
5419 | # qhasm: xmm8 ^= xmm13 | ||
5420 | # asm 1: pxor <xmm13=int6464#11,<xmm8=int6464#15 | ||
5421 | # asm 2: pxor <xmm13=%xmm10,<xmm8=%xmm14 | ||
5422 | pxor %xmm10,%xmm14 | ||
5423 | |||
5424 | # qhasm: xmm11 ^= xmm15 | ||
5425 | # asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#9 | ||
5426 | # asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm8 | ||
5427 | pxor %xmm15,%xmm8 | ||
5428 | |||
5429 | # qhasm: xmm10 ^= xmm14 | ||
5430 | # asm 1: pxor <xmm14=int6464#14,<xmm10=int6464#10 | ||
5431 | # asm 2: pxor <xmm14=%xmm13,<xmm10=%xmm9 | ||
5432 | pxor %xmm13,%xmm9 | ||
5433 | |||
5434 | # qhasm: xmm9 ^= xmm15 | ||
5435 | # asm 1: pxor <xmm15=int6464#16,<xmm9=int6464#13 | ||
5436 | # asm 2: pxor <xmm15=%xmm15,<xmm9=%xmm12 | ||
5437 | pxor %xmm15,%xmm12 | ||
5438 | |||
5439 | # qhasm: xmm8 ^= xmm14 | ||
5440 | # asm 1: pxor <xmm14=int6464#14,<xmm8=int6464#15 | ||
5441 | # asm 2: pxor <xmm14=%xmm13,<xmm8=%xmm14 | ||
5442 | pxor %xmm13,%xmm14 | ||
5443 | |||
5444 | # qhasm: xmm9 ^= xmm14 | ||
5445 | # asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13 | ||
5446 | # asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12 | ||
5447 | pxor %xmm13,%xmm12 | ||
5448 | |||
5449 | # qhasm: xmm12 = xmm2 | ||
5450 | # asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#11 | ||
5451 | # asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm10 | ||
5452 | movdqa %xmm2,%xmm10 | ||
5453 | |||
5454 | # qhasm: xmm13 = xmm4 | ||
5455 | # asm 1: movdqa <xmm4=int6464#5,>xmm13=int6464#12 | ||
5456 | # asm 2: movdqa <xmm4=%xmm4,>xmm13=%xmm11 | ||
5457 | movdqa %xmm4,%xmm11 | ||
5458 | |||
5459 | # qhasm: xmm14 = xmm1 | ||
5460 | # asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14 | ||
5461 | # asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13 | ||
5462 | movdqa %xmm1,%xmm13 | ||
5463 | |||
5464 | # qhasm: xmm15 = xmm7 | ||
5465 | # asm 1: movdqa <xmm7=int6464#8,>xmm15=int6464#16 | ||
5466 | # asm 2: movdqa <xmm7=%xmm7,>xmm15=%xmm15 | ||
5467 | movdqa %xmm7,%xmm15 | ||
5468 | |||
5469 | # qhasm: xmm12 &= xmm3 | ||
5470 | # asm 1: pand <xmm3=int6464#4,<xmm12=int6464#11 | ||
5471 | # asm 2: pand <xmm3=%xmm3,<xmm12=%xmm10 | ||
5472 | pand %xmm3,%xmm10 | ||
5473 | |||
5474 | # qhasm: xmm13 &= xmm0 | ||
5475 | # asm 1: pand <xmm0=int6464#1,<xmm13=int6464#12 | ||
5476 | # asm 2: pand <xmm0=%xmm0,<xmm13=%xmm11 | ||
5477 | pand %xmm0,%xmm11 | ||
5478 | |||
5479 | # qhasm: xmm14 &= xmm5 | ||
5480 | # asm 1: pand <xmm5=int6464#6,<xmm14=int6464#14 | ||
5481 | # asm 2: pand <xmm5=%xmm5,<xmm14=%xmm13 | ||
5482 | pand %xmm5,%xmm13 | ||
5483 | |||
5484 | # qhasm: xmm15 |= xmm6 | ||
5485 | # asm 1: por <xmm6=int6464#7,<xmm15=int6464#16 | ||
5486 | # asm 2: por <xmm6=%xmm6,<xmm15=%xmm15 | ||
5487 | por %xmm6,%xmm15 | ||
5488 | |||
5489 | # qhasm: xmm11 ^= xmm12 | ||
5490 | # asm 1: pxor <xmm12=int6464#11,<xmm11=int6464#9 | ||
5491 | # asm 2: pxor <xmm12=%xmm10,<xmm11=%xmm8 | ||
5492 | pxor %xmm10,%xmm8 | ||
5493 | |||
5494 | # qhasm: xmm10 ^= xmm13 | ||
5495 | # asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10 | ||
5496 | # asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9 | ||
5497 | pxor %xmm11,%xmm9 | ||
5498 | |||
5499 | # qhasm: xmm9 ^= xmm14 | ||
5500 | # asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13 | ||
5501 | # asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12 | ||
5502 | pxor %xmm13,%xmm12 | ||
5503 | |||
5504 | # qhasm: xmm8 ^= xmm15 | ||
5505 | # asm 1: pxor <xmm15=int6464#16,<xmm8=int6464#15 | ||
5506 | # asm 2: pxor <xmm15=%xmm15,<xmm8=%xmm14 | ||
5507 | pxor %xmm15,%xmm14 | ||
5508 | |||
5509 | # qhasm: xmm12 = xmm11 | ||
5510 | # asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11 | ||
5511 | # asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10 | ||
5512 | movdqa %xmm8,%xmm10 | ||
5513 | |||
5514 | # qhasm: xmm12 ^= xmm10 | ||
5515 | # asm 1: pxor <xmm10=int6464#10,<xmm12=int6464#11 | ||
5516 | # asm 2: pxor <xmm10=%xmm9,<xmm12=%xmm10 | ||
5517 | pxor %xmm9,%xmm10 | ||
5518 | |||
5519 | # qhasm: xmm11 &= xmm9 | ||
5520 | # asm 1: pand <xmm9=int6464#13,<xmm11=int6464#9 | ||
5521 | # asm 2: pand <xmm9=%xmm12,<xmm11=%xmm8 | ||
5522 | pand %xmm12,%xmm8 | ||
5523 | |||
5524 | # qhasm: xmm14 = xmm8 | ||
5525 | # asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12 | ||
5526 | # asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11 | ||
5527 | movdqa %xmm14,%xmm11 | ||
5528 | |||
5529 | # qhasm: xmm14 ^= xmm11 | ||
5530 | # asm 1: pxor <xmm11=int6464#9,<xmm14=int6464#12 | ||
5531 | # asm 2: pxor <xmm11=%xmm8,<xmm14=%xmm11 | ||
5532 | pxor %xmm8,%xmm11 | ||
5533 | |||
5534 | # qhasm: xmm15 = xmm12 | ||
5535 | # asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14 | ||
5536 | # asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13 | ||
5537 | movdqa %xmm10,%xmm13 | ||
5538 | |||
5539 | # qhasm: xmm15 &= xmm14 | ||
5540 | # asm 1: pand <xmm14=int6464#12,<xmm15=int6464#14 | ||
5541 | # asm 2: pand <xmm14=%xmm11,<xmm15=%xmm13 | ||
5542 | pand %xmm11,%xmm13 | ||
5543 | |||
5544 | # qhasm: xmm15 ^= xmm10 | ||
5545 | # asm 1: pxor <xmm10=int6464#10,<xmm15=int6464#14 | ||
5546 | # asm 2: pxor <xmm10=%xmm9,<xmm15=%xmm13 | ||
5547 | pxor %xmm9,%xmm13 | ||
5548 | |||
5549 | # qhasm: xmm13 = xmm9 | ||
5550 | # asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16 | ||
5551 | # asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15 | ||
5552 | movdqa %xmm12,%xmm15 | ||
5553 | |||
5554 | # qhasm: xmm13 ^= xmm8 | ||
5555 | # asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16 | ||
5556 | # asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15 | ||
5557 | pxor %xmm14,%xmm15 | ||
5558 | |||
5559 | # qhasm: xmm11 ^= xmm10 | ||
5560 | # asm 1: pxor <xmm10=int6464#10,<xmm11=int6464#9 | ||
5561 | # asm 2: pxor <xmm10=%xmm9,<xmm11=%xmm8 | ||
5562 | pxor %xmm9,%xmm8 | ||
5563 | |||
5564 | # qhasm: xmm13 &= xmm11 | ||
5565 | # asm 1: pand <xmm11=int6464#9,<xmm13=int6464#16 | ||
5566 | # asm 2: pand <xmm11=%xmm8,<xmm13=%xmm15 | ||
5567 | pand %xmm8,%xmm15 | ||
5568 | |||
5569 | # qhasm: xmm13 ^= xmm8 | ||
5570 | # asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16 | ||
5571 | # asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15 | ||
5572 | pxor %xmm14,%xmm15 | ||
5573 | |||
5574 | # qhasm: xmm9 ^= xmm13 | ||
5575 | # asm 1: pxor <xmm13=int6464#16,<xmm9=int6464#13 | ||
5576 | # asm 2: pxor <xmm13=%xmm15,<xmm9=%xmm12 | ||
5577 | pxor %xmm15,%xmm12 | ||
5578 | |||
5579 | # qhasm: xmm10 = xmm14 | ||
5580 | # asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9 | ||
5581 | # asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8 | ||
5582 | movdqa %xmm11,%xmm8 | ||
5583 | |||
5584 | # qhasm: xmm10 ^= xmm13 | ||
5585 | # asm 1: pxor <xmm13=int6464#16,<xmm10=int6464#9 | ||
5586 | # asm 2: pxor <xmm13=%xmm15,<xmm10=%xmm8 | ||
5587 | pxor %xmm15,%xmm8 | ||
5588 | |||
5589 | # qhasm: xmm10 &= xmm8 | ||
5590 | # asm 1: pand <xmm8=int6464#15,<xmm10=int6464#9 | ||
5591 | # asm 2: pand <xmm8=%xmm14,<xmm10=%xmm8 | ||
5592 | pand %xmm14,%xmm8 | ||
5593 | |||
5594 | # qhasm: xmm9 ^= xmm10 | ||
5595 | # asm 1: pxor <xmm10=int6464#9,<xmm9=int6464#13 | ||
5596 | # asm 2: pxor <xmm10=%xmm8,<xmm9=%xmm12 | ||
5597 | pxor %xmm8,%xmm12 | ||
5598 | |||
5599 | # qhasm: xmm14 ^= xmm10 | ||
5600 | # asm 1: pxor <xmm10=int6464#9,<xmm14=int6464#12 | ||
5601 | # asm 2: pxor <xmm10=%xmm8,<xmm14=%xmm11 | ||
5602 | pxor %xmm8,%xmm11 | ||
5603 | |||
5604 | # qhasm: xmm14 &= xmm15 | ||
5605 | # asm 1: pand <xmm15=int6464#14,<xmm14=int6464#12 | ||
5606 | # asm 2: pand <xmm15=%xmm13,<xmm14=%xmm11 | ||
5607 | pand %xmm13,%xmm11 | ||
5608 | |||
5609 | # qhasm: xmm14 ^= xmm12 | ||
5610 | # asm 1: pxor <xmm12=int6464#11,<xmm14=int6464#12 | ||
5611 | # asm 2: pxor <xmm12=%xmm10,<xmm14=%xmm11 | ||
5612 | pxor %xmm10,%xmm11 | ||
5613 | |||
5614 | # qhasm: xmm12 = xmm6 | ||
5615 | # asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#9 | ||
5616 | # asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm8 | ||
5617 | movdqa %xmm6,%xmm8 | ||
5618 | |||
5619 | # qhasm: xmm8 = xmm5 | ||
5620 | # asm 1: movdqa <xmm5=int6464#6,>xmm8=int6464#10 | ||
5621 | # asm 2: movdqa <xmm5=%xmm5,>xmm8=%xmm9 | ||
5622 | movdqa %xmm5,%xmm9 | ||
5623 | |||
5624 | # qhasm: xmm10 = xmm15 | ||
5625 | # asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11 | ||
5626 | # asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10 | ||
5627 | movdqa %xmm13,%xmm10 | ||
5628 | |||
5629 | # qhasm: xmm10 ^= xmm14 | ||
5630 | # asm 1: pxor <xmm14=int6464#12,<xmm10=int6464#11 | ||
5631 | # asm 2: pxor <xmm14=%xmm11,<xmm10=%xmm10 | ||
5632 | pxor %xmm11,%xmm10 | ||
5633 | |||
5634 | # qhasm: xmm10 &= xmm6 | ||
5635 | # asm 1: pand <xmm6=int6464#7,<xmm10=int6464#11 | ||
5636 | # asm 2: pand <xmm6=%xmm6,<xmm10=%xmm10 | ||
5637 | pand %xmm6,%xmm10 | ||
5638 | |||
5639 | # qhasm: xmm6 ^= xmm5 | ||
5640 | # asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7 | ||
5641 | # asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6 | ||
5642 | pxor %xmm5,%xmm6 | ||
5643 | |||
5644 | # qhasm: xmm6 &= xmm14 | ||
5645 | # asm 1: pand <xmm14=int6464#12,<xmm6=int6464#7 | ||
5646 | # asm 2: pand <xmm14=%xmm11,<xmm6=%xmm6 | ||
5647 | pand %xmm11,%xmm6 | ||
5648 | |||
5649 | # qhasm: xmm5 &= xmm15 | ||
5650 | # asm 1: pand <xmm15=int6464#14,<xmm5=int6464#6 | ||
5651 | # asm 2: pand <xmm15=%xmm13,<xmm5=%xmm5 | ||
5652 | pand %xmm13,%xmm5 | ||
5653 | |||
5654 | # qhasm: xmm6 ^= xmm5 | ||
5655 | # asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7 | ||
5656 | # asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6 | ||
5657 | pxor %xmm5,%xmm6 | ||
5658 | |||
5659 | # qhasm: xmm5 ^= xmm10 | ||
5660 | # asm 1: pxor <xmm10=int6464#11,<xmm5=int6464#6 | ||
5661 | # asm 2: pxor <xmm10=%xmm10,<xmm5=%xmm5 | ||
5662 | pxor %xmm10,%xmm5 | ||
5663 | |||
5664 | # qhasm: xmm12 ^= xmm0 | ||
5665 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#9 | ||
5666 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm8 | ||
5667 | pxor %xmm0,%xmm8 | ||
5668 | |||
5669 | # qhasm: xmm8 ^= xmm3 | ||
5670 | # asm 1: pxor <xmm3=int6464#4,<xmm8=int6464#10 | ||
5671 | # asm 2: pxor <xmm3=%xmm3,<xmm8=%xmm9 | ||
5672 | pxor %xmm3,%xmm9 | ||
5673 | |||
5674 | # qhasm: xmm15 ^= xmm13 | ||
5675 | # asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14 | ||
5676 | # asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13 | ||
5677 | pxor %xmm15,%xmm13 | ||
5678 | |||
5679 | # qhasm: xmm14 ^= xmm9 | ||
5680 | # asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12 | ||
5681 | # asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11 | ||
5682 | pxor %xmm12,%xmm11 | ||
5683 | |||
5684 | # qhasm: xmm11 = xmm15 | ||
5685 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
5686 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
5687 | movdqa %xmm13,%xmm10 | ||
5688 | |||
5689 | # qhasm: xmm11 ^= xmm14 | ||
5690 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
5691 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
5692 | pxor %xmm11,%xmm10 | ||
5693 | |||
5694 | # qhasm: xmm11 &= xmm12 | ||
5695 | # asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11 | ||
5696 | # asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10 | ||
5697 | pand %xmm8,%xmm10 | ||
5698 | |||
5699 | # qhasm: xmm12 ^= xmm8 | ||
5700 | # asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9 | ||
5701 | # asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8 | ||
5702 | pxor %xmm9,%xmm8 | ||
5703 | |||
5704 | # qhasm: xmm12 &= xmm14 | ||
5705 | # asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9 | ||
5706 | # asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8 | ||
5707 | pand %xmm11,%xmm8 | ||
5708 | |||
5709 | # qhasm: xmm8 &= xmm15 | ||
5710 | # asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10 | ||
5711 | # asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9 | ||
5712 | pand %xmm13,%xmm9 | ||
5713 | |||
5714 | # qhasm: xmm8 ^= xmm12 | ||
5715 | # asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10 | ||
5716 | # asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9 | ||
5717 | pxor %xmm8,%xmm9 | ||
5718 | |||
5719 | # qhasm: xmm12 ^= xmm11 | ||
5720 | # asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9 | ||
5721 | # asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8 | ||
5722 | pxor %xmm10,%xmm8 | ||
5723 | |||
5724 | # qhasm: xmm10 = xmm13 | ||
5725 | # asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11 | ||
5726 | # asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10 | ||
5727 | movdqa %xmm15,%xmm10 | ||
5728 | |||
5729 | # qhasm: xmm10 ^= xmm9 | ||
5730 | # asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11 | ||
5731 | # asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10 | ||
5732 | pxor %xmm12,%xmm10 | ||
5733 | |||
5734 | # qhasm: xmm10 &= xmm0 | ||
5735 | # asm 1: pand <xmm0=int6464#1,<xmm10=int6464#11 | ||
5736 | # asm 2: pand <xmm0=%xmm0,<xmm10=%xmm10 | ||
5737 | pand %xmm0,%xmm10 | ||
5738 | |||
5739 | # qhasm: xmm0 ^= xmm3 | ||
5740 | # asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1 | ||
5741 | # asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0 | ||
5742 | pxor %xmm3,%xmm0 | ||
5743 | |||
5744 | # qhasm: xmm0 &= xmm9 | ||
5745 | # asm 1: pand <xmm9=int6464#13,<xmm0=int6464#1 | ||
5746 | # asm 2: pand <xmm9=%xmm12,<xmm0=%xmm0 | ||
5747 | pand %xmm12,%xmm0 | ||
5748 | |||
5749 | # qhasm: xmm3 &= xmm13 | ||
5750 | # asm 1: pand <xmm13=int6464#16,<xmm3=int6464#4 | ||
5751 | # asm 2: pand <xmm13=%xmm15,<xmm3=%xmm3 | ||
5752 | pand %xmm15,%xmm3 | ||
5753 | |||
5754 | # qhasm: xmm0 ^= xmm3 | ||
5755 | # asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1 | ||
5756 | # asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0 | ||
5757 | pxor %xmm3,%xmm0 | ||
5758 | |||
5759 | # qhasm: xmm3 ^= xmm10 | ||
5760 | # asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4 | ||
5761 | # asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3 | ||
5762 | pxor %xmm10,%xmm3 | ||
5763 | |||
5764 | # qhasm: xmm6 ^= xmm12 | ||
5765 | # asm 1: pxor <xmm12=int6464#9,<xmm6=int6464#7 | ||
5766 | # asm 2: pxor <xmm12=%xmm8,<xmm6=%xmm6 | ||
5767 | pxor %xmm8,%xmm6 | ||
5768 | |||
5769 | # qhasm: xmm0 ^= xmm12 | ||
5770 | # asm 1: pxor <xmm12=int6464#9,<xmm0=int6464#1 | ||
5771 | # asm 2: pxor <xmm12=%xmm8,<xmm0=%xmm0 | ||
5772 | pxor %xmm8,%xmm0 | ||
5773 | |||
5774 | # qhasm: xmm5 ^= xmm8 | ||
5775 | # asm 1: pxor <xmm8=int6464#10,<xmm5=int6464#6 | ||
5776 | # asm 2: pxor <xmm8=%xmm9,<xmm5=%xmm5 | ||
5777 | pxor %xmm9,%xmm5 | ||
5778 | |||
5779 | # qhasm: xmm3 ^= xmm8 | ||
5780 | # asm 1: pxor <xmm8=int6464#10,<xmm3=int6464#4 | ||
5781 | # asm 2: pxor <xmm8=%xmm9,<xmm3=%xmm3 | ||
5782 | pxor %xmm9,%xmm3 | ||
5783 | |||
5784 | # qhasm: xmm12 = xmm7 | ||
5785 | # asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#9 | ||
5786 | # asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm8 | ||
5787 | movdqa %xmm7,%xmm8 | ||
5788 | |||
5789 | # qhasm: xmm8 = xmm1 | ||
5790 | # asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10 | ||
5791 | # asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9 | ||
5792 | movdqa %xmm1,%xmm9 | ||
5793 | |||
5794 | # qhasm: xmm12 ^= xmm4 | ||
5795 | # asm 1: pxor <xmm4=int6464#5,<xmm12=int6464#9 | ||
5796 | # asm 2: pxor <xmm4=%xmm4,<xmm12=%xmm8 | ||
5797 | pxor %xmm4,%xmm8 | ||
5798 | |||
5799 | # qhasm: xmm8 ^= xmm2 | ||
5800 | # asm 1: pxor <xmm2=int6464#3,<xmm8=int6464#10 | ||
5801 | # asm 2: pxor <xmm2=%xmm2,<xmm8=%xmm9 | ||
5802 | pxor %xmm2,%xmm9 | ||
5803 | |||
5804 | # qhasm: xmm11 = xmm15 | ||
5805 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
5806 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
5807 | movdqa %xmm13,%xmm10 | ||
5808 | |||
5809 | # qhasm: xmm11 ^= xmm14 | ||
5810 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
5811 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
5812 | pxor %xmm11,%xmm10 | ||
5813 | |||
5814 | # qhasm: xmm11 &= xmm12 | ||
5815 | # asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11 | ||
5816 | # asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10 | ||
5817 | pand %xmm8,%xmm10 | ||
5818 | |||
5819 | # qhasm: xmm12 ^= xmm8 | ||
5820 | # asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9 | ||
5821 | # asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8 | ||
5822 | pxor %xmm9,%xmm8 | ||
5823 | |||
5824 | # qhasm: xmm12 &= xmm14 | ||
5825 | # asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9 | ||
5826 | # asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8 | ||
5827 | pand %xmm11,%xmm8 | ||
5828 | |||
5829 | # qhasm: xmm8 &= xmm15 | ||
5830 | # asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10 | ||
5831 | # asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9 | ||
5832 | pand %xmm13,%xmm9 | ||
5833 | |||
5834 | # qhasm: xmm8 ^= xmm12 | ||
5835 | # asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10 | ||
5836 | # asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9 | ||
5837 | pxor %xmm8,%xmm9 | ||
5838 | |||
5839 | # qhasm: xmm12 ^= xmm11 | ||
5840 | # asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9 | ||
5841 | # asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8 | ||
5842 | pxor %xmm10,%xmm8 | ||
5843 | |||
5844 | # qhasm: xmm10 = xmm13 | ||
5845 | # asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11 | ||
5846 | # asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10 | ||
5847 | movdqa %xmm15,%xmm10 | ||
5848 | |||
5849 | # qhasm: xmm10 ^= xmm9 | ||
5850 | # asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11 | ||
5851 | # asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10 | ||
5852 | pxor %xmm12,%xmm10 | ||
5853 | |||
5854 | # qhasm: xmm10 &= xmm4 | ||
5855 | # asm 1: pand <xmm4=int6464#5,<xmm10=int6464#11 | ||
5856 | # asm 2: pand <xmm4=%xmm4,<xmm10=%xmm10 | ||
5857 | pand %xmm4,%xmm10 | ||
5858 | |||
5859 | # qhasm: xmm4 ^= xmm2 | ||
5860 | # asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5 | ||
5861 | # asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4 | ||
5862 | pxor %xmm2,%xmm4 | ||
5863 | |||
5864 | # qhasm: xmm4 &= xmm9 | ||
5865 | # asm 1: pand <xmm9=int6464#13,<xmm4=int6464#5 | ||
5866 | # asm 2: pand <xmm9=%xmm12,<xmm4=%xmm4 | ||
5867 | pand %xmm12,%xmm4 | ||
5868 | |||
5869 | # qhasm: xmm2 &= xmm13 | ||
5870 | # asm 1: pand <xmm13=int6464#16,<xmm2=int6464#3 | ||
5871 | # asm 2: pand <xmm13=%xmm15,<xmm2=%xmm2 | ||
5872 | pand %xmm15,%xmm2 | ||
5873 | |||
5874 | # qhasm: xmm4 ^= xmm2 | ||
5875 | # asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5 | ||
5876 | # asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4 | ||
5877 | pxor %xmm2,%xmm4 | ||
5878 | |||
5879 | # qhasm: xmm2 ^= xmm10 | ||
5880 | # asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3 | ||
5881 | # asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2 | ||
5882 | pxor %xmm10,%xmm2 | ||
5883 | |||
5884 | # qhasm: xmm15 ^= xmm13 | ||
5885 | # asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14 | ||
5886 | # asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13 | ||
5887 | pxor %xmm15,%xmm13 | ||
5888 | |||
5889 | # qhasm: xmm14 ^= xmm9 | ||
5890 | # asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12 | ||
5891 | # asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11 | ||
5892 | pxor %xmm12,%xmm11 | ||
5893 | |||
5894 | # qhasm: xmm11 = xmm15 | ||
5895 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
5896 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
5897 | movdqa %xmm13,%xmm10 | ||
5898 | |||
5899 | # qhasm: xmm11 ^= xmm14 | ||
5900 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
5901 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
5902 | pxor %xmm11,%xmm10 | ||
5903 | |||
5904 | # qhasm: xmm11 &= xmm7 | ||
5905 | # asm 1: pand <xmm7=int6464#8,<xmm11=int6464#11 | ||
5906 | # asm 2: pand <xmm7=%xmm7,<xmm11=%xmm10 | ||
5907 | pand %xmm7,%xmm10 | ||
5908 | |||
5909 | # qhasm: xmm7 ^= xmm1 | ||
5910 | # asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8 | ||
5911 | # asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7 | ||
5912 | pxor %xmm1,%xmm7 | ||
5913 | |||
5914 | # qhasm: xmm7 &= xmm14 | ||
5915 | # asm 1: pand <xmm14=int6464#12,<xmm7=int6464#8 | ||
5916 | # asm 2: pand <xmm14=%xmm11,<xmm7=%xmm7 | ||
5917 | pand %xmm11,%xmm7 | ||
5918 | |||
5919 | # qhasm: xmm1 &= xmm15 | ||
5920 | # asm 1: pand <xmm15=int6464#14,<xmm1=int6464#2 | ||
5921 | # asm 2: pand <xmm15=%xmm13,<xmm1=%xmm1 | ||
5922 | pand %xmm13,%xmm1 | ||
5923 | |||
5924 | # qhasm: xmm7 ^= xmm1 | ||
5925 | # asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8 | ||
5926 | # asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7 | ||
5927 | pxor %xmm1,%xmm7 | ||
5928 | |||
5929 | # qhasm: xmm1 ^= xmm11 | ||
5930 | # asm 1: pxor <xmm11=int6464#11,<xmm1=int6464#2 | ||
5931 | # asm 2: pxor <xmm11=%xmm10,<xmm1=%xmm1 | ||
5932 | pxor %xmm10,%xmm1 | ||
5933 | |||
5934 | # qhasm: xmm7 ^= xmm12 | ||
5935 | # asm 1: pxor <xmm12=int6464#9,<xmm7=int6464#8 | ||
5936 | # asm 2: pxor <xmm12=%xmm8,<xmm7=%xmm7 | ||
5937 | pxor %xmm8,%xmm7 | ||
5938 | |||
5939 | # qhasm: xmm4 ^= xmm12 | ||
5940 | # asm 1: pxor <xmm12=int6464#9,<xmm4=int6464#5 | ||
5941 | # asm 2: pxor <xmm12=%xmm8,<xmm4=%xmm4 | ||
5942 | pxor %xmm8,%xmm4 | ||
5943 | |||
5944 | # qhasm: xmm1 ^= xmm8 | ||
5945 | # asm 1: pxor <xmm8=int6464#10,<xmm1=int6464#2 | ||
5946 | # asm 2: pxor <xmm8=%xmm9,<xmm1=%xmm1 | ||
5947 | pxor %xmm9,%xmm1 | ||
5948 | |||
5949 | # qhasm: xmm2 ^= xmm8 | ||
5950 | # asm 1: pxor <xmm8=int6464#10,<xmm2=int6464#3 | ||
5951 | # asm 2: pxor <xmm8=%xmm9,<xmm2=%xmm2 | ||
5952 | pxor %xmm9,%xmm2 | ||
5953 | |||
5954 | # qhasm: xmm7 ^= xmm0 | ||
5955 | # asm 1: pxor <xmm0=int6464#1,<xmm7=int6464#8 | ||
5956 | # asm 2: pxor <xmm0=%xmm0,<xmm7=%xmm7 | ||
5957 | pxor %xmm0,%xmm7 | ||
5958 | |||
5959 | # qhasm: xmm1 ^= xmm6 | ||
5960 | # asm 1: pxor <xmm6=int6464#7,<xmm1=int6464#2 | ||
5961 | # asm 2: pxor <xmm6=%xmm6,<xmm1=%xmm1 | ||
5962 | pxor %xmm6,%xmm1 | ||
5963 | |||
5964 | # qhasm: xmm4 ^= xmm7 | ||
5965 | # asm 1: pxor <xmm7=int6464#8,<xmm4=int6464#5 | ||
5966 | # asm 2: pxor <xmm7=%xmm7,<xmm4=%xmm4 | ||
5967 | pxor %xmm7,%xmm4 | ||
5968 | |||
5969 | # qhasm: xmm6 ^= xmm0 | ||
5970 | # asm 1: pxor <xmm0=int6464#1,<xmm6=int6464#7 | ||
5971 | # asm 2: pxor <xmm0=%xmm0,<xmm6=%xmm6 | ||
5972 | pxor %xmm0,%xmm6 | ||
5973 | |||
5974 | # qhasm: xmm0 ^= xmm1 | ||
5975 | # asm 1: pxor <xmm1=int6464#2,<xmm0=int6464#1 | ||
5976 | # asm 2: pxor <xmm1=%xmm1,<xmm0=%xmm0 | ||
5977 | pxor %xmm1,%xmm0 | ||
5978 | |||
5979 | # qhasm: xmm1 ^= xmm5 | ||
5980 | # asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2 | ||
5981 | # asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1 | ||
5982 | pxor %xmm5,%xmm1 | ||
5983 | |||
5984 | # qhasm: xmm5 ^= xmm2 | ||
5985 | # asm 1: pxor <xmm2=int6464#3,<xmm5=int6464#6 | ||
5986 | # asm 2: pxor <xmm2=%xmm2,<xmm5=%xmm5 | ||
5987 | pxor %xmm2,%xmm5 | ||
5988 | |||
5989 | # qhasm: xmm4 ^= xmm5 | ||
5990 | # asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5 | ||
5991 | # asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4 | ||
5992 | pxor %xmm5,%xmm4 | ||
5993 | |||
5994 | # qhasm: xmm2 ^= xmm3 | ||
5995 | # asm 1: pxor <xmm3=int6464#4,<xmm2=int6464#3 | ||
5996 | # asm 2: pxor <xmm3=%xmm3,<xmm2=%xmm2 | ||
5997 | pxor %xmm3,%xmm2 | ||
5998 | |||
5999 | # qhasm: xmm3 ^= xmm5 | ||
6000 | # asm 1: pxor <xmm5=int6464#6,<xmm3=int6464#4 | ||
6001 | # asm 2: pxor <xmm5=%xmm5,<xmm3=%xmm3 | ||
6002 | pxor %xmm5,%xmm3 | ||
6003 | |||
6004 | # qhasm: xmm6 ^= xmm3 | ||
6005 | # asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7 | ||
6006 | # asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6 | ||
6007 | pxor %xmm3,%xmm6 | ||
6008 | |||
6009 | # qhasm: xmm8 = shuffle dwords of xmm0 by 0x93 | ||
6010 | # asm 1: pshufd $0x93,<xmm0=int6464#1,>xmm8=int6464#9 | ||
6011 | # asm 2: pshufd $0x93,<xmm0=%xmm0,>xmm8=%xmm8 | ||
6012 | pshufd $0x93,%xmm0,%xmm8 | ||
6013 | |||
6014 | # qhasm: xmm9 = shuffle dwords of xmm1 by 0x93 | ||
6015 | # asm 1: pshufd $0x93,<xmm1=int6464#2,>xmm9=int6464#10 | ||
6016 | # asm 2: pshufd $0x93,<xmm1=%xmm1,>xmm9=%xmm9 | ||
6017 | pshufd $0x93,%xmm1,%xmm9 | ||
6018 | |||
6019 | # qhasm: xmm10 = shuffle dwords of xmm4 by 0x93 | ||
6020 | # asm 1: pshufd $0x93,<xmm4=int6464#5,>xmm10=int6464#11 | ||
6021 | # asm 2: pshufd $0x93,<xmm4=%xmm4,>xmm10=%xmm10 | ||
6022 | pshufd $0x93,%xmm4,%xmm10 | ||
6023 | |||
6024 | # qhasm: xmm11 = shuffle dwords of xmm6 by 0x93 | ||
6025 | # asm 1: pshufd $0x93,<xmm6=int6464#7,>xmm11=int6464#12 | ||
6026 | # asm 2: pshufd $0x93,<xmm6=%xmm6,>xmm11=%xmm11 | ||
6027 | pshufd $0x93,%xmm6,%xmm11 | ||
6028 | |||
6029 | # qhasm: xmm12 = shuffle dwords of xmm3 by 0x93 | ||
6030 | # asm 1: pshufd $0x93,<xmm3=int6464#4,>xmm12=int6464#13 | ||
6031 | # asm 2: pshufd $0x93,<xmm3=%xmm3,>xmm12=%xmm12 | ||
6032 | pshufd $0x93,%xmm3,%xmm12 | ||
6033 | |||
6034 | # qhasm: xmm13 = shuffle dwords of xmm7 by 0x93 | ||
6035 | # asm 1: pshufd $0x93,<xmm7=int6464#8,>xmm13=int6464#14 | ||
6036 | # asm 2: pshufd $0x93,<xmm7=%xmm7,>xmm13=%xmm13 | ||
6037 | pshufd $0x93,%xmm7,%xmm13 | ||
6038 | |||
6039 | # qhasm: xmm14 = shuffle dwords of xmm2 by 0x93 | ||
6040 | # asm 1: pshufd $0x93,<xmm2=int6464#3,>xmm14=int6464#15 | ||
6041 | # asm 2: pshufd $0x93,<xmm2=%xmm2,>xmm14=%xmm14 | ||
6042 | pshufd $0x93,%xmm2,%xmm14 | ||
6043 | |||
6044 | # qhasm: xmm15 = shuffle dwords of xmm5 by 0x93 | ||
6045 | # asm 1: pshufd $0x93,<xmm5=int6464#6,>xmm15=int6464#16 | ||
6046 | # asm 2: pshufd $0x93,<xmm5=%xmm5,>xmm15=%xmm15 | ||
6047 | pshufd $0x93,%xmm5,%xmm15 | ||
6048 | |||
6049 | # qhasm: xmm0 ^= xmm8 | ||
6050 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
6051 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
6052 | pxor %xmm8,%xmm0 | ||
6053 | |||
6054 | # qhasm: xmm1 ^= xmm9 | ||
6055 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
6056 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
6057 | pxor %xmm9,%xmm1 | ||
6058 | |||
6059 | # qhasm: xmm4 ^= xmm10 | ||
6060 | # asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5 | ||
6061 | # asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4 | ||
6062 | pxor %xmm10,%xmm4 | ||
6063 | |||
6064 | # qhasm: xmm6 ^= xmm11 | ||
6065 | # asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7 | ||
6066 | # asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6 | ||
6067 | pxor %xmm11,%xmm6 | ||
6068 | |||
6069 | # qhasm: xmm3 ^= xmm12 | ||
6070 | # asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4 | ||
6071 | # asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3 | ||
6072 | pxor %xmm12,%xmm3 | ||
6073 | |||
6074 | # qhasm: xmm7 ^= xmm13 | ||
6075 | # asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8 | ||
6076 | # asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7 | ||
6077 | pxor %xmm13,%xmm7 | ||
6078 | |||
6079 | # qhasm: xmm2 ^= xmm14 | ||
6080 | # asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3 | ||
6081 | # asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2 | ||
6082 | pxor %xmm14,%xmm2 | ||
6083 | |||
6084 | # qhasm: xmm5 ^= xmm15 | ||
6085 | # asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6 | ||
6086 | # asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5 | ||
6087 | pxor %xmm15,%xmm5 | ||
6088 | |||
6089 | # qhasm: xmm8 ^= xmm5 | ||
6090 | # asm 1: pxor <xmm5=int6464#6,<xmm8=int6464#9 | ||
6091 | # asm 2: pxor <xmm5=%xmm5,<xmm8=%xmm8 | ||
6092 | pxor %xmm5,%xmm8 | ||
6093 | |||
6094 | # qhasm: xmm9 ^= xmm0 | ||
6095 | # asm 1: pxor <xmm0=int6464#1,<xmm9=int6464#10 | ||
6096 | # asm 2: pxor <xmm0=%xmm0,<xmm9=%xmm9 | ||
6097 | pxor %xmm0,%xmm9 | ||
6098 | |||
6099 | # qhasm: xmm10 ^= xmm1 | ||
6100 | # asm 1: pxor <xmm1=int6464#2,<xmm10=int6464#11 | ||
6101 | # asm 2: pxor <xmm1=%xmm1,<xmm10=%xmm10 | ||
6102 | pxor %xmm1,%xmm10 | ||
6103 | |||
6104 | # qhasm: xmm9 ^= xmm5 | ||
6105 | # asm 1: pxor <xmm5=int6464#6,<xmm9=int6464#10 | ||
6106 | # asm 2: pxor <xmm5=%xmm5,<xmm9=%xmm9 | ||
6107 | pxor %xmm5,%xmm9 | ||
6108 | |||
6109 | # qhasm: xmm11 ^= xmm4 | ||
6110 | # asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#12 | ||
6111 | # asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm11 | ||
6112 | pxor %xmm4,%xmm11 | ||
6113 | |||
6114 | # qhasm: xmm12 ^= xmm6 | ||
6115 | # asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#13 | ||
6116 | # asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm12 | ||
6117 | pxor %xmm6,%xmm12 | ||
6118 | |||
6119 | # qhasm: xmm13 ^= xmm3 | ||
6120 | # asm 1: pxor <xmm3=int6464#4,<xmm13=int6464#14 | ||
6121 | # asm 2: pxor <xmm3=%xmm3,<xmm13=%xmm13 | ||
6122 | pxor %xmm3,%xmm13 | ||
6123 | |||
6124 | # qhasm: xmm11 ^= xmm5 | ||
6125 | # asm 1: pxor <xmm5=int6464#6,<xmm11=int6464#12 | ||
6126 | # asm 2: pxor <xmm5=%xmm5,<xmm11=%xmm11 | ||
6127 | pxor %xmm5,%xmm11 | ||
6128 | |||
6129 | # qhasm: xmm14 ^= xmm7 | ||
6130 | # asm 1: pxor <xmm7=int6464#8,<xmm14=int6464#15 | ||
6131 | # asm 2: pxor <xmm7=%xmm7,<xmm14=%xmm14 | ||
6132 | pxor %xmm7,%xmm14 | ||
6133 | |||
6134 | # qhasm: xmm15 ^= xmm2 | ||
6135 | # asm 1: pxor <xmm2=int6464#3,<xmm15=int6464#16 | ||
6136 | # asm 2: pxor <xmm2=%xmm2,<xmm15=%xmm15 | ||
6137 | pxor %xmm2,%xmm15 | ||
6138 | |||
6139 | # qhasm: xmm12 ^= xmm5 | ||
6140 | # asm 1: pxor <xmm5=int6464#6,<xmm12=int6464#13 | ||
6141 | # asm 2: pxor <xmm5=%xmm5,<xmm12=%xmm12 | ||
6142 | pxor %xmm5,%xmm12 | ||
6143 | |||
6144 | # qhasm: xmm0 = shuffle dwords of xmm0 by 0x4E | ||
6145 | # asm 1: pshufd $0x4E,<xmm0=int6464#1,>xmm0=int6464#1 | ||
6146 | # asm 2: pshufd $0x4E,<xmm0=%xmm0,>xmm0=%xmm0 | ||
6147 | pshufd $0x4E,%xmm0,%xmm0 | ||
6148 | |||
6149 | # qhasm: xmm1 = shuffle dwords of xmm1 by 0x4E | ||
6150 | # asm 1: pshufd $0x4E,<xmm1=int6464#2,>xmm1=int6464#2 | ||
6151 | # asm 2: pshufd $0x4E,<xmm1=%xmm1,>xmm1=%xmm1 | ||
6152 | pshufd $0x4E,%xmm1,%xmm1 | ||
6153 | |||
6154 | # qhasm: xmm4 = shuffle dwords of xmm4 by 0x4E | ||
6155 | # asm 1: pshufd $0x4E,<xmm4=int6464#5,>xmm4=int6464#5 | ||
6156 | # asm 2: pshufd $0x4E,<xmm4=%xmm4,>xmm4=%xmm4 | ||
6157 | pshufd $0x4E,%xmm4,%xmm4 | ||
6158 | |||
6159 | # qhasm: xmm6 = shuffle dwords of xmm6 by 0x4E | ||
6160 | # asm 1: pshufd $0x4E,<xmm6=int6464#7,>xmm6=int6464#7 | ||
6161 | # asm 2: pshufd $0x4E,<xmm6=%xmm6,>xmm6=%xmm6 | ||
6162 | pshufd $0x4E,%xmm6,%xmm6 | ||
6163 | |||
6164 | # qhasm: xmm3 = shuffle dwords of xmm3 by 0x4E | ||
6165 | # asm 1: pshufd $0x4E,<xmm3=int6464#4,>xmm3=int6464#4 | ||
6166 | # asm 2: pshufd $0x4E,<xmm3=%xmm3,>xmm3=%xmm3 | ||
6167 | pshufd $0x4E,%xmm3,%xmm3 | ||
6168 | |||
6169 | # qhasm: xmm7 = shuffle dwords of xmm7 by 0x4E | ||
6170 | # asm 1: pshufd $0x4E,<xmm7=int6464#8,>xmm7=int6464#8 | ||
6171 | # asm 2: pshufd $0x4E,<xmm7=%xmm7,>xmm7=%xmm7 | ||
6172 | pshufd $0x4E,%xmm7,%xmm7 | ||
6173 | |||
6174 | # qhasm: xmm2 = shuffle dwords of xmm2 by 0x4E | ||
6175 | # asm 1: pshufd $0x4E,<xmm2=int6464#3,>xmm2=int6464#3 | ||
6176 | # asm 2: pshufd $0x4E,<xmm2=%xmm2,>xmm2=%xmm2 | ||
6177 | pshufd $0x4E,%xmm2,%xmm2 | ||
6178 | |||
6179 | # qhasm: xmm5 = shuffle dwords of xmm5 by 0x4E | ||
6180 | # asm 1: pshufd $0x4E,<xmm5=int6464#6,>xmm5=int6464#6 | ||
6181 | # asm 2: pshufd $0x4E,<xmm5=%xmm5,>xmm5=%xmm5 | ||
6182 | pshufd $0x4E,%xmm5,%xmm5 | ||
6183 | |||
6184 | # qhasm: xmm8 ^= xmm0 | ||
6185 | # asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9 | ||
6186 | # asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8 | ||
6187 | pxor %xmm0,%xmm8 | ||
6188 | |||
6189 | # qhasm: xmm9 ^= xmm1 | ||
6190 | # asm 1: pxor <xmm1=int6464#2,<xmm9=int6464#10 | ||
6191 | # asm 2: pxor <xmm1=%xmm1,<xmm9=%xmm9 | ||
6192 | pxor %xmm1,%xmm9 | ||
6193 | |||
6194 | # qhasm: xmm10 ^= xmm4 | ||
6195 | # asm 1: pxor <xmm4=int6464#5,<xmm10=int6464#11 | ||
6196 | # asm 2: pxor <xmm4=%xmm4,<xmm10=%xmm10 | ||
6197 | pxor %xmm4,%xmm10 | ||
6198 | |||
6199 | # qhasm: xmm11 ^= xmm6 | ||
6200 | # asm 1: pxor <xmm6=int6464#7,<xmm11=int6464#12 | ||
6201 | # asm 2: pxor <xmm6=%xmm6,<xmm11=%xmm11 | ||
6202 | pxor %xmm6,%xmm11 | ||
6203 | |||
6204 | # qhasm: xmm12 ^= xmm3 | ||
6205 | # asm 1: pxor <xmm3=int6464#4,<xmm12=int6464#13 | ||
6206 | # asm 2: pxor <xmm3=%xmm3,<xmm12=%xmm12 | ||
6207 | pxor %xmm3,%xmm12 | ||
6208 | |||
6209 | # qhasm: xmm13 ^= xmm7 | ||
6210 | # asm 1: pxor <xmm7=int6464#8,<xmm13=int6464#14 | ||
6211 | # asm 2: pxor <xmm7=%xmm7,<xmm13=%xmm13 | ||
6212 | pxor %xmm7,%xmm13 | ||
6213 | |||
6214 | # qhasm: xmm14 ^= xmm2 | ||
6215 | # asm 1: pxor <xmm2=int6464#3,<xmm14=int6464#15 | ||
6216 | # asm 2: pxor <xmm2=%xmm2,<xmm14=%xmm14 | ||
6217 | pxor %xmm2,%xmm14 | ||
6218 | |||
6219 | # qhasm: xmm15 ^= xmm5 | ||
6220 | # asm 1: pxor <xmm5=int6464#6,<xmm15=int6464#16 | ||
6221 | # asm 2: pxor <xmm5=%xmm5,<xmm15=%xmm15 | ||
6222 | pxor %xmm5,%xmm15 | ||
6223 | |||
6224 | # qhasm: xmm8 ^= *(int128 *)(c + 640) | ||
6225 | # asm 1: pxor 640(<c=int64#5),<xmm8=int6464#9 | ||
6226 | # asm 2: pxor 640(<c=%r8),<xmm8=%xmm8 | ||
6227 | pxor 640(%r8),%xmm8 | ||
6228 | |||
6229 | # qhasm: shuffle bytes of xmm8 by SR | ||
6230 | # asm 1: pshufb SR,<xmm8=int6464#9 | ||
6231 | # asm 2: pshufb SR,<xmm8=%xmm8 | ||
6232 | pshufb SR,%xmm8 | ||
6233 | |||
6234 | # qhasm: xmm9 ^= *(int128 *)(c + 656) | ||
6235 | # asm 1: pxor 656(<c=int64#5),<xmm9=int6464#10 | ||
6236 | # asm 2: pxor 656(<c=%r8),<xmm9=%xmm9 | ||
6237 | pxor 656(%r8),%xmm9 | ||
6238 | |||
6239 | # qhasm: shuffle bytes of xmm9 by SR | ||
6240 | # asm 1: pshufb SR,<xmm9=int6464#10 | ||
6241 | # asm 2: pshufb SR,<xmm9=%xmm9 | ||
6242 | pshufb SR,%xmm9 | ||
6243 | |||
6244 | # qhasm: xmm10 ^= *(int128 *)(c + 672) | ||
6245 | # asm 1: pxor 672(<c=int64#5),<xmm10=int6464#11 | ||
6246 | # asm 2: pxor 672(<c=%r8),<xmm10=%xmm10 | ||
6247 | pxor 672(%r8),%xmm10 | ||
6248 | |||
6249 | # qhasm: shuffle bytes of xmm10 by SR | ||
6250 | # asm 1: pshufb SR,<xmm10=int6464#11 | ||
6251 | # asm 2: pshufb SR,<xmm10=%xmm10 | ||
6252 | pshufb SR,%xmm10 | ||
6253 | |||
6254 | # qhasm: xmm11 ^= *(int128 *)(c + 688) | ||
6255 | # asm 1: pxor 688(<c=int64#5),<xmm11=int6464#12 | ||
6256 | # asm 2: pxor 688(<c=%r8),<xmm11=%xmm11 | ||
6257 | pxor 688(%r8),%xmm11 | ||
6258 | |||
6259 | # qhasm: shuffle bytes of xmm11 by SR | ||
6260 | # asm 1: pshufb SR,<xmm11=int6464#12 | ||
6261 | # asm 2: pshufb SR,<xmm11=%xmm11 | ||
6262 | pshufb SR,%xmm11 | ||
6263 | |||
6264 | # qhasm: xmm12 ^= *(int128 *)(c + 704) | ||
6265 | # asm 1: pxor 704(<c=int64#5),<xmm12=int6464#13 | ||
6266 | # asm 2: pxor 704(<c=%r8),<xmm12=%xmm12 | ||
6267 | pxor 704(%r8),%xmm12 | ||
6268 | |||
6269 | # qhasm: shuffle bytes of xmm12 by SR | ||
6270 | # asm 1: pshufb SR,<xmm12=int6464#13 | ||
6271 | # asm 2: pshufb SR,<xmm12=%xmm12 | ||
6272 | pshufb SR,%xmm12 | ||
6273 | |||
6274 | # qhasm: xmm13 ^= *(int128 *)(c + 720) | ||
6275 | # asm 1: pxor 720(<c=int64#5),<xmm13=int6464#14 | ||
6276 | # asm 2: pxor 720(<c=%r8),<xmm13=%xmm13 | ||
6277 | pxor 720(%r8),%xmm13 | ||
6278 | |||
6279 | # qhasm: shuffle bytes of xmm13 by SR | ||
6280 | # asm 1: pshufb SR,<xmm13=int6464#14 | ||
6281 | # asm 2: pshufb SR,<xmm13=%xmm13 | ||
6282 | pshufb SR,%xmm13 | ||
6283 | |||
6284 | # qhasm: xmm14 ^= *(int128 *)(c + 736) | ||
6285 | # asm 1: pxor 736(<c=int64#5),<xmm14=int6464#15 | ||
6286 | # asm 2: pxor 736(<c=%r8),<xmm14=%xmm14 | ||
6287 | pxor 736(%r8),%xmm14 | ||
6288 | |||
6289 | # qhasm: shuffle bytes of xmm14 by SR | ||
6290 | # asm 1: pshufb SR,<xmm14=int6464#15 | ||
6291 | # asm 2: pshufb SR,<xmm14=%xmm14 | ||
6292 | pshufb SR,%xmm14 | ||
6293 | |||
6294 | # qhasm: xmm15 ^= *(int128 *)(c + 752) | ||
6295 | # asm 1: pxor 752(<c=int64#5),<xmm15=int6464#16 | ||
6296 | # asm 2: pxor 752(<c=%r8),<xmm15=%xmm15 | ||
6297 | pxor 752(%r8),%xmm15 | ||
6298 | |||
6299 | # qhasm: shuffle bytes of xmm15 by SR | ||
6300 | # asm 1: pshufb SR,<xmm15=int6464#16 | ||
6301 | # asm 2: pshufb SR,<xmm15=%xmm15 | ||
6302 | pshufb SR,%xmm15 | ||
6303 | |||
6304 | # qhasm: xmm13 ^= xmm14 | ||
6305 | # asm 1: pxor <xmm14=int6464#15,<xmm13=int6464#14 | ||
6306 | # asm 2: pxor <xmm14=%xmm14,<xmm13=%xmm13 | ||
6307 | pxor %xmm14,%xmm13 | ||
6308 | |||
6309 | # qhasm: xmm10 ^= xmm9 | ||
6310 | # asm 1: pxor <xmm9=int6464#10,<xmm10=int6464#11 | ||
6311 | # asm 2: pxor <xmm9=%xmm9,<xmm10=%xmm10 | ||
6312 | pxor %xmm9,%xmm10 | ||
6313 | |||
6314 | # qhasm: xmm13 ^= xmm8 | ||
6315 | # asm 1: pxor <xmm8=int6464#9,<xmm13=int6464#14 | ||
6316 | # asm 2: pxor <xmm8=%xmm8,<xmm13=%xmm13 | ||
6317 | pxor %xmm8,%xmm13 | ||
6318 | |||
6319 | # qhasm: xmm14 ^= xmm10 | ||
6320 | # asm 1: pxor <xmm10=int6464#11,<xmm14=int6464#15 | ||
6321 | # asm 2: pxor <xmm10=%xmm10,<xmm14=%xmm14 | ||
6322 | pxor %xmm10,%xmm14 | ||
6323 | |||
6324 | # qhasm: xmm11 ^= xmm8 | ||
6325 | # asm 1: pxor <xmm8=int6464#9,<xmm11=int6464#12 | ||
6326 | # asm 2: pxor <xmm8=%xmm8,<xmm11=%xmm11 | ||
6327 | pxor %xmm8,%xmm11 | ||
6328 | |||
6329 | # qhasm: xmm14 ^= xmm11 | ||
6330 | # asm 1: pxor <xmm11=int6464#12,<xmm14=int6464#15 | ||
6331 | # asm 2: pxor <xmm11=%xmm11,<xmm14=%xmm14 | ||
6332 | pxor %xmm11,%xmm14 | ||
6333 | |||
6334 | # qhasm: xmm11 ^= xmm15 | ||
6335 | # asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#12 | ||
6336 | # asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm11 | ||
6337 | pxor %xmm15,%xmm11 | ||
6338 | |||
6339 | # qhasm: xmm11 ^= xmm12 | ||
6340 | # asm 1: pxor <xmm12=int6464#13,<xmm11=int6464#12 | ||
6341 | # asm 2: pxor <xmm12=%xmm12,<xmm11=%xmm11 | ||
6342 | pxor %xmm12,%xmm11 | ||
6343 | |||
6344 | # qhasm: xmm15 ^= xmm13 | ||
6345 | # asm 1: pxor <xmm13=int6464#14,<xmm15=int6464#16 | ||
6346 | # asm 2: pxor <xmm13=%xmm13,<xmm15=%xmm15 | ||
6347 | pxor %xmm13,%xmm15 | ||
6348 | |||
6349 | # qhasm: xmm11 ^= xmm9 | ||
6350 | # asm 1: pxor <xmm9=int6464#10,<xmm11=int6464#12 | ||
6351 | # asm 2: pxor <xmm9=%xmm9,<xmm11=%xmm11 | ||
6352 | pxor %xmm9,%xmm11 | ||
6353 | |||
6354 | # qhasm: xmm12 ^= xmm13 | ||
6355 | # asm 1: pxor <xmm13=int6464#14,<xmm12=int6464#13 | ||
6356 | # asm 2: pxor <xmm13=%xmm13,<xmm12=%xmm12 | ||
6357 | pxor %xmm13,%xmm12 | ||
6358 | |||
6359 | # qhasm: xmm10 ^= xmm15 | ||
6360 | # asm 1: pxor <xmm15=int6464#16,<xmm10=int6464#11 | ||
6361 | # asm 2: pxor <xmm15=%xmm15,<xmm10=%xmm10 | ||
6362 | pxor %xmm15,%xmm10 | ||
6363 | |||
6364 | # qhasm: xmm9 ^= xmm13 | ||
6365 | # asm 1: pxor <xmm13=int6464#14,<xmm9=int6464#10 | ||
6366 | # asm 2: pxor <xmm13=%xmm13,<xmm9=%xmm9 | ||
6367 | pxor %xmm13,%xmm9 | ||
6368 | |||
6369 | # qhasm: xmm3 = xmm15 | ||
6370 | # asm 1: movdqa <xmm15=int6464#16,>xmm3=int6464#1 | ||
6371 | # asm 2: movdqa <xmm15=%xmm15,>xmm3=%xmm0 | ||
6372 | movdqa %xmm15,%xmm0 | ||
6373 | |||
6374 | # qhasm: xmm2 = xmm9 | ||
6375 | # asm 1: movdqa <xmm9=int6464#10,>xmm2=int6464#2 | ||
6376 | # asm 2: movdqa <xmm9=%xmm9,>xmm2=%xmm1 | ||
6377 | movdqa %xmm9,%xmm1 | ||
6378 | |||
6379 | # qhasm: xmm1 = xmm13 | ||
6380 | # asm 1: movdqa <xmm13=int6464#14,>xmm1=int6464#3 | ||
6381 | # asm 2: movdqa <xmm13=%xmm13,>xmm1=%xmm2 | ||
6382 | movdqa %xmm13,%xmm2 | ||
6383 | |||
6384 | # qhasm: xmm5 = xmm10 | ||
6385 | # asm 1: movdqa <xmm10=int6464#11,>xmm5=int6464#4 | ||
6386 | # asm 2: movdqa <xmm10=%xmm10,>xmm5=%xmm3 | ||
6387 | movdqa %xmm10,%xmm3 | ||
6388 | |||
6389 | # qhasm: xmm4 = xmm14 | ||
6390 | # asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#5 | ||
6391 | # asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm4 | ||
6392 | movdqa %xmm14,%xmm4 | ||
6393 | |||
6394 | # qhasm: xmm3 ^= xmm12 | ||
6395 | # asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#1 | ||
6396 | # asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm0 | ||
6397 | pxor %xmm12,%xmm0 | ||
6398 | |||
6399 | # qhasm: xmm2 ^= xmm10 | ||
6400 | # asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#2 | ||
6401 | # asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm1 | ||
6402 | pxor %xmm10,%xmm1 | ||
6403 | |||
6404 | # qhasm: xmm1 ^= xmm11 | ||
6405 | # asm 1: pxor <xmm11=int6464#12,<xmm1=int6464#3 | ||
6406 | # asm 2: pxor <xmm11=%xmm11,<xmm1=%xmm2 | ||
6407 | pxor %xmm11,%xmm2 | ||
6408 | |||
6409 | # qhasm: xmm5 ^= xmm12 | ||
6410 | # asm 1: pxor <xmm12=int6464#13,<xmm5=int6464#4 | ||
6411 | # asm 2: pxor <xmm12=%xmm12,<xmm5=%xmm3 | ||
6412 | pxor %xmm12,%xmm3 | ||
6413 | |||
6414 | # qhasm: xmm4 ^= xmm8 | ||
6415 | # asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#5 | ||
6416 | # asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm4 | ||
6417 | pxor %xmm8,%xmm4 | ||
6418 | |||
6419 | # qhasm: xmm6 = xmm3 | ||
6420 | # asm 1: movdqa <xmm3=int6464#1,>xmm6=int6464#6 | ||
6421 | # asm 2: movdqa <xmm3=%xmm0,>xmm6=%xmm5 | ||
6422 | movdqa %xmm0,%xmm5 | ||
6423 | |||
6424 | # qhasm: xmm0 = xmm2 | ||
6425 | # asm 1: movdqa <xmm2=int6464#2,>xmm0=int6464#7 | ||
6426 | # asm 2: movdqa <xmm2=%xmm1,>xmm0=%xmm6 | ||
6427 | movdqa %xmm1,%xmm6 | ||
6428 | |||
6429 | # qhasm: xmm7 = xmm3 | ||
6430 | # asm 1: movdqa <xmm3=int6464#1,>xmm7=int6464#8 | ||
6431 | # asm 2: movdqa <xmm3=%xmm0,>xmm7=%xmm7 | ||
6432 | movdqa %xmm0,%xmm7 | ||
6433 | |||
6434 | # qhasm: xmm2 |= xmm1 | ||
6435 | # asm 1: por <xmm1=int6464#3,<xmm2=int6464#2 | ||
6436 | # asm 2: por <xmm1=%xmm2,<xmm2=%xmm1 | ||
6437 | por %xmm2,%xmm1 | ||
6438 | |||
6439 | # qhasm: xmm3 |= xmm4 | ||
6440 | # asm 1: por <xmm4=int6464#5,<xmm3=int6464#1 | ||
6441 | # asm 2: por <xmm4=%xmm4,<xmm3=%xmm0 | ||
6442 | por %xmm4,%xmm0 | ||
6443 | |||
6444 | # qhasm: xmm7 ^= xmm0 | ||
6445 | # asm 1: pxor <xmm0=int6464#7,<xmm7=int6464#8 | ||
6446 | # asm 2: pxor <xmm0=%xmm6,<xmm7=%xmm7 | ||
6447 | pxor %xmm6,%xmm7 | ||
6448 | |||
6449 | # qhasm: xmm6 &= xmm4 | ||
6450 | # asm 1: pand <xmm4=int6464#5,<xmm6=int6464#6 | ||
6451 | # asm 2: pand <xmm4=%xmm4,<xmm6=%xmm5 | ||
6452 | pand %xmm4,%xmm5 | ||
6453 | |||
6454 | # qhasm: xmm0 &= xmm1 | ||
6455 | # asm 1: pand <xmm1=int6464#3,<xmm0=int6464#7 | ||
6456 | # asm 2: pand <xmm1=%xmm2,<xmm0=%xmm6 | ||
6457 | pand %xmm2,%xmm6 | ||
6458 | |||
6459 | # qhasm: xmm4 ^= xmm1 | ||
6460 | # asm 1: pxor <xmm1=int6464#3,<xmm4=int6464#5 | ||
6461 | # asm 2: pxor <xmm1=%xmm2,<xmm4=%xmm4 | ||
6462 | pxor %xmm2,%xmm4 | ||
6463 | |||
6464 | # qhasm: xmm7 &= xmm4 | ||
6465 | # asm 1: pand <xmm4=int6464#5,<xmm7=int6464#8 | ||
6466 | # asm 2: pand <xmm4=%xmm4,<xmm7=%xmm7 | ||
6467 | pand %xmm4,%xmm7 | ||
6468 | |||
6469 | # qhasm: xmm4 = xmm11 | ||
6470 | # asm 1: movdqa <xmm11=int6464#12,>xmm4=int6464#3 | ||
6471 | # asm 2: movdqa <xmm11=%xmm11,>xmm4=%xmm2 | ||
6472 | movdqa %xmm11,%xmm2 | ||
6473 | |||
6474 | # qhasm: xmm4 ^= xmm8 | ||
6475 | # asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#3 | ||
6476 | # asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm2 | ||
6477 | pxor %xmm8,%xmm2 | ||
6478 | |||
6479 | # qhasm: xmm5 &= xmm4 | ||
6480 | # asm 1: pand <xmm4=int6464#3,<xmm5=int6464#4 | ||
6481 | # asm 2: pand <xmm4=%xmm2,<xmm5=%xmm3 | ||
6482 | pand %xmm2,%xmm3 | ||
6483 | |||
6484 | # qhasm: xmm3 ^= xmm5 | ||
6485 | # asm 1: pxor <xmm5=int6464#4,<xmm3=int6464#1 | ||
6486 | # asm 2: pxor <xmm5=%xmm3,<xmm3=%xmm0 | ||
6487 | pxor %xmm3,%xmm0 | ||
6488 | |||
6489 | # qhasm: xmm2 ^= xmm5 | ||
6490 | # asm 1: pxor <xmm5=int6464#4,<xmm2=int6464#2 | ||
6491 | # asm 2: pxor <xmm5=%xmm3,<xmm2=%xmm1 | ||
6492 | pxor %xmm3,%xmm1 | ||
6493 | |||
6494 | # qhasm: xmm5 = xmm15 | ||
6495 | # asm 1: movdqa <xmm15=int6464#16,>xmm5=int6464#3 | ||
6496 | # asm 2: movdqa <xmm15=%xmm15,>xmm5=%xmm2 | ||
6497 | movdqa %xmm15,%xmm2 | ||
6498 | |||
6499 | # qhasm: xmm5 ^= xmm9 | ||
6500 | # asm 1: pxor <xmm9=int6464#10,<xmm5=int6464#3 | ||
6501 | # asm 2: pxor <xmm9=%xmm9,<xmm5=%xmm2 | ||
6502 | pxor %xmm9,%xmm2 | ||
6503 | |||
6504 | # qhasm: xmm4 = xmm13 | ||
6505 | # asm 1: movdqa <xmm13=int6464#14,>xmm4=int6464#4 | ||
6506 | # asm 2: movdqa <xmm13=%xmm13,>xmm4=%xmm3 | ||
6507 | movdqa %xmm13,%xmm3 | ||
6508 | |||
6509 | # qhasm: xmm1 = xmm5 | ||
6510 | # asm 1: movdqa <xmm5=int6464#3,>xmm1=int6464#5 | ||
6511 | # asm 2: movdqa <xmm5=%xmm2,>xmm1=%xmm4 | ||
6512 | movdqa %xmm2,%xmm4 | ||
6513 | |||
6514 | # qhasm: xmm4 ^= xmm14 | ||
6515 | # asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#4 | ||
6516 | # asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm3 | ||
6517 | pxor %xmm14,%xmm3 | ||
6518 | |||
6519 | # qhasm: xmm1 |= xmm4 | ||
6520 | # asm 1: por <xmm4=int6464#4,<xmm1=int6464#5 | ||
6521 | # asm 2: por <xmm4=%xmm3,<xmm1=%xmm4 | ||
6522 | por %xmm3,%xmm4 | ||
6523 | |||
6524 | # qhasm: xmm5 &= xmm4 | ||
6525 | # asm 1: pand <xmm4=int6464#4,<xmm5=int6464#3 | ||
6526 | # asm 2: pand <xmm4=%xmm3,<xmm5=%xmm2 | ||
6527 | pand %xmm3,%xmm2 | ||
6528 | |||
6529 | # qhasm: xmm0 ^= xmm5 | ||
6530 | # asm 1: pxor <xmm5=int6464#3,<xmm0=int6464#7 | ||
6531 | # asm 2: pxor <xmm5=%xmm2,<xmm0=%xmm6 | ||
6532 | pxor %xmm2,%xmm6 | ||
6533 | |||
6534 | # qhasm: xmm3 ^= xmm7 | ||
6535 | # asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#1 | ||
6536 | # asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm0 | ||
6537 | pxor %xmm7,%xmm0 | ||
6538 | |||
6539 | # qhasm: xmm2 ^= xmm6 | ||
6540 | # asm 1: pxor <xmm6=int6464#6,<xmm2=int6464#2 | ||
6541 | # asm 2: pxor <xmm6=%xmm5,<xmm2=%xmm1 | ||
6542 | pxor %xmm5,%xmm1 | ||
6543 | |||
6544 | # qhasm: xmm1 ^= xmm7 | ||
6545 | # asm 1: pxor <xmm7=int6464#8,<xmm1=int6464#5 | ||
6546 | # asm 2: pxor <xmm7=%xmm7,<xmm1=%xmm4 | ||
6547 | pxor %xmm7,%xmm4 | ||
6548 | |||
6549 | # qhasm: xmm0 ^= xmm6 | ||
6550 | # asm 1: pxor <xmm6=int6464#6,<xmm0=int6464#7 | ||
6551 | # asm 2: pxor <xmm6=%xmm5,<xmm0=%xmm6 | ||
6552 | pxor %xmm5,%xmm6 | ||
6553 | |||
6554 | # qhasm: xmm1 ^= xmm6 | ||
6555 | # asm 1: pxor <xmm6=int6464#6,<xmm1=int6464#5 | ||
6556 | # asm 2: pxor <xmm6=%xmm5,<xmm1=%xmm4 | ||
6557 | pxor %xmm5,%xmm4 | ||
6558 | |||
6559 | # qhasm: xmm4 = xmm10 | ||
6560 | # asm 1: movdqa <xmm10=int6464#11,>xmm4=int6464#3 | ||
6561 | # asm 2: movdqa <xmm10=%xmm10,>xmm4=%xmm2 | ||
6562 | movdqa %xmm10,%xmm2 | ||
6563 | |||
6564 | # qhasm: xmm5 = xmm12 | ||
6565 | # asm 1: movdqa <xmm12=int6464#13,>xmm5=int6464#4 | ||
6566 | # asm 2: movdqa <xmm12=%xmm12,>xmm5=%xmm3 | ||
6567 | movdqa %xmm12,%xmm3 | ||
6568 | |||
6569 | # qhasm: xmm6 = xmm9 | ||
6570 | # asm 1: movdqa <xmm9=int6464#10,>xmm6=int6464#6 | ||
6571 | # asm 2: movdqa <xmm9=%xmm9,>xmm6=%xmm5 | ||
6572 | movdqa %xmm9,%xmm5 | ||
6573 | |||
6574 | # qhasm: xmm7 = xmm15 | ||
6575 | # asm 1: movdqa <xmm15=int6464#16,>xmm7=int6464#8 | ||
6576 | # asm 2: movdqa <xmm15=%xmm15,>xmm7=%xmm7 | ||
6577 | movdqa %xmm15,%xmm7 | ||
6578 | |||
6579 | # qhasm: xmm4 &= xmm11 | ||
6580 | # asm 1: pand <xmm11=int6464#12,<xmm4=int6464#3 | ||
6581 | # asm 2: pand <xmm11=%xmm11,<xmm4=%xmm2 | ||
6582 | pand %xmm11,%xmm2 | ||
6583 | |||
6584 | # qhasm: xmm5 &= xmm8 | ||
6585 | # asm 1: pand <xmm8=int6464#9,<xmm5=int6464#4 | ||
6586 | # asm 2: pand <xmm8=%xmm8,<xmm5=%xmm3 | ||
6587 | pand %xmm8,%xmm3 | ||
6588 | |||
6589 | # qhasm: xmm6 &= xmm13 | ||
6590 | # asm 1: pand <xmm13=int6464#14,<xmm6=int6464#6 | ||
6591 | # asm 2: pand <xmm13=%xmm13,<xmm6=%xmm5 | ||
6592 | pand %xmm13,%xmm5 | ||
6593 | |||
6594 | # qhasm: xmm7 |= xmm14 | ||
6595 | # asm 1: por <xmm14=int6464#15,<xmm7=int6464#8 | ||
6596 | # asm 2: por <xmm14=%xmm14,<xmm7=%xmm7 | ||
6597 | por %xmm14,%xmm7 | ||
6598 | |||
6599 | # qhasm: xmm3 ^= xmm4 | ||
6600 | # asm 1: pxor <xmm4=int6464#3,<xmm3=int6464#1 | ||
6601 | # asm 2: pxor <xmm4=%xmm2,<xmm3=%xmm0 | ||
6602 | pxor %xmm2,%xmm0 | ||
6603 | |||
6604 | # qhasm: xmm2 ^= xmm5 | ||
6605 | # asm 1: pxor <xmm5=int6464#4,<xmm2=int6464#2 | ||
6606 | # asm 2: pxor <xmm5=%xmm3,<xmm2=%xmm1 | ||
6607 | pxor %xmm3,%xmm1 | ||
6608 | |||
6609 | # qhasm: xmm1 ^= xmm6 | ||
6610 | # asm 1: pxor <xmm6=int6464#6,<xmm1=int6464#5 | ||
6611 | # asm 2: pxor <xmm6=%xmm5,<xmm1=%xmm4 | ||
6612 | pxor %xmm5,%xmm4 | ||
6613 | |||
6614 | # qhasm: xmm0 ^= xmm7 | ||
6615 | # asm 1: pxor <xmm7=int6464#8,<xmm0=int6464#7 | ||
6616 | # asm 2: pxor <xmm7=%xmm7,<xmm0=%xmm6 | ||
6617 | pxor %xmm7,%xmm6 | ||
6618 | |||
6619 | # qhasm: xmm4 = xmm3 | ||
6620 | # asm 1: movdqa <xmm3=int6464#1,>xmm4=int6464#3 | ||
6621 | # asm 2: movdqa <xmm3=%xmm0,>xmm4=%xmm2 | ||
6622 | movdqa %xmm0,%xmm2 | ||
6623 | |||
6624 | # qhasm: xmm4 ^= xmm2 | ||
6625 | # asm 1: pxor <xmm2=int6464#2,<xmm4=int6464#3 | ||
6626 | # asm 2: pxor <xmm2=%xmm1,<xmm4=%xmm2 | ||
6627 | pxor %xmm1,%xmm2 | ||
6628 | |||
6629 | # qhasm: xmm3 &= xmm1 | ||
6630 | # asm 1: pand <xmm1=int6464#5,<xmm3=int6464#1 | ||
6631 | # asm 2: pand <xmm1=%xmm4,<xmm3=%xmm0 | ||
6632 | pand %xmm4,%xmm0 | ||
6633 | |||
6634 | # qhasm: xmm6 = xmm0 | ||
6635 | # asm 1: movdqa <xmm0=int6464#7,>xmm6=int6464#4 | ||
6636 | # asm 2: movdqa <xmm0=%xmm6,>xmm6=%xmm3 | ||
6637 | movdqa %xmm6,%xmm3 | ||
6638 | |||
6639 | # qhasm: xmm6 ^= xmm3 | ||
6640 | # asm 1: pxor <xmm3=int6464#1,<xmm6=int6464#4 | ||
6641 | # asm 2: pxor <xmm3=%xmm0,<xmm6=%xmm3 | ||
6642 | pxor %xmm0,%xmm3 | ||
6643 | |||
6644 | # qhasm: xmm7 = xmm4 | ||
6645 | # asm 1: movdqa <xmm4=int6464#3,>xmm7=int6464#6 | ||
6646 | # asm 2: movdqa <xmm4=%xmm2,>xmm7=%xmm5 | ||
6647 | movdqa %xmm2,%xmm5 | ||
6648 | |||
6649 | # qhasm: xmm7 &= xmm6 | ||
6650 | # asm 1: pand <xmm6=int6464#4,<xmm7=int6464#6 | ||
6651 | # asm 2: pand <xmm6=%xmm3,<xmm7=%xmm5 | ||
6652 | pand %xmm3,%xmm5 | ||
6653 | |||
6654 | # qhasm: xmm7 ^= xmm2 | ||
6655 | # asm 1: pxor <xmm2=int6464#2,<xmm7=int6464#6 | ||
6656 | # asm 2: pxor <xmm2=%xmm1,<xmm7=%xmm5 | ||
6657 | pxor %xmm1,%xmm5 | ||
6658 | |||
6659 | # qhasm: xmm5 = xmm1 | ||
6660 | # asm 1: movdqa <xmm1=int6464#5,>xmm5=int6464#8 | ||
6661 | # asm 2: movdqa <xmm1=%xmm4,>xmm5=%xmm7 | ||
6662 | movdqa %xmm4,%xmm7 | ||
6663 | |||
6664 | # qhasm: xmm5 ^= xmm0 | ||
6665 | # asm 1: pxor <xmm0=int6464#7,<xmm5=int6464#8 | ||
6666 | # asm 2: pxor <xmm0=%xmm6,<xmm5=%xmm7 | ||
6667 | pxor %xmm6,%xmm7 | ||
6668 | |||
6669 | # qhasm: xmm3 ^= xmm2 | ||
6670 | # asm 1: pxor <xmm2=int6464#2,<xmm3=int6464#1 | ||
6671 | # asm 2: pxor <xmm2=%xmm1,<xmm3=%xmm0 | ||
6672 | pxor %xmm1,%xmm0 | ||
6673 | |||
6674 | # qhasm: xmm5 &= xmm3 | ||
6675 | # asm 1: pand <xmm3=int6464#1,<xmm5=int6464#8 | ||
6676 | # asm 2: pand <xmm3=%xmm0,<xmm5=%xmm7 | ||
6677 | pand %xmm0,%xmm7 | ||
6678 | |||
6679 | # qhasm: xmm5 ^= xmm0 | ||
6680 | # asm 1: pxor <xmm0=int6464#7,<xmm5=int6464#8 | ||
6681 | # asm 2: pxor <xmm0=%xmm6,<xmm5=%xmm7 | ||
6682 | pxor %xmm6,%xmm7 | ||
6683 | |||
6684 | # qhasm: xmm1 ^= xmm5 | ||
6685 | # asm 1: pxor <xmm5=int6464#8,<xmm1=int6464#5 | ||
6686 | # asm 2: pxor <xmm5=%xmm7,<xmm1=%xmm4 | ||
6687 | pxor %xmm7,%xmm4 | ||
6688 | |||
6689 | # qhasm: xmm2 = xmm6 | ||
6690 | # asm 1: movdqa <xmm6=int6464#4,>xmm2=int6464#1 | ||
6691 | # asm 2: movdqa <xmm6=%xmm3,>xmm2=%xmm0 | ||
6692 | movdqa %xmm3,%xmm0 | ||
6693 | |||
6694 | # qhasm: xmm2 ^= xmm5 | ||
6695 | # asm 1: pxor <xmm5=int6464#8,<xmm2=int6464#1 | ||
6696 | # asm 2: pxor <xmm5=%xmm7,<xmm2=%xmm0 | ||
6697 | pxor %xmm7,%xmm0 | ||
6698 | |||
6699 | # qhasm: xmm2 &= xmm0 | ||
6700 | # asm 1: pand <xmm0=int6464#7,<xmm2=int6464#1 | ||
6701 | # asm 2: pand <xmm0=%xmm6,<xmm2=%xmm0 | ||
6702 | pand %xmm6,%xmm0 | ||
6703 | |||
6704 | # qhasm: xmm1 ^= xmm2 | ||
6705 | # asm 1: pxor <xmm2=int6464#1,<xmm1=int6464#5 | ||
6706 | # asm 2: pxor <xmm2=%xmm0,<xmm1=%xmm4 | ||
6707 | pxor %xmm0,%xmm4 | ||
6708 | |||
6709 | # qhasm: xmm6 ^= xmm2 | ||
6710 | # asm 1: pxor <xmm2=int6464#1,<xmm6=int6464#4 | ||
6711 | # asm 2: pxor <xmm2=%xmm0,<xmm6=%xmm3 | ||
6712 | pxor %xmm0,%xmm3 | ||
6713 | |||
6714 | # qhasm: xmm6 &= xmm7 | ||
6715 | # asm 1: pand <xmm7=int6464#6,<xmm6=int6464#4 | ||
6716 | # asm 2: pand <xmm7=%xmm5,<xmm6=%xmm3 | ||
6717 | pand %xmm5,%xmm3 | ||
6718 | |||
6719 | # qhasm: xmm6 ^= xmm4 | ||
6720 | # asm 1: pxor <xmm4=int6464#3,<xmm6=int6464#4 | ||
6721 | # asm 2: pxor <xmm4=%xmm2,<xmm6=%xmm3 | ||
6722 | pxor %xmm2,%xmm3 | ||
6723 | |||
6724 | # qhasm: xmm4 = xmm14 | ||
6725 | # asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#1 | ||
6726 | # asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm0 | ||
6727 | movdqa %xmm14,%xmm0 | ||
6728 | |||
6729 | # qhasm: xmm0 = xmm13 | ||
6730 | # asm 1: movdqa <xmm13=int6464#14,>xmm0=int6464#2 | ||
6731 | # asm 2: movdqa <xmm13=%xmm13,>xmm0=%xmm1 | ||
6732 | movdqa %xmm13,%xmm1 | ||
6733 | |||
6734 | # qhasm: xmm2 = xmm7 | ||
6735 | # asm 1: movdqa <xmm7=int6464#6,>xmm2=int6464#3 | ||
6736 | # asm 2: movdqa <xmm7=%xmm5,>xmm2=%xmm2 | ||
6737 | movdqa %xmm5,%xmm2 | ||
6738 | |||
6739 | # qhasm: xmm2 ^= xmm6 | ||
6740 | # asm 1: pxor <xmm6=int6464#4,<xmm2=int6464#3 | ||
6741 | # asm 2: pxor <xmm6=%xmm3,<xmm2=%xmm2 | ||
6742 | pxor %xmm3,%xmm2 | ||
6743 | |||
6744 | # qhasm: xmm2 &= xmm14 | ||
6745 | # asm 1: pand <xmm14=int6464#15,<xmm2=int6464#3 | ||
6746 | # asm 2: pand <xmm14=%xmm14,<xmm2=%xmm2 | ||
6747 | pand %xmm14,%xmm2 | ||
6748 | |||
6749 | # qhasm: xmm14 ^= xmm13 | ||
6750 | # asm 1: pxor <xmm13=int6464#14,<xmm14=int6464#15 | ||
6751 | # asm 2: pxor <xmm13=%xmm13,<xmm14=%xmm14 | ||
6752 | pxor %xmm13,%xmm14 | ||
6753 | |||
6754 | # qhasm: xmm14 &= xmm6 | ||
6755 | # asm 1: pand <xmm6=int6464#4,<xmm14=int6464#15 | ||
6756 | # asm 2: pand <xmm6=%xmm3,<xmm14=%xmm14 | ||
6757 | pand %xmm3,%xmm14 | ||
6758 | |||
6759 | # qhasm: xmm13 &= xmm7 | ||
6760 | # asm 1: pand <xmm7=int6464#6,<xmm13=int6464#14 | ||
6761 | # asm 2: pand <xmm7=%xmm5,<xmm13=%xmm13 | ||
6762 | pand %xmm5,%xmm13 | ||
6763 | |||
6764 | # qhasm: xmm14 ^= xmm13 | ||
6765 | # asm 1: pxor <xmm13=int6464#14,<xmm14=int6464#15 | ||
6766 | # asm 2: pxor <xmm13=%xmm13,<xmm14=%xmm14 | ||
6767 | pxor %xmm13,%xmm14 | ||
6768 | |||
6769 | # qhasm: xmm13 ^= xmm2 | ||
6770 | # asm 1: pxor <xmm2=int6464#3,<xmm13=int6464#14 | ||
6771 | # asm 2: pxor <xmm2=%xmm2,<xmm13=%xmm13 | ||
6772 | pxor %xmm2,%xmm13 | ||
6773 | |||
6774 | # qhasm: xmm4 ^= xmm8 | ||
6775 | # asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#1 | ||
6776 | # asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm0 | ||
6777 | pxor %xmm8,%xmm0 | ||
6778 | |||
6779 | # qhasm: xmm0 ^= xmm11 | ||
6780 | # asm 1: pxor <xmm11=int6464#12,<xmm0=int6464#2 | ||
6781 | # asm 2: pxor <xmm11=%xmm11,<xmm0=%xmm1 | ||
6782 | pxor %xmm11,%xmm1 | ||
6783 | |||
6784 | # qhasm: xmm7 ^= xmm5 | ||
6785 | # asm 1: pxor <xmm5=int6464#8,<xmm7=int6464#6 | ||
6786 | # asm 2: pxor <xmm5=%xmm7,<xmm7=%xmm5 | ||
6787 | pxor %xmm7,%xmm5 | ||
6788 | |||
6789 | # qhasm: xmm6 ^= xmm1 | ||
6790 | # asm 1: pxor <xmm1=int6464#5,<xmm6=int6464#4 | ||
6791 | # asm 2: pxor <xmm1=%xmm4,<xmm6=%xmm3 | ||
6792 | pxor %xmm4,%xmm3 | ||
6793 | |||
6794 | # qhasm: xmm3 = xmm7 | ||
6795 | # asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3 | ||
6796 | # asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2 | ||
6797 | movdqa %xmm5,%xmm2 | ||
6798 | |||
6799 | # qhasm: xmm3 ^= xmm6 | ||
6800 | # asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3 | ||
6801 | # asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2 | ||
6802 | pxor %xmm3,%xmm2 | ||
6803 | |||
6804 | # qhasm: xmm3 &= xmm4 | ||
6805 | # asm 1: pand <xmm4=int6464#1,<xmm3=int6464#3 | ||
6806 | # asm 2: pand <xmm4=%xmm0,<xmm3=%xmm2 | ||
6807 | pand %xmm0,%xmm2 | ||
6808 | |||
6809 | # qhasm: xmm4 ^= xmm0 | ||
6810 | # asm 1: pxor <xmm0=int6464#2,<xmm4=int6464#1 | ||
6811 | # asm 2: pxor <xmm0=%xmm1,<xmm4=%xmm0 | ||
6812 | pxor %xmm1,%xmm0 | ||
6813 | |||
6814 | # qhasm: xmm4 &= xmm6 | ||
6815 | # asm 1: pand <xmm6=int6464#4,<xmm4=int6464#1 | ||
6816 | # asm 2: pand <xmm6=%xmm3,<xmm4=%xmm0 | ||
6817 | pand %xmm3,%xmm0 | ||
6818 | |||
6819 | # qhasm: xmm0 &= xmm7 | ||
6820 | # asm 1: pand <xmm7=int6464#6,<xmm0=int6464#2 | ||
6821 | # asm 2: pand <xmm7=%xmm5,<xmm0=%xmm1 | ||
6822 | pand %xmm5,%xmm1 | ||
6823 | |||
6824 | # qhasm: xmm0 ^= xmm4 | ||
6825 | # asm 1: pxor <xmm4=int6464#1,<xmm0=int6464#2 | ||
6826 | # asm 2: pxor <xmm4=%xmm0,<xmm0=%xmm1 | ||
6827 | pxor %xmm0,%xmm1 | ||
6828 | |||
6829 | # qhasm: xmm4 ^= xmm3 | ||
6830 | # asm 1: pxor <xmm3=int6464#3,<xmm4=int6464#1 | ||
6831 | # asm 2: pxor <xmm3=%xmm2,<xmm4=%xmm0 | ||
6832 | pxor %xmm2,%xmm0 | ||
6833 | |||
6834 | # qhasm: xmm2 = xmm5 | ||
6835 | # asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3 | ||
6836 | # asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2 | ||
6837 | movdqa %xmm7,%xmm2 | ||
6838 | |||
6839 | # qhasm: xmm2 ^= xmm1 | ||
6840 | # asm 1: pxor <xmm1=int6464#5,<xmm2=int6464#3 | ||
6841 | # asm 2: pxor <xmm1=%xmm4,<xmm2=%xmm2 | ||
6842 | pxor %xmm4,%xmm2 | ||
6843 | |||
6844 | # qhasm: xmm2 &= xmm8 | ||
6845 | # asm 1: pand <xmm8=int6464#9,<xmm2=int6464#3 | ||
6846 | # asm 2: pand <xmm8=%xmm8,<xmm2=%xmm2 | ||
6847 | pand %xmm8,%xmm2 | ||
6848 | |||
6849 | # qhasm: xmm8 ^= xmm11 | ||
6850 | # asm 1: pxor <xmm11=int6464#12,<xmm8=int6464#9 | ||
6851 | # asm 2: pxor <xmm11=%xmm11,<xmm8=%xmm8 | ||
6852 | pxor %xmm11,%xmm8 | ||
6853 | |||
6854 | # qhasm: xmm8 &= xmm1 | ||
6855 | # asm 1: pand <xmm1=int6464#5,<xmm8=int6464#9 | ||
6856 | # asm 2: pand <xmm1=%xmm4,<xmm8=%xmm8 | ||
6857 | pand %xmm4,%xmm8 | ||
6858 | |||
6859 | # qhasm: xmm11 &= xmm5 | ||
6860 | # asm 1: pand <xmm5=int6464#8,<xmm11=int6464#12 | ||
6861 | # asm 2: pand <xmm5=%xmm7,<xmm11=%xmm11 | ||
6862 | pand %xmm7,%xmm11 | ||
6863 | |||
6864 | # qhasm: xmm8 ^= xmm11 | ||
6865 | # asm 1: pxor <xmm11=int6464#12,<xmm8=int6464#9 | ||
6866 | # asm 2: pxor <xmm11=%xmm11,<xmm8=%xmm8 | ||
6867 | pxor %xmm11,%xmm8 | ||
6868 | |||
6869 | # qhasm: xmm11 ^= xmm2 | ||
6870 | # asm 1: pxor <xmm2=int6464#3,<xmm11=int6464#12 | ||
6871 | # asm 2: pxor <xmm2=%xmm2,<xmm11=%xmm11 | ||
6872 | pxor %xmm2,%xmm11 | ||
6873 | |||
6874 | # qhasm: xmm14 ^= xmm4 | ||
6875 | # asm 1: pxor <xmm4=int6464#1,<xmm14=int6464#15 | ||
6876 | # asm 2: pxor <xmm4=%xmm0,<xmm14=%xmm14 | ||
6877 | pxor %xmm0,%xmm14 | ||
6878 | |||
6879 | # qhasm: xmm8 ^= xmm4 | ||
6880 | # asm 1: pxor <xmm4=int6464#1,<xmm8=int6464#9 | ||
6881 | # asm 2: pxor <xmm4=%xmm0,<xmm8=%xmm8 | ||
6882 | pxor %xmm0,%xmm8 | ||
6883 | |||
6884 | # qhasm: xmm13 ^= xmm0 | ||
6885 | # asm 1: pxor <xmm0=int6464#2,<xmm13=int6464#14 | ||
6886 | # asm 2: pxor <xmm0=%xmm1,<xmm13=%xmm13 | ||
6887 | pxor %xmm1,%xmm13 | ||
6888 | |||
6889 | # qhasm: xmm11 ^= xmm0 | ||
6890 | # asm 1: pxor <xmm0=int6464#2,<xmm11=int6464#12 | ||
6891 | # asm 2: pxor <xmm0=%xmm1,<xmm11=%xmm11 | ||
6892 | pxor %xmm1,%xmm11 | ||
6893 | |||
6894 | # qhasm: xmm4 = xmm15 | ||
6895 | # asm 1: movdqa <xmm15=int6464#16,>xmm4=int6464#1 | ||
6896 | # asm 2: movdqa <xmm15=%xmm15,>xmm4=%xmm0 | ||
6897 | movdqa %xmm15,%xmm0 | ||
6898 | |||
6899 | # qhasm: xmm0 = xmm9 | ||
6900 | # asm 1: movdqa <xmm9=int6464#10,>xmm0=int6464#2 | ||
6901 | # asm 2: movdqa <xmm9=%xmm9,>xmm0=%xmm1 | ||
6902 | movdqa %xmm9,%xmm1 | ||
6903 | |||
6904 | # qhasm: xmm4 ^= xmm12 | ||
6905 | # asm 1: pxor <xmm12=int6464#13,<xmm4=int6464#1 | ||
6906 | # asm 2: pxor <xmm12=%xmm12,<xmm4=%xmm0 | ||
6907 | pxor %xmm12,%xmm0 | ||
6908 | |||
6909 | # qhasm: xmm0 ^= xmm10 | ||
6910 | # asm 1: pxor <xmm10=int6464#11,<xmm0=int6464#2 | ||
6911 | # asm 2: pxor <xmm10=%xmm10,<xmm0=%xmm1 | ||
6912 | pxor %xmm10,%xmm1 | ||
6913 | |||
6914 | # qhasm: xmm3 = xmm7 | ||
6915 | # asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3 | ||
6916 | # asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2 | ||
6917 | movdqa %xmm5,%xmm2 | ||
6918 | |||
6919 | # qhasm: xmm3 ^= xmm6 | ||
6920 | # asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3 | ||
6921 | # asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2 | ||
6922 | pxor %xmm3,%xmm2 | ||
6923 | |||
6924 | # qhasm: xmm3 &= xmm4 | ||
6925 | # asm 1: pand <xmm4=int6464#1,<xmm3=int6464#3 | ||
6926 | # asm 2: pand <xmm4=%xmm0,<xmm3=%xmm2 | ||
6927 | pand %xmm0,%xmm2 | ||
6928 | |||
6929 | # qhasm: xmm4 ^= xmm0 | ||
6930 | # asm 1: pxor <xmm0=int6464#2,<xmm4=int6464#1 | ||
6931 | # asm 2: pxor <xmm0=%xmm1,<xmm4=%xmm0 | ||
6932 | pxor %xmm1,%xmm0 | ||
6933 | |||
6934 | # qhasm: xmm4 &= xmm6 | ||
6935 | # asm 1: pand <xmm6=int6464#4,<xmm4=int6464#1 | ||
6936 | # asm 2: pand <xmm6=%xmm3,<xmm4=%xmm0 | ||
6937 | pand %xmm3,%xmm0 | ||
6938 | |||
6939 | # qhasm: xmm0 &= xmm7 | ||
6940 | # asm 1: pand <xmm7=int6464#6,<xmm0=int6464#2 | ||
6941 | # asm 2: pand <xmm7=%xmm5,<xmm0=%xmm1 | ||
6942 | pand %xmm5,%xmm1 | ||
6943 | |||
6944 | # qhasm: xmm0 ^= xmm4 | ||
6945 | # asm 1: pxor <xmm4=int6464#1,<xmm0=int6464#2 | ||
6946 | # asm 2: pxor <xmm4=%xmm0,<xmm0=%xmm1 | ||
6947 | pxor %xmm0,%xmm1 | ||
6948 | |||
6949 | # qhasm: xmm4 ^= xmm3 | ||
6950 | # asm 1: pxor <xmm3=int6464#3,<xmm4=int6464#1 | ||
6951 | # asm 2: pxor <xmm3=%xmm2,<xmm4=%xmm0 | ||
6952 | pxor %xmm2,%xmm0 | ||
6953 | |||
6954 | # qhasm: xmm2 = xmm5 | ||
6955 | # asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3 | ||
6956 | # asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2 | ||
6957 | movdqa %xmm7,%xmm2 | ||
6958 | |||
6959 | # qhasm: xmm2 ^= xmm1 | ||
6960 | # asm 1: pxor <xmm1=int6464#5,<xmm2=int6464#3 | ||
6961 | # asm 2: pxor <xmm1=%xmm4,<xmm2=%xmm2 | ||
6962 | pxor %xmm4,%xmm2 | ||
6963 | |||
6964 | # qhasm: xmm2 &= xmm12 | ||
6965 | # asm 1: pand <xmm12=int6464#13,<xmm2=int6464#3 | ||
6966 | # asm 2: pand <xmm12=%xmm12,<xmm2=%xmm2 | ||
6967 | pand %xmm12,%xmm2 | ||
6968 | |||
6969 | # qhasm: xmm12 ^= xmm10 | ||
6970 | # asm 1: pxor <xmm10=int6464#11,<xmm12=int6464#13 | ||
6971 | # asm 2: pxor <xmm10=%xmm10,<xmm12=%xmm12 | ||
6972 | pxor %xmm10,%xmm12 | ||
6973 | |||
6974 | # qhasm: xmm12 &= xmm1 | ||
6975 | # asm 1: pand <xmm1=int6464#5,<xmm12=int6464#13 | ||
6976 | # asm 2: pand <xmm1=%xmm4,<xmm12=%xmm12 | ||
6977 | pand %xmm4,%xmm12 | ||
6978 | |||
6979 | # qhasm: xmm10 &= xmm5 | ||
6980 | # asm 1: pand <xmm5=int6464#8,<xmm10=int6464#11 | ||
6981 | # asm 2: pand <xmm5=%xmm7,<xmm10=%xmm10 | ||
6982 | pand %xmm7,%xmm10 | ||
6983 | |||
6984 | # qhasm: xmm12 ^= xmm10 | ||
6985 | # asm 1: pxor <xmm10=int6464#11,<xmm12=int6464#13 | ||
6986 | # asm 2: pxor <xmm10=%xmm10,<xmm12=%xmm12 | ||
6987 | pxor %xmm10,%xmm12 | ||
6988 | |||
6989 | # qhasm: xmm10 ^= xmm2 | ||
6990 | # asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#11 | ||
6991 | # asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm10 | ||
6992 | pxor %xmm2,%xmm10 | ||
6993 | |||
6994 | # qhasm: xmm7 ^= xmm5 | ||
6995 | # asm 1: pxor <xmm5=int6464#8,<xmm7=int6464#6 | ||
6996 | # asm 2: pxor <xmm5=%xmm7,<xmm7=%xmm5 | ||
6997 | pxor %xmm7,%xmm5 | ||
6998 | |||
6999 | # qhasm: xmm6 ^= xmm1 | ||
7000 | # asm 1: pxor <xmm1=int6464#5,<xmm6=int6464#4 | ||
7001 | # asm 2: pxor <xmm1=%xmm4,<xmm6=%xmm3 | ||
7002 | pxor %xmm4,%xmm3 | ||
7003 | |||
7004 | # qhasm: xmm3 = xmm7 | ||
7005 | # asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3 | ||
7006 | # asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2 | ||
7007 | movdqa %xmm5,%xmm2 | ||
7008 | |||
7009 | # qhasm: xmm3 ^= xmm6 | ||
7010 | # asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3 | ||
7011 | # asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2 | ||
7012 | pxor %xmm3,%xmm2 | ||
7013 | |||
7014 | # qhasm: xmm3 &= xmm15 | ||
7015 | # asm 1: pand <xmm15=int6464#16,<xmm3=int6464#3 | ||
7016 | # asm 2: pand <xmm15=%xmm15,<xmm3=%xmm2 | ||
7017 | pand %xmm15,%xmm2 | ||
7018 | |||
7019 | # qhasm: xmm15 ^= xmm9 | ||
7020 | # asm 1: pxor <xmm9=int6464#10,<xmm15=int6464#16 | ||
7021 | # asm 2: pxor <xmm9=%xmm9,<xmm15=%xmm15 | ||
7022 | pxor %xmm9,%xmm15 | ||
7023 | |||
7024 | # qhasm: xmm15 &= xmm6 | ||
7025 | # asm 1: pand <xmm6=int6464#4,<xmm15=int6464#16 | ||
7026 | # asm 2: pand <xmm6=%xmm3,<xmm15=%xmm15 | ||
7027 | pand %xmm3,%xmm15 | ||
7028 | |||
7029 | # qhasm: xmm9 &= xmm7 | ||
7030 | # asm 1: pand <xmm7=int6464#6,<xmm9=int6464#10 | ||
7031 | # asm 2: pand <xmm7=%xmm5,<xmm9=%xmm9 | ||
7032 | pand %xmm5,%xmm9 | ||
7033 | |||
7034 | # qhasm: xmm15 ^= xmm9 | ||
7035 | # asm 1: pxor <xmm9=int6464#10,<xmm15=int6464#16 | ||
7036 | # asm 2: pxor <xmm9=%xmm9,<xmm15=%xmm15 | ||
7037 | pxor %xmm9,%xmm15 | ||
7038 | |||
7039 | # qhasm: xmm9 ^= xmm3 | ||
7040 | # asm 1: pxor <xmm3=int6464#3,<xmm9=int6464#10 | ||
7041 | # asm 2: pxor <xmm3=%xmm2,<xmm9=%xmm9 | ||
7042 | pxor %xmm2,%xmm9 | ||
7043 | |||
7044 | # qhasm: xmm15 ^= xmm4 | ||
7045 | # asm 1: pxor <xmm4=int6464#1,<xmm15=int6464#16 | ||
7046 | # asm 2: pxor <xmm4=%xmm0,<xmm15=%xmm15 | ||
7047 | pxor %xmm0,%xmm15 | ||
7048 | |||
7049 | # qhasm: xmm12 ^= xmm4 | ||
7050 | # asm 1: pxor <xmm4=int6464#1,<xmm12=int6464#13 | ||
7051 | # asm 2: pxor <xmm4=%xmm0,<xmm12=%xmm12 | ||
7052 | pxor %xmm0,%xmm12 | ||
7053 | |||
7054 | # qhasm: xmm9 ^= xmm0 | ||
7055 | # asm 1: pxor <xmm0=int6464#2,<xmm9=int6464#10 | ||
7056 | # asm 2: pxor <xmm0=%xmm1,<xmm9=%xmm9 | ||
7057 | pxor %xmm1,%xmm9 | ||
7058 | |||
7059 | # qhasm: xmm10 ^= xmm0 | ||
7060 | # asm 1: pxor <xmm0=int6464#2,<xmm10=int6464#11 | ||
7061 | # asm 2: pxor <xmm0=%xmm1,<xmm10=%xmm10 | ||
7062 | pxor %xmm1,%xmm10 | ||
7063 | |||
7064 | # qhasm: xmm15 ^= xmm8 | ||
7065 | # asm 1: pxor <xmm8=int6464#9,<xmm15=int6464#16 | ||
7066 | # asm 2: pxor <xmm8=%xmm8,<xmm15=%xmm15 | ||
7067 | pxor %xmm8,%xmm15 | ||
7068 | |||
7069 | # qhasm: xmm9 ^= xmm14 | ||
7070 | # asm 1: pxor <xmm14=int6464#15,<xmm9=int6464#10 | ||
7071 | # asm 2: pxor <xmm14=%xmm14,<xmm9=%xmm9 | ||
7072 | pxor %xmm14,%xmm9 | ||
7073 | |||
7074 | # qhasm: xmm12 ^= xmm15 | ||
7075 | # asm 1: pxor <xmm15=int6464#16,<xmm12=int6464#13 | ||
7076 | # asm 2: pxor <xmm15=%xmm15,<xmm12=%xmm12 | ||
7077 | pxor %xmm15,%xmm12 | ||
7078 | |||
7079 | # qhasm: xmm14 ^= xmm8 | ||
7080 | # asm 1: pxor <xmm8=int6464#9,<xmm14=int6464#15 | ||
7081 | # asm 2: pxor <xmm8=%xmm8,<xmm14=%xmm14 | ||
7082 | pxor %xmm8,%xmm14 | ||
7083 | |||
7084 | # qhasm: xmm8 ^= xmm9 | ||
7085 | # asm 1: pxor <xmm9=int6464#10,<xmm8=int6464#9 | ||
7086 | # asm 2: pxor <xmm9=%xmm9,<xmm8=%xmm8 | ||
7087 | pxor %xmm9,%xmm8 | ||
7088 | |||
7089 | # qhasm: xmm9 ^= xmm13 | ||
7090 | # asm 1: pxor <xmm13=int6464#14,<xmm9=int6464#10 | ||
7091 | # asm 2: pxor <xmm13=%xmm13,<xmm9=%xmm9 | ||
7092 | pxor %xmm13,%xmm9 | ||
7093 | |||
7094 | # qhasm: xmm13 ^= xmm10 | ||
7095 | # asm 1: pxor <xmm10=int6464#11,<xmm13=int6464#14 | ||
7096 | # asm 2: pxor <xmm10=%xmm10,<xmm13=%xmm13 | ||
7097 | pxor %xmm10,%xmm13 | ||
7098 | |||
7099 | # qhasm: xmm12 ^= xmm13 | ||
7100 | # asm 1: pxor <xmm13=int6464#14,<xmm12=int6464#13 | ||
7101 | # asm 2: pxor <xmm13=%xmm13,<xmm12=%xmm12 | ||
7102 | pxor %xmm13,%xmm12 | ||
7103 | |||
7104 | # qhasm: xmm10 ^= xmm11 | ||
7105 | # asm 1: pxor <xmm11=int6464#12,<xmm10=int6464#11 | ||
7106 | # asm 2: pxor <xmm11=%xmm11,<xmm10=%xmm10 | ||
7107 | pxor %xmm11,%xmm10 | ||
7108 | |||
7109 | # qhasm: xmm11 ^= xmm13 | ||
7110 | # asm 1: pxor <xmm13=int6464#14,<xmm11=int6464#12 | ||
7111 | # asm 2: pxor <xmm13=%xmm13,<xmm11=%xmm11 | ||
7112 | pxor %xmm13,%xmm11 | ||
7113 | |||
7114 | # qhasm: xmm14 ^= xmm11 | ||
7115 | # asm 1: pxor <xmm11=int6464#12,<xmm14=int6464#15 | ||
7116 | # asm 2: pxor <xmm11=%xmm11,<xmm14=%xmm14 | ||
7117 | pxor %xmm11,%xmm14 | ||
7118 | |||
7119 | # qhasm: xmm0 = shuffle dwords of xmm8 by 0x93 | ||
7120 | # asm 1: pshufd $0x93,<xmm8=int6464#9,>xmm0=int6464#1 | ||
7121 | # asm 2: pshufd $0x93,<xmm8=%xmm8,>xmm0=%xmm0 | ||
7122 | pshufd $0x93,%xmm8,%xmm0 | ||
7123 | |||
7124 | # qhasm: xmm1 = shuffle dwords of xmm9 by 0x93 | ||
7125 | # asm 1: pshufd $0x93,<xmm9=int6464#10,>xmm1=int6464#2 | ||
7126 | # asm 2: pshufd $0x93,<xmm9=%xmm9,>xmm1=%xmm1 | ||
7127 | pshufd $0x93,%xmm9,%xmm1 | ||
7128 | |||
7129 | # qhasm: xmm2 = shuffle dwords of xmm12 by 0x93 | ||
7130 | # asm 1: pshufd $0x93,<xmm12=int6464#13,>xmm2=int6464#3 | ||
7131 | # asm 2: pshufd $0x93,<xmm12=%xmm12,>xmm2=%xmm2 | ||
7132 | pshufd $0x93,%xmm12,%xmm2 | ||
7133 | |||
7134 | # qhasm: xmm3 = shuffle dwords of xmm14 by 0x93 | ||
7135 | # asm 1: pshufd $0x93,<xmm14=int6464#15,>xmm3=int6464#4 | ||
7136 | # asm 2: pshufd $0x93,<xmm14=%xmm14,>xmm3=%xmm3 | ||
7137 | pshufd $0x93,%xmm14,%xmm3 | ||
7138 | |||
7139 | # qhasm: xmm4 = shuffle dwords of xmm11 by 0x93 | ||
7140 | # asm 1: pshufd $0x93,<xmm11=int6464#12,>xmm4=int6464#5 | ||
7141 | # asm 2: pshufd $0x93,<xmm11=%xmm11,>xmm4=%xmm4 | ||
7142 | pshufd $0x93,%xmm11,%xmm4 | ||
7143 | |||
7144 | # qhasm: xmm5 = shuffle dwords of xmm15 by 0x93 | ||
7145 | # asm 1: pshufd $0x93,<xmm15=int6464#16,>xmm5=int6464#6 | ||
7146 | # asm 2: pshufd $0x93,<xmm15=%xmm15,>xmm5=%xmm5 | ||
7147 | pshufd $0x93,%xmm15,%xmm5 | ||
7148 | |||
7149 | # qhasm: xmm6 = shuffle dwords of xmm10 by 0x93 | ||
7150 | # asm 1: pshufd $0x93,<xmm10=int6464#11,>xmm6=int6464#7 | ||
7151 | # asm 2: pshufd $0x93,<xmm10=%xmm10,>xmm6=%xmm6 | ||
7152 | pshufd $0x93,%xmm10,%xmm6 | ||
7153 | |||
7154 | # qhasm: xmm7 = shuffle dwords of xmm13 by 0x93 | ||
7155 | # asm 1: pshufd $0x93,<xmm13=int6464#14,>xmm7=int6464#8 | ||
7156 | # asm 2: pshufd $0x93,<xmm13=%xmm13,>xmm7=%xmm7 | ||
7157 | pshufd $0x93,%xmm13,%xmm7 | ||
7158 | |||
7159 | # qhasm: xmm8 ^= xmm0 | ||
7160 | # asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9 | ||
7161 | # asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8 | ||
7162 | pxor %xmm0,%xmm8 | ||
7163 | |||
7164 | # qhasm: xmm9 ^= xmm1 | ||
7165 | # asm 1: pxor <xmm1=int6464#2,<xmm9=int6464#10 | ||
7166 | # asm 2: pxor <xmm1=%xmm1,<xmm9=%xmm9 | ||
7167 | pxor %xmm1,%xmm9 | ||
7168 | |||
7169 | # qhasm: xmm12 ^= xmm2 | ||
7170 | # asm 1: pxor <xmm2=int6464#3,<xmm12=int6464#13 | ||
7171 | # asm 2: pxor <xmm2=%xmm2,<xmm12=%xmm12 | ||
7172 | pxor %xmm2,%xmm12 | ||
7173 | |||
7174 | # qhasm: xmm14 ^= xmm3 | ||
7175 | # asm 1: pxor <xmm3=int6464#4,<xmm14=int6464#15 | ||
7176 | # asm 2: pxor <xmm3=%xmm3,<xmm14=%xmm14 | ||
7177 | pxor %xmm3,%xmm14 | ||
7178 | |||
7179 | # qhasm: xmm11 ^= xmm4 | ||
7180 | # asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#12 | ||
7181 | # asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm11 | ||
7182 | pxor %xmm4,%xmm11 | ||
7183 | |||
7184 | # qhasm: xmm15 ^= xmm5 | ||
7185 | # asm 1: pxor <xmm5=int6464#6,<xmm15=int6464#16 | ||
7186 | # asm 2: pxor <xmm5=%xmm5,<xmm15=%xmm15 | ||
7187 | pxor %xmm5,%xmm15 | ||
7188 | |||
7189 | # qhasm: xmm10 ^= xmm6 | ||
7190 | # asm 1: pxor <xmm6=int6464#7,<xmm10=int6464#11 | ||
7191 | # asm 2: pxor <xmm6=%xmm6,<xmm10=%xmm10 | ||
7192 | pxor %xmm6,%xmm10 | ||
7193 | |||
7194 | # qhasm: xmm13 ^= xmm7 | ||
7195 | # asm 1: pxor <xmm7=int6464#8,<xmm13=int6464#14 | ||
7196 | # asm 2: pxor <xmm7=%xmm7,<xmm13=%xmm13 | ||
7197 | pxor %xmm7,%xmm13 | ||
7198 | |||
7199 | # qhasm: xmm0 ^= xmm13 | ||
7200 | # asm 1: pxor <xmm13=int6464#14,<xmm0=int6464#1 | ||
7201 | # asm 2: pxor <xmm13=%xmm13,<xmm0=%xmm0 | ||
7202 | pxor %xmm13,%xmm0 | ||
7203 | |||
7204 | # qhasm: xmm1 ^= xmm8 | ||
7205 | # asm 1: pxor <xmm8=int6464#9,<xmm1=int6464#2 | ||
7206 | # asm 2: pxor <xmm8=%xmm8,<xmm1=%xmm1 | ||
7207 | pxor %xmm8,%xmm1 | ||
7208 | |||
7209 | # qhasm: xmm2 ^= xmm9 | ||
7210 | # asm 1: pxor <xmm9=int6464#10,<xmm2=int6464#3 | ||
7211 | # asm 2: pxor <xmm9=%xmm9,<xmm2=%xmm2 | ||
7212 | pxor %xmm9,%xmm2 | ||
7213 | |||
7214 | # qhasm: xmm1 ^= xmm13 | ||
7215 | # asm 1: pxor <xmm13=int6464#14,<xmm1=int6464#2 | ||
7216 | # asm 2: pxor <xmm13=%xmm13,<xmm1=%xmm1 | ||
7217 | pxor %xmm13,%xmm1 | ||
7218 | |||
7219 | # qhasm: xmm3 ^= xmm12 | ||
7220 | # asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4 | ||
7221 | # asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3 | ||
7222 | pxor %xmm12,%xmm3 | ||
7223 | |||
7224 | # qhasm: xmm4 ^= xmm14 | ||
7225 | # asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5 | ||
7226 | # asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4 | ||
7227 | pxor %xmm14,%xmm4 | ||
7228 | |||
7229 | # qhasm: xmm5 ^= xmm11 | ||
7230 | # asm 1: pxor <xmm11=int6464#12,<xmm5=int6464#6 | ||
7231 | # asm 2: pxor <xmm11=%xmm11,<xmm5=%xmm5 | ||
7232 | pxor %xmm11,%xmm5 | ||
7233 | |||
7234 | # qhasm: xmm3 ^= xmm13 | ||
7235 | # asm 1: pxor <xmm13=int6464#14,<xmm3=int6464#4 | ||
7236 | # asm 2: pxor <xmm13=%xmm13,<xmm3=%xmm3 | ||
7237 | pxor %xmm13,%xmm3 | ||
7238 | |||
7239 | # qhasm: xmm6 ^= xmm15 | ||
7240 | # asm 1: pxor <xmm15=int6464#16,<xmm6=int6464#7 | ||
7241 | # asm 2: pxor <xmm15=%xmm15,<xmm6=%xmm6 | ||
7242 | pxor %xmm15,%xmm6 | ||
7243 | |||
7244 | # qhasm: xmm7 ^= xmm10 | ||
7245 | # asm 1: pxor <xmm10=int6464#11,<xmm7=int6464#8 | ||
7246 | # asm 2: pxor <xmm10=%xmm10,<xmm7=%xmm7 | ||
7247 | pxor %xmm10,%xmm7 | ||
7248 | |||
7249 | # qhasm: xmm4 ^= xmm13 | ||
7250 | # asm 1: pxor <xmm13=int6464#14,<xmm4=int6464#5 | ||
7251 | # asm 2: pxor <xmm13=%xmm13,<xmm4=%xmm4 | ||
7252 | pxor %xmm13,%xmm4 | ||
7253 | |||
7254 | # qhasm: xmm8 = shuffle dwords of xmm8 by 0x4E | ||
7255 | # asm 1: pshufd $0x4E,<xmm8=int6464#9,>xmm8=int6464#9 | ||
7256 | # asm 2: pshufd $0x4E,<xmm8=%xmm8,>xmm8=%xmm8 | ||
7257 | pshufd $0x4E,%xmm8,%xmm8 | ||
7258 | |||
7259 | # qhasm: xmm9 = shuffle dwords of xmm9 by 0x4E | ||
7260 | # asm 1: pshufd $0x4E,<xmm9=int6464#10,>xmm9=int6464#10 | ||
7261 | # asm 2: pshufd $0x4E,<xmm9=%xmm9,>xmm9=%xmm9 | ||
7262 | pshufd $0x4E,%xmm9,%xmm9 | ||
7263 | |||
7264 | # qhasm: xmm12 = shuffle dwords of xmm12 by 0x4E | ||
7265 | # asm 1: pshufd $0x4E,<xmm12=int6464#13,>xmm12=int6464#13 | ||
7266 | # asm 2: pshufd $0x4E,<xmm12=%xmm12,>xmm12=%xmm12 | ||
7267 | pshufd $0x4E,%xmm12,%xmm12 | ||
7268 | |||
7269 | # qhasm: xmm14 = shuffle dwords of xmm14 by 0x4E | ||
7270 | # asm 1: pshufd $0x4E,<xmm14=int6464#15,>xmm14=int6464#15 | ||
7271 | # asm 2: pshufd $0x4E,<xmm14=%xmm14,>xmm14=%xmm14 | ||
7272 | pshufd $0x4E,%xmm14,%xmm14 | ||
7273 | |||
7274 | # qhasm: xmm11 = shuffle dwords of xmm11 by 0x4E | ||
7275 | # asm 1: pshufd $0x4E,<xmm11=int6464#12,>xmm11=int6464#12 | ||
7276 | # asm 2: pshufd $0x4E,<xmm11=%xmm11,>xmm11=%xmm11 | ||
7277 | pshufd $0x4E,%xmm11,%xmm11 | ||
7278 | |||
7279 | # qhasm: xmm15 = shuffle dwords of xmm15 by 0x4E | ||
7280 | # asm 1: pshufd $0x4E,<xmm15=int6464#16,>xmm15=int6464#16 | ||
7281 | # asm 2: pshufd $0x4E,<xmm15=%xmm15,>xmm15=%xmm15 | ||
7282 | pshufd $0x4E,%xmm15,%xmm15 | ||
7283 | |||
7284 | # qhasm: xmm10 = shuffle dwords of xmm10 by 0x4E | ||
7285 | # asm 1: pshufd $0x4E,<xmm10=int6464#11,>xmm10=int6464#11 | ||
7286 | # asm 2: pshufd $0x4E,<xmm10=%xmm10,>xmm10=%xmm10 | ||
7287 | pshufd $0x4E,%xmm10,%xmm10 | ||
7288 | |||
7289 | # qhasm: xmm13 = shuffle dwords of xmm13 by 0x4E | ||
7290 | # asm 1: pshufd $0x4E,<xmm13=int6464#14,>xmm13=int6464#14 | ||
7291 | # asm 2: pshufd $0x4E,<xmm13=%xmm13,>xmm13=%xmm13 | ||
7292 | pshufd $0x4E,%xmm13,%xmm13 | ||
7293 | |||
7294 | # qhasm: xmm0 ^= xmm8 | ||
7295 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
7296 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
7297 | pxor %xmm8,%xmm0 | ||
7298 | |||
7299 | # qhasm: xmm1 ^= xmm9 | ||
7300 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
7301 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
7302 | pxor %xmm9,%xmm1 | ||
7303 | |||
7304 | # qhasm: xmm2 ^= xmm12 | ||
7305 | # asm 1: pxor <xmm12=int6464#13,<xmm2=int6464#3 | ||
7306 | # asm 2: pxor <xmm12=%xmm12,<xmm2=%xmm2 | ||
7307 | pxor %xmm12,%xmm2 | ||
7308 | |||
7309 | # qhasm: xmm3 ^= xmm14 | ||
7310 | # asm 1: pxor <xmm14=int6464#15,<xmm3=int6464#4 | ||
7311 | # asm 2: pxor <xmm14=%xmm14,<xmm3=%xmm3 | ||
7312 | pxor %xmm14,%xmm3 | ||
7313 | |||
7314 | # qhasm: xmm4 ^= xmm11 | ||
7315 | # asm 1: pxor <xmm11=int6464#12,<xmm4=int6464#5 | ||
7316 | # asm 2: pxor <xmm11=%xmm11,<xmm4=%xmm4 | ||
7317 | pxor %xmm11,%xmm4 | ||
7318 | |||
7319 | # qhasm: xmm5 ^= xmm15 | ||
7320 | # asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6 | ||
7321 | # asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5 | ||
7322 | pxor %xmm15,%xmm5 | ||
7323 | |||
7324 | # qhasm: xmm6 ^= xmm10 | ||
7325 | # asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7 | ||
7326 | # asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6 | ||
7327 | pxor %xmm10,%xmm6 | ||
7328 | |||
7329 | # qhasm: xmm7 ^= xmm13 | ||
7330 | # asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8 | ||
7331 | # asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7 | ||
7332 | pxor %xmm13,%xmm7 | ||
7333 | |||
7334 | # qhasm: xmm0 ^= *(int128 *)(c + 768) | ||
7335 | # asm 1: pxor 768(<c=int64#5),<xmm0=int6464#1 | ||
7336 | # asm 2: pxor 768(<c=%r8),<xmm0=%xmm0 | ||
7337 | pxor 768(%r8),%xmm0 | ||
7338 | |||
7339 | # qhasm: shuffle bytes of xmm0 by SR | ||
7340 | # asm 1: pshufb SR,<xmm0=int6464#1 | ||
7341 | # asm 2: pshufb SR,<xmm0=%xmm0 | ||
7342 | pshufb SR,%xmm0 | ||
7343 | |||
7344 | # qhasm: xmm1 ^= *(int128 *)(c + 784) | ||
7345 | # asm 1: pxor 784(<c=int64#5),<xmm1=int6464#2 | ||
7346 | # asm 2: pxor 784(<c=%r8),<xmm1=%xmm1 | ||
7347 | pxor 784(%r8),%xmm1 | ||
7348 | |||
7349 | # qhasm: shuffle bytes of xmm1 by SR | ||
7350 | # asm 1: pshufb SR,<xmm1=int6464#2 | ||
7351 | # asm 2: pshufb SR,<xmm1=%xmm1 | ||
7352 | pshufb SR,%xmm1 | ||
7353 | |||
7354 | # qhasm: xmm2 ^= *(int128 *)(c + 800) | ||
7355 | # asm 1: pxor 800(<c=int64#5),<xmm2=int6464#3 | ||
7356 | # asm 2: pxor 800(<c=%r8),<xmm2=%xmm2 | ||
7357 | pxor 800(%r8),%xmm2 | ||
7358 | |||
7359 | # qhasm: shuffle bytes of xmm2 by SR | ||
7360 | # asm 1: pshufb SR,<xmm2=int6464#3 | ||
7361 | # asm 2: pshufb SR,<xmm2=%xmm2 | ||
7362 | pshufb SR,%xmm2 | ||
7363 | |||
7364 | # qhasm: xmm3 ^= *(int128 *)(c + 816) | ||
7365 | # asm 1: pxor 816(<c=int64#5),<xmm3=int6464#4 | ||
7366 | # asm 2: pxor 816(<c=%r8),<xmm3=%xmm3 | ||
7367 | pxor 816(%r8),%xmm3 | ||
7368 | |||
7369 | # qhasm: shuffle bytes of xmm3 by SR | ||
7370 | # asm 1: pshufb SR,<xmm3=int6464#4 | ||
7371 | # asm 2: pshufb SR,<xmm3=%xmm3 | ||
7372 | pshufb SR,%xmm3 | ||
7373 | |||
7374 | # qhasm: xmm4 ^= *(int128 *)(c + 832) | ||
7375 | # asm 1: pxor 832(<c=int64#5),<xmm4=int6464#5 | ||
7376 | # asm 2: pxor 832(<c=%r8),<xmm4=%xmm4 | ||
7377 | pxor 832(%r8),%xmm4 | ||
7378 | |||
7379 | # qhasm: shuffle bytes of xmm4 by SR | ||
7380 | # asm 1: pshufb SR,<xmm4=int6464#5 | ||
7381 | # asm 2: pshufb SR,<xmm4=%xmm4 | ||
7382 | pshufb SR,%xmm4 | ||
7383 | |||
7384 | # qhasm: xmm5 ^= *(int128 *)(c + 848) | ||
7385 | # asm 1: pxor 848(<c=int64#5),<xmm5=int6464#6 | ||
7386 | # asm 2: pxor 848(<c=%r8),<xmm5=%xmm5 | ||
7387 | pxor 848(%r8),%xmm5 | ||
7388 | |||
7389 | # qhasm: shuffle bytes of xmm5 by SR | ||
7390 | # asm 1: pshufb SR,<xmm5=int6464#6 | ||
7391 | # asm 2: pshufb SR,<xmm5=%xmm5 | ||
7392 | pshufb SR,%xmm5 | ||
7393 | |||
7394 | # qhasm: xmm6 ^= *(int128 *)(c + 864) | ||
7395 | # asm 1: pxor 864(<c=int64#5),<xmm6=int6464#7 | ||
7396 | # asm 2: pxor 864(<c=%r8),<xmm6=%xmm6 | ||
7397 | pxor 864(%r8),%xmm6 | ||
7398 | |||
7399 | # qhasm: shuffle bytes of xmm6 by SR | ||
7400 | # asm 1: pshufb SR,<xmm6=int6464#7 | ||
7401 | # asm 2: pshufb SR,<xmm6=%xmm6 | ||
7402 | pshufb SR,%xmm6 | ||
7403 | |||
7404 | # qhasm: xmm7 ^= *(int128 *)(c + 880) | ||
7405 | # asm 1: pxor 880(<c=int64#5),<xmm7=int6464#8 | ||
7406 | # asm 2: pxor 880(<c=%r8),<xmm7=%xmm7 | ||
7407 | pxor 880(%r8),%xmm7 | ||
7408 | |||
7409 | # qhasm: shuffle bytes of xmm7 by SR | ||
7410 | # asm 1: pshufb SR,<xmm7=int6464#8 | ||
7411 | # asm 2: pshufb SR,<xmm7=%xmm7 | ||
7412 | pshufb SR,%xmm7 | ||
7413 | |||
7414 | # qhasm: xmm5 ^= xmm6 | ||
7415 | # asm 1: pxor <xmm6=int6464#7,<xmm5=int6464#6 | ||
7416 | # asm 2: pxor <xmm6=%xmm6,<xmm5=%xmm5 | ||
7417 | pxor %xmm6,%xmm5 | ||
7418 | |||
7419 | # qhasm: xmm2 ^= xmm1 | ||
7420 | # asm 1: pxor <xmm1=int6464#2,<xmm2=int6464#3 | ||
7421 | # asm 2: pxor <xmm1=%xmm1,<xmm2=%xmm2 | ||
7422 | pxor %xmm1,%xmm2 | ||
7423 | |||
7424 | # qhasm: xmm5 ^= xmm0 | ||
7425 | # asm 1: pxor <xmm0=int6464#1,<xmm5=int6464#6 | ||
7426 | # asm 2: pxor <xmm0=%xmm0,<xmm5=%xmm5 | ||
7427 | pxor %xmm0,%xmm5 | ||
7428 | |||
7429 | # qhasm: xmm6 ^= xmm2 | ||
7430 | # asm 1: pxor <xmm2=int6464#3,<xmm6=int6464#7 | ||
7431 | # asm 2: pxor <xmm2=%xmm2,<xmm6=%xmm6 | ||
7432 | pxor %xmm2,%xmm6 | ||
7433 | |||
7434 | # qhasm: xmm3 ^= xmm0 | ||
7435 | # asm 1: pxor <xmm0=int6464#1,<xmm3=int6464#4 | ||
7436 | # asm 2: pxor <xmm0=%xmm0,<xmm3=%xmm3 | ||
7437 | pxor %xmm0,%xmm3 | ||
7438 | |||
7439 | # qhasm: xmm6 ^= xmm3 | ||
7440 | # asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7 | ||
7441 | # asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6 | ||
7442 | pxor %xmm3,%xmm6 | ||
7443 | |||
7444 | # qhasm: xmm3 ^= xmm7 | ||
7445 | # asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4 | ||
7446 | # asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3 | ||
7447 | pxor %xmm7,%xmm3 | ||
7448 | |||
7449 | # qhasm: xmm3 ^= xmm4 | ||
7450 | # asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4 | ||
7451 | # asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3 | ||
7452 | pxor %xmm4,%xmm3 | ||
7453 | |||
7454 | # qhasm: xmm7 ^= xmm5 | ||
7455 | # asm 1: pxor <xmm5=int6464#6,<xmm7=int6464#8 | ||
7456 | # asm 2: pxor <xmm5=%xmm5,<xmm7=%xmm7 | ||
7457 | pxor %xmm5,%xmm7 | ||
7458 | |||
7459 | # qhasm: xmm3 ^= xmm1 | ||
7460 | # asm 1: pxor <xmm1=int6464#2,<xmm3=int6464#4 | ||
7461 | # asm 2: pxor <xmm1=%xmm1,<xmm3=%xmm3 | ||
7462 | pxor %xmm1,%xmm3 | ||
7463 | |||
7464 | # qhasm: xmm4 ^= xmm5 | ||
7465 | # asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5 | ||
7466 | # asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4 | ||
7467 | pxor %xmm5,%xmm4 | ||
7468 | |||
7469 | # qhasm: xmm2 ^= xmm7 | ||
7470 | # asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3 | ||
7471 | # asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2 | ||
7472 | pxor %xmm7,%xmm2 | ||
7473 | |||
7474 | # qhasm: xmm1 ^= xmm5 | ||
7475 | # asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2 | ||
7476 | # asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1 | ||
7477 | pxor %xmm5,%xmm1 | ||
7478 | |||
7479 | # qhasm: xmm11 = xmm7 | ||
7480 | # asm 1: movdqa <xmm7=int6464#8,>xmm11=int6464#9 | ||
7481 | # asm 2: movdqa <xmm7=%xmm7,>xmm11=%xmm8 | ||
7482 | movdqa %xmm7,%xmm8 | ||
7483 | |||
7484 | # qhasm: xmm10 = xmm1 | ||
7485 | # asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10 | ||
7486 | # asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9 | ||
7487 | movdqa %xmm1,%xmm9 | ||
7488 | |||
7489 | # qhasm: xmm9 = xmm5 | ||
7490 | # asm 1: movdqa <xmm5=int6464#6,>xmm9=int6464#11 | ||
7491 | # asm 2: movdqa <xmm5=%xmm5,>xmm9=%xmm10 | ||
7492 | movdqa %xmm5,%xmm10 | ||
7493 | |||
7494 | # qhasm: xmm13 = xmm2 | ||
7495 | # asm 1: movdqa <xmm2=int6464#3,>xmm13=int6464#12 | ||
7496 | # asm 2: movdqa <xmm2=%xmm2,>xmm13=%xmm11 | ||
7497 | movdqa %xmm2,%xmm11 | ||
7498 | |||
7499 | # qhasm: xmm12 = xmm6 | ||
7500 | # asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#13 | ||
7501 | # asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm12 | ||
7502 | movdqa %xmm6,%xmm12 | ||
7503 | |||
7504 | # qhasm: xmm11 ^= xmm4 | ||
7505 | # asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#9 | ||
7506 | # asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm8 | ||
7507 | pxor %xmm4,%xmm8 | ||
7508 | |||
7509 | # qhasm: xmm10 ^= xmm2 | ||
7510 | # asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#10 | ||
7511 | # asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm9 | ||
7512 | pxor %xmm2,%xmm9 | ||
7513 | |||
7514 | # qhasm: xmm9 ^= xmm3 | ||
7515 | # asm 1: pxor <xmm3=int6464#4,<xmm9=int6464#11 | ||
7516 | # asm 2: pxor <xmm3=%xmm3,<xmm9=%xmm10 | ||
7517 | pxor %xmm3,%xmm10 | ||
7518 | |||
7519 | # qhasm: xmm13 ^= xmm4 | ||
7520 | # asm 1: pxor <xmm4=int6464#5,<xmm13=int6464#12 | ||
7521 | # asm 2: pxor <xmm4=%xmm4,<xmm13=%xmm11 | ||
7522 | pxor %xmm4,%xmm11 | ||
7523 | |||
7524 | # qhasm: xmm12 ^= xmm0 | ||
7525 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13 | ||
7526 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12 | ||
7527 | pxor %xmm0,%xmm12 | ||
7528 | |||
7529 | # qhasm: xmm14 = xmm11 | ||
7530 | # asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14 | ||
7531 | # asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13 | ||
7532 | movdqa %xmm8,%xmm13 | ||
7533 | |||
7534 | # qhasm: xmm8 = xmm10 | ||
7535 | # asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15 | ||
7536 | # asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14 | ||
7537 | movdqa %xmm9,%xmm14 | ||
7538 | |||
7539 | # qhasm: xmm15 = xmm11 | ||
7540 | # asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16 | ||
7541 | # asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15 | ||
7542 | movdqa %xmm8,%xmm15 | ||
7543 | |||
7544 | # qhasm: xmm10 |= xmm9 | ||
7545 | # asm 1: por <xmm9=int6464#11,<xmm10=int6464#10 | ||
7546 | # asm 2: por <xmm9=%xmm10,<xmm10=%xmm9 | ||
7547 | por %xmm10,%xmm9 | ||
7548 | |||
7549 | # qhasm: xmm11 |= xmm12 | ||
7550 | # asm 1: por <xmm12=int6464#13,<xmm11=int6464#9 | ||
7551 | # asm 2: por <xmm12=%xmm12,<xmm11=%xmm8 | ||
7552 | por %xmm12,%xmm8 | ||
7553 | |||
7554 | # qhasm: xmm15 ^= xmm8 | ||
7555 | # asm 1: pxor <xmm8=int6464#15,<xmm15=int6464#16 | ||
7556 | # asm 2: pxor <xmm8=%xmm14,<xmm15=%xmm15 | ||
7557 | pxor %xmm14,%xmm15 | ||
7558 | |||
7559 | # qhasm: xmm14 &= xmm12 | ||
7560 | # asm 1: pand <xmm12=int6464#13,<xmm14=int6464#14 | ||
7561 | # asm 2: pand <xmm12=%xmm12,<xmm14=%xmm13 | ||
7562 | pand %xmm12,%xmm13 | ||
7563 | |||
7564 | # qhasm: xmm8 &= xmm9 | ||
7565 | # asm 1: pand <xmm9=int6464#11,<xmm8=int6464#15 | ||
7566 | # asm 2: pand <xmm9=%xmm10,<xmm8=%xmm14 | ||
7567 | pand %xmm10,%xmm14 | ||
7568 | |||
7569 | # qhasm: xmm12 ^= xmm9 | ||
7570 | # asm 1: pxor <xmm9=int6464#11,<xmm12=int6464#13 | ||
7571 | # asm 2: pxor <xmm9=%xmm10,<xmm12=%xmm12 | ||
7572 | pxor %xmm10,%xmm12 | ||
7573 | |||
7574 | # qhasm: xmm15 &= xmm12 | ||
7575 | # asm 1: pand <xmm12=int6464#13,<xmm15=int6464#16 | ||
7576 | # asm 2: pand <xmm12=%xmm12,<xmm15=%xmm15 | ||
7577 | pand %xmm12,%xmm15 | ||
7578 | |||
7579 | # qhasm: xmm12 = xmm3 | ||
7580 | # asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#11 | ||
7581 | # asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm10 | ||
7582 | movdqa %xmm3,%xmm10 | ||
7583 | |||
7584 | # qhasm: xmm12 ^= xmm0 | ||
7585 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#11 | ||
7586 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm10 | ||
7587 | pxor %xmm0,%xmm10 | ||
7588 | |||
7589 | # qhasm: xmm13 &= xmm12 | ||
7590 | # asm 1: pand <xmm12=int6464#11,<xmm13=int6464#12 | ||
7591 | # asm 2: pand <xmm12=%xmm10,<xmm13=%xmm11 | ||
7592 | pand %xmm10,%xmm11 | ||
7593 | |||
7594 | # qhasm: xmm11 ^= xmm13 | ||
7595 | # asm 1: pxor <xmm13=int6464#12,<xmm11=int6464#9 | ||
7596 | # asm 2: pxor <xmm13=%xmm11,<xmm11=%xmm8 | ||
7597 | pxor %xmm11,%xmm8 | ||
7598 | |||
7599 | # qhasm: xmm10 ^= xmm13 | ||
7600 | # asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10 | ||
7601 | # asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9 | ||
7602 | pxor %xmm11,%xmm9 | ||
7603 | |||
7604 | # qhasm: xmm13 = xmm7 | ||
7605 | # asm 1: movdqa <xmm7=int6464#8,>xmm13=int6464#11 | ||
7606 | # asm 2: movdqa <xmm7=%xmm7,>xmm13=%xmm10 | ||
7607 | movdqa %xmm7,%xmm10 | ||
7608 | |||
7609 | # qhasm: xmm13 ^= xmm1 | ||
7610 | # asm 1: pxor <xmm1=int6464#2,<xmm13=int6464#11 | ||
7611 | # asm 2: pxor <xmm1=%xmm1,<xmm13=%xmm10 | ||
7612 | pxor %xmm1,%xmm10 | ||
7613 | |||
7614 | # qhasm: xmm12 = xmm5 | ||
7615 | # asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#12 | ||
7616 | # asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm11 | ||
7617 | movdqa %xmm5,%xmm11 | ||
7618 | |||
7619 | # qhasm: xmm9 = xmm13 | ||
7620 | # asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13 | ||
7621 | # asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12 | ||
7622 | movdqa %xmm10,%xmm12 | ||
7623 | |||
7624 | # qhasm: xmm12 ^= xmm6 | ||
7625 | # asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#12 | ||
7626 | # asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm11 | ||
7627 | pxor %xmm6,%xmm11 | ||
7628 | |||
7629 | # qhasm: xmm9 |= xmm12 | ||
7630 | # asm 1: por <xmm12=int6464#12,<xmm9=int6464#13 | ||
7631 | # asm 2: por <xmm12=%xmm11,<xmm9=%xmm12 | ||
7632 | por %xmm11,%xmm12 | ||
7633 | |||
7634 | # qhasm: xmm13 &= xmm12 | ||
7635 | # asm 1: pand <xmm12=int6464#12,<xmm13=int6464#11 | ||
7636 | # asm 2: pand <xmm12=%xmm11,<xmm13=%xmm10 | ||
7637 | pand %xmm11,%xmm10 | ||
7638 | |||
7639 | # qhasm: xmm8 ^= xmm13 | ||
7640 | # asm 1: pxor <xmm13=int6464#11,<xmm8=int6464#15 | ||
7641 | # asm 2: pxor <xmm13=%xmm10,<xmm8=%xmm14 | ||
7642 | pxor %xmm10,%xmm14 | ||
7643 | |||
7644 | # qhasm: xmm11 ^= xmm15 | ||
7645 | # asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#9 | ||
7646 | # asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm8 | ||
7647 | pxor %xmm15,%xmm8 | ||
7648 | |||
7649 | # qhasm: xmm10 ^= xmm14 | ||
7650 | # asm 1: pxor <xmm14=int6464#14,<xmm10=int6464#10 | ||
7651 | # asm 2: pxor <xmm14=%xmm13,<xmm10=%xmm9 | ||
7652 | pxor %xmm13,%xmm9 | ||
7653 | |||
7654 | # qhasm: xmm9 ^= xmm15 | ||
7655 | # asm 1: pxor <xmm15=int6464#16,<xmm9=int6464#13 | ||
7656 | # asm 2: pxor <xmm15=%xmm15,<xmm9=%xmm12 | ||
7657 | pxor %xmm15,%xmm12 | ||
7658 | |||
7659 | # qhasm: xmm8 ^= xmm14 | ||
7660 | # asm 1: pxor <xmm14=int6464#14,<xmm8=int6464#15 | ||
7661 | # asm 2: pxor <xmm14=%xmm13,<xmm8=%xmm14 | ||
7662 | pxor %xmm13,%xmm14 | ||
7663 | |||
7664 | # qhasm: xmm9 ^= xmm14 | ||
7665 | # asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13 | ||
7666 | # asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12 | ||
7667 | pxor %xmm13,%xmm12 | ||
7668 | |||
7669 | # qhasm: xmm12 = xmm2 | ||
7670 | # asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#11 | ||
7671 | # asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm10 | ||
7672 | movdqa %xmm2,%xmm10 | ||
7673 | |||
7674 | # qhasm: xmm13 = xmm4 | ||
7675 | # asm 1: movdqa <xmm4=int6464#5,>xmm13=int6464#12 | ||
7676 | # asm 2: movdqa <xmm4=%xmm4,>xmm13=%xmm11 | ||
7677 | movdqa %xmm4,%xmm11 | ||
7678 | |||
7679 | # qhasm: xmm14 = xmm1 | ||
7680 | # asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14 | ||
7681 | # asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13 | ||
7682 | movdqa %xmm1,%xmm13 | ||
7683 | |||
7684 | # qhasm: xmm15 = xmm7 | ||
7685 | # asm 1: movdqa <xmm7=int6464#8,>xmm15=int6464#16 | ||
7686 | # asm 2: movdqa <xmm7=%xmm7,>xmm15=%xmm15 | ||
7687 | movdqa %xmm7,%xmm15 | ||
7688 | |||
7689 | # qhasm: xmm12 &= xmm3 | ||
7690 | # asm 1: pand <xmm3=int6464#4,<xmm12=int6464#11 | ||
7691 | # asm 2: pand <xmm3=%xmm3,<xmm12=%xmm10 | ||
7692 | pand %xmm3,%xmm10 | ||
7693 | |||
7694 | # qhasm: xmm13 &= xmm0 | ||
7695 | # asm 1: pand <xmm0=int6464#1,<xmm13=int6464#12 | ||
7696 | # asm 2: pand <xmm0=%xmm0,<xmm13=%xmm11 | ||
7697 | pand %xmm0,%xmm11 | ||
7698 | |||
7699 | # qhasm: xmm14 &= xmm5 | ||
7700 | # asm 1: pand <xmm5=int6464#6,<xmm14=int6464#14 | ||
7701 | # asm 2: pand <xmm5=%xmm5,<xmm14=%xmm13 | ||
7702 | pand %xmm5,%xmm13 | ||
7703 | |||
7704 | # qhasm: xmm15 |= xmm6 | ||
7705 | # asm 1: por <xmm6=int6464#7,<xmm15=int6464#16 | ||
7706 | # asm 2: por <xmm6=%xmm6,<xmm15=%xmm15 | ||
7707 | por %xmm6,%xmm15 | ||
7708 | |||
7709 | # qhasm: xmm11 ^= xmm12 | ||
7710 | # asm 1: pxor <xmm12=int6464#11,<xmm11=int6464#9 | ||
7711 | # asm 2: pxor <xmm12=%xmm10,<xmm11=%xmm8 | ||
7712 | pxor %xmm10,%xmm8 | ||
7713 | |||
7714 | # qhasm: xmm10 ^= xmm13 | ||
7715 | # asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10 | ||
7716 | # asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9 | ||
7717 | pxor %xmm11,%xmm9 | ||
7718 | |||
7719 | # qhasm: xmm9 ^= xmm14 | ||
7720 | # asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13 | ||
7721 | # asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12 | ||
7722 | pxor %xmm13,%xmm12 | ||
7723 | |||
7724 | # qhasm: xmm8 ^= xmm15 | ||
7725 | # asm 1: pxor <xmm15=int6464#16,<xmm8=int6464#15 | ||
7726 | # asm 2: pxor <xmm15=%xmm15,<xmm8=%xmm14 | ||
7727 | pxor %xmm15,%xmm14 | ||
7728 | |||
7729 | # qhasm: xmm12 = xmm11 | ||
7730 | # asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11 | ||
7731 | # asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10 | ||
7732 | movdqa %xmm8,%xmm10 | ||
7733 | |||
7734 | # qhasm: xmm12 ^= xmm10 | ||
7735 | # asm 1: pxor <xmm10=int6464#10,<xmm12=int6464#11 | ||
7736 | # asm 2: pxor <xmm10=%xmm9,<xmm12=%xmm10 | ||
7737 | pxor %xmm9,%xmm10 | ||
7738 | |||
7739 | # qhasm: xmm11 &= xmm9 | ||
7740 | # asm 1: pand <xmm9=int6464#13,<xmm11=int6464#9 | ||
7741 | # asm 2: pand <xmm9=%xmm12,<xmm11=%xmm8 | ||
7742 | pand %xmm12,%xmm8 | ||
7743 | |||
7744 | # qhasm: xmm14 = xmm8 | ||
7745 | # asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12 | ||
7746 | # asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11 | ||
7747 | movdqa %xmm14,%xmm11 | ||
7748 | |||
7749 | # qhasm: xmm14 ^= xmm11 | ||
7750 | # asm 1: pxor <xmm11=int6464#9,<xmm14=int6464#12 | ||
7751 | # asm 2: pxor <xmm11=%xmm8,<xmm14=%xmm11 | ||
7752 | pxor %xmm8,%xmm11 | ||
7753 | |||
7754 | # qhasm: xmm15 = xmm12 | ||
7755 | # asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14 | ||
7756 | # asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13 | ||
7757 | movdqa %xmm10,%xmm13 | ||
7758 | |||
7759 | # qhasm: xmm15 &= xmm14 | ||
7760 | # asm 1: pand <xmm14=int6464#12,<xmm15=int6464#14 | ||
7761 | # asm 2: pand <xmm14=%xmm11,<xmm15=%xmm13 | ||
7762 | pand %xmm11,%xmm13 | ||
7763 | |||
7764 | # qhasm: xmm15 ^= xmm10 | ||
7765 | # asm 1: pxor <xmm10=int6464#10,<xmm15=int6464#14 | ||
7766 | # asm 2: pxor <xmm10=%xmm9,<xmm15=%xmm13 | ||
7767 | pxor %xmm9,%xmm13 | ||
7768 | |||
7769 | # qhasm: xmm13 = xmm9 | ||
7770 | # asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16 | ||
7771 | # asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15 | ||
7772 | movdqa %xmm12,%xmm15 | ||
7773 | |||
7774 | # qhasm: xmm13 ^= xmm8 | ||
7775 | # asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16 | ||
7776 | # asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15 | ||
7777 | pxor %xmm14,%xmm15 | ||
7778 | |||
7779 | # qhasm: xmm11 ^= xmm10 | ||
7780 | # asm 1: pxor <xmm10=int6464#10,<xmm11=int6464#9 | ||
7781 | # asm 2: pxor <xmm10=%xmm9,<xmm11=%xmm8 | ||
7782 | pxor %xmm9,%xmm8 | ||
7783 | |||
7784 | # qhasm: xmm13 &= xmm11 | ||
7785 | # asm 1: pand <xmm11=int6464#9,<xmm13=int6464#16 | ||
7786 | # asm 2: pand <xmm11=%xmm8,<xmm13=%xmm15 | ||
7787 | pand %xmm8,%xmm15 | ||
7788 | |||
7789 | # qhasm: xmm13 ^= xmm8 | ||
7790 | # asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16 | ||
7791 | # asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15 | ||
7792 | pxor %xmm14,%xmm15 | ||
7793 | |||
7794 | # qhasm: xmm9 ^= xmm13 | ||
7795 | # asm 1: pxor <xmm13=int6464#16,<xmm9=int6464#13 | ||
7796 | # asm 2: pxor <xmm13=%xmm15,<xmm9=%xmm12 | ||
7797 | pxor %xmm15,%xmm12 | ||
7798 | |||
7799 | # qhasm: xmm10 = xmm14 | ||
7800 | # asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9 | ||
7801 | # asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8 | ||
7802 | movdqa %xmm11,%xmm8 | ||
7803 | |||
7804 | # qhasm: xmm10 ^= xmm13 | ||
7805 | # asm 1: pxor <xmm13=int6464#16,<xmm10=int6464#9 | ||
7806 | # asm 2: pxor <xmm13=%xmm15,<xmm10=%xmm8 | ||
7807 | pxor %xmm15,%xmm8 | ||
7808 | |||
7809 | # qhasm: xmm10 &= xmm8 | ||
7810 | # asm 1: pand <xmm8=int6464#15,<xmm10=int6464#9 | ||
7811 | # asm 2: pand <xmm8=%xmm14,<xmm10=%xmm8 | ||
7812 | pand %xmm14,%xmm8 | ||
7813 | |||
7814 | # qhasm: xmm9 ^= xmm10 | ||
7815 | # asm 1: pxor <xmm10=int6464#9,<xmm9=int6464#13 | ||
7816 | # asm 2: pxor <xmm10=%xmm8,<xmm9=%xmm12 | ||
7817 | pxor %xmm8,%xmm12 | ||
7818 | |||
7819 | # qhasm: xmm14 ^= xmm10 | ||
7820 | # asm 1: pxor <xmm10=int6464#9,<xmm14=int6464#12 | ||
7821 | # asm 2: pxor <xmm10=%xmm8,<xmm14=%xmm11 | ||
7822 | pxor %xmm8,%xmm11 | ||
7823 | |||
7824 | # qhasm: xmm14 &= xmm15 | ||
7825 | # asm 1: pand <xmm15=int6464#14,<xmm14=int6464#12 | ||
7826 | # asm 2: pand <xmm15=%xmm13,<xmm14=%xmm11 | ||
7827 | pand %xmm13,%xmm11 | ||
7828 | |||
7829 | # qhasm: xmm14 ^= xmm12 | ||
7830 | # asm 1: pxor <xmm12=int6464#11,<xmm14=int6464#12 | ||
7831 | # asm 2: pxor <xmm12=%xmm10,<xmm14=%xmm11 | ||
7832 | pxor %xmm10,%xmm11 | ||
7833 | |||
7834 | # qhasm: xmm12 = xmm6 | ||
7835 | # asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#9 | ||
7836 | # asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm8 | ||
7837 | movdqa %xmm6,%xmm8 | ||
7838 | |||
7839 | # qhasm: xmm8 = xmm5 | ||
7840 | # asm 1: movdqa <xmm5=int6464#6,>xmm8=int6464#10 | ||
7841 | # asm 2: movdqa <xmm5=%xmm5,>xmm8=%xmm9 | ||
7842 | movdqa %xmm5,%xmm9 | ||
7843 | |||
7844 | # qhasm: xmm10 = xmm15 | ||
7845 | # asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11 | ||
7846 | # asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10 | ||
7847 | movdqa %xmm13,%xmm10 | ||
7848 | |||
7849 | # qhasm: xmm10 ^= xmm14 | ||
7850 | # asm 1: pxor <xmm14=int6464#12,<xmm10=int6464#11 | ||
7851 | # asm 2: pxor <xmm14=%xmm11,<xmm10=%xmm10 | ||
7852 | pxor %xmm11,%xmm10 | ||
7853 | |||
7854 | # qhasm: xmm10 &= xmm6 | ||
7855 | # asm 1: pand <xmm6=int6464#7,<xmm10=int6464#11 | ||
7856 | # asm 2: pand <xmm6=%xmm6,<xmm10=%xmm10 | ||
7857 | pand %xmm6,%xmm10 | ||
7858 | |||
7859 | # qhasm: xmm6 ^= xmm5 | ||
7860 | # asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7 | ||
7861 | # asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6 | ||
7862 | pxor %xmm5,%xmm6 | ||
7863 | |||
7864 | # qhasm: xmm6 &= xmm14 | ||
7865 | # asm 1: pand <xmm14=int6464#12,<xmm6=int6464#7 | ||
7866 | # asm 2: pand <xmm14=%xmm11,<xmm6=%xmm6 | ||
7867 | pand %xmm11,%xmm6 | ||
7868 | |||
7869 | # qhasm: xmm5 &= xmm15 | ||
7870 | # asm 1: pand <xmm15=int6464#14,<xmm5=int6464#6 | ||
7871 | # asm 2: pand <xmm15=%xmm13,<xmm5=%xmm5 | ||
7872 | pand %xmm13,%xmm5 | ||
7873 | |||
7874 | # qhasm: xmm6 ^= xmm5 | ||
7875 | # asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7 | ||
7876 | # asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6 | ||
7877 | pxor %xmm5,%xmm6 | ||
7878 | |||
7879 | # qhasm: xmm5 ^= xmm10 | ||
7880 | # asm 1: pxor <xmm10=int6464#11,<xmm5=int6464#6 | ||
7881 | # asm 2: pxor <xmm10=%xmm10,<xmm5=%xmm5 | ||
7882 | pxor %xmm10,%xmm5 | ||
7883 | |||
7884 | # qhasm: xmm12 ^= xmm0 | ||
7885 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#9 | ||
7886 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm8 | ||
7887 | pxor %xmm0,%xmm8 | ||
7888 | |||
7889 | # qhasm: xmm8 ^= xmm3 | ||
7890 | # asm 1: pxor <xmm3=int6464#4,<xmm8=int6464#10 | ||
7891 | # asm 2: pxor <xmm3=%xmm3,<xmm8=%xmm9 | ||
7892 | pxor %xmm3,%xmm9 | ||
7893 | |||
7894 | # qhasm: xmm15 ^= xmm13 | ||
7895 | # asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14 | ||
7896 | # asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13 | ||
7897 | pxor %xmm15,%xmm13 | ||
7898 | |||
7899 | # qhasm: xmm14 ^= xmm9 | ||
7900 | # asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12 | ||
7901 | # asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11 | ||
7902 | pxor %xmm12,%xmm11 | ||
7903 | |||
7904 | # qhasm: xmm11 = xmm15 | ||
7905 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
7906 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
7907 | movdqa %xmm13,%xmm10 | ||
7908 | |||
7909 | # qhasm: xmm11 ^= xmm14 | ||
7910 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
7911 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
7912 | pxor %xmm11,%xmm10 | ||
7913 | |||
7914 | # qhasm: xmm11 &= xmm12 | ||
7915 | # asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11 | ||
7916 | # asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10 | ||
7917 | pand %xmm8,%xmm10 | ||
7918 | |||
7919 | # qhasm: xmm12 ^= xmm8 | ||
7920 | # asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9 | ||
7921 | # asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8 | ||
7922 | pxor %xmm9,%xmm8 | ||
7923 | |||
7924 | # qhasm: xmm12 &= xmm14 | ||
7925 | # asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9 | ||
7926 | # asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8 | ||
7927 | pand %xmm11,%xmm8 | ||
7928 | |||
7929 | # qhasm: xmm8 &= xmm15 | ||
7930 | # asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10 | ||
7931 | # asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9 | ||
7932 | pand %xmm13,%xmm9 | ||
7933 | |||
7934 | # qhasm: xmm8 ^= xmm12 | ||
7935 | # asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10 | ||
7936 | # asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9 | ||
7937 | pxor %xmm8,%xmm9 | ||
7938 | |||
7939 | # qhasm: xmm12 ^= xmm11 | ||
7940 | # asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9 | ||
7941 | # asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8 | ||
7942 | pxor %xmm10,%xmm8 | ||
7943 | |||
7944 | # qhasm: xmm10 = xmm13 | ||
7945 | # asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11 | ||
7946 | # asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10 | ||
7947 | movdqa %xmm15,%xmm10 | ||
7948 | |||
7949 | # qhasm: xmm10 ^= xmm9 | ||
7950 | # asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11 | ||
7951 | # asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10 | ||
7952 | pxor %xmm12,%xmm10 | ||
7953 | |||
7954 | # qhasm: xmm10 &= xmm0 | ||
7955 | # asm 1: pand <xmm0=int6464#1,<xmm10=int6464#11 | ||
7956 | # asm 2: pand <xmm0=%xmm0,<xmm10=%xmm10 | ||
7957 | pand %xmm0,%xmm10 | ||
7958 | |||
7959 | # qhasm: xmm0 ^= xmm3 | ||
7960 | # asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1 | ||
7961 | # asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0 | ||
7962 | pxor %xmm3,%xmm0 | ||
7963 | |||
7964 | # qhasm: xmm0 &= xmm9 | ||
7965 | # asm 1: pand <xmm9=int6464#13,<xmm0=int6464#1 | ||
7966 | # asm 2: pand <xmm9=%xmm12,<xmm0=%xmm0 | ||
7967 | pand %xmm12,%xmm0 | ||
7968 | |||
7969 | # qhasm: xmm3 &= xmm13 | ||
7970 | # asm 1: pand <xmm13=int6464#16,<xmm3=int6464#4 | ||
7971 | # asm 2: pand <xmm13=%xmm15,<xmm3=%xmm3 | ||
7972 | pand %xmm15,%xmm3 | ||
7973 | |||
7974 | # qhasm: xmm0 ^= xmm3 | ||
7975 | # asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1 | ||
7976 | # asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0 | ||
7977 | pxor %xmm3,%xmm0 | ||
7978 | |||
7979 | # qhasm: xmm3 ^= xmm10 | ||
7980 | # asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4 | ||
7981 | # asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3 | ||
7982 | pxor %xmm10,%xmm3 | ||
7983 | |||
7984 | # qhasm: xmm6 ^= xmm12 | ||
7985 | # asm 1: pxor <xmm12=int6464#9,<xmm6=int6464#7 | ||
7986 | # asm 2: pxor <xmm12=%xmm8,<xmm6=%xmm6 | ||
7987 | pxor %xmm8,%xmm6 | ||
7988 | |||
7989 | # qhasm: xmm0 ^= xmm12 | ||
7990 | # asm 1: pxor <xmm12=int6464#9,<xmm0=int6464#1 | ||
7991 | # asm 2: pxor <xmm12=%xmm8,<xmm0=%xmm0 | ||
7992 | pxor %xmm8,%xmm0 | ||
7993 | |||
7994 | # qhasm: xmm5 ^= xmm8 | ||
7995 | # asm 1: pxor <xmm8=int6464#10,<xmm5=int6464#6 | ||
7996 | # asm 2: pxor <xmm8=%xmm9,<xmm5=%xmm5 | ||
7997 | pxor %xmm9,%xmm5 | ||
7998 | |||
7999 | # qhasm: xmm3 ^= xmm8 | ||
8000 | # asm 1: pxor <xmm8=int6464#10,<xmm3=int6464#4 | ||
8001 | # asm 2: pxor <xmm8=%xmm9,<xmm3=%xmm3 | ||
8002 | pxor %xmm9,%xmm3 | ||
8003 | |||
8004 | # qhasm: xmm12 = xmm7 | ||
8005 | # asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#9 | ||
8006 | # asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm8 | ||
8007 | movdqa %xmm7,%xmm8 | ||
8008 | |||
8009 | # qhasm: xmm8 = xmm1 | ||
8010 | # asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10 | ||
8011 | # asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9 | ||
8012 | movdqa %xmm1,%xmm9 | ||
8013 | |||
8014 | # qhasm: xmm12 ^= xmm4 | ||
8015 | # asm 1: pxor <xmm4=int6464#5,<xmm12=int6464#9 | ||
8016 | # asm 2: pxor <xmm4=%xmm4,<xmm12=%xmm8 | ||
8017 | pxor %xmm4,%xmm8 | ||
8018 | |||
8019 | # qhasm: xmm8 ^= xmm2 | ||
8020 | # asm 1: pxor <xmm2=int6464#3,<xmm8=int6464#10 | ||
8021 | # asm 2: pxor <xmm2=%xmm2,<xmm8=%xmm9 | ||
8022 | pxor %xmm2,%xmm9 | ||
8023 | |||
8024 | # qhasm: xmm11 = xmm15 | ||
8025 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
8026 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
8027 | movdqa %xmm13,%xmm10 | ||
8028 | |||
8029 | # qhasm: xmm11 ^= xmm14 | ||
8030 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
8031 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
8032 | pxor %xmm11,%xmm10 | ||
8033 | |||
8034 | # qhasm: xmm11 &= xmm12 | ||
8035 | # asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11 | ||
8036 | # asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10 | ||
8037 | pand %xmm8,%xmm10 | ||
8038 | |||
8039 | # qhasm: xmm12 ^= xmm8 | ||
8040 | # asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9 | ||
8041 | # asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8 | ||
8042 | pxor %xmm9,%xmm8 | ||
8043 | |||
8044 | # qhasm: xmm12 &= xmm14 | ||
8045 | # asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9 | ||
8046 | # asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8 | ||
8047 | pand %xmm11,%xmm8 | ||
8048 | |||
8049 | # qhasm: xmm8 &= xmm15 | ||
8050 | # asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10 | ||
8051 | # asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9 | ||
8052 | pand %xmm13,%xmm9 | ||
8053 | |||
8054 | # qhasm: xmm8 ^= xmm12 | ||
8055 | # asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10 | ||
8056 | # asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9 | ||
8057 | pxor %xmm8,%xmm9 | ||
8058 | |||
8059 | # qhasm: xmm12 ^= xmm11 | ||
8060 | # asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9 | ||
8061 | # asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8 | ||
8062 | pxor %xmm10,%xmm8 | ||
8063 | |||
8064 | # qhasm: xmm10 = xmm13 | ||
8065 | # asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11 | ||
8066 | # asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10 | ||
8067 | movdqa %xmm15,%xmm10 | ||
8068 | |||
8069 | # qhasm: xmm10 ^= xmm9 | ||
8070 | # asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11 | ||
8071 | # asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10 | ||
8072 | pxor %xmm12,%xmm10 | ||
8073 | |||
8074 | # qhasm: xmm10 &= xmm4 | ||
8075 | # asm 1: pand <xmm4=int6464#5,<xmm10=int6464#11 | ||
8076 | # asm 2: pand <xmm4=%xmm4,<xmm10=%xmm10 | ||
8077 | pand %xmm4,%xmm10 | ||
8078 | |||
8079 | # qhasm: xmm4 ^= xmm2 | ||
8080 | # asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5 | ||
8081 | # asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4 | ||
8082 | pxor %xmm2,%xmm4 | ||
8083 | |||
8084 | # qhasm: xmm4 &= xmm9 | ||
8085 | # asm 1: pand <xmm9=int6464#13,<xmm4=int6464#5 | ||
8086 | # asm 2: pand <xmm9=%xmm12,<xmm4=%xmm4 | ||
8087 | pand %xmm12,%xmm4 | ||
8088 | |||
8089 | # qhasm: xmm2 &= xmm13 | ||
8090 | # asm 1: pand <xmm13=int6464#16,<xmm2=int6464#3 | ||
8091 | # asm 2: pand <xmm13=%xmm15,<xmm2=%xmm2 | ||
8092 | pand %xmm15,%xmm2 | ||
8093 | |||
8094 | # qhasm: xmm4 ^= xmm2 | ||
8095 | # asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5 | ||
8096 | # asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4 | ||
8097 | pxor %xmm2,%xmm4 | ||
8098 | |||
8099 | # qhasm: xmm2 ^= xmm10 | ||
8100 | # asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3 | ||
8101 | # asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2 | ||
8102 | pxor %xmm10,%xmm2 | ||
8103 | |||
8104 | # qhasm: xmm15 ^= xmm13 | ||
8105 | # asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14 | ||
8106 | # asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13 | ||
8107 | pxor %xmm15,%xmm13 | ||
8108 | |||
8109 | # qhasm: xmm14 ^= xmm9 | ||
8110 | # asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12 | ||
8111 | # asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11 | ||
8112 | pxor %xmm12,%xmm11 | ||
8113 | |||
8114 | # qhasm: xmm11 = xmm15 | ||
8115 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
8116 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
8117 | movdqa %xmm13,%xmm10 | ||
8118 | |||
8119 | # qhasm: xmm11 ^= xmm14 | ||
8120 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
8121 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
8122 | pxor %xmm11,%xmm10 | ||
8123 | |||
8124 | # qhasm: xmm11 &= xmm7 | ||
8125 | # asm 1: pand <xmm7=int6464#8,<xmm11=int6464#11 | ||
8126 | # asm 2: pand <xmm7=%xmm7,<xmm11=%xmm10 | ||
8127 | pand %xmm7,%xmm10 | ||
8128 | |||
8129 | # qhasm: xmm7 ^= xmm1 | ||
8130 | # asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8 | ||
8131 | # asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7 | ||
8132 | pxor %xmm1,%xmm7 | ||
8133 | |||
8134 | # qhasm: xmm7 &= xmm14 | ||
8135 | # asm 1: pand <xmm14=int6464#12,<xmm7=int6464#8 | ||
8136 | # asm 2: pand <xmm14=%xmm11,<xmm7=%xmm7 | ||
8137 | pand %xmm11,%xmm7 | ||
8138 | |||
8139 | # qhasm: xmm1 &= xmm15 | ||
8140 | # asm 1: pand <xmm15=int6464#14,<xmm1=int6464#2 | ||
8141 | # asm 2: pand <xmm15=%xmm13,<xmm1=%xmm1 | ||
8142 | pand %xmm13,%xmm1 | ||
8143 | |||
8144 | # qhasm: xmm7 ^= xmm1 | ||
8145 | # asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8 | ||
8146 | # asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7 | ||
8147 | pxor %xmm1,%xmm7 | ||
8148 | |||
8149 | # qhasm: xmm1 ^= xmm11 | ||
8150 | # asm 1: pxor <xmm11=int6464#11,<xmm1=int6464#2 | ||
8151 | # asm 2: pxor <xmm11=%xmm10,<xmm1=%xmm1 | ||
8152 | pxor %xmm10,%xmm1 | ||
8153 | |||
8154 | # qhasm: xmm7 ^= xmm12 | ||
8155 | # asm 1: pxor <xmm12=int6464#9,<xmm7=int6464#8 | ||
8156 | # asm 2: pxor <xmm12=%xmm8,<xmm7=%xmm7 | ||
8157 | pxor %xmm8,%xmm7 | ||
8158 | |||
8159 | # qhasm: xmm4 ^= xmm12 | ||
8160 | # asm 1: pxor <xmm12=int6464#9,<xmm4=int6464#5 | ||
8161 | # asm 2: pxor <xmm12=%xmm8,<xmm4=%xmm4 | ||
8162 | pxor %xmm8,%xmm4 | ||
8163 | |||
8164 | # qhasm: xmm1 ^= xmm8 | ||
8165 | # asm 1: pxor <xmm8=int6464#10,<xmm1=int6464#2 | ||
8166 | # asm 2: pxor <xmm8=%xmm9,<xmm1=%xmm1 | ||
8167 | pxor %xmm9,%xmm1 | ||
8168 | |||
8169 | # qhasm: xmm2 ^= xmm8 | ||
8170 | # asm 1: pxor <xmm8=int6464#10,<xmm2=int6464#3 | ||
8171 | # asm 2: pxor <xmm8=%xmm9,<xmm2=%xmm2 | ||
8172 | pxor %xmm9,%xmm2 | ||
8173 | |||
8174 | # qhasm: xmm7 ^= xmm0 | ||
8175 | # asm 1: pxor <xmm0=int6464#1,<xmm7=int6464#8 | ||
8176 | # asm 2: pxor <xmm0=%xmm0,<xmm7=%xmm7 | ||
8177 | pxor %xmm0,%xmm7 | ||
8178 | |||
8179 | # qhasm: xmm1 ^= xmm6 | ||
8180 | # asm 1: pxor <xmm6=int6464#7,<xmm1=int6464#2 | ||
8181 | # asm 2: pxor <xmm6=%xmm6,<xmm1=%xmm1 | ||
8182 | pxor %xmm6,%xmm1 | ||
8183 | |||
8184 | # qhasm: xmm4 ^= xmm7 | ||
8185 | # asm 1: pxor <xmm7=int6464#8,<xmm4=int6464#5 | ||
8186 | # asm 2: pxor <xmm7=%xmm7,<xmm4=%xmm4 | ||
8187 | pxor %xmm7,%xmm4 | ||
8188 | |||
8189 | # qhasm: xmm6 ^= xmm0 | ||
8190 | # asm 1: pxor <xmm0=int6464#1,<xmm6=int6464#7 | ||
8191 | # asm 2: pxor <xmm0=%xmm0,<xmm6=%xmm6 | ||
8192 | pxor %xmm0,%xmm6 | ||
8193 | |||
8194 | # qhasm: xmm0 ^= xmm1 | ||
8195 | # asm 1: pxor <xmm1=int6464#2,<xmm0=int6464#1 | ||
8196 | # asm 2: pxor <xmm1=%xmm1,<xmm0=%xmm0 | ||
8197 | pxor %xmm1,%xmm0 | ||
8198 | |||
8199 | # qhasm: xmm1 ^= xmm5 | ||
8200 | # asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2 | ||
8201 | # asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1 | ||
8202 | pxor %xmm5,%xmm1 | ||
8203 | |||
8204 | # qhasm: xmm5 ^= xmm2 | ||
8205 | # asm 1: pxor <xmm2=int6464#3,<xmm5=int6464#6 | ||
8206 | # asm 2: pxor <xmm2=%xmm2,<xmm5=%xmm5 | ||
8207 | pxor %xmm2,%xmm5 | ||
8208 | |||
8209 | # qhasm: xmm4 ^= xmm5 | ||
8210 | # asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5 | ||
8211 | # asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4 | ||
8212 | pxor %xmm5,%xmm4 | ||
8213 | |||
8214 | # qhasm: xmm2 ^= xmm3 | ||
8215 | # asm 1: pxor <xmm3=int6464#4,<xmm2=int6464#3 | ||
8216 | # asm 2: pxor <xmm3=%xmm3,<xmm2=%xmm2 | ||
8217 | pxor %xmm3,%xmm2 | ||
8218 | |||
8219 | # qhasm: xmm3 ^= xmm5 | ||
8220 | # asm 1: pxor <xmm5=int6464#6,<xmm3=int6464#4 | ||
8221 | # asm 2: pxor <xmm5=%xmm5,<xmm3=%xmm3 | ||
8222 | pxor %xmm5,%xmm3 | ||
8223 | |||
8224 | # qhasm: xmm6 ^= xmm3 | ||
8225 | # asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7 | ||
8226 | # asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6 | ||
8227 | pxor %xmm3,%xmm6 | ||
8228 | |||
8229 | # qhasm: xmm8 = shuffle dwords of xmm0 by 0x93 | ||
8230 | # asm 1: pshufd $0x93,<xmm0=int6464#1,>xmm8=int6464#9 | ||
8231 | # asm 2: pshufd $0x93,<xmm0=%xmm0,>xmm8=%xmm8 | ||
8232 | pshufd $0x93,%xmm0,%xmm8 | ||
8233 | |||
8234 | # qhasm: xmm9 = shuffle dwords of xmm1 by 0x93 | ||
8235 | # asm 1: pshufd $0x93,<xmm1=int6464#2,>xmm9=int6464#10 | ||
8236 | # asm 2: pshufd $0x93,<xmm1=%xmm1,>xmm9=%xmm9 | ||
8237 | pshufd $0x93,%xmm1,%xmm9 | ||
8238 | |||
8239 | # qhasm: xmm10 = shuffle dwords of xmm4 by 0x93 | ||
8240 | # asm 1: pshufd $0x93,<xmm4=int6464#5,>xmm10=int6464#11 | ||
8241 | # asm 2: pshufd $0x93,<xmm4=%xmm4,>xmm10=%xmm10 | ||
8242 | pshufd $0x93,%xmm4,%xmm10 | ||
8243 | |||
8244 | # qhasm: xmm11 = shuffle dwords of xmm6 by 0x93 | ||
8245 | # asm 1: pshufd $0x93,<xmm6=int6464#7,>xmm11=int6464#12 | ||
8246 | # asm 2: pshufd $0x93,<xmm6=%xmm6,>xmm11=%xmm11 | ||
8247 | pshufd $0x93,%xmm6,%xmm11 | ||
8248 | |||
8249 | # qhasm: xmm12 = shuffle dwords of xmm3 by 0x93 | ||
8250 | # asm 1: pshufd $0x93,<xmm3=int6464#4,>xmm12=int6464#13 | ||
8251 | # asm 2: pshufd $0x93,<xmm3=%xmm3,>xmm12=%xmm12 | ||
8252 | pshufd $0x93,%xmm3,%xmm12 | ||
8253 | |||
8254 | # qhasm: xmm13 = shuffle dwords of xmm7 by 0x93 | ||
8255 | # asm 1: pshufd $0x93,<xmm7=int6464#8,>xmm13=int6464#14 | ||
8256 | # asm 2: pshufd $0x93,<xmm7=%xmm7,>xmm13=%xmm13 | ||
8257 | pshufd $0x93,%xmm7,%xmm13 | ||
8258 | |||
8259 | # qhasm: xmm14 = shuffle dwords of xmm2 by 0x93 | ||
8260 | # asm 1: pshufd $0x93,<xmm2=int6464#3,>xmm14=int6464#15 | ||
8261 | # asm 2: pshufd $0x93,<xmm2=%xmm2,>xmm14=%xmm14 | ||
8262 | pshufd $0x93,%xmm2,%xmm14 | ||
8263 | |||
8264 | # qhasm: xmm15 = shuffle dwords of xmm5 by 0x93 | ||
8265 | # asm 1: pshufd $0x93,<xmm5=int6464#6,>xmm15=int6464#16 | ||
8266 | # asm 2: pshufd $0x93,<xmm5=%xmm5,>xmm15=%xmm15 | ||
8267 | pshufd $0x93,%xmm5,%xmm15 | ||
8268 | |||
8269 | # qhasm: xmm0 ^= xmm8 | ||
8270 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
8271 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
8272 | pxor %xmm8,%xmm0 | ||
8273 | |||
8274 | # qhasm: xmm1 ^= xmm9 | ||
8275 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
8276 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
8277 | pxor %xmm9,%xmm1 | ||
8278 | |||
8279 | # qhasm: xmm4 ^= xmm10 | ||
8280 | # asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5 | ||
8281 | # asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4 | ||
8282 | pxor %xmm10,%xmm4 | ||
8283 | |||
8284 | # qhasm: xmm6 ^= xmm11 | ||
8285 | # asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7 | ||
8286 | # asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6 | ||
8287 | pxor %xmm11,%xmm6 | ||
8288 | |||
8289 | # qhasm: xmm3 ^= xmm12 | ||
8290 | # asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4 | ||
8291 | # asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3 | ||
8292 | pxor %xmm12,%xmm3 | ||
8293 | |||
8294 | # qhasm: xmm7 ^= xmm13 | ||
8295 | # asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8 | ||
8296 | # asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7 | ||
8297 | pxor %xmm13,%xmm7 | ||
8298 | |||
8299 | # qhasm: xmm2 ^= xmm14 | ||
8300 | # asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3 | ||
8301 | # asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2 | ||
8302 | pxor %xmm14,%xmm2 | ||
8303 | |||
8304 | # qhasm: xmm5 ^= xmm15 | ||
8305 | # asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6 | ||
8306 | # asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5 | ||
8307 | pxor %xmm15,%xmm5 | ||
8308 | |||
8309 | # qhasm: xmm8 ^= xmm5 | ||
8310 | # asm 1: pxor <xmm5=int6464#6,<xmm8=int6464#9 | ||
8311 | # asm 2: pxor <xmm5=%xmm5,<xmm8=%xmm8 | ||
8312 | pxor %xmm5,%xmm8 | ||
8313 | |||
8314 | # qhasm: xmm9 ^= xmm0 | ||
8315 | # asm 1: pxor <xmm0=int6464#1,<xmm9=int6464#10 | ||
8316 | # asm 2: pxor <xmm0=%xmm0,<xmm9=%xmm9 | ||
8317 | pxor %xmm0,%xmm9 | ||
8318 | |||
8319 | # qhasm: xmm10 ^= xmm1 | ||
8320 | # asm 1: pxor <xmm1=int6464#2,<xmm10=int6464#11 | ||
8321 | # asm 2: pxor <xmm1=%xmm1,<xmm10=%xmm10 | ||
8322 | pxor %xmm1,%xmm10 | ||
8323 | |||
8324 | # qhasm: xmm9 ^= xmm5 | ||
8325 | # asm 1: pxor <xmm5=int6464#6,<xmm9=int6464#10 | ||
8326 | # asm 2: pxor <xmm5=%xmm5,<xmm9=%xmm9 | ||
8327 | pxor %xmm5,%xmm9 | ||
8328 | |||
8329 | # qhasm: xmm11 ^= xmm4 | ||
8330 | # asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#12 | ||
8331 | # asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm11 | ||
8332 | pxor %xmm4,%xmm11 | ||
8333 | |||
8334 | # qhasm: xmm12 ^= xmm6 | ||
8335 | # asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#13 | ||
8336 | # asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm12 | ||
8337 | pxor %xmm6,%xmm12 | ||
8338 | |||
8339 | # qhasm: xmm13 ^= xmm3 | ||
8340 | # asm 1: pxor <xmm3=int6464#4,<xmm13=int6464#14 | ||
8341 | # asm 2: pxor <xmm3=%xmm3,<xmm13=%xmm13 | ||
8342 | pxor %xmm3,%xmm13 | ||
8343 | |||
8344 | # qhasm: xmm11 ^= xmm5 | ||
8345 | # asm 1: pxor <xmm5=int6464#6,<xmm11=int6464#12 | ||
8346 | # asm 2: pxor <xmm5=%xmm5,<xmm11=%xmm11 | ||
8347 | pxor %xmm5,%xmm11 | ||
8348 | |||
8349 | # qhasm: xmm14 ^= xmm7 | ||
8350 | # asm 1: pxor <xmm7=int6464#8,<xmm14=int6464#15 | ||
8351 | # asm 2: pxor <xmm7=%xmm7,<xmm14=%xmm14 | ||
8352 | pxor %xmm7,%xmm14 | ||
8353 | |||
8354 | # qhasm: xmm15 ^= xmm2 | ||
8355 | # asm 1: pxor <xmm2=int6464#3,<xmm15=int6464#16 | ||
8356 | # asm 2: pxor <xmm2=%xmm2,<xmm15=%xmm15 | ||
8357 | pxor %xmm2,%xmm15 | ||
8358 | |||
8359 | # qhasm: xmm12 ^= xmm5 | ||
8360 | # asm 1: pxor <xmm5=int6464#6,<xmm12=int6464#13 | ||
8361 | # asm 2: pxor <xmm5=%xmm5,<xmm12=%xmm12 | ||
8362 | pxor %xmm5,%xmm12 | ||
8363 | |||
8364 | # qhasm: xmm0 = shuffle dwords of xmm0 by 0x4E | ||
8365 | # asm 1: pshufd $0x4E,<xmm0=int6464#1,>xmm0=int6464#1 | ||
8366 | # asm 2: pshufd $0x4E,<xmm0=%xmm0,>xmm0=%xmm0 | ||
8367 | pshufd $0x4E,%xmm0,%xmm0 | ||
8368 | |||
8369 | # qhasm: xmm1 = shuffle dwords of xmm1 by 0x4E | ||
8370 | # asm 1: pshufd $0x4E,<xmm1=int6464#2,>xmm1=int6464#2 | ||
8371 | # asm 2: pshufd $0x4E,<xmm1=%xmm1,>xmm1=%xmm1 | ||
8372 | pshufd $0x4E,%xmm1,%xmm1 | ||
8373 | |||
8374 | # qhasm: xmm4 = shuffle dwords of xmm4 by 0x4E | ||
8375 | # asm 1: pshufd $0x4E,<xmm4=int6464#5,>xmm4=int6464#5 | ||
8376 | # asm 2: pshufd $0x4E,<xmm4=%xmm4,>xmm4=%xmm4 | ||
8377 | pshufd $0x4E,%xmm4,%xmm4 | ||
8378 | |||
8379 | # qhasm: xmm6 = shuffle dwords of xmm6 by 0x4E | ||
8380 | # asm 1: pshufd $0x4E,<xmm6=int6464#7,>xmm6=int6464#7 | ||
8381 | # asm 2: pshufd $0x4E,<xmm6=%xmm6,>xmm6=%xmm6 | ||
8382 | pshufd $0x4E,%xmm6,%xmm6 | ||
8383 | |||
8384 | # qhasm: xmm3 = shuffle dwords of xmm3 by 0x4E | ||
8385 | # asm 1: pshufd $0x4E,<xmm3=int6464#4,>xmm3=int6464#4 | ||
8386 | # asm 2: pshufd $0x4E,<xmm3=%xmm3,>xmm3=%xmm3 | ||
8387 | pshufd $0x4E,%xmm3,%xmm3 | ||
8388 | |||
8389 | # qhasm: xmm7 = shuffle dwords of xmm7 by 0x4E | ||
8390 | # asm 1: pshufd $0x4E,<xmm7=int6464#8,>xmm7=int6464#8 | ||
8391 | # asm 2: pshufd $0x4E,<xmm7=%xmm7,>xmm7=%xmm7 | ||
8392 | pshufd $0x4E,%xmm7,%xmm7 | ||
8393 | |||
8394 | # qhasm: xmm2 = shuffle dwords of xmm2 by 0x4E | ||
8395 | # asm 1: pshufd $0x4E,<xmm2=int6464#3,>xmm2=int6464#3 | ||
8396 | # asm 2: pshufd $0x4E,<xmm2=%xmm2,>xmm2=%xmm2 | ||
8397 | pshufd $0x4E,%xmm2,%xmm2 | ||
8398 | |||
8399 | # qhasm: xmm5 = shuffle dwords of xmm5 by 0x4E | ||
8400 | # asm 1: pshufd $0x4E,<xmm5=int6464#6,>xmm5=int6464#6 | ||
8401 | # asm 2: pshufd $0x4E,<xmm5=%xmm5,>xmm5=%xmm5 | ||
8402 | pshufd $0x4E,%xmm5,%xmm5 | ||
8403 | |||
8404 | # qhasm: xmm8 ^= xmm0 | ||
8405 | # asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9 | ||
8406 | # asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8 | ||
8407 | pxor %xmm0,%xmm8 | ||
8408 | |||
8409 | # qhasm: xmm9 ^= xmm1 | ||
8410 | # asm 1: pxor <xmm1=int6464#2,<xmm9=int6464#10 | ||
8411 | # asm 2: pxor <xmm1=%xmm1,<xmm9=%xmm9 | ||
8412 | pxor %xmm1,%xmm9 | ||
8413 | |||
8414 | # qhasm: xmm10 ^= xmm4 | ||
8415 | # asm 1: pxor <xmm4=int6464#5,<xmm10=int6464#11 | ||
8416 | # asm 2: pxor <xmm4=%xmm4,<xmm10=%xmm10 | ||
8417 | pxor %xmm4,%xmm10 | ||
8418 | |||
8419 | # qhasm: xmm11 ^= xmm6 | ||
8420 | # asm 1: pxor <xmm6=int6464#7,<xmm11=int6464#12 | ||
8421 | # asm 2: pxor <xmm6=%xmm6,<xmm11=%xmm11 | ||
8422 | pxor %xmm6,%xmm11 | ||
8423 | |||
8424 | # qhasm: xmm12 ^= xmm3 | ||
8425 | # asm 1: pxor <xmm3=int6464#4,<xmm12=int6464#13 | ||
8426 | # asm 2: pxor <xmm3=%xmm3,<xmm12=%xmm12 | ||
8427 | pxor %xmm3,%xmm12 | ||
8428 | |||
8429 | # qhasm: xmm13 ^= xmm7 | ||
8430 | # asm 1: pxor <xmm7=int6464#8,<xmm13=int6464#14 | ||
8431 | # asm 2: pxor <xmm7=%xmm7,<xmm13=%xmm13 | ||
8432 | pxor %xmm7,%xmm13 | ||
8433 | |||
8434 | # qhasm: xmm14 ^= xmm2 | ||
8435 | # asm 1: pxor <xmm2=int6464#3,<xmm14=int6464#15 | ||
8436 | # asm 2: pxor <xmm2=%xmm2,<xmm14=%xmm14 | ||
8437 | pxor %xmm2,%xmm14 | ||
8438 | |||
8439 | # qhasm: xmm15 ^= xmm5 | ||
8440 | # asm 1: pxor <xmm5=int6464#6,<xmm15=int6464#16 | ||
8441 | # asm 2: pxor <xmm5=%xmm5,<xmm15=%xmm15 | ||
8442 | pxor %xmm5,%xmm15 | ||
8443 | |||
8444 | # qhasm: xmm8 ^= *(int128 *)(c + 896) | ||
8445 | # asm 1: pxor 896(<c=int64#5),<xmm8=int6464#9 | ||
8446 | # asm 2: pxor 896(<c=%r8),<xmm8=%xmm8 | ||
8447 | pxor 896(%r8),%xmm8 | ||
8448 | |||
8449 | # qhasm: shuffle bytes of xmm8 by SR | ||
8450 | # asm 1: pshufb SR,<xmm8=int6464#9 | ||
8451 | # asm 2: pshufb SR,<xmm8=%xmm8 | ||
8452 | pshufb SR,%xmm8 | ||
8453 | |||
8454 | # qhasm: xmm9 ^= *(int128 *)(c + 912) | ||
8455 | # asm 1: pxor 912(<c=int64#5),<xmm9=int6464#10 | ||
8456 | # asm 2: pxor 912(<c=%r8),<xmm9=%xmm9 | ||
8457 | pxor 912(%r8),%xmm9 | ||
8458 | |||
8459 | # qhasm: shuffle bytes of xmm9 by SR | ||
8460 | # asm 1: pshufb SR,<xmm9=int6464#10 | ||
8461 | # asm 2: pshufb SR,<xmm9=%xmm9 | ||
8462 | pshufb SR,%xmm9 | ||
8463 | |||
8464 | # qhasm: xmm10 ^= *(int128 *)(c + 928) | ||
8465 | # asm 1: pxor 928(<c=int64#5),<xmm10=int6464#11 | ||
8466 | # asm 2: pxor 928(<c=%r8),<xmm10=%xmm10 | ||
8467 | pxor 928(%r8),%xmm10 | ||
8468 | |||
8469 | # qhasm: shuffle bytes of xmm10 by SR | ||
8470 | # asm 1: pshufb SR,<xmm10=int6464#11 | ||
8471 | # asm 2: pshufb SR,<xmm10=%xmm10 | ||
8472 | pshufb SR,%xmm10 | ||
8473 | |||
8474 | # qhasm: xmm11 ^= *(int128 *)(c + 944) | ||
8475 | # asm 1: pxor 944(<c=int64#5),<xmm11=int6464#12 | ||
8476 | # asm 2: pxor 944(<c=%r8),<xmm11=%xmm11 | ||
8477 | pxor 944(%r8),%xmm11 | ||
8478 | |||
8479 | # qhasm: shuffle bytes of xmm11 by SR | ||
8480 | # asm 1: pshufb SR,<xmm11=int6464#12 | ||
8481 | # asm 2: pshufb SR,<xmm11=%xmm11 | ||
8482 | pshufb SR,%xmm11 | ||
8483 | |||
8484 | # qhasm: xmm12 ^= *(int128 *)(c + 960) | ||
8485 | # asm 1: pxor 960(<c=int64#5),<xmm12=int6464#13 | ||
8486 | # asm 2: pxor 960(<c=%r8),<xmm12=%xmm12 | ||
8487 | pxor 960(%r8),%xmm12 | ||
8488 | |||
8489 | # qhasm: shuffle bytes of xmm12 by SR | ||
8490 | # asm 1: pshufb SR,<xmm12=int6464#13 | ||
8491 | # asm 2: pshufb SR,<xmm12=%xmm12 | ||
8492 | pshufb SR,%xmm12 | ||
8493 | |||
8494 | # qhasm: xmm13 ^= *(int128 *)(c + 976) | ||
8495 | # asm 1: pxor 976(<c=int64#5),<xmm13=int6464#14 | ||
8496 | # asm 2: pxor 976(<c=%r8),<xmm13=%xmm13 | ||
8497 | pxor 976(%r8),%xmm13 | ||
8498 | |||
8499 | # qhasm: shuffle bytes of xmm13 by SR | ||
8500 | # asm 1: pshufb SR,<xmm13=int6464#14 | ||
8501 | # asm 2: pshufb SR,<xmm13=%xmm13 | ||
8502 | pshufb SR,%xmm13 | ||
8503 | |||
8504 | # qhasm: xmm14 ^= *(int128 *)(c + 992) | ||
8505 | # asm 1: pxor 992(<c=int64#5),<xmm14=int6464#15 | ||
8506 | # asm 2: pxor 992(<c=%r8),<xmm14=%xmm14 | ||
8507 | pxor 992(%r8),%xmm14 | ||
8508 | |||
8509 | # qhasm: shuffle bytes of xmm14 by SR | ||
8510 | # asm 1: pshufb SR,<xmm14=int6464#15 | ||
8511 | # asm 2: pshufb SR,<xmm14=%xmm14 | ||
8512 | pshufb SR,%xmm14 | ||
8513 | |||
8514 | # qhasm: xmm15 ^= *(int128 *)(c + 1008) | ||
8515 | # asm 1: pxor 1008(<c=int64#5),<xmm15=int6464#16 | ||
8516 | # asm 2: pxor 1008(<c=%r8),<xmm15=%xmm15 | ||
8517 | pxor 1008(%r8),%xmm15 | ||
8518 | |||
8519 | # qhasm: shuffle bytes of xmm15 by SR | ||
8520 | # asm 1: pshufb SR,<xmm15=int6464#16 | ||
8521 | # asm 2: pshufb SR,<xmm15=%xmm15 | ||
8522 | pshufb SR,%xmm15 | ||
8523 | |||
8524 | # qhasm: xmm13 ^= xmm14 | ||
8525 | # asm 1: pxor <xmm14=int6464#15,<xmm13=int6464#14 | ||
8526 | # asm 2: pxor <xmm14=%xmm14,<xmm13=%xmm13 | ||
8527 | pxor %xmm14,%xmm13 | ||
8528 | |||
8529 | # qhasm: xmm10 ^= xmm9 | ||
8530 | # asm 1: pxor <xmm9=int6464#10,<xmm10=int6464#11 | ||
8531 | # asm 2: pxor <xmm9=%xmm9,<xmm10=%xmm10 | ||
8532 | pxor %xmm9,%xmm10 | ||
8533 | |||
8534 | # qhasm: xmm13 ^= xmm8 | ||
8535 | # asm 1: pxor <xmm8=int6464#9,<xmm13=int6464#14 | ||
8536 | # asm 2: pxor <xmm8=%xmm8,<xmm13=%xmm13 | ||
8537 | pxor %xmm8,%xmm13 | ||
8538 | |||
8539 | # qhasm: xmm14 ^= xmm10 | ||
8540 | # asm 1: pxor <xmm10=int6464#11,<xmm14=int6464#15 | ||
8541 | # asm 2: pxor <xmm10=%xmm10,<xmm14=%xmm14 | ||
8542 | pxor %xmm10,%xmm14 | ||
8543 | |||
8544 | # qhasm: xmm11 ^= xmm8 | ||
8545 | # asm 1: pxor <xmm8=int6464#9,<xmm11=int6464#12 | ||
8546 | # asm 2: pxor <xmm8=%xmm8,<xmm11=%xmm11 | ||
8547 | pxor %xmm8,%xmm11 | ||
8548 | |||
8549 | # qhasm: xmm14 ^= xmm11 | ||
8550 | # asm 1: pxor <xmm11=int6464#12,<xmm14=int6464#15 | ||
8551 | # asm 2: pxor <xmm11=%xmm11,<xmm14=%xmm14 | ||
8552 | pxor %xmm11,%xmm14 | ||
8553 | |||
8554 | # qhasm: xmm11 ^= xmm15 | ||
8555 | # asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#12 | ||
8556 | # asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm11 | ||
8557 | pxor %xmm15,%xmm11 | ||
8558 | |||
8559 | # qhasm: xmm11 ^= xmm12 | ||
8560 | # asm 1: pxor <xmm12=int6464#13,<xmm11=int6464#12 | ||
8561 | # asm 2: pxor <xmm12=%xmm12,<xmm11=%xmm11 | ||
8562 | pxor %xmm12,%xmm11 | ||
8563 | |||
8564 | # qhasm: xmm15 ^= xmm13 | ||
8565 | # asm 1: pxor <xmm13=int6464#14,<xmm15=int6464#16 | ||
8566 | # asm 2: pxor <xmm13=%xmm13,<xmm15=%xmm15 | ||
8567 | pxor %xmm13,%xmm15 | ||
8568 | |||
8569 | # qhasm: xmm11 ^= xmm9 | ||
8570 | # asm 1: pxor <xmm9=int6464#10,<xmm11=int6464#12 | ||
8571 | # asm 2: pxor <xmm9=%xmm9,<xmm11=%xmm11 | ||
8572 | pxor %xmm9,%xmm11 | ||
8573 | |||
8574 | # qhasm: xmm12 ^= xmm13 | ||
8575 | # asm 1: pxor <xmm13=int6464#14,<xmm12=int6464#13 | ||
8576 | # asm 2: pxor <xmm13=%xmm13,<xmm12=%xmm12 | ||
8577 | pxor %xmm13,%xmm12 | ||
8578 | |||
8579 | # qhasm: xmm10 ^= xmm15 | ||
8580 | # asm 1: pxor <xmm15=int6464#16,<xmm10=int6464#11 | ||
8581 | # asm 2: pxor <xmm15=%xmm15,<xmm10=%xmm10 | ||
8582 | pxor %xmm15,%xmm10 | ||
8583 | |||
8584 | # qhasm: xmm9 ^= xmm13 | ||
8585 | # asm 1: pxor <xmm13=int6464#14,<xmm9=int6464#10 | ||
8586 | # asm 2: pxor <xmm13=%xmm13,<xmm9=%xmm9 | ||
8587 | pxor %xmm13,%xmm9 | ||
8588 | |||
8589 | # qhasm: xmm3 = xmm15 | ||
8590 | # asm 1: movdqa <xmm15=int6464#16,>xmm3=int6464#1 | ||
8591 | # asm 2: movdqa <xmm15=%xmm15,>xmm3=%xmm0 | ||
8592 | movdqa %xmm15,%xmm0 | ||
8593 | |||
8594 | # qhasm: xmm2 = xmm9 | ||
8595 | # asm 1: movdqa <xmm9=int6464#10,>xmm2=int6464#2 | ||
8596 | # asm 2: movdqa <xmm9=%xmm9,>xmm2=%xmm1 | ||
8597 | movdqa %xmm9,%xmm1 | ||
8598 | |||
8599 | # qhasm: xmm1 = xmm13 | ||
8600 | # asm 1: movdqa <xmm13=int6464#14,>xmm1=int6464#3 | ||
8601 | # asm 2: movdqa <xmm13=%xmm13,>xmm1=%xmm2 | ||
8602 | movdqa %xmm13,%xmm2 | ||
8603 | |||
8604 | # qhasm: xmm5 = xmm10 | ||
8605 | # asm 1: movdqa <xmm10=int6464#11,>xmm5=int6464#4 | ||
8606 | # asm 2: movdqa <xmm10=%xmm10,>xmm5=%xmm3 | ||
8607 | movdqa %xmm10,%xmm3 | ||
8608 | |||
8609 | # qhasm: xmm4 = xmm14 | ||
8610 | # asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#5 | ||
8611 | # asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm4 | ||
8612 | movdqa %xmm14,%xmm4 | ||
8613 | |||
8614 | # qhasm: xmm3 ^= xmm12 | ||
8615 | # asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#1 | ||
8616 | # asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm0 | ||
8617 | pxor %xmm12,%xmm0 | ||
8618 | |||
8619 | # qhasm: xmm2 ^= xmm10 | ||
8620 | # asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#2 | ||
8621 | # asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm1 | ||
8622 | pxor %xmm10,%xmm1 | ||
8623 | |||
8624 | # qhasm: xmm1 ^= xmm11 | ||
8625 | # asm 1: pxor <xmm11=int6464#12,<xmm1=int6464#3 | ||
8626 | # asm 2: pxor <xmm11=%xmm11,<xmm1=%xmm2 | ||
8627 | pxor %xmm11,%xmm2 | ||
8628 | |||
8629 | # qhasm: xmm5 ^= xmm12 | ||
8630 | # asm 1: pxor <xmm12=int6464#13,<xmm5=int6464#4 | ||
8631 | # asm 2: pxor <xmm12=%xmm12,<xmm5=%xmm3 | ||
8632 | pxor %xmm12,%xmm3 | ||
8633 | |||
8634 | # qhasm: xmm4 ^= xmm8 | ||
8635 | # asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#5 | ||
8636 | # asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm4 | ||
8637 | pxor %xmm8,%xmm4 | ||
8638 | |||
8639 | # qhasm: xmm6 = xmm3 | ||
8640 | # asm 1: movdqa <xmm3=int6464#1,>xmm6=int6464#6 | ||
8641 | # asm 2: movdqa <xmm3=%xmm0,>xmm6=%xmm5 | ||
8642 | movdqa %xmm0,%xmm5 | ||
8643 | |||
8644 | # qhasm: xmm0 = xmm2 | ||
8645 | # asm 1: movdqa <xmm2=int6464#2,>xmm0=int6464#7 | ||
8646 | # asm 2: movdqa <xmm2=%xmm1,>xmm0=%xmm6 | ||
8647 | movdqa %xmm1,%xmm6 | ||
8648 | |||
8649 | # qhasm: xmm7 = xmm3 | ||
8650 | # asm 1: movdqa <xmm3=int6464#1,>xmm7=int6464#8 | ||
8651 | # asm 2: movdqa <xmm3=%xmm0,>xmm7=%xmm7 | ||
8652 | movdqa %xmm0,%xmm7 | ||
8653 | |||
8654 | # qhasm: xmm2 |= xmm1 | ||
8655 | # asm 1: por <xmm1=int6464#3,<xmm2=int6464#2 | ||
8656 | # asm 2: por <xmm1=%xmm2,<xmm2=%xmm1 | ||
8657 | por %xmm2,%xmm1 | ||
8658 | |||
8659 | # qhasm: xmm3 |= xmm4 | ||
8660 | # asm 1: por <xmm4=int6464#5,<xmm3=int6464#1 | ||
8661 | # asm 2: por <xmm4=%xmm4,<xmm3=%xmm0 | ||
8662 | por %xmm4,%xmm0 | ||
8663 | |||
8664 | # qhasm: xmm7 ^= xmm0 | ||
8665 | # asm 1: pxor <xmm0=int6464#7,<xmm7=int6464#8 | ||
8666 | # asm 2: pxor <xmm0=%xmm6,<xmm7=%xmm7 | ||
8667 | pxor %xmm6,%xmm7 | ||
8668 | |||
8669 | # qhasm: xmm6 &= xmm4 | ||
8670 | # asm 1: pand <xmm4=int6464#5,<xmm6=int6464#6 | ||
8671 | # asm 2: pand <xmm4=%xmm4,<xmm6=%xmm5 | ||
8672 | pand %xmm4,%xmm5 | ||
8673 | |||
8674 | # qhasm: xmm0 &= xmm1 | ||
8675 | # asm 1: pand <xmm1=int6464#3,<xmm0=int6464#7 | ||
8676 | # asm 2: pand <xmm1=%xmm2,<xmm0=%xmm6 | ||
8677 | pand %xmm2,%xmm6 | ||
8678 | |||
8679 | # qhasm: xmm4 ^= xmm1 | ||
8680 | # asm 1: pxor <xmm1=int6464#3,<xmm4=int6464#5 | ||
8681 | # asm 2: pxor <xmm1=%xmm2,<xmm4=%xmm4 | ||
8682 | pxor %xmm2,%xmm4 | ||
8683 | |||
8684 | # qhasm: xmm7 &= xmm4 | ||
8685 | # asm 1: pand <xmm4=int6464#5,<xmm7=int6464#8 | ||
8686 | # asm 2: pand <xmm4=%xmm4,<xmm7=%xmm7 | ||
8687 | pand %xmm4,%xmm7 | ||
8688 | |||
8689 | # qhasm: xmm4 = xmm11 | ||
8690 | # asm 1: movdqa <xmm11=int6464#12,>xmm4=int6464#3 | ||
8691 | # asm 2: movdqa <xmm11=%xmm11,>xmm4=%xmm2 | ||
8692 | movdqa %xmm11,%xmm2 | ||
8693 | |||
8694 | # qhasm: xmm4 ^= xmm8 | ||
8695 | # asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#3 | ||
8696 | # asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm2 | ||
8697 | pxor %xmm8,%xmm2 | ||
8698 | |||
8699 | # qhasm: xmm5 &= xmm4 | ||
8700 | # asm 1: pand <xmm4=int6464#3,<xmm5=int6464#4 | ||
8701 | # asm 2: pand <xmm4=%xmm2,<xmm5=%xmm3 | ||
8702 | pand %xmm2,%xmm3 | ||
8703 | |||
8704 | # qhasm: xmm3 ^= xmm5 | ||
8705 | # asm 1: pxor <xmm5=int6464#4,<xmm3=int6464#1 | ||
8706 | # asm 2: pxor <xmm5=%xmm3,<xmm3=%xmm0 | ||
8707 | pxor %xmm3,%xmm0 | ||
8708 | |||
8709 | # qhasm: xmm2 ^= xmm5 | ||
8710 | # asm 1: pxor <xmm5=int6464#4,<xmm2=int6464#2 | ||
8711 | # asm 2: pxor <xmm5=%xmm3,<xmm2=%xmm1 | ||
8712 | pxor %xmm3,%xmm1 | ||
8713 | |||
8714 | # qhasm: xmm5 = xmm15 | ||
8715 | # asm 1: movdqa <xmm15=int6464#16,>xmm5=int6464#3 | ||
8716 | # asm 2: movdqa <xmm15=%xmm15,>xmm5=%xmm2 | ||
8717 | movdqa %xmm15,%xmm2 | ||
8718 | |||
8719 | # qhasm: xmm5 ^= xmm9 | ||
8720 | # asm 1: pxor <xmm9=int6464#10,<xmm5=int6464#3 | ||
8721 | # asm 2: pxor <xmm9=%xmm9,<xmm5=%xmm2 | ||
8722 | pxor %xmm9,%xmm2 | ||
8723 | |||
8724 | # qhasm: xmm4 = xmm13 | ||
8725 | # asm 1: movdqa <xmm13=int6464#14,>xmm4=int6464#4 | ||
8726 | # asm 2: movdqa <xmm13=%xmm13,>xmm4=%xmm3 | ||
8727 | movdqa %xmm13,%xmm3 | ||
8728 | |||
8729 | # qhasm: xmm1 = xmm5 | ||
8730 | # asm 1: movdqa <xmm5=int6464#3,>xmm1=int6464#5 | ||
8731 | # asm 2: movdqa <xmm5=%xmm2,>xmm1=%xmm4 | ||
8732 | movdqa %xmm2,%xmm4 | ||
8733 | |||
8734 | # qhasm: xmm4 ^= xmm14 | ||
8735 | # asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#4 | ||
8736 | # asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm3 | ||
8737 | pxor %xmm14,%xmm3 | ||
8738 | |||
8739 | # qhasm: xmm1 |= xmm4 | ||
8740 | # asm 1: por <xmm4=int6464#4,<xmm1=int6464#5 | ||
8741 | # asm 2: por <xmm4=%xmm3,<xmm1=%xmm4 | ||
8742 | por %xmm3,%xmm4 | ||
8743 | |||
8744 | # qhasm: xmm5 &= xmm4 | ||
8745 | # asm 1: pand <xmm4=int6464#4,<xmm5=int6464#3 | ||
8746 | # asm 2: pand <xmm4=%xmm3,<xmm5=%xmm2 | ||
8747 | pand %xmm3,%xmm2 | ||
8748 | |||
8749 | # qhasm: xmm0 ^= xmm5 | ||
8750 | # asm 1: pxor <xmm5=int6464#3,<xmm0=int6464#7 | ||
8751 | # asm 2: pxor <xmm5=%xmm2,<xmm0=%xmm6 | ||
8752 | pxor %xmm2,%xmm6 | ||
8753 | |||
8754 | # qhasm: xmm3 ^= xmm7 | ||
8755 | # asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#1 | ||
8756 | # asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm0 | ||
8757 | pxor %xmm7,%xmm0 | ||
8758 | |||
8759 | # qhasm: xmm2 ^= xmm6 | ||
8760 | # asm 1: pxor <xmm6=int6464#6,<xmm2=int6464#2 | ||
8761 | # asm 2: pxor <xmm6=%xmm5,<xmm2=%xmm1 | ||
8762 | pxor %xmm5,%xmm1 | ||
8763 | |||
8764 | # qhasm: xmm1 ^= xmm7 | ||
8765 | # asm 1: pxor <xmm7=int6464#8,<xmm1=int6464#5 | ||
8766 | # asm 2: pxor <xmm7=%xmm7,<xmm1=%xmm4 | ||
8767 | pxor %xmm7,%xmm4 | ||
8768 | |||
8769 | # qhasm: xmm0 ^= xmm6 | ||
8770 | # asm 1: pxor <xmm6=int6464#6,<xmm0=int6464#7 | ||
8771 | # asm 2: pxor <xmm6=%xmm5,<xmm0=%xmm6 | ||
8772 | pxor %xmm5,%xmm6 | ||
8773 | |||
8774 | # qhasm: xmm1 ^= xmm6 | ||
8775 | # asm 1: pxor <xmm6=int6464#6,<xmm1=int6464#5 | ||
8776 | # asm 2: pxor <xmm6=%xmm5,<xmm1=%xmm4 | ||
8777 | pxor %xmm5,%xmm4 | ||
8778 | |||
8779 | # qhasm: xmm4 = xmm10 | ||
8780 | # asm 1: movdqa <xmm10=int6464#11,>xmm4=int6464#3 | ||
8781 | # asm 2: movdqa <xmm10=%xmm10,>xmm4=%xmm2 | ||
8782 | movdqa %xmm10,%xmm2 | ||
8783 | |||
8784 | # qhasm: xmm5 = xmm12 | ||
8785 | # asm 1: movdqa <xmm12=int6464#13,>xmm5=int6464#4 | ||
8786 | # asm 2: movdqa <xmm12=%xmm12,>xmm5=%xmm3 | ||
8787 | movdqa %xmm12,%xmm3 | ||
8788 | |||
8789 | # qhasm: xmm6 = xmm9 | ||
8790 | # asm 1: movdqa <xmm9=int6464#10,>xmm6=int6464#6 | ||
8791 | # asm 2: movdqa <xmm9=%xmm9,>xmm6=%xmm5 | ||
8792 | movdqa %xmm9,%xmm5 | ||
8793 | |||
8794 | # qhasm: xmm7 = xmm15 | ||
8795 | # asm 1: movdqa <xmm15=int6464#16,>xmm7=int6464#8 | ||
8796 | # asm 2: movdqa <xmm15=%xmm15,>xmm7=%xmm7 | ||
8797 | movdqa %xmm15,%xmm7 | ||
8798 | |||
8799 | # qhasm: xmm4 &= xmm11 | ||
8800 | # asm 1: pand <xmm11=int6464#12,<xmm4=int6464#3 | ||
8801 | # asm 2: pand <xmm11=%xmm11,<xmm4=%xmm2 | ||
8802 | pand %xmm11,%xmm2 | ||
8803 | |||
8804 | # qhasm: xmm5 &= xmm8 | ||
8805 | # asm 1: pand <xmm8=int6464#9,<xmm5=int6464#4 | ||
8806 | # asm 2: pand <xmm8=%xmm8,<xmm5=%xmm3 | ||
8807 | pand %xmm8,%xmm3 | ||
8808 | |||
8809 | # qhasm: xmm6 &= xmm13 | ||
8810 | # asm 1: pand <xmm13=int6464#14,<xmm6=int6464#6 | ||
8811 | # asm 2: pand <xmm13=%xmm13,<xmm6=%xmm5 | ||
8812 | pand %xmm13,%xmm5 | ||
8813 | |||
8814 | # qhasm: xmm7 |= xmm14 | ||
8815 | # asm 1: por <xmm14=int6464#15,<xmm7=int6464#8 | ||
8816 | # asm 2: por <xmm14=%xmm14,<xmm7=%xmm7 | ||
8817 | por %xmm14,%xmm7 | ||
8818 | |||
8819 | # qhasm: xmm3 ^= xmm4 | ||
8820 | # asm 1: pxor <xmm4=int6464#3,<xmm3=int6464#1 | ||
8821 | # asm 2: pxor <xmm4=%xmm2,<xmm3=%xmm0 | ||
8822 | pxor %xmm2,%xmm0 | ||
8823 | |||
8824 | # qhasm: xmm2 ^= xmm5 | ||
8825 | # asm 1: pxor <xmm5=int6464#4,<xmm2=int6464#2 | ||
8826 | # asm 2: pxor <xmm5=%xmm3,<xmm2=%xmm1 | ||
8827 | pxor %xmm3,%xmm1 | ||
8828 | |||
8829 | # qhasm: xmm1 ^= xmm6 | ||
8830 | # asm 1: pxor <xmm6=int6464#6,<xmm1=int6464#5 | ||
8831 | # asm 2: pxor <xmm6=%xmm5,<xmm1=%xmm4 | ||
8832 | pxor %xmm5,%xmm4 | ||
8833 | |||
8834 | # qhasm: xmm0 ^= xmm7 | ||
8835 | # asm 1: pxor <xmm7=int6464#8,<xmm0=int6464#7 | ||
8836 | # asm 2: pxor <xmm7=%xmm7,<xmm0=%xmm6 | ||
8837 | pxor %xmm7,%xmm6 | ||
8838 | |||
8839 | # qhasm: xmm4 = xmm3 | ||
8840 | # asm 1: movdqa <xmm3=int6464#1,>xmm4=int6464#3 | ||
8841 | # asm 2: movdqa <xmm3=%xmm0,>xmm4=%xmm2 | ||
8842 | movdqa %xmm0,%xmm2 | ||
8843 | |||
8844 | # qhasm: xmm4 ^= xmm2 | ||
8845 | # asm 1: pxor <xmm2=int6464#2,<xmm4=int6464#3 | ||
8846 | # asm 2: pxor <xmm2=%xmm1,<xmm4=%xmm2 | ||
8847 | pxor %xmm1,%xmm2 | ||
8848 | |||
8849 | # qhasm: xmm3 &= xmm1 | ||
8850 | # asm 1: pand <xmm1=int6464#5,<xmm3=int6464#1 | ||
8851 | # asm 2: pand <xmm1=%xmm4,<xmm3=%xmm0 | ||
8852 | pand %xmm4,%xmm0 | ||
8853 | |||
8854 | # qhasm: xmm6 = xmm0 | ||
8855 | # asm 1: movdqa <xmm0=int6464#7,>xmm6=int6464#4 | ||
8856 | # asm 2: movdqa <xmm0=%xmm6,>xmm6=%xmm3 | ||
8857 | movdqa %xmm6,%xmm3 | ||
8858 | |||
8859 | # qhasm: xmm6 ^= xmm3 | ||
8860 | # asm 1: pxor <xmm3=int6464#1,<xmm6=int6464#4 | ||
8861 | # asm 2: pxor <xmm3=%xmm0,<xmm6=%xmm3 | ||
8862 | pxor %xmm0,%xmm3 | ||
8863 | |||
8864 | # qhasm: xmm7 = xmm4 | ||
8865 | # asm 1: movdqa <xmm4=int6464#3,>xmm7=int6464#6 | ||
8866 | # asm 2: movdqa <xmm4=%xmm2,>xmm7=%xmm5 | ||
8867 | movdqa %xmm2,%xmm5 | ||
8868 | |||
8869 | # qhasm: xmm7 &= xmm6 | ||
8870 | # asm 1: pand <xmm6=int6464#4,<xmm7=int6464#6 | ||
8871 | # asm 2: pand <xmm6=%xmm3,<xmm7=%xmm5 | ||
8872 | pand %xmm3,%xmm5 | ||
8873 | |||
8874 | # qhasm: xmm7 ^= xmm2 | ||
8875 | # asm 1: pxor <xmm2=int6464#2,<xmm7=int6464#6 | ||
8876 | # asm 2: pxor <xmm2=%xmm1,<xmm7=%xmm5 | ||
8877 | pxor %xmm1,%xmm5 | ||
8878 | |||
8879 | # qhasm: xmm5 = xmm1 | ||
8880 | # asm 1: movdqa <xmm1=int6464#5,>xmm5=int6464#8 | ||
8881 | # asm 2: movdqa <xmm1=%xmm4,>xmm5=%xmm7 | ||
8882 | movdqa %xmm4,%xmm7 | ||
8883 | |||
8884 | # qhasm: xmm5 ^= xmm0 | ||
8885 | # asm 1: pxor <xmm0=int6464#7,<xmm5=int6464#8 | ||
8886 | # asm 2: pxor <xmm0=%xmm6,<xmm5=%xmm7 | ||
8887 | pxor %xmm6,%xmm7 | ||
8888 | |||
8889 | # qhasm: xmm3 ^= xmm2 | ||
8890 | # asm 1: pxor <xmm2=int6464#2,<xmm3=int6464#1 | ||
8891 | # asm 2: pxor <xmm2=%xmm1,<xmm3=%xmm0 | ||
8892 | pxor %xmm1,%xmm0 | ||
8893 | |||
8894 | # qhasm: xmm5 &= xmm3 | ||
8895 | # asm 1: pand <xmm3=int6464#1,<xmm5=int6464#8 | ||
8896 | # asm 2: pand <xmm3=%xmm0,<xmm5=%xmm7 | ||
8897 | pand %xmm0,%xmm7 | ||
8898 | |||
8899 | # qhasm: xmm5 ^= xmm0 | ||
8900 | # asm 1: pxor <xmm0=int6464#7,<xmm5=int6464#8 | ||
8901 | # asm 2: pxor <xmm0=%xmm6,<xmm5=%xmm7 | ||
8902 | pxor %xmm6,%xmm7 | ||
8903 | |||
8904 | # qhasm: xmm1 ^= xmm5 | ||
8905 | # asm 1: pxor <xmm5=int6464#8,<xmm1=int6464#5 | ||
8906 | # asm 2: pxor <xmm5=%xmm7,<xmm1=%xmm4 | ||
8907 | pxor %xmm7,%xmm4 | ||
8908 | |||
8909 | # qhasm: xmm2 = xmm6 | ||
8910 | # asm 1: movdqa <xmm6=int6464#4,>xmm2=int6464#1 | ||
8911 | # asm 2: movdqa <xmm6=%xmm3,>xmm2=%xmm0 | ||
8912 | movdqa %xmm3,%xmm0 | ||
8913 | |||
8914 | # qhasm: xmm2 ^= xmm5 | ||
8915 | # asm 1: pxor <xmm5=int6464#8,<xmm2=int6464#1 | ||
8916 | # asm 2: pxor <xmm5=%xmm7,<xmm2=%xmm0 | ||
8917 | pxor %xmm7,%xmm0 | ||
8918 | |||
8919 | # qhasm: xmm2 &= xmm0 | ||
8920 | # asm 1: pand <xmm0=int6464#7,<xmm2=int6464#1 | ||
8921 | # asm 2: pand <xmm0=%xmm6,<xmm2=%xmm0 | ||
8922 | pand %xmm6,%xmm0 | ||
8923 | |||
8924 | # qhasm: xmm1 ^= xmm2 | ||
8925 | # asm 1: pxor <xmm2=int6464#1,<xmm1=int6464#5 | ||
8926 | # asm 2: pxor <xmm2=%xmm0,<xmm1=%xmm4 | ||
8927 | pxor %xmm0,%xmm4 | ||
8928 | |||
8929 | # qhasm: xmm6 ^= xmm2 | ||
8930 | # asm 1: pxor <xmm2=int6464#1,<xmm6=int6464#4 | ||
8931 | # asm 2: pxor <xmm2=%xmm0,<xmm6=%xmm3 | ||
8932 | pxor %xmm0,%xmm3 | ||
8933 | |||
8934 | # qhasm: xmm6 &= xmm7 | ||
8935 | # asm 1: pand <xmm7=int6464#6,<xmm6=int6464#4 | ||
8936 | # asm 2: pand <xmm7=%xmm5,<xmm6=%xmm3 | ||
8937 | pand %xmm5,%xmm3 | ||
8938 | |||
8939 | # qhasm: xmm6 ^= xmm4 | ||
8940 | # asm 1: pxor <xmm4=int6464#3,<xmm6=int6464#4 | ||
8941 | # asm 2: pxor <xmm4=%xmm2,<xmm6=%xmm3 | ||
8942 | pxor %xmm2,%xmm3 | ||
8943 | |||
8944 | # qhasm: xmm4 = xmm14 | ||
8945 | # asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#1 | ||
8946 | # asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm0 | ||
8947 | movdqa %xmm14,%xmm0 | ||
8948 | |||
8949 | # qhasm: xmm0 = xmm13 | ||
8950 | # asm 1: movdqa <xmm13=int6464#14,>xmm0=int6464#2 | ||
8951 | # asm 2: movdqa <xmm13=%xmm13,>xmm0=%xmm1 | ||
8952 | movdqa %xmm13,%xmm1 | ||
8953 | |||
8954 | # qhasm: xmm2 = xmm7 | ||
8955 | # asm 1: movdqa <xmm7=int6464#6,>xmm2=int6464#3 | ||
8956 | # asm 2: movdqa <xmm7=%xmm5,>xmm2=%xmm2 | ||
8957 | movdqa %xmm5,%xmm2 | ||
8958 | |||
8959 | # qhasm: xmm2 ^= xmm6 | ||
8960 | # asm 1: pxor <xmm6=int6464#4,<xmm2=int6464#3 | ||
8961 | # asm 2: pxor <xmm6=%xmm3,<xmm2=%xmm2 | ||
8962 | pxor %xmm3,%xmm2 | ||
8963 | |||
8964 | # qhasm: xmm2 &= xmm14 | ||
8965 | # asm 1: pand <xmm14=int6464#15,<xmm2=int6464#3 | ||
8966 | # asm 2: pand <xmm14=%xmm14,<xmm2=%xmm2 | ||
8967 | pand %xmm14,%xmm2 | ||
8968 | |||
8969 | # qhasm: xmm14 ^= xmm13 | ||
8970 | # asm 1: pxor <xmm13=int6464#14,<xmm14=int6464#15 | ||
8971 | # asm 2: pxor <xmm13=%xmm13,<xmm14=%xmm14 | ||
8972 | pxor %xmm13,%xmm14 | ||
8973 | |||
8974 | # qhasm: xmm14 &= xmm6 | ||
8975 | # asm 1: pand <xmm6=int6464#4,<xmm14=int6464#15 | ||
8976 | # asm 2: pand <xmm6=%xmm3,<xmm14=%xmm14 | ||
8977 | pand %xmm3,%xmm14 | ||
8978 | |||
8979 | # qhasm: xmm13 &= xmm7 | ||
8980 | # asm 1: pand <xmm7=int6464#6,<xmm13=int6464#14 | ||
8981 | # asm 2: pand <xmm7=%xmm5,<xmm13=%xmm13 | ||
8982 | pand %xmm5,%xmm13 | ||
8983 | |||
8984 | # qhasm: xmm14 ^= xmm13 | ||
8985 | # asm 1: pxor <xmm13=int6464#14,<xmm14=int6464#15 | ||
8986 | # asm 2: pxor <xmm13=%xmm13,<xmm14=%xmm14 | ||
8987 | pxor %xmm13,%xmm14 | ||
8988 | |||
8989 | # qhasm: xmm13 ^= xmm2 | ||
8990 | # asm 1: pxor <xmm2=int6464#3,<xmm13=int6464#14 | ||
8991 | # asm 2: pxor <xmm2=%xmm2,<xmm13=%xmm13 | ||
8992 | pxor %xmm2,%xmm13 | ||
8993 | |||
8994 | # qhasm: xmm4 ^= xmm8 | ||
8995 | # asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#1 | ||
8996 | # asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm0 | ||
8997 | pxor %xmm8,%xmm0 | ||
8998 | |||
8999 | # qhasm: xmm0 ^= xmm11 | ||
9000 | # asm 1: pxor <xmm11=int6464#12,<xmm0=int6464#2 | ||
9001 | # asm 2: pxor <xmm11=%xmm11,<xmm0=%xmm1 | ||
9002 | pxor %xmm11,%xmm1 | ||
9003 | |||
9004 | # qhasm: xmm7 ^= xmm5 | ||
9005 | # asm 1: pxor <xmm5=int6464#8,<xmm7=int6464#6 | ||
9006 | # asm 2: pxor <xmm5=%xmm7,<xmm7=%xmm5 | ||
9007 | pxor %xmm7,%xmm5 | ||
9008 | |||
9009 | # qhasm: xmm6 ^= xmm1 | ||
9010 | # asm 1: pxor <xmm1=int6464#5,<xmm6=int6464#4 | ||
9011 | # asm 2: pxor <xmm1=%xmm4,<xmm6=%xmm3 | ||
9012 | pxor %xmm4,%xmm3 | ||
9013 | |||
9014 | # qhasm: xmm3 = xmm7 | ||
9015 | # asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3 | ||
9016 | # asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2 | ||
9017 | movdqa %xmm5,%xmm2 | ||
9018 | |||
9019 | # qhasm: xmm3 ^= xmm6 | ||
9020 | # asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3 | ||
9021 | # asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2 | ||
9022 | pxor %xmm3,%xmm2 | ||
9023 | |||
9024 | # qhasm: xmm3 &= xmm4 | ||
9025 | # asm 1: pand <xmm4=int6464#1,<xmm3=int6464#3 | ||
9026 | # asm 2: pand <xmm4=%xmm0,<xmm3=%xmm2 | ||
9027 | pand %xmm0,%xmm2 | ||
9028 | |||
9029 | # qhasm: xmm4 ^= xmm0 | ||
9030 | # asm 1: pxor <xmm0=int6464#2,<xmm4=int6464#1 | ||
9031 | # asm 2: pxor <xmm0=%xmm1,<xmm4=%xmm0 | ||
9032 | pxor %xmm1,%xmm0 | ||
9033 | |||
9034 | # qhasm: xmm4 &= xmm6 | ||
9035 | # asm 1: pand <xmm6=int6464#4,<xmm4=int6464#1 | ||
9036 | # asm 2: pand <xmm6=%xmm3,<xmm4=%xmm0 | ||
9037 | pand %xmm3,%xmm0 | ||
9038 | |||
9039 | # qhasm: xmm0 &= xmm7 | ||
9040 | # asm 1: pand <xmm7=int6464#6,<xmm0=int6464#2 | ||
9041 | # asm 2: pand <xmm7=%xmm5,<xmm0=%xmm1 | ||
9042 | pand %xmm5,%xmm1 | ||
9043 | |||
9044 | # qhasm: xmm0 ^= xmm4 | ||
9045 | # asm 1: pxor <xmm4=int6464#1,<xmm0=int6464#2 | ||
9046 | # asm 2: pxor <xmm4=%xmm0,<xmm0=%xmm1 | ||
9047 | pxor %xmm0,%xmm1 | ||
9048 | |||
9049 | # qhasm: xmm4 ^= xmm3 | ||
9050 | # asm 1: pxor <xmm3=int6464#3,<xmm4=int6464#1 | ||
9051 | # asm 2: pxor <xmm3=%xmm2,<xmm4=%xmm0 | ||
9052 | pxor %xmm2,%xmm0 | ||
9053 | |||
9054 | # qhasm: xmm2 = xmm5 | ||
9055 | # asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3 | ||
9056 | # asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2 | ||
9057 | movdqa %xmm7,%xmm2 | ||
9058 | |||
9059 | # qhasm: xmm2 ^= xmm1 | ||
9060 | # asm 1: pxor <xmm1=int6464#5,<xmm2=int6464#3 | ||
9061 | # asm 2: pxor <xmm1=%xmm4,<xmm2=%xmm2 | ||
9062 | pxor %xmm4,%xmm2 | ||
9063 | |||
9064 | # qhasm: xmm2 &= xmm8 | ||
9065 | # asm 1: pand <xmm8=int6464#9,<xmm2=int6464#3 | ||
9066 | # asm 2: pand <xmm8=%xmm8,<xmm2=%xmm2 | ||
9067 | pand %xmm8,%xmm2 | ||
9068 | |||
9069 | # qhasm: xmm8 ^= xmm11 | ||
9070 | # asm 1: pxor <xmm11=int6464#12,<xmm8=int6464#9 | ||
9071 | # asm 2: pxor <xmm11=%xmm11,<xmm8=%xmm8 | ||
9072 | pxor %xmm11,%xmm8 | ||
9073 | |||
9074 | # qhasm: xmm8 &= xmm1 | ||
9075 | # asm 1: pand <xmm1=int6464#5,<xmm8=int6464#9 | ||
9076 | # asm 2: pand <xmm1=%xmm4,<xmm8=%xmm8 | ||
9077 | pand %xmm4,%xmm8 | ||
9078 | |||
9079 | # qhasm: xmm11 &= xmm5 | ||
9080 | # asm 1: pand <xmm5=int6464#8,<xmm11=int6464#12 | ||
9081 | # asm 2: pand <xmm5=%xmm7,<xmm11=%xmm11 | ||
9082 | pand %xmm7,%xmm11 | ||
9083 | |||
9084 | # qhasm: xmm8 ^= xmm11 | ||
9085 | # asm 1: pxor <xmm11=int6464#12,<xmm8=int6464#9 | ||
9086 | # asm 2: pxor <xmm11=%xmm11,<xmm8=%xmm8 | ||
9087 | pxor %xmm11,%xmm8 | ||
9088 | |||
9089 | # qhasm: xmm11 ^= xmm2 | ||
9090 | # asm 1: pxor <xmm2=int6464#3,<xmm11=int6464#12 | ||
9091 | # asm 2: pxor <xmm2=%xmm2,<xmm11=%xmm11 | ||
9092 | pxor %xmm2,%xmm11 | ||
9093 | |||
9094 | # qhasm: xmm14 ^= xmm4 | ||
9095 | # asm 1: pxor <xmm4=int6464#1,<xmm14=int6464#15 | ||
9096 | # asm 2: pxor <xmm4=%xmm0,<xmm14=%xmm14 | ||
9097 | pxor %xmm0,%xmm14 | ||
9098 | |||
9099 | # qhasm: xmm8 ^= xmm4 | ||
9100 | # asm 1: pxor <xmm4=int6464#1,<xmm8=int6464#9 | ||
9101 | # asm 2: pxor <xmm4=%xmm0,<xmm8=%xmm8 | ||
9102 | pxor %xmm0,%xmm8 | ||
9103 | |||
9104 | # qhasm: xmm13 ^= xmm0 | ||
9105 | # asm 1: pxor <xmm0=int6464#2,<xmm13=int6464#14 | ||
9106 | # asm 2: pxor <xmm0=%xmm1,<xmm13=%xmm13 | ||
9107 | pxor %xmm1,%xmm13 | ||
9108 | |||
9109 | # qhasm: xmm11 ^= xmm0 | ||
9110 | # asm 1: pxor <xmm0=int6464#2,<xmm11=int6464#12 | ||
9111 | # asm 2: pxor <xmm0=%xmm1,<xmm11=%xmm11 | ||
9112 | pxor %xmm1,%xmm11 | ||
9113 | |||
9114 | # qhasm: xmm4 = xmm15 | ||
9115 | # asm 1: movdqa <xmm15=int6464#16,>xmm4=int6464#1 | ||
9116 | # asm 2: movdqa <xmm15=%xmm15,>xmm4=%xmm0 | ||
9117 | movdqa %xmm15,%xmm0 | ||
9118 | |||
9119 | # qhasm: xmm0 = xmm9 | ||
9120 | # asm 1: movdqa <xmm9=int6464#10,>xmm0=int6464#2 | ||
9121 | # asm 2: movdqa <xmm9=%xmm9,>xmm0=%xmm1 | ||
9122 | movdqa %xmm9,%xmm1 | ||
9123 | |||
9124 | # qhasm: xmm4 ^= xmm12 | ||
9125 | # asm 1: pxor <xmm12=int6464#13,<xmm4=int6464#1 | ||
9126 | # asm 2: pxor <xmm12=%xmm12,<xmm4=%xmm0 | ||
9127 | pxor %xmm12,%xmm0 | ||
9128 | |||
9129 | # qhasm: xmm0 ^= xmm10 | ||
9130 | # asm 1: pxor <xmm10=int6464#11,<xmm0=int6464#2 | ||
9131 | # asm 2: pxor <xmm10=%xmm10,<xmm0=%xmm1 | ||
9132 | pxor %xmm10,%xmm1 | ||
9133 | |||
9134 | # qhasm: xmm3 = xmm7 | ||
9135 | # asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3 | ||
9136 | # asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2 | ||
9137 | movdqa %xmm5,%xmm2 | ||
9138 | |||
9139 | # qhasm: xmm3 ^= xmm6 | ||
9140 | # asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3 | ||
9141 | # asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2 | ||
9142 | pxor %xmm3,%xmm2 | ||
9143 | |||
9144 | # qhasm: xmm3 &= xmm4 | ||
9145 | # asm 1: pand <xmm4=int6464#1,<xmm3=int6464#3 | ||
9146 | # asm 2: pand <xmm4=%xmm0,<xmm3=%xmm2 | ||
9147 | pand %xmm0,%xmm2 | ||
9148 | |||
9149 | # qhasm: xmm4 ^= xmm0 | ||
9150 | # asm 1: pxor <xmm0=int6464#2,<xmm4=int6464#1 | ||
9151 | # asm 2: pxor <xmm0=%xmm1,<xmm4=%xmm0 | ||
9152 | pxor %xmm1,%xmm0 | ||
9153 | |||
9154 | # qhasm: xmm4 &= xmm6 | ||
9155 | # asm 1: pand <xmm6=int6464#4,<xmm4=int6464#1 | ||
9156 | # asm 2: pand <xmm6=%xmm3,<xmm4=%xmm0 | ||
9157 | pand %xmm3,%xmm0 | ||
9158 | |||
9159 | # qhasm: xmm0 &= xmm7 | ||
9160 | # asm 1: pand <xmm7=int6464#6,<xmm0=int6464#2 | ||
9161 | # asm 2: pand <xmm7=%xmm5,<xmm0=%xmm1 | ||
9162 | pand %xmm5,%xmm1 | ||
9163 | |||
9164 | # qhasm: xmm0 ^= xmm4 | ||
9165 | # asm 1: pxor <xmm4=int6464#1,<xmm0=int6464#2 | ||
9166 | # asm 2: pxor <xmm4=%xmm0,<xmm0=%xmm1 | ||
9167 | pxor %xmm0,%xmm1 | ||
9168 | |||
9169 | # qhasm: xmm4 ^= xmm3 | ||
9170 | # asm 1: pxor <xmm3=int6464#3,<xmm4=int6464#1 | ||
9171 | # asm 2: pxor <xmm3=%xmm2,<xmm4=%xmm0 | ||
9172 | pxor %xmm2,%xmm0 | ||
9173 | |||
9174 | # qhasm: xmm2 = xmm5 | ||
9175 | # asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3 | ||
9176 | # asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2 | ||
9177 | movdqa %xmm7,%xmm2 | ||
9178 | |||
9179 | # qhasm: xmm2 ^= xmm1 | ||
9180 | # asm 1: pxor <xmm1=int6464#5,<xmm2=int6464#3 | ||
9181 | # asm 2: pxor <xmm1=%xmm4,<xmm2=%xmm2 | ||
9182 | pxor %xmm4,%xmm2 | ||
9183 | |||
9184 | # qhasm: xmm2 &= xmm12 | ||
9185 | # asm 1: pand <xmm12=int6464#13,<xmm2=int6464#3 | ||
9186 | # asm 2: pand <xmm12=%xmm12,<xmm2=%xmm2 | ||
9187 | pand %xmm12,%xmm2 | ||
9188 | |||
9189 | # qhasm: xmm12 ^= xmm10 | ||
9190 | # asm 1: pxor <xmm10=int6464#11,<xmm12=int6464#13 | ||
9191 | # asm 2: pxor <xmm10=%xmm10,<xmm12=%xmm12 | ||
9192 | pxor %xmm10,%xmm12 | ||
9193 | |||
9194 | # qhasm: xmm12 &= xmm1 | ||
9195 | # asm 1: pand <xmm1=int6464#5,<xmm12=int6464#13 | ||
9196 | # asm 2: pand <xmm1=%xmm4,<xmm12=%xmm12 | ||
9197 | pand %xmm4,%xmm12 | ||
9198 | |||
9199 | # qhasm: xmm10 &= xmm5 | ||
9200 | # asm 1: pand <xmm5=int6464#8,<xmm10=int6464#11 | ||
9201 | # asm 2: pand <xmm5=%xmm7,<xmm10=%xmm10 | ||
9202 | pand %xmm7,%xmm10 | ||
9203 | |||
9204 | # qhasm: xmm12 ^= xmm10 | ||
9205 | # asm 1: pxor <xmm10=int6464#11,<xmm12=int6464#13 | ||
9206 | # asm 2: pxor <xmm10=%xmm10,<xmm12=%xmm12 | ||
9207 | pxor %xmm10,%xmm12 | ||
9208 | |||
9209 | # qhasm: xmm10 ^= xmm2 | ||
9210 | # asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#11 | ||
9211 | # asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm10 | ||
9212 | pxor %xmm2,%xmm10 | ||
9213 | |||
9214 | # qhasm: xmm7 ^= xmm5 | ||
9215 | # asm 1: pxor <xmm5=int6464#8,<xmm7=int6464#6 | ||
9216 | # asm 2: pxor <xmm5=%xmm7,<xmm7=%xmm5 | ||
9217 | pxor %xmm7,%xmm5 | ||
9218 | |||
9219 | # qhasm: xmm6 ^= xmm1 | ||
9220 | # asm 1: pxor <xmm1=int6464#5,<xmm6=int6464#4 | ||
9221 | # asm 2: pxor <xmm1=%xmm4,<xmm6=%xmm3 | ||
9222 | pxor %xmm4,%xmm3 | ||
9223 | |||
9224 | # qhasm: xmm3 = xmm7 | ||
9225 | # asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3 | ||
9226 | # asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2 | ||
9227 | movdqa %xmm5,%xmm2 | ||
9228 | |||
9229 | # qhasm: xmm3 ^= xmm6 | ||
9230 | # asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3 | ||
9231 | # asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2 | ||
9232 | pxor %xmm3,%xmm2 | ||
9233 | |||
9234 | # qhasm: xmm3 &= xmm15 | ||
9235 | # asm 1: pand <xmm15=int6464#16,<xmm3=int6464#3 | ||
9236 | # asm 2: pand <xmm15=%xmm15,<xmm3=%xmm2 | ||
9237 | pand %xmm15,%xmm2 | ||
9238 | |||
9239 | # qhasm: xmm15 ^= xmm9 | ||
9240 | # asm 1: pxor <xmm9=int6464#10,<xmm15=int6464#16 | ||
9241 | # asm 2: pxor <xmm9=%xmm9,<xmm15=%xmm15 | ||
9242 | pxor %xmm9,%xmm15 | ||
9243 | |||
9244 | # qhasm: xmm15 &= xmm6 | ||
9245 | # asm 1: pand <xmm6=int6464#4,<xmm15=int6464#16 | ||
9246 | # asm 2: pand <xmm6=%xmm3,<xmm15=%xmm15 | ||
9247 | pand %xmm3,%xmm15 | ||
9248 | |||
9249 | # qhasm: xmm9 &= xmm7 | ||
9250 | # asm 1: pand <xmm7=int6464#6,<xmm9=int6464#10 | ||
9251 | # asm 2: pand <xmm7=%xmm5,<xmm9=%xmm9 | ||
9252 | pand %xmm5,%xmm9 | ||
9253 | |||
9254 | # qhasm: xmm15 ^= xmm9 | ||
9255 | # asm 1: pxor <xmm9=int6464#10,<xmm15=int6464#16 | ||
9256 | # asm 2: pxor <xmm9=%xmm9,<xmm15=%xmm15 | ||
9257 | pxor %xmm9,%xmm15 | ||
9258 | |||
9259 | # qhasm: xmm9 ^= xmm3 | ||
9260 | # asm 1: pxor <xmm3=int6464#3,<xmm9=int6464#10 | ||
9261 | # asm 2: pxor <xmm3=%xmm2,<xmm9=%xmm9 | ||
9262 | pxor %xmm2,%xmm9 | ||
9263 | |||
9264 | # qhasm: xmm15 ^= xmm4 | ||
9265 | # asm 1: pxor <xmm4=int6464#1,<xmm15=int6464#16 | ||
9266 | # asm 2: pxor <xmm4=%xmm0,<xmm15=%xmm15 | ||
9267 | pxor %xmm0,%xmm15 | ||
9268 | |||
9269 | # qhasm: xmm12 ^= xmm4 | ||
9270 | # asm 1: pxor <xmm4=int6464#1,<xmm12=int6464#13 | ||
9271 | # asm 2: pxor <xmm4=%xmm0,<xmm12=%xmm12 | ||
9272 | pxor %xmm0,%xmm12 | ||
9273 | |||
9274 | # qhasm: xmm9 ^= xmm0 | ||
9275 | # asm 1: pxor <xmm0=int6464#2,<xmm9=int6464#10 | ||
9276 | # asm 2: pxor <xmm0=%xmm1,<xmm9=%xmm9 | ||
9277 | pxor %xmm1,%xmm9 | ||
9278 | |||
9279 | # qhasm: xmm10 ^= xmm0 | ||
9280 | # asm 1: pxor <xmm0=int6464#2,<xmm10=int6464#11 | ||
9281 | # asm 2: pxor <xmm0=%xmm1,<xmm10=%xmm10 | ||
9282 | pxor %xmm1,%xmm10 | ||
9283 | |||
9284 | # qhasm: xmm15 ^= xmm8 | ||
9285 | # asm 1: pxor <xmm8=int6464#9,<xmm15=int6464#16 | ||
9286 | # asm 2: pxor <xmm8=%xmm8,<xmm15=%xmm15 | ||
9287 | pxor %xmm8,%xmm15 | ||
9288 | |||
9289 | # qhasm: xmm9 ^= xmm14 | ||
9290 | # asm 1: pxor <xmm14=int6464#15,<xmm9=int6464#10 | ||
9291 | # asm 2: pxor <xmm14=%xmm14,<xmm9=%xmm9 | ||
9292 | pxor %xmm14,%xmm9 | ||
9293 | |||
9294 | # qhasm: xmm12 ^= xmm15 | ||
9295 | # asm 1: pxor <xmm15=int6464#16,<xmm12=int6464#13 | ||
9296 | # asm 2: pxor <xmm15=%xmm15,<xmm12=%xmm12 | ||
9297 | pxor %xmm15,%xmm12 | ||
9298 | |||
9299 | # qhasm: xmm14 ^= xmm8 | ||
9300 | # asm 1: pxor <xmm8=int6464#9,<xmm14=int6464#15 | ||
9301 | # asm 2: pxor <xmm8=%xmm8,<xmm14=%xmm14 | ||
9302 | pxor %xmm8,%xmm14 | ||
9303 | |||
9304 | # qhasm: xmm8 ^= xmm9 | ||
9305 | # asm 1: pxor <xmm9=int6464#10,<xmm8=int6464#9 | ||
9306 | # asm 2: pxor <xmm9=%xmm9,<xmm8=%xmm8 | ||
9307 | pxor %xmm9,%xmm8 | ||
9308 | |||
9309 | # qhasm: xmm9 ^= xmm13 | ||
9310 | # asm 1: pxor <xmm13=int6464#14,<xmm9=int6464#10 | ||
9311 | # asm 2: pxor <xmm13=%xmm13,<xmm9=%xmm9 | ||
9312 | pxor %xmm13,%xmm9 | ||
9313 | |||
9314 | # qhasm: xmm13 ^= xmm10 | ||
9315 | # asm 1: pxor <xmm10=int6464#11,<xmm13=int6464#14 | ||
9316 | # asm 2: pxor <xmm10=%xmm10,<xmm13=%xmm13 | ||
9317 | pxor %xmm10,%xmm13 | ||
9318 | |||
9319 | # qhasm: xmm12 ^= xmm13 | ||
9320 | # asm 1: pxor <xmm13=int6464#14,<xmm12=int6464#13 | ||
9321 | # asm 2: pxor <xmm13=%xmm13,<xmm12=%xmm12 | ||
9322 | pxor %xmm13,%xmm12 | ||
9323 | |||
9324 | # qhasm: xmm10 ^= xmm11 | ||
9325 | # asm 1: pxor <xmm11=int6464#12,<xmm10=int6464#11 | ||
9326 | # asm 2: pxor <xmm11=%xmm11,<xmm10=%xmm10 | ||
9327 | pxor %xmm11,%xmm10 | ||
9328 | |||
9329 | # qhasm: xmm11 ^= xmm13 | ||
9330 | # asm 1: pxor <xmm13=int6464#14,<xmm11=int6464#12 | ||
9331 | # asm 2: pxor <xmm13=%xmm13,<xmm11=%xmm11 | ||
9332 | pxor %xmm13,%xmm11 | ||
9333 | |||
9334 | # qhasm: xmm14 ^= xmm11 | ||
9335 | # asm 1: pxor <xmm11=int6464#12,<xmm14=int6464#15 | ||
9336 | # asm 2: pxor <xmm11=%xmm11,<xmm14=%xmm14 | ||
9337 | pxor %xmm11,%xmm14 | ||
9338 | |||
9339 | # qhasm: xmm0 = shuffle dwords of xmm8 by 0x93 | ||
9340 | # asm 1: pshufd $0x93,<xmm8=int6464#9,>xmm0=int6464#1 | ||
9341 | # asm 2: pshufd $0x93,<xmm8=%xmm8,>xmm0=%xmm0 | ||
9342 | pshufd $0x93,%xmm8,%xmm0 | ||
9343 | |||
9344 | # qhasm: xmm1 = shuffle dwords of xmm9 by 0x93 | ||
9345 | # asm 1: pshufd $0x93,<xmm9=int6464#10,>xmm1=int6464#2 | ||
9346 | # asm 2: pshufd $0x93,<xmm9=%xmm9,>xmm1=%xmm1 | ||
9347 | pshufd $0x93,%xmm9,%xmm1 | ||
9348 | |||
9349 | # qhasm: xmm2 = shuffle dwords of xmm12 by 0x93 | ||
9350 | # asm 1: pshufd $0x93,<xmm12=int6464#13,>xmm2=int6464#3 | ||
9351 | # asm 2: pshufd $0x93,<xmm12=%xmm12,>xmm2=%xmm2 | ||
9352 | pshufd $0x93,%xmm12,%xmm2 | ||
9353 | |||
9354 | # qhasm: xmm3 = shuffle dwords of xmm14 by 0x93 | ||
9355 | # asm 1: pshufd $0x93,<xmm14=int6464#15,>xmm3=int6464#4 | ||
9356 | # asm 2: pshufd $0x93,<xmm14=%xmm14,>xmm3=%xmm3 | ||
9357 | pshufd $0x93,%xmm14,%xmm3 | ||
9358 | |||
9359 | # qhasm: xmm4 = shuffle dwords of xmm11 by 0x93 | ||
9360 | # asm 1: pshufd $0x93,<xmm11=int6464#12,>xmm4=int6464#5 | ||
9361 | # asm 2: pshufd $0x93,<xmm11=%xmm11,>xmm4=%xmm4 | ||
9362 | pshufd $0x93,%xmm11,%xmm4 | ||
9363 | |||
9364 | # qhasm: xmm5 = shuffle dwords of xmm15 by 0x93 | ||
9365 | # asm 1: pshufd $0x93,<xmm15=int6464#16,>xmm5=int6464#6 | ||
9366 | # asm 2: pshufd $0x93,<xmm15=%xmm15,>xmm5=%xmm5 | ||
9367 | pshufd $0x93,%xmm15,%xmm5 | ||
9368 | |||
9369 | # qhasm: xmm6 = shuffle dwords of xmm10 by 0x93 | ||
9370 | # asm 1: pshufd $0x93,<xmm10=int6464#11,>xmm6=int6464#7 | ||
9371 | # asm 2: pshufd $0x93,<xmm10=%xmm10,>xmm6=%xmm6 | ||
9372 | pshufd $0x93,%xmm10,%xmm6 | ||
9373 | |||
9374 | # qhasm: xmm7 = shuffle dwords of xmm13 by 0x93 | ||
9375 | # asm 1: pshufd $0x93,<xmm13=int6464#14,>xmm7=int6464#8 | ||
9376 | # asm 2: pshufd $0x93,<xmm13=%xmm13,>xmm7=%xmm7 | ||
9377 | pshufd $0x93,%xmm13,%xmm7 | ||
9378 | |||
9379 | # qhasm: xmm8 ^= xmm0 | ||
9380 | # asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9 | ||
9381 | # asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8 | ||
9382 | pxor %xmm0,%xmm8 | ||
9383 | |||
9384 | # qhasm: xmm9 ^= xmm1 | ||
9385 | # asm 1: pxor <xmm1=int6464#2,<xmm9=int6464#10 | ||
9386 | # asm 2: pxor <xmm1=%xmm1,<xmm9=%xmm9 | ||
9387 | pxor %xmm1,%xmm9 | ||
9388 | |||
9389 | # qhasm: xmm12 ^= xmm2 | ||
9390 | # asm 1: pxor <xmm2=int6464#3,<xmm12=int6464#13 | ||
9391 | # asm 2: pxor <xmm2=%xmm2,<xmm12=%xmm12 | ||
9392 | pxor %xmm2,%xmm12 | ||
9393 | |||
9394 | # qhasm: xmm14 ^= xmm3 | ||
9395 | # asm 1: pxor <xmm3=int6464#4,<xmm14=int6464#15 | ||
9396 | # asm 2: pxor <xmm3=%xmm3,<xmm14=%xmm14 | ||
9397 | pxor %xmm3,%xmm14 | ||
9398 | |||
9399 | # qhasm: xmm11 ^= xmm4 | ||
9400 | # asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#12 | ||
9401 | # asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm11 | ||
9402 | pxor %xmm4,%xmm11 | ||
9403 | |||
9404 | # qhasm: xmm15 ^= xmm5 | ||
9405 | # asm 1: pxor <xmm5=int6464#6,<xmm15=int6464#16 | ||
9406 | # asm 2: pxor <xmm5=%xmm5,<xmm15=%xmm15 | ||
9407 | pxor %xmm5,%xmm15 | ||
9408 | |||
9409 | # qhasm: xmm10 ^= xmm6 | ||
9410 | # asm 1: pxor <xmm6=int6464#7,<xmm10=int6464#11 | ||
9411 | # asm 2: pxor <xmm6=%xmm6,<xmm10=%xmm10 | ||
9412 | pxor %xmm6,%xmm10 | ||
9413 | |||
9414 | # qhasm: xmm13 ^= xmm7 | ||
9415 | # asm 1: pxor <xmm7=int6464#8,<xmm13=int6464#14 | ||
9416 | # asm 2: pxor <xmm7=%xmm7,<xmm13=%xmm13 | ||
9417 | pxor %xmm7,%xmm13 | ||
9418 | |||
9419 | # qhasm: xmm0 ^= xmm13 | ||
9420 | # asm 1: pxor <xmm13=int6464#14,<xmm0=int6464#1 | ||
9421 | # asm 2: pxor <xmm13=%xmm13,<xmm0=%xmm0 | ||
9422 | pxor %xmm13,%xmm0 | ||
9423 | |||
9424 | # qhasm: xmm1 ^= xmm8 | ||
9425 | # asm 1: pxor <xmm8=int6464#9,<xmm1=int6464#2 | ||
9426 | # asm 2: pxor <xmm8=%xmm8,<xmm1=%xmm1 | ||
9427 | pxor %xmm8,%xmm1 | ||
9428 | |||
9429 | # qhasm: xmm2 ^= xmm9 | ||
9430 | # asm 1: pxor <xmm9=int6464#10,<xmm2=int6464#3 | ||
9431 | # asm 2: pxor <xmm9=%xmm9,<xmm2=%xmm2 | ||
9432 | pxor %xmm9,%xmm2 | ||
9433 | |||
9434 | # qhasm: xmm1 ^= xmm13 | ||
9435 | # asm 1: pxor <xmm13=int6464#14,<xmm1=int6464#2 | ||
9436 | # asm 2: pxor <xmm13=%xmm13,<xmm1=%xmm1 | ||
9437 | pxor %xmm13,%xmm1 | ||
9438 | |||
9439 | # qhasm: xmm3 ^= xmm12 | ||
9440 | # asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4 | ||
9441 | # asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3 | ||
9442 | pxor %xmm12,%xmm3 | ||
9443 | |||
9444 | # qhasm: xmm4 ^= xmm14 | ||
9445 | # asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5 | ||
9446 | # asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4 | ||
9447 | pxor %xmm14,%xmm4 | ||
9448 | |||
9449 | # qhasm: xmm5 ^= xmm11 | ||
9450 | # asm 1: pxor <xmm11=int6464#12,<xmm5=int6464#6 | ||
9451 | # asm 2: pxor <xmm11=%xmm11,<xmm5=%xmm5 | ||
9452 | pxor %xmm11,%xmm5 | ||
9453 | |||
9454 | # qhasm: xmm3 ^= xmm13 | ||
9455 | # asm 1: pxor <xmm13=int6464#14,<xmm3=int6464#4 | ||
9456 | # asm 2: pxor <xmm13=%xmm13,<xmm3=%xmm3 | ||
9457 | pxor %xmm13,%xmm3 | ||
9458 | |||
9459 | # qhasm: xmm6 ^= xmm15 | ||
9460 | # asm 1: pxor <xmm15=int6464#16,<xmm6=int6464#7 | ||
9461 | # asm 2: pxor <xmm15=%xmm15,<xmm6=%xmm6 | ||
9462 | pxor %xmm15,%xmm6 | ||
9463 | |||
9464 | # qhasm: xmm7 ^= xmm10 | ||
9465 | # asm 1: pxor <xmm10=int6464#11,<xmm7=int6464#8 | ||
9466 | # asm 2: pxor <xmm10=%xmm10,<xmm7=%xmm7 | ||
9467 | pxor %xmm10,%xmm7 | ||
9468 | |||
9469 | # qhasm: xmm4 ^= xmm13 | ||
9470 | # asm 1: pxor <xmm13=int6464#14,<xmm4=int6464#5 | ||
9471 | # asm 2: pxor <xmm13=%xmm13,<xmm4=%xmm4 | ||
9472 | pxor %xmm13,%xmm4 | ||
9473 | |||
9474 | # qhasm: xmm8 = shuffle dwords of xmm8 by 0x4E | ||
9475 | # asm 1: pshufd $0x4E,<xmm8=int6464#9,>xmm8=int6464#9 | ||
9476 | # asm 2: pshufd $0x4E,<xmm8=%xmm8,>xmm8=%xmm8 | ||
9477 | pshufd $0x4E,%xmm8,%xmm8 | ||
9478 | |||
9479 | # qhasm: xmm9 = shuffle dwords of xmm9 by 0x4E | ||
9480 | # asm 1: pshufd $0x4E,<xmm9=int6464#10,>xmm9=int6464#10 | ||
9481 | # asm 2: pshufd $0x4E,<xmm9=%xmm9,>xmm9=%xmm9 | ||
9482 | pshufd $0x4E,%xmm9,%xmm9 | ||
9483 | |||
9484 | # qhasm: xmm12 = shuffle dwords of xmm12 by 0x4E | ||
9485 | # asm 1: pshufd $0x4E,<xmm12=int6464#13,>xmm12=int6464#13 | ||
9486 | # asm 2: pshufd $0x4E,<xmm12=%xmm12,>xmm12=%xmm12 | ||
9487 | pshufd $0x4E,%xmm12,%xmm12 | ||
9488 | |||
9489 | # qhasm: xmm14 = shuffle dwords of xmm14 by 0x4E | ||
9490 | # asm 1: pshufd $0x4E,<xmm14=int6464#15,>xmm14=int6464#15 | ||
9491 | # asm 2: pshufd $0x4E,<xmm14=%xmm14,>xmm14=%xmm14 | ||
9492 | pshufd $0x4E,%xmm14,%xmm14 | ||
9493 | |||
9494 | # qhasm: xmm11 = shuffle dwords of xmm11 by 0x4E | ||
9495 | # asm 1: pshufd $0x4E,<xmm11=int6464#12,>xmm11=int6464#12 | ||
9496 | # asm 2: pshufd $0x4E,<xmm11=%xmm11,>xmm11=%xmm11 | ||
9497 | pshufd $0x4E,%xmm11,%xmm11 | ||
9498 | |||
9499 | # qhasm: xmm15 = shuffle dwords of xmm15 by 0x4E | ||
9500 | # asm 1: pshufd $0x4E,<xmm15=int6464#16,>xmm15=int6464#16 | ||
9501 | # asm 2: pshufd $0x4E,<xmm15=%xmm15,>xmm15=%xmm15 | ||
9502 | pshufd $0x4E,%xmm15,%xmm15 | ||
9503 | |||
9504 | # qhasm: xmm10 = shuffle dwords of xmm10 by 0x4E | ||
9505 | # asm 1: pshufd $0x4E,<xmm10=int6464#11,>xmm10=int6464#11 | ||
9506 | # asm 2: pshufd $0x4E,<xmm10=%xmm10,>xmm10=%xmm10 | ||
9507 | pshufd $0x4E,%xmm10,%xmm10 | ||
9508 | |||
9509 | # qhasm: xmm13 = shuffle dwords of xmm13 by 0x4E | ||
9510 | # asm 1: pshufd $0x4E,<xmm13=int6464#14,>xmm13=int6464#14 | ||
9511 | # asm 2: pshufd $0x4E,<xmm13=%xmm13,>xmm13=%xmm13 | ||
9512 | pshufd $0x4E,%xmm13,%xmm13 | ||
9513 | |||
9514 | # qhasm: xmm0 ^= xmm8 | ||
9515 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
9516 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
9517 | pxor %xmm8,%xmm0 | ||
9518 | |||
9519 | # qhasm: xmm1 ^= xmm9 | ||
9520 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
9521 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
9522 | pxor %xmm9,%xmm1 | ||
9523 | |||
9524 | # qhasm: xmm2 ^= xmm12 | ||
9525 | # asm 1: pxor <xmm12=int6464#13,<xmm2=int6464#3 | ||
9526 | # asm 2: pxor <xmm12=%xmm12,<xmm2=%xmm2 | ||
9527 | pxor %xmm12,%xmm2 | ||
9528 | |||
9529 | # qhasm: xmm3 ^= xmm14 | ||
9530 | # asm 1: pxor <xmm14=int6464#15,<xmm3=int6464#4 | ||
9531 | # asm 2: pxor <xmm14=%xmm14,<xmm3=%xmm3 | ||
9532 | pxor %xmm14,%xmm3 | ||
9533 | |||
9534 | # qhasm: xmm4 ^= xmm11 | ||
9535 | # asm 1: pxor <xmm11=int6464#12,<xmm4=int6464#5 | ||
9536 | # asm 2: pxor <xmm11=%xmm11,<xmm4=%xmm4 | ||
9537 | pxor %xmm11,%xmm4 | ||
9538 | |||
9539 | # qhasm: xmm5 ^= xmm15 | ||
9540 | # asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6 | ||
9541 | # asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5 | ||
9542 | pxor %xmm15,%xmm5 | ||
9543 | |||
9544 | # qhasm: xmm6 ^= xmm10 | ||
9545 | # asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7 | ||
9546 | # asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6 | ||
9547 | pxor %xmm10,%xmm6 | ||
9548 | |||
9549 | # qhasm: xmm7 ^= xmm13 | ||
9550 | # asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8 | ||
9551 | # asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7 | ||
9552 | pxor %xmm13,%xmm7 | ||
9553 | |||
9554 | # qhasm: xmm0 ^= *(int128 *)(c + 1024) | ||
9555 | # asm 1: pxor 1024(<c=int64#5),<xmm0=int6464#1 | ||
9556 | # asm 2: pxor 1024(<c=%r8),<xmm0=%xmm0 | ||
9557 | pxor 1024(%r8),%xmm0 | ||
9558 | |||
9559 | # qhasm: shuffle bytes of xmm0 by SR | ||
9560 | # asm 1: pshufb SR,<xmm0=int6464#1 | ||
9561 | # asm 2: pshufb SR,<xmm0=%xmm0 | ||
9562 | pshufb SR,%xmm0 | ||
9563 | |||
9564 | # qhasm: xmm1 ^= *(int128 *)(c + 1040) | ||
9565 | # asm 1: pxor 1040(<c=int64#5),<xmm1=int6464#2 | ||
9566 | # asm 2: pxor 1040(<c=%r8),<xmm1=%xmm1 | ||
9567 | pxor 1040(%r8),%xmm1 | ||
9568 | |||
9569 | # qhasm: shuffle bytes of xmm1 by SR | ||
9570 | # asm 1: pshufb SR,<xmm1=int6464#2 | ||
9571 | # asm 2: pshufb SR,<xmm1=%xmm1 | ||
9572 | pshufb SR,%xmm1 | ||
9573 | |||
9574 | # qhasm: xmm2 ^= *(int128 *)(c + 1056) | ||
9575 | # asm 1: pxor 1056(<c=int64#5),<xmm2=int6464#3 | ||
9576 | # asm 2: pxor 1056(<c=%r8),<xmm2=%xmm2 | ||
9577 | pxor 1056(%r8),%xmm2 | ||
9578 | |||
9579 | # qhasm: shuffle bytes of xmm2 by SR | ||
9580 | # asm 1: pshufb SR,<xmm2=int6464#3 | ||
9581 | # asm 2: pshufb SR,<xmm2=%xmm2 | ||
9582 | pshufb SR,%xmm2 | ||
9583 | |||
9584 | # qhasm: xmm3 ^= *(int128 *)(c + 1072) | ||
9585 | # asm 1: pxor 1072(<c=int64#5),<xmm3=int6464#4 | ||
9586 | # asm 2: pxor 1072(<c=%r8),<xmm3=%xmm3 | ||
9587 | pxor 1072(%r8),%xmm3 | ||
9588 | |||
9589 | # qhasm: shuffle bytes of xmm3 by SR | ||
9590 | # asm 1: pshufb SR,<xmm3=int6464#4 | ||
9591 | # asm 2: pshufb SR,<xmm3=%xmm3 | ||
9592 | pshufb SR,%xmm3 | ||
9593 | |||
9594 | # qhasm: xmm4 ^= *(int128 *)(c + 1088) | ||
9595 | # asm 1: pxor 1088(<c=int64#5),<xmm4=int6464#5 | ||
9596 | # asm 2: pxor 1088(<c=%r8),<xmm4=%xmm4 | ||
9597 | pxor 1088(%r8),%xmm4 | ||
9598 | |||
9599 | # qhasm: shuffle bytes of xmm4 by SR | ||
9600 | # asm 1: pshufb SR,<xmm4=int6464#5 | ||
9601 | # asm 2: pshufb SR,<xmm4=%xmm4 | ||
9602 | pshufb SR,%xmm4 | ||
9603 | |||
9604 | # qhasm: xmm5 ^= *(int128 *)(c + 1104) | ||
9605 | # asm 1: pxor 1104(<c=int64#5),<xmm5=int6464#6 | ||
9606 | # asm 2: pxor 1104(<c=%r8),<xmm5=%xmm5 | ||
9607 | pxor 1104(%r8),%xmm5 | ||
9608 | |||
9609 | # qhasm: shuffle bytes of xmm5 by SR | ||
9610 | # asm 1: pshufb SR,<xmm5=int6464#6 | ||
9611 | # asm 2: pshufb SR,<xmm5=%xmm5 | ||
9612 | pshufb SR,%xmm5 | ||
9613 | |||
9614 | # qhasm: xmm6 ^= *(int128 *)(c + 1120) | ||
9615 | # asm 1: pxor 1120(<c=int64#5),<xmm6=int6464#7 | ||
9616 | # asm 2: pxor 1120(<c=%r8),<xmm6=%xmm6 | ||
9617 | pxor 1120(%r8),%xmm6 | ||
9618 | |||
9619 | # qhasm: shuffle bytes of xmm6 by SR | ||
9620 | # asm 1: pshufb SR,<xmm6=int6464#7 | ||
9621 | # asm 2: pshufb SR,<xmm6=%xmm6 | ||
9622 | pshufb SR,%xmm6 | ||
9623 | |||
9624 | # qhasm: xmm7 ^= *(int128 *)(c + 1136) | ||
9625 | # asm 1: pxor 1136(<c=int64#5),<xmm7=int6464#8 | ||
9626 | # asm 2: pxor 1136(<c=%r8),<xmm7=%xmm7 | ||
9627 | pxor 1136(%r8),%xmm7 | ||
9628 | |||
9629 | # qhasm: shuffle bytes of xmm7 by SR | ||
9630 | # asm 1: pshufb SR,<xmm7=int6464#8 | ||
9631 | # asm 2: pshufb SR,<xmm7=%xmm7 | ||
9632 | pshufb SR,%xmm7 | ||
9633 | |||
9634 | # qhasm: xmm5 ^= xmm6 | ||
9635 | # asm 1: pxor <xmm6=int6464#7,<xmm5=int6464#6 | ||
9636 | # asm 2: pxor <xmm6=%xmm6,<xmm5=%xmm5 | ||
9637 | pxor %xmm6,%xmm5 | ||
9638 | |||
9639 | # qhasm: xmm2 ^= xmm1 | ||
9640 | # asm 1: pxor <xmm1=int6464#2,<xmm2=int6464#3 | ||
9641 | # asm 2: pxor <xmm1=%xmm1,<xmm2=%xmm2 | ||
9642 | pxor %xmm1,%xmm2 | ||
9643 | |||
9644 | # qhasm: xmm5 ^= xmm0 | ||
9645 | # asm 1: pxor <xmm0=int6464#1,<xmm5=int6464#6 | ||
9646 | # asm 2: pxor <xmm0=%xmm0,<xmm5=%xmm5 | ||
9647 | pxor %xmm0,%xmm5 | ||
9648 | |||
9649 | # qhasm: xmm6 ^= xmm2 | ||
9650 | # asm 1: pxor <xmm2=int6464#3,<xmm6=int6464#7 | ||
9651 | # asm 2: pxor <xmm2=%xmm2,<xmm6=%xmm6 | ||
9652 | pxor %xmm2,%xmm6 | ||
9653 | |||
9654 | # qhasm: xmm3 ^= xmm0 | ||
9655 | # asm 1: pxor <xmm0=int6464#1,<xmm3=int6464#4 | ||
9656 | # asm 2: pxor <xmm0=%xmm0,<xmm3=%xmm3 | ||
9657 | pxor %xmm0,%xmm3 | ||
9658 | |||
9659 | # qhasm: xmm6 ^= xmm3 | ||
9660 | # asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7 | ||
9661 | # asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6 | ||
9662 | pxor %xmm3,%xmm6 | ||
9663 | |||
9664 | # qhasm: xmm3 ^= xmm7 | ||
9665 | # asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4 | ||
9666 | # asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3 | ||
9667 | pxor %xmm7,%xmm3 | ||
9668 | |||
9669 | # qhasm: xmm3 ^= xmm4 | ||
9670 | # asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4 | ||
9671 | # asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3 | ||
9672 | pxor %xmm4,%xmm3 | ||
9673 | |||
9674 | # qhasm: xmm7 ^= xmm5 | ||
9675 | # asm 1: pxor <xmm5=int6464#6,<xmm7=int6464#8 | ||
9676 | # asm 2: pxor <xmm5=%xmm5,<xmm7=%xmm7 | ||
9677 | pxor %xmm5,%xmm7 | ||
9678 | |||
9679 | # qhasm: xmm3 ^= xmm1 | ||
9680 | # asm 1: pxor <xmm1=int6464#2,<xmm3=int6464#4 | ||
9681 | # asm 2: pxor <xmm1=%xmm1,<xmm3=%xmm3 | ||
9682 | pxor %xmm1,%xmm3 | ||
9683 | |||
9684 | # qhasm: xmm4 ^= xmm5 | ||
9685 | # asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5 | ||
9686 | # asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4 | ||
9687 | pxor %xmm5,%xmm4 | ||
9688 | |||
9689 | # qhasm: xmm2 ^= xmm7 | ||
9690 | # asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3 | ||
9691 | # asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2 | ||
9692 | pxor %xmm7,%xmm2 | ||
9693 | |||
9694 | # qhasm: xmm1 ^= xmm5 | ||
9695 | # asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2 | ||
9696 | # asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1 | ||
9697 | pxor %xmm5,%xmm1 | ||
9698 | |||
9699 | # qhasm: xmm11 = xmm7 | ||
9700 | # asm 1: movdqa <xmm7=int6464#8,>xmm11=int6464#9 | ||
9701 | # asm 2: movdqa <xmm7=%xmm7,>xmm11=%xmm8 | ||
9702 | movdqa %xmm7,%xmm8 | ||
9703 | |||
9704 | # qhasm: xmm10 = xmm1 | ||
9705 | # asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10 | ||
9706 | # asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9 | ||
9707 | movdqa %xmm1,%xmm9 | ||
9708 | |||
9709 | # qhasm: xmm9 = xmm5 | ||
9710 | # asm 1: movdqa <xmm5=int6464#6,>xmm9=int6464#11 | ||
9711 | # asm 2: movdqa <xmm5=%xmm5,>xmm9=%xmm10 | ||
9712 | movdqa %xmm5,%xmm10 | ||
9713 | |||
9714 | # qhasm: xmm13 = xmm2 | ||
9715 | # asm 1: movdqa <xmm2=int6464#3,>xmm13=int6464#12 | ||
9716 | # asm 2: movdqa <xmm2=%xmm2,>xmm13=%xmm11 | ||
9717 | movdqa %xmm2,%xmm11 | ||
9718 | |||
9719 | # qhasm: xmm12 = xmm6 | ||
9720 | # asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#13 | ||
9721 | # asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm12 | ||
9722 | movdqa %xmm6,%xmm12 | ||
9723 | |||
9724 | # qhasm: xmm11 ^= xmm4 | ||
9725 | # asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#9 | ||
9726 | # asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm8 | ||
9727 | pxor %xmm4,%xmm8 | ||
9728 | |||
9729 | # qhasm: xmm10 ^= xmm2 | ||
9730 | # asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#10 | ||
9731 | # asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm9 | ||
9732 | pxor %xmm2,%xmm9 | ||
9733 | |||
9734 | # qhasm: xmm9 ^= xmm3 | ||
9735 | # asm 1: pxor <xmm3=int6464#4,<xmm9=int6464#11 | ||
9736 | # asm 2: pxor <xmm3=%xmm3,<xmm9=%xmm10 | ||
9737 | pxor %xmm3,%xmm10 | ||
9738 | |||
9739 | # qhasm: xmm13 ^= xmm4 | ||
9740 | # asm 1: pxor <xmm4=int6464#5,<xmm13=int6464#12 | ||
9741 | # asm 2: pxor <xmm4=%xmm4,<xmm13=%xmm11 | ||
9742 | pxor %xmm4,%xmm11 | ||
9743 | |||
9744 | # qhasm: xmm12 ^= xmm0 | ||
9745 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13 | ||
9746 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12 | ||
9747 | pxor %xmm0,%xmm12 | ||
9748 | |||
9749 | # qhasm: xmm14 = xmm11 | ||
9750 | # asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14 | ||
9751 | # asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13 | ||
9752 | movdqa %xmm8,%xmm13 | ||
9753 | |||
9754 | # qhasm: xmm8 = xmm10 | ||
9755 | # asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15 | ||
9756 | # asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14 | ||
9757 | movdqa %xmm9,%xmm14 | ||
9758 | |||
9759 | # qhasm: xmm15 = xmm11 | ||
9760 | # asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16 | ||
9761 | # asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15 | ||
9762 | movdqa %xmm8,%xmm15 | ||
9763 | |||
9764 | # qhasm: xmm10 |= xmm9 | ||
9765 | # asm 1: por <xmm9=int6464#11,<xmm10=int6464#10 | ||
9766 | # asm 2: por <xmm9=%xmm10,<xmm10=%xmm9 | ||
9767 | por %xmm10,%xmm9 | ||
9768 | |||
9769 | # qhasm: xmm11 |= xmm12 | ||
9770 | # asm 1: por <xmm12=int6464#13,<xmm11=int6464#9 | ||
9771 | # asm 2: por <xmm12=%xmm12,<xmm11=%xmm8 | ||
9772 | por %xmm12,%xmm8 | ||
9773 | |||
9774 | # qhasm: xmm15 ^= xmm8 | ||
9775 | # asm 1: pxor <xmm8=int6464#15,<xmm15=int6464#16 | ||
9776 | # asm 2: pxor <xmm8=%xmm14,<xmm15=%xmm15 | ||
9777 | pxor %xmm14,%xmm15 | ||
9778 | |||
9779 | # qhasm: xmm14 &= xmm12 | ||
9780 | # asm 1: pand <xmm12=int6464#13,<xmm14=int6464#14 | ||
9781 | # asm 2: pand <xmm12=%xmm12,<xmm14=%xmm13 | ||
9782 | pand %xmm12,%xmm13 | ||
9783 | |||
9784 | # qhasm: xmm8 &= xmm9 | ||
9785 | # asm 1: pand <xmm9=int6464#11,<xmm8=int6464#15 | ||
9786 | # asm 2: pand <xmm9=%xmm10,<xmm8=%xmm14 | ||
9787 | pand %xmm10,%xmm14 | ||
9788 | |||
9789 | # qhasm: xmm12 ^= xmm9 | ||
9790 | # asm 1: pxor <xmm9=int6464#11,<xmm12=int6464#13 | ||
9791 | # asm 2: pxor <xmm9=%xmm10,<xmm12=%xmm12 | ||
9792 | pxor %xmm10,%xmm12 | ||
9793 | |||
9794 | # qhasm: xmm15 &= xmm12 | ||
9795 | # asm 1: pand <xmm12=int6464#13,<xmm15=int6464#16 | ||
9796 | # asm 2: pand <xmm12=%xmm12,<xmm15=%xmm15 | ||
9797 | pand %xmm12,%xmm15 | ||
9798 | |||
9799 | # qhasm: xmm12 = xmm3 | ||
9800 | # asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#11 | ||
9801 | # asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm10 | ||
9802 | movdqa %xmm3,%xmm10 | ||
9803 | |||
9804 | # qhasm: xmm12 ^= xmm0 | ||
9805 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#11 | ||
9806 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm10 | ||
9807 | pxor %xmm0,%xmm10 | ||
9808 | |||
9809 | # qhasm: xmm13 &= xmm12 | ||
9810 | # asm 1: pand <xmm12=int6464#11,<xmm13=int6464#12 | ||
9811 | # asm 2: pand <xmm12=%xmm10,<xmm13=%xmm11 | ||
9812 | pand %xmm10,%xmm11 | ||
9813 | |||
9814 | # qhasm: xmm11 ^= xmm13 | ||
9815 | # asm 1: pxor <xmm13=int6464#12,<xmm11=int6464#9 | ||
9816 | # asm 2: pxor <xmm13=%xmm11,<xmm11=%xmm8 | ||
9817 | pxor %xmm11,%xmm8 | ||
9818 | |||
9819 | # qhasm: xmm10 ^= xmm13 | ||
9820 | # asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10 | ||
9821 | # asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9 | ||
9822 | pxor %xmm11,%xmm9 | ||
9823 | |||
9824 | # qhasm: xmm13 = xmm7 | ||
9825 | # asm 1: movdqa <xmm7=int6464#8,>xmm13=int6464#11 | ||
9826 | # asm 2: movdqa <xmm7=%xmm7,>xmm13=%xmm10 | ||
9827 | movdqa %xmm7,%xmm10 | ||
9828 | |||
9829 | # qhasm: xmm13 ^= xmm1 | ||
9830 | # asm 1: pxor <xmm1=int6464#2,<xmm13=int6464#11 | ||
9831 | # asm 2: pxor <xmm1=%xmm1,<xmm13=%xmm10 | ||
9832 | pxor %xmm1,%xmm10 | ||
9833 | |||
9834 | # qhasm: xmm12 = xmm5 | ||
9835 | # asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#12 | ||
9836 | # asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm11 | ||
9837 | movdqa %xmm5,%xmm11 | ||
9838 | |||
9839 | # qhasm: xmm9 = xmm13 | ||
9840 | # asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13 | ||
9841 | # asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12 | ||
9842 | movdqa %xmm10,%xmm12 | ||
9843 | |||
9844 | # qhasm: xmm12 ^= xmm6 | ||
9845 | # asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#12 | ||
9846 | # asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm11 | ||
9847 | pxor %xmm6,%xmm11 | ||
9848 | |||
9849 | # qhasm: xmm9 |= xmm12 | ||
9850 | # asm 1: por <xmm12=int6464#12,<xmm9=int6464#13 | ||
9851 | # asm 2: por <xmm12=%xmm11,<xmm9=%xmm12 | ||
9852 | por %xmm11,%xmm12 | ||
9853 | |||
9854 | # qhasm: xmm13 &= xmm12 | ||
9855 | # asm 1: pand <xmm12=int6464#12,<xmm13=int6464#11 | ||
9856 | # asm 2: pand <xmm12=%xmm11,<xmm13=%xmm10 | ||
9857 | pand %xmm11,%xmm10 | ||
9858 | |||
9859 | # qhasm: xmm8 ^= xmm13 | ||
9860 | # asm 1: pxor <xmm13=int6464#11,<xmm8=int6464#15 | ||
9861 | # asm 2: pxor <xmm13=%xmm10,<xmm8=%xmm14 | ||
9862 | pxor %xmm10,%xmm14 | ||
9863 | |||
9864 | # qhasm: xmm11 ^= xmm15 | ||
9865 | # asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#9 | ||
9866 | # asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm8 | ||
9867 | pxor %xmm15,%xmm8 | ||
9868 | |||
9869 | # qhasm: xmm10 ^= xmm14 | ||
9870 | # asm 1: pxor <xmm14=int6464#14,<xmm10=int6464#10 | ||
9871 | # asm 2: pxor <xmm14=%xmm13,<xmm10=%xmm9 | ||
9872 | pxor %xmm13,%xmm9 | ||
9873 | |||
9874 | # qhasm: xmm9 ^= xmm15 | ||
9875 | # asm 1: pxor <xmm15=int6464#16,<xmm9=int6464#13 | ||
9876 | # asm 2: pxor <xmm15=%xmm15,<xmm9=%xmm12 | ||
9877 | pxor %xmm15,%xmm12 | ||
9878 | |||
9879 | # qhasm: xmm8 ^= xmm14 | ||
9880 | # asm 1: pxor <xmm14=int6464#14,<xmm8=int6464#15 | ||
9881 | # asm 2: pxor <xmm14=%xmm13,<xmm8=%xmm14 | ||
9882 | pxor %xmm13,%xmm14 | ||
9883 | |||
9884 | # qhasm: xmm9 ^= xmm14 | ||
9885 | # asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13 | ||
9886 | # asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12 | ||
9887 | pxor %xmm13,%xmm12 | ||
9888 | |||
9889 | # qhasm: xmm12 = xmm2 | ||
9890 | # asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#11 | ||
9891 | # asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm10 | ||
9892 | movdqa %xmm2,%xmm10 | ||
9893 | |||
9894 | # qhasm: xmm13 = xmm4 | ||
9895 | # asm 1: movdqa <xmm4=int6464#5,>xmm13=int6464#12 | ||
9896 | # asm 2: movdqa <xmm4=%xmm4,>xmm13=%xmm11 | ||
9897 | movdqa %xmm4,%xmm11 | ||
9898 | |||
9899 | # qhasm: xmm14 = xmm1 | ||
9900 | # asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14 | ||
9901 | # asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13 | ||
9902 | movdqa %xmm1,%xmm13 | ||
9903 | |||
9904 | # qhasm: xmm15 = xmm7 | ||
9905 | # asm 1: movdqa <xmm7=int6464#8,>xmm15=int6464#16 | ||
9906 | # asm 2: movdqa <xmm7=%xmm7,>xmm15=%xmm15 | ||
9907 | movdqa %xmm7,%xmm15 | ||
9908 | |||
9909 | # qhasm: xmm12 &= xmm3 | ||
9910 | # asm 1: pand <xmm3=int6464#4,<xmm12=int6464#11 | ||
9911 | # asm 2: pand <xmm3=%xmm3,<xmm12=%xmm10 | ||
9912 | pand %xmm3,%xmm10 | ||
9913 | |||
9914 | # qhasm: xmm13 &= xmm0 | ||
9915 | # asm 1: pand <xmm0=int6464#1,<xmm13=int6464#12 | ||
9916 | # asm 2: pand <xmm0=%xmm0,<xmm13=%xmm11 | ||
9917 | pand %xmm0,%xmm11 | ||
9918 | |||
9919 | # qhasm: xmm14 &= xmm5 | ||
9920 | # asm 1: pand <xmm5=int6464#6,<xmm14=int6464#14 | ||
9921 | # asm 2: pand <xmm5=%xmm5,<xmm14=%xmm13 | ||
9922 | pand %xmm5,%xmm13 | ||
9923 | |||
9924 | # qhasm: xmm15 |= xmm6 | ||
9925 | # asm 1: por <xmm6=int6464#7,<xmm15=int6464#16 | ||
9926 | # asm 2: por <xmm6=%xmm6,<xmm15=%xmm15 | ||
9927 | por %xmm6,%xmm15 | ||
9928 | |||
9929 | # qhasm: xmm11 ^= xmm12 | ||
9930 | # asm 1: pxor <xmm12=int6464#11,<xmm11=int6464#9 | ||
9931 | # asm 2: pxor <xmm12=%xmm10,<xmm11=%xmm8 | ||
9932 | pxor %xmm10,%xmm8 | ||
9933 | |||
9934 | # qhasm: xmm10 ^= xmm13 | ||
9935 | # asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10 | ||
9936 | # asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9 | ||
9937 | pxor %xmm11,%xmm9 | ||
9938 | |||
9939 | # qhasm: xmm9 ^= xmm14 | ||
9940 | # asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13 | ||
9941 | # asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12 | ||
9942 | pxor %xmm13,%xmm12 | ||
9943 | |||
9944 | # qhasm: xmm8 ^= xmm15 | ||
9945 | # asm 1: pxor <xmm15=int6464#16,<xmm8=int6464#15 | ||
9946 | # asm 2: pxor <xmm15=%xmm15,<xmm8=%xmm14 | ||
9947 | pxor %xmm15,%xmm14 | ||
9948 | |||
9949 | # qhasm: xmm12 = xmm11 | ||
9950 | # asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11 | ||
9951 | # asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10 | ||
9952 | movdqa %xmm8,%xmm10 | ||
9953 | |||
9954 | # qhasm: xmm12 ^= xmm10 | ||
9955 | # asm 1: pxor <xmm10=int6464#10,<xmm12=int6464#11 | ||
9956 | # asm 2: pxor <xmm10=%xmm9,<xmm12=%xmm10 | ||
9957 | pxor %xmm9,%xmm10 | ||
9958 | |||
9959 | # qhasm: xmm11 &= xmm9 | ||
9960 | # asm 1: pand <xmm9=int6464#13,<xmm11=int6464#9 | ||
9961 | # asm 2: pand <xmm9=%xmm12,<xmm11=%xmm8 | ||
9962 | pand %xmm12,%xmm8 | ||
9963 | |||
9964 | # qhasm: xmm14 = xmm8 | ||
9965 | # asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12 | ||
9966 | # asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11 | ||
9967 | movdqa %xmm14,%xmm11 | ||
9968 | |||
9969 | # qhasm: xmm14 ^= xmm11 | ||
9970 | # asm 1: pxor <xmm11=int6464#9,<xmm14=int6464#12 | ||
9971 | # asm 2: pxor <xmm11=%xmm8,<xmm14=%xmm11 | ||
9972 | pxor %xmm8,%xmm11 | ||
9973 | |||
9974 | # qhasm: xmm15 = xmm12 | ||
9975 | # asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14 | ||
9976 | # asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13 | ||
9977 | movdqa %xmm10,%xmm13 | ||
9978 | |||
9979 | # qhasm: xmm15 &= xmm14 | ||
9980 | # asm 1: pand <xmm14=int6464#12,<xmm15=int6464#14 | ||
9981 | # asm 2: pand <xmm14=%xmm11,<xmm15=%xmm13 | ||
9982 | pand %xmm11,%xmm13 | ||
9983 | |||
9984 | # qhasm: xmm15 ^= xmm10 | ||
9985 | # asm 1: pxor <xmm10=int6464#10,<xmm15=int6464#14 | ||
9986 | # asm 2: pxor <xmm10=%xmm9,<xmm15=%xmm13 | ||
9987 | pxor %xmm9,%xmm13 | ||
9988 | |||
9989 | # qhasm: xmm13 = xmm9 | ||
9990 | # asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16 | ||
9991 | # asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15 | ||
9992 | movdqa %xmm12,%xmm15 | ||
9993 | |||
9994 | # qhasm: xmm13 ^= xmm8 | ||
9995 | # asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16 | ||
9996 | # asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15 | ||
9997 | pxor %xmm14,%xmm15 | ||
9998 | |||
9999 | # qhasm: xmm11 ^= xmm10 | ||
10000 | # asm 1: pxor <xmm10=int6464#10,<xmm11=int6464#9 | ||
10001 | # asm 2: pxor <xmm10=%xmm9,<xmm11=%xmm8 | ||
10002 | pxor %xmm9,%xmm8 | ||
10003 | |||
10004 | # qhasm: xmm13 &= xmm11 | ||
10005 | # asm 1: pand <xmm11=int6464#9,<xmm13=int6464#16 | ||
10006 | # asm 2: pand <xmm11=%xmm8,<xmm13=%xmm15 | ||
10007 | pand %xmm8,%xmm15 | ||
10008 | |||
10009 | # qhasm: xmm13 ^= xmm8 | ||
10010 | # asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16 | ||
10011 | # asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15 | ||
10012 | pxor %xmm14,%xmm15 | ||
10013 | |||
10014 | # qhasm: xmm9 ^= xmm13 | ||
10015 | # asm 1: pxor <xmm13=int6464#16,<xmm9=int6464#13 | ||
10016 | # asm 2: pxor <xmm13=%xmm15,<xmm9=%xmm12 | ||
10017 | pxor %xmm15,%xmm12 | ||
10018 | |||
10019 | # qhasm: xmm10 = xmm14 | ||
10020 | # asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9 | ||
10021 | # asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8 | ||
10022 | movdqa %xmm11,%xmm8 | ||
10023 | |||
10024 | # qhasm: xmm10 ^= xmm13 | ||
10025 | # asm 1: pxor <xmm13=int6464#16,<xmm10=int6464#9 | ||
10026 | # asm 2: pxor <xmm13=%xmm15,<xmm10=%xmm8 | ||
10027 | pxor %xmm15,%xmm8 | ||
10028 | |||
10029 | # qhasm: xmm10 &= xmm8 | ||
10030 | # asm 1: pand <xmm8=int6464#15,<xmm10=int6464#9 | ||
10031 | # asm 2: pand <xmm8=%xmm14,<xmm10=%xmm8 | ||
10032 | pand %xmm14,%xmm8 | ||
10033 | |||
10034 | # qhasm: xmm9 ^= xmm10 | ||
10035 | # asm 1: pxor <xmm10=int6464#9,<xmm9=int6464#13 | ||
10036 | # asm 2: pxor <xmm10=%xmm8,<xmm9=%xmm12 | ||
10037 | pxor %xmm8,%xmm12 | ||
10038 | |||
10039 | # qhasm: xmm14 ^= xmm10 | ||
10040 | # asm 1: pxor <xmm10=int6464#9,<xmm14=int6464#12 | ||
10041 | # asm 2: pxor <xmm10=%xmm8,<xmm14=%xmm11 | ||
10042 | pxor %xmm8,%xmm11 | ||
10043 | |||
10044 | # qhasm: xmm14 &= xmm15 | ||
10045 | # asm 1: pand <xmm15=int6464#14,<xmm14=int6464#12 | ||
10046 | # asm 2: pand <xmm15=%xmm13,<xmm14=%xmm11 | ||
10047 | pand %xmm13,%xmm11 | ||
10048 | |||
10049 | # qhasm: xmm14 ^= xmm12 | ||
10050 | # asm 1: pxor <xmm12=int6464#11,<xmm14=int6464#12 | ||
10051 | # asm 2: pxor <xmm12=%xmm10,<xmm14=%xmm11 | ||
10052 | pxor %xmm10,%xmm11 | ||
10053 | |||
10054 | # qhasm: xmm12 = xmm6 | ||
10055 | # asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#9 | ||
10056 | # asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm8 | ||
10057 | movdqa %xmm6,%xmm8 | ||
10058 | |||
10059 | # qhasm: xmm8 = xmm5 | ||
10060 | # asm 1: movdqa <xmm5=int6464#6,>xmm8=int6464#10 | ||
10061 | # asm 2: movdqa <xmm5=%xmm5,>xmm8=%xmm9 | ||
10062 | movdqa %xmm5,%xmm9 | ||
10063 | |||
10064 | # qhasm: xmm10 = xmm15 | ||
10065 | # asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11 | ||
10066 | # asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10 | ||
10067 | movdqa %xmm13,%xmm10 | ||
10068 | |||
10069 | # qhasm: xmm10 ^= xmm14 | ||
10070 | # asm 1: pxor <xmm14=int6464#12,<xmm10=int6464#11 | ||
10071 | # asm 2: pxor <xmm14=%xmm11,<xmm10=%xmm10 | ||
10072 | pxor %xmm11,%xmm10 | ||
10073 | |||
10074 | # qhasm: xmm10 &= xmm6 | ||
10075 | # asm 1: pand <xmm6=int6464#7,<xmm10=int6464#11 | ||
10076 | # asm 2: pand <xmm6=%xmm6,<xmm10=%xmm10 | ||
10077 | pand %xmm6,%xmm10 | ||
10078 | |||
10079 | # qhasm: xmm6 ^= xmm5 | ||
10080 | # asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7 | ||
10081 | # asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6 | ||
10082 | pxor %xmm5,%xmm6 | ||
10083 | |||
10084 | # qhasm: xmm6 &= xmm14 | ||
10085 | # asm 1: pand <xmm14=int6464#12,<xmm6=int6464#7 | ||
10086 | # asm 2: pand <xmm14=%xmm11,<xmm6=%xmm6 | ||
10087 | pand %xmm11,%xmm6 | ||
10088 | |||
10089 | # qhasm: xmm5 &= xmm15 | ||
10090 | # asm 1: pand <xmm15=int6464#14,<xmm5=int6464#6 | ||
10091 | # asm 2: pand <xmm15=%xmm13,<xmm5=%xmm5 | ||
10092 | pand %xmm13,%xmm5 | ||
10093 | |||
10094 | # qhasm: xmm6 ^= xmm5 | ||
10095 | # asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7 | ||
10096 | # asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6 | ||
10097 | pxor %xmm5,%xmm6 | ||
10098 | |||
10099 | # qhasm: xmm5 ^= xmm10 | ||
10100 | # asm 1: pxor <xmm10=int6464#11,<xmm5=int6464#6 | ||
10101 | # asm 2: pxor <xmm10=%xmm10,<xmm5=%xmm5 | ||
10102 | pxor %xmm10,%xmm5 | ||
10103 | |||
10104 | # qhasm: xmm12 ^= xmm0 | ||
10105 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#9 | ||
10106 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm8 | ||
10107 | pxor %xmm0,%xmm8 | ||
10108 | |||
10109 | # qhasm: xmm8 ^= xmm3 | ||
10110 | # asm 1: pxor <xmm3=int6464#4,<xmm8=int6464#10 | ||
10111 | # asm 2: pxor <xmm3=%xmm3,<xmm8=%xmm9 | ||
10112 | pxor %xmm3,%xmm9 | ||
10113 | |||
10114 | # qhasm: xmm15 ^= xmm13 | ||
10115 | # asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14 | ||
10116 | # asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13 | ||
10117 | pxor %xmm15,%xmm13 | ||
10118 | |||
10119 | # qhasm: xmm14 ^= xmm9 | ||
10120 | # asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12 | ||
10121 | # asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11 | ||
10122 | pxor %xmm12,%xmm11 | ||
10123 | |||
10124 | # qhasm: xmm11 = xmm15 | ||
10125 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
10126 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
10127 | movdqa %xmm13,%xmm10 | ||
10128 | |||
10129 | # qhasm: xmm11 ^= xmm14 | ||
10130 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
10131 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
10132 | pxor %xmm11,%xmm10 | ||
10133 | |||
10134 | # qhasm: xmm11 &= xmm12 | ||
10135 | # asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11 | ||
10136 | # asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10 | ||
10137 | pand %xmm8,%xmm10 | ||
10138 | |||
10139 | # qhasm: xmm12 ^= xmm8 | ||
10140 | # asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9 | ||
10141 | # asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8 | ||
10142 | pxor %xmm9,%xmm8 | ||
10143 | |||
10144 | # qhasm: xmm12 &= xmm14 | ||
10145 | # asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9 | ||
10146 | # asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8 | ||
10147 | pand %xmm11,%xmm8 | ||
10148 | |||
10149 | # qhasm: xmm8 &= xmm15 | ||
10150 | # asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10 | ||
10151 | # asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9 | ||
10152 | pand %xmm13,%xmm9 | ||
10153 | |||
10154 | # qhasm: xmm8 ^= xmm12 | ||
10155 | # asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10 | ||
10156 | # asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9 | ||
10157 | pxor %xmm8,%xmm9 | ||
10158 | |||
10159 | # qhasm: xmm12 ^= xmm11 | ||
10160 | # asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9 | ||
10161 | # asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8 | ||
10162 | pxor %xmm10,%xmm8 | ||
10163 | |||
10164 | # qhasm: xmm10 = xmm13 | ||
10165 | # asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11 | ||
10166 | # asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10 | ||
10167 | movdqa %xmm15,%xmm10 | ||
10168 | |||
10169 | # qhasm: xmm10 ^= xmm9 | ||
10170 | # asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11 | ||
10171 | # asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10 | ||
10172 | pxor %xmm12,%xmm10 | ||
10173 | |||
10174 | # qhasm: xmm10 &= xmm0 | ||
10175 | # asm 1: pand <xmm0=int6464#1,<xmm10=int6464#11 | ||
10176 | # asm 2: pand <xmm0=%xmm0,<xmm10=%xmm10 | ||
10177 | pand %xmm0,%xmm10 | ||
10178 | |||
10179 | # qhasm: xmm0 ^= xmm3 | ||
10180 | # asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1 | ||
10181 | # asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0 | ||
10182 | pxor %xmm3,%xmm0 | ||
10183 | |||
10184 | # qhasm: xmm0 &= xmm9 | ||
10185 | # asm 1: pand <xmm9=int6464#13,<xmm0=int6464#1 | ||
10186 | # asm 2: pand <xmm9=%xmm12,<xmm0=%xmm0 | ||
10187 | pand %xmm12,%xmm0 | ||
10188 | |||
10189 | # qhasm: xmm3 &= xmm13 | ||
10190 | # asm 1: pand <xmm13=int6464#16,<xmm3=int6464#4 | ||
10191 | # asm 2: pand <xmm13=%xmm15,<xmm3=%xmm3 | ||
10192 | pand %xmm15,%xmm3 | ||
10193 | |||
10194 | # qhasm: xmm0 ^= xmm3 | ||
10195 | # asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1 | ||
10196 | # asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0 | ||
10197 | pxor %xmm3,%xmm0 | ||
10198 | |||
10199 | # qhasm: xmm3 ^= xmm10 | ||
10200 | # asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4 | ||
10201 | # asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3 | ||
10202 | pxor %xmm10,%xmm3 | ||
10203 | |||
10204 | # qhasm: xmm6 ^= xmm12 | ||
10205 | # asm 1: pxor <xmm12=int6464#9,<xmm6=int6464#7 | ||
10206 | # asm 2: pxor <xmm12=%xmm8,<xmm6=%xmm6 | ||
10207 | pxor %xmm8,%xmm6 | ||
10208 | |||
10209 | # qhasm: xmm0 ^= xmm12 | ||
10210 | # asm 1: pxor <xmm12=int6464#9,<xmm0=int6464#1 | ||
10211 | # asm 2: pxor <xmm12=%xmm8,<xmm0=%xmm0 | ||
10212 | pxor %xmm8,%xmm0 | ||
10213 | |||
10214 | # qhasm: xmm5 ^= xmm8 | ||
10215 | # asm 1: pxor <xmm8=int6464#10,<xmm5=int6464#6 | ||
10216 | # asm 2: pxor <xmm8=%xmm9,<xmm5=%xmm5 | ||
10217 | pxor %xmm9,%xmm5 | ||
10218 | |||
10219 | # qhasm: xmm3 ^= xmm8 | ||
10220 | # asm 1: pxor <xmm8=int6464#10,<xmm3=int6464#4 | ||
10221 | # asm 2: pxor <xmm8=%xmm9,<xmm3=%xmm3 | ||
10222 | pxor %xmm9,%xmm3 | ||
10223 | |||
10224 | # qhasm: xmm12 = xmm7 | ||
10225 | # asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#9 | ||
10226 | # asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm8 | ||
10227 | movdqa %xmm7,%xmm8 | ||
10228 | |||
10229 | # qhasm: xmm8 = xmm1 | ||
10230 | # asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10 | ||
10231 | # asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9 | ||
10232 | movdqa %xmm1,%xmm9 | ||
10233 | |||
10234 | # qhasm: xmm12 ^= xmm4 | ||
10235 | # asm 1: pxor <xmm4=int6464#5,<xmm12=int6464#9 | ||
10236 | # asm 2: pxor <xmm4=%xmm4,<xmm12=%xmm8 | ||
10237 | pxor %xmm4,%xmm8 | ||
10238 | |||
10239 | # qhasm: xmm8 ^= xmm2 | ||
10240 | # asm 1: pxor <xmm2=int6464#3,<xmm8=int6464#10 | ||
10241 | # asm 2: pxor <xmm2=%xmm2,<xmm8=%xmm9 | ||
10242 | pxor %xmm2,%xmm9 | ||
10243 | |||
10244 | # qhasm: xmm11 = xmm15 | ||
10245 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
10246 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
10247 | movdqa %xmm13,%xmm10 | ||
10248 | |||
10249 | # qhasm: xmm11 ^= xmm14 | ||
10250 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
10251 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
10252 | pxor %xmm11,%xmm10 | ||
10253 | |||
10254 | # qhasm: xmm11 &= xmm12 | ||
10255 | # asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11 | ||
10256 | # asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10 | ||
10257 | pand %xmm8,%xmm10 | ||
10258 | |||
10259 | # qhasm: xmm12 ^= xmm8 | ||
10260 | # asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9 | ||
10261 | # asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8 | ||
10262 | pxor %xmm9,%xmm8 | ||
10263 | |||
10264 | # qhasm: xmm12 &= xmm14 | ||
10265 | # asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9 | ||
10266 | # asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8 | ||
10267 | pand %xmm11,%xmm8 | ||
10268 | |||
10269 | # qhasm: xmm8 &= xmm15 | ||
10270 | # asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10 | ||
10271 | # asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9 | ||
10272 | pand %xmm13,%xmm9 | ||
10273 | |||
10274 | # qhasm: xmm8 ^= xmm12 | ||
10275 | # asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10 | ||
10276 | # asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9 | ||
10277 | pxor %xmm8,%xmm9 | ||
10278 | |||
10279 | # qhasm: xmm12 ^= xmm11 | ||
10280 | # asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9 | ||
10281 | # asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8 | ||
10282 | pxor %xmm10,%xmm8 | ||
10283 | |||
10284 | # qhasm: xmm10 = xmm13 | ||
10285 | # asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11 | ||
10286 | # asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10 | ||
10287 | movdqa %xmm15,%xmm10 | ||
10288 | |||
10289 | # qhasm: xmm10 ^= xmm9 | ||
10290 | # asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11 | ||
10291 | # asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10 | ||
10292 | pxor %xmm12,%xmm10 | ||
10293 | |||
10294 | # qhasm: xmm10 &= xmm4 | ||
10295 | # asm 1: pand <xmm4=int6464#5,<xmm10=int6464#11 | ||
10296 | # asm 2: pand <xmm4=%xmm4,<xmm10=%xmm10 | ||
10297 | pand %xmm4,%xmm10 | ||
10298 | |||
10299 | # qhasm: xmm4 ^= xmm2 | ||
10300 | # asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5 | ||
10301 | # asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4 | ||
10302 | pxor %xmm2,%xmm4 | ||
10303 | |||
10304 | # qhasm: xmm4 &= xmm9 | ||
10305 | # asm 1: pand <xmm9=int6464#13,<xmm4=int6464#5 | ||
10306 | # asm 2: pand <xmm9=%xmm12,<xmm4=%xmm4 | ||
10307 | pand %xmm12,%xmm4 | ||
10308 | |||
10309 | # qhasm: xmm2 &= xmm13 | ||
10310 | # asm 1: pand <xmm13=int6464#16,<xmm2=int6464#3 | ||
10311 | # asm 2: pand <xmm13=%xmm15,<xmm2=%xmm2 | ||
10312 | pand %xmm15,%xmm2 | ||
10313 | |||
10314 | # qhasm: xmm4 ^= xmm2 | ||
10315 | # asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5 | ||
10316 | # asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4 | ||
10317 | pxor %xmm2,%xmm4 | ||
10318 | |||
10319 | # qhasm: xmm2 ^= xmm10 | ||
10320 | # asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3 | ||
10321 | # asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2 | ||
10322 | pxor %xmm10,%xmm2 | ||
10323 | |||
10324 | # qhasm: xmm15 ^= xmm13 | ||
10325 | # asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14 | ||
10326 | # asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13 | ||
10327 | pxor %xmm15,%xmm13 | ||
10328 | |||
10329 | # qhasm: xmm14 ^= xmm9 | ||
10330 | # asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12 | ||
10331 | # asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11 | ||
10332 | pxor %xmm12,%xmm11 | ||
10333 | |||
10334 | # qhasm: xmm11 = xmm15 | ||
10335 | # asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11 | ||
10336 | # asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10 | ||
10337 | movdqa %xmm13,%xmm10 | ||
10338 | |||
10339 | # qhasm: xmm11 ^= xmm14 | ||
10340 | # asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11 | ||
10341 | # asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10 | ||
10342 | pxor %xmm11,%xmm10 | ||
10343 | |||
10344 | # qhasm: xmm11 &= xmm7 | ||
10345 | # asm 1: pand <xmm7=int6464#8,<xmm11=int6464#11 | ||
10346 | # asm 2: pand <xmm7=%xmm7,<xmm11=%xmm10 | ||
10347 | pand %xmm7,%xmm10 | ||
10348 | |||
10349 | # qhasm: xmm7 ^= xmm1 | ||
10350 | # asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8 | ||
10351 | # asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7 | ||
10352 | pxor %xmm1,%xmm7 | ||
10353 | |||
10354 | # qhasm: xmm7 &= xmm14 | ||
10355 | # asm 1: pand <xmm14=int6464#12,<xmm7=int6464#8 | ||
10356 | # asm 2: pand <xmm14=%xmm11,<xmm7=%xmm7 | ||
10357 | pand %xmm11,%xmm7 | ||
10358 | |||
10359 | # qhasm: xmm1 &= xmm15 | ||
10360 | # asm 1: pand <xmm15=int6464#14,<xmm1=int6464#2 | ||
10361 | # asm 2: pand <xmm15=%xmm13,<xmm1=%xmm1 | ||
10362 | pand %xmm13,%xmm1 | ||
10363 | |||
10364 | # qhasm: xmm7 ^= xmm1 | ||
10365 | # asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8 | ||
10366 | # asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7 | ||
10367 | pxor %xmm1,%xmm7 | ||
10368 | |||
10369 | # qhasm: xmm1 ^= xmm11 | ||
10370 | # asm 1: pxor <xmm11=int6464#11,<xmm1=int6464#2 | ||
10371 | # asm 2: pxor <xmm11=%xmm10,<xmm1=%xmm1 | ||
10372 | pxor %xmm10,%xmm1 | ||
10373 | |||
10374 | # qhasm: xmm7 ^= xmm12 | ||
10375 | # asm 1: pxor <xmm12=int6464#9,<xmm7=int6464#8 | ||
10376 | # asm 2: pxor <xmm12=%xmm8,<xmm7=%xmm7 | ||
10377 | pxor %xmm8,%xmm7 | ||
10378 | |||
10379 | # qhasm: xmm4 ^= xmm12 | ||
10380 | # asm 1: pxor <xmm12=int6464#9,<xmm4=int6464#5 | ||
10381 | # asm 2: pxor <xmm12=%xmm8,<xmm4=%xmm4 | ||
10382 | pxor %xmm8,%xmm4 | ||
10383 | |||
10384 | # qhasm: xmm1 ^= xmm8 | ||
10385 | # asm 1: pxor <xmm8=int6464#10,<xmm1=int6464#2 | ||
10386 | # asm 2: pxor <xmm8=%xmm9,<xmm1=%xmm1 | ||
10387 | pxor %xmm9,%xmm1 | ||
10388 | |||
10389 | # qhasm: xmm2 ^= xmm8 | ||
10390 | # asm 1: pxor <xmm8=int6464#10,<xmm2=int6464#3 | ||
10391 | # asm 2: pxor <xmm8=%xmm9,<xmm2=%xmm2 | ||
10392 | pxor %xmm9,%xmm2 | ||
10393 | |||
10394 | # qhasm: xmm7 ^= xmm0 | ||
10395 | # asm 1: pxor <xmm0=int6464#1,<xmm7=int6464#8 | ||
10396 | # asm 2: pxor <xmm0=%xmm0,<xmm7=%xmm7 | ||
10397 | pxor %xmm0,%xmm7 | ||
10398 | |||
10399 | # qhasm: xmm1 ^= xmm6 | ||
10400 | # asm 1: pxor <xmm6=int6464#7,<xmm1=int6464#2 | ||
10401 | # asm 2: pxor <xmm6=%xmm6,<xmm1=%xmm1 | ||
10402 | pxor %xmm6,%xmm1 | ||
10403 | |||
10404 | # qhasm: xmm4 ^= xmm7 | ||
10405 | # asm 1: pxor <xmm7=int6464#8,<xmm4=int6464#5 | ||
10406 | # asm 2: pxor <xmm7=%xmm7,<xmm4=%xmm4 | ||
10407 | pxor %xmm7,%xmm4 | ||
10408 | |||
10409 | # qhasm: xmm6 ^= xmm0 | ||
10410 | # asm 1: pxor <xmm0=int6464#1,<xmm6=int6464#7 | ||
10411 | # asm 2: pxor <xmm0=%xmm0,<xmm6=%xmm6 | ||
10412 | pxor %xmm0,%xmm6 | ||
10413 | |||
10414 | # qhasm: xmm0 ^= xmm1 | ||
10415 | # asm 1: pxor <xmm1=int6464#2,<xmm0=int6464#1 | ||
10416 | # asm 2: pxor <xmm1=%xmm1,<xmm0=%xmm0 | ||
10417 | pxor %xmm1,%xmm0 | ||
10418 | |||
10419 | # qhasm: xmm1 ^= xmm5 | ||
10420 | # asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2 | ||
10421 | # asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1 | ||
10422 | pxor %xmm5,%xmm1 | ||
10423 | |||
10424 | # qhasm: xmm5 ^= xmm2 | ||
10425 | # asm 1: pxor <xmm2=int6464#3,<xmm5=int6464#6 | ||
10426 | # asm 2: pxor <xmm2=%xmm2,<xmm5=%xmm5 | ||
10427 | pxor %xmm2,%xmm5 | ||
10428 | |||
10429 | # qhasm: xmm4 ^= xmm5 | ||
10430 | # asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5 | ||
10431 | # asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4 | ||
10432 | pxor %xmm5,%xmm4 | ||
10433 | |||
10434 | # qhasm: xmm2 ^= xmm3 | ||
10435 | # asm 1: pxor <xmm3=int6464#4,<xmm2=int6464#3 | ||
10436 | # asm 2: pxor <xmm3=%xmm3,<xmm2=%xmm2 | ||
10437 | pxor %xmm3,%xmm2 | ||
10438 | |||
10439 | # qhasm: xmm3 ^= xmm5 | ||
10440 | # asm 1: pxor <xmm5=int6464#6,<xmm3=int6464#4 | ||
10441 | # asm 2: pxor <xmm5=%xmm5,<xmm3=%xmm3 | ||
10442 | pxor %xmm5,%xmm3 | ||
10443 | |||
10444 | # qhasm: xmm6 ^= xmm3 | ||
10445 | # asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7 | ||
10446 | # asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6 | ||
10447 | pxor %xmm3,%xmm6 | ||
10448 | |||
10449 | # qhasm: xmm8 = shuffle dwords of xmm0 by 0x93 | ||
10450 | # asm 1: pshufd $0x93,<xmm0=int6464#1,>xmm8=int6464#9 | ||
10451 | # asm 2: pshufd $0x93,<xmm0=%xmm0,>xmm8=%xmm8 | ||
10452 | pshufd $0x93,%xmm0,%xmm8 | ||
10453 | |||
10454 | # qhasm: xmm9 = shuffle dwords of xmm1 by 0x93 | ||
10455 | # asm 1: pshufd $0x93,<xmm1=int6464#2,>xmm9=int6464#10 | ||
10456 | # asm 2: pshufd $0x93,<xmm1=%xmm1,>xmm9=%xmm9 | ||
10457 | pshufd $0x93,%xmm1,%xmm9 | ||
10458 | |||
10459 | # qhasm: xmm10 = shuffle dwords of xmm4 by 0x93 | ||
10460 | # asm 1: pshufd $0x93,<xmm4=int6464#5,>xmm10=int6464#11 | ||
10461 | # asm 2: pshufd $0x93,<xmm4=%xmm4,>xmm10=%xmm10 | ||
10462 | pshufd $0x93,%xmm4,%xmm10 | ||
10463 | |||
10464 | # qhasm: xmm11 = shuffle dwords of xmm6 by 0x93 | ||
10465 | # asm 1: pshufd $0x93,<xmm6=int6464#7,>xmm11=int6464#12 | ||
10466 | # asm 2: pshufd $0x93,<xmm6=%xmm6,>xmm11=%xmm11 | ||
10467 | pshufd $0x93,%xmm6,%xmm11 | ||
10468 | |||
10469 | # qhasm: xmm12 = shuffle dwords of xmm3 by 0x93 | ||
10470 | # asm 1: pshufd $0x93,<xmm3=int6464#4,>xmm12=int6464#13 | ||
10471 | # asm 2: pshufd $0x93,<xmm3=%xmm3,>xmm12=%xmm12 | ||
10472 | pshufd $0x93,%xmm3,%xmm12 | ||
10473 | |||
10474 | # qhasm: xmm13 = shuffle dwords of xmm7 by 0x93 | ||
10475 | # asm 1: pshufd $0x93,<xmm7=int6464#8,>xmm13=int6464#14 | ||
10476 | # asm 2: pshufd $0x93,<xmm7=%xmm7,>xmm13=%xmm13 | ||
10477 | pshufd $0x93,%xmm7,%xmm13 | ||
10478 | |||
10479 | # qhasm: xmm14 = shuffle dwords of xmm2 by 0x93 | ||
10480 | # asm 1: pshufd $0x93,<xmm2=int6464#3,>xmm14=int6464#15 | ||
10481 | # asm 2: pshufd $0x93,<xmm2=%xmm2,>xmm14=%xmm14 | ||
10482 | pshufd $0x93,%xmm2,%xmm14 | ||
10483 | |||
10484 | # qhasm: xmm15 = shuffle dwords of xmm5 by 0x93 | ||
10485 | # asm 1: pshufd $0x93,<xmm5=int6464#6,>xmm15=int6464#16 | ||
10486 | # asm 2: pshufd $0x93,<xmm5=%xmm5,>xmm15=%xmm15 | ||
10487 | pshufd $0x93,%xmm5,%xmm15 | ||
10488 | |||
10489 | # qhasm: xmm0 ^= xmm8 | ||
10490 | # asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1 | ||
10491 | # asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0 | ||
10492 | pxor %xmm8,%xmm0 | ||
10493 | |||
10494 | # qhasm: xmm1 ^= xmm9 | ||
10495 | # asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2 | ||
10496 | # asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1 | ||
10497 | pxor %xmm9,%xmm1 | ||
10498 | |||
10499 | # qhasm: xmm4 ^= xmm10 | ||
10500 | # asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5 | ||
10501 | # asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4 | ||
10502 | pxor %xmm10,%xmm4 | ||
10503 | |||
10504 | # qhasm: xmm6 ^= xmm11 | ||
10505 | # asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7 | ||
10506 | # asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6 | ||
10507 | pxor %xmm11,%xmm6 | ||
10508 | |||
10509 | # qhasm: xmm3 ^= xmm12 | ||
10510 | # asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4 | ||
10511 | # asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3 | ||
10512 | pxor %xmm12,%xmm3 | ||
10513 | |||
10514 | # qhasm: xmm7 ^= xmm13 | ||
10515 | # asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8 | ||
10516 | # asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7 | ||
10517 | pxor %xmm13,%xmm7 | ||
10518 | |||
10519 | # qhasm: xmm2 ^= xmm14 | ||
10520 | # asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3 | ||
10521 | # asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2 | ||
10522 | pxor %xmm14,%xmm2 | ||
10523 | |||
10524 | # qhasm: xmm5 ^= xmm15 | ||
10525 | # asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6 | ||
10526 | # asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5 | ||
10527 | pxor %xmm15,%xmm5 | ||
10528 | |||
10529 | # qhasm: xmm8 ^= xmm5 | ||
10530 | # asm 1: pxor <xmm5=int6464#6,<xmm8=int6464#9 | ||
10531 | # asm 2: pxor <xmm5=%xmm5,<xmm8=%xmm8 | ||
10532 | pxor %xmm5,%xmm8 | ||
10533 | |||
10534 | # qhasm: xmm9 ^= xmm0 | ||
10535 | # asm 1: pxor <xmm0=int6464#1,<xmm9=int6464#10 | ||
10536 | # asm 2: pxor <xmm0=%xmm0,<xmm9=%xmm9 | ||
10537 | pxor %xmm0,%xmm9 | ||
10538 | |||
10539 | # qhasm: xmm10 ^= xmm1 | ||
10540 | # asm 1: pxor <xmm1=int6464#2,<xmm10=int6464#11 | ||
10541 | # asm 2: pxor <xmm1=%xmm1,<xmm10=%xmm10 | ||
10542 | pxor %xmm1,%xmm10 | ||
10543 | |||
10544 | # qhasm: xmm9 ^= xmm5 | ||
10545 | # asm 1: pxor <xmm5=int6464#6,<xmm9=int6464#10 | ||
10546 | # asm 2: pxor <xmm5=%xmm5,<xmm9=%xmm9 | ||
10547 | pxor %xmm5,%xmm9 | ||
10548 | |||
10549 | # qhasm: xmm11 ^= xmm4 | ||
10550 | # asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#12 | ||
10551 | # asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm11 | ||
10552 | pxor %xmm4,%xmm11 | ||
10553 | |||
10554 | # qhasm: xmm12 ^= xmm6 | ||
10555 | # asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#13 | ||
10556 | # asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm12 | ||
10557 | pxor %xmm6,%xmm12 | ||
10558 | |||
10559 | # qhasm: xmm13 ^= xmm3 | ||
10560 | # asm 1: pxor <xmm3=int6464#4,<xmm13=int6464#14 | ||
10561 | # asm 2: pxor <xmm3=%xmm3,<xmm13=%xmm13 | ||
10562 | pxor %xmm3,%xmm13 | ||
10563 | |||
10564 | # qhasm: xmm11 ^= xmm5 | ||
10565 | # asm 1: pxor <xmm5=int6464#6,<xmm11=int6464#12 | ||
10566 | # asm 2: pxor <xmm5=%xmm5,<xmm11=%xmm11 | ||
10567 | pxor %xmm5,%xmm11 | ||
10568 | |||
10569 | # qhasm: xmm14 ^= xmm7 | ||
10570 | # asm 1: pxor <xmm7=int6464#8,<xmm14=int6464#15 | ||
10571 | # asm 2: pxor <xmm7=%xmm7,<xmm14=%xmm14 | ||
10572 | pxor %xmm7,%xmm14 | ||
10573 | |||
10574 | # qhasm: xmm15 ^= xmm2 | ||
10575 | # asm 1: pxor <xmm2=int6464#3,<xmm15=int6464#16 | ||
10576 | # asm 2: pxor <xmm2=%xmm2,<xmm15=%xmm15 | ||
10577 | pxor %xmm2,%xmm15 | ||
10578 | |||
10579 | # qhasm: xmm12 ^= xmm5 | ||
10580 | # asm 1: pxor <xmm5=int6464#6,<xmm12=int6464#13 | ||
10581 | # asm 2: pxor <xmm5=%xmm5,<xmm12=%xmm12 | ||
10582 | pxor %xmm5,%xmm12 | ||
10583 | |||
10584 | # qhasm: xmm0 = shuffle dwords of xmm0 by 0x4E | ||
10585 | # asm 1: pshufd $0x4E,<xmm0=int6464#1,>xmm0=int6464#1 | ||
10586 | # asm 2: pshufd $0x4E,<xmm0=%xmm0,>xmm0=%xmm0 | ||
10587 | pshufd $0x4E,%xmm0,%xmm0 | ||
10588 | |||
10589 | # qhasm: xmm1 = shuffle dwords of xmm1 by 0x4E | ||
10590 | # asm 1: pshufd $0x4E,<xmm1=int6464#2,>xmm1=int6464#2 | ||
10591 | # asm 2: pshufd $0x4E,<xmm1=%xmm1,>xmm1=%xmm1 | ||
10592 | pshufd $0x4E,%xmm1,%xmm1 | ||
10593 | |||
10594 | # qhasm: xmm4 = shuffle dwords of xmm4 by 0x4E | ||
10595 | # asm 1: pshufd $0x4E,<xmm4=int6464#5,>xmm4=int6464#5 | ||
10596 | # asm 2: pshufd $0x4E,<xmm4=%xmm4,>xmm4=%xmm4 | ||
10597 | pshufd $0x4E,%xmm4,%xmm4 | ||
10598 | |||
10599 | # qhasm: xmm6 = shuffle dwords of xmm6 by 0x4E | ||
10600 | # asm 1: pshufd $0x4E,<xmm6=int6464#7,>xmm6=int6464#7 | ||
10601 | # asm 2: pshufd $0x4E,<xmm6=%xmm6,>xmm6=%xmm6 | ||
10602 | pshufd $0x4E,%xmm6,%xmm6 | ||
10603 | |||
10604 | # qhasm: xmm3 = shuffle dwords of xmm3 by 0x4E | ||
10605 | # asm 1: pshufd $0x4E,<xmm3=int6464#4,>xmm3=int6464#4 | ||
10606 | # asm 2: pshufd $0x4E,<xmm3=%xmm3,>xmm3=%xmm3 | ||
10607 | pshufd $0x4E,%xmm3,%xmm3 | ||
10608 | |||
10609 | # qhasm: xmm7 = shuffle dwords of xmm7 by 0x4E | ||
10610 | # asm 1: pshufd $0x4E,<xmm7=int6464#8,>xmm7=int6464#8 | ||
10611 | # asm 2: pshufd $0x4E,<xmm7=%xmm7,>xmm7=%xmm7 | ||
10612 | pshufd $0x4E,%xmm7,%xmm7 | ||
10613 | |||
10614 | # qhasm: xmm2 = shuffle dwords of xmm2 by 0x4E | ||
10615 | # asm 1: pshufd $0x4E,<xmm2=int6464#3,>xmm2=int6464#3 | ||
10616 | # asm 2: pshufd $0x4E,<xmm2=%xmm2,>xmm2=%xmm2 | ||
10617 | pshufd $0x4E,%xmm2,%xmm2 | ||
10618 | |||
10619 | # qhasm: xmm5 = shuffle dwords of xmm5 by 0x4E | ||
10620 | # asm 1: pshufd $0x4E,<xmm5=int6464#6,>xmm5=int6464#6 | ||
10621 | # asm 2: pshufd $0x4E,<xmm5=%xmm5,>xmm5=%xmm5 | ||
10622 | pshufd $0x4E,%xmm5,%xmm5 | ||
10623 | |||
10624 | # qhasm: xmm8 ^= xmm0 | ||
10625 | # asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9 | ||
10626 | # asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8 | ||
10627 | pxor %xmm0,%xmm8 | ||
10628 | |||
10629 | # qhasm: xmm9 ^= xmm1 | ||
10630 | # asm 1: pxor <xmm1=int6464#2,<xmm9=int6464#10 | ||
10631 | # asm 2: pxor <xmm1=%xmm1,<xmm9=%xmm9 | ||
10632 | pxor %xmm1,%xmm9 | ||
10633 | |||
10634 | # qhasm: xmm10 ^= xmm4 | ||
10635 | # asm 1: pxor <xmm4=int6464#5,<xmm10=int6464#11 | ||
10636 | # asm 2: pxor <xmm4=%xmm4,<xmm10=%xmm10 | ||
10637 | pxor %xmm4,%xmm10 | ||
10638 | |||
10639 | # qhasm: xmm11 ^= xmm6 | ||
10640 | # asm 1: pxor <xmm6=int6464#7,<xmm11=int6464#12 | ||
10641 | # asm 2: pxor <xmm6=%xmm6,<xmm11=%xmm11 | ||
10642 | pxor %xmm6,%xmm11 | ||
10643 | |||
10644 | # qhasm: xmm12 ^= xmm3 | ||
10645 | # asm 1: pxor <xmm3=int6464#4,<xmm12=int6464#13 | ||
10646 | # asm 2: pxor <xmm3=%xmm3,<xmm12=%xmm12 | ||
10647 | pxor %xmm3,%xmm12 | ||
10648 | |||
10649 | # qhasm: xmm13 ^= xmm7 | ||
10650 | # asm 1: pxor <xmm7=int6464#8,<xmm13=int6464#14 | ||
10651 | # asm 2: pxor <xmm7=%xmm7,<xmm13=%xmm13 | ||
10652 | pxor %xmm7,%xmm13 | ||
10653 | |||
10654 | # qhasm: xmm14 ^= xmm2 | ||
10655 | # asm 1: pxor <xmm2=int6464#3,<xmm14=int6464#15 | ||
10656 | # asm 2: pxor <xmm2=%xmm2,<xmm14=%xmm14 | ||
10657 | pxor %xmm2,%xmm14 | ||
10658 | |||
10659 | # qhasm: xmm15 ^= xmm5 | ||
10660 | # asm 1: pxor <xmm5=int6464#6,<xmm15=int6464#16 | ||
10661 | # asm 2: pxor <xmm5=%xmm5,<xmm15=%xmm15 | ||
10662 | pxor %xmm5,%xmm15 | ||
10663 | |||
10664 | # qhasm: xmm8 ^= *(int128 *)(c + 1152) | ||
10665 | # asm 1: pxor 1152(<c=int64#5),<xmm8=int6464#9 | ||
10666 | # asm 2: pxor 1152(<c=%r8),<xmm8=%xmm8 | ||
10667 | pxor 1152(%r8),%xmm8 | ||
10668 | |||
10669 | # qhasm: shuffle bytes of xmm8 by SRM0 | ||
10670 | # asm 1: pshufb SRM0,<xmm8=int6464#9 | ||
10671 | # asm 2: pshufb SRM0,<xmm8=%xmm8 | ||
10672 | pshufb SRM0,%xmm8 | ||
10673 | |||
10674 | # qhasm: xmm9 ^= *(int128 *)(c + 1168) | ||
10675 | # asm 1: pxor 1168(<c=int64#5),<xmm9=int6464#10 | ||
10676 | # asm 2: pxor 1168(<c=%r8),<xmm9=%xmm9 | ||
10677 | pxor 1168(%r8),%xmm9 | ||
10678 | |||
10679 | # qhasm: shuffle bytes of xmm9 by SRM0 | ||
10680 | # asm 1: pshufb SRM0,<xmm9=int6464#10 | ||
10681 | # asm 2: pshufb SRM0,<xmm9=%xmm9 | ||
10682 | pshufb SRM0,%xmm9 | ||
10683 | |||
10684 | # qhasm: xmm10 ^= *(int128 *)(c + 1184) | ||
10685 | # asm 1: pxor 1184(<c=int64#5),<xmm10=int6464#11 | ||
10686 | # asm 2: pxor 1184(<c=%r8),<xmm10=%xmm10 | ||
10687 | pxor 1184(%r8),%xmm10 | ||
10688 | |||
10689 | # qhasm: shuffle bytes of xmm10 by SRM0 | ||
10690 | # asm 1: pshufb SRM0,<xmm10=int6464#11 | ||
10691 | # asm 2: pshufb SRM0,<xmm10=%xmm10 | ||
10692 | pshufb SRM0,%xmm10 | ||
10693 | |||
10694 | # qhasm: xmm11 ^= *(int128 *)(c + 1200) | ||
10695 | # asm 1: pxor 1200(<c=int64#5),<xmm11=int6464#12 | ||
10696 | # asm 2: pxor 1200(<c=%r8),<xmm11=%xmm11 | ||
10697 | pxor 1200(%r8),%xmm11 | ||
10698 | |||
10699 | # qhasm: shuffle bytes of xmm11 by SRM0 | ||
10700 | # asm 1: pshufb SRM0,<xmm11=int6464#12 | ||
10701 | # asm 2: pshufb SRM0,<xmm11=%xmm11 | ||
10702 | pshufb SRM0,%xmm11 | ||
10703 | |||
10704 | # qhasm: xmm12 ^= *(int128 *)(c + 1216) | ||
10705 | # asm 1: pxor 1216(<c=int64#5),<xmm12=int6464#13 | ||
10706 | # asm 2: pxor 1216(<c=%r8),<xmm12=%xmm12 | ||
10707 | pxor 1216(%r8),%xmm12 | ||
10708 | |||
10709 | # qhasm: shuffle bytes of xmm12 by SRM0 | ||
10710 | # asm 1: pshufb SRM0,<xmm12=int6464#13 | ||
10711 | # asm 2: pshufb SRM0,<xmm12=%xmm12 | ||
10712 | pshufb SRM0,%xmm12 | ||
10713 | |||
10714 | # qhasm: xmm13 ^= *(int128 *)(c + 1232) | ||
10715 | # asm 1: pxor 1232(<c=int64#5),<xmm13=int6464#14 | ||
10716 | # asm 2: pxor 1232(<c=%r8),<xmm13=%xmm13 | ||
10717 | pxor 1232(%r8),%xmm13 | ||
10718 | |||
10719 | # qhasm: shuffle bytes of xmm13 by SRM0 | ||
10720 | # asm 1: pshufb SRM0,<xmm13=int6464#14 | ||
10721 | # asm 2: pshufb SRM0,<xmm13=%xmm13 | ||
10722 | pshufb SRM0,%xmm13 | ||
10723 | |||
10724 | # qhasm: xmm14 ^= *(int128 *)(c + 1248) | ||
10725 | # asm 1: pxor 1248(<c=int64#5),<xmm14=int6464#15 | ||
10726 | # asm 2: pxor 1248(<c=%r8),<xmm14=%xmm14 | ||
10727 | pxor 1248(%r8),%xmm14 | ||
10728 | |||
10729 | # qhasm: shuffle bytes of xmm14 by SRM0 | ||
10730 | # asm 1: pshufb SRM0,<xmm14=int6464#15 | ||
10731 | # asm 2: pshufb SRM0,<xmm14=%xmm14 | ||
10732 | pshufb SRM0,%xmm14 | ||
10733 | |||
10734 | # qhasm: xmm15 ^= *(int128 *)(c + 1264) | ||
10735 | # asm 1: pxor 1264(<c=int64#5),<xmm15=int6464#16 | ||
10736 | # asm 2: pxor 1264(<c=%r8),<xmm15=%xmm15 | ||
10737 | pxor 1264(%r8),%xmm15 | ||
10738 | |||
10739 | # qhasm: shuffle bytes of xmm15 by SRM0 | ||
10740 | # asm 1: pshufb SRM0,<xmm15=int6464#16 | ||
10741 | # asm 2: pshufb SRM0,<xmm15=%xmm15 | ||
10742 | pshufb SRM0,%xmm15 | ||
10743 | |||
10744 | # qhasm: xmm13 ^= xmm14 | ||
10745 | # asm 1: pxor <xmm14=int6464#15,<xmm13=int6464#14 | ||
10746 | # asm 2: pxor <xmm14=%xmm14,<xmm13=%xmm13 | ||
10747 | pxor %xmm14,%xmm13 | ||
10748 | |||
10749 | # qhasm: xmm10 ^= xmm9 | ||
10750 | # asm 1: pxor <xmm9=int6464#10,<xmm10=int6464#11 | ||
10751 | # asm 2: pxor <xmm9=%xmm9,<xmm10=%xmm10 | ||
10752 | pxor %xmm9,%xmm10 | ||
10753 | |||
10754 | # qhasm: xmm13 ^= xmm8 | ||
10755 | # asm 1: pxor <xmm8=int6464#9,<xmm13=int6464#14 | ||
10756 | # asm 2: pxor <xmm8=%xmm8,<xmm13=%xmm13 | ||
10757 | pxor %xmm8,%xmm13 | ||
10758 | |||
10759 | # qhasm: xmm14 ^= xmm10 | ||
10760 | # asm 1: pxor <xmm10=int6464#11,<xmm14=int6464#15 | ||
10761 | # asm 2: pxor <xmm10=%xmm10,<xmm14=%xmm14 | ||
10762 | pxor %xmm10,%xmm14 | ||
10763 | |||
10764 | # qhasm: xmm11 ^= xmm8 | ||
10765 | # asm 1: pxor <xmm8=int6464#9,<xmm11=int6464#12 | ||
10766 | # asm 2: pxor <xmm8=%xmm8,<xmm11=%xmm11 | ||
10767 | pxor %xmm8,%xmm11 | ||
10768 | |||
10769 | # qhasm: xmm14 ^= xmm11 | ||
10770 | # asm 1: pxor <xmm11=int6464#12,<xmm14=int6464#15 | ||
10771 | # asm 2: pxor <xmm11=%xmm11,<xmm14=%xmm14 | ||
10772 | pxor %xmm11,%xmm14 | ||
10773 | |||
10774 | # qhasm: xmm11 ^= xmm15 | ||
10775 | # asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#12 | ||
10776 | # asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm11 | ||
10777 | pxor %xmm15,%xmm11 | ||
10778 | |||
10779 | # qhasm: xmm11 ^= xmm12 | ||
10780 | # asm 1: pxor <xmm12=int6464#13,<xmm11=int6464#12 | ||
10781 | # asm 2: pxor <xmm12=%xmm12,<xmm11=%xmm11 | ||
10782 | pxor %xmm12,%xmm11 | ||
10783 | |||
10784 | # qhasm: xmm15 ^= xmm13 | ||
10785 | # asm 1: pxor <xmm13=int6464#14,<xmm15=int6464#16 | ||
10786 | # asm 2: pxor <xmm13=%xmm13,<xmm15=%xmm15 | ||
10787 | pxor %xmm13,%xmm15 | ||
10788 | |||
10789 | # qhasm: xmm11 ^= xmm9 | ||
10790 | # asm 1: pxor <xmm9=int6464#10,<xmm11=int6464#12 | ||
10791 | # asm 2: pxor <xmm9=%xmm9,<xmm11=%xmm11 | ||
10792 | pxor %xmm9,%xmm11 | ||
10793 | |||
10794 | # qhasm: xmm12 ^= xmm13 | ||
10795 | # asm 1: pxor <xmm13=int6464#14,<xmm12=int6464#13 | ||
10796 | # asm 2: pxor <xmm13=%xmm13,<xmm12=%xmm12 | ||
10797 | pxor %xmm13,%xmm12 | ||
10798 | |||
10799 | # qhasm: xmm10 ^= xmm15 | ||
10800 | # asm 1: pxor <xmm15=int6464#16,<xmm10=int6464#11 | ||
10801 | # asm 2: pxor <xmm15=%xmm15,<xmm10=%xmm10 | ||
10802 | pxor %xmm15,%xmm10 | ||
10803 | |||
10804 | # qhasm: xmm9 ^= xmm13 | ||
10805 | # asm 1: pxor <xmm13=int6464#14,<xmm9=int6464#10 | ||
10806 | # asm 2: pxor <xmm13=%xmm13,<xmm9=%xmm9 | ||
10807 | pxor %xmm13,%xmm9 | ||
10808 | |||
10809 | # qhasm: xmm3 = xmm15 | ||
10810 | # asm 1: movdqa <xmm15=int6464#16,>xmm3=int6464#1 | ||
10811 | # asm 2: movdqa <xmm15=%xmm15,>xmm3=%xmm0 | ||
10812 | movdqa %xmm15,%xmm0 | ||
10813 | |||
10814 | # qhasm: xmm2 = xmm9 | ||
10815 | # asm 1: movdqa <xmm9=int6464#10,>xmm2=int6464#2 | ||
10816 | # asm 2: movdqa <xmm9=%xmm9,>xmm2=%xmm1 | ||
10817 | movdqa %xmm9,%xmm1 | ||
10818 | |||
10819 | # qhasm: xmm1 = xmm13 | ||
10820 | # asm 1: movdqa <xmm13=int6464#14,>xmm1=int6464#3 | ||
10821 | # asm 2: movdqa <xmm13=%xmm13,>xmm1=%xmm2 | ||
10822 | movdqa %xmm13,%xmm2 | ||
10823 | |||
10824 | # qhasm: xmm5 = xmm10 | ||
10825 | # asm 1: movdqa <xmm10=int6464#11,>xmm5=int6464#4 | ||
10826 | # asm 2: movdqa <xmm10=%xmm10,>xmm5=%xmm3 | ||
10827 | movdqa %xmm10,%xmm3 | ||
10828 | |||
10829 | # qhasm: xmm4 = xmm14 | ||
10830 | # asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#5 | ||
10831 | # asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm4 | ||
10832 | movdqa %xmm14,%xmm4 | ||
10833 | |||
10834 | # qhasm: xmm3 ^= xmm12 | ||
10835 | # asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#1 | ||
10836 | # asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm0 | ||
10837 | pxor %xmm12,%xmm0 | ||
10838 | |||
10839 | # qhasm: xmm2 ^= xmm10 | ||
10840 | # asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#2 | ||
10841 | # asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm1 | ||
10842 | pxor %xmm10,%xmm1 | ||
10843 | |||
10844 | # qhasm: xmm1 ^= xmm11 | ||
10845 | # asm 1: pxor <xmm11=int6464#12,<xmm1=int6464#3 | ||
10846 | # asm 2: pxor <xmm11=%xmm11,<xmm1=%xmm2 | ||
10847 | pxor %xmm11,%xmm2 | ||
10848 | |||
10849 | # qhasm: xmm5 ^= xmm12 | ||
10850 | # asm 1: pxor <xmm12=int6464#13,<xmm5=int6464#4 | ||
10851 | # asm 2: pxor <xmm12=%xmm12,<xmm5=%xmm3 | ||
10852 | pxor %xmm12,%xmm3 | ||
10853 | |||
10854 | # qhasm: xmm4 ^= xmm8 | ||
10855 | # asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#5 | ||
10856 | # asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm4 | ||
10857 | pxor %xmm8,%xmm4 | ||
10858 | |||
10859 | # qhasm: xmm6 = xmm3 | ||
10860 | # asm 1: movdqa <xmm3=int6464#1,>xmm6=int6464#6 | ||
10861 | # asm 2: movdqa <xmm3=%xmm0,>xmm6=%xmm5 | ||
10862 | movdqa %xmm0,%xmm5 | ||
10863 | |||
10864 | # qhasm: xmm0 = xmm2 | ||
10865 | # asm 1: movdqa <xmm2=int6464#2,>xmm0=int6464#7 | ||
10866 | # asm 2: movdqa <xmm2=%xmm1,>xmm0=%xmm6 | ||
10867 | movdqa %xmm1,%xmm6 | ||
10868 | |||
10869 | # qhasm: xmm7 = xmm3 | ||
10870 | # asm 1: movdqa <xmm3=int6464#1,>xmm7=int6464#8 | ||
10871 | # asm 2: movdqa <xmm3=%xmm0,>xmm7=%xmm7 | ||
10872 | movdqa %xmm0,%xmm7 | ||
10873 | |||
10874 | # qhasm: xmm2 |= xmm1 | ||
10875 | # asm 1: por <xmm1=int6464#3,<xmm2=int6464#2 | ||
10876 | # asm 2: por <xmm1=%xmm2,<xmm2=%xmm1 | ||
10877 | por %xmm2,%xmm1 | ||
10878 | |||
10879 | # qhasm: xmm3 |= xmm4 | ||
10880 | # asm 1: por <xmm4=int6464#5,<xmm3=int6464#1 | ||
10881 | # asm 2: por <xmm4=%xmm4,<xmm3=%xmm0 | ||
10882 | por %xmm4,%xmm0 | ||
10883 | |||
10884 | # qhasm: xmm7 ^= xmm0 | ||
10885 | # asm 1: pxor <xmm0=int6464#7,<xmm7=int6464#8 | ||
10886 | # asm 2: pxor <xmm0=%xmm6,<xmm7=%xmm7 | ||
10887 | pxor %xmm6,%xmm7 | ||
10888 | |||
10889 | # qhasm: xmm6 &= xmm4 | ||
10890 | # asm 1: pand <xmm4=int6464#5,<xmm6=int6464#6 | ||
10891 | # asm 2: pand <xmm4=%xmm4,<xmm6=%xmm5 | ||
10892 | pand %xmm4,%xmm5 | ||
10893 | |||
10894 | # qhasm: xmm0 &= xmm1 | ||
10895 | # asm 1: pand <xmm1=int6464#3,<xmm0=int6464#7 | ||
10896 | # asm 2: pand <xmm1=%xmm2,<xmm0=%xmm6 | ||
10897 | pand %xmm2,%xmm6 | ||
10898 | |||
10899 | # qhasm: xmm4 ^= xmm1 | ||
10900 | # asm 1: pxor <xmm1=int6464#3,<xmm4=int6464#5 | ||
10901 | # asm 2: pxor <xmm1=%xmm2,<xmm4=%xmm4 | ||
10902 | pxor %xmm2,%xmm4 | ||
10903 | |||
10904 | # qhasm: xmm7 &= xmm4 | ||
10905 | # asm 1: pand <xmm4=int6464#5,<xmm7=int6464#8 | ||
10906 | # asm 2: pand <xmm4=%xmm4,<xmm7=%xmm7 | ||
10907 | pand %xmm4,%xmm7 | ||
10908 | |||
10909 | # qhasm: xmm4 = xmm11 | ||
10910 | # asm 1: movdqa <xmm11=int6464#12,>xmm4=int6464#3 | ||
10911 | # asm 2: movdqa <xmm11=%xmm11,>xmm4=%xmm2 | ||
10912 | movdqa %xmm11,%xmm2 | ||
10913 | |||
10914 | # qhasm: xmm4 ^= xmm8 | ||
10915 | # asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#3 | ||
10916 | # asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm2 | ||
10917 | pxor %xmm8,%xmm2 | ||
10918 | |||
10919 | # qhasm: xmm5 &= xmm4 | ||
10920 | # asm 1: pand <xmm4=int6464#3,<xmm5=int6464#4 | ||
10921 | # asm 2: pand <xmm4=%xmm2,<xmm5=%xmm3 | ||
10922 | pand %xmm2,%xmm3 | ||
10923 | |||
10924 | # qhasm: xmm3 ^= xmm5 | ||
10925 | # asm 1: pxor <xmm5=int6464#4,<xmm3=int6464#1 | ||
10926 | # asm 2: pxor <xmm5=%xmm3,<xmm3=%xmm0 | ||
10927 | pxor %xmm3,%xmm0 | ||
10928 | |||
10929 | # qhasm: xmm2 ^= xmm5 | ||
10930 | # asm 1: pxor <xmm5=int6464#4,<xmm2=int6464#2 | ||
10931 | # asm 2: pxor <xmm5=%xmm3,<xmm2=%xmm1 | ||
10932 | pxor %xmm3,%xmm1 | ||
10933 | |||
10934 | # qhasm: xmm5 = xmm15 | ||
10935 | # asm 1: movdqa <xmm15=int6464#16,>xmm5=int6464#3 | ||
10936 | # asm 2: movdqa <xmm15=%xmm15,>xmm5=%xmm2 | ||
10937 | movdqa %xmm15,%xmm2 | ||
10938 | |||
10939 | # qhasm: xmm5 ^= xmm9 | ||
10940 | # asm 1: pxor <xmm9=int6464#10,<xmm5=int6464#3 | ||
10941 | # asm 2: pxor <xmm9=%xmm9,<xmm5=%xmm2 | ||
10942 | pxor %xmm9,%xmm2 | ||
10943 | |||
10944 | # qhasm: xmm4 = xmm13 | ||
10945 | # asm 1: movdqa <xmm13=int6464#14,>xmm4=int6464#4 | ||
10946 | # asm 2: movdqa <xmm13=%xmm13,>xmm4=%xmm3 | ||
10947 | movdqa %xmm13,%xmm3 | ||
10948 | |||
10949 | # qhasm: xmm1 = xmm5 | ||
10950 | # asm 1: movdqa <xmm5=int6464#3,>xmm1=int6464#5 | ||
10951 | # asm 2: movdqa <xmm5=%xmm2,>xmm1=%xmm4 | ||
10952 | movdqa %xmm2,%xmm4 | ||
10953 | |||
10954 | # qhasm: xmm4 ^= xmm14 | ||
10955 | # asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#4 | ||
10956 | # asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm3 | ||
10957 | pxor %xmm14,%xmm3 | ||
10958 | |||
10959 | # qhasm: xmm1 |= xmm4 | ||
10960 | # asm 1: por <xmm4=int6464#4,<xmm1=int6464#5 | ||
10961 | # asm 2: por <xmm4=%xmm3,<xmm1=%xmm4 | ||
10962 | por %xmm3,%xmm4 | ||
10963 | |||
10964 | # qhasm: xmm5 &= xmm4 | ||
10965 | # asm 1: pand <xmm4=int6464#4,<xmm5=int6464#3 | ||
10966 | # asm 2: pand <xmm4=%xmm3,<xmm5=%xmm2 | ||
10967 | pand %xmm3,%xmm2 | ||
10968 | |||
10969 | # qhasm: xmm0 ^= xmm5 | ||
10970 | # asm 1: pxor <xmm5=int6464#3,<xmm0=int6464#7 | ||
10971 | # asm 2: pxor <xmm5=%xmm2,<xmm0=%xmm6 | ||
10972 | pxor %xmm2,%xmm6 | ||
10973 | |||
10974 | # qhasm: xmm3 ^= xmm7 | ||
10975 | # asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#1 | ||
10976 | # asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm0 | ||
10977 | pxor %xmm7,%xmm0 | ||
10978 | |||
10979 | # qhasm: xmm2 ^= xmm6 | ||
10980 | # asm 1: pxor <xmm6=int6464#6,<xmm2=int6464#2 | ||
10981 | # asm 2: pxor <xmm6=%xmm5,<xmm2=%xmm1 | ||
10982 | pxor %xmm5,%xmm1 | ||
10983 | |||
10984 | # qhasm: xmm1 ^= xmm7 | ||
10985 | # asm 1: pxor <xmm7=int6464#8,<xmm1=int6464#5 | ||
10986 | # asm 2: pxor <xmm7=%xmm7,<xmm1=%xmm4 | ||
10987 | pxor %xmm7,%xmm4 | ||
10988 | |||
10989 | # qhasm: xmm0 ^= xmm6 | ||
10990 | # asm 1: pxor <xmm6=int6464#6,<xmm0=int6464#7 | ||
10991 | # asm 2: pxor <xmm6=%xmm5,<xmm0=%xmm6 | ||
10992 | pxor %xmm5,%xmm6 | ||
10993 | |||
10994 | # qhasm: xmm1 ^= xmm6 | ||
10995 | # asm 1: pxor <xmm6=int6464#6,<xmm1=int6464#5 | ||
10996 | # asm 2: pxor <xmm6=%xmm5,<xmm1=%xmm4 | ||
10997 | pxor %xmm5,%xmm4 | ||
10998 | |||
10999 | # qhasm: xmm4 = xmm10 | ||
11000 | # asm 1: movdqa <xmm10=int6464#11,>xmm4=int6464#3 | ||
11001 | # asm 2: movdqa <xmm10=%xmm10,>xmm4=%xmm2 | ||
11002 | movdqa %xmm10,%xmm2 | ||
11003 | |||
11004 | # qhasm: xmm5 = xmm12 | ||
11005 | # asm 1: movdqa <xmm12=int6464#13,>xmm5=int6464#4 | ||
11006 | # asm 2: movdqa <xmm12=%xmm12,>xmm5=%xmm3 | ||
11007 | movdqa %xmm12,%xmm3 | ||
11008 | |||
11009 | # qhasm: xmm6 = xmm9 | ||
11010 | # asm 1: movdqa <xmm9=int6464#10,>xmm6=int6464#6 | ||
11011 | # asm 2: movdqa <xmm9=%xmm9,>xmm6=%xmm5 | ||
11012 | movdqa %xmm9,%xmm5 | ||
11013 | |||
11014 | # qhasm: xmm7 = xmm15 | ||
11015 | # asm 1: movdqa <xmm15=int6464#16,>xmm7=int6464#8 | ||
11016 | # asm 2: movdqa <xmm15=%xmm15,>xmm7=%xmm7 | ||
11017 | movdqa %xmm15,%xmm7 | ||
11018 | |||
11019 | # qhasm: xmm4 &= xmm11 | ||
11020 | # asm 1: pand <xmm11=int6464#12,<xmm4=int6464#3 | ||
11021 | # asm 2: pand <xmm11=%xmm11,<xmm4=%xmm2 | ||
11022 | pand %xmm11,%xmm2 | ||
11023 | |||
11024 | # qhasm: xmm5 &= xmm8 | ||
11025 | # asm 1: pand <xmm8=int6464#9,<xmm5=int6464#4 | ||
11026 | # asm 2: pand <xmm8=%xmm8,<xmm5=%xmm3 | ||
11027 | pand %xmm8,%xmm3 | ||
11028 | |||
11029 | # qhasm: xmm6 &= xmm13 | ||
11030 | # asm 1: pand <xmm13=int6464#14,<xmm6=int6464#6 | ||
11031 | # asm 2: pand <xmm13=%xmm13,<xmm6=%xmm5 | ||
11032 | pand %xmm13,%xmm5 | ||
11033 | |||
11034 | # qhasm: xmm7 |= xmm14 | ||
11035 | # asm 1: por <xmm14=int6464#15,<xmm7=int6464#8 | ||
11036 | # asm 2: por <xmm14=%xmm14,<xmm7=%xmm7 | ||
11037 | por %xmm14,%xmm7 | ||
11038 | |||
11039 | # qhasm: xmm3 ^= xmm4 | ||
11040 | # asm 1: pxor <xmm4=int6464#3,<xmm3=int6464#1 | ||
11041 | # asm 2: pxor <xmm4=%xmm2,<xmm3=%xmm0 | ||
11042 | pxor %xmm2,%xmm0 | ||
11043 | |||
11044 | # qhasm: xmm2 ^= xmm5 | ||
11045 | # asm 1: pxor <xmm5=int6464#4,<xmm2=int6464#2 | ||
11046 | # asm 2: pxor <xmm5=%xmm3,<xmm2=%xmm1 | ||
11047 | pxor %xmm3,%xmm1 | ||
11048 | |||
11049 | # qhasm: xmm1 ^= xmm6 | ||
11050 | # asm 1: pxor <xmm6=int6464#6,<xmm1=int6464#5 | ||
11051 | # asm 2: pxor <xmm6=%xmm5,<xmm1=%xmm4 | ||
11052 | pxor %xmm5,%xmm4 | ||
11053 | |||
11054 | # qhasm: xmm0 ^= xmm7 | ||
11055 | # asm 1: pxor <xmm7=int6464#8,<xmm0=int6464#7 | ||
11056 | # asm 2: pxor <xmm7=%xmm7,<xmm0=%xmm6 | ||
11057 | pxor %xmm7,%xmm6 | ||
11058 | |||
11059 | # qhasm: xmm4 = xmm3 | ||
11060 | # asm 1: movdqa <xmm3=int6464#1,>xmm4=int6464#3 | ||
11061 | # asm 2: movdqa <xmm3=%xmm0,>xmm4=%xmm2 | ||
11062 | movdqa %xmm0,%xmm2 | ||
11063 | |||
11064 | # qhasm: xmm4 ^= xmm2 | ||
11065 | # asm 1: pxor <xmm2=int6464#2,<xmm4=int6464#3 | ||
11066 | # asm 2: pxor <xmm2=%xmm1,<xmm4=%xmm2 | ||
11067 | pxor %xmm1,%xmm2 | ||
11068 | |||
11069 | # qhasm: xmm3 &= xmm1 | ||
11070 | # asm 1: pand <xmm1=int6464#5,<xmm3=int6464#1 | ||
11071 | # asm 2: pand <xmm1=%xmm4,<xmm3=%xmm0 | ||
11072 | pand %xmm4,%xmm0 | ||
11073 | |||
11074 | # qhasm: xmm6 = xmm0 | ||
11075 | # asm 1: movdqa <xmm0=int6464#7,>xmm6=int6464#4 | ||
11076 | # asm 2: movdqa <xmm0=%xmm6,>xmm6=%xmm3 | ||
11077 | movdqa %xmm6,%xmm3 | ||
11078 | |||
11079 | # qhasm: xmm6 ^= xmm3 | ||
11080 | # asm 1: pxor <xmm3=int6464#1,<xmm6=int6464#4 | ||
11081 | # asm 2: pxor <xmm3=%xmm0,<xmm6=%xmm3 | ||
11082 | pxor %xmm0,%xmm3 | ||
11083 | |||
11084 | # qhasm: xmm7 = xmm4 | ||
11085 | # asm 1: movdqa <xmm4=int6464#3,>xmm7=int6464#6 | ||
11086 | # asm 2: movdqa <xmm4=%xmm2,>xmm7=%xmm5 | ||
11087 | movdqa %xmm2,%xmm5 | ||
11088 | |||
11089 | # qhasm: xmm7 &= xmm6 | ||
11090 | # asm 1: pand <xmm6=int6464#4,<xmm7=int6464#6 | ||
11091 | # asm 2: pand <xmm6=%xmm3,<xmm7=%xmm5 | ||
11092 | pand %xmm3,%xmm5 | ||
11093 | |||
11094 | # qhasm: xmm7 ^= xmm2 | ||
11095 | # asm 1: pxor <xmm2=int6464#2,<xmm7=int6464#6 | ||
11096 | # asm 2: pxor <xmm2=%xmm1,<xmm7=%xmm5 | ||
11097 | pxor %xmm1,%xmm5 | ||
11098 | |||
11099 | # qhasm: xmm5 = xmm1 | ||
11100 | # asm 1: movdqa <xmm1=int6464#5,>xmm5=int6464#8 | ||
11101 | # asm 2: movdqa <xmm1=%xmm4,>xmm5=%xmm7 | ||
11102 | movdqa %xmm4,%xmm7 | ||
11103 | |||
11104 | # qhasm: xmm5 ^= xmm0 | ||
11105 | # asm 1: pxor <xmm0=int6464#7,<xmm5=int6464#8 | ||
11106 | # asm 2: pxor <xmm0=%xmm6,<xmm5=%xmm7 | ||
11107 | pxor %xmm6,%xmm7 | ||
11108 | |||
11109 | # qhasm: xmm3 ^= xmm2 | ||
11110 | # asm 1: pxor <xmm2=int6464#2,<xmm3=int6464#1 | ||
11111 | # asm 2: pxor <xmm2=%xmm1,<xmm3=%xmm0 | ||
11112 | pxor %xmm1,%xmm0 | ||
11113 | |||
11114 | # qhasm: xmm5 &= xmm3 | ||
11115 | # asm 1: pand <xmm3=int6464#1,<xmm5=int6464#8 | ||
11116 | # asm 2: pand <xmm3=%xmm0,<xmm5=%xmm7 | ||
11117 | pand %xmm0,%xmm7 | ||
11118 | |||
11119 | # qhasm: xmm5 ^= xmm0 | ||
11120 | # asm 1: pxor <xmm0=int6464#7,<xmm5=int6464#8 | ||
11121 | # asm 2: pxor <xmm0=%xmm6,<xmm5=%xmm7 | ||
11122 | pxor %xmm6,%xmm7 | ||
11123 | |||
11124 | # qhasm: xmm1 ^= xmm5 | ||
11125 | # asm 1: pxor <xmm5=int6464#8,<xmm1=int6464#5 | ||
11126 | # asm 2: pxor <xmm5=%xmm7,<xmm1=%xmm4 | ||
11127 | pxor %xmm7,%xmm4 | ||
11128 | |||
11129 | # qhasm: xmm2 = xmm6 | ||
11130 | # asm 1: movdqa <xmm6=int6464#4,>xmm2=int6464#1 | ||
11131 | # asm 2: movdqa <xmm6=%xmm3,>xmm2=%xmm0 | ||
11132 | movdqa %xmm3,%xmm0 | ||
11133 | |||
11134 | # qhasm: xmm2 ^= xmm5 | ||
11135 | # asm 1: pxor <xmm5=int6464#8,<xmm2=int6464#1 | ||
11136 | # asm 2: pxor <xmm5=%xmm7,<xmm2=%xmm0 | ||
11137 | pxor %xmm7,%xmm0 | ||
11138 | |||
11139 | # qhasm: xmm2 &= xmm0 | ||
11140 | # asm 1: pand <xmm0=int6464#7,<xmm2=int6464#1 | ||
11141 | # asm 2: pand <xmm0=%xmm6,<xmm2=%xmm0 | ||
11142 | pand %xmm6,%xmm0 | ||
11143 | |||
11144 | # qhasm: xmm1 ^= xmm2 | ||
11145 | # asm 1: pxor <xmm2=int6464#1,<xmm1=int6464#5 | ||
11146 | # asm 2: pxor <xmm2=%xmm0,<xmm1=%xmm4 | ||
11147 | pxor %xmm0,%xmm4 | ||
11148 | |||
11149 | # qhasm: xmm6 ^= xmm2 | ||
11150 | # asm 1: pxor <xmm2=int6464#1,<xmm6=int6464#4 | ||
11151 | # asm 2: pxor <xmm2=%xmm0,<xmm6=%xmm3 | ||
11152 | pxor %xmm0,%xmm3 | ||
11153 | |||
11154 | # qhasm: xmm6 &= xmm7 | ||
11155 | # asm 1: pand <xmm7=int6464#6,<xmm6=int6464#4 | ||
11156 | # asm 2: pand <xmm7=%xmm5,<xmm6=%xmm3 | ||
11157 | pand %xmm5,%xmm3 | ||
11158 | |||
11159 | # qhasm: xmm6 ^= xmm4 | ||
11160 | # asm 1: pxor <xmm4=int6464#3,<xmm6=int6464#4 | ||
11161 | # asm 2: pxor <xmm4=%xmm2,<xmm6=%xmm3 | ||
11162 | pxor %xmm2,%xmm3 | ||
11163 | |||
11164 | # qhasm: xmm4 = xmm14 | ||
11165 | # asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#1 | ||
11166 | # asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm0 | ||
11167 | movdqa %xmm14,%xmm0 | ||
11168 | |||
11169 | # qhasm: xmm0 = xmm13 | ||
11170 | # asm 1: movdqa <xmm13=int6464#14,>xmm0=int6464#2 | ||
11171 | # asm 2: movdqa <xmm13=%xmm13,>xmm0=%xmm1 | ||
11172 | movdqa %xmm13,%xmm1 | ||
11173 | |||
11174 | # qhasm: xmm2 = xmm7 | ||
11175 | # asm 1: movdqa <xmm7=int6464#6,>xmm2=int6464#3 | ||
11176 | # asm 2: movdqa <xmm7=%xmm5,>xmm2=%xmm2 | ||
11177 | movdqa %xmm5,%xmm2 | ||
11178 | |||
11179 | # qhasm: xmm2 ^= xmm6 | ||
11180 | # asm 1: pxor <xmm6=int6464#4,<xmm2=int6464#3 | ||
11181 | # asm 2: pxor <xmm6=%xmm3,<xmm2=%xmm2 | ||
11182 | pxor %xmm3,%xmm2 | ||
11183 | |||
11184 | # qhasm: xmm2 &= xmm14 | ||
11185 | # asm 1: pand <xmm14=int6464#15,<xmm2=int6464#3 | ||
11186 | # asm 2: pand <xmm14=%xmm14,<xmm2=%xmm2 | ||
11187 | pand %xmm14,%xmm2 | ||
11188 | |||
11189 | # qhasm: xmm14 ^= xmm13 | ||
11190 | # asm 1: pxor <xmm13=int6464#14,<xmm14=int6464#15 | ||
11191 | # asm 2: pxor <xmm13=%xmm13,<xmm14=%xmm14 | ||
11192 | pxor %xmm13,%xmm14 | ||
11193 | |||
11194 | # qhasm: xmm14 &= xmm6 | ||
11195 | # asm 1: pand <xmm6=int6464#4,<xmm14=int6464#15 | ||
11196 | # asm 2: pand <xmm6=%xmm3,<xmm14=%xmm14 | ||
11197 | pand %xmm3,%xmm14 | ||
11198 | |||
11199 | # qhasm: xmm13 &= xmm7 | ||
11200 | # asm 1: pand <xmm7=int6464#6,<xmm13=int6464#14 | ||
11201 | # asm 2: pand <xmm7=%xmm5,<xmm13=%xmm13 | ||
11202 | pand %xmm5,%xmm13 | ||
11203 | |||
11204 | # qhasm: xmm14 ^= xmm13 | ||
11205 | # asm 1: pxor <xmm13=int6464#14,<xmm14=int6464#15 | ||
11206 | # asm 2: pxor <xmm13=%xmm13,<xmm14=%xmm14 | ||
11207 | pxor %xmm13,%xmm14 | ||
11208 | |||
11209 | # qhasm: xmm13 ^= xmm2 | ||
11210 | # asm 1: pxor <xmm2=int6464#3,<xmm13=int6464#14 | ||
11211 | # asm 2: pxor <xmm2=%xmm2,<xmm13=%xmm13 | ||
11212 | pxor %xmm2,%xmm13 | ||
11213 | |||
11214 | # qhasm: xmm4 ^= xmm8 | ||
11215 | # asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#1 | ||
11216 | # asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm0 | ||
11217 | pxor %xmm8,%xmm0 | ||
11218 | |||
11219 | # qhasm: xmm0 ^= xmm11 | ||
11220 | # asm 1: pxor <xmm11=int6464#12,<xmm0=int6464#2 | ||
11221 | # asm 2: pxor <xmm11=%xmm11,<xmm0=%xmm1 | ||
11222 | pxor %xmm11,%xmm1 | ||
11223 | |||
11224 | # qhasm: xmm7 ^= xmm5 | ||
11225 | # asm 1: pxor <xmm5=int6464#8,<xmm7=int6464#6 | ||
11226 | # asm 2: pxor <xmm5=%xmm7,<xmm7=%xmm5 | ||
11227 | pxor %xmm7,%xmm5 | ||
11228 | |||
11229 | # qhasm: xmm6 ^= xmm1 | ||
11230 | # asm 1: pxor <xmm1=int6464#5,<xmm6=int6464#4 | ||
11231 | # asm 2: pxor <xmm1=%xmm4,<xmm6=%xmm3 | ||
11232 | pxor %xmm4,%xmm3 | ||
11233 | |||
11234 | # qhasm: xmm3 = xmm7 | ||
11235 | # asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3 | ||
11236 | # asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2 | ||
11237 | movdqa %xmm5,%xmm2 | ||
11238 | |||
11239 | # qhasm: xmm3 ^= xmm6 | ||
11240 | # asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3 | ||
11241 | # asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2 | ||
11242 | pxor %xmm3,%xmm2 | ||
11243 | |||
11244 | # qhasm: xmm3 &= xmm4 | ||
11245 | # asm 1: pand <xmm4=int6464#1,<xmm3=int6464#3 | ||
11246 | # asm 2: pand <xmm4=%xmm0,<xmm3=%xmm2 | ||
11247 | pand %xmm0,%xmm2 | ||
11248 | |||
11249 | # qhasm: xmm4 ^= xmm0 | ||
11250 | # asm 1: pxor <xmm0=int6464#2,<xmm4=int6464#1 | ||
11251 | # asm 2: pxor <xmm0=%xmm1,<xmm4=%xmm0 | ||
11252 | pxor %xmm1,%xmm0 | ||
11253 | |||
11254 | # qhasm: xmm4 &= xmm6 | ||
11255 | # asm 1: pand <xmm6=int6464#4,<xmm4=int6464#1 | ||
11256 | # asm 2: pand <xmm6=%xmm3,<xmm4=%xmm0 | ||
11257 | pand %xmm3,%xmm0 | ||
11258 | |||
11259 | # qhasm: xmm0 &= xmm7 | ||
11260 | # asm 1: pand <xmm7=int6464#6,<xmm0=int6464#2 | ||
11261 | # asm 2: pand <xmm7=%xmm5,<xmm0=%xmm1 | ||
11262 | pand %xmm5,%xmm1 | ||
11263 | |||
11264 | # qhasm: xmm0 ^= xmm4 | ||
11265 | # asm 1: pxor <xmm4=int6464#1,<xmm0=int6464#2 | ||
11266 | # asm 2: pxor <xmm4=%xmm0,<xmm0=%xmm1 | ||
11267 | pxor %xmm0,%xmm1 | ||
11268 | |||
11269 | # qhasm: xmm4 ^= xmm3 | ||
11270 | # asm 1: pxor <xmm3=int6464#3,<xmm4=int6464#1 | ||
11271 | # asm 2: pxor <xmm3=%xmm2,<xmm4=%xmm0 | ||
11272 | pxor %xmm2,%xmm0 | ||
11273 | |||
11274 | # qhasm: xmm2 = xmm5 | ||
11275 | # asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3 | ||
11276 | # asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2 | ||
11277 | movdqa %xmm7,%xmm2 | ||
11278 | |||
11279 | # qhasm: xmm2 ^= xmm1 | ||
11280 | # asm 1: pxor <xmm1=int6464#5,<xmm2=int6464#3 | ||
11281 | # asm 2: pxor <xmm1=%xmm4,<xmm2=%xmm2 | ||
11282 | pxor %xmm4,%xmm2 | ||
11283 | |||
11284 | # qhasm: xmm2 &= xmm8 | ||
11285 | # asm 1: pand <xmm8=int6464#9,<xmm2=int6464#3 | ||
11286 | # asm 2: pand <xmm8=%xmm8,<xmm2=%xmm2 | ||
11287 | pand %xmm8,%xmm2 | ||
11288 | |||
11289 | # qhasm: xmm8 ^= xmm11 | ||
11290 | # asm 1: pxor <xmm11=int6464#12,<xmm8=int6464#9 | ||
11291 | # asm 2: pxor <xmm11=%xmm11,<xmm8=%xmm8 | ||
11292 | pxor %xmm11,%xmm8 | ||
11293 | |||
11294 | # qhasm: xmm8 &= xmm1 | ||
11295 | # asm 1: pand <xmm1=int6464#5,<xmm8=int6464#9 | ||
11296 | # asm 2: pand <xmm1=%xmm4,<xmm8=%xmm8 | ||
11297 | pand %xmm4,%xmm8 | ||
11298 | |||
11299 | # qhasm: xmm11 &= xmm5 | ||
11300 | # asm 1: pand <xmm5=int6464#8,<xmm11=int6464#12 | ||
11301 | # asm 2: pand <xmm5=%xmm7,<xmm11=%xmm11 | ||
11302 | pand %xmm7,%xmm11 | ||
11303 | |||
11304 | # qhasm: xmm8 ^= xmm11 | ||
11305 | # asm 1: pxor <xmm11=int6464#12,<xmm8=int6464#9 | ||
11306 | # asm 2: pxor <xmm11=%xmm11,<xmm8=%xmm8 | ||
11307 | pxor %xmm11,%xmm8 | ||
11308 | |||
11309 | # qhasm: xmm11 ^= xmm2 | ||
11310 | # asm 1: pxor <xmm2=int6464#3,<xmm11=int6464#12 | ||
11311 | # asm 2: pxor <xmm2=%xmm2,<xmm11=%xmm11 | ||
11312 | pxor %xmm2,%xmm11 | ||
11313 | |||
11314 | # qhasm: xmm14 ^= xmm4 | ||
11315 | # asm 1: pxor <xmm4=int6464#1,<xmm14=int6464#15 | ||
11316 | # asm 2: pxor <xmm4=%xmm0,<xmm14=%xmm14 | ||
11317 | pxor %xmm0,%xmm14 | ||
11318 | |||
11319 | # qhasm: xmm8 ^= xmm4 | ||
11320 | # asm 1: pxor <xmm4=int6464#1,<xmm8=int6464#9 | ||
11321 | # asm 2: pxor <xmm4=%xmm0,<xmm8=%xmm8 | ||
11322 | pxor %xmm0,%xmm8 | ||
11323 | |||
11324 | # qhasm: xmm13 ^= xmm0 | ||
11325 | # asm 1: pxor <xmm0=int6464#2,<xmm13=int6464#14 | ||
11326 | # asm 2: pxor <xmm0=%xmm1,<xmm13=%xmm13 | ||
11327 | pxor %xmm1,%xmm13 | ||
11328 | |||
11329 | # qhasm: xmm11 ^= xmm0 | ||
11330 | # asm 1: pxor <xmm0=int6464#2,<xmm11=int6464#12 | ||
11331 | # asm 2: pxor <xmm0=%xmm1,<xmm11=%xmm11 | ||
11332 | pxor %xmm1,%xmm11 | ||
11333 | |||
11334 | # qhasm: xmm4 = xmm15 | ||
11335 | # asm 1: movdqa <xmm15=int6464#16,>xmm4=int6464#1 | ||
11336 | # asm 2: movdqa <xmm15=%xmm15,>xmm4=%xmm0 | ||
11337 | movdqa %xmm15,%xmm0 | ||
11338 | |||
11339 | # qhasm: xmm0 = xmm9 | ||
11340 | # asm 1: movdqa <xmm9=int6464#10,>xmm0=int6464#2 | ||
11341 | # asm 2: movdqa <xmm9=%xmm9,>xmm0=%xmm1 | ||
11342 | movdqa %xmm9,%xmm1 | ||
11343 | |||
11344 | # qhasm: xmm4 ^= xmm12 | ||
11345 | # asm 1: pxor <xmm12=int6464#13,<xmm4=int6464#1 | ||
11346 | # asm 2: pxor <xmm12=%xmm12,<xmm4=%xmm0 | ||
11347 | pxor %xmm12,%xmm0 | ||
11348 | |||
11349 | # qhasm: xmm0 ^= xmm10 | ||
11350 | # asm 1: pxor <xmm10=int6464#11,<xmm0=int6464#2 | ||
11351 | # asm 2: pxor <xmm10=%xmm10,<xmm0=%xmm1 | ||
11352 | pxor %xmm10,%xmm1 | ||
11353 | |||
11354 | # qhasm: xmm3 = xmm7 | ||
11355 | # asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3 | ||
11356 | # asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2 | ||
11357 | movdqa %xmm5,%xmm2 | ||
11358 | |||
11359 | # qhasm: xmm3 ^= xmm6 | ||
11360 | # asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3 | ||
11361 | # asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2 | ||
11362 | pxor %xmm3,%xmm2 | ||
11363 | |||
11364 | # qhasm: xmm3 &= xmm4 | ||
11365 | # asm 1: pand <xmm4=int6464#1,<xmm3=int6464#3 | ||
11366 | # asm 2: pand <xmm4=%xmm0,<xmm3=%xmm2 | ||
11367 | pand %xmm0,%xmm2 | ||
11368 | |||
11369 | # qhasm: xmm4 ^= xmm0 | ||
11370 | # asm 1: pxor <xmm0=int6464#2,<xmm4=int6464#1 | ||
11371 | # asm 2: pxor <xmm0=%xmm1,<xmm4=%xmm0 | ||
11372 | pxor %xmm1,%xmm0 | ||
11373 | |||
11374 | # qhasm: xmm4 &= xmm6 | ||
11375 | # asm 1: pand <xmm6=int6464#4,<xmm4=int6464#1 | ||
11376 | # asm 2: pand <xmm6=%xmm3,<xmm4=%xmm0 | ||
11377 | pand %xmm3,%xmm0 | ||
11378 | |||
11379 | # qhasm: xmm0 &= xmm7 | ||
11380 | # asm 1: pand <xmm7=int6464#6,<xmm0=int6464#2 | ||
11381 | # asm 2: pand <xmm7=%xmm5,<xmm0=%xmm1 | ||
11382 | pand %xmm5,%xmm1 | ||
11383 | |||
11384 | # qhasm: xmm0 ^= xmm4 | ||
11385 | # asm 1: pxor <xmm4=int6464#1,<xmm0=int6464#2 | ||
11386 | # asm 2: pxor <xmm4=%xmm0,<xmm0=%xmm1 | ||
11387 | pxor %xmm0,%xmm1 | ||
11388 | |||
11389 | # qhasm: xmm4 ^= xmm3 | ||
11390 | # asm 1: pxor <xmm3=int6464#3,<xmm4=int6464#1 | ||
11391 | # asm 2: pxor <xmm3=%xmm2,<xmm4=%xmm0 | ||
11392 | pxor %xmm2,%xmm0 | ||
11393 | |||
11394 | # qhasm: xmm2 = xmm5 | ||
11395 | # asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3 | ||
11396 | # asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2 | ||
11397 | movdqa %xmm7,%xmm2 | ||
11398 | |||
11399 | # qhasm: xmm2 ^= xmm1 | ||
11400 | # asm 1: pxor <xmm1=int6464#5,<xmm2=int6464#3 | ||
11401 | # asm 2: pxor <xmm1=%xmm4,<xmm2=%xmm2 | ||
11402 | pxor %xmm4,%xmm2 | ||
11403 | |||
11404 | # qhasm: xmm2 &= xmm12 | ||
11405 | # asm 1: pand <xmm12=int6464#13,<xmm2=int6464#3 | ||
11406 | # asm 2: pand <xmm12=%xmm12,<xmm2=%xmm2 | ||
11407 | pand %xmm12,%xmm2 | ||
11408 | |||
11409 | # qhasm: xmm12 ^= xmm10 | ||
11410 | # asm 1: pxor <xmm10=int6464#11,<xmm12=int6464#13 | ||
11411 | # asm 2: pxor <xmm10=%xmm10,<xmm12=%xmm12 | ||
11412 | pxor %xmm10,%xmm12 | ||
11413 | |||
11414 | # qhasm: xmm12 &= xmm1 | ||
11415 | # asm 1: pand <xmm1=int6464#5,<xmm12=int6464#13 | ||
11416 | # asm 2: pand <xmm1=%xmm4,<xmm12=%xmm12 | ||
11417 | pand %xmm4,%xmm12 | ||
11418 | |||
11419 | # qhasm: xmm10 &= xmm5 | ||
11420 | # asm 1: pand <xmm5=int6464#8,<xmm10=int6464#11 | ||
11421 | # asm 2: pand <xmm5=%xmm7,<xmm10=%xmm10 | ||
11422 | pand %xmm7,%xmm10 | ||
11423 | |||
11424 | # qhasm: xmm12 ^= xmm10 | ||
11425 | # asm 1: pxor <xmm10=int6464#11,<xmm12=int6464#13 | ||
11426 | # asm 2: pxor <xmm10=%xmm10,<xmm12=%xmm12 | ||
11427 | pxor %xmm10,%xmm12 | ||
11428 | |||
11429 | # qhasm: xmm10 ^= xmm2 | ||
11430 | # asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#11 | ||
11431 | # asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm10 | ||
11432 | pxor %xmm2,%xmm10 | ||
11433 | |||
11434 | # qhasm: xmm7 ^= xmm5 | ||
11435 | # asm 1: pxor <xmm5=int6464#8,<xmm7=int6464#6 | ||
11436 | # asm 2: pxor <xmm5=%xmm7,<xmm7=%xmm5 | ||
11437 | pxor %xmm7,%xmm5 | ||
11438 | |||
11439 | # qhasm: xmm6 ^= xmm1 | ||
11440 | # asm 1: pxor <xmm1=int6464#5,<xmm6=int6464#4 | ||
11441 | # asm 2: pxor <xmm1=%xmm4,<xmm6=%xmm3 | ||
11442 | pxor %xmm4,%xmm3 | ||
11443 | |||
11444 | # qhasm: xmm3 = xmm7 | ||
11445 | # asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3 | ||
11446 | # asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2 | ||
11447 | movdqa %xmm5,%xmm2 | ||
11448 | |||
11449 | # qhasm: xmm3 ^= xmm6 | ||
11450 | # asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3 | ||
11451 | # asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2 | ||
11452 | pxor %xmm3,%xmm2 | ||
11453 | |||
11454 | # qhasm: xmm3 &= xmm15 | ||
11455 | # asm 1: pand <xmm15=int6464#16,<xmm3=int6464#3 | ||
11456 | # asm 2: pand <xmm15=%xmm15,<xmm3=%xmm2 | ||
11457 | pand %xmm15,%xmm2 | ||
11458 | |||
11459 | # qhasm: xmm15 ^= xmm9 | ||
11460 | # asm 1: pxor <xmm9=int6464#10,<xmm15=int6464#16 | ||
11461 | # asm 2: pxor <xmm9=%xmm9,<xmm15=%xmm15 | ||
11462 | pxor %xmm9,%xmm15 | ||
11463 | |||
11464 | # qhasm: xmm15 &= xmm6 | ||
11465 | # asm 1: pand <xmm6=int6464#4,<xmm15=int6464#16 | ||
11466 | # asm 2: pand <xmm6=%xmm3,<xmm15=%xmm15 | ||
11467 | pand %xmm3,%xmm15 | ||
11468 | |||
11469 | # qhasm: xmm9 &= xmm7 | ||
11470 | # asm 1: pand <xmm7=int6464#6,<xmm9=int6464#10 | ||
11471 | # asm 2: pand <xmm7=%xmm5,<xmm9=%xmm9 | ||
11472 | pand %xmm5,%xmm9 | ||
11473 | |||
11474 | # qhasm: xmm15 ^= xmm9 | ||
11475 | # asm 1: pxor <xmm9=int6464#10,<xmm15=int6464#16 | ||
11476 | # asm 2: pxor <xmm9=%xmm9,<xmm15=%xmm15 | ||
11477 | pxor %xmm9,%xmm15 | ||
11478 | |||
11479 | # qhasm: xmm9 ^= xmm3 | ||
11480 | # asm 1: pxor <xmm3=int6464#3,<xmm9=int6464#10 | ||
11481 | # asm 2: pxor <xmm3=%xmm2,<xmm9=%xmm9 | ||
11482 | pxor %xmm2,%xmm9 | ||
11483 | |||
11484 | # qhasm: xmm15 ^= xmm4 | ||
11485 | # asm 1: pxor <xmm4=int6464#1,<xmm15=int6464#16 | ||
11486 | # asm 2: pxor <xmm4=%xmm0,<xmm15=%xmm15 | ||
11487 | pxor %xmm0,%xmm15 | ||
11488 | |||
11489 | # qhasm: xmm12 ^= xmm4 | ||
11490 | # asm 1: pxor <xmm4=int6464#1,<xmm12=int6464#13 | ||
11491 | # asm 2: pxor <xmm4=%xmm0,<xmm12=%xmm12 | ||
11492 | pxor %xmm0,%xmm12 | ||
11493 | |||
11494 | # qhasm: xmm9 ^= xmm0 | ||
11495 | # asm 1: pxor <xmm0=int6464#2,<xmm9=int6464#10 | ||
11496 | # asm 2: pxor <xmm0=%xmm1,<xmm9=%xmm9 | ||
11497 | pxor %xmm1,%xmm9 | ||
11498 | |||
11499 | # qhasm: xmm10 ^= xmm0 | ||
11500 | # asm 1: pxor <xmm0=int6464#2,<xmm10=int6464#11 | ||
11501 | # asm 2: pxor <xmm0=%xmm1,<xmm10=%xmm10 | ||
11502 | pxor %xmm1,%xmm10 | ||
11503 | |||
11504 | # qhasm: xmm15 ^= xmm8 | ||
11505 | # asm 1: pxor <xmm8=int6464#9,<xmm15=int6464#16 | ||
11506 | # asm 2: pxor <xmm8=%xmm8,<xmm15=%xmm15 | ||
11507 | pxor %xmm8,%xmm15 | ||
11508 | |||
11509 | # qhasm: xmm9 ^= xmm14 | ||
11510 | # asm 1: pxor <xmm14=int6464#15,<xmm9=int6464#10 | ||
11511 | # asm 2: pxor <xmm14=%xmm14,<xmm9=%xmm9 | ||
11512 | pxor %xmm14,%xmm9 | ||
11513 | |||
11514 | # qhasm: xmm12 ^= xmm15 | ||
11515 | # asm 1: pxor <xmm15=int6464#16,<xmm12=int6464#13 | ||
11516 | # asm 2: pxor <xmm15=%xmm15,<xmm12=%xmm12 | ||
11517 | pxor %xmm15,%xmm12 | ||
11518 | |||
11519 | # qhasm: xmm14 ^= xmm8 | ||
11520 | # asm 1: pxor <xmm8=int6464#9,<xmm14=int6464#15 | ||
11521 | # asm 2: pxor <xmm8=%xmm8,<xmm14=%xmm14 | ||
11522 | pxor %xmm8,%xmm14 | ||
11523 | |||
11524 | # qhasm: xmm8 ^= xmm9 | ||
11525 | # asm 1: pxor <xmm9=int6464#10,<xmm8=int6464#9 | ||
11526 | # asm 2: pxor <xmm9=%xmm9,<xmm8=%xmm8 | ||
11527 | pxor %xmm9,%xmm8 | ||
11528 | |||
11529 | # qhasm: xmm9 ^= xmm13 | ||
11530 | # asm 1: pxor <xmm13=int6464#14,<xmm9=int6464#10 | ||
11531 | # asm 2: pxor <xmm13=%xmm13,<xmm9=%xmm9 | ||
11532 | pxor %xmm13,%xmm9 | ||
11533 | |||
11534 | # qhasm: xmm13 ^= xmm10 | ||
11535 | # asm 1: pxor <xmm10=int6464#11,<xmm13=int6464#14 | ||
11536 | # asm 2: pxor <xmm10=%xmm10,<xmm13=%xmm13 | ||
11537 | pxor %xmm10,%xmm13 | ||
11538 | |||
11539 | # qhasm: xmm12 ^= xmm13 | ||
11540 | # asm 1: pxor <xmm13=int6464#14,<xmm12=int6464#13 | ||
11541 | # asm 2: pxor <xmm13=%xmm13,<xmm12=%xmm12 | ||
11542 | pxor %xmm13,%xmm12 | ||
11543 | |||
11544 | # qhasm: xmm10 ^= xmm11 | ||
11545 | # asm 1: pxor <xmm11=int6464#12,<xmm10=int6464#11 | ||
11546 | # asm 2: pxor <xmm11=%xmm11,<xmm10=%xmm10 | ||
11547 | pxor %xmm11,%xmm10 | ||
11548 | |||
11549 | # qhasm: xmm11 ^= xmm13 | ||
11550 | # asm 1: pxor <xmm13=int6464#14,<xmm11=int6464#12 | ||
11551 | # asm 2: pxor <xmm13=%xmm13,<xmm11=%xmm11 | ||
11552 | pxor %xmm13,%xmm11 | ||
11553 | |||
11554 | # qhasm: xmm14 ^= xmm11 | ||
11555 | # asm 1: pxor <xmm11=int6464#12,<xmm14=int6464#15 | ||
11556 | # asm 2: pxor <xmm11=%xmm11,<xmm14=%xmm14 | ||
11557 | pxor %xmm11,%xmm14 | ||
11558 | |||
11559 | # qhasm: xmm8 ^= *(int128 *)(c + 1280) | ||
11560 | # asm 1: pxor 1280(<c=int64#5),<xmm8=int6464#9 | ||
11561 | # asm 2: pxor 1280(<c=%r8),<xmm8=%xmm8 | ||
11562 | pxor 1280(%r8),%xmm8 | ||
11563 | |||
11564 | # qhasm: xmm9 ^= *(int128 *)(c + 1296) | ||
11565 | # asm 1: pxor 1296(<c=int64#5),<xmm9=int6464#10 | ||
11566 | # asm 2: pxor 1296(<c=%r8),<xmm9=%xmm9 | ||
11567 | pxor 1296(%r8),%xmm9 | ||
11568 | |||
11569 | # qhasm: xmm12 ^= *(int128 *)(c + 1312) | ||
11570 | # asm 1: pxor 1312(<c=int64#5),<xmm12=int6464#13 | ||
11571 | # asm 2: pxor 1312(<c=%r8),<xmm12=%xmm12 | ||
11572 | pxor 1312(%r8),%xmm12 | ||
11573 | |||
11574 | # qhasm: xmm14 ^= *(int128 *)(c + 1328) | ||
11575 | # asm 1: pxor 1328(<c=int64#5),<xmm14=int6464#15 | ||
11576 | # asm 2: pxor 1328(<c=%r8),<xmm14=%xmm14 | ||
11577 | pxor 1328(%r8),%xmm14 | ||
11578 | |||
11579 | # qhasm: xmm11 ^= *(int128 *)(c + 1344) | ||
11580 | # asm 1: pxor 1344(<c=int64#5),<xmm11=int6464#12 | ||
11581 | # asm 2: pxor 1344(<c=%r8),<xmm11=%xmm11 | ||
11582 | pxor 1344(%r8),%xmm11 | ||
11583 | |||
11584 | # qhasm: xmm15 ^= *(int128 *)(c + 1360) | ||
11585 | # asm 1: pxor 1360(<c=int64#5),<xmm15=int6464#16 | ||
11586 | # asm 2: pxor 1360(<c=%r8),<xmm15=%xmm15 | ||
11587 | pxor 1360(%r8),%xmm15 | ||
11588 | |||
11589 | # qhasm: xmm10 ^= *(int128 *)(c + 1376) | ||
11590 | # asm 1: pxor 1376(<c=int64#5),<xmm10=int6464#11 | ||
11591 | # asm 2: pxor 1376(<c=%r8),<xmm10=%xmm10 | ||
11592 | pxor 1376(%r8),%xmm10 | ||
11593 | |||
11594 | # qhasm: xmm13 ^= *(int128 *)(c + 1392) | ||
11595 | # asm 1: pxor 1392(<c=int64#5),<xmm13=int6464#14 | ||
11596 | # asm 2: pxor 1392(<c=%r8),<xmm13=%xmm13 | ||
11597 | pxor 1392(%r8),%xmm13 | ||
11598 | |||
11599 | # qhasm: xmm0 = xmm10 | ||
11600 | # asm 1: movdqa <xmm10=int6464#11,>xmm0=int6464#1 | ||
11601 | # asm 2: movdqa <xmm10=%xmm10,>xmm0=%xmm0 | ||
11602 | movdqa %xmm10,%xmm0 | ||
11603 | |||
11604 | # qhasm: uint6464 xmm0 >>= 1 | ||
11605 | # asm 1: psrlq $1,<xmm0=int6464#1 | ||
11606 | # asm 2: psrlq $1,<xmm0=%xmm0 | ||
11607 | psrlq $1,%xmm0 | ||
11608 | |||
11609 | # qhasm: xmm0 ^= xmm13 | ||
11610 | # asm 1: pxor <xmm13=int6464#14,<xmm0=int6464#1 | ||
11611 | # asm 2: pxor <xmm13=%xmm13,<xmm0=%xmm0 | ||
11612 | pxor %xmm13,%xmm0 | ||
11613 | |||
11614 | # qhasm: xmm0 &= BS0 | ||
11615 | # asm 1: pand BS0,<xmm0=int6464#1 | ||
11616 | # asm 2: pand BS0,<xmm0=%xmm0 | ||
11617 | pand BS0,%xmm0 | ||
11618 | |||
11619 | # qhasm: xmm13 ^= xmm0 | ||
11620 | # asm 1: pxor <xmm0=int6464#1,<xmm13=int6464#14 | ||
11621 | # asm 2: pxor <xmm0=%xmm0,<xmm13=%xmm13 | ||
11622 | pxor %xmm0,%xmm13 | ||
11623 | |||
11624 | # qhasm: uint6464 xmm0 <<= 1 | ||
11625 | # asm 1: psllq $1,<xmm0=int6464#1 | ||
11626 | # asm 2: psllq $1,<xmm0=%xmm0 | ||
11627 | psllq $1,%xmm0 | ||
11628 | |||
11629 | # qhasm: xmm10 ^= xmm0 | ||
11630 | # asm 1: pxor <xmm0=int6464#1,<xmm10=int6464#11 | ||
11631 | # asm 2: pxor <xmm0=%xmm0,<xmm10=%xmm10 | ||
11632 | pxor %xmm0,%xmm10 | ||
11633 | |||
11634 | # qhasm: xmm0 = xmm11 | ||
11635 | # asm 1: movdqa <xmm11=int6464#12,>xmm0=int6464#1 | ||
11636 | # asm 2: movdqa <xmm11=%xmm11,>xmm0=%xmm0 | ||
11637 | movdqa %xmm11,%xmm0 | ||
11638 | |||
11639 | # qhasm: uint6464 xmm0 >>= 1 | ||
11640 | # asm 1: psrlq $1,<xmm0=int6464#1 | ||
11641 | # asm 2: psrlq $1,<xmm0=%xmm0 | ||
11642 | psrlq $1,%xmm0 | ||
11643 | |||
11644 | # qhasm: xmm0 ^= xmm15 | ||
11645 | # asm 1: pxor <xmm15=int6464#16,<xmm0=int6464#1 | ||
11646 | # asm 2: pxor <xmm15=%xmm15,<xmm0=%xmm0 | ||
11647 | pxor %xmm15,%xmm0 | ||
11648 | |||
11649 | # qhasm: xmm0 &= BS0 | ||
11650 | # asm 1: pand BS0,<xmm0=int6464#1 | ||
11651 | # asm 2: pand BS0,<xmm0=%xmm0 | ||
11652 | pand BS0,%xmm0 | ||
11653 | |||
11654 | # qhasm: xmm15 ^= xmm0 | ||
11655 | # asm 1: pxor <xmm0=int6464#1,<xmm15=int6464#16 | ||
11656 | # asm 2: pxor <xmm0=%xmm0,<xmm15=%xmm15 | ||
11657 | pxor %xmm0,%xmm15 | ||
11658 | |||
11659 | # qhasm: uint6464 xmm0 <<= 1 | ||
11660 | # asm 1: psllq $1,<xmm0=int6464#1 | ||
11661 | # asm 2: psllq $1,<xmm0=%xmm0 | ||
11662 | psllq $1,%xmm0 | ||
11663 | |||
11664 | # qhasm: xmm11 ^= xmm0 | ||
11665 | # asm 1: pxor <xmm0=int6464#1,<xmm11=int6464#12 | ||
11666 | # asm 2: pxor <xmm0=%xmm0,<xmm11=%xmm11 | ||
11667 | pxor %xmm0,%xmm11 | ||
11668 | |||
11669 | # qhasm: xmm0 = xmm12 | ||
11670 | # asm 1: movdqa <xmm12=int6464#13,>xmm0=int6464#1 | ||
11671 | # asm 2: movdqa <xmm12=%xmm12,>xmm0=%xmm0 | ||
11672 | movdqa %xmm12,%xmm0 | ||
11673 | |||
11674 | # qhasm: uint6464 xmm0 >>= 1 | ||
11675 | # asm 1: psrlq $1,<xmm0=int6464#1 | ||
11676 | # asm 2: psrlq $1,<xmm0=%xmm0 | ||
11677 | psrlq $1,%xmm0 | ||
11678 | |||
11679 | # qhasm: xmm0 ^= xmm14 | ||
11680 | # asm 1: pxor <xmm14=int6464#15,<xmm0=int6464#1 | ||
11681 | # asm 2: pxor <xmm14=%xmm14,<xmm0=%xmm0 | ||
11682 | pxor %xmm14,%xmm0 | ||
11683 | |||
11684 | # qhasm: xmm0 &= BS0 | ||
11685 | # asm 1: pand BS0,<xmm0=int6464#1 | ||
11686 | # asm 2: pand BS0,<xmm0=%xmm0 | ||
11687 | pand BS0,%xmm0 | ||
11688 | |||
11689 | # qhasm: xmm14 ^= xmm0 | ||
11690 | # asm 1: pxor <xmm0=int6464#1,<xmm14=int6464#15 | ||
11691 | # asm 2: pxor <xmm0=%xmm0,<xmm14=%xmm14 | ||
11692 | pxor %xmm0,%xmm14 | ||
11693 | |||
11694 | # qhasm: uint6464 xmm0 <<= 1 | ||
11695 | # asm 1: psllq $1,<xmm0=int6464#1 | ||
11696 | # asm 2: psllq $1,<xmm0=%xmm0 | ||
11697 | psllq $1,%xmm0 | ||
11698 | |||
11699 | # qhasm: xmm12 ^= xmm0 | ||
11700 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13 | ||
11701 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12 | ||
11702 | pxor %xmm0,%xmm12 | ||
11703 | |||
11704 | # qhasm: xmm0 = xmm8 | ||
11705 | # asm 1: movdqa <xmm8=int6464#9,>xmm0=int6464#1 | ||
11706 | # asm 2: movdqa <xmm8=%xmm8,>xmm0=%xmm0 | ||
11707 | movdqa %xmm8,%xmm0 | ||
11708 | |||
11709 | # qhasm: uint6464 xmm0 >>= 1 | ||
11710 | # asm 1: psrlq $1,<xmm0=int6464#1 | ||
11711 | # asm 2: psrlq $1,<xmm0=%xmm0 | ||
11712 | psrlq $1,%xmm0 | ||
11713 | |||
11714 | # qhasm: xmm0 ^= xmm9 | ||
11715 | # asm 1: pxor <xmm9=int6464#10,<xmm0=int6464#1 | ||
11716 | # asm 2: pxor <xmm9=%xmm9,<xmm0=%xmm0 | ||
11717 | pxor %xmm9,%xmm0 | ||
11718 | |||
11719 | # qhasm: xmm0 &= BS0 | ||
11720 | # asm 1: pand BS0,<xmm0=int6464#1 | ||
11721 | # asm 2: pand BS0,<xmm0=%xmm0 | ||
11722 | pand BS0,%xmm0 | ||
11723 | |||
11724 | # qhasm: xmm9 ^= xmm0 | ||
11725 | # asm 1: pxor <xmm0=int6464#1,<xmm9=int6464#10 | ||
11726 | # asm 2: pxor <xmm0=%xmm0,<xmm9=%xmm9 | ||
11727 | pxor %xmm0,%xmm9 | ||
11728 | |||
11729 | # qhasm: uint6464 xmm0 <<= 1 | ||
11730 | # asm 1: psllq $1,<xmm0=int6464#1 | ||
11731 | # asm 2: psllq $1,<xmm0=%xmm0 | ||
11732 | psllq $1,%xmm0 | ||
11733 | |||
11734 | # qhasm: xmm8 ^= xmm0 | ||
11735 | # asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9 | ||
11736 | # asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8 | ||
11737 | pxor %xmm0,%xmm8 | ||
11738 | |||
11739 | # qhasm: xmm0 = xmm15 | ||
11740 | # asm 1: movdqa <xmm15=int6464#16,>xmm0=int6464#1 | ||
11741 | # asm 2: movdqa <xmm15=%xmm15,>xmm0=%xmm0 | ||
11742 | movdqa %xmm15,%xmm0 | ||
11743 | |||
11744 | # qhasm: uint6464 xmm0 >>= 2 | ||
11745 | # asm 1: psrlq $2,<xmm0=int6464#1 | ||
11746 | # asm 2: psrlq $2,<xmm0=%xmm0 | ||
11747 | psrlq $2,%xmm0 | ||
11748 | |||
11749 | # qhasm: xmm0 ^= xmm13 | ||
11750 | # asm 1: pxor <xmm13=int6464#14,<xmm0=int6464#1 | ||
11751 | # asm 2: pxor <xmm13=%xmm13,<xmm0=%xmm0 | ||
11752 | pxor %xmm13,%xmm0 | ||
11753 | |||
11754 | # qhasm: xmm0 &= BS1 | ||
11755 | # asm 1: pand BS1,<xmm0=int6464#1 | ||
11756 | # asm 2: pand BS1,<xmm0=%xmm0 | ||
11757 | pand BS1,%xmm0 | ||
11758 | |||
11759 | # qhasm: xmm13 ^= xmm0 | ||
11760 | # asm 1: pxor <xmm0=int6464#1,<xmm13=int6464#14 | ||
11761 | # asm 2: pxor <xmm0=%xmm0,<xmm13=%xmm13 | ||
11762 | pxor %xmm0,%xmm13 | ||
11763 | |||
11764 | # qhasm: uint6464 xmm0 <<= 2 | ||
11765 | # asm 1: psllq $2,<xmm0=int6464#1 | ||
11766 | # asm 2: psllq $2,<xmm0=%xmm0 | ||
11767 | psllq $2,%xmm0 | ||
11768 | |||
11769 | # qhasm: xmm15 ^= xmm0 | ||
11770 | # asm 1: pxor <xmm0=int6464#1,<xmm15=int6464#16 | ||
11771 | # asm 2: pxor <xmm0=%xmm0,<xmm15=%xmm15 | ||
11772 | pxor %xmm0,%xmm15 | ||
11773 | |||
11774 | # qhasm: xmm0 = xmm11 | ||
11775 | # asm 1: movdqa <xmm11=int6464#12,>xmm0=int6464#1 | ||
11776 | # asm 2: movdqa <xmm11=%xmm11,>xmm0=%xmm0 | ||
11777 | movdqa %xmm11,%xmm0 | ||
11778 | |||
11779 | # qhasm: uint6464 xmm0 >>= 2 | ||
11780 | # asm 1: psrlq $2,<xmm0=int6464#1 | ||
11781 | # asm 2: psrlq $2,<xmm0=%xmm0 | ||
11782 | psrlq $2,%xmm0 | ||
11783 | |||
11784 | # qhasm: xmm0 ^= xmm10 | ||
11785 | # asm 1: pxor <xmm10=int6464#11,<xmm0=int6464#1 | ||
11786 | # asm 2: pxor <xmm10=%xmm10,<xmm0=%xmm0 | ||
11787 | pxor %xmm10,%xmm0 | ||
11788 | |||
11789 | # qhasm: xmm0 &= BS1 | ||
11790 | # asm 1: pand BS1,<xmm0=int6464#1 | ||
11791 | # asm 2: pand BS1,<xmm0=%xmm0 | ||
11792 | pand BS1,%xmm0 | ||
11793 | |||
11794 | # qhasm: xmm10 ^= xmm0 | ||
11795 | # asm 1: pxor <xmm0=int6464#1,<xmm10=int6464#11 | ||
11796 | # asm 2: pxor <xmm0=%xmm0,<xmm10=%xmm10 | ||
11797 | pxor %xmm0,%xmm10 | ||
11798 | |||
11799 | # qhasm: uint6464 xmm0 <<= 2 | ||
11800 | # asm 1: psllq $2,<xmm0=int6464#1 | ||
11801 | # asm 2: psllq $2,<xmm0=%xmm0 | ||
11802 | psllq $2,%xmm0 | ||
11803 | |||
11804 | # qhasm: xmm11 ^= xmm0 | ||
11805 | # asm 1: pxor <xmm0=int6464#1,<xmm11=int6464#12 | ||
11806 | # asm 2: pxor <xmm0=%xmm0,<xmm11=%xmm11 | ||
11807 | pxor %xmm0,%xmm11 | ||
11808 | |||
11809 | # qhasm: xmm0 = xmm9 | ||
11810 | # asm 1: movdqa <xmm9=int6464#10,>xmm0=int6464#1 | ||
11811 | # asm 2: movdqa <xmm9=%xmm9,>xmm0=%xmm0 | ||
11812 | movdqa %xmm9,%xmm0 | ||
11813 | |||
11814 | # qhasm: uint6464 xmm0 >>= 2 | ||
11815 | # asm 1: psrlq $2,<xmm0=int6464#1 | ||
11816 | # asm 2: psrlq $2,<xmm0=%xmm0 | ||
11817 | psrlq $2,%xmm0 | ||
11818 | |||
11819 | # qhasm: xmm0 ^= xmm14 | ||
11820 | # asm 1: pxor <xmm14=int6464#15,<xmm0=int6464#1 | ||
11821 | # asm 2: pxor <xmm14=%xmm14,<xmm0=%xmm0 | ||
11822 | pxor %xmm14,%xmm0 | ||
11823 | |||
11824 | # qhasm: xmm0 &= BS1 | ||
11825 | # asm 1: pand BS1,<xmm0=int6464#1 | ||
11826 | # asm 2: pand BS1,<xmm0=%xmm0 | ||
11827 | pand BS1,%xmm0 | ||
11828 | |||
11829 | # qhasm: xmm14 ^= xmm0 | ||
11830 | # asm 1: pxor <xmm0=int6464#1,<xmm14=int6464#15 | ||
11831 | # asm 2: pxor <xmm0=%xmm0,<xmm14=%xmm14 | ||
11832 | pxor %xmm0,%xmm14 | ||
11833 | |||
11834 | # qhasm: uint6464 xmm0 <<= 2 | ||
11835 | # asm 1: psllq $2,<xmm0=int6464#1 | ||
11836 | # asm 2: psllq $2,<xmm0=%xmm0 | ||
11837 | psllq $2,%xmm0 | ||
11838 | |||
11839 | # qhasm: xmm9 ^= xmm0 | ||
11840 | # asm 1: pxor <xmm0=int6464#1,<xmm9=int6464#10 | ||
11841 | # asm 2: pxor <xmm0=%xmm0,<xmm9=%xmm9 | ||
11842 | pxor %xmm0,%xmm9 | ||
11843 | |||
11844 | # qhasm: xmm0 = xmm8 | ||
11845 | # asm 1: movdqa <xmm8=int6464#9,>xmm0=int6464#1 | ||
11846 | # asm 2: movdqa <xmm8=%xmm8,>xmm0=%xmm0 | ||
11847 | movdqa %xmm8,%xmm0 | ||
11848 | |||
11849 | # qhasm: uint6464 xmm0 >>= 2 | ||
11850 | # asm 1: psrlq $2,<xmm0=int6464#1 | ||
11851 | # asm 2: psrlq $2,<xmm0=%xmm0 | ||
11852 | psrlq $2,%xmm0 | ||
11853 | |||
11854 | # qhasm: xmm0 ^= xmm12 | ||
11855 | # asm 1: pxor <xmm12=int6464#13,<xmm0=int6464#1 | ||
11856 | # asm 2: pxor <xmm12=%xmm12,<xmm0=%xmm0 | ||
11857 | pxor %xmm12,%xmm0 | ||
11858 | |||
11859 | # qhasm: xmm0 &= BS1 | ||
11860 | # asm 1: pand BS1,<xmm0=int6464#1 | ||
11861 | # asm 2: pand BS1,<xmm0=%xmm0 | ||
11862 | pand BS1,%xmm0 | ||
11863 | |||
11864 | # qhasm: xmm12 ^= xmm0 | ||
11865 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13 | ||
11866 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12 | ||
11867 | pxor %xmm0,%xmm12 | ||
11868 | |||
11869 | # qhasm: uint6464 xmm0 <<= 2 | ||
11870 | # asm 1: psllq $2,<xmm0=int6464#1 | ||
11871 | # asm 2: psllq $2,<xmm0=%xmm0 | ||
11872 | psllq $2,%xmm0 | ||
11873 | |||
11874 | # qhasm: xmm8 ^= xmm0 | ||
11875 | # asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9 | ||
11876 | # asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8 | ||
11877 | pxor %xmm0,%xmm8 | ||
11878 | |||
11879 | # qhasm: xmm0 = xmm14 | ||
11880 | # asm 1: movdqa <xmm14=int6464#15,>xmm0=int6464#1 | ||
11881 | # asm 2: movdqa <xmm14=%xmm14,>xmm0=%xmm0 | ||
11882 | movdqa %xmm14,%xmm0 | ||
11883 | |||
11884 | # qhasm: uint6464 xmm0 >>= 4 | ||
11885 | # asm 1: psrlq $4,<xmm0=int6464#1 | ||
11886 | # asm 2: psrlq $4,<xmm0=%xmm0 | ||
11887 | psrlq $4,%xmm0 | ||
11888 | |||
11889 | # qhasm: xmm0 ^= xmm13 | ||
11890 | # asm 1: pxor <xmm13=int6464#14,<xmm0=int6464#1 | ||
11891 | # asm 2: pxor <xmm13=%xmm13,<xmm0=%xmm0 | ||
11892 | pxor %xmm13,%xmm0 | ||
11893 | |||
11894 | # qhasm: xmm0 &= BS2 | ||
11895 | # asm 1: pand BS2,<xmm0=int6464#1 | ||
11896 | # asm 2: pand BS2,<xmm0=%xmm0 | ||
11897 | pand BS2,%xmm0 | ||
11898 | |||
11899 | # qhasm: xmm13 ^= xmm0 | ||
11900 | # asm 1: pxor <xmm0=int6464#1,<xmm13=int6464#14 | ||
11901 | # asm 2: pxor <xmm0=%xmm0,<xmm13=%xmm13 | ||
11902 | pxor %xmm0,%xmm13 | ||
11903 | |||
11904 | # qhasm: uint6464 xmm0 <<= 4 | ||
11905 | # asm 1: psllq $4,<xmm0=int6464#1 | ||
11906 | # asm 2: psllq $4,<xmm0=%xmm0 | ||
11907 | psllq $4,%xmm0 | ||
11908 | |||
11909 | # qhasm: xmm14 ^= xmm0 | ||
11910 | # asm 1: pxor <xmm0=int6464#1,<xmm14=int6464#15 | ||
11911 | # asm 2: pxor <xmm0=%xmm0,<xmm14=%xmm14 | ||
11912 | pxor %xmm0,%xmm14 | ||
11913 | |||
11914 | # qhasm: xmm0 = xmm12 | ||
11915 | # asm 1: movdqa <xmm12=int6464#13,>xmm0=int6464#1 | ||
11916 | # asm 2: movdqa <xmm12=%xmm12,>xmm0=%xmm0 | ||
11917 | movdqa %xmm12,%xmm0 | ||
11918 | |||
11919 | # qhasm: uint6464 xmm0 >>= 4 | ||
11920 | # asm 1: psrlq $4,<xmm0=int6464#1 | ||
11921 | # asm 2: psrlq $4,<xmm0=%xmm0 | ||
11922 | psrlq $4,%xmm0 | ||
11923 | |||
11924 | # qhasm: xmm0 ^= xmm10 | ||
11925 | # asm 1: pxor <xmm10=int6464#11,<xmm0=int6464#1 | ||
11926 | # asm 2: pxor <xmm10=%xmm10,<xmm0=%xmm0 | ||
11927 | pxor %xmm10,%xmm0 | ||
11928 | |||
11929 | # qhasm: xmm0 &= BS2 | ||
11930 | # asm 1: pand BS2,<xmm0=int6464#1 | ||
11931 | # asm 2: pand BS2,<xmm0=%xmm0 | ||
11932 | pand BS2,%xmm0 | ||
11933 | |||
11934 | # qhasm: xmm10 ^= xmm0 | ||
11935 | # asm 1: pxor <xmm0=int6464#1,<xmm10=int6464#11 | ||
11936 | # asm 2: pxor <xmm0=%xmm0,<xmm10=%xmm10 | ||
11937 | pxor %xmm0,%xmm10 | ||
11938 | |||
11939 | # qhasm: uint6464 xmm0 <<= 4 | ||
11940 | # asm 1: psllq $4,<xmm0=int6464#1 | ||
11941 | # asm 2: psllq $4,<xmm0=%xmm0 | ||
11942 | psllq $4,%xmm0 | ||
11943 | |||
11944 | # qhasm: xmm12 ^= xmm0 | ||
11945 | # asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13 | ||
11946 | # asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12 | ||
11947 | pxor %xmm0,%xmm12 | ||
11948 | |||
11949 | # qhasm: xmm0 = xmm9 | ||
11950 | # asm 1: movdqa <xmm9=int6464#10,>xmm0=int6464#1 | ||
11951 | # asm 2: movdqa <xmm9=%xmm9,>xmm0=%xmm0 | ||
11952 | movdqa %xmm9,%xmm0 | ||
11953 | |||
11954 | # qhasm: uint6464 xmm0 >>= 4 | ||
11955 | # asm 1: psrlq $4,<xmm0=int6464#1 | ||
11956 | # asm 2: psrlq $4,<xmm0=%xmm0 | ||
11957 | psrlq $4,%xmm0 | ||
11958 | |||
11959 | # qhasm: xmm0 ^= xmm15 | ||
11960 | # asm 1: pxor <xmm15=int6464#16,<xmm0=int6464#1 | ||
11961 | # asm 2: pxor <xmm15=%xmm15,<xmm0=%xmm0 | ||
11962 | pxor %xmm15,%xmm0 | ||
11963 | |||
11964 | # qhasm: xmm0 &= BS2 | ||
11965 | # asm 1: pand BS2,<xmm0=int6464#1 | ||
11966 | # asm 2: pand BS2,<xmm0=%xmm0 | ||
11967 | pand BS2,%xmm0 | ||
11968 | |||
11969 | # qhasm: xmm15 ^= xmm0 | ||
11970 | # asm 1: pxor <xmm0=int6464#1,<xmm15=int6464#16 | ||
11971 | # asm 2: pxor <xmm0=%xmm0,<xmm15=%xmm15 | ||
11972 | pxor %xmm0,%xmm15 | ||
11973 | |||
11974 | # qhasm: uint6464 xmm0 <<= 4 | ||
11975 | # asm 1: psllq $4,<xmm0=int6464#1 | ||
11976 | # asm 2: psllq $4,<xmm0=%xmm0 | ||
11977 | psllq $4,%xmm0 | ||
11978 | |||
11979 | # qhasm: xmm9 ^= xmm0 | ||
11980 | # asm 1: pxor <xmm0=int6464#1,<xmm9=int6464#10 | ||
11981 | # asm 2: pxor <xmm0=%xmm0,<xmm9=%xmm9 | ||
11982 | pxor %xmm0,%xmm9 | ||
11983 | |||
11984 | # qhasm: xmm0 = xmm8 | ||
11985 | # asm 1: movdqa <xmm8=int6464#9,>xmm0=int6464#1 | ||
11986 | # asm 2: movdqa <xmm8=%xmm8,>xmm0=%xmm0 | ||
11987 | movdqa %xmm8,%xmm0 | ||
11988 | |||
11989 | # qhasm: uint6464 xmm0 >>= 4 | ||
11990 | # asm 1: psrlq $4,<xmm0=int6464#1 | ||
11991 | # asm 2: psrlq $4,<xmm0=%xmm0 | ||
11992 | psrlq $4,%xmm0 | ||
11993 | |||
11994 | # qhasm: xmm0 ^= xmm11 | ||
11995 | # asm 1: pxor <xmm11=int6464#12,<xmm0=int6464#1 | ||
11996 | # asm 2: pxor <xmm11=%xmm11,<xmm0=%xmm0 | ||
11997 | pxor %xmm11,%xmm0 | ||
11998 | |||
11999 | # qhasm: xmm0 &= BS2 | ||
12000 | # asm 1: pand BS2,<xmm0=int6464#1 | ||
12001 | # asm 2: pand BS2,<xmm0=%xmm0 | ||
12002 | pand BS2,%xmm0 | ||
12003 | |||
12004 | # qhasm: xmm11 ^= xmm0 | ||
12005 | # asm 1: pxor <xmm0=int6464#1,<xmm11=int6464#12 | ||
12006 | # asm 2: pxor <xmm0=%xmm0,<xmm11=%xmm11 | ||
12007 | pxor %xmm0,%xmm11 | ||
12008 | |||
12009 | # qhasm: uint6464 xmm0 <<= 4 | ||
12010 | # asm 1: psllq $4,<xmm0=int6464#1 | ||
12011 | # asm 2: psllq $4,<xmm0=%xmm0 | ||
12012 | psllq $4,%xmm0 | ||
12013 | |||
12014 | # qhasm: xmm8 ^= xmm0 | ||
12015 | # asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9 | ||
12016 | # asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8 | ||
12017 | pxor %xmm0,%xmm8 | ||
12018 | |||
12019 | # qhasm: unsigned<? =? len-128 | ||
12020 | # asm 1: cmp $128,<len=int64#3 | ||
12021 | # asm 2: cmp $128,<len=%rdx | ||
12022 | cmp $128,%rdx | ||
12023 | # comment:fp stack unchanged by jump | ||
12024 | |||
12025 | # qhasm: goto partial if unsigned< | ||
12026 | jb ._partial | ||
12027 | # comment:fp stack unchanged by jump | ||
12028 | |||
12029 | # qhasm: goto full if = | ||
12030 | je ._full | ||
12031 | |||
12032 | # qhasm: tmp = *(uint32 *)(np + 12) | ||
12033 | # asm 1: movl 12(<np=int64#4),>tmp=int64#6d | ||
12034 | # asm 2: movl 12(<np=%rcx),>tmp=%r9d | ||
12035 | movl 12(%rcx),%r9d | ||
12036 | |||
12037 | # qhasm: (uint32) bswap tmp | ||
12038 | # asm 1: bswap <tmp=int64#6d | ||
12039 | # asm 2: bswap <tmp=%r9d | ||
12040 | bswap %r9d | ||
12041 | |||
12042 | # qhasm: tmp += 8 | ||
12043 | # asm 1: add $8,<tmp=int64#6 | ||
12044 | # asm 2: add $8,<tmp=%r9 | ||
12045 | add $8,%r9 | ||
12046 | |||
12047 | # qhasm: (uint32) bswap tmp | ||
12048 | # asm 1: bswap <tmp=int64#6d | ||
12049 | # asm 2: bswap <tmp=%r9d | ||
12050 | bswap %r9d | ||
12051 | |||
12052 | # qhasm: *(uint32 *)(np + 12) = tmp | ||
12053 | # asm 1: movl <tmp=int64#6d,12(<np=int64#4) | ||
12054 | # asm 2: movl <tmp=%r9d,12(<np=%rcx) | ||
12055 | movl %r9d,12(%rcx) | ||
12056 | |||
12057 | # qhasm: xmm8 ^= *(int128 *)(inp + 0) | ||
12058 | # asm 1: pxor 0(<inp=int64#2),<xmm8=int6464#9 | ||
12059 | # asm 2: pxor 0(<inp=%rsi),<xmm8=%xmm8 | ||
12060 | pxor 0(%rsi),%xmm8 | ||
12061 | |||
12062 | # qhasm: xmm9 ^= *(int128 *)(inp + 16) | ||
12063 | # asm 1: pxor 16(<inp=int64#2),<xmm9=int6464#10 | ||
12064 | # asm 2: pxor 16(<inp=%rsi),<xmm9=%xmm9 | ||
12065 | pxor 16(%rsi),%xmm9 | ||
12066 | |||
12067 | # qhasm: xmm12 ^= *(int128 *)(inp + 32) | ||
12068 | # asm 1: pxor 32(<inp=int64#2),<xmm12=int6464#13 | ||
12069 | # asm 2: pxor 32(<inp=%rsi),<xmm12=%xmm12 | ||
12070 | pxor 32(%rsi),%xmm12 | ||
12071 | |||
12072 | # qhasm: xmm14 ^= *(int128 *)(inp + 48) | ||
12073 | # asm 1: pxor 48(<inp=int64#2),<xmm14=int6464#15 | ||
12074 | # asm 2: pxor 48(<inp=%rsi),<xmm14=%xmm14 | ||
12075 | pxor 48(%rsi),%xmm14 | ||
12076 | |||
12077 | # qhasm: xmm11 ^= *(int128 *)(inp + 64) | ||
12078 | # asm 1: pxor 64(<inp=int64#2),<xmm11=int6464#12 | ||
12079 | # asm 2: pxor 64(<inp=%rsi),<xmm11=%xmm11 | ||
12080 | pxor 64(%rsi),%xmm11 | ||
12081 | |||
12082 | # qhasm: xmm15 ^= *(int128 *)(inp + 80) | ||
12083 | # asm 1: pxor 80(<inp=int64#2),<xmm15=int6464#16 | ||
12084 | # asm 2: pxor 80(<inp=%rsi),<xmm15=%xmm15 | ||
12085 | pxor 80(%rsi),%xmm15 | ||
12086 | |||
12087 | # qhasm: xmm10 ^= *(int128 *)(inp + 96) | ||
12088 | # asm 1: pxor 96(<inp=int64#2),<xmm10=int6464#11 | ||
12089 | # asm 2: pxor 96(<inp=%rsi),<xmm10=%xmm10 | ||
12090 | pxor 96(%rsi),%xmm10 | ||
12091 | |||
12092 | # qhasm: xmm13 ^= *(int128 *)(inp + 112) | ||
12093 | # asm 1: pxor 112(<inp=int64#2),<xmm13=int6464#14 | ||
12094 | # asm 2: pxor 112(<inp=%rsi),<xmm13=%xmm13 | ||
12095 | pxor 112(%rsi),%xmm13 | ||
12096 | |||
12097 | # qhasm: *(int128 *) (outp + 0) = xmm8 | ||
12098 | # asm 1: movdqa <xmm8=int6464#9,0(<outp=int64#1) | ||
12099 | # asm 2: movdqa <xmm8=%xmm8,0(<outp=%rdi) | ||
12100 | movdqa %xmm8,0(%rdi) | ||
12101 | |||
12102 | # qhasm: *(int128 *) (outp + 16) = xmm9 | ||
12103 | # asm 1: movdqa <xmm9=int6464#10,16(<outp=int64#1) | ||
12104 | # asm 2: movdqa <xmm9=%xmm9,16(<outp=%rdi) | ||
12105 | movdqa %xmm9,16(%rdi) | ||
12106 | |||
12107 | # qhasm: *(int128 *) (outp + 32) = xmm12 | ||
12108 | # asm 1: movdqa <xmm12=int6464#13,32(<outp=int64#1) | ||
12109 | # asm 2: movdqa <xmm12=%xmm12,32(<outp=%rdi) | ||
12110 | movdqa %xmm12,32(%rdi) | ||
12111 | |||
12112 | # qhasm: *(int128 *) (outp + 48) = xmm14 | ||
12113 | # asm 1: movdqa <xmm14=int6464#15,48(<outp=int64#1) | ||
12114 | # asm 2: movdqa <xmm14=%xmm14,48(<outp=%rdi) | ||
12115 | movdqa %xmm14,48(%rdi) | ||
12116 | |||
12117 | # qhasm: *(int128 *) (outp + 64) = xmm11 | ||
12118 | # asm 1: movdqa <xmm11=int6464#12,64(<outp=int64#1) | ||
12119 | # asm 2: movdqa <xmm11=%xmm11,64(<outp=%rdi) | ||
12120 | movdqa %xmm11,64(%rdi) | ||
12121 | |||
12122 | # qhasm: *(int128 *) (outp + 80) = xmm15 | ||
12123 | # asm 1: movdqa <xmm15=int6464#16,80(<outp=int64#1) | ||
12124 | # asm 2: movdqa <xmm15=%xmm15,80(<outp=%rdi) | ||
12125 | movdqa %xmm15,80(%rdi) | ||
12126 | |||
12127 | # qhasm: *(int128 *) (outp + 96) = xmm10 | ||
12128 | # asm 1: movdqa <xmm10=int6464#11,96(<outp=int64#1) | ||
12129 | # asm 2: movdqa <xmm10=%xmm10,96(<outp=%rdi) | ||
12130 | movdqa %xmm10,96(%rdi) | ||
12131 | |||
12132 | # qhasm: *(int128 *) (outp + 112) = xmm13 | ||
12133 | # asm 1: movdqa <xmm13=int6464#14,112(<outp=int64#1) | ||
12134 | # asm 2: movdqa <xmm13=%xmm13,112(<outp=%rdi) | ||
12135 | movdqa %xmm13,112(%rdi) | ||
12136 | |||
12137 | # qhasm: len -= 128 | ||
12138 | # asm 1: sub $128,<len=int64#3 | ||
12139 | # asm 2: sub $128,<len=%rdx | ||
12140 | sub $128,%rdx | ||
12141 | |||
12142 | # qhasm: inp += 128 | ||
12143 | # asm 1: add $128,<inp=int64#2 | ||
12144 | # asm 2: add $128,<inp=%rsi | ||
12145 | add $128,%rsi | ||
12146 | |||
12147 | # qhasm: outp += 128 | ||
12148 | # asm 1: add $128,<outp=int64#1 | ||
12149 | # asm 2: add $128,<outp=%rdi | ||
12150 | add $128,%rdi | ||
12151 | # comment:fp stack unchanged by jump | ||
12152 | |||
12153 | # qhasm: goto enc_block | ||
12154 | jmp ._enc_block | ||
12155 | |||
12156 | # qhasm: partial: | ||
12157 | ._partial: | ||
12158 | |||
12159 | # qhasm: lensav = len | ||
12160 | # asm 1: mov <len=int64#3,>lensav=int64#5 | ||
12161 | # asm 2: mov <len=%rdx,>lensav=%r8 | ||
12162 | mov %rdx,%r8 | ||
12163 | |||
12164 | # qhasm: (uint32) len >>= 4 | ||
12165 | # asm 1: shr $4,<len=int64#3d | ||
12166 | # asm 2: shr $4,<len=%edx | ||
12167 | shr $4,%edx | ||
12168 | |||
12169 | # qhasm: tmp = *(uint32 *)(np + 12) | ||
12170 | # asm 1: movl 12(<np=int64#4),>tmp=int64#6d | ||
12171 | # asm 2: movl 12(<np=%rcx),>tmp=%r9d | ||
12172 | movl 12(%rcx),%r9d | ||
12173 | |||
12174 | # qhasm: (uint32) bswap tmp | ||
12175 | # asm 1: bswap <tmp=int64#6d | ||
12176 | # asm 2: bswap <tmp=%r9d | ||
12177 | bswap %r9d | ||
12178 | |||
12179 | # qhasm: tmp += len | ||
12180 | # asm 1: add <len=int64#3,<tmp=int64#6 | ||
12181 | # asm 2: add <len=%rdx,<tmp=%r9 | ||
12182 | add %rdx,%r9 | ||
12183 | |||
12184 | # qhasm: (uint32) bswap tmp | ||
12185 | # asm 1: bswap <tmp=int64#6d | ||
12186 | # asm 2: bswap <tmp=%r9d | ||
12187 | bswap %r9d | ||
12188 | |||
12189 | # qhasm: *(uint32 *)(np + 12) = tmp | ||
12190 | # asm 1: movl <tmp=int64#6d,12(<np=int64#4) | ||
12191 | # asm 2: movl <tmp=%r9d,12(<np=%rcx) | ||
12192 | movl %r9d,12(%rcx) | ||
12193 | |||
12194 | # qhasm: blp = &bl | ||
12195 | # asm 1: leaq <bl=stack1024#1,>blp=int64#3 | ||
12196 | # asm 2: leaq <bl=32(%rsp),>blp=%rdx | ||
12197 | leaq 32(%rsp),%rdx | ||
12198 | |||
12199 | # qhasm: *(int128 *)(blp + 0) = xmm8 | ||
12200 | # asm 1: movdqa <xmm8=int6464#9,0(<blp=int64#3) | ||
12201 | # asm 2: movdqa <xmm8=%xmm8,0(<blp=%rdx) | ||
12202 | movdqa %xmm8,0(%rdx) | ||
12203 | |||
12204 | # qhasm: *(int128 *)(blp + 16) = xmm9 | ||
12205 | # asm 1: movdqa <xmm9=int6464#10,16(<blp=int64#3) | ||
12206 | # asm 2: movdqa <xmm9=%xmm9,16(<blp=%rdx) | ||
12207 | movdqa %xmm9,16(%rdx) | ||
12208 | |||
12209 | # qhasm: *(int128 *)(blp + 32) = xmm12 | ||
12210 | # asm 1: movdqa <xmm12=int6464#13,32(<blp=int64#3) | ||
12211 | # asm 2: movdqa <xmm12=%xmm12,32(<blp=%rdx) | ||
12212 | movdqa %xmm12,32(%rdx) | ||
12213 | |||
12214 | # qhasm: *(int128 *)(blp + 48) = xmm14 | ||
12215 | # asm 1: movdqa <xmm14=int6464#15,48(<blp=int64#3) | ||
12216 | # asm 2: movdqa <xmm14=%xmm14,48(<blp=%rdx) | ||
12217 | movdqa %xmm14,48(%rdx) | ||
12218 | |||
12219 | # qhasm: *(int128 *)(blp + 64) = xmm11 | ||
12220 | # asm 1: movdqa <xmm11=int6464#12,64(<blp=int64#3) | ||
12221 | # asm 2: movdqa <xmm11=%xmm11,64(<blp=%rdx) | ||
12222 | movdqa %xmm11,64(%rdx) | ||
12223 | |||
12224 | # qhasm: *(int128 *)(blp + 80) = xmm15 | ||
12225 | # asm 1: movdqa <xmm15=int6464#16,80(<blp=int64#3) | ||
12226 | # asm 2: movdqa <xmm15=%xmm15,80(<blp=%rdx) | ||
12227 | movdqa %xmm15,80(%rdx) | ||
12228 | |||
12229 | # qhasm: *(int128 *)(blp + 96) = xmm10 | ||
12230 | # asm 1: movdqa <xmm10=int6464#11,96(<blp=int64#3) | ||
12231 | # asm 2: movdqa <xmm10=%xmm10,96(<blp=%rdx) | ||
12232 | movdqa %xmm10,96(%rdx) | ||
12233 | |||
12234 | # qhasm: *(int128 *)(blp + 112) = xmm13 | ||
12235 | # asm 1: movdqa <xmm13=int6464#14,112(<blp=int64#3) | ||
12236 | # asm 2: movdqa <xmm13=%xmm13,112(<blp=%rdx) | ||
12237 | movdqa %xmm13,112(%rdx) | ||
12238 | |||
12239 | # qhasm: bytes: | ||
12240 | ._bytes: | ||
12241 | |||
12242 | # qhasm: =? lensav-0 | ||
12243 | # asm 1: cmp $0,<lensav=int64#5 | ||
12244 | # asm 2: cmp $0,<lensav=%r8 | ||
12245 | cmp $0,%r8 | ||
12246 | # comment:fp stack unchanged by jump | ||
12247 | |||
12248 | # qhasm: goto end if = | ||
12249 | je ._end | ||
12250 | |||
12251 | # qhasm: b = *(uint8 *)(blp + 0) | ||
12252 | # asm 1: movzbq 0(<blp=int64#3),>b=int64#4 | ||
12253 | # asm 2: movzbq 0(<blp=%rdx),>b=%rcx | ||
12254 | movzbq 0(%rdx),%rcx | ||
12255 | |||
12256 | # qhasm: (uint8) b ^= *(uint8 *)(inp + 0) | ||
12257 | # asm 1: xorb 0(<inp=int64#2),<b=int64#4b | ||
12258 | # asm 2: xorb 0(<inp=%rsi),<b=%cl | ||
12259 | xorb 0(%rsi),%cl | ||
12260 | |||
12261 | # qhasm: *(uint8 *)(outp + 0) = b | ||
12262 | # asm 1: movb <b=int64#4b,0(<outp=int64#1) | ||
12263 | # asm 2: movb <b=%cl,0(<outp=%rdi) | ||
12264 | movb %cl,0(%rdi) | ||
12265 | |||
12266 | # qhasm: blp += 1 | ||
12267 | # asm 1: add $1,<blp=int64#3 | ||
12268 | # asm 2: add $1,<blp=%rdx | ||
12269 | add $1,%rdx | ||
12270 | |||
12271 | # qhasm: inp +=1 | ||
12272 | # asm 1: add $1,<inp=int64#2 | ||
12273 | # asm 2: add $1,<inp=%rsi | ||
12274 | add $1,%rsi | ||
12275 | |||
12276 | # qhasm: outp +=1 | ||
12277 | # asm 1: add $1,<outp=int64#1 | ||
12278 | # asm 2: add $1,<outp=%rdi | ||
12279 | add $1,%rdi | ||
12280 | |||
12281 | # qhasm: lensav -= 1 | ||
12282 | # asm 1: sub $1,<lensav=int64#5 | ||
12283 | # asm 2: sub $1,<lensav=%r8 | ||
12284 | sub $1,%r8 | ||
12285 | # comment:fp stack unchanged by jump | ||
12286 | |||
12287 | # qhasm: goto bytes | ||
12288 | jmp ._bytes | ||
12289 | |||
12290 | # qhasm: full: | ||
12291 | ._full: | ||
12292 | |||
12293 | # qhasm: tmp = *(uint32 *)(np + 12) | ||
12294 | # asm 1: movl 12(<np=int64#4),>tmp=int64#3d | ||
12295 | # asm 2: movl 12(<np=%rcx),>tmp=%edx | ||
12296 | movl 12(%rcx),%edx | ||
12297 | |||
12298 | # qhasm: (uint32) bswap tmp | ||
12299 | # asm 1: bswap <tmp=int64#3d | ||
12300 | # asm 2: bswap <tmp=%edx | ||
12301 | bswap %edx | ||
12302 | |||
12303 | # qhasm: tmp += 8 | ||
12304 | # asm 1: add $8,<tmp=int64#3 | ||
12305 | # asm 2: add $8,<tmp=%rdx | ||
12306 | add $8,%rdx | ||
12307 | |||
12308 | # qhasm: (uint32) bswap tmp | ||
12309 | # asm 1: bswap <tmp=int64#3d | ||
12310 | # asm 2: bswap <tmp=%edx | ||
12311 | bswap %edx | ||
12312 | |||
12313 | # qhasm: *(uint32 *)(np + 12) = tmp | ||
12314 | # asm 1: movl <tmp=int64#3d,12(<np=int64#4) | ||
12315 | # asm 2: movl <tmp=%edx,12(<np=%rcx) | ||
12316 | movl %edx,12(%rcx) | ||
12317 | |||
12318 | # qhasm: xmm8 ^= *(int128 *)(inp + 0) | ||
12319 | # asm 1: pxor 0(<inp=int64#2),<xmm8=int6464#9 | ||
12320 | # asm 2: pxor 0(<inp=%rsi),<xmm8=%xmm8 | ||
12321 | pxor 0(%rsi),%xmm8 | ||
12322 | |||
12323 | # qhasm: xmm9 ^= *(int128 *)(inp + 16) | ||
12324 | # asm 1: pxor 16(<inp=int64#2),<xmm9=int6464#10 | ||
12325 | # asm 2: pxor 16(<inp=%rsi),<xmm9=%xmm9 | ||
12326 | pxor 16(%rsi),%xmm9 | ||
12327 | |||
12328 | # qhasm: xmm12 ^= *(int128 *)(inp + 32) | ||
12329 | # asm 1: pxor 32(<inp=int64#2),<xmm12=int6464#13 | ||
12330 | # asm 2: pxor 32(<inp=%rsi),<xmm12=%xmm12 | ||
12331 | pxor 32(%rsi),%xmm12 | ||
12332 | |||
12333 | # qhasm: xmm14 ^= *(int128 *)(inp + 48) | ||
12334 | # asm 1: pxor 48(<inp=int64#2),<xmm14=int6464#15 | ||
12335 | # asm 2: pxor 48(<inp=%rsi),<xmm14=%xmm14 | ||
12336 | pxor 48(%rsi),%xmm14 | ||
12337 | |||
12338 | # qhasm: xmm11 ^= *(int128 *)(inp + 64) | ||
12339 | # asm 1: pxor 64(<inp=int64#2),<xmm11=int6464#12 | ||
12340 | # asm 2: pxor 64(<inp=%rsi),<xmm11=%xmm11 | ||
12341 | pxor 64(%rsi),%xmm11 | ||
12342 | |||
12343 | # qhasm: xmm15 ^= *(int128 *)(inp + 80) | ||
12344 | # asm 1: pxor 80(<inp=int64#2),<xmm15=int6464#16 | ||
12345 | # asm 2: pxor 80(<inp=%rsi),<xmm15=%xmm15 | ||
12346 | pxor 80(%rsi),%xmm15 | ||
12347 | |||
12348 | # qhasm: xmm10 ^= *(int128 *)(inp + 96) | ||
12349 | # asm 1: pxor 96(<inp=int64#2),<xmm10=int6464#11 | ||
12350 | # asm 2: pxor 96(<inp=%rsi),<xmm10=%xmm10 | ||
12351 | pxor 96(%rsi),%xmm10 | ||
12352 | |||
12353 | # qhasm: xmm13 ^= *(int128 *)(inp + 112) | ||
12354 | # asm 1: pxor 112(<inp=int64#2),<xmm13=int6464#14 | ||
12355 | # asm 2: pxor 112(<inp=%rsi),<xmm13=%xmm13 | ||
12356 | pxor 112(%rsi),%xmm13 | ||
12357 | |||
12358 | # qhasm: *(int128 *) (outp + 0) = xmm8 | ||
12359 | # asm 1: movdqa <xmm8=int6464#9,0(<outp=int64#1) | ||
12360 | # asm 2: movdqa <xmm8=%xmm8,0(<outp=%rdi) | ||
12361 | movdqa %xmm8,0(%rdi) | ||
12362 | |||
12363 | # qhasm: *(int128 *) (outp + 16) = xmm9 | ||
12364 | # asm 1: movdqa <xmm9=int6464#10,16(<outp=int64#1) | ||
12365 | # asm 2: movdqa <xmm9=%xmm9,16(<outp=%rdi) | ||
12366 | movdqa %xmm9,16(%rdi) | ||
12367 | |||
12368 | # qhasm: *(int128 *) (outp + 32) = xmm12 | ||
12369 | # asm 1: movdqa <xmm12=int6464#13,32(<outp=int64#1) | ||
12370 | # asm 2: movdqa <xmm12=%xmm12,32(<outp=%rdi) | ||
12371 | movdqa %xmm12,32(%rdi) | ||
12372 | |||
12373 | # qhasm: *(int128 *) (outp + 48) = xmm14 | ||
12374 | # asm 1: movdqa <xmm14=int6464#15,48(<outp=int64#1) | ||
12375 | # asm 2: movdqa <xmm14=%xmm14,48(<outp=%rdi) | ||
12376 | movdqa %xmm14,48(%rdi) | ||
12377 | |||
12378 | # qhasm: *(int128 *) (outp + 64) = xmm11 | ||
12379 | # asm 1: movdqa <xmm11=int6464#12,64(<outp=int64#1) | ||
12380 | # asm 2: movdqa <xmm11=%xmm11,64(<outp=%rdi) | ||
12381 | movdqa %xmm11,64(%rdi) | ||
12382 | |||
12383 | # qhasm: *(int128 *) (outp + 80) = xmm15 | ||
12384 | # asm 1: movdqa <xmm15=int6464#16,80(<outp=int64#1) | ||
12385 | # asm 2: movdqa <xmm15=%xmm15,80(<outp=%rdi) | ||
12386 | movdqa %xmm15,80(%rdi) | ||
12387 | |||
12388 | # qhasm: *(int128 *) (outp + 96) = xmm10 | ||
12389 | # asm 1: movdqa <xmm10=int6464#11,96(<outp=int64#1) | ||
12390 | # asm 2: movdqa <xmm10=%xmm10,96(<outp=%rdi) | ||
12391 | movdqa %xmm10,96(%rdi) | ||
12392 | |||
12393 | # qhasm: *(int128 *) (outp + 112) = xmm13 | ||
12394 | # asm 1: movdqa <xmm13=int6464#14,112(<outp=int64#1) | ||
12395 | # asm 2: movdqa <xmm13=%xmm13,112(<outp=%rdi) | ||
12396 | movdqa %xmm13,112(%rdi) | ||
12397 | # comment:fp stack unchanged by fallthrough | ||
12398 | |||
12399 | # qhasm: end: | ||
12400 | ._end: | ||
12401 | |||
12402 | # qhasm: leave | ||
12403 | add %r11,%rsp | ||
12404 | mov %rdi,%rax | ||
12405 | mov %rsi,%rdx | ||
12406 | xor %rax,%rax | ||
12407 | ret | ||
diff --git a/nacl/crypto_stream/aes128ctr/portable/afternm.c b/nacl/crypto_stream/aes128ctr/portable/afternm.c new file mode 100644 index 00000000..93c96e42 --- /dev/null +++ b/nacl/crypto_stream/aes128ctr/portable/afternm.c | |||
@@ -0,0 +1,158 @@ | |||
1 | /* Author: Peter Schwabe, ported from an assembly implementation by Emilia Käsper | ||
2 | * Date: 2009-03-19 | ||
3 | * Public domain */ | ||
4 | |||
5 | #include "int128.h" | ||
6 | #include "common.h" | ||
7 | #include "consts.h" | ||
8 | #include "crypto_stream.h" | ||
9 | |||
/* Generate `len` bytes of AES-128-CTR keystream into `outp`.
 *
 * outp   : destination for the keystream (written in 128-byte batches).
 * len    : number of keystream bytes requested.
 * noncep : 16-byte nonce/counter block; bytes 12..15 hold a big-endian
 *          32-bit block counter (see the load32_bigendian updates below).
 * c      : bitsliced expanded key produced by crypto_stream_beforenm
 *          (CRYPTO_BEFORENMBYTES bytes), consumed by the aesround macros.
 *
 * Always returns 0.  Eight counter blocks are encrypted per iteration in
 * bitsliced form, mirroring the assembly implementation this was ported from.
 */
int crypto_stream_afternm(unsigned char *outp, unsigned long long len, const unsigned char *noncep, const unsigned char *c)
{

  /* xmm0..xmm15 mirror the SSE registers of the original assembly: the
     bitsliced state of 8 AES blocks processed in parallel. */
  int128 xmm0;
  int128 xmm1;
  int128 xmm2;
  int128 xmm3;
  int128 xmm4;
  int128 xmm5;
  int128 xmm6;
  int128 xmm7;

  int128 xmm8;
  int128 xmm9;
  int128 xmm10;
  int128 xmm11;
  int128 xmm12;
  int128 xmm13;
  int128 xmm14;
  int128 xmm15;

  int128 nonce_stack;          /* local, mutable copy of the nonce/counter */
  unsigned long long lensav;   /* byte count remaining in the partial tail */
  unsigned char bl[128];       /* keystream buffer for the partial tail */
  unsigned char *blp;
  unsigned char b;

  uint32 tmp;                  /* big-endian block counter, host order */

  /* Copy nonce on the stack so the counter can be advanced locally. */
  copy2(&nonce_stack, (int128 *) (noncep + 0));
  unsigned char *np = (unsigned char *)&nonce_stack;

  enc_block:

  /* Build 8 consecutive counter blocks from the current nonce.
     SWAP32 re-orders words so add_uint32_big can bump the counter;
     M0/M0SWAP put the blocks into bitslice input order. */
  xmm0 = *(int128 *) (np + 0);
  copy2(&xmm1, &xmm0);
  shufb(&xmm1, SWAP32);
  copy2(&xmm2, &xmm1);
  copy2(&xmm3, &xmm1);
  copy2(&xmm4, &xmm1);
  copy2(&xmm5, &xmm1);
  copy2(&xmm6, &xmm1);
  copy2(&xmm7, &xmm1);

  add_uint32_big(&xmm1, 1);
  add_uint32_big(&xmm2, 2);
  add_uint32_big(&xmm3, 3);
  add_uint32_big(&xmm4, 4);
  add_uint32_big(&xmm5, 5);
  add_uint32_big(&xmm6, 6);
  add_uint32_big(&xmm7, 7);

  shufb(&xmm0, M0);
  shufb(&xmm1, M0SWAP);
  shufb(&xmm2, M0SWAP);
  shufb(&xmm3, M0SWAP);
  shufb(&xmm4, M0SWAP);
  shufb(&xmm5, M0SWAP);
  shufb(&xmm6, M0SWAP);
  shufb(&xmm7, M0SWAP);

  /* Transpose the 8 blocks into bitsliced representation (xmm8 is scratch). */
  bitslice(xmm7, xmm6, xmm5, xmm4, xmm3, xmm2, xmm1, xmm0, xmm8)

  /* 10 AES-128 rounds; each round's outputs become the next round's inputs,
     ping-ponging between the xmm0..7 and xmm8..15 register groups. */
  aesround( 1, xmm0, xmm1, xmm2, xmm3, xmm4, xmm5, xmm6, xmm7, xmm8, xmm9, xmm10, xmm11, xmm12, xmm13, xmm14, xmm15,c)
  aesround( 2, xmm8, xmm9, xmm10, xmm11, xmm12, xmm13, xmm14, xmm15, xmm0, xmm1, xmm2, xmm3, xmm4, xmm5, xmm6, xmm7,c)
  aesround( 3, xmm0, xmm1, xmm2, xmm3, xmm4, xmm5, xmm6, xmm7, xmm8, xmm9, xmm10, xmm11, xmm12, xmm13, xmm14, xmm15,c)
  aesround( 4, xmm8, xmm9, xmm10, xmm11, xmm12, xmm13, xmm14, xmm15, xmm0, xmm1, xmm2, xmm3, xmm4, xmm5, xmm6, xmm7,c)
  aesround( 5, xmm0, xmm1, xmm2, xmm3, xmm4, xmm5, xmm6, xmm7, xmm8, xmm9, xmm10, xmm11, xmm12, xmm13, xmm14, xmm15,c)
  aesround( 6, xmm8, xmm9, xmm10, xmm11, xmm12, xmm13, xmm14, xmm15, xmm0, xmm1, xmm2, xmm3, xmm4, xmm5, xmm6, xmm7,c)
  aesround( 7, xmm0, xmm1, xmm2, xmm3, xmm4, xmm5, xmm6, xmm7, xmm8, xmm9, xmm10, xmm11, xmm12, xmm13, xmm14, xmm15,c)
  aesround( 8, xmm8, xmm9, xmm10, xmm11, xmm12, xmm13, xmm14, xmm15, xmm0, xmm1, xmm2, xmm3, xmm4, xmm5, xmm6, xmm7,c)
  aesround( 9, xmm0, xmm1, xmm2, xmm3, xmm4, xmm5, xmm6, xmm7, xmm8, xmm9, xmm10, xmm11, xmm12, xmm13, xmm14, xmm15,c)
  lastround(xmm8, xmm9, xmm10, xmm11, xmm12, xmm13, xmm14, xmm15, xmm0, xmm1, xmm2, xmm3, xmm4, xmm5, xmm6, xmm7,c)

  /* Un-bitslice: note the permuted register order here fixes the output
     ordering used by all the stores below (xmm8,9,12,14,11,15,10,13). */
  bitslice(xmm13, xmm10, xmm15, xmm11, xmm14, xmm12, xmm9, xmm8, xmm0)

  if(len < 128) goto partial;
  if(len == 128) goto full;

  /* Advance the big-endian block counter by the 8 blocks just produced. */
  tmp = load32_bigendian(np + 12);
  tmp += 8;
  store32_bigendian(np + 12, tmp);

  /* NOTE(review): int128 stores through a cast assume outp is suitably
     aligned for int128 — matches the assembly's movdqa; confirm callers. */
  *(int128 *) (outp + 0) = xmm8;
  *(int128 *) (outp + 16) = xmm9;
  *(int128 *) (outp + 32) = xmm12;
  *(int128 *) (outp + 48) = xmm14;
  *(int128 *) (outp + 64) = xmm11;
  *(int128 *) (outp + 80) = xmm15;
  *(int128 *) (outp + 96) = xmm10;
  *(int128 *) (outp + 112) = xmm13;

  len -= 128;
  outp += 128;

  goto enc_block;

  partial:

  /* Fewer than 128 bytes remain: advance the counter only by the number of
     full 16-byte blocks consumed (len >> 4), then copy bytes one at a time. */
  lensav = len;
  len >>= 4;

  tmp = load32_bigendian(np + 12);
  tmp += len;
  store32_bigendian(np + 12, tmp);

  blp = bl;
  *(int128 *)(blp + 0) = xmm8;
  *(int128 *)(blp + 16) = xmm9;
  *(int128 *)(blp + 32) = xmm12;
  *(int128 *)(blp + 48) = xmm14;
  *(int128 *)(blp + 64) = xmm11;
  *(int128 *)(blp + 80) = xmm15;
  *(int128 *)(blp + 96) = xmm10;
  *(int128 *)(blp + 112) = xmm13;

  bytes:

  if(lensav == 0) goto end;

  /* Byte-wise copy of the tail of the keystream buffer. */
  b = blp[0];
  *(unsigned char *)(outp + 0) = b;

  blp += 1;
  outp +=1;
  lensav -= 1;

  goto bytes;

  full:

  /* Exactly 128 bytes remain: bump the counter by 8 blocks, store, done. */
  tmp = load32_bigendian(np + 12);
  tmp += 8;
  store32_bigendian(np + 12, tmp);

  *(int128 *) (outp + 0) = xmm8;
  *(int128 *) (outp + 16) = xmm9;
  *(int128 *) (outp + 32) = xmm12;
  *(int128 *) (outp + 48) = xmm14;
  *(int128 *) (outp + 64) = xmm11;
  *(int128 *) (outp + 80) = xmm15;
  *(int128 *) (outp + 96) = xmm10;
  *(int128 *) (outp + 112) = xmm13;

  end:
  return 0;

}
diff --git a/nacl/crypto_stream/aes128ctr/portable/api.h b/nacl/crypto_stream/aes128ctr/portable/api.h new file mode 100644 index 00000000..62fc8d88 --- /dev/null +++ b/nacl/crypto_stream/aes128ctr/portable/api.h | |||
@@ -0,0 +1,3 @@ | |||
#define CRYPTO_KEYBYTES 16        /* AES-128 key: 16 bytes */
#define CRYPTO_NONCEBYTES 16      /* full 16-byte nonce/counter block */
#define CRYPTO_BEFORENMBYTES 1408 /* bitsliced expanded key: 11 round keys x 128 bytes */
diff --git a/nacl/crypto_stream/aes128ctr/portable/beforenm.c b/nacl/crypto_stream/aes128ctr/portable/beforenm.c new file mode 100644 index 00000000..8fa2673d --- /dev/null +++ b/nacl/crypto_stream/aes128ctr/portable/beforenm.c | |||
@@ -0,0 +1,59 @@ | |||
1 | /* Author: Peter Schwabe, ported from an assembly implementation by Emilia Käsper | ||
2 | * Date: 2009-03-19 | ||
3 | * Public domain */ | ||
4 | |||
5 | #include "consts.h" | ||
6 | #include "int128.h" | ||
7 | #include "common.h" | ||
8 | #include "crypto_stream.h" | ||
9 | |||
/* Expand the 16-byte AES-128 key `k` into the bitsliced round-key schedule
 * stored in `c` (CRYPTO_BEFORENMBYTES = 1408 bytes): round key 0 is written
 * by bitslicekey0, and keyexpbs1 / keyexpbs / keyexpbs10 derive and store
 * rounds 1..10.  Always returns 0.
 *
 * The permuted xmm argument orders between successive keyexpbs calls mirror
 * the register renaming of the assembly implementation this was ported from;
 * the extra xor_rcon arguments inject the per-round AES round constant.
 */
int crypto_stream_beforenm(unsigned char *c, const unsigned char *k)
{

  /*
  int64 x0;
  int64 x1;
  int64 x2;
  int64 x3;
  int64 e;
  int64 q0;
  int64 q1;
  int64 q2;
  int64 q3;
  */

  /* xmm0..xmm15 and t mirror the SSE registers of the original assembly;
     they hold the bitsliced key state across expansion rounds. */
  int128 xmm0;
  int128 xmm1;
  int128 xmm2;
  int128 xmm3;
  int128 xmm4;
  int128 xmm5;
  int128 xmm6;
  int128 xmm7;
  int128 xmm8;
  int128 xmm9;
  int128 xmm10;
  int128 xmm11;
  int128 xmm12;
  int128 xmm13;
  int128 xmm14;
  int128 xmm15;
  int128 t;

  /* Bitslice the raw key and store it as round key 0 at the start of c. */
  bitslicekey0(k, c)

  keyexpbs1(xmm0, xmm1, xmm2, xmm3, xmm4, xmm5, xmm6, xmm7, xmm8, xmm9, xmm10, xmm11, xmm12, xmm13, xmm14, xmm15,c)
  keyexpbs(xmm0, xmm1, xmm4, xmm6, xmm3, xmm7, xmm2, xmm5, xmm8, xmm9, xmm10, xmm11, xmm12, xmm13, xmm14, xmm15, xor_rcon(&xmm1);, 2,c)
  keyexpbs(xmm0, xmm1, xmm3, xmm2, xmm6, xmm5, xmm4, xmm7, xmm8, xmm9, xmm10, xmm11, xmm12, xmm13, xmm14, xmm15, xor_rcon(&xmm6);, 3,c)
  keyexpbs(xmm0, xmm1, xmm6, xmm4, xmm2, xmm7, xmm3, xmm5, xmm8, xmm9, xmm10, xmm11, xmm12, xmm13, xmm14, xmm15, xor_rcon(&xmm3);, 4,c)

  keyexpbs(xmm0, xmm1, xmm2, xmm3, xmm4, xmm5, xmm6, xmm7, xmm8, xmm9, xmm10, xmm11, xmm12, xmm13, xmm14, xmm15, xor_rcon(&xmm3);, 5,c)
  keyexpbs(xmm0, xmm1, xmm4, xmm6, xmm3, xmm7, xmm2, xmm5, xmm8, xmm9, xmm10, xmm11, xmm12, xmm13, xmm14, xmm15, xor_rcon(&xmm5);, 6,c)
  keyexpbs(xmm0, xmm1, xmm3, xmm2, xmm6, xmm5, xmm4, xmm7, xmm8, xmm9, xmm10, xmm11, xmm12, xmm13, xmm14, xmm15, xor_rcon(&xmm3);, 7,c)
  keyexpbs(xmm0, xmm1, xmm6, xmm4, xmm2, xmm7, xmm3, xmm5, xmm8, xmm9, xmm10, xmm11, xmm12, xmm13, xmm14, xmm15, xor_rcon(&xmm7);, 8,c)

  keyexpbs(xmm0, xmm1, xmm2, xmm3, xmm4, xmm5, xmm6, xmm7, xmm8, xmm9, xmm10, xmm11, xmm12, xmm13, xmm14, xmm15, xor_rcon(&xmm0); xor_rcon(&xmm1); xor_rcon(&xmm6); xor_rcon(&xmm3);, 9,c)
  keyexpbs10(xmm0, xmm1, xmm4, xmm6, xmm3, xmm7, xmm2, xmm5, xmm8, xmm9, xmm10, xmm11, xmm12, xmm13, xmm14, xmm15,c)

  return 0;
}
diff --git a/nacl/crypto_stream/aes128ctr/portable/common.c b/nacl/crypto_stream/aes128ctr/portable/common.c new file mode 100644 index 00000000..14a28cc6 --- /dev/null +++ b/nacl/crypto_stream/aes128ctr/portable/common.c | |||
@@ -0,0 +1,64 @@ | |||
1 | #include "common.h" | ||
2 | |||
3 | uint32 load32_bigendian(const unsigned char *x) | ||
4 | { | ||
5 | return | ||
6 | (uint32) (x[3]) \ | ||
7 | | (((uint32) (x[2])) << 8) \ | ||
8 | | (((uint32) (x[1])) << 16) \ | ||
9 | | (((uint32) (x[0])) << 24) | ||
10 | ; | ||
11 | } | ||
12 | |||
13 | void store32_bigendian(unsigned char *x,uint32 u) | ||
14 | { | ||
15 | x[3] = u; u >>= 8; | ||
16 | x[2] = u; u >>= 8; | ||
17 | x[1] = u; u >>= 8; | ||
18 | x[0] = u; | ||
19 | } | ||
20 | |||
21 | uint32 load32_littleendian(const unsigned char *x) | ||
22 | { | ||
23 | return | ||
24 | (uint32) (x[0]) \ | ||
25 | | (((uint32) (x[1])) << 8) \ | ||
26 | | (((uint32) (x[2])) << 16) \ | ||
27 | | (((uint32) (x[3])) << 24) | ||
28 | ; | ||
29 | } | ||
30 | |||
31 | void store32_littleendian(unsigned char *x,uint32 u) | ||
32 | { | ||
33 | x[0] = u; u >>= 8; | ||
34 | x[1] = u; u >>= 8; | ||
35 | x[2] = u; u >>= 8; | ||
36 | x[3] = u; | ||
37 | } | ||
38 | |||
39 | |||
40 | uint64 load64_littleendian(const unsigned char *x) | ||
41 | { | ||
42 | return | ||
43 | (uint64) (x[0]) \ | ||
44 | | (((uint64) (x[1])) << 8) \ | ||
45 | | (((uint64) (x[2])) << 16) \ | ||
46 | | (((uint64) (x[3])) << 24) | ||
47 | | (((uint64) (x[4])) << 32) | ||
48 | | (((uint64) (x[5])) << 40) | ||
49 | | (((uint64) (x[6])) << 48) | ||
50 | | (((uint64) (x[7])) << 56) | ||
51 | ; | ||
52 | } | ||
53 | |||
54 | void store64_littleendian(unsigned char *x,uint64 u) | ||
55 | { | ||
56 | x[0] = u; u >>= 8; | ||
57 | x[1] = u; u >>= 8; | ||
58 | x[2] = u; u >>= 8; | ||
59 | x[3] = u; u >>= 8; | ||
60 | x[4] = u; u >>= 8; | ||
61 | x[5] = u; u >>= 8; | ||
62 | x[6] = u; u >>= 8; | ||
63 | x[7] = u; | ||
64 | } | ||
diff --git a/nacl/crypto_stream/aes128ctr/portable/common.h b/nacl/crypto_stream/aes128ctr/portable/common.h new file mode 100644 index 00000000..0f723332 --- /dev/null +++ b/nacl/crypto_stream/aes128ctr/portable/common.h | |||
@@ -0,0 +1,788 @@ | |||
/* Author: Peter Schwabe, ported from an assembly implementation by Emilia Käsper
 Date: 2009-03-19
 Public domain */
#ifndef COMMON_H
#define COMMON_H

#include "types.h"

/* Byte-order helpers, implemented in common.c.  Each public symbol is
   renamed via #define into the crypto_stream_aes128ctr_portable_*
   namespace so multiple NaCl implementations can be linked together
   without collisions. */

#define load32_bigendian crypto_stream_aes128ctr_portable_load32_bigendian
uint32 load32_bigendian(const unsigned char *x);

#define store32_bigendian crypto_stream_aes128ctr_portable_store32_bigendian
void store32_bigendian(unsigned char *x,uint32 u);

#define load32_littleendian crypto_stream_aes128ctr_portable_load32_littleendian
uint32 load32_littleendian(const unsigned char *x);

#define store32_littleendian crypto_stream_aes128ctr_portable_store32_littleendian
void store32_littleendian(unsigned char *x,uint32 u);

#define load64_littleendian crypto_stream_aes128ctr_portable_load64_littleendian
uint64 load64_littleendian(const unsigned char *x);

#define store64_littleendian crypto_stream_aes128ctr_portable_store64_littleendian
void store64_littleendian(unsigned char *x,uint64 u);
27 | /* Macros required only for key expansion */ | ||
28 | |||
/* First round of bitsliced AES-128 key expansion.
   Rotates each register's bytes, applies the bitsliced S-box, xors the
   round constant into b0, spreads the key word via the EXPB0 shuffle,
   then xors in the previous (round-0) key from bskey[0..112] four times,
   interleaved with 8-bit right shifts, and stores round key 1 at
   bskey+128..240 in the register order b0,b1,b4,b6,b3,b7,b2,b5 produced
   by the S-box.
   NOTE(review): b0 is shuffled with EXPB0 twice (first and last shufb
   line); since EXPB0 replicates byte 3 of each word, the second shuffle
   is a no-op -- looks like a harmless copy-paste duplicate, confirm. */
#define keyexpbs1(b0, b1, b2, b3, b4, b5, b6, b7, t0, t1, t2, t3, t4, t5, t6, t7, bskey) \
rotbyte(&b0);\
rotbyte(&b1);\
rotbyte(&b2);\
rotbyte(&b3);\
rotbyte(&b4);\
rotbyte(&b5);\
rotbyte(&b6);\
rotbyte(&b7);\
;\
sbox(b0, b1, b2, b3, b4, b5, b6, b7, t0, t1, t2, t3, t4, t5, t6, t7);\
;\
xor_rcon(&b0);\
shufb(&b0, EXPB0);\
shufb(&b1, EXPB0);\
shufb(&b4, EXPB0);\
shufb(&b6, EXPB0);\
shufb(&b3, EXPB0);\
shufb(&b7, EXPB0);\
shufb(&b2, EXPB0);\
shufb(&b5, EXPB0);\
shufb(&b0, EXPB0);\
;\
t0 = *(int128 *)(bskey + 0);\
t1 = *(int128 *)(bskey + 16);\
t2 = *(int128 *)(bskey + 32);\
t3 = *(int128 *)(bskey + 48);\
t4 = *(int128 *)(bskey + 64);\
t5 = *(int128 *)(bskey + 80);\
t6 = *(int128 *)(bskey + 96);\
t7 = *(int128 *)(bskey + 112);\
;\
xor2(&b0, &t0);\
xor2(&b1, &t1);\
xor2(&b4, &t2);\
xor2(&b6, &t3);\
xor2(&b3, &t4);\
xor2(&b7, &t5);\
xor2(&b2, &t6);\
xor2(&b5, &t7);\
;\
rshift32_littleendian(&t0, 8);\
rshift32_littleendian(&t1, 8);\
rshift32_littleendian(&t2, 8);\
rshift32_littleendian(&t3, 8);\
rshift32_littleendian(&t4, 8);\
rshift32_littleendian(&t5, 8);\
rshift32_littleendian(&t6, 8);\
rshift32_littleendian(&t7, 8);\
;\
xor2(&b0, &t0);\
xor2(&b1, &t1);\
xor2(&b4, &t2);\
xor2(&b6, &t3);\
xor2(&b3, &t4);\
xor2(&b7, &t5);\
xor2(&b2, &t6);\
xor2(&b5, &t7);\
;\
rshift32_littleendian(&t0, 8);\
rshift32_littleendian(&t1, 8);\
rshift32_littleendian(&t2, 8);\
rshift32_littleendian(&t3, 8);\
rshift32_littleendian(&t4, 8);\
rshift32_littleendian(&t5, 8);\
rshift32_littleendian(&t6, 8);\
rshift32_littleendian(&t7, 8);\
;\
xor2(&b0, &t0);\
xor2(&b1, &t1);\
xor2(&b4, &t2);\
xor2(&b6, &t3);\
xor2(&b3, &t4);\
xor2(&b7, &t5);\
xor2(&b2, &t6);\
xor2(&b5, &t7);\
;\
rshift32_littleendian(&t0, 8);\
rshift32_littleendian(&t1, 8);\
rshift32_littleendian(&t2, 8);\
rshift32_littleendian(&t3, 8);\
rshift32_littleendian(&t4, 8);\
rshift32_littleendian(&t5, 8);\
rshift32_littleendian(&t6, 8);\
rshift32_littleendian(&t7, 8);\
;\
xor2(&b0, &t0);\
xor2(&b1, &t1);\
xor2(&b4, &t2);\
xor2(&b6, &t3);\
xor2(&b3, &t4);\
xor2(&b7, &t5);\
xor2(&b2, &t6);\
xor2(&b5, &t7);\
;\
*(int128 *)(bskey + 128) = b0;\
*(int128 *)(bskey + 144) = b1;\
*(int128 *)(bskey + 160) = b4;\
*(int128 *)(bskey + 176) = b6;\
*(int128 *)(bskey + 192) = b3;\
*(int128 *)(bskey + 208) = b7;\
*(int128 *)(bskey + 224) = b2;\
*(int128 *)(bskey + 240) = b5;\
132 | |||
/* Final (10th) round of bitsliced AES-128 key expansion.
   Same structure as keyexpbs, specialised for i == 10: toggles the four
   registers/slices the S-box implementation keeps complemented, applies
   rotbyte + sbox + round-constant xors, reads round key 9 from
   bskey + 9*128, folds it in with the three shifted xors, then applies
   the M0 shuffle to all eight outputs before storing round key 10 at
   bskey+1280..1392 (register order b0,b1,b4,b6,b3,b7,b2,b5). */
#define keyexpbs10(b0, b1, b2, b3, b4, b5, b6, b7, t0, t1, t2, t3, t4, t5, t6, t7, bskey) ;\
toggle(&b0);\
toggle(&b1);\
toggle(&b5);\
toggle(&b6);\
rotbyte(&b0);\
rotbyte(&b1);\
rotbyte(&b2);\
rotbyte(&b3);\
rotbyte(&b4);\
rotbyte(&b5);\
rotbyte(&b6);\
rotbyte(&b7);\
;\
sbox(b0, b1, b2, b3, b4, b5, b6, b7, t0, t1, t2, t3, t4, t5, t6, t7);\
;\
xor_rcon(&b1);\
xor_rcon(&b4);\
xor_rcon(&b3);\
xor_rcon(&b7);\
shufb(&b0, EXPB0);\
shufb(&b1, EXPB0);\
shufb(&b4, EXPB0);\
shufb(&b6, EXPB0);\
shufb(&b3, EXPB0);\
shufb(&b7, EXPB0);\
shufb(&b2, EXPB0);\
shufb(&b5, EXPB0);\
;\
t0 = *(int128 *)(bskey + 9 * 128 + 0);\
t1 = *(int128 *)(bskey + 9 * 128 + 16);\
t2 = *(int128 *)(bskey + 9 * 128 + 32);\
t3 = *(int128 *)(bskey + 9 * 128 + 48);\
t4 = *(int128 *)(bskey + 9 * 128 + 64);\
t5 = *(int128 *)(bskey + 9 * 128 + 80);\
t6 = *(int128 *)(bskey + 9 * 128 + 96);\
t7 = *(int128 *)(bskey + 9 * 128 + 112);\
;\
toggle(&t0);\
toggle(&t1);\
toggle(&t5);\
toggle(&t6);\
;\
xor2(&b0, &t0);\
xor2(&b1, &t1);\
xor2(&b4, &t2);\
xor2(&b6, &t3);\
xor2(&b3, &t4);\
xor2(&b7, &t5);\
xor2(&b2, &t6);\
xor2(&b5, &t7);\
;\
rshift32_littleendian(&t0, 8);\
rshift32_littleendian(&t1, 8);\
rshift32_littleendian(&t2, 8);\
rshift32_littleendian(&t3, 8);\
rshift32_littleendian(&t4, 8);\
rshift32_littleendian(&t5, 8);\
rshift32_littleendian(&t6, 8);\
rshift32_littleendian(&t7, 8);\
;\
xor2(&b0, &t0);\
xor2(&b1, &t1);\
xor2(&b4, &t2);\
xor2(&b6, &t3);\
xor2(&b3, &t4);\
xor2(&b7, &t5);\
xor2(&b2, &t6);\
xor2(&b5, &t7);\
;\
rshift32_littleendian(&t0, 8);\
rshift32_littleendian(&t1, 8);\
rshift32_littleendian(&t2, 8);\
rshift32_littleendian(&t3, 8);\
rshift32_littleendian(&t4, 8);\
rshift32_littleendian(&t5, 8);\
rshift32_littleendian(&t6, 8);\
rshift32_littleendian(&t7, 8);\
;\
xor2(&b0, &t0);\
xor2(&b1, &t1);\
xor2(&b4, &t2);\
xor2(&b6, &t3);\
xor2(&b3, &t4);\
xor2(&b7, &t5);\
xor2(&b2, &t6);\
xor2(&b5, &t7);\
;\
rshift32_littleendian(&t0, 8);\
rshift32_littleendian(&t1, 8);\
rshift32_littleendian(&t2, 8);\
rshift32_littleendian(&t3, 8);\
rshift32_littleendian(&t4, 8);\
rshift32_littleendian(&t5, 8);\
rshift32_littleendian(&t6, 8);\
rshift32_littleendian(&t7, 8);\
;\
xor2(&b0, &t0);\
xor2(&b1, &t1);\
xor2(&b4, &t2);\
xor2(&b6, &t3);\
xor2(&b3, &t4);\
xor2(&b7, &t5);\
xor2(&b2, &t6);\
xor2(&b5, &t7);\
;\
shufb(&b0, M0);\
shufb(&b1, M0);\
shufb(&b2, M0);\
shufb(&b3, M0);\
shufb(&b4, M0);\
shufb(&b5, M0);\
shufb(&b6, M0);\
shufb(&b7, M0);\
;\
*(int128 *)(bskey + 1280) = b0;\
*(int128 *)(bskey + 1296) = b1;\
*(int128 *)(bskey + 1312) = b4;\
*(int128 *)(bskey + 1328) = b6;\
*(int128 *)(bskey + 1344) = b3;\
*(int128 *)(bskey + 1360) = b7;\
*(int128 *)(bskey + 1376) = b2;\
*(int128 *)(bskey + 1392) = b5;\
256 | |||
257 | |||
/* Generic round i (2..9) of bitsliced AES-128 key expansion.
   'rcon' is a caller-supplied statement applying that round's round
   constant (a sequence of xor_rcon calls); 'i' selects which 128-byte
   slot of bskey is read (i-1) and written (i).  Otherwise identical in
   structure to keyexpbs1, plus the toggles that compensate for the
   complemented slices kept by the S-box implementation. */
#define keyexpbs(b0, b1, b2, b3, b4, b5, b6, b7, t0, t1, t2, t3, t4, t5, t6, t7, rcon, i, bskey) \
toggle(&b0);\
toggle(&b1);\
toggle(&b5);\
toggle(&b6);\
rotbyte(&b0);\
rotbyte(&b1);\
rotbyte(&b2);\
rotbyte(&b3);\
rotbyte(&b4);\
rotbyte(&b5);\
rotbyte(&b6);\
rotbyte(&b7);\
;\
sbox(b0, b1, b2, b3, b4, b5, b6, b7, t0, t1, t2, t3, t4, t5, t6, t7);\
;\
rcon;\
shufb(&b0, EXPB0);\
shufb(&b1, EXPB0);\
shufb(&b4, EXPB0);\
shufb(&b6, EXPB0);\
shufb(&b3, EXPB0);\
shufb(&b7, EXPB0);\
shufb(&b2, EXPB0);\
shufb(&b5, EXPB0);\
;\
t0 = *(int128 *)(bskey + (i-1) * 128 + 0);\
t1 = *(int128 *)(bskey + (i-1) * 128 + 16);\
t2 = *(int128 *)(bskey + (i-1) * 128 + 32);\
t3 = *(int128 *)(bskey + (i-1) * 128 + 48);\
t4 = *(int128 *)(bskey + (i-1) * 128 + 64);\
t5 = *(int128 *)(bskey + (i-1) * 128 + 80);\
t6 = *(int128 *)(bskey + (i-1) * 128 + 96);\
t7 = *(int128 *)(bskey + (i-1) * 128 + 112);\
;\
toggle(&t0);\
toggle(&t1);\
toggle(&t5);\
toggle(&t6);\
;\
xor2(&b0, &t0);\
xor2(&b1, &t1);\
xor2(&b4, &t2);\
xor2(&b6, &t3);\
xor2(&b3, &t4);\
xor2(&b7, &t5);\
xor2(&b2, &t6);\
xor2(&b5, &t7);\
;\
rshift32_littleendian(&t0, 8);\
rshift32_littleendian(&t1, 8);\
rshift32_littleendian(&t2, 8);\
rshift32_littleendian(&t3, 8);\
rshift32_littleendian(&t4, 8);\
rshift32_littleendian(&t5, 8);\
rshift32_littleendian(&t6, 8);\
rshift32_littleendian(&t7, 8);\
;\
xor2(&b0, &t0);\
xor2(&b1, &t1);\
xor2(&b4, &t2);\
xor2(&b6, &t3);\
xor2(&b3, &t4);\
xor2(&b7, &t5);\
xor2(&b2, &t6);\
xor2(&b5, &t7);\
;\
rshift32_littleendian(&t0, 8);\
rshift32_littleendian(&t1, 8);\
rshift32_littleendian(&t2, 8);\
rshift32_littleendian(&t3, 8);\
rshift32_littleendian(&t4, 8);\
rshift32_littleendian(&t5, 8);\
rshift32_littleendian(&t6, 8);\
rshift32_littleendian(&t7, 8);\
;\
xor2(&b0, &t0);\
xor2(&b1, &t1);\
xor2(&b4, &t2);\
xor2(&b6, &t3);\
xor2(&b3, &t4);\
xor2(&b7, &t5);\
xor2(&b2, &t6);\
xor2(&b5, &t7);\
;\
rshift32_littleendian(&t0, 8);\
rshift32_littleendian(&t1, 8);\
rshift32_littleendian(&t2, 8);\
rshift32_littleendian(&t3, 8);\
rshift32_littleendian(&t4, 8);\
rshift32_littleendian(&t5, 8);\
rshift32_littleendian(&t6, 8);\
rshift32_littleendian(&t7, 8);\
;\
xor2(&b0, &t0);\
xor2(&b1, &t1);\
xor2(&b4, &t2);\
xor2(&b6, &t3);\
xor2(&b3, &t4);\
xor2(&b7, &t5);\
xor2(&b2, &t6);\
xor2(&b5, &t7);\
;\
*(int128 *)(bskey + i*128 + 0) = b0;\
*(int128 *)(bskey + i*128 + 16) = b1;\
*(int128 *)(bskey + i*128 + 32) = b4;\
*(int128 *)(bskey + i*128 + 48) = b6;\
*(int128 *)(bskey + i*128 + 64) = b3;\
*(int128 *)(bskey + i*128 + 80) = b7;\
*(int128 *)(bskey + i*128 + 96) = b2;\
*(int128 *)(bskey + i*128 + 112) = b5;\
369 | |||
370 | /* Macros used in multiple contexts */ | ||
371 | |||
/* Bitslice the 16-byte round-0 key: replicate it into xmm0..xmm7,
   transpose with bitslice(), and store the eight slices at bskey+0..112.
   Uses file-scope int128 registers xmm0..xmm7 and a temporary t from the
   including translation unit. */
#define bitslicekey0(key, bskey) \
xmm0 = *(int128 *) (key + 0);\
shufb(&xmm0, M0);\
copy2(&xmm1, &xmm0);\
copy2(&xmm2, &xmm0);\
copy2(&xmm3, &xmm0);\
copy2(&xmm4, &xmm0);\
copy2(&xmm5, &xmm0);\
copy2(&xmm6, &xmm0);\
copy2(&xmm7, &xmm0);\
;\
bitslice(xmm7, xmm6, xmm5, xmm4, xmm3, xmm2, xmm1, xmm0, t);\
;\
*(int128 *) (bskey + 0) = xmm0;\
*(int128 *) (bskey + 16) = xmm1;\
*(int128 *) (bskey + 32) = xmm2;\
*(int128 *) (bskey + 48) = xmm3;\
*(int128 *) (bskey + 64) = xmm4;\
*(int128 *) (bskey + 80) = xmm5;\
*(int128 *) (bskey + 96) = xmm6;\
*(int128 *) (bskey + 112) = xmm7;\
393 | |||
394 | |||
/* Bitslice a round-10 key into bskey+1280..1392, complementing the four
   slices (xmm6,xmm5,xmm1,xmm0) that the S-box keeps toggled.
   NOTE(review): unlike bitslicekey0/bitslicekey, these copy2 calls pass
   int128 values instead of pointers (no '&'), which does not match
   copy2(int128 *, const int128 *); also no M0 shuffle is applied.
   Presumably this macro is unused -- confirm before relying on it. */
#define bitslicekey10(key, bskey) \
xmm0 = *(int128 *) (key + 0);\
copy2(xmm1, xmm0);\
copy2(xmm2, xmm0);\
copy2(xmm3, xmm0);\
copy2(xmm4, xmm0);\
copy2(xmm5, xmm0);\
copy2(xmm6, xmm0);\
copy2(xmm7, xmm0);\
;\
bitslice(xmm7, xmm6, xmm5, xmm4, xmm3, xmm2, xmm1, xmm0, t);\
;\
toggle(&xmm6);\
toggle(&xmm5);\
toggle(&xmm1);\
toggle(&xmm0);\
;\
*(int128 *) (bskey + 0 + 1280) = xmm0;\
*(int128 *) (bskey + 16 + 1280) = xmm1;\
*(int128 *) (bskey + 32 + 1280) = xmm2;\
*(int128 *) (bskey + 48 + 1280) = xmm3;\
*(int128 *) (bskey + 64 + 1280) = xmm4;\
*(int128 *) (bskey + 80 + 1280) = xmm5;\
*(int128 *) (bskey + 96 + 1280) = xmm6;\
*(int128 *) (bskey + 112 + 1280) = xmm7;\
420 | |||
421 | |||
/* Bitslice a 16-byte round key into slot i of bskey (offset 128*i),
   applying the M0 byte shuffle first and complementing slices
   xmm6,xmm5,xmm1,xmm0 to match the S-box's complemented inputs. */
#define bitslicekey(i,key,bskey) \
xmm0 = *(int128 *) (key + 0);\
shufb(&xmm0, M0);\
copy2(&xmm1, &xmm0);\
copy2(&xmm2, &xmm0);\
copy2(&xmm3, &xmm0);\
copy2(&xmm4, &xmm0);\
copy2(&xmm5, &xmm0);\
copy2(&xmm6, &xmm0);\
copy2(&xmm7, &xmm0);\
;\
bitslice(xmm7, xmm6, xmm5, xmm4, xmm3, xmm2, xmm1, xmm0, t);\
;\
toggle(&xmm6);\
toggle(&xmm5);\
toggle(&xmm1);\
toggle(&xmm0);\
;\
*(int128 *) (bskey + 0 + 128*i) = xmm0;\
*(int128 *) (bskey + 16 + 128*i) = xmm1;\
*(int128 *) (bskey + 32 + 128*i) = xmm2;\
*(int128 *) (bskey + 48 + 128*i) = xmm3;\
*(int128 *) (bskey + 64 + 128*i) = xmm4;\
*(int128 *) (bskey + 80 + 128*i) = xmm5;\
*(int128 *) (bskey + 96 + 128*i) = xmm6;\
*(int128 *) (bskey + 112 + 128*i) = xmm7;\
448 | |||
449 | |||
/* Transpose eight 128-bit registers into bitsliced form via three rounds
   of swapmove with the bit-selection masks BS0 (bit 0 of each pair),
   BS1 (2-bit groups) and BS2 (4-bit groups); t is a scratch register. */
#define bitslice(x0, x1, x2, x3, x4, x5, x6, x7, t) \
swapmove(x0, x1, 1, BS0, t);\
swapmove(x2, x3, 1, BS0, t);\
swapmove(x4, x5, 1, BS0, t);\
swapmove(x6, x7, 1, BS0, t);\
;\
swapmove(x0, x2, 2, BS1, t);\
swapmove(x1, x3, 2, BS1, t);\
swapmove(x4, x6, 2, BS1, t);\
swapmove(x5, x7, 2, BS1, t);\
;\
swapmove(x0, x4, 4, BS2, t);\
swapmove(x1, x5, 4, BS2, t);\
swapmove(x2, x6, 4, BS2, t);\
swapmove(x3, x7, 4, BS2, t);\
465 | |||
466 | |||
/* Classic bit-matrix transpose step: exchanges, between a and b, the bit
   groups selected by mask m at distance n:
     t = ((b >> n) ^ a) & m;  a ^= t;  b ^= t << n;
   (shifts are the 64-bit-lane little-endian shifts from int128.c). */
#define swapmove(a, b, n, m, t) \
copy2(&t, &b);\
rshift64_littleendian(&t, n);\
xor2(&t, &a);\
and2(&t, &m);\
xor2(&a, &t);\
lshift64_littleendian(&t, n);\
xor2(&b, &t);
475 | |||
/* Rotate bytes of x using the ROTB shuffle table (x is an int128 *). */
#define rotbyte(x) \
shufb(x, ROTB) /* TODO: Make faster */
478 | |||
479 | |||
480 | /* Macros used for encryption (and decryption) */ | ||
481 | |||
/* Combined AddRoundKey + ShiftRows for round i: xor each slice with the
   corresponding slice of round key i-1 stored in bskey, then permute its
   bytes with shuffle table M (SR for inner rounds, SRM0 for the last). */
#define shiftrows(x0, x1, x2, x3, x4, x5, x6, x7, i, M, bskey) \
xor2(&x0, (int128 *)(bskey + 128*(i-1) + 0));\
shufb(&x0, M);\
xor2(&x1, (int128 *)(bskey + 128*(i-1) + 16));\
shufb(&x1, M);\
xor2(&x2, (int128 *)(bskey + 128*(i-1) + 32));\
shufb(&x2, M);\
xor2(&x3, (int128 *)(bskey + 128*(i-1) + 48));\
shufb(&x3, M);\
xor2(&x4, (int128 *)(bskey + 128*(i-1) + 64));\
shufb(&x4, M);\
xor2(&x5, (int128 *)(bskey + 128*(i-1) + 80));\
shufb(&x5, M);\
xor2(&x6, (int128 *)(bskey + 128*(i-1) + 96));\
shufb(&x6, M);\
xor2(&x7, (int128 *)(bskey + 128*(i-1) + 112));\
shufb(&x7, M);\
499 | |||
500 | |||
/* Bitsliced MixColumns: t_i gets a 32-bit word rotation of x_i
   (shufd 0x93), the xor ladder implements the GF(2^8) doubling chain
   across slices (slice 7 feeding back into slices 0,1,3,4 is the
   reduction polynomial), then a half rotation (shufd 0x4e) and final
   xors produce the mixed columns in t0..t7. */
#define mixcolumns(x0, x1, x2, x3, x4, x5, x6, x7, t0, t1, t2, t3, t4, t5, t6, t7) \
shufd(&t0, &x0, 0x93);\
shufd(&t1, &x1, 0x93);\
shufd(&t2, &x2, 0x93);\
shufd(&t3, &x3, 0x93);\
shufd(&t4, &x4, 0x93);\
shufd(&t5, &x5, 0x93);\
shufd(&t6, &x6, 0x93);\
shufd(&t7, &x7, 0x93);\
;\
xor2(&x0, &t0);\
xor2(&x1, &t1);\
xor2(&x2, &t2);\
xor2(&x3, &t3);\
xor2(&x4, &t4);\
xor2(&x5, &t5);\
xor2(&x6, &t6);\
xor2(&x7, &t7);\
;\
xor2(&t0, &x7);\
xor2(&t1, &x0);\
xor2(&t2, &x1);\
xor2(&t1, &x7);\
xor2(&t3, &x2);\
xor2(&t4, &x3);\
xor2(&t5, &x4);\
xor2(&t3, &x7);\
xor2(&t6, &x5);\
xor2(&t7, &x6);\
xor2(&t4, &x7);\
;\
shufd(&x0, &x0, 0x4e);\
shufd(&x1, &x1, 0x4e);\
shufd(&x2, &x2, 0x4e);\
shufd(&x3, &x3, 0x4e);\
shufd(&x4, &x4, 0x4e);\
shufd(&x5, &x5, 0x4e);\
shufd(&x6, &x6, 0x4e);\
shufd(&x7, &x7, 0x4e);\
;\
xor2(&t0, &x0);\
xor2(&t1, &x1);\
xor2(&t2, &x2);\
xor2(&t3, &x3);\
xor2(&t4, &x4);\
xor2(&t5, &x5);\
xor2(&t6, &x6);\
xor2(&t7, &x7);\
549 | |||
550 | |||
/* One full inner AES round on the bitsliced state: AddRoundKey+ShiftRows
   (key slot i-1, shuffle SR), SubBytes (sbox), MixColumns.  The result
   lands in t0..t7 with the slice permutation b0,b1,b4,b6,b3,b7,b2,b5
   produced by the S-box. */
#define aesround(i, b0, b1, b2, b3, b4, b5, b6, b7, t0, t1, t2, t3, t4, t5, t6, t7, bskey) \
shiftrows(b0, b1, b2, b3, b4, b5, b6, b7, i, SR, bskey);\
sbox(b0, b1, b2, b3, b4, b5, b6, b7, t0, t1, t2, t3, t4, t5, t6, t7);\
mixcolumns(b0, b1, b4, b6, b3, b7, b2, b5, t0, t1, t2, t3, t4, t5, t6, t7);\
555 | |||
556 | |||
/* Final AES round: AddRoundKey+ShiftRows with the SRM0 shuffle, SubBytes,
   then a direct xor with round key 10 (bskey + 128*10) -- no MixColumns.
   The xors follow the S-box output order b0,b1,b4,b6,b3,b7,b2,b5. */
#define lastround(b0, b1, b2, b3, b4, b5, b6, b7, t0, t1, t2, t3, t4, t5, t6, t7, bskey) \
shiftrows(b0, b1, b2, b3, b4, b5, b6, b7, 10, SRM0, bskey);\
sbox(b0, b1, b2, b3, b4, b5, b6, b7, t0, t1, t2, t3, t4, t5, t6, t7);\
xor2(&b0,(int128 *)(bskey + 128*10));\
xor2(&b1,(int128 *)(bskey + 128*10+16));\
xor2(&b4,(int128 *)(bskey + 128*10+32));\
xor2(&b6,(int128 *)(bskey + 128*10+48));\
xor2(&b3,(int128 *)(bskey + 128*10+64));\
xor2(&b7,(int128 *)(bskey + 128*10+80));\
xor2(&b2,(int128 *)(bskey + 128*10+96));\
xor2(&b5,(int128 *)(bskey + 128*10+112));\
568 | |||
569 | |||
/* Bitsliced AES S-box: change to the tower-field basis, invert in
   GF(2^8) (Inv_GF256), change back.  Note the argument reorderings --
   the S-box output appears in a permuted slice order consumed by the
   callers (aesround/lastround/key expansion). */
#define sbox(b0, b1, b2, b3, b4, b5, b6, b7, t0, t1, t2, t3, s0, s1, s2, s3) \
InBasisChange(b0, b1, b2, b3, b4, b5, b6, b7); \
Inv_GF256(b6, b5, b0, b3, b7, b1, b4, b2, t0, t1, t2, t3, s0, s1, s2, s3); \
OutBasisChange(b7, b1, b4, b2, b6, b5, b0, b3); \
574 | |||
575 | |||
/* Linear input basis change for the bitsliced S-box (xor network only). */
#define InBasisChange(b0, b1, b2, b3, b4, b5, b6, b7) \
xor2(&b5, &b6);\
xor2(&b2, &b1);\
xor2(&b5, &b0);\
xor2(&b6, &b2);\
xor2(&b3, &b0);\
;\
xor2(&b6, &b3);\
xor2(&b3, &b7);\
xor2(&b3, &b4);\
xor2(&b7, &b5);\
xor2(&b3, &b1);\
;\
xor2(&b4, &b5);\
xor2(&b2, &b7);\
xor2(&b1, &b5);\
592 | |||
/* Linear output basis change for the bitsliced S-box (xor network only). */
#define OutBasisChange(b0, b1, b2, b3, b4, b5, b6, b7) \
xor2(&b0, &b6);\
xor2(&b1, &b4);\
xor2(&b2, &b0);\
xor2(&b4, &b6);\
xor2(&b6, &b1);\
;\
xor2(&b1, &b5);\
xor2(&b5, &b3);\
xor2(&b2, &b5);\
xor2(&b3, &b7);\
xor2(&b7, &b5);\
;\
xor2(&b4, &b7);\
607 | |||
/* Bitsliced GF(2^2) multiplication of (x0,x1) by (y0,y1), result in
   (x0,x1); t0 is scratch.  y0/y1 are left unchanged. */
#define Mul_GF4(x0, x1, y0, y1, t0) \
copy2(&t0, &y0);\
xor2(&t0, &y1);\
and2(&t0, &x0);\
xor2(&x0, &x1);\
and2(&x0, &y1);\
and2(&x1, &y0);\
xor2(&x0, &x1);\
xor2(&x1, &t0);\
617 | |||
/* Variant of Mul_GF4 with the final two xors swapped (the "N"
   normalization used inside Mul_GF16/Mul_GF16_2); same operands. */
#define Mul_GF4_N(x0, x1, y0, y1, t0) \
copy2(&t0, &y0);\
xor2(&t0, &y1);\
and2(&t0, &x0);\
xor2(&x0, &x1);\
and2(&x0, &y1);\
and2(&x1, &y0);\
xor2(&x1, &x0);\
xor2(&x0, &t0);\
627 | |||
/* Two parallel bitsliced GF(2^2) multiplications of (x0,x1) and (x2,x3)
   by the shared operand (y0,y1); results replace x0..x3, t0/t1 are
   scratch.  Fix: the first line read "copy2(&t0, = y0);" -- a stray '='
   where the address-of operator belongs, which is a syntax error if the
   macro is ever expanded (it matches the copy2/xor2 pattern used by
   Mul_GF4 above). */
#define Mul_GF4_2(x0, x1, x2, x3, y0, y1, t0, t1) \
copy2(&t0, &y0);\
xor2(&t0, &y1);\
copy2(&t1, &t0);\
and2(&t0, &x0);\
and2(&t1, &x2);\
xor2(&x0, &x1);\
xor2(&x2, &x3);\
and2(&x0, &y1);\
and2(&x2, &y1);\
and2(&x1, &y0);\
and2(&x3, &y0);\
xor2(&x0, &x1);\
xor2(&x2, &x3);\
xor2(&x1, &t0);\
xor2(&x3, &t1);\
644 | |||
/* Bitsliced GF(2^4) multiplication of (x0..x3) by (y0..y3) via three
   GF(2^2) multiplications (Karatsuba-style); result in x0..x3.
   Note: y0/y1 are modified (xored with y2/y3). */
#define Mul_GF16(x0, x1, x2, x3, y0, y1, y2, y3, t0, t1, t2, t3) \
copy2(&t0, &x0);\
copy2(&t1, &x1);\
Mul_GF4(x0, x1, y0, y1, t2);\
xor2(&t0, &x2);\
xor2(&t1, &x3);\
xor2(&y0, &y2);\
xor2(&y1, &y3);\
Mul_GF4_N(t0, t1, y0, y1, t2);\
Mul_GF4(x2, x3, y2, y3, t3);\
;\
xor2(&x0, &t0);\
xor2(&x2, &t0);\
xor2(&x1, &t1);\
xor2(&x3, &t1);\
660 | |||
/* Two GF(2^4) multiplications sharing the operand (y0..y3): multiplies
   (x0..x3) and (x4..x7) by it, results in place.  The second half
   re-uses the already-updated y0/y1 and undoes/redoes the y xors so the
   shared operand is consumed consistently. */
#define Mul_GF16_2(x0, x1, x2, x3, x4, x5, x6, x7, y0, y1, y2, y3, t0, t1, t2, t3) \
copy2(&t0, &x0);\
copy2(&t1, &x1);\
Mul_GF4(x0, x1, y0, y1, t2);\
xor2(&t0, &x2);\
xor2(&t1, &x3);\
xor2(&y0, &y2);\
xor2(&y1, &y3);\
Mul_GF4_N(t0, t1, y0, y1, t3);\
Mul_GF4(x2, x3, y2, y3, t2);\
;\
xor2(&x0, &t0);\
xor2(&x2, &t0);\
xor2(&x1, &t1);\
xor2(&x3, &t1);\
;\
copy2(&t0, &x4);\
copy2(&t1, &x5);\
xor2(&t0, &x6);\
xor2(&t1, &x7);\
Mul_GF4_N(t0, t1, y0, y1, t3);\
Mul_GF4(x6, x7, y2, y3, t2);\
xor2(&y0, &y2);\
xor2(&y1, &y3);\
Mul_GF4(x4, x5, y0, y1, t3);\
;\
xor2(&x4, &t0);\
xor2(&x6, &t0);\
xor2(&x5, &t1);\
xor2(&x7, &t1);\
691 | |||
/* Bitsliced GF(2^4) inversion of (x0..x3): builds the inverse's
   coordinates in (t1,t0) from and/or/xor combinations, then multiplies
   both halves back in via Mul_GF4_2.  t0..t3 are scratch. */
#define Inv_GF16(x0, x1, x2, x3, t0, t1, t2, t3) \
copy2(&t0, &x1);\
copy2(&t1, &x0);\
and2(&t0, &x3);\
or2(&t1, &x2);\
copy2(&t2, &x1);\
copy2(&t3, &x0);\
or2(&t2, &x2);\
or2(&t3, &x3);\
xor2(&t2, &t3);\
;\
xor2(&t0, &t2);\
xor2(&t1, &t2);\
;\
Mul_GF4_2(x0, x1, x2, x3, t1, t0, t2, t3);\
707 | |||
708 | |||
/* Bitsliced GF(2^8) inversion -- the nonlinear core of the AES S-box.
   Computes, with only and/or/xor/copy on the eight slices x0..x7, the
   GF(2^4) inverse of the tower-field representation into s3,s2,s1,t1
   (plus scratch s0,t0,t2,t3), then multiplies it back into x0..x7 via
   Mul_GF16_2.  The exact statement order is load-bearing; do not
   reorder. */
#define Inv_GF256(x0, x1, x2, x3, x4, x5, x6, x7, t0, t1, t2, t3, s0, s1, s2, s3) \
copy2(&t3, &x4);\
copy2(&t2, &x5);\
copy2(&t1, &x1);\
copy2(&s1, &x7);\
copy2(&s0, &x0);\
;\
xor2(&t3, &x6);\
xor2(&t2, &x7);\
xor2(&t1, &x3);\
xor2(&s1, &x6);\
xor2(&s0, &x2);\
;\
copy2(&s2, &t3);\
copy2(&t0, &t2);\
copy2(&s3, &t3);\
;\
or2(&t2, &t1);\
or2(&t3, &s0);\
xor2(&s3, &t0);\
and2(&s2, &s0);\
and2(&t0, &t1);\
xor2(&s0, &t1);\
and2(&s3, &s0);\
copy2(&s0, &x3);\
xor2(&s0, &x2);\
and2(&s1, &s0);\
xor2(&t3, &s1);\
xor2(&t2, &s1);\
copy2(&s1, &x4);\
xor2(&s1, &x5);\
copy2(&s0, &x1);\
copy2(&t1, &s1);\
xor2(&s0, &x0);\
or2(&t1, &s0);\
and2(&s1, &s0);\
xor2(&t0, &s1);\
xor2(&t3, &s3);\
xor2(&t2, &s2);\
xor2(&t1, &s3);\
xor2(&t0, &s2);\
xor2(&t1, &s2);\
copy2(&s0, &x7);\
copy2(&s1, &x6);\
copy2(&s2, &x5);\
copy2(&s3, &x4);\
and2(&s0, &x3);\
and2(&s1, &x2);\
and2(&s2, &x1);\
or2(&s3, &x0);\
xor2(&t3, &s0);\
xor2(&t2, &s1);\
xor2(&t1, &s2);\
xor2(&t0, &s3);\
;\
copy2(&s0, &t3);\
xor2(&s0, &t2);\
and2(&t3, &t1);\
copy2(&s2, &t0);\
xor2(&s2, &t3);\
copy2(&s3, &s0);\
and2(&s3, &s2);\
xor2(&s3, &t2);\
copy2(&s1, &t1);\
xor2(&s1, &t0);\
xor2(&t3, &t2);\
and2(&s1, &t3);\
xor2(&s1, &t0);\
xor2(&t1, &s1);\
copy2(&t2, &s2);\
xor2(&t2, &s1);\
and2(&t2, &t0);\
xor2(&t1, &t2);\
xor2(&s2, &t2);\
and2(&s2, &s3);\
xor2(&s2, &s0);\
;\
Mul_GF16_2(x0, x1, x2, x3, x4, x5, x6, x7, s3, s2, s1, t1, s0, t0, t2, t3);\
787 | |||
788 | #endif | ||
diff --git a/nacl/crypto_stream/aes128ctr/portable/consts.c b/nacl/crypto_stream/aes128ctr/portable/consts.c new file mode 100644 index 00000000..ed2835db --- /dev/null +++ b/nacl/crypto_stream/aes128ctr/portable/consts.c | |||
@@ -0,0 +1,14 @@ | |||
#include "consts.h"

/* Byte-index tables consumed by shufb (each output byte i takes input
   byte table[i]) and 128-bit bit-selection masks for bitslice(). */

/* Used by the rotbyte macro in common.h. */
const unsigned char ROTB[16] = {0x00, 0x00, 0x00, 0x0c, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x08};
/* Byte transposition applied when (un)bitslicing key/state material. */
const unsigned char M0[16] = {0x0f, 0x0b, 0x07, 0x03, 0x0e, 0x0a, 0x06, 0x02, 0x0d, 0x09, 0x05, 0x01, 0x0c, 0x08, 0x04, 0x00};
/* Replicates byte 3 of each 4-byte word; used during key expansion. */
const unsigned char EXPB0[16] = {0x03, 0x03, 0x03, 0x03, 0x07, 0x07, 0x07, 0x07, 0x0b, 0x0b, 0x0b, 0x0b, 0x0f, 0x0f, 0x0f, 0x0f};

/* Reverses the bytes within each 32-bit word. */
const unsigned char SWAP32[16] = {0x03, 0x02, 0x01, 0x00, 0x07, 0x06, 0x05, 0x04, 0x0b, 0x0a, 0x09, 0x08, 0x0f, 0x0e, 0x0d, 0x0c};
const unsigned char M0SWAP[16] = {0x0c, 0x08, 0x04, 0x00, 0x0d, 0x09, 0x05, 0x01, 0x0e, 0x0a, 0x06, 0x02, 0x0f, 0x0b, 0x07, 0x03};
/* ShiftRows shuffles: SR for inner rounds, SRM0 for the last round
   (passed as M to the shiftrows macro). */
const unsigned char SR[16] = {0x01, 0x02, 0x03, 0x00, 0x06, 0x07, 0x04, 0x05, 0x0b, 0x08, 0x09, 0x0a, 0x0c, 0x0d, 0x0e, 0x0f};
const unsigned char SRM0[16] = {0x0f, 0x0a, 0x05, 0x00, 0x0e, 0x09, 0x04, 0x03, 0x0d, 0x08, 0x07, 0x02, 0x0c, 0x0b, 0x06, 0x01};

/* swapmove masks: alternating 1-, 2- and 4-bit groups. */
const int128 BS0 = {0x5555555555555555ULL, 0x5555555555555555ULL};
const int128 BS1 = {0x3333333333333333ULL, 0x3333333333333333ULL};
const int128 BS2 = {0x0f0f0f0f0f0f0f0fULL, 0x0f0f0f0f0f0f0f0fULL};
diff --git a/nacl/crypto_stream/aes128ctr/portable/consts.h b/nacl/crypto_stream/aes128ctr/portable/consts.h new file mode 100644 index 00000000..4c50360b --- /dev/null +++ b/nacl/crypto_stream/aes128ctr/portable/consts.h | |||
@@ -0,0 +1,28 @@ | |||
#ifndef CONSTS_H
#define CONSTS_H

#include "int128.h"

/* Rename each table into the crypto_stream_aes128ctr_portable_*
   namespace to avoid link-time collisions; definitions are in consts.c. */
#define ROTB crypto_stream_aes128ctr_portable_ROTB
#define M0 crypto_stream_aes128ctr_portable_M0
#define EXPB0 crypto_stream_aes128ctr_portable_EXPB0
#define SWAP32 crypto_stream_aes128ctr_portable_SWAP32
#define M0SWAP crypto_stream_aes128ctr_portable_M0SWAP
#define SR crypto_stream_aes128ctr_portable_SR
#define SRM0 crypto_stream_aes128ctr_portable_SRM0
#define BS0 crypto_stream_aes128ctr_portable_BS0
#define BS1 crypto_stream_aes128ctr_portable_BS1
#define BS2 crypto_stream_aes128ctr_portable_BS2

/* Byte shuffle tables for shufb. */
extern const unsigned char ROTB[16];
extern const unsigned char M0[16];
extern const unsigned char EXPB0[16];
extern const unsigned char SWAP32[16];
extern const unsigned char M0SWAP[16];
extern const unsigned char SR[16];
extern const unsigned char SRM0[16];
/* Bit-group masks for swapmove/bitslice. */
extern const int128 BS0;
extern const int128 BS1;
extern const int128 BS2;

#endif
diff --git a/nacl/crypto_stream/aes128ctr/portable/int128.c b/nacl/crypto_stream/aes128ctr/portable/int128.c new file mode 100644 index 00000000..25894d42 --- /dev/null +++ b/nacl/crypto_stream/aes128ctr/portable/int128.c | |||
@@ -0,0 +1,128 @@ | |||
1 | #include "int128.h" | ||
2 | #include "common.h" | ||
3 | |||
4 | void xor2(int128 *r, const int128 *x) | ||
5 | { | ||
6 | r->a ^= x->a; | ||
7 | r->b ^= x->b; | ||
8 | } | ||
9 | |||
10 | void and2(int128 *r, const int128 *x) | ||
11 | { | ||
12 | r->a &= x->a; | ||
13 | r->b &= x->b; | ||
14 | } | ||
15 | |||
16 | void or2(int128 *r, const int128 *x) | ||
17 | { | ||
18 | r->a |= x->a; | ||
19 | r->b |= x->b; | ||
20 | } | ||
21 | |||
22 | void copy2(int128 *r, const int128 *x) | ||
23 | { | ||
24 | r->a = x->a; | ||
25 | r->b = x->b; | ||
26 | } | ||
27 | |||
28 | void shufb(int128 *r, const unsigned char *l) | ||
29 | { | ||
30 | int128 t; | ||
31 | copy2(&t,r); | ||
32 | unsigned char *cr = (unsigned char *)r; | ||
33 | unsigned char *ct = (unsigned char *)&t; | ||
34 | cr[0] = ct[l[0]]; | ||
35 | cr[1] = ct[l[1]]; | ||
36 | cr[2] = ct[l[2]]; | ||
37 | cr[3] = ct[l[3]]; | ||
38 | cr[4] = ct[l[4]]; | ||
39 | cr[5] = ct[l[5]]; | ||
40 | cr[6] = ct[l[6]]; | ||
41 | cr[7] = ct[l[7]]; | ||
42 | cr[8] = ct[l[8]]; | ||
43 | cr[9] = ct[l[9]]; | ||
44 | cr[10] = ct[l[10]]; | ||
45 | cr[11] = ct[l[11]]; | ||
46 | cr[12] = ct[l[12]]; | ||
47 | cr[13] = ct[l[13]]; | ||
48 | cr[14] = ct[l[14]]; | ||
49 | cr[15] = ct[l[15]]; | ||
50 | } | ||
51 | |||
52 | void shufd(int128 *r, const int128 *x, const unsigned int c) | ||
53 | { | ||
54 | int128 t; | ||
55 | uint32 *tp = (uint32 *)&t; | ||
56 | uint32 *xp = (uint32 *)x; | ||
57 | tp[0] = xp[c&3]; | ||
58 | tp[1] = xp[(c>>2)&3]; | ||
59 | tp[2] = xp[(c>>4)&3]; | ||
60 | tp[3] = xp[(c>>6)&3]; | ||
61 | copy2(r,&t); | ||
62 | } | ||
63 | |||
64 | void rshift32_littleendian(int128 *r, const unsigned int n) | ||
65 | { | ||
66 | unsigned char *rp = (unsigned char *)r; | ||
67 | uint32 t; | ||
68 | t = load32_littleendian(rp); | ||
69 | t >>= n; | ||
70 | store32_littleendian(rp, t); | ||
71 | t = load32_littleendian(rp+4); | ||
72 | t >>= n; | ||
73 | store32_littleendian(rp+4, t); | ||
74 | t = load32_littleendian(rp+8); | ||
75 | t >>= n; | ||
76 | store32_littleendian(rp+8, t); | ||
77 | t = load32_littleendian(rp+12); | ||
78 | t >>= n; | ||
79 | store32_littleendian(rp+12, t); | ||
80 | } | ||
81 | |||
82 | void rshift64_littleendian(int128 *r, const unsigned int n) | ||
83 | { | ||
84 | unsigned char *rp = (unsigned char *)r; | ||
85 | uint64 t; | ||
86 | t = load64_littleendian(rp); | ||
87 | t >>= n; | ||
88 | store64_littleendian(rp, t); | ||
89 | t = load64_littleendian(rp+8); | ||
90 | t >>= n; | ||
91 | store64_littleendian(rp+8, t); | ||
92 | } | ||
93 | |||
94 | void lshift64_littleendian(int128 *r, const unsigned int n) | ||
95 | { | ||
96 | unsigned char *rp = (unsigned char *)r; | ||
97 | uint64 t; | ||
98 | t = load64_littleendian(rp); | ||
99 | t <<= n; | ||
100 | store64_littleendian(rp, t); | ||
101 | t = load64_littleendian(rp+8); | ||
102 | t <<= n; | ||
103 | store64_littleendian(rp+8, t); | ||
104 | } | ||
105 | |||
106 | void toggle(int128 *r) | ||
107 | { | ||
108 | r->a ^= 0xffffffffffffffffULL; | ||
109 | r->b ^= 0xffffffffffffffffULL; | ||
110 | } | ||
111 | |||
112 | void xor_rcon(int128 *r) | ||
113 | { | ||
114 | unsigned char *rp = (unsigned char *)r; | ||
115 | uint32 t; | ||
116 | t = load32_littleendian(rp+12); | ||
117 | t ^= 0xffffffff; | ||
118 | store32_littleendian(rp+12, t); | ||
119 | } | ||
120 | |||
/* Add x (mod 2^32) into the 32-bit lane at bytes 12..15 of *r.
   NOTE(review): the name says "big" but the lane is loaded and stored
   little-endian -- presumably the counter block is kept in a byte order
   where this is intended; confirm against the stream callers. */
void add_uint32_big(int128 *r, uint32 x)
{
  unsigned char *rp = (unsigned char *)r;
  uint32 t;
  t = load32_littleendian(rp+12);
  t += x;
  store32_littleendian(rp+12, t);
}
diff --git a/nacl/crypto_stream/aes128ctr/portable/int128.h b/nacl/crypto_stream/aes128ctr/portable/int128.h new file mode 100644 index 00000000..7099e5b1 --- /dev/null +++ b/nacl/crypto_stream/aes128ctr/portable/int128.h | |||
@@ -0,0 +1,47 @@ | |||
#ifndef INT128_H
#define INT128_H

#include "common.h"

/* Portable stand-in for a 128-bit SIMD register: sixteen bytes held
   as two unsigned 64-bit halves.  'a' occupies the first eight bytes
   in memory, 'b' the second eight. */
typedef struct{
  unsigned long long a;
  unsigned long long b;
} int128;

/* Each #define below prefixes the short helper name with the
   implementation namespace so several crypto_stream implementations
   can be linked into one library without symbol clashes.
   One-line summaries follow the function names; see int128.c for the
   definitive implementations (not all are visible from this header). */

/* r ^= x, bitwise over all 128 bits. */
#define xor2 crypto_stream_aes128ctr_portable_xor2
void xor2(int128 *r, const int128 *x);

/* r &= x, bitwise over all 128 bits. */
#define and2 crypto_stream_aes128ctr_portable_and2
void and2(int128 *r, const int128 *x);

/* r |= x, bitwise over all 128 bits. */
#define or2 crypto_stream_aes128ctr_portable_or2
void or2(int128 *r, const int128 *x);

/* r = x (16-byte copy). */
#define copy2 crypto_stream_aes128ctr_portable_copy2
void copy2(int128 *r, const int128 *x);

/* Byte shuffle of *r under the 16-entry permutation table l
   (software analogue of the SSSE3 pshufb instruction). */
#define shufb crypto_stream_aes128ctr_portable_shufb
void shufb(int128 *r, const unsigned char *l);

/* 32-bit-lane shuffle of x into r selected by immediate c
   (software analogue of pshufd). */
#define shufd crypto_stream_aes128ctr_portable_shufd
void shufd(int128 *r, const int128 *x, const unsigned int c);

/* Shift each 32-bit little-endian lane of *r right by n bits. */
#define rshift32_littleendian crypto_stream_aes128ctr_portable_rshift32_littleendian
void rshift32_littleendian(int128 *r, const unsigned int n);

/* Shift each 64-bit little-endian half of *r right by n bits. */
#define rshift64_littleendian crypto_stream_aes128ctr_portable_rshift64_littleendian
void rshift64_littleendian(int128 *r, const unsigned int n);

/* Shift each 64-bit little-endian half of *r left by n bits. */
#define lshift64_littleendian crypto_stream_aes128ctr_portable_lshift64_littleendian
void lshift64_littleendian(int128 *r, const unsigned int n);

/* Complement all 128 bits of *r. */
#define toggle crypto_stream_aes128ctr_portable_toggle
void toggle(int128 *r);

/* XOR the round constant into the top 32-bit word of *r. */
#define xor_rcon crypto_stream_aes128ctr_portable_xor_rcon
void xor_rcon(int128 *r);

/* Add x (mod 2^32) to the 32-bit word in bytes 12..15 of *r. */
#define add_uint32_big crypto_stream_aes128ctr_portable_add_uint32_big
void add_uint32_big(int128 *r, uint32 x);

#endif
diff --git a/nacl/crypto_stream/aes128ctr/portable/stream.c b/nacl/crypto_stream/aes128ctr/portable/stream.c new file mode 100644 index 00000000..963fa8c1 --- /dev/null +++ b/nacl/crypto_stream/aes128ctr/portable/stream.c | |||
@@ -0,0 +1,28 @@ | |||
1 | #include "crypto_stream.h" | ||
2 | |||
3 | int crypto_stream( | ||
4 | unsigned char *out, | ||
5 | unsigned long long outlen, | ||
6 | const unsigned char *n, | ||
7 | const unsigned char *k | ||
8 | ) | ||
9 | { | ||
10 | unsigned char d[crypto_stream_BEFORENMBYTES]; | ||
11 | crypto_stream_beforenm(d, k); | ||
12 | crypto_stream_afternm(out, outlen, n, d); | ||
13 | return 0; | ||
14 | } | ||
15 | |||
16 | int crypto_stream_xor( | ||
17 | unsigned char *out, | ||
18 | const unsigned char *in, | ||
19 | unsigned long long inlen, | ||
20 | const unsigned char *n, | ||
21 | const unsigned char *k | ||
22 | ) | ||
23 | { | ||
24 | unsigned char d[crypto_stream_BEFORENMBYTES]; | ||
25 | crypto_stream_beforenm(d, k); | ||
26 | crypto_stream_xor_afternm(out, in, inlen, n, d); | ||
27 | return 0; | ||
28 | } | ||
diff --git a/nacl/crypto_stream/aes128ctr/portable/types.h b/nacl/crypto_stream/aes128ctr/portable/types.h new file mode 100644 index 00000000..6aa502fc --- /dev/null +++ b/nacl/crypto_stream/aes128ctr/portable/types.h | |||
@@ -0,0 +1,10 @@ | |||
#ifndef TYPES_H
#define TYPES_H

/* Short aliases for NaCl's fixed-width unsigned integer types,
   used throughout the portable aes128ctr implementation. */

#include "crypto_uint32.h"
typedef crypto_uint32 uint32;

#include "crypto_uint64.h"
typedef crypto_uint64 uint64;

#endif
diff --git a/nacl/crypto_stream/aes128ctr/portable/xor_afternm.c b/nacl/crypto_stream/aes128ctr/portable/xor_afternm.c new file mode 100644 index 00000000..f2ff8ff6 --- /dev/null +++ b/nacl/crypto_stream/aes128ctr/portable/xor_afternm.c | |||
@@ -0,0 +1,180 @@ | |||
1 | /* Author: Peter Schwabe, ported from an assembly implementation by Emilia Käsper | ||
2 | * Date: 2009-03-19 | ||
3 | * Public domain */ | ||
4 | |||
5 | #include <stdio.h> | ||
6 | #include "int128.h" | ||
7 | #include "common.h" | ||
8 | #include "consts.h" | ||
9 | #include "crypto_stream.h" | ||
10 | |||
/* XOR len bytes of inp with the AES-128-CTR keystream and write the
 * result to outp.  'noncep' points to the 16-byte nonce/counter block;
 * 'c' points to the expanded (bitsliced) round keys prepared by
 * crypto_stream_beforenm.  Bitsliced port of the core2 assembly:
 * eight AES blocks (128 bytes of keystream) are computed per pass
 * using the bitslice/aesround/lastround macros from common.h.
 * The 32-bit block counter lives big-endian in the last 4 nonce
 * bytes (see the load32_bigendian(np + 12) updates below).
 * Always returns 0. */
int crypto_stream_xor_afternm(unsigned char *outp, const unsigned char *inp, unsigned long long len, const unsigned char *noncep, const unsigned char *c)
{

  /* xmm0..xmm15 mirror the sixteen SSE registers of the original
     assembly implementation. */
  int128 xmm0;
  int128 xmm1;
  int128 xmm2;
  int128 xmm3;
  int128 xmm4;
  int128 xmm5;
  int128 xmm6;
  int128 xmm7;

  int128 xmm8;
  int128 xmm9;
  int128 xmm10;
  int128 xmm11;
  int128 xmm12;
  int128 xmm13;
  int128 xmm14;
  int128 xmm15;

  int128 nonce_stack;
  unsigned long long lensav;
  unsigned char bl[128];   /* keystream buffer for a partial final pass */
  unsigned char *blp;
  unsigned char b;

  uint32 tmp;

  /* Copy nonce on the stack */
  copy2(&nonce_stack, (int128 *) (noncep + 0));
  unsigned char *np = (unsigned char *)&nonce_stack;

  enc_block:

  /* Build eight consecutive counter blocks: byte-swap the nonce into
     little-endian lane order, then bump the counter lane by 1..7 for
     the seven sibling blocks. */
  xmm0 = *(int128 *) (np + 0);
  copy2(&xmm1, &xmm0);
  shufb(&xmm1, SWAP32);
  copy2(&xmm2, &xmm1);
  copy2(&xmm3, &xmm1);
  copy2(&xmm4, &xmm1);
  copy2(&xmm5, &xmm1);
  copy2(&xmm6, &xmm1);
  copy2(&xmm7, &xmm1);

  add_uint32_big(&xmm1, 1);
  add_uint32_big(&xmm2, 2);
  add_uint32_big(&xmm3, 3);
  add_uint32_big(&xmm4, 4);
  add_uint32_big(&xmm5, 5);
  add_uint32_big(&xmm6, 6);
  add_uint32_big(&xmm7, 7);

  /* Permute each block into the bitsliced input byte order. */
  shufb(&xmm0, M0);
  shufb(&xmm1, M0SWAP);
  shufb(&xmm2, M0SWAP);
  shufb(&xmm3, M0SWAP);
  shufb(&xmm4, M0SWAP);
  shufb(&xmm5, M0SWAP);
  shufb(&xmm6, M0SWAP);
  shufb(&xmm7, M0SWAP);

  /* Transpose the 8 blocks into bitsliced form (xmm8 is scratch). */
  bitslice(xmm7, xmm6, xmm5, xmm4, xmm3, xmm2, xmm1, xmm0, xmm8)

  /* Ten AES-128 rounds over all eight blocks at once; the register
     sets alternate between rounds. */
  aesround( 1, xmm0, xmm1, xmm2, xmm3, xmm4, xmm5, xmm6, xmm7, xmm8, xmm9, xmm10, xmm11, xmm12, xmm13, xmm14, xmm15,c)
  aesround( 2, xmm8, xmm9, xmm10, xmm11, xmm12, xmm13, xmm14, xmm15, xmm0, xmm1, xmm2, xmm3, xmm4, xmm5, xmm6, xmm7,c)
  aesround( 3, xmm0, xmm1, xmm2, xmm3, xmm4, xmm5, xmm6, xmm7, xmm8, xmm9, xmm10, xmm11, xmm12, xmm13, xmm14, xmm15,c)
  aesround( 4, xmm8, xmm9, xmm10, xmm11, xmm12, xmm13, xmm14, xmm15, xmm0, xmm1, xmm2, xmm3, xmm4, xmm5, xmm6, xmm7,c)
  aesround( 5, xmm0, xmm1, xmm2, xmm3, xmm4, xmm5, xmm6, xmm7, xmm8, xmm9, xmm10, xmm11, xmm12, xmm13, xmm14, xmm15,c)
  aesround( 6, xmm8, xmm9, xmm10, xmm11, xmm12, xmm13, xmm14, xmm15, xmm0, xmm1, xmm2, xmm3, xmm4, xmm5, xmm6, xmm7,c)
  aesround( 7, xmm0, xmm1, xmm2, xmm3, xmm4, xmm5, xmm6, xmm7, xmm8, xmm9, xmm10, xmm11, xmm12, xmm13, xmm14, xmm15,c)
  aesround( 8, xmm8, xmm9, xmm10, xmm11, xmm12, xmm13, xmm14, xmm15, xmm0, xmm1, xmm2, xmm3, xmm4, xmm5, xmm6, xmm7,c)
  aesround( 9, xmm0, xmm1, xmm2, xmm3, xmm4, xmm5, xmm6, xmm7, xmm8, xmm9, xmm10, xmm11, xmm12, xmm13, xmm14, xmm15,c)
  lastround(xmm8, xmm9, xmm10, xmm11, xmm12, xmm13, xmm14, xmm15, xmm0, xmm1, xmm2, xmm3, xmm4, xmm5, xmm6, xmm7,c)

  /* Transpose back; the keystream blocks land in registers 8..15 in
     the permuted order used by the stores below. */
  bitslice(xmm13, xmm10, xmm15, xmm11, xmm14, xmm12, xmm9, xmm8, xmm0)

  if(len < 128) goto partial;
  if(len == 128) goto full;

  /* More than one full pass left: advance the big-endian counter by
     the 8 blocks just consumed. */
  tmp = load32_bigendian(np + 12);
  tmp += 8;
  store32_bigendian(np + 12, tmp);

  xor2(&xmm8, (int128 *)(inp + 0));
  xor2(&xmm9, (int128 *)(inp + 16));
  xor2(&xmm12, (int128 *)(inp + 32));
  xor2(&xmm14, (int128 *)(inp + 48));
  xor2(&xmm11, (int128 *)(inp + 64));
  xor2(&xmm15, (int128 *)(inp + 80));
  xor2(&xmm10, (int128 *)(inp + 96));
  xor2(&xmm13, (int128 *)(inp + 112));

  *(int128 *) (outp + 0) = xmm8;
  *(int128 *) (outp + 16) = xmm9;
  *(int128 *) (outp + 32) = xmm12;
  *(int128 *) (outp + 48) = xmm14;
  *(int128 *) (outp + 64) = xmm11;
  *(int128 *) (outp + 80) = xmm15;
  *(int128 *) (outp + 96) = xmm10;
  *(int128 *) (outp + 112) = xmm13;

  len -= 128;
  inp += 128;
  outp += 128;

  goto enc_block;

  partial:

  /* Fewer than 128 bytes remain: advance the counter by the number of
     16-byte blocks actually consumed, spill the keystream to bl, and
     XOR the tail byte by byte. */
  lensav = len;
  len >>= 4;

  tmp = load32_bigendian(np + 12);
  tmp += len;
  store32_bigendian(np + 12, tmp);

  blp = bl;
  *(int128 *)(blp + 0) = xmm8;
  *(int128 *)(blp + 16) = xmm9;
  *(int128 *)(blp + 32) = xmm12;
  *(int128 *)(blp + 48) = xmm14;
  *(int128 *)(blp + 64) = xmm11;
  *(int128 *)(blp + 80) = xmm15;
  *(int128 *)(blp + 96) = xmm10;
  *(int128 *)(blp + 112) = xmm13;

  bytes:

  if(lensav == 0) goto end;

  b = blp[0];
  b ^= *(unsigned char *)(inp + 0);
  *(unsigned char *)(outp + 0) = b;

  blp += 1;
  inp +=1;
  outp +=1;
  lensav -= 1;

  goto bytes;

  full:

  /* Exactly 128 bytes remain: same stores as the main loop, then done. */
  tmp = load32_bigendian(np + 12);
  tmp += 8;
  store32_bigendian(np + 12, tmp);

  xor2(&xmm8, (int128 *)(inp + 0));
  xor2(&xmm9, (int128 *)(inp + 16));
  xor2(&xmm12, (int128 *)(inp + 32));
  xor2(&xmm14, (int128 *)(inp + 48));
  xor2(&xmm11, (int128 *)(inp + 64));
  xor2(&xmm15, (int128 *)(inp + 80));
  xor2(&xmm10, (int128 *)(inp + 96));
  xor2(&xmm13, (int128 *)(inp + 112));

  *(int128 *) (outp + 0) = xmm8;
  *(int128 *) (outp + 16) = xmm9;
  *(int128 *) (outp + 32) = xmm12;
  *(int128 *) (outp + 48) = xmm14;
  *(int128 *) (outp + 64) = xmm11;
  *(int128 *) (outp + 80) = xmm15;
  *(int128 *) (outp + 96) = xmm10;
  *(int128 *) (outp + 112) = xmm13;

  end:
  return 0;

}
diff --git a/nacl/crypto_stream/aes128ctr/used b/nacl/crypto_stream/aes128ctr/used new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/nacl/crypto_stream/aes128ctr/used | |||
diff --git a/nacl/crypto_stream/measure.c b/nacl/crypto_stream/measure.c new file mode 100644 index 00000000..ff3ab610 --- /dev/null +++ b/nacl/crypto_stream/measure.c | |||
@@ -0,0 +1,73 @@ | |||
1 | #include <stdlib.h> | ||
2 | #include "randombytes.h" | ||
3 | #include "cpucycles.h" | ||
4 | #include "crypto_stream.h" | ||
5 | |||
6 | extern void printentry(long long,const char *,long long *,long long); | ||
7 | extern unsigned char *alignedcalloc(unsigned long long); | ||
8 | extern const char *primitiveimplementation; | ||
9 | extern const char *implementationversion; | ||
10 | extern const char *sizenames[]; | ||
11 | extern const long long sizes[]; | ||
12 | extern void allocate(void); | ||
13 | extern void measure(void); | ||
14 | |||
15 | const char *primitiveimplementation = crypto_stream_IMPLEMENTATION; | ||
16 | const char *implementationversion = crypto_stream_VERSION; | ||
17 | const char *sizenames[] = { "keybytes", "noncebytes", 0 }; | ||
18 | const long long sizes[] = { crypto_stream_KEYBYTES, crypto_stream_NONCEBYTES }; | ||
19 | |||
20 | #define MAXTEST_BYTES 4096 | ||
21 | #ifdef SUPERCOP | ||
22 | #define MGAP 8192 | ||
23 | #else | ||
24 | #define MGAP 8 | ||
25 | #endif | ||
26 | |||
27 | static unsigned char *k; | ||
28 | static unsigned char *n; | ||
29 | static unsigned char *m; | ||
30 | static unsigned char *c; | ||
31 | |||
/* Benchmark-framework hook called before allocate(); this primitive
   needs no pre-allocation setup. */
void preallocate(void)
{
}
35 | |||
/* Allocate aligned buffers for the key, nonce, message and ciphertext,
   sized for the largest measured message length (MAXTEST_BYTES). */
void allocate(void)
{
  k = alignedcalloc(crypto_stream_KEYBYTES);
  n = alignedcalloc(crypto_stream_NONCEBYTES);
  m = alignedcalloc(MAXTEST_BYTES);
  c = alignedcalloc(MAXTEST_BYTES);
}
43 | |||
44 | #define TIMINGS 15 | ||
45 | static long long cycles[TIMINGS + 1]; | ||
46 | |||
/* Time crypto_stream and crypto_stream_xor across a range of message
   lengths, emitting per-call cycle counts via printentry.
   LOOPS is supplied by the measurement framework (not defined here). */
void measure(void)
{
  int i;
  int loop;
  int mlen;

  for (loop = 0;loop < LOOPS;++loop) {
    /* Step mlen by 1 for small sizes, growing roughly geometrically
       (1/MGAP per step) for larger ones. */
    for (mlen = 0;mlen <= MAXTEST_BYTES;mlen += 1 + mlen / MGAP) {
      randombytes(k,crypto_stream_KEYBYTES);
      randombytes(n,crypto_stream_NONCEBYTES);
      randombytes(m,mlen);
      randombytes(c,mlen);
      /* TIMINGS+1 timestamps bracket TIMINGS calls; adjacent
         differences below yield per-call cycle counts. */
      for (i = 0;i <= TIMINGS;++i) {
        cycles[i] = cpucycles();
        crypto_stream(c,mlen,n,k);
      }
      for (i = 0;i < TIMINGS;++i) cycles[i] = cycles[i + 1] - cycles[i];
      printentry(mlen,"cycles",cycles,TIMINGS);
      for (i = 0;i <= TIMINGS;++i) {
        cycles[i] = cpucycles();
        crypto_stream_xor(c,m,mlen,n,k);
      }
      for (i = 0;i < TIMINGS;++i) cycles[i] = cycles[i + 1] - cycles[i];
      printentry(mlen,"xor_cycles",cycles,TIMINGS);
    }
  }
}
diff --git a/nacl/crypto_stream/salsa20/amd64_xmm6/api.h b/nacl/crypto_stream/salsa20/amd64_xmm6/api.h new file mode 100644 index 00000000..c2b18461 --- /dev/null +++ b/nacl/crypto_stream/salsa20/amd64_xmm6/api.h | |||
@@ -0,0 +1,2 @@ | |||
1 | #define CRYPTO_KEYBYTES 32 | ||
2 | #define CRYPTO_NONCEBYTES 8 | ||
diff --git a/nacl/crypto_stream/salsa20/amd64_xmm6/implementors b/nacl/crypto_stream/salsa20/amd64_xmm6/implementors new file mode 100644 index 00000000..f6fb3c73 --- /dev/null +++ b/nacl/crypto_stream/salsa20/amd64_xmm6/implementors | |||
@@ -0,0 +1 @@ | |||
Daniel J. Bernstein | |||
diff --git a/nacl/crypto_stream/salsa20/amd64_xmm6/stream.s b/nacl/crypto_stream/salsa20/amd64_xmm6/stream.s new file mode 100644 index 00000000..82a897f7 --- /dev/null +++ b/nacl/crypto_stream/salsa20/amd64_xmm6/stream.s | |||
@@ -0,0 +1,4823 @@ | |||
1 | |||
2 | # qhasm: int64 r11_caller | ||
3 | |||
4 | # qhasm: int64 r12_caller | ||
5 | |||
6 | # qhasm: int64 r13_caller | ||
7 | |||
8 | # qhasm: int64 r14_caller | ||
9 | |||
10 | # qhasm: int64 r15_caller | ||
11 | |||
12 | # qhasm: int64 rbx_caller | ||
13 | |||
14 | # qhasm: int64 rbp_caller | ||
15 | |||
16 | # qhasm: caller r11_caller | ||
17 | |||
18 | # qhasm: caller r12_caller | ||
19 | |||
20 | # qhasm: caller r13_caller | ||
21 | |||
22 | # qhasm: caller r14_caller | ||
23 | |||
24 | # qhasm: caller r15_caller | ||
25 | |||
26 | # qhasm: caller rbx_caller | ||
27 | |||
28 | # qhasm: caller rbp_caller | ||
29 | |||
30 | # qhasm: stack64 r11_stack | ||
31 | |||
32 | # qhasm: stack64 r12_stack | ||
33 | |||
34 | # qhasm: stack64 r13_stack | ||
35 | |||
36 | # qhasm: stack64 r14_stack | ||
37 | |||
38 | # qhasm: stack64 r15_stack | ||
39 | |||
40 | # qhasm: stack64 rbx_stack | ||
41 | |||
42 | # qhasm: stack64 rbp_stack | ||
43 | |||
44 | # qhasm: int64 a | ||
45 | |||
46 | # qhasm: int64 arg1 | ||
47 | |||
48 | # qhasm: int64 arg2 | ||
49 | |||
50 | # qhasm: int64 arg3 | ||
51 | |||
52 | # qhasm: int64 arg4 | ||
53 | |||
54 | # qhasm: int64 arg5 | ||
55 | |||
56 | # qhasm: input arg1 | ||
57 | |||
58 | # qhasm: input arg2 | ||
59 | |||
60 | # qhasm: input arg3 | ||
61 | |||
62 | # qhasm: input arg4 | ||
63 | |||
64 | # qhasm: input arg5 | ||
65 | |||
66 | # qhasm: int64 k | ||
67 | |||
68 | # qhasm: int64 kbits | ||
69 | |||
70 | # qhasm: int64 iv | ||
71 | |||
72 | # qhasm: int64 i | ||
73 | |||
74 | # qhasm: stack128 x0 | ||
75 | |||
76 | # qhasm: stack128 x1 | ||
77 | |||
78 | # qhasm: stack128 x2 | ||
79 | |||
80 | # qhasm: stack128 x3 | ||
81 | |||
82 | # qhasm: int64 m | ||
83 | |||
84 | # qhasm: int64 out | ||
85 | |||
86 | # qhasm: int64 bytes | ||
87 | |||
88 | # qhasm: stack32 eax_stack | ||
89 | |||
90 | # qhasm: stack32 ebx_stack | ||
91 | |||
92 | # qhasm: stack32 esi_stack | ||
93 | |||
94 | # qhasm: stack32 edi_stack | ||
95 | |||
96 | # qhasm: stack32 ebp_stack | ||
97 | |||
98 | # qhasm: int6464 diag0 | ||
99 | |||
100 | # qhasm: int6464 diag1 | ||
101 | |||
102 | # qhasm: int6464 diag2 | ||
103 | |||
104 | # qhasm: int6464 diag3 | ||
105 | |||
106 | # qhasm: int6464 a0 | ||
107 | |||
108 | # qhasm: int6464 a1 | ||
109 | |||
110 | # qhasm: int6464 a2 | ||
111 | |||
112 | # qhasm: int6464 a3 | ||
113 | |||
114 | # qhasm: int6464 a4 | ||
115 | |||
116 | # qhasm: int6464 a5 | ||
117 | |||
118 | # qhasm: int6464 a6 | ||
119 | |||
120 | # qhasm: int6464 a7 | ||
121 | |||
122 | # qhasm: int6464 b0 | ||
123 | |||
124 | # qhasm: int6464 b1 | ||
125 | |||
126 | # qhasm: int6464 b2 | ||
127 | |||
128 | # qhasm: int6464 b3 | ||
129 | |||
130 | # qhasm: int6464 b4 | ||
131 | |||
132 | # qhasm: int6464 b5 | ||
133 | |||
134 | # qhasm: int6464 b6 | ||
135 | |||
136 | # qhasm: int6464 b7 | ||
137 | |||
138 | # qhasm: int6464 z0 | ||
139 | |||
140 | # qhasm: int6464 z1 | ||
141 | |||
142 | # qhasm: int6464 z2 | ||
143 | |||
144 | # qhasm: int6464 z3 | ||
145 | |||
146 | # qhasm: int6464 z4 | ||
147 | |||
148 | # qhasm: int6464 z5 | ||
149 | |||
150 | # qhasm: int6464 z6 | ||
151 | |||
152 | # qhasm: int6464 z7 | ||
153 | |||
154 | # qhasm: int6464 z8 | ||
155 | |||
156 | # qhasm: int6464 z9 | ||
157 | |||
158 | # qhasm: int6464 z10 | ||
159 | |||
160 | # qhasm: int6464 z11 | ||
161 | |||
162 | # qhasm: int6464 z12 | ||
163 | |||
164 | # qhasm: int6464 z13 | ||
165 | |||
166 | # qhasm: int6464 z14 | ||
167 | |||
168 | # qhasm: int6464 z15 | ||
169 | |||
170 | # qhasm: stack128 z0_stack | ||
171 | |||
172 | # qhasm: stack128 z1_stack | ||
173 | |||
174 | # qhasm: stack128 z2_stack | ||
175 | |||
176 | # qhasm: stack128 z3_stack | ||
177 | |||
178 | # qhasm: stack128 z4_stack | ||
179 | |||
180 | # qhasm: stack128 z5_stack | ||
181 | |||
182 | # qhasm: stack128 z6_stack | ||
183 | |||
184 | # qhasm: stack128 z7_stack | ||
185 | |||
186 | # qhasm: stack128 z8_stack | ||
187 | |||
188 | # qhasm: stack128 z9_stack | ||
189 | |||
190 | # qhasm: stack128 z10_stack | ||
191 | |||
192 | # qhasm: stack128 z11_stack | ||
193 | |||
194 | # qhasm: stack128 z12_stack | ||
195 | |||
196 | # qhasm: stack128 z13_stack | ||
197 | |||
198 | # qhasm: stack128 z14_stack | ||
199 | |||
200 | # qhasm: stack128 z15_stack | ||
201 | |||
202 | # qhasm: int6464 y0 | ||
203 | |||
204 | # qhasm: int6464 y1 | ||
205 | |||
206 | # qhasm: int6464 y2 | ||
207 | |||
208 | # qhasm: int6464 y3 | ||
209 | |||
210 | # qhasm: int6464 y4 | ||
211 | |||
212 | # qhasm: int6464 y5 | ||
213 | |||
214 | # qhasm: int6464 y6 | ||
215 | |||
216 | # qhasm: int6464 y7 | ||
217 | |||
218 | # qhasm: int6464 y8 | ||
219 | |||
220 | # qhasm: int6464 y9 | ||
221 | |||
222 | # qhasm: int6464 y10 | ||
223 | |||
224 | # qhasm: int6464 y11 | ||
225 | |||
226 | # qhasm: int6464 y12 | ||
227 | |||
228 | # qhasm: int6464 y13 | ||
229 | |||
230 | # qhasm: int6464 y14 | ||
231 | |||
232 | # qhasm: int6464 y15 | ||
233 | |||
234 | # qhasm: int6464 r0 | ||
235 | |||
236 | # qhasm: int6464 r1 | ||
237 | |||
238 | # qhasm: int6464 r2 | ||
239 | |||
240 | # qhasm: int6464 r3 | ||
241 | |||
242 | # qhasm: int6464 r4 | ||
243 | |||
244 | # qhasm: int6464 r5 | ||
245 | |||
246 | # qhasm: int6464 r6 | ||
247 | |||
248 | # qhasm: int6464 r7 | ||
249 | |||
250 | # qhasm: int6464 r8 | ||
251 | |||
252 | # qhasm: int6464 r9 | ||
253 | |||
254 | # qhasm: int6464 r10 | ||
255 | |||
256 | # qhasm: int6464 r11 | ||
257 | |||
258 | # qhasm: int6464 r12 | ||
259 | |||
260 | # qhasm: int6464 r13 | ||
261 | |||
262 | # qhasm: int6464 r14 | ||
263 | |||
264 | # qhasm: int6464 r15 | ||
265 | |||
266 | # qhasm: stack128 orig0 | ||
267 | |||
268 | # qhasm: stack128 orig1 | ||
269 | |||
270 | # qhasm: stack128 orig2 | ||
271 | |||
272 | # qhasm: stack128 orig3 | ||
273 | |||
274 | # qhasm: stack128 orig4 | ||
275 | |||
276 | # qhasm: stack128 orig5 | ||
277 | |||
278 | # qhasm: stack128 orig6 | ||
279 | |||
280 | # qhasm: stack128 orig7 | ||
281 | |||
282 | # qhasm: stack128 orig8 | ||
283 | |||
284 | # qhasm: stack128 orig9 | ||
285 | |||
286 | # qhasm: stack128 orig10 | ||
287 | |||
288 | # qhasm: stack128 orig11 | ||
289 | |||
290 | # qhasm: stack128 orig12 | ||
291 | |||
292 | # qhasm: stack128 orig13 | ||
293 | |||
294 | # qhasm: stack128 orig14 | ||
295 | |||
296 | # qhasm: stack128 orig15 | ||
297 | |||
298 | # qhasm: int64 in0 | ||
299 | |||
300 | # qhasm: int64 in1 | ||
301 | |||
302 | # qhasm: int64 in2 | ||
303 | |||
304 | # qhasm: int64 in3 | ||
305 | |||
306 | # qhasm: int64 in4 | ||
307 | |||
308 | # qhasm: int64 in5 | ||
309 | |||
310 | # qhasm: int64 in6 | ||
311 | |||
312 | # qhasm: int64 in7 | ||
313 | |||
314 | # qhasm: int64 in8 | ||
315 | |||
316 | # qhasm: int64 in9 | ||
317 | |||
318 | # qhasm: int64 in10 | ||
319 | |||
320 | # qhasm: int64 in11 | ||
321 | |||
322 | # qhasm: int64 in12 | ||
323 | |||
324 | # qhasm: int64 in13 | ||
325 | |||
326 | # qhasm: int64 in14 | ||
327 | |||
328 | # qhasm: int64 in15 | ||
329 | |||
330 | # qhasm: stack512 tmp | ||
331 | |||
332 | # qhasm: int64 ctarget | ||
333 | |||
334 | # qhasm: stack64 bytes_backup | ||
335 | |||
336 | # qhasm: enter crypto_stream_salsa20_amd64_xmm6 | ||
337 | .text | ||
338 | .p2align 5 | ||
339 | .globl _crypto_stream_salsa20_amd64_xmm6 | ||
340 | .globl crypto_stream_salsa20_amd64_xmm6 | ||
341 | _crypto_stream_salsa20_amd64_xmm6: | ||
342 | crypto_stream_salsa20_amd64_xmm6: | ||
343 | mov %rsp,%r11 | ||
344 | and $31,%r11 | ||
345 | add $480,%r11 | ||
346 | sub %r11,%rsp | ||
347 | |||
348 | # qhasm: r11_stack = r11_caller | ||
349 | # asm 1: movq <r11_caller=int64#9,>r11_stack=stack64#1 | ||
350 | # asm 2: movq <r11_caller=%r11,>r11_stack=352(%rsp) | ||
351 | movq %r11,352(%rsp) | ||
352 | |||
353 | # qhasm: r12_stack = r12_caller | ||
354 | # asm 1: movq <r12_caller=int64#10,>r12_stack=stack64#2 | ||
355 | # asm 2: movq <r12_caller=%r12,>r12_stack=360(%rsp) | ||
356 | movq %r12,360(%rsp) | ||
357 | |||
358 | # qhasm: r13_stack = r13_caller | ||
359 | # asm 1: movq <r13_caller=int64#11,>r13_stack=stack64#3 | ||
360 | # asm 2: movq <r13_caller=%r13,>r13_stack=368(%rsp) | ||
361 | movq %r13,368(%rsp) | ||
362 | |||
363 | # qhasm: r14_stack = r14_caller | ||
364 | # asm 1: movq <r14_caller=int64#12,>r14_stack=stack64#4 | ||
365 | # asm 2: movq <r14_caller=%r14,>r14_stack=376(%rsp) | ||
366 | movq %r14,376(%rsp) | ||
367 | |||
368 | # qhasm: r15_stack = r15_caller | ||
369 | # asm 1: movq <r15_caller=int64#13,>r15_stack=stack64#5 | ||
370 | # asm 2: movq <r15_caller=%r15,>r15_stack=384(%rsp) | ||
371 | movq %r15,384(%rsp) | ||
372 | |||
373 | # qhasm: rbx_stack = rbx_caller | ||
374 | # asm 1: movq <rbx_caller=int64#14,>rbx_stack=stack64#6 | ||
375 | # asm 2: movq <rbx_caller=%rbx,>rbx_stack=392(%rsp) | ||
376 | movq %rbx,392(%rsp) | ||
377 | |||
378 | # qhasm: rbp_stack = rbp_caller | ||
379 | # asm 1: movq <rbp_caller=int64#15,>rbp_stack=stack64#7 | ||
380 | # asm 2: movq <rbp_caller=%rbp,>rbp_stack=400(%rsp) | ||
381 | movq %rbp,400(%rsp) | ||
382 | |||
383 | # qhasm: bytes = arg2 | ||
384 | # asm 1: mov <arg2=int64#2,>bytes=int64#6 | ||
385 | # asm 2: mov <arg2=%rsi,>bytes=%r9 | ||
386 | mov %rsi,%r9 | ||
387 | |||
388 | # qhasm: out = arg1 | ||
389 | # asm 1: mov <arg1=int64#1,>out=int64#1 | ||
390 | # asm 2: mov <arg1=%rdi,>out=%rdi | ||
391 | mov %rdi,%rdi | ||
392 | |||
393 | # qhasm: m = out | ||
394 | # asm 1: mov <out=int64#1,>m=int64#2 | ||
395 | # asm 2: mov <out=%rdi,>m=%rsi | ||
396 | mov %rdi,%rsi | ||
397 | |||
398 | # qhasm: iv = arg3 | ||
399 | # asm 1: mov <arg3=int64#3,>iv=int64#3 | ||
400 | # asm 2: mov <arg3=%rdx,>iv=%rdx | ||
401 | mov %rdx,%rdx | ||
402 | |||
403 | # qhasm: k = arg4 | ||
404 | # asm 1: mov <arg4=int64#4,>k=int64#8 | ||
405 | # asm 2: mov <arg4=%rcx,>k=%r10 | ||
406 | mov %rcx,%r10 | ||
407 | |||
408 | # qhasm: unsigned>? bytes - 0 | ||
409 | # asm 1: cmp $0,<bytes=int64#6 | ||
410 | # asm 2: cmp $0,<bytes=%r9 | ||
411 | cmp $0,%r9 | ||
412 | # comment:fp stack unchanged by jump | ||
413 | |||
414 | # qhasm: goto done if !unsigned> | ||
415 | jbe ._done | ||
416 | |||
417 | # qhasm: a = 0 | ||
418 | # asm 1: mov $0,>a=int64#7 | ||
419 | # asm 2: mov $0,>a=%rax | ||
420 | mov $0,%rax | ||
421 | |||
422 | # qhasm: i = bytes | ||
423 | # asm 1: mov <bytes=int64#6,>i=int64#4 | ||
424 | # asm 2: mov <bytes=%r9,>i=%rcx | ||
425 | mov %r9,%rcx | ||
426 | |||
427 | # qhasm: while (i) { *out++ = a; --i } | ||
428 | rep stosb | ||
429 | |||
430 | # qhasm: out -= bytes | ||
431 | # asm 1: sub <bytes=int64#6,<out=int64#1 | ||
432 | # asm 2: sub <bytes=%r9,<out=%rdi | ||
433 | sub %r9,%rdi | ||
434 | # comment:fp stack unchanged by jump | ||
435 | |||
436 | # qhasm: goto start | ||
437 | jmp ._start | ||
438 | |||
439 | # qhasm: enter crypto_stream_salsa20_amd64_xmm6_xor | ||
440 | .text | ||
441 | .p2align 5 | ||
442 | .globl _crypto_stream_salsa20_amd64_xmm6_xor | ||
443 | .globl crypto_stream_salsa20_amd64_xmm6_xor | ||
444 | _crypto_stream_salsa20_amd64_xmm6_xor: | ||
445 | crypto_stream_salsa20_amd64_xmm6_xor: | ||
446 | mov %rsp,%r11 | ||
447 | and $31,%r11 | ||
448 | add $480,%r11 | ||
449 | sub %r11,%rsp | ||
450 | |||
451 | # qhasm: r11_stack = r11_caller | ||
452 | # asm 1: movq <r11_caller=int64#9,>r11_stack=stack64#1 | ||
453 | # asm 2: movq <r11_caller=%r11,>r11_stack=352(%rsp) | ||
454 | movq %r11,352(%rsp) | ||
455 | |||
456 | # qhasm: r12_stack = r12_caller | ||
457 | # asm 1: movq <r12_caller=int64#10,>r12_stack=stack64#2 | ||
458 | # asm 2: movq <r12_caller=%r12,>r12_stack=360(%rsp) | ||
459 | movq %r12,360(%rsp) | ||
460 | |||
461 | # qhasm: r13_stack = r13_caller | ||
462 | # asm 1: movq <r13_caller=int64#11,>r13_stack=stack64#3 | ||
463 | # asm 2: movq <r13_caller=%r13,>r13_stack=368(%rsp) | ||
464 | movq %r13,368(%rsp) | ||
465 | |||
466 | # qhasm: r14_stack = r14_caller | ||
467 | # asm 1: movq <r14_caller=int64#12,>r14_stack=stack64#4 | ||
468 | # asm 2: movq <r14_caller=%r14,>r14_stack=376(%rsp) | ||
469 | movq %r14,376(%rsp) | ||
470 | |||
471 | # qhasm: r15_stack = r15_caller | ||
472 | # asm 1: movq <r15_caller=int64#13,>r15_stack=stack64#5 | ||
473 | # asm 2: movq <r15_caller=%r15,>r15_stack=384(%rsp) | ||
474 | movq %r15,384(%rsp) | ||
475 | |||
476 | # qhasm: rbx_stack = rbx_caller | ||
477 | # asm 1: movq <rbx_caller=int64#14,>rbx_stack=stack64#6 | ||
478 | # asm 2: movq <rbx_caller=%rbx,>rbx_stack=392(%rsp) | ||
479 | movq %rbx,392(%rsp) | ||
480 | |||
481 | # qhasm: rbp_stack = rbp_caller | ||
482 | # asm 1: movq <rbp_caller=int64#15,>rbp_stack=stack64#7 | ||
483 | # asm 2: movq <rbp_caller=%rbp,>rbp_stack=400(%rsp) | ||
484 | movq %rbp,400(%rsp) | ||
485 | |||
486 | # qhasm: out = arg1 | ||
487 | # asm 1: mov <arg1=int64#1,>out=int64#1 | ||
488 | # asm 2: mov <arg1=%rdi,>out=%rdi | ||
489 | mov %rdi,%rdi | ||
490 | |||
491 | # qhasm: m = arg2 | ||
492 | # asm 1: mov <arg2=int64#2,>m=int64#2 | ||
493 | # asm 2: mov <arg2=%rsi,>m=%rsi | ||
494 | mov %rsi,%rsi | ||
495 | |||
496 | # qhasm: bytes = arg3 | ||
497 | # asm 1: mov <arg3=int64#3,>bytes=int64#6 | ||
498 | # asm 2: mov <arg3=%rdx,>bytes=%r9 | ||
499 | mov %rdx,%r9 | ||
500 | |||
501 | # qhasm: iv = arg4 | ||
502 | # asm 1: mov <arg4=int64#4,>iv=int64#3 | ||
503 | # asm 2: mov <arg4=%rcx,>iv=%rdx | ||
504 | mov %rcx,%rdx | ||
505 | |||
506 | # qhasm: k = arg5 | ||
507 | # asm 1: mov <arg5=int64#5,>k=int64#8 | ||
508 | # asm 2: mov <arg5=%r8,>k=%r10 | ||
509 | mov %r8,%r10 | ||
510 | |||
511 | # qhasm: unsigned>? bytes - 0 | ||
512 | # asm 1: cmp $0,<bytes=int64#6 | ||
513 | # asm 2: cmp $0,<bytes=%r9 | ||
514 | cmp $0,%r9 | ||
515 | # comment:fp stack unchanged by jump | ||
516 | |||
517 | # qhasm: goto done if !unsigned> | ||
518 | jbe ._done | ||
519 | # comment:fp stack unchanged by fallthrough | ||
520 | |||
521 | # qhasm: start: | ||
522 | ._start: | ||
523 | |||
524 | # qhasm: in12 = *(uint32 *) (k + 20) | ||
525 | # asm 1: movl 20(<k=int64#8),>in12=int64#4d | ||
526 | # asm 2: movl 20(<k=%r10),>in12=%ecx | ||
527 | movl 20(%r10),%ecx | ||
528 | |||
529 | # qhasm: in1 = *(uint32 *) (k + 0) | ||
530 | # asm 1: movl 0(<k=int64#8),>in1=int64#5d | ||
531 | # asm 2: movl 0(<k=%r10),>in1=%r8d | ||
532 | movl 0(%r10),%r8d | ||
533 | |||
534 | # qhasm: in6 = *(uint32 *) (iv + 0) | ||
535 | # asm 1: movl 0(<iv=int64#3),>in6=int64#7d | ||
536 | # asm 2: movl 0(<iv=%rdx),>in6=%eax | ||
537 | movl 0(%rdx),%eax | ||
538 | |||
539 | # qhasm: in11 = *(uint32 *) (k + 16) | ||
540 | # asm 1: movl 16(<k=int64#8),>in11=int64#9d | ||
541 | # asm 2: movl 16(<k=%r10),>in11=%r11d | ||
542 | movl 16(%r10),%r11d | ||
543 | |||
544 | # qhasm: ((uint32 *)&x1)[0] = in12 | ||
545 | # asm 1: movl <in12=int64#4d,>x1=stack128#1 | ||
546 | # asm 2: movl <in12=%ecx,>x1=0(%rsp) | ||
547 | movl %ecx,0(%rsp) | ||
548 | |||
549 | # qhasm: ((uint32 *)&x1)[1] = in1 | ||
550 | # asm 1: movl <in1=int64#5d,4+<x1=stack128#1 | ||
551 | # asm 2: movl <in1=%r8d,4+<x1=0(%rsp) | ||
552 | movl %r8d,4+0(%rsp) | ||
553 | |||
554 | # qhasm: ((uint32 *)&x1)[2] = in6 | ||
555 | # asm 1: movl <in6=int64#7d,8+<x1=stack128#1 | ||
556 | # asm 2: movl <in6=%eax,8+<x1=0(%rsp) | ||
557 | movl %eax,8+0(%rsp) | ||
558 | |||
559 | # qhasm: ((uint32 *)&x1)[3] = in11 | ||
560 | # asm 1: movl <in11=int64#9d,12+<x1=stack128#1 | ||
561 | # asm 2: movl <in11=%r11d,12+<x1=0(%rsp) | ||
562 | movl %r11d,12+0(%rsp) | ||
563 | |||
564 | # qhasm: in8 = 0 | ||
565 | # asm 1: mov $0,>in8=int64#4 | ||
566 | # asm 2: mov $0,>in8=%rcx | ||
567 | mov $0,%rcx | ||
568 | |||
569 | # qhasm: in13 = *(uint32 *) (k + 24) | ||
570 | # asm 1: movl 24(<k=int64#8),>in13=int64#5d | ||
571 | # asm 2: movl 24(<k=%r10),>in13=%r8d | ||
572 | movl 24(%r10),%r8d | ||
573 | |||
574 | # qhasm: in2 = *(uint32 *) (k + 4) | ||
575 | # asm 1: movl 4(<k=int64#8),>in2=int64#7d | ||
576 | # asm 2: movl 4(<k=%r10),>in2=%eax | ||
577 | movl 4(%r10),%eax | ||
578 | |||
579 | # qhasm: in7 = *(uint32 *) (iv + 4) | ||
580 | # asm 1: movl 4(<iv=int64#3),>in7=int64#3d | ||
581 | # asm 2: movl 4(<iv=%rdx),>in7=%edx | ||
582 | movl 4(%rdx),%edx | ||
583 | |||
584 | # qhasm: ((uint32 *)&x2)[0] = in8 | ||
585 | # asm 1: movl <in8=int64#4d,>x2=stack128#2 | ||
586 | # asm 2: movl <in8=%ecx,>x2=16(%rsp) | ||
587 | movl %ecx,16(%rsp) | ||
588 | |||
589 | # qhasm: ((uint32 *)&x2)[1] = in13 | ||
590 | # asm 1: movl <in13=int64#5d,4+<x2=stack128#2 | ||
591 | # asm 2: movl <in13=%r8d,4+<x2=16(%rsp) | ||
592 | movl %r8d,4+16(%rsp) | ||
593 | |||
594 | # qhasm: ((uint32 *)&x2)[2] = in2 | ||
595 | # asm 1: movl <in2=int64#7d,8+<x2=stack128#2 | ||
596 | # asm 2: movl <in2=%eax,8+<x2=16(%rsp) | ||
597 | movl %eax,8+16(%rsp) | ||
598 | |||
599 | # qhasm: ((uint32 *)&x2)[3] = in7 | ||
600 | # asm 1: movl <in7=int64#3d,12+<x2=stack128#2 | ||
601 | # asm 2: movl <in7=%edx,12+<x2=16(%rsp) | ||
602 | movl %edx,12+16(%rsp) | ||
603 | |||
604 | # qhasm: in4 = *(uint32 *) (k + 12) | ||
605 | # asm 1: movl 12(<k=int64#8),>in4=int64#3d | ||
606 | # asm 2: movl 12(<k=%r10),>in4=%edx | ||
607 | movl 12(%r10),%edx | ||
608 | |||
609 | # qhasm: in9 = 0 | ||
610 | # asm 1: mov $0,>in9=int64#4 | ||
611 | # asm 2: mov $0,>in9=%rcx | ||
612 | mov $0,%rcx | ||
613 | |||
614 | # qhasm: in14 = *(uint32 *) (k + 28) | ||
615 | # asm 1: movl 28(<k=int64#8),>in14=int64#5d | ||
616 | # asm 2: movl 28(<k=%r10),>in14=%r8d | ||
617 | movl 28(%r10),%r8d | ||
618 | |||
619 | # qhasm: in3 = *(uint32 *) (k + 8) | ||
620 | # asm 1: movl 8(<k=int64#8),>in3=int64#7d | ||
621 | # asm 2: movl 8(<k=%r10),>in3=%eax | ||
622 | movl 8(%r10),%eax | ||
623 | |||
624 | # qhasm: ((uint32 *)&x3)[0] = in4 | ||
625 | # asm 1: movl <in4=int64#3d,>x3=stack128#3 | ||
626 | # asm 2: movl <in4=%edx,>x3=32(%rsp) | ||
627 | movl %edx,32(%rsp) | ||
628 | |||
629 | # qhasm: ((uint32 *)&x3)[1] = in9 | ||
630 | # asm 1: movl <in9=int64#4d,4+<x3=stack128#3 | ||
631 | # asm 2: movl <in9=%ecx,4+<x3=32(%rsp) | ||
632 | movl %ecx,4+32(%rsp) | ||
633 | |||
634 | # qhasm: ((uint32 *)&x3)[2] = in14 | ||
635 | # asm 1: movl <in14=int64#5d,8+<x3=stack128#3 | ||
636 | # asm 2: movl <in14=%r8d,8+<x3=32(%rsp) | ||
637 | movl %r8d,8+32(%rsp) | ||
638 | |||
639 | # qhasm: ((uint32 *)&x3)[3] = in3 | ||
640 | # asm 1: movl <in3=int64#7d,12+<x3=stack128#3 | ||
641 | # asm 2: movl <in3=%eax,12+<x3=32(%rsp) | ||
642 | movl %eax,12+32(%rsp) | ||
643 | |||
644 | # qhasm: in0 = 1634760805 | ||
645 | # asm 1: mov $1634760805,>in0=int64#3 | ||
646 | # asm 2: mov $1634760805,>in0=%rdx | ||
647 | mov $1634760805,%rdx | ||
648 | |||
649 | # qhasm: in5 = 857760878 | ||
650 | # asm 1: mov $857760878,>in5=int64#4 | ||
651 | # asm 2: mov $857760878,>in5=%rcx | ||
652 | mov $857760878,%rcx | ||
653 | |||
654 | # qhasm: in10 = 2036477234 | ||
655 | # asm 1: mov $2036477234,>in10=int64#5 | ||
656 | # asm 2: mov $2036477234,>in10=%r8 | ||
657 | mov $2036477234,%r8 | ||
658 | |||
659 | # qhasm: in15 = 1797285236 | ||
660 | # asm 1: mov $1797285236,>in15=int64#7 | ||
661 | # asm 2: mov $1797285236,>in15=%rax | ||
662 | mov $1797285236,%rax | ||
663 | |||
664 | # qhasm: ((uint32 *)&x0)[0] = in0 | ||
665 | # asm 1: movl <in0=int64#3d,>x0=stack128#4 | ||
666 | # asm 2: movl <in0=%edx,>x0=48(%rsp) | ||
667 | movl %edx,48(%rsp) | ||
668 | |||
669 | # qhasm: ((uint32 *)&x0)[1] = in5 | ||
670 | # asm 1: movl <in5=int64#4d,4+<x0=stack128#4 | ||
671 | # asm 2: movl <in5=%ecx,4+<x0=48(%rsp) | ||
672 | movl %ecx,4+48(%rsp) | ||
673 | |||
674 | # qhasm: ((uint32 *)&x0)[2] = in10 | ||
675 | # asm 1: movl <in10=int64#5d,8+<x0=stack128#4 | ||
676 | # asm 2: movl <in10=%r8d,8+<x0=48(%rsp) | ||
677 | movl %r8d,8+48(%rsp) | ||
678 | |||
679 | # qhasm: ((uint32 *)&x0)[3] = in15 | ||
680 | # asm 1: movl <in15=int64#7d,12+<x0=stack128#4 | ||
681 | # asm 2: movl <in15=%eax,12+<x0=48(%rsp) | ||
682 | movl %eax,12+48(%rsp) | ||
683 | |||
684 | # qhasm: unsigned<? bytes - 256 | ||
685 | # asm 1: cmp $256,<bytes=int64#6 | ||
686 | # asm 2: cmp $256,<bytes=%r9 | ||
687 | cmp $256,%r9 | ||
688 | # comment:fp stack unchanged by jump | ||
689 | |||
690 | # qhasm: goto bytesbetween1and255 if unsigned< | ||
691 | jb ._bytesbetween1and255 | ||
692 | |||
693 | # qhasm: z0 = x0 | ||
694 | # asm 1: movdqa <x0=stack128#4,>z0=int6464#1 | ||
695 | # asm 2: movdqa <x0=48(%rsp),>z0=%xmm0 | ||
696 | movdqa 48(%rsp),%xmm0 | ||
697 | |||
698 | # qhasm: z5 = z0[1,1,1,1] | ||
699 | # asm 1: pshufd $0x55,<z0=int6464#1,>z5=int6464#2 | ||
700 | # asm 2: pshufd $0x55,<z0=%xmm0,>z5=%xmm1 | ||
701 | pshufd $0x55,%xmm0,%xmm1 | ||
702 | |||
703 | # qhasm: z10 = z0[2,2,2,2] | ||
704 | # asm 1: pshufd $0xaa,<z0=int6464#1,>z10=int6464#3 | ||
705 | # asm 2: pshufd $0xaa,<z0=%xmm0,>z10=%xmm2 | ||
706 | pshufd $0xaa,%xmm0,%xmm2 | ||
707 | |||
708 | # qhasm: z15 = z0[3,3,3,3] | ||
709 | # asm 1: pshufd $0xff,<z0=int6464#1,>z15=int6464#4 | ||
710 | # asm 2: pshufd $0xff,<z0=%xmm0,>z15=%xmm3 | ||
711 | pshufd $0xff,%xmm0,%xmm3 | ||
712 | |||
713 | # qhasm: z0 = z0[0,0,0,0] | ||
714 | # asm 1: pshufd $0x00,<z0=int6464#1,>z0=int6464#1 | ||
715 | # asm 2: pshufd $0x00,<z0=%xmm0,>z0=%xmm0 | ||
716 | pshufd $0x00,%xmm0,%xmm0 | ||
717 | |||
718 | # qhasm: orig5 = z5 | ||
719 | # asm 1: movdqa <z5=int6464#2,>orig5=stack128#5 | ||
720 | # asm 2: movdqa <z5=%xmm1,>orig5=64(%rsp) | ||
721 | movdqa %xmm1,64(%rsp) | ||
722 | |||
723 | # qhasm: orig10 = z10 | ||
724 | # asm 1: movdqa <z10=int6464#3,>orig10=stack128#6 | ||
725 | # asm 2: movdqa <z10=%xmm2,>orig10=80(%rsp) | ||
726 | movdqa %xmm2,80(%rsp) | ||
727 | |||
728 | # qhasm: orig15 = z15 | ||
729 | # asm 1: movdqa <z15=int6464#4,>orig15=stack128#7 | ||
730 | # asm 2: movdqa <z15=%xmm3,>orig15=96(%rsp) | ||
731 | movdqa %xmm3,96(%rsp) | ||
732 | |||
733 | # qhasm: orig0 = z0 | ||
734 | # asm 1: movdqa <z0=int6464#1,>orig0=stack128#8 | ||
735 | # asm 2: movdqa <z0=%xmm0,>orig0=112(%rsp) | ||
736 | movdqa %xmm0,112(%rsp) | ||
737 | |||
738 | # qhasm: z1 = x1 | ||
739 | # asm 1: movdqa <x1=stack128#1,>z1=int6464#1 | ||
740 | # asm 2: movdqa <x1=0(%rsp),>z1=%xmm0 | ||
741 | movdqa 0(%rsp),%xmm0 | ||
742 | |||
743 | # qhasm: z6 = z1[2,2,2,2] | ||
744 | # asm 1: pshufd $0xaa,<z1=int6464#1,>z6=int6464#2 | ||
745 | # asm 2: pshufd $0xaa,<z1=%xmm0,>z6=%xmm1 | ||
746 | pshufd $0xaa,%xmm0,%xmm1 | ||
747 | |||
748 | # qhasm: z11 = z1[3,3,3,3] | ||
749 | # asm 1: pshufd $0xff,<z1=int6464#1,>z11=int6464#3 | ||
750 | # asm 2: pshufd $0xff,<z1=%xmm0,>z11=%xmm2 | ||
751 | pshufd $0xff,%xmm0,%xmm2 | ||
752 | |||
753 | # qhasm: z12 = z1[0,0,0,0] | ||
754 | # asm 1: pshufd $0x00,<z1=int6464#1,>z12=int6464#4 | ||
755 | # asm 2: pshufd $0x00,<z1=%xmm0,>z12=%xmm3 | ||
756 | pshufd $0x00,%xmm0,%xmm3 | ||
757 | |||
758 | # qhasm: z1 = z1[1,1,1,1] | ||
759 | # asm 1: pshufd $0x55,<z1=int6464#1,>z1=int6464#1 | ||
760 | # asm 2: pshufd $0x55,<z1=%xmm0,>z1=%xmm0 | ||
761 | pshufd $0x55,%xmm0,%xmm0 | ||
762 | |||
763 | # qhasm: orig6 = z6 | ||
764 | # asm 1: movdqa <z6=int6464#2,>orig6=stack128#9 | ||
765 | # asm 2: movdqa <z6=%xmm1,>orig6=128(%rsp) | ||
766 | movdqa %xmm1,128(%rsp) | ||
767 | |||
768 | # qhasm: orig11 = z11 | ||
769 | # asm 1: movdqa <z11=int6464#3,>orig11=stack128#10 | ||
770 | # asm 2: movdqa <z11=%xmm2,>orig11=144(%rsp) | ||
771 | movdqa %xmm2,144(%rsp) | ||
772 | |||
773 | # qhasm: orig12 = z12 | ||
774 | # asm 1: movdqa <z12=int6464#4,>orig12=stack128#11 | ||
775 | # asm 2: movdqa <z12=%xmm3,>orig12=160(%rsp) | ||
776 | movdqa %xmm3,160(%rsp) | ||
777 | |||
778 | # qhasm: orig1 = z1 | ||
779 | # asm 1: movdqa <z1=int6464#1,>orig1=stack128#12 | ||
780 | # asm 2: movdqa <z1=%xmm0,>orig1=176(%rsp) | ||
781 | movdqa %xmm0,176(%rsp) | ||
782 | |||
783 | # qhasm: z2 = x2 | ||
784 | # asm 1: movdqa <x2=stack128#2,>z2=int6464#1 | ||
785 | # asm 2: movdqa <x2=16(%rsp),>z2=%xmm0 | ||
786 | movdqa 16(%rsp),%xmm0 | ||
787 | |||
788 | # qhasm: z7 = z2[3,3,3,3] | ||
789 | # asm 1: pshufd $0xff,<z2=int6464#1,>z7=int6464#2 | ||
790 | # asm 2: pshufd $0xff,<z2=%xmm0,>z7=%xmm1 | ||
791 | pshufd $0xff,%xmm0,%xmm1 | ||
792 | |||
793 | # qhasm: z13 = z2[1,1,1,1] | ||
794 | # asm 1: pshufd $0x55,<z2=int6464#1,>z13=int6464#3 | ||
795 | # asm 2: pshufd $0x55,<z2=%xmm0,>z13=%xmm2 | ||
796 | pshufd $0x55,%xmm0,%xmm2 | ||
797 | |||
798 | # qhasm: z2 = z2[2,2,2,2] | ||
799 | # asm 1: pshufd $0xaa,<z2=int6464#1,>z2=int6464#1 | ||
800 | # asm 2: pshufd $0xaa,<z2=%xmm0,>z2=%xmm0 | ||
801 | pshufd $0xaa,%xmm0,%xmm0 | ||
802 | |||
803 | # qhasm: orig7 = z7 | ||
804 | # asm 1: movdqa <z7=int6464#2,>orig7=stack128#13 | ||
805 | # asm 2: movdqa <z7=%xmm1,>orig7=192(%rsp) | ||
806 | movdqa %xmm1,192(%rsp) | ||
807 | |||
808 | # qhasm: orig13 = z13 | ||
809 | # asm 1: movdqa <z13=int6464#3,>orig13=stack128#14 | ||
810 | # asm 2: movdqa <z13=%xmm2,>orig13=208(%rsp) | ||
811 | movdqa %xmm2,208(%rsp) | ||
812 | |||
813 | # qhasm: orig2 = z2 | ||
814 | # asm 1: movdqa <z2=int6464#1,>orig2=stack128#15 | ||
815 | # asm 2: movdqa <z2=%xmm0,>orig2=224(%rsp) | ||
816 | movdqa %xmm0,224(%rsp) | ||
817 | |||
818 | # qhasm: z3 = x3 | ||
819 | # asm 1: movdqa <x3=stack128#3,>z3=int6464#1 | ||
820 | # asm 2: movdqa <x3=32(%rsp),>z3=%xmm0 | ||
821 | movdqa 32(%rsp),%xmm0 | ||
822 | |||
823 | # qhasm: z4 = z3[0,0,0,0] | ||
824 | # asm 1: pshufd $0x00,<z3=int6464#1,>z4=int6464#2 | ||
825 | # asm 2: pshufd $0x00,<z3=%xmm0,>z4=%xmm1 | ||
826 | pshufd $0x00,%xmm0,%xmm1 | ||
827 | |||
828 | # qhasm: z14 = z3[2,2,2,2] | ||
829 | # asm 1: pshufd $0xaa,<z3=int6464#1,>z14=int6464#3 | ||
830 | # asm 2: pshufd $0xaa,<z3=%xmm0,>z14=%xmm2 | ||
831 | pshufd $0xaa,%xmm0,%xmm2 | ||
832 | |||
833 | # qhasm: z3 = z3[3,3,3,3] | ||
834 | # asm 1: pshufd $0xff,<z3=int6464#1,>z3=int6464#1 | ||
835 | # asm 2: pshufd $0xff,<z3=%xmm0,>z3=%xmm0 | ||
836 | pshufd $0xff,%xmm0,%xmm0 | ||
837 | |||
838 | # qhasm: orig4 = z4 | ||
839 | # asm 1: movdqa <z4=int6464#2,>orig4=stack128#16 | ||
840 | # asm 2: movdqa <z4=%xmm1,>orig4=240(%rsp) | ||
841 | movdqa %xmm1,240(%rsp) | ||
842 | |||
843 | # qhasm: orig14 = z14 | ||
844 | # asm 1: movdqa <z14=int6464#3,>orig14=stack128#17 | ||
845 | # asm 2: movdqa <z14=%xmm2,>orig14=256(%rsp) | ||
846 | movdqa %xmm2,256(%rsp) | ||
847 | |||
848 | # qhasm: orig3 = z3 | ||
849 | # asm 1: movdqa <z3=int6464#1,>orig3=stack128#18 | ||
850 | # asm 2: movdqa <z3=%xmm0,>orig3=272(%rsp) | ||
851 | movdqa %xmm0,272(%rsp) | ||
852 | |||
853 | # qhasm: bytesatleast256: | ||
854 | ._bytesatleast256: | ||
855 | |||
856 | # qhasm: in8 = ((uint32 *)&x2)[0] | ||
857 | # asm 1: movl <x2=stack128#2,>in8=int64#3d | ||
858 | # asm 2: movl <x2=16(%rsp),>in8=%edx | ||
859 | movl 16(%rsp),%edx | ||
860 | |||
861 | # qhasm: in9 = ((uint32 *)&x3)[1] | ||
862 | # asm 1: movl 4+<x3=stack128#3,>in9=int64#4d | ||
863 | # asm 2: movl 4+<x3=32(%rsp),>in9=%ecx | ||
864 | movl 4+32(%rsp),%ecx | ||
865 | |||
866 | # qhasm: ((uint32 *) &orig8)[0] = in8 | ||
867 | # asm 1: movl <in8=int64#3d,>orig8=stack128#19 | ||
868 | # asm 2: movl <in8=%edx,>orig8=288(%rsp) | ||
869 | movl %edx,288(%rsp) | ||
870 | |||
871 | # qhasm: ((uint32 *) &orig9)[0] = in9 | ||
872 | # asm 1: movl <in9=int64#4d,>orig9=stack128#20 | ||
873 | # asm 2: movl <in9=%ecx,>orig9=304(%rsp) | ||
874 | movl %ecx,304(%rsp) | ||
875 | |||
876 | # qhasm: in8 += 1 | ||
877 | # asm 1: add $1,<in8=int64#3 | ||
878 | # asm 2: add $1,<in8=%rdx | ||
879 | add $1,%rdx | ||
880 | |||
881 | # qhasm: in9 <<= 32 | ||
882 | # asm 1: shl $32,<in9=int64#4 | ||
883 | # asm 2: shl $32,<in9=%rcx | ||
884 | shl $32,%rcx | ||
885 | |||
886 | # qhasm: in8 += in9 | ||
887 | # asm 1: add <in9=int64#4,<in8=int64#3 | ||
888 | # asm 2: add <in9=%rcx,<in8=%rdx | ||
889 | add %rcx,%rdx | ||
890 | |||
891 | # qhasm: in9 = in8 | ||
892 | # asm 1: mov <in8=int64#3,>in9=int64#4 | ||
893 | # asm 2: mov <in8=%rdx,>in9=%rcx | ||
894 | mov %rdx,%rcx | ||
895 | |||
896 | # qhasm: (uint64) in9 >>= 32 | ||
897 | # asm 1: shr $32,<in9=int64#4 | ||
898 | # asm 2: shr $32,<in9=%rcx | ||
899 | shr $32,%rcx | ||
900 | |||
901 | # qhasm: ((uint32 *) &orig8)[1] = in8 | ||
902 | # asm 1: movl <in8=int64#3d,4+<orig8=stack128#19 | ||
903 | # asm 2: movl <in8=%edx,4+<orig8=288(%rsp) | ||
904 | movl %edx,4+288(%rsp) | ||
905 | |||
906 | # qhasm: ((uint32 *) &orig9)[1] = in9 | ||
907 | # asm 1: movl <in9=int64#4d,4+<orig9=stack128#20 | ||
908 | # asm 2: movl <in9=%ecx,4+<orig9=304(%rsp) | ||
909 | movl %ecx,4+304(%rsp) | ||
910 | |||
911 | # qhasm: in8 += 1 | ||
912 | # asm 1: add $1,<in8=int64#3 | ||
913 | # asm 2: add $1,<in8=%rdx | ||
914 | add $1,%rdx | ||
915 | |||
916 | # qhasm: in9 <<= 32 | ||
917 | # asm 1: shl $32,<in9=int64#4 | ||
918 | # asm 2: shl $32,<in9=%rcx | ||
919 | shl $32,%rcx | ||
920 | |||
921 | # qhasm: in8 += in9 | ||
922 | # asm 1: add <in9=int64#4,<in8=int64#3 | ||
923 | # asm 2: add <in9=%rcx,<in8=%rdx | ||
924 | add %rcx,%rdx | ||
925 | |||
926 | # qhasm: in9 = in8 | ||
927 | # asm 1: mov <in8=int64#3,>in9=int64#4 | ||
928 | # asm 2: mov <in8=%rdx,>in9=%rcx | ||
929 | mov %rdx,%rcx | ||
930 | |||
931 | # qhasm: (uint64) in9 >>= 32 | ||
932 | # asm 1: shr $32,<in9=int64#4 | ||
933 | # asm 2: shr $32,<in9=%rcx | ||
934 | shr $32,%rcx | ||
935 | |||
936 | # qhasm: ((uint32 *) &orig8)[2] = in8 | ||
937 | # asm 1: movl <in8=int64#3d,8+<orig8=stack128#19 | ||
938 | # asm 2: movl <in8=%edx,8+<orig8=288(%rsp) | ||
939 | movl %edx,8+288(%rsp) | ||
940 | |||
941 | # qhasm: ((uint32 *) &orig9)[2] = in9 | ||
942 | # asm 1: movl <in9=int64#4d,8+<orig9=stack128#20 | ||
943 | # asm 2: movl <in9=%ecx,8+<orig9=304(%rsp) | ||
944 | movl %ecx,8+304(%rsp) | ||
945 | |||
946 | # qhasm: in8 += 1 | ||
947 | # asm 1: add $1,<in8=int64#3 | ||
948 | # asm 2: add $1,<in8=%rdx | ||
949 | add $1,%rdx | ||
950 | |||
951 | # qhasm: in9 <<= 32 | ||
952 | # asm 1: shl $32,<in9=int64#4 | ||
953 | # asm 2: shl $32,<in9=%rcx | ||
954 | shl $32,%rcx | ||
955 | |||
956 | # qhasm: in8 += in9 | ||
957 | # asm 1: add <in9=int64#4,<in8=int64#3 | ||
958 | # asm 2: add <in9=%rcx,<in8=%rdx | ||
959 | add %rcx,%rdx | ||
960 | |||
961 | # qhasm: in9 = in8 | ||
962 | # asm 1: mov <in8=int64#3,>in9=int64#4 | ||
963 | # asm 2: mov <in8=%rdx,>in9=%rcx | ||
964 | mov %rdx,%rcx | ||
965 | |||
966 | # qhasm: (uint64) in9 >>= 32 | ||
967 | # asm 1: shr $32,<in9=int64#4 | ||
968 | # asm 2: shr $32,<in9=%rcx | ||
969 | shr $32,%rcx | ||
970 | |||
971 | # qhasm: ((uint32 *) &orig8)[3] = in8 | ||
972 | # asm 1: movl <in8=int64#3d,12+<orig8=stack128#19 | ||
973 | # asm 2: movl <in8=%edx,12+<orig8=288(%rsp) | ||
974 | movl %edx,12+288(%rsp) | ||
975 | |||
976 | # qhasm: ((uint32 *) &orig9)[3] = in9 | ||
977 | # asm 1: movl <in9=int64#4d,12+<orig9=stack128#20 | ||
978 | # asm 2: movl <in9=%ecx,12+<orig9=304(%rsp) | ||
979 | movl %ecx,12+304(%rsp) | ||
980 | |||
981 | # qhasm: in8 += 1 | ||
982 | # asm 1: add $1,<in8=int64#3 | ||
983 | # asm 2: add $1,<in8=%rdx | ||
984 | add $1,%rdx | ||
985 | |||
986 | # qhasm: in9 <<= 32 | ||
987 | # asm 1: shl $32,<in9=int64#4 | ||
988 | # asm 2: shl $32,<in9=%rcx | ||
989 | shl $32,%rcx | ||
990 | |||
991 | # qhasm: in8 += in9 | ||
992 | # asm 1: add <in9=int64#4,<in8=int64#3 | ||
993 | # asm 2: add <in9=%rcx,<in8=%rdx | ||
994 | add %rcx,%rdx | ||
995 | |||
996 | # qhasm: in9 = in8 | ||
997 | # asm 1: mov <in8=int64#3,>in9=int64#4 | ||
998 | # asm 2: mov <in8=%rdx,>in9=%rcx | ||
999 | mov %rdx,%rcx | ||
1000 | |||
1001 | # qhasm: (uint64) in9 >>= 32 | ||
1002 | # asm 1: shr $32,<in9=int64#4 | ||
1003 | # asm 2: shr $32,<in9=%rcx | ||
1004 | shr $32,%rcx | ||
1005 | |||
1006 | # qhasm: ((uint32 *)&x2)[0] = in8 | ||
1007 | # asm 1: movl <in8=int64#3d,>x2=stack128#2 | ||
1008 | # asm 2: movl <in8=%edx,>x2=16(%rsp) | ||
1009 | movl %edx,16(%rsp) | ||
1010 | |||
1011 | # qhasm: ((uint32 *)&x3)[1] = in9 | ||
1012 | # asm 1: movl <in9=int64#4d,4+<x3=stack128#3 | ||
1013 | # asm 2: movl <in9=%ecx,4+<x3=32(%rsp) | ||
1014 | movl %ecx,4+32(%rsp) | ||
1015 | |||
1016 | # qhasm: bytes_backup = bytes | ||
1017 | # asm 1: movq <bytes=int64#6,>bytes_backup=stack64#8 | ||
1018 | # asm 2: movq <bytes=%r9,>bytes_backup=408(%rsp) | ||
1019 | movq %r9,408(%rsp) | ||
1020 | |||
1021 | # qhasm: i = 20 | ||
1022 | # asm 1: mov $20,>i=int64#3 | ||
1023 | # asm 2: mov $20,>i=%rdx | ||
1024 | mov $20,%rdx | ||
1025 | |||
1026 | # qhasm: z5 = orig5 | ||
1027 | # asm 1: movdqa <orig5=stack128#5,>z5=int6464#1 | ||
1028 | # asm 2: movdqa <orig5=64(%rsp),>z5=%xmm0 | ||
1029 | movdqa 64(%rsp),%xmm0 | ||
1030 | |||
1031 | # qhasm: z10 = orig10 | ||
1032 | # asm 1: movdqa <orig10=stack128#6,>z10=int6464#2 | ||
1033 | # asm 2: movdqa <orig10=80(%rsp),>z10=%xmm1 | ||
1034 | movdqa 80(%rsp),%xmm1 | ||
1035 | |||
1036 | # qhasm: z15 = orig15 | ||
1037 | # asm 1: movdqa <orig15=stack128#7,>z15=int6464#3 | ||
1038 | # asm 2: movdqa <orig15=96(%rsp),>z15=%xmm2 | ||
1039 | movdqa 96(%rsp),%xmm2 | ||
1040 | |||
1041 | # qhasm: z14 = orig14 | ||
1042 | # asm 1: movdqa <orig14=stack128#17,>z14=int6464#4 | ||
1043 | # asm 2: movdqa <orig14=256(%rsp),>z14=%xmm3 | ||
1044 | movdqa 256(%rsp),%xmm3 | ||
1045 | |||
1046 | # qhasm: z3 = orig3 | ||
1047 | # asm 1: movdqa <orig3=stack128#18,>z3=int6464#5 | ||
1048 | # asm 2: movdqa <orig3=272(%rsp),>z3=%xmm4 | ||
1049 | movdqa 272(%rsp),%xmm4 | ||
1050 | |||
1051 | # qhasm: z6 = orig6 | ||
1052 | # asm 1: movdqa <orig6=stack128#9,>z6=int6464#6 | ||
1053 | # asm 2: movdqa <orig6=128(%rsp),>z6=%xmm5 | ||
1054 | movdqa 128(%rsp),%xmm5 | ||
1055 | |||
1056 | # qhasm: z11 = orig11 | ||
1057 | # asm 1: movdqa <orig11=stack128#10,>z11=int6464#7 | ||
1058 | # asm 2: movdqa <orig11=144(%rsp),>z11=%xmm6 | ||
1059 | movdqa 144(%rsp),%xmm6 | ||
1060 | |||
1061 | # qhasm: z1 = orig1 | ||
1062 | # asm 1: movdqa <orig1=stack128#12,>z1=int6464#8 | ||
1063 | # asm 2: movdqa <orig1=176(%rsp),>z1=%xmm7 | ||
1064 | movdqa 176(%rsp),%xmm7 | ||
1065 | |||
1066 | # qhasm: z7 = orig7 | ||
1067 | # asm 1: movdqa <orig7=stack128#13,>z7=int6464#9 | ||
1068 | # asm 2: movdqa <orig7=192(%rsp),>z7=%xmm8 | ||
1069 | movdqa 192(%rsp),%xmm8 | ||
1070 | |||
1071 | # qhasm: z13 = orig13 | ||
1072 | # asm 1: movdqa <orig13=stack128#14,>z13=int6464#10 | ||
1073 | # asm 2: movdqa <orig13=208(%rsp),>z13=%xmm9 | ||
1074 | movdqa 208(%rsp),%xmm9 | ||
1075 | |||
1076 | # qhasm: z2 = orig2 | ||
1077 | # asm 1: movdqa <orig2=stack128#15,>z2=int6464#11 | ||
1078 | # asm 2: movdqa <orig2=224(%rsp),>z2=%xmm10 | ||
1079 | movdqa 224(%rsp),%xmm10 | ||
1080 | |||
1081 | # qhasm: z9 = orig9 | ||
1082 | # asm 1: movdqa <orig9=stack128#20,>z9=int6464#12 | ||
1083 | # asm 2: movdqa <orig9=304(%rsp),>z9=%xmm11 | ||
1084 | movdqa 304(%rsp),%xmm11 | ||
1085 | |||
1086 | # qhasm: z0 = orig0 | ||
1087 | # asm 1: movdqa <orig0=stack128#8,>z0=int6464#13 | ||
1088 | # asm 2: movdqa <orig0=112(%rsp),>z0=%xmm12 | ||
1089 | movdqa 112(%rsp),%xmm12 | ||
1090 | |||
1091 | # qhasm: z12 = orig12 | ||
1092 | # asm 1: movdqa <orig12=stack128#11,>z12=int6464#14 | ||
1093 | # asm 2: movdqa <orig12=160(%rsp),>z12=%xmm13 | ||
1094 | movdqa 160(%rsp),%xmm13 | ||
1095 | |||
1096 | # qhasm: z4 = orig4 | ||
1097 | # asm 1: movdqa <orig4=stack128#16,>z4=int6464#15 | ||
1098 | # asm 2: movdqa <orig4=240(%rsp),>z4=%xmm14 | ||
1099 | movdqa 240(%rsp),%xmm14 | ||
1100 | |||
1101 | # qhasm: z8 = orig8 | ||
1102 | # asm 1: movdqa <orig8=stack128#19,>z8=int6464#16 | ||
1103 | # asm 2: movdqa <orig8=288(%rsp),>z8=%xmm15 | ||
1104 | movdqa 288(%rsp),%xmm15 | ||
1105 | |||
1106 | # qhasm: mainloop1: | ||
1107 | ._mainloop1: | ||
1108 | |||
1109 | # qhasm: z10_stack = z10 | ||
1110 | # asm 1: movdqa <z10=int6464#2,>z10_stack=stack128#21 | ||
1111 | # asm 2: movdqa <z10=%xmm1,>z10_stack=320(%rsp) | ||
1112 | movdqa %xmm1,320(%rsp) | ||
1113 | |||
1114 | # qhasm: z15_stack = z15 | ||
1115 | # asm 1: movdqa <z15=int6464#3,>z15_stack=stack128#22 | ||
1116 | # asm 2: movdqa <z15=%xmm2,>z15_stack=336(%rsp) | ||
1117 | movdqa %xmm2,336(%rsp) | ||
1118 | |||
1119 | # qhasm: y4 = z12 | ||
1120 | # asm 1: movdqa <z12=int6464#14,>y4=int6464#2 | ||
1121 | # asm 2: movdqa <z12=%xmm13,>y4=%xmm1 | ||
1122 | movdqa %xmm13,%xmm1 | ||
1123 | |||
1124 | # qhasm: uint32323232 y4 += z0 | ||
1125 | # asm 1: paddd <z0=int6464#13,<y4=int6464#2 | ||
1126 | # asm 2: paddd <z0=%xmm12,<y4=%xmm1 | ||
1127 | paddd %xmm12,%xmm1 | ||
1128 | |||
1129 | # qhasm: r4 = y4 | ||
1130 | # asm 1: movdqa <y4=int6464#2,>r4=int6464#3 | ||
1131 | # asm 2: movdqa <y4=%xmm1,>r4=%xmm2 | ||
1132 | movdqa %xmm1,%xmm2 | ||
1133 | |||
1134 | # qhasm: uint32323232 y4 <<= 7 | ||
1135 | # asm 1: pslld $7,<y4=int6464#2 | ||
1136 | # asm 2: pslld $7,<y4=%xmm1 | ||
1137 | pslld $7,%xmm1 | ||
1138 | |||
1139 | # qhasm: z4 ^= y4 | ||
1140 | # asm 1: pxor <y4=int6464#2,<z4=int6464#15 | ||
1141 | # asm 2: pxor <y4=%xmm1,<z4=%xmm14 | ||
1142 | pxor %xmm1,%xmm14 | ||
1143 | |||
1144 | # qhasm: uint32323232 r4 >>= 25 | ||
1145 | # asm 1: psrld $25,<r4=int6464#3 | ||
1146 | # asm 2: psrld $25,<r4=%xmm2 | ||
1147 | psrld $25,%xmm2 | ||
1148 | |||
1149 | # qhasm: z4 ^= r4 | ||
1150 | # asm 1: pxor <r4=int6464#3,<z4=int6464#15 | ||
1151 | # asm 2: pxor <r4=%xmm2,<z4=%xmm14 | ||
1152 | pxor %xmm2,%xmm14 | ||
1153 | |||
1154 | # qhasm: y9 = z1 | ||
1155 | # asm 1: movdqa <z1=int6464#8,>y9=int6464#2 | ||
1156 | # asm 2: movdqa <z1=%xmm7,>y9=%xmm1 | ||
1157 | movdqa %xmm7,%xmm1 | ||
1158 | |||
1159 | # qhasm: uint32323232 y9 += z5 | ||
1160 | # asm 1: paddd <z5=int6464#1,<y9=int6464#2 | ||
1161 | # asm 2: paddd <z5=%xmm0,<y9=%xmm1 | ||
1162 | paddd %xmm0,%xmm1 | ||
1163 | |||
1164 | # qhasm: r9 = y9 | ||
1165 | # asm 1: movdqa <y9=int6464#2,>r9=int6464#3 | ||
1166 | # asm 2: movdqa <y9=%xmm1,>r9=%xmm2 | ||
1167 | movdqa %xmm1,%xmm2 | ||
1168 | |||
1169 | # qhasm: uint32323232 y9 <<= 7 | ||
1170 | # asm 1: pslld $7,<y9=int6464#2 | ||
1171 | # asm 2: pslld $7,<y9=%xmm1 | ||
1172 | pslld $7,%xmm1 | ||
1173 | |||
1174 | # qhasm: z9 ^= y9 | ||
1175 | # asm 1: pxor <y9=int6464#2,<z9=int6464#12 | ||
1176 | # asm 2: pxor <y9=%xmm1,<z9=%xmm11 | ||
1177 | pxor %xmm1,%xmm11 | ||
1178 | |||
1179 | # qhasm: uint32323232 r9 >>= 25 | ||
1180 | # asm 1: psrld $25,<r9=int6464#3 | ||
1181 | # asm 2: psrld $25,<r9=%xmm2 | ||
1182 | psrld $25,%xmm2 | ||
1183 | |||
1184 | # qhasm: z9 ^= r9 | ||
1185 | # asm 1: pxor <r9=int6464#3,<z9=int6464#12 | ||
1186 | # asm 2: pxor <r9=%xmm2,<z9=%xmm11 | ||
1187 | pxor %xmm2,%xmm11 | ||
1188 | |||
1189 | # qhasm: y8 = z0 | ||
1190 | # asm 1: movdqa <z0=int6464#13,>y8=int6464#2 | ||
1191 | # asm 2: movdqa <z0=%xmm12,>y8=%xmm1 | ||
1192 | movdqa %xmm12,%xmm1 | ||
1193 | |||
1194 | # qhasm: uint32323232 y8 += z4 | ||
1195 | # asm 1: paddd <z4=int6464#15,<y8=int6464#2 | ||
1196 | # asm 2: paddd <z4=%xmm14,<y8=%xmm1 | ||
1197 | paddd %xmm14,%xmm1 | ||
1198 | |||
1199 | # qhasm: r8 = y8 | ||
1200 | # asm 1: movdqa <y8=int6464#2,>r8=int6464#3 | ||
1201 | # asm 2: movdqa <y8=%xmm1,>r8=%xmm2 | ||
1202 | movdqa %xmm1,%xmm2 | ||
1203 | |||
1204 | # qhasm: uint32323232 y8 <<= 9 | ||
1205 | # asm 1: pslld $9,<y8=int6464#2 | ||
1206 | # asm 2: pslld $9,<y8=%xmm1 | ||
1207 | pslld $9,%xmm1 | ||
1208 | |||
1209 | # qhasm: z8 ^= y8 | ||
1210 | # asm 1: pxor <y8=int6464#2,<z8=int6464#16 | ||
1211 | # asm 2: pxor <y8=%xmm1,<z8=%xmm15 | ||
1212 | pxor %xmm1,%xmm15 | ||
1213 | |||
1214 | # qhasm: uint32323232 r8 >>= 23 | ||
1215 | # asm 1: psrld $23,<r8=int6464#3 | ||
1216 | # asm 2: psrld $23,<r8=%xmm2 | ||
1217 | psrld $23,%xmm2 | ||
1218 | |||
1219 | # qhasm: z8 ^= r8 | ||
1220 | # asm 1: pxor <r8=int6464#3,<z8=int6464#16 | ||
1221 | # asm 2: pxor <r8=%xmm2,<z8=%xmm15 | ||
1222 | pxor %xmm2,%xmm15 | ||
1223 | |||
1224 | # qhasm: y13 = z5 | ||
1225 | # asm 1: movdqa <z5=int6464#1,>y13=int6464#2 | ||
1226 | # asm 2: movdqa <z5=%xmm0,>y13=%xmm1 | ||
1227 | movdqa %xmm0,%xmm1 | ||
1228 | |||
1229 | # qhasm: uint32323232 y13 += z9 | ||
1230 | # asm 1: paddd <z9=int6464#12,<y13=int6464#2 | ||
1231 | # asm 2: paddd <z9=%xmm11,<y13=%xmm1 | ||
1232 | paddd %xmm11,%xmm1 | ||
1233 | |||
1234 | # qhasm: r13 = y13 | ||
1235 | # asm 1: movdqa <y13=int6464#2,>r13=int6464#3 | ||
1236 | # asm 2: movdqa <y13=%xmm1,>r13=%xmm2 | ||
1237 | movdqa %xmm1,%xmm2 | ||
1238 | |||
1239 | # qhasm: uint32323232 y13 <<= 9 | ||
1240 | # asm 1: pslld $9,<y13=int6464#2 | ||
1241 | # asm 2: pslld $9,<y13=%xmm1 | ||
1242 | pslld $9,%xmm1 | ||
1243 | |||
1244 | # qhasm: z13 ^= y13 | ||
1245 | # asm 1: pxor <y13=int6464#2,<z13=int6464#10 | ||
1246 | # asm 2: pxor <y13=%xmm1,<z13=%xmm9 | ||
1247 | pxor %xmm1,%xmm9 | ||
1248 | |||
1249 | # qhasm: uint32323232 r13 >>= 23 | ||
1250 | # asm 1: psrld $23,<r13=int6464#3 | ||
1251 | # asm 2: psrld $23,<r13=%xmm2 | ||
1252 | psrld $23,%xmm2 | ||
1253 | |||
1254 | # qhasm: z13 ^= r13 | ||
1255 | # asm 1: pxor <r13=int6464#3,<z13=int6464#10 | ||
1256 | # asm 2: pxor <r13=%xmm2,<z13=%xmm9 | ||
1257 | pxor %xmm2,%xmm9 | ||
1258 | |||
1259 | # qhasm: y12 = z4 | ||
1260 | # asm 1: movdqa <z4=int6464#15,>y12=int6464#2 | ||
1261 | # asm 2: movdqa <z4=%xmm14,>y12=%xmm1 | ||
1262 | movdqa %xmm14,%xmm1 | ||
1263 | |||
1264 | # qhasm: uint32323232 y12 += z8 | ||
1265 | # asm 1: paddd <z8=int6464#16,<y12=int6464#2 | ||
1266 | # asm 2: paddd <z8=%xmm15,<y12=%xmm1 | ||
1267 | paddd %xmm15,%xmm1 | ||
1268 | |||
1269 | # qhasm: r12 = y12 | ||
1270 | # asm 1: movdqa <y12=int6464#2,>r12=int6464#3 | ||
1271 | # asm 2: movdqa <y12=%xmm1,>r12=%xmm2 | ||
1272 | movdqa %xmm1,%xmm2 | ||
1273 | |||
1274 | # qhasm: uint32323232 y12 <<= 13 | ||
1275 | # asm 1: pslld $13,<y12=int6464#2 | ||
1276 | # asm 2: pslld $13,<y12=%xmm1 | ||
1277 | pslld $13,%xmm1 | ||
1278 | |||
1279 | # qhasm: z12 ^= y12 | ||
1280 | # asm 1: pxor <y12=int6464#2,<z12=int6464#14 | ||
1281 | # asm 2: pxor <y12=%xmm1,<z12=%xmm13 | ||
1282 | pxor %xmm1,%xmm13 | ||
1283 | |||
1284 | # qhasm: uint32323232 r12 >>= 19 | ||
1285 | # asm 1: psrld $19,<r12=int6464#3 | ||
1286 | # asm 2: psrld $19,<r12=%xmm2 | ||
1287 | psrld $19,%xmm2 | ||
1288 | |||
1289 | # qhasm: z12 ^= r12 | ||
1290 | # asm 1: pxor <r12=int6464#3,<z12=int6464#14 | ||
1291 | # asm 2: pxor <r12=%xmm2,<z12=%xmm13 | ||
1292 | pxor %xmm2,%xmm13 | ||
1293 | |||
1294 | # qhasm: y1 = z9 | ||
1295 | # asm 1: movdqa <z9=int6464#12,>y1=int6464#2 | ||
1296 | # asm 2: movdqa <z9=%xmm11,>y1=%xmm1 | ||
1297 | movdqa %xmm11,%xmm1 | ||
1298 | |||
1299 | # qhasm: uint32323232 y1 += z13 | ||
1300 | # asm 1: paddd <z13=int6464#10,<y1=int6464#2 | ||
1301 | # asm 2: paddd <z13=%xmm9,<y1=%xmm1 | ||
1302 | paddd %xmm9,%xmm1 | ||
1303 | |||
1304 | # qhasm: r1 = y1 | ||
1305 | # asm 1: movdqa <y1=int6464#2,>r1=int6464#3 | ||
1306 | # asm 2: movdqa <y1=%xmm1,>r1=%xmm2 | ||
1307 | movdqa %xmm1,%xmm2 | ||
1308 | |||
1309 | # qhasm: uint32323232 y1 <<= 13 | ||
1310 | # asm 1: pslld $13,<y1=int6464#2 | ||
1311 | # asm 2: pslld $13,<y1=%xmm1 | ||
1312 | pslld $13,%xmm1 | ||
1313 | |||
1314 | # qhasm: z1 ^= y1 | ||
1315 | # asm 1: pxor <y1=int6464#2,<z1=int6464#8 | ||
1316 | # asm 2: pxor <y1=%xmm1,<z1=%xmm7 | ||
1317 | pxor %xmm1,%xmm7 | ||
1318 | |||
1319 | # qhasm: uint32323232 r1 >>= 19 | ||
1320 | # asm 1: psrld $19,<r1=int6464#3 | ||
1321 | # asm 2: psrld $19,<r1=%xmm2 | ||
1322 | psrld $19,%xmm2 | ||
1323 | |||
1324 | # qhasm: z1 ^= r1 | ||
1325 | # asm 1: pxor <r1=int6464#3,<z1=int6464#8 | ||
1326 | # asm 2: pxor <r1=%xmm2,<z1=%xmm7 | ||
1327 | pxor %xmm2,%xmm7 | ||
1328 | |||
1329 | # qhasm: y0 = z8 | ||
1330 | # asm 1: movdqa <z8=int6464#16,>y0=int6464#2 | ||
1331 | # asm 2: movdqa <z8=%xmm15,>y0=%xmm1 | ||
1332 | movdqa %xmm15,%xmm1 | ||
1333 | |||
1334 | # qhasm: uint32323232 y0 += z12 | ||
1335 | # asm 1: paddd <z12=int6464#14,<y0=int6464#2 | ||
1336 | # asm 2: paddd <z12=%xmm13,<y0=%xmm1 | ||
1337 | paddd %xmm13,%xmm1 | ||
1338 | |||
1339 | # qhasm: r0 = y0 | ||
1340 | # asm 1: movdqa <y0=int6464#2,>r0=int6464#3 | ||
1341 | # asm 2: movdqa <y0=%xmm1,>r0=%xmm2 | ||
1342 | movdqa %xmm1,%xmm2 | ||
1343 | |||
1344 | # qhasm: uint32323232 y0 <<= 18 | ||
1345 | # asm 1: pslld $18,<y0=int6464#2 | ||
1346 | # asm 2: pslld $18,<y0=%xmm1 | ||
1347 | pslld $18,%xmm1 | ||
1348 | |||
1349 | # qhasm: z0 ^= y0 | ||
1350 | # asm 1: pxor <y0=int6464#2,<z0=int6464#13 | ||
1351 | # asm 2: pxor <y0=%xmm1,<z0=%xmm12 | ||
1352 | pxor %xmm1,%xmm12 | ||
1353 | |||
1354 | # qhasm: uint32323232 r0 >>= 14 | ||
1355 | # asm 1: psrld $14,<r0=int6464#3 | ||
1356 | # asm 2: psrld $14,<r0=%xmm2 | ||
1357 | psrld $14,%xmm2 | ||
1358 | |||
1359 | # qhasm: z0 ^= r0 | ||
1360 | # asm 1: pxor <r0=int6464#3,<z0=int6464#13 | ||
1361 | # asm 2: pxor <r0=%xmm2,<z0=%xmm12 | ||
1362 | pxor %xmm2,%xmm12 | ||
1363 | |||
1364 | # qhasm: z10 = z10_stack | ||
1365 | # asm 1: movdqa <z10_stack=stack128#21,>z10=int6464#2 | ||
1366 | # asm 2: movdqa <z10_stack=320(%rsp),>z10=%xmm1 | ||
1367 | movdqa 320(%rsp),%xmm1 | ||
1368 | |||
1369 | # qhasm: z0_stack = z0 | ||
1370 | # asm 1: movdqa <z0=int6464#13,>z0_stack=stack128#21 | ||
1371 | # asm 2: movdqa <z0=%xmm12,>z0_stack=320(%rsp) | ||
1372 | movdqa %xmm12,320(%rsp) | ||
1373 | |||
1374 | # qhasm: y5 = z13 | ||
1375 | # asm 1: movdqa <z13=int6464#10,>y5=int6464#3 | ||
1376 | # asm 2: movdqa <z13=%xmm9,>y5=%xmm2 | ||
1377 | movdqa %xmm9,%xmm2 | ||
1378 | |||
1379 | # qhasm: uint32323232 y5 += z1 | ||
1380 | # asm 1: paddd <z1=int6464#8,<y5=int6464#3 | ||
1381 | # asm 2: paddd <z1=%xmm7,<y5=%xmm2 | ||
1382 | paddd %xmm7,%xmm2 | ||
1383 | |||
1384 | # qhasm: r5 = y5 | ||
1385 | # asm 1: movdqa <y5=int6464#3,>r5=int6464#13 | ||
1386 | # asm 2: movdqa <y5=%xmm2,>r5=%xmm12 | ||
1387 | movdqa %xmm2,%xmm12 | ||
1388 | |||
1389 | # qhasm: uint32323232 y5 <<= 18 | ||
1390 | # asm 1: pslld $18,<y5=int6464#3 | ||
1391 | # asm 2: pslld $18,<y5=%xmm2 | ||
1392 | pslld $18,%xmm2 | ||
1393 | |||
1394 | # qhasm: z5 ^= y5 | ||
1395 | # asm 1: pxor <y5=int6464#3,<z5=int6464#1 | ||
1396 | # asm 2: pxor <y5=%xmm2,<z5=%xmm0 | ||
1397 | pxor %xmm2,%xmm0 | ||
1398 | |||
1399 | # qhasm: uint32323232 r5 >>= 14 | ||
1400 | # asm 1: psrld $14,<r5=int6464#13 | ||
1401 | # asm 2: psrld $14,<r5=%xmm12 | ||
1402 | psrld $14,%xmm12 | ||
1403 | |||
1404 | # qhasm: z5 ^= r5 | ||
1405 | # asm 1: pxor <r5=int6464#13,<z5=int6464#1 | ||
1406 | # asm 2: pxor <r5=%xmm12,<z5=%xmm0 | ||
1407 | pxor %xmm12,%xmm0 | ||
1408 | |||
1409 | # qhasm: y14 = z6 | ||
1410 | # asm 1: movdqa <z6=int6464#6,>y14=int6464#3 | ||
1411 | # asm 2: movdqa <z6=%xmm5,>y14=%xmm2 | ||
1412 | movdqa %xmm5,%xmm2 | ||
1413 | |||
1414 | # qhasm: uint32323232 y14 += z10 | ||
1415 | # asm 1: paddd <z10=int6464#2,<y14=int6464#3 | ||
1416 | # asm 2: paddd <z10=%xmm1,<y14=%xmm2 | ||
1417 | paddd %xmm1,%xmm2 | ||
1418 | |||
1419 | # qhasm: r14 = y14 | ||
1420 | # asm 1: movdqa <y14=int6464#3,>r14=int6464#13 | ||
1421 | # asm 2: movdqa <y14=%xmm2,>r14=%xmm12 | ||
1422 | movdqa %xmm2,%xmm12 | ||
1423 | |||
1424 | # qhasm: uint32323232 y14 <<= 7 | ||
1425 | # asm 1: pslld $7,<y14=int6464#3 | ||
1426 | # asm 2: pslld $7,<y14=%xmm2 | ||
1427 | pslld $7,%xmm2 | ||
1428 | |||
1429 | # qhasm: z14 ^= y14 | ||
1430 | # asm 1: pxor <y14=int6464#3,<z14=int6464#4 | ||
1431 | # asm 2: pxor <y14=%xmm2,<z14=%xmm3 | ||
1432 | pxor %xmm2,%xmm3 | ||
1433 | |||
1434 | # qhasm: uint32323232 r14 >>= 25 | ||
1435 | # asm 1: psrld $25,<r14=int6464#13 | ||
1436 | # asm 2: psrld $25,<r14=%xmm12 | ||
1437 | psrld $25,%xmm12 | ||
1438 | |||
1439 | # qhasm: z14 ^= r14 | ||
1440 | # asm 1: pxor <r14=int6464#13,<z14=int6464#4 | ||
1441 | # asm 2: pxor <r14=%xmm12,<z14=%xmm3 | ||
1442 | pxor %xmm12,%xmm3 | ||
1443 | |||
1444 | # qhasm: z15 = z15_stack | ||
1445 | # asm 1: movdqa <z15_stack=stack128#22,>z15=int6464#3 | ||
1446 | # asm 2: movdqa <z15_stack=336(%rsp),>z15=%xmm2 | ||
1447 | movdqa 336(%rsp),%xmm2 | ||
1448 | |||
1449 | # qhasm: z5_stack = z5 | ||
1450 | # asm 1: movdqa <z5=int6464#1,>z5_stack=stack128#22 | ||
1451 | # asm 2: movdqa <z5=%xmm0,>z5_stack=336(%rsp) | ||
1452 | movdqa %xmm0,336(%rsp) | ||
1453 | |||
1454 | # qhasm: y3 = z11 | ||
1455 | # asm 1: movdqa <z11=int6464#7,>y3=int6464#1 | ||
1456 | # asm 2: movdqa <z11=%xmm6,>y3=%xmm0 | ||
1457 | movdqa %xmm6,%xmm0 | ||
1458 | |||
1459 | # qhasm: uint32323232 y3 += z15 | ||
1460 | # asm 1: paddd <z15=int6464#3,<y3=int6464#1 | ||
1461 | # asm 2: paddd <z15=%xmm2,<y3=%xmm0 | ||
1462 | paddd %xmm2,%xmm0 | ||
1463 | |||
1464 | # qhasm: r3 = y3 | ||
1465 | # asm 1: movdqa <y3=int6464#1,>r3=int6464#13 | ||
1466 | # asm 2: movdqa <y3=%xmm0,>r3=%xmm12 | ||
1467 | movdqa %xmm0,%xmm12 | ||
1468 | |||
1469 | # qhasm: uint32323232 y3 <<= 7 | ||
1470 | # asm 1: pslld $7,<y3=int6464#1 | ||
1471 | # asm 2: pslld $7,<y3=%xmm0 | ||
1472 | pslld $7,%xmm0 | ||
1473 | |||
1474 | # qhasm: z3 ^= y3 | ||
1475 | # asm 1: pxor <y3=int6464#1,<z3=int6464#5 | ||
1476 | # asm 2: pxor <y3=%xmm0,<z3=%xmm4 | ||
1477 | pxor %xmm0,%xmm4 | ||
1478 | |||
1479 | # qhasm: uint32323232 r3 >>= 25 | ||
1480 | # asm 1: psrld $25,<r3=int6464#13 | ||
1481 | # asm 2: psrld $25,<r3=%xmm12 | ||
1482 | psrld $25,%xmm12 | ||
1483 | |||
1484 | # qhasm: z3 ^= r3 | ||
1485 | # asm 1: pxor <r3=int6464#13,<z3=int6464#5 | ||
1486 | # asm 2: pxor <r3=%xmm12,<z3=%xmm4 | ||
1487 | pxor %xmm12,%xmm4 | ||
1488 | |||
1489 | # qhasm: y2 = z10 | ||
1490 | # asm 1: movdqa <z10=int6464#2,>y2=int6464#1 | ||
1491 | # asm 2: movdqa <z10=%xmm1,>y2=%xmm0 | ||
1492 | movdqa %xmm1,%xmm0 | ||
1493 | |||
1494 | # qhasm: uint32323232 y2 += z14 | ||
1495 | # asm 1: paddd <z14=int6464#4,<y2=int6464#1 | ||
1496 | # asm 2: paddd <z14=%xmm3,<y2=%xmm0 | ||
1497 | paddd %xmm3,%xmm0 | ||
1498 | |||
1499 | # qhasm: r2 = y2 | ||
1500 | # asm 1: movdqa <y2=int6464#1,>r2=int6464#13 | ||
1501 | # asm 2: movdqa <y2=%xmm0,>r2=%xmm12 | ||
1502 | movdqa %xmm0,%xmm12 | ||
1503 | |||
1504 | # qhasm: uint32323232 y2 <<= 9 | ||
1505 | # asm 1: pslld $9,<y2=int6464#1 | ||
1506 | # asm 2: pslld $9,<y2=%xmm0 | ||
1507 | pslld $9,%xmm0 | ||
1508 | |||
1509 | # qhasm: z2 ^= y2 | ||
1510 | # asm 1: pxor <y2=int6464#1,<z2=int6464#11 | ||
1511 | # asm 2: pxor <y2=%xmm0,<z2=%xmm10 | ||
1512 | pxor %xmm0,%xmm10 | ||
1513 | |||
1514 | # qhasm: uint32323232 r2 >>= 23 | ||
1515 | # asm 1: psrld $23,<r2=int6464#13 | ||
1516 | # asm 2: psrld $23,<r2=%xmm12 | ||
1517 | psrld $23,%xmm12 | ||
1518 | |||
1519 | # qhasm: z2 ^= r2 | ||
1520 | # asm 1: pxor <r2=int6464#13,<z2=int6464#11 | ||
1521 | # asm 2: pxor <r2=%xmm12,<z2=%xmm10 | ||
1522 | pxor %xmm12,%xmm10 | ||
1523 | |||
1524 | # qhasm: y7 = z15 | ||
1525 | # asm 1: movdqa <z15=int6464#3,>y7=int6464#1 | ||
1526 | # asm 2: movdqa <z15=%xmm2,>y7=%xmm0 | ||
1527 | movdqa %xmm2,%xmm0 | ||
1528 | |||
1529 | # qhasm: uint32323232 y7 += z3 | ||
1530 | # asm 1: paddd <z3=int6464#5,<y7=int6464#1 | ||
1531 | # asm 2: paddd <z3=%xmm4,<y7=%xmm0 | ||
1532 | paddd %xmm4,%xmm0 | ||
1533 | |||
1534 | # qhasm: r7 = y7 | ||
1535 | # asm 1: movdqa <y7=int6464#1,>r7=int6464#13 | ||
1536 | # asm 2: movdqa <y7=%xmm0,>r7=%xmm12 | ||
1537 | movdqa %xmm0,%xmm12 | ||
1538 | |||
1539 | # qhasm: uint32323232 y7 <<= 9 | ||
1540 | # asm 1: pslld $9,<y7=int6464#1 | ||
1541 | # asm 2: pslld $9,<y7=%xmm0 | ||
1542 | pslld $9,%xmm0 | ||
1543 | |||
1544 | # qhasm: z7 ^= y7 | ||
1545 | # asm 1: pxor <y7=int6464#1,<z7=int6464#9 | ||
1546 | # asm 2: pxor <y7=%xmm0,<z7=%xmm8 | ||
1547 | pxor %xmm0,%xmm8 | ||
1548 | |||
1549 | # qhasm: uint32323232 r7 >>= 23 | ||
1550 | # asm 1: psrld $23,<r7=int6464#13 | ||
1551 | # asm 2: psrld $23,<r7=%xmm12 | ||
1552 | psrld $23,%xmm12 | ||
1553 | |||
1554 | # qhasm: z7 ^= r7 | ||
1555 | # asm 1: pxor <r7=int6464#13,<z7=int6464#9 | ||
1556 | # asm 2: pxor <r7=%xmm12,<z7=%xmm8 | ||
1557 | pxor %xmm12,%xmm8 | ||
1558 | |||
1559 | # qhasm: y6 = z14 | ||
1560 | # asm 1: movdqa <z14=int6464#4,>y6=int6464#1 | ||
1561 | # asm 2: movdqa <z14=%xmm3,>y6=%xmm0 | ||
1562 | movdqa %xmm3,%xmm0 | ||
1563 | |||
1564 | # qhasm: uint32323232 y6 += z2 | ||
1565 | # asm 1: paddd <z2=int6464#11,<y6=int6464#1 | ||
1566 | # asm 2: paddd <z2=%xmm10,<y6=%xmm0 | ||
1567 | paddd %xmm10,%xmm0 | ||
1568 | |||
1569 | # qhasm: r6 = y6 | ||
1570 | # asm 1: movdqa <y6=int6464#1,>r6=int6464#13 | ||
1571 | # asm 2: movdqa <y6=%xmm0,>r6=%xmm12 | ||
1572 | movdqa %xmm0,%xmm12 | ||
1573 | |||
1574 | # qhasm: uint32323232 y6 <<= 13 | ||
1575 | # asm 1: pslld $13,<y6=int6464#1 | ||
1576 | # asm 2: pslld $13,<y6=%xmm0 | ||
1577 | pslld $13,%xmm0 | ||
1578 | |||
1579 | # qhasm: z6 ^= y6 | ||
1580 | # asm 1: pxor <y6=int6464#1,<z6=int6464#6 | ||
1581 | # asm 2: pxor <y6=%xmm0,<z6=%xmm5 | ||
1582 | pxor %xmm0,%xmm5 | ||
1583 | |||
1584 | # qhasm: uint32323232 r6 >>= 19 | ||
1585 | # asm 1: psrld $19,<r6=int6464#13 | ||
1586 | # asm 2: psrld $19,<r6=%xmm12 | ||
1587 | psrld $19,%xmm12 | ||
1588 | |||
1589 | # qhasm: z6 ^= r6 | ||
1590 | # asm 1: pxor <r6=int6464#13,<z6=int6464#6 | ||
1591 | # asm 2: pxor <r6=%xmm12,<z6=%xmm5 | ||
1592 | pxor %xmm12,%xmm5 | ||
1593 | |||
1594 | # qhasm: y11 = z3 | ||
1595 | # asm 1: movdqa <z3=int6464#5,>y11=int6464#1 | ||
1596 | # asm 2: movdqa <z3=%xmm4,>y11=%xmm0 | ||
1597 | movdqa %xmm4,%xmm0 | ||
1598 | |||
1599 | # qhasm: uint32323232 y11 += z7 | ||
1600 | # asm 1: paddd <z7=int6464#9,<y11=int6464#1 | ||
1601 | # asm 2: paddd <z7=%xmm8,<y11=%xmm0 | ||
1602 | paddd %xmm8,%xmm0 | ||
1603 | |||
1604 | # qhasm: r11 = y11 | ||
1605 | # asm 1: movdqa <y11=int6464#1,>r11=int6464#13 | ||
1606 | # asm 2: movdqa <y11=%xmm0,>r11=%xmm12 | ||
1607 | movdqa %xmm0,%xmm12 | ||
1608 | |||
1609 | # qhasm: uint32323232 y11 <<= 13 | ||
1610 | # asm 1: pslld $13,<y11=int6464#1 | ||
1611 | # asm 2: pslld $13,<y11=%xmm0 | ||
1612 | pslld $13,%xmm0 | ||
1613 | |||
1614 | # qhasm: z11 ^= y11 | ||
1615 | # asm 1: pxor <y11=int6464#1,<z11=int6464#7 | ||
1616 | # asm 2: pxor <y11=%xmm0,<z11=%xmm6 | ||
1617 | pxor %xmm0,%xmm6 | ||
1618 | |||
1619 | # qhasm: uint32323232 r11 >>= 19 | ||
1620 | # asm 1: psrld $19,<r11=int6464#13 | ||
1621 | # asm 2: psrld $19,<r11=%xmm12 | ||
1622 | psrld $19,%xmm12 | ||
1623 | |||
1624 | # qhasm: z11 ^= r11 | ||
1625 | # asm 1: pxor <r11=int6464#13,<z11=int6464#7 | ||
1626 | # asm 2: pxor <r11=%xmm12,<z11=%xmm6 | ||
1627 | pxor %xmm12,%xmm6 | ||
1628 | |||
1629 | # qhasm: y10 = z2 | ||
1630 | # asm 1: movdqa <z2=int6464#11,>y10=int6464#1 | ||
1631 | # asm 2: movdqa <z2=%xmm10,>y10=%xmm0 | ||
1632 | movdqa %xmm10,%xmm0 | ||
1633 | |||
1634 | # qhasm: uint32323232 y10 += z6 | ||
1635 | # asm 1: paddd <z6=int6464#6,<y10=int6464#1 | ||
1636 | # asm 2: paddd <z6=%xmm5,<y10=%xmm0 | ||
1637 | paddd %xmm5,%xmm0 | ||
1638 | |||
1639 | # qhasm: r10 = y10 | ||
1640 | # asm 1: movdqa <y10=int6464#1,>r10=int6464#13 | ||
1641 | # asm 2: movdqa <y10=%xmm0,>r10=%xmm12 | ||
1642 | movdqa %xmm0,%xmm12 | ||
1643 | |||
1644 | # qhasm: uint32323232 y10 <<= 18 | ||
1645 | # asm 1: pslld $18,<y10=int6464#1 | ||
1646 | # asm 2: pslld $18,<y10=%xmm0 | ||
1647 | pslld $18,%xmm0 | ||
1648 | |||
1649 | # qhasm: z10 ^= y10 | ||
1650 | # asm 1: pxor <y10=int6464#1,<z10=int6464#2 | ||
1651 | # asm 2: pxor <y10=%xmm0,<z10=%xmm1 | ||
1652 | pxor %xmm0,%xmm1 | ||
1653 | |||
1654 | # qhasm: uint32323232 r10 >>= 14 | ||
1655 | # asm 1: psrld $14,<r10=int6464#13 | ||
1656 | # asm 2: psrld $14,<r10=%xmm12 | ||
1657 | psrld $14,%xmm12 | ||
1658 | |||
1659 | # qhasm: z10 ^= r10 | ||
1660 | # asm 1: pxor <r10=int6464#13,<z10=int6464#2 | ||
1661 | # asm 2: pxor <r10=%xmm12,<z10=%xmm1 | ||
1662 | pxor %xmm12,%xmm1 | ||
1663 | |||
1664 | # qhasm: z0 = z0_stack | ||
1665 | # asm 1: movdqa <z0_stack=stack128#21,>z0=int6464#1 | ||
1666 | # asm 2: movdqa <z0_stack=320(%rsp),>z0=%xmm0 | ||
1667 | movdqa 320(%rsp),%xmm0 | ||
1668 | |||
1669 | # qhasm: z10_stack = z10 | ||
1670 | # asm 1: movdqa <z10=int6464#2,>z10_stack=stack128#21 | ||
1671 | # asm 2: movdqa <z10=%xmm1,>z10_stack=320(%rsp) | ||
1672 | movdqa %xmm1,320(%rsp) | ||
1673 | |||
1674 | # qhasm: y1 = z3 | ||
1675 | # asm 1: movdqa <z3=int6464#5,>y1=int6464#2 | ||
1676 | # asm 2: movdqa <z3=%xmm4,>y1=%xmm1 | ||
1677 | movdqa %xmm4,%xmm1 | ||
1678 | |||
1679 | # qhasm: uint32323232 y1 += z0 | ||
1680 | # asm 1: paddd <z0=int6464#1,<y1=int6464#2 | ||
1681 | # asm 2: paddd <z0=%xmm0,<y1=%xmm1 | ||
1682 | paddd %xmm0,%xmm1 | ||
1683 | |||
1684 | # qhasm: r1 = y1 | ||
1685 | # asm 1: movdqa <y1=int6464#2,>r1=int6464#13 | ||
1686 | # asm 2: movdqa <y1=%xmm1,>r1=%xmm12 | ||
1687 | movdqa %xmm1,%xmm12 | ||
1688 | |||
1689 | # qhasm: uint32323232 y1 <<= 7 | ||
1690 | # asm 1: pslld $7,<y1=int6464#2 | ||
1691 | # asm 2: pslld $7,<y1=%xmm1 | ||
1692 | pslld $7,%xmm1 | ||
1693 | |||
1694 | # qhasm: z1 ^= y1 | ||
1695 | # asm 1: pxor <y1=int6464#2,<z1=int6464#8 | ||
1696 | # asm 2: pxor <y1=%xmm1,<z1=%xmm7 | ||
1697 | pxor %xmm1,%xmm7 | ||
1698 | |||
1699 | # qhasm: uint32323232 r1 >>= 25 | ||
1700 | # asm 1: psrld $25,<r1=int6464#13 | ||
1701 | # asm 2: psrld $25,<r1=%xmm12 | ||
1702 | psrld $25,%xmm12 | ||
1703 | |||
1704 | # qhasm: z1 ^= r1 | ||
1705 | # asm 1: pxor <r1=int6464#13,<z1=int6464#8 | ||
1706 | # asm 2: pxor <r1=%xmm12,<z1=%xmm7 | ||
1707 | pxor %xmm12,%xmm7 | ||
1708 | |||
1709 | # qhasm: y15 = z7 | ||
1710 | # asm 1: movdqa <z7=int6464#9,>y15=int6464#2 | ||
1711 | # asm 2: movdqa <z7=%xmm8,>y15=%xmm1 | ||
1712 | movdqa %xmm8,%xmm1 | ||
1713 | |||
1714 | # qhasm: uint32323232 y15 += z11 | ||
1715 | # asm 1: paddd <z11=int6464#7,<y15=int6464#2 | ||
1716 | # asm 2: paddd <z11=%xmm6,<y15=%xmm1 | ||
1717 | paddd %xmm6,%xmm1 | ||
1718 | |||
1719 | # qhasm: r15 = y15 | ||
1720 | # asm 1: movdqa <y15=int6464#2,>r15=int6464#13 | ||
1721 | # asm 2: movdqa <y15=%xmm1,>r15=%xmm12 | ||
1722 | movdqa %xmm1,%xmm12 | ||
1723 | |||
1724 | # qhasm: uint32323232 y15 <<= 18 | ||
1725 | # asm 1: pslld $18,<y15=int6464#2 | ||
1726 | # asm 2: pslld $18,<y15=%xmm1 | ||
1727 | pslld $18,%xmm1 | ||
1728 | |||
1729 | # qhasm: z15 ^= y15 | ||
1730 | # asm 1: pxor <y15=int6464#2,<z15=int6464#3 | ||
1731 | # asm 2: pxor <y15=%xmm1,<z15=%xmm2 | ||
1732 | pxor %xmm1,%xmm2 | ||
1733 | |||
1734 | # qhasm: uint32323232 r15 >>= 14 | ||
1735 | # asm 1: psrld $14,<r15=int6464#13 | ||
1736 | # asm 2: psrld $14,<r15=%xmm12 | ||
1737 | psrld $14,%xmm12 | ||
1738 | |||
1739 | # qhasm: z15 ^= r15 | ||
1740 | # asm 1: pxor <r15=int6464#13,<z15=int6464#3 | ||
1741 | # asm 2: pxor <r15=%xmm12,<z15=%xmm2 | ||
1742 | pxor %xmm12,%xmm2 | ||
1743 | |||
1744 | # qhasm: z5 = z5_stack | ||
1745 | # asm 1: movdqa <z5_stack=stack128#22,>z5=int6464#13 | ||
1746 | # asm 2: movdqa <z5_stack=336(%rsp),>z5=%xmm12 | ||
1747 | movdqa 336(%rsp),%xmm12 | ||
1748 | |||
1749 | # qhasm: z15_stack = z15 | ||
1750 | # asm 1: movdqa <z15=int6464#3,>z15_stack=stack128#22 | ||
1751 | # asm 2: movdqa <z15=%xmm2,>z15_stack=336(%rsp) | ||
1752 | movdqa %xmm2,336(%rsp) | ||
1753 | |||
1754 | # qhasm: y6 = z4 | ||
1755 | # asm 1: movdqa <z4=int6464#15,>y6=int6464#2 | ||
1756 | # asm 2: movdqa <z4=%xmm14,>y6=%xmm1 | ||
1757 | movdqa %xmm14,%xmm1 | ||
1758 | |||
1759 | # qhasm: uint32323232 y6 += z5 | ||
1760 | # asm 1: paddd <z5=int6464#13,<y6=int6464#2 | ||
1761 | # asm 2: paddd <z5=%xmm12,<y6=%xmm1 | ||
1762 | paddd %xmm12,%xmm1 | ||
1763 | |||
1764 | # qhasm: r6 = y6 | ||
1765 | # asm 1: movdqa <y6=int6464#2,>r6=int6464#3 | ||
1766 | # asm 2: movdqa <y6=%xmm1,>r6=%xmm2 | ||
1767 | movdqa %xmm1,%xmm2 | ||
1768 | |||
1769 | # qhasm: uint32323232 y6 <<= 7 | ||
1770 | # asm 1: pslld $7,<y6=int6464#2 | ||
1771 | # asm 2: pslld $7,<y6=%xmm1 | ||
1772 | pslld $7,%xmm1 | ||
1773 | |||
1774 | # qhasm: z6 ^= y6 | ||
1775 | # asm 1: pxor <y6=int6464#2,<z6=int6464#6 | ||
1776 | # asm 2: pxor <y6=%xmm1,<z6=%xmm5 | ||
1777 | pxor %xmm1,%xmm5 | ||
1778 | |||
1779 | # qhasm: uint32323232 r6 >>= 25 | ||
1780 | # asm 1: psrld $25,<r6=int6464#3 | ||
1781 | # asm 2: psrld $25,<r6=%xmm2 | ||
1782 | psrld $25,%xmm2 | ||
1783 | |||
1784 | # qhasm: z6 ^= r6 | ||
1785 | # asm 1: pxor <r6=int6464#3,<z6=int6464#6 | ||
1786 | # asm 2: pxor <r6=%xmm2,<z6=%xmm5 | ||
1787 | pxor %xmm2,%xmm5 | ||
1788 | |||
1789 | # qhasm: y2 = z0 | ||
1790 | # asm 1: movdqa <z0=int6464#1,>y2=int6464#2 | ||
1791 | # asm 2: movdqa <z0=%xmm0,>y2=%xmm1 | ||
1792 | movdqa %xmm0,%xmm1 | ||
1793 | |||
1794 | # qhasm: uint32323232 y2 += z1 | ||
1795 | # asm 1: paddd <z1=int6464#8,<y2=int6464#2 | ||
1796 | # asm 2: paddd <z1=%xmm7,<y2=%xmm1 | ||
1797 | paddd %xmm7,%xmm1 | ||
1798 | |||
1799 | # qhasm: r2 = y2 | ||
1800 | # asm 1: movdqa <y2=int6464#2,>r2=int6464#3 | ||
1801 | # asm 2: movdqa <y2=%xmm1,>r2=%xmm2 | ||
1802 | movdqa %xmm1,%xmm2 | ||
1803 | |||
1804 | # qhasm: uint32323232 y2 <<= 9 | ||
1805 | # asm 1: pslld $9,<y2=int6464#2 | ||
1806 | # asm 2: pslld $9,<y2=%xmm1 | ||
1807 | pslld $9,%xmm1 | ||
1808 | |||
1809 | # qhasm: z2 ^= y2 | ||
1810 | # asm 1: pxor <y2=int6464#2,<z2=int6464#11 | ||
1811 | # asm 2: pxor <y2=%xmm1,<z2=%xmm10 | ||
1812 | pxor %xmm1,%xmm10 | ||
1813 | |||
1814 | # qhasm: uint32323232 r2 >>= 23 | ||
1815 | # asm 1: psrld $23,<r2=int6464#3 | ||
1816 | # asm 2: psrld $23,<r2=%xmm2 | ||
1817 | psrld $23,%xmm2 | ||
1818 | |||
1819 | # qhasm: z2 ^= r2 | ||
1820 | # asm 1: pxor <r2=int6464#3,<z2=int6464#11 | ||
1821 | # asm 2: pxor <r2=%xmm2,<z2=%xmm10 | ||
1822 | pxor %xmm2,%xmm10 | ||
1823 | |||
1824 | # qhasm: y7 = z5 | ||
1825 | # asm 1: movdqa <z5=int6464#13,>y7=int6464#2 | ||
1826 | # asm 2: movdqa <z5=%xmm12,>y7=%xmm1 | ||
1827 | movdqa %xmm12,%xmm1 | ||
1828 | |||
1829 | # qhasm: uint32323232 y7 += z6 | ||
1830 | # asm 1: paddd <z6=int6464#6,<y7=int6464#2 | ||
1831 | # asm 2: paddd <z6=%xmm5,<y7=%xmm1 | ||
1832 | paddd %xmm5,%xmm1 | ||
1833 | |||
1834 | # qhasm: r7 = y7 | ||
1835 | # asm 1: movdqa <y7=int6464#2,>r7=int6464#3 | ||
1836 | # asm 2: movdqa <y7=%xmm1,>r7=%xmm2 | ||
1837 | movdqa %xmm1,%xmm2 | ||
1838 | |||
1839 | # qhasm: uint32323232 y7 <<= 9 | ||
1840 | # asm 1: pslld $9,<y7=int6464#2 | ||
1841 | # asm 2: pslld $9,<y7=%xmm1 | ||
1842 | pslld $9,%xmm1 | ||
1843 | |||
1844 | # qhasm: z7 ^= y7 | ||
1845 | # asm 1: pxor <y7=int6464#2,<z7=int6464#9 | ||
1846 | # asm 2: pxor <y7=%xmm1,<z7=%xmm8 | ||
1847 | pxor %xmm1,%xmm8 | ||
1848 | |||
1849 | # qhasm: uint32323232 r7 >>= 23 | ||
1850 | # asm 1: psrld $23,<r7=int6464#3 | ||
1851 | # asm 2: psrld $23,<r7=%xmm2 | ||
1852 | psrld $23,%xmm2 | ||
1853 | |||
1854 | # qhasm: z7 ^= r7 | ||
1855 | # asm 1: pxor <r7=int6464#3,<z7=int6464#9 | ||
1856 | # asm 2: pxor <r7=%xmm2,<z7=%xmm8 | ||
1857 | pxor %xmm2,%xmm8 | ||
1858 | |||
1859 | # qhasm: y3 = z1 | ||
1860 | # asm 1: movdqa <z1=int6464#8,>y3=int6464#2 | ||
1861 | # asm 2: movdqa <z1=%xmm7,>y3=%xmm1 | ||
1862 | movdqa %xmm7,%xmm1 | ||
1863 | |||
1864 | # qhasm: uint32323232 y3 += z2 | ||
1865 | # asm 1: paddd <z2=int6464#11,<y3=int6464#2 | ||
1866 | # asm 2: paddd <z2=%xmm10,<y3=%xmm1 | ||
1867 | paddd %xmm10,%xmm1 | ||
1868 | |||
1869 | # qhasm: r3 = y3 | ||
1870 | # asm 1: movdqa <y3=int6464#2,>r3=int6464#3 | ||
1871 | # asm 2: movdqa <y3=%xmm1,>r3=%xmm2 | ||
1872 | movdqa %xmm1,%xmm2 | ||
1873 | |||
1874 | # qhasm: uint32323232 y3 <<= 13 | ||
1875 | # asm 1: pslld $13,<y3=int6464#2 | ||
1876 | # asm 2: pslld $13,<y3=%xmm1 | ||
1877 | pslld $13,%xmm1 | ||
1878 | |||
1879 | # qhasm: z3 ^= y3 | ||
1880 | # asm 1: pxor <y3=int6464#2,<z3=int6464#5 | ||
1881 | # asm 2: pxor <y3=%xmm1,<z3=%xmm4 | ||
1882 | pxor %xmm1,%xmm4 | ||
1883 | |||
1884 | # qhasm: uint32323232 r3 >>= 19 | ||
1885 | # asm 1: psrld $19,<r3=int6464#3 | ||
1886 | # asm 2: psrld $19,<r3=%xmm2 | ||
1887 | psrld $19,%xmm2 | ||
1888 | |||
1889 | # qhasm: z3 ^= r3 | ||
1890 | # asm 1: pxor <r3=int6464#3,<z3=int6464#5 | ||
1891 | # asm 2: pxor <r3=%xmm2,<z3=%xmm4 | ||
1892 | pxor %xmm2,%xmm4 | ||
1893 | |||
1894 | # qhasm: y4 = z6 | ||
1895 | # asm 1: movdqa <z6=int6464#6,>y4=int6464#2 | ||
1896 | # asm 2: movdqa <z6=%xmm5,>y4=%xmm1 | ||
1897 | movdqa %xmm5,%xmm1 | ||
1898 | |||
1899 | # qhasm: uint32323232 y4 += z7 | ||
1900 | # asm 1: paddd <z7=int6464#9,<y4=int6464#2 | ||
1901 | # asm 2: paddd <z7=%xmm8,<y4=%xmm1 | ||
1902 | paddd %xmm8,%xmm1 | ||
1903 | |||
1904 | # qhasm: r4 = y4 | ||
1905 | # asm 1: movdqa <y4=int6464#2,>r4=int6464#3 | ||
1906 | # asm 2: movdqa <y4=%xmm1,>r4=%xmm2 | ||
1907 | movdqa %xmm1,%xmm2 | ||
1908 | |||
1909 | # qhasm: uint32323232 y4 <<= 13 | ||
1910 | # asm 1: pslld $13,<y4=int6464#2 | ||
1911 | # asm 2: pslld $13,<y4=%xmm1 | ||
1912 | pslld $13,%xmm1 | ||
1913 | |||
1914 | # qhasm: z4 ^= y4 | ||
1915 | # asm 1: pxor <y4=int6464#2,<z4=int6464#15 | ||
1916 | # asm 2: pxor <y4=%xmm1,<z4=%xmm14 | ||
1917 | pxor %xmm1,%xmm14 | ||
1918 | |||
1919 | # qhasm: uint32323232 r4 >>= 19 | ||
1920 | # asm 1: psrld $19,<r4=int6464#3 | ||
1921 | # asm 2: psrld $19,<r4=%xmm2 | ||
1922 | psrld $19,%xmm2 | ||
1923 | |||
1924 | # qhasm: z4 ^= r4 | ||
1925 | # asm 1: pxor <r4=int6464#3,<z4=int6464#15 | ||
1926 | # asm 2: pxor <r4=%xmm2,<z4=%xmm14 | ||
1927 | pxor %xmm2,%xmm14 | ||
1928 | |||
1929 | # qhasm: y0 = z2 | ||
1930 | # asm 1: movdqa <z2=int6464#11,>y0=int6464#2 | ||
1931 | # asm 2: movdqa <z2=%xmm10,>y0=%xmm1 | ||
1932 | movdqa %xmm10,%xmm1 | ||
1933 | |||
1934 | # qhasm: uint32323232 y0 += z3 | ||
1935 | # asm 1: paddd <z3=int6464#5,<y0=int6464#2 | ||
1936 | # asm 2: paddd <z3=%xmm4,<y0=%xmm1 | ||
1937 | paddd %xmm4,%xmm1 | ||
1938 | |||
1939 | # qhasm: r0 = y0 | ||
1940 | # asm 1: movdqa <y0=int6464#2,>r0=int6464#3 | ||
1941 | # asm 2: movdqa <y0=%xmm1,>r0=%xmm2 | ||
1942 | movdqa %xmm1,%xmm2 | ||
1943 | |||
1944 | # qhasm: uint32323232 y0 <<= 18 | ||
1945 | # asm 1: pslld $18,<y0=int6464#2 | ||
1946 | # asm 2: pslld $18,<y0=%xmm1 | ||
1947 | pslld $18,%xmm1 | ||
1948 | |||
1949 | # qhasm: z0 ^= y0 | ||
1950 | # asm 1: pxor <y0=int6464#2,<z0=int6464#1 | ||
1951 | # asm 2: pxor <y0=%xmm1,<z0=%xmm0 | ||
1952 | pxor %xmm1,%xmm0 | ||
1953 | |||
1954 | # qhasm: uint32323232 r0 >>= 14 | ||
1955 | # asm 1: psrld $14,<r0=int6464#3 | ||
1956 | # asm 2: psrld $14,<r0=%xmm2 | ||
1957 | psrld $14,%xmm2 | ||
1958 | |||
1959 | # qhasm: z0 ^= r0 | ||
1960 | # asm 1: pxor <r0=int6464#3,<z0=int6464#1 | ||
1961 | # asm 2: pxor <r0=%xmm2,<z0=%xmm0 | ||
1962 | pxor %xmm2,%xmm0 | ||
1963 | |||
1964 | # qhasm: z10 = z10_stack | ||
1965 | # asm 1: movdqa <z10_stack=stack128#21,>z10=int6464#2 | ||
1966 | # asm 2: movdqa <z10_stack=320(%rsp),>z10=%xmm1 | ||
1967 | movdqa 320(%rsp),%xmm1 | ||
1968 | |||
1969 | # qhasm: z0_stack = z0 | ||
1970 | # asm 1: movdqa <z0=int6464#1,>z0_stack=stack128#21 | ||
1971 | # asm 2: movdqa <z0=%xmm0,>z0_stack=320(%rsp) | ||
1972 | movdqa %xmm0,320(%rsp) | ||
1973 | |||
1974 | # qhasm: y5 = z7 | ||
1975 | # asm 1: movdqa <z7=int6464#9,>y5=int6464#1 | ||
1976 | # asm 2: movdqa <z7=%xmm8,>y5=%xmm0 | ||
1977 | movdqa %xmm8,%xmm0 | ||
1978 | |||
1979 | # qhasm: uint32323232 y5 += z4 | ||
1980 | # asm 1: paddd <z4=int6464#15,<y5=int6464#1 | ||
1981 | # asm 2: paddd <z4=%xmm14,<y5=%xmm0 | ||
1982 | paddd %xmm14,%xmm0 | ||
1983 | |||
1984 | # qhasm: r5 = y5 | ||
1985 | # asm 1: movdqa <y5=int6464#1,>r5=int6464#3 | ||
1986 | # asm 2: movdqa <y5=%xmm0,>r5=%xmm2 | ||
1987 | movdqa %xmm0,%xmm2 | ||
1988 | |||
1989 | # qhasm: uint32323232 y5 <<= 18 | ||
1990 | # asm 1: pslld $18,<y5=int6464#1 | ||
1991 | # asm 2: pslld $18,<y5=%xmm0 | ||
1992 | pslld $18,%xmm0 | ||
1993 | |||
1994 | # qhasm: z5 ^= y5 | ||
1995 | # asm 1: pxor <y5=int6464#1,<z5=int6464#13 | ||
1996 | # asm 2: pxor <y5=%xmm0,<z5=%xmm12 | ||
1997 | pxor %xmm0,%xmm12 | ||
1998 | |||
1999 | # qhasm: uint32323232 r5 >>= 14 | ||
2000 | # asm 1: psrld $14,<r5=int6464#3 | ||
2001 | # asm 2: psrld $14,<r5=%xmm2 | ||
2002 | psrld $14,%xmm2 | ||
2003 | |||
2004 | # qhasm: z5 ^= r5 | ||
2005 | # asm 1: pxor <r5=int6464#3,<z5=int6464#13 | ||
2006 | # asm 2: pxor <r5=%xmm2,<z5=%xmm12 | ||
2007 | pxor %xmm2,%xmm12 | ||
2008 | |||
2009 | # qhasm: y11 = z9 | ||
2010 | # asm 1: movdqa <z9=int6464#12,>y11=int6464#1 | ||
2011 | # asm 2: movdqa <z9=%xmm11,>y11=%xmm0 | ||
2012 | movdqa %xmm11,%xmm0 | ||
2013 | |||
2014 | # qhasm: uint32323232 y11 += z10 | ||
2015 | # asm 1: paddd <z10=int6464#2,<y11=int6464#1 | ||
2016 | # asm 2: paddd <z10=%xmm1,<y11=%xmm0 | ||
2017 | paddd %xmm1,%xmm0 | ||
2018 | |||
2019 | # qhasm: r11 = y11 | ||
2020 | # asm 1: movdqa <y11=int6464#1,>r11=int6464#3 | ||
2021 | # asm 2: movdqa <y11=%xmm0,>r11=%xmm2 | ||
2022 | movdqa %xmm0,%xmm2 | ||
2023 | |||
2024 | # qhasm: uint32323232 y11 <<= 7 | ||
2025 | # asm 1: pslld $7,<y11=int6464#1 | ||
2026 | # asm 2: pslld $7,<y11=%xmm0 | ||
2027 | pslld $7,%xmm0 | ||
2028 | |||
2029 | # qhasm: z11 ^= y11 | ||
2030 | # asm 1: pxor <y11=int6464#1,<z11=int6464#7 | ||
2031 | # asm 2: pxor <y11=%xmm0,<z11=%xmm6 | ||
2032 | pxor %xmm0,%xmm6 | ||
2033 | |||
2034 | # qhasm: uint32323232 r11 >>= 25 | ||
2035 | # asm 1: psrld $25,<r11=int6464#3 | ||
2036 | # asm 2: psrld $25,<r11=%xmm2 | ||
2037 | psrld $25,%xmm2 | ||
2038 | |||
2039 | # qhasm: z11 ^= r11 | ||
2040 | # asm 1: pxor <r11=int6464#3,<z11=int6464#7 | ||
2041 | # asm 2: pxor <r11=%xmm2,<z11=%xmm6 | ||
2042 | pxor %xmm2,%xmm6 | ||
2043 | |||
2044 | # qhasm: z15 = z15_stack | ||
2045 | # asm 1: movdqa <z15_stack=stack128#22,>z15=int6464#3 | ||
2046 | # asm 2: movdqa <z15_stack=336(%rsp),>z15=%xmm2 | ||
2047 | movdqa 336(%rsp),%xmm2 | ||
2048 | |||
2049 | # qhasm: z5_stack = z5 | ||
2050 | # asm 1: movdqa <z5=int6464#13,>z5_stack=stack128#22 | ||
2051 | # asm 2: movdqa <z5=%xmm12,>z5_stack=336(%rsp) | ||
2052 | movdqa %xmm12,336(%rsp) | ||
2053 | |||
2054 | # qhasm: y12 = z14 | ||
2055 | # asm 1: movdqa <z14=int6464#4,>y12=int6464#1 | ||
2056 | # asm 2: movdqa <z14=%xmm3,>y12=%xmm0 | ||
2057 | movdqa %xmm3,%xmm0 | ||
2058 | |||
2059 | # qhasm: uint32323232 y12 += z15 | ||
2060 | # asm 1: paddd <z15=int6464#3,<y12=int6464#1 | ||
2061 | # asm 2: paddd <z15=%xmm2,<y12=%xmm0 | ||
2062 | paddd %xmm2,%xmm0 | ||
2063 | |||
2064 | # qhasm: r12 = y12 | ||
2065 | # asm 1: movdqa <y12=int6464#1,>r12=int6464#13 | ||
2066 | # asm 2: movdqa <y12=%xmm0,>r12=%xmm12 | ||
2067 | movdqa %xmm0,%xmm12 | ||
2068 | |||
2069 | # qhasm: uint32323232 y12 <<= 7 | ||
2070 | # asm 1: pslld $7,<y12=int6464#1 | ||
2071 | # asm 2: pslld $7,<y12=%xmm0 | ||
2072 | pslld $7,%xmm0 | ||
2073 | |||
2074 | # qhasm: z12 ^= y12 | ||
2075 | # asm 1: pxor <y12=int6464#1,<z12=int6464#14 | ||
2076 | # asm 2: pxor <y12=%xmm0,<z12=%xmm13 | ||
2077 | pxor %xmm0,%xmm13 | ||
2078 | |||
2079 | # qhasm: uint32323232 r12 >>= 25 | ||
2080 | # asm 1: psrld $25,<r12=int6464#13 | ||
2081 | # asm 2: psrld $25,<r12=%xmm12 | ||
2082 | psrld $25,%xmm12 | ||
2083 | |||
2084 | # qhasm: z12 ^= r12 | ||
2085 | # asm 1: pxor <r12=int6464#13,<z12=int6464#14 | ||
2086 | # asm 2: pxor <r12=%xmm12,<z12=%xmm13 | ||
2087 | pxor %xmm12,%xmm13 | ||
2088 | |||
2089 | # qhasm: y8 = z10 | ||
2090 | # asm 1: movdqa <z10=int6464#2,>y8=int6464#1 | ||
2091 | # asm 2: movdqa <z10=%xmm1,>y8=%xmm0 | ||
2092 | movdqa %xmm1,%xmm0 | ||
2093 | |||
2094 | # qhasm: uint32323232 y8 += z11 | ||
2095 | # asm 1: paddd <z11=int6464#7,<y8=int6464#1 | ||
2096 | # asm 2: paddd <z11=%xmm6,<y8=%xmm0 | ||
2097 | paddd %xmm6,%xmm0 | ||
2098 | |||
2099 | # qhasm: r8 = y8 | ||
2100 | # asm 1: movdqa <y8=int6464#1,>r8=int6464#13 | ||
2101 | # asm 2: movdqa <y8=%xmm0,>r8=%xmm12 | ||
2102 | movdqa %xmm0,%xmm12 | ||
2103 | |||
2104 | # qhasm: uint32323232 y8 <<= 9 | ||
2105 | # asm 1: pslld $9,<y8=int6464#1 | ||
2106 | # asm 2: pslld $9,<y8=%xmm0 | ||
2107 | pslld $9,%xmm0 | ||
2108 | |||
2109 | # qhasm: z8 ^= y8 | ||
2110 | # asm 1: pxor <y8=int6464#1,<z8=int6464#16 | ||
2111 | # asm 2: pxor <y8=%xmm0,<z8=%xmm15 | ||
2112 | pxor %xmm0,%xmm15 | ||
2113 | |||
2114 | # qhasm: uint32323232 r8 >>= 23 | ||
2115 | # asm 1: psrld $23,<r8=int6464#13 | ||
2116 | # asm 2: psrld $23,<r8=%xmm12 | ||
2117 | psrld $23,%xmm12 | ||
2118 | |||
2119 | # qhasm: z8 ^= r8 | ||
2120 | # asm 1: pxor <r8=int6464#13,<z8=int6464#16 | ||
2121 | # asm 2: pxor <r8=%xmm12,<z8=%xmm15 | ||
2122 | pxor %xmm12,%xmm15 | ||
2123 | |||
2124 | # qhasm: y13 = z15 | ||
2125 | # asm 1: movdqa <z15=int6464#3,>y13=int6464#1 | ||
2126 | # asm 2: movdqa <z15=%xmm2,>y13=%xmm0 | ||
2127 | movdqa %xmm2,%xmm0 | ||
2128 | |||
2129 | # qhasm: uint32323232 y13 += z12 | ||
2130 | # asm 1: paddd <z12=int6464#14,<y13=int6464#1 | ||
2131 | # asm 2: paddd <z12=%xmm13,<y13=%xmm0 | ||
2132 | paddd %xmm13,%xmm0 | ||
2133 | |||
2134 | # qhasm: r13 = y13 | ||
2135 | # asm 1: movdqa <y13=int6464#1,>r13=int6464#13 | ||
2136 | # asm 2: movdqa <y13=%xmm0,>r13=%xmm12 | ||
2137 | movdqa %xmm0,%xmm12 | ||
2138 | |||
2139 | # qhasm: uint32323232 y13 <<= 9 | ||
2140 | # asm 1: pslld $9,<y13=int6464#1 | ||
2141 | # asm 2: pslld $9,<y13=%xmm0 | ||
2142 | pslld $9,%xmm0 | ||
2143 | |||
2144 | # qhasm: z13 ^= y13 | ||
2145 | # asm 1: pxor <y13=int6464#1,<z13=int6464#10 | ||
2146 | # asm 2: pxor <y13=%xmm0,<z13=%xmm9 | ||
2147 | pxor %xmm0,%xmm9 | ||
2148 | |||
2149 | # qhasm: uint32323232 r13 >>= 23 | ||
2150 | # asm 1: psrld $23,<r13=int6464#13 | ||
2151 | # asm 2: psrld $23,<r13=%xmm12 | ||
2152 | psrld $23,%xmm12 | ||
2153 | |||
2154 | # qhasm: z13 ^= r13 | ||
2155 | # asm 1: pxor <r13=int6464#13,<z13=int6464#10 | ||
2156 | # asm 2: pxor <r13=%xmm12,<z13=%xmm9 | ||
2157 | pxor %xmm12,%xmm9 | ||
2158 | |||
2159 | # qhasm: y9 = z11 | ||
2160 | # asm 1: movdqa <z11=int6464#7,>y9=int6464#1 | ||
2161 | # asm 2: movdqa <z11=%xmm6,>y9=%xmm0 | ||
2162 | movdqa %xmm6,%xmm0 | ||
2163 | |||
2164 | # qhasm: uint32323232 y9 += z8 | ||
2165 | # asm 1: paddd <z8=int6464#16,<y9=int6464#1 | ||
2166 | # asm 2: paddd <z8=%xmm15,<y9=%xmm0 | ||
2167 | paddd %xmm15,%xmm0 | ||
2168 | |||
2169 | # qhasm: r9 = y9 | ||
2170 | # asm 1: movdqa <y9=int6464#1,>r9=int6464#13 | ||
2171 | # asm 2: movdqa <y9=%xmm0,>r9=%xmm12 | ||
2172 | movdqa %xmm0,%xmm12 | ||
2173 | |||
2174 | # qhasm: uint32323232 y9 <<= 13 | ||
2175 | # asm 1: pslld $13,<y9=int6464#1 | ||
2176 | # asm 2: pslld $13,<y9=%xmm0 | ||
2177 | pslld $13,%xmm0 | ||
2178 | |||
2179 | # qhasm: z9 ^= y9 | ||
2180 | # asm 1: pxor <y9=int6464#1,<z9=int6464#12 | ||
2181 | # asm 2: pxor <y9=%xmm0,<z9=%xmm11 | ||
2182 | pxor %xmm0,%xmm11 | ||
2183 | |||
2184 | # qhasm: uint32323232 r9 >>= 19 | ||
2185 | # asm 1: psrld $19,<r9=int6464#13 | ||
2186 | # asm 2: psrld $19,<r9=%xmm12 | ||
2187 | psrld $19,%xmm12 | ||
2188 | |||
2189 | # qhasm: z9 ^= r9 | ||
2190 | # asm 1: pxor <r9=int6464#13,<z9=int6464#12 | ||
2191 | # asm 2: pxor <r9=%xmm12,<z9=%xmm11 | ||
2192 | pxor %xmm12,%xmm11 | ||
2193 | |||
2194 | # qhasm: y14 = z12 | ||
2195 | # asm 1: movdqa <z12=int6464#14,>y14=int6464#1 | ||
2196 | # asm 2: movdqa <z12=%xmm13,>y14=%xmm0 | ||
2197 | movdqa %xmm13,%xmm0 | ||
2198 | |||
2199 | # qhasm: uint32323232 y14 += z13 | ||
2200 | # asm 1: paddd <z13=int6464#10,<y14=int6464#1 | ||
2201 | # asm 2: paddd <z13=%xmm9,<y14=%xmm0 | ||
2202 | paddd %xmm9,%xmm0 | ||
2203 | |||
2204 | # qhasm: r14 = y14 | ||
2205 | # asm 1: movdqa <y14=int6464#1,>r14=int6464#13 | ||
2206 | # asm 2: movdqa <y14=%xmm0,>r14=%xmm12 | ||
2207 | movdqa %xmm0,%xmm12 | ||
2208 | |||
2209 | # qhasm: uint32323232 y14 <<= 13 | ||
2210 | # asm 1: pslld $13,<y14=int6464#1 | ||
2211 | # asm 2: pslld $13,<y14=%xmm0 | ||
2212 | pslld $13,%xmm0 | ||
2213 | |||
2214 | # qhasm: z14 ^= y14 | ||
2215 | # asm 1: pxor <y14=int6464#1,<z14=int6464#4 | ||
2216 | # asm 2: pxor <y14=%xmm0,<z14=%xmm3 | ||
2217 | pxor %xmm0,%xmm3 | ||
2218 | |||
2219 | # qhasm: uint32323232 r14 >>= 19 | ||
2220 | # asm 1: psrld $19,<r14=int6464#13 | ||
2221 | # asm 2: psrld $19,<r14=%xmm12 | ||
2222 | psrld $19,%xmm12 | ||
2223 | |||
2224 | # qhasm: z14 ^= r14 | ||
2225 | # asm 1: pxor <r14=int6464#13,<z14=int6464#4 | ||
2226 | # asm 2: pxor <r14=%xmm12,<z14=%xmm3 | ||
2227 | pxor %xmm12,%xmm3 | ||
2228 | |||
2229 | # qhasm: y10 = z8 | ||
2230 | # asm 1: movdqa <z8=int6464#16,>y10=int6464#1 | ||
2231 | # asm 2: movdqa <z8=%xmm15,>y10=%xmm0 | ||
2232 | movdqa %xmm15,%xmm0 | ||
2233 | |||
2234 | # qhasm: uint32323232 y10 += z9 | ||
2235 | # asm 1: paddd <z9=int6464#12,<y10=int6464#1 | ||
2236 | # asm 2: paddd <z9=%xmm11,<y10=%xmm0 | ||
2237 | paddd %xmm11,%xmm0 | ||
2238 | |||
2239 | # qhasm: r10 = y10 | ||
2240 | # asm 1: movdqa <y10=int6464#1,>r10=int6464#13 | ||
2241 | # asm 2: movdqa <y10=%xmm0,>r10=%xmm12 | ||
2242 | movdqa %xmm0,%xmm12 | ||
2243 | |||
2244 | # qhasm: uint32323232 y10 <<= 18 | ||
2245 | # asm 1: pslld $18,<y10=int6464#1 | ||
2246 | # asm 2: pslld $18,<y10=%xmm0 | ||
2247 | pslld $18,%xmm0 | ||
2248 | |||
2249 | # qhasm: z10 ^= y10 | ||
2250 | # asm 1: pxor <y10=int6464#1,<z10=int6464#2 | ||
2251 | # asm 2: pxor <y10=%xmm0,<z10=%xmm1 | ||
2252 | pxor %xmm0,%xmm1 | ||
2253 | |||
2254 | # qhasm: uint32323232 r10 >>= 14 | ||
2255 | # asm 1: psrld $14,<r10=int6464#13 | ||
2256 | # asm 2: psrld $14,<r10=%xmm12 | ||
2257 | psrld $14,%xmm12 | ||
2258 | |||
2259 | # qhasm: z10 ^= r10 | ||
2260 | # asm 1: pxor <r10=int6464#13,<z10=int6464#2 | ||
2261 | # asm 2: pxor <r10=%xmm12,<z10=%xmm1 | ||
2262 | pxor %xmm12,%xmm1 | ||
2263 | |||
2264 | # qhasm: y15 = z13 | ||
2265 | # asm 1: movdqa <z13=int6464#10,>y15=int6464#1 | ||
2266 | # asm 2: movdqa <z13=%xmm9,>y15=%xmm0 | ||
2267 | movdqa %xmm9,%xmm0 | ||
2268 | |||
2269 | # qhasm: uint32323232 y15 += z14 | ||
2270 | # asm 1: paddd <z14=int6464#4,<y15=int6464#1 | ||
2271 | # asm 2: paddd <z14=%xmm3,<y15=%xmm0 | ||
2272 | paddd %xmm3,%xmm0 | ||
2273 | |||
2274 | # qhasm: r15 = y15 | ||
2275 | # asm 1: movdqa <y15=int6464#1,>r15=int6464#13 | ||
2276 | # asm 2: movdqa <y15=%xmm0,>r15=%xmm12 | ||
2277 | movdqa %xmm0,%xmm12 | ||
2278 | |||
2279 | # qhasm: uint32323232 y15 <<= 18 | ||
2280 | # asm 1: pslld $18,<y15=int6464#1 | ||
2281 | # asm 2: pslld $18,<y15=%xmm0 | ||
2282 | pslld $18,%xmm0 | ||
2283 | |||
2284 | # qhasm: z15 ^= y15 | ||
2285 | # asm 1: pxor <y15=int6464#1,<z15=int6464#3 | ||
2286 | # asm 2: pxor <y15=%xmm0,<z15=%xmm2 | ||
2287 | pxor %xmm0,%xmm2 | ||
2288 | |||
2289 | # qhasm: uint32323232 r15 >>= 14 | ||
2290 | # asm 1: psrld $14,<r15=int6464#13 | ||
2291 | # asm 2: psrld $14,<r15=%xmm12 | ||
2292 | psrld $14,%xmm12 | ||
2293 | |||
2294 | # qhasm: z15 ^= r15 | ||
2295 | # asm 1: pxor <r15=int6464#13,<z15=int6464#3 | ||
2296 | # asm 2: pxor <r15=%xmm12,<z15=%xmm2 | ||
2297 | pxor %xmm12,%xmm2 | ||
2298 | |||
2299 | # qhasm: z0 = z0_stack | ||
2300 | # asm 1: movdqa <z0_stack=stack128#21,>z0=int6464#13 | ||
2301 | # asm 2: movdqa <z0_stack=320(%rsp),>z0=%xmm12 | ||
2302 | movdqa 320(%rsp),%xmm12 | ||
2303 | |||
2304 | # qhasm: z5 = z5_stack | ||
2305 | # asm 1: movdqa <z5_stack=stack128#22,>z5=int6464#1 | ||
2306 | # asm 2: movdqa <z5_stack=336(%rsp),>z5=%xmm0 | ||
2307 | movdqa 336(%rsp),%xmm0 | ||
2308 | |||
2309 | # qhasm: unsigned>? i -= 2 | ||
2310 | # asm 1: sub $2,<i=int64#3 | ||
2311 | # asm 2: sub $2,<i=%rdx | ||
2312 | sub $2,%rdx | ||
2313 | # comment:fp stack unchanged by jump | ||
2314 | |||
2315 | # qhasm: goto mainloop1 if unsigned> | ||
2316 | ja ._mainloop1 | ||
2317 | |||
2318 | # qhasm: uint32323232 z0 += orig0 | ||
2319 | # asm 1: paddd <orig0=stack128#8,<z0=int6464#13 | ||
2320 | # asm 2: paddd <orig0=112(%rsp),<z0=%xmm12 | ||
2321 | paddd 112(%rsp),%xmm12 | ||
2322 | |||
2323 | # qhasm: uint32323232 z1 += orig1 | ||
2324 | # asm 1: paddd <orig1=stack128#12,<z1=int6464#8 | ||
2325 | # asm 2: paddd <orig1=176(%rsp),<z1=%xmm7 | ||
2326 | paddd 176(%rsp),%xmm7 | ||
2327 | |||
2328 | # qhasm: uint32323232 z2 += orig2 | ||
2329 | # asm 1: paddd <orig2=stack128#15,<z2=int6464#11 | ||
2330 | # asm 2: paddd <orig2=224(%rsp),<z2=%xmm10 | ||
2331 | paddd 224(%rsp),%xmm10 | ||
2332 | |||
2333 | # qhasm: uint32323232 z3 += orig3 | ||
2334 | # asm 1: paddd <orig3=stack128#18,<z3=int6464#5 | ||
2335 | # asm 2: paddd <orig3=272(%rsp),<z3=%xmm4 | ||
2336 | paddd 272(%rsp),%xmm4 | ||
2337 | |||
2338 | # qhasm: in0 = z0 | ||
2339 | # asm 1: movd <z0=int6464#13,>in0=int64#3 | ||
2340 | # asm 2: movd <z0=%xmm12,>in0=%rdx | ||
2341 | movd %xmm12,%rdx | ||
2342 | |||
2343 | # qhasm: in1 = z1 | ||
2344 | # asm 1: movd <z1=int6464#8,>in1=int64#4 | ||
2345 | # asm 2: movd <z1=%xmm7,>in1=%rcx | ||
2346 | movd %xmm7,%rcx | ||
2347 | |||
2348 | # qhasm: in2 = z2 | ||
2349 | # asm 1: movd <z2=int6464#11,>in2=int64#5 | ||
2350 | # asm 2: movd <z2=%xmm10,>in2=%r8 | ||
2351 | movd %xmm10,%r8 | ||
2352 | |||
2353 | # qhasm: in3 = z3 | ||
2354 | # asm 1: movd <z3=int6464#5,>in3=int64#6 | ||
2355 | # asm 2: movd <z3=%xmm4,>in3=%r9 | ||
2356 | movd %xmm4,%r9 | ||
2357 | |||
2358 | # qhasm: z0 <<<= 96 | ||
2359 | # asm 1: pshufd $0x39,<z0=int6464#13,<z0=int6464#13 | ||
2360 | # asm 2: pshufd $0x39,<z0=%xmm12,<z0=%xmm12 | ||
2361 | pshufd $0x39,%xmm12,%xmm12 | ||
2362 | |||
2363 | # qhasm: z1 <<<= 96 | ||
2364 | # asm 1: pshufd $0x39,<z1=int6464#8,<z1=int6464#8 | ||
2365 | # asm 2: pshufd $0x39,<z1=%xmm7,<z1=%xmm7 | ||
2366 | pshufd $0x39,%xmm7,%xmm7 | ||
2367 | |||
2368 | # qhasm: z2 <<<= 96 | ||
2369 | # asm 1: pshufd $0x39,<z2=int6464#11,<z2=int6464#11 | ||
2370 | # asm 2: pshufd $0x39,<z2=%xmm10,<z2=%xmm10 | ||
2371 | pshufd $0x39,%xmm10,%xmm10 | ||
2372 | |||
2373 | # qhasm: z3 <<<= 96 | ||
2374 | # asm 1: pshufd $0x39,<z3=int6464#5,<z3=int6464#5 | ||
2375 | # asm 2: pshufd $0x39,<z3=%xmm4,<z3=%xmm4 | ||
2376 | pshufd $0x39,%xmm4,%xmm4 | ||
2377 | |||
2378 | # qhasm: (uint32) in0 ^= *(uint32 *) (m + 0) | ||
2379 | # asm 1: xorl 0(<m=int64#2),<in0=int64#3d | ||
2380 | # asm 2: xorl 0(<m=%rsi),<in0=%edx | ||
2381 | xorl 0(%rsi),%edx | ||
2382 | |||
2383 | # qhasm: (uint32) in1 ^= *(uint32 *) (m + 4) | ||
2384 | # asm 1: xorl 4(<m=int64#2),<in1=int64#4d | ||
2385 | # asm 2: xorl 4(<m=%rsi),<in1=%ecx | ||
2386 | xorl 4(%rsi),%ecx | ||
2387 | |||
2388 | # qhasm: (uint32) in2 ^= *(uint32 *) (m + 8) | ||
2389 | # asm 1: xorl 8(<m=int64#2),<in2=int64#5d | ||
2390 | # asm 2: xorl 8(<m=%rsi),<in2=%r8d | ||
2391 | xorl 8(%rsi),%r8d | ||
2392 | |||
2393 | # qhasm: (uint32) in3 ^= *(uint32 *) (m + 12) | ||
2394 | # asm 1: xorl 12(<m=int64#2),<in3=int64#6d | ||
2395 | # asm 2: xorl 12(<m=%rsi),<in3=%r9d | ||
2396 | xorl 12(%rsi),%r9d | ||
2397 | |||
2398 | # qhasm: *(uint32 *) (out + 0) = in0 | ||
2399 | # asm 1: movl <in0=int64#3d,0(<out=int64#1) | ||
2400 | # asm 2: movl <in0=%edx,0(<out=%rdi) | ||
2401 | movl %edx,0(%rdi) | ||
2402 | |||
2403 | # qhasm: *(uint32 *) (out + 4) = in1 | ||
2404 | # asm 1: movl <in1=int64#4d,4(<out=int64#1) | ||
2405 | # asm 2: movl <in1=%ecx,4(<out=%rdi) | ||
2406 | movl %ecx,4(%rdi) | ||
2407 | |||
2408 | # qhasm: *(uint32 *) (out + 8) = in2 | ||
2409 | # asm 1: movl <in2=int64#5d,8(<out=int64#1) | ||
2410 | # asm 2: movl <in2=%r8d,8(<out=%rdi) | ||
2411 | movl %r8d,8(%rdi) | ||
2412 | |||
2413 | # qhasm: *(uint32 *) (out + 12) = in3 | ||
2414 | # asm 1: movl <in3=int64#6d,12(<out=int64#1) | ||
2415 | # asm 2: movl <in3=%r9d,12(<out=%rdi) | ||
2416 | movl %r9d,12(%rdi) | ||
2417 | |||
2418 | # qhasm: in0 = z0 | ||
2419 | # asm 1: movd <z0=int6464#13,>in0=int64#3 | ||
2420 | # asm 2: movd <z0=%xmm12,>in0=%rdx | ||
2421 | movd %xmm12,%rdx | ||
2422 | |||
2423 | # qhasm: in1 = z1 | ||
2424 | # asm 1: movd <z1=int6464#8,>in1=int64#4 | ||
2425 | # asm 2: movd <z1=%xmm7,>in1=%rcx | ||
2426 | movd %xmm7,%rcx | ||
2427 | |||
2428 | # qhasm: in2 = z2 | ||
2429 | # asm 1: movd <z2=int6464#11,>in2=int64#5 | ||
2430 | # asm 2: movd <z2=%xmm10,>in2=%r8 | ||
2431 | movd %xmm10,%r8 | ||
2432 | |||
2433 | # qhasm: in3 = z3 | ||
2434 | # asm 1: movd <z3=int6464#5,>in3=int64#6 | ||
2435 | # asm 2: movd <z3=%xmm4,>in3=%r9 | ||
2436 | movd %xmm4,%r9 | ||
2437 | |||
2438 | # qhasm: z0 <<<= 96 | ||
2439 | # asm 1: pshufd $0x39,<z0=int6464#13,<z0=int6464#13 | ||
2440 | # asm 2: pshufd $0x39,<z0=%xmm12,<z0=%xmm12 | ||
2441 | pshufd $0x39,%xmm12,%xmm12 | ||
2442 | |||
2443 | # qhasm: z1 <<<= 96 | ||
2444 | # asm 1: pshufd $0x39,<z1=int6464#8,<z1=int6464#8 | ||
2445 | # asm 2: pshufd $0x39,<z1=%xmm7,<z1=%xmm7 | ||
2446 | pshufd $0x39,%xmm7,%xmm7 | ||
2447 | |||
2448 | # qhasm: z2 <<<= 96 | ||
2449 | # asm 1: pshufd $0x39,<z2=int6464#11,<z2=int6464#11 | ||
2450 | # asm 2: pshufd $0x39,<z2=%xmm10,<z2=%xmm10 | ||
2451 | pshufd $0x39,%xmm10,%xmm10 | ||
2452 | |||
2453 | # qhasm: z3 <<<= 96 | ||
2454 | # asm 1: pshufd $0x39,<z3=int6464#5,<z3=int6464#5 | ||
2455 | # asm 2: pshufd $0x39,<z3=%xmm4,<z3=%xmm4 | ||
2456 | pshufd $0x39,%xmm4,%xmm4 | ||
2457 | |||
2458 | # qhasm: (uint32) in0 ^= *(uint32 *) (m + 64) | ||
2459 | # asm 1: xorl 64(<m=int64#2),<in0=int64#3d | ||
2460 | # asm 2: xorl 64(<m=%rsi),<in0=%edx | ||
2461 | xorl 64(%rsi),%edx | ||
2462 | |||
2463 | # qhasm: (uint32) in1 ^= *(uint32 *) (m + 68) | ||
2464 | # asm 1: xorl 68(<m=int64#2),<in1=int64#4d | ||
2465 | # asm 2: xorl 68(<m=%rsi),<in1=%ecx | ||
2466 | xorl 68(%rsi),%ecx | ||
2467 | |||
2468 | # qhasm: (uint32) in2 ^= *(uint32 *) (m + 72) | ||
2469 | # asm 1: xorl 72(<m=int64#2),<in2=int64#5d | ||
2470 | # asm 2: xorl 72(<m=%rsi),<in2=%r8d | ||
2471 | xorl 72(%rsi),%r8d | ||
2472 | |||
2473 | # qhasm: (uint32) in3 ^= *(uint32 *) (m + 76) | ||
2474 | # asm 1: xorl 76(<m=int64#2),<in3=int64#6d | ||
2475 | # asm 2: xorl 76(<m=%rsi),<in3=%r9d | ||
2476 | xorl 76(%rsi),%r9d | ||
2477 | |||
2478 | # qhasm: *(uint32 *) (out + 64) = in0 | ||
2479 | # asm 1: movl <in0=int64#3d,64(<out=int64#1) | ||
2480 | # asm 2: movl <in0=%edx,64(<out=%rdi) | ||
2481 | movl %edx,64(%rdi) | ||
2482 | |||
2483 | # qhasm: *(uint32 *) (out + 68) = in1 | ||
2484 | # asm 1: movl <in1=int64#4d,68(<out=int64#1) | ||
2485 | # asm 2: movl <in1=%ecx,68(<out=%rdi) | ||
2486 | movl %ecx,68(%rdi) | ||
2487 | |||
2488 | # qhasm: *(uint32 *) (out + 72) = in2 | ||
2489 | # asm 1: movl <in2=int64#5d,72(<out=int64#1) | ||
2490 | # asm 2: movl <in2=%r8d,72(<out=%rdi) | ||
2491 | movl %r8d,72(%rdi) | ||
2492 | |||
2493 | # qhasm: *(uint32 *) (out + 76) = in3 | ||
2494 | # asm 1: movl <in3=int64#6d,76(<out=int64#1) | ||
2495 | # asm 2: movl <in3=%r9d,76(<out=%rdi) | ||
2496 | movl %r9d,76(%rdi) | ||
2497 | |||
2498 | # qhasm: in0 = z0 | ||
2499 | # asm 1: movd <z0=int6464#13,>in0=int64#3 | ||
2500 | # asm 2: movd <z0=%xmm12,>in0=%rdx | ||
2501 | movd %xmm12,%rdx | ||
2502 | |||
2503 | # qhasm: in1 = z1 | ||
2504 | # asm 1: movd <z1=int6464#8,>in1=int64#4 | ||
2505 | # asm 2: movd <z1=%xmm7,>in1=%rcx | ||
2506 | movd %xmm7,%rcx | ||
2507 | |||
2508 | # qhasm: in2 = z2 | ||
2509 | # asm 1: movd <z2=int6464#11,>in2=int64#5 | ||
2510 | # asm 2: movd <z2=%xmm10,>in2=%r8 | ||
2511 | movd %xmm10,%r8 | ||
2512 | |||
2513 | # qhasm: in3 = z3 | ||
2514 | # asm 1: movd <z3=int6464#5,>in3=int64#6 | ||
2515 | # asm 2: movd <z3=%xmm4,>in3=%r9 | ||
2516 | movd %xmm4,%r9 | ||
2517 | |||
2518 | # qhasm: z0 <<<= 96 | ||
2519 | # asm 1: pshufd $0x39,<z0=int6464#13,<z0=int6464#13 | ||
2520 | # asm 2: pshufd $0x39,<z0=%xmm12,<z0=%xmm12 | ||
2521 | pshufd $0x39,%xmm12,%xmm12 | ||
2522 | |||
2523 | # qhasm: z1 <<<= 96 | ||
2524 | # asm 1: pshufd $0x39,<z1=int6464#8,<z1=int6464#8 | ||
2525 | # asm 2: pshufd $0x39,<z1=%xmm7,<z1=%xmm7 | ||
2526 | pshufd $0x39,%xmm7,%xmm7 | ||
2527 | |||
2528 | # qhasm: z2 <<<= 96 | ||
2529 | # asm 1: pshufd $0x39,<z2=int6464#11,<z2=int6464#11 | ||
2530 | # asm 2: pshufd $0x39,<z2=%xmm10,<z2=%xmm10 | ||
2531 | pshufd $0x39,%xmm10,%xmm10 | ||
2532 | |||
2533 | # qhasm: z3 <<<= 96 | ||
2534 | # asm 1: pshufd $0x39,<z3=int6464#5,<z3=int6464#5 | ||
2535 | # asm 2: pshufd $0x39,<z3=%xmm4,<z3=%xmm4 | ||
2536 | pshufd $0x39,%xmm4,%xmm4 | ||
2537 | |||
2538 | # qhasm: (uint32) in0 ^= *(uint32 *) (m + 128) | ||
2539 | # asm 1: xorl 128(<m=int64#2),<in0=int64#3d | ||
2540 | # asm 2: xorl 128(<m=%rsi),<in0=%edx | ||
2541 | xorl 128(%rsi),%edx | ||
2542 | |||
2543 | # qhasm: (uint32) in1 ^= *(uint32 *) (m + 132) | ||
2544 | # asm 1: xorl 132(<m=int64#2),<in1=int64#4d | ||
2545 | # asm 2: xorl 132(<m=%rsi),<in1=%ecx | ||
2546 | xorl 132(%rsi),%ecx | ||
2547 | |||
2548 | # qhasm: (uint32) in2 ^= *(uint32 *) (m + 136) | ||
2549 | # asm 1: xorl 136(<m=int64#2),<in2=int64#5d | ||
2550 | # asm 2: xorl 136(<m=%rsi),<in2=%r8d | ||
2551 | xorl 136(%rsi),%r8d | ||
2552 | |||
2553 | # qhasm: (uint32) in3 ^= *(uint32 *) (m + 140) | ||
2554 | # asm 1: xorl 140(<m=int64#2),<in3=int64#6d | ||
2555 | # asm 2: xorl 140(<m=%rsi),<in3=%r9d | ||
2556 | xorl 140(%rsi),%r9d | ||
2557 | |||
2558 | # qhasm: *(uint32 *) (out + 128) = in0 | ||
2559 | # asm 1: movl <in0=int64#3d,128(<out=int64#1) | ||
2560 | # asm 2: movl <in0=%edx,128(<out=%rdi) | ||
2561 | movl %edx,128(%rdi) | ||
2562 | |||
2563 | # qhasm: *(uint32 *) (out + 132) = in1 | ||
2564 | # asm 1: movl <in1=int64#4d,132(<out=int64#1) | ||
2565 | # asm 2: movl <in1=%ecx,132(<out=%rdi) | ||
2566 | movl %ecx,132(%rdi) | ||
2567 | |||
2568 | # qhasm: *(uint32 *) (out + 136) = in2 | ||
2569 | # asm 1: movl <in2=int64#5d,136(<out=int64#1) | ||
2570 | # asm 2: movl <in2=%r8d,136(<out=%rdi) | ||
2571 | movl %r8d,136(%rdi) | ||
2572 | |||
2573 | # qhasm: *(uint32 *) (out + 140) = in3 | ||
2574 | # asm 1: movl <in3=int64#6d,140(<out=int64#1) | ||
2575 | # asm 2: movl <in3=%r9d,140(<out=%rdi) | ||
2576 | movl %r9d,140(%rdi) | ||
2577 | |||
2578 | # qhasm: in0 = z0 | ||
2579 | # asm 1: movd <z0=int6464#13,>in0=int64#3 | ||
2580 | # asm 2: movd <z0=%xmm12,>in0=%rdx | ||
2581 | movd %xmm12,%rdx | ||
2582 | |||
2583 | # qhasm: in1 = z1 | ||
2584 | # asm 1: movd <z1=int6464#8,>in1=int64#4 | ||
2585 | # asm 2: movd <z1=%xmm7,>in1=%rcx | ||
2586 | movd %xmm7,%rcx | ||
2587 | |||
2588 | # qhasm: in2 = z2 | ||
2589 | # asm 1: movd <z2=int6464#11,>in2=int64#5 | ||
2590 | # asm 2: movd <z2=%xmm10,>in2=%r8 | ||
2591 | movd %xmm10,%r8 | ||
2592 | |||
2593 | # qhasm: in3 = z3 | ||
2594 | # asm 1: movd <z3=int6464#5,>in3=int64#6 | ||
2595 | # asm 2: movd <z3=%xmm4,>in3=%r9 | ||
2596 | movd %xmm4,%r9 | ||
2597 | |||
2598 | # qhasm: (uint32) in0 ^= *(uint32 *) (m + 192) | ||
2599 | # asm 1: xorl 192(<m=int64#2),<in0=int64#3d | ||
2600 | # asm 2: xorl 192(<m=%rsi),<in0=%edx | ||
2601 | xorl 192(%rsi),%edx | ||
2602 | |||
2603 | # qhasm: (uint32) in1 ^= *(uint32 *) (m + 196) | ||
2604 | # asm 1: xorl 196(<m=int64#2),<in1=int64#4d | ||
2605 | # asm 2: xorl 196(<m=%rsi),<in1=%ecx | ||
2606 | xorl 196(%rsi),%ecx | ||
2607 | |||
2608 | # qhasm: (uint32) in2 ^= *(uint32 *) (m + 200) | ||
2609 | # asm 1: xorl 200(<m=int64#2),<in2=int64#5d | ||
2610 | # asm 2: xorl 200(<m=%rsi),<in2=%r8d | ||
2611 | xorl 200(%rsi),%r8d | ||
2612 | |||
2613 | # qhasm: (uint32) in3 ^= *(uint32 *) (m + 204) | ||
2614 | # asm 1: xorl 204(<m=int64#2),<in3=int64#6d | ||
2615 | # asm 2: xorl 204(<m=%rsi),<in3=%r9d | ||
2616 | xorl 204(%rsi),%r9d | ||
2617 | |||
2618 | # qhasm: *(uint32 *) (out + 192) = in0 | ||
2619 | # asm 1: movl <in0=int64#3d,192(<out=int64#1) | ||
2620 | # asm 2: movl <in0=%edx,192(<out=%rdi) | ||
2621 | movl %edx,192(%rdi) | ||
2622 | |||
2623 | # qhasm: *(uint32 *) (out + 196) = in1 | ||
2624 | # asm 1: movl <in1=int64#4d,196(<out=int64#1) | ||
2625 | # asm 2: movl <in1=%ecx,196(<out=%rdi) | ||
2626 | movl %ecx,196(%rdi) | ||
2627 | |||
2628 | # qhasm: *(uint32 *) (out + 200) = in2 | ||
2629 | # asm 1: movl <in2=int64#5d,200(<out=int64#1) | ||
2630 | # asm 2: movl <in2=%r8d,200(<out=%rdi) | ||
2631 | movl %r8d,200(%rdi) | ||
2632 | |||
2633 | # qhasm: *(uint32 *) (out + 204) = in3 | ||
2634 | # asm 1: movl <in3=int64#6d,204(<out=int64#1) | ||
2635 | # asm 2: movl <in3=%r9d,204(<out=%rdi) | ||
2636 | movl %r9d,204(%rdi) | ||
2637 | |||
2638 | # qhasm: uint32323232 z4 += orig4 | ||
2639 | # asm 1: paddd <orig4=stack128#16,<z4=int6464#15 | ||
2640 | # asm 2: paddd <orig4=240(%rsp),<z4=%xmm14 | ||
2641 | paddd 240(%rsp),%xmm14 | ||
2642 | |||
2643 | # qhasm: uint32323232 z5 += orig5 | ||
2644 | # asm 1: paddd <orig5=stack128#5,<z5=int6464#1 | ||
2645 | # asm 2: paddd <orig5=64(%rsp),<z5=%xmm0 | ||
2646 | paddd 64(%rsp),%xmm0 | ||
2647 | |||
2648 | # qhasm: uint32323232 z6 += orig6 | ||
2649 | # asm 1: paddd <orig6=stack128#9,<z6=int6464#6 | ||
2650 | # asm 2: paddd <orig6=128(%rsp),<z6=%xmm5 | ||
2651 | paddd 128(%rsp),%xmm5 | ||
2652 | |||
2653 | # qhasm: uint32323232 z7 += orig7 | ||
2654 | # asm 1: paddd <orig7=stack128#13,<z7=int6464#9 | ||
2655 | # asm 2: paddd <orig7=192(%rsp),<z7=%xmm8 | ||
2656 | paddd 192(%rsp),%xmm8 | ||
2657 | |||
2658 | # qhasm: in4 = z4 | ||
2659 | # asm 1: movd <z4=int6464#15,>in4=int64#3 | ||
2660 | # asm 2: movd <z4=%xmm14,>in4=%rdx | ||
2661 | movd %xmm14,%rdx | ||
2662 | |||
2663 | # qhasm: in5 = z5 | ||
2664 | # asm 1: movd <z5=int6464#1,>in5=int64#4 | ||
2665 | # asm 2: movd <z5=%xmm0,>in5=%rcx | ||
2666 | movd %xmm0,%rcx | ||
2667 | |||
2668 | # qhasm: in6 = z6 | ||
2669 | # asm 1: movd <z6=int6464#6,>in6=int64#5 | ||
2670 | # asm 2: movd <z6=%xmm5,>in6=%r8 | ||
2671 | movd %xmm5,%r8 | ||
2672 | |||
2673 | # qhasm: in7 = z7 | ||
2674 | # asm 1: movd <z7=int6464#9,>in7=int64#6 | ||
2675 | # asm 2: movd <z7=%xmm8,>in7=%r9 | ||
2676 | movd %xmm8,%r9 | ||
2677 | |||
2678 | # qhasm: z4 <<<= 96 | ||
2679 | # asm 1: pshufd $0x39,<z4=int6464#15,<z4=int6464#15 | ||
2680 | # asm 2: pshufd $0x39,<z4=%xmm14,<z4=%xmm14 | ||
2681 | pshufd $0x39,%xmm14,%xmm14 | ||
2682 | |||
2683 | # qhasm: z5 <<<= 96 | ||
2684 | # asm 1: pshufd $0x39,<z5=int6464#1,<z5=int6464#1 | ||
2685 | # asm 2: pshufd $0x39,<z5=%xmm0,<z5=%xmm0 | ||
2686 | pshufd $0x39,%xmm0,%xmm0 | ||
2687 | |||
2688 | # qhasm: z6 <<<= 96 | ||
2689 | # asm 1: pshufd $0x39,<z6=int6464#6,<z6=int6464#6 | ||
2690 | # asm 2: pshufd $0x39,<z6=%xmm5,<z6=%xmm5 | ||
2691 | pshufd $0x39,%xmm5,%xmm5 | ||
2692 | |||
2693 | # qhasm: z7 <<<= 96 | ||
2694 | # asm 1: pshufd $0x39,<z7=int6464#9,<z7=int6464#9 | ||
2695 | # asm 2: pshufd $0x39,<z7=%xmm8,<z7=%xmm8 | ||
2696 | pshufd $0x39,%xmm8,%xmm8 | ||
2697 | |||
2698 | # qhasm: (uint32) in4 ^= *(uint32 *) (m + 16) | ||
2699 | # asm 1: xorl 16(<m=int64#2),<in4=int64#3d | ||
2700 | # asm 2: xorl 16(<m=%rsi),<in4=%edx | ||
2701 | xorl 16(%rsi),%edx | ||
2702 | |||
2703 | # qhasm: (uint32) in5 ^= *(uint32 *) (m + 20) | ||
2704 | # asm 1: xorl 20(<m=int64#2),<in5=int64#4d | ||
2705 | # asm 2: xorl 20(<m=%rsi),<in5=%ecx | ||
2706 | xorl 20(%rsi),%ecx | ||
2707 | |||
2708 | # qhasm: (uint32) in6 ^= *(uint32 *) (m + 24) | ||
2709 | # asm 1: xorl 24(<m=int64#2),<in6=int64#5d | ||
2710 | # asm 2: xorl 24(<m=%rsi),<in6=%r8d | ||
2711 | xorl 24(%rsi),%r8d | ||
2712 | |||
2713 | # qhasm: (uint32) in7 ^= *(uint32 *) (m + 28) | ||
2714 | # asm 1: xorl 28(<m=int64#2),<in7=int64#6d | ||
2715 | # asm 2: xorl 28(<m=%rsi),<in7=%r9d | ||
2716 | xorl 28(%rsi),%r9d | ||
2717 | |||
2718 | # qhasm: *(uint32 *) (out + 16) = in4 | ||
2719 | # asm 1: movl <in4=int64#3d,16(<out=int64#1) | ||
2720 | # asm 2: movl <in4=%edx,16(<out=%rdi) | ||
2721 | movl %edx,16(%rdi) | ||
2722 | |||
2723 | # qhasm: *(uint32 *) (out + 20) = in5 | ||
2724 | # asm 1: movl <in5=int64#4d,20(<out=int64#1) | ||
2725 | # asm 2: movl <in5=%ecx,20(<out=%rdi) | ||
2726 | movl %ecx,20(%rdi) | ||
2727 | |||
2728 | # qhasm: *(uint32 *) (out + 24) = in6 | ||
2729 | # asm 1: movl <in6=int64#5d,24(<out=int64#1) | ||
2730 | # asm 2: movl <in6=%r8d,24(<out=%rdi) | ||
2731 | movl %r8d,24(%rdi) | ||
2732 | |||
2733 | # qhasm: *(uint32 *) (out + 28) = in7 | ||
2734 | # asm 1: movl <in7=int64#6d,28(<out=int64#1) | ||
2735 | # asm 2: movl <in7=%r9d,28(<out=%rdi) | ||
2736 | movl %r9d,28(%rdi) | ||
2737 | |||
2738 | # qhasm: in4 = z4 | ||
2739 | # asm 1: movd <z4=int6464#15,>in4=int64#3 | ||
2740 | # asm 2: movd <z4=%xmm14,>in4=%rdx | ||
2741 | movd %xmm14,%rdx | ||
2742 | |||
2743 | # qhasm: in5 = z5 | ||
2744 | # asm 1: movd <z5=int6464#1,>in5=int64#4 | ||
2745 | # asm 2: movd <z5=%xmm0,>in5=%rcx | ||
2746 | movd %xmm0,%rcx | ||
2747 | |||
2748 | # qhasm: in6 = z6 | ||
2749 | # asm 1: movd <z6=int6464#6,>in6=int64#5 | ||
2750 | # asm 2: movd <z6=%xmm5,>in6=%r8 | ||
2751 | movd %xmm5,%r8 | ||
2752 | |||
2753 | # qhasm: in7 = z7 | ||
2754 | # asm 1: movd <z7=int6464#9,>in7=int64#6 | ||
2755 | # asm 2: movd <z7=%xmm8,>in7=%r9 | ||
2756 | movd %xmm8,%r9 | ||
2757 | |||
2758 | # qhasm: z4 <<<= 96 | ||
2759 | # asm 1: pshufd $0x39,<z4=int6464#15,<z4=int6464#15 | ||
2760 | # asm 2: pshufd $0x39,<z4=%xmm14,<z4=%xmm14 | ||
2761 | pshufd $0x39,%xmm14,%xmm14 | ||
2762 | |||
2763 | # qhasm: z5 <<<= 96 | ||
2764 | # asm 1: pshufd $0x39,<z5=int6464#1,<z5=int6464#1 | ||
2765 | # asm 2: pshufd $0x39,<z5=%xmm0,<z5=%xmm0 | ||
2766 | pshufd $0x39,%xmm0,%xmm0 | ||
2767 | |||
2768 | # qhasm: z6 <<<= 96 | ||
2769 | # asm 1: pshufd $0x39,<z6=int6464#6,<z6=int6464#6 | ||
2770 | # asm 2: pshufd $0x39,<z6=%xmm5,<z6=%xmm5 | ||
2771 | pshufd $0x39,%xmm5,%xmm5 | ||
2772 | |||
2773 | # qhasm: z7 <<<= 96 | ||
2774 | # asm 1: pshufd $0x39,<z7=int6464#9,<z7=int6464#9 | ||
2775 | # asm 2: pshufd $0x39,<z7=%xmm8,<z7=%xmm8 | ||
2776 | pshufd $0x39,%xmm8,%xmm8 | ||
2777 | |||
2778 | # qhasm: (uint32) in4 ^= *(uint32 *) (m + 80) | ||
2779 | # asm 1: xorl 80(<m=int64#2),<in4=int64#3d | ||
2780 | # asm 2: xorl 80(<m=%rsi),<in4=%edx | ||
2781 | xorl 80(%rsi),%edx | ||
2782 | |||
2783 | # qhasm: (uint32) in5 ^= *(uint32 *) (m + 84) | ||
2784 | # asm 1: xorl 84(<m=int64#2),<in5=int64#4d | ||
2785 | # asm 2: xorl 84(<m=%rsi),<in5=%ecx | ||
2786 | xorl 84(%rsi),%ecx | ||
2787 | |||
2788 | # qhasm: (uint32) in6 ^= *(uint32 *) (m + 88) | ||
2789 | # asm 1: xorl 88(<m=int64#2),<in6=int64#5d | ||
2790 | # asm 2: xorl 88(<m=%rsi),<in6=%r8d | ||
2791 | xorl 88(%rsi),%r8d | ||
2792 | |||
2793 | # qhasm: (uint32) in7 ^= *(uint32 *) (m + 92) | ||
2794 | # asm 1: xorl 92(<m=int64#2),<in7=int64#6d | ||
2795 | # asm 2: xorl 92(<m=%rsi),<in7=%r9d | ||
2796 | xorl 92(%rsi),%r9d | ||
2797 | |||
2798 | # qhasm: *(uint32 *) (out + 80) = in4 | ||
2799 | # asm 1: movl <in4=int64#3d,80(<out=int64#1) | ||
2800 | # asm 2: movl <in4=%edx,80(<out=%rdi) | ||
2801 | movl %edx,80(%rdi) | ||
2802 | |||
2803 | # qhasm: *(uint32 *) (out + 84) = in5 | ||
2804 | # asm 1: movl <in5=int64#4d,84(<out=int64#1) | ||
2805 | # asm 2: movl <in5=%ecx,84(<out=%rdi) | ||
2806 | movl %ecx,84(%rdi) | ||
2807 | |||
2808 | # qhasm: *(uint32 *) (out + 88) = in6 | ||
2809 | # asm 1: movl <in6=int64#5d,88(<out=int64#1) | ||
2810 | # asm 2: movl <in6=%r8d,88(<out=%rdi) | ||
2811 | movl %r8d,88(%rdi) | ||
2812 | |||
2813 | # qhasm: *(uint32 *) (out + 92) = in7 | ||
2814 | # asm 1: movl <in7=int64#6d,92(<out=int64#1) | ||
2815 | # asm 2: movl <in7=%r9d,92(<out=%rdi) | ||
2816 | movl %r9d,92(%rdi) | ||
2817 | |||
2818 | # qhasm: in4 = z4 | ||
2819 | # asm 1: movd <z4=int6464#15,>in4=int64#3 | ||
2820 | # asm 2: movd <z4=%xmm14,>in4=%rdx | ||
2821 | movd %xmm14,%rdx | ||
2822 | |||
2823 | # qhasm: in5 = z5 | ||
2824 | # asm 1: movd <z5=int6464#1,>in5=int64#4 | ||
2825 | # asm 2: movd <z5=%xmm0,>in5=%rcx | ||
2826 | movd %xmm0,%rcx | ||
2827 | |||
2828 | # qhasm: in6 = z6 | ||
2829 | # asm 1: movd <z6=int6464#6,>in6=int64#5 | ||
2830 | # asm 2: movd <z6=%xmm5,>in6=%r8 | ||
2831 | movd %xmm5,%r8 | ||
2832 | |||
2833 | # qhasm: in7 = z7 | ||
2834 | # asm 1: movd <z7=int6464#9,>in7=int64#6 | ||
2835 | # asm 2: movd <z7=%xmm8,>in7=%r9 | ||
2836 | movd %xmm8,%r9 | ||
2837 | |||
2838 | # qhasm: z4 <<<= 96 | ||
2839 | # asm 1: pshufd $0x39,<z4=int6464#15,<z4=int6464#15 | ||
2840 | # asm 2: pshufd $0x39,<z4=%xmm14,<z4=%xmm14 | ||
2841 | pshufd $0x39,%xmm14,%xmm14 | ||
2842 | |||
2843 | # qhasm: z5 <<<= 96 | ||
2844 | # asm 1: pshufd $0x39,<z5=int6464#1,<z5=int6464#1 | ||
2845 | # asm 2: pshufd $0x39,<z5=%xmm0,<z5=%xmm0 | ||
2846 | pshufd $0x39,%xmm0,%xmm0 | ||
2847 | |||
2848 | # qhasm: z6 <<<= 96 | ||
2849 | # asm 1: pshufd $0x39,<z6=int6464#6,<z6=int6464#6 | ||
2850 | # asm 2: pshufd $0x39,<z6=%xmm5,<z6=%xmm5 | ||
2851 | pshufd $0x39,%xmm5,%xmm5 | ||
2852 | |||
2853 | # qhasm: z7 <<<= 96 | ||
2854 | # asm 1: pshufd $0x39,<z7=int6464#9,<z7=int6464#9 | ||
2855 | # asm 2: pshufd $0x39,<z7=%xmm8,<z7=%xmm8 | ||
2856 | pshufd $0x39,%xmm8,%xmm8 | ||
2857 | |||
2858 | # qhasm: (uint32) in4 ^= *(uint32 *) (m + 144) | ||
2859 | # asm 1: xorl 144(<m=int64#2),<in4=int64#3d | ||
2860 | # asm 2: xorl 144(<m=%rsi),<in4=%edx | ||
2861 | xorl 144(%rsi),%edx | ||
2862 | |||
2863 | # qhasm: (uint32) in5 ^= *(uint32 *) (m + 148) | ||
2864 | # asm 1: xorl 148(<m=int64#2),<in5=int64#4d | ||
2865 | # asm 2: xorl 148(<m=%rsi),<in5=%ecx | ||
2866 | xorl 148(%rsi),%ecx | ||
2867 | |||
2868 | # qhasm: (uint32) in6 ^= *(uint32 *) (m + 152) | ||
2869 | # asm 1: xorl 152(<m=int64#2),<in6=int64#5d | ||
2870 | # asm 2: xorl 152(<m=%rsi),<in6=%r8d | ||
2871 | xorl 152(%rsi),%r8d | ||
2872 | |||
2873 | # qhasm: (uint32) in7 ^= *(uint32 *) (m + 156) | ||
2874 | # asm 1: xorl 156(<m=int64#2),<in7=int64#6d | ||
2875 | # asm 2: xorl 156(<m=%rsi),<in7=%r9d | ||
2876 | xorl 156(%rsi),%r9d | ||
2877 | |||
2878 | # qhasm: *(uint32 *) (out + 144) = in4 | ||
2879 | # asm 1: movl <in4=int64#3d,144(<out=int64#1) | ||
2880 | # asm 2: movl <in4=%edx,144(<out=%rdi) | ||
2881 | movl %edx,144(%rdi) | ||
2882 | |||
2883 | # qhasm: *(uint32 *) (out + 148) = in5 | ||
2884 | # asm 1: movl <in5=int64#4d,148(<out=int64#1) | ||
2885 | # asm 2: movl <in5=%ecx,148(<out=%rdi) | ||
2886 | movl %ecx,148(%rdi) | ||
2887 | |||
2888 | # qhasm: *(uint32 *) (out + 152) = in6 | ||
2889 | # asm 1: movl <in6=int64#5d,152(<out=int64#1) | ||
2890 | # asm 2: movl <in6=%r8d,152(<out=%rdi) | ||
2891 | movl %r8d,152(%rdi) | ||
2892 | |||
2893 | # qhasm: *(uint32 *) (out + 156) = in7 | ||
2894 | # asm 1: movl <in7=int64#6d,156(<out=int64#1) | ||
2895 | # asm 2: movl <in7=%r9d,156(<out=%rdi) | ||
2896 | movl %r9d,156(%rdi) | ||
2897 | |||
2898 | # qhasm: in4 = z4 | ||
2899 | # asm 1: movd <z4=int6464#15,>in4=int64#3 | ||
2900 | # asm 2: movd <z4=%xmm14,>in4=%rdx | ||
2901 | movd %xmm14,%rdx | ||
2902 | |||
2903 | # qhasm: in5 = z5 | ||
2904 | # asm 1: movd <z5=int6464#1,>in5=int64#4 | ||
2905 | # asm 2: movd <z5=%xmm0,>in5=%rcx | ||
2906 | movd %xmm0,%rcx | ||
2907 | |||
2908 | # qhasm: in6 = z6 | ||
2909 | # asm 1: movd <z6=int6464#6,>in6=int64#5 | ||
2910 | # asm 2: movd <z6=%xmm5,>in6=%r8 | ||
2911 | movd %xmm5,%r8 | ||
2912 | |||
2913 | # qhasm: in7 = z7 | ||
2914 | # asm 1: movd <z7=int6464#9,>in7=int64#6 | ||
2915 | # asm 2: movd <z7=%xmm8,>in7=%r9 | ||
2916 | movd %xmm8,%r9 | ||
2917 | |||
2918 | # qhasm: (uint32) in4 ^= *(uint32 *) (m + 208) | ||
2919 | # asm 1: xorl 208(<m=int64#2),<in4=int64#3d | ||
2920 | # asm 2: xorl 208(<m=%rsi),<in4=%edx | ||
2921 | xorl 208(%rsi),%edx | ||
2922 | |||
2923 | # qhasm: (uint32) in5 ^= *(uint32 *) (m + 212) | ||
2924 | # asm 1: xorl 212(<m=int64#2),<in5=int64#4d | ||
2925 | # asm 2: xorl 212(<m=%rsi),<in5=%ecx | ||
2926 | xorl 212(%rsi),%ecx | ||
2927 | |||
2928 | # qhasm: (uint32) in6 ^= *(uint32 *) (m + 216) | ||
2929 | # asm 1: xorl 216(<m=int64#2),<in6=int64#5d | ||
2930 | # asm 2: xorl 216(<m=%rsi),<in6=%r8d | ||
2931 | xorl 216(%rsi),%r8d | ||
2932 | |||
2933 | # qhasm: (uint32) in7 ^= *(uint32 *) (m + 220) | ||
2934 | # asm 1: xorl 220(<m=int64#2),<in7=int64#6d | ||
2935 | # asm 2: xorl 220(<m=%rsi),<in7=%r9d | ||
2936 | xorl 220(%rsi),%r9d | ||
2937 | |||
2938 | # qhasm: *(uint32 *) (out + 208) = in4 | ||
2939 | # asm 1: movl <in4=int64#3d,208(<out=int64#1) | ||
2940 | # asm 2: movl <in4=%edx,208(<out=%rdi) | ||
2941 | movl %edx,208(%rdi) | ||
2942 | |||
2943 | # qhasm: *(uint32 *) (out + 212) = in5 | ||
2944 | # asm 1: movl <in5=int64#4d,212(<out=int64#1) | ||
2945 | # asm 2: movl <in5=%ecx,212(<out=%rdi) | ||
2946 | movl %ecx,212(%rdi) | ||
2947 | |||
2948 | # qhasm: *(uint32 *) (out + 216) = in6 | ||
2949 | # asm 1: movl <in6=int64#5d,216(<out=int64#1) | ||
2950 | # asm 2: movl <in6=%r8d,216(<out=%rdi) | ||
2951 | movl %r8d,216(%rdi) | ||
2952 | |||
2953 | # qhasm: *(uint32 *) (out + 220) = in7 | ||
2954 | # asm 1: movl <in7=int64#6d,220(<out=int64#1) | ||
2955 | # asm 2: movl <in7=%r9d,220(<out=%rdi) | ||
2956 | movl %r9d,220(%rdi) | ||
2957 | |||
2958 | # qhasm: uint32323232 z8 += orig8 | ||
2959 | # asm 1: paddd <orig8=stack128#19,<z8=int6464#16 | ||
2960 | # asm 2: paddd <orig8=288(%rsp),<z8=%xmm15 | ||
2961 | paddd 288(%rsp),%xmm15 | ||
2962 | |||
2963 | # qhasm: uint32323232 z9 += orig9 | ||
2964 | # asm 1: paddd <orig9=stack128#20,<z9=int6464#12 | ||
2965 | # asm 2: paddd <orig9=304(%rsp),<z9=%xmm11 | ||
2966 | paddd 304(%rsp),%xmm11 | ||
2967 | |||
2968 | # qhasm: uint32323232 z10 += orig10 | ||
2969 | # asm 1: paddd <orig10=stack128#6,<z10=int6464#2 | ||
2970 | # asm 2: paddd <orig10=80(%rsp),<z10=%xmm1 | ||
2971 | paddd 80(%rsp),%xmm1 | ||
2972 | |||
2973 | # qhasm: uint32323232 z11 += orig11 | ||
2974 | # asm 1: paddd <orig11=stack128#10,<z11=int6464#7 | ||
2975 | # asm 2: paddd <orig11=144(%rsp),<z11=%xmm6 | ||
2976 | paddd 144(%rsp),%xmm6 | ||
2977 | |||
2978 | # qhasm: in8 = z8 | ||
2979 | # asm 1: movd <z8=int6464#16,>in8=int64#3 | ||
2980 | # asm 2: movd <z8=%xmm15,>in8=%rdx | ||
2981 | movd %xmm15,%rdx | ||
2982 | |||
2983 | # qhasm: in9 = z9 | ||
2984 | # asm 1: movd <z9=int6464#12,>in9=int64#4 | ||
2985 | # asm 2: movd <z9=%xmm11,>in9=%rcx | ||
2986 | movd %xmm11,%rcx | ||
2987 | |||
2988 | # qhasm: in10 = z10 | ||
2989 | # asm 1: movd <z10=int6464#2,>in10=int64#5 | ||
2990 | # asm 2: movd <z10=%xmm1,>in10=%r8 | ||
2991 | movd %xmm1,%r8 | ||
2992 | |||
2993 | # qhasm: in11 = z11 | ||
2994 | # asm 1: movd <z11=int6464#7,>in11=int64#6 | ||
2995 | # asm 2: movd <z11=%xmm6,>in11=%r9 | ||
2996 | movd %xmm6,%r9 | ||
2997 | |||
2998 | # qhasm: z8 <<<= 96 | ||
2999 | # asm 1: pshufd $0x39,<z8=int6464#16,<z8=int6464#16 | ||
3000 | # asm 2: pshufd $0x39,<z8=%xmm15,<z8=%xmm15 | ||
3001 | pshufd $0x39,%xmm15,%xmm15 | ||
3002 | |||
3003 | # qhasm: z9 <<<= 96 | ||
3004 | # asm 1: pshufd $0x39,<z9=int6464#12,<z9=int6464#12 | ||
3005 | # asm 2: pshufd $0x39,<z9=%xmm11,<z9=%xmm11 | ||
3006 | pshufd $0x39,%xmm11,%xmm11 | ||
3007 | |||
3008 | # qhasm: z10 <<<= 96 | ||
3009 | # asm 1: pshufd $0x39,<z10=int6464#2,<z10=int6464#2 | ||
3010 | # asm 2: pshufd $0x39,<z10=%xmm1,<z10=%xmm1 | ||
3011 | pshufd $0x39,%xmm1,%xmm1 | ||
3012 | |||
3013 | # qhasm: z11 <<<= 96 | ||
3014 | # asm 1: pshufd $0x39,<z11=int6464#7,<z11=int6464#7 | ||
3015 | # asm 2: pshufd $0x39,<z11=%xmm6,<z11=%xmm6 | ||
3016 | pshufd $0x39,%xmm6,%xmm6 | ||
3017 | |||
3018 | # qhasm: (uint32) in8 ^= *(uint32 *) (m + 32) | ||
3019 | # asm 1: xorl 32(<m=int64#2),<in8=int64#3d | ||
3020 | # asm 2: xorl 32(<m=%rsi),<in8=%edx | ||
3021 | xorl 32(%rsi),%edx | ||
3022 | |||
3023 | # qhasm: (uint32) in9 ^= *(uint32 *) (m + 36) | ||
3024 | # asm 1: xorl 36(<m=int64#2),<in9=int64#4d | ||
3025 | # asm 2: xorl 36(<m=%rsi),<in9=%ecx | ||
3026 | xorl 36(%rsi),%ecx | ||
3027 | |||
3028 | # qhasm: (uint32) in10 ^= *(uint32 *) (m + 40) | ||
3029 | # asm 1: xorl 40(<m=int64#2),<in10=int64#5d | ||
3030 | # asm 2: xorl 40(<m=%rsi),<in10=%r8d | ||
3031 | xorl 40(%rsi),%r8d | ||
3032 | |||
3033 | # qhasm: (uint32) in11 ^= *(uint32 *) (m + 44) | ||
3034 | # asm 1: xorl 44(<m=int64#2),<in11=int64#6d | ||
3035 | # asm 2: xorl 44(<m=%rsi),<in11=%r9d | ||
3036 | xorl 44(%rsi),%r9d | ||
3037 | |||
3038 | # qhasm: *(uint32 *) (out + 32) = in8 | ||
3039 | # asm 1: movl <in8=int64#3d,32(<out=int64#1) | ||
3040 | # asm 2: movl <in8=%edx,32(<out=%rdi) | ||
3041 | movl %edx,32(%rdi) | ||
3042 | |||
3043 | # qhasm: *(uint32 *) (out + 36) = in9 | ||
3044 | # asm 1: movl <in9=int64#4d,36(<out=int64#1) | ||
3045 | # asm 2: movl <in9=%ecx,36(<out=%rdi) | ||
3046 | movl %ecx,36(%rdi) | ||
3047 | |||
3048 | # qhasm: *(uint32 *) (out + 40) = in10 | ||
3049 | # asm 1: movl <in10=int64#5d,40(<out=int64#1) | ||
3050 | # asm 2: movl <in10=%r8d,40(<out=%rdi) | ||
3051 | movl %r8d,40(%rdi) | ||
3052 | |||
3053 | # qhasm: *(uint32 *) (out + 44) = in11 | ||
3054 | # asm 1: movl <in11=int64#6d,44(<out=int64#1) | ||
3055 | # asm 2: movl <in11=%r9d,44(<out=%rdi) | ||
3056 | movl %r9d,44(%rdi) | ||
3057 | |||
3058 | # qhasm: in8 = z8 | ||
3059 | # asm 1: movd <z8=int6464#16,>in8=int64#3 | ||
3060 | # asm 2: movd <z8=%xmm15,>in8=%rdx | ||
3061 | movd %xmm15,%rdx | ||
3062 | |||
3063 | # qhasm: in9 = z9 | ||
3064 | # asm 1: movd <z9=int6464#12,>in9=int64#4 | ||
3065 | # asm 2: movd <z9=%xmm11,>in9=%rcx | ||
3066 | movd %xmm11,%rcx | ||
3067 | |||
3068 | # qhasm: in10 = z10 | ||
3069 | # asm 1: movd <z10=int6464#2,>in10=int64#5 | ||
3070 | # asm 2: movd <z10=%xmm1,>in10=%r8 | ||
3071 | movd %xmm1,%r8 | ||
3072 | |||
3073 | # qhasm: in11 = z11 | ||
3074 | # asm 1: movd <z11=int6464#7,>in11=int64#6 | ||
3075 | # asm 2: movd <z11=%xmm6,>in11=%r9 | ||
3076 | movd %xmm6,%r9 | ||
3077 | |||
3078 | # qhasm: z8 <<<= 96 | ||
3079 | # asm 1: pshufd $0x39,<z8=int6464#16,<z8=int6464#16 | ||
3080 | # asm 2: pshufd $0x39,<z8=%xmm15,<z8=%xmm15 | ||
3081 | pshufd $0x39,%xmm15,%xmm15 | ||
3082 | |||
3083 | # qhasm: z9 <<<= 96 | ||
3084 | # asm 1: pshufd $0x39,<z9=int6464#12,<z9=int6464#12 | ||
3085 | # asm 2: pshufd $0x39,<z9=%xmm11,<z9=%xmm11 | ||
3086 | pshufd $0x39,%xmm11,%xmm11 | ||
3087 | |||
3088 | # qhasm: z10 <<<= 96 | ||
3089 | # asm 1: pshufd $0x39,<z10=int6464#2,<z10=int6464#2 | ||
3090 | # asm 2: pshufd $0x39,<z10=%xmm1,<z10=%xmm1 | ||
3091 | pshufd $0x39,%xmm1,%xmm1 | ||
3092 | |||
3093 | # qhasm: z11 <<<= 96 | ||
3094 | # asm 1: pshufd $0x39,<z11=int6464#7,<z11=int6464#7 | ||
3095 | # asm 2: pshufd $0x39,<z11=%xmm6,<z11=%xmm6 | ||
3096 | pshufd $0x39,%xmm6,%xmm6 | ||
3097 | |||
3098 | # qhasm: (uint32) in8 ^= *(uint32 *) (m + 96) | ||
3099 | # asm 1: xorl 96(<m=int64#2),<in8=int64#3d | ||
3100 | # asm 2: xorl 96(<m=%rsi),<in8=%edx | ||
3101 | xorl 96(%rsi),%edx | ||
3102 | |||
3103 | # qhasm: (uint32) in9 ^= *(uint32 *) (m + 100) | ||
3104 | # asm 1: xorl 100(<m=int64#2),<in9=int64#4d | ||
3105 | # asm 2: xorl 100(<m=%rsi),<in9=%ecx | ||
3106 | xorl 100(%rsi),%ecx | ||
3107 | |||
3108 | # qhasm: (uint32) in10 ^= *(uint32 *) (m + 104) | ||
3109 | # asm 1: xorl 104(<m=int64#2),<in10=int64#5d | ||
3110 | # asm 2: xorl 104(<m=%rsi),<in10=%r8d | ||
3111 | xorl 104(%rsi),%r8d | ||
3112 | |||
3113 | # qhasm: (uint32) in11 ^= *(uint32 *) (m + 108) | ||
3114 | # asm 1: xorl 108(<m=int64#2),<in11=int64#6d | ||
3115 | # asm 2: xorl 108(<m=%rsi),<in11=%r9d | ||
3116 | xorl 108(%rsi),%r9d | ||
3117 | |||
3118 | # qhasm: *(uint32 *) (out + 96) = in8 | ||
3119 | # asm 1: movl <in8=int64#3d,96(<out=int64#1) | ||
3120 | # asm 2: movl <in8=%edx,96(<out=%rdi) | ||
3121 | movl %edx,96(%rdi) | ||
3122 | |||
3123 | # qhasm: *(uint32 *) (out + 100) = in9 | ||
3124 | # asm 1: movl <in9=int64#4d,100(<out=int64#1) | ||
3125 | # asm 2: movl <in9=%ecx,100(<out=%rdi) | ||
3126 | movl %ecx,100(%rdi) | ||
3127 | |||
3128 | # qhasm: *(uint32 *) (out + 104) = in10 | ||
3129 | # asm 1: movl <in10=int64#5d,104(<out=int64#1) | ||
3130 | # asm 2: movl <in10=%r8d,104(<out=%rdi) | ||
3131 | movl %r8d,104(%rdi) | ||
3132 | |||
3133 | # qhasm: *(uint32 *) (out + 108) = in11 | ||
3134 | # asm 1: movl <in11=int64#6d,108(<out=int64#1) | ||
3135 | # asm 2: movl <in11=%r9d,108(<out=%rdi) | ||
3136 | movl %r9d,108(%rdi) | ||
3137 | |||
3138 | # qhasm: in8 = z8 | ||
3139 | # asm 1: movd <z8=int6464#16,>in8=int64#3 | ||
3140 | # asm 2: movd <z8=%xmm15,>in8=%rdx | ||
3141 | movd %xmm15,%rdx | ||
3142 | |||
3143 | # qhasm: in9 = z9 | ||
3144 | # asm 1: movd <z9=int6464#12,>in9=int64#4 | ||
3145 | # asm 2: movd <z9=%xmm11,>in9=%rcx | ||
3146 | movd %xmm11,%rcx | ||
3147 | |||
3148 | # qhasm: in10 = z10 | ||
3149 | # asm 1: movd <z10=int6464#2,>in10=int64#5 | ||
3150 | # asm 2: movd <z10=%xmm1,>in10=%r8 | ||
3151 | movd %xmm1,%r8 | ||
3152 | |||
3153 | # qhasm: in11 = z11 | ||
3154 | # asm 1: movd <z11=int6464#7,>in11=int64#6 | ||
3155 | # asm 2: movd <z11=%xmm6,>in11=%r9 | ||
3156 | movd %xmm6,%r9 | ||
3157 | |||
3158 | # qhasm: z8 <<<= 96 | ||
3159 | # asm 1: pshufd $0x39,<z8=int6464#16,<z8=int6464#16 | ||
3160 | # asm 2: pshufd $0x39,<z8=%xmm15,<z8=%xmm15 | ||
3161 | pshufd $0x39,%xmm15,%xmm15 | ||
3162 | |||
3163 | # qhasm: z9 <<<= 96 | ||
3164 | # asm 1: pshufd $0x39,<z9=int6464#12,<z9=int6464#12 | ||
3165 | # asm 2: pshufd $0x39,<z9=%xmm11,<z9=%xmm11 | ||
3166 | pshufd $0x39,%xmm11,%xmm11 | ||
3167 | |||
3168 | # qhasm: z10 <<<= 96 | ||
3169 | # asm 1: pshufd $0x39,<z10=int6464#2,<z10=int6464#2 | ||
3170 | # asm 2: pshufd $0x39,<z10=%xmm1,<z10=%xmm1 | ||
3171 | pshufd $0x39,%xmm1,%xmm1 | ||
3172 | |||
3173 | # qhasm: z11 <<<= 96 | ||
3174 | # asm 1: pshufd $0x39,<z11=int6464#7,<z11=int6464#7 | ||
3175 | # asm 2: pshufd $0x39,<z11=%xmm6,<z11=%xmm6 | ||
3176 | pshufd $0x39,%xmm6,%xmm6 | ||
3177 | |||
3178 | # qhasm: (uint32) in8 ^= *(uint32 *) (m + 160) | ||
3179 | # asm 1: xorl 160(<m=int64#2),<in8=int64#3d | ||
3180 | # asm 2: xorl 160(<m=%rsi),<in8=%edx | ||
3181 | xorl 160(%rsi),%edx | ||
3182 | |||
3183 | # qhasm: (uint32) in9 ^= *(uint32 *) (m + 164) | ||
3184 | # asm 1: xorl 164(<m=int64#2),<in9=int64#4d | ||
3185 | # asm 2: xorl 164(<m=%rsi),<in9=%ecx | ||
3186 | xorl 164(%rsi),%ecx | ||
3187 | |||
3188 | # qhasm: (uint32) in10 ^= *(uint32 *) (m + 168) | ||
3189 | # asm 1: xorl 168(<m=int64#2),<in10=int64#5d | ||
3190 | # asm 2: xorl 168(<m=%rsi),<in10=%r8d | ||
3191 | xorl 168(%rsi),%r8d | ||
3192 | |||
3193 | # qhasm: (uint32) in11 ^= *(uint32 *) (m + 172) | ||
3194 | # asm 1: xorl 172(<m=int64#2),<in11=int64#6d | ||
3195 | # asm 2: xorl 172(<m=%rsi),<in11=%r9d | ||
3196 | xorl 172(%rsi),%r9d | ||
3197 | |||
3198 | # qhasm: *(uint32 *) (out + 160) = in8 | ||
3199 | # asm 1: movl <in8=int64#3d,160(<out=int64#1) | ||
3200 | # asm 2: movl <in8=%edx,160(<out=%rdi) | ||
3201 | movl %edx,160(%rdi) | ||
3202 | |||
3203 | # qhasm: *(uint32 *) (out + 164) = in9 | ||
3204 | # asm 1: movl <in9=int64#4d,164(<out=int64#1) | ||
3205 | # asm 2: movl <in9=%ecx,164(<out=%rdi) | ||
3206 | movl %ecx,164(%rdi) | ||
3207 | |||
3208 | # qhasm: *(uint32 *) (out + 168) = in10 | ||
3209 | # asm 1: movl <in10=int64#5d,168(<out=int64#1) | ||
3210 | # asm 2: movl <in10=%r8d,168(<out=%rdi) | ||
3211 | movl %r8d,168(%rdi) | ||
3212 | |||
3213 | # qhasm: *(uint32 *) (out + 172) = in11 | ||
3214 | # asm 1: movl <in11=int64#6d,172(<out=int64#1) | ||
3215 | # asm 2: movl <in11=%r9d,172(<out=%rdi) | ||
3216 | movl %r9d,172(%rdi) | ||
3217 | |||
3218 | # qhasm: in8 = z8 | ||
3219 | # asm 1: movd <z8=int6464#16,>in8=int64#3 | ||
3220 | # asm 2: movd <z8=%xmm15,>in8=%rdx | ||
3221 | movd %xmm15,%rdx | ||
3222 | |||
3223 | # qhasm: in9 = z9 | ||
3224 | # asm 1: movd <z9=int6464#12,>in9=int64#4 | ||
3225 | # asm 2: movd <z9=%xmm11,>in9=%rcx | ||
3226 | movd %xmm11,%rcx | ||
3227 | |||
3228 | # qhasm: in10 = z10 | ||
3229 | # asm 1: movd <z10=int6464#2,>in10=int64#5 | ||
3230 | # asm 2: movd <z10=%xmm1,>in10=%r8 | ||
3231 | movd %xmm1,%r8 | ||
3232 | |||
3233 | # qhasm: in11 = z11 | ||
3234 | # asm 1: movd <z11=int6464#7,>in11=int64#6 | ||
3235 | # asm 2: movd <z11=%xmm6,>in11=%r9 | ||
3236 | movd %xmm6,%r9 | ||
3237 | |||
3238 | # qhasm: (uint32) in8 ^= *(uint32 *) (m + 224) | ||
3239 | # asm 1: xorl 224(<m=int64#2),<in8=int64#3d | ||
3240 | # asm 2: xorl 224(<m=%rsi),<in8=%edx | ||
3241 | xorl 224(%rsi),%edx | ||
3242 | |||
3243 | # qhasm: (uint32) in9 ^= *(uint32 *) (m + 228) | ||
3244 | # asm 1: xorl 228(<m=int64#2),<in9=int64#4d | ||
3245 | # asm 2: xorl 228(<m=%rsi),<in9=%ecx | ||
3246 | xorl 228(%rsi),%ecx | ||
3247 | |||
3248 | # qhasm: (uint32) in10 ^= *(uint32 *) (m + 232) | ||
3249 | # asm 1: xorl 232(<m=int64#2),<in10=int64#5d | ||
3250 | # asm 2: xorl 232(<m=%rsi),<in10=%r8d | ||
3251 | xorl 232(%rsi),%r8d | ||
3252 | |||
3253 | # qhasm: (uint32) in11 ^= *(uint32 *) (m + 236) | ||
3254 | # asm 1: xorl 236(<m=int64#2),<in11=int64#6d | ||
3255 | # asm 2: xorl 236(<m=%rsi),<in11=%r9d | ||
3256 | xorl 236(%rsi),%r9d | ||
3257 | |||
3258 | # qhasm: *(uint32 *) (out + 224) = in8 | ||
3259 | # asm 1: movl <in8=int64#3d,224(<out=int64#1) | ||
3260 | # asm 2: movl <in8=%edx,224(<out=%rdi) | ||
3261 | movl %edx,224(%rdi) | ||
3262 | |||
3263 | # qhasm: *(uint32 *) (out + 228) = in9 | ||
3264 | # asm 1: movl <in9=int64#4d,228(<out=int64#1) | ||
3265 | # asm 2: movl <in9=%ecx,228(<out=%rdi) | ||
3266 | movl %ecx,228(%rdi) | ||
3267 | |||
3268 | # qhasm: *(uint32 *) (out + 232) = in10 | ||
3269 | # asm 1: movl <in10=int64#5d,232(<out=int64#1) | ||
3270 | # asm 2: movl <in10=%r8d,232(<out=%rdi) | ||
3271 | movl %r8d,232(%rdi) | ||
3272 | |||
3273 | # qhasm: *(uint32 *) (out + 236) = in11 | ||
3274 | # asm 1: movl <in11=int64#6d,236(<out=int64#1) | ||
3275 | # asm 2: movl <in11=%r9d,236(<out=%rdi) | ||
3276 | movl %r9d,236(%rdi) | ||
3277 | |||
3278 | # qhasm: uint32323232 z12 += orig12 | ||
3279 | # asm 1: paddd <orig12=stack128#11,<z12=int6464#14 | ||
3280 | # asm 2: paddd <orig12=160(%rsp),<z12=%xmm13 | ||
3281 | paddd 160(%rsp),%xmm13 | ||
3282 | |||
3283 | # qhasm: uint32323232 z13 += orig13 | ||
3284 | # asm 1: paddd <orig13=stack128#14,<z13=int6464#10 | ||
3285 | # asm 2: paddd <orig13=208(%rsp),<z13=%xmm9 | ||
3286 | paddd 208(%rsp),%xmm9 | ||
3287 | |||
3288 | # qhasm: uint32323232 z14 += orig14 | ||
3289 | # asm 1: paddd <orig14=stack128#17,<z14=int6464#4 | ||
3290 | # asm 2: paddd <orig14=256(%rsp),<z14=%xmm3 | ||
3291 | paddd 256(%rsp),%xmm3 | ||
3292 | |||
3293 | # qhasm: uint32323232 z15 += orig15 | ||
3294 | # asm 1: paddd <orig15=stack128#7,<z15=int6464#3 | ||
3295 | # asm 2: paddd <orig15=96(%rsp),<z15=%xmm2 | ||
3296 | paddd 96(%rsp),%xmm2 | ||
3297 | |||
3298 | # qhasm: in12 = z12 | ||
3299 | # asm 1: movd <z12=int6464#14,>in12=int64#3 | ||
3300 | # asm 2: movd <z12=%xmm13,>in12=%rdx | ||
3301 | movd %xmm13,%rdx | ||
3302 | |||
3303 | # qhasm: in13 = z13 | ||
3304 | # asm 1: movd <z13=int6464#10,>in13=int64#4 | ||
3305 | # asm 2: movd <z13=%xmm9,>in13=%rcx | ||
3306 | movd %xmm9,%rcx | ||
3307 | |||
3308 | # qhasm: in14 = z14 | ||
3309 | # asm 1: movd <z14=int6464#4,>in14=int64#5 | ||
3310 | # asm 2: movd <z14=%xmm3,>in14=%r8 | ||
3311 | movd %xmm3,%r8 | ||
3312 | |||
3313 | # qhasm: in15 = z15 | ||
3314 | # asm 1: movd <z15=int6464#3,>in15=int64#6 | ||
3315 | # asm 2: movd <z15=%xmm2,>in15=%r9 | ||
3316 | movd %xmm2,%r9 | ||
3317 | |||
3318 | # qhasm: z12 <<<= 96 | ||
3319 | # asm 1: pshufd $0x39,<z12=int6464#14,<z12=int6464#14 | ||
3320 | # asm 2: pshufd $0x39,<z12=%xmm13,<z12=%xmm13 | ||
3321 | pshufd $0x39,%xmm13,%xmm13 | ||
3322 | |||
3323 | # qhasm: z13 <<<= 96 | ||
3324 | # asm 1: pshufd $0x39,<z13=int6464#10,<z13=int6464#10 | ||
3325 | # asm 2: pshufd $0x39,<z13=%xmm9,<z13=%xmm9 | ||
3326 | pshufd $0x39,%xmm9,%xmm9 | ||
3327 | |||
3328 | # qhasm: z14 <<<= 96 | ||
3329 | # asm 1: pshufd $0x39,<z14=int6464#4,<z14=int6464#4 | ||
3330 | # asm 2: pshufd $0x39,<z14=%xmm3,<z14=%xmm3 | ||
3331 | pshufd $0x39,%xmm3,%xmm3 | ||
3332 | |||
3333 | # qhasm: z15 <<<= 96 | ||
3334 | # asm 1: pshufd $0x39,<z15=int6464#3,<z15=int6464#3 | ||
3335 | # asm 2: pshufd $0x39,<z15=%xmm2,<z15=%xmm2 | ||
3336 | pshufd $0x39,%xmm2,%xmm2 | ||
3337 | |||
3338 | # qhasm: (uint32) in12 ^= *(uint32 *) (m + 48) | ||
3339 | # asm 1: xorl 48(<m=int64#2),<in12=int64#3d | ||
3340 | # asm 2: xorl 48(<m=%rsi),<in12=%edx | ||
3341 | xorl 48(%rsi),%edx | ||
3342 | |||
3343 | # qhasm: (uint32) in13 ^= *(uint32 *) (m + 52) | ||
3344 | # asm 1: xorl 52(<m=int64#2),<in13=int64#4d | ||
3345 | # asm 2: xorl 52(<m=%rsi),<in13=%ecx | ||
3346 | xorl 52(%rsi),%ecx | ||
3347 | |||
3348 | # qhasm: (uint32) in14 ^= *(uint32 *) (m + 56) | ||
3349 | # asm 1: xorl 56(<m=int64#2),<in14=int64#5d | ||
3350 | # asm 2: xorl 56(<m=%rsi),<in14=%r8d | ||
3351 | xorl 56(%rsi),%r8d | ||
3352 | |||
3353 | # qhasm: (uint32) in15 ^= *(uint32 *) (m + 60) | ||
3354 | # asm 1: xorl 60(<m=int64#2),<in15=int64#6d | ||
3355 | # asm 2: xorl 60(<m=%rsi),<in15=%r9d | ||
3356 | xorl 60(%rsi),%r9d | ||
3357 | |||
3358 | # qhasm: *(uint32 *) (out + 48) = in12 | ||
3359 | # asm 1: movl <in12=int64#3d,48(<out=int64#1) | ||
3360 | # asm 2: movl <in12=%edx,48(<out=%rdi) | ||
3361 | movl %edx,48(%rdi) | ||
3362 | |||
3363 | # qhasm: *(uint32 *) (out + 52) = in13 | ||
3364 | # asm 1: movl <in13=int64#4d,52(<out=int64#1) | ||
3365 | # asm 2: movl <in13=%ecx,52(<out=%rdi) | ||
3366 | movl %ecx,52(%rdi) | ||
3367 | |||
3368 | # qhasm: *(uint32 *) (out + 56) = in14 | ||
3369 | # asm 1: movl <in14=int64#5d,56(<out=int64#1) | ||
3370 | # asm 2: movl <in14=%r8d,56(<out=%rdi) | ||
3371 | movl %r8d,56(%rdi) | ||
3372 | |||
3373 | # qhasm: *(uint32 *) (out + 60) = in15 | ||
3374 | # asm 1: movl <in15=int64#6d,60(<out=int64#1) | ||
3375 | # asm 2: movl <in15=%r9d,60(<out=%rdi) | ||
3376 | movl %r9d,60(%rdi) | ||
3377 | |||
3378 | # qhasm: in12 = z12 | ||
3379 | # asm 1: movd <z12=int6464#14,>in12=int64#3 | ||
3380 | # asm 2: movd <z12=%xmm13,>in12=%rdx | ||
3381 | movd %xmm13,%rdx | ||
3382 | |||
3383 | # qhasm: in13 = z13 | ||
3384 | # asm 1: movd <z13=int6464#10,>in13=int64#4 | ||
3385 | # asm 2: movd <z13=%xmm9,>in13=%rcx | ||
3386 | movd %xmm9,%rcx | ||
3387 | |||
3388 | # qhasm: in14 = z14 | ||
3389 | # asm 1: movd <z14=int6464#4,>in14=int64#5 | ||
3390 | # asm 2: movd <z14=%xmm3,>in14=%r8 | ||
3391 | movd %xmm3,%r8 | ||
3392 | |||
3393 | # qhasm: in15 = z15 | ||
3394 | # asm 1: movd <z15=int6464#3,>in15=int64#6 | ||
3395 | # asm 2: movd <z15=%xmm2,>in15=%r9 | ||
3396 | movd %xmm2,%r9 | ||
3397 | |||
3398 | # qhasm: z12 <<<= 96 | ||
3399 | # asm 1: pshufd $0x39,<z12=int6464#14,<z12=int6464#14 | ||
3400 | # asm 2: pshufd $0x39,<z12=%xmm13,<z12=%xmm13 | ||
3401 | pshufd $0x39,%xmm13,%xmm13 | ||
3402 | |||
3403 | # qhasm: z13 <<<= 96 | ||
3404 | # asm 1: pshufd $0x39,<z13=int6464#10,<z13=int6464#10 | ||
3405 | # asm 2: pshufd $0x39,<z13=%xmm9,<z13=%xmm9 | ||
3406 | pshufd $0x39,%xmm9,%xmm9 | ||
3407 | |||
3408 | # qhasm: z14 <<<= 96 | ||
3409 | # asm 1: pshufd $0x39,<z14=int6464#4,<z14=int6464#4 | ||
3410 | # asm 2: pshufd $0x39,<z14=%xmm3,<z14=%xmm3 | ||
3411 | pshufd $0x39,%xmm3,%xmm3 | ||
3412 | |||
3413 | # qhasm: z15 <<<= 96 | ||
3414 | # asm 1: pshufd $0x39,<z15=int6464#3,<z15=int6464#3 | ||
3415 | # asm 2: pshufd $0x39,<z15=%xmm2,<z15=%xmm2 | ||
3416 | pshufd $0x39,%xmm2,%xmm2 | ||
3417 | |||
3418 | # qhasm: (uint32) in12 ^= *(uint32 *) (m + 112) | ||
3419 | # asm 1: xorl 112(<m=int64#2),<in12=int64#3d | ||
3420 | # asm 2: xorl 112(<m=%rsi),<in12=%edx | ||
3421 | xorl 112(%rsi),%edx | ||
3422 | |||
3423 | # qhasm: (uint32) in13 ^= *(uint32 *) (m + 116) | ||
3424 | # asm 1: xorl 116(<m=int64#2),<in13=int64#4d | ||
3425 | # asm 2: xorl 116(<m=%rsi),<in13=%ecx | ||
3426 | xorl 116(%rsi),%ecx | ||
3427 | |||
3428 | # qhasm: (uint32) in14 ^= *(uint32 *) (m + 120) | ||
3429 | # asm 1: xorl 120(<m=int64#2),<in14=int64#5d | ||
3430 | # asm 2: xorl 120(<m=%rsi),<in14=%r8d | ||
3431 | xorl 120(%rsi),%r8d | ||
3432 | |||
3433 | # qhasm: (uint32) in15 ^= *(uint32 *) (m + 124) | ||
3434 | # asm 1: xorl 124(<m=int64#2),<in15=int64#6d | ||
3435 | # asm 2: xorl 124(<m=%rsi),<in15=%r9d | ||
3436 | xorl 124(%rsi),%r9d | ||
3437 | |||
3438 | # qhasm: *(uint32 *) (out + 112) = in12 | ||
3439 | # asm 1: movl <in12=int64#3d,112(<out=int64#1) | ||
3440 | # asm 2: movl <in12=%edx,112(<out=%rdi) | ||
3441 | movl %edx,112(%rdi) | ||
3442 | |||
3443 | # qhasm: *(uint32 *) (out + 116) = in13 | ||
3444 | # asm 1: movl <in13=int64#4d,116(<out=int64#1) | ||
3445 | # asm 2: movl <in13=%ecx,116(<out=%rdi) | ||
3446 | movl %ecx,116(%rdi) | ||
3447 | |||
3448 | # qhasm: *(uint32 *) (out + 120) = in14 | ||
3449 | # asm 1: movl <in14=int64#5d,120(<out=int64#1) | ||
3450 | # asm 2: movl <in14=%r8d,120(<out=%rdi) | ||
3451 | movl %r8d,120(%rdi) | ||
3452 | |||
3453 | # qhasm: *(uint32 *) (out + 124) = in15 | ||
3454 | # asm 1: movl <in15=int64#6d,124(<out=int64#1) | ||
3455 | # asm 2: movl <in15=%r9d,124(<out=%rdi) | ||
3456 | movl %r9d,124(%rdi) | ||
3457 | |||
3458 | # qhasm: in12 = z12 | ||
3459 | # asm 1: movd <z12=int6464#14,>in12=int64#3 | ||
3460 | # asm 2: movd <z12=%xmm13,>in12=%rdx | ||
3461 | movd %xmm13,%rdx | ||
3462 | |||
3463 | # qhasm: in13 = z13 | ||
3464 | # asm 1: movd <z13=int6464#10,>in13=int64#4 | ||
3465 | # asm 2: movd <z13=%xmm9,>in13=%rcx | ||
3466 | movd %xmm9,%rcx | ||
3467 | |||
3468 | # qhasm: in14 = z14 | ||
3469 | # asm 1: movd <z14=int6464#4,>in14=int64#5 | ||
3470 | # asm 2: movd <z14=%xmm3,>in14=%r8 | ||
3471 | movd %xmm3,%r8 | ||
3472 | |||
3473 | # qhasm: in15 = z15 | ||
3474 | # asm 1: movd <z15=int6464#3,>in15=int64#6 | ||
3475 | # asm 2: movd <z15=%xmm2,>in15=%r9 | ||
3476 | movd %xmm2,%r9 | ||
3477 | |||
3478 | # qhasm: z12 <<<= 96 | ||
3479 | # asm 1: pshufd $0x39,<z12=int6464#14,<z12=int6464#14 | ||
3480 | # asm 2: pshufd $0x39,<z12=%xmm13,<z12=%xmm13 | ||
3481 | pshufd $0x39,%xmm13,%xmm13 | ||
3482 | |||
3483 | # qhasm: z13 <<<= 96 | ||
3484 | # asm 1: pshufd $0x39,<z13=int6464#10,<z13=int6464#10 | ||
3485 | # asm 2: pshufd $0x39,<z13=%xmm9,<z13=%xmm9 | ||
3486 | pshufd $0x39,%xmm9,%xmm9 | ||
3487 | |||
3488 | # qhasm: z14 <<<= 96 | ||
3489 | # asm 1: pshufd $0x39,<z14=int6464#4,<z14=int6464#4 | ||
3490 | # asm 2: pshufd $0x39,<z14=%xmm3,<z14=%xmm3 | ||
3491 | pshufd $0x39,%xmm3,%xmm3 | ||
3492 | |||
3493 | # qhasm: z15 <<<= 96 | ||
3494 | # asm 1: pshufd $0x39,<z15=int6464#3,<z15=int6464#3 | ||
3495 | # asm 2: pshufd $0x39,<z15=%xmm2,<z15=%xmm2 | ||
3496 | pshufd $0x39,%xmm2,%xmm2 | ||
3497 | |||
3498 | # qhasm: (uint32) in12 ^= *(uint32 *) (m + 176) | ||
3499 | # asm 1: xorl 176(<m=int64#2),<in12=int64#3d | ||
3500 | # asm 2: xorl 176(<m=%rsi),<in12=%edx | ||
3501 | xorl 176(%rsi),%edx | ||
3502 | |||
3503 | # qhasm: (uint32) in13 ^= *(uint32 *) (m + 180) | ||
3504 | # asm 1: xorl 180(<m=int64#2),<in13=int64#4d | ||
3505 | # asm 2: xorl 180(<m=%rsi),<in13=%ecx | ||
3506 | xorl 180(%rsi),%ecx | ||
3507 | |||
3508 | # qhasm: (uint32) in14 ^= *(uint32 *) (m + 184) | ||
3509 | # asm 1: xorl 184(<m=int64#2),<in14=int64#5d | ||
3510 | # asm 2: xorl 184(<m=%rsi),<in14=%r8d | ||
3511 | xorl 184(%rsi),%r8d | ||
3512 | |||
3513 | # qhasm: (uint32) in15 ^= *(uint32 *) (m + 188) | ||
3514 | # asm 1: xorl 188(<m=int64#2),<in15=int64#6d | ||
3515 | # asm 2: xorl 188(<m=%rsi),<in15=%r9d | ||
3516 | xorl 188(%rsi),%r9d | ||
3517 | |||
3518 | # qhasm: *(uint32 *) (out + 176) = in12 | ||
3519 | # asm 1: movl <in12=int64#3d,176(<out=int64#1) | ||
3520 | # asm 2: movl <in12=%edx,176(<out=%rdi) | ||
3521 | movl %edx,176(%rdi) | ||
3522 | |||
3523 | # qhasm: *(uint32 *) (out + 180) = in13 | ||
3524 | # asm 1: movl <in13=int64#4d,180(<out=int64#1) | ||
3525 | # asm 2: movl <in13=%ecx,180(<out=%rdi) | ||
3526 | movl %ecx,180(%rdi) | ||
3527 | |||
3528 | # qhasm: *(uint32 *) (out + 184) = in14 | ||
3529 | # asm 1: movl <in14=int64#5d,184(<out=int64#1) | ||
3530 | # asm 2: movl <in14=%r8d,184(<out=%rdi) | ||
3531 | movl %r8d,184(%rdi) | ||
3532 | |||
3533 | # qhasm: *(uint32 *) (out + 188) = in15 | ||
3534 | # asm 1: movl <in15=int64#6d,188(<out=int64#1) | ||
3535 | # asm 2: movl <in15=%r9d,188(<out=%rdi) | ||
3536 | movl %r9d,188(%rdi) | ||
3537 | |||
3538 | # qhasm: in12 = z12 | ||
3539 | # asm 1: movd <z12=int6464#14,>in12=int64#3 | ||
3540 | # asm 2: movd <z12=%xmm13,>in12=%rdx | ||
3541 | movd %xmm13,%rdx | ||
3542 | |||
3543 | # qhasm: in13 = z13 | ||
3544 | # asm 1: movd <z13=int6464#10,>in13=int64#4 | ||
3545 | # asm 2: movd <z13=%xmm9,>in13=%rcx | ||
3546 | movd %xmm9,%rcx | ||
3547 | |||
3548 | # qhasm: in14 = z14 | ||
3549 | # asm 1: movd <z14=int6464#4,>in14=int64#5 | ||
3550 | # asm 2: movd <z14=%xmm3,>in14=%r8 | ||
3551 | movd %xmm3,%r8 | ||
3552 | |||
3553 | # qhasm: in15 = z15 | ||
3554 | # asm 1: movd <z15=int6464#3,>in15=int64#6 | ||
3555 | # asm 2: movd <z15=%xmm2,>in15=%r9 | ||
3556 | movd %xmm2,%r9 | ||
3557 | |||
3558 | # qhasm: (uint32) in12 ^= *(uint32 *) (m + 240) | ||
3559 | # asm 1: xorl 240(<m=int64#2),<in12=int64#3d | ||
3560 | # asm 2: xorl 240(<m=%rsi),<in12=%edx | ||
3561 | xorl 240(%rsi),%edx | ||
3562 | |||
3563 | # qhasm: (uint32) in13 ^= *(uint32 *) (m + 244) | ||
3564 | # asm 1: xorl 244(<m=int64#2),<in13=int64#4d | ||
3565 | # asm 2: xorl 244(<m=%rsi),<in13=%ecx | ||
3566 | xorl 244(%rsi),%ecx | ||
3567 | |||
3568 | # qhasm: (uint32) in14 ^= *(uint32 *) (m + 248) | ||
3569 | # asm 1: xorl 248(<m=int64#2),<in14=int64#5d | ||
3570 | # asm 2: xorl 248(<m=%rsi),<in14=%r8d | ||
3571 | xorl 248(%rsi),%r8d | ||
3572 | |||
3573 | # qhasm: (uint32) in15 ^= *(uint32 *) (m + 252) | ||
3574 | # asm 1: xorl 252(<m=int64#2),<in15=int64#6d | ||
3575 | # asm 2: xorl 252(<m=%rsi),<in15=%r9d | ||
3576 | xorl 252(%rsi),%r9d | ||
3577 | |||
3578 | # qhasm: *(uint32 *) (out + 240) = in12 | ||
3579 | # asm 1: movl <in12=int64#3d,240(<out=int64#1) | ||
3580 | # asm 2: movl <in12=%edx,240(<out=%rdi) | ||
3581 | movl %edx,240(%rdi) | ||
3582 | |||
3583 | # qhasm: *(uint32 *) (out + 244) = in13 | ||
3584 | # asm 1: movl <in13=int64#4d,244(<out=int64#1) | ||
3585 | # asm 2: movl <in13=%ecx,244(<out=%rdi) | ||
3586 | movl %ecx,244(%rdi) | ||
3587 | |||
3588 | # qhasm: *(uint32 *) (out + 248) = in14 | ||
3589 | # asm 1: movl <in14=int64#5d,248(<out=int64#1) | ||
3590 | # asm 2: movl <in14=%r8d,248(<out=%rdi) | ||
3591 | movl %r8d,248(%rdi) | ||
3592 | |||
3593 | # qhasm: *(uint32 *) (out + 252) = in15 | ||
3594 | # asm 1: movl <in15=int64#6d,252(<out=int64#1) | ||
3595 | # asm 2: movl <in15=%r9d,252(<out=%rdi) | ||
3596 | movl %r9d,252(%rdi) | ||
3597 | |||
3598 | # qhasm: bytes = bytes_backup | ||
3599 | # asm 1: movq <bytes_backup=stack64#8,>bytes=int64#6 | ||
3600 | # asm 2: movq <bytes_backup=408(%rsp),>bytes=%r9 | ||
3601 | movq 408(%rsp),%r9 | ||
3602 | |||
3603 | # qhasm: bytes -= 256 | ||
3604 | # asm 1: sub $256,<bytes=int64#6 | ||
3605 | # asm 2: sub $256,<bytes=%r9 | ||
3606 | sub $256,%r9 | ||
3607 | |||
3608 | # qhasm: m += 256 | ||
3609 | # asm 1: add $256,<m=int64#2 | ||
3610 | # asm 2: add $256,<m=%rsi | ||
3611 | add $256,%rsi | ||
3612 | |||
3613 | # qhasm: out += 256 | ||
3614 | # asm 1: add $256,<out=int64#1 | ||
3615 | # asm 2: add $256,<out=%rdi | ||
3616 | add $256,%rdi | ||
3617 | |||
3618 | # qhasm: unsigned<? bytes - 256 | ||
3619 | # asm 1: cmp $256,<bytes=int64#6 | ||
3620 | # asm 2: cmp $256,<bytes=%r9 | ||
3621 | cmp $256,%r9 | ||
3622 | # comment:fp stack unchanged by jump | ||
3623 | |||
3624 | # qhasm: goto bytesatleast256 if !unsigned< | ||
3625 | jae ._bytesatleast256 | ||
3626 | |||
3627 | # qhasm: unsigned>? bytes - 0 | ||
3628 | # asm 1: cmp $0,<bytes=int64#6 | ||
3629 | # asm 2: cmp $0,<bytes=%r9 | ||
3630 | cmp $0,%r9 | ||
3631 | # comment:fp stack unchanged by jump | ||
3632 | |||
3633 | # qhasm: goto done if !unsigned> | ||
3634 | jbe ._done | ||
3635 | # comment:fp stack unchanged by fallthrough | ||
3636 | |||
3637 | # qhasm: bytesbetween1and255: | ||
3638 | ._bytesbetween1and255: | ||
3639 | |||
3640 | # qhasm: unsigned<? bytes - 64 | ||
3641 | # asm 1: cmp $64,<bytes=int64#6 | ||
3642 | # asm 2: cmp $64,<bytes=%r9 | ||
3643 | cmp $64,%r9 | ||
3644 | # comment:fp stack unchanged by jump | ||
3645 | |||
3646 | # qhasm: goto nocopy if !unsigned< | ||
3647 | jae ._nocopy | ||
3648 | |||
3649 | # qhasm: ctarget = out | ||
3650 | # asm 1: mov <out=int64#1,>ctarget=int64#3 | ||
3651 | # asm 2: mov <out=%rdi,>ctarget=%rdx | ||
3652 | mov %rdi,%rdx | ||
3653 | |||
3654 | # qhasm: out = &tmp | ||
3655 | # asm 1: leaq <tmp=stack512#1,>out=int64#1 | ||
3656 | # asm 2: leaq <tmp=416(%rsp),>out=%rdi | ||
3657 | leaq 416(%rsp),%rdi | ||
3658 | |||
3659 | # qhasm: i = bytes | ||
3660 | # asm 1: mov <bytes=int64#6,>i=int64#4 | ||
3661 | # asm 2: mov <bytes=%r9,>i=%rcx | ||
3662 | mov %r9,%rcx | ||
3663 | |||
3664 | # qhasm: while (i) { *out++ = *m++; --i } | ||
3665 | rep movsb | ||
3666 | |||
3667 | # qhasm: out = &tmp | ||
3668 | # asm 1: leaq <tmp=stack512#1,>out=int64#1 | ||
3669 | # asm 2: leaq <tmp=416(%rsp),>out=%rdi | ||
3670 | leaq 416(%rsp),%rdi | ||
3671 | |||
3672 | # qhasm: m = &tmp | ||
3673 | # asm 1: leaq <tmp=stack512#1,>m=int64#2 | ||
3674 | # asm 2: leaq <tmp=416(%rsp),>m=%rsi | ||
3675 | leaq 416(%rsp),%rsi | ||
3676 | # comment:fp stack unchanged by fallthrough | ||
3677 | |||
3678 | # qhasm: nocopy: | ||
3679 | ._nocopy: | ||
3680 | |||
3681 | # qhasm: bytes_backup = bytes | ||
3682 | # asm 1: movq <bytes=int64#6,>bytes_backup=stack64#8 | ||
3683 | # asm 2: movq <bytes=%r9,>bytes_backup=408(%rsp) | ||
3684 | movq %r9,408(%rsp) | ||
3685 | |||
3686 | # qhasm: diag0 = x0 | ||
3687 | # asm 1: movdqa <x0=stack128#4,>diag0=int6464#1 | ||
3688 | # asm 2: movdqa <x0=48(%rsp),>diag0=%xmm0 | ||
3689 | movdqa 48(%rsp),%xmm0 | ||
3690 | |||
3691 | # qhasm: diag1 = x1 | ||
3692 | # asm 1: movdqa <x1=stack128#1,>diag1=int6464#2 | ||
3693 | # asm 2: movdqa <x1=0(%rsp),>diag1=%xmm1 | ||
3694 | movdqa 0(%rsp),%xmm1 | ||
3695 | |||
3696 | # qhasm: diag2 = x2 | ||
3697 | # asm 1: movdqa <x2=stack128#2,>diag2=int6464#3 | ||
3698 | # asm 2: movdqa <x2=16(%rsp),>diag2=%xmm2 | ||
3699 | movdqa 16(%rsp),%xmm2 | ||
3700 | |||
3701 | # qhasm: diag3 = x3 | ||
3702 | # asm 1: movdqa <x3=stack128#3,>diag3=int6464#4 | ||
3703 | # asm 2: movdqa <x3=32(%rsp),>diag3=%xmm3 | ||
3704 | movdqa 32(%rsp),%xmm3 | ||
3705 | |||
3706 | # qhasm: a0 = diag1 | ||
3707 | # asm 1: movdqa <diag1=int6464#2,>a0=int6464#5 | ||
3708 | # asm 2: movdqa <diag1=%xmm1,>a0=%xmm4 | ||
3709 | movdqa %xmm1,%xmm4 | ||
3710 | |||
3711 | # qhasm: i = 20 | ||
3712 | # asm 1: mov $20,>i=int64#4 | ||
3713 | # asm 2: mov $20,>i=%rcx | ||
3714 | mov $20,%rcx | ||
3715 | |||
3716 | # qhasm: mainloop2: | ||
3717 | ._mainloop2: | ||
3718 | |||
3719 | # qhasm: uint32323232 a0 += diag0 | ||
3720 | # asm 1: paddd <diag0=int6464#1,<a0=int6464#5 | ||
3721 | # asm 2: paddd <diag0=%xmm0,<a0=%xmm4 | ||
3722 | paddd %xmm0,%xmm4 | ||
3723 | |||
3724 | # qhasm: a1 = diag0 | ||
3725 | # asm 1: movdqa <diag0=int6464#1,>a1=int6464#6 | ||
3726 | # asm 2: movdqa <diag0=%xmm0,>a1=%xmm5 | ||
3727 | movdqa %xmm0,%xmm5 | ||
3728 | |||
3729 | # qhasm: b0 = a0 | ||
3730 | # asm 1: movdqa <a0=int6464#5,>b0=int6464#7 | ||
3731 | # asm 2: movdqa <a0=%xmm4,>b0=%xmm6 | ||
3732 | movdqa %xmm4,%xmm6 | ||
3733 | |||
3734 | # qhasm: uint32323232 a0 <<= 7 | ||
3735 | # asm 1: pslld $7,<a0=int6464#5 | ||
3736 | # asm 2: pslld $7,<a0=%xmm4 | ||
3737 | pslld $7,%xmm4 | ||
3738 | |||
3739 | # qhasm: uint32323232 b0 >>= 25 | ||
3740 | # asm 1: psrld $25,<b0=int6464#7 | ||
3741 | # asm 2: psrld $25,<b0=%xmm6 | ||
3742 | psrld $25,%xmm6 | ||
3743 | |||
3744 | # qhasm: diag3 ^= a0 | ||
3745 | # asm 1: pxor <a0=int6464#5,<diag3=int6464#4 | ||
3746 | # asm 2: pxor <a0=%xmm4,<diag3=%xmm3 | ||
3747 | pxor %xmm4,%xmm3 | ||
3748 | |||
3749 | # qhasm: diag3 ^= b0 | ||
3750 | # asm 1: pxor <b0=int6464#7,<diag3=int6464#4 | ||
3751 | # asm 2: pxor <b0=%xmm6,<diag3=%xmm3 | ||
3752 | pxor %xmm6,%xmm3 | ||
3753 | |||
3754 | # qhasm: uint32323232 a1 += diag3 | ||
3755 | # asm 1: paddd <diag3=int6464#4,<a1=int6464#6 | ||
3756 | # asm 2: paddd <diag3=%xmm3,<a1=%xmm5 | ||
3757 | paddd %xmm3,%xmm5 | ||
3758 | |||
3759 | # qhasm: a2 = diag3 | ||
3760 | # asm 1: movdqa <diag3=int6464#4,>a2=int6464#5 | ||
3761 | # asm 2: movdqa <diag3=%xmm3,>a2=%xmm4 | ||
3762 | movdqa %xmm3,%xmm4 | ||
3763 | |||
3764 | # qhasm: b1 = a1 | ||
3765 | # asm 1: movdqa <a1=int6464#6,>b1=int6464#7 | ||
3766 | # asm 2: movdqa <a1=%xmm5,>b1=%xmm6 | ||
3767 | movdqa %xmm5,%xmm6 | ||
3768 | |||
3769 | # qhasm: uint32323232 a1 <<= 9 | ||
3770 | # asm 1: pslld $9,<a1=int6464#6 | ||
3771 | # asm 2: pslld $9,<a1=%xmm5 | ||
3772 | pslld $9,%xmm5 | ||
3773 | |||
3774 | # qhasm: uint32323232 b1 >>= 23 | ||
3775 | # asm 1: psrld $23,<b1=int6464#7 | ||
3776 | # asm 2: psrld $23,<b1=%xmm6 | ||
3777 | psrld $23,%xmm6 | ||
3778 | |||
3779 | # qhasm: diag2 ^= a1 | ||
3780 | # asm 1: pxor <a1=int6464#6,<diag2=int6464#3 | ||
3781 | # asm 2: pxor <a1=%xmm5,<diag2=%xmm2 | ||
3782 | pxor %xmm5,%xmm2 | ||
3783 | |||
3784 | # qhasm: diag3 <<<= 32 | ||
3785 | # asm 1: pshufd $0x93,<diag3=int6464#4,<diag3=int6464#4 | ||
3786 | # asm 2: pshufd $0x93,<diag3=%xmm3,<diag3=%xmm3 | ||
3787 | pshufd $0x93,%xmm3,%xmm3 | ||
3788 | |||
3789 | # qhasm: diag2 ^= b1 | ||
3790 | # asm 1: pxor <b1=int6464#7,<diag2=int6464#3 | ||
3791 | # asm 2: pxor <b1=%xmm6,<diag2=%xmm2 | ||
3792 | pxor %xmm6,%xmm2 | ||
3793 | |||
3794 | # qhasm: uint32323232 a2 += diag2 | ||
3795 | # asm 1: paddd <diag2=int6464#3,<a2=int6464#5 | ||
3796 | # asm 2: paddd <diag2=%xmm2,<a2=%xmm4 | ||
3797 | paddd %xmm2,%xmm4 | ||
3798 | |||
3799 | # qhasm: a3 = diag2 | ||
3800 | # asm 1: movdqa <diag2=int6464#3,>a3=int6464#6 | ||
3801 | # asm 2: movdqa <diag2=%xmm2,>a3=%xmm5 | ||
3802 | movdqa %xmm2,%xmm5 | ||
3803 | |||
3804 | # qhasm: b2 = a2 | ||
3805 | # asm 1: movdqa <a2=int6464#5,>b2=int6464#7 | ||
3806 | # asm 2: movdqa <a2=%xmm4,>b2=%xmm6 | ||
3807 | movdqa %xmm4,%xmm6 | ||
3808 | |||
3809 | # qhasm: uint32323232 a2 <<= 13 | ||
3810 | # asm 1: pslld $13,<a2=int6464#5 | ||
3811 | # asm 2: pslld $13,<a2=%xmm4 | ||
3812 | pslld $13,%xmm4 | ||
3813 | |||
3814 | # qhasm: uint32323232 b2 >>= 19 | ||
3815 | # asm 1: psrld $19,<b2=int6464#7 | ||
3816 | # asm 2: psrld $19,<b2=%xmm6 | ||
3817 | psrld $19,%xmm6 | ||
3818 | |||
3819 | # qhasm: diag1 ^= a2 | ||
3820 | # asm 1: pxor <a2=int6464#5,<diag1=int6464#2 | ||
3821 | # asm 2: pxor <a2=%xmm4,<diag1=%xmm1 | ||
3822 | pxor %xmm4,%xmm1 | ||
3823 | |||
3824 | # qhasm: diag2 <<<= 64 | ||
3825 | # asm 1: pshufd $0x4e,<diag2=int6464#3,<diag2=int6464#3 | ||
3826 | # asm 2: pshufd $0x4e,<diag2=%xmm2,<diag2=%xmm2 | ||
3827 | pshufd $0x4e,%xmm2,%xmm2 | ||
3828 | |||
3829 | # qhasm: diag1 ^= b2 | ||
3830 | # asm 1: pxor <b2=int6464#7,<diag1=int6464#2 | ||
3831 | # asm 2: pxor <b2=%xmm6,<diag1=%xmm1 | ||
3832 | pxor %xmm6,%xmm1 | ||
3833 | |||
3834 | # qhasm: uint32323232 a3 += diag1 | ||
3835 | # asm 1: paddd <diag1=int6464#2,<a3=int6464#6 | ||
3836 | # asm 2: paddd <diag1=%xmm1,<a3=%xmm5 | ||
3837 | paddd %xmm1,%xmm5 | ||
3838 | |||
3839 | # qhasm: a4 = diag3 | ||
3840 | # asm 1: movdqa <diag3=int6464#4,>a4=int6464#5 | ||
3841 | # asm 2: movdqa <diag3=%xmm3,>a4=%xmm4 | ||
3842 | movdqa %xmm3,%xmm4 | ||
3843 | |||
3844 | # qhasm: b3 = a3 | ||
3845 | # asm 1: movdqa <a3=int6464#6,>b3=int6464#7 | ||
3846 | # asm 2: movdqa <a3=%xmm5,>b3=%xmm6 | ||
3847 | movdqa %xmm5,%xmm6 | ||
3848 | |||
3849 | # qhasm: uint32323232 a3 <<= 18 | ||
3850 | # asm 1: pslld $18,<a3=int6464#6 | ||
3851 | # asm 2: pslld $18,<a3=%xmm5 | ||
3852 | pslld $18,%xmm5 | ||
3853 | |||
3854 | # qhasm: uint32323232 b3 >>= 14 | ||
3855 | # asm 1: psrld $14,<b3=int6464#7 | ||
3856 | # asm 2: psrld $14,<b3=%xmm6 | ||
3857 | psrld $14,%xmm6 | ||
3858 | |||
3859 | # qhasm: diag0 ^= a3 | ||
3860 | # asm 1: pxor <a3=int6464#6,<diag0=int6464#1 | ||
3861 | # asm 2: pxor <a3=%xmm5,<diag0=%xmm0 | ||
3862 | pxor %xmm5,%xmm0 | ||
3863 | |||
3864 | # qhasm: diag1 <<<= 96 | ||
3865 | # asm 1: pshufd $0x39,<diag1=int6464#2,<diag1=int6464#2 | ||
3866 | # asm 2: pshufd $0x39,<diag1=%xmm1,<diag1=%xmm1 | ||
3867 | pshufd $0x39,%xmm1,%xmm1 | ||
3868 | |||
3869 | # qhasm: diag0 ^= b3 | ||
3870 | # asm 1: pxor <b3=int6464#7,<diag0=int6464#1 | ||
3871 | # asm 2: pxor <b3=%xmm6,<diag0=%xmm0 | ||
3872 | pxor %xmm6,%xmm0 | ||
3873 | |||
3874 | # qhasm: uint32323232 a4 += diag0 | ||
3875 | # asm 1: paddd <diag0=int6464#1,<a4=int6464#5 | ||
3876 | # asm 2: paddd <diag0=%xmm0,<a4=%xmm4 | ||
3877 | paddd %xmm0,%xmm4 | ||
3878 | |||
3879 | # qhasm: a5 = diag0 | ||
3880 | # asm 1: movdqa <diag0=int6464#1,>a5=int6464#6 | ||
3881 | # asm 2: movdqa <diag0=%xmm0,>a5=%xmm5 | ||
3882 | movdqa %xmm0,%xmm5 | ||
3883 | |||
3884 | # qhasm: b4 = a4 | ||
3885 | # asm 1: movdqa <a4=int6464#5,>b4=int6464#7 | ||
3886 | # asm 2: movdqa <a4=%xmm4,>b4=%xmm6 | ||
3887 | movdqa %xmm4,%xmm6 | ||
3888 | |||
3889 | # qhasm: uint32323232 a4 <<= 7 | ||
3890 | # asm 1: pslld $7,<a4=int6464#5 | ||
3891 | # asm 2: pslld $7,<a4=%xmm4 | ||
3892 | pslld $7,%xmm4 | ||
3893 | |||
3894 | # qhasm: uint32323232 b4 >>= 25 | ||
3895 | # asm 1: psrld $25,<b4=int6464#7 | ||
3896 | # asm 2: psrld $25,<b4=%xmm6 | ||
3897 | psrld $25,%xmm6 | ||
3898 | |||
3899 | # qhasm: diag1 ^= a4 | ||
3900 | # asm 1: pxor <a4=int6464#5,<diag1=int6464#2 | ||
3901 | # asm 2: pxor <a4=%xmm4,<diag1=%xmm1 | ||
3902 | pxor %xmm4,%xmm1 | ||
3903 | |||
3904 | # qhasm: diag1 ^= b4 | ||
3905 | # asm 1: pxor <b4=int6464#7,<diag1=int6464#2 | ||
3906 | # asm 2: pxor <b4=%xmm6,<diag1=%xmm1 | ||
3907 | pxor %xmm6,%xmm1 | ||
3908 | |||
3909 | # qhasm: uint32323232 a5 += diag1 | ||
3910 | # asm 1: paddd <diag1=int6464#2,<a5=int6464#6 | ||
3911 | # asm 2: paddd <diag1=%xmm1,<a5=%xmm5 | ||
3912 | paddd %xmm1,%xmm5 | ||
3913 | |||
3914 | # qhasm: a6 = diag1 | ||
3915 | # asm 1: movdqa <diag1=int6464#2,>a6=int6464#5 | ||
3916 | # asm 2: movdqa <diag1=%xmm1,>a6=%xmm4 | ||
3917 | movdqa %xmm1,%xmm4 | ||
3918 | |||
3919 | # qhasm: b5 = a5 | ||
3920 | # asm 1: movdqa <a5=int6464#6,>b5=int6464#7 | ||
3921 | # asm 2: movdqa <a5=%xmm5,>b5=%xmm6 | ||
3922 | movdqa %xmm5,%xmm6 | ||
3923 | |||
3924 | # qhasm: uint32323232 a5 <<= 9 | ||
3925 | # asm 1: pslld $9,<a5=int6464#6 | ||
3926 | # asm 2: pslld $9,<a5=%xmm5 | ||
3927 | pslld $9,%xmm5 | ||
3928 | |||
3929 | # qhasm: uint32323232 b5 >>= 23 | ||
3930 | # asm 1: psrld $23,<b5=int6464#7 | ||
3931 | # asm 2: psrld $23,<b5=%xmm6 | ||
3932 | psrld $23,%xmm6 | ||
3933 | |||
3934 | # qhasm: diag2 ^= a5 | ||
3935 | # asm 1: pxor <a5=int6464#6,<diag2=int6464#3 | ||
3936 | # asm 2: pxor <a5=%xmm5,<diag2=%xmm2 | ||
3937 | pxor %xmm5,%xmm2 | ||
3938 | |||
3939 | # qhasm: diag1 <<<= 32 | ||
3940 | # asm 1: pshufd $0x93,<diag1=int6464#2,<diag1=int6464#2 | ||
3941 | # asm 2: pshufd $0x93,<diag1=%xmm1,<diag1=%xmm1 | ||
3942 | pshufd $0x93,%xmm1,%xmm1 | ||
3943 | |||
3944 | # qhasm: diag2 ^= b5 | ||
3945 | # asm 1: pxor <b5=int6464#7,<diag2=int6464#3 | ||
3946 | # asm 2: pxor <b5=%xmm6,<diag2=%xmm2 | ||
3947 | pxor %xmm6,%xmm2 | ||
3948 | |||
3949 | # qhasm: uint32323232 a6 += diag2 | ||
3950 | # asm 1: paddd <diag2=int6464#3,<a6=int6464#5 | ||
3951 | # asm 2: paddd <diag2=%xmm2,<a6=%xmm4 | ||
3952 | paddd %xmm2,%xmm4 | ||
3953 | |||
3954 | # qhasm: a7 = diag2 | ||
3955 | # asm 1: movdqa <diag2=int6464#3,>a7=int6464#6 | ||
3956 | # asm 2: movdqa <diag2=%xmm2,>a7=%xmm5 | ||
3957 | movdqa %xmm2,%xmm5 | ||
3958 | |||
3959 | # qhasm: b6 = a6 | ||
3960 | # asm 1: movdqa <a6=int6464#5,>b6=int6464#7 | ||
3961 | # asm 2: movdqa <a6=%xmm4,>b6=%xmm6 | ||
3962 | movdqa %xmm4,%xmm6 | ||
3963 | |||
3964 | # qhasm: uint32323232 a6 <<= 13 | ||
3965 | # asm 1: pslld $13,<a6=int6464#5 | ||
3966 | # asm 2: pslld $13,<a6=%xmm4 | ||
3967 | pslld $13,%xmm4 | ||
3968 | |||
3969 | # qhasm: uint32323232 b6 >>= 19 | ||
3970 | # asm 1: psrld $19,<b6=int6464#7 | ||
3971 | # asm 2: psrld $19,<b6=%xmm6 | ||
3972 | psrld $19,%xmm6 | ||
3973 | |||
3974 | # qhasm: diag3 ^= a6 | ||
3975 | # asm 1: pxor <a6=int6464#5,<diag3=int6464#4 | ||
3976 | # asm 2: pxor <a6=%xmm4,<diag3=%xmm3 | ||
3977 | pxor %xmm4,%xmm3 | ||
3978 | |||
3979 | # qhasm: diag2 <<<= 64 | ||
3980 | # asm 1: pshufd $0x4e,<diag2=int6464#3,<diag2=int6464#3 | ||
3981 | # asm 2: pshufd $0x4e,<diag2=%xmm2,<diag2=%xmm2 | ||
3982 | pshufd $0x4e,%xmm2,%xmm2 | ||
3983 | |||
3984 | # qhasm: diag3 ^= b6 | ||
3985 | # asm 1: pxor <b6=int6464#7,<diag3=int6464#4 | ||
3986 | # asm 2: pxor <b6=%xmm6,<diag3=%xmm3 | ||
3987 | pxor %xmm6,%xmm3 | ||
3988 | |||
3989 | # qhasm: uint32323232 a7 += diag3 | ||
3990 | # asm 1: paddd <diag3=int6464#4,<a7=int6464#6 | ||
3991 | # asm 2: paddd <diag3=%xmm3,<a7=%xmm5 | ||
3992 | paddd %xmm3,%xmm5 | ||
3993 | |||
3994 | # qhasm: a0 = diag1 | ||
3995 | # asm 1: movdqa <diag1=int6464#2,>a0=int6464#5 | ||
3996 | # asm 2: movdqa <diag1=%xmm1,>a0=%xmm4 | ||
3997 | movdqa %xmm1,%xmm4 | ||
3998 | |||
3999 | # qhasm: b7 = a7 | ||
4000 | # asm 1: movdqa <a7=int6464#6,>b7=int6464#7 | ||
4001 | # asm 2: movdqa <a7=%xmm5,>b7=%xmm6 | ||
4002 | movdqa %xmm5,%xmm6 | ||
4003 | |||
4004 | # qhasm: uint32323232 a7 <<= 18 | ||
4005 | # asm 1: pslld $18,<a7=int6464#6 | ||
4006 | # asm 2: pslld $18,<a7=%xmm5 | ||
4007 | pslld $18,%xmm5 | ||
4008 | |||
4009 | # qhasm: uint32323232 b7 >>= 14 | ||
4010 | # asm 1: psrld $14,<b7=int6464#7 | ||
4011 | # asm 2: psrld $14,<b7=%xmm6 | ||
4012 | psrld $14,%xmm6 | ||
4013 | |||
4014 | # qhasm: diag0 ^= a7 | ||
4015 | # asm 1: pxor <a7=int6464#6,<diag0=int6464#1 | ||
4016 | # asm 2: pxor <a7=%xmm5,<diag0=%xmm0 | ||
4017 | pxor %xmm5,%xmm0 | ||
4018 | |||
4019 | # qhasm: diag3 <<<= 96 | ||
4020 | # asm 1: pshufd $0x39,<diag3=int6464#4,<diag3=int6464#4 | ||
4021 | # asm 2: pshufd $0x39,<diag3=%xmm3,<diag3=%xmm3 | ||
4022 | pshufd $0x39,%xmm3,%xmm3 | ||
4023 | |||
4024 | # qhasm: diag0 ^= b7 | ||
4025 | # asm 1: pxor <b7=int6464#7,<diag0=int6464#1 | ||
4026 | # asm 2: pxor <b7=%xmm6,<diag0=%xmm0 | ||
4027 | pxor %xmm6,%xmm0 | ||
4028 | |||
4029 | # qhasm: uint32323232 a0 += diag0 | ||
4030 | # asm 1: paddd <diag0=int6464#1,<a0=int6464#5 | ||
4031 | # asm 2: paddd <diag0=%xmm0,<a0=%xmm4 | ||
4032 | paddd %xmm0,%xmm4 | ||
4033 | |||
4034 | # qhasm: a1 = diag0 | ||
4035 | # asm 1: movdqa <diag0=int6464#1,>a1=int6464#6 | ||
4036 | # asm 2: movdqa <diag0=%xmm0,>a1=%xmm5 | ||
4037 | movdqa %xmm0,%xmm5 | ||
4038 | |||
4039 | # qhasm: b0 = a0 | ||
4040 | # asm 1: movdqa <a0=int6464#5,>b0=int6464#7 | ||
4041 | # asm 2: movdqa <a0=%xmm4,>b0=%xmm6 | ||
4042 | movdqa %xmm4,%xmm6 | ||
4043 | |||
4044 | # qhasm: uint32323232 a0 <<= 7 | ||
4045 | # asm 1: pslld $7,<a0=int6464#5 | ||
4046 | # asm 2: pslld $7,<a0=%xmm4 | ||
4047 | pslld $7,%xmm4 | ||
4048 | |||
4049 | # qhasm: uint32323232 b0 >>= 25 | ||
4050 | # asm 1: psrld $25,<b0=int6464#7 | ||
4051 | # asm 2: psrld $25,<b0=%xmm6 | ||
4052 | psrld $25,%xmm6 | ||
4053 | |||
4054 | # qhasm: diag3 ^= a0 | ||
4055 | # asm 1: pxor <a0=int6464#5,<diag3=int6464#4 | ||
4056 | # asm 2: pxor <a0=%xmm4,<diag3=%xmm3 | ||
4057 | pxor %xmm4,%xmm3 | ||
4058 | |||
4059 | # qhasm: diag3 ^= b0 | ||
4060 | # asm 1: pxor <b0=int6464#7,<diag3=int6464#4 | ||
4061 | # asm 2: pxor <b0=%xmm6,<diag3=%xmm3 | ||
4062 | pxor %xmm6,%xmm3 | ||
4063 | |||
4064 | # qhasm: uint32323232 a1 += diag3 | ||
4065 | # asm 1: paddd <diag3=int6464#4,<a1=int6464#6 | ||
4066 | # asm 2: paddd <diag3=%xmm3,<a1=%xmm5 | ||
4067 | paddd %xmm3,%xmm5 | ||
4068 | |||
4069 | # qhasm: a2 = diag3 | ||
4070 | # asm 1: movdqa <diag3=int6464#4,>a2=int6464#5 | ||
4071 | # asm 2: movdqa <diag3=%xmm3,>a2=%xmm4 | ||
4072 | movdqa %xmm3,%xmm4 | ||
4073 | |||
4074 | # qhasm: b1 = a1 | ||
4075 | # asm 1: movdqa <a1=int6464#6,>b1=int6464#7 | ||
4076 | # asm 2: movdqa <a1=%xmm5,>b1=%xmm6 | ||
4077 | movdqa %xmm5,%xmm6 | ||
4078 | |||
4079 | # qhasm: uint32323232 a1 <<= 9 | ||
4080 | # asm 1: pslld $9,<a1=int6464#6 | ||
4081 | # asm 2: pslld $9,<a1=%xmm5 | ||
4082 | pslld $9,%xmm5 | ||
4083 | |||
4084 | # qhasm: uint32323232 b1 >>= 23 | ||
4085 | # asm 1: psrld $23,<b1=int6464#7 | ||
4086 | # asm 2: psrld $23,<b1=%xmm6 | ||
4087 | psrld $23,%xmm6 | ||
4088 | |||
4089 | # qhasm: diag2 ^= a1 | ||
4090 | # asm 1: pxor <a1=int6464#6,<diag2=int6464#3 | ||
4091 | # asm 2: pxor <a1=%xmm5,<diag2=%xmm2 | ||
4092 | pxor %xmm5,%xmm2 | ||
4093 | |||
4094 | # qhasm: diag3 <<<= 32 | ||
4095 | # asm 1: pshufd $0x93,<diag3=int6464#4,<diag3=int6464#4 | ||
4096 | # asm 2: pshufd $0x93,<diag3=%xmm3,<diag3=%xmm3 | ||
4097 | pshufd $0x93,%xmm3,%xmm3 | ||
4098 | |||
4099 | # qhasm: diag2 ^= b1 | ||
4100 | # asm 1: pxor <b1=int6464#7,<diag2=int6464#3 | ||
4101 | # asm 2: pxor <b1=%xmm6,<diag2=%xmm2 | ||
4102 | pxor %xmm6,%xmm2 | ||
4103 | |||
4104 | # qhasm: uint32323232 a2 += diag2 | ||
4105 | # asm 1: paddd <diag2=int6464#3,<a2=int6464#5 | ||
4106 | # asm 2: paddd <diag2=%xmm2,<a2=%xmm4 | ||
4107 | paddd %xmm2,%xmm4 | ||
4108 | |||
4109 | # qhasm: a3 = diag2 | ||
4110 | # asm 1: movdqa <diag2=int6464#3,>a3=int6464#6 | ||
4111 | # asm 2: movdqa <diag2=%xmm2,>a3=%xmm5 | ||
4112 | movdqa %xmm2,%xmm5 | ||
4113 | |||
4114 | # qhasm: b2 = a2 | ||
4115 | # asm 1: movdqa <a2=int6464#5,>b2=int6464#7 | ||
4116 | # asm 2: movdqa <a2=%xmm4,>b2=%xmm6 | ||
4117 | movdqa %xmm4,%xmm6 | ||
4118 | |||
4119 | # qhasm: uint32323232 a2 <<= 13 | ||
4120 | # asm 1: pslld $13,<a2=int6464#5 | ||
4121 | # asm 2: pslld $13,<a2=%xmm4 | ||
4122 | pslld $13,%xmm4 | ||
4123 | |||
4124 | # qhasm: uint32323232 b2 >>= 19 | ||
4125 | # asm 1: psrld $19,<b2=int6464#7 | ||
4126 | # asm 2: psrld $19,<b2=%xmm6 | ||
4127 | psrld $19,%xmm6 | ||
4128 | |||
4129 | # qhasm: diag1 ^= a2 | ||
4130 | # asm 1: pxor <a2=int6464#5,<diag1=int6464#2 | ||
4131 | # asm 2: pxor <a2=%xmm4,<diag1=%xmm1 | ||
4132 | pxor %xmm4,%xmm1 | ||
4133 | |||
4134 | # qhasm: diag2 <<<= 64 | ||
4135 | # asm 1: pshufd $0x4e,<diag2=int6464#3,<diag2=int6464#3 | ||
4136 | # asm 2: pshufd $0x4e,<diag2=%xmm2,<diag2=%xmm2 | ||
4137 | pshufd $0x4e,%xmm2,%xmm2 | ||
4138 | |||
4139 | # qhasm: diag1 ^= b2 | ||
4140 | # asm 1: pxor <b2=int6464#7,<diag1=int6464#2 | ||
4141 | # asm 2: pxor <b2=%xmm6,<diag1=%xmm1 | ||
4142 | pxor %xmm6,%xmm1 | ||
4143 | |||
4144 | # qhasm: uint32323232 a3 += diag1 | ||
4145 | # asm 1: paddd <diag1=int6464#2,<a3=int6464#6 | ||
4146 | # asm 2: paddd <diag1=%xmm1,<a3=%xmm5 | ||
4147 | paddd %xmm1,%xmm5 | ||
4148 | |||
4149 | # qhasm: a4 = diag3 | ||
4150 | # asm 1: movdqa <diag3=int6464#4,>a4=int6464#5 | ||
4151 | # asm 2: movdqa <diag3=%xmm3,>a4=%xmm4 | ||
4152 | movdqa %xmm3,%xmm4 | ||
4153 | |||
4154 | # qhasm: b3 = a3 | ||
4155 | # asm 1: movdqa <a3=int6464#6,>b3=int6464#7 | ||
4156 | # asm 2: movdqa <a3=%xmm5,>b3=%xmm6 | ||
4157 | movdqa %xmm5,%xmm6 | ||
4158 | |||
4159 | # qhasm: uint32323232 a3 <<= 18 | ||
4160 | # asm 1: pslld $18,<a3=int6464#6 | ||
4161 | # asm 2: pslld $18,<a3=%xmm5 | ||
4162 | pslld $18,%xmm5 | ||
4163 | |||
4164 | # qhasm: uint32323232 b3 >>= 14 | ||
4165 | # asm 1: psrld $14,<b3=int6464#7 | ||
4166 | # asm 2: psrld $14,<b3=%xmm6 | ||
4167 | psrld $14,%xmm6 | ||
4168 | |||
4169 | # qhasm: diag0 ^= a3 | ||
4170 | # asm 1: pxor <a3=int6464#6,<diag0=int6464#1 | ||
4171 | # asm 2: pxor <a3=%xmm5,<diag0=%xmm0 | ||
4172 | pxor %xmm5,%xmm0 | ||
4173 | |||
4174 | # qhasm: diag1 <<<= 96 | ||
4175 | # asm 1: pshufd $0x39,<diag1=int6464#2,<diag1=int6464#2 | ||
4176 | # asm 2: pshufd $0x39,<diag1=%xmm1,<diag1=%xmm1 | ||
4177 | pshufd $0x39,%xmm1,%xmm1 | ||
4178 | |||
4179 | # qhasm: diag0 ^= b3 | ||
4180 | # asm 1: pxor <b3=int6464#7,<diag0=int6464#1 | ||
4181 | # asm 2: pxor <b3=%xmm6,<diag0=%xmm0 | ||
4182 | pxor %xmm6,%xmm0 | ||
4183 | |||
4184 | # qhasm: uint32323232 a4 += diag0 | ||
4185 | # asm 1: paddd <diag0=int6464#1,<a4=int6464#5 | ||
4186 | # asm 2: paddd <diag0=%xmm0,<a4=%xmm4 | ||
4187 | paddd %xmm0,%xmm4 | ||
4188 | |||
4189 | # qhasm: a5 = diag0 | ||
4190 | # asm 1: movdqa <diag0=int6464#1,>a5=int6464#6 | ||
4191 | # asm 2: movdqa <diag0=%xmm0,>a5=%xmm5 | ||
4192 | movdqa %xmm0,%xmm5 | ||
4193 | |||
4194 | # qhasm: b4 = a4 | ||
4195 | # asm 1: movdqa <a4=int6464#5,>b4=int6464#7 | ||
4196 | # asm 2: movdqa <a4=%xmm4,>b4=%xmm6 | ||
4197 | movdqa %xmm4,%xmm6 | ||
4198 | |||
4199 | # qhasm: uint32323232 a4 <<= 7 | ||
4200 | # asm 1: pslld $7,<a4=int6464#5 | ||
4201 | # asm 2: pslld $7,<a4=%xmm4 | ||
4202 | pslld $7,%xmm4 | ||
4203 | |||
4204 | # qhasm: uint32323232 b4 >>= 25 | ||
4205 | # asm 1: psrld $25,<b4=int6464#7 | ||
4206 | # asm 2: psrld $25,<b4=%xmm6 | ||
4207 | psrld $25,%xmm6 | ||
4208 | |||
4209 | # qhasm: diag1 ^= a4 | ||
4210 | # asm 1: pxor <a4=int6464#5,<diag1=int6464#2 | ||
4211 | # asm 2: pxor <a4=%xmm4,<diag1=%xmm1 | ||
4212 | pxor %xmm4,%xmm1 | ||
4213 | |||
4214 | # qhasm: diag1 ^= b4 | ||
4215 | # asm 1: pxor <b4=int6464#7,<diag1=int6464#2 | ||
4216 | # asm 2: pxor <b4=%xmm6,<diag1=%xmm1 | ||
4217 | pxor %xmm6,%xmm1 | ||
4218 | |||
4219 | # qhasm: uint32323232 a5 += diag1 | ||
4220 | # asm 1: paddd <diag1=int6464#2,<a5=int6464#6 | ||
4221 | # asm 2: paddd <diag1=%xmm1,<a5=%xmm5 | ||
4222 | paddd %xmm1,%xmm5 | ||
4223 | |||
4224 | # qhasm: a6 = diag1 | ||
4225 | # asm 1: movdqa <diag1=int6464#2,>a6=int6464#5 | ||
4226 | # asm 2: movdqa <diag1=%xmm1,>a6=%xmm4 | ||
4227 | movdqa %xmm1,%xmm4 | ||
4228 | |||
4229 | # qhasm: b5 = a5 | ||
4230 | # asm 1: movdqa <a5=int6464#6,>b5=int6464#7 | ||
4231 | # asm 2: movdqa <a5=%xmm5,>b5=%xmm6 | ||
4232 | movdqa %xmm5,%xmm6 | ||
4233 | |||
4234 | # qhasm: uint32323232 a5 <<= 9 | ||
4235 | # asm 1: pslld $9,<a5=int6464#6 | ||
4236 | # asm 2: pslld $9,<a5=%xmm5 | ||
4237 | pslld $9,%xmm5 | ||
4238 | |||
4239 | # qhasm: uint32323232 b5 >>= 23 | ||
4240 | # asm 1: psrld $23,<b5=int6464#7 | ||
4241 | # asm 2: psrld $23,<b5=%xmm6 | ||
4242 | psrld $23,%xmm6 | ||
4243 | |||
4244 | # qhasm: diag2 ^= a5 | ||
4245 | # asm 1: pxor <a5=int6464#6,<diag2=int6464#3 | ||
4246 | # asm 2: pxor <a5=%xmm5,<diag2=%xmm2 | ||
4247 | pxor %xmm5,%xmm2 | ||
4248 | |||
4249 | # qhasm: diag1 <<<= 32 | ||
4250 | # asm 1: pshufd $0x93,<diag1=int6464#2,<diag1=int6464#2 | ||
4251 | # asm 2: pshufd $0x93,<diag1=%xmm1,<diag1=%xmm1 | ||
4252 | pshufd $0x93,%xmm1,%xmm1 | ||
4253 | |||
4254 | # qhasm: diag2 ^= b5 | ||
4255 | # asm 1: pxor <b5=int6464#7,<diag2=int6464#3 | ||
4256 | # asm 2: pxor <b5=%xmm6,<diag2=%xmm2 | ||
4257 | pxor %xmm6,%xmm2 | ||
4258 | |||
4259 | # qhasm: uint32323232 a6 += diag2 | ||
4260 | # asm 1: paddd <diag2=int6464#3,<a6=int6464#5 | ||
4261 | # asm 2: paddd <diag2=%xmm2,<a6=%xmm4 | ||
4262 | paddd %xmm2,%xmm4 | ||
4263 | |||
4264 | # qhasm: a7 = diag2 | ||
4265 | # asm 1: movdqa <diag2=int6464#3,>a7=int6464#6 | ||
4266 | # asm 2: movdqa <diag2=%xmm2,>a7=%xmm5 | ||
4267 | movdqa %xmm2,%xmm5 | ||
4268 | |||
4269 | # qhasm: b6 = a6 | ||
4270 | # asm 1: movdqa <a6=int6464#5,>b6=int6464#7 | ||
4271 | # asm 2: movdqa <a6=%xmm4,>b6=%xmm6 | ||
4272 | movdqa %xmm4,%xmm6 | ||
4273 | |||
4274 | # qhasm: uint32323232 a6 <<= 13 | ||
4275 | # asm 1: pslld $13,<a6=int6464#5 | ||
4276 | # asm 2: pslld $13,<a6=%xmm4 | ||
4277 | pslld $13,%xmm4 | ||
4278 | |||
4279 | # qhasm: uint32323232 b6 >>= 19 | ||
4280 | # asm 1: psrld $19,<b6=int6464#7 | ||
4281 | # asm 2: psrld $19,<b6=%xmm6 | ||
4282 | psrld $19,%xmm6 | ||
4283 | |||
4284 | # qhasm: diag3 ^= a6 | ||
4285 | # asm 1: pxor <a6=int6464#5,<diag3=int6464#4 | ||
4286 | # asm 2: pxor <a6=%xmm4,<diag3=%xmm3 | ||
4287 | pxor %xmm4,%xmm3 | ||
4288 | |||
4289 | # qhasm: diag2 <<<= 64 | ||
4290 | # asm 1: pshufd $0x4e,<diag2=int6464#3,<diag2=int6464#3 | ||
4291 | # asm 2: pshufd $0x4e,<diag2=%xmm2,<diag2=%xmm2 | ||
4292 | pshufd $0x4e,%xmm2,%xmm2 | ||
4293 | |||
4294 | # qhasm: diag3 ^= b6 | ||
4295 | # asm 1: pxor <b6=int6464#7,<diag3=int6464#4 | ||
4296 | # asm 2: pxor <b6=%xmm6,<diag3=%xmm3 | ||
4297 | pxor %xmm6,%xmm3 | ||
4298 | |||
4299 | # qhasm: unsigned>? i -= 4 | ||
4300 | # asm 1: sub $4,<i=int64#4 | ||
4301 | # asm 2: sub $4,<i=%rcx | ||
4302 | sub $4,%rcx | ||
4303 | |||
4304 | # qhasm: uint32323232 a7 += diag3 | ||
4305 | # asm 1: paddd <diag3=int6464#4,<a7=int6464#6 | ||
4306 | # asm 2: paddd <diag3=%xmm3,<a7=%xmm5 | ||
4307 | paddd %xmm3,%xmm5 | ||
4308 | |||
4309 | # qhasm: a0 = diag1 | ||
4310 | # asm 1: movdqa <diag1=int6464#2,>a0=int6464#5 | ||
4311 | # asm 2: movdqa <diag1=%xmm1,>a0=%xmm4 | ||
4312 | movdqa %xmm1,%xmm4 | ||
4313 | |||
4314 | # qhasm: b7 = a7 | ||
4315 | # asm 1: movdqa <a7=int6464#6,>b7=int6464#7 | ||
4316 | # asm 2: movdqa <a7=%xmm5,>b7=%xmm6 | ||
4317 | movdqa %xmm5,%xmm6 | ||
4318 | |||
4319 | # qhasm: uint32323232 a7 <<= 18 | ||
4320 | # asm 1: pslld $18,<a7=int6464#6 | ||
4321 | # asm 2: pslld $18,<a7=%xmm5 | ||
4322 | pslld $18,%xmm5 | ||
4323 | |||
4324 | # qhasm: b0 = 0 | ||
4325 | # asm 1: pxor >b0=int6464#8,>b0=int6464#8 | ||
4326 | # asm 2: pxor >b0=%xmm7,>b0=%xmm7 | ||
4327 | pxor %xmm7,%xmm7 | ||
4328 | |||
4329 | # qhasm: uint32323232 b7 >>= 14 | ||
4330 | # asm 1: psrld $14,<b7=int6464#7 | ||
4331 | # asm 2: psrld $14,<b7=%xmm6 | ||
4332 | psrld $14,%xmm6 | ||
4333 | |||
4334 | # qhasm: diag0 ^= a7 | ||
4335 | # asm 1: pxor <a7=int6464#6,<diag0=int6464#1 | ||
4336 | # asm 2: pxor <a7=%xmm5,<diag0=%xmm0 | ||
4337 | pxor %xmm5,%xmm0 | ||
4338 | |||
4339 | # qhasm: diag3 <<<= 96 | ||
4340 | # asm 1: pshufd $0x39,<diag3=int6464#4,<diag3=int6464#4 | ||
4341 | # asm 2: pshufd $0x39,<diag3=%xmm3,<diag3=%xmm3 | ||
4342 | pshufd $0x39,%xmm3,%xmm3 | ||
4343 | |||
4344 | # qhasm: diag0 ^= b7 | ||
4345 | # asm 1: pxor <b7=int6464#7,<diag0=int6464#1 | ||
4346 | # asm 2: pxor <b7=%xmm6,<diag0=%xmm0 | ||
4347 | pxor %xmm6,%xmm0 | ||
4348 | # comment:fp stack unchanged by jump | ||
4349 | |||
4350 | # qhasm: goto mainloop2 if unsigned> | ||
4351 | ja ._mainloop2 | ||
4352 | |||
4353 | # qhasm: uint32323232 diag0 += x0 | ||
4354 | # asm 1: paddd <x0=stack128#4,<diag0=int6464#1 | ||
4355 | # asm 2: paddd <x0=48(%rsp),<diag0=%xmm0 | ||
4356 | paddd 48(%rsp),%xmm0 | ||
4357 | |||
4358 | # qhasm: uint32323232 diag1 += x1 | ||
4359 | # asm 1: paddd <x1=stack128#1,<diag1=int6464#2 | ||
4360 | # asm 2: paddd <x1=0(%rsp),<diag1=%xmm1 | ||
4361 | paddd 0(%rsp),%xmm1 | ||
4362 | |||
4363 | # qhasm: uint32323232 diag2 += x2 | ||
4364 | # asm 1: paddd <x2=stack128#2,<diag2=int6464#3 | ||
4365 | # asm 2: paddd <x2=16(%rsp),<diag2=%xmm2 | ||
4366 | paddd 16(%rsp),%xmm2 | ||
4367 | |||
4368 | # qhasm: uint32323232 diag3 += x3 | ||
4369 | # asm 1: paddd <x3=stack128#3,<diag3=int6464#4 | ||
4370 | # asm 2: paddd <x3=32(%rsp),<diag3=%xmm3 | ||
4371 | paddd 32(%rsp),%xmm3 | ||
4372 | |||
4373 | # qhasm: in0 = diag0 | ||
4374 | # asm 1: movd <diag0=int6464#1,>in0=int64#4 | ||
4375 | # asm 2: movd <diag0=%xmm0,>in0=%rcx | ||
4376 | movd %xmm0,%rcx | ||
4377 | |||
4378 | # qhasm: in12 = diag1 | ||
4379 | # asm 1: movd <diag1=int6464#2,>in12=int64#5 | ||
4380 | # asm 2: movd <diag1=%xmm1,>in12=%r8 | ||
4381 | movd %xmm1,%r8 | ||
4382 | |||
4383 | # qhasm: in8 = diag2 | ||
4384 | # asm 1: movd <diag2=int6464#3,>in8=int64#6 | ||
4385 | # asm 2: movd <diag2=%xmm2,>in8=%r9 | ||
4386 | movd %xmm2,%r9 | ||
4387 | |||
4388 | # qhasm: in4 = diag3 | ||
4389 | # asm 1: movd <diag3=int6464#4,>in4=int64#7 | ||
4390 | # asm 2: movd <diag3=%xmm3,>in4=%rax | ||
4391 | movd %xmm3,%rax | ||
4392 | |||
4393 | # qhasm: diag0 <<<= 96 | ||
4394 | # asm 1: pshufd $0x39,<diag0=int6464#1,<diag0=int6464#1 | ||
4395 | # asm 2: pshufd $0x39,<diag0=%xmm0,<diag0=%xmm0 | ||
4396 | pshufd $0x39,%xmm0,%xmm0 | ||
4397 | |||
4398 | # qhasm: diag1 <<<= 96 | ||
4399 | # asm 1: pshufd $0x39,<diag1=int6464#2,<diag1=int6464#2 | ||
4400 | # asm 2: pshufd $0x39,<diag1=%xmm1,<diag1=%xmm1 | ||
4401 | pshufd $0x39,%xmm1,%xmm1 | ||
4402 | |||
4403 | # qhasm: diag2 <<<= 96 | ||
4404 | # asm 1: pshufd $0x39,<diag2=int6464#3,<diag2=int6464#3 | ||
4405 | # asm 2: pshufd $0x39,<diag2=%xmm2,<diag2=%xmm2 | ||
4406 | pshufd $0x39,%xmm2,%xmm2 | ||
4407 | |||
4408 | # qhasm: diag3 <<<= 96 | ||
4409 | # asm 1: pshufd $0x39,<diag3=int6464#4,<diag3=int6464#4 | ||
4410 | # asm 2: pshufd $0x39,<diag3=%xmm3,<diag3=%xmm3 | ||
4411 | pshufd $0x39,%xmm3,%xmm3 | ||
4412 | |||
4413 | # qhasm: (uint32) in0 ^= *(uint32 *) (m + 0) | ||
4414 | # asm 1: xorl 0(<m=int64#2),<in0=int64#4d | ||
4415 | # asm 2: xorl 0(<m=%rsi),<in0=%ecx | ||
4416 | xorl 0(%rsi),%ecx | ||
4417 | |||
4418 | # qhasm: (uint32) in12 ^= *(uint32 *) (m + 48) | ||
4419 | # asm 1: xorl 48(<m=int64#2),<in12=int64#5d | ||
4420 | # asm 2: xorl 48(<m=%rsi),<in12=%r8d | ||
4421 | xorl 48(%rsi),%r8d | ||
4422 | |||
4423 | # qhasm: (uint32) in8 ^= *(uint32 *) (m + 32) | ||
4424 | # asm 1: xorl 32(<m=int64#2),<in8=int64#6d | ||
4425 | # asm 2: xorl 32(<m=%rsi),<in8=%r9d | ||
4426 | xorl 32(%rsi),%r9d | ||
4427 | |||
4428 | # qhasm: (uint32) in4 ^= *(uint32 *) (m + 16) | ||
4429 | # asm 1: xorl 16(<m=int64#2),<in4=int64#7d | ||
4430 | # asm 2: xorl 16(<m=%rsi),<in4=%eax | ||
4431 | xorl 16(%rsi),%eax | ||
4432 | |||
4433 | # qhasm: *(uint32 *) (out + 0) = in0 | ||
4434 | # asm 1: movl <in0=int64#4d,0(<out=int64#1) | ||
4435 | # asm 2: movl <in0=%ecx,0(<out=%rdi) | ||
4436 | movl %ecx,0(%rdi) | ||
4437 | |||
4438 | # qhasm: *(uint32 *) (out + 48) = in12 | ||
4439 | # asm 1: movl <in12=int64#5d,48(<out=int64#1) | ||
4440 | # asm 2: movl <in12=%r8d,48(<out=%rdi) | ||
4441 | movl %r8d,48(%rdi) | ||
4442 | |||
4443 | # qhasm: *(uint32 *) (out + 32) = in8 | ||
4444 | # asm 1: movl <in8=int64#6d,32(<out=int64#1) | ||
4445 | # asm 2: movl <in8=%r9d,32(<out=%rdi) | ||
4446 | movl %r9d,32(%rdi) | ||
4447 | |||
4448 | # qhasm: *(uint32 *) (out + 16) = in4 | ||
4449 | # asm 1: movl <in4=int64#7d,16(<out=int64#1) | ||
4450 | # asm 2: movl <in4=%eax,16(<out=%rdi) | ||
4451 | movl %eax,16(%rdi) | ||
4452 | |||
4453 | # qhasm: in5 = diag0 | ||
4454 | # asm 1: movd <diag0=int6464#1,>in5=int64#4 | ||
4455 | # asm 2: movd <diag0=%xmm0,>in5=%rcx | ||
4456 | movd %xmm0,%rcx | ||
4457 | |||
4458 | # qhasm: in1 = diag1 | ||
4459 | # asm 1: movd <diag1=int6464#2,>in1=int64#5 | ||
4460 | # asm 2: movd <diag1=%xmm1,>in1=%r8 | ||
4461 | movd %xmm1,%r8 | ||
4462 | |||
4463 | # qhasm: in13 = diag2 | ||
4464 | # asm 1: movd <diag2=int6464#3,>in13=int64#6 | ||
4465 | # asm 2: movd <diag2=%xmm2,>in13=%r9 | ||
4466 | movd %xmm2,%r9 | ||
4467 | |||
4468 | # qhasm: in9 = diag3 | ||
4469 | # asm 1: movd <diag3=int6464#4,>in9=int64#7 | ||
4470 | # asm 2: movd <diag3=%xmm3,>in9=%rax | ||
4471 | movd %xmm3,%rax | ||
4472 | |||
4473 | # qhasm: diag0 <<<= 96 | ||
4474 | # asm 1: pshufd $0x39,<diag0=int6464#1,<diag0=int6464#1 | ||
4475 | # asm 2: pshufd $0x39,<diag0=%xmm0,<diag0=%xmm0 | ||
4476 | pshufd $0x39,%xmm0,%xmm0 | ||
4477 | |||
4478 | # qhasm: diag1 <<<= 96 | ||
4479 | # asm 1: pshufd $0x39,<diag1=int6464#2,<diag1=int6464#2 | ||
4480 | # asm 2: pshufd $0x39,<diag1=%xmm1,<diag1=%xmm1 | ||
4481 | pshufd $0x39,%xmm1,%xmm1 | ||
4482 | |||
4483 | # qhasm: diag2 <<<= 96 | ||
4484 | # asm 1: pshufd $0x39,<diag2=int6464#3,<diag2=int6464#3 | ||
4485 | # asm 2: pshufd $0x39,<diag2=%xmm2,<diag2=%xmm2 | ||
4486 | pshufd $0x39,%xmm2,%xmm2 | ||
4487 | |||
4488 | # qhasm: diag3 <<<= 96 | ||
4489 | # asm 1: pshufd $0x39,<diag3=int6464#4,<diag3=int6464#4 | ||
4490 | # asm 2: pshufd $0x39,<diag3=%xmm3,<diag3=%xmm3 | ||
4491 | pshufd $0x39,%xmm3,%xmm3 | ||
4492 | |||
4493 | # qhasm: (uint32) in5 ^= *(uint32 *) (m + 20) | ||
4494 | # asm 1: xorl 20(<m=int64#2),<in5=int64#4d | ||
4495 | # asm 2: xorl 20(<m=%rsi),<in5=%ecx | ||
4496 | xorl 20(%rsi),%ecx | ||
4497 | |||
4498 | # qhasm: (uint32) in1 ^= *(uint32 *) (m + 4) | ||
4499 | # asm 1: xorl 4(<m=int64#2),<in1=int64#5d | ||
4500 | # asm 2: xorl 4(<m=%rsi),<in1=%r8d | ||
4501 | xorl 4(%rsi),%r8d | ||
4502 | |||
4503 | # qhasm: (uint32) in13 ^= *(uint32 *) (m + 52) | ||
4504 | # asm 1: xorl 52(<m=int64#2),<in13=int64#6d | ||
4505 | # asm 2: xorl 52(<m=%rsi),<in13=%r9d | ||
4506 | xorl 52(%rsi),%r9d | ||
4507 | |||
4508 | # qhasm: (uint32) in9 ^= *(uint32 *) (m + 36) | ||
4509 | # asm 1: xorl 36(<m=int64#2),<in9=int64#7d | ||
4510 | # asm 2: xorl 36(<m=%rsi),<in9=%eax | ||
4511 | xorl 36(%rsi),%eax | ||
4512 | |||
4513 | # qhasm: *(uint32 *) (out + 20) = in5 | ||
4514 | # asm 1: movl <in5=int64#4d,20(<out=int64#1) | ||
4515 | # asm 2: movl <in5=%ecx,20(<out=%rdi) | ||
4516 | movl %ecx,20(%rdi) | ||
4517 | |||
4518 | # qhasm: *(uint32 *) (out + 4) = in1 | ||
4519 | # asm 1: movl <in1=int64#5d,4(<out=int64#1) | ||
4520 | # asm 2: movl <in1=%r8d,4(<out=%rdi) | ||
4521 | movl %r8d,4(%rdi) | ||
4522 | |||
4523 | # qhasm: *(uint32 *) (out + 52) = in13 | ||
4524 | # asm 1: movl <in13=int64#6d,52(<out=int64#1) | ||
4525 | # asm 2: movl <in13=%r9d,52(<out=%rdi) | ||
4526 | movl %r9d,52(%rdi) | ||
4527 | |||
4528 | # qhasm: *(uint32 *) (out + 36) = in9 | ||
4529 | # asm 1: movl <in9=int64#7d,36(<out=int64#1) | ||
4530 | # asm 2: movl <in9=%eax,36(<out=%rdi) | ||
4531 | movl %eax,36(%rdi) | ||
4532 | |||
4533 | # qhasm: in10 = diag0 | ||
4534 | # asm 1: movd <diag0=int6464#1,>in10=int64#4 | ||
4535 | # asm 2: movd <diag0=%xmm0,>in10=%rcx | ||
4536 | movd %xmm0,%rcx | ||
4537 | |||
4538 | # qhasm: in6 = diag1 | ||
4539 | # asm 1: movd <diag1=int6464#2,>in6=int64#5 | ||
4540 | # asm 2: movd <diag1=%xmm1,>in6=%r8 | ||
4541 | movd %xmm1,%r8 | ||
4542 | |||
4543 | # qhasm: in2 = diag2 | ||
4544 | # asm 1: movd <diag2=int6464#3,>in2=int64#6 | ||
4545 | # asm 2: movd <diag2=%xmm2,>in2=%r9 | ||
4546 | movd %xmm2,%r9 | ||
4547 | |||
4548 | # qhasm: in14 = diag3 | ||
4549 | # asm 1: movd <diag3=int6464#4,>in14=int64#7 | ||
4550 | # asm 2: movd <diag3=%xmm3,>in14=%rax | ||
4551 | movd %xmm3,%rax | ||
4552 | |||
4553 | # qhasm: diag0 <<<= 96 | ||
4554 | # asm 1: pshufd $0x39,<diag0=int6464#1,<diag0=int6464#1 | ||
4555 | # asm 2: pshufd $0x39,<diag0=%xmm0,<diag0=%xmm0 | ||
4556 | pshufd $0x39,%xmm0,%xmm0 | ||
4557 | |||
4558 | # qhasm: diag1 <<<= 96 | ||
4559 | # asm 1: pshufd $0x39,<diag1=int6464#2,<diag1=int6464#2 | ||
4560 | # asm 2: pshufd $0x39,<diag1=%xmm1,<diag1=%xmm1 | ||
4561 | pshufd $0x39,%xmm1,%xmm1 | ||
4562 | |||
4563 | # qhasm: diag2 <<<= 96 | ||
4564 | # asm 1: pshufd $0x39,<diag2=int6464#3,<diag2=int6464#3 | ||
4565 | # asm 2: pshufd $0x39,<diag2=%xmm2,<diag2=%xmm2 | ||
4566 | pshufd $0x39,%xmm2,%xmm2 | ||
4567 | |||
4568 | # qhasm: diag3 <<<= 96 | ||
4569 | # asm 1: pshufd $0x39,<diag3=int6464#4,<diag3=int6464#4 | ||
4570 | # asm 2: pshufd $0x39,<diag3=%xmm3,<diag3=%xmm3 | ||
4571 | pshufd $0x39,%xmm3,%xmm3 | ||
4572 | |||
4573 | # qhasm: (uint32) in10 ^= *(uint32 *) (m + 40) | ||
4574 | # asm 1: xorl 40(<m=int64#2),<in10=int64#4d | ||
4575 | # asm 2: xorl 40(<m=%rsi),<in10=%ecx | ||
4576 | xorl 40(%rsi),%ecx | ||
4577 | |||
4578 | # qhasm: (uint32) in6 ^= *(uint32 *) (m + 24) | ||
4579 | # asm 1: xorl 24(<m=int64#2),<in6=int64#5d | ||
4580 | # asm 2: xorl 24(<m=%rsi),<in6=%r8d | ||
4581 | xorl 24(%rsi),%r8d | ||
4582 | |||
4583 | # qhasm: (uint32) in2 ^= *(uint32 *) (m + 8) | ||
4584 | # asm 1: xorl 8(<m=int64#2),<in2=int64#6d | ||
4585 | # asm 2: xorl 8(<m=%rsi),<in2=%r9d | ||
4586 | xorl 8(%rsi),%r9d | ||
4587 | |||
4588 | # qhasm: (uint32) in14 ^= *(uint32 *) (m + 56) | ||
4589 | # asm 1: xorl 56(<m=int64#2),<in14=int64#7d | ||
4590 | # asm 2: xorl 56(<m=%rsi),<in14=%eax | ||
4591 | xorl 56(%rsi),%eax | ||
4592 | |||
4593 | # qhasm: *(uint32 *) (out + 40) = in10 | ||
4594 | # asm 1: movl <in10=int64#4d,40(<out=int64#1) | ||
4595 | # asm 2: movl <in10=%ecx,40(<out=%rdi) | ||
4596 | movl %ecx,40(%rdi) | ||
4597 | |||
4598 | # qhasm: *(uint32 *) (out + 24) = in6 | ||
4599 | # asm 1: movl <in6=int64#5d,24(<out=int64#1) | ||
4600 | # asm 2: movl <in6=%r8d,24(<out=%rdi) | ||
4601 | movl %r8d,24(%rdi) | ||
4602 | |||
4603 | # qhasm: *(uint32 *) (out + 8) = in2 | ||
4604 | # asm 1: movl <in2=int64#6d,8(<out=int64#1) | ||
4605 | # asm 2: movl <in2=%r9d,8(<out=%rdi) | ||
4606 | movl %r9d,8(%rdi) | ||
4607 | |||
4608 | # qhasm: *(uint32 *) (out + 56) = in14 | ||
4609 | # asm 1: movl <in14=int64#7d,56(<out=int64#1) | ||
4610 | # asm 2: movl <in14=%eax,56(<out=%rdi) | ||
4611 | movl %eax,56(%rdi) | ||
4612 | |||
4613 | # qhasm: in15 = diag0 | ||
4614 | # asm 1: movd <diag0=int6464#1,>in15=int64#4 | ||
4615 | # asm 2: movd <diag0=%xmm0,>in15=%rcx | ||
4616 | movd %xmm0,%rcx | ||
4617 | |||
4618 | # qhasm: in11 = diag1 | ||
4619 | # asm 1: movd <diag1=int6464#2,>in11=int64#5 | ||
4620 | # asm 2: movd <diag1=%xmm1,>in11=%r8 | ||
4621 | movd %xmm1,%r8 | ||
4622 | |||
4623 | # qhasm: in7 = diag2 | ||
4624 | # asm 1: movd <diag2=int6464#3,>in7=int64#6 | ||
4625 | # asm 2: movd <diag2=%xmm2,>in7=%r9 | ||
4626 | movd %xmm2,%r9 | ||
4627 | |||
4628 | # qhasm: in3 = diag3 | ||
4629 | # asm 1: movd <diag3=int6464#4,>in3=int64#7 | ||
4630 | # asm 2: movd <diag3=%xmm3,>in3=%rax | ||
4631 | movd %xmm3,%rax | ||
4632 | |||
4633 | # qhasm: (uint32) in15 ^= *(uint32 *) (m + 60) | ||
4634 | # asm 1: xorl 60(<m=int64#2),<in15=int64#4d | ||
4635 | # asm 2: xorl 60(<m=%rsi),<in15=%ecx | ||
4636 | xorl 60(%rsi),%ecx | ||
4637 | |||
4638 | # qhasm: (uint32) in11 ^= *(uint32 *) (m + 44) | ||
4639 | # asm 1: xorl 44(<m=int64#2),<in11=int64#5d | ||
4640 | # asm 2: xorl 44(<m=%rsi),<in11=%r8d | ||
4641 | xorl 44(%rsi),%r8d | ||
4642 | |||
4643 | # qhasm: (uint32) in7 ^= *(uint32 *) (m + 28) | ||
4644 | # asm 1: xorl 28(<m=int64#2),<in7=int64#6d | ||
4645 | # asm 2: xorl 28(<m=%rsi),<in7=%r9d | ||
4646 | xorl 28(%rsi),%r9d | ||
4647 | |||
4648 | # qhasm: (uint32) in3 ^= *(uint32 *) (m + 12) | ||
4649 | # asm 1: xorl 12(<m=int64#2),<in3=int64#7d | ||
4650 | # asm 2: xorl 12(<m=%rsi),<in3=%eax | ||
4651 | xorl 12(%rsi),%eax | ||
4652 | |||
4653 | # qhasm: *(uint32 *) (out + 60) = in15 | ||
4654 | # asm 1: movl <in15=int64#4d,60(<out=int64#1) | ||
4655 | # asm 2: movl <in15=%ecx,60(<out=%rdi) | ||
4656 | movl %ecx,60(%rdi) | ||
4657 | |||
4658 | # qhasm: *(uint32 *) (out + 44) = in11 | ||
4659 | # asm 1: movl <in11=int64#5d,44(<out=int64#1) | ||
4660 | # asm 2: movl <in11=%r8d,44(<out=%rdi) | ||
4661 | movl %r8d,44(%rdi) | ||
4662 | |||
4663 | # qhasm: *(uint32 *) (out + 28) = in7 | ||
4664 | # asm 1: movl <in7=int64#6d,28(<out=int64#1) | ||
4665 | # asm 2: movl <in7=%r9d,28(<out=%rdi) | ||
4666 | movl %r9d,28(%rdi) | ||
4667 | |||
4668 | # qhasm: *(uint32 *) (out + 12) = in3 | ||
4669 | # asm 1: movl <in3=int64#7d,12(<out=int64#1) | ||
4670 | # asm 2: movl <in3=%eax,12(<out=%rdi) | ||
4671 | movl %eax,12(%rdi) | ||
4672 | |||
4673 | # qhasm: bytes = bytes_backup | ||
4674 | # asm 1: movq <bytes_backup=stack64#8,>bytes=int64#6 | ||
4675 | # asm 2: movq <bytes_backup=408(%rsp),>bytes=%r9 | ||
4676 | movq 408(%rsp),%r9 | ||
4677 | |||
4678 | # qhasm: in8 = ((uint32 *)&x2)[0] | ||
4679 | # asm 1: movl <x2=stack128#2,>in8=int64#4d | ||
4680 | # asm 2: movl <x2=16(%rsp),>in8=%ecx | ||
4681 | movl 16(%rsp),%ecx | ||
4682 | |||
4683 | # qhasm: in9 = ((uint32 *)&x3)[1] | ||
4684 | # asm 1: movl 4+<x3=stack128#3,>in9=int64#5d | ||
4685 | # asm 2: movl 4+<x3=32(%rsp),>in9=%r8d | ||
4686 | movl 4+32(%rsp),%r8d | ||
4687 | |||
4688 | # qhasm: in8 += 1 | ||
4689 | # asm 1: add $1,<in8=int64#4 | ||
4690 | # asm 2: add $1,<in8=%rcx | ||
4691 | add $1,%rcx | ||
4692 | |||
4693 | # qhasm: in9 <<= 32 | ||
4694 | # asm 1: shl $32,<in9=int64#5 | ||
4695 | # asm 2: shl $32,<in9=%r8 | ||
4696 | shl $32,%r8 | ||
4697 | |||
4698 | # qhasm: in8 += in9 | ||
4699 | # asm 1: add <in9=int64#5,<in8=int64#4 | ||
4700 | # asm 2: add <in9=%r8,<in8=%rcx | ||
4701 | add %r8,%rcx | ||
4702 | |||
4703 | # qhasm: in9 = in8 | ||
4704 | # asm 1: mov <in8=int64#4,>in9=int64#5 | ||
4705 | # asm 2: mov <in8=%rcx,>in9=%r8 | ||
4706 | mov %rcx,%r8 | ||
4707 | |||
4708 | # qhasm: (uint64) in9 >>= 32 | ||
4709 | # asm 1: shr $32,<in9=int64#5 | ||
4710 | # asm 2: shr $32,<in9=%r8 | ||
4711 | shr $32,%r8 | ||
4712 | |||
4713 | # qhasm: ((uint32 *)&x2)[0] = in8 | ||
4714 | # asm 1: movl <in8=int64#4d,>x2=stack128#2 | ||
4715 | # asm 2: movl <in8=%ecx,>x2=16(%rsp) | ||
4716 | movl %ecx,16(%rsp) | ||
4717 | |||
4718 | # qhasm: ((uint32 *)&x3)[1] = in9 | ||
4719 | # asm 1: movl <in9=int64#5d,4+<x3=stack128#3 | ||
4720 | # asm 2: movl <in9=%r8d,4+<x3=32(%rsp) | ||
4721 | movl %r8d,4+32(%rsp) | ||
4722 | |||
4723 | # qhasm: unsigned>? unsigned<? bytes - 64 | ||
4724 | # asm 1: cmp $64,<bytes=int64#6 | ||
4725 | # asm 2: cmp $64,<bytes=%r9 | ||
4726 | cmp $64,%r9 | ||
4727 | # comment:fp stack unchanged by jump | ||
4728 | |||
4729 | # qhasm: goto bytesatleast65 if unsigned> | ||
4730 | ja ._bytesatleast65 | ||
4731 | # comment:fp stack unchanged by jump | ||
4732 | |||
4733 | # qhasm: goto bytesatleast64 if !unsigned< | ||
4734 | jae ._bytesatleast64 | ||
4735 | |||
4736 | # qhasm: m = out | ||
4737 | # asm 1: mov <out=int64#1,>m=int64#2 | ||
4738 | # asm 2: mov <out=%rdi,>m=%rsi | ||
4739 | mov %rdi,%rsi | ||
4740 | |||
4741 | # qhasm: out = ctarget | ||
4742 | # asm 1: mov <ctarget=int64#3,>out=int64#1 | ||
4743 | # asm 2: mov <ctarget=%rdx,>out=%rdi | ||
4744 | mov %rdx,%rdi | ||
4745 | |||
4746 | # qhasm: i = bytes | ||
4747 | # asm 1: mov <bytes=int64#6,>i=int64#4 | ||
4748 | # asm 2: mov <bytes=%r9,>i=%rcx | ||
4749 | mov %r9,%rcx | ||
4750 | |||
4751 | # qhasm: while (i) { *out++ = *m++; --i } | ||
4752 | rep movsb | ||
4753 | # comment:fp stack unchanged by fallthrough | ||
4754 | |||
4755 | # qhasm: bytesatleast64: | ||
4756 | ._bytesatleast64: | ||
4757 | # comment:fp stack unchanged by fallthrough | ||
4758 | |||
4759 | # qhasm: done: | ||
4760 | ._done: | ||
4761 | |||
4762 | # qhasm: r11_caller = r11_stack | ||
4763 | # asm 1: movq <r11_stack=stack64#1,>r11_caller=int64#9 | ||
4764 | # asm 2: movq <r11_stack=352(%rsp),>r11_caller=%r11 | ||
4765 | movq 352(%rsp),%r11 | ||
4766 | |||
4767 | # qhasm: r12_caller = r12_stack | ||
4768 | # asm 1: movq <r12_stack=stack64#2,>r12_caller=int64#10 | ||
4769 | # asm 2: movq <r12_stack=360(%rsp),>r12_caller=%r12 | ||
4770 | movq 360(%rsp),%r12 | ||
4771 | |||
4772 | # qhasm: r13_caller = r13_stack | ||
4773 | # asm 1: movq <r13_stack=stack64#3,>r13_caller=int64#11 | ||
4774 | # asm 2: movq <r13_stack=368(%rsp),>r13_caller=%r13 | ||
4775 | movq 368(%rsp),%r13 | ||
4776 | |||
4777 | # qhasm: r14_caller = r14_stack | ||
4778 | # asm 1: movq <r14_stack=stack64#4,>r14_caller=int64#12 | ||
4779 | # asm 2: movq <r14_stack=376(%rsp),>r14_caller=%r14 | ||
4780 | movq 376(%rsp),%r14 | ||
4781 | |||
4782 | # qhasm: r15_caller = r15_stack | ||
4783 | # asm 1: movq <r15_stack=stack64#5,>r15_caller=int64#13 | ||
4784 | # asm 2: movq <r15_stack=384(%rsp),>r15_caller=%r15 | ||
4785 | movq 384(%rsp),%r15 | ||
4786 | |||
4787 | # qhasm: rbx_caller = rbx_stack | ||
4788 | # asm 1: movq <rbx_stack=stack64#6,>rbx_caller=int64#14 | ||
4789 | # asm 2: movq <rbx_stack=392(%rsp),>rbx_caller=%rbx | ||
4790 | movq 392(%rsp),%rbx | ||
4791 | |||
4792 | # qhasm: rbp_caller = rbp_stack | ||
4793 | # asm 1: movq <rbp_stack=stack64#7,>rbp_caller=int64#15 | ||
4794 | # asm 2: movq <rbp_stack=400(%rsp),>rbp_caller=%rbp | ||
4795 | movq 400(%rsp),%rbp | ||
4796 | |||
4797 | # qhasm: leave | ||
4798 | add %r11,%rsp | ||
4799 | xor %rax,%rax | ||
4800 | xor %rdx,%rdx | ||
4801 | ret | ||
4802 | |||
4803 | # qhasm: bytesatleast65: | ||
4804 | ._bytesatleast65: | ||
4805 | |||
4806 | # qhasm: bytes -= 64 | ||
4807 | # asm 1: sub $64,<bytes=int64#6 | ||
4808 | # asm 2: sub $64,<bytes=%r9 | ||
4809 | sub $64,%r9 | ||
4810 | |||
4811 | # qhasm: out += 64 | ||
4812 | # asm 1: add $64,<out=int64#1 | ||
4813 | # asm 2: add $64,<out=%rdi | ||
4814 | add $64,%rdi | ||
4815 | |||
4816 | # qhasm: m += 64 | ||
4817 | # asm 1: add $64,<m=int64#2 | ||
4818 | # asm 2: add $64,<m=%rsi | ||
4819 | add $64,%rsi | ||
4820 | # comment:fp stack unchanged by jump | ||
4821 | |||
4822 | # qhasm: goto bytesbetween1and255 | ||
4823 | jmp ._bytesbetween1and255 | ||
diff --git a/nacl/crypto_stream/salsa20/checksum b/nacl/crypto_stream/salsa20/checksum new file mode 100644 index 00000000..78ff05f4 --- /dev/null +++ b/nacl/crypto_stream/salsa20/checksum | |||
@@ -0,0 +1 @@ | |||
44a3966eabcd3a2b13faca2150e38f2b7e6bac187d626618f50a9f875158ae78 | |||
diff --git a/nacl/crypto_stream/salsa20/ref/api.h b/nacl/crypto_stream/salsa20/ref/api.h new file mode 100644 index 00000000..c2b18461 --- /dev/null +++ b/nacl/crypto_stream/salsa20/ref/api.h | |||
@@ -0,0 +1,2 @@ | |||
1 | #define CRYPTO_KEYBYTES 32 | ||
2 | #define CRYPTO_NONCEBYTES 8 | ||
diff --git a/nacl/crypto_stream/salsa20/ref/implementors b/nacl/crypto_stream/salsa20/ref/implementors new file mode 100644 index 00000000..f6fb3c73 --- /dev/null +++ b/nacl/crypto_stream/salsa20/ref/implementors | |||
@@ -0,0 +1 @@ | |||
Daniel J. Bernstein | |||
diff --git a/nacl/crypto_stream/salsa20/ref/stream.c b/nacl/crypto_stream/salsa20/ref/stream.c new file mode 100644 index 00000000..2f0262eb --- /dev/null +++ b/nacl/crypto_stream/salsa20/ref/stream.c | |||
@@ -0,0 +1,49 @@ | |||
1 | /* | ||
2 | version 20080913 | ||
3 | D. J. Bernstein | ||
4 | Public domain. | ||
5 | */ | ||
6 | |||
7 | #include "crypto_core_salsa20.h" | ||
8 | #include "crypto_stream.h" | ||
9 | |||
10 | typedef unsigned int uint32; | ||
11 | |||
12 | static const unsigned char sigma[16] = "expand 32-byte k"; | ||
13 | |||
14 | int crypto_stream( | ||
15 | unsigned char *c,unsigned long long clen, | ||
16 | const unsigned char *n, | ||
17 | const unsigned char *k | ||
18 | ) | ||
19 | { | ||
20 | unsigned char in[16]; | ||
21 | unsigned char block[64]; | ||
22 | int i; | ||
23 | unsigned int u; | ||
24 | |||
25 | if (!clen) return 0; | ||
26 | |||
27 | for (i = 0;i < 8;++i) in[i] = n[i]; | ||
28 | for (i = 8;i < 16;++i) in[i] = 0; | ||
29 | |||
30 | while (clen >= 64) { | ||
31 | crypto_core_salsa20(c,in,k,sigma); | ||
32 | |||
33 | u = 1; | ||
34 | for (i = 8;i < 16;++i) { | ||
35 | u += (unsigned int) in[i]; | ||
36 | in[i] = u; | ||
37 | u >>= 8; | ||
38 | } | ||
39 | |||
40 | clen -= 64; | ||
41 | c += 64; | ||
42 | } | ||
43 | |||
44 | if (clen) { | ||
45 | crypto_core_salsa20(block,in,k,sigma); | ||
46 | for (i = 0;i < clen;++i) c[i] = block[i]; | ||
47 | } | ||
48 | return 0; | ||
49 | } | ||
diff --git a/nacl/crypto_stream/salsa20/ref/xor.c b/nacl/crypto_stream/salsa20/ref/xor.c new file mode 100644 index 00000000..11c7e9f0 --- /dev/null +++ b/nacl/crypto_stream/salsa20/ref/xor.c | |||
@@ -0,0 +1,52 @@ | |||
1 | /* | ||
2 | version 20080913 | ||
3 | D. J. Bernstein | ||
4 | Public domain. | ||
5 | */ | ||
6 | |||
7 | #include "crypto_core_salsa20.h" | ||
8 | #include "crypto_stream.h" | ||
9 | |||
10 | typedef unsigned int uint32; | ||
11 | |||
12 | static const unsigned char sigma[16] = "expand 32-byte k"; | ||
13 | |||
14 | int crypto_stream_xor( | ||
15 | unsigned char *c, | ||
16 | const unsigned char *m,unsigned long long mlen, | ||
17 | const unsigned char *n, | ||
18 | const unsigned char *k | ||
19 | ) | ||
20 | { | ||
21 | unsigned char in[16]; | ||
22 | unsigned char block[64]; | ||
23 | int i; | ||
24 | unsigned int u; | ||
25 | |||
26 | if (!mlen) return 0; | ||
27 | |||
28 | for (i = 0;i < 8;++i) in[i] = n[i]; | ||
29 | for (i = 8;i < 16;++i) in[i] = 0; | ||
30 | |||
31 | while (mlen >= 64) { | ||
32 | crypto_core_salsa20(block,in,k,sigma); | ||
33 | for (i = 0;i < 64;++i) c[i] = m[i] ^ block[i]; | ||
34 | |||
35 | u = 1; | ||
36 | for (i = 8;i < 16;++i) { | ||
37 | u += (unsigned int) in[i]; | ||
38 | in[i] = u; | ||
39 | u >>= 8; | ||
40 | } | ||
41 | |||
42 | mlen -= 64; | ||
43 | c += 64; | ||
44 | m += 64; | ||
45 | } | ||
46 | |||
47 | if (mlen) { | ||
48 | crypto_core_salsa20(block,in,k,sigma); | ||
49 | for (i = 0;i < mlen;++i) c[i] = m[i] ^ block[i]; | ||
50 | } | ||
51 | return 0; | ||
52 | } | ||
diff --git a/nacl/crypto_stream/salsa20/used b/nacl/crypto_stream/salsa20/used new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/nacl/crypto_stream/salsa20/used | |||
diff --git a/nacl/crypto_stream/salsa20/x86_xmm5/api.h b/nacl/crypto_stream/salsa20/x86_xmm5/api.h new file mode 100644 index 00000000..c2b18461 --- /dev/null +++ b/nacl/crypto_stream/salsa20/x86_xmm5/api.h | |||
@@ -0,0 +1,2 @@ | |||
1 | #define CRYPTO_KEYBYTES 32 | ||
2 | #define CRYPTO_NONCEBYTES 8 | ||
diff --git a/nacl/crypto_stream/salsa20/x86_xmm5/implementors b/nacl/crypto_stream/salsa20/x86_xmm5/implementors new file mode 100644 index 00000000..f6fb3c73 --- /dev/null +++ b/nacl/crypto_stream/salsa20/x86_xmm5/implementors | |||
@@ -0,0 +1 @@ | |||
Daniel J. Bernstein | |||
diff --git a/nacl/crypto_stream/salsa20/x86_xmm5/stream.s b/nacl/crypto_stream/salsa20/x86_xmm5/stream.s new file mode 100644 index 00000000..9e32ea43 --- /dev/null +++ b/nacl/crypto_stream/salsa20/x86_xmm5/stream.s | |||
@@ -0,0 +1,5078 @@ | |||
1 | |||
2 | # qhasm: int32 a | ||
3 | |||
4 | # qhasm: stack32 arg1 | ||
5 | |||
6 | # qhasm: stack32 arg2 | ||
7 | |||
8 | # qhasm: stack32 arg3 | ||
9 | |||
10 | # qhasm: stack32 arg4 | ||
11 | |||
12 | # qhasm: stack32 arg5 | ||
13 | |||
14 | # qhasm: stack32 arg6 | ||
15 | |||
16 | # qhasm: input arg1 | ||
17 | |||
18 | # qhasm: input arg2 | ||
19 | |||
20 | # qhasm: input arg3 | ||
21 | |||
22 | # qhasm: input arg4 | ||
23 | |||
24 | # qhasm: input arg5 | ||
25 | |||
26 | # qhasm: input arg6 | ||
27 | |||
28 | # qhasm: int32 eax | ||
29 | |||
30 | # qhasm: int32 ebx | ||
31 | |||
32 | # qhasm: int32 esi | ||
33 | |||
34 | # qhasm: int32 edi | ||
35 | |||
36 | # qhasm: int32 ebp | ||
37 | |||
38 | # qhasm: caller eax | ||
39 | |||
40 | # qhasm: caller ebx | ||
41 | |||
42 | # qhasm: caller esi | ||
43 | |||
44 | # qhasm: caller edi | ||
45 | |||
46 | # qhasm: caller ebp | ||
47 | |||
48 | # qhasm: int32 k | ||
49 | |||
50 | # qhasm: int32 kbits | ||
51 | |||
52 | # qhasm: int32 iv | ||
53 | |||
54 | # qhasm: int32 i | ||
55 | |||
56 | # qhasm: stack128 x0 | ||
57 | |||
58 | # qhasm: stack128 x1 | ||
59 | |||
60 | # qhasm: stack128 x2 | ||
61 | |||
62 | # qhasm: stack128 x3 | ||
63 | |||
64 | # qhasm: int32 m | ||
65 | |||
66 | # qhasm: stack32 out_stack | ||
67 | |||
68 | # qhasm: int32 out | ||
69 | |||
70 | # qhasm: stack32 bytes_stack | ||
71 | |||
72 | # qhasm: int32 bytes | ||
73 | |||
74 | # qhasm: stack32 eax_stack | ||
75 | |||
76 | # qhasm: stack32 ebx_stack | ||
77 | |||
78 | # qhasm: stack32 esi_stack | ||
79 | |||
80 | # qhasm: stack32 edi_stack | ||
81 | |||
82 | # qhasm: stack32 ebp_stack | ||
83 | |||
84 | # qhasm: int6464 diag0 | ||
85 | |||
86 | # qhasm: int6464 diag1 | ||
87 | |||
88 | # qhasm: int6464 diag2 | ||
89 | |||
90 | # qhasm: int6464 diag3 | ||
91 | |||
92 | # qhasm: int6464 a0 | ||
93 | |||
94 | # qhasm: int6464 a1 | ||
95 | |||
96 | # qhasm: int6464 a2 | ||
97 | |||
98 | # qhasm: int6464 a3 | ||
99 | |||
100 | # qhasm: int6464 a4 | ||
101 | |||
102 | # qhasm: int6464 a5 | ||
103 | |||
104 | # qhasm: int6464 a6 | ||
105 | |||
106 | # qhasm: int6464 a7 | ||
107 | |||
108 | # qhasm: int6464 b0 | ||
109 | |||
110 | # qhasm: int6464 b1 | ||
111 | |||
112 | # qhasm: int6464 b2 | ||
113 | |||
114 | # qhasm: int6464 b3 | ||
115 | |||
116 | # qhasm: int6464 b4 | ||
117 | |||
118 | # qhasm: int6464 b5 | ||
119 | |||
120 | # qhasm: int6464 b6 | ||
121 | |||
122 | # qhasm: int6464 b7 | ||
123 | |||
124 | # qhasm: int6464 z0 | ||
125 | |||
126 | # qhasm: int6464 z1 | ||
127 | |||
128 | # qhasm: int6464 z2 | ||
129 | |||
130 | # qhasm: int6464 z3 | ||
131 | |||
132 | # qhasm: int6464 z4 | ||
133 | |||
134 | # qhasm: int6464 z5 | ||
135 | |||
136 | # qhasm: int6464 z6 | ||
137 | |||
138 | # qhasm: int6464 z7 | ||
139 | |||
140 | # qhasm: int6464 z8 | ||
141 | |||
142 | # qhasm: int6464 z9 | ||
143 | |||
144 | # qhasm: int6464 z10 | ||
145 | |||
146 | # qhasm: int6464 z11 | ||
147 | |||
148 | # qhasm: int6464 z12 | ||
149 | |||
150 | # qhasm: int6464 z13 | ||
151 | |||
152 | # qhasm: int6464 z14 | ||
153 | |||
154 | # qhasm: int6464 z15 | ||
155 | |||
156 | # qhasm: stack128 z0_stack | ||
157 | |||
158 | # qhasm: stack128 z1_stack | ||
159 | |||
160 | # qhasm: stack128 z2_stack | ||
161 | |||
162 | # qhasm: stack128 z3_stack | ||
163 | |||
164 | # qhasm: stack128 z4_stack | ||
165 | |||
166 | # qhasm: stack128 z5_stack | ||
167 | |||
168 | # qhasm: stack128 z6_stack | ||
169 | |||
170 | # qhasm: stack128 z7_stack | ||
171 | |||
172 | # qhasm: stack128 z8_stack | ||
173 | |||
174 | # qhasm: stack128 z9_stack | ||
175 | |||
176 | # qhasm: stack128 z10_stack | ||
177 | |||
178 | # qhasm: stack128 z11_stack | ||
179 | |||
180 | # qhasm: stack128 z12_stack | ||
181 | |||
182 | # qhasm: stack128 z13_stack | ||
183 | |||
184 | # qhasm: stack128 z14_stack | ||
185 | |||
186 | # qhasm: stack128 z15_stack | ||
187 | |||
188 | # qhasm: stack128 orig0 | ||
189 | |||
190 | # qhasm: stack128 orig1 | ||
191 | |||
192 | # qhasm: stack128 orig2 | ||
193 | |||
194 | # qhasm: stack128 orig3 | ||
195 | |||
196 | # qhasm: stack128 orig4 | ||
197 | |||
198 | # qhasm: stack128 orig5 | ||
199 | |||
200 | # qhasm: stack128 orig6 | ||
201 | |||
202 | # qhasm: stack128 orig7 | ||
203 | |||
204 | # qhasm: stack128 orig8 | ||
205 | |||
206 | # qhasm: stack128 orig9 | ||
207 | |||
208 | # qhasm: stack128 orig10 | ||
209 | |||
210 | # qhasm: stack128 orig11 | ||
211 | |||
212 | # qhasm: stack128 orig12 | ||
213 | |||
214 | # qhasm: stack128 orig13 | ||
215 | |||
216 | # qhasm: stack128 orig14 | ||
217 | |||
218 | # qhasm: stack128 orig15 | ||
219 | |||
220 | # qhasm: int6464 p | ||
221 | |||
222 | # qhasm: int6464 q | ||
223 | |||
224 | # qhasm: int6464 r | ||
225 | |||
226 | # qhasm: int6464 s | ||
227 | |||
228 | # qhasm: int6464 t | ||
229 | |||
230 | # qhasm: int6464 u | ||
231 | |||
232 | # qhasm: int6464 v | ||
233 | |||
234 | # qhasm: int6464 w | ||
235 | |||
236 | # qhasm: int6464 mp | ||
237 | |||
238 | # qhasm: int6464 mq | ||
239 | |||
240 | # qhasm: int6464 mr | ||
241 | |||
242 | # qhasm: int6464 ms | ||
243 | |||
244 | # qhasm: int6464 mt | ||
245 | |||
246 | # qhasm: int6464 mu | ||
247 | |||
248 | # qhasm: int6464 mv | ||
249 | |||
250 | # qhasm: int6464 mw | ||
251 | |||
252 | # qhasm: int32 in0 | ||
253 | |||
254 | # qhasm: int32 in1 | ||
255 | |||
256 | # qhasm: int32 in2 | ||
257 | |||
258 | # qhasm: int32 in3 | ||
259 | |||
260 | # qhasm: int32 in4 | ||
261 | |||
262 | # qhasm: int32 in5 | ||
263 | |||
264 | # qhasm: int32 in6 | ||
265 | |||
266 | # qhasm: int32 in7 | ||
267 | |||
268 | # qhasm: int32 in8 | ||
269 | |||
270 | # qhasm: int32 in9 | ||
271 | |||
272 | # qhasm: int32 in10 | ||
273 | |||
274 | # qhasm: int32 in11 | ||
275 | |||
276 | # qhasm: int32 in12 | ||
277 | |||
278 | # qhasm: int32 in13 | ||
279 | |||
280 | # qhasm: int32 in14 | ||
281 | |||
282 | # qhasm: int32 in15 | ||
283 | |||
284 | # qhasm: stack512 tmp | ||
285 | |||
286 | # qhasm: stack32 ctarget | ||
287 | |||
288 | # qhasm: enter crypto_stream_salsa20_x86_xmm5 | ||
289 | .text | ||
290 | .p2align 5 | ||
291 | .globl _crypto_stream_salsa20_x86_xmm5 | ||
292 | .globl crypto_stream_salsa20_x86_xmm5 | ||
293 | _crypto_stream_salsa20_x86_xmm5: | ||
294 | crypto_stream_salsa20_x86_xmm5: | ||
295 | mov %esp,%eax | ||
296 | and $31,%eax | ||
297 | add $704,%eax | ||
298 | sub %eax,%esp | ||
299 | |||
300 | # qhasm: eax_stack = eax | ||
301 | # asm 1: movl <eax=int32#1,>eax_stack=stack32#1 | ||
302 | # asm 2: movl <eax=%eax,>eax_stack=0(%esp) | ||
303 | movl %eax,0(%esp) | ||
304 | |||
305 | # qhasm: ebx_stack = ebx | ||
306 | # asm 1: movl <ebx=int32#4,>ebx_stack=stack32#2 | ||
307 | # asm 2: movl <ebx=%ebx,>ebx_stack=4(%esp) | ||
308 | movl %ebx,4(%esp) | ||
309 | |||
310 | # qhasm: esi_stack = esi | ||
311 | # asm 1: movl <esi=int32#5,>esi_stack=stack32#3 | ||
312 | # asm 2: movl <esi=%esi,>esi_stack=8(%esp) | ||
313 | movl %esi,8(%esp) | ||
314 | |||
315 | # qhasm: edi_stack = edi | ||
316 | # asm 1: movl <edi=int32#6,>edi_stack=stack32#4 | ||
317 | # asm 2: movl <edi=%edi,>edi_stack=12(%esp) | ||
318 | movl %edi,12(%esp) | ||
319 | |||
320 | # qhasm: ebp_stack = ebp | ||
321 | # asm 1: movl <ebp=int32#7,>ebp_stack=stack32#5 | ||
322 | # asm 2: movl <ebp=%ebp,>ebp_stack=16(%esp) | ||
323 | movl %ebp,16(%esp) | ||
324 | |||
325 | # qhasm: bytes = arg2 | ||
326 | # asm 1: movl <arg2=stack32#-2,>bytes=int32#3 | ||
327 | # asm 2: movl <arg2=8(%esp,%eax),>bytes=%edx | ||
328 | movl 8(%esp,%eax),%edx | ||
329 | |||
330 | # qhasm: out = arg1 | ||
331 | # asm 1: movl <arg1=stack32#-1,>out=int32#6 | ||
332 | # asm 2: movl <arg1=4(%esp,%eax),>out=%edi | ||
333 | movl 4(%esp,%eax),%edi | ||
334 | |||
335 | # qhasm: m = out | ||
336 | # asm 1: mov <out=int32#6,>m=int32#5 | ||
337 | # asm 2: mov <out=%edi,>m=%esi | ||
338 | mov %edi,%esi | ||
339 | |||
340 | # qhasm: iv = arg4 | ||
341 | # asm 1: movl <arg4=stack32#-4,>iv=int32#4 | ||
342 | # asm 2: movl <arg4=16(%esp,%eax),>iv=%ebx | ||
343 | movl 16(%esp,%eax),%ebx | ||
344 | |||
345 | # qhasm: k = arg5 | ||
346 | # asm 1: movl <arg5=stack32#-5,>k=int32#7 | ||
347 | # asm 2: movl <arg5=20(%esp,%eax),>k=%ebp | ||
348 | movl 20(%esp,%eax),%ebp | ||
349 | |||
350 | # qhasm: unsigned>? bytes - 0 | ||
351 | # asm 1: cmp $0,<bytes=int32#3 | ||
352 | # asm 2: cmp $0,<bytes=%edx | ||
353 | cmp $0,%edx | ||
354 | # comment:fp stack unchanged by jump | ||
355 | |||
356 | # qhasm: goto done if !unsigned> | ||
357 | jbe ._done | ||
358 | |||
359 | # qhasm: a = 0 | ||
360 | # asm 1: mov $0,>a=int32#1 | ||
361 | # asm 2: mov $0,>a=%eax | ||
362 | mov $0,%eax | ||
363 | |||
364 | # qhasm: i = bytes | ||
365 | # asm 1: mov <bytes=int32#3,>i=int32#2 | ||
366 | # asm 2: mov <bytes=%edx,>i=%ecx | ||
367 | mov %edx,%ecx | ||
368 | |||
369 | # qhasm: while (i) { *out++ = a; --i } | ||
370 | rep stosb | ||
371 | |||
372 | # qhasm: out -= bytes | ||
373 | # asm 1: subl <bytes=int32#3,<out=int32#6 | ||
374 | # asm 2: subl <bytes=%edx,<out=%edi | ||
375 | subl %edx,%edi | ||
376 | # comment:fp stack unchanged by jump | ||
377 | |||
378 | # qhasm: goto start | ||
379 | jmp ._start | ||
380 | |||
381 | # qhasm: enter crypto_stream_salsa20_x86_xmm5_xor | ||
382 | .text | ||
383 | .p2align 5 | ||
384 | .globl _crypto_stream_salsa20_x86_xmm5_xor | ||
385 | .globl crypto_stream_salsa20_x86_xmm5_xor | ||
386 | _crypto_stream_salsa20_x86_xmm5_xor: | ||
387 | crypto_stream_salsa20_x86_xmm5_xor: | ||
388 | mov %esp,%eax | ||
389 | and $31,%eax | ||
390 | add $704,%eax | ||
391 | sub %eax,%esp | ||
392 | |||
393 | # qhasm: eax_stack = eax | ||
394 | # asm 1: movl <eax=int32#1,>eax_stack=stack32#1 | ||
395 | # asm 2: movl <eax=%eax,>eax_stack=0(%esp) | ||
396 | movl %eax,0(%esp) | ||
397 | |||
398 | # qhasm: ebx_stack = ebx | ||
399 | # asm 1: movl <ebx=int32#4,>ebx_stack=stack32#2 | ||
400 | # asm 2: movl <ebx=%ebx,>ebx_stack=4(%esp) | ||
401 | movl %ebx,4(%esp) | ||
402 | |||
403 | # qhasm: esi_stack = esi | ||
404 | # asm 1: movl <esi=int32#5,>esi_stack=stack32#3 | ||
405 | # asm 2: movl <esi=%esi,>esi_stack=8(%esp) | ||
406 | movl %esi,8(%esp) | ||
407 | |||
408 | # qhasm: edi_stack = edi | ||
409 | # asm 1: movl <edi=int32#6,>edi_stack=stack32#4 | ||
410 | # asm 2: movl <edi=%edi,>edi_stack=12(%esp) | ||
411 | movl %edi,12(%esp) | ||
412 | |||
413 | # qhasm: ebp_stack = ebp | ||
414 | # asm 1: movl <ebp=int32#7,>ebp_stack=stack32#5 | ||
415 | # asm 2: movl <ebp=%ebp,>ebp_stack=16(%esp) | ||
416 | movl %ebp,16(%esp) | ||
417 | |||
418 | # qhasm: out = arg1 | ||
419 | # asm 1: movl <arg1=stack32#-1,>out=int32#6 | ||
420 | # asm 2: movl <arg1=4(%esp,%eax),>out=%edi | ||
421 | movl 4(%esp,%eax),%edi | ||
422 | |||
423 | # qhasm: m = arg2 | ||
424 | # asm 1: movl <arg2=stack32#-2,>m=int32#5 | ||
425 | # asm 2: movl <arg2=8(%esp,%eax),>m=%esi | ||
426 | movl 8(%esp,%eax),%esi | ||
427 | |||
428 | # qhasm: bytes = arg3 | ||
429 | # asm 1: movl <arg3=stack32#-3,>bytes=int32#3 | ||
430 | # asm 2: movl <arg3=12(%esp,%eax),>bytes=%edx | ||
431 | movl 12(%esp,%eax),%edx | ||
432 | |||
433 | # qhasm: iv = arg5 | ||
434 | # asm 1: movl <arg5=stack32#-5,>iv=int32#4 | ||
435 | # asm 2: movl <arg5=20(%esp,%eax),>iv=%ebx | ||
436 | movl 20(%esp,%eax),%ebx | ||
437 | |||
438 | # qhasm: k = arg6 | ||
439 | # asm 1: movl <arg6=stack32#-6,>k=int32#7 | ||
440 | # asm 2: movl <arg6=24(%esp,%eax),>k=%ebp | ||
441 | movl 24(%esp,%eax),%ebp | ||
442 | |||
443 | # qhasm: unsigned>? bytes - 0 | ||
444 | # asm 1: cmp $0,<bytes=int32#3 | ||
445 | # asm 2: cmp $0,<bytes=%edx | ||
446 | cmp $0,%edx | ||
447 | # comment:fp stack unchanged by jump | ||
448 | |||
449 | # qhasm: goto done if !unsigned> | ||
450 | jbe ._done | ||
451 | # comment:fp stack unchanged by fallthrough | ||
452 | |||
453 | # qhasm: start: | ||
454 | ._start: | ||
455 | |||
456 | # qhasm: out_stack = out | ||
457 | # asm 1: movl <out=int32#6,>out_stack=stack32#6 | ||
458 | # asm 2: movl <out=%edi,>out_stack=20(%esp) | ||
459 | movl %edi,20(%esp) | ||
460 | |||
461 | # qhasm: bytes_stack = bytes | ||
462 | # asm 1: movl <bytes=int32#3,>bytes_stack=stack32#7 | ||
463 | # asm 2: movl <bytes=%edx,>bytes_stack=24(%esp) | ||
464 | movl %edx,24(%esp) | ||
465 | |||
466 | # qhasm: in4 = *(uint32 *) (k + 12) | ||
467 | # asm 1: movl 12(<k=int32#7),>in4=int32#1 | ||
468 | # asm 2: movl 12(<k=%ebp),>in4=%eax | ||
469 | movl 12(%ebp),%eax | ||
470 | |||
471 | # qhasm: in12 = *(uint32 *) (k + 20) | ||
472 | # asm 1: movl 20(<k=int32#7),>in12=int32#2 | ||
473 | # asm 2: movl 20(<k=%ebp),>in12=%ecx | ||
474 | movl 20(%ebp),%ecx | ||
475 | |||
476 | # qhasm: ((uint32 *)&x3)[0] = in4 | ||
477 | # asm 1: movl <in4=int32#1,>x3=stack128#1 | ||
478 | # asm 2: movl <in4=%eax,>x3=32(%esp) | ||
479 | movl %eax,32(%esp) | ||
480 | |||
481 | # qhasm: ((uint32 *)&x1)[0] = in12 | ||
482 | # asm 1: movl <in12=int32#2,>x1=stack128#2 | ||
483 | # asm 2: movl <in12=%ecx,>x1=48(%esp) | ||
484 | movl %ecx,48(%esp) | ||
485 | |||
486 | # qhasm: in0 = 1634760805 | ||
487 | # asm 1: mov $1634760805,>in0=int32#1 | ||
488 | # asm 2: mov $1634760805,>in0=%eax | ||
489 | mov $1634760805,%eax | ||
490 | |||
491 | # qhasm: in8 = 0 | ||
492 | # asm 1: mov $0,>in8=int32#2 | ||
493 | # asm 2: mov $0,>in8=%ecx | ||
494 | mov $0,%ecx | ||
495 | |||
496 | # qhasm: ((uint32 *)&x0)[0] = in0 | ||
497 | # asm 1: movl <in0=int32#1,>x0=stack128#3 | ||
498 | # asm 2: movl <in0=%eax,>x0=64(%esp) | ||
499 | movl %eax,64(%esp) | ||
500 | |||
501 | # qhasm: ((uint32 *)&x2)[0] = in8 | ||
502 | # asm 1: movl <in8=int32#2,>x2=stack128#4 | ||
503 | # asm 2: movl <in8=%ecx,>x2=80(%esp) | ||
504 | movl %ecx,80(%esp) | ||
505 | |||
506 | # qhasm: in6 = *(uint32 *) (iv + 0) | ||
507 | # asm 1: movl 0(<iv=int32#4),>in6=int32#1 | ||
508 | # asm 2: movl 0(<iv=%ebx),>in6=%eax | ||
509 | movl 0(%ebx),%eax | ||
510 | |||
511 | # qhasm: in7 = *(uint32 *) (iv + 4) | ||
512 | # asm 1: movl 4(<iv=int32#4),>in7=int32#2 | ||
513 | # asm 2: movl 4(<iv=%ebx),>in7=%ecx | ||
514 | movl 4(%ebx),%ecx | ||
515 | |||
516 | # qhasm: ((uint32 *)&x1)[2] = in6 | ||
517 | # asm 1: movl <in6=int32#1,8+<x1=stack128#2 | ||
518 | # asm 2: movl <in6=%eax,8+<x1=48(%esp) | ||
519 | movl %eax,8+48(%esp) | ||
520 | |||
521 | # qhasm: ((uint32 *)&x2)[3] = in7 | ||
522 | # asm 1: movl <in7=int32#2,12+<x2=stack128#4 | ||
523 | # asm 2: movl <in7=%ecx,12+<x2=80(%esp) | ||
524 | movl %ecx,12+80(%esp) | ||
525 | |||
526 | # qhasm: in9 = 0 | ||
527 | # asm 1: mov $0,>in9=int32#1 | ||
528 | # asm 2: mov $0,>in9=%eax | ||
529 | mov $0,%eax | ||
530 | |||
531 | # qhasm: in10 = 2036477234 | ||
532 | # asm 1: mov $2036477234,>in10=int32#2 | ||
533 | # asm 2: mov $2036477234,>in10=%ecx | ||
534 | mov $2036477234,%ecx | ||
535 | |||
536 | # qhasm: ((uint32 *)&x3)[1] = in9 | ||
537 | # asm 1: movl <in9=int32#1,4+<x3=stack128#1 | ||
538 | # asm 2: movl <in9=%eax,4+<x3=32(%esp) | ||
539 | movl %eax,4+32(%esp) | ||
540 | |||
541 | # qhasm: ((uint32 *)&x0)[2] = in10 | ||
542 | # asm 1: movl <in10=int32#2,8+<x0=stack128#3 | ||
543 | # asm 2: movl <in10=%ecx,8+<x0=64(%esp) | ||
544 | movl %ecx,8+64(%esp) | ||
545 | |||
546 | # qhasm: in1 = *(uint32 *) (k + 0) | ||
547 | # asm 1: movl 0(<k=int32#7),>in1=int32#1 | ||
548 | # asm 2: movl 0(<k=%ebp),>in1=%eax | ||
549 | movl 0(%ebp),%eax | ||
550 | |||
551 | # qhasm: in2 = *(uint32 *) (k + 4) | ||
552 | # asm 1: movl 4(<k=int32#7),>in2=int32#2 | ||
553 | # asm 2: movl 4(<k=%ebp),>in2=%ecx | ||
554 | movl 4(%ebp),%ecx | ||
555 | |||
556 | # qhasm: in3 = *(uint32 *) (k + 8) | ||
557 | # asm 1: movl 8(<k=int32#7),>in3=int32#3 | ||
558 | # asm 2: movl 8(<k=%ebp),>in3=%edx | ||
559 | movl 8(%ebp),%edx | ||
560 | |||
561 | # qhasm: in5 = 857760878 | ||
562 | # asm 1: mov $857760878,>in5=int32#4 | ||
563 | # asm 2: mov $857760878,>in5=%ebx | ||
564 | mov $857760878,%ebx | ||
565 | |||
566 | # qhasm: ((uint32 *)&x1)[1] = in1 | ||
567 | # asm 1: movl <in1=int32#1,4+<x1=stack128#2 | ||
568 | # asm 2: movl <in1=%eax,4+<x1=48(%esp) | ||
569 | movl %eax,4+48(%esp) | ||
570 | |||
571 | # qhasm: ((uint32 *)&x2)[2] = in2 | ||
572 | # asm 1: movl <in2=int32#2,8+<x2=stack128#4 | ||
573 | # asm 2: movl <in2=%ecx,8+<x2=80(%esp) | ||
574 | movl %ecx,8+80(%esp) | ||
575 | |||
576 | # qhasm: ((uint32 *)&x3)[3] = in3 | ||
577 | # asm 1: movl <in3=int32#3,12+<x3=stack128#1 | ||
578 | # asm 2: movl <in3=%edx,12+<x3=32(%esp) | ||
579 | movl %edx,12+32(%esp) | ||
580 | |||
581 | # qhasm: ((uint32 *)&x0)[1] = in5 | ||
582 | # asm 1: movl <in5=int32#4,4+<x0=stack128#3 | ||
583 | # asm 2: movl <in5=%ebx,4+<x0=64(%esp) | ||
584 | movl %ebx,4+64(%esp) | ||
585 | |||
586 | # qhasm: in11 = *(uint32 *) (k + 16) | ||
587 | # asm 1: movl 16(<k=int32#7),>in11=int32#1 | ||
588 | # asm 2: movl 16(<k=%ebp),>in11=%eax | ||
589 | movl 16(%ebp),%eax | ||
590 | |||
591 | # qhasm: in13 = *(uint32 *) (k + 24) | ||
592 | # asm 1: movl 24(<k=int32#7),>in13=int32#2 | ||
593 | # asm 2: movl 24(<k=%ebp),>in13=%ecx | ||
594 | movl 24(%ebp),%ecx | ||
595 | |||
596 | # qhasm: in14 = *(uint32 *) (k + 28) | ||
597 | # asm 1: movl 28(<k=int32#7),>in14=int32#3 | ||
598 | # asm 2: movl 28(<k=%ebp),>in14=%edx | ||
599 | movl 28(%ebp),%edx | ||
600 | |||
601 | # qhasm: in15 = 1797285236 | ||
602 | # asm 1: mov $1797285236,>in15=int32#4 | ||
603 | # asm 2: mov $1797285236,>in15=%ebx | ||
604 | mov $1797285236,%ebx | ||
605 | |||
606 | # qhasm: ((uint32 *)&x1)[3] = in11 | ||
607 | # asm 1: movl <in11=int32#1,12+<x1=stack128#2 | ||
608 | # asm 2: movl <in11=%eax,12+<x1=48(%esp) | ||
609 | movl %eax,12+48(%esp) | ||
610 | |||
611 | # qhasm: ((uint32 *)&x2)[1] = in13 | ||
612 | # asm 1: movl <in13=int32#2,4+<x2=stack128#4 | ||
613 | # asm 2: movl <in13=%ecx,4+<x2=80(%esp) | ||
614 | movl %ecx,4+80(%esp) | ||
615 | |||
616 | # qhasm: ((uint32 *)&x3)[2] = in14 | ||
617 | # asm 1: movl <in14=int32#3,8+<x3=stack128#1 | ||
618 | # asm 2: movl <in14=%edx,8+<x3=32(%esp) | ||
619 | movl %edx,8+32(%esp) | ||
620 | |||
621 | # qhasm: ((uint32 *)&x0)[3] = in15 | ||
622 | # asm 1: movl <in15=int32#4,12+<x0=stack128#3 | ||
623 | # asm 2: movl <in15=%ebx,12+<x0=64(%esp) | ||
624 | movl %ebx,12+64(%esp) | ||
625 | |||
626 | # qhasm: bytes = bytes_stack | ||
627 | # asm 1: movl <bytes_stack=stack32#7,>bytes=int32#1 | ||
628 | # asm 2: movl <bytes_stack=24(%esp),>bytes=%eax | ||
629 | movl 24(%esp),%eax | ||
630 | |||
631 | # qhasm: unsigned<? bytes - 256 | ||
632 | # asm 1: cmp $256,<bytes=int32#1 | ||
633 | # asm 2: cmp $256,<bytes=%eax | ||
634 | cmp $256,%eax | ||
635 | # comment:fp stack unchanged by jump | ||
636 | |||
637 | # qhasm: goto bytesbetween1and255 if unsigned< | ||
638 | jb ._bytesbetween1and255 | ||
639 | |||
640 | # qhasm: z0 = x0 | ||
641 | # asm 1: movdqa <x0=stack128#3,>z0=int6464#1 | ||
642 | # asm 2: movdqa <x0=64(%esp),>z0=%xmm0 | ||
643 | movdqa 64(%esp),%xmm0 | ||
644 | |||
645 | # qhasm: z5 = z0[1,1,1,1] | ||
646 | # asm 1: pshufd $0x55,<z0=int6464#1,>z5=int6464#2 | ||
647 | # asm 2: pshufd $0x55,<z0=%xmm0,>z5=%xmm1 | ||
648 | pshufd $0x55,%xmm0,%xmm1 | ||
649 | |||
650 | # qhasm: z10 = z0[2,2,2,2] | ||
651 | # asm 1: pshufd $0xaa,<z0=int6464#1,>z10=int6464#3 | ||
652 | # asm 2: pshufd $0xaa,<z0=%xmm0,>z10=%xmm2 | ||
653 | pshufd $0xaa,%xmm0,%xmm2 | ||
654 | |||
655 | # qhasm: z15 = z0[3,3,3,3] | ||
656 | # asm 1: pshufd $0xff,<z0=int6464#1,>z15=int6464#4 | ||
657 | # asm 2: pshufd $0xff,<z0=%xmm0,>z15=%xmm3 | ||
658 | pshufd $0xff,%xmm0,%xmm3 | ||
659 | |||
660 | # qhasm: z0 = z0[0,0,0,0] | ||
661 | # asm 1: pshufd $0x00,<z0=int6464#1,>z0=int6464#1 | ||
662 | # asm 2: pshufd $0x00,<z0=%xmm0,>z0=%xmm0 | ||
663 | pshufd $0x00,%xmm0,%xmm0 | ||
664 | |||
665 | # qhasm: orig5 = z5 | ||
666 | # asm 1: movdqa <z5=int6464#2,>orig5=stack128#5 | ||
667 | # asm 2: movdqa <z5=%xmm1,>orig5=96(%esp) | ||
668 | movdqa %xmm1,96(%esp) | ||
669 | |||
670 | # qhasm: orig10 = z10 | ||
671 | # asm 1: movdqa <z10=int6464#3,>orig10=stack128#6 | ||
672 | # asm 2: movdqa <z10=%xmm2,>orig10=112(%esp) | ||
673 | movdqa %xmm2,112(%esp) | ||
674 | |||
675 | # qhasm: orig15 = z15 | ||
676 | # asm 1: movdqa <z15=int6464#4,>orig15=stack128#7 | ||
677 | # asm 2: movdqa <z15=%xmm3,>orig15=128(%esp) | ||
678 | movdqa %xmm3,128(%esp) | ||
679 | |||
680 | # qhasm: orig0 = z0 | ||
681 | # asm 1: movdqa <z0=int6464#1,>orig0=stack128#8 | ||
682 | # asm 2: movdqa <z0=%xmm0,>orig0=144(%esp) | ||
683 | movdqa %xmm0,144(%esp) | ||
684 | |||
685 | # qhasm: z1 = x1 | ||
686 | # asm 1: movdqa <x1=stack128#2,>z1=int6464#1 | ||
687 | # asm 2: movdqa <x1=48(%esp),>z1=%xmm0 | ||
688 | movdqa 48(%esp),%xmm0 | ||
689 | |||
690 | # qhasm: z6 = z1[2,2,2,2] | ||
691 | # asm 1: pshufd $0xaa,<z1=int6464#1,>z6=int6464#2 | ||
692 | # asm 2: pshufd $0xaa,<z1=%xmm0,>z6=%xmm1 | ||
693 | pshufd $0xaa,%xmm0,%xmm1 | ||
694 | |||
695 | # qhasm: z11 = z1[3,3,3,3] | ||
696 | # asm 1: pshufd $0xff,<z1=int6464#1,>z11=int6464#3 | ||
697 | # asm 2: pshufd $0xff,<z1=%xmm0,>z11=%xmm2 | ||
698 | pshufd $0xff,%xmm0,%xmm2 | ||
699 | |||
700 | # qhasm: z12 = z1[0,0,0,0] | ||
701 | # asm 1: pshufd $0x00,<z1=int6464#1,>z12=int6464#4 | ||
702 | # asm 2: pshufd $0x00,<z1=%xmm0,>z12=%xmm3 | ||
703 | pshufd $0x00,%xmm0,%xmm3 | ||
704 | |||
705 | # qhasm: z1 = z1[1,1,1,1] | ||
706 | # asm 1: pshufd $0x55,<z1=int6464#1,>z1=int6464#1 | ||
707 | # asm 2: pshufd $0x55,<z1=%xmm0,>z1=%xmm0 | ||
708 | pshufd $0x55,%xmm0,%xmm0 | ||
709 | |||
710 | # qhasm: orig6 = z6 | ||
711 | # asm 1: movdqa <z6=int6464#2,>orig6=stack128#9 | ||
712 | # asm 2: movdqa <z6=%xmm1,>orig6=160(%esp) | ||
713 | movdqa %xmm1,160(%esp) | ||
714 | |||
715 | # qhasm: orig11 = z11 | ||
716 | # asm 1: movdqa <z11=int6464#3,>orig11=stack128#10 | ||
717 | # asm 2: movdqa <z11=%xmm2,>orig11=176(%esp) | ||
718 | movdqa %xmm2,176(%esp) | ||
719 | |||
720 | # qhasm: orig12 = z12 | ||
721 | # asm 1: movdqa <z12=int6464#4,>orig12=stack128#11 | ||
722 | # asm 2: movdqa <z12=%xmm3,>orig12=192(%esp) | ||
723 | movdqa %xmm3,192(%esp) | ||
724 | |||
725 | # qhasm: orig1 = z1 | ||
726 | # asm 1: movdqa <z1=int6464#1,>orig1=stack128#12 | ||
727 | # asm 2: movdqa <z1=%xmm0,>orig1=208(%esp) | ||
728 | movdqa %xmm0,208(%esp) | ||
729 | |||
730 | # qhasm: z2 = x2 | ||
731 | # asm 1: movdqa <x2=stack128#4,>z2=int6464#1 | ||
732 | # asm 2: movdqa <x2=80(%esp),>z2=%xmm0 | ||
733 | movdqa 80(%esp),%xmm0 | ||
734 | |||
735 | # qhasm: z7 = z2[3,3,3,3] | ||
736 | # asm 1: pshufd $0xff,<z2=int6464#1,>z7=int6464#2 | ||
737 | # asm 2: pshufd $0xff,<z2=%xmm0,>z7=%xmm1 | ||
738 | pshufd $0xff,%xmm0,%xmm1 | ||
739 | |||
740 | # qhasm: z13 = z2[1,1,1,1] | ||
741 | # asm 1: pshufd $0x55,<z2=int6464#1,>z13=int6464#3 | ||
742 | # asm 2: pshufd $0x55,<z2=%xmm0,>z13=%xmm2 | ||
743 | pshufd $0x55,%xmm0,%xmm2 | ||
744 | |||
745 | # qhasm: z2 = z2[2,2,2,2] | ||
746 | # asm 1: pshufd $0xaa,<z2=int6464#1,>z2=int6464#1 | ||
747 | # asm 2: pshufd $0xaa,<z2=%xmm0,>z2=%xmm0 | ||
748 | pshufd $0xaa,%xmm0,%xmm0 | ||
749 | |||
750 | # qhasm: orig7 = z7 | ||
751 | # asm 1: movdqa <z7=int6464#2,>orig7=stack128#13 | ||
752 | # asm 2: movdqa <z7=%xmm1,>orig7=224(%esp) | ||
753 | movdqa %xmm1,224(%esp) | ||
754 | |||
755 | # qhasm: orig13 = z13 | ||
756 | # asm 1: movdqa <z13=int6464#3,>orig13=stack128#14 | ||
757 | # asm 2: movdqa <z13=%xmm2,>orig13=240(%esp) | ||
758 | movdqa %xmm2,240(%esp) | ||
759 | |||
760 | # qhasm: orig2 = z2 | ||
761 | # asm 1: movdqa <z2=int6464#1,>orig2=stack128#15 | ||
762 | # asm 2: movdqa <z2=%xmm0,>orig2=256(%esp) | ||
763 | movdqa %xmm0,256(%esp) | ||
764 | |||
765 | # qhasm: z3 = x3 | ||
766 | # asm 1: movdqa <x3=stack128#1,>z3=int6464#1 | ||
767 | # asm 2: movdqa <x3=32(%esp),>z3=%xmm0 | ||
768 | movdqa 32(%esp),%xmm0 | ||
769 | |||
770 | # qhasm: z4 = z3[0,0,0,0] | ||
771 | # asm 1: pshufd $0x00,<z3=int6464#1,>z4=int6464#2 | ||
772 | # asm 2: pshufd $0x00,<z3=%xmm0,>z4=%xmm1 | ||
773 | pshufd $0x00,%xmm0,%xmm1 | ||
774 | |||
775 | # qhasm: z14 = z3[2,2,2,2] | ||
776 | # asm 1: pshufd $0xaa,<z3=int6464#1,>z14=int6464#3 | ||
777 | # asm 2: pshufd $0xaa,<z3=%xmm0,>z14=%xmm2 | ||
778 | pshufd $0xaa,%xmm0,%xmm2 | ||
779 | |||
780 | # qhasm: z3 = z3[3,3,3,3] | ||
781 | # asm 1: pshufd $0xff,<z3=int6464#1,>z3=int6464#1 | ||
782 | # asm 2: pshufd $0xff,<z3=%xmm0,>z3=%xmm0 | ||
783 | pshufd $0xff,%xmm0,%xmm0 | ||
784 | |||
785 | # qhasm: orig4 = z4 | ||
786 | # asm 1: movdqa <z4=int6464#2,>orig4=stack128#16 | ||
787 | # asm 2: movdqa <z4=%xmm1,>orig4=272(%esp) | ||
788 | movdqa %xmm1,272(%esp) | ||
789 | |||
790 | # qhasm: orig14 = z14 | ||
791 | # asm 1: movdqa <z14=int6464#3,>orig14=stack128#17 | ||
792 | # asm 2: movdqa <z14=%xmm2,>orig14=288(%esp) | ||
793 | movdqa %xmm2,288(%esp) | ||
794 | |||
795 | # qhasm: orig3 = z3 | ||
796 | # asm 1: movdqa <z3=int6464#1,>orig3=stack128#18 | ||
797 | # asm 2: movdqa <z3=%xmm0,>orig3=304(%esp) | ||
798 | movdqa %xmm0,304(%esp) | ||
799 | |||
800 | # qhasm: bytesatleast256: | ||
801 | ._bytesatleast256: | ||
802 | |||
803 | # qhasm: in8 = ((uint32 *)&x2)[0] | ||
804 | # asm 1: movl <x2=stack128#4,>in8=int32#2 | ||
805 | # asm 2: movl <x2=80(%esp),>in8=%ecx | ||
806 | movl 80(%esp),%ecx | ||
807 | |||
808 | # qhasm: in9 = ((uint32 *)&x3)[1] | ||
809 | # asm 1: movl 4+<x3=stack128#1,>in9=int32#3 | ||
810 | # asm 2: movl 4+<x3=32(%esp),>in9=%edx | ||
811 | movl 4+32(%esp),%edx | ||
812 | |||
813 | # qhasm: ((uint32 *) &orig8)[0] = in8 | ||
814 | # asm 1: movl <in8=int32#2,>orig8=stack128#19 | ||
815 | # asm 2: movl <in8=%ecx,>orig8=320(%esp) | ||
816 | movl %ecx,320(%esp) | ||
817 | |||
818 | # qhasm: ((uint32 *) &orig9)[0] = in9 | ||
819 | # asm 1: movl <in9=int32#3,>orig9=stack128#20 | ||
820 | # asm 2: movl <in9=%edx,>orig9=336(%esp) | ||
821 | movl %edx,336(%esp) | ||
822 | |||
823 | # qhasm: carry? in8 += 1 | ||
824 | # asm 1: add $1,<in8=int32#2 | ||
825 | # asm 2: add $1,<in8=%ecx | ||
826 | add $1,%ecx | ||
827 | |||
828 | # qhasm: in9 += 0 + carry | ||
829 | # asm 1: adc $0,<in9=int32#3 | ||
830 | # asm 2: adc $0,<in9=%edx | ||
831 | adc $0,%edx | ||
832 | |||
833 | # qhasm: ((uint32 *) &orig8)[1] = in8 | ||
834 | # asm 1: movl <in8=int32#2,4+<orig8=stack128#19 | ||
835 | # asm 2: movl <in8=%ecx,4+<orig8=320(%esp) | ||
836 | movl %ecx,4+320(%esp) | ||
837 | |||
838 | # qhasm: ((uint32 *) &orig9)[1] = in9 | ||
839 | # asm 1: movl <in9=int32#3,4+<orig9=stack128#20 | ||
840 | # asm 2: movl <in9=%edx,4+<orig9=336(%esp) | ||
841 | movl %edx,4+336(%esp) | ||
842 | |||
843 | # qhasm: carry? in8 += 1 | ||
844 | # asm 1: add $1,<in8=int32#2 | ||
845 | # asm 2: add $1,<in8=%ecx | ||
846 | add $1,%ecx | ||
847 | |||
848 | # qhasm: in9 += 0 + carry | ||
849 | # asm 1: adc $0,<in9=int32#3 | ||
850 | # asm 2: adc $0,<in9=%edx | ||
851 | adc $0,%edx | ||
852 | |||
853 | # qhasm: ((uint32 *) &orig8)[2] = in8 | ||
854 | # asm 1: movl <in8=int32#2,8+<orig8=stack128#19 | ||
855 | # asm 2: movl <in8=%ecx,8+<orig8=320(%esp) | ||
856 | movl %ecx,8+320(%esp) | ||
857 | |||
858 | # qhasm: ((uint32 *) &orig9)[2] = in9 | ||
859 | # asm 1: movl <in9=int32#3,8+<orig9=stack128#20 | ||
860 | # asm 2: movl <in9=%edx,8+<orig9=336(%esp) | ||
861 | movl %edx,8+336(%esp) | ||
862 | |||
863 | # qhasm: carry? in8 += 1 | ||
864 | # asm 1: add $1,<in8=int32#2 | ||
865 | # asm 2: add $1,<in8=%ecx | ||
866 | add $1,%ecx | ||
867 | |||
868 | # qhasm: in9 += 0 + carry | ||
869 | # asm 1: adc $0,<in9=int32#3 | ||
870 | # asm 2: adc $0,<in9=%edx | ||
871 | adc $0,%edx | ||
872 | |||
873 | # qhasm: ((uint32 *) &orig8)[3] = in8 | ||
874 | # asm 1: movl <in8=int32#2,12+<orig8=stack128#19 | ||
875 | # asm 2: movl <in8=%ecx,12+<orig8=320(%esp) | ||
876 | movl %ecx,12+320(%esp) | ||
877 | |||
878 | # qhasm: ((uint32 *) &orig9)[3] = in9 | ||
879 | # asm 1: movl <in9=int32#3,12+<orig9=stack128#20 | ||
880 | # asm 2: movl <in9=%edx,12+<orig9=336(%esp) | ||
881 | movl %edx,12+336(%esp) | ||
882 | |||
883 | # qhasm: carry? in8 += 1 | ||
884 | # asm 1: add $1,<in8=int32#2 | ||
885 | # asm 2: add $1,<in8=%ecx | ||
886 | add $1,%ecx | ||
887 | |||
888 | # qhasm: in9 += 0 + carry | ||
889 | # asm 1: adc $0,<in9=int32#3 | ||
890 | # asm 2: adc $0,<in9=%edx | ||
891 | adc $0,%edx | ||
892 | |||
893 | # qhasm: ((uint32 *)&x2)[0] = in8 | ||
894 | # asm 1: movl <in8=int32#2,>x2=stack128#4 | ||
895 | # asm 2: movl <in8=%ecx,>x2=80(%esp) | ||
896 | movl %ecx,80(%esp) | ||
897 | |||
898 | # qhasm: ((uint32 *)&x3)[1] = in9 | ||
899 | # asm 1: movl <in9=int32#3,4+<x3=stack128#1 | ||
900 | # asm 2: movl <in9=%edx,4+<x3=32(%esp) | ||
901 | movl %edx,4+32(%esp) | ||
902 | |||
903 | # qhasm: bytes_stack = bytes | ||
904 | # asm 1: movl <bytes=int32#1,>bytes_stack=stack32#7 | ||
905 | # asm 2: movl <bytes=%eax,>bytes_stack=24(%esp) | ||
906 | movl %eax,24(%esp) | ||
907 | |||
908 | # qhasm: i = 20 | ||
909 | # asm 1: mov $20,>i=int32#1 | ||
910 | # asm 2: mov $20,>i=%eax | ||
911 | mov $20,%eax | ||
912 | |||
913 | # qhasm: z5 = orig5 | ||
914 | # asm 1: movdqa <orig5=stack128#5,>z5=int6464#1 | ||
915 | # asm 2: movdqa <orig5=96(%esp),>z5=%xmm0 | ||
916 | movdqa 96(%esp),%xmm0 | ||
917 | |||
918 | # qhasm: z10 = orig10 | ||
919 | # asm 1: movdqa <orig10=stack128#6,>z10=int6464#2 | ||
920 | # asm 2: movdqa <orig10=112(%esp),>z10=%xmm1 | ||
921 | movdqa 112(%esp),%xmm1 | ||
922 | |||
923 | # qhasm: z15 = orig15 | ||
924 | # asm 1: movdqa <orig15=stack128#7,>z15=int6464#3 | ||
925 | # asm 2: movdqa <orig15=128(%esp),>z15=%xmm2 | ||
926 | movdqa 128(%esp),%xmm2 | ||
927 | |||
928 | # qhasm: z14 = orig14 | ||
929 | # asm 1: movdqa <orig14=stack128#17,>z14=int6464#4 | ||
930 | # asm 2: movdqa <orig14=288(%esp),>z14=%xmm3 | ||
931 | movdqa 288(%esp),%xmm3 | ||
932 | |||
933 | # qhasm: z3 = orig3 | ||
934 | # asm 1: movdqa <orig3=stack128#18,>z3=int6464#5 | ||
935 | # asm 2: movdqa <orig3=304(%esp),>z3=%xmm4 | ||
936 | movdqa 304(%esp),%xmm4 | ||
937 | |||
938 | # qhasm: z6 = orig6 | ||
939 | # asm 1: movdqa <orig6=stack128#9,>z6=int6464#6 | ||
940 | # asm 2: movdqa <orig6=160(%esp),>z6=%xmm5 | ||
941 | movdqa 160(%esp),%xmm5 | ||
942 | |||
943 | # qhasm: z11 = orig11 | ||
944 | # asm 1: movdqa <orig11=stack128#10,>z11=int6464#7 | ||
945 | # asm 2: movdqa <orig11=176(%esp),>z11=%xmm6 | ||
946 | movdqa 176(%esp),%xmm6 | ||
947 | |||
948 | # qhasm: z1 = orig1 | ||
949 | # asm 1: movdqa <orig1=stack128#12,>z1=int6464#8 | ||
950 | # asm 2: movdqa <orig1=208(%esp),>z1=%xmm7 | ||
951 | movdqa 208(%esp),%xmm7 | ||
952 | |||
953 | # qhasm: z5_stack = z5 | ||
954 | # asm 1: movdqa <z5=int6464#1,>z5_stack=stack128#21 | ||
955 | # asm 2: movdqa <z5=%xmm0,>z5_stack=352(%esp) | ||
956 | movdqa %xmm0,352(%esp) | ||
957 | |||
958 | # qhasm: z10_stack = z10 | ||
959 | # asm 1: movdqa <z10=int6464#2,>z10_stack=stack128#22 | ||
960 | # asm 2: movdqa <z10=%xmm1,>z10_stack=368(%esp) | ||
961 | movdqa %xmm1,368(%esp) | ||
962 | |||
963 | # qhasm: z15_stack = z15 | ||
964 | # asm 1: movdqa <z15=int6464#3,>z15_stack=stack128#23 | ||
965 | # asm 2: movdqa <z15=%xmm2,>z15_stack=384(%esp) | ||
966 | movdqa %xmm2,384(%esp) | ||
967 | |||
968 | # qhasm: z14_stack = z14 | ||
969 | # asm 1: movdqa <z14=int6464#4,>z14_stack=stack128#24 | ||
970 | # asm 2: movdqa <z14=%xmm3,>z14_stack=400(%esp) | ||
971 | movdqa %xmm3,400(%esp) | ||
972 | |||
973 | # qhasm: z3_stack = z3 | ||
974 | # asm 1: movdqa <z3=int6464#5,>z3_stack=stack128#25 | ||
975 | # asm 2: movdqa <z3=%xmm4,>z3_stack=416(%esp) | ||
976 | movdqa %xmm4,416(%esp) | ||
977 | |||
978 | # qhasm: z6_stack = z6 | ||
979 | # asm 1: movdqa <z6=int6464#6,>z6_stack=stack128#26 | ||
980 | # asm 2: movdqa <z6=%xmm5,>z6_stack=432(%esp) | ||
981 | movdqa %xmm5,432(%esp) | ||
982 | |||
983 | # qhasm: z11_stack = z11 | ||
984 | # asm 1: movdqa <z11=int6464#7,>z11_stack=stack128#27 | ||
985 | # asm 2: movdqa <z11=%xmm6,>z11_stack=448(%esp) | ||
986 | movdqa %xmm6,448(%esp) | ||
987 | |||
988 | # qhasm: z1_stack = z1 | ||
989 | # asm 1: movdqa <z1=int6464#8,>z1_stack=stack128#28 | ||
990 | # asm 2: movdqa <z1=%xmm7,>z1_stack=464(%esp) | ||
991 | movdqa %xmm7,464(%esp) | ||
992 | |||
993 | # qhasm: z7 = orig7 | ||
994 | # asm 1: movdqa <orig7=stack128#13,>z7=int6464#5 | ||
995 | # asm 2: movdqa <orig7=224(%esp),>z7=%xmm4 | ||
996 | movdqa 224(%esp),%xmm4 | ||
997 | |||
998 | # qhasm: z13 = orig13 | ||
999 | # asm 1: movdqa <orig13=stack128#14,>z13=int6464#6 | ||
1000 | # asm 2: movdqa <orig13=240(%esp),>z13=%xmm5 | ||
1001 | movdqa 240(%esp),%xmm5 | ||
1002 | |||
1003 | # qhasm: z2 = orig2 | ||
1004 | # asm 1: movdqa <orig2=stack128#15,>z2=int6464#7 | ||
1005 | # asm 2: movdqa <orig2=256(%esp),>z2=%xmm6 | ||
1006 | movdqa 256(%esp),%xmm6 | ||
1007 | |||
1008 | # qhasm: z9 = orig9 | ||
1009 | # asm 1: movdqa <orig9=stack128#20,>z9=int6464#8 | ||
1010 | # asm 2: movdqa <orig9=336(%esp),>z9=%xmm7 | ||
1011 | movdqa 336(%esp),%xmm7 | ||
1012 | |||
1013 | # qhasm: p = orig0 | ||
1014 | # asm 1: movdqa <orig0=stack128#8,>p=int6464#1 | ||
1015 | # asm 2: movdqa <orig0=144(%esp),>p=%xmm0 | ||
1016 | movdqa 144(%esp),%xmm0 | ||
1017 | |||
1018 | # qhasm: t = orig12 | ||
1019 | # asm 1: movdqa <orig12=stack128#11,>t=int6464#3 | ||
1020 | # asm 2: movdqa <orig12=192(%esp),>t=%xmm2 | ||
1021 | movdqa 192(%esp),%xmm2 | ||
1022 | |||
1023 | # qhasm: q = orig4 | ||
1024 | # asm 1: movdqa <orig4=stack128#16,>q=int6464#4 | ||
1025 | # asm 2: movdqa <orig4=272(%esp),>q=%xmm3 | ||
1026 | movdqa 272(%esp),%xmm3 | ||
1027 | |||
1028 | # qhasm: r = orig8 | ||
1029 | # asm 1: movdqa <orig8=stack128#19,>r=int6464#2 | ||
1030 | # asm 2: movdqa <orig8=320(%esp),>r=%xmm1 | ||
1031 | movdqa 320(%esp),%xmm1 | ||
1032 | |||
1033 | # qhasm: z7_stack = z7 | ||
1034 | # asm 1: movdqa <z7=int6464#5,>z7_stack=stack128#29 | ||
1035 | # asm 2: movdqa <z7=%xmm4,>z7_stack=480(%esp) | ||
1036 | movdqa %xmm4,480(%esp) | ||
1037 | |||
1038 | # qhasm: z13_stack = z13 | ||
1039 | # asm 1: movdqa <z13=int6464#6,>z13_stack=stack128#30 | ||
1040 | # asm 2: movdqa <z13=%xmm5,>z13_stack=496(%esp) | ||
1041 | movdqa %xmm5,496(%esp) | ||
1042 | |||
1043 | # qhasm: z2_stack = z2 | ||
1044 | # asm 1: movdqa <z2=int6464#7,>z2_stack=stack128#31 | ||
1045 | # asm 2: movdqa <z2=%xmm6,>z2_stack=512(%esp) | ||
1046 | movdqa %xmm6,512(%esp) | ||
1047 | |||
1048 | # qhasm: z9_stack = z9 | ||
1049 | # asm 1: movdqa <z9=int6464#8,>z9_stack=stack128#32 | ||
1050 | # asm 2: movdqa <z9=%xmm7,>z9_stack=528(%esp) | ||
1051 | movdqa %xmm7,528(%esp) | ||
1052 | |||
1053 | # qhasm: z0_stack = p | ||
1054 | # asm 1: movdqa <p=int6464#1,>z0_stack=stack128#33 | ||
1055 | # asm 2: movdqa <p=%xmm0,>z0_stack=544(%esp) | ||
1056 | movdqa %xmm0,544(%esp) | ||
1057 | |||
1058 | # qhasm: z12_stack = t | ||
1059 | # asm 1: movdqa <t=int6464#3,>z12_stack=stack128#34 | ||
1060 | # asm 2: movdqa <t=%xmm2,>z12_stack=560(%esp) | ||
1061 | movdqa %xmm2,560(%esp) | ||
1062 | |||
1063 | # qhasm: z4_stack = q | ||
1064 | # asm 1: movdqa <q=int6464#4,>z4_stack=stack128#35 | ||
1065 | # asm 2: movdqa <q=%xmm3,>z4_stack=576(%esp) | ||
1066 | movdqa %xmm3,576(%esp) | ||
1067 | |||
1068 | # qhasm: z8_stack = r | ||
1069 | # asm 1: movdqa <r=int6464#2,>z8_stack=stack128#36 | ||
1070 | # asm 2: movdqa <r=%xmm1,>z8_stack=592(%esp) | ||
1071 | movdqa %xmm1,592(%esp) | ||
1072 | |||
1073 | # qhasm: mainloop1: | ||
1074 | ._mainloop1: | ||
1075 | |||
1076 | # qhasm: assign xmm0 to p | ||
1077 | |||
1078 | # qhasm: assign xmm1 to r | ||
1079 | |||
1080 | # qhasm: assign xmm2 to t | ||
1081 | |||
1082 | # qhasm: assign xmm3 to q | ||
1083 | |||
1084 | # qhasm: s = t | ||
1085 | # asm 1: movdqa <t=int6464#3,>s=int6464#7 | ||
1086 | # asm 2: movdqa <t=%xmm2,>s=%xmm6 | ||
1087 | movdqa %xmm2,%xmm6 | ||
1088 | |||
1089 | # qhasm: uint32323232 t += p | ||
1090 | # asm 1: paddd <p=int6464#1,<t=int6464#3 | ||
1091 | # asm 2: paddd <p=%xmm0,<t=%xmm2 | ||
1092 | paddd %xmm0,%xmm2 | ||
1093 | |||
1094 | # qhasm: u = t | ||
1095 | # asm 1: movdqa <t=int6464#3,>u=int6464#5 | ||
1096 | # asm 2: movdqa <t=%xmm2,>u=%xmm4 | ||
1097 | movdqa %xmm2,%xmm4 | ||
1098 | |||
1099 | # qhasm: uint32323232 t >>= 25 | ||
1100 | # asm 1: psrld $25,<t=int6464#3 | ||
1101 | # asm 2: psrld $25,<t=%xmm2 | ||
1102 | psrld $25,%xmm2 | ||
1103 | |||
1104 | # qhasm: q ^= t | ||
1105 | # asm 1: pxor <t=int6464#3,<q=int6464#4 | ||
1106 | # asm 2: pxor <t=%xmm2,<q=%xmm3 | ||
1107 | pxor %xmm2,%xmm3 | ||
1108 | |||
1109 | # qhasm: uint32323232 u <<= 7 | ||
1110 | # asm 1: pslld $7,<u=int6464#5 | ||
1111 | # asm 2: pslld $7,<u=%xmm4 | ||
1112 | pslld $7,%xmm4 | ||
1113 | |||
1114 | # qhasm: q ^= u | ||
1115 | # asm 1: pxor <u=int6464#5,<q=int6464#4 | ||
1116 | # asm 2: pxor <u=%xmm4,<q=%xmm3 | ||
1117 | pxor %xmm4,%xmm3 | ||
1118 | |||
1119 | # qhasm: z4_stack = q | ||
1120 | # asm 1: movdqa <q=int6464#4,>z4_stack=stack128#33 | ||
1121 | # asm 2: movdqa <q=%xmm3,>z4_stack=544(%esp) | ||
1122 | movdqa %xmm3,544(%esp) | ||
1123 | |||
1124 | # qhasm: t = p | ||
1125 | # asm 1: movdqa <p=int6464#1,>t=int6464#3 | ||
1126 | # asm 2: movdqa <p=%xmm0,>t=%xmm2 | ||
1127 | movdqa %xmm0,%xmm2 | ||
1128 | |||
1129 | # qhasm: uint32323232 t += q | ||
1130 | # asm 1: paddd <q=int6464#4,<t=int6464#3 | ||
1131 | # asm 2: paddd <q=%xmm3,<t=%xmm2 | ||
1132 | paddd %xmm3,%xmm2 | ||
1133 | |||
1134 | # qhasm: u = t | ||
1135 | # asm 1: movdqa <t=int6464#3,>u=int6464#5 | ||
1136 | # asm 2: movdqa <t=%xmm2,>u=%xmm4 | ||
1137 | movdqa %xmm2,%xmm4 | ||
1138 | |||
1139 | # qhasm: uint32323232 t >>= 23 | ||
1140 | # asm 1: psrld $23,<t=int6464#3 | ||
1141 | # asm 2: psrld $23,<t=%xmm2 | ||
1142 | psrld $23,%xmm2 | ||
1143 | |||
1144 | # qhasm: r ^= t | ||
1145 | # asm 1: pxor <t=int6464#3,<r=int6464#2 | ||
1146 | # asm 2: pxor <t=%xmm2,<r=%xmm1 | ||
1147 | pxor %xmm2,%xmm1 | ||
1148 | |||
1149 | # qhasm: uint32323232 u <<= 9 | ||
1150 | # asm 1: pslld $9,<u=int6464#5 | ||
1151 | # asm 2: pslld $9,<u=%xmm4 | ||
1152 | pslld $9,%xmm4 | ||
1153 | |||
1154 | # qhasm: r ^= u | ||
1155 | # asm 1: pxor <u=int6464#5,<r=int6464#2 | ||
1156 | # asm 2: pxor <u=%xmm4,<r=%xmm1 | ||
1157 | pxor %xmm4,%xmm1 | ||
1158 | |||
1159 | # qhasm: z8_stack = r | ||
1160 | # asm 1: movdqa <r=int6464#2,>z8_stack=stack128#34 | ||
1161 | # asm 2: movdqa <r=%xmm1,>z8_stack=560(%esp) | ||
1162 | movdqa %xmm1,560(%esp) | ||
1163 | |||
1164 | # qhasm: uint32323232 q += r | ||
1165 | # asm 1: paddd <r=int6464#2,<q=int6464#4 | ||
1166 | # asm 2: paddd <r=%xmm1,<q=%xmm3 | ||
1167 | paddd %xmm1,%xmm3 | ||
1168 | |||
1169 | # qhasm: u = q | ||
1170 | # asm 1: movdqa <q=int6464#4,>u=int6464#3 | ||
1171 | # asm 2: movdqa <q=%xmm3,>u=%xmm2 | ||
1172 | movdqa %xmm3,%xmm2 | ||
1173 | |||
1174 | # qhasm: uint32323232 q >>= 19 | ||
1175 | # asm 1: psrld $19,<q=int6464#4 | ||
1176 | # asm 2: psrld $19,<q=%xmm3 | ||
1177 | psrld $19,%xmm3 | ||
1178 | |||
1179 | # qhasm: s ^= q | ||
1180 | # asm 1: pxor <q=int6464#4,<s=int6464#7 | ||
1181 | # asm 2: pxor <q=%xmm3,<s=%xmm6 | ||
1182 | pxor %xmm3,%xmm6 | ||
1183 | |||
1184 | # qhasm: uint32323232 u <<= 13 | ||
1185 | # asm 1: pslld $13,<u=int6464#3 | ||
1186 | # asm 2: pslld $13,<u=%xmm2 | ||
1187 | pslld $13,%xmm2 | ||
1188 | |||
1189 | # qhasm: s ^= u | ||
1190 | # asm 1: pxor <u=int6464#3,<s=int6464#7 | ||
1191 | # asm 2: pxor <u=%xmm2,<s=%xmm6 | ||
1192 | pxor %xmm2,%xmm6 | ||
1193 | |||
1194 | # qhasm: mt = z1_stack | ||
1195 | # asm 1: movdqa <z1_stack=stack128#28,>mt=int6464#3 | ||
1196 | # asm 2: movdqa <z1_stack=464(%esp),>mt=%xmm2 | ||
1197 | movdqa 464(%esp),%xmm2 | ||
1198 | |||
1199 | # qhasm: mp = z5_stack | ||
1200 | # asm 1: movdqa <z5_stack=stack128#21,>mp=int6464#5 | ||
1201 | # asm 2: movdqa <z5_stack=352(%esp),>mp=%xmm4 | ||
1202 | movdqa 352(%esp),%xmm4 | ||
1203 | |||
1204 | # qhasm: mq = z9_stack | ||
1205 | # asm 1: movdqa <z9_stack=stack128#32,>mq=int6464#4 | ||
1206 | # asm 2: movdqa <z9_stack=528(%esp),>mq=%xmm3 | ||
1207 | movdqa 528(%esp),%xmm3 | ||
1208 | |||
1209 | # qhasm: mr = z13_stack | ||
1210 | # asm 1: movdqa <z13_stack=stack128#30,>mr=int6464#6 | ||
1211 | # asm 2: movdqa <z13_stack=496(%esp),>mr=%xmm5 | ||
1212 | movdqa 496(%esp),%xmm5 | ||
1213 | |||
1214 | # qhasm: z12_stack = s | ||
1215 | # asm 1: movdqa <s=int6464#7,>z12_stack=stack128#30 | ||
1216 | # asm 2: movdqa <s=%xmm6,>z12_stack=496(%esp) | ||
1217 | movdqa %xmm6,496(%esp) | ||
1218 | |||
1219 | # qhasm: uint32323232 r += s | ||
1220 | # asm 1: paddd <s=int6464#7,<r=int6464#2 | ||
1221 | # asm 2: paddd <s=%xmm6,<r=%xmm1 | ||
1222 | paddd %xmm6,%xmm1 | ||
1223 | |||
1224 | # qhasm: u = r | ||
1225 | # asm 1: movdqa <r=int6464#2,>u=int6464#7 | ||
1226 | # asm 2: movdqa <r=%xmm1,>u=%xmm6 | ||
1227 | movdqa %xmm1,%xmm6 | ||
1228 | |||
1229 | # qhasm: uint32323232 r >>= 14 | ||
1230 | # asm 1: psrld $14,<r=int6464#2 | ||
1231 | # asm 2: psrld $14,<r=%xmm1 | ||
1232 | psrld $14,%xmm1 | ||
1233 | |||
1234 | # qhasm: p ^= r | ||
1235 | # asm 1: pxor <r=int6464#2,<p=int6464#1 | ||
1236 | # asm 2: pxor <r=%xmm1,<p=%xmm0 | ||
1237 | pxor %xmm1,%xmm0 | ||
1238 | |||
1239 | # qhasm: uint32323232 u <<= 18 | ||
1240 | # asm 1: pslld $18,<u=int6464#7 | ||
1241 | # asm 2: pslld $18,<u=%xmm6 | ||
1242 | pslld $18,%xmm6 | ||
1243 | |||
1244 | # qhasm: p ^= u | ||
1245 | # asm 1: pxor <u=int6464#7,<p=int6464#1 | ||
1246 | # asm 2: pxor <u=%xmm6,<p=%xmm0 | ||
1247 | pxor %xmm6,%xmm0 | ||
1248 | |||
1249 | # qhasm: z0_stack = p | ||
1250 | # asm 1: movdqa <p=int6464#1,>z0_stack=stack128#21 | ||
1251 | # asm 2: movdqa <p=%xmm0,>z0_stack=352(%esp) | ||
1252 | movdqa %xmm0,352(%esp) | ||
1253 | |||
1254 | # qhasm: assign xmm2 to mt | ||
1255 | |||
1256 | # qhasm: assign xmm3 to mq | ||
1257 | |||
1258 | # qhasm: assign xmm4 to mp | ||
1259 | |||
1260 | # qhasm: assign xmm5 to mr | ||
1261 | |||
1262 | # qhasm: ms = mt | ||
1263 | # asm 1: movdqa <mt=int6464#3,>ms=int6464#7 | ||
1264 | # asm 2: movdqa <mt=%xmm2,>ms=%xmm6 | ||
1265 | movdqa %xmm2,%xmm6 | ||
1266 | |||
1267 | # qhasm: uint32323232 mt += mp | ||
1268 | # asm 1: paddd <mp=int6464#5,<mt=int6464#3 | ||
1269 | # asm 2: paddd <mp=%xmm4,<mt=%xmm2 | ||
1270 | paddd %xmm4,%xmm2 | ||
1271 | |||
1272 | # qhasm: mu = mt | ||
1273 | # asm 1: movdqa <mt=int6464#3,>mu=int6464#1 | ||
1274 | # asm 2: movdqa <mt=%xmm2,>mu=%xmm0 | ||
1275 | movdqa %xmm2,%xmm0 | ||
1276 | |||
1277 | # qhasm: uint32323232 mt >>= 25 | ||
1278 | # asm 1: psrld $25,<mt=int6464#3 | ||
1279 | # asm 2: psrld $25,<mt=%xmm2 | ||
1280 | psrld $25,%xmm2 | ||
1281 | |||
1282 | # qhasm: mq ^= mt | ||
1283 | # asm 1: pxor <mt=int6464#3,<mq=int6464#4 | ||
1284 | # asm 2: pxor <mt=%xmm2,<mq=%xmm3 | ||
1285 | pxor %xmm2,%xmm3 | ||
1286 | |||
1287 | # qhasm: uint32323232 mu <<= 7 | ||
1288 | # asm 1: pslld $7,<mu=int6464#1 | ||
1289 | # asm 2: pslld $7,<mu=%xmm0 | ||
1290 | pslld $7,%xmm0 | ||
1291 | |||
1292 | # qhasm: mq ^= mu | ||
1293 | # asm 1: pxor <mu=int6464#1,<mq=int6464#4 | ||
1294 | # asm 2: pxor <mu=%xmm0,<mq=%xmm3 | ||
1295 | pxor %xmm0,%xmm3 | ||
1296 | |||
1297 | # qhasm: z9_stack = mq | ||
1298 | # asm 1: movdqa <mq=int6464#4,>z9_stack=stack128#32 | ||
1299 | # asm 2: movdqa <mq=%xmm3,>z9_stack=528(%esp) | ||
1300 | movdqa %xmm3,528(%esp) | ||
1301 | |||
1302 | # qhasm: mt = mp | ||
1303 | # asm 1: movdqa <mp=int6464#5,>mt=int6464#1 | ||
1304 | # asm 2: movdqa <mp=%xmm4,>mt=%xmm0 | ||
1305 | movdqa %xmm4,%xmm0 | ||
1306 | |||
1307 | # qhasm: uint32323232 mt += mq | ||
1308 | # asm 1: paddd <mq=int6464#4,<mt=int6464#1 | ||
1309 | # asm 2: paddd <mq=%xmm3,<mt=%xmm0 | ||
1310 | paddd %xmm3,%xmm0 | ||
1311 | |||
1312 | # qhasm: mu = mt | ||
1313 | # asm 1: movdqa <mt=int6464#1,>mu=int6464#2 | ||
1314 | # asm 2: movdqa <mt=%xmm0,>mu=%xmm1 | ||
1315 | movdqa %xmm0,%xmm1 | ||
1316 | |||
1317 | # qhasm: uint32323232 mt >>= 23 | ||
1318 | # asm 1: psrld $23,<mt=int6464#1 | ||
1319 | # asm 2: psrld $23,<mt=%xmm0 | ||
1320 | psrld $23,%xmm0 | ||
1321 | |||
1322 | # qhasm: mr ^= mt | ||
1323 | # asm 1: pxor <mt=int6464#1,<mr=int6464#6 | ||
1324 | # asm 2: pxor <mt=%xmm0,<mr=%xmm5 | ||
1325 | pxor %xmm0,%xmm5 | ||
1326 | |||
1327 | # qhasm: uint32323232 mu <<= 9 | ||
1328 | # asm 1: pslld $9,<mu=int6464#2 | ||
1329 | # asm 2: pslld $9,<mu=%xmm1 | ||
1330 | pslld $9,%xmm1 | ||
1331 | |||
1332 | # qhasm: mr ^= mu | ||
1333 | # asm 1: pxor <mu=int6464#2,<mr=int6464#6 | ||
1334 | # asm 2: pxor <mu=%xmm1,<mr=%xmm5 | ||
1335 | pxor %xmm1,%xmm5 | ||
1336 | |||
1337 | # qhasm: z13_stack = mr | ||
1338 | # asm 1: movdqa <mr=int6464#6,>z13_stack=stack128#35 | ||
1339 | # asm 2: movdqa <mr=%xmm5,>z13_stack=576(%esp) | ||
1340 | movdqa %xmm5,576(%esp) | ||
1341 | |||
1342 | # qhasm: uint32323232 mq += mr | ||
1343 | # asm 1: paddd <mr=int6464#6,<mq=int6464#4 | ||
1344 | # asm 2: paddd <mr=%xmm5,<mq=%xmm3 | ||
1345 | paddd %xmm5,%xmm3 | ||
1346 | |||
1347 | # qhasm: mu = mq | ||
1348 | # asm 1: movdqa <mq=int6464#4,>mu=int6464#1 | ||
1349 | # asm 2: movdqa <mq=%xmm3,>mu=%xmm0 | ||
1350 | movdqa %xmm3,%xmm0 | ||
1351 | |||
1352 | # qhasm: uint32323232 mq >>= 19 | ||
1353 | # asm 1: psrld $19,<mq=int6464#4 | ||
1354 | # asm 2: psrld $19,<mq=%xmm3 | ||
1355 | psrld $19,%xmm3 | ||
1356 | |||
1357 | # qhasm: ms ^= mq | ||
1358 | # asm 1: pxor <mq=int6464#4,<ms=int6464#7 | ||
1359 | # asm 2: pxor <mq=%xmm3,<ms=%xmm6 | ||
1360 | pxor %xmm3,%xmm6 | ||
1361 | |||
1362 | # qhasm: uint32323232 mu <<= 13 | ||
1363 | # asm 1: pslld $13,<mu=int6464#1 | ||
1364 | # asm 2: pslld $13,<mu=%xmm0 | ||
1365 | pslld $13,%xmm0 | ||
1366 | |||
1367 | # qhasm: ms ^= mu | ||
1368 | # asm 1: pxor <mu=int6464#1,<ms=int6464#7 | ||
1369 | # asm 2: pxor <mu=%xmm0,<ms=%xmm6 | ||
1370 | pxor %xmm0,%xmm6 | ||
1371 | |||
1372 | # qhasm: t = z6_stack | ||
1373 | # asm 1: movdqa <z6_stack=stack128#26,>t=int6464#3 | ||
1374 | # asm 2: movdqa <z6_stack=432(%esp),>t=%xmm2 | ||
1375 | movdqa 432(%esp),%xmm2 | ||
1376 | |||
1377 | # qhasm: p = z10_stack | ||
1378 | # asm 1: movdqa <z10_stack=stack128#22,>p=int6464#1 | ||
1379 | # asm 2: movdqa <z10_stack=368(%esp),>p=%xmm0 | ||
1380 | movdqa 368(%esp),%xmm0 | ||
1381 | |||
1382 | # qhasm: q = z14_stack | ||
1383 | # asm 1: movdqa <z14_stack=stack128#24,>q=int6464#4 | ||
1384 | # asm 2: movdqa <z14_stack=400(%esp),>q=%xmm3 | ||
1385 | movdqa 400(%esp),%xmm3 | ||
1386 | |||
1387 | # qhasm: r = z2_stack | ||
1388 | # asm 1: movdqa <z2_stack=stack128#31,>r=int6464#2 | ||
1389 | # asm 2: movdqa <z2_stack=512(%esp),>r=%xmm1 | ||
1390 | movdqa 512(%esp),%xmm1 | ||
1391 | |||
1392 | # qhasm: z1_stack = ms | ||
1393 | # asm 1: movdqa <ms=int6464#7,>z1_stack=stack128#22 | ||
1394 | # asm 2: movdqa <ms=%xmm6,>z1_stack=368(%esp) | ||
1395 | movdqa %xmm6,368(%esp) | ||
1396 | |||
1397 | # qhasm: uint32323232 mr += ms | ||
1398 | # asm 1: paddd <ms=int6464#7,<mr=int6464#6 | ||
1399 | # asm 2: paddd <ms=%xmm6,<mr=%xmm5 | ||
1400 | paddd %xmm6,%xmm5 | ||
1401 | |||
1402 | # qhasm: mu = mr | ||
1403 | # asm 1: movdqa <mr=int6464#6,>mu=int6464#7 | ||
1404 | # asm 2: movdqa <mr=%xmm5,>mu=%xmm6 | ||
1405 | movdqa %xmm5,%xmm6 | ||
1406 | |||
1407 | # qhasm: uint32323232 mr >>= 14 | ||
1408 | # asm 1: psrld $14,<mr=int6464#6 | ||
1409 | # asm 2: psrld $14,<mr=%xmm5 | ||
1410 | psrld $14,%xmm5 | ||
1411 | |||
1412 | # qhasm: mp ^= mr | ||
1413 | # asm 1: pxor <mr=int6464#6,<mp=int6464#5 | ||
1414 | # asm 2: pxor <mr=%xmm5,<mp=%xmm4 | ||
1415 | pxor %xmm5,%xmm4 | ||
1416 | |||
1417 | # qhasm: uint32323232 mu <<= 18 | ||
1418 | # asm 1: pslld $18,<mu=int6464#7 | ||
1419 | # asm 2: pslld $18,<mu=%xmm6 | ||
1420 | pslld $18,%xmm6 | ||
1421 | |||
1422 | # qhasm: mp ^= mu | ||
1423 | # asm 1: pxor <mu=int6464#7,<mp=int6464#5 | ||
1424 | # asm 2: pxor <mu=%xmm6,<mp=%xmm4 | ||
1425 | pxor %xmm6,%xmm4 | ||
1426 | |||
1427 | # qhasm: z5_stack = mp | ||
1428 | # asm 1: movdqa <mp=int6464#5,>z5_stack=stack128#24 | ||
1429 | # asm 2: movdqa <mp=%xmm4,>z5_stack=400(%esp) | ||
1430 | movdqa %xmm4,400(%esp) | ||
1431 | |||
1432 | # qhasm: assign xmm0 to p | ||
1433 | |||
1434 | # qhasm: assign xmm1 to r | ||
1435 | |||
1436 | # qhasm: assign xmm2 to t | ||
1437 | |||
1438 | # qhasm: assign xmm3 to q | ||
1439 | |||
1440 | # qhasm: s = t | ||
1441 | # asm 1: movdqa <t=int6464#3,>s=int6464#7 | ||
1442 | # asm 2: movdqa <t=%xmm2,>s=%xmm6 | ||
1443 | movdqa %xmm2,%xmm6 | ||
1444 | |||
1445 | # qhasm: uint32323232 t += p | ||
1446 | # asm 1: paddd <p=int6464#1,<t=int6464#3 | ||
1447 | # asm 2: paddd <p=%xmm0,<t=%xmm2 | ||
1448 | paddd %xmm0,%xmm2 | ||
1449 | |||
1450 | # qhasm: u = t | ||
1451 | # asm 1: movdqa <t=int6464#3,>u=int6464#5 | ||
1452 | # asm 2: movdqa <t=%xmm2,>u=%xmm4 | ||
1453 | movdqa %xmm2,%xmm4 | ||
1454 | |||
1455 | # qhasm: uint32323232 t >>= 25 | ||
1456 | # asm 1: psrld $25,<t=int6464#3 | ||
1457 | # asm 2: psrld $25,<t=%xmm2 | ||
1458 | psrld $25,%xmm2 | ||
1459 | |||
1460 | # qhasm: q ^= t | ||
1461 | # asm 1: pxor <t=int6464#3,<q=int6464#4 | ||
1462 | # asm 2: pxor <t=%xmm2,<q=%xmm3 | ||
1463 | pxor %xmm2,%xmm3 | ||
1464 | |||
1465 | # qhasm: uint32323232 u <<= 7 | ||
1466 | # asm 1: pslld $7,<u=int6464#5 | ||
1467 | # asm 2: pslld $7,<u=%xmm4 | ||
1468 | pslld $7,%xmm4 | ||
1469 | |||
1470 | # qhasm: q ^= u | ||
1471 | # asm 1: pxor <u=int6464#5,<q=int6464#4 | ||
1472 | # asm 2: pxor <u=%xmm4,<q=%xmm3 | ||
1473 | pxor %xmm4,%xmm3 | ||
1474 | |||
1475 | # qhasm: z14_stack = q | ||
1476 | # asm 1: movdqa <q=int6464#4,>z14_stack=stack128#36 | ||
1477 | # asm 2: movdqa <q=%xmm3,>z14_stack=592(%esp) | ||
1478 | movdqa %xmm3,592(%esp) | ||
1479 | |||
1480 | # qhasm: t = p | ||
1481 | # asm 1: movdqa <p=int6464#1,>t=int6464#3 | ||
1482 | # asm 2: movdqa <p=%xmm0,>t=%xmm2 | ||
1483 | movdqa %xmm0,%xmm2 | ||
1484 | |||
1485 | # qhasm: uint32323232 t += q | ||
1486 | # asm 1: paddd <q=int6464#4,<t=int6464#3 | ||
1487 | # asm 2: paddd <q=%xmm3,<t=%xmm2 | ||
1488 | paddd %xmm3,%xmm2 | ||
1489 | |||
1490 | # qhasm: u = t | ||
1491 | # asm 1: movdqa <t=int6464#3,>u=int6464#5 | ||
1492 | # asm 2: movdqa <t=%xmm2,>u=%xmm4 | ||
1493 | movdqa %xmm2,%xmm4 | ||
1494 | |||
1495 | # qhasm: uint32323232 t >>= 23 | ||
1496 | # asm 1: psrld $23,<t=int6464#3 | ||
1497 | # asm 2: psrld $23,<t=%xmm2 | ||
1498 | psrld $23,%xmm2 | ||
1499 | |||
1500 | # qhasm: r ^= t | ||
1501 | # asm 1: pxor <t=int6464#3,<r=int6464#2 | ||
1502 | # asm 2: pxor <t=%xmm2,<r=%xmm1 | ||
1503 | pxor %xmm2,%xmm1 | ||
1504 | |||
1505 | # qhasm: uint32323232 u <<= 9 | ||
1506 | # asm 1: pslld $9,<u=int6464#5 | ||
1507 | # asm 2: pslld $9,<u=%xmm4 | ||
1508 | pslld $9,%xmm4 | ||
1509 | |||
1510 | # qhasm: r ^= u | ||
1511 | # asm 1: pxor <u=int6464#5,<r=int6464#2 | ||
1512 | # asm 2: pxor <u=%xmm4,<r=%xmm1 | ||
1513 | pxor %xmm4,%xmm1 | ||
1514 | |||
1515 | # qhasm: z2_stack = r | ||
1516 | # asm 1: movdqa <r=int6464#2,>z2_stack=stack128#26 | ||
1517 | # asm 2: movdqa <r=%xmm1,>z2_stack=432(%esp) | ||
1518 | movdqa %xmm1,432(%esp) | ||
1519 | |||
1520 | # qhasm: uint32323232 q += r | ||
1521 | # asm 1: paddd <r=int6464#2,<q=int6464#4 | ||
1522 | # asm 2: paddd <r=%xmm1,<q=%xmm3 | ||
1523 | paddd %xmm1,%xmm3 | ||
1524 | |||
1525 | # qhasm: u = q | ||
1526 | # asm 1: movdqa <q=int6464#4,>u=int6464#3 | ||
1527 | # asm 2: movdqa <q=%xmm3,>u=%xmm2 | ||
1528 | movdqa %xmm3,%xmm2 | ||
1529 | |||
1530 | # qhasm: uint32323232 q >>= 19 | ||
1531 | # asm 1: psrld $19,<q=int6464#4 | ||
1532 | # asm 2: psrld $19,<q=%xmm3 | ||
1533 | psrld $19,%xmm3 | ||
1534 | |||
1535 | # qhasm: s ^= q | ||
1536 | # asm 1: pxor <q=int6464#4,<s=int6464#7 | ||
1537 | # asm 2: pxor <q=%xmm3,<s=%xmm6 | ||
1538 | pxor %xmm3,%xmm6 | ||
1539 | |||
1540 | # qhasm: uint32323232 u <<= 13 | ||
1541 | # asm 1: pslld $13,<u=int6464#3 | ||
1542 | # asm 2: pslld $13,<u=%xmm2 | ||
1543 | pslld $13,%xmm2 | ||
1544 | |||
1545 | # qhasm: s ^= u | ||
1546 | # asm 1: pxor <u=int6464#3,<s=int6464#7 | ||
1547 | # asm 2: pxor <u=%xmm2,<s=%xmm6 | ||
1548 | pxor %xmm2,%xmm6 | ||
1549 | |||
1550 | # qhasm: mt = z11_stack | ||
1551 | # asm 1: movdqa <z11_stack=stack128#27,>mt=int6464#3 | ||
1552 | # asm 2: movdqa <z11_stack=448(%esp),>mt=%xmm2 | ||
1553 | movdqa 448(%esp),%xmm2 | ||
1554 | |||
1555 | # qhasm: mp = z15_stack | ||
1556 | # asm 1: movdqa <z15_stack=stack128#23,>mp=int6464#5 | ||
1557 | # asm 2: movdqa <z15_stack=384(%esp),>mp=%xmm4 | ||
1558 | movdqa 384(%esp),%xmm4 | ||
1559 | |||
1560 | # qhasm: mq = z3_stack | ||
1561 | # asm 1: movdqa <z3_stack=stack128#25,>mq=int6464#4 | ||
1562 | # asm 2: movdqa <z3_stack=416(%esp),>mq=%xmm3 | ||
1563 | movdqa 416(%esp),%xmm3 | ||
1564 | |||
1565 | # qhasm: mr = z7_stack | ||
1566 | # asm 1: movdqa <z7_stack=stack128#29,>mr=int6464#6 | ||
1567 | # asm 2: movdqa <z7_stack=480(%esp),>mr=%xmm5 | ||
1568 | movdqa 480(%esp),%xmm5 | ||
1569 | |||
1570 | # qhasm: z6_stack = s | ||
1571 | # asm 1: movdqa <s=int6464#7,>z6_stack=stack128#23 | ||
1572 | # asm 2: movdqa <s=%xmm6,>z6_stack=384(%esp) | ||
1573 | movdqa %xmm6,384(%esp) | ||
1574 | |||
1575 | # qhasm: uint32323232 r += s | ||
1576 | # asm 1: paddd <s=int6464#7,<r=int6464#2 | ||
1577 | # asm 2: paddd <s=%xmm6,<r=%xmm1 | ||
1578 | paddd %xmm6,%xmm1 | ||
1579 | |||
1580 | # qhasm: u = r | ||
1581 | # asm 1: movdqa <r=int6464#2,>u=int6464#7 | ||
1582 | # asm 2: movdqa <r=%xmm1,>u=%xmm6 | ||
1583 | movdqa %xmm1,%xmm6 | ||
1584 | |||
1585 | # qhasm: uint32323232 r >>= 14 | ||
1586 | # asm 1: psrld $14,<r=int6464#2 | ||
1587 | # asm 2: psrld $14,<r=%xmm1 | ||
1588 | psrld $14,%xmm1 | ||
1589 | |||
1590 | # qhasm: p ^= r | ||
1591 | # asm 1: pxor <r=int6464#2,<p=int6464#1 | ||
1592 | # asm 2: pxor <r=%xmm1,<p=%xmm0 | ||
1593 | pxor %xmm1,%xmm0 | ||
1594 | |||
1595 | # qhasm: uint32323232 u <<= 18 | ||
1596 | # asm 1: pslld $18,<u=int6464#7 | ||
1597 | # asm 2: pslld $18,<u=%xmm6 | ||
1598 | pslld $18,%xmm6 | ||
1599 | |||
1600 | # qhasm: p ^= u | ||
1601 | # asm 1: pxor <u=int6464#7,<p=int6464#1 | ||
1602 | # asm 2: pxor <u=%xmm6,<p=%xmm0 | ||
1603 | pxor %xmm6,%xmm0 | ||
1604 | |||
1605 | # qhasm: z10_stack = p | ||
1606 | # asm 1: movdqa <p=int6464#1,>z10_stack=stack128#27 | ||
1607 | # asm 2: movdqa <p=%xmm0,>z10_stack=448(%esp) | ||
1608 | movdqa %xmm0,448(%esp) | ||
1609 | |||
1610 | # qhasm: assign xmm2 to mt | ||
1611 | |||
1612 | # qhasm: assign xmm3 to mq | ||
1613 | |||
1614 | # qhasm: assign xmm4 to mp | ||
1615 | |||
1616 | # qhasm: assign xmm5 to mr | ||
1617 | |||
1618 | # qhasm: ms = mt | ||
1619 | # asm 1: movdqa <mt=int6464#3,>ms=int6464#7 | ||
1620 | # asm 2: movdqa <mt=%xmm2,>ms=%xmm6 | ||
1621 | movdqa %xmm2,%xmm6 | ||
1622 | |||
1623 | # qhasm: uint32323232 mt += mp | ||
1624 | # asm 1: paddd <mp=int6464#5,<mt=int6464#3 | ||
1625 | # asm 2: paddd <mp=%xmm4,<mt=%xmm2 | ||
1626 | paddd %xmm4,%xmm2 | ||
1627 | |||
1628 | # qhasm: mu = mt | ||
1629 | # asm 1: movdqa <mt=int6464#3,>mu=int6464#1 | ||
1630 | # asm 2: movdqa <mt=%xmm2,>mu=%xmm0 | ||
1631 | movdqa %xmm2,%xmm0 | ||
1632 | |||
1633 | # qhasm: uint32323232 mt >>= 25 | ||
1634 | # asm 1: psrld $25,<mt=int6464#3 | ||
1635 | # asm 2: psrld $25,<mt=%xmm2 | ||
1636 | psrld $25,%xmm2 | ||
1637 | |||
1638 | # qhasm: mq ^= mt | ||
1639 | # asm 1: pxor <mt=int6464#3,<mq=int6464#4 | ||
1640 | # asm 2: pxor <mt=%xmm2,<mq=%xmm3 | ||
1641 | pxor %xmm2,%xmm3 | ||
1642 | |||
1643 | # qhasm: uint32323232 mu <<= 7 | ||
1644 | # asm 1: pslld $7,<mu=int6464#1 | ||
1645 | # asm 2: pslld $7,<mu=%xmm0 | ||
1646 | pslld $7,%xmm0 | ||
1647 | |||
1648 | # qhasm: mq ^= mu | ||
1649 | # asm 1: pxor <mu=int6464#1,<mq=int6464#4 | ||
1650 | # asm 2: pxor <mu=%xmm0,<mq=%xmm3 | ||
1651 | pxor %xmm0,%xmm3 | ||
1652 | |||
1653 | # qhasm: z3_stack = mq | ||
1654 | # asm 1: movdqa <mq=int6464#4,>z3_stack=stack128#25 | ||
1655 | # asm 2: movdqa <mq=%xmm3,>z3_stack=416(%esp) | ||
1656 | movdqa %xmm3,416(%esp) | ||
1657 | |||
1658 | # qhasm: mt = mp | ||
1659 | # asm 1: movdqa <mp=int6464#5,>mt=int6464#1 | ||
1660 | # asm 2: movdqa <mp=%xmm4,>mt=%xmm0 | ||
1661 | movdqa %xmm4,%xmm0 | ||
1662 | |||
1663 | # qhasm: uint32323232 mt += mq | ||
1664 | # asm 1: paddd <mq=int6464#4,<mt=int6464#1 | ||
1665 | # asm 2: paddd <mq=%xmm3,<mt=%xmm0 | ||
1666 | paddd %xmm3,%xmm0 | ||
1667 | |||
1668 | # qhasm: mu = mt | ||
1669 | # asm 1: movdqa <mt=int6464#1,>mu=int6464#2 | ||
1670 | # asm 2: movdqa <mt=%xmm0,>mu=%xmm1 | ||
1671 | movdqa %xmm0,%xmm1 | ||
1672 | |||
1673 | # qhasm: uint32323232 mt >>= 23 | ||
1674 | # asm 1: psrld $23,<mt=int6464#1 | ||
1675 | # asm 2: psrld $23,<mt=%xmm0 | ||
1676 | psrld $23,%xmm0 | ||
1677 | |||
1678 | # qhasm: mr ^= mt | ||
1679 | # asm 1: pxor <mt=int6464#1,<mr=int6464#6 | ||
1680 | # asm 2: pxor <mt=%xmm0,<mr=%xmm5 | ||
1681 | pxor %xmm0,%xmm5 | ||
1682 | |||
1683 | # qhasm: uint32323232 mu <<= 9 | ||
1684 | # asm 1: pslld $9,<mu=int6464#2 | ||
1685 | # asm 2: pslld $9,<mu=%xmm1 | ||
1686 | pslld $9,%xmm1 | ||
1687 | |||
1688 | # qhasm: mr ^= mu | ||
1689 | # asm 1: pxor <mu=int6464#2,<mr=int6464#6 | ||
1690 | # asm 2: pxor <mu=%xmm1,<mr=%xmm5 | ||
1691 | pxor %xmm1,%xmm5 | ||
1692 | |||
1693 | # qhasm: z7_stack = mr | ||
1694 | # asm 1: movdqa <mr=int6464#6,>z7_stack=stack128#29 | ||
1695 | # asm 2: movdqa <mr=%xmm5,>z7_stack=480(%esp) | ||
1696 | movdqa %xmm5,480(%esp) | ||
1697 | |||
1698 | # qhasm: uint32323232 mq += mr | ||
1699 | # asm 1: paddd <mr=int6464#6,<mq=int6464#4 | ||
1700 | # asm 2: paddd <mr=%xmm5,<mq=%xmm3 | ||
1701 | paddd %xmm5,%xmm3 | ||
1702 | |||
1703 | # qhasm: mu = mq | ||
1704 | # asm 1: movdqa <mq=int6464#4,>mu=int6464#1 | ||
1705 | # asm 2: movdqa <mq=%xmm3,>mu=%xmm0 | ||
1706 | movdqa %xmm3,%xmm0 | ||
1707 | |||
1708 | # qhasm: uint32323232 mq >>= 19 | ||
1709 | # asm 1: psrld $19,<mq=int6464#4 | ||
1710 | # asm 2: psrld $19,<mq=%xmm3 | ||
1711 | psrld $19,%xmm3 | ||
1712 | |||
1713 | # qhasm: ms ^= mq | ||
1714 | # asm 1: pxor <mq=int6464#4,<ms=int6464#7 | ||
1715 | # asm 2: pxor <mq=%xmm3,<ms=%xmm6 | ||
1716 | pxor %xmm3,%xmm6 | ||
1717 | |||
1718 | # qhasm: uint32323232 mu <<= 13 | ||
1719 | # asm 1: pslld $13,<mu=int6464#1 | ||
1720 | # asm 2: pslld $13,<mu=%xmm0 | ||
1721 | pslld $13,%xmm0 | ||
1722 | |||
1723 | # qhasm: ms ^= mu | ||
1724 | # asm 1: pxor <mu=int6464#1,<ms=int6464#7 | ||
1725 | # asm 2: pxor <mu=%xmm0,<ms=%xmm6 | ||
1726 | pxor %xmm0,%xmm6 | ||
1727 | |||
1728 | # qhasm: t = z3_stack | ||
1729 | # asm 1: movdqa <z3_stack=stack128#25,>t=int6464#3 | ||
1730 | # asm 2: movdqa <z3_stack=416(%esp),>t=%xmm2 | ||
1731 | movdqa 416(%esp),%xmm2 | ||
1732 | |||
1733 | # qhasm: p = z0_stack | ||
1734 | # asm 1: movdqa <z0_stack=stack128#21,>p=int6464#1 | ||
1735 | # asm 2: movdqa <z0_stack=352(%esp),>p=%xmm0 | ||
1736 | movdqa 352(%esp),%xmm0 | ||
1737 | |||
1738 | # qhasm: q = z1_stack | ||
1739 | # asm 1: movdqa <z1_stack=stack128#22,>q=int6464#4 | ||
1740 | # asm 2: movdqa <z1_stack=368(%esp),>q=%xmm3 | ||
1741 | movdqa 368(%esp),%xmm3 | ||
1742 | |||
1743 | # qhasm: r = z2_stack | ||
1744 | # asm 1: movdqa <z2_stack=stack128#26,>r=int6464#2 | ||
1745 | # asm 2: movdqa <z2_stack=432(%esp),>r=%xmm1 | ||
1746 | movdqa 432(%esp),%xmm1 | ||
1747 | |||
1748 | # qhasm: z11_stack = ms | ||
1749 | # asm 1: movdqa <ms=int6464#7,>z11_stack=stack128#21 | ||
1750 | # asm 2: movdqa <ms=%xmm6,>z11_stack=352(%esp) | ||
1751 | movdqa %xmm6,352(%esp) | ||
1752 | |||
1753 | # qhasm: uint32323232 mr += ms | ||
1754 | # asm 1: paddd <ms=int6464#7,<mr=int6464#6 | ||
1755 | # asm 2: paddd <ms=%xmm6,<mr=%xmm5 | ||
1756 | paddd %xmm6,%xmm5 | ||
1757 | |||
1758 | # qhasm: mu = mr | ||
1759 | # asm 1: movdqa <mr=int6464#6,>mu=int6464#7 | ||
1760 | # asm 2: movdqa <mr=%xmm5,>mu=%xmm6 | ||
1761 | movdqa %xmm5,%xmm6 | ||
1762 | |||
1763 | # qhasm: uint32323232 mr >>= 14 | ||
1764 | # asm 1: psrld $14,<mr=int6464#6 | ||
1765 | # asm 2: psrld $14,<mr=%xmm5 | ||
1766 | psrld $14,%xmm5 | ||
1767 | |||
1768 | # qhasm: mp ^= mr | ||
1769 | # asm 1: pxor <mr=int6464#6,<mp=int6464#5 | ||
1770 | # asm 2: pxor <mr=%xmm5,<mp=%xmm4 | ||
1771 | pxor %xmm5,%xmm4 | ||
1772 | |||
1773 | # qhasm: uint32323232 mu <<= 18 | ||
1774 | # asm 1: pslld $18,<mu=int6464#7 | ||
1775 | # asm 2: pslld $18,<mu=%xmm6 | ||
1776 | pslld $18,%xmm6 | ||
1777 | |||
1778 | # qhasm: mp ^= mu | ||
1779 | # asm 1: pxor <mu=int6464#7,<mp=int6464#5 | ||
1780 | # asm 2: pxor <mu=%xmm6,<mp=%xmm4 | ||
1781 | pxor %xmm6,%xmm4 | ||
1782 | |||
1783 | # qhasm: z15_stack = mp | ||
1784 | # asm 1: movdqa <mp=int6464#5,>z15_stack=stack128#22 | ||
1785 | # asm 2: movdqa <mp=%xmm4,>z15_stack=368(%esp) | ||
1786 | movdqa %xmm4,368(%esp) | ||
1787 | |||
1788 | # qhasm: assign xmm0 to p | ||
1789 | |||
1790 | # qhasm: assign xmm1 to r | ||
1791 | |||
1792 | # qhasm: assign xmm2 to t | ||
1793 | |||
1794 | # qhasm: assign xmm3 to q | ||
1795 | |||
1796 | # qhasm: s = t | ||
1797 | # asm 1: movdqa <t=int6464#3,>s=int6464#7 | ||
1798 | # asm 2: movdqa <t=%xmm2,>s=%xmm6 | ||
1799 | movdqa %xmm2,%xmm6 | ||
1800 | |||
1801 | # qhasm: uint32323232 t += p | ||
1802 | # asm 1: paddd <p=int6464#1,<t=int6464#3 | ||
1803 | # asm 2: paddd <p=%xmm0,<t=%xmm2 | ||
1804 | paddd %xmm0,%xmm2 | ||
1805 | |||
1806 | # qhasm: u = t | ||
1807 | # asm 1: movdqa <t=int6464#3,>u=int6464#5 | ||
1808 | # asm 2: movdqa <t=%xmm2,>u=%xmm4 | ||
1809 | movdqa %xmm2,%xmm4 | ||
1810 | |||
1811 | # qhasm: uint32323232 t >>= 25 | ||
1812 | # asm 1: psrld $25,<t=int6464#3 | ||
1813 | # asm 2: psrld $25,<t=%xmm2 | ||
1814 | psrld $25,%xmm2 | ||
1815 | |||
1816 | # qhasm: q ^= t | ||
1817 | # asm 1: pxor <t=int6464#3,<q=int6464#4 | ||
1818 | # asm 2: pxor <t=%xmm2,<q=%xmm3 | ||
1819 | pxor %xmm2,%xmm3 | ||
1820 | |||
1821 | # qhasm: uint32323232 u <<= 7 | ||
1822 | # asm 1: pslld $7,<u=int6464#5 | ||
1823 | # asm 2: pslld $7,<u=%xmm4 | ||
1824 | pslld $7,%xmm4 | ||
1825 | |||
1826 | # qhasm: q ^= u | ||
1827 | # asm 1: pxor <u=int6464#5,<q=int6464#4 | ||
1828 | # asm 2: pxor <u=%xmm4,<q=%xmm3 | ||
1829 | pxor %xmm4,%xmm3 | ||
1830 | |||
1831 | # qhasm: z1_stack = q | ||
1832 | # asm 1: movdqa <q=int6464#4,>z1_stack=stack128#28 | ||
1833 | # asm 2: movdqa <q=%xmm3,>z1_stack=464(%esp) | ||
1834 | movdqa %xmm3,464(%esp) | ||
1835 | |||
1836 | # qhasm: t = p | ||
1837 | # asm 1: movdqa <p=int6464#1,>t=int6464#3 | ||
1838 | # asm 2: movdqa <p=%xmm0,>t=%xmm2 | ||
1839 | movdqa %xmm0,%xmm2 | ||
1840 | |||
1841 | # qhasm: uint32323232 t += q | ||
1842 | # asm 1: paddd <q=int6464#4,<t=int6464#3 | ||
1843 | # asm 2: paddd <q=%xmm3,<t=%xmm2 | ||
1844 | paddd %xmm3,%xmm2 | ||
1845 | |||
1846 | # qhasm: u = t | ||
1847 | # asm 1: movdqa <t=int6464#3,>u=int6464#5 | ||
1848 | # asm 2: movdqa <t=%xmm2,>u=%xmm4 | ||
1849 | movdqa %xmm2,%xmm4 | ||
1850 | |||
1851 | # qhasm: uint32323232 t >>= 23 | ||
1852 | # asm 1: psrld $23,<t=int6464#3 | ||
1853 | # asm 2: psrld $23,<t=%xmm2 | ||
1854 | psrld $23,%xmm2 | ||
1855 | |||
1856 | # qhasm: r ^= t | ||
1857 | # asm 1: pxor <t=int6464#3,<r=int6464#2 | ||
1858 | # asm 2: pxor <t=%xmm2,<r=%xmm1 | ||
1859 | pxor %xmm2,%xmm1 | ||
1860 | |||
1861 | # qhasm: uint32323232 u <<= 9 | ||
1862 | # asm 1: pslld $9,<u=int6464#5 | ||
1863 | # asm 2: pslld $9,<u=%xmm4 | ||
1864 | pslld $9,%xmm4 | ||
1865 | |||
1866 | # qhasm: r ^= u | ||
1867 | # asm 1: pxor <u=int6464#5,<r=int6464#2 | ||
1868 | # asm 2: pxor <u=%xmm4,<r=%xmm1 | ||
1869 | pxor %xmm4,%xmm1 | ||
1870 | |||
1871 | # qhasm: z2_stack = r | ||
1872 | # asm 1: movdqa <r=int6464#2,>z2_stack=stack128#31 | ||
1873 | # asm 2: movdqa <r=%xmm1,>z2_stack=512(%esp) | ||
1874 | movdqa %xmm1,512(%esp) | ||
1875 | |||
1876 | # qhasm: uint32323232 q += r | ||
1877 | # asm 1: paddd <r=int6464#2,<q=int6464#4 | ||
1878 | # asm 2: paddd <r=%xmm1,<q=%xmm3 | ||
1879 | paddd %xmm1,%xmm3 | ||
1880 | |||
1881 | # qhasm: u = q | ||
1882 | # asm 1: movdqa <q=int6464#4,>u=int6464#3 | ||
1883 | # asm 2: movdqa <q=%xmm3,>u=%xmm2 | ||
1884 | movdqa %xmm3,%xmm2 | ||
1885 | |||
1886 | # qhasm: uint32323232 q >>= 19 | ||
1887 | # asm 1: psrld $19,<q=int6464#4 | ||
1888 | # asm 2: psrld $19,<q=%xmm3 | ||
1889 | psrld $19,%xmm3 | ||
1890 | |||
1891 | # qhasm: s ^= q | ||
1892 | # asm 1: pxor <q=int6464#4,<s=int6464#7 | ||
1893 | # asm 2: pxor <q=%xmm3,<s=%xmm6 | ||
1894 | pxor %xmm3,%xmm6 | ||
1895 | |||
1896 | # qhasm: uint32323232 u <<= 13 | ||
1897 | # asm 1: pslld $13,<u=int6464#3 | ||
1898 | # asm 2: pslld $13,<u=%xmm2 | ||
1899 | pslld $13,%xmm2 | ||
1900 | |||
1901 | # qhasm: s ^= u | ||
1902 | # asm 1: pxor <u=int6464#3,<s=int6464#7 | ||
1903 | # asm 2: pxor <u=%xmm2,<s=%xmm6 | ||
1904 | pxor %xmm2,%xmm6 | ||
1905 | |||
1906 | # qhasm: mt = z4_stack | ||
1907 | # asm 1: movdqa <z4_stack=stack128#33,>mt=int6464#3 | ||
1908 | # asm 2: movdqa <z4_stack=544(%esp),>mt=%xmm2 | ||
1909 | movdqa 544(%esp),%xmm2 | ||
1910 | |||
1911 | # qhasm: mp = z5_stack | ||
1912 | # asm 1: movdqa <z5_stack=stack128#24,>mp=int6464#5 | ||
1913 | # asm 2: movdqa <z5_stack=400(%esp),>mp=%xmm4 | ||
1914 | movdqa 400(%esp),%xmm4 | ||
1915 | |||
1916 | # qhasm: mq = z6_stack | ||
1917 | # asm 1: movdqa <z6_stack=stack128#23,>mq=int6464#4 | ||
1918 | # asm 2: movdqa <z6_stack=384(%esp),>mq=%xmm3 | ||
1919 | movdqa 384(%esp),%xmm3 | ||
1920 | |||
1921 | # qhasm: mr = z7_stack | ||
1922 | # asm 1: movdqa <z7_stack=stack128#29,>mr=int6464#6 | ||
1923 | # asm 2: movdqa <z7_stack=480(%esp),>mr=%xmm5 | ||
1924 | movdqa 480(%esp),%xmm5 | ||
1925 | |||
1926 | # qhasm: z3_stack = s | ||
1927 | # asm 1: movdqa <s=int6464#7,>z3_stack=stack128#25 | ||
1928 | # asm 2: movdqa <s=%xmm6,>z3_stack=416(%esp) | ||
1929 | movdqa %xmm6,416(%esp) | ||
1930 | |||
1931 | # qhasm: uint32323232 r += s | ||
1932 | # asm 1: paddd <s=int6464#7,<r=int6464#2 | ||
1933 | # asm 2: paddd <s=%xmm6,<r=%xmm1 | ||
1934 | paddd %xmm6,%xmm1 | ||
1935 | |||
1936 | # qhasm: u = r | ||
1937 | # asm 1: movdqa <r=int6464#2,>u=int6464#7 | ||
1938 | # asm 2: movdqa <r=%xmm1,>u=%xmm6 | ||
1939 | movdqa %xmm1,%xmm6 | ||
1940 | |||
1941 | # qhasm: uint32323232 r >>= 14 | ||
1942 | # asm 1: psrld $14,<r=int6464#2 | ||
1943 | # asm 2: psrld $14,<r=%xmm1 | ||
1944 | psrld $14,%xmm1 | ||
1945 | |||
1946 | # qhasm: p ^= r | ||
1947 | # asm 1: pxor <r=int6464#2,<p=int6464#1 | ||
1948 | # asm 2: pxor <r=%xmm1,<p=%xmm0 | ||
1949 | pxor %xmm1,%xmm0 | ||
1950 | |||
1951 | # qhasm: uint32323232 u <<= 18 | ||
1952 | # asm 1: pslld $18,<u=int6464#7 | ||
1953 | # asm 2: pslld $18,<u=%xmm6 | ||
1954 | pslld $18,%xmm6 | ||
1955 | |||
1956 | # qhasm: p ^= u | ||
1957 | # asm 1: pxor <u=int6464#7,<p=int6464#1 | ||
1958 | # asm 2: pxor <u=%xmm6,<p=%xmm0 | ||
1959 | pxor %xmm6,%xmm0 | ||
1960 | |||
1961 | # qhasm: z0_stack = p | ||
1962 | # asm 1: movdqa <p=int6464#1,>z0_stack=stack128#33 | ||
1963 | # asm 2: movdqa <p=%xmm0,>z0_stack=544(%esp) | ||
1964 | movdqa %xmm0,544(%esp) | ||
1965 | |||
1966 | # qhasm: assign xmm2 to mt | ||
1967 | |||
1968 | # qhasm: assign xmm3 to mq | ||
1969 | |||
1970 | # qhasm: assign xmm4 to mp | ||
1971 | |||
1972 | # qhasm: assign xmm5 to mr | ||
1973 | |||
1974 | # qhasm: ms = mt | ||
1975 | # asm 1: movdqa <mt=int6464#3,>ms=int6464#7 | ||
1976 | # asm 2: movdqa <mt=%xmm2,>ms=%xmm6 | ||
1977 | movdqa %xmm2,%xmm6 | ||
1978 | |||
1979 | # qhasm: uint32323232 mt += mp | ||
1980 | # asm 1: paddd <mp=int6464#5,<mt=int6464#3 | ||
1981 | # asm 2: paddd <mp=%xmm4,<mt=%xmm2 | ||
1982 | paddd %xmm4,%xmm2 | ||
1983 | |||
1984 | # qhasm: mu = mt | ||
1985 | # asm 1: movdqa <mt=int6464#3,>mu=int6464#1 | ||
1986 | # asm 2: movdqa <mt=%xmm2,>mu=%xmm0 | ||
1987 | movdqa %xmm2,%xmm0 | ||
1988 | |||
1989 | # qhasm: uint32323232 mt >>= 25 | ||
1990 | # asm 1: psrld $25,<mt=int6464#3 | ||
1991 | # asm 2: psrld $25,<mt=%xmm2 | ||
1992 | psrld $25,%xmm2 | ||
1993 | |||
1994 | # qhasm: mq ^= mt | ||
1995 | # asm 1: pxor <mt=int6464#3,<mq=int6464#4 | ||
1996 | # asm 2: pxor <mt=%xmm2,<mq=%xmm3 | ||
1997 | pxor %xmm2,%xmm3 | ||
1998 | |||
1999 | # qhasm: uint32323232 mu <<= 7 | ||
2000 | # asm 1: pslld $7,<mu=int6464#1 | ||
2001 | # asm 2: pslld $7,<mu=%xmm0 | ||
2002 | pslld $7,%xmm0 | ||
2003 | |||
2004 | # qhasm: mq ^= mu | ||
2005 | # asm 1: pxor <mu=int6464#1,<mq=int6464#4 | ||
2006 | # asm 2: pxor <mu=%xmm0,<mq=%xmm3 | ||
2007 | pxor %xmm0,%xmm3 | ||
2008 | |||
2009 | # qhasm: z6_stack = mq | ||
2010 | # asm 1: movdqa <mq=int6464#4,>z6_stack=stack128#26 | ||
2011 | # asm 2: movdqa <mq=%xmm3,>z6_stack=432(%esp) | ||
2012 | movdqa %xmm3,432(%esp) | ||
2013 | |||
2014 | # qhasm: mt = mp | ||
2015 | # asm 1: movdqa <mp=int6464#5,>mt=int6464#1 | ||
2016 | # asm 2: movdqa <mp=%xmm4,>mt=%xmm0 | ||
2017 | movdqa %xmm4,%xmm0 | ||
2018 | |||
2019 | # qhasm: uint32323232 mt += mq | ||
2020 | # asm 1: paddd <mq=int6464#4,<mt=int6464#1 | ||
2021 | # asm 2: paddd <mq=%xmm3,<mt=%xmm0 | ||
2022 | paddd %xmm3,%xmm0 | ||
2023 | |||
2024 | # qhasm: mu = mt | ||
2025 | # asm 1: movdqa <mt=int6464#1,>mu=int6464#2 | ||
2026 | # asm 2: movdqa <mt=%xmm0,>mu=%xmm1 | ||
2027 | movdqa %xmm0,%xmm1 | ||
2028 | |||
2029 | # qhasm: uint32323232 mt >>= 23 | ||
2030 | # asm 1: psrld $23,<mt=int6464#1 | ||
2031 | # asm 2: psrld $23,<mt=%xmm0 | ||
2032 | psrld $23,%xmm0 | ||
2033 | |||
2034 | # qhasm: mr ^= mt | ||
2035 | # asm 1: pxor <mt=int6464#1,<mr=int6464#6 | ||
2036 | # asm 2: pxor <mt=%xmm0,<mr=%xmm5 | ||
2037 | pxor %xmm0,%xmm5 | ||
2038 | |||
2039 | # qhasm: uint32323232 mu <<= 9 | ||
2040 | # asm 1: pslld $9,<mu=int6464#2 | ||
2041 | # asm 2: pslld $9,<mu=%xmm1 | ||
2042 | pslld $9,%xmm1 | ||
2043 | |||
2044 | # qhasm: mr ^= mu | ||
2045 | # asm 1: pxor <mu=int6464#2,<mr=int6464#6 | ||
2046 | # asm 2: pxor <mu=%xmm1,<mr=%xmm5 | ||
2047 | pxor %xmm1,%xmm5 | ||
2048 | |||
2049 | # qhasm: z7_stack = mr | ||
2050 | # asm 1: movdqa <mr=int6464#6,>z7_stack=stack128#29 | ||
2051 | # asm 2: movdqa <mr=%xmm5,>z7_stack=480(%esp) | ||
2052 | movdqa %xmm5,480(%esp) | ||
2053 | |||
2054 | # qhasm: uint32323232 mq += mr | ||
2055 | # asm 1: paddd <mr=int6464#6,<mq=int6464#4 | ||
2056 | # asm 2: paddd <mr=%xmm5,<mq=%xmm3 | ||
2057 | paddd %xmm5,%xmm3 | ||
2058 | |||
2059 | # qhasm: mu = mq | ||
2060 | # asm 1: movdqa <mq=int6464#4,>mu=int6464#1 | ||
2061 | # asm 2: movdqa <mq=%xmm3,>mu=%xmm0 | ||
2062 | movdqa %xmm3,%xmm0 | ||
2063 | |||
2064 | # qhasm: uint32323232 mq >>= 19 | ||
2065 | # asm 1: psrld $19,<mq=int6464#4 | ||
2066 | # asm 2: psrld $19,<mq=%xmm3 | ||
2067 | psrld $19,%xmm3 | ||
2068 | |||
2069 | # qhasm: ms ^= mq | ||
2070 | # asm 1: pxor <mq=int6464#4,<ms=int6464#7 | ||
2071 | # asm 2: pxor <mq=%xmm3,<ms=%xmm6 | ||
2072 | pxor %xmm3,%xmm6 | ||
2073 | |||
2074 | # qhasm: uint32323232 mu <<= 13 | ||
2075 | # asm 1: pslld $13,<mu=int6464#1 | ||
2076 | # asm 2: pslld $13,<mu=%xmm0 | ||
2077 | pslld $13,%xmm0 | ||
2078 | |||
2079 | # qhasm: ms ^= mu | ||
2080 | # asm 1: pxor <mu=int6464#1,<ms=int6464#7 | ||
2081 | # asm 2: pxor <mu=%xmm0,<ms=%xmm6 | ||
2082 | pxor %xmm0,%xmm6 | ||
2083 | |||
2084 | # qhasm: t = z9_stack | ||
2085 | # asm 1: movdqa <z9_stack=stack128#32,>t=int6464#3 | ||
2086 | # asm 2: movdqa <z9_stack=528(%esp),>t=%xmm2 | ||
2087 | movdqa 528(%esp),%xmm2 | ||
2088 | |||
2089 | # qhasm: p = z10_stack | ||
2090 | # asm 1: movdqa <z10_stack=stack128#27,>p=int6464#1 | ||
2091 | # asm 2: movdqa <z10_stack=448(%esp),>p=%xmm0 | ||
2092 | movdqa 448(%esp),%xmm0 | ||
2093 | |||
2094 | # qhasm: q = z11_stack | ||
2095 | # asm 1: movdqa <z11_stack=stack128#21,>q=int6464#4 | ||
2096 | # asm 2: movdqa <z11_stack=352(%esp),>q=%xmm3 | ||
2097 | movdqa 352(%esp),%xmm3 | ||
2098 | |||
2099 | # qhasm: r = z8_stack | ||
2100 | # asm 1: movdqa <z8_stack=stack128#34,>r=int6464#2 | ||
2101 | # asm 2: movdqa <z8_stack=560(%esp),>r=%xmm1 | ||
2102 | movdqa 560(%esp),%xmm1 | ||
2103 | |||
2104 | # qhasm: z4_stack = ms | ||
2105 | # asm 1: movdqa <ms=int6464#7,>z4_stack=stack128#34 | ||
2106 | # asm 2: movdqa <ms=%xmm6,>z4_stack=560(%esp) | ||
2107 | movdqa %xmm6,560(%esp) | ||
2108 | |||
2109 | # qhasm: uint32323232 mr += ms | ||
2110 | # asm 1: paddd <ms=int6464#7,<mr=int6464#6 | ||
2111 | # asm 2: paddd <ms=%xmm6,<mr=%xmm5 | ||
2112 | paddd %xmm6,%xmm5 | ||
2113 | |||
2114 | # qhasm: mu = mr | ||
2115 | # asm 1: movdqa <mr=int6464#6,>mu=int6464#7 | ||
2116 | # asm 2: movdqa <mr=%xmm5,>mu=%xmm6 | ||
2117 | movdqa %xmm5,%xmm6 | ||
2118 | |||
2119 | # qhasm: uint32323232 mr >>= 14 | ||
2120 | # asm 1: psrld $14,<mr=int6464#6 | ||
2121 | # asm 2: psrld $14,<mr=%xmm5 | ||
2122 | psrld $14,%xmm5 | ||
2123 | |||
2124 | # qhasm: mp ^= mr | ||
2125 | # asm 1: pxor <mr=int6464#6,<mp=int6464#5 | ||
2126 | # asm 2: pxor <mr=%xmm5,<mp=%xmm4 | ||
2127 | pxor %xmm5,%xmm4 | ||
2128 | |||
2129 | # qhasm: uint32323232 mu <<= 18 | ||
2130 | # asm 1: pslld $18,<mu=int6464#7 | ||
2131 | # asm 2: pslld $18,<mu=%xmm6 | ||
2132 | pslld $18,%xmm6 | ||
2133 | |||
2134 | # qhasm: mp ^= mu | ||
2135 | # asm 1: pxor <mu=int6464#7,<mp=int6464#5 | ||
2136 | # asm 2: pxor <mu=%xmm6,<mp=%xmm4 | ||
2137 | pxor %xmm6,%xmm4 | ||
2138 | |||
2139 | # qhasm: z5_stack = mp | ||
2140 | # asm 1: movdqa <mp=int6464#5,>z5_stack=stack128#21 | ||
2141 | # asm 2: movdqa <mp=%xmm4,>z5_stack=352(%esp) | ||
2142 | movdqa %xmm4,352(%esp) | ||
2143 | |||
2144 | # qhasm: assign xmm0 to p | ||
2145 | |||
2146 | # qhasm: assign xmm1 to r | ||
2147 | |||
2148 | # qhasm: assign xmm2 to t | ||
2149 | |||
2150 | # qhasm: assign xmm3 to q | ||
2151 | |||
2152 | # qhasm: s = t | ||
2153 | # asm 1: movdqa <t=int6464#3,>s=int6464#7 | ||
2154 | # asm 2: movdqa <t=%xmm2,>s=%xmm6 | ||
2155 | movdqa %xmm2,%xmm6 | ||
2156 | |||
2157 | # qhasm: uint32323232 t += p | ||
2158 | # asm 1: paddd <p=int6464#1,<t=int6464#3 | ||
2159 | # asm 2: paddd <p=%xmm0,<t=%xmm2 | ||
2160 | paddd %xmm0,%xmm2 | ||
2161 | |||
2162 | # qhasm: u = t | ||
2163 | # asm 1: movdqa <t=int6464#3,>u=int6464#5 | ||
2164 | # asm 2: movdqa <t=%xmm2,>u=%xmm4 | ||
2165 | movdqa %xmm2,%xmm4 | ||
2166 | |||
2167 | # qhasm: uint32323232 t >>= 25 | ||
2168 | # asm 1: psrld $25,<t=int6464#3 | ||
2169 | # asm 2: psrld $25,<t=%xmm2 | ||
2170 | psrld $25,%xmm2 | ||
2171 | |||
2172 | # qhasm: q ^= t | ||
2173 | # asm 1: pxor <t=int6464#3,<q=int6464#4 | ||
2174 | # asm 2: pxor <t=%xmm2,<q=%xmm3 | ||
2175 | pxor %xmm2,%xmm3 | ||
2176 | |||
2177 | # qhasm: uint32323232 u <<= 7 | ||
2178 | # asm 1: pslld $7,<u=int6464#5 | ||
2179 | # asm 2: pslld $7,<u=%xmm4 | ||
2180 | pslld $7,%xmm4 | ||
2181 | |||
2182 | # qhasm: q ^= u | ||
2183 | # asm 1: pxor <u=int6464#5,<q=int6464#4 | ||
2184 | # asm 2: pxor <u=%xmm4,<q=%xmm3 | ||
2185 | pxor %xmm4,%xmm3 | ||
2186 | |||
2187 | # qhasm: z11_stack = q | ||
2188 | # asm 1: movdqa <q=int6464#4,>z11_stack=stack128#27 | ||
2189 | # asm 2: movdqa <q=%xmm3,>z11_stack=448(%esp) | ||
2190 | movdqa %xmm3,448(%esp) | ||
2191 | |||
2192 | # qhasm: t = p | ||
2193 | # asm 1: movdqa <p=int6464#1,>t=int6464#3 | ||
2194 | # asm 2: movdqa <p=%xmm0,>t=%xmm2 | ||
2195 | movdqa %xmm0,%xmm2 | ||
2196 | |||
2197 | # qhasm: uint32323232 t += q | ||
2198 | # asm 1: paddd <q=int6464#4,<t=int6464#3 | ||
2199 | # asm 2: paddd <q=%xmm3,<t=%xmm2 | ||
2200 | paddd %xmm3,%xmm2 | ||
2201 | |||
2202 | # qhasm: u = t | ||
2203 | # asm 1: movdqa <t=int6464#3,>u=int6464#5 | ||
2204 | # asm 2: movdqa <t=%xmm2,>u=%xmm4 | ||
2205 | movdqa %xmm2,%xmm4 | ||
2206 | |||
2207 | # qhasm: uint32323232 t >>= 23 | ||
2208 | # asm 1: psrld $23,<t=int6464#3 | ||
2209 | # asm 2: psrld $23,<t=%xmm2 | ||
2210 | psrld $23,%xmm2 | ||
2211 | |||
2212 | # qhasm: r ^= t | ||
2213 | # asm 1: pxor <t=int6464#3,<r=int6464#2 | ||
2214 | # asm 2: pxor <t=%xmm2,<r=%xmm1 | ||
2215 | pxor %xmm2,%xmm1 | ||
2216 | |||
2217 | # qhasm: uint32323232 u <<= 9 | ||
2218 | # asm 1: pslld $9,<u=int6464#5 | ||
2219 | # asm 2: pslld $9,<u=%xmm4 | ||
2220 | pslld $9,%xmm4 | ||
2221 | |||
2222 | # qhasm: r ^= u | ||
2223 | # asm 1: pxor <u=int6464#5,<r=int6464#2 | ||
2224 | # asm 2: pxor <u=%xmm4,<r=%xmm1 | ||
2225 | pxor %xmm4,%xmm1 | ||
2226 | |||
2227 | # qhasm: z8_stack = r | ||
2228 | # asm 1: movdqa <r=int6464#2,>z8_stack=stack128#37 | ||
2229 | # asm 2: movdqa <r=%xmm1,>z8_stack=608(%esp) | ||
2230 | movdqa %xmm1,608(%esp) | ||
2231 | |||
2232 | # qhasm: uint32323232 q += r | ||
2233 | # asm 1: paddd <r=int6464#2,<q=int6464#4 | ||
2234 | # asm 2: paddd <r=%xmm1,<q=%xmm3 | ||
2235 | paddd %xmm1,%xmm3 | ||
2236 | |||
2237 | # qhasm: u = q | ||
2238 | # asm 1: movdqa <q=int6464#4,>u=int6464#3 | ||
2239 | # asm 2: movdqa <q=%xmm3,>u=%xmm2 | ||
2240 | movdqa %xmm3,%xmm2 | ||
2241 | |||
2242 | # qhasm: uint32323232 q >>= 19 | ||
2243 | # asm 1: psrld $19,<q=int6464#4 | ||
2244 | # asm 2: psrld $19,<q=%xmm3 | ||
2245 | psrld $19,%xmm3 | ||
2246 | |||
2247 | # qhasm: s ^= q | ||
2248 | # asm 1: pxor <q=int6464#4,<s=int6464#7 | ||
2249 | # asm 2: pxor <q=%xmm3,<s=%xmm6 | ||
2250 | pxor %xmm3,%xmm6 | ||
2251 | |||
2252 | # qhasm: uint32323232 u <<= 13 | ||
2253 | # asm 1: pslld $13,<u=int6464#3 | ||
2254 | # asm 2: pslld $13,<u=%xmm2 | ||
2255 | pslld $13,%xmm2 | ||
2256 | |||
2257 | # qhasm: s ^= u | ||
2258 | # asm 1: pxor <u=int6464#3,<s=int6464#7 | ||
2259 | # asm 2: pxor <u=%xmm2,<s=%xmm6 | ||
2260 | pxor %xmm2,%xmm6 | ||
2261 | |||
2262 | # qhasm: mt = z14_stack | ||
2263 | # asm 1: movdqa <z14_stack=stack128#36,>mt=int6464#3 | ||
2264 | # asm 2: movdqa <z14_stack=592(%esp),>mt=%xmm2 | ||
2265 | movdqa 592(%esp),%xmm2 | ||
2266 | |||
2267 | # qhasm: mp = z15_stack | ||
2268 | # asm 1: movdqa <z15_stack=stack128#22,>mp=int6464#5 | ||
2269 | # asm 2: movdqa <z15_stack=368(%esp),>mp=%xmm4 | ||
2270 | movdqa 368(%esp),%xmm4 | ||
2271 | |||
2272 | # qhasm: mq = z12_stack | ||
2273 | # asm 1: movdqa <z12_stack=stack128#30,>mq=int6464#4 | ||
2274 | # asm 2: movdqa <z12_stack=496(%esp),>mq=%xmm3 | ||
2275 | movdqa 496(%esp),%xmm3 | ||
2276 | |||
2277 | # qhasm: mr = z13_stack | ||
2278 | # asm 1: movdqa <z13_stack=stack128#35,>mr=int6464#6 | ||
2279 | # asm 2: movdqa <z13_stack=576(%esp),>mr=%xmm5 | ||
2280 | movdqa 576(%esp),%xmm5 | ||
2281 | |||
2282 | # qhasm: z9_stack = s | ||
2283 | # asm 1: movdqa <s=int6464#7,>z9_stack=stack128#32 | ||
2284 | # asm 2: movdqa <s=%xmm6,>z9_stack=528(%esp) | ||
2285 | movdqa %xmm6,528(%esp) | ||
2286 | |||
2287 | # qhasm: uint32323232 r += s | ||
2288 | # asm 1: paddd <s=int6464#7,<r=int6464#2 | ||
2289 | # asm 2: paddd <s=%xmm6,<r=%xmm1 | ||
2290 | paddd %xmm6,%xmm1 | ||
2291 | |||
2292 | # qhasm: u = r | ||
2293 | # asm 1: movdqa <r=int6464#2,>u=int6464#7 | ||
2294 | # asm 2: movdqa <r=%xmm1,>u=%xmm6 | ||
2295 | movdqa %xmm1,%xmm6 | ||
2296 | |||
2297 | # qhasm: uint32323232 r >>= 14 | ||
2298 | # asm 1: psrld $14,<r=int6464#2 | ||
2299 | # asm 2: psrld $14,<r=%xmm1 | ||
2300 | psrld $14,%xmm1 | ||
2301 | |||
2302 | # qhasm: p ^= r | ||
2303 | # asm 1: pxor <r=int6464#2,<p=int6464#1 | ||
2304 | # asm 2: pxor <r=%xmm1,<p=%xmm0 | ||
2305 | pxor %xmm1,%xmm0 | ||
2306 | |||
2307 | # qhasm: uint32323232 u <<= 18 | ||
2308 | # asm 1: pslld $18,<u=int6464#7 | ||
2309 | # asm 2: pslld $18,<u=%xmm6 | ||
2310 | pslld $18,%xmm6 | ||
2311 | |||
2312 | # qhasm: p ^= u | ||
2313 | # asm 1: pxor <u=int6464#7,<p=int6464#1 | ||
2314 | # asm 2: pxor <u=%xmm6,<p=%xmm0 | ||
2315 | pxor %xmm6,%xmm0 | ||
2316 | |||
2317 | # qhasm: z10_stack = p | ||
2318 | # asm 1: movdqa <p=int6464#1,>z10_stack=stack128#22 | ||
2319 | # asm 2: movdqa <p=%xmm0,>z10_stack=368(%esp) | ||
2320 | movdqa %xmm0,368(%esp) | ||
2321 | |||
2322 | # qhasm: assign xmm2 to mt | ||
2323 | |||
2324 | # qhasm: assign xmm3 to mq | ||
2325 | |||
2326 | # qhasm: assign xmm4 to mp | ||
2327 | |||
2328 | # qhasm: assign xmm5 to mr | ||
2329 | |||
2330 | # qhasm: ms = mt | ||
2331 | # asm 1: movdqa <mt=int6464#3,>ms=int6464#7 | ||
2332 | # asm 2: movdqa <mt=%xmm2,>ms=%xmm6 | ||
2333 | movdqa %xmm2,%xmm6 | ||
2334 | |||
2335 | # qhasm: uint32323232 mt += mp | ||
2336 | # asm 1: paddd <mp=int6464#5,<mt=int6464#3 | ||
2337 | # asm 2: paddd <mp=%xmm4,<mt=%xmm2 | ||
2338 | paddd %xmm4,%xmm2 | ||
2339 | |||
2340 | # qhasm: mu = mt | ||
2341 | # asm 1: movdqa <mt=int6464#3,>mu=int6464#1 | ||
2342 | # asm 2: movdqa <mt=%xmm2,>mu=%xmm0 | ||
2343 | movdqa %xmm2,%xmm0 | ||
2344 | |||
2345 | # qhasm: uint32323232 mt >>= 25 | ||
2346 | # asm 1: psrld $25,<mt=int6464#3 | ||
2347 | # asm 2: psrld $25,<mt=%xmm2 | ||
2348 | psrld $25,%xmm2 | ||
2349 | |||
2350 | # qhasm: mq ^= mt | ||
2351 | # asm 1: pxor <mt=int6464#3,<mq=int6464#4 | ||
2352 | # asm 2: pxor <mt=%xmm2,<mq=%xmm3 | ||
2353 | pxor %xmm2,%xmm3 | ||
2354 | |||
2355 | # qhasm: uint32323232 mu <<= 7 | ||
2356 | # asm 1: pslld $7,<mu=int6464#1 | ||
2357 | # asm 2: pslld $7,<mu=%xmm0 | ||
2358 | pslld $7,%xmm0 | ||
2359 | |||
2360 | # qhasm: mq ^= mu | ||
2361 | # asm 1: pxor <mu=int6464#1,<mq=int6464#4 | ||
2362 | # asm 2: pxor <mu=%xmm0,<mq=%xmm3 | ||
2363 | pxor %xmm0,%xmm3 | ||
2364 | |||
2365 | # qhasm: z12_stack = mq | ||
2366 | # asm 1: movdqa <mq=int6464#4,>z12_stack=stack128#35 | ||
2367 | # asm 2: movdqa <mq=%xmm3,>z12_stack=576(%esp) | ||
2368 | movdqa %xmm3,576(%esp) | ||
2369 | |||
2370 | # qhasm: mt = mp | ||
2371 | # asm 1: movdqa <mp=int6464#5,>mt=int6464#1 | ||
2372 | # asm 2: movdqa <mp=%xmm4,>mt=%xmm0 | ||
2373 | movdqa %xmm4,%xmm0 | ||
2374 | |||
2375 | # qhasm: uint32323232 mt += mq | ||
2376 | # asm 1: paddd <mq=int6464#4,<mt=int6464#1 | ||
2377 | # asm 2: paddd <mq=%xmm3,<mt=%xmm0 | ||
2378 | paddd %xmm3,%xmm0 | ||
2379 | |||
2380 | # qhasm: mu = mt | ||
2381 | # asm 1: movdqa <mt=int6464#1,>mu=int6464#2 | ||
2382 | # asm 2: movdqa <mt=%xmm0,>mu=%xmm1 | ||
2383 | movdqa %xmm0,%xmm1 | ||
2384 | |||
2385 | # qhasm: uint32323232 mt >>= 23 | ||
2386 | # asm 1: psrld $23,<mt=int6464#1 | ||
2387 | # asm 2: psrld $23,<mt=%xmm0 | ||
2388 | psrld $23,%xmm0 | ||
2389 | |||
2390 | # qhasm: mr ^= mt | ||
2391 | # asm 1: pxor <mt=int6464#1,<mr=int6464#6 | ||
2392 | # asm 2: pxor <mt=%xmm0,<mr=%xmm5 | ||
2393 | pxor %xmm0,%xmm5 | ||
2394 | |||
2395 | # qhasm: uint32323232 mu <<= 9 | ||
2396 | # asm 1: pslld $9,<mu=int6464#2 | ||
2397 | # asm 2: pslld $9,<mu=%xmm1 | ||
2398 | pslld $9,%xmm1 | ||
2399 | |||
2400 | # qhasm: mr ^= mu | ||
2401 | # asm 1: pxor <mu=int6464#2,<mr=int6464#6 | ||
2402 | # asm 2: pxor <mu=%xmm1,<mr=%xmm5 | ||
2403 | pxor %xmm1,%xmm5 | ||
2404 | |||
2405 | # qhasm: z13_stack = mr | ||
2406 | # asm 1: movdqa <mr=int6464#6,>z13_stack=stack128#30 | ||
2407 | # asm 2: movdqa <mr=%xmm5,>z13_stack=496(%esp) | ||
2408 | movdqa %xmm5,496(%esp) | ||
2409 | |||
2410 | # qhasm: uint32323232 mq += mr | ||
2411 | # asm 1: paddd <mr=int6464#6,<mq=int6464#4 | ||
2412 | # asm 2: paddd <mr=%xmm5,<mq=%xmm3 | ||
2413 | paddd %xmm5,%xmm3 | ||
2414 | |||
2415 | # qhasm: mu = mq | ||
2416 | # asm 1: movdqa <mq=int6464#4,>mu=int6464#1 | ||
2417 | # asm 2: movdqa <mq=%xmm3,>mu=%xmm0 | ||
2418 | movdqa %xmm3,%xmm0 | ||
2419 | |||
2420 | # qhasm: uint32323232 mq >>= 19 | ||
2421 | # asm 1: psrld $19,<mq=int6464#4 | ||
2422 | # asm 2: psrld $19,<mq=%xmm3 | ||
2423 | psrld $19,%xmm3 | ||
2424 | |||
2425 | # qhasm: ms ^= mq | ||
2426 | # asm 1: pxor <mq=int6464#4,<ms=int6464#7 | ||
2427 | # asm 2: pxor <mq=%xmm3,<ms=%xmm6 | ||
2428 | pxor %xmm3,%xmm6 | ||
2429 | |||
2430 | # qhasm: uint32323232 mu <<= 13 | ||
2431 | # asm 1: pslld $13,<mu=int6464#1 | ||
2432 | # asm 2: pslld $13,<mu=%xmm0 | ||
2433 | pslld $13,%xmm0 | ||
2434 | |||
2435 | # qhasm: ms ^= mu | ||
2436 | # asm 1: pxor <mu=int6464#1,<ms=int6464#7 | ||
2437 | # asm 2: pxor <mu=%xmm0,<ms=%xmm6 | ||
2438 | pxor %xmm0,%xmm6 | ||
2439 | |||
2440 | # qhasm: t = z12_stack | ||
2441 | # asm 1: movdqa <z12_stack=stack128#35,>t=int6464#3 | ||
2442 | # asm 2: movdqa <z12_stack=576(%esp),>t=%xmm2 | ||
2443 | movdqa 576(%esp),%xmm2 | ||
2444 | |||
2445 | # qhasm: p = z0_stack | ||
2446 | # asm 1: movdqa <z0_stack=stack128#33,>p=int6464#1 | ||
2447 | # asm 2: movdqa <z0_stack=544(%esp),>p=%xmm0 | ||
2448 | movdqa 544(%esp),%xmm0 | ||
2449 | |||
2450 | # qhasm: q = z4_stack | ||
2451 | # asm 1: movdqa <z4_stack=stack128#34,>q=int6464#4 | ||
2452 | # asm 2: movdqa <z4_stack=560(%esp),>q=%xmm3 | ||
2453 | movdqa 560(%esp),%xmm3 | ||
2454 | |||
2455 | # qhasm: r = z8_stack | ||
2456 | # asm 1: movdqa <z8_stack=stack128#37,>r=int6464#2 | ||
2457 | # asm 2: movdqa <z8_stack=608(%esp),>r=%xmm1 | ||
2458 | movdqa 608(%esp),%xmm1 | ||
2459 | |||
2460 | # qhasm: z14_stack = ms | ||
2461 | # asm 1: movdqa <ms=int6464#7,>z14_stack=stack128#24 | ||
2462 | # asm 2: movdqa <ms=%xmm6,>z14_stack=400(%esp) | ||
2463 | movdqa %xmm6,400(%esp) | ||
2464 | |||
2465 | # qhasm: uint32323232 mr += ms | ||
2466 | # asm 1: paddd <ms=int6464#7,<mr=int6464#6 | ||
2467 | # asm 2: paddd <ms=%xmm6,<mr=%xmm5 | ||
2468 | paddd %xmm6,%xmm5 | ||
2469 | |||
2470 | # qhasm: mu = mr | ||
2471 | # asm 1: movdqa <mr=int6464#6,>mu=int6464#7 | ||
2472 | # asm 2: movdqa <mr=%xmm5,>mu=%xmm6 | ||
2473 | movdqa %xmm5,%xmm6 | ||
2474 | |||
2475 | # qhasm: uint32323232 mr >>= 14 | ||
2476 | # asm 1: psrld $14,<mr=int6464#6 | ||
2477 | # asm 2: psrld $14,<mr=%xmm5 | ||
2478 | psrld $14,%xmm5 | ||
2479 | |||
2480 | # qhasm: mp ^= mr | ||
2481 | # asm 1: pxor <mr=int6464#6,<mp=int6464#5 | ||
2482 | # asm 2: pxor <mr=%xmm5,<mp=%xmm4 | ||
2483 | pxor %xmm5,%xmm4 | ||
2484 | |||
2485 | # qhasm: uint32323232 mu <<= 18 | ||
2486 | # asm 1: pslld $18,<mu=int6464#7 | ||
2487 | # asm 2: pslld $18,<mu=%xmm6 | ||
2488 | pslld $18,%xmm6 | ||
2489 | |||
2490 | # qhasm: mp ^= mu | ||
2491 | # asm 1: pxor <mu=int6464#7,<mp=int6464#5 | ||
2492 | # asm 2: pxor <mu=%xmm6,<mp=%xmm4 | ||
2493 | pxor %xmm6,%xmm4 | ||
2494 | |||
2495 | # qhasm: z15_stack = mp | ||
2496 | # asm 1: movdqa <mp=int6464#5,>z15_stack=stack128#23 | ||
2497 | # asm 2: movdqa <mp=%xmm4,>z15_stack=384(%esp) | ||
2498 | movdqa %xmm4,384(%esp) | ||
2499 | |||
2500 | # qhasm: unsigned>? i -= 2 | ||
2501 | # asm 1: sub $2,<i=int32#1 | ||
2502 | # asm 2: sub $2,<i=%eax | ||
2503 | sub $2,%eax | ||
2504 | # comment:fp stack unchanged by jump | ||
2505 | |||
2506 | # qhasm: goto mainloop1 if unsigned> | ||
2507 | ja ._mainloop1 | ||
2508 | |||
2509 | # qhasm: out = out_stack | ||
2510 | # asm 1: movl <out_stack=stack32#6,>out=int32#6 | ||
2511 | # asm 2: movl <out_stack=20(%esp),>out=%edi | ||
2512 | movl 20(%esp),%edi | ||
2513 | |||
2514 | # qhasm: z0 = z0_stack | ||
2515 | # asm 1: movdqa <z0_stack=stack128#33,>z0=int6464#1 | ||
2516 | # asm 2: movdqa <z0_stack=544(%esp),>z0=%xmm0 | ||
2517 | movdqa 544(%esp),%xmm0 | ||
2518 | |||
2519 | # qhasm: z1 = z1_stack | ||
2520 | # asm 1: movdqa <z1_stack=stack128#28,>z1=int6464#2 | ||
2521 | # asm 2: movdqa <z1_stack=464(%esp),>z1=%xmm1 | ||
2522 | movdqa 464(%esp),%xmm1 | ||
2523 | |||
2524 | # qhasm: z2 = z2_stack | ||
2525 | # asm 1: movdqa <z2_stack=stack128#31,>z2=int6464#3 | ||
2526 | # asm 2: movdqa <z2_stack=512(%esp),>z2=%xmm2 | ||
2527 | movdqa 512(%esp),%xmm2 | ||
2528 | |||
2529 | # qhasm: z3 = z3_stack | ||
2530 | # asm 1: movdqa <z3_stack=stack128#25,>z3=int6464#4 | ||
2531 | # asm 2: movdqa <z3_stack=416(%esp),>z3=%xmm3 | ||
2532 | movdqa 416(%esp),%xmm3 | ||
2533 | |||
2534 | # qhasm: uint32323232 z0 += orig0 | ||
2535 | # asm 1: paddd <orig0=stack128#8,<z0=int6464#1 | ||
2536 | # asm 2: paddd <orig0=144(%esp),<z0=%xmm0 | ||
2537 | paddd 144(%esp),%xmm0 | ||
2538 | |||
2539 | # qhasm: uint32323232 z1 += orig1 | ||
2540 | # asm 1: paddd <orig1=stack128#12,<z1=int6464#2 | ||
2541 | # asm 2: paddd <orig1=208(%esp),<z1=%xmm1 | ||
2542 | paddd 208(%esp),%xmm1 | ||
2543 | |||
2544 | # qhasm: uint32323232 z2 += orig2 | ||
2545 | # asm 1: paddd <orig2=stack128#15,<z2=int6464#3 | ||
2546 | # asm 2: paddd <orig2=256(%esp),<z2=%xmm2 | ||
2547 | paddd 256(%esp),%xmm2 | ||
2548 | |||
2549 | # qhasm: uint32323232 z3 += orig3 | ||
2550 | # asm 1: paddd <orig3=stack128#18,<z3=int6464#4 | ||
2551 | # asm 2: paddd <orig3=304(%esp),<z3=%xmm3 | ||
2552 | paddd 304(%esp),%xmm3 | ||
2553 | |||
2554 | # qhasm: in0 = z0 | ||
2555 | # asm 1: movd <z0=int6464#1,>in0=int32#1 | ||
2556 | # asm 2: movd <z0=%xmm0,>in0=%eax | ||
2557 | movd %xmm0,%eax | ||
2558 | |||
2559 | # qhasm: in1 = z1 | ||
2560 | # asm 1: movd <z1=int6464#2,>in1=int32#2 | ||
2561 | # asm 2: movd <z1=%xmm1,>in1=%ecx | ||
2562 | movd %xmm1,%ecx | ||
2563 | |||
2564 | # qhasm: in2 = z2 | ||
2565 | # asm 1: movd <z2=int6464#3,>in2=int32#3 | ||
2566 | # asm 2: movd <z2=%xmm2,>in2=%edx | ||
2567 | movd %xmm2,%edx | ||
2568 | |||
2569 | # qhasm: in3 = z3 | ||
2570 | # asm 1: movd <z3=int6464#4,>in3=int32#4 | ||
2571 | # asm 2: movd <z3=%xmm3,>in3=%ebx | ||
2572 | movd %xmm3,%ebx | ||
2573 | |||
2574 | # qhasm: z0 <<<= 96 | ||
2575 | # asm 1: pshufd $0x39,<z0=int6464#1,<z0=int6464#1 | ||
2576 | # asm 2: pshufd $0x39,<z0=%xmm0,<z0=%xmm0 | ||
2577 | pshufd $0x39,%xmm0,%xmm0 | ||
2578 | |||
2579 | # qhasm: z1 <<<= 96 | ||
2580 | # asm 1: pshufd $0x39,<z1=int6464#2,<z1=int6464#2 | ||
2581 | # asm 2: pshufd $0x39,<z1=%xmm1,<z1=%xmm1 | ||
2582 | pshufd $0x39,%xmm1,%xmm1 | ||
2583 | |||
2584 | # qhasm: z2 <<<= 96 | ||
2585 | # asm 1: pshufd $0x39,<z2=int6464#3,<z2=int6464#3 | ||
2586 | # asm 2: pshufd $0x39,<z2=%xmm2,<z2=%xmm2 | ||
2587 | pshufd $0x39,%xmm2,%xmm2 | ||
2588 | |||
2589 | # qhasm: z3 <<<= 96 | ||
2590 | # asm 1: pshufd $0x39,<z3=int6464#4,<z3=int6464#4 | ||
2591 | # asm 2: pshufd $0x39,<z3=%xmm3,<z3=%xmm3 | ||
2592 | pshufd $0x39,%xmm3,%xmm3 | ||
2593 | |||
2594 | # qhasm: in0 ^= *(uint32 *) (m + 0) | ||
2595 | # asm 1: xorl 0(<m=int32#5),<in0=int32#1 | ||
2596 | # asm 2: xorl 0(<m=%esi),<in0=%eax | ||
2597 | xorl 0(%esi),%eax | ||
2598 | |||
2599 | # qhasm: in1 ^= *(uint32 *) (m + 4) | ||
2600 | # asm 1: xorl 4(<m=int32#5),<in1=int32#2 | ||
2601 | # asm 2: xorl 4(<m=%esi),<in1=%ecx | ||
2602 | xorl 4(%esi),%ecx | ||
2603 | |||
2604 | # qhasm: in2 ^= *(uint32 *) (m + 8) | ||
2605 | # asm 1: xorl 8(<m=int32#5),<in2=int32#3 | ||
2606 | # asm 2: xorl 8(<m=%esi),<in2=%edx | ||
2607 | xorl 8(%esi),%edx | ||
2608 | |||
2609 | # qhasm: in3 ^= *(uint32 *) (m + 12) | ||
2610 | # asm 1: xorl 12(<m=int32#5),<in3=int32#4 | ||
2611 | # asm 2: xorl 12(<m=%esi),<in3=%ebx | ||
2612 | xorl 12(%esi),%ebx | ||
2613 | |||
2614 | # qhasm: *(uint32 *) (out + 0) = in0 | ||
2615 | # asm 1: movl <in0=int32#1,0(<out=int32#6) | ||
2616 | # asm 2: movl <in0=%eax,0(<out=%edi) | ||
2617 | movl %eax,0(%edi) | ||
2618 | |||
2619 | # qhasm: *(uint32 *) (out + 4) = in1 | ||
2620 | # asm 1: movl <in1=int32#2,4(<out=int32#6) | ||
2621 | # asm 2: movl <in1=%ecx,4(<out=%edi) | ||
2622 | movl %ecx,4(%edi) | ||
2623 | |||
2624 | # qhasm: *(uint32 *) (out + 8) = in2 | ||
2625 | # asm 1: movl <in2=int32#3,8(<out=int32#6) | ||
2626 | # asm 2: movl <in2=%edx,8(<out=%edi) | ||
2627 | movl %edx,8(%edi) | ||
2628 | |||
2629 | # qhasm: *(uint32 *) (out + 12) = in3 | ||
2630 | # asm 1: movl <in3=int32#4,12(<out=int32#6) | ||
2631 | # asm 2: movl <in3=%ebx,12(<out=%edi) | ||
2632 | movl %ebx,12(%edi) | ||
2633 | |||
2634 | # qhasm: in0 = z0 | ||
2635 | # asm 1: movd <z0=int6464#1,>in0=int32#1 | ||
2636 | # asm 2: movd <z0=%xmm0,>in0=%eax | ||
2637 | movd %xmm0,%eax | ||
2638 | |||
2639 | # qhasm: in1 = z1 | ||
2640 | # asm 1: movd <z1=int6464#2,>in1=int32#2 | ||
2641 | # asm 2: movd <z1=%xmm1,>in1=%ecx | ||
2642 | movd %xmm1,%ecx | ||
2643 | |||
2644 | # qhasm: in2 = z2 | ||
2645 | # asm 1: movd <z2=int6464#3,>in2=int32#3 | ||
2646 | # asm 2: movd <z2=%xmm2,>in2=%edx | ||
2647 | movd %xmm2,%edx | ||
2648 | |||
2649 | # qhasm: in3 = z3 | ||
2650 | # asm 1: movd <z3=int6464#4,>in3=int32#4 | ||
2651 | # asm 2: movd <z3=%xmm3,>in3=%ebx | ||
2652 | movd %xmm3,%ebx | ||
2653 | |||
2654 | # qhasm: z0 <<<= 96 | ||
2655 | # asm 1: pshufd $0x39,<z0=int6464#1,<z0=int6464#1 | ||
2656 | # asm 2: pshufd $0x39,<z0=%xmm0,<z0=%xmm0 | ||
2657 | pshufd $0x39,%xmm0,%xmm0 | ||
2658 | |||
2659 | # qhasm: z1 <<<= 96 | ||
2660 | # asm 1: pshufd $0x39,<z1=int6464#2,<z1=int6464#2 | ||
2661 | # asm 2: pshufd $0x39,<z1=%xmm1,<z1=%xmm1 | ||
2662 | pshufd $0x39,%xmm1,%xmm1 | ||
2663 | |||
2664 | # qhasm: z2 <<<= 96 | ||
2665 | # asm 1: pshufd $0x39,<z2=int6464#3,<z2=int6464#3 | ||
2666 | # asm 2: pshufd $0x39,<z2=%xmm2,<z2=%xmm2 | ||
2667 | pshufd $0x39,%xmm2,%xmm2 | ||
2668 | |||
2669 | # qhasm: z3 <<<= 96 | ||
2670 | # asm 1: pshufd $0x39,<z3=int6464#4,<z3=int6464#4 | ||
2671 | # asm 2: pshufd $0x39,<z3=%xmm3,<z3=%xmm3 | ||
2672 | pshufd $0x39,%xmm3,%xmm3 | ||
2673 | |||
2674 | # qhasm: in0 ^= *(uint32 *) (m + 64) | ||
2675 | # asm 1: xorl 64(<m=int32#5),<in0=int32#1 | ||
2676 | # asm 2: xorl 64(<m=%esi),<in0=%eax | ||
2677 | xorl 64(%esi),%eax | ||
2678 | |||
2679 | # qhasm: in1 ^= *(uint32 *) (m + 68) | ||
2680 | # asm 1: xorl 68(<m=int32#5),<in1=int32#2 | ||
2681 | # asm 2: xorl 68(<m=%esi),<in1=%ecx | ||
2682 | xorl 68(%esi),%ecx | ||
2683 | |||
2684 | # qhasm: in2 ^= *(uint32 *) (m + 72) | ||
2685 | # asm 1: xorl 72(<m=int32#5),<in2=int32#3 | ||
2686 | # asm 2: xorl 72(<m=%esi),<in2=%edx | ||
2687 | xorl 72(%esi),%edx | ||
2688 | |||
2689 | # qhasm: in3 ^= *(uint32 *) (m + 76) | ||
2690 | # asm 1: xorl 76(<m=int32#5),<in3=int32#4 | ||
2691 | # asm 2: xorl 76(<m=%esi),<in3=%ebx | ||
2692 | xorl 76(%esi),%ebx | ||
2693 | |||
2694 | # qhasm: *(uint32 *) (out + 64) = in0 | ||
2695 | # asm 1: movl <in0=int32#1,64(<out=int32#6) | ||
2696 | # asm 2: movl <in0=%eax,64(<out=%edi) | ||
2697 | movl %eax,64(%edi) | ||
2698 | |||
2699 | # qhasm: *(uint32 *) (out + 68) = in1 | ||
2700 | # asm 1: movl <in1=int32#2,68(<out=int32#6) | ||
2701 | # asm 2: movl <in1=%ecx,68(<out=%edi) | ||
2702 | movl %ecx,68(%edi) | ||
2703 | |||
2704 | # qhasm: *(uint32 *) (out + 72) = in2 | ||
2705 | # asm 1: movl <in2=int32#3,72(<out=int32#6) | ||
2706 | # asm 2: movl <in2=%edx,72(<out=%edi) | ||
2707 | movl %edx,72(%edi) | ||
2708 | |||
2709 | # qhasm: *(uint32 *) (out + 76) = in3 | ||
2710 | # asm 1: movl <in3=int32#4,76(<out=int32#6) | ||
2711 | # asm 2: movl <in3=%ebx,76(<out=%edi) | ||
2712 | movl %ebx,76(%edi) | ||
2713 | |||
2714 | # qhasm: in0 = z0 | ||
2715 | # asm 1: movd <z0=int6464#1,>in0=int32#1 | ||
2716 | # asm 2: movd <z0=%xmm0,>in0=%eax | ||
2717 | movd %xmm0,%eax | ||
2718 | |||
2719 | # qhasm: in1 = z1 | ||
2720 | # asm 1: movd <z1=int6464#2,>in1=int32#2 | ||
2721 | # asm 2: movd <z1=%xmm1,>in1=%ecx | ||
2722 | movd %xmm1,%ecx | ||
2723 | |||
2724 | # qhasm: in2 = z2 | ||
2725 | # asm 1: movd <z2=int6464#3,>in2=int32#3 | ||
2726 | # asm 2: movd <z2=%xmm2,>in2=%edx | ||
2727 | movd %xmm2,%edx | ||
2728 | |||
2729 | # qhasm: in3 = z3 | ||
2730 | # asm 1: movd <z3=int6464#4,>in3=int32#4 | ||
2731 | # asm 2: movd <z3=%xmm3,>in3=%ebx | ||
2732 | movd %xmm3,%ebx | ||
2733 | |||
2734 | # qhasm: z0 <<<= 96 | ||
2735 | # asm 1: pshufd $0x39,<z0=int6464#1,<z0=int6464#1 | ||
2736 | # asm 2: pshufd $0x39,<z0=%xmm0,<z0=%xmm0 | ||
2737 | pshufd $0x39,%xmm0,%xmm0 | ||
2738 | |||
2739 | # qhasm: z1 <<<= 96 | ||
2740 | # asm 1: pshufd $0x39,<z1=int6464#2,<z1=int6464#2 | ||
2741 | # asm 2: pshufd $0x39,<z1=%xmm1,<z1=%xmm1 | ||
2742 | pshufd $0x39,%xmm1,%xmm1 | ||
2743 | |||
2744 | # qhasm: z2 <<<= 96 | ||
2745 | # asm 1: pshufd $0x39,<z2=int6464#3,<z2=int6464#3 | ||
2746 | # asm 2: pshufd $0x39,<z2=%xmm2,<z2=%xmm2 | ||
2747 | pshufd $0x39,%xmm2,%xmm2 | ||
2748 | |||
2749 | # qhasm: z3 <<<= 96 | ||
2750 | # asm 1: pshufd $0x39,<z3=int6464#4,<z3=int6464#4 | ||
2751 | # asm 2: pshufd $0x39,<z3=%xmm3,<z3=%xmm3 | ||
2752 | pshufd $0x39,%xmm3,%xmm3 | ||
2753 | |||
2754 | # qhasm: in0 ^= *(uint32 *) (m + 128) | ||
2755 | # asm 1: xorl 128(<m=int32#5),<in0=int32#1 | ||
2756 | # asm 2: xorl 128(<m=%esi),<in0=%eax | ||
2757 | xorl 128(%esi),%eax | ||
2758 | |||
2759 | # qhasm: in1 ^= *(uint32 *) (m + 132) | ||
2760 | # asm 1: xorl 132(<m=int32#5),<in1=int32#2 | ||
2761 | # asm 2: xorl 132(<m=%esi),<in1=%ecx | ||
2762 | xorl 132(%esi),%ecx | ||
2763 | |||
2764 | # qhasm: in2 ^= *(uint32 *) (m + 136) | ||
2765 | # asm 1: xorl 136(<m=int32#5),<in2=int32#3 | ||
2766 | # asm 2: xorl 136(<m=%esi),<in2=%edx | ||
2767 | xorl 136(%esi),%edx | ||
2768 | |||
2769 | # qhasm: in3 ^= *(uint32 *) (m + 140) | ||
2770 | # asm 1: xorl 140(<m=int32#5),<in3=int32#4 | ||
2771 | # asm 2: xorl 140(<m=%esi),<in3=%ebx | ||
2772 | xorl 140(%esi),%ebx | ||
2773 | |||
2774 | # qhasm: *(uint32 *) (out + 128) = in0 | ||
2775 | # asm 1: movl <in0=int32#1,128(<out=int32#6) | ||
2776 | # asm 2: movl <in0=%eax,128(<out=%edi) | ||
2777 | movl %eax,128(%edi) | ||
2778 | |||
2779 | # qhasm: *(uint32 *) (out + 132) = in1 | ||
2780 | # asm 1: movl <in1=int32#2,132(<out=int32#6) | ||
2781 | # asm 2: movl <in1=%ecx,132(<out=%edi) | ||
2782 | movl %ecx,132(%edi) | ||
2783 | |||
2784 | # qhasm: *(uint32 *) (out + 136) = in2 | ||
2785 | # asm 1: movl <in2=int32#3,136(<out=int32#6) | ||
2786 | # asm 2: movl <in2=%edx,136(<out=%edi) | ||
2787 | movl %edx,136(%edi) | ||
2788 | |||
2789 | # qhasm: *(uint32 *) (out + 140) = in3 | ||
2790 | # asm 1: movl <in3=int32#4,140(<out=int32#6) | ||
2791 | # asm 2: movl <in3=%ebx,140(<out=%edi) | ||
2792 | movl %ebx,140(%edi) | ||
2793 | |||
2794 | # qhasm: in0 = z0 | ||
2795 | # asm 1: movd <z0=int6464#1,>in0=int32#1 | ||
2796 | # asm 2: movd <z0=%xmm0,>in0=%eax | ||
2797 | movd %xmm0,%eax | ||
2798 | |||
2799 | # qhasm: in1 = z1 | ||
2800 | # asm 1: movd <z1=int6464#2,>in1=int32#2 | ||
2801 | # asm 2: movd <z1=%xmm1,>in1=%ecx | ||
2802 | movd %xmm1,%ecx | ||
2803 | |||
2804 | # qhasm: in2 = z2 | ||
2805 | # asm 1: movd <z2=int6464#3,>in2=int32#3 | ||
2806 | # asm 2: movd <z2=%xmm2,>in2=%edx | ||
2807 | movd %xmm2,%edx | ||
2808 | |||
2809 | # qhasm: in3 = z3 | ||
2810 | # asm 1: movd <z3=int6464#4,>in3=int32#4 | ||
2811 | # asm 2: movd <z3=%xmm3,>in3=%ebx | ||
2812 | movd %xmm3,%ebx | ||
2813 | |||
2814 | # qhasm: in0 ^= *(uint32 *) (m + 192) | ||
2815 | # asm 1: xorl 192(<m=int32#5),<in0=int32#1 | ||
2816 | # asm 2: xorl 192(<m=%esi),<in0=%eax | ||
2817 | xorl 192(%esi),%eax | ||
2818 | |||
2819 | # qhasm: in1 ^= *(uint32 *) (m + 196) | ||
2820 | # asm 1: xorl 196(<m=int32#5),<in1=int32#2 | ||
2821 | # asm 2: xorl 196(<m=%esi),<in1=%ecx | ||
2822 | xorl 196(%esi),%ecx | ||
2823 | |||
2824 | # qhasm: in2 ^= *(uint32 *) (m + 200) | ||
2825 | # asm 1: xorl 200(<m=int32#5),<in2=int32#3 | ||
2826 | # asm 2: xorl 200(<m=%esi),<in2=%edx | ||
2827 | xorl 200(%esi),%edx | ||
2828 | |||
2829 | # qhasm: in3 ^= *(uint32 *) (m + 204) | ||
2830 | # asm 1: xorl 204(<m=int32#5),<in3=int32#4 | ||
2831 | # asm 2: xorl 204(<m=%esi),<in3=%ebx | ||
2832 | xorl 204(%esi),%ebx | ||
2833 | |||
2834 | # qhasm: *(uint32 *) (out + 192) = in0 | ||
2835 | # asm 1: movl <in0=int32#1,192(<out=int32#6) | ||
2836 | # asm 2: movl <in0=%eax,192(<out=%edi) | ||
2837 | movl %eax,192(%edi) | ||
2838 | |||
2839 | # qhasm: *(uint32 *) (out + 196) = in1 | ||
2840 | # asm 1: movl <in1=int32#2,196(<out=int32#6) | ||
2841 | # asm 2: movl <in1=%ecx,196(<out=%edi) | ||
2842 | movl %ecx,196(%edi) | ||
2843 | |||
2844 | # qhasm: *(uint32 *) (out + 200) = in2 | ||
2845 | # asm 1: movl <in2=int32#3,200(<out=int32#6) | ||
2846 | # asm 2: movl <in2=%edx,200(<out=%edi) | ||
2847 | movl %edx,200(%edi) | ||
2848 | |||
2849 | # qhasm: *(uint32 *) (out + 204) = in3 | ||
2850 | # asm 1: movl <in3=int32#4,204(<out=int32#6) | ||
2851 | # asm 2: movl <in3=%ebx,204(<out=%edi) | ||
2852 | movl %ebx,204(%edi) | ||
2853 | |||
2854 | # qhasm: z4 = z4_stack | ||
2855 | # asm 1: movdqa <z4_stack=stack128#34,>z4=int6464#1 | ||
2856 | # asm 2: movdqa <z4_stack=560(%esp),>z4=%xmm0 | ||
2857 | movdqa 560(%esp),%xmm0 | ||
2858 | |||
2859 | # qhasm: z5 = z5_stack | ||
2860 | # asm 1: movdqa <z5_stack=stack128#21,>z5=int6464#2 | ||
2861 | # asm 2: movdqa <z5_stack=352(%esp),>z5=%xmm1 | ||
2862 | movdqa 352(%esp),%xmm1 | ||
2863 | |||
2864 | # qhasm: z6 = z6_stack | ||
2865 | # asm 1: movdqa <z6_stack=stack128#26,>z6=int6464#3 | ||
2866 | # asm 2: movdqa <z6_stack=432(%esp),>z6=%xmm2 | ||
2867 | movdqa 432(%esp),%xmm2 | ||
2868 | |||
2869 | # qhasm: z7 = z7_stack | ||
2870 | # asm 1: movdqa <z7_stack=stack128#29,>z7=int6464#4 | ||
2871 | # asm 2: movdqa <z7_stack=480(%esp),>z7=%xmm3 | ||
2872 | movdqa 480(%esp),%xmm3 | ||
2873 | |||
2874 | # qhasm: uint32323232 z4 += orig4 | ||
2875 | # asm 1: paddd <orig4=stack128#16,<z4=int6464#1 | ||
2876 | # asm 2: paddd <orig4=272(%esp),<z4=%xmm0 | ||
2877 | paddd 272(%esp),%xmm0 | ||
2878 | |||
2879 | # qhasm: uint32323232 z5 += orig5 | ||
2880 | # asm 1: paddd <orig5=stack128#5,<z5=int6464#2 | ||
2881 | # asm 2: paddd <orig5=96(%esp),<z5=%xmm1 | ||
2882 | paddd 96(%esp),%xmm1 | ||
2883 | |||
2884 | # qhasm: uint32323232 z6 += orig6 | ||
2885 | # asm 1: paddd <orig6=stack128#9,<z6=int6464#3 | ||
2886 | # asm 2: paddd <orig6=160(%esp),<z6=%xmm2 | ||
2887 | paddd 160(%esp),%xmm2 | ||
2888 | |||
2889 | # qhasm: uint32323232 z7 += orig7 | ||
2890 | # asm 1: paddd <orig7=stack128#13,<z7=int6464#4 | ||
2891 | # asm 2: paddd <orig7=224(%esp),<z7=%xmm3 | ||
2892 | paddd 224(%esp),%xmm3 | ||
2893 | |||
2894 | # qhasm: in4 = z4 | ||
2895 | # asm 1: movd <z4=int6464#1,>in4=int32#1 | ||
2896 | # asm 2: movd <z4=%xmm0,>in4=%eax | ||
2897 | movd %xmm0,%eax | ||
2898 | |||
2899 | # qhasm: in5 = z5 | ||
2900 | # asm 1: movd <z5=int6464#2,>in5=int32#2 | ||
2901 | # asm 2: movd <z5=%xmm1,>in5=%ecx | ||
2902 | movd %xmm1,%ecx | ||
2903 | |||
2904 | # qhasm: in6 = z6 | ||
2905 | # asm 1: movd <z6=int6464#3,>in6=int32#3 | ||
2906 | # asm 2: movd <z6=%xmm2,>in6=%edx | ||
2907 | movd %xmm2,%edx | ||
2908 | |||
2909 | # qhasm: in7 = z7 | ||
2910 | # asm 1: movd <z7=int6464#4,>in7=int32#4 | ||
2911 | # asm 2: movd <z7=%xmm3,>in7=%ebx | ||
2912 | movd %xmm3,%ebx | ||
2913 | |||
2914 | # qhasm: z4 <<<= 96 | ||
2915 | # asm 1: pshufd $0x39,<z4=int6464#1,<z4=int6464#1 | ||
2916 | # asm 2: pshufd $0x39,<z4=%xmm0,<z4=%xmm0 | ||
2917 | pshufd $0x39,%xmm0,%xmm0 | ||
2918 | |||
2919 | # qhasm: z5 <<<= 96 | ||
2920 | # asm 1: pshufd $0x39,<z5=int6464#2,<z5=int6464#2 | ||
2921 | # asm 2: pshufd $0x39,<z5=%xmm1,<z5=%xmm1 | ||
2922 | pshufd $0x39,%xmm1,%xmm1 | ||
2923 | |||
2924 | # qhasm: z6 <<<= 96 | ||
2925 | # asm 1: pshufd $0x39,<z6=int6464#3,<z6=int6464#3 | ||
2926 | # asm 2: pshufd $0x39,<z6=%xmm2,<z6=%xmm2 | ||
2927 | pshufd $0x39,%xmm2,%xmm2 | ||
2928 | |||
2929 | # qhasm: z7 <<<= 96 | ||
2930 | # asm 1: pshufd $0x39,<z7=int6464#4,<z7=int6464#4 | ||
2931 | # asm 2: pshufd $0x39,<z7=%xmm3,<z7=%xmm3 | ||
2932 | pshufd $0x39,%xmm3,%xmm3 | ||
2933 | |||
2934 | # qhasm: in4 ^= *(uint32 *) (m + 16) | ||
2935 | # asm 1: xorl 16(<m=int32#5),<in4=int32#1 | ||
2936 | # asm 2: xorl 16(<m=%esi),<in4=%eax | ||
2937 | xorl 16(%esi),%eax | ||
2938 | |||
2939 | # qhasm: in5 ^= *(uint32 *) (m + 20) | ||
2940 | # asm 1: xorl 20(<m=int32#5),<in5=int32#2 | ||
2941 | # asm 2: xorl 20(<m=%esi),<in5=%ecx | ||
2942 | xorl 20(%esi),%ecx | ||
2943 | |||
2944 | # qhasm: in6 ^= *(uint32 *) (m + 24) | ||
2945 | # asm 1: xorl 24(<m=int32#5),<in6=int32#3 | ||
2946 | # asm 2: xorl 24(<m=%esi),<in6=%edx | ||
2947 | xorl 24(%esi),%edx | ||
2948 | |||
2949 | # qhasm: in7 ^= *(uint32 *) (m + 28) | ||
2950 | # asm 1: xorl 28(<m=int32#5),<in7=int32#4 | ||
2951 | # asm 2: xorl 28(<m=%esi),<in7=%ebx | ||
2952 | xorl 28(%esi),%ebx | ||
2953 | |||
2954 | # qhasm: *(uint32 *) (out + 16) = in4 | ||
2955 | # asm 1: movl <in4=int32#1,16(<out=int32#6) | ||
2956 | # asm 2: movl <in4=%eax,16(<out=%edi) | ||
2957 | movl %eax,16(%edi) | ||
2958 | |||
2959 | # qhasm: *(uint32 *) (out + 20) = in5 | ||
2960 | # asm 1: movl <in5=int32#2,20(<out=int32#6) | ||
2961 | # asm 2: movl <in5=%ecx,20(<out=%edi) | ||
2962 | movl %ecx,20(%edi) | ||
2963 | |||
2964 | # qhasm: *(uint32 *) (out + 24) = in6 | ||
2965 | # asm 1: movl <in6=int32#3,24(<out=int32#6) | ||
2966 | # asm 2: movl <in6=%edx,24(<out=%edi) | ||
2967 | movl %edx,24(%edi) | ||
2968 | |||
2969 | # qhasm: *(uint32 *) (out + 28) = in7 | ||
2970 | # asm 1: movl <in7=int32#4,28(<out=int32#6) | ||
2971 | # asm 2: movl <in7=%ebx,28(<out=%edi) | ||
2972 | movl %ebx,28(%edi) | ||
2973 | |||
2974 | # qhasm: in4 = z4 | ||
2975 | # asm 1: movd <z4=int6464#1,>in4=int32#1 | ||
2976 | # asm 2: movd <z4=%xmm0,>in4=%eax | ||
2977 | movd %xmm0,%eax | ||
2978 | |||
2979 | # qhasm: in5 = z5 | ||
2980 | # asm 1: movd <z5=int6464#2,>in5=int32#2 | ||
2981 | # asm 2: movd <z5=%xmm1,>in5=%ecx | ||
2982 | movd %xmm1,%ecx | ||
2983 | |||
2984 | # qhasm: in6 = z6 | ||
2985 | # asm 1: movd <z6=int6464#3,>in6=int32#3 | ||
2986 | # asm 2: movd <z6=%xmm2,>in6=%edx | ||
2987 | movd %xmm2,%edx | ||
2988 | |||
2989 | # qhasm: in7 = z7 | ||
2990 | # asm 1: movd <z7=int6464#4,>in7=int32#4 | ||
2991 | # asm 2: movd <z7=%xmm3,>in7=%ebx | ||
2992 | movd %xmm3,%ebx | ||
2993 | |||
2994 | # qhasm: z4 <<<= 96 | ||
2995 | # asm 1: pshufd $0x39,<z4=int6464#1,<z4=int6464#1 | ||
2996 | # asm 2: pshufd $0x39,<z4=%xmm0,<z4=%xmm0 | ||
2997 | pshufd $0x39,%xmm0,%xmm0 | ||
2998 | |||
2999 | # qhasm: z5 <<<= 96 | ||
3000 | # asm 1: pshufd $0x39,<z5=int6464#2,<z5=int6464#2 | ||
3001 | # asm 2: pshufd $0x39,<z5=%xmm1,<z5=%xmm1 | ||
3002 | pshufd $0x39,%xmm1,%xmm1 | ||
3003 | |||
3004 | # qhasm: z6 <<<= 96 | ||
3005 | # asm 1: pshufd $0x39,<z6=int6464#3,<z6=int6464#3 | ||
3006 | # asm 2: pshufd $0x39,<z6=%xmm2,<z6=%xmm2 | ||
3007 | pshufd $0x39,%xmm2,%xmm2 | ||
3008 | |||
3009 | # qhasm: z7 <<<= 96 | ||
3010 | # asm 1: pshufd $0x39,<z7=int6464#4,<z7=int6464#4 | ||
3011 | # asm 2: pshufd $0x39,<z7=%xmm3,<z7=%xmm3 | ||
3012 | pshufd $0x39,%xmm3,%xmm3 | ||
3013 | |||
3014 | # qhasm: in4 ^= *(uint32 *) (m + 80) | ||
3015 | # asm 1: xorl 80(<m=int32#5),<in4=int32#1 | ||
3016 | # asm 2: xorl 80(<m=%esi),<in4=%eax | ||
3017 | xorl 80(%esi),%eax | ||
3018 | |||
3019 | # qhasm: in5 ^= *(uint32 *) (m + 84) | ||
3020 | # asm 1: xorl 84(<m=int32#5),<in5=int32#2 | ||
3021 | # asm 2: xorl 84(<m=%esi),<in5=%ecx | ||
3022 | xorl 84(%esi),%ecx | ||
3023 | |||
3024 | # qhasm: in6 ^= *(uint32 *) (m + 88) | ||
3025 | # asm 1: xorl 88(<m=int32#5),<in6=int32#3 | ||
3026 | # asm 2: xorl 88(<m=%esi),<in6=%edx | ||
3027 | xorl 88(%esi),%edx | ||
3028 | |||
3029 | # qhasm: in7 ^= *(uint32 *) (m + 92) | ||
3030 | # asm 1: xorl 92(<m=int32#5),<in7=int32#4 | ||
3031 | # asm 2: xorl 92(<m=%esi),<in7=%ebx | ||
3032 | xorl 92(%esi),%ebx | ||
3033 | |||
3034 | # qhasm: *(uint32 *) (out + 80) = in4 | ||
3035 | # asm 1: movl <in4=int32#1,80(<out=int32#6) | ||
3036 | # asm 2: movl <in4=%eax,80(<out=%edi) | ||
3037 | movl %eax,80(%edi) | ||
3038 | |||
3039 | # qhasm: *(uint32 *) (out + 84) = in5 | ||
3040 | # asm 1: movl <in5=int32#2,84(<out=int32#6) | ||
3041 | # asm 2: movl <in5=%ecx,84(<out=%edi) | ||
3042 | movl %ecx,84(%edi) | ||
3043 | |||
3044 | # qhasm: *(uint32 *) (out + 88) = in6 | ||
3045 | # asm 1: movl <in6=int32#3,88(<out=int32#6) | ||
3046 | # asm 2: movl <in6=%edx,88(<out=%edi) | ||
3047 | movl %edx,88(%edi) | ||
3048 | |||
3049 | # qhasm: *(uint32 *) (out + 92) = in7 | ||
3050 | # asm 1: movl <in7=int32#4,92(<out=int32#6) | ||
3051 | # asm 2: movl <in7=%ebx,92(<out=%edi) | ||
3052 | movl %ebx,92(%edi) | ||
3053 | |||
3054 | # qhasm: in4 = z4 | ||
3055 | # asm 1: movd <z4=int6464#1,>in4=int32#1 | ||
3056 | # asm 2: movd <z4=%xmm0,>in4=%eax | ||
3057 | movd %xmm0,%eax | ||
3058 | |||
3059 | # qhasm: in5 = z5 | ||
3060 | # asm 1: movd <z5=int6464#2,>in5=int32#2 | ||
3061 | # asm 2: movd <z5=%xmm1,>in5=%ecx | ||
3062 | movd %xmm1,%ecx | ||
3063 | |||
3064 | # qhasm: in6 = z6 | ||
3065 | # asm 1: movd <z6=int6464#3,>in6=int32#3 | ||
3066 | # asm 2: movd <z6=%xmm2,>in6=%edx | ||
3067 | movd %xmm2,%edx | ||
3068 | |||
3069 | # qhasm: in7 = z7 | ||
3070 | # asm 1: movd <z7=int6464#4,>in7=int32#4 | ||
3071 | # asm 2: movd <z7=%xmm3,>in7=%ebx | ||
3072 | movd %xmm3,%ebx | ||
3073 | |||
3074 | # qhasm: z4 <<<= 96 | ||
3075 | # asm 1: pshufd $0x39,<z4=int6464#1,<z4=int6464#1 | ||
3076 | # asm 2: pshufd $0x39,<z4=%xmm0,<z4=%xmm0 | ||
3077 | pshufd $0x39,%xmm0,%xmm0 | ||
3078 | |||
3079 | # qhasm: z5 <<<= 96 | ||
3080 | # asm 1: pshufd $0x39,<z5=int6464#2,<z5=int6464#2 | ||
3081 | # asm 2: pshufd $0x39,<z5=%xmm1,<z5=%xmm1 | ||
3082 | pshufd $0x39,%xmm1,%xmm1 | ||
3083 | |||
3084 | # qhasm: z6 <<<= 96 | ||
3085 | # asm 1: pshufd $0x39,<z6=int6464#3,<z6=int6464#3 | ||
3086 | # asm 2: pshufd $0x39,<z6=%xmm2,<z6=%xmm2 | ||
3087 | pshufd $0x39,%xmm2,%xmm2 | ||
3088 | |||
3089 | # qhasm: z7 <<<= 96 | ||
3090 | # asm 1: pshufd $0x39,<z7=int6464#4,<z7=int6464#4 | ||
3091 | # asm 2: pshufd $0x39,<z7=%xmm3,<z7=%xmm3 | ||
3092 | pshufd $0x39,%xmm3,%xmm3 | ||
3093 | |||
3094 | # qhasm: in4 ^= *(uint32 *) (m + 144) | ||
3095 | # asm 1: xorl 144(<m=int32#5),<in4=int32#1 | ||
3096 | # asm 2: xorl 144(<m=%esi),<in4=%eax | ||
3097 | xorl 144(%esi),%eax | ||
3098 | |||
3099 | # qhasm: in5 ^= *(uint32 *) (m + 148) | ||
3100 | # asm 1: xorl 148(<m=int32#5),<in5=int32#2 | ||
3101 | # asm 2: xorl 148(<m=%esi),<in5=%ecx | ||
3102 | xorl 148(%esi),%ecx | ||
3103 | |||
3104 | # qhasm: in6 ^= *(uint32 *) (m + 152) | ||
3105 | # asm 1: xorl 152(<m=int32#5),<in6=int32#3 | ||
3106 | # asm 2: xorl 152(<m=%esi),<in6=%edx | ||
3107 | xorl 152(%esi),%edx | ||
3108 | |||
3109 | # qhasm: in7 ^= *(uint32 *) (m + 156) | ||
3110 | # asm 1: xorl 156(<m=int32#5),<in7=int32#4 | ||
3111 | # asm 2: xorl 156(<m=%esi),<in7=%ebx | ||
3112 | xorl 156(%esi),%ebx | ||
3113 | |||
3114 | # qhasm: *(uint32 *) (out + 144) = in4 | ||
3115 | # asm 1: movl <in4=int32#1,144(<out=int32#6) | ||
3116 | # asm 2: movl <in4=%eax,144(<out=%edi) | ||
3117 | movl %eax,144(%edi) | ||
3118 | |||
3119 | # qhasm: *(uint32 *) (out + 148) = in5 | ||
3120 | # asm 1: movl <in5=int32#2,148(<out=int32#6) | ||
3121 | # asm 2: movl <in5=%ecx,148(<out=%edi) | ||
3122 | movl %ecx,148(%edi) | ||
3123 | |||
3124 | # qhasm: *(uint32 *) (out + 152) = in6 | ||
3125 | # asm 1: movl <in6=int32#3,152(<out=int32#6) | ||
3126 | # asm 2: movl <in6=%edx,152(<out=%edi) | ||
3127 | movl %edx,152(%edi) | ||
3128 | |||
3129 | # qhasm: *(uint32 *) (out + 156) = in7 | ||
3130 | # asm 1: movl <in7=int32#4,156(<out=int32#6) | ||
3131 | # asm 2: movl <in7=%ebx,156(<out=%edi) | ||
3132 | movl %ebx,156(%edi) | ||
3133 | |||
3134 | # qhasm: in4 = z4 | ||
3135 | # asm 1: movd <z4=int6464#1,>in4=int32#1 | ||
3136 | # asm 2: movd <z4=%xmm0,>in4=%eax | ||
3137 | movd %xmm0,%eax | ||
3138 | |||
3139 | # qhasm: in5 = z5 | ||
3140 | # asm 1: movd <z5=int6464#2,>in5=int32#2 | ||
3141 | # asm 2: movd <z5=%xmm1,>in5=%ecx | ||
3142 | movd %xmm1,%ecx | ||
3143 | |||
3144 | # qhasm: in6 = z6 | ||
3145 | # asm 1: movd <z6=int6464#3,>in6=int32#3 | ||
3146 | # asm 2: movd <z6=%xmm2,>in6=%edx | ||
3147 | movd %xmm2,%edx | ||
3148 | |||
3149 | # qhasm: in7 = z7 | ||
3150 | # asm 1: movd <z7=int6464#4,>in7=int32#4 | ||
3151 | # asm 2: movd <z7=%xmm3,>in7=%ebx | ||
3152 | movd %xmm3,%ebx | ||
3153 | |||
3154 | # qhasm: in4 ^= *(uint32 *) (m + 208) | ||
3155 | # asm 1: xorl 208(<m=int32#5),<in4=int32#1 | ||
3156 | # asm 2: xorl 208(<m=%esi),<in4=%eax | ||
3157 | xorl 208(%esi),%eax | ||
3158 | |||
3159 | # qhasm: in5 ^= *(uint32 *) (m + 212) | ||
3160 | # asm 1: xorl 212(<m=int32#5),<in5=int32#2 | ||
3161 | # asm 2: xorl 212(<m=%esi),<in5=%ecx | ||
3162 | xorl 212(%esi),%ecx | ||
3163 | |||
3164 | # qhasm: in6 ^= *(uint32 *) (m + 216) | ||
3165 | # asm 1: xorl 216(<m=int32#5),<in6=int32#3 | ||
3166 | # asm 2: xorl 216(<m=%esi),<in6=%edx | ||
3167 | xorl 216(%esi),%edx | ||
3168 | |||
3169 | # qhasm: in7 ^= *(uint32 *) (m + 220) | ||
3170 | # asm 1: xorl 220(<m=int32#5),<in7=int32#4 | ||
3171 | # asm 2: xorl 220(<m=%esi),<in7=%ebx | ||
3172 | xorl 220(%esi),%ebx | ||
3173 | |||
3174 | # qhasm: *(uint32 *) (out + 208) = in4 | ||
3175 | # asm 1: movl <in4=int32#1,208(<out=int32#6) | ||
3176 | # asm 2: movl <in4=%eax,208(<out=%edi) | ||
3177 | movl %eax,208(%edi) | ||
3178 | |||
3179 | # qhasm: *(uint32 *) (out + 212) = in5 | ||
3180 | # asm 1: movl <in5=int32#2,212(<out=int32#6) | ||
3181 | # asm 2: movl <in5=%ecx,212(<out=%edi) | ||
3182 | movl %ecx,212(%edi) | ||
3183 | |||
3184 | # qhasm: *(uint32 *) (out + 216) = in6 | ||
3185 | # asm 1: movl <in6=int32#3,216(<out=int32#6) | ||
3186 | # asm 2: movl <in6=%edx,216(<out=%edi) | ||
3187 | movl %edx,216(%edi) | ||
3188 | |||
3189 | # qhasm: *(uint32 *) (out + 220) = in7 | ||
3190 | # asm 1: movl <in7=int32#4,220(<out=int32#6) | ||
3191 | # asm 2: movl <in7=%ebx,220(<out=%edi) | ||
3192 | movl %ebx,220(%edi) | ||
3193 | |||
3194 | # qhasm: z8 = z8_stack | ||
3195 | # asm 1: movdqa <z8_stack=stack128#37,>z8=int6464#1 | ||
3196 | # asm 2: movdqa <z8_stack=608(%esp),>z8=%xmm0 | ||
3197 | movdqa 608(%esp),%xmm0 | ||
3198 | |||
3199 | # qhasm: z9 = z9_stack | ||
3200 | # asm 1: movdqa <z9_stack=stack128#32,>z9=int6464#2 | ||
3201 | # asm 2: movdqa <z9_stack=528(%esp),>z9=%xmm1 | ||
3202 | movdqa 528(%esp),%xmm1 | ||
3203 | |||
3204 | # qhasm: z10 = z10_stack | ||
3205 | # asm 1: movdqa <z10_stack=stack128#22,>z10=int6464#3 | ||
3206 | # asm 2: movdqa <z10_stack=368(%esp),>z10=%xmm2 | ||
3207 | movdqa 368(%esp),%xmm2 | ||
3208 | |||
3209 | # qhasm: z11 = z11_stack | ||
3210 | # asm 1: movdqa <z11_stack=stack128#27,>z11=int6464#4 | ||
3211 | # asm 2: movdqa <z11_stack=448(%esp),>z11=%xmm3 | ||
3212 | movdqa 448(%esp),%xmm3 | ||
3213 | |||
3214 | # qhasm: uint32323232 z8 += orig8 | ||
3215 | # asm 1: paddd <orig8=stack128#19,<z8=int6464#1 | ||
3216 | # asm 2: paddd <orig8=320(%esp),<z8=%xmm0 | ||
3217 | paddd 320(%esp),%xmm0 | ||
3218 | |||
3219 | # qhasm: uint32323232 z9 += orig9 | ||
3220 | # asm 1: paddd <orig9=stack128#20,<z9=int6464#2 | ||
3221 | # asm 2: paddd <orig9=336(%esp),<z9=%xmm1 | ||
3222 | paddd 336(%esp),%xmm1 | ||
3223 | |||
3224 | # qhasm: uint32323232 z10 += orig10 | ||
3225 | # asm 1: paddd <orig10=stack128#6,<z10=int6464#3 | ||
3226 | # asm 2: paddd <orig10=112(%esp),<z10=%xmm2 | ||
3227 | paddd 112(%esp),%xmm2 | ||
3228 | |||
3229 | # qhasm: uint32323232 z11 += orig11 | ||
3230 | # asm 1: paddd <orig11=stack128#10,<z11=int6464#4 | ||
3231 | # asm 2: paddd <orig11=176(%esp),<z11=%xmm3 | ||
3232 | paddd 176(%esp),%xmm3 | ||
3233 | |||
3234 | # qhasm: in8 = z8 | ||
3235 | # asm 1: movd <z8=int6464#1,>in8=int32#1 | ||
3236 | # asm 2: movd <z8=%xmm0,>in8=%eax | ||
3237 | movd %xmm0,%eax | ||
3238 | |||
3239 | # qhasm: in9 = z9 | ||
3240 | # asm 1: movd <z9=int6464#2,>in9=int32#2 | ||
3241 | # asm 2: movd <z9=%xmm1,>in9=%ecx | ||
3242 | movd %xmm1,%ecx | ||
3243 | |||
3244 | # qhasm: in10 = z10 | ||
3245 | # asm 1: movd <z10=int6464#3,>in10=int32#3 | ||
3246 | # asm 2: movd <z10=%xmm2,>in10=%edx | ||
3247 | movd %xmm2,%edx | ||
3248 | |||
3249 | # qhasm: in11 = z11 | ||
3250 | # asm 1: movd <z11=int6464#4,>in11=int32#4 | ||
3251 | # asm 2: movd <z11=%xmm3,>in11=%ebx | ||
3252 | movd %xmm3,%ebx | ||
3253 | |||
3254 | # qhasm: z8 <<<= 96 | ||
3255 | # asm 1: pshufd $0x39,<z8=int6464#1,<z8=int6464#1 | ||
3256 | # asm 2: pshufd $0x39,<z8=%xmm0,<z8=%xmm0 | ||
3257 | pshufd $0x39,%xmm0,%xmm0 | ||
3258 | |||
3259 | # qhasm: z9 <<<= 96 | ||
3260 | # asm 1: pshufd $0x39,<z9=int6464#2,<z9=int6464#2 | ||
3261 | # asm 2: pshufd $0x39,<z9=%xmm1,<z9=%xmm1 | ||
3262 | pshufd $0x39,%xmm1,%xmm1 | ||
3263 | |||
3264 | # qhasm: z10 <<<= 96 | ||
3265 | # asm 1: pshufd $0x39,<z10=int6464#3,<z10=int6464#3 | ||
3266 | # asm 2: pshufd $0x39,<z10=%xmm2,<z10=%xmm2 | ||
3267 | pshufd $0x39,%xmm2,%xmm2 | ||
3268 | |||
3269 | # qhasm: z11 <<<= 96 | ||
3270 | # asm 1: pshufd $0x39,<z11=int6464#4,<z11=int6464#4 | ||
3271 | # asm 2: pshufd $0x39,<z11=%xmm3,<z11=%xmm3 | ||
3272 | pshufd $0x39,%xmm3,%xmm3 | ||
3273 | |||
3274 | # qhasm: in8 ^= *(uint32 *) (m + 32) | ||
3275 | # asm 1: xorl 32(<m=int32#5),<in8=int32#1 | ||
3276 | # asm 2: xorl 32(<m=%esi),<in8=%eax | ||
3277 | xorl 32(%esi),%eax | ||
3278 | |||
3279 | # qhasm: in9 ^= *(uint32 *) (m + 36) | ||
3280 | # asm 1: xorl 36(<m=int32#5),<in9=int32#2 | ||
3281 | # asm 2: xorl 36(<m=%esi),<in9=%ecx | ||
3282 | xorl 36(%esi),%ecx | ||
3283 | |||
3284 | # qhasm: in10 ^= *(uint32 *) (m + 40) | ||
3285 | # asm 1: xorl 40(<m=int32#5),<in10=int32#3 | ||
3286 | # asm 2: xorl 40(<m=%esi),<in10=%edx | ||
3287 | xorl 40(%esi),%edx | ||
3288 | |||
3289 | # qhasm: in11 ^= *(uint32 *) (m + 44) | ||
3290 | # asm 1: xorl 44(<m=int32#5),<in11=int32#4 | ||
3291 | # asm 2: xorl 44(<m=%esi),<in11=%ebx | ||
3292 | xorl 44(%esi),%ebx | ||
3293 | |||
3294 | # qhasm: *(uint32 *) (out + 32) = in8 | ||
3295 | # asm 1: movl <in8=int32#1,32(<out=int32#6) | ||
3296 | # asm 2: movl <in8=%eax,32(<out=%edi) | ||
3297 | movl %eax,32(%edi) | ||
3298 | |||
3299 | # qhasm: *(uint32 *) (out + 36) = in9 | ||
3300 | # asm 1: movl <in9=int32#2,36(<out=int32#6) | ||
3301 | # asm 2: movl <in9=%ecx,36(<out=%edi) | ||
3302 | movl %ecx,36(%edi) | ||
3303 | |||
3304 | # qhasm: *(uint32 *) (out + 40) = in10 | ||
3305 | # asm 1: movl <in10=int32#3,40(<out=int32#6) | ||
3306 | # asm 2: movl <in10=%edx,40(<out=%edi) | ||
3307 | movl %edx,40(%edi) | ||
3308 | |||
3309 | # qhasm: *(uint32 *) (out + 44) = in11 | ||
3310 | # asm 1: movl <in11=int32#4,44(<out=int32#6) | ||
3311 | # asm 2: movl <in11=%ebx,44(<out=%edi) | ||
3312 | movl %ebx,44(%edi) | ||
3313 | |||
3314 | # qhasm: in8 = z8 | ||
3315 | # asm 1: movd <z8=int6464#1,>in8=int32#1 | ||
3316 | # asm 2: movd <z8=%xmm0,>in8=%eax | ||
3317 | movd %xmm0,%eax | ||
3318 | |||
3319 | # qhasm: in9 = z9 | ||
3320 | # asm 1: movd <z9=int6464#2,>in9=int32#2 | ||
3321 | # asm 2: movd <z9=%xmm1,>in9=%ecx | ||
3322 | movd %xmm1,%ecx | ||
3323 | |||
3324 | # qhasm: in10 = z10 | ||
3325 | # asm 1: movd <z10=int6464#3,>in10=int32#3 | ||
3326 | # asm 2: movd <z10=%xmm2,>in10=%edx | ||
3327 | movd %xmm2,%edx | ||
3328 | |||
3329 | # qhasm: in11 = z11 | ||
3330 | # asm 1: movd <z11=int6464#4,>in11=int32#4 | ||
3331 | # asm 2: movd <z11=%xmm3,>in11=%ebx | ||
3332 | movd %xmm3,%ebx | ||
3333 | |||
3334 | # qhasm: z8 <<<= 96 | ||
3335 | # asm 1: pshufd $0x39,<z8=int6464#1,<z8=int6464#1 | ||
3336 | # asm 2: pshufd $0x39,<z8=%xmm0,<z8=%xmm0 | ||
3337 | pshufd $0x39,%xmm0,%xmm0 | ||
3338 | |||
3339 | # qhasm: z9 <<<= 96 | ||
3340 | # asm 1: pshufd $0x39,<z9=int6464#2,<z9=int6464#2 | ||
3341 | # asm 2: pshufd $0x39,<z9=%xmm1,<z9=%xmm1 | ||
3342 | pshufd $0x39,%xmm1,%xmm1 | ||
3343 | |||
3344 | # qhasm: z10 <<<= 96 | ||
3345 | # asm 1: pshufd $0x39,<z10=int6464#3,<z10=int6464#3 | ||
3346 | # asm 2: pshufd $0x39,<z10=%xmm2,<z10=%xmm2 | ||
3347 | pshufd $0x39,%xmm2,%xmm2 | ||
3348 | |||
3349 | # qhasm: z11 <<<= 96 | ||
3350 | # asm 1: pshufd $0x39,<z11=int6464#4,<z11=int6464#4 | ||
3351 | # asm 2: pshufd $0x39,<z11=%xmm3,<z11=%xmm3 | ||
3352 | pshufd $0x39,%xmm3,%xmm3 | ||
3353 | |||
3354 | # qhasm: in8 ^= *(uint32 *) (m + 96) | ||
3355 | # asm 1: xorl 96(<m=int32#5),<in8=int32#1 | ||
3356 | # asm 2: xorl 96(<m=%esi),<in8=%eax | ||
3357 | xorl 96(%esi),%eax | ||
3358 | |||
3359 | # qhasm: in9 ^= *(uint32 *) (m + 100) | ||
3360 | # asm 1: xorl 100(<m=int32#5),<in9=int32#2 | ||
3361 | # asm 2: xorl 100(<m=%esi),<in9=%ecx | ||
3362 | xorl 100(%esi),%ecx | ||
3363 | |||
3364 | # qhasm: in10 ^= *(uint32 *) (m + 104) | ||
3365 | # asm 1: xorl 104(<m=int32#5),<in10=int32#3 | ||
3366 | # asm 2: xorl 104(<m=%esi),<in10=%edx | ||
3367 | xorl 104(%esi),%edx | ||
3368 | |||
3369 | # qhasm: in11 ^= *(uint32 *) (m + 108) | ||
3370 | # asm 1: xorl 108(<m=int32#5),<in11=int32#4 | ||
3371 | # asm 2: xorl 108(<m=%esi),<in11=%ebx | ||
3372 | xorl 108(%esi),%ebx | ||
3373 | |||
3374 | # qhasm: *(uint32 *) (out + 96) = in8 | ||
3375 | # asm 1: movl <in8=int32#1,96(<out=int32#6) | ||
3376 | # asm 2: movl <in8=%eax,96(<out=%edi) | ||
3377 | movl %eax,96(%edi) | ||
3378 | |||
3379 | # qhasm: *(uint32 *) (out + 100) = in9 | ||
3380 | # asm 1: movl <in9=int32#2,100(<out=int32#6) | ||
3381 | # asm 2: movl <in9=%ecx,100(<out=%edi) | ||
3382 | movl %ecx,100(%edi) | ||
3383 | |||
3384 | # qhasm: *(uint32 *) (out + 104) = in10 | ||
3385 | # asm 1: movl <in10=int32#3,104(<out=int32#6) | ||
3386 | # asm 2: movl <in10=%edx,104(<out=%edi) | ||
3387 | movl %edx,104(%edi) | ||
3388 | |||
3389 | # qhasm: *(uint32 *) (out + 108) = in11 | ||
3390 | # asm 1: movl <in11=int32#4,108(<out=int32#6) | ||
3391 | # asm 2: movl <in11=%ebx,108(<out=%edi) | ||
3392 | movl %ebx,108(%edi) | ||
3393 | |||
3394 | # qhasm: in8 = z8 | ||
3395 | # asm 1: movd <z8=int6464#1,>in8=int32#1 | ||
3396 | # asm 2: movd <z8=%xmm0,>in8=%eax | ||
3397 | movd %xmm0,%eax | ||
3398 | |||
3399 | # qhasm: in9 = z9 | ||
3400 | # asm 1: movd <z9=int6464#2,>in9=int32#2 | ||
3401 | # asm 2: movd <z9=%xmm1,>in9=%ecx | ||
3402 | movd %xmm1,%ecx | ||
3403 | |||
3404 | # qhasm: in10 = z10 | ||
3405 | # asm 1: movd <z10=int6464#3,>in10=int32#3 | ||
3406 | # asm 2: movd <z10=%xmm2,>in10=%edx | ||
3407 | movd %xmm2,%edx | ||
3408 | |||
3409 | # qhasm: in11 = z11 | ||
3410 | # asm 1: movd <z11=int6464#4,>in11=int32#4 | ||
3411 | # asm 2: movd <z11=%xmm3,>in11=%ebx | ||
3412 | movd %xmm3,%ebx | ||
3413 | |||
3414 | # qhasm: z8 <<<= 96 | ||
3415 | # asm 1: pshufd $0x39,<z8=int6464#1,<z8=int6464#1 | ||
3416 | # asm 2: pshufd $0x39,<z8=%xmm0,<z8=%xmm0 | ||
3417 | pshufd $0x39,%xmm0,%xmm0 | ||
3418 | |||
3419 | # qhasm: z9 <<<= 96 | ||
3420 | # asm 1: pshufd $0x39,<z9=int6464#2,<z9=int6464#2 | ||
3421 | # asm 2: pshufd $0x39,<z9=%xmm1,<z9=%xmm1 | ||
3422 | pshufd $0x39,%xmm1,%xmm1 | ||
3423 | |||
3424 | # qhasm: z10 <<<= 96 | ||
3425 | # asm 1: pshufd $0x39,<z10=int6464#3,<z10=int6464#3 | ||
3426 | # asm 2: pshufd $0x39,<z10=%xmm2,<z10=%xmm2 | ||
3427 | pshufd $0x39,%xmm2,%xmm2 | ||
3428 | |||
3429 | # qhasm: z11 <<<= 96 | ||
3430 | # asm 1: pshufd $0x39,<z11=int6464#4,<z11=int6464#4 | ||
3431 | # asm 2: pshufd $0x39,<z11=%xmm3,<z11=%xmm3 | ||
3432 | pshufd $0x39,%xmm3,%xmm3 | ||
3433 | |||
3434 | # qhasm: in8 ^= *(uint32 *) (m + 160) | ||
3435 | # asm 1: xorl 160(<m=int32#5),<in8=int32#1 | ||
3436 | # asm 2: xorl 160(<m=%esi),<in8=%eax | ||
3437 | xorl 160(%esi),%eax | ||
3438 | |||
3439 | # qhasm: in9 ^= *(uint32 *) (m + 164) | ||
3440 | # asm 1: xorl 164(<m=int32#5),<in9=int32#2 | ||
3441 | # asm 2: xorl 164(<m=%esi),<in9=%ecx | ||
3442 | xorl 164(%esi),%ecx | ||
3443 | |||
3444 | # qhasm: in10 ^= *(uint32 *) (m + 168) | ||
3445 | # asm 1: xorl 168(<m=int32#5),<in10=int32#3 | ||
3446 | # asm 2: xorl 168(<m=%esi),<in10=%edx | ||
3447 | xorl 168(%esi),%edx | ||
3448 | |||
3449 | # qhasm: in11 ^= *(uint32 *) (m + 172) | ||
3450 | # asm 1: xorl 172(<m=int32#5),<in11=int32#4 | ||
3451 | # asm 2: xorl 172(<m=%esi),<in11=%ebx | ||
3452 | xorl 172(%esi),%ebx | ||
3453 | |||
3454 | # qhasm: *(uint32 *) (out + 160) = in8 | ||
3455 | # asm 1: movl <in8=int32#1,160(<out=int32#6) | ||
3456 | # asm 2: movl <in8=%eax,160(<out=%edi) | ||
3457 | movl %eax,160(%edi) | ||
3458 | |||
3459 | # qhasm: *(uint32 *) (out + 164) = in9 | ||
3460 | # asm 1: movl <in9=int32#2,164(<out=int32#6) | ||
3461 | # asm 2: movl <in9=%ecx,164(<out=%edi) | ||
3462 | movl %ecx,164(%edi) | ||
3463 | |||
3464 | # qhasm: *(uint32 *) (out + 168) = in10 | ||
3465 | # asm 1: movl <in10=int32#3,168(<out=int32#6) | ||
3466 | # asm 2: movl <in10=%edx,168(<out=%edi) | ||
3467 | movl %edx,168(%edi) | ||
3468 | |||
3469 | # qhasm: *(uint32 *) (out + 172) = in11 | ||
3470 | # asm 1: movl <in11=int32#4,172(<out=int32#6) | ||
3471 | # asm 2: movl <in11=%ebx,172(<out=%edi) | ||
3472 | movl %ebx,172(%edi) | ||
3473 | |||
3474 | # qhasm: in8 = z8 | ||
3475 | # asm 1: movd <z8=int6464#1,>in8=int32#1 | ||
3476 | # asm 2: movd <z8=%xmm0,>in8=%eax | ||
3477 | movd %xmm0,%eax | ||
3478 | |||
3479 | # qhasm: in9 = z9 | ||
3480 | # asm 1: movd <z9=int6464#2,>in9=int32#2 | ||
3481 | # asm 2: movd <z9=%xmm1,>in9=%ecx | ||
3482 | movd %xmm1,%ecx | ||
3483 | |||
3484 | # qhasm: in10 = z10 | ||
3485 | # asm 1: movd <z10=int6464#3,>in10=int32#3 | ||
3486 | # asm 2: movd <z10=%xmm2,>in10=%edx | ||
3487 | movd %xmm2,%edx | ||
3488 | |||
3489 | # qhasm: in11 = z11 | ||
3490 | # asm 1: movd <z11=int6464#4,>in11=int32#4 | ||
3491 | # asm 2: movd <z11=%xmm3,>in11=%ebx | ||
3492 | movd %xmm3,%ebx | ||
3493 | |||
3494 | # qhasm: in8 ^= *(uint32 *) (m + 224) | ||
3495 | # asm 1: xorl 224(<m=int32#5),<in8=int32#1 | ||
3496 | # asm 2: xorl 224(<m=%esi),<in8=%eax | ||
3497 | xorl 224(%esi),%eax | ||
3498 | |||
3499 | # qhasm: in9 ^= *(uint32 *) (m + 228) | ||
3500 | # asm 1: xorl 228(<m=int32#5),<in9=int32#2 | ||
3501 | # asm 2: xorl 228(<m=%esi),<in9=%ecx | ||
3502 | xorl 228(%esi),%ecx | ||
3503 | |||
3504 | # qhasm: in10 ^= *(uint32 *) (m + 232) | ||
3505 | # asm 1: xorl 232(<m=int32#5),<in10=int32#3 | ||
3506 | # asm 2: xorl 232(<m=%esi),<in10=%edx | ||
3507 | xorl 232(%esi),%edx | ||
3508 | |||
3509 | # qhasm: in11 ^= *(uint32 *) (m + 236) | ||
3510 | # asm 1: xorl 236(<m=int32#5),<in11=int32#4 | ||
3511 | # asm 2: xorl 236(<m=%esi),<in11=%ebx | ||
3512 | xorl 236(%esi),%ebx | ||
3513 | |||
3514 | # qhasm: *(uint32 *) (out + 224) = in8 | ||
3515 | # asm 1: movl <in8=int32#1,224(<out=int32#6) | ||
3516 | # asm 2: movl <in8=%eax,224(<out=%edi) | ||
3517 | movl %eax,224(%edi) | ||
3518 | |||
3519 | # qhasm: *(uint32 *) (out + 228) = in9 | ||
3520 | # asm 1: movl <in9=int32#2,228(<out=int32#6) | ||
3521 | # asm 2: movl <in9=%ecx,228(<out=%edi) | ||
3522 | movl %ecx,228(%edi) | ||
3523 | |||
3524 | # qhasm: *(uint32 *) (out + 232) = in10 | ||
3525 | # asm 1: movl <in10=int32#3,232(<out=int32#6) | ||
3526 | # asm 2: movl <in10=%edx,232(<out=%edi) | ||
3527 | movl %edx,232(%edi) | ||
3528 | |||
3529 | # qhasm: *(uint32 *) (out + 236) = in11 | ||
3530 | # asm 1: movl <in11=int32#4,236(<out=int32#6) | ||
3531 | # asm 2: movl <in11=%ebx,236(<out=%edi) | ||
3532 | movl %ebx,236(%edi) | ||
3533 | |||
3534 | # qhasm: z12 = z12_stack | ||
3535 | # asm 1: movdqa <z12_stack=stack128#35,>z12=int6464#1 | ||
3536 | # asm 2: movdqa <z12_stack=576(%esp),>z12=%xmm0 | ||
3537 | movdqa 576(%esp),%xmm0 | ||
3538 | |||
3539 | # qhasm: z13 = z13_stack | ||
3540 | # asm 1: movdqa <z13_stack=stack128#30,>z13=int6464#2 | ||
3541 | # asm 2: movdqa <z13_stack=496(%esp),>z13=%xmm1 | ||
3542 | movdqa 496(%esp),%xmm1 | ||
3543 | |||
3544 | # qhasm: z14 = z14_stack | ||
3545 | # asm 1: movdqa <z14_stack=stack128#24,>z14=int6464#3 | ||
3546 | # asm 2: movdqa <z14_stack=400(%esp),>z14=%xmm2 | ||
3547 | movdqa 400(%esp),%xmm2 | ||
3548 | |||
3549 | # qhasm: z15 = z15_stack | ||
3550 | # asm 1: movdqa <z15_stack=stack128#23,>z15=int6464#4 | ||
3551 | # asm 2: movdqa <z15_stack=384(%esp),>z15=%xmm3 | ||
3552 | movdqa 384(%esp),%xmm3 | ||
3553 | |||
3554 | # qhasm: uint32323232 z12 += orig12 | ||
3555 | # asm 1: paddd <orig12=stack128#11,<z12=int6464#1 | ||
3556 | # asm 2: paddd <orig12=192(%esp),<z12=%xmm0 | ||
3557 | paddd 192(%esp),%xmm0 | ||
3558 | |||
3559 | # qhasm: uint32323232 z13 += orig13 | ||
3560 | # asm 1: paddd <orig13=stack128#14,<z13=int6464#2 | ||
3561 | # asm 2: paddd <orig13=240(%esp),<z13=%xmm1 | ||
3562 | paddd 240(%esp),%xmm1 | ||
3563 | |||
3564 | # qhasm: uint32323232 z14 += orig14 | ||
3565 | # asm 1: paddd <orig14=stack128#17,<z14=int6464#3 | ||
3566 | # asm 2: paddd <orig14=288(%esp),<z14=%xmm2 | ||
3567 | paddd 288(%esp),%xmm2 | ||
3568 | |||
3569 | # qhasm: uint32323232 z15 += orig15 | ||
3570 | # asm 1: paddd <orig15=stack128#7,<z15=int6464#4 | ||
3571 | # asm 2: paddd <orig15=128(%esp),<z15=%xmm3 | ||
3572 | paddd 128(%esp),%xmm3 | ||
3573 | |||
3574 | # qhasm: in12 = z12 | ||
3575 | # asm 1: movd <z12=int6464#1,>in12=int32#1 | ||
3576 | # asm 2: movd <z12=%xmm0,>in12=%eax | ||
3577 | movd %xmm0,%eax | ||
3578 | |||
3579 | # qhasm: in13 = z13 | ||
3580 | # asm 1: movd <z13=int6464#2,>in13=int32#2 | ||
3581 | # asm 2: movd <z13=%xmm1,>in13=%ecx | ||
3582 | movd %xmm1,%ecx | ||
3583 | |||
3584 | # qhasm: in14 = z14 | ||
3585 | # asm 1: movd <z14=int6464#3,>in14=int32#3 | ||
3586 | # asm 2: movd <z14=%xmm2,>in14=%edx | ||
3587 | movd %xmm2,%edx | ||
3588 | |||
3589 | # qhasm: in15 = z15 | ||
3590 | # asm 1: movd <z15=int6464#4,>in15=int32#4 | ||
3591 | # asm 2: movd <z15=%xmm3,>in15=%ebx | ||
3592 | movd %xmm3,%ebx | ||
3593 | |||
3594 | # qhasm: z12 <<<= 96 | ||
3595 | # asm 1: pshufd $0x39,<z12=int6464#1,<z12=int6464#1 | ||
3596 | # asm 2: pshufd $0x39,<z12=%xmm0,<z12=%xmm0 | ||
3597 | pshufd $0x39,%xmm0,%xmm0 | ||
3598 | |||
3599 | # qhasm: z13 <<<= 96 | ||
3600 | # asm 1: pshufd $0x39,<z13=int6464#2,<z13=int6464#2 | ||
3601 | # asm 2: pshufd $0x39,<z13=%xmm1,<z13=%xmm1 | ||
3602 | pshufd $0x39,%xmm1,%xmm1 | ||
3603 | |||
3604 | # qhasm: z14 <<<= 96 | ||
3605 | # asm 1: pshufd $0x39,<z14=int6464#3,<z14=int6464#3 | ||
3606 | # asm 2: pshufd $0x39,<z14=%xmm2,<z14=%xmm2 | ||
3607 | pshufd $0x39,%xmm2,%xmm2 | ||
3608 | |||
3609 | # qhasm: z15 <<<= 96 | ||
3610 | # asm 1: pshufd $0x39,<z15=int6464#4,<z15=int6464#4 | ||
3611 | # asm 2: pshufd $0x39,<z15=%xmm3,<z15=%xmm3 | ||
3612 | pshufd $0x39,%xmm3,%xmm3 | ||
3613 | |||
3614 | # qhasm: in12 ^= *(uint32 *) (m + 48) | ||
3615 | # asm 1: xorl 48(<m=int32#5),<in12=int32#1 | ||
3616 | # asm 2: xorl 48(<m=%esi),<in12=%eax | ||
3617 | xorl 48(%esi),%eax | ||
3618 | |||
3619 | # qhasm: in13 ^= *(uint32 *) (m + 52) | ||
3620 | # asm 1: xorl 52(<m=int32#5),<in13=int32#2 | ||
3621 | # asm 2: xorl 52(<m=%esi),<in13=%ecx | ||
3622 | xorl 52(%esi),%ecx | ||
3623 | |||
3624 | # qhasm: in14 ^= *(uint32 *) (m + 56) | ||
3625 | # asm 1: xorl 56(<m=int32#5),<in14=int32#3 | ||
3626 | # asm 2: xorl 56(<m=%esi),<in14=%edx | ||
3627 | xorl 56(%esi),%edx | ||
3628 | |||
3629 | # qhasm: in15 ^= *(uint32 *) (m + 60) | ||
3630 | # asm 1: xorl 60(<m=int32#5),<in15=int32#4 | ||
3631 | # asm 2: xorl 60(<m=%esi),<in15=%ebx | ||
3632 | xorl 60(%esi),%ebx | ||
3633 | |||
3634 | # qhasm: *(uint32 *) (out + 48) = in12 | ||
3635 | # asm 1: movl <in12=int32#1,48(<out=int32#6) | ||
3636 | # asm 2: movl <in12=%eax,48(<out=%edi) | ||
3637 | movl %eax,48(%edi) | ||
3638 | |||
3639 | # qhasm: *(uint32 *) (out + 52) = in13 | ||
3640 | # asm 1: movl <in13=int32#2,52(<out=int32#6) | ||
3641 | # asm 2: movl <in13=%ecx,52(<out=%edi) | ||
3642 | movl %ecx,52(%edi) | ||
3643 | |||
3644 | # qhasm: *(uint32 *) (out + 56) = in14 | ||
3645 | # asm 1: movl <in14=int32#3,56(<out=int32#6) | ||
3646 | # asm 2: movl <in14=%edx,56(<out=%edi) | ||
3647 | movl %edx,56(%edi) | ||
3648 | |||
3649 | # qhasm: *(uint32 *) (out + 60) = in15 | ||
3650 | # asm 1: movl <in15=int32#4,60(<out=int32#6) | ||
3651 | # asm 2: movl <in15=%ebx,60(<out=%edi) | ||
3652 | movl %ebx,60(%edi) | ||
3653 | |||
3654 | # qhasm: in12 = z12 | ||
3655 | # asm 1: movd <z12=int6464#1,>in12=int32#1 | ||
3656 | # asm 2: movd <z12=%xmm0,>in12=%eax | ||
3657 | movd %xmm0,%eax | ||
3658 | |||
3659 | # qhasm: in13 = z13 | ||
3660 | # asm 1: movd <z13=int6464#2,>in13=int32#2 | ||
3661 | # asm 2: movd <z13=%xmm1,>in13=%ecx | ||
3662 | movd %xmm1,%ecx | ||
3663 | |||
3664 | # qhasm: in14 = z14 | ||
3665 | # asm 1: movd <z14=int6464#3,>in14=int32#3 | ||
3666 | # asm 2: movd <z14=%xmm2,>in14=%edx | ||
3667 | movd %xmm2,%edx | ||
3668 | |||
3669 | # qhasm: in15 = z15 | ||
3670 | # asm 1: movd <z15=int6464#4,>in15=int32#4 | ||
3671 | # asm 2: movd <z15=%xmm3,>in15=%ebx | ||
3672 | movd %xmm3,%ebx | ||
3673 | |||
3674 | # qhasm: z12 <<<= 96 | ||
3675 | # asm 1: pshufd $0x39,<z12=int6464#1,<z12=int6464#1 | ||
3676 | # asm 2: pshufd $0x39,<z12=%xmm0,<z12=%xmm0 | ||
3677 | pshufd $0x39,%xmm0,%xmm0 | ||
3678 | |||
3679 | # qhasm: z13 <<<= 96 | ||
3680 | # asm 1: pshufd $0x39,<z13=int6464#2,<z13=int6464#2 | ||
3681 | # asm 2: pshufd $0x39,<z13=%xmm1,<z13=%xmm1 | ||
3682 | pshufd $0x39,%xmm1,%xmm1 | ||
3683 | |||
3684 | # qhasm: z14 <<<= 96 | ||
3685 | # asm 1: pshufd $0x39,<z14=int6464#3,<z14=int6464#3 | ||
3686 | # asm 2: pshufd $0x39,<z14=%xmm2,<z14=%xmm2 | ||
3687 | pshufd $0x39,%xmm2,%xmm2 | ||
3688 | |||
3689 | # qhasm: z15 <<<= 96 | ||
3690 | # asm 1: pshufd $0x39,<z15=int6464#4,<z15=int6464#4 | ||
3691 | # asm 2: pshufd $0x39,<z15=%xmm3,<z15=%xmm3 | ||
3692 | pshufd $0x39,%xmm3,%xmm3 | ||
3693 | |||
3694 | # qhasm: in12 ^= *(uint32 *) (m + 112) | ||
3695 | # asm 1: xorl 112(<m=int32#5),<in12=int32#1 | ||
3696 | # asm 2: xorl 112(<m=%esi),<in12=%eax | ||
3697 | xorl 112(%esi),%eax | ||
3698 | |||
3699 | # qhasm: in13 ^= *(uint32 *) (m + 116) | ||
3700 | # asm 1: xorl 116(<m=int32#5),<in13=int32#2 | ||
3701 | # asm 2: xorl 116(<m=%esi),<in13=%ecx | ||
3702 | xorl 116(%esi),%ecx | ||
3703 | |||
3704 | # qhasm: in14 ^= *(uint32 *) (m + 120) | ||
3705 | # asm 1: xorl 120(<m=int32#5),<in14=int32#3 | ||
3706 | # asm 2: xorl 120(<m=%esi),<in14=%edx | ||
3707 | xorl 120(%esi),%edx | ||
3708 | |||
3709 | # qhasm: in15 ^= *(uint32 *) (m + 124) | ||
3710 | # asm 1: xorl 124(<m=int32#5),<in15=int32#4 | ||
3711 | # asm 2: xorl 124(<m=%esi),<in15=%ebx | ||
3712 | xorl 124(%esi),%ebx | ||
3713 | |||
3714 | # qhasm: *(uint32 *) (out + 112) = in12 | ||
3715 | # asm 1: movl <in12=int32#1,112(<out=int32#6) | ||
3716 | # asm 2: movl <in12=%eax,112(<out=%edi) | ||
3717 | movl %eax,112(%edi) | ||
3718 | |||
3719 | # qhasm: *(uint32 *) (out + 116) = in13 | ||
3720 | # asm 1: movl <in13=int32#2,116(<out=int32#6) | ||
3721 | # asm 2: movl <in13=%ecx,116(<out=%edi) | ||
3722 | movl %ecx,116(%edi) | ||
3723 | |||
3724 | # qhasm: *(uint32 *) (out + 120) = in14 | ||
3725 | # asm 1: movl <in14=int32#3,120(<out=int32#6) | ||
3726 | # asm 2: movl <in14=%edx,120(<out=%edi) | ||
3727 | movl %edx,120(%edi) | ||
3728 | |||
3729 | # qhasm: *(uint32 *) (out + 124) = in15 | ||
3730 | # asm 1: movl <in15=int32#4,124(<out=int32#6) | ||
3731 | # asm 2: movl <in15=%ebx,124(<out=%edi) | ||
3732 | movl %ebx,124(%edi) | ||
3733 | |||
3734 | # qhasm: in12 = z12 | ||
3735 | # asm 1: movd <z12=int6464#1,>in12=int32#1 | ||
3736 | # asm 2: movd <z12=%xmm0,>in12=%eax | ||
3737 | movd %xmm0,%eax | ||
3738 | |||
3739 | # qhasm: in13 = z13 | ||
3740 | # asm 1: movd <z13=int6464#2,>in13=int32#2 | ||
3741 | # asm 2: movd <z13=%xmm1,>in13=%ecx | ||
3742 | movd %xmm1,%ecx | ||
3743 | |||
3744 | # qhasm: in14 = z14 | ||
3745 | # asm 1: movd <z14=int6464#3,>in14=int32#3 | ||
3746 | # asm 2: movd <z14=%xmm2,>in14=%edx | ||
3747 | movd %xmm2,%edx | ||
3748 | |||
3749 | # qhasm: in15 = z15 | ||
3750 | # asm 1: movd <z15=int6464#4,>in15=int32#4 | ||
3751 | # asm 2: movd <z15=%xmm3,>in15=%ebx | ||
3752 | movd %xmm3,%ebx | ||
3753 | |||
3754 | # qhasm: z12 <<<= 96 | ||
3755 | # asm 1: pshufd $0x39,<z12=int6464#1,<z12=int6464#1 | ||
3756 | # asm 2: pshufd $0x39,<z12=%xmm0,<z12=%xmm0 | ||
3757 | pshufd $0x39,%xmm0,%xmm0 | ||
3758 | |||
3759 | # qhasm: z13 <<<= 96 | ||
3760 | # asm 1: pshufd $0x39,<z13=int6464#2,<z13=int6464#2 | ||
3761 | # asm 2: pshufd $0x39,<z13=%xmm1,<z13=%xmm1 | ||
3762 | pshufd $0x39,%xmm1,%xmm1 | ||
3763 | |||
3764 | # qhasm: z14 <<<= 96 | ||
3765 | # asm 1: pshufd $0x39,<z14=int6464#3,<z14=int6464#3 | ||
3766 | # asm 2: pshufd $0x39,<z14=%xmm2,<z14=%xmm2 | ||
3767 | pshufd $0x39,%xmm2,%xmm2 | ||
3768 | |||
3769 | # qhasm: z15 <<<= 96 | ||
3770 | # asm 1: pshufd $0x39,<z15=int6464#4,<z15=int6464#4 | ||
3771 | # asm 2: pshufd $0x39,<z15=%xmm3,<z15=%xmm3 | ||
3772 | pshufd $0x39,%xmm3,%xmm3 | ||
3773 | |||
3774 | # qhasm: in12 ^= *(uint32 *) (m + 176) | ||
3775 | # asm 1: xorl 176(<m=int32#5),<in12=int32#1 | ||
3776 | # asm 2: xorl 176(<m=%esi),<in12=%eax | ||
3777 | xorl 176(%esi),%eax | ||
3778 | |||
3779 | # qhasm: in13 ^= *(uint32 *) (m + 180) | ||
3780 | # asm 1: xorl 180(<m=int32#5),<in13=int32#2 | ||
3781 | # asm 2: xorl 180(<m=%esi),<in13=%ecx | ||
3782 | xorl 180(%esi),%ecx | ||
3783 | |||
3784 | # qhasm: in14 ^= *(uint32 *) (m + 184) | ||
3785 | # asm 1: xorl 184(<m=int32#5),<in14=int32#3 | ||
3786 | # asm 2: xorl 184(<m=%esi),<in14=%edx | ||
3787 | xorl 184(%esi),%edx | ||
3788 | |||
3789 | # qhasm: in15 ^= *(uint32 *) (m + 188) | ||
3790 | # asm 1: xorl 188(<m=int32#5),<in15=int32#4 | ||
3791 | # asm 2: xorl 188(<m=%esi),<in15=%ebx | ||
3792 | xorl 188(%esi),%ebx | ||
3793 | |||
3794 | # qhasm: *(uint32 *) (out + 176) = in12 | ||
3795 | # asm 1: movl <in12=int32#1,176(<out=int32#6) | ||
3796 | # asm 2: movl <in12=%eax,176(<out=%edi) | ||
3797 | movl %eax,176(%edi) | ||
3798 | |||
3799 | # qhasm: *(uint32 *) (out + 180) = in13 | ||
3800 | # asm 1: movl <in13=int32#2,180(<out=int32#6) | ||
3801 | # asm 2: movl <in13=%ecx,180(<out=%edi) | ||
3802 | movl %ecx,180(%edi) | ||
3803 | |||
3804 | # qhasm: *(uint32 *) (out + 184) = in14 | ||
3805 | # asm 1: movl <in14=int32#3,184(<out=int32#6) | ||
3806 | # asm 2: movl <in14=%edx,184(<out=%edi) | ||
3807 | movl %edx,184(%edi) | ||
3808 | |||
3809 | # qhasm: *(uint32 *) (out + 188) = in15 | ||
3810 | # asm 1: movl <in15=int32#4,188(<out=int32#6) | ||
3811 | # asm 2: movl <in15=%ebx,188(<out=%edi) | ||
3812 | movl %ebx,188(%edi) | ||
3813 | |||
3814 | # qhasm: in12 = z12 | ||
3815 | # asm 1: movd <z12=int6464#1,>in12=int32#1 | ||
3816 | # asm 2: movd <z12=%xmm0,>in12=%eax | ||
3817 | movd %xmm0,%eax | ||
3818 | |||
3819 | # qhasm: in13 = z13 | ||
3820 | # asm 1: movd <z13=int6464#2,>in13=int32#2 | ||
3821 | # asm 2: movd <z13=%xmm1,>in13=%ecx | ||
3822 | movd %xmm1,%ecx | ||
3823 | |||
3824 | # qhasm: in14 = z14 | ||
3825 | # asm 1: movd <z14=int6464#3,>in14=int32#3 | ||
3826 | # asm 2: movd <z14=%xmm2,>in14=%edx | ||
3827 | movd %xmm2,%edx | ||
3828 | |||
3829 | # qhasm: in15 = z15 | ||
3830 | # asm 1: movd <z15=int6464#4,>in15=int32#4 | ||
3831 | # asm 2: movd <z15=%xmm3,>in15=%ebx | ||
3832 | movd %xmm3,%ebx | ||
3833 | |||
3834 | # qhasm: in12 ^= *(uint32 *) (m + 240) | ||
3835 | # asm 1: xorl 240(<m=int32#5),<in12=int32#1 | ||
3836 | # asm 2: xorl 240(<m=%esi),<in12=%eax | ||
3837 | xorl 240(%esi),%eax | ||
3838 | |||
3839 | # qhasm: in13 ^= *(uint32 *) (m + 244) | ||
3840 | # asm 1: xorl 244(<m=int32#5),<in13=int32#2 | ||
3841 | # asm 2: xorl 244(<m=%esi),<in13=%ecx | ||
3842 | xorl 244(%esi),%ecx | ||
3843 | |||
3844 | # qhasm: in14 ^= *(uint32 *) (m + 248) | ||
3845 | # asm 1: xorl 248(<m=int32#5),<in14=int32#3 | ||
3846 | # asm 2: xorl 248(<m=%esi),<in14=%edx | ||
3847 | xorl 248(%esi),%edx | ||
3848 | |||
3849 | # qhasm: in15 ^= *(uint32 *) (m + 252) | ||
3850 | # asm 1: xorl 252(<m=int32#5),<in15=int32#4 | ||
3851 | # asm 2: xorl 252(<m=%esi),<in15=%ebx | ||
3852 | xorl 252(%esi),%ebx | ||
3853 | |||
3854 | # qhasm: *(uint32 *) (out + 240) = in12 | ||
3855 | # asm 1: movl <in12=int32#1,240(<out=int32#6) | ||
3856 | # asm 2: movl <in12=%eax,240(<out=%edi) | ||
3857 | movl %eax,240(%edi) | ||
3858 | |||
3859 | # qhasm: *(uint32 *) (out + 244) = in13 | ||
3860 | # asm 1: movl <in13=int32#2,244(<out=int32#6) | ||
3861 | # asm 2: movl <in13=%ecx,244(<out=%edi) | ||
3862 | movl %ecx,244(%edi) | ||
3863 | |||
3864 | # qhasm: *(uint32 *) (out + 248) = in14 | ||
3865 | # asm 1: movl <in14=int32#3,248(<out=int32#6) | ||
3866 | # asm 2: movl <in14=%edx,248(<out=%edi) | ||
3867 | movl %edx,248(%edi) | ||
3868 | |||
3869 | # qhasm: *(uint32 *) (out + 252) = in15 | ||
3870 | # asm 1: movl <in15=int32#4,252(<out=int32#6) | ||
3871 | # asm 2: movl <in15=%ebx,252(<out=%edi) | ||
3872 | movl %ebx,252(%edi) | ||
3873 | |||
3874 | # qhasm: bytes = bytes_stack | ||
3875 | # asm 1: movl <bytes_stack=stack32#7,>bytes=int32#1 | ||
3876 | # asm 2: movl <bytes_stack=24(%esp),>bytes=%eax | ||
3877 | movl 24(%esp),%eax | ||
3878 | |||
3879 | # qhasm: bytes -= 256 | ||
3880 | # asm 1: sub $256,<bytes=int32#1 | ||
3881 | # asm 2: sub $256,<bytes=%eax | ||
3882 | sub $256,%eax | ||
3883 | |||
3884 | # qhasm: m += 256 | ||
3885 | # asm 1: add $256,<m=int32#5 | ||
3886 | # asm 2: add $256,<m=%esi | ||
3887 | add $256,%esi | ||
3888 | |||
3889 | # qhasm: out += 256 | ||
3890 | # asm 1: add $256,<out=int32#6 | ||
3891 | # asm 2: add $256,<out=%edi | ||
3892 | add $256,%edi | ||
3893 | |||
3894 | # qhasm: out_stack = out | ||
3895 | # asm 1: movl <out=int32#6,>out_stack=stack32#6 | ||
3896 | # asm 2: movl <out=%edi,>out_stack=20(%esp) | ||
3897 | movl %edi,20(%esp) | ||
3898 | |||
3899 | # qhasm: unsigned<? bytes - 256 | ||
3900 | # asm 1: cmp $256,<bytes=int32#1 | ||
3901 | # asm 2: cmp $256,<bytes=%eax | ||
3902 | cmp $256,%eax | ||
3903 | # comment:fp stack unchanged by jump | ||
3904 | |||
3905 | # qhasm: goto bytesatleast256 if !unsigned< | ||
3906 | jae ._bytesatleast256 | ||
3907 | |||
3908 | # qhasm: unsigned>? bytes - 0 | ||
3909 | # asm 1: cmp $0,<bytes=int32#1 | ||
3910 | # asm 2: cmp $0,<bytes=%eax | ||
3911 | cmp $0,%eax | ||
3912 | # comment:fp stack unchanged by jump | ||
3913 | |||
3914 | # qhasm: goto done if !unsigned> | ||
3915 | jbe ._done | ||
3916 | # comment:fp stack unchanged by fallthrough | ||
3917 | |||
3918 | # qhasm: bytesbetween1and255: | ||
3919 | ._bytesbetween1and255: | ||
3920 | |||
3921 | # qhasm: unsigned<? bytes - 64 | ||
3922 | # asm 1: cmp $64,<bytes=int32#1 | ||
3923 | # asm 2: cmp $64,<bytes=%eax | ||
3924 | cmp $64,%eax | ||
3925 | # comment:fp stack unchanged by jump | ||
3926 | |||
3927 | # qhasm: goto nocopy if !unsigned< | ||
3928 | jae ._nocopy | ||
3929 | |||
3930 | # qhasm: ctarget = out | ||
3931 | # asm 1: movl <out=int32#6,>ctarget=stack32#6 | ||
3932 | # asm 2: movl <out=%edi,>ctarget=20(%esp) | ||
3933 | movl %edi,20(%esp) | ||
3934 | |||
3935 | # qhasm: out = &tmp | ||
3936 | # asm 1: leal <tmp=stack512#1,>out=int32#6 | ||
3937 | # asm 2: leal <tmp=640(%esp),>out=%edi | ||
3938 | leal 640(%esp),%edi | ||
3939 | |||
3940 | # qhasm: i = bytes | ||
3941 | # asm 1: mov <bytes=int32#1,>i=int32#2 | ||
3942 | # asm 2: mov <bytes=%eax,>i=%ecx | ||
3943 | mov %eax,%ecx | ||
3944 | |||
3945 | # qhasm: while (i) { *out++ = *m++; --i } | ||
3946 | rep movsb | ||
3947 | |||
3948 | # qhasm: out = &tmp | ||
3949 | # asm 1: leal <tmp=stack512#1,>out=int32#6 | ||
3950 | # asm 2: leal <tmp=640(%esp),>out=%edi | ||
3951 | leal 640(%esp),%edi | ||
3952 | |||
3953 | # qhasm: m = &tmp | ||
3954 | # asm 1: leal <tmp=stack512#1,>m=int32#5 | ||
3955 | # asm 2: leal <tmp=640(%esp),>m=%esi | ||
3956 | leal 640(%esp),%esi | ||
3957 | # comment:fp stack unchanged by fallthrough | ||
3958 | |||
3959 | # qhasm: nocopy: | ||
3960 | ._nocopy: | ||
3961 | |||
3962 | # qhasm: bytes_stack = bytes | ||
3963 | # asm 1: movl <bytes=int32#1,>bytes_stack=stack32#7 | ||
3964 | # asm 2: movl <bytes=%eax,>bytes_stack=24(%esp) | ||
3965 | movl %eax,24(%esp) | ||
3966 | |||
3967 | # qhasm: diag0 = x0 | ||
3968 | # asm 1: movdqa <x0=stack128#3,>diag0=int6464#1 | ||
3969 | # asm 2: movdqa <x0=64(%esp),>diag0=%xmm0 | ||
3970 | movdqa 64(%esp),%xmm0 | ||
3971 | |||
3972 | # qhasm: diag1 = x1 | ||
3973 | # asm 1: movdqa <x1=stack128#2,>diag1=int6464#2 | ||
3974 | # asm 2: movdqa <x1=48(%esp),>diag1=%xmm1 | ||
3975 | movdqa 48(%esp),%xmm1 | ||
3976 | |||
3977 | # qhasm: diag2 = x2 | ||
3978 | # asm 1: movdqa <x2=stack128#4,>diag2=int6464#3 | ||
3979 | # asm 2: movdqa <x2=80(%esp),>diag2=%xmm2 | ||
3980 | movdqa 80(%esp),%xmm2 | ||
3981 | |||
3982 | # qhasm: diag3 = x3 | ||
3983 | # asm 1: movdqa <x3=stack128#1,>diag3=int6464#4 | ||
3984 | # asm 2: movdqa <x3=32(%esp),>diag3=%xmm3 | ||
3985 | movdqa 32(%esp),%xmm3 | ||
3986 | |||
3987 | # qhasm: a0 = diag1 | ||
3988 | # asm 1: movdqa <diag1=int6464#2,>a0=int6464#5 | ||
3989 | # asm 2: movdqa <diag1=%xmm1,>a0=%xmm4 | ||
3990 | movdqa %xmm1,%xmm4 | ||
3991 | |||
3992 | # qhasm: i = 20 | ||
3993 | # asm 1: mov $20,>i=int32#1 | ||
3994 | # asm 2: mov $20,>i=%eax | ||
3995 | mov $20,%eax | ||
3996 | |||
3997 | # qhasm: mainloop2: | ||
3998 | ._mainloop2: | ||
3999 | |||
4000 | # qhasm: uint32323232 a0 += diag0 | ||
4001 | # asm 1: paddd <diag0=int6464#1,<a0=int6464#5 | ||
4002 | # asm 2: paddd <diag0=%xmm0,<a0=%xmm4 | ||
4003 | paddd %xmm0,%xmm4 | ||
4004 | |||
4005 | # qhasm: a1 = diag0 | ||
4006 | # asm 1: movdqa <diag0=int6464#1,>a1=int6464#6 | ||
4007 | # asm 2: movdqa <diag0=%xmm0,>a1=%xmm5 | ||
4008 | movdqa %xmm0,%xmm5 | ||
4009 | |||
4010 | # qhasm: b0 = a0 | ||
4011 | # asm 1: movdqa <a0=int6464#5,>b0=int6464#7 | ||
4012 | # asm 2: movdqa <a0=%xmm4,>b0=%xmm6 | ||
4013 | movdqa %xmm4,%xmm6 | ||
4014 | |||
4015 | # qhasm: uint32323232 a0 <<= 7 | ||
4016 | # asm 1: pslld $7,<a0=int6464#5 | ||
4017 | # asm 2: pslld $7,<a0=%xmm4 | ||
4018 | pslld $7,%xmm4 | ||
4019 | |||
4020 | # qhasm: uint32323232 b0 >>= 25 | ||
4021 | # asm 1: psrld $25,<b0=int6464#7 | ||
4022 | # asm 2: psrld $25,<b0=%xmm6 | ||
4023 | psrld $25,%xmm6 | ||
4024 | |||
4025 | # qhasm: diag3 ^= a0 | ||
4026 | # asm 1: pxor <a0=int6464#5,<diag3=int6464#4 | ||
4027 | # asm 2: pxor <a0=%xmm4,<diag3=%xmm3 | ||
4028 | pxor %xmm4,%xmm3 | ||
4029 | |||
4030 | # qhasm: diag3 ^= b0 | ||
4031 | # asm 1: pxor <b0=int6464#7,<diag3=int6464#4 | ||
4032 | # asm 2: pxor <b0=%xmm6,<diag3=%xmm3 | ||
4033 | pxor %xmm6,%xmm3 | ||
4034 | |||
4035 | # qhasm: uint32323232 a1 += diag3 | ||
4036 | # asm 1: paddd <diag3=int6464#4,<a1=int6464#6 | ||
4037 | # asm 2: paddd <diag3=%xmm3,<a1=%xmm5 | ||
4038 | paddd %xmm3,%xmm5 | ||
4039 | |||
4040 | # qhasm: a2 = diag3 | ||
4041 | # asm 1: movdqa <diag3=int6464#4,>a2=int6464#5 | ||
4042 | # asm 2: movdqa <diag3=%xmm3,>a2=%xmm4 | ||
4043 | movdqa %xmm3,%xmm4 | ||
4044 | |||
4045 | # qhasm: b1 = a1 | ||
4046 | # asm 1: movdqa <a1=int6464#6,>b1=int6464#7 | ||
4047 | # asm 2: movdqa <a1=%xmm5,>b1=%xmm6 | ||
4048 | movdqa %xmm5,%xmm6 | ||
4049 | |||
4050 | # qhasm: uint32323232 a1 <<= 9 | ||
4051 | # asm 1: pslld $9,<a1=int6464#6 | ||
4052 | # asm 2: pslld $9,<a1=%xmm5 | ||
4053 | pslld $9,%xmm5 | ||
4054 | |||
4055 | # qhasm: uint32323232 b1 >>= 23 | ||
4056 | # asm 1: psrld $23,<b1=int6464#7 | ||
4057 | # asm 2: psrld $23,<b1=%xmm6 | ||
4058 | psrld $23,%xmm6 | ||
4059 | |||
4060 | # qhasm: diag2 ^= a1 | ||
4061 | # asm 1: pxor <a1=int6464#6,<diag2=int6464#3 | ||
4062 | # asm 2: pxor <a1=%xmm5,<diag2=%xmm2 | ||
4063 | pxor %xmm5,%xmm2 | ||
4064 | |||
4065 | # qhasm: diag3 <<<= 32 | ||
4066 | # asm 1: pshufd $0x93,<diag3=int6464#4,<diag3=int6464#4 | ||
4067 | # asm 2: pshufd $0x93,<diag3=%xmm3,<diag3=%xmm3 | ||
4068 | pshufd $0x93,%xmm3,%xmm3 | ||
4069 | |||
4070 | # qhasm: diag2 ^= b1 | ||
4071 | # asm 1: pxor <b1=int6464#7,<diag2=int6464#3 | ||
4072 | # asm 2: pxor <b1=%xmm6,<diag2=%xmm2 | ||
4073 | pxor %xmm6,%xmm2 | ||
4074 | |||
4075 | # qhasm: uint32323232 a2 += diag2 | ||
4076 | # asm 1: paddd <diag2=int6464#3,<a2=int6464#5 | ||
4077 | # asm 2: paddd <diag2=%xmm2,<a2=%xmm4 | ||
4078 | paddd %xmm2,%xmm4 | ||
4079 | |||
4080 | # qhasm: a3 = diag2 | ||
4081 | # asm 1: movdqa <diag2=int6464#3,>a3=int6464#6 | ||
4082 | # asm 2: movdqa <diag2=%xmm2,>a3=%xmm5 | ||
4083 | movdqa %xmm2,%xmm5 | ||
4084 | |||
4085 | # qhasm: b2 = a2 | ||
4086 | # asm 1: movdqa <a2=int6464#5,>b2=int6464#7 | ||
4087 | # asm 2: movdqa <a2=%xmm4,>b2=%xmm6 | ||
4088 | movdqa %xmm4,%xmm6 | ||
4089 | |||
4090 | # qhasm: uint32323232 a2 <<= 13 | ||
4091 | # asm 1: pslld $13,<a2=int6464#5 | ||
4092 | # asm 2: pslld $13,<a2=%xmm4 | ||
4093 | pslld $13,%xmm4 | ||
4094 | |||
4095 | # qhasm: uint32323232 b2 >>= 19 | ||
4096 | # asm 1: psrld $19,<b2=int6464#7 | ||
4097 | # asm 2: psrld $19,<b2=%xmm6 | ||
4098 | psrld $19,%xmm6 | ||
4099 | |||
4100 | # qhasm: diag1 ^= a2 | ||
4101 | # asm 1: pxor <a2=int6464#5,<diag1=int6464#2 | ||
4102 | # asm 2: pxor <a2=%xmm4,<diag1=%xmm1 | ||
4103 | pxor %xmm4,%xmm1 | ||
4104 | |||
4105 | # qhasm: diag2 <<<= 64 | ||
4106 | # asm 1: pshufd $0x4e,<diag2=int6464#3,<diag2=int6464#3 | ||
4107 | # asm 2: pshufd $0x4e,<diag2=%xmm2,<diag2=%xmm2 | ||
4108 | pshufd $0x4e,%xmm2,%xmm2 | ||
4109 | |||
4110 | # qhasm: diag1 ^= b2 | ||
4111 | # asm 1: pxor <b2=int6464#7,<diag1=int6464#2 | ||
4112 | # asm 2: pxor <b2=%xmm6,<diag1=%xmm1 | ||
4113 | pxor %xmm6,%xmm1 | ||
4114 | |||
4115 | # qhasm: uint32323232 a3 += diag1 | ||
4116 | # asm 1: paddd <diag1=int6464#2,<a3=int6464#6 | ||
4117 | # asm 2: paddd <diag1=%xmm1,<a3=%xmm5 | ||
4118 | paddd %xmm1,%xmm5 | ||
4119 | |||
4120 | # qhasm: a4 = diag3 | ||
4121 | # asm 1: movdqa <diag3=int6464#4,>a4=int6464#5 | ||
4122 | # asm 2: movdqa <diag3=%xmm3,>a4=%xmm4 | ||
4123 | movdqa %xmm3,%xmm4 | ||
4124 | |||
4125 | # qhasm: b3 = a3 | ||
4126 | # asm 1: movdqa <a3=int6464#6,>b3=int6464#7 | ||
4127 | # asm 2: movdqa <a3=%xmm5,>b3=%xmm6 | ||
4128 | movdqa %xmm5,%xmm6 | ||
4129 | |||
4130 | # qhasm: uint32323232 a3 <<= 18 | ||
4131 | # asm 1: pslld $18,<a3=int6464#6 | ||
4132 | # asm 2: pslld $18,<a3=%xmm5 | ||
4133 | pslld $18,%xmm5 | ||
4134 | |||
4135 | # qhasm: uint32323232 b3 >>= 14 | ||
4136 | # asm 1: psrld $14,<b3=int6464#7 | ||
4137 | # asm 2: psrld $14,<b3=%xmm6 | ||
4138 | psrld $14,%xmm6 | ||
4139 | |||
4140 | # qhasm: diag0 ^= a3 | ||
4141 | # asm 1: pxor <a3=int6464#6,<diag0=int6464#1 | ||
4142 | # asm 2: pxor <a3=%xmm5,<diag0=%xmm0 | ||
4143 | pxor %xmm5,%xmm0 | ||
4144 | |||
4145 | # qhasm: diag1 <<<= 96 | ||
4146 | # asm 1: pshufd $0x39,<diag1=int6464#2,<diag1=int6464#2 | ||
4147 | # asm 2: pshufd $0x39,<diag1=%xmm1,<diag1=%xmm1 | ||
4148 | pshufd $0x39,%xmm1,%xmm1 | ||
4149 | |||
4150 | # qhasm: diag0 ^= b3 | ||
4151 | # asm 1: pxor <b3=int6464#7,<diag0=int6464#1 | ||
4152 | # asm 2: pxor <b3=%xmm6,<diag0=%xmm0 | ||
4153 | pxor %xmm6,%xmm0 | ||
4154 | |||
4155 | # qhasm: uint32323232 a4 += diag0 | ||
4156 | # asm 1: paddd <diag0=int6464#1,<a4=int6464#5 | ||
4157 | # asm 2: paddd <diag0=%xmm0,<a4=%xmm4 | ||
4158 | paddd %xmm0,%xmm4 | ||
4159 | |||
4160 | # qhasm: a5 = diag0 | ||
4161 | # asm 1: movdqa <diag0=int6464#1,>a5=int6464#6 | ||
4162 | # asm 2: movdqa <diag0=%xmm0,>a5=%xmm5 | ||
4163 | movdqa %xmm0,%xmm5 | ||
4164 | |||
4165 | # qhasm: b4 = a4 | ||
4166 | # asm 1: movdqa <a4=int6464#5,>b4=int6464#7 | ||
4167 | # asm 2: movdqa <a4=%xmm4,>b4=%xmm6 | ||
4168 | movdqa %xmm4,%xmm6 | ||
4169 | |||
4170 | # qhasm: uint32323232 a4 <<= 7 | ||
4171 | # asm 1: pslld $7,<a4=int6464#5 | ||
4172 | # asm 2: pslld $7,<a4=%xmm4 | ||
4173 | pslld $7,%xmm4 | ||
4174 | |||
4175 | # qhasm: uint32323232 b4 >>= 25 | ||
4176 | # asm 1: psrld $25,<b4=int6464#7 | ||
4177 | # asm 2: psrld $25,<b4=%xmm6 | ||
4178 | psrld $25,%xmm6 | ||
4179 | |||
4180 | # qhasm: diag1 ^= a4 | ||
4181 | # asm 1: pxor <a4=int6464#5,<diag1=int6464#2 | ||
4182 | # asm 2: pxor <a4=%xmm4,<diag1=%xmm1 | ||
4183 | pxor %xmm4,%xmm1 | ||
4184 | |||
4185 | # qhasm: diag1 ^= b4 | ||
4186 | # asm 1: pxor <b4=int6464#7,<diag1=int6464#2 | ||
4187 | # asm 2: pxor <b4=%xmm6,<diag1=%xmm1 | ||
4188 | pxor %xmm6,%xmm1 | ||
4189 | |||
4190 | # qhasm: uint32323232 a5 += diag1 | ||
4191 | # asm 1: paddd <diag1=int6464#2,<a5=int6464#6 | ||
4192 | # asm 2: paddd <diag1=%xmm1,<a5=%xmm5 | ||
4193 | paddd %xmm1,%xmm5 | ||
4194 | |||
4195 | # qhasm: a6 = diag1 | ||
4196 | # asm 1: movdqa <diag1=int6464#2,>a6=int6464#5 | ||
4197 | # asm 2: movdqa <diag1=%xmm1,>a6=%xmm4 | ||
4198 | movdqa %xmm1,%xmm4 | ||
4199 | |||
4200 | # qhasm: b5 = a5 | ||
4201 | # asm 1: movdqa <a5=int6464#6,>b5=int6464#7 | ||
4202 | # asm 2: movdqa <a5=%xmm5,>b5=%xmm6 | ||
4203 | movdqa %xmm5,%xmm6 | ||
4204 | |||
4205 | # qhasm: uint32323232 a5 <<= 9 | ||
4206 | # asm 1: pslld $9,<a5=int6464#6 | ||
4207 | # asm 2: pslld $9,<a5=%xmm5 | ||
4208 | pslld $9,%xmm5 | ||
4209 | |||
4210 | # qhasm: uint32323232 b5 >>= 23 | ||
4211 | # asm 1: psrld $23,<b5=int6464#7 | ||
4212 | # asm 2: psrld $23,<b5=%xmm6 | ||
4213 | psrld $23,%xmm6 | ||
4214 | |||
4215 | # qhasm: diag2 ^= a5 | ||
4216 | # asm 1: pxor <a5=int6464#6,<diag2=int6464#3 | ||
4217 | # asm 2: pxor <a5=%xmm5,<diag2=%xmm2 | ||
4218 | pxor %xmm5,%xmm2 | ||
4219 | |||
4220 | # qhasm: diag1 <<<= 32 | ||
4221 | # asm 1: pshufd $0x93,<diag1=int6464#2,<diag1=int6464#2 | ||
4222 | # asm 2: pshufd $0x93,<diag1=%xmm1,<diag1=%xmm1 | ||
4223 | pshufd $0x93,%xmm1,%xmm1 | ||
4224 | |||
4225 | # qhasm: diag2 ^= b5 | ||
4226 | # asm 1: pxor <b5=int6464#7,<diag2=int6464#3 | ||
4227 | # asm 2: pxor <b5=%xmm6,<diag2=%xmm2 | ||
4228 | pxor %xmm6,%xmm2 | ||
4229 | |||
4230 | # qhasm: uint32323232 a6 += diag2 | ||
4231 | # asm 1: paddd <diag2=int6464#3,<a6=int6464#5 | ||
4232 | # asm 2: paddd <diag2=%xmm2,<a6=%xmm4 | ||
4233 | paddd %xmm2,%xmm4 | ||
4234 | |||
4235 | # qhasm: a7 = diag2 | ||
4236 | # asm 1: movdqa <diag2=int6464#3,>a7=int6464#6 | ||
4237 | # asm 2: movdqa <diag2=%xmm2,>a7=%xmm5 | ||
4238 | movdqa %xmm2,%xmm5 | ||
4239 | |||
4240 | # qhasm: b6 = a6 | ||
4241 | # asm 1: movdqa <a6=int6464#5,>b6=int6464#7 | ||
4242 | # asm 2: movdqa <a6=%xmm4,>b6=%xmm6 | ||
4243 | movdqa %xmm4,%xmm6 | ||
4244 | |||
4245 | # qhasm: uint32323232 a6 <<= 13 | ||
4246 | # asm 1: pslld $13,<a6=int6464#5 | ||
4247 | # asm 2: pslld $13,<a6=%xmm4 | ||
4248 | pslld $13,%xmm4 | ||
4249 | |||
4250 | # qhasm: uint32323232 b6 >>= 19 | ||
4251 | # asm 1: psrld $19,<b6=int6464#7 | ||
4252 | # asm 2: psrld $19,<b6=%xmm6 | ||
4253 | psrld $19,%xmm6 | ||
4254 | |||
4255 | # qhasm: diag3 ^= a6 | ||
4256 | # asm 1: pxor <a6=int6464#5,<diag3=int6464#4 | ||
4257 | # asm 2: pxor <a6=%xmm4,<diag3=%xmm3 | ||
4258 | pxor %xmm4,%xmm3 | ||
4259 | |||
4260 | # qhasm: diag2 <<<= 64 | ||
4261 | # asm 1: pshufd $0x4e,<diag2=int6464#3,<diag2=int6464#3 | ||
4262 | # asm 2: pshufd $0x4e,<diag2=%xmm2,<diag2=%xmm2 | ||
4263 | pshufd $0x4e,%xmm2,%xmm2 | ||
4264 | |||
4265 | # qhasm: diag3 ^= b6 | ||
4266 | # asm 1: pxor <b6=int6464#7,<diag3=int6464#4 | ||
4267 | # asm 2: pxor <b6=%xmm6,<diag3=%xmm3 | ||
4268 | pxor %xmm6,%xmm3 | ||
4269 | |||
4270 | # qhasm: uint32323232 a7 += diag3 | ||
4271 | # asm 1: paddd <diag3=int6464#4,<a7=int6464#6 | ||
4272 | # asm 2: paddd <diag3=%xmm3,<a7=%xmm5 | ||
4273 | paddd %xmm3,%xmm5 | ||
4274 | |||
4275 | # qhasm: a0 = diag1 | ||
4276 | # asm 1: movdqa <diag1=int6464#2,>a0=int6464#5 | ||
4277 | # asm 2: movdqa <diag1=%xmm1,>a0=%xmm4 | ||
4278 | movdqa %xmm1,%xmm4 | ||
4279 | |||
4280 | # qhasm: b7 = a7 | ||
4281 | # asm 1: movdqa <a7=int6464#6,>b7=int6464#7 | ||
4282 | # asm 2: movdqa <a7=%xmm5,>b7=%xmm6 | ||
4283 | movdqa %xmm5,%xmm6 | ||
4284 | |||
4285 | # qhasm: uint32323232 a7 <<= 18 | ||
4286 | # asm 1: pslld $18,<a7=int6464#6 | ||
4287 | # asm 2: pslld $18,<a7=%xmm5 | ||
4288 | pslld $18,%xmm5 | ||
4289 | |||
4290 | # qhasm: uint32323232 b7 >>= 14 | ||
4291 | # asm 1: psrld $14,<b7=int6464#7 | ||
4292 | # asm 2: psrld $14,<b7=%xmm6 | ||
4293 | psrld $14,%xmm6 | ||
4294 | |||
4295 | # qhasm: diag0 ^= a7 | ||
4296 | # asm 1: pxor <a7=int6464#6,<diag0=int6464#1 | ||
4297 | # asm 2: pxor <a7=%xmm5,<diag0=%xmm0 | ||
4298 | pxor %xmm5,%xmm0 | ||
4299 | |||
4300 | # qhasm: diag3 <<<= 96 | ||
4301 | # asm 1: pshufd $0x39,<diag3=int6464#4,<diag3=int6464#4 | ||
4302 | # asm 2: pshufd $0x39,<diag3=%xmm3,<diag3=%xmm3 | ||
4303 | pshufd $0x39,%xmm3,%xmm3 | ||
4304 | |||
4305 | # qhasm: diag0 ^= b7 | ||
4306 | # asm 1: pxor <b7=int6464#7,<diag0=int6464#1 | ||
4307 | # asm 2: pxor <b7=%xmm6,<diag0=%xmm0 | ||
4308 | pxor %xmm6,%xmm0 | ||
4309 | |||
4310 | # qhasm: uint32323232 a0 += diag0 | ||
4311 | # asm 1: paddd <diag0=int6464#1,<a0=int6464#5 | ||
4312 | # asm 2: paddd <diag0=%xmm0,<a0=%xmm4 | ||
4313 | paddd %xmm0,%xmm4 | ||
4314 | |||
4315 | # qhasm: a1 = diag0 | ||
4316 | # asm 1: movdqa <diag0=int6464#1,>a1=int6464#6 | ||
4317 | # asm 2: movdqa <diag0=%xmm0,>a1=%xmm5 | ||
4318 | movdqa %xmm0,%xmm5 | ||
4319 | |||
4320 | # qhasm: b0 = a0 | ||
4321 | # asm 1: movdqa <a0=int6464#5,>b0=int6464#7 | ||
4322 | # asm 2: movdqa <a0=%xmm4,>b0=%xmm6 | ||
4323 | movdqa %xmm4,%xmm6 | ||
4324 | |||
4325 | # qhasm: uint32323232 a0 <<= 7 | ||
4326 | # asm 1: pslld $7,<a0=int6464#5 | ||
4327 | # asm 2: pslld $7,<a0=%xmm4 | ||
4328 | pslld $7,%xmm4 | ||
4329 | |||
4330 | # qhasm: uint32323232 b0 >>= 25 | ||
4331 | # asm 1: psrld $25,<b0=int6464#7 | ||
4332 | # asm 2: psrld $25,<b0=%xmm6 | ||
4333 | psrld $25,%xmm6 | ||
4334 | |||
4335 | # qhasm: diag3 ^= a0 | ||
4336 | # asm 1: pxor <a0=int6464#5,<diag3=int6464#4 | ||
4337 | # asm 2: pxor <a0=%xmm4,<diag3=%xmm3 | ||
4338 | pxor %xmm4,%xmm3 | ||
4339 | |||
4340 | # qhasm: diag3 ^= b0 | ||
4341 | # asm 1: pxor <b0=int6464#7,<diag3=int6464#4 | ||
4342 | # asm 2: pxor <b0=%xmm6,<diag3=%xmm3 | ||
4343 | pxor %xmm6,%xmm3 | ||
4344 | |||
4345 | # qhasm: uint32323232 a1 += diag3 | ||
4346 | # asm 1: paddd <diag3=int6464#4,<a1=int6464#6 | ||
4347 | # asm 2: paddd <diag3=%xmm3,<a1=%xmm5 | ||
4348 | paddd %xmm3,%xmm5 | ||
4349 | |||
4350 | # qhasm: a2 = diag3 | ||
4351 | # asm 1: movdqa <diag3=int6464#4,>a2=int6464#5 | ||
4352 | # asm 2: movdqa <diag3=%xmm3,>a2=%xmm4 | ||
4353 | movdqa %xmm3,%xmm4 | ||
4354 | |||
4355 | # qhasm: b1 = a1 | ||
4356 | # asm 1: movdqa <a1=int6464#6,>b1=int6464#7 | ||
4357 | # asm 2: movdqa <a1=%xmm5,>b1=%xmm6 | ||
4358 | movdqa %xmm5,%xmm6 | ||
4359 | |||
4360 | # qhasm: uint32323232 a1 <<= 9 | ||
4361 | # asm 1: pslld $9,<a1=int6464#6 | ||
4362 | # asm 2: pslld $9,<a1=%xmm5 | ||
4363 | pslld $9,%xmm5 | ||
4364 | |||
4365 | # qhasm: uint32323232 b1 >>= 23 | ||
4366 | # asm 1: psrld $23,<b1=int6464#7 | ||
4367 | # asm 2: psrld $23,<b1=%xmm6 | ||
4368 | psrld $23,%xmm6 | ||
4369 | |||
4370 | # qhasm: diag2 ^= a1 | ||
4371 | # asm 1: pxor <a1=int6464#6,<diag2=int6464#3 | ||
4372 | # asm 2: pxor <a1=%xmm5,<diag2=%xmm2 | ||
4373 | pxor %xmm5,%xmm2 | ||
4374 | |||
4375 | # qhasm: diag3 <<<= 32 | ||
4376 | # asm 1: pshufd $0x93,<diag3=int6464#4,<diag3=int6464#4 | ||
4377 | # asm 2: pshufd $0x93,<diag3=%xmm3,<diag3=%xmm3 | ||
4378 | pshufd $0x93,%xmm3,%xmm3 | ||
4379 | |||
4380 | # qhasm: diag2 ^= b1 | ||
4381 | # asm 1: pxor <b1=int6464#7,<diag2=int6464#3 | ||
4382 | # asm 2: pxor <b1=%xmm6,<diag2=%xmm2 | ||
4383 | pxor %xmm6,%xmm2 | ||
4384 | |||
4385 | # qhasm: uint32323232 a2 += diag2 | ||
4386 | # asm 1: paddd <diag2=int6464#3,<a2=int6464#5 | ||
4387 | # asm 2: paddd <diag2=%xmm2,<a2=%xmm4 | ||
4388 | paddd %xmm2,%xmm4 | ||
4389 | |||
4390 | # qhasm: a3 = diag2 | ||
4391 | # asm 1: movdqa <diag2=int6464#3,>a3=int6464#6 | ||
4392 | # asm 2: movdqa <diag2=%xmm2,>a3=%xmm5 | ||
4393 | movdqa %xmm2,%xmm5 | ||
4394 | |||
4395 | # qhasm: b2 = a2 | ||
4396 | # asm 1: movdqa <a2=int6464#5,>b2=int6464#7 | ||
4397 | # asm 2: movdqa <a2=%xmm4,>b2=%xmm6 | ||
4398 | movdqa %xmm4,%xmm6 | ||
4399 | |||
4400 | # qhasm: uint32323232 a2 <<= 13 | ||
4401 | # asm 1: pslld $13,<a2=int6464#5 | ||
4402 | # asm 2: pslld $13,<a2=%xmm4 | ||
4403 | pslld $13,%xmm4 | ||
4404 | |||
4405 | # qhasm: uint32323232 b2 >>= 19 | ||
4406 | # asm 1: psrld $19,<b2=int6464#7 | ||
4407 | # asm 2: psrld $19,<b2=%xmm6 | ||
4408 | psrld $19,%xmm6 | ||
4409 | |||
4410 | # qhasm: diag1 ^= a2 | ||
4411 | # asm 1: pxor <a2=int6464#5,<diag1=int6464#2 | ||
4412 | # asm 2: pxor <a2=%xmm4,<diag1=%xmm1 | ||
4413 | pxor %xmm4,%xmm1 | ||
4414 | |||
4415 | # qhasm: diag2 <<<= 64 | ||
4416 | # asm 1: pshufd $0x4e,<diag2=int6464#3,<diag2=int6464#3 | ||
4417 | # asm 2: pshufd $0x4e,<diag2=%xmm2,<diag2=%xmm2 | ||
4418 | pshufd $0x4e,%xmm2,%xmm2 | ||
4419 | |||
4420 | # qhasm: diag1 ^= b2 | ||
4421 | # asm 1: pxor <b2=int6464#7,<diag1=int6464#2 | ||
4422 | # asm 2: pxor <b2=%xmm6,<diag1=%xmm1 | ||
4423 | pxor %xmm6,%xmm1 | ||
4424 | |||
4425 | # qhasm: uint32323232 a3 += diag1 | ||
4426 | # asm 1: paddd <diag1=int6464#2,<a3=int6464#6 | ||
4427 | # asm 2: paddd <diag1=%xmm1,<a3=%xmm5 | ||
4428 | paddd %xmm1,%xmm5 | ||
4429 | |||
4430 | # qhasm: a4 = diag3 | ||
4431 | # asm 1: movdqa <diag3=int6464#4,>a4=int6464#5 | ||
4432 | # asm 2: movdqa <diag3=%xmm3,>a4=%xmm4 | ||
4433 | movdqa %xmm3,%xmm4 | ||
4434 | |||
4435 | # qhasm: b3 = a3 | ||
4436 | # asm 1: movdqa <a3=int6464#6,>b3=int6464#7 | ||
4437 | # asm 2: movdqa <a3=%xmm5,>b3=%xmm6 | ||
4438 | movdqa %xmm5,%xmm6 | ||
4439 | |||
4440 | # qhasm: uint32323232 a3 <<= 18 | ||
4441 | # asm 1: pslld $18,<a3=int6464#6 | ||
4442 | # asm 2: pslld $18,<a3=%xmm5 | ||
4443 | pslld $18,%xmm5 | ||
4444 | |||
4445 | # qhasm: uint32323232 b3 >>= 14 | ||
4446 | # asm 1: psrld $14,<b3=int6464#7 | ||
4447 | # asm 2: psrld $14,<b3=%xmm6 | ||
4448 | psrld $14,%xmm6 | ||
4449 | |||
4450 | # qhasm: diag0 ^= a3 | ||
4451 | # asm 1: pxor <a3=int6464#6,<diag0=int6464#1 | ||
4452 | # asm 2: pxor <a3=%xmm5,<diag0=%xmm0 | ||
4453 | pxor %xmm5,%xmm0 | ||
4454 | |||
4455 | # qhasm: diag1 <<<= 96 | ||
4456 | # asm 1: pshufd $0x39,<diag1=int6464#2,<diag1=int6464#2 | ||
4457 | # asm 2: pshufd $0x39,<diag1=%xmm1,<diag1=%xmm1 | ||
4458 | pshufd $0x39,%xmm1,%xmm1 | ||
4459 | |||
4460 | # qhasm: diag0 ^= b3 | ||
4461 | # asm 1: pxor <b3=int6464#7,<diag0=int6464#1 | ||
4462 | # asm 2: pxor <b3=%xmm6,<diag0=%xmm0 | ||
4463 | pxor %xmm6,%xmm0 | ||
4464 | |||
4465 | # qhasm: uint32323232 a4 += diag0 | ||
4466 | # asm 1: paddd <diag0=int6464#1,<a4=int6464#5 | ||
4467 | # asm 2: paddd <diag0=%xmm0,<a4=%xmm4 | ||
4468 | paddd %xmm0,%xmm4 | ||
4469 | |||
4470 | # qhasm: a5 = diag0 | ||
4471 | # asm 1: movdqa <diag0=int6464#1,>a5=int6464#6 | ||
4472 | # asm 2: movdqa <diag0=%xmm0,>a5=%xmm5 | ||
4473 | movdqa %xmm0,%xmm5 | ||
4474 | |||
4475 | # qhasm: b4 = a4 | ||
4476 | # asm 1: movdqa <a4=int6464#5,>b4=int6464#7 | ||
4477 | # asm 2: movdqa <a4=%xmm4,>b4=%xmm6 | ||
4478 | movdqa %xmm4,%xmm6 | ||
4479 | |||
4480 | # qhasm: uint32323232 a4 <<= 7 | ||
4481 | # asm 1: pslld $7,<a4=int6464#5 | ||
4482 | # asm 2: pslld $7,<a4=%xmm4 | ||
4483 | pslld $7,%xmm4 | ||
4484 | |||
4485 | # qhasm: uint32323232 b4 >>= 25 | ||
4486 | # asm 1: psrld $25,<b4=int6464#7 | ||
4487 | # asm 2: psrld $25,<b4=%xmm6 | ||
4488 | psrld $25,%xmm6 | ||
4489 | |||
4490 | # qhasm: diag1 ^= a4 | ||
4491 | # asm 1: pxor <a4=int6464#5,<diag1=int6464#2 | ||
4492 | # asm 2: pxor <a4=%xmm4,<diag1=%xmm1 | ||
4493 | pxor %xmm4,%xmm1 | ||
4494 | |||
4495 | # qhasm: diag1 ^= b4 | ||
4496 | # asm 1: pxor <b4=int6464#7,<diag1=int6464#2 | ||
4497 | # asm 2: pxor <b4=%xmm6,<diag1=%xmm1 | ||
4498 | pxor %xmm6,%xmm1 | ||
4499 | |||
4500 | # qhasm: uint32323232 a5 += diag1 | ||
4501 | # asm 1: paddd <diag1=int6464#2,<a5=int6464#6 | ||
4502 | # asm 2: paddd <diag1=%xmm1,<a5=%xmm5 | ||
4503 | paddd %xmm1,%xmm5 | ||
4504 | |||
4505 | # qhasm: a6 = diag1 | ||
4506 | # asm 1: movdqa <diag1=int6464#2,>a6=int6464#5 | ||
4507 | # asm 2: movdqa <diag1=%xmm1,>a6=%xmm4 | ||
4508 | movdqa %xmm1,%xmm4 | ||
4509 | |||
4510 | # qhasm: b5 = a5 | ||
4511 | # asm 1: movdqa <a5=int6464#6,>b5=int6464#7 | ||
4512 | # asm 2: movdqa <a5=%xmm5,>b5=%xmm6 | ||
4513 | movdqa %xmm5,%xmm6 | ||
4514 | |||
4515 | # qhasm: uint32323232 a5 <<= 9 | ||
4516 | # asm 1: pslld $9,<a5=int6464#6 | ||
4517 | # asm 2: pslld $9,<a5=%xmm5 | ||
4518 | pslld $9,%xmm5 | ||
4519 | |||
4520 | # qhasm: uint32323232 b5 >>= 23 | ||
4521 | # asm 1: psrld $23,<b5=int6464#7 | ||
4522 | # asm 2: psrld $23,<b5=%xmm6 | ||
4523 | psrld $23,%xmm6 | ||
4524 | |||
4525 | # qhasm: diag2 ^= a5 | ||
4526 | # asm 1: pxor <a5=int6464#6,<diag2=int6464#3 | ||
4527 | # asm 2: pxor <a5=%xmm5,<diag2=%xmm2 | ||
4528 | pxor %xmm5,%xmm2 | ||
4529 | |||
4530 | # qhasm: diag1 <<<= 32 | ||
4531 | # asm 1: pshufd $0x93,<diag1=int6464#2,<diag1=int6464#2 | ||
4532 | # asm 2: pshufd $0x93,<diag1=%xmm1,<diag1=%xmm1 | ||
4533 | pshufd $0x93,%xmm1,%xmm1 | ||
4534 | |||
4535 | # qhasm: diag2 ^= b5 | ||
4536 | # asm 1: pxor <b5=int6464#7,<diag2=int6464#3 | ||
4537 | # asm 2: pxor <b5=%xmm6,<diag2=%xmm2 | ||
4538 | pxor %xmm6,%xmm2 | ||
4539 | |||
4540 | # qhasm: uint32323232 a6 += diag2 | ||
4541 | # asm 1: paddd <diag2=int6464#3,<a6=int6464#5 | ||
4542 | # asm 2: paddd <diag2=%xmm2,<a6=%xmm4 | ||
4543 | paddd %xmm2,%xmm4 | ||
4544 | |||
4545 | # qhasm: a7 = diag2 | ||
4546 | # asm 1: movdqa <diag2=int6464#3,>a7=int6464#6 | ||
4547 | # asm 2: movdqa <diag2=%xmm2,>a7=%xmm5 | ||
4548 | movdqa %xmm2,%xmm5 | ||
4549 | |||
4550 | # qhasm: b6 = a6 | ||
4551 | # asm 1: movdqa <a6=int6464#5,>b6=int6464#7 | ||
4552 | # asm 2: movdqa <a6=%xmm4,>b6=%xmm6 | ||
4553 | movdqa %xmm4,%xmm6 | ||
4554 | |||
4555 | # qhasm: uint32323232 a6 <<= 13 | ||
4556 | # asm 1: pslld $13,<a6=int6464#5 | ||
4557 | # asm 2: pslld $13,<a6=%xmm4 | ||
4558 | pslld $13,%xmm4 | ||
4559 | |||
4560 | # qhasm: uint32323232 b6 >>= 19 | ||
4561 | # asm 1: psrld $19,<b6=int6464#7 | ||
4562 | # asm 2: psrld $19,<b6=%xmm6 | ||
4563 | psrld $19,%xmm6 | ||
4564 | |||
4565 | # qhasm: diag3 ^= a6 | ||
4566 | # asm 1: pxor <a6=int6464#5,<diag3=int6464#4 | ||
4567 | # asm 2: pxor <a6=%xmm4,<diag3=%xmm3 | ||
4568 | pxor %xmm4,%xmm3 | ||
4569 | |||
4570 | # qhasm: diag2 <<<= 64 | ||
4571 | # asm 1: pshufd $0x4e,<diag2=int6464#3,<diag2=int6464#3 | ||
4572 | # asm 2: pshufd $0x4e,<diag2=%xmm2,<diag2=%xmm2 | ||
4573 | pshufd $0x4e,%xmm2,%xmm2 | ||
4574 | |||
4575 | # qhasm: diag3 ^= b6 | ||
4576 | # asm 1: pxor <b6=int6464#7,<diag3=int6464#4 | ||
4577 | # asm 2: pxor <b6=%xmm6,<diag3=%xmm3 | ||
4578 | pxor %xmm6,%xmm3 | ||
4579 | |||
4580 | # qhasm: unsigned>? i -= 4 | ||
4581 | # asm 1: sub $4,<i=int32#1 | ||
4582 | # asm 2: sub $4,<i=%eax | ||
4583 | sub $4,%eax | ||
4584 | |||
4585 | # qhasm: uint32323232 a7 += diag3 | ||
4586 | # asm 1: paddd <diag3=int6464#4,<a7=int6464#6 | ||
4587 | # asm 2: paddd <diag3=%xmm3,<a7=%xmm5 | ||
4588 | paddd %xmm3,%xmm5 | ||
4589 | |||
4590 | # qhasm: a0 = diag1 | ||
4591 | # asm 1: movdqa <diag1=int6464#2,>a0=int6464#5 | ||
4592 | # asm 2: movdqa <diag1=%xmm1,>a0=%xmm4 | ||
4593 | movdqa %xmm1,%xmm4 | ||
4594 | |||
4595 | # qhasm: b7 = a7 | ||
4596 | # asm 1: movdqa <a7=int6464#6,>b7=int6464#7 | ||
4597 | # asm 2: movdqa <a7=%xmm5,>b7=%xmm6 | ||
4598 | movdqa %xmm5,%xmm6 | ||
4599 | |||
4600 | # qhasm: uint32323232 a7 <<= 18 | ||
4601 | # asm 1: pslld $18,<a7=int6464#6 | ||
4602 | # asm 2: pslld $18,<a7=%xmm5 | ||
4603 | pslld $18,%xmm5 | ||
4604 | |||
4605 | # qhasm: b0 = 0 | ||
4606 | # asm 1: pxor >b0=int6464#8,>b0=int6464#8 | ||
4607 | # asm 2: pxor >b0=%xmm7,>b0=%xmm7 | ||
4608 | pxor %xmm7,%xmm7 | ||
4609 | |||
4610 | # qhasm: uint32323232 b7 >>= 14 | ||
4611 | # asm 1: psrld $14,<b7=int6464#7 | ||
4612 | # asm 2: psrld $14,<b7=%xmm6 | ||
4613 | psrld $14,%xmm6 | ||
4614 | |||
4615 | # qhasm: diag0 ^= a7 | ||
4616 | # asm 1: pxor <a7=int6464#6,<diag0=int6464#1 | ||
4617 | # asm 2: pxor <a7=%xmm5,<diag0=%xmm0 | ||
4618 | pxor %xmm5,%xmm0 | ||
4619 | |||
4620 | # qhasm: diag3 <<<= 96 | ||
4621 | # asm 1: pshufd $0x39,<diag3=int6464#4,<diag3=int6464#4 | ||
4622 | # asm 2: pshufd $0x39,<diag3=%xmm3,<diag3=%xmm3 | ||
4623 | pshufd $0x39,%xmm3,%xmm3 | ||
4624 | |||
4625 | # qhasm: diag0 ^= b7 | ||
4626 | # asm 1: pxor <b7=int6464#7,<diag0=int6464#1 | ||
4627 | # asm 2: pxor <b7=%xmm6,<diag0=%xmm0 | ||
4628 | pxor %xmm6,%xmm0 | ||
4629 | # comment:fp stack unchanged by jump | ||
4630 | |||
4631 | # qhasm: goto mainloop2 if unsigned> | ||
4632 | ja ._mainloop2 | ||
4633 | |||
4634 | # qhasm: uint32323232 diag0 += x0 | ||
4635 | # asm 1: paddd <x0=stack128#3,<diag0=int6464#1 | ||
4636 | # asm 2: paddd <x0=64(%esp),<diag0=%xmm0 | ||
4637 | paddd 64(%esp),%xmm0 | ||
4638 | |||
4639 | # qhasm: uint32323232 diag1 += x1 | ||
4640 | # asm 1: paddd <x1=stack128#2,<diag1=int6464#2 | ||
4641 | # asm 2: paddd <x1=48(%esp),<diag1=%xmm1 | ||
4642 | paddd 48(%esp),%xmm1 | ||
4643 | |||
4644 | # qhasm: uint32323232 diag2 += x2 | ||
4645 | # asm 1: paddd <x2=stack128#4,<diag2=int6464#3 | ||
4646 | # asm 2: paddd <x2=80(%esp),<diag2=%xmm2 | ||
4647 | paddd 80(%esp),%xmm2 | ||
4648 | |||
4649 | # qhasm: uint32323232 diag3 += x3 | ||
4650 | # asm 1: paddd <x3=stack128#1,<diag3=int6464#4 | ||
4651 | # asm 2: paddd <x3=32(%esp),<diag3=%xmm3 | ||
4652 | paddd 32(%esp),%xmm3 | ||
4653 | |||
4654 | # qhasm: in0 = diag0 | ||
4655 | # asm 1: movd <diag0=int6464#1,>in0=int32#1 | ||
4656 | # asm 2: movd <diag0=%xmm0,>in0=%eax | ||
4657 | movd %xmm0,%eax | ||
4658 | |||
4659 | # qhasm: in12 = diag1 | ||
4660 | # asm 1: movd <diag1=int6464#2,>in12=int32#2 | ||
4661 | # asm 2: movd <diag1=%xmm1,>in12=%ecx | ||
4662 | movd %xmm1,%ecx | ||
4663 | |||
4664 | # qhasm: in8 = diag2 | ||
4665 | # asm 1: movd <diag2=int6464#3,>in8=int32#3 | ||
4666 | # asm 2: movd <diag2=%xmm2,>in8=%edx | ||
4667 | movd %xmm2,%edx | ||
4668 | |||
4669 | # qhasm: in4 = diag3 | ||
4670 | # asm 1: movd <diag3=int6464#4,>in4=int32#4 | ||
4671 | # asm 2: movd <diag3=%xmm3,>in4=%ebx | ||
4672 | movd %xmm3,%ebx | ||
4673 | |||
4674 | # qhasm: diag0 <<<= 96 | ||
4675 | # asm 1: pshufd $0x39,<diag0=int6464#1,<diag0=int6464#1 | ||
4676 | # asm 2: pshufd $0x39,<diag0=%xmm0,<diag0=%xmm0 | ||
4677 | pshufd $0x39,%xmm0,%xmm0 | ||
4678 | |||
4679 | # qhasm: diag1 <<<= 96 | ||
4680 | # asm 1: pshufd $0x39,<diag1=int6464#2,<diag1=int6464#2 | ||
4681 | # asm 2: pshufd $0x39,<diag1=%xmm1,<diag1=%xmm1 | ||
4682 | pshufd $0x39,%xmm1,%xmm1 | ||
4683 | |||
4684 | # qhasm: diag2 <<<= 96 | ||
4685 | # asm 1: pshufd $0x39,<diag2=int6464#3,<diag2=int6464#3 | ||
4686 | # asm 2: pshufd $0x39,<diag2=%xmm2,<diag2=%xmm2 | ||
4687 | pshufd $0x39,%xmm2,%xmm2 | ||
4688 | |||
4689 | # qhasm: diag3 <<<= 96 | ||
4690 | # asm 1: pshufd $0x39,<diag3=int6464#4,<diag3=int6464#4 | ||
4691 | # asm 2: pshufd $0x39,<diag3=%xmm3,<diag3=%xmm3 | ||
4692 | pshufd $0x39,%xmm3,%xmm3 | ||
4693 | |||
4694 | # qhasm: in0 ^= *(uint32 *) (m + 0) | ||
4695 | # asm 1: xorl 0(<m=int32#5),<in0=int32#1 | ||
4696 | # asm 2: xorl 0(<m=%esi),<in0=%eax | ||
4697 | xorl 0(%esi),%eax | ||
4698 | |||
4699 | # qhasm: in12 ^= *(uint32 *) (m + 48) | ||
4700 | # asm 1: xorl 48(<m=int32#5),<in12=int32#2 | ||
4701 | # asm 2: xorl 48(<m=%esi),<in12=%ecx | ||
4702 | xorl 48(%esi),%ecx | ||
4703 | |||
4704 | # qhasm: in8 ^= *(uint32 *) (m + 32) | ||
4705 | # asm 1: xorl 32(<m=int32#5),<in8=int32#3 | ||
4706 | # asm 2: xorl 32(<m=%esi),<in8=%edx | ||
4707 | xorl 32(%esi),%edx | ||
4708 | |||
4709 | # qhasm: in4 ^= *(uint32 *) (m + 16) | ||
4710 | # asm 1: xorl 16(<m=int32#5),<in4=int32#4 | ||
4711 | # asm 2: xorl 16(<m=%esi),<in4=%ebx | ||
4712 | xorl 16(%esi),%ebx | ||
4713 | |||
4714 | # qhasm: *(uint32 *) (out + 0) = in0 | ||
4715 | # asm 1: movl <in0=int32#1,0(<out=int32#6) | ||
4716 | # asm 2: movl <in0=%eax,0(<out=%edi) | ||
4717 | movl %eax,0(%edi) | ||
4718 | |||
4719 | # qhasm: *(uint32 *) (out + 48) = in12 | ||
4720 | # asm 1: movl <in12=int32#2,48(<out=int32#6) | ||
4721 | # asm 2: movl <in12=%ecx,48(<out=%edi) | ||
4722 | movl %ecx,48(%edi) | ||
4723 | |||
4724 | # qhasm: *(uint32 *) (out + 32) = in8 | ||
4725 | # asm 1: movl <in8=int32#3,32(<out=int32#6) | ||
4726 | # asm 2: movl <in8=%edx,32(<out=%edi) | ||
4727 | movl %edx,32(%edi) | ||
4728 | |||
4729 | # qhasm: *(uint32 *) (out + 16) = in4 | ||
4730 | # asm 1: movl <in4=int32#4,16(<out=int32#6) | ||
4731 | # asm 2: movl <in4=%ebx,16(<out=%edi) | ||
4732 | movl %ebx,16(%edi) | ||
4733 | |||
4734 | # qhasm: in5 = diag0 | ||
4735 | # asm 1: movd <diag0=int6464#1,>in5=int32#1 | ||
4736 | # asm 2: movd <diag0=%xmm0,>in5=%eax | ||
4737 | movd %xmm0,%eax | ||
4738 | |||
4739 | # qhasm: in1 = diag1 | ||
4740 | # asm 1: movd <diag1=int6464#2,>in1=int32#2 | ||
4741 | # asm 2: movd <diag1=%xmm1,>in1=%ecx | ||
4742 | movd %xmm1,%ecx | ||
4743 | |||
4744 | # qhasm: in13 = diag2 | ||
4745 | # asm 1: movd <diag2=int6464#3,>in13=int32#3 | ||
4746 | # asm 2: movd <diag2=%xmm2,>in13=%edx | ||
4747 | movd %xmm2,%edx | ||
4748 | |||
4749 | # qhasm: in9 = diag3 | ||
4750 | # asm 1: movd <diag3=int6464#4,>in9=int32#4 | ||
4751 | # asm 2: movd <diag3=%xmm3,>in9=%ebx | ||
4752 | movd %xmm3,%ebx | ||
4753 | |||
4754 | # qhasm: diag0 <<<= 96 | ||
4755 | # asm 1: pshufd $0x39,<diag0=int6464#1,<diag0=int6464#1 | ||
4756 | # asm 2: pshufd $0x39,<diag0=%xmm0,<diag0=%xmm0 | ||
4757 | pshufd $0x39,%xmm0,%xmm0 | ||
4758 | |||
4759 | # qhasm: diag1 <<<= 96 | ||
4760 | # asm 1: pshufd $0x39,<diag1=int6464#2,<diag1=int6464#2 | ||
4761 | # asm 2: pshufd $0x39,<diag1=%xmm1,<diag1=%xmm1 | ||
4762 | pshufd $0x39,%xmm1,%xmm1 | ||
4763 | |||
4764 | # qhasm: diag2 <<<= 96 | ||
4765 | # asm 1: pshufd $0x39,<diag2=int6464#3,<diag2=int6464#3 | ||
4766 | # asm 2: pshufd $0x39,<diag2=%xmm2,<diag2=%xmm2 | ||
4767 | pshufd $0x39,%xmm2,%xmm2 | ||
4768 | |||
4769 | # qhasm: diag3 <<<= 96 | ||
4770 | # asm 1: pshufd $0x39,<diag3=int6464#4,<diag3=int6464#4 | ||
4771 | # asm 2: pshufd $0x39,<diag3=%xmm3,<diag3=%xmm3 | ||
4772 | pshufd $0x39,%xmm3,%xmm3 | ||
4773 | |||
4774 | # qhasm: in5 ^= *(uint32 *) (m + 20) | ||
4775 | # asm 1: xorl 20(<m=int32#5),<in5=int32#1 | ||
4776 | # asm 2: xorl 20(<m=%esi),<in5=%eax | ||
4777 | xorl 20(%esi),%eax | ||
4778 | |||
4779 | # qhasm: in1 ^= *(uint32 *) (m + 4) | ||
4780 | # asm 1: xorl 4(<m=int32#5),<in1=int32#2 | ||
4781 | # asm 2: xorl 4(<m=%esi),<in1=%ecx | ||
4782 | xorl 4(%esi),%ecx | ||
4783 | |||
4784 | # qhasm: in13 ^= *(uint32 *) (m + 52) | ||
4785 | # asm 1: xorl 52(<m=int32#5),<in13=int32#3 | ||
4786 | # asm 2: xorl 52(<m=%esi),<in13=%edx | ||
4787 | xorl 52(%esi),%edx | ||
4788 | |||
4789 | # qhasm: in9 ^= *(uint32 *) (m + 36) | ||
4790 | # asm 1: xorl 36(<m=int32#5),<in9=int32#4 | ||
4791 | # asm 2: xorl 36(<m=%esi),<in9=%ebx | ||
4792 | xorl 36(%esi),%ebx | ||
4793 | |||
4794 | # qhasm: *(uint32 *) (out + 20) = in5 | ||
4795 | # asm 1: movl <in5=int32#1,20(<out=int32#6) | ||
4796 | # asm 2: movl <in5=%eax,20(<out=%edi) | ||
4797 | movl %eax,20(%edi) | ||
4798 | |||
4799 | # qhasm: *(uint32 *) (out + 4) = in1 | ||
4800 | # asm 1: movl <in1=int32#2,4(<out=int32#6) | ||
4801 | # asm 2: movl <in1=%ecx,4(<out=%edi) | ||
4802 | movl %ecx,4(%edi) | ||
4803 | |||
4804 | # qhasm: *(uint32 *) (out + 52) = in13 | ||
4805 | # asm 1: movl <in13=int32#3,52(<out=int32#6) | ||
4806 | # asm 2: movl <in13=%edx,52(<out=%edi) | ||
4807 | movl %edx,52(%edi) | ||
4808 | |||
4809 | # qhasm: *(uint32 *) (out + 36) = in9 | ||
4810 | # asm 1: movl <in9=int32#4,36(<out=int32#6) | ||
4811 | # asm 2: movl <in9=%ebx,36(<out=%edi) | ||
4812 | movl %ebx,36(%edi) | ||
4813 | |||
4814 | # qhasm: in10 = diag0 | ||
4815 | # asm 1: movd <diag0=int6464#1,>in10=int32#1 | ||
4816 | # asm 2: movd <diag0=%xmm0,>in10=%eax | ||
4817 | movd %xmm0,%eax | ||
4818 | |||
4819 | # qhasm: in6 = diag1 | ||
4820 | # asm 1: movd <diag1=int6464#2,>in6=int32#2 | ||
4821 | # asm 2: movd <diag1=%xmm1,>in6=%ecx | ||
4822 | movd %xmm1,%ecx | ||
4823 | |||
4824 | # qhasm: in2 = diag2 | ||
4825 | # asm 1: movd <diag2=int6464#3,>in2=int32#3 | ||
4826 | # asm 2: movd <diag2=%xmm2,>in2=%edx | ||
4827 | movd %xmm2,%edx | ||
4828 | |||
4829 | # qhasm: in14 = diag3 | ||
4830 | # asm 1: movd <diag3=int6464#4,>in14=int32#4 | ||
4831 | # asm 2: movd <diag3=%xmm3,>in14=%ebx | ||
4832 | movd %xmm3,%ebx | ||
4833 | |||
4834 | # qhasm: diag0 <<<= 96 | ||
4835 | # asm 1: pshufd $0x39,<diag0=int6464#1,<diag0=int6464#1 | ||
4836 | # asm 2: pshufd $0x39,<diag0=%xmm0,<diag0=%xmm0 | ||
4837 | pshufd $0x39,%xmm0,%xmm0 | ||
4838 | |||
4839 | # qhasm: diag1 <<<= 96 | ||
4840 | # asm 1: pshufd $0x39,<diag1=int6464#2,<diag1=int6464#2 | ||
4841 | # asm 2: pshufd $0x39,<diag1=%xmm1,<diag1=%xmm1 | ||
4842 | pshufd $0x39,%xmm1,%xmm1 | ||
4843 | |||
4844 | # qhasm: diag2 <<<= 96 | ||
4845 | # asm 1: pshufd $0x39,<diag2=int6464#3,<diag2=int6464#3 | ||
4846 | # asm 2: pshufd $0x39,<diag2=%xmm2,<diag2=%xmm2 | ||
4847 | pshufd $0x39,%xmm2,%xmm2 | ||
4848 | |||
4849 | # qhasm: diag3 <<<= 96 | ||
4850 | # asm 1: pshufd $0x39,<diag3=int6464#4,<diag3=int6464#4 | ||
4851 | # asm 2: pshufd $0x39,<diag3=%xmm3,<diag3=%xmm3 | ||
4852 | pshufd $0x39,%xmm3,%xmm3 | ||
4853 | |||
4854 | # qhasm: in10 ^= *(uint32 *) (m + 40) | ||
4855 | # asm 1: xorl 40(<m=int32#5),<in10=int32#1 | ||
4856 | # asm 2: xorl 40(<m=%esi),<in10=%eax | ||
4857 | xorl 40(%esi),%eax | ||
4858 | |||
4859 | # qhasm: in6 ^= *(uint32 *) (m + 24) | ||
4860 | # asm 1: xorl 24(<m=int32#5),<in6=int32#2 | ||
4861 | # asm 2: xorl 24(<m=%esi),<in6=%ecx | ||
4862 | xorl 24(%esi),%ecx | ||
4863 | |||
4864 | # qhasm: in2 ^= *(uint32 *) (m + 8) | ||
4865 | # asm 1: xorl 8(<m=int32#5),<in2=int32#3 | ||
4866 | # asm 2: xorl 8(<m=%esi),<in2=%edx | ||
4867 | xorl 8(%esi),%edx | ||
4868 | |||
4869 | # qhasm: in14 ^= *(uint32 *) (m + 56) | ||
4870 | # asm 1: xorl 56(<m=int32#5),<in14=int32#4 | ||
4871 | # asm 2: xorl 56(<m=%esi),<in14=%ebx | ||
4872 | xorl 56(%esi),%ebx | ||
4873 | |||
4874 | # qhasm: *(uint32 *) (out + 40) = in10 | ||
4875 | # asm 1: movl <in10=int32#1,40(<out=int32#6) | ||
4876 | # asm 2: movl <in10=%eax,40(<out=%edi) | ||
4877 | movl %eax,40(%edi) | ||
4878 | |||
4879 | # qhasm: *(uint32 *) (out + 24) = in6 | ||
4880 | # asm 1: movl <in6=int32#2,24(<out=int32#6) | ||
4881 | # asm 2: movl <in6=%ecx,24(<out=%edi) | ||
4882 | movl %ecx,24(%edi) | ||
4883 | |||
4884 | # qhasm: *(uint32 *) (out + 8) = in2 | ||
4885 | # asm 1: movl <in2=int32#3,8(<out=int32#6) | ||
4886 | # asm 2: movl <in2=%edx,8(<out=%edi) | ||
4887 | movl %edx,8(%edi) | ||
4888 | |||
4889 | # qhasm: *(uint32 *) (out + 56) = in14 | ||
4890 | # asm 1: movl <in14=int32#4,56(<out=int32#6) | ||
4891 | # asm 2: movl <in14=%ebx,56(<out=%edi) | ||
4892 | movl %ebx,56(%edi) | ||
4893 | |||
4894 | # qhasm: in15 = diag0 | ||
4895 | # asm 1: movd <diag0=int6464#1,>in15=int32#1 | ||
4896 | # asm 2: movd <diag0=%xmm0,>in15=%eax | ||
4897 | movd %xmm0,%eax | ||
4898 | |||
4899 | # qhasm: in11 = diag1 | ||
4900 | # asm 1: movd <diag1=int6464#2,>in11=int32#2 | ||
4901 | # asm 2: movd <diag1=%xmm1,>in11=%ecx | ||
4902 | movd %xmm1,%ecx | ||
4903 | |||
4904 | # qhasm: in7 = diag2 | ||
4905 | # asm 1: movd <diag2=int6464#3,>in7=int32#3 | ||
4906 | # asm 2: movd <diag2=%xmm2,>in7=%edx | ||
4907 | movd %xmm2,%edx | ||
4908 | |||
4909 | # qhasm: in3 = diag3 | ||
4910 | # asm 1: movd <diag3=int6464#4,>in3=int32#4 | ||
4911 | # asm 2: movd <diag3=%xmm3,>in3=%ebx | ||
4912 | movd %xmm3,%ebx | ||
4913 | |||
4914 | # qhasm: in15 ^= *(uint32 *) (m + 60) | ||
4915 | # asm 1: xorl 60(<m=int32#5),<in15=int32#1 | ||
4916 | # asm 2: xorl 60(<m=%esi),<in15=%eax | ||
4917 | xorl 60(%esi),%eax | ||
4918 | |||
4919 | # qhasm: in11 ^= *(uint32 *) (m + 44) | ||
4920 | # asm 1: xorl 44(<m=int32#5),<in11=int32#2 | ||
4921 | # asm 2: xorl 44(<m=%esi),<in11=%ecx | ||
4922 | xorl 44(%esi),%ecx | ||
4923 | |||
4924 | # qhasm: in7 ^= *(uint32 *) (m + 28) | ||
4925 | # asm 1: xorl 28(<m=int32#5),<in7=int32#3 | ||
4926 | # asm 2: xorl 28(<m=%esi),<in7=%edx | ||
4927 | xorl 28(%esi),%edx | ||
4928 | |||
4929 | # qhasm: in3 ^= *(uint32 *) (m + 12) | ||
4930 | # asm 1: xorl 12(<m=int32#5),<in3=int32#4 | ||
4931 | # asm 2: xorl 12(<m=%esi),<in3=%ebx | ||
4932 | xorl 12(%esi),%ebx | ||
4933 | |||
4934 | # qhasm: *(uint32 *) (out + 60) = in15 | ||
4935 | # asm 1: movl <in15=int32#1,60(<out=int32#6) | ||
4936 | # asm 2: movl <in15=%eax,60(<out=%edi) | ||
4937 | movl %eax,60(%edi) | ||
4938 | |||
4939 | # qhasm: *(uint32 *) (out + 44) = in11 | ||
4940 | # asm 1: movl <in11=int32#2,44(<out=int32#6) | ||
4941 | # asm 2: movl <in11=%ecx,44(<out=%edi) | ||
4942 | movl %ecx,44(%edi) | ||
4943 | |||
4944 | # qhasm: *(uint32 *) (out + 28) = in7 | ||
4945 | # asm 1: movl <in7=int32#3,28(<out=int32#6) | ||
4946 | # asm 2: movl <in7=%edx,28(<out=%edi) | ||
4947 | movl %edx,28(%edi) | ||
4948 | |||
4949 | # qhasm: *(uint32 *) (out + 12) = in3 | ||
4950 | # asm 1: movl <in3=int32#4,12(<out=int32#6) | ||
4951 | # asm 2: movl <in3=%ebx,12(<out=%edi) | ||
4952 | movl %ebx,12(%edi) | ||
4953 | |||
4954 | # qhasm: bytes = bytes_stack | ||
4955 | # asm 1: movl <bytes_stack=stack32#7,>bytes=int32#1 | ||
4956 | # asm 2: movl <bytes_stack=24(%esp),>bytes=%eax | ||
4957 | movl 24(%esp),%eax | ||
4958 | |||
4959 | # qhasm: in8 = ((uint32 *)&x2)[0] | ||
4960 | # asm 1: movl <x2=stack128#4,>in8=int32#2 | ||
4961 | # asm 2: movl <x2=80(%esp),>in8=%ecx | ||
4962 | movl 80(%esp),%ecx | ||
4963 | |||
4964 | # qhasm: in9 = ((uint32 *)&x3)[1] | ||
4965 | # asm 1: movl 4+<x3=stack128#1,>in9=int32#3 | ||
4966 | # asm 2: movl 4+<x3=32(%esp),>in9=%edx | ||
4967 | movl 4+32(%esp),%edx | ||
4968 | |||
4969 | # qhasm: carry? in8 += 1 | ||
4970 | # asm 1: add $1,<in8=int32#2 | ||
4971 | # asm 2: add $1,<in8=%ecx | ||
4972 | add $1,%ecx | ||
4973 | |||
4974 | # qhasm: in9 += 0 + carry | ||
4975 | # asm 1: adc $0,<in9=int32#3 | ||
4976 | # asm 2: adc $0,<in9=%edx | ||
4977 | adc $0,%edx | ||
4978 | |||
4979 | # qhasm: ((uint32 *)&x2)[0] = in8 | ||
4980 | # asm 1: movl <in8=int32#2,>x2=stack128#4 | ||
4981 | # asm 2: movl <in8=%ecx,>x2=80(%esp) | ||
4982 | movl %ecx,80(%esp) | ||
4983 | |||
4984 | # qhasm: ((uint32 *)&x3)[1] = in9 | ||
4985 | # asm 1: movl <in9=int32#3,4+<x3=stack128#1 | ||
4986 | # asm 2: movl <in9=%edx,4+<x3=32(%esp) | ||
4987 | movl %edx,4+32(%esp) | ||
4988 | |||
4989 | # qhasm: unsigned>? unsigned<? bytes - 64 | ||
4990 | # asm 1: cmp $64,<bytes=int32#1 | ||
4991 | # asm 2: cmp $64,<bytes=%eax | ||
4992 | cmp $64,%eax | ||
4993 | # comment:fp stack unchanged by jump | ||
4994 | |||
4995 | # qhasm: goto bytesatleast65 if unsigned> | ||
4996 | ja ._bytesatleast65 | ||
4997 | # comment:fp stack unchanged by jump | ||
4998 | |||
4999 | # qhasm: goto bytesatleast64 if !unsigned< | ||
5000 | jae ._bytesatleast64 | ||
5001 | |||
5002 | # qhasm: m = out | ||
5003 | # asm 1: mov <out=int32#6,>m=int32#5 | ||
5004 | # asm 2: mov <out=%edi,>m=%esi | ||
5005 | mov %edi,%esi | ||
5006 | |||
5007 | # qhasm: out = ctarget | ||
5008 | # asm 1: movl <ctarget=stack32#6,>out=int32#6 | ||
5009 | # asm 2: movl <ctarget=20(%esp),>out=%edi | ||
5010 | movl 20(%esp),%edi | ||
5011 | |||
5012 | # qhasm: i = bytes | ||
5013 | # asm 1: mov <bytes=int32#1,>i=int32#2 | ||
5014 | # asm 2: mov <bytes=%eax,>i=%ecx | ||
5015 | mov %eax,%ecx | ||
5016 | |||
5017 | # qhasm: while (i) { *out++ = *m++; --i } | ||
5018 | rep movsb | ||
5019 | # comment:fp stack unchanged by fallthrough | ||
5020 | |||
5021 | # qhasm: bytesatleast64: | ||
5022 | ._bytesatleast64: | ||
5023 | # comment:fp stack unchanged by fallthrough | ||
5024 | |||
5025 | # qhasm: done: | ||
5026 | ._done: | ||
5027 | |||
5028 | # qhasm: eax = eax_stack | ||
5029 | # asm 1: movl <eax_stack=stack32#1,>eax=int32#1 | ||
5030 | # asm 2: movl <eax_stack=0(%esp),>eax=%eax | ||
5031 | movl 0(%esp),%eax | ||
5032 | |||
5033 | # qhasm: ebx = ebx_stack | ||
5034 | # asm 1: movl <ebx_stack=stack32#2,>ebx=int32#4 | ||
5035 | # asm 2: movl <ebx_stack=4(%esp),>ebx=%ebx | ||
5036 | movl 4(%esp),%ebx | ||
5037 | |||
5038 | # qhasm: esi = esi_stack | ||
5039 | # asm 1: movl <esi_stack=stack32#3,>esi=int32#5 | ||
5040 | # asm 2: movl <esi_stack=8(%esp),>esi=%esi | ||
5041 | movl 8(%esp),%esi | ||
5042 | |||
5043 | # qhasm: edi = edi_stack | ||
5044 | # asm 1: movl <edi_stack=stack32#4,>edi=int32#6 | ||
5045 | # asm 2: movl <edi_stack=12(%esp),>edi=%edi | ||
5046 | movl 12(%esp),%edi | ||
5047 | |||
5048 | # qhasm: ebp = ebp_stack | ||
5049 | # asm 1: movl <ebp_stack=stack32#5,>ebp=int32#7 | ||
5050 | # asm 2: movl <ebp_stack=16(%esp),>ebp=%ebp | ||
5051 | movl 16(%esp),%ebp | ||
5052 | |||
5053 | # qhasm: leave | ||
5054 | add %eax,%esp | ||
5055 | xor %eax,%eax | ||
5056 | ret | ||
5057 | |||
5058 | # qhasm: bytesatleast65: | ||
5059 | ._bytesatleast65: | ||
5060 | |||
5061 | # qhasm: bytes -= 64 | ||
5062 | # asm 1: sub $64,<bytes=int32#1 | ||
5063 | # asm 2: sub $64,<bytes=%eax | ||
5064 | sub $64,%eax | ||
5065 | |||
5066 | # qhasm: out += 64 | ||
5067 | # asm 1: add $64,<out=int32#6 | ||
5068 | # asm 2: add $64,<out=%edi | ||
5069 | add $64,%edi | ||
5070 | |||
5071 | # qhasm: m += 64 | ||
5072 | # asm 1: add $64,<m=int32#5 | ||
5073 | # asm 2: add $64,<m=%esi | ||
5074 | add $64,%esi | ||
5075 | # comment:fp stack unchanged by jump | ||
5076 | |||
5077 | # qhasm: goto bytesbetween1and255 | ||
5078 | jmp ._bytesbetween1and255 | ||
diff --git a/nacl/crypto_stream/salsa2012/amd64_xmm6/api.h b/nacl/crypto_stream/salsa2012/amd64_xmm6/api.h new file mode 100644 index 00000000..c2b18461 --- /dev/null +++ b/nacl/crypto_stream/salsa2012/amd64_xmm6/api.h | |||
@@ -0,0 +1,2 @@ | |||
1 | #define CRYPTO_KEYBYTES 32 | ||
2 | #define CRYPTO_NONCEBYTES 8 | ||
diff --git a/nacl/crypto_stream/salsa2012/amd64_xmm6/implementors b/nacl/crypto_stream/salsa2012/amd64_xmm6/implementors new file mode 100644 index 00000000..f6fb3c73 --- /dev/null +++ b/nacl/crypto_stream/salsa2012/amd64_xmm6/implementors | |||
@@ -0,0 +1 @@ | |||
Daniel J. Bernstein | |||
diff --git a/nacl/crypto_stream/salsa2012/amd64_xmm6/stream.s b/nacl/crypto_stream/salsa2012/amd64_xmm6/stream.s new file mode 100644 index 00000000..0e26dc9f --- /dev/null +++ b/nacl/crypto_stream/salsa2012/amd64_xmm6/stream.s | |||
@@ -0,0 +1,4823 @@ | |||
1 | |||
2 | # qhasm: int64 r11_caller | ||
3 | |||
4 | # qhasm: int64 r12_caller | ||
5 | |||
6 | # qhasm: int64 r13_caller | ||
7 | |||
8 | # qhasm: int64 r14_caller | ||
9 | |||
10 | # qhasm: int64 r15_caller | ||
11 | |||
12 | # qhasm: int64 rbx_caller | ||
13 | |||
14 | # qhasm: int64 rbp_caller | ||
15 | |||
16 | # qhasm: caller r11_caller | ||
17 | |||
18 | # qhasm: caller r12_caller | ||
19 | |||
20 | # qhasm: caller r13_caller | ||
21 | |||
22 | # qhasm: caller r14_caller | ||
23 | |||
24 | # qhasm: caller r15_caller | ||
25 | |||
26 | # qhasm: caller rbx_caller | ||
27 | |||
28 | # qhasm: caller rbp_caller | ||
29 | |||
30 | # qhasm: stack64 r11_stack | ||
31 | |||
32 | # qhasm: stack64 r12_stack | ||
33 | |||
34 | # qhasm: stack64 r13_stack | ||
35 | |||
36 | # qhasm: stack64 r14_stack | ||
37 | |||
38 | # qhasm: stack64 r15_stack | ||
39 | |||
40 | # qhasm: stack64 rbx_stack | ||
41 | |||
42 | # qhasm: stack64 rbp_stack | ||
43 | |||
44 | # qhasm: int64 a | ||
45 | |||
46 | # qhasm: int64 arg1 | ||
47 | |||
48 | # qhasm: int64 arg2 | ||
49 | |||
50 | # qhasm: int64 arg3 | ||
51 | |||
52 | # qhasm: int64 arg4 | ||
53 | |||
54 | # qhasm: int64 arg5 | ||
55 | |||
56 | # qhasm: input arg1 | ||
57 | |||
58 | # qhasm: input arg2 | ||
59 | |||
60 | # qhasm: input arg3 | ||
61 | |||
62 | # qhasm: input arg4 | ||
63 | |||
64 | # qhasm: input arg5 | ||
65 | |||
66 | # qhasm: int64 k | ||
67 | |||
68 | # qhasm: int64 kbits | ||
69 | |||
70 | # qhasm: int64 iv | ||
71 | |||
72 | # qhasm: int64 i | ||
73 | |||
74 | # qhasm: stack128 x0 | ||
75 | |||
76 | # qhasm: stack128 x1 | ||
77 | |||
78 | # qhasm: stack128 x2 | ||
79 | |||
80 | # qhasm: stack128 x3 | ||
81 | |||
82 | # qhasm: int64 m | ||
83 | |||
84 | # qhasm: int64 out | ||
85 | |||
86 | # qhasm: int64 bytes | ||
87 | |||
88 | # qhasm: stack32 eax_stack | ||
89 | |||
90 | # qhasm: stack32 ebx_stack | ||
91 | |||
92 | # qhasm: stack32 esi_stack | ||
93 | |||
94 | # qhasm: stack32 edi_stack | ||
95 | |||
96 | # qhasm: stack32 ebp_stack | ||
97 | |||
98 | # qhasm: int6464 diag0 | ||
99 | |||
100 | # qhasm: int6464 diag1 | ||
101 | |||
102 | # qhasm: int6464 diag2 | ||
103 | |||
104 | # qhasm: int6464 diag3 | ||
105 | |||
106 | # qhasm: int6464 a0 | ||
107 | |||
108 | # qhasm: int6464 a1 | ||
109 | |||
110 | # qhasm: int6464 a2 | ||
111 | |||
112 | # qhasm: int6464 a3 | ||
113 | |||
114 | # qhasm: int6464 a4 | ||
115 | |||
116 | # qhasm: int6464 a5 | ||
117 | |||
118 | # qhasm: int6464 a6 | ||
119 | |||
120 | # qhasm: int6464 a7 | ||
121 | |||
122 | # qhasm: int6464 b0 | ||
123 | |||
124 | # qhasm: int6464 b1 | ||
125 | |||
126 | # qhasm: int6464 b2 | ||
127 | |||
128 | # qhasm: int6464 b3 | ||
129 | |||
130 | # qhasm: int6464 b4 | ||
131 | |||
132 | # qhasm: int6464 b5 | ||
133 | |||
134 | # qhasm: int6464 b6 | ||
135 | |||
136 | # qhasm: int6464 b7 | ||
137 | |||
138 | # qhasm: int6464 z0 | ||
139 | |||
140 | # qhasm: int6464 z1 | ||
141 | |||
142 | # qhasm: int6464 z2 | ||
143 | |||
144 | # qhasm: int6464 z3 | ||
145 | |||
146 | # qhasm: int6464 z4 | ||
147 | |||
148 | # qhasm: int6464 z5 | ||
149 | |||
150 | # qhasm: int6464 z6 | ||
151 | |||
152 | # qhasm: int6464 z7 | ||
153 | |||
154 | # qhasm: int6464 z8 | ||
155 | |||
156 | # qhasm: int6464 z9 | ||
157 | |||
158 | # qhasm: int6464 z10 | ||
159 | |||
160 | # qhasm: int6464 z11 | ||
161 | |||
162 | # qhasm: int6464 z12 | ||
163 | |||
164 | # qhasm: int6464 z13 | ||
165 | |||
166 | # qhasm: int6464 z14 | ||
167 | |||
168 | # qhasm: int6464 z15 | ||
169 | |||
170 | # qhasm: stack128 z0_stack | ||
171 | |||
172 | # qhasm: stack128 z1_stack | ||
173 | |||
174 | # qhasm: stack128 z2_stack | ||
175 | |||
176 | # qhasm: stack128 z3_stack | ||
177 | |||
178 | # qhasm: stack128 z4_stack | ||
179 | |||
180 | # qhasm: stack128 z5_stack | ||
181 | |||
182 | # qhasm: stack128 z6_stack | ||
183 | |||
184 | # qhasm: stack128 z7_stack | ||
185 | |||
186 | # qhasm: stack128 z8_stack | ||
187 | |||
188 | # qhasm: stack128 z9_stack | ||
189 | |||
190 | # qhasm: stack128 z10_stack | ||
191 | |||
192 | # qhasm: stack128 z11_stack | ||
193 | |||
194 | # qhasm: stack128 z12_stack | ||
195 | |||
196 | # qhasm: stack128 z13_stack | ||
197 | |||
198 | # qhasm: stack128 z14_stack | ||
199 | |||
200 | # qhasm: stack128 z15_stack | ||
201 | |||
202 | # qhasm: int6464 y0 | ||
203 | |||
204 | # qhasm: int6464 y1 | ||
205 | |||
206 | # qhasm: int6464 y2 | ||
207 | |||
208 | # qhasm: int6464 y3 | ||
209 | |||
210 | # qhasm: int6464 y4 | ||
211 | |||
212 | # qhasm: int6464 y5 | ||
213 | |||
214 | # qhasm: int6464 y6 | ||
215 | |||
216 | # qhasm: int6464 y7 | ||
217 | |||
218 | # qhasm: int6464 y8 | ||
219 | |||
220 | # qhasm: int6464 y9 | ||
221 | |||
222 | # qhasm: int6464 y10 | ||
223 | |||
224 | # qhasm: int6464 y11 | ||
225 | |||
226 | # qhasm: int6464 y12 | ||
227 | |||
228 | # qhasm: int6464 y13 | ||
229 | |||
230 | # qhasm: int6464 y14 | ||
231 | |||
232 | # qhasm: int6464 y15 | ||
233 | |||
234 | # qhasm: int6464 r0 | ||
235 | |||
236 | # qhasm: int6464 r1 | ||
237 | |||
238 | # qhasm: int6464 r2 | ||
239 | |||
240 | # qhasm: int6464 r3 | ||
241 | |||
242 | # qhasm: int6464 r4 | ||
243 | |||
244 | # qhasm: int6464 r5 | ||
245 | |||
246 | # qhasm: int6464 r6 | ||
247 | |||
248 | # qhasm: int6464 r7 | ||
249 | |||
250 | # qhasm: int6464 r8 | ||
251 | |||
252 | # qhasm: int6464 r9 | ||
253 | |||
254 | # qhasm: int6464 r10 | ||
255 | |||
256 | # qhasm: int6464 r11 | ||
257 | |||
258 | # qhasm: int6464 r12 | ||
259 | |||
260 | # qhasm: int6464 r13 | ||
261 | |||
262 | # qhasm: int6464 r14 | ||
263 | |||
264 | # qhasm: int6464 r15 | ||
265 | |||
266 | # qhasm: stack128 orig0 | ||
267 | |||
268 | # qhasm: stack128 orig1 | ||
269 | |||
270 | # qhasm: stack128 orig2 | ||
271 | |||
272 | # qhasm: stack128 orig3 | ||
273 | |||
274 | # qhasm: stack128 orig4 | ||
275 | |||
276 | # qhasm: stack128 orig5 | ||
277 | |||
278 | # qhasm: stack128 orig6 | ||
279 | |||
280 | # qhasm: stack128 orig7 | ||
281 | |||
282 | # qhasm: stack128 orig8 | ||
283 | |||
284 | # qhasm: stack128 orig9 | ||
285 | |||
286 | # qhasm: stack128 orig10 | ||
287 | |||
288 | # qhasm: stack128 orig11 | ||
289 | |||
290 | # qhasm: stack128 orig12 | ||
291 | |||
292 | # qhasm: stack128 orig13 | ||
293 | |||
294 | # qhasm: stack128 orig14 | ||
295 | |||
296 | # qhasm: stack128 orig15 | ||
297 | |||
298 | # qhasm: int64 in0 | ||
299 | |||
300 | # qhasm: int64 in1 | ||
301 | |||
302 | # qhasm: int64 in2 | ||
303 | |||
304 | # qhasm: int64 in3 | ||
305 | |||
306 | # qhasm: int64 in4 | ||
307 | |||
308 | # qhasm: int64 in5 | ||
309 | |||
310 | # qhasm: int64 in6 | ||
311 | |||
312 | # qhasm: int64 in7 | ||
313 | |||
314 | # qhasm: int64 in8 | ||
315 | |||
316 | # qhasm: int64 in9 | ||
317 | |||
318 | # qhasm: int64 in10 | ||
319 | |||
320 | # qhasm: int64 in11 | ||
321 | |||
322 | # qhasm: int64 in12 | ||
323 | |||
324 | # qhasm: int64 in13 | ||
325 | |||
326 | # qhasm: int64 in14 | ||
327 | |||
328 | # qhasm: int64 in15 | ||
329 | |||
330 | # qhasm: stack512 tmp | ||
331 | |||
332 | # qhasm: int64 ctarget | ||
333 | |||
334 | # qhasm: stack64 bytes_backup | ||
335 | |||
336 | # qhasm: enter crypto_stream_salsa2012_amd64_xmm6 | ||
337 | .text | ||
338 | .p2align 5 | ||
339 | .globl _crypto_stream_salsa2012_amd64_xmm6 | ||
340 | .globl crypto_stream_salsa2012_amd64_xmm6 | ||
341 | _crypto_stream_salsa2012_amd64_xmm6: | ||
342 | crypto_stream_salsa2012_amd64_xmm6: | ||
343 | mov %rsp,%r11 | ||
344 | and $31,%r11 | ||
345 | add $480,%r11 | ||
346 | sub %r11,%rsp | ||
347 | |||
348 | # qhasm: r11_stack = r11_caller | ||
349 | # asm 1: movq <r11_caller=int64#9,>r11_stack=stack64#1 | ||
350 | # asm 2: movq <r11_caller=%r11,>r11_stack=352(%rsp) | ||
351 | movq %r11,352(%rsp) | ||
352 | |||
353 | # qhasm: r12_stack = r12_caller | ||
354 | # asm 1: movq <r12_caller=int64#10,>r12_stack=stack64#2 | ||
355 | # asm 2: movq <r12_caller=%r12,>r12_stack=360(%rsp) | ||
356 | movq %r12,360(%rsp) | ||
357 | |||
358 | # qhasm: r13_stack = r13_caller | ||
359 | # asm 1: movq <r13_caller=int64#11,>r13_stack=stack64#3 | ||
360 | # asm 2: movq <r13_caller=%r13,>r13_stack=368(%rsp) | ||
361 | movq %r13,368(%rsp) | ||
362 | |||
363 | # qhasm: r14_stack = r14_caller | ||
364 | # asm 1: movq <r14_caller=int64#12,>r14_stack=stack64#4 | ||
365 | # asm 2: movq <r14_caller=%r14,>r14_stack=376(%rsp) | ||
366 | movq %r14,376(%rsp) | ||
367 | |||
368 | # qhasm: r15_stack = r15_caller | ||
369 | # asm 1: movq <r15_caller=int64#13,>r15_stack=stack64#5 | ||
370 | # asm 2: movq <r15_caller=%r15,>r15_stack=384(%rsp) | ||
371 | movq %r15,384(%rsp) | ||
372 | |||
373 | # qhasm: rbx_stack = rbx_caller | ||
374 | # asm 1: movq <rbx_caller=int64#14,>rbx_stack=stack64#6 | ||
375 | # asm 2: movq <rbx_caller=%rbx,>rbx_stack=392(%rsp) | ||
376 | movq %rbx,392(%rsp) | ||
377 | |||
378 | # qhasm: rbp_stack = rbp_caller | ||
379 | # asm 1: movq <rbp_caller=int64#15,>rbp_stack=stack64#7 | ||
380 | # asm 2: movq <rbp_caller=%rbp,>rbp_stack=400(%rsp) | ||
381 | movq %rbp,400(%rsp) | ||
382 | |||
383 | # qhasm: bytes = arg2 | ||
384 | # asm 1: mov <arg2=int64#2,>bytes=int64#6 | ||
385 | # asm 2: mov <arg2=%rsi,>bytes=%r9 | ||
386 | mov %rsi,%r9 | ||
387 | |||
388 | # qhasm: out = arg1 | ||
389 | # asm 1: mov <arg1=int64#1,>out=int64#1 | ||
390 | # asm 2: mov <arg1=%rdi,>out=%rdi | ||
391 | mov %rdi,%rdi | ||
392 | |||
393 | # qhasm: m = out | ||
394 | # asm 1: mov <out=int64#1,>m=int64#2 | ||
395 | # asm 2: mov <out=%rdi,>m=%rsi | ||
396 | mov %rdi,%rsi | ||
397 | |||
398 | # qhasm: iv = arg3 | ||
399 | # asm 1: mov <arg3=int64#3,>iv=int64#3 | ||
400 | # asm 2: mov <arg3=%rdx,>iv=%rdx | ||
401 | mov %rdx,%rdx | ||
402 | |||
403 | # qhasm: k = arg4 | ||
404 | # asm 1: mov <arg4=int64#4,>k=int64#8 | ||
405 | # asm 2: mov <arg4=%rcx,>k=%r10 | ||
406 | mov %rcx,%r10 | ||
407 | |||
408 | # qhasm: unsigned>? bytes - 0 | ||
409 | # asm 1: cmp $0,<bytes=int64#6 | ||
410 | # asm 2: cmp $0,<bytes=%r9 | ||
411 | cmp $0,%r9 | ||
412 | # comment:fp stack unchanged by jump | ||
413 | |||
414 | # qhasm: goto done if !unsigned> | ||
415 | jbe ._done | ||
416 | |||
417 | # qhasm: a = 0 | ||
418 | # asm 1: mov $0,>a=int64#7 | ||
419 | # asm 2: mov $0,>a=%rax | ||
420 | mov $0,%rax | ||
421 | |||
422 | # qhasm: i = bytes | ||
423 | # asm 1: mov <bytes=int64#6,>i=int64#4 | ||
424 | # asm 2: mov <bytes=%r9,>i=%rcx | ||
425 | mov %r9,%rcx | ||
426 | |||
427 | # qhasm: while (i) { *out++ = a; --i } | ||
428 | rep stosb | ||
429 | |||
430 | # qhasm: out -= bytes | ||
431 | # asm 1: sub <bytes=int64#6,<out=int64#1 | ||
432 | # asm 2: sub <bytes=%r9,<out=%rdi | ||
433 | sub %r9,%rdi | ||
434 | # comment:fp stack unchanged by jump | ||
435 | |||
436 | # qhasm: goto start | ||
437 | jmp ._start | ||
438 | |||
439 | # qhasm: enter crypto_stream_salsa2012_amd64_xmm6_xor | ||
440 | .text | ||
441 | .p2align 5 | ||
442 | .globl _crypto_stream_salsa2012_amd64_xmm6_xor | ||
443 | .globl crypto_stream_salsa2012_amd64_xmm6_xor | ||
444 | _crypto_stream_salsa2012_amd64_xmm6_xor: | ||
445 | crypto_stream_salsa2012_amd64_xmm6_xor: | ||
446 | mov %rsp,%r11 | ||
447 | and $31,%r11 | ||
448 | add $480,%r11 | ||
449 | sub %r11,%rsp | ||
450 | |||
451 | # qhasm: r11_stack = r11_caller | ||
452 | # asm 1: movq <r11_caller=int64#9,>r11_stack=stack64#1 | ||
453 | # asm 2: movq <r11_caller=%r11,>r11_stack=352(%rsp) | ||
454 | movq %r11,352(%rsp) | ||
455 | |||
456 | # qhasm: r12_stack = r12_caller | ||
457 | # asm 1: movq <r12_caller=int64#10,>r12_stack=stack64#2 | ||
458 | # asm 2: movq <r12_caller=%r12,>r12_stack=360(%rsp) | ||
459 | movq %r12,360(%rsp) | ||
460 | |||
461 | # qhasm: r13_stack = r13_caller | ||
462 | # asm 1: movq <r13_caller=int64#11,>r13_stack=stack64#3 | ||
463 | # asm 2: movq <r13_caller=%r13,>r13_stack=368(%rsp) | ||
464 | movq %r13,368(%rsp) | ||
465 | |||
466 | # qhasm: r14_stack = r14_caller | ||
467 | # asm 1: movq <r14_caller=int64#12,>r14_stack=stack64#4 | ||
468 | # asm 2: movq <r14_caller=%r14,>r14_stack=376(%rsp) | ||
469 | movq %r14,376(%rsp) | ||
470 | |||
471 | # qhasm: r15_stack = r15_caller | ||
472 | # asm 1: movq <r15_caller=int64#13,>r15_stack=stack64#5 | ||
473 | # asm 2: movq <r15_caller=%r15,>r15_stack=384(%rsp) | ||
474 | movq %r15,384(%rsp) | ||
475 | |||
476 | # qhasm: rbx_stack = rbx_caller | ||
477 | # asm 1: movq <rbx_caller=int64#14,>rbx_stack=stack64#6 | ||
478 | # asm 2: movq <rbx_caller=%rbx,>rbx_stack=392(%rsp) | ||
479 | movq %rbx,392(%rsp) | ||
480 | |||
481 | # qhasm: rbp_stack = rbp_caller | ||
482 | # asm 1: movq <rbp_caller=int64#15,>rbp_stack=stack64#7 | ||
483 | # asm 2: movq <rbp_caller=%rbp,>rbp_stack=400(%rsp) | ||
484 | movq %rbp,400(%rsp) | ||
485 | |||
486 | # qhasm: out = arg1 | ||
487 | # asm 1: mov <arg1=int64#1,>out=int64#1 | ||
488 | # asm 2: mov <arg1=%rdi,>out=%rdi | ||
489 | mov %rdi,%rdi | ||
490 | |||
491 | # qhasm: m = arg2 | ||
492 | # asm 1: mov <arg2=int64#2,>m=int64#2 | ||
493 | # asm 2: mov <arg2=%rsi,>m=%rsi | ||
494 | mov %rsi,%rsi | ||
495 | |||
496 | # qhasm: bytes = arg3 | ||
497 | # asm 1: mov <arg3=int64#3,>bytes=int64#6 | ||
498 | # asm 2: mov <arg3=%rdx,>bytes=%r9 | ||
499 | mov %rdx,%r9 | ||
500 | |||
501 | # qhasm: iv = arg4 | ||
502 | # asm 1: mov <arg4=int64#4,>iv=int64#3 | ||
503 | # asm 2: mov <arg4=%rcx,>iv=%rdx | ||
504 | mov %rcx,%rdx | ||
505 | |||
506 | # qhasm: k = arg5 | ||
507 | # asm 1: mov <arg5=int64#5,>k=int64#8 | ||
508 | # asm 2: mov <arg5=%r8,>k=%r10 | ||
509 | mov %r8,%r10 | ||
510 | |||
511 | # qhasm: unsigned>? bytes - 0 | ||
512 | # asm 1: cmp $0,<bytes=int64#6 | ||
513 | # asm 2: cmp $0,<bytes=%r9 | ||
514 | cmp $0,%r9 | ||
515 | # comment:fp stack unchanged by jump | ||
516 | |||
517 | # qhasm: goto done if !unsigned> | ||
518 | jbe ._done | ||
519 | # comment:fp stack unchanged by fallthrough | ||
520 | |||
521 | # qhasm: start: | ||
522 | ._start: | ||
523 | |||
524 | # qhasm: in12 = *(uint32 *) (k + 20) | ||
525 | # asm 1: movl 20(<k=int64#8),>in12=int64#4d | ||
526 | # asm 2: movl 20(<k=%r10),>in12=%ecx | ||
527 | movl 20(%r10),%ecx | ||
528 | |||
529 | # qhasm: in1 = *(uint32 *) (k + 0) | ||
530 | # asm 1: movl 0(<k=int64#8),>in1=int64#5d | ||
531 | # asm 2: movl 0(<k=%r10),>in1=%r8d | ||
532 | movl 0(%r10),%r8d | ||
533 | |||
534 | # qhasm: in6 = *(uint32 *) (iv + 0) | ||
535 | # asm 1: movl 0(<iv=int64#3),>in6=int64#7d | ||
536 | # asm 2: movl 0(<iv=%rdx),>in6=%eax | ||
537 | movl 0(%rdx),%eax | ||
538 | |||
539 | # qhasm: in11 = *(uint32 *) (k + 16) | ||
540 | # asm 1: movl 16(<k=int64#8),>in11=int64#9d | ||
541 | # asm 2: movl 16(<k=%r10),>in11=%r11d | ||
542 | movl 16(%r10),%r11d | ||
543 | |||
544 | # qhasm: ((uint32 *)&x1)[0] = in12 | ||
545 | # asm 1: movl <in12=int64#4d,>x1=stack128#1 | ||
546 | # asm 2: movl <in12=%ecx,>x1=0(%rsp) | ||
547 | movl %ecx,0(%rsp) | ||
548 | |||
549 | # qhasm: ((uint32 *)&x1)[1] = in1 | ||
550 | # asm 1: movl <in1=int64#5d,4+<x1=stack128#1 | ||
551 | # asm 2: movl <in1=%r8d,4+<x1=0(%rsp) | ||
552 | movl %r8d,4+0(%rsp) | ||
553 | |||
554 | # qhasm: ((uint32 *)&x1)[2] = in6 | ||
555 | # asm 1: movl <in6=int64#7d,8+<x1=stack128#1 | ||
556 | # asm 2: movl <in6=%eax,8+<x1=0(%rsp) | ||
557 | movl %eax,8+0(%rsp) | ||
558 | |||
559 | # qhasm: ((uint32 *)&x1)[3] = in11 | ||
560 | # asm 1: movl <in11=int64#9d,12+<x1=stack128#1 | ||
561 | # asm 2: movl <in11=%r11d,12+<x1=0(%rsp) | ||
562 | movl %r11d,12+0(%rsp) | ||
563 | |||
564 | # qhasm: in8 = 0 | ||
565 | # asm 1: mov $0,>in8=int64#4 | ||
566 | # asm 2: mov $0,>in8=%rcx | ||
567 | mov $0,%rcx | ||
568 | |||
569 | # qhasm: in13 = *(uint32 *) (k + 24) | ||
570 | # asm 1: movl 24(<k=int64#8),>in13=int64#5d | ||
571 | # asm 2: movl 24(<k=%r10),>in13=%r8d | ||
572 | movl 24(%r10),%r8d | ||
573 | |||
574 | # qhasm: in2 = *(uint32 *) (k + 4) | ||
575 | # asm 1: movl 4(<k=int64#8),>in2=int64#7d | ||
576 | # asm 2: movl 4(<k=%r10),>in2=%eax | ||
577 | movl 4(%r10),%eax | ||
578 | |||
579 | # qhasm: in7 = *(uint32 *) (iv + 4) | ||
580 | # asm 1: movl 4(<iv=int64#3),>in7=int64#3d | ||
581 | # asm 2: movl 4(<iv=%rdx),>in7=%edx | ||
582 | movl 4(%rdx),%edx | ||
583 | |||
584 | # qhasm: ((uint32 *)&x2)[0] = in8 | ||
585 | # asm 1: movl <in8=int64#4d,>x2=stack128#2 | ||
586 | # asm 2: movl <in8=%ecx,>x2=16(%rsp) | ||
587 | movl %ecx,16(%rsp) | ||
588 | |||
589 | # qhasm: ((uint32 *)&x2)[1] = in13 | ||
590 | # asm 1: movl <in13=int64#5d,4+<x2=stack128#2 | ||
591 | # asm 2: movl <in13=%r8d,4+<x2=16(%rsp) | ||
592 | movl %r8d,4+16(%rsp) | ||
593 | |||
594 | # qhasm: ((uint32 *)&x2)[2] = in2 | ||
595 | # asm 1: movl <in2=int64#7d,8+<x2=stack128#2 | ||
596 | # asm 2: movl <in2=%eax,8+<x2=16(%rsp) | ||
597 | movl %eax,8+16(%rsp) | ||
598 | |||
599 | # qhasm: ((uint32 *)&x2)[3] = in7 | ||
600 | # asm 1: movl <in7=int64#3d,12+<x2=stack128#2 | ||
601 | # asm 2: movl <in7=%edx,12+<x2=16(%rsp) | ||
602 | movl %edx,12+16(%rsp) | ||
603 | |||
604 | # qhasm: in4 = *(uint32 *) (k + 12) | ||
605 | # asm 1: movl 12(<k=int64#8),>in4=int64#3d | ||
606 | # asm 2: movl 12(<k=%r10),>in4=%edx | ||
607 | movl 12(%r10),%edx | ||
608 | |||
609 | # qhasm: in9 = 0 | ||
610 | # asm 1: mov $0,>in9=int64#4 | ||
611 | # asm 2: mov $0,>in9=%rcx | ||
612 | mov $0,%rcx | ||
613 | |||
614 | # qhasm: in14 = *(uint32 *) (k + 28) | ||
615 | # asm 1: movl 28(<k=int64#8),>in14=int64#5d | ||
616 | # asm 2: movl 28(<k=%r10),>in14=%r8d | ||
617 | movl 28(%r10),%r8d | ||
618 | |||
619 | # qhasm: in3 = *(uint32 *) (k + 8) | ||
620 | # asm 1: movl 8(<k=int64#8),>in3=int64#7d | ||
621 | # asm 2: movl 8(<k=%r10),>in3=%eax | ||
622 | movl 8(%r10),%eax | ||
623 | |||
624 | # qhasm: ((uint32 *)&x3)[0] = in4 | ||
625 | # asm 1: movl <in4=int64#3d,>x3=stack128#3 | ||
626 | # asm 2: movl <in4=%edx,>x3=32(%rsp) | ||
627 | movl %edx,32(%rsp) | ||
628 | |||
629 | # qhasm: ((uint32 *)&x3)[1] = in9 | ||
630 | # asm 1: movl <in9=int64#4d,4+<x3=stack128#3 | ||
631 | # asm 2: movl <in9=%ecx,4+<x3=32(%rsp) | ||
632 | movl %ecx,4+32(%rsp) | ||
633 | |||
634 | # qhasm: ((uint32 *)&x3)[2] = in14 | ||
635 | # asm 1: movl <in14=int64#5d,8+<x3=stack128#3 | ||
636 | # asm 2: movl <in14=%r8d,8+<x3=32(%rsp) | ||
637 | movl %r8d,8+32(%rsp) | ||
638 | |||
639 | # qhasm: ((uint32 *)&x3)[3] = in3 | ||
640 | # asm 1: movl <in3=int64#7d,12+<x3=stack128#3 | ||
641 | # asm 2: movl <in3=%eax,12+<x3=32(%rsp) | ||
642 | movl %eax,12+32(%rsp) | ||
643 | |||
644 | # qhasm: in0 = 1634760805 | ||
645 | # asm 1: mov $1634760805,>in0=int64#3 | ||
646 | # asm 2: mov $1634760805,>in0=%rdx | ||
647 | mov $1634760805,%rdx | ||
648 | |||
649 | # qhasm: in5 = 857760878 | ||
650 | # asm 1: mov $857760878,>in5=int64#4 | ||
651 | # asm 2: mov $857760878,>in5=%rcx | ||
652 | mov $857760878,%rcx | ||
653 | |||
654 | # qhasm: in10 = 2036477234 | ||
655 | # asm 1: mov $2036477234,>in10=int64#5 | ||
656 | # asm 2: mov $2036477234,>in10=%r8 | ||
657 | mov $2036477234,%r8 | ||
658 | |||
659 | # qhasm: in15 = 1797285236 | ||
660 | # asm 1: mov $1797285236,>in15=int64#7 | ||
661 | # asm 2: mov $1797285236,>in15=%rax | ||
662 | mov $1797285236,%rax | ||
663 | |||
664 | # qhasm: ((uint32 *)&x0)[0] = in0 | ||
665 | # asm 1: movl <in0=int64#3d,>x0=stack128#4 | ||
666 | # asm 2: movl <in0=%edx,>x0=48(%rsp) | ||
667 | movl %edx,48(%rsp) | ||
668 | |||
669 | # qhasm: ((uint32 *)&x0)[1] = in5 | ||
670 | # asm 1: movl <in5=int64#4d,4+<x0=stack128#4 | ||
671 | # asm 2: movl <in5=%ecx,4+<x0=48(%rsp) | ||
672 | movl %ecx,4+48(%rsp) | ||
673 | |||
674 | # qhasm: ((uint32 *)&x0)[2] = in10 | ||
675 | # asm 1: movl <in10=int64#5d,8+<x0=stack128#4 | ||
676 | # asm 2: movl <in10=%r8d,8+<x0=48(%rsp) | ||
677 | movl %r8d,8+48(%rsp) | ||
678 | |||
679 | # qhasm: ((uint32 *)&x0)[3] = in15 | ||
680 | # asm 1: movl <in15=int64#7d,12+<x0=stack128#4 | ||
681 | # asm 2: movl <in15=%eax,12+<x0=48(%rsp) | ||
682 | movl %eax,12+48(%rsp) | ||
683 | |||
684 | # qhasm: unsigned<? bytes - 256 | ||
685 | # asm 1: cmp $256,<bytes=int64#6 | ||
686 | # asm 2: cmp $256,<bytes=%r9 | ||
687 | cmp $256,%r9 | ||
688 | # comment:fp stack unchanged by jump | ||
689 | |||
690 | # qhasm: goto bytesbetween1and255 if unsigned< | ||
691 | jb ._bytesbetween1and255 | ||
692 | |||
693 | # qhasm: z0 = x0 | ||
694 | # asm 1: movdqa <x0=stack128#4,>z0=int6464#1 | ||
695 | # asm 2: movdqa <x0=48(%rsp),>z0=%xmm0 | ||
696 | movdqa 48(%rsp),%xmm0 | ||
697 | |||
698 | # qhasm: z5 = z0[1,1,1,1] | ||
699 | # asm 1: pshufd $0x55,<z0=int6464#1,>z5=int6464#2 | ||
700 | # asm 2: pshufd $0x55,<z0=%xmm0,>z5=%xmm1 | ||
701 | pshufd $0x55,%xmm0,%xmm1 | ||
702 | |||
703 | # qhasm: z10 = z0[2,2,2,2] | ||
704 | # asm 1: pshufd $0xaa,<z0=int6464#1,>z10=int6464#3 | ||
705 | # asm 2: pshufd $0xaa,<z0=%xmm0,>z10=%xmm2 | ||
706 | pshufd $0xaa,%xmm0,%xmm2 | ||
707 | |||
708 | # qhasm: z15 = z0[3,3,3,3] | ||
709 | # asm 1: pshufd $0xff,<z0=int6464#1,>z15=int6464#4 | ||
710 | # asm 2: pshufd $0xff,<z0=%xmm0,>z15=%xmm3 | ||
711 | pshufd $0xff,%xmm0,%xmm3 | ||
712 | |||
713 | # qhasm: z0 = z0[0,0,0,0] | ||
714 | # asm 1: pshufd $0x00,<z0=int6464#1,>z0=int6464#1 | ||
715 | # asm 2: pshufd $0x00,<z0=%xmm0,>z0=%xmm0 | ||
716 | pshufd $0x00,%xmm0,%xmm0 | ||
717 | |||
718 | # qhasm: orig5 = z5 | ||
719 | # asm 1: movdqa <z5=int6464#2,>orig5=stack128#5 | ||
720 | # asm 2: movdqa <z5=%xmm1,>orig5=64(%rsp) | ||
721 | movdqa %xmm1,64(%rsp) | ||
722 | |||
723 | # qhasm: orig10 = z10 | ||
724 | # asm 1: movdqa <z10=int6464#3,>orig10=stack128#6 | ||
725 | # asm 2: movdqa <z10=%xmm2,>orig10=80(%rsp) | ||
726 | movdqa %xmm2,80(%rsp) | ||
727 | |||
728 | # qhasm: orig15 = z15 | ||
729 | # asm 1: movdqa <z15=int6464#4,>orig15=stack128#7 | ||
730 | # asm 2: movdqa <z15=%xmm3,>orig15=96(%rsp) | ||
731 | movdqa %xmm3,96(%rsp) | ||
732 | |||
733 | # qhasm: orig0 = z0 | ||
734 | # asm 1: movdqa <z0=int6464#1,>orig0=stack128#8 | ||
735 | # asm 2: movdqa <z0=%xmm0,>orig0=112(%rsp) | ||
736 | movdqa %xmm0,112(%rsp) | ||
737 | |||
738 | # qhasm: z1 = x1 | ||
739 | # asm 1: movdqa <x1=stack128#1,>z1=int6464#1 | ||
740 | # asm 2: movdqa <x1=0(%rsp),>z1=%xmm0 | ||
741 | movdqa 0(%rsp),%xmm0 | ||
742 | |||
743 | # qhasm: z6 = z1[2,2,2,2] | ||
744 | # asm 1: pshufd $0xaa,<z1=int6464#1,>z6=int6464#2 | ||
745 | # asm 2: pshufd $0xaa,<z1=%xmm0,>z6=%xmm1 | ||
746 | pshufd $0xaa,%xmm0,%xmm1 | ||
747 | |||
748 | # qhasm: z11 = z1[3,3,3,3] | ||
749 | # asm 1: pshufd $0xff,<z1=int6464#1,>z11=int6464#3 | ||
750 | # asm 2: pshufd $0xff,<z1=%xmm0,>z11=%xmm2 | ||
751 | pshufd $0xff,%xmm0,%xmm2 | ||
752 | |||
753 | # qhasm: z12 = z1[0,0,0,0] | ||
754 | # asm 1: pshufd $0x00,<z1=int6464#1,>z12=int6464#4 | ||
755 | # asm 2: pshufd $0x00,<z1=%xmm0,>z12=%xmm3 | ||
756 | pshufd $0x00,%xmm0,%xmm3 | ||
757 | |||
758 | # qhasm: z1 = z1[1,1,1,1] | ||
759 | # asm 1: pshufd $0x55,<z1=int6464#1,>z1=int6464#1 | ||
760 | # asm 2: pshufd $0x55,<z1=%xmm0,>z1=%xmm0 | ||
761 | pshufd $0x55,%xmm0,%xmm0 | ||
762 | |||
763 | # qhasm: orig6 = z6 | ||
764 | # asm 1: movdqa <z6=int6464#2,>orig6=stack128#9 | ||
765 | # asm 2: movdqa <z6=%xmm1,>orig6=128(%rsp) | ||
766 | movdqa %xmm1,128(%rsp) | ||
767 | |||
768 | # qhasm: orig11 = z11 | ||
769 | # asm 1: movdqa <z11=int6464#3,>orig11=stack128#10 | ||
770 | # asm 2: movdqa <z11=%xmm2,>orig11=144(%rsp) | ||
771 | movdqa %xmm2,144(%rsp) | ||
772 | |||
773 | # qhasm: orig12 = z12 | ||
774 | # asm 1: movdqa <z12=int6464#4,>orig12=stack128#11 | ||
775 | # asm 2: movdqa <z12=%xmm3,>orig12=160(%rsp) | ||
776 | movdqa %xmm3,160(%rsp) | ||
777 | |||
778 | # qhasm: orig1 = z1 | ||
779 | # asm 1: movdqa <z1=int6464#1,>orig1=stack128#12 | ||
780 | # asm 2: movdqa <z1=%xmm0,>orig1=176(%rsp) | ||
781 | movdqa %xmm0,176(%rsp) | ||
782 | |||
783 | # qhasm: z2 = x2 | ||
784 | # asm 1: movdqa <x2=stack128#2,>z2=int6464#1 | ||
785 | # asm 2: movdqa <x2=16(%rsp),>z2=%xmm0 | ||
786 | movdqa 16(%rsp),%xmm0 | ||
787 | |||
788 | # qhasm: z7 = z2[3,3,3,3] | ||
789 | # asm 1: pshufd $0xff,<z2=int6464#1,>z7=int6464#2 | ||
790 | # asm 2: pshufd $0xff,<z2=%xmm0,>z7=%xmm1 | ||
791 | pshufd $0xff,%xmm0,%xmm1 | ||
792 | |||
793 | # qhasm: z13 = z2[1,1,1,1] | ||
794 | # asm 1: pshufd $0x55,<z2=int6464#1,>z13=int6464#3 | ||
795 | # asm 2: pshufd $0x55,<z2=%xmm0,>z13=%xmm2 | ||
796 | pshufd $0x55,%xmm0,%xmm2 | ||
797 | |||
798 | # qhasm: z2 = z2[2,2,2,2] | ||
799 | # asm 1: pshufd $0xaa,<z2=int6464#1,>z2=int6464#1 | ||
800 | # asm 2: pshufd $0xaa,<z2=%xmm0,>z2=%xmm0 | ||
801 | pshufd $0xaa,%xmm0,%xmm0 | ||
802 | |||
803 | # qhasm: orig7 = z7 | ||
804 | # asm 1: movdqa <z7=int6464#2,>orig7=stack128#13 | ||
805 | # asm 2: movdqa <z7=%xmm1,>orig7=192(%rsp) | ||
806 | movdqa %xmm1,192(%rsp) | ||
807 | |||
808 | # qhasm: orig13 = z13 | ||
809 | # asm 1: movdqa <z13=int6464#3,>orig13=stack128#14 | ||
810 | # asm 2: movdqa <z13=%xmm2,>orig13=208(%rsp) | ||
811 | movdqa %xmm2,208(%rsp) | ||
812 | |||
813 | # qhasm: orig2 = z2 | ||
814 | # asm 1: movdqa <z2=int6464#1,>orig2=stack128#15 | ||
815 | # asm 2: movdqa <z2=%xmm0,>orig2=224(%rsp) | ||
816 | movdqa %xmm0,224(%rsp) | ||
817 | |||
818 | # qhasm: z3 = x3 | ||
819 | # asm 1: movdqa <x3=stack128#3,>z3=int6464#1 | ||
820 | # asm 2: movdqa <x3=32(%rsp),>z3=%xmm0 | ||
821 | movdqa 32(%rsp),%xmm0 | ||
822 | |||
823 | # qhasm: z4 = z3[0,0,0,0] | ||
824 | # asm 1: pshufd $0x00,<z3=int6464#1,>z4=int6464#2 | ||
825 | # asm 2: pshufd $0x00,<z3=%xmm0,>z4=%xmm1 | ||
826 | pshufd $0x00,%xmm0,%xmm1 | ||
827 | |||
828 | # qhasm: z14 = z3[2,2,2,2] | ||
829 | # asm 1: pshufd $0xaa,<z3=int6464#1,>z14=int6464#3 | ||
830 | # asm 2: pshufd $0xaa,<z3=%xmm0,>z14=%xmm2 | ||
831 | pshufd $0xaa,%xmm0,%xmm2 | ||
832 | |||
833 | # qhasm: z3 = z3[3,3,3,3] | ||
834 | # asm 1: pshufd $0xff,<z3=int6464#1,>z3=int6464#1 | ||
835 | # asm 2: pshufd $0xff,<z3=%xmm0,>z3=%xmm0 | ||
836 | pshufd $0xff,%xmm0,%xmm0 | ||
837 | |||
838 | # qhasm: orig4 = z4 | ||
839 | # asm 1: movdqa <z4=int6464#2,>orig4=stack128#16 | ||
840 | # asm 2: movdqa <z4=%xmm1,>orig4=240(%rsp) | ||
841 | movdqa %xmm1,240(%rsp) | ||
842 | |||
843 | # qhasm: orig14 = z14 | ||
844 | # asm 1: movdqa <z14=int6464#3,>orig14=stack128#17 | ||
845 | # asm 2: movdqa <z14=%xmm2,>orig14=256(%rsp) | ||
846 | movdqa %xmm2,256(%rsp) | ||
847 | |||
848 | # qhasm: orig3 = z3 | ||
849 | # asm 1: movdqa <z3=int6464#1,>orig3=stack128#18 | ||
850 | # asm 2: movdqa <z3=%xmm0,>orig3=272(%rsp) | ||
851 | movdqa %xmm0,272(%rsp) | ||
852 | |||
853 | # qhasm: bytesatleast256: | ||
854 | ._bytesatleast256: | ||
855 | |||
856 | # qhasm: in8 = ((uint32 *)&x2)[0] | ||
857 | # asm 1: movl <x2=stack128#2,>in8=int64#3d | ||
858 | # asm 2: movl <x2=16(%rsp),>in8=%edx | ||
859 | movl 16(%rsp),%edx | ||
860 | |||
861 | # qhasm: in9 = ((uint32 *)&x3)[1] | ||
862 | # asm 1: movl 4+<x3=stack128#3,>in9=int64#4d | ||
863 | # asm 2: movl 4+<x3=32(%rsp),>in9=%ecx | ||
864 | movl 4+32(%rsp),%ecx | ||
865 | |||
866 | # qhasm: ((uint32 *) &orig8)[0] = in8 | ||
867 | # asm 1: movl <in8=int64#3d,>orig8=stack128#19 | ||
868 | # asm 2: movl <in8=%edx,>orig8=288(%rsp) | ||
869 | movl %edx,288(%rsp) | ||
870 | |||
871 | # qhasm: ((uint32 *) &orig9)[0] = in9 | ||
872 | # asm 1: movl <in9=int64#4d,>orig9=stack128#20 | ||
873 | # asm 2: movl <in9=%ecx,>orig9=304(%rsp) | ||
874 | movl %ecx,304(%rsp) | ||
875 | |||
876 | # qhasm: in8 += 1 | ||
877 | # asm 1: add $1,<in8=int64#3 | ||
878 | # asm 2: add $1,<in8=%rdx | ||
879 | add $1,%rdx | ||
880 | |||
881 | # qhasm: in9 <<= 32 | ||
882 | # asm 1: shl $32,<in9=int64#4 | ||
883 | # asm 2: shl $32,<in9=%rcx | ||
884 | shl $32,%rcx | ||
885 | |||
886 | # qhasm: in8 += in9 | ||
887 | # asm 1: add <in9=int64#4,<in8=int64#3 | ||
888 | # asm 2: add <in9=%rcx,<in8=%rdx | ||
889 | add %rcx,%rdx | ||
890 | |||
891 | # qhasm: in9 = in8 | ||
892 | # asm 1: mov <in8=int64#3,>in9=int64#4 | ||
893 | # asm 2: mov <in8=%rdx,>in9=%rcx | ||
894 | mov %rdx,%rcx | ||
895 | |||
896 | # qhasm: (uint64) in9 >>= 32 | ||
897 | # asm 1: shr $32,<in9=int64#4 | ||
898 | # asm 2: shr $32,<in9=%rcx | ||
899 | shr $32,%rcx | ||
900 | |||
901 | # qhasm: ((uint32 *) &orig8)[1] = in8 | ||
902 | # asm 1: movl <in8=int64#3d,4+<orig8=stack128#19 | ||
903 | # asm 2: movl <in8=%edx,4+<orig8=288(%rsp) | ||
904 | movl %edx,4+288(%rsp) | ||
905 | |||
906 | # qhasm: ((uint32 *) &orig9)[1] = in9 | ||
907 | # asm 1: movl <in9=int64#4d,4+<orig9=stack128#20 | ||
908 | # asm 2: movl <in9=%ecx,4+<orig9=304(%rsp) | ||
909 | movl %ecx,4+304(%rsp) | ||
910 | |||
911 | # qhasm: in8 += 1 | ||
912 | # asm 1: add $1,<in8=int64#3 | ||
913 | # asm 2: add $1,<in8=%rdx | ||
914 | add $1,%rdx | ||
915 | |||
916 | # qhasm: in9 <<= 32 | ||
917 | # asm 1: shl $32,<in9=int64#4 | ||
918 | # asm 2: shl $32,<in9=%rcx | ||
919 | shl $32,%rcx | ||
920 | |||
921 | # qhasm: in8 += in9 | ||
922 | # asm 1: add <in9=int64#4,<in8=int64#3 | ||
923 | # asm 2: add <in9=%rcx,<in8=%rdx | ||
924 | add %rcx,%rdx | ||
925 | |||
926 | # qhasm: in9 = in8 | ||
927 | # asm 1: mov <in8=int64#3,>in9=int64#4 | ||
928 | # asm 2: mov <in8=%rdx,>in9=%rcx | ||
929 | mov %rdx,%rcx | ||
930 | |||
931 | # qhasm: (uint64) in9 >>= 32 | ||
932 | # asm 1: shr $32,<in9=int64#4 | ||
933 | # asm 2: shr $32,<in9=%rcx | ||
934 | shr $32,%rcx | ||
935 | |||
936 | # qhasm: ((uint32 *) &orig8)[2] = in8 | ||
937 | # asm 1: movl <in8=int64#3d,8+<orig8=stack128#19 | ||
938 | # asm 2: movl <in8=%edx,8+<orig8=288(%rsp) | ||
939 | movl %edx,8+288(%rsp) | ||
940 | |||
941 | # qhasm: ((uint32 *) &orig9)[2] = in9 | ||
942 | # asm 1: movl <in9=int64#4d,8+<orig9=stack128#20 | ||
943 | # asm 2: movl <in9=%ecx,8+<orig9=304(%rsp) | ||
944 | movl %ecx,8+304(%rsp) | ||
945 | |||
946 | # qhasm: in8 += 1 | ||
947 | # asm 1: add $1,<in8=int64#3 | ||
948 | # asm 2: add $1,<in8=%rdx | ||
949 | add $1,%rdx | ||
950 | |||
951 | # qhasm: in9 <<= 32 | ||
952 | # asm 1: shl $32,<in9=int64#4 | ||
953 | # asm 2: shl $32,<in9=%rcx | ||
954 | shl $32,%rcx | ||
955 | |||
956 | # qhasm: in8 += in9 | ||
957 | # asm 1: add <in9=int64#4,<in8=int64#3 | ||
958 | # asm 2: add <in9=%rcx,<in8=%rdx | ||
959 | add %rcx,%rdx | ||
960 | |||
961 | # qhasm: in9 = in8 | ||
962 | # asm 1: mov <in8=int64#3,>in9=int64#4 | ||
963 | # asm 2: mov <in8=%rdx,>in9=%rcx | ||
964 | mov %rdx,%rcx | ||
965 | |||
966 | # qhasm: (uint64) in9 >>= 32 | ||
967 | # asm 1: shr $32,<in9=int64#4 | ||
968 | # asm 2: shr $32,<in9=%rcx | ||
969 | shr $32,%rcx | ||
970 | |||
971 | # qhasm: ((uint32 *) &orig8)[3] = in8 | ||
972 | # asm 1: movl <in8=int64#3d,12+<orig8=stack128#19 | ||
973 | # asm 2: movl <in8=%edx,12+<orig8=288(%rsp) | ||
974 | movl %edx,12+288(%rsp) | ||
975 | |||
976 | # qhasm: ((uint32 *) &orig9)[3] = in9 | ||
977 | # asm 1: movl <in9=int64#4d,12+<orig9=stack128#20 | ||
978 | # asm 2: movl <in9=%ecx,12+<orig9=304(%rsp) | ||
979 | movl %ecx,12+304(%rsp) | ||
980 | |||
981 | # qhasm: in8 += 1 | ||
982 | # asm 1: add $1,<in8=int64#3 | ||
983 | # asm 2: add $1,<in8=%rdx | ||
984 | add $1,%rdx | ||
985 | |||
986 | # qhasm: in9 <<= 32 | ||
987 | # asm 1: shl $32,<in9=int64#4 | ||
988 | # asm 2: shl $32,<in9=%rcx | ||
989 | shl $32,%rcx | ||
990 | |||
991 | # qhasm: in8 += in9 | ||
992 | # asm 1: add <in9=int64#4,<in8=int64#3 | ||
993 | # asm 2: add <in9=%rcx,<in8=%rdx | ||
994 | add %rcx,%rdx | ||
995 | |||
996 | # qhasm: in9 = in8 | ||
997 | # asm 1: mov <in8=int64#3,>in9=int64#4 | ||
998 | # asm 2: mov <in8=%rdx,>in9=%rcx | ||
999 | mov %rdx,%rcx | ||
1000 | |||
1001 | # qhasm: (uint64) in9 >>= 32 | ||
1002 | # asm 1: shr $32,<in9=int64#4 | ||
1003 | # asm 2: shr $32,<in9=%rcx | ||
1004 | shr $32,%rcx | ||
1005 | |||
1006 | # qhasm: ((uint32 *)&x2)[0] = in8 | ||
1007 | # asm 1: movl <in8=int64#3d,>x2=stack128#2 | ||
1008 | # asm 2: movl <in8=%edx,>x2=16(%rsp) | ||
1009 | movl %edx,16(%rsp) | ||
1010 | |||
1011 | # qhasm: ((uint32 *)&x3)[1] = in9 | ||
1012 | # asm 1: movl <in9=int64#4d,4+<x3=stack128#3 | ||
1013 | # asm 2: movl <in9=%ecx,4+<x3=32(%rsp) | ||
1014 | movl %ecx,4+32(%rsp) | ||
1015 | |||
1016 | # qhasm: bytes_backup = bytes | ||
1017 | # asm 1: movq <bytes=int64#6,>bytes_backup=stack64#8 | ||
1018 | # asm 2: movq <bytes=%r9,>bytes_backup=408(%rsp) | ||
1019 | movq %r9,408(%rsp) | ||
1020 | |||
1021 | # qhasm: i = 12 | ||
1022 | # asm 1: mov $12,>i=int64#3 | ||
1023 | # asm 2: mov $12,>i=%rdx | ||
1024 | mov $12,%rdx | ||
1025 | |||
1026 | # qhasm: z5 = orig5 | ||
1027 | # asm 1: movdqa <orig5=stack128#5,>z5=int6464#1 | ||
1028 | # asm 2: movdqa <orig5=64(%rsp),>z5=%xmm0 | ||
1029 | movdqa 64(%rsp),%xmm0 | ||
1030 | |||
1031 | # qhasm: z10 = orig10 | ||
1032 | # asm 1: movdqa <orig10=stack128#6,>z10=int6464#2 | ||
1033 | # asm 2: movdqa <orig10=80(%rsp),>z10=%xmm1 | ||
1034 | movdqa 80(%rsp),%xmm1 | ||
1035 | |||
1036 | # qhasm: z15 = orig15 | ||
1037 | # asm 1: movdqa <orig15=stack128#7,>z15=int6464#3 | ||
1038 | # asm 2: movdqa <orig15=96(%rsp),>z15=%xmm2 | ||
1039 | movdqa 96(%rsp),%xmm2 | ||
1040 | |||
1041 | # qhasm: z14 = orig14 | ||
1042 | # asm 1: movdqa <orig14=stack128#17,>z14=int6464#4 | ||
1043 | # asm 2: movdqa <orig14=256(%rsp),>z14=%xmm3 | ||
1044 | movdqa 256(%rsp),%xmm3 | ||
1045 | |||
1046 | # qhasm: z3 = orig3 | ||
1047 | # asm 1: movdqa <orig3=stack128#18,>z3=int6464#5 | ||
1048 | # asm 2: movdqa <orig3=272(%rsp),>z3=%xmm4 | ||
1049 | movdqa 272(%rsp),%xmm4 | ||
1050 | |||
1051 | # qhasm: z6 = orig6 | ||
1052 | # asm 1: movdqa <orig6=stack128#9,>z6=int6464#6 | ||
1053 | # asm 2: movdqa <orig6=128(%rsp),>z6=%xmm5 | ||
1054 | movdqa 128(%rsp),%xmm5 | ||
1055 | |||
1056 | # qhasm: z11 = orig11 | ||
1057 | # asm 1: movdqa <orig11=stack128#10,>z11=int6464#7 | ||
1058 | # asm 2: movdqa <orig11=144(%rsp),>z11=%xmm6 | ||
1059 | movdqa 144(%rsp),%xmm6 | ||
1060 | |||
1061 | # qhasm: z1 = orig1 | ||
1062 | # asm 1: movdqa <orig1=stack128#12,>z1=int6464#8 | ||
1063 | # asm 2: movdqa <orig1=176(%rsp),>z1=%xmm7 | ||
1064 | movdqa 176(%rsp),%xmm7 | ||
1065 | |||
1066 | # qhasm: z7 = orig7 | ||
1067 | # asm 1: movdqa <orig7=stack128#13,>z7=int6464#9 | ||
1068 | # asm 2: movdqa <orig7=192(%rsp),>z7=%xmm8 | ||
1069 | movdqa 192(%rsp),%xmm8 | ||
1070 | |||
1071 | # qhasm: z13 = orig13 | ||
1072 | # asm 1: movdqa <orig13=stack128#14,>z13=int6464#10 | ||
1073 | # asm 2: movdqa <orig13=208(%rsp),>z13=%xmm9 | ||
1074 | movdqa 208(%rsp),%xmm9 | ||
1075 | |||
1076 | # qhasm: z2 = orig2 | ||
1077 | # asm 1: movdqa <orig2=stack128#15,>z2=int6464#11 | ||
1078 | # asm 2: movdqa <orig2=224(%rsp),>z2=%xmm10 | ||
1079 | movdqa 224(%rsp),%xmm10 | ||
1080 | |||
1081 | # qhasm: z9 = orig9 | ||
1082 | # asm 1: movdqa <orig9=stack128#20,>z9=int6464#12 | ||
1083 | # asm 2: movdqa <orig9=304(%rsp),>z9=%xmm11 | ||
1084 | movdqa 304(%rsp),%xmm11 | ||
1085 | |||
1086 | # qhasm: z0 = orig0 | ||
1087 | # asm 1: movdqa <orig0=stack128#8,>z0=int6464#13 | ||
1088 | # asm 2: movdqa <orig0=112(%rsp),>z0=%xmm12 | ||
1089 | movdqa 112(%rsp),%xmm12 | ||
1090 | |||
1091 | # qhasm: z12 = orig12 | ||
1092 | # asm 1: movdqa <orig12=stack128#11,>z12=int6464#14 | ||
1093 | # asm 2: movdqa <orig12=160(%rsp),>z12=%xmm13 | ||
1094 | movdqa 160(%rsp),%xmm13 | ||
1095 | |||
1096 | # qhasm: z4 = orig4 | ||
1097 | # asm 1: movdqa <orig4=stack128#16,>z4=int6464#15 | ||
1098 | # asm 2: movdqa <orig4=240(%rsp),>z4=%xmm14 | ||
1099 | movdqa 240(%rsp),%xmm14 | ||
1100 | |||
1101 | # qhasm: z8 = orig8 | ||
1102 | # asm 1: movdqa <orig8=stack128#19,>z8=int6464#16 | ||
1103 | # asm 2: movdqa <orig8=288(%rsp),>z8=%xmm15 | ||
1104 | movdqa 288(%rsp),%xmm15 | ||
1105 | |||
1106 | # qhasm: mainloop1: | ||
1107 | ._mainloop1: | ||
1108 | |||
1109 | # qhasm: z10_stack = z10 | ||
1110 | # asm 1: movdqa <z10=int6464#2,>z10_stack=stack128#21 | ||
1111 | # asm 2: movdqa <z10=%xmm1,>z10_stack=320(%rsp) | ||
1112 | movdqa %xmm1,320(%rsp) | ||
1113 | |||
1114 | # qhasm: z15_stack = z15 | ||
1115 | # asm 1: movdqa <z15=int6464#3,>z15_stack=stack128#22 | ||
1116 | # asm 2: movdqa <z15=%xmm2,>z15_stack=336(%rsp) | ||
1117 | movdqa %xmm2,336(%rsp) | ||
1118 | |||
1119 | # qhasm: y4 = z12 | ||
1120 | # asm 1: movdqa <z12=int6464#14,>y4=int6464#2 | ||
1121 | # asm 2: movdqa <z12=%xmm13,>y4=%xmm1 | ||
1122 | movdqa %xmm13,%xmm1 | ||
1123 | |||
1124 | # qhasm: uint32323232 y4 += z0 | ||
1125 | # asm 1: paddd <z0=int6464#13,<y4=int6464#2 | ||
1126 | # asm 2: paddd <z0=%xmm12,<y4=%xmm1 | ||
1127 | paddd %xmm12,%xmm1 | ||
1128 | |||
1129 | # qhasm: r4 = y4 | ||
1130 | # asm 1: movdqa <y4=int6464#2,>r4=int6464#3 | ||
1131 | # asm 2: movdqa <y4=%xmm1,>r4=%xmm2 | ||
1132 | movdqa %xmm1,%xmm2 | ||
1133 | |||
1134 | # qhasm: uint32323232 y4 <<= 7 | ||
1135 | # asm 1: pslld $7,<y4=int6464#2 | ||
1136 | # asm 2: pslld $7,<y4=%xmm1 | ||
1137 | pslld $7,%xmm1 | ||
1138 | |||
1139 | # qhasm: z4 ^= y4 | ||
1140 | # asm 1: pxor <y4=int6464#2,<z4=int6464#15 | ||
1141 | # asm 2: pxor <y4=%xmm1,<z4=%xmm14 | ||
1142 | pxor %xmm1,%xmm14 | ||
1143 | |||
1144 | # qhasm: uint32323232 r4 >>= 25 | ||
1145 | # asm 1: psrld $25,<r4=int6464#3 | ||
1146 | # asm 2: psrld $25,<r4=%xmm2 | ||
1147 | psrld $25,%xmm2 | ||
1148 | |||
1149 | # qhasm: z4 ^= r4 | ||
1150 | # asm 1: pxor <r4=int6464#3,<z4=int6464#15 | ||
1151 | # asm 2: pxor <r4=%xmm2,<z4=%xmm14 | ||
1152 | pxor %xmm2,%xmm14 | ||
1153 | |||
1154 | # qhasm: y9 = z1 | ||
1155 | # asm 1: movdqa <z1=int6464#8,>y9=int6464#2 | ||
1156 | # asm 2: movdqa <z1=%xmm7,>y9=%xmm1 | ||
1157 | movdqa %xmm7,%xmm1 | ||
1158 | |||
1159 | # qhasm: uint32323232 y9 += z5 | ||
1160 | # asm 1: paddd <z5=int6464#1,<y9=int6464#2 | ||
1161 | # asm 2: paddd <z5=%xmm0,<y9=%xmm1 | ||
1162 | paddd %xmm0,%xmm1 | ||
1163 | |||
1164 | # qhasm: r9 = y9 | ||
1165 | # asm 1: movdqa <y9=int6464#2,>r9=int6464#3 | ||
1166 | # asm 2: movdqa <y9=%xmm1,>r9=%xmm2 | ||
1167 | movdqa %xmm1,%xmm2 | ||
1168 | |||
1169 | # qhasm: uint32323232 y9 <<= 7 | ||
1170 | # asm 1: pslld $7,<y9=int6464#2 | ||
1171 | # asm 2: pslld $7,<y9=%xmm1 | ||
1172 | pslld $7,%xmm1 | ||
1173 | |||
1174 | # qhasm: z9 ^= y9 | ||
1175 | # asm 1: pxor <y9=int6464#2,<z9=int6464#12 | ||
1176 | # asm 2: pxor <y9=%xmm1,<z9=%xmm11 | ||
1177 | pxor %xmm1,%xmm11 | ||
1178 | |||
1179 | # qhasm: uint32323232 r9 >>= 25 | ||
1180 | # asm 1: psrld $25,<r9=int6464#3 | ||
1181 | # asm 2: psrld $25,<r9=%xmm2 | ||
1182 | psrld $25,%xmm2 | ||
1183 | |||
1184 | # qhasm: z9 ^= r9 | ||
1185 | # asm 1: pxor <r9=int6464#3,<z9=int6464#12 | ||
1186 | # asm 2: pxor <r9=%xmm2,<z9=%xmm11 | ||
1187 | pxor %xmm2,%xmm11 | ||
1188 | |||
1189 | # qhasm: y8 = z0 | ||
1190 | # asm 1: movdqa <z0=int6464#13,>y8=int6464#2 | ||
1191 | # asm 2: movdqa <z0=%xmm12,>y8=%xmm1 | ||
1192 | movdqa %xmm12,%xmm1 | ||
1193 | |||
1194 | # qhasm: uint32323232 y8 += z4 | ||
1195 | # asm 1: paddd <z4=int6464#15,<y8=int6464#2 | ||
1196 | # asm 2: paddd <z4=%xmm14,<y8=%xmm1 | ||
1197 | paddd %xmm14,%xmm1 | ||
1198 | |||
1199 | # qhasm: r8 = y8 | ||
1200 | # asm 1: movdqa <y8=int6464#2,>r8=int6464#3 | ||
1201 | # asm 2: movdqa <y8=%xmm1,>r8=%xmm2 | ||
1202 | movdqa %xmm1,%xmm2 | ||
1203 | |||
1204 | # qhasm: uint32323232 y8 <<= 9 | ||
1205 | # asm 1: pslld $9,<y8=int6464#2 | ||
1206 | # asm 2: pslld $9,<y8=%xmm1 | ||
1207 | pslld $9,%xmm1 | ||
1208 | |||
1209 | # qhasm: z8 ^= y8 | ||
1210 | # asm 1: pxor <y8=int6464#2,<z8=int6464#16 | ||
1211 | # asm 2: pxor <y8=%xmm1,<z8=%xmm15 | ||
1212 | pxor %xmm1,%xmm15 | ||
1213 | |||
1214 | # qhasm: uint32323232 r8 >>= 23 | ||
1215 | # asm 1: psrld $23,<r8=int6464#3 | ||
1216 | # asm 2: psrld $23,<r8=%xmm2 | ||
1217 | psrld $23,%xmm2 | ||
1218 | |||
1219 | # qhasm: z8 ^= r8 | ||
1220 | # asm 1: pxor <r8=int6464#3,<z8=int6464#16 | ||
1221 | # asm 2: pxor <r8=%xmm2,<z8=%xmm15 | ||
1222 | pxor %xmm2,%xmm15 | ||
1223 | |||
1224 | # qhasm: y13 = z5 | ||
1225 | # asm 1: movdqa <z5=int6464#1,>y13=int6464#2 | ||
1226 | # asm 2: movdqa <z5=%xmm0,>y13=%xmm1 | ||
1227 | movdqa %xmm0,%xmm1 | ||
1228 | |||
1229 | # qhasm: uint32323232 y13 += z9 | ||
1230 | # asm 1: paddd <z9=int6464#12,<y13=int6464#2 | ||
1231 | # asm 2: paddd <z9=%xmm11,<y13=%xmm1 | ||
1232 | paddd %xmm11,%xmm1 | ||
1233 | |||
1234 | # qhasm: r13 = y13 | ||
1235 | # asm 1: movdqa <y13=int6464#2,>r13=int6464#3 | ||
1236 | # asm 2: movdqa <y13=%xmm1,>r13=%xmm2 | ||
1237 | movdqa %xmm1,%xmm2 | ||
1238 | |||
1239 | # qhasm: uint32323232 y13 <<= 9 | ||
1240 | # asm 1: pslld $9,<y13=int6464#2 | ||
1241 | # asm 2: pslld $9,<y13=%xmm1 | ||
1242 | pslld $9,%xmm1 | ||
1243 | |||
1244 | # qhasm: z13 ^= y13 | ||
1245 | # asm 1: pxor <y13=int6464#2,<z13=int6464#10 | ||
1246 | # asm 2: pxor <y13=%xmm1,<z13=%xmm9 | ||
1247 | pxor %xmm1,%xmm9 | ||
1248 | |||
1249 | # qhasm: uint32323232 r13 >>= 23 | ||
1250 | # asm 1: psrld $23,<r13=int6464#3 | ||
1251 | # asm 2: psrld $23,<r13=%xmm2 | ||
1252 | psrld $23,%xmm2 | ||
1253 | |||
1254 | # qhasm: z13 ^= r13 | ||
1255 | # asm 1: pxor <r13=int6464#3,<z13=int6464#10 | ||
1256 | # asm 2: pxor <r13=%xmm2,<z13=%xmm9 | ||
1257 | pxor %xmm2,%xmm9 | ||
1258 | |||
1259 | # qhasm: y12 = z4 | ||
1260 | # asm 1: movdqa <z4=int6464#15,>y12=int6464#2 | ||
1261 | # asm 2: movdqa <z4=%xmm14,>y12=%xmm1 | ||
1262 | movdqa %xmm14,%xmm1 | ||
1263 | |||
1264 | # qhasm: uint32323232 y12 += z8 | ||
1265 | # asm 1: paddd <z8=int6464#16,<y12=int6464#2 | ||
1266 | # asm 2: paddd <z8=%xmm15,<y12=%xmm1 | ||
1267 | paddd %xmm15,%xmm1 | ||
1268 | |||
1269 | # qhasm: r12 = y12 | ||
1270 | # asm 1: movdqa <y12=int6464#2,>r12=int6464#3 | ||
1271 | # asm 2: movdqa <y12=%xmm1,>r12=%xmm2 | ||
1272 | movdqa %xmm1,%xmm2 | ||
1273 | |||
1274 | # qhasm: uint32323232 y12 <<= 13 | ||
1275 | # asm 1: pslld $13,<y12=int6464#2 | ||
1276 | # asm 2: pslld $13,<y12=%xmm1 | ||
1277 | pslld $13,%xmm1 | ||
1278 | |||
1279 | # qhasm: z12 ^= y12 | ||
1280 | # asm 1: pxor <y12=int6464#2,<z12=int6464#14 | ||
1281 | # asm 2: pxor <y12=%xmm1,<z12=%xmm13 | ||
1282 | pxor %xmm1,%xmm13 | ||
1283 | |||
1284 | # qhasm: uint32323232 r12 >>= 19 | ||
1285 | # asm 1: psrld $19,<r12=int6464#3 | ||
1286 | # asm 2: psrld $19,<r12=%xmm2 | ||
1287 | psrld $19,%xmm2 | ||
1288 | |||
1289 | # qhasm: z12 ^= r12 | ||
1290 | # asm 1: pxor <r12=int6464#3,<z12=int6464#14 | ||
1291 | # asm 2: pxor <r12=%xmm2,<z12=%xmm13 | ||
1292 | pxor %xmm2,%xmm13 | ||
1293 | |||
1294 | # qhasm: y1 = z9 | ||
1295 | # asm 1: movdqa <z9=int6464#12,>y1=int6464#2 | ||
1296 | # asm 2: movdqa <z9=%xmm11,>y1=%xmm1 | ||
1297 | movdqa %xmm11,%xmm1 | ||
1298 | |||
1299 | # qhasm: uint32323232 y1 += z13 | ||
1300 | # asm 1: paddd <z13=int6464#10,<y1=int6464#2 | ||
1301 | # asm 2: paddd <z13=%xmm9,<y1=%xmm1 | ||
1302 | paddd %xmm9,%xmm1 | ||
1303 | |||
1304 | # qhasm: r1 = y1 | ||
1305 | # asm 1: movdqa <y1=int6464#2,>r1=int6464#3 | ||
1306 | # asm 2: movdqa <y1=%xmm1,>r1=%xmm2 | ||
1307 | movdqa %xmm1,%xmm2 | ||
1308 | |||
1309 | # qhasm: uint32323232 y1 <<= 13 | ||
1310 | # asm 1: pslld $13,<y1=int6464#2 | ||
1311 | # asm 2: pslld $13,<y1=%xmm1 | ||
1312 | pslld $13,%xmm1 | ||
1313 | |||
1314 | # qhasm: z1 ^= y1 | ||
1315 | # asm 1: pxor <y1=int6464#2,<z1=int6464#8 | ||
1316 | # asm 2: pxor <y1=%xmm1,<z1=%xmm7 | ||
1317 | pxor %xmm1,%xmm7 | ||
1318 | |||
1319 | # qhasm: uint32323232 r1 >>= 19 | ||
1320 | # asm 1: psrld $19,<r1=int6464#3 | ||
1321 | # asm 2: psrld $19,<r1=%xmm2 | ||
1322 | psrld $19,%xmm2 | ||
1323 | |||
1324 | # qhasm: z1 ^= r1 | ||
1325 | # asm 1: pxor <r1=int6464#3,<z1=int6464#8 | ||
1326 | # asm 2: pxor <r1=%xmm2,<z1=%xmm7 | ||
1327 | pxor %xmm2,%xmm7 | ||
1328 | |||
1329 | # qhasm: y0 = z8 | ||
1330 | # asm 1: movdqa <z8=int6464#16,>y0=int6464#2 | ||
1331 | # asm 2: movdqa <z8=%xmm15,>y0=%xmm1 | ||
1332 | movdqa %xmm15,%xmm1 | ||
1333 | |||
1334 | # qhasm: uint32323232 y0 += z12 | ||
1335 | # asm 1: paddd <z12=int6464#14,<y0=int6464#2 | ||
1336 | # asm 2: paddd <z12=%xmm13,<y0=%xmm1 | ||
1337 | paddd %xmm13,%xmm1 | ||
1338 | |||
1339 | # qhasm: r0 = y0 | ||
1340 | # asm 1: movdqa <y0=int6464#2,>r0=int6464#3 | ||
1341 | # asm 2: movdqa <y0=%xmm1,>r0=%xmm2 | ||
1342 | movdqa %xmm1,%xmm2 | ||
1343 | |||
1344 | # qhasm: uint32323232 y0 <<= 18 | ||
1345 | # asm 1: pslld $18,<y0=int6464#2 | ||
1346 | # asm 2: pslld $18,<y0=%xmm1 | ||
1347 | pslld $18,%xmm1 | ||
1348 | |||
1349 | # qhasm: z0 ^= y0 | ||
1350 | # asm 1: pxor <y0=int6464#2,<z0=int6464#13 | ||
1351 | # asm 2: pxor <y0=%xmm1,<z0=%xmm12 | ||
1352 | pxor %xmm1,%xmm12 | ||
1353 | |||
1354 | # qhasm: uint32323232 r0 >>= 14 | ||
1355 | # asm 1: psrld $14,<r0=int6464#3 | ||
1356 | # asm 2: psrld $14,<r0=%xmm2 | ||
1357 | psrld $14,%xmm2 | ||
1358 | |||
1359 | # qhasm: z0 ^= r0 | ||
1360 | # asm 1: pxor <r0=int6464#3,<z0=int6464#13 | ||
1361 | # asm 2: pxor <r0=%xmm2,<z0=%xmm12 | ||
1362 | pxor %xmm2,%xmm12 | ||
1363 | |||
1364 | # qhasm: z10 = z10_stack | ||
1365 | # asm 1: movdqa <z10_stack=stack128#21,>z10=int6464#2 | ||
1366 | # asm 2: movdqa <z10_stack=320(%rsp),>z10=%xmm1 | ||
1367 | movdqa 320(%rsp),%xmm1 | ||
1368 | |||
1369 | # qhasm: z0_stack = z0 | ||
1370 | # asm 1: movdqa <z0=int6464#13,>z0_stack=stack128#21 | ||
1371 | # asm 2: movdqa <z0=%xmm12,>z0_stack=320(%rsp) | ||
1372 | movdqa %xmm12,320(%rsp) | ||
1373 | |||
1374 | # qhasm: y5 = z13 | ||
1375 | # asm 1: movdqa <z13=int6464#10,>y5=int6464#3 | ||
1376 | # asm 2: movdqa <z13=%xmm9,>y5=%xmm2 | ||
1377 | movdqa %xmm9,%xmm2 | ||
1378 | |||
1379 | # qhasm: uint32323232 y5 += z1 | ||
1380 | # asm 1: paddd <z1=int6464#8,<y5=int6464#3 | ||
1381 | # asm 2: paddd <z1=%xmm7,<y5=%xmm2 | ||
1382 | paddd %xmm7,%xmm2 | ||
1383 | |||
1384 | # qhasm: r5 = y5 | ||
1385 | # asm 1: movdqa <y5=int6464#3,>r5=int6464#13 | ||
1386 | # asm 2: movdqa <y5=%xmm2,>r5=%xmm12 | ||
1387 | movdqa %xmm2,%xmm12 | ||
1388 | |||
1389 | # qhasm: uint32323232 y5 <<= 18 | ||
1390 | # asm 1: pslld $18,<y5=int6464#3 | ||
1391 | # asm 2: pslld $18,<y5=%xmm2 | ||
1392 | pslld $18,%xmm2 | ||
1393 | |||
1394 | # qhasm: z5 ^= y5 | ||
1395 | # asm 1: pxor <y5=int6464#3,<z5=int6464#1 | ||
1396 | # asm 2: pxor <y5=%xmm2,<z5=%xmm0 | ||
1397 | pxor %xmm2,%xmm0 | ||
1398 | |||
1399 | # qhasm: uint32323232 r5 >>= 14 | ||
1400 | # asm 1: psrld $14,<r5=int6464#13 | ||
1401 | # asm 2: psrld $14,<r5=%xmm12 | ||
1402 | psrld $14,%xmm12 | ||
1403 | |||
1404 | # qhasm: z5 ^= r5 | ||
1405 | # asm 1: pxor <r5=int6464#13,<z5=int6464#1 | ||
1406 | # asm 2: pxor <r5=%xmm12,<z5=%xmm0 | ||
1407 | pxor %xmm12,%xmm0 | ||
1408 | |||
1409 | # qhasm: y14 = z6 | ||
1410 | # asm 1: movdqa <z6=int6464#6,>y14=int6464#3 | ||
1411 | # asm 2: movdqa <z6=%xmm5,>y14=%xmm2 | ||
1412 | movdqa %xmm5,%xmm2 | ||
1413 | |||
1414 | # qhasm: uint32323232 y14 += z10 | ||
1415 | # asm 1: paddd <z10=int6464#2,<y14=int6464#3 | ||
1416 | # asm 2: paddd <z10=%xmm1,<y14=%xmm2 | ||
1417 | paddd %xmm1,%xmm2 | ||
1418 | |||
1419 | # qhasm: r14 = y14 | ||
1420 | # asm 1: movdqa <y14=int6464#3,>r14=int6464#13 | ||
1421 | # asm 2: movdqa <y14=%xmm2,>r14=%xmm12 | ||
1422 | movdqa %xmm2,%xmm12 | ||
1423 | |||
1424 | # qhasm: uint32323232 y14 <<= 7 | ||
1425 | # asm 1: pslld $7,<y14=int6464#3 | ||
1426 | # asm 2: pslld $7,<y14=%xmm2 | ||
1427 | pslld $7,%xmm2 | ||
1428 | |||
1429 | # qhasm: z14 ^= y14 | ||
1430 | # asm 1: pxor <y14=int6464#3,<z14=int6464#4 | ||
1431 | # asm 2: pxor <y14=%xmm2,<z14=%xmm3 | ||
1432 | pxor %xmm2,%xmm3 | ||
1433 | |||
1434 | # qhasm: uint32323232 r14 >>= 25 | ||
1435 | # asm 1: psrld $25,<r14=int6464#13 | ||
1436 | # asm 2: psrld $25,<r14=%xmm12 | ||
1437 | psrld $25,%xmm12 | ||
1438 | |||
1439 | # qhasm: z14 ^= r14 | ||
1440 | # asm 1: pxor <r14=int6464#13,<z14=int6464#4 | ||
1441 | # asm 2: pxor <r14=%xmm12,<z14=%xmm3 | ||
1442 | pxor %xmm12,%xmm3 | ||
1443 | |||
1444 | # qhasm: z15 = z15_stack | ||
1445 | # asm 1: movdqa <z15_stack=stack128#22,>z15=int6464#3 | ||
1446 | # asm 2: movdqa <z15_stack=336(%rsp),>z15=%xmm2 | ||
1447 | movdqa 336(%rsp),%xmm2 | ||
1448 | |||
1449 | # qhasm: z5_stack = z5 | ||
1450 | # asm 1: movdqa <z5=int6464#1,>z5_stack=stack128#22 | ||
1451 | # asm 2: movdqa <z5=%xmm0,>z5_stack=336(%rsp) | ||
1452 | movdqa %xmm0,336(%rsp) | ||
1453 | |||
1454 | # qhasm: y3 = z11 | ||
1455 | # asm 1: movdqa <z11=int6464#7,>y3=int6464#1 | ||
1456 | # asm 2: movdqa <z11=%xmm6,>y3=%xmm0 | ||
1457 | movdqa %xmm6,%xmm0 | ||
1458 | |||
1459 | # qhasm: uint32323232 y3 += z15 | ||
1460 | # asm 1: paddd <z15=int6464#3,<y3=int6464#1 | ||
1461 | # asm 2: paddd <z15=%xmm2,<y3=%xmm0 | ||
1462 | paddd %xmm2,%xmm0 | ||
1463 | |||
1464 | # qhasm: r3 = y3 | ||
1465 | # asm 1: movdqa <y3=int6464#1,>r3=int6464#13 | ||
1466 | # asm 2: movdqa <y3=%xmm0,>r3=%xmm12 | ||
1467 | movdqa %xmm0,%xmm12 | ||
1468 | |||
1469 | # qhasm: uint32323232 y3 <<= 7 | ||
1470 | # asm 1: pslld $7,<y3=int6464#1 | ||
1471 | # asm 2: pslld $7,<y3=%xmm0 | ||
1472 | pslld $7,%xmm0 | ||
1473 | |||
1474 | # qhasm: z3 ^= y3 | ||
1475 | # asm 1: pxor <y3=int6464#1,<z3=int6464#5 | ||
1476 | # asm 2: pxor <y3=%xmm0,<z3=%xmm4 | ||
1477 | pxor %xmm0,%xmm4 | ||
1478 | |||
1479 | # qhasm: uint32323232 r3 >>= 25 | ||
1480 | # asm 1: psrld $25,<r3=int6464#13 | ||
1481 | # asm 2: psrld $25,<r3=%xmm12 | ||
1482 | psrld $25,%xmm12 | ||
1483 | |||
1484 | # qhasm: z3 ^= r3 | ||
1485 | # asm 1: pxor <r3=int6464#13,<z3=int6464#5 | ||
1486 | # asm 2: pxor <r3=%xmm12,<z3=%xmm4 | ||
1487 | pxor %xmm12,%xmm4 | ||
1488 | |||
1489 | # qhasm: y2 = z10 | ||
1490 | # asm 1: movdqa <z10=int6464#2,>y2=int6464#1 | ||
1491 | # asm 2: movdqa <z10=%xmm1,>y2=%xmm0 | ||
1492 | movdqa %xmm1,%xmm0 | ||
1493 | |||
1494 | # qhasm: uint32323232 y2 += z14 | ||
1495 | # asm 1: paddd <z14=int6464#4,<y2=int6464#1 | ||
1496 | # asm 2: paddd <z14=%xmm3,<y2=%xmm0 | ||
1497 | paddd %xmm3,%xmm0 | ||
1498 | |||
1499 | # qhasm: r2 = y2 | ||
1500 | # asm 1: movdqa <y2=int6464#1,>r2=int6464#13 | ||
1501 | # asm 2: movdqa <y2=%xmm0,>r2=%xmm12 | ||
1502 | movdqa %xmm0,%xmm12 | ||
1503 | |||
1504 | # qhasm: uint32323232 y2 <<= 9 | ||
1505 | # asm 1: pslld $9,<y2=int6464#1 | ||
1506 | # asm 2: pslld $9,<y2=%xmm0 | ||
1507 | pslld $9,%xmm0 | ||
1508 | |||
1509 | # qhasm: z2 ^= y2 | ||
1510 | # asm 1: pxor <y2=int6464#1,<z2=int6464#11 | ||
1511 | # asm 2: pxor <y2=%xmm0,<z2=%xmm10 | ||
1512 | pxor %xmm0,%xmm10 | ||
1513 | |||
1514 | # qhasm: uint32323232 r2 >>= 23 | ||
1515 | # asm 1: psrld $23,<r2=int6464#13 | ||
1516 | # asm 2: psrld $23,<r2=%xmm12 | ||
1517 | psrld $23,%xmm12 | ||
1518 | |||
1519 | # qhasm: z2 ^= r2 | ||
1520 | # asm 1: pxor <r2=int6464#13,<z2=int6464#11 | ||
1521 | # asm 2: pxor <r2=%xmm12,<z2=%xmm10 | ||
1522 | pxor %xmm12,%xmm10 | ||
1523 | |||
1524 | # qhasm: y7 = z15 | ||
1525 | # asm 1: movdqa <z15=int6464#3,>y7=int6464#1 | ||
1526 | # asm 2: movdqa <z15=%xmm2,>y7=%xmm0 | ||
1527 | movdqa %xmm2,%xmm0 | ||
1528 | |||
1529 | # qhasm: uint32323232 y7 += z3 | ||
1530 | # asm 1: paddd <z3=int6464#5,<y7=int6464#1 | ||
1531 | # asm 2: paddd <z3=%xmm4,<y7=%xmm0 | ||
1532 | paddd %xmm4,%xmm0 | ||
1533 | |||
1534 | # qhasm: r7 = y7 | ||
1535 | # asm 1: movdqa <y7=int6464#1,>r7=int6464#13 | ||
1536 | # asm 2: movdqa <y7=%xmm0,>r7=%xmm12 | ||
1537 | movdqa %xmm0,%xmm12 | ||
1538 | |||
1539 | # qhasm: uint32323232 y7 <<= 9 | ||
1540 | # asm 1: pslld $9,<y7=int6464#1 | ||
1541 | # asm 2: pslld $9,<y7=%xmm0 | ||
1542 | pslld $9,%xmm0 | ||
1543 | |||
1544 | # qhasm: z7 ^= y7 | ||
1545 | # asm 1: pxor <y7=int6464#1,<z7=int6464#9 | ||
1546 | # asm 2: pxor <y7=%xmm0,<z7=%xmm8 | ||
1547 | pxor %xmm0,%xmm8 | ||
1548 | |||
1549 | # qhasm: uint32323232 r7 >>= 23 | ||
1550 | # asm 1: psrld $23,<r7=int6464#13 | ||
1551 | # asm 2: psrld $23,<r7=%xmm12 | ||
1552 | psrld $23,%xmm12 | ||
1553 | |||
1554 | # qhasm: z7 ^= r7 | ||
1555 | # asm 1: pxor <r7=int6464#13,<z7=int6464#9 | ||
1556 | # asm 2: pxor <r7=%xmm12,<z7=%xmm8 | ||
1557 | pxor %xmm12,%xmm8 | ||
1558 | |||
1559 | # qhasm: y6 = z14 | ||
1560 | # asm 1: movdqa <z14=int6464#4,>y6=int6464#1 | ||
1561 | # asm 2: movdqa <z14=%xmm3,>y6=%xmm0 | ||
1562 | movdqa %xmm3,%xmm0 | ||
1563 | |||
1564 | # qhasm: uint32323232 y6 += z2 | ||
1565 | # asm 1: paddd <z2=int6464#11,<y6=int6464#1 | ||
1566 | # asm 2: paddd <z2=%xmm10,<y6=%xmm0 | ||
1567 | paddd %xmm10,%xmm0 | ||
1568 | |||
1569 | # qhasm: r6 = y6 | ||
1570 | # asm 1: movdqa <y6=int6464#1,>r6=int6464#13 | ||
1571 | # asm 2: movdqa <y6=%xmm0,>r6=%xmm12 | ||
1572 | movdqa %xmm0,%xmm12 | ||
1573 | |||
1574 | # qhasm: uint32323232 y6 <<= 13 | ||
1575 | # asm 1: pslld $13,<y6=int6464#1 | ||
1576 | # asm 2: pslld $13,<y6=%xmm0 | ||
1577 | pslld $13,%xmm0 | ||
1578 | |||
1579 | # qhasm: z6 ^= y6 | ||
1580 | # asm 1: pxor <y6=int6464#1,<z6=int6464#6 | ||
1581 | # asm 2: pxor <y6=%xmm0,<z6=%xmm5 | ||
1582 | pxor %xmm0,%xmm5 | ||
1583 | |||
1584 | # qhasm: uint32323232 r6 >>= 19 | ||
1585 | # asm 1: psrld $19,<r6=int6464#13 | ||
1586 | # asm 2: psrld $19,<r6=%xmm12 | ||
1587 | psrld $19,%xmm12 | ||
1588 | |||
1589 | # qhasm: z6 ^= r6 | ||
1590 | # asm 1: pxor <r6=int6464#13,<z6=int6464#6 | ||
1591 | # asm 2: pxor <r6=%xmm12,<z6=%xmm5 | ||
1592 | pxor %xmm12,%xmm5 | ||
1593 | |||
1594 | # qhasm: y11 = z3 | ||
1595 | # asm 1: movdqa <z3=int6464#5,>y11=int6464#1 | ||
1596 | # asm 2: movdqa <z3=%xmm4,>y11=%xmm0 | ||
1597 | movdqa %xmm4,%xmm0 | ||
1598 | |||
1599 | # qhasm: uint32323232 y11 += z7 | ||
1600 | # asm 1: paddd <z7=int6464#9,<y11=int6464#1 | ||
1601 | # asm 2: paddd <z7=%xmm8,<y11=%xmm0 | ||
1602 | paddd %xmm8,%xmm0 | ||
1603 | |||
1604 | # qhasm: r11 = y11 | ||
1605 | # asm 1: movdqa <y11=int6464#1,>r11=int6464#13 | ||
1606 | # asm 2: movdqa <y11=%xmm0,>r11=%xmm12 | ||
1607 | movdqa %xmm0,%xmm12 | ||
1608 | |||
1609 | # qhasm: uint32323232 y11 <<= 13 | ||
1610 | # asm 1: pslld $13,<y11=int6464#1 | ||
1611 | # asm 2: pslld $13,<y11=%xmm0 | ||
1612 | pslld $13,%xmm0 | ||
1613 | |||
1614 | # qhasm: z11 ^= y11 | ||
1615 | # asm 1: pxor <y11=int6464#1,<z11=int6464#7 | ||
1616 | # asm 2: pxor <y11=%xmm0,<z11=%xmm6 | ||
1617 | pxor %xmm0,%xmm6 | ||
1618 | |||
1619 | # qhasm: uint32323232 r11 >>= 19 | ||
1620 | # asm 1: psrld $19,<r11=int6464#13 | ||
1621 | # asm 2: psrld $19,<r11=%xmm12 | ||
1622 | psrld $19,%xmm12 | ||
1623 | |||
1624 | # qhasm: z11 ^= r11 | ||
1625 | # asm 1: pxor <r11=int6464#13,<z11=int6464#7 | ||
1626 | # asm 2: pxor <r11=%xmm12,<z11=%xmm6 | ||
1627 | pxor %xmm12,%xmm6 | ||
1628 | |||
1629 | # qhasm: y10 = z2 | ||
1630 | # asm 1: movdqa <z2=int6464#11,>y10=int6464#1 | ||
1631 | # asm 2: movdqa <z2=%xmm10,>y10=%xmm0 | ||
1632 | movdqa %xmm10,%xmm0 | ||
1633 | |||
1634 | # qhasm: uint32323232 y10 += z6 | ||
1635 | # asm 1: paddd <z6=int6464#6,<y10=int6464#1 | ||
1636 | # asm 2: paddd <z6=%xmm5,<y10=%xmm0 | ||
1637 | paddd %xmm5,%xmm0 | ||
1638 | |||
1639 | # qhasm: r10 = y10 | ||
1640 | # asm 1: movdqa <y10=int6464#1,>r10=int6464#13 | ||
1641 | # asm 2: movdqa <y10=%xmm0,>r10=%xmm12 | ||
1642 | movdqa %xmm0,%xmm12 | ||
1643 | |||
1644 | # qhasm: uint32323232 y10 <<= 18 | ||
1645 | # asm 1: pslld $18,<y10=int6464#1 | ||
1646 | # asm 2: pslld $18,<y10=%xmm0 | ||
1647 | pslld $18,%xmm0 | ||
1648 | |||
1649 | # qhasm: z10 ^= y10 | ||
1650 | # asm 1: pxor <y10=int6464#1,<z10=int6464#2 | ||
1651 | # asm 2: pxor <y10=%xmm0,<z10=%xmm1 | ||
1652 | pxor %xmm0,%xmm1 | ||
1653 | |||
1654 | # qhasm: uint32323232 r10 >>= 14 | ||
1655 | # asm 1: psrld $14,<r10=int6464#13 | ||
1656 | # asm 2: psrld $14,<r10=%xmm12 | ||
1657 | psrld $14,%xmm12 | ||
1658 | |||
1659 | # qhasm: z10 ^= r10 | ||
1660 | # asm 1: pxor <r10=int6464#13,<z10=int6464#2 | ||
1661 | # asm 2: pxor <r10=%xmm12,<z10=%xmm1 | ||
1662 | pxor %xmm12,%xmm1 | ||
1663 | |||
1664 | # qhasm: z0 = z0_stack | ||
1665 | # asm 1: movdqa <z0_stack=stack128#21,>z0=int6464#1 | ||
1666 | # asm 2: movdqa <z0_stack=320(%rsp),>z0=%xmm0 | ||
1667 | movdqa 320(%rsp),%xmm0 | ||
1668 | |||
1669 | # qhasm: z10_stack = z10 | ||
1670 | # asm 1: movdqa <z10=int6464#2,>z10_stack=stack128#21 | ||
1671 | # asm 2: movdqa <z10=%xmm1,>z10_stack=320(%rsp) | ||
1672 | movdqa %xmm1,320(%rsp) | ||
1673 | |||
1674 | # qhasm: y1 = z3 | ||
1675 | # asm 1: movdqa <z3=int6464#5,>y1=int6464#2 | ||
1676 | # asm 2: movdqa <z3=%xmm4,>y1=%xmm1 | ||
1677 | movdqa %xmm4,%xmm1 | ||
1678 | |||
1679 | # qhasm: uint32323232 y1 += z0 | ||
1680 | # asm 1: paddd <z0=int6464#1,<y1=int6464#2 | ||
1681 | # asm 2: paddd <z0=%xmm0,<y1=%xmm1 | ||
1682 | paddd %xmm0,%xmm1 | ||
1683 | |||
1684 | # qhasm: r1 = y1 | ||
1685 | # asm 1: movdqa <y1=int6464#2,>r1=int6464#13 | ||
1686 | # asm 2: movdqa <y1=%xmm1,>r1=%xmm12 | ||
1687 | movdqa %xmm1,%xmm12 | ||
1688 | |||
1689 | # qhasm: uint32323232 y1 <<= 7 | ||
1690 | # asm 1: pslld $7,<y1=int6464#2 | ||
1691 | # asm 2: pslld $7,<y1=%xmm1 | ||
1692 | pslld $7,%xmm1 | ||
1693 | |||
1694 | # qhasm: z1 ^= y1 | ||
1695 | # asm 1: pxor <y1=int6464#2,<z1=int6464#8 | ||
1696 | # asm 2: pxor <y1=%xmm1,<z1=%xmm7 | ||
1697 | pxor %xmm1,%xmm7 | ||
1698 | |||
1699 | # qhasm: uint32323232 r1 >>= 25 | ||
1700 | # asm 1: psrld $25,<r1=int6464#13 | ||
1701 | # asm 2: psrld $25,<r1=%xmm12 | ||
1702 | psrld $25,%xmm12 | ||
1703 | |||
1704 | # qhasm: z1 ^= r1 | ||
1705 | # asm 1: pxor <r1=int6464#13,<z1=int6464#8 | ||
1706 | # asm 2: pxor <r1=%xmm12,<z1=%xmm7 | ||
1707 | pxor %xmm12,%xmm7 | ||
1708 | |||
1709 | # qhasm: y15 = z7 | ||
1710 | # asm 1: movdqa <z7=int6464#9,>y15=int6464#2 | ||
1711 | # asm 2: movdqa <z7=%xmm8,>y15=%xmm1 | ||
1712 | movdqa %xmm8,%xmm1 | ||
1713 | |||
1714 | # qhasm: uint32323232 y15 += z11 | ||
1715 | # asm 1: paddd <z11=int6464#7,<y15=int6464#2 | ||
1716 | # asm 2: paddd <z11=%xmm6,<y15=%xmm1 | ||
1717 | paddd %xmm6,%xmm1 | ||
1718 | |||
1719 | # qhasm: r15 = y15 | ||
1720 | # asm 1: movdqa <y15=int6464#2,>r15=int6464#13 | ||
1721 | # asm 2: movdqa <y15=%xmm1,>r15=%xmm12 | ||
1722 | movdqa %xmm1,%xmm12 | ||
1723 | |||
1724 | # qhasm: uint32323232 y15 <<= 18 | ||
1725 | # asm 1: pslld $18,<y15=int6464#2 | ||
1726 | # asm 2: pslld $18,<y15=%xmm1 | ||
1727 | pslld $18,%xmm1 | ||
1728 | |||
1729 | # qhasm: z15 ^= y15 | ||
1730 | # asm 1: pxor <y15=int6464#2,<z15=int6464#3 | ||
1731 | # asm 2: pxor <y15=%xmm1,<z15=%xmm2 | ||
1732 | pxor %xmm1,%xmm2 | ||
1733 | |||
1734 | # qhasm: uint32323232 r15 >>= 14 | ||
1735 | # asm 1: psrld $14,<r15=int6464#13 | ||
1736 | # asm 2: psrld $14,<r15=%xmm12 | ||
1737 | psrld $14,%xmm12 | ||
1738 | |||
1739 | # qhasm: z15 ^= r15 | ||
1740 | # asm 1: pxor <r15=int6464#13,<z15=int6464#3 | ||
1741 | # asm 2: pxor <r15=%xmm12,<z15=%xmm2 | ||
1742 | pxor %xmm12,%xmm2 | ||
1743 | |||
1744 | # qhasm: z5 = z5_stack | ||
1745 | # asm 1: movdqa <z5_stack=stack128#22,>z5=int6464#13 | ||
1746 | # asm 2: movdqa <z5_stack=336(%rsp),>z5=%xmm12 | ||
1747 | movdqa 336(%rsp),%xmm12 | ||
1748 | |||
1749 | # qhasm: z15_stack = z15 | ||
1750 | # asm 1: movdqa <z15=int6464#3,>z15_stack=stack128#22 | ||
1751 | # asm 2: movdqa <z15=%xmm2,>z15_stack=336(%rsp) | ||
1752 | movdqa %xmm2,336(%rsp) | ||
1753 | |||
1754 | # qhasm: y6 = z4 | ||
1755 | # asm 1: movdqa <z4=int6464#15,>y6=int6464#2 | ||
1756 | # asm 2: movdqa <z4=%xmm14,>y6=%xmm1 | ||
1757 | movdqa %xmm14,%xmm1 | ||
1758 | |||
1759 | # qhasm: uint32323232 y6 += z5 | ||
1760 | # asm 1: paddd <z5=int6464#13,<y6=int6464#2 | ||
1761 | # asm 2: paddd <z5=%xmm12,<y6=%xmm1 | ||
1762 | paddd %xmm12,%xmm1 | ||
1763 | |||
1764 | # qhasm: r6 = y6 | ||
1765 | # asm 1: movdqa <y6=int6464#2,>r6=int6464#3 | ||
1766 | # asm 2: movdqa <y6=%xmm1,>r6=%xmm2 | ||
1767 | movdqa %xmm1,%xmm2 | ||
1768 | |||
1769 | # qhasm: uint32323232 y6 <<= 7 | ||
1770 | # asm 1: pslld $7,<y6=int6464#2 | ||
1771 | # asm 2: pslld $7,<y6=%xmm1 | ||
1772 | pslld $7,%xmm1 | ||
1773 | |||
1774 | # qhasm: z6 ^= y6 | ||
1775 | # asm 1: pxor <y6=int6464#2,<z6=int6464#6 | ||
1776 | # asm 2: pxor <y6=%xmm1,<z6=%xmm5 | ||
1777 | pxor %xmm1,%xmm5 | ||
1778 | |||
1779 | # qhasm: uint32323232 r6 >>= 25 | ||
1780 | # asm 1: psrld $25,<r6=int6464#3 | ||
1781 | # asm 2: psrld $25,<r6=%xmm2 | ||
1782 | psrld $25,%xmm2 | ||
1783 | |||
1784 | # qhasm: z6 ^= r6 | ||
1785 | # asm 1: pxor <r6=int6464#3,<z6=int6464#6 | ||
1786 | # asm 2: pxor <r6=%xmm2,<z6=%xmm5 | ||
1787 | pxor %xmm2,%xmm5 | ||
1788 | |||
1789 | # qhasm: y2 = z0 | ||
1790 | # asm 1: movdqa <z0=int6464#1,>y2=int6464#2 | ||
1791 | # asm 2: movdqa <z0=%xmm0,>y2=%xmm1 | ||
1792 | movdqa %xmm0,%xmm1 | ||
1793 | |||
1794 | # qhasm: uint32323232 y2 += z1 | ||
1795 | # asm 1: paddd <z1=int6464#8,<y2=int6464#2 | ||
1796 | # asm 2: paddd <z1=%xmm7,<y2=%xmm1 | ||
1797 | paddd %xmm7,%xmm1 | ||
1798 | |||
1799 | # qhasm: r2 = y2 | ||
1800 | # asm 1: movdqa <y2=int6464#2,>r2=int6464#3 | ||
1801 | # asm 2: movdqa <y2=%xmm1,>r2=%xmm2 | ||
1802 | movdqa %xmm1,%xmm2 | ||
1803 | |||
1804 | # qhasm: uint32323232 y2 <<= 9 | ||
1805 | # asm 1: pslld $9,<y2=int6464#2 | ||
1806 | # asm 2: pslld $9,<y2=%xmm1 | ||
1807 | pslld $9,%xmm1 | ||
1808 | |||
1809 | # qhasm: z2 ^= y2 | ||
1810 | # asm 1: pxor <y2=int6464#2,<z2=int6464#11 | ||
1811 | # asm 2: pxor <y2=%xmm1,<z2=%xmm10 | ||
1812 | pxor %xmm1,%xmm10 | ||
1813 | |||
1814 | # qhasm: uint32323232 r2 >>= 23 | ||
1815 | # asm 1: psrld $23,<r2=int6464#3 | ||
1816 | # asm 2: psrld $23,<r2=%xmm2 | ||
1817 | psrld $23,%xmm2 | ||
1818 | |||
1819 | # qhasm: z2 ^= r2 | ||
1820 | # asm 1: pxor <r2=int6464#3,<z2=int6464#11 | ||
1821 | # asm 2: pxor <r2=%xmm2,<z2=%xmm10 | ||
1822 | pxor %xmm2,%xmm10 | ||
1823 | |||
1824 | # qhasm: y7 = z5 | ||
1825 | # asm 1: movdqa <z5=int6464#13,>y7=int6464#2 | ||
1826 | # asm 2: movdqa <z5=%xmm12,>y7=%xmm1 | ||
1827 | movdqa %xmm12,%xmm1 | ||
1828 | |||
1829 | # qhasm: uint32323232 y7 += z6 | ||
1830 | # asm 1: paddd <z6=int6464#6,<y7=int6464#2 | ||
1831 | # asm 2: paddd <z6=%xmm5,<y7=%xmm1 | ||
1832 | paddd %xmm5,%xmm1 | ||
1833 | |||
1834 | # qhasm: r7 = y7 | ||
1835 | # asm 1: movdqa <y7=int6464#2,>r7=int6464#3 | ||
1836 | # asm 2: movdqa <y7=%xmm1,>r7=%xmm2 | ||
1837 | movdqa %xmm1,%xmm2 | ||
1838 | |||
1839 | # qhasm: uint32323232 y7 <<= 9 | ||
1840 | # asm 1: pslld $9,<y7=int6464#2 | ||
1841 | # asm 2: pslld $9,<y7=%xmm1 | ||
1842 | pslld $9,%xmm1 | ||
1843 | |||
1844 | # qhasm: z7 ^= y7 | ||
1845 | # asm 1: pxor <y7=int6464#2,<z7=int6464#9 | ||
1846 | # asm 2: pxor <y7=%xmm1,<z7=%xmm8 | ||
1847 | pxor %xmm1,%xmm8 | ||
1848 | |||
1849 | # qhasm: uint32323232 r7 >>= 23 | ||
1850 | # asm 1: psrld $23,<r7=int6464#3 | ||
1851 | # asm 2: psrld $23,<r7=%xmm2 | ||
1852 | psrld $23,%xmm2 | ||
1853 | |||
1854 | # qhasm: z7 ^= r7 | ||
1855 | # asm 1: pxor <r7=int6464#3,<z7=int6464#9 | ||
1856 | # asm 2: pxor <r7=%xmm2,<z7=%xmm8 | ||
1857 | pxor %xmm2,%xmm8 | ||
1858 | |||
1859 | # qhasm: y3 = z1 | ||
1860 | # asm 1: movdqa <z1=int6464#8,>y3=int6464#2 | ||
1861 | # asm 2: movdqa <z1=%xmm7,>y3=%xmm1 | ||
1862 | movdqa %xmm7,%xmm1 | ||
1863 | |||
1864 | # qhasm: uint32323232 y3 += z2 | ||
1865 | # asm 1: paddd <z2=int6464#11,<y3=int6464#2 | ||
1866 | # asm 2: paddd <z2=%xmm10,<y3=%xmm1 | ||
1867 | paddd %xmm10,%xmm1 | ||
1868 | |||
1869 | # qhasm: r3 = y3 | ||
1870 | # asm 1: movdqa <y3=int6464#2,>r3=int6464#3 | ||
1871 | # asm 2: movdqa <y3=%xmm1,>r3=%xmm2 | ||
1872 | movdqa %xmm1,%xmm2 | ||
1873 | |||
1874 | # qhasm: uint32323232 y3 <<= 13 | ||
1875 | # asm 1: pslld $13,<y3=int6464#2 | ||
1876 | # asm 2: pslld $13,<y3=%xmm1 | ||
1877 | pslld $13,%xmm1 | ||
1878 | |||
1879 | # qhasm: z3 ^= y3 | ||
1880 | # asm 1: pxor <y3=int6464#2,<z3=int6464#5 | ||
1881 | # asm 2: pxor <y3=%xmm1,<z3=%xmm4 | ||
1882 | pxor %xmm1,%xmm4 | ||
1883 | |||
1884 | # qhasm: uint32323232 r3 >>= 19 | ||
1885 | # asm 1: psrld $19,<r3=int6464#3 | ||
1886 | # asm 2: psrld $19,<r3=%xmm2 | ||
1887 | psrld $19,%xmm2 | ||
1888 | |||
1889 | # qhasm: z3 ^= r3 | ||
1890 | # asm 1: pxor <r3=int6464#3,<z3=int6464#5 | ||
1891 | # asm 2: pxor <r3=%xmm2,<z3=%xmm4 | ||
1892 | pxor %xmm2,%xmm4 | ||
1893 | |||
1894 | # qhasm: y4 = z6 | ||
1895 | # asm 1: movdqa <z6=int6464#6,>y4=int6464#2 | ||
1896 | # asm 2: movdqa <z6=%xmm5,>y4=%xmm1 | ||
1897 | movdqa %xmm5,%xmm1 | ||
1898 | |||
1899 | # qhasm: uint32323232 y4 += z7 | ||
1900 | # asm 1: paddd <z7=int6464#9,<y4=int6464#2 | ||
1901 | # asm 2: paddd <z7=%xmm8,<y4=%xmm1 | ||
1902 | paddd %xmm8,%xmm1 | ||
1903 | |||
1904 | # qhasm: r4 = y4 | ||
1905 | # asm 1: movdqa <y4=int6464#2,>r4=int6464#3 | ||
1906 | # asm 2: movdqa <y4=%xmm1,>r4=%xmm2 | ||
1907 | movdqa %xmm1,%xmm2 | ||
1908 | |||
1909 | # qhasm: uint32323232 y4 <<= 13 | ||
1910 | # asm 1: pslld $13,<y4=int6464#2 | ||
1911 | # asm 2: pslld $13,<y4=%xmm1 | ||
1912 | pslld $13,%xmm1 | ||
1913 | |||
1914 | # qhasm: z4 ^= y4 | ||
1915 | # asm 1: pxor <y4=int6464#2,<z4=int6464#15 | ||
1916 | # asm 2: pxor <y4=%xmm1,<z4=%xmm14 | ||
1917 | pxor %xmm1,%xmm14 | ||
1918 | |||
1919 | # qhasm: uint32323232 r4 >>= 19 | ||
1920 | # asm 1: psrld $19,<r4=int6464#3 | ||
1921 | # asm 2: psrld $19,<r4=%xmm2 | ||
1922 | psrld $19,%xmm2 | ||
1923 | |||
1924 | # qhasm: z4 ^= r4 | ||
1925 | # asm 1: pxor <r4=int6464#3,<z4=int6464#15 | ||
1926 | # asm 2: pxor <r4=%xmm2,<z4=%xmm14 | ||
1927 | pxor %xmm2,%xmm14 | ||
1928 | |||
1929 | # qhasm: y0 = z2 | ||
1930 | # asm 1: movdqa <z2=int6464#11,>y0=int6464#2 | ||
1931 | # asm 2: movdqa <z2=%xmm10,>y0=%xmm1 | ||
1932 | movdqa %xmm10,%xmm1 | ||
1933 | |||
1934 | # qhasm: uint32323232 y0 += z3 | ||
1935 | # asm 1: paddd <z3=int6464#5,<y0=int6464#2 | ||
1936 | # asm 2: paddd <z3=%xmm4,<y0=%xmm1 | ||
1937 | paddd %xmm4,%xmm1 | ||
1938 | |||
1939 | # qhasm: r0 = y0 | ||
1940 | # asm 1: movdqa <y0=int6464#2,>r0=int6464#3 | ||
1941 | # asm 2: movdqa <y0=%xmm1,>r0=%xmm2 | ||
1942 | movdqa %xmm1,%xmm2 | ||
1943 | |||
1944 | # qhasm: uint32323232 y0 <<= 18 | ||
1945 | # asm 1: pslld $18,<y0=int6464#2 | ||
1946 | # asm 2: pslld $18,<y0=%xmm1 | ||
1947 | pslld $18,%xmm1 | ||
1948 | |||
1949 | # qhasm: z0 ^= y0 | ||
1950 | # asm 1: pxor <y0=int6464#2,<z0=int6464#1 | ||
1951 | # asm 2: pxor <y0=%xmm1,<z0=%xmm0 | ||
1952 | pxor %xmm1,%xmm0 | ||
1953 | |||
1954 | # qhasm: uint32323232 r0 >>= 14 | ||
1955 | # asm 1: psrld $14,<r0=int6464#3 | ||
1956 | # asm 2: psrld $14,<r0=%xmm2 | ||
1957 | psrld $14,%xmm2 | ||
1958 | |||
1959 | # qhasm: z0 ^= r0 | ||
1960 | # asm 1: pxor <r0=int6464#3,<z0=int6464#1 | ||
1961 | # asm 2: pxor <r0=%xmm2,<z0=%xmm0 | ||
1962 | pxor %xmm2,%xmm0 | ||
1963 | |||
1964 | # qhasm: z10 = z10_stack | ||
1965 | # asm 1: movdqa <z10_stack=stack128#21,>z10=int6464#2 | ||
1966 | # asm 2: movdqa <z10_stack=320(%rsp),>z10=%xmm1 | ||
1967 | movdqa 320(%rsp),%xmm1 | ||
1968 | |||
1969 | # qhasm: z0_stack = z0 | ||
1970 | # asm 1: movdqa <z0=int6464#1,>z0_stack=stack128#21 | ||
1971 | # asm 2: movdqa <z0=%xmm0,>z0_stack=320(%rsp) | ||
1972 | movdqa %xmm0,320(%rsp) | ||
1973 | |||
1974 | # qhasm: y5 = z7 | ||
1975 | # asm 1: movdqa <z7=int6464#9,>y5=int6464#1 | ||
1976 | # asm 2: movdqa <z7=%xmm8,>y5=%xmm0 | ||
1977 | movdqa %xmm8,%xmm0 | ||
1978 | |||
1979 | # qhasm: uint32323232 y5 += z4 | ||
1980 | # asm 1: paddd <z4=int6464#15,<y5=int6464#1 | ||
1981 | # asm 2: paddd <z4=%xmm14,<y5=%xmm0 | ||
1982 | paddd %xmm14,%xmm0 | ||
1983 | |||
1984 | # qhasm: r5 = y5 | ||
1985 | # asm 1: movdqa <y5=int6464#1,>r5=int6464#3 | ||
1986 | # asm 2: movdqa <y5=%xmm0,>r5=%xmm2 | ||
1987 | movdqa %xmm0,%xmm2 | ||
1988 | |||
1989 | # qhasm: uint32323232 y5 <<= 18 | ||
1990 | # asm 1: pslld $18,<y5=int6464#1 | ||
1991 | # asm 2: pslld $18,<y5=%xmm0 | ||
1992 | pslld $18,%xmm0 | ||
1993 | |||
1994 | # qhasm: z5 ^= y5 | ||
1995 | # asm 1: pxor <y5=int6464#1,<z5=int6464#13 | ||
1996 | # asm 2: pxor <y5=%xmm0,<z5=%xmm12 | ||
1997 | pxor %xmm0,%xmm12 | ||
1998 | |||
1999 | # qhasm: uint32323232 r5 >>= 14 | ||
2000 | # asm 1: psrld $14,<r5=int6464#3 | ||
2001 | # asm 2: psrld $14,<r5=%xmm2 | ||
2002 | psrld $14,%xmm2 | ||
2003 | |||
2004 | # qhasm: z5 ^= r5 | ||
2005 | # asm 1: pxor <r5=int6464#3,<z5=int6464#13 | ||
2006 | # asm 2: pxor <r5=%xmm2,<z5=%xmm12 | ||
2007 | pxor %xmm2,%xmm12 | ||
2008 | |||
2009 | # qhasm: y11 = z9 | ||
2010 | # asm 1: movdqa <z9=int6464#12,>y11=int6464#1 | ||
2011 | # asm 2: movdqa <z9=%xmm11,>y11=%xmm0 | ||
2012 | movdqa %xmm11,%xmm0 | ||
2013 | |||
2014 | # qhasm: uint32323232 y11 += z10 | ||
2015 | # asm 1: paddd <z10=int6464#2,<y11=int6464#1 | ||
2016 | # asm 2: paddd <z10=%xmm1,<y11=%xmm0 | ||
2017 | paddd %xmm1,%xmm0 | ||
2018 | |||
2019 | # qhasm: r11 = y11 | ||
2020 | # asm 1: movdqa <y11=int6464#1,>r11=int6464#3 | ||
2021 | # asm 2: movdqa <y11=%xmm0,>r11=%xmm2 | ||
2022 | movdqa %xmm0,%xmm2 | ||
2023 | |||
2024 | # qhasm: uint32323232 y11 <<= 7 | ||
2025 | # asm 1: pslld $7,<y11=int6464#1 | ||
2026 | # asm 2: pslld $7,<y11=%xmm0 | ||
2027 | pslld $7,%xmm0 | ||
2028 | |||
2029 | # qhasm: z11 ^= y11 | ||
2030 | # asm 1: pxor <y11=int6464#1,<z11=int6464#7 | ||
2031 | # asm 2: pxor <y11=%xmm0,<z11=%xmm6 | ||
2032 | pxor %xmm0,%xmm6 | ||
2033 | |||
2034 | # qhasm: uint32323232 r11 >>= 25 | ||
2035 | # asm 1: psrld $25,<r11=int6464#3 | ||
2036 | # asm 2: psrld $25,<r11=%xmm2 | ||
2037 | psrld $25,%xmm2 | ||
2038 | |||
2039 | # qhasm: z11 ^= r11 | ||
2040 | # asm 1: pxor <r11=int6464#3,<z11=int6464#7 | ||
2041 | # asm 2: pxor <r11=%xmm2,<z11=%xmm6 | ||
2042 | pxor %xmm2,%xmm6 | ||
2043 | |||
2044 | # qhasm: z15 = z15_stack | ||
2045 | # asm 1: movdqa <z15_stack=stack128#22,>z15=int6464#3 | ||
2046 | # asm 2: movdqa <z15_stack=336(%rsp),>z15=%xmm2 | ||
2047 | movdqa 336(%rsp),%xmm2 | ||
2048 | |||
2049 | # qhasm: z5_stack = z5 | ||
2050 | # asm 1: movdqa <z5=int6464#13,>z5_stack=stack128#22 | ||
2051 | # asm 2: movdqa <z5=%xmm12,>z5_stack=336(%rsp) | ||
2052 | movdqa %xmm12,336(%rsp) | ||
2053 | |||
2054 | # qhasm: y12 = z14 | ||
2055 | # asm 1: movdqa <z14=int6464#4,>y12=int6464#1 | ||
2056 | # asm 2: movdqa <z14=%xmm3,>y12=%xmm0 | ||
2057 | movdqa %xmm3,%xmm0 | ||
2058 | |||
2059 | # qhasm: uint32323232 y12 += z15 | ||
2060 | # asm 1: paddd <z15=int6464#3,<y12=int6464#1 | ||
2061 | # asm 2: paddd <z15=%xmm2,<y12=%xmm0 | ||
2062 | paddd %xmm2,%xmm0 | ||
2063 | |||
2064 | # qhasm: r12 = y12 | ||
2065 | # asm 1: movdqa <y12=int6464#1,>r12=int6464#13 | ||
2066 | # asm 2: movdqa <y12=%xmm0,>r12=%xmm12 | ||
2067 | movdqa %xmm0,%xmm12 | ||
2068 | |||
2069 | # qhasm: uint32323232 y12 <<= 7 | ||
2070 | # asm 1: pslld $7,<y12=int6464#1 | ||
2071 | # asm 2: pslld $7,<y12=%xmm0 | ||
2072 | pslld $7,%xmm0 | ||
2073 | |||
2074 | # qhasm: z12 ^= y12 | ||
2075 | # asm 1: pxor <y12=int6464#1,<z12=int6464#14 | ||
2076 | # asm 2: pxor <y12=%xmm0,<z12=%xmm13 | ||
2077 | pxor %xmm0,%xmm13 | ||
2078 | |||
2079 | # qhasm: uint32323232 r12 >>= 25 | ||
2080 | # asm 1: psrld $25,<r12=int6464#13 | ||
2081 | # asm 2: psrld $25,<r12=%xmm12 | ||
2082 | psrld $25,%xmm12 | ||
2083 | |||
2084 | # qhasm: z12 ^= r12 | ||
2085 | # asm 1: pxor <r12=int6464#13,<z12=int6464#14 | ||
2086 | # asm 2: pxor <r12=%xmm12,<z12=%xmm13 | ||
2087 | pxor %xmm12,%xmm13 | ||
2088 | |||
2089 | # qhasm: y8 = z10 | ||
2090 | # asm 1: movdqa <z10=int6464#2,>y8=int6464#1 | ||
2091 | # asm 2: movdqa <z10=%xmm1,>y8=%xmm0 | ||
2092 | movdqa %xmm1,%xmm0 | ||
2093 | |||
2094 | # qhasm: uint32323232 y8 += z11 | ||
2095 | # asm 1: paddd <z11=int6464#7,<y8=int6464#1 | ||
2096 | # asm 2: paddd <z11=%xmm6,<y8=%xmm0 | ||
2097 | paddd %xmm6,%xmm0 | ||
2098 | |||
2099 | # qhasm: r8 = y8 | ||
2100 | # asm 1: movdqa <y8=int6464#1,>r8=int6464#13 | ||
2101 | # asm 2: movdqa <y8=%xmm0,>r8=%xmm12 | ||
2102 | movdqa %xmm0,%xmm12 | ||
2103 | |||
2104 | # qhasm: uint32323232 y8 <<= 9 | ||
2105 | # asm 1: pslld $9,<y8=int6464#1 | ||
2106 | # asm 2: pslld $9,<y8=%xmm0 | ||
2107 | pslld $9,%xmm0 | ||
2108 | |||
2109 | # qhasm: z8 ^= y8 | ||
2110 | # asm 1: pxor <y8=int6464#1,<z8=int6464#16 | ||
2111 | # asm 2: pxor <y8=%xmm0,<z8=%xmm15 | ||
2112 | pxor %xmm0,%xmm15 | ||
2113 | |||
2114 | # qhasm: uint32323232 r8 >>= 23 | ||
2115 | # asm 1: psrld $23,<r8=int6464#13 | ||
2116 | # asm 2: psrld $23,<r8=%xmm12 | ||
2117 | psrld $23,%xmm12 | ||
2118 | |||
2119 | # qhasm: z8 ^= r8 | ||
2120 | # asm 1: pxor <r8=int6464#13,<z8=int6464#16 | ||
2121 | # asm 2: pxor <r8=%xmm12,<z8=%xmm15 | ||
2122 | pxor %xmm12,%xmm15 | ||
2123 | |||
2124 | # qhasm: y13 = z15 | ||
2125 | # asm 1: movdqa <z15=int6464#3,>y13=int6464#1 | ||
2126 | # asm 2: movdqa <z15=%xmm2,>y13=%xmm0 | ||
2127 | movdqa %xmm2,%xmm0 | ||
2128 | |||
2129 | # qhasm: uint32323232 y13 += z12 | ||
2130 | # asm 1: paddd <z12=int6464#14,<y13=int6464#1 | ||
2131 | # asm 2: paddd <z12=%xmm13,<y13=%xmm0 | ||
2132 | paddd %xmm13,%xmm0 | ||
2133 | |||
2134 | # qhasm: r13 = y13 | ||
2135 | # asm 1: movdqa <y13=int6464#1,>r13=int6464#13 | ||
2136 | # asm 2: movdqa <y13=%xmm0,>r13=%xmm12 | ||
2137 | movdqa %xmm0,%xmm12 | ||
2138 | |||
2139 | # qhasm: uint32323232 y13 <<= 9 | ||
2140 | # asm 1: pslld $9,<y13=int6464#1 | ||
2141 | # asm 2: pslld $9,<y13=%xmm0 | ||
2142 | pslld $9,%xmm0 | ||
2143 | |||
2144 | # qhasm: z13 ^= y13 | ||
2145 | # asm 1: pxor <y13=int6464#1,<z13=int6464#10 | ||
2146 | # asm 2: pxor <y13=%xmm0,<z13=%xmm9 | ||
2147 | pxor %xmm0,%xmm9 | ||
2148 | |||
2149 | # qhasm: uint32323232 r13 >>= 23 | ||
2150 | # asm 1: psrld $23,<r13=int6464#13 | ||
2151 | # asm 2: psrld $23,<r13=%xmm12 | ||
2152 | psrld $23,%xmm12 | ||
2153 | |||
2154 | # qhasm: z13 ^= r13 | ||
2155 | # asm 1: pxor <r13=int6464#13,<z13=int6464#10 | ||
2156 | # asm 2: pxor <r13=%xmm12,<z13=%xmm9 | ||
2157 | pxor %xmm12,%xmm9 | ||
2158 | |||
2159 | # qhasm: y9 = z11 | ||
2160 | # asm 1: movdqa <z11=int6464#7,>y9=int6464#1 | ||
2161 | # asm 2: movdqa <z11=%xmm6,>y9=%xmm0 | ||
2162 | movdqa %xmm6,%xmm0 | ||
2163 | |||
2164 | # qhasm: uint32323232 y9 += z8 | ||
2165 | # asm 1: paddd <z8=int6464#16,<y9=int6464#1 | ||
2166 | # asm 2: paddd <z8=%xmm15,<y9=%xmm0 | ||
2167 | paddd %xmm15,%xmm0 | ||
2168 | |||
2169 | # qhasm: r9 = y9 | ||
2170 | # asm 1: movdqa <y9=int6464#1,>r9=int6464#13 | ||
2171 | # asm 2: movdqa <y9=%xmm0,>r9=%xmm12 | ||
2172 | movdqa %xmm0,%xmm12 | ||
2173 | |||
2174 | # qhasm: uint32323232 y9 <<= 13 | ||
2175 | # asm 1: pslld $13,<y9=int6464#1 | ||
2176 | # asm 2: pslld $13,<y9=%xmm0 | ||
2177 | pslld $13,%xmm0 | ||
2178 | |||
2179 | # qhasm: z9 ^= y9 | ||
2180 | # asm 1: pxor <y9=int6464#1,<z9=int6464#12 | ||
2181 | # asm 2: pxor <y9=%xmm0,<z9=%xmm11 | ||
2182 | pxor %xmm0,%xmm11 | ||
2183 | |||
2184 | # qhasm: uint32323232 r9 >>= 19 | ||
2185 | # asm 1: psrld $19,<r9=int6464#13 | ||
2186 | # asm 2: psrld $19,<r9=%xmm12 | ||
2187 | psrld $19,%xmm12 | ||
2188 | |||
2189 | # qhasm: z9 ^= r9 | ||
2190 | # asm 1: pxor <r9=int6464#13,<z9=int6464#12 | ||
2191 | # asm 2: pxor <r9=%xmm12,<z9=%xmm11 | ||
2192 | pxor %xmm12,%xmm11 | ||
2193 | |||
2194 | # qhasm: y14 = z12 | ||
2195 | # asm 1: movdqa <z12=int6464#14,>y14=int6464#1 | ||
2196 | # asm 2: movdqa <z12=%xmm13,>y14=%xmm0 | ||
2197 | movdqa %xmm13,%xmm0 | ||
2198 | |||
2199 | # qhasm: uint32323232 y14 += z13 | ||
2200 | # asm 1: paddd <z13=int6464#10,<y14=int6464#1 | ||
2201 | # asm 2: paddd <z13=%xmm9,<y14=%xmm0 | ||
2202 | paddd %xmm9,%xmm0 | ||
2203 | |||
2204 | # qhasm: r14 = y14 | ||
2205 | # asm 1: movdqa <y14=int6464#1,>r14=int6464#13 | ||
2206 | # asm 2: movdqa <y14=%xmm0,>r14=%xmm12 | ||
2207 | movdqa %xmm0,%xmm12 | ||
2208 | |||
2209 | # qhasm: uint32323232 y14 <<= 13 | ||
2210 | # asm 1: pslld $13,<y14=int6464#1 | ||
2211 | # asm 2: pslld $13,<y14=%xmm0 | ||
2212 | pslld $13,%xmm0 | ||
2213 | |||
2214 | # qhasm: z14 ^= y14 | ||
2215 | # asm 1: pxor <y14=int6464#1,<z14=int6464#4 | ||
2216 | # asm 2: pxor <y14=%xmm0,<z14=%xmm3 | ||
2217 | pxor %xmm0,%xmm3 | ||
2218 | |||
2219 | # qhasm: uint32323232 r14 >>= 19 | ||
2220 | # asm 1: psrld $19,<r14=int6464#13 | ||
2221 | # asm 2: psrld $19,<r14=%xmm12 | ||
2222 | psrld $19,%xmm12 | ||
2223 | |||
2224 | # qhasm: z14 ^= r14 | ||
2225 | # asm 1: pxor <r14=int6464#13,<z14=int6464#4 | ||
2226 | # asm 2: pxor <r14=%xmm12,<z14=%xmm3 | ||
2227 | pxor %xmm12,%xmm3 | ||
2228 | |||
2229 | # qhasm: y10 = z8 | ||
2230 | # asm 1: movdqa <z8=int6464#16,>y10=int6464#1 | ||
2231 | # asm 2: movdqa <z8=%xmm15,>y10=%xmm0 | ||
2232 | movdqa %xmm15,%xmm0 | ||
2233 | |||
2234 | # qhasm: uint32323232 y10 += z9 | ||
2235 | # asm 1: paddd <z9=int6464#12,<y10=int6464#1 | ||
2236 | # asm 2: paddd <z9=%xmm11,<y10=%xmm0 | ||
2237 | paddd %xmm11,%xmm0 | ||
2238 | |||
2239 | # qhasm: r10 = y10 | ||
2240 | # asm 1: movdqa <y10=int6464#1,>r10=int6464#13 | ||
2241 | # asm 2: movdqa <y10=%xmm0,>r10=%xmm12 | ||
2242 | movdqa %xmm0,%xmm12 | ||
2243 | |||
2244 | # qhasm: uint32323232 y10 <<= 18 | ||
2245 | # asm 1: pslld $18,<y10=int6464#1 | ||
2246 | # asm 2: pslld $18,<y10=%xmm0 | ||
2247 | pslld $18,%xmm0 | ||
2248 | |||
2249 | # qhasm: z10 ^= y10 | ||
2250 | # asm 1: pxor <y10=int6464#1,<z10=int6464#2 | ||
2251 | # asm 2: pxor <y10=%xmm0,<z10=%xmm1 | ||
2252 | pxor %xmm0,%xmm1 | ||
2253 | |||
2254 | # qhasm: uint32323232 r10 >>= 14 | ||
2255 | # asm 1: psrld $14,<r10=int6464#13 | ||
2256 | # asm 2: psrld $14,<r10=%xmm12 | ||
2257 | psrld $14,%xmm12 | ||
2258 | |||
2259 | # qhasm: z10 ^= r10 | ||
2260 | # asm 1: pxor <r10=int6464#13,<z10=int6464#2 | ||
2261 | # asm 2: pxor <r10=%xmm12,<z10=%xmm1 | ||
2262 | pxor %xmm12,%xmm1 | ||
2263 | |||
2264 | # qhasm: y15 = z13 | ||
2265 | # asm 1: movdqa <z13=int6464#10,>y15=int6464#1 | ||
2266 | # asm 2: movdqa <z13=%xmm9,>y15=%xmm0 | ||
2267 | movdqa %xmm9,%xmm0 | ||
2268 | |||
2269 | # qhasm: uint32323232 y15 += z14 | ||
2270 | # asm 1: paddd <z14=int6464#4,<y15=int6464#1 | ||
2271 | # asm 2: paddd <z14=%xmm3,<y15=%xmm0 | ||
2272 | paddd %xmm3,%xmm0 | ||
2273 | |||
2274 | # qhasm: r15 = y15 | ||
2275 | # asm 1: movdqa <y15=int6464#1,>r15=int6464#13 | ||
2276 | # asm 2: movdqa <y15=%xmm0,>r15=%xmm12 | ||
2277 | movdqa %xmm0,%xmm12 | ||
2278 | |||
2279 | # qhasm: uint32323232 y15 <<= 18 | ||
2280 | # asm 1: pslld $18,<y15=int6464#1 | ||
2281 | # asm 2: pslld $18,<y15=%xmm0 | ||
2282 | pslld $18,%xmm0 | ||
2283 | |||
2284 | # qhasm: z15 ^= y15 | ||
2285 | # asm 1: pxor <y15=int6464#1,<z15=int6464#3 | ||
2286 | # asm 2: pxor <y15=%xmm0,<z15=%xmm2 | ||
2287 | pxor %xmm0,%xmm2 | ||
2288 | |||
2289 | # qhasm: uint32323232 r15 >>= 14 | ||
2290 | # asm 1: psrld $14,<r15=int6464#13 | ||
2291 | # asm 2: psrld $14,<r15=%xmm12 | ||
2292 | psrld $14,%xmm12 | ||
2293 | |||
2294 | # qhasm: z15 ^= r15 | ||
2295 | # asm 1: pxor <r15=int6464#13,<z15=int6464#3 | ||
2296 | # asm 2: pxor <r15=%xmm12,<z15=%xmm2 | ||
2297 | pxor %xmm12,%xmm2 | ||
2298 | |||
2299 | # qhasm: z0 = z0_stack | ||
2300 | # asm 1: movdqa <z0_stack=stack128#21,>z0=int6464#13 | ||
2301 | # asm 2: movdqa <z0_stack=320(%rsp),>z0=%xmm12 | ||
2302 | movdqa 320(%rsp),%xmm12 | ||
2303 | |||
2304 | # qhasm: z5 = z5_stack | ||
2305 | # asm 1: movdqa <z5_stack=stack128#22,>z5=int6464#1 | ||
2306 | # asm 2: movdqa <z5_stack=336(%rsp),>z5=%xmm0 | ||
2307 | movdqa 336(%rsp),%xmm0 | ||
2308 | |||
2309 | # qhasm: unsigned>? i -= 2 | ||
2310 | # asm 1: sub $2,<i=int64#3 | ||
2311 | # asm 2: sub $2,<i=%rdx | ||
2312 | sub $2,%rdx | ||
2313 | # comment:fp stack unchanged by jump | ||
2314 | |||
2315 | # qhasm: goto mainloop1 if unsigned> | ||
2316 | ja ._mainloop1 | ||
2317 | |||
2318 | # qhasm: uint32323232 z0 += orig0 | ||
2319 | # asm 1: paddd <orig0=stack128#8,<z0=int6464#13 | ||
2320 | # asm 2: paddd <orig0=112(%rsp),<z0=%xmm12 | ||
2321 | paddd 112(%rsp),%xmm12 | ||
2322 | |||
2323 | # qhasm: uint32323232 z1 += orig1 | ||
2324 | # asm 1: paddd <orig1=stack128#12,<z1=int6464#8 | ||
2325 | # asm 2: paddd <orig1=176(%rsp),<z1=%xmm7 | ||
2326 | paddd 176(%rsp),%xmm7 | ||
2327 | |||
2328 | # qhasm: uint32323232 z2 += orig2 | ||
2329 | # asm 1: paddd <orig2=stack128#15,<z2=int6464#11 | ||
2330 | # asm 2: paddd <orig2=224(%rsp),<z2=%xmm10 | ||
2331 | paddd 224(%rsp),%xmm10 | ||
2332 | |||
2333 | # qhasm: uint32323232 z3 += orig3 | ||
2334 | # asm 1: paddd <orig3=stack128#18,<z3=int6464#5 | ||
2335 | # asm 2: paddd <orig3=272(%rsp),<z3=%xmm4 | ||
2336 | paddd 272(%rsp),%xmm4 | ||
2337 | |||
2338 | # qhasm: in0 = z0 | ||
2339 | # asm 1: movd <z0=int6464#13,>in0=int64#3 | ||
2340 | # asm 2: movd <z0=%xmm12,>in0=%rdx | ||
2341 | movd %xmm12,%rdx | ||
2342 | |||
2343 | # qhasm: in1 = z1 | ||
2344 | # asm 1: movd <z1=int6464#8,>in1=int64#4 | ||
2345 | # asm 2: movd <z1=%xmm7,>in1=%rcx | ||
2346 | movd %xmm7,%rcx | ||
2347 | |||
2348 | # qhasm: in2 = z2 | ||
2349 | # asm 1: movd <z2=int6464#11,>in2=int64#5 | ||
2350 | # asm 2: movd <z2=%xmm10,>in2=%r8 | ||
2351 | movd %xmm10,%r8 | ||
2352 | |||
2353 | # qhasm: in3 = z3 | ||
2354 | # asm 1: movd <z3=int6464#5,>in3=int64#6 | ||
2355 | # asm 2: movd <z3=%xmm4,>in3=%r9 | ||
2356 | movd %xmm4,%r9 | ||
2357 | |||
2358 | # qhasm: z0 <<<= 96 | ||
2359 | # asm 1: pshufd $0x39,<z0=int6464#13,<z0=int6464#13 | ||
2360 | # asm 2: pshufd $0x39,<z0=%xmm12,<z0=%xmm12 | ||
2361 | pshufd $0x39,%xmm12,%xmm12 | ||
2362 | |||
2363 | # qhasm: z1 <<<= 96 | ||
2364 | # asm 1: pshufd $0x39,<z1=int6464#8,<z1=int6464#8 | ||
2365 | # asm 2: pshufd $0x39,<z1=%xmm7,<z1=%xmm7 | ||
2366 | pshufd $0x39,%xmm7,%xmm7 | ||
2367 | |||
2368 | # qhasm: z2 <<<= 96 | ||
2369 | # asm 1: pshufd $0x39,<z2=int6464#11,<z2=int6464#11 | ||
2370 | # asm 2: pshufd $0x39,<z2=%xmm10,<z2=%xmm10 | ||
2371 | pshufd $0x39,%xmm10,%xmm10 | ||
2372 | |||
2373 | # qhasm: z3 <<<= 96 | ||
2374 | # asm 1: pshufd $0x39,<z3=int6464#5,<z3=int6464#5 | ||
2375 | # asm 2: pshufd $0x39,<z3=%xmm4,<z3=%xmm4 | ||
2376 | pshufd $0x39,%xmm4,%xmm4 | ||
2377 | |||
2378 | # qhasm: (uint32) in0 ^= *(uint32 *) (m + 0) | ||
2379 | # asm 1: xorl 0(<m=int64#2),<in0=int64#3d | ||
2380 | # asm 2: xorl 0(<m=%rsi),<in0=%edx | ||
2381 | xorl 0(%rsi),%edx | ||
2382 | |||
2383 | # qhasm: (uint32) in1 ^= *(uint32 *) (m + 4) | ||
2384 | # asm 1: xorl 4(<m=int64#2),<in1=int64#4d | ||
2385 | # asm 2: xorl 4(<m=%rsi),<in1=%ecx | ||
2386 | xorl 4(%rsi),%ecx | ||
2387 | |||
2388 | # qhasm: (uint32) in2 ^= *(uint32 *) (m + 8) | ||
2389 | # asm 1: xorl 8(<m=int64#2),<in2=int64#5d | ||
2390 | # asm 2: xorl 8(<m=%rsi),<in2=%r8d | ||
2391 | xorl 8(%rsi),%r8d | ||
2392 | |||
2393 | # qhasm: (uint32) in3 ^= *(uint32 *) (m + 12) | ||
2394 | # asm 1: xorl 12(<m=int64#2),<in3=int64#6d | ||
2395 | # asm 2: xorl 12(<m=%rsi),<in3=%r9d | ||
2396 | xorl 12(%rsi),%r9d | ||
2397 | |||
2398 | # qhasm: *(uint32 *) (out + 0) = in0 | ||
2399 | # asm 1: movl <in0=int64#3d,0(<out=int64#1) | ||
2400 | # asm 2: movl <in0=%edx,0(<out=%rdi) | ||
2401 | movl %edx,0(%rdi) | ||
2402 | |||
2403 | # qhasm: *(uint32 *) (out + 4) = in1 | ||
2404 | # asm 1: movl <in1=int64#4d,4(<out=int64#1) | ||
2405 | # asm 2: movl <in1=%ecx,4(<out=%rdi) | ||
2406 | movl %ecx,4(%rdi) | ||
2407 | |||
2408 | # qhasm: *(uint32 *) (out + 8) = in2 | ||
2409 | # asm 1: movl <in2=int64#5d,8(<out=int64#1) | ||
2410 | # asm 2: movl <in2=%r8d,8(<out=%rdi) | ||
2411 | movl %r8d,8(%rdi) | ||
2412 | |||
2413 | # qhasm: *(uint32 *) (out + 12) = in3 | ||
2414 | # asm 1: movl <in3=int64#6d,12(<out=int64#1) | ||
2415 | # asm 2: movl <in3=%r9d,12(<out=%rdi) | ||
2416 | movl %r9d,12(%rdi) | ||
2417 | |||
2418 | # qhasm: in0 = z0 | ||
2419 | # asm 1: movd <z0=int6464#13,>in0=int64#3 | ||
2420 | # asm 2: movd <z0=%xmm12,>in0=%rdx | ||
2421 | movd %xmm12,%rdx | ||
2422 | |||
2423 | # qhasm: in1 = z1 | ||
2424 | # asm 1: movd <z1=int6464#8,>in1=int64#4 | ||
2425 | # asm 2: movd <z1=%xmm7,>in1=%rcx | ||
2426 | movd %xmm7,%rcx | ||
2427 | |||
2428 | # qhasm: in2 = z2 | ||
2429 | # asm 1: movd <z2=int6464#11,>in2=int64#5 | ||
2430 | # asm 2: movd <z2=%xmm10,>in2=%r8 | ||
2431 | movd %xmm10,%r8 | ||
2432 | |||
2433 | # qhasm: in3 = z3 | ||
2434 | # asm 1: movd <z3=int6464#5,>in3=int64#6 | ||
2435 | # asm 2: movd <z3=%xmm4,>in3=%r9 | ||
2436 | movd %xmm4,%r9 | ||
2437 | |||
2438 | # qhasm: z0 <<<= 96 | ||
2439 | # asm 1: pshufd $0x39,<z0=int6464#13,<z0=int6464#13 | ||
2440 | # asm 2: pshufd $0x39,<z0=%xmm12,<z0=%xmm12 | ||
2441 | pshufd $0x39,%xmm12,%xmm12 | ||
2442 | |||
2443 | # qhasm: z1 <<<= 96 | ||
2444 | # asm 1: pshufd $0x39,<z1=int6464#8,<z1=int6464#8 | ||
2445 | # asm 2: pshufd $0x39,<z1=%xmm7,<z1=%xmm7 | ||
2446 | pshufd $0x39,%xmm7,%xmm7 | ||
2447 | |||
2448 | # qhasm: z2 <<<= 96 | ||
2449 | # asm 1: pshufd $0x39,<z2=int6464#11,<z2=int6464#11 | ||
2450 | # asm 2: pshufd $0x39,<z2=%xmm10,<z2=%xmm10 | ||
2451 | pshufd $0x39,%xmm10,%xmm10 | ||
2452 | |||
2453 | # qhasm: z3 <<<= 96 | ||
2454 | # asm 1: pshufd $0x39,<z3=int6464#5,<z3=int6464#5 | ||
2455 | # asm 2: pshufd $0x39,<z3=%xmm4,<z3=%xmm4 | ||
2456 | pshufd $0x39,%xmm4,%xmm4 | ||
2457 | |||
2458 | # qhasm: (uint32) in0 ^= *(uint32 *) (m + 64) | ||
2459 | # asm 1: xorl 64(<m=int64#2),<in0=int64#3d | ||
2460 | # asm 2: xorl 64(<m=%rsi),<in0=%edx | ||
2461 | xorl 64(%rsi),%edx | ||
2462 | |||
2463 | # qhasm: (uint32) in1 ^= *(uint32 *) (m + 68) | ||
2464 | # asm 1: xorl 68(<m=int64#2),<in1=int64#4d | ||
2465 | # asm 2: xorl 68(<m=%rsi),<in1=%ecx | ||
2466 | xorl 68(%rsi),%ecx | ||
2467 | |||
2468 | # qhasm: (uint32) in2 ^= *(uint32 *) (m + 72) | ||
2469 | # asm 1: xorl 72(<m=int64#2),<in2=int64#5d | ||
2470 | # asm 2: xorl 72(<m=%rsi),<in2=%r8d | ||
2471 | xorl 72(%rsi),%r8d | ||
2472 | |||
2473 | # qhasm: (uint32) in3 ^= *(uint32 *) (m + 76) | ||
2474 | # asm 1: xorl 76(<m=int64#2),<in3=int64#6d | ||
2475 | # asm 2: xorl 76(<m=%rsi),<in3=%r9d | ||
2476 | xorl 76(%rsi),%r9d | ||
2477 | |||
2478 | # qhasm: *(uint32 *) (out + 64) = in0 | ||
2479 | # asm 1: movl <in0=int64#3d,64(<out=int64#1) | ||
2480 | # asm 2: movl <in0=%edx,64(<out=%rdi) | ||
2481 | movl %edx,64(%rdi) | ||
2482 | |||
2483 | # qhasm: *(uint32 *) (out + 68) = in1 | ||
2484 | # asm 1: movl <in1=int64#4d,68(<out=int64#1) | ||
2485 | # asm 2: movl <in1=%ecx,68(<out=%rdi) | ||
2486 | movl %ecx,68(%rdi) | ||
2487 | |||
2488 | # qhasm: *(uint32 *) (out + 72) = in2 | ||
2489 | # asm 1: movl <in2=int64#5d,72(<out=int64#1) | ||
2490 | # asm 2: movl <in2=%r8d,72(<out=%rdi) | ||
2491 | movl %r8d,72(%rdi) | ||
2492 | |||
2493 | # qhasm: *(uint32 *) (out + 76) = in3 | ||
2494 | # asm 1: movl <in3=int64#6d,76(<out=int64#1) | ||
2495 | # asm 2: movl <in3=%r9d,76(<out=%rdi) | ||
2496 | movl %r9d,76(%rdi) | ||
2497 | |||
2498 | # qhasm: in0 = z0 | ||
2499 | # asm 1: movd <z0=int6464#13,>in0=int64#3 | ||
2500 | # asm 2: movd <z0=%xmm12,>in0=%rdx | ||
2501 | movd %xmm12,%rdx | ||
2502 | |||
2503 | # qhasm: in1 = z1 | ||
2504 | # asm 1: movd <z1=int6464#8,>in1=int64#4 | ||
2505 | # asm 2: movd <z1=%xmm7,>in1=%rcx | ||
2506 | movd %xmm7,%rcx | ||
2507 | |||
2508 | # qhasm: in2 = z2 | ||
2509 | # asm 1: movd <z2=int6464#11,>in2=int64#5 | ||
2510 | # asm 2: movd <z2=%xmm10,>in2=%r8 | ||
2511 | movd %xmm10,%r8 | ||
2512 | |||
2513 | # qhasm: in3 = z3 | ||
2514 | # asm 1: movd <z3=int6464#5,>in3=int64#6 | ||
2515 | # asm 2: movd <z3=%xmm4,>in3=%r9 | ||
2516 | movd %xmm4,%r9 | ||
2517 | |||
2518 | # qhasm: z0 <<<= 96 | ||
2519 | # asm 1: pshufd $0x39,<z0=int6464#13,<z0=int6464#13 | ||
2520 | # asm 2: pshufd $0x39,<z0=%xmm12,<z0=%xmm12 | ||
2521 | pshufd $0x39,%xmm12,%xmm12 | ||
2522 | |||
2523 | # qhasm: z1 <<<= 96 | ||
2524 | # asm 1: pshufd $0x39,<z1=int6464#8,<z1=int6464#8 | ||
2525 | # asm 2: pshufd $0x39,<z1=%xmm7,<z1=%xmm7 | ||
2526 | pshufd $0x39,%xmm7,%xmm7 | ||
2527 | |||
2528 | # qhasm: z2 <<<= 96 | ||
2529 | # asm 1: pshufd $0x39,<z2=int6464#11,<z2=int6464#11 | ||
2530 | # asm 2: pshufd $0x39,<z2=%xmm10,<z2=%xmm10 | ||
2531 | pshufd $0x39,%xmm10,%xmm10 | ||
2532 | |||
2533 | # qhasm: z3 <<<= 96 | ||
2534 | # asm 1: pshufd $0x39,<z3=int6464#5,<z3=int6464#5 | ||
2535 | # asm 2: pshufd $0x39,<z3=%xmm4,<z3=%xmm4 | ||
2536 | pshufd $0x39,%xmm4,%xmm4 | ||
2537 | |||
2538 | # qhasm: (uint32) in0 ^= *(uint32 *) (m + 128) | ||
2539 | # asm 1: xorl 128(<m=int64#2),<in0=int64#3d | ||
2540 | # asm 2: xorl 128(<m=%rsi),<in0=%edx | ||
2541 | xorl 128(%rsi),%edx | ||
2542 | |||
2543 | # qhasm: (uint32) in1 ^= *(uint32 *) (m + 132) | ||
2544 | # asm 1: xorl 132(<m=int64#2),<in1=int64#4d | ||
2545 | # asm 2: xorl 132(<m=%rsi),<in1=%ecx | ||
2546 | xorl 132(%rsi),%ecx | ||
2547 | |||
2548 | # qhasm: (uint32) in2 ^= *(uint32 *) (m + 136) | ||
2549 | # asm 1: xorl 136(<m=int64#2),<in2=int64#5d | ||
2550 | # asm 2: xorl 136(<m=%rsi),<in2=%r8d | ||
2551 | xorl 136(%rsi),%r8d | ||
2552 | |||
2553 | # qhasm: (uint32) in3 ^= *(uint32 *) (m + 140) | ||
2554 | # asm 1: xorl 140(<m=int64#2),<in3=int64#6d | ||
2555 | # asm 2: xorl 140(<m=%rsi),<in3=%r9d | ||
2556 | xorl 140(%rsi),%r9d | ||
2557 | |||
2558 | # qhasm: *(uint32 *) (out + 128) = in0 | ||
2559 | # asm 1: movl <in0=int64#3d,128(<out=int64#1) | ||
2560 | # asm 2: movl <in0=%edx,128(<out=%rdi) | ||
2561 | movl %edx,128(%rdi) | ||
2562 | |||
2563 | # qhasm: *(uint32 *) (out + 132) = in1 | ||
2564 | # asm 1: movl <in1=int64#4d,132(<out=int64#1) | ||
2565 | # asm 2: movl <in1=%ecx,132(<out=%rdi) | ||
2566 | movl %ecx,132(%rdi) | ||
2567 | |||
2568 | # qhasm: *(uint32 *) (out + 136) = in2 | ||
2569 | # asm 1: movl <in2=int64#5d,136(<out=int64#1) | ||
2570 | # asm 2: movl <in2=%r8d,136(<out=%rdi) | ||
2571 | movl %r8d,136(%rdi) | ||
2572 | |||
2573 | # qhasm: *(uint32 *) (out + 140) = in3 | ||
2574 | # asm 1: movl <in3=int64#6d,140(<out=int64#1) | ||
2575 | # asm 2: movl <in3=%r9d,140(<out=%rdi) | ||
2576 | movl %r9d,140(%rdi) | ||
2577 | |||
2578 | # qhasm: in0 = z0 | ||
2579 | # asm 1: movd <z0=int6464#13,>in0=int64#3 | ||
2580 | # asm 2: movd <z0=%xmm12,>in0=%rdx | ||
2581 | movd %xmm12,%rdx | ||
2582 | |||
2583 | # qhasm: in1 = z1 | ||
2584 | # asm 1: movd <z1=int6464#8,>in1=int64#4 | ||
2585 | # asm 2: movd <z1=%xmm7,>in1=%rcx | ||
2586 | movd %xmm7,%rcx | ||
2587 | |||
2588 | # qhasm: in2 = z2 | ||
2589 | # asm 1: movd <z2=int6464#11,>in2=int64#5 | ||
2590 | # asm 2: movd <z2=%xmm10,>in2=%r8 | ||
2591 | movd %xmm10,%r8 | ||
2592 | |||
2593 | # qhasm: in3 = z3 | ||
2594 | # asm 1: movd <z3=int6464#5,>in3=int64#6 | ||
2595 | # asm 2: movd <z3=%xmm4,>in3=%r9 | ||
2596 | movd %xmm4,%r9 | ||
2597 | |||
2598 | # qhasm: (uint32) in0 ^= *(uint32 *) (m + 192) | ||
2599 | # asm 1: xorl 192(<m=int64#2),<in0=int64#3d | ||
2600 | # asm 2: xorl 192(<m=%rsi),<in0=%edx | ||
2601 | xorl 192(%rsi),%edx | ||
2602 | |||
2603 | # qhasm: (uint32) in1 ^= *(uint32 *) (m + 196) | ||
2604 | # asm 1: xorl 196(<m=int64#2),<in1=int64#4d | ||
2605 | # asm 2: xorl 196(<m=%rsi),<in1=%ecx | ||
2606 | xorl 196(%rsi),%ecx | ||
2607 | |||
2608 | # qhasm: (uint32) in2 ^= *(uint32 *) (m + 200) | ||
2609 | # asm 1: xorl 200(<m=int64#2),<in2=int64#5d | ||
2610 | # asm 2: xorl 200(<m=%rsi),<in2=%r8d | ||
2611 | xorl 200(%rsi),%r8d | ||
2612 | |||
2613 | # qhasm: (uint32) in3 ^= *(uint32 *) (m + 204) | ||
2614 | # asm 1: xorl 204(<m=int64#2),<in3=int64#6d | ||
2615 | # asm 2: xorl 204(<m=%rsi),<in3=%r9d | ||
2616 | xorl 204(%rsi),%r9d | ||
2617 | |||
2618 | # qhasm: *(uint32 *) (out + 192) = in0 | ||
2619 | # asm 1: movl <in0=int64#3d,192(<out=int64#1) | ||
2620 | # asm 2: movl <in0=%edx,192(<out=%rdi) | ||
2621 | movl %edx,192(%rdi) | ||
2622 | |||
2623 | # qhasm: *(uint32 *) (out + 196) = in1 | ||
2624 | # asm 1: movl <in1=int64#4d,196(<out=int64#1) | ||
2625 | # asm 2: movl <in1=%ecx,196(<out=%rdi) | ||
2626 | movl %ecx,196(%rdi) | ||
2627 | |||
2628 | # qhasm: *(uint32 *) (out + 200) = in2 | ||
2629 | # asm 1: movl <in2=int64#5d,200(<out=int64#1) | ||
2630 | # asm 2: movl <in2=%r8d,200(<out=%rdi) | ||
2631 | movl %r8d,200(%rdi) | ||
2632 | |||
2633 | # qhasm: *(uint32 *) (out + 204) = in3 | ||
2634 | # asm 1: movl <in3=int64#6d,204(<out=int64#1) | ||
2635 | # asm 2: movl <in3=%r9d,204(<out=%rdi) | ||
2636 | movl %r9d,204(%rdi) | ||
2637 | |||
2638 | # qhasm: uint32323232 z4 += orig4 | ||
2639 | # asm 1: paddd <orig4=stack128#16,<z4=int6464#15 | ||
2640 | # asm 2: paddd <orig4=240(%rsp),<z4=%xmm14 | ||
2641 | paddd 240(%rsp),%xmm14 | ||
2642 | |||
2643 | # qhasm: uint32323232 z5 += orig5 | ||
2644 | # asm 1: paddd <orig5=stack128#5,<z5=int6464#1 | ||
2645 | # asm 2: paddd <orig5=64(%rsp),<z5=%xmm0 | ||
2646 | paddd 64(%rsp),%xmm0 | ||
2647 | |||
2648 | # qhasm: uint32323232 z6 += orig6 | ||
2649 | # asm 1: paddd <orig6=stack128#9,<z6=int6464#6 | ||
2650 | # asm 2: paddd <orig6=128(%rsp),<z6=%xmm5 | ||
2651 | paddd 128(%rsp),%xmm5 | ||
2652 | |||
2653 | # qhasm: uint32323232 z7 += orig7 | ||
2654 | # asm 1: paddd <orig7=stack128#13,<z7=int6464#9 | ||
2655 | # asm 2: paddd <orig7=192(%rsp),<z7=%xmm8 | ||
2656 | paddd 192(%rsp),%xmm8 | ||
2657 | |||
2658 | # qhasm: in4 = z4 | ||
2659 | # asm 1: movd <z4=int6464#15,>in4=int64#3 | ||
2660 | # asm 2: movd <z4=%xmm14,>in4=%rdx | ||
2661 | movd %xmm14,%rdx | ||
2662 | |||
2663 | # qhasm: in5 = z5 | ||
2664 | # asm 1: movd <z5=int6464#1,>in5=int64#4 | ||
2665 | # asm 2: movd <z5=%xmm0,>in5=%rcx | ||
2666 | movd %xmm0,%rcx | ||
2667 | |||
2668 | # qhasm: in6 = z6 | ||
2669 | # asm 1: movd <z6=int6464#6,>in6=int64#5 | ||
2670 | # asm 2: movd <z6=%xmm5,>in6=%r8 | ||
2671 | movd %xmm5,%r8 | ||
2672 | |||
2673 | # qhasm: in7 = z7 | ||
2674 | # asm 1: movd <z7=int6464#9,>in7=int64#6 | ||
2675 | # asm 2: movd <z7=%xmm8,>in7=%r9 | ||
2676 | movd %xmm8,%r9 | ||
2677 | |||
2678 | # qhasm: z4 <<<= 96 | ||
2679 | # asm 1: pshufd $0x39,<z4=int6464#15,<z4=int6464#15 | ||
2680 | # asm 2: pshufd $0x39,<z4=%xmm14,<z4=%xmm14 | ||
2681 | pshufd $0x39,%xmm14,%xmm14 | ||
2682 | |||
2683 | # qhasm: z5 <<<= 96 | ||
2684 | # asm 1: pshufd $0x39,<z5=int6464#1,<z5=int6464#1 | ||
2685 | # asm 2: pshufd $0x39,<z5=%xmm0,<z5=%xmm0 | ||
2686 | pshufd $0x39,%xmm0,%xmm0 | ||
2687 | |||
2688 | # qhasm: z6 <<<= 96 | ||
2689 | # asm 1: pshufd $0x39,<z6=int6464#6,<z6=int6464#6 | ||
2690 | # asm 2: pshufd $0x39,<z6=%xmm5,<z6=%xmm5 | ||
2691 | pshufd $0x39,%xmm5,%xmm5 | ||
2692 | |||
2693 | # qhasm: z7 <<<= 96 | ||
2694 | # asm 1: pshufd $0x39,<z7=int6464#9,<z7=int6464#9 | ||
2695 | # asm 2: pshufd $0x39,<z7=%xmm8,<z7=%xmm8 | ||
2696 | pshufd $0x39,%xmm8,%xmm8 | ||
2697 | |||
2698 | # qhasm: (uint32) in4 ^= *(uint32 *) (m + 16) | ||
2699 | # asm 1: xorl 16(<m=int64#2),<in4=int64#3d | ||
2700 | # asm 2: xorl 16(<m=%rsi),<in4=%edx | ||
2701 | xorl 16(%rsi),%edx | ||
2702 | |||
2703 | # qhasm: (uint32) in5 ^= *(uint32 *) (m + 20) | ||
2704 | # asm 1: xorl 20(<m=int64#2),<in5=int64#4d | ||
2705 | # asm 2: xorl 20(<m=%rsi),<in5=%ecx | ||
2706 | xorl 20(%rsi),%ecx | ||
2707 | |||
2708 | # qhasm: (uint32) in6 ^= *(uint32 *) (m + 24) | ||
2709 | # asm 1: xorl 24(<m=int64#2),<in6=int64#5d | ||
2710 | # asm 2: xorl 24(<m=%rsi),<in6=%r8d | ||
2711 | xorl 24(%rsi),%r8d | ||
2712 | |||
2713 | # qhasm: (uint32) in7 ^= *(uint32 *) (m + 28) | ||
2714 | # asm 1: xorl 28(<m=int64#2),<in7=int64#6d | ||
2715 | # asm 2: xorl 28(<m=%rsi),<in7=%r9d | ||
2716 | xorl 28(%rsi),%r9d | ||
2717 | |||
2718 | # qhasm: *(uint32 *) (out + 16) = in4 | ||
2719 | # asm 1: movl <in4=int64#3d,16(<out=int64#1) | ||
2720 | # asm 2: movl <in4=%edx,16(<out=%rdi) | ||
2721 | movl %edx,16(%rdi) | ||
2722 | |||
2723 | # qhasm: *(uint32 *) (out + 20) = in5 | ||
2724 | # asm 1: movl <in5=int64#4d,20(<out=int64#1) | ||
2725 | # asm 2: movl <in5=%ecx,20(<out=%rdi) | ||
2726 | movl %ecx,20(%rdi) | ||
2727 | |||
2728 | # qhasm: *(uint32 *) (out + 24) = in6 | ||
2729 | # asm 1: movl <in6=int64#5d,24(<out=int64#1) | ||
2730 | # asm 2: movl <in6=%r8d,24(<out=%rdi) | ||
2731 | movl %r8d,24(%rdi) | ||
2732 | |||
2733 | # qhasm: *(uint32 *) (out + 28) = in7 | ||
2734 | # asm 1: movl <in7=int64#6d,28(<out=int64#1) | ||
2735 | # asm 2: movl <in7=%r9d,28(<out=%rdi) | ||
2736 | movl %r9d,28(%rdi) | ||
2737 | |||
2738 | # qhasm: in4 = z4 | ||
2739 | # asm 1: movd <z4=int6464#15,>in4=int64#3 | ||
2740 | # asm 2: movd <z4=%xmm14,>in4=%rdx | ||
2741 | movd %xmm14,%rdx | ||
2742 | |||
2743 | # qhasm: in5 = z5 | ||
2744 | # asm 1: movd <z5=int6464#1,>in5=int64#4 | ||
2745 | # asm 2: movd <z5=%xmm0,>in5=%rcx | ||
2746 | movd %xmm0,%rcx | ||
2747 | |||
2748 | # qhasm: in6 = z6 | ||
2749 | # asm 1: movd <z6=int6464#6,>in6=int64#5 | ||
2750 | # asm 2: movd <z6=%xmm5,>in6=%r8 | ||
2751 | movd %xmm5,%r8 | ||
2752 | |||
2753 | # qhasm: in7 = z7 | ||
2754 | # asm 1: movd <z7=int6464#9,>in7=int64#6 | ||
2755 | # asm 2: movd <z7=%xmm8,>in7=%r9 | ||
2756 | movd %xmm8,%r9 | ||
2757 | |||
2758 | # qhasm: z4 <<<= 96 | ||
2759 | # asm 1: pshufd $0x39,<z4=int6464#15,<z4=int6464#15 | ||
2760 | # asm 2: pshufd $0x39,<z4=%xmm14,<z4=%xmm14 | ||
2761 | pshufd $0x39,%xmm14,%xmm14 | ||
2762 | |||
2763 | # qhasm: z5 <<<= 96 | ||
2764 | # asm 1: pshufd $0x39,<z5=int6464#1,<z5=int6464#1 | ||
2765 | # asm 2: pshufd $0x39,<z5=%xmm0,<z5=%xmm0 | ||
2766 | pshufd $0x39,%xmm0,%xmm0 | ||
2767 | |||
2768 | # qhasm: z6 <<<= 96 | ||
2769 | # asm 1: pshufd $0x39,<z6=int6464#6,<z6=int6464#6 | ||
2770 | # asm 2: pshufd $0x39,<z6=%xmm5,<z6=%xmm5 | ||
2771 | pshufd $0x39,%xmm5,%xmm5 | ||
2772 | |||
2773 | # qhasm: z7 <<<= 96 | ||
2774 | # asm 1: pshufd $0x39,<z7=int6464#9,<z7=int6464#9 | ||
2775 | # asm 2: pshufd $0x39,<z7=%xmm8,<z7=%xmm8 | ||
2776 | pshufd $0x39,%xmm8,%xmm8 | ||
2777 | |||
2778 | # qhasm: (uint32) in4 ^= *(uint32 *) (m + 80) | ||
2779 | # asm 1: xorl 80(<m=int64#2),<in4=int64#3d | ||
2780 | # asm 2: xorl 80(<m=%rsi),<in4=%edx | ||
2781 | xorl 80(%rsi),%edx | ||
2782 | |||
2783 | # qhasm: (uint32) in5 ^= *(uint32 *) (m + 84) | ||
2784 | # asm 1: xorl 84(<m=int64#2),<in5=int64#4d | ||
2785 | # asm 2: xorl 84(<m=%rsi),<in5=%ecx | ||
2786 | xorl 84(%rsi),%ecx | ||
2787 | |||
2788 | # qhasm: (uint32) in6 ^= *(uint32 *) (m + 88) | ||
2789 | # asm 1: xorl 88(<m=int64#2),<in6=int64#5d | ||
2790 | # asm 2: xorl 88(<m=%rsi),<in6=%r8d | ||
2791 | xorl 88(%rsi),%r8d | ||
2792 | |||
2793 | # qhasm: (uint32) in7 ^= *(uint32 *) (m + 92) | ||
2794 | # asm 1: xorl 92(<m=int64#2),<in7=int64#6d | ||
2795 | # asm 2: xorl 92(<m=%rsi),<in7=%r9d | ||
2796 | xorl 92(%rsi),%r9d | ||
2797 | |||
2798 | # qhasm: *(uint32 *) (out + 80) = in4 | ||
2799 | # asm 1: movl <in4=int64#3d,80(<out=int64#1) | ||
2800 | # asm 2: movl <in4=%edx,80(<out=%rdi) | ||
2801 | movl %edx,80(%rdi) | ||
2802 | |||
2803 | # qhasm: *(uint32 *) (out + 84) = in5 | ||
2804 | # asm 1: movl <in5=int64#4d,84(<out=int64#1) | ||
2805 | # asm 2: movl <in5=%ecx,84(<out=%rdi) | ||
2806 | movl %ecx,84(%rdi) | ||
2807 | |||
2808 | # qhasm: *(uint32 *) (out + 88) = in6 | ||
2809 | # asm 1: movl <in6=int64#5d,88(<out=int64#1) | ||
2810 | # asm 2: movl <in6=%r8d,88(<out=%rdi) | ||
2811 | movl %r8d,88(%rdi) | ||
2812 | |||
2813 | # qhasm: *(uint32 *) (out + 92) = in7 | ||
2814 | # asm 1: movl <in7=int64#6d,92(<out=int64#1) | ||
2815 | # asm 2: movl <in7=%r9d,92(<out=%rdi) | ||
2816 | movl %r9d,92(%rdi) | ||
2817 | |||
2818 | # qhasm: in4 = z4 | ||
2819 | # asm 1: movd <z4=int6464#15,>in4=int64#3 | ||
2820 | # asm 2: movd <z4=%xmm14,>in4=%rdx | ||
2821 | movd %xmm14,%rdx | ||
2822 | |||
2823 | # qhasm: in5 = z5 | ||
2824 | # asm 1: movd <z5=int6464#1,>in5=int64#4 | ||
2825 | # asm 2: movd <z5=%xmm0,>in5=%rcx | ||
2826 | movd %xmm0,%rcx | ||
2827 | |||
2828 | # qhasm: in6 = z6 | ||
2829 | # asm 1: movd <z6=int6464#6,>in6=int64#5 | ||
2830 | # asm 2: movd <z6=%xmm5,>in6=%r8 | ||
2831 | movd %xmm5,%r8 | ||
2832 | |||
2833 | # qhasm: in7 = z7 | ||
2834 | # asm 1: movd <z7=int6464#9,>in7=int64#6 | ||
2835 | # asm 2: movd <z7=%xmm8,>in7=%r9 | ||
2836 | movd %xmm8,%r9 | ||
2837 | |||
2838 | # qhasm: z4 <<<= 96 | ||
2839 | # asm 1: pshufd $0x39,<z4=int6464#15,<z4=int6464#15 | ||
2840 | # asm 2: pshufd $0x39,<z4=%xmm14,<z4=%xmm14 | ||
2841 | pshufd $0x39,%xmm14,%xmm14 | ||
2842 | |||
2843 | # qhasm: z5 <<<= 96 | ||
2844 | # asm 1: pshufd $0x39,<z5=int6464#1,<z5=int6464#1 | ||
2845 | # asm 2: pshufd $0x39,<z5=%xmm0,<z5=%xmm0 | ||
2846 | pshufd $0x39,%xmm0,%xmm0 | ||
2847 | |||
2848 | # qhasm: z6 <<<= 96 | ||
2849 | # asm 1: pshufd $0x39,<z6=int6464#6,<z6=int6464#6 | ||
2850 | # asm 2: pshufd $0x39,<z6=%xmm5,<z6=%xmm5 | ||
2851 | pshufd $0x39,%xmm5,%xmm5 | ||
2852 | |||
2853 | # qhasm: z7 <<<= 96 | ||
2854 | # asm 1: pshufd $0x39,<z7=int6464#9,<z7=int6464#9 | ||
2855 | # asm 2: pshufd $0x39,<z7=%xmm8,<z7=%xmm8 | ||
2856 | pshufd $0x39,%xmm8,%xmm8 | ||
2857 | |||
2858 | # qhasm: (uint32) in4 ^= *(uint32 *) (m + 144) | ||
2859 | # asm 1: xorl 144(<m=int64#2),<in4=int64#3d | ||
2860 | # asm 2: xorl 144(<m=%rsi),<in4=%edx | ||
2861 | xorl 144(%rsi),%edx | ||
2862 | |||
2863 | # qhasm: (uint32) in5 ^= *(uint32 *) (m + 148) | ||
2864 | # asm 1: xorl 148(<m=int64#2),<in5=int64#4d | ||
2865 | # asm 2: xorl 148(<m=%rsi),<in5=%ecx | ||
2866 | xorl 148(%rsi),%ecx | ||
2867 | |||
2868 | # qhasm: (uint32) in6 ^= *(uint32 *) (m + 152) | ||
2869 | # asm 1: xorl 152(<m=int64#2),<in6=int64#5d | ||
2870 | # asm 2: xorl 152(<m=%rsi),<in6=%r8d | ||
2871 | xorl 152(%rsi),%r8d | ||
2872 | |||
2873 | # qhasm: (uint32) in7 ^= *(uint32 *) (m + 156) | ||
2874 | # asm 1: xorl 156(<m=int64#2),<in7=int64#6d | ||
2875 | # asm 2: xorl 156(<m=%rsi),<in7=%r9d | ||
2876 | xorl 156(%rsi),%r9d | ||
2877 | |||
2878 | # qhasm: *(uint32 *) (out + 144) = in4 | ||
2879 | # asm 1: movl <in4=int64#3d,144(<out=int64#1) | ||
2880 | # asm 2: movl <in4=%edx,144(<out=%rdi) | ||
2881 | movl %edx,144(%rdi) | ||
2882 | |||
2883 | # qhasm: *(uint32 *) (out + 148) = in5 | ||
2884 | # asm 1: movl <in5=int64#4d,148(<out=int64#1) | ||
2885 | # asm 2: movl <in5=%ecx,148(<out=%rdi) | ||
2886 | movl %ecx,148(%rdi) | ||
2887 | |||
2888 | # qhasm: *(uint32 *) (out + 152) = in6 | ||
2889 | # asm 1: movl <in6=int64#5d,152(<out=int64#1) | ||
2890 | # asm 2: movl <in6=%r8d,152(<out=%rdi) | ||
2891 | movl %r8d,152(%rdi) | ||
2892 | |||
2893 | # qhasm: *(uint32 *) (out + 156) = in7 | ||
2894 | # asm 1: movl <in7=int64#6d,156(<out=int64#1) | ||
2895 | # asm 2: movl <in7=%r9d,156(<out=%rdi) | ||
2896 | movl %r9d,156(%rdi) | ||
2897 | |||
2898 | # qhasm: in4 = z4 | ||
2899 | # asm 1: movd <z4=int6464#15,>in4=int64#3 | ||
2900 | # asm 2: movd <z4=%xmm14,>in4=%rdx | ||
2901 | movd %xmm14,%rdx | ||
2902 | |||
2903 | # qhasm: in5 = z5 | ||
2904 | # asm 1: movd <z5=int6464#1,>in5=int64#4 | ||
2905 | # asm 2: movd <z5=%xmm0,>in5=%rcx | ||
2906 | movd %xmm0,%rcx | ||
2907 | |||
2908 | # qhasm: in6 = z6 | ||
2909 | # asm 1: movd <z6=int6464#6,>in6=int64#5 | ||
2910 | # asm 2: movd <z6=%xmm5,>in6=%r8 | ||
2911 | movd %xmm5,%r8 | ||
2912 | |||
2913 | # qhasm: in7 = z7 | ||
2914 | # asm 1: movd <z7=int6464#9,>in7=int64#6 | ||
2915 | # asm 2: movd <z7=%xmm8,>in7=%r9 | ||
2916 | movd %xmm8,%r9 | ||
2917 | |||
2918 | # qhasm: (uint32) in4 ^= *(uint32 *) (m + 208) | ||
2919 | # asm 1: xorl 208(<m=int64#2),<in4=int64#3d | ||
2920 | # asm 2: xorl 208(<m=%rsi),<in4=%edx | ||
2921 | xorl 208(%rsi),%edx | ||
2922 | |||
2923 | # qhasm: (uint32) in5 ^= *(uint32 *) (m + 212) | ||
2924 | # asm 1: xorl 212(<m=int64#2),<in5=int64#4d | ||
2925 | # asm 2: xorl 212(<m=%rsi),<in5=%ecx | ||
2926 | xorl 212(%rsi),%ecx | ||
2927 | |||
2928 | # qhasm: (uint32) in6 ^= *(uint32 *) (m + 216) | ||
2929 | # asm 1: xorl 216(<m=int64#2),<in6=int64#5d | ||
2930 | # asm 2: xorl 216(<m=%rsi),<in6=%r8d | ||
2931 | xorl 216(%rsi),%r8d | ||
2932 | |||
2933 | # qhasm: (uint32) in7 ^= *(uint32 *) (m + 220) | ||
2934 | # asm 1: xorl 220(<m=int64#2),<in7=int64#6d | ||
2935 | # asm 2: xorl 220(<m=%rsi),<in7=%r9d | ||
2936 | xorl 220(%rsi),%r9d | ||
2937 | |||
2938 | # qhasm: *(uint32 *) (out + 208) = in4 | ||
2939 | # asm 1: movl <in4=int64#3d,208(<out=int64#1) | ||
2940 | # asm 2: movl <in4=%edx,208(<out=%rdi) | ||
2941 | movl %edx,208(%rdi) | ||
2942 | |||
2943 | # qhasm: *(uint32 *) (out + 212) = in5 | ||
2944 | # asm 1: movl <in5=int64#4d,212(<out=int64#1) | ||
2945 | # asm 2: movl <in5=%ecx,212(<out=%rdi) | ||
2946 | movl %ecx,212(%rdi) | ||
2947 | |||
2948 | # qhasm: *(uint32 *) (out + 216) = in6 | ||
2949 | # asm 1: movl <in6=int64#5d,216(<out=int64#1) | ||
2950 | # asm 2: movl <in6=%r8d,216(<out=%rdi) | ||
2951 | movl %r8d,216(%rdi) | ||
2952 | |||
2953 | # qhasm: *(uint32 *) (out + 220) = in7 | ||
2954 | # asm 1: movl <in7=int64#6d,220(<out=int64#1) | ||
2955 | # asm 2: movl <in7=%r9d,220(<out=%rdi) | ||
2956 | movl %r9d,220(%rdi) | ||
2957 | |||
2958 | # qhasm: uint32323232 z8 += orig8 | ||
2959 | # asm 1: paddd <orig8=stack128#19,<z8=int6464#16 | ||
2960 | # asm 2: paddd <orig8=288(%rsp),<z8=%xmm15 | ||
2961 | paddd 288(%rsp),%xmm15 | ||
2962 | |||
2963 | # qhasm: uint32323232 z9 += orig9 | ||
2964 | # asm 1: paddd <orig9=stack128#20,<z9=int6464#12 | ||
2965 | # asm 2: paddd <orig9=304(%rsp),<z9=%xmm11 | ||
2966 | paddd 304(%rsp),%xmm11 | ||
2967 | |||
2968 | # qhasm: uint32323232 z10 += orig10 | ||
2969 | # asm 1: paddd <orig10=stack128#6,<z10=int6464#2 | ||
2970 | # asm 2: paddd <orig10=80(%rsp),<z10=%xmm1 | ||
2971 | paddd 80(%rsp),%xmm1 | ||
2972 | |||
2973 | # qhasm: uint32323232 z11 += orig11 | ||
2974 | # asm 1: paddd <orig11=stack128#10,<z11=int6464#7 | ||
2975 | # asm 2: paddd <orig11=144(%rsp),<z11=%xmm6 | ||
2976 | paddd 144(%rsp),%xmm6 | ||
2977 | |||
2978 | # qhasm: in8 = z8 | ||
2979 | # asm 1: movd <z8=int6464#16,>in8=int64#3 | ||
2980 | # asm 2: movd <z8=%xmm15,>in8=%rdx | ||
2981 | movd %xmm15,%rdx | ||
2982 | |||
2983 | # qhasm: in9 = z9 | ||
2984 | # asm 1: movd <z9=int6464#12,>in9=int64#4 | ||
2985 | # asm 2: movd <z9=%xmm11,>in9=%rcx | ||
2986 | movd %xmm11,%rcx | ||
2987 | |||
2988 | # qhasm: in10 = z10 | ||
2989 | # asm 1: movd <z10=int6464#2,>in10=int64#5 | ||
2990 | # asm 2: movd <z10=%xmm1,>in10=%r8 | ||
2991 | movd %xmm1,%r8 | ||
2992 | |||
2993 | # qhasm: in11 = z11 | ||
2994 | # asm 1: movd <z11=int6464#7,>in11=int64#6 | ||
2995 | # asm 2: movd <z11=%xmm6,>in11=%r9 | ||
2996 | movd %xmm6,%r9 | ||
2997 | |||
2998 | # qhasm: z8 <<<= 96 | ||
2999 | # asm 1: pshufd $0x39,<z8=int6464#16,<z8=int6464#16 | ||
3000 | # asm 2: pshufd $0x39,<z8=%xmm15,<z8=%xmm15 | ||
3001 | pshufd $0x39,%xmm15,%xmm15 | ||
3002 | |||
3003 | # qhasm: z9 <<<= 96 | ||
3004 | # asm 1: pshufd $0x39,<z9=int6464#12,<z9=int6464#12 | ||
3005 | # asm 2: pshufd $0x39,<z9=%xmm11,<z9=%xmm11 | ||
3006 | pshufd $0x39,%xmm11,%xmm11 | ||
3007 | |||
3008 | # qhasm: z10 <<<= 96 | ||
3009 | # asm 1: pshufd $0x39,<z10=int6464#2,<z10=int6464#2 | ||
3010 | # asm 2: pshufd $0x39,<z10=%xmm1,<z10=%xmm1 | ||
3011 | pshufd $0x39,%xmm1,%xmm1 | ||
3012 | |||
3013 | # qhasm: z11 <<<= 96 | ||
3014 | # asm 1: pshufd $0x39,<z11=int6464#7,<z11=int6464#7 | ||
3015 | # asm 2: pshufd $0x39,<z11=%xmm6,<z11=%xmm6 | ||
3016 | pshufd $0x39,%xmm6,%xmm6 | ||
3017 | |||
3018 | # qhasm: (uint32) in8 ^= *(uint32 *) (m + 32) | ||
3019 | # asm 1: xorl 32(<m=int64#2),<in8=int64#3d | ||
3020 | # asm 2: xorl 32(<m=%rsi),<in8=%edx | ||
3021 | xorl 32(%rsi),%edx | ||
3022 | |||
3023 | # qhasm: (uint32) in9 ^= *(uint32 *) (m + 36) | ||
3024 | # asm 1: xorl 36(<m=int64#2),<in9=int64#4d | ||
3025 | # asm 2: xorl 36(<m=%rsi),<in9=%ecx | ||
3026 | xorl 36(%rsi),%ecx | ||
3027 | |||
3028 | # qhasm: (uint32) in10 ^= *(uint32 *) (m + 40) | ||
3029 | # asm 1: xorl 40(<m=int64#2),<in10=int64#5d | ||
3030 | # asm 2: xorl 40(<m=%rsi),<in10=%r8d | ||
3031 | xorl 40(%rsi),%r8d | ||
3032 | |||
3033 | # qhasm: (uint32) in11 ^= *(uint32 *) (m + 44) | ||
3034 | # asm 1: xorl 44(<m=int64#2),<in11=int64#6d | ||
3035 | # asm 2: xorl 44(<m=%rsi),<in11=%r9d | ||
3036 | xorl 44(%rsi),%r9d | ||
3037 | |||
3038 | # qhasm: *(uint32 *) (out + 32) = in8 | ||
3039 | # asm 1: movl <in8=int64#3d,32(<out=int64#1) | ||
3040 | # asm 2: movl <in8=%edx,32(<out=%rdi) | ||
3041 | movl %edx,32(%rdi) | ||
3042 | |||
3043 | # qhasm: *(uint32 *) (out + 36) = in9 | ||
3044 | # asm 1: movl <in9=int64#4d,36(<out=int64#1) | ||
3045 | # asm 2: movl <in9=%ecx,36(<out=%rdi) | ||
3046 | movl %ecx,36(%rdi) | ||
3047 | |||
3048 | # qhasm: *(uint32 *) (out + 40) = in10 | ||
3049 | # asm 1: movl <in10=int64#5d,40(<out=int64#1) | ||
3050 | # asm 2: movl <in10=%r8d,40(<out=%rdi) | ||
3051 | movl %r8d,40(%rdi) | ||
3052 | |||
3053 | # qhasm: *(uint32 *) (out + 44) = in11 | ||
3054 | # asm 1: movl <in11=int64#6d,44(<out=int64#1) | ||
3055 | # asm 2: movl <in11=%r9d,44(<out=%rdi) | ||
3056 | movl %r9d,44(%rdi) | ||
3057 | |||
3058 | # qhasm: in8 = z8 | ||
3059 | # asm 1: movd <z8=int6464#16,>in8=int64#3 | ||
3060 | # asm 2: movd <z8=%xmm15,>in8=%rdx | ||
3061 | movd %xmm15,%rdx | ||
3062 | |||
3063 | # qhasm: in9 = z9 | ||
3064 | # asm 1: movd <z9=int6464#12,>in9=int64#4 | ||
3065 | # asm 2: movd <z9=%xmm11,>in9=%rcx | ||
3066 | movd %xmm11,%rcx | ||
3067 | |||
3068 | # qhasm: in10 = z10 | ||
3069 | # asm 1: movd <z10=int6464#2,>in10=int64#5 | ||
3070 | # asm 2: movd <z10=%xmm1,>in10=%r8 | ||
3071 | movd %xmm1,%r8 | ||
3072 | |||
3073 | # qhasm: in11 = z11 | ||
3074 | # asm 1: movd <z11=int6464#7,>in11=int64#6 | ||
3075 | # asm 2: movd <z11=%xmm6,>in11=%r9 | ||
3076 | movd %xmm6,%r9 | ||
3077 | |||
3078 | # qhasm: z8 <<<= 96 | ||
3079 | # asm 1: pshufd $0x39,<z8=int6464#16,<z8=int6464#16 | ||
3080 | # asm 2: pshufd $0x39,<z8=%xmm15,<z8=%xmm15 | ||
3081 | pshufd $0x39,%xmm15,%xmm15 | ||
3082 | |||
3083 | # qhasm: z9 <<<= 96 | ||
3084 | # asm 1: pshufd $0x39,<z9=int6464#12,<z9=int6464#12 | ||
3085 | # asm 2: pshufd $0x39,<z9=%xmm11,<z9=%xmm11 | ||
3086 | pshufd $0x39,%xmm11,%xmm11 | ||
3087 | |||
3088 | # qhasm: z10 <<<= 96 | ||
3089 | # asm 1: pshufd $0x39,<z10=int6464#2,<z10=int6464#2 | ||
3090 | # asm 2: pshufd $0x39,<z10=%xmm1,<z10=%xmm1 | ||
3091 | pshufd $0x39,%xmm1,%xmm1 | ||
3092 | |||
3093 | # qhasm: z11 <<<= 96 | ||
3094 | # asm 1: pshufd $0x39,<z11=int6464#7,<z11=int6464#7 | ||
3095 | # asm 2: pshufd $0x39,<z11=%xmm6,<z11=%xmm6 | ||
3096 | pshufd $0x39,%xmm6,%xmm6 | ||
3097 | |||
3098 | # qhasm: (uint32) in8 ^= *(uint32 *) (m + 96) | ||
3099 | # asm 1: xorl 96(<m=int64#2),<in8=int64#3d | ||
3100 | # asm 2: xorl 96(<m=%rsi),<in8=%edx | ||
3101 | xorl 96(%rsi),%edx | ||
3102 | |||
3103 | # qhasm: (uint32) in9 ^= *(uint32 *) (m + 100) | ||
3104 | # asm 1: xorl 100(<m=int64#2),<in9=int64#4d | ||
3105 | # asm 2: xorl 100(<m=%rsi),<in9=%ecx | ||
3106 | xorl 100(%rsi),%ecx | ||
3107 | |||
3108 | # qhasm: (uint32) in10 ^= *(uint32 *) (m + 104) | ||
3109 | # asm 1: xorl 104(<m=int64#2),<in10=int64#5d | ||
3110 | # asm 2: xorl 104(<m=%rsi),<in10=%r8d | ||
3111 | xorl 104(%rsi),%r8d | ||
3112 | |||
3113 | # qhasm: (uint32) in11 ^= *(uint32 *) (m + 108) | ||
3114 | # asm 1: xorl 108(<m=int64#2),<in11=int64#6d | ||
3115 | # asm 2: xorl 108(<m=%rsi),<in11=%r9d | ||
3116 | xorl 108(%rsi),%r9d | ||
3117 | |||
3118 | # qhasm: *(uint32 *) (out + 96) = in8 | ||
3119 | # asm 1: movl <in8=int64#3d,96(<out=int64#1) | ||
3120 | # asm 2: movl <in8=%edx,96(<out=%rdi) | ||
3121 | movl %edx,96(%rdi) | ||
3122 | |||
3123 | # qhasm: *(uint32 *) (out + 100) = in9 | ||
3124 | # asm 1: movl <in9=int64#4d,100(<out=int64#1) | ||
3125 | # asm 2: movl <in9=%ecx,100(<out=%rdi) | ||
3126 | movl %ecx,100(%rdi) | ||
3127 | |||
3128 | # qhasm: *(uint32 *) (out + 104) = in10 | ||
3129 | # asm 1: movl <in10=int64#5d,104(<out=int64#1) | ||
3130 | # asm 2: movl <in10=%r8d,104(<out=%rdi) | ||
3131 | movl %r8d,104(%rdi) | ||
3132 | |||
3133 | # qhasm: *(uint32 *) (out + 108) = in11 | ||
3134 | # asm 1: movl <in11=int64#6d,108(<out=int64#1) | ||
3135 | # asm 2: movl <in11=%r9d,108(<out=%rdi) | ||
3136 | movl %r9d,108(%rdi) | ||
3137 | |||
3138 | # qhasm: in8 = z8 | ||
3139 | # asm 1: movd <z8=int6464#16,>in8=int64#3 | ||
3140 | # asm 2: movd <z8=%xmm15,>in8=%rdx | ||
3141 | movd %xmm15,%rdx | ||
3142 | |||
3143 | # qhasm: in9 = z9 | ||
3144 | # asm 1: movd <z9=int6464#12,>in9=int64#4 | ||
3145 | # asm 2: movd <z9=%xmm11,>in9=%rcx | ||
3146 | movd %xmm11,%rcx | ||
3147 | |||
3148 | # qhasm: in10 = z10 | ||
3149 | # asm 1: movd <z10=int6464#2,>in10=int64#5 | ||
3150 | # asm 2: movd <z10=%xmm1,>in10=%r8 | ||
3151 | movd %xmm1,%r8 | ||
3152 | |||
3153 | # qhasm: in11 = z11 | ||
3154 | # asm 1: movd <z11=int6464#7,>in11=int64#6 | ||
3155 | # asm 2: movd <z11=%xmm6,>in11=%r9 | ||
3156 | movd %xmm6,%r9 | ||
3157 | |||
3158 | # qhasm: z8 <<<= 96 | ||
3159 | # asm 1: pshufd $0x39,<z8=int6464#16,<z8=int6464#16 | ||
3160 | # asm 2: pshufd $0x39,<z8=%xmm15,<z8=%xmm15 | ||
3161 | pshufd $0x39,%xmm15,%xmm15 | ||
3162 | |||
3163 | # qhasm: z9 <<<= 96 | ||
3164 | # asm 1: pshufd $0x39,<z9=int6464#12,<z9=int6464#12 | ||
3165 | # asm 2: pshufd $0x39,<z9=%xmm11,<z9=%xmm11 | ||
3166 | pshufd $0x39,%xmm11,%xmm11 | ||
3167 | |||
3168 | # qhasm: z10 <<<= 96 | ||
3169 | # asm 1: pshufd $0x39,<z10=int6464#2,<z10=int6464#2 | ||
3170 | # asm 2: pshufd $0x39,<z10=%xmm1,<z10=%xmm1 | ||
3171 | pshufd $0x39,%xmm1,%xmm1 | ||
3172 | |||
3173 | # qhasm: z11 <<<= 96 | ||
3174 | # asm 1: pshufd $0x39,<z11=int6464#7,<z11=int6464#7 | ||
3175 | # asm 2: pshufd $0x39,<z11=%xmm6,<z11=%xmm6 | ||
3176 | pshufd $0x39,%xmm6,%xmm6 | ||
3177 | |||
3178 | # qhasm: (uint32) in8 ^= *(uint32 *) (m + 160) | ||
3179 | # asm 1: xorl 160(<m=int64#2),<in8=int64#3d | ||
3180 | # asm 2: xorl 160(<m=%rsi),<in8=%edx | ||
3181 | xorl 160(%rsi),%edx | ||
3182 | |||
3183 | # qhasm: (uint32) in9 ^= *(uint32 *) (m + 164) | ||
3184 | # asm 1: xorl 164(<m=int64#2),<in9=int64#4d | ||
3185 | # asm 2: xorl 164(<m=%rsi),<in9=%ecx | ||
3186 | xorl 164(%rsi),%ecx | ||
3187 | |||
3188 | # qhasm: (uint32) in10 ^= *(uint32 *) (m + 168) | ||
3189 | # asm 1: xorl 168(<m=int64#2),<in10=int64#5d | ||
3190 | # asm 2: xorl 168(<m=%rsi),<in10=%r8d | ||
3191 | xorl 168(%rsi),%r8d | ||
3192 | |||
3193 | # qhasm: (uint32) in11 ^= *(uint32 *) (m + 172) | ||
3194 | # asm 1: xorl 172(<m=int64#2),<in11=int64#6d | ||
3195 | # asm 2: xorl 172(<m=%rsi),<in11=%r9d | ||
3196 | xorl 172(%rsi),%r9d | ||
3197 | |||
3198 | # qhasm: *(uint32 *) (out + 160) = in8 | ||
3199 | # asm 1: movl <in8=int64#3d,160(<out=int64#1) | ||
3200 | # asm 2: movl <in8=%edx,160(<out=%rdi) | ||
3201 | movl %edx,160(%rdi) | ||
3202 | |||
3203 | # qhasm: *(uint32 *) (out + 164) = in9 | ||
3204 | # asm 1: movl <in9=int64#4d,164(<out=int64#1) | ||
3205 | # asm 2: movl <in9=%ecx,164(<out=%rdi) | ||
3206 | movl %ecx,164(%rdi) | ||
3207 | |||
3208 | # qhasm: *(uint32 *) (out + 168) = in10 | ||
3209 | # asm 1: movl <in10=int64#5d,168(<out=int64#1) | ||
3210 | # asm 2: movl <in10=%r8d,168(<out=%rdi) | ||
3211 | movl %r8d,168(%rdi) | ||
3212 | |||
3213 | # qhasm: *(uint32 *) (out + 172) = in11 | ||
3214 | # asm 1: movl <in11=int64#6d,172(<out=int64#1) | ||
3215 | # asm 2: movl <in11=%r9d,172(<out=%rdi) | ||
3216 | movl %r9d,172(%rdi) | ||
3217 | |||
3218 | # qhasm: in8 = z8 | ||
3219 | # asm 1: movd <z8=int6464#16,>in8=int64#3 | ||
3220 | # asm 2: movd <z8=%xmm15,>in8=%rdx | ||
3221 | movd %xmm15,%rdx | ||
3222 | |||
3223 | # qhasm: in9 = z9 | ||
3224 | # asm 1: movd <z9=int6464#12,>in9=int64#4 | ||
3225 | # asm 2: movd <z9=%xmm11,>in9=%rcx | ||
3226 | movd %xmm11,%rcx | ||
3227 | |||
3228 | # qhasm: in10 = z10 | ||
3229 | # asm 1: movd <z10=int6464#2,>in10=int64#5 | ||
3230 | # asm 2: movd <z10=%xmm1,>in10=%r8 | ||
3231 | movd %xmm1,%r8 | ||
3232 | |||
3233 | # qhasm: in11 = z11 | ||
3234 | # asm 1: movd <z11=int6464#7,>in11=int64#6 | ||
3235 | # asm 2: movd <z11=%xmm6,>in11=%r9 | ||
3236 | movd %xmm6,%r9 | ||
3237 | |||
3238 | # qhasm: (uint32) in8 ^= *(uint32 *) (m + 224) | ||
3239 | # asm 1: xorl 224(<m=int64#2),<in8=int64#3d | ||
3240 | # asm 2: xorl 224(<m=%rsi),<in8=%edx | ||
3241 | xorl 224(%rsi),%edx | ||
3242 | |||
3243 | # qhasm: (uint32) in9 ^= *(uint32 *) (m + 228) | ||
3244 | # asm 1: xorl 228(<m=int64#2),<in9=int64#4d | ||
3245 | # asm 2: xorl 228(<m=%rsi),<in9=%ecx | ||
3246 | xorl 228(%rsi),%ecx | ||
3247 | |||
3248 | # qhasm: (uint32) in10 ^= *(uint32 *) (m + 232) | ||
3249 | # asm 1: xorl 232(<m=int64#2),<in10=int64#5d | ||
3250 | # asm 2: xorl 232(<m=%rsi),<in10=%r8d | ||
3251 | xorl 232(%rsi),%r8d | ||
3252 | |||
3253 | # qhasm: (uint32) in11 ^= *(uint32 *) (m + 236) | ||
3254 | # asm 1: xorl 236(<m=int64#2),<in11=int64#6d | ||
3255 | # asm 2: xorl 236(<m=%rsi),<in11=%r9d | ||
3256 | xorl 236(%rsi),%r9d | ||
3257 | |||
3258 | # qhasm: *(uint32 *) (out + 224) = in8 | ||
3259 | # asm 1: movl <in8=int64#3d,224(<out=int64#1) | ||
3260 | # asm 2: movl <in8=%edx,224(<out=%rdi) | ||
3261 | movl %edx,224(%rdi) | ||
3262 | |||
3263 | # qhasm: *(uint32 *) (out + 228) = in9 | ||
3264 | # asm 1: movl <in9=int64#4d,228(<out=int64#1) | ||
3265 | # asm 2: movl <in9=%ecx,228(<out=%rdi) | ||
3266 | movl %ecx,228(%rdi) | ||
3267 | |||
3268 | # qhasm: *(uint32 *) (out + 232) = in10 | ||
3269 | # asm 1: movl <in10=int64#5d,232(<out=int64#1) | ||
3270 | # asm 2: movl <in10=%r8d,232(<out=%rdi) | ||
3271 | movl %r8d,232(%rdi) | ||
3272 | |||
3273 | # qhasm: *(uint32 *) (out + 236) = in11 | ||
3274 | # asm 1: movl <in11=int64#6d,236(<out=int64#1) | ||
3275 | # asm 2: movl <in11=%r9d,236(<out=%rdi) | ||
3276 | movl %r9d,236(%rdi) | ||
3277 | |||
3278 | # qhasm: uint32323232 z12 += orig12 | ||
3279 | # asm 1: paddd <orig12=stack128#11,<z12=int6464#14 | ||
3280 | # asm 2: paddd <orig12=160(%rsp),<z12=%xmm13 | ||
3281 | paddd 160(%rsp),%xmm13 | ||
3282 | |||
3283 | # qhasm: uint32323232 z13 += orig13 | ||
3284 | # asm 1: paddd <orig13=stack128#14,<z13=int6464#10 | ||
3285 | # asm 2: paddd <orig13=208(%rsp),<z13=%xmm9 | ||
3286 | paddd 208(%rsp),%xmm9 | ||
3287 | |||
3288 | # qhasm: uint32323232 z14 += orig14 | ||
3289 | # asm 1: paddd <orig14=stack128#17,<z14=int6464#4 | ||
3290 | # asm 2: paddd <orig14=256(%rsp),<z14=%xmm3 | ||
3291 | paddd 256(%rsp),%xmm3 | ||
3292 | |||
3293 | # qhasm: uint32323232 z15 += orig15 | ||
3294 | # asm 1: paddd <orig15=stack128#7,<z15=int6464#3 | ||
3295 | # asm 2: paddd <orig15=96(%rsp),<z15=%xmm2 | ||
3296 | paddd 96(%rsp),%xmm2 | ||
3297 | |||
3298 | # qhasm: in12 = z12 | ||
3299 | # asm 1: movd <z12=int6464#14,>in12=int64#3 | ||
3300 | # asm 2: movd <z12=%xmm13,>in12=%rdx | ||
3301 | movd %xmm13,%rdx | ||
3302 | |||
3303 | # qhasm: in13 = z13 | ||
3304 | # asm 1: movd <z13=int6464#10,>in13=int64#4 | ||
3305 | # asm 2: movd <z13=%xmm9,>in13=%rcx | ||
3306 | movd %xmm9,%rcx | ||
3307 | |||
3308 | # qhasm: in14 = z14 | ||
3309 | # asm 1: movd <z14=int6464#4,>in14=int64#5 | ||
3310 | # asm 2: movd <z14=%xmm3,>in14=%r8 | ||
3311 | movd %xmm3,%r8 | ||
3312 | |||
3313 | # qhasm: in15 = z15 | ||
3314 | # asm 1: movd <z15=int6464#3,>in15=int64#6 | ||
3315 | # asm 2: movd <z15=%xmm2,>in15=%r9 | ||
3316 | movd %xmm2,%r9 | ||
3317 | |||
3318 | # qhasm: z12 <<<= 96 | ||
3319 | # asm 1: pshufd $0x39,<z12=int6464#14,<z12=int6464#14 | ||
3320 | # asm 2: pshufd $0x39,<z12=%xmm13,<z12=%xmm13 | ||
3321 | pshufd $0x39,%xmm13,%xmm13 | ||
3322 | |||
3323 | # qhasm: z13 <<<= 96 | ||
3324 | # asm 1: pshufd $0x39,<z13=int6464#10,<z13=int6464#10 | ||
3325 | # asm 2: pshufd $0x39,<z13=%xmm9,<z13=%xmm9 | ||
3326 | pshufd $0x39,%xmm9,%xmm9 | ||
3327 | |||
3328 | # qhasm: z14 <<<= 96 | ||
3329 | # asm 1: pshufd $0x39,<z14=int6464#4,<z14=int6464#4 | ||
3330 | # asm 2: pshufd $0x39,<z14=%xmm3,<z14=%xmm3 | ||
3331 | pshufd $0x39,%xmm3,%xmm3 | ||
3332 | |||
3333 | # qhasm: z15 <<<= 96 | ||
3334 | # asm 1: pshufd $0x39,<z15=int6464#3,<z15=int6464#3 | ||
3335 | # asm 2: pshufd $0x39,<z15=%xmm2,<z15=%xmm2 | ||
3336 | pshufd $0x39,%xmm2,%xmm2 | ||
3337 | |||
3338 | # qhasm: (uint32) in12 ^= *(uint32 *) (m + 48) | ||
3339 | # asm 1: xorl 48(<m=int64#2),<in12=int64#3d | ||
3340 | # asm 2: xorl 48(<m=%rsi),<in12=%edx | ||
3341 | xorl 48(%rsi),%edx | ||
3342 | |||
3343 | # qhasm: (uint32) in13 ^= *(uint32 *) (m + 52) | ||
3344 | # asm 1: xorl 52(<m=int64#2),<in13=int64#4d | ||
3345 | # asm 2: xorl 52(<m=%rsi),<in13=%ecx | ||
3346 | xorl 52(%rsi),%ecx | ||
3347 | |||
3348 | # qhasm: (uint32) in14 ^= *(uint32 *) (m + 56) | ||
3349 | # asm 1: xorl 56(<m=int64#2),<in14=int64#5d | ||
3350 | # asm 2: xorl 56(<m=%rsi),<in14=%r8d | ||
3351 | xorl 56(%rsi),%r8d | ||
3352 | |||
3353 | # qhasm: (uint32) in15 ^= *(uint32 *) (m + 60) | ||
3354 | # asm 1: xorl 60(<m=int64#2),<in15=int64#6d | ||
3355 | # asm 2: xorl 60(<m=%rsi),<in15=%r9d | ||
3356 | xorl 60(%rsi),%r9d | ||
3357 | |||
3358 | # qhasm: *(uint32 *) (out + 48) = in12 | ||
3359 | # asm 1: movl <in12=int64#3d,48(<out=int64#1) | ||
3360 | # asm 2: movl <in12=%edx,48(<out=%rdi) | ||
3361 | movl %edx,48(%rdi) | ||
3362 | |||
3363 | # qhasm: *(uint32 *) (out + 52) = in13 | ||
3364 | # asm 1: movl <in13=int64#4d,52(<out=int64#1) | ||
3365 | # asm 2: movl <in13=%ecx,52(<out=%rdi) | ||
3366 | movl %ecx,52(%rdi) | ||
3367 | |||
3368 | # qhasm: *(uint32 *) (out + 56) = in14 | ||
3369 | # asm 1: movl <in14=int64#5d,56(<out=int64#1) | ||
3370 | # asm 2: movl <in14=%r8d,56(<out=%rdi) | ||
3371 | movl %r8d,56(%rdi) | ||
3372 | |||
3373 | # qhasm: *(uint32 *) (out + 60) = in15 | ||
3374 | # asm 1: movl <in15=int64#6d,60(<out=int64#1) | ||
3375 | # asm 2: movl <in15=%r9d,60(<out=%rdi) | ||
3376 | movl %r9d,60(%rdi) | ||
3377 | |||
3378 | # qhasm: in12 = z12 | ||
3379 | # asm 1: movd <z12=int6464#14,>in12=int64#3 | ||
3380 | # asm 2: movd <z12=%xmm13,>in12=%rdx | ||
3381 | movd %xmm13,%rdx | ||
3382 | |||
3383 | # qhasm: in13 = z13 | ||
3384 | # asm 1: movd <z13=int6464#10,>in13=int64#4 | ||
3385 | # asm 2: movd <z13=%xmm9,>in13=%rcx | ||
3386 | movd %xmm9,%rcx | ||
3387 | |||
3388 | # qhasm: in14 = z14 | ||
3389 | # asm 1: movd <z14=int6464#4,>in14=int64#5 | ||
3390 | # asm 2: movd <z14=%xmm3,>in14=%r8 | ||
3391 | movd %xmm3,%r8 | ||
3392 | |||
3393 | # qhasm: in15 = z15 | ||
3394 | # asm 1: movd <z15=int6464#3,>in15=int64#6 | ||
3395 | # asm 2: movd <z15=%xmm2,>in15=%r9 | ||
3396 | movd %xmm2,%r9 | ||
3397 | |||
3398 | # qhasm: z12 <<<= 96 | ||
3399 | # asm 1: pshufd $0x39,<z12=int6464#14,<z12=int6464#14 | ||
3400 | # asm 2: pshufd $0x39,<z12=%xmm13,<z12=%xmm13 | ||
3401 | pshufd $0x39,%xmm13,%xmm13 | ||
3402 | |||
3403 | # qhasm: z13 <<<= 96 | ||
3404 | # asm 1: pshufd $0x39,<z13=int6464#10,<z13=int6464#10 | ||
3405 | # asm 2: pshufd $0x39,<z13=%xmm9,<z13=%xmm9 | ||
3406 | pshufd $0x39,%xmm9,%xmm9 | ||
3407 | |||
3408 | # qhasm: z14 <<<= 96 | ||
3409 | # asm 1: pshufd $0x39,<z14=int6464#4,<z14=int6464#4 | ||
3410 | # asm 2: pshufd $0x39,<z14=%xmm3,<z14=%xmm3 | ||
3411 | pshufd $0x39,%xmm3,%xmm3 | ||
3412 | |||
3413 | # qhasm: z15 <<<= 96 | ||
3414 | # asm 1: pshufd $0x39,<z15=int6464#3,<z15=int6464#3 | ||
3415 | # asm 2: pshufd $0x39,<z15=%xmm2,<z15=%xmm2 | ||
3416 | pshufd $0x39,%xmm2,%xmm2 | ||
3417 | |||
3418 | # qhasm: (uint32) in12 ^= *(uint32 *) (m + 112) | ||
3419 | # asm 1: xorl 112(<m=int64#2),<in12=int64#3d | ||
3420 | # asm 2: xorl 112(<m=%rsi),<in12=%edx | ||
3421 | xorl 112(%rsi),%edx | ||
3422 | |||
3423 | # qhasm: (uint32) in13 ^= *(uint32 *) (m + 116) | ||
3424 | # asm 1: xorl 116(<m=int64#2),<in13=int64#4d | ||
3425 | # asm 2: xorl 116(<m=%rsi),<in13=%ecx | ||
3426 | xorl 116(%rsi),%ecx | ||
3427 | |||
3428 | # qhasm: (uint32) in14 ^= *(uint32 *) (m + 120) | ||
3429 | # asm 1: xorl 120(<m=int64#2),<in14=int64#5d | ||
3430 | # asm 2: xorl 120(<m=%rsi),<in14=%r8d | ||
3431 | xorl 120(%rsi),%r8d | ||
3432 | |||
3433 | # qhasm: (uint32) in15 ^= *(uint32 *) (m + 124) | ||
3434 | # asm 1: xorl 124(<m=int64#2),<in15=int64#6d | ||
3435 | # asm 2: xorl 124(<m=%rsi),<in15=%r9d | ||
3436 | xorl 124(%rsi),%r9d | ||
3437 | |||
3438 | # qhasm: *(uint32 *) (out + 112) = in12 | ||
3439 | # asm 1: movl <in12=int64#3d,112(<out=int64#1) | ||
3440 | # asm 2: movl <in12=%edx,112(<out=%rdi) | ||
3441 | movl %edx,112(%rdi) | ||
3442 | |||
3443 | # qhasm: *(uint32 *) (out + 116) = in13 | ||
3444 | # asm 1: movl <in13=int64#4d,116(<out=int64#1) | ||
3445 | # asm 2: movl <in13=%ecx,116(<out=%rdi) | ||
3446 | movl %ecx,116(%rdi) | ||
3447 | |||
3448 | # qhasm: *(uint32 *) (out + 120) = in14 | ||
3449 | # asm 1: movl <in14=int64#5d,120(<out=int64#1) | ||
3450 | # asm 2: movl <in14=%r8d,120(<out=%rdi) | ||
3451 | movl %r8d,120(%rdi) | ||
3452 | |||
3453 | # qhasm: *(uint32 *) (out + 124) = in15 | ||
3454 | # asm 1: movl <in15=int64#6d,124(<out=int64#1) | ||
3455 | # asm 2: movl <in15=%r9d,124(<out=%rdi) | ||
3456 | movl %r9d,124(%rdi) | ||
3457 | |||
3458 | # qhasm: in12 = z12 | ||
3459 | # asm 1: movd <z12=int6464#14,>in12=int64#3 | ||
3460 | # asm 2: movd <z12=%xmm13,>in12=%rdx | ||
3461 | movd %xmm13,%rdx | ||
3462 | |||
3463 | # qhasm: in13 = z13 | ||
3464 | # asm 1: movd <z13=int6464#10,>in13=int64#4 | ||
3465 | # asm 2: movd <z13=%xmm9,>in13=%rcx | ||
3466 | movd %xmm9,%rcx | ||
3467 | |||
3468 | # qhasm: in14 = z14 | ||
3469 | # asm 1: movd <z14=int6464#4,>in14=int64#5 | ||
3470 | # asm 2: movd <z14=%xmm3,>in14=%r8 | ||
3471 | movd %xmm3,%r8 | ||
3472 | |||
3473 | # qhasm: in15 = z15 | ||
3474 | # asm 1: movd <z15=int6464#3,>in15=int64#6 | ||
3475 | # asm 2: movd <z15=%xmm2,>in15=%r9 | ||
3476 | movd %xmm2,%r9 | ||
3477 | |||
3478 | # qhasm: z12 <<<= 96 | ||
3479 | # asm 1: pshufd $0x39,<z12=int6464#14,<z12=int6464#14 | ||
3480 | # asm 2: pshufd $0x39,<z12=%xmm13,<z12=%xmm13 | ||
3481 | pshufd $0x39,%xmm13,%xmm13 | ||
3482 | |||
3483 | # qhasm: z13 <<<= 96 | ||
3484 | # asm 1: pshufd $0x39,<z13=int6464#10,<z13=int6464#10 | ||
3485 | # asm 2: pshufd $0x39,<z13=%xmm9,<z13=%xmm9 | ||
3486 | pshufd $0x39,%xmm9,%xmm9 | ||
3487 | |||
3488 | # qhasm: z14 <<<= 96 | ||
3489 | # asm 1: pshufd $0x39,<z14=int6464#4,<z14=int6464#4 | ||
3490 | # asm 2: pshufd $0x39,<z14=%xmm3,<z14=%xmm3 | ||
3491 | pshufd $0x39,%xmm3,%xmm3 | ||
3492 | |||
3493 | # qhasm: z15 <<<= 96 | ||
3494 | # asm 1: pshufd $0x39,<z15=int6464#3,<z15=int6464#3 | ||
3495 | # asm 2: pshufd $0x39,<z15=%xmm2,<z15=%xmm2 | ||
3496 | pshufd $0x39,%xmm2,%xmm2 | ||
3497 | |||
3498 | # qhasm: (uint32) in12 ^= *(uint32 *) (m + 176) | ||
3499 | # asm 1: xorl 176(<m=int64#2),<in12=int64#3d | ||
3500 | # asm 2: xorl 176(<m=%rsi),<in12=%edx | ||
3501 | xorl 176(%rsi),%edx | ||
3502 | |||
3503 | # qhasm: (uint32) in13 ^= *(uint32 *) (m + 180) | ||
3504 | # asm 1: xorl 180(<m=int64#2),<in13=int64#4d | ||
3505 | # asm 2: xorl 180(<m=%rsi),<in13=%ecx | ||
3506 | xorl 180(%rsi),%ecx | ||
3507 | |||
3508 | # qhasm: (uint32) in14 ^= *(uint32 *) (m + 184) | ||
3509 | # asm 1: xorl 184(<m=int64#2),<in14=int64#5d | ||
3510 | # asm 2: xorl 184(<m=%rsi),<in14=%r8d | ||
3511 | xorl 184(%rsi),%r8d | ||
3512 | |||
3513 | # qhasm: (uint32) in15 ^= *(uint32 *) (m + 188) | ||
3514 | # asm 1: xorl 188(<m=int64#2),<in15=int64#6d | ||
3515 | # asm 2: xorl 188(<m=%rsi),<in15=%r9d | ||
3516 | xorl 188(%rsi),%r9d | ||
3517 | |||
3518 | # qhasm: *(uint32 *) (out + 176) = in12 | ||
3519 | # asm 1: movl <in12=int64#3d,176(<out=int64#1) | ||
3520 | # asm 2: movl <in12=%edx,176(<out=%rdi) | ||
3521 | movl %edx,176(%rdi) | ||
3522 | |||
3523 | # qhasm: *(uint32 *) (out + 180) = in13 | ||
3524 | # asm 1: movl <in13=int64#4d,180(<out=int64#1) | ||
3525 | # asm 2: movl <in13=%ecx,180(<out=%rdi) | ||
3526 | movl %ecx,180(%rdi) | ||
3527 | |||
3528 | # qhasm: *(uint32 *) (out + 184) = in14 | ||
3529 | # asm 1: movl <in14=int64#5d,184(<out=int64#1) | ||
3530 | # asm 2: movl <in14=%r8d,184(<out=%rdi) | ||
3531 | movl %r8d,184(%rdi) | ||
3532 | |||
3533 | # qhasm: *(uint32 *) (out + 188) = in15 | ||
3534 | # asm 1: movl <in15=int64#6d,188(<out=int64#1) | ||
3535 | # asm 2: movl <in15=%r9d,188(<out=%rdi) | ||
3536 | movl %r9d,188(%rdi) | ||
3537 | |||
3538 | # qhasm: in12 = z12 | ||
3539 | # asm 1: movd <z12=int6464#14,>in12=int64#3 | ||
3540 | # asm 2: movd <z12=%xmm13,>in12=%rdx | ||
3541 | movd %xmm13,%rdx | ||
3542 | |||
3543 | # qhasm: in13 = z13 | ||
3544 | # asm 1: movd <z13=int6464#10,>in13=int64#4 | ||
3545 | # asm 2: movd <z13=%xmm9,>in13=%rcx | ||
3546 | movd %xmm9,%rcx | ||
3547 | |||
3548 | # qhasm: in14 = z14 | ||
3549 | # asm 1: movd <z14=int6464#4,>in14=int64#5 | ||
3550 | # asm 2: movd <z14=%xmm3,>in14=%r8 | ||
3551 | movd %xmm3,%r8 | ||
3552 | |||
3553 | # qhasm: in15 = z15 | ||
3554 | # asm 1: movd <z15=int6464#3,>in15=int64#6 | ||
3555 | # asm 2: movd <z15=%xmm2,>in15=%r9 | ||
3556 | movd %xmm2,%r9 | ||
3557 | |||
3558 | # qhasm: (uint32) in12 ^= *(uint32 *) (m + 240) | ||
3559 | # asm 1: xorl 240(<m=int64#2),<in12=int64#3d | ||
3560 | # asm 2: xorl 240(<m=%rsi),<in12=%edx | ||
3561 | xorl 240(%rsi),%edx | ||
3562 | |||
3563 | # qhasm: (uint32) in13 ^= *(uint32 *) (m + 244) | ||
3564 | # asm 1: xorl 244(<m=int64#2),<in13=int64#4d | ||
3565 | # asm 2: xorl 244(<m=%rsi),<in13=%ecx | ||
3566 | xorl 244(%rsi),%ecx | ||
3567 | |||
3568 | # qhasm: (uint32) in14 ^= *(uint32 *) (m + 248) | ||
3569 | # asm 1: xorl 248(<m=int64#2),<in14=int64#5d | ||
3570 | # asm 2: xorl 248(<m=%rsi),<in14=%r8d | ||
3571 | xorl 248(%rsi),%r8d | ||
3572 | |||
3573 | # qhasm: (uint32) in15 ^= *(uint32 *) (m + 252) | ||
3574 | # asm 1: xorl 252(<m=int64#2),<in15=int64#6d | ||
3575 | # asm 2: xorl 252(<m=%rsi),<in15=%r9d | ||
3576 | xorl 252(%rsi),%r9d | ||
3577 | |||
3578 | # qhasm: *(uint32 *) (out + 240) = in12 | ||
3579 | # asm 1: movl <in12=int64#3d,240(<out=int64#1) | ||
3580 | # asm 2: movl <in12=%edx,240(<out=%rdi) | ||
3581 | movl %edx,240(%rdi) | ||
3582 | |||
3583 | # qhasm: *(uint32 *) (out + 244) = in13 | ||
3584 | # asm 1: movl <in13=int64#4d,244(<out=int64#1) | ||
3585 | # asm 2: movl <in13=%ecx,244(<out=%rdi) | ||
3586 | movl %ecx,244(%rdi) | ||
3587 | |||
3588 | # qhasm: *(uint32 *) (out + 248) = in14 | ||
3589 | # asm 1: movl <in14=int64#5d,248(<out=int64#1) | ||
3590 | # asm 2: movl <in14=%r8d,248(<out=%rdi) | ||
3591 | movl %r8d,248(%rdi) | ||
3592 | |||
3593 | # qhasm: *(uint32 *) (out + 252) = in15 | ||
3594 | # asm 1: movl <in15=int64#6d,252(<out=int64#1) | ||
3595 | # asm 2: movl <in15=%r9d,252(<out=%rdi) | ||
3596 | movl %r9d,252(%rdi) | ||
3597 | |||
3598 | # qhasm: bytes = bytes_backup | ||
3599 | # asm 1: movq <bytes_backup=stack64#8,>bytes=int64#6 | ||
3600 | # asm 2: movq <bytes_backup=408(%rsp),>bytes=%r9 | ||
3601 | movq 408(%rsp),%r9 | ||
3602 | |||
3603 | # qhasm: bytes -= 256 | ||
3604 | # asm 1: sub $256,<bytes=int64#6 | ||
3605 | # asm 2: sub $256,<bytes=%r9 | ||
3606 | sub $256,%r9 | ||
3607 | |||
3608 | # qhasm: m += 256 | ||
3609 | # asm 1: add $256,<m=int64#2 | ||
3610 | # asm 2: add $256,<m=%rsi | ||
3611 | add $256,%rsi | ||
3612 | |||
3613 | # qhasm: out += 256 | ||
3614 | # asm 1: add $256,<out=int64#1 | ||
3615 | # asm 2: add $256,<out=%rdi | ||
3616 | add $256,%rdi | ||
3617 | |||
3618 | # qhasm: unsigned<? bytes - 256 | ||
3619 | # asm 1: cmp $256,<bytes=int64#6 | ||
3620 | # asm 2: cmp $256,<bytes=%r9 | ||
3621 | cmp $256,%r9 | ||
3622 | # comment:fp stack unchanged by jump | ||
3623 | |||
3624 | # qhasm: goto bytesatleast256 if !unsigned< | ||
3625 | jae ._bytesatleast256 | ||
3626 | |||
3627 | # qhasm: unsigned>? bytes - 0 | ||
3628 | # asm 1: cmp $0,<bytes=int64#6 | ||
3629 | # asm 2: cmp $0,<bytes=%r9 | ||
3630 | cmp $0,%r9 | ||
3631 | # comment:fp stack unchanged by jump | ||
3632 | |||
3633 | # qhasm: goto done if !unsigned> | ||
3634 | jbe ._done | ||
3635 | # comment:fp stack unchanged by fallthrough | ||
3636 | |||
3637 | # qhasm: bytesbetween1and255: | ||
3638 | ._bytesbetween1and255: | ||
3639 | |||
3640 | # qhasm: unsigned<? bytes - 64 | ||
3641 | # asm 1: cmp $64,<bytes=int64#6 | ||
3642 | # asm 2: cmp $64,<bytes=%r9 | ||
3643 | cmp $64,%r9 | ||
3644 | # comment:fp stack unchanged by jump | ||
3645 | |||
3646 | # qhasm: goto nocopy if !unsigned< | ||
3647 | jae ._nocopy | ||
3648 | |||
3649 | # qhasm: ctarget = out | ||
3650 | # asm 1: mov <out=int64#1,>ctarget=int64#3 | ||
3651 | # asm 2: mov <out=%rdi,>ctarget=%rdx | ||
3652 | mov %rdi,%rdx | ||
3653 | |||
3654 | # qhasm: out = &tmp | ||
3655 | # asm 1: leaq <tmp=stack512#1,>out=int64#1 | ||
3656 | # asm 2: leaq <tmp=416(%rsp),>out=%rdi | ||
3657 | leaq 416(%rsp),%rdi | ||
3658 | |||
3659 | # qhasm: i = bytes | ||
3660 | # asm 1: mov <bytes=int64#6,>i=int64#4 | ||
3661 | # asm 2: mov <bytes=%r9,>i=%rcx | ||
3662 | mov %r9,%rcx | ||
3663 | |||
3664 | # qhasm: while (i) { *out++ = *m++; --i } | ||
3665 | rep movsb | ||
3666 | |||
3667 | # qhasm: out = &tmp | ||
3668 | # asm 1: leaq <tmp=stack512#1,>out=int64#1 | ||
3669 | # asm 2: leaq <tmp=416(%rsp),>out=%rdi | ||
3670 | leaq 416(%rsp),%rdi | ||
3671 | |||
3672 | # qhasm: m = &tmp | ||
3673 | # asm 1: leaq <tmp=stack512#1,>m=int64#2 | ||
3674 | # asm 2: leaq <tmp=416(%rsp),>m=%rsi | ||
3675 | leaq 416(%rsp),%rsi | ||
3676 | # comment:fp stack unchanged by fallthrough | ||
3677 | |||
3678 | # qhasm: nocopy: | ||
3679 | ._nocopy: | ||
3680 | |||
3681 | # qhasm: bytes_backup = bytes | ||
3682 | # asm 1: movq <bytes=int64#6,>bytes_backup=stack64#8 | ||
3683 | # asm 2: movq <bytes=%r9,>bytes_backup=408(%rsp) | ||
3684 | movq %r9,408(%rsp) | ||
3685 | |||
3686 | # qhasm: diag0 = x0 | ||
3687 | # asm 1: movdqa <x0=stack128#4,>diag0=int6464#1 | ||
3688 | # asm 2: movdqa <x0=48(%rsp),>diag0=%xmm0 | ||
3689 | movdqa 48(%rsp),%xmm0 | ||
3690 | |||
3691 | # qhasm: diag1 = x1 | ||
3692 | # asm 1: movdqa <x1=stack128#1,>diag1=int6464#2 | ||
3693 | # asm 2: movdqa <x1=0(%rsp),>diag1=%xmm1 | ||
3694 | movdqa 0(%rsp),%xmm1 | ||
3695 | |||
3696 | # qhasm: diag2 = x2 | ||
3697 | # asm 1: movdqa <x2=stack128#2,>diag2=int6464#3 | ||
3698 | # asm 2: movdqa <x2=16(%rsp),>diag2=%xmm2 | ||
3699 | movdqa 16(%rsp),%xmm2 | ||
3700 | |||
3701 | # qhasm: diag3 = x3 | ||
3702 | # asm 1: movdqa <x3=stack128#3,>diag3=int6464#4 | ||
3703 | # asm 2: movdqa <x3=32(%rsp),>diag3=%xmm3 | ||
3704 | movdqa 32(%rsp),%xmm3 | ||
3705 | |||
3706 | # qhasm: a0 = diag1 | ||
3707 | # asm 1: movdqa <diag1=int6464#2,>a0=int6464#5 | ||
3708 | # asm 2: movdqa <diag1=%xmm1,>a0=%xmm4 | ||
3709 | movdqa %xmm1,%xmm4 | ||
3710 | |||
3711 | # qhasm: i = 12 | ||
3712 | # asm 1: mov $12,>i=int64#4 | ||
3713 | # asm 2: mov $12,>i=%rcx | ||
3714 | mov $12,%rcx | ||
3715 | |||
3716 | # qhasm: mainloop2: | ||
3717 | ._mainloop2: | ||
3718 | |||
3719 | # qhasm: uint32323232 a0 += diag0 | ||
3720 | # asm 1: paddd <diag0=int6464#1,<a0=int6464#5 | ||
3721 | # asm 2: paddd <diag0=%xmm0,<a0=%xmm4 | ||
3722 | paddd %xmm0,%xmm4 | ||
3723 | |||
3724 | # qhasm: a1 = diag0 | ||
3725 | # asm 1: movdqa <diag0=int6464#1,>a1=int6464#6 | ||
3726 | # asm 2: movdqa <diag0=%xmm0,>a1=%xmm5 | ||
3727 | movdqa %xmm0,%xmm5 | ||
3728 | |||
3729 | # qhasm: b0 = a0 | ||
3730 | # asm 1: movdqa <a0=int6464#5,>b0=int6464#7 | ||
3731 | # asm 2: movdqa <a0=%xmm4,>b0=%xmm6 | ||
3732 | movdqa %xmm4,%xmm6 | ||
3733 | |||
3734 | # qhasm: uint32323232 a0 <<= 7 | ||
3735 | # asm 1: pslld $7,<a0=int6464#5 | ||
3736 | # asm 2: pslld $7,<a0=%xmm4 | ||
3737 | pslld $7,%xmm4 | ||
3738 | |||
3739 | # qhasm: uint32323232 b0 >>= 25 | ||
3740 | # asm 1: psrld $25,<b0=int6464#7 | ||
3741 | # asm 2: psrld $25,<b0=%xmm6 | ||
3742 | psrld $25,%xmm6 | ||
3743 | |||
3744 | # qhasm: diag3 ^= a0 | ||
3745 | # asm 1: pxor <a0=int6464#5,<diag3=int6464#4 | ||
3746 | # asm 2: pxor <a0=%xmm4,<diag3=%xmm3 | ||
3747 | pxor %xmm4,%xmm3 | ||
3748 | |||
3749 | # qhasm: diag3 ^= b0 | ||
3750 | # asm 1: pxor <b0=int6464#7,<diag3=int6464#4 | ||
3751 | # asm 2: pxor <b0=%xmm6,<diag3=%xmm3 | ||
3752 | pxor %xmm6,%xmm3 | ||
3753 | |||
3754 | # qhasm: uint32323232 a1 += diag3 | ||
3755 | # asm 1: paddd <diag3=int6464#4,<a1=int6464#6 | ||
3756 | # asm 2: paddd <diag3=%xmm3,<a1=%xmm5 | ||
3757 | paddd %xmm3,%xmm5 | ||
3758 | |||
3759 | # qhasm: a2 = diag3 | ||
3760 | # asm 1: movdqa <diag3=int6464#4,>a2=int6464#5 | ||
3761 | # asm 2: movdqa <diag3=%xmm3,>a2=%xmm4 | ||
3762 | movdqa %xmm3,%xmm4 | ||
3763 | |||
3764 | # qhasm: b1 = a1 | ||
3765 | # asm 1: movdqa <a1=int6464#6,>b1=int6464#7 | ||
3766 | # asm 2: movdqa <a1=%xmm5,>b1=%xmm6 | ||
3767 | movdqa %xmm5,%xmm6 | ||
3768 | |||
3769 | # qhasm: uint32323232 a1 <<= 9 | ||
3770 | # asm 1: pslld $9,<a1=int6464#6 | ||
3771 | # asm 2: pslld $9,<a1=%xmm5 | ||
3772 | pslld $9,%xmm5 | ||
3773 | |||
3774 | # qhasm: uint32323232 b1 >>= 23 | ||
3775 | # asm 1: psrld $23,<b1=int6464#7 | ||
3776 | # asm 2: psrld $23,<b1=%xmm6 | ||
3777 | psrld $23,%xmm6 | ||
3778 | |||
3779 | # qhasm: diag2 ^= a1 | ||
3780 | # asm 1: pxor <a1=int6464#6,<diag2=int6464#3 | ||
3781 | # asm 2: pxor <a1=%xmm5,<diag2=%xmm2 | ||
3782 | pxor %xmm5,%xmm2 | ||
3783 | |||
3784 | # qhasm: diag3 <<<= 32 | ||
3785 | # asm 1: pshufd $0x93,<diag3=int6464#4,<diag3=int6464#4 | ||
3786 | # asm 2: pshufd $0x93,<diag3=%xmm3,<diag3=%xmm3 | ||
3787 | pshufd $0x93,%xmm3,%xmm3 | ||
3788 | |||
3789 | # qhasm: diag2 ^= b1 | ||
3790 | # asm 1: pxor <b1=int6464#7,<diag2=int6464#3 | ||
3791 | # asm 2: pxor <b1=%xmm6,<diag2=%xmm2 | ||
3792 | pxor %xmm6,%xmm2 | ||
3793 | |||
3794 | # qhasm: uint32323232 a2 += diag2 | ||
3795 | # asm 1: paddd <diag2=int6464#3,<a2=int6464#5 | ||
3796 | # asm 2: paddd <diag2=%xmm2,<a2=%xmm4 | ||
3797 | paddd %xmm2,%xmm4 | ||
3798 | |||
3799 | # qhasm: a3 = diag2 | ||
3800 | # asm 1: movdqa <diag2=int6464#3,>a3=int6464#6 | ||
3801 | # asm 2: movdqa <diag2=%xmm2,>a3=%xmm5 | ||
3802 | movdqa %xmm2,%xmm5 | ||
3803 | |||
3804 | # qhasm: b2 = a2 | ||
3805 | # asm 1: movdqa <a2=int6464#5,>b2=int6464#7 | ||
3806 | # asm 2: movdqa <a2=%xmm4,>b2=%xmm6 | ||
3807 | movdqa %xmm4,%xmm6 | ||
3808 | |||
3809 | # qhasm: uint32323232 a2 <<= 13 | ||
3810 | # asm 1: pslld $13,<a2=int6464#5 | ||
3811 | # asm 2: pslld $13,<a2=%xmm4 | ||
3812 | pslld $13,%xmm4 | ||
3813 | |||
3814 | # qhasm: uint32323232 b2 >>= 19 | ||
3815 | # asm 1: psrld $19,<b2=int6464#7 | ||
3816 | # asm 2: psrld $19,<b2=%xmm6 | ||
3817 | psrld $19,%xmm6 | ||
3818 | |||
3819 | # qhasm: diag1 ^= a2 | ||
3820 | # asm 1: pxor <a2=int6464#5,<diag1=int6464#2 | ||
3821 | # asm 2: pxor <a2=%xmm4,<diag1=%xmm1 | ||
3822 | pxor %xmm4,%xmm1 | ||
3823 | |||
3824 | # qhasm: diag2 <<<= 64 | ||
3825 | # asm 1: pshufd $0x4e,<diag2=int6464#3,<diag2=int6464#3 | ||
3826 | # asm 2: pshufd $0x4e,<diag2=%xmm2,<diag2=%xmm2 | ||
3827 | pshufd $0x4e,%xmm2,%xmm2 | ||
3828 | |||
3829 | # qhasm: diag1 ^= b2 | ||
3830 | # asm 1: pxor <b2=int6464#7,<diag1=int6464#2 | ||
3831 | # asm 2: pxor <b2=%xmm6,<diag1=%xmm1 | ||
3832 | pxor %xmm6,%xmm1 | ||
3833 | |||
3834 | # qhasm: uint32323232 a3 += diag1 | ||
3835 | # asm 1: paddd <diag1=int6464#2,<a3=int6464#6 | ||
3836 | # asm 2: paddd <diag1=%xmm1,<a3=%xmm5 | ||
3837 | paddd %xmm1,%xmm5 | ||
3838 | |||
3839 | # qhasm: a4 = diag3 | ||
3840 | # asm 1: movdqa <diag3=int6464#4,>a4=int6464#5 | ||
3841 | # asm 2: movdqa <diag3=%xmm3,>a4=%xmm4 | ||
3842 | movdqa %xmm3,%xmm4 | ||
3843 | |||
3844 | # qhasm: b3 = a3 | ||
3845 | # asm 1: movdqa <a3=int6464#6,>b3=int6464#7 | ||
3846 | # asm 2: movdqa <a3=%xmm5,>b3=%xmm6 | ||
3847 | movdqa %xmm5,%xmm6 | ||
3848 | |||
3849 | # qhasm: uint32323232 a3 <<= 18 | ||
3850 | # asm 1: pslld $18,<a3=int6464#6 | ||
3851 | # asm 2: pslld $18,<a3=%xmm5 | ||
3852 | pslld $18,%xmm5 | ||
3853 | |||
3854 | # qhasm: uint32323232 b3 >>= 14 | ||
3855 | # asm 1: psrld $14,<b3=int6464#7 | ||
3856 | # asm 2: psrld $14,<b3=%xmm6 | ||
3857 | psrld $14,%xmm6 | ||
3858 | |||
3859 | # qhasm: diag0 ^= a3 | ||
3860 | # asm 1: pxor <a3=int6464#6,<diag0=int6464#1 | ||
3861 | # asm 2: pxor <a3=%xmm5,<diag0=%xmm0 | ||
3862 | pxor %xmm5,%xmm0 | ||
3863 | |||
3864 | # qhasm: diag1 <<<= 96 | ||
3865 | # asm 1: pshufd $0x39,<diag1=int6464#2,<diag1=int6464#2 | ||
3866 | # asm 2: pshufd $0x39,<diag1=%xmm1,<diag1=%xmm1 | ||
3867 | pshufd $0x39,%xmm1,%xmm1 | ||
3868 | |||
3869 | # qhasm: diag0 ^= b3 | ||
3870 | # asm 1: pxor <b3=int6464#7,<diag0=int6464#1 | ||
3871 | # asm 2: pxor <b3=%xmm6,<diag0=%xmm0 | ||
3872 | pxor %xmm6,%xmm0 | ||
3873 | |||
3874 | # qhasm: uint32323232 a4 += diag0 | ||
3875 | # asm 1: paddd <diag0=int6464#1,<a4=int6464#5 | ||
3876 | # asm 2: paddd <diag0=%xmm0,<a4=%xmm4 | ||
3877 | paddd %xmm0,%xmm4 | ||
3878 | |||
3879 | # qhasm: a5 = diag0 | ||
3880 | # asm 1: movdqa <diag0=int6464#1,>a5=int6464#6 | ||
3881 | # asm 2: movdqa <diag0=%xmm0,>a5=%xmm5 | ||
3882 | movdqa %xmm0,%xmm5 | ||
3883 | |||
3884 | # qhasm: b4 = a4 | ||
3885 | # asm 1: movdqa <a4=int6464#5,>b4=int6464#7 | ||
3886 | # asm 2: movdqa <a4=%xmm4,>b4=%xmm6 | ||
3887 | movdqa %xmm4,%xmm6 | ||
3888 | |||
3889 | # qhasm: uint32323232 a4 <<= 7 | ||
3890 | # asm 1: pslld $7,<a4=int6464#5 | ||
3891 | # asm 2: pslld $7,<a4=%xmm4 | ||
3892 | pslld $7,%xmm4 | ||
3893 | |||
3894 | # qhasm: uint32323232 b4 >>= 25 | ||
3895 | # asm 1: psrld $25,<b4=int6464#7 | ||
3896 | # asm 2: psrld $25,<b4=%xmm6 | ||
3897 | psrld $25,%xmm6 | ||
3898 | |||
3899 | # qhasm: diag1 ^= a4 | ||
3900 | # asm 1: pxor <a4=int6464#5,<diag1=int6464#2 | ||
3901 | # asm 2: pxor <a4=%xmm4,<diag1=%xmm1 | ||
3902 | pxor %xmm4,%xmm1 | ||
3903 | |||
3904 | # qhasm: diag1 ^= b4 | ||
3905 | # asm 1: pxor <b4=int6464#7,<diag1=int6464#2 | ||
3906 | # asm 2: pxor <b4=%xmm6,<diag1=%xmm1 | ||
3907 | pxor %xmm6,%xmm1 | ||
3908 | |||
3909 | # qhasm: uint32323232 a5 += diag1 | ||
3910 | # asm 1: paddd <diag1=int6464#2,<a5=int6464#6 | ||
3911 | # asm 2: paddd <diag1=%xmm1,<a5=%xmm5 | ||
3912 | paddd %xmm1,%xmm5 | ||
3913 | |||
3914 | # qhasm: a6 = diag1 | ||
3915 | # asm 1: movdqa <diag1=int6464#2,>a6=int6464#5 | ||
3916 | # asm 2: movdqa <diag1=%xmm1,>a6=%xmm4 | ||
3917 | movdqa %xmm1,%xmm4 | ||
3918 | |||
3919 | # qhasm: b5 = a5 | ||
3920 | # asm 1: movdqa <a5=int6464#6,>b5=int6464#7 | ||
3921 | # asm 2: movdqa <a5=%xmm5,>b5=%xmm6 | ||
3922 | movdqa %xmm5,%xmm6 | ||
3923 | |||
3924 | # qhasm: uint32323232 a5 <<= 9 | ||
3925 | # asm 1: pslld $9,<a5=int6464#6 | ||
3926 | # asm 2: pslld $9,<a5=%xmm5 | ||
3927 | pslld $9,%xmm5 | ||
3928 | |||
3929 | # qhasm: uint32323232 b5 >>= 23 | ||
3930 | # asm 1: psrld $23,<b5=int6464#7 | ||
3931 | # asm 2: psrld $23,<b5=%xmm6 | ||
3932 | psrld $23,%xmm6 | ||
3933 | |||
3934 | # qhasm: diag2 ^= a5 | ||
3935 | # asm 1: pxor <a5=int6464#6,<diag2=int6464#3 | ||
3936 | # asm 2: pxor <a5=%xmm5,<diag2=%xmm2 | ||
3937 | pxor %xmm5,%xmm2 | ||
3938 | |||
3939 | # qhasm: diag1 <<<= 32 | ||
3940 | # asm 1: pshufd $0x93,<diag1=int6464#2,<diag1=int6464#2 | ||
3941 | # asm 2: pshufd $0x93,<diag1=%xmm1,<diag1=%xmm1 | ||
3942 | pshufd $0x93,%xmm1,%xmm1 | ||
3943 | |||
3944 | # qhasm: diag2 ^= b5 | ||
3945 | # asm 1: pxor <b5=int6464#7,<diag2=int6464#3 | ||
3946 | # asm 2: pxor <b5=%xmm6,<diag2=%xmm2 | ||
3947 | pxor %xmm6,%xmm2 | ||
3948 | |||
3949 | # qhasm: uint32323232 a6 += diag2 | ||
3950 | # asm 1: paddd <diag2=int6464#3,<a6=int6464#5 | ||
3951 | # asm 2: paddd <diag2=%xmm2,<a6=%xmm4 | ||
3952 | paddd %xmm2,%xmm4 | ||
3953 | |||
3954 | # qhasm: a7 = diag2 | ||
3955 | # asm 1: movdqa <diag2=int6464#3,>a7=int6464#6 | ||
3956 | # asm 2: movdqa <diag2=%xmm2,>a7=%xmm5 | ||
3957 | movdqa %xmm2,%xmm5 | ||
3958 | |||
3959 | # qhasm: b6 = a6 | ||
3960 | # asm 1: movdqa <a6=int6464#5,>b6=int6464#7 | ||
3961 | # asm 2: movdqa <a6=%xmm4,>b6=%xmm6 | ||
3962 | movdqa %xmm4,%xmm6 | ||
3963 | |||
3964 | # qhasm: uint32323232 a6 <<= 13 | ||
3965 | # asm 1: pslld $13,<a6=int6464#5 | ||
3966 | # asm 2: pslld $13,<a6=%xmm4 | ||
3967 | pslld $13,%xmm4 | ||
3968 | |||
3969 | # qhasm: uint32323232 b6 >>= 19 | ||
3970 | # asm 1: psrld $19,<b6=int6464#7 | ||
3971 | # asm 2: psrld $19,<b6=%xmm6 | ||
3972 | psrld $19,%xmm6 | ||
3973 | |||
3974 | # qhasm: diag3 ^= a6 | ||
3975 | # asm 1: pxor <a6=int6464#5,<diag3=int6464#4 | ||
3976 | # asm 2: pxor <a6=%xmm4,<diag3=%xmm3 | ||
3977 | pxor %xmm4,%xmm3 | ||
3978 | |||
3979 | # qhasm: diag2 <<<= 64 | ||
3980 | # asm 1: pshufd $0x4e,<diag2=int6464#3,<diag2=int6464#3 | ||
3981 | # asm 2: pshufd $0x4e,<diag2=%xmm2,<diag2=%xmm2 | ||
3982 | pshufd $0x4e,%xmm2,%xmm2 | ||
3983 | |||
3984 | # qhasm: diag3 ^= b6 | ||
3985 | # asm 1: pxor <b6=int6464#7,<diag3=int6464#4 | ||
3986 | # asm 2: pxor <b6=%xmm6,<diag3=%xmm3 | ||
3987 | pxor %xmm6,%xmm3 | ||
3988 | |||
3989 | # qhasm: uint32323232 a7 += diag3 | ||
3990 | # asm 1: paddd <diag3=int6464#4,<a7=int6464#6 | ||
3991 | # asm 2: paddd <diag3=%xmm3,<a7=%xmm5 | ||
3992 | paddd %xmm3,%xmm5 | ||
3993 | |||
3994 | # qhasm: a0 = diag1 | ||
3995 | # asm 1: movdqa <diag1=int6464#2,>a0=int6464#5 | ||
3996 | # asm 2: movdqa <diag1=%xmm1,>a0=%xmm4 | ||
3997 | movdqa %xmm1,%xmm4 | ||
3998 | |||
3999 | # qhasm: b7 = a7 | ||
4000 | # asm 1: movdqa <a7=int6464#6,>b7=int6464#7 | ||
4001 | # asm 2: movdqa <a7=%xmm5,>b7=%xmm6 | ||
4002 | movdqa %xmm5,%xmm6 | ||
4003 | |||
4004 | # qhasm: uint32323232 a7 <<= 18 | ||
4005 | # asm 1: pslld $18,<a7=int6464#6 | ||
4006 | # asm 2: pslld $18,<a7=%xmm5 | ||
4007 | pslld $18,%xmm5 | ||
4008 | |||
4009 | # qhasm: uint32323232 b7 >>= 14 | ||
4010 | # asm 1: psrld $14,<b7=int6464#7 | ||
4011 | # asm 2: psrld $14,<b7=%xmm6 | ||
4012 | psrld $14,%xmm6 | ||
4013 | |||
4014 | # qhasm: diag0 ^= a7 | ||
4015 | # asm 1: pxor <a7=int6464#6,<diag0=int6464#1 | ||
4016 | # asm 2: pxor <a7=%xmm5,<diag0=%xmm0 | ||
4017 | pxor %xmm5,%xmm0 | ||
4018 | |||
4019 | # qhasm: diag3 <<<= 96 | ||
4020 | # asm 1: pshufd $0x39,<diag3=int6464#4,<diag3=int6464#4 | ||
4021 | # asm 2: pshufd $0x39,<diag3=%xmm3,<diag3=%xmm3 | ||
4022 | pshufd $0x39,%xmm3,%xmm3 | ||
4023 | |||
4024 | # qhasm: diag0 ^= b7 | ||
4025 | # asm 1: pxor <b7=int6464#7,<diag0=int6464#1 | ||
4026 | # asm 2: pxor <b7=%xmm6,<diag0=%xmm0 | ||
4027 | pxor %xmm6,%xmm0 | ||
4028 | |||
4029 | # qhasm: uint32323232 a0 += diag0 | ||
4030 | # asm 1: paddd <diag0=int6464#1,<a0=int6464#5 | ||
4031 | # asm 2: paddd <diag0=%xmm0,<a0=%xmm4 | ||
4032 | paddd %xmm0,%xmm4 | ||
4033 | |||
4034 | # qhasm: a1 = diag0 | ||
4035 | # asm 1: movdqa <diag0=int6464#1,>a1=int6464#6 | ||
4036 | # asm 2: movdqa <diag0=%xmm0,>a1=%xmm5 | ||
4037 | movdqa %xmm0,%xmm5 | ||
4038 | |||
4039 | # qhasm: b0 = a0 | ||
4040 | # asm 1: movdqa <a0=int6464#5,>b0=int6464#7 | ||
4041 | # asm 2: movdqa <a0=%xmm4,>b0=%xmm6 | ||
4042 | movdqa %xmm4,%xmm6 | ||
4043 | |||
4044 | # qhasm: uint32323232 a0 <<= 7 | ||
4045 | # asm 1: pslld $7,<a0=int6464#5 | ||
4046 | # asm 2: pslld $7,<a0=%xmm4 | ||
4047 | pslld $7,%xmm4 | ||
4048 | |||
4049 | # qhasm: uint32323232 b0 >>= 25 | ||
4050 | # asm 1: psrld $25,<b0=int6464#7 | ||
4051 | # asm 2: psrld $25,<b0=%xmm6 | ||
4052 | psrld $25,%xmm6 | ||
4053 | |||
4054 | # qhasm: diag3 ^= a0 | ||
4055 | # asm 1: pxor <a0=int6464#5,<diag3=int6464#4 | ||
4056 | # asm 2: pxor <a0=%xmm4,<diag3=%xmm3 | ||
4057 | pxor %xmm4,%xmm3 | ||
4058 | |||
4059 | # qhasm: diag3 ^= b0 | ||
4060 | # asm 1: pxor <b0=int6464#7,<diag3=int6464#4 | ||
4061 | # asm 2: pxor <b0=%xmm6,<diag3=%xmm3 | ||
4062 | pxor %xmm6,%xmm3 | ||
4063 | |||
4064 | # qhasm: uint32323232 a1 += diag3 | ||
4065 | # asm 1: paddd <diag3=int6464#4,<a1=int6464#6 | ||
4066 | # asm 2: paddd <diag3=%xmm3,<a1=%xmm5 | ||
4067 | paddd %xmm3,%xmm5 | ||
4068 | |||
4069 | # qhasm: a2 = diag3 | ||
4070 | # asm 1: movdqa <diag3=int6464#4,>a2=int6464#5 | ||
4071 | # asm 2: movdqa <diag3=%xmm3,>a2=%xmm4 | ||
4072 | movdqa %xmm3,%xmm4 | ||
4073 | |||
4074 | # qhasm: b1 = a1 | ||
4075 | # asm 1: movdqa <a1=int6464#6,>b1=int6464#7 | ||
4076 | # asm 2: movdqa <a1=%xmm5,>b1=%xmm6 | ||
4077 | movdqa %xmm5,%xmm6 | ||
4078 | |||
4079 | # qhasm: uint32323232 a1 <<= 9 | ||
4080 | # asm 1: pslld $9,<a1=int6464#6 | ||
4081 | # asm 2: pslld $9,<a1=%xmm5 | ||
4082 | pslld $9,%xmm5 | ||
4083 | |||
4084 | # qhasm: uint32323232 b1 >>= 23 | ||
4085 | # asm 1: psrld $23,<b1=int6464#7 | ||
4086 | # asm 2: psrld $23,<b1=%xmm6 | ||
4087 | psrld $23,%xmm6 | ||
4088 | |||
4089 | # qhasm: diag2 ^= a1 | ||
4090 | # asm 1: pxor <a1=int6464#6,<diag2=int6464#3 | ||
4091 | # asm 2: pxor <a1=%xmm5,<diag2=%xmm2 | ||
4092 | pxor %xmm5,%xmm2 | ||
4093 | |||
4094 | # qhasm: diag3 <<<= 32 | ||
4095 | # asm 1: pshufd $0x93,<diag3=int6464#4,<diag3=int6464#4 | ||
4096 | # asm 2: pshufd $0x93,<diag3=%xmm3,<diag3=%xmm3 | ||
4097 | pshufd $0x93,%xmm3,%xmm3 | ||
4098 | |||
4099 | # qhasm: diag2 ^= b1 | ||
4100 | # asm 1: pxor <b1=int6464#7,<diag2=int6464#3 | ||
4101 | # asm 2: pxor <b1=%xmm6,<diag2=%xmm2 | ||
4102 | pxor %xmm6,%xmm2 | ||
4103 | |||
4104 | # qhasm: uint32323232 a2 += diag2 | ||
4105 | # asm 1: paddd <diag2=int6464#3,<a2=int6464#5 | ||
4106 | # asm 2: paddd <diag2=%xmm2,<a2=%xmm4 | ||
4107 | paddd %xmm2,%xmm4 | ||
4108 | |||
4109 | # qhasm: a3 = diag2 | ||
4110 | # asm 1: movdqa <diag2=int6464#3,>a3=int6464#6 | ||
4111 | # asm 2: movdqa <diag2=%xmm2,>a3=%xmm5 | ||
4112 | movdqa %xmm2,%xmm5 | ||
4113 | |||
4114 | # qhasm: b2 = a2 | ||
4115 | # asm 1: movdqa <a2=int6464#5,>b2=int6464#7 | ||
4116 | # asm 2: movdqa <a2=%xmm4,>b2=%xmm6 | ||
4117 | movdqa %xmm4,%xmm6 | ||
4118 | |||
4119 | # qhasm: uint32323232 a2 <<= 13 | ||
4120 | # asm 1: pslld $13,<a2=int6464#5 | ||
4121 | # asm 2: pslld $13,<a2=%xmm4 | ||
4122 | pslld $13,%xmm4 | ||
4123 | |||
4124 | # qhasm: uint32323232 b2 >>= 19 | ||
4125 | # asm 1: psrld $19,<b2=int6464#7 | ||
4126 | # asm 2: psrld $19,<b2=%xmm6 | ||
4127 | psrld $19,%xmm6 | ||
4128 | |||
4129 | # qhasm: diag1 ^= a2 | ||
4130 | # asm 1: pxor <a2=int6464#5,<diag1=int6464#2 | ||
4131 | # asm 2: pxor <a2=%xmm4,<diag1=%xmm1 | ||
4132 | pxor %xmm4,%xmm1 | ||
4133 | |||
4134 | # qhasm: diag2 <<<= 64 | ||
4135 | # asm 1: pshufd $0x4e,<diag2=int6464#3,<diag2=int6464#3 | ||
4136 | # asm 2: pshufd $0x4e,<diag2=%xmm2,<diag2=%xmm2 | ||
4137 | pshufd $0x4e,%xmm2,%xmm2 | ||
4138 | |||
4139 | # qhasm: diag1 ^= b2 | ||
4140 | # asm 1: pxor <b2=int6464#7,<diag1=int6464#2 | ||
4141 | # asm 2: pxor <b2=%xmm6,<diag1=%xmm1 | ||
4142 | pxor %xmm6,%xmm1 | ||
4143 | |||
4144 | # qhasm: uint32323232 a3 += diag1 | ||
4145 | # asm 1: paddd <diag1=int6464#2,<a3=int6464#6 | ||
4146 | # asm 2: paddd <diag1=%xmm1,<a3=%xmm5 | ||
4147 | paddd %xmm1,%xmm5 | ||
4148 | |||
4149 | # qhasm: a4 = diag3 | ||
4150 | # asm 1: movdqa <diag3=int6464#4,>a4=int6464#5 | ||
4151 | # asm 2: movdqa <diag3=%xmm3,>a4=%xmm4 | ||
4152 | movdqa %xmm3,%xmm4 | ||
4153 | |||
4154 | # qhasm: b3 = a3 | ||
4155 | # asm 1: movdqa <a3=int6464#6,>b3=int6464#7 | ||
4156 | # asm 2: movdqa <a3=%xmm5,>b3=%xmm6 | ||
4157 | movdqa %xmm5,%xmm6 | ||
4158 | |||
4159 | # qhasm: uint32323232 a3 <<= 18 | ||
4160 | # asm 1: pslld $18,<a3=int6464#6 | ||
4161 | # asm 2: pslld $18,<a3=%xmm5 | ||
4162 | pslld $18,%xmm5 | ||
4163 | |||
4164 | # qhasm: uint32323232 b3 >>= 14 | ||
4165 | # asm 1: psrld $14,<b3=int6464#7 | ||
4166 | # asm 2: psrld $14,<b3=%xmm6 | ||
4167 | psrld $14,%xmm6 | ||
4168 | |||
4169 | # qhasm: diag0 ^= a3 | ||
4170 | # asm 1: pxor <a3=int6464#6,<diag0=int6464#1 | ||
4171 | # asm 2: pxor <a3=%xmm5,<diag0=%xmm0 | ||
4172 | pxor %xmm5,%xmm0 | ||
4173 | |||
4174 | # qhasm: diag1 <<<= 96 | ||
4175 | # asm 1: pshufd $0x39,<diag1=int6464#2,<diag1=int6464#2 | ||
4176 | # asm 2: pshufd $0x39,<diag1=%xmm1,<diag1=%xmm1 | ||
4177 | pshufd $0x39,%xmm1,%xmm1 | ||
4178 | |||
4179 | # qhasm: diag0 ^= b3 | ||
4180 | # asm 1: pxor <b3=int6464#7,<diag0=int6464#1 | ||
4181 | # asm 2: pxor <b3=%xmm6,<diag0=%xmm0 | ||
4182 | pxor %xmm6,%xmm0 | ||
4183 | |||
4184 | # qhasm: uint32323232 a4 += diag0 | ||
4185 | # asm 1: paddd <diag0=int6464#1,<a4=int6464#5 | ||
4186 | # asm 2: paddd <diag0=%xmm0,<a4=%xmm4 | ||
4187 | paddd %xmm0,%xmm4 | ||
4188 | |||
4189 | # qhasm: a5 = diag0 | ||
4190 | # asm 1: movdqa <diag0=int6464#1,>a5=int6464#6 | ||
4191 | # asm 2: movdqa <diag0=%xmm0,>a5=%xmm5 | ||
4192 | movdqa %xmm0,%xmm5 | ||
4193 | |||
4194 | # qhasm: b4 = a4 | ||
4195 | # asm 1: movdqa <a4=int6464#5,>b4=int6464#7 | ||
4196 | # asm 2: movdqa <a4=%xmm4,>b4=%xmm6 | ||
4197 | movdqa %xmm4,%xmm6 | ||
4198 | |||
4199 | # qhasm: uint32323232 a4 <<= 7 | ||
4200 | # asm 1: pslld $7,<a4=int6464#5 | ||
4201 | # asm 2: pslld $7,<a4=%xmm4 | ||
4202 | pslld $7,%xmm4 | ||
4203 | |||
4204 | # qhasm: uint32323232 b4 >>= 25 | ||
4205 | # asm 1: psrld $25,<b4=int6464#7 | ||
4206 | # asm 2: psrld $25,<b4=%xmm6 | ||
4207 | psrld $25,%xmm6 | ||
4208 | |||
4209 | # qhasm: diag1 ^= a4 | ||
4210 | # asm 1: pxor <a4=int6464#5,<diag1=int6464#2 | ||
4211 | # asm 2: pxor <a4=%xmm4,<diag1=%xmm1 | ||
4212 | pxor %xmm4,%xmm1 | ||
4213 | |||
4214 | # qhasm: diag1 ^= b4 | ||
4215 | # asm 1: pxor <b4=int6464#7,<diag1=int6464#2 | ||
4216 | # asm 2: pxor <b4=%xmm6,<diag1=%xmm1 | ||
4217 | pxor %xmm6,%xmm1 | ||
4218 | |||
4219 | # qhasm: uint32323232 a5 += diag1 | ||
4220 | # asm 1: paddd <diag1=int6464#2,<a5=int6464#6 | ||
4221 | # asm 2: paddd <diag1=%xmm1,<a5=%xmm5 | ||
4222 | paddd %xmm1,%xmm5 | ||
4223 | |||
4224 | # qhasm: a6 = diag1 | ||
4225 | # asm 1: movdqa <diag1=int6464#2,>a6=int6464#5 | ||
4226 | # asm 2: movdqa <diag1=%xmm1,>a6=%xmm4 | ||
4227 | movdqa %xmm1,%xmm4 | ||
4228 | |||
4229 | # qhasm: b5 = a5 | ||
4230 | # asm 1: movdqa <a5=int6464#6,>b5=int6464#7 | ||
4231 | # asm 2: movdqa <a5=%xmm5,>b5=%xmm6 | ||
4232 | movdqa %xmm5,%xmm6 | ||
4233 | |||
4234 | # qhasm: uint32323232 a5 <<= 9 | ||
4235 | # asm 1: pslld $9,<a5=int6464#6 | ||
4236 | # asm 2: pslld $9,<a5=%xmm5 | ||
4237 | pslld $9,%xmm5 | ||
4238 | |||
4239 | # qhasm: uint32323232 b5 >>= 23 | ||
4240 | # asm 1: psrld $23,<b5=int6464#7 | ||
4241 | # asm 2: psrld $23,<b5=%xmm6 | ||
4242 | psrld $23,%xmm6 | ||
4243 | |||
4244 | # qhasm: diag2 ^= a5 | ||
4245 | # asm 1: pxor <a5=int6464#6,<diag2=int6464#3 | ||
4246 | # asm 2: pxor <a5=%xmm5,<diag2=%xmm2 | ||
4247 | pxor %xmm5,%xmm2 | ||
4248 | |||
4249 | # qhasm: diag1 <<<= 32 | ||
4250 | # asm 1: pshufd $0x93,<diag1=int6464#2,<diag1=int6464#2 | ||
4251 | # asm 2: pshufd $0x93,<diag1=%xmm1,<diag1=%xmm1 | ||
4252 | pshufd $0x93,%xmm1,%xmm1 | ||
4253 | |||
4254 | # qhasm: diag2 ^= b5 | ||
4255 | # asm 1: pxor <b5=int6464#7,<diag2=int6464#3 | ||
4256 | # asm 2: pxor <b5=%xmm6,<diag2=%xmm2 | ||
4257 | pxor %xmm6,%xmm2 | ||
4258 | |||
4259 | # qhasm: uint32323232 a6 += diag2 | ||
4260 | # asm 1: paddd <diag2=int6464#3,<a6=int6464#5 | ||
4261 | # asm 2: paddd <diag2=%xmm2,<a6=%xmm4 | ||
4262 | paddd %xmm2,%xmm4 | ||
4263 | |||
4264 | # qhasm: a7 = diag2 | ||
4265 | # asm 1: movdqa <diag2=int6464#3,>a7=int6464#6 | ||
4266 | # asm 2: movdqa <diag2=%xmm2,>a7=%xmm5 | ||
4267 | movdqa %xmm2,%xmm5 | ||
4268 | |||
4269 | # qhasm: b6 = a6 | ||
4270 | # asm 1: movdqa <a6=int6464#5,>b6=int6464#7 | ||
4271 | # asm 2: movdqa <a6=%xmm4,>b6=%xmm6 | ||
4272 | movdqa %xmm4,%xmm6 | ||
4273 | |||
4274 | # qhasm: uint32323232 a6 <<= 13 | ||
4275 | # asm 1: pslld $13,<a6=int6464#5 | ||
4276 | # asm 2: pslld $13,<a6=%xmm4 | ||
4277 | pslld $13,%xmm4 | ||
4278 | |||
4279 | # qhasm: uint32323232 b6 >>= 19 | ||
4280 | # asm 1: psrld $19,<b6=int6464#7 | ||
4281 | # asm 2: psrld $19,<b6=%xmm6 | ||
4282 | psrld $19,%xmm6 | ||
4283 | |||
4284 | # qhasm: diag3 ^= a6 | ||
4285 | # asm 1: pxor <a6=int6464#5,<diag3=int6464#4 | ||
4286 | # asm 2: pxor <a6=%xmm4,<diag3=%xmm3 | ||
4287 | pxor %xmm4,%xmm3 | ||
4288 | |||
4289 | # qhasm: diag2 <<<= 64 | ||
4290 | # asm 1: pshufd $0x4e,<diag2=int6464#3,<diag2=int6464#3 | ||
4291 | # asm 2: pshufd $0x4e,<diag2=%xmm2,<diag2=%xmm2 | ||
4292 | pshufd $0x4e,%xmm2,%xmm2 | ||
4293 | |||
4294 | # qhasm: diag3 ^= b6 | ||
4295 | # asm 1: pxor <b6=int6464#7,<diag3=int6464#4 | ||
4296 | # asm 2: pxor <b6=%xmm6,<diag3=%xmm3 | ||
4297 | pxor %xmm6,%xmm3 | ||
4298 | |||
4299 | # qhasm: unsigned>? i -= 4 | ||
4300 | # asm 1: sub $4,<i=int64#4 | ||
4301 | # asm 2: sub $4,<i=%rcx | ||
4302 | sub $4,%rcx | ||
4303 | |||
4304 | # qhasm: uint32323232 a7 += diag3 | ||
4305 | # asm 1: paddd <diag3=int6464#4,<a7=int6464#6 | ||
4306 | # asm 2: paddd <diag3=%xmm3,<a7=%xmm5 | ||
4307 | paddd %xmm3,%xmm5 | ||
4308 | |||
4309 | # qhasm: a0 = diag1 | ||
4310 | # asm 1: movdqa <diag1=int6464#2,>a0=int6464#5 | ||
4311 | # asm 2: movdqa <diag1=%xmm1,>a0=%xmm4 | ||
4312 | movdqa %xmm1,%xmm4 | ||
4313 | |||
4314 | # qhasm: b7 = a7 | ||
4315 | # asm 1: movdqa <a7=int6464#6,>b7=int6464#7 | ||
4316 | # asm 2: movdqa <a7=%xmm5,>b7=%xmm6 | ||
4317 | movdqa %xmm5,%xmm6 | ||
4318 | |||
4319 | # qhasm: uint32323232 a7 <<= 18 | ||
4320 | # asm 1: pslld $18,<a7=int6464#6 | ||
4321 | # asm 2: pslld $18,<a7=%xmm5 | ||
4322 | pslld $18,%xmm5 | ||
4323 | |||
4324 | # qhasm: b0 = 0 | ||
4325 | # asm 1: pxor >b0=int6464#8,>b0=int6464#8 | ||
4326 | # asm 2: pxor >b0=%xmm7,>b0=%xmm7 | ||
4327 | pxor %xmm7,%xmm7 | ||
4328 | |||
4329 | # qhasm: uint32323232 b7 >>= 14 | ||
4330 | # asm 1: psrld $14,<b7=int6464#7 | ||
4331 | # asm 2: psrld $14,<b7=%xmm6 | ||
4332 | psrld $14,%xmm6 | ||
4333 | |||
4334 | # qhasm: diag0 ^= a7 | ||
4335 | # asm 1: pxor <a7=int6464#6,<diag0=int6464#1 | ||
4336 | # asm 2: pxor <a7=%xmm5,<diag0=%xmm0 | ||
4337 | pxor %xmm5,%xmm0 | ||
4338 | |||
4339 | # qhasm: diag3 <<<= 96 | ||
4340 | # asm 1: pshufd $0x39,<diag3=int6464#4,<diag3=int6464#4 | ||
4341 | # asm 2: pshufd $0x39,<diag3=%xmm3,<diag3=%xmm3 | ||
4342 | pshufd $0x39,%xmm3,%xmm3 | ||
4343 | |||
4344 | # qhasm: diag0 ^= b7 | ||
4345 | # asm 1: pxor <b7=int6464#7,<diag0=int6464#1 | ||
4346 | # asm 2: pxor <b7=%xmm6,<diag0=%xmm0 | ||
4347 | pxor %xmm6,%xmm0 | ||
4348 | # comment:fp stack unchanged by jump | ||
4349 | |||
4350 | # qhasm: goto mainloop2 if unsigned> | ||
4351 | ja ._mainloop2 | ||
4352 | |||
4353 | # qhasm: uint32323232 diag0 += x0 | ||
4354 | # asm 1: paddd <x0=stack128#4,<diag0=int6464#1 | ||
4355 | # asm 2: paddd <x0=48(%rsp),<diag0=%xmm0 | ||
4356 | paddd 48(%rsp),%xmm0 | ||
4357 | |||
4358 | # qhasm: uint32323232 diag1 += x1 | ||
4359 | # asm 1: paddd <x1=stack128#1,<diag1=int6464#2 | ||
4360 | # asm 2: paddd <x1=0(%rsp),<diag1=%xmm1 | ||
4361 | paddd 0(%rsp),%xmm1 | ||
4362 | |||
4363 | # qhasm: uint32323232 diag2 += x2 | ||
4364 | # asm 1: paddd <x2=stack128#2,<diag2=int6464#3 | ||
4365 | # asm 2: paddd <x2=16(%rsp),<diag2=%xmm2 | ||
4366 | paddd 16(%rsp),%xmm2 | ||
4367 | |||
4368 | # qhasm: uint32323232 diag3 += x3 | ||
4369 | # asm 1: paddd <x3=stack128#3,<diag3=int6464#4 | ||
4370 | # asm 2: paddd <x3=32(%rsp),<diag3=%xmm3 | ||
4371 | paddd 32(%rsp),%xmm3 | ||
4372 | |||
4373 | # qhasm: in0 = diag0 | ||
4374 | # asm 1: movd <diag0=int6464#1,>in0=int64#4 | ||
4375 | # asm 2: movd <diag0=%xmm0,>in0=%rcx | ||
4376 | movd %xmm0,%rcx | ||
4377 | |||
4378 | # qhasm: in12 = diag1 | ||
4379 | # asm 1: movd <diag1=int6464#2,>in12=int64#5 | ||
4380 | # asm 2: movd <diag1=%xmm1,>in12=%r8 | ||
4381 | movd %xmm1,%r8 | ||
4382 | |||
4383 | # qhasm: in8 = diag2 | ||
4384 | # asm 1: movd <diag2=int6464#3,>in8=int64#6 | ||
4385 | # asm 2: movd <diag2=%xmm2,>in8=%r9 | ||
4386 | movd %xmm2,%r9 | ||
4387 | |||
4388 | # qhasm: in4 = diag3 | ||
4389 | # asm 1: movd <diag3=int6464#4,>in4=int64#7 | ||
4390 | # asm 2: movd <diag3=%xmm3,>in4=%rax | ||
4391 | movd %xmm3,%rax | ||
4392 | |||
4393 | # qhasm: diag0 <<<= 96 | ||
4394 | # asm 1: pshufd $0x39,<diag0=int6464#1,<diag0=int6464#1 | ||
4395 | # asm 2: pshufd $0x39,<diag0=%xmm0,<diag0=%xmm0 | ||
4396 | pshufd $0x39,%xmm0,%xmm0 | ||
4397 | |||
4398 | # qhasm: diag1 <<<= 96 | ||
4399 | # asm 1: pshufd $0x39,<diag1=int6464#2,<diag1=int6464#2 | ||
4400 | # asm 2: pshufd $0x39,<diag1=%xmm1,<diag1=%xmm1 | ||
4401 | pshufd $0x39,%xmm1,%xmm1 | ||
4402 | |||
4403 | # qhasm: diag2 <<<= 96 | ||
4404 | # asm 1: pshufd $0x39,<diag2=int6464#3,<diag2=int6464#3 | ||
4405 | # asm 2: pshufd $0x39,<diag2=%xmm2,<diag2=%xmm2 | ||
4406 | pshufd $0x39,%xmm2,%xmm2 | ||
4407 | |||
4408 | # qhasm: diag3 <<<= 96 | ||
4409 | # asm 1: pshufd $0x39,<diag3=int6464#4,<diag3=int6464#4 | ||
4410 | # asm 2: pshufd $0x39,<diag3=%xmm3,<diag3=%xmm3 | ||
4411 | pshufd $0x39,%xmm3,%xmm3 | ||
4412 | |||
4413 | # qhasm: (uint32) in0 ^= *(uint32 *) (m + 0) | ||
4414 | # asm 1: xorl 0(<m=int64#2),<in0=int64#4d | ||
4415 | # asm 2: xorl 0(<m=%rsi),<in0=%ecx | ||
4416 | xorl 0(%rsi),%ecx | ||
4417 | |||
4418 | # qhasm: (uint32) in12 ^= *(uint32 *) (m + 48) | ||
4419 | # asm 1: xorl 48(<m=int64#2),<in12=int64#5d | ||
4420 | # asm 2: xorl 48(<m=%rsi),<in12=%r8d | ||
4421 | xorl 48(%rsi),%r8d | ||
4422 | |||
4423 | # qhasm: (uint32) in8 ^= *(uint32 *) (m + 32) | ||
4424 | # asm 1: xorl 32(<m=int64#2),<in8=int64#6d | ||
4425 | # asm 2: xorl 32(<m=%rsi),<in8=%r9d | ||
4426 | xorl 32(%rsi),%r9d | ||
4427 | |||
4428 | # qhasm: (uint32) in4 ^= *(uint32 *) (m + 16) | ||
4429 | # asm 1: xorl 16(<m=int64#2),<in4=int64#7d | ||
4430 | # asm 2: xorl 16(<m=%rsi),<in4=%eax | ||
4431 | xorl 16(%rsi),%eax | ||
4432 | |||
4433 | # qhasm: *(uint32 *) (out + 0) = in0 | ||
4434 | # asm 1: movl <in0=int64#4d,0(<out=int64#1) | ||
4435 | # asm 2: movl <in0=%ecx,0(<out=%rdi) | ||
4436 | movl %ecx,0(%rdi) | ||
4437 | |||
4438 | # qhasm: *(uint32 *) (out + 48) = in12 | ||
4439 | # asm 1: movl <in12=int64#5d,48(<out=int64#1) | ||
4440 | # asm 2: movl <in12=%r8d,48(<out=%rdi) | ||
4441 | movl %r8d,48(%rdi) | ||
4442 | |||
4443 | # qhasm: *(uint32 *) (out + 32) = in8 | ||
4444 | # asm 1: movl <in8=int64#6d,32(<out=int64#1) | ||
4445 | # asm 2: movl <in8=%r9d,32(<out=%rdi) | ||
4446 | movl %r9d,32(%rdi) | ||
4447 | |||
4448 | # qhasm: *(uint32 *) (out + 16) = in4 | ||
4449 | # asm 1: movl <in4=int64#7d,16(<out=int64#1) | ||
4450 | # asm 2: movl <in4=%eax,16(<out=%rdi) | ||
4451 | movl %eax,16(%rdi) | ||
4452 | |||
4453 | # qhasm: in5 = diag0 | ||
4454 | # asm 1: movd <diag0=int6464#1,>in5=int64#4 | ||
4455 | # asm 2: movd <diag0=%xmm0,>in5=%rcx | ||
4456 | movd %xmm0,%rcx | ||
4457 | |||
4458 | # qhasm: in1 = diag1 | ||
4459 | # asm 1: movd <diag1=int6464#2,>in1=int64#5 | ||
4460 | # asm 2: movd <diag1=%xmm1,>in1=%r8 | ||
4461 | movd %xmm1,%r8 | ||
4462 | |||
4463 | # qhasm: in13 = diag2 | ||
4464 | # asm 1: movd <diag2=int6464#3,>in13=int64#6 | ||
4465 | # asm 2: movd <diag2=%xmm2,>in13=%r9 | ||
4466 | movd %xmm2,%r9 | ||
4467 | |||
4468 | # qhasm: in9 = diag3 | ||
4469 | # asm 1: movd <diag3=int6464#4,>in9=int64#7 | ||
4470 | # asm 2: movd <diag3=%xmm3,>in9=%rax | ||
4471 | movd %xmm3,%rax | ||
4472 | |||
4473 | # qhasm: diag0 <<<= 96 | ||
4474 | # asm 1: pshufd $0x39,<diag0=int6464#1,<diag0=int6464#1 | ||
4475 | # asm 2: pshufd $0x39,<diag0=%xmm0,<diag0=%xmm0 | ||
4476 | pshufd $0x39,%xmm0,%xmm0 | ||
4477 | |||
4478 | # qhasm: diag1 <<<= 96 | ||
4479 | # asm 1: pshufd $0x39,<diag1=int6464#2,<diag1=int6464#2 | ||
4480 | # asm 2: pshufd $0x39,<diag1=%xmm1,<diag1=%xmm1 | ||
4481 | pshufd $0x39,%xmm1,%xmm1 | ||
4482 | |||
4483 | # qhasm: diag2 <<<= 96 | ||
4484 | # asm 1: pshufd $0x39,<diag2=int6464#3,<diag2=int6464#3 | ||
4485 | # asm 2: pshufd $0x39,<diag2=%xmm2,<diag2=%xmm2 | ||
4486 | pshufd $0x39,%xmm2,%xmm2 | ||
4487 | |||
4488 | # qhasm: diag3 <<<= 96 | ||
4489 | # asm 1: pshufd $0x39,<diag3=int6464#4,<diag3=int6464#4 | ||
4490 | # asm 2: pshufd $0x39,<diag3=%xmm3,<diag3=%xmm3 | ||
4491 | pshufd $0x39,%xmm3,%xmm3 | ||
4492 | |||
4493 | # qhasm: (uint32) in5 ^= *(uint32 *) (m + 20) | ||
4494 | # asm 1: xorl 20(<m=int64#2),<in5=int64#4d | ||
4495 | # asm 2: xorl 20(<m=%rsi),<in5=%ecx | ||
4496 | xorl 20(%rsi),%ecx | ||
4497 | |||
4498 | # qhasm: (uint32) in1 ^= *(uint32 *) (m + 4) | ||
4499 | # asm 1: xorl 4(<m=int64#2),<in1=int64#5d | ||
4500 | # asm 2: xorl 4(<m=%rsi),<in1=%r8d | ||
4501 | xorl 4(%rsi),%r8d | ||
4502 | |||
4503 | # qhasm: (uint32) in13 ^= *(uint32 *) (m + 52) | ||
4504 | # asm 1: xorl 52(<m=int64#2),<in13=int64#6d | ||
4505 | # asm 2: xorl 52(<m=%rsi),<in13=%r9d | ||
4506 | xorl 52(%rsi),%r9d | ||
4507 | |||
4508 | # qhasm: (uint32) in9 ^= *(uint32 *) (m + 36) | ||
4509 | # asm 1: xorl 36(<m=int64#2),<in9=int64#7d | ||
4510 | # asm 2: xorl 36(<m=%rsi),<in9=%eax | ||
4511 | xorl 36(%rsi),%eax | ||
4512 | |||
4513 | # qhasm: *(uint32 *) (out + 20) = in5 | ||
4514 | # asm 1: movl <in5=int64#4d,20(<out=int64#1) | ||
4515 | # asm 2: movl <in5=%ecx,20(<out=%rdi) | ||
4516 | movl %ecx,20(%rdi) | ||
4517 | |||
4518 | # qhasm: *(uint32 *) (out + 4) = in1 | ||
4519 | # asm 1: movl <in1=int64#5d,4(<out=int64#1) | ||
4520 | # asm 2: movl <in1=%r8d,4(<out=%rdi) | ||
4521 | movl %r8d,4(%rdi) | ||
4522 | |||
4523 | # qhasm: *(uint32 *) (out + 52) = in13 | ||
4524 | # asm 1: movl <in13=int64#6d,52(<out=int64#1) | ||
4525 | # asm 2: movl <in13=%r9d,52(<out=%rdi) | ||
4526 | movl %r9d,52(%rdi) | ||
4527 | |||
4528 | # qhasm: *(uint32 *) (out + 36) = in9 | ||
4529 | # asm 1: movl <in9=int64#7d,36(<out=int64#1) | ||
4530 | # asm 2: movl <in9=%eax,36(<out=%rdi) | ||
4531 | movl %eax,36(%rdi) | ||
4532 | |||
4533 | # qhasm: in10 = diag0 | ||
4534 | # asm 1: movd <diag0=int6464#1,>in10=int64#4 | ||
4535 | # asm 2: movd <diag0=%xmm0,>in10=%rcx | ||
4536 | movd %xmm0,%rcx | ||
4537 | |||
4538 | # qhasm: in6 = diag1 | ||
4539 | # asm 1: movd <diag1=int6464#2,>in6=int64#5 | ||
4540 | # asm 2: movd <diag1=%xmm1,>in6=%r8 | ||
4541 | movd %xmm1,%r8 | ||
4542 | |||
4543 | # qhasm: in2 = diag2 | ||
4544 | # asm 1: movd <diag2=int6464#3,>in2=int64#6 | ||
4545 | # asm 2: movd <diag2=%xmm2,>in2=%r9 | ||
4546 | movd %xmm2,%r9 | ||
4547 | |||
4548 | # qhasm: in14 = diag3 | ||
4549 | # asm 1: movd <diag3=int6464#4,>in14=int64#7 | ||
4550 | # asm 2: movd <diag3=%xmm3,>in14=%rax | ||
4551 | movd %xmm3,%rax | ||
4552 | |||
4553 | # qhasm: diag0 <<<= 96 | ||
4554 | # asm 1: pshufd $0x39,<diag0=int6464#1,<diag0=int6464#1 | ||
4555 | # asm 2: pshufd $0x39,<diag0=%xmm0,<diag0=%xmm0 | ||
4556 | pshufd $0x39,%xmm0,%xmm0 | ||
4557 | |||
4558 | # qhasm: diag1 <<<= 96 | ||
4559 | # asm 1: pshufd $0x39,<diag1=int6464#2,<diag1=int6464#2 | ||
4560 | # asm 2: pshufd $0x39,<diag1=%xmm1,<diag1=%xmm1 | ||
4561 | pshufd $0x39,%xmm1,%xmm1 | ||
4562 | |||
4563 | # qhasm: diag2 <<<= 96 | ||
4564 | # asm 1: pshufd $0x39,<diag2=int6464#3,<diag2=int6464#3 | ||
4565 | # asm 2: pshufd $0x39,<diag2=%xmm2,<diag2=%xmm2 | ||
4566 | pshufd $0x39,%xmm2,%xmm2 | ||
4567 | |||
4568 | # qhasm: diag3 <<<= 96 | ||
4569 | # asm 1: pshufd $0x39,<diag3=int6464#4,<diag3=int6464#4 | ||
4570 | # asm 2: pshufd $0x39,<diag3=%xmm3,<diag3=%xmm3 | ||
4571 | pshufd $0x39,%xmm3,%xmm3 | ||
4572 | |||
4573 | # qhasm: (uint32) in10 ^= *(uint32 *) (m + 40) | ||
4574 | # asm 1: xorl 40(<m=int64#2),<in10=int64#4d | ||
4575 | # asm 2: xorl 40(<m=%rsi),<in10=%ecx | ||
4576 | xorl 40(%rsi),%ecx | ||
4577 | |||
4578 | # qhasm: (uint32) in6 ^= *(uint32 *) (m + 24) | ||
4579 | # asm 1: xorl 24(<m=int64#2),<in6=int64#5d | ||
4580 | # asm 2: xorl 24(<m=%rsi),<in6=%r8d | ||
4581 | xorl 24(%rsi),%r8d | ||
4582 | |||
4583 | # qhasm: (uint32) in2 ^= *(uint32 *) (m + 8) | ||
4584 | # asm 1: xorl 8(<m=int64#2),<in2=int64#6d | ||
4585 | # asm 2: xorl 8(<m=%rsi),<in2=%r9d | ||
4586 | xorl 8(%rsi),%r9d | ||
4587 | |||
4588 | # qhasm: (uint32) in14 ^= *(uint32 *) (m + 56) | ||
4589 | # asm 1: xorl 56(<m=int64#2),<in14=int64#7d | ||
4590 | # asm 2: xorl 56(<m=%rsi),<in14=%eax | ||
4591 | xorl 56(%rsi),%eax | ||
4592 | |||
4593 | # qhasm: *(uint32 *) (out + 40) = in10 | ||
4594 | # asm 1: movl <in10=int64#4d,40(<out=int64#1) | ||
4595 | # asm 2: movl <in10=%ecx,40(<out=%rdi) | ||
4596 | movl %ecx,40(%rdi) | ||
4597 | |||
4598 | # qhasm: *(uint32 *) (out + 24) = in6 | ||
4599 | # asm 1: movl <in6=int64#5d,24(<out=int64#1) | ||
4600 | # asm 2: movl <in6=%r8d,24(<out=%rdi) | ||
4601 | movl %r8d,24(%rdi) | ||
4602 | |||
4603 | # qhasm: *(uint32 *) (out + 8) = in2 | ||
4604 | # asm 1: movl <in2=int64#6d,8(<out=int64#1) | ||
4605 | # asm 2: movl <in2=%r9d,8(<out=%rdi) | ||
4606 | movl %r9d,8(%rdi) | ||
4607 | |||
4608 | # qhasm: *(uint32 *) (out + 56) = in14 | ||
4609 | # asm 1: movl <in14=int64#7d,56(<out=int64#1) | ||
4610 | # asm 2: movl <in14=%eax,56(<out=%rdi) | ||
4611 | movl %eax,56(%rdi) | ||
4612 | |||
4613 | # qhasm: in15 = diag0 | ||
4614 | # asm 1: movd <diag0=int6464#1,>in15=int64#4 | ||
4615 | # asm 2: movd <diag0=%xmm0,>in15=%rcx | ||
4616 | movd %xmm0,%rcx | ||
4617 | |||
4618 | # qhasm: in11 = diag1 | ||
4619 | # asm 1: movd <diag1=int6464#2,>in11=int64#5 | ||
4620 | # asm 2: movd <diag1=%xmm1,>in11=%r8 | ||
4621 | movd %xmm1,%r8 | ||
4622 | |||
4623 | # qhasm: in7 = diag2 | ||
4624 | # asm 1: movd <diag2=int6464#3,>in7=int64#6 | ||
4625 | # asm 2: movd <diag2=%xmm2,>in7=%r9 | ||
4626 | movd %xmm2,%r9 | ||
4627 | |||
4628 | # qhasm: in3 = diag3 | ||
4629 | # asm 1: movd <diag3=int6464#4,>in3=int64#7 | ||
4630 | # asm 2: movd <diag3=%xmm3,>in3=%rax | ||
4631 | movd %xmm3,%rax | ||
4632 | |||
4633 | # qhasm: (uint32) in15 ^= *(uint32 *) (m + 60) | ||
4634 | # asm 1: xorl 60(<m=int64#2),<in15=int64#4d | ||
4635 | # asm 2: xorl 60(<m=%rsi),<in15=%ecx | ||
4636 | xorl 60(%rsi),%ecx | ||
4637 | |||
4638 | # qhasm: (uint32) in11 ^= *(uint32 *) (m + 44) | ||
4639 | # asm 1: xorl 44(<m=int64#2),<in11=int64#5d | ||
4640 | # asm 2: xorl 44(<m=%rsi),<in11=%r8d | ||
4641 | xorl 44(%rsi),%r8d | ||
4642 | |||
4643 | # qhasm: (uint32) in7 ^= *(uint32 *) (m + 28) | ||
4644 | # asm 1: xorl 28(<m=int64#2),<in7=int64#6d | ||
4645 | # asm 2: xorl 28(<m=%rsi),<in7=%r9d | ||
4646 | xorl 28(%rsi),%r9d | ||
4647 | |||
4648 | # qhasm: (uint32) in3 ^= *(uint32 *) (m + 12) | ||
4649 | # asm 1: xorl 12(<m=int64#2),<in3=int64#7d | ||
4650 | # asm 2: xorl 12(<m=%rsi),<in3=%eax | ||
4651 | xorl 12(%rsi),%eax | ||
4652 | |||
4653 | # qhasm: *(uint32 *) (out + 60) = in15 | ||
4654 | # asm 1: movl <in15=int64#4d,60(<out=int64#1) | ||
4655 | # asm 2: movl <in15=%ecx,60(<out=%rdi) | ||
4656 | movl %ecx,60(%rdi) | ||
4657 | |||
4658 | # qhasm: *(uint32 *) (out + 44) = in11 | ||
4659 | # asm 1: movl <in11=int64#5d,44(<out=int64#1) | ||
4660 | # asm 2: movl <in11=%r8d,44(<out=%rdi) | ||
4661 | movl %r8d,44(%rdi) | ||
4662 | |||
4663 | # qhasm: *(uint32 *) (out + 28) = in7 | ||
4664 | # asm 1: movl <in7=int64#6d,28(<out=int64#1) | ||
4665 | # asm 2: movl <in7=%r9d,28(<out=%rdi) | ||
4666 | movl %r9d,28(%rdi) | ||
4667 | |||
4668 | # qhasm: *(uint32 *) (out + 12) = in3 | ||
4669 | # asm 1: movl <in3=int64#7d,12(<out=int64#1) | ||
4670 | # asm 2: movl <in3=%eax,12(<out=%rdi) | ||
4671 | movl %eax,12(%rdi) | ||
4672 | |||
4673 | # qhasm: bytes = bytes_backup | ||
4674 | # asm 1: movq <bytes_backup=stack64#8,>bytes=int64#6 | ||
4675 | # asm 2: movq <bytes_backup=408(%rsp),>bytes=%r9 | ||
4676 | movq 408(%rsp),%r9 | ||
4677 | |||
4678 | # qhasm: in8 = ((uint32 *)&x2)[0] | ||
4679 | # asm 1: movl <x2=stack128#2,>in8=int64#4d | ||
4680 | # asm 2: movl <x2=16(%rsp),>in8=%ecx | ||
4681 | movl 16(%rsp),%ecx | ||
4682 | |||
4683 | # qhasm: in9 = ((uint32 *)&x3)[1] | ||
4684 | # asm 1: movl 4+<x3=stack128#3,>in9=int64#5d | ||
4685 | # asm 2: movl 4+<x3=32(%rsp),>in9=%r8d | ||
4686 | movl 4+32(%rsp),%r8d | ||
4687 | |||
4688 | # qhasm: in8 += 1 | ||
4689 | # asm 1: add $1,<in8=int64#4 | ||
4690 | # asm 2: add $1,<in8=%rcx | ||
4691 | add $1,%rcx | ||
4692 | |||
4693 | # qhasm: in9 <<= 32 | ||
4694 | # asm 1: shl $32,<in9=int64#5 | ||
4695 | # asm 2: shl $32,<in9=%r8 | ||
4696 | shl $32,%r8 | ||
4697 | |||
4698 | # qhasm: in8 += in9 | ||
4699 | # asm 1: add <in9=int64#5,<in8=int64#4 | ||
4700 | # asm 2: add <in9=%r8,<in8=%rcx | ||
4701 | add %r8,%rcx | ||
4702 | |||
4703 | # qhasm: in9 = in8 | ||
4704 | # asm 1: mov <in8=int64#4,>in9=int64#5 | ||
4705 | # asm 2: mov <in8=%rcx,>in9=%r8 | ||
4706 | mov %rcx,%r8 | ||
4707 | |||
4708 | # qhasm: (uint64) in9 >>= 32 | ||
4709 | # asm 1: shr $32,<in9=int64#5 | ||
4710 | # asm 2: shr $32,<in9=%r8 | ||
4711 | shr $32,%r8 | ||
4712 | |||
4713 | # qhasm: ((uint32 *)&x2)[0] = in8 | ||
4714 | # asm 1: movl <in8=int64#4d,>x2=stack128#2 | ||
4715 | # asm 2: movl <in8=%ecx,>x2=16(%rsp) | ||
4716 | movl %ecx,16(%rsp) | ||
4717 | |||
4718 | # qhasm: ((uint32 *)&x3)[1] = in9 | ||
4719 | # asm 1: movl <in9=int64#5d,4+<x3=stack128#3 | ||
4720 | # asm 2: movl <in9=%r8d,4+<x3=32(%rsp) | ||
4721 | movl %r8d,4+32(%rsp) | ||
4722 | |||
4723 | # qhasm: unsigned>? unsigned<? bytes - 64 | ||
4724 | # asm 1: cmp $64,<bytes=int64#6 | ||
4725 | # asm 2: cmp $64,<bytes=%r9 | ||
4726 | cmp $64,%r9 | ||
4727 | # comment:fp stack unchanged by jump | ||
4728 | |||
4729 | # qhasm: goto bytesatleast65 if unsigned> | ||
4730 | ja ._bytesatleast65 | ||
4731 | # comment:fp stack unchanged by jump | ||
4732 | |||
4733 | # qhasm: goto bytesatleast64 if !unsigned< | ||
4734 | jae ._bytesatleast64 | ||
4735 | |||
4736 | # qhasm: m = out | ||
4737 | # asm 1: mov <out=int64#1,>m=int64#2 | ||
4738 | # asm 2: mov <out=%rdi,>m=%rsi | ||
4739 | mov %rdi,%rsi | ||
4740 | |||
4741 | # qhasm: out = ctarget | ||
4742 | # asm 1: mov <ctarget=int64#3,>out=int64#1 | ||
4743 | # asm 2: mov <ctarget=%rdx,>out=%rdi | ||
4744 | mov %rdx,%rdi | ||
4745 | |||
4746 | # qhasm: i = bytes | ||
4747 | # asm 1: mov <bytes=int64#6,>i=int64#4 | ||
4748 | # asm 2: mov <bytes=%r9,>i=%rcx | ||
4749 | mov %r9,%rcx | ||
4750 | |||
4751 | # qhasm: while (i) { *out++ = *m++; --i } | ||
4752 | rep movsb | ||
4753 | # comment:fp stack unchanged by fallthrough | ||
4754 | |||
4755 | # qhasm: bytesatleast64: | ||
4756 | ._bytesatleast64: | ||
4757 | # comment:fp stack unchanged by fallthrough | ||
4758 | |||
4759 | # qhasm: done: | ||
4760 | ._done: | ||
4761 | |||
4762 | # qhasm: r11_caller = r11_stack | ||
4763 | # asm 1: movq <r11_stack=stack64#1,>r11_caller=int64#9 | ||
4764 | # asm 2: movq <r11_stack=352(%rsp),>r11_caller=%r11 | ||
4765 | movq 352(%rsp),%r11 | ||
4766 | |||
4767 | # qhasm: r12_caller = r12_stack | ||
4768 | # asm 1: movq <r12_stack=stack64#2,>r12_caller=int64#10 | ||
4769 | # asm 2: movq <r12_stack=360(%rsp),>r12_caller=%r12 | ||
4770 | movq 360(%rsp),%r12 | ||
4771 | |||
4772 | # qhasm: r13_caller = r13_stack | ||
4773 | # asm 1: movq <r13_stack=stack64#3,>r13_caller=int64#11 | ||
4774 | # asm 2: movq <r13_stack=368(%rsp),>r13_caller=%r13 | ||
4775 | movq 368(%rsp),%r13 | ||
4776 | |||
4777 | # qhasm: r14_caller = r14_stack | ||
4778 | # asm 1: movq <r14_stack=stack64#4,>r14_caller=int64#12 | ||
4779 | # asm 2: movq <r14_stack=376(%rsp),>r14_caller=%r14 | ||
4780 | movq 376(%rsp),%r14 | ||
4781 | |||
4782 | # qhasm: r15_caller = r15_stack | ||
4783 | # asm 1: movq <r15_stack=stack64#5,>r15_caller=int64#13 | ||
4784 | # asm 2: movq <r15_stack=384(%rsp),>r15_caller=%r15 | ||
4785 | movq 384(%rsp),%r15 | ||
4786 | |||
4787 | # qhasm: rbx_caller = rbx_stack | ||
4788 | # asm 1: movq <rbx_stack=stack64#6,>rbx_caller=int64#14 | ||
4789 | # asm 2: movq <rbx_stack=392(%rsp),>rbx_caller=%rbx | ||
4790 | movq 392(%rsp),%rbx | ||
4791 | |||
4792 | # qhasm: rbp_caller = rbp_stack | ||
4793 | # asm 1: movq <rbp_stack=stack64#7,>rbp_caller=int64#15 | ||
4794 | # asm 2: movq <rbp_stack=400(%rsp),>rbp_caller=%rbp | ||
4795 | movq 400(%rsp),%rbp | ||
4796 | |||
4797 | # qhasm: leave | ||
4798 | add %r11,%rsp | ||
4799 | xor %rax,%rax | ||
4800 | xor %rdx,%rdx | ||
4801 | ret | ||
4802 | |||
4803 | # qhasm: bytesatleast65: | ||
4804 | ._bytesatleast65: | ||
4805 | |||
4806 | # qhasm: bytes -= 64 | ||
4807 | # asm 1: sub $64,<bytes=int64#6 | ||
4808 | # asm 2: sub $64,<bytes=%r9 | ||
4809 | sub $64,%r9 | ||
4810 | |||
4811 | # qhasm: out += 64 | ||
4812 | # asm 1: add $64,<out=int64#1 | ||
4813 | # asm 2: add $64,<out=%rdi | ||
4814 | add $64,%rdi | ||
4815 | |||
4816 | # qhasm: m += 64 | ||
4817 | # asm 1: add $64,<m=int64#2 | ||
4818 | # asm 2: add $64,<m=%rsi | ||
4819 | add $64,%rsi | ||
4820 | # comment:fp stack unchanged by jump | ||
4821 | |||
4822 | # qhasm: goto bytesbetween1and255 | ||
4823 | jmp ._bytesbetween1and255 | ||
diff --git a/nacl/crypto_stream/salsa2012/checksum b/nacl/crypto_stream/salsa2012/checksum new file mode 100644 index 00000000..f801d9e3 --- /dev/null +++ b/nacl/crypto_stream/salsa2012/checksum | |||
@@ -0,0 +1 @@ | |||
ecc758f200061c3cc770b25797da73583548d4f90f69a967fbbe1a6d94d1705c | |||
diff --git a/nacl/crypto_stream/salsa2012/ref/api.h b/nacl/crypto_stream/salsa2012/ref/api.h new file mode 100644 index 00000000..c2b18461 --- /dev/null +++ b/nacl/crypto_stream/salsa2012/ref/api.h | |||
@@ -0,0 +1,2 @@ | |||
1 | #define CRYPTO_KEYBYTES 32 | ||
2 | #define CRYPTO_NONCEBYTES 8 | ||
diff --git a/nacl/crypto_stream/salsa2012/ref/implementors b/nacl/crypto_stream/salsa2012/ref/implementors new file mode 100644 index 00000000..f6fb3c73 --- /dev/null +++ b/nacl/crypto_stream/salsa2012/ref/implementors | |||
@@ -0,0 +1 @@ | |||
Daniel J. Bernstein | |||
diff --git a/nacl/crypto_stream/salsa2012/ref/stream.c b/nacl/crypto_stream/salsa2012/ref/stream.c new file mode 100644 index 00000000..86053337 --- /dev/null +++ b/nacl/crypto_stream/salsa2012/ref/stream.c | |||
@@ -0,0 +1,49 @@ | |||
1 | /* | ||
2 | version 20080913 | ||
3 | D. J. Bernstein | ||
4 | Public domain. | ||
5 | */ | ||
6 | |||
7 | #include "crypto_core_salsa2012.h" | ||
8 | #include "crypto_stream.h" | ||
9 | |||
10 | typedef unsigned int uint32; | ||
11 | |||
12 | static const unsigned char sigma[16] = "expand 32-byte k"; | ||
13 | |||
/*
 * crypto_stream (salsa2012/ref): write clen bytes of Salsa20/12
 * keystream into c, derived from the 8-byte nonce n and the 32-byte
 * key k via crypto_core_salsa2012 with the "expand 32-byte k" sigma
 * constant.  Always returns 0.
 * NOTE(review): the "NN |"/"||" tokens on each line are diff-viewer
 * extraction residue, left byte-identical; only comment lines added.
 */
14 | int crypto_stream( | ||
15 | unsigned char *c,unsigned long long clen, | ||
16 | const unsigned char *n, | ||
17 | const unsigned char *k | ||
18 | ) | ||
19 | { | ||
/* in[0..7] = nonce, in[8..15] = 64-bit little-endian block counter. */
20 | unsigned char in[16]; | ||
21 | unsigned char block[64]; | ||
22 | int i; | ||
23 | unsigned int u; | ||
24 | |||
/* Zero-length request: nothing to write. */
25 | if (!clen) return 0; | ||
26 | |||
/* Load the nonce and start the block counter at zero. */
27 | for (i = 0;i < 8;++i) in[i] = n[i]; | ||
28 | for (i = 8;i < 16;++i) in[i] = 0; | ||
29 | |||
/* Full 64-byte keystream blocks are produced directly into c. */
30 | while (clen >= 64) { | ||
31 | crypto_core_salsa2012(c,in,k,sigma); | ||
32 | |||
/* Increment the little-endian counter, propagating the carry bytewise. */
33 | u = 1; | ||
34 | for (i = 8;i < 16;++i) { | ||
35 | u += (unsigned int) in[i]; | ||
36 | in[i] = u; | ||
37 | u >>= 8; | ||
38 | } | ||
39 | |||
40 | clen -= 64; | ||
41 | c += 64; | ||
42 | } | ||
43 | |||
/* Trailing partial block (clen < 64 here): generate into a temporary
   block and copy only the requested prefix. */
44 | if (clen) { | ||
45 | crypto_core_salsa2012(block,in,k,sigma); | ||
46 | for (i = 0;i < clen;++i) c[i] = block[i]; | ||
47 | } | ||
48 | return 0; | ||
49 | } | ||
diff --git a/nacl/crypto_stream/salsa2012/ref/xor.c b/nacl/crypto_stream/salsa2012/ref/xor.c new file mode 100644 index 00000000..90206426 --- /dev/null +++ b/nacl/crypto_stream/salsa2012/ref/xor.c | |||
@@ -0,0 +1,52 @@ | |||
1 | /* | ||
2 | version 20080913 | ||
3 | D. J. Bernstein | ||
4 | Public domain. | ||
5 | */ | ||
6 | |||
7 | #include "crypto_core_salsa2012.h" | ||
8 | #include "crypto_stream.h" | ||
9 | |||
10 | typedef unsigned int uint32; | ||
11 | |||
12 | static const unsigned char sigma[16] = "expand 32-byte k"; | ||
13 | |||
/*
 * crypto_stream_xor (salsa2012/ref): XOR the mlen-byte message m with
 * Salsa20/12 keystream (8-byte nonce n, 32-byte key k) into c; this
 * both encrypts and decrypts.  c may equal m (in-place is safe: each
 * m[i] is read before c[i] is written).  Always returns 0.
 * NOTE(review): the "NN |"/"||" tokens on each line are diff-viewer
 * extraction residue, left byte-identical; only comment lines added.
 */
14 | int crypto_stream_xor( | ||
15 | unsigned char *c, | ||
16 | const unsigned char *m,unsigned long long mlen, | ||
17 | const unsigned char *n, | ||
18 | const unsigned char *k | ||
19 | ) | ||
20 | { | ||
/* in[0..7] = nonce, in[8..15] = 64-bit little-endian block counter. */
21 | unsigned char in[16]; | ||
22 | unsigned char block[64]; | ||
23 | int i; | ||
24 | unsigned int u; | ||
25 | |||
/* Empty message: nothing to do. */
26 | if (!mlen) return 0; | ||
27 | |||
/* Load the nonce and start the block counter at zero. */
28 | for (i = 0;i < 8;++i) in[i] = n[i]; | ||
29 | for (i = 8;i < 16;++i) in[i] = 0; | ||
30 | |||
/* Process whole 64-byte blocks: keystream into block[], XOR into c. */
31 | while (mlen >= 64) { | ||
32 | crypto_core_salsa2012(block,in,k,sigma); | ||
33 | for (i = 0;i < 64;++i) c[i] = m[i] ^ block[i]; | ||
34 | |||
/* Increment the little-endian counter, propagating the carry bytewise. */
35 | u = 1; | ||
36 | for (i = 8;i < 16;++i) { | ||
37 | u += (unsigned int) in[i]; | ||
38 | in[i] = u; | ||
39 | u >>= 8; | ||
40 | } | ||
41 | |||
42 | mlen -= 64; | ||
43 | c += 64; | ||
44 | m += 64; | ||
45 | } | ||
46 | |||
/* Trailing partial block (mlen < 64 here). */
47 | if (mlen) { | ||
48 | crypto_core_salsa2012(block,in,k,sigma); | ||
49 | for (i = 0;i < mlen;++i) c[i] = m[i] ^ block[i]; | ||
50 | } | ||
51 | return 0; | ||
52 | } | ||
diff --git a/nacl/crypto_stream/salsa2012/used b/nacl/crypto_stream/salsa2012/used new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/nacl/crypto_stream/salsa2012/used | |||
diff --git a/nacl/crypto_stream/salsa2012/x86_xmm5/api.h b/nacl/crypto_stream/salsa2012/x86_xmm5/api.h new file mode 100644 index 00000000..c2b18461 --- /dev/null +++ b/nacl/crypto_stream/salsa2012/x86_xmm5/api.h | |||
@@ -0,0 +1,2 @@ | |||
1 | #define CRYPTO_KEYBYTES 32 | ||
2 | #define CRYPTO_NONCEBYTES 8 | ||
diff --git a/nacl/crypto_stream/salsa2012/x86_xmm5/implementors b/nacl/crypto_stream/salsa2012/x86_xmm5/implementors new file mode 100644 index 00000000..f6fb3c73 --- /dev/null +++ b/nacl/crypto_stream/salsa2012/x86_xmm5/implementors | |||
@@ -0,0 +1 @@ | |||
Daniel J. Bernstein | |||
diff --git a/nacl/crypto_stream/salsa2012/x86_xmm5/stream.s b/nacl/crypto_stream/salsa2012/x86_xmm5/stream.s new file mode 100644 index 00000000..c511b0d3 --- /dev/null +++ b/nacl/crypto_stream/salsa2012/x86_xmm5/stream.s | |||
@@ -0,0 +1,5078 @@ | |||
1 | |||
2 | # qhasm: int32 a | ||
3 | |||
4 | # qhasm: stack32 arg1 | ||
5 | |||
6 | # qhasm: stack32 arg2 | ||
7 | |||
8 | # qhasm: stack32 arg3 | ||
9 | |||
10 | # qhasm: stack32 arg4 | ||
11 | |||
12 | # qhasm: stack32 arg5 | ||
13 | |||
14 | # qhasm: stack32 arg6 | ||
15 | |||
16 | # qhasm: input arg1 | ||
17 | |||
18 | # qhasm: input arg2 | ||
19 | |||
20 | # qhasm: input arg3 | ||
21 | |||
22 | # qhasm: input arg4 | ||
23 | |||
24 | # qhasm: input arg5 | ||
25 | |||
26 | # qhasm: input arg6 | ||
27 | |||
28 | # qhasm: int32 eax | ||
29 | |||
30 | # qhasm: int32 ebx | ||
31 | |||
32 | # qhasm: int32 esi | ||
33 | |||
34 | # qhasm: int32 edi | ||
35 | |||
36 | # qhasm: int32 ebp | ||
37 | |||
38 | # qhasm: caller eax | ||
39 | |||
40 | # qhasm: caller ebx | ||
41 | |||
42 | # qhasm: caller esi | ||
43 | |||
44 | # qhasm: caller edi | ||
45 | |||
46 | # qhasm: caller ebp | ||
47 | |||
48 | # qhasm: int32 k | ||
49 | |||
50 | # qhasm: int32 kbits | ||
51 | |||
52 | # qhasm: int32 iv | ||
53 | |||
54 | # qhasm: int32 i | ||
55 | |||
56 | # qhasm: stack128 x0 | ||
57 | |||
58 | # qhasm: stack128 x1 | ||
59 | |||
60 | # qhasm: stack128 x2 | ||
61 | |||
62 | # qhasm: stack128 x3 | ||
63 | |||
64 | # qhasm: int32 m | ||
65 | |||
66 | # qhasm: stack32 out_stack | ||
67 | |||
68 | # qhasm: int32 out | ||
69 | |||
70 | # qhasm: stack32 bytes_stack | ||
71 | |||
72 | # qhasm: int32 bytes | ||
73 | |||
74 | # qhasm: stack32 eax_stack | ||
75 | |||
76 | # qhasm: stack32 ebx_stack | ||
77 | |||
78 | # qhasm: stack32 esi_stack | ||
79 | |||
80 | # qhasm: stack32 edi_stack | ||
81 | |||
82 | # qhasm: stack32 ebp_stack | ||
83 | |||
84 | # qhasm: int6464 diag0 | ||
85 | |||
86 | # qhasm: int6464 diag1 | ||
87 | |||
88 | # qhasm: int6464 diag2 | ||
89 | |||
90 | # qhasm: int6464 diag3 | ||
91 | |||
92 | # qhasm: int6464 a0 | ||
93 | |||
94 | # qhasm: int6464 a1 | ||
95 | |||
96 | # qhasm: int6464 a2 | ||
97 | |||
98 | # qhasm: int6464 a3 | ||
99 | |||
100 | # qhasm: int6464 a4 | ||
101 | |||
102 | # qhasm: int6464 a5 | ||
103 | |||
104 | # qhasm: int6464 a6 | ||
105 | |||
106 | # qhasm: int6464 a7 | ||
107 | |||
108 | # qhasm: int6464 b0 | ||
109 | |||
110 | # qhasm: int6464 b1 | ||
111 | |||
112 | # qhasm: int6464 b2 | ||
113 | |||
114 | # qhasm: int6464 b3 | ||
115 | |||
116 | # qhasm: int6464 b4 | ||
117 | |||
118 | # qhasm: int6464 b5 | ||
119 | |||
120 | # qhasm: int6464 b6 | ||
121 | |||
122 | # qhasm: int6464 b7 | ||
123 | |||
124 | # qhasm: int6464 z0 | ||
125 | |||
126 | # qhasm: int6464 z1 | ||
127 | |||
128 | # qhasm: int6464 z2 | ||
129 | |||
130 | # qhasm: int6464 z3 | ||
131 | |||
132 | # qhasm: int6464 z4 | ||
133 | |||
134 | # qhasm: int6464 z5 | ||
135 | |||
136 | # qhasm: int6464 z6 | ||
137 | |||
138 | # qhasm: int6464 z7 | ||
139 | |||
140 | # qhasm: int6464 z8 | ||
141 | |||
142 | # qhasm: int6464 z9 | ||
143 | |||
144 | # qhasm: int6464 z10 | ||
145 | |||
146 | # qhasm: int6464 z11 | ||
147 | |||
148 | # qhasm: int6464 z12 | ||
149 | |||
150 | # qhasm: int6464 z13 | ||
151 | |||
152 | # qhasm: int6464 z14 | ||
153 | |||
154 | # qhasm: int6464 z15 | ||
155 | |||
156 | # qhasm: stack128 z0_stack | ||
157 | |||
158 | # qhasm: stack128 z1_stack | ||
159 | |||
160 | # qhasm: stack128 z2_stack | ||
161 | |||
162 | # qhasm: stack128 z3_stack | ||
163 | |||
164 | # qhasm: stack128 z4_stack | ||
165 | |||
166 | # qhasm: stack128 z5_stack | ||
167 | |||
168 | # qhasm: stack128 z6_stack | ||
169 | |||
170 | # qhasm: stack128 z7_stack | ||
171 | |||
172 | # qhasm: stack128 z8_stack | ||
173 | |||
174 | # qhasm: stack128 z9_stack | ||
175 | |||
176 | # qhasm: stack128 z10_stack | ||
177 | |||
178 | # qhasm: stack128 z11_stack | ||
179 | |||
180 | # qhasm: stack128 z12_stack | ||
181 | |||
182 | # qhasm: stack128 z13_stack | ||
183 | |||
184 | # qhasm: stack128 z14_stack | ||
185 | |||
186 | # qhasm: stack128 z15_stack | ||
187 | |||
188 | # qhasm: stack128 orig0 | ||
189 | |||
190 | # qhasm: stack128 orig1 | ||
191 | |||
192 | # qhasm: stack128 orig2 | ||
193 | |||
194 | # qhasm: stack128 orig3 | ||
195 | |||
196 | # qhasm: stack128 orig4 | ||
197 | |||
198 | # qhasm: stack128 orig5 | ||
199 | |||
200 | # qhasm: stack128 orig6 | ||
201 | |||
202 | # qhasm: stack128 orig7 | ||
203 | |||
204 | # qhasm: stack128 orig8 | ||
205 | |||
206 | # qhasm: stack128 orig9 | ||
207 | |||
208 | # qhasm: stack128 orig10 | ||
209 | |||
210 | # qhasm: stack128 orig11 | ||
211 | |||
212 | # qhasm: stack128 orig12 | ||
213 | |||
214 | # qhasm: stack128 orig13 | ||
215 | |||
216 | # qhasm: stack128 orig14 | ||
217 | |||
218 | # qhasm: stack128 orig15 | ||
219 | |||
220 | # qhasm: int6464 p | ||
221 | |||
222 | # qhasm: int6464 q | ||
223 | |||
224 | # qhasm: int6464 r | ||
225 | |||
226 | # qhasm: int6464 s | ||
227 | |||
228 | # qhasm: int6464 t | ||
229 | |||
230 | # qhasm: int6464 u | ||
231 | |||
232 | # qhasm: int6464 v | ||
233 | |||
234 | # qhasm: int6464 w | ||
235 | |||
236 | # qhasm: int6464 mp | ||
237 | |||
238 | # qhasm: int6464 mq | ||
239 | |||
240 | # qhasm: int6464 mr | ||
241 | |||
242 | # qhasm: int6464 ms | ||
243 | |||
244 | # qhasm: int6464 mt | ||
245 | |||
246 | # qhasm: int6464 mu | ||
247 | |||
248 | # qhasm: int6464 mv | ||
249 | |||
250 | # qhasm: int6464 mw | ||
251 | |||
252 | # qhasm: int32 in0 | ||
253 | |||
254 | # qhasm: int32 in1 | ||
255 | |||
256 | # qhasm: int32 in2 | ||
257 | |||
258 | # qhasm: int32 in3 | ||
259 | |||
260 | # qhasm: int32 in4 | ||
261 | |||
262 | # qhasm: int32 in5 | ||
263 | |||
264 | # qhasm: int32 in6 | ||
265 | |||
266 | # qhasm: int32 in7 | ||
267 | |||
268 | # qhasm: int32 in8 | ||
269 | |||
270 | # qhasm: int32 in9 | ||
271 | |||
272 | # qhasm: int32 in10 | ||
273 | |||
274 | # qhasm: int32 in11 | ||
275 | |||
276 | # qhasm: int32 in12 | ||
277 | |||
278 | # qhasm: int32 in13 | ||
279 | |||
280 | # qhasm: int32 in14 | ||
281 | |||
282 | # qhasm: int32 in15 | ||
283 | |||
284 | # qhasm: stack512 tmp | ||
285 | |||
286 | # qhasm: stack32 ctarget | ||
287 | |||
288 | # qhasm: enter crypto_stream_salsa2012_x86_xmm5 | ||
289 | .text | ||
290 | .p2align 5 | ||
291 | .globl _crypto_stream_salsa2012_x86_xmm5 | ||
292 | .globl crypto_stream_salsa2012_x86_xmm5 | ||
293 | _crypto_stream_salsa2012_x86_xmm5: | ||
294 | crypto_stream_salsa2012_x86_xmm5: | ||
295 | mov %esp,%eax | ||
296 | and $31,%eax | ||
297 | add $704,%eax | ||
298 | sub %eax,%esp | ||
299 | |||
300 | # qhasm: eax_stack = eax | ||
301 | # asm 1: movl <eax=int32#1,>eax_stack=stack32#1 | ||
302 | # asm 2: movl <eax=%eax,>eax_stack=0(%esp) | ||
303 | movl %eax,0(%esp) | ||
304 | |||
305 | # qhasm: ebx_stack = ebx | ||
306 | # asm 1: movl <ebx=int32#4,>ebx_stack=stack32#2 | ||
307 | # asm 2: movl <ebx=%ebx,>ebx_stack=4(%esp) | ||
308 | movl %ebx,4(%esp) | ||
309 | |||
310 | # qhasm: esi_stack = esi | ||
311 | # asm 1: movl <esi=int32#5,>esi_stack=stack32#3 | ||
312 | # asm 2: movl <esi=%esi,>esi_stack=8(%esp) | ||
313 | movl %esi,8(%esp) | ||
314 | |||
315 | # qhasm: edi_stack = edi | ||
316 | # asm 1: movl <edi=int32#6,>edi_stack=stack32#4 | ||
317 | # asm 2: movl <edi=%edi,>edi_stack=12(%esp) | ||
318 | movl %edi,12(%esp) | ||
319 | |||
320 | # qhasm: ebp_stack = ebp | ||
321 | # asm 1: movl <ebp=int32#7,>ebp_stack=stack32#5 | ||
322 | # asm 2: movl <ebp=%ebp,>ebp_stack=16(%esp) | ||
323 | movl %ebp,16(%esp) | ||
324 | |||
325 | # qhasm: bytes = arg2 | ||
326 | # asm 1: movl <arg2=stack32#-2,>bytes=int32#3 | ||
327 | # asm 2: movl <arg2=8(%esp,%eax),>bytes=%edx | ||
328 | movl 8(%esp,%eax),%edx | ||
329 | |||
330 | # qhasm: out = arg1 | ||
331 | # asm 1: movl <arg1=stack32#-1,>out=int32#6 | ||
332 | # asm 2: movl <arg1=4(%esp,%eax),>out=%edi | ||
333 | movl 4(%esp,%eax),%edi | ||
334 | |||
335 | # qhasm: m = out | ||
336 | # asm 1: mov <out=int32#6,>m=int32#5 | ||
337 | # asm 2: mov <out=%edi,>m=%esi | ||
338 | mov %edi,%esi | ||
339 | |||
340 | # qhasm: iv = arg4 | ||
341 | # asm 1: movl <arg4=stack32#-4,>iv=int32#4 | ||
342 | # asm 2: movl <arg4=16(%esp,%eax),>iv=%ebx | ||
343 | movl 16(%esp,%eax),%ebx | ||
344 | |||
345 | # qhasm: k = arg5 | ||
346 | # asm 1: movl <arg5=stack32#-5,>k=int32#7 | ||
347 | # asm 2: movl <arg5=20(%esp,%eax),>k=%ebp | ||
348 | movl 20(%esp,%eax),%ebp | ||
349 | |||
350 | # qhasm: unsigned>? bytes - 0 | ||
351 | # asm 1: cmp $0,<bytes=int32#3 | ||
352 | # asm 2: cmp $0,<bytes=%edx | ||
353 | cmp $0,%edx | ||
354 | # comment:fp stack unchanged by jump | ||
355 | |||
356 | # qhasm: goto done if !unsigned> | ||
357 | jbe ._done | ||
358 | |||
359 | # qhasm: a = 0 | ||
360 | # asm 1: mov $0,>a=int32#1 | ||
361 | # asm 2: mov $0,>a=%eax | ||
362 | mov $0,%eax | ||
363 | |||
364 | # qhasm: i = bytes | ||
365 | # asm 1: mov <bytes=int32#3,>i=int32#2 | ||
366 | # asm 2: mov <bytes=%edx,>i=%ecx | ||
367 | mov %edx,%ecx | ||
368 | |||
369 | # qhasm: while (i) { *out++ = a; --i } | ||
370 | rep stosb | ||
371 | |||
372 | # qhasm: out -= bytes | ||
373 | # asm 1: subl <bytes=int32#3,<out=int32#6 | ||
374 | # asm 2: subl <bytes=%edx,<out=%edi | ||
375 | subl %edx,%edi | ||
376 | # comment:fp stack unchanged by jump | ||
377 | |||
378 | # qhasm: goto start | ||
379 | jmp ._start | ||
380 | |||
381 | # qhasm: enter crypto_stream_salsa2012_x86_xmm5_xor | ||
382 | .text | ||
383 | .p2align 5 | ||
384 | .globl _crypto_stream_salsa2012_x86_xmm5_xor | ||
385 | .globl crypto_stream_salsa2012_x86_xmm5_xor | ||
386 | _crypto_stream_salsa2012_x86_xmm5_xor: | ||
387 | crypto_stream_salsa2012_x86_xmm5_xor: | ||
388 | mov %esp,%eax | ||
389 | and $31,%eax | ||
390 | add $704,%eax | ||
391 | sub %eax,%esp | ||
392 | |||
393 | # qhasm: eax_stack = eax | ||
394 | # asm 1: movl <eax=int32#1,>eax_stack=stack32#1 | ||
395 | # asm 2: movl <eax=%eax,>eax_stack=0(%esp) | ||
396 | movl %eax,0(%esp) | ||
397 | |||
398 | # qhasm: ebx_stack = ebx | ||
399 | # asm 1: movl <ebx=int32#4,>ebx_stack=stack32#2 | ||
400 | # asm 2: movl <ebx=%ebx,>ebx_stack=4(%esp) | ||
401 | movl %ebx,4(%esp) | ||
402 | |||
403 | # qhasm: esi_stack = esi | ||
404 | # asm 1: movl <esi=int32#5,>esi_stack=stack32#3 | ||
405 | # asm 2: movl <esi=%esi,>esi_stack=8(%esp) | ||
406 | movl %esi,8(%esp) | ||
407 | |||
408 | # qhasm: edi_stack = edi | ||
409 | # asm 1: movl <edi=int32#6,>edi_stack=stack32#4 | ||
410 | # asm 2: movl <edi=%edi,>edi_stack=12(%esp) | ||
411 | movl %edi,12(%esp) | ||
412 | |||
413 | # qhasm: ebp_stack = ebp | ||
414 | # asm 1: movl <ebp=int32#7,>ebp_stack=stack32#5 | ||
415 | # asm 2: movl <ebp=%ebp,>ebp_stack=16(%esp) | ||
416 | movl %ebp,16(%esp) | ||
417 | |||
418 | # qhasm: out = arg1 | ||
419 | # asm 1: movl <arg1=stack32#-1,>out=int32#6 | ||
420 | # asm 2: movl <arg1=4(%esp,%eax),>out=%edi | ||
421 | movl 4(%esp,%eax),%edi | ||
422 | |||
423 | # qhasm: m = arg2 | ||
424 | # asm 1: movl <arg2=stack32#-2,>m=int32#5 | ||
425 | # asm 2: movl <arg2=8(%esp,%eax),>m=%esi | ||
426 | movl 8(%esp,%eax),%esi | ||
427 | |||
428 | # qhasm: bytes = arg3 | ||
429 | # asm 1: movl <arg3=stack32#-3,>bytes=int32#3 | ||
430 | # asm 2: movl <arg3=12(%esp,%eax),>bytes=%edx | ||
431 | movl 12(%esp,%eax),%edx | ||
432 | |||
433 | # qhasm: iv = arg5 | ||
434 | # asm 1: movl <arg5=stack32#-5,>iv=int32#4 | ||
435 | # asm 2: movl <arg5=20(%esp,%eax),>iv=%ebx | ||
436 | movl 20(%esp,%eax),%ebx | ||
437 | |||
438 | # qhasm: k = arg6 | ||
439 | # asm 1: movl <arg6=stack32#-6,>k=int32#7 | ||
440 | # asm 2: movl <arg6=24(%esp,%eax),>k=%ebp | ||
441 | movl 24(%esp,%eax),%ebp | ||
442 | |||
443 | # qhasm: unsigned>? bytes - 0 | ||
444 | # asm 1: cmp $0,<bytes=int32#3 | ||
445 | # asm 2: cmp $0,<bytes=%edx | ||
446 | cmp $0,%edx | ||
447 | # comment:fp stack unchanged by jump | ||
448 | |||
449 | # qhasm: goto done if !unsigned> | ||
450 | jbe ._done | ||
451 | # comment:fp stack unchanged by fallthrough | ||
452 | |||
453 | # qhasm: start: | ||
454 | ._start: | ||
455 | |||
456 | # qhasm: out_stack = out | ||
457 | # asm 1: movl <out=int32#6,>out_stack=stack32#6 | ||
458 | # asm 2: movl <out=%edi,>out_stack=20(%esp) | ||
459 | movl %edi,20(%esp) | ||
460 | |||
461 | # qhasm: bytes_stack = bytes | ||
462 | # asm 1: movl <bytes=int32#3,>bytes_stack=stack32#7 | ||
463 | # asm 2: movl <bytes=%edx,>bytes_stack=24(%esp) | ||
464 | movl %edx,24(%esp) | ||
465 | |||
466 | # qhasm: in4 = *(uint32 *) (k + 12) | ||
467 | # asm 1: movl 12(<k=int32#7),>in4=int32#1 | ||
468 | # asm 2: movl 12(<k=%ebp),>in4=%eax | ||
469 | movl 12(%ebp),%eax | ||
470 | |||
471 | # qhasm: in12 = *(uint32 *) (k + 20) | ||
472 | # asm 1: movl 20(<k=int32#7),>in12=int32#2 | ||
473 | # asm 2: movl 20(<k=%ebp),>in12=%ecx | ||
474 | movl 20(%ebp),%ecx | ||
475 | |||
476 | # qhasm: ((uint32 *)&x3)[0] = in4 | ||
477 | # asm 1: movl <in4=int32#1,>x3=stack128#1 | ||
478 | # asm 2: movl <in4=%eax,>x3=32(%esp) | ||
479 | movl %eax,32(%esp) | ||
480 | |||
481 | # qhasm: ((uint32 *)&x1)[0] = in12 | ||
482 | # asm 1: movl <in12=int32#2,>x1=stack128#2 | ||
483 | # asm 2: movl <in12=%ecx,>x1=48(%esp) | ||
484 | movl %ecx,48(%esp) | ||
485 | |||
486 | # qhasm: in0 = 1634760805 | ||
487 | # asm 1: mov $1634760805,>in0=int32#1 | ||
488 | # asm 2: mov $1634760805,>in0=%eax | ||
489 | mov $1634760805,%eax | ||
490 | |||
491 | # qhasm: in8 = 0 | ||
492 | # asm 1: mov $0,>in8=int32#2 | ||
493 | # asm 2: mov $0,>in8=%ecx | ||
494 | mov $0,%ecx | ||
495 | |||
496 | # qhasm: ((uint32 *)&x0)[0] = in0 | ||
497 | # asm 1: movl <in0=int32#1,>x0=stack128#3 | ||
498 | # asm 2: movl <in0=%eax,>x0=64(%esp) | ||
499 | movl %eax,64(%esp) | ||
500 | |||
501 | # qhasm: ((uint32 *)&x2)[0] = in8 | ||
502 | # asm 1: movl <in8=int32#2,>x2=stack128#4 | ||
503 | # asm 2: movl <in8=%ecx,>x2=80(%esp) | ||
504 | movl %ecx,80(%esp) | ||
505 | |||
506 | # qhasm: in6 = *(uint32 *) (iv + 0) | ||
507 | # asm 1: movl 0(<iv=int32#4),>in6=int32#1 | ||
508 | # asm 2: movl 0(<iv=%ebx),>in6=%eax | ||
509 | movl 0(%ebx),%eax | ||
510 | |||
511 | # qhasm: in7 = *(uint32 *) (iv + 4) | ||
512 | # asm 1: movl 4(<iv=int32#4),>in7=int32#2 | ||
513 | # asm 2: movl 4(<iv=%ebx),>in7=%ecx | ||
514 | movl 4(%ebx),%ecx | ||
515 | |||
516 | # qhasm: ((uint32 *)&x1)[2] = in6 | ||
517 | # asm 1: movl <in6=int32#1,8+<x1=stack128#2 | ||
518 | # asm 2: movl <in6=%eax,8+<x1=48(%esp) | ||
519 | movl %eax,8+48(%esp) | ||
520 | |||
521 | # qhasm: ((uint32 *)&x2)[3] = in7 | ||
522 | # asm 1: movl <in7=int32#2,12+<x2=stack128#4 | ||
523 | # asm 2: movl <in7=%ecx,12+<x2=80(%esp) | ||
524 | movl %ecx,12+80(%esp) | ||
525 | |||
526 | # qhasm: in9 = 0 | ||
527 | # asm 1: mov $0,>in9=int32#1 | ||
528 | # asm 2: mov $0,>in9=%eax | ||
529 | mov $0,%eax | ||
530 | |||
531 | # qhasm: in10 = 2036477234 | ||
532 | # asm 1: mov $2036477234,>in10=int32#2 | ||
533 | # asm 2: mov $2036477234,>in10=%ecx | ||
534 | mov $2036477234,%ecx | ||
535 | |||
536 | # qhasm: ((uint32 *)&x3)[1] = in9 | ||
537 | # asm 1: movl <in9=int32#1,4+<x3=stack128#1 | ||
538 | # asm 2: movl <in9=%eax,4+<x3=32(%esp) | ||
539 | movl %eax,4+32(%esp) | ||
540 | |||
541 | # qhasm: ((uint32 *)&x0)[2] = in10 | ||
542 | # asm 1: movl <in10=int32#2,8+<x0=stack128#3 | ||
543 | # asm 2: movl <in10=%ecx,8+<x0=64(%esp) | ||
544 | movl %ecx,8+64(%esp) | ||
545 | |||
546 | # qhasm: in1 = *(uint32 *) (k + 0) | ||
547 | # asm 1: movl 0(<k=int32#7),>in1=int32#1 | ||
548 | # asm 2: movl 0(<k=%ebp),>in1=%eax | ||
549 | movl 0(%ebp),%eax | ||
550 | |||
551 | # qhasm: in2 = *(uint32 *) (k + 4) | ||
552 | # asm 1: movl 4(<k=int32#7),>in2=int32#2 | ||
553 | # asm 2: movl 4(<k=%ebp),>in2=%ecx | ||
554 | movl 4(%ebp),%ecx | ||
555 | |||
556 | # qhasm: in3 = *(uint32 *) (k + 8) | ||
557 | # asm 1: movl 8(<k=int32#7),>in3=int32#3 | ||
558 | # asm 2: movl 8(<k=%ebp),>in3=%edx | ||
559 | movl 8(%ebp),%edx | ||
560 | |||
561 | # qhasm: in5 = 857760878 | ||
562 | # asm 1: mov $857760878,>in5=int32#4 | ||
563 | # asm 2: mov $857760878,>in5=%ebx | ||
564 | mov $857760878,%ebx | ||
565 | |||
566 | # qhasm: ((uint32 *)&x1)[1] = in1 | ||
567 | # asm 1: movl <in1=int32#1,4+<x1=stack128#2 | ||
568 | # asm 2: movl <in1=%eax,4+<x1=48(%esp) | ||
569 | movl %eax,4+48(%esp) | ||
570 | |||
571 | # qhasm: ((uint32 *)&x2)[2] = in2 | ||
572 | # asm 1: movl <in2=int32#2,8+<x2=stack128#4 | ||
573 | # asm 2: movl <in2=%ecx,8+<x2=80(%esp) | ||
574 | movl %ecx,8+80(%esp) | ||
575 | |||
576 | # qhasm: ((uint32 *)&x3)[3] = in3 | ||
577 | # asm 1: movl <in3=int32#3,12+<x3=stack128#1 | ||
578 | # asm 2: movl <in3=%edx,12+<x3=32(%esp) | ||
579 | movl %edx,12+32(%esp) | ||
580 | |||
581 | # qhasm: ((uint32 *)&x0)[1] = in5 | ||
582 | # asm 1: movl <in5=int32#4,4+<x0=stack128#3 | ||
583 | # asm 2: movl <in5=%ebx,4+<x0=64(%esp) | ||
584 | movl %ebx,4+64(%esp) | ||
585 | |||
586 | # qhasm: in11 = *(uint32 *) (k + 16) | ||
587 | # asm 1: movl 16(<k=int32#7),>in11=int32#1 | ||
588 | # asm 2: movl 16(<k=%ebp),>in11=%eax | ||
589 | movl 16(%ebp),%eax | ||
590 | |||
591 | # qhasm: in13 = *(uint32 *) (k + 24) | ||
592 | # asm 1: movl 24(<k=int32#7),>in13=int32#2 | ||
593 | # asm 2: movl 24(<k=%ebp),>in13=%ecx | ||
594 | movl 24(%ebp),%ecx | ||
595 | |||
596 | # qhasm: in14 = *(uint32 *) (k + 28) | ||
597 | # asm 1: movl 28(<k=int32#7),>in14=int32#3 | ||
598 | # asm 2: movl 28(<k=%ebp),>in14=%edx | ||
599 | movl 28(%ebp),%edx | ||
600 | |||
601 | # qhasm: in15 = 1797285236 | ||
602 | # asm 1: mov $1797285236,>in15=int32#4 | ||
603 | # asm 2: mov $1797285236,>in15=%ebx | ||
604 | mov $1797285236,%ebx | ||
605 | |||
606 | # qhasm: ((uint32 *)&x1)[3] = in11 | ||
607 | # asm 1: movl <in11=int32#1,12+<x1=stack128#2 | ||
608 | # asm 2: movl <in11=%eax,12+<x1=48(%esp) | ||
609 | movl %eax,12+48(%esp) | ||
610 | |||
611 | # qhasm: ((uint32 *)&x2)[1] = in13 | ||
612 | # asm 1: movl <in13=int32#2,4+<x2=stack128#4 | ||
613 | # asm 2: movl <in13=%ecx,4+<x2=80(%esp) | ||
614 | movl %ecx,4+80(%esp) | ||
615 | |||
616 | # qhasm: ((uint32 *)&x3)[2] = in14 | ||
617 | # asm 1: movl <in14=int32#3,8+<x3=stack128#1 | ||
618 | # asm 2: movl <in14=%edx,8+<x3=32(%esp) | ||
619 | movl %edx,8+32(%esp) | ||
620 | |||
621 | # qhasm: ((uint32 *)&x0)[3] = in15 | ||
622 | # asm 1: movl <in15=int32#4,12+<x0=stack128#3 | ||
623 | # asm 2: movl <in15=%ebx,12+<x0=64(%esp) | ||
624 | movl %ebx,12+64(%esp) | ||
625 | |||
626 | # qhasm: bytes = bytes_stack | ||
627 | # asm 1: movl <bytes_stack=stack32#7,>bytes=int32#1 | ||
628 | # asm 2: movl <bytes_stack=24(%esp),>bytes=%eax | ||
629 | movl 24(%esp),%eax | ||
630 | |||
631 | # qhasm: unsigned<? bytes - 256 | ||
632 | # asm 1: cmp $256,<bytes=int32#1 | ||
633 | # asm 2: cmp $256,<bytes=%eax | ||
634 | cmp $256,%eax | ||
635 | # comment:fp stack unchanged by jump | ||
636 | |||
637 | # qhasm: goto bytesbetween1and255 if unsigned< | ||
638 | jb ._bytesbetween1and255 | ||
639 | |||
640 | # qhasm: z0 = x0 | ||
641 | # asm 1: movdqa <x0=stack128#3,>z0=int6464#1 | ||
642 | # asm 2: movdqa <x0=64(%esp),>z0=%xmm0 | ||
643 | movdqa 64(%esp),%xmm0 | ||
644 | |||
645 | # qhasm: z5 = z0[1,1,1,1] | ||
646 | # asm 1: pshufd $0x55,<z0=int6464#1,>z5=int6464#2 | ||
647 | # asm 2: pshufd $0x55,<z0=%xmm0,>z5=%xmm1 | ||
648 | pshufd $0x55,%xmm0,%xmm1 | ||
649 | |||
650 | # qhasm: z10 = z0[2,2,2,2] | ||
651 | # asm 1: pshufd $0xaa,<z0=int6464#1,>z10=int6464#3 | ||
652 | # asm 2: pshufd $0xaa,<z0=%xmm0,>z10=%xmm2 | ||
653 | pshufd $0xaa,%xmm0,%xmm2 | ||
654 | |||
655 | # qhasm: z15 = z0[3,3,3,3] | ||
656 | # asm 1: pshufd $0xff,<z0=int6464#1,>z15=int6464#4 | ||
657 | # asm 2: pshufd $0xff,<z0=%xmm0,>z15=%xmm3 | ||
658 | pshufd $0xff,%xmm0,%xmm3 | ||
659 | |||
660 | # qhasm: z0 = z0[0,0,0,0] | ||
661 | # asm 1: pshufd $0x00,<z0=int6464#1,>z0=int6464#1 | ||
662 | # asm 2: pshufd $0x00,<z0=%xmm0,>z0=%xmm0 | ||
663 | pshufd $0x00,%xmm0,%xmm0 | ||
664 | |||
665 | # qhasm: orig5 = z5 | ||
666 | # asm 1: movdqa <z5=int6464#2,>orig5=stack128#5 | ||
667 | # asm 2: movdqa <z5=%xmm1,>orig5=96(%esp) | ||
668 | movdqa %xmm1,96(%esp) | ||
669 | |||
670 | # qhasm: orig10 = z10 | ||
671 | # asm 1: movdqa <z10=int6464#3,>orig10=stack128#6 | ||
672 | # asm 2: movdqa <z10=%xmm2,>orig10=112(%esp) | ||
673 | movdqa %xmm2,112(%esp) | ||
674 | |||
675 | # qhasm: orig15 = z15 | ||
676 | # asm 1: movdqa <z15=int6464#4,>orig15=stack128#7 | ||
677 | # asm 2: movdqa <z15=%xmm3,>orig15=128(%esp) | ||
678 | movdqa %xmm3,128(%esp) | ||
679 | |||
680 | # qhasm: orig0 = z0 | ||
681 | # asm 1: movdqa <z0=int6464#1,>orig0=stack128#8 | ||
682 | # asm 2: movdqa <z0=%xmm0,>orig0=144(%esp) | ||
683 | movdqa %xmm0,144(%esp) | ||
684 | |||
685 | # qhasm: z1 = x1 | ||
686 | # asm 1: movdqa <x1=stack128#2,>z1=int6464#1 | ||
687 | # asm 2: movdqa <x1=48(%esp),>z1=%xmm0 | ||
688 | movdqa 48(%esp),%xmm0 | ||
689 | |||
690 | # qhasm: z6 = z1[2,2,2,2] | ||
691 | # asm 1: pshufd $0xaa,<z1=int6464#1,>z6=int6464#2 | ||
692 | # asm 2: pshufd $0xaa,<z1=%xmm0,>z6=%xmm1 | ||
693 | pshufd $0xaa,%xmm0,%xmm1 | ||
694 | |||
695 | # qhasm: z11 = z1[3,3,3,3] | ||
696 | # asm 1: pshufd $0xff,<z1=int6464#1,>z11=int6464#3 | ||
697 | # asm 2: pshufd $0xff,<z1=%xmm0,>z11=%xmm2 | ||
698 | pshufd $0xff,%xmm0,%xmm2 | ||
699 | |||
700 | # qhasm: z12 = z1[0,0,0,0] | ||
701 | # asm 1: pshufd $0x00,<z1=int6464#1,>z12=int6464#4 | ||
702 | # asm 2: pshufd $0x00,<z1=%xmm0,>z12=%xmm3 | ||
703 | pshufd $0x00,%xmm0,%xmm3 | ||
704 | |||
705 | # qhasm: z1 = z1[1,1,1,1] | ||
706 | # asm 1: pshufd $0x55,<z1=int6464#1,>z1=int6464#1 | ||
707 | # asm 2: pshufd $0x55,<z1=%xmm0,>z1=%xmm0 | ||
708 | pshufd $0x55,%xmm0,%xmm0 | ||
709 | |||
710 | # qhasm: orig6 = z6 | ||
711 | # asm 1: movdqa <z6=int6464#2,>orig6=stack128#9 | ||
712 | # asm 2: movdqa <z6=%xmm1,>orig6=160(%esp) | ||
713 | movdqa %xmm1,160(%esp) | ||
714 | |||
715 | # qhasm: orig11 = z11 | ||
716 | # asm 1: movdqa <z11=int6464#3,>orig11=stack128#10 | ||
717 | # asm 2: movdqa <z11=%xmm2,>orig11=176(%esp) | ||
718 | movdqa %xmm2,176(%esp) | ||
719 | |||
720 | # qhasm: orig12 = z12 | ||
721 | # asm 1: movdqa <z12=int6464#4,>orig12=stack128#11 | ||
722 | # asm 2: movdqa <z12=%xmm3,>orig12=192(%esp) | ||
723 | movdqa %xmm3,192(%esp) | ||
724 | |||
725 | # qhasm: orig1 = z1 | ||
726 | # asm 1: movdqa <z1=int6464#1,>orig1=stack128#12 | ||
727 | # asm 2: movdqa <z1=%xmm0,>orig1=208(%esp) | ||
728 | movdqa %xmm0,208(%esp) | ||
729 | |||
730 | # qhasm: z2 = x2 | ||
731 | # asm 1: movdqa <x2=stack128#4,>z2=int6464#1 | ||
732 | # asm 2: movdqa <x2=80(%esp),>z2=%xmm0 | ||
733 | movdqa 80(%esp),%xmm0 | ||
734 | |||
735 | # qhasm: z7 = z2[3,3,3,3] | ||
736 | # asm 1: pshufd $0xff,<z2=int6464#1,>z7=int6464#2 | ||
737 | # asm 2: pshufd $0xff,<z2=%xmm0,>z7=%xmm1 | ||
738 | pshufd $0xff,%xmm0,%xmm1 | ||
739 | |||
740 | # qhasm: z13 = z2[1,1,1,1] | ||
741 | # asm 1: pshufd $0x55,<z2=int6464#1,>z13=int6464#3 | ||
742 | # asm 2: pshufd $0x55,<z2=%xmm0,>z13=%xmm2 | ||
743 | pshufd $0x55,%xmm0,%xmm2 | ||
744 | |||
745 | # qhasm: z2 = z2[2,2,2,2] | ||
746 | # asm 1: pshufd $0xaa,<z2=int6464#1,>z2=int6464#1 | ||
747 | # asm 2: pshufd $0xaa,<z2=%xmm0,>z2=%xmm0 | ||
748 | pshufd $0xaa,%xmm0,%xmm0 | ||
749 | |||
750 | # qhasm: orig7 = z7 | ||
751 | # asm 1: movdqa <z7=int6464#2,>orig7=stack128#13 | ||
752 | # asm 2: movdqa <z7=%xmm1,>orig7=224(%esp) | ||
753 | movdqa %xmm1,224(%esp) | ||
754 | |||
755 | # qhasm: orig13 = z13 | ||
756 | # asm 1: movdqa <z13=int6464#3,>orig13=stack128#14 | ||
757 | # asm 2: movdqa <z13=%xmm2,>orig13=240(%esp) | ||
758 | movdqa %xmm2,240(%esp) | ||
759 | |||
760 | # qhasm: orig2 = z2 | ||
761 | # asm 1: movdqa <z2=int6464#1,>orig2=stack128#15 | ||
762 | # asm 2: movdqa <z2=%xmm0,>orig2=256(%esp) | ||
763 | movdqa %xmm0,256(%esp) | ||
764 | |||
765 | # qhasm: z3 = x3 | ||
766 | # asm 1: movdqa <x3=stack128#1,>z3=int6464#1 | ||
767 | # asm 2: movdqa <x3=32(%esp),>z3=%xmm0 | ||
768 | movdqa 32(%esp),%xmm0 | ||
769 | |||
770 | # qhasm: z4 = z3[0,0,0,0] | ||
771 | # asm 1: pshufd $0x00,<z3=int6464#1,>z4=int6464#2 | ||
772 | # asm 2: pshufd $0x00,<z3=%xmm0,>z4=%xmm1 | ||
773 | pshufd $0x00,%xmm0,%xmm1 | ||
774 | |||
775 | # qhasm: z14 = z3[2,2,2,2] | ||
776 | # asm 1: pshufd $0xaa,<z3=int6464#1,>z14=int6464#3 | ||
777 | # asm 2: pshufd $0xaa,<z3=%xmm0,>z14=%xmm2 | ||
778 | pshufd $0xaa,%xmm0,%xmm2 | ||
779 | |||
780 | # qhasm: z3 = z3[3,3,3,3] | ||
781 | # asm 1: pshufd $0xff,<z3=int6464#1,>z3=int6464#1 | ||
782 | # asm 2: pshufd $0xff,<z3=%xmm0,>z3=%xmm0 | ||
783 | pshufd $0xff,%xmm0,%xmm0 | ||
784 | |||
785 | # qhasm: orig4 = z4 | ||
786 | # asm 1: movdqa <z4=int6464#2,>orig4=stack128#16 | ||
787 | # asm 2: movdqa <z4=%xmm1,>orig4=272(%esp) | ||
788 | movdqa %xmm1,272(%esp) | ||
789 | |||
790 | # qhasm: orig14 = z14 | ||
791 | # asm 1: movdqa <z14=int6464#3,>orig14=stack128#17 | ||
792 | # asm 2: movdqa <z14=%xmm2,>orig14=288(%esp) | ||
793 | movdqa %xmm2,288(%esp) | ||
794 | |||
795 | # qhasm: orig3 = z3 | ||
796 | # asm 1: movdqa <z3=int6464#1,>orig3=stack128#18 | ||
797 | # asm 2: movdqa <z3=%xmm0,>orig3=304(%esp) | ||
798 | movdqa %xmm0,304(%esp) | ||
799 | |||
800 | # qhasm: bytesatleast256: | ||
801 | ._bytesatleast256: | ||
802 | |||
803 | # qhasm: in8 = ((uint32 *)&x2)[0] | ||
804 | # asm 1: movl <x2=stack128#4,>in8=int32#2 | ||
805 | # asm 2: movl <x2=80(%esp),>in8=%ecx | ||
806 | movl 80(%esp),%ecx | ||
807 | |||
808 | # qhasm: in9 = ((uint32 *)&x3)[1] | ||
809 | # asm 1: movl 4+<x3=stack128#1,>in9=int32#3 | ||
810 | # asm 2: movl 4+<x3=32(%esp),>in9=%edx | ||
811 | movl 4+32(%esp),%edx | ||
812 | |||
813 | # qhasm: ((uint32 *) &orig8)[0] = in8 | ||
814 | # asm 1: movl <in8=int32#2,>orig8=stack128#19 | ||
815 | # asm 2: movl <in8=%ecx,>orig8=320(%esp) | ||
816 | movl %ecx,320(%esp) | ||
817 | |||
818 | # qhasm: ((uint32 *) &orig9)[0] = in9 | ||
819 | # asm 1: movl <in9=int32#3,>orig9=stack128#20 | ||
820 | # asm 2: movl <in9=%edx,>orig9=336(%esp) | ||
821 | movl %edx,336(%esp) | ||
822 | |||
823 | # qhasm: carry? in8 += 1 | ||
824 | # asm 1: add $1,<in8=int32#2 | ||
825 | # asm 2: add $1,<in8=%ecx | ||
826 | add $1,%ecx | ||
827 | |||
828 | # qhasm: in9 += 0 + carry | ||
829 | # asm 1: adc $0,<in9=int32#3 | ||
830 | # asm 2: adc $0,<in9=%edx | ||
831 | adc $0,%edx | ||
832 | |||
833 | # qhasm: ((uint32 *) &orig8)[1] = in8 | ||
834 | # asm 1: movl <in8=int32#2,4+<orig8=stack128#19 | ||
835 | # asm 2: movl <in8=%ecx,4+<orig8=320(%esp) | ||
836 | movl %ecx,4+320(%esp) | ||
837 | |||
838 | # qhasm: ((uint32 *) &orig9)[1] = in9 | ||
839 | # asm 1: movl <in9=int32#3,4+<orig9=stack128#20 | ||
840 | # asm 2: movl <in9=%edx,4+<orig9=336(%esp) | ||
841 | movl %edx,4+336(%esp) | ||
842 | |||
843 | # qhasm: carry? in8 += 1 | ||
844 | # asm 1: add $1,<in8=int32#2 | ||
845 | # asm 2: add $1,<in8=%ecx | ||
846 | add $1,%ecx | ||
847 | |||
848 | # qhasm: in9 += 0 + carry | ||
849 | # asm 1: adc $0,<in9=int32#3 | ||
850 | # asm 2: adc $0,<in9=%edx | ||
851 | adc $0,%edx | ||
852 | |||
853 | # qhasm: ((uint32 *) &orig8)[2] = in8 | ||
854 | # asm 1: movl <in8=int32#2,8+<orig8=stack128#19 | ||
855 | # asm 2: movl <in8=%ecx,8+<orig8=320(%esp) | ||
856 | movl %ecx,8+320(%esp) | ||
857 | |||
858 | # qhasm: ((uint32 *) &orig9)[2] = in9 | ||
859 | # asm 1: movl <in9=int32#3,8+<orig9=stack128#20 | ||
860 | # asm 2: movl <in9=%edx,8+<orig9=336(%esp) | ||
861 | movl %edx,8+336(%esp) | ||
862 | |||
863 | # qhasm: carry? in8 += 1 | ||
864 | # asm 1: add $1,<in8=int32#2 | ||
865 | # asm 2: add $1,<in8=%ecx | ||
866 | add $1,%ecx | ||
867 | |||
868 | # qhasm: in9 += 0 + carry | ||
869 | # asm 1: adc $0,<in9=int32#3 | ||
870 | # asm 2: adc $0,<in9=%edx | ||
871 | adc $0,%edx | ||
872 | |||
873 | # qhasm: ((uint32 *) &orig8)[3] = in8 | ||
874 | # asm 1: movl <in8=int32#2,12+<orig8=stack128#19 | ||
875 | # asm 2: movl <in8=%ecx,12+<orig8=320(%esp) | ||
876 | movl %ecx,12+320(%esp) | ||
877 | |||
878 | # qhasm: ((uint32 *) &orig9)[3] = in9 | ||
879 | # asm 1: movl <in9=int32#3,12+<orig9=stack128#20 | ||
880 | # asm 2: movl <in9=%edx,12+<orig9=336(%esp) | ||
881 | movl %edx,12+336(%esp) | ||
882 | |||
883 | # qhasm: carry? in8 += 1 | ||
884 | # asm 1: add $1,<in8=int32#2 | ||
885 | # asm 2: add $1,<in8=%ecx | ||
886 | add $1,%ecx | ||
887 | |||
888 | # qhasm: in9 += 0 + carry | ||
889 | # asm 1: adc $0,<in9=int32#3 | ||
890 | # asm 2: adc $0,<in9=%edx | ||
891 | adc $0,%edx | ||
892 | |||
893 | # qhasm: ((uint32 *)&x2)[0] = in8 | ||
894 | # asm 1: movl <in8=int32#2,>x2=stack128#4 | ||
895 | # asm 2: movl <in8=%ecx,>x2=80(%esp) | ||
896 | movl %ecx,80(%esp) | ||
897 | |||
898 | # qhasm: ((uint32 *)&x3)[1] = in9 | ||
899 | # asm 1: movl <in9=int32#3,4+<x3=stack128#1 | ||
900 | # asm 2: movl <in9=%edx,4+<x3=32(%esp) | ||
901 | movl %edx,4+32(%esp) | ||
902 | |||
903 | # qhasm: bytes_stack = bytes | ||
904 | # asm 1: movl <bytes=int32#1,>bytes_stack=stack32#7 | ||
905 | # asm 2: movl <bytes=%eax,>bytes_stack=24(%esp) | ||
906 | movl %eax,24(%esp) | ||
907 | |||
908 | # qhasm: i = 12 | ||
909 | # asm 1: mov $12,>i=int32#1 | ||
910 | # asm 2: mov $12,>i=%eax | ||
911 | mov $12,%eax | ||
912 | |||
913 | # qhasm: z5 = orig5 | ||
914 | # asm 1: movdqa <orig5=stack128#5,>z5=int6464#1 | ||
915 | # asm 2: movdqa <orig5=96(%esp),>z5=%xmm0 | ||
916 | movdqa 96(%esp),%xmm0 | ||
917 | |||
918 | # qhasm: z10 = orig10 | ||
919 | # asm 1: movdqa <orig10=stack128#6,>z10=int6464#2 | ||
920 | # asm 2: movdqa <orig10=112(%esp),>z10=%xmm1 | ||
921 | movdqa 112(%esp),%xmm1 | ||
922 | |||
923 | # qhasm: z15 = orig15 | ||
924 | # asm 1: movdqa <orig15=stack128#7,>z15=int6464#3 | ||
925 | # asm 2: movdqa <orig15=128(%esp),>z15=%xmm2 | ||
926 | movdqa 128(%esp),%xmm2 | ||
927 | |||
928 | # qhasm: z14 = orig14 | ||
929 | # asm 1: movdqa <orig14=stack128#17,>z14=int6464#4 | ||
930 | # asm 2: movdqa <orig14=288(%esp),>z14=%xmm3 | ||
931 | movdqa 288(%esp),%xmm3 | ||
932 | |||
933 | # qhasm: z3 = orig3 | ||
934 | # asm 1: movdqa <orig3=stack128#18,>z3=int6464#5 | ||
935 | # asm 2: movdqa <orig3=304(%esp),>z3=%xmm4 | ||
936 | movdqa 304(%esp),%xmm4 | ||
937 | |||
938 | # qhasm: z6 = orig6 | ||
939 | # asm 1: movdqa <orig6=stack128#9,>z6=int6464#6 | ||
940 | # asm 2: movdqa <orig6=160(%esp),>z6=%xmm5 | ||
941 | movdqa 160(%esp),%xmm5 | ||
942 | |||
943 | # qhasm: z11 = orig11 | ||
944 | # asm 1: movdqa <orig11=stack128#10,>z11=int6464#7 | ||
945 | # asm 2: movdqa <orig11=176(%esp),>z11=%xmm6 | ||
946 | movdqa 176(%esp),%xmm6 | ||
947 | |||
948 | # qhasm: z1 = orig1 | ||
949 | # asm 1: movdqa <orig1=stack128#12,>z1=int6464#8 | ||
950 | # asm 2: movdqa <orig1=208(%esp),>z1=%xmm7 | ||
951 | movdqa 208(%esp),%xmm7 | ||
952 | |||
953 | # qhasm: z5_stack = z5 | ||
954 | # asm 1: movdqa <z5=int6464#1,>z5_stack=stack128#21 | ||
955 | # asm 2: movdqa <z5=%xmm0,>z5_stack=352(%esp) | ||
956 | movdqa %xmm0,352(%esp) | ||
957 | |||
958 | # qhasm: z10_stack = z10 | ||
959 | # asm 1: movdqa <z10=int6464#2,>z10_stack=stack128#22 | ||
960 | # asm 2: movdqa <z10=%xmm1,>z10_stack=368(%esp) | ||
961 | movdqa %xmm1,368(%esp) | ||
962 | |||
963 | # qhasm: z15_stack = z15 | ||
964 | # asm 1: movdqa <z15=int6464#3,>z15_stack=stack128#23 | ||
965 | # asm 2: movdqa <z15=%xmm2,>z15_stack=384(%esp) | ||
966 | movdqa %xmm2,384(%esp) | ||
967 | |||
968 | # qhasm: z14_stack = z14 | ||
969 | # asm 1: movdqa <z14=int6464#4,>z14_stack=stack128#24 | ||
970 | # asm 2: movdqa <z14=%xmm3,>z14_stack=400(%esp) | ||
971 | movdqa %xmm3,400(%esp) | ||
972 | |||
973 | # qhasm: z3_stack = z3 | ||
974 | # asm 1: movdqa <z3=int6464#5,>z3_stack=stack128#25 | ||
975 | # asm 2: movdqa <z3=%xmm4,>z3_stack=416(%esp) | ||
976 | movdqa %xmm4,416(%esp) | ||
977 | |||
978 | # qhasm: z6_stack = z6 | ||
979 | # asm 1: movdqa <z6=int6464#6,>z6_stack=stack128#26 | ||
980 | # asm 2: movdqa <z6=%xmm5,>z6_stack=432(%esp) | ||
981 | movdqa %xmm5,432(%esp) | ||
982 | |||
983 | # qhasm: z11_stack = z11 | ||
984 | # asm 1: movdqa <z11=int6464#7,>z11_stack=stack128#27 | ||
985 | # asm 2: movdqa <z11=%xmm6,>z11_stack=448(%esp) | ||
986 | movdqa %xmm6,448(%esp) | ||
987 | |||
988 | # qhasm: z1_stack = z1 | ||
989 | # asm 1: movdqa <z1=int6464#8,>z1_stack=stack128#28 | ||
990 | # asm 2: movdqa <z1=%xmm7,>z1_stack=464(%esp) | ||
991 | movdqa %xmm7,464(%esp) | ||
992 | |||
993 | # qhasm: z7 = orig7 | ||
994 | # asm 1: movdqa <orig7=stack128#13,>z7=int6464#5 | ||
995 | # asm 2: movdqa <orig7=224(%esp),>z7=%xmm4 | ||
996 | movdqa 224(%esp),%xmm4 | ||
997 | |||
998 | # qhasm: z13 = orig13 | ||
999 | # asm 1: movdqa <orig13=stack128#14,>z13=int6464#6 | ||
1000 | # asm 2: movdqa <orig13=240(%esp),>z13=%xmm5 | ||
1001 | movdqa 240(%esp),%xmm5 | ||
1002 | |||
1003 | # qhasm: z2 = orig2 | ||
1004 | # asm 1: movdqa <orig2=stack128#15,>z2=int6464#7 | ||
1005 | # asm 2: movdqa <orig2=256(%esp),>z2=%xmm6 | ||
1006 | movdqa 256(%esp),%xmm6 | ||
1007 | |||
1008 | # qhasm: z9 = orig9 | ||
1009 | # asm 1: movdqa <orig9=stack128#20,>z9=int6464#8 | ||
1010 | # asm 2: movdqa <orig9=336(%esp),>z9=%xmm7 | ||
1011 | movdqa 336(%esp),%xmm7 | ||
1012 | |||
1013 | # qhasm: p = orig0 | ||
1014 | # asm 1: movdqa <orig0=stack128#8,>p=int6464#1 | ||
1015 | # asm 2: movdqa <orig0=144(%esp),>p=%xmm0 | ||
1016 | movdqa 144(%esp),%xmm0 | ||
1017 | |||
1018 | # qhasm: t = orig12 | ||
1019 | # asm 1: movdqa <orig12=stack128#11,>t=int6464#3 | ||
1020 | # asm 2: movdqa <orig12=192(%esp),>t=%xmm2 | ||
1021 | movdqa 192(%esp),%xmm2 | ||
1022 | |||
1023 | # qhasm: q = orig4 | ||
1024 | # asm 1: movdqa <orig4=stack128#16,>q=int6464#4 | ||
1025 | # asm 2: movdqa <orig4=272(%esp),>q=%xmm3 | ||
1026 | movdqa 272(%esp),%xmm3 | ||
1027 | |||
1028 | # qhasm: r = orig8 | ||
1029 | # asm 1: movdqa <orig8=stack128#19,>r=int6464#2 | ||
1030 | # asm 2: movdqa <orig8=320(%esp),>r=%xmm1 | ||
1031 | movdqa 320(%esp),%xmm1 | ||
1032 | |||
1033 | # qhasm: z7_stack = z7 | ||
1034 | # asm 1: movdqa <z7=int6464#5,>z7_stack=stack128#29 | ||
1035 | # asm 2: movdqa <z7=%xmm4,>z7_stack=480(%esp) | ||
1036 | movdqa %xmm4,480(%esp) | ||
1037 | |||
1038 | # qhasm: z13_stack = z13 | ||
1039 | # asm 1: movdqa <z13=int6464#6,>z13_stack=stack128#30 | ||
1040 | # asm 2: movdqa <z13=%xmm5,>z13_stack=496(%esp) | ||
1041 | movdqa %xmm5,496(%esp) | ||
1042 | |||
1043 | # qhasm: z2_stack = z2 | ||
1044 | # asm 1: movdqa <z2=int6464#7,>z2_stack=stack128#31 | ||
1045 | # asm 2: movdqa <z2=%xmm6,>z2_stack=512(%esp) | ||
1046 | movdqa %xmm6,512(%esp) | ||
1047 | |||
1048 | # qhasm: z9_stack = z9 | ||
1049 | # asm 1: movdqa <z9=int6464#8,>z9_stack=stack128#32 | ||
1050 | # asm 2: movdqa <z9=%xmm7,>z9_stack=528(%esp) | ||
1051 | movdqa %xmm7,528(%esp) | ||
1052 | |||
1053 | # qhasm: z0_stack = p | ||
1054 | # asm 1: movdqa <p=int6464#1,>z0_stack=stack128#33 | ||
1055 | # asm 2: movdqa <p=%xmm0,>z0_stack=544(%esp) | ||
1056 | movdqa %xmm0,544(%esp) | ||
1057 | |||
1058 | # qhasm: z12_stack = t | ||
1059 | # asm 1: movdqa <t=int6464#3,>z12_stack=stack128#34 | ||
1060 | # asm 2: movdqa <t=%xmm2,>z12_stack=560(%esp) | ||
1061 | movdqa %xmm2,560(%esp) | ||
1062 | |||
1063 | # qhasm: z4_stack = q | ||
1064 | # asm 1: movdqa <q=int6464#4,>z4_stack=stack128#35 | ||
1065 | # asm 2: movdqa <q=%xmm3,>z4_stack=576(%esp) | ||
1066 | movdqa %xmm3,576(%esp) | ||
1067 | |||
1068 | # qhasm: z8_stack = r | ||
1069 | # asm 1: movdqa <r=int6464#2,>z8_stack=stack128#36 | ||
1070 | # asm 2: movdqa <r=%xmm1,>z8_stack=592(%esp) | ||
1071 | movdqa %xmm1,592(%esp) | ||
1072 | |||
1073 | # qhasm: mainloop1: | ||
1074 | ._mainloop1: | ||
1075 | |||
1076 | # qhasm: assign xmm0 to p | ||
1077 | |||
1078 | # qhasm: assign xmm1 to r | ||
1079 | |||
1080 | # qhasm: assign xmm2 to t | ||
1081 | |||
1082 | # qhasm: assign xmm3 to q | ||
1083 | |||
1084 | # qhasm: s = t | ||
1085 | # asm 1: movdqa <t=int6464#3,>s=int6464#7 | ||
1086 | # asm 2: movdqa <t=%xmm2,>s=%xmm6 | ||
1087 | movdqa %xmm2,%xmm6 | ||
1088 | |||
1089 | # qhasm: uint32323232 t += p | ||
1090 | # asm 1: paddd <p=int6464#1,<t=int6464#3 | ||
1091 | # asm 2: paddd <p=%xmm0,<t=%xmm2 | ||
1092 | paddd %xmm0,%xmm2 | ||
1093 | |||
1094 | # qhasm: u = t | ||
1095 | # asm 1: movdqa <t=int6464#3,>u=int6464#5 | ||
1096 | # asm 2: movdqa <t=%xmm2,>u=%xmm4 | ||
1097 | movdqa %xmm2,%xmm4 | ||
1098 | |||
1099 | # qhasm: uint32323232 t >>= 25 | ||
1100 | # asm 1: psrld $25,<t=int6464#3 | ||
1101 | # asm 2: psrld $25,<t=%xmm2 | ||
1102 | psrld $25,%xmm2 | ||
1103 | |||
1104 | # qhasm: q ^= t | ||
1105 | # asm 1: pxor <t=int6464#3,<q=int6464#4 | ||
1106 | # asm 2: pxor <t=%xmm2,<q=%xmm3 | ||
1107 | pxor %xmm2,%xmm3 | ||
1108 | |||
1109 | # qhasm: uint32323232 u <<= 7 | ||
1110 | # asm 1: pslld $7,<u=int6464#5 | ||
1111 | # asm 2: pslld $7,<u=%xmm4 | ||
1112 | pslld $7,%xmm4 | ||
1113 | |||
1114 | # qhasm: q ^= u | ||
1115 | # asm 1: pxor <u=int6464#5,<q=int6464#4 | ||
1116 | # asm 2: pxor <u=%xmm4,<q=%xmm3 | ||
1117 | pxor %xmm4,%xmm3 | ||
1118 | |||
1119 | # qhasm: z4_stack = q | ||
1120 | # asm 1: movdqa <q=int6464#4,>z4_stack=stack128#33 | ||
1121 | # asm 2: movdqa <q=%xmm3,>z4_stack=544(%esp) | ||
1122 | movdqa %xmm3,544(%esp) | ||
1123 | |||
1124 | # qhasm: t = p | ||
1125 | # asm 1: movdqa <p=int6464#1,>t=int6464#3 | ||
1126 | # asm 2: movdqa <p=%xmm0,>t=%xmm2 | ||
1127 | movdqa %xmm0,%xmm2 | ||
1128 | |||
1129 | # qhasm: uint32323232 t += q | ||
1130 | # asm 1: paddd <q=int6464#4,<t=int6464#3 | ||
1131 | # asm 2: paddd <q=%xmm3,<t=%xmm2 | ||
1132 | paddd %xmm3,%xmm2 | ||
1133 | |||
1134 | # qhasm: u = t | ||
1135 | # asm 1: movdqa <t=int6464#3,>u=int6464#5 | ||
1136 | # asm 2: movdqa <t=%xmm2,>u=%xmm4 | ||
1137 | movdqa %xmm2,%xmm4 | ||
1138 | |||
1139 | # qhasm: uint32323232 t >>= 23 | ||
1140 | # asm 1: psrld $23,<t=int6464#3 | ||
1141 | # asm 2: psrld $23,<t=%xmm2 | ||
1142 | psrld $23,%xmm2 | ||
1143 | |||
1144 | # qhasm: r ^= t | ||
1145 | # asm 1: pxor <t=int6464#3,<r=int6464#2 | ||
1146 | # asm 2: pxor <t=%xmm2,<r=%xmm1 | ||
1147 | pxor %xmm2,%xmm1 | ||
1148 | |||
1149 | # qhasm: uint32323232 u <<= 9 | ||
1150 | # asm 1: pslld $9,<u=int6464#5 | ||
1151 | # asm 2: pslld $9,<u=%xmm4 | ||
1152 | pslld $9,%xmm4 | ||
1153 | |||
1154 | # qhasm: r ^= u | ||
1155 | # asm 1: pxor <u=int6464#5,<r=int6464#2 | ||
1156 | # asm 2: pxor <u=%xmm4,<r=%xmm1 | ||
1157 | pxor %xmm4,%xmm1 | ||
1158 | |||
1159 | # qhasm: z8_stack = r | ||
1160 | # asm 1: movdqa <r=int6464#2,>z8_stack=stack128#34 | ||
1161 | # asm 2: movdqa <r=%xmm1,>z8_stack=560(%esp) | ||
1162 | movdqa %xmm1,560(%esp) | ||
1163 | |||
1164 | # qhasm: uint32323232 q += r | ||
1165 | # asm 1: paddd <r=int6464#2,<q=int6464#4 | ||
1166 | # asm 2: paddd <r=%xmm1,<q=%xmm3 | ||
1167 | paddd %xmm1,%xmm3 | ||
1168 | |||
1169 | # qhasm: u = q | ||
1170 | # asm 1: movdqa <q=int6464#4,>u=int6464#3 | ||
1171 | # asm 2: movdqa <q=%xmm3,>u=%xmm2 | ||
1172 | movdqa %xmm3,%xmm2 | ||
1173 | |||
1174 | # qhasm: uint32323232 q >>= 19 | ||
1175 | # asm 1: psrld $19,<q=int6464#4 | ||
1176 | # asm 2: psrld $19,<q=%xmm3 | ||
1177 | psrld $19,%xmm3 | ||
1178 | |||
1179 | # qhasm: s ^= q | ||
1180 | # asm 1: pxor <q=int6464#4,<s=int6464#7 | ||
1181 | # asm 2: pxor <q=%xmm3,<s=%xmm6 | ||
1182 | pxor %xmm3,%xmm6 | ||
1183 | |||
1184 | # qhasm: uint32323232 u <<= 13 | ||
1185 | # asm 1: pslld $13,<u=int6464#3 | ||
1186 | # asm 2: pslld $13,<u=%xmm2 | ||
1187 | pslld $13,%xmm2 | ||
1188 | |||
1189 | # qhasm: s ^= u | ||
1190 | # asm 1: pxor <u=int6464#3,<s=int6464#7 | ||
1191 | # asm 2: pxor <u=%xmm2,<s=%xmm6 | ||
1192 | pxor %xmm2,%xmm6 | ||
1193 | |||
1194 | # qhasm: mt = z1_stack | ||
1195 | # asm 1: movdqa <z1_stack=stack128#28,>mt=int6464#3 | ||
1196 | # asm 2: movdqa <z1_stack=464(%esp),>mt=%xmm2 | ||
1197 | movdqa 464(%esp),%xmm2 | ||
1198 | |||
1199 | # qhasm: mp = z5_stack | ||
1200 | # asm 1: movdqa <z5_stack=stack128#21,>mp=int6464#5 | ||
1201 | # asm 2: movdqa <z5_stack=352(%esp),>mp=%xmm4 | ||
1202 | movdqa 352(%esp),%xmm4 | ||
1203 | |||
1204 | # qhasm: mq = z9_stack | ||
1205 | # asm 1: movdqa <z9_stack=stack128#32,>mq=int6464#4 | ||
1206 | # asm 2: movdqa <z9_stack=528(%esp),>mq=%xmm3 | ||
1207 | movdqa 528(%esp),%xmm3 | ||
1208 | |||
1209 | # qhasm: mr = z13_stack | ||
1210 | # asm 1: movdqa <z13_stack=stack128#30,>mr=int6464#6 | ||
1211 | # asm 2: movdqa <z13_stack=496(%esp),>mr=%xmm5 | ||
1212 | movdqa 496(%esp),%xmm5 | ||
1213 | |||
1214 | # qhasm: z12_stack = s | ||
1215 | # asm 1: movdqa <s=int6464#7,>z12_stack=stack128#30 | ||
1216 | # asm 2: movdqa <s=%xmm6,>z12_stack=496(%esp) | ||
1217 | movdqa %xmm6,496(%esp) | ||
1218 | |||
1219 | # qhasm: uint32323232 r += s | ||
1220 | # asm 1: paddd <s=int6464#7,<r=int6464#2 | ||
1221 | # asm 2: paddd <s=%xmm6,<r=%xmm1 | ||
1222 | paddd %xmm6,%xmm1 | ||
1223 | |||
1224 | # qhasm: u = r | ||
1225 | # asm 1: movdqa <r=int6464#2,>u=int6464#7 | ||
1226 | # asm 2: movdqa <r=%xmm1,>u=%xmm6 | ||
1227 | movdqa %xmm1,%xmm6 | ||
1228 | |||
1229 | # qhasm: uint32323232 r >>= 14 | ||
1230 | # asm 1: psrld $14,<r=int6464#2 | ||
1231 | # asm 2: psrld $14,<r=%xmm1 | ||
1232 | psrld $14,%xmm1 | ||
1233 | |||
1234 | # qhasm: p ^= r | ||
1235 | # asm 1: pxor <r=int6464#2,<p=int6464#1 | ||
1236 | # asm 2: pxor <r=%xmm1,<p=%xmm0 | ||
1237 | pxor %xmm1,%xmm0 | ||
1238 | |||
1239 | # qhasm: uint32323232 u <<= 18 | ||
1240 | # asm 1: pslld $18,<u=int6464#7 | ||
1241 | # asm 2: pslld $18,<u=%xmm6 | ||
1242 | pslld $18,%xmm6 | ||
1243 | |||
1244 | # qhasm: p ^= u | ||
1245 | # asm 1: pxor <u=int6464#7,<p=int6464#1 | ||
1246 | # asm 2: pxor <u=%xmm6,<p=%xmm0 | ||
1247 | pxor %xmm6,%xmm0 | ||
1248 | |||
1249 | # qhasm: z0_stack = p | ||
1250 | # asm 1: movdqa <p=int6464#1,>z0_stack=stack128#21 | ||
1251 | # asm 2: movdqa <p=%xmm0,>z0_stack=352(%esp) | ||
1252 | movdqa %xmm0,352(%esp) | ||
1253 | |||
1254 | # qhasm: assign xmm2 to mt | ||
1255 | |||
1256 | # qhasm: assign xmm3 to mq | ||
1257 | |||
1258 | # qhasm: assign xmm4 to mp | ||
1259 | |||
1260 | # qhasm: assign xmm5 to mr | ||
1261 | |||
1262 | # qhasm: ms = mt | ||
1263 | # asm 1: movdqa <mt=int6464#3,>ms=int6464#7 | ||
1264 | # asm 2: movdqa <mt=%xmm2,>ms=%xmm6 | ||
1265 | movdqa %xmm2,%xmm6 | ||
1266 | |||
1267 | # qhasm: uint32323232 mt += mp | ||
1268 | # asm 1: paddd <mp=int6464#5,<mt=int6464#3 | ||
1269 | # asm 2: paddd <mp=%xmm4,<mt=%xmm2 | ||
1270 | paddd %xmm4,%xmm2 | ||
1271 | |||
1272 | # qhasm: mu = mt | ||
1273 | # asm 1: movdqa <mt=int6464#3,>mu=int6464#1 | ||
1274 | # asm 2: movdqa <mt=%xmm2,>mu=%xmm0 | ||
1275 | movdqa %xmm2,%xmm0 | ||
1276 | |||
1277 | # qhasm: uint32323232 mt >>= 25 | ||
1278 | # asm 1: psrld $25,<mt=int6464#3 | ||
1279 | # asm 2: psrld $25,<mt=%xmm2 | ||
1280 | psrld $25,%xmm2 | ||
1281 | |||
1282 | # qhasm: mq ^= mt | ||
1283 | # asm 1: pxor <mt=int6464#3,<mq=int6464#4 | ||
1284 | # asm 2: pxor <mt=%xmm2,<mq=%xmm3 | ||
1285 | pxor %xmm2,%xmm3 | ||
1286 | |||
1287 | # qhasm: uint32323232 mu <<= 7 | ||
1288 | # asm 1: pslld $7,<mu=int6464#1 | ||
1289 | # asm 2: pslld $7,<mu=%xmm0 | ||
1290 | pslld $7,%xmm0 | ||
1291 | |||
1292 | # qhasm: mq ^= mu | ||
1293 | # asm 1: pxor <mu=int6464#1,<mq=int6464#4 | ||
1294 | # asm 2: pxor <mu=%xmm0,<mq=%xmm3 | ||
1295 | pxor %xmm0,%xmm3 | ||
1296 | |||
1297 | # qhasm: z9_stack = mq | ||
1298 | # asm 1: movdqa <mq=int6464#4,>z9_stack=stack128#32 | ||
1299 | # asm 2: movdqa <mq=%xmm3,>z9_stack=528(%esp) | ||
1300 | movdqa %xmm3,528(%esp) | ||
1301 | |||
1302 | # qhasm: mt = mp | ||
1303 | # asm 1: movdqa <mp=int6464#5,>mt=int6464#1 | ||
1304 | # asm 2: movdqa <mp=%xmm4,>mt=%xmm0 | ||
1305 | movdqa %xmm4,%xmm0 | ||
1306 | |||
1307 | # qhasm: uint32323232 mt += mq | ||
1308 | # asm 1: paddd <mq=int6464#4,<mt=int6464#1 | ||
1309 | # asm 2: paddd <mq=%xmm3,<mt=%xmm0 | ||
1310 | paddd %xmm3,%xmm0 | ||
1311 | |||
1312 | # qhasm: mu = mt | ||
1313 | # asm 1: movdqa <mt=int6464#1,>mu=int6464#2 | ||
1314 | # asm 2: movdqa <mt=%xmm0,>mu=%xmm1 | ||
1315 | movdqa %xmm0,%xmm1 | ||
1316 | |||
1317 | # qhasm: uint32323232 mt >>= 23 | ||
1318 | # asm 1: psrld $23,<mt=int6464#1 | ||
1319 | # asm 2: psrld $23,<mt=%xmm0 | ||
1320 | psrld $23,%xmm0 | ||
1321 | |||
1322 | # qhasm: mr ^= mt | ||
1323 | # asm 1: pxor <mt=int6464#1,<mr=int6464#6 | ||
1324 | # asm 2: pxor <mt=%xmm0,<mr=%xmm5 | ||
1325 | pxor %xmm0,%xmm5 | ||
1326 | |||
1327 | # qhasm: uint32323232 mu <<= 9 | ||
1328 | # asm 1: pslld $9,<mu=int6464#2 | ||
1329 | # asm 2: pslld $9,<mu=%xmm1 | ||
1330 | pslld $9,%xmm1 | ||
1331 | |||
1332 | # qhasm: mr ^= mu | ||
1333 | # asm 1: pxor <mu=int6464#2,<mr=int6464#6 | ||
1334 | # asm 2: pxor <mu=%xmm1,<mr=%xmm5 | ||
1335 | pxor %xmm1,%xmm5 | ||
1336 | |||
1337 | # qhasm: z13_stack = mr | ||
1338 | # asm 1: movdqa <mr=int6464#6,>z13_stack=stack128#35 | ||
1339 | # asm 2: movdqa <mr=%xmm5,>z13_stack=576(%esp) | ||
1340 | movdqa %xmm5,576(%esp) | ||
1341 | |||
1342 | # qhasm: uint32323232 mq += mr | ||
1343 | # asm 1: paddd <mr=int6464#6,<mq=int6464#4 | ||
1344 | # asm 2: paddd <mr=%xmm5,<mq=%xmm3 | ||
1345 | paddd %xmm5,%xmm3 | ||
1346 | |||
1347 | # qhasm: mu = mq | ||
1348 | # asm 1: movdqa <mq=int6464#4,>mu=int6464#1 | ||
1349 | # asm 2: movdqa <mq=%xmm3,>mu=%xmm0 | ||
1350 | movdqa %xmm3,%xmm0 | ||
1351 | |||
1352 | # qhasm: uint32323232 mq >>= 19 | ||
1353 | # asm 1: psrld $19,<mq=int6464#4 | ||
1354 | # asm 2: psrld $19,<mq=%xmm3 | ||
1355 | psrld $19,%xmm3 | ||
1356 | |||
1357 | # qhasm: ms ^= mq | ||
1358 | # asm 1: pxor <mq=int6464#4,<ms=int6464#7 | ||
1359 | # asm 2: pxor <mq=%xmm3,<ms=%xmm6 | ||
1360 | pxor %xmm3,%xmm6 | ||
1361 | |||
1362 | # qhasm: uint32323232 mu <<= 13 | ||
1363 | # asm 1: pslld $13,<mu=int6464#1 | ||
1364 | # asm 2: pslld $13,<mu=%xmm0 | ||
1365 | pslld $13,%xmm0 | ||
1366 | |||
1367 | # qhasm: ms ^= mu | ||
1368 | # asm 1: pxor <mu=int6464#1,<ms=int6464#7 | ||
1369 | # asm 2: pxor <mu=%xmm0,<ms=%xmm6 | ||
1370 | pxor %xmm0,%xmm6 | ||
1371 | |||
1372 | # qhasm: t = z6_stack | ||
1373 | # asm 1: movdqa <z6_stack=stack128#26,>t=int6464#3 | ||
1374 | # asm 2: movdqa <z6_stack=432(%esp),>t=%xmm2 | ||
1375 | movdqa 432(%esp),%xmm2 | ||
1376 | |||
1377 | # qhasm: p = z10_stack | ||
1378 | # asm 1: movdqa <z10_stack=stack128#22,>p=int6464#1 | ||
1379 | # asm 2: movdqa <z10_stack=368(%esp),>p=%xmm0 | ||
1380 | movdqa 368(%esp),%xmm0 | ||
1381 | |||
1382 | # qhasm: q = z14_stack | ||
1383 | # asm 1: movdqa <z14_stack=stack128#24,>q=int6464#4 | ||
1384 | # asm 2: movdqa <z14_stack=400(%esp),>q=%xmm3 | ||
1385 | movdqa 400(%esp),%xmm3 | ||
1386 | |||
1387 | # qhasm: r = z2_stack | ||
1388 | # asm 1: movdqa <z2_stack=stack128#31,>r=int6464#2 | ||
1389 | # asm 2: movdqa <z2_stack=512(%esp),>r=%xmm1 | ||
1390 | movdqa 512(%esp),%xmm1 | ||
1391 | |||
1392 | # qhasm: z1_stack = ms | ||
1393 | # asm 1: movdqa <ms=int6464#7,>z1_stack=stack128#22 | ||
1394 | # asm 2: movdqa <ms=%xmm6,>z1_stack=368(%esp) | ||
1395 | movdqa %xmm6,368(%esp) | ||
1396 | |||
1397 | # qhasm: uint32323232 mr += ms | ||
1398 | # asm 1: paddd <ms=int6464#7,<mr=int6464#6 | ||
1399 | # asm 2: paddd <ms=%xmm6,<mr=%xmm5 | ||
1400 | paddd %xmm6,%xmm5 | ||
1401 | |||
1402 | # qhasm: mu = mr | ||
1403 | # asm 1: movdqa <mr=int6464#6,>mu=int6464#7 | ||
1404 | # asm 2: movdqa <mr=%xmm5,>mu=%xmm6 | ||
1405 | movdqa %xmm5,%xmm6 | ||
1406 | |||
1407 | # qhasm: uint32323232 mr >>= 14 | ||
1408 | # asm 1: psrld $14,<mr=int6464#6 | ||
1409 | # asm 2: psrld $14,<mr=%xmm5 | ||
1410 | psrld $14,%xmm5 | ||
1411 | |||
1412 | # qhasm: mp ^= mr | ||
1413 | # asm 1: pxor <mr=int6464#6,<mp=int6464#5 | ||
1414 | # asm 2: pxor <mr=%xmm5,<mp=%xmm4 | ||
1415 | pxor %xmm5,%xmm4 | ||
1416 | |||
1417 | # qhasm: uint32323232 mu <<= 18 | ||
1418 | # asm 1: pslld $18,<mu=int6464#7 | ||
1419 | # asm 2: pslld $18,<mu=%xmm6 | ||
1420 | pslld $18,%xmm6 | ||
1421 | |||
1422 | # qhasm: mp ^= mu | ||
1423 | # asm 1: pxor <mu=int6464#7,<mp=int6464#5 | ||
1424 | # asm 2: pxor <mu=%xmm6,<mp=%xmm4 | ||
1425 | pxor %xmm6,%xmm4 | ||
1426 | |||
1427 | # qhasm: z5_stack = mp | ||
1428 | # asm 1: movdqa <mp=int6464#5,>z5_stack=stack128#24 | ||
1429 | # asm 2: movdqa <mp=%xmm4,>z5_stack=400(%esp) | ||
1430 | movdqa %xmm4,400(%esp) | ||
1431 | |||
1432 | # qhasm: assign xmm0 to p | ||
1433 | |||
1434 | # qhasm: assign xmm1 to r | ||
1435 | |||
1436 | # qhasm: assign xmm2 to t | ||
1437 | |||
1438 | # qhasm: assign xmm3 to q | ||
1439 | |||
1440 | # qhasm: s = t | ||
1441 | # asm 1: movdqa <t=int6464#3,>s=int6464#7 | ||
1442 | # asm 2: movdqa <t=%xmm2,>s=%xmm6 | ||
1443 | movdqa %xmm2,%xmm6 | ||
1444 | |||
1445 | # qhasm: uint32323232 t += p | ||
1446 | # asm 1: paddd <p=int6464#1,<t=int6464#3 | ||
1447 | # asm 2: paddd <p=%xmm0,<t=%xmm2 | ||
1448 | paddd %xmm0,%xmm2 | ||
1449 | |||
1450 | # qhasm: u = t | ||
1451 | # asm 1: movdqa <t=int6464#3,>u=int6464#5 | ||
1452 | # asm 2: movdqa <t=%xmm2,>u=%xmm4 | ||
1453 | movdqa %xmm2,%xmm4 | ||
1454 | |||
1455 | # qhasm: uint32323232 t >>= 25 | ||
1456 | # asm 1: psrld $25,<t=int6464#3 | ||
1457 | # asm 2: psrld $25,<t=%xmm2 | ||
1458 | psrld $25,%xmm2 | ||
1459 | |||
1460 | # qhasm: q ^= t | ||
1461 | # asm 1: pxor <t=int6464#3,<q=int6464#4 | ||
1462 | # asm 2: pxor <t=%xmm2,<q=%xmm3 | ||
1463 | pxor %xmm2,%xmm3 | ||
1464 | |||
1465 | # qhasm: uint32323232 u <<= 7 | ||
1466 | # asm 1: pslld $7,<u=int6464#5 | ||
1467 | # asm 2: pslld $7,<u=%xmm4 | ||
1468 | pslld $7,%xmm4 | ||
1469 | |||
1470 | # qhasm: q ^= u | ||
1471 | # asm 1: pxor <u=int6464#5,<q=int6464#4 | ||
1472 | # asm 2: pxor <u=%xmm4,<q=%xmm3 | ||
1473 | pxor %xmm4,%xmm3 | ||
1474 | |||
1475 | # qhasm: z14_stack = q | ||
1476 | # asm 1: movdqa <q=int6464#4,>z14_stack=stack128#36 | ||
1477 | # asm 2: movdqa <q=%xmm3,>z14_stack=592(%esp) | ||
1478 | movdqa %xmm3,592(%esp) | ||
1479 | |||
1480 | # qhasm: t = p | ||
1481 | # asm 1: movdqa <p=int6464#1,>t=int6464#3 | ||
1482 | # asm 2: movdqa <p=%xmm0,>t=%xmm2 | ||
1483 | movdqa %xmm0,%xmm2 | ||
1484 | |||
1485 | # qhasm: uint32323232 t += q | ||
1486 | # asm 1: paddd <q=int6464#4,<t=int6464#3 | ||
1487 | # asm 2: paddd <q=%xmm3,<t=%xmm2 | ||
1488 | paddd %xmm3,%xmm2 | ||
1489 | |||
1490 | # qhasm: u = t | ||
1491 | # asm 1: movdqa <t=int6464#3,>u=int6464#5 | ||
1492 | # asm 2: movdqa <t=%xmm2,>u=%xmm4 | ||
1493 | movdqa %xmm2,%xmm4 | ||
1494 | |||
1495 | # qhasm: uint32323232 t >>= 23 | ||
1496 | # asm 1: psrld $23,<t=int6464#3 | ||
1497 | # asm 2: psrld $23,<t=%xmm2 | ||
1498 | psrld $23,%xmm2 | ||
1499 | |||
1500 | # qhasm: r ^= t | ||
1501 | # asm 1: pxor <t=int6464#3,<r=int6464#2 | ||
1502 | # asm 2: pxor <t=%xmm2,<r=%xmm1 | ||
1503 | pxor %xmm2,%xmm1 | ||
1504 | |||
1505 | # qhasm: uint32323232 u <<= 9 | ||
1506 | # asm 1: pslld $9,<u=int6464#5 | ||
1507 | # asm 2: pslld $9,<u=%xmm4 | ||
1508 | pslld $9,%xmm4 | ||
1509 | |||
1510 | # qhasm: r ^= u | ||
1511 | # asm 1: pxor <u=int6464#5,<r=int6464#2 | ||
1512 | # asm 2: pxor <u=%xmm4,<r=%xmm1 | ||
1513 | pxor %xmm4,%xmm1 | ||
1514 | |||
1515 | # qhasm: z2_stack = r | ||
1516 | # asm 1: movdqa <r=int6464#2,>z2_stack=stack128#26 | ||
1517 | # asm 2: movdqa <r=%xmm1,>z2_stack=432(%esp) | ||
1518 | movdqa %xmm1,432(%esp) | ||
1519 | |||
1520 | # qhasm: uint32323232 q += r | ||
1521 | # asm 1: paddd <r=int6464#2,<q=int6464#4 | ||
1522 | # asm 2: paddd <r=%xmm1,<q=%xmm3 | ||
1523 | paddd %xmm1,%xmm3 | ||
1524 | |||
1525 | # qhasm: u = q | ||
1526 | # asm 1: movdqa <q=int6464#4,>u=int6464#3 | ||
1527 | # asm 2: movdqa <q=%xmm3,>u=%xmm2 | ||
1528 | movdqa %xmm3,%xmm2 | ||
1529 | |||
1530 | # qhasm: uint32323232 q >>= 19 | ||
1531 | # asm 1: psrld $19,<q=int6464#4 | ||
1532 | # asm 2: psrld $19,<q=%xmm3 | ||
1533 | psrld $19,%xmm3 | ||
1534 | |||
1535 | # qhasm: s ^= q | ||
1536 | # asm 1: pxor <q=int6464#4,<s=int6464#7 | ||
1537 | # asm 2: pxor <q=%xmm3,<s=%xmm6 | ||
1538 | pxor %xmm3,%xmm6 | ||
1539 | |||
1540 | # qhasm: uint32323232 u <<= 13 | ||
1541 | # asm 1: pslld $13,<u=int6464#3 | ||
1542 | # asm 2: pslld $13,<u=%xmm2 | ||
1543 | pslld $13,%xmm2 | ||
1544 | |||
1545 | # qhasm: s ^= u | ||
1546 | # asm 1: pxor <u=int6464#3,<s=int6464#7 | ||
1547 | # asm 2: pxor <u=%xmm2,<s=%xmm6 | ||
1548 | pxor %xmm2,%xmm6 | ||
1549 | |||
1550 | # qhasm: mt = z11_stack | ||
1551 | # asm 1: movdqa <z11_stack=stack128#27,>mt=int6464#3 | ||
1552 | # asm 2: movdqa <z11_stack=448(%esp),>mt=%xmm2 | ||
1553 | movdqa 448(%esp),%xmm2 | ||
1554 | |||
1555 | # qhasm: mp = z15_stack | ||
1556 | # asm 1: movdqa <z15_stack=stack128#23,>mp=int6464#5 | ||
1557 | # asm 2: movdqa <z15_stack=384(%esp),>mp=%xmm4 | ||
1558 | movdqa 384(%esp),%xmm4 | ||
1559 | |||
1560 | # qhasm: mq = z3_stack | ||
1561 | # asm 1: movdqa <z3_stack=stack128#25,>mq=int6464#4 | ||
1562 | # asm 2: movdqa <z3_stack=416(%esp),>mq=%xmm3 | ||
1563 | movdqa 416(%esp),%xmm3 | ||
1564 | |||
1565 | # qhasm: mr = z7_stack | ||
1566 | # asm 1: movdqa <z7_stack=stack128#29,>mr=int6464#6 | ||
1567 | # asm 2: movdqa <z7_stack=480(%esp),>mr=%xmm5 | ||
1568 | movdqa 480(%esp),%xmm5 | ||
1569 | |||
1570 | # qhasm: z6_stack = s | ||
1571 | # asm 1: movdqa <s=int6464#7,>z6_stack=stack128#23 | ||
1572 | # asm 2: movdqa <s=%xmm6,>z6_stack=384(%esp) | ||
1573 | movdqa %xmm6,384(%esp) | ||
1574 | |||
1575 | # qhasm: uint32323232 r += s | ||
1576 | # asm 1: paddd <s=int6464#7,<r=int6464#2 | ||
1577 | # asm 2: paddd <s=%xmm6,<r=%xmm1 | ||
1578 | paddd %xmm6,%xmm1 | ||
1579 | |||
1580 | # qhasm: u = r | ||
1581 | # asm 1: movdqa <r=int6464#2,>u=int6464#7 | ||
1582 | # asm 2: movdqa <r=%xmm1,>u=%xmm6 | ||
1583 | movdqa %xmm1,%xmm6 | ||
1584 | |||
1585 | # qhasm: uint32323232 r >>= 14 | ||
1586 | # asm 1: psrld $14,<r=int6464#2 | ||
1587 | # asm 2: psrld $14,<r=%xmm1 | ||
1588 | psrld $14,%xmm1 | ||
1589 | |||
1590 | # qhasm: p ^= r | ||
1591 | # asm 1: pxor <r=int6464#2,<p=int6464#1 | ||
1592 | # asm 2: pxor <r=%xmm1,<p=%xmm0 | ||
1593 | pxor %xmm1,%xmm0 | ||
1594 | |||
1595 | # qhasm: uint32323232 u <<= 18 | ||
1596 | # asm 1: pslld $18,<u=int6464#7 | ||
1597 | # asm 2: pslld $18,<u=%xmm6 | ||
1598 | pslld $18,%xmm6 | ||
1599 | |||
1600 | # qhasm: p ^= u | ||
1601 | # asm 1: pxor <u=int6464#7,<p=int6464#1 | ||
1602 | # asm 2: pxor <u=%xmm6,<p=%xmm0 | ||
1603 | pxor %xmm6,%xmm0 | ||
1604 | |||
1605 | # qhasm: z10_stack = p | ||
1606 | # asm 1: movdqa <p=int6464#1,>z10_stack=stack128#27 | ||
1607 | # asm 2: movdqa <p=%xmm0,>z10_stack=448(%esp) | ||
1608 | movdqa %xmm0,448(%esp) | ||
1609 | |||
1610 | # qhasm: assign xmm2 to mt | ||
1611 | |||
1612 | # qhasm: assign xmm3 to mq | ||
1613 | |||
1614 | # qhasm: assign xmm4 to mp | ||
1615 | |||
1616 | # qhasm: assign xmm5 to mr | ||
1617 | |||
1618 | # qhasm: ms = mt | ||
1619 | # asm 1: movdqa <mt=int6464#3,>ms=int6464#7 | ||
1620 | # asm 2: movdqa <mt=%xmm2,>ms=%xmm6 | ||
1621 | movdqa %xmm2,%xmm6 | ||
1622 | |||
1623 | # qhasm: uint32323232 mt += mp | ||
1624 | # asm 1: paddd <mp=int6464#5,<mt=int6464#3 | ||
1625 | # asm 2: paddd <mp=%xmm4,<mt=%xmm2 | ||
1626 | paddd %xmm4,%xmm2 | ||
1627 | |||
1628 | # qhasm: mu = mt | ||
1629 | # asm 1: movdqa <mt=int6464#3,>mu=int6464#1 | ||
1630 | # asm 2: movdqa <mt=%xmm2,>mu=%xmm0 | ||
1631 | movdqa %xmm2,%xmm0 | ||
1632 | |||
1633 | # qhasm: uint32323232 mt >>= 25 | ||
1634 | # asm 1: psrld $25,<mt=int6464#3 | ||
1635 | # asm 2: psrld $25,<mt=%xmm2 | ||
1636 | psrld $25,%xmm2 | ||
1637 | |||
1638 | # qhasm: mq ^= mt | ||
1639 | # asm 1: pxor <mt=int6464#3,<mq=int6464#4 | ||
1640 | # asm 2: pxor <mt=%xmm2,<mq=%xmm3 | ||
1641 | pxor %xmm2,%xmm3 | ||
1642 | |||
1643 | # qhasm: uint32323232 mu <<= 7 | ||
1644 | # asm 1: pslld $7,<mu=int6464#1 | ||
1645 | # asm 2: pslld $7,<mu=%xmm0 | ||
1646 | pslld $7,%xmm0 | ||
1647 | |||
1648 | # qhasm: mq ^= mu | ||
1649 | # asm 1: pxor <mu=int6464#1,<mq=int6464#4 | ||
1650 | # asm 2: pxor <mu=%xmm0,<mq=%xmm3 | ||
1651 | pxor %xmm0,%xmm3 | ||
1652 | |||
1653 | # qhasm: z3_stack = mq | ||
1654 | # asm 1: movdqa <mq=int6464#4,>z3_stack=stack128#25 | ||
1655 | # asm 2: movdqa <mq=%xmm3,>z3_stack=416(%esp) | ||
1656 | movdqa %xmm3,416(%esp) | ||
1657 | |||
1658 | # qhasm: mt = mp | ||
1659 | # asm 1: movdqa <mp=int6464#5,>mt=int6464#1 | ||
1660 | # asm 2: movdqa <mp=%xmm4,>mt=%xmm0 | ||
1661 | movdqa %xmm4,%xmm0 | ||
1662 | |||
1663 | # qhasm: uint32323232 mt += mq | ||
1664 | # asm 1: paddd <mq=int6464#4,<mt=int6464#1 | ||
1665 | # asm 2: paddd <mq=%xmm3,<mt=%xmm0 | ||
1666 | paddd %xmm3,%xmm0 | ||
1667 | |||
1668 | # qhasm: mu = mt | ||
1669 | # asm 1: movdqa <mt=int6464#1,>mu=int6464#2 | ||
1670 | # asm 2: movdqa <mt=%xmm0,>mu=%xmm1 | ||
1671 | movdqa %xmm0,%xmm1 | ||
1672 | |||
1673 | # qhasm: uint32323232 mt >>= 23 | ||
1674 | # asm 1: psrld $23,<mt=int6464#1 | ||
1675 | # asm 2: psrld $23,<mt=%xmm0 | ||
1676 | psrld $23,%xmm0 | ||
1677 | |||
1678 | # qhasm: mr ^= mt | ||
1679 | # asm 1: pxor <mt=int6464#1,<mr=int6464#6 | ||
1680 | # asm 2: pxor <mt=%xmm0,<mr=%xmm5 | ||
1681 | pxor %xmm0,%xmm5 | ||
1682 | |||
1683 | # qhasm: uint32323232 mu <<= 9 | ||
1684 | # asm 1: pslld $9,<mu=int6464#2 | ||
1685 | # asm 2: pslld $9,<mu=%xmm1 | ||
1686 | pslld $9,%xmm1 | ||
1687 | |||
1688 | # qhasm: mr ^= mu | ||
1689 | # asm 1: pxor <mu=int6464#2,<mr=int6464#6 | ||
1690 | # asm 2: pxor <mu=%xmm1,<mr=%xmm5 | ||
1691 | pxor %xmm1,%xmm5 | ||
1692 | |||
1693 | # qhasm: z7_stack = mr | ||
1694 | # asm 1: movdqa <mr=int6464#6,>z7_stack=stack128#29 | ||
1695 | # asm 2: movdqa <mr=%xmm5,>z7_stack=480(%esp) | ||
1696 | movdqa %xmm5,480(%esp) | ||
1697 | |||
1698 | # qhasm: uint32323232 mq += mr | ||
1699 | # asm 1: paddd <mr=int6464#6,<mq=int6464#4 | ||
1700 | # asm 2: paddd <mr=%xmm5,<mq=%xmm3 | ||
1701 | paddd %xmm5,%xmm3 | ||
1702 | |||
1703 | # qhasm: mu = mq | ||
1704 | # asm 1: movdqa <mq=int6464#4,>mu=int6464#1 | ||
1705 | # asm 2: movdqa <mq=%xmm3,>mu=%xmm0 | ||
1706 | movdqa %xmm3,%xmm0 | ||
1707 | |||
1708 | # qhasm: uint32323232 mq >>= 19 | ||
1709 | # asm 1: psrld $19,<mq=int6464#4 | ||
1710 | # asm 2: psrld $19,<mq=%xmm3 | ||
1711 | psrld $19,%xmm3 | ||
1712 | |||
1713 | # qhasm: ms ^= mq | ||
1714 | # asm 1: pxor <mq=int6464#4,<ms=int6464#7 | ||
1715 | # asm 2: pxor <mq=%xmm3,<ms=%xmm6 | ||
1716 | pxor %xmm3,%xmm6 | ||
1717 | |||
1718 | # qhasm: uint32323232 mu <<= 13 | ||
1719 | # asm 1: pslld $13,<mu=int6464#1 | ||
1720 | # asm 2: pslld $13,<mu=%xmm0 | ||
1721 | pslld $13,%xmm0 | ||
1722 | |||
1723 | # qhasm: ms ^= mu | ||
1724 | # asm 1: pxor <mu=int6464#1,<ms=int6464#7 | ||
1725 | # asm 2: pxor <mu=%xmm0,<ms=%xmm6 | ||
1726 | pxor %xmm0,%xmm6 | ||
1727 | |||
1728 | # qhasm: t = z3_stack | ||
1729 | # asm 1: movdqa <z3_stack=stack128#25,>t=int6464#3 | ||
1730 | # asm 2: movdqa <z3_stack=416(%esp),>t=%xmm2 | ||
1731 | movdqa 416(%esp),%xmm2 | ||
1732 | |||
1733 | # qhasm: p = z0_stack | ||
1734 | # asm 1: movdqa <z0_stack=stack128#21,>p=int6464#1 | ||
1735 | # asm 2: movdqa <z0_stack=352(%esp),>p=%xmm0 | ||
1736 | movdqa 352(%esp),%xmm0 | ||
1737 | |||
1738 | # qhasm: q = z1_stack | ||
1739 | # asm 1: movdqa <z1_stack=stack128#22,>q=int6464#4 | ||
1740 | # asm 2: movdqa <z1_stack=368(%esp),>q=%xmm3 | ||
1741 | movdqa 368(%esp),%xmm3 | ||
1742 | |||
1743 | # qhasm: r = z2_stack | ||
1744 | # asm 1: movdqa <z2_stack=stack128#26,>r=int6464#2 | ||
1745 | # asm 2: movdqa <z2_stack=432(%esp),>r=%xmm1 | ||
1746 | movdqa 432(%esp),%xmm1 | ||
1747 | |||
1748 | # qhasm: z11_stack = ms | ||
1749 | # asm 1: movdqa <ms=int6464#7,>z11_stack=stack128#21 | ||
1750 | # asm 2: movdqa <ms=%xmm6,>z11_stack=352(%esp) | ||
1751 | movdqa %xmm6,352(%esp) | ||
1752 | |||
1753 | # qhasm: uint32323232 mr += ms | ||
1754 | # asm 1: paddd <ms=int6464#7,<mr=int6464#6 | ||
1755 | # asm 2: paddd <ms=%xmm6,<mr=%xmm5 | ||
1756 | paddd %xmm6,%xmm5 | ||
1757 | |||
1758 | # qhasm: mu = mr | ||
1759 | # asm 1: movdqa <mr=int6464#6,>mu=int6464#7 | ||
1760 | # asm 2: movdqa <mr=%xmm5,>mu=%xmm6 | ||
1761 | movdqa %xmm5,%xmm6 | ||
1762 | |||
1763 | # qhasm: uint32323232 mr >>= 14 | ||
1764 | # asm 1: psrld $14,<mr=int6464#6 | ||
1765 | # asm 2: psrld $14,<mr=%xmm5 | ||
1766 | psrld $14,%xmm5 | ||
1767 | |||
1768 | # qhasm: mp ^= mr | ||
1769 | # asm 1: pxor <mr=int6464#6,<mp=int6464#5 | ||
1770 | # asm 2: pxor <mr=%xmm5,<mp=%xmm4 | ||
1771 | pxor %xmm5,%xmm4 | ||
1772 | |||
1773 | # qhasm: uint32323232 mu <<= 18 | ||
1774 | # asm 1: pslld $18,<mu=int6464#7 | ||
1775 | # asm 2: pslld $18,<mu=%xmm6 | ||
1776 | pslld $18,%xmm6 | ||
1777 | |||
1778 | # qhasm: mp ^= mu | ||
1779 | # asm 1: pxor <mu=int6464#7,<mp=int6464#5 | ||
1780 | # asm 2: pxor <mu=%xmm6,<mp=%xmm4 | ||
1781 | pxor %xmm6,%xmm4 | ||
1782 | |||
1783 | # qhasm: z15_stack = mp | ||
1784 | # asm 1: movdqa <mp=int6464#5,>z15_stack=stack128#22 | ||
1785 | # asm 2: movdqa <mp=%xmm4,>z15_stack=368(%esp) | ||
1786 | movdqa %xmm4,368(%esp) | ||
1787 | |||
1788 | # qhasm: assign xmm0 to p | ||
1789 | |||
1790 | # qhasm: assign xmm1 to r | ||
1791 | |||
1792 | # qhasm: assign xmm2 to t | ||
1793 | |||
1794 | # qhasm: assign xmm3 to q | ||
1795 | |||
1796 | # qhasm: s = t | ||
1797 | # asm 1: movdqa <t=int6464#3,>s=int6464#7 | ||
1798 | # asm 2: movdqa <t=%xmm2,>s=%xmm6 | ||
1799 | movdqa %xmm2,%xmm6 | ||
1800 | |||
1801 | # qhasm: uint32323232 t += p | ||
1802 | # asm 1: paddd <p=int6464#1,<t=int6464#3 | ||
1803 | # asm 2: paddd <p=%xmm0,<t=%xmm2 | ||
1804 | paddd %xmm0,%xmm2 | ||
1805 | |||
1806 | # qhasm: u = t | ||
1807 | # asm 1: movdqa <t=int6464#3,>u=int6464#5 | ||
1808 | # asm 2: movdqa <t=%xmm2,>u=%xmm4 | ||
1809 | movdqa %xmm2,%xmm4 | ||
1810 | |||
1811 | # qhasm: uint32323232 t >>= 25 | ||
1812 | # asm 1: psrld $25,<t=int6464#3 | ||
1813 | # asm 2: psrld $25,<t=%xmm2 | ||
1814 | psrld $25,%xmm2 | ||
1815 | |||
1816 | # qhasm: q ^= t | ||
1817 | # asm 1: pxor <t=int6464#3,<q=int6464#4 | ||
1818 | # asm 2: pxor <t=%xmm2,<q=%xmm3 | ||
1819 | pxor %xmm2,%xmm3 | ||
1820 | |||
1821 | # qhasm: uint32323232 u <<= 7 | ||
1822 | # asm 1: pslld $7,<u=int6464#5 | ||
1823 | # asm 2: pslld $7,<u=%xmm4 | ||
1824 | pslld $7,%xmm4 | ||
1825 | |||
1826 | # qhasm: q ^= u | ||
1827 | # asm 1: pxor <u=int6464#5,<q=int6464#4 | ||
1828 | # asm 2: pxor <u=%xmm4,<q=%xmm3 | ||
1829 | pxor %xmm4,%xmm3 | ||
1830 | |||
1831 | # qhasm: z1_stack = q | ||
1832 | # asm 1: movdqa <q=int6464#4,>z1_stack=stack128#28 | ||
1833 | # asm 2: movdqa <q=%xmm3,>z1_stack=464(%esp) | ||
1834 | movdqa %xmm3,464(%esp) | ||
1835 | |||
1836 | # qhasm: t = p | ||
1837 | # asm 1: movdqa <p=int6464#1,>t=int6464#3 | ||
1838 | # asm 2: movdqa <p=%xmm0,>t=%xmm2 | ||
1839 | movdqa %xmm0,%xmm2 | ||
1840 | |||
1841 | # qhasm: uint32323232 t += q | ||
1842 | # asm 1: paddd <q=int6464#4,<t=int6464#3 | ||
1843 | # asm 2: paddd <q=%xmm3,<t=%xmm2 | ||
1844 | paddd %xmm3,%xmm2 | ||
1845 | |||
1846 | # qhasm: u = t | ||
1847 | # asm 1: movdqa <t=int6464#3,>u=int6464#5 | ||
1848 | # asm 2: movdqa <t=%xmm2,>u=%xmm4 | ||
1849 | movdqa %xmm2,%xmm4 | ||
1850 | |||
1851 | # qhasm: uint32323232 t >>= 23 | ||
1852 | # asm 1: psrld $23,<t=int6464#3 | ||
1853 | # asm 2: psrld $23,<t=%xmm2 | ||
1854 | psrld $23,%xmm2 | ||
1855 | |||
1856 | # qhasm: r ^= t | ||
1857 | # asm 1: pxor <t=int6464#3,<r=int6464#2 | ||
1858 | # asm 2: pxor <t=%xmm2,<r=%xmm1 | ||
1859 | pxor %xmm2,%xmm1 | ||
1860 | |||
1861 | # qhasm: uint32323232 u <<= 9 | ||
1862 | # asm 1: pslld $9,<u=int6464#5 | ||
1863 | # asm 2: pslld $9,<u=%xmm4 | ||
1864 | pslld $9,%xmm4 | ||
1865 | |||
1866 | # qhasm: r ^= u | ||
1867 | # asm 1: pxor <u=int6464#5,<r=int6464#2 | ||
1868 | # asm 2: pxor <u=%xmm4,<r=%xmm1 | ||
1869 | pxor %xmm4,%xmm1 | ||
1870 | |||
1871 | # qhasm: z2_stack = r | ||
1872 | # asm 1: movdqa <r=int6464#2,>z2_stack=stack128#31 | ||
1873 | # asm 2: movdqa <r=%xmm1,>z2_stack=512(%esp) | ||
1874 | movdqa %xmm1,512(%esp) | ||
1875 | |||
1876 | # qhasm: uint32323232 q += r | ||
1877 | # asm 1: paddd <r=int6464#2,<q=int6464#4 | ||
1878 | # asm 2: paddd <r=%xmm1,<q=%xmm3 | ||
1879 | paddd %xmm1,%xmm3 | ||
1880 | |||
1881 | # qhasm: u = q | ||
1882 | # asm 1: movdqa <q=int6464#4,>u=int6464#3 | ||
1883 | # asm 2: movdqa <q=%xmm3,>u=%xmm2 | ||
1884 | movdqa %xmm3,%xmm2 | ||
1885 | |||
1886 | # qhasm: uint32323232 q >>= 19 | ||
1887 | # asm 1: psrld $19,<q=int6464#4 | ||
1888 | # asm 2: psrld $19,<q=%xmm3 | ||
1889 | psrld $19,%xmm3 | ||
1890 | |||
1891 | # qhasm: s ^= q | ||
1892 | # asm 1: pxor <q=int6464#4,<s=int6464#7 | ||
1893 | # asm 2: pxor <q=%xmm3,<s=%xmm6 | ||
1894 | pxor %xmm3,%xmm6 | ||
1895 | |||
1896 | # qhasm: uint32323232 u <<= 13 | ||
1897 | # asm 1: pslld $13,<u=int6464#3 | ||
1898 | # asm 2: pslld $13,<u=%xmm2 | ||
1899 | pslld $13,%xmm2 | ||
1900 | |||
1901 | # qhasm: s ^= u | ||
1902 | # asm 1: pxor <u=int6464#3,<s=int6464#7 | ||
1903 | # asm 2: pxor <u=%xmm2,<s=%xmm6 | ||
1904 | pxor %xmm2,%xmm6 | ||
1905 | |||
1906 | # qhasm: mt = z4_stack | ||
1907 | # asm 1: movdqa <z4_stack=stack128#33,>mt=int6464#3 | ||
1908 | # asm 2: movdqa <z4_stack=544(%esp),>mt=%xmm2 | ||
1909 | movdqa 544(%esp),%xmm2 | ||
1910 | |||
1911 | # qhasm: mp = z5_stack | ||
1912 | # asm 1: movdqa <z5_stack=stack128#24,>mp=int6464#5 | ||
1913 | # asm 2: movdqa <z5_stack=400(%esp),>mp=%xmm4 | ||
1914 | movdqa 400(%esp),%xmm4 | ||
1915 | |||
1916 | # qhasm: mq = z6_stack | ||
1917 | # asm 1: movdqa <z6_stack=stack128#23,>mq=int6464#4 | ||
1918 | # asm 2: movdqa <z6_stack=384(%esp),>mq=%xmm3 | ||
1919 | movdqa 384(%esp),%xmm3 | ||
1920 | |||
1921 | # qhasm: mr = z7_stack | ||
1922 | # asm 1: movdqa <z7_stack=stack128#29,>mr=int6464#6 | ||
1923 | # asm 2: movdqa <z7_stack=480(%esp),>mr=%xmm5 | ||
1924 | movdqa 480(%esp),%xmm5 | ||
1925 | |||
1926 | # qhasm: z3_stack = s | ||
1927 | # asm 1: movdqa <s=int6464#7,>z3_stack=stack128#25 | ||
1928 | # asm 2: movdqa <s=%xmm6,>z3_stack=416(%esp) | ||
1929 | movdqa %xmm6,416(%esp) | ||
1930 | |||
1931 | # qhasm: uint32323232 r += s | ||
1932 | # asm 1: paddd <s=int6464#7,<r=int6464#2 | ||
1933 | # asm 2: paddd <s=%xmm6,<r=%xmm1 | ||
1934 | paddd %xmm6,%xmm1 | ||
1935 | |||
1936 | # qhasm: u = r | ||
1937 | # asm 1: movdqa <r=int6464#2,>u=int6464#7 | ||
1938 | # asm 2: movdqa <r=%xmm1,>u=%xmm6 | ||
1939 | movdqa %xmm1,%xmm6 | ||
1940 | |||
1941 | # qhasm: uint32323232 r >>= 14 | ||
1942 | # asm 1: psrld $14,<r=int6464#2 | ||
1943 | # asm 2: psrld $14,<r=%xmm1 | ||
1944 | psrld $14,%xmm1 | ||
1945 | |||
1946 | # qhasm: p ^= r | ||
1947 | # asm 1: pxor <r=int6464#2,<p=int6464#1 | ||
1948 | # asm 2: pxor <r=%xmm1,<p=%xmm0 | ||
1949 | pxor %xmm1,%xmm0 | ||
1950 | |||
1951 | # qhasm: uint32323232 u <<= 18 | ||
1952 | # asm 1: pslld $18,<u=int6464#7 | ||
1953 | # asm 2: pslld $18,<u=%xmm6 | ||
1954 | pslld $18,%xmm6 | ||
1955 | |||
1956 | # qhasm: p ^= u | ||
1957 | # asm 1: pxor <u=int6464#7,<p=int6464#1 | ||
1958 | # asm 2: pxor <u=%xmm6,<p=%xmm0 | ||
1959 | pxor %xmm6,%xmm0 | ||
1960 | |||
1961 | # qhasm: z0_stack = p | ||
1962 | # asm 1: movdqa <p=int6464#1,>z0_stack=stack128#33 | ||
1963 | # asm 2: movdqa <p=%xmm0,>z0_stack=544(%esp) | ||
1964 | movdqa %xmm0,544(%esp) | ||
1965 | |||
1966 | # qhasm: assign xmm2 to mt | ||
1967 | |||
1968 | # qhasm: assign xmm3 to mq | ||
1969 | |||
1970 | # qhasm: assign xmm4 to mp | ||
1971 | |||
1972 | # qhasm: assign xmm5 to mr | ||
1973 | |||
1974 | # qhasm: ms = mt | ||
1975 | # asm 1: movdqa <mt=int6464#3,>ms=int6464#7 | ||
1976 | # asm 2: movdqa <mt=%xmm2,>ms=%xmm6 | ||
1977 | movdqa %xmm2,%xmm6 | ||
1978 | |||
1979 | # qhasm: uint32323232 mt += mp | ||
1980 | # asm 1: paddd <mp=int6464#5,<mt=int6464#3 | ||
1981 | # asm 2: paddd <mp=%xmm4,<mt=%xmm2 | ||
1982 | paddd %xmm4,%xmm2 | ||
1983 | |||
1984 | # qhasm: mu = mt | ||
1985 | # asm 1: movdqa <mt=int6464#3,>mu=int6464#1 | ||
1986 | # asm 2: movdqa <mt=%xmm2,>mu=%xmm0 | ||
1987 | movdqa %xmm2,%xmm0 | ||
1988 | |||
1989 | # qhasm: uint32323232 mt >>= 25 | ||
1990 | # asm 1: psrld $25,<mt=int6464#3 | ||
1991 | # asm 2: psrld $25,<mt=%xmm2 | ||
1992 | psrld $25,%xmm2 | ||
1993 | |||
1994 | # qhasm: mq ^= mt | ||
1995 | # asm 1: pxor <mt=int6464#3,<mq=int6464#4 | ||
1996 | # asm 2: pxor <mt=%xmm2,<mq=%xmm3 | ||
1997 | pxor %xmm2,%xmm3 | ||
1998 | |||
1999 | # qhasm: uint32323232 mu <<= 7 | ||
2000 | # asm 1: pslld $7,<mu=int6464#1 | ||
2001 | # asm 2: pslld $7,<mu=%xmm0 | ||
2002 | pslld $7,%xmm0 | ||
2003 | |||
2004 | # qhasm: mq ^= mu | ||
2005 | # asm 1: pxor <mu=int6464#1,<mq=int6464#4 | ||
2006 | # asm 2: pxor <mu=%xmm0,<mq=%xmm3 | ||
2007 | pxor %xmm0,%xmm3 | ||
2008 | |||
2009 | # qhasm: z6_stack = mq | ||
2010 | # asm 1: movdqa <mq=int6464#4,>z6_stack=stack128#26 | ||
2011 | # asm 2: movdqa <mq=%xmm3,>z6_stack=432(%esp) | ||
2012 | movdqa %xmm3,432(%esp) | ||
2013 | |||
2014 | # qhasm: mt = mp | ||
2015 | # asm 1: movdqa <mp=int6464#5,>mt=int6464#1 | ||
2016 | # asm 2: movdqa <mp=%xmm4,>mt=%xmm0 | ||
2017 | movdqa %xmm4,%xmm0 | ||
2018 | |||
2019 | # qhasm: uint32323232 mt += mq | ||
2020 | # asm 1: paddd <mq=int6464#4,<mt=int6464#1 | ||
2021 | # asm 2: paddd <mq=%xmm3,<mt=%xmm0 | ||
2022 | paddd %xmm3,%xmm0 | ||
2023 | |||
2024 | # qhasm: mu = mt | ||
2025 | # asm 1: movdqa <mt=int6464#1,>mu=int6464#2 | ||
2026 | # asm 2: movdqa <mt=%xmm0,>mu=%xmm1 | ||
2027 | movdqa %xmm0,%xmm1 | ||
2028 | |||
2029 | # qhasm: uint32323232 mt >>= 23 | ||
2030 | # asm 1: psrld $23,<mt=int6464#1 | ||
2031 | # asm 2: psrld $23,<mt=%xmm0 | ||
2032 | psrld $23,%xmm0 | ||
2033 | |||
2034 | # qhasm: mr ^= mt | ||
2035 | # asm 1: pxor <mt=int6464#1,<mr=int6464#6 | ||
2036 | # asm 2: pxor <mt=%xmm0,<mr=%xmm5 | ||
2037 | pxor %xmm0,%xmm5 | ||
2038 | |||
2039 | # qhasm: uint32323232 mu <<= 9 | ||
2040 | # asm 1: pslld $9,<mu=int6464#2 | ||
2041 | # asm 2: pslld $9,<mu=%xmm1 | ||
2042 | pslld $9,%xmm1 | ||
2043 | |||
2044 | # qhasm: mr ^= mu | ||
2045 | # asm 1: pxor <mu=int6464#2,<mr=int6464#6 | ||
2046 | # asm 2: pxor <mu=%xmm1,<mr=%xmm5 | ||
2047 | pxor %xmm1,%xmm5 | ||
2048 | |||
2049 | # qhasm: z7_stack = mr | ||
2050 | # asm 1: movdqa <mr=int6464#6,>z7_stack=stack128#29 | ||
2051 | # asm 2: movdqa <mr=%xmm5,>z7_stack=480(%esp) | ||
2052 | movdqa %xmm5,480(%esp) | ||
2053 | |||
2054 | # qhasm: uint32323232 mq += mr | ||
2055 | # asm 1: paddd <mr=int6464#6,<mq=int6464#4 | ||
2056 | # asm 2: paddd <mr=%xmm5,<mq=%xmm3 | ||
2057 | paddd %xmm5,%xmm3 | ||
2058 | |||
2059 | # qhasm: mu = mq | ||
2060 | # asm 1: movdqa <mq=int6464#4,>mu=int6464#1 | ||
2061 | # asm 2: movdqa <mq=%xmm3,>mu=%xmm0 | ||
2062 | movdqa %xmm3,%xmm0 | ||
2063 | |||
2064 | # qhasm: uint32323232 mq >>= 19 | ||
2065 | # asm 1: psrld $19,<mq=int6464#4 | ||
2066 | # asm 2: psrld $19,<mq=%xmm3 | ||
2067 | psrld $19,%xmm3 | ||
2068 | |||
2069 | # qhasm: ms ^= mq | ||
2070 | # asm 1: pxor <mq=int6464#4,<ms=int6464#7 | ||
2071 | # asm 2: pxor <mq=%xmm3,<ms=%xmm6 | ||
2072 | pxor %xmm3,%xmm6 | ||
2073 | |||
2074 | # qhasm: uint32323232 mu <<= 13 | ||
2075 | # asm 1: pslld $13,<mu=int6464#1 | ||
2076 | # asm 2: pslld $13,<mu=%xmm0 | ||
2077 | pslld $13,%xmm0 | ||
2078 | |||
2079 | # qhasm: ms ^= mu | ||
2080 | # asm 1: pxor <mu=int6464#1,<ms=int6464#7 | ||
2081 | # asm 2: pxor <mu=%xmm0,<ms=%xmm6 | ||
2082 | pxor %xmm0,%xmm6 | ||
2083 | |||
2084 | # qhasm: t = z9_stack | ||
2085 | # asm 1: movdqa <z9_stack=stack128#32,>t=int6464#3 | ||
2086 | # asm 2: movdqa <z9_stack=528(%esp),>t=%xmm2 | ||
2087 | movdqa 528(%esp),%xmm2 | ||
2088 | |||
2089 | # qhasm: p = z10_stack | ||
2090 | # asm 1: movdqa <z10_stack=stack128#27,>p=int6464#1 | ||
2091 | # asm 2: movdqa <z10_stack=448(%esp),>p=%xmm0 | ||
2092 | movdqa 448(%esp),%xmm0 | ||
2093 | |||
2094 | # qhasm: q = z11_stack | ||
2095 | # asm 1: movdqa <z11_stack=stack128#21,>q=int6464#4 | ||
2096 | # asm 2: movdqa <z11_stack=352(%esp),>q=%xmm3 | ||
2097 | movdqa 352(%esp),%xmm3 | ||
2098 | |||
2099 | # qhasm: r = z8_stack | ||
2100 | # asm 1: movdqa <z8_stack=stack128#34,>r=int6464#2 | ||
2101 | # asm 2: movdqa <z8_stack=560(%esp),>r=%xmm1 | ||
2102 | movdqa 560(%esp),%xmm1 | ||
2103 | |||
2104 | # qhasm: z4_stack = ms | ||
2105 | # asm 1: movdqa <ms=int6464#7,>z4_stack=stack128#34 | ||
2106 | # asm 2: movdqa <ms=%xmm6,>z4_stack=560(%esp) | ||
2107 | movdqa %xmm6,560(%esp) | ||
2108 | |||
2109 | # qhasm: uint32323232 mr += ms | ||
2110 | # asm 1: paddd <ms=int6464#7,<mr=int6464#6 | ||
2111 | # asm 2: paddd <ms=%xmm6,<mr=%xmm5 | ||
2112 | paddd %xmm6,%xmm5 | ||
2113 | |||
2114 | # qhasm: mu = mr | ||
2115 | # asm 1: movdqa <mr=int6464#6,>mu=int6464#7 | ||
2116 | # asm 2: movdqa <mr=%xmm5,>mu=%xmm6 | ||
2117 | movdqa %xmm5,%xmm6 | ||
2118 | |||
2119 | # qhasm: uint32323232 mr >>= 14 | ||
2120 | # asm 1: psrld $14,<mr=int6464#6 | ||
2121 | # asm 2: psrld $14,<mr=%xmm5 | ||
2122 | psrld $14,%xmm5 | ||
2123 | |||
2124 | # qhasm: mp ^= mr | ||
2125 | # asm 1: pxor <mr=int6464#6,<mp=int6464#5 | ||
2126 | # asm 2: pxor <mr=%xmm5,<mp=%xmm4 | ||
2127 | pxor %xmm5,%xmm4 | ||
2128 | |||
2129 | # qhasm: uint32323232 mu <<= 18 | ||
2130 | # asm 1: pslld $18,<mu=int6464#7 | ||
2131 | # asm 2: pslld $18,<mu=%xmm6 | ||
2132 | pslld $18,%xmm6 | ||
2133 | |||
2134 | # qhasm: mp ^= mu | ||
2135 | # asm 1: pxor <mu=int6464#7,<mp=int6464#5 | ||
2136 | # asm 2: pxor <mu=%xmm6,<mp=%xmm4 | ||
2137 | pxor %xmm6,%xmm4 | ||
2138 | |||
2139 | # qhasm: z5_stack = mp | ||
2140 | # asm 1: movdqa <mp=int6464#5,>z5_stack=stack128#21 | ||
2141 | # asm 2: movdqa <mp=%xmm4,>z5_stack=352(%esp) | ||
2142 | movdqa %xmm4,352(%esp) | ||
2143 | |||
2144 | # qhasm: assign xmm0 to p | ||
2145 | |||
2146 | # qhasm: assign xmm1 to r | ||
2147 | |||
2148 | # qhasm: assign xmm2 to t | ||
2149 | |||
2150 | # qhasm: assign xmm3 to q | ||
2151 | |||
2152 | # qhasm: s = t | ||
2153 | # asm 1: movdqa <t=int6464#3,>s=int6464#7 | ||
2154 | # asm 2: movdqa <t=%xmm2,>s=%xmm6 | ||
2155 | movdqa %xmm2,%xmm6 | ||
2156 | |||
2157 | # qhasm: uint32323232 t += p | ||
2158 | # asm 1: paddd <p=int6464#1,<t=int6464#3 | ||
2159 | # asm 2: paddd <p=%xmm0,<t=%xmm2 | ||
2160 | paddd %xmm0,%xmm2 | ||
2161 | |||
2162 | # qhasm: u = t | ||
2163 | # asm 1: movdqa <t=int6464#3,>u=int6464#5 | ||
2164 | # asm 2: movdqa <t=%xmm2,>u=%xmm4 | ||
2165 | movdqa %xmm2,%xmm4 | ||
2166 | |||
2167 | # qhasm: uint32323232 t >>= 25 | ||
2168 | # asm 1: psrld $25,<t=int6464#3 | ||
2169 | # asm 2: psrld $25,<t=%xmm2 | ||
2170 | psrld $25,%xmm2 | ||
2171 | |||
2172 | # qhasm: q ^= t | ||
2173 | # asm 1: pxor <t=int6464#3,<q=int6464#4 | ||
2174 | # asm 2: pxor <t=%xmm2,<q=%xmm3 | ||
2175 | pxor %xmm2,%xmm3 | ||
2176 | |||
2177 | # qhasm: uint32323232 u <<= 7 | ||
2178 | # asm 1: pslld $7,<u=int6464#5 | ||
2179 | # asm 2: pslld $7,<u=%xmm4 | ||
2180 | pslld $7,%xmm4 | ||
2181 | |||
2182 | # qhasm: q ^= u | ||
2183 | # asm 1: pxor <u=int6464#5,<q=int6464#4 | ||
2184 | # asm 2: pxor <u=%xmm4,<q=%xmm3 | ||
2185 | pxor %xmm4,%xmm3 | ||
2186 | |||
2187 | # qhasm: z11_stack = q | ||
2188 | # asm 1: movdqa <q=int6464#4,>z11_stack=stack128#27 | ||
2189 | # asm 2: movdqa <q=%xmm3,>z11_stack=448(%esp) | ||
2190 | movdqa %xmm3,448(%esp) | ||
2191 | |||
2192 | # qhasm: t = p | ||
2193 | # asm 1: movdqa <p=int6464#1,>t=int6464#3 | ||
2194 | # asm 2: movdqa <p=%xmm0,>t=%xmm2 | ||
2195 | movdqa %xmm0,%xmm2 | ||
2196 | |||
2197 | # qhasm: uint32323232 t += q | ||
2198 | # asm 1: paddd <q=int6464#4,<t=int6464#3 | ||
2199 | # asm 2: paddd <q=%xmm3,<t=%xmm2 | ||
2200 | paddd %xmm3,%xmm2 | ||
2201 | |||
2202 | # qhasm: u = t | ||
2203 | # asm 1: movdqa <t=int6464#3,>u=int6464#5 | ||
2204 | # asm 2: movdqa <t=%xmm2,>u=%xmm4 | ||
2205 | movdqa %xmm2,%xmm4 | ||
2206 | |||
2207 | # qhasm: uint32323232 t >>= 23 | ||
2208 | # asm 1: psrld $23,<t=int6464#3 | ||
2209 | # asm 2: psrld $23,<t=%xmm2 | ||
2210 | psrld $23,%xmm2 | ||
2211 | |||
2212 | # qhasm: r ^= t | ||
2213 | # asm 1: pxor <t=int6464#3,<r=int6464#2 | ||
2214 | # asm 2: pxor <t=%xmm2,<r=%xmm1 | ||
2215 | pxor %xmm2,%xmm1 | ||
2216 | |||
2217 | # qhasm: uint32323232 u <<= 9 | ||
2218 | # asm 1: pslld $9,<u=int6464#5 | ||
2219 | # asm 2: pslld $9,<u=%xmm4 | ||
2220 | pslld $9,%xmm4 | ||
2221 | |||
2222 | # qhasm: r ^= u | ||
2223 | # asm 1: pxor <u=int6464#5,<r=int6464#2 | ||
2224 | # asm 2: pxor <u=%xmm4,<r=%xmm1 | ||
2225 | pxor %xmm4,%xmm1 | ||
2226 | |||
2227 | # qhasm: z8_stack = r | ||
2228 | # asm 1: movdqa <r=int6464#2,>z8_stack=stack128#37 | ||
2229 | # asm 2: movdqa <r=%xmm1,>z8_stack=608(%esp) | ||
2230 | movdqa %xmm1,608(%esp) | ||
2231 | |||
2232 | # qhasm: uint32323232 q += r | ||
2233 | # asm 1: paddd <r=int6464#2,<q=int6464#4 | ||
2234 | # asm 2: paddd <r=%xmm1,<q=%xmm3 | ||
2235 | paddd %xmm1,%xmm3 | ||
2236 | |||
2237 | # qhasm: u = q | ||
2238 | # asm 1: movdqa <q=int6464#4,>u=int6464#3 | ||
2239 | # asm 2: movdqa <q=%xmm3,>u=%xmm2 | ||
2240 | movdqa %xmm3,%xmm2 | ||
2241 | |||
2242 | # qhasm: uint32323232 q >>= 19 | ||
2243 | # asm 1: psrld $19,<q=int6464#4 | ||
2244 | # asm 2: psrld $19,<q=%xmm3 | ||
2245 | psrld $19,%xmm3 | ||
2246 | |||
2247 | # qhasm: s ^= q | ||
2248 | # asm 1: pxor <q=int6464#4,<s=int6464#7 | ||
2249 | # asm 2: pxor <q=%xmm3,<s=%xmm6 | ||
2250 | pxor %xmm3,%xmm6 | ||
2251 | |||
2252 | # qhasm: uint32323232 u <<= 13 | ||
2253 | # asm 1: pslld $13,<u=int6464#3 | ||
2254 | # asm 2: pslld $13,<u=%xmm2 | ||
2255 | pslld $13,%xmm2 | ||
2256 | |||
2257 | # qhasm: s ^= u | ||
2258 | # asm 1: pxor <u=int6464#3,<s=int6464#7 | ||
2259 | # asm 2: pxor <u=%xmm2,<s=%xmm6 | ||
2260 | pxor %xmm2,%xmm6 | ||
2261 | |||
2262 | # qhasm: mt = z14_stack | ||
2263 | # asm 1: movdqa <z14_stack=stack128#36,>mt=int6464#3 | ||
2264 | # asm 2: movdqa <z14_stack=592(%esp),>mt=%xmm2 | ||
2265 | movdqa 592(%esp),%xmm2 | ||
2266 | |||
2267 | # qhasm: mp = z15_stack | ||
2268 | # asm 1: movdqa <z15_stack=stack128#22,>mp=int6464#5 | ||
2269 | # asm 2: movdqa <z15_stack=368(%esp),>mp=%xmm4 | ||
2270 | movdqa 368(%esp),%xmm4 | ||
2271 | |||
2272 | # qhasm: mq = z12_stack | ||
2273 | # asm 1: movdqa <z12_stack=stack128#30,>mq=int6464#4 | ||
2274 | # asm 2: movdqa <z12_stack=496(%esp),>mq=%xmm3 | ||
2275 | movdqa 496(%esp),%xmm3 | ||
2276 | |||
2277 | # qhasm: mr = z13_stack | ||
2278 | # asm 1: movdqa <z13_stack=stack128#35,>mr=int6464#6 | ||
2279 | # asm 2: movdqa <z13_stack=576(%esp),>mr=%xmm5 | ||
2280 | movdqa 576(%esp),%xmm5 | ||
2281 | |||
2282 | # qhasm: z9_stack = s | ||
2283 | # asm 1: movdqa <s=int6464#7,>z9_stack=stack128#32 | ||
2284 | # asm 2: movdqa <s=%xmm6,>z9_stack=528(%esp) | ||
2285 | movdqa %xmm6,528(%esp) | ||
2286 | |||
2287 | # qhasm: uint32323232 r += s | ||
2288 | # asm 1: paddd <s=int6464#7,<r=int6464#2 | ||
2289 | # asm 2: paddd <s=%xmm6,<r=%xmm1 | ||
2290 | paddd %xmm6,%xmm1 | ||
2291 | |||
2292 | # qhasm: u = r | ||
2293 | # asm 1: movdqa <r=int6464#2,>u=int6464#7 | ||
2294 | # asm 2: movdqa <r=%xmm1,>u=%xmm6 | ||
2295 | movdqa %xmm1,%xmm6 | ||
2296 | |||
2297 | # qhasm: uint32323232 r >>= 14 | ||
2298 | # asm 1: psrld $14,<r=int6464#2 | ||
2299 | # asm 2: psrld $14,<r=%xmm1 | ||
2300 | psrld $14,%xmm1 | ||
2301 | |||
2302 | # qhasm: p ^= r | ||
2303 | # asm 1: pxor <r=int6464#2,<p=int6464#1 | ||
2304 | # asm 2: pxor <r=%xmm1,<p=%xmm0 | ||
2305 | pxor %xmm1,%xmm0 | ||
2306 | |||
2307 | # qhasm: uint32323232 u <<= 18 | ||
2308 | # asm 1: pslld $18,<u=int6464#7 | ||
2309 | # asm 2: pslld $18,<u=%xmm6 | ||
2310 | pslld $18,%xmm6 | ||
2311 | |||
2312 | # qhasm: p ^= u | ||
2313 | # asm 1: pxor <u=int6464#7,<p=int6464#1 | ||
2314 | # asm 2: pxor <u=%xmm6,<p=%xmm0 | ||
2315 | pxor %xmm6,%xmm0 | ||
2316 | |||
2317 | # qhasm: z10_stack = p | ||
2318 | # asm 1: movdqa <p=int6464#1,>z10_stack=stack128#22 | ||
2319 | # asm 2: movdqa <p=%xmm0,>z10_stack=368(%esp) | ||
2320 | movdqa %xmm0,368(%esp) | ||
2321 | |||
2322 | # qhasm: assign xmm2 to mt | ||
2323 | |||
2324 | # qhasm: assign xmm3 to mq | ||
2325 | |||
2326 | # qhasm: assign xmm4 to mp | ||
2327 | |||
2328 | # qhasm: assign xmm5 to mr | ||
2329 | |||
2330 | # qhasm: ms = mt | ||
2331 | # asm 1: movdqa <mt=int6464#3,>ms=int6464#7 | ||
2332 | # asm 2: movdqa <mt=%xmm2,>ms=%xmm6 | ||
2333 | movdqa %xmm2,%xmm6 | ||
2334 | |||
2335 | # qhasm: uint32323232 mt += mp | ||
2336 | # asm 1: paddd <mp=int6464#5,<mt=int6464#3 | ||
2337 | # asm 2: paddd <mp=%xmm4,<mt=%xmm2 | ||
2338 | paddd %xmm4,%xmm2 | ||
2339 | |||
2340 | # qhasm: mu = mt | ||
2341 | # asm 1: movdqa <mt=int6464#3,>mu=int6464#1 | ||
2342 | # asm 2: movdqa <mt=%xmm2,>mu=%xmm0 | ||
2343 | movdqa %xmm2,%xmm0 | ||
2344 | |||
2345 | # qhasm: uint32323232 mt >>= 25 | ||
2346 | # asm 1: psrld $25,<mt=int6464#3 | ||
2347 | # asm 2: psrld $25,<mt=%xmm2 | ||
2348 | psrld $25,%xmm2 | ||
2349 | |||
2350 | # qhasm: mq ^= mt | ||
2351 | # asm 1: pxor <mt=int6464#3,<mq=int6464#4 | ||
2352 | # asm 2: pxor <mt=%xmm2,<mq=%xmm3 | ||
2353 | pxor %xmm2,%xmm3 | ||
2354 | |||
2355 | # qhasm: uint32323232 mu <<= 7 | ||
2356 | # asm 1: pslld $7,<mu=int6464#1 | ||
2357 | # asm 2: pslld $7,<mu=%xmm0 | ||
2358 | pslld $7,%xmm0 | ||
2359 | |||
2360 | # qhasm: mq ^= mu | ||
2361 | # asm 1: pxor <mu=int6464#1,<mq=int6464#4 | ||
2362 | # asm 2: pxor <mu=%xmm0,<mq=%xmm3 | ||
2363 | pxor %xmm0,%xmm3 | ||
2364 | |||
2365 | # qhasm: z12_stack = mq | ||
2366 | # asm 1: movdqa <mq=int6464#4,>z12_stack=stack128#35 | ||
2367 | # asm 2: movdqa <mq=%xmm3,>z12_stack=576(%esp) | ||
2368 | movdqa %xmm3,576(%esp) | ||
2369 | |||
2370 | # qhasm: mt = mp | ||
2371 | # asm 1: movdqa <mp=int6464#5,>mt=int6464#1 | ||
2372 | # asm 2: movdqa <mp=%xmm4,>mt=%xmm0 | ||
2373 | movdqa %xmm4,%xmm0 | ||
2374 | |||
2375 | # qhasm: uint32323232 mt += mq | ||
2376 | # asm 1: paddd <mq=int6464#4,<mt=int6464#1 | ||
2377 | # asm 2: paddd <mq=%xmm3,<mt=%xmm0 | ||
2378 | paddd %xmm3,%xmm0 | ||
2379 | |||
2380 | # qhasm: mu = mt | ||
2381 | # asm 1: movdqa <mt=int6464#1,>mu=int6464#2 | ||
2382 | # asm 2: movdqa <mt=%xmm0,>mu=%xmm1 | ||
2383 | movdqa %xmm0,%xmm1 | ||
2384 | |||
2385 | # qhasm: uint32323232 mt >>= 23 | ||
2386 | # asm 1: psrld $23,<mt=int6464#1 | ||
2387 | # asm 2: psrld $23,<mt=%xmm0 | ||
2388 | psrld $23,%xmm0 | ||
2389 | |||
2390 | # qhasm: mr ^= mt | ||
2391 | # asm 1: pxor <mt=int6464#1,<mr=int6464#6 | ||
2392 | # asm 2: pxor <mt=%xmm0,<mr=%xmm5 | ||
2393 | pxor %xmm0,%xmm5 | ||
2394 | |||
2395 | # qhasm: uint32323232 mu <<= 9 | ||
2396 | # asm 1: pslld $9,<mu=int6464#2 | ||
2397 | # asm 2: pslld $9,<mu=%xmm1 | ||
2398 | pslld $9,%xmm1 | ||
2399 | |||
2400 | # qhasm: mr ^= mu | ||
2401 | # asm 1: pxor <mu=int6464#2,<mr=int6464#6 | ||
2402 | # asm 2: pxor <mu=%xmm1,<mr=%xmm5 | ||
2403 | pxor %xmm1,%xmm5 | ||
2404 | |||
2405 | # qhasm: z13_stack = mr | ||
2406 | # asm 1: movdqa <mr=int6464#6,>z13_stack=stack128#30 | ||
2407 | # asm 2: movdqa <mr=%xmm5,>z13_stack=496(%esp) | ||
2408 | movdqa %xmm5,496(%esp) | ||
2409 | |||
2410 | # qhasm: uint32323232 mq += mr | ||
2411 | # asm 1: paddd <mr=int6464#6,<mq=int6464#4 | ||
2412 | # asm 2: paddd <mr=%xmm5,<mq=%xmm3 | ||
2413 | paddd %xmm5,%xmm3 | ||
2414 | |||
2415 | # qhasm: mu = mq | ||
2416 | # asm 1: movdqa <mq=int6464#4,>mu=int6464#1 | ||
2417 | # asm 2: movdqa <mq=%xmm3,>mu=%xmm0 | ||
2418 | movdqa %xmm3,%xmm0 | ||
2419 | |||
2420 | # qhasm: uint32323232 mq >>= 19 | ||
2421 | # asm 1: psrld $19,<mq=int6464#4 | ||
2422 | # asm 2: psrld $19,<mq=%xmm3 | ||
2423 | psrld $19,%xmm3 | ||
2424 | |||
2425 | # qhasm: ms ^= mq | ||
2426 | # asm 1: pxor <mq=int6464#4,<ms=int6464#7 | ||
2427 | # asm 2: pxor <mq=%xmm3,<ms=%xmm6 | ||
2428 | pxor %xmm3,%xmm6 | ||
2429 | |||
2430 | # qhasm: uint32323232 mu <<= 13 | ||
2431 | # asm 1: pslld $13,<mu=int6464#1 | ||
2432 | # asm 2: pslld $13,<mu=%xmm0 | ||
2433 | pslld $13,%xmm0 | ||
2434 | |||
2435 | # qhasm: ms ^= mu | ||
2436 | # asm 1: pxor <mu=int6464#1,<ms=int6464#7 | ||
2437 | # asm 2: pxor <mu=%xmm0,<ms=%xmm6 | ||
2438 | pxor %xmm0,%xmm6 | ||
2439 | |||
2440 | # qhasm: t = z12_stack | ||
2441 | # asm 1: movdqa <z12_stack=stack128#35,>t=int6464#3 | ||
2442 | # asm 2: movdqa <z12_stack=576(%esp),>t=%xmm2 | ||
2443 | movdqa 576(%esp),%xmm2 | ||
2444 | |||
2445 | # qhasm: p = z0_stack | ||
2446 | # asm 1: movdqa <z0_stack=stack128#33,>p=int6464#1 | ||
2447 | # asm 2: movdqa <z0_stack=544(%esp),>p=%xmm0 | ||
2448 | movdqa 544(%esp),%xmm0 | ||
2449 | |||
2450 | # qhasm: q = z4_stack | ||
2451 | # asm 1: movdqa <z4_stack=stack128#34,>q=int6464#4 | ||
2452 | # asm 2: movdqa <z4_stack=560(%esp),>q=%xmm3 | ||
2453 | movdqa 560(%esp),%xmm3 | ||
2454 | |||
2455 | # qhasm: r = z8_stack | ||
2456 | # asm 1: movdqa <z8_stack=stack128#37,>r=int6464#2 | ||
2457 | # asm 2: movdqa <z8_stack=608(%esp),>r=%xmm1 | ||
2458 | movdqa 608(%esp),%xmm1 | ||
2459 | |||
2460 | # qhasm: z14_stack = ms | ||
2461 | # asm 1: movdqa <ms=int6464#7,>z14_stack=stack128#24 | ||
2462 | # asm 2: movdqa <ms=%xmm6,>z14_stack=400(%esp) | ||
2463 | movdqa %xmm6,400(%esp) | ||
2464 | |||
2465 | # qhasm: uint32323232 mr += ms | ||
2466 | # asm 1: paddd <ms=int6464#7,<mr=int6464#6 | ||
2467 | # asm 2: paddd <ms=%xmm6,<mr=%xmm5 | ||
2468 | paddd %xmm6,%xmm5 | ||
2469 | |||
2470 | # qhasm: mu = mr | ||
2471 | # asm 1: movdqa <mr=int6464#6,>mu=int6464#7 | ||
2472 | # asm 2: movdqa <mr=%xmm5,>mu=%xmm6 | ||
2473 | movdqa %xmm5,%xmm6 | ||
2474 | |||
2475 | # qhasm: uint32323232 mr >>= 14 | ||
2476 | # asm 1: psrld $14,<mr=int6464#6 | ||
2477 | # asm 2: psrld $14,<mr=%xmm5 | ||
2478 | psrld $14,%xmm5 | ||
2479 | |||
2480 | # qhasm: mp ^= mr | ||
2481 | # asm 1: pxor <mr=int6464#6,<mp=int6464#5 | ||
2482 | # asm 2: pxor <mr=%xmm5,<mp=%xmm4 | ||
2483 | pxor %xmm5,%xmm4 | ||
2484 | |||
2485 | # qhasm: uint32323232 mu <<= 18 | ||
2486 | # asm 1: pslld $18,<mu=int6464#7 | ||
2487 | # asm 2: pslld $18,<mu=%xmm6 | ||
2488 | pslld $18,%xmm6 | ||
2489 | |||
2490 | # qhasm: mp ^= mu | ||
2491 | # asm 1: pxor <mu=int6464#7,<mp=int6464#5 | ||
2492 | # asm 2: pxor <mu=%xmm6,<mp=%xmm4 | ||
2493 | pxor %xmm6,%xmm4 | ||
2494 | |||
2495 | # qhasm: z15_stack = mp | ||
2496 | # asm 1: movdqa <mp=int6464#5,>z15_stack=stack128#23 | ||
2497 | # asm 2: movdqa <mp=%xmm4,>z15_stack=384(%esp) | ||
2498 | movdqa %xmm4,384(%esp) | ||
2499 | |||
2500 | # qhasm: unsigned>? i -= 2 | ||
2501 | # asm 1: sub $2,<i=int32#1 | ||
2502 | # asm 2: sub $2,<i=%eax | ||
2503 | sub $2,%eax | ||
2504 | # comment:fp stack unchanged by jump | ||
2505 | |||
2506 | # qhasm: goto mainloop1 if unsigned> | ||
2507 | ja ._mainloop1 | ||
2508 | |||
2509 | # qhasm: out = out_stack | ||
2510 | # asm 1: movl <out_stack=stack32#6,>out=int32#6 | ||
2511 | # asm 2: movl <out_stack=20(%esp),>out=%edi | ||
2512 | movl 20(%esp),%edi | ||
2513 | |||
2514 | # qhasm: z0 = z0_stack | ||
2515 | # asm 1: movdqa <z0_stack=stack128#33,>z0=int6464#1 | ||
2516 | # asm 2: movdqa <z0_stack=544(%esp),>z0=%xmm0 | ||
2517 | movdqa 544(%esp),%xmm0 | ||
2518 | |||
2519 | # qhasm: z1 = z1_stack | ||
2520 | # asm 1: movdqa <z1_stack=stack128#28,>z1=int6464#2 | ||
2521 | # asm 2: movdqa <z1_stack=464(%esp),>z1=%xmm1 | ||
2522 | movdqa 464(%esp),%xmm1 | ||
2523 | |||
2524 | # qhasm: z2 = z2_stack | ||
2525 | # asm 1: movdqa <z2_stack=stack128#31,>z2=int6464#3 | ||
2526 | # asm 2: movdqa <z2_stack=512(%esp),>z2=%xmm2 | ||
2527 | movdqa 512(%esp),%xmm2 | ||
2528 | |||
2529 | # qhasm: z3 = z3_stack | ||
2530 | # asm 1: movdqa <z3_stack=stack128#25,>z3=int6464#4 | ||
2531 | # asm 2: movdqa <z3_stack=416(%esp),>z3=%xmm3 | ||
2532 | movdqa 416(%esp),%xmm3 | ||
2533 | |||
2534 | # qhasm: uint32323232 z0 += orig0 | ||
2535 | # asm 1: paddd <orig0=stack128#8,<z0=int6464#1 | ||
2536 | # asm 2: paddd <orig0=144(%esp),<z0=%xmm0 | ||
2537 | paddd 144(%esp),%xmm0 | ||
2538 | |||
2539 | # qhasm: uint32323232 z1 += orig1 | ||
2540 | # asm 1: paddd <orig1=stack128#12,<z1=int6464#2 | ||
2541 | # asm 2: paddd <orig1=208(%esp),<z1=%xmm1 | ||
2542 | paddd 208(%esp),%xmm1 | ||
2543 | |||
2544 | # qhasm: uint32323232 z2 += orig2 | ||
2545 | # asm 1: paddd <orig2=stack128#15,<z2=int6464#3 | ||
2546 | # asm 2: paddd <orig2=256(%esp),<z2=%xmm2 | ||
2547 | paddd 256(%esp),%xmm2 | ||
2548 | |||
2549 | # qhasm: uint32323232 z3 += orig3 | ||
2550 | # asm 1: paddd <orig3=stack128#18,<z3=int6464#4 | ||
2551 | # asm 2: paddd <orig3=304(%esp),<z3=%xmm3 | ||
2552 | paddd 304(%esp),%xmm3 | ||
2553 | |||
2554 | # qhasm: in0 = z0 | ||
2555 | # asm 1: movd <z0=int6464#1,>in0=int32#1 | ||
2556 | # asm 2: movd <z0=%xmm0,>in0=%eax | ||
2557 | movd %xmm0,%eax | ||
2558 | |||
2559 | # qhasm: in1 = z1 | ||
2560 | # asm 1: movd <z1=int6464#2,>in1=int32#2 | ||
2561 | # asm 2: movd <z1=%xmm1,>in1=%ecx | ||
2562 | movd %xmm1,%ecx | ||
2563 | |||
2564 | # qhasm: in2 = z2 | ||
2565 | # asm 1: movd <z2=int6464#3,>in2=int32#3 | ||
2566 | # asm 2: movd <z2=%xmm2,>in2=%edx | ||
2567 | movd %xmm2,%edx | ||
2568 | |||
2569 | # qhasm: in3 = z3 | ||
2570 | # asm 1: movd <z3=int6464#4,>in3=int32#4 | ||
2571 | # asm 2: movd <z3=%xmm3,>in3=%ebx | ||
2572 | movd %xmm3,%ebx | ||
2573 | |||
2574 | # qhasm: z0 <<<= 96 | ||
2575 | # asm 1: pshufd $0x39,<z0=int6464#1,<z0=int6464#1 | ||
2576 | # asm 2: pshufd $0x39,<z0=%xmm0,<z0=%xmm0 | ||
2577 | pshufd $0x39,%xmm0,%xmm0 | ||
2578 | |||
2579 | # qhasm: z1 <<<= 96 | ||
2580 | # asm 1: pshufd $0x39,<z1=int6464#2,<z1=int6464#2 | ||
2581 | # asm 2: pshufd $0x39,<z1=%xmm1,<z1=%xmm1 | ||
2582 | pshufd $0x39,%xmm1,%xmm1 | ||
2583 | |||
2584 | # qhasm: z2 <<<= 96 | ||
2585 | # asm 1: pshufd $0x39,<z2=int6464#3,<z2=int6464#3 | ||
2586 | # asm 2: pshufd $0x39,<z2=%xmm2,<z2=%xmm2 | ||
2587 | pshufd $0x39,%xmm2,%xmm2 | ||
2588 | |||
2589 | # qhasm: z3 <<<= 96 | ||
2590 | # asm 1: pshufd $0x39,<z3=int6464#4,<z3=int6464#4 | ||
2591 | # asm 2: pshufd $0x39,<z3=%xmm3,<z3=%xmm3 | ||
2592 | pshufd $0x39,%xmm3,%xmm3 | ||
2593 | |||
2594 | # qhasm: in0 ^= *(uint32 *) (m + 0) | ||
2595 | # asm 1: xorl 0(<m=int32#5),<in0=int32#1 | ||
2596 | # asm 2: xorl 0(<m=%esi),<in0=%eax | ||
2597 | xorl 0(%esi),%eax | ||
2598 | |||
2599 | # qhasm: in1 ^= *(uint32 *) (m + 4) | ||
2600 | # asm 1: xorl 4(<m=int32#5),<in1=int32#2 | ||
2601 | # asm 2: xorl 4(<m=%esi),<in1=%ecx | ||
2602 | xorl 4(%esi),%ecx | ||
2603 | |||
2604 | # qhasm: in2 ^= *(uint32 *) (m + 8) | ||
2605 | # asm 1: xorl 8(<m=int32#5),<in2=int32#3 | ||
2606 | # asm 2: xorl 8(<m=%esi),<in2=%edx | ||
2607 | xorl 8(%esi),%edx | ||
2608 | |||
2609 | # qhasm: in3 ^= *(uint32 *) (m + 12) | ||
2610 | # asm 1: xorl 12(<m=int32#5),<in3=int32#4 | ||
2611 | # asm 2: xorl 12(<m=%esi),<in3=%ebx | ||
2612 | xorl 12(%esi),%ebx | ||
2613 | |||
2614 | # qhasm: *(uint32 *) (out + 0) = in0 | ||
2615 | # asm 1: movl <in0=int32#1,0(<out=int32#6) | ||
2616 | # asm 2: movl <in0=%eax,0(<out=%edi) | ||
2617 | movl %eax,0(%edi) | ||
2618 | |||
2619 | # qhasm: *(uint32 *) (out + 4) = in1 | ||
2620 | # asm 1: movl <in1=int32#2,4(<out=int32#6) | ||
2621 | # asm 2: movl <in1=%ecx,4(<out=%edi) | ||
2622 | movl %ecx,4(%edi) | ||
2623 | |||
2624 | # qhasm: *(uint32 *) (out + 8) = in2 | ||
2625 | # asm 1: movl <in2=int32#3,8(<out=int32#6) | ||
2626 | # asm 2: movl <in2=%edx,8(<out=%edi) | ||
2627 | movl %edx,8(%edi) | ||
2628 | |||
2629 | # qhasm: *(uint32 *) (out + 12) = in3 | ||
2630 | # asm 1: movl <in3=int32#4,12(<out=int32#6) | ||
2631 | # asm 2: movl <in3=%ebx,12(<out=%edi) | ||
2632 | movl %ebx,12(%edi) | ||
2633 | |||
2634 | # qhasm: in0 = z0 | ||
2635 | # asm 1: movd <z0=int6464#1,>in0=int32#1 | ||
2636 | # asm 2: movd <z0=%xmm0,>in0=%eax | ||
2637 | movd %xmm0,%eax | ||
2638 | |||
2639 | # qhasm: in1 = z1 | ||
2640 | # asm 1: movd <z1=int6464#2,>in1=int32#2 | ||
2641 | # asm 2: movd <z1=%xmm1,>in1=%ecx | ||
2642 | movd %xmm1,%ecx | ||
2643 | |||
2644 | # qhasm: in2 = z2 | ||
2645 | # asm 1: movd <z2=int6464#3,>in2=int32#3 | ||
2646 | # asm 2: movd <z2=%xmm2,>in2=%edx | ||
2647 | movd %xmm2,%edx | ||
2648 | |||
2649 | # qhasm: in3 = z3 | ||
2650 | # asm 1: movd <z3=int6464#4,>in3=int32#4 | ||
2651 | # asm 2: movd <z3=%xmm3,>in3=%ebx | ||
2652 | movd %xmm3,%ebx | ||
2653 | |||
2654 | # qhasm: z0 <<<= 96 | ||
2655 | # asm 1: pshufd $0x39,<z0=int6464#1,<z0=int6464#1 | ||
2656 | # asm 2: pshufd $0x39,<z0=%xmm0,<z0=%xmm0 | ||
2657 | pshufd $0x39,%xmm0,%xmm0 | ||
2658 | |||
2659 | # qhasm: z1 <<<= 96 | ||
2660 | # asm 1: pshufd $0x39,<z1=int6464#2,<z1=int6464#2 | ||
2661 | # asm 2: pshufd $0x39,<z1=%xmm1,<z1=%xmm1 | ||
2662 | pshufd $0x39,%xmm1,%xmm1 | ||
2663 | |||
2664 | # qhasm: z2 <<<= 96 | ||
2665 | # asm 1: pshufd $0x39,<z2=int6464#3,<z2=int6464#3 | ||
2666 | # asm 2: pshufd $0x39,<z2=%xmm2,<z2=%xmm2 | ||
2667 | pshufd $0x39,%xmm2,%xmm2 | ||
2668 | |||
2669 | # qhasm: z3 <<<= 96 | ||
2670 | # asm 1: pshufd $0x39,<z3=int6464#4,<z3=int6464#4 | ||
2671 | # asm 2: pshufd $0x39,<z3=%xmm3,<z3=%xmm3 | ||
2672 | pshufd $0x39,%xmm3,%xmm3 | ||
2673 | |||
2674 | # qhasm: in0 ^= *(uint32 *) (m + 64) | ||
2675 | # asm 1: xorl 64(<m=int32#5),<in0=int32#1 | ||
2676 | # asm 2: xorl 64(<m=%esi),<in0=%eax | ||
2677 | xorl 64(%esi),%eax | ||
2678 | |||
2679 | # qhasm: in1 ^= *(uint32 *) (m + 68) | ||
2680 | # asm 1: xorl 68(<m=int32#5),<in1=int32#2 | ||
2681 | # asm 2: xorl 68(<m=%esi),<in1=%ecx | ||
2682 | xorl 68(%esi),%ecx | ||
2683 | |||
2684 | # qhasm: in2 ^= *(uint32 *) (m + 72) | ||
2685 | # asm 1: xorl 72(<m=int32#5),<in2=int32#3 | ||
2686 | # asm 2: xorl 72(<m=%esi),<in2=%edx | ||
2687 | xorl 72(%esi),%edx | ||
2688 | |||
2689 | # qhasm: in3 ^= *(uint32 *) (m + 76) | ||
2690 | # asm 1: xorl 76(<m=int32#5),<in3=int32#4 | ||
2691 | # asm 2: xorl 76(<m=%esi),<in3=%ebx | ||
2692 | xorl 76(%esi),%ebx | ||
2693 | |||
2694 | # qhasm: *(uint32 *) (out + 64) = in0 | ||
2695 | # asm 1: movl <in0=int32#1,64(<out=int32#6) | ||
2696 | # asm 2: movl <in0=%eax,64(<out=%edi) | ||
2697 | movl %eax,64(%edi) | ||
2698 | |||
2699 | # qhasm: *(uint32 *) (out + 68) = in1 | ||
2700 | # asm 1: movl <in1=int32#2,68(<out=int32#6) | ||
2701 | # asm 2: movl <in1=%ecx,68(<out=%edi) | ||
2702 | movl %ecx,68(%edi) | ||
2703 | |||
2704 | # qhasm: *(uint32 *) (out + 72) = in2 | ||
2705 | # asm 1: movl <in2=int32#3,72(<out=int32#6) | ||
2706 | # asm 2: movl <in2=%edx,72(<out=%edi) | ||
2707 | movl %edx,72(%edi) | ||
2708 | |||
2709 | # qhasm: *(uint32 *) (out + 76) = in3 | ||
2710 | # asm 1: movl <in3=int32#4,76(<out=int32#6) | ||
2711 | # asm 2: movl <in3=%ebx,76(<out=%edi) | ||
2712 | movl %ebx,76(%edi) | ||
2713 | |||
2714 | # qhasm: in0 = z0 | ||
2715 | # asm 1: movd <z0=int6464#1,>in0=int32#1 | ||
2716 | # asm 2: movd <z0=%xmm0,>in0=%eax | ||
2717 | movd %xmm0,%eax | ||
2718 | |||
2719 | # qhasm: in1 = z1 | ||
2720 | # asm 1: movd <z1=int6464#2,>in1=int32#2 | ||
2721 | # asm 2: movd <z1=%xmm1,>in1=%ecx | ||
2722 | movd %xmm1,%ecx | ||
2723 | |||
2724 | # qhasm: in2 = z2 | ||
2725 | # asm 1: movd <z2=int6464#3,>in2=int32#3 | ||
2726 | # asm 2: movd <z2=%xmm2,>in2=%edx | ||
2727 | movd %xmm2,%edx | ||
2728 | |||
2729 | # qhasm: in3 = z3 | ||
2730 | # asm 1: movd <z3=int6464#4,>in3=int32#4 | ||
2731 | # asm 2: movd <z3=%xmm3,>in3=%ebx | ||
2732 | movd %xmm3,%ebx | ||
2733 | |||
2734 | # qhasm: z0 <<<= 96 | ||
2735 | # asm 1: pshufd $0x39,<z0=int6464#1,<z0=int6464#1 | ||
2736 | # asm 2: pshufd $0x39,<z0=%xmm0,<z0=%xmm0 | ||
2737 | pshufd $0x39,%xmm0,%xmm0 | ||
2738 | |||
2739 | # qhasm: z1 <<<= 96 | ||
2740 | # asm 1: pshufd $0x39,<z1=int6464#2,<z1=int6464#2 | ||
2741 | # asm 2: pshufd $0x39,<z1=%xmm1,<z1=%xmm1 | ||
2742 | pshufd $0x39,%xmm1,%xmm1 | ||
2743 | |||
2744 | # qhasm: z2 <<<= 96 | ||
2745 | # asm 1: pshufd $0x39,<z2=int6464#3,<z2=int6464#3 | ||
2746 | # asm 2: pshufd $0x39,<z2=%xmm2,<z2=%xmm2 | ||
2747 | pshufd $0x39,%xmm2,%xmm2 | ||
2748 | |||
2749 | # qhasm: z3 <<<= 96 | ||
2750 | # asm 1: pshufd $0x39,<z3=int6464#4,<z3=int6464#4 | ||
2751 | # asm 2: pshufd $0x39,<z3=%xmm3,<z3=%xmm3 | ||
2752 | pshufd $0x39,%xmm3,%xmm3 | ||
2753 | |||
2754 | # qhasm: in0 ^= *(uint32 *) (m + 128) | ||
2755 | # asm 1: xorl 128(<m=int32#5),<in0=int32#1 | ||
2756 | # asm 2: xorl 128(<m=%esi),<in0=%eax | ||
2757 | xorl 128(%esi),%eax | ||
2758 | |||
2759 | # qhasm: in1 ^= *(uint32 *) (m + 132) | ||
2760 | # asm 1: xorl 132(<m=int32#5),<in1=int32#2 | ||
2761 | # asm 2: xorl 132(<m=%esi),<in1=%ecx | ||
2762 | xorl 132(%esi),%ecx | ||
2763 | |||
2764 | # qhasm: in2 ^= *(uint32 *) (m + 136) | ||
2765 | # asm 1: xorl 136(<m=int32#5),<in2=int32#3 | ||
2766 | # asm 2: xorl 136(<m=%esi),<in2=%edx | ||
2767 | xorl 136(%esi),%edx | ||
2768 | |||
2769 | # qhasm: in3 ^= *(uint32 *) (m + 140) | ||
2770 | # asm 1: xorl 140(<m=int32#5),<in3=int32#4 | ||
2771 | # asm 2: xorl 140(<m=%esi),<in3=%ebx | ||
2772 | xorl 140(%esi),%ebx | ||
2773 | |||
2774 | # qhasm: *(uint32 *) (out + 128) = in0 | ||
2775 | # asm 1: movl <in0=int32#1,128(<out=int32#6) | ||
2776 | # asm 2: movl <in0=%eax,128(<out=%edi) | ||
2777 | movl %eax,128(%edi) | ||
2778 | |||
2779 | # qhasm: *(uint32 *) (out + 132) = in1 | ||
2780 | # asm 1: movl <in1=int32#2,132(<out=int32#6) | ||
2781 | # asm 2: movl <in1=%ecx,132(<out=%edi) | ||
2782 | movl %ecx,132(%edi) | ||
2783 | |||
2784 | # qhasm: *(uint32 *) (out + 136) = in2 | ||
2785 | # asm 1: movl <in2=int32#3,136(<out=int32#6) | ||
2786 | # asm 2: movl <in2=%edx,136(<out=%edi) | ||
2787 | movl %edx,136(%edi) | ||
2788 | |||
2789 | # qhasm: *(uint32 *) (out + 140) = in3 | ||
2790 | # asm 1: movl <in3=int32#4,140(<out=int32#6) | ||
2791 | # asm 2: movl <in3=%ebx,140(<out=%edi) | ||
2792 | movl %ebx,140(%edi) | ||
2793 | |||
2794 | # qhasm: in0 = z0 | ||
2795 | # asm 1: movd <z0=int6464#1,>in0=int32#1 | ||
2796 | # asm 2: movd <z0=%xmm0,>in0=%eax | ||
2797 | movd %xmm0,%eax | ||
2798 | |||
2799 | # qhasm: in1 = z1 | ||
2800 | # asm 1: movd <z1=int6464#2,>in1=int32#2 | ||
2801 | # asm 2: movd <z1=%xmm1,>in1=%ecx | ||
2802 | movd %xmm1,%ecx | ||
2803 | |||
2804 | # qhasm: in2 = z2 | ||
2805 | # asm 1: movd <z2=int6464#3,>in2=int32#3 | ||
2806 | # asm 2: movd <z2=%xmm2,>in2=%edx | ||
2807 | movd %xmm2,%edx | ||
2808 | |||
2809 | # qhasm: in3 = z3 | ||
2810 | # asm 1: movd <z3=int6464#4,>in3=int32#4 | ||
2811 | # asm 2: movd <z3=%xmm3,>in3=%ebx | ||
2812 | movd %xmm3,%ebx | ||
2813 | |||
2814 | # qhasm: in0 ^= *(uint32 *) (m + 192) | ||
2815 | # asm 1: xorl 192(<m=int32#5),<in0=int32#1 | ||
2816 | # asm 2: xorl 192(<m=%esi),<in0=%eax | ||
2817 | xorl 192(%esi),%eax | ||
2818 | |||
2819 | # qhasm: in1 ^= *(uint32 *) (m + 196) | ||
2820 | # asm 1: xorl 196(<m=int32#5),<in1=int32#2 | ||
2821 | # asm 2: xorl 196(<m=%esi),<in1=%ecx | ||
2822 | xorl 196(%esi),%ecx | ||
2823 | |||
2824 | # qhasm: in2 ^= *(uint32 *) (m + 200) | ||
2825 | # asm 1: xorl 200(<m=int32#5),<in2=int32#3 | ||
2826 | # asm 2: xorl 200(<m=%esi),<in2=%edx | ||
2827 | xorl 200(%esi),%edx | ||
2828 | |||
2829 | # qhasm: in3 ^= *(uint32 *) (m + 204) | ||
2830 | # asm 1: xorl 204(<m=int32#5),<in3=int32#4 | ||
2831 | # asm 2: xorl 204(<m=%esi),<in3=%ebx | ||
2832 | xorl 204(%esi),%ebx | ||
2833 | |||
2834 | # qhasm: *(uint32 *) (out + 192) = in0 | ||
2835 | # asm 1: movl <in0=int32#1,192(<out=int32#6) | ||
2836 | # asm 2: movl <in0=%eax,192(<out=%edi) | ||
2837 | movl %eax,192(%edi) | ||
2838 | |||
2839 | # qhasm: *(uint32 *) (out + 196) = in1 | ||
2840 | # asm 1: movl <in1=int32#2,196(<out=int32#6) | ||
2841 | # asm 2: movl <in1=%ecx,196(<out=%edi) | ||
2842 | movl %ecx,196(%edi) | ||
2843 | |||
2844 | # qhasm: *(uint32 *) (out + 200) = in2 | ||
2845 | # asm 1: movl <in2=int32#3,200(<out=int32#6) | ||
2846 | # asm 2: movl <in2=%edx,200(<out=%edi) | ||
2847 | movl %edx,200(%edi) | ||
2848 | |||
2849 | # qhasm: *(uint32 *) (out + 204) = in3 | ||
2850 | # asm 1: movl <in3=int32#4,204(<out=int32#6) | ||
2851 | # asm 2: movl <in3=%ebx,204(<out=%edi) | ||
2852 | movl %ebx,204(%edi) | ||
2853 | |||
2854 | # qhasm: z4 = z4_stack | ||
2855 | # asm 1: movdqa <z4_stack=stack128#34,>z4=int6464#1 | ||
2856 | # asm 2: movdqa <z4_stack=560(%esp),>z4=%xmm0 | ||
2857 | movdqa 560(%esp),%xmm0 | ||
2858 | |||
2859 | # qhasm: z5 = z5_stack | ||
2860 | # asm 1: movdqa <z5_stack=stack128#21,>z5=int6464#2 | ||
2861 | # asm 2: movdqa <z5_stack=352(%esp),>z5=%xmm1 | ||
2862 | movdqa 352(%esp),%xmm1 | ||
2863 | |||
2864 | # qhasm: z6 = z6_stack | ||
2865 | # asm 1: movdqa <z6_stack=stack128#26,>z6=int6464#3 | ||
2866 | # asm 2: movdqa <z6_stack=432(%esp),>z6=%xmm2 | ||
2867 | movdqa 432(%esp),%xmm2 | ||
2868 | |||
2869 | # qhasm: z7 = z7_stack | ||
2870 | # asm 1: movdqa <z7_stack=stack128#29,>z7=int6464#4 | ||
2871 | # asm 2: movdqa <z7_stack=480(%esp),>z7=%xmm3 | ||
2872 | movdqa 480(%esp),%xmm3 | ||
2873 | |||
2874 | # qhasm: uint32323232 z4 += orig4 | ||
2875 | # asm 1: paddd <orig4=stack128#16,<z4=int6464#1 | ||
2876 | # asm 2: paddd <orig4=272(%esp),<z4=%xmm0 | ||
2877 | paddd 272(%esp),%xmm0 | ||
2878 | |||
2879 | # qhasm: uint32323232 z5 += orig5 | ||
2880 | # asm 1: paddd <orig5=stack128#5,<z5=int6464#2 | ||
2881 | # asm 2: paddd <orig5=96(%esp),<z5=%xmm1 | ||
2882 | paddd 96(%esp),%xmm1 | ||
2883 | |||
2884 | # qhasm: uint32323232 z6 += orig6 | ||
2885 | # asm 1: paddd <orig6=stack128#9,<z6=int6464#3 | ||
2886 | # asm 2: paddd <orig6=160(%esp),<z6=%xmm2 | ||
2887 | paddd 160(%esp),%xmm2 | ||
2888 | |||
2889 | # qhasm: uint32323232 z7 += orig7 | ||
2890 | # asm 1: paddd <orig7=stack128#13,<z7=int6464#4 | ||
2891 | # asm 2: paddd <orig7=224(%esp),<z7=%xmm3 | ||
2892 | paddd 224(%esp),%xmm3 | ||
2893 | |||
2894 | # qhasm: in4 = z4 | ||
2895 | # asm 1: movd <z4=int6464#1,>in4=int32#1 | ||
2896 | # asm 2: movd <z4=%xmm0,>in4=%eax | ||
2897 | movd %xmm0,%eax | ||
2898 | |||
2899 | # qhasm: in5 = z5 | ||
2900 | # asm 1: movd <z5=int6464#2,>in5=int32#2 | ||
2901 | # asm 2: movd <z5=%xmm1,>in5=%ecx | ||
2902 | movd %xmm1,%ecx | ||
2903 | |||
2904 | # qhasm: in6 = z6 | ||
2905 | # asm 1: movd <z6=int6464#3,>in6=int32#3 | ||
2906 | # asm 2: movd <z6=%xmm2,>in6=%edx | ||
2907 | movd %xmm2,%edx | ||
2908 | |||
2909 | # qhasm: in7 = z7 | ||
2910 | # asm 1: movd <z7=int6464#4,>in7=int32#4 | ||
2911 | # asm 2: movd <z7=%xmm3,>in7=%ebx | ||
2912 | movd %xmm3,%ebx | ||
2913 | |||
2914 | # qhasm: z4 <<<= 96 | ||
2915 | # asm 1: pshufd $0x39,<z4=int6464#1,<z4=int6464#1 | ||
2916 | # asm 2: pshufd $0x39,<z4=%xmm0,<z4=%xmm0 | ||
2917 | pshufd $0x39,%xmm0,%xmm0 | ||
2918 | |||
2919 | # qhasm: z5 <<<= 96 | ||
2920 | # asm 1: pshufd $0x39,<z5=int6464#2,<z5=int6464#2 | ||
2921 | # asm 2: pshufd $0x39,<z5=%xmm1,<z5=%xmm1 | ||
2922 | pshufd $0x39,%xmm1,%xmm1 | ||
2923 | |||
2924 | # qhasm: z6 <<<= 96 | ||
2925 | # asm 1: pshufd $0x39,<z6=int6464#3,<z6=int6464#3 | ||
2926 | # asm 2: pshufd $0x39,<z6=%xmm2,<z6=%xmm2 | ||
2927 | pshufd $0x39,%xmm2,%xmm2 | ||
2928 | |||
2929 | # qhasm: z7 <<<= 96 | ||
2930 | # asm 1: pshufd $0x39,<z7=int6464#4,<z7=int6464#4 | ||
2931 | # asm 2: pshufd $0x39,<z7=%xmm3,<z7=%xmm3 | ||
2932 | pshufd $0x39,%xmm3,%xmm3 | ||
2933 | |||
2934 | # qhasm: in4 ^= *(uint32 *) (m + 16) | ||
2935 | # asm 1: xorl 16(<m=int32#5),<in4=int32#1 | ||
2936 | # asm 2: xorl 16(<m=%esi),<in4=%eax | ||
2937 | xorl 16(%esi),%eax | ||
2938 | |||
2939 | # qhasm: in5 ^= *(uint32 *) (m + 20) | ||
2940 | # asm 1: xorl 20(<m=int32#5),<in5=int32#2 | ||
2941 | # asm 2: xorl 20(<m=%esi),<in5=%ecx | ||
2942 | xorl 20(%esi),%ecx | ||
2943 | |||
2944 | # qhasm: in6 ^= *(uint32 *) (m + 24) | ||
2945 | # asm 1: xorl 24(<m=int32#5),<in6=int32#3 | ||
2946 | # asm 2: xorl 24(<m=%esi),<in6=%edx | ||
2947 | xorl 24(%esi),%edx | ||
2948 | |||
2949 | # qhasm: in7 ^= *(uint32 *) (m + 28) | ||
2950 | # asm 1: xorl 28(<m=int32#5),<in7=int32#4 | ||
2951 | # asm 2: xorl 28(<m=%esi),<in7=%ebx | ||
2952 | xorl 28(%esi),%ebx | ||
2953 | |||
2954 | # qhasm: *(uint32 *) (out + 16) = in4 | ||
2955 | # asm 1: movl <in4=int32#1,16(<out=int32#6) | ||
2956 | # asm 2: movl <in4=%eax,16(<out=%edi) | ||
2957 | movl %eax,16(%edi) | ||
2958 | |||
2959 | # qhasm: *(uint32 *) (out + 20) = in5 | ||
2960 | # asm 1: movl <in5=int32#2,20(<out=int32#6) | ||
2961 | # asm 2: movl <in5=%ecx,20(<out=%edi) | ||
2962 | movl %ecx,20(%edi) | ||
2963 | |||
2964 | # qhasm: *(uint32 *) (out + 24) = in6 | ||
2965 | # asm 1: movl <in6=int32#3,24(<out=int32#6) | ||
2966 | # asm 2: movl <in6=%edx,24(<out=%edi) | ||
2967 | movl %edx,24(%edi) | ||
2968 | |||
2969 | # qhasm: *(uint32 *) (out + 28) = in7 | ||
2970 | # asm 1: movl <in7=int32#4,28(<out=int32#6) | ||
2971 | # asm 2: movl <in7=%ebx,28(<out=%edi) | ||
2972 | movl %ebx,28(%edi) | ||
2973 | |||
2974 | # qhasm: in4 = z4 | ||
2975 | # asm 1: movd <z4=int6464#1,>in4=int32#1 | ||
2976 | # asm 2: movd <z4=%xmm0,>in4=%eax | ||
2977 | movd %xmm0,%eax | ||
2978 | |||
2979 | # qhasm: in5 = z5 | ||
2980 | # asm 1: movd <z5=int6464#2,>in5=int32#2 | ||
2981 | # asm 2: movd <z5=%xmm1,>in5=%ecx | ||
2982 | movd %xmm1,%ecx | ||
2983 | |||
2984 | # qhasm: in6 = z6 | ||
2985 | # asm 1: movd <z6=int6464#3,>in6=int32#3 | ||
2986 | # asm 2: movd <z6=%xmm2,>in6=%edx | ||
2987 | movd %xmm2,%edx | ||
2988 | |||
2989 | # qhasm: in7 = z7 | ||
2990 | # asm 1: movd <z7=int6464#4,>in7=int32#4 | ||
2991 | # asm 2: movd <z7=%xmm3,>in7=%ebx | ||
2992 | movd %xmm3,%ebx | ||
2993 | |||
2994 | # qhasm: z4 <<<= 96 | ||
2995 | # asm 1: pshufd $0x39,<z4=int6464#1,<z4=int6464#1 | ||
2996 | # asm 2: pshufd $0x39,<z4=%xmm0,<z4=%xmm0 | ||
2997 | pshufd $0x39,%xmm0,%xmm0 | ||
2998 | |||
2999 | # qhasm: z5 <<<= 96 | ||
3000 | # asm 1: pshufd $0x39,<z5=int6464#2,<z5=int6464#2 | ||
3001 | # asm 2: pshufd $0x39,<z5=%xmm1,<z5=%xmm1 | ||
3002 | pshufd $0x39,%xmm1,%xmm1 | ||
3003 | |||
3004 | # qhasm: z6 <<<= 96 | ||
3005 | # asm 1: pshufd $0x39,<z6=int6464#3,<z6=int6464#3 | ||
3006 | # asm 2: pshufd $0x39,<z6=%xmm2,<z6=%xmm2 | ||
3007 | pshufd $0x39,%xmm2,%xmm2 | ||
3008 | |||
3009 | # qhasm: z7 <<<= 96 | ||
3010 | # asm 1: pshufd $0x39,<z7=int6464#4,<z7=int6464#4 | ||
3011 | # asm 2: pshufd $0x39,<z7=%xmm3,<z7=%xmm3 | ||
3012 | pshufd $0x39,%xmm3,%xmm3 | ||
3013 | |||
3014 | # qhasm: in4 ^= *(uint32 *) (m + 80) | ||
3015 | # asm 1: xorl 80(<m=int32#5),<in4=int32#1 | ||
3016 | # asm 2: xorl 80(<m=%esi),<in4=%eax | ||
3017 | xorl 80(%esi),%eax | ||
3018 | |||
3019 | # qhasm: in5 ^= *(uint32 *) (m + 84) | ||
3020 | # asm 1: xorl 84(<m=int32#5),<in5=int32#2 | ||
3021 | # asm 2: xorl 84(<m=%esi),<in5=%ecx | ||
3022 | xorl 84(%esi),%ecx | ||
3023 | |||
3024 | # qhasm: in6 ^= *(uint32 *) (m + 88) | ||
3025 | # asm 1: xorl 88(<m=int32#5),<in6=int32#3 | ||
3026 | # asm 2: xorl 88(<m=%esi),<in6=%edx | ||
3027 | xorl 88(%esi),%edx | ||
3028 | |||
3029 | # qhasm: in7 ^= *(uint32 *) (m + 92) | ||
3030 | # asm 1: xorl 92(<m=int32#5),<in7=int32#4 | ||
3031 | # asm 2: xorl 92(<m=%esi),<in7=%ebx | ||
3032 | xorl 92(%esi),%ebx | ||
3033 | |||
3034 | # qhasm: *(uint32 *) (out + 80) = in4 | ||
3035 | # asm 1: movl <in4=int32#1,80(<out=int32#6) | ||
3036 | # asm 2: movl <in4=%eax,80(<out=%edi) | ||
3037 | movl %eax,80(%edi) | ||
3038 | |||
3039 | # qhasm: *(uint32 *) (out + 84) = in5 | ||
3040 | # asm 1: movl <in5=int32#2,84(<out=int32#6) | ||
3041 | # asm 2: movl <in5=%ecx,84(<out=%edi) | ||
3042 | movl %ecx,84(%edi) | ||
3043 | |||
3044 | # qhasm: *(uint32 *) (out + 88) = in6 | ||
3045 | # asm 1: movl <in6=int32#3,88(<out=int32#6) | ||
3046 | # asm 2: movl <in6=%edx,88(<out=%edi) | ||
3047 | movl %edx,88(%edi) | ||
3048 | |||
3049 | # qhasm: *(uint32 *) (out + 92) = in7 | ||
3050 | # asm 1: movl <in7=int32#4,92(<out=int32#6) | ||
3051 | # asm 2: movl <in7=%ebx,92(<out=%edi) | ||
3052 | movl %ebx,92(%edi) | ||
3053 | |||
3054 | # qhasm: in4 = z4 | ||
3055 | # asm 1: movd <z4=int6464#1,>in4=int32#1 | ||
3056 | # asm 2: movd <z4=%xmm0,>in4=%eax | ||
3057 | movd %xmm0,%eax | ||
3058 | |||
3059 | # qhasm: in5 = z5 | ||
3060 | # asm 1: movd <z5=int6464#2,>in5=int32#2 | ||
3061 | # asm 2: movd <z5=%xmm1,>in5=%ecx | ||
3062 | movd %xmm1,%ecx | ||
3063 | |||
3064 | # qhasm: in6 = z6 | ||
3065 | # asm 1: movd <z6=int6464#3,>in6=int32#3 | ||
3066 | # asm 2: movd <z6=%xmm2,>in6=%edx | ||
3067 | movd %xmm2,%edx | ||
3068 | |||
3069 | # qhasm: in7 = z7 | ||
3070 | # asm 1: movd <z7=int6464#4,>in7=int32#4 | ||
3071 | # asm 2: movd <z7=%xmm3,>in7=%ebx | ||
3072 | movd %xmm3,%ebx | ||
3073 | |||
3074 | # qhasm: z4 <<<= 96 | ||
3075 | # asm 1: pshufd $0x39,<z4=int6464#1,<z4=int6464#1 | ||
3076 | # asm 2: pshufd $0x39,<z4=%xmm0,<z4=%xmm0 | ||
3077 | pshufd $0x39,%xmm0,%xmm0 | ||
3078 | |||
3079 | # qhasm: z5 <<<= 96 | ||
3080 | # asm 1: pshufd $0x39,<z5=int6464#2,<z5=int6464#2 | ||
3081 | # asm 2: pshufd $0x39,<z5=%xmm1,<z5=%xmm1 | ||
3082 | pshufd $0x39,%xmm1,%xmm1 | ||
3083 | |||
3084 | # qhasm: z6 <<<= 96 | ||
3085 | # asm 1: pshufd $0x39,<z6=int6464#3,<z6=int6464#3 | ||
3086 | # asm 2: pshufd $0x39,<z6=%xmm2,<z6=%xmm2 | ||
3087 | pshufd $0x39,%xmm2,%xmm2 | ||
3088 | |||
3089 | # qhasm: z7 <<<= 96 | ||
3090 | # asm 1: pshufd $0x39,<z7=int6464#4,<z7=int6464#4 | ||
3091 | # asm 2: pshufd $0x39,<z7=%xmm3,<z7=%xmm3 | ||
3092 | pshufd $0x39,%xmm3,%xmm3 | ||
3093 | |||
3094 | # qhasm: in4 ^= *(uint32 *) (m + 144) | ||
3095 | # asm 1: xorl 144(<m=int32#5),<in4=int32#1 | ||
3096 | # asm 2: xorl 144(<m=%esi),<in4=%eax | ||
3097 | xorl 144(%esi),%eax | ||
3098 | |||
3099 | # qhasm: in5 ^= *(uint32 *) (m + 148) | ||
3100 | # asm 1: xorl 148(<m=int32#5),<in5=int32#2 | ||
3101 | # asm 2: xorl 148(<m=%esi),<in5=%ecx | ||
3102 | xorl 148(%esi),%ecx | ||
3103 | |||
3104 | # qhasm: in6 ^= *(uint32 *) (m + 152) | ||
3105 | # asm 1: xorl 152(<m=int32#5),<in6=int32#3 | ||
3106 | # asm 2: xorl 152(<m=%esi),<in6=%edx | ||
3107 | xorl 152(%esi),%edx | ||
3108 | |||
3109 | # qhasm: in7 ^= *(uint32 *) (m + 156) | ||
3110 | # asm 1: xorl 156(<m=int32#5),<in7=int32#4 | ||
3111 | # asm 2: xorl 156(<m=%esi),<in7=%ebx | ||
3112 | xorl 156(%esi),%ebx | ||
3113 | |||
3114 | # qhasm: *(uint32 *) (out + 144) = in4 | ||
3115 | # asm 1: movl <in4=int32#1,144(<out=int32#6) | ||
3116 | # asm 2: movl <in4=%eax,144(<out=%edi) | ||
3117 | movl %eax,144(%edi) | ||
3118 | |||
3119 | # qhasm: *(uint32 *) (out + 148) = in5 | ||
3120 | # asm 1: movl <in5=int32#2,148(<out=int32#6) | ||
3121 | # asm 2: movl <in5=%ecx,148(<out=%edi) | ||
3122 | movl %ecx,148(%edi) | ||
3123 | |||
3124 | # qhasm: *(uint32 *) (out + 152) = in6 | ||
3125 | # asm 1: movl <in6=int32#3,152(<out=int32#6) | ||
3126 | # asm 2: movl <in6=%edx,152(<out=%edi) | ||
3127 | movl %edx,152(%edi) | ||
3128 | |||
3129 | # qhasm: *(uint32 *) (out + 156) = in7 | ||
3130 | # asm 1: movl <in7=int32#4,156(<out=int32#6) | ||
3131 | # asm 2: movl <in7=%ebx,156(<out=%edi) | ||
3132 | movl %ebx,156(%edi) | ||
3133 | |||
3134 | # qhasm: in4 = z4 | ||
3135 | # asm 1: movd <z4=int6464#1,>in4=int32#1 | ||
3136 | # asm 2: movd <z4=%xmm0,>in4=%eax | ||
3137 | movd %xmm0,%eax | ||
3138 | |||
3139 | # qhasm: in5 = z5 | ||
3140 | # asm 1: movd <z5=int6464#2,>in5=int32#2 | ||
3141 | # asm 2: movd <z5=%xmm1,>in5=%ecx | ||
3142 | movd %xmm1,%ecx | ||
3143 | |||
3144 | # qhasm: in6 = z6 | ||
3145 | # asm 1: movd <z6=int6464#3,>in6=int32#3 | ||
3146 | # asm 2: movd <z6=%xmm2,>in6=%edx | ||
3147 | movd %xmm2,%edx | ||
3148 | |||
3149 | # qhasm: in7 = z7 | ||
3150 | # asm 1: movd <z7=int6464#4,>in7=int32#4 | ||
3151 | # asm 2: movd <z7=%xmm3,>in7=%ebx | ||
3152 | movd %xmm3,%ebx | ||
3153 | |||
3154 | # qhasm: in4 ^= *(uint32 *) (m + 208) | ||
3155 | # asm 1: xorl 208(<m=int32#5),<in4=int32#1 | ||
3156 | # asm 2: xorl 208(<m=%esi),<in4=%eax | ||
3157 | xorl 208(%esi),%eax | ||
3158 | |||
3159 | # qhasm: in5 ^= *(uint32 *) (m + 212) | ||
3160 | # asm 1: xorl 212(<m=int32#5),<in5=int32#2 | ||
3161 | # asm 2: xorl 212(<m=%esi),<in5=%ecx | ||
3162 | xorl 212(%esi),%ecx | ||
3163 | |||
3164 | # qhasm: in6 ^= *(uint32 *) (m + 216) | ||
3165 | # asm 1: xorl 216(<m=int32#5),<in6=int32#3 | ||
3166 | # asm 2: xorl 216(<m=%esi),<in6=%edx | ||
3167 | xorl 216(%esi),%edx | ||
3168 | |||
3169 | # qhasm: in7 ^= *(uint32 *) (m + 220) | ||
3170 | # asm 1: xorl 220(<m=int32#5),<in7=int32#4 | ||
3171 | # asm 2: xorl 220(<m=%esi),<in7=%ebx | ||
3172 | xorl 220(%esi),%ebx | ||
3173 | |||
3174 | # qhasm: *(uint32 *) (out + 208) = in4 | ||
3175 | # asm 1: movl <in4=int32#1,208(<out=int32#6) | ||
3176 | # asm 2: movl <in4=%eax,208(<out=%edi) | ||
3177 | movl %eax,208(%edi) | ||
3178 | |||
3179 | # qhasm: *(uint32 *) (out + 212) = in5 | ||
3180 | # asm 1: movl <in5=int32#2,212(<out=int32#6) | ||
3181 | # asm 2: movl <in5=%ecx,212(<out=%edi) | ||
3182 | movl %ecx,212(%edi) | ||
3183 | |||
3184 | # qhasm: *(uint32 *) (out + 216) = in6 | ||
3185 | # asm 1: movl <in6=int32#3,216(<out=int32#6) | ||
3186 | # asm 2: movl <in6=%edx,216(<out=%edi) | ||
3187 | movl %edx,216(%edi) | ||
3188 | |||
3189 | # qhasm: *(uint32 *) (out + 220) = in7 | ||
3190 | # asm 1: movl <in7=int32#4,220(<out=int32#6) | ||
3191 | # asm 2: movl <in7=%ebx,220(<out=%edi) | ||
3192 | movl %ebx,220(%edi) | ||
3193 | |||
3194 | # qhasm: z8 = z8_stack | ||
3195 | # asm 1: movdqa <z8_stack=stack128#37,>z8=int6464#1 | ||
3196 | # asm 2: movdqa <z8_stack=608(%esp),>z8=%xmm0 | ||
3197 | movdqa 608(%esp),%xmm0 | ||
3198 | |||
3199 | # qhasm: z9 = z9_stack | ||
3200 | # asm 1: movdqa <z9_stack=stack128#32,>z9=int6464#2 | ||
3201 | # asm 2: movdqa <z9_stack=528(%esp),>z9=%xmm1 | ||
3202 | movdqa 528(%esp),%xmm1 | ||
3203 | |||
3204 | # qhasm: z10 = z10_stack | ||
3205 | # asm 1: movdqa <z10_stack=stack128#22,>z10=int6464#3 | ||
3206 | # asm 2: movdqa <z10_stack=368(%esp),>z10=%xmm2 | ||
3207 | movdqa 368(%esp),%xmm2 | ||
3208 | |||
3209 | # qhasm: z11 = z11_stack | ||
3210 | # asm 1: movdqa <z11_stack=stack128#27,>z11=int6464#4 | ||
3211 | # asm 2: movdqa <z11_stack=448(%esp),>z11=%xmm3 | ||
3212 | movdqa 448(%esp),%xmm3 | ||
3213 | |||
3214 | # qhasm: uint32323232 z8 += orig8 | ||
3215 | # asm 1: paddd <orig8=stack128#19,<z8=int6464#1 | ||
3216 | # asm 2: paddd <orig8=320(%esp),<z8=%xmm0 | ||
3217 | paddd 320(%esp),%xmm0 | ||
3218 | |||
3219 | # qhasm: uint32323232 z9 += orig9 | ||
3220 | # asm 1: paddd <orig9=stack128#20,<z9=int6464#2 | ||
3221 | # asm 2: paddd <orig9=336(%esp),<z9=%xmm1 | ||
3222 | paddd 336(%esp),%xmm1 | ||
3223 | |||
3224 | # qhasm: uint32323232 z10 += orig10 | ||
3225 | # asm 1: paddd <orig10=stack128#6,<z10=int6464#3 | ||
3226 | # asm 2: paddd <orig10=112(%esp),<z10=%xmm2 | ||
3227 | paddd 112(%esp),%xmm2 | ||
3228 | |||
3229 | # qhasm: uint32323232 z11 += orig11 | ||
3230 | # asm 1: paddd <orig11=stack128#10,<z11=int6464#4 | ||
3231 | # asm 2: paddd <orig11=176(%esp),<z11=%xmm3 | ||
3232 | paddd 176(%esp),%xmm3 | ||
3233 | |||
3234 | # qhasm: in8 = z8 | ||
3235 | # asm 1: movd <z8=int6464#1,>in8=int32#1 | ||
3236 | # asm 2: movd <z8=%xmm0,>in8=%eax | ||
3237 | movd %xmm0,%eax | ||
3238 | |||
3239 | # qhasm: in9 = z9 | ||
3240 | # asm 1: movd <z9=int6464#2,>in9=int32#2 | ||
3241 | # asm 2: movd <z9=%xmm1,>in9=%ecx | ||
3242 | movd %xmm1,%ecx | ||
3243 | |||
3244 | # qhasm: in10 = z10 | ||
3245 | # asm 1: movd <z10=int6464#3,>in10=int32#3 | ||
3246 | # asm 2: movd <z10=%xmm2,>in10=%edx | ||
3247 | movd %xmm2,%edx | ||
3248 | |||
3249 | # qhasm: in11 = z11 | ||
3250 | # asm 1: movd <z11=int6464#4,>in11=int32#4 | ||
3251 | # asm 2: movd <z11=%xmm3,>in11=%ebx | ||
3252 | movd %xmm3,%ebx | ||
3253 | |||
3254 | # qhasm: z8 <<<= 96 | ||
3255 | # asm 1: pshufd $0x39,<z8=int6464#1,<z8=int6464#1 | ||
3256 | # asm 2: pshufd $0x39,<z8=%xmm0,<z8=%xmm0 | ||
3257 | pshufd $0x39,%xmm0,%xmm0 | ||
3258 | |||
3259 | # qhasm: z9 <<<= 96 | ||
3260 | # asm 1: pshufd $0x39,<z9=int6464#2,<z9=int6464#2 | ||
3261 | # asm 2: pshufd $0x39,<z9=%xmm1,<z9=%xmm1 | ||
3262 | pshufd $0x39,%xmm1,%xmm1 | ||
3263 | |||
3264 | # qhasm: z10 <<<= 96 | ||
3265 | # asm 1: pshufd $0x39,<z10=int6464#3,<z10=int6464#3 | ||
3266 | # asm 2: pshufd $0x39,<z10=%xmm2,<z10=%xmm2 | ||
3267 | pshufd $0x39,%xmm2,%xmm2 | ||
3268 | |||
3269 | # qhasm: z11 <<<= 96 | ||
3270 | # asm 1: pshufd $0x39,<z11=int6464#4,<z11=int6464#4 | ||
3271 | # asm 2: pshufd $0x39,<z11=%xmm3,<z11=%xmm3 | ||
3272 | pshufd $0x39,%xmm3,%xmm3 | ||
3273 | |||
3274 | # qhasm: in8 ^= *(uint32 *) (m + 32) | ||
3275 | # asm 1: xorl 32(<m=int32#5),<in8=int32#1 | ||
3276 | # asm 2: xorl 32(<m=%esi),<in8=%eax | ||
3277 | xorl 32(%esi),%eax | ||
3278 | |||
3279 | # qhasm: in9 ^= *(uint32 *) (m + 36) | ||
3280 | # asm 1: xorl 36(<m=int32#5),<in9=int32#2 | ||
3281 | # asm 2: xorl 36(<m=%esi),<in9=%ecx | ||
3282 | xorl 36(%esi),%ecx | ||
3283 | |||
3284 | # qhasm: in10 ^= *(uint32 *) (m + 40) | ||
3285 | # asm 1: xorl 40(<m=int32#5),<in10=int32#3 | ||
3286 | # asm 2: xorl 40(<m=%esi),<in10=%edx | ||
3287 | xorl 40(%esi),%edx | ||
3288 | |||
3289 | # qhasm: in11 ^= *(uint32 *) (m + 44) | ||
3290 | # asm 1: xorl 44(<m=int32#5),<in11=int32#4 | ||
3291 | # asm 2: xorl 44(<m=%esi),<in11=%ebx | ||
3292 | xorl 44(%esi),%ebx | ||
3293 | |||
3294 | # qhasm: *(uint32 *) (out + 32) = in8 | ||
3295 | # asm 1: movl <in8=int32#1,32(<out=int32#6) | ||
3296 | # asm 2: movl <in8=%eax,32(<out=%edi) | ||
3297 | movl %eax,32(%edi) | ||
3298 | |||
3299 | # qhasm: *(uint32 *) (out + 36) = in9 | ||
3300 | # asm 1: movl <in9=int32#2,36(<out=int32#6) | ||
3301 | # asm 2: movl <in9=%ecx,36(<out=%edi) | ||
3302 | movl %ecx,36(%edi) | ||
3303 | |||
3304 | # qhasm: *(uint32 *) (out + 40) = in10 | ||
3305 | # asm 1: movl <in10=int32#3,40(<out=int32#6) | ||
3306 | # asm 2: movl <in10=%edx,40(<out=%edi) | ||
3307 | movl %edx,40(%edi) | ||
3308 | |||
3309 | # qhasm: *(uint32 *) (out + 44) = in11 | ||
3310 | # asm 1: movl <in11=int32#4,44(<out=int32#6) | ||
3311 | # asm 2: movl <in11=%ebx,44(<out=%edi) | ||
3312 | movl %ebx,44(%edi) | ||
3313 | |||
3314 | # qhasm: in8 = z8 | ||
3315 | # asm 1: movd <z8=int6464#1,>in8=int32#1 | ||
3316 | # asm 2: movd <z8=%xmm0,>in8=%eax | ||
3317 | movd %xmm0,%eax | ||
3318 | |||
3319 | # qhasm: in9 = z9 | ||
3320 | # asm 1: movd <z9=int6464#2,>in9=int32#2 | ||
3321 | # asm 2: movd <z9=%xmm1,>in9=%ecx | ||
3322 | movd %xmm1,%ecx | ||
3323 | |||
3324 | # qhasm: in10 = z10 | ||
3325 | # asm 1: movd <z10=int6464#3,>in10=int32#3 | ||
3326 | # asm 2: movd <z10=%xmm2,>in10=%edx | ||
3327 | movd %xmm2,%edx | ||
3328 | |||
3329 | # qhasm: in11 = z11 | ||
3330 | # asm 1: movd <z11=int6464#4,>in11=int32#4 | ||
3331 | # asm 2: movd <z11=%xmm3,>in11=%ebx | ||
3332 | movd %xmm3,%ebx | ||
3333 | |||
3334 | # qhasm: z8 <<<= 96 | ||
3335 | # asm 1: pshufd $0x39,<z8=int6464#1,<z8=int6464#1 | ||
3336 | # asm 2: pshufd $0x39,<z8=%xmm0,<z8=%xmm0 | ||
3337 | pshufd $0x39,%xmm0,%xmm0 | ||
3338 | |||
3339 | # qhasm: z9 <<<= 96 | ||
3340 | # asm 1: pshufd $0x39,<z9=int6464#2,<z9=int6464#2 | ||
3341 | # asm 2: pshufd $0x39,<z9=%xmm1,<z9=%xmm1 | ||
3342 | pshufd $0x39,%xmm1,%xmm1 | ||
3343 | |||
3344 | # qhasm: z10 <<<= 96 | ||
3345 | # asm 1: pshufd $0x39,<z10=int6464#3,<z10=int6464#3 | ||
3346 | # asm 2: pshufd $0x39,<z10=%xmm2,<z10=%xmm2 | ||
3347 | pshufd $0x39,%xmm2,%xmm2 | ||
3348 | |||
3349 | # qhasm: z11 <<<= 96 | ||
3350 | # asm 1: pshufd $0x39,<z11=int6464#4,<z11=int6464#4 | ||
3351 | # asm 2: pshufd $0x39,<z11=%xmm3,<z11=%xmm3 | ||
3352 | pshufd $0x39,%xmm3,%xmm3 | ||
3353 | |||
3354 | # qhasm: in8 ^= *(uint32 *) (m + 96) | ||
3355 | # asm 1: xorl 96(<m=int32#5),<in8=int32#1 | ||
3356 | # asm 2: xorl 96(<m=%esi),<in8=%eax | ||
3357 | xorl 96(%esi),%eax | ||
3358 | |||
3359 | # qhasm: in9 ^= *(uint32 *) (m + 100) | ||
3360 | # asm 1: xorl 100(<m=int32#5),<in9=int32#2 | ||
3361 | # asm 2: xorl 100(<m=%esi),<in9=%ecx | ||
3362 | xorl 100(%esi),%ecx | ||
3363 | |||
3364 | # qhasm: in10 ^= *(uint32 *) (m + 104) | ||
3365 | # asm 1: xorl 104(<m=int32#5),<in10=int32#3 | ||
3366 | # asm 2: xorl 104(<m=%esi),<in10=%edx | ||
3367 | xorl 104(%esi),%edx | ||
3368 | |||
3369 | # qhasm: in11 ^= *(uint32 *) (m + 108) | ||
3370 | # asm 1: xorl 108(<m=int32#5),<in11=int32#4 | ||
3371 | # asm 2: xorl 108(<m=%esi),<in11=%ebx | ||
3372 | xorl 108(%esi),%ebx | ||
3373 | |||
3374 | # qhasm: *(uint32 *) (out + 96) = in8 | ||
3375 | # asm 1: movl <in8=int32#1,96(<out=int32#6) | ||
3376 | # asm 2: movl <in8=%eax,96(<out=%edi) | ||
3377 | movl %eax,96(%edi) | ||
3378 | |||
3379 | # qhasm: *(uint32 *) (out + 100) = in9 | ||
3380 | # asm 1: movl <in9=int32#2,100(<out=int32#6) | ||
3381 | # asm 2: movl <in9=%ecx,100(<out=%edi) | ||
3382 | movl %ecx,100(%edi) | ||
3383 | |||
3384 | # qhasm: *(uint32 *) (out + 104) = in10 | ||
3385 | # asm 1: movl <in10=int32#3,104(<out=int32#6) | ||
3386 | # asm 2: movl <in10=%edx,104(<out=%edi) | ||
3387 | movl %edx,104(%edi) | ||
3388 | |||
3389 | # qhasm: *(uint32 *) (out + 108) = in11 | ||
3390 | # asm 1: movl <in11=int32#4,108(<out=int32#6) | ||
3391 | # asm 2: movl <in11=%ebx,108(<out=%edi) | ||
3392 | movl %ebx,108(%edi) | ||
3393 | |||
3394 | # qhasm: in8 = z8 | ||
3395 | # asm 1: movd <z8=int6464#1,>in8=int32#1 | ||
3396 | # asm 2: movd <z8=%xmm0,>in8=%eax | ||
3397 | movd %xmm0,%eax | ||
3398 | |||
3399 | # qhasm: in9 = z9 | ||
3400 | # asm 1: movd <z9=int6464#2,>in9=int32#2 | ||
3401 | # asm 2: movd <z9=%xmm1,>in9=%ecx | ||
3402 | movd %xmm1,%ecx | ||
3403 | |||
3404 | # qhasm: in10 = z10 | ||
3405 | # asm 1: movd <z10=int6464#3,>in10=int32#3 | ||
3406 | # asm 2: movd <z10=%xmm2,>in10=%edx | ||
3407 | movd %xmm2,%edx | ||
3408 | |||
3409 | # qhasm: in11 = z11 | ||
3410 | # asm 1: movd <z11=int6464#4,>in11=int32#4 | ||
3411 | # asm 2: movd <z11=%xmm3,>in11=%ebx | ||
3412 | movd %xmm3,%ebx | ||
3413 | |||
3414 | # qhasm: z8 <<<= 96 | ||
3415 | # asm 1: pshufd $0x39,<z8=int6464#1,<z8=int6464#1 | ||
3416 | # asm 2: pshufd $0x39,<z8=%xmm0,<z8=%xmm0 | ||
3417 | pshufd $0x39,%xmm0,%xmm0 | ||
3418 | |||
3419 | # qhasm: z9 <<<= 96 | ||
3420 | # asm 1: pshufd $0x39,<z9=int6464#2,<z9=int6464#2 | ||
3421 | # asm 2: pshufd $0x39,<z9=%xmm1,<z9=%xmm1 | ||
3422 | pshufd $0x39,%xmm1,%xmm1 | ||
3423 | |||
3424 | # qhasm: z10 <<<= 96 | ||
3425 | # asm 1: pshufd $0x39,<z10=int6464#3,<z10=int6464#3 | ||
3426 | # asm 2: pshufd $0x39,<z10=%xmm2,<z10=%xmm2 | ||
3427 | pshufd $0x39,%xmm2,%xmm2 | ||
3428 | |||
3429 | # qhasm: z11 <<<= 96 | ||
3430 | # asm 1: pshufd $0x39,<z11=int6464#4,<z11=int6464#4 | ||
3431 | # asm 2: pshufd $0x39,<z11=%xmm3,<z11=%xmm3 | ||
3432 | pshufd $0x39,%xmm3,%xmm3 | ||
3433 | |||
3434 | # qhasm: in8 ^= *(uint32 *) (m + 160) | ||
3435 | # asm 1: xorl 160(<m=int32#5),<in8=int32#1 | ||
3436 | # asm 2: xorl 160(<m=%esi),<in8=%eax | ||
3437 | xorl 160(%esi),%eax | ||
3438 | |||
3439 | # qhasm: in9 ^= *(uint32 *) (m + 164) | ||
3440 | # asm 1: xorl 164(<m=int32#5),<in9=int32#2 | ||
3441 | # asm 2: xorl 164(<m=%esi),<in9=%ecx | ||
3442 | xorl 164(%esi),%ecx | ||
3443 | |||
3444 | # qhasm: in10 ^= *(uint32 *) (m + 168) | ||
3445 | # asm 1: xorl 168(<m=int32#5),<in10=int32#3 | ||
3446 | # asm 2: xorl 168(<m=%esi),<in10=%edx | ||
3447 | xorl 168(%esi),%edx | ||
3448 | |||
3449 | # qhasm: in11 ^= *(uint32 *) (m + 172) | ||
3450 | # asm 1: xorl 172(<m=int32#5),<in11=int32#4 | ||
3451 | # asm 2: xorl 172(<m=%esi),<in11=%ebx | ||
3452 | xorl 172(%esi),%ebx | ||
3453 | |||
3454 | # qhasm: *(uint32 *) (out + 160) = in8 | ||
3455 | # asm 1: movl <in8=int32#1,160(<out=int32#6) | ||
3456 | # asm 2: movl <in8=%eax,160(<out=%edi) | ||
3457 | movl %eax,160(%edi) | ||
3458 | |||
3459 | # qhasm: *(uint32 *) (out + 164) = in9 | ||
3460 | # asm 1: movl <in9=int32#2,164(<out=int32#6) | ||
3461 | # asm 2: movl <in9=%ecx,164(<out=%edi) | ||
3462 | movl %ecx,164(%edi) | ||
3463 | |||
3464 | # qhasm: *(uint32 *) (out + 168) = in10 | ||
3465 | # asm 1: movl <in10=int32#3,168(<out=int32#6) | ||
3466 | # asm 2: movl <in10=%edx,168(<out=%edi) | ||
3467 | movl %edx,168(%edi) | ||
3468 | |||
3469 | # qhasm: *(uint32 *) (out + 172) = in11 | ||
3470 | # asm 1: movl <in11=int32#4,172(<out=int32#6) | ||
3471 | # asm 2: movl <in11=%ebx,172(<out=%edi) | ||
3472 | movl %ebx,172(%edi) | ||
3473 | |||
3474 | # qhasm: in8 = z8 | ||
3475 | # asm 1: movd <z8=int6464#1,>in8=int32#1 | ||
3476 | # asm 2: movd <z8=%xmm0,>in8=%eax | ||
3477 | movd %xmm0,%eax | ||
3478 | |||
3479 | # qhasm: in9 = z9 | ||
3480 | # asm 1: movd <z9=int6464#2,>in9=int32#2 | ||
3481 | # asm 2: movd <z9=%xmm1,>in9=%ecx | ||
3482 | movd %xmm1,%ecx | ||
3483 | |||
3484 | # qhasm: in10 = z10 | ||
3485 | # asm 1: movd <z10=int6464#3,>in10=int32#3 | ||
3486 | # asm 2: movd <z10=%xmm2,>in10=%edx | ||
3487 | movd %xmm2,%edx | ||
3488 | |||
3489 | # qhasm: in11 = z11 | ||
3490 | # asm 1: movd <z11=int6464#4,>in11=int32#4 | ||
3491 | # asm 2: movd <z11=%xmm3,>in11=%ebx | ||
3492 | movd %xmm3,%ebx | ||
3493 | |||
3494 | # qhasm: in8 ^= *(uint32 *) (m + 224) | ||
3495 | # asm 1: xorl 224(<m=int32#5),<in8=int32#1 | ||
3496 | # asm 2: xorl 224(<m=%esi),<in8=%eax | ||
3497 | xorl 224(%esi),%eax | ||
3498 | |||
3499 | # qhasm: in9 ^= *(uint32 *) (m + 228) | ||
3500 | # asm 1: xorl 228(<m=int32#5),<in9=int32#2 | ||
3501 | # asm 2: xorl 228(<m=%esi),<in9=%ecx | ||
3502 | xorl 228(%esi),%ecx | ||
3503 | |||
3504 | # qhasm: in10 ^= *(uint32 *) (m + 232) | ||
3505 | # asm 1: xorl 232(<m=int32#5),<in10=int32#3 | ||
3506 | # asm 2: xorl 232(<m=%esi),<in10=%edx | ||
3507 | xorl 232(%esi),%edx | ||
3508 | |||
3509 | # qhasm: in11 ^= *(uint32 *) (m + 236) | ||
3510 | # asm 1: xorl 236(<m=int32#5),<in11=int32#4 | ||
3511 | # asm 2: xorl 236(<m=%esi),<in11=%ebx | ||
3512 | xorl 236(%esi),%ebx | ||
3513 | |||
3514 | # qhasm: *(uint32 *) (out + 224) = in8 | ||
3515 | # asm 1: movl <in8=int32#1,224(<out=int32#6) | ||
3516 | # asm 2: movl <in8=%eax,224(<out=%edi) | ||
3517 | movl %eax,224(%edi) | ||
3518 | |||
3519 | # qhasm: *(uint32 *) (out + 228) = in9 | ||
3520 | # asm 1: movl <in9=int32#2,228(<out=int32#6) | ||
3521 | # asm 2: movl <in9=%ecx,228(<out=%edi) | ||
3522 | movl %ecx,228(%edi) | ||
3523 | |||
3524 | # qhasm: *(uint32 *) (out + 232) = in10 | ||
3525 | # asm 1: movl <in10=int32#3,232(<out=int32#6) | ||
3526 | # asm 2: movl <in10=%edx,232(<out=%edi) | ||
3527 | movl %edx,232(%edi) | ||
3528 | |||
3529 | # qhasm: *(uint32 *) (out + 236) = in11 | ||
3530 | # asm 1: movl <in11=int32#4,236(<out=int32#6) | ||
3531 | # asm 2: movl <in11=%ebx,236(<out=%edi) | ||
3532 | movl %ebx,236(%edi) | ||
3533 | |||
3534 | # qhasm: z12 = z12_stack | ||
3535 | # asm 1: movdqa <z12_stack=stack128#35,>z12=int6464#1 | ||
3536 | # asm 2: movdqa <z12_stack=576(%esp),>z12=%xmm0 | ||
3537 | movdqa 576(%esp),%xmm0 | ||
3538 | |||
3539 | # qhasm: z13 = z13_stack | ||
3540 | # asm 1: movdqa <z13_stack=stack128#30,>z13=int6464#2 | ||
3541 | # asm 2: movdqa <z13_stack=496(%esp),>z13=%xmm1 | ||
3542 | movdqa 496(%esp),%xmm1 | ||
3543 | |||
3544 | # qhasm: z14 = z14_stack | ||
3545 | # asm 1: movdqa <z14_stack=stack128#24,>z14=int6464#3 | ||
3546 | # asm 2: movdqa <z14_stack=400(%esp),>z14=%xmm2 | ||
3547 | movdqa 400(%esp),%xmm2 | ||
3548 | |||
3549 | # qhasm: z15 = z15_stack | ||
3550 | # asm 1: movdqa <z15_stack=stack128#23,>z15=int6464#4 | ||
3551 | # asm 2: movdqa <z15_stack=384(%esp),>z15=%xmm3 | ||
3552 | movdqa 384(%esp),%xmm3 | ||
3553 | |||
3554 | # qhasm: uint32323232 z12 += orig12 | ||
3555 | # asm 1: paddd <orig12=stack128#11,<z12=int6464#1 | ||
3556 | # asm 2: paddd <orig12=192(%esp),<z12=%xmm0 | ||
3557 | paddd 192(%esp),%xmm0 | ||
3558 | |||
3559 | # qhasm: uint32323232 z13 += orig13 | ||
3560 | # asm 1: paddd <orig13=stack128#14,<z13=int6464#2 | ||
3561 | # asm 2: paddd <orig13=240(%esp),<z13=%xmm1 | ||
3562 | paddd 240(%esp),%xmm1 | ||
3563 | |||
3564 | # qhasm: uint32323232 z14 += orig14 | ||
3565 | # asm 1: paddd <orig14=stack128#17,<z14=int6464#3 | ||
3566 | # asm 2: paddd <orig14=288(%esp),<z14=%xmm2 | ||
3567 | paddd 288(%esp),%xmm2 | ||
3568 | |||
3569 | # qhasm: uint32323232 z15 += orig15 | ||
3570 | # asm 1: paddd <orig15=stack128#7,<z15=int6464#4 | ||
3571 | # asm 2: paddd <orig15=128(%esp),<z15=%xmm3 | ||
3572 | paddd 128(%esp),%xmm3 | ||
3573 | |||
3574 | # qhasm: in12 = z12 | ||
3575 | # asm 1: movd <z12=int6464#1,>in12=int32#1 | ||
3576 | # asm 2: movd <z12=%xmm0,>in12=%eax | ||
3577 | movd %xmm0,%eax | ||
3578 | |||
3579 | # qhasm: in13 = z13 | ||
3580 | # asm 1: movd <z13=int6464#2,>in13=int32#2 | ||
3581 | # asm 2: movd <z13=%xmm1,>in13=%ecx | ||
3582 | movd %xmm1,%ecx | ||
3583 | |||
3584 | # qhasm: in14 = z14 | ||
3585 | # asm 1: movd <z14=int6464#3,>in14=int32#3 | ||
3586 | # asm 2: movd <z14=%xmm2,>in14=%edx | ||
3587 | movd %xmm2,%edx | ||
3588 | |||
3589 | # qhasm: in15 = z15 | ||
3590 | # asm 1: movd <z15=int6464#4,>in15=int32#4 | ||
3591 | # asm 2: movd <z15=%xmm3,>in15=%ebx | ||
3592 | movd %xmm3,%ebx | ||
3593 | |||
3594 | # qhasm: z12 <<<= 96 | ||
3595 | # asm 1: pshufd $0x39,<z12=int6464#1,<z12=int6464#1 | ||
3596 | # asm 2: pshufd $0x39,<z12=%xmm0,<z12=%xmm0 | ||
3597 | pshufd $0x39,%xmm0,%xmm0 | ||
3598 | |||
3599 | # qhasm: z13 <<<= 96 | ||
3600 | # asm 1: pshufd $0x39,<z13=int6464#2,<z13=int6464#2 | ||
3601 | # asm 2: pshufd $0x39,<z13=%xmm1,<z13=%xmm1 | ||
3602 | pshufd $0x39,%xmm1,%xmm1 | ||
3603 | |||
3604 | # qhasm: z14 <<<= 96 | ||
3605 | # asm 1: pshufd $0x39,<z14=int6464#3,<z14=int6464#3 | ||
3606 | # asm 2: pshufd $0x39,<z14=%xmm2,<z14=%xmm2 | ||
3607 | pshufd $0x39,%xmm2,%xmm2 | ||
3608 | |||
3609 | # qhasm: z15 <<<= 96 | ||
3610 | # asm 1: pshufd $0x39,<z15=int6464#4,<z15=int6464#4 | ||
3611 | # asm 2: pshufd $0x39,<z15=%xmm3,<z15=%xmm3 | ||
3612 | pshufd $0x39,%xmm3,%xmm3 | ||
3613 | |||
3614 | # qhasm: in12 ^= *(uint32 *) (m + 48) | ||
3615 | # asm 1: xorl 48(<m=int32#5),<in12=int32#1 | ||
3616 | # asm 2: xorl 48(<m=%esi),<in12=%eax | ||
3617 | xorl 48(%esi),%eax | ||
3618 | |||
3619 | # qhasm: in13 ^= *(uint32 *) (m + 52) | ||
3620 | # asm 1: xorl 52(<m=int32#5),<in13=int32#2 | ||
3621 | # asm 2: xorl 52(<m=%esi),<in13=%ecx | ||
3622 | xorl 52(%esi),%ecx | ||
3623 | |||
3624 | # qhasm: in14 ^= *(uint32 *) (m + 56) | ||
3625 | # asm 1: xorl 56(<m=int32#5),<in14=int32#3 | ||
3626 | # asm 2: xorl 56(<m=%esi),<in14=%edx | ||
3627 | xorl 56(%esi),%edx | ||
3628 | |||
3629 | # qhasm: in15 ^= *(uint32 *) (m + 60) | ||
3630 | # asm 1: xorl 60(<m=int32#5),<in15=int32#4 | ||
3631 | # asm 2: xorl 60(<m=%esi),<in15=%ebx | ||
3632 | xorl 60(%esi),%ebx | ||
3633 | |||
3634 | # qhasm: *(uint32 *) (out + 48) = in12 | ||
3635 | # asm 1: movl <in12=int32#1,48(<out=int32#6) | ||
3636 | # asm 2: movl <in12=%eax,48(<out=%edi) | ||
3637 | movl %eax,48(%edi) | ||
3638 | |||
3639 | # qhasm: *(uint32 *) (out + 52) = in13 | ||
3640 | # asm 1: movl <in13=int32#2,52(<out=int32#6) | ||
3641 | # asm 2: movl <in13=%ecx,52(<out=%edi) | ||
3642 | movl %ecx,52(%edi) | ||
3643 | |||
3644 | # qhasm: *(uint32 *) (out + 56) = in14 | ||
3645 | # asm 1: movl <in14=int32#3,56(<out=int32#6) | ||
3646 | # asm 2: movl <in14=%edx,56(<out=%edi) | ||
3647 | movl %edx,56(%edi) | ||
3648 | |||
3649 | # qhasm: *(uint32 *) (out + 60) = in15 | ||
3650 | # asm 1: movl <in15=int32#4,60(<out=int32#6) | ||
3651 | # asm 2: movl <in15=%ebx,60(<out=%edi) | ||
3652 | movl %ebx,60(%edi) | ||
3653 | |||
3654 | # qhasm: in12 = z12 | ||
3655 | # asm 1: movd <z12=int6464#1,>in12=int32#1 | ||
3656 | # asm 2: movd <z12=%xmm0,>in12=%eax | ||
3657 | movd %xmm0,%eax | ||
3658 | |||
3659 | # qhasm: in13 = z13 | ||
3660 | # asm 1: movd <z13=int6464#2,>in13=int32#2 | ||
3661 | # asm 2: movd <z13=%xmm1,>in13=%ecx | ||
3662 | movd %xmm1,%ecx | ||
3663 | |||
3664 | # qhasm: in14 = z14 | ||
3665 | # asm 1: movd <z14=int6464#3,>in14=int32#3 | ||
3666 | # asm 2: movd <z14=%xmm2,>in14=%edx | ||
3667 | movd %xmm2,%edx | ||
3668 | |||
3669 | # qhasm: in15 = z15 | ||
3670 | # asm 1: movd <z15=int6464#4,>in15=int32#4 | ||
3671 | # asm 2: movd <z15=%xmm3,>in15=%ebx | ||
3672 | movd %xmm3,%ebx | ||
3673 | |||
3674 | # qhasm: z12 <<<= 96 | ||
3675 | # asm 1: pshufd $0x39,<z12=int6464#1,<z12=int6464#1 | ||
3676 | # asm 2: pshufd $0x39,<z12=%xmm0,<z12=%xmm0 | ||
3677 | pshufd $0x39,%xmm0,%xmm0 | ||
3678 | |||
3679 | # qhasm: z13 <<<= 96 | ||
3680 | # asm 1: pshufd $0x39,<z13=int6464#2,<z13=int6464#2 | ||
3681 | # asm 2: pshufd $0x39,<z13=%xmm1,<z13=%xmm1 | ||
3682 | pshufd $0x39,%xmm1,%xmm1 | ||
3683 | |||
3684 | # qhasm: z14 <<<= 96 | ||
3685 | # asm 1: pshufd $0x39,<z14=int6464#3,<z14=int6464#3 | ||
3686 | # asm 2: pshufd $0x39,<z14=%xmm2,<z14=%xmm2 | ||
3687 | pshufd $0x39,%xmm2,%xmm2 | ||
3688 | |||
3689 | # qhasm: z15 <<<= 96 | ||
3690 | # asm 1: pshufd $0x39,<z15=int6464#4,<z15=int6464#4 | ||
3691 | # asm 2: pshufd $0x39,<z15=%xmm3,<z15=%xmm3 | ||
3692 | pshufd $0x39,%xmm3,%xmm3 | ||
3693 | |||
3694 | # qhasm: in12 ^= *(uint32 *) (m + 112) | ||
3695 | # asm 1: xorl 112(<m=int32#5),<in12=int32#1 | ||
3696 | # asm 2: xorl 112(<m=%esi),<in12=%eax | ||
3697 | xorl 112(%esi),%eax | ||
3698 | |||
3699 | # qhasm: in13 ^= *(uint32 *) (m + 116) | ||
3700 | # asm 1: xorl 116(<m=int32#5),<in13=int32#2 | ||
3701 | # asm 2: xorl 116(<m=%esi),<in13=%ecx | ||
3702 | xorl 116(%esi),%ecx | ||
3703 | |||
3704 | # qhasm: in14 ^= *(uint32 *) (m + 120) | ||
3705 | # asm 1: xorl 120(<m=int32#5),<in14=int32#3 | ||
3706 | # asm 2: xorl 120(<m=%esi),<in14=%edx | ||
3707 | xorl 120(%esi),%edx | ||
3708 | |||
3709 | # qhasm: in15 ^= *(uint32 *) (m + 124) | ||
3710 | # asm 1: xorl 124(<m=int32#5),<in15=int32#4 | ||
3711 | # asm 2: xorl 124(<m=%esi),<in15=%ebx | ||
3712 | xorl 124(%esi),%ebx | ||
3713 | |||
3714 | # qhasm: *(uint32 *) (out + 112) = in12 | ||
3715 | # asm 1: movl <in12=int32#1,112(<out=int32#6) | ||
3716 | # asm 2: movl <in12=%eax,112(<out=%edi) | ||
3717 | movl %eax,112(%edi) | ||
3718 | |||
3719 | # qhasm: *(uint32 *) (out + 116) = in13 | ||
3720 | # asm 1: movl <in13=int32#2,116(<out=int32#6) | ||
3721 | # asm 2: movl <in13=%ecx,116(<out=%edi) | ||
3722 | movl %ecx,116(%edi) | ||
3723 | |||
3724 | # qhasm: *(uint32 *) (out + 120) = in14 | ||
3725 | # asm 1: movl <in14=int32#3,120(<out=int32#6) | ||
3726 | # asm 2: movl <in14=%edx,120(<out=%edi) | ||
3727 | movl %edx,120(%edi) | ||
3728 | |||
3729 | # qhasm: *(uint32 *) (out + 124) = in15 | ||
3730 | # asm 1: movl <in15=int32#4,124(<out=int32#6) | ||
3731 | # asm 2: movl <in15=%ebx,124(<out=%edi) | ||
3732 | movl %ebx,124(%edi) | ||
3733 | |||
3734 | # qhasm: in12 = z12 | ||
3735 | # asm 1: movd <z12=int6464#1,>in12=int32#1 | ||
3736 | # asm 2: movd <z12=%xmm0,>in12=%eax | ||
3737 | movd %xmm0,%eax | ||
3738 | |||
3739 | # qhasm: in13 = z13 | ||
3740 | # asm 1: movd <z13=int6464#2,>in13=int32#2 | ||
3741 | # asm 2: movd <z13=%xmm1,>in13=%ecx | ||
3742 | movd %xmm1,%ecx | ||
3743 | |||
3744 | # qhasm: in14 = z14 | ||
3745 | # asm 1: movd <z14=int6464#3,>in14=int32#3 | ||
3746 | # asm 2: movd <z14=%xmm2,>in14=%edx | ||
3747 | movd %xmm2,%edx | ||
3748 | |||
3749 | # qhasm: in15 = z15 | ||
3750 | # asm 1: movd <z15=int6464#4,>in15=int32#4 | ||
3751 | # asm 2: movd <z15=%xmm3,>in15=%ebx | ||
3752 | movd %xmm3,%ebx | ||
3753 | |||
3754 | # qhasm: z12 <<<= 96 | ||
3755 | # asm 1: pshufd $0x39,<z12=int6464#1,<z12=int6464#1 | ||
3756 | # asm 2: pshufd $0x39,<z12=%xmm0,<z12=%xmm0 | ||
3757 | pshufd $0x39,%xmm0,%xmm0 | ||
3758 | |||
3759 | # qhasm: z13 <<<= 96 | ||
3760 | # asm 1: pshufd $0x39,<z13=int6464#2,<z13=int6464#2 | ||
3761 | # asm 2: pshufd $0x39,<z13=%xmm1,<z13=%xmm1 | ||
3762 | pshufd $0x39,%xmm1,%xmm1 | ||
3763 | |||
3764 | # qhasm: z14 <<<= 96 | ||
3765 | # asm 1: pshufd $0x39,<z14=int6464#3,<z14=int6464#3 | ||
3766 | # asm 2: pshufd $0x39,<z14=%xmm2,<z14=%xmm2 | ||
3767 | pshufd $0x39,%xmm2,%xmm2 | ||
3768 | |||
3769 | # qhasm: z15 <<<= 96 | ||
3770 | # asm 1: pshufd $0x39,<z15=int6464#4,<z15=int6464#4 | ||
3771 | # asm 2: pshufd $0x39,<z15=%xmm3,<z15=%xmm3 | ||
3772 | pshufd $0x39,%xmm3,%xmm3 | ||
3773 | |||
3774 | # qhasm: in12 ^= *(uint32 *) (m + 176) | ||
3775 | # asm 1: xorl 176(<m=int32#5),<in12=int32#1 | ||
3776 | # asm 2: xorl 176(<m=%esi),<in12=%eax | ||
3777 | xorl 176(%esi),%eax | ||
3778 | |||
3779 | # qhasm: in13 ^= *(uint32 *) (m + 180) | ||
3780 | # asm 1: xorl 180(<m=int32#5),<in13=int32#2 | ||
3781 | # asm 2: xorl 180(<m=%esi),<in13=%ecx | ||
3782 | xorl 180(%esi),%ecx | ||
3783 | |||
3784 | # qhasm: in14 ^= *(uint32 *) (m + 184) | ||
3785 | # asm 1: xorl 184(<m=int32#5),<in14=int32#3 | ||
3786 | # asm 2: xorl 184(<m=%esi),<in14=%edx | ||
3787 | xorl 184(%esi),%edx | ||
3788 | |||
3789 | # qhasm: in15 ^= *(uint32 *) (m + 188) | ||
3790 | # asm 1: xorl 188(<m=int32#5),<in15=int32#4 | ||
3791 | # asm 2: xorl 188(<m=%esi),<in15=%ebx | ||
3792 | xorl 188(%esi),%ebx | ||
3793 | |||
3794 | # qhasm: *(uint32 *) (out + 176) = in12 | ||
3795 | # asm 1: movl <in12=int32#1,176(<out=int32#6) | ||
3796 | # asm 2: movl <in12=%eax,176(<out=%edi) | ||
3797 | movl %eax,176(%edi) | ||
3798 | |||
3799 | # qhasm: *(uint32 *) (out + 180) = in13 | ||
3800 | # asm 1: movl <in13=int32#2,180(<out=int32#6) | ||
3801 | # asm 2: movl <in13=%ecx,180(<out=%edi) | ||
3802 | movl %ecx,180(%edi) | ||
3803 | |||
3804 | # qhasm: *(uint32 *) (out + 184) = in14 | ||
3805 | # asm 1: movl <in14=int32#3,184(<out=int32#6) | ||
3806 | # asm 2: movl <in14=%edx,184(<out=%edi) | ||
3807 | movl %edx,184(%edi) | ||
3808 | |||
3809 | # qhasm: *(uint32 *) (out + 188) = in15 | ||
3810 | # asm 1: movl <in15=int32#4,188(<out=int32#6) | ||
3811 | # asm 2: movl <in15=%ebx,188(<out=%edi) | ||
3812 | movl %ebx,188(%edi) | ||
3813 | |||
3814 | # qhasm: in12 = z12 | ||
3815 | # asm 1: movd <z12=int6464#1,>in12=int32#1 | ||
3816 | # asm 2: movd <z12=%xmm0,>in12=%eax | ||
3817 | movd %xmm0,%eax | ||
3818 | |||
3819 | # qhasm: in13 = z13 | ||
3820 | # asm 1: movd <z13=int6464#2,>in13=int32#2 | ||
3821 | # asm 2: movd <z13=%xmm1,>in13=%ecx | ||
3822 | movd %xmm1,%ecx | ||
3823 | |||
3824 | # qhasm: in14 = z14 | ||
3825 | # asm 1: movd <z14=int6464#3,>in14=int32#3 | ||
3826 | # asm 2: movd <z14=%xmm2,>in14=%edx | ||
3827 | movd %xmm2,%edx | ||
3828 | |||
3829 | # qhasm: in15 = z15 | ||
3830 | # asm 1: movd <z15=int6464#4,>in15=int32#4 | ||
3831 | # asm 2: movd <z15=%xmm3,>in15=%ebx | ||
3832 | movd %xmm3,%ebx | ||
3833 | |||
3834 | # qhasm: in12 ^= *(uint32 *) (m + 240) | ||
3835 | # asm 1: xorl 240(<m=int32#5),<in12=int32#1 | ||
3836 | # asm 2: xorl 240(<m=%esi),<in12=%eax | ||
3837 | xorl 240(%esi),%eax | ||
3838 | |||
3839 | # qhasm: in13 ^= *(uint32 *) (m + 244) | ||
3840 | # asm 1: xorl 244(<m=int32#5),<in13=int32#2 | ||
3841 | # asm 2: xorl 244(<m=%esi),<in13=%ecx | ||
3842 | xorl 244(%esi),%ecx | ||
3843 | |||
3844 | # qhasm: in14 ^= *(uint32 *) (m + 248) | ||
3845 | # asm 1: xorl 248(<m=int32#5),<in14=int32#3 | ||
3846 | # asm 2: xorl 248(<m=%esi),<in14=%edx | ||
3847 | xorl 248(%esi),%edx | ||
3848 | |||
3849 | # qhasm: in15 ^= *(uint32 *) (m + 252) | ||
3850 | # asm 1: xorl 252(<m=int32#5),<in15=int32#4 | ||
3851 | # asm 2: xorl 252(<m=%esi),<in15=%ebx | ||
3852 | xorl 252(%esi),%ebx | ||
3853 | |||
3854 | # qhasm: *(uint32 *) (out + 240) = in12 | ||
3855 | # asm 1: movl <in12=int32#1,240(<out=int32#6) | ||
3856 | # asm 2: movl <in12=%eax,240(<out=%edi) | ||
3857 | movl %eax,240(%edi) | ||
3858 | |||
3859 | # qhasm: *(uint32 *) (out + 244) = in13 | ||
3860 | # asm 1: movl <in13=int32#2,244(<out=int32#6) | ||
3861 | # asm 2: movl <in13=%ecx,244(<out=%edi) | ||
3862 | movl %ecx,244(%edi) | ||
3863 | |||
3864 | # qhasm: *(uint32 *) (out + 248) = in14 | ||
3865 | # asm 1: movl <in14=int32#3,248(<out=int32#6) | ||
3866 | # asm 2: movl <in14=%edx,248(<out=%edi) | ||
3867 | movl %edx,248(%edi) | ||
3868 | |||
3869 | # qhasm: *(uint32 *) (out + 252) = in15 | ||
3870 | # asm 1: movl <in15=int32#4,252(<out=int32#6) | ||
3871 | # asm 2: movl <in15=%ebx,252(<out=%edi) | ||
3872 | movl %ebx,252(%edi) | ||
3873 | |||
3874 | # qhasm: bytes = bytes_stack | ||
3875 | # asm 1: movl <bytes_stack=stack32#7,>bytes=int32#1 | ||
3876 | # asm 2: movl <bytes_stack=24(%esp),>bytes=%eax | ||
3877 | movl 24(%esp),%eax | ||
3878 | |||
3879 | # qhasm: bytes -= 256 | ||
3880 | # asm 1: sub $256,<bytes=int32#1 | ||
3881 | # asm 2: sub $256,<bytes=%eax | ||
3882 | sub $256,%eax | ||
3883 | |||
3884 | # qhasm: m += 256 | ||
3885 | # asm 1: add $256,<m=int32#5 | ||
3886 | # asm 2: add $256,<m=%esi | ||
3887 | add $256,%esi | ||
3888 | |||
3889 | # qhasm: out += 256 | ||
3890 | # asm 1: add $256,<out=int32#6 | ||
3891 | # asm 2: add $256,<out=%edi | ||
3892 | add $256,%edi | ||
3893 | |||
3894 | # qhasm: out_stack = out | ||
3895 | # asm 1: movl <out=int32#6,>out_stack=stack32#6 | ||
3896 | # asm 2: movl <out=%edi,>out_stack=20(%esp) | ||
3897 | movl %edi,20(%esp) | ||
3898 | |||
3899 | # qhasm: unsigned<? bytes - 256 | ||
3900 | # asm 1: cmp $256,<bytes=int32#1 | ||
3901 | # asm 2: cmp $256,<bytes=%eax | ||
3902 | cmp $256,%eax | ||
3903 | # comment:fp stack unchanged by jump | ||
3904 | |||
3905 | # qhasm: goto bytesatleast256 if !unsigned< | ||
3906 | jae ._bytesatleast256 | ||
3907 | |||
3908 | # qhasm: unsigned>? bytes - 0 | ||
3909 | # asm 1: cmp $0,<bytes=int32#1 | ||
3910 | # asm 2: cmp $0,<bytes=%eax | ||
3911 | cmp $0,%eax | ||
3912 | # comment:fp stack unchanged by jump | ||
3913 | |||
3914 | # qhasm: goto done if !unsigned> | ||
3915 | jbe ._done | ||
3916 | # comment:fp stack unchanged by fallthrough | ||
3917 | |||
3918 | # qhasm: bytesbetween1and255: | ||
3919 | ._bytesbetween1and255: | ||
3920 | |||
3921 | # qhasm: unsigned<? bytes - 64 | ||
3922 | # asm 1: cmp $64,<bytes=int32#1 | ||
3923 | # asm 2: cmp $64,<bytes=%eax | ||
3924 | cmp $64,%eax | ||
3925 | # comment:fp stack unchanged by jump | ||
3926 | |||
3927 | # qhasm: goto nocopy if !unsigned< | ||
3928 | jae ._nocopy | ||
3929 | |||
3930 | # qhasm: ctarget = out | ||
3931 | # asm 1: movl <out=int32#6,>ctarget=stack32#6 | ||
3932 | # asm 2: movl <out=%edi,>ctarget=20(%esp) | ||
3933 | movl %edi,20(%esp) | ||
3934 | |||
3935 | # qhasm: out = &tmp | ||
3936 | # asm 1: leal <tmp=stack512#1,>out=int32#6 | ||
3937 | # asm 2: leal <tmp=640(%esp),>out=%edi | ||
3938 | leal 640(%esp),%edi | ||
3939 | |||
3940 | # qhasm: i = bytes | ||
3941 | # asm 1: mov <bytes=int32#1,>i=int32#2 | ||
3942 | # asm 2: mov <bytes=%eax,>i=%ecx | ||
3943 | mov %eax,%ecx | ||
3944 | |||
3945 | # qhasm: while (i) { *out++ = *m++; --i } | ||
3946 | rep movsb | ||
3947 | |||
3948 | # qhasm: out = &tmp | ||
3949 | # asm 1: leal <tmp=stack512#1,>out=int32#6 | ||
3950 | # asm 2: leal <tmp=640(%esp),>out=%edi | ||
3951 | leal 640(%esp),%edi | ||
3952 | |||
3953 | # qhasm: m = &tmp | ||
3954 | # asm 1: leal <tmp=stack512#1,>m=int32#5 | ||
3955 | # asm 2: leal <tmp=640(%esp),>m=%esi | ||
3956 | leal 640(%esp),%esi | ||
3957 | # comment:fp stack unchanged by fallthrough | ||
3958 | |||
3959 | # qhasm: nocopy: | ||
3960 | ._nocopy: | ||
3961 | |||
3962 | # qhasm: bytes_stack = bytes | ||
3963 | # asm 1: movl <bytes=int32#1,>bytes_stack=stack32#7 | ||
3964 | # asm 2: movl <bytes=%eax,>bytes_stack=24(%esp) | ||
3965 | movl %eax,24(%esp) | ||
3966 | |||
3967 | # qhasm: diag0 = x0 | ||
3968 | # asm 1: movdqa <x0=stack128#3,>diag0=int6464#1 | ||
3969 | # asm 2: movdqa <x0=64(%esp),>diag0=%xmm0 | ||
3970 | movdqa 64(%esp),%xmm0 | ||
3971 | |||
3972 | # qhasm: diag1 = x1 | ||
3973 | # asm 1: movdqa <x1=stack128#2,>diag1=int6464#2 | ||
3974 | # asm 2: movdqa <x1=48(%esp),>diag1=%xmm1 | ||
3975 | movdqa 48(%esp),%xmm1 | ||
3976 | |||
3977 | # qhasm: diag2 = x2 | ||
3978 | # asm 1: movdqa <x2=stack128#4,>diag2=int6464#3 | ||
3979 | # asm 2: movdqa <x2=80(%esp),>diag2=%xmm2 | ||
3980 | movdqa 80(%esp),%xmm2 | ||
3981 | |||
3982 | # qhasm: diag3 = x3 | ||
3983 | # asm 1: movdqa <x3=stack128#1,>diag3=int6464#4 | ||
3984 | # asm 2: movdqa <x3=32(%esp),>diag3=%xmm3 | ||
3985 | movdqa 32(%esp),%xmm3 | ||
3986 | |||
3987 | # qhasm: a0 = diag1 | ||
3988 | # asm 1: movdqa <diag1=int6464#2,>a0=int6464#5 | ||
3989 | # asm 2: movdqa <diag1=%xmm1,>a0=%xmm4 | ||
3990 | movdqa %xmm1,%xmm4 | ||
3991 | |||
3992 | # qhasm: i = 12 | ||
3993 | # asm 1: mov $12,>i=int32#1 | ||
3994 | # asm 2: mov $12,>i=%eax | ||
3995 | mov $12,%eax | ||
3996 | |||
3997 | # qhasm: mainloop2: | ||
3998 | ._mainloop2: | ||
3999 | |||
4000 | # qhasm: uint32323232 a0 += diag0 | ||
4001 | # asm 1: paddd <diag0=int6464#1,<a0=int6464#5 | ||
4002 | # asm 2: paddd <diag0=%xmm0,<a0=%xmm4 | ||
4003 | paddd %xmm0,%xmm4 | ||
4004 | |||
4005 | # qhasm: a1 = diag0 | ||
4006 | # asm 1: movdqa <diag0=int6464#1,>a1=int6464#6 | ||
4007 | # asm 2: movdqa <diag0=%xmm0,>a1=%xmm5 | ||
4008 | movdqa %xmm0,%xmm5 | ||
4009 | |||
4010 | # qhasm: b0 = a0 | ||
4011 | # asm 1: movdqa <a0=int6464#5,>b0=int6464#7 | ||
4012 | # asm 2: movdqa <a0=%xmm4,>b0=%xmm6 | ||
4013 | movdqa %xmm4,%xmm6 | ||
4014 | |||
4015 | # qhasm: uint32323232 a0 <<= 7 | ||
4016 | # asm 1: pslld $7,<a0=int6464#5 | ||
4017 | # asm 2: pslld $7,<a0=%xmm4 | ||
4018 | pslld $7,%xmm4 | ||
4019 | |||
4020 | # qhasm: uint32323232 b0 >>= 25 | ||
4021 | # asm 1: psrld $25,<b0=int6464#7 | ||
4022 | # asm 2: psrld $25,<b0=%xmm6 | ||
4023 | psrld $25,%xmm6 | ||
4024 | |||
4025 | # qhasm: diag3 ^= a0 | ||
4026 | # asm 1: pxor <a0=int6464#5,<diag3=int6464#4 | ||
4027 | # asm 2: pxor <a0=%xmm4,<diag3=%xmm3 | ||
4028 | pxor %xmm4,%xmm3 | ||
4029 | |||
4030 | # qhasm: diag3 ^= b0 | ||
4031 | # asm 1: pxor <b0=int6464#7,<diag3=int6464#4 | ||
4032 | # asm 2: pxor <b0=%xmm6,<diag3=%xmm3 | ||
4033 | pxor %xmm6,%xmm3 | ||
4034 | |||
4035 | # qhasm: uint32323232 a1 += diag3 | ||
4036 | # asm 1: paddd <diag3=int6464#4,<a1=int6464#6 | ||
4037 | # asm 2: paddd <diag3=%xmm3,<a1=%xmm5 | ||
4038 | paddd %xmm3,%xmm5 | ||
4039 | |||
4040 | # qhasm: a2 = diag3 | ||
4041 | # asm 1: movdqa <diag3=int6464#4,>a2=int6464#5 | ||
4042 | # asm 2: movdqa <diag3=%xmm3,>a2=%xmm4 | ||
4043 | movdqa %xmm3,%xmm4 | ||
4044 | |||
4045 | # qhasm: b1 = a1 | ||
4046 | # asm 1: movdqa <a1=int6464#6,>b1=int6464#7 | ||
4047 | # asm 2: movdqa <a1=%xmm5,>b1=%xmm6 | ||
4048 | movdqa %xmm5,%xmm6 | ||
4049 | |||
4050 | # qhasm: uint32323232 a1 <<= 9 | ||
4051 | # asm 1: pslld $9,<a1=int6464#6 | ||
4052 | # asm 2: pslld $9,<a1=%xmm5 | ||
4053 | pslld $9,%xmm5 | ||
4054 | |||
4055 | # qhasm: uint32323232 b1 >>= 23 | ||
4056 | # asm 1: psrld $23,<b1=int6464#7 | ||
4057 | # asm 2: psrld $23,<b1=%xmm6 | ||
4058 | psrld $23,%xmm6 | ||
4059 | |||
4060 | # qhasm: diag2 ^= a1 | ||
4061 | # asm 1: pxor <a1=int6464#6,<diag2=int6464#3 | ||
4062 | # asm 2: pxor <a1=%xmm5,<diag2=%xmm2 | ||
4063 | pxor %xmm5,%xmm2 | ||
4064 | |||
4065 | # qhasm: diag3 <<<= 32 | ||
4066 | # asm 1: pshufd $0x93,<diag3=int6464#4,<diag3=int6464#4 | ||
4067 | # asm 2: pshufd $0x93,<diag3=%xmm3,<diag3=%xmm3 | ||
4068 | pshufd $0x93,%xmm3,%xmm3 | ||
4069 | |||
4070 | # qhasm: diag2 ^= b1 | ||
4071 | # asm 1: pxor <b1=int6464#7,<diag2=int6464#3 | ||
4072 | # asm 2: pxor <b1=%xmm6,<diag2=%xmm2 | ||
4073 | pxor %xmm6,%xmm2 | ||
4074 | |||
4075 | # qhasm: uint32323232 a2 += diag2 | ||
4076 | # asm 1: paddd <diag2=int6464#3,<a2=int6464#5 | ||
4077 | # asm 2: paddd <diag2=%xmm2,<a2=%xmm4 | ||
4078 | paddd %xmm2,%xmm4 | ||
4079 | |||
4080 | # qhasm: a3 = diag2 | ||
4081 | # asm 1: movdqa <diag2=int6464#3,>a3=int6464#6 | ||
4082 | # asm 2: movdqa <diag2=%xmm2,>a3=%xmm5 | ||
4083 | movdqa %xmm2,%xmm5 | ||
4084 | |||
4085 | # qhasm: b2 = a2 | ||
4086 | # asm 1: movdqa <a2=int6464#5,>b2=int6464#7 | ||
4087 | # asm 2: movdqa <a2=%xmm4,>b2=%xmm6 | ||
4088 | movdqa %xmm4,%xmm6 | ||
4089 | |||
4090 | # qhasm: uint32323232 a2 <<= 13 | ||
4091 | # asm 1: pslld $13,<a2=int6464#5 | ||
4092 | # asm 2: pslld $13,<a2=%xmm4 | ||
4093 | pslld $13,%xmm4 | ||
4094 | |||
4095 | # qhasm: uint32323232 b2 >>= 19 | ||
4096 | # asm 1: psrld $19,<b2=int6464#7 | ||
4097 | # asm 2: psrld $19,<b2=%xmm6 | ||
4098 | psrld $19,%xmm6 | ||
4099 | |||
4100 | # qhasm: diag1 ^= a2 | ||
4101 | # asm 1: pxor <a2=int6464#5,<diag1=int6464#2 | ||
4102 | # asm 2: pxor <a2=%xmm4,<diag1=%xmm1 | ||
4103 | pxor %xmm4,%xmm1 | ||
4104 | |||
4105 | # qhasm: diag2 <<<= 64 | ||
4106 | # asm 1: pshufd $0x4e,<diag2=int6464#3,<diag2=int6464#3 | ||
4107 | # asm 2: pshufd $0x4e,<diag2=%xmm2,<diag2=%xmm2 | ||
4108 | pshufd $0x4e,%xmm2,%xmm2 | ||
4109 | |||
4110 | # qhasm: diag1 ^= b2 | ||
4111 | # asm 1: pxor <b2=int6464#7,<diag1=int6464#2 | ||
4112 | # asm 2: pxor <b2=%xmm6,<diag1=%xmm1 | ||
4113 | pxor %xmm6,%xmm1 | ||
4114 | |||
4115 | # qhasm: uint32323232 a3 += diag1 | ||
4116 | # asm 1: paddd <diag1=int6464#2,<a3=int6464#6 | ||
4117 | # asm 2: paddd <diag1=%xmm1,<a3=%xmm5 | ||
4118 | paddd %xmm1,%xmm5 | ||
4119 | |||
4120 | # qhasm: a4 = diag3 | ||
4121 | # asm 1: movdqa <diag3=int6464#4,>a4=int6464#5 | ||
4122 | # asm 2: movdqa <diag3=%xmm3,>a4=%xmm4 | ||
4123 | movdqa %xmm3,%xmm4 | ||
4124 | |||
4125 | # qhasm: b3 = a3 | ||
4126 | # asm 1: movdqa <a3=int6464#6,>b3=int6464#7 | ||
4127 | # asm 2: movdqa <a3=%xmm5,>b3=%xmm6 | ||
4128 | movdqa %xmm5,%xmm6 | ||
4129 | |||
4130 | # qhasm: uint32323232 a3 <<= 18 | ||
4131 | # asm 1: pslld $18,<a3=int6464#6 | ||
4132 | # asm 2: pslld $18,<a3=%xmm5 | ||
4133 | pslld $18,%xmm5 | ||
4134 | |||
4135 | # qhasm: uint32323232 b3 >>= 14 | ||
4136 | # asm 1: psrld $14,<b3=int6464#7 | ||
4137 | # asm 2: psrld $14,<b3=%xmm6 | ||
4138 | psrld $14,%xmm6 | ||
4139 | |||
4140 | # qhasm: diag0 ^= a3 | ||
4141 | # asm 1: pxor <a3=int6464#6,<diag0=int6464#1 | ||
4142 | # asm 2: pxor <a3=%xmm5,<diag0=%xmm0 | ||
4143 | pxor %xmm5,%xmm0 | ||
4144 | |||
4145 | # qhasm: diag1 <<<= 96 | ||
4146 | # asm 1: pshufd $0x39,<diag1=int6464#2,<diag1=int6464#2 | ||
4147 | # asm 2: pshufd $0x39,<diag1=%xmm1,<diag1=%xmm1 | ||
4148 | pshufd $0x39,%xmm1,%xmm1 | ||
4149 | |||
4150 | # qhasm: diag0 ^= b3 | ||
4151 | # asm 1: pxor <b3=int6464#7,<diag0=int6464#1 | ||
4152 | # asm 2: pxor <b3=%xmm6,<diag0=%xmm0 | ||
4153 | pxor %xmm6,%xmm0 | ||
4154 | |||
4155 | # qhasm: uint32323232 a4 += diag0 | ||
4156 | # asm 1: paddd <diag0=int6464#1,<a4=int6464#5 | ||
4157 | # asm 2: paddd <diag0=%xmm0,<a4=%xmm4 | ||
4158 | paddd %xmm0,%xmm4 | ||
4159 | |||
4160 | # qhasm: a5 = diag0 | ||
4161 | # asm 1: movdqa <diag0=int6464#1,>a5=int6464#6 | ||
4162 | # asm 2: movdqa <diag0=%xmm0,>a5=%xmm5 | ||
4163 | movdqa %xmm0,%xmm5 | ||
4164 | |||
4165 | # qhasm: b4 = a4 | ||
4166 | # asm 1: movdqa <a4=int6464#5,>b4=int6464#7 | ||
4167 | # asm 2: movdqa <a4=%xmm4,>b4=%xmm6 | ||
4168 | movdqa %xmm4,%xmm6 | ||
4169 | |||
4170 | # qhasm: uint32323232 a4 <<= 7 | ||
4171 | # asm 1: pslld $7,<a4=int6464#5 | ||
4172 | # asm 2: pslld $7,<a4=%xmm4 | ||
4173 | pslld $7,%xmm4 | ||
4174 | |||
4175 | # qhasm: uint32323232 b4 >>= 25 | ||
4176 | # asm 1: psrld $25,<b4=int6464#7 | ||
4177 | # asm 2: psrld $25,<b4=%xmm6 | ||
4178 | psrld $25,%xmm6 | ||
4179 | |||
4180 | # qhasm: diag1 ^= a4 | ||
4181 | # asm 1: pxor <a4=int6464#5,<diag1=int6464#2 | ||
4182 | # asm 2: pxor <a4=%xmm4,<diag1=%xmm1 | ||
4183 | pxor %xmm4,%xmm1 | ||
4184 | |||
4185 | # qhasm: diag1 ^= b4 | ||
4186 | # asm 1: pxor <b4=int6464#7,<diag1=int6464#2 | ||
4187 | # asm 2: pxor <b4=%xmm6,<diag1=%xmm1 | ||
4188 | pxor %xmm6,%xmm1 | ||
4189 | |||
4190 | # qhasm: uint32323232 a5 += diag1 | ||
4191 | # asm 1: paddd <diag1=int6464#2,<a5=int6464#6 | ||
4192 | # asm 2: paddd <diag1=%xmm1,<a5=%xmm5 | ||
4193 | paddd %xmm1,%xmm5 | ||
4194 | |||
4195 | # qhasm: a6 = diag1 | ||
4196 | # asm 1: movdqa <diag1=int6464#2,>a6=int6464#5 | ||
4197 | # asm 2: movdqa <diag1=%xmm1,>a6=%xmm4 | ||
4198 | movdqa %xmm1,%xmm4 | ||
4199 | |||
4200 | # qhasm: b5 = a5 | ||
4201 | # asm 1: movdqa <a5=int6464#6,>b5=int6464#7 | ||
4202 | # asm 2: movdqa <a5=%xmm5,>b5=%xmm6 | ||
4203 | movdqa %xmm5,%xmm6 | ||
4204 | |||
4205 | # qhasm: uint32323232 a5 <<= 9 | ||
4206 | # asm 1: pslld $9,<a5=int6464#6 | ||
4207 | # asm 2: pslld $9,<a5=%xmm5 | ||
4208 | pslld $9,%xmm5 | ||
4209 | |||
4210 | # qhasm: uint32323232 b5 >>= 23 | ||
4211 | # asm 1: psrld $23,<b5=int6464#7 | ||
4212 | # asm 2: psrld $23,<b5=%xmm6 | ||
4213 | psrld $23,%xmm6 | ||
4214 | |||
4215 | # qhasm: diag2 ^= a5 | ||
4216 | # asm 1: pxor <a5=int6464#6,<diag2=int6464#3 | ||
4217 | # asm 2: pxor <a5=%xmm5,<diag2=%xmm2 | ||
4218 | pxor %xmm5,%xmm2 | ||
4219 | |||
4220 | # qhasm: diag1 <<<= 32 | ||
4221 | # asm 1: pshufd $0x93,<diag1=int6464#2,<diag1=int6464#2 | ||
4222 | # asm 2: pshufd $0x93,<diag1=%xmm1,<diag1=%xmm1 | ||
4223 | pshufd $0x93,%xmm1,%xmm1 | ||
4224 | |||
4225 | # qhasm: diag2 ^= b5 | ||
4226 | # asm 1: pxor <b5=int6464#7,<diag2=int6464#3 | ||
4227 | # asm 2: pxor <b5=%xmm6,<diag2=%xmm2 | ||
4228 | pxor %xmm6,%xmm2 | ||
4229 | |||
4230 | # qhasm: uint32323232 a6 += diag2 | ||
4231 | # asm 1: paddd <diag2=int6464#3,<a6=int6464#5 | ||
4232 | # asm 2: paddd <diag2=%xmm2,<a6=%xmm4 | ||
4233 | paddd %xmm2,%xmm4 | ||
4234 | |||
4235 | # qhasm: a7 = diag2 | ||
4236 | # asm 1: movdqa <diag2=int6464#3,>a7=int6464#6 | ||
4237 | # asm 2: movdqa <diag2=%xmm2,>a7=%xmm5 | ||
4238 | movdqa %xmm2,%xmm5 | ||
4239 | |||
4240 | # qhasm: b6 = a6 | ||
4241 | # asm 1: movdqa <a6=int6464#5,>b6=int6464#7 | ||
4242 | # asm 2: movdqa <a6=%xmm4,>b6=%xmm6 | ||
4243 | movdqa %xmm4,%xmm6 | ||
4244 | |||
4245 | # qhasm: uint32323232 a6 <<= 13 | ||
4246 | # asm 1: pslld $13,<a6=int6464#5 | ||
4247 | # asm 2: pslld $13,<a6=%xmm4 | ||
4248 | pslld $13,%xmm4 | ||
4249 | |||
4250 | # qhasm: uint32323232 b6 >>= 19 | ||
4251 | # asm 1: psrld $19,<b6=int6464#7 | ||
4252 | # asm 2: psrld $19,<b6=%xmm6 | ||
4253 | psrld $19,%xmm6 | ||
4254 | |||
4255 | # qhasm: diag3 ^= a6 | ||
4256 | # asm 1: pxor <a6=int6464#5,<diag3=int6464#4 | ||
4257 | # asm 2: pxor <a6=%xmm4,<diag3=%xmm3 | ||
4258 | pxor %xmm4,%xmm3 | ||
4259 | |||
4260 | # qhasm: diag2 <<<= 64 | ||
4261 | # asm 1: pshufd $0x4e,<diag2=int6464#3,<diag2=int6464#3 | ||
4262 | # asm 2: pshufd $0x4e,<diag2=%xmm2,<diag2=%xmm2 | ||
4263 | pshufd $0x4e,%xmm2,%xmm2 | ||
4264 | |||
4265 | # qhasm: diag3 ^= b6 | ||
4266 | # asm 1: pxor <b6=int6464#7,<diag3=int6464#4 | ||
4267 | # asm 2: pxor <b6=%xmm6,<diag3=%xmm3 | ||
4268 | pxor %xmm6,%xmm3 | ||
4269 | |||
4270 | # qhasm: uint32323232 a7 += diag3 | ||
4271 | # asm 1: paddd <diag3=int6464#4,<a7=int6464#6 | ||
4272 | # asm 2: paddd <diag3=%xmm3,<a7=%xmm5 | ||
4273 | paddd %xmm3,%xmm5 | ||
4274 | |||
4275 | # qhasm: a0 = diag1 | ||
4276 | # asm 1: movdqa <diag1=int6464#2,>a0=int6464#5 | ||
4277 | # asm 2: movdqa <diag1=%xmm1,>a0=%xmm4 | ||
4278 | movdqa %xmm1,%xmm4 | ||
4279 | |||
4280 | # qhasm: b7 = a7 | ||
4281 | # asm 1: movdqa <a7=int6464#6,>b7=int6464#7 | ||
4282 | # asm 2: movdqa <a7=%xmm5,>b7=%xmm6 | ||
4283 | movdqa %xmm5,%xmm6 | ||
4284 | |||
4285 | # qhasm: uint32323232 a7 <<= 18 | ||
4286 | # asm 1: pslld $18,<a7=int6464#6 | ||
4287 | # asm 2: pslld $18,<a7=%xmm5 | ||
4288 | pslld $18,%xmm5 | ||
4289 | |||
4290 | # qhasm: uint32323232 b7 >>= 14 | ||
4291 | # asm 1: psrld $14,<b7=int6464#7 | ||
4292 | # asm 2: psrld $14,<b7=%xmm6 | ||
4293 | psrld $14,%xmm6 | ||
4294 | |||
4295 | # qhasm: diag0 ^= a7 | ||
4296 | # asm 1: pxor <a7=int6464#6,<diag0=int6464#1 | ||
4297 | # asm 2: pxor <a7=%xmm5,<diag0=%xmm0 | ||
4298 | pxor %xmm5,%xmm0 | ||
4299 | |||
4300 | # qhasm: diag3 <<<= 96 | ||
4301 | # asm 1: pshufd $0x39,<diag3=int6464#4,<diag3=int6464#4 | ||
4302 | # asm 2: pshufd $0x39,<diag3=%xmm3,<diag3=%xmm3 | ||
4303 | pshufd $0x39,%xmm3,%xmm3 | ||
4304 | |||
4305 | # qhasm: diag0 ^= b7 | ||
4306 | # asm 1: pxor <b7=int6464#7,<diag0=int6464#1 | ||
4307 | # asm 2: pxor <b7=%xmm6,<diag0=%xmm0 | ||
4308 | pxor %xmm6,%xmm0 | ||
4309 | |||
4310 | # qhasm: uint32323232 a0 += diag0 | ||
4311 | # asm 1: paddd <diag0=int6464#1,<a0=int6464#5 | ||
4312 | # asm 2: paddd <diag0=%xmm0,<a0=%xmm4 | ||
4313 | paddd %xmm0,%xmm4 | ||
4314 | |||
4315 | # qhasm: a1 = diag0 | ||
4316 | # asm 1: movdqa <diag0=int6464#1,>a1=int6464#6 | ||
4317 | # asm 2: movdqa <diag0=%xmm0,>a1=%xmm5 | ||
4318 | movdqa %xmm0,%xmm5 | ||
4319 | |||
4320 | # qhasm: b0 = a0 | ||
4321 | # asm 1: movdqa <a0=int6464#5,>b0=int6464#7 | ||
4322 | # asm 2: movdqa <a0=%xmm4,>b0=%xmm6 | ||
4323 | movdqa %xmm4,%xmm6 | ||
4324 | |||
4325 | # qhasm: uint32323232 a0 <<= 7 | ||
4326 | # asm 1: pslld $7,<a0=int6464#5 | ||
4327 | # asm 2: pslld $7,<a0=%xmm4 | ||
4328 | pslld $7,%xmm4 | ||
4329 | |||
4330 | # qhasm: uint32323232 b0 >>= 25 | ||
4331 | # asm 1: psrld $25,<b0=int6464#7 | ||
4332 | # asm 2: psrld $25,<b0=%xmm6 | ||
4333 | psrld $25,%xmm6 | ||
4334 | |||
4335 | # qhasm: diag3 ^= a0 | ||
4336 | # asm 1: pxor <a0=int6464#5,<diag3=int6464#4 | ||
4337 | # asm 2: pxor <a0=%xmm4,<diag3=%xmm3 | ||
4338 | pxor %xmm4,%xmm3 | ||
4339 | |||
4340 | # qhasm: diag3 ^= b0 | ||
4341 | # asm 1: pxor <b0=int6464#7,<diag3=int6464#4 | ||
4342 | # asm 2: pxor <b0=%xmm6,<diag3=%xmm3 | ||
4343 | pxor %xmm6,%xmm3 | ||
4344 | |||
4345 | # qhasm: uint32323232 a1 += diag3 | ||
4346 | # asm 1: paddd <diag3=int6464#4,<a1=int6464#6 | ||
4347 | # asm 2: paddd <diag3=%xmm3,<a1=%xmm5 | ||
4348 | paddd %xmm3,%xmm5 | ||
4349 | |||
4350 | # qhasm: a2 = diag3 | ||
4351 | # asm 1: movdqa <diag3=int6464#4,>a2=int6464#5 | ||
4352 | # asm 2: movdqa <diag3=%xmm3,>a2=%xmm4 | ||
4353 | movdqa %xmm3,%xmm4 | ||
4354 | |||
4355 | # qhasm: b1 = a1 | ||
4356 | # asm 1: movdqa <a1=int6464#6,>b1=int6464#7 | ||
4357 | # asm 2: movdqa <a1=%xmm5,>b1=%xmm6 | ||
4358 | movdqa %xmm5,%xmm6 | ||
4359 | |||
4360 | # qhasm: uint32323232 a1 <<= 9 | ||
4361 | # asm 1: pslld $9,<a1=int6464#6 | ||
4362 | # asm 2: pslld $9,<a1=%xmm5 | ||
4363 | pslld $9,%xmm5 | ||
4364 | |||
4365 | # qhasm: uint32323232 b1 >>= 23 | ||
4366 | # asm 1: psrld $23,<b1=int6464#7 | ||
4367 | # asm 2: psrld $23,<b1=%xmm6 | ||
4368 | psrld $23,%xmm6 | ||
4369 | |||
4370 | # qhasm: diag2 ^= a1 | ||
4371 | # asm 1: pxor <a1=int6464#6,<diag2=int6464#3 | ||
4372 | # asm 2: pxor <a1=%xmm5,<diag2=%xmm2 | ||
4373 | pxor %xmm5,%xmm2 | ||
4374 | |||
4375 | # qhasm: diag3 <<<= 32 | ||
4376 | # asm 1: pshufd $0x93,<diag3=int6464#4,<diag3=int6464#4 | ||
4377 | # asm 2: pshufd $0x93,<diag3=%xmm3,<diag3=%xmm3 | ||
4378 | pshufd $0x93,%xmm3,%xmm3 | ||
4379 | |||
4380 | # qhasm: diag2 ^= b1 | ||
4381 | # asm 1: pxor <b1=int6464#7,<diag2=int6464#3 | ||
4382 | # asm 2: pxor <b1=%xmm6,<diag2=%xmm2 | ||
4383 | pxor %xmm6,%xmm2 | ||
4384 | |||
4385 | # qhasm: uint32323232 a2 += diag2 | ||
4386 | # asm 1: paddd <diag2=int6464#3,<a2=int6464#5 | ||
4387 | # asm 2: paddd <diag2=%xmm2,<a2=%xmm4 | ||
4388 | paddd %xmm2,%xmm4 | ||
4389 | |||
4390 | # qhasm: a3 = diag2 | ||
4391 | # asm 1: movdqa <diag2=int6464#3,>a3=int6464#6 | ||
4392 | # asm 2: movdqa <diag2=%xmm2,>a3=%xmm5 | ||
4393 | movdqa %xmm2,%xmm5 | ||
4394 | |||
4395 | # qhasm: b2 = a2 | ||
4396 | # asm 1: movdqa <a2=int6464#5,>b2=int6464#7 | ||
4397 | # asm 2: movdqa <a2=%xmm4,>b2=%xmm6 | ||
4398 | movdqa %xmm4,%xmm6 | ||
4399 | |||
4400 | # qhasm: uint32323232 a2 <<= 13 | ||
4401 | # asm 1: pslld $13,<a2=int6464#5 | ||
4402 | # asm 2: pslld $13,<a2=%xmm4 | ||
4403 | pslld $13,%xmm4 | ||
4404 | |||
4405 | # qhasm: uint32323232 b2 >>= 19 | ||
4406 | # asm 1: psrld $19,<b2=int6464#7 | ||
4407 | # asm 2: psrld $19,<b2=%xmm6 | ||
4408 | psrld $19,%xmm6 | ||
4409 | |||
4410 | # qhasm: diag1 ^= a2 | ||
4411 | # asm 1: pxor <a2=int6464#5,<diag1=int6464#2 | ||
4412 | # asm 2: pxor <a2=%xmm4,<diag1=%xmm1 | ||
4413 | pxor %xmm4,%xmm1 | ||
4414 | |||
4415 | # qhasm: diag2 <<<= 64 | ||
4416 | # asm 1: pshufd $0x4e,<diag2=int6464#3,<diag2=int6464#3 | ||
4417 | # asm 2: pshufd $0x4e,<diag2=%xmm2,<diag2=%xmm2 | ||
4418 | pshufd $0x4e,%xmm2,%xmm2 | ||
4419 | |||
4420 | # qhasm: diag1 ^= b2 | ||
4421 | # asm 1: pxor <b2=int6464#7,<diag1=int6464#2 | ||
4422 | # asm 2: pxor <b2=%xmm6,<diag1=%xmm1 | ||
4423 | pxor %xmm6,%xmm1 | ||
4424 | |||
4425 | # qhasm: uint32323232 a3 += diag1 | ||
4426 | # asm 1: paddd <diag1=int6464#2,<a3=int6464#6 | ||
4427 | # asm 2: paddd <diag1=%xmm1,<a3=%xmm5 | ||
4428 | paddd %xmm1,%xmm5 | ||
4429 | |||
4430 | # qhasm: a4 = diag3 | ||
4431 | # asm 1: movdqa <diag3=int6464#4,>a4=int6464#5 | ||
4432 | # asm 2: movdqa <diag3=%xmm3,>a4=%xmm4 | ||
4433 | movdqa %xmm3,%xmm4 | ||
4434 | |||
4435 | # qhasm: b3 = a3 | ||
4436 | # asm 1: movdqa <a3=int6464#6,>b3=int6464#7 | ||
4437 | # asm 2: movdqa <a3=%xmm5,>b3=%xmm6 | ||
4438 | movdqa %xmm5,%xmm6 | ||
4439 | |||
4440 | # qhasm: uint32323232 a3 <<= 18 | ||
4441 | # asm 1: pslld $18,<a3=int6464#6 | ||
4442 | # asm 2: pslld $18,<a3=%xmm5 | ||
4443 | pslld $18,%xmm5 | ||
4444 | |||
4445 | # qhasm: uint32323232 b3 >>= 14 | ||
4446 | # asm 1: psrld $14,<b3=int6464#7 | ||
4447 | # asm 2: psrld $14,<b3=%xmm6 | ||
4448 | psrld $14,%xmm6 | ||
4449 | |||
4450 | # qhasm: diag0 ^= a3 | ||
4451 | # asm 1: pxor <a3=int6464#6,<diag0=int6464#1 | ||
4452 | # asm 2: pxor <a3=%xmm5,<diag0=%xmm0 | ||
4453 | pxor %xmm5,%xmm0 | ||
4454 | |||
4455 | # qhasm: diag1 <<<= 96 | ||
4456 | # asm 1: pshufd $0x39,<diag1=int6464#2,<diag1=int6464#2 | ||
4457 | # asm 2: pshufd $0x39,<diag1=%xmm1,<diag1=%xmm1 | ||
4458 | pshufd $0x39,%xmm1,%xmm1 | ||
4459 | |||
4460 | # qhasm: diag0 ^= b3 | ||
4461 | # asm 1: pxor <b3=int6464#7,<diag0=int6464#1 | ||
4462 | # asm 2: pxor <b3=%xmm6,<diag0=%xmm0 | ||
4463 | pxor %xmm6,%xmm0 | ||
4464 | |||
4465 | # qhasm: uint32323232 a4 += diag0 | ||
4466 | # asm 1: paddd <diag0=int6464#1,<a4=int6464#5 | ||
4467 | # asm 2: paddd <diag0=%xmm0,<a4=%xmm4 | ||
4468 | paddd %xmm0,%xmm4 | ||
4469 | |||
4470 | # qhasm: a5 = diag0 | ||
4471 | # asm 1: movdqa <diag0=int6464#1,>a5=int6464#6 | ||
4472 | # asm 2: movdqa <diag0=%xmm0,>a5=%xmm5 | ||
4473 | movdqa %xmm0,%xmm5 | ||
4474 | |||
4475 | # qhasm: b4 = a4 | ||
4476 | # asm 1: movdqa <a4=int6464#5,>b4=int6464#7 | ||
4477 | # asm 2: movdqa <a4=%xmm4,>b4=%xmm6 | ||
4478 | movdqa %xmm4,%xmm6 | ||
4479 | |||
4480 | # qhasm: uint32323232 a4 <<= 7 | ||
4481 | # asm 1: pslld $7,<a4=int6464#5 | ||
4482 | # asm 2: pslld $7,<a4=%xmm4 | ||
4483 | pslld $7,%xmm4 | ||
4484 | |||
4485 | # qhasm: uint32323232 b4 >>= 25 | ||
4486 | # asm 1: psrld $25,<b4=int6464#7 | ||
4487 | # asm 2: psrld $25,<b4=%xmm6 | ||
4488 | psrld $25,%xmm6 | ||
4489 | |||
4490 | # qhasm: diag1 ^= a4 | ||
4491 | # asm 1: pxor <a4=int6464#5,<diag1=int6464#2 | ||
4492 | # asm 2: pxor <a4=%xmm4,<diag1=%xmm1 | ||
4493 | pxor %xmm4,%xmm1 | ||
4494 | |||
4495 | # qhasm: diag1 ^= b4 | ||
4496 | # asm 1: pxor <b4=int6464#7,<diag1=int6464#2 | ||
4497 | # asm 2: pxor <b4=%xmm6,<diag1=%xmm1 | ||
4498 | pxor %xmm6,%xmm1 | ||
4499 | |||
4500 | # qhasm: uint32323232 a5 += diag1 | ||
4501 | # asm 1: paddd <diag1=int6464#2,<a5=int6464#6 | ||
4502 | # asm 2: paddd <diag1=%xmm1,<a5=%xmm5 | ||
4503 | paddd %xmm1,%xmm5 | ||
4504 | |||
4505 | # qhasm: a6 = diag1 | ||
4506 | # asm 1: movdqa <diag1=int6464#2,>a6=int6464#5 | ||
4507 | # asm 2: movdqa <diag1=%xmm1,>a6=%xmm4 | ||
4508 | movdqa %xmm1,%xmm4 | ||
4509 | |||
4510 | # qhasm: b5 = a5 | ||
4511 | # asm 1: movdqa <a5=int6464#6,>b5=int6464#7 | ||
4512 | # asm 2: movdqa <a5=%xmm5,>b5=%xmm6 | ||
4513 | movdqa %xmm5,%xmm6 | ||
4514 | |||
4515 | # qhasm: uint32323232 a5 <<= 9 | ||
4516 | # asm 1: pslld $9,<a5=int6464#6 | ||
4517 | # asm 2: pslld $9,<a5=%xmm5 | ||
4518 | pslld $9,%xmm5 | ||
4519 | |||
4520 | # qhasm: uint32323232 b5 >>= 23 | ||
4521 | # asm 1: psrld $23,<b5=int6464#7 | ||
4522 | # asm 2: psrld $23,<b5=%xmm6 | ||
4523 | psrld $23,%xmm6 | ||
4524 | |||
4525 | # qhasm: diag2 ^= a5 | ||
4526 | # asm 1: pxor <a5=int6464#6,<diag2=int6464#3 | ||
4527 | # asm 2: pxor <a5=%xmm5,<diag2=%xmm2 | ||
4528 | pxor %xmm5,%xmm2 | ||
4529 | |||
4530 | # qhasm: diag1 <<<= 32 | ||
4531 | # asm 1: pshufd $0x93,<diag1=int6464#2,<diag1=int6464#2 | ||
4532 | # asm 2: pshufd $0x93,<diag1=%xmm1,<diag1=%xmm1 | ||
4533 | pshufd $0x93,%xmm1,%xmm1 | ||
4534 | |||
4535 | # qhasm: diag2 ^= b5 | ||
4536 | # asm 1: pxor <b5=int6464#7,<diag2=int6464#3 | ||
4537 | # asm 2: pxor <b5=%xmm6,<diag2=%xmm2 | ||
4538 | pxor %xmm6,%xmm2 | ||
4539 | |||
4540 | # qhasm: uint32323232 a6 += diag2 | ||
4541 | # asm 1: paddd <diag2=int6464#3,<a6=int6464#5 | ||
4542 | # asm 2: paddd <diag2=%xmm2,<a6=%xmm4 | ||
4543 | paddd %xmm2,%xmm4 | ||
4544 | |||
4545 | # qhasm: a7 = diag2 | ||
4546 | # asm 1: movdqa <diag2=int6464#3,>a7=int6464#6 | ||
4547 | # asm 2: movdqa <diag2=%xmm2,>a7=%xmm5 | ||
4548 | movdqa %xmm2,%xmm5 | ||
4549 | |||
4550 | # qhasm: b6 = a6 | ||
4551 | # asm 1: movdqa <a6=int6464#5,>b6=int6464#7 | ||
4552 | # asm 2: movdqa <a6=%xmm4,>b6=%xmm6 | ||
4553 | movdqa %xmm4,%xmm6 | ||
4554 | |||
4555 | # qhasm: uint32323232 a6 <<= 13 | ||
4556 | # asm 1: pslld $13,<a6=int6464#5 | ||
4557 | # asm 2: pslld $13,<a6=%xmm4 | ||
4558 | pslld $13,%xmm4 | ||
4559 | |||
4560 | # qhasm: uint32323232 b6 >>= 19 | ||
4561 | # asm 1: psrld $19,<b6=int6464#7 | ||
4562 | # asm 2: psrld $19,<b6=%xmm6 | ||
4563 | psrld $19,%xmm6 | ||
4564 | |||
4565 | # qhasm: diag3 ^= a6 | ||
4566 | # asm 1: pxor <a6=int6464#5,<diag3=int6464#4 | ||
4567 | # asm 2: pxor <a6=%xmm4,<diag3=%xmm3 | ||
4568 | pxor %xmm4,%xmm3 | ||
4569 | |||
4570 | # qhasm: diag2 <<<= 64 | ||
4571 | # asm 1: pshufd $0x4e,<diag2=int6464#3,<diag2=int6464#3 | ||
4572 | # asm 2: pshufd $0x4e,<diag2=%xmm2,<diag2=%xmm2 | ||
4573 | pshufd $0x4e,%xmm2,%xmm2 | ||
4574 | |||
4575 | # qhasm: diag3 ^= b6 | ||
4576 | # asm 1: pxor <b6=int6464#7,<diag3=int6464#4 | ||
4577 | # asm 2: pxor <b6=%xmm6,<diag3=%xmm3 | ||
4578 | pxor %xmm6,%xmm3 | ||
4579 | |||
4580 | # qhasm: unsigned>? i -= 4 | ||
4581 | # asm 1: sub $4,<i=int32#1 | ||
4582 | # asm 2: sub $4,<i=%eax | ||
4583 | sub $4,%eax | ||
4584 | |||
4585 | # qhasm: uint32323232 a7 += diag3 | ||
4586 | # asm 1: paddd <diag3=int6464#4,<a7=int6464#6 | ||
4587 | # asm 2: paddd <diag3=%xmm3,<a7=%xmm5 | ||
4588 | paddd %xmm3,%xmm5 | ||
4589 | |||
4590 | # qhasm: a0 = diag1 | ||
4591 | # asm 1: movdqa <diag1=int6464#2,>a0=int6464#5 | ||
4592 | # asm 2: movdqa <diag1=%xmm1,>a0=%xmm4 | ||
4593 | movdqa %xmm1,%xmm4 | ||
4594 | |||
4595 | # qhasm: b7 = a7 | ||
4596 | # asm 1: movdqa <a7=int6464#6,>b7=int6464#7 | ||
4597 | # asm 2: movdqa <a7=%xmm5,>b7=%xmm6 | ||
4598 | movdqa %xmm5,%xmm6 | ||
4599 | |||
4600 | # qhasm: uint32323232 a7 <<= 18 | ||
4601 | # asm 1: pslld $18,<a7=int6464#6 | ||
4602 | # asm 2: pslld $18,<a7=%xmm5 | ||
4603 | pslld $18,%xmm5 | ||
4604 | |||
4605 | # qhasm: b0 = 0 | ||
4606 | # asm 1: pxor >b0=int6464#8,>b0=int6464#8 | ||
4607 | # asm 2: pxor >b0=%xmm7,>b0=%xmm7 | ||
4608 | pxor %xmm7,%xmm7 | ||
4609 | |||
4610 | # qhasm: uint32323232 b7 >>= 14 | ||
4611 | # asm 1: psrld $14,<b7=int6464#7 | ||
4612 | # asm 2: psrld $14,<b7=%xmm6 | ||
4613 | psrld $14,%xmm6 | ||
4614 | |||
4615 | # qhasm: diag0 ^= a7 | ||
4616 | # asm 1: pxor <a7=int6464#6,<diag0=int6464#1 | ||
4617 | # asm 2: pxor <a7=%xmm5,<diag0=%xmm0 | ||
4618 | pxor %xmm5,%xmm0 | ||
4619 | |||
4620 | # qhasm: diag3 <<<= 96 | ||
4621 | # asm 1: pshufd $0x39,<diag3=int6464#4,<diag3=int6464#4 | ||
4622 | # asm 2: pshufd $0x39,<diag3=%xmm3,<diag3=%xmm3 | ||
4623 | pshufd $0x39,%xmm3,%xmm3 | ||
4624 | |||
4625 | # qhasm: diag0 ^= b7 | ||
4626 | # asm 1: pxor <b7=int6464#7,<diag0=int6464#1 | ||
4627 | # asm 2: pxor <b7=%xmm6,<diag0=%xmm0 | ||
4628 | pxor %xmm6,%xmm0 | ||
4629 | # comment:fp stack unchanged by jump | ||
4630 | |||
4631 | # qhasm: goto mainloop2 if unsigned> | ||
4632 | ja ._mainloop2 | ||
4633 | |||
4634 | # qhasm: uint32323232 diag0 += x0 | ||
4635 | # asm 1: paddd <x0=stack128#3,<diag0=int6464#1 | ||
4636 | # asm 2: paddd <x0=64(%esp),<diag0=%xmm0 | ||
4637 | paddd 64(%esp),%xmm0 | ||
4638 | |||
4639 | # qhasm: uint32323232 diag1 += x1 | ||
4640 | # asm 1: paddd <x1=stack128#2,<diag1=int6464#2 | ||
4641 | # asm 2: paddd <x1=48(%esp),<diag1=%xmm1 | ||
4642 | paddd 48(%esp),%xmm1 | ||
4643 | |||
4644 | # qhasm: uint32323232 diag2 += x2 | ||
4645 | # asm 1: paddd <x2=stack128#4,<diag2=int6464#3 | ||
4646 | # asm 2: paddd <x2=80(%esp),<diag2=%xmm2 | ||
4647 | paddd 80(%esp),%xmm2 | ||
4648 | |||
4649 | # qhasm: uint32323232 diag3 += x3 | ||
4650 | # asm 1: paddd <x3=stack128#1,<diag3=int6464#4 | ||
4651 | # asm 2: paddd <x3=32(%esp),<diag3=%xmm3 | ||
4652 | paddd 32(%esp),%xmm3 | ||
4653 | |||
4654 | # qhasm: in0 = diag0 | ||
4655 | # asm 1: movd <diag0=int6464#1,>in0=int32#1 | ||
4656 | # asm 2: movd <diag0=%xmm0,>in0=%eax | ||
4657 | movd %xmm0,%eax | ||
4658 | |||
4659 | # qhasm: in12 = diag1 | ||
4660 | # asm 1: movd <diag1=int6464#2,>in12=int32#2 | ||
4661 | # asm 2: movd <diag1=%xmm1,>in12=%ecx | ||
4662 | movd %xmm1,%ecx | ||
4663 | |||
4664 | # qhasm: in8 = diag2 | ||
4665 | # asm 1: movd <diag2=int6464#3,>in8=int32#3 | ||
4666 | # asm 2: movd <diag2=%xmm2,>in8=%edx | ||
4667 | movd %xmm2,%edx | ||
4668 | |||
4669 | # qhasm: in4 = diag3 | ||
4670 | # asm 1: movd <diag3=int6464#4,>in4=int32#4 | ||
4671 | # asm 2: movd <diag3=%xmm3,>in4=%ebx | ||
4672 | movd %xmm3,%ebx | ||
4673 | |||
4674 | # qhasm: diag0 <<<= 96 | ||
4675 | # asm 1: pshufd $0x39,<diag0=int6464#1,<diag0=int6464#1 | ||
4676 | # asm 2: pshufd $0x39,<diag0=%xmm0,<diag0=%xmm0 | ||
4677 | pshufd $0x39,%xmm0,%xmm0 | ||
4678 | |||
4679 | # qhasm: diag1 <<<= 96 | ||
4680 | # asm 1: pshufd $0x39,<diag1=int6464#2,<diag1=int6464#2 | ||
4681 | # asm 2: pshufd $0x39,<diag1=%xmm1,<diag1=%xmm1 | ||
4682 | pshufd $0x39,%xmm1,%xmm1 | ||
4683 | |||
4684 | # qhasm: diag2 <<<= 96 | ||
4685 | # asm 1: pshufd $0x39,<diag2=int6464#3,<diag2=int6464#3 | ||
4686 | # asm 2: pshufd $0x39,<diag2=%xmm2,<diag2=%xmm2 | ||
4687 | pshufd $0x39,%xmm2,%xmm2 | ||
4688 | |||
4689 | # qhasm: diag3 <<<= 96 | ||
4690 | # asm 1: pshufd $0x39,<diag3=int6464#4,<diag3=int6464#4 | ||
4691 | # asm 2: pshufd $0x39,<diag3=%xmm3,<diag3=%xmm3 | ||
4692 | pshufd $0x39,%xmm3,%xmm3 | ||
4693 | |||
4694 | # qhasm: in0 ^= *(uint32 *) (m + 0) | ||
4695 | # asm 1: xorl 0(<m=int32#5),<in0=int32#1 | ||
4696 | # asm 2: xorl 0(<m=%esi),<in0=%eax | ||
4697 | xorl 0(%esi),%eax | ||
4698 | |||
4699 | # qhasm: in12 ^= *(uint32 *) (m + 48) | ||
4700 | # asm 1: xorl 48(<m=int32#5),<in12=int32#2 | ||
4701 | # asm 2: xorl 48(<m=%esi),<in12=%ecx | ||
4702 | xorl 48(%esi),%ecx | ||
4703 | |||
4704 | # qhasm: in8 ^= *(uint32 *) (m + 32) | ||
4705 | # asm 1: xorl 32(<m=int32#5),<in8=int32#3 | ||
4706 | # asm 2: xorl 32(<m=%esi),<in8=%edx | ||
4707 | xorl 32(%esi),%edx | ||
4708 | |||
4709 | # qhasm: in4 ^= *(uint32 *) (m + 16) | ||
4710 | # asm 1: xorl 16(<m=int32#5),<in4=int32#4 | ||
4711 | # asm 2: xorl 16(<m=%esi),<in4=%ebx | ||
4712 | xorl 16(%esi),%ebx | ||
4713 | |||
4714 | # qhasm: *(uint32 *) (out + 0) = in0 | ||
4715 | # asm 1: movl <in0=int32#1,0(<out=int32#6) | ||
4716 | # asm 2: movl <in0=%eax,0(<out=%edi) | ||
4717 | movl %eax,0(%edi) | ||
4718 | |||
4719 | # qhasm: *(uint32 *) (out + 48) = in12 | ||
4720 | # asm 1: movl <in12=int32#2,48(<out=int32#6) | ||
4721 | # asm 2: movl <in12=%ecx,48(<out=%edi) | ||
4722 | movl %ecx,48(%edi) | ||
4723 | |||
4724 | # qhasm: *(uint32 *) (out + 32) = in8 | ||
4725 | # asm 1: movl <in8=int32#3,32(<out=int32#6) | ||
4726 | # asm 2: movl <in8=%edx,32(<out=%edi) | ||
4727 | movl %edx,32(%edi) | ||
4728 | |||
4729 | # qhasm: *(uint32 *) (out + 16) = in4 | ||
4730 | # asm 1: movl <in4=int32#4,16(<out=int32#6) | ||
4731 | # asm 2: movl <in4=%ebx,16(<out=%edi) | ||
4732 | movl %ebx,16(%edi) | ||
4733 | |||
4734 | # qhasm: in5 = diag0 | ||
4735 | # asm 1: movd <diag0=int6464#1,>in5=int32#1 | ||
4736 | # asm 2: movd <diag0=%xmm0,>in5=%eax | ||
4737 | movd %xmm0,%eax | ||
4738 | |||
4739 | # qhasm: in1 = diag1 | ||
4740 | # asm 1: movd <diag1=int6464#2,>in1=int32#2 | ||
4741 | # asm 2: movd <diag1=%xmm1,>in1=%ecx | ||
4742 | movd %xmm1,%ecx | ||
4743 | |||
4744 | # qhasm: in13 = diag2 | ||
4745 | # asm 1: movd <diag2=int6464#3,>in13=int32#3 | ||
4746 | # asm 2: movd <diag2=%xmm2,>in13=%edx | ||
4747 | movd %xmm2,%edx | ||
4748 | |||
4749 | # qhasm: in9 = diag3 | ||
4750 | # asm 1: movd <diag3=int6464#4,>in9=int32#4 | ||
4751 | # asm 2: movd <diag3=%xmm3,>in9=%ebx | ||
4752 | movd %xmm3,%ebx | ||
4753 | |||
4754 | # qhasm: diag0 <<<= 96 | ||
4755 | # asm 1: pshufd $0x39,<diag0=int6464#1,<diag0=int6464#1 | ||
4756 | # asm 2: pshufd $0x39,<diag0=%xmm0,<diag0=%xmm0 | ||
4757 | pshufd $0x39,%xmm0,%xmm0 | ||
4758 | |||
4759 | # qhasm: diag1 <<<= 96 | ||
4760 | # asm 1: pshufd $0x39,<diag1=int6464#2,<diag1=int6464#2 | ||
4761 | # asm 2: pshufd $0x39,<diag1=%xmm1,<diag1=%xmm1 | ||
4762 | pshufd $0x39,%xmm1,%xmm1 | ||
4763 | |||
4764 | # qhasm: diag2 <<<= 96 | ||
4765 | # asm 1: pshufd $0x39,<diag2=int6464#3,<diag2=int6464#3 | ||
4766 | # asm 2: pshufd $0x39,<diag2=%xmm2,<diag2=%xmm2 | ||
4767 | pshufd $0x39,%xmm2,%xmm2 | ||
4768 | |||
4769 | # qhasm: diag3 <<<= 96 | ||
4770 | # asm 1: pshufd $0x39,<diag3=int6464#4,<diag3=int6464#4 | ||
4771 | # asm 2: pshufd $0x39,<diag3=%xmm3,<diag3=%xmm3 | ||
4772 | pshufd $0x39,%xmm3,%xmm3 | ||
4773 | |||
4774 | # qhasm: in5 ^= *(uint32 *) (m + 20) | ||
4775 | # asm 1: xorl 20(<m=int32#5),<in5=int32#1 | ||
4776 | # asm 2: xorl 20(<m=%esi),<in5=%eax | ||
4777 | xorl 20(%esi),%eax | ||
4778 | |||
4779 | # qhasm: in1 ^= *(uint32 *) (m + 4) | ||
4780 | # asm 1: xorl 4(<m=int32#5),<in1=int32#2 | ||
4781 | # asm 2: xorl 4(<m=%esi),<in1=%ecx | ||
4782 | xorl 4(%esi),%ecx | ||
4783 | |||
4784 | # qhasm: in13 ^= *(uint32 *) (m + 52) | ||
4785 | # asm 1: xorl 52(<m=int32#5),<in13=int32#3 | ||
4786 | # asm 2: xorl 52(<m=%esi),<in13=%edx | ||
4787 | xorl 52(%esi),%edx | ||
4788 | |||
4789 | # qhasm: in9 ^= *(uint32 *) (m + 36) | ||
4790 | # asm 1: xorl 36(<m=int32#5),<in9=int32#4 | ||
4791 | # asm 2: xorl 36(<m=%esi),<in9=%ebx | ||
4792 | xorl 36(%esi),%ebx | ||
4793 | |||
4794 | # qhasm: *(uint32 *) (out + 20) = in5 | ||
4795 | # asm 1: movl <in5=int32#1,20(<out=int32#6) | ||
4796 | # asm 2: movl <in5=%eax,20(<out=%edi) | ||
4797 | movl %eax,20(%edi) | ||
4798 | |||
4799 | # qhasm: *(uint32 *) (out + 4) = in1 | ||
4800 | # asm 1: movl <in1=int32#2,4(<out=int32#6) | ||
4801 | # asm 2: movl <in1=%ecx,4(<out=%edi) | ||
4802 | movl %ecx,4(%edi) | ||
4803 | |||
4804 | # qhasm: *(uint32 *) (out + 52) = in13 | ||
4805 | # asm 1: movl <in13=int32#3,52(<out=int32#6) | ||
4806 | # asm 2: movl <in13=%edx,52(<out=%edi) | ||
4807 | movl %edx,52(%edi) | ||
4808 | |||
4809 | # qhasm: *(uint32 *) (out + 36) = in9 | ||
4810 | # asm 1: movl <in9=int32#4,36(<out=int32#6) | ||
4811 | # asm 2: movl <in9=%ebx,36(<out=%edi) | ||
4812 | movl %ebx,36(%edi) | ||
4813 | |||
4814 | # qhasm: in10 = diag0 | ||
4815 | # asm 1: movd <diag0=int6464#1,>in10=int32#1 | ||
4816 | # asm 2: movd <diag0=%xmm0,>in10=%eax | ||
4817 | movd %xmm0,%eax | ||
4818 | |||
4819 | # qhasm: in6 = diag1 | ||
4820 | # asm 1: movd <diag1=int6464#2,>in6=int32#2 | ||
4821 | # asm 2: movd <diag1=%xmm1,>in6=%ecx | ||
4822 | movd %xmm1,%ecx | ||
4823 | |||
4824 | # qhasm: in2 = diag2 | ||
4825 | # asm 1: movd <diag2=int6464#3,>in2=int32#3 | ||
4826 | # asm 2: movd <diag2=%xmm2,>in2=%edx | ||
4827 | movd %xmm2,%edx | ||
4828 | |||
4829 | # qhasm: in14 = diag3 | ||
4830 | # asm 1: movd <diag3=int6464#4,>in14=int32#4 | ||
4831 | # asm 2: movd <diag3=%xmm3,>in14=%ebx | ||
4832 | movd %xmm3,%ebx | ||
4833 | |||
4834 | # qhasm: diag0 <<<= 96 | ||
4835 | # asm 1: pshufd $0x39,<diag0=int6464#1,<diag0=int6464#1 | ||
4836 | # asm 2: pshufd $0x39,<diag0=%xmm0,<diag0=%xmm0 | ||
4837 | pshufd $0x39,%xmm0,%xmm0 | ||
4838 | |||
4839 | # qhasm: diag1 <<<= 96 | ||
4840 | # asm 1: pshufd $0x39,<diag1=int6464#2,<diag1=int6464#2 | ||
4841 | # asm 2: pshufd $0x39,<diag1=%xmm1,<diag1=%xmm1 | ||
4842 | pshufd $0x39,%xmm1,%xmm1 | ||
4843 | |||
4844 | # qhasm: diag2 <<<= 96 | ||
4845 | # asm 1: pshufd $0x39,<diag2=int6464#3,<diag2=int6464#3 | ||
4846 | # asm 2: pshufd $0x39,<diag2=%xmm2,<diag2=%xmm2 | ||
4847 | pshufd $0x39,%xmm2,%xmm2 | ||
4848 | |||
4849 | # qhasm: diag3 <<<= 96 | ||
4850 | # asm 1: pshufd $0x39,<diag3=int6464#4,<diag3=int6464#4 | ||
4851 | # asm 2: pshufd $0x39,<diag3=%xmm3,<diag3=%xmm3 | ||
4852 | pshufd $0x39,%xmm3,%xmm3 | ||
4853 | |||
4854 | # qhasm: in10 ^= *(uint32 *) (m + 40) | ||
4855 | # asm 1: xorl 40(<m=int32#5),<in10=int32#1 | ||
4856 | # asm 2: xorl 40(<m=%esi),<in10=%eax | ||
4857 | xorl 40(%esi),%eax | ||
4858 | |||
4859 | # qhasm: in6 ^= *(uint32 *) (m + 24) | ||
4860 | # asm 1: xorl 24(<m=int32#5),<in6=int32#2 | ||
4861 | # asm 2: xorl 24(<m=%esi),<in6=%ecx | ||
4862 | xorl 24(%esi),%ecx | ||
4863 | |||
4864 | # qhasm: in2 ^= *(uint32 *) (m + 8) | ||
4865 | # asm 1: xorl 8(<m=int32#5),<in2=int32#3 | ||
4866 | # asm 2: xorl 8(<m=%esi),<in2=%edx | ||
4867 | xorl 8(%esi),%edx | ||
4868 | |||
4869 | # qhasm: in14 ^= *(uint32 *) (m + 56) | ||
4870 | # asm 1: xorl 56(<m=int32#5),<in14=int32#4 | ||
4871 | # asm 2: xorl 56(<m=%esi),<in14=%ebx | ||
4872 | xorl 56(%esi),%ebx | ||
4873 | |||
4874 | # qhasm: *(uint32 *) (out + 40) = in10 | ||
4875 | # asm 1: movl <in10=int32#1,40(<out=int32#6) | ||
4876 | # asm 2: movl <in10=%eax,40(<out=%edi) | ||
4877 | movl %eax,40(%edi) | ||
4878 | |||
4879 | # qhasm: *(uint32 *) (out + 24) = in6 | ||
4880 | # asm 1: movl <in6=int32#2,24(<out=int32#6) | ||
4881 | # asm 2: movl <in6=%ecx,24(<out=%edi) | ||
4882 | movl %ecx,24(%edi) | ||
4883 | |||
4884 | # qhasm: *(uint32 *) (out + 8) = in2 | ||
4885 | # asm 1: movl <in2=int32#3,8(<out=int32#6) | ||
4886 | # asm 2: movl <in2=%edx,8(<out=%edi) | ||
4887 | movl %edx,8(%edi) | ||
4888 | |||
4889 | # qhasm: *(uint32 *) (out + 56) = in14 | ||
4890 | # asm 1: movl <in14=int32#4,56(<out=int32#6) | ||
4891 | # asm 2: movl <in14=%ebx,56(<out=%edi) | ||
4892 | movl %ebx,56(%edi) | ||
4893 | |||
4894 | # qhasm: in15 = diag0 | ||
4895 | # asm 1: movd <diag0=int6464#1,>in15=int32#1 | ||
4896 | # asm 2: movd <diag0=%xmm0,>in15=%eax | ||
4897 | movd %xmm0,%eax | ||
4898 | |||
4899 | # qhasm: in11 = diag1 | ||
4900 | # asm 1: movd <diag1=int6464#2,>in11=int32#2 | ||
4901 | # asm 2: movd <diag1=%xmm1,>in11=%ecx | ||
4902 | movd %xmm1,%ecx | ||
4903 | |||
4904 | # qhasm: in7 = diag2 | ||
4905 | # asm 1: movd <diag2=int6464#3,>in7=int32#3 | ||
4906 | # asm 2: movd <diag2=%xmm2,>in7=%edx | ||
4907 | movd %xmm2,%edx | ||
4908 | |||
4909 | # qhasm: in3 = diag3 | ||
4910 | # asm 1: movd <diag3=int6464#4,>in3=int32#4 | ||
4911 | # asm 2: movd <diag3=%xmm3,>in3=%ebx | ||
4912 | movd %xmm3,%ebx | ||
4913 | |||
4914 | # qhasm: in15 ^= *(uint32 *) (m + 60) | ||
4915 | # asm 1: xorl 60(<m=int32#5),<in15=int32#1 | ||
4916 | # asm 2: xorl 60(<m=%esi),<in15=%eax | ||
4917 | xorl 60(%esi),%eax | ||
4918 | |||
4919 | # qhasm: in11 ^= *(uint32 *) (m + 44) | ||
4920 | # asm 1: xorl 44(<m=int32#5),<in11=int32#2 | ||
4921 | # asm 2: xorl 44(<m=%esi),<in11=%ecx | ||
4922 | xorl 44(%esi),%ecx | ||
4923 | |||
4924 | # qhasm: in7 ^= *(uint32 *) (m + 28) | ||
4925 | # asm 1: xorl 28(<m=int32#5),<in7=int32#3 | ||
4926 | # asm 2: xorl 28(<m=%esi),<in7=%edx | ||
4927 | xorl 28(%esi),%edx | ||
4928 | |||
4929 | # qhasm: in3 ^= *(uint32 *) (m + 12) | ||
4930 | # asm 1: xorl 12(<m=int32#5),<in3=int32#4 | ||
4931 | # asm 2: xorl 12(<m=%esi),<in3=%ebx | ||
4932 | xorl 12(%esi),%ebx | ||
4933 | |||
4934 | # qhasm: *(uint32 *) (out + 60) = in15 | ||
4935 | # asm 1: movl <in15=int32#1,60(<out=int32#6) | ||
4936 | # asm 2: movl <in15=%eax,60(<out=%edi) | ||
4937 | movl %eax,60(%edi) | ||
4938 | |||
4939 | # qhasm: *(uint32 *) (out + 44) = in11 | ||
4940 | # asm 1: movl <in11=int32#2,44(<out=int32#6) | ||
4941 | # asm 2: movl <in11=%ecx,44(<out=%edi) | ||
4942 | movl %ecx,44(%edi) | ||
4943 | |||
4944 | # qhasm: *(uint32 *) (out + 28) = in7 | ||
4945 | # asm 1: movl <in7=int32#3,28(<out=int32#6) | ||
4946 | # asm 2: movl <in7=%edx,28(<out=%edi) | ||
4947 | movl %edx,28(%edi) | ||
4948 | |||
4949 | # qhasm: *(uint32 *) (out + 12) = in3 | ||
4950 | # asm 1: movl <in3=int32#4,12(<out=int32#6) | ||
4951 | # asm 2: movl <in3=%ebx,12(<out=%edi) | ||
4952 | movl %ebx,12(%edi) | ||
4953 | |||
4954 | # qhasm: bytes = bytes_stack | ||
4955 | # asm 1: movl <bytes_stack=stack32#7,>bytes=int32#1 | ||
4956 | # asm 2: movl <bytes_stack=24(%esp),>bytes=%eax | ||
4957 | movl 24(%esp),%eax | ||
4958 | |||
4959 | # qhasm: in8 = ((uint32 *)&x2)[0] | ||
4960 | # asm 1: movl <x2=stack128#4,>in8=int32#2 | ||
4961 | # asm 2: movl <x2=80(%esp),>in8=%ecx | ||
4962 | movl 80(%esp),%ecx | ||
4963 | |||
4964 | # qhasm: in9 = ((uint32 *)&x3)[1] | ||
4965 | # asm 1: movl 4+<x3=stack128#1,>in9=int32#3 | ||
4966 | # asm 2: movl 4+<x3=32(%esp),>in9=%edx | ||
4967 | movl 4+32(%esp),%edx | ||
4968 | |||
4969 | # qhasm: carry? in8 += 1 | ||
4970 | # asm 1: add $1,<in8=int32#2 | ||
4971 | # asm 2: add $1,<in8=%ecx | ||
4972 | add $1,%ecx | ||
4973 | |||
4974 | # qhasm: in9 += 0 + carry | ||
4975 | # asm 1: adc $0,<in9=int32#3 | ||
4976 | # asm 2: adc $0,<in9=%edx | ||
4977 | adc $0,%edx | ||
4978 | |||
4979 | # qhasm: ((uint32 *)&x2)[0] = in8 | ||
4980 | # asm 1: movl <in8=int32#2,>x2=stack128#4 | ||
4981 | # asm 2: movl <in8=%ecx,>x2=80(%esp) | ||
4982 | movl %ecx,80(%esp) | ||
4983 | |||
4984 | # qhasm: ((uint32 *)&x3)[1] = in9 | ||
4985 | # asm 1: movl <in9=int32#3,4+<x3=stack128#1 | ||
4986 | # asm 2: movl <in9=%edx,4+<x3=32(%esp) | ||
4987 | movl %edx,4+32(%esp) | ||
4988 | |||
4989 | # qhasm: unsigned>? unsigned<? bytes - 64 | ||
4990 | # asm 1: cmp $64,<bytes=int32#1 | ||
4991 | # asm 2: cmp $64,<bytes=%eax | ||
4992 | cmp $64,%eax | ||
4993 | # comment:fp stack unchanged by jump | ||
4994 | |||
4995 | # qhasm: goto bytesatleast65 if unsigned> | ||
4996 | ja ._bytesatleast65 | ||
4997 | # comment:fp stack unchanged by jump | ||
4998 | |||
4999 | # qhasm: goto bytesatleast64 if !unsigned< | ||
5000 | jae ._bytesatleast64 | ||
5001 | |||
5002 | # qhasm: m = out | ||
5003 | # asm 1: mov <out=int32#6,>m=int32#5 | ||
5004 | # asm 2: mov <out=%edi,>m=%esi | ||
5005 | mov %edi,%esi | ||
5006 | |||
5007 | # qhasm: out = ctarget | ||
5008 | # asm 1: movl <ctarget=stack32#6,>out=int32#6 | ||
5009 | # asm 2: movl <ctarget=20(%esp),>out=%edi | ||
5010 | movl 20(%esp),%edi | ||
5011 | |||
5012 | # qhasm: i = bytes | ||
5013 | # asm 1: mov <bytes=int32#1,>i=int32#2 | ||
5014 | # asm 2: mov <bytes=%eax,>i=%ecx | ||
5015 | mov %eax,%ecx | ||
5016 | |||
5017 | # qhasm: while (i) { *out++ = *m++; --i } | ||
5018 | rep movsb | ||
5019 | # comment:fp stack unchanged by fallthrough | ||
5020 | |||
5021 | # qhasm: bytesatleast64: | ||
5022 | ._bytesatleast64: | ||
5023 | # comment:fp stack unchanged by fallthrough | ||
5024 | |||
5025 | # qhasm: done: | ||
5026 | ._done: | ||
5027 | |||
5028 | # qhasm: eax = eax_stack | ||
5029 | # asm 1: movl <eax_stack=stack32#1,>eax=int32#1 | ||
5030 | # asm 2: movl <eax_stack=0(%esp),>eax=%eax | ||
5031 | movl 0(%esp),%eax | ||
5032 | |||
5033 | # qhasm: ebx = ebx_stack | ||
5034 | # asm 1: movl <ebx_stack=stack32#2,>ebx=int32#4 | ||
5035 | # asm 2: movl <ebx_stack=4(%esp),>ebx=%ebx | ||
5036 | movl 4(%esp),%ebx | ||
5037 | |||
5038 | # qhasm: esi = esi_stack | ||
5039 | # asm 1: movl <esi_stack=stack32#3,>esi=int32#5 | ||
5040 | # asm 2: movl <esi_stack=8(%esp),>esi=%esi | ||
5041 | movl 8(%esp),%esi | ||
5042 | |||
5043 | # qhasm: edi = edi_stack | ||
5044 | # asm 1: movl <edi_stack=stack32#4,>edi=int32#6 | ||
5045 | # asm 2: movl <edi_stack=12(%esp),>edi=%edi | ||
5046 | movl 12(%esp),%edi | ||
5047 | |||
5048 | # qhasm: ebp = ebp_stack | ||
5049 | # asm 1: movl <ebp_stack=stack32#5,>ebp=int32#7 | ||
5050 | # asm 2: movl <ebp_stack=16(%esp),>ebp=%ebp | ||
5051 | movl 16(%esp),%ebp | ||
5052 | |||
5053 | # qhasm: leave | ||
5054 | add %eax,%esp | ||
5055 | xor %eax,%eax | ||
5056 | ret | ||
5057 | |||
5058 | # qhasm: bytesatleast65: | ||
5059 | ._bytesatleast65: | ||
5060 | |||
5061 | # qhasm: bytes -= 64 | ||
5062 | # asm 1: sub $64,<bytes=int32#1 | ||
5063 | # asm 2: sub $64,<bytes=%eax | ||
5064 | sub $64,%eax | ||
5065 | |||
5066 | # qhasm: out += 64 | ||
5067 | # asm 1: add $64,<out=int32#6 | ||
5068 | # asm 2: add $64,<out=%edi | ||
5069 | add $64,%edi | ||
5070 | |||
5071 | # qhasm: m += 64 | ||
5072 | # asm 1: add $64,<m=int32#5 | ||
5073 | # asm 2: add $64,<m=%esi | ||
5074 | add $64,%esi | ||
5075 | # comment:fp stack unchanged by jump | ||
5076 | |||
5077 | # qhasm: goto bytesbetween1and255 | ||
5078 | jmp ._bytesbetween1and255 | ||
diff --git a/nacl/crypto_stream/salsa208/amd64_xmm6/api.h b/nacl/crypto_stream/salsa208/amd64_xmm6/api.h new file mode 100644 index 00000000..c2b18461 --- /dev/null +++ b/nacl/crypto_stream/salsa208/amd64_xmm6/api.h | |||
@@ -0,0 +1,2 @@ | |||
1 | #define CRYPTO_KEYBYTES 32 | ||
2 | #define CRYPTO_NONCEBYTES 8 | ||
diff --git a/nacl/crypto_stream/salsa208/amd64_xmm6/implementors b/nacl/crypto_stream/salsa208/amd64_xmm6/implementors new file mode 100644 index 00000000..f6fb3c73 --- /dev/null +++ b/nacl/crypto_stream/salsa208/amd64_xmm6/implementors | |||
@@ -0,0 +1 @@ | |||
Daniel J. Bernstein | |||
diff --git a/nacl/crypto_stream/salsa208/amd64_xmm6/stream.s b/nacl/crypto_stream/salsa208/amd64_xmm6/stream.s new file mode 100644 index 00000000..f27411fe --- /dev/null +++ b/nacl/crypto_stream/salsa208/amd64_xmm6/stream.s | |||
@@ -0,0 +1,4823 @@ | |||
1 | |||
2 | # qhasm: int64 r11_caller | ||
3 | |||
4 | # qhasm: int64 r12_caller | ||
5 | |||
6 | # qhasm: int64 r13_caller | ||
7 | |||
8 | # qhasm: int64 r14_caller | ||
9 | |||
10 | # qhasm: int64 r15_caller | ||
11 | |||
12 | # qhasm: int64 rbx_caller | ||
13 | |||
14 | # qhasm: int64 rbp_caller | ||
15 | |||
16 | # qhasm: caller r11_caller | ||
17 | |||
18 | # qhasm: caller r12_caller | ||
19 | |||
20 | # qhasm: caller r13_caller | ||
21 | |||
22 | # qhasm: caller r14_caller | ||
23 | |||
24 | # qhasm: caller r15_caller | ||
25 | |||
26 | # qhasm: caller rbx_caller | ||
27 | |||
28 | # qhasm: caller rbp_caller | ||
29 | |||
30 | # qhasm: stack64 r11_stack | ||
31 | |||
32 | # qhasm: stack64 r12_stack | ||
33 | |||
34 | # qhasm: stack64 r13_stack | ||
35 | |||
36 | # qhasm: stack64 r14_stack | ||
37 | |||
38 | # qhasm: stack64 r15_stack | ||
39 | |||
40 | # qhasm: stack64 rbx_stack | ||
41 | |||
42 | # qhasm: stack64 rbp_stack | ||
43 | |||
44 | # qhasm: int64 a | ||
45 | |||
46 | # qhasm: int64 arg1 | ||
47 | |||
48 | # qhasm: int64 arg2 | ||
49 | |||
50 | # qhasm: int64 arg3 | ||
51 | |||
52 | # qhasm: int64 arg4 | ||
53 | |||
54 | # qhasm: int64 arg5 | ||
55 | |||
56 | # qhasm: input arg1 | ||
57 | |||
58 | # qhasm: input arg2 | ||
59 | |||
60 | # qhasm: input arg3 | ||
61 | |||
62 | # qhasm: input arg4 | ||
63 | |||
64 | # qhasm: input arg5 | ||
65 | |||
66 | # qhasm: int64 k | ||
67 | |||
68 | # qhasm: int64 kbits | ||
69 | |||
70 | # qhasm: int64 iv | ||
71 | |||
72 | # qhasm: int64 i | ||
73 | |||
74 | # qhasm: stack128 x0 | ||
75 | |||
76 | # qhasm: stack128 x1 | ||
77 | |||
78 | # qhasm: stack128 x2 | ||
79 | |||
80 | # qhasm: stack128 x3 | ||
81 | |||
82 | # qhasm: int64 m | ||
83 | |||
84 | # qhasm: int64 out | ||
85 | |||
86 | # qhasm: int64 bytes | ||
87 | |||
88 | # qhasm: stack32 eax_stack | ||
89 | |||
90 | # qhasm: stack32 ebx_stack | ||
91 | |||
92 | # qhasm: stack32 esi_stack | ||
93 | |||
94 | # qhasm: stack32 edi_stack | ||
95 | |||
96 | # qhasm: stack32 ebp_stack | ||
97 | |||
98 | # qhasm: int6464 diag0 | ||
99 | |||
100 | # qhasm: int6464 diag1 | ||
101 | |||
102 | # qhasm: int6464 diag2 | ||
103 | |||
104 | # qhasm: int6464 diag3 | ||
105 | |||
106 | # qhasm: int6464 a0 | ||
107 | |||
108 | # qhasm: int6464 a1 | ||
109 | |||
110 | # qhasm: int6464 a2 | ||
111 | |||
112 | # qhasm: int6464 a3 | ||
113 | |||
114 | # qhasm: int6464 a4 | ||
115 | |||
116 | # qhasm: int6464 a5 | ||
117 | |||
118 | # qhasm: int6464 a6 | ||
119 | |||
120 | # qhasm: int6464 a7 | ||
121 | |||
122 | # qhasm: int6464 b0 | ||
123 | |||
124 | # qhasm: int6464 b1 | ||
125 | |||
126 | # qhasm: int6464 b2 | ||
127 | |||
128 | # qhasm: int6464 b3 | ||
129 | |||
130 | # qhasm: int6464 b4 | ||
131 | |||
132 | # qhasm: int6464 b5 | ||
133 | |||
134 | # qhasm: int6464 b6 | ||
135 | |||
136 | # qhasm: int6464 b7 | ||
137 | |||
138 | # qhasm: int6464 z0 | ||
139 | |||
140 | # qhasm: int6464 z1 | ||
141 | |||
142 | # qhasm: int6464 z2 | ||
143 | |||
144 | # qhasm: int6464 z3 | ||
145 | |||
146 | # qhasm: int6464 z4 | ||
147 | |||
148 | # qhasm: int6464 z5 | ||
149 | |||
150 | # qhasm: int6464 z6 | ||
151 | |||
152 | # qhasm: int6464 z7 | ||
153 | |||
154 | # qhasm: int6464 z8 | ||
155 | |||
156 | # qhasm: int6464 z9 | ||
157 | |||
158 | # qhasm: int6464 z10 | ||
159 | |||
160 | # qhasm: int6464 z11 | ||
161 | |||
162 | # qhasm: int6464 z12 | ||
163 | |||
164 | # qhasm: int6464 z13 | ||
165 | |||
166 | # qhasm: int6464 z14 | ||
167 | |||
168 | # qhasm: int6464 z15 | ||
169 | |||
170 | # qhasm: stack128 z0_stack | ||
171 | |||
172 | # qhasm: stack128 z1_stack | ||
173 | |||
174 | # qhasm: stack128 z2_stack | ||
175 | |||
176 | # qhasm: stack128 z3_stack | ||
177 | |||
178 | # qhasm: stack128 z4_stack | ||
179 | |||
180 | # qhasm: stack128 z5_stack | ||
181 | |||
182 | # qhasm: stack128 z6_stack | ||
183 | |||
184 | # qhasm: stack128 z7_stack | ||
185 | |||
186 | # qhasm: stack128 z8_stack | ||
187 | |||
188 | # qhasm: stack128 z9_stack | ||
189 | |||
190 | # qhasm: stack128 z10_stack | ||
191 | |||
192 | # qhasm: stack128 z11_stack | ||
193 | |||
194 | # qhasm: stack128 z12_stack | ||
195 | |||
196 | # qhasm: stack128 z13_stack | ||
197 | |||
198 | # qhasm: stack128 z14_stack | ||
199 | |||
200 | # qhasm: stack128 z15_stack | ||
201 | |||
202 | # qhasm: int6464 y0 | ||
203 | |||
204 | # qhasm: int6464 y1 | ||
205 | |||
206 | # qhasm: int6464 y2 | ||
207 | |||
208 | # qhasm: int6464 y3 | ||
209 | |||
210 | # qhasm: int6464 y4 | ||
211 | |||
212 | # qhasm: int6464 y5 | ||
213 | |||
214 | # qhasm: int6464 y6 | ||
215 | |||
216 | # qhasm: int6464 y7 | ||
217 | |||
218 | # qhasm: int6464 y8 | ||
219 | |||
220 | # qhasm: int6464 y9 | ||
221 | |||
222 | # qhasm: int6464 y10 | ||
223 | |||
224 | # qhasm: int6464 y11 | ||
225 | |||
226 | # qhasm: int6464 y12 | ||
227 | |||
228 | # qhasm: int6464 y13 | ||
229 | |||
230 | # qhasm: int6464 y14 | ||
231 | |||
232 | # qhasm: int6464 y15 | ||
233 | |||
234 | # qhasm: int6464 r0 | ||
235 | |||
236 | # qhasm: int6464 r1 | ||
237 | |||
238 | # qhasm: int6464 r2 | ||
239 | |||
240 | # qhasm: int6464 r3 | ||
241 | |||
242 | # qhasm: int6464 r4 | ||
243 | |||
244 | # qhasm: int6464 r5 | ||
245 | |||
246 | # qhasm: int6464 r6 | ||
247 | |||
248 | # qhasm: int6464 r7 | ||
249 | |||
250 | # qhasm: int6464 r8 | ||
251 | |||
252 | # qhasm: int6464 r9 | ||
253 | |||
254 | # qhasm: int6464 r10 | ||
255 | |||
256 | # qhasm: int6464 r11 | ||
257 | |||
258 | # qhasm: int6464 r12 | ||
259 | |||
260 | # qhasm: int6464 r13 | ||
261 | |||
262 | # qhasm: int6464 r14 | ||
263 | |||
264 | # qhasm: int6464 r15 | ||
265 | |||
266 | # qhasm: stack128 orig0 | ||
267 | |||
268 | # qhasm: stack128 orig1 | ||
269 | |||
270 | # qhasm: stack128 orig2 | ||
271 | |||
272 | # qhasm: stack128 orig3 | ||
273 | |||
274 | # qhasm: stack128 orig4 | ||
275 | |||
276 | # qhasm: stack128 orig5 | ||
277 | |||
278 | # qhasm: stack128 orig6 | ||
279 | |||
280 | # qhasm: stack128 orig7 | ||
281 | |||
282 | # qhasm: stack128 orig8 | ||
283 | |||
284 | # qhasm: stack128 orig9 | ||
285 | |||
286 | # qhasm: stack128 orig10 | ||
287 | |||
288 | # qhasm: stack128 orig11 | ||
289 | |||
290 | # qhasm: stack128 orig12 | ||
291 | |||
292 | # qhasm: stack128 orig13 | ||
293 | |||
294 | # qhasm: stack128 orig14 | ||
295 | |||
296 | # qhasm: stack128 orig15 | ||
297 | |||
298 | # qhasm: int64 in0 | ||
299 | |||
300 | # qhasm: int64 in1 | ||
301 | |||
302 | # qhasm: int64 in2 | ||
303 | |||
304 | # qhasm: int64 in3 | ||
305 | |||
306 | # qhasm: int64 in4 | ||
307 | |||
308 | # qhasm: int64 in5 | ||
309 | |||
310 | # qhasm: int64 in6 | ||
311 | |||
312 | # qhasm: int64 in7 | ||
313 | |||
314 | # qhasm: int64 in8 | ||
315 | |||
316 | # qhasm: int64 in9 | ||
317 | |||
318 | # qhasm: int64 in10 | ||
319 | |||
320 | # qhasm: int64 in11 | ||
321 | |||
322 | # qhasm: int64 in12 | ||
323 | |||
324 | # qhasm: int64 in13 | ||
325 | |||
326 | # qhasm: int64 in14 | ||
327 | |||
328 | # qhasm: int64 in15 | ||
329 | |||
330 | # qhasm: stack512 tmp | ||
331 | |||
332 | # qhasm: int64 ctarget | ||
333 | |||
334 | # qhasm: stack64 bytes_backup | ||
335 | |||
336 | # qhasm: enter crypto_stream_salsa208_amd64_xmm6 | ||
337 | .text | ||
338 | .p2align 5 | ||
339 | .globl _crypto_stream_salsa208_amd64_xmm6 | ||
340 | .globl crypto_stream_salsa208_amd64_xmm6 | ||
341 | _crypto_stream_salsa208_amd64_xmm6: | ||
342 | crypto_stream_salsa208_amd64_xmm6: | ||
343 | mov %rsp,%r11 | ||
344 | and $31,%r11 | ||
345 | add $480,%r11 | ||
346 | sub %r11,%rsp | ||
347 | |||
348 | # qhasm: r11_stack = r11_caller | ||
349 | # asm 1: movq <r11_caller=int64#9,>r11_stack=stack64#1 | ||
350 | # asm 2: movq <r11_caller=%r11,>r11_stack=352(%rsp) | ||
351 | movq %r11,352(%rsp) | ||
352 | |||
353 | # qhasm: r12_stack = r12_caller | ||
354 | # asm 1: movq <r12_caller=int64#10,>r12_stack=stack64#2 | ||
355 | # asm 2: movq <r12_caller=%r12,>r12_stack=360(%rsp) | ||
356 | movq %r12,360(%rsp) | ||
357 | |||
358 | # qhasm: r13_stack = r13_caller | ||
359 | # asm 1: movq <r13_caller=int64#11,>r13_stack=stack64#3 | ||
360 | # asm 2: movq <r13_caller=%r13,>r13_stack=368(%rsp) | ||
361 | movq %r13,368(%rsp) | ||
362 | |||
363 | # qhasm: r14_stack = r14_caller | ||
364 | # asm 1: movq <r14_caller=int64#12,>r14_stack=stack64#4 | ||
365 | # asm 2: movq <r14_caller=%r14,>r14_stack=376(%rsp) | ||
366 | movq %r14,376(%rsp) | ||
367 | |||
368 | # qhasm: r15_stack = r15_caller | ||
369 | # asm 1: movq <r15_caller=int64#13,>r15_stack=stack64#5 | ||
370 | # asm 2: movq <r15_caller=%r15,>r15_stack=384(%rsp) | ||
371 | movq %r15,384(%rsp) | ||
372 | |||
373 | # qhasm: rbx_stack = rbx_caller | ||
374 | # asm 1: movq <rbx_caller=int64#14,>rbx_stack=stack64#6 | ||
375 | # asm 2: movq <rbx_caller=%rbx,>rbx_stack=392(%rsp) | ||
376 | movq %rbx,392(%rsp) | ||
377 | |||
378 | # qhasm: rbp_stack = rbp_caller | ||
379 | # asm 1: movq <rbp_caller=int64#15,>rbp_stack=stack64#7 | ||
380 | # asm 2: movq <rbp_caller=%rbp,>rbp_stack=400(%rsp) | ||
381 | movq %rbp,400(%rsp) | ||
382 | |||
383 | # qhasm: bytes = arg2 | ||
384 | # asm 1: mov <arg2=int64#2,>bytes=int64#6 | ||
385 | # asm 2: mov <arg2=%rsi,>bytes=%r9 | ||
386 | mov %rsi,%r9 | ||
387 | |||
388 | # qhasm: out = arg1 | ||
389 | # asm 1: mov <arg1=int64#1,>out=int64#1 | ||
390 | # asm 2: mov <arg1=%rdi,>out=%rdi | ||
391 | mov %rdi,%rdi | ||
392 | |||
393 | # qhasm: m = out | ||
394 | # asm 1: mov <out=int64#1,>m=int64#2 | ||
395 | # asm 2: mov <out=%rdi,>m=%rsi | ||
396 | mov %rdi,%rsi | ||
397 | |||
398 | # qhasm: iv = arg3 | ||
399 | # asm 1: mov <arg3=int64#3,>iv=int64#3 | ||
400 | # asm 2: mov <arg3=%rdx,>iv=%rdx | ||
401 | mov %rdx,%rdx | ||
402 | |||
403 | # qhasm: k = arg4 | ||
404 | # asm 1: mov <arg4=int64#4,>k=int64#8 | ||
405 | # asm 2: mov <arg4=%rcx,>k=%r10 | ||
406 | mov %rcx,%r10 | ||
407 | |||
408 | # qhasm: unsigned>? bytes - 0 | ||
409 | # asm 1: cmp $0,<bytes=int64#6 | ||
410 | # asm 2: cmp $0,<bytes=%r9 | ||
411 | cmp $0,%r9 | ||
412 | # comment:fp stack unchanged by jump | ||
413 | |||
414 | # qhasm: goto done if !unsigned> | ||
415 | jbe ._done | ||
416 | |||
417 | # qhasm: a = 0 | ||
418 | # asm 1: mov $0,>a=int64#7 | ||
419 | # asm 2: mov $0,>a=%rax | ||
420 | mov $0,%rax | ||
421 | |||
422 | # qhasm: i = bytes | ||
423 | # asm 1: mov <bytes=int64#6,>i=int64#4 | ||
424 | # asm 2: mov <bytes=%r9,>i=%rcx | ||
425 | mov %r9,%rcx | ||
426 | |||
427 | # qhasm: while (i) { *out++ = a; --i } | ||
428 | rep stosb | ||
429 | |||
430 | # qhasm: out -= bytes | ||
431 | # asm 1: sub <bytes=int64#6,<out=int64#1 | ||
432 | # asm 2: sub <bytes=%r9,<out=%rdi | ||
433 | sub %r9,%rdi | ||
434 | # comment:fp stack unchanged by jump | ||
435 | |||
436 | # qhasm: goto start | ||
437 | jmp ._start | ||
438 | |||
439 | # qhasm: enter crypto_stream_salsa208_amd64_xmm6_xor | ||
440 | .text | ||
441 | .p2align 5 | ||
442 | .globl _crypto_stream_salsa208_amd64_xmm6_xor | ||
443 | .globl crypto_stream_salsa208_amd64_xmm6_xor | ||
444 | _crypto_stream_salsa208_amd64_xmm6_xor: | ||
445 | crypto_stream_salsa208_amd64_xmm6_xor: | ||
446 | mov %rsp,%r11 | ||
447 | and $31,%r11 | ||
448 | add $480,%r11 | ||
449 | sub %r11,%rsp | ||
450 | |||
451 | # qhasm: r11_stack = r11_caller | ||
452 | # asm 1: movq <r11_caller=int64#9,>r11_stack=stack64#1 | ||
453 | # asm 2: movq <r11_caller=%r11,>r11_stack=352(%rsp) | ||
454 | movq %r11,352(%rsp) | ||
455 | |||
456 | # qhasm: r12_stack = r12_caller | ||
457 | # asm 1: movq <r12_caller=int64#10,>r12_stack=stack64#2 | ||
458 | # asm 2: movq <r12_caller=%r12,>r12_stack=360(%rsp) | ||
459 | movq %r12,360(%rsp) | ||
460 | |||
461 | # qhasm: r13_stack = r13_caller | ||
462 | # asm 1: movq <r13_caller=int64#11,>r13_stack=stack64#3 | ||
463 | # asm 2: movq <r13_caller=%r13,>r13_stack=368(%rsp) | ||
464 | movq %r13,368(%rsp) | ||
465 | |||
466 | # qhasm: r14_stack = r14_caller | ||
467 | # asm 1: movq <r14_caller=int64#12,>r14_stack=stack64#4 | ||
468 | # asm 2: movq <r14_caller=%r14,>r14_stack=376(%rsp) | ||
469 | movq %r14,376(%rsp) | ||
470 | |||
471 | # qhasm: r15_stack = r15_caller | ||
472 | # asm 1: movq <r15_caller=int64#13,>r15_stack=stack64#5 | ||
473 | # asm 2: movq <r15_caller=%r15,>r15_stack=384(%rsp) | ||
474 | movq %r15,384(%rsp) | ||
475 | |||
476 | # qhasm: rbx_stack = rbx_caller | ||
477 | # asm 1: movq <rbx_caller=int64#14,>rbx_stack=stack64#6 | ||
478 | # asm 2: movq <rbx_caller=%rbx,>rbx_stack=392(%rsp) | ||
479 | movq %rbx,392(%rsp) | ||
480 | |||
481 | # qhasm: rbp_stack = rbp_caller | ||
482 | # asm 1: movq <rbp_caller=int64#15,>rbp_stack=stack64#7 | ||
483 | # asm 2: movq <rbp_caller=%rbp,>rbp_stack=400(%rsp) | ||
484 | movq %rbp,400(%rsp) | ||
485 | |||
486 | # qhasm: out = arg1 | ||
487 | # asm 1: mov <arg1=int64#1,>out=int64#1 | ||
488 | # asm 2: mov <arg1=%rdi,>out=%rdi | ||
489 | mov %rdi,%rdi | ||
490 | |||
491 | # qhasm: m = arg2 | ||
492 | # asm 1: mov <arg2=int64#2,>m=int64#2 | ||
493 | # asm 2: mov <arg2=%rsi,>m=%rsi | ||
494 | mov %rsi,%rsi | ||
495 | |||
496 | # qhasm: bytes = arg3 | ||
497 | # asm 1: mov <arg3=int64#3,>bytes=int64#6 | ||
498 | # asm 2: mov <arg3=%rdx,>bytes=%r9 | ||
499 | mov %rdx,%r9 | ||
500 | |||
501 | # qhasm: iv = arg4 | ||
502 | # asm 1: mov <arg4=int64#4,>iv=int64#3 | ||
503 | # asm 2: mov <arg4=%rcx,>iv=%rdx | ||
504 | mov %rcx,%rdx | ||
505 | |||
506 | # qhasm: k = arg5 | ||
507 | # asm 1: mov <arg5=int64#5,>k=int64#8 | ||
508 | # asm 2: mov <arg5=%r8,>k=%r10 | ||
509 | mov %r8,%r10 | ||
510 | |||
511 | # qhasm: unsigned>? bytes - 0 | ||
512 | # asm 1: cmp $0,<bytes=int64#6 | ||
513 | # asm 2: cmp $0,<bytes=%r9 | ||
514 | cmp $0,%r9 | ||
515 | # comment:fp stack unchanged by jump | ||
516 | |||
517 | # qhasm: goto done if !unsigned> | ||
518 | jbe ._done | ||
519 | # comment:fp stack unchanged by fallthrough | ||
520 | |||
521 | # qhasm: start: | ||
522 | ._start: | ||
523 | |||
524 | # qhasm: in12 = *(uint32 *) (k + 20) | ||
525 | # asm 1: movl 20(<k=int64#8),>in12=int64#4d | ||
526 | # asm 2: movl 20(<k=%r10),>in12=%ecx | ||
527 | movl 20(%r10),%ecx | ||
528 | |||
529 | # qhasm: in1 = *(uint32 *) (k + 0) | ||
530 | # asm 1: movl 0(<k=int64#8),>in1=int64#5d | ||
531 | # asm 2: movl 0(<k=%r10),>in1=%r8d | ||
532 | movl 0(%r10),%r8d | ||
533 | |||
534 | # qhasm: in6 = *(uint32 *) (iv + 0) | ||
535 | # asm 1: movl 0(<iv=int64#3),>in6=int64#7d | ||
536 | # asm 2: movl 0(<iv=%rdx),>in6=%eax | ||
537 | movl 0(%rdx),%eax | ||
538 | |||
539 | # qhasm: in11 = *(uint32 *) (k + 16) | ||
540 | # asm 1: movl 16(<k=int64#8),>in11=int64#9d | ||
541 | # asm 2: movl 16(<k=%r10),>in11=%r11d | ||
542 | movl 16(%r10),%r11d | ||
543 | |||
544 | # qhasm: ((uint32 *)&x1)[0] = in12 | ||
545 | # asm 1: movl <in12=int64#4d,>x1=stack128#1 | ||
546 | # asm 2: movl <in12=%ecx,>x1=0(%rsp) | ||
547 | movl %ecx,0(%rsp) | ||
548 | |||
549 | # qhasm: ((uint32 *)&x1)[1] = in1 | ||
550 | # asm 1: movl <in1=int64#5d,4+<x1=stack128#1 | ||
551 | # asm 2: movl <in1=%r8d,4+<x1=0(%rsp) | ||
552 | movl %r8d,4+0(%rsp) | ||
553 | |||
554 | # qhasm: ((uint32 *)&x1)[2] = in6 | ||
555 | # asm 1: movl <in6=int64#7d,8+<x1=stack128#1 | ||
556 | # asm 2: movl <in6=%eax,8+<x1=0(%rsp) | ||
557 | movl %eax,8+0(%rsp) | ||
558 | |||
559 | # qhasm: ((uint32 *)&x1)[3] = in11 | ||
560 | # asm 1: movl <in11=int64#9d,12+<x1=stack128#1 | ||
561 | # asm 2: movl <in11=%r11d,12+<x1=0(%rsp) | ||
562 | movl %r11d,12+0(%rsp) | ||
563 | |||
564 | # qhasm: in8 = 0 | ||
565 | # asm 1: mov $0,>in8=int64#4 | ||
566 | # asm 2: mov $0,>in8=%rcx | ||
567 | mov $0,%rcx | ||
568 | |||
569 | # qhasm: in13 = *(uint32 *) (k + 24) | ||
570 | # asm 1: movl 24(<k=int64#8),>in13=int64#5d | ||
571 | # asm 2: movl 24(<k=%r10),>in13=%r8d | ||
572 | movl 24(%r10),%r8d | ||
573 | |||
574 | # qhasm: in2 = *(uint32 *) (k + 4) | ||
575 | # asm 1: movl 4(<k=int64#8),>in2=int64#7d | ||
576 | # asm 2: movl 4(<k=%r10),>in2=%eax | ||
577 | movl 4(%r10),%eax | ||
578 | |||
579 | # qhasm: in7 = *(uint32 *) (iv + 4) | ||
580 | # asm 1: movl 4(<iv=int64#3),>in7=int64#3d | ||
581 | # asm 2: movl 4(<iv=%rdx),>in7=%edx | ||
582 | movl 4(%rdx),%edx | ||
583 | |||
584 | # qhasm: ((uint32 *)&x2)[0] = in8 | ||
585 | # asm 1: movl <in8=int64#4d,>x2=stack128#2 | ||
586 | # asm 2: movl <in8=%ecx,>x2=16(%rsp) | ||
587 | movl %ecx,16(%rsp) | ||
588 | |||
589 | # qhasm: ((uint32 *)&x2)[1] = in13 | ||
590 | # asm 1: movl <in13=int64#5d,4+<x2=stack128#2 | ||
591 | # asm 2: movl <in13=%r8d,4+<x2=16(%rsp) | ||
592 | movl %r8d,4+16(%rsp) | ||
593 | |||
594 | # qhasm: ((uint32 *)&x2)[2] = in2 | ||
595 | # asm 1: movl <in2=int64#7d,8+<x2=stack128#2 | ||
596 | # asm 2: movl <in2=%eax,8+<x2=16(%rsp) | ||
597 | movl %eax,8+16(%rsp) | ||
598 | |||
599 | # qhasm: ((uint32 *)&x2)[3] = in7 | ||
600 | # asm 1: movl <in7=int64#3d,12+<x2=stack128#2 | ||
601 | # asm 2: movl <in7=%edx,12+<x2=16(%rsp) | ||
602 | movl %edx,12+16(%rsp) | ||
603 | |||
604 | # qhasm: in4 = *(uint32 *) (k + 12) | ||
605 | # asm 1: movl 12(<k=int64#8),>in4=int64#3d | ||
606 | # asm 2: movl 12(<k=%r10),>in4=%edx | ||
607 | movl 12(%r10),%edx | ||
608 | |||
609 | # qhasm: in9 = 0 | ||
610 | # asm 1: mov $0,>in9=int64#4 | ||
611 | # asm 2: mov $0,>in9=%rcx | ||
612 | mov $0,%rcx | ||
613 | |||
614 | # qhasm: in14 = *(uint32 *) (k + 28) | ||
615 | # asm 1: movl 28(<k=int64#8),>in14=int64#5d | ||
616 | # asm 2: movl 28(<k=%r10),>in14=%r8d | ||
617 | movl 28(%r10),%r8d | ||
618 | |||
619 | # qhasm: in3 = *(uint32 *) (k + 8) | ||
620 | # asm 1: movl 8(<k=int64#8),>in3=int64#7d | ||
621 | # asm 2: movl 8(<k=%r10),>in3=%eax | ||
622 | movl 8(%r10),%eax | ||
623 | |||
624 | # qhasm: ((uint32 *)&x3)[0] = in4 | ||
625 | # asm 1: movl <in4=int64#3d,>x3=stack128#3 | ||
626 | # asm 2: movl <in4=%edx,>x3=32(%rsp) | ||
627 | movl %edx,32(%rsp) | ||
628 | |||
629 | # qhasm: ((uint32 *)&x3)[1] = in9 | ||
630 | # asm 1: movl <in9=int64#4d,4+<x3=stack128#3 | ||
631 | # asm 2: movl <in9=%ecx,4+<x3=32(%rsp) | ||
632 | movl %ecx,4+32(%rsp) | ||
633 | |||
634 | # qhasm: ((uint32 *)&x3)[2] = in14 | ||
635 | # asm 1: movl <in14=int64#5d,8+<x3=stack128#3 | ||
636 | # asm 2: movl <in14=%r8d,8+<x3=32(%rsp) | ||
637 | movl %r8d,8+32(%rsp) | ||
638 | |||
639 | # qhasm: ((uint32 *)&x3)[3] = in3 | ||
640 | # asm 1: movl <in3=int64#7d,12+<x3=stack128#3 | ||
641 | # asm 2: movl <in3=%eax,12+<x3=32(%rsp) | ||
642 | movl %eax,12+32(%rsp) | ||
643 | |||
644 | # qhasm: in0 = 1634760805 | ||
645 | # asm 1: mov $1634760805,>in0=int64#3 | ||
646 | # asm 2: mov $1634760805,>in0=%rdx | ||
647 | mov $1634760805,%rdx | ||
648 | |||
649 | # qhasm: in5 = 857760878 | ||
650 | # asm 1: mov $857760878,>in5=int64#4 | ||
651 | # asm 2: mov $857760878,>in5=%rcx | ||
652 | mov $857760878,%rcx | ||
653 | |||
654 | # qhasm: in10 = 2036477234 | ||
655 | # asm 1: mov $2036477234,>in10=int64#5 | ||
656 | # asm 2: mov $2036477234,>in10=%r8 | ||
657 | mov $2036477234,%r8 | ||
658 | |||
659 | # qhasm: in15 = 1797285236 | ||
660 | # asm 1: mov $1797285236,>in15=int64#7 | ||
661 | # asm 2: mov $1797285236,>in15=%rax | ||
662 | mov $1797285236,%rax | ||
663 | |||
664 | # qhasm: ((uint32 *)&x0)[0] = in0 | ||
665 | # asm 1: movl <in0=int64#3d,>x0=stack128#4 | ||
666 | # asm 2: movl <in0=%edx,>x0=48(%rsp) | ||
667 | movl %edx,48(%rsp) | ||
668 | |||
669 | # qhasm: ((uint32 *)&x0)[1] = in5 | ||
670 | # asm 1: movl <in5=int64#4d,4+<x0=stack128#4 | ||
671 | # asm 2: movl <in5=%ecx,4+<x0=48(%rsp) | ||
672 | movl %ecx,4+48(%rsp) | ||
673 | |||
674 | # qhasm: ((uint32 *)&x0)[2] = in10 | ||
675 | # asm 1: movl <in10=int64#5d,8+<x0=stack128#4 | ||
676 | # asm 2: movl <in10=%r8d,8+<x0=48(%rsp) | ||
677 | movl %r8d,8+48(%rsp) | ||
678 | |||
679 | # qhasm: ((uint32 *)&x0)[3] = in15 | ||
680 | # asm 1: movl <in15=int64#7d,12+<x0=stack128#4 | ||
681 | # asm 2: movl <in15=%eax,12+<x0=48(%rsp) | ||
682 | movl %eax,12+48(%rsp) | ||
683 | |||
684 | # qhasm: unsigned<? bytes - 256 | ||
685 | # asm 1: cmp $256,<bytes=int64#6 | ||
686 | # asm 2: cmp $256,<bytes=%r9 | ||
687 | cmp $256,%r9 | ||
688 | # comment:fp stack unchanged by jump | ||
689 | |||
690 | # qhasm: goto bytesbetween1and255 if unsigned< | ||
691 | jb ._bytesbetween1and255 | ||
692 | |||
693 | # qhasm: z0 = x0 | ||
694 | # asm 1: movdqa <x0=stack128#4,>z0=int6464#1 | ||
695 | # asm 2: movdqa <x0=48(%rsp),>z0=%xmm0 | ||
696 | movdqa 48(%rsp),%xmm0 | ||
697 | |||
698 | # qhasm: z5 = z0[1,1,1,1] | ||
699 | # asm 1: pshufd $0x55,<z0=int6464#1,>z5=int6464#2 | ||
700 | # asm 2: pshufd $0x55,<z0=%xmm0,>z5=%xmm1 | ||
701 | pshufd $0x55,%xmm0,%xmm1 | ||
702 | |||
703 | # qhasm: z10 = z0[2,2,2,2] | ||
704 | # asm 1: pshufd $0xaa,<z0=int6464#1,>z10=int6464#3 | ||
705 | # asm 2: pshufd $0xaa,<z0=%xmm0,>z10=%xmm2 | ||
706 | pshufd $0xaa,%xmm0,%xmm2 | ||
707 | |||
708 | # qhasm: z15 = z0[3,3,3,3] | ||
709 | # asm 1: pshufd $0xff,<z0=int6464#1,>z15=int6464#4 | ||
710 | # asm 2: pshufd $0xff,<z0=%xmm0,>z15=%xmm3 | ||
711 | pshufd $0xff,%xmm0,%xmm3 | ||
712 | |||
713 | # qhasm: z0 = z0[0,0,0,0] | ||
714 | # asm 1: pshufd $0x00,<z0=int6464#1,>z0=int6464#1 | ||
715 | # asm 2: pshufd $0x00,<z0=%xmm0,>z0=%xmm0 | ||
716 | pshufd $0x00,%xmm0,%xmm0 | ||
717 | |||
718 | # qhasm: orig5 = z5 | ||
719 | # asm 1: movdqa <z5=int6464#2,>orig5=stack128#5 | ||
720 | # asm 2: movdqa <z5=%xmm1,>orig5=64(%rsp) | ||
721 | movdqa %xmm1,64(%rsp) | ||
722 | |||
723 | # qhasm: orig10 = z10 | ||
724 | # asm 1: movdqa <z10=int6464#3,>orig10=stack128#6 | ||
725 | # asm 2: movdqa <z10=%xmm2,>orig10=80(%rsp) | ||
726 | movdqa %xmm2,80(%rsp) | ||
727 | |||
728 | # qhasm: orig15 = z15 | ||
729 | # asm 1: movdqa <z15=int6464#4,>orig15=stack128#7 | ||
730 | # asm 2: movdqa <z15=%xmm3,>orig15=96(%rsp) | ||
731 | movdqa %xmm3,96(%rsp) | ||
732 | |||
733 | # qhasm: orig0 = z0 | ||
734 | # asm 1: movdqa <z0=int6464#1,>orig0=stack128#8 | ||
735 | # asm 2: movdqa <z0=%xmm0,>orig0=112(%rsp) | ||
736 | movdqa %xmm0,112(%rsp) | ||
737 | |||
738 | # qhasm: z1 = x1 | ||
739 | # asm 1: movdqa <x1=stack128#1,>z1=int6464#1 | ||
740 | # asm 2: movdqa <x1=0(%rsp),>z1=%xmm0 | ||
741 | movdqa 0(%rsp),%xmm0 | ||
742 | |||
743 | # qhasm: z6 = z1[2,2,2,2] | ||
744 | # asm 1: pshufd $0xaa,<z1=int6464#1,>z6=int6464#2 | ||
745 | # asm 2: pshufd $0xaa,<z1=%xmm0,>z6=%xmm1 | ||
746 | pshufd $0xaa,%xmm0,%xmm1 | ||
747 | |||
748 | # qhasm: z11 = z1[3,3,3,3] | ||
749 | # asm 1: pshufd $0xff,<z1=int6464#1,>z11=int6464#3 | ||
750 | # asm 2: pshufd $0xff,<z1=%xmm0,>z11=%xmm2 | ||
751 | pshufd $0xff,%xmm0,%xmm2 | ||
752 | |||
753 | # qhasm: z12 = z1[0,0,0,0] | ||
754 | # asm 1: pshufd $0x00,<z1=int6464#1,>z12=int6464#4 | ||
755 | # asm 2: pshufd $0x00,<z1=%xmm0,>z12=%xmm3 | ||
756 | pshufd $0x00,%xmm0,%xmm3 | ||
757 | |||
758 | # qhasm: z1 = z1[1,1,1,1] | ||
759 | # asm 1: pshufd $0x55,<z1=int6464#1,>z1=int6464#1 | ||
760 | # asm 2: pshufd $0x55,<z1=%xmm0,>z1=%xmm0 | ||
761 | pshufd $0x55,%xmm0,%xmm0 | ||
762 | |||
763 | # qhasm: orig6 = z6 | ||
764 | # asm 1: movdqa <z6=int6464#2,>orig6=stack128#9 | ||
765 | # asm 2: movdqa <z6=%xmm1,>orig6=128(%rsp) | ||
766 | movdqa %xmm1,128(%rsp) | ||
767 | |||
768 | # qhasm: orig11 = z11 | ||
769 | # asm 1: movdqa <z11=int6464#3,>orig11=stack128#10 | ||
770 | # asm 2: movdqa <z11=%xmm2,>orig11=144(%rsp) | ||
771 | movdqa %xmm2,144(%rsp) | ||
772 | |||
773 | # qhasm: orig12 = z12 | ||
774 | # asm 1: movdqa <z12=int6464#4,>orig12=stack128#11 | ||
775 | # asm 2: movdqa <z12=%xmm3,>orig12=160(%rsp) | ||
776 | movdqa %xmm3,160(%rsp) | ||
777 | |||
778 | # qhasm: orig1 = z1 | ||
779 | # asm 1: movdqa <z1=int6464#1,>orig1=stack128#12 | ||
780 | # asm 2: movdqa <z1=%xmm0,>orig1=176(%rsp) | ||
781 | movdqa %xmm0,176(%rsp) | ||
782 | |||
783 | # qhasm: z2 = x2 | ||
784 | # asm 1: movdqa <x2=stack128#2,>z2=int6464#1 | ||
785 | # asm 2: movdqa <x2=16(%rsp),>z2=%xmm0 | ||
786 | movdqa 16(%rsp),%xmm0 | ||
787 | |||
788 | # qhasm: z7 = z2[3,3,3,3] | ||
789 | # asm 1: pshufd $0xff,<z2=int6464#1,>z7=int6464#2 | ||
790 | # asm 2: pshufd $0xff,<z2=%xmm0,>z7=%xmm1 | ||
791 | pshufd $0xff,%xmm0,%xmm1 | ||
792 | |||
793 | # qhasm: z13 = z2[1,1,1,1] | ||
794 | # asm 1: pshufd $0x55,<z2=int6464#1,>z13=int6464#3 | ||
795 | # asm 2: pshufd $0x55,<z2=%xmm0,>z13=%xmm2 | ||
796 | pshufd $0x55,%xmm0,%xmm2 | ||
797 | |||
798 | # qhasm: z2 = z2[2,2,2,2] | ||
799 | # asm 1: pshufd $0xaa,<z2=int6464#1,>z2=int6464#1 | ||
800 | # asm 2: pshufd $0xaa,<z2=%xmm0,>z2=%xmm0 | ||
801 | pshufd $0xaa,%xmm0,%xmm0 | ||
802 | |||
803 | # qhasm: orig7 = z7 | ||
804 | # asm 1: movdqa <z7=int6464#2,>orig7=stack128#13 | ||
805 | # asm 2: movdqa <z7=%xmm1,>orig7=192(%rsp) | ||
806 | movdqa %xmm1,192(%rsp) | ||
807 | |||
808 | # qhasm: orig13 = z13 | ||
809 | # asm 1: movdqa <z13=int6464#3,>orig13=stack128#14 | ||
810 | # asm 2: movdqa <z13=%xmm2,>orig13=208(%rsp) | ||
811 | movdqa %xmm2,208(%rsp) | ||
812 | |||
813 | # qhasm: orig2 = z2 | ||
814 | # asm 1: movdqa <z2=int6464#1,>orig2=stack128#15 | ||
815 | # asm 2: movdqa <z2=%xmm0,>orig2=224(%rsp) | ||
816 | movdqa %xmm0,224(%rsp) | ||
817 | |||
818 | # qhasm: z3 = x3 | ||
819 | # asm 1: movdqa <x3=stack128#3,>z3=int6464#1 | ||
820 | # asm 2: movdqa <x3=32(%rsp),>z3=%xmm0 | ||
821 | movdqa 32(%rsp),%xmm0 | ||
822 | |||
823 | # qhasm: z4 = z3[0,0,0,0] | ||
824 | # asm 1: pshufd $0x00,<z3=int6464#1,>z4=int6464#2 | ||
825 | # asm 2: pshufd $0x00,<z3=%xmm0,>z4=%xmm1 | ||
826 | pshufd $0x00,%xmm0,%xmm1 | ||
827 | |||
828 | # qhasm: z14 = z3[2,2,2,2] | ||
829 | # asm 1: pshufd $0xaa,<z3=int6464#1,>z14=int6464#3 | ||
830 | # asm 2: pshufd $0xaa,<z3=%xmm0,>z14=%xmm2 | ||
831 | pshufd $0xaa,%xmm0,%xmm2 | ||
832 | |||
833 | # qhasm: z3 = z3[3,3,3,3] | ||
834 | # asm 1: pshufd $0xff,<z3=int6464#1,>z3=int6464#1 | ||
835 | # asm 2: pshufd $0xff,<z3=%xmm0,>z3=%xmm0 | ||
836 | pshufd $0xff,%xmm0,%xmm0 | ||
837 | |||
838 | # qhasm: orig4 = z4 | ||
839 | # asm 1: movdqa <z4=int6464#2,>orig4=stack128#16 | ||
840 | # asm 2: movdqa <z4=%xmm1,>orig4=240(%rsp) | ||
841 | movdqa %xmm1,240(%rsp) | ||
842 | |||
843 | # qhasm: orig14 = z14 | ||
844 | # asm 1: movdqa <z14=int6464#3,>orig14=stack128#17 | ||
845 | # asm 2: movdqa <z14=%xmm2,>orig14=256(%rsp) | ||
846 | movdqa %xmm2,256(%rsp) | ||
847 | |||
848 | # qhasm: orig3 = z3 | ||
849 | # asm 1: movdqa <z3=int6464#1,>orig3=stack128#18 | ||
850 | # asm 2: movdqa <z3=%xmm0,>orig3=272(%rsp) | ||
851 | movdqa %xmm0,272(%rsp) | ||
852 | |||
853 | # qhasm: bytesatleast256: | ||
854 | ._bytesatleast256: | ||
855 | |||
856 | # qhasm: in8 = ((uint32 *)&x2)[0] | ||
857 | # asm 1: movl <x2=stack128#2,>in8=int64#3d | ||
858 | # asm 2: movl <x2=16(%rsp),>in8=%edx | ||
859 | movl 16(%rsp),%edx | ||
860 | |||
861 | # qhasm: in9 = ((uint32 *)&x3)[1] | ||
862 | # asm 1: movl 4+<x3=stack128#3,>in9=int64#4d | ||
863 | # asm 2: movl 4+<x3=32(%rsp),>in9=%ecx | ||
864 | movl 4+32(%rsp),%ecx | ||
865 | |||
866 | # qhasm: ((uint32 *) &orig8)[0] = in8 | ||
867 | # asm 1: movl <in8=int64#3d,>orig8=stack128#19 | ||
868 | # asm 2: movl <in8=%edx,>orig8=288(%rsp) | ||
869 | movl %edx,288(%rsp) | ||
870 | |||
871 | # qhasm: ((uint32 *) &orig9)[0] = in9 | ||
872 | # asm 1: movl <in9=int64#4d,>orig9=stack128#20 | ||
873 | # asm 2: movl <in9=%ecx,>orig9=304(%rsp) | ||
874 | movl %ecx,304(%rsp) | ||
875 | |||
876 | # qhasm: in8 += 1 | ||
877 | # asm 1: add $1,<in8=int64#3 | ||
878 | # asm 2: add $1,<in8=%rdx | ||
879 | add $1,%rdx | ||
880 | |||
881 | # qhasm: in9 <<= 32 | ||
882 | # asm 1: shl $32,<in9=int64#4 | ||
883 | # asm 2: shl $32,<in9=%rcx | ||
884 | shl $32,%rcx | ||
885 | |||
886 | # qhasm: in8 += in9 | ||
887 | # asm 1: add <in9=int64#4,<in8=int64#3 | ||
888 | # asm 2: add <in9=%rcx,<in8=%rdx | ||
889 | add %rcx,%rdx | ||
890 | |||
891 | # qhasm: in9 = in8 | ||
892 | # asm 1: mov <in8=int64#3,>in9=int64#4 | ||
893 | # asm 2: mov <in8=%rdx,>in9=%rcx | ||
894 | mov %rdx,%rcx | ||
895 | |||
896 | # qhasm: (uint64) in9 >>= 32 | ||
897 | # asm 1: shr $32,<in9=int64#4 | ||
898 | # asm 2: shr $32,<in9=%rcx | ||
899 | shr $32,%rcx | ||
900 | |||
901 | # qhasm: ((uint32 *) &orig8)[1] = in8 | ||
902 | # asm 1: movl <in8=int64#3d,4+<orig8=stack128#19 | ||
903 | # asm 2: movl <in8=%edx,4+<orig8=288(%rsp) | ||
904 | movl %edx,4+288(%rsp) | ||
905 | |||
906 | # qhasm: ((uint32 *) &orig9)[1] = in9 | ||
907 | # asm 1: movl <in9=int64#4d,4+<orig9=stack128#20 | ||
908 | # asm 2: movl <in9=%ecx,4+<orig9=304(%rsp) | ||
909 | movl %ecx,4+304(%rsp) | ||
910 | |||
911 | # qhasm: in8 += 1 | ||
912 | # asm 1: add $1,<in8=int64#3 | ||
913 | # asm 2: add $1,<in8=%rdx | ||
914 | add $1,%rdx | ||
915 | |||
916 | # qhasm: in9 <<= 32 | ||
917 | # asm 1: shl $32,<in9=int64#4 | ||
918 | # asm 2: shl $32,<in9=%rcx | ||
919 | shl $32,%rcx | ||
920 | |||
921 | # qhasm: in8 += in9 | ||
922 | # asm 1: add <in9=int64#4,<in8=int64#3 | ||
923 | # asm 2: add <in9=%rcx,<in8=%rdx | ||
924 | add %rcx,%rdx | ||
925 | |||
926 | # qhasm: in9 = in8 | ||
927 | # asm 1: mov <in8=int64#3,>in9=int64#4 | ||
928 | # asm 2: mov <in8=%rdx,>in9=%rcx | ||
929 | mov %rdx,%rcx | ||
930 | |||
931 | # qhasm: (uint64) in9 >>= 32 | ||
932 | # asm 1: shr $32,<in9=int64#4 | ||
933 | # asm 2: shr $32,<in9=%rcx | ||
934 | shr $32,%rcx | ||
935 | |||
936 | # qhasm: ((uint32 *) &orig8)[2] = in8 | ||
937 | # asm 1: movl <in8=int64#3d,8+<orig8=stack128#19 | ||
938 | # asm 2: movl <in8=%edx,8+<orig8=288(%rsp) | ||
939 | movl %edx,8+288(%rsp) | ||
940 | |||
941 | # qhasm: ((uint32 *) &orig9)[2] = in9 | ||
942 | # asm 1: movl <in9=int64#4d,8+<orig9=stack128#20 | ||
943 | # asm 2: movl <in9=%ecx,8+<orig9=304(%rsp) | ||
944 | movl %ecx,8+304(%rsp) | ||
945 | |||
946 | # qhasm: in8 += 1 | ||
947 | # asm 1: add $1,<in8=int64#3 | ||
948 | # asm 2: add $1,<in8=%rdx | ||
949 | add $1,%rdx | ||
950 | |||
951 | # qhasm: in9 <<= 32 | ||
952 | # asm 1: shl $32,<in9=int64#4 | ||
953 | # asm 2: shl $32,<in9=%rcx | ||
954 | shl $32,%rcx | ||
955 | |||
956 | # qhasm: in8 += in9 | ||
957 | # asm 1: add <in9=int64#4,<in8=int64#3 | ||
958 | # asm 2: add <in9=%rcx,<in8=%rdx | ||
959 | add %rcx,%rdx | ||
960 | |||
961 | # qhasm: in9 = in8 | ||
962 | # asm 1: mov <in8=int64#3,>in9=int64#4 | ||
963 | # asm 2: mov <in8=%rdx,>in9=%rcx | ||
964 | mov %rdx,%rcx | ||
965 | |||
966 | # qhasm: (uint64) in9 >>= 32 | ||
967 | # asm 1: shr $32,<in9=int64#4 | ||
968 | # asm 2: shr $32,<in9=%rcx | ||
969 | shr $32,%rcx | ||
970 | |||
971 | # qhasm: ((uint32 *) &orig8)[3] = in8 | ||
972 | # asm 1: movl <in8=int64#3d,12+<orig8=stack128#19 | ||
973 | # asm 2: movl <in8=%edx,12+<orig8=288(%rsp) | ||
974 | movl %edx,12+288(%rsp) | ||
975 | |||
976 | # qhasm: ((uint32 *) &orig9)[3] = in9 | ||
977 | # asm 1: movl <in9=int64#4d,12+<orig9=stack128#20 | ||
978 | # asm 2: movl <in9=%ecx,12+<orig9=304(%rsp) | ||
979 | movl %ecx,12+304(%rsp) | ||
980 | |||
981 | # qhasm: in8 += 1 | ||
982 | # asm 1: add $1,<in8=int64#3 | ||
983 | # asm 2: add $1,<in8=%rdx | ||
984 | add $1,%rdx | ||
985 | |||
986 | # qhasm: in9 <<= 32 | ||
987 | # asm 1: shl $32,<in9=int64#4 | ||
988 | # asm 2: shl $32,<in9=%rcx | ||
989 | shl $32,%rcx | ||
990 | |||
991 | # qhasm: in8 += in9 | ||
992 | # asm 1: add <in9=int64#4,<in8=int64#3 | ||
993 | # asm 2: add <in9=%rcx,<in8=%rdx | ||
994 | add %rcx,%rdx | ||
995 | |||
996 | # qhasm: in9 = in8 | ||
997 | # asm 1: mov <in8=int64#3,>in9=int64#4 | ||
998 | # asm 2: mov <in8=%rdx,>in9=%rcx | ||
999 | mov %rdx,%rcx | ||
1000 | |||
1001 | # qhasm: (uint64) in9 >>= 32 | ||
1002 | # asm 1: shr $32,<in9=int64#4 | ||
1003 | # asm 2: shr $32,<in9=%rcx | ||
1004 | shr $32,%rcx | ||
1005 | |||
1006 | # qhasm: ((uint32 *)&x2)[0] = in8 | ||
1007 | # asm 1: movl <in8=int64#3d,>x2=stack128#2 | ||
1008 | # asm 2: movl <in8=%edx,>x2=16(%rsp) | ||
1009 | movl %edx,16(%rsp) | ||
1010 | |||
1011 | # qhasm: ((uint32 *)&x3)[1] = in9 | ||
1012 | # asm 1: movl <in9=int64#4d,4+<x3=stack128#3 | ||
1013 | # asm 2: movl <in9=%ecx,4+<x3=32(%rsp) | ||
1014 | movl %ecx,4+32(%rsp) | ||
1015 | |||
1016 | # qhasm: bytes_backup = bytes | ||
1017 | # asm 1: movq <bytes=int64#6,>bytes_backup=stack64#8 | ||
1018 | # asm 2: movq <bytes=%r9,>bytes_backup=408(%rsp) | ||
1019 | movq %r9,408(%rsp) | ||
1020 | |||
1021 | # qhasm: i = 8 | ||
1022 | # asm 1: mov $8,>i=int64#3 | ||
1023 | # asm 2: mov $8,>i=%rdx | ||
1024 | mov $8,%rdx | ||
1025 | |||
1026 | # qhasm: z5 = orig5 | ||
1027 | # asm 1: movdqa <orig5=stack128#5,>z5=int6464#1 | ||
1028 | # asm 2: movdqa <orig5=64(%rsp),>z5=%xmm0 | ||
1029 | movdqa 64(%rsp),%xmm0 | ||
1030 | |||
1031 | # qhasm: z10 = orig10 | ||
1032 | # asm 1: movdqa <orig10=stack128#6,>z10=int6464#2 | ||
1033 | # asm 2: movdqa <orig10=80(%rsp),>z10=%xmm1 | ||
1034 | movdqa 80(%rsp),%xmm1 | ||
1035 | |||
1036 | # qhasm: z15 = orig15 | ||
1037 | # asm 1: movdqa <orig15=stack128#7,>z15=int6464#3 | ||
1038 | # asm 2: movdqa <orig15=96(%rsp),>z15=%xmm2 | ||
1039 | movdqa 96(%rsp),%xmm2 | ||
1040 | |||
1041 | # qhasm: z14 = orig14 | ||
1042 | # asm 1: movdqa <orig14=stack128#17,>z14=int6464#4 | ||
1043 | # asm 2: movdqa <orig14=256(%rsp),>z14=%xmm3 | ||
1044 | movdqa 256(%rsp),%xmm3 | ||
1045 | |||
1046 | # qhasm: z3 = orig3 | ||
1047 | # asm 1: movdqa <orig3=stack128#18,>z3=int6464#5 | ||
1048 | # asm 2: movdqa <orig3=272(%rsp),>z3=%xmm4 | ||
1049 | movdqa 272(%rsp),%xmm4 | ||
1050 | |||
1051 | # qhasm: z6 = orig6 | ||
1052 | # asm 1: movdqa <orig6=stack128#9,>z6=int6464#6 | ||
1053 | # asm 2: movdqa <orig6=128(%rsp),>z6=%xmm5 | ||
1054 | movdqa 128(%rsp),%xmm5 | ||
1055 | |||
1056 | # qhasm: z11 = orig11 | ||
1057 | # asm 1: movdqa <orig11=stack128#10,>z11=int6464#7 | ||
1058 | # asm 2: movdqa <orig11=144(%rsp),>z11=%xmm6 | ||
1059 | movdqa 144(%rsp),%xmm6 | ||
1060 | |||
1061 | # qhasm: z1 = orig1 | ||
1062 | # asm 1: movdqa <orig1=stack128#12,>z1=int6464#8 | ||
1063 | # asm 2: movdqa <orig1=176(%rsp),>z1=%xmm7 | ||
1064 | movdqa 176(%rsp),%xmm7 | ||
1065 | |||
1066 | # qhasm: z7 = orig7 | ||
1067 | # asm 1: movdqa <orig7=stack128#13,>z7=int6464#9 | ||
1068 | # asm 2: movdqa <orig7=192(%rsp),>z7=%xmm8 | ||
1069 | movdqa 192(%rsp),%xmm8 | ||
1070 | |||
1071 | # qhasm: z13 = orig13 | ||
1072 | # asm 1: movdqa <orig13=stack128#14,>z13=int6464#10 | ||
1073 | # asm 2: movdqa <orig13=208(%rsp),>z13=%xmm9 | ||
1074 | movdqa 208(%rsp),%xmm9 | ||
1075 | |||
1076 | # qhasm: z2 = orig2 | ||
1077 | # asm 1: movdqa <orig2=stack128#15,>z2=int6464#11 | ||
1078 | # asm 2: movdqa <orig2=224(%rsp),>z2=%xmm10 | ||
1079 | movdqa 224(%rsp),%xmm10 | ||
1080 | |||
1081 | # qhasm: z9 = orig9 | ||
1082 | # asm 1: movdqa <orig9=stack128#20,>z9=int6464#12 | ||
1083 | # asm 2: movdqa <orig9=304(%rsp),>z9=%xmm11 | ||
1084 | movdqa 304(%rsp),%xmm11 | ||
1085 | |||
1086 | # qhasm: z0 = orig0 | ||
1087 | # asm 1: movdqa <orig0=stack128#8,>z0=int6464#13 | ||
1088 | # asm 2: movdqa <orig0=112(%rsp),>z0=%xmm12 | ||
1089 | movdqa 112(%rsp),%xmm12 | ||
1090 | |||
1091 | # qhasm: z12 = orig12 | ||
1092 | # asm 1: movdqa <orig12=stack128#11,>z12=int6464#14 | ||
1093 | # asm 2: movdqa <orig12=160(%rsp),>z12=%xmm13 | ||
1094 | movdqa 160(%rsp),%xmm13 | ||
1095 | |||
1096 | # qhasm: z4 = orig4 | ||
1097 | # asm 1: movdqa <orig4=stack128#16,>z4=int6464#15 | ||
1098 | # asm 2: movdqa <orig4=240(%rsp),>z4=%xmm14 | ||
1099 | movdqa 240(%rsp),%xmm14 | ||
1100 | |||
1101 | # qhasm: z8 = orig8 | ||
1102 | # asm 1: movdqa <orig8=stack128#19,>z8=int6464#16 | ||
1103 | # asm 2: movdqa <orig8=288(%rsp),>z8=%xmm15 | ||
1104 | movdqa 288(%rsp),%xmm15 | ||
1105 | |||
1106 | # qhasm: mainloop1: | ||
1107 | ._mainloop1: | ||
1108 | |||
1109 | # qhasm: z10_stack = z10 | ||
1110 | # asm 1: movdqa <z10=int6464#2,>z10_stack=stack128#21 | ||
1111 | # asm 2: movdqa <z10=%xmm1,>z10_stack=320(%rsp) | ||
1112 | movdqa %xmm1,320(%rsp) | ||
1113 | |||
1114 | # qhasm: z15_stack = z15 | ||
1115 | # asm 1: movdqa <z15=int6464#3,>z15_stack=stack128#22 | ||
1116 | # asm 2: movdqa <z15=%xmm2,>z15_stack=336(%rsp) | ||
1117 | movdqa %xmm2,336(%rsp) | ||
1118 | |||
1119 | # qhasm: y4 = z12 | ||
1120 | # asm 1: movdqa <z12=int6464#14,>y4=int6464#2 | ||
1121 | # asm 2: movdqa <z12=%xmm13,>y4=%xmm1 | ||
1122 | movdqa %xmm13,%xmm1 | ||
1123 | |||
1124 | # qhasm: uint32323232 y4 += z0 | ||
1125 | # asm 1: paddd <z0=int6464#13,<y4=int6464#2 | ||
1126 | # asm 2: paddd <z0=%xmm12,<y4=%xmm1 | ||
1127 | paddd %xmm12,%xmm1 | ||
1128 | |||
1129 | # qhasm: r4 = y4 | ||
1130 | # asm 1: movdqa <y4=int6464#2,>r4=int6464#3 | ||
1131 | # asm 2: movdqa <y4=%xmm1,>r4=%xmm2 | ||
1132 | movdqa %xmm1,%xmm2 | ||
1133 | |||
1134 | # qhasm: uint32323232 y4 <<= 7 | ||
1135 | # asm 1: pslld $7,<y4=int6464#2 | ||
1136 | # asm 2: pslld $7,<y4=%xmm1 | ||
1137 | pslld $7,%xmm1 | ||
1138 | |||
1139 | # qhasm: z4 ^= y4 | ||
1140 | # asm 1: pxor <y4=int6464#2,<z4=int6464#15 | ||
1141 | # asm 2: pxor <y4=%xmm1,<z4=%xmm14 | ||
1142 | pxor %xmm1,%xmm14 | ||
1143 | |||
1144 | # qhasm: uint32323232 r4 >>= 25 | ||
1145 | # asm 1: psrld $25,<r4=int6464#3 | ||
1146 | # asm 2: psrld $25,<r4=%xmm2 | ||
1147 | psrld $25,%xmm2 | ||
1148 | |||
1149 | # qhasm: z4 ^= r4 | ||
1150 | # asm 1: pxor <r4=int6464#3,<z4=int6464#15 | ||
1151 | # asm 2: pxor <r4=%xmm2,<z4=%xmm14 | ||
1152 | pxor %xmm2,%xmm14 | ||
1153 | |||
1154 | # qhasm: y9 = z1 | ||
1155 | # asm 1: movdqa <z1=int6464#8,>y9=int6464#2 | ||
1156 | # asm 2: movdqa <z1=%xmm7,>y9=%xmm1 | ||
1157 | movdqa %xmm7,%xmm1 | ||
1158 | |||
1159 | # qhasm: uint32323232 y9 += z5 | ||
1160 | # asm 1: paddd <z5=int6464#1,<y9=int6464#2 | ||
1161 | # asm 2: paddd <z5=%xmm0,<y9=%xmm1 | ||
1162 | paddd %xmm0,%xmm1 | ||
1163 | |||
1164 | # qhasm: r9 = y9 | ||
1165 | # asm 1: movdqa <y9=int6464#2,>r9=int6464#3 | ||
1166 | # asm 2: movdqa <y9=%xmm1,>r9=%xmm2 | ||
1167 | movdqa %xmm1,%xmm2 | ||
1168 | |||
1169 | # qhasm: uint32323232 y9 <<= 7 | ||
1170 | # asm 1: pslld $7,<y9=int6464#2 | ||
1171 | # asm 2: pslld $7,<y9=%xmm1 | ||
1172 | pslld $7,%xmm1 | ||
1173 | |||
1174 | # qhasm: z9 ^= y9 | ||
1175 | # asm 1: pxor <y9=int6464#2,<z9=int6464#12 | ||
1176 | # asm 2: pxor <y9=%xmm1,<z9=%xmm11 | ||
1177 | pxor %xmm1,%xmm11 | ||
1178 | |||
1179 | # qhasm: uint32323232 r9 >>= 25 | ||
1180 | # asm 1: psrld $25,<r9=int6464#3 | ||
1181 | # asm 2: psrld $25,<r9=%xmm2 | ||
1182 | psrld $25,%xmm2 | ||
1183 | |||
1184 | # qhasm: z9 ^= r9 | ||
1185 | # asm 1: pxor <r9=int6464#3,<z9=int6464#12 | ||
1186 | # asm 2: pxor <r9=%xmm2,<z9=%xmm11 | ||
1187 | pxor %xmm2,%xmm11 | ||
1188 | |||
1189 | # qhasm: y8 = z0 | ||
1190 | # asm 1: movdqa <z0=int6464#13,>y8=int6464#2 | ||
1191 | # asm 2: movdqa <z0=%xmm12,>y8=%xmm1 | ||
1192 | movdqa %xmm12,%xmm1 | ||
1193 | |||
1194 | # qhasm: uint32323232 y8 += z4 | ||
1195 | # asm 1: paddd <z4=int6464#15,<y8=int6464#2 | ||
1196 | # asm 2: paddd <z4=%xmm14,<y8=%xmm1 | ||
1197 | paddd %xmm14,%xmm1 | ||
1198 | |||
1199 | # qhasm: r8 = y8 | ||
1200 | # asm 1: movdqa <y8=int6464#2,>r8=int6464#3 | ||
1201 | # asm 2: movdqa <y8=%xmm1,>r8=%xmm2 | ||
1202 | movdqa %xmm1,%xmm2 | ||
1203 | |||
1204 | # qhasm: uint32323232 y8 <<= 9 | ||
1205 | # asm 1: pslld $9,<y8=int6464#2 | ||
1206 | # asm 2: pslld $9,<y8=%xmm1 | ||
1207 | pslld $9,%xmm1 | ||
1208 | |||
1209 | # qhasm: z8 ^= y8 | ||
1210 | # asm 1: pxor <y8=int6464#2,<z8=int6464#16 | ||
1211 | # asm 2: pxor <y8=%xmm1,<z8=%xmm15 | ||
1212 | pxor %xmm1,%xmm15 | ||
1213 | |||
1214 | # qhasm: uint32323232 r8 >>= 23 | ||
1215 | # asm 1: psrld $23,<r8=int6464#3 | ||
1216 | # asm 2: psrld $23,<r8=%xmm2 | ||
1217 | psrld $23,%xmm2 | ||
1218 | |||
1219 | # qhasm: z8 ^= r8 | ||
1220 | # asm 1: pxor <r8=int6464#3,<z8=int6464#16 | ||
1221 | # asm 2: pxor <r8=%xmm2,<z8=%xmm15 | ||
1222 | pxor %xmm2,%xmm15 | ||
1223 | |||
1224 | # qhasm: y13 = z5 | ||
1225 | # asm 1: movdqa <z5=int6464#1,>y13=int6464#2 | ||
1226 | # asm 2: movdqa <z5=%xmm0,>y13=%xmm1 | ||
1227 | movdqa %xmm0,%xmm1 | ||
1228 | |||
1229 | # qhasm: uint32323232 y13 += z9 | ||
1230 | # asm 1: paddd <z9=int6464#12,<y13=int6464#2 | ||
1231 | # asm 2: paddd <z9=%xmm11,<y13=%xmm1 | ||
1232 | paddd %xmm11,%xmm1 | ||
1233 | |||
1234 | # qhasm: r13 = y13 | ||
1235 | # asm 1: movdqa <y13=int6464#2,>r13=int6464#3 | ||
1236 | # asm 2: movdqa <y13=%xmm1,>r13=%xmm2 | ||
1237 | movdqa %xmm1,%xmm2 | ||
1238 | |||
1239 | # qhasm: uint32323232 y13 <<= 9 | ||
1240 | # asm 1: pslld $9,<y13=int6464#2 | ||
1241 | # asm 2: pslld $9,<y13=%xmm1 | ||
1242 | pslld $9,%xmm1 | ||
1243 | |||
1244 | # qhasm: z13 ^= y13 | ||
1245 | # asm 1: pxor <y13=int6464#2,<z13=int6464#10 | ||
1246 | # asm 2: pxor <y13=%xmm1,<z13=%xmm9 | ||
1247 | pxor %xmm1,%xmm9 | ||
1248 | |||
1249 | # qhasm: uint32323232 r13 >>= 23 | ||
1250 | # asm 1: psrld $23,<r13=int6464#3 | ||
1251 | # asm 2: psrld $23,<r13=%xmm2 | ||
1252 | psrld $23,%xmm2 | ||
1253 | |||
1254 | # qhasm: z13 ^= r13 | ||
1255 | # asm 1: pxor <r13=int6464#3,<z13=int6464#10 | ||
1256 | # asm 2: pxor <r13=%xmm2,<z13=%xmm9 | ||
1257 | pxor %xmm2,%xmm9 | ||
1258 | |||
1259 | # qhasm: y12 = z4 | ||
1260 | # asm 1: movdqa <z4=int6464#15,>y12=int6464#2 | ||
1261 | # asm 2: movdqa <z4=%xmm14,>y12=%xmm1 | ||
1262 | movdqa %xmm14,%xmm1 | ||
1263 | |||
1264 | # qhasm: uint32323232 y12 += z8 | ||
1265 | # asm 1: paddd <z8=int6464#16,<y12=int6464#2 | ||
1266 | # asm 2: paddd <z8=%xmm15,<y12=%xmm1 | ||
1267 | paddd %xmm15,%xmm1 | ||
1268 | |||
1269 | # qhasm: r12 = y12 | ||
1270 | # asm 1: movdqa <y12=int6464#2,>r12=int6464#3 | ||
1271 | # asm 2: movdqa <y12=%xmm1,>r12=%xmm2 | ||
1272 | movdqa %xmm1,%xmm2 | ||
1273 | |||
1274 | # qhasm: uint32323232 y12 <<= 13 | ||
1275 | # asm 1: pslld $13,<y12=int6464#2 | ||
1276 | # asm 2: pslld $13,<y12=%xmm1 | ||
1277 | pslld $13,%xmm1 | ||
1278 | |||
1279 | # qhasm: z12 ^= y12 | ||
1280 | # asm 1: pxor <y12=int6464#2,<z12=int6464#14 | ||
1281 | # asm 2: pxor <y12=%xmm1,<z12=%xmm13 | ||
1282 | pxor %xmm1,%xmm13 | ||
1283 | |||
1284 | # qhasm: uint32323232 r12 >>= 19 | ||
1285 | # asm 1: psrld $19,<r12=int6464#3 | ||
1286 | # asm 2: psrld $19,<r12=%xmm2 | ||
1287 | psrld $19,%xmm2 | ||
1288 | |||
1289 | # qhasm: z12 ^= r12 | ||
1290 | # asm 1: pxor <r12=int6464#3,<z12=int6464#14 | ||
1291 | # asm 2: pxor <r12=%xmm2,<z12=%xmm13 | ||
1292 | pxor %xmm2,%xmm13 | ||
1293 | |||
1294 | # qhasm: y1 = z9 | ||
1295 | # asm 1: movdqa <z9=int6464#12,>y1=int6464#2 | ||
1296 | # asm 2: movdqa <z9=%xmm11,>y1=%xmm1 | ||
1297 | movdqa %xmm11,%xmm1 | ||
1298 | |||
1299 | # qhasm: uint32323232 y1 += z13 | ||
1300 | # asm 1: paddd <z13=int6464#10,<y1=int6464#2 | ||
1301 | # asm 2: paddd <z13=%xmm9,<y1=%xmm1 | ||
1302 | paddd %xmm9,%xmm1 | ||
1303 | |||
1304 | # qhasm: r1 = y1 | ||
1305 | # asm 1: movdqa <y1=int6464#2,>r1=int6464#3 | ||
1306 | # asm 2: movdqa <y1=%xmm1,>r1=%xmm2 | ||
1307 | movdqa %xmm1,%xmm2 | ||
1308 | |||
1309 | # qhasm: uint32323232 y1 <<= 13 | ||
1310 | # asm 1: pslld $13,<y1=int6464#2 | ||
1311 | # asm 2: pslld $13,<y1=%xmm1 | ||
1312 | pslld $13,%xmm1 | ||
1313 | |||
1314 | # qhasm: z1 ^= y1 | ||
1315 | # asm 1: pxor <y1=int6464#2,<z1=int6464#8 | ||
1316 | # asm 2: pxor <y1=%xmm1,<z1=%xmm7 | ||
1317 | pxor %xmm1,%xmm7 | ||
1318 | |||
1319 | # qhasm: uint32323232 r1 >>= 19 | ||
1320 | # asm 1: psrld $19,<r1=int6464#3 | ||
1321 | # asm 2: psrld $19,<r1=%xmm2 | ||
1322 | psrld $19,%xmm2 | ||
1323 | |||
1324 | # qhasm: z1 ^= r1 | ||
1325 | # asm 1: pxor <r1=int6464#3,<z1=int6464#8 | ||
1326 | # asm 2: pxor <r1=%xmm2,<z1=%xmm7 | ||
1327 | pxor %xmm2,%xmm7 | ||
1328 | |||
1329 | # qhasm: y0 = z8 | ||
1330 | # asm 1: movdqa <z8=int6464#16,>y0=int6464#2 | ||
1331 | # asm 2: movdqa <z8=%xmm15,>y0=%xmm1 | ||
1332 | movdqa %xmm15,%xmm1 | ||
1333 | |||
1334 | # qhasm: uint32323232 y0 += z12 | ||
1335 | # asm 1: paddd <z12=int6464#14,<y0=int6464#2 | ||
1336 | # asm 2: paddd <z12=%xmm13,<y0=%xmm1 | ||
1337 | paddd %xmm13,%xmm1 | ||
1338 | |||
1339 | # qhasm: r0 = y0 | ||
1340 | # asm 1: movdqa <y0=int6464#2,>r0=int6464#3 | ||
1341 | # asm 2: movdqa <y0=%xmm1,>r0=%xmm2 | ||
1342 | movdqa %xmm1,%xmm2 | ||
1343 | |||
1344 | # qhasm: uint32323232 y0 <<= 18 | ||
1345 | # asm 1: pslld $18,<y0=int6464#2 | ||
1346 | # asm 2: pslld $18,<y0=%xmm1 | ||
1347 | pslld $18,%xmm1 | ||
1348 | |||
1349 | # qhasm: z0 ^= y0 | ||
1350 | # asm 1: pxor <y0=int6464#2,<z0=int6464#13 | ||
1351 | # asm 2: pxor <y0=%xmm1,<z0=%xmm12 | ||
1352 | pxor %xmm1,%xmm12 | ||
1353 | |||
1354 | # qhasm: uint32323232 r0 >>= 14 | ||
1355 | # asm 1: psrld $14,<r0=int6464#3 | ||
1356 | # asm 2: psrld $14,<r0=%xmm2 | ||
1357 | psrld $14,%xmm2 | ||
1358 | |||
1359 | # qhasm: z0 ^= r0 | ||
1360 | # asm 1: pxor <r0=int6464#3,<z0=int6464#13 | ||
1361 | # asm 2: pxor <r0=%xmm2,<z0=%xmm12 | ||
1362 | pxor %xmm2,%xmm12 | ||
1363 | |||
1364 | # qhasm: z10 = z10_stack | ||
1365 | # asm 1: movdqa <z10_stack=stack128#21,>z10=int6464#2 | ||
1366 | # asm 2: movdqa <z10_stack=320(%rsp),>z10=%xmm1 | ||
1367 | movdqa 320(%rsp),%xmm1 | ||
1368 | |||
1369 | # qhasm: z0_stack = z0 | ||
1370 | # asm 1: movdqa <z0=int6464#13,>z0_stack=stack128#21 | ||
1371 | # asm 2: movdqa <z0=%xmm12,>z0_stack=320(%rsp) | ||
1372 | movdqa %xmm12,320(%rsp) | ||
1373 | |||
1374 | # qhasm: y5 = z13 | ||
1375 | # asm 1: movdqa <z13=int6464#10,>y5=int6464#3 | ||
1376 | # asm 2: movdqa <z13=%xmm9,>y5=%xmm2 | ||
1377 | movdqa %xmm9,%xmm2 | ||
1378 | |||
1379 | # qhasm: uint32323232 y5 += z1 | ||
1380 | # asm 1: paddd <z1=int6464#8,<y5=int6464#3 | ||
1381 | # asm 2: paddd <z1=%xmm7,<y5=%xmm2 | ||
1382 | paddd %xmm7,%xmm2 | ||
1383 | |||
1384 | # qhasm: r5 = y5 | ||
1385 | # asm 1: movdqa <y5=int6464#3,>r5=int6464#13 | ||
1386 | # asm 2: movdqa <y5=%xmm2,>r5=%xmm12 | ||
1387 | movdqa %xmm2,%xmm12 | ||
1388 | |||
1389 | # qhasm: uint32323232 y5 <<= 18 | ||
1390 | # asm 1: pslld $18,<y5=int6464#3 | ||
1391 | # asm 2: pslld $18,<y5=%xmm2 | ||
1392 | pslld $18,%xmm2 | ||
1393 | |||
1394 | # qhasm: z5 ^= y5 | ||
1395 | # asm 1: pxor <y5=int6464#3,<z5=int6464#1 | ||
1396 | # asm 2: pxor <y5=%xmm2,<z5=%xmm0 | ||
1397 | pxor %xmm2,%xmm0 | ||
1398 | |||
1399 | # qhasm: uint32323232 r5 >>= 14 | ||
1400 | # asm 1: psrld $14,<r5=int6464#13 | ||
1401 | # asm 2: psrld $14,<r5=%xmm12 | ||
1402 | psrld $14,%xmm12 | ||
1403 | |||
1404 | # qhasm: z5 ^= r5 | ||
1405 | # asm 1: pxor <r5=int6464#13,<z5=int6464#1 | ||
1406 | # asm 2: pxor <r5=%xmm12,<z5=%xmm0 | ||
1407 | pxor %xmm12,%xmm0 | ||
1408 | |||
1409 | # qhasm: y14 = z6 | ||
1410 | # asm 1: movdqa <z6=int6464#6,>y14=int6464#3 | ||
1411 | # asm 2: movdqa <z6=%xmm5,>y14=%xmm2 | ||
1412 | movdqa %xmm5,%xmm2 | ||
1413 | |||
1414 | # qhasm: uint32323232 y14 += z10 | ||
1415 | # asm 1: paddd <z10=int6464#2,<y14=int6464#3 | ||
1416 | # asm 2: paddd <z10=%xmm1,<y14=%xmm2 | ||
1417 | paddd %xmm1,%xmm2 | ||
1418 | |||
1419 | # qhasm: r14 = y14 | ||
1420 | # asm 1: movdqa <y14=int6464#3,>r14=int6464#13 | ||
1421 | # asm 2: movdqa <y14=%xmm2,>r14=%xmm12 | ||
1422 | movdqa %xmm2,%xmm12 | ||
1423 | |||
1424 | # qhasm: uint32323232 y14 <<= 7 | ||
1425 | # asm 1: pslld $7,<y14=int6464#3 | ||
1426 | # asm 2: pslld $7,<y14=%xmm2 | ||
1427 | pslld $7,%xmm2 | ||
1428 | |||
1429 | # qhasm: z14 ^= y14 | ||
1430 | # asm 1: pxor <y14=int6464#3,<z14=int6464#4 | ||
1431 | # asm 2: pxor <y14=%xmm2,<z14=%xmm3 | ||
1432 | pxor %xmm2,%xmm3 | ||
1433 | |||
1434 | # qhasm: uint32323232 r14 >>= 25 | ||
1435 | # asm 1: psrld $25,<r14=int6464#13 | ||
1436 | # asm 2: psrld $25,<r14=%xmm12 | ||
1437 | psrld $25,%xmm12 | ||
1438 | |||
1439 | # qhasm: z14 ^= r14 | ||
1440 | # asm 1: pxor <r14=int6464#13,<z14=int6464#4 | ||
1441 | # asm 2: pxor <r14=%xmm12,<z14=%xmm3 | ||
1442 | pxor %xmm12,%xmm3 | ||
1443 | |||
1444 | # qhasm: z15 = z15_stack | ||
1445 | # asm 1: movdqa <z15_stack=stack128#22,>z15=int6464#3 | ||
1446 | # asm 2: movdqa <z15_stack=336(%rsp),>z15=%xmm2 | ||
1447 | movdqa 336(%rsp),%xmm2 | ||
1448 | |||
1449 | # qhasm: z5_stack = z5 | ||
1450 | # asm 1: movdqa <z5=int6464#1,>z5_stack=stack128#22 | ||
1451 | # asm 2: movdqa <z5=%xmm0,>z5_stack=336(%rsp) | ||
1452 | movdqa %xmm0,336(%rsp) | ||
1453 | |||
1454 | # qhasm: y3 = z11 | ||
1455 | # asm 1: movdqa <z11=int6464#7,>y3=int6464#1 | ||
1456 | # asm 2: movdqa <z11=%xmm6,>y3=%xmm0 | ||
1457 | movdqa %xmm6,%xmm0 | ||
1458 | |||
1459 | # qhasm: uint32323232 y3 += z15 | ||
1460 | # asm 1: paddd <z15=int6464#3,<y3=int6464#1 | ||
1461 | # asm 2: paddd <z15=%xmm2,<y3=%xmm0 | ||
1462 | paddd %xmm2,%xmm0 | ||
1463 | |||
1464 | # qhasm: r3 = y3 | ||
1465 | # asm 1: movdqa <y3=int6464#1,>r3=int6464#13 | ||
1466 | # asm 2: movdqa <y3=%xmm0,>r3=%xmm12 | ||
1467 | movdqa %xmm0,%xmm12 | ||
1468 | |||
1469 | # qhasm: uint32323232 y3 <<= 7 | ||
1470 | # asm 1: pslld $7,<y3=int6464#1 | ||
1471 | # asm 2: pslld $7,<y3=%xmm0 | ||
1472 | pslld $7,%xmm0 | ||
1473 | |||
1474 | # qhasm: z3 ^= y3 | ||
1475 | # asm 1: pxor <y3=int6464#1,<z3=int6464#5 | ||
1476 | # asm 2: pxor <y3=%xmm0,<z3=%xmm4 | ||
1477 | pxor %xmm0,%xmm4 | ||
1478 | |||
1479 | # qhasm: uint32323232 r3 >>= 25 | ||
1480 | # asm 1: psrld $25,<r3=int6464#13 | ||
1481 | # asm 2: psrld $25,<r3=%xmm12 | ||
1482 | psrld $25,%xmm12 | ||
1483 | |||
1484 | # qhasm: z3 ^= r3 | ||
1485 | # asm 1: pxor <r3=int6464#13,<z3=int6464#5 | ||
1486 | # asm 2: pxor <r3=%xmm12,<z3=%xmm4 | ||
1487 | pxor %xmm12,%xmm4 | ||
1488 | |||
1489 | # qhasm: y2 = z10 | ||
1490 | # asm 1: movdqa <z10=int6464#2,>y2=int6464#1 | ||
1491 | # asm 2: movdqa <z10=%xmm1,>y2=%xmm0 | ||
1492 | movdqa %xmm1,%xmm0 | ||
1493 | |||
1494 | # qhasm: uint32323232 y2 += z14 | ||
1495 | # asm 1: paddd <z14=int6464#4,<y2=int6464#1 | ||
1496 | # asm 2: paddd <z14=%xmm3,<y2=%xmm0 | ||
1497 | paddd %xmm3,%xmm0 | ||
1498 | |||
1499 | # qhasm: r2 = y2 | ||
1500 | # asm 1: movdqa <y2=int6464#1,>r2=int6464#13 | ||
1501 | # asm 2: movdqa <y2=%xmm0,>r2=%xmm12 | ||
1502 | movdqa %xmm0,%xmm12 | ||
1503 | |||
1504 | # qhasm: uint32323232 y2 <<= 9 | ||
1505 | # asm 1: pslld $9,<y2=int6464#1 | ||
1506 | # asm 2: pslld $9,<y2=%xmm0 | ||
1507 | pslld $9,%xmm0 | ||
1508 | |||
1509 | # qhasm: z2 ^= y2 | ||
1510 | # asm 1: pxor <y2=int6464#1,<z2=int6464#11 | ||
1511 | # asm 2: pxor <y2=%xmm0,<z2=%xmm10 | ||
1512 | pxor %xmm0,%xmm10 | ||
1513 | |||
1514 | # qhasm: uint32323232 r2 >>= 23 | ||
1515 | # asm 1: psrld $23,<r2=int6464#13 | ||
1516 | # asm 2: psrld $23,<r2=%xmm12 | ||
1517 | psrld $23,%xmm12 | ||
1518 | |||
1519 | # qhasm: z2 ^= r2 | ||
1520 | # asm 1: pxor <r2=int6464#13,<z2=int6464#11 | ||
1521 | # asm 2: pxor <r2=%xmm12,<z2=%xmm10 | ||
1522 | pxor %xmm12,%xmm10 | ||
1523 | |||
1524 | # qhasm: y7 = z15 | ||
1525 | # asm 1: movdqa <z15=int6464#3,>y7=int6464#1 | ||
1526 | # asm 2: movdqa <z15=%xmm2,>y7=%xmm0 | ||
1527 | movdqa %xmm2,%xmm0 | ||
1528 | |||
1529 | # qhasm: uint32323232 y7 += z3 | ||
1530 | # asm 1: paddd <z3=int6464#5,<y7=int6464#1 | ||
1531 | # asm 2: paddd <z3=%xmm4,<y7=%xmm0 | ||
1532 | paddd %xmm4,%xmm0 | ||
1533 | |||
1534 | # qhasm: r7 = y7 | ||
1535 | # asm 1: movdqa <y7=int6464#1,>r7=int6464#13 | ||
1536 | # asm 2: movdqa <y7=%xmm0,>r7=%xmm12 | ||
1537 | movdqa %xmm0,%xmm12 | ||
1538 | |||
1539 | # qhasm: uint32323232 y7 <<= 9 | ||
1540 | # asm 1: pslld $9,<y7=int6464#1 | ||
1541 | # asm 2: pslld $9,<y7=%xmm0 | ||
1542 | pslld $9,%xmm0 | ||
1543 | |||
1544 | # qhasm: z7 ^= y7 | ||
1545 | # asm 1: pxor <y7=int6464#1,<z7=int6464#9 | ||
1546 | # asm 2: pxor <y7=%xmm0,<z7=%xmm8 | ||
1547 | pxor %xmm0,%xmm8 | ||
1548 | |||
1549 | # qhasm: uint32323232 r7 >>= 23 | ||
1550 | # asm 1: psrld $23,<r7=int6464#13 | ||
1551 | # asm 2: psrld $23,<r7=%xmm12 | ||
1552 | psrld $23,%xmm12 | ||
1553 | |||
1554 | # qhasm: z7 ^= r7 | ||
1555 | # asm 1: pxor <r7=int6464#13,<z7=int6464#9 | ||
1556 | # asm 2: pxor <r7=%xmm12,<z7=%xmm8 | ||
1557 | pxor %xmm12,%xmm8 | ||
1558 | |||
1559 | # qhasm: y6 = z14 | ||
1560 | # asm 1: movdqa <z14=int6464#4,>y6=int6464#1 | ||
1561 | # asm 2: movdqa <z14=%xmm3,>y6=%xmm0 | ||
1562 | movdqa %xmm3,%xmm0 | ||
1563 | |||
1564 | # qhasm: uint32323232 y6 += z2 | ||
1565 | # asm 1: paddd <z2=int6464#11,<y6=int6464#1 | ||
1566 | # asm 2: paddd <z2=%xmm10,<y6=%xmm0 | ||
1567 | paddd %xmm10,%xmm0 | ||
1568 | |||
1569 | # qhasm: r6 = y6 | ||
1570 | # asm 1: movdqa <y6=int6464#1,>r6=int6464#13 | ||
1571 | # asm 2: movdqa <y6=%xmm0,>r6=%xmm12 | ||
1572 | movdqa %xmm0,%xmm12 | ||
1573 | |||
1574 | # qhasm: uint32323232 y6 <<= 13 | ||
1575 | # asm 1: pslld $13,<y6=int6464#1 | ||
1576 | # asm 2: pslld $13,<y6=%xmm0 | ||
1577 | pslld $13,%xmm0 | ||
1578 | |||
1579 | # qhasm: z6 ^= y6 | ||
1580 | # asm 1: pxor <y6=int6464#1,<z6=int6464#6 | ||
1581 | # asm 2: pxor <y6=%xmm0,<z6=%xmm5 | ||
1582 | pxor %xmm0,%xmm5 | ||
1583 | |||
1584 | # qhasm: uint32323232 r6 >>= 19 | ||
1585 | # asm 1: psrld $19,<r6=int6464#13 | ||
1586 | # asm 2: psrld $19,<r6=%xmm12 | ||
1587 | psrld $19,%xmm12 | ||
1588 | |||
1589 | # qhasm: z6 ^= r6 | ||
1590 | # asm 1: pxor <r6=int6464#13,<z6=int6464#6 | ||
1591 | # asm 2: pxor <r6=%xmm12,<z6=%xmm5 | ||
1592 | pxor %xmm12,%xmm5 | ||
1593 | |||
1594 | # qhasm: y11 = z3 | ||
1595 | # asm 1: movdqa <z3=int6464#5,>y11=int6464#1 | ||
1596 | # asm 2: movdqa <z3=%xmm4,>y11=%xmm0 | ||
1597 | movdqa %xmm4,%xmm0 | ||
1598 | |||
1599 | # qhasm: uint32323232 y11 += z7 | ||
1600 | # asm 1: paddd <z7=int6464#9,<y11=int6464#1 | ||
1601 | # asm 2: paddd <z7=%xmm8,<y11=%xmm0 | ||
1602 | paddd %xmm8,%xmm0 | ||
1603 | |||
1604 | # qhasm: r11 = y11 | ||
1605 | # asm 1: movdqa <y11=int6464#1,>r11=int6464#13 | ||
1606 | # asm 2: movdqa <y11=%xmm0,>r11=%xmm12 | ||
1607 | movdqa %xmm0,%xmm12 | ||
1608 | |||
1609 | # qhasm: uint32323232 y11 <<= 13 | ||
1610 | # asm 1: pslld $13,<y11=int6464#1 | ||
1611 | # asm 2: pslld $13,<y11=%xmm0 | ||
1612 | pslld $13,%xmm0 | ||
1613 | |||
1614 | # qhasm: z11 ^= y11 | ||
1615 | # asm 1: pxor <y11=int6464#1,<z11=int6464#7 | ||
1616 | # asm 2: pxor <y11=%xmm0,<z11=%xmm6 | ||
1617 | pxor %xmm0,%xmm6 | ||
1618 | |||
1619 | # qhasm: uint32323232 r11 >>= 19 | ||
1620 | # asm 1: psrld $19,<r11=int6464#13 | ||
1621 | # asm 2: psrld $19,<r11=%xmm12 | ||
1622 | psrld $19,%xmm12 | ||
1623 | |||
1624 | # qhasm: z11 ^= r11 | ||
1625 | # asm 1: pxor <r11=int6464#13,<z11=int6464#7 | ||
1626 | # asm 2: pxor <r11=%xmm12,<z11=%xmm6 | ||
1627 | pxor %xmm12,%xmm6 | ||
1628 | |||
1629 | # qhasm: y10 = z2 | ||
1630 | # asm 1: movdqa <z2=int6464#11,>y10=int6464#1 | ||
1631 | # asm 2: movdqa <z2=%xmm10,>y10=%xmm0 | ||
1632 | movdqa %xmm10,%xmm0 | ||
1633 | |||
1634 | # qhasm: uint32323232 y10 += z6 | ||
1635 | # asm 1: paddd <z6=int6464#6,<y10=int6464#1 | ||
1636 | # asm 2: paddd <z6=%xmm5,<y10=%xmm0 | ||
1637 | paddd %xmm5,%xmm0 | ||
1638 | |||
1639 | # qhasm: r10 = y10 | ||
1640 | # asm 1: movdqa <y10=int6464#1,>r10=int6464#13 | ||
1641 | # asm 2: movdqa <y10=%xmm0,>r10=%xmm12 | ||
1642 | movdqa %xmm0,%xmm12 | ||
1643 | |||
1644 | # qhasm: uint32323232 y10 <<= 18 | ||
1645 | # asm 1: pslld $18,<y10=int6464#1 | ||
1646 | # asm 2: pslld $18,<y10=%xmm0 | ||
1647 | pslld $18,%xmm0 | ||
1648 | |||
1649 | # qhasm: z10 ^= y10 | ||
1650 | # asm 1: pxor <y10=int6464#1,<z10=int6464#2 | ||
1651 | # asm 2: pxor <y10=%xmm0,<z10=%xmm1 | ||
1652 | pxor %xmm0,%xmm1 | ||
1653 | |||
1654 | # qhasm: uint32323232 r10 >>= 14 | ||
1655 | # asm 1: psrld $14,<r10=int6464#13 | ||
1656 | # asm 2: psrld $14,<r10=%xmm12 | ||
1657 | psrld $14,%xmm12 | ||
1658 | |||
1659 | # qhasm: z10 ^= r10 | ||
1660 | # asm 1: pxor <r10=int6464#13,<z10=int6464#2 | ||
1661 | # asm 2: pxor <r10=%xmm12,<z10=%xmm1 | ||
1662 | pxor %xmm12,%xmm1 | ||
1663 | |||
1664 | # qhasm: z0 = z0_stack | ||
1665 | # asm 1: movdqa <z0_stack=stack128#21,>z0=int6464#1 | ||
1666 | # asm 2: movdqa <z0_stack=320(%rsp),>z0=%xmm0 | ||
1667 | movdqa 320(%rsp),%xmm0 | ||
1668 | |||
1669 | # qhasm: z10_stack = z10 | ||
1670 | # asm 1: movdqa <z10=int6464#2,>z10_stack=stack128#21 | ||
1671 | # asm 2: movdqa <z10=%xmm1,>z10_stack=320(%rsp) | ||
1672 | movdqa %xmm1,320(%rsp) | ||
1673 | |||
1674 | # qhasm: y1 = z3 | ||
1675 | # asm 1: movdqa <z3=int6464#5,>y1=int6464#2 | ||
1676 | # asm 2: movdqa <z3=%xmm4,>y1=%xmm1 | ||
1677 | movdqa %xmm4,%xmm1 | ||
1678 | |||
1679 | # qhasm: uint32323232 y1 += z0 | ||
1680 | # asm 1: paddd <z0=int6464#1,<y1=int6464#2 | ||
1681 | # asm 2: paddd <z0=%xmm0,<y1=%xmm1 | ||
1682 | paddd %xmm0,%xmm1 | ||
1683 | |||
1684 | # qhasm: r1 = y1 | ||
1685 | # asm 1: movdqa <y1=int6464#2,>r1=int6464#13 | ||
1686 | # asm 2: movdqa <y1=%xmm1,>r1=%xmm12 | ||
1687 | movdqa %xmm1,%xmm12 | ||
1688 | |||
1689 | # qhasm: uint32323232 y1 <<= 7 | ||
1690 | # asm 1: pslld $7,<y1=int6464#2 | ||
1691 | # asm 2: pslld $7,<y1=%xmm1 | ||
1692 | pslld $7,%xmm1 | ||
1693 | |||
1694 | # qhasm: z1 ^= y1 | ||
1695 | # asm 1: pxor <y1=int6464#2,<z1=int6464#8 | ||
1696 | # asm 2: pxor <y1=%xmm1,<z1=%xmm7 | ||
1697 | pxor %xmm1,%xmm7 | ||
1698 | |||
1699 | # qhasm: uint32323232 r1 >>= 25 | ||
1700 | # asm 1: psrld $25,<r1=int6464#13 | ||
1701 | # asm 2: psrld $25,<r1=%xmm12 | ||
1702 | psrld $25,%xmm12 | ||
1703 | |||
1704 | # qhasm: z1 ^= r1 | ||
1705 | # asm 1: pxor <r1=int6464#13,<z1=int6464#8 | ||
1706 | # asm 2: pxor <r1=%xmm12,<z1=%xmm7 | ||
1707 | pxor %xmm12,%xmm7 | ||
1708 | |||
1709 | # qhasm: y15 = z7 | ||
1710 | # asm 1: movdqa <z7=int6464#9,>y15=int6464#2 | ||
1711 | # asm 2: movdqa <z7=%xmm8,>y15=%xmm1 | ||
1712 | movdqa %xmm8,%xmm1 | ||
1713 | |||
1714 | # qhasm: uint32323232 y15 += z11 | ||
1715 | # asm 1: paddd <z11=int6464#7,<y15=int6464#2 | ||
1716 | # asm 2: paddd <z11=%xmm6,<y15=%xmm1 | ||
1717 | paddd %xmm6,%xmm1 | ||
1718 | |||
1719 | # qhasm: r15 = y15 | ||
1720 | # asm 1: movdqa <y15=int6464#2,>r15=int6464#13 | ||
1721 | # asm 2: movdqa <y15=%xmm1,>r15=%xmm12 | ||
1722 | movdqa %xmm1,%xmm12 | ||
1723 | |||
1724 | # qhasm: uint32323232 y15 <<= 18 | ||
1725 | # asm 1: pslld $18,<y15=int6464#2 | ||
1726 | # asm 2: pslld $18,<y15=%xmm1 | ||
1727 | pslld $18,%xmm1 | ||
1728 | |||
1729 | # qhasm: z15 ^= y15 | ||
1730 | # asm 1: pxor <y15=int6464#2,<z15=int6464#3 | ||
1731 | # asm 2: pxor <y15=%xmm1,<z15=%xmm2 | ||
1732 | pxor %xmm1,%xmm2 | ||
1733 | |||
1734 | # qhasm: uint32323232 r15 >>= 14 | ||
1735 | # asm 1: psrld $14,<r15=int6464#13 | ||
1736 | # asm 2: psrld $14,<r15=%xmm12 | ||
1737 | psrld $14,%xmm12 | ||
1738 | |||
1739 | # qhasm: z15 ^= r15 | ||
1740 | # asm 1: pxor <r15=int6464#13,<z15=int6464#3 | ||
1741 | # asm 2: pxor <r15=%xmm12,<z15=%xmm2 | ||
1742 | pxor %xmm12,%xmm2 | ||
1743 | |||
1744 | # qhasm: z5 = z5_stack | ||
1745 | # asm 1: movdqa <z5_stack=stack128#22,>z5=int6464#13 | ||
1746 | # asm 2: movdqa <z5_stack=336(%rsp),>z5=%xmm12 | ||
1747 | movdqa 336(%rsp),%xmm12 | ||
1748 | |||
1749 | # qhasm: z15_stack = z15 | ||
1750 | # asm 1: movdqa <z15=int6464#3,>z15_stack=stack128#22 | ||
1751 | # asm 2: movdqa <z15=%xmm2,>z15_stack=336(%rsp) | ||
1752 | movdqa %xmm2,336(%rsp) | ||
1753 | |||
1754 | # qhasm: y6 = z4 | ||
1755 | # asm 1: movdqa <z4=int6464#15,>y6=int6464#2 | ||
1756 | # asm 2: movdqa <z4=%xmm14,>y6=%xmm1 | ||
1757 | movdqa %xmm14,%xmm1 | ||
1758 | |||
1759 | # qhasm: uint32323232 y6 += z5 | ||
1760 | # asm 1: paddd <z5=int6464#13,<y6=int6464#2 | ||
1761 | # asm 2: paddd <z5=%xmm12,<y6=%xmm1 | ||
1762 | paddd %xmm12,%xmm1 | ||
1763 | |||
1764 | # qhasm: r6 = y6 | ||
1765 | # asm 1: movdqa <y6=int6464#2,>r6=int6464#3 | ||
1766 | # asm 2: movdqa <y6=%xmm1,>r6=%xmm2 | ||
1767 | movdqa %xmm1,%xmm2 | ||
1768 | |||
1769 | # qhasm: uint32323232 y6 <<= 7 | ||
1770 | # asm 1: pslld $7,<y6=int6464#2 | ||
1771 | # asm 2: pslld $7,<y6=%xmm1 | ||
1772 | pslld $7,%xmm1 | ||
1773 | |||
1774 | # qhasm: z6 ^= y6 | ||
1775 | # asm 1: pxor <y6=int6464#2,<z6=int6464#6 | ||
1776 | # asm 2: pxor <y6=%xmm1,<z6=%xmm5 | ||
1777 | pxor %xmm1,%xmm5 | ||
1778 | |||
1779 | # qhasm: uint32323232 r6 >>= 25 | ||
1780 | # asm 1: psrld $25,<r6=int6464#3 | ||
1781 | # asm 2: psrld $25,<r6=%xmm2 | ||
1782 | psrld $25,%xmm2 | ||
1783 | |||
1784 | # qhasm: z6 ^= r6 | ||
1785 | # asm 1: pxor <r6=int6464#3,<z6=int6464#6 | ||
1786 | # asm 2: pxor <r6=%xmm2,<z6=%xmm5 | ||
1787 | pxor %xmm2,%xmm5 | ||
1788 | |||
1789 | # qhasm: y2 = z0 | ||
1790 | # asm 1: movdqa <z0=int6464#1,>y2=int6464#2 | ||
1791 | # asm 2: movdqa <z0=%xmm0,>y2=%xmm1 | ||
1792 | movdqa %xmm0,%xmm1 | ||
1793 | |||
1794 | # qhasm: uint32323232 y2 += z1 | ||
1795 | # asm 1: paddd <z1=int6464#8,<y2=int6464#2 | ||
1796 | # asm 2: paddd <z1=%xmm7,<y2=%xmm1 | ||
1797 | paddd %xmm7,%xmm1 | ||
1798 | |||
1799 | # qhasm: r2 = y2 | ||
1800 | # asm 1: movdqa <y2=int6464#2,>r2=int6464#3 | ||
1801 | # asm 2: movdqa <y2=%xmm1,>r2=%xmm2 | ||
1802 | movdqa %xmm1,%xmm2 | ||
1803 | |||
1804 | # qhasm: uint32323232 y2 <<= 9 | ||
1805 | # asm 1: pslld $9,<y2=int6464#2 | ||
1806 | # asm 2: pslld $9,<y2=%xmm1 | ||
1807 | pslld $9,%xmm1 | ||
1808 | |||
1809 | # qhasm: z2 ^= y2 | ||
1810 | # asm 1: pxor <y2=int6464#2,<z2=int6464#11 | ||
1811 | # asm 2: pxor <y2=%xmm1,<z2=%xmm10 | ||
1812 | pxor %xmm1,%xmm10 | ||
1813 | |||
1814 | # qhasm: uint32323232 r2 >>= 23 | ||
1815 | # asm 1: psrld $23,<r2=int6464#3 | ||
1816 | # asm 2: psrld $23,<r2=%xmm2 | ||
1817 | psrld $23,%xmm2 | ||
1818 | |||
1819 | # qhasm: z2 ^= r2 | ||
1820 | # asm 1: pxor <r2=int6464#3,<z2=int6464#11 | ||
1821 | # asm 2: pxor <r2=%xmm2,<z2=%xmm10 | ||
1822 | pxor %xmm2,%xmm10 | ||
1823 | |||
1824 | # qhasm: y7 = z5 | ||
1825 | # asm 1: movdqa <z5=int6464#13,>y7=int6464#2 | ||
1826 | # asm 2: movdqa <z5=%xmm12,>y7=%xmm1 | ||
1827 | movdqa %xmm12,%xmm1 | ||
1828 | |||
1829 | # qhasm: uint32323232 y7 += z6 | ||
1830 | # asm 1: paddd <z6=int6464#6,<y7=int6464#2 | ||
1831 | # asm 2: paddd <z6=%xmm5,<y7=%xmm1 | ||
1832 | paddd %xmm5,%xmm1 | ||
1833 | |||
1834 | # qhasm: r7 = y7 | ||
1835 | # asm 1: movdqa <y7=int6464#2,>r7=int6464#3 | ||
1836 | # asm 2: movdqa <y7=%xmm1,>r7=%xmm2 | ||
1837 | movdqa %xmm1,%xmm2 | ||
1838 | |||
1839 | # qhasm: uint32323232 y7 <<= 9 | ||
1840 | # asm 1: pslld $9,<y7=int6464#2 | ||
1841 | # asm 2: pslld $9,<y7=%xmm1 | ||
1842 | pslld $9,%xmm1 | ||
1843 | |||
1844 | # qhasm: z7 ^= y7 | ||
1845 | # asm 1: pxor <y7=int6464#2,<z7=int6464#9 | ||
1846 | # asm 2: pxor <y7=%xmm1,<z7=%xmm8 | ||
1847 | pxor %xmm1,%xmm8 | ||
1848 | |||
1849 | # qhasm: uint32323232 r7 >>= 23 | ||
1850 | # asm 1: psrld $23,<r7=int6464#3 | ||
1851 | # asm 2: psrld $23,<r7=%xmm2 | ||
1852 | psrld $23,%xmm2 | ||
1853 | |||
1854 | # qhasm: z7 ^= r7 | ||
1855 | # asm 1: pxor <r7=int6464#3,<z7=int6464#9 | ||
1856 | # asm 2: pxor <r7=%xmm2,<z7=%xmm8 | ||
1857 | pxor %xmm2,%xmm8 | ||
1858 | |||
1859 | # qhasm: y3 = z1 | ||
1860 | # asm 1: movdqa <z1=int6464#8,>y3=int6464#2 | ||
1861 | # asm 2: movdqa <z1=%xmm7,>y3=%xmm1 | ||
1862 | movdqa %xmm7,%xmm1 | ||
1863 | |||
1864 | # qhasm: uint32323232 y3 += z2 | ||
1865 | # asm 1: paddd <z2=int6464#11,<y3=int6464#2 | ||
1866 | # asm 2: paddd <z2=%xmm10,<y3=%xmm1 | ||
1867 | paddd %xmm10,%xmm1 | ||
1868 | |||
1869 | # qhasm: r3 = y3 | ||
1870 | # asm 1: movdqa <y3=int6464#2,>r3=int6464#3 | ||
1871 | # asm 2: movdqa <y3=%xmm1,>r3=%xmm2 | ||
1872 | movdqa %xmm1,%xmm2 | ||
1873 | |||
1874 | # qhasm: uint32323232 y3 <<= 13 | ||
1875 | # asm 1: pslld $13,<y3=int6464#2 | ||
1876 | # asm 2: pslld $13,<y3=%xmm1 | ||
1877 | pslld $13,%xmm1 | ||
1878 | |||
1879 | # qhasm: z3 ^= y3 | ||
1880 | # asm 1: pxor <y3=int6464#2,<z3=int6464#5 | ||
1881 | # asm 2: pxor <y3=%xmm1,<z3=%xmm4 | ||
1882 | pxor %xmm1,%xmm4 | ||
1883 | |||
1884 | # qhasm: uint32323232 r3 >>= 19 | ||
1885 | # asm 1: psrld $19,<r3=int6464#3 | ||
1886 | # asm 2: psrld $19,<r3=%xmm2 | ||
1887 | psrld $19,%xmm2 | ||
1888 | |||
1889 | # qhasm: z3 ^= r3 | ||
1890 | # asm 1: pxor <r3=int6464#3,<z3=int6464#5 | ||
1891 | # asm 2: pxor <r3=%xmm2,<z3=%xmm4 | ||
1892 | pxor %xmm2,%xmm4 | ||
1893 | |||
1894 | # qhasm: y4 = z6 | ||
1895 | # asm 1: movdqa <z6=int6464#6,>y4=int6464#2 | ||
1896 | # asm 2: movdqa <z6=%xmm5,>y4=%xmm1 | ||
1897 | movdqa %xmm5,%xmm1 | ||
1898 | |||
1899 | # qhasm: uint32323232 y4 += z7 | ||
1900 | # asm 1: paddd <z7=int6464#9,<y4=int6464#2 | ||
1901 | # asm 2: paddd <z7=%xmm8,<y4=%xmm1 | ||
1902 | paddd %xmm8,%xmm1 | ||
1903 | |||
1904 | # qhasm: r4 = y4 | ||
1905 | # asm 1: movdqa <y4=int6464#2,>r4=int6464#3 | ||
1906 | # asm 2: movdqa <y4=%xmm1,>r4=%xmm2 | ||
1907 | movdqa %xmm1,%xmm2 | ||
1908 | |||
1909 | # qhasm: uint32323232 y4 <<= 13 | ||
1910 | # asm 1: pslld $13,<y4=int6464#2 | ||
1911 | # asm 2: pslld $13,<y4=%xmm1 | ||
1912 | pslld $13,%xmm1 | ||
1913 | |||
1914 | # qhasm: z4 ^= y4 | ||
1915 | # asm 1: pxor <y4=int6464#2,<z4=int6464#15 | ||
1916 | # asm 2: pxor <y4=%xmm1,<z4=%xmm14 | ||
1917 | pxor %xmm1,%xmm14 | ||
1918 | |||
1919 | # qhasm: uint32323232 r4 >>= 19 | ||
1920 | # asm 1: psrld $19,<r4=int6464#3 | ||
1921 | # asm 2: psrld $19,<r4=%xmm2 | ||
1922 | psrld $19,%xmm2 | ||
1923 | |||
1924 | # qhasm: z4 ^= r4 | ||
1925 | # asm 1: pxor <r4=int6464#3,<z4=int6464#15 | ||
1926 | # asm 2: pxor <r4=%xmm2,<z4=%xmm14 | ||
1927 | pxor %xmm2,%xmm14 | ||
1928 | |||
1929 | # qhasm: y0 = z2 | ||
1930 | # asm 1: movdqa <z2=int6464#11,>y0=int6464#2 | ||
1931 | # asm 2: movdqa <z2=%xmm10,>y0=%xmm1 | ||
1932 | movdqa %xmm10,%xmm1 | ||
1933 | |||
1934 | # qhasm: uint32323232 y0 += z3 | ||
1935 | # asm 1: paddd <z3=int6464#5,<y0=int6464#2 | ||
1936 | # asm 2: paddd <z3=%xmm4,<y0=%xmm1 | ||
1937 | paddd %xmm4,%xmm1 | ||
1938 | |||
1939 | # qhasm: r0 = y0 | ||
1940 | # asm 1: movdqa <y0=int6464#2,>r0=int6464#3 | ||
1941 | # asm 2: movdqa <y0=%xmm1,>r0=%xmm2 | ||
1942 | movdqa %xmm1,%xmm2 | ||
1943 | |||
1944 | # qhasm: uint32323232 y0 <<= 18 | ||
1945 | # asm 1: pslld $18,<y0=int6464#2 | ||
1946 | # asm 2: pslld $18,<y0=%xmm1 | ||
1947 | pslld $18,%xmm1 | ||
1948 | |||
1949 | # qhasm: z0 ^= y0 | ||
1950 | # asm 1: pxor <y0=int6464#2,<z0=int6464#1 | ||
1951 | # asm 2: pxor <y0=%xmm1,<z0=%xmm0 | ||
1952 | pxor %xmm1,%xmm0 | ||
1953 | |||
1954 | # qhasm: uint32323232 r0 >>= 14 | ||
1955 | # asm 1: psrld $14,<r0=int6464#3 | ||
1956 | # asm 2: psrld $14,<r0=%xmm2 | ||
1957 | psrld $14,%xmm2 | ||
1958 | |||
1959 | # qhasm: z0 ^= r0 | ||
1960 | # asm 1: pxor <r0=int6464#3,<z0=int6464#1 | ||
1961 | # asm 2: pxor <r0=%xmm2,<z0=%xmm0 | ||
1962 | pxor %xmm2,%xmm0 | ||
1963 | |||
1964 | # qhasm: z10 = z10_stack | ||
1965 | # asm 1: movdqa <z10_stack=stack128#21,>z10=int6464#2 | ||
1966 | # asm 2: movdqa <z10_stack=320(%rsp),>z10=%xmm1 | ||
1967 | movdqa 320(%rsp),%xmm1 | ||
1968 | |||
1969 | # qhasm: z0_stack = z0 | ||
1970 | # asm 1: movdqa <z0=int6464#1,>z0_stack=stack128#21 | ||
1971 | # asm 2: movdqa <z0=%xmm0,>z0_stack=320(%rsp) | ||
1972 | movdqa %xmm0,320(%rsp) | ||
1973 | |||
1974 | # qhasm: y5 = z7 | ||
1975 | # asm 1: movdqa <z7=int6464#9,>y5=int6464#1 | ||
1976 | # asm 2: movdqa <z7=%xmm8,>y5=%xmm0 | ||
1977 | movdqa %xmm8,%xmm0 | ||
1978 | |||
1979 | # qhasm: uint32323232 y5 += z4 | ||
1980 | # asm 1: paddd <z4=int6464#15,<y5=int6464#1 | ||
1981 | # asm 2: paddd <z4=%xmm14,<y5=%xmm0 | ||
1982 | paddd %xmm14,%xmm0 | ||
1983 | |||
1984 | # qhasm: r5 = y5 | ||
1985 | # asm 1: movdqa <y5=int6464#1,>r5=int6464#3 | ||
1986 | # asm 2: movdqa <y5=%xmm0,>r5=%xmm2 | ||
1987 | movdqa %xmm0,%xmm2 | ||
1988 | |||
1989 | # qhasm: uint32323232 y5 <<= 18 | ||
1990 | # asm 1: pslld $18,<y5=int6464#1 | ||
1991 | # asm 2: pslld $18,<y5=%xmm0 | ||
1992 | pslld $18,%xmm0 | ||
1993 | |||
1994 | # qhasm: z5 ^= y5 | ||
1995 | # asm 1: pxor <y5=int6464#1,<z5=int6464#13 | ||
1996 | # asm 2: pxor <y5=%xmm0,<z5=%xmm12 | ||
1997 | pxor %xmm0,%xmm12 | ||
1998 | |||
1999 | # qhasm: uint32323232 r5 >>= 14 | ||
2000 | # asm 1: psrld $14,<r5=int6464#3 | ||
2001 | # asm 2: psrld $14,<r5=%xmm2 | ||
2002 | psrld $14,%xmm2 | ||
2003 | |||
2004 | # qhasm: z5 ^= r5 | ||
2005 | # asm 1: pxor <r5=int6464#3,<z5=int6464#13 | ||
2006 | # asm 2: pxor <r5=%xmm2,<z5=%xmm12 | ||
2007 | pxor %xmm2,%xmm12 | ||
2008 | |||
2009 | # qhasm: y11 = z9 | ||
2010 | # asm 1: movdqa <z9=int6464#12,>y11=int6464#1 | ||
2011 | # asm 2: movdqa <z9=%xmm11,>y11=%xmm0 | ||
2012 | movdqa %xmm11,%xmm0 | ||
2013 | |||
2014 | # qhasm: uint32323232 y11 += z10 | ||
2015 | # asm 1: paddd <z10=int6464#2,<y11=int6464#1 | ||
2016 | # asm 2: paddd <z10=%xmm1,<y11=%xmm0 | ||
2017 | paddd %xmm1,%xmm0 | ||
2018 | |||
2019 | # qhasm: r11 = y11 | ||
2020 | # asm 1: movdqa <y11=int6464#1,>r11=int6464#3 | ||
2021 | # asm 2: movdqa <y11=%xmm0,>r11=%xmm2 | ||
2022 | movdqa %xmm0,%xmm2 | ||
2023 | |||
2024 | # qhasm: uint32323232 y11 <<= 7 | ||
2025 | # asm 1: pslld $7,<y11=int6464#1 | ||
2026 | # asm 2: pslld $7,<y11=%xmm0 | ||
2027 | pslld $7,%xmm0 | ||
2028 | |||
2029 | # qhasm: z11 ^= y11 | ||
2030 | # asm 1: pxor <y11=int6464#1,<z11=int6464#7 | ||
2031 | # asm 2: pxor <y11=%xmm0,<z11=%xmm6 | ||
2032 | pxor %xmm0,%xmm6 | ||
2033 | |||
2034 | # qhasm: uint32323232 r11 >>= 25 | ||
2035 | # asm 1: psrld $25,<r11=int6464#3 | ||
2036 | # asm 2: psrld $25,<r11=%xmm2 | ||
2037 | psrld $25,%xmm2 | ||
2038 | |||
2039 | # qhasm: z11 ^= r11 | ||
2040 | # asm 1: pxor <r11=int6464#3,<z11=int6464#7 | ||
2041 | # asm 2: pxor <r11=%xmm2,<z11=%xmm6 | ||
2042 | pxor %xmm2,%xmm6 | ||
2043 | |||
2044 | # qhasm: z15 = z15_stack | ||
2045 | # asm 1: movdqa <z15_stack=stack128#22,>z15=int6464#3 | ||
2046 | # asm 2: movdqa <z15_stack=336(%rsp),>z15=%xmm2 | ||
2047 | movdqa 336(%rsp),%xmm2 | ||
2048 | |||
2049 | # qhasm: z5_stack = z5 | ||
2050 | # asm 1: movdqa <z5=int6464#13,>z5_stack=stack128#22 | ||
2051 | # asm 2: movdqa <z5=%xmm12,>z5_stack=336(%rsp) | ||
2052 | movdqa %xmm12,336(%rsp) | ||
2053 | |||
2054 | # qhasm: y12 = z14 | ||
2055 | # asm 1: movdqa <z14=int6464#4,>y12=int6464#1 | ||
2056 | # asm 2: movdqa <z14=%xmm3,>y12=%xmm0 | ||
2057 | movdqa %xmm3,%xmm0 | ||
2058 | |||
2059 | # qhasm: uint32323232 y12 += z15 | ||
2060 | # asm 1: paddd <z15=int6464#3,<y12=int6464#1 | ||
2061 | # asm 2: paddd <z15=%xmm2,<y12=%xmm0 | ||
2062 | paddd %xmm2,%xmm0 | ||
2063 | |||
2064 | # qhasm: r12 = y12 | ||
2065 | # asm 1: movdqa <y12=int6464#1,>r12=int6464#13 | ||
2066 | # asm 2: movdqa <y12=%xmm0,>r12=%xmm12 | ||
2067 | movdqa %xmm0,%xmm12 | ||
2068 | |||
2069 | # qhasm: uint32323232 y12 <<= 7 | ||
2070 | # asm 1: pslld $7,<y12=int6464#1 | ||
2071 | # asm 2: pslld $7,<y12=%xmm0 | ||
2072 | pslld $7,%xmm0 | ||
2073 | |||
2074 | # qhasm: z12 ^= y12 | ||
2075 | # asm 1: pxor <y12=int6464#1,<z12=int6464#14 | ||
2076 | # asm 2: pxor <y12=%xmm0,<z12=%xmm13 | ||
2077 | pxor %xmm0,%xmm13 | ||
2078 | |||
2079 | # qhasm: uint32323232 r12 >>= 25 | ||
2080 | # asm 1: psrld $25,<r12=int6464#13 | ||
2081 | # asm 2: psrld $25,<r12=%xmm12 | ||
2082 | psrld $25,%xmm12 | ||
2083 | |||
2084 | # qhasm: z12 ^= r12 | ||
2085 | # asm 1: pxor <r12=int6464#13,<z12=int6464#14 | ||
2086 | # asm 2: pxor <r12=%xmm12,<z12=%xmm13 | ||
2087 | pxor %xmm12,%xmm13 | ||
2088 | |||
2089 | # qhasm: y8 = z10 | ||
2090 | # asm 1: movdqa <z10=int6464#2,>y8=int6464#1 | ||
2091 | # asm 2: movdqa <z10=%xmm1,>y8=%xmm0 | ||
2092 | movdqa %xmm1,%xmm0 | ||
2093 | |||
2094 | # qhasm: uint32323232 y8 += z11 | ||
2095 | # asm 1: paddd <z11=int6464#7,<y8=int6464#1 | ||
2096 | # asm 2: paddd <z11=%xmm6,<y8=%xmm0 | ||
2097 | paddd %xmm6,%xmm0 | ||
2098 | |||
2099 | # qhasm: r8 = y8 | ||
2100 | # asm 1: movdqa <y8=int6464#1,>r8=int6464#13 | ||
2101 | # asm 2: movdqa <y8=%xmm0,>r8=%xmm12 | ||
2102 | movdqa %xmm0,%xmm12 | ||
2103 | |||
2104 | # qhasm: uint32323232 y8 <<= 9 | ||
2105 | # asm 1: pslld $9,<y8=int6464#1 | ||
2106 | # asm 2: pslld $9,<y8=%xmm0 | ||
2107 | pslld $9,%xmm0 | ||
2108 | |||
2109 | # qhasm: z8 ^= y8 | ||
2110 | # asm 1: pxor <y8=int6464#1,<z8=int6464#16 | ||
2111 | # asm 2: pxor <y8=%xmm0,<z8=%xmm15 | ||
2112 | pxor %xmm0,%xmm15 | ||
2113 | |||
2114 | # qhasm: uint32323232 r8 >>= 23 | ||
2115 | # asm 1: psrld $23,<r8=int6464#13 | ||
2116 | # asm 2: psrld $23,<r8=%xmm12 | ||
2117 | psrld $23,%xmm12 | ||
2118 | |||
2119 | # qhasm: z8 ^= r8 | ||
2120 | # asm 1: pxor <r8=int6464#13,<z8=int6464#16 | ||
2121 | # asm 2: pxor <r8=%xmm12,<z8=%xmm15 | ||
2122 | pxor %xmm12,%xmm15 | ||
2123 | |||
2124 | # qhasm: y13 = z15 | ||
2125 | # asm 1: movdqa <z15=int6464#3,>y13=int6464#1 | ||
2126 | # asm 2: movdqa <z15=%xmm2,>y13=%xmm0 | ||
2127 | movdqa %xmm2,%xmm0 | ||
2128 | |||
2129 | # qhasm: uint32323232 y13 += z12 | ||
2130 | # asm 1: paddd <z12=int6464#14,<y13=int6464#1 | ||
2131 | # asm 2: paddd <z12=%xmm13,<y13=%xmm0 | ||
2132 | paddd %xmm13,%xmm0 | ||
2133 | |||
2134 | # qhasm: r13 = y13 | ||
2135 | # asm 1: movdqa <y13=int6464#1,>r13=int6464#13 | ||
2136 | # asm 2: movdqa <y13=%xmm0,>r13=%xmm12 | ||
2137 | movdqa %xmm0,%xmm12 | ||
2138 | |||
2139 | # qhasm: uint32323232 y13 <<= 9 | ||
2140 | # asm 1: pslld $9,<y13=int6464#1 | ||
2141 | # asm 2: pslld $9,<y13=%xmm0 | ||
2142 | pslld $9,%xmm0 | ||
2143 | |||
2144 | # qhasm: z13 ^= y13 | ||
2145 | # asm 1: pxor <y13=int6464#1,<z13=int6464#10 | ||
2146 | # asm 2: pxor <y13=%xmm0,<z13=%xmm9 | ||
2147 | pxor %xmm0,%xmm9 | ||
2148 | |||
2149 | # qhasm: uint32323232 r13 >>= 23 | ||
2150 | # asm 1: psrld $23,<r13=int6464#13 | ||
2151 | # asm 2: psrld $23,<r13=%xmm12 | ||
2152 | psrld $23,%xmm12 | ||
2153 | |||
2154 | # qhasm: z13 ^= r13 | ||
2155 | # asm 1: pxor <r13=int6464#13,<z13=int6464#10 | ||
2156 | # asm 2: pxor <r13=%xmm12,<z13=%xmm9 | ||
2157 | pxor %xmm12,%xmm9 | ||
2158 | |||
2159 | # qhasm: y9 = z11 | ||
2160 | # asm 1: movdqa <z11=int6464#7,>y9=int6464#1 | ||
2161 | # asm 2: movdqa <z11=%xmm6,>y9=%xmm0 | ||
2162 | movdqa %xmm6,%xmm0 | ||
2163 | |||
2164 | # qhasm: uint32323232 y9 += z8 | ||
2165 | # asm 1: paddd <z8=int6464#16,<y9=int6464#1 | ||
2166 | # asm 2: paddd <z8=%xmm15,<y9=%xmm0 | ||
2167 | paddd %xmm15,%xmm0 | ||
2168 | |||
2169 | # qhasm: r9 = y9 | ||
2170 | # asm 1: movdqa <y9=int6464#1,>r9=int6464#13 | ||
2171 | # asm 2: movdqa <y9=%xmm0,>r9=%xmm12 | ||
2172 | movdqa %xmm0,%xmm12 | ||
2173 | |||
2174 | # qhasm: uint32323232 y9 <<= 13 | ||
2175 | # asm 1: pslld $13,<y9=int6464#1 | ||
2176 | # asm 2: pslld $13,<y9=%xmm0 | ||
2177 | pslld $13,%xmm0 | ||
2178 | |||
2179 | # qhasm: z9 ^= y9 | ||
2180 | # asm 1: pxor <y9=int6464#1,<z9=int6464#12 | ||
2181 | # asm 2: pxor <y9=%xmm0,<z9=%xmm11 | ||
2182 | pxor %xmm0,%xmm11 | ||
2183 | |||
2184 | # qhasm: uint32323232 r9 >>= 19 | ||
2185 | # asm 1: psrld $19,<r9=int6464#13 | ||
2186 | # asm 2: psrld $19,<r9=%xmm12 | ||
2187 | psrld $19,%xmm12 | ||
2188 | |||
2189 | # qhasm: z9 ^= r9 | ||
2190 | # asm 1: pxor <r9=int6464#13,<z9=int6464#12 | ||
2191 | # asm 2: pxor <r9=%xmm12,<z9=%xmm11 | ||
2192 | pxor %xmm12,%xmm11 | ||
2193 | |||
2194 | # qhasm: y14 = z12 | ||
2195 | # asm 1: movdqa <z12=int6464#14,>y14=int6464#1 | ||
2196 | # asm 2: movdqa <z12=%xmm13,>y14=%xmm0 | ||
2197 | movdqa %xmm13,%xmm0 | ||
2198 | |||
2199 | # qhasm: uint32323232 y14 += z13 | ||
2200 | # asm 1: paddd <z13=int6464#10,<y14=int6464#1 | ||
2201 | # asm 2: paddd <z13=%xmm9,<y14=%xmm0 | ||
2202 | paddd %xmm9,%xmm0 | ||
2203 | |||
2204 | # qhasm: r14 = y14 | ||
2205 | # asm 1: movdqa <y14=int6464#1,>r14=int6464#13 | ||
2206 | # asm 2: movdqa <y14=%xmm0,>r14=%xmm12 | ||
2207 | movdqa %xmm0,%xmm12 | ||
2208 | |||
2209 | # qhasm: uint32323232 y14 <<= 13 | ||
2210 | # asm 1: pslld $13,<y14=int6464#1 | ||
2211 | # asm 2: pslld $13,<y14=%xmm0 | ||
2212 | pslld $13,%xmm0 | ||
2213 | |||
2214 | # qhasm: z14 ^= y14 | ||
2215 | # asm 1: pxor <y14=int6464#1,<z14=int6464#4 | ||
2216 | # asm 2: pxor <y14=%xmm0,<z14=%xmm3 | ||
2217 | pxor %xmm0,%xmm3 | ||
2218 | |||
2219 | # qhasm: uint32323232 r14 >>= 19 | ||
2220 | # asm 1: psrld $19,<r14=int6464#13 | ||
2221 | # asm 2: psrld $19,<r14=%xmm12 | ||
2222 | psrld $19,%xmm12 | ||
2223 | |||
2224 | # qhasm: z14 ^= r14 | ||
2225 | # asm 1: pxor <r14=int6464#13,<z14=int6464#4 | ||
2226 | # asm 2: pxor <r14=%xmm12,<z14=%xmm3 | ||
2227 | pxor %xmm12,%xmm3 | ||
2228 | |||
2229 | # qhasm: y10 = z8 | ||
2230 | # asm 1: movdqa <z8=int6464#16,>y10=int6464#1 | ||
2231 | # asm 2: movdqa <z8=%xmm15,>y10=%xmm0 | ||
2232 | movdqa %xmm15,%xmm0 | ||
2233 | |||
2234 | # qhasm: uint32323232 y10 += z9 | ||
2235 | # asm 1: paddd <z9=int6464#12,<y10=int6464#1 | ||
2236 | # asm 2: paddd <z9=%xmm11,<y10=%xmm0 | ||
2237 | paddd %xmm11,%xmm0 | ||
2238 | |||
2239 | # qhasm: r10 = y10 | ||
2240 | # asm 1: movdqa <y10=int6464#1,>r10=int6464#13 | ||
2241 | # asm 2: movdqa <y10=%xmm0,>r10=%xmm12 | ||
2242 | movdqa %xmm0,%xmm12 | ||
2243 | |||
2244 | # qhasm: uint32323232 y10 <<= 18 | ||
2245 | # asm 1: pslld $18,<y10=int6464#1 | ||
2246 | # asm 2: pslld $18,<y10=%xmm0 | ||
2247 | pslld $18,%xmm0 | ||
2248 | |||
2249 | # qhasm: z10 ^= y10 | ||
2250 | # asm 1: pxor <y10=int6464#1,<z10=int6464#2 | ||
2251 | # asm 2: pxor <y10=%xmm0,<z10=%xmm1 | ||
2252 | pxor %xmm0,%xmm1 | ||
2253 | |||
2254 | # qhasm: uint32323232 r10 >>= 14 | ||
2255 | # asm 1: psrld $14,<r10=int6464#13 | ||
2256 | # asm 2: psrld $14,<r10=%xmm12 | ||
2257 | psrld $14,%xmm12 | ||
2258 | |||
2259 | # qhasm: z10 ^= r10 | ||
2260 | # asm 1: pxor <r10=int6464#13,<z10=int6464#2 | ||
2261 | # asm 2: pxor <r10=%xmm12,<z10=%xmm1 | ||
2262 | pxor %xmm12,%xmm1 | ||
2263 | |||
2264 | # qhasm: y15 = z13 | ||
2265 | # asm 1: movdqa <z13=int6464#10,>y15=int6464#1 | ||
2266 | # asm 2: movdqa <z13=%xmm9,>y15=%xmm0 | ||
2267 | movdqa %xmm9,%xmm0 | ||
2268 | |||
2269 | # qhasm: uint32323232 y15 += z14 | ||
2270 | # asm 1: paddd <z14=int6464#4,<y15=int6464#1 | ||
2271 | # asm 2: paddd <z14=%xmm3,<y15=%xmm0 | ||
2272 | paddd %xmm3,%xmm0 | ||
2273 | |||
2274 | # qhasm: r15 = y15 | ||
2275 | # asm 1: movdqa <y15=int6464#1,>r15=int6464#13 | ||
2276 | # asm 2: movdqa <y15=%xmm0,>r15=%xmm12 | ||
2277 | movdqa %xmm0,%xmm12 | ||
2278 | |||
2279 | # qhasm: uint32323232 y15 <<= 18 | ||
2280 | # asm 1: pslld $18,<y15=int6464#1 | ||
2281 | # asm 2: pslld $18,<y15=%xmm0 | ||
2282 | pslld $18,%xmm0 | ||
2283 | |||
2284 | # qhasm: z15 ^= y15 | ||
2285 | # asm 1: pxor <y15=int6464#1,<z15=int6464#3 | ||
2286 | # asm 2: pxor <y15=%xmm0,<z15=%xmm2 | ||
2287 | pxor %xmm0,%xmm2 | ||
2288 | |||
2289 | # qhasm: uint32323232 r15 >>= 14 | ||
2290 | # asm 1: psrld $14,<r15=int6464#13 | ||
2291 | # asm 2: psrld $14,<r15=%xmm12 | ||
2292 | psrld $14,%xmm12 | ||
2293 | |||
2294 | # qhasm: z15 ^= r15 | ||
2295 | # asm 1: pxor <r15=int6464#13,<z15=int6464#3 | ||
2296 | # asm 2: pxor <r15=%xmm12,<z15=%xmm2 | ||
2297 | pxor %xmm12,%xmm2 | ||
2298 | |||
2299 | # qhasm: z0 = z0_stack | ||
2300 | # asm 1: movdqa <z0_stack=stack128#21,>z0=int6464#13 | ||
2301 | # asm 2: movdqa <z0_stack=320(%rsp),>z0=%xmm12 | ||
2302 | movdqa 320(%rsp),%xmm12 | ||
2303 | |||
2304 | # qhasm: z5 = z5_stack | ||
2305 | # asm 1: movdqa <z5_stack=stack128#22,>z5=int6464#1 | ||
2306 | # asm 2: movdqa <z5_stack=336(%rsp),>z5=%xmm0 | ||
2307 | movdqa 336(%rsp),%xmm0 | ||
2308 | |||
2309 | # qhasm: unsigned>? i -= 2 | ||
2310 | # asm 1: sub $2,<i=int64#3 | ||
2311 | # asm 2: sub $2,<i=%rdx | ||
2312 | sub $2,%rdx | ||
2313 | # comment:fp stack unchanged by jump | ||
2314 | |||
2315 | # qhasm: goto mainloop1 if unsigned> | ||
2316 | ja ._mainloop1 | ||
2317 | |||
2318 | # qhasm: uint32323232 z0 += orig0 | ||
2319 | # asm 1: paddd <orig0=stack128#8,<z0=int6464#13 | ||
2320 | # asm 2: paddd <orig0=112(%rsp),<z0=%xmm12 | ||
2321 | paddd 112(%rsp),%xmm12 | ||
2322 | |||
2323 | # qhasm: uint32323232 z1 += orig1 | ||
2324 | # asm 1: paddd <orig1=stack128#12,<z1=int6464#8 | ||
2325 | # asm 2: paddd <orig1=176(%rsp),<z1=%xmm7 | ||
2326 | paddd 176(%rsp),%xmm7 | ||
2327 | |||
2328 | # qhasm: uint32323232 z2 += orig2 | ||
2329 | # asm 1: paddd <orig2=stack128#15,<z2=int6464#11 | ||
2330 | # asm 2: paddd <orig2=224(%rsp),<z2=%xmm10 | ||
2331 | paddd 224(%rsp),%xmm10 | ||
2332 | |||
2333 | # qhasm: uint32323232 z3 += orig3 | ||
2334 | # asm 1: paddd <orig3=stack128#18,<z3=int6464#5 | ||
2335 | # asm 2: paddd <orig3=272(%rsp),<z3=%xmm4 | ||
2336 | paddd 272(%rsp),%xmm4 | ||
2337 | |||
2338 | # qhasm: in0 = z0 | ||
2339 | # asm 1: movd <z0=int6464#13,>in0=int64#3 | ||
2340 | # asm 2: movd <z0=%xmm12,>in0=%rdx | ||
2341 | movd %xmm12,%rdx | ||
2342 | |||
2343 | # qhasm: in1 = z1 | ||
2344 | # asm 1: movd <z1=int6464#8,>in1=int64#4 | ||
2345 | # asm 2: movd <z1=%xmm7,>in1=%rcx | ||
2346 | movd %xmm7,%rcx | ||
2347 | |||
2348 | # qhasm: in2 = z2 | ||
2349 | # asm 1: movd <z2=int6464#11,>in2=int64#5 | ||
2350 | # asm 2: movd <z2=%xmm10,>in2=%r8 | ||
2351 | movd %xmm10,%r8 | ||
2352 | |||
2353 | # qhasm: in3 = z3 | ||
2354 | # asm 1: movd <z3=int6464#5,>in3=int64#6 | ||
2355 | # asm 2: movd <z3=%xmm4,>in3=%r9 | ||
2356 | movd %xmm4,%r9 | ||
2357 | |||
2358 | # qhasm: z0 <<<= 96 | ||
2359 | # asm 1: pshufd $0x39,<z0=int6464#13,<z0=int6464#13 | ||
2360 | # asm 2: pshufd $0x39,<z0=%xmm12,<z0=%xmm12 | ||
2361 | pshufd $0x39,%xmm12,%xmm12 | ||
2362 | |||
2363 | # qhasm: z1 <<<= 96 | ||
2364 | # asm 1: pshufd $0x39,<z1=int6464#8,<z1=int6464#8 | ||
2365 | # asm 2: pshufd $0x39,<z1=%xmm7,<z1=%xmm7 | ||
2366 | pshufd $0x39,%xmm7,%xmm7 | ||
2367 | |||
2368 | # qhasm: z2 <<<= 96 | ||
2369 | # asm 1: pshufd $0x39,<z2=int6464#11,<z2=int6464#11 | ||
2370 | # asm 2: pshufd $0x39,<z2=%xmm10,<z2=%xmm10 | ||
2371 | pshufd $0x39,%xmm10,%xmm10 | ||
2372 | |||
2373 | # qhasm: z3 <<<= 96 | ||
2374 | # asm 1: pshufd $0x39,<z3=int6464#5,<z3=int6464#5 | ||
2375 | # asm 2: pshufd $0x39,<z3=%xmm4,<z3=%xmm4 | ||
2376 | pshufd $0x39,%xmm4,%xmm4 | ||
2377 | |||
2378 | # qhasm: (uint32) in0 ^= *(uint32 *) (m + 0) | ||
2379 | # asm 1: xorl 0(<m=int64#2),<in0=int64#3d | ||
2380 | # asm 2: xorl 0(<m=%rsi),<in0=%edx | ||
2381 | xorl 0(%rsi),%edx | ||
2382 | |||
2383 | # qhasm: (uint32) in1 ^= *(uint32 *) (m + 4) | ||
2384 | # asm 1: xorl 4(<m=int64#2),<in1=int64#4d | ||
2385 | # asm 2: xorl 4(<m=%rsi),<in1=%ecx | ||
2386 | xorl 4(%rsi),%ecx | ||
2387 | |||
2388 | # qhasm: (uint32) in2 ^= *(uint32 *) (m + 8) | ||
2389 | # asm 1: xorl 8(<m=int64#2),<in2=int64#5d | ||
2390 | # asm 2: xorl 8(<m=%rsi),<in2=%r8d | ||
2391 | xorl 8(%rsi),%r8d | ||
2392 | |||
2393 | # qhasm: (uint32) in3 ^= *(uint32 *) (m + 12) | ||
2394 | # asm 1: xorl 12(<m=int64#2),<in3=int64#6d | ||
2395 | # asm 2: xorl 12(<m=%rsi),<in3=%r9d | ||
2396 | xorl 12(%rsi),%r9d | ||
2397 | |||
2398 | # qhasm: *(uint32 *) (out + 0) = in0 | ||
2399 | # asm 1: movl <in0=int64#3d,0(<out=int64#1) | ||
2400 | # asm 2: movl <in0=%edx,0(<out=%rdi) | ||
2401 | movl %edx,0(%rdi) | ||
2402 | |||
2403 | # qhasm: *(uint32 *) (out + 4) = in1 | ||
2404 | # asm 1: movl <in1=int64#4d,4(<out=int64#1) | ||
2405 | # asm 2: movl <in1=%ecx,4(<out=%rdi) | ||
2406 | movl %ecx,4(%rdi) | ||
2407 | |||
2408 | # qhasm: *(uint32 *) (out + 8) = in2 | ||
2409 | # asm 1: movl <in2=int64#5d,8(<out=int64#1) | ||
2410 | # asm 2: movl <in2=%r8d,8(<out=%rdi) | ||
2411 | movl %r8d,8(%rdi) | ||
2412 | |||
2413 | # qhasm: *(uint32 *) (out + 12) = in3 | ||
2414 | # asm 1: movl <in3=int64#6d,12(<out=int64#1) | ||
2415 | # asm 2: movl <in3=%r9d,12(<out=%rdi) | ||
2416 | movl %r9d,12(%rdi) | ||
2417 | |||
2418 | # qhasm: in0 = z0 | ||
2419 | # asm 1: movd <z0=int6464#13,>in0=int64#3 | ||
2420 | # asm 2: movd <z0=%xmm12,>in0=%rdx | ||
2421 | movd %xmm12,%rdx | ||
2422 | |||
2423 | # qhasm: in1 = z1 | ||
2424 | # asm 1: movd <z1=int6464#8,>in1=int64#4 | ||
2425 | # asm 2: movd <z1=%xmm7,>in1=%rcx | ||
2426 | movd %xmm7,%rcx | ||
2427 | |||
2428 | # qhasm: in2 = z2 | ||
2429 | # asm 1: movd <z2=int6464#11,>in2=int64#5 | ||
2430 | # asm 2: movd <z2=%xmm10,>in2=%r8 | ||
2431 | movd %xmm10,%r8 | ||
2432 | |||
2433 | # qhasm: in3 = z3 | ||
2434 | # asm 1: movd <z3=int6464#5,>in3=int64#6 | ||
2435 | # asm 2: movd <z3=%xmm4,>in3=%r9 | ||
2436 | movd %xmm4,%r9 | ||
2437 | |||
2438 | # qhasm: z0 <<<= 96 | ||
2439 | # asm 1: pshufd $0x39,<z0=int6464#13,<z0=int6464#13 | ||
2440 | # asm 2: pshufd $0x39,<z0=%xmm12,<z0=%xmm12 | ||
2441 | pshufd $0x39,%xmm12,%xmm12 | ||
2442 | |||
2443 | # qhasm: z1 <<<= 96 | ||
2444 | # asm 1: pshufd $0x39,<z1=int6464#8,<z1=int6464#8 | ||
2445 | # asm 2: pshufd $0x39,<z1=%xmm7,<z1=%xmm7 | ||
2446 | pshufd $0x39,%xmm7,%xmm7 | ||
2447 | |||
2448 | # qhasm: z2 <<<= 96 | ||
2449 | # asm 1: pshufd $0x39,<z2=int6464#11,<z2=int6464#11 | ||
2450 | # asm 2: pshufd $0x39,<z2=%xmm10,<z2=%xmm10 | ||
2451 | pshufd $0x39,%xmm10,%xmm10 | ||
2452 | |||
2453 | # qhasm: z3 <<<= 96 | ||
2454 | # asm 1: pshufd $0x39,<z3=int6464#5,<z3=int6464#5 | ||
2455 | # asm 2: pshufd $0x39,<z3=%xmm4,<z3=%xmm4 | ||
2456 | pshufd $0x39,%xmm4,%xmm4 | ||
2457 | |||
2458 | # qhasm: (uint32) in0 ^= *(uint32 *) (m + 64) | ||
2459 | # asm 1: xorl 64(<m=int64#2),<in0=int64#3d | ||
2460 | # asm 2: xorl 64(<m=%rsi),<in0=%edx | ||
2461 | xorl 64(%rsi),%edx | ||
2462 | |||
2463 | # qhasm: (uint32) in1 ^= *(uint32 *) (m + 68) | ||
2464 | # asm 1: xorl 68(<m=int64#2),<in1=int64#4d | ||
2465 | # asm 2: xorl 68(<m=%rsi),<in1=%ecx | ||
2466 | xorl 68(%rsi),%ecx | ||
2467 | |||
2468 | # qhasm: (uint32) in2 ^= *(uint32 *) (m + 72) | ||
2469 | # asm 1: xorl 72(<m=int64#2),<in2=int64#5d | ||
2470 | # asm 2: xorl 72(<m=%rsi),<in2=%r8d | ||
2471 | xorl 72(%rsi),%r8d | ||
2472 | |||
2473 | # qhasm: (uint32) in3 ^= *(uint32 *) (m + 76) | ||
2474 | # asm 1: xorl 76(<m=int64#2),<in3=int64#6d | ||
2475 | # asm 2: xorl 76(<m=%rsi),<in3=%r9d | ||
2476 | xorl 76(%rsi),%r9d | ||
2477 | |||
2478 | # qhasm: *(uint32 *) (out + 64) = in0 | ||
2479 | # asm 1: movl <in0=int64#3d,64(<out=int64#1) | ||
2480 | # asm 2: movl <in0=%edx,64(<out=%rdi) | ||
2481 | movl %edx,64(%rdi) | ||
2482 | |||
2483 | # qhasm: *(uint32 *) (out + 68) = in1 | ||
2484 | # asm 1: movl <in1=int64#4d,68(<out=int64#1) | ||
2485 | # asm 2: movl <in1=%ecx,68(<out=%rdi) | ||
2486 | movl %ecx,68(%rdi) | ||
2487 | |||
2488 | # qhasm: *(uint32 *) (out + 72) = in2 | ||
2489 | # asm 1: movl <in2=int64#5d,72(<out=int64#1) | ||
2490 | # asm 2: movl <in2=%r8d,72(<out=%rdi) | ||
2491 | movl %r8d,72(%rdi) | ||
2492 | |||
2493 | # qhasm: *(uint32 *) (out + 76) = in3 | ||
2494 | # asm 1: movl <in3=int64#6d,76(<out=int64#1) | ||
2495 | # asm 2: movl <in3=%r9d,76(<out=%rdi) | ||
2496 | movl %r9d,76(%rdi) | ||
2497 | |||
2498 | # qhasm: in0 = z0 | ||
2499 | # asm 1: movd <z0=int6464#13,>in0=int64#3 | ||
2500 | # asm 2: movd <z0=%xmm12,>in0=%rdx | ||
2501 | movd %xmm12,%rdx | ||
2502 | |||
2503 | # qhasm: in1 = z1 | ||
2504 | # asm 1: movd <z1=int6464#8,>in1=int64#4 | ||
2505 | # asm 2: movd <z1=%xmm7,>in1=%rcx | ||
2506 | movd %xmm7,%rcx | ||
2507 | |||
2508 | # qhasm: in2 = z2 | ||
2509 | # asm 1: movd <z2=int6464#11,>in2=int64#5 | ||
2510 | # asm 2: movd <z2=%xmm10,>in2=%r8 | ||
2511 | movd %xmm10,%r8 | ||
2512 | |||
2513 | # qhasm: in3 = z3 | ||
2514 | # asm 1: movd <z3=int6464#5,>in3=int64#6 | ||
2515 | # asm 2: movd <z3=%xmm4,>in3=%r9 | ||
2516 | movd %xmm4,%r9 | ||
2517 | |||
2518 | # qhasm: z0 <<<= 96 | ||
2519 | # asm 1: pshufd $0x39,<z0=int6464#13,<z0=int6464#13 | ||
2520 | # asm 2: pshufd $0x39,<z0=%xmm12,<z0=%xmm12 | ||
2521 | pshufd $0x39,%xmm12,%xmm12 | ||
2522 | |||
2523 | # qhasm: z1 <<<= 96 | ||
2524 | # asm 1: pshufd $0x39,<z1=int6464#8,<z1=int6464#8 | ||
2525 | # asm 2: pshufd $0x39,<z1=%xmm7,<z1=%xmm7 | ||
2526 | pshufd $0x39,%xmm7,%xmm7 | ||
2527 | |||
2528 | # qhasm: z2 <<<= 96 | ||
2529 | # asm 1: pshufd $0x39,<z2=int6464#11,<z2=int6464#11 | ||
2530 | # asm 2: pshufd $0x39,<z2=%xmm10,<z2=%xmm10 | ||
2531 | pshufd $0x39,%xmm10,%xmm10 | ||
2532 | |||
2533 | # qhasm: z3 <<<= 96 | ||
2534 | # asm 1: pshufd $0x39,<z3=int6464#5,<z3=int6464#5 | ||
2535 | # asm 2: pshufd $0x39,<z3=%xmm4,<z3=%xmm4 | ||
2536 | pshufd $0x39,%xmm4,%xmm4 | ||
2537 | |||
2538 | # qhasm: (uint32) in0 ^= *(uint32 *) (m + 128) | ||
2539 | # asm 1: xorl 128(<m=int64#2),<in0=int64#3d | ||
2540 | # asm 2: xorl 128(<m=%rsi),<in0=%edx | ||
2541 | xorl 128(%rsi),%edx | ||
2542 | |||
2543 | # qhasm: (uint32) in1 ^= *(uint32 *) (m + 132) | ||
2544 | # asm 1: xorl 132(<m=int64#2),<in1=int64#4d | ||
2545 | # asm 2: xorl 132(<m=%rsi),<in1=%ecx | ||
2546 | xorl 132(%rsi),%ecx | ||
2547 | |||
2548 | # qhasm: (uint32) in2 ^= *(uint32 *) (m + 136) | ||
2549 | # asm 1: xorl 136(<m=int64#2),<in2=int64#5d | ||
2550 | # asm 2: xorl 136(<m=%rsi),<in2=%r8d | ||
2551 | xorl 136(%rsi),%r8d | ||
2552 | |||
2553 | # qhasm: (uint32) in3 ^= *(uint32 *) (m + 140) | ||
2554 | # asm 1: xorl 140(<m=int64#2),<in3=int64#6d | ||
2555 | # asm 2: xorl 140(<m=%rsi),<in3=%r9d | ||
2556 | xorl 140(%rsi),%r9d | ||
2557 | |||
2558 | # qhasm: *(uint32 *) (out + 128) = in0 | ||
2559 | # asm 1: movl <in0=int64#3d,128(<out=int64#1) | ||
2560 | # asm 2: movl <in0=%edx,128(<out=%rdi) | ||
2561 | movl %edx,128(%rdi) | ||
2562 | |||
2563 | # qhasm: *(uint32 *) (out + 132) = in1 | ||
2564 | # asm 1: movl <in1=int64#4d,132(<out=int64#1) | ||
2565 | # asm 2: movl <in1=%ecx,132(<out=%rdi) | ||
2566 | movl %ecx,132(%rdi) | ||
2567 | |||
2568 | # qhasm: *(uint32 *) (out + 136) = in2 | ||
2569 | # asm 1: movl <in2=int64#5d,136(<out=int64#1) | ||
2570 | # asm 2: movl <in2=%r8d,136(<out=%rdi) | ||
2571 | movl %r8d,136(%rdi) | ||
2572 | |||
2573 | # qhasm: *(uint32 *) (out + 140) = in3 | ||
2574 | # asm 1: movl <in3=int64#6d,140(<out=int64#1) | ||
2575 | # asm 2: movl <in3=%r9d,140(<out=%rdi) | ||
2576 | movl %r9d,140(%rdi) | ||
2577 | |||
2578 | # qhasm: in0 = z0 | ||
2579 | # asm 1: movd <z0=int6464#13,>in0=int64#3 | ||
2580 | # asm 2: movd <z0=%xmm12,>in0=%rdx | ||
2581 | movd %xmm12,%rdx | ||
2582 | |||
2583 | # qhasm: in1 = z1 | ||
2584 | # asm 1: movd <z1=int6464#8,>in1=int64#4 | ||
2585 | # asm 2: movd <z1=%xmm7,>in1=%rcx | ||
2586 | movd %xmm7,%rcx | ||
2587 | |||
2588 | # qhasm: in2 = z2 | ||
2589 | # asm 1: movd <z2=int6464#11,>in2=int64#5 | ||
2590 | # asm 2: movd <z2=%xmm10,>in2=%r8 | ||
2591 | movd %xmm10,%r8 | ||
2592 | |||
2593 | # qhasm: in3 = z3 | ||
2594 | # asm 1: movd <z3=int6464#5,>in3=int64#6 | ||
2595 | # asm 2: movd <z3=%xmm4,>in3=%r9 | ||
2596 | movd %xmm4,%r9 | ||
2597 | |||
2598 | # qhasm: (uint32) in0 ^= *(uint32 *) (m + 192) | ||
2599 | # asm 1: xorl 192(<m=int64#2),<in0=int64#3d | ||
2600 | # asm 2: xorl 192(<m=%rsi),<in0=%edx | ||
2601 | xorl 192(%rsi),%edx | ||
2602 | |||
2603 | # qhasm: (uint32) in1 ^= *(uint32 *) (m + 196) | ||
2604 | # asm 1: xorl 196(<m=int64#2),<in1=int64#4d | ||
2605 | # asm 2: xorl 196(<m=%rsi),<in1=%ecx | ||
2606 | xorl 196(%rsi),%ecx | ||
2607 | |||
2608 | # qhasm: (uint32) in2 ^= *(uint32 *) (m + 200) | ||
2609 | # asm 1: xorl 200(<m=int64#2),<in2=int64#5d | ||
2610 | # asm 2: xorl 200(<m=%rsi),<in2=%r8d | ||
2611 | xorl 200(%rsi),%r8d | ||
2612 | |||
2613 | # qhasm: (uint32) in3 ^= *(uint32 *) (m + 204) | ||
2614 | # asm 1: xorl 204(<m=int64#2),<in3=int64#6d | ||
2615 | # asm 2: xorl 204(<m=%rsi),<in3=%r9d | ||
2616 | xorl 204(%rsi),%r9d | ||
2617 | |||
2618 | # qhasm: *(uint32 *) (out + 192) = in0 | ||
2619 | # asm 1: movl <in0=int64#3d,192(<out=int64#1) | ||
2620 | # asm 2: movl <in0=%edx,192(<out=%rdi) | ||
2621 | movl %edx,192(%rdi) | ||
2622 | |||
2623 | # qhasm: *(uint32 *) (out + 196) = in1 | ||
2624 | # asm 1: movl <in1=int64#4d,196(<out=int64#1) | ||
2625 | # asm 2: movl <in1=%ecx,196(<out=%rdi) | ||
2626 | movl %ecx,196(%rdi) | ||
2627 | |||
2628 | # qhasm: *(uint32 *) (out + 200) = in2 | ||
2629 | # asm 1: movl <in2=int64#5d,200(<out=int64#1) | ||
2630 | # asm 2: movl <in2=%r8d,200(<out=%rdi) | ||
2631 | movl %r8d,200(%rdi) | ||
2632 | |||
2633 | # qhasm: *(uint32 *) (out + 204) = in3 | ||
2634 | # asm 1: movl <in3=int64#6d,204(<out=int64#1) | ||
2635 | # asm 2: movl <in3=%r9d,204(<out=%rdi) | ||
2636 | movl %r9d,204(%rdi) | ||
2637 | |||
2638 | # qhasm: uint32323232 z4 += orig4 | ||
2639 | # asm 1: paddd <orig4=stack128#16,<z4=int6464#15 | ||
2640 | # asm 2: paddd <orig4=240(%rsp),<z4=%xmm14 | ||
2641 | paddd 240(%rsp),%xmm14 | ||
2642 | |||
2643 | # qhasm: uint32323232 z5 += orig5 | ||
2644 | # asm 1: paddd <orig5=stack128#5,<z5=int6464#1 | ||
2645 | # asm 2: paddd <orig5=64(%rsp),<z5=%xmm0 | ||
2646 | paddd 64(%rsp),%xmm0 | ||
2647 | |||
2648 | # qhasm: uint32323232 z6 += orig6 | ||
2649 | # asm 1: paddd <orig6=stack128#9,<z6=int6464#6 | ||
2650 | # asm 2: paddd <orig6=128(%rsp),<z6=%xmm5 | ||
2651 | paddd 128(%rsp),%xmm5 | ||
2652 | |||
2653 | # qhasm: uint32323232 z7 += orig7 | ||
2654 | # asm 1: paddd <orig7=stack128#13,<z7=int6464#9 | ||
2655 | # asm 2: paddd <orig7=192(%rsp),<z7=%xmm8 | ||
2656 | paddd 192(%rsp),%xmm8 | ||
2657 | |||
2658 | # qhasm: in4 = z4 | ||
2659 | # asm 1: movd <z4=int6464#15,>in4=int64#3 | ||
2660 | # asm 2: movd <z4=%xmm14,>in4=%rdx | ||
2661 | movd %xmm14,%rdx | ||
2662 | |||
2663 | # qhasm: in5 = z5 | ||
2664 | # asm 1: movd <z5=int6464#1,>in5=int64#4 | ||
2665 | # asm 2: movd <z5=%xmm0,>in5=%rcx | ||
2666 | movd %xmm0,%rcx | ||
2667 | |||
2668 | # qhasm: in6 = z6 | ||
2669 | # asm 1: movd <z6=int6464#6,>in6=int64#5 | ||
2670 | # asm 2: movd <z6=%xmm5,>in6=%r8 | ||
2671 | movd %xmm5,%r8 | ||
2672 | |||
2673 | # qhasm: in7 = z7 | ||
2674 | # asm 1: movd <z7=int6464#9,>in7=int64#6 | ||
2675 | # asm 2: movd <z7=%xmm8,>in7=%r9 | ||
2676 | movd %xmm8,%r9 | ||
2677 | |||
2678 | # qhasm: z4 <<<= 96 | ||
2679 | # asm 1: pshufd $0x39,<z4=int6464#15,<z4=int6464#15 | ||
2680 | # asm 2: pshufd $0x39,<z4=%xmm14,<z4=%xmm14 | ||
2681 | pshufd $0x39,%xmm14,%xmm14 | ||
2682 | |||
2683 | # qhasm: z5 <<<= 96 | ||
2684 | # asm 1: pshufd $0x39,<z5=int6464#1,<z5=int6464#1 | ||
2685 | # asm 2: pshufd $0x39,<z5=%xmm0,<z5=%xmm0 | ||
2686 | pshufd $0x39,%xmm0,%xmm0 | ||
2687 | |||
2688 | # qhasm: z6 <<<= 96 | ||
2689 | # asm 1: pshufd $0x39,<z6=int6464#6,<z6=int6464#6 | ||
2690 | # asm 2: pshufd $0x39,<z6=%xmm5,<z6=%xmm5 | ||
2691 | pshufd $0x39,%xmm5,%xmm5 | ||
2692 | |||
2693 | # qhasm: z7 <<<= 96 | ||
2694 | # asm 1: pshufd $0x39,<z7=int6464#9,<z7=int6464#9 | ||
2695 | # asm 2: pshufd $0x39,<z7=%xmm8,<z7=%xmm8 | ||
2696 | pshufd $0x39,%xmm8,%xmm8 | ||
2697 | |||
2698 | # qhasm: (uint32) in4 ^= *(uint32 *) (m + 16) | ||
2699 | # asm 1: xorl 16(<m=int64#2),<in4=int64#3d | ||
2700 | # asm 2: xorl 16(<m=%rsi),<in4=%edx | ||
2701 | xorl 16(%rsi),%edx | ||
2702 | |||
2703 | # qhasm: (uint32) in5 ^= *(uint32 *) (m + 20) | ||
2704 | # asm 1: xorl 20(<m=int64#2),<in5=int64#4d | ||
2705 | # asm 2: xorl 20(<m=%rsi),<in5=%ecx | ||
2706 | xorl 20(%rsi),%ecx | ||
2707 | |||
2708 | # qhasm: (uint32) in6 ^= *(uint32 *) (m + 24) | ||
2709 | # asm 1: xorl 24(<m=int64#2),<in6=int64#5d | ||
2710 | # asm 2: xorl 24(<m=%rsi),<in6=%r8d | ||
2711 | xorl 24(%rsi),%r8d | ||
2712 | |||
2713 | # qhasm: (uint32) in7 ^= *(uint32 *) (m + 28) | ||
2714 | # asm 1: xorl 28(<m=int64#2),<in7=int64#6d | ||
2715 | # asm 2: xorl 28(<m=%rsi),<in7=%r9d | ||
2716 | xorl 28(%rsi),%r9d | ||
2717 | |||
2718 | # qhasm: *(uint32 *) (out + 16) = in4 | ||
2719 | # asm 1: movl <in4=int64#3d,16(<out=int64#1) | ||
2720 | # asm 2: movl <in4=%edx,16(<out=%rdi) | ||
2721 | movl %edx,16(%rdi) | ||
2722 | |||
2723 | # qhasm: *(uint32 *) (out + 20) = in5 | ||
2724 | # asm 1: movl <in5=int64#4d,20(<out=int64#1) | ||
2725 | # asm 2: movl <in5=%ecx,20(<out=%rdi) | ||
2726 | movl %ecx,20(%rdi) | ||
2727 | |||
2728 | # qhasm: *(uint32 *) (out + 24) = in6 | ||
2729 | # asm 1: movl <in6=int64#5d,24(<out=int64#1) | ||
2730 | # asm 2: movl <in6=%r8d,24(<out=%rdi) | ||
2731 | movl %r8d,24(%rdi) | ||
2732 | |||
2733 | # qhasm: *(uint32 *) (out + 28) = in7 | ||
2734 | # asm 1: movl <in7=int64#6d,28(<out=int64#1) | ||
2735 | # asm 2: movl <in7=%r9d,28(<out=%rdi) | ||
2736 | movl %r9d,28(%rdi) | ||
2737 | |||
2738 | # qhasm: in4 = z4 | ||
2739 | # asm 1: movd <z4=int6464#15,>in4=int64#3 | ||
2740 | # asm 2: movd <z4=%xmm14,>in4=%rdx | ||
2741 | movd %xmm14,%rdx | ||
2742 | |||
2743 | # qhasm: in5 = z5 | ||
2744 | # asm 1: movd <z5=int6464#1,>in5=int64#4 | ||
2745 | # asm 2: movd <z5=%xmm0,>in5=%rcx | ||
2746 | movd %xmm0,%rcx | ||
2747 | |||
2748 | # qhasm: in6 = z6 | ||
2749 | # asm 1: movd <z6=int6464#6,>in6=int64#5 | ||
2750 | # asm 2: movd <z6=%xmm5,>in6=%r8 | ||
2751 | movd %xmm5,%r8 | ||
2752 | |||
2753 | # qhasm: in7 = z7 | ||
2754 | # asm 1: movd <z7=int6464#9,>in7=int64#6 | ||
2755 | # asm 2: movd <z7=%xmm8,>in7=%r9 | ||
2756 | movd %xmm8,%r9 | ||
2757 | |||
2758 | # qhasm: z4 <<<= 96 | ||
2759 | # asm 1: pshufd $0x39,<z4=int6464#15,<z4=int6464#15 | ||
2760 | # asm 2: pshufd $0x39,<z4=%xmm14,<z4=%xmm14 | ||
2761 | pshufd $0x39,%xmm14,%xmm14 | ||
2762 | |||
2763 | # qhasm: z5 <<<= 96 | ||
2764 | # asm 1: pshufd $0x39,<z5=int6464#1,<z5=int6464#1 | ||
2765 | # asm 2: pshufd $0x39,<z5=%xmm0,<z5=%xmm0 | ||
2766 | pshufd $0x39,%xmm0,%xmm0 | ||
2767 | |||
2768 | # qhasm: z6 <<<= 96 | ||
2769 | # asm 1: pshufd $0x39,<z6=int6464#6,<z6=int6464#6 | ||
2770 | # asm 2: pshufd $0x39,<z6=%xmm5,<z6=%xmm5 | ||
2771 | pshufd $0x39,%xmm5,%xmm5 | ||
2772 | |||
2773 | # qhasm: z7 <<<= 96 | ||
2774 | # asm 1: pshufd $0x39,<z7=int6464#9,<z7=int6464#9 | ||
2775 | # asm 2: pshufd $0x39,<z7=%xmm8,<z7=%xmm8 | ||
2776 | pshufd $0x39,%xmm8,%xmm8 | ||
2777 | |||
2778 | # qhasm: (uint32) in4 ^= *(uint32 *) (m + 80) | ||
2779 | # asm 1: xorl 80(<m=int64#2),<in4=int64#3d | ||
2780 | # asm 2: xorl 80(<m=%rsi),<in4=%edx | ||
2781 | xorl 80(%rsi),%edx | ||
2782 | |||
2783 | # qhasm: (uint32) in5 ^= *(uint32 *) (m + 84) | ||
2784 | # asm 1: xorl 84(<m=int64#2),<in5=int64#4d | ||
2785 | # asm 2: xorl 84(<m=%rsi),<in5=%ecx | ||
2786 | xorl 84(%rsi),%ecx | ||
2787 | |||
2788 | # qhasm: (uint32) in6 ^= *(uint32 *) (m + 88) | ||
2789 | # asm 1: xorl 88(<m=int64#2),<in6=int64#5d | ||
2790 | # asm 2: xorl 88(<m=%rsi),<in6=%r8d | ||
2791 | xorl 88(%rsi),%r8d | ||
2792 | |||
2793 | # qhasm: (uint32) in7 ^= *(uint32 *) (m + 92) | ||
2794 | # asm 1: xorl 92(<m=int64#2),<in7=int64#6d | ||
2795 | # asm 2: xorl 92(<m=%rsi),<in7=%r9d | ||
2796 | xorl 92(%rsi),%r9d | ||
2797 | |||
2798 | # qhasm: *(uint32 *) (out + 80) = in4 | ||
2799 | # asm 1: movl <in4=int64#3d,80(<out=int64#1) | ||
2800 | # asm 2: movl <in4=%edx,80(<out=%rdi) | ||
2801 | movl %edx,80(%rdi) | ||
2802 | |||
2803 | # qhasm: *(uint32 *) (out + 84) = in5 | ||
2804 | # asm 1: movl <in5=int64#4d,84(<out=int64#1) | ||
2805 | # asm 2: movl <in5=%ecx,84(<out=%rdi) | ||
2806 | movl %ecx,84(%rdi) | ||
2807 | |||
2808 | # qhasm: *(uint32 *) (out + 88) = in6 | ||
2809 | # asm 1: movl <in6=int64#5d,88(<out=int64#1) | ||
2810 | # asm 2: movl <in6=%r8d,88(<out=%rdi) | ||
2811 | movl %r8d,88(%rdi) | ||
2812 | |||
2813 | # qhasm: *(uint32 *) (out + 92) = in7 | ||
2814 | # asm 1: movl <in7=int64#6d,92(<out=int64#1) | ||
2815 | # asm 2: movl <in7=%r9d,92(<out=%rdi) | ||
2816 | movl %r9d,92(%rdi) | ||
2817 | |||
2818 | # qhasm: in4 = z4 | ||
2819 | # asm 1: movd <z4=int6464#15,>in4=int64#3 | ||
2820 | # asm 2: movd <z4=%xmm14,>in4=%rdx | ||
2821 | movd %xmm14,%rdx | ||
2822 | |||
2823 | # qhasm: in5 = z5 | ||
2824 | # asm 1: movd <z5=int6464#1,>in5=int64#4 | ||
2825 | # asm 2: movd <z5=%xmm0,>in5=%rcx | ||
2826 | movd %xmm0,%rcx | ||
2827 | |||
2828 | # qhasm: in6 = z6 | ||
2829 | # asm 1: movd <z6=int6464#6,>in6=int64#5 | ||
2830 | # asm 2: movd <z6=%xmm5,>in6=%r8 | ||
2831 | movd %xmm5,%r8 | ||
2832 | |||
2833 | # qhasm: in7 = z7 | ||
2834 | # asm 1: movd <z7=int6464#9,>in7=int64#6 | ||
2835 | # asm 2: movd <z7=%xmm8,>in7=%r9 | ||
2836 | movd %xmm8,%r9 | ||
2837 | |||
2838 | # qhasm: z4 <<<= 96 | ||
2839 | # asm 1: pshufd $0x39,<z4=int6464#15,<z4=int6464#15 | ||
2840 | # asm 2: pshufd $0x39,<z4=%xmm14,<z4=%xmm14 | ||
2841 | pshufd $0x39,%xmm14,%xmm14 | ||
2842 | |||
2843 | # qhasm: z5 <<<= 96 | ||
2844 | # asm 1: pshufd $0x39,<z5=int6464#1,<z5=int6464#1 | ||
2845 | # asm 2: pshufd $0x39,<z5=%xmm0,<z5=%xmm0 | ||
2846 | pshufd $0x39,%xmm0,%xmm0 | ||
2847 | |||
2848 | # qhasm: z6 <<<= 96 | ||
2849 | # asm 1: pshufd $0x39,<z6=int6464#6,<z6=int6464#6 | ||
2850 | # asm 2: pshufd $0x39,<z6=%xmm5,<z6=%xmm5 | ||
2851 | pshufd $0x39,%xmm5,%xmm5 | ||
2852 | |||
2853 | # qhasm: z7 <<<= 96 | ||
2854 | # asm 1: pshufd $0x39,<z7=int6464#9,<z7=int6464#9 | ||
2855 | # asm 2: pshufd $0x39,<z7=%xmm8,<z7=%xmm8 | ||
2856 | pshufd $0x39,%xmm8,%xmm8 | ||
2857 | |||
2858 | # qhasm: (uint32) in4 ^= *(uint32 *) (m + 144) | ||
2859 | # asm 1: xorl 144(<m=int64#2),<in4=int64#3d | ||
2860 | # asm 2: xorl 144(<m=%rsi),<in4=%edx | ||
2861 | xorl 144(%rsi),%edx | ||
2862 | |||
2863 | # qhasm: (uint32) in5 ^= *(uint32 *) (m + 148) | ||
2864 | # asm 1: xorl 148(<m=int64#2),<in5=int64#4d | ||
2865 | # asm 2: xorl 148(<m=%rsi),<in5=%ecx | ||
2866 | xorl 148(%rsi),%ecx | ||
2867 | |||
2868 | # qhasm: (uint32) in6 ^= *(uint32 *) (m + 152) | ||
2869 | # asm 1: xorl 152(<m=int64#2),<in6=int64#5d | ||
2870 | # asm 2: xorl 152(<m=%rsi),<in6=%r8d | ||
2871 | xorl 152(%rsi),%r8d | ||
2872 | |||
2873 | # qhasm: (uint32) in7 ^= *(uint32 *) (m + 156) | ||
2874 | # asm 1: xorl 156(<m=int64#2),<in7=int64#6d | ||
2875 | # asm 2: xorl 156(<m=%rsi),<in7=%r9d | ||
2876 | xorl 156(%rsi),%r9d | ||
2877 | |||
2878 | # qhasm: *(uint32 *) (out + 144) = in4 | ||
2879 | # asm 1: movl <in4=int64#3d,144(<out=int64#1) | ||
2880 | # asm 2: movl <in4=%edx,144(<out=%rdi) | ||
2881 | movl %edx,144(%rdi) | ||
2882 | |||
2883 | # qhasm: *(uint32 *) (out + 148) = in5 | ||
2884 | # asm 1: movl <in5=int64#4d,148(<out=int64#1) | ||
2885 | # asm 2: movl <in5=%ecx,148(<out=%rdi) | ||
2886 | movl %ecx,148(%rdi) | ||
2887 | |||
2888 | # qhasm: *(uint32 *) (out + 152) = in6 | ||
2889 | # asm 1: movl <in6=int64#5d,152(<out=int64#1) | ||
2890 | # asm 2: movl <in6=%r8d,152(<out=%rdi) | ||
2891 | movl %r8d,152(%rdi) | ||
2892 | |||
2893 | # qhasm: *(uint32 *) (out + 156) = in7 | ||
2894 | # asm 1: movl <in7=int64#6d,156(<out=int64#1) | ||
2895 | # asm 2: movl <in7=%r9d,156(<out=%rdi) | ||
2896 | movl %r9d,156(%rdi) | ||
2897 | |||
2898 | # qhasm: in4 = z4 | ||
2899 | # asm 1: movd <z4=int6464#15,>in4=int64#3 | ||
2900 | # asm 2: movd <z4=%xmm14,>in4=%rdx | ||
2901 | movd %xmm14,%rdx | ||
2902 | |||
2903 | # qhasm: in5 = z5 | ||
2904 | # asm 1: movd <z5=int6464#1,>in5=int64#4 | ||
2905 | # asm 2: movd <z5=%xmm0,>in5=%rcx | ||
2906 | movd %xmm0,%rcx | ||
2907 | |||
2908 | # qhasm: in6 = z6 | ||
2909 | # asm 1: movd <z6=int6464#6,>in6=int64#5 | ||
2910 | # asm 2: movd <z6=%xmm5,>in6=%r8 | ||
2911 | movd %xmm5,%r8 | ||
2912 | |||
2913 | # qhasm: in7 = z7 | ||
2914 | # asm 1: movd <z7=int6464#9,>in7=int64#6 | ||
2915 | # asm 2: movd <z7=%xmm8,>in7=%r9 | ||
2916 | movd %xmm8,%r9 | ||
2917 | |||
2918 | # qhasm: (uint32) in4 ^= *(uint32 *) (m + 208) | ||
2919 | # asm 1: xorl 208(<m=int64#2),<in4=int64#3d | ||
2920 | # asm 2: xorl 208(<m=%rsi),<in4=%edx | ||
2921 | xorl 208(%rsi),%edx | ||
2922 | |||
2923 | # qhasm: (uint32) in5 ^= *(uint32 *) (m + 212) | ||
2924 | # asm 1: xorl 212(<m=int64#2),<in5=int64#4d | ||
2925 | # asm 2: xorl 212(<m=%rsi),<in5=%ecx | ||
2926 | xorl 212(%rsi),%ecx | ||
2927 | |||
2928 | # qhasm: (uint32) in6 ^= *(uint32 *) (m + 216) | ||
2929 | # asm 1: xorl 216(<m=int64#2),<in6=int64#5d | ||
2930 | # asm 2: xorl 216(<m=%rsi),<in6=%r8d | ||
2931 | xorl 216(%rsi),%r8d | ||
2932 | |||
2933 | # qhasm: (uint32) in7 ^= *(uint32 *) (m + 220) | ||
2934 | # asm 1: xorl 220(<m=int64#2),<in7=int64#6d | ||
2935 | # asm 2: xorl 220(<m=%rsi),<in7=%r9d | ||
2936 | xorl 220(%rsi),%r9d | ||
2937 | |||
2938 | # qhasm: *(uint32 *) (out + 208) = in4 | ||
2939 | # asm 1: movl <in4=int64#3d,208(<out=int64#1) | ||
2940 | # asm 2: movl <in4=%edx,208(<out=%rdi) | ||
2941 | movl %edx,208(%rdi) | ||
2942 | |||
2943 | # qhasm: *(uint32 *) (out + 212) = in5 | ||
2944 | # asm 1: movl <in5=int64#4d,212(<out=int64#1) | ||
2945 | # asm 2: movl <in5=%ecx,212(<out=%rdi) | ||
2946 | movl %ecx,212(%rdi) | ||
2947 | |||
2948 | # qhasm: *(uint32 *) (out + 216) = in6 | ||
2949 | # asm 1: movl <in6=int64#5d,216(<out=int64#1) | ||
2950 | # asm 2: movl <in6=%r8d,216(<out=%rdi) | ||
2951 | movl %r8d,216(%rdi) | ||
2952 | |||
2953 | # qhasm: *(uint32 *) (out + 220) = in7 | ||
2954 | # asm 1: movl <in7=int64#6d,220(<out=int64#1) | ||
2955 | # asm 2: movl <in7=%r9d,220(<out=%rdi) | ||
2956 | movl %r9d,220(%rdi) | ||
2957 | |||
2958 | # qhasm: uint32323232 z8 += orig8 | ||
2959 | # asm 1: paddd <orig8=stack128#19,<z8=int6464#16 | ||
2960 | # asm 2: paddd <orig8=288(%rsp),<z8=%xmm15 | ||
2961 | paddd 288(%rsp),%xmm15 | ||
2962 | |||
2963 | # qhasm: uint32323232 z9 += orig9 | ||
2964 | # asm 1: paddd <orig9=stack128#20,<z9=int6464#12 | ||
2965 | # asm 2: paddd <orig9=304(%rsp),<z9=%xmm11 | ||
2966 | paddd 304(%rsp),%xmm11 | ||
2967 | |||
2968 | # qhasm: uint32323232 z10 += orig10 | ||
2969 | # asm 1: paddd <orig10=stack128#6,<z10=int6464#2 | ||
2970 | # asm 2: paddd <orig10=80(%rsp),<z10=%xmm1 | ||
2971 | paddd 80(%rsp),%xmm1 | ||
2972 | |||
2973 | # qhasm: uint32323232 z11 += orig11 | ||
2974 | # asm 1: paddd <orig11=stack128#10,<z11=int6464#7 | ||
2975 | # asm 2: paddd <orig11=144(%rsp),<z11=%xmm6 | ||
2976 | paddd 144(%rsp),%xmm6 | ||
2977 | |||
2978 | # qhasm: in8 = z8 | ||
2979 | # asm 1: movd <z8=int6464#16,>in8=int64#3 | ||
2980 | # asm 2: movd <z8=%xmm15,>in8=%rdx | ||
2981 | movd %xmm15,%rdx | ||
2982 | |||
2983 | # qhasm: in9 = z9 | ||
2984 | # asm 1: movd <z9=int6464#12,>in9=int64#4 | ||
2985 | # asm 2: movd <z9=%xmm11,>in9=%rcx | ||
2986 | movd %xmm11,%rcx | ||
2987 | |||
2988 | # qhasm: in10 = z10 | ||
2989 | # asm 1: movd <z10=int6464#2,>in10=int64#5 | ||
2990 | # asm 2: movd <z10=%xmm1,>in10=%r8 | ||
2991 | movd %xmm1,%r8 | ||
2992 | |||
2993 | # qhasm: in11 = z11 | ||
2994 | # asm 1: movd <z11=int6464#7,>in11=int64#6 | ||
2995 | # asm 2: movd <z11=%xmm6,>in11=%r9 | ||
2996 | movd %xmm6,%r9 | ||
2997 | |||
2998 | # qhasm: z8 <<<= 96 | ||
2999 | # asm 1: pshufd $0x39,<z8=int6464#16,<z8=int6464#16 | ||
3000 | # asm 2: pshufd $0x39,<z8=%xmm15,<z8=%xmm15 | ||
3001 | pshufd $0x39,%xmm15,%xmm15 | ||
3002 | |||
3003 | # qhasm: z9 <<<= 96 | ||
3004 | # asm 1: pshufd $0x39,<z9=int6464#12,<z9=int6464#12 | ||
3005 | # asm 2: pshufd $0x39,<z9=%xmm11,<z9=%xmm11 | ||
3006 | pshufd $0x39,%xmm11,%xmm11 | ||
3007 | |||
3008 | # qhasm: z10 <<<= 96 | ||
3009 | # asm 1: pshufd $0x39,<z10=int6464#2,<z10=int6464#2 | ||
3010 | # asm 2: pshufd $0x39,<z10=%xmm1,<z10=%xmm1 | ||
3011 | pshufd $0x39,%xmm1,%xmm1 | ||
3012 | |||
3013 | # qhasm: z11 <<<= 96 | ||
3014 | # asm 1: pshufd $0x39,<z11=int6464#7,<z11=int6464#7 | ||
3015 | # asm 2: pshufd $0x39,<z11=%xmm6,<z11=%xmm6 | ||
3016 | pshufd $0x39,%xmm6,%xmm6 | ||
3017 | |||
3018 | # qhasm: (uint32) in8 ^= *(uint32 *) (m + 32) | ||
3019 | # asm 1: xorl 32(<m=int64#2),<in8=int64#3d | ||
3020 | # asm 2: xorl 32(<m=%rsi),<in8=%edx | ||
3021 | xorl 32(%rsi),%edx | ||
3022 | |||
3023 | # qhasm: (uint32) in9 ^= *(uint32 *) (m + 36) | ||
3024 | # asm 1: xorl 36(<m=int64#2),<in9=int64#4d | ||
3025 | # asm 2: xorl 36(<m=%rsi),<in9=%ecx | ||
3026 | xorl 36(%rsi),%ecx | ||
3027 | |||
3028 | # qhasm: (uint32) in10 ^= *(uint32 *) (m + 40) | ||
3029 | # asm 1: xorl 40(<m=int64#2),<in10=int64#5d | ||
3030 | # asm 2: xorl 40(<m=%rsi),<in10=%r8d | ||
3031 | xorl 40(%rsi),%r8d | ||
3032 | |||
3033 | # qhasm: (uint32) in11 ^= *(uint32 *) (m + 44) | ||
3034 | # asm 1: xorl 44(<m=int64#2),<in11=int64#6d | ||
3035 | # asm 2: xorl 44(<m=%rsi),<in11=%r9d | ||
3036 | xorl 44(%rsi),%r9d | ||
3037 | |||
3038 | # qhasm: *(uint32 *) (out + 32) = in8 | ||
3039 | # asm 1: movl <in8=int64#3d,32(<out=int64#1) | ||
3040 | # asm 2: movl <in8=%edx,32(<out=%rdi) | ||
3041 | movl %edx,32(%rdi) | ||
3042 | |||
3043 | # qhasm: *(uint32 *) (out + 36) = in9 | ||
3044 | # asm 1: movl <in9=int64#4d,36(<out=int64#1) | ||
3045 | # asm 2: movl <in9=%ecx,36(<out=%rdi) | ||
3046 | movl %ecx,36(%rdi) | ||
3047 | |||
3048 | # qhasm: *(uint32 *) (out + 40) = in10 | ||
3049 | # asm 1: movl <in10=int64#5d,40(<out=int64#1) | ||
3050 | # asm 2: movl <in10=%r8d,40(<out=%rdi) | ||
3051 | movl %r8d,40(%rdi) | ||
3052 | |||
3053 | # qhasm: *(uint32 *) (out + 44) = in11 | ||
3054 | # asm 1: movl <in11=int64#6d,44(<out=int64#1) | ||
3055 | # asm 2: movl <in11=%r9d,44(<out=%rdi) | ||
3056 | movl %r9d,44(%rdi) | ||
3057 | |||
3058 | # qhasm: in8 = z8 | ||
3059 | # asm 1: movd <z8=int6464#16,>in8=int64#3 | ||
3060 | # asm 2: movd <z8=%xmm15,>in8=%rdx | ||
3061 | movd %xmm15,%rdx | ||
3062 | |||
3063 | # qhasm: in9 = z9 | ||
3064 | # asm 1: movd <z9=int6464#12,>in9=int64#4 | ||
3065 | # asm 2: movd <z9=%xmm11,>in9=%rcx | ||
3066 | movd %xmm11,%rcx | ||
3067 | |||
3068 | # qhasm: in10 = z10 | ||
3069 | # asm 1: movd <z10=int6464#2,>in10=int64#5 | ||
3070 | # asm 2: movd <z10=%xmm1,>in10=%r8 | ||
3071 | movd %xmm1,%r8 | ||
3072 | |||
3073 | # qhasm: in11 = z11 | ||
3074 | # asm 1: movd <z11=int6464#7,>in11=int64#6 | ||
3075 | # asm 2: movd <z11=%xmm6,>in11=%r9 | ||
3076 | movd %xmm6,%r9 | ||
3077 | |||
3078 | # qhasm: z8 <<<= 96 | ||
3079 | # asm 1: pshufd $0x39,<z8=int6464#16,<z8=int6464#16 | ||
3080 | # asm 2: pshufd $0x39,<z8=%xmm15,<z8=%xmm15 | ||
3081 | pshufd $0x39,%xmm15,%xmm15 | ||
3082 | |||
3083 | # qhasm: z9 <<<= 96 | ||
3084 | # asm 1: pshufd $0x39,<z9=int6464#12,<z9=int6464#12 | ||
3085 | # asm 2: pshufd $0x39,<z9=%xmm11,<z9=%xmm11 | ||
3086 | pshufd $0x39,%xmm11,%xmm11 | ||
3087 | |||
3088 | # qhasm: z10 <<<= 96 | ||
3089 | # asm 1: pshufd $0x39,<z10=int6464#2,<z10=int6464#2 | ||
3090 | # asm 2: pshufd $0x39,<z10=%xmm1,<z10=%xmm1 | ||
3091 | pshufd $0x39,%xmm1,%xmm1 | ||
3092 | |||
3093 | # qhasm: z11 <<<= 96 | ||
3094 | # asm 1: pshufd $0x39,<z11=int6464#7,<z11=int6464#7 | ||
3095 | # asm 2: pshufd $0x39,<z11=%xmm6,<z11=%xmm6 | ||
3096 | pshufd $0x39,%xmm6,%xmm6 | ||
3097 | |||
3098 | # qhasm: (uint32) in8 ^= *(uint32 *) (m + 96) | ||
3099 | # asm 1: xorl 96(<m=int64#2),<in8=int64#3d | ||
3100 | # asm 2: xorl 96(<m=%rsi),<in8=%edx | ||
3101 | xorl 96(%rsi),%edx | ||
3102 | |||
3103 | # qhasm: (uint32) in9 ^= *(uint32 *) (m + 100) | ||
3104 | # asm 1: xorl 100(<m=int64#2),<in9=int64#4d | ||
3105 | # asm 2: xorl 100(<m=%rsi),<in9=%ecx | ||
3106 | xorl 100(%rsi),%ecx | ||
3107 | |||
3108 | # qhasm: (uint32) in10 ^= *(uint32 *) (m + 104) | ||
3109 | # asm 1: xorl 104(<m=int64#2),<in10=int64#5d | ||
3110 | # asm 2: xorl 104(<m=%rsi),<in10=%r8d | ||
3111 | xorl 104(%rsi),%r8d | ||
3112 | |||
3113 | # qhasm: (uint32) in11 ^= *(uint32 *) (m + 108) | ||
3114 | # asm 1: xorl 108(<m=int64#2),<in11=int64#6d | ||
3115 | # asm 2: xorl 108(<m=%rsi),<in11=%r9d | ||
3116 | xorl 108(%rsi),%r9d | ||
3117 | |||
3118 | # qhasm: *(uint32 *) (out + 96) = in8 | ||
3119 | # asm 1: movl <in8=int64#3d,96(<out=int64#1) | ||
3120 | # asm 2: movl <in8=%edx,96(<out=%rdi) | ||
3121 | movl %edx,96(%rdi) | ||
3122 | |||
3123 | # qhasm: *(uint32 *) (out + 100) = in9 | ||
3124 | # asm 1: movl <in9=int64#4d,100(<out=int64#1) | ||
3125 | # asm 2: movl <in9=%ecx,100(<out=%rdi) | ||
3126 | movl %ecx,100(%rdi) | ||
3127 | |||
3128 | # qhasm: *(uint32 *) (out + 104) = in10 | ||
3129 | # asm 1: movl <in10=int64#5d,104(<out=int64#1) | ||
3130 | # asm 2: movl <in10=%r8d,104(<out=%rdi) | ||
3131 | movl %r8d,104(%rdi) | ||
3132 | |||
3133 | # qhasm: *(uint32 *) (out + 108) = in11 | ||
3134 | # asm 1: movl <in11=int64#6d,108(<out=int64#1) | ||
3135 | # asm 2: movl <in11=%r9d,108(<out=%rdi) | ||
3136 | movl %r9d,108(%rdi) | ||
3137 | |||
3138 | # qhasm: in8 = z8 | ||
3139 | # asm 1: movd <z8=int6464#16,>in8=int64#3 | ||
3140 | # asm 2: movd <z8=%xmm15,>in8=%rdx | ||
3141 | movd %xmm15,%rdx | ||
3142 | |||
3143 | # qhasm: in9 = z9 | ||
3144 | # asm 1: movd <z9=int6464#12,>in9=int64#4 | ||
3145 | # asm 2: movd <z9=%xmm11,>in9=%rcx | ||
3146 | movd %xmm11,%rcx | ||
3147 | |||
3148 | # qhasm: in10 = z10 | ||
3149 | # asm 1: movd <z10=int6464#2,>in10=int64#5 | ||
3150 | # asm 2: movd <z10=%xmm1,>in10=%r8 | ||
3151 | movd %xmm1,%r8 | ||
3152 | |||
3153 | # qhasm: in11 = z11 | ||
3154 | # asm 1: movd <z11=int6464#7,>in11=int64#6 | ||
3155 | # asm 2: movd <z11=%xmm6,>in11=%r9 | ||
3156 | movd %xmm6,%r9 | ||
3157 | |||
3158 | # qhasm: z8 <<<= 96 | ||
3159 | # asm 1: pshufd $0x39,<z8=int6464#16,<z8=int6464#16 | ||
3160 | # asm 2: pshufd $0x39,<z8=%xmm15,<z8=%xmm15 | ||
3161 | pshufd $0x39,%xmm15,%xmm15 | ||
3162 | |||
3163 | # qhasm: z9 <<<= 96 | ||
3164 | # asm 1: pshufd $0x39,<z9=int6464#12,<z9=int6464#12 | ||
3165 | # asm 2: pshufd $0x39,<z9=%xmm11,<z9=%xmm11 | ||
3166 | pshufd $0x39,%xmm11,%xmm11 | ||
3167 | |||
3168 | # qhasm: z10 <<<= 96 | ||
3169 | # asm 1: pshufd $0x39,<z10=int6464#2,<z10=int6464#2 | ||
3170 | # asm 2: pshufd $0x39,<z10=%xmm1,<z10=%xmm1 | ||
3171 | pshufd $0x39,%xmm1,%xmm1 | ||
3172 | |||
3173 | # qhasm: z11 <<<= 96 | ||
3174 | # asm 1: pshufd $0x39,<z11=int6464#7,<z11=int6464#7 | ||
3175 | # asm 2: pshufd $0x39,<z11=%xmm6,<z11=%xmm6 | ||
3176 | pshufd $0x39,%xmm6,%xmm6 | ||
3177 | |||
3178 | # qhasm: (uint32) in8 ^= *(uint32 *) (m + 160) | ||
3179 | # asm 1: xorl 160(<m=int64#2),<in8=int64#3d | ||
3180 | # asm 2: xorl 160(<m=%rsi),<in8=%edx | ||
3181 | xorl 160(%rsi),%edx | ||
3182 | |||
3183 | # qhasm: (uint32) in9 ^= *(uint32 *) (m + 164) | ||
3184 | # asm 1: xorl 164(<m=int64#2),<in9=int64#4d | ||
3185 | # asm 2: xorl 164(<m=%rsi),<in9=%ecx | ||
3186 | xorl 164(%rsi),%ecx | ||
3187 | |||
3188 | # qhasm: (uint32) in10 ^= *(uint32 *) (m + 168) | ||
3189 | # asm 1: xorl 168(<m=int64#2),<in10=int64#5d | ||
3190 | # asm 2: xorl 168(<m=%rsi),<in10=%r8d | ||
3191 | xorl 168(%rsi),%r8d | ||
3192 | |||
3193 | # qhasm: (uint32) in11 ^= *(uint32 *) (m + 172) | ||
3194 | # asm 1: xorl 172(<m=int64#2),<in11=int64#6d | ||
3195 | # asm 2: xorl 172(<m=%rsi),<in11=%r9d | ||
3196 | xorl 172(%rsi),%r9d | ||
3197 | |||
3198 | # qhasm: *(uint32 *) (out + 160) = in8 | ||
3199 | # asm 1: movl <in8=int64#3d,160(<out=int64#1) | ||
3200 | # asm 2: movl <in8=%edx,160(<out=%rdi) | ||
3201 | movl %edx,160(%rdi) | ||
3202 | |||
3203 | # qhasm: *(uint32 *) (out + 164) = in9 | ||
3204 | # asm 1: movl <in9=int64#4d,164(<out=int64#1) | ||
3205 | # asm 2: movl <in9=%ecx,164(<out=%rdi) | ||
3206 | movl %ecx,164(%rdi) | ||
3207 | |||
3208 | # qhasm: *(uint32 *) (out + 168) = in10 | ||
3209 | # asm 1: movl <in10=int64#5d,168(<out=int64#1) | ||
3210 | # asm 2: movl <in10=%r8d,168(<out=%rdi) | ||
3211 | movl %r8d,168(%rdi) | ||
3212 | |||
3213 | # qhasm: *(uint32 *) (out + 172) = in11 | ||
3214 | # asm 1: movl <in11=int64#6d,172(<out=int64#1) | ||
3215 | # asm 2: movl <in11=%r9d,172(<out=%rdi) | ||
3216 | movl %r9d,172(%rdi) | ||
3217 | |||
3218 | # qhasm: in8 = z8 | ||
3219 | # asm 1: movd <z8=int6464#16,>in8=int64#3 | ||
3220 | # asm 2: movd <z8=%xmm15,>in8=%rdx | ||
3221 | movd %xmm15,%rdx | ||
3222 | |||
3223 | # qhasm: in9 = z9 | ||
3224 | # asm 1: movd <z9=int6464#12,>in9=int64#4 | ||
3225 | # asm 2: movd <z9=%xmm11,>in9=%rcx | ||
3226 | movd %xmm11,%rcx | ||
3227 | |||
3228 | # qhasm: in10 = z10 | ||
3229 | # asm 1: movd <z10=int6464#2,>in10=int64#5 | ||
3230 | # asm 2: movd <z10=%xmm1,>in10=%r8 | ||
3231 | movd %xmm1,%r8 | ||
3232 | |||
3233 | # qhasm: in11 = z11 | ||
3234 | # asm 1: movd <z11=int6464#7,>in11=int64#6 | ||
3235 | # asm 2: movd <z11=%xmm6,>in11=%r9 | ||
3236 | movd %xmm6,%r9 | ||
3237 | |||
3238 | # qhasm: (uint32) in8 ^= *(uint32 *) (m + 224) | ||
3239 | # asm 1: xorl 224(<m=int64#2),<in8=int64#3d | ||
3240 | # asm 2: xorl 224(<m=%rsi),<in8=%edx | ||
3241 | xorl 224(%rsi),%edx | ||
3242 | |||
3243 | # qhasm: (uint32) in9 ^= *(uint32 *) (m + 228) | ||
3244 | # asm 1: xorl 228(<m=int64#2),<in9=int64#4d | ||
3245 | # asm 2: xorl 228(<m=%rsi),<in9=%ecx | ||
3246 | xorl 228(%rsi),%ecx | ||
3247 | |||
3248 | # qhasm: (uint32) in10 ^= *(uint32 *) (m + 232) | ||
3249 | # asm 1: xorl 232(<m=int64#2),<in10=int64#5d | ||
3250 | # asm 2: xorl 232(<m=%rsi),<in10=%r8d | ||
3251 | xorl 232(%rsi),%r8d | ||
3252 | |||
3253 | # qhasm: (uint32) in11 ^= *(uint32 *) (m + 236) | ||
3254 | # asm 1: xorl 236(<m=int64#2),<in11=int64#6d | ||
3255 | # asm 2: xorl 236(<m=%rsi),<in11=%r9d | ||
3256 | xorl 236(%rsi),%r9d | ||
3257 | |||
3258 | # qhasm: *(uint32 *) (out + 224) = in8 | ||
3259 | # asm 1: movl <in8=int64#3d,224(<out=int64#1) | ||
3260 | # asm 2: movl <in8=%edx,224(<out=%rdi) | ||
3261 | movl %edx,224(%rdi) | ||
3262 | |||
3263 | # qhasm: *(uint32 *) (out + 228) = in9 | ||
3264 | # asm 1: movl <in9=int64#4d,228(<out=int64#1) | ||
3265 | # asm 2: movl <in9=%ecx,228(<out=%rdi) | ||
3266 | movl %ecx,228(%rdi) | ||
3267 | |||
3268 | # qhasm: *(uint32 *) (out + 232) = in10 | ||
3269 | # asm 1: movl <in10=int64#5d,232(<out=int64#1) | ||
3270 | # asm 2: movl <in10=%r8d,232(<out=%rdi) | ||
3271 | movl %r8d,232(%rdi) | ||
3272 | |||
3273 | # qhasm: *(uint32 *) (out + 236) = in11 | ||
3274 | # asm 1: movl <in11=int64#6d,236(<out=int64#1) | ||
3275 | # asm 2: movl <in11=%r9d,236(<out=%rdi) | ||
3276 | movl %r9d,236(%rdi) | ||
3277 | |||
3278 | # qhasm: uint32323232 z12 += orig12 | ||
3279 | # asm 1: paddd <orig12=stack128#11,<z12=int6464#14 | ||
3280 | # asm 2: paddd <orig12=160(%rsp),<z12=%xmm13 | ||
3281 | paddd 160(%rsp),%xmm13 | ||
3282 | |||
3283 | # qhasm: uint32323232 z13 += orig13 | ||
3284 | # asm 1: paddd <orig13=stack128#14,<z13=int6464#10 | ||
3285 | # asm 2: paddd <orig13=208(%rsp),<z13=%xmm9 | ||
3286 | paddd 208(%rsp),%xmm9 | ||
3287 | |||
3288 | # qhasm: uint32323232 z14 += orig14 | ||
3289 | # asm 1: paddd <orig14=stack128#17,<z14=int6464#4 | ||
3290 | # asm 2: paddd <orig14=256(%rsp),<z14=%xmm3 | ||
3291 | paddd 256(%rsp),%xmm3 | ||
3292 | |||
3293 | # qhasm: uint32323232 z15 += orig15 | ||
3294 | # asm 1: paddd <orig15=stack128#7,<z15=int6464#3 | ||
3295 | # asm 2: paddd <orig15=96(%rsp),<z15=%xmm2 | ||
3296 | paddd 96(%rsp),%xmm2 | ||
3297 | |||
3298 | # qhasm: in12 = z12 | ||
3299 | # asm 1: movd <z12=int6464#14,>in12=int64#3 | ||
3300 | # asm 2: movd <z12=%xmm13,>in12=%rdx | ||
3301 | movd %xmm13,%rdx | ||
3302 | |||
3303 | # qhasm: in13 = z13 | ||
3304 | # asm 1: movd <z13=int6464#10,>in13=int64#4 | ||
3305 | # asm 2: movd <z13=%xmm9,>in13=%rcx | ||
3306 | movd %xmm9,%rcx | ||
3307 | |||
3308 | # qhasm: in14 = z14 | ||
3309 | # asm 1: movd <z14=int6464#4,>in14=int64#5 | ||
3310 | # asm 2: movd <z14=%xmm3,>in14=%r8 | ||
3311 | movd %xmm3,%r8 | ||
3312 | |||
3313 | # qhasm: in15 = z15 | ||
3314 | # asm 1: movd <z15=int6464#3,>in15=int64#6 | ||
3315 | # asm 2: movd <z15=%xmm2,>in15=%r9 | ||
3316 | movd %xmm2,%r9 | ||
3317 | |||
3318 | # qhasm: z12 <<<= 96 | ||
3319 | # asm 1: pshufd $0x39,<z12=int6464#14,<z12=int6464#14 | ||
3320 | # asm 2: pshufd $0x39,<z12=%xmm13,<z12=%xmm13 | ||
3321 | pshufd $0x39,%xmm13,%xmm13 | ||
3322 | |||
3323 | # qhasm: z13 <<<= 96 | ||
3324 | # asm 1: pshufd $0x39,<z13=int6464#10,<z13=int6464#10 | ||
3325 | # asm 2: pshufd $0x39,<z13=%xmm9,<z13=%xmm9 | ||
3326 | pshufd $0x39,%xmm9,%xmm9 | ||
3327 | |||
3328 | # qhasm: z14 <<<= 96 | ||
3329 | # asm 1: pshufd $0x39,<z14=int6464#4,<z14=int6464#4 | ||
3330 | # asm 2: pshufd $0x39,<z14=%xmm3,<z14=%xmm3 | ||
3331 | pshufd $0x39,%xmm3,%xmm3 | ||
3332 | |||
3333 | # qhasm: z15 <<<= 96 | ||
3334 | # asm 1: pshufd $0x39,<z15=int6464#3,<z15=int6464#3 | ||
3335 | # asm 2: pshufd $0x39,<z15=%xmm2,<z15=%xmm2 | ||
3336 | pshufd $0x39,%xmm2,%xmm2 | ||
3337 | |||
3338 | # qhasm: (uint32) in12 ^= *(uint32 *) (m + 48) | ||
3339 | # asm 1: xorl 48(<m=int64#2),<in12=int64#3d | ||
3340 | # asm 2: xorl 48(<m=%rsi),<in12=%edx | ||
3341 | xorl 48(%rsi),%edx | ||
3342 | |||
3343 | # qhasm: (uint32) in13 ^= *(uint32 *) (m + 52) | ||
3344 | # asm 1: xorl 52(<m=int64#2),<in13=int64#4d | ||
3345 | # asm 2: xorl 52(<m=%rsi),<in13=%ecx | ||
3346 | xorl 52(%rsi),%ecx | ||
3347 | |||
3348 | # qhasm: (uint32) in14 ^= *(uint32 *) (m + 56) | ||
3349 | # asm 1: xorl 56(<m=int64#2),<in14=int64#5d | ||
3350 | # asm 2: xorl 56(<m=%rsi),<in14=%r8d | ||
3351 | xorl 56(%rsi),%r8d | ||
3352 | |||
3353 | # qhasm: (uint32) in15 ^= *(uint32 *) (m + 60) | ||
3354 | # asm 1: xorl 60(<m=int64#2),<in15=int64#6d | ||
3355 | # asm 2: xorl 60(<m=%rsi),<in15=%r9d | ||
3356 | xorl 60(%rsi),%r9d | ||
3357 | |||
3358 | # qhasm: *(uint32 *) (out + 48) = in12 | ||
3359 | # asm 1: movl <in12=int64#3d,48(<out=int64#1) | ||
3360 | # asm 2: movl <in12=%edx,48(<out=%rdi) | ||
3361 | movl %edx,48(%rdi) | ||
3362 | |||
3363 | # qhasm: *(uint32 *) (out + 52) = in13 | ||
3364 | # asm 1: movl <in13=int64#4d,52(<out=int64#1) | ||
3365 | # asm 2: movl <in13=%ecx,52(<out=%rdi) | ||
3366 | movl %ecx,52(%rdi) | ||
3367 | |||
3368 | # qhasm: *(uint32 *) (out + 56) = in14 | ||
3369 | # asm 1: movl <in14=int64#5d,56(<out=int64#1) | ||
3370 | # asm 2: movl <in14=%r8d,56(<out=%rdi) | ||
3371 | movl %r8d,56(%rdi) | ||
3372 | |||
3373 | # qhasm: *(uint32 *) (out + 60) = in15 | ||
3374 | # asm 1: movl <in15=int64#6d,60(<out=int64#1) | ||
3375 | # asm 2: movl <in15=%r9d,60(<out=%rdi) | ||
3376 | movl %r9d,60(%rdi) | ||
3377 | |||
3378 | # qhasm: in12 = z12 | ||
3379 | # asm 1: movd <z12=int6464#14,>in12=int64#3 | ||
3380 | # asm 2: movd <z12=%xmm13,>in12=%rdx | ||
3381 | movd %xmm13,%rdx | ||
3382 | |||
3383 | # qhasm: in13 = z13 | ||
3384 | # asm 1: movd <z13=int6464#10,>in13=int64#4 | ||
3385 | # asm 2: movd <z13=%xmm9,>in13=%rcx | ||
3386 | movd %xmm9,%rcx | ||
3387 | |||
3388 | # qhasm: in14 = z14 | ||
3389 | # asm 1: movd <z14=int6464#4,>in14=int64#5 | ||
3390 | # asm 2: movd <z14=%xmm3,>in14=%r8 | ||
3391 | movd %xmm3,%r8 | ||
3392 | |||
3393 | # qhasm: in15 = z15 | ||
3394 | # asm 1: movd <z15=int6464#3,>in15=int64#6 | ||
3395 | # asm 2: movd <z15=%xmm2,>in15=%r9 | ||
3396 | movd %xmm2,%r9 | ||
3397 | |||
3398 | # qhasm: z12 <<<= 96 | ||
3399 | # asm 1: pshufd $0x39,<z12=int6464#14,<z12=int6464#14 | ||
3400 | # asm 2: pshufd $0x39,<z12=%xmm13,<z12=%xmm13 | ||
3401 | pshufd $0x39,%xmm13,%xmm13 | ||
3402 | |||
3403 | # qhasm: z13 <<<= 96 | ||
3404 | # asm 1: pshufd $0x39,<z13=int6464#10,<z13=int6464#10 | ||
3405 | # asm 2: pshufd $0x39,<z13=%xmm9,<z13=%xmm9 | ||
3406 | pshufd $0x39,%xmm9,%xmm9 | ||
3407 | |||
3408 | # qhasm: z14 <<<= 96 | ||
3409 | # asm 1: pshufd $0x39,<z14=int6464#4,<z14=int6464#4 | ||
3410 | # asm 2: pshufd $0x39,<z14=%xmm3,<z14=%xmm3 | ||
3411 | pshufd $0x39,%xmm3,%xmm3 | ||
3412 | |||
3413 | # qhasm: z15 <<<= 96 | ||
3414 | # asm 1: pshufd $0x39,<z15=int6464#3,<z15=int6464#3 | ||
3415 | # asm 2: pshufd $0x39,<z15=%xmm2,<z15=%xmm2 | ||
3416 | pshufd $0x39,%xmm2,%xmm2 | ||
3417 | |||
3418 | # qhasm: (uint32) in12 ^= *(uint32 *) (m + 112) | ||
3419 | # asm 1: xorl 112(<m=int64#2),<in12=int64#3d | ||
3420 | # asm 2: xorl 112(<m=%rsi),<in12=%edx | ||
3421 | xorl 112(%rsi),%edx | ||
3422 | |||
3423 | # qhasm: (uint32) in13 ^= *(uint32 *) (m + 116) | ||
3424 | # asm 1: xorl 116(<m=int64#2),<in13=int64#4d | ||
3425 | # asm 2: xorl 116(<m=%rsi),<in13=%ecx | ||
3426 | xorl 116(%rsi),%ecx | ||
3427 | |||
3428 | # qhasm: (uint32) in14 ^= *(uint32 *) (m + 120) | ||
3429 | # asm 1: xorl 120(<m=int64#2),<in14=int64#5d | ||
3430 | # asm 2: xorl 120(<m=%rsi),<in14=%r8d | ||
3431 | xorl 120(%rsi),%r8d | ||
3432 | |||
3433 | # qhasm: (uint32) in15 ^= *(uint32 *) (m + 124) | ||
3434 | # asm 1: xorl 124(<m=int64#2),<in15=int64#6d | ||
3435 | # asm 2: xorl 124(<m=%rsi),<in15=%r9d | ||
3436 | xorl 124(%rsi),%r9d | ||
3437 | |||
3438 | # qhasm: *(uint32 *) (out + 112) = in12 | ||
3439 | # asm 1: movl <in12=int64#3d,112(<out=int64#1) | ||
3440 | # asm 2: movl <in12=%edx,112(<out=%rdi) | ||
3441 | movl %edx,112(%rdi) | ||
3442 | |||
3443 | # qhasm: *(uint32 *) (out + 116) = in13 | ||
3444 | # asm 1: movl <in13=int64#4d,116(<out=int64#1) | ||
3445 | # asm 2: movl <in13=%ecx,116(<out=%rdi) | ||
3446 | movl %ecx,116(%rdi) | ||
3447 | |||
3448 | # qhasm: *(uint32 *) (out + 120) = in14 | ||
3449 | # asm 1: movl <in14=int64#5d,120(<out=int64#1) | ||
3450 | # asm 2: movl <in14=%r8d,120(<out=%rdi) | ||
3451 | movl %r8d,120(%rdi) | ||
3452 | |||
3453 | # qhasm: *(uint32 *) (out + 124) = in15 | ||
3454 | # asm 1: movl <in15=int64#6d,124(<out=int64#1) | ||
3455 | # asm 2: movl <in15=%r9d,124(<out=%rdi) | ||
3456 | movl %r9d,124(%rdi) | ||
3457 | |||
3458 | # qhasm: in12 = z12 | ||
3459 | # asm 1: movd <z12=int6464#14,>in12=int64#3 | ||
3460 | # asm 2: movd <z12=%xmm13,>in12=%rdx | ||
3461 | movd %xmm13,%rdx | ||
3462 | |||
3463 | # qhasm: in13 = z13 | ||
3464 | # asm 1: movd <z13=int6464#10,>in13=int64#4 | ||
3465 | # asm 2: movd <z13=%xmm9,>in13=%rcx | ||
3466 | movd %xmm9,%rcx | ||
3467 | |||
3468 | # qhasm: in14 = z14 | ||
3469 | # asm 1: movd <z14=int6464#4,>in14=int64#5 | ||
3470 | # asm 2: movd <z14=%xmm3,>in14=%r8 | ||
3471 | movd %xmm3,%r8 | ||
3472 | |||
3473 | # qhasm: in15 = z15 | ||
3474 | # asm 1: movd <z15=int6464#3,>in15=int64#6 | ||
3475 | # asm 2: movd <z15=%xmm2,>in15=%r9 | ||
3476 | movd %xmm2,%r9 | ||
3477 | |||
3478 | # qhasm: z12 <<<= 96 | ||
3479 | # asm 1: pshufd $0x39,<z12=int6464#14,<z12=int6464#14 | ||
3480 | # asm 2: pshufd $0x39,<z12=%xmm13,<z12=%xmm13 | ||
3481 | pshufd $0x39,%xmm13,%xmm13 | ||
3482 | |||
3483 | # qhasm: z13 <<<= 96 | ||
3484 | # asm 1: pshufd $0x39,<z13=int6464#10,<z13=int6464#10 | ||
3485 | # asm 2: pshufd $0x39,<z13=%xmm9,<z13=%xmm9 | ||
3486 | pshufd $0x39,%xmm9,%xmm9 | ||
3487 | |||
3488 | # qhasm: z14 <<<= 96 | ||
3489 | # asm 1: pshufd $0x39,<z14=int6464#4,<z14=int6464#4 | ||
3490 | # asm 2: pshufd $0x39,<z14=%xmm3,<z14=%xmm3 | ||
3491 | pshufd $0x39,%xmm3,%xmm3 | ||
3492 | |||
3493 | # qhasm: z15 <<<= 96 | ||
3494 | # asm 1: pshufd $0x39,<z15=int6464#3,<z15=int6464#3 | ||
3495 | # asm 2: pshufd $0x39,<z15=%xmm2,<z15=%xmm2 | ||
3496 | pshufd $0x39,%xmm2,%xmm2 | ||
3497 | |||
3498 | # qhasm: (uint32) in12 ^= *(uint32 *) (m + 176) | ||
3499 | # asm 1: xorl 176(<m=int64#2),<in12=int64#3d | ||
3500 | # asm 2: xorl 176(<m=%rsi),<in12=%edx | ||
3501 | xorl 176(%rsi),%edx | ||
3502 | |||
3503 | # qhasm: (uint32) in13 ^= *(uint32 *) (m + 180) | ||
3504 | # asm 1: xorl 180(<m=int64#2),<in13=int64#4d | ||
3505 | # asm 2: xorl 180(<m=%rsi),<in13=%ecx | ||
3506 | xorl 180(%rsi),%ecx | ||
3507 | |||
3508 | # qhasm: (uint32) in14 ^= *(uint32 *) (m + 184) | ||
3509 | # asm 1: xorl 184(<m=int64#2),<in14=int64#5d | ||
3510 | # asm 2: xorl 184(<m=%rsi),<in14=%r8d | ||
3511 | xorl 184(%rsi),%r8d | ||
3512 | |||
3513 | # qhasm: (uint32) in15 ^= *(uint32 *) (m + 188) | ||
3514 | # asm 1: xorl 188(<m=int64#2),<in15=int64#6d | ||
3515 | # asm 2: xorl 188(<m=%rsi),<in15=%r9d | ||
3516 | xorl 188(%rsi),%r9d | ||
3517 | |||
3518 | # qhasm: *(uint32 *) (out + 176) = in12 | ||
3519 | # asm 1: movl <in12=int64#3d,176(<out=int64#1) | ||
3520 | # asm 2: movl <in12=%edx,176(<out=%rdi) | ||
3521 | movl %edx,176(%rdi) | ||
3522 | |||
3523 | # qhasm: *(uint32 *) (out + 180) = in13 | ||
3524 | # asm 1: movl <in13=int64#4d,180(<out=int64#1) | ||
3525 | # asm 2: movl <in13=%ecx,180(<out=%rdi) | ||
3526 | movl %ecx,180(%rdi) | ||
3527 | |||
3528 | # qhasm: *(uint32 *) (out + 184) = in14 | ||
3529 | # asm 1: movl <in14=int64#5d,184(<out=int64#1) | ||
3530 | # asm 2: movl <in14=%r8d,184(<out=%rdi) | ||
3531 | movl %r8d,184(%rdi) | ||
3532 | |||
3533 | # qhasm: *(uint32 *) (out + 188) = in15 | ||
3534 | # asm 1: movl <in15=int64#6d,188(<out=int64#1) | ||
3535 | # asm 2: movl <in15=%r9d,188(<out=%rdi) | ||
3536 | movl %r9d,188(%rdi) | ||
3537 | |||
3538 | # qhasm: in12 = z12 | ||
3539 | # asm 1: movd <z12=int6464#14,>in12=int64#3 | ||
3540 | # asm 2: movd <z12=%xmm13,>in12=%rdx | ||
3541 | movd %xmm13,%rdx | ||
3542 | |||
3543 | # qhasm: in13 = z13 | ||
3544 | # asm 1: movd <z13=int6464#10,>in13=int64#4 | ||
3545 | # asm 2: movd <z13=%xmm9,>in13=%rcx | ||
3546 | movd %xmm9,%rcx | ||
3547 | |||
3548 | # qhasm: in14 = z14 | ||
3549 | # asm 1: movd <z14=int6464#4,>in14=int64#5 | ||
3550 | # asm 2: movd <z14=%xmm3,>in14=%r8 | ||
3551 | movd %xmm3,%r8 | ||
3552 | |||
3553 | # qhasm: in15 = z15 | ||
3554 | # asm 1: movd <z15=int6464#3,>in15=int64#6 | ||
3555 | # asm 2: movd <z15=%xmm2,>in15=%r9 | ||
3556 | movd %xmm2,%r9 | ||
3557 | |||
3558 | # qhasm: (uint32) in12 ^= *(uint32 *) (m + 240) | ||
3559 | # asm 1: xorl 240(<m=int64#2),<in12=int64#3d | ||
3560 | # asm 2: xorl 240(<m=%rsi),<in12=%edx | ||
3561 | xorl 240(%rsi),%edx | ||
3562 | |||
3563 | # qhasm: (uint32) in13 ^= *(uint32 *) (m + 244) | ||
3564 | # asm 1: xorl 244(<m=int64#2),<in13=int64#4d | ||
3565 | # asm 2: xorl 244(<m=%rsi),<in13=%ecx | ||
3566 | xorl 244(%rsi),%ecx | ||
3567 | |||
3568 | # qhasm: (uint32) in14 ^= *(uint32 *) (m + 248) | ||
3569 | # asm 1: xorl 248(<m=int64#2),<in14=int64#5d | ||
3570 | # asm 2: xorl 248(<m=%rsi),<in14=%r8d | ||
3571 | xorl 248(%rsi),%r8d | ||
3572 | |||
3573 | # qhasm: (uint32) in15 ^= *(uint32 *) (m + 252) | ||
3574 | # asm 1: xorl 252(<m=int64#2),<in15=int64#6d | ||
3575 | # asm 2: xorl 252(<m=%rsi),<in15=%r9d | ||
3576 | xorl 252(%rsi),%r9d | ||
3577 | |||
3578 | # qhasm: *(uint32 *) (out + 240) = in12 | ||
3579 | # asm 1: movl <in12=int64#3d,240(<out=int64#1) | ||
3580 | # asm 2: movl <in12=%edx,240(<out=%rdi) | ||
3581 | movl %edx,240(%rdi) | ||
3582 | |||
3583 | # qhasm: *(uint32 *) (out + 244) = in13 | ||
3584 | # asm 1: movl <in13=int64#4d,244(<out=int64#1) | ||
3585 | # asm 2: movl <in13=%ecx,244(<out=%rdi) | ||
3586 | movl %ecx,244(%rdi) | ||
3587 | |||
3588 | # qhasm: *(uint32 *) (out + 248) = in14 | ||
3589 | # asm 1: movl <in14=int64#5d,248(<out=int64#1) | ||
3590 | # asm 2: movl <in14=%r8d,248(<out=%rdi) | ||
3591 | movl %r8d,248(%rdi) | ||
3592 | |||
3593 | # qhasm: *(uint32 *) (out + 252) = in15 | ||
3594 | # asm 1: movl <in15=int64#6d,252(<out=int64#1) | ||
3595 | # asm 2: movl <in15=%r9d,252(<out=%rdi) | ||
3596 | movl %r9d,252(%rdi) | ||
3597 | |||
3598 | # qhasm: bytes = bytes_backup | ||
3599 | # asm 1: movq <bytes_backup=stack64#8,>bytes=int64#6 | ||
3600 | # asm 2: movq <bytes_backup=408(%rsp),>bytes=%r9 | ||
3601 | movq 408(%rsp),%r9 | ||
3602 | |||
3603 | # qhasm: bytes -= 256 | ||
3604 | # asm 1: sub $256,<bytes=int64#6 | ||
3605 | # asm 2: sub $256,<bytes=%r9 | ||
3606 | sub $256,%r9 | ||
3607 | |||
3608 | # qhasm: m += 256 | ||
3609 | # asm 1: add $256,<m=int64#2 | ||
3610 | # asm 2: add $256,<m=%rsi | ||
3611 | add $256,%rsi | ||
3612 | |||
3613 | # qhasm: out += 256 | ||
3614 | # asm 1: add $256,<out=int64#1 | ||
3615 | # asm 2: add $256,<out=%rdi | ||
3616 | add $256,%rdi | ||
3617 | |||
3618 | # qhasm: unsigned<? bytes - 256 | ||
3619 | # asm 1: cmp $256,<bytes=int64#6 | ||
3620 | # asm 2: cmp $256,<bytes=%r9 | ||
3621 | cmp $256,%r9 | ||
3622 | # comment:fp stack unchanged by jump | ||
3623 | |||
3624 | # qhasm: goto bytesatleast256 if !unsigned< | ||
3625 | jae ._bytesatleast256 | ||
3626 | |||
3627 | # qhasm: unsigned>? bytes - 0 | ||
3628 | # asm 1: cmp $0,<bytes=int64#6 | ||
3629 | # asm 2: cmp $0,<bytes=%r9 | ||
3630 | cmp $0,%r9 | ||
3631 | # comment:fp stack unchanged by jump | ||
3632 | |||
3633 | # qhasm: goto done if !unsigned> | ||
3634 | jbe ._done | ||
3635 | # comment:fp stack unchanged by fallthrough | ||
3636 | |||
3637 | # qhasm: bytesbetween1and255: | ||
3638 | ._bytesbetween1and255: | ||
3639 | |||
3640 | # qhasm: unsigned<? bytes - 64 | ||
3641 | # asm 1: cmp $64,<bytes=int64#6 | ||
3642 | # asm 2: cmp $64,<bytes=%r9 | ||
3643 | cmp $64,%r9 | ||
3644 | # comment:fp stack unchanged by jump | ||
3645 | |||
3646 | # qhasm: goto nocopy if !unsigned< | ||
3647 | jae ._nocopy | ||
3648 | |||
3649 | # qhasm: ctarget = out | ||
3650 | # asm 1: mov <out=int64#1,>ctarget=int64#3 | ||
3651 | # asm 2: mov <out=%rdi,>ctarget=%rdx | ||
3652 | mov %rdi,%rdx | ||
3653 | |||
3654 | # qhasm: out = &tmp | ||
3655 | # asm 1: leaq <tmp=stack512#1,>out=int64#1 | ||
3656 | # asm 2: leaq <tmp=416(%rsp),>out=%rdi | ||
3657 | leaq 416(%rsp),%rdi | ||
3658 | |||
3659 | # qhasm: i = bytes | ||
3660 | # asm 1: mov <bytes=int64#6,>i=int64#4 | ||
3661 | # asm 2: mov <bytes=%r9,>i=%rcx | ||
3662 | mov %r9,%rcx | ||
3663 | |||
3664 | # qhasm: while (i) { *out++ = *m++; --i } | ||
3665 | rep movsb | ||
3666 | |||
3667 | # qhasm: out = &tmp | ||
3668 | # asm 1: leaq <tmp=stack512#1,>out=int64#1 | ||
3669 | # asm 2: leaq <tmp=416(%rsp),>out=%rdi | ||
3670 | leaq 416(%rsp),%rdi | ||
3671 | |||
3672 | # qhasm: m = &tmp | ||
3673 | # asm 1: leaq <tmp=stack512#1,>m=int64#2 | ||
3674 | # asm 2: leaq <tmp=416(%rsp),>m=%rsi | ||
3675 | leaq 416(%rsp),%rsi | ||
3676 | # comment:fp stack unchanged by fallthrough | ||
3677 | |||
3678 | # qhasm: nocopy: | ||
3679 | ._nocopy: | ||
3680 | |||
3681 | # qhasm: bytes_backup = bytes | ||
3682 | # asm 1: movq <bytes=int64#6,>bytes_backup=stack64#8 | ||
3683 | # asm 2: movq <bytes=%r9,>bytes_backup=408(%rsp) | ||
3684 | movq %r9,408(%rsp) | ||
3685 | |||
3686 | # qhasm: diag0 = x0 | ||
3687 | # asm 1: movdqa <x0=stack128#4,>diag0=int6464#1 | ||
3688 | # asm 2: movdqa <x0=48(%rsp),>diag0=%xmm0 | ||
3689 | movdqa 48(%rsp),%xmm0 | ||
3690 | |||
3691 | # qhasm: diag1 = x1 | ||
3692 | # asm 1: movdqa <x1=stack128#1,>diag1=int6464#2 | ||
3693 | # asm 2: movdqa <x1=0(%rsp),>diag1=%xmm1 | ||
3694 | movdqa 0(%rsp),%xmm1 | ||
3695 | |||
3696 | # qhasm: diag2 = x2 | ||
3697 | # asm 1: movdqa <x2=stack128#2,>diag2=int6464#3 | ||
3698 | # asm 2: movdqa <x2=16(%rsp),>diag2=%xmm2 | ||
3699 | movdqa 16(%rsp),%xmm2 | ||
3700 | |||
3701 | # qhasm: diag3 = x3 | ||
3702 | # asm 1: movdqa <x3=stack128#3,>diag3=int6464#4 | ||
3703 | # asm 2: movdqa <x3=32(%rsp),>diag3=%xmm3 | ||
3704 | movdqa 32(%rsp),%xmm3 | ||
3705 | |||
3706 | # qhasm: a0 = diag1 | ||
3707 | # asm 1: movdqa <diag1=int6464#2,>a0=int6464#5 | ||
3708 | # asm 2: movdqa <diag1=%xmm1,>a0=%xmm4 | ||
3709 | movdqa %xmm1,%xmm4 | ||
3710 | |||
3711 | # qhasm: i = 8 | ||
3712 | # asm 1: mov $8,>i=int64#4 | ||
3713 | # asm 2: mov $8,>i=%rcx | ||
3714 | mov $8,%rcx | ||
3715 | |||
3716 | # qhasm: mainloop2: | ||
3717 | ._mainloop2: | ||
3718 | |||
3719 | # qhasm: uint32323232 a0 += diag0 | ||
3720 | # asm 1: paddd <diag0=int6464#1,<a0=int6464#5 | ||
3721 | # asm 2: paddd <diag0=%xmm0,<a0=%xmm4 | ||
3722 | paddd %xmm0,%xmm4 | ||
3723 | |||
3724 | # qhasm: a1 = diag0 | ||
3725 | # asm 1: movdqa <diag0=int6464#1,>a1=int6464#6 | ||
3726 | # asm 2: movdqa <diag0=%xmm0,>a1=%xmm5 | ||
3727 | movdqa %xmm0,%xmm5 | ||
3728 | |||
3729 | # qhasm: b0 = a0 | ||
3730 | # asm 1: movdqa <a0=int6464#5,>b0=int6464#7 | ||
3731 | # asm 2: movdqa <a0=%xmm4,>b0=%xmm6 | ||
3732 | movdqa %xmm4,%xmm6 | ||
3733 | |||
3734 | # qhasm: uint32323232 a0 <<= 7 | ||
3735 | # asm 1: pslld $7,<a0=int6464#5 | ||
3736 | # asm 2: pslld $7,<a0=%xmm4 | ||
3737 | pslld $7,%xmm4 | ||
3738 | |||
3739 | # qhasm: uint32323232 b0 >>= 25 | ||
3740 | # asm 1: psrld $25,<b0=int6464#7 | ||
3741 | # asm 2: psrld $25,<b0=%xmm6 | ||
3742 | psrld $25,%xmm6 | ||
3743 | |||
3744 | # qhasm: diag3 ^= a0 | ||
3745 | # asm 1: pxor <a0=int6464#5,<diag3=int6464#4 | ||
3746 | # asm 2: pxor <a0=%xmm4,<diag3=%xmm3 | ||
3747 | pxor %xmm4,%xmm3 | ||
3748 | |||
3749 | # qhasm: diag3 ^= b0 | ||
3750 | # asm 1: pxor <b0=int6464#7,<diag3=int6464#4 | ||
3751 | # asm 2: pxor <b0=%xmm6,<diag3=%xmm3 | ||
3752 | pxor %xmm6,%xmm3 | ||
3753 | |||
3754 | # qhasm: uint32323232 a1 += diag3 | ||
3755 | # asm 1: paddd <diag3=int6464#4,<a1=int6464#6 | ||
3756 | # asm 2: paddd <diag3=%xmm3,<a1=%xmm5 | ||
3757 | paddd %xmm3,%xmm5 | ||
3758 | |||
3759 | # qhasm: a2 = diag3 | ||
3760 | # asm 1: movdqa <diag3=int6464#4,>a2=int6464#5 | ||
3761 | # asm 2: movdqa <diag3=%xmm3,>a2=%xmm4 | ||
3762 | movdqa %xmm3,%xmm4 | ||
3763 | |||
3764 | # qhasm: b1 = a1 | ||
3765 | # asm 1: movdqa <a1=int6464#6,>b1=int6464#7 | ||
3766 | # asm 2: movdqa <a1=%xmm5,>b1=%xmm6 | ||
3767 | movdqa %xmm5,%xmm6 | ||
3768 | |||
3769 | # qhasm: uint32323232 a1 <<= 9 | ||
3770 | # asm 1: pslld $9,<a1=int6464#6 | ||
3771 | # asm 2: pslld $9,<a1=%xmm5 | ||
3772 | pslld $9,%xmm5 | ||
3773 | |||
3774 | # qhasm: uint32323232 b1 >>= 23 | ||
3775 | # asm 1: psrld $23,<b1=int6464#7 | ||
3776 | # asm 2: psrld $23,<b1=%xmm6 | ||
3777 | psrld $23,%xmm6 | ||
3778 | |||
3779 | # qhasm: diag2 ^= a1 | ||
3780 | # asm 1: pxor <a1=int6464#6,<diag2=int6464#3 | ||
3781 | # asm 2: pxor <a1=%xmm5,<diag2=%xmm2 | ||
3782 | pxor %xmm5,%xmm2 | ||
3783 | |||
3784 | # qhasm: diag3 <<<= 32 | ||
3785 | # asm 1: pshufd $0x93,<diag3=int6464#4,<diag3=int6464#4 | ||
3786 | # asm 2: pshufd $0x93,<diag3=%xmm3,<diag3=%xmm3 | ||
3787 | pshufd $0x93,%xmm3,%xmm3 | ||
3788 | |||
3789 | # qhasm: diag2 ^= b1 | ||
3790 | # asm 1: pxor <b1=int6464#7,<diag2=int6464#3 | ||
3791 | # asm 2: pxor <b1=%xmm6,<diag2=%xmm2 | ||
3792 | pxor %xmm6,%xmm2 | ||
3793 | |||
3794 | # qhasm: uint32323232 a2 += diag2 | ||
3795 | # asm 1: paddd <diag2=int6464#3,<a2=int6464#5 | ||
3796 | # asm 2: paddd <diag2=%xmm2,<a2=%xmm4 | ||
3797 | paddd %xmm2,%xmm4 | ||
3798 | |||
3799 | # qhasm: a3 = diag2 | ||
3800 | # asm 1: movdqa <diag2=int6464#3,>a3=int6464#6 | ||
3801 | # asm 2: movdqa <diag2=%xmm2,>a3=%xmm5 | ||
3802 | movdqa %xmm2,%xmm5 | ||
3803 | |||
3804 | # qhasm: b2 = a2 | ||
3805 | # asm 1: movdqa <a2=int6464#5,>b2=int6464#7 | ||
3806 | # asm 2: movdqa <a2=%xmm4,>b2=%xmm6 | ||
3807 | movdqa %xmm4,%xmm6 | ||
3808 | |||
3809 | # qhasm: uint32323232 a2 <<= 13 | ||
3810 | # asm 1: pslld $13,<a2=int6464#5 | ||
3811 | # asm 2: pslld $13,<a2=%xmm4 | ||
3812 | pslld $13,%xmm4 | ||
3813 | |||
3814 | # qhasm: uint32323232 b2 >>= 19 | ||
3815 | # asm 1: psrld $19,<b2=int6464#7 | ||
3816 | # asm 2: psrld $19,<b2=%xmm6 | ||
3817 | psrld $19,%xmm6 | ||
3818 | |||
3819 | # qhasm: diag1 ^= a2 | ||
3820 | # asm 1: pxor <a2=int6464#5,<diag1=int6464#2 | ||
3821 | # asm 2: pxor <a2=%xmm4,<diag1=%xmm1 | ||
3822 | pxor %xmm4,%xmm1 | ||
3823 | |||
3824 | # qhasm: diag2 <<<= 64 | ||
3825 | # asm 1: pshufd $0x4e,<diag2=int6464#3,<diag2=int6464#3 | ||
3826 | # asm 2: pshufd $0x4e,<diag2=%xmm2,<diag2=%xmm2 | ||
3827 | pshufd $0x4e,%xmm2,%xmm2 | ||
3828 | |||
3829 | # qhasm: diag1 ^= b2 | ||
3830 | # asm 1: pxor <b2=int6464#7,<diag1=int6464#2 | ||
3831 | # asm 2: pxor <b2=%xmm6,<diag1=%xmm1 | ||
3832 | pxor %xmm6,%xmm1 | ||
3833 | |||
3834 | # qhasm: uint32323232 a3 += diag1 | ||
3835 | # asm 1: paddd <diag1=int6464#2,<a3=int6464#6 | ||
3836 | # asm 2: paddd <diag1=%xmm1,<a3=%xmm5 | ||
3837 | paddd %xmm1,%xmm5 | ||
3838 | |||
3839 | # qhasm: a4 = diag3 | ||
3840 | # asm 1: movdqa <diag3=int6464#4,>a4=int6464#5 | ||
3841 | # asm 2: movdqa <diag3=%xmm3,>a4=%xmm4 | ||
3842 | movdqa %xmm3,%xmm4 | ||
3843 | |||
3844 | # qhasm: b3 = a3 | ||
3845 | # asm 1: movdqa <a3=int6464#6,>b3=int6464#7 | ||
3846 | # asm 2: movdqa <a3=%xmm5,>b3=%xmm6 | ||
3847 | movdqa %xmm5,%xmm6 | ||
3848 | |||
3849 | # qhasm: uint32323232 a3 <<= 18 | ||
3850 | # asm 1: pslld $18,<a3=int6464#6 | ||
3851 | # asm 2: pslld $18,<a3=%xmm5 | ||
3852 | pslld $18,%xmm5 | ||
3853 | |||
3854 | # qhasm: uint32323232 b3 >>= 14 | ||
3855 | # asm 1: psrld $14,<b3=int6464#7 | ||
3856 | # asm 2: psrld $14,<b3=%xmm6 | ||
3857 | psrld $14,%xmm6 | ||
3858 | |||
3859 | # qhasm: diag0 ^= a3 | ||
3860 | # asm 1: pxor <a3=int6464#6,<diag0=int6464#1 | ||
3861 | # asm 2: pxor <a3=%xmm5,<diag0=%xmm0 | ||
3862 | pxor %xmm5,%xmm0 | ||
3863 | |||
3864 | # qhasm: diag1 <<<= 96 | ||
3865 | # asm 1: pshufd $0x39,<diag1=int6464#2,<diag1=int6464#2 | ||
3866 | # asm 2: pshufd $0x39,<diag1=%xmm1,<diag1=%xmm1 | ||
3867 | pshufd $0x39,%xmm1,%xmm1 | ||
3868 | |||
3869 | # qhasm: diag0 ^= b3 | ||
3870 | # asm 1: pxor <b3=int6464#7,<diag0=int6464#1 | ||
3871 | # asm 2: pxor <b3=%xmm6,<diag0=%xmm0 | ||
3872 | pxor %xmm6,%xmm0 | ||
3873 | |||
3874 | # qhasm: uint32323232 a4 += diag0 | ||
3875 | # asm 1: paddd <diag0=int6464#1,<a4=int6464#5 | ||
3876 | # asm 2: paddd <diag0=%xmm0,<a4=%xmm4 | ||
3877 | paddd %xmm0,%xmm4 | ||
3878 | |||
3879 | # qhasm: a5 = diag0 | ||
3880 | # asm 1: movdqa <diag0=int6464#1,>a5=int6464#6 | ||
3881 | # asm 2: movdqa <diag0=%xmm0,>a5=%xmm5 | ||
3882 | movdqa %xmm0,%xmm5 | ||
3883 | |||
3884 | # qhasm: b4 = a4 | ||
3885 | # asm 1: movdqa <a4=int6464#5,>b4=int6464#7 | ||
3886 | # asm 2: movdqa <a4=%xmm4,>b4=%xmm6 | ||
3887 | movdqa %xmm4,%xmm6 | ||
3888 | |||
3889 | # qhasm: uint32323232 a4 <<= 7 | ||
3890 | # asm 1: pslld $7,<a4=int6464#5 | ||
3891 | # asm 2: pslld $7,<a4=%xmm4 | ||
3892 | pslld $7,%xmm4 | ||
3893 | |||
3894 | # qhasm: uint32323232 b4 >>= 25 | ||
3895 | # asm 1: psrld $25,<b4=int6464#7 | ||
3896 | # asm 2: psrld $25,<b4=%xmm6 | ||
3897 | psrld $25,%xmm6 | ||
3898 | |||
3899 | # qhasm: diag1 ^= a4 | ||
3900 | # asm 1: pxor <a4=int6464#5,<diag1=int6464#2 | ||
3901 | # asm 2: pxor <a4=%xmm4,<diag1=%xmm1 | ||
3902 | pxor %xmm4,%xmm1 | ||
3903 | |||
3904 | # qhasm: diag1 ^= b4 | ||
3905 | # asm 1: pxor <b4=int6464#7,<diag1=int6464#2 | ||
3906 | # asm 2: pxor <b4=%xmm6,<diag1=%xmm1 | ||
3907 | pxor %xmm6,%xmm1 | ||
3908 | |||
3909 | # qhasm: uint32323232 a5 += diag1 | ||
3910 | # asm 1: paddd <diag1=int6464#2,<a5=int6464#6 | ||
3911 | # asm 2: paddd <diag1=%xmm1,<a5=%xmm5 | ||
3912 | paddd %xmm1,%xmm5 | ||
3913 | |||
3914 | # qhasm: a6 = diag1 | ||
3915 | # asm 1: movdqa <diag1=int6464#2,>a6=int6464#5 | ||
3916 | # asm 2: movdqa <diag1=%xmm1,>a6=%xmm4 | ||
3917 | movdqa %xmm1,%xmm4 | ||
3918 | |||
3919 | # qhasm: b5 = a5 | ||
3920 | # asm 1: movdqa <a5=int6464#6,>b5=int6464#7 | ||
3921 | # asm 2: movdqa <a5=%xmm5,>b5=%xmm6 | ||
3922 | movdqa %xmm5,%xmm6 | ||
3923 | |||
3924 | # qhasm: uint32323232 a5 <<= 9 | ||
3925 | # asm 1: pslld $9,<a5=int6464#6 | ||
3926 | # asm 2: pslld $9,<a5=%xmm5 | ||
3927 | pslld $9,%xmm5 | ||
3928 | |||
3929 | # qhasm: uint32323232 b5 >>= 23 | ||
3930 | # asm 1: psrld $23,<b5=int6464#7 | ||
3931 | # asm 2: psrld $23,<b5=%xmm6 | ||
3932 | psrld $23,%xmm6 | ||
3933 | |||
3934 | # qhasm: diag2 ^= a5 | ||
3935 | # asm 1: pxor <a5=int6464#6,<diag2=int6464#3 | ||
3936 | # asm 2: pxor <a5=%xmm5,<diag2=%xmm2 | ||
3937 | pxor %xmm5,%xmm2 | ||
3938 | |||
3939 | # qhasm: diag1 <<<= 32 | ||
3940 | # asm 1: pshufd $0x93,<diag1=int6464#2,<diag1=int6464#2 | ||
3941 | # asm 2: pshufd $0x93,<diag1=%xmm1,<diag1=%xmm1 | ||
3942 | pshufd $0x93,%xmm1,%xmm1 | ||
3943 | |||
3944 | # qhasm: diag2 ^= b5 | ||
3945 | # asm 1: pxor <b5=int6464#7,<diag2=int6464#3 | ||
3946 | # asm 2: pxor <b5=%xmm6,<diag2=%xmm2 | ||
3947 | pxor %xmm6,%xmm2 | ||
3948 | |||
3949 | # qhasm: uint32323232 a6 += diag2 | ||
3950 | # asm 1: paddd <diag2=int6464#3,<a6=int6464#5 | ||
3951 | # asm 2: paddd <diag2=%xmm2,<a6=%xmm4 | ||
3952 | paddd %xmm2,%xmm4 | ||
3953 | |||
3954 | # qhasm: a7 = diag2 | ||
3955 | # asm 1: movdqa <diag2=int6464#3,>a7=int6464#6 | ||
3956 | # asm 2: movdqa <diag2=%xmm2,>a7=%xmm5 | ||
3957 | movdqa %xmm2,%xmm5 | ||
3958 | |||
3959 | # qhasm: b6 = a6 | ||
3960 | # asm 1: movdqa <a6=int6464#5,>b6=int6464#7 | ||
3961 | # asm 2: movdqa <a6=%xmm4,>b6=%xmm6 | ||
3962 | movdqa %xmm4,%xmm6 | ||
3963 | |||
3964 | # qhasm: uint32323232 a6 <<= 13 | ||
3965 | # asm 1: pslld $13,<a6=int6464#5 | ||
3966 | # asm 2: pslld $13,<a6=%xmm4 | ||
3967 | pslld $13,%xmm4 | ||
3968 | |||
3969 | # qhasm: uint32323232 b6 >>= 19 | ||
3970 | # asm 1: psrld $19,<b6=int6464#7 | ||
3971 | # asm 2: psrld $19,<b6=%xmm6 | ||
3972 | psrld $19,%xmm6 | ||
3973 | |||
3974 | # qhasm: diag3 ^= a6 | ||
3975 | # asm 1: pxor <a6=int6464#5,<diag3=int6464#4 | ||
3976 | # asm 2: pxor <a6=%xmm4,<diag3=%xmm3 | ||
3977 | pxor %xmm4,%xmm3 | ||
3978 | |||
3979 | # qhasm: diag2 <<<= 64 | ||
3980 | # asm 1: pshufd $0x4e,<diag2=int6464#3,<diag2=int6464#3 | ||
3981 | # asm 2: pshufd $0x4e,<diag2=%xmm2,<diag2=%xmm2 | ||
3982 | pshufd $0x4e,%xmm2,%xmm2 | ||
3983 | |||
3984 | # qhasm: diag3 ^= b6 | ||
3985 | # asm 1: pxor <b6=int6464#7,<diag3=int6464#4 | ||
3986 | # asm 2: pxor <b6=%xmm6,<diag3=%xmm3 | ||
3987 | pxor %xmm6,%xmm3 | ||
3988 | |||
3989 | # qhasm: uint32323232 a7 += diag3 | ||
3990 | # asm 1: paddd <diag3=int6464#4,<a7=int6464#6 | ||
3991 | # asm 2: paddd <diag3=%xmm3,<a7=%xmm5 | ||
3992 | paddd %xmm3,%xmm5 | ||
3993 | |||
3994 | # qhasm: a0 = diag1 | ||
3995 | # asm 1: movdqa <diag1=int6464#2,>a0=int6464#5 | ||
3996 | # asm 2: movdqa <diag1=%xmm1,>a0=%xmm4 | ||
3997 | movdqa %xmm1,%xmm4 | ||
3998 | |||
3999 | # qhasm: b7 = a7 | ||
4000 | # asm 1: movdqa <a7=int6464#6,>b7=int6464#7 | ||
4001 | # asm 2: movdqa <a7=%xmm5,>b7=%xmm6 | ||
4002 | movdqa %xmm5,%xmm6 | ||
4003 | |||
4004 | # qhasm: uint32323232 a7 <<= 18 | ||
4005 | # asm 1: pslld $18,<a7=int6464#6 | ||
4006 | # asm 2: pslld $18,<a7=%xmm5 | ||
4007 | pslld $18,%xmm5 | ||
4008 | |||
4009 | # qhasm: uint32323232 b7 >>= 14 | ||
4010 | # asm 1: psrld $14,<b7=int6464#7 | ||
4011 | # asm 2: psrld $14,<b7=%xmm6 | ||
4012 | psrld $14,%xmm6 | ||
4013 | |||
4014 | # qhasm: diag0 ^= a7 | ||
4015 | # asm 1: pxor <a7=int6464#6,<diag0=int6464#1 | ||
4016 | # asm 2: pxor <a7=%xmm5,<diag0=%xmm0 | ||
4017 | pxor %xmm5,%xmm0 | ||
4018 | |||
4019 | # qhasm: diag3 <<<= 96 | ||
4020 | # asm 1: pshufd $0x39,<diag3=int6464#4,<diag3=int6464#4 | ||
4021 | # asm 2: pshufd $0x39,<diag3=%xmm3,<diag3=%xmm3 | ||
4022 | pshufd $0x39,%xmm3,%xmm3 | ||
4023 | |||
4024 | # qhasm: diag0 ^= b7 | ||
4025 | # asm 1: pxor <b7=int6464#7,<diag0=int6464#1 | ||
4026 | # asm 2: pxor <b7=%xmm6,<diag0=%xmm0 | ||
4027 | pxor %xmm6,%xmm0 | ||
4028 | |||
4029 | # qhasm: uint32323232 a0 += diag0 | ||
4030 | # asm 1: paddd <diag0=int6464#1,<a0=int6464#5 | ||
4031 | # asm 2: paddd <diag0=%xmm0,<a0=%xmm4 | ||
4032 | paddd %xmm0,%xmm4 | ||
4033 | |||
4034 | # qhasm: a1 = diag0 | ||
4035 | # asm 1: movdqa <diag0=int6464#1,>a1=int6464#6 | ||
4036 | # asm 2: movdqa <diag0=%xmm0,>a1=%xmm5 | ||
4037 | movdqa %xmm0,%xmm5 | ||
4038 | |||
4039 | # qhasm: b0 = a0 | ||
4040 | # asm 1: movdqa <a0=int6464#5,>b0=int6464#7 | ||
4041 | # asm 2: movdqa <a0=%xmm4,>b0=%xmm6 | ||
4042 | movdqa %xmm4,%xmm6 | ||
4043 | |||
4044 | # qhasm: uint32323232 a0 <<= 7 | ||
4045 | # asm 1: pslld $7,<a0=int6464#5 | ||
4046 | # asm 2: pslld $7,<a0=%xmm4 | ||
4047 | pslld $7,%xmm4 | ||
4048 | |||
4049 | # qhasm: uint32323232 b0 >>= 25 | ||
4050 | # asm 1: psrld $25,<b0=int6464#7 | ||
4051 | # asm 2: psrld $25,<b0=%xmm6 | ||
4052 | psrld $25,%xmm6 | ||
4053 | |||
4054 | # qhasm: diag3 ^= a0 | ||
4055 | # asm 1: pxor <a0=int6464#5,<diag3=int6464#4 | ||
4056 | # asm 2: pxor <a0=%xmm4,<diag3=%xmm3 | ||
4057 | pxor %xmm4,%xmm3 | ||
4058 | |||
4059 | # qhasm: diag3 ^= b0 | ||
4060 | # asm 1: pxor <b0=int6464#7,<diag3=int6464#4 | ||
4061 | # asm 2: pxor <b0=%xmm6,<diag3=%xmm3 | ||
4062 | pxor %xmm6,%xmm3 | ||
4063 | |||
4064 | # qhasm: uint32323232 a1 += diag3 | ||
4065 | # asm 1: paddd <diag3=int6464#4,<a1=int6464#6 | ||
4066 | # asm 2: paddd <diag3=%xmm3,<a1=%xmm5 | ||
4067 | paddd %xmm3,%xmm5 | ||
4068 | |||
4069 | # qhasm: a2 = diag3 | ||
4070 | # asm 1: movdqa <diag3=int6464#4,>a2=int6464#5 | ||
4071 | # asm 2: movdqa <diag3=%xmm3,>a2=%xmm4 | ||
4072 | movdqa %xmm3,%xmm4 | ||
4073 | |||
4074 | # qhasm: b1 = a1 | ||
4075 | # asm 1: movdqa <a1=int6464#6,>b1=int6464#7 | ||
4076 | # asm 2: movdqa <a1=%xmm5,>b1=%xmm6 | ||
4077 | movdqa %xmm5,%xmm6 | ||
4078 | |||
4079 | # qhasm: uint32323232 a1 <<= 9 | ||
4080 | # asm 1: pslld $9,<a1=int6464#6 | ||
4081 | # asm 2: pslld $9,<a1=%xmm5 | ||
4082 | pslld $9,%xmm5 | ||
4083 | |||
4084 | # qhasm: uint32323232 b1 >>= 23 | ||
4085 | # asm 1: psrld $23,<b1=int6464#7 | ||
4086 | # asm 2: psrld $23,<b1=%xmm6 | ||
4087 | psrld $23,%xmm6 | ||
4088 | |||
4089 | # qhasm: diag2 ^= a1 | ||
4090 | # asm 1: pxor <a1=int6464#6,<diag2=int6464#3 | ||
4091 | # asm 2: pxor <a1=%xmm5,<diag2=%xmm2 | ||
4092 | pxor %xmm5,%xmm2 | ||
4093 | |||
4094 | # qhasm: diag3 <<<= 32 | ||
4095 | # asm 1: pshufd $0x93,<diag3=int6464#4,<diag3=int6464#4 | ||
4096 | # asm 2: pshufd $0x93,<diag3=%xmm3,<diag3=%xmm3 | ||
4097 | pshufd $0x93,%xmm3,%xmm3 | ||
4098 | |||
4099 | # qhasm: diag2 ^= b1 | ||
4100 | # asm 1: pxor <b1=int6464#7,<diag2=int6464#3 | ||
4101 | # asm 2: pxor <b1=%xmm6,<diag2=%xmm2 | ||
4102 | pxor %xmm6,%xmm2 | ||
4103 | |||
4104 | # qhasm: uint32323232 a2 += diag2 | ||
4105 | # asm 1: paddd <diag2=int6464#3,<a2=int6464#5 | ||
4106 | # asm 2: paddd <diag2=%xmm2,<a2=%xmm4 | ||
4107 | paddd %xmm2,%xmm4 | ||
4108 | |||
4109 | # qhasm: a3 = diag2 | ||
4110 | # asm 1: movdqa <diag2=int6464#3,>a3=int6464#6 | ||
4111 | # asm 2: movdqa <diag2=%xmm2,>a3=%xmm5 | ||
4112 | movdqa %xmm2,%xmm5 | ||
4113 | |||
4114 | # qhasm: b2 = a2 | ||
4115 | # asm 1: movdqa <a2=int6464#5,>b2=int6464#7 | ||
4116 | # asm 2: movdqa <a2=%xmm4,>b2=%xmm6 | ||
4117 | movdqa %xmm4,%xmm6 | ||
4118 | |||
4119 | # qhasm: uint32323232 a2 <<= 13 | ||
4120 | # asm 1: pslld $13,<a2=int6464#5 | ||
4121 | # asm 2: pslld $13,<a2=%xmm4 | ||
4122 | pslld $13,%xmm4 | ||
4123 | |||
4124 | # qhasm: uint32323232 b2 >>= 19 | ||
4125 | # asm 1: psrld $19,<b2=int6464#7 | ||
4126 | # asm 2: psrld $19,<b2=%xmm6 | ||
4127 | psrld $19,%xmm6 | ||
4128 | |||
4129 | # qhasm: diag1 ^= a2 | ||
4130 | # asm 1: pxor <a2=int6464#5,<diag1=int6464#2 | ||
4131 | # asm 2: pxor <a2=%xmm4,<diag1=%xmm1 | ||
4132 | pxor %xmm4,%xmm1 | ||
4133 | |||
4134 | # qhasm: diag2 <<<= 64 | ||
4135 | # asm 1: pshufd $0x4e,<diag2=int6464#3,<diag2=int6464#3 | ||
4136 | # asm 2: pshufd $0x4e,<diag2=%xmm2,<diag2=%xmm2 | ||
4137 | pshufd $0x4e,%xmm2,%xmm2 | ||
4138 | |||
4139 | # qhasm: diag1 ^= b2 | ||
4140 | # asm 1: pxor <b2=int6464#7,<diag1=int6464#2 | ||
4141 | # asm 2: pxor <b2=%xmm6,<diag1=%xmm1 | ||
4142 | pxor %xmm6,%xmm1 | ||
4143 | |||
4144 | # qhasm: uint32323232 a3 += diag1 | ||
4145 | # asm 1: paddd <diag1=int6464#2,<a3=int6464#6 | ||
4146 | # asm 2: paddd <diag1=%xmm1,<a3=%xmm5 | ||
4147 | paddd %xmm1,%xmm5 | ||
4148 | |||
4149 | # qhasm: a4 = diag3 | ||
4150 | # asm 1: movdqa <diag3=int6464#4,>a4=int6464#5 | ||
4151 | # asm 2: movdqa <diag3=%xmm3,>a4=%xmm4 | ||
4152 | movdqa %xmm3,%xmm4 | ||
4153 | |||
4154 | # qhasm: b3 = a3 | ||
4155 | # asm 1: movdqa <a3=int6464#6,>b3=int6464#7 | ||
4156 | # asm 2: movdqa <a3=%xmm5,>b3=%xmm6 | ||
4157 | movdqa %xmm5,%xmm6 | ||
4158 | |||
4159 | # qhasm: uint32323232 a3 <<= 18 | ||
4160 | # asm 1: pslld $18,<a3=int6464#6 | ||
4161 | # asm 2: pslld $18,<a3=%xmm5 | ||
4162 | pslld $18,%xmm5 | ||
4163 | |||
4164 | # qhasm: uint32323232 b3 >>= 14 | ||
4165 | # asm 1: psrld $14,<b3=int6464#7 | ||
4166 | # asm 2: psrld $14,<b3=%xmm6 | ||
4167 | psrld $14,%xmm6 | ||
4168 | |||
4169 | # qhasm: diag0 ^= a3 | ||
4170 | # asm 1: pxor <a3=int6464#6,<diag0=int6464#1 | ||
4171 | # asm 2: pxor <a3=%xmm5,<diag0=%xmm0 | ||
4172 | pxor %xmm5,%xmm0 | ||
4173 | |||
4174 | # qhasm: diag1 <<<= 96 | ||
4175 | # asm 1: pshufd $0x39,<diag1=int6464#2,<diag1=int6464#2 | ||
4176 | # asm 2: pshufd $0x39,<diag1=%xmm1,<diag1=%xmm1 | ||
4177 | pshufd $0x39,%xmm1,%xmm1 | ||
4178 | |||
4179 | # qhasm: diag0 ^= b3 | ||
4180 | # asm 1: pxor <b3=int6464#7,<diag0=int6464#1 | ||
4181 | # asm 2: pxor <b3=%xmm6,<diag0=%xmm0 | ||
4182 | pxor %xmm6,%xmm0 | ||
4183 | |||
4184 | # qhasm: uint32323232 a4 += diag0 | ||
4185 | # asm 1: paddd <diag0=int6464#1,<a4=int6464#5 | ||
4186 | # asm 2: paddd <diag0=%xmm0,<a4=%xmm4 | ||
4187 | paddd %xmm0,%xmm4 | ||
4188 | |||
4189 | # qhasm: a5 = diag0 | ||
4190 | # asm 1: movdqa <diag0=int6464#1,>a5=int6464#6 | ||
4191 | # asm 2: movdqa <diag0=%xmm0,>a5=%xmm5 | ||
4192 | movdqa %xmm0,%xmm5 | ||
4193 | |||
4194 | # qhasm: b4 = a4 | ||
4195 | # asm 1: movdqa <a4=int6464#5,>b4=int6464#7 | ||
4196 | # asm 2: movdqa <a4=%xmm4,>b4=%xmm6 | ||
4197 | movdqa %xmm4,%xmm6 | ||
4198 | |||
4199 | # qhasm: uint32323232 a4 <<= 7 | ||
4200 | # asm 1: pslld $7,<a4=int6464#5 | ||
4201 | # asm 2: pslld $7,<a4=%xmm4 | ||
4202 | pslld $7,%xmm4 | ||
4203 | |||
4204 | # qhasm: uint32323232 b4 >>= 25 | ||
4205 | # asm 1: psrld $25,<b4=int6464#7 | ||
4206 | # asm 2: psrld $25,<b4=%xmm6 | ||
4207 | psrld $25,%xmm6 | ||
4208 | |||
4209 | # qhasm: diag1 ^= a4 | ||
4210 | # asm 1: pxor <a4=int6464#5,<diag1=int6464#2 | ||
4211 | # asm 2: pxor <a4=%xmm4,<diag1=%xmm1 | ||
4212 | pxor %xmm4,%xmm1 | ||
4213 | |||
4214 | # qhasm: diag1 ^= b4 | ||
4215 | # asm 1: pxor <b4=int6464#7,<diag1=int6464#2 | ||
4216 | # asm 2: pxor <b4=%xmm6,<diag1=%xmm1 | ||
4217 | pxor %xmm6,%xmm1 | ||
4218 | |||
4219 | # qhasm: uint32323232 a5 += diag1 | ||
4220 | # asm 1: paddd <diag1=int6464#2,<a5=int6464#6 | ||
4221 | # asm 2: paddd <diag1=%xmm1,<a5=%xmm5 | ||
4222 | paddd %xmm1,%xmm5 | ||
4223 | |||
4224 | # qhasm: a6 = diag1 | ||
4225 | # asm 1: movdqa <diag1=int6464#2,>a6=int6464#5 | ||
4226 | # asm 2: movdqa <diag1=%xmm1,>a6=%xmm4 | ||
4227 | movdqa %xmm1,%xmm4 | ||
4228 | |||
4229 | # qhasm: b5 = a5 | ||
4230 | # asm 1: movdqa <a5=int6464#6,>b5=int6464#7 | ||
4231 | # asm 2: movdqa <a5=%xmm5,>b5=%xmm6 | ||
4232 | movdqa %xmm5,%xmm6 | ||
4233 | |||
4234 | # qhasm: uint32323232 a5 <<= 9 | ||
4235 | # asm 1: pslld $9,<a5=int6464#6 | ||
4236 | # asm 2: pslld $9,<a5=%xmm5 | ||
4237 | pslld $9,%xmm5 | ||
4238 | |||
4239 | # qhasm: uint32323232 b5 >>= 23 | ||
4240 | # asm 1: psrld $23,<b5=int6464#7 | ||
4241 | # asm 2: psrld $23,<b5=%xmm6 | ||
4242 | psrld $23,%xmm6 | ||
4243 | |||
4244 | # qhasm: diag2 ^= a5 | ||
4245 | # asm 1: pxor <a5=int6464#6,<diag2=int6464#3 | ||
4246 | # asm 2: pxor <a5=%xmm5,<diag2=%xmm2 | ||
4247 | pxor %xmm5,%xmm2 | ||
4248 | |||
4249 | # qhasm: diag1 <<<= 32 | ||
4250 | # asm 1: pshufd $0x93,<diag1=int6464#2,<diag1=int6464#2 | ||
4251 | # asm 2: pshufd $0x93,<diag1=%xmm1,<diag1=%xmm1 | ||
4252 | pshufd $0x93,%xmm1,%xmm1 | ||
4253 | |||
4254 | # qhasm: diag2 ^= b5 | ||
4255 | # asm 1: pxor <b5=int6464#7,<diag2=int6464#3 | ||
4256 | # asm 2: pxor <b5=%xmm6,<diag2=%xmm2 | ||
4257 | pxor %xmm6,%xmm2 | ||
4258 | |||
4259 | # qhasm: uint32323232 a6 += diag2 | ||
4260 | # asm 1: paddd <diag2=int6464#3,<a6=int6464#5 | ||
4261 | # asm 2: paddd <diag2=%xmm2,<a6=%xmm4 | ||
4262 | paddd %xmm2,%xmm4 | ||
4263 | |||
4264 | # qhasm: a7 = diag2 | ||
4265 | # asm 1: movdqa <diag2=int6464#3,>a7=int6464#6 | ||
4266 | # asm 2: movdqa <diag2=%xmm2,>a7=%xmm5 | ||
4267 | movdqa %xmm2,%xmm5 | ||
4268 | |||
4269 | # qhasm: b6 = a6 | ||
4270 | # asm 1: movdqa <a6=int6464#5,>b6=int6464#7 | ||
4271 | # asm 2: movdqa <a6=%xmm4,>b6=%xmm6 | ||
4272 | movdqa %xmm4,%xmm6 | ||
4273 | |||
4274 | # qhasm: uint32323232 a6 <<= 13 | ||
4275 | # asm 1: pslld $13,<a6=int6464#5 | ||
4276 | # asm 2: pslld $13,<a6=%xmm4 | ||
4277 | pslld $13,%xmm4 | ||
4278 | |||
4279 | # qhasm: uint32323232 b6 >>= 19 | ||
4280 | # asm 1: psrld $19,<b6=int6464#7 | ||
4281 | # asm 2: psrld $19,<b6=%xmm6 | ||
4282 | psrld $19,%xmm6 | ||
4283 | |||
4284 | # qhasm: diag3 ^= a6 | ||
4285 | # asm 1: pxor <a6=int6464#5,<diag3=int6464#4 | ||
4286 | # asm 2: pxor <a6=%xmm4,<diag3=%xmm3 | ||
4287 | pxor %xmm4,%xmm3 | ||
4288 | |||
4289 | # qhasm: diag2 <<<= 64 | ||
4290 | # asm 1: pshufd $0x4e,<diag2=int6464#3,<diag2=int6464#3 | ||
4291 | # asm 2: pshufd $0x4e,<diag2=%xmm2,<diag2=%xmm2 | ||
4292 | pshufd $0x4e,%xmm2,%xmm2 | ||
4293 | |||
4294 | # qhasm: diag3 ^= b6 | ||
4295 | # asm 1: pxor <b6=int6464#7,<diag3=int6464#4 | ||
4296 | # asm 2: pxor <b6=%xmm6,<diag3=%xmm3 | ||
4297 | pxor %xmm6,%xmm3 | ||
4298 | |||
4299 | # qhasm: unsigned>? i -= 4 | ||
4300 | # asm 1: sub $4,<i=int64#4 | ||
4301 | # asm 2: sub $4,<i=%rcx | ||
4302 | sub $4,%rcx | ||
4303 | |||
4304 | # qhasm: uint32323232 a7 += diag3 | ||
4305 | # asm 1: paddd <diag3=int6464#4,<a7=int6464#6 | ||
4306 | # asm 2: paddd <diag3=%xmm3,<a7=%xmm5 | ||
4307 | paddd %xmm3,%xmm5 | ||
4308 | |||
4309 | # qhasm: a0 = diag1 | ||
4310 | # asm 1: movdqa <diag1=int6464#2,>a0=int6464#5 | ||
4311 | # asm 2: movdqa <diag1=%xmm1,>a0=%xmm4 | ||
4312 | movdqa %xmm1,%xmm4 | ||
4313 | |||
4314 | # qhasm: b7 = a7 | ||
4315 | # asm 1: movdqa <a7=int6464#6,>b7=int6464#7 | ||
4316 | # asm 2: movdqa <a7=%xmm5,>b7=%xmm6 | ||
4317 | movdqa %xmm5,%xmm6 | ||
4318 | |||
4319 | # qhasm: uint32323232 a7 <<= 18 | ||
4320 | # asm 1: pslld $18,<a7=int6464#6 | ||
4321 | # asm 2: pslld $18,<a7=%xmm5 | ||
4322 | pslld $18,%xmm5 | ||
4323 | |||
4324 | # qhasm: b0 = 0 | ||
4325 | # asm 1: pxor >b0=int6464#8,>b0=int6464#8 | ||
4326 | # asm 2: pxor >b0=%xmm7,>b0=%xmm7 | ||
4327 | pxor %xmm7,%xmm7 | ||
4328 | |||
4329 | # qhasm: uint32323232 b7 >>= 14 | ||
4330 | # asm 1: psrld $14,<b7=int6464#7 | ||
4331 | # asm 2: psrld $14,<b7=%xmm6 | ||
4332 | psrld $14,%xmm6 | ||
4333 | |||
4334 | # qhasm: diag0 ^= a7 | ||
4335 | # asm 1: pxor <a7=int6464#6,<diag0=int6464#1 | ||
4336 | # asm 2: pxor <a7=%xmm5,<diag0=%xmm0 | ||
4337 | pxor %xmm5,%xmm0 | ||
4338 | |||
4339 | # qhasm: diag3 <<<= 96 | ||
4340 | # asm 1: pshufd $0x39,<diag3=int6464#4,<diag3=int6464#4 | ||
4341 | # asm 2: pshufd $0x39,<diag3=%xmm3,<diag3=%xmm3 | ||
4342 | pshufd $0x39,%xmm3,%xmm3 | ||
4343 | |||
4344 | # qhasm: diag0 ^= b7 | ||
4345 | # asm 1: pxor <b7=int6464#7,<diag0=int6464#1 | ||
4346 | # asm 2: pxor <b7=%xmm6,<diag0=%xmm0 | ||
4347 | pxor %xmm6,%xmm0 | ||
4348 | # comment:fp stack unchanged by jump | ||
4349 | |||
4350 | # qhasm: goto mainloop2 if unsigned> | ||
4351 | ja ._mainloop2 | ||
4352 | |||
4353 | # qhasm: uint32323232 diag0 += x0 | ||
4354 | # asm 1: paddd <x0=stack128#4,<diag0=int6464#1 | ||
4355 | # asm 2: paddd <x0=48(%rsp),<diag0=%xmm0 | ||
4356 | paddd 48(%rsp),%xmm0 | ||
4357 | |||
4358 | # qhasm: uint32323232 diag1 += x1 | ||
4359 | # asm 1: paddd <x1=stack128#1,<diag1=int6464#2 | ||
4360 | # asm 2: paddd <x1=0(%rsp),<diag1=%xmm1 | ||
4361 | paddd 0(%rsp),%xmm1 | ||
4362 | |||
4363 | # qhasm: uint32323232 diag2 += x2 | ||
4364 | # asm 1: paddd <x2=stack128#2,<diag2=int6464#3 | ||
4365 | # asm 2: paddd <x2=16(%rsp),<diag2=%xmm2 | ||
4366 | paddd 16(%rsp),%xmm2 | ||
4367 | |||
4368 | # qhasm: uint32323232 diag3 += x3 | ||
4369 | # asm 1: paddd <x3=stack128#3,<diag3=int6464#4 | ||
4370 | # asm 2: paddd <x3=32(%rsp),<diag3=%xmm3 | ||
4371 | paddd 32(%rsp),%xmm3 | ||
4372 | |||
4373 | # qhasm: in0 = diag0 | ||
4374 | # asm 1: movd <diag0=int6464#1,>in0=int64#4 | ||
4375 | # asm 2: movd <diag0=%xmm0,>in0=%rcx | ||
4376 | movd %xmm0,%rcx | ||
4377 | |||
4378 | # qhasm: in12 = diag1 | ||
4379 | # asm 1: movd <diag1=int6464#2,>in12=int64#5 | ||
4380 | # asm 2: movd <diag1=%xmm1,>in12=%r8 | ||
4381 | movd %xmm1,%r8 | ||
4382 | |||
4383 | # qhasm: in8 = diag2 | ||
4384 | # asm 1: movd <diag2=int6464#3,>in8=int64#6 | ||
4385 | # asm 2: movd <diag2=%xmm2,>in8=%r9 | ||
4386 | movd %xmm2,%r9 | ||
4387 | |||
4388 | # qhasm: in4 = diag3 | ||
4389 | # asm 1: movd <diag3=int6464#4,>in4=int64#7 | ||
4390 | # asm 2: movd <diag3=%xmm3,>in4=%rax | ||
4391 | movd %xmm3,%rax | ||
4392 | |||
4393 | # qhasm: diag0 <<<= 96 | ||
4394 | # asm 1: pshufd $0x39,<diag0=int6464#1,<diag0=int6464#1 | ||
4395 | # asm 2: pshufd $0x39,<diag0=%xmm0,<diag0=%xmm0 | ||
4396 | pshufd $0x39,%xmm0,%xmm0 | ||
4397 | |||
4398 | # qhasm: diag1 <<<= 96 | ||
4399 | # asm 1: pshufd $0x39,<diag1=int6464#2,<diag1=int6464#2 | ||
4400 | # asm 2: pshufd $0x39,<diag1=%xmm1,<diag1=%xmm1 | ||
4401 | pshufd $0x39,%xmm1,%xmm1 | ||
4402 | |||
4403 | # qhasm: diag2 <<<= 96 | ||
4404 | # asm 1: pshufd $0x39,<diag2=int6464#3,<diag2=int6464#3 | ||
4405 | # asm 2: pshufd $0x39,<diag2=%xmm2,<diag2=%xmm2 | ||
4406 | pshufd $0x39,%xmm2,%xmm2 | ||
4407 | |||
4408 | # qhasm: diag3 <<<= 96 | ||
4409 | # asm 1: pshufd $0x39,<diag3=int6464#4,<diag3=int6464#4 | ||
4410 | # asm 2: pshufd $0x39,<diag3=%xmm3,<diag3=%xmm3 | ||
4411 | pshufd $0x39,%xmm3,%xmm3 | ||
4412 | |||
4413 | # qhasm: (uint32) in0 ^= *(uint32 *) (m + 0) | ||
4414 | # asm 1: xorl 0(<m=int64#2),<in0=int64#4d | ||
4415 | # asm 2: xorl 0(<m=%rsi),<in0=%ecx | ||
4416 | xorl 0(%rsi),%ecx | ||
4417 | |||
4418 | # qhasm: (uint32) in12 ^= *(uint32 *) (m + 48) | ||
4419 | # asm 1: xorl 48(<m=int64#2),<in12=int64#5d | ||
4420 | # asm 2: xorl 48(<m=%rsi),<in12=%r8d | ||
4421 | xorl 48(%rsi),%r8d | ||
4422 | |||
4423 | # qhasm: (uint32) in8 ^= *(uint32 *) (m + 32) | ||
4424 | # asm 1: xorl 32(<m=int64#2),<in8=int64#6d | ||
4425 | # asm 2: xorl 32(<m=%rsi),<in8=%r9d | ||
4426 | xorl 32(%rsi),%r9d | ||
4427 | |||
4428 | # qhasm: (uint32) in4 ^= *(uint32 *) (m + 16) | ||
4429 | # asm 1: xorl 16(<m=int64#2),<in4=int64#7d | ||
4430 | # asm 2: xorl 16(<m=%rsi),<in4=%eax | ||
4431 | xorl 16(%rsi),%eax | ||
4432 | |||
4433 | # qhasm: *(uint32 *) (out + 0) = in0 | ||
4434 | # asm 1: movl <in0=int64#4d,0(<out=int64#1) | ||
4435 | # asm 2: movl <in0=%ecx,0(<out=%rdi) | ||
4436 | movl %ecx,0(%rdi) | ||
4437 | |||
4438 | # qhasm: *(uint32 *) (out + 48) = in12 | ||
4439 | # asm 1: movl <in12=int64#5d,48(<out=int64#1) | ||
4440 | # asm 2: movl <in12=%r8d,48(<out=%rdi) | ||
4441 | movl %r8d,48(%rdi) | ||
4442 | |||
4443 | # qhasm: *(uint32 *) (out + 32) = in8 | ||
4444 | # asm 1: movl <in8=int64#6d,32(<out=int64#1) | ||
4445 | # asm 2: movl <in8=%r9d,32(<out=%rdi) | ||
4446 | movl %r9d,32(%rdi) | ||
4447 | |||
4448 | # qhasm: *(uint32 *) (out + 16) = in4 | ||
4449 | # asm 1: movl <in4=int64#7d,16(<out=int64#1) | ||
4450 | # asm 2: movl <in4=%eax,16(<out=%rdi) | ||
4451 | movl %eax,16(%rdi) | ||
4452 | |||
4453 | # qhasm: in5 = diag0 | ||
4454 | # asm 1: movd <diag0=int6464#1,>in5=int64#4 | ||
4455 | # asm 2: movd <diag0=%xmm0,>in5=%rcx | ||
4456 | movd %xmm0,%rcx | ||
4457 | |||
4458 | # qhasm: in1 = diag1 | ||
4459 | # asm 1: movd <diag1=int6464#2,>in1=int64#5 | ||
4460 | # asm 2: movd <diag1=%xmm1,>in1=%r8 | ||
4461 | movd %xmm1,%r8 | ||
4462 | |||
4463 | # qhasm: in13 = diag2 | ||
4464 | # asm 1: movd <diag2=int6464#3,>in13=int64#6 | ||
4465 | # asm 2: movd <diag2=%xmm2,>in13=%r9 | ||
4466 | movd %xmm2,%r9 | ||
4467 | |||
4468 | # qhasm: in9 = diag3 | ||
4469 | # asm 1: movd <diag3=int6464#4,>in9=int64#7 | ||
4470 | # asm 2: movd <diag3=%xmm3,>in9=%rax | ||
4471 | movd %xmm3,%rax | ||
4472 | |||
4473 | # qhasm: diag0 <<<= 96 | ||
4474 | # asm 1: pshufd $0x39,<diag0=int6464#1,<diag0=int6464#1 | ||
4475 | # asm 2: pshufd $0x39,<diag0=%xmm0,<diag0=%xmm0 | ||
4476 | pshufd $0x39,%xmm0,%xmm0 | ||
4477 | |||
4478 | # qhasm: diag1 <<<= 96 | ||
4479 | # asm 1: pshufd $0x39,<diag1=int6464#2,<diag1=int6464#2 | ||
4480 | # asm 2: pshufd $0x39,<diag1=%xmm1,<diag1=%xmm1 | ||
4481 | pshufd $0x39,%xmm1,%xmm1 | ||
4482 | |||
4483 | # qhasm: diag2 <<<= 96 | ||
4484 | # asm 1: pshufd $0x39,<diag2=int6464#3,<diag2=int6464#3 | ||
4485 | # asm 2: pshufd $0x39,<diag2=%xmm2,<diag2=%xmm2 | ||
4486 | pshufd $0x39,%xmm2,%xmm2 | ||
4487 | |||
4488 | # qhasm: diag3 <<<= 96 | ||
4489 | # asm 1: pshufd $0x39,<diag3=int6464#4,<diag3=int6464#4 | ||
4490 | # asm 2: pshufd $0x39,<diag3=%xmm3,<diag3=%xmm3 | ||
4491 | pshufd $0x39,%xmm3,%xmm3 | ||
4492 | |||
4493 | # qhasm: (uint32) in5 ^= *(uint32 *) (m + 20) | ||
4494 | # asm 1: xorl 20(<m=int64#2),<in5=int64#4d | ||
4495 | # asm 2: xorl 20(<m=%rsi),<in5=%ecx | ||
4496 | xorl 20(%rsi),%ecx | ||
4497 | |||
4498 | # qhasm: (uint32) in1 ^= *(uint32 *) (m + 4) | ||
4499 | # asm 1: xorl 4(<m=int64#2),<in1=int64#5d | ||
4500 | # asm 2: xorl 4(<m=%rsi),<in1=%r8d | ||
4501 | xorl 4(%rsi),%r8d | ||
4502 | |||
4503 | # qhasm: (uint32) in13 ^= *(uint32 *) (m + 52) | ||
4504 | # asm 1: xorl 52(<m=int64#2),<in13=int64#6d | ||
4505 | # asm 2: xorl 52(<m=%rsi),<in13=%r9d | ||
4506 | xorl 52(%rsi),%r9d | ||
4507 | |||
4508 | # qhasm: (uint32) in9 ^= *(uint32 *) (m + 36) | ||
4509 | # asm 1: xorl 36(<m=int64#2),<in9=int64#7d | ||
4510 | # asm 2: xorl 36(<m=%rsi),<in9=%eax | ||
4511 | xorl 36(%rsi),%eax | ||
4512 | |||
4513 | # qhasm: *(uint32 *) (out + 20) = in5 | ||
4514 | # asm 1: movl <in5=int64#4d,20(<out=int64#1) | ||
4515 | # asm 2: movl <in5=%ecx,20(<out=%rdi) | ||
4516 | movl %ecx,20(%rdi) | ||
4517 | |||
4518 | # qhasm: *(uint32 *) (out + 4) = in1 | ||
4519 | # asm 1: movl <in1=int64#5d,4(<out=int64#1) | ||
4520 | # asm 2: movl <in1=%r8d,4(<out=%rdi) | ||
4521 | movl %r8d,4(%rdi) | ||
4522 | |||
4523 | # qhasm: *(uint32 *) (out + 52) = in13 | ||
4524 | # asm 1: movl <in13=int64#6d,52(<out=int64#1) | ||
4525 | # asm 2: movl <in13=%r9d,52(<out=%rdi) | ||
4526 | movl %r9d,52(%rdi) | ||
4527 | |||
4528 | # qhasm: *(uint32 *) (out + 36) = in9 | ||
4529 | # asm 1: movl <in9=int64#7d,36(<out=int64#1) | ||
4530 | # asm 2: movl <in9=%eax,36(<out=%rdi) | ||
4531 | movl %eax,36(%rdi) | ||
4532 | |||
4533 | # qhasm: in10 = diag0 | ||
4534 | # asm 1: movd <diag0=int6464#1,>in10=int64#4 | ||
4535 | # asm 2: movd <diag0=%xmm0,>in10=%rcx | ||
4536 | movd %xmm0,%rcx | ||
4537 | |||
4538 | # qhasm: in6 = diag1 | ||
4539 | # asm 1: movd <diag1=int6464#2,>in6=int64#5 | ||
4540 | # asm 2: movd <diag1=%xmm1,>in6=%r8 | ||
4541 | movd %xmm1,%r8 | ||
4542 | |||
4543 | # qhasm: in2 = diag2 | ||
4544 | # asm 1: movd <diag2=int6464#3,>in2=int64#6 | ||
4545 | # asm 2: movd <diag2=%xmm2,>in2=%r9 | ||
4546 | movd %xmm2,%r9 | ||
4547 | |||
4548 | # qhasm: in14 = diag3 | ||
4549 | # asm 1: movd <diag3=int6464#4,>in14=int64#7 | ||
4550 | # asm 2: movd <diag3=%xmm3,>in14=%rax | ||
4551 | movd %xmm3,%rax | ||
4552 | |||
4553 | # qhasm: diag0 <<<= 96 | ||
4554 | # asm 1: pshufd $0x39,<diag0=int6464#1,<diag0=int6464#1 | ||
4555 | # asm 2: pshufd $0x39,<diag0=%xmm0,<diag0=%xmm0 | ||
4556 | pshufd $0x39,%xmm0,%xmm0 | ||
4557 | |||
4558 | # qhasm: diag1 <<<= 96 | ||
4559 | # asm 1: pshufd $0x39,<diag1=int6464#2,<diag1=int6464#2 | ||
4560 | # asm 2: pshufd $0x39,<diag1=%xmm1,<diag1=%xmm1 | ||
4561 | pshufd $0x39,%xmm1,%xmm1 | ||
4562 | |||
4563 | # qhasm: diag2 <<<= 96 | ||
4564 | # asm 1: pshufd $0x39,<diag2=int6464#3,<diag2=int6464#3 | ||
4565 | # asm 2: pshufd $0x39,<diag2=%xmm2,<diag2=%xmm2 | ||
4566 | pshufd $0x39,%xmm2,%xmm2 | ||
4567 | |||
4568 | # qhasm: diag3 <<<= 96 | ||
4569 | # asm 1: pshufd $0x39,<diag3=int6464#4,<diag3=int6464#4 | ||
4570 | # asm 2: pshufd $0x39,<diag3=%xmm3,<diag3=%xmm3 | ||
4571 | pshufd $0x39,%xmm3,%xmm3 | ||
4572 | |||
4573 | # qhasm: (uint32) in10 ^= *(uint32 *) (m + 40) | ||
4574 | # asm 1: xorl 40(<m=int64#2),<in10=int64#4d | ||
4575 | # asm 2: xorl 40(<m=%rsi),<in10=%ecx | ||
4576 | xorl 40(%rsi),%ecx | ||
4577 | |||
4578 | # qhasm: (uint32) in6 ^= *(uint32 *) (m + 24) | ||
4579 | # asm 1: xorl 24(<m=int64#2),<in6=int64#5d | ||
4580 | # asm 2: xorl 24(<m=%rsi),<in6=%r8d | ||
4581 | xorl 24(%rsi),%r8d | ||
4582 | |||
4583 | # qhasm: (uint32) in2 ^= *(uint32 *) (m + 8) | ||
4584 | # asm 1: xorl 8(<m=int64#2),<in2=int64#6d | ||
4585 | # asm 2: xorl 8(<m=%rsi),<in2=%r9d | ||
4586 | xorl 8(%rsi),%r9d | ||
4587 | |||
4588 | # qhasm: (uint32) in14 ^= *(uint32 *) (m + 56) | ||
4589 | # asm 1: xorl 56(<m=int64#2),<in14=int64#7d | ||
4590 | # asm 2: xorl 56(<m=%rsi),<in14=%eax | ||
4591 | xorl 56(%rsi),%eax | ||
4592 | |||
4593 | # qhasm: *(uint32 *) (out + 40) = in10 | ||
4594 | # asm 1: movl <in10=int64#4d,40(<out=int64#1) | ||
4595 | # asm 2: movl <in10=%ecx,40(<out=%rdi) | ||
4596 | movl %ecx,40(%rdi) | ||
4597 | |||
4598 | # qhasm: *(uint32 *) (out + 24) = in6 | ||
4599 | # asm 1: movl <in6=int64#5d,24(<out=int64#1) | ||
4600 | # asm 2: movl <in6=%r8d,24(<out=%rdi) | ||
4601 | movl %r8d,24(%rdi) | ||
4602 | |||
4603 | # qhasm: *(uint32 *) (out + 8) = in2 | ||
4604 | # asm 1: movl <in2=int64#6d,8(<out=int64#1) | ||
4605 | # asm 2: movl <in2=%r9d,8(<out=%rdi) | ||
4606 | movl %r9d,8(%rdi) | ||
4607 | |||
4608 | # qhasm: *(uint32 *) (out + 56) = in14 | ||
4609 | # asm 1: movl <in14=int64#7d,56(<out=int64#1) | ||
4610 | # asm 2: movl <in14=%eax,56(<out=%rdi) | ||
4611 | movl %eax,56(%rdi) | ||
4612 | |||
4613 | # qhasm: in15 = diag0 | ||
4614 | # asm 1: movd <diag0=int6464#1,>in15=int64#4 | ||
4615 | # asm 2: movd <diag0=%xmm0,>in15=%rcx | ||
4616 | movd %xmm0,%rcx | ||
4617 | |||
4618 | # qhasm: in11 = diag1 | ||
4619 | # asm 1: movd <diag1=int6464#2,>in11=int64#5 | ||
4620 | # asm 2: movd <diag1=%xmm1,>in11=%r8 | ||
4621 | movd %xmm1,%r8 | ||
4622 | |||
4623 | # qhasm: in7 = diag2 | ||
4624 | # asm 1: movd <diag2=int6464#3,>in7=int64#6 | ||
4625 | # asm 2: movd <diag2=%xmm2,>in7=%r9 | ||
4626 | movd %xmm2,%r9 | ||
4627 | |||
4628 | # qhasm: in3 = diag3 | ||
4629 | # asm 1: movd <diag3=int6464#4,>in3=int64#7 | ||
4630 | # asm 2: movd <diag3=%xmm3,>in3=%rax | ||
4631 | movd %xmm3,%rax | ||
4632 | |||
4633 | # qhasm: (uint32) in15 ^= *(uint32 *) (m + 60) | ||
4634 | # asm 1: xorl 60(<m=int64#2),<in15=int64#4d | ||
4635 | # asm 2: xorl 60(<m=%rsi),<in15=%ecx | ||
4636 | xorl 60(%rsi),%ecx | ||
4637 | |||
4638 | # qhasm: (uint32) in11 ^= *(uint32 *) (m + 44) | ||
4639 | # asm 1: xorl 44(<m=int64#2),<in11=int64#5d | ||
4640 | # asm 2: xorl 44(<m=%rsi),<in11=%r8d | ||
4641 | xorl 44(%rsi),%r8d | ||
4642 | |||
4643 | # qhasm: (uint32) in7 ^= *(uint32 *) (m + 28) | ||
4644 | # asm 1: xorl 28(<m=int64#2),<in7=int64#6d | ||
4645 | # asm 2: xorl 28(<m=%rsi),<in7=%r9d | ||
4646 | xorl 28(%rsi),%r9d | ||
4647 | |||
4648 | # qhasm: (uint32) in3 ^= *(uint32 *) (m + 12) | ||
4649 | # asm 1: xorl 12(<m=int64#2),<in3=int64#7d | ||
4650 | # asm 2: xorl 12(<m=%rsi),<in3=%eax | ||
4651 | xorl 12(%rsi),%eax | ||
4652 | |||
4653 | # qhasm: *(uint32 *) (out + 60) = in15 | ||
4654 | # asm 1: movl <in15=int64#4d,60(<out=int64#1) | ||
4655 | # asm 2: movl <in15=%ecx,60(<out=%rdi) | ||
4656 | movl %ecx,60(%rdi) | ||
4657 | |||
4658 | # qhasm: *(uint32 *) (out + 44) = in11 | ||
4659 | # asm 1: movl <in11=int64#5d,44(<out=int64#1) | ||
4660 | # asm 2: movl <in11=%r8d,44(<out=%rdi) | ||
4661 | movl %r8d,44(%rdi) | ||
4662 | |||
4663 | # qhasm: *(uint32 *) (out + 28) = in7 | ||
4664 | # asm 1: movl <in7=int64#6d,28(<out=int64#1) | ||
4665 | # asm 2: movl <in7=%r9d,28(<out=%rdi) | ||
4666 | movl %r9d,28(%rdi) | ||
4667 | |||
4668 | # qhasm: *(uint32 *) (out + 12) = in3 | ||
4669 | # asm 1: movl <in3=int64#7d,12(<out=int64#1) | ||
4670 | # asm 2: movl <in3=%eax,12(<out=%rdi) | ||
4671 | movl %eax,12(%rdi) | ||
4672 | |||
4673 | # qhasm: bytes = bytes_backup | ||
4674 | # asm 1: movq <bytes_backup=stack64#8,>bytes=int64#6 | ||
4675 | # asm 2: movq <bytes_backup=408(%rsp),>bytes=%r9 | ||
4676 | movq 408(%rsp),%r9 | ||
4677 | |||
4678 | # qhasm: in8 = ((uint32 *)&x2)[0] | ||
4679 | # asm 1: movl <x2=stack128#2,>in8=int64#4d | ||
4680 | # asm 2: movl <x2=16(%rsp),>in8=%ecx | ||
4681 | movl 16(%rsp),%ecx | ||
4682 | |||
4683 | # qhasm: in9 = ((uint32 *)&x3)[1] | ||
4684 | # asm 1: movl 4+<x3=stack128#3,>in9=int64#5d | ||
4685 | # asm 2: movl 4+<x3=32(%rsp),>in9=%r8d | ||
4686 | movl 4+32(%rsp),%r8d | ||
4687 | |||
4688 | # qhasm: in8 += 1 | ||
4689 | # asm 1: add $1,<in8=int64#4 | ||
4690 | # asm 2: add $1,<in8=%rcx | ||
4691 | add $1,%rcx | ||
4692 | |||
4693 | # qhasm: in9 <<= 32 | ||
4694 | # asm 1: shl $32,<in9=int64#5 | ||
4695 | # asm 2: shl $32,<in9=%r8 | ||
4696 | shl $32,%r8 | ||
4697 | |||
4698 | # qhasm: in8 += in9 | ||
4699 | # asm 1: add <in9=int64#5,<in8=int64#4 | ||
4700 | # asm 2: add <in9=%r8,<in8=%rcx | ||
4701 | add %r8,%rcx | ||
4702 | |||
4703 | # qhasm: in9 = in8 | ||
4704 | # asm 1: mov <in8=int64#4,>in9=int64#5 | ||
4705 | # asm 2: mov <in8=%rcx,>in9=%r8 | ||
4706 | mov %rcx,%r8 | ||
4707 | |||
4708 | # qhasm: (uint64) in9 >>= 32 | ||
4709 | # asm 1: shr $32,<in9=int64#5 | ||
4710 | # asm 2: shr $32,<in9=%r8 | ||
4711 | shr $32,%r8 | ||
4712 | |||
4713 | # qhasm: ((uint32 *)&x2)[0] = in8 | ||
4714 | # asm 1: movl <in8=int64#4d,>x2=stack128#2 | ||
4715 | # asm 2: movl <in8=%ecx,>x2=16(%rsp) | ||
4716 | movl %ecx,16(%rsp) | ||
4717 | |||
4718 | # qhasm: ((uint32 *)&x3)[1] = in9 | ||
4719 | # asm 1: movl <in9=int64#5d,4+<x3=stack128#3 | ||
4720 | # asm 2: movl <in9=%r8d,4+<x3=32(%rsp) | ||
4721 | movl %r8d,4+32(%rsp) | ||
4722 | |||
4723 | # qhasm: unsigned>? unsigned<? bytes - 64 | ||
4724 | # asm 1: cmp $64,<bytes=int64#6 | ||
4725 | # asm 2: cmp $64,<bytes=%r9 | ||
4726 | cmp $64,%r9 | ||
4727 | # comment:fp stack unchanged by jump | ||
4728 | |||
4729 | # qhasm: goto bytesatleast65 if unsigned> | ||
4730 | ja ._bytesatleast65 | ||
4731 | # comment:fp stack unchanged by jump | ||
4732 | |||
4733 | # qhasm: goto bytesatleast64 if !unsigned< | ||
4734 | jae ._bytesatleast64 | ||
4735 | |||
4736 | # qhasm: m = out | ||
4737 | # asm 1: mov <out=int64#1,>m=int64#2 | ||
4738 | # asm 2: mov <out=%rdi,>m=%rsi | ||
4739 | mov %rdi,%rsi | ||
4740 | |||
4741 | # qhasm: out = ctarget | ||
4742 | # asm 1: mov <ctarget=int64#3,>out=int64#1 | ||
4743 | # asm 2: mov <ctarget=%rdx,>out=%rdi | ||
4744 | mov %rdx,%rdi | ||
4745 | |||
4746 | # qhasm: i = bytes | ||
4747 | # asm 1: mov <bytes=int64#6,>i=int64#4 | ||
4748 | # asm 2: mov <bytes=%r9,>i=%rcx | ||
4749 | mov %r9,%rcx | ||
4750 | |||
4751 | # qhasm: while (i) { *out++ = *m++; --i } | ||
4752 | rep movsb | ||
4753 | # comment:fp stack unchanged by fallthrough | ||
4754 | |||
4755 | # qhasm: bytesatleast64: | ||
4756 | ._bytesatleast64: | ||
4757 | # comment:fp stack unchanged by fallthrough | ||
4758 | |||
4759 | # qhasm: done: | ||
4760 | ._done: | ||
4761 | |||
4762 | # qhasm: r11_caller = r11_stack | ||
4763 | # asm 1: movq <r11_stack=stack64#1,>r11_caller=int64#9 | ||
4764 | # asm 2: movq <r11_stack=352(%rsp),>r11_caller=%r11 | ||
4765 | movq 352(%rsp),%r11 | ||
4766 | |||
4767 | # qhasm: r12_caller = r12_stack | ||
4768 | # asm 1: movq <r12_stack=stack64#2,>r12_caller=int64#10 | ||
4769 | # asm 2: movq <r12_stack=360(%rsp),>r12_caller=%r12 | ||
4770 | movq 360(%rsp),%r12 | ||
4771 | |||
4772 | # qhasm: r13_caller = r13_stack | ||
4773 | # asm 1: movq <r13_stack=stack64#3,>r13_caller=int64#11 | ||
4774 | # asm 2: movq <r13_stack=368(%rsp),>r13_caller=%r13 | ||
4775 | movq 368(%rsp),%r13 | ||
4776 | |||
4777 | # qhasm: r14_caller = r14_stack | ||
4778 | # asm 1: movq <r14_stack=stack64#4,>r14_caller=int64#12 | ||
4779 | # asm 2: movq <r14_stack=376(%rsp),>r14_caller=%r14 | ||
4780 | movq 376(%rsp),%r14 | ||
4781 | |||
4782 | # qhasm: r15_caller = r15_stack | ||
4783 | # asm 1: movq <r15_stack=stack64#5,>r15_caller=int64#13 | ||
4784 | # asm 2: movq <r15_stack=384(%rsp),>r15_caller=%r15 | ||
4785 | movq 384(%rsp),%r15 | ||
4786 | |||
4787 | # qhasm: rbx_caller = rbx_stack | ||
4788 | # asm 1: movq <rbx_stack=stack64#6,>rbx_caller=int64#14 | ||
4789 | # asm 2: movq <rbx_stack=392(%rsp),>rbx_caller=%rbx | ||
4790 | movq 392(%rsp),%rbx | ||
4791 | |||
4792 | # qhasm: rbp_caller = rbp_stack | ||
4793 | # asm 1: movq <rbp_stack=stack64#7,>rbp_caller=int64#15 | ||
4794 | # asm 2: movq <rbp_stack=400(%rsp),>rbp_caller=%rbp | ||
4795 | movq 400(%rsp),%rbp | ||
4796 | |||
4797 | # qhasm: leave | ||
4798 | add %r11,%rsp | ||
4799 | xor %rax,%rax | ||
4800 | xor %rdx,%rdx | ||
4801 | ret | ||
4802 | |||
4803 | # qhasm: bytesatleast65: | ||
4804 | ._bytesatleast65: | ||
4805 | |||
4806 | # qhasm: bytes -= 64 | ||
4807 | # asm 1: sub $64,<bytes=int64#6 | ||
4808 | # asm 2: sub $64,<bytes=%r9 | ||
4809 | sub $64,%r9 | ||
4810 | |||
4811 | # qhasm: out += 64 | ||
4812 | # asm 1: add $64,<out=int64#1 | ||
4813 | # asm 2: add $64,<out=%rdi | ||
4814 | add $64,%rdi | ||
4815 | |||
4816 | # qhasm: m += 64 | ||
4817 | # asm 1: add $64,<m=int64#2 | ||
4818 | # asm 2: add $64,<m=%rsi | ||
4819 | add $64,%rsi | ||
4820 | # comment:fp stack unchanged by jump | ||
4821 | |||
4822 | # qhasm: goto bytesbetween1and255 | ||
4823 | jmp ._bytesbetween1and255 | ||
diff --git a/nacl/crypto_stream/salsa208/checksum b/nacl/crypto_stream/salsa208/checksum new file mode 100644 index 00000000..c87364e6 --- /dev/null +++ b/nacl/crypto_stream/salsa208/checksum | |||
@@ -0,0 +1 @@ | |||
05f32b0647417aaa446b0b3127318133cf9af32b771869eab267000bf02710cd | |||
diff --git a/nacl/crypto_stream/salsa208/ref/api.h b/nacl/crypto_stream/salsa208/ref/api.h new file mode 100644 index 00000000..c2b18461 --- /dev/null +++ b/nacl/crypto_stream/salsa208/ref/api.h | |||
@@ -0,0 +1,2 @@ | |||
1 | #define CRYPTO_KEYBYTES 32 | ||
2 | #define CRYPTO_NONCEBYTES 8 | ||
diff --git a/nacl/crypto_stream/salsa208/ref/implementors b/nacl/crypto_stream/salsa208/ref/implementors new file mode 100644 index 00000000..f6fb3c73 --- /dev/null +++ b/nacl/crypto_stream/salsa208/ref/implementors | |||
@@ -0,0 +1 @@ | |||
Daniel J. Bernstein | |||
diff --git a/nacl/crypto_stream/salsa208/ref/stream.c b/nacl/crypto_stream/salsa208/ref/stream.c new file mode 100644 index 00000000..cdcfbc0e --- /dev/null +++ b/nacl/crypto_stream/salsa208/ref/stream.c | |||
@@ -0,0 +1,49 @@ | |||
1 | /* | ||
2 | version 20080913 | ||
3 | D. J. Bernstein | ||
4 | Public domain. | ||
5 | */ | ||
6 | |||
7 | #include "crypto_core_salsa208.h" | ||
8 | #include "crypto_stream.h" | ||
9 | |||
10 | typedef unsigned int uint32; | ||
11 | |||
12 | static const unsigned char sigma[16] = "expand 32-byte k"; | ||
13 | |||
14 | int crypto_stream( | ||
15 | unsigned char *c,unsigned long long clen, | ||
16 | const unsigned char *n, | ||
17 | const unsigned char *k | ||
18 | ) | ||
19 | { | ||
20 | unsigned char in[16]; | ||
21 | unsigned char block[64]; | ||
22 | int i; | ||
23 | unsigned int u; | ||
24 | |||
25 | if (!clen) return 0; | ||
26 | |||
27 | for (i = 0;i < 8;++i) in[i] = n[i]; | ||
28 | for (i = 8;i < 16;++i) in[i] = 0; | ||
29 | |||
30 | while (clen >= 64) { | ||
31 | crypto_core_salsa208(c,in,k,sigma); | ||
32 | |||
33 | u = 1; | ||
34 | for (i = 8;i < 16;++i) { | ||
35 | u += (unsigned int) in[i]; | ||
36 | in[i] = u; | ||
37 | u >>= 8; | ||
38 | } | ||
39 | |||
40 | clen -= 64; | ||
41 | c += 64; | ||
42 | } | ||
43 | |||
44 | if (clen) { | ||
45 | crypto_core_salsa208(block,in,k,sigma); | ||
46 | for (i = 0;i < clen;++i) c[i] = block[i]; | ||
47 | } | ||
48 | return 0; | ||
49 | } | ||
diff --git a/nacl/crypto_stream/salsa208/ref/xor.c b/nacl/crypto_stream/salsa208/ref/xor.c new file mode 100644 index 00000000..c017ac42 --- /dev/null +++ b/nacl/crypto_stream/salsa208/ref/xor.c | |||
@@ -0,0 +1,52 @@ | |||
1 | /* | ||
2 | version 20080913 | ||
3 | D. J. Bernstein | ||
4 | Public domain. | ||
5 | */ | ||
6 | |||
7 | #include "crypto_core_salsa208.h" | ||
8 | #include "crypto_stream.h" | ||
9 | |||
10 | typedef unsigned int uint32; | ||
11 | |||
12 | static const unsigned char sigma[16] = "expand 32-byte k"; | ||
13 | |||
14 | int crypto_stream_xor( | ||
15 | unsigned char *c, | ||
16 | const unsigned char *m,unsigned long long mlen, | ||
17 | const unsigned char *n, | ||
18 | const unsigned char *k | ||
19 | ) | ||
20 | { | ||
21 | unsigned char in[16]; | ||
22 | unsigned char block[64]; | ||
23 | int i; | ||
24 | unsigned int u; | ||
25 | |||
26 | if (!mlen) return 0; | ||
27 | |||
28 | for (i = 0;i < 8;++i) in[i] = n[i]; | ||
29 | for (i = 8;i < 16;++i) in[i] = 0; | ||
30 | |||
31 | while (mlen >= 64) { | ||
32 | crypto_core_salsa208(block,in,k,sigma); | ||
33 | for (i = 0;i < 64;++i) c[i] = m[i] ^ block[i]; | ||
34 | |||
35 | u = 1; | ||
36 | for (i = 8;i < 16;++i) { | ||
37 | u += (unsigned int) in[i]; | ||
38 | in[i] = u; | ||
39 | u >>= 8; | ||
40 | } | ||
41 | |||
42 | mlen -= 64; | ||
43 | c += 64; | ||
44 | m += 64; | ||
45 | } | ||
46 | |||
47 | if (mlen) { | ||
48 | crypto_core_salsa208(block,in,k,sigma); | ||
49 | for (i = 0;i < mlen;++i) c[i] = m[i] ^ block[i]; | ||
50 | } | ||
51 | return 0; | ||
52 | } | ||
diff --git a/nacl/crypto_stream/salsa208/used b/nacl/crypto_stream/salsa208/used new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/nacl/crypto_stream/salsa208/used | |||
diff --git a/nacl/crypto_stream/salsa208/x86_xmm5/api.h b/nacl/crypto_stream/salsa208/x86_xmm5/api.h new file mode 100644 index 00000000..c2b18461 --- /dev/null +++ b/nacl/crypto_stream/salsa208/x86_xmm5/api.h | |||
@@ -0,0 +1,2 @@ | |||
1 | #define CRYPTO_KEYBYTES 32 | ||
2 | #define CRYPTO_NONCEBYTES 8 | ||
diff --git a/nacl/crypto_stream/salsa208/x86_xmm5/implementors b/nacl/crypto_stream/salsa208/x86_xmm5/implementors new file mode 100644 index 00000000..f6fb3c73 --- /dev/null +++ b/nacl/crypto_stream/salsa208/x86_xmm5/implementors | |||
@@ -0,0 +1 @@ | |||
Daniel J. Bernstein | |||
diff --git a/nacl/crypto_stream/salsa208/x86_xmm5/stream.s b/nacl/crypto_stream/salsa208/x86_xmm5/stream.s new file mode 100644 index 00000000..065253a8 --- /dev/null +++ b/nacl/crypto_stream/salsa208/x86_xmm5/stream.s | |||
@@ -0,0 +1,5078 @@ | |||
1 | |||
2 | # qhasm: int32 a | ||
3 | |||
4 | # qhasm: stack32 arg1 | ||
5 | |||
6 | # qhasm: stack32 arg2 | ||
7 | |||
8 | # qhasm: stack32 arg3 | ||
9 | |||
10 | # qhasm: stack32 arg4 | ||
11 | |||
12 | # qhasm: stack32 arg5 | ||
13 | |||
14 | # qhasm: stack32 arg6 | ||
15 | |||
16 | # qhasm: input arg1 | ||
17 | |||
18 | # qhasm: input arg2 | ||
19 | |||
20 | # qhasm: input arg3 | ||
21 | |||
22 | # qhasm: input arg4 | ||
23 | |||
24 | # qhasm: input arg5 | ||
25 | |||
26 | # qhasm: input arg6 | ||
27 | |||
28 | # qhasm: int32 eax | ||
29 | |||
30 | # qhasm: int32 ebx | ||
31 | |||
32 | # qhasm: int32 esi | ||
33 | |||
34 | # qhasm: int32 edi | ||
35 | |||
36 | # qhasm: int32 ebp | ||
37 | |||
38 | # qhasm: caller eax | ||
39 | |||
40 | # qhasm: caller ebx | ||
41 | |||
42 | # qhasm: caller esi | ||
43 | |||
44 | # qhasm: caller edi | ||
45 | |||
46 | # qhasm: caller ebp | ||
47 | |||
48 | # qhasm: int32 k | ||
49 | |||
50 | # qhasm: int32 kbits | ||
51 | |||
52 | # qhasm: int32 iv | ||
53 | |||
54 | # qhasm: int32 i | ||
55 | |||
56 | # qhasm: stack128 x0 | ||
57 | |||
58 | # qhasm: stack128 x1 | ||
59 | |||
60 | # qhasm: stack128 x2 | ||
61 | |||
62 | # qhasm: stack128 x3 | ||
63 | |||
64 | # qhasm: int32 m | ||
65 | |||
66 | # qhasm: stack32 out_stack | ||
67 | |||
68 | # qhasm: int32 out | ||
69 | |||
70 | # qhasm: stack32 bytes_stack | ||
71 | |||
72 | # qhasm: int32 bytes | ||
73 | |||
74 | # qhasm: stack32 eax_stack | ||
75 | |||
76 | # qhasm: stack32 ebx_stack | ||
77 | |||
78 | # qhasm: stack32 esi_stack | ||
79 | |||
80 | # qhasm: stack32 edi_stack | ||
81 | |||
82 | # qhasm: stack32 ebp_stack | ||
83 | |||
84 | # qhasm: int6464 diag0 | ||
85 | |||
86 | # qhasm: int6464 diag1 | ||
87 | |||
88 | # qhasm: int6464 diag2 | ||
89 | |||
90 | # qhasm: int6464 diag3 | ||
91 | |||
92 | # qhasm: int6464 a0 | ||
93 | |||
94 | # qhasm: int6464 a1 | ||
95 | |||
96 | # qhasm: int6464 a2 | ||
97 | |||
98 | # qhasm: int6464 a3 | ||
99 | |||
100 | # qhasm: int6464 a4 | ||
101 | |||
102 | # qhasm: int6464 a5 | ||
103 | |||
104 | # qhasm: int6464 a6 | ||
105 | |||
106 | # qhasm: int6464 a7 | ||
107 | |||
108 | # qhasm: int6464 b0 | ||
109 | |||
110 | # qhasm: int6464 b1 | ||
111 | |||
112 | # qhasm: int6464 b2 | ||
113 | |||
114 | # qhasm: int6464 b3 | ||
115 | |||
116 | # qhasm: int6464 b4 | ||
117 | |||
118 | # qhasm: int6464 b5 | ||
119 | |||
120 | # qhasm: int6464 b6 | ||
121 | |||
122 | # qhasm: int6464 b7 | ||
123 | |||
124 | # qhasm: int6464 z0 | ||
125 | |||
126 | # qhasm: int6464 z1 | ||
127 | |||
128 | # qhasm: int6464 z2 | ||
129 | |||
130 | # qhasm: int6464 z3 | ||
131 | |||
132 | # qhasm: int6464 z4 | ||
133 | |||
134 | # qhasm: int6464 z5 | ||
135 | |||
136 | # qhasm: int6464 z6 | ||
137 | |||
138 | # qhasm: int6464 z7 | ||
139 | |||
140 | # qhasm: int6464 z8 | ||
141 | |||
142 | # qhasm: int6464 z9 | ||
143 | |||
144 | # qhasm: int6464 z10 | ||
145 | |||
146 | # qhasm: int6464 z11 | ||
147 | |||
148 | # qhasm: int6464 z12 | ||
149 | |||
150 | # qhasm: int6464 z13 | ||
151 | |||
152 | # qhasm: int6464 z14 | ||
153 | |||
154 | # qhasm: int6464 z15 | ||
155 | |||
156 | # qhasm: stack128 z0_stack | ||
157 | |||
158 | # qhasm: stack128 z1_stack | ||
159 | |||
160 | # qhasm: stack128 z2_stack | ||
161 | |||
162 | # qhasm: stack128 z3_stack | ||
163 | |||
164 | # qhasm: stack128 z4_stack | ||
165 | |||
166 | # qhasm: stack128 z5_stack | ||
167 | |||
168 | # qhasm: stack128 z6_stack | ||
169 | |||
170 | # qhasm: stack128 z7_stack | ||
171 | |||
172 | # qhasm: stack128 z8_stack | ||
173 | |||
174 | # qhasm: stack128 z9_stack | ||
175 | |||
176 | # qhasm: stack128 z10_stack | ||
177 | |||
178 | # qhasm: stack128 z11_stack | ||
179 | |||
180 | # qhasm: stack128 z12_stack | ||
181 | |||
182 | # qhasm: stack128 z13_stack | ||
183 | |||
184 | # qhasm: stack128 z14_stack | ||
185 | |||
186 | # qhasm: stack128 z15_stack | ||
187 | |||
188 | # qhasm: stack128 orig0 | ||
189 | |||
190 | # qhasm: stack128 orig1 | ||
191 | |||
192 | # qhasm: stack128 orig2 | ||
193 | |||
194 | # qhasm: stack128 orig3 | ||
195 | |||
196 | # qhasm: stack128 orig4 | ||
197 | |||
198 | # qhasm: stack128 orig5 | ||
199 | |||
200 | # qhasm: stack128 orig6 | ||
201 | |||
202 | # qhasm: stack128 orig7 | ||
203 | |||
204 | # qhasm: stack128 orig8 | ||
205 | |||
206 | # qhasm: stack128 orig9 | ||
207 | |||
208 | # qhasm: stack128 orig10 | ||
209 | |||
210 | # qhasm: stack128 orig11 | ||
211 | |||
212 | # qhasm: stack128 orig12 | ||
213 | |||
214 | # qhasm: stack128 orig13 | ||
215 | |||
216 | # qhasm: stack128 orig14 | ||
217 | |||
218 | # qhasm: stack128 orig15 | ||
219 | |||
220 | # qhasm: int6464 p | ||
221 | |||
222 | # qhasm: int6464 q | ||
223 | |||
224 | # qhasm: int6464 r | ||
225 | |||
226 | # qhasm: int6464 s | ||
227 | |||
228 | # qhasm: int6464 t | ||
229 | |||
230 | # qhasm: int6464 u | ||
231 | |||
232 | # qhasm: int6464 v | ||
233 | |||
234 | # qhasm: int6464 w | ||
235 | |||
236 | # qhasm: int6464 mp | ||
237 | |||
238 | # qhasm: int6464 mq | ||
239 | |||
240 | # qhasm: int6464 mr | ||
241 | |||
242 | # qhasm: int6464 ms | ||
243 | |||
244 | # qhasm: int6464 mt | ||
245 | |||
246 | # qhasm: int6464 mu | ||
247 | |||
248 | # qhasm: int6464 mv | ||
249 | |||
250 | # qhasm: int6464 mw | ||
251 | |||
252 | # qhasm: int32 in0 | ||
253 | |||
254 | # qhasm: int32 in1 | ||
255 | |||
256 | # qhasm: int32 in2 | ||
257 | |||
258 | # qhasm: int32 in3 | ||
259 | |||
260 | # qhasm: int32 in4 | ||
261 | |||
262 | # qhasm: int32 in5 | ||
263 | |||
264 | # qhasm: int32 in6 | ||
265 | |||
266 | # qhasm: int32 in7 | ||
267 | |||
268 | # qhasm: int32 in8 | ||
269 | |||
270 | # qhasm: int32 in9 | ||
271 | |||
272 | # qhasm: int32 in10 | ||
273 | |||
274 | # qhasm: int32 in11 | ||
275 | |||
276 | # qhasm: int32 in12 | ||
277 | |||
278 | # qhasm: int32 in13 | ||
279 | |||
280 | # qhasm: int32 in14 | ||
281 | |||
282 | # qhasm: int32 in15 | ||
283 | |||
284 | # qhasm: stack512 tmp | ||
285 | |||
286 | # qhasm: stack32 ctarget | ||
287 | |||
288 | # qhasm: enter crypto_stream_salsa208_x86_xmm5 | ||
289 | .text | ||
290 | .p2align 5 | ||
291 | .globl _crypto_stream_salsa208_x86_xmm5 | ||
292 | .globl crypto_stream_salsa208_x86_xmm5 | ||
293 | _crypto_stream_salsa208_x86_xmm5: | ||
294 | crypto_stream_salsa208_x86_xmm5: | ||
295 | mov %esp,%eax | ||
296 | and $31,%eax | ||
297 | add $704,%eax | ||
298 | sub %eax,%esp | ||
299 | |||
300 | # qhasm: eax_stack = eax | ||
301 | # asm 1: movl <eax=int32#1,>eax_stack=stack32#1 | ||
302 | # asm 2: movl <eax=%eax,>eax_stack=0(%esp) | ||
303 | movl %eax,0(%esp) | ||
304 | |||
305 | # qhasm: ebx_stack = ebx | ||
306 | # asm 1: movl <ebx=int32#4,>ebx_stack=stack32#2 | ||
307 | # asm 2: movl <ebx=%ebx,>ebx_stack=4(%esp) | ||
308 | movl %ebx,4(%esp) | ||
309 | |||
310 | # qhasm: esi_stack = esi | ||
311 | # asm 1: movl <esi=int32#5,>esi_stack=stack32#3 | ||
312 | # asm 2: movl <esi=%esi,>esi_stack=8(%esp) | ||
313 | movl %esi,8(%esp) | ||
314 | |||
315 | # qhasm: edi_stack = edi | ||
316 | # asm 1: movl <edi=int32#6,>edi_stack=stack32#4 | ||
317 | # asm 2: movl <edi=%edi,>edi_stack=12(%esp) | ||
318 | movl %edi,12(%esp) | ||
319 | |||
320 | # qhasm: ebp_stack = ebp | ||
321 | # asm 1: movl <ebp=int32#7,>ebp_stack=stack32#5 | ||
322 | # asm 2: movl <ebp=%ebp,>ebp_stack=16(%esp) | ||
323 | movl %ebp,16(%esp) | ||
324 | |||
325 | # qhasm: bytes = arg2 | ||
326 | # asm 1: movl <arg2=stack32#-2,>bytes=int32#3 | ||
327 | # asm 2: movl <arg2=8(%esp,%eax),>bytes=%edx | ||
328 | movl 8(%esp,%eax),%edx | ||
329 | |||
330 | # qhasm: out = arg1 | ||
331 | # asm 1: movl <arg1=stack32#-1,>out=int32#6 | ||
332 | # asm 2: movl <arg1=4(%esp,%eax),>out=%edi | ||
333 | movl 4(%esp,%eax),%edi | ||
334 | |||
335 | # qhasm: m = out | ||
336 | # asm 1: mov <out=int32#6,>m=int32#5 | ||
337 | # asm 2: mov <out=%edi,>m=%esi | ||
338 | mov %edi,%esi | ||
339 | |||
340 | # qhasm: iv = arg4 | ||
341 | # asm 1: movl <arg4=stack32#-4,>iv=int32#4 | ||
342 | # asm 2: movl <arg4=16(%esp,%eax),>iv=%ebx | ||
343 | movl 16(%esp,%eax),%ebx | ||
344 | |||
345 | # qhasm: k = arg5 | ||
346 | # asm 1: movl <arg5=stack32#-5,>k=int32#7 | ||
347 | # asm 2: movl <arg5=20(%esp,%eax),>k=%ebp | ||
348 | movl 20(%esp,%eax),%ebp | ||
349 | |||
350 | # qhasm: unsigned>? bytes - 0 | ||
351 | # asm 1: cmp $0,<bytes=int32#3 | ||
352 | # asm 2: cmp $0,<bytes=%edx | ||
353 | cmp $0,%edx | ||
354 | # comment:fp stack unchanged by jump | ||
355 | |||
356 | # qhasm: goto done if !unsigned> | ||
357 | jbe ._done | ||
358 | |||
359 | # qhasm: a = 0 | ||
360 | # asm 1: mov $0,>a=int32#1 | ||
361 | # asm 2: mov $0,>a=%eax | ||
362 | mov $0,%eax | ||
363 | |||
364 | # qhasm: i = bytes | ||
365 | # asm 1: mov <bytes=int32#3,>i=int32#2 | ||
366 | # asm 2: mov <bytes=%edx,>i=%ecx | ||
367 | mov %edx,%ecx | ||
368 | |||
369 | # qhasm: while (i) { *out++ = a; --i } | ||
370 | rep stosb | ||
371 | |||
372 | # qhasm: out -= bytes | ||
373 | # asm 1: subl <bytes=int32#3,<out=int32#6 | ||
374 | # asm 2: subl <bytes=%edx,<out=%edi | ||
375 | subl %edx,%edi | ||
376 | # comment:fp stack unchanged by jump | ||
377 | |||
378 | # qhasm: goto start | ||
379 | jmp ._start | ||
380 | |||
381 | # qhasm: enter crypto_stream_salsa208_x86_xmm5_xor | ||
382 | .text | ||
383 | .p2align 5 | ||
384 | .globl _crypto_stream_salsa208_x86_xmm5_xor | ||
385 | .globl crypto_stream_salsa208_x86_xmm5_xor | ||
386 | _crypto_stream_salsa208_x86_xmm5_xor: | ||
387 | crypto_stream_salsa208_x86_xmm5_xor: | ||
388 | mov %esp,%eax | ||
389 | and $31,%eax | ||
390 | add $704,%eax | ||
391 | sub %eax,%esp | ||
392 | |||
393 | # qhasm: eax_stack = eax | ||
394 | # asm 1: movl <eax=int32#1,>eax_stack=stack32#1 | ||
395 | # asm 2: movl <eax=%eax,>eax_stack=0(%esp) | ||
396 | movl %eax,0(%esp) | ||
397 | |||
398 | # qhasm: ebx_stack = ebx | ||
399 | # asm 1: movl <ebx=int32#4,>ebx_stack=stack32#2 | ||
400 | # asm 2: movl <ebx=%ebx,>ebx_stack=4(%esp) | ||
401 | movl %ebx,4(%esp) | ||
402 | |||
403 | # qhasm: esi_stack = esi | ||
404 | # asm 1: movl <esi=int32#5,>esi_stack=stack32#3 | ||
405 | # asm 2: movl <esi=%esi,>esi_stack=8(%esp) | ||
406 | movl %esi,8(%esp) | ||
407 | |||
408 | # qhasm: edi_stack = edi | ||
409 | # asm 1: movl <edi=int32#6,>edi_stack=stack32#4 | ||
410 | # asm 2: movl <edi=%edi,>edi_stack=12(%esp) | ||
411 | movl %edi,12(%esp) | ||
412 | |||
413 | # qhasm: ebp_stack = ebp | ||
414 | # asm 1: movl <ebp=int32#7,>ebp_stack=stack32#5 | ||
415 | # asm 2: movl <ebp=%ebp,>ebp_stack=16(%esp) | ||
416 | movl %ebp,16(%esp) | ||
417 | |||
418 | # qhasm: out = arg1 | ||
419 | # asm 1: movl <arg1=stack32#-1,>out=int32#6 | ||
420 | # asm 2: movl <arg1=4(%esp,%eax),>out=%edi | ||
421 | movl 4(%esp,%eax),%edi | ||
422 | |||
423 | # qhasm: m = arg2 | ||
424 | # asm 1: movl <arg2=stack32#-2,>m=int32#5 | ||
425 | # asm 2: movl <arg2=8(%esp,%eax),>m=%esi | ||
426 | movl 8(%esp,%eax),%esi | ||
427 | |||
428 | # qhasm: bytes = arg3 | ||
429 | # asm 1: movl <arg3=stack32#-3,>bytes=int32#3 | ||
430 | # asm 2: movl <arg3=12(%esp,%eax),>bytes=%edx | ||
431 | movl 12(%esp,%eax),%edx | ||
432 | |||
433 | # qhasm: iv = arg5 | ||
434 | # asm 1: movl <arg5=stack32#-5,>iv=int32#4 | ||
435 | # asm 2: movl <arg5=20(%esp,%eax),>iv=%ebx | ||
436 | movl 20(%esp,%eax),%ebx | ||
437 | |||
438 | # qhasm: k = arg6 | ||
439 | # asm 1: movl <arg6=stack32#-6,>k=int32#7 | ||
440 | # asm 2: movl <arg6=24(%esp,%eax),>k=%ebp | ||
441 | movl 24(%esp,%eax),%ebp | ||
442 | |||
443 | # qhasm: unsigned>? bytes - 0 | ||
444 | # asm 1: cmp $0,<bytes=int32#3 | ||
445 | # asm 2: cmp $0,<bytes=%edx | ||
446 | cmp $0,%edx | ||
447 | # comment:fp stack unchanged by jump | ||
448 | |||
449 | # qhasm: goto done if !unsigned> | ||
450 | jbe ._done | ||
451 | # comment:fp stack unchanged by fallthrough | ||
452 | |||
453 | # qhasm: start: | ||
454 | ._start: | ||
455 | |||
456 | # qhasm: out_stack = out | ||
457 | # asm 1: movl <out=int32#6,>out_stack=stack32#6 | ||
458 | # asm 2: movl <out=%edi,>out_stack=20(%esp) | ||
459 | movl %edi,20(%esp) | ||
460 | |||
461 | # qhasm: bytes_stack = bytes | ||
462 | # asm 1: movl <bytes=int32#3,>bytes_stack=stack32#7 | ||
463 | # asm 2: movl <bytes=%edx,>bytes_stack=24(%esp) | ||
464 | movl %edx,24(%esp) | ||
465 | |||
466 | # qhasm: in4 = *(uint32 *) (k + 12) | ||
467 | # asm 1: movl 12(<k=int32#7),>in4=int32#1 | ||
468 | # asm 2: movl 12(<k=%ebp),>in4=%eax | ||
469 | movl 12(%ebp),%eax | ||
470 | |||
471 | # qhasm: in12 = *(uint32 *) (k + 20) | ||
472 | # asm 1: movl 20(<k=int32#7),>in12=int32#2 | ||
473 | # asm 2: movl 20(<k=%ebp),>in12=%ecx | ||
474 | movl 20(%ebp),%ecx | ||
475 | |||
476 | # qhasm: ((uint32 *)&x3)[0] = in4 | ||
477 | # asm 1: movl <in4=int32#1,>x3=stack128#1 | ||
478 | # asm 2: movl <in4=%eax,>x3=32(%esp) | ||
479 | movl %eax,32(%esp) | ||
480 | |||
481 | # qhasm: ((uint32 *)&x1)[0] = in12 | ||
482 | # asm 1: movl <in12=int32#2,>x1=stack128#2 | ||
483 | # asm 2: movl <in12=%ecx,>x1=48(%esp) | ||
484 | movl %ecx,48(%esp) | ||
485 | |||
486 | # qhasm: in0 = 1634760805 | ||
487 | # asm 1: mov $1634760805,>in0=int32#1 | ||
488 | # asm 2: mov $1634760805,>in0=%eax | ||
489 | mov $1634760805,%eax | ||
490 | |||
491 | # qhasm: in8 = 0 | ||
492 | # asm 1: mov $0,>in8=int32#2 | ||
493 | # asm 2: mov $0,>in8=%ecx | ||
494 | mov $0,%ecx | ||
495 | |||
496 | # qhasm: ((uint32 *)&x0)[0] = in0 | ||
497 | # asm 1: movl <in0=int32#1,>x0=stack128#3 | ||
498 | # asm 2: movl <in0=%eax,>x0=64(%esp) | ||
499 | movl %eax,64(%esp) | ||
500 | |||
501 | # qhasm: ((uint32 *)&x2)[0] = in8 | ||
502 | # asm 1: movl <in8=int32#2,>x2=stack128#4 | ||
503 | # asm 2: movl <in8=%ecx,>x2=80(%esp) | ||
504 | movl %ecx,80(%esp) | ||
505 | |||
506 | # qhasm: in6 = *(uint32 *) (iv + 0) | ||
507 | # asm 1: movl 0(<iv=int32#4),>in6=int32#1 | ||
508 | # asm 2: movl 0(<iv=%ebx),>in6=%eax | ||
509 | movl 0(%ebx),%eax | ||
510 | |||
511 | # qhasm: in7 = *(uint32 *) (iv + 4) | ||
512 | # asm 1: movl 4(<iv=int32#4),>in7=int32#2 | ||
513 | # asm 2: movl 4(<iv=%ebx),>in7=%ecx | ||
514 | movl 4(%ebx),%ecx | ||
515 | |||
516 | # qhasm: ((uint32 *)&x1)[2] = in6 | ||
517 | # asm 1: movl <in6=int32#1,8+<x1=stack128#2 | ||
518 | # asm 2: movl <in6=%eax,8+<x1=48(%esp) | ||
519 | movl %eax,8+48(%esp) | ||
520 | |||
521 | # qhasm: ((uint32 *)&x2)[3] = in7 | ||
522 | # asm 1: movl <in7=int32#2,12+<x2=stack128#4 | ||
523 | # asm 2: movl <in7=%ecx,12+<x2=80(%esp) | ||
524 | movl %ecx,12+80(%esp) | ||
525 | |||
526 | # qhasm: in9 = 0 | ||
527 | # asm 1: mov $0,>in9=int32#1 | ||
528 | # asm 2: mov $0,>in9=%eax | ||
529 | mov $0,%eax | ||
530 | |||
531 | # qhasm: in10 = 2036477234 | ||
532 | # asm 1: mov $2036477234,>in10=int32#2 | ||
533 | # asm 2: mov $2036477234,>in10=%ecx | ||
534 | mov $2036477234,%ecx | ||
535 | |||
536 | # qhasm: ((uint32 *)&x3)[1] = in9 | ||
537 | # asm 1: movl <in9=int32#1,4+<x3=stack128#1 | ||
538 | # asm 2: movl <in9=%eax,4+<x3=32(%esp) | ||
539 | movl %eax,4+32(%esp) | ||
540 | |||
541 | # qhasm: ((uint32 *)&x0)[2] = in10 | ||
542 | # asm 1: movl <in10=int32#2,8+<x0=stack128#3 | ||
543 | # asm 2: movl <in10=%ecx,8+<x0=64(%esp) | ||
544 | movl %ecx,8+64(%esp) | ||
545 | |||
546 | # qhasm: in1 = *(uint32 *) (k + 0) | ||
547 | # asm 1: movl 0(<k=int32#7),>in1=int32#1 | ||
548 | # asm 2: movl 0(<k=%ebp),>in1=%eax | ||
549 | movl 0(%ebp),%eax | ||
550 | |||
551 | # qhasm: in2 = *(uint32 *) (k + 4) | ||
552 | # asm 1: movl 4(<k=int32#7),>in2=int32#2 | ||
553 | # asm 2: movl 4(<k=%ebp),>in2=%ecx | ||
554 | movl 4(%ebp),%ecx | ||
555 | |||
556 | # qhasm: in3 = *(uint32 *) (k + 8) | ||
557 | # asm 1: movl 8(<k=int32#7),>in3=int32#3 | ||
558 | # asm 2: movl 8(<k=%ebp),>in3=%edx | ||
559 | movl 8(%ebp),%edx | ||
560 | |||
561 | # qhasm: in5 = 857760878 | ||
562 | # asm 1: mov $857760878,>in5=int32#4 | ||
563 | # asm 2: mov $857760878,>in5=%ebx | ||
564 | mov $857760878,%ebx | ||
565 | |||
566 | # qhasm: ((uint32 *)&x1)[1] = in1 | ||
567 | # asm 1: movl <in1=int32#1,4+<x1=stack128#2 | ||
568 | # asm 2: movl <in1=%eax,4+<x1=48(%esp) | ||
569 | movl %eax,4+48(%esp) | ||
570 | |||
571 | # qhasm: ((uint32 *)&x2)[2] = in2 | ||
572 | # asm 1: movl <in2=int32#2,8+<x2=stack128#4 | ||
573 | # asm 2: movl <in2=%ecx,8+<x2=80(%esp) | ||
574 | movl %ecx,8+80(%esp) | ||
575 | |||
576 | # qhasm: ((uint32 *)&x3)[3] = in3 | ||
577 | # asm 1: movl <in3=int32#3,12+<x3=stack128#1 | ||
578 | # asm 2: movl <in3=%edx,12+<x3=32(%esp) | ||
579 | movl %edx,12+32(%esp) | ||
580 | |||
581 | # qhasm: ((uint32 *)&x0)[1] = in5 | ||
582 | # asm 1: movl <in5=int32#4,4+<x0=stack128#3 | ||
583 | # asm 2: movl <in5=%ebx,4+<x0=64(%esp) | ||
584 | movl %ebx,4+64(%esp) | ||
585 | |||
586 | # qhasm: in11 = *(uint32 *) (k + 16) | ||
587 | # asm 1: movl 16(<k=int32#7),>in11=int32#1 | ||
588 | # asm 2: movl 16(<k=%ebp),>in11=%eax | ||
589 | movl 16(%ebp),%eax | ||
590 | |||
591 | # qhasm: in13 = *(uint32 *) (k + 24) | ||
592 | # asm 1: movl 24(<k=int32#7),>in13=int32#2 | ||
593 | # asm 2: movl 24(<k=%ebp),>in13=%ecx | ||
594 | movl 24(%ebp),%ecx | ||
595 | |||
596 | # qhasm: in14 = *(uint32 *) (k + 28) | ||
597 | # asm 1: movl 28(<k=int32#7),>in14=int32#3 | ||
598 | # asm 2: movl 28(<k=%ebp),>in14=%edx | ||
599 | movl 28(%ebp),%edx | ||
600 | |||
601 | # qhasm: in15 = 1797285236 | ||
602 | # asm 1: mov $1797285236,>in15=int32#4 | ||
603 | # asm 2: mov $1797285236,>in15=%ebx | ||
604 | mov $1797285236,%ebx | ||
605 | |||
606 | # qhasm: ((uint32 *)&x1)[3] = in11 | ||
607 | # asm 1: movl <in11=int32#1,12+<x1=stack128#2 | ||
608 | # asm 2: movl <in11=%eax,12+<x1=48(%esp) | ||
609 | movl %eax,12+48(%esp) | ||
610 | |||
611 | # qhasm: ((uint32 *)&x2)[1] = in13 | ||
612 | # asm 1: movl <in13=int32#2,4+<x2=stack128#4 | ||
613 | # asm 2: movl <in13=%ecx,4+<x2=80(%esp) | ||
614 | movl %ecx,4+80(%esp) | ||
615 | |||
616 | # qhasm: ((uint32 *)&x3)[2] = in14 | ||
617 | # asm 1: movl <in14=int32#3,8+<x3=stack128#1 | ||
618 | # asm 2: movl <in14=%edx,8+<x3=32(%esp) | ||
619 | movl %edx,8+32(%esp) | ||
620 | |||
621 | # qhasm: ((uint32 *)&x0)[3] = in15 | ||
622 | # asm 1: movl <in15=int32#4,12+<x0=stack128#3 | ||
623 | # asm 2: movl <in15=%ebx,12+<x0=64(%esp) | ||
624 | movl %ebx,12+64(%esp) | ||
625 | |||
626 | # qhasm: bytes = bytes_stack | ||
627 | # asm 1: movl <bytes_stack=stack32#7,>bytes=int32#1 | ||
628 | # asm 2: movl <bytes_stack=24(%esp),>bytes=%eax | ||
629 | movl 24(%esp),%eax | ||
630 | |||
631 | # qhasm: unsigned<? bytes - 256 | ||
632 | # asm 1: cmp $256,<bytes=int32#1 | ||
633 | # asm 2: cmp $256,<bytes=%eax | ||
634 | cmp $256,%eax | ||
635 | # comment:fp stack unchanged by jump | ||
636 | |||
637 | # qhasm: goto bytesbetween1and255 if unsigned< | ||
638 | jb ._bytesbetween1and255 | ||
639 | |||
640 | # qhasm: z0 = x0 | ||
641 | # asm 1: movdqa <x0=stack128#3,>z0=int6464#1 | ||
642 | # asm 2: movdqa <x0=64(%esp),>z0=%xmm0 | ||
643 | movdqa 64(%esp),%xmm0 | ||
644 | |||
645 | # qhasm: z5 = z0[1,1,1,1] | ||
646 | # asm 1: pshufd $0x55,<z0=int6464#1,>z5=int6464#2 | ||
647 | # asm 2: pshufd $0x55,<z0=%xmm0,>z5=%xmm1 | ||
648 | pshufd $0x55,%xmm0,%xmm1 | ||
649 | |||
650 | # qhasm: z10 = z0[2,2,2,2] | ||
651 | # asm 1: pshufd $0xaa,<z0=int6464#1,>z10=int6464#3 | ||
652 | # asm 2: pshufd $0xaa,<z0=%xmm0,>z10=%xmm2 | ||
653 | pshufd $0xaa,%xmm0,%xmm2 | ||
654 | |||
655 | # qhasm: z15 = z0[3,3,3,3] | ||
656 | # asm 1: pshufd $0xff,<z0=int6464#1,>z15=int6464#4 | ||
657 | # asm 2: pshufd $0xff,<z0=%xmm0,>z15=%xmm3 | ||
658 | pshufd $0xff,%xmm0,%xmm3 | ||
659 | |||
660 | # qhasm: z0 = z0[0,0,0,0] | ||
661 | # asm 1: pshufd $0x00,<z0=int6464#1,>z0=int6464#1 | ||
662 | # asm 2: pshufd $0x00,<z0=%xmm0,>z0=%xmm0 | ||
663 | pshufd $0x00,%xmm0,%xmm0 | ||
664 | |||
665 | # qhasm: orig5 = z5 | ||
666 | # asm 1: movdqa <z5=int6464#2,>orig5=stack128#5 | ||
667 | # asm 2: movdqa <z5=%xmm1,>orig5=96(%esp) | ||
668 | movdqa %xmm1,96(%esp) | ||
669 | |||
670 | # qhasm: orig10 = z10 | ||
671 | # asm 1: movdqa <z10=int6464#3,>orig10=stack128#6 | ||
672 | # asm 2: movdqa <z10=%xmm2,>orig10=112(%esp) | ||
673 | movdqa %xmm2,112(%esp) | ||
674 | |||
675 | # qhasm: orig15 = z15 | ||
676 | # asm 1: movdqa <z15=int6464#4,>orig15=stack128#7 | ||
677 | # asm 2: movdqa <z15=%xmm3,>orig15=128(%esp) | ||
678 | movdqa %xmm3,128(%esp) | ||
679 | |||
680 | # qhasm: orig0 = z0 | ||
681 | # asm 1: movdqa <z0=int6464#1,>orig0=stack128#8 | ||
682 | # asm 2: movdqa <z0=%xmm0,>orig0=144(%esp) | ||
683 | movdqa %xmm0,144(%esp) | ||
684 | |||
685 | # qhasm: z1 = x1 | ||
686 | # asm 1: movdqa <x1=stack128#2,>z1=int6464#1 | ||
687 | # asm 2: movdqa <x1=48(%esp),>z1=%xmm0 | ||
688 | movdqa 48(%esp),%xmm0 | ||
689 | |||
690 | # qhasm: z6 = z1[2,2,2,2] | ||
691 | # asm 1: pshufd $0xaa,<z1=int6464#1,>z6=int6464#2 | ||
692 | # asm 2: pshufd $0xaa,<z1=%xmm0,>z6=%xmm1 | ||
693 | pshufd $0xaa,%xmm0,%xmm1 | ||
694 | |||
695 | # qhasm: z11 = z1[3,3,3,3] | ||
696 | # asm 1: pshufd $0xff,<z1=int6464#1,>z11=int6464#3 | ||
697 | # asm 2: pshufd $0xff,<z1=%xmm0,>z11=%xmm2 | ||
698 | pshufd $0xff,%xmm0,%xmm2 | ||
699 | |||
700 | # qhasm: z12 = z1[0,0,0,0] | ||
701 | # asm 1: pshufd $0x00,<z1=int6464#1,>z12=int6464#4 | ||
702 | # asm 2: pshufd $0x00,<z1=%xmm0,>z12=%xmm3 | ||
703 | pshufd $0x00,%xmm0,%xmm3 | ||
704 | |||
705 | # qhasm: z1 = z1[1,1,1,1] | ||
706 | # asm 1: pshufd $0x55,<z1=int6464#1,>z1=int6464#1 | ||
707 | # asm 2: pshufd $0x55,<z1=%xmm0,>z1=%xmm0 | ||
708 | pshufd $0x55,%xmm0,%xmm0 | ||
709 | |||
710 | # qhasm: orig6 = z6 | ||
711 | # asm 1: movdqa <z6=int6464#2,>orig6=stack128#9 | ||
712 | # asm 2: movdqa <z6=%xmm1,>orig6=160(%esp) | ||
713 | movdqa %xmm1,160(%esp) | ||
714 | |||
715 | # qhasm: orig11 = z11 | ||
716 | # asm 1: movdqa <z11=int6464#3,>orig11=stack128#10 | ||
717 | # asm 2: movdqa <z11=%xmm2,>orig11=176(%esp) | ||
718 | movdqa %xmm2,176(%esp) | ||
719 | |||
720 | # qhasm: orig12 = z12 | ||
721 | # asm 1: movdqa <z12=int6464#4,>orig12=stack128#11 | ||
722 | # asm 2: movdqa <z12=%xmm3,>orig12=192(%esp) | ||
723 | movdqa %xmm3,192(%esp) | ||
724 | |||
725 | # qhasm: orig1 = z1 | ||
726 | # asm 1: movdqa <z1=int6464#1,>orig1=stack128#12 | ||
727 | # asm 2: movdqa <z1=%xmm0,>orig1=208(%esp) | ||
728 | movdqa %xmm0,208(%esp) | ||
729 | |||
730 | # qhasm: z2 = x2 | ||
731 | # asm 1: movdqa <x2=stack128#4,>z2=int6464#1 | ||
732 | # asm 2: movdqa <x2=80(%esp),>z2=%xmm0 | ||
733 | movdqa 80(%esp),%xmm0 | ||
734 | |||
735 | # qhasm: z7 = z2[3,3,3,3] | ||
736 | # asm 1: pshufd $0xff,<z2=int6464#1,>z7=int6464#2 | ||
737 | # asm 2: pshufd $0xff,<z2=%xmm0,>z7=%xmm1 | ||
738 | pshufd $0xff,%xmm0,%xmm1 | ||
739 | |||
740 | # qhasm: z13 = z2[1,1,1,1] | ||
741 | # asm 1: pshufd $0x55,<z2=int6464#1,>z13=int6464#3 | ||
742 | # asm 2: pshufd $0x55,<z2=%xmm0,>z13=%xmm2 | ||
743 | pshufd $0x55,%xmm0,%xmm2 | ||
744 | |||
745 | # qhasm: z2 = z2[2,2,2,2] | ||
746 | # asm 1: pshufd $0xaa,<z2=int6464#1,>z2=int6464#1 | ||
747 | # asm 2: pshufd $0xaa,<z2=%xmm0,>z2=%xmm0 | ||
748 | pshufd $0xaa,%xmm0,%xmm0 | ||
749 | |||
750 | # qhasm: orig7 = z7 | ||
751 | # asm 1: movdqa <z7=int6464#2,>orig7=stack128#13 | ||
752 | # asm 2: movdqa <z7=%xmm1,>orig7=224(%esp) | ||
753 | movdqa %xmm1,224(%esp) | ||
754 | |||
755 | # qhasm: orig13 = z13 | ||
756 | # asm 1: movdqa <z13=int6464#3,>orig13=stack128#14 | ||
757 | # asm 2: movdqa <z13=%xmm2,>orig13=240(%esp) | ||
758 | movdqa %xmm2,240(%esp) | ||
759 | |||
760 | # qhasm: orig2 = z2 | ||
761 | # asm 1: movdqa <z2=int6464#1,>orig2=stack128#15 | ||
762 | # asm 2: movdqa <z2=%xmm0,>orig2=256(%esp) | ||
763 | movdqa %xmm0,256(%esp) | ||
764 | |||
765 | # qhasm: z3 = x3 | ||
766 | # asm 1: movdqa <x3=stack128#1,>z3=int6464#1 | ||
767 | # asm 2: movdqa <x3=32(%esp),>z3=%xmm0 | ||
768 | movdqa 32(%esp),%xmm0 | ||
769 | |||
770 | # qhasm: z4 = z3[0,0,0,0] | ||
771 | # asm 1: pshufd $0x00,<z3=int6464#1,>z4=int6464#2 | ||
772 | # asm 2: pshufd $0x00,<z3=%xmm0,>z4=%xmm1 | ||
773 | pshufd $0x00,%xmm0,%xmm1 | ||
774 | |||
775 | # qhasm: z14 = z3[2,2,2,2] | ||
776 | # asm 1: pshufd $0xaa,<z3=int6464#1,>z14=int6464#3 | ||
777 | # asm 2: pshufd $0xaa,<z3=%xmm0,>z14=%xmm2 | ||
778 | pshufd $0xaa,%xmm0,%xmm2 | ||
779 | |||
780 | # qhasm: z3 = z3[3,3,3,3] | ||
781 | # asm 1: pshufd $0xff,<z3=int6464#1,>z3=int6464#1 | ||
782 | # asm 2: pshufd $0xff,<z3=%xmm0,>z3=%xmm0 | ||
783 | pshufd $0xff,%xmm0,%xmm0 | ||
784 | |||
785 | # qhasm: orig4 = z4 | ||
786 | # asm 1: movdqa <z4=int6464#2,>orig4=stack128#16 | ||
787 | # asm 2: movdqa <z4=%xmm1,>orig4=272(%esp) | ||
788 | movdqa %xmm1,272(%esp) | ||
789 | |||
790 | # qhasm: orig14 = z14 | ||
791 | # asm 1: movdqa <z14=int6464#3,>orig14=stack128#17 | ||
792 | # asm 2: movdqa <z14=%xmm2,>orig14=288(%esp) | ||
793 | movdqa %xmm2,288(%esp) | ||
794 | |||
795 | # qhasm: orig3 = z3 | ||
796 | # asm 1: movdqa <z3=int6464#1,>orig3=stack128#18 | ||
797 | # asm 2: movdqa <z3=%xmm0,>orig3=304(%esp) | ||
798 | movdqa %xmm0,304(%esp) | ||
799 | |||
800 | # qhasm: bytesatleast256: | ||
801 | ._bytesatleast256: | ||
802 | |||
803 | # qhasm: in8 = ((uint32 *)&x2)[0] | ||
804 | # asm 1: movl <x2=stack128#4,>in8=int32#2 | ||
805 | # asm 2: movl <x2=80(%esp),>in8=%ecx | ||
806 | movl 80(%esp),%ecx | ||
807 | |||
808 | # qhasm: in9 = ((uint32 *)&x3)[1] | ||
809 | # asm 1: movl 4+<x3=stack128#1,>in9=int32#3 | ||
810 | # asm 2: movl 4+<x3=32(%esp),>in9=%edx | ||
811 | movl 4+32(%esp),%edx | ||
812 | |||
813 | # qhasm: ((uint32 *) &orig8)[0] = in8 | ||
814 | # asm 1: movl <in8=int32#2,>orig8=stack128#19 | ||
815 | # asm 2: movl <in8=%ecx,>orig8=320(%esp) | ||
816 | movl %ecx,320(%esp) | ||
817 | |||
818 | # qhasm: ((uint32 *) &orig9)[0] = in9 | ||
819 | # asm 1: movl <in9=int32#3,>orig9=stack128#20 | ||
820 | # asm 2: movl <in9=%edx,>orig9=336(%esp) | ||
821 | movl %edx,336(%esp) | ||
822 | |||
823 | # qhasm: carry? in8 += 1 | ||
824 | # asm 1: add $1,<in8=int32#2 | ||
825 | # asm 2: add $1,<in8=%ecx | ||
826 | add $1,%ecx | ||
827 | |||
828 | # qhasm: in9 += 0 + carry | ||
829 | # asm 1: adc $0,<in9=int32#3 | ||
830 | # asm 2: adc $0,<in9=%edx | ||
831 | adc $0,%edx | ||
832 | |||
833 | # qhasm: ((uint32 *) &orig8)[1] = in8 | ||
834 | # asm 1: movl <in8=int32#2,4+<orig8=stack128#19 | ||
835 | # asm 2: movl <in8=%ecx,4+<orig8=320(%esp) | ||
836 | movl %ecx,4+320(%esp) | ||
837 | |||
838 | # qhasm: ((uint32 *) &orig9)[1] = in9 | ||
839 | # asm 1: movl <in9=int32#3,4+<orig9=stack128#20 | ||
840 | # asm 2: movl <in9=%edx,4+<orig9=336(%esp) | ||
841 | movl %edx,4+336(%esp) | ||
842 | |||
843 | # qhasm: carry? in8 += 1 | ||
844 | # asm 1: add $1,<in8=int32#2 | ||
845 | # asm 2: add $1,<in8=%ecx | ||
846 | add $1,%ecx | ||
847 | |||
848 | # qhasm: in9 += 0 + carry | ||
849 | # asm 1: adc $0,<in9=int32#3 | ||
850 | # asm 2: adc $0,<in9=%edx | ||
851 | adc $0,%edx | ||
852 | |||
853 | # qhasm: ((uint32 *) &orig8)[2] = in8 | ||
854 | # asm 1: movl <in8=int32#2,8+<orig8=stack128#19 | ||
855 | # asm 2: movl <in8=%ecx,8+<orig8=320(%esp) | ||
856 | movl %ecx,8+320(%esp) | ||
857 | |||
858 | # qhasm: ((uint32 *) &orig9)[2] = in9 | ||
859 | # asm 1: movl <in9=int32#3,8+<orig9=stack128#20 | ||
860 | # asm 2: movl <in9=%edx,8+<orig9=336(%esp) | ||
861 | movl %edx,8+336(%esp) | ||
862 | |||
863 | # qhasm: carry? in8 += 1 | ||
864 | # asm 1: add $1,<in8=int32#2 | ||
865 | # asm 2: add $1,<in8=%ecx | ||
866 | add $1,%ecx | ||
867 | |||
868 | # qhasm: in9 += 0 + carry | ||
869 | # asm 1: adc $0,<in9=int32#3 | ||
870 | # asm 2: adc $0,<in9=%edx | ||
871 | adc $0,%edx | ||
872 | |||
873 | # qhasm: ((uint32 *) &orig8)[3] = in8 | ||
874 | # asm 1: movl <in8=int32#2,12+<orig8=stack128#19 | ||
875 | # asm 2: movl <in8=%ecx,12+<orig8=320(%esp) | ||
876 | movl %ecx,12+320(%esp) | ||
877 | |||
878 | # qhasm: ((uint32 *) &orig9)[3] = in9 | ||
879 | # asm 1: movl <in9=int32#3,12+<orig9=stack128#20 | ||
880 | # asm 2: movl <in9=%edx,12+<orig9=336(%esp) | ||
881 | movl %edx,12+336(%esp) | ||
882 | |||
883 | # qhasm: carry? in8 += 1 | ||
884 | # asm 1: add $1,<in8=int32#2 | ||
885 | # asm 2: add $1,<in8=%ecx | ||
886 | add $1,%ecx | ||
887 | |||
888 | # qhasm: in9 += 0 + carry | ||
889 | # asm 1: adc $0,<in9=int32#3 | ||
890 | # asm 2: adc $0,<in9=%edx | ||
891 | adc $0,%edx | ||
892 | |||
893 | # qhasm: ((uint32 *)&x2)[0] = in8 | ||
894 | # asm 1: movl <in8=int32#2,>x2=stack128#4 | ||
895 | # asm 2: movl <in8=%ecx,>x2=80(%esp) | ||
896 | movl %ecx,80(%esp) | ||
897 | |||
898 | # qhasm: ((uint32 *)&x3)[1] = in9 | ||
899 | # asm 1: movl <in9=int32#3,4+<x3=stack128#1 | ||
900 | # asm 2: movl <in9=%edx,4+<x3=32(%esp) | ||
901 | movl %edx,4+32(%esp) | ||
902 | |||
903 | # qhasm: bytes_stack = bytes | ||
904 | # asm 1: movl <bytes=int32#1,>bytes_stack=stack32#7 | ||
905 | # asm 2: movl <bytes=%eax,>bytes_stack=24(%esp) | ||
906 | movl %eax,24(%esp) | ||
907 | |||
908 | # qhasm: i = 8 | ||
909 | # asm 1: mov $8,>i=int32#1 | ||
910 | # asm 2: mov $8,>i=%eax | ||
911 | mov $8,%eax | ||
912 | |||
913 | # qhasm: z5 = orig5 | ||
914 | # asm 1: movdqa <orig5=stack128#5,>z5=int6464#1 | ||
915 | # asm 2: movdqa <orig5=96(%esp),>z5=%xmm0 | ||
916 | movdqa 96(%esp),%xmm0 | ||
917 | |||
918 | # qhasm: z10 = orig10 | ||
919 | # asm 1: movdqa <orig10=stack128#6,>z10=int6464#2 | ||
920 | # asm 2: movdqa <orig10=112(%esp),>z10=%xmm1 | ||
921 | movdqa 112(%esp),%xmm1 | ||
922 | |||
923 | # qhasm: z15 = orig15 | ||
924 | # asm 1: movdqa <orig15=stack128#7,>z15=int6464#3 | ||
925 | # asm 2: movdqa <orig15=128(%esp),>z15=%xmm2 | ||
926 | movdqa 128(%esp),%xmm2 | ||
927 | |||
928 | # qhasm: z14 = orig14 | ||
929 | # asm 1: movdqa <orig14=stack128#17,>z14=int6464#4 | ||
930 | # asm 2: movdqa <orig14=288(%esp),>z14=%xmm3 | ||
931 | movdqa 288(%esp),%xmm3 | ||
932 | |||
933 | # qhasm: z3 = orig3 | ||
934 | # asm 1: movdqa <orig3=stack128#18,>z3=int6464#5 | ||
935 | # asm 2: movdqa <orig3=304(%esp),>z3=%xmm4 | ||
936 | movdqa 304(%esp),%xmm4 | ||
937 | |||
938 | # qhasm: z6 = orig6 | ||
939 | # asm 1: movdqa <orig6=stack128#9,>z6=int6464#6 | ||
940 | # asm 2: movdqa <orig6=160(%esp),>z6=%xmm5 | ||
941 | movdqa 160(%esp),%xmm5 | ||
942 | |||
943 | # qhasm: z11 = orig11 | ||
944 | # asm 1: movdqa <orig11=stack128#10,>z11=int6464#7 | ||
945 | # asm 2: movdqa <orig11=176(%esp),>z11=%xmm6 | ||
946 | movdqa 176(%esp),%xmm6 | ||
947 | |||
948 | # qhasm: z1 = orig1 | ||
949 | # asm 1: movdqa <orig1=stack128#12,>z1=int6464#8 | ||
950 | # asm 2: movdqa <orig1=208(%esp),>z1=%xmm7 | ||
951 | movdqa 208(%esp),%xmm7 | ||
952 | |||
953 | # qhasm: z5_stack = z5 | ||
954 | # asm 1: movdqa <z5=int6464#1,>z5_stack=stack128#21 | ||
955 | # asm 2: movdqa <z5=%xmm0,>z5_stack=352(%esp) | ||
956 | movdqa %xmm0,352(%esp) | ||
957 | |||
958 | # qhasm: z10_stack = z10 | ||
959 | # asm 1: movdqa <z10=int6464#2,>z10_stack=stack128#22 | ||
960 | # asm 2: movdqa <z10=%xmm1,>z10_stack=368(%esp) | ||
961 | movdqa %xmm1,368(%esp) | ||
962 | |||
963 | # qhasm: z15_stack = z15 | ||
964 | # asm 1: movdqa <z15=int6464#3,>z15_stack=stack128#23 | ||
965 | # asm 2: movdqa <z15=%xmm2,>z15_stack=384(%esp) | ||
966 | movdqa %xmm2,384(%esp) | ||
967 | |||
968 | # qhasm: z14_stack = z14 | ||
969 | # asm 1: movdqa <z14=int6464#4,>z14_stack=stack128#24 | ||
970 | # asm 2: movdqa <z14=%xmm3,>z14_stack=400(%esp) | ||
971 | movdqa %xmm3,400(%esp) | ||
972 | |||
973 | # qhasm: z3_stack = z3 | ||
974 | # asm 1: movdqa <z3=int6464#5,>z3_stack=stack128#25 | ||
975 | # asm 2: movdqa <z3=%xmm4,>z3_stack=416(%esp) | ||
976 | movdqa %xmm4,416(%esp) | ||
977 | |||
978 | # qhasm: z6_stack = z6 | ||
979 | # asm 1: movdqa <z6=int6464#6,>z6_stack=stack128#26 | ||
980 | # asm 2: movdqa <z6=%xmm5,>z6_stack=432(%esp) | ||
981 | movdqa %xmm5,432(%esp) | ||
982 | |||
983 | # qhasm: z11_stack = z11 | ||
984 | # asm 1: movdqa <z11=int6464#7,>z11_stack=stack128#27 | ||
985 | # asm 2: movdqa <z11=%xmm6,>z11_stack=448(%esp) | ||
986 | movdqa %xmm6,448(%esp) | ||
987 | |||
988 | # qhasm: z1_stack = z1 | ||
989 | # asm 1: movdqa <z1=int6464#8,>z1_stack=stack128#28 | ||
990 | # asm 2: movdqa <z1=%xmm7,>z1_stack=464(%esp) | ||
991 | movdqa %xmm7,464(%esp) | ||
992 | |||
993 | # qhasm: z7 = orig7 | ||
994 | # asm 1: movdqa <orig7=stack128#13,>z7=int6464#5 | ||
995 | # asm 2: movdqa <orig7=224(%esp),>z7=%xmm4 | ||
996 | movdqa 224(%esp),%xmm4 | ||
997 | |||
998 | # qhasm: z13 = orig13 | ||
999 | # asm 1: movdqa <orig13=stack128#14,>z13=int6464#6 | ||
1000 | # asm 2: movdqa <orig13=240(%esp),>z13=%xmm5 | ||
1001 | movdqa 240(%esp),%xmm5 | ||
1002 | |||
1003 | # qhasm: z2 = orig2 | ||
1004 | # asm 1: movdqa <orig2=stack128#15,>z2=int6464#7 | ||
1005 | # asm 2: movdqa <orig2=256(%esp),>z2=%xmm6 | ||
1006 | movdqa 256(%esp),%xmm6 | ||
1007 | |||
1008 | # qhasm: z9 = orig9 | ||
1009 | # asm 1: movdqa <orig9=stack128#20,>z9=int6464#8 | ||
1010 | # asm 2: movdqa <orig9=336(%esp),>z9=%xmm7 | ||
1011 | movdqa 336(%esp),%xmm7 | ||
1012 | |||
1013 | # qhasm: p = orig0 | ||
1014 | # asm 1: movdqa <orig0=stack128#8,>p=int6464#1 | ||
1015 | # asm 2: movdqa <orig0=144(%esp),>p=%xmm0 | ||
1016 | movdqa 144(%esp),%xmm0 | ||
1017 | |||
1018 | # qhasm: t = orig12 | ||
1019 | # asm 1: movdqa <orig12=stack128#11,>t=int6464#3 | ||
1020 | # asm 2: movdqa <orig12=192(%esp),>t=%xmm2 | ||
1021 | movdqa 192(%esp),%xmm2 | ||
1022 | |||
1023 | # qhasm: q = orig4 | ||
1024 | # asm 1: movdqa <orig4=stack128#16,>q=int6464#4 | ||
1025 | # asm 2: movdqa <orig4=272(%esp),>q=%xmm3 | ||
1026 | movdqa 272(%esp),%xmm3 | ||
1027 | |||
1028 | # qhasm: r = orig8 | ||
1029 | # asm 1: movdqa <orig8=stack128#19,>r=int6464#2 | ||
1030 | # asm 2: movdqa <orig8=320(%esp),>r=%xmm1 | ||
1031 | movdqa 320(%esp),%xmm1 | ||
1032 | |||
1033 | # qhasm: z7_stack = z7 | ||
1034 | # asm 1: movdqa <z7=int6464#5,>z7_stack=stack128#29 | ||
1035 | # asm 2: movdqa <z7=%xmm4,>z7_stack=480(%esp) | ||
1036 | movdqa %xmm4,480(%esp) | ||
1037 | |||
1038 | # qhasm: z13_stack = z13 | ||
1039 | # asm 1: movdqa <z13=int6464#6,>z13_stack=stack128#30 | ||
1040 | # asm 2: movdqa <z13=%xmm5,>z13_stack=496(%esp) | ||
1041 | movdqa %xmm5,496(%esp) | ||
1042 | |||
1043 | # qhasm: z2_stack = z2 | ||
1044 | # asm 1: movdqa <z2=int6464#7,>z2_stack=stack128#31 | ||
1045 | # asm 2: movdqa <z2=%xmm6,>z2_stack=512(%esp) | ||
1046 | movdqa %xmm6,512(%esp) | ||
1047 | |||
1048 | # qhasm: z9_stack = z9 | ||
1049 | # asm 1: movdqa <z9=int6464#8,>z9_stack=stack128#32 | ||
1050 | # asm 2: movdqa <z9=%xmm7,>z9_stack=528(%esp) | ||
1051 | movdqa %xmm7,528(%esp) | ||
1052 | |||
1053 | # qhasm: z0_stack = p | ||
1054 | # asm 1: movdqa <p=int6464#1,>z0_stack=stack128#33 | ||
1055 | # asm 2: movdqa <p=%xmm0,>z0_stack=544(%esp) | ||
1056 | movdqa %xmm0,544(%esp) | ||
1057 | |||
1058 | # qhasm: z12_stack = t | ||
1059 | # asm 1: movdqa <t=int6464#3,>z12_stack=stack128#34 | ||
1060 | # asm 2: movdqa <t=%xmm2,>z12_stack=560(%esp) | ||
1061 | movdqa %xmm2,560(%esp) | ||
1062 | |||
1063 | # qhasm: z4_stack = q | ||
1064 | # asm 1: movdqa <q=int6464#4,>z4_stack=stack128#35 | ||
1065 | # asm 2: movdqa <q=%xmm3,>z4_stack=576(%esp) | ||
1066 | movdqa %xmm3,576(%esp) | ||
1067 | |||
1068 | # qhasm: z8_stack = r | ||
1069 | # asm 1: movdqa <r=int6464#2,>z8_stack=stack128#36 | ||
1070 | # asm 2: movdqa <r=%xmm1,>z8_stack=592(%esp) | ||
1071 | movdqa %xmm1,592(%esp) | ||
1072 | |||
1073 | # qhasm: mainloop1: | ||
1074 | ._mainloop1: | ||
1075 | |||
1076 | # qhasm: assign xmm0 to p | ||
1077 | |||
1078 | # qhasm: assign xmm1 to r | ||
1079 | |||
1080 | # qhasm: assign xmm2 to t | ||
1081 | |||
1082 | # qhasm: assign xmm3 to q | ||
1083 | |||
1084 | # qhasm: s = t | ||
1085 | # asm 1: movdqa <t=int6464#3,>s=int6464#7 | ||
1086 | # asm 2: movdqa <t=%xmm2,>s=%xmm6 | ||
1087 | movdqa %xmm2,%xmm6 | ||
1088 | |||
1089 | # qhasm: uint32323232 t += p | ||
1090 | # asm 1: paddd <p=int6464#1,<t=int6464#3 | ||
1091 | # asm 2: paddd <p=%xmm0,<t=%xmm2 | ||
1092 | paddd %xmm0,%xmm2 | ||
1093 | |||
1094 | # qhasm: u = t | ||
1095 | # asm 1: movdqa <t=int6464#3,>u=int6464#5 | ||
1096 | # asm 2: movdqa <t=%xmm2,>u=%xmm4 | ||
1097 | movdqa %xmm2,%xmm4 | ||
1098 | |||
1099 | # qhasm: uint32323232 t >>= 25 | ||
1100 | # asm 1: psrld $25,<t=int6464#3 | ||
1101 | # asm 2: psrld $25,<t=%xmm2 | ||
1102 | psrld $25,%xmm2 | ||
1103 | |||
1104 | # qhasm: q ^= t | ||
1105 | # asm 1: pxor <t=int6464#3,<q=int6464#4 | ||
1106 | # asm 2: pxor <t=%xmm2,<q=%xmm3 | ||
1107 | pxor %xmm2,%xmm3 | ||
1108 | |||
1109 | # qhasm: uint32323232 u <<= 7 | ||
1110 | # asm 1: pslld $7,<u=int6464#5 | ||
1111 | # asm 2: pslld $7,<u=%xmm4 | ||
1112 | pslld $7,%xmm4 | ||
1113 | |||
1114 | # qhasm: q ^= u | ||
1115 | # asm 1: pxor <u=int6464#5,<q=int6464#4 | ||
1116 | # asm 2: pxor <u=%xmm4,<q=%xmm3 | ||
1117 | pxor %xmm4,%xmm3 | ||
1118 | |||
1119 | # qhasm: z4_stack = q | ||
1120 | # asm 1: movdqa <q=int6464#4,>z4_stack=stack128#33 | ||
1121 | # asm 2: movdqa <q=%xmm3,>z4_stack=544(%esp) | ||
1122 | movdqa %xmm3,544(%esp) | ||
1123 | |||
1124 | # qhasm: t = p | ||
1125 | # asm 1: movdqa <p=int6464#1,>t=int6464#3 | ||
1126 | # asm 2: movdqa <p=%xmm0,>t=%xmm2 | ||
1127 | movdqa %xmm0,%xmm2 | ||
1128 | |||
1129 | # qhasm: uint32323232 t += q | ||
1130 | # asm 1: paddd <q=int6464#4,<t=int6464#3 | ||
1131 | # asm 2: paddd <q=%xmm3,<t=%xmm2 | ||
1132 | paddd %xmm3,%xmm2 | ||
1133 | |||
1134 | # qhasm: u = t | ||
1135 | # asm 1: movdqa <t=int6464#3,>u=int6464#5 | ||
1136 | # asm 2: movdqa <t=%xmm2,>u=%xmm4 | ||
1137 | movdqa %xmm2,%xmm4 | ||
1138 | |||
1139 | # qhasm: uint32323232 t >>= 23 | ||
1140 | # asm 1: psrld $23,<t=int6464#3 | ||
1141 | # asm 2: psrld $23,<t=%xmm2 | ||
1142 | psrld $23,%xmm2 | ||
1143 | |||
1144 | # qhasm: r ^= t | ||
1145 | # asm 1: pxor <t=int6464#3,<r=int6464#2 | ||
1146 | # asm 2: pxor <t=%xmm2,<r=%xmm1 | ||
1147 | pxor %xmm2,%xmm1 | ||
1148 | |||
1149 | # qhasm: uint32323232 u <<= 9 | ||
1150 | # asm 1: pslld $9,<u=int6464#5 | ||
1151 | # asm 2: pslld $9,<u=%xmm4 | ||
1152 | pslld $9,%xmm4 | ||
1153 | |||
1154 | # qhasm: r ^= u | ||
1155 | # asm 1: pxor <u=int6464#5,<r=int6464#2 | ||
1156 | # asm 2: pxor <u=%xmm4,<r=%xmm1 | ||
1157 | pxor %xmm4,%xmm1 | ||
1158 | |||
1159 | # qhasm: z8_stack = r | ||
1160 | # asm 1: movdqa <r=int6464#2,>z8_stack=stack128#34 | ||
1161 | # asm 2: movdqa <r=%xmm1,>z8_stack=560(%esp) | ||
1162 | movdqa %xmm1,560(%esp) | ||
1163 | |||
1164 | # qhasm: uint32323232 q += r | ||
1165 | # asm 1: paddd <r=int6464#2,<q=int6464#4 | ||
1166 | # asm 2: paddd <r=%xmm1,<q=%xmm3 | ||
1167 | paddd %xmm1,%xmm3 | ||
1168 | |||
1169 | # qhasm: u = q | ||
1170 | # asm 1: movdqa <q=int6464#4,>u=int6464#3 | ||
1171 | # asm 2: movdqa <q=%xmm3,>u=%xmm2 | ||
1172 | movdqa %xmm3,%xmm2 | ||
1173 | |||
1174 | # qhasm: uint32323232 q >>= 19 | ||
1175 | # asm 1: psrld $19,<q=int6464#4 | ||
1176 | # asm 2: psrld $19,<q=%xmm3 | ||
1177 | psrld $19,%xmm3 | ||
1178 | |||
1179 | # qhasm: s ^= q | ||
1180 | # asm 1: pxor <q=int6464#4,<s=int6464#7 | ||
1181 | # asm 2: pxor <q=%xmm3,<s=%xmm6 | ||
1182 | pxor %xmm3,%xmm6 | ||
1183 | |||
1184 | # qhasm: uint32323232 u <<= 13 | ||
1185 | # asm 1: pslld $13,<u=int6464#3 | ||
1186 | # asm 2: pslld $13,<u=%xmm2 | ||
1187 | pslld $13,%xmm2 | ||
1188 | |||
1189 | # qhasm: s ^= u | ||
1190 | # asm 1: pxor <u=int6464#3,<s=int6464#7 | ||
1191 | # asm 2: pxor <u=%xmm2,<s=%xmm6 | ||
1192 | pxor %xmm2,%xmm6 | ||
1193 | |||
1194 | # qhasm: mt = z1_stack | ||
1195 | # asm 1: movdqa <z1_stack=stack128#28,>mt=int6464#3 | ||
1196 | # asm 2: movdqa <z1_stack=464(%esp),>mt=%xmm2 | ||
1197 | movdqa 464(%esp),%xmm2 | ||
1198 | |||
1199 | # qhasm: mp = z5_stack | ||
1200 | # asm 1: movdqa <z5_stack=stack128#21,>mp=int6464#5 | ||
1201 | # asm 2: movdqa <z5_stack=352(%esp),>mp=%xmm4 | ||
1202 | movdqa 352(%esp),%xmm4 | ||
1203 | |||
1204 | # qhasm: mq = z9_stack | ||
1205 | # asm 1: movdqa <z9_stack=stack128#32,>mq=int6464#4 | ||
1206 | # asm 2: movdqa <z9_stack=528(%esp),>mq=%xmm3 | ||
1207 | movdqa 528(%esp),%xmm3 | ||
1208 | |||
1209 | # qhasm: mr = z13_stack | ||
1210 | # asm 1: movdqa <z13_stack=stack128#30,>mr=int6464#6 | ||
1211 | # asm 2: movdqa <z13_stack=496(%esp),>mr=%xmm5 | ||
1212 | movdqa 496(%esp),%xmm5 | ||
1213 | |||
1214 | # qhasm: z12_stack = s | ||
1215 | # asm 1: movdqa <s=int6464#7,>z12_stack=stack128#30 | ||
1216 | # asm 2: movdqa <s=%xmm6,>z12_stack=496(%esp) | ||
1217 | movdqa %xmm6,496(%esp) | ||
1218 | |||
1219 | # qhasm: uint32323232 r += s | ||
1220 | # asm 1: paddd <s=int6464#7,<r=int6464#2 | ||
1221 | # asm 2: paddd <s=%xmm6,<r=%xmm1 | ||
1222 | paddd %xmm6,%xmm1 | ||
1223 | |||
1224 | # qhasm: u = r | ||
1225 | # asm 1: movdqa <r=int6464#2,>u=int6464#7 | ||
1226 | # asm 2: movdqa <r=%xmm1,>u=%xmm6 | ||
1227 | movdqa %xmm1,%xmm6 | ||
1228 | |||
1229 | # qhasm: uint32323232 r >>= 14 | ||
1230 | # asm 1: psrld $14,<r=int6464#2 | ||
1231 | # asm 2: psrld $14,<r=%xmm1 | ||
1232 | psrld $14,%xmm1 | ||
1233 | |||
1234 | # qhasm: p ^= r | ||
1235 | # asm 1: pxor <r=int6464#2,<p=int6464#1 | ||
1236 | # asm 2: pxor <r=%xmm1,<p=%xmm0 | ||
1237 | pxor %xmm1,%xmm0 | ||
1238 | |||
1239 | # qhasm: uint32323232 u <<= 18 | ||
1240 | # asm 1: pslld $18,<u=int6464#7 | ||
1241 | # asm 2: pslld $18,<u=%xmm6 | ||
1242 | pslld $18,%xmm6 | ||
1243 | |||
1244 | # qhasm: p ^= u | ||
1245 | # asm 1: pxor <u=int6464#7,<p=int6464#1 | ||
1246 | # asm 2: pxor <u=%xmm6,<p=%xmm0 | ||
1247 | pxor %xmm6,%xmm0 | ||
1248 | |||
1249 | # qhasm: z0_stack = p | ||
1250 | # asm 1: movdqa <p=int6464#1,>z0_stack=stack128#21 | ||
1251 | # asm 2: movdqa <p=%xmm0,>z0_stack=352(%esp) | ||
1252 | movdqa %xmm0,352(%esp) | ||
1253 | |||
1254 | # qhasm: assign xmm2 to mt | ||
1255 | |||
1256 | # qhasm: assign xmm3 to mq | ||
1257 | |||
1258 | # qhasm: assign xmm4 to mp | ||
1259 | |||
1260 | # qhasm: assign xmm5 to mr | ||
1261 | |||
1262 | # qhasm: ms = mt | ||
1263 | # asm 1: movdqa <mt=int6464#3,>ms=int6464#7 | ||
1264 | # asm 2: movdqa <mt=%xmm2,>ms=%xmm6 | ||
1265 | movdqa %xmm2,%xmm6 | ||
1266 | |||
1267 | # qhasm: uint32323232 mt += mp | ||
1268 | # asm 1: paddd <mp=int6464#5,<mt=int6464#3 | ||
1269 | # asm 2: paddd <mp=%xmm4,<mt=%xmm2 | ||
1270 | paddd %xmm4,%xmm2 | ||
1271 | |||
1272 | # qhasm: mu = mt | ||
1273 | # asm 1: movdqa <mt=int6464#3,>mu=int6464#1 | ||
1274 | # asm 2: movdqa <mt=%xmm2,>mu=%xmm0 | ||
1275 | movdqa %xmm2,%xmm0 | ||
1276 | |||
1277 | # qhasm: uint32323232 mt >>= 25 | ||
1278 | # asm 1: psrld $25,<mt=int6464#3 | ||
1279 | # asm 2: psrld $25,<mt=%xmm2 | ||
1280 | psrld $25,%xmm2 | ||
1281 | |||
1282 | # qhasm: mq ^= mt | ||
1283 | # asm 1: pxor <mt=int6464#3,<mq=int6464#4 | ||
1284 | # asm 2: pxor <mt=%xmm2,<mq=%xmm3 | ||
1285 | pxor %xmm2,%xmm3 | ||
1286 | |||
1287 | # qhasm: uint32323232 mu <<= 7 | ||
1288 | # asm 1: pslld $7,<mu=int6464#1 | ||
1289 | # asm 2: pslld $7,<mu=%xmm0 | ||
1290 | pslld $7,%xmm0 | ||
1291 | |||
1292 | # qhasm: mq ^= mu | ||
1293 | # asm 1: pxor <mu=int6464#1,<mq=int6464#4 | ||
1294 | # asm 2: pxor <mu=%xmm0,<mq=%xmm3 | ||
1295 | pxor %xmm0,%xmm3 | ||
1296 | |||
1297 | # qhasm: z9_stack = mq | ||
1298 | # asm 1: movdqa <mq=int6464#4,>z9_stack=stack128#32 | ||
1299 | # asm 2: movdqa <mq=%xmm3,>z9_stack=528(%esp) | ||
1300 | movdqa %xmm3,528(%esp) | ||
1301 | |||
1302 | # qhasm: mt = mp | ||
1303 | # asm 1: movdqa <mp=int6464#5,>mt=int6464#1 | ||
1304 | # asm 2: movdqa <mp=%xmm4,>mt=%xmm0 | ||
1305 | movdqa %xmm4,%xmm0 | ||
1306 | |||
1307 | # qhasm: uint32323232 mt += mq | ||
1308 | # asm 1: paddd <mq=int6464#4,<mt=int6464#1 | ||
1309 | # asm 2: paddd <mq=%xmm3,<mt=%xmm0 | ||
1310 | paddd %xmm3,%xmm0 | ||
1311 | |||
1312 | # qhasm: mu = mt | ||
1313 | # asm 1: movdqa <mt=int6464#1,>mu=int6464#2 | ||
1314 | # asm 2: movdqa <mt=%xmm0,>mu=%xmm1 | ||
1315 | movdqa %xmm0,%xmm1 | ||
1316 | |||
1317 | # qhasm: uint32323232 mt >>= 23 | ||
1318 | # asm 1: psrld $23,<mt=int6464#1 | ||
1319 | # asm 2: psrld $23,<mt=%xmm0 | ||
1320 | psrld $23,%xmm0 | ||
1321 | |||
1322 | # qhasm: mr ^= mt | ||
1323 | # asm 1: pxor <mt=int6464#1,<mr=int6464#6 | ||
1324 | # asm 2: pxor <mt=%xmm0,<mr=%xmm5 | ||
1325 | pxor %xmm0,%xmm5 | ||
1326 | |||
1327 | # qhasm: uint32323232 mu <<= 9 | ||
1328 | # asm 1: pslld $9,<mu=int6464#2 | ||
1329 | # asm 2: pslld $9,<mu=%xmm1 | ||
1330 | pslld $9,%xmm1 | ||
1331 | |||
1332 | # qhasm: mr ^= mu | ||
1333 | # asm 1: pxor <mu=int6464#2,<mr=int6464#6 | ||
1334 | # asm 2: pxor <mu=%xmm1,<mr=%xmm5 | ||
1335 | pxor %xmm1,%xmm5 | ||
1336 | |||
1337 | # qhasm: z13_stack = mr | ||
1338 | # asm 1: movdqa <mr=int6464#6,>z13_stack=stack128#35 | ||
1339 | # asm 2: movdqa <mr=%xmm5,>z13_stack=576(%esp) | ||
1340 | movdqa %xmm5,576(%esp) | ||
1341 | |||
1342 | # qhasm: uint32323232 mq += mr | ||
1343 | # asm 1: paddd <mr=int6464#6,<mq=int6464#4 | ||
1344 | # asm 2: paddd <mr=%xmm5,<mq=%xmm3 | ||
1345 | paddd %xmm5,%xmm3 | ||
1346 | |||
1347 | # qhasm: mu = mq | ||
1348 | # asm 1: movdqa <mq=int6464#4,>mu=int6464#1 | ||
1349 | # asm 2: movdqa <mq=%xmm3,>mu=%xmm0 | ||
1350 | movdqa %xmm3,%xmm0 | ||
1351 | |||
1352 | # qhasm: uint32323232 mq >>= 19 | ||
1353 | # asm 1: psrld $19,<mq=int6464#4 | ||
1354 | # asm 2: psrld $19,<mq=%xmm3 | ||
1355 | psrld $19,%xmm3 | ||
1356 | |||
1357 | # qhasm: ms ^= mq | ||
1358 | # asm 1: pxor <mq=int6464#4,<ms=int6464#7 | ||
1359 | # asm 2: pxor <mq=%xmm3,<ms=%xmm6 | ||
1360 | pxor %xmm3,%xmm6 | ||
1361 | |||
1362 | # qhasm: uint32323232 mu <<= 13 | ||
1363 | # asm 1: pslld $13,<mu=int6464#1 | ||
1364 | # asm 2: pslld $13,<mu=%xmm0 | ||
1365 | pslld $13,%xmm0 | ||
1366 | |||
1367 | # qhasm: ms ^= mu | ||
1368 | # asm 1: pxor <mu=int6464#1,<ms=int6464#7 | ||
1369 | # asm 2: pxor <mu=%xmm0,<ms=%xmm6 | ||
1370 | pxor %xmm0,%xmm6 | ||
1371 | |||
1372 | # qhasm: t = z6_stack | ||
1373 | # asm 1: movdqa <z6_stack=stack128#26,>t=int6464#3 | ||
1374 | # asm 2: movdqa <z6_stack=432(%esp),>t=%xmm2 | ||
1375 | movdqa 432(%esp),%xmm2 | ||
1376 | |||
1377 | # qhasm: p = z10_stack | ||
1378 | # asm 1: movdqa <z10_stack=stack128#22,>p=int6464#1 | ||
1379 | # asm 2: movdqa <z10_stack=368(%esp),>p=%xmm0 | ||
1380 | movdqa 368(%esp),%xmm0 | ||
1381 | |||
1382 | # qhasm: q = z14_stack | ||
1383 | # asm 1: movdqa <z14_stack=stack128#24,>q=int6464#4 | ||
1384 | # asm 2: movdqa <z14_stack=400(%esp),>q=%xmm3 | ||
1385 | movdqa 400(%esp),%xmm3 | ||
1386 | |||
1387 | # qhasm: r = z2_stack | ||
1388 | # asm 1: movdqa <z2_stack=stack128#31,>r=int6464#2 | ||
1389 | # asm 2: movdqa <z2_stack=512(%esp),>r=%xmm1 | ||
1390 | movdqa 512(%esp),%xmm1 | ||
1391 | |||
1392 | # qhasm: z1_stack = ms | ||
1393 | # asm 1: movdqa <ms=int6464#7,>z1_stack=stack128#22 | ||
1394 | # asm 2: movdqa <ms=%xmm6,>z1_stack=368(%esp) | ||
1395 | movdqa %xmm6,368(%esp) | ||
1396 | |||
1397 | # qhasm: uint32323232 mr += ms | ||
1398 | # asm 1: paddd <ms=int6464#7,<mr=int6464#6 | ||
1399 | # asm 2: paddd <ms=%xmm6,<mr=%xmm5 | ||
1400 | paddd %xmm6,%xmm5 | ||
1401 | |||
1402 | # qhasm: mu = mr | ||
1403 | # asm 1: movdqa <mr=int6464#6,>mu=int6464#7 | ||
1404 | # asm 2: movdqa <mr=%xmm5,>mu=%xmm6 | ||
1405 | movdqa %xmm5,%xmm6 | ||
1406 | |||
1407 | # qhasm: uint32323232 mr >>= 14 | ||
1408 | # asm 1: psrld $14,<mr=int6464#6 | ||
1409 | # asm 2: psrld $14,<mr=%xmm5 | ||
1410 | psrld $14,%xmm5 | ||
1411 | |||
1412 | # qhasm: mp ^= mr | ||
1413 | # asm 1: pxor <mr=int6464#6,<mp=int6464#5 | ||
1414 | # asm 2: pxor <mr=%xmm5,<mp=%xmm4 | ||
1415 | pxor %xmm5,%xmm4 | ||
1416 | |||
1417 | # qhasm: uint32323232 mu <<= 18 | ||
1418 | # asm 1: pslld $18,<mu=int6464#7 | ||
1419 | # asm 2: pslld $18,<mu=%xmm6 | ||
1420 | pslld $18,%xmm6 | ||
1421 | |||
1422 | # qhasm: mp ^= mu | ||
1423 | # asm 1: pxor <mu=int6464#7,<mp=int6464#5 | ||
1424 | # asm 2: pxor <mu=%xmm6,<mp=%xmm4 | ||
1425 | pxor %xmm6,%xmm4 | ||
1426 | |||
1427 | # qhasm: z5_stack = mp | ||
1428 | # asm 1: movdqa <mp=int6464#5,>z5_stack=stack128#24 | ||
1429 | # asm 2: movdqa <mp=%xmm4,>z5_stack=400(%esp) | ||
1430 | movdqa %xmm4,400(%esp) | ||
1431 | |||
1432 | # qhasm: assign xmm0 to p | ||
1433 | |||
1434 | # qhasm: assign xmm1 to r | ||
1435 | |||
1436 | # qhasm: assign xmm2 to t | ||
1437 | |||
1438 | # qhasm: assign xmm3 to q | ||
1439 | |||
1440 | # qhasm: s = t | ||
1441 | # asm 1: movdqa <t=int6464#3,>s=int6464#7 | ||
1442 | # asm 2: movdqa <t=%xmm2,>s=%xmm6 | ||
1443 | movdqa %xmm2,%xmm6 | ||
1444 | |||
1445 | # qhasm: uint32323232 t += p | ||
1446 | # asm 1: paddd <p=int6464#1,<t=int6464#3 | ||
1447 | # asm 2: paddd <p=%xmm0,<t=%xmm2 | ||
1448 | paddd %xmm0,%xmm2 | ||
1449 | |||
1450 | # qhasm: u = t | ||
1451 | # asm 1: movdqa <t=int6464#3,>u=int6464#5 | ||
1452 | # asm 2: movdqa <t=%xmm2,>u=%xmm4 | ||
1453 | movdqa %xmm2,%xmm4 | ||
1454 | |||
1455 | # qhasm: uint32323232 t >>= 25 | ||
1456 | # asm 1: psrld $25,<t=int6464#3 | ||
1457 | # asm 2: psrld $25,<t=%xmm2 | ||
1458 | psrld $25,%xmm2 | ||
1459 | |||
1460 | # qhasm: q ^= t | ||
1461 | # asm 1: pxor <t=int6464#3,<q=int6464#4 | ||
1462 | # asm 2: pxor <t=%xmm2,<q=%xmm3 | ||
1463 | pxor %xmm2,%xmm3 | ||
1464 | |||
1465 | # qhasm: uint32323232 u <<= 7 | ||
1466 | # asm 1: pslld $7,<u=int6464#5 | ||
1467 | # asm 2: pslld $7,<u=%xmm4 | ||
1468 | pslld $7,%xmm4 | ||
1469 | |||
1470 | # qhasm: q ^= u | ||
1471 | # asm 1: pxor <u=int6464#5,<q=int6464#4 | ||
1472 | # asm 2: pxor <u=%xmm4,<q=%xmm3 | ||
1473 | pxor %xmm4,%xmm3 | ||
1474 | |||
1475 | # qhasm: z14_stack = q | ||
1476 | # asm 1: movdqa <q=int6464#4,>z14_stack=stack128#36 | ||
1477 | # asm 2: movdqa <q=%xmm3,>z14_stack=592(%esp) | ||
1478 | movdqa %xmm3,592(%esp) | ||
1479 | |||
1480 | # qhasm: t = p | ||
1481 | # asm 1: movdqa <p=int6464#1,>t=int6464#3 | ||
1482 | # asm 2: movdqa <p=%xmm0,>t=%xmm2 | ||
1483 | movdqa %xmm0,%xmm2 | ||
1484 | |||
1485 | # qhasm: uint32323232 t += q | ||
1486 | # asm 1: paddd <q=int6464#4,<t=int6464#3 | ||
1487 | # asm 2: paddd <q=%xmm3,<t=%xmm2 | ||
1488 | paddd %xmm3,%xmm2 | ||
1489 | |||
1490 | # qhasm: u = t | ||
1491 | # asm 1: movdqa <t=int6464#3,>u=int6464#5 | ||
1492 | # asm 2: movdqa <t=%xmm2,>u=%xmm4 | ||
1493 | movdqa %xmm2,%xmm4 | ||
1494 | |||
1495 | # qhasm: uint32323232 t >>= 23 | ||
1496 | # asm 1: psrld $23,<t=int6464#3 | ||
1497 | # asm 2: psrld $23,<t=%xmm2 | ||
1498 | psrld $23,%xmm2 | ||
1499 | |||
1500 | # qhasm: r ^= t | ||
1501 | # asm 1: pxor <t=int6464#3,<r=int6464#2 | ||
1502 | # asm 2: pxor <t=%xmm2,<r=%xmm1 | ||
1503 | pxor %xmm2,%xmm1 | ||
1504 | |||
1505 | # qhasm: uint32323232 u <<= 9 | ||
1506 | # asm 1: pslld $9,<u=int6464#5 | ||
1507 | # asm 2: pslld $9,<u=%xmm4 | ||
1508 | pslld $9,%xmm4 | ||
1509 | |||
1510 | # qhasm: r ^= u | ||
1511 | # asm 1: pxor <u=int6464#5,<r=int6464#2 | ||
1512 | # asm 2: pxor <u=%xmm4,<r=%xmm1 | ||
1513 | pxor %xmm4,%xmm1 | ||
1514 | |||
1515 | # qhasm: z2_stack = r | ||
1516 | # asm 1: movdqa <r=int6464#2,>z2_stack=stack128#26 | ||
1517 | # asm 2: movdqa <r=%xmm1,>z2_stack=432(%esp) | ||
1518 | movdqa %xmm1,432(%esp) | ||
1519 | |||
1520 | # qhasm: uint32323232 q += r | ||
1521 | # asm 1: paddd <r=int6464#2,<q=int6464#4 | ||
1522 | # asm 2: paddd <r=%xmm1,<q=%xmm3 | ||
1523 | paddd %xmm1,%xmm3 | ||
1524 | |||
1525 | # qhasm: u = q | ||
1526 | # asm 1: movdqa <q=int6464#4,>u=int6464#3 | ||
1527 | # asm 2: movdqa <q=%xmm3,>u=%xmm2 | ||
1528 | movdqa %xmm3,%xmm2 | ||
1529 | |||
1530 | # qhasm: uint32323232 q >>= 19 | ||
1531 | # asm 1: psrld $19,<q=int6464#4 | ||
1532 | # asm 2: psrld $19,<q=%xmm3 | ||
1533 | psrld $19,%xmm3 | ||
1534 | |||
1535 | # qhasm: s ^= q | ||
1536 | # asm 1: pxor <q=int6464#4,<s=int6464#7 | ||
1537 | # asm 2: pxor <q=%xmm3,<s=%xmm6 | ||
1538 | pxor %xmm3,%xmm6 | ||
1539 | |||
1540 | # qhasm: uint32323232 u <<= 13 | ||
1541 | # asm 1: pslld $13,<u=int6464#3 | ||
1542 | # asm 2: pslld $13,<u=%xmm2 | ||
1543 | pslld $13,%xmm2 | ||
1544 | |||
1545 | # qhasm: s ^= u | ||
1546 | # asm 1: pxor <u=int6464#3,<s=int6464#7 | ||
1547 | # asm 2: pxor <u=%xmm2,<s=%xmm6 | ||
1548 | pxor %xmm2,%xmm6 | ||
1549 | |||
1550 | # qhasm: mt = z11_stack | ||
1551 | # asm 1: movdqa <z11_stack=stack128#27,>mt=int6464#3 | ||
1552 | # asm 2: movdqa <z11_stack=448(%esp),>mt=%xmm2 | ||
1553 | movdqa 448(%esp),%xmm2 | ||
1554 | |||
1555 | # qhasm: mp = z15_stack | ||
1556 | # asm 1: movdqa <z15_stack=stack128#23,>mp=int6464#5 | ||
1557 | # asm 2: movdqa <z15_stack=384(%esp),>mp=%xmm4 | ||
1558 | movdqa 384(%esp),%xmm4 | ||
1559 | |||
1560 | # qhasm: mq = z3_stack | ||
1561 | # asm 1: movdqa <z3_stack=stack128#25,>mq=int6464#4 | ||
1562 | # asm 2: movdqa <z3_stack=416(%esp),>mq=%xmm3 | ||
1563 | movdqa 416(%esp),%xmm3 | ||
1564 | |||
1565 | # qhasm: mr = z7_stack | ||
1566 | # asm 1: movdqa <z7_stack=stack128#29,>mr=int6464#6 | ||
1567 | # asm 2: movdqa <z7_stack=480(%esp),>mr=%xmm5 | ||
1568 | movdqa 480(%esp),%xmm5 | ||
1569 | |||
1570 | # qhasm: z6_stack = s | ||
1571 | # asm 1: movdqa <s=int6464#7,>z6_stack=stack128#23 | ||
1572 | # asm 2: movdqa <s=%xmm6,>z6_stack=384(%esp) | ||
1573 | movdqa %xmm6,384(%esp) | ||
1574 | |||
1575 | # qhasm: uint32323232 r += s | ||
1576 | # asm 1: paddd <s=int6464#7,<r=int6464#2 | ||
1577 | # asm 2: paddd <s=%xmm6,<r=%xmm1 | ||
1578 | paddd %xmm6,%xmm1 | ||
1579 | |||
1580 | # qhasm: u = r | ||
1581 | # asm 1: movdqa <r=int6464#2,>u=int6464#7 | ||
1582 | # asm 2: movdqa <r=%xmm1,>u=%xmm6 | ||
1583 | movdqa %xmm1,%xmm6 | ||
1584 | |||
1585 | # qhasm: uint32323232 r >>= 14 | ||
1586 | # asm 1: psrld $14,<r=int6464#2 | ||
1587 | # asm 2: psrld $14,<r=%xmm1 | ||
1588 | psrld $14,%xmm1 | ||
1589 | |||
1590 | # qhasm: p ^= r | ||
1591 | # asm 1: pxor <r=int6464#2,<p=int6464#1 | ||
1592 | # asm 2: pxor <r=%xmm1,<p=%xmm0 | ||
1593 | pxor %xmm1,%xmm0 | ||
1594 | |||
1595 | # qhasm: uint32323232 u <<= 18 | ||
1596 | # asm 1: pslld $18,<u=int6464#7 | ||
1597 | # asm 2: pslld $18,<u=%xmm6 | ||
1598 | pslld $18,%xmm6 | ||
1599 | |||
1600 | # qhasm: p ^= u | ||
1601 | # asm 1: pxor <u=int6464#7,<p=int6464#1 | ||
1602 | # asm 2: pxor <u=%xmm6,<p=%xmm0 | ||
1603 | pxor %xmm6,%xmm0 | ||
1604 | |||
1605 | # qhasm: z10_stack = p | ||
1606 | # asm 1: movdqa <p=int6464#1,>z10_stack=stack128#27 | ||
1607 | # asm 2: movdqa <p=%xmm0,>z10_stack=448(%esp) | ||
1608 | movdqa %xmm0,448(%esp) | ||
1609 | |||
1610 | # qhasm: assign xmm2 to mt | ||
1611 | |||
1612 | # qhasm: assign xmm3 to mq | ||
1613 | |||
1614 | # qhasm: assign xmm4 to mp | ||
1615 | |||
1616 | # qhasm: assign xmm5 to mr | ||
1617 | |||
1618 | # qhasm: ms = mt | ||
1619 | # asm 1: movdqa <mt=int6464#3,>ms=int6464#7 | ||
1620 | # asm 2: movdqa <mt=%xmm2,>ms=%xmm6 | ||
1621 | movdqa %xmm2,%xmm6 | ||
1622 | |||
1623 | # qhasm: uint32323232 mt += mp | ||
1624 | # asm 1: paddd <mp=int6464#5,<mt=int6464#3 | ||
1625 | # asm 2: paddd <mp=%xmm4,<mt=%xmm2 | ||
1626 | paddd %xmm4,%xmm2 | ||
1627 | |||
1628 | # qhasm: mu = mt | ||
1629 | # asm 1: movdqa <mt=int6464#3,>mu=int6464#1 | ||
1630 | # asm 2: movdqa <mt=%xmm2,>mu=%xmm0 | ||
1631 | movdqa %xmm2,%xmm0 | ||
1632 | |||
1633 | # qhasm: uint32323232 mt >>= 25 | ||
1634 | # asm 1: psrld $25,<mt=int6464#3 | ||
1635 | # asm 2: psrld $25,<mt=%xmm2 | ||
1636 | psrld $25,%xmm2 | ||
1637 | |||
1638 | # qhasm: mq ^= mt | ||
1639 | # asm 1: pxor <mt=int6464#3,<mq=int6464#4 | ||
1640 | # asm 2: pxor <mt=%xmm2,<mq=%xmm3 | ||
1641 | pxor %xmm2,%xmm3 | ||
1642 | |||
1643 | # qhasm: uint32323232 mu <<= 7 | ||
1644 | # asm 1: pslld $7,<mu=int6464#1 | ||
1645 | # asm 2: pslld $7,<mu=%xmm0 | ||
1646 | pslld $7,%xmm0 | ||
1647 | |||
1648 | # qhasm: mq ^= mu | ||
1649 | # asm 1: pxor <mu=int6464#1,<mq=int6464#4 | ||
1650 | # asm 2: pxor <mu=%xmm0,<mq=%xmm3 | ||
1651 | pxor %xmm0,%xmm3 | ||
1652 | |||
1653 | # qhasm: z3_stack = mq | ||
1654 | # asm 1: movdqa <mq=int6464#4,>z3_stack=stack128#25 | ||
1655 | # asm 2: movdqa <mq=%xmm3,>z3_stack=416(%esp) | ||
1656 | movdqa %xmm3,416(%esp) | ||
1657 | |||
1658 | # qhasm: mt = mp | ||
1659 | # asm 1: movdqa <mp=int6464#5,>mt=int6464#1 | ||
1660 | # asm 2: movdqa <mp=%xmm4,>mt=%xmm0 | ||
1661 | movdqa %xmm4,%xmm0 | ||
1662 | |||
1663 | # qhasm: uint32323232 mt += mq | ||
1664 | # asm 1: paddd <mq=int6464#4,<mt=int6464#1 | ||
1665 | # asm 2: paddd <mq=%xmm3,<mt=%xmm0 | ||
1666 | paddd %xmm3,%xmm0 | ||
1667 | |||
1668 | # qhasm: mu = mt | ||
1669 | # asm 1: movdqa <mt=int6464#1,>mu=int6464#2 | ||
1670 | # asm 2: movdqa <mt=%xmm0,>mu=%xmm1 | ||
1671 | movdqa %xmm0,%xmm1 | ||
1672 | |||
1673 | # qhasm: uint32323232 mt >>= 23 | ||
1674 | # asm 1: psrld $23,<mt=int6464#1 | ||
1675 | # asm 2: psrld $23,<mt=%xmm0 | ||
1676 | psrld $23,%xmm0 | ||
1677 | |||
1678 | # qhasm: mr ^= mt | ||
1679 | # asm 1: pxor <mt=int6464#1,<mr=int6464#6 | ||
1680 | # asm 2: pxor <mt=%xmm0,<mr=%xmm5 | ||
1681 | pxor %xmm0,%xmm5 | ||
1682 | |||
1683 | # qhasm: uint32323232 mu <<= 9 | ||
1684 | # asm 1: pslld $9,<mu=int6464#2 | ||
1685 | # asm 2: pslld $9,<mu=%xmm1 | ||
1686 | pslld $9,%xmm1 | ||
1687 | |||
1688 | # qhasm: mr ^= mu | ||
1689 | # asm 1: pxor <mu=int6464#2,<mr=int6464#6 | ||
1690 | # asm 2: pxor <mu=%xmm1,<mr=%xmm5 | ||
1691 | pxor %xmm1,%xmm5 | ||
1692 | |||
1693 | # qhasm: z7_stack = mr | ||
1694 | # asm 1: movdqa <mr=int6464#6,>z7_stack=stack128#29 | ||
1695 | # asm 2: movdqa <mr=%xmm5,>z7_stack=480(%esp) | ||
1696 | movdqa %xmm5,480(%esp) | ||
1697 | |||
1698 | # qhasm: uint32323232 mq += mr | ||
1699 | # asm 1: paddd <mr=int6464#6,<mq=int6464#4 | ||
1700 | # asm 2: paddd <mr=%xmm5,<mq=%xmm3 | ||
1701 | paddd %xmm5,%xmm3 | ||
1702 | |||
1703 | # qhasm: mu = mq | ||
1704 | # asm 1: movdqa <mq=int6464#4,>mu=int6464#1 | ||
1705 | # asm 2: movdqa <mq=%xmm3,>mu=%xmm0 | ||
1706 | movdqa %xmm3,%xmm0 | ||
1707 | |||
1708 | # qhasm: uint32323232 mq >>= 19 | ||
1709 | # asm 1: psrld $19,<mq=int6464#4 | ||
1710 | # asm 2: psrld $19,<mq=%xmm3 | ||
1711 | psrld $19,%xmm3 | ||
1712 | |||
1713 | # qhasm: ms ^= mq | ||
1714 | # asm 1: pxor <mq=int6464#4,<ms=int6464#7 | ||
1715 | # asm 2: pxor <mq=%xmm3,<ms=%xmm6 | ||
1716 | pxor %xmm3,%xmm6 | ||
1717 | |||
1718 | # qhasm: uint32323232 mu <<= 13 | ||
1719 | # asm 1: pslld $13,<mu=int6464#1 | ||
1720 | # asm 2: pslld $13,<mu=%xmm0 | ||
1721 | pslld $13,%xmm0 | ||
1722 | |||
1723 | # qhasm: ms ^= mu | ||
1724 | # asm 1: pxor <mu=int6464#1,<ms=int6464#7 | ||
1725 | # asm 2: pxor <mu=%xmm0,<ms=%xmm6 | ||
1726 | pxor %xmm0,%xmm6 | ||
1727 | |||
1728 | # qhasm: t = z3_stack | ||
1729 | # asm 1: movdqa <z3_stack=stack128#25,>t=int6464#3 | ||
1730 | # asm 2: movdqa <z3_stack=416(%esp),>t=%xmm2 | ||
1731 | movdqa 416(%esp),%xmm2 | ||
1732 | |||
1733 | # qhasm: p = z0_stack | ||
1734 | # asm 1: movdqa <z0_stack=stack128#21,>p=int6464#1 | ||
1735 | # asm 2: movdqa <z0_stack=352(%esp),>p=%xmm0 | ||
1736 | movdqa 352(%esp),%xmm0 | ||
1737 | |||
1738 | # qhasm: q = z1_stack | ||
1739 | # asm 1: movdqa <z1_stack=stack128#22,>q=int6464#4 | ||
1740 | # asm 2: movdqa <z1_stack=368(%esp),>q=%xmm3 | ||
1741 | movdqa 368(%esp),%xmm3 | ||
1742 | |||
1743 | # qhasm: r = z2_stack | ||
1744 | # asm 1: movdqa <z2_stack=stack128#26,>r=int6464#2 | ||
1745 | # asm 2: movdqa <z2_stack=432(%esp),>r=%xmm1 | ||
1746 | movdqa 432(%esp),%xmm1 | ||
1747 | |||
1748 | # qhasm: z11_stack = ms | ||
1749 | # asm 1: movdqa <ms=int6464#7,>z11_stack=stack128#21 | ||
1750 | # asm 2: movdqa <ms=%xmm6,>z11_stack=352(%esp) | ||
1751 | movdqa %xmm6,352(%esp) | ||
1752 | |||
1753 | # qhasm: uint32323232 mr += ms | ||
1754 | # asm 1: paddd <ms=int6464#7,<mr=int6464#6 | ||
1755 | # asm 2: paddd <ms=%xmm6,<mr=%xmm5 | ||
1756 | paddd %xmm6,%xmm5 | ||
1757 | |||
1758 | # qhasm: mu = mr | ||
1759 | # asm 1: movdqa <mr=int6464#6,>mu=int6464#7 | ||
1760 | # asm 2: movdqa <mr=%xmm5,>mu=%xmm6 | ||
1761 | movdqa %xmm5,%xmm6 | ||
1762 | |||
1763 | # qhasm: uint32323232 mr >>= 14 | ||
1764 | # asm 1: psrld $14,<mr=int6464#6 | ||
1765 | # asm 2: psrld $14,<mr=%xmm5 | ||
1766 | psrld $14,%xmm5 | ||
1767 | |||
1768 | # qhasm: mp ^= mr | ||
1769 | # asm 1: pxor <mr=int6464#6,<mp=int6464#5 | ||
1770 | # asm 2: pxor <mr=%xmm5,<mp=%xmm4 | ||
1771 | pxor %xmm5,%xmm4 | ||
1772 | |||
1773 | # qhasm: uint32323232 mu <<= 18 | ||
1774 | # asm 1: pslld $18,<mu=int6464#7 | ||
1775 | # asm 2: pslld $18,<mu=%xmm6 | ||
1776 | pslld $18,%xmm6 | ||
1777 | |||
1778 | # qhasm: mp ^= mu | ||
1779 | # asm 1: pxor <mu=int6464#7,<mp=int6464#5 | ||
1780 | # asm 2: pxor <mu=%xmm6,<mp=%xmm4 | ||
1781 | pxor %xmm6,%xmm4 | ||
1782 | |||
1783 | # qhasm: z15_stack = mp | ||
1784 | # asm 1: movdqa <mp=int6464#5,>z15_stack=stack128#22 | ||
1785 | # asm 2: movdqa <mp=%xmm4,>z15_stack=368(%esp) | ||
1786 | movdqa %xmm4,368(%esp) | ||
1787 | |||
1788 | # qhasm: assign xmm0 to p | ||
1789 | |||
1790 | # qhasm: assign xmm1 to r | ||
1791 | |||
1792 | # qhasm: assign xmm2 to t | ||
1793 | |||
1794 | # qhasm: assign xmm3 to q | ||
1795 | |||
1796 | # qhasm: s = t | ||
1797 | # asm 1: movdqa <t=int6464#3,>s=int6464#7 | ||
1798 | # asm 2: movdqa <t=%xmm2,>s=%xmm6 | ||
1799 | movdqa %xmm2,%xmm6 | ||
1800 | |||
1801 | # qhasm: uint32323232 t += p | ||
1802 | # asm 1: paddd <p=int6464#1,<t=int6464#3 | ||
1803 | # asm 2: paddd <p=%xmm0,<t=%xmm2 | ||
1804 | paddd %xmm0,%xmm2 | ||
1805 | |||
1806 | # qhasm: u = t | ||
1807 | # asm 1: movdqa <t=int6464#3,>u=int6464#5 | ||
1808 | # asm 2: movdqa <t=%xmm2,>u=%xmm4 | ||
1809 | movdqa %xmm2,%xmm4 | ||
1810 | |||
1811 | # qhasm: uint32323232 t >>= 25 | ||
1812 | # asm 1: psrld $25,<t=int6464#3 | ||
1813 | # asm 2: psrld $25,<t=%xmm2 | ||
1814 | psrld $25,%xmm2 | ||
1815 | |||
1816 | # qhasm: q ^= t | ||
1817 | # asm 1: pxor <t=int6464#3,<q=int6464#4 | ||
1818 | # asm 2: pxor <t=%xmm2,<q=%xmm3 | ||
1819 | pxor %xmm2,%xmm3 | ||
1820 | |||
1821 | # qhasm: uint32323232 u <<= 7 | ||
1822 | # asm 1: pslld $7,<u=int6464#5 | ||
1823 | # asm 2: pslld $7,<u=%xmm4 | ||
1824 | pslld $7,%xmm4 | ||
1825 | |||
1826 | # qhasm: q ^= u | ||
1827 | # asm 1: pxor <u=int6464#5,<q=int6464#4 | ||
1828 | # asm 2: pxor <u=%xmm4,<q=%xmm3 | ||
1829 | pxor %xmm4,%xmm3 | ||
1830 | |||
1831 | # qhasm: z1_stack = q | ||
1832 | # asm 1: movdqa <q=int6464#4,>z1_stack=stack128#28 | ||
1833 | # asm 2: movdqa <q=%xmm3,>z1_stack=464(%esp) | ||
1834 | movdqa %xmm3,464(%esp) | ||
1835 | |||
1836 | # qhasm: t = p | ||
1837 | # asm 1: movdqa <p=int6464#1,>t=int6464#3 | ||
1838 | # asm 2: movdqa <p=%xmm0,>t=%xmm2 | ||
1839 | movdqa %xmm0,%xmm2 | ||
1840 | |||
1841 | # qhasm: uint32323232 t += q | ||
1842 | # asm 1: paddd <q=int6464#4,<t=int6464#3 | ||
1843 | # asm 2: paddd <q=%xmm3,<t=%xmm2 | ||
1844 | paddd %xmm3,%xmm2 | ||
1845 | |||
1846 | # qhasm: u = t | ||
1847 | # asm 1: movdqa <t=int6464#3,>u=int6464#5 | ||
1848 | # asm 2: movdqa <t=%xmm2,>u=%xmm4 | ||
1849 | movdqa %xmm2,%xmm4 | ||
1850 | |||
1851 | # qhasm: uint32323232 t >>= 23 | ||
1852 | # asm 1: psrld $23,<t=int6464#3 | ||
1853 | # asm 2: psrld $23,<t=%xmm2 | ||
1854 | psrld $23,%xmm2 | ||
1855 | |||
1856 | # qhasm: r ^= t | ||
1857 | # asm 1: pxor <t=int6464#3,<r=int6464#2 | ||
1858 | # asm 2: pxor <t=%xmm2,<r=%xmm1 | ||
1859 | pxor %xmm2,%xmm1 | ||
1860 | |||
1861 | # qhasm: uint32323232 u <<= 9 | ||
1862 | # asm 1: pslld $9,<u=int6464#5 | ||
1863 | # asm 2: pslld $9,<u=%xmm4 | ||
1864 | pslld $9,%xmm4 | ||
1865 | |||
1866 | # qhasm: r ^= u | ||
1867 | # asm 1: pxor <u=int6464#5,<r=int6464#2 | ||
1868 | # asm 2: pxor <u=%xmm4,<r=%xmm1 | ||
1869 | pxor %xmm4,%xmm1 | ||
1870 | |||
1871 | # qhasm: z2_stack = r | ||
1872 | # asm 1: movdqa <r=int6464#2,>z2_stack=stack128#31 | ||
1873 | # asm 2: movdqa <r=%xmm1,>z2_stack=512(%esp) | ||
1874 | movdqa %xmm1,512(%esp) | ||
1875 | |||
1876 | # qhasm: uint32323232 q += r | ||
1877 | # asm 1: paddd <r=int6464#2,<q=int6464#4 | ||
1878 | # asm 2: paddd <r=%xmm1,<q=%xmm3 | ||
1879 | paddd %xmm1,%xmm3 | ||
1880 | |||
1881 | # qhasm: u = q | ||
1882 | # asm 1: movdqa <q=int6464#4,>u=int6464#3 | ||
1883 | # asm 2: movdqa <q=%xmm3,>u=%xmm2 | ||
1884 | movdqa %xmm3,%xmm2 | ||
1885 | |||
1886 | # qhasm: uint32323232 q >>= 19 | ||
1887 | # asm 1: psrld $19,<q=int6464#4 | ||
1888 | # asm 2: psrld $19,<q=%xmm3 | ||
1889 | psrld $19,%xmm3 | ||
1890 | |||
1891 | # qhasm: s ^= q | ||
1892 | # asm 1: pxor <q=int6464#4,<s=int6464#7 | ||
1893 | # asm 2: pxor <q=%xmm3,<s=%xmm6 | ||
1894 | pxor %xmm3,%xmm6 | ||
1895 | |||
1896 | # qhasm: uint32323232 u <<= 13 | ||
1897 | # asm 1: pslld $13,<u=int6464#3 | ||
1898 | # asm 2: pslld $13,<u=%xmm2 | ||
1899 | pslld $13,%xmm2 | ||
1900 | |||
1901 | # qhasm: s ^= u | ||
1902 | # asm 1: pxor <u=int6464#3,<s=int6464#7 | ||
1903 | # asm 2: pxor <u=%xmm2,<s=%xmm6 | ||
1904 | pxor %xmm2,%xmm6 | ||
1905 | |||
1906 | # qhasm: mt = z4_stack | ||
1907 | # asm 1: movdqa <z4_stack=stack128#33,>mt=int6464#3 | ||
1908 | # asm 2: movdqa <z4_stack=544(%esp),>mt=%xmm2 | ||
1909 | movdqa 544(%esp),%xmm2 | ||
1910 | |||
1911 | # qhasm: mp = z5_stack | ||
1912 | # asm 1: movdqa <z5_stack=stack128#24,>mp=int6464#5 | ||
1913 | # asm 2: movdqa <z5_stack=400(%esp),>mp=%xmm4 | ||
1914 | movdqa 400(%esp),%xmm4 | ||
1915 | |||
1916 | # qhasm: mq = z6_stack | ||
1917 | # asm 1: movdqa <z6_stack=stack128#23,>mq=int6464#4 | ||
1918 | # asm 2: movdqa <z6_stack=384(%esp),>mq=%xmm3 | ||
1919 | movdqa 384(%esp),%xmm3 | ||
1920 | |||
1921 | # qhasm: mr = z7_stack | ||
1922 | # asm 1: movdqa <z7_stack=stack128#29,>mr=int6464#6 | ||
1923 | # asm 2: movdqa <z7_stack=480(%esp),>mr=%xmm5 | ||
1924 | movdqa 480(%esp),%xmm5 | ||
1925 | |||
1926 | # qhasm: z3_stack = s | ||
1927 | # asm 1: movdqa <s=int6464#7,>z3_stack=stack128#25 | ||
1928 | # asm 2: movdqa <s=%xmm6,>z3_stack=416(%esp) | ||
1929 | movdqa %xmm6,416(%esp) | ||
1930 | |||
1931 | # qhasm: uint32323232 r += s | ||
1932 | # asm 1: paddd <s=int6464#7,<r=int6464#2 | ||
1933 | # asm 2: paddd <s=%xmm6,<r=%xmm1 | ||
1934 | paddd %xmm6,%xmm1 | ||
1935 | |||
1936 | # qhasm: u = r | ||
1937 | # asm 1: movdqa <r=int6464#2,>u=int6464#7 | ||
1938 | # asm 2: movdqa <r=%xmm1,>u=%xmm6 | ||
1939 | movdqa %xmm1,%xmm6 | ||
1940 | |||
1941 | # qhasm: uint32323232 r >>= 14 | ||
1942 | # asm 1: psrld $14,<r=int6464#2 | ||
1943 | # asm 2: psrld $14,<r=%xmm1 | ||
1944 | psrld $14,%xmm1 | ||
1945 | |||
1946 | # qhasm: p ^= r | ||
1947 | # asm 1: pxor <r=int6464#2,<p=int6464#1 | ||
1948 | # asm 2: pxor <r=%xmm1,<p=%xmm0 | ||
1949 | pxor %xmm1,%xmm0 | ||
1950 | |||
1951 | # qhasm: uint32323232 u <<= 18 | ||
1952 | # asm 1: pslld $18,<u=int6464#7 | ||
1953 | # asm 2: pslld $18,<u=%xmm6 | ||
1954 | pslld $18,%xmm6 | ||
1955 | |||
1956 | # qhasm: p ^= u | ||
1957 | # asm 1: pxor <u=int6464#7,<p=int6464#1 | ||
1958 | # asm 2: pxor <u=%xmm6,<p=%xmm0 | ||
1959 | pxor %xmm6,%xmm0 | ||
1960 | |||
1961 | # qhasm: z0_stack = p | ||
1962 | # asm 1: movdqa <p=int6464#1,>z0_stack=stack128#33 | ||
1963 | # asm 2: movdqa <p=%xmm0,>z0_stack=544(%esp) | ||
1964 | movdqa %xmm0,544(%esp) | ||
1965 | |||
1966 | # qhasm: assign xmm2 to mt | ||
1967 | |||
1968 | # qhasm: assign xmm3 to mq | ||
1969 | |||
1970 | # qhasm: assign xmm4 to mp | ||
1971 | |||
1972 | # qhasm: assign xmm5 to mr | ||
1973 | |||
1974 | # qhasm: ms = mt | ||
1975 | # asm 1: movdqa <mt=int6464#3,>ms=int6464#7 | ||
1976 | # asm 2: movdqa <mt=%xmm2,>ms=%xmm6 | ||
1977 | movdqa %xmm2,%xmm6 | ||
1978 | |||
1979 | # qhasm: uint32323232 mt += mp | ||
1980 | # asm 1: paddd <mp=int6464#5,<mt=int6464#3 | ||
1981 | # asm 2: paddd <mp=%xmm4,<mt=%xmm2 | ||
1982 | paddd %xmm4,%xmm2 | ||
1983 | |||
1984 | # qhasm: mu = mt | ||
1985 | # asm 1: movdqa <mt=int6464#3,>mu=int6464#1 | ||
1986 | # asm 2: movdqa <mt=%xmm2,>mu=%xmm0 | ||
1987 | movdqa %xmm2,%xmm0 | ||
1988 | |||
1989 | # qhasm: uint32323232 mt >>= 25 | ||
1990 | # asm 1: psrld $25,<mt=int6464#3 | ||
1991 | # asm 2: psrld $25,<mt=%xmm2 | ||
1992 | psrld $25,%xmm2 | ||
1993 | |||
1994 | # qhasm: mq ^= mt | ||
1995 | # asm 1: pxor <mt=int6464#3,<mq=int6464#4 | ||
1996 | # asm 2: pxor <mt=%xmm2,<mq=%xmm3 | ||
1997 | pxor %xmm2,%xmm3 | ||
1998 | |||
1999 | # qhasm: uint32323232 mu <<= 7 | ||
2000 | # asm 1: pslld $7,<mu=int6464#1 | ||
2001 | # asm 2: pslld $7,<mu=%xmm0 | ||
2002 | pslld $7,%xmm0 | ||
2003 | |||
2004 | # qhasm: mq ^= mu | ||
2005 | # asm 1: pxor <mu=int6464#1,<mq=int6464#4 | ||
2006 | # asm 2: pxor <mu=%xmm0,<mq=%xmm3 | ||
2007 | pxor %xmm0,%xmm3 | ||
2008 | |||
2009 | # qhasm: z6_stack = mq | ||
2010 | # asm 1: movdqa <mq=int6464#4,>z6_stack=stack128#26 | ||
2011 | # asm 2: movdqa <mq=%xmm3,>z6_stack=432(%esp) | ||
2012 | movdqa %xmm3,432(%esp) | ||
2013 | |||
2014 | # qhasm: mt = mp | ||
2015 | # asm 1: movdqa <mp=int6464#5,>mt=int6464#1 | ||
2016 | # asm 2: movdqa <mp=%xmm4,>mt=%xmm0 | ||
2017 | movdqa %xmm4,%xmm0 | ||
2018 | |||
2019 | # qhasm: uint32323232 mt += mq | ||
2020 | # asm 1: paddd <mq=int6464#4,<mt=int6464#1 | ||
2021 | # asm 2: paddd <mq=%xmm3,<mt=%xmm0 | ||
2022 | paddd %xmm3,%xmm0 | ||
2023 | |||
2024 | # qhasm: mu = mt | ||
2025 | # asm 1: movdqa <mt=int6464#1,>mu=int6464#2 | ||
2026 | # asm 2: movdqa <mt=%xmm0,>mu=%xmm1 | ||
2027 | movdqa %xmm0,%xmm1 | ||
2028 | |||
2029 | # qhasm: uint32323232 mt >>= 23 | ||
2030 | # asm 1: psrld $23,<mt=int6464#1 | ||
2031 | # asm 2: psrld $23,<mt=%xmm0 | ||
2032 | psrld $23,%xmm0 | ||
2033 | |||
2034 | # qhasm: mr ^= mt | ||
2035 | # asm 1: pxor <mt=int6464#1,<mr=int6464#6 | ||
2036 | # asm 2: pxor <mt=%xmm0,<mr=%xmm5 | ||
2037 | pxor %xmm0,%xmm5 | ||
2038 | |||
2039 | # qhasm: uint32323232 mu <<= 9 | ||
2040 | # asm 1: pslld $9,<mu=int6464#2 | ||
2041 | # asm 2: pslld $9,<mu=%xmm1 | ||
2042 | pslld $9,%xmm1 | ||
2043 | |||
2044 | # qhasm: mr ^= mu | ||
2045 | # asm 1: pxor <mu=int6464#2,<mr=int6464#6 | ||
2046 | # asm 2: pxor <mu=%xmm1,<mr=%xmm5 | ||
2047 | pxor %xmm1,%xmm5 | ||
2048 | |||
2049 | # qhasm: z7_stack = mr | ||
2050 | # asm 1: movdqa <mr=int6464#6,>z7_stack=stack128#29 | ||
2051 | # asm 2: movdqa <mr=%xmm5,>z7_stack=480(%esp) | ||
2052 | movdqa %xmm5,480(%esp) | ||
2053 | |||
2054 | # qhasm: uint32323232 mq += mr | ||
2055 | # asm 1: paddd <mr=int6464#6,<mq=int6464#4 | ||
2056 | # asm 2: paddd <mr=%xmm5,<mq=%xmm3 | ||
2057 | paddd %xmm5,%xmm3 | ||
2058 | |||
2059 | # qhasm: mu = mq | ||
2060 | # asm 1: movdqa <mq=int6464#4,>mu=int6464#1 | ||
2061 | # asm 2: movdqa <mq=%xmm3,>mu=%xmm0 | ||
2062 | movdqa %xmm3,%xmm0 | ||
2063 | |||
2064 | # qhasm: uint32323232 mq >>= 19 | ||
2065 | # asm 1: psrld $19,<mq=int6464#4 | ||
2066 | # asm 2: psrld $19,<mq=%xmm3 | ||
2067 | psrld $19,%xmm3 | ||
2068 | |||
2069 | # qhasm: ms ^= mq | ||
2070 | # asm 1: pxor <mq=int6464#4,<ms=int6464#7 | ||
2071 | # asm 2: pxor <mq=%xmm3,<ms=%xmm6 | ||
2072 | pxor %xmm3,%xmm6 | ||
2073 | |||
2074 | # qhasm: uint32323232 mu <<= 13 | ||
2075 | # asm 1: pslld $13,<mu=int6464#1 | ||
2076 | # asm 2: pslld $13,<mu=%xmm0 | ||
2077 | pslld $13,%xmm0 | ||
2078 | |||
2079 | # qhasm: ms ^= mu | ||
2080 | # asm 1: pxor <mu=int6464#1,<ms=int6464#7 | ||
2081 | # asm 2: pxor <mu=%xmm0,<ms=%xmm6 | ||
2082 | pxor %xmm0,%xmm6 | ||
2083 | |||
2084 | # qhasm: t = z9_stack | ||
2085 | # asm 1: movdqa <z9_stack=stack128#32,>t=int6464#3 | ||
2086 | # asm 2: movdqa <z9_stack=528(%esp),>t=%xmm2 | ||
2087 | movdqa 528(%esp),%xmm2 | ||
2088 | |||
2089 | # qhasm: p = z10_stack | ||
2090 | # asm 1: movdqa <z10_stack=stack128#27,>p=int6464#1 | ||
2091 | # asm 2: movdqa <z10_stack=448(%esp),>p=%xmm0 | ||
2092 | movdqa 448(%esp),%xmm0 | ||
2093 | |||
2094 | # qhasm: q = z11_stack | ||
2095 | # asm 1: movdqa <z11_stack=stack128#21,>q=int6464#4 | ||
2096 | # asm 2: movdqa <z11_stack=352(%esp),>q=%xmm3 | ||
2097 | movdqa 352(%esp),%xmm3 | ||
2098 | |||
2099 | # qhasm: r = z8_stack | ||
2100 | # asm 1: movdqa <z8_stack=stack128#34,>r=int6464#2 | ||
2101 | # asm 2: movdqa <z8_stack=560(%esp),>r=%xmm1 | ||
2102 | movdqa 560(%esp),%xmm1 | ||
2103 | |||
2104 | # qhasm: z4_stack = ms | ||
2105 | # asm 1: movdqa <ms=int6464#7,>z4_stack=stack128#34 | ||
2106 | # asm 2: movdqa <ms=%xmm6,>z4_stack=560(%esp) | ||
2107 | movdqa %xmm6,560(%esp) | ||
2108 | |||
2109 | # qhasm: uint32323232 mr += ms | ||
2110 | # asm 1: paddd <ms=int6464#7,<mr=int6464#6 | ||
2111 | # asm 2: paddd <ms=%xmm6,<mr=%xmm5 | ||
2112 | paddd %xmm6,%xmm5 | ||
2113 | |||
2114 | # qhasm: mu = mr | ||
2115 | # asm 1: movdqa <mr=int6464#6,>mu=int6464#7 | ||
2116 | # asm 2: movdqa <mr=%xmm5,>mu=%xmm6 | ||
2117 | movdqa %xmm5,%xmm6 | ||
2118 | |||
2119 | # qhasm: uint32323232 mr >>= 14 | ||
2120 | # asm 1: psrld $14,<mr=int6464#6 | ||
2121 | # asm 2: psrld $14,<mr=%xmm5 | ||
2122 | psrld $14,%xmm5 | ||
2123 | |||
2124 | # qhasm: mp ^= mr | ||
2125 | # asm 1: pxor <mr=int6464#6,<mp=int6464#5 | ||
2126 | # asm 2: pxor <mr=%xmm5,<mp=%xmm4 | ||
2127 | pxor %xmm5,%xmm4 | ||
2128 | |||
2129 | # qhasm: uint32323232 mu <<= 18 | ||
2130 | # asm 1: pslld $18,<mu=int6464#7 | ||
2131 | # asm 2: pslld $18,<mu=%xmm6 | ||
2132 | pslld $18,%xmm6 | ||
2133 | |||
2134 | # qhasm: mp ^= mu | ||
2135 | # asm 1: pxor <mu=int6464#7,<mp=int6464#5 | ||
2136 | # asm 2: pxor <mu=%xmm6,<mp=%xmm4 | ||
2137 | pxor %xmm6,%xmm4 | ||
2138 | |||
2139 | # qhasm: z5_stack = mp | ||
2140 | # asm 1: movdqa <mp=int6464#5,>z5_stack=stack128#21 | ||
2141 | # asm 2: movdqa <mp=%xmm4,>z5_stack=352(%esp) | ||
2142 | movdqa %xmm4,352(%esp) | ||
2143 | |||
2144 | # qhasm: assign xmm0 to p | ||
2145 | |||
2146 | # qhasm: assign xmm1 to r | ||
2147 | |||
2148 | # qhasm: assign xmm2 to t | ||
2149 | |||
2150 | # qhasm: assign xmm3 to q | ||
2151 | |||
2152 | # qhasm: s = t | ||
2153 | # asm 1: movdqa <t=int6464#3,>s=int6464#7 | ||
2154 | # asm 2: movdqa <t=%xmm2,>s=%xmm6 | ||
2155 | movdqa %xmm2,%xmm6 | ||
2156 | |||
2157 | # qhasm: uint32323232 t += p | ||
2158 | # asm 1: paddd <p=int6464#1,<t=int6464#3 | ||
2159 | # asm 2: paddd <p=%xmm0,<t=%xmm2 | ||
2160 | paddd %xmm0,%xmm2 | ||
2161 | |||
2162 | # qhasm: u = t | ||
2163 | # asm 1: movdqa <t=int6464#3,>u=int6464#5 | ||
2164 | # asm 2: movdqa <t=%xmm2,>u=%xmm4 | ||
2165 | movdqa %xmm2,%xmm4 | ||
2166 | |||
2167 | # qhasm: uint32323232 t >>= 25 | ||
2168 | # asm 1: psrld $25,<t=int6464#3 | ||
2169 | # asm 2: psrld $25,<t=%xmm2 | ||
2170 | psrld $25,%xmm2 | ||
2171 | |||
2172 | # qhasm: q ^= t | ||
2173 | # asm 1: pxor <t=int6464#3,<q=int6464#4 | ||
2174 | # asm 2: pxor <t=%xmm2,<q=%xmm3 | ||
2175 | pxor %xmm2,%xmm3 | ||
2176 | |||
2177 | # qhasm: uint32323232 u <<= 7 | ||
2178 | # asm 1: pslld $7,<u=int6464#5 | ||
2179 | # asm 2: pslld $7,<u=%xmm4 | ||
2180 | pslld $7,%xmm4 | ||
2181 | |||
2182 | # qhasm: q ^= u | ||
2183 | # asm 1: pxor <u=int6464#5,<q=int6464#4 | ||
2184 | # asm 2: pxor <u=%xmm4,<q=%xmm3 | ||
2185 | pxor %xmm4,%xmm3 | ||
2186 | |||
2187 | # qhasm: z11_stack = q | ||
2188 | # asm 1: movdqa <q=int6464#4,>z11_stack=stack128#27 | ||
2189 | # asm 2: movdqa <q=%xmm3,>z11_stack=448(%esp) | ||
2190 | movdqa %xmm3,448(%esp) | ||
2191 | |||
2192 | # qhasm: t = p | ||
2193 | # asm 1: movdqa <p=int6464#1,>t=int6464#3 | ||
2194 | # asm 2: movdqa <p=%xmm0,>t=%xmm2 | ||
2195 | movdqa %xmm0,%xmm2 | ||
2196 | |||
2197 | # qhasm: uint32323232 t += q | ||
2198 | # asm 1: paddd <q=int6464#4,<t=int6464#3 | ||
2199 | # asm 2: paddd <q=%xmm3,<t=%xmm2 | ||
2200 | paddd %xmm3,%xmm2 | ||
2201 | |||
2202 | # qhasm: u = t | ||
2203 | # asm 1: movdqa <t=int6464#3,>u=int6464#5 | ||
2204 | # asm 2: movdqa <t=%xmm2,>u=%xmm4 | ||
2205 | movdqa %xmm2,%xmm4 | ||
2206 | |||
2207 | # qhasm: uint32323232 t >>= 23 | ||
2208 | # asm 1: psrld $23,<t=int6464#3 | ||
2209 | # asm 2: psrld $23,<t=%xmm2 | ||
2210 | psrld $23,%xmm2 | ||
2211 | |||
2212 | # qhasm: r ^= t | ||
2213 | # asm 1: pxor <t=int6464#3,<r=int6464#2 | ||
2214 | # asm 2: pxor <t=%xmm2,<r=%xmm1 | ||
2215 | pxor %xmm2,%xmm1 | ||
2216 | |||
2217 | # qhasm: uint32323232 u <<= 9 | ||
2218 | # asm 1: pslld $9,<u=int6464#5 | ||
2219 | # asm 2: pslld $9,<u=%xmm4 | ||
2220 | pslld $9,%xmm4 | ||
2221 | |||
2222 | # qhasm: r ^= u | ||
2223 | # asm 1: pxor <u=int6464#5,<r=int6464#2 | ||
2224 | # asm 2: pxor <u=%xmm4,<r=%xmm1 | ||
2225 | pxor %xmm4,%xmm1 | ||
2226 | |||
2227 | # qhasm: z8_stack = r | ||
2228 | # asm 1: movdqa <r=int6464#2,>z8_stack=stack128#37 | ||
2229 | # asm 2: movdqa <r=%xmm1,>z8_stack=608(%esp) | ||
2230 | movdqa %xmm1,608(%esp) | ||
2231 | |||
2232 | # qhasm: uint32323232 q += r | ||
2233 | # asm 1: paddd <r=int6464#2,<q=int6464#4 | ||
2234 | # asm 2: paddd <r=%xmm1,<q=%xmm3 | ||
2235 | paddd %xmm1,%xmm3 | ||
2236 | |||
2237 | # qhasm: u = q | ||
2238 | # asm 1: movdqa <q=int6464#4,>u=int6464#3 | ||
2239 | # asm 2: movdqa <q=%xmm3,>u=%xmm2 | ||
2240 | movdqa %xmm3,%xmm2 | ||
2241 | |||
2242 | # qhasm: uint32323232 q >>= 19 | ||
2243 | # asm 1: psrld $19,<q=int6464#4 | ||
2244 | # asm 2: psrld $19,<q=%xmm3 | ||
2245 | psrld $19,%xmm3 | ||
2246 | |||
2247 | # qhasm: s ^= q | ||
2248 | # asm 1: pxor <q=int6464#4,<s=int6464#7 | ||
2249 | # asm 2: pxor <q=%xmm3,<s=%xmm6 | ||
2250 | pxor %xmm3,%xmm6 | ||
2251 | |||
2252 | # qhasm: uint32323232 u <<= 13 | ||
2253 | # asm 1: pslld $13,<u=int6464#3 | ||
2254 | # asm 2: pslld $13,<u=%xmm2 | ||
2255 | pslld $13,%xmm2 | ||
2256 | |||
2257 | # qhasm: s ^= u | ||
2258 | # asm 1: pxor <u=int6464#3,<s=int6464#7 | ||
2259 | # asm 2: pxor <u=%xmm2,<s=%xmm6 | ||
2260 | pxor %xmm2,%xmm6 | ||
2261 | |||
2262 | # qhasm: mt = z14_stack | ||
2263 | # asm 1: movdqa <z14_stack=stack128#36,>mt=int6464#3 | ||
2264 | # asm 2: movdqa <z14_stack=592(%esp),>mt=%xmm2 | ||
2265 | movdqa 592(%esp),%xmm2 | ||
2266 | |||
2267 | # qhasm: mp = z15_stack | ||
2268 | # asm 1: movdqa <z15_stack=stack128#22,>mp=int6464#5 | ||
2269 | # asm 2: movdqa <z15_stack=368(%esp),>mp=%xmm4 | ||
2270 | movdqa 368(%esp),%xmm4 | ||
2271 | |||
2272 | # qhasm: mq = z12_stack | ||
2273 | # asm 1: movdqa <z12_stack=stack128#30,>mq=int6464#4 | ||
2274 | # asm 2: movdqa <z12_stack=496(%esp),>mq=%xmm3 | ||
2275 | movdqa 496(%esp),%xmm3 | ||
2276 | |||
2277 | # qhasm: mr = z13_stack | ||
2278 | # asm 1: movdqa <z13_stack=stack128#35,>mr=int6464#6 | ||
2279 | # asm 2: movdqa <z13_stack=576(%esp),>mr=%xmm5 | ||
2280 | movdqa 576(%esp),%xmm5 | ||
2281 | |||
2282 | # qhasm: z9_stack = s | ||
2283 | # asm 1: movdqa <s=int6464#7,>z9_stack=stack128#32 | ||
2284 | # asm 2: movdqa <s=%xmm6,>z9_stack=528(%esp) | ||
2285 | movdqa %xmm6,528(%esp) | ||
2286 | |||
2287 | # qhasm: uint32323232 r += s | ||
2288 | # asm 1: paddd <s=int6464#7,<r=int6464#2 | ||
2289 | # asm 2: paddd <s=%xmm6,<r=%xmm1 | ||
2290 | paddd %xmm6,%xmm1 | ||
2291 | |||
2292 | # qhasm: u = r | ||
2293 | # asm 1: movdqa <r=int6464#2,>u=int6464#7 | ||
2294 | # asm 2: movdqa <r=%xmm1,>u=%xmm6 | ||
2295 | movdqa %xmm1,%xmm6 | ||
2296 | |||
2297 | # qhasm: uint32323232 r >>= 14 | ||
2298 | # asm 1: psrld $14,<r=int6464#2 | ||
2299 | # asm 2: psrld $14,<r=%xmm1 | ||
2300 | psrld $14,%xmm1 | ||
2301 | |||
2302 | # qhasm: p ^= r | ||
2303 | # asm 1: pxor <r=int6464#2,<p=int6464#1 | ||
2304 | # asm 2: pxor <r=%xmm1,<p=%xmm0 | ||
2305 | pxor %xmm1,%xmm0 | ||
2306 | |||
2307 | # qhasm: uint32323232 u <<= 18 | ||
2308 | # asm 1: pslld $18,<u=int6464#7 | ||
2309 | # asm 2: pslld $18,<u=%xmm6 | ||
2310 | pslld $18,%xmm6 | ||
2311 | |||
2312 | # qhasm: p ^= u | ||
2313 | # asm 1: pxor <u=int6464#7,<p=int6464#1 | ||
2314 | # asm 2: pxor <u=%xmm6,<p=%xmm0 | ||
2315 | pxor %xmm6,%xmm0 | ||
2316 | |||
2317 | # qhasm: z10_stack = p | ||
2318 | # asm 1: movdqa <p=int6464#1,>z10_stack=stack128#22 | ||
2319 | # asm 2: movdqa <p=%xmm0,>z10_stack=368(%esp) | ||
2320 | movdqa %xmm0,368(%esp) | ||
2321 | |||
2322 | # qhasm: assign xmm2 to mt | ||
2323 | |||
2324 | # qhasm: assign xmm3 to mq | ||
2325 | |||
2326 | # qhasm: assign xmm4 to mp | ||
2327 | |||
2328 | # qhasm: assign xmm5 to mr | ||
2329 | |||
2330 | # qhasm: ms = mt | ||
2331 | # asm 1: movdqa <mt=int6464#3,>ms=int6464#7 | ||
2332 | # asm 2: movdqa <mt=%xmm2,>ms=%xmm6 | ||
2333 | movdqa %xmm2,%xmm6 | ||
2334 | |||
2335 | # qhasm: uint32323232 mt += mp | ||
2336 | # asm 1: paddd <mp=int6464#5,<mt=int6464#3 | ||
2337 | # asm 2: paddd <mp=%xmm4,<mt=%xmm2 | ||
2338 | paddd %xmm4,%xmm2 | ||
2339 | |||
2340 | # qhasm: mu = mt | ||
2341 | # asm 1: movdqa <mt=int6464#3,>mu=int6464#1 | ||
2342 | # asm 2: movdqa <mt=%xmm2,>mu=%xmm0 | ||
2343 | movdqa %xmm2,%xmm0 | ||
2344 | |||
2345 | # qhasm: uint32323232 mt >>= 25 | ||
2346 | # asm 1: psrld $25,<mt=int6464#3 | ||
2347 | # asm 2: psrld $25,<mt=%xmm2 | ||
2348 | psrld $25,%xmm2 | ||
2349 | |||
2350 | # qhasm: mq ^= mt | ||
2351 | # asm 1: pxor <mt=int6464#3,<mq=int6464#4 | ||
2352 | # asm 2: pxor <mt=%xmm2,<mq=%xmm3 | ||
2353 | pxor %xmm2,%xmm3 | ||
2354 | |||
2355 | # qhasm: uint32323232 mu <<= 7 | ||
2356 | # asm 1: pslld $7,<mu=int6464#1 | ||
2357 | # asm 2: pslld $7,<mu=%xmm0 | ||
2358 | pslld $7,%xmm0 | ||
2359 | |||
2360 | # qhasm: mq ^= mu | ||
2361 | # asm 1: pxor <mu=int6464#1,<mq=int6464#4 | ||
2362 | # asm 2: pxor <mu=%xmm0,<mq=%xmm3 | ||
2363 | pxor %xmm0,%xmm3 | ||
2364 | |||
2365 | # qhasm: z12_stack = mq | ||
2366 | # asm 1: movdqa <mq=int6464#4,>z12_stack=stack128#35 | ||
2367 | # asm 2: movdqa <mq=%xmm3,>z12_stack=576(%esp) | ||
2368 | movdqa %xmm3,576(%esp) | ||
2369 | |||
2370 | # qhasm: mt = mp | ||
2371 | # asm 1: movdqa <mp=int6464#5,>mt=int6464#1 | ||
2372 | # asm 2: movdqa <mp=%xmm4,>mt=%xmm0 | ||
2373 | movdqa %xmm4,%xmm0 | ||
2374 | |||
2375 | # qhasm: uint32323232 mt += mq | ||
2376 | # asm 1: paddd <mq=int6464#4,<mt=int6464#1 | ||
2377 | # asm 2: paddd <mq=%xmm3,<mt=%xmm0 | ||
2378 | paddd %xmm3,%xmm0 | ||
2379 | |||
2380 | # qhasm: mu = mt | ||
2381 | # asm 1: movdqa <mt=int6464#1,>mu=int6464#2 | ||
2382 | # asm 2: movdqa <mt=%xmm0,>mu=%xmm1 | ||
2383 | movdqa %xmm0,%xmm1 | ||
2384 | |||
2385 | # qhasm: uint32323232 mt >>= 23 | ||
2386 | # asm 1: psrld $23,<mt=int6464#1 | ||
2387 | # asm 2: psrld $23,<mt=%xmm0 | ||
2388 | psrld $23,%xmm0 | ||
2389 | |||
2390 | # qhasm: mr ^= mt | ||
2391 | # asm 1: pxor <mt=int6464#1,<mr=int6464#6 | ||
2392 | # asm 2: pxor <mt=%xmm0,<mr=%xmm5 | ||
2393 | pxor %xmm0,%xmm5 | ||
2394 | |||
2395 | # qhasm: uint32323232 mu <<= 9 | ||
2396 | # asm 1: pslld $9,<mu=int6464#2 | ||
2397 | # asm 2: pslld $9,<mu=%xmm1 | ||
2398 | pslld $9,%xmm1 | ||
2399 | |||
2400 | # qhasm: mr ^= mu | ||
2401 | # asm 1: pxor <mu=int6464#2,<mr=int6464#6 | ||
2402 | # asm 2: pxor <mu=%xmm1,<mr=%xmm5 | ||
2403 | pxor %xmm1,%xmm5 | ||
2404 | |||
2405 | # qhasm: z13_stack = mr | ||
2406 | # asm 1: movdqa <mr=int6464#6,>z13_stack=stack128#30 | ||
2407 | # asm 2: movdqa <mr=%xmm5,>z13_stack=496(%esp) | ||
2408 | movdqa %xmm5,496(%esp) | ||
2409 | |||
2410 | # qhasm: uint32323232 mq += mr | ||
2411 | # asm 1: paddd <mr=int6464#6,<mq=int6464#4 | ||
2412 | # asm 2: paddd <mr=%xmm5,<mq=%xmm3 | ||
2413 | paddd %xmm5,%xmm3 | ||
2414 | |||
2415 | # qhasm: mu = mq | ||
2416 | # asm 1: movdqa <mq=int6464#4,>mu=int6464#1 | ||
2417 | # asm 2: movdqa <mq=%xmm3,>mu=%xmm0 | ||
2418 | movdqa %xmm3,%xmm0 | ||
2419 | |||
2420 | # qhasm: uint32323232 mq >>= 19 | ||
2421 | # asm 1: psrld $19,<mq=int6464#4 | ||
2422 | # asm 2: psrld $19,<mq=%xmm3 | ||
2423 | psrld $19,%xmm3 | ||
2424 | |||
2425 | # qhasm: ms ^= mq | ||
2426 | # asm 1: pxor <mq=int6464#4,<ms=int6464#7 | ||
2427 | # asm 2: pxor <mq=%xmm3,<ms=%xmm6 | ||
2428 | pxor %xmm3,%xmm6 | ||
2429 | |||
2430 | # qhasm: uint32323232 mu <<= 13 | ||
2431 | # asm 1: pslld $13,<mu=int6464#1 | ||
2432 | # asm 2: pslld $13,<mu=%xmm0 | ||
2433 | pslld $13,%xmm0 | ||
2434 | |||
2435 | # qhasm: ms ^= mu | ||
2436 | # asm 1: pxor <mu=int6464#1,<ms=int6464#7 | ||
2437 | # asm 2: pxor <mu=%xmm0,<ms=%xmm6 | ||
2438 | pxor %xmm0,%xmm6 | ||
2439 | |||
2440 | # qhasm: t = z12_stack | ||
2441 | # asm 1: movdqa <z12_stack=stack128#35,>t=int6464#3 | ||
2442 | # asm 2: movdqa <z12_stack=576(%esp),>t=%xmm2 | ||
2443 | movdqa 576(%esp),%xmm2 | ||
2444 | |||
2445 | # qhasm: p = z0_stack | ||
2446 | # asm 1: movdqa <z0_stack=stack128#33,>p=int6464#1 | ||
2447 | # asm 2: movdqa <z0_stack=544(%esp),>p=%xmm0 | ||
2448 | movdqa 544(%esp),%xmm0 | ||
2449 | |||
2450 | # qhasm: q = z4_stack | ||
2451 | # asm 1: movdqa <z4_stack=stack128#34,>q=int6464#4 | ||
2452 | # asm 2: movdqa <z4_stack=560(%esp),>q=%xmm3 | ||
2453 | movdqa 560(%esp),%xmm3 | ||
2454 | |||
2455 | # qhasm: r = z8_stack | ||
2456 | # asm 1: movdqa <z8_stack=stack128#37,>r=int6464#2 | ||
2457 | # asm 2: movdqa <z8_stack=608(%esp),>r=%xmm1 | ||
2458 | movdqa 608(%esp),%xmm1 | ||
2459 | |||
2460 | # qhasm: z14_stack = ms | ||
2461 | # asm 1: movdqa <ms=int6464#7,>z14_stack=stack128#24 | ||
2462 | # asm 2: movdqa <ms=%xmm6,>z14_stack=400(%esp) | ||
2463 | movdqa %xmm6,400(%esp) | ||
2464 | |||
2465 | # qhasm: uint32323232 mr += ms | ||
2466 | # asm 1: paddd <ms=int6464#7,<mr=int6464#6 | ||
2467 | # asm 2: paddd <ms=%xmm6,<mr=%xmm5 | ||
2468 | paddd %xmm6,%xmm5 | ||
2469 | |||
2470 | # qhasm: mu = mr | ||
2471 | # asm 1: movdqa <mr=int6464#6,>mu=int6464#7 | ||
2472 | # asm 2: movdqa <mr=%xmm5,>mu=%xmm6 | ||
2473 | movdqa %xmm5,%xmm6 | ||
2474 | |||
2475 | # qhasm: uint32323232 mr >>= 14 | ||
2476 | # asm 1: psrld $14,<mr=int6464#6 | ||
2477 | # asm 2: psrld $14,<mr=%xmm5 | ||
2478 | psrld $14,%xmm5 | ||
2479 | |||
2480 | # qhasm: mp ^= mr | ||
2481 | # asm 1: pxor <mr=int6464#6,<mp=int6464#5 | ||
2482 | # asm 2: pxor <mr=%xmm5,<mp=%xmm4 | ||
2483 | pxor %xmm5,%xmm4 | ||
2484 | |||
2485 | # qhasm: uint32323232 mu <<= 18 | ||
2486 | # asm 1: pslld $18,<mu=int6464#7 | ||
2487 | # asm 2: pslld $18,<mu=%xmm6 | ||
2488 | pslld $18,%xmm6 | ||
2489 | |||
2490 | # qhasm: mp ^= mu | ||
2491 | # asm 1: pxor <mu=int6464#7,<mp=int6464#5 | ||
2492 | # asm 2: pxor <mu=%xmm6,<mp=%xmm4 | ||
2493 | pxor %xmm6,%xmm4 | ||
2494 | |||
2495 | # qhasm: z15_stack = mp | ||
2496 | # asm 1: movdqa <mp=int6464#5,>z15_stack=stack128#23 | ||
2497 | # asm 2: movdqa <mp=%xmm4,>z15_stack=384(%esp) | ||
2498 | movdqa %xmm4,384(%esp) | ||
2499 | |||
2500 | # qhasm: unsigned>? i -= 2 | ||
2501 | # asm 1: sub $2,<i=int32#1 | ||
2502 | # asm 2: sub $2,<i=%eax | ||
2503 | sub $2,%eax | ||
2504 | # comment:fp stack unchanged by jump | ||
2505 | |||
2506 | # qhasm: goto mainloop1 if unsigned> | ||
2507 | ja ._mainloop1 | ||
2508 | |||
2509 | # qhasm: out = out_stack | ||
2510 | # asm 1: movl <out_stack=stack32#6,>out=int32#6 | ||
2511 | # asm 2: movl <out_stack=20(%esp),>out=%edi | ||
2512 | movl 20(%esp),%edi | ||
2513 | |||
2514 | # qhasm: z0 = z0_stack | ||
2515 | # asm 1: movdqa <z0_stack=stack128#33,>z0=int6464#1 | ||
2516 | # asm 2: movdqa <z0_stack=544(%esp),>z0=%xmm0 | ||
2517 | movdqa 544(%esp),%xmm0 | ||
2518 | |||
2519 | # qhasm: z1 = z1_stack | ||
2520 | # asm 1: movdqa <z1_stack=stack128#28,>z1=int6464#2 | ||
2521 | # asm 2: movdqa <z1_stack=464(%esp),>z1=%xmm1 | ||
2522 | movdqa 464(%esp),%xmm1 | ||
2523 | |||
2524 | # qhasm: z2 = z2_stack | ||
2525 | # asm 1: movdqa <z2_stack=stack128#31,>z2=int6464#3 | ||
2526 | # asm 2: movdqa <z2_stack=512(%esp),>z2=%xmm2 | ||
2527 | movdqa 512(%esp),%xmm2 | ||
2528 | |||
2529 | # qhasm: z3 = z3_stack | ||
2530 | # asm 1: movdqa <z3_stack=stack128#25,>z3=int6464#4 | ||
2531 | # asm 2: movdqa <z3_stack=416(%esp),>z3=%xmm3 | ||
2532 | movdqa 416(%esp),%xmm3 | ||
2533 | |||
2534 | # qhasm: uint32323232 z0 += orig0 | ||
2535 | # asm 1: paddd <orig0=stack128#8,<z0=int6464#1 | ||
2536 | # asm 2: paddd <orig0=144(%esp),<z0=%xmm0 | ||
2537 | paddd 144(%esp),%xmm0 | ||
2538 | |||
2539 | # qhasm: uint32323232 z1 += orig1 | ||
2540 | # asm 1: paddd <orig1=stack128#12,<z1=int6464#2 | ||
2541 | # asm 2: paddd <orig1=208(%esp),<z1=%xmm1 | ||
2542 | paddd 208(%esp),%xmm1 | ||
2543 | |||
2544 | # qhasm: uint32323232 z2 += orig2 | ||
2545 | # asm 1: paddd <orig2=stack128#15,<z2=int6464#3 | ||
2546 | # asm 2: paddd <orig2=256(%esp),<z2=%xmm2 | ||
2547 | paddd 256(%esp),%xmm2 | ||
2548 | |||
2549 | # qhasm: uint32323232 z3 += orig3 | ||
2550 | # asm 1: paddd <orig3=stack128#18,<z3=int6464#4 | ||
2551 | # asm 2: paddd <orig3=304(%esp),<z3=%xmm3 | ||
2552 | paddd 304(%esp),%xmm3 | ||
2553 | |||
2554 | # qhasm: in0 = z0 | ||
2555 | # asm 1: movd <z0=int6464#1,>in0=int32#1 | ||
2556 | # asm 2: movd <z0=%xmm0,>in0=%eax | ||
2557 | movd %xmm0,%eax | ||
2558 | |||
2559 | # qhasm: in1 = z1 | ||
2560 | # asm 1: movd <z1=int6464#2,>in1=int32#2 | ||
2561 | # asm 2: movd <z1=%xmm1,>in1=%ecx | ||
2562 | movd %xmm1,%ecx | ||
2563 | |||
2564 | # qhasm: in2 = z2 | ||
2565 | # asm 1: movd <z2=int6464#3,>in2=int32#3 | ||
2566 | # asm 2: movd <z2=%xmm2,>in2=%edx | ||
2567 | movd %xmm2,%edx | ||
2568 | |||
2569 | # qhasm: in3 = z3 | ||
2570 | # asm 1: movd <z3=int6464#4,>in3=int32#4 | ||
2571 | # asm 2: movd <z3=%xmm3,>in3=%ebx | ||
2572 | movd %xmm3,%ebx | ||
2573 | |||
2574 | # qhasm: z0 <<<= 96 | ||
2575 | # asm 1: pshufd $0x39,<z0=int6464#1,<z0=int6464#1 | ||
2576 | # asm 2: pshufd $0x39,<z0=%xmm0,<z0=%xmm0 | ||
2577 | pshufd $0x39,%xmm0,%xmm0 | ||
2578 | |||
2579 | # qhasm: z1 <<<= 96 | ||
2580 | # asm 1: pshufd $0x39,<z1=int6464#2,<z1=int6464#2 | ||
2581 | # asm 2: pshufd $0x39,<z1=%xmm1,<z1=%xmm1 | ||
2582 | pshufd $0x39,%xmm1,%xmm1 | ||
2583 | |||
2584 | # qhasm: z2 <<<= 96 | ||
2585 | # asm 1: pshufd $0x39,<z2=int6464#3,<z2=int6464#3 | ||
2586 | # asm 2: pshufd $0x39,<z2=%xmm2,<z2=%xmm2 | ||
2587 | pshufd $0x39,%xmm2,%xmm2 | ||
2588 | |||
2589 | # qhasm: z3 <<<= 96 | ||
2590 | # asm 1: pshufd $0x39,<z3=int6464#4,<z3=int6464#4 | ||
2591 | # asm 2: pshufd $0x39,<z3=%xmm3,<z3=%xmm3 | ||
2592 | pshufd $0x39,%xmm3,%xmm3 | ||
2593 | |||
2594 | # qhasm: in0 ^= *(uint32 *) (m + 0) | ||
2595 | # asm 1: xorl 0(<m=int32#5),<in0=int32#1 | ||
2596 | # asm 2: xorl 0(<m=%esi),<in0=%eax | ||
2597 | xorl 0(%esi),%eax | ||
2598 | |||
2599 | # qhasm: in1 ^= *(uint32 *) (m + 4) | ||
2600 | # asm 1: xorl 4(<m=int32#5),<in1=int32#2 | ||
2601 | # asm 2: xorl 4(<m=%esi),<in1=%ecx | ||
2602 | xorl 4(%esi),%ecx | ||
2603 | |||
2604 | # qhasm: in2 ^= *(uint32 *) (m + 8) | ||
2605 | # asm 1: xorl 8(<m=int32#5),<in2=int32#3 | ||
2606 | # asm 2: xorl 8(<m=%esi),<in2=%edx | ||
2607 | xorl 8(%esi),%edx | ||
2608 | |||
2609 | # qhasm: in3 ^= *(uint32 *) (m + 12) | ||
2610 | # asm 1: xorl 12(<m=int32#5),<in3=int32#4 | ||
2611 | # asm 2: xorl 12(<m=%esi),<in3=%ebx | ||
2612 | xorl 12(%esi),%ebx | ||
2613 | |||
2614 | # qhasm: *(uint32 *) (out + 0) = in0 | ||
2615 | # asm 1: movl <in0=int32#1,0(<out=int32#6) | ||
2616 | # asm 2: movl <in0=%eax,0(<out=%edi) | ||
2617 | movl %eax,0(%edi) | ||
2618 | |||
2619 | # qhasm: *(uint32 *) (out + 4) = in1 | ||
2620 | # asm 1: movl <in1=int32#2,4(<out=int32#6) | ||
2621 | # asm 2: movl <in1=%ecx,4(<out=%edi) | ||
2622 | movl %ecx,4(%edi) | ||
2623 | |||
2624 | # qhasm: *(uint32 *) (out + 8) = in2 | ||
2625 | # asm 1: movl <in2=int32#3,8(<out=int32#6) | ||
2626 | # asm 2: movl <in2=%edx,8(<out=%edi) | ||
2627 | movl %edx,8(%edi) | ||
2628 | |||
2629 | # qhasm: *(uint32 *) (out + 12) = in3 | ||
2630 | # asm 1: movl <in3=int32#4,12(<out=int32#6) | ||
2631 | # asm 2: movl <in3=%ebx,12(<out=%edi) | ||
2632 | movl %ebx,12(%edi) | ||
2633 | |||
2634 | # qhasm: in0 = z0 | ||
2635 | # asm 1: movd <z0=int6464#1,>in0=int32#1 | ||
2636 | # asm 2: movd <z0=%xmm0,>in0=%eax | ||
2637 | movd %xmm0,%eax | ||
2638 | |||
2639 | # qhasm: in1 = z1 | ||
2640 | # asm 1: movd <z1=int6464#2,>in1=int32#2 | ||
2641 | # asm 2: movd <z1=%xmm1,>in1=%ecx | ||
2642 | movd %xmm1,%ecx | ||
2643 | |||
2644 | # qhasm: in2 = z2 | ||
2645 | # asm 1: movd <z2=int6464#3,>in2=int32#3 | ||
2646 | # asm 2: movd <z2=%xmm2,>in2=%edx | ||
2647 | movd %xmm2,%edx | ||
2648 | |||
2649 | # qhasm: in3 = z3 | ||
2650 | # asm 1: movd <z3=int6464#4,>in3=int32#4 | ||
2651 | # asm 2: movd <z3=%xmm3,>in3=%ebx | ||
2652 | movd %xmm3,%ebx | ||
2653 | |||
2654 | # qhasm: z0 <<<= 96 | ||
2655 | # asm 1: pshufd $0x39,<z0=int6464#1,<z0=int6464#1 | ||
2656 | # asm 2: pshufd $0x39,<z0=%xmm0,<z0=%xmm0 | ||
2657 | pshufd $0x39,%xmm0,%xmm0 | ||
2658 | |||
2659 | # qhasm: z1 <<<= 96 | ||
2660 | # asm 1: pshufd $0x39,<z1=int6464#2,<z1=int6464#2 | ||
2661 | # asm 2: pshufd $0x39,<z1=%xmm1,<z1=%xmm1 | ||
2662 | pshufd $0x39,%xmm1,%xmm1 | ||
2663 | |||
2664 | # qhasm: z2 <<<= 96 | ||
2665 | # asm 1: pshufd $0x39,<z2=int6464#3,<z2=int6464#3 | ||
2666 | # asm 2: pshufd $0x39,<z2=%xmm2,<z2=%xmm2 | ||
2667 | pshufd $0x39,%xmm2,%xmm2 | ||
2668 | |||
2669 | # qhasm: z3 <<<= 96 | ||
2670 | # asm 1: pshufd $0x39,<z3=int6464#4,<z3=int6464#4 | ||
2671 | # asm 2: pshufd $0x39,<z3=%xmm3,<z3=%xmm3 | ||
2672 | pshufd $0x39,%xmm3,%xmm3 | ||
2673 | |||
2674 | # qhasm: in0 ^= *(uint32 *) (m + 64) | ||
2675 | # asm 1: xorl 64(<m=int32#5),<in0=int32#1 | ||
2676 | # asm 2: xorl 64(<m=%esi),<in0=%eax | ||
2677 | xorl 64(%esi),%eax | ||
2678 | |||
2679 | # qhasm: in1 ^= *(uint32 *) (m + 68) | ||
2680 | # asm 1: xorl 68(<m=int32#5),<in1=int32#2 | ||
2681 | # asm 2: xorl 68(<m=%esi),<in1=%ecx | ||
2682 | xorl 68(%esi),%ecx | ||
2683 | |||
2684 | # qhasm: in2 ^= *(uint32 *) (m + 72) | ||
2685 | # asm 1: xorl 72(<m=int32#5),<in2=int32#3 | ||
2686 | # asm 2: xorl 72(<m=%esi),<in2=%edx | ||
2687 | xorl 72(%esi),%edx | ||
2688 | |||
2689 | # qhasm: in3 ^= *(uint32 *) (m + 76) | ||
2690 | # asm 1: xorl 76(<m=int32#5),<in3=int32#4 | ||
2691 | # asm 2: xorl 76(<m=%esi),<in3=%ebx | ||
2692 | xorl 76(%esi),%ebx | ||
2693 | |||
2694 | # qhasm: *(uint32 *) (out + 64) = in0 | ||
2695 | # asm 1: movl <in0=int32#1,64(<out=int32#6) | ||
2696 | # asm 2: movl <in0=%eax,64(<out=%edi) | ||
2697 | movl %eax,64(%edi) | ||
2698 | |||
2699 | # qhasm: *(uint32 *) (out + 68) = in1 | ||
2700 | # asm 1: movl <in1=int32#2,68(<out=int32#6) | ||
2701 | # asm 2: movl <in1=%ecx,68(<out=%edi) | ||
2702 | movl %ecx,68(%edi) | ||
2703 | |||
2704 | # qhasm: *(uint32 *) (out + 72) = in2 | ||
2705 | # asm 1: movl <in2=int32#3,72(<out=int32#6) | ||
2706 | # asm 2: movl <in2=%edx,72(<out=%edi) | ||
2707 | movl %edx,72(%edi) | ||
2708 | |||
2709 | # qhasm: *(uint32 *) (out + 76) = in3 | ||
2710 | # asm 1: movl <in3=int32#4,76(<out=int32#6) | ||
2711 | # asm 2: movl <in3=%ebx,76(<out=%edi) | ||
2712 | movl %ebx,76(%edi) | ||
2713 | |||
2714 | # qhasm: in0 = z0 | ||
2715 | # asm 1: movd <z0=int6464#1,>in0=int32#1 | ||
2716 | # asm 2: movd <z0=%xmm0,>in0=%eax | ||
2717 | movd %xmm0,%eax | ||
2718 | |||
2719 | # qhasm: in1 = z1 | ||
2720 | # asm 1: movd <z1=int6464#2,>in1=int32#2 | ||
2721 | # asm 2: movd <z1=%xmm1,>in1=%ecx | ||
2722 | movd %xmm1,%ecx | ||
2723 | |||
2724 | # qhasm: in2 = z2 | ||
2725 | # asm 1: movd <z2=int6464#3,>in2=int32#3 | ||
2726 | # asm 2: movd <z2=%xmm2,>in2=%edx | ||
2727 | movd %xmm2,%edx | ||
2728 | |||
2729 | # qhasm: in3 = z3 | ||
2730 | # asm 1: movd <z3=int6464#4,>in3=int32#4 | ||
2731 | # asm 2: movd <z3=%xmm3,>in3=%ebx | ||
2732 | movd %xmm3,%ebx | ||
2733 | |||
2734 | # qhasm: z0 <<<= 96 | ||
2735 | # asm 1: pshufd $0x39,<z0=int6464#1,<z0=int6464#1 | ||
2736 | # asm 2: pshufd $0x39,<z0=%xmm0,<z0=%xmm0 | ||
2737 | pshufd $0x39,%xmm0,%xmm0 | ||
2738 | |||
2739 | # qhasm: z1 <<<= 96 | ||
2740 | # asm 1: pshufd $0x39,<z1=int6464#2,<z1=int6464#2 | ||
2741 | # asm 2: pshufd $0x39,<z1=%xmm1,<z1=%xmm1 | ||
2742 | pshufd $0x39,%xmm1,%xmm1 | ||
2743 | |||
2744 | # qhasm: z2 <<<= 96 | ||
2745 | # asm 1: pshufd $0x39,<z2=int6464#3,<z2=int6464#3 | ||
2746 | # asm 2: pshufd $0x39,<z2=%xmm2,<z2=%xmm2 | ||
2747 | pshufd $0x39,%xmm2,%xmm2 | ||
2748 | |||
2749 | # qhasm: z3 <<<= 96 | ||
2750 | # asm 1: pshufd $0x39,<z3=int6464#4,<z3=int6464#4 | ||
2751 | # asm 2: pshufd $0x39,<z3=%xmm3,<z3=%xmm3 | ||
2752 | pshufd $0x39,%xmm3,%xmm3 | ||
2753 | |||
2754 | # qhasm: in0 ^= *(uint32 *) (m + 128) | ||
2755 | # asm 1: xorl 128(<m=int32#5),<in0=int32#1 | ||
2756 | # asm 2: xorl 128(<m=%esi),<in0=%eax | ||
2757 | xorl 128(%esi),%eax | ||
2758 | |||
2759 | # qhasm: in1 ^= *(uint32 *) (m + 132) | ||
2760 | # asm 1: xorl 132(<m=int32#5),<in1=int32#2 | ||
2761 | # asm 2: xorl 132(<m=%esi),<in1=%ecx | ||
2762 | xorl 132(%esi),%ecx | ||
2763 | |||
2764 | # qhasm: in2 ^= *(uint32 *) (m + 136) | ||
2765 | # asm 1: xorl 136(<m=int32#5),<in2=int32#3 | ||
2766 | # asm 2: xorl 136(<m=%esi),<in2=%edx | ||
2767 | xorl 136(%esi),%edx | ||
2768 | |||
2769 | # qhasm: in3 ^= *(uint32 *) (m + 140) | ||
2770 | # asm 1: xorl 140(<m=int32#5),<in3=int32#4 | ||
2771 | # asm 2: xorl 140(<m=%esi),<in3=%ebx | ||
2772 | xorl 140(%esi),%ebx | ||
2773 | |||
2774 | # qhasm: *(uint32 *) (out + 128) = in0 | ||
2775 | # asm 1: movl <in0=int32#1,128(<out=int32#6) | ||
2776 | # asm 2: movl <in0=%eax,128(<out=%edi) | ||
2777 | movl %eax,128(%edi) | ||
2778 | |||
2779 | # qhasm: *(uint32 *) (out + 132) = in1 | ||
2780 | # asm 1: movl <in1=int32#2,132(<out=int32#6) | ||
2781 | # asm 2: movl <in1=%ecx,132(<out=%edi) | ||
2782 | movl %ecx,132(%edi) | ||
2783 | |||
2784 | # qhasm: *(uint32 *) (out + 136) = in2 | ||
2785 | # asm 1: movl <in2=int32#3,136(<out=int32#6) | ||
2786 | # asm 2: movl <in2=%edx,136(<out=%edi) | ||
2787 | movl %edx,136(%edi) | ||
2788 | |||
2789 | # qhasm: *(uint32 *) (out + 140) = in3 | ||
2790 | # asm 1: movl <in3=int32#4,140(<out=int32#6) | ||
2791 | # asm 2: movl <in3=%ebx,140(<out=%edi) | ||
2792 | movl %ebx,140(%edi) | ||
2793 | |||
2794 | # qhasm: in0 = z0 | ||
2795 | # asm 1: movd <z0=int6464#1,>in0=int32#1 | ||
2796 | # asm 2: movd <z0=%xmm0,>in0=%eax | ||
2797 | movd %xmm0,%eax | ||
2798 | |||
2799 | # qhasm: in1 = z1 | ||
2800 | # asm 1: movd <z1=int6464#2,>in1=int32#2 | ||
2801 | # asm 2: movd <z1=%xmm1,>in1=%ecx | ||
2802 | movd %xmm1,%ecx | ||
2803 | |||
2804 | # qhasm: in2 = z2 | ||
2805 | # asm 1: movd <z2=int6464#3,>in2=int32#3 | ||
2806 | # asm 2: movd <z2=%xmm2,>in2=%edx | ||
2807 | movd %xmm2,%edx | ||
2808 | |||
2809 | # qhasm: in3 = z3 | ||
2810 | # asm 1: movd <z3=int6464#4,>in3=int32#4 | ||
2811 | # asm 2: movd <z3=%xmm3,>in3=%ebx | ||
2812 | movd %xmm3,%ebx | ||
2813 | |||
2814 | # qhasm: in0 ^= *(uint32 *) (m + 192) | ||
2815 | # asm 1: xorl 192(<m=int32#5),<in0=int32#1 | ||
2816 | # asm 2: xorl 192(<m=%esi),<in0=%eax | ||
2817 | xorl 192(%esi),%eax | ||
2818 | |||
2819 | # qhasm: in1 ^= *(uint32 *) (m + 196) | ||
2820 | # asm 1: xorl 196(<m=int32#5),<in1=int32#2 | ||
2821 | # asm 2: xorl 196(<m=%esi),<in1=%ecx | ||
2822 | xorl 196(%esi),%ecx | ||
2823 | |||
2824 | # qhasm: in2 ^= *(uint32 *) (m + 200) | ||
2825 | # asm 1: xorl 200(<m=int32#5),<in2=int32#3 | ||
2826 | # asm 2: xorl 200(<m=%esi),<in2=%edx | ||
2827 | xorl 200(%esi),%edx | ||
2828 | |||
2829 | # qhasm: in3 ^= *(uint32 *) (m + 204) | ||
2830 | # asm 1: xorl 204(<m=int32#5),<in3=int32#4 | ||
2831 | # asm 2: xorl 204(<m=%esi),<in3=%ebx | ||
2832 | xorl 204(%esi),%ebx | ||
2833 | |||
2834 | # qhasm: *(uint32 *) (out + 192) = in0 | ||
2835 | # asm 1: movl <in0=int32#1,192(<out=int32#6) | ||
2836 | # asm 2: movl <in0=%eax,192(<out=%edi) | ||
2837 | movl %eax,192(%edi) | ||
2838 | |||
2839 | # qhasm: *(uint32 *) (out + 196) = in1 | ||
2840 | # asm 1: movl <in1=int32#2,196(<out=int32#6) | ||
2841 | # asm 2: movl <in1=%ecx,196(<out=%edi) | ||
2842 | movl %ecx,196(%edi) | ||
2843 | |||
2844 | # qhasm: *(uint32 *) (out + 200) = in2 | ||
2845 | # asm 1: movl <in2=int32#3,200(<out=int32#6) | ||
2846 | # asm 2: movl <in2=%edx,200(<out=%edi) | ||
2847 | movl %edx,200(%edi) | ||
2848 | |||
2849 | # qhasm: *(uint32 *) (out + 204) = in3 | ||
2850 | # asm 1: movl <in3=int32#4,204(<out=int32#6) | ||
2851 | # asm 2: movl <in3=%ebx,204(<out=%edi) | ||
2852 | movl %ebx,204(%edi) | ||
2853 | |||
2854 | # qhasm: z4 = z4_stack | ||
2855 | # asm 1: movdqa <z4_stack=stack128#34,>z4=int6464#1 | ||
2856 | # asm 2: movdqa <z4_stack=560(%esp),>z4=%xmm0 | ||
2857 | movdqa 560(%esp),%xmm0 | ||
2858 | |||
2859 | # qhasm: z5 = z5_stack | ||
2860 | # asm 1: movdqa <z5_stack=stack128#21,>z5=int6464#2 | ||
2861 | # asm 2: movdqa <z5_stack=352(%esp),>z5=%xmm1 | ||
2862 | movdqa 352(%esp),%xmm1 | ||
2863 | |||
2864 | # qhasm: z6 = z6_stack | ||
2865 | # asm 1: movdqa <z6_stack=stack128#26,>z6=int6464#3 | ||
2866 | # asm 2: movdqa <z6_stack=432(%esp),>z6=%xmm2 | ||
2867 | movdqa 432(%esp),%xmm2 | ||
2868 | |||
2869 | # qhasm: z7 = z7_stack | ||
2870 | # asm 1: movdqa <z7_stack=stack128#29,>z7=int6464#4 | ||
2871 | # asm 2: movdqa <z7_stack=480(%esp),>z7=%xmm3 | ||
2872 | movdqa 480(%esp),%xmm3 | ||
2873 | |||
2874 | # qhasm: uint32323232 z4 += orig4 | ||
2875 | # asm 1: paddd <orig4=stack128#16,<z4=int6464#1 | ||
2876 | # asm 2: paddd <orig4=272(%esp),<z4=%xmm0 | ||
2877 | paddd 272(%esp),%xmm0 | ||
2878 | |||
2879 | # qhasm: uint32323232 z5 += orig5 | ||
2880 | # asm 1: paddd <orig5=stack128#5,<z5=int6464#2 | ||
2881 | # asm 2: paddd <orig5=96(%esp),<z5=%xmm1 | ||
2882 | paddd 96(%esp),%xmm1 | ||
2883 | |||
2884 | # qhasm: uint32323232 z6 += orig6 | ||
2885 | # asm 1: paddd <orig6=stack128#9,<z6=int6464#3 | ||
2886 | # asm 2: paddd <orig6=160(%esp),<z6=%xmm2 | ||
2887 | paddd 160(%esp),%xmm2 | ||
2888 | |||
2889 | # qhasm: uint32323232 z7 += orig7 | ||
2890 | # asm 1: paddd <orig7=stack128#13,<z7=int6464#4 | ||
2891 | # asm 2: paddd <orig7=224(%esp),<z7=%xmm3 | ||
2892 | paddd 224(%esp),%xmm3 | ||
2893 | |||
2894 | # qhasm: in4 = z4 | ||
2895 | # asm 1: movd <z4=int6464#1,>in4=int32#1 | ||
2896 | # asm 2: movd <z4=%xmm0,>in4=%eax | ||
2897 | movd %xmm0,%eax | ||
2898 | |||
2899 | # qhasm: in5 = z5 | ||
2900 | # asm 1: movd <z5=int6464#2,>in5=int32#2 | ||
2901 | # asm 2: movd <z5=%xmm1,>in5=%ecx | ||
2902 | movd %xmm1,%ecx | ||
2903 | |||
2904 | # qhasm: in6 = z6 | ||
2905 | # asm 1: movd <z6=int6464#3,>in6=int32#3 | ||
2906 | # asm 2: movd <z6=%xmm2,>in6=%edx | ||
2907 | movd %xmm2,%edx | ||
2908 | |||
2909 | # qhasm: in7 = z7 | ||
2910 | # asm 1: movd <z7=int6464#4,>in7=int32#4 | ||
2911 | # asm 2: movd <z7=%xmm3,>in7=%ebx | ||
2912 | movd %xmm3,%ebx | ||
2913 | |||
2914 | # qhasm: z4 <<<= 96 | ||
2915 | # asm 1: pshufd $0x39,<z4=int6464#1,<z4=int6464#1 | ||
2916 | # asm 2: pshufd $0x39,<z4=%xmm0,<z4=%xmm0 | ||
2917 | pshufd $0x39,%xmm0,%xmm0 | ||
2918 | |||
2919 | # qhasm: z5 <<<= 96 | ||
2920 | # asm 1: pshufd $0x39,<z5=int6464#2,<z5=int6464#2 | ||
2921 | # asm 2: pshufd $0x39,<z5=%xmm1,<z5=%xmm1 | ||
2922 | pshufd $0x39,%xmm1,%xmm1 | ||
2923 | |||
2924 | # qhasm: z6 <<<= 96 | ||
2925 | # asm 1: pshufd $0x39,<z6=int6464#3,<z6=int6464#3 | ||
2926 | # asm 2: pshufd $0x39,<z6=%xmm2,<z6=%xmm2 | ||
2927 | pshufd $0x39,%xmm2,%xmm2 | ||
2928 | |||
2929 | # qhasm: z7 <<<= 96 | ||
2930 | # asm 1: pshufd $0x39,<z7=int6464#4,<z7=int6464#4 | ||
2931 | # asm 2: pshufd $0x39,<z7=%xmm3,<z7=%xmm3 | ||
2932 | pshufd $0x39,%xmm3,%xmm3 | ||
2933 | |||
2934 | # qhasm: in4 ^= *(uint32 *) (m + 16) | ||
2935 | # asm 1: xorl 16(<m=int32#5),<in4=int32#1 | ||
2936 | # asm 2: xorl 16(<m=%esi),<in4=%eax | ||
2937 | xorl 16(%esi),%eax | ||
2938 | |||
2939 | # qhasm: in5 ^= *(uint32 *) (m + 20) | ||
2940 | # asm 1: xorl 20(<m=int32#5),<in5=int32#2 | ||
2941 | # asm 2: xorl 20(<m=%esi),<in5=%ecx | ||
2942 | xorl 20(%esi),%ecx | ||
2943 | |||
2944 | # qhasm: in6 ^= *(uint32 *) (m + 24) | ||
2945 | # asm 1: xorl 24(<m=int32#5),<in6=int32#3 | ||
2946 | # asm 2: xorl 24(<m=%esi),<in6=%edx | ||
2947 | xorl 24(%esi),%edx | ||
2948 | |||
2949 | # qhasm: in7 ^= *(uint32 *) (m + 28) | ||
2950 | # asm 1: xorl 28(<m=int32#5),<in7=int32#4 | ||
2951 | # asm 2: xorl 28(<m=%esi),<in7=%ebx | ||
2952 | xorl 28(%esi),%ebx | ||
2953 | |||
2954 | # qhasm: *(uint32 *) (out + 16) = in4 | ||
2955 | # asm 1: movl <in4=int32#1,16(<out=int32#6) | ||
2956 | # asm 2: movl <in4=%eax,16(<out=%edi) | ||
2957 | movl %eax,16(%edi) | ||
2958 | |||
2959 | # qhasm: *(uint32 *) (out + 20) = in5 | ||
2960 | # asm 1: movl <in5=int32#2,20(<out=int32#6) | ||
2961 | # asm 2: movl <in5=%ecx,20(<out=%edi) | ||
2962 | movl %ecx,20(%edi) | ||
2963 | |||
2964 | # qhasm: *(uint32 *) (out + 24) = in6 | ||
2965 | # asm 1: movl <in6=int32#3,24(<out=int32#6) | ||
2966 | # asm 2: movl <in6=%edx,24(<out=%edi) | ||
2967 | movl %edx,24(%edi) | ||
2968 | |||
2969 | # qhasm: *(uint32 *) (out + 28) = in7 | ||
2970 | # asm 1: movl <in7=int32#4,28(<out=int32#6) | ||
2971 | # asm 2: movl <in7=%ebx,28(<out=%edi) | ||
2972 | movl %ebx,28(%edi) | ||
2973 | |||
2974 | # qhasm: in4 = z4 | ||
2975 | # asm 1: movd <z4=int6464#1,>in4=int32#1 | ||
2976 | # asm 2: movd <z4=%xmm0,>in4=%eax | ||
2977 | movd %xmm0,%eax | ||
2978 | |||
2979 | # qhasm: in5 = z5 | ||
2980 | # asm 1: movd <z5=int6464#2,>in5=int32#2 | ||
2981 | # asm 2: movd <z5=%xmm1,>in5=%ecx | ||
2982 | movd %xmm1,%ecx | ||
2983 | |||
2984 | # qhasm: in6 = z6 | ||
2985 | # asm 1: movd <z6=int6464#3,>in6=int32#3 | ||
2986 | # asm 2: movd <z6=%xmm2,>in6=%edx | ||
2987 | movd %xmm2,%edx | ||
2988 | |||
2989 | # qhasm: in7 = z7 | ||
2990 | # asm 1: movd <z7=int6464#4,>in7=int32#4 | ||
2991 | # asm 2: movd <z7=%xmm3,>in7=%ebx | ||
2992 | movd %xmm3,%ebx | ||
2993 | |||
2994 | # qhasm: z4 <<<= 96 | ||
2995 | # asm 1: pshufd $0x39,<z4=int6464#1,<z4=int6464#1 | ||
2996 | # asm 2: pshufd $0x39,<z4=%xmm0,<z4=%xmm0 | ||
2997 | pshufd $0x39,%xmm0,%xmm0 | ||
2998 | |||
2999 | # qhasm: z5 <<<= 96 | ||
3000 | # asm 1: pshufd $0x39,<z5=int6464#2,<z5=int6464#2 | ||
3001 | # asm 2: pshufd $0x39,<z5=%xmm1,<z5=%xmm1 | ||
3002 | pshufd $0x39,%xmm1,%xmm1 | ||
3003 | |||
3004 | # qhasm: z6 <<<= 96 | ||
3005 | # asm 1: pshufd $0x39,<z6=int6464#3,<z6=int6464#3 | ||
3006 | # asm 2: pshufd $0x39,<z6=%xmm2,<z6=%xmm2 | ||
3007 | pshufd $0x39,%xmm2,%xmm2 | ||
3008 | |||
3009 | # qhasm: z7 <<<= 96 | ||
3010 | # asm 1: pshufd $0x39,<z7=int6464#4,<z7=int6464#4 | ||
3011 | # asm 2: pshufd $0x39,<z7=%xmm3,<z7=%xmm3 | ||
3012 | pshufd $0x39,%xmm3,%xmm3 | ||
3013 | |||
3014 | # qhasm: in4 ^= *(uint32 *) (m + 80) | ||
3015 | # asm 1: xorl 80(<m=int32#5),<in4=int32#1 | ||
3016 | # asm 2: xorl 80(<m=%esi),<in4=%eax | ||
3017 | xorl 80(%esi),%eax | ||
3018 | |||
3019 | # qhasm: in5 ^= *(uint32 *) (m + 84) | ||
3020 | # asm 1: xorl 84(<m=int32#5),<in5=int32#2 | ||
3021 | # asm 2: xorl 84(<m=%esi),<in5=%ecx | ||
3022 | xorl 84(%esi),%ecx | ||
3023 | |||
3024 | # qhasm: in6 ^= *(uint32 *) (m + 88) | ||
3025 | # asm 1: xorl 88(<m=int32#5),<in6=int32#3 | ||
3026 | # asm 2: xorl 88(<m=%esi),<in6=%edx | ||
3027 | xorl 88(%esi),%edx | ||
3028 | |||
3029 | # qhasm: in7 ^= *(uint32 *) (m + 92) | ||
3030 | # asm 1: xorl 92(<m=int32#5),<in7=int32#4 | ||
3031 | # asm 2: xorl 92(<m=%esi),<in7=%ebx | ||
3032 | xorl 92(%esi),%ebx | ||
3033 | |||
3034 | # qhasm: *(uint32 *) (out + 80) = in4 | ||
3035 | # asm 1: movl <in4=int32#1,80(<out=int32#6) | ||
3036 | # asm 2: movl <in4=%eax,80(<out=%edi) | ||
3037 | movl %eax,80(%edi) | ||
3038 | |||
3039 | # qhasm: *(uint32 *) (out + 84) = in5 | ||
3040 | # asm 1: movl <in5=int32#2,84(<out=int32#6) | ||
3041 | # asm 2: movl <in5=%ecx,84(<out=%edi) | ||
3042 | movl %ecx,84(%edi) | ||
3043 | |||
3044 | # qhasm: *(uint32 *) (out + 88) = in6 | ||
3045 | # asm 1: movl <in6=int32#3,88(<out=int32#6) | ||
3046 | # asm 2: movl <in6=%edx,88(<out=%edi) | ||
3047 | movl %edx,88(%edi) | ||
3048 | |||
3049 | # qhasm: *(uint32 *) (out + 92) = in7 | ||
3050 | # asm 1: movl <in7=int32#4,92(<out=int32#6) | ||
3051 | # asm 2: movl <in7=%ebx,92(<out=%edi) | ||
3052 | movl %ebx,92(%edi) | ||
3053 | |||
3054 | # qhasm: in4 = z4 | ||
3055 | # asm 1: movd <z4=int6464#1,>in4=int32#1 | ||
3056 | # asm 2: movd <z4=%xmm0,>in4=%eax | ||
3057 | movd %xmm0,%eax | ||
3058 | |||
3059 | # qhasm: in5 = z5 | ||
3060 | # asm 1: movd <z5=int6464#2,>in5=int32#2 | ||
3061 | # asm 2: movd <z5=%xmm1,>in5=%ecx | ||
3062 | movd %xmm1,%ecx | ||
3063 | |||
3064 | # qhasm: in6 = z6 | ||
3065 | # asm 1: movd <z6=int6464#3,>in6=int32#3 | ||
3066 | # asm 2: movd <z6=%xmm2,>in6=%edx | ||
3067 | movd %xmm2,%edx | ||
3068 | |||
3069 | # qhasm: in7 = z7 | ||
3070 | # asm 1: movd <z7=int6464#4,>in7=int32#4 | ||
3071 | # asm 2: movd <z7=%xmm3,>in7=%ebx | ||
3072 | movd %xmm3,%ebx | ||
3073 | |||
3074 | # qhasm: z4 <<<= 96 | ||
3075 | # asm 1: pshufd $0x39,<z4=int6464#1,<z4=int6464#1 | ||
3076 | # asm 2: pshufd $0x39,<z4=%xmm0,<z4=%xmm0 | ||
3077 | pshufd $0x39,%xmm0,%xmm0 | ||
3078 | |||
3079 | # qhasm: z5 <<<= 96 | ||
3080 | # asm 1: pshufd $0x39,<z5=int6464#2,<z5=int6464#2 | ||
3081 | # asm 2: pshufd $0x39,<z5=%xmm1,<z5=%xmm1 | ||
3082 | pshufd $0x39,%xmm1,%xmm1 | ||
3083 | |||
3084 | # qhasm: z6 <<<= 96 | ||
3085 | # asm 1: pshufd $0x39,<z6=int6464#3,<z6=int6464#3 | ||
3086 | # asm 2: pshufd $0x39,<z6=%xmm2,<z6=%xmm2 | ||
3087 | pshufd $0x39,%xmm2,%xmm2 | ||
3088 | |||
3089 | # qhasm: z7 <<<= 96 | ||
3090 | # asm 1: pshufd $0x39,<z7=int6464#4,<z7=int6464#4 | ||
3091 | # asm 2: pshufd $0x39,<z7=%xmm3,<z7=%xmm3 | ||
3092 | pshufd $0x39,%xmm3,%xmm3 | ||
3093 | |||
3094 | # qhasm: in4 ^= *(uint32 *) (m + 144) | ||
3095 | # asm 1: xorl 144(<m=int32#5),<in4=int32#1 | ||
3096 | # asm 2: xorl 144(<m=%esi),<in4=%eax | ||
3097 | xorl 144(%esi),%eax | ||
3098 | |||
3099 | # qhasm: in5 ^= *(uint32 *) (m + 148) | ||
3100 | # asm 1: xorl 148(<m=int32#5),<in5=int32#2 | ||
3101 | # asm 2: xorl 148(<m=%esi),<in5=%ecx | ||
3102 | xorl 148(%esi),%ecx | ||
3103 | |||
3104 | # qhasm: in6 ^= *(uint32 *) (m + 152) | ||
3105 | # asm 1: xorl 152(<m=int32#5),<in6=int32#3 | ||
3106 | # asm 2: xorl 152(<m=%esi),<in6=%edx | ||
3107 | xorl 152(%esi),%edx | ||
3108 | |||
3109 | # qhasm: in7 ^= *(uint32 *) (m + 156) | ||
3110 | # asm 1: xorl 156(<m=int32#5),<in7=int32#4 | ||
3111 | # asm 2: xorl 156(<m=%esi),<in7=%ebx | ||
3112 | xorl 156(%esi),%ebx | ||
3113 | |||
3114 | # qhasm: *(uint32 *) (out + 144) = in4 | ||
3115 | # asm 1: movl <in4=int32#1,144(<out=int32#6) | ||
3116 | # asm 2: movl <in4=%eax,144(<out=%edi) | ||
3117 | movl %eax,144(%edi) | ||
3118 | |||
3119 | # qhasm: *(uint32 *) (out + 148) = in5 | ||
3120 | # asm 1: movl <in5=int32#2,148(<out=int32#6) | ||
3121 | # asm 2: movl <in5=%ecx,148(<out=%edi) | ||
3122 | movl %ecx,148(%edi) | ||
3123 | |||
3124 | # qhasm: *(uint32 *) (out + 152) = in6 | ||
3125 | # asm 1: movl <in6=int32#3,152(<out=int32#6) | ||
3126 | # asm 2: movl <in6=%edx,152(<out=%edi) | ||
3127 | movl %edx,152(%edi) | ||
3128 | |||
3129 | # qhasm: *(uint32 *) (out + 156) = in7 | ||
3130 | # asm 1: movl <in7=int32#4,156(<out=int32#6) | ||
3131 | # asm 2: movl <in7=%ebx,156(<out=%edi) | ||
3132 | movl %ebx,156(%edi) | ||
3133 | |||
3134 | # qhasm: in4 = z4 | ||
3135 | # asm 1: movd <z4=int6464#1,>in4=int32#1 | ||
3136 | # asm 2: movd <z4=%xmm0,>in4=%eax | ||
3137 | movd %xmm0,%eax | ||
3138 | |||
3139 | # qhasm: in5 = z5 | ||
3140 | # asm 1: movd <z5=int6464#2,>in5=int32#2 | ||
3141 | # asm 2: movd <z5=%xmm1,>in5=%ecx | ||
3142 | movd %xmm1,%ecx | ||
3143 | |||
3144 | # qhasm: in6 = z6 | ||
3145 | # asm 1: movd <z6=int6464#3,>in6=int32#3 | ||
3146 | # asm 2: movd <z6=%xmm2,>in6=%edx | ||
3147 | movd %xmm2,%edx | ||
3148 | |||
3149 | # qhasm: in7 = z7 | ||
3150 | # asm 1: movd <z7=int6464#4,>in7=int32#4 | ||
3151 | # asm 2: movd <z7=%xmm3,>in7=%ebx | ||
3152 | movd %xmm3,%ebx | ||
3153 | |||
3154 | # qhasm: in4 ^= *(uint32 *) (m + 208) | ||
3155 | # asm 1: xorl 208(<m=int32#5),<in4=int32#1 | ||
3156 | # asm 2: xorl 208(<m=%esi),<in4=%eax | ||
3157 | xorl 208(%esi),%eax | ||
3158 | |||
3159 | # qhasm: in5 ^= *(uint32 *) (m + 212) | ||
3160 | # asm 1: xorl 212(<m=int32#5),<in5=int32#2 | ||
3161 | # asm 2: xorl 212(<m=%esi),<in5=%ecx | ||
3162 | xorl 212(%esi),%ecx | ||
3163 | |||
3164 | # qhasm: in6 ^= *(uint32 *) (m + 216) | ||
3165 | # asm 1: xorl 216(<m=int32#5),<in6=int32#3 | ||
3166 | # asm 2: xorl 216(<m=%esi),<in6=%edx | ||
3167 | xorl 216(%esi),%edx | ||
3168 | |||
3169 | # qhasm: in7 ^= *(uint32 *) (m + 220) | ||
3170 | # asm 1: xorl 220(<m=int32#5),<in7=int32#4 | ||
3171 | # asm 2: xorl 220(<m=%esi),<in7=%ebx | ||
3172 | xorl 220(%esi),%ebx | ||
3173 | |||
3174 | # qhasm: *(uint32 *) (out + 208) = in4 | ||
3175 | # asm 1: movl <in4=int32#1,208(<out=int32#6) | ||
3176 | # asm 2: movl <in4=%eax,208(<out=%edi) | ||
3177 | movl %eax,208(%edi) | ||
3178 | |||
3179 | # qhasm: *(uint32 *) (out + 212) = in5 | ||
3180 | # asm 1: movl <in5=int32#2,212(<out=int32#6) | ||
3181 | # asm 2: movl <in5=%ecx,212(<out=%edi) | ||
3182 | movl %ecx,212(%edi) | ||
3183 | |||
3184 | # qhasm: *(uint32 *) (out + 216) = in6 | ||
3185 | # asm 1: movl <in6=int32#3,216(<out=int32#6) | ||
3186 | # asm 2: movl <in6=%edx,216(<out=%edi) | ||
3187 | movl %edx,216(%edi) | ||
3188 | |||
3189 | # qhasm: *(uint32 *) (out + 220) = in7 | ||
3190 | # asm 1: movl <in7=int32#4,220(<out=int32#6) | ||
3191 | # asm 2: movl <in7=%ebx,220(<out=%edi) | ||
3192 | movl %ebx,220(%edi) | ||
3193 | |||
3194 | # qhasm: z8 = z8_stack | ||
3195 | # asm 1: movdqa <z8_stack=stack128#37,>z8=int6464#1 | ||
3196 | # asm 2: movdqa <z8_stack=608(%esp),>z8=%xmm0 | ||
3197 | movdqa 608(%esp),%xmm0 | ||
3198 | |||
3199 | # qhasm: z9 = z9_stack | ||
3200 | # asm 1: movdqa <z9_stack=stack128#32,>z9=int6464#2 | ||
3201 | # asm 2: movdqa <z9_stack=528(%esp),>z9=%xmm1 | ||
3202 | movdqa 528(%esp),%xmm1 | ||
3203 | |||
3204 | # qhasm: z10 = z10_stack | ||
3205 | # asm 1: movdqa <z10_stack=stack128#22,>z10=int6464#3 | ||
3206 | # asm 2: movdqa <z10_stack=368(%esp),>z10=%xmm2 | ||
3207 | movdqa 368(%esp),%xmm2 | ||
3208 | |||
3209 | # qhasm: z11 = z11_stack | ||
3210 | # asm 1: movdqa <z11_stack=stack128#27,>z11=int6464#4 | ||
3211 | # asm 2: movdqa <z11_stack=448(%esp),>z11=%xmm3 | ||
3212 | movdqa 448(%esp),%xmm3 | ||
3213 | |||
3214 | # qhasm: uint32323232 z8 += orig8 | ||
3215 | # asm 1: paddd <orig8=stack128#19,<z8=int6464#1 | ||
3216 | # asm 2: paddd <orig8=320(%esp),<z8=%xmm0 | ||
3217 | paddd 320(%esp),%xmm0 | ||
3218 | |||
3219 | # qhasm: uint32323232 z9 += orig9 | ||
3220 | # asm 1: paddd <orig9=stack128#20,<z9=int6464#2 | ||
3221 | # asm 2: paddd <orig9=336(%esp),<z9=%xmm1 | ||
3222 | paddd 336(%esp),%xmm1 | ||
3223 | |||
3224 | # qhasm: uint32323232 z10 += orig10 | ||
3225 | # asm 1: paddd <orig10=stack128#6,<z10=int6464#3 | ||
3226 | # asm 2: paddd <orig10=112(%esp),<z10=%xmm2 | ||
3227 | paddd 112(%esp),%xmm2 | ||
3228 | |||
3229 | # qhasm: uint32323232 z11 += orig11 | ||
3230 | # asm 1: paddd <orig11=stack128#10,<z11=int6464#4 | ||
3231 | # asm 2: paddd <orig11=176(%esp),<z11=%xmm3 | ||
3232 | paddd 176(%esp),%xmm3 | ||
3233 | |||
3234 | # qhasm: in8 = z8 | ||
3235 | # asm 1: movd <z8=int6464#1,>in8=int32#1 | ||
3236 | # asm 2: movd <z8=%xmm0,>in8=%eax | ||
3237 | movd %xmm0,%eax | ||
3238 | |||
3239 | # qhasm: in9 = z9 | ||
3240 | # asm 1: movd <z9=int6464#2,>in9=int32#2 | ||
3241 | # asm 2: movd <z9=%xmm1,>in9=%ecx | ||
3242 | movd %xmm1,%ecx | ||
3243 | |||
3244 | # qhasm: in10 = z10 | ||
3245 | # asm 1: movd <z10=int6464#3,>in10=int32#3 | ||
3246 | # asm 2: movd <z10=%xmm2,>in10=%edx | ||
3247 | movd %xmm2,%edx | ||
3248 | |||
3249 | # qhasm: in11 = z11 | ||
3250 | # asm 1: movd <z11=int6464#4,>in11=int32#4 | ||
3251 | # asm 2: movd <z11=%xmm3,>in11=%ebx | ||
3252 | movd %xmm3,%ebx | ||
3253 | |||
3254 | # qhasm: z8 <<<= 96 | ||
3255 | # asm 1: pshufd $0x39,<z8=int6464#1,<z8=int6464#1 | ||
3256 | # asm 2: pshufd $0x39,<z8=%xmm0,<z8=%xmm0 | ||
3257 | pshufd $0x39,%xmm0,%xmm0 | ||
3258 | |||
3259 | # qhasm: z9 <<<= 96 | ||
3260 | # asm 1: pshufd $0x39,<z9=int6464#2,<z9=int6464#2 | ||
3261 | # asm 2: pshufd $0x39,<z9=%xmm1,<z9=%xmm1 | ||
3262 | pshufd $0x39,%xmm1,%xmm1 | ||
3263 | |||
3264 | # qhasm: z10 <<<= 96 | ||
3265 | # asm 1: pshufd $0x39,<z10=int6464#3,<z10=int6464#3 | ||
3266 | # asm 2: pshufd $0x39,<z10=%xmm2,<z10=%xmm2 | ||
3267 | pshufd $0x39,%xmm2,%xmm2 | ||
3268 | |||
3269 | # qhasm: z11 <<<= 96 | ||
3270 | # asm 1: pshufd $0x39,<z11=int6464#4,<z11=int6464#4 | ||
3271 | # asm 2: pshufd $0x39,<z11=%xmm3,<z11=%xmm3 | ||
3272 | pshufd $0x39,%xmm3,%xmm3 | ||
3273 | |||
3274 | # qhasm: in8 ^= *(uint32 *) (m + 32) | ||
3275 | # asm 1: xorl 32(<m=int32#5),<in8=int32#1 | ||
3276 | # asm 2: xorl 32(<m=%esi),<in8=%eax | ||
3277 | xorl 32(%esi),%eax | ||
3278 | |||
3279 | # qhasm: in9 ^= *(uint32 *) (m + 36) | ||
3280 | # asm 1: xorl 36(<m=int32#5),<in9=int32#2 | ||
3281 | # asm 2: xorl 36(<m=%esi),<in9=%ecx | ||
3282 | xorl 36(%esi),%ecx | ||
3283 | |||
3284 | # qhasm: in10 ^= *(uint32 *) (m + 40) | ||
3285 | # asm 1: xorl 40(<m=int32#5),<in10=int32#3 | ||
3286 | # asm 2: xorl 40(<m=%esi),<in10=%edx | ||
3287 | xorl 40(%esi),%edx | ||
3288 | |||
3289 | # qhasm: in11 ^= *(uint32 *) (m + 44) | ||
3290 | # asm 1: xorl 44(<m=int32#5),<in11=int32#4 | ||
3291 | # asm 2: xorl 44(<m=%esi),<in11=%ebx | ||
3292 | xorl 44(%esi),%ebx | ||
3293 | |||
3294 | # qhasm: *(uint32 *) (out + 32) = in8 | ||
3295 | # asm 1: movl <in8=int32#1,32(<out=int32#6) | ||
3296 | # asm 2: movl <in8=%eax,32(<out=%edi) | ||
3297 | movl %eax,32(%edi) | ||
3298 | |||
3299 | # qhasm: *(uint32 *) (out + 36) = in9 | ||
3300 | # asm 1: movl <in9=int32#2,36(<out=int32#6) | ||
3301 | # asm 2: movl <in9=%ecx,36(<out=%edi) | ||
3302 | movl %ecx,36(%edi) | ||
3303 | |||
3304 | # qhasm: *(uint32 *) (out + 40) = in10 | ||
3305 | # asm 1: movl <in10=int32#3,40(<out=int32#6) | ||
3306 | # asm 2: movl <in10=%edx,40(<out=%edi) | ||
3307 | movl %edx,40(%edi) | ||
3308 | |||
3309 | # qhasm: *(uint32 *) (out + 44) = in11 | ||
3310 | # asm 1: movl <in11=int32#4,44(<out=int32#6) | ||
3311 | # asm 2: movl <in11=%ebx,44(<out=%edi) | ||
3312 | movl %ebx,44(%edi) | ||
3313 | |||
3314 | # qhasm: in8 = z8 | ||
3315 | # asm 1: movd <z8=int6464#1,>in8=int32#1 | ||
3316 | # asm 2: movd <z8=%xmm0,>in8=%eax | ||
3317 | movd %xmm0,%eax | ||
3318 | |||
3319 | # qhasm: in9 = z9 | ||
3320 | # asm 1: movd <z9=int6464#2,>in9=int32#2 | ||
3321 | # asm 2: movd <z9=%xmm1,>in9=%ecx | ||
3322 | movd %xmm1,%ecx | ||
3323 | |||
3324 | # qhasm: in10 = z10 | ||
3325 | # asm 1: movd <z10=int6464#3,>in10=int32#3 | ||
3326 | # asm 2: movd <z10=%xmm2,>in10=%edx | ||
3327 | movd %xmm2,%edx | ||
3328 | |||
3329 | # qhasm: in11 = z11 | ||
3330 | # asm 1: movd <z11=int6464#4,>in11=int32#4 | ||
3331 | # asm 2: movd <z11=%xmm3,>in11=%ebx | ||
3332 | movd %xmm3,%ebx | ||
3333 | |||
3334 | # qhasm: z8 <<<= 96 | ||
3335 | # asm 1: pshufd $0x39,<z8=int6464#1,<z8=int6464#1 | ||
3336 | # asm 2: pshufd $0x39,<z8=%xmm0,<z8=%xmm0 | ||
3337 | pshufd $0x39,%xmm0,%xmm0 | ||
3338 | |||
3339 | # qhasm: z9 <<<= 96 | ||
3340 | # asm 1: pshufd $0x39,<z9=int6464#2,<z9=int6464#2 | ||
3341 | # asm 2: pshufd $0x39,<z9=%xmm1,<z9=%xmm1 | ||
3342 | pshufd $0x39,%xmm1,%xmm1 | ||
3343 | |||
3344 | # qhasm: z10 <<<= 96 | ||
3345 | # asm 1: pshufd $0x39,<z10=int6464#3,<z10=int6464#3 | ||
3346 | # asm 2: pshufd $0x39,<z10=%xmm2,<z10=%xmm2 | ||
3347 | pshufd $0x39,%xmm2,%xmm2 | ||
3348 | |||
3349 | # qhasm: z11 <<<= 96 | ||
3350 | # asm 1: pshufd $0x39,<z11=int6464#4,<z11=int6464#4 | ||
3351 | # asm 2: pshufd $0x39,<z11=%xmm3,<z11=%xmm3 | ||
3352 | pshufd $0x39,%xmm3,%xmm3 | ||
3353 | |||
3354 | # qhasm: in8 ^= *(uint32 *) (m + 96) | ||
3355 | # asm 1: xorl 96(<m=int32#5),<in8=int32#1 | ||
3356 | # asm 2: xorl 96(<m=%esi),<in8=%eax | ||
3357 | xorl 96(%esi),%eax | ||
3358 | |||
3359 | # qhasm: in9 ^= *(uint32 *) (m + 100) | ||
3360 | # asm 1: xorl 100(<m=int32#5),<in9=int32#2 | ||
3361 | # asm 2: xorl 100(<m=%esi),<in9=%ecx | ||
3362 | xorl 100(%esi),%ecx | ||
3363 | |||
3364 | # qhasm: in10 ^= *(uint32 *) (m + 104) | ||
3365 | # asm 1: xorl 104(<m=int32#5),<in10=int32#3 | ||
3366 | # asm 2: xorl 104(<m=%esi),<in10=%edx | ||
3367 | xorl 104(%esi),%edx | ||
3368 | |||
3369 | # qhasm: in11 ^= *(uint32 *) (m + 108) | ||
3370 | # asm 1: xorl 108(<m=int32#5),<in11=int32#4 | ||
3371 | # asm 2: xorl 108(<m=%esi),<in11=%ebx | ||
3372 | xorl 108(%esi),%ebx | ||
3373 | |||
3374 | # qhasm: *(uint32 *) (out + 96) = in8 | ||
3375 | # asm 1: movl <in8=int32#1,96(<out=int32#6) | ||
3376 | # asm 2: movl <in8=%eax,96(<out=%edi) | ||
3377 | movl %eax,96(%edi) | ||
3378 | |||
3379 | # qhasm: *(uint32 *) (out + 100) = in9 | ||
3380 | # asm 1: movl <in9=int32#2,100(<out=int32#6) | ||
3381 | # asm 2: movl <in9=%ecx,100(<out=%edi) | ||
3382 | movl %ecx,100(%edi) | ||
3383 | |||
3384 | # qhasm: *(uint32 *) (out + 104) = in10 | ||
3385 | # asm 1: movl <in10=int32#3,104(<out=int32#6) | ||
3386 | # asm 2: movl <in10=%edx,104(<out=%edi) | ||
3387 | movl %edx,104(%edi) | ||
3388 | |||
3389 | # qhasm: *(uint32 *) (out + 108) = in11 | ||
3390 | # asm 1: movl <in11=int32#4,108(<out=int32#6) | ||
3391 | # asm 2: movl <in11=%ebx,108(<out=%edi) | ||
3392 | movl %ebx,108(%edi) | ||
3393 | |||
3394 | # qhasm: in8 = z8 | ||
3395 | # asm 1: movd <z8=int6464#1,>in8=int32#1 | ||
3396 | # asm 2: movd <z8=%xmm0,>in8=%eax | ||
3397 | movd %xmm0,%eax | ||
3398 | |||
3399 | # qhasm: in9 = z9 | ||
3400 | # asm 1: movd <z9=int6464#2,>in9=int32#2 | ||
3401 | # asm 2: movd <z9=%xmm1,>in9=%ecx | ||
3402 | movd %xmm1,%ecx | ||
3403 | |||
3404 | # qhasm: in10 = z10 | ||
3405 | # asm 1: movd <z10=int6464#3,>in10=int32#3 | ||
3406 | # asm 2: movd <z10=%xmm2,>in10=%edx | ||
3407 | movd %xmm2,%edx | ||
3408 | |||
3409 | # qhasm: in11 = z11 | ||
3410 | # asm 1: movd <z11=int6464#4,>in11=int32#4 | ||
3411 | # asm 2: movd <z11=%xmm3,>in11=%ebx | ||
3412 | movd %xmm3,%ebx | ||
3413 | |||
3414 | # qhasm: z8 <<<= 96 | ||
3415 | # asm 1: pshufd $0x39,<z8=int6464#1,<z8=int6464#1 | ||
3416 | # asm 2: pshufd $0x39,<z8=%xmm0,<z8=%xmm0 | ||
3417 | pshufd $0x39,%xmm0,%xmm0 | ||
3418 | |||
3419 | # qhasm: z9 <<<= 96 | ||
3420 | # asm 1: pshufd $0x39,<z9=int6464#2,<z9=int6464#2 | ||
3421 | # asm 2: pshufd $0x39,<z9=%xmm1,<z9=%xmm1 | ||
3422 | pshufd $0x39,%xmm1,%xmm1 | ||
3423 | |||
3424 | # qhasm: z10 <<<= 96 | ||
3425 | # asm 1: pshufd $0x39,<z10=int6464#3,<z10=int6464#3 | ||
3426 | # asm 2: pshufd $0x39,<z10=%xmm2,<z10=%xmm2 | ||
3427 | pshufd $0x39,%xmm2,%xmm2 | ||
3428 | |||
3429 | # qhasm: z11 <<<= 96 | ||
3430 | # asm 1: pshufd $0x39,<z11=int6464#4,<z11=int6464#4 | ||
3431 | # asm 2: pshufd $0x39,<z11=%xmm3,<z11=%xmm3 | ||
3432 | pshufd $0x39,%xmm3,%xmm3 | ||
3433 | |||
3434 | # qhasm: in8 ^= *(uint32 *) (m + 160) | ||
3435 | # asm 1: xorl 160(<m=int32#5),<in8=int32#1 | ||
3436 | # asm 2: xorl 160(<m=%esi),<in8=%eax | ||
3437 | xorl 160(%esi),%eax | ||
3438 | |||
3439 | # qhasm: in9 ^= *(uint32 *) (m + 164) | ||
3440 | # asm 1: xorl 164(<m=int32#5),<in9=int32#2 | ||
3441 | # asm 2: xorl 164(<m=%esi),<in9=%ecx | ||
3442 | xorl 164(%esi),%ecx | ||
3443 | |||
3444 | # qhasm: in10 ^= *(uint32 *) (m + 168) | ||
3445 | # asm 1: xorl 168(<m=int32#5),<in10=int32#3 | ||
3446 | # asm 2: xorl 168(<m=%esi),<in10=%edx | ||
3447 | xorl 168(%esi),%edx | ||
3448 | |||
3449 | # qhasm: in11 ^= *(uint32 *) (m + 172) | ||
3450 | # asm 1: xorl 172(<m=int32#5),<in11=int32#4 | ||
3451 | # asm 2: xorl 172(<m=%esi),<in11=%ebx | ||
3452 | xorl 172(%esi),%ebx | ||
3453 | |||
3454 | # qhasm: *(uint32 *) (out + 160) = in8 | ||
3455 | # asm 1: movl <in8=int32#1,160(<out=int32#6) | ||
3456 | # asm 2: movl <in8=%eax,160(<out=%edi) | ||
3457 | movl %eax,160(%edi) | ||
3458 | |||
3459 | # qhasm: *(uint32 *) (out + 164) = in9 | ||
3460 | # asm 1: movl <in9=int32#2,164(<out=int32#6) | ||
3461 | # asm 2: movl <in9=%ecx,164(<out=%edi) | ||
3462 | movl %ecx,164(%edi) | ||
3463 | |||
3464 | # qhasm: *(uint32 *) (out + 168) = in10 | ||
3465 | # asm 1: movl <in10=int32#3,168(<out=int32#6) | ||
3466 | # asm 2: movl <in10=%edx,168(<out=%edi) | ||
3467 | movl %edx,168(%edi) | ||
3468 | |||
3469 | # qhasm: *(uint32 *) (out + 172) = in11 | ||
3470 | # asm 1: movl <in11=int32#4,172(<out=int32#6) | ||
3471 | # asm 2: movl <in11=%ebx,172(<out=%edi) | ||
3472 | movl %ebx,172(%edi) | ||
3473 | |||
3474 | # qhasm: in8 = z8 | ||
3475 | # asm 1: movd <z8=int6464#1,>in8=int32#1 | ||
3476 | # asm 2: movd <z8=%xmm0,>in8=%eax | ||
3477 | movd %xmm0,%eax | ||
3478 | |||
3479 | # qhasm: in9 = z9 | ||
3480 | # asm 1: movd <z9=int6464#2,>in9=int32#2 | ||
3481 | # asm 2: movd <z9=%xmm1,>in9=%ecx | ||
3482 | movd %xmm1,%ecx | ||
3483 | |||
3484 | # qhasm: in10 = z10 | ||
3485 | # asm 1: movd <z10=int6464#3,>in10=int32#3 | ||
3486 | # asm 2: movd <z10=%xmm2,>in10=%edx | ||
3487 | movd %xmm2,%edx | ||
3488 | |||
3489 | # qhasm: in11 = z11 | ||
3490 | # asm 1: movd <z11=int6464#4,>in11=int32#4 | ||
3491 | # asm 2: movd <z11=%xmm3,>in11=%ebx | ||
3492 | movd %xmm3,%ebx | ||
3493 | |||
3494 | # qhasm: in8 ^= *(uint32 *) (m + 224) | ||
3495 | # asm 1: xorl 224(<m=int32#5),<in8=int32#1 | ||
3496 | # asm 2: xorl 224(<m=%esi),<in8=%eax | ||
3497 | xorl 224(%esi),%eax | ||
3498 | |||
3499 | # qhasm: in9 ^= *(uint32 *) (m + 228) | ||
3500 | # asm 1: xorl 228(<m=int32#5),<in9=int32#2 | ||
3501 | # asm 2: xorl 228(<m=%esi),<in9=%ecx | ||
3502 | xorl 228(%esi),%ecx | ||
3503 | |||
3504 | # qhasm: in10 ^= *(uint32 *) (m + 232) | ||
3505 | # asm 1: xorl 232(<m=int32#5),<in10=int32#3 | ||
3506 | # asm 2: xorl 232(<m=%esi),<in10=%edx | ||
3507 | xorl 232(%esi),%edx | ||
3508 | |||
3509 | # qhasm: in11 ^= *(uint32 *) (m + 236) | ||
3510 | # asm 1: xorl 236(<m=int32#5),<in11=int32#4 | ||
3511 | # asm 2: xorl 236(<m=%esi),<in11=%ebx | ||
3512 | xorl 236(%esi),%ebx | ||
3513 | |||
3514 | # qhasm: *(uint32 *) (out + 224) = in8 | ||
3515 | # asm 1: movl <in8=int32#1,224(<out=int32#6) | ||
3516 | # asm 2: movl <in8=%eax,224(<out=%edi) | ||
3517 | movl %eax,224(%edi) | ||
3518 | |||
3519 | # qhasm: *(uint32 *) (out + 228) = in9 | ||
3520 | # asm 1: movl <in9=int32#2,228(<out=int32#6) | ||
3521 | # asm 2: movl <in9=%ecx,228(<out=%edi) | ||
3522 | movl %ecx,228(%edi) | ||
3523 | |||
3524 | # qhasm: *(uint32 *) (out + 232) = in10 | ||
3525 | # asm 1: movl <in10=int32#3,232(<out=int32#6) | ||
3526 | # asm 2: movl <in10=%edx,232(<out=%edi) | ||
3527 | movl %edx,232(%edi) | ||
3528 | |||
3529 | # qhasm: *(uint32 *) (out + 236) = in11 | ||
3530 | # asm 1: movl <in11=int32#4,236(<out=int32#6) | ||
3531 | # asm 2: movl <in11=%ebx,236(<out=%edi) | ||
3532 | movl %ebx,236(%edi) | ||
3533 | |||
3534 | # qhasm: z12 = z12_stack | ||
3535 | # asm 1: movdqa <z12_stack=stack128#35,>z12=int6464#1 | ||
3536 | # asm 2: movdqa <z12_stack=576(%esp),>z12=%xmm0 | ||
3537 | movdqa 576(%esp),%xmm0 | ||
3538 | |||
3539 | # qhasm: z13 = z13_stack | ||
3540 | # asm 1: movdqa <z13_stack=stack128#30,>z13=int6464#2 | ||
3541 | # asm 2: movdqa <z13_stack=496(%esp),>z13=%xmm1 | ||
3542 | movdqa 496(%esp),%xmm1 | ||
3543 | |||
3544 | # qhasm: z14 = z14_stack | ||
3545 | # asm 1: movdqa <z14_stack=stack128#24,>z14=int6464#3 | ||
3546 | # asm 2: movdqa <z14_stack=400(%esp),>z14=%xmm2 | ||
3547 | movdqa 400(%esp),%xmm2 | ||
3548 | |||
3549 | # qhasm: z15 = z15_stack | ||
3550 | # asm 1: movdqa <z15_stack=stack128#23,>z15=int6464#4 | ||
3551 | # asm 2: movdqa <z15_stack=384(%esp),>z15=%xmm3 | ||
3552 | movdqa 384(%esp),%xmm3 | ||
3553 | |||
3554 | # qhasm: uint32323232 z12 += orig12 | ||
3555 | # asm 1: paddd <orig12=stack128#11,<z12=int6464#1 | ||
3556 | # asm 2: paddd <orig12=192(%esp),<z12=%xmm0 | ||
3557 | paddd 192(%esp),%xmm0 | ||
3558 | |||
3559 | # qhasm: uint32323232 z13 += orig13 | ||
3560 | # asm 1: paddd <orig13=stack128#14,<z13=int6464#2 | ||
3561 | # asm 2: paddd <orig13=240(%esp),<z13=%xmm1 | ||
3562 | paddd 240(%esp),%xmm1 | ||
3563 | |||
3564 | # qhasm: uint32323232 z14 += orig14 | ||
3565 | # asm 1: paddd <orig14=stack128#17,<z14=int6464#3 | ||
3566 | # asm 2: paddd <orig14=288(%esp),<z14=%xmm2 | ||
3567 | paddd 288(%esp),%xmm2 | ||
3568 | |||
3569 | # qhasm: uint32323232 z15 += orig15 | ||
3570 | # asm 1: paddd <orig15=stack128#7,<z15=int6464#4 | ||
3571 | # asm 2: paddd <orig15=128(%esp),<z15=%xmm3 | ||
3572 | paddd 128(%esp),%xmm3 | ||
3573 | |||
3574 | # qhasm: in12 = z12 | ||
3575 | # asm 1: movd <z12=int6464#1,>in12=int32#1 | ||
3576 | # asm 2: movd <z12=%xmm0,>in12=%eax | ||
3577 | movd %xmm0,%eax | ||
3578 | |||
3579 | # qhasm: in13 = z13 | ||
3580 | # asm 1: movd <z13=int6464#2,>in13=int32#2 | ||
3581 | # asm 2: movd <z13=%xmm1,>in13=%ecx | ||
3582 | movd %xmm1,%ecx | ||
3583 | |||
3584 | # qhasm: in14 = z14 | ||
3585 | # asm 1: movd <z14=int6464#3,>in14=int32#3 | ||
3586 | # asm 2: movd <z14=%xmm2,>in14=%edx | ||
3587 | movd %xmm2,%edx | ||
3588 | |||
3589 | # qhasm: in15 = z15 | ||
3590 | # asm 1: movd <z15=int6464#4,>in15=int32#4 | ||
3591 | # asm 2: movd <z15=%xmm3,>in15=%ebx | ||
3592 | movd %xmm3,%ebx | ||
3593 | |||
3594 | # qhasm: z12 <<<= 96 | ||
3595 | # asm 1: pshufd $0x39,<z12=int6464#1,<z12=int6464#1 | ||
3596 | # asm 2: pshufd $0x39,<z12=%xmm0,<z12=%xmm0 | ||
3597 | pshufd $0x39,%xmm0,%xmm0 | ||
3598 | |||
3599 | # qhasm: z13 <<<= 96 | ||
3600 | # asm 1: pshufd $0x39,<z13=int6464#2,<z13=int6464#2 | ||
3601 | # asm 2: pshufd $0x39,<z13=%xmm1,<z13=%xmm1 | ||
3602 | pshufd $0x39,%xmm1,%xmm1 | ||
3603 | |||
3604 | # qhasm: z14 <<<= 96 | ||
3605 | # asm 1: pshufd $0x39,<z14=int6464#3,<z14=int6464#3 | ||
3606 | # asm 2: pshufd $0x39,<z14=%xmm2,<z14=%xmm2 | ||
3607 | pshufd $0x39,%xmm2,%xmm2 | ||
3608 | |||
3609 | # qhasm: z15 <<<= 96 | ||
3610 | # asm 1: pshufd $0x39,<z15=int6464#4,<z15=int6464#4 | ||
3611 | # asm 2: pshufd $0x39,<z15=%xmm3,<z15=%xmm3 | ||
3612 | pshufd $0x39,%xmm3,%xmm3 | ||
3613 | |||
3614 | # qhasm: in12 ^= *(uint32 *) (m + 48) | ||
3615 | # asm 1: xorl 48(<m=int32#5),<in12=int32#1 | ||
3616 | # asm 2: xorl 48(<m=%esi),<in12=%eax | ||
3617 | xorl 48(%esi),%eax | ||
3618 | |||
3619 | # qhasm: in13 ^= *(uint32 *) (m + 52) | ||
3620 | # asm 1: xorl 52(<m=int32#5),<in13=int32#2 | ||
3621 | # asm 2: xorl 52(<m=%esi),<in13=%ecx | ||
3622 | xorl 52(%esi),%ecx | ||
3623 | |||
3624 | # qhasm: in14 ^= *(uint32 *) (m + 56) | ||
3625 | # asm 1: xorl 56(<m=int32#5),<in14=int32#3 | ||
3626 | # asm 2: xorl 56(<m=%esi),<in14=%edx | ||
3627 | xorl 56(%esi),%edx | ||
3628 | |||
3629 | # qhasm: in15 ^= *(uint32 *) (m + 60) | ||
3630 | # asm 1: xorl 60(<m=int32#5),<in15=int32#4 | ||
3631 | # asm 2: xorl 60(<m=%esi),<in15=%ebx | ||
3632 | xorl 60(%esi),%ebx | ||
3633 | |||
3634 | # qhasm: *(uint32 *) (out + 48) = in12 | ||
3635 | # asm 1: movl <in12=int32#1,48(<out=int32#6) | ||
3636 | # asm 2: movl <in12=%eax,48(<out=%edi) | ||
3637 | movl %eax,48(%edi) | ||
3638 | |||
3639 | # qhasm: *(uint32 *) (out + 52) = in13 | ||
3640 | # asm 1: movl <in13=int32#2,52(<out=int32#6) | ||
3641 | # asm 2: movl <in13=%ecx,52(<out=%edi) | ||
3642 | movl %ecx,52(%edi) | ||
3643 | |||
3644 | # qhasm: *(uint32 *) (out + 56) = in14 | ||
3645 | # asm 1: movl <in14=int32#3,56(<out=int32#6) | ||
3646 | # asm 2: movl <in14=%edx,56(<out=%edi) | ||
3647 | movl %edx,56(%edi) | ||
3648 | |||
3649 | # qhasm: *(uint32 *) (out + 60) = in15 | ||
3650 | # asm 1: movl <in15=int32#4,60(<out=int32#6) | ||
3651 | # asm 2: movl <in15=%ebx,60(<out=%edi) | ||
3652 | movl %ebx,60(%edi) | ||
3653 | |||
3654 | # qhasm: in12 = z12 | ||
3655 | # asm 1: movd <z12=int6464#1,>in12=int32#1 | ||
3656 | # asm 2: movd <z12=%xmm0,>in12=%eax | ||
3657 | movd %xmm0,%eax | ||
3658 | |||
3659 | # qhasm: in13 = z13 | ||
3660 | # asm 1: movd <z13=int6464#2,>in13=int32#2 | ||
3661 | # asm 2: movd <z13=%xmm1,>in13=%ecx | ||
3662 | movd %xmm1,%ecx | ||
3663 | |||
3664 | # qhasm: in14 = z14 | ||
3665 | # asm 1: movd <z14=int6464#3,>in14=int32#3 | ||
3666 | # asm 2: movd <z14=%xmm2,>in14=%edx | ||
3667 | movd %xmm2,%edx | ||
3668 | |||
3669 | # qhasm: in15 = z15 | ||
3670 | # asm 1: movd <z15=int6464#4,>in15=int32#4 | ||
3671 | # asm 2: movd <z15=%xmm3,>in15=%ebx | ||
3672 | movd %xmm3,%ebx | ||
3673 | |||
3674 | # qhasm: z12 <<<= 96 | ||
3675 | # asm 1: pshufd $0x39,<z12=int6464#1,<z12=int6464#1 | ||
3676 | # asm 2: pshufd $0x39,<z12=%xmm0,<z12=%xmm0 | ||
3677 | pshufd $0x39,%xmm0,%xmm0 | ||
3678 | |||
3679 | # qhasm: z13 <<<= 96 | ||
3680 | # asm 1: pshufd $0x39,<z13=int6464#2,<z13=int6464#2 | ||
3681 | # asm 2: pshufd $0x39,<z13=%xmm1,<z13=%xmm1 | ||
3682 | pshufd $0x39,%xmm1,%xmm1 | ||
3683 | |||
3684 | # qhasm: z14 <<<= 96 | ||
3685 | # asm 1: pshufd $0x39,<z14=int6464#3,<z14=int6464#3 | ||
3686 | # asm 2: pshufd $0x39,<z14=%xmm2,<z14=%xmm2 | ||
3687 | pshufd $0x39,%xmm2,%xmm2 | ||
3688 | |||
3689 | # qhasm: z15 <<<= 96 | ||
3690 | # asm 1: pshufd $0x39,<z15=int6464#4,<z15=int6464#4 | ||
3691 | # asm 2: pshufd $0x39,<z15=%xmm3,<z15=%xmm3 | ||
3692 | pshufd $0x39,%xmm3,%xmm3 | ||
3693 | |||
3694 | # qhasm: in12 ^= *(uint32 *) (m + 112) | ||
3695 | # asm 1: xorl 112(<m=int32#5),<in12=int32#1 | ||
3696 | # asm 2: xorl 112(<m=%esi),<in12=%eax | ||
3697 | xorl 112(%esi),%eax | ||
3698 | |||
3699 | # qhasm: in13 ^= *(uint32 *) (m + 116) | ||
3700 | # asm 1: xorl 116(<m=int32#5),<in13=int32#2 | ||
3701 | # asm 2: xorl 116(<m=%esi),<in13=%ecx | ||
3702 | xorl 116(%esi),%ecx | ||
3703 | |||
3704 | # qhasm: in14 ^= *(uint32 *) (m + 120) | ||
3705 | # asm 1: xorl 120(<m=int32#5),<in14=int32#3 | ||
3706 | # asm 2: xorl 120(<m=%esi),<in14=%edx | ||
3707 | xorl 120(%esi),%edx | ||
3708 | |||
3709 | # qhasm: in15 ^= *(uint32 *) (m + 124) | ||
3710 | # asm 1: xorl 124(<m=int32#5),<in15=int32#4 | ||
3711 | # asm 2: xorl 124(<m=%esi),<in15=%ebx | ||
3712 | xorl 124(%esi),%ebx | ||
3713 | |||
3714 | # qhasm: *(uint32 *) (out + 112) = in12 | ||
3715 | # asm 1: movl <in12=int32#1,112(<out=int32#6) | ||
3716 | # asm 2: movl <in12=%eax,112(<out=%edi) | ||
3717 | movl %eax,112(%edi) | ||
3718 | |||
3719 | # qhasm: *(uint32 *) (out + 116) = in13 | ||
3720 | # asm 1: movl <in13=int32#2,116(<out=int32#6) | ||
3721 | # asm 2: movl <in13=%ecx,116(<out=%edi) | ||
3722 | movl %ecx,116(%edi) | ||
3723 | |||
3724 | # qhasm: *(uint32 *) (out + 120) = in14 | ||
3725 | # asm 1: movl <in14=int32#3,120(<out=int32#6) | ||
3726 | # asm 2: movl <in14=%edx,120(<out=%edi) | ||
3727 | movl %edx,120(%edi) | ||
3728 | |||
3729 | # qhasm: *(uint32 *) (out + 124) = in15 | ||
3730 | # asm 1: movl <in15=int32#4,124(<out=int32#6) | ||
3731 | # asm 2: movl <in15=%ebx,124(<out=%edi) | ||
3732 | movl %ebx,124(%edi) | ||
3733 | |||
3734 | # qhasm: in12 = z12 | ||
3735 | # asm 1: movd <z12=int6464#1,>in12=int32#1 | ||
3736 | # asm 2: movd <z12=%xmm0,>in12=%eax | ||
3737 | movd %xmm0,%eax | ||
3738 | |||
3739 | # qhasm: in13 = z13 | ||
3740 | # asm 1: movd <z13=int6464#2,>in13=int32#2 | ||
3741 | # asm 2: movd <z13=%xmm1,>in13=%ecx | ||
3742 | movd %xmm1,%ecx | ||
3743 | |||
3744 | # qhasm: in14 = z14 | ||
3745 | # asm 1: movd <z14=int6464#3,>in14=int32#3 | ||
3746 | # asm 2: movd <z14=%xmm2,>in14=%edx | ||
3747 | movd %xmm2,%edx | ||
3748 | |||
3749 | # qhasm: in15 = z15 | ||
3750 | # asm 1: movd <z15=int6464#4,>in15=int32#4 | ||
3751 | # asm 2: movd <z15=%xmm3,>in15=%ebx | ||
3752 | movd %xmm3,%ebx | ||
3753 | |||
3754 | # qhasm: z12 <<<= 96 | ||
3755 | # asm 1: pshufd $0x39,<z12=int6464#1,<z12=int6464#1 | ||
3756 | # asm 2: pshufd $0x39,<z12=%xmm0,<z12=%xmm0 | ||
3757 | pshufd $0x39,%xmm0,%xmm0 | ||
3758 | |||
3759 | # qhasm: z13 <<<= 96 | ||
3760 | # asm 1: pshufd $0x39,<z13=int6464#2,<z13=int6464#2 | ||
3761 | # asm 2: pshufd $0x39,<z13=%xmm1,<z13=%xmm1 | ||
3762 | pshufd $0x39,%xmm1,%xmm1 | ||
3763 | |||
3764 | # qhasm: z14 <<<= 96 | ||
3765 | # asm 1: pshufd $0x39,<z14=int6464#3,<z14=int6464#3 | ||
3766 | # asm 2: pshufd $0x39,<z14=%xmm2,<z14=%xmm2 | ||
3767 | pshufd $0x39,%xmm2,%xmm2 | ||
3768 | |||
3769 | # qhasm: z15 <<<= 96 | ||
3770 | # asm 1: pshufd $0x39,<z15=int6464#4,<z15=int6464#4 | ||
3771 | # asm 2: pshufd $0x39,<z15=%xmm3,<z15=%xmm3 | ||
3772 | pshufd $0x39,%xmm3,%xmm3 | ||
3773 | |||
3774 | # qhasm: in12 ^= *(uint32 *) (m + 176) | ||
3775 | # asm 1: xorl 176(<m=int32#5),<in12=int32#1 | ||
3776 | # asm 2: xorl 176(<m=%esi),<in12=%eax | ||
3777 | xorl 176(%esi),%eax | ||
3778 | |||
3779 | # qhasm: in13 ^= *(uint32 *) (m + 180) | ||
3780 | # asm 1: xorl 180(<m=int32#5),<in13=int32#2 | ||
3781 | # asm 2: xorl 180(<m=%esi),<in13=%ecx | ||
3782 | xorl 180(%esi),%ecx | ||
3783 | |||
3784 | # qhasm: in14 ^= *(uint32 *) (m + 184) | ||
3785 | # asm 1: xorl 184(<m=int32#5),<in14=int32#3 | ||
3786 | # asm 2: xorl 184(<m=%esi),<in14=%edx | ||
3787 | xorl 184(%esi),%edx | ||
3788 | |||
3789 | # qhasm: in15 ^= *(uint32 *) (m + 188) | ||
3790 | # asm 1: xorl 188(<m=int32#5),<in15=int32#4 | ||
3791 | # asm 2: xorl 188(<m=%esi),<in15=%ebx | ||
3792 | xorl 188(%esi),%ebx | ||
3793 | |||
3794 | # qhasm: *(uint32 *) (out + 176) = in12 | ||
3795 | # asm 1: movl <in12=int32#1,176(<out=int32#6) | ||
3796 | # asm 2: movl <in12=%eax,176(<out=%edi) | ||
3797 | movl %eax,176(%edi) | ||
3798 | |||
3799 | # qhasm: *(uint32 *) (out + 180) = in13 | ||
3800 | # asm 1: movl <in13=int32#2,180(<out=int32#6) | ||
3801 | # asm 2: movl <in13=%ecx,180(<out=%edi) | ||
3802 | movl %ecx,180(%edi) | ||
3803 | |||
3804 | # qhasm: *(uint32 *) (out + 184) = in14 | ||
3805 | # asm 1: movl <in14=int32#3,184(<out=int32#6) | ||
3806 | # asm 2: movl <in14=%edx,184(<out=%edi) | ||
3807 | movl %edx,184(%edi) | ||
3808 | |||
3809 | # qhasm: *(uint32 *) (out + 188) = in15 | ||
3810 | # asm 1: movl <in15=int32#4,188(<out=int32#6) | ||
3811 | # asm 2: movl <in15=%ebx,188(<out=%edi) | ||
3812 | movl %ebx,188(%edi) | ||
3813 | |||
3814 | # qhasm: in12 = z12 | ||
3815 | # asm 1: movd <z12=int6464#1,>in12=int32#1 | ||
3816 | # asm 2: movd <z12=%xmm0,>in12=%eax | ||
3817 | movd %xmm0,%eax | ||
3818 | |||
3819 | # qhasm: in13 = z13 | ||
3820 | # asm 1: movd <z13=int6464#2,>in13=int32#2 | ||
3821 | # asm 2: movd <z13=%xmm1,>in13=%ecx | ||
3822 | movd %xmm1,%ecx | ||
3823 | |||
3824 | # qhasm: in14 = z14 | ||
3825 | # asm 1: movd <z14=int6464#3,>in14=int32#3 | ||
3826 | # asm 2: movd <z14=%xmm2,>in14=%edx | ||
3827 | movd %xmm2,%edx | ||
3828 | |||
3829 | # qhasm: in15 = z15 | ||
3830 | # asm 1: movd <z15=int6464#4,>in15=int32#4 | ||
3831 | # asm 2: movd <z15=%xmm3,>in15=%ebx | ||
3832 | movd %xmm3,%ebx | ||
3833 | |||
3834 | # qhasm: in12 ^= *(uint32 *) (m + 240) | ||
3835 | # asm 1: xorl 240(<m=int32#5),<in12=int32#1 | ||
3836 | # asm 2: xorl 240(<m=%esi),<in12=%eax | ||
3837 | xorl 240(%esi),%eax | ||
3838 | |||
3839 | # qhasm: in13 ^= *(uint32 *) (m + 244) | ||
3840 | # asm 1: xorl 244(<m=int32#5),<in13=int32#2 | ||
3841 | # asm 2: xorl 244(<m=%esi),<in13=%ecx | ||
3842 | xorl 244(%esi),%ecx | ||
3843 | |||
3844 | # qhasm: in14 ^= *(uint32 *) (m + 248) | ||
3845 | # asm 1: xorl 248(<m=int32#5),<in14=int32#3 | ||
3846 | # asm 2: xorl 248(<m=%esi),<in14=%edx | ||
3847 | xorl 248(%esi),%edx | ||
3848 | |||
3849 | # qhasm: in15 ^= *(uint32 *) (m + 252) | ||
3850 | # asm 1: xorl 252(<m=int32#5),<in15=int32#4 | ||
3851 | # asm 2: xorl 252(<m=%esi),<in15=%ebx | ||
3852 | xorl 252(%esi),%ebx | ||
3853 | |||
3854 | # qhasm: *(uint32 *) (out + 240) = in12 | ||
3855 | # asm 1: movl <in12=int32#1,240(<out=int32#6) | ||
3856 | # asm 2: movl <in12=%eax,240(<out=%edi) | ||
3857 | movl %eax,240(%edi) | ||
3858 | |||
3859 | # qhasm: *(uint32 *) (out + 244) = in13 | ||
3860 | # asm 1: movl <in13=int32#2,244(<out=int32#6) | ||
3861 | # asm 2: movl <in13=%ecx,244(<out=%edi) | ||
3862 | movl %ecx,244(%edi) | ||
3863 | |||
3864 | # qhasm: *(uint32 *) (out + 248) = in14 | ||
3865 | # asm 1: movl <in14=int32#3,248(<out=int32#6) | ||
3866 | # asm 2: movl <in14=%edx,248(<out=%edi) | ||
3867 | movl %edx,248(%edi) | ||
3868 | |||
3869 | # qhasm: *(uint32 *) (out + 252) = in15 | ||
3870 | # asm 1: movl <in15=int32#4,252(<out=int32#6) | ||
3871 | # asm 2: movl <in15=%ebx,252(<out=%edi) | ||
3872 | movl %ebx,252(%edi) | ||
3873 | |||
3874 | # qhasm: bytes = bytes_stack | ||
3875 | # asm 1: movl <bytes_stack=stack32#7,>bytes=int32#1 | ||
3876 | # asm 2: movl <bytes_stack=24(%esp),>bytes=%eax | ||
3877 | movl 24(%esp),%eax | ||
3878 | |||
3879 | # qhasm: bytes -= 256 | ||
3880 | # asm 1: sub $256,<bytes=int32#1 | ||
3881 | # asm 2: sub $256,<bytes=%eax | ||
3882 | sub $256,%eax | ||
3883 | |||
3884 | # qhasm: m += 256 | ||
3885 | # asm 1: add $256,<m=int32#5 | ||
3886 | # asm 2: add $256,<m=%esi | ||
3887 | add $256,%esi | ||
3888 | |||
3889 | # qhasm: out += 256 | ||
3890 | # asm 1: add $256,<out=int32#6 | ||
3891 | # asm 2: add $256,<out=%edi | ||
3892 | add $256,%edi | ||
3893 | |||
3894 | # qhasm: out_stack = out | ||
3895 | # asm 1: movl <out=int32#6,>out_stack=stack32#6 | ||
3896 | # asm 2: movl <out=%edi,>out_stack=20(%esp) | ||
3897 | movl %edi,20(%esp) | ||
3898 | |||
3899 | # qhasm: unsigned<? bytes - 256 | ||
3900 | # asm 1: cmp $256,<bytes=int32#1 | ||
3901 | # asm 2: cmp $256,<bytes=%eax | ||
3902 | cmp $256,%eax | ||
3903 | # comment:fp stack unchanged by jump | ||
3904 | |||
3905 | # qhasm: goto bytesatleast256 if !unsigned< | ||
3906 | jae ._bytesatleast256 | ||
3907 | |||
3908 | # qhasm: unsigned>? bytes - 0 | ||
3909 | # asm 1: cmp $0,<bytes=int32#1 | ||
3910 | # asm 2: cmp $0,<bytes=%eax | ||
3911 | cmp $0,%eax | ||
3912 | # comment:fp stack unchanged by jump | ||
3913 | |||
3914 | # qhasm: goto done if !unsigned> | ||
3915 | jbe ._done | ||
3916 | # comment:fp stack unchanged by fallthrough | ||
3917 | |||
3918 | # qhasm: bytesbetween1and255: | ||
3919 | ._bytesbetween1and255: | ||
3920 | |||
3921 | # qhasm: unsigned<? bytes - 64 | ||
3922 | # asm 1: cmp $64,<bytes=int32#1 | ||
3923 | # asm 2: cmp $64,<bytes=%eax | ||
3924 | cmp $64,%eax | ||
3925 | # comment:fp stack unchanged by jump | ||
3926 | |||
3927 | # qhasm: goto nocopy if !unsigned< | ||
3928 | jae ._nocopy | ||
3929 | |||
3930 | # qhasm: ctarget = out | ||
3931 | # asm 1: movl <out=int32#6,>ctarget=stack32#6 | ||
3932 | # asm 2: movl <out=%edi,>ctarget=20(%esp) | ||
3933 | movl %edi,20(%esp) | ||
3934 | |||
3935 | # qhasm: out = &tmp | ||
3936 | # asm 1: leal <tmp=stack512#1,>out=int32#6 | ||
3937 | # asm 2: leal <tmp=640(%esp),>out=%edi | ||
3938 | leal 640(%esp),%edi | ||
3939 | |||
3940 | # qhasm: i = bytes | ||
3941 | # asm 1: mov <bytes=int32#1,>i=int32#2 | ||
3942 | # asm 2: mov <bytes=%eax,>i=%ecx | ||
3943 | mov %eax,%ecx | ||
3944 | |||
3945 | # qhasm: while (i) { *out++ = *m++; --i } | ||
3946 | rep movsb | ||
3947 | |||
3948 | # qhasm: out = &tmp | ||
3949 | # asm 1: leal <tmp=stack512#1,>out=int32#6 | ||
3950 | # asm 2: leal <tmp=640(%esp),>out=%edi | ||
3951 | leal 640(%esp),%edi | ||
3952 | |||
3953 | # qhasm: m = &tmp | ||
3954 | # asm 1: leal <tmp=stack512#1,>m=int32#5 | ||
3955 | # asm 2: leal <tmp=640(%esp),>m=%esi | ||
3956 | leal 640(%esp),%esi | ||
3957 | # comment:fp stack unchanged by fallthrough | ||
3958 | |||
3959 | # qhasm: nocopy: | ||
3960 | ._nocopy: | ||
3961 | |||
3962 | # qhasm: bytes_stack = bytes | ||
3963 | # asm 1: movl <bytes=int32#1,>bytes_stack=stack32#7 | ||
3964 | # asm 2: movl <bytes=%eax,>bytes_stack=24(%esp) | ||
3965 | movl %eax,24(%esp) | ||
3966 | |||
3967 | # qhasm: diag0 = x0 | ||
3968 | # asm 1: movdqa <x0=stack128#3,>diag0=int6464#1 | ||
3969 | # asm 2: movdqa <x0=64(%esp),>diag0=%xmm0 | ||
3970 | movdqa 64(%esp),%xmm0 | ||
3971 | |||
3972 | # qhasm: diag1 = x1 | ||
3973 | # asm 1: movdqa <x1=stack128#2,>diag1=int6464#2 | ||
3974 | # asm 2: movdqa <x1=48(%esp),>diag1=%xmm1 | ||
3975 | movdqa 48(%esp),%xmm1 | ||
3976 | |||
3977 | # qhasm: diag2 = x2 | ||
3978 | # asm 1: movdqa <x2=stack128#4,>diag2=int6464#3 | ||
3979 | # asm 2: movdqa <x2=80(%esp),>diag2=%xmm2 | ||
3980 | movdqa 80(%esp),%xmm2 | ||
3981 | |||
3982 | # qhasm: diag3 = x3 | ||
3983 | # asm 1: movdqa <x3=stack128#1,>diag3=int6464#4 | ||
3984 | # asm 2: movdqa <x3=32(%esp),>diag3=%xmm3 | ||
3985 | movdqa 32(%esp),%xmm3 | ||
3986 | |||
3987 | # qhasm: a0 = diag1 | ||
3988 | # asm 1: movdqa <diag1=int6464#2,>a0=int6464#5 | ||
3989 | # asm 2: movdqa <diag1=%xmm1,>a0=%xmm4 | ||
3990 | movdqa %xmm1,%xmm4 | ||
3991 | |||
3992 | # qhasm: i = 8 | ||
3993 | # asm 1: mov $8,>i=int32#1 | ||
3994 | # asm 2: mov $8,>i=%eax | ||
3995 | mov $8,%eax | ||
3996 | |||
3997 | # qhasm: mainloop2: | ||
3998 | ._mainloop2: | ||
3999 | |||
4000 | # qhasm: uint32323232 a0 += diag0 | ||
4001 | # asm 1: paddd <diag0=int6464#1,<a0=int6464#5 | ||
4002 | # asm 2: paddd <diag0=%xmm0,<a0=%xmm4 | ||
4003 | paddd %xmm0,%xmm4 | ||
4004 | |||
4005 | # qhasm: a1 = diag0 | ||
4006 | # asm 1: movdqa <diag0=int6464#1,>a1=int6464#6 | ||
4007 | # asm 2: movdqa <diag0=%xmm0,>a1=%xmm5 | ||
4008 | movdqa %xmm0,%xmm5 | ||
4009 | |||
4010 | # qhasm: b0 = a0 | ||
4011 | # asm 1: movdqa <a0=int6464#5,>b0=int6464#7 | ||
4012 | # asm 2: movdqa <a0=%xmm4,>b0=%xmm6 | ||
4013 | movdqa %xmm4,%xmm6 | ||
4014 | |||
4015 | # qhasm: uint32323232 a0 <<= 7 | ||
4016 | # asm 1: pslld $7,<a0=int6464#5 | ||
4017 | # asm 2: pslld $7,<a0=%xmm4 | ||
4018 | pslld $7,%xmm4 | ||
4019 | |||
4020 | # qhasm: uint32323232 b0 >>= 25 | ||
4021 | # asm 1: psrld $25,<b0=int6464#7 | ||
4022 | # asm 2: psrld $25,<b0=%xmm6 | ||
4023 | psrld $25,%xmm6 | ||
4024 | |||
4025 | # qhasm: diag3 ^= a0 | ||
4026 | # asm 1: pxor <a0=int6464#5,<diag3=int6464#4 | ||
4027 | # asm 2: pxor <a0=%xmm4,<diag3=%xmm3 | ||
4028 | pxor %xmm4,%xmm3 | ||
4029 | |||
4030 | # qhasm: diag3 ^= b0 | ||
4031 | # asm 1: pxor <b0=int6464#7,<diag3=int6464#4 | ||
4032 | # asm 2: pxor <b0=%xmm6,<diag3=%xmm3 | ||
4033 | pxor %xmm6,%xmm3 | ||
4034 | |||
4035 | # qhasm: uint32323232 a1 += diag3 | ||
4036 | # asm 1: paddd <diag3=int6464#4,<a1=int6464#6 | ||
4037 | # asm 2: paddd <diag3=%xmm3,<a1=%xmm5 | ||
4038 | paddd %xmm3,%xmm5 | ||
4039 | |||
4040 | # qhasm: a2 = diag3 | ||
4041 | # asm 1: movdqa <diag3=int6464#4,>a2=int6464#5 | ||
4042 | # asm 2: movdqa <diag3=%xmm3,>a2=%xmm4 | ||
4043 | movdqa %xmm3,%xmm4 | ||
4044 | |||
4045 | # qhasm: b1 = a1 | ||
4046 | # asm 1: movdqa <a1=int6464#6,>b1=int6464#7 | ||
4047 | # asm 2: movdqa <a1=%xmm5,>b1=%xmm6 | ||
4048 | movdqa %xmm5,%xmm6 | ||
4049 | |||
4050 | # qhasm: uint32323232 a1 <<= 9 | ||
4051 | # asm 1: pslld $9,<a1=int6464#6 | ||
4052 | # asm 2: pslld $9,<a1=%xmm5 | ||
4053 | pslld $9,%xmm5 | ||
4054 | |||
4055 | # qhasm: uint32323232 b1 >>= 23 | ||
4056 | # asm 1: psrld $23,<b1=int6464#7 | ||
4057 | # asm 2: psrld $23,<b1=%xmm6 | ||
4058 | psrld $23,%xmm6 | ||
4059 | |||
4060 | # qhasm: diag2 ^= a1 | ||
4061 | # asm 1: pxor <a1=int6464#6,<diag2=int6464#3 | ||
4062 | # asm 2: pxor <a1=%xmm5,<diag2=%xmm2 | ||
4063 | pxor %xmm5,%xmm2 | ||
4064 | |||
4065 | # qhasm: diag3 <<<= 32 | ||
4066 | # asm 1: pshufd $0x93,<diag3=int6464#4,<diag3=int6464#4 | ||
4067 | # asm 2: pshufd $0x93,<diag3=%xmm3,<diag3=%xmm3 | ||
4068 | pshufd $0x93,%xmm3,%xmm3 | ||
4069 | |||
4070 | # qhasm: diag2 ^= b1 | ||
4071 | # asm 1: pxor <b1=int6464#7,<diag2=int6464#3 | ||
4072 | # asm 2: pxor <b1=%xmm6,<diag2=%xmm2 | ||
4073 | pxor %xmm6,%xmm2 | ||
4074 | |||
4075 | # qhasm: uint32323232 a2 += diag2 | ||
4076 | # asm 1: paddd <diag2=int6464#3,<a2=int6464#5 | ||
4077 | # asm 2: paddd <diag2=%xmm2,<a2=%xmm4 | ||
4078 | paddd %xmm2,%xmm4 | ||
4079 | |||
4080 | # qhasm: a3 = diag2 | ||
4081 | # asm 1: movdqa <diag2=int6464#3,>a3=int6464#6 | ||
4082 | # asm 2: movdqa <diag2=%xmm2,>a3=%xmm5 | ||
4083 | movdqa %xmm2,%xmm5 | ||
4084 | |||
4085 | # qhasm: b2 = a2 | ||
4086 | # asm 1: movdqa <a2=int6464#5,>b2=int6464#7 | ||
4087 | # asm 2: movdqa <a2=%xmm4,>b2=%xmm6 | ||
4088 | movdqa %xmm4,%xmm6 | ||
4089 | |||
4090 | # qhasm: uint32323232 a2 <<= 13 | ||
4091 | # asm 1: pslld $13,<a2=int6464#5 | ||
4092 | # asm 2: pslld $13,<a2=%xmm4 | ||
4093 | pslld $13,%xmm4 | ||
4094 | |||
4095 | # qhasm: uint32323232 b2 >>= 19 | ||
4096 | # asm 1: psrld $19,<b2=int6464#7 | ||
4097 | # asm 2: psrld $19,<b2=%xmm6 | ||
4098 | psrld $19,%xmm6 | ||
4099 | |||
4100 | # qhasm: diag1 ^= a2 | ||
4101 | # asm 1: pxor <a2=int6464#5,<diag1=int6464#2 | ||
4102 | # asm 2: pxor <a2=%xmm4,<diag1=%xmm1 | ||
4103 | pxor %xmm4,%xmm1 | ||
4104 | |||
4105 | # qhasm: diag2 <<<= 64 | ||
4106 | # asm 1: pshufd $0x4e,<diag2=int6464#3,<diag2=int6464#3 | ||
4107 | # asm 2: pshufd $0x4e,<diag2=%xmm2,<diag2=%xmm2 | ||
4108 | pshufd $0x4e,%xmm2,%xmm2 | ||
4109 | |||
4110 | # qhasm: diag1 ^= b2 | ||
4111 | # asm 1: pxor <b2=int6464#7,<diag1=int6464#2 | ||
4112 | # asm 2: pxor <b2=%xmm6,<diag1=%xmm1 | ||
4113 | pxor %xmm6,%xmm1 | ||
4114 | |||
4115 | # qhasm: uint32323232 a3 += diag1 | ||
4116 | # asm 1: paddd <diag1=int6464#2,<a3=int6464#6 | ||
4117 | # asm 2: paddd <diag1=%xmm1,<a3=%xmm5 | ||
4118 | paddd %xmm1,%xmm5 | ||
4119 | |||
4120 | # qhasm: a4 = diag3 | ||
4121 | # asm 1: movdqa <diag3=int6464#4,>a4=int6464#5 | ||
4122 | # asm 2: movdqa <diag3=%xmm3,>a4=%xmm4 | ||
4123 | movdqa %xmm3,%xmm4 | ||
4124 | |||
4125 | # qhasm: b3 = a3 | ||
4126 | # asm 1: movdqa <a3=int6464#6,>b3=int6464#7 | ||
4127 | # asm 2: movdqa <a3=%xmm5,>b3=%xmm6 | ||
4128 | movdqa %xmm5,%xmm6 | ||
4129 | |||
4130 | # qhasm: uint32323232 a3 <<= 18 | ||
4131 | # asm 1: pslld $18,<a3=int6464#6 | ||
4132 | # asm 2: pslld $18,<a3=%xmm5 | ||
4133 | pslld $18,%xmm5 | ||
4134 | |||
4135 | # qhasm: uint32323232 b3 >>= 14 | ||
4136 | # asm 1: psrld $14,<b3=int6464#7 | ||
4137 | # asm 2: psrld $14,<b3=%xmm6 | ||
4138 | psrld $14,%xmm6 | ||
4139 | |||
4140 | # qhasm: diag0 ^= a3 | ||
4141 | # asm 1: pxor <a3=int6464#6,<diag0=int6464#1 | ||
4142 | # asm 2: pxor <a3=%xmm5,<diag0=%xmm0 | ||
4143 | pxor %xmm5,%xmm0 | ||
4144 | |||
4145 | # qhasm: diag1 <<<= 96 | ||
4146 | # asm 1: pshufd $0x39,<diag1=int6464#2,<diag1=int6464#2 | ||
4147 | # asm 2: pshufd $0x39,<diag1=%xmm1,<diag1=%xmm1 | ||
4148 | pshufd $0x39,%xmm1,%xmm1 | ||
4149 | |||
4150 | # qhasm: diag0 ^= b3 | ||
4151 | # asm 1: pxor <b3=int6464#7,<diag0=int6464#1 | ||
4152 | # asm 2: pxor <b3=%xmm6,<diag0=%xmm0 | ||
4153 | pxor %xmm6,%xmm0 | ||
4154 | |||
4155 | # qhasm: uint32323232 a4 += diag0 | ||
4156 | # asm 1: paddd <diag0=int6464#1,<a4=int6464#5 | ||
4157 | # asm 2: paddd <diag0=%xmm0,<a4=%xmm4 | ||
4158 | paddd %xmm0,%xmm4 | ||
4159 | |||
4160 | # qhasm: a5 = diag0 | ||
4161 | # asm 1: movdqa <diag0=int6464#1,>a5=int6464#6 | ||
4162 | # asm 2: movdqa <diag0=%xmm0,>a5=%xmm5 | ||
4163 | movdqa %xmm0,%xmm5 | ||
4164 | |||
4165 | # qhasm: b4 = a4 | ||
4166 | # asm 1: movdqa <a4=int6464#5,>b4=int6464#7 | ||
4167 | # asm 2: movdqa <a4=%xmm4,>b4=%xmm6 | ||
4168 | movdqa %xmm4,%xmm6 | ||
4169 | |||
4170 | # qhasm: uint32323232 a4 <<= 7 | ||
4171 | # asm 1: pslld $7,<a4=int6464#5 | ||
4172 | # asm 2: pslld $7,<a4=%xmm4 | ||
4173 | pslld $7,%xmm4 | ||
4174 | |||
4175 | # qhasm: uint32323232 b4 >>= 25 | ||
4176 | # asm 1: psrld $25,<b4=int6464#7 | ||
4177 | # asm 2: psrld $25,<b4=%xmm6 | ||
4178 | psrld $25,%xmm6 | ||
4179 | |||
4180 | # qhasm: diag1 ^= a4 | ||
4181 | # asm 1: pxor <a4=int6464#5,<diag1=int6464#2 | ||
4182 | # asm 2: pxor <a4=%xmm4,<diag1=%xmm1 | ||
4183 | pxor %xmm4,%xmm1 | ||
4184 | |||
4185 | # qhasm: diag1 ^= b4 | ||
4186 | # asm 1: pxor <b4=int6464#7,<diag1=int6464#2 | ||
4187 | # asm 2: pxor <b4=%xmm6,<diag1=%xmm1 | ||
4188 | pxor %xmm6,%xmm1 | ||
4189 | |||
4190 | # qhasm: uint32323232 a5 += diag1 | ||
4191 | # asm 1: paddd <diag1=int6464#2,<a5=int6464#6 | ||
4192 | # asm 2: paddd <diag1=%xmm1,<a5=%xmm5 | ||
4193 | paddd %xmm1,%xmm5 | ||
4194 | |||
4195 | # qhasm: a6 = diag1 | ||
4196 | # asm 1: movdqa <diag1=int6464#2,>a6=int6464#5 | ||
4197 | # asm 2: movdqa <diag1=%xmm1,>a6=%xmm4 | ||
4198 | movdqa %xmm1,%xmm4 | ||
4199 | |||
4200 | # qhasm: b5 = a5 | ||
4201 | # asm 1: movdqa <a5=int6464#6,>b5=int6464#7 | ||
4202 | # asm 2: movdqa <a5=%xmm5,>b5=%xmm6 | ||
4203 | movdqa %xmm5,%xmm6 | ||
4204 | |||
4205 | # qhasm: uint32323232 a5 <<= 9 | ||
4206 | # asm 1: pslld $9,<a5=int6464#6 | ||
4207 | # asm 2: pslld $9,<a5=%xmm5 | ||
4208 | pslld $9,%xmm5 | ||
4209 | |||
4210 | # qhasm: uint32323232 b5 >>= 23 | ||
4211 | # asm 1: psrld $23,<b5=int6464#7 | ||
4212 | # asm 2: psrld $23,<b5=%xmm6 | ||
4213 | psrld $23,%xmm6 | ||
4214 | |||
4215 | # qhasm: diag2 ^= a5 | ||
4216 | # asm 1: pxor <a5=int6464#6,<diag2=int6464#3 | ||
4217 | # asm 2: pxor <a5=%xmm5,<diag2=%xmm2 | ||
4218 | pxor %xmm5,%xmm2 | ||
4219 | |||
4220 | # qhasm: diag1 <<<= 32 | ||
4221 | # asm 1: pshufd $0x93,<diag1=int6464#2,<diag1=int6464#2 | ||
4222 | # asm 2: pshufd $0x93,<diag1=%xmm1,<diag1=%xmm1 | ||
4223 | pshufd $0x93,%xmm1,%xmm1 | ||
4224 | |||
4225 | # qhasm: diag2 ^= b5 | ||
4226 | # asm 1: pxor <b5=int6464#7,<diag2=int6464#3 | ||
4227 | # asm 2: pxor <b5=%xmm6,<diag2=%xmm2 | ||
4228 | pxor %xmm6,%xmm2 | ||
4229 | |||
4230 | # qhasm: uint32323232 a6 += diag2 | ||
4231 | # asm 1: paddd <diag2=int6464#3,<a6=int6464#5 | ||
4232 | # asm 2: paddd <diag2=%xmm2,<a6=%xmm4 | ||
4233 | paddd %xmm2,%xmm4 | ||
4234 | |||
4235 | # qhasm: a7 = diag2 | ||
4236 | # asm 1: movdqa <diag2=int6464#3,>a7=int6464#6 | ||
4237 | # asm 2: movdqa <diag2=%xmm2,>a7=%xmm5 | ||
4238 | movdqa %xmm2,%xmm5 | ||
4239 | |||
4240 | # qhasm: b6 = a6 | ||
4241 | # asm 1: movdqa <a6=int6464#5,>b6=int6464#7 | ||
4242 | # asm 2: movdqa <a6=%xmm4,>b6=%xmm6 | ||
4243 | movdqa %xmm4,%xmm6 | ||
4244 | |||
4245 | # qhasm: uint32323232 a6 <<= 13 | ||
4246 | # asm 1: pslld $13,<a6=int6464#5 | ||
4247 | # asm 2: pslld $13,<a6=%xmm4 | ||
4248 | pslld $13,%xmm4 | ||
4249 | |||
4250 | # qhasm: uint32323232 b6 >>= 19 | ||
4251 | # asm 1: psrld $19,<b6=int6464#7 | ||
4252 | # asm 2: psrld $19,<b6=%xmm6 | ||
4253 | psrld $19,%xmm6 | ||
4254 | |||
4255 | # qhasm: diag3 ^= a6 | ||
4256 | # asm 1: pxor <a6=int6464#5,<diag3=int6464#4 | ||
4257 | # asm 2: pxor <a6=%xmm4,<diag3=%xmm3 | ||
4258 | pxor %xmm4,%xmm3 | ||
4259 | |||
4260 | # qhasm: diag2 <<<= 64 | ||
4261 | # asm 1: pshufd $0x4e,<diag2=int6464#3,<diag2=int6464#3 | ||
4262 | # asm 2: pshufd $0x4e,<diag2=%xmm2,<diag2=%xmm2 | ||
4263 | pshufd $0x4e,%xmm2,%xmm2 | ||
4264 | |||
4265 | # qhasm: diag3 ^= b6 | ||
4266 | # asm 1: pxor <b6=int6464#7,<diag3=int6464#4 | ||
4267 | # asm 2: pxor <b6=%xmm6,<diag3=%xmm3 | ||
4268 | pxor %xmm6,%xmm3 | ||
4269 | |||
4270 | # qhasm: uint32323232 a7 += diag3 | ||
4271 | # asm 1: paddd <diag3=int6464#4,<a7=int6464#6 | ||
4272 | # asm 2: paddd <diag3=%xmm3,<a7=%xmm5 | ||
4273 | paddd %xmm3,%xmm5 | ||
4274 | |||
4275 | # qhasm: a0 = diag1 | ||
4276 | # asm 1: movdqa <diag1=int6464#2,>a0=int6464#5 | ||
4277 | # asm 2: movdqa <diag1=%xmm1,>a0=%xmm4 | ||
4278 | movdqa %xmm1,%xmm4 | ||
4279 | |||
4280 | # qhasm: b7 = a7 | ||
4281 | # asm 1: movdqa <a7=int6464#6,>b7=int6464#7 | ||
4282 | # asm 2: movdqa <a7=%xmm5,>b7=%xmm6 | ||
4283 | movdqa %xmm5,%xmm6 | ||
4284 | |||
4285 | # qhasm: uint32323232 a7 <<= 18 | ||
4286 | # asm 1: pslld $18,<a7=int6464#6 | ||
4287 | # asm 2: pslld $18,<a7=%xmm5 | ||
4288 | pslld $18,%xmm5 | ||
4289 | |||
4290 | # qhasm: uint32323232 b7 >>= 14 | ||
4291 | # asm 1: psrld $14,<b7=int6464#7 | ||
4292 | # asm 2: psrld $14,<b7=%xmm6 | ||
4293 | psrld $14,%xmm6 | ||
4294 | |||
4295 | # qhasm: diag0 ^= a7 | ||
4296 | # asm 1: pxor <a7=int6464#6,<diag0=int6464#1 | ||
4297 | # asm 2: pxor <a7=%xmm5,<diag0=%xmm0 | ||
4298 | pxor %xmm5,%xmm0 | ||
4299 | |||
4300 | # qhasm: diag3 <<<= 96 | ||
4301 | # asm 1: pshufd $0x39,<diag3=int6464#4,<diag3=int6464#4 | ||
4302 | # asm 2: pshufd $0x39,<diag3=%xmm3,<diag3=%xmm3 | ||
4303 | pshufd $0x39,%xmm3,%xmm3 | ||
4304 | |||
4305 | # qhasm: diag0 ^= b7 | ||
4306 | # asm 1: pxor <b7=int6464#7,<diag0=int6464#1 | ||
4307 | # asm 2: pxor <b7=%xmm6,<diag0=%xmm0 | ||
4308 | pxor %xmm6,%xmm0 | ||
4309 | |||
4310 | # qhasm: uint32323232 a0 += diag0 | ||
4311 | # asm 1: paddd <diag0=int6464#1,<a0=int6464#5 | ||
4312 | # asm 2: paddd <diag0=%xmm0,<a0=%xmm4 | ||
4313 | paddd %xmm0,%xmm4 | ||
4314 | |||
4315 | # qhasm: a1 = diag0 | ||
4316 | # asm 1: movdqa <diag0=int6464#1,>a1=int6464#6 | ||
4317 | # asm 2: movdqa <diag0=%xmm0,>a1=%xmm5 | ||
4318 | movdqa %xmm0,%xmm5 | ||
4319 | |||
4320 | # qhasm: b0 = a0 | ||
4321 | # asm 1: movdqa <a0=int6464#5,>b0=int6464#7 | ||
4322 | # asm 2: movdqa <a0=%xmm4,>b0=%xmm6 | ||
4323 | movdqa %xmm4,%xmm6 | ||
4324 | |||
4325 | # qhasm: uint32323232 a0 <<= 7 | ||
4326 | # asm 1: pslld $7,<a0=int6464#5 | ||
4327 | # asm 2: pslld $7,<a0=%xmm4 | ||
4328 | pslld $7,%xmm4 | ||
4329 | |||
4330 | # qhasm: uint32323232 b0 >>= 25 | ||
4331 | # asm 1: psrld $25,<b0=int6464#7 | ||
4332 | # asm 2: psrld $25,<b0=%xmm6 | ||
4333 | psrld $25,%xmm6 | ||
4334 | |||
4335 | # qhasm: diag3 ^= a0 | ||
4336 | # asm 1: pxor <a0=int6464#5,<diag3=int6464#4 | ||
4337 | # asm 2: pxor <a0=%xmm4,<diag3=%xmm3 | ||
4338 | pxor %xmm4,%xmm3 | ||
4339 | |||
4340 | # qhasm: diag3 ^= b0 | ||
4341 | # asm 1: pxor <b0=int6464#7,<diag3=int6464#4 | ||
4342 | # asm 2: pxor <b0=%xmm6,<diag3=%xmm3 | ||
4343 | pxor %xmm6,%xmm3 | ||
4344 | |||
4345 | # qhasm: uint32323232 a1 += diag3 | ||
4346 | # asm 1: paddd <diag3=int6464#4,<a1=int6464#6 | ||
4347 | # asm 2: paddd <diag3=%xmm3,<a1=%xmm5 | ||
4348 | paddd %xmm3,%xmm5 | ||
4349 | |||
4350 | # qhasm: a2 = diag3 | ||
4351 | # asm 1: movdqa <diag3=int6464#4,>a2=int6464#5 | ||
4352 | # asm 2: movdqa <diag3=%xmm3,>a2=%xmm4 | ||
4353 | movdqa %xmm3,%xmm4 | ||
4354 | |||
4355 | # qhasm: b1 = a1 | ||
4356 | # asm 1: movdqa <a1=int6464#6,>b1=int6464#7 | ||
4357 | # asm 2: movdqa <a1=%xmm5,>b1=%xmm6 | ||
4358 | movdqa %xmm5,%xmm6 | ||
4359 | |||
4360 | # qhasm: uint32323232 a1 <<= 9 | ||
4361 | # asm 1: pslld $9,<a1=int6464#6 | ||
4362 | # asm 2: pslld $9,<a1=%xmm5 | ||
4363 | pslld $9,%xmm5 | ||
4364 | |||
4365 | # qhasm: uint32323232 b1 >>= 23 | ||
4366 | # asm 1: psrld $23,<b1=int6464#7 | ||
4367 | # asm 2: psrld $23,<b1=%xmm6 | ||
4368 | psrld $23,%xmm6 | ||
4369 | |||
4370 | # qhasm: diag2 ^= a1 | ||
4371 | # asm 1: pxor <a1=int6464#6,<diag2=int6464#3 | ||
4372 | # asm 2: pxor <a1=%xmm5,<diag2=%xmm2 | ||
4373 | pxor %xmm5,%xmm2 | ||
4374 | |||
4375 | # qhasm: diag3 <<<= 32 | ||
4376 | # asm 1: pshufd $0x93,<diag3=int6464#4,<diag3=int6464#4 | ||
4377 | # asm 2: pshufd $0x93,<diag3=%xmm3,<diag3=%xmm3 | ||
4378 | pshufd $0x93,%xmm3,%xmm3 | ||
4379 | |||
4380 | # qhasm: diag2 ^= b1 | ||
4381 | # asm 1: pxor <b1=int6464#7,<diag2=int6464#3 | ||
4382 | # asm 2: pxor <b1=%xmm6,<diag2=%xmm2 | ||
4383 | pxor %xmm6,%xmm2 | ||
4384 | |||
4385 | # qhasm: uint32323232 a2 += diag2 | ||
4386 | # asm 1: paddd <diag2=int6464#3,<a2=int6464#5 | ||
4387 | # asm 2: paddd <diag2=%xmm2,<a2=%xmm4 | ||
4388 | paddd %xmm2,%xmm4 | ||
4389 | |||
4390 | # qhasm: a3 = diag2 | ||
4391 | # asm 1: movdqa <diag2=int6464#3,>a3=int6464#6 | ||
4392 | # asm 2: movdqa <diag2=%xmm2,>a3=%xmm5 | ||
4393 | movdqa %xmm2,%xmm5 | ||
4394 | |||
4395 | # qhasm: b2 = a2 | ||
4396 | # asm 1: movdqa <a2=int6464#5,>b2=int6464#7 | ||
4397 | # asm 2: movdqa <a2=%xmm4,>b2=%xmm6 | ||
4398 | movdqa %xmm4,%xmm6 | ||
4399 | |||
4400 | # qhasm: uint32323232 a2 <<= 13 | ||
4401 | # asm 1: pslld $13,<a2=int6464#5 | ||
4402 | # asm 2: pslld $13,<a2=%xmm4 | ||
4403 | pslld $13,%xmm4 | ||
4404 | |||
4405 | # qhasm: uint32323232 b2 >>= 19 | ||
4406 | # asm 1: psrld $19,<b2=int6464#7 | ||
4407 | # asm 2: psrld $19,<b2=%xmm6 | ||
4408 | psrld $19,%xmm6 | ||
4409 | |||
4410 | # qhasm: diag1 ^= a2 | ||
4411 | # asm 1: pxor <a2=int6464#5,<diag1=int6464#2 | ||
4412 | # asm 2: pxor <a2=%xmm4,<diag1=%xmm1 | ||
4413 | pxor %xmm4,%xmm1 | ||
4414 | |||
4415 | # qhasm: diag2 <<<= 64 | ||
4416 | # asm 1: pshufd $0x4e,<diag2=int6464#3,<diag2=int6464#3 | ||
4417 | # asm 2: pshufd $0x4e,<diag2=%xmm2,<diag2=%xmm2 | ||
4418 | pshufd $0x4e,%xmm2,%xmm2 | ||
4419 | |||
4420 | # qhasm: diag1 ^= b2 | ||
4421 | # asm 1: pxor <b2=int6464#7,<diag1=int6464#2 | ||
4422 | # asm 2: pxor <b2=%xmm6,<diag1=%xmm1 | ||
4423 | pxor %xmm6,%xmm1 | ||
4424 | |||
4425 | # qhasm: uint32323232 a3 += diag1 | ||
4426 | # asm 1: paddd <diag1=int6464#2,<a3=int6464#6 | ||
4427 | # asm 2: paddd <diag1=%xmm1,<a3=%xmm5 | ||
4428 | paddd %xmm1,%xmm5 | ||
4429 | |||
4430 | # qhasm: a4 = diag3 | ||
4431 | # asm 1: movdqa <diag3=int6464#4,>a4=int6464#5 | ||
4432 | # asm 2: movdqa <diag3=%xmm3,>a4=%xmm4 | ||
4433 | movdqa %xmm3,%xmm4 | ||
4434 | |||
4435 | # qhasm: b3 = a3 | ||
4436 | # asm 1: movdqa <a3=int6464#6,>b3=int6464#7 | ||
4437 | # asm 2: movdqa <a3=%xmm5,>b3=%xmm6 | ||
4438 | movdqa %xmm5,%xmm6 | ||
4439 | |||
4440 | # qhasm: uint32323232 a3 <<= 18 | ||
4441 | # asm 1: pslld $18,<a3=int6464#6 | ||
4442 | # asm 2: pslld $18,<a3=%xmm5 | ||
4443 | pslld $18,%xmm5 | ||
4444 | |||
4445 | # qhasm: uint32323232 b3 >>= 14 | ||
4446 | # asm 1: psrld $14,<b3=int6464#7 | ||
4447 | # asm 2: psrld $14,<b3=%xmm6 | ||
4448 | psrld $14,%xmm6 | ||
4449 | |||
4450 | # qhasm: diag0 ^= a3 | ||
4451 | # asm 1: pxor <a3=int6464#6,<diag0=int6464#1 | ||
4452 | # asm 2: pxor <a3=%xmm5,<diag0=%xmm0 | ||
4453 | pxor %xmm5,%xmm0 | ||
4454 | |||
4455 | # qhasm: diag1 <<<= 96 | ||
4456 | # asm 1: pshufd $0x39,<diag1=int6464#2,<diag1=int6464#2 | ||
4457 | # asm 2: pshufd $0x39,<diag1=%xmm1,<diag1=%xmm1 | ||
4458 | pshufd $0x39,%xmm1,%xmm1 | ||
4459 | |||
4460 | # qhasm: diag0 ^= b3 | ||
4461 | # asm 1: pxor <b3=int6464#7,<diag0=int6464#1 | ||
4462 | # asm 2: pxor <b3=%xmm6,<diag0=%xmm0 | ||
4463 | pxor %xmm6,%xmm0 | ||
4464 | |||
4465 | # qhasm: uint32323232 a4 += diag0 | ||
4466 | # asm 1: paddd <diag0=int6464#1,<a4=int6464#5 | ||
4467 | # asm 2: paddd <diag0=%xmm0,<a4=%xmm4 | ||
4468 | paddd %xmm0,%xmm4 | ||
4469 | |||
4470 | # qhasm: a5 = diag0 | ||
4471 | # asm 1: movdqa <diag0=int6464#1,>a5=int6464#6 | ||
4472 | # asm 2: movdqa <diag0=%xmm0,>a5=%xmm5 | ||
4473 | movdqa %xmm0,%xmm5 | ||
4474 | |||
4475 | # qhasm: b4 = a4 | ||
4476 | # asm 1: movdqa <a4=int6464#5,>b4=int6464#7 | ||
4477 | # asm 2: movdqa <a4=%xmm4,>b4=%xmm6 | ||
4478 | movdqa %xmm4,%xmm6 | ||
4479 | |||
4480 | # qhasm: uint32323232 a4 <<= 7 | ||
4481 | # asm 1: pslld $7,<a4=int6464#5 | ||
4482 | # asm 2: pslld $7,<a4=%xmm4 | ||
4483 | pslld $7,%xmm4 | ||
4484 | |||
4485 | # qhasm: uint32323232 b4 >>= 25 | ||
4486 | # asm 1: psrld $25,<b4=int6464#7 | ||
4487 | # asm 2: psrld $25,<b4=%xmm6 | ||
4488 | psrld $25,%xmm6 | ||
4489 | |||
4490 | # qhasm: diag1 ^= a4 | ||
4491 | # asm 1: pxor <a4=int6464#5,<diag1=int6464#2 | ||
4492 | # asm 2: pxor <a4=%xmm4,<diag1=%xmm1 | ||
4493 | pxor %xmm4,%xmm1 | ||
4494 | |||
4495 | # qhasm: diag1 ^= b4 | ||
4496 | # asm 1: pxor <b4=int6464#7,<diag1=int6464#2 | ||
4497 | # asm 2: pxor <b4=%xmm6,<diag1=%xmm1 | ||
4498 | pxor %xmm6,%xmm1 | ||
4499 | |||
4500 | # qhasm: uint32323232 a5 += diag1 | ||
4501 | # asm 1: paddd <diag1=int6464#2,<a5=int6464#6 | ||
4502 | # asm 2: paddd <diag1=%xmm1,<a5=%xmm5 | ||
4503 | paddd %xmm1,%xmm5 | ||
4504 | |||
4505 | # qhasm: a6 = diag1 | ||
4506 | # asm 1: movdqa <diag1=int6464#2,>a6=int6464#5 | ||
4507 | # asm 2: movdqa <diag1=%xmm1,>a6=%xmm4 | ||
4508 | movdqa %xmm1,%xmm4 | ||
4509 | |||
4510 | # qhasm: b5 = a5 | ||
4511 | # asm 1: movdqa <a5=int6464#6,>b5=int6464#7 | ||
4512 | # asm 2: movdqa <a5=%xmm5,>b5=%xmm6 | ||
4513 | movdqa %xmm5,%xmm6 | ||
4514 | |||
4515 | # qhasm: uint32323232 a5 <<= 9 | ||
4516 | # asm 1: pslld $9,<a5=int6464#6 | ||
4517 | # asm 2: pslld $9,<a5=%xmm5 | ||
4518 | pslld $9,%xmm5 | ||
4519 | |||
4520 | # qhasm: uint32323232 b5 >>= 23 | ||
4521 | # asm 1: psrld $23,<b5=int6464#7 | ||
4522 | # asm 2: psrld $23,<b5=%xmm6 | ||
4523 | psrld $23,%xmm6 | ||
4524 | |||
4525 | # qhasm: diag2 ^= a5 | ||
4526 | # asm 1: pxor <a5=int6464#6,<diag2=int6464#3 | ||
4527 | # asm 2: pxor <a5=%xmm5,<diag2=%xmm2 | ||
4528 | pxor %xmm5,%xmm2 | ||
4529 | |||
4530 | # qhasm: diag1 <<<= 32 | ||
4531 | # asm 1: pshufd $0x93,<diag1=int6464#2,<diag1=int6464#2 | ||
4532 | # asm 2: pshufd $0x93,<diag1=%xmm1,<diag1=%xmm1 | ||
4533 | pshufd $0x93,%xmm1,%xmm1 | ||
4534 | |||
4535 | # qhasm: diag2 ^= b5 | ||
4536 | # asm 1: pxor <b5=int6464#7,<diag2=int6464#3 | ||
4537 | # asm 2: pxor <b5=%xmm6,<diag2=%xmm2 | ||
4538 | pxor %xmm6,%xmm2 | ||
4539 | |||
4540 | # qhasm: uint32323232 a6 += diag2 | ||
4541 | # asm 1: paddd <diag2=int6464#3,<a6=int6464#5 | ||
4542 | # asm 2: paddd <diag2=%xmm2,<a6=%xmm4 | ||
4543 | paddd %xmm2,%xmm4 | ||
4544 | |||
4545 | # qhasm: a7 = diag2 | ||
4546 | # asm 1: movdqa <diag2=int6464#3,>a7=int6464#6 | ||
4547 | # asm 2: movdqa <diag2=%xmm2,>a7=%xmm5 | ||
4548 | movdqa %xmm2,%xmm5 | ||
4549 | |||
4550 | # qhasm: b6 = a6 | ||
4551 | # asm 1: movdqa <a6=int6464#5,>b6=int6464#7 | ||
4552 | # asm 2: movdqa <a6=%xmm4,>b6=%xmm6 | ||
4553 | movdqa %xmm4,%xmm6 | ||
4554 | |||
4555 | # qhasm: uint32323232 a6 <<= 13 | ||
4556 | # asm 1: pslld $13,<a6=int6464#5 | ||
4557 | # asm 2: pslld $13,<a6=%xmm4 | ||
4558 | pslld $13,%xmm4 | ||
4559 | |||
4560 | # qhasm: uint32323232 b6 >>= 19 | ||
4561 | # asm 1: psrld $19,<b6=int6464#7 | ||
4562 | # asm 2: psrld $19,<b6=%xmm6 | ||
4563 | psrld $19,%xmm6 | ||
4564 | |||
4565 | # qhasm: diag3 ^= a6 | ||
4566 | # asm 1: pxor <a6=int6464#5,<diag3=int6464#4 | ||
4567 | # asm 2: pxor <a6=%xmm4,<diag3=%xmm3 | ||
4568 | pxor %xmm4,%xmm3 | ||
4569 | |||
4570 | # qhasm: diag2 <<<= 64 | ||
4571 | # asm 1: pshufd $0x4e,<diag2=int6464#3,<diag2=int6464#3 | ||
4572 | # asm 2: pshufd $0x4e,<diag2=%xmm2,<diag2=%xmm2 | ||
4573 | pshufd $0x4e,%xmm2,%xmm2 | ||
4574 | |||
4575 | # qhasm: diag3 ^= b6 | ||
4576 | # asm 1: pxor <b6=int6464#7,<diag3=int6464#4 | ||
4577 | # asm 2: pxor <b6=%xmm6,<diag3=%xmm3 | ||
4578 | pxor %xmm6,%xmm3 | ||
4579 | |||
4580 | # qhasm: unsigned>? i -= 4 | ||
4581 | # asm 1: sub $4,<i=int32#1 | ||
4582 | # asm 2: sub $4,<i=%eax | ||
4583 | sub $4,%eax | ||
4584 | |||
4585 | # qhasm: uint32323232 a7 += diag3 | ||
4586 | # asm 1: paddd <diag3=int6464#4,<a7=int6464#6 | ||
4587 | # asm 2: paddd <diag3=%xmm3,<a7=%xmm5 | ||
4588 | paddd %xmm3,%xmm5 | ||
4589 | |||
4590 | # qhasm: a0 = diag1 | ||
4591 | # asm 1: movdqa <diag1=int6464#2,>a0=int6464#5 | ||
4592 | # asm 2: movdqa <diag1=%xmm1,>a0=%xmm4 | ||
4593 | movdqa %xmm1,%xmm4 | ||
4594 | |||
4595 | # qhasm: b7 = a7 | ||
4596 | # asm 1: movdqa <a7=int6464#6,>b7=int6464#7 | ||
4597 | # asm 2: movdqa <a7=%xmm5,>b7=%xmm6 | ||
4598 | movdqa %xmm5,%xmm6 | ||
4599 | |||
4600 | # qhasm: uint32323232 a7 <<= 18 | ||
4601 | # asm 1: pslld $18,<a7=int6464#6 | ||
4602 | # asm 2: pslld $18,<a7=%xmm5 | ||
4603 | pslld $18,%xmm5 | ||
4604 | |||
4605 | # qhasm: b0 = 0 | ||
4606 | # asm 1: pxor >b0=int6464#8,>b0=int6464#8 | ||
4607 | # asm 2: pxor >b0=%xmm7,>b0=%xmm7 | ||
4608 | pxor %xmm7,%xmm7 | ||
4609 | |||
4610 | # qhasm: uint32323232 b7 >>= 14 | ||
4611 | # asm 1: psrld $14,<b7=int6464#7 | ||
4612 | # asm 2: psrld $14,<b7=%xmm6 | ||
4613 | psrld $14,%xmm6 | ||
4614 | |||
4615 | # qhasm: diag0 ^= a7 | ||
4616 | # asm 1: pxor <a7=int6464#6,<diag0=int6464#1 | ||
4617 | # asm 2: pxor <a7=%xmm5,<diag0=%xmm0 | ||
4618 | pxor %xmm5,%xmm0 | ||
4619 | |||
4620 | # qhasm: diag3 <<<= 96 | ||
4621 | # asm 1: pshufd $0x39,<diag3=int6464#4,<diag3=int6464#4 | ||
4622 | # asm 2: pshufd $0x39,<diag3=%xmm3,<diag3=%xmm3 | ||
4623 | pshufd $0x39,%xmm3,%xmm3 | ||
4624 | |||
4625 | # qhasm: diag0 ^= b7 | ||
4626 | # asm 1: pxor <b7=int6464#7,<diag0=int6464#1 | ||
4627 | # asm 2: pxor <b7=%xmm6,<diag0=%xmm0 | ||
4628 | pxor %xmm6,%xmm0 | ||
4629 | # comment:fp stack unchanged by jump | ||
4630 | |||
4631 | # qhasm: goto mainloop2 if unsigned> | ||
4632 | ja ._mainloop2 | ||
4633 | |||
4634 | # qhasm: uint32323232 diag0 += x0 | ||
4635 | # asm 1: paddd <x0=stack128#3,<diag0=int6464#1 | ||
4636 | # asm 2: paddd <x0=64(%esp),<diag0=%xmm0 | ||
4637 | paddd 64(%esp),%xmm0 | ||
4638 | |||
4639 | # qhasm: uint32323232 diag1 += x1 | ||
4640 | # asm 1: paddd <x1=stack128#2,<diag1=int6464#2 | ||
4641 | # asm 2: paddd <x1=48(%esp),<diag1=%xmm1 | ||
4642 | paddd 48(%esp),%xmm1 | ||
4643 | |||
4644 | # qhasm: uint32323232 diag2 += x2 | ||
4645 | # asm 1: paddd <x2=stack128#4,<diag2=int6464#3 | ||
4646 | # asm 2: paddd <x2=80(%esp),<diag2=%xmm2 | ||
4647 | paddd 80(%esp),%xmm2 | ||
4648 | |||
4649 | # qhasm: uint32323232 diag3 += x3 | ||
4650 | # asm 1: paddd <x3=stack128#1,<diag3=int6464#4 | ||
4651 | # asm 2: paddd <x3=32(%esp),<diag3=%xmm3 | ||
4652 | paddd 32(%esp),%xmm3 | ||
4653 | |||
4654 | # qhasm: in0 = diag0 | ||
4655 | # asm 1: movd <diag0=int6464#1,>in0=int32#1 | ||
4656 | # asm 2: movd <diag0=%xmm0,>in0=%eax | ||
4657 | movd %xmm0,%eax | ||
4658 | |||
4659 | # qhasm: in12 = diag1 | ||
4660 | # asm 1: movd <diag1=int6464#2,>in12=int32#2 | ||
4661 | # asm 2: movd <diag1=%xmm1,>in12=%ecx | ||
4662 | movd %xmm1,%ecx | ||
4663 | |||
4664 | # qhasm: in8 = diag2 | ||
4665 | # asm 1: movd <diag2=int6464#3,>in8=int32#3 | ||
4666 | # asm 2: movd <diag2=%xmm2,>in8=%edx | ||
4667 | movd %xmm2,%edx | ||
4668 | |||
4669 | # qhasm: in4 = diag3 | ||
4670 | # asm 1: movd <diag3=int6464#4,>in4=int32#4 | ||
4671 | # asm 2: movd <diag3=%xmm3,>in4=%ebx | ||
4672 | movd %xmm3,%ebx | ||
4673 | |||
4674 | # qhasm: diag0 <<<= 96 | ||
4675 | # asm 1: pshufd $0x39,<diag0=int6464#1,<diag0=int6464#1 | ||
4676 | # asm 2: pshufd $0x39,<diag0=%xmm0,<diag0=%xmm0 | ||
4677 | pshufd $0x39,%xmm0,%xmm0 | ||
4678 | |||
4679 | # qhasm: diag1 <<<= 96 | ||
4680 | # asm 1: pshufd $0x39,<diag1=int6464#2,<diag1=int6464#2 | ||
4681 | # asm 2: pshufd $0x39,<diag1=%xmm1,<diag1=%xmm1 | ||
4682 | pshufd $0x39,%xmm1,%xmm1 | ||
4683 | |||
4684 | # qhasm: diag2 <<<= 96 | ||
4685 | # asm 1: pshufd $0x39,<diag2=int6464#3,<diag2=int6464#3 | ||
4686 | # asm 2: pshufd $0x39,<diag2=%xmm2,<diag2=%xmm2 | ||
4687 | pshufd $0x39,%xmm2,%xmm2 | ||
4688 | |||
4689 | # qhasm: diag3 <<<= 96 | ||
4690 | # asm 1: pshufd $0x39,<diag3=int6464#4,<diag3=int6464#4 | ||
4691 | # asm 2: pshufd $0x39,<diag3=%xmm3,<diag3=%xmm3 | ||
4692 | pshufd $0x39,%xmm3,%xmm3 | ||
4693 | |||
4694 | # qhasm: in0 ^= *(uint32 *) (m + 0) | ||
4695 | # asm 1: xorl 0(<m=int32#5),<in0=int32#1 | ||
4696 | # asm 2: xorl 0(<m=%esi),<in0=%eax | ||
4697 | xorl 0(%esi),%eax | ||
4698 | |||
4699 | # qhasm: in12 ^= *(uint32 *) (m + 48) | ||
4700 | # asm 1: xorl 48(<m=int32#5),<in12=int32#2 | ||
4701 | # asm 2: xorl 48(<m=%esi),<in12=%ecx | ||
4702 | xorl 48(%esi),%ecx | ||
4703 | |||
4704 | # qhasm: in8 ^= *(uint32 *) (m + 32) | ||
4705 | # asm 1: xorl 32(<m=int32#5),<in8=int32#3 | ||
4706 | # asm 2: xorl 32(<m=%esi),<in8=%edx | ||
4707 | xorl 32(%esi),%edx | ||
4708 | |||
4709 | # qhasm: in4 ^= *(uint32 *) (m + 16) | ||
4710 | # asm 1: xorl 16(<m=int32#5),<in4=int32#4 | ||
4711 | # asm 2: xorl 16(<m=%esi),<in4=%ebx | ||
4712 | xorl 16(%esi),%ebx | ||
4713 | |||
4714 | # qhasm: *(uint32 *) (out + 0) = in0 | ||
4715 | # asm 1: movl <in0=int32#1,0(<out=int32#6) | ||
4716 | # asm 2: movl <in0=%eax,0(<out=%edi) | ||
4717 | movl %eax,0(%edi) | ||
4718 | |||
4719 | # qhasm: *(uint32 *) (out + 48) = in12 | ||
4720 | # asm 1: movl <in12=int32#2,48(<out=int32#6) | ||
4721 | # asm 2: movl <in12=%ecx,48(<out=%edi) | ||
4722 | movl %ecx,48(%edi) | ||
4723 | |||
4724 | # qhasm: *(uint32 *) (out + 32) = in8 | ||
4725 | # asm 1: movl <in8=int32#3,32(<out=int32#6) | ||
4726 | # asm 2: movl <in8=%edx,32(<out=%edi) | ||
4727 | movl %edx,32(%edi) | ||
4728 | |||
4729 | # qhasm: *(uint32 *) (out + 16) = in4 | ||
4730 | # asm 1: movl <in4=int32#4,16(<out=int32#6) | ||
4731 | # asm 2: movl <in4=%ebx,16(<out=%edi) | ||
4732 | movl %ebx,16(%edi) | ||
4733 | |||
4734 | # qhasm: in5 = diag0 | ||
4735 | # asm 1: movd <diag0=int6464#1,>in5=int32#1 | ||
4736 | # asm 2: movd <diag0=%xmm0,>in5=%eax | ||
4737 | movd %xmm0,%eax | ||
4738 | |||
4739 | # qhasm: in1 = diag1 | ||
4740 | # asm 1: movd <diag1=int6464#2,>in1=int32#2 | ||
4741 | # asm 2: movd <diag1=%xmm1,>in1=%ecx | ||
4742 | movd %xmm1,%ecx | ||
4743 | |||
4744 | # qhasm: in13 = diag2 | ||
4745 | # asm 1: movd <diag2=int6464#3,>in13=int32#3 | ||
4746 | # asm 2: movd <diag2=%xmm2,>in13=%edx | ||
4747 | movd %xmm2,%edx | ||
4748 | |||
4749 | # qhasm: in9 = diag3 | ||
4750 | # asm 1: movd <diag3=int6464#4,>in9=int32#4 | ||
4751 | # asm 2: movd <diag3=%xmm3,>in9=%ebx | ||
4752 | movd %xmm3,%ebx | ||
4753 | |||
4754 | # qhasm: diag0 <<<= 96 | ||
4755 | # asm 1: pshufd $0x39,<diag0=int6464#1,<diag0=int6464#1 | ||
4756 | # asm 2: pshufd $0x39,<diag0=%xmm0,<diag0=%xmm0 | ||
4757 | pshufd $0x39,%xmm0,%xmm0 | ||
4758 | |||
4759 | # qhasm: diag1 <<<= 96 | ||
4760 | # asm 1: pshufd $0x39,<diag1=int6464#2,<diag1=int6464#2 | ||
4761 | # asm 2: pshufd $0x39,<diag1=%xmm1,<diag1=%xmm1 | ||
4762 | pshufd $0x39,%xmm1,%xmm1 | ||
4763 | |||
4764 | # qhasm: diag2 <<<= 96 | ||
4765 | # asm 1: pshufd $0x39,<diag2=int6464#3,<diag2=int6464#3 | ||
4766 | # asm 2: pshufd $0x39,<diag2=%xmm2,<diag2=%xmm2 | ||
4767 | pshufd $0x39,%xmm2,%xmm2 | ||
4768 | |||
4769 | # qhasm: diag3 <<<= 96 | ||
4770 | # asm 1: pshufd $0x39,<diag3=int6464#4,<diag3=int6464#4 | ||
4771 | # asm 2: pshufd $0x39,<diag3=%xmm3,<diag3=%xmm3 | ||
4772 | pshufd $0x39,%xmm3,%xmm3 | ||
4773 | |||
4774 | # qhasm: in5 ^= *(uint32 *) (m + 20) | ||
4775 | # asm 1: xorl 20(<m=int32#5),<in5=int32#1 | ||
4776 | # asm 2: xorl 20(<m=%esi),<in5=%eax | ||
4777 | xorl 20(%esi),%eax | ||
4778 | |||
4779 | # qhasm: in1 ^= *(uint32 *) (m + 4) | ||
4780 | # asm 1: xorl 4(<m=int32#5),<in1=int32#2 | ||
4781 | # asm 2: xorl 4(<m=%esi),<in1=%ecx | ||
4782 | xorl 4(%esi),%ecx | ||
4783 | |||
4784 | # qhasm: in13 ^= *(uint32 *) (m + 52) | ||
4785 | # asm 1: xorl 52(<m=int32#5),<in13=int32#3 | ||
4786 | # asm 2: xorl 52(<m=%esi),<in13=%edx | ||
4787 | xorl 52(%esi),%edx | ||
4788 | |||
4789 | # qhasm: in9 ^= *(uint32 *) (m + 36) | ||
4790 | # asm 1: xorl 36(<m=int32#5),<in9=int32#4 | ||
4791 | # asm 2: xorl 36(<m=%esi),<in9=%ebx | ||
4792 | xorl 36(%esi),%ebx | ||
4793 | |||
4794 | # qhasm: *(uint32 *) (out + 20) = in5 | ||
4795 | # asm 1: movl <in5=int32#1,20(<out=int32#6) | ||
4796 | # asm 2: movl <in5=%eax,20(<out=%edi) | ||
4797 | movl %eax,20(%edi) | ||
4798 | |||
4799 | # qhasm: *(uint32 *) (out + 4) = in1 | ||
4800 | # asm 1: movl <in1=int32#2,4(<out=int32#6) | ||
4801 | # asm 2: movl <in1=%ecx,4(<out=%edi) | ||
4802 | movl %ecx,4(%edi) | ||
4803 | |||
4804 | # qhasm: *(uint32 *) (out + 52) = in13 | ||
4805 | # asm 1: movl <in13=int32#3,52(<out=int32#6) | ||
4806 | # asm 2: movl <in13=%edx,52(<out=%edi) | ||
4807 | movl %edx,52(%edi) | ||
4808 | |||
4809 | # qhasm: *(uint32 *) (out + 36) = in9 | ||
4810 | # asm 1: movl <in9=int32#4,36(<out=int32#6) | ||
4811 | # asm 2: movl <in9=%ebx,36(<out=%edi) | ||
4812 | movl %ebx,36(%edi) | ||
4813 | |||
4814 | # qhasm: in10 = diag0 | ||
4815 | # asm 1: movd <diag0=int6464#1,>in10=int32#1 | ||
4816 | # asm 2: movd <diag0=%xmm0,>in10=%eax | ||
4817 | movd %xmm0,%eax | ||
4818 | |||
4819 | # qhasm: in6 = diag1 | ||
4820 | # asm 1: movd <diag1=int6464#2,>in6=int32#2 | ||
4821 | # asm 2: movd <diag1=%xmm1,>in6=%ecx | ||
4822 | movd %xmm1,%ecx | ||
4823 | |||
4824 | # qhasm: in2 = diag2 | ||
4825 | # asm 1: movd <diag2=int6464#3,>in2=int32#3 | ||
4826 | # asm 2: movd <diag2=%xmm2,>in2=%edx | ||
4827 | movd %xmm2,%edx | ||
4828 | |||
4829 | # qhasm: in14 = diag3 | ||
4830 | # asm 1: movd <diag3=int6464#4,>in14=int32#4 | ||
4831 | # asm 2: movd <diag3=%xmm3,>in14=%ebx | ||
4832 | movd %xmm3,%ebx | ||
4833 | |||
4834 | # qhasm: diag0 <<<= 96 | ||
4835 | # asm 1: pshufd $0x39,<diag0=int6464#1,<diag0=int6464#1 | ||
4836 | # asm 2: pshufd $0x39,<diag0=%xmm0,<diag0=%xmm0 | ||
4837 | pshufd $0x39,%xmm0,%xmm0 | ||
4838 | |||
4839 | # qhasm: diag1 <<<= 96 | ||
4840 | # asm 1: pshufd $0x39,<diag1=int6464#2,<diag1=int6464#2 | ||
4841 | # asm 2: pshufd $0x39,<diag1=%xmm1,<diag1=%xmm1 | ||
4842 | pshufd $0x39,%xmm1,%xmm1 | ||
4843 | |||
4844 | # qhasm: diag2 <<<= 96 | ||
4845 | # asm 1: pshufd $0x39,<diag2=int6464#3,<diag2=int6464#3 | ||
4846 | # asm 2: pshufd $0x39,<diag2=%xmm2,<diag2=%xmm2 | ||
4847 | pshufd $0x39,%xmm2,%xmm2 | ||
4848 | |||
4849 | # qhasm: diag3 <<<= 96 | ||
4850 | # asm 1: pshufd $0x39,<diag3=int6464#4,<diag3=int6464#4 | ||
4851 | # asm 2: pshufd $0x39,<diag3=%xmm3,<diag3=%xmm3 | ||
4852 | pshufd $0x39,%xmm3,%xmm3 | ||
4853 | |||
4854 | # qhasm: in10 ^= *(uint32 *) (m + 40) | ||
4855 | # asm 1: xorl 40(<m=int32#5),<in10=int32#1 | ||
4856 | # asm 2: xorl 40(<m=%esi),<in10=%eax | ||
4857 | xorl 40(%esi),%eax | ||
4858 | |||
4859 | # qhasm: in6 ^= *(uint32 *) (m + 24) | ||
4860 | # asm 1: xorl 24(<m=int32#5),<in6=int32#2 | ||
4861 | # asm 2: xorl 24(<m=%esi),<in6=%ecx | ||
4862 | xorl 24(%esi),%ecx | ||
4863 | |||
4864 | # qhasm: in2 ^= *(uint32 *) (m + 8) | ||
4865 | # asm 1: xorl 8(<m=int32#5),<in2=int32#3 | ||
4866 | # asm 2: xorl 8(<m=%esi),<in2=%edx | ||
4867 | xorl 8(%esi),%edx | ||
4868 | |||
4869 | # qhasm: in14 ^= *(uint32 *) (m + 56) | ||
4870 | # asm 1: xorl 56(<m=int32#5),<in14=int32#4 | ||
4871 | # asm 2: xorl 56(<m=%esi),<in14=%ebx | ||
4872 | xorl 56(%esi),%ebx | ||
4873 | |||
4874 | # qhasm: *(uint32 *) (out + 40) = in10 | ||
4875 | # asm 1: movl <in10=int32#1,40(<out=int32#6) | ||
4876 | # asm 2: movl <in10=%eax,40(<out=%edi) | ||
4877 | movl %eax,40(%edi) | ||
4878 | |||
4879 | # qhasm: *(uint32 *) (out + 24) = in6 | ||
4880 | # asm 1: movl <in6=int32#2,24(<out=int32#6) | ||
4881 | # asm 2: movl <in6=%ecx,24(<out=%edi) | ||
4882 | movl %ecx,24(%edi) | ||
4883 | |||
4884 | # qhasm: *(uint32 *) (out + 8) = in2 | ||
4885 | # asm 1: movl <in2=int32#3,8(<out=int32#6) | ||
4886 | # asm 2: movl <in2=%edx,8(<out=%edi) | ||
4887 | movl %edx,8(%edi) | ||
4888 | |||
4889 | # qhasm: *(uint32 *) (out + 56) = in14 | ||
4890 | # asm 1: movl <in14=int32#4,56(<out=int32#6) | ||
4891 | # asm 2: movl <in14=%ebx,56(<out=%edi) | ||
4892 | movl %ebx,56(%edi) | ||
4893 | |||
4894 | # qhasm: in15 = diag0 | ||
4895 | # asm 1: movd <diag0=int6464#1,>in15=int32#1 | ||
4896 | # asm 2: movd <diag0=%xmm0,>in15=%eax | ||
4897 | movd %xmm0,%eax | ||
4898 | |||
4899 | # qhasm: in11 = diag1 | ||
4900 | # asm 1: movd <diag1=int6464#2,>in11=int32#2 | ||
4901 | # asm 2: movd <diag1=%xmm1,>in11=%ecx | ||
4902 | movd %xmm1,%ecx | ||
4903 | |||
4904 | # qhasm: in7 = diag2 | ||
4905 | # asm 1: movd <diag2=int6464#3,>in7=int32#3 | ||
4906 | # asm 2: movd <diag2=%xmm2,>in7=%edx | ||
4907 | movd %xmm2,%edx | ||
4908 | |||
4909 | # qhasm: in3 = diag3 | ||
4910 | # asm 1: movd <diag3=int6464#4,>in3=int32#4 | ||
4911 | # asm 2: movd <diag3=%xmm3,>in3=%ebx | ||
4912 | movd %xmm3,%ebx | ||
4913 | |||
4914 | # qhasm: in15 ^= *(uint32 *) (m + 60) | ||
4915 | # asm 1: xorl 60(<m=int32#5),<in15=int32#1 | ||
4916 | # asm 2: xorl 60(<m=%esi),<in15=%eax | ||
4917 | xorl 60(%esi),%eax | ||
4918 | |||
4919 | # qhasm: in11 ^= *(uint32 *) (m + 44) | ||
4920 | # asm 1: xorl 44(<m=int32#5),<in11=int32#2 | ||
4921 | # asm 2: xorl 44(<m=%esi),<in11=%ecx | ||
4922 | xorl 44(%esi),%ecx | ||
4923 | |||
4924 | # qhasm: in7 ^= *(uint32 *) (m + 28) | ||
4925 | # asm 1: xorl 28(<m=int32#5),<in7=int32#3 | ||
4926 | # asm 2: xorl 28(<m=%esi),<in7=%edx | ||
4927 | xorl 28(%esi),%edx | ||
4928 | |||
4929 | # qhasm: in3 ^= *(uint32 *) (m + 12) | ||
4930 | # asm 1: xorl 12(<m=int32#5),<in3=int32#4 | ||
4931 | # asm 2: xorl 12(<m=%esi),<in3=%ebx | ||
4932 | xorl 12(%esi),%ebx | ||
4933 | |||
4934 | # qhasm: *(uint32 *) (out + 60) = in15 | ||
4935 | # asm 1: movl <in15=int32#1,60(<out=int32#6) | ||
4936 | # asm 2: movl <in15=%eax,60(<out=%edi) | ||
4937 | movl %eax,60(%edi) | ||
4938 | |||
4939 | # qhasm: *(uint32 *) (out + 44) = in11 | ||
4940 | # asm 1: movl <in11=int32#2,44(<out=int32#6) | ||
4941 | # asm 2: movl <in11=%ecx,44(<out=%edi) | ||
4942 | movl %ecx,44(%edi) | ||
4943 | |||
4944 | # qhasm: *(uint32 *) (out + 28) = in7 | ||
4945 | # asm 1: movl <in7=int32#3,28(<out=int32#6) | ||
4946 | # asm 2: movl <in7=%edx,28(<out=%edi) | ||
4947 | movl %edx,28(%edi) | ||
4948 | |||
4949 | # qhasm: *(uint32 *) (out + 12) = in3 | ||
4950 | # asm 1: movl <in3=int32#4,12(<out=int32#6) | ||
4951 | # asm 2: movl <in3=%ebx,12(<out=%edi) | ||
4952 | movl %ebx,12(%edi) | ||
4953 | |||
4954 | # qhasm: bytes = bytes_stack | ||
4955 | # asm 1: movl <bytes_stack=stack32#7,>bytes=int32#1 | ||
4956 | # asm 2: movl <bytes_stack=24(%esp),>bytes=%eax | ||
4957 | movl 24(%esp),%eax | ||
4958 | |||
4959 | # qhasm: in8 = ((uint32 *)&x2)[0] | ||
4960 | # asm 1: movl <x2=stack128#4,>in8=int32#2 | ||
4961 | # asm 2: movl <x2=80(%esp),>in8=%ecx | ||
4962 | movl 80(%esp),%ecx | ||
4963 | |||
4964 | # qhasm: in9 = ((uint32 *)&x3)[1] | ||
4965 | # asm 1: movl 4+<x3=stack128#1,>in9=int32#3 | ||
4966 | # asm 2: movl 4+<x3=32(%esp),>in9=%edx | ||
4967 | movl 4+32(%esp),%edx | ||
4968 | |||
4969 | # qhasm: carry? in8 += 1 | ||
4970 | # asm 1: add $1,<in8=int32#2 | ||
4971 | # asm 2: add $1,<in8=%ecx | ||
4972 | add $1,%ecx | ||
4973 | |||
4974 | # qhasm: in9 += 0 + carry | ||
4975 | # asm 1: adc $0,<in9=int32#3 | ||
4976 | # asm 2: adc $0,<in9=%edx | ||
4977 | adc $0,%edx | ||
4978 | |||
4979 | # qhasm: ((uint32 *)&x2)[0] = in8 | ||
4980 | # asm 1: movl <in8=int32#2,>x2=stack128#4 | ||
4981 | # asm 2: movl <in8=%ecx,>x2=80(%esp) | ||
4982 | movl %ecx,80(%esp) | ||
4983 | |||
4984 | # qhasm: ((uint32 *)&x3)[1] = in9 | ||
4985 | # asm 1: movl <in9=int32#3,4+<x3=stack128#1 | ||
4986 | # asm 2: movl <in9=%edx,4+<x3=32(%esp) | ||
4987 | movl %edx,4+32(%esp) | ||
4988 | |||
4989 | # qhasm: unsigned>? unsigned<? bytes - 64 | ||
4990 | # asm 1: cmp $64,<bytes=int32#1 | ||
4991 | # asm 2: cmp $64,<bytes=%eax | ||
4992 | cmp $64,%eax | ||
4993 | # comment:fp stack unchanged by jump | ||
4994 | |||
4995 | # qhasm: goto bytesatleast65 if unsigned> | ||
4996 | ja ._bytesatleast65 | ||
4997 | # comment:fp stack unchanged by jump | ||
4998 | |||
4999 | # qhasm: goto bytesatleast64 if !unsigned< | ||
5000 | jae ._bytesatleast64 | ||
5001 | |||
5002 | # qhasm: m = out | ||
5003 | # asm 1: mov <out=int32#6,>m=int32#5 | ||
5004 | # asm 2: mov <out=%edi,>m=%esi | ||
5005 | mov %edi,%esi | ||
5006 | |||
5007 | # qhasm: out = ctarget | ||
5008 | # asm 1: movl <ctarget=stack32#6,>out=int32#6 | ||
5009 | # asm 2: movl <ctarget=20(%esp),>out=%edi | ||
5010 | movl 20(%esp),%edi | ||
5011 | |||
5012 | # qhasm: i = bytes | ||
5013 | # asm 1: mov <bytes=int32#1,>i=int32#2 | ||
5014 | # asm 2: mov <bytes=%eax,>i=%ecx | ||
5015 | mov %eax,%ecx | ||
5016 | |||
5017 | # qhasm: while (i) { *out++ = *m++; --i } | ||
5018 | rep movsb | ||
5019 | # comment:fp stack unchanged by fallthrough | ||
5020 | |||
5021 | # qhasm: bytesatleast64: | ||
5022 | ._bytesatleast64: | ||
5023 | # comment:fp stack unchanged by fallthrough | ||
5024 | |||
5025 | # qhasm: done: | ||
5026 | ._done: | ||
5027 | |||
5028 | # qhasm: eax = eax_stack | ||
5029 | # asm 1: movl <eax_stack=stack32#1,>eax=int32#1 | ||
5030 | # asm 2: movl <eax_stack=0(%esp),>eax=%eax | ||
5031 | movl 0(%esp),%eax | ||
5032 | |||
5033 | # qhasm: ebx = ebx_stack | ||
5034 | # asm 1: movl <ebx_stack=stack32#2,>ebx=int32#4 | ||
5035 | # asm 2: movl <ebx_stack=4(%esp),>ebx=%ebx | ||
5036 | movl 4(%esp),%ebx | ||
5037 | |||
5038 | # qhasm: esi = esi_stack | ||
5039 | # asm 1: movl <esi_stack=stack32#3,>esi=int32#5 | ||
5040 | # asm 2: movl <esi_stack=8(%esp),>esi=%esi | ||
5041 | movl 8(%esp),%esi | ||
5042 | |||
5043 | # qhasm: edi = edi_stack | ||
5044 | # asm 1: movl <edi_stack=stack32#4,>edi=int32#6 | ||
5045 | # asm 2: movl <edi_stack=12(%esp),>edi=%edi | ||
5046 | movl 12(%esp),%edi | ||
5047 | |||
5048 | # qhasm: ebp = ebp_stack | ||
5049 | # asm 1: movl <ebp_stack=stack32#5,>ebp=int32#7 | ||
5050 | # asm 2: movl <ebp_stack=16(%esp),>ebp=%ebp | ||
5051 | movl 16(%esp),%ebp | ||
5052 | |||
5053 | # qhasm: leave | ||
5054 | add %eax,%esp | ||
5055 | xor %eax,%eax | ||
5056 | ret | ||
5057 | |||
5058 | # qhasm: bytesatleast65: | ||
5059 | ._bytesatleast65: | ||
5060 | |||
5061 | # qhasm: bytes -= 64 | ||
5062 | # asm 1: sub $64,<bytes=int32#1 | ||
5063 | # asm 2: sub $64,<bytes=%eax | ||
5064 | sub $64,%eax | ||
5065 | |||
5066 | # qhasm: out += 64 | ||
5067 | # asm 1: add $64,<out=int32#6 | ||
5068 | # asm 2: add $64,<out=%edi | ||
5069 | add $64,%edi | ||
5070 | |||
5071 | # qhasm: m += 64 | ||
5072 | # asm 1: add $64,<m=int32#5 | ||
5073 | # asm 2: add $64,<m=%esi | ||
5074 | add $64,%esi | ||
5075 | # comment:fp stack unchanged by jump | ||
5076 | |||
5077 | # qhasm: goto bytesbetween1and255 | ||
5078 | jmp ._bytesbetween1and255 | ||
diff --git a/nacl/crypto_stream/try.c b/nacl/crypto_stream/try.c new file mode 100644 index 00000000..9a36d760 --- /dev/null +++ b/nacl/crypto_stream/try.c | |||
@@ -0,0 +1,124 @@ | |||
1 | /* | ||
2 | * crypto_stream/try.c version 20090118 | ||
3 | * D. J. Bernstein | ||
4 | * Public domain. | ||
5 | */ | ||
6 | |||
7 | #include <stdlib.h> | ||
8 | #include "crypto_stream.h" | ||
9 | |||
/* Allocator provided by the NaCl test harness: returns zeroed, aligned
   storage.  NOTE(review): checksum_compute below indexes up to 16 bytes
   before and after each buffer's nominal length, so alignedcalloc is
   presumably expected to leave writable guard padding on both sides --
   confirm against its definition in the harness. */
extern unsigned char *alignedcalloc(unsigned long long);

/* Implementation name reported by the harness. */
const char *primitiveimplementation = crypto_stream_IMPLEMENTATION;

#define MAXTEST_BYTES 10000   /* largest buffer exercised by the tests */
#define CHECKSUM_BYTES 4096   /* message lengths 0..4095 feed the checksum */
#define TUNE_BYTES 1536       /* fixed length used by the timed doit() call */

/* Key, nonce, message, ciphertext, and stream buffers, each with a
   shadow copy (suffix 2) used to detect out-of-bounds writes. */
static unsigned char *k;
static unsigned char *n;
static unsigned char *m;
static unsigned char *c;
static unsigned char *s;
static unsigned char *k2;
static unsigned char *n2;
static unsigned char *m2;
static unsigned char *c2;
static unsigned char *s2;
28 | |||
/* Harness hook: one-time setup before allocate(); nothing needed here. */
void preallocate(void)
{
}
32 | |||
/* Harness hook: allocate every test buffer (zero-filled by alignedcalloc).
   Allocation failure is not checked -- harness convention is that
   alignedcalloc aborts or the subsequent test crashes visibly. */
void allocate(void)
{
  k = alignedcalloc(crypto_stream_KEYBYTES);
  n = alignedcalloc(crypto_stream_NONCEBYTES);
  m = alignedcalloc(MAXTEST_BYTES);
  c = alignedcalloc(MAXTEST_BYTES);
  s = alignedcalloc(MAXTEST_BYTES);
  k2 = alignedcalloc(crypto_stream_KEYBYTES);
  n2 = alignedcalloc(crypto_stream_NONCEBYTES);
  m2 = alignedcalloc(MAXTEST_BYTES);
  c2 = alignedcalloc(MAXTEST_BYTES);
  s2 = alignedcalloc(MAXTEST_BYTES);
}
46 | |||
/* Harness hook: per-iteration setup before the timed call; nothing needed. */
void predoit(void)
{
}
50 | |||
/* Harness hook: the operation being timed -- one fixed-length stream XOR. */
void doit(void)
{
  crypto_stream_xor(c,m,TUNE_BYTES,n,k);
}
55 | |||
/* Hex digest of the final key state (2 chars per key byte) plus NUL. */
char checksum[crypto_stream_KEYBYTES * 2 + 1];

/* Self-test and checksum driver.
 *
 * For each length i in 0..CHECKSUM_BYTES-1 this:
 *   1. fills 16 guard bytes before and after every buffer with noise,
 *      snapshots everything into the shadow buffers (*2),
 *   2. runs crypto_stream_xor and crypto_stream, checking that each call
 *      modified only its own output region (no write before/after, no
 *      clobbering of the other buffers),
 *   3. checks the two entry points agree: stream XOR plaintext == ciphertext,
 *   4. folds the ciphertext back into the key and the plaintext into the
 *      nonce, so the final checksum depends on every iteration.
 *
 * Returns 0 on success, or a static error string describing the failure.
 * NOTE(review): negative indices like m[-16] assume alignedcalloc leaves
 * at least 16 writable bytes before each buffer -- TODO confirm.
 */
const char *checksum_compute(void)
{
  long long i;
  long long j;

  for (i = 0;i < CHECKSUM_BYTES;++i) {
    long long mlen = i;
    long long clen = i;
    long long slen = i;
    long long klen = crypto_stream_KEYBYTES;
    long long nlen = crypto_stream_NONCEBYTES;
    /* Randomize the guard regions surrounding every buffer. */
    for (j = -16;j < 0;++j) m[j] = random();
    for (j = -16;j < 0;++j) c[j] = random();
    for (j = -16;j < 0;++j) s[j] = random();
    for (j = -16;j < 0;++j) n[j] = random();
    for (j = -16;j < 0;++j) k[j] = random();
    for (j = mlen;j < mlen + 16;++j) m[j] = random();
    for (j = clen;j < clen + 16;++j) c[j] = random();
    for (j = slen;j < slen + 16;++j) s[j] = random();
    for (j = nlen;j < nlen + 16;++j) n[j] = random();
    for (j = klen;j < klen + 16;++j) k[j] = random();
    /* Snapshot buffers (guards included) for the overwrite checks below. */
    for (j = -16;j < mlen + 16;++j) m2[j] = m[j];
    for (j = -16;j < clen + 16;++j) c2[j] = c[j];
    for (j = -16;j < slen + 16;++j) s2[j] = s[j];
    for (j = -16;j < nlen + 16;++j) n2[j] = n[j];
    for (j = -16;j < klen + 16;++j) k2[j] = k[j];

    crypto_stream_xor(c,m,mlen,n,k);

    /* Only c[0..clen) may have changed. */
    for (j = -16;j < mlen + 16;++j) if (m[j] != m2[j]) return "crypto_stream_xor overwrites m";
    for (j = -16;j < slen + 16;++j) if (s[j] != s2[j]) return "crypto_stream_xor overwrites s";
    for (j = -16;j < nlen + 16;++j) if (n[j] != n2[j]) return "crypto_stream_xor overwrites n";
    for (j = -16;j < klen + 16;++j) if (k[j] != k2[j]) return "crypto_stream_xor overwrites k";
    for (j = -16;j < 0;++j) if (c[j] != c2[j]) return "crypto_stream_xor writes before output";
    for (j = clen;j < clen + 16;++j) if (c[j] != c2[j]) return "crypto_stream_xor writes after output";

    /* Re-snapshot c so the crypto_stream call below can be checked too. */
    for (j = -16;j < clen + 16;++j) c2[j] = c[j];

    crypto_stream(s,slen,n,k);

    /* Only s[0..slen) may have changed. */
    for (j = -16;j < mlen + 16;++j) if (m[j] != m2[j]) return "crypto_stream overwrites m";
    for (j = -16;j < clen + 16;++j) if (c[j] != c2[j]) return "crypto_stream overwrites c";
    for (j = -16;j < nlen + 16;++j) if (n[j] != n2[j]) return "crypto_stream overwrites n";
    for (j = -16;j < klen + 16;++j) if (k[j] != k2[j]) return "crypto_stream overwrites k";
    for (j = -16;j < 0;++j) if (s[j] != s2[j]) return "crypto_stream writes before output";
    for (j = slen;j < slen + 16;++j) if (s[j] != s2[j]) return "crypto_stream writes after output";

    /* The XOR entry point must equal keystream XOR plaintext. */
    for (j = 0;j < mlen;++j)
      if ((s[j] ^ m[j]) != c[j]) return "crypto_stream_xor does not match crypto_stream";

    /* Feed ciphertext back into the key, decrypt, and re-check agreement. */
    for (j = 0;j < clen;++j) k[j % klen] ^= c[j];
    crypto_stream_xor(m,c,clen,n,k);
    crypto_stream(s,slen,n,k);
    for (j = 0;j < mlen;++j)
      if ((s[j] ^ m[j]) != c[j]) return "crypto_stream_xor does not match crypto_stream";
    /* Feed the recovered plaintext into the nonce for the next round. */
    for (j = 0;j < mlen;++j) n[j % nlen] ^= m[j];
    /* NOTE(review): presumably pins the byte past the message so later
       feedback rounds are independent of the guard noise -- confirm. */
    m[mlen] = 0;
  }

  /* Emit the final key as lowercase hex. */
  for (i = 0;i < crypto_stream_KEYBYTES;++i) {
    checksum[2 * i] = "0123456789abcdef"[15 & (k[i] >> 4)];
    checksum[2 * i + 1] = "0123456789abcdef"[15 & k[i]];
  }
  checksum[2 * i] = 0;

  return 0;
}
diff --git a/nacl/crypto_stream/wrapper-stream.cpp b/nacl/crypto_stream/wrapper-stream.cpp new file mode 100644 index 00000000..dd10c2f6 --- /dev/null +++ b/nacl/crypto_stream/wrapper-stream.cpp | |||
@@ -0,0 +1,12 @@ | |||
1 | #include <string> | ||
2 | using std::string; | ||
3 | #include "crypto_stream.h" | ||
4 | |||
5 | string crypto_stream(size_t clen,const string &n,const string &k) | ||
6 | { | ||
7 | if (n.size() != crypto_stream_NONCEBYTES) throw "incorrect nonce length"; | ||
8 | if (k.size() != crypto_stream_KEYBYTES) throw "incorrect key length"; | ||
9 | unsigned char c[clen]; | ||
10 | crypto_stream(c,clen,(const unsigned char *) n.c_str(),(const unsigned char *) k.c_str()); | ||
11 | return string((char *) c,clen); | ||
12 | } | ||
diff --git a/nacl/crypto_stream/wrapper-xor.cpp b/nacl/crypto_stream/wrapper-xor.cpp new file mode 100644 index 00000000..8d770d1e --- /dev/null +++ b/nacl/crypto_stream/wrapper-xor.cpp | |||
@@ -0,0 +1,17 @@ | |||
1 | #include <string> | ||
2 | using std::string; | ||
3 | #include "crypto_stream.h" | ||
4 | |||
5 | string crypto_stream_xor(const string &m,const string &n,const string &k) | ||
6 | { | ||
7 | if (n.size() != crypto_stream_NONCEBYTES) throw "incorrect nonce length"; | ||
8 | if (k.size() != crypto_stream_KEYBYTES) throw "incorrect key length"; | ||
9 | size_t mlen = m.size(); | ||
10 | unsigned char c[mlen]; | ||
11 | crypto_stream_xor(c, | ||
12 | (const unsigned char *) m.c_str(),mlen, | ||
13 | (const unsigned char *) n.c_str(), | ||
14 | (const unsigned char *) k.c_str() | ||
15 | ); | ||
16 | return string((char *) c,mlen); | ||
17 | } | ||
diff --git a/nacl/crypto_stream/xsalsa20/checksum b/nacl/crypto_stream/xsalsa20/checksum new file mode 100644 index 00000000..cae64c0d --- /dev/null +++ b/nacl/crypto_stream/xsalsa20/checksum | |||
@@ -0,0 +1 @@ | |||
201bc58a96adcb6ed339ca33c188af8ca04a4ce68be1e0953309ee09a0cf8e7a | |||
diff --git a/nacl/crypto_stream/xsalsa20/ref/api.h b/nacl/crypto_stream/xsalsa20/ref/api.h new file mode 100644 index 00000000..6910a7dc --- /dev/null +++ b/nacl/crypto_stream/xsalsa20/ref/api.h | |||
@@ -0,0 +1,2 @@ | |||
/* xsalsa20: 32-byte key; 24-byte nonce (16 bytes consumed by hsalsa20,
   remaining 8 passed to salsa20 -- see stream.c/xor.c). */
#define CRYPTO_KEYBYTES 32
#define CRYPTO_NONCEBYTES 24
diff --git a/nacl/crypto_stream/xsalsa20/ref/implementors b/nacl/crypto_stream/xsalsa20/ref/implementors new file mode 100644 index 00000000..f6fb3c73 --- /dev/null +++ b/nacl/crypto_stream/xsalsa20/ref/implementors | |||
@@ -0,0 +1 @@ | |||
Daniel J. Bernstein | |||
diff --git a/nacl/crypto_stream/xsalsa20/ref/stream.c b/nacl/crypto_stream/xsalsa20/ref/stream.c new file mode 100644 index 00000000..2d710709 --- /dev/null +++ b/nacl/crypto_stream/xsalsa20/ref/stream.c | |||
@@ -0,0 +1,22 @@ | |||
1 | /* | ||
2 | version 20080914 | ||
3 | D. J. Bernstein | ||
4 | Public domain. | ||
5 | */ | ||
6 | |||
7 | #include "crypto_core_hsalsa20.h" | ||
8 | #include "crypto_stream_salsa20.h" | ||
9 | #include "crypto_stream.h" | ||
10 | |||
11 | static const unsigned char sigma[16] = "expand 32-byte k"; | ||
12 | |||
13 | int crypto_stream( | ||
14 | unsigned char *c,unsigned long long clen, | ||
15 | const unsigned char *n, | ||
16 | const unsigned char *k | ||
17 | ) | ||
18 | { | ||
19 | unsigned char subkey[32]; | ||
20 | crypto_core_hsalsa20(subkey,n,k,sigma); | ||
21 | return crypto_stream_salsa20(c,clen,n + 16,subkey); | ||
22 | } | ||
diff --git a/nacl/crypto_stream/xsalsa20/ref/xor.c b/nacl/crypto_stream/xsalsa20/ref/xor.c new file mode 100644 index 00000000..13f3134a --- /dev/null +++ b/nacl/crypto_stream/xsalsa20/ref/xor.c | |||
@@ -0,0 +1,23 @@ | |||
1 | /* | ||
2 | version 20080913 | ||
3 | D. J. Bernstein | ||
4 | Public domain. | ||
5 | */ | ||
6 | |||
7 | #include "crypto_core_hsalsa20.h" | ||
8 | #include "crypto_stream_salsa20.h" | ||
9 | #include "crypto_stream.h" | ||
10 | |||
11 | static const unsigned char sigma[16] = "expand 32-byte k"; | ||
12 | |||
13 | int crypto_stream_xor( | ||
14 | unsigned char *c, | ||
15 | const unsigned char *m,unsigned long long mlen, | ||
16 | const unsigned char *n, | ||
17 | const unsigned char *k | ||
18 | ) | ||
19 | { | ||
20 | unsigned char subkey[32]; | ||
21 | crypto_core_hsalsa20(subkey,n,k,sigma); | ||
22 | return crypto_stream_salsa20_xor(c,m,mlen,n + 16,subkey); | ||
23 | } | ||
diff --git a/nacl/crypto_stream/xsalsa20/selected b/nacl/crypto_stream/xsalsa20/selected new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/nacl/crypto_stream/xsalsa20/selected | |||
diff --git a/nacl/crypto_stream/xsalsa20/used b/nacl/crypto_stream/xsalsa20/used new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/nacl/crypto_stream/xsalsa20/used | |||