2 /* Do not modify. This file is auto-generated from aesni-sha256-x86_64.pl. */
# aesni_cbc_sha256_enc: public entry point.  Dispatches to the best
# available stitched AES-CBC+SHA-256 implementation based on the
# OPENSSL_ia32cap_P CPU-capability vector (SHA extensions, XOP, AVX2, AVX).
# NOTE(review): this view is sparse -- each line is prefixed with its
# original-file line number, and the capability-bit tests that set the
# flags consumed by the conditional jumps below are not visible here.
6 .globl aesni_cbc_sha256_enc
7 .type aesni_cbc_sha256_enc,@function
10 leaq OPENSSL_ia32cap_P(%rip),%r11    # r11 -> CPU capability flags
17 jc aesni_cbc_sha256_enc_shaext       # SHA extensions path
22 jnz aesni_cbc_sha256_enc_xop         # AMD XOP path
25 je aesni_cbc_sha256_enc_avx2         # AVX2 path
27 jnz aesni_cbc_sha256_enc_avx         # plain AVX path
35 .size aesni_cbc_sha256_enc,.-aesni_cbc_sha256_enc
# SHA-256 round-constant table (K256).  Each group of four 32-bit
# constants is emitted twice so that 256-bit (AVX2) loads see the same
# constants replicated in both 128-bit lanes, while the 128-bit paths
# simply use one copy.
40 .long 0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5
41 .long 0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5
42 .long 0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5
43 .long 0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5
44 .long 0xd807aa98,0x12835b01,0x243185be,0x550c7dc3
45 .long 0xd807aa98,0x12835b01,0x243185be,0x550c7dc3
46 .long 0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf174
47 .long 0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf174
48 .long 0xe49b69c1,0xefbe4786,0x0fc19dc6,0x240ca1cc
49 .long 0xe49b69c1,0xefbe4786,0x0fc19dc6,0x240ca1cc
50 .long 0x2de92c6f,0x4a7484aa,0x5cb0a9dc,0x76f988da
51 .long 0x2de92c6f,0x4a7484aa,0x5cb0a9dc,0x76f988da
52 .long 0x983e5152,0xa831c66d,0xb00327c8,0xbf597fc7
53 .long 0x983e5152,0xa831c66d,0xb00327c8,0xbf597fc7
54 .long 0xc6e00bf3,0xd5a79147,0x06ca6351,0x14292967
55 .long 0xc6e00bf3,0xd5a79147,0x06ca6351,0x14292967
56 .long 0x27b70a85,0x2e1b2138,0x4d2c6dfc,0x53380d13
57 .long 0x27b70a85,0x2e1b2138,0x4d2c6dfc,0x53380d13
58 .long 0x650a7354,0x766a0abb,0x81c2c92e,0x92722c85
59 .long 0x650a7354,0x766a0abb,0x81c2c92e,0x92722c85
60 .long 0xa2bfe8a1,0xa81a664b,0xc24b8b70,0xc76c51a3
61 .long 0xa2bfe8a1,0xa81a664b,0xc24b8b70,0xc76c51a3
62 .long 0xd192e819,0xd6990624,0xf40e3585,0x106aa070
63 .long 0xd192e819,0xd6990624,0xf40e3585,0x106aa070
64 .long 0x19a4c116,0x1e376c08,0x2748774c,0x34b0bcb5
65 .long 0x19a4c116,0x1e376c08,0x2748774c,0x34b0bcb5
66 .long 0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3
67 .long 0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3
68 .long 0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208
69 .long 0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208
70 .long 0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2
71 .long 0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2
# K256+512: byte-shuffle mask used with (v)pshufb to convert the
# little-endian input bytes into big-endian 32-bit SHA-256 words.
73 .long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f
74 .long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f
# K256+544: all-zero / all-ones select masks, indexed by the AES key
# schedule's round count (see "vmovdqa 0(%r13,%r14,8)" loads below);
# they pick which aesenclast result is the real last round for
# AES-128/192/256.
75 .long 0,0,0,0, 0,0,0,0, -1,-1,-1,-1
76 .long 0,0,0,0, 0,0,0,0
# ASCII identification string:
# "AESNI-CBC+SHA256 stitch for x86_64, CRYPTOGAMS by <appro@openssl.org>"
77 .byte 65,69,83,78,73,45,67,66,67,43,83,72,65,50,53,54,32,115,116,105,116,99,104,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
# aesni_cbc_sha256_enc_xop: stitched AES-CBC encryption + SHA-256 using
# AMD XOP rotate instructions.  The XOP vprotd instructions are emitted
# as raw ".byte 143,232,120,194,..." sequences (0x8F,0xE8,0x78,0xC2 is
# the XOP VPROTD encoding) for assemblers that lack XOP support.
# NOTE(review): this view is sparse -- every line is prefixed with its
# original-file line number and many intervening instructions (prologue,
# SHA round arithmetic on the general registers, loop control, epilogue)
# are missing; comments are limited to what the visible code shows.
79 .type aesni_cbc_sha256_enc_xop,@function
81 aesni_cbc_sha256_enc_xop:
# Stash caller arguments in the 64(%rsp) save area so they survive the
# main loop (restored near the bottom of the function).
101 movq %rdx,64+16(%rsp)
105 movq %r10,64+48(%rsp)
106 movq %r11,64+56(%rsp)
# r14d = rounds field at offset 240 of the AES key schedule (rdi-relative,
# biased by -128); index the K256+544 mask area by it to select the
# per-key-length last-round masks xmm12/xmm13/xmm14.
112 leaq K256+544(%rip),%r13
113 movl 240-128(%rdi),%r14d
128 vmovdqa 0(%r13,%r14,8),%xmm14
129 vmovdqa 16(%r13,%r14,8),%xmm13
130 vmovdqa 32(%r13,%r14,8),%xmm12
131 vmovdqu 0-128(%rdi),%xmm10          # round-0 AES key
# Load one 64-byte message block and byte-swap it into big-endian words
# (xmm7 = shuffle mask from K256+512).
135 vmovdqa K256+512(%rip),%xmm7
136 vmovdqu 0(%rsi,%r12,1),%xmm0
137 vmovdqu 16(%rsi,%r12,1),%xmm1
138 vmovdqu 32(%rsi,%r12,1),%xmm2
139 vmovdqu 48(%rsi,%r12,1),%xmm3
140 vpshufb %xmm7,%xmm0,%xmm0
142 vpshufb %xmm7,%xmm1,%xmm1
143 vpshufb %xmm7,%xmm2,%xmm2
# Pre-add round constants K[0..15] and park W+K on the stack for the
# scalar SHA rounds (not visible in this view) to consume.
144 vpaddd 0(%rbp),%xmm0,%xmm4
145 vpshufb %xmm7,%xmm3,%xmm3
146 vpaddd 32(%rbp),%xmm1,%xmm5
147 vpaddd 64(%rbp),%xmm2,%xmm6
148 vpaddd 96(%rbp),%xmm3,%xmm7
149 vmovdqa %xmm4,0(%rsp)
151 vmovdqa %xmm5,16(%rsp)
153 vmovdqa %xmm6,32(%rsp)
155 vmovdqa %xmm7,48(%rsp)
# --- schedule expansion, quarter 1 (updates xmm0 = W[t..t+3]),
# interleaved with AES-CBC rounds (xmm9 = cipher state, xmm10 = key).
164 vpalignr $4,%xmm0,%xmm1,%xmm4
167 vpalignr $4,%xmm2,%xmm3,%xmm7
170 .byte 143,232,120,194,236,14        # XOP vprotd (hand-encoded)
173 vpsrld $3,%xmm4,%xmm4
176 vpaddd %xmm7,%xmm0,%xmm0
178 vpxor %xmm10,%xmm9,%xmm9            # AES: xor in round-0 key
179 vmovdqu 16-128(%rdi),%xmm10
183 .byte 143,232,120,194,245,11
186 vpxor %xmm5,%xmm4,%xmm4
191 .byte 143,232,120,194,251,13
194 vpxor %xmm6,%xmm4,%xmm4
197 vpsrld $10,%xmm3,%xmm6
200 vpaddd %xmm4,%xmm0,%xmm0
203 .byte 143,232,120,194,239,2
206 vpxor %xmm6,%xmm7,%xmm7
211 vpxor %xmm5,%xmm7,%xmm7
215 vpxor %xmm8,%xmm9,%xmm9             # presumably the CBC chaining xor -- confirm
217 vpsrldq $8,%xmm7,%xmm7
222 vpaddd %xmm7,%xmm0,%xmm0
227 .byte 143,232,120,194,248,13
230 vpsrld $10,%xmm0,%xmm6
233 .byte 143,232,120,194,239,2
236 vpxor %xmm6,%xmm7,%xmm7
241 vpxor %xmm5,%xmm7,%xmm7
246 vpslldq $8,%xmm7,%xmm7
250 vaesenc %xmm10,%xmm9,%xmm9
251 vmovdqu 32-128(%rdi),%xmm10
253 vpaddd %xmm7,%xmm0,%xmm0
258 vpaddd 0(%rbp),%xmm0,%xmm6
280 vaesenc %xmm10,%xmm9,%xmm9
281 vmovdqu 48-128(%rdi),%xmm10
299 vmovdqa %xmm6,0(%rsp)
# --- schedule expansion, quarter 2 (updates xmm1).
300 vpalignr $4,%xmm1,%xmm2,%xmm4
303 vpalignr $4,%xmm3,%xmm0,%xmm7
306 .byte 143,232,120,194,236,14
309 vpsrld $3,%xmm4,%xmm4
312 vpaddd %xmm7,%xmm1,%xmm1
314 vaesenc %xmm10,%xmm9,%xmm9
315 vmovdqu 64-128(%rdi),%xmm10
319 .byte 143,232,120,194,245,11
322 vpxor %xmm5,%xmm4,%xmm4
327 .byte 143,232,120,194,248,13
330 vpxor %xmm6,%xmm4,%xmm4
333 vpsrld $10,%xmm0,%xmm6
336 vpaddd %xmm4,%xmm1,%xmm1
339 .byte 143,232,120,194,239,2
342 vpxor %xmm6,%xmm7,%xmm7
347 vpxor %xmm5,%xmm7,%xmm7
351 vaesenc %xmm10,%xmm9,%xmm9
352 vmovdqu 80-128(%rdi),%xmm10
354 vpsrldq $8,%xmm7,%xmm7
359 vpaddd %xmm7,%xmm1,%xmm1
364 .byte 143,232,120,194,249,13
367 vpsrld $10,%xmm1,%xmm6
370 .byte 143,232,120,194,239,2
373 vpxor %xmm6,%xmm7,%xmm7
378 vpxor %xmm5,%xmm7,%xmm7
383 vpslldq $8,%xmm7,%xmm7
387 vaesenc %xmm10,%xmm9,%xmm9
388 vmovdqu 96-128(%rdi),%xmm10
390 vpaddd %xmm7,%xmm1,%xmm1
395 vpaddd 32(%rbp),%xmm1,%xmm6
417 vaesenc %xmm10,%xmm9,%xmm9
418 vmovdqu 112-128(%rdi),%xmm10
436 vmovdqa %xmm6,16(%rsp)
# --- schedule expansion, quarter 3 (updates xmm2).
437 vpalignr $4,%xmm2,%xmm3,%xmm4
440 vpalignr $4,%xmm0,%xmm1,%xmm7
443 .byte 143,232,120,194,236,14
446 vpsrld $3,%xmm4,%xmm4
449 vpaddd %xmm7,%xmm2,%xmm2
451 vaesenc %xmm10,%xmm9,%xmm9
452 vmovdqu 128-128(%rdi),%xmm10
456 .byte 143,232,120,194,245,11
459 vpxor %xmm5,%xmm4,%xmm4
464 .byte 143,232,120,194,249,13
467 vpxor %xmm6,%xmm4,%xmm4
470 vpsrld $10,%xmm1,%xmm6
473 vpaddd %xmm4,%xmm2,%xmm2
476 .byte 143,232,120,194,239,2
479 vpxor %xmm6,%xmm7,%xmm7
484 vpxor %xmm5,%xmm7,%xmm7
488 vaesenc %xmm10,%xmm9,%xmm9
489 vmovdqu 144-128(%rdi),%xmm10
491 vpsrldq $8,%xmm7,%xmm7
496 vpaddd %xmm7,%xmm2,%xmm2
501 .byte 143,232,120,194,250,13
504 vpsrld $10,%xmm2,%xmm6
507 .byte 143,232,120,194,239,2
510 vpxor %xmm6,%xmm7,%xmm7
515 vpxor %xmm5,%xmm7,%xmm7
520 vpslldq $8,%xmm7,%xmm7
524 vaesenc %xmm10,%xmm9,%xmm9
525 vmovdqu 160-128(%rdi),%xmm10
527 vpaddd %xmm7,%xmm2,%xmm2
532 vpaddd 64(%rbp),%xmm2,%xmm6
# AES-128's last round happens here: aesenclast with the key at +160 is
# speculatively taken (xmm11) while aesenc continues for longer keys;
# the xmm12/13/14 masks later select the correct result.
554 vaesenclast %xmm10,%xmm9,%xmm11
555 vaesenc %xmm10,%xmm9,%xmm9
556 vmovdqu 176-128(%rdi),%xmm10
574 vmovdqa %xmm6,32(%rsp)
# --- schedule expansion, quarter 4 (updates xmm3).
575 vpalignr $4,%xmm3,%xmm0,%xmm4
578 vpalignr $4,%xmm1,%xmm2,%xmm7
581 .byte 143,232,120,194,236,14
584 vpsrld $3,%xmm4,%xmm4
587 vpaddd %xmm7,%xmm3,%xmm3
589 vpand %xmm12,%xmm11,%xmm8           # keep AES-128 result iff mask says so
590 vaesenc %xmm10,%xmm9,%xmm9
591 vmovdqu 192-128(%rdi),%xmm10
595 .byte 143,232,120,194,245,11
598 vpxor %xmm5,%xmm4,%xmm4
603 .byte 143,232,120,194,250,13
606 vpxor %xmm6,%xmm4,%xmm4
609 vpsrld $10,%xmm2,%xmm6
612 vpaddd %xmm4,%xmm3,%xmm3
615 .byte 143,232,120,194,239,2
618 vpxor %xmm6,%xmm7,%xmm7
623 vpxor %xmm5,%xmm7,%xmm7
627 vaesenclast %xmm10,%xmm9,%xmm11     # speculative AES-192 last round
628 vaesenc %xmm10,%xmm9,%xmm9
629 vmovdqu 208-128(%rdi),%xmm10
631 vpsrldq $8,%xmm7,%xmm7
636 vpaddd %xmm7,%xmm3,%xmm3
641 .byte 143,232,120,194,251,13
644 vpsrld $10,%xmm3,%xmm6
647 .byte 143,232,120,194,239,2
650 vpxor %xmm6,%xmm7,%xmm7
655 vpxor %xmm5,%xmm7,%xmm7
660 vpslldq $8,%xmm7,%xmm7
664 vpand %xmm13,%xmm11,%xmm11          # keep AES-192 result iff mask says so
665 vaesenc %xmm10,%xmm9,%xmm9
666 vmovdqu 224-128(%rdi),%xmm10
668 vpaddd %xmm7,%xmm3,%xmm3
673 vpaddd 96(%rbp),%xmm3,%xmm6
# Merge the masked candidates and take the AES-256 last round; rewind
# xmm10 to the round-0 key for the next block.
695 vpor %xmm11,%xmm8,%xmm8
696 vaesenclast %xmm10,%xmm9,%xmm11
697 vmovdqu 0-128(%rdi),%xmm10
715 vmovdqa %xmm6,48(%rsp)
717 vpand %xmm14,%xmm11,%xmm11          # keep AES-256 result iff mask says so
719 vpor %xmm11,%xmm8,%xmm8             # xmm8 = final ciphertext block
720 vmovdqu %xmm8,(%r15,%r12,1)         # store ciphertext
# --- second pass over the already-expanded schedule: same AES-round
# interleave, no further message expansion (final 16 SHA rounds).
735 vpxor %xmm10,%xmm9,%xmm9
736 vmovdqu 16-128(%rdi),%xmm10
763 vpxor %xmm8,%xmm9,%xmm9
790 vaesenc %xmm10,%xmm9,%xmm9
791 vmovdqu 32-128(%rdi),%xmm10
818 vaesenc %xmm10,%xmm9,%xmm9
819 vmovdqu 48-128(%rdi),%xmm10
846 vaesenc %xmm10,%xmm9,%xmm9
847 vmovdqu 64-128(%rdi),%xmm10
874 vaesenc %xmm10,%xmm9,%xmm9
875 vmovdqu 80-128(%rdi),%xmm10
902 vaesenc %xmm10,%xmm9,%xmm9
903 vmovdqu 96-128(%rdi),%xmm10
930 vaesenc %xmm10,%xmm9,%xmm9
931 vmovdqu 112-128(%rdi),%xmm10
958 vaesenc %xmm10,%xmm9,%xmm9
959 vmovdqu 128-128(%rdi),%xmm10
986 vaesenc %xmm10,%xmm9,%xmm9
987 vmovdqu 144-128(%rdi),%xmm10
1014 vaesenc %xmm10,%xmm9,%xmm9
1015 vmovdqu 160-128(%rdi),%xmm10
1042 vaesenclast %xmm10,%xmm9,%xmm11
1043 vaesenc %xmm10,%xmm9,%xmm9
1044 vmovdqu 176-128(%rdi),%xmm10
1071 vpand %xmm12,%xmm11,%xmm8
1072 vaesenc %xmm10,%xmm9,%xmm9
1073 vmovdqu 192-128(%rdi),%xmm10
1100 vaesenclast %xmm10,%xmm9,%xmm11
1101 vaesenc %xmm10,%xmm9,%xmm9
1102 vmovdqu 208-128(%rdi),%xmm10
1129 vpand %xmm13,%xmm11,%xmm11
1130 vaesenc %xmm10,%xmm9,%xmm9
1131 vmovdqu 224-128(%rdi),%xmm10
1158 vpor %xmm11,%xmm8,%xmm8
1159 vaesenclast %xmm10,%xmm9,%xmm11
1160 vmovdqu 0-128(%rdi),%xmm10
# Reload saved pointers, emit the second ciphertext block, and test the
# loop-termination condition against the saved end pointer.
1178 movq 64+0(%rsp),%r12
1179 movq 64+8(%rsp),%r13
1180 movq 64+40(%rsp),%r15
1181 movq 64+48(%rsp),%rsi
1183 vpand %xmm14,%xmm11,%xmm11
1185 vpor %xmm11,%xmm8,%xmm8
1186 vmovdqu %xmm8,(%r12,%r13,1)
1198 cmpq 64+16(%rsp),%r12
1211 movq 64+32(%rsp),%r8
1212 movq 64+56(%rsp),%rsi
1224 .size aesni_cbc_sha256_enc_xop,.-aesni_cbc_sha256_enc_xop
# aesni_cbc_sha256_enc_avx: stitched AES-CBC encryption + SHA-256 using
# plain AVX.  Unlike the XOP path, the sigma rotations of the message
# schedule are built from vpslld/vpsrld/vpsrlq/vpxor combinations, and
# the scalar SHA-256 round rotations use shrdl on r13d/r14d.
# NOTE(review): this view is sparse -- every line is prefixed with its
# original-file line number and many intervening instructions (prologue,
# the add/and/xor body of the scalar SHA rounds, loop control, epilogue)
# are missing; comments are limited to what the visible code shows.
1225 .type aesni_cbc_sha256_enc_avx,@function
1227 aesni_cbc_sha256_enc_avx:
# Stash caller arguments in the 64(%rsp) save area for the duration of
# the main loop.
1246 movq %rsi,64+8(%rsp)
1247 movq %rdx,64+16(%rsp)
1249 movq %r8,64+32(%rsp)
1250 movq %r9,64+40(%rsp)
1251 movq %r10,64+48(%rsp)
1252 movq %r11,64+56(%rsp)
# r14d = rounds field of the AES key schedule (offset 240, rdi biased
# by -128); select last-round masks xmm12/13/14 from K256+544.
1258 leaq K256+544(%rip),%r13
1259 movl 240-128(%rdi),%r14d
1274 vmovdqa 0(%r13,%r14,8),%xmm14
1275 vmovdqa 16(%r13,%r14,8),%xmm13
1276 vmovdqa 32(%r13,%r14,8),%xmm12
1277 vmovdqu 0-128(%rdi),%xmm10         # round-0 AES key
# Load a 64-byte message block and byte-swap to big-endian words
# (xmm7 = shuffle mask at K256+512).
1281 vmovdqa K256+512(%rip),%xmm7
1282 vmovdqu 0(%rsi,%r12,1),%xmm0
1283 vmovdqu 16(%rsi,%r12,1),%xmm1
1284 vmovdqu 32(%rsi,%r12,1),%xmm2
1285 vmovdqu 48(%rsi,%r12,1),%xmm3
1286 vpshufb %xmm7,%xmm0,%xmm0
1287 leaq K256(%rip),%rbp               # rbp -> round-constant table
1288 vpshufb %xmm7,%xmm1,%xmm1
1289 vpshufb %xmm7,%xmm2,%xmm2
# Pre-add K[0..15] and store W+K on the stack for the scalar rounds.
1290 vpaddd 0(%rbp),%xmm0,%xmm4
1291 vpshufb %xmm7,%xmm3,%xmm3
1292 vpaddd 32(%rbp),%xmm1,%xmm5
1293 vpaddd 64(%rbp),%xmm2,%xmm6
1294 vpaddd 96(%rbp),%xmm3,%xmm7
1295 vmovdqa %xmm4,0(%rsp)
1297 vmovdqa %xmm5,16(%rsp)
1299 vmovdqa %xmm6,32(%rsp)
1301 vmovdqa %xmm7,48(%rsp)
# Load the IV / previous ciphertext block (CBC state) and remember the
# current input offset.
1308 vmovdqu (%r12),%xmm9
1309 movq %r12,64+0(%rsp)
# --- schedule expansion, quarter 1 (updates xmm0), interleaved with
# scalar SHA round rotations (shrdl) and AES-CBC rounds.
1310 vpalignr $4,%xmm0,%xmm1,%xmm4
1311 shrdl $14,%r13d,%r13d
1314 vpalignr $4,%xmm2,%xmm3,%xmm7
1316 shrdl $9,%r14d,%r14d
1318 vpsrld $7,%xmm4,%xmm6
1319 shrdl $5,%r13d,%r13d
1322 vpaddd %xmm7,%xmm0,%xmm0
1323 vpxor %xmm10,%xmm9,%xmm9           # AES: xor in round-0 key
1324 vmovdqu 16-128(%rdi),%xmm10
1328 vpsrld $3,%xmm4,%xmm7
1329 shrdl $11,%r14d,%r14d
1332 vpslld $14,%xmm4,%xmm5
1333 shrdl $6,%r13d,%r13d
1336 vpxor %xmm6,%xmm7,%xmm4
1340 vpshufd $250,%xmm3,%xmm7
1342 shrdl $2,%r14d,%r14d
1344 vpsrld $11,%xmm6,%xmm6
1347 shrdl $14,%r13d,%r13d
1348 vpxor %xmm5,%xmm4,%xmm4
1352 vpslld $11,%xmm5,%xmm5
1353 shrdl $9,%r14d,%r14d
1355 shrdl $5,%r13d,%r13d
1356 vpxor %xmm6,%xmm4,%xmm4
1359 vpxor %xmm8,%xmm9,%xmm9            # presumably the CBC chaining xor -- confirm
1361 vpsrld $10,%xmm7,%xmm6
1364 shrdl $11,%r14d,%r14d
1365 vpxor %xmm5,%xmm4,%xmm4
1368 shrdl $6,%r13d,%r13d
1369 vpsrlq $17,%xmm7,%xmm7
1373 vpaddd %xmm4,%xmm0,%xmm0
1377 vpxor %xmm7,%xmm6,%xmm6
1378 shrdl $2,%r14d,%r14d
1381 vpsrlq $2,%xmm7,%xmm7
1383 shrdl $14,%r13d,%r13d
1385 vpxor %xmm7,%xmm6,%xmm6
1388 shrdl $9,%r14d,%r14d
1389 vpshufd $132,%xmm6,%xmm6
1391 shrdl $5,%r13d,%r13d
1393 vpsrldq $8,%xmm6,%xmm6
1395 vaesenc %xmm10,%xmm9,%xmm9
1396 vmovdqu 32-128(%rdi),%xmm10
1399 vpaddd %xmm6,%xmm0,%xmm0
1401 shrdl $11,%r14d,%r14d
1403 vpshufd $80,%xmm0,%xmm7
1405 shrdl $6,%r13d,%r13d
1407 vpsrld $10,%xmm7,%xmm6
1411 vpsrlq $17,%xmm7,%xmm7
1414 shrdl $2,%r14d,%r14d
1415 vpxor %xmm7,%xmm6,%xmm6
1419 vpsrlq $2,%xmm7,%xmm7
1420 shrdl $14,%r13d,%r13d
1423 vpxor %xmm7,%xmm6,%xmm6
1425 shrdl $9,%r14d,%r14d
1427 vpshufd $232,%xmm6,%xmm6
1428 shrdl $5,%r13d,%r13d
1431 vpslldq $8,%xmm6,%xmm6
1432 vaesenc %xmm10,%xmm9,%xmm9
1433 vmovdqu 48-128(%rdi),%xmm10
1437 vpaddd %xmm6,%xmm0,%xmm0
1438 shrdl $11,%r14d,%r14d
1441 vpaddd 0(%rbp),%xmm0,%xmm6
1442 shrdl $6,%r13d,%r13d
1449 shrdl $2,%r14d,%r14d
1453 vmovdqa %xmm6,0(%rsp)
# --- schedule expansion, quarter 2 (updates xmm1).
1454 vpalignr $4,%xmm1,%xmm2,%xmm4
1455 shrdl $14,%r13d,%r13d
1458 vpalignr $4,%xmm3,%xmm0,%xmm7
1460 shrdl $9,%r14d,%r14d
1462 vpsrld $7,%xmm4,%xmm6
1463 shrdl $5,%r13d,%r13d
1466 vpaddd %xmm7,%xmm1,%xmm1
1467 vaesenc %xmm10,%xmm9,%xmm9
1468 vmovdqu 64-128(%rdi),%xmm10
1472 vpsrld $3,%xmm4,%xmm7
1473 shrdl $11,%r14d,%r14d
1476 vpslld $14,%xmm4,%xmm5
1477 shrdl $6,%r13d,%r13d
1480 vpxor %xmm6,%xmm7,%xmm4
1484 vpshufd $250,%xmm0,%xmm7
1486 shrdl $2,%r14d,%r14d
1488 vpsrld $11,%xmm6,%xmm6
1491 shrdl $14,%r13d,%r13d
1492 vpxor %xmm5,%xmm4,%xmm4
1496 vpslld $11,%xmm5,%xmm5
1497 shrdl $9,%r14d,%r14d
1499 shrdl $5,%r13d,%r13d
1500 vpxor %xmm6,%xmm4,%xmm4
1503 vaesenc %xmm10,%xmm9,%xmm9
1504 vmovdqu 80-128(%rdi),%xmm10
1506 vpsrld $10,%xmm7,%xmm6
1509 shrdl $11,%r14d,%r14d
1510 vpxor %xmm5,%xmm4,%xmm4
1513 shrdl $6,%r13d,%r13d
1514 vpsrlq $17,%xmm7,%xmm7
1518 vpaddd %xmm4,%xmm1,%xmm1
1522 vpxor %xmm7,%xmm6,%xmm6
1523 shrdl $2,%r14d,%r14d
1526 vpsrlq $2,%xmm7,%xmm7
1528 shrdl $14,%r13d,%r13d
1530 vpxor %xmm7,%xmm6,%xmm6
1533 shrdl $9,%r14d,%r14d
1534 vpshufd $132,%xmm6,%xmm6
1536 shrdl $5,%r13d,%r13d
1538 vpsrldq $8,%xmm6,%xmm6
1540 vaesenc %xmm10,%xmm9,%xmm9
1541 vmovdqu 96-128(%rdi),%xmm10
1544 vpaddd %xmm6,%xmm1,%xmm1
1546 shrdl $11,%r14d,%r14d
1548 vpshufd $80,%xmm1,%xmm7
1550 shrdl $6,%r13d,%r13d
1552 vpsrld $10,%xmm7,%xmm6
1556 vpsrlq $17,%xmm7,%xmm7
1559 shrdl $2,%r14d,%r14d
1560 vpxor %xmm7,%xmm6,%xmm6
1564 vpsrlq $2,%xmm7,%xmm7
1565 shrdl $14,%r13d,%r13d
1568 vpxor %xmm7,%xmm6,%xmm6
1570 shrdl $9,%r14d,%r14d
1572 vpshufd $232,%xmm6,%xmm6
1573 shrdl $5,%r13d,%r13d
1576 vpslldq $8,%xmm6,%xmm6
1577 vaesenc %xmm10,%xmm9,%xmm9
1578 vmovdqu 112-128(%rdi),%xmm10
1582 vpaddd %xmm6,%xmm1,%xmm1
1583 shrdl $11,%r14d,%r14d
1586 vpaddd 32(%rbp),%xmm1,%xmm6
1587 shrdl $6,%r13d,%r13d
1594 shrdl $2,%r14d,%r14d
1598 vmovdqa %xmm6,16(%rsp)
# --- schedule expansion, quarter 3 (updates xmm2).
1599 vpalignr $4,%xmm2,%xmm3,%xmm4
1600 shrdl $14,%r13d,%r13d
1603 vpalignr $4,%xmm0,%xmm1,%xmm7
1605 shrdl $9,%r14d,%r14d
1607 vpsrld $7,%xmm4,%xmm6
1608 shrdl $5,%r13d,%r13d
1611 vpaddd %xmm7,%xmm2,%xmm2
1612 vaesenc %xmm10,%xmm9,%xmm9
1613 vmovdqu 128-128(%rdi),%xmm10
1617 vpsrld $3,%xmm4,%xmm7
1618 shrdl $11,%r14d,%r14d
1621 vpslld $14,%xmm4,%xmm5
1622 shrdl $6,%r13d,%r13d
1625 vpxor %xmm6,%xmm7,%xmm4
1629 vpshufd $250,%xmm1,%xmm7
1631 shrdl $2,%r14d,%r14d
1633 vpsrld $11,%xmm6,%xmm6
1636 shrdl $14,%r13d,%r13d
1637 vpxor %xmm5,%xmm4,%xmm4
1641 vpslld $11,%xmm5,%xmm5
1642 shrdl $9,%r14d,%r14d
1644 shrdl $5,%r13d,%r13d
1645 vpxor %xmm6,%xmm4,%xmm4
1648 vaesenc %xmm10,%xmm9,%xmm9
1649 vmovdqu 144-128(%rdi),%xmm10
1651 vpsrld $10,%xmm7,%xmm6
1654 shrdl $11,%r14d,%r14d
1655 vpxor %xmm5,%xmm4,%xmm4
1658 shrdl $6,%r13d,%r13d
1659 vpsrlq $17,%xmm7,%xmm7
1663 vpaddd %xmm4,%xmm2,%xmm2
1667 vpxor %xmm7,%xmm6,%xmm6
1668 shrdl $2,%r14d,%r14d
1671 vpsrlq $2,%xmm7,%xmm7
1673 shrdl $14,%r13d,%r13d
1675 vpxor %xmm7,%xmm6,%xmm6
1678 shrdl $9,%r14d,%r14d
1679 vpshufd $132,%xmm6,%xmm6
1681 shrdl $5,%r13d,%r13d
1683 vpsrldq $8,%xmm6,%xmm6
1685 vaesenc %xmm10,%xmm9,%xmm9
1686 vmovdqu 160-128(%rdi),%xmm10
1689 vpaddd %xmm6,%xmm2,%xmm2
1691 shrdl $11,%r14d,%r14d
1693 vpshufd $80,%xmm2,%xmm7
1695 shrdl $6,%r13d,%r13d
1697 vpsrld $10,%xmm7,%xmm6
1701 vpsrlq $17,%xmm7,%xmm7
1704 shrdl $2,%r14d,%r14d
1705 vpxor %xmm7,%xmm6,%xmm6
1709 vpsrlq $2,%xmm7,%xmm7
1710 shrdl $14,%r13d,%r13d
1713 vpxor %xmm7,%xmm6,%xmm6
1715 shrdl $9,%r14d,%r14d
1717 vpshufd $232,%xmm6,%xmm6
1718 shrdl $5,%r13d,%r13d
1721 vpslldq $8,%xmm6,%xmm6
# Speculative AES-128 last round (key +160); masks pick the winner later.
1722 vaesenclast %xmm10,%xmm9,%xmm11
1723 vaesenc %xmm10,%xmm9,%xmm9
1724 vmovdqu 176-128(%rdi),%xmm10
1728 vpaddd %xmm6,%xmm2,%xmm2
1729 shrdl $11,%r14d,%r14d
1732 vpaddd 64(%rbp),%xmm2,%xmm6
1733 shrdl $6,%r13d,%r13d
1740 shrdl $2,%r14d,%r14d
1744 vmovdqa %xmm6,32(%rsp)
# --- schedule expansion, quarter 4 (updates xmm3).
1745 vpalignr $4,%xmm3,%xmm0,%xmm4
1746 shrdl $14,%r13d,%r13d
1749 vpalignr $4,%xmm1,%xmm2,%xmm7
1751 shrdl $9,%r14d,%r14d
1753 vpsrld $7,%xmm4,%xmm6
1754 shrdl $5,%r13d,%r13d
1757 vpaddd %xmm7,%xmm3,%xmm3
1758 vpand %xmm12,%xmm11,%xmm8          # keep AES-128 result iff mask says so
1759 vaesenc %xmm10,%xmm9,%xmm9
1760 vmovdqu 192-128(%rdi),%xmm10
1764 vpsrld $3,%xmm4,%xmm7
1765 shrdl $11,%r14d,%r14d
1768 vpslld $14,%xmm4,%xmm5
1769 shrdl $6,%r13d,%r13d
1772 vpxor %xmm6,%xmm7,%xmm4
1776 vpshufd $250,%xmm2,%xmm7
1778 shrdl $2,%r14d,%r14d
1780 vpsrld $11,%xmm6,%xmm6
1783 shrdl $14,%r13d,%r13d
1784 vpxor %xmm5,%xmm4,%xmm4
1788 vpslld $11,%xmm5,%xmm5
1789 shrdl $9,%r14d,%r14d
1791 shrdl $5,%r13d,%r13d
1792 vpxor %xmm6,%xmm4,%xmm4
1795 vaesenclast %xmm10,%xmm9,%xmm11    # speculative AES-192 last round
1796 vaesenc %xmm10,%xmm9,%xmm9
1797 vmovdqu 208-128(%rdi),%xmm10
1799 vpsrld $10,%xmm7,%xmm6
1802 shrdl $11,%r14d,%r14d
1803 vpxor %xmm5,%xmm4,%xmm4
1806 shrdl $6,%r13d,%r13d
1807 vpsrlq $17,%xmm7,%xmm7
1811 vpaddd %xmm4,%xmm3,%xmm3
1815 vpxor %xmm7,%xmm6,%xmm6
1816 shrdl $2,%r14d,%r14d
1819 vpsrlq $2,%xmm7,%xmm7
1821 shrdl $14,%r13d,%r13d
1823 vpxor %xmm7,%xmm6,%xmm6
1826 shrdl $9,%r14d,%r14d
1827 vpshufd $132,%xmm6,%xmm6
1829 shrdl $5,%r13d,%r13d
1831 vpsrldq $8,%xmm6,%xmm6
1833 vpand %xmm13,%xmm11,%xmm11         # keep AES-192 result iff mask says so
1834 vaesenc %xmm10,%xmm9,%xmm9
1835 vmovdqu 224-128(%rdi),%xmm10
1838 vpaddd %xmm6,%xmm3,%xmm3
1840 shrdl $11,%r14d,%r14d
1842 vpshufd $80,%xmm3,%xmm7
1844 shrdl $6,%r13d,%r13d
1846 vpsrld $10,%xmm7,%xmm6
1850 vpsrlq $17,%xmm7,%xmm7
1853 shrdl $2,%r14d,%r14d
1854 vpxor %xmm7,%xmm6,%xmm6
1858 vpsrlq $2,%xmm7,%xmm7
1859 shrdl $14,%r13d,%r13d
1862 vpxor %xmm7,%xmm6,%xmm6
1864 shrdl $9,%r14d,%r14d
1866 vpshufd $232,%xmm6,%xmm6
1867 shrdl $5,%r13d,%r13d
1870 vpslldq $8,%xmm6,%xmm6
# Merge masked candidates, take the AES-256 last round, and rewind
# xmm10 to the round-0 key.
1871 vpor %xmm11,%xmm8,%xmm8
1872 vaesenclast %xmm10,%xmm9,%xmm11
1873 vmovdqu 0-128(%rdi),%xmm10
1877 vpaddd %xmm6,%xmm3,%xmm3
1878 shrdl $11,%r14d,%r14d
1881 vpaddd 96(%rbp),%xmm3,%xmm6
1882 shrdl $6,%r13d,%r13d
1889 shrdl $2,%r14d,%r14d
1893 vmovdqa %xmm6,48(%rsp)
# Select the correct last-round output and store the ciphertext block.
1894 movq 64+0(%rsp),%r12
1895 vpand %xmm14,%xmm11,%xmm11         # keep AES-256 result iff mask says so
1896 movq 64+8(%rsp),%r15
1897 vpor %xmm11,%xmm8,%xmm8            # xmm8 = final ciphertext block
1898 vmovdqu %xmm8,(%r15,%r12,1)
# --- second pass: consume the expanded schedule (final 16 SHA rounds),
# AES rounds for the next block interleaved, no further expansion.
1902 vmovdqu (%r12),%xmm9
1903 movq %r12,64+0(%rsp)
1904 shrdl $14,%r13d,%r13d
1908 shrdl $9,%r14d,%r14d
1910 shrdl $5,%r13d,%r13d
1913 vpxor %xmm10,%xmm9,%xmm9
1914 vmovdqu 16-128(%rdi),%xmm10
1918 shrdl $11,%r14d,%r14d
1921 shrdl $6,%r13d,%r13d
1928 shrdl $2,%r14d,%r14d
1932 shrdl $14,%r13d,%r13d
1936 shrdl $9,%r14d,%r14d
1938 shrdl $5,%r13d,%r13d
1941 vpxor %xmm8,%xmm9,%xmm9
1945 shrdl $11,%r14d,%r14d
1948 shrdl $6,%r13d,%r13d
1955 shrdl $2,%r14d,%r14d
1959 shrdl $14,%r13d,%r13d
1963 shrdl $9,%r14d,%r14d
1965 shrdl $5,%r13d,%r13d
1968 vaesenc %xmm10,%xmm9,%xmm9
1969 vmovdqu 32-128(%rdi),%xmm10
1973 shrdl $11,%r14d,%r14d
1976 shrdl $6,%r13d,%r13d
1983 shrdl $2,%r14d,%r14d
1987 shrdl $14,%r13d,%r13d
1991 shrdl $9,%r14d,%r14d
1993 shrdl $5,%r13d,%r13d
1996 vaesenc %xmm10,%xmm9,%xmm9
1997 vmovdqu 48-128(%rdi),%xmm10
2001 shrdl $11,%r14d,%r14d
2004 shrdl $6,%r13d,%r13d
2011 shrdl $2,%r14d,%r14d
2015 shrdl $14,%r13d,%r13d
2019 shrdl $9,%r14d,%r14d
2021 shrdl $5,%r13d,%r13d
2024 vaesenc %xmm10,%xmm9,%xmm9
2025 vmovdqu 64-128(%rdi),%xmm10
2029 shrdl $11,%r14d,%r14d
2032 shrdl $6,%r13d,%r13d
2039 shrdl $2,%r14d,%r14d
2043 shrdl $14,%r13d,%r13d
2047 shrdl $9,%r14d,%r14d
2049 shrdl $5,%r13d,%r13d
2052 vaesenc %xmm10,%xmm9,%xmm9
2053 vmovdqu 80-128(%rdi),%xmm10
2057 shrdl $11,%r14d,%r14d
2060 shrdl $6,%r13d,%r13d
2067 shrdl $2,%r14d,%r14d
2071 shrdl $14,%r13d,%r13d
2075 shrdl $9,%r14d,%r14d
2077 shrdl $5,%r13d,%r13d
2080 vaesenc %xmm10,%xmm9,%xmm9
2081 vmovdqu 96-128(%rdi),%xmm10
2085 shrdl $11,%r14d,%r14d
2088 shrdl $6,%r13d,%r13d
2095 shrdl $2,%r14d,%r14d
2099 shrdl $14,%r13d,%r13d
2103 shrdl $9,%r14d,%r14d
2105 shrdl $5,%r13d,%r13d
2108 vaesenc %xmm10,%xmm9,%xmm9
2109 vmovdqu 112-128(%rdi),%xmm10
2113 shrdl $11,%r14d,%r14d
2116 shrdl $6,%r13d,%r13d
2123 shrdl $2,%r14d,%r14d
2127 shrdl $14,%r13d,%r13d
2131 shrdl $9,%r14d,%r14d
2133 shrdl $5,%r13d,%r13d
2136 vaesenc %xmm10,%xmm9,%xmm9
2137 vmovdqu 128-128(%rdi),%xmm10
2141 shrdl $11,%r14d,%r14d
2144 shrdl $6,%r13d,%r13d
2151 shrdl $2,%r14d,%r14d
2155 shrdl $14,%r13d,%r13d
2159 shrdl $9,%r14d,%r14d
2161 shrdl $5,%r13d,%r13d
2164 vaesenc %xmm10,%xmm9,%xmm9
2165 vmovdqu 144-128(%rdi),%xmm10
2169 shrdl $11,%r14d,%r14d
2172 shrdl $6,%r13d,%r13d
2179 shrdl $2,%r14d,%r14d
2183 shrdl $14,%r13d,%r13d
2187 shrdl $9,%r14d,%r14d
2189 shrdl $5,%r13d,%r13d
2192 vaesenc %xmm10,%xmm9,%xmm9
2193 vmovdqu 160-128(%rdi),%xmm10
2197 shrdl $11,%r14d,%r14d
2200 shrdl $6,%r13d,%r13d
2207 shrdl $2,%r14d,%r14d
2211 shrdl $14,%r13d,%r13d
2215 shrdl $9,%r14d,%r14d
2217 shrdl $5,%r13d,%r13d
2220 vaesenclast %xmm10,%xmm9,%xmm11
2221 vaesenc %xmm10,%xmm9,%xmm9
2222 vmovdqu 176-128(%rdi),%xmm10
2226 shrdl $11,%r14d,%r14d
2229 shrdl $6,%r13d,%r13d
2236 shrdl $2,%r14d,%r14d
2240 shrdl $14,%r13d,%r13d
2244 shrdl $9,%r14d,%r14d
2246 shrdl $5,%r13d,%r13d
2249 vpand %xmm12,%xmm11,%xmm8
2250 vaesenc %xmm10,%xmm9,%xmm9
2251 vmovdqu 192-128(%rdi),%xmm10
2255 shrdl $11,%r14d,%r14d
2258 shrdl $6,%r13d,%r13d
2265 shrdl $2,%r14d,%r14d
2269 shrdl $14,%r13d,%r13d
2273 shrdl $9,%r14d,%r14d
2275 shrdl $5,%r13d,%r13d
2278 vaesenclast %xmm10,%xmm9,%xmm11
2279 vaesenc %xmm10,%xmm9,%xmm9
2280 vmovdqu 208-128(%rdi),%xmm10
2284 shrdl $11,%r14d,%r14d
2287 shrdl $6,%r13d,%r13d
2294 shrdl $2,%r14d,%r14d
2298 shrdl $14,%r13d,%r13d
2302 shrdl $9,%r14d,%r14d
2304 shrdl $5,%r13d,%r13d
2307 vpand %xmm13,%xmm11,%xmm11
2308 vaesenc %xmm10,%xmm9,%xmm9
2309 vmovdqu 224-128(%rdi),%xmm10
2313 shrdl $11,%r14d,%r14d
2316 shrdl $6,%r13d,%r13d
2323 shrdl $2,%r14d,%r14d
2327 shrdl $14,%r13d,%r13d
2331 shrdl $9,%r14d,%r14d
2333 shrdl $5,%r13d,%r13d
2336 vpor %xmm11,%xmm8,%xmm8
2337 vaesenclast %xmm10,%xmm9,%xmm11
2338 vmovdqu 0-128(%rdi),%xmm10
2342 shrdl $11,%r14d,%r14d
2345 shrdl $6,%r13d,%r13d
2352 shrdl $2,%r14d,%r14d
# Reload saved pointers, store the ciphertext block, and check the loop
# termination condition against the saved end pointer.
2356 movq 64+0(%rsp),%r12
2357 movq 64+8(%rsp),%r13
2358 movq 64+40(%rsp),%r15
2359 movq 64+48(%rsp),%rsi
2361 vpand %xmm14,%xmm11,%xmm11
2363 vpor %xmm11,%xmm8,%xmm8
2364 vmovdqu %xmm8,(%r12,%r13,1)
2376 cmpq 64+16(%rsp),%r12
2388 movq 64+32(%rsp),%r8
2389 movq 64+56(%rsp),%rsi
2401 .size aesni_cbc_sha256_enc_avx,.-aesni_cbc_sha256_enc_avx
2402 .type aesni_cbc_sha256_enc_avx2,@function
2404 aesni_cbc_sha256_enc_avx2:
2425 movq %rdx,64+16(%rsp)
2427 movq %r8,64+32(%rsp)
2428 movq %r9,64+40(%rsp)
2429 movq %r10,64+48(%rsp)
2430 movq %r11,64+56(%rsp)
2435 vpinsrq $1,%rsi,%xmm15,%xmm15
2437 leaq K256+544(%rip),%r12
2438 movl 240-128(%rdi),%r14d
2444 vmovdqa 0(%r12,%r14,8),%xmm14
2445 vmovdqa 16(%r12,%r14,8),%xmm13
2446 vmovdqa 32(%r12,%r14,8),%xmm12
2450 leaq (%rsi,%r13,1),%r12
2460 vmovdqu 0-128(%rdi),%xmm10
2464 vmovdqa K256+512(%rip),%ymm7
2465 vmovdqu -64+0(%rsi,%r13,1),%xmm0
2466 vmovdqu -64+16(%rsi,%r13,1),%xmm1
2467 vmovdqu -64+32(%rsi,%r13,1),%xmm2
2468 vmovdqu -64+48(%rsi,%r13,1),%xmm3
2470 vinserti128 $1,(%r12),%ymm0,%ymm0
2471 vinserti128 $1,16(%r12),%ymm1,%ymm1
2472 vpshufb %ymm7,%ymm0,%ymm0
2473 vinserti128 $1,32(%r12),%ymm2,%ymm2
2474 vpshufb %ymm7,%ymm1,%ymm1
2475 vinserti128 $1,48(%r12),%ymm3,%ymm3
2477 leaq K256(%rip),%rbp
2478 vpshufb %ymm7,%ymm2,%ymm2
2480 vpaddd 0(%rbp),%ymm0,%ymm4
2481 vpshufb %ymm7,%ymm3,%ymm3
2482 vpaddd 32(%rbp),%ymm1,%ymm5
2483 vpaddd 64(%rbp),%ymm2,%ymm6
2484 vpaddd 96(%rbp),%ymm3,%ymm7
2485 vmovdqa %ymm4,0(%rsp)
2487 vmovdqa %ymm5,32(%rsp)
2490 vmovdqa %ymm6,0(%rsp)
2492 vmovdqa %ymm7,32(%rsp)
2499 vmovdqu (%r13),%xmm9
2500 vpinsrq $0,%r13,%xmm15,%xmm15
2502 vpalignr $4,%ymm0,%ymm1,%ymm4
2503 addl 0+128(%rsp),%r11d
2505 rorxl $25,%r8d,%r13d
2506 vpalignr $4,%ymm2,%ymm3,%ymm7
2507 rorxl $11,%r8d,%r15d
2508 leal (%rax,%r14,1),%eax
2509 leal (%r11,%r12,1),%r11d
2510 vpsrld $7,%ymm4,%ymm6
2511 andnl %r10d,%r8d,%r12d
2514 vpaddd %ymm7,%ymm0,%ymm0
2515 leal (%r11,%r12,1),%r11d
2518 vpsrld $3,%ymm4,%ymm7
2519 rorxl $22,%eax,%r12d
2520 leal (%r11,%r13,1),%r11d
2522 vpslld $14,%ymm4,%ymm5
2523 rorxl $13,%eax,%r14d
2525 leal (%rdx,%r11,1),%edx
2526 vpxor %ymm6,%ymm7,%ymm4
2528 vpxor %xmm10,%xmm9,%xmm9
2529 vmovdqu 16-128(%rdi),%xmm10
2532 vpshufd $250,%ymm3,%ymm7
2534 leal (%r11,%rsi,1),%r11d
2536 vpsrld $11,%ymm6,%ymm6
2537 addl 4+128(%rsp),%r10d
2539 rorxl $25,%edx,%r13d
2540 vpxor %ymm5,%ymm4,%ymm4
2542 leal (%r11,%r14,1),%r11d
2543 leal (%r10,%r12,1),%r10d
2544 vpslld $11,%ymm5,%ymm5
2545 andnl %r9d,%edx,%r12d
2548 vpxor %ymm6,%ymm4,%ymm4
2549 leal (%r10,%r12,1),%r10d
2552 vpsrld $10,%ymm7,%ymm6
2553 rorxl $22,%r11d,%r12d
2554 leal (%r10,%r13,1),%r10d
2556 vpxor %ymm5,%ymm4,%ymm4
2557 rorxl $13,%r11d,%r14d
2558 rorxl $2,%r11d,%r13d
2559 leal (%rcx,%r10,1),%ecx
2560 vpsrlq $17,%ymm7,%ymm7
2562 vpxor %xmm8,%xmm9,%xmm9
2565 vpaddd %ymm4,%ymm0,%ymm0
2567 leal (%r10,%r15,1),%r10d
2569 vpxor %ymm7,%ymm6,%ymm6
2570 addl 8+128(%rsp),%r9d
2572 rorxl $25,%ecx,%r13d
2573 vpsrlq $2,%ymm7,%ymm7
2574 rorxl $11,%ecx,%r15d
2575 leal (%r10,%r14,1),%r10d
2576 leal (%r9,%r12,1),%r9d
2577 vpxor %ymm7,%ymm6,%ymm6
2578 andnl %r8d,%ecx,%r12d
2581 vpshufd $132,%ymm6,%ymm6
2582 leal (%r9,%r12,1),%r9d
2585 vpsrldq $8,%ymm6,%ymm6
2586 rorxl $22,%r10d,%r12d
2587 leal (%r9,%r13,1),%r9d
2589 vpaddd %ymm6,%ymm0,%ymm0
2590 rorxl $13,%r10d,%r14d
2591 rorxl $2,%r10d,%r13d
2592 leal (%rbx,%r9,1),%ebx
2593 vpshufd $80,%ymm0,%ymm7
2595 vaesenc %xmm10,%xmm9,%xmm9
2596 vmovdqu 32-128(%rdi),%xmm10
2599 vpsrld $10,%ymm7,%ymm6
2601 leal (%r9,%rsi,1),%r9d
2603 vpsrlq $17,%ymm7,%ymm7
2604 addl 12+128(%rsp),%r8d
2606 rorxl $25,%ebx,%r13d
2607 vpxor %ymm7,%ymm6,%ymm6
2609 leal (%r9,%r14,1),%r9d
2610 leal (%r8,%r12,1),%r8d
2611 vpsrlq $2,%ymm7,%ymm7
2612 andnl %edx,%ebx,%r12d
2615 vpxor %ymm7,%ymm6,%ymm6
2616 leal (%r8,%r12,1),%r8d
2619 vpshufd $232,%ymm6,%ymm6
2620 rorxl $22,%r9d,%r12d
2621 leal (%r8,%r13,1),%r8d
2623 vpslldq $8,%ymm6,%ymm6
2624 rorxl $13,%r9d,%r14d
2626 leal (%rax,%r8,1),%eax
2627 vpaddd %ymm6,%ymm0,%ymm0
2629 vaesenc %xmm10,%xmm9,%xmm9
2630 vmovdqu 48-128(%rdi),%xmm10
2633 vpaddd 0(%rbp),%ymm0,%ymm6
2635 leal (%r8,%r15,1),%r8d
2637 vmovdqa %ymm6,0(%rsp)
2638 vpalignr $4,%ymm1,%ymm2,%ymm4
2639 addl 32+128(%rsp),%edx
2641 rorxl $25,%eax,%r13d
2642 vpalignr $4,%ymm3,%ymm0,%ymm7
2643 rorxl $11,%eax,%r15d
2644 leal (%r8,%r14,1),%r8d
2645 leal (%rdx,%r12,1),%edx
2646 vpsrld $7,%ymm4,%ymm6
2647 andnl %ecx,%eax,%r12d
2650 vpaddd %ymm7,%ymm1,%ymm1
2651 leal (%rdx,%r12,1),%edx
2654 vpsrld $3,%ymm4,%ymm7
2655 rorxl $22,%r8d,%r12d
2656 leal (%rdx,%r13,1),%edx
2658 vpslld $14,%ymm4,%ymm5
2659 rorxl $13,%r8d,%r14d
2661 leal (%r11,%rdx,1),%r11d
2662 vpxor %ymm6,%ymm7,%ymm4
2664 vaesenc %xmm10,%xmm9,%xmm9
2665 vmovdqu 64-128(%rdi),%xmm10
2668 vpshufd $250,%ymm0,%ymm7
2670 leal (%rdx,%rsi,1),%edx
2672 vpsrld $11,%ymm6,%ymm6
2673 addl 36+128(%rsp),%ecx
2675 rorxl $25,%r11d,%r13d
2676 vpxor %ymm5,%ymm4,%ymm4
2677 rorxl $11,%r11d,%esi
2678 leal (%rdx,%r14,1),%edx
2679 leal (%rcx,%r12,1),%ecx
2680 vpslld $11,%ymm5,%ymm5
2681 andnl %ebx,%r11d,%r12d
2683 rorxl $6,%r11d,%r14d
2684 vpxor %ymm6,%ymm4,%ymm4
2685 leal (%rcx,%r12,1),%ecx
2688 vpsrld $10,%ymm7,%ymm6
2689 rorxl $22,%edx,%r12d
2690 leal (%rcx,%r13,1),%ecx
2692 vpxor %ymm5,%ymm4,%ymm4
2693 rorxl $13,%edx,%r14d
2695 leal (%r10,%rcx,1),%r10d
2696 vpsrlq $17,%ymm7,%ymm7
2698 vaesenc %xmm10,%xmm9,%xmm9
2699 vmovdqu 80-128(%rdi),%xmm10
2702 vpaddd %ymm4,%ymm1,%ymm1
2704 leal (%rcx,%r15,1),%ecx
2706 vpxor %ymm7,%ymm6,%ymm6
2707 addl 40+128(%rsp),%ebx
2709 rorxl $25,%r10d,%r13d
2710 vpsrlq $2,%ymm7,%ymm7
2711 rorxl $11,%r10d,%r15d
2712 leal (%rcx,%r14,1),%ecx
2713 leal (%rbx,%r12,1),%ebx
2714 vpxor %ymm7,%ymm6,%ymm6
2715 andnl %eax,%r10d,%r12d
2717 rorxl $6,%r10d,%r14d
2718 vpshufd $132,%ymm6,%ymm6
2719 leal (%rbx,%r12,1),%ebx
2722 vpsrldq $8,%ymm6,%ymm6
2723 rorxl $22,%ecx,%r12d
2724 leal (%rbx,%r13,1),%ebx
2726 vpaddd %ymm6,%ymm1,%ymm1
2727 rorxl $13,%ecx,%r14d
2729 leal (%r9,%rbx,1),%r9d
2730 vpshufd $80,%ymm1,%ymm7
2732 vaesenc %xmm10,%xmm9,%xmm9
2733 vmovdqu 96-128(%rdi),%xmm10
2736 vpsrld $10,%ymm7,%ymm6
2738 leal (%rbx,%rsi,1),%ebx
2740 vpsrlq $17,%ymm7,%ymm7
2741 addl 44+128(%rsp),%eax
2743 rorxl $25,%r9d,%r13d
2744 vpxor %ymm7,%ymm6,%ymm6
2746 leal (%rbx,%r14,1),%ebx
2747 leal (%rax,%r12,1),%eax
2748 vpsrlq $2,%ymm7,%ymm7
2749 andnl %r11d,%r9d,%r12d
2752 vpxor %ymm7,%ymm6,%ymm6
2753 leal (%rax,%r12,1),%eax
2756 vpshufd $232,%ymm6,%ymm6
2757 rorxl $22,%ebx,%r12d
2758 leal (%rax,%r13,1),%eax
2760 vpslldq $8,%ymm6,%ymm6
2761 rorxl $13,%ebx,%r14d
2763 leal (%r8,%rax,1),%r8d
2764 vpaddd %ymm6,%ymm1,%ymm1
2766 vaesenc %xmm10,%xmm9,%xmm9
2767 vmovdqu 112-128(%rdi),%xmm10
2770 vpaddd 32(%rbp),%ymm1,%ymm6
2772 leal (%rax,%r15,1),%eax
2774 vmovdqa %ymm6,32(%rsp)
2776 vpalignr $4,%ymm2,%ymm3,%ymm4
2777 addl 0+128(%rsp),%r11d
2779 rorxl $25,%r8d,%r13d
2780 vpalignr $4,%ymm0,%ymm1,%ymm7
2781 rorxl $11,%r8d,%r15d
2782 leal (%rax,%r14,1),%eax
2783 leal (%r11,%r12,1),%r11d
2784 vpsrld $7,%ymm4,%ymm6
2785 andnl %r10d,%r8d,%r12d
2788 vpaddd %ymm7,%ymm2,%ymm2
2789 leal (%r11,%r12,1),%r11d
2792 vpsrld $3,%ymm4,%ymm7
2793 rorxl $22,%eax,%r12d
2794 leal (%r11,%r13,1),%r11d
2796 vpslld $14,%ymm4,%ymm5
2797 rorxl $13,%eax,%r14d
2799 leal (%rdx,%r11,1),%edx
2800 vpxor %ymm6,%ymm7,%ymm4
2802 vaesenc %xmm10,%xmm9,%xmm9
2803 vmovdqu 128-128(%rdi),%xmm10
2806 vpshufd $250,%ymm1,%ymm7
2808 leal (%r11,%rsi,1),%r11d
2810 vpsrld $11,%ymm6,%ymm6
2811 addl 4+128(%rsp),%r10d
2813 rorxl $25,%edx,%r13d
2814 vpxor %ymm5,%ymm4,%ymm4
2816 leal (%r11,%r14,1),%r11d
2817 leal (%r10,%r12,1),%r10d
2818 vpslld $11,%ymm5,%ymm5
2819 andnl %r9d,%edx,%r12d
2822 vpxor %ymm6,%ymm4,%ymm4
2823 leal (%r10,%r12,1),%r10d
2826 vpsrld $10,%ymm7,%ymm6
2827 rorxl $22,%r11d,%r12d
2828 leal (%r10,%r13,1),%r10d
2830 vpxor %ymm5,%ymm4,%ymm4
2831 rorxl $13,%r11d,%r14d
2832 rorxl $2,%r11d,%r13d
2833 leal (%rcx,%r10,1),%ecx
2834 vpsrlq $17,%ymm7,%ymm7
2836 vaesenc %xmm10,%xmm9,%xmm9
2837 vmovdqu 144-128(%rdi),%xmm10
2840 vpaddd %ymm4,%ymm2,%ymm2
2842 leal (%r10,%r15,1),%r10d
2844 vpxor %ymm7,%ymm6,%ymm6
2845 addl 8+128(%rsp),%r9d
2847 rorxl $25,%ecx,%r13d
2848 vpsrlq $2,%ymm7,%ymm7
2849 rorxl $11,%ecx,%r15d
2850 leal (%r10,%r14,1),%r10d
2851 leal (%r9,%r12,1),%r9d
2852 vpxor %ymm7,%ymm6,%ymm6
2853 andnl %r8d,%ecx,%r12d
2856 vpshufd $132,%ymm6,%ymm6
2857 leal (%r9,%r12,1),%r9d
2860 vpsrldq $8,%ymm6,%ymm6
2861 rorxl $22,%r10d,%r12d
2862 leal (%r9,%r13,1),%r9d
2864 vpaddd %ymm6,%ymm2,%ymm2
2865 rorxl $13,%r10d,%r14d
2866 rorxl $2,%r10d,%r13d
2867 leal (%rbx,%r9,1),%ebx
2868 vpshufd $80,%ymm2,%ymm7
2870 vaesenc %xmm10,%xmm9,%xmm9
2871 vmovdqu 160-128(%rdi),%xmm10
2874 vpsrld $10,%ymm7,%ymm6
2876 leal (%r9,%rsi,1),%r9d
2878 vpsrlq $17,%ymm7,%ymm7
2879 addl 12+128(%rsp),%r8d
2881 rorxl $25,%ebx,%r13d
2882 vpxor %ymm7,%ymm6,%ymm6
2884 leal (%r9,%r14,1),%r9d
2885 leal (%r8,%r12,1),%r8d
2886 vpsrlq $2,%ymm7,%ymm7
2887 andnl %edx,%ebx,%r12d
2890 vpxor %ymm7,%ymm6,%ymm6
2891 leal (%r8,%r12,1),%r8d
2894 vpshufd $232,%ymm6,%ymm6
2895 rorxl $22,%r9d,%r12d
2896 leal (%r8,%r13,1),%r8d
2898 vpslldq $8,%ymm6,%ymm6
2899 rorxl $13,%r9d,%r14d
2901 leal (%rax,%r8,1),%eax
2902 vpaddd %ymm6,%ymm2,%ymm2
2904 vaesenclast %xmm10,%xmm9,%xmm11
2905 vaesenc %xmm10,%xmm9,%xmm9
2906 vmovdqu 176-128(%rdi),%xmm10
2909 vpaddd 64(%rbp),%ymm2,%ymm6
2911 leal (%r8,%r15,1),%r8d
2913 vmovdqa %ymm6,0(%rsp)
2914 vpalignr $4,%ymm3,%ymm0,%ymm4
2915 addl 32+128(%rsp),%edx
2917 rorxl $25,%eax,%r13d
2918 vpalignr $4,%ymm1,%ymm2,%ymm7
2919 rorxl $11,%eax,%r15d
2920 leal (%r8,%r14,1),%r8d
2921 leal (%rdx,%r12,1),%edx
2922 vpsrld $7,%ymm4,%ymm6
2923 andnl %ecx,%eax,%r12d
2926 vpaddd %ymm7,%ymm3,%ymm3
2927 leal (%rdx,%r12,1),%edx
2930 vpsrld $3,%ymm4,%ymm7
2931 rorxl $22,%r8d,%r12d
2932 leal (%rdx,%r13,1),%edx
2934 vpslld $14,%ymm4,%ymm5
2935 rorxl $13,%r8d,%r14d
2937 leal (%r11,%rdx,1),%r11d
2938 vpxor %ymm6,%ymm7,%ymm4
2940 vpand %xmm12,%xmm11,%xmm8
2941 vaesenc %xmm10,%xmm9,%xmm9
2942 vmovdqu 192-128(%rdi),%xmm10
2945 vpshufd $250,%ymm2,%ymm7
2947 leal (%rdx,%rsi,1),%edx
2949 vpsrld $11,%ymm6,%ymm6
2950 addl 36+128(%rsp),%ecx
2952 rorxl $25,%r11d,%r13d
2953 vpxor %ymm5,%ymm4,%ymm4
2954 rorxl $11,%r11d,%esi
2955 leal (%rdx,%r14,1),%edx
2956 leal (%rcx,%r12,1),%ecx
2957 vpslld $11,%ymm5,%ymm5
2958 andnl %ebx,%r11d,%r12d
2960 rorxl $6,%r11d,%r14d
2961 vpxor %ymm6,%ymm4,%ymm4
2962 leal (%rcx,%r12,1),%ecx
2965 vpsrld $10,%ymm7,%ymm6
2966 rorxl $22,%edx,%r12d
2967 leal (%rcx,%r13,1),%ecx
2969 vpxor %ymm5,%ymm4,%ymm4
2970 rorxl $13,%edx,%r14d
2972 leal (%r10,%rcx,1),%r10d
2973 vpsrlq $17,%ymm7,%ymm7
2975 vaesenclast %xmm10,%xmm9,%xmm11
2976 vaesenc %xmm10,%xmm9,%xmm9
2977 vmovdqu 208-128(%rdi),%xmm10
2980 vpaddd %ymm4,%ymm3,%ymm3
2982 leal (%rcx,%r15,1),%ecx
2984 vpxor %ymm7,%ymm6,%ymm6
2985 addl 40+128(%rsp),%ebx
2987 rorxl $25,%r10d,%r13d
2988 vpsrlq $2,%ymm7,%ymm7
2989 rorxl $11,%r10d,%r15d
2990 leal (%rcx,%r14,1),%ecx
2991 leal (%rbx,%r12,1),%ebx
2992 vpxor %ymm7,%ymm6,%ymm6
2993 andnl %eax,%r10d,%r12d
2995 rorxl $6,%r10d,%r14d
2996 vpshufd $132,%ymm6,%ymm6
2997 leal (%rbx,%r12,1),%ebx
3000 vpsrldq $8,%ymm6,%ymm6
3001 rorxl $22,%ecx,%r12d
3002 leal (%rbx,%r13,1),%ebx
3004 vpaddd %ymm6,%ymm3,%ymm3
3005 rorxl $13,%ecx,%r14d
3007 leal (%r9,%rbx,1),%r9d
3008 vpshufd $80,%ymm3,%ymm7
3010 vpand %xmm13,%xmm11,%xmm11
3011 vaesenc %xmm10,%xmm9,%xmm9
3012 vmovdqu 224-128(%rdi),%xmm10
3015 vpsrld $10,%ymm7,%ymm6
3017 leal (%rbx,%rsi,1),%ebx
3019 vpsrlq $17,%ymm7,%ymm7
3020 addl 44+128(%rsp),%eax
3022 rorxl $25,%r9d,%r13d
3023 vpxor %ymm7,%ymm6,%ymm6
3025 leal (%rbx,%r14,1),%ebx
3026 leal (%rax,%r12,1),%eax
3027 vpsrlq $2,%ymm7,%ymm7
3028 andnl %r11d,%r9d,%r12d
3031 vpxor %ymm7,%ymm6,%ymm6
3032 leal (%rax,%r12,1),%eax
3035 vpshufd $232,%ymm6,%ymm6
3036 rorxl $22,%ebx,%r12d
3037 leal (%rax,%r13,1),%eax
3039 vpslldq $8,%ymm6,%ymm6
3040 rorxl $13,%ebx,%r14d
3042 leal (%r8,%rax,1),%r8d
3043 vpaddd %ymm6,%ymm3,%ymm3
3045 vpor %xmm11,%xmm8,%xmm8
3046 vaesenclast %xmm10,%xmm9,%xmm11
3047 vmovdqu 0-128(%rdi),%xmm10
3050 vpaddd 96(%rbp),%ymm3,%ymm6
3052 leal (%rax,%r15,1),%eax
3054 vmovdqa %ymm6,32(%rsp)
3056 vpextrq $1,%xmm15,%r15
3057 vpand %xmm14,%xmm11,%xmm11
3058 vpor %xmm11,%xmm8,%xmm8
3059 vmovdqu %xmm8,(%r15,%r13,1)
3064 vmovdqu (%r13),%xmm9
3065 vpinsrq $0,%r13,%xmm15,%xmm15
3066 addl 0+64(%rsp),%r11d
3068 rorxl $25,%r8d,%r13d
3069 rorxl $11,%r8d,%r15d
3070 leal (%rax,%r14,1),%eax
3071 leal (%r11,%r12,1),%r11d
3072 andnl %r10d,%r8d,%r12d
3075 leal (%r11,%r12,1),%r11d
3078 rorxl $22,%eax,%r12d
3079 leal (%r11,%r13,1),%r11d
3081 rorxl $13,%eax,%r14d
3083 leal (%rdx,%r11,1),%edx
3085 vpxor %xmm10,%xmm9,%xmm9
3086 vmovdqu 16-128(%rdi),%xmm10
3090 leal (%r11,%rsi,1),%r11d
3092 addl 4+64(%rsp),%r10d
3094 rorxl $25,%edx,%r13d
3096 leal (%r11,%r14,1),%r11d
3097 leal (%r10,%r12,1),%r10d
3098 andnl %r9d,%edx,%r12d
3101 leal (%r10,%r12,1),%r10d
3104 rorxl $22,%r11d,%r12d
3105 leal (%r10,%r13,1),%r10d
3107 rorxl $13,%r11d,%r14d
3108 rorxl $2,%r11d,%r13d
3109 leal (%rcx,%r10,1),%ecx
3111 vpxor %xmm8,%xmm9,%xmm9
3115 leal (%r10,%r15,1),%r10d
3117 addl 8+64(%rsp),%r9d
3119 rorxl $25,%ecx,%r13d
3120 rorxl $11,%ecx,%r15d
3121 leal (%r10,%r14,1),%r10d
3122 leal (%r9,%r12,1),%r9d
3123 andnl %r8d,%ecx,%r12d
3126 leal (%r9,%r12,1),%r9d
3129 rorxl $22,%r10d,%r12d
3130 leal (%r9,%r13,1),%r9d
3132 rorxl $13,%r10d,%r14d
3133 rorxl $2,%r10d,%r13d
3134 leal (%rbx,%r9,1),%ebx
3136 vaesenc %xmm10,%xmm9,%xmm9
3137 vmovdqu 32-128(%rdi),%xmm10
3141 leal (%r9,%rsi,1),%r9d
3143 addl 12+64(%rsp),%r8d
3145 rorxl $25,%ebx,%r13d
3147 leal (%r9,%r14,1),%r9d
3148 leal (%r8,%r12,1),%r8d
3149 andnl %edx,%ebx,%r12d
3152 leal (%r8,%r12,1),%r8d
3155 rorxl $22,%r9d,%r12d
3156 leal (%r8,%r13,1),%r8d
3158 rorxl $13,%r9d,%r14d
3160 leal (%rax,%r8,1),%eax
3162 vaesenc %xmm10,%xmm9,%xmm9
3163 vmovdqu 48-128(%rdi),%xmm10
3167 leal (%r8,%r15,1),%r8d
3169 addl 32+64(%rsp),%edx
3171 rorxl $25,%eax,%r13d
3172 rorxl $11,%eax,%r15d
3173 leal (%r8,%r14,1),%r8d
3174 leal (%rdx,%r12,1),%edx
3175 andnl %ecx,%eax,%r12d
3178 leal (%rdx,%r12,1),%edx
3181 rorxl $22,%r8d,%r12d
3182 leal (%rdx,%r13,1),%edx
3184 rorxl $13,%r8d,%r14d
3186 leal (%r11,%rdx,1),%r11d
3188 vaesenc %xmm10,%xmm9,%xmm9
3189 vmovdqu 64-128(%rdi),%xmm10
3193 leal (%rdx,%rsi,1),%edx
3195 addl 36+64(%rsp),%ecx
3197 rorxl $25,%r11d,%r13d
3198 rorxl $11,%r11d,%esi
3199 leal (%rdx,%r14,1),%edx
3200 leal (%rcx,%r12,1),%ecx
3201 andnl %ebx,%r11d,%r12d
3203 rorxl $6,%r11d,%r14d
3204 leal (%rcx,%r12,1),%ecx
3207 rorxl $22,%edx,%r12d
3208 leal (%rcx,%r13,1),%ecx
3210 rorxl $13,%edx,%r14d
3212 leal (%r10,%rcx,1),%r10d
3214 vaesenc %xmm10,%xmm9,%xmm9
3215 vmovdqu 80-128(%rdi),%xmm10
3219 leal (%rcx,%r15,1),%ecx
3221 addl 40+64(%rsp),%ebx
3223 rorxl $25,%r10d,%r13d
3224 rorxl $11,%r10d,%r15d
3225 leal (%rcx,%r14,1),%ecx
3226 leal (%rbx,%r12,1),%ebx
3227 andnl %eax,%r10d,%r12d
3229 rorxl $6,%r10d,%r14d
3230 leal (%rbx,%r12,1),%ebx
3233 rorxl $22,%ecx,%r12d
3234 leal (%rbx,%r13,1),%ebx
3236 rorxl $13,%ecx,%r14d
3238 leal (%r9,%rbx,1),%r9d
3240 vaesenc %xmm10,%xmm9,%xmm9
3241 vmovdqu 96-128(%rdi),%xmm10
3245 leal (%rbx,%rsi,1),%ebx
3247 addl 44+64(%rsp),%eax
3249 rorxl $25,%r9d,%r13d
3251 leal (%rbx,%r14,1),%ebx
3252 leal (%rax,%r12,1),%eax
3253 andnl %r11d,%r9d,%r12d
3256 leal (%rax,%r12,1),%eax
3259 rorxl $22,%ebx,%r12d
3260 leal (%rax,%r13,1),%eax
3262 rorxl $13,%ebx,%r14d
3264 leal (%r8,%rax,1),%r8d
3266 vaesenc %xmm10,%xmm9,%xmm9
3267 vmovdqu 112-128(%rdi),%xmm10
3271 leal (%rax,%r15,1),%eax
3275 rorxl $25,%r8d,%r13d
3276 rorxl $11,%r8d,%r15d
3277 leal (%rax,%r14,1),%eax
3278 leal (%r11,%r12,1),%r11d
3279 andnl %r10d,%r8d,%r12d
3282 leal (%r11,%r12,1),%r11d
3285 rorxl $22,%eax,%r12d
3286 leal (%r11,%r13,1),%r11d
3288 rorxl $13,%eax,%r14d
3290 leal (%rdx,%r11,1),%edx
3292 vaesenc %xmm10,%xmm9,%xmm9
3293 vmovdqu 128-128(%rdi),%xmm10
3297 leal (%r11,%rsi,1),%r11d
3301 rorxl $25,%edx,%r13d
3303 leal (%r11,%r14,1),%r11d
3304 leal (%r10,%r12,1),%r10d
3305 andnl %r9d,%edx,%r12d
3308 leal (%r10,%r12,1),%r10d
3311 rorxl $22,%r11d,%r12d
3312 leal (%r10,%r13,1),%r10d
3314 rorxl $13,%r11d,%r14d
3315 rorxl $2,%r11d,%r13d
3316 leal (%rcx,%r10,1),%ecx
3318 vaesenc %xmm10,%xmm9,%xmm9
3319 vmovdqu 144-128(%rdi),%xmm10
3323 leal (%r10,%r15,1),%r10d
3327 rorxl $25,%ecx,%r13d
3328 rorxl $11,%ecx,%r15d
3329 leal (%r10,%r14,1),%r10d
3330 leal (%r9,%r12,1),%r9d
3331 andnl %r8d,%ecx,%r12d
3334 leal (%r9,%r12,1),%r9d
3337 rorxl $22,%r10d,%r12d
3338 leal (%r9,%r13,1),%r9d
3340 rorxl $13,%r10d,%r14d
3341 rorxl $2,%r10d,%r13d
3342 leal (%rbx,%r9,1),%ebx
3344 vaesenc %xmm10,%xmm9,%xmm9
3345 vmovdqu 160-128(%rdi),%xmm10
3349 leal (%r9,%rsi,1),%r9d
3353 rorxl $25,%ebx,%r13d
3355 leal (%r9,%r14,1),%r9d
3356 leal (%r8,%r12,1),%r8d
3357 andnl %edx,%ebx,%r12d
3360 leal (%r8,%r12,1),%r8d
3363 rorxl $22,%r9d,%r12d
3364 leal (%r8,%r13,1),%r8d
3366 rorxl $13,%r9d,%r14d
3368 leal (%rax,%r8,1),%eax
3370 vaesenclast %xmm10,%xmm9,%xmm11
3371 vaesenc %xmm10,%xmm9,%xmm9
3372 vmovdqu 176-128(%rdi),%xmm10
3376 leal (%r8,%r15,1),%r8d
3380 rorxl $25,%eax,%r13d
3381 rorxl $11,%eax,%r15d
3382 leal (%r8,%r14,1),%r8d
3383 leal (%rdx,%r12,1),%edx
3384 andnl %ecx,%eax,%r12d
3387 leal (%rdx,%r12,1),%edx
3390 rorxl $22,%r8d,%r12d
3391 leal (%rdx,%r13,1),%edx
3393 rorxl $13,%r8d,%r14d
3395 leal (%r11,%rdx,1),%r11d
3397 vpand %xmm12,%xmm11,%xmm8
3398 vaesenc %xmm10,%xmm9,%xmm9
3399 vmovdqu 192-128(%rdi),%xmm10
3403 leal (%rdx,%rsi,1),%edx
3407 rorxl $25,%r11d,%r13d
3408 rorxl $11,%r11d,%esi
3409 leal (%rdx,%r14,1),%edx
3410 leal (%rcx,%r12,1),%ecx
3411 andnl %ebx,%r11d,%r12d
3413 rorxl $6,%r11d,%r14d
3414 leal (%rcx,%r12,1),%ecx
3417 rorxl $22,%edx,%r12d
3418 leal (%rcx,%r13,1),%ecx
3420 rorxl $13,%edx,%r14d
3422 leal (%r10,%rcx,1),%r10d
3424 vaesenclast %xmm10,%xmm9,%xmm11
3425 vaesenc %xmm10,%xmm9,%xmm9
3426 vmovdqu 208-128(%rdi),%xmm10
3430 leal (%rcx,%r15,1),%ecx
3434 rorxl $25,%r10d,%r13d
3435 rorxl $11,%r10d,%r15d
3436 leal (%rcx,%r14,1),%ecx
3437 leal (%rbx,%r12,1),%ebx
3438 andnl %eax,%r10d,%r12d
3440 rorxl $6,%r10d,%r14d
3441 leal (%rbx,%r12,1),%ebx
3444 rorxl $22,%ecx,%r12d
3445 leal (%rbx,%r13,1),%ebx
3447 rorxl $13,%ecx,%r14d
3449 leal (%r9,%rbx,1),%r9d
3451 vpand %xmm13,%xmm11,%xmm11
3452 vaesenc %xmm10,%xmm9,%xmm9
3453 vmovdqu 224-128(%rdi),%xmm10
3457 leal (%rbx,%rsi,1),%ebx
3461 rorxl $25,%r9d,%r13d
3463 leal (%rbx,%r14,1),%ebx
3464 leal (%rax,%r12,1),%eax
3465 andnl %r11d,%r9d,%r12d
3468 leal (%rax,%r12,1),%eax
3471 rorxl $22,%ebx,%r12d
3472 leal (%rax,%r13,1),%eax
3474 rorxl $13,%ebx,%r14d
3476 leal (%r8,%rax,1),%r8d
3478 vpor %xmm11,%xmm8,%xmm8
3479 vaesenclast %xmm10,%xmm9,%xmm11
3480 vmovdqu 0-128(%rdi),%xmm10
3484 leal (%rax,%r15,1),%eax
3486 vpextrq $1,%xmm15,%r12
3492 vpand %xmm14,%xmm11,%xmm11
3493 vpor %xmm11,%xmm8,%xmm8
3494 vmovdqu %xmm8,(%r12,%r13,1)
3525 vmovdqu (%r13),%xmm9
3526 vpinsrq $0,%r13,%xmm15,%xmm15
3527 addl 0+16(%rbp),%r11d
3529 rorxl $25,%r8d,%r13d
3530 rorxl $11,%r8d,%r15d
3531 leal (%rax,%r14,1),%eax
3532 leal (%r11,%r12,1),%r11d
3533 andnl %r10d,%r8d,%r12d
3536 leal (%r11,%r12,1),%r11d
3539 rorxl $22,%eax,%r12d
3540 leal (%r11,%r13,1),%r11d
3542 rorxl $13,%eax,%r14d
3544 leal (%rdx,%r11,1),%edx
3546 vpxor %xmm10,%xmm9,%xmm9
3547 vmovdqu 16-128(%rdi),%xmm10
3551 leal (%r11,%rsi,1),%r11d
3553 addl 4+16(%rbp),%r10d
3555 rorxl $25,%edx,%r13d
3557 leal (%r11,%r14,1),%r11d
3558 leal (%r10,%r12,1),%r10d
3559 andnl %r9d,%edx,%r12d
3562 leal (%r10,%r12,1),%r10d
3565 rorxl $22,%r11d,%r12d
3566 leal (%r10,%r13,1),%r10d
3568 rorxl $13,%r11d,%r14d
3569 rorxl $2,%r11d,%r13d
3570 leal (%rcx,%r10,1),%ecx
3572 vpxor %xmm8,%xmm9,%xmm9
3576 leal (%r10,%r15,1),%r10d
3578 addl 8+16(%rbp),%r9d
3580 rorxl $25,%ecx,%r13d
3581 rorxl $11,%ecx,%r15d
3582 leal (%r10,%r14,1),%r10d
3583 leal (%r9,%r12,1),%r9d
3584 andnl %r8d,%ecx,%r12d
3587 leal (%r9,%r12,1),%r9d
3590 rorxl $22,%r10d,%r12d
3591 leal (%r9,%r13,1),%r9d
3593 rorxl $13,%r10d,%r14d
3594 rorxl $2,%r10d,%r13d
3595 leal (%rbx,%r9,1),%ebx
3597 vaesenc %xmm10,%xmm9,%xmm9
3598 vmovdqu 32-128(%rdi),%xmm10
3602 leal (%r9,%rsi,1),%r9d
3604 addl 12+16(%rbp),%r8d
3606 rorxl $25,%ebx,%r13d
3608 leal (%r9,%r14,1),%r9d
3609 leal (%r8,%r12,1),%r8d
3610 andnl %edx,%ebx,%r12d
3613 leal (%r8,%r12,1),%r8d
3616 rorxl $22,%r9d,%r12d
3617 leal (%r8,%r13,1),%r8d
3619 rorxl $13,%r9d,%r14d
3621 leal (%rax,%r8,1),%eax
3623 vaesenc %xmm10,%xmm9,%xmm9
3624 vmovdqu 48-128(%rdi),%xmm10
3628 leal (%r8,%r15,1),%r8d
3630 addl 32+16(%rbp),%edx
3632 rorxl $25,%eax,%r13d
3633 rorxl $11,%eax,%r15d
3634 leal (%r8,%r14,1),%r8d
3635 leal (%rdx,%r12,1),%edx
3636 andnl %ecx,%eax,%r12d
3639 leal (%rdx,%r12,1),%edx
3642 rorxl $22,%r8d,%r12d
3643 leal (%rdx,%r13,1),%edx
3645 rorxl $13,%r8d,%r14d
3647 leal (%r11,%rdx,1),%r11d
3649 vaesenc %xmm10,%xmm9,%xmm9
3650 vmovdqu 64-128(%rdi),%xmm10
3654 leal (%rdx,%rsi,1),%edx
3656 addl 36+16(%rbp),%ecx
3658 rorxl $25,%r11d,%r13d
3659 rorxl $11,%r11d,%esi
3660 leal (%rdx,%r14,1),%edx
3661 leal (%rcx,%r12,1),%ecx
3662 andnl %ebx,%r11d,%r12d
3664 rorxl $6,%r11d,%r14d
3665 leal (%rcx,%r12,1),%ecx
3668 rorxl $22,%edx,%r12d
3669 leal (%rcx,%r13,1),%ecx
3671 rorxl $13,%edx,%r14d
3673 leal (%r10,%rcx,1),%r10d
3675 vaesenc %xmm10,%xmm9,%xmm9
3676 vmovdqu 80-128(%rdi),%xmm10
3680 leal (%rcx,%r15,1),%ecx
3682 addl 40+16(%rbp),%ebx
3684 rorxl $25,%r10d,%r13d
3685 rorxl $11,%r10d,%r15d
3686 leal (%rcx,%r14,1),%ecx
3687 leal (%rbx,%r12,1),%ebx
3688 andnl %eax,%r10d,%r12d
3690 rorxl $6,%r10d,%r14d
3691 leal (%rbx,%r12,1),%ebx
3694 rorxl $22,%ecx,%r12d
3695 leal (%rbx,%r13,1),%ebx
3697 rorxl $13,%ecx,%r14d
3699 leal (%r9,%rbx,1),%r9d
3701 vaesenc %xmm10,%xmm9,%xmm9
3702 vmovdqu 96-128(%rdi),%xmm10
3706 leal (%rbx,%rsi,1),%ebx
3708 addl 44+16(%rbp),%eax
3710 rorxl $25,%r9d,%r13d
3712 leal (%rbx,%r14,1),%ebx
3713 leal (%rax,%r12,1),%eax
3714 andnl %r11d,%r9d,%r12d
3717 leal (%rax,%r12,1),%eax
3720 rorxl $22,%ebx,%r12d
3721 leal (%rax,%r13,1),%eax
3723 rorxl $13,%ebx,%r14d
3725 leal (%r8,%rax,1),%r8d
3727 vaesenc %xmm10,%xmm9,%xmm9
3728 vmovdqu 112-128(%rdi),%xmm10
3732 leal (%rax,%r15,1),%eax
3735 addl 0+16(%rbp),%r11d
3737 rorxl $25,%r8d,%r13d
3738 rorxl $11,%r8d,%r15d
3739 leal (%rax,%r14,1),%eax
3740 leal (%r11,%r12,1),%r11d
3741 andnl %r10d,%r8d,%r12d
3744 leal (%r11,%r12,1),%r11d
3747 rorxl $22,%eax,%r12d
3748 leal (%r11,%r13,1),%r11d
3750 rorxl $13,%eax,%r14d
3752 leal (%rdx,%r11,1),%edx
3754 vaesenc %xmm10,%xmm9,%xmm9
3755 vmovdqu 128-128(%rdi),%xmm10
3759 leal (%r11,%rsi,1),%r11d
3761 addl 4+16(%rbp),%r10d
3763 rorxl $25,%edx,%r13d
3765 leal (%r11,%r14,1),%r11d
3766 leal (%r10,%r12,1),%r10d
3767 andnl %r9d,%edx,%r12d
3770 leal (%r10,%r12,1),%r10d
3773 rorxl $22,%r11d,%r12d
3774 leal (%r10,%r13,1),%r10d
3776 rorxl $13,%r11d,%r14d
3777 rorxl $2,%r11d,%r13d
3778 leal (%rcx,%r10,1),%ecx
3780 vaesenc %xmm10,%xmm9,%xmm9
3781 vmovdqu 144-128(%rdi),%xmm10
3785 leal (%r10,%r15,1),%r10d
3787 addl 8+16(%rbp),%r9d
3789 rorxl $25,%ecx,%r13d
3790 rorxl $11,%ecx,%r15d
3791 leal (%r10,%r14,1),%r10d
3792 leal (%r9,%r12,1),%r9d
3793 andnl %r8d,%ecx,%r12d
3796 leal (%r9,%r12,1),%r9d
3799 rorxl $22,%r10d,%r12d
3800 leal (%r9,%r13,1),%r9d
3802 rorxl $13,%r10d,%r14d
3803 rorxl $2,%r10d,%r13d
3804 leal (%rbx,%r9,1),%ebx
3806 vaesenc %xmm10,%xmm9,%xmm9
3807 vmovdqu 160-128(%rdi),%xmm10
3811 leal (%r9,%rsi,1),%r9d
3813 addl 12+16(%rbp),%r8d
3815 rorxl $25,%ebx,%r13d
3817 leal (%r9,%r14,1),%r9d
3818 leal (%r8,%r12,1),%r8d
3819 andnl %edx,%ebx,%r12d
3822 leal (%r8,%r12,1),%r8d
3825 rorxl $22,%r9d,%r12d
3826 leal (%r8,%r13,1),%r8d
3828 rorxl $13,%r9d,%r14d
3830 leal (%rax,%r8,1),%eax
3832 vaesenclast %xmm10,%xmm9,%xmm11
3833 vaesenc %xmm10,%xmm9,%xmm9
3834 vmovdqu 176-128(%rdi),%xmm10
3838 leal (%r8,%r15,1),%r8d
3840 addl 32+16(%rbp),%edx
3842 rorxl $25,%eax,%r13d
3843 rorxl $11,%eax,%r15d
3844 leal (%r8,%r14,1),%r8d
3845 leal (%rdx,%r12,1),%edx
3846 andnl %ecx,%eax,%r12d
3849 leal (%rdx,%r12,1),%edx
3852 rorxl $22,%r8d,%r12d
3853 leal (%rdx,%r13,1),%edx
3855 rorxl $13,%r8d,%r14d
3857 leal (%r11,%rdx,1),%r11d
3859 vpand %xmm12,%xmm11,%xmm8
3860 vaesenc %xmm10,%xmm9,%xmm9
3861 vmovdqu 192-128(%rdi),%xmm10
3865 leal (%rdx,%rsi,1),%edx
3867 addl 36+16(%rbp),%ecx
3869 rorxl $25,%r11d,%r13d
3870 rorxl $11,%r11d,%esi
3871 leal (%rdx,%r14,1),%edx
3872 leal (%rcx,%r12,1),%ecx
3873 andnl %ebx,%r11d,%r12d
3875 rorxl $6,%r11d,%r14d
3876 leal (%rcx,%r12,1),%ecx
3879 rorxl $22,%edx,%r12d
3880 leal (%rcx,%r13,1),%ecx
3882 rorxl $13,%edx,%r14d
3884 leal (%r10,%rcx,1),%r10d
3886 vaesenclast %xmm10,%xmm9,%xmm11
3887 vaesenc %xmm10,%xmm9,%xmm9
3888 vmovdqu 208-128(%rdi),%xmm10
3892 leal (%rcx,%r15,1),%ecx
3894 addl 40+16(%rbp),%ebx
3896 rorxl $25,%r10d,%r13d
3897 rorxl $11,%r10d,%r15d
3898 leal (%rcx,%r14,1),%ecx
3899 leal (%rbx,%r12,1),%ebx
3900 andnl %eax,%r10d,%r12d
3902 rorxl $6,%r10d,%r14d
3903 leal (%rbx,%r12,1),%ebx
3906 rorxl $22,%ecx,%r12d
3907 leal (%rbx,%r13,1),%ebx
3909 rorxl $13,%ecx,%r14d
3911 leal (%r9,%rbx,1),%r9d
3913 vpand %xmm13,%xmm11,%xmm11
3914 vaesenc %xmm10,%xmm9,%xmm9
3915 vmovdqu 224-128(%rdi),%xmm10
3919 leal (%rbx,%rsi,1),%ebx
3921 addl 44+16(%rbp),%eax
3923 rorxl $25,%r9d,%r13d
3925 leal (%rbx,%r14,1),%ebx
3926 leal (%rax,%r12,1),%eax
3927 andnl %r11d,%r9d,%r12d
3930 leal (%rax,%r12,1),%eax
3933 rorxl $22,%ebx,%r12d
3934 leal (%rax,%r13,1),%eax
3936 rorxl $13,%ebx,%r14d
3938 leal (%r8,%rax,1),%r8d
3940 vpor %xmm11,%xmm8,%xmm8
3941 vaesenclast %xmm10,%xmm9,%xmm11
3942 vmovdqu 0-128(%rdi),%xmm10
3946 leal (%rax,%r15,1),%eax
3949 vpextrq $1,%xmm15,%r15
3950 vpand %xmm14,%xmm11,%xmm11
3951 vpor %xmm11,%xmm8,%xmm8
3953 vmovdqu %xmm8,(%r15,%r13,1)
3971 leaq (%rsi,%r13,1),%r12
3974 cmpq 64+16(%rsp),%r13
3991 movq 64+32(%rsp),%r8
3992 movq 64+56(%rsp),%rsi
4004 .size aesni_cbc_sha256_enc_avx2,.-aesni_cbc_sha256_enc_avx2
4005 .type aesni_cbc_sha256_enc_shaext,@function
#
# SHA-NI + AES-NI "stitched" path for aesni_cbc_sha256_enc: the dispatcher at
# the top of this file branches here ("jc aesni_cbc_sha256_enc_shaext") when
# the CPU advertises the SHA extensions.  Auto-generated from
# aesni-sha256-x86_64.pl -- do not hand-edit the instruction stream.
#
# NOTE(review): the embedded listing numbers (4005, 4007, ...) are not
# contiguous, so this excerpt is partial; in particular no sha256rnds2 round
# instructions are visible here, and the re-biasing of %rcx implied by the
# negative key offsets below is in elided lines.  Verify against the full
# generated file before acting on this listing.
#
# Register roles as far as this excerpt shows:
#   %rax  = K256+128 (SHA-256 round-constant table, biased by +128)
#   %rcx  = AES key schedule (AES_KEY; rounds field read at offset 240);
#           later accesses use offsets like -80(%rcx)/16-112(%rcx), so %rcx
#           is presumably advanced by +112 in elided code -- TODO confirm
#   %rdi  = input (plaintext) pointer; %rsi presumably out-in delta, since
#           ciphertext is stored at (%rsi,%rdi,1) -- TODO confirm
#   %r9   = SHA-256 state pointer (half the state reloaded/stored at 16(%r9))
#   %r10  = 64-byte message block being hashed
#
4007 aesni_cbc_sha256_enc_shaext:
4009 leaq K256+128(%rip),%rax          # %rax = &K256 + 128 (bias lets disp8 reach whole table)
4011 movdqu 16(%r9),%xmm2              # load second half of SHA-256 state from context
4012 movdqa 512-128(%rax),%xmm3        # = K256+512: pshufb mask stored past the constants
                                       #   (used below to byte-swap message words) -- presumed
4014 movl 240(%rcx),%r11d              # AES round count (AES_KEY.rounds at offset 240)
4016 movups (%rcx),%xmm15              # first AES round key
4017 movups 16(%rcx),%xmm4             # second AES round key
# Re-order the eight 32-bit state words into the (ABEF, CDGH) register layout
# that the SHA-NI round instructions operate on:
4020 pshufd $0x1b,%xmm1,%xmm0
4021 pshufd $0xb1,%xmm1,%xmm1
4022 pshufd $0x1b,%xmm2,%xmm2
4024 .byte 102,15,58,15,202,8          # palignr $8,%xmm2,%xmm1
4025 punpcklqdq %xmm0,%xmm2
# Load the 64-byte message block and byte-swap each 16-byte lane (SHA-256 is
# big-endian); the .byte sequences decode as pshufb with the %xmm3 mask:
4031 movdqu (%r10),%xmm10              # W[0..3]
4032 movdqu 16(%r10),%xmm11            # W[4..7]
4033 movdqu 32(%r10),%xmm12            # W[8..11]
4034 .byte 102,68,15,56,0,211          # pshufb %xmm3,%xmm10
4035 movdqu 48(%r10),%xmm13            # W[12..15]
4037 movdqa 0-128(%rax),%xmm0          # K256[0..3] (table biased by -128)
4039 .byte 102,68,15,56,0,219          # pshufb %xmm3,%xmm11
4042 movups 0(%rdi),%xmm14             # load plaintext block (CBC input) -- presumed
4045 movups -80(%rcx),%xmm5            # AES round key (biased key-schedule addressing)
4048 pshufd $0x0e,%xmm0,%xmm0          # move upper qword of W+K down for next two rounds
4049 movups -64(%rcx),%xmm4            # AES round key
4053 movdqa 32-128(%rax),%xmm0         # K256[8..11]... wait -- K256[8..11] per 32-byte rows
4055 .byte 102,68,15,56,0,227          # pshufb %xmm3,%xmm12
4057 movups -48(%rcx),%xmm5            # AES round key
4060 pshufd $0x0e,%xmm0,%xmm0          # upper qword -> lower (second round pair)
4061 movups -32(%rcx),%xmm4            # AES round key
4065 movdqa 64-128(%rax),%xmm0         # next K256 group
4067 .byte 102,68,15,56,0,235          # pshufb %xmm3,%xmm13
4068 .byte 69,15,56,204,211            # sha256msg1 %xmm11,%xmm10  (message schedule, part 1)
4069 movups -16(%rcx),%xmm5            # AES round key
4072 pshufd $0x0e,%xmm0,%xmm0
4074 .byte 102,65,15,58,15,220,4       # palignr $4,%xmm12,%xmm3   (sigma input alignment)
4076 movups 0(%rcx),%xmm4              # AES round key
4080 movdqa 96-128(%rax),%xmm0         # next K256 group
4082 .byte 69,15,56,205,213            # sha256msg2 %xmm13,%xmm10  (message schedule, part 2)
4083 .byte 69,15,56,204,220            # sha256msg1 %xmm12,%xmm11
4084 movups 16(%rcx),%xmm5             # AES round key
4087 pshufd $0x0e,%xmm0,%xmm0
4088 movups 32(%rcx),%xmm4             # AES round key
4091 .byte 102,65,15,58,15,221,4       # palignr $4,%xmm13,%xmm3
4094 movdqa 128-128(%rax),%xmm0        # next K256 group
4096 .byte 69,15,56,205,218            # sha256msg2 %xmm10,%xmm11
4097 .byte 69,15,56,204,229            # sha256msg1 %xmm13,%xmm12
4098 movups 48(%rcx),%xmm5             # AES round key
4101 pshufd $0x0e,%xmm0,%xmm0
4103 .byte 102,65,15,58,15,218,4       # palignr $4,%xmm10,%xmm3
# Tail of the AES-CBC encryption of block 0 (last four round keys, then
# aesenclast); key-length dispatch for 192/256-bit keys is in elided lines:
4107 movups 64(%rcx),%xmm4
4109 movups 80(%rcx),%xmm5
4112 movups 96(%rcx),%xmm4
4114 movups 112(%rcx),%xmm5
4117 aesenclast %xmm5,%xmm6            # final AES round -> %xmm6 = ciphertext block 0
4118 movups 16-112(%rcx),%xmm4         # rewind to early round key for the next block
4121 movups 16(%rdi),%xmm14            # load plaintext block 1
4123 movups %xmm6,0(%rsi,%rdi,1)       # store ciphertext block 0 at in+delta -- TODO confirm
# --- second 16 bytes of the message schedule, interleaved with AES block 1 ---
4125 movups -80(%rcx),%xmm5
4127 movdqa 160-128(%rax),%xmm0        # next K256 group
4129 .byte 69,15,56,205,227            # sha256msg2 %xmm11,%xmm12
4130 .byte 69,15,56,204,234            # sha256msg1 %xmm10,%xmm13
4131 movups -64(%rcx),%xmm4
4134 pshufd $0x0e,%xmm0,%xmm0
4136 .byte 102,65,15,58,15,219,4       # palignr $4,%xmm11,%xmm3
4138 movups -48(%rcx),%xmm5
4141 movdqa 192-128(%rax),%xmm0
4143 .byte 69,15,56,205,236            # sha256msg2 %xmm12,%xmm13
4144 .byte 69,15,56,204,211            # sha256msg1 %xmm11,%xmm10
4145 movups -32(%rcx),%xmm4
4148 pshufd $0x0e,%xmm0,%xmm0
4150 .byte 102,65,15,58,15,220,4       # palignr $4,%xmm12,%xmm3
4152 movups -16(%rcx),%xmm5
4155 movdqa 224-128(%rax),%xmm0
4157 .byte 69,15,56,205,213            # sha256msg2 %xmm13,%xmm10
4158 .byte 69,15,56,204,220            # sha256msg1 %xmm12,%xmm11
4159 movups 0(%rcx),%xmm4
4162 pshufd $0x0e,%xmm0,%xmm0
4164 .byte 102,65,15,58,15,221,4       # palignr $4,%xmm13,%xmm3
4166 movups 16(%rcx),%xmm5
4169 movdqa 256-128(%rax),%xmm0
4171 .byte 69,15,56,205,218            # sha256msg2 %xmm10,%xmm11
4172 .byte 69,15,56,204,229            # sha256msg1 %xmm13,%xmm12
4173 movups 32(%rcx),%xmm4
4176 pshufd $0x0e,%xmm0,%xmm0
4178 .byte 102,65,15,58,15,218,4       # palignr $4,%xmm10,%xmm3
4180 movups 48(%rcx),%xmm5
# AES block 1 tail:
4184 movups 64(%rcx),%xmm4
4186 movups 80(%rcx),%xmm5
4189 movups 96(%rcx),%xmm4
4191 movups 112(%rcx),%xmm5
4194 aesenclast %xmm5,%xmm6            # ciphertext block 1
4195 movups 16-112(%rcx),%xmm4         # rewind key schedule again
4198 movups 32(%rdi),%xmm14            # plaintext block 2
4200 movups %xmm6,16(%rsi,%rdi,1)      # store ciphertext block 1
# --- third quarter of the schedule, interleaved with AES block 2 ---
4202 movups -80(%rcx),%xmm5
4204 movdqa 288-128(%rax),%xmm0
4206 .byte 69,15,56,205,227            # sha256msg2 %xmm11,%xmm12
4207 .byte 69,15,56,204,234            # sha256msg1 %xmm10,%xmm13
4208 movups -64(%rcx),%xmm4
4211 pshufd $0x0e,%xmm0,%xmm0
4213 .byte 102,65,15,58,15,219,4       # palignr $4,%xmm11,%xmm3
4215 movups -48(%rcx),%xmm5
4218 movdqa 320-128(%rax),%xmm0
4220 .byte 69,15,56,205,236            # sha256msg2 %xmm12,%xmm13
4221 .byte 69,15,56,204,211            # sha256msg1 %xmm11,%xmm10
4222 movups -32(%rcx),%xmm4
4225 pshufd $0x0e,%xmm0,%xmm0
4227 .byte 102,65,15,58,15,220,4       # palignr $4,%xmm12,%xmm3
4229 movups -16(%rcx),%xmm5
4232 movdqa 352-128(%rax),%xmm0
4234 .byte 69,15,56,205,213            # sha256msg2 %xmm13,%xmm10
4235 .byte 69,15,56,204,220            # sha256msg1 %xmm12,%xmm11
4236 movups 0(%rcx),%xmm4
4239 pshufd $0x0e,%xmm0,%xmm0
4241 .byte 102,65,15,58,15,221,4       # palignr $4,%xmm13,%xmm3
4243 movups 16(%rcx),%xmm5
4246 movdqa 384-128(%rax),%xmm0
4248 .byte 69,15,56,205,218            # sha256msg2 %xmm10,%xmm11
4249 .byte 69,15,56,204,229            # sha256msg1 %xmm13,%xmm12
4250 movups 32(%rcx),%xmm4
4253 pshufd $0x0e,%xmm0,%xmm0
4255 .byte 102,65,15,58,15,218,4       # palignr $4,%xmm10,%xmm3
4257 movups 48(%rcx),%xmm5
4260 movdqa 416-128(%rax),%xmm0
4262 .byte 69,15,56,205,227            # sha256msg2 %xmm11,%xmm12
4263 .byte 69,15,56,204,234            # sha256msg1 %xmm10,%xmm13
# AES block 2 tail:
4266 movups 64(%rcx),%xmm4
4268 movups 80(%rcx),%xmm5
4271 movups 96(%rcx),%xmm4
4273 movups 112(%rcx),%xmm5
4276 aesenclast %xmm5,%xmm6            # ciphertext block 2
4277 movups 16-112(%rcx),%xmm4
4280 pshufd $0x0e,%xmm0,%xmm0
4282 .byte 102,65,15,58,15,219,4       # palignr $4,%xmm11,%xmm3
4284 movups 48(%rdi),%xmm14            # plaintext block 3
4286 movups %xmm6,32(%rsi,%rdi,1)      # store ciphertext block 2
# --- final schedule groups (no more msg1 needed), AES block 3 ---
4288 movups -80(%rcx),%xmm5
4290 movups -64(%rcx),%xmm4
4294 movdqa 448-128(%rax),%xmm0        # K256[56..59]
4296 .byte 69,15,56,205,236            # sha256msg2 %xmm12,%xmm13
4298 movups -48(%rcx),%xmm5
4301 pshufd $0x0e,%xmm0,%xmm0
4302 movups -32(%rcx),%xmm4
4306 movdqa 480-128(%rax),%xmm0        # K256[60..63] (last constant group)
4308 movups -16(%rcx),%xmm5
4310 movups 0(%rcx),%xmm4
4313 pshufd $0x0e,%xmm0,%xmm0
4314 movups 16(%rcx),%xmm5
4318 movups 32(%rcx),%xmm4
4320 movups 48(%rcx),%xmm5
4324 movups 64(%rcx),%xmm4
4326 movups 80(%rcx),%xmm5
4329 movups 96(%rcx),%xmm4
4331 movups 112(%rcx),%xmm5
4334 aesenclast %xmm5,%xmm6            # ciphertext block 3
4335 movups 16-112(%rcx),%xmm4
4342 movups %xmm6,48(%rsi,%rdi,1)      # store ciphertext block 3
# Convert the (ABEF, CDGH) working registers back to the canonical A..H word
# order and write the updated state to the context:
4346 pshufd $0xb1,%xmm2,%xmm2
4347 pshufd $0x1b,%xmm1,%xmm3
4348 pshufd $0xb1,%xmm1,%xmm1
4349 punpckhqdq %xmm2,%xmm1
4350 .byte 102,15,58,15,211,8          # palignr $8,%xmm3,%xmm2
4354 movdqu %xmm2,16(%r9)              # store second half of SHA-256 state
4356 .size aesni_cbc_sha256_enc_shaext,.-aesni_cbc_sha256_enc_shaext