2 /* Do not modify. This file is auto-generated from aesni-sha256-x86_64.pl. */
6 .globl aesni_cbc_sha256_enc
7 .type aesni_cbc_sha256_enc,@function
/*
 * aesni_cbc_sha256_enc — entry-point dispatcher for the stitched
 * AES-CBC-encrypt + SHA-256 implementations in this file.  It inspects the
 * OPENSSL_ia32cap_P CPU-capability vector and tail-jumps to the best
 * available variant: SHA extensions, XOP, AVX2, or plain AVX.
 *
 * NOTE(review): this listing is a sampled excerpt of the generated file —
 * the capability-bit tests that set the flags consumed by the conditional
 * jumps below (and the fallback path after the last jump) are not visible
 * here; confirm against the full aesni-sha256-x86_64.pl output.
 */
11 leaq OPENSSL_ia32cap_P(%rip),%r11
18 jc aesni_cbc_sha256_enc_shaext
23 jnz aesni_cbc_sha256_enc_xop
26 je aesni_cbc_sha256_enc_avx2
28 jnz aesni_cbc_sha256_enc_avx
37 .size aesni_cbc_sha256_enc,.-aesni_cbc_sha256_enc
/*
 * K256 constant pool (the K256 label itself falls in a gap of this sampled
 * excerpt; the functions below reference it as K256, K256+512 and K256+544).
 *
 * First 32 rows: the 64 SHA-256 round constants of FIPS 180-4 §4.2.2.
 * Each group of four constants appears twice in a row so that a single
 * aligned vector load yields the constant replicated across lanes.
 */
42 .long 0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5
43 .long 0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5
44 .long 0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5
45 .long 0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5
46 .long 0xd807aa98,0x12835b01,0x243185be,0x550c7dc3
47 .long 0xd807aa98,0x12835b01,0x243185be,0x550c7dc3
48 .long 0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf174
49 .long 0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf174
50 .long 0xe49b69c1,0xefbe4786,0x0fc19dc6,0x240ca1cc
51 .long 0xe49b69c1,0xefbe4786,0x0fc19dc6,0x240ca1cc
52 .long 0x2de92c6f,0x4a7484aa,0x5cb0a9dc,0x76f988da
53 .long 0x2de92c6f,0x4a7484aa,0x5cb0a9dc,0x76f988da
54 .long 0x983e5152,0xa831c66d,0xb00327c8,0xbf597fc7
55 .long 0x983e5152,0xa831c66d,0xb00327c8,0xbf597fc7
56 .long 0xc6e00bf3,0xd5a79147,0x06ca6351,0x14292967
57 .long 0xc6e00bf3,0xd5a79147,0x06ca6351,0x14292967
58 .long 0x27b70a85,0x2e1b2138,0x4d2c6dfc,0x53380d13
59 .long 0x27b70a85,0x2e1b2138,0x4d2c6dfc,0x53380d13
60 .long 0x650a7354,0x766a0abb,0x81c2c92e,0x92722c85
61 .long 0x650a7354,0x766a0abb,0x81c2c92e,0x92722c85
62 .long 0xa2bfe8a1,0xa81a664b,0xc24b8b70,0xc76c51a3
63 .long 0xa2bfe8a1,0xa81a664b,0xc24b8b70,0xc76c51a3
64 .long 0xd192e819,0xd6990624,0xf40e3585,0x106aa070
65 .long 0xd192e819,0xd6990624,0xf40e3585,0x106aa070
66 .long 0x19a4c116,0x1e376c08,0x2748774c,0x34b0bcb5
67 .long 0x19a4c116,0x1e376c08,0x2748774c,0x34b0bcb5
68 .long 0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3
69 .long 0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3
70 .long 0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208
71 .long 0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208
72 .long 0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2
73 .long 0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2
/*
 * K256+512: vpshufb control mask that reverses the bytes within each dword
 * (little-endian message words -> big-endian SHA-256 word order); loaded
 * into %xmm7/%ymm7 before the vpshufb calls on the input blocks.
 */
75 .long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f
76 .long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f
/*
 * K256+544: all-zero / all-ones mask table.  The functions below load
 * 0/16/32(%r13,%r14,8) (i.e. rounds-scaled offsets into this table) into
 * %xmm14/%xmm13/%xmm12, then vpand/vpor the vaesenclast results through
 * them — presumably to blend the correct last-round output for the key
 * size in use; verify against the perlasm source.
 */
77 .long 0,0,0,0, 0,0,0,0, -1,-1,-1,-1
78 .long 0,0,0,0, 0,0,0,0
/* NUL-terminated ASCII tag:
   "AESNI-CBC+SHA256 stitch for x86_64, CRYPTOGAMS by <appro@openssl.org>" */
79 .byte 65,69,83,78,73,45,67,66,67,43,83,72,65,50,53,54,32,115,116,105,116,99,104,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
/*
 * aesni_cbc_sha256_enc_xop — stitched AES-CBC encryption + SHA-256 using
 * AMD XOP extensions.  AESENC rounds on the CBC state (%xmm9, round keys
 * streamed through %xmm10 from the schedule at -128(%rdi)) are interleaved
 * with 4-way SIMD SHA-256 message-schedule expansion (%xmm0-%xmm3 hold the
 * message words, K-added schedule staged at 0..48(%rsp)).
 *
 * NOTE(review): this listing is a sampled excerpt — the prologue, the
 * scalar SHA-256 round instructions, loop labels/branches, and the
 * epilogue registers restores are largely missing between the lines shown.
 * Nothing here may be reordered without consulting the full listing.
 */
81 .type aesni_cbc_sha256_enc_xop,@function
83 aesni_cbc_sha256_enc_xop:
88 .cfi_def_cfa_register %rax
/* Spill selected arguments into the 64+... save area above the frame. */
111 movq %rdx,64+16(%rsp)
115 movq %r10,64+48(%rsp)
117 .cfi_escape 0x0f,0x06,0x77,0xf8,0x00,0x06,0x23,0x08
/* %r13 -> mask table at K256+544; %r14d = AES rounds field (240 bytes into
   the schedule; %rdi is biased by +128, hence the -128 displacements). */
123 leaq K256+544(%rip),%r13
124 movl 240-128(%rdi),%r14d
/* Blend masks selected by round count; %xmm10 = round-0 key. */
139 vmovdqa 0(%r13,%r14,8),%xmm14
140 vmovdqa 16(%r13,%r14,8),%xmm13
141 vmovdqa 32(%r13,%r14,8),%xmm12
142 vmovdqu 0-128(%rdi),%xmm10
/* Load one 64-byte block, byte-swap each dword, add K256[0..15], and stage
   the first 16 scheduled words at 0..48(%rsp). */
146 vmovdqa K256+512(%rip),%xmm7
147 vmovdqu 0(%rsi,%r12,1),%xmm0
148 vmovdqu 16(%rsi,%r12,1),%xmm1
149 vmovdqu 32(%rsi,%r12,1),%xmm2
150 vmovdqu 48(%rsi,%r12,1),%xmm3
151 vpshufb %xmm7,%xmm0,%xmm0
153 vpshufb %xmm7,%xmm1,%xmm1
154 vpshufb %xmm7,%xmm2,%xmm2
155 vpaddd 0(%rbp),%xmm0,%xmm4
156 vpshufb %xmm7,%xmm3,%xmm3
157 vpaddd 32(%rbp),%xmm1,%xmm5
158 vpaddd 64(%rbp),%xmm2,%xmm6
159 vpaddd 96(%rbp),%xmm3,%xmm7
160 vmovdqa %xmm4,0(%rsp)
162 vmovdqa %xmm5,16(%rsp)
164 vmovdqa %xmm6,32(%rsp)
166 vmovdqa %xmm7,48(%rsp)
/* Message-schedule expansion for %xmm0 (W[t] = sigma1+W[t-7]+sigma0+W[t-16]).
   The .byte 143,232,120,194,... sequences are hand-encoded XOP instructions
   (0x8F XOP prefix) — presumably vprotd rotates; TODO confirm decode. */
175 vpalignr $4,%xmm0,%xmm1,%xmm4
178 vpalignr $4,%xmm2,%xmm3,%xmm7
181 .byte 143,232,120,194,236,14
184 vpsrld $3,%xmm4,%xmm4
187 vpaddd %xmm7,%xmm0,%xmm0
/* CBC: xor round-0 key into the running cipher state. */
189 vpxor %xmm10,%xmm9,%xmm9
190 vmovdqu 16-128(%rdi),%xmm10
194 .byte 143,232,120,194,245,11
197 vpxor %xmm5,%xmm4,%xmm4
202 .byte 143,232,120,194,251,13
205 vpxor %xmm6,%xmm4,%xmm4
208 vpsrld $10,%xmm3,%xmm6
211 vpaddd %xmm4,%xmm0,%xmm0
214 .byte 143,232,120,194,239,2
217 vpxor %xmm6,%xmm7,%xmm7
222 vpxor %xmm5,%xmm7,%xmm7
/* CBC: xor the plaintext block (%xmm8) into the state. */
226 vpxor %xmm8,%xmm9,%xmm9
228 vpsrldq $8,%xmm7,%xmm7
233 vpaddd %xmm7,%xmm0,%xmm0
238 .byte 143,232,120,194,248,13
241 vpsrld $10,%xmm0,%xmm6
244 .byte 143,232,120,194,239,2
247 vpxor %xmm6,%xmm7,%xmm7
252 vpxor %xmm5,%xmm7,%xmm7
257 vpslldq $8,%xmm7,%xmm7
/* AES round interleaved with the schedule math; next round key loaded. */
261 vaesenc %xmm10,%xmm9,%xmm9
262 vmovdqu 32-128(%rdi),%xmm10
264 vpaddd %xmm7,%xmm0,%xmm0
269 vpaddd 0(%rbp),%xmm0,%xmm6
291 vaesenc %xmm10,%xmm9,%xmm9
292 vmovdqu 48-128(%rdi),%xmm10
310 vmovdqa %xmm6,0(%rsp)
/* Same expansion pattern for %xmm1. */
311 vpalignr $4,%xmm1,%xmm2,%xmm4
314 vpalignr $4,%xmm3,%xmm0,%xmm7
317 .byte 143,232,120,194,236,14
320 vpsrld $3,%xmm4,%xmm4
323 vpaddd %xmm7,%xmm1,%xmm1
325 vaesenc %xmm10,%xmm9,%xmm9
326 vmovdqu 64-128(%rdi),%xmm10
330 .byte 143,232,120,194,245,11
333 vpxor %xmm5,%xmm4,%xmm4
338 .byte 143,232,120,194,248,13
341 vpxor %xmm6,%xmm4,%xmm4
344 vpsrld $10,%xmm0,%xmm6
347 vpaddd %xmm4,%xmm1,%xmm1
350 .byte 143,232,120,194,239,2
353 vpxor %xmm6,%xmm7,%xmm7
358 vpxor %xmm5,%xmm7,%xmm7
362 vaesenc %xmm10,%xmm9,%xmm9
363 vmovdqu 80-128(%rdi),%xmm10
365 vpsrldq $8,%xmm7,%xmm7
370 vpaddd %xmm7,%xmm1,%xmm1
375 .byte 143,232,120,194,249,13
378 vpsrld $10,%xmm1,%xmm6
381 .byte 143,232,120,194,239,2
384 vpxor %xmm6,%xmm7,%xmm7
389 vpxor %xmm5,%xmm7,%xmm7
394 vpslldq $8,%xmm7,%xmm7
398 vaesenc %xmm10,%xmm9,%xmm9
399 vmovdqu 96-128(%rdi),%xmm10
401 vpaddd %xmm7,%xmm1,%xmm1
406 vpaddd 32(%rbp),%xmm1,%xmm6
428 vaesenc %xmm10,%xmm9,%xmm9
429 vmovdqu 112-128(%rdi),%xmm10
447 vmovdqa %xmm6,16(%rsp)
/* Same expansion pattern for %xmm2. */
448 vpalignr $4,%xmm2,%xmm3,%xmm4
451 vpalignr $4,%xmm0,%xmm1,%xmm7
454 .byte 143,232,120,194,236,14
457 vpsrld $3,%xmm4,%xmm4
460 vpaddd %xmm7,%xmm2,%xmm2
462 vaesenc %xmm10,%xmm9,%xmm9
463 vmovdqu 128-128(%rdi),%xmm10
467 .byte 143,232,120,194,245,11
470 vpxor %xmm5,%xmm4,%xmm4
475 .byte 143,232,120,194,249,13
478 vpxor %xmm6,%xmm4,%xmm4
481 vpsrld $10,%xmm1,%xmm6
484 vpaddd %xmm4,%xmm2,%xmm2
487 .byte 143,232,120,194,239,2
490 vpxor %xmm6,%xmm7,%xmm7
495 vpxor %xmm5,%xmm7,%xmm7
499 vaesenc %xmm10,%xmm9,%xmm9
500 vmovdqu 144-128(%rdi),%xmm10
502 vpsrldq $8,%xmm7,%xmm7
507 vpaddd %xmm7,%xmm2,%xmm2
512 .byte 143,232,120,194,250,13
515 vpsrld $10,%xmm2,%xmm6
518 .byte 143,232,120,194,239,2
521 vpxor %xmm6,%xmm7,%xmm7
526 vpxor %xmm5,%xmm7,%xmm7
531 vpslldq $8,%xmm7,%xmm7
535 vaesenc %xmm10,%xmm9,%xmm9
536 vmovdqu 160-128(%rdi),%xmm10
538 vpaddd %xmm7,%xmm2,%xmm2
543 vpaddd 64(%rbp),%xmm2,%xmm6
/* Speculative last round at the 128-bit key boundary: both vaesenclast and
   vaesenc are issued; the correct result is later blended via the
   xmm12/xmm13/xmm14 masks selected from the round count. */
565 vaesenclast %xmm10,%xmm9,%xmm11
566 vaesenc %xmm10,%xmm9,%xmm9
567 vmovdqu 176-128(%rdi),%xmm10
585 vmovdqa %xmm6,32(%rsp)
/* Same expansion pattern for %xmm3. */
586 vpalignr $4,%xmm3,%xmm0,%xmm4
589 vpalignr $4,%xmm1,%xmm2,%xmm7
592 .byte 143,232,120,194,236,14
595 vpsrld $3,%xmm4,%xmm4
598 vpaddd %xmm7,%xmm3,%xmm3
600 vpand %xmm12,%xmm11,%xmm8
601 vaesenc %xmm10,%xmm9,%xmm9
602 vmovdqu 192-128(%rdi),%xmm10
606 .byte 143,232,120,194,245,11
609 vpxor %xmm5,%xmm4,%xmm4
614 .byte 143,232,120,194,250,13
617 vpxor %xmm6,%xmm4,%xmm4
620 vpsrld $10,%xmm2,%xmm6
623 vpaddd %xmm4,%xmm3,%xmm3
626 .byte 143,232,120,194,239,2
629 vpxor %xmm6,%xmm7,%xmm7
634 vpxor %xmm5,%xmm7,%xmm7
/* Speculative last round at the 192-bit key boundary. */
638 vaesenclast %xmm10,%xmm9,%xmm11
639 vaesenc %xmm10,%xmm9,%xmm9
640 vmovdqu 208-128(%rdi),%xmm10
642 vpsrldq $8,%xmm7,%xmm7
647 vpaddd %xmm7,%xmm3,%xmm3
652 .byte 143,232,120,194,251,13
655 vpsrld $10,%xmm3,%xmm6
658 .byte 143,232,120,194,239,2
661 vpxor %xmm6,%xmm7,%xmm7
666 vpxor %xmm5,%xmm7,%xmm7
671 vpslldq $8,%xmm7,%xmm7
675 vpand %xmm13,%xmm11,%xmm11
676 vaesenc %xmm10,%xmm9,%xmm9
677 vmovdqu 224-128(%rdi),%xmm10
679 vpaddd %xmm7,%xmm3,%xmm3
684 vpaddd 96(%rbp),%xmm3,%xmm6
/* Final last round (256-bit key path); accumulate masked candidates. */
706 vpor %xmm11,%xmm8,%xmm8
707 vaesenclast %xmm10,%xmm9,%xmm11
708 vmovdqu 0-128(%rdi),%xmm10
726 vmovdqa %xmm6,48(%rsp)
728 vpand %xmm14,%xmm11,%xmm11
730 vpor %xmm11,%xmm8,%xmm8
/* Store the finished ciphertext block at (%r15 + %r12). */
731 vmovdqu %xmm8,(%r15,%r12,1)
/* Second phase (final 16 SHA rounds per block): pure AES pipeline with the
   precomputed schedule; scalar round code between these lines is elided in
   this excerpt. */
746 vpxor %xmm10,%xmm9,%xmm9
747 vmovdqu 16-128(%rdi),%xmm10
774 vpxor %xmm8,%xmm9,%xmm9
801 vaesenc %xmm10,%xmm9,%xmm9
802 vmovdqu 32-128(%rdi),%xmm10
829 vaesenc %xmm10,%xmm9,%xmm9
830 vmovdqu 48-128(%rdi),%xmm10
857 vaesenc %xmm10,%xmm9,%xmm9
858 vmovdqu 64-128(%rdi),%xmm10
885 vaesenc %xmm10,%xmm9,%xmm9
886 vmovdqu 80-128(%rdi),%xmm10
913 vaesenc %xmm10,%xmm9,%xmm9
914 vmovdqu 96-128(%rdi),%xmm10
941 vaesenc %xmm10,%xmm9,%xmm9
942 vmovdqu 112-128(%rdi),%xmm10
969 vaesenc %xmm10,%xmm9,%xmm9
970 vmovdqu 128-128(%rdi),%xmm10
997 vaesenc %xmm10,%xmm9,%xmm9
998 vmovdqu 144-128(%rdi),%xmm10
1025 vaesenc %xmm10,%xmm9,%xmm9
1026 vmovdqu 160-128(%rdi),%xmm10
/* Speculative last rounds + mask blending, as in phase one. */
1053 vaesenclast %xmm10,%xmm9,%xmm11
1054 vaesenc %xmm10,%xmm9,%xmm9
1055 vmovdqu 176-128(%rdi),%xmm10
1082 vpand %xmm12,%xmm11,%xmm8
1083 vaesenc %xmm10,%xmm9,%xmm9
1084 vmovdqu 192-128(%rdi),%xmm10
1111 vaesenclast %xmm10,%xmm9,%xmm11
1112 vaesenc %xmm10,%xmm9,%xmm9
1113 vmovdqu 208-128(%rdi),%xmm10
1140 vpand %xmm13,%xmm11,%xmm11
1141 vaesenc %xmm10,%xmm9,%xmm9
1142 vmovdqu 224-128(%rdi),%xmm10
1169 vpor %xmm11,%xmm8,%xmm8
1170 vaesenclast %xmm10,%xmm9,%xmm11
1171 vmovdqu 0-128(%rdi),%xmm10
/* Reload loop pointers saved in the 64+... area; emit ciphertext. */
1189 movq 64+0(%rsp),%r12
1190 movq 64+8(%rsp),%r13
1191 movq 64+40(%rsp),%r15
1192 movq 64+48(%rsp),%rsi
1194 vpand %xmm14,%xmm11,%xmm11
1196 vpor %xmm11,%xmm8,%xmm8
1197 vmovdqu %xmm8,(%r12,%r13,1)
/* Loop test against the saved length/end value (64+16(%rsp)); the branch
   itself is not visible in this excerpt. */
1209 cmpq 64+16(%rsp),%r12
/* Epilogue: recover caller %r8 (IV pointer slot) and unwind the frame. */
1222 movq 64+32(%rsp),%r8
1240 .cfi_def_cfa_register %rsp
1244 .size aesni_cbc_sha256_enc_xop,.-aesni_cbc_sha256_enc_xop
/*
 * aesni_cbc_sha256_enc_avx — stitched AES-CBC encryption + SHA-256 using
 * plain AVX.  Structure mirrors the XOP variant: AESENC rounds on the CBC
 * state (%xmm9, round keys streamed through %xmm10 from -128(%rdi)) are
 * interleaved with 4-way SIMD message-schedule expansion (%xmm0-%xmm3) and
 * scalar SHA-256 rounds.  The scalar rounds use `shrdl $n,%rXd,%rXd`, which
 * with identical source/destination is a right-rotate by n — successive
 * rotates stage the SHA-256 Sigma0/Sigma1 amounts.
 *
 * NOTE(review): sampled excerpt — the add/xor/mov glue of the scalar rounds,
 * loop labels/branches, and most of the prologue/epilogue are missing
 * between the lines shown.
 */
1245 .type aesni_cbc_sha256_enc_avx,@function
1247 aesni_cbc_sha256_enc_avx:
1252 .cfi_def_cfa_register %rax
1254 .cfi_offset %rbx,-16
1256 .cfi_offset %rbp,-24
1258 .cfi_offset %r12,-32
1260 .cfi_offset %r13,-40
1262 .cfi_offset %r14,-48
1264 .cfi_offset %r15,-56
/* Spill arguments into the 64+... save area above the 64-byte schedule. */
1274 movq %rsi,64+8(%rsp)
1275 movq %rdx,64+16(%rsp)
1277 movq %r8,64+32(%rsp)
1278 movq %r9,64+40(%rsp)
1279 movq %r10,64+48(%rsp)
1281 .cfi_escape 0x0f,0x06,0x77,0xf8,0x00,0x06,0x23,0x08
/* Mask table and AES round count (schedule pointer %rdi biased by +128). */
1287 leaq K256+544(%rip),%r13
1288 movl 240-128(%rdi),%r14d
1303 vmovdqa 0(%r13,%r14,8),%xmm14
1304 vmovdqa 16(%r13,%r14,8),%xmm13
1305 vmovdqa 32(%r13,%r14,8),%xmm12
1306 vmovdqu 0-128(%rdi),%xmm10
/* Load one 64-byte block, byte-swap dwords, add K256[0..15], stage at
   0..48(%rsp). */
1310 vmovdqa K256+512(%rip),%xmm7
1311 vmovdqu 0(%rsi,%r12,1),%xmm0
1312 vmovdqu 16(%rsi,%r12,1),%xmm1
1313 vmovdqu 32(%rsi,%r12,1),%xmm2
1314 vmovdqu 48(%rsi,%r12,1),%xmm3
1315 vpshufb %xmm7,%xmm0,%xmm0
1316 leaq K256(%rip),%rbp
1317 vpshufb %xmm7,%xmm1,%xmm1
1318 vpshufb %xmm7,%xmm2,%xmm2
1319 vpaddd 0(%rbp),%xmm0,%xmm4
1320 vpshufb %xmm7,%xmm3,%xmm3
1321 vpaddd 32(%rbp),%xmm1,%xmm5
1322 vpaddd 64(%rbp),%xmm2,%xmm6
1323 vpaddd 96(%rbp),%xmm3,%xmm7
1324 vmovdqa %xmm4,0(%rsp)
1326 vmovdqa %xmm5,16(%rsp)
1328 vmovdqa %xmm6,32(%rsp)
1330 vmovdqa %xmm7,48(%rsp)
/* %xmm9 = current CBC chaining value (loaded from (%r12)); save pointer. */
1337 vmovdqu (%r12),%xmm9
1338 movq %r12,64+0(%rsp)
/* ---- schedule expansion for %xmm0, interleaved with scalar rounds ---- */
1339 vpalignr $4,%xmm0,%xmm1,%xmm4
1340 shrdl $14,%r13d,%r13d
1343 vpalignr $4,%xmm2,%xmm3,%xmm7
1345 shrdl $9,%r14d,%r14d
1347 vpsrld $7,%xmm4,%xmm6
1348 shrdl $5,%r13d,%r13d
1351 vpaddd %xmm7,%xmm0,%xmm0
/* CBC: xor round-0 key into the cipher state. */
1352 vpxor %xmm10,%xmm9,%xmm9
1353 vmovdqu 16-128(%rdi),%xmm10
1357 vpsrld $3,%xmm4,%xmm7
1358 shrdl $11,%r14d,%r14d
1361 vpslld $14,%xmm4,%xmm5
1362 shrdl $6,%r13d,%r13d
1365 vpxor %xmm6,%xmm7,%xmm4
1369 vpshufd $250,%xmm3,%xmm7
1371 shrdl $2,%r14d,%r14d
1373 vpsrld $11,%xmm6,%xmm6
1376 shrdl $14,%r13d,%r13d
1377 vpxor %xmm5,%xmm4,%xmm4
1381 vpslld $11,%xmm5,%xmm5
1382 shrdl $9,%r14d,%r14d
1384 shrdl $5,%r13d,%r13d
1385 vpxor %xmm6,%xmm4,%xmm4
/* CBC: xor the plaintext block (%xmm8) into the state. */
1388 vpxor %xmm8,%xmm9,%xmm9
1390 vpsrld $10,%xmm7,%xmm6
1393 shrdl $11,%r14d,%r14d
1394 vpxor %xmm5,%xmm4,%xmm4
1397 shrdl $6,%r13d,%r13d
1398 vpsrlq $17,%xmm7,%xmm7
1402 vpaddd %xmm4,%xmm0,%xmm0
1406 vpxor %xmm7,%xmm6,%xmm6
1407 shrdl $2,%r14d,%r14d
1410 vpsrlq $2,%xmm7,%xmm7
1412 shrdl $14,%r13d,%r13d
1414 vpxor %xmm7,%xmm6,%xmm6
1417 shrdl $9,%r14d,%r14d
1418 vpshufd $132,%xmm6,%xmm6
1420 shrdl $5,%r13d,%r13d
1422 vpsrldq $8,%xmm6,%xmm6
/* AES round; next round key loaded. */
1424 vaesenc %xmm10,%xmm9,%xmm9
1425 vmovdqu 32-128(%rdi),%xmm10
1428 vpaddd %xmm6,%xmm0,%xmm0
1430 shrdl $11,%r14d,%r14d
1432 vpshufd $80,%xmm0,%xmm7
1434 shrdl $6,%r13d,%r13d
1436 vpsrld $10,%xmm7,%xmm6
1440 vpsrlq $17,%xmm7,%xmm7
1443 shrdl $2,%r14d,%r14d
1444 vpxor %xmm7,%xmm6,%xmm6
1448 vpsrlq $2,%xmm7,%xmm7
1449 shrdl $14,%r13d,%r13d
1452 vpxor %xmm7,%xmm6,%xmm6
1454 shrdl $9,%r14d,%r14d
1456 vpshufd $232,%xmm6,%xmm6
1457 shrdl $5,%r13d,%r13d
1460 vpslldq $8,%xmm6,%xmm6
1461 vaesenc %xmm10,%xmm9,%xmm9
1462 vmovdqu 48-128(%rdi),%xmm10
1466 vpaddd %xmm6,%xmm0,%xmm0
1467 shrdl $11,%r14d,%r14d
1470 vpaddd 0(%rbp),%xmm0,%xmm6
1471 shrdl $6,%r13d,%r13d
1478 shrdl $2,%r14d,%r14d
1482 vmovdqa %xmm6,0(%rsp)
/* ---- expansion for %xmm1 ---- */
1483 vpalignr $4,%xmm1,%xmm2,%xmm4
1484 shrdl $14,%r13d,%r13d
1487 vpalignr $4,%xmm3,%xmm0,%xmm7
1489 shrdl $9,%r14d,%r14d
1491 vpsrld $7,%xmm4,%xmm6
1492 shrdl $5,%r13d,%r13d
1495 vpaddd %xmm7,%xmm1,%xmm1
1496 vaesenc %xmm10,%xmm9,%xmm9
1497 vmovdqu 64-128(%rdi),%xmm10
1501 vpsrld $3,%xmm4,%xmm7
1502 shrdl $11,%r14d,%r14d
1505 vpslld $14,%xmm4,%xmm5
1506 shrdl $6,%r13d,%r13d
1509 vpxor %xmm6,%xmm7,%xmm4
1513 vpshufd $250,%xmm0,%xmm7
1515 shrdl $2,%r14d,%r14d
1517 vpsrld $11,%xmm6,%xmm6
1520 shrdl $14,%r13d,%r13d
1521 vpxor %xmm5,%xmm4,%xmm4
1525 vpslld $11,%xmm5,%xmm5
1526 shrdl $9,%r14d,%r14d
1528 shrdl $5,%r13d,%r13d
1529 vpxor %xmm6,%xmm4,%xmm4
1532 vaesenc %xmm10,%xmm9,%xmm9
1533 vmovdqu 80-128(%rdi),%xmm10
1535 vpsrld $10,%xmm7,%xmm6
1538 shrdl $11,%r14d,%r14d
1539 vpxor %xmm5,%xmm4,%xmm4
1542 shrdl $6,%r13d,%r13d
1543 vpsrlq $17,%xmm7,%xmm7
1547 vpaddd %xmm4,%xmm1,%xmm1
1551 vpxor %xmm7,%xmm6,%xmm6
1552 shrdl $2,%r14d,%r14d
1555 vpsrlq $2,%xmm7,%xmm7
1557 shrdl $14,%r13d,%r13d
1559 vpxor %xmm7,%xmm6,%xmm6
1562 shrdl $9,%r14d,%r14d
1563 vpshufd $132,%xmm6,%xmm6
1565 shrdl $5,%r13d,%r13d
1567 vpsrldq $8,%xmm6,%xmm6
1569 vaesenc %xmm10,%xmm9,%xmm9
1570 vmovdqu 96-128(%rdi),%xmm10
1573 vpaddd %xmm6,%xmm1,%xmm1
1575 shrdl $11,%r14d,%r14d
1577 vpshufd $80,%xmm1,%xmm7
1579 shrdl $6,%r13d,%r13d
1581 vpsrld $10,%xmm7,%xmm6
1585 vpsrlq $17,%xmm7,%xmm7
1588 shrdl $2,%r14d,%r14d
1589 vpxor %xmm7,%xmm6,%xmm6
1593 vpsrlq $2,%xmm7,%xmm7
1594 shrdl $14,%r13d,%r13d
1597 vpxor %xmm7,%xmm6,%xmm6
1599 shrdl $9,%r14d,%r14d
1601 vpshufd $232,%xmm6,%xmm6
1602 shrdl $5,%r13d,%r13d
1605 vpslldq $8,%xmm6,%xmm6
1606 vaesenc %xmm10,%xmm9,%xmm9
1607 vmovdqu 112-128(%rdi),%xmm10
1611 vpaddd %xmm6,%xmm1,%xmm1
1612 shrdl $11,%r14d,%r14d
1615 vpaddd 32(%rbp),%xmm1,%xmm6
1616 shrdl $6,%r13d,%r13d
1623 shrdl $2,%r14d,%r14d
1627 vmovdqa %xmm6,16(%rsp)
/* ---- expansion for %xmm2 ---- */
1628 vpalignr $4,%xmm2,%xmm3,%xmm4
1629 shrdl $14,%r13d,%r13d
1632 vpalignr $4,%xmm0,%xmm1,%xmm7
1634 shrdl $9,%r14d,%r14d
1636 vpsrld $7,%xmm4,%xmm6
1637 shrdl $5,%r13d,%r13d
1640 vpaddd %xmm7,%xmm2,%xmm2
1641 vaesenc %xmm10,%xmm9,%xmm9
1642 vmovdqu 128-128(%rdi),%xmm10
1646 vpsrld $3,%xmm4,%xmm7
1647 shrdl $11,%r14d,%r14d
1650 vpslld $14,%xmm4,%xmm5
1651 shrdl $6,%r13d,%r13d
1654 vpxor %xmm6,%xmm7,%xmm4
1658 vpshufd $250,%xmm1,%xmm7
1660 shrdl $2,%r14d,%r14d
1662 vpsrld $11,%xmm6,%xmm6
1665 shrdl $14,%r13d,%r13d
1666 vpxor %xmm5,%xmm4,%xmm4
1670 vpslld $11,%xmm5,%xmm5
1671 shrdl $9,%r14d,%r14d
1673 shrdl $5,%r13d,%r13d
1674 vpxor %xmm6,%xmm4,%xmm4
1677 vaesenc %xmm10,%xmm9,%xmm9
1678 vmovdqu 144-128(%rdi),%xmm10
1680 vpsrld $10,%xmm7,%xmm6
1683 shrdl $11,%r14d,%r14d
1684 vpxor %xmm5,%xmm4,%xmm4
1687 shrdl $6,%r13d,%r13d
1688 vpsrlq $17,%xmm7,%xmm7
1692 vpaddd %xmm4,%xmm2,%xmm2
1696 vpxor %xmm7,%xmm6,%xmm6
1697 shrdl $2,%r14d,%r14d
1700 vpsrlq $2,%xmm7,%xmm7
1702 shrdl $14,%r13d,%r13d
1704 vpxor %xmm7,%xmm6,%xmm6
1707 shrdl $9,%r14d,%r14d
1708 vpshufd $132,%xmm6,%xmm6
1710 shrdl $5,%r13d,%r13d
1712 vpsrldq $8,%xmm6,%xmm6
1714 vaesenc %xmm10,%xmm9,%xmm9
1715 vmovdqu 160-128(%rdi),%xmm10
1718 vpaddd %xmm6,%xmm2,%xmm2
1720 shrdl $11,%r14d,%r14d
1722 vpshufd $80,%xmm2,%xmm7
1724 shrdl $6,%r13d,%r13d
1726 vpsrld $10,%xmm7,%xmm6
1730 vpsrlq $17,%xmm7,%xmm7
1733 shrdl $2,%r14d,%r14d
1734 vpxor %xmm7,%xmm6,%xmm6
1738 vpsrlq $2,%xmm7,%xmm7
1739 shrdl $14,%r13d,%r13d
1742 vpxor %xmm7,%xmm6,%xmm6
1744 shrdl $9,%r14d,%r14d
1746 vpshufd $232,%xmm6,%xmm6
1747 shrdl $5,%r13d,%r13d
1750 vpslldq $8,%xmm6,%xmm6
/* Speculative last round at the 128-bit key boundary; correct result is
   blended later via the xmm12/xmm13/xmm14 masks. */
1751 vaesenclast %xmm10,%xmm9,%xmm11
1752 vaesenc %xmm10,%xmm9,%xmm9
1753 vmovdqu 176-128(%rdi),%xmm10
1757 vpaddd %xmm6,%xmm2,%xmm2
1758 shrdl $11,%r14d,%r14d
1761 vpaddd 64(%rbp),%xmm2,%xmm6
1762 shrdl $6,%r13d,%r13d
1769 shrdl $2,%r14d,%r14d
1773 vmovdqa %xmm6,32(%rsp)
/* ---- expansion for %xmm3 ---- */
1774 vpalignr $4,%xmm3,%xmm0,%xmm4
1775 shrdl $14,%r13d,%r13d
1778 vpalignr $4,%xmm1,%xmm2,%xmm7
1780 shrdl $9,%r14d,%r14d
1782 vpsrld $7,%xmm4,%xmm6
1783 shrdl $5,%r13d,%r13d
1786 vpaddd %xmm7,%xmm3,%xmm3
1787 vpand %xmm12,%xmm11,%xmm8
1788 vaesenc %xmm10,%xmm9,%xmm9
1789 vmovdqu 192-128(%rdi),%xmm10
1793 vpsrld $3,%xmm4,%xmm7
1794 shrdl $11,%r14d,%r14d
1797 vpslld $14,%xmm4,%xmm5
1798 shrdl $6,%r13d,%r13d
1801 vpxor %xmm6,%xmm7,%xmm4
1805 vpshufd $250,%xmm2,%xmm7
1807 shrdl $2,%r14d,%r14d
1809 vpsrld $11,%xmm6,%xmm6
1812 shrdl $14,%r13d,%r13d
1813 vpxor %xmm5,%xmm4,%xmm4
1817 vpslld $11,%xmm5,%xmm5
1818 shrdl $9,%r14d,%r14d
1820 shrdl $5,%r13d,%r13d
1821 vpxor %xmm6,%xmm4,%xmm4
/* Speculative last round at the 192-bit key boundary. */
1824 vaesenclast %xmm10,%xmm9,%xmm11
1825 vaesenc %xmm10,%xmm9,%xmm9
1826 vmovdqu 208-128(%rdi),%xmm10
1828 vpsrld $10,%xmm7,%xmm6
1831 shrdl $11,%r14d,%r14d
1832 vpxor %xmm5,%xmm4,%xmm4
1835 shrdl $6,%r13d,%r13d
1836 vpsrlq $17,%xmm7,%xmm7
1840 vpaddd %xmm4,%xmm3,%xmm3
1844 vpxor %xmm7,%xmm6,%xmm6
1845 shrdl $2,%r14d,%r14d
1848 vpsrlq $2,%xmm7,%xmm7
1850 shrdl $14,%r13d,%r13d
1852 vpxor %xmm7,%xmm6,%xmm6
1855 shrdl $9,%r14d,%r14d
1856 vpshufd $132,%xmm6,%xmm6
1858 shrdl $5,%r13d,%r13d
1860 vpsrldq $8,%xmm6,%xmm6
1862 vpand %xmm13,%xmm11,%xmm11
1863 vaesenc %xmm10,%xmm9,%xmm9
1864 vmovdqu 224-128(%rdi),%xmm10
1867 vpaddd %xmm6,%xmm3,%xmm3
1869 shrdl $11,%r14d,%r14d
1871 vpshufd $80,%xmm3,%xmm7
1873 shrdl $6,%r13d,%r13d
1875 vpsrld $10,%xmm7,%xmm6
1879 vpsrlq $17,%xmm7,%xmm7
1882 shrdl $2,%r14d,%r14d
1883 vpxor %xmm7,%xmm6,%xmm6
1887 vpsrlq $2,%xmm7,%xmm7
1888 shrdl $14,%r13d,%r13d
1891 vpxor %xmm7,%xmm6,%xmm6
1893 shrdl $9,%r14d,%r14d
1895 vpshufd $232,%xmm6,%xmm6
1896 shrdl $5,%r13d,%r13d
1899 vpslldq $8,%xmm6,%xmm6
/* Final last round (256-bit key path); blend candidates and store block. */
1900 vpor %xmm11,%xmm8,%xmm8
1901 vaesenclast %xmm10,%xmm9,%xmm11
1902 vmovdqu 0-128(%rdi),%xmm10
1906 vpaddd %xmm6,%xmm3,%xmm3
1907 shrdl $11,%r14d,%r14d
1910 vpaddd 96(%rbp),%xmm3,%xmm6
1911 shrdl $6,%r13d,%r13d
1918 shrdl $2,%r14d,%r14d
1922 vmovdqa %xmm6,48(%rsp)
1923 movq 64+0(%rsp),%r12
1924 vpand %xmm14,%xmm11,%xmm11
1925 movq 64+8(%rsp),%r15
1926 vpor %xmm11,%xmm8,%xmm8
1927 vmovdqu %xmm8,(%r15,%r12,1)
/* Next iteration: reload CBC state pointer/value. */
1931 vmovdqu (%r12),%xmm9
1932 movq %r12,64+0(%rsp)
/* ---- second phase: final 16 scalar SHA rounds + AES pipeline on the
   precomputed schedule (scalar glue elided in this excerpt) ---- */
1933 shrdl $14,%r13d,%r13d
1937 shrdl $9,%r14d,%r14d
1939 shrdl $5,%r13d,%r13d
1942 vpxor %xmm10,%xmm9,%xmm9
1943 vmovdqu 16-128(%rdi),%xmm10
1947 shrdl $11,%r14d,%r14d
1950 shrdl $6,%r13d,%r13d
1957 shrdl $2,%r14d,%r14d
1961 shrdl $14,%r13d,%r13d
1965 shrdl $9,%r14d,%r14d
1967 shrdl $5,%r13d,%r13d
1970 vpxor %xmm8,%xmm9,%xmm9
1974 shrdl $11,%r14d,%r14d
1977 shrdl $6,%r13d,%r13d
1984 shrdl $2,%r14d,%r14d
1988 shrdl $14,%r13d,%r13d
1992 shrdl $9,%r14d,%r14d
1994 shrdl $5,%r13d,%r13d
1997 vaesenc %xmm10,%xmm9,%xmm9
1998 vmovdqu 32-128(%rdi),%xmm10
2002 shrdl $11,%r14d,%r14d
2005 shrdl $6,%r13d,%r13d
2012 shrdl $2,%r14d,%r14d
2016 shrdl $14,%r13d,%r13d
2020 shrdl $9,%r14d,%r14d
2022 shrdl $5,%r13d,%r13d
2025 vaesenc %xmm10,%xmm9,%xmm9
2026 vmovdqu 48-128(%rdi),%xmm10
2030 shrdl $11,%r14d,%r14d
2033 shrdl $6,%r13d,%r13d
2040 shrdl $2,%r14d,%r14d
2044 shrdl $14,%r13d,%r13d
2048 shrdl $9,%r14d,%r14d
2050 shrdl $5,%r13d,%r13d
2053 vaesenc %xmm10,%xmm9,%xmm9
2054 vmovdqu 64-128(%rdi),%xmm10
2058 shrdl $11,%r14d,%r14d
2061 shrdl $6,%r13d,%r13d
2068 shrdl $2,%r14d,%r14d
2072 shrdl $14,%r13d,%r13d
2076 shrdl $9,%r14d,%r14d
2078 shrdl $5,%r13d,%r13d
2081 vaesenc %xmm10,%xmm9,%xmm9
2082 vmovdqu 80-128(%rdi),%xmm10
2086 shrdl $11,%r14d,%r14d
2089 shrdl $6,%r13d,%r13d
2096 shrdl $2,%r14d,%r14d
2100 shrdl $14,%r13d,%r13d
2104 shrdl $9,%r14d,%r14d
2106 shrdl $5,%r13d,%r13d
2109 vaesenc %xmm10,%xmm9,%xmm9
2110 vmovdqu 96-128(%rdi),%xmm10
2114 shrdl $11,%r14d,%r14d
2117 shrdl $6,%r13d,%r13d
2124 shrdl $2,%r14d,%r14d
2128 shrdl $14,%r13d,%r13d
2132 shrdl $9,%r14d,%r14d
2134 shrdl $5,%r13d,%r13d
2137 vaesenc %xmm10,%xmm9,%xmm9
2138 vmovdqu 112-128(%rdi),%xmm10
2142 shrdl $11,%r14d,%r14d
2145 shrdl $6,%r13d,%r13d
2152 shrdl $2,%r14d,%r14d
2156 shrdl $14,%r13d,%r13d
2160 shrdl $9,%r14d,%r14d
2162 shrdl $5,%r13d,%r13d
2165 vaesenc %xmm10,%xmm9,%xmm9
2166 vmovdqu 128-128(%rdi),%xmm10
2170 shrdl $11,%r14d,%r14d
2173 shrdl $6,%r13d,%r13d
2180 shrdl $2,%r14d,%r14d
2184 shrdl $14,%r13d,%r13d
2188 shrdl $9,%r14d,%r14d
2190 shrdl $5,%r13d,%r13d
2193 vaesenc %xmm10,%xmm9,%xmm9
2194 vmovdqu 144-128(%rdi),%xmm10
2198 shrdl $11,%r14d,%r14d
2201 shrdl $6,%r13d,%r13d
2208 shrdl $2,%r14d,%r14d
2212 shrdl $14,%r13d,%r13d
2216 shrdl $9,%r14d,%r14d
2218 shrdl $5,%r13d,%r13d
2221 vaesenc %xmm10,%xmm9,%xmm9
2222 vmovdqu 160-128(%rdi),%xmm10
2226 shrdl $11,%r14d,%r14d
2229 shrdl $6,%r13d,%r13d
2236 shrdl $2,%r14d,%r14d
2240 shrdl $14,%r13d,%r13d
2244 shrdl $9,%r14d,%r14d
2246 shrdl $5,%r13d,%r13d
/* Speculative last rounds + mask blending, as in phase one. */
2249 vaesenclast %xmm10,%xmm9,%xmm11
2250 vaesenc %xmm10,%xmm9,%xmm9
2251 vmovdqu 176-128(%rdi),%xmm10
2255 shrdl $11,%r14d,%r14d
2258 shrdl $6,%r13d,%r13d
2265 shrdl $2,%r14d,%r14d
2269 shrdl $14,%r13d,%r13d
2273 shrdl $9,%r14d,%r14d
2275 shrdl $5,%r13d,%r13d
2278 vpand %xmm12,%xmm11,%xmm8
2279 vaesenc %xmm10,%xmm9,%xmm9
2280 vmovdqu 192-128(%rdi),%xmm10
2284 shrdl $11,%r14d,%r14d
2287 shrdl $6,%r13d,%r13d
2294 shrdl $2,%r14d,%r14d
2298 shrdl $14,%r13d,%r13d
2302 shrdl $9,%r14d,%r14d
2304 shrdl $5,%r13d,%r13d
2307 vaesenclast %xmm10,%xmm9,%xmm11
2308 vaesenc %xmm10,%xmm9,%xmm9
2309 vmovdqu 208-128(%rdi),%xmm10
2313 shrdl $11,%r14d,%r14d
2316 shrdl $6,%r13d,%r13d
2323 shrdl $2,%r14d,%r14d
2327 shrdl $14,%r13d,%r13d
2331 shrdl $9,%r14d,%r14d
2333 shrdl $5,%r13d,%r13d
2336 vpand %xmm13,%xmm11,%xmm11
2337 vaesenc %xmm10,%xmm9,%xmm9
2338 vmovdqu 224-128(%rdi),%xmm10
2342 shrdl $11,%r14d,%r14d
2345 shrdl $6,%r13d,%r13d
2352 shrdl $2,%r14d,%r14d
2356 shrdl $14,%r13d,%r13d
2360 shrdl $9,%r14d,%r14d
2362 shrdl $5,%r13d,%r13d
2365 vpor %xmm11,%xmm8,%xmm8
2366 vaesenclast %xmm10,%xmm9,%xmm11
2367 vmovdqu 0-128(%rdi),%xmm10
2371 shrdl $11,%r14d,%r14d
2374 shrdl $6,%r13d,%r13d
2381 shrdl $2,%r14d,%r14d
/* Reload loop pointers from the save area; emit ciphertext block. */
2385 movq 64+0(%rsp),%r12
2386 movq 64+8(%rsp),%r13
2387 movq 64+40(%rsp),%r15
2388 movq 64+48(%rsp),%rsi
2390 vpand %xmm14,%xmm11,%xmm11
2392 vpor %xmm11,%xmm8,%xmm8
2393 vmovdqu %xmm8,(%r12,%r13,1)
/* Loop test against the saved end value; branch not visible here. */
2405 cmpq 64+16(%rsp),%r12
/* Epilogue: recover caller %r8 slot and unwind. */
2417 movq 64+32(%rsp),%r8
2435 .cfi_def_cfa_register %rsp
2439 .size aesni_cbc_sha256_enc_avx,.-aesni_cbc_sha256_enc_avx
2440 .type aesni_cbc_sha256_enc_avx2,@function
2442 aesni_cbc_sha256_enc_avx2:
2447 .cfi_def_cfa_register %rax
2449 .cfi_offset %rbx,-16
2451 .cfi_offset %rbp,-24
2453 .cfi_offset %r12,-32
2455 .cfi_offset %r13,-40
2457 .cfi_offset %r14,-48
2459 .cfi_offset %r15,-56
2471 movq %rdx,64+16(%rsp)
2473 movq %r8,64+32(%rsp)
2474 movq %r9,64+40(%rsp)
2475 movq %r10,64+48(%rsp)
2477 .cfi_escape 0x0f,0x06,0x77,0xf8,0x00,0x06,0x23,0x08
2482 vpinsrq $1,%rsi,%xmm15,%xmm15
2484 leaq K256+544(%rip),%r12
2485 movl 240-128(%rdi),%r14d
2491 vmovdqa 0(%r12,%r14,8),%xmm14
2492 vmovdqa 16(%r12,%r14,8),%xmm13
2493 vmovdqa 32(%r12,%r14,8),%xmm12
2497 leaq (%rsi,%r13,1),%r12
2507 vmovdqu 0-128(%rdi),%xmm10
2511 vmovdqa K256+512(%rip),%ymm7
2512 vmovdqu -64+0(%rsi,%r13,1),%xmm0
2513 vmovdqu -64+16(%rsi,%r13,1),%xmm1
2514 vmovdqu -64+32(%rsi,%r13,1),%xmm2
2515 vmovdqu -64+48(%rsi,%r13,1),%xmm3
2517 vinserti128 $1,(%r12),%ymm0,%ymm0
2518 vinserti128 $1,16(%r12),%ymm1,%ymm1
2519 vpshufb %ymm7,%ymm0,%ymm0
2520 vinserti128 $1,32(%r12),%ymm2,%ymm2
2521 vpshufb %ymm7,%ymm1,%ymm1
2522 vinserti128 $1,48(%r12),%ymm3,%ymm3
2524 leaq K256(%rip),%rbp
2525 vpshufb %ymm7,%ymm2,%ymm2
2527 vpaddd 0(%rbp),%ymm0,%ymm4
2528 vpshufb %ymm7,%ymm3,%ymm3
2529 vpaddd 32(%rbp),%ymm1,%ymm5
2530 vpaddd 64(%rbp),%ymm2,%ymm6
2531 vpaddd 96(%rbp),%ymm3,%ymm7
2532 vmovdqa %ymm4,0(%rsp)
2534 vmovdqa %ymm5,32(%rsp)
2543 .cfi_escape 0x0f,0x05,0x77,0x78,0x06,0x23,0x08
2545 vmovdqa %ymm6,0(%rsp)
2547 vmovdqa %ymm7,32(%rsp)
2554 vmovdqu (%r13),%xmm9
2555 vpinsrq $0,%r13,%xmm15,%xmm15
2557 .cfi_escape 0x0f,0x05,0x77,0x38,0x06,0x23,0x08
2560 .cfi_escape 0x0f,0x05,0x77,0x00,0x06,0x23,0x08
2562 .cfi_escape 0x0f,0x05,0x77,0x78,0x06,0x23,0x08
2563 vpalignr $4,%ymm0,%ymm1,%ymm4
2564 addl 0+128(%rsp),%r11d
2566 rorxl $25,%r8d,%r13d
2567 vpalignr $4,%ymm2,%ymm3,%ymm7
2568 rorxl $11,%r8d,%r15d
2569 leal (%rax,%r14,1),%eax
2570 leal (%r11,%r12,1),%r11d
2571 vpsrld $7,%ymm4,%ymm6
2572 andnl %r10d,%r8d,%r12d
2575 vpaddd %ymm7,%ymm0,%ymm0
2576 leal (%r11,%r12,1),%r11d
2579 vpsrld $3,%ymm4,%ymm7
2580 rorxl $22,%eax,%r12d
2581 leal (%r11,%r13,1),%r11d
2583 vpslld $14,%ymm4,%ymm5
2584 rorxl $13,%eax,%r14d
2586 leal (%rdx,%r11,1),%edx
2587 vpxor %ymm6,%ymm7,%ymm4
2589 vpxor %xmm10,%xmm9,%xmm9
2590 vmovdqu 16-128(%rdi),%xmm10
2593 vpshufd $250,%ymm3,%ymm7
2595 leal (%r11,%rsi,1),%r11d
2597 vpsrld $11,%ymm6,%ymm6
2598 addl 4+128(%rsp),%r10d
2600 rorxl $25,%edx,%r13d
2601 vpxor %ymm5,%ymm4,%ymm4
2603 leal (%r11,%r14,1),%r11d
2604 leal (%r10,%r12,1),%r10d
2605 vpslld $11,%ymm5,%ymm5
2606 andnl %r9d,%edx,%r12d
2609 vpxor %ymm6,%ymm4,%ymm4
2610 leal (%r10,%r12,1),%r10d
2613 vpsrld $10,%ymm7,%ymm6
2614 rorxl $22,%r11d,%r12d
2615 leal (%r10,%r13,1),%r10d
2617 vpxor %ymm5,%ymm4,%ymm4
2618 rorxl $13,%r11d,%r14d
2619 rorxl $2,%r11d,%r13d
2620 leal (%rcx,%r10,1),%ecx
2621 vpsrlq $17,%ymm7,%ymm7
2623 vpxor %xmm8,%xmm9,%xmm9
2626 vpaddd %ymm4,%ymm0,%ymm0
2628 leal (%r10,%r15,1),%r10d
2630 vpxor %ymm7,%ymm6,%ymm6
2631 addl 8+128(%rsp),%r9d
2633 rorxl $25,%ecx,%r13d
2634 vpsrlq $2,%ymm7,%ymm7
2635 rorxl $11,%ecx,%r15d
2636 leal (%r10,%r14,1),%r10d
2637 leal (%r9,%r12,1),%r9d
2638 vpxor %ymm7,%ymm6,%ymm6
2639 andnl %r8d,%ecx,%r12d
2642 vpshufd $132,%ymm6,%ymm6
2643 leal (%r9,%r12,1),%r9d
2646 vpsrldq $8,%ymm6,%ymm6
2647 rorxl $22,%r10d,%r12d
2648 leal (%r9,%r13,1),%r9d
2650 vpaddd %ymm6,%ymm0,%ymm0
2651 rorxl $13,%r10d,%r14d
2652 rorxl $2,%r10d,%r13d
2653 leal (%rbx,%r9,1),%ebx
2654 vpshufd $80,%ymm0,%ymm7
2656 vaesenc %xmm10,%xmm9,%xmm9
2657 vmovdqu 32-128(%rdi),%xmm10
2660 vpsrld $10,%ymm7,%ymm6
2662 leal (%r9,%rsi,1),%r9d
2664 vpsrlq $17,%ymm7,%ymm7
2665 addl 12+128(%rsp),%r8d
2667 rorxl $25,%ebx,%r13d
2668 vpxor %ymm7,%ymm6,%ymm6
2670 leal (%r9,%r14,1),%r9d
2671 leal (%r8,%r12,1),%r8d
2672 vpsrlq $2,%ymm7,%ymm7
2673 andnl %edx,%ebx,%r12d
2676 vpxor %ymm7,%ymm6,%ymm6
2677 leal (%r8,%r12,1),%r8d
2680 vpshufd $232,%ymm6,%ymm6
2681 rorxl $22,%r9d,%r12d
2682 leal (%r8,%r13,1),%r8d
2684 vpslldq $8,%ymm6,%ymm6
2685 rorxl $13,%r9d,%r14d
2687 leal (%rax,%r8,1),%eax
2688 vpaddd %ymm6,%ymm0,%ymm0
2690 vaesenc %xmm10,%xmm9,%xmm9
2691 vmovdqu 48-128(%rdi),%xmm10
2694 vpaddd 0(%rbp),%ymm0,%ymm6
2696 leal (%r8,%r15,1),%r8d
2698 vmovdqa %ymm6,0(%rsp)
2699 vpalignr $4,%ymm1,%ymm2,%ymm4
2700 addl 32+128(%rsp),%edx
2702 rorxl $25,%eax,%r13d
2703 vpalignr $4,%ymm3,%ymm0,%ymm7
2704 rorxl $11,%eax,%r15d
2705 leal (%r8,%r14,1),%r8d
2706 leal (%rdx,%r12,1),%edx
2707 vpsrld $7,%ymm4,%ymm6
2708 andnl %ecx,%eax,%r12d
2711 vpaddd %ymm7,%ymm1,%ymm1
2712 leal (%rdx,%r12,1),%edx
2715 vpsrld $3,%ymm4,%ymm7
2716 rorxl $22,%r8d,%r12d
2717 leal (%rdx,%r13,1),%edx
2719 vpslld $14,%ymm4,%ymm5
2720 rorxl $13,%r8d,%r14d
2722 leal (%r11,%rdx,1),%r11d
2723 vpxor %ymm6,%ymm7,%ymm4
2725 vaesenc %xmm10,%xmm9,%xmm9
2726 vmovdqu 64-128(%rdi),%xmm10
2729 vpshufd $250,%ymm0,%ymm7
2731 leal (%rdx,%rsi,1),%edx
2733 vpsrld $11,%ymm6,%ymm6
2734 addl 36+128(%rsp),%ecx
2736 rorxl $25,%r11d,%r13d
2737 vpxor %ymm5,%ymm4,%ymm4
2738 rorxl $11,%r11d,%esi
2739 leal (%rdx,%r14,1),%edx
2740 leal (%rcx,%r12,1),%ecx
2741 vpslld $11,%ymm5,%ymm5
2742 andnl %ebx,%r11d,%r12d
2744 rorxl $6,%r11d,%r14d
2745 vpxor %ymm6,%ymm4,%ymm4
2746 leal (%rcx,%r12,1),%ecx
2749 vpsrld $10,%ymm7,%ymm6
2750 rorxl $22,%edx,%r12d
2751 leal (%rcx,%r13,1),%ecx
2753 vpxor %ymm5,%ymm4,%ymm4
2754 rorxl $13,%edx,%r14d
2756 leal (%r10,%rcx,1),%r10d
2757 vpsrlq $17,%ymm7,%ymm7
2759 vaesenc %xmm10,%xmm9,%xmm9
2760 vmovdqu 80-128(%rdi),%xmm10
2763 vpaddd %ymm4,%ymm1,%ymm1
2765 leal (%rcx,%r15,1),%ecx
2767 vpxor %ymm7,%ymm6,%ymm6
2768 addl 40+128(%rsp),%ebx
2770 rorxl $25,%r10d,%r13d
2771 vpsrlq $2,%ymm7,%ymm7
2772 rorxl $11,%r10d,%r15d
2773 leal (%rcx,%r14,1),%ecx
2774 leal (%rbx,%r12,1),%ebx
2775 vpxor %ymm7,%ymm6,%ymm6
2776 andnl %eax,%r10d,%r12d
2778 rorxl $6,%r10d,%r14d
2779 vpshufd $132,%ymm6,%ymm6
2780 leal (%rbx,%r12,1),%ebx
2783 vpsrldq $8,%ymm6,%ymm6
2784 rorxl $22,%ecx,%r12d
2785 leal (%rbx,%r13,1),%ebx
2787 vpaddd %ymm6,%ymm1,%ymm1
2788 rorxl $13,%ecx,%r14d
2790 leal (%r9,%rbx,1),%r9d
2791 vpshufd $80,%ymm1,%ymm7
2793 vaesenc %xmm10,%xmm9,%xmm9
2794 vmovdqu 96-128(%rdi),%xmm10
2797 vpsrld $10,%ymm7,%ymm6
2799 leal (%rbx,%rsi,1),%ebx
2801 vpsrlq $17,%ymm7,%ymm7
2802 addl 44+128(%rsp),%eax
2804 rorxl $25,%r9d,%r13d
2805 vpxor %ymm7,%ymm6,%ymm6
2807 leal (%rbx,%r14,1),%ebx
2808 leal (%rax,%r12,1),%eax
2809 vpsrlq $2,%ymm7,%ymm7
2810 andnl %r11d,%r9d,%r12d
2813 vpxor %ymm7,%ymm6,%ymm6
2814 leal (%rax,%r12,1),%eax
2817 vpshufd $232,%ymm6,%ymm6
2818 rorxl $22,%ebx,%r12d
2819 leal (%rax,%r13,1),%eax
2821 vpslldq $8,%ymm6,%ymm6
2822 rorxl $13,%ebx,%r14d
2824 leal (%r8,%rax,1),%r8d
2825 vpaddd %ymm6,%ymm1,%ymm1
2827 vaesenc %xmm10,%xmm9,%xmm9
2828 vmovdqu 112-128(%rdi),%xmm10
2831 vpaddd 32(%rbp),%ymm1,%ymm6
2833 leal (%rax,%r15,1),%eax
2835 vmovdqa %ymm6,32(%rsp)
2837 .cfi_escape 0x0f,0x05,0x77,0x38,0x06,0x23,0x08
2840 .cfi_escape 0x0f,0x05,0x77,0x00,0x06,0x23,0x08
2842 .cfi_escape 0x0f,0x05,0x77,0x78,0x06,0x23,0x08
2843 vpalignr $4,%ymm2,%ymm3,%ymm4
2844 addl 0+128(%rsp),%r11d
2846 rorxl $25,%r8d,%r13d
2847 vpalignr $4,%ymm0,%ymm1,%ymm7
2848 rorxl $11,%r8d,%r15d
2849 leal (%rax,%r14,1),%eax
2850 leal (%r11,%r12,1),%r11d
2851 vpsrld $7,%ymm4,%ymm6
2852 andnl %r10d,%r8d,%r12d
2855 vpaddd %ymm7,%ymm2,%ymm2
2856 leal (%r11,%r12,1),%r11d
2859 vpsrld $3,%ymm4,%ymm7
2860 rorxl $22,%eax,%r12d
2861 leal (%r11,%r13,1),%r11d
2863 vpslld $14,%ymm4,%ymm5
2864 rorxl $13,%eax,%r14d
2866 leal (%rdx,%r11,1),%edx
2867 vpxor %ymm6,%ymm7,%ymm4
2869 vaesenc %xmm10,%xmm9,%xmm9
2870 vmovdqu 128-128(%rdi),%xmm10
2873 vpshufd $250,%ymm1,%ymm7
2875 leal (%r11,%rsi,1),%r11d
2877 vpsrld $11,%ymm6,%ymm6
2878 addl 4+128(%rsp),%r10d
2880 rorxl $25,%edx,%r13d
2881 vpxor %ymm5,%ymm4,%ymm4
2883 leal (%r11,%r14,1),%r11d
2884 leal (%r10,%r12,1),%r10d
2885 vpslld $11,%ymm5,%ymm5
2886 andnl %r9d,%edx,%r12d
2889 vpxor %ymm6,%ymm4,%ymm4
2890 leal (%r10,%r12,1),%r10d
2893 vpsrld $10,%ymm7,%ymm6
2894 rorxl $22,%r11d,%r12d
2895 leal (%r10,%r13,1),%r10d
2897 vpxor %ymm5,%ymm4,%ymm4
2898 rorxl $13,%r11d,%r14d
2899 rorxl $2,%r11d,%r13d
2900 leal (%rcx,%r10,1),%ecx
2901 vpsrlq $17,%ymm7,%ymm7
2903 vaesenc %xmm10,%xmm9,%xmm9
2904 vmovdqu 144-128(%rdi),%xmm10
2907 vpaddd %ymm4,%ymm2,%ymm2
2909 leal (%r10,%r15,1),%r10d
2911 vpxor %ymm7,%ymm6,%ymm6
2912 addl 8+128(%rsp),%r9d
2914 rorxl $25,%ecx,%r13d
2915 vpsrlq $2,%ymm7,%ymm7
2916 rorxl $11,%ecx,%r15d
2917 leal (%r10,%r14,1),%r10d
2918 leal (%r9,%r12,1),%r9d
2919 vpxor %ymm7,%ymm6,%ymm6
2920 andnl %r8d,%ecx,%r12d
2923 vpshufd $132,%ymm6,%ymm6
2924 leal (%r9,%r12,1),%r9d
2927 vpsrldq $8,%ymm6,%ymm6
2928 rorxl $22,%r10d,%r12d
2929 leal (%r9,%r13,1),%r9d
2931 vpaddd %ymm6,%ymm2,%ymm2
2932 rorxl $13,%r10d,%r14d
2933 rorxl $2,%r10d,%r13d
2934 leal (%rbx,%r9,1),%ebx
2935 vpshufd $80,%ymm2,%ymm7
2937 vaesenc %xmm10,%xmm9,%xmm9
2938 vmovdqu 160-128(%rdi),%xmm10
2941 vpsrld $10,%ymm7,%ymm6
2943 leal (%r9,%rsi,1),%r9d
2945 vpsrlq $17,%ymm7,%ymm7
2946 addl 12+128(%rsp),%r8d
2948 rorxl $25,%ebx,%r13d
2949 vpxor %ymm7,%ymm6,%ymm6
2951 leal (%r9,%r14,1),%r9d
2952 leal (%r8,%r12,1),%r8d
2953 vpsrlq $2,%ymm7,%ymm7
2954 andnl %edx,%ebx,%r12d
2957 vpxor %ymm7,%ymm6,%ymm6
2958 leal (%r8,%r12,1),%r8d
2961 vpshufd $232,%ymm6,%ymm6
2962 rorxl $22,%r9d,%r12d
2963 leal (%r8,%r13,1),%r8d
2965 vpslldq $8,%ymm6,%ymm6
2966 rorxl $13,%r9d,%r14d
2968 leal (%rax,%r8,1),%eax
2969 vpaddd %ymm6,%ymm2,%ymm2
2971 vaesenclast %xmm10,%xmm9,%xmm11
2972 vaesenc %xmm10,%xmm9,%xmm9
2973 vmovdqu 176-128(%rdi),%xmm10
2976 vpaddd 64(%rbp),%ymm2,%ymm6
2978 leal (%r8,%r15,1),%r8d
2980 vmovdqa %ymm6,0(%rsp)
2981 vpalignr $4,%ymm3,%ymm0,%ymm4
2982 addl 32+128(%rsp),%edx
2984 rorxl $25,%eax,%r13d
2985 vpalignr $4,%ymm1,%ymm2,%ymm7
2986 rorxl $11,%eax,%r15d
2987 leal (%r8,%r14,1),%r8d
2988 leal (%rdx,%r12,1),%edx
2989 vpsrld $7,%ymm4,%ymm6
2990 andnl %ecx,%eax,%r12d
2993 vpaddd %ymm7,%ymm3,%ymm3
2994 leal (%rdx,%r12,1),%edx
2997 vpsrld $3,%ymm4,%ymm7
2998 rorxl $22,%r8d,%r12d
2999 leal (%rdx,%r13,1),%edx
3001 vpslld $14,%ymm4,%ymm5
3002 rorxl $13,%r8d,%r14d
3004 leal (%r11,%rdx,1),%r11d
3005 vpxor %ymm6,%ymm7,%ymm4
3007 vpand %xmm12,%xmm11,%xmm8
3008 vaesenc %xmm10,%xmm9,%xmm9
3009 vmovdqu 192-128(%rdi),%xmm10
3012 vpshufd $250,%ymm2,%ymm7
3014 leal (%rdx,%rsi,1),%edx
3016 vpsrld $11,%ymm6,%ymm6
3017 addl 36+128(%rsp),%ecx
3019 rorxl $25,%r11d,%r13d
3020 vpxor %ymm5,%ymm4,%ymm4
3021 rorxl $11,%r11d,%esi
3022 leal (%rdx,%r14,1),%edx
3023 leal (%rcx,%r12,1),%ecx
3024 vpslld $11,%ymm5,%ymm5
3025 andnl %ebx,%r11d,%r12d
3027 rorxl $6,%r11d,%r14d
3028 vpxor %ymm6,%ymm4,%ymm4
3029 leal (%rcx,%r12,1),%ecx
3032 vpsrld $10,%ymm7,%ymm6
3033 rorxl $22,%edx,%r12d
3034 leal (%rcx,%r13,1),%ecx
3036 vpxor %ymm5,%ymm4,%ymm4
3037 rorxl $13,%edx,%r14d
3039 leal (%r10,%rcx,1),%r10d
3040 vpsrlq $17,%ymm7,%ymm7
3042 vaesenclast %xmm10,%xmm9,%xmm11
3043 vaesenc %xmm10,%xmm9,%xmm9
3044 vmovdqu 208-128(%rdi),%xmm10
3047 vpaddd %ymm4,%ymm3,%ymm3
3049 leal (%rcx,%r15,1),%ecx
3051 vpxor %ymm7,%ymm6,%ymm6
3052 addl 40+128(%rsp),%ebx
3054 rorxl $25,%r10d,%r13d
3055 vpsrlq $2,%ymm7,%ymm7
3056 rorxl $11,%r10d,%r15d
3057 leal (%rcx,%r14,1),%ecx
3058 leal (%rbx,%r12,1),%ebx
3059 vpxor %ymm7,%ymm6,%ymm6
3060 andnl %eax,%r10d,%r12d
3062 rorxl $6,%r10d,%r14d
3063 vpshufd $132,%ymm6,%ymm6
3064 leal (%rbx,%r12,1),%ebx
3067 vpsrldq $8,%ymm6,%ymm6
3068 rorxl $22,%ecx,%r12d
3069 leal (%rbx,%r13,1),%ebx
3071 vpaddd %ymm6,%ymm3,%ymm3
3072 rorxl $13,%ecx,%r14d
3074 leal (%r9,%rbx,1),%r9d
3075 vpshufd $80,%ymm3,%ymm7
3077 vpand %xmm13,%xmm11,%xmm11
3078 vaesenc %xmm10,%xmm9,%xmm9
3079 vmovdqu 224-128(%rdi),%xmm10
3082 vpsrld $10,%ymm7,%ymm6
3084 leal (%rbx,%rsi,1),%ebx
3086 vpsrlq $17,%ymm7,%ymm7
3087 addl 44+128(%rsp),%eax
3089 rorxl $25,%r9d,%r13d
3090 vpxor %ymm7,%ymm6,%ymm6
3092 leal (%rbx,%r14,1),%ebx
3093 leal (%rax,%r12,1),%eax
3094 vpsrlq $2,%ymm7,%ymm7
3095 andnl %r11d,%r9d,%r12d
3098 vpxor %ymm7,%ymm6,%ymm6
3099 leal (%rax,%r12,1),%eax
3102 vpshufd $232,%ymm6,%ymm6
3103 rorxl $22,%ebx,%r12d
3104 leal (%rax,%r13,1),%eax
3106 vpslldq $8,%ymm6,%ymm6
3107 rorxl $13,%ebx,%r14d
3109 leal (%r8,%rax,1),%r8d
3110 vpaddd %ymm6,%ymm3,%ymm3
3112 vpor %xmm11,%xmm8,%xmm8
3113 vaesenclast %xmm10,%xmm9,%xmm11
3114 vmovdqu 0-128(%rdi),%xmm10
3117 vpaddd 96(%rbp),%ymm3,%ymm6
3119 leal (%rax,%r15,1),%eax
3121 vmovdqa %ymm6,32(%rsp)
3123 vpextrq $1,%xmm15,%r15
3124 vpand %xmm14,%xmm11,%xmm11
3125 vpor %xmm11,%xmm8,%xmm8
3126 vmovdqu %xmm8,(%r15,%r13,1)
3131 vmovdqu (%r13),%xmm9
3132 vpinsrq $0,%r13,%xmm15,%xmm15
3133 addl 0+64(%rsp),%r11d
3135 rorxl $25,%r8d,%r13d
3136 rorxl $11,%r8d,%r15d
3137 leal (%rax,%r14,1),%eax
3138 leal (%r11,%r12,1),%r11d
3139 andnl %r10d,%r8d,%r12d
3142 leal (%r11,%r12,1),%r11d
3145 rorxl $22,%eax,%r12d
3146 leal (%r11,%r13,1),%r11d
3148 rorxl $13,%eax,%r14d
3150 leal (%rdx,%r11,1),%edx
3152 vpxor %xmm10,%xmm9,%xmm9
3153 vmovdqu 16-128(%rdi),%xmm10
3157 leal (%r11,%rsi,1),%r11d
3159 addl 4+64(%rsp),%r10d
3161 rorxl $25,%edx,%r13d
3163 leal (%r11,%r14,1),%r11d
3164 leal (%r10,%r12,1),%r10d
3165 andnl %r9d,%edx,%r12d
3168 leal (%r10,%r12,1),%r10d
3171 rorxl $22,%r11d,%r12d
3172 leal (%r10,%r13,1),%r10d
3174 rorxl $13,%r11d,%r14d
3175 rorxl $2,%r11d,%r13d
3176 leal (%rcx,%r10,1),%ecx
3178 vpxor %xmm8,%xmm9,%xmm9
3182 leal (%r10,%r15,1),%r10d
3184 addl 8+64(%rsp),%r9d
3186 rorxl $25,%ecx,%r13d
3187 rorxl $11,%ecx,%r15d
3188 leal (%r10,%r14,1),%r10d
3189 leal (%r9,%r12,1),%r9d
3190 andnl %r8d,%ecx,%r12d
3193 leal (%r9,%r12,1),%r9d
3196 rorxl $22,%r10d,%r12d
3197 leal (%r9,%r13,1),%r9d
3199 rorxl $13,%r10d,%r14d
3200 rorxl $2,%r10d,%r13d
3201 leal (%rbx,%r9,1),%ebx
3203 vaesenc %xmm10,%xmm9,%xmm9
3204 vmovdqu 32-128(%rdi),%xmm10
3208 leal (%r9,%rsi,1),%r9d
3210 addl 12+64(%rsp),%r8d
3212 rorxl $25,%ebx,%r13d
3214 leal (%r9,%r14,1),%r9d
3215 leal (%r8,%r12,1),%r8d
3216 andnl %edx,%ebx,%r12d
3219 leal (%r8,%r12,1),%r8d
3222 rorxl $22,%r9d,%r12d
3223 leal (%r8,%r13,1),%r8d
3225 rorxl $13,%r9d,%r14d
3227 leal (%rax,%r8,1),%eax
3229 vaesenc %xmm10,%xmm9,%xmm9
3230 vmovdqu 48-128(%rdi),%xmm10
3234 leal (%r8,%r15,1),%r8d
3236 addl 32+64(%rsp),%edx
3238 rorxl $25,%eax,%r13d
3239 rorxl $11,%eax,%r15d
3240 leal (%r8,%r14,1),%r8d
3241 leal (%rdx,%r12,1),%edx
3242 andnl %ecx,%eax,%r12d
3245 leal (%rdx,%r12,1),%edx
3248 rorxl $22,%r8d,%r12d
3249 leal (%rdx,%r13,1),%edx
3251 rorxl $13,%r8d,%r14d
3253 leal (%r11,%rdx,1),%r11d
3255 vaesenc %xmm10,%xmm9,%xmm9
3256 vmovdqu 64-128(%rdi),%xmm10
3260 leal (%rdx,%rsi,1),%edx
3262 addl 36+64(%rsp),%ecx
3264 rorxl $25,%r11d,%r13d
3265 rorxl $11,%r11d,%esi
3266 leal (%rdx,%r14,1),%edx
3267 leal (%rcx,%r12,1),%ecx
3268 andnl %ebx,%r11d,%r12d
3270 rorxl $6,%r11d,%r14d
3271 leal (%rcx,%r12,1),%ecx
3274 rorxl $22,%edx,%r12d
3275 leal (%rcx,%r13,1),%ecx
3277 rorxl $13,%edx,%r14d
3279 leal (%r10,%rcx,1),%r10d
3281 vaesenc %xmm10,%xmm9,%xmm9
3282 vmovdqu 80-128(%rdi),%xmm10
3286 leal (%rcx,%r15,1),%ecx
3288 addl 40+64(%rsp),%ebx
3290 rorxl $25,%r10d,%r13d
3291 rorxl $11,%r10d,%r15d
3292 leal (%rcx,%r14,1),%ecx
3293 leal (%rbx,%r12,1),%ebx
3294 andnl %eax,%r10d,%r12d
3296 rorxl $6,%r10d,%r14d
3297 leal (%rbx,%r12,1),%ebx
3300 rorxl $22,%ecx,%r12d
3301 leal (%rbx,%r13,1),%ebx
3303 rorxl $13,%ecx,%r14d
3305 leal (%r9,%rbx,1),%r9d
3307 vaesenc %xmm10,%xmm9,%xmm9
3308 vmovdqu 96-128(%rdi),%xmm10
3312 leal (%rbx,%rsi,1),%ebx
3314 addl 44+64(%rsp),%eax
3316 rorxl $25,%r9d,%r13d
3318 leal (%rbx,%r14,1),%ebx
3319 leal (%rax,%r12,1),%eax
3320 andnl %r11d,%r9d,%r12d
3323 leal (%rax,%r12,1),%eax
3326 rorxl $22,%ebx,%r12d
3327 leal (%rax,%r13,1),%eax
3329 rorxl $13,%ebx,%r14d
3331 leal (%r8,%rax,1),%r8d
3333 vaesenc %xmm10,%xmm9,%xmm9
3334 vmovdqu 112-128(%rdi),%xmm10
3338 leal (%rax,%r15,1),%eax
3342 rorxl $25,%r8d,%r13d
3343 rorxl $11,%r8d,%r15d
3344 leal (%rax,%r14,1),%eax
3345 leal (%r11,%r12,1),%r11d
3346 andnl %r10d,%r8d,%r12d
3349 leal (%r11,%r12,1),%r11d
3352 rorxl $22,%eax,%r12d
3353 leal (%r11,%r13,1),%r11d
3355 rorxl $13,%eax,%r14d
3357 leal (%rdx,%r11,1),%edx
3359 vaesenc %xmm10,%xmm9,%xmm9
3360 vmovdqu 128-128(%rdi),%xmm10
3364 leal (%r11,%rsi,1),%r11d
3368 rorxl $25,%edx,%r13d
3370 leal (%r11,%r14,1),%r11d
3371 leal (%r10,%r12,1),%r10d
3372 andnl %r9d,%edx,%r12d
3375 leal (%r10,%r12,1),%r10d
3378 rorxl $22,%r11d,%r12d
3379 leal (%r10,%r13,1),%r10d
3381 rorxl $13,%r11d,%r14d
3382 rorxl $2,%r11d,%r13d
3383 leal (%rcx,%r10,1),%ecx
3385 vaesenc %xmm10,%xmm9,%xmm9
3386 vmovdqu 144-128(%rdi),%xmm10
3390 leal (%r10,%r15,1),%r10d
3394 rorxl $25,%ecx,%r13d
3395 rorxl $11,%ecx,%r15d
3396 leal (%r10,%r14,1),%r10d
3397 leal (%r9,%r12,1),%r9d
3398 andnl %r8d,%ecx,%r12d
3401 leal (%r9,%r12,1),%r9d
3404 rorxl $22,%r10d,%r12d
3405 leal (%r9,%r13,1),%r9d
3407 rorxl $13,%r10d,%r14d
3408 rorxl $2,%r10d,%r13d
3409 leal (%rbx,%r9,1),%ebx
3411 vaesenc %xmm10,%xmm9,%xmm9
3412 vmovdqu 160-128(%rdi),%xmm10
3416 leal (%r9,%rsi,1),%r9d
3420 rorxl $25,%ebx,%r13d
3422 leal (%r9,%r14,1),%r9d
3423 leal (%r8,%r12,1),%r8d
3424 andnl %edx,%ebx,%r12d
3427 leal (%r8,%r12,1),%r8d
3430 rorxl $22,%r9d,%r12d
3431 leal (%r8,%r13,1),%r8d
3433 rorxl $13,%r9d,%r14d
3435 leal (%rax,%r8,1),%eax
3437 vaesenclast %xmm10,%xmm9,%xmm11
3438 vaesenc %xmm10,%xmm9,%xmm9
3439 vmovdqu 176-128(%rdi),%xmm10
3443 leal (%r8,%r15,1),%r8d
3447 rorxl $25,%eax,%r13d
3448 rorxl $11,%eax,%r15d
3449 leal (%r8,%r14,1),%r8d
3450 leal (%rdx,%r12,1),%edx
3451 andnl %ecx,%eax,%r12d
3454 leal (%rdx,%r12,1),%edx
3457 rorxl $22,%r8d,%r12d
3458 leal (%rdx,%r13,1),%edx
3460 rorxl $13,%r8d,%r14d
3462 leal (%r11,%rdx,1),%r11d
3464 vpand %xmm12,%xmm11,%xmm8
3465 vaesenc %xmm10,%xmm9,%xmm9
3466 vmovdqu 192-128(%rdi),%xmm10
3470 leal (%rdx,%rsi,1),%edx
3474 rorxl $25,%r11d,%r13d
3475 rorxl $11,%r11d,%esi
3476 leal (%rdx,%r14,1),%edx
3477 leal (%rcx,%r12,1),%ecx
3478 andnl %ebx,%r11d,%r12d
3480 rorxl $6,%r11d,%r14d
3481 leal (%rcx,%r12,1),%ecx
3484 rorxl $22,%edx,%r12d
3485 leal (%rcx,%r13,1),%ecx
3487 rorxl $13,%edx,%r14d
3489 leal (%r10,%rcx,1),%r10d
3491 vaesenclast %xmm10,%xmm9,%xmm11
3492 vaesenc %xmm10,%xmm9,%xmm9
3493 vmovdqu 208-128(%rdi),%xmm10
3497 leal (%rcx,%r15,1),%ecx
3501 rorxl $25,%r10d,%r13d
3502 rorxl $11,%r10d,%r15d
3503 leal (%rcx,%r14,1),%ecx
3504 leal (%rbx,%r12,1),%ebx
3505 andnl %eax,%r10d,%r12d
3507 rorxl $6,%r10d,%r14d
3508 leal (%rbx,%r12,1),%ebx
3511 rorxl $22,%ecx,%r12d
3512 leal (%rbx,%r13,1),%ebx
3514 rorxl $13,%ecx,%r14d
3516 leal (%r9,%rbx,1),%r9d
3518 vpand %xmm13,%xmm11,%xmm11
3519 vaesenc %xmm10,%xmm9,%xmm9
3520 vmovdqu 224-128(%rdi),%xmm10
3524 leal (%rbx,%rsi,1),%ebx
3528 rorxl $25,%r9d,%r13d
3530 leal (%rbx,%r14,1),%ebx
3531 leal (%rax,%r12,1),%eax
3532 andnl %r11d,%r9d,%r12d
3535 leal (%rax,%r12,1),%eax
3538 rorxl $22,%ebx,%r12d
3539 leal (%rax,%r13,1),%eax
3541 rorxl $13,%ebx,%r14d
3543 leal (%r8,%rax,1),%r8d
3545 vpor %xmm11,%xmm8,%xmm8
3546 vaesenclast %xmm10,%xmm9,%xmm11
3547 vmovdqu 0-128(%rdi),%xmm10
3551 leal (%rax,%r15,1),%eax
3553 vpextrq $1,%xmm15,%r12
3559 vpand %xmm14,%xmm11,%xmm11
3560 vpor %xmm11,%xmm8,%xmm8
3561 vmovdqu %xmm8,(%r12,%r13,1)
3592 vmovdqu (%r13),%xmm9
3593 vpinsrq $0,%r13,%xmm15,%xmm15
3594 addl 0+16(%rbp),%r11d
3596 rorxl $25,%r8d,%r13d
3597 rorxl $11,%r8d,%r15d
3598 leal (%rax,%r14,1),%eax
3599 leal (%r11,%r12,1),%r11d
3600 andnl %r10d,%r8d,%r12d
3603 leal (%r11,%r12,1),%r11d
3606 rorxl $22,%eax,%r12d
3607 leal (%r11,%r13,1),%r11d
3609 rorxl $13,%eax,%r14d
3611 leal (%rdx,%r11,1),%edx
3613 vpxor %xmm10,%xmm9,%xmm9
3614 vmovdqu 16-128(%rdi),%xmm10
3618 leal (%r11,%rsi,1),%r11d
3620 addl 4+16(%rbp),%r10d
3622 rorxl $25,%edx,%r13d
3624 leal (%r11,%r14,1),%r11d
3625 leal (%r10,%r12,1),%r10d
3626 andnl %r9d,%edx,%r12d
3629 leal (%r10,%r12,1),%r10d
3632 rorxl $22,%r11d,%r12d
3633 leal (%r10,%r13,1),%r10d
3635 rorxl $13,%r11d,%r14d
3636 rorxl $2,%r11d,%r13d
3637 leal (%rcx,%r10,1),%ecx
3639 vpxor %xmm8,%xmm9,%xmm9
3643 leal (%r10,%r15,1),%r10d
3645 addl 8+16(%rbp),%r9d
3647 rorxl $25,%ecx,%r13d
3648 rorxl $11,%ecx,%r15d
3649 leal (%r10,%r14,1),%r10d
3650 leal (%r9,%r12,1),%r9d
3651 andnl %r8d,%ecx,%r12d
3654 leal (%r9,%r12,1),%r9d
3657 rorxl $22,%r10d,%r12d
3658 leal (%r9,%r13,1),%r9d
3660 rorxl $13,%r10d,%r14d
3661 rorxl $2,%r10d,%r13d
3662 leal (%rbx,%r9,1),%ebx
3664 vaesenc %xmm10,%xmm9,%xmm9
3665 vmovdqu 32-128(%rdi),%xmm10
3669 leal (%r9,%rsi,1),%r9d
3671 addl 12+16(%rbp),%r8d
3673 rorxl $25,%ebx,%r13d
3675 leal (%r9,%r14,1),%r9d
3676 leal (%r8,%r12,1),%r8d
3677 andnl %edx,%ebx,%r12d
3680 leal (%r8,%r12,1),%r8d
3683 rorxl $22,%r9d,%r12d
3684 leal (%r8,%r13,1),%r8d
3686 rorxl $13,%r9d,%r14d
3688 leal (%rax,%r8,1),%eax
3690 vaesenc %xmm10,%xmm9,%xmm9
3691 vmovdqu 48-128(%rdi),%xmm10
3695 leal (%r8,%r15,1),%r8d
3697 addl 32+16(%rbp),%edx
3699 rorxl $25,%eax,%r13d
3700 rorxl $11,%eax,%r15d
3701 leal (%r8,%r14,1),%r8d
3702 leal (%rdx,%r12,1),%edx
3703 andnl %ecx,%eax,%r12d
3706 leal (%rdx,%r12,1),%edx
3709 rorxl $22,%r8d,%r12d
3710 leal (%rdx,%r13,1),%edx
3712 rorxl $13,%r8d,%r14d
3714 leal (%r11,%rdx,1),%r11d
3716 vaesenc %xmm10,%xmm9,%xmm9
3717 vmovdqu 64-128(%rdi),%xmm10
3721 leal (%rdx,%rsi,1),%edx
3723 addl 36+16(%rbp),%ecx
3725 rorxl $25,%r11d,%r13d
3726 rorxl $11,%r11d,%esi
3727 leal (%rdx,%r14,1),%edx
3728 leal (%rcx,%r12,1),%ecx
3729 andnl %ebx,%r11d,%r12d
3731 rorxl $6,%r11d,%r14d
3732 leal (%rcx,%r12,1),%ecx
3735 rorxl $22,%edx,%r12d
3736 leal (%rcx,%r13,1),%ecx
3738 rorxl $13,%edx,%r14d
3740 leal (%r10,%rcx,1),%r10d
3742 vaesenc %xmm10,%xmm9,%xmm9
3743 vmovdqu 80-128(%rdi),%xmm10
3747 leal (%rcx,%r15,1),%ecx
3749 addl 40+16(%rbp),%ebx
3751 rorxl $25,%r10d,%r13d
3752 rorxl $11,%r10d,%r15d
3753 leal (%rcx,%r14,1),%ecx
3754 leal (%rbx,%r12,1),%ebx
3755 andnl %eax,%r10d,%r12d
3757 rorxl $6,%r10d,%r14d
3758 leal (%rbx,%r12,1),%ebx
3761 rorxl $22,%ecx,%r12d
3762 leal (%rbx,%r13,1),%ebx
3764 rorxl $13,%ecx,%r14d
3766 leal (%r9,%rbx,1),%r9d
3768 vaesenc %xmm10,%xmm9,%xmm9
3769 vmovdqu 96-128(%rdi),%xmm10
3773 leal (%rbx,%rsi,1),%ebx
3775 addl 44+16(%rbp),%eax
3777 rorxl $25,%r9d,%r13d
3779 leal (%rbx,%r14,1),%ebx
3780 leal (%rax,%r12,1),%eax
3781 andnl %r11d,%r9d,%r12d
3784 leal (%rax,%r12,1),%eax
3787 rorxl $22,%ebx,%r12d
3788 leal (%rax,%r13,1),%eax
3790 rorxl $13,%ebx,%r14d
3792 leal (%r8,%rax,1),%r8d
3794 vaesenc %xmm10,%xmm9,%xmm9
3795 vmovdqu 112-128(%rdi),%xmm10
3799 leal (%rax,%r15,1),%eax
3802 addl 0+16(%rbp),%r11d
3804 rorxl $25,%r8d,%r13d
3805 rorxl $11,%r8d,%r15d
3806 leal (%rax,%r14,1),%eax
3807 leal (%r11,%r12,1),%r11d
3808 andnl %r10d,%r8d,%r12d
3811 leal (%r11,%r12,1),%r11d
3814 rorxl $22,%eax,%r12d
3815 leal (%r11,%r13,1),%r11d
3817 rorxl $13,%eax,%r14d
3819 leal (%rdx,%r11,1),%edx
3821 vaesenc %xmm10,%xmm9,%xmm9
3822 vmovdqu 128-128(%rdi),%xmm10
3826 leal (%r11,%rsi,1),%r11d
3828 addl 4+16(%rbp),%r10d
3830 rorxl $25,%edx,%r13d
3832 leal (%r11,%r14,1),%r11d
3833 leal (%r10,%r12,1),%r10d
3834 andnl %r9d,%edx,%r12d
3837 leal (%r10,%r12,1),%r10d
3840 rorxl $22,%r11d,%r12d
3841 leal (%r10,%r13,1),%r10d
3843 rorxl $13,%r11d,%r14d
3844 rorxl $2,%r11d,%r13d
3845 leal (%rcx,%r10,1),%ecx
3847 vaesenc %xmm10,%xmm9,%xmm9
3848 vmovdqu 144-128(%rdi),%xmm10
3852 leal (%r10,%r15,1),%r10d
3854 addl 8+16(%rbp),%r9d
3856 rorxl $25,%ecx,%r13d
3857 rorxl $11,%ecx,%r15d
3858 leal (%r10,%r14,1),%r10d
3859 leal (%r9,%r12,1),%r9d
3860 andnl %r8d,%ecx,%r12d
3863 leal (%r9,%r12,1),%r9d
3866 rorxl $22,%r10d,%r12d
3867 leal (%r9,%r13,1),%r9d
3869 rorxl $13,%r10d,%r14d
3870 rorxl $2,%r10d,%r13d
3871 leal (%rbx,%r9,1),%ebx
3873 vaesenc %xmm10,%xmm9,%xmm9
3874 vmovdqu 160-128(%rdi),%xmm10
3878 leal (%r9,%rsi,1),%r9d
3880 addl 12+16(%rbp),%r8d
3882 rorxl $25,%ebx,%r13d
3884 leal (%r9,%r14,1),%r9d
3885 leal (%r8,%r12,1),%r8d
3886 andnl %edx,%ebx,%r12d
3889 leal (%r8,%r12,1),%r8d
3892 rorxl $22,%r9d,%r12d
3893 leal (%r8,%r13,1),%r8d
3895 rorxl $13,%r9d,%r14d
3897 leal (%rax,%r8,1),%eax
3899 vaesenclast %xmm10,%xmm9,%xmm11
3900 vaesenc %xmm10,%xmm9,%xmm9
3901 vmovdqu 176-128(%rdi),%xmm10
3905 leal (%r8,%r15,1),%r8d
3907 addl 32+16(%rbp),%edx
3909 rorxl $25,%eax,%r13d
3910 rorxl $11,%eax,%r15d
3911 leal (%r8,%r14,1),%r8d
3912 leal (%rdx,%r12,1),%edx
3913 andnl %ecx,%eax,%r12d
3916 leal (%rdx,%r12,1),%edx
3919 rorxl $22,%r8d,%r12d
3920 leal (%rdx,%r13,1),%edx
3922 rorxl $13,%r8d,%r14d
3924 leal (%r11,%rdx,1),%r11d
3926 vpand %xmm12,%xmm11,%xmm8
3927 vaesenc %xmm10,%xmm9,%xmm9
3928 vmovdqu 192-128(%rdi),%xmm10
3932 leal (%rdx,%rsi,1),%edx
3934 addl 36+16(%rbp),%ecx
3936 rorxl $25,%r11d,%r13d
3937 rorxl $11,%r11d,%esi
3938 leal (%rdx,%r14,1),%edx
3939 leal (%rcx,%r12,1),%ecx
3940 andnl %ebx,%r11d,%r12d
3942 rorxl $6,%r11d,%r14d
3943 leal (%rcx,%r12,1),%ecx
3946 rorxl $22,%edx,%r12d
3947 leal (%rcx,%r13,1),%ecx
3949 rorxl $13,%edx,%r14d
3951 leal (%r10,%rcx,1),%r10d
3953 vaesenclast %xmm10,%xmm9,%xmm11
3954 vaesenc %xmm10,%xmm9,%xmm9
3955 vmovdqu 208-128(%rdi),%xmm10
3959 leal (%rcx,%r15,1),%ecx
3961 addl 40+16(%rbp),%ebx
3963 rorxl $25,%r10d,%r13d
3964 rorxl $11,%r10d,%r15d
3965 leal (%rcx,%r14,1),%ecx
3966 leal (%rbx,%r12,1),%ebx
3967 andnl %eax,%r10d,%r12d
3969 rorxl $6,%r10d,%r14d
3970 leal (%rbx,%r12,1),%ebx
3973 rorxl $22,%ecx,%r12d
3974 leal (%rbx,%r13,1),%ebx
3976 rorxl $13,%ecx,%r14d
3978 leal (%r9,%rbx,1),%r9d
3980 vpand %xmm13,%xmm11,%xmm11
3981 vaesenc %xmm10,%xmm9,%xmm9
3982 vmovdqu 224-128(%rdi),%xmm10
3986 leal (%rbx,%rsi,1),%ebx
3988 addl 44+16(%rbp),%eax
3990 rorxl $25,%r9d,%r13d
3992 leal (%rbx,%r14,1),%ebx
3993 leal (%rax,%r12,1),%eax
3994 andnl %r11d,%r9d,%r12d
3997 leal (%rax,%r12,1),%eax
4000 rorxl $22,%ebx,%r12d
4001 leal (%rax,%r13,1),%eax
4003 rorxl $13,%ebx,%r14d
4005 leal (%r8,%rax,1),%r8d
4007 vpor %xmm11,%xmm8,%xmm8
4008 vaesenclast %xmm10,%xmm9,%xmm11
4009 vmovdqu 0-128(%rdi),%xmm10
4013 leal (%rax,%r15,1),%eax
4016 vpextrq $1,%xmm15,%r15
4017 vpand %xmm14,%xmm11,%xmm11
4018 vpor %xmm11,%xmm8,%xmm8
4020 vmovdqu %xmm8,(%r15,%r13,1)
4038 leaq (%rsi,%r13,1),%r12
4041 cmpq 64+16(%rsp),%r13
4057 .cfi_escape 0x0f,0x06,0x76,0xf8,0x00,0x06,0x23,0x08
4060 movq 64+32(%rbp),%r8
4061 movq 64+56(%rbp),%rsi
4078 .cfi_def_cfa_register %rsp
4082 .size aesni_cbc_sha256_enc_avx2,.-aesni_cbc_sha256_enc_avx2
# ----------------------------------------------------------------------
# aesni_cbc_sha256_enc_shaext -- AES-CBC encryption interleaved with a
# SHA-256 transform, using the Intel SHA extensions (SHA-NI) plus
# AES-NI.  Auto-generated from aesni-sha256-x86_64.pl: fix defects in
# the perl source, never by hand-editing this output.
#
# NOTE(review): this excerpt is missing interior lines (the embedded
# line numbers jump), so the comments below describe only what is
# visible.  Register roles, as far as the visible code establishes:
#   %rcx - AES key schedule; 240(%rcx) is the AES_KEY rounds field
#   %r9  - SHA-256 state; 16(%r9) holds hash words 4..7
#   %r10 - presumably the 64-byte message block pointer -- TODO confirm
#   %rdi - presumably the CBC plaintext input pointer -- TODO confirm
#   %rax - K256+128, so "off-128(%rax)" indexes the K256 constant table
#
# The .byte sequences are hand-encoded instructions for assemblers that
# predate them; decoded:
#   69,15,56,204,*       = sha256msg1   (message-schedule, sigma0 part)
#   69,15,56,205,*       = sha256msg2   (message-schedule, sigma1 part)
#   102,65,15,58,15,*,4  = palignr $4   (shift message-schedule window)
#   102,68,15,56,0,*     = pshufb       (byte-swap input to big-endian)
#   102,15,58,15,202,8   = palignr $8   (state-word reordering)
# ----------------------------------------------------------------------
4083 .type aesni_cbc_sha256_enc_shaext,@function
4085 aesni_cbc_sha256_enc_shaext:
# %rax = K256+128: round constants are fetched below as "off-128(%rax)".
4088 leaq K256+128(%rip),%rax
# Load the second half of the SHA-256 state (words 4..7); the matching
# load of words 0..3 into %xmm1 is on lines elided from this excerpt.
4090 movdqu 16(%r9),%xmm2
# K256+384 in absolute terms: presumably the pshufb byte-swap mask
# stored after the K table -- TODO confirm against the .rodata section.
4091 movdqa 512-128(%rax),%xmm3
# AES_KEY.rounds lives at byte offset 240 of the key schedule.
4093 movl 240(%rcx),%r11d
4095 movups (%rcx),%xmm15
4097 movups 16(%rcx),%xmm4
# Re-order the eight hash words into the ABEF/CDGH lane layout that
# sha256rnds2 requires (the standard SHA-NI prologue shuffle --
# confirm against the perlasm source).
4100 pshufd $0x1b,%xmm1,%xmm0
4101 pshufd $0xb1,%xmm1,%xmm1
4102 pshufd $0x1b,%xmm2,%xmm2
4104 .byte 102,15,58,15,202,8
4105 punpcklqdq %xmm0,%xmm2
# Load one 64-byte message block into %xmm10..%xmm13 and byte-swap it
# to big-endian with pshufb %xmm3 (the interleaved .byte pshufb lines).
4111 movdqu (%r10),%xmm10
4112 movdqu 16(%r10),%xmm11
4113 movdqu 32(%r10),%xmm12
4114 .byte 102,68,15,56,0,211
4115 movdqu 48(%r10),%xmm13
# First round-constant pair: K256[0..3].
4117 movdqa 0-128(%rax),%xmm0
4119 .byte 102,68,15,56,0,219
# Presumably the first 16 plaintext bytes for the CBC lane -- confirm.
4122 movups 0(%rdi),%xmm14
# ---- SHA-256 rounds interleaved with the AES-CBC lane ----
# Each visible group: movdqa pulls the next K256 constants, pshufd
# $0x0e extracts their high pair, sha256msg1/sha256msg2 + palignr $4
# (all .byte-encoded) advance the message schedule, while AES round
# keys stream through %xmm4/%xmm5.  The sha256rnds2 and aesenc
# instructions themselves fall on lines elided from this excerpt.
# Negative key offsets (-80(%rcx) etc.) suggest %rcx was advanced by
# 112 bytes in elided setup code -- TODO confirm.
4125 movups -80(%rcx),%xmm5
4128 pshufd $0x0e,%xmm0,%xmm0
4129 movups -64(%rcx),%xmm4
4133 movdqa 32-128(%rax),%xmm0
4135 .byte 102,68,15,56,0,227
4137 movups -48(%rcx),%xmm5
4140 pshufd $0x0e,%xmm0,%xmm0
4141 movups -32(%rcx),%xmm4
4145 movdqa 64-128(%rax),%xmm0
4147 .byte 102,68,15,56,0,235
4148 .byte 69,15,56,204,211
4149 movups -16(%rcx),%xmm5
4152 pshufd $0x0e,%xmm0,%xmm0
4154 .byte 102,65,15,58,15,220,4
4156 movups 0(%rcx),%xmm4
4160 movdqa 96-128(%rax),%xmm0
4162 .byte 69,15,56,205,213
4163 .byte 69,15,56,204,220
4164 movups 16(%rcx),%xmm5
4167 pshufd $0x0e,%xmm0,%xmm0
4168 movups 32(%rcx),%xmm4
4171 .byte 102,65,15,58,15,221,4
4174 movdqa 128-128(%rax),%xmm0
4176 .byte 69,15,56,205,218
4177 .byte 69,15,56,204,229
4178 movups 48(%rcx),%xmm5
4181 pshufd $0x0e,%xmm0,%xmm0
4183 .byte 102,65,15,58,15,218,4
4187 movups 64(%rcx),%xmm4
4189 movups 80(%rcx),%xmm5
4192 movups 96(%rcx),%xmm4
4194 movups 112(%rcx),%xmm5
# Final AES round for this 16-byte lane; ciphertext is stored at
# (%rsi,%rdi,1), so %rsi presumably holds the out-in delta -- confirm.
4197 aesenclast %xmm5,%xmm6
4198 movups 16-112(%rcx),%xmm4
4201 movups 16(%rdi),%xmm14
4203 movups %xmm6,0(%rsi,%rdi,1)
4205 movups -80(%rcx),%xmm5
# Next K256 group (rounds continue; same interleave pattern as above).
4207 movdqa 160-128(%rax),%xmm0
4209 .byte 69,15,56,205,227
4210 .byte 69,15,56,204,234
4211 movups -64(%rcx),%xmm4
4214 pshufd $0x0e,%xmm0,%xmm0
4216 .byte 102,65,15,58,15,219,4
4218 movups -48(%rcx),%xmm5
4221 movdqa 192-128(%rax),%xmm0
4223 .byte 69,15,56,205,236
4224 .byte 69,15,56,204,211
4225 movups -32(%rcx),%xmm4
4228 pshufd $0x0e,%xmm0,%xmm0
4230 .byte 102,65,15,58,15,220,4
4232 movups -16(%rcx),%xmm5
4235 movdqa 224-128(%rax),%xmm0
4237 .byte 69,15,56,205,213
4238 .byte 69,15,56,204,220
4239 movups 0(%rcx),%xmm4
4242 pshufd $0x0e,%xmm0,%xmm0
4244 .byte 102,65,15,58,15,221,4
4246 movups 16(%rcx),%xmm5
4249 movdqa 256-128(%rax),%xmm0
4251 .byte 69,15,56,205,218
4252 .byte 69,15,56,204,229
4253 movups 32(%rcx),%xmm4
4256 pshufd $0x0e,%xmm0,%xmm0
4258 .byte 102,65,15,58,15,218,4
4260 movups 48(%rcx),%xmm5
4264 movups 64(%rcx),%xmm4
4266 movups 80(%rcx),%xmm5
4269 movups 96(%rcx),%xmm4
4271 movups 112(%rcx),%xmm5
# Second 16-byte CBC lane completes; store ciphertext bytes 16..31.
4274 aesenclast %xmm5,%xmm6
4275 movups 16-112(%rcx),%xmm4
4278 movups 32(%rdi),%xmm14
4280 movups %xmm6,16(%rsi,%rdi,1)
4282 movups -80(%rcx),%xmm5
4284 movdqa 288-128(%rax),%xmm0
4286 .byte 69,15,56,205,227
4287 .byte 69,15,56,204,234
4288 movups -64(%rcx),%xmm4
4291 pshufd $0x0e,%xmm0,%xmm0
4293 .byte 102,65,15,58,15,219,4
4295 movups -48(%rcx),%xmm5
4298 movdqa 320-128(%rax),%xmm0
4300 .byte 69,15,56,205,236
4301 .byte 69,15,56,204,211
4302 movups -32(%rcx),%xmm4
4305 pshufd $0x0e,%xmm0,%xmm0
4307 .byte 102,65,15,58,15,220,4
4309 movups -16(%rcx),%xmm5
4312 movdqa 352-128(%rax),%xmm0
4314 .byte 69,15,56,205,213
4315 .byte 69,15,56,204,220
4316 movups 0(%rcx),%xmm4
4319 pshufd $0x0e,%xmm0,%xmm0
4321 .byte 102,65,15,58,15,221,4
4323 movups 16(%rcx),%xmm5
4326 movdqa 384-128(%rax),%xmm0
4328 .byte 69,15,56,205,218
4329 .byte 69,15,56,204,229
4330 movups 32(%rcx),%xmm4
4333 pshufd $0x0e,%xmm0,%xmm0
4335 .byte 102,65,15,58,15,218,4
4337 movups 48(%rcx),%xmm5
4340 movdqa 416-128(%rax),%xmm0
4342 .byte 69,15,56,205,227
4343 .byte 69,15,56,204,234
4346 movups 64(%rcx),%xmm4
4348 movups 80(%rcx),%xmm5
4351 movups 96(%rcx),%xmm4
4353 movups 112(%rcx),%xmm5
# Third lane completes; store ciphertext bytes 32..47.
4356 aesenclast %xmm5,%xmm6
4357 movups 16-112(%rcx),%xmm4
4360 pshufd $0x0e,%xmm0,%xmm0
4362 .byte 102,65,15,58,15,219,4
4364 movups 48(%rdi),%xmm14
4366 movups %xmm6,32(%rsi,%rdi,1)
4368 movups -80(%rcx),%xmm5
4370 movups -64(%rcx),%xmm4
# Final K256 groups (schedule updates taper off: no more sha256msg1
# once the last schedule words are in flight).
4374 movdqa 448-128(%rax),%xmm0
4376 .byte 69,15,56,205,236
4378 movups -48(%rcx),%xmm5
4381 pshufd $0x0e,%xmm0,%xmm0
4382 movups -32(%rcx),%xmm4
4386 movdqa 480-128(%rax),%xmm0
4388 movups -16(%rcx),%xmm5
4390 movups 0(%rcx),%xmm4
4393 pshufd $0x0e,%xmm0,%xmm0
4394 movups 16(%rcx),%xmm5
4398 movups 32(%rcx),%xmm4
4400 movups 48(%rcx),%xmm5
4404 movups 64(%rcx),%xmm4
4406 movups 80(%rcx),%xmm5
4409 movups 96(%rcx),%xmm4
4411 movups 112(%rcx),%xmm5
# Fourth lane completes; store ciphertext bytes 48..63.
4414 aesenclast %xmm5,%xmm6
4415 movups 16-112(%rcx),%xmm4
4422 movups %xmm6,48(%rsi,%rdi,1)
# Epilogue: undo the ABEF/CDGH permutation (pshufd/punpckhqdq plus
# palignr $8, the .byte line) and store the updated state; only the
# second-half store to 16(%r9) is visible in this excerpt.
4426 pshufd $0xb1,%xmm2,%xmm2
4427 pshufd $0x1b,%xmm1,%xmm3
4428 pshufd $0xb1,%xmm1,%xmm1
4429 punpckhqdq %xmm2,%xmm1
4430 .byte 102,15,58,15,211,8
4434 movdqu %xmm2,16(%r9)
4437 .size aesni_cbc_sha256_enc_shaext,.-aesni_cbc_sha256_enc_shaext