2 /* Do not modify. This file is auto-generated from aesni-sha256-x86_64.pl. */
# aesni_cbc_sha256_enc — public entry point for the stitched
# AES-NI-CBC + SHA-256 encrypt routine. It dispatches to the fastest
# implementation available on the running CPU by inspecting the
# OPENSSL_ia32cap_P capability vector (address loaded into %r11 below).
#
# NOTE(review): this file is a sampled excerpt of the auto-generated
# source — the capability-bit tests (mov/and/cmp/bt) that set the flags
# consumed by the conditional jumps below are NOT present in this view;
# only the dispatch jumps themselves are. Order of preference as
# visible here: SHA extensions, then XOP, then AVX2, then AVX.
6 .globl aesni_cbc_sha256_enc
7 .type aesni_cbc_sha256_enc,@function
# %r11 -> CPU feature-flag words filled in by OPENSSL_ia32cap setup.
10 leaq OPENSSL_ia32cap_P(%rip),%r11
# Dispatch to per-ISA implementations (flag-setting tests elided in
# this excerpt).
17 jc aesni_cbc_sha256_enc_shaext
22 jnz aesni_cbc_sha256_enc_xop
25 je aesni_cbc_sha256_enc_avx2
27 jnz aesni_cbc_sha256_enc_avx
35 .size aesni_cbc_sha256_enc,.-aesni_cbc_sha256_enc
# Constant pool referenced in code as K256 (the K256: label line itself
# is not present in this excerpt). First 512 bytes: the 64 SHA-256
# round constants K[0..63] (FIPS 180-4), with each group of four
# constants stored TWICE — presumably so a 256-bit (two-lane) load sees
# the same constants in both halves; TODO confirm against the perlasm
# source. Code indexes this table via 0/32/64/96(%rbp) with %rbp = K256.
40 .long 0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5
41 .long 0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5
42 .long 0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5
43 .long 0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5
44 .long 0xd807aa98,0x12835b01,0x243185be,0x550c7dc3
45 .long 0xd807aa98,0x12835b01,0x243185be,0x550c7dc3
46 .long 0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf174
47 .long 0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf174
48 .long 0xe49b69c1,0xefbe4786,0x0fc19dc6,0x240ca1cc
49 .long 0xe49b69c1,0xefbe4786,0x0fc19dc6,0x240ca1cc
50 .long 0x2de92c6f,0x4a7484aa,0x5cb0a9dc,0x76f988da
51 .long 0x2de92c6f,0x4a7484aa,0x5cb0a9dc,0x76f988da
52 .long 0x983e5152,0xa831c66d,0xb00327c8,0xbf597fc7
53 .long 0x983e5152,0xa831c66d,0xb00327c8,0xbf597fc7
54 .long 0xc6e00bf3,0xd5a79147,0x06ca6351,0x14292967
55 .long 0xc6e00bf3,0xd5a79147,0x06ca6351,0x14292967
56 .long 0x27b70a85,0x2e1b2138,0x4d2c6dfc,0x53380d13
57 .long 0x27b70a85,0x2e1b2138,0x4d2c6dfc,0x53380d13
58 .long 0x650a7354,0x766a0abb,0x81c2c92e,0x92722c85
59 .long 0x650a7354,0x766a0abb,0x81c2c92e,0x92722c85
60 .long 0xa2bfe8a1,0xa81a664b,0xc24b8b70,0xc76c51a3
61 .long 0xa2bfe8a1,0xa81a664b,0xc24b8b70,0xc76c51a3
62 .long 0xd192e819,0xd6990624,0xf40e3585,0x106aa070
63 .long 0xd192e819,0xd6990624,0xf40e3585,0x106aa070
64 .long 0x19a4c116,0x1e376c08,0x2748774c,0x34b0bcb5
65 .long 0x19a4c116,0x1e376c08,0x2748774c,0x34b0bcb5
66 .long 0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3
67 .long 0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3
68 .long 0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208
69 .long 0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208
70 .long 0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2
71 .long 0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2
# K256+512: ascending byte-index pattern 0x00010203... — a vpshufb
# shuffle mask; the code loads it into xmm7/ymm7 and applies it to the
# just-loaded message words, i.e. it performs the big-endian byte swap
# SHA-256 requires on little-endian x86 (NOTE(review): inferred from
# usage; confirm against the perlasm source).
73 .long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f
74 .long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f
# K256+544: rows of all-zero / all-ones dwords. The code loads
# 0/16/32(%r13,%r14,8) with %r13 = K256+544 and %r14 derived from the
# AES key's round count into xmm14/xmm13/xmm12 and later vpand/vpor's
# them against vaesenclast results — these appear to be blend masks
# that select the correct final-round output for the actual key size
# (TODO confirm).
75 .long 0,0,0,0, 0,0,0,0, -1,-1,-1,-1
76 .long 0,0,0,0, 0,0,0,0
# ASCII banner (NUL-terminated):
# "AESNI-CBC+SHA256 stitch for x86_64, CRYPTOGAMS by <appro@openssl.org>"
77 .byte 65,69,83,78,73,45,67,66,67,43,83,72,65,50,53,54,32,115,116,105,116,99,104,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
# aesni_cbc_sha256_enc_xop — AMD XOP variant of the stitched
# AES128-CBC encrypt + SHA-256 routine. One AES round (vaesenc on
# xmm9, next round key streamed into xmm10 from the schedule at
# %rdi-128) is interleaved with each chunk of the SHA-256 message
# schedule / round computation, hiding AES latency behind SHA work.
#
# NOTE(review): this is a sampled excerpt — the prologue pushes, the
# scalar SHA-256 round instructions, labels, and the loop branches are
# missing from this view; only the vector/AES skeleton is visible.
# Register roles visible here: xmm9 = running AES state (CBC chaining),
# xmm10 = current AES round key, xmm8/xmm11 = vaesenclast candidates
# blended via masks xmm12/xmm13/xmm14, xmm0-xmm3 = four 16-byte message
# blocks (W schedule), stack slots 0..48(%rsp) = W+K for the next 16
# rounds, 64+N(%rsp) = saved caller arguments.
79 .type aesni_cbc_sha256_enc_xop,@function
81 aesni_cbc_sha256_enc_xop:
86 .cfi_def_cfa_register %rax
# Stash arguments in the local frame (64+0..64+48(%rsp)); the full set
# of movq's is elided in this excerpt.
109 movq %rdx,64+16(%rsp)
113 movq %r10,64+48(%rsp)
115 .cfi_escape 0x0f,0x06,0x77,0xf8,0x00,0x06,0x23,0x08
# %r13 -> blend-mask table at K256+544; %r14d = AES round count read
# from the key schedule (240(%rdi) in the unbiased AES_KEY layout —
# the schedule pointer appears biased by -128, hence 240-128).
121 leaq K256+544(%rip),%r13
122 movl 240-128(%rdi),%r14d
# Select last-round blend masks by round count; load round key 0.
137 vmovdqa 0(%r13,%r14,8),%xmm14
138 vmovdqa 16(%r13,%r14,8),%xmm13
139 vmovdqa 32(%r13,%r14,8),%xmm12
140 vmovdqu 0-128(%rdi),%xmm10
# Load 64 bytes of input, byte-swap via the K256+512 shuffle mask,
# add the first 16 round constants, and spill W+K to the stack.
144 vmovdqa K256+512(%rip),%xmm7
145 vmovdqu 0(%rsi,%r12,1),%xmm0
146 vmovdqu 16(%rsi,%r12,1),%xmm1
147 vmovdqu 32(%rsi,%r12,1),%xmm2
148 vmovdqu 48(%rsi,%r12,1),%xmm3
149 vpshufb %xmm7,%xmm0,%xmm0
151 vpshufb %xmm7,%xmm1,%xmm1
152 vpshufb %xmm7,%xmm2,%xmm2
153 vpaddd 0(%rbp),%xmm0,%xmm4
154 vpshufb %xmm7,%xmm3,%xmm3
155 vpaddd 32(%rbp),%xmm1,%xmm5
156 vpaddd 64(%rbp),%xmm2,%xmm6
157 vpaddd 96(%rbp),%xmm3,%xmm7
158 vmovdqa %xmm4,0(%rsp)
160 vmovdqa %xmm5,16(%rsp)
162 vmovdqa %xmm6,32(%rsp)
164 vmovdqa %xmm7,48(%rsp)
# Message-schedule expansion for W[16..19] (sigma0/sigma1 via shifts
# and XOP rotates), interleaved with AES rounds. The ".byte
# 143,232,120,194,..." sequences are hand-encoded XOP instructions
# (0x8F XOP prefix) — presumably vprotd rotates, emitted as raw bytes
# for assemblers without XOP support; TODO confirm opcode decoding.
173 vpalignr $4,%xmm0,%xmm1,%xmm4
176 vpalignr $4,%xmm2,%xmm3,%xmm7
179 .byte 143,232,120,194,236,14
182 vpsrld $3,%xmm4,%xmm4
185 vpaddd %xmm7,%xmm0,%xmm0
# CBC chaining: XOR previous ciphertext/IV-derived state with round
# key 0, then stream in round key 1.
187 vpxor %xmm10,%xmm9,%xmm9
188 vmovdqu 16-128(%rdi),%xmm10
192 .byte 143,232,120,194,245,11
195 vpxor %xmm5,%xmm4,%xmm4
200 .byte 143,232,120,194,251,13
203 vpxor %xmm6,%xmm4,%xmm4
206 vpsrld $10,%xmm3,%xmm6
209 vpaddd %xmm4,%xmm0,%xmm0
212 .byte 143,232,120,194,239,2
215 vpxor %xmm6,%xmm7,%xmm7
220 vpxor %xmm5,%xmm7,%xmm7
# XOR in the plaintext block (xmm8) — CBC input mixing.
224 vpxor %xmm8,%xmm9,%xmm9
226 vpsrldq $8,%xmm7,%xmm7
231 vpaddd %xmm7,%xmm0,%xmm0
236 .byte 143,232,120,194,248,13
239 vpsrld $10,%xmm0,%xmm6
242 .byte 143,232,120,194,239,2
245 vpxor %xmm6,%xmm7,%xmm7
250 vpxor %xmm5,%xmm7,%xmm7
255 vpslldq $8,%xmm7,%xmm7
259 vaesenc %xmm10,%xmm9,%xmm9
260 vmovdqu 32-128(%rdi),%xmm10
262 vpaddd %xmm7,%xmm0,%xmm0
267 vpaddd 0(%rbp),%xmm0,%xmm6
289 vaesenc %xmm10,%xmm9,%xmm9
290 vmovdqu 48-128(%rdi),%xmm10
308 vmovdqa %xmm6,0(%rsp)
# Second schedule quarter (xmm1), same pattern.
309 vpalignr $4,%xmm1,%xmm2,%xmm4
312 vpalignr $4,%xmm3,%xmm0,%xmm7
315 .byte 143,232,120,194,236,14
318 vpsrld $3,%xmm4,%xmm4
321 vpaddd %xmm7,%xmm1,%xmm1
323 vaesenc %xmm10,%xmm9,%xmm9
324 vmovdqu 64-128(%rdi),%xmm10
328 .byte 143,232,120,194,245,11
331 vpxor %xmm5,%xmm4,%xmm4
336 .byte 143,232,120,194,248,13
339 vpxor %xmm6,%xmm4,%xmm4
342 vpsrld $10,%xmm0,%xmm6
345 vpaddd %xmm4,%xmm1,%xmm1
348 .byte 143,232,120,194,239,2
351 vpxor %xmm6,%xmm7,%xmm7
356 vpxor %xmm5,%xmm7,%xmm7
360 vaesenc %xmm10,%xmm9,%xmm9
361 vmovdqu 80-128(%rdi),%xmm10
363 vpsrldq $8,%xmm7,%xmm7
368 vpaddd %xmm7,%xmm1,%xmm1
373 .byte 143,232,120,194,249,13
376 vpsrld $10,%xmm1,%xmm6
379 .byte 143,232,120,194,239,2
382 vpxor %xmm6,%xmm7,%xmm7
387 vpxor %xmm5,%xmm7,%xmm7
392 vpslldq $8,%xmm7,%xmm7
396 vaesenc %xmm10,%xmm9,%xmm9
397 vmovdqu 96-128(%rdi),%xmm10
399 vpaddd %xmm7,%xmm1,%xmm1
404 vpaddd 32(%rbp),%xmm1,%xmm6
426 vaesenc %xmm10,%xmm9,%xmm9
427 vmovdqu 112-128(%rdi),%xmm10
445 vmovdqa %xmm6,16(%rsp)
# Third schedule quarter (xmm2).
446 vpalignr $4,%xmm2,%xmm3,%xmm4
449 vpalignr $4,%xmm0,%xmm1,%xmm7
452 .byte 143,232,120,194,236,14
455 vpsrld $3,%xmm4,%xmm4
458 vpaddd %xmm7,%xmm2,%xmm2
460 vaesenc %xmm10,%xmm9,%xmm9
461 vmovdqu 128-128(%rdi),%xmm10
465 .byte 143,232,120,194,245,11
468 vpxor %xmm5,%xmm4,%xmm4
473 .byte 143,232,120,194,249,13
476 vpxor %xmm6,%xmm4,%xmm4
479 vpsrld $10,%xmm1,%xmm6
482 vpaddd %xmm4,%xmm2,%xmm2
485 .byte 143,232,120,194,239,2
488 vpxor %xmm6,%xmm7,%xmm7
493 vpxor %xmm5,%xmm7,%xmm7
497 vaesenc %xmm10,%xmm9,%xmm9
498 vmovdqu 144-128(%rdi),%xmm10
500 vpsrldq $8,%xmm7,%xmm7
505 vpaddd %xmm7,%xmm2,%xmm2
510 .byte 143,232,120,194,250,13
513 vpsrld $10,%xmm2,%xmm6
516 .byte 143,232,120,194,239,2
519 vpxor %xmm6,%xmm7,%xmm7
524 vpxor %xmm5,%xmm7,%xmm7
529 vpslldq $8,%xmm7,%xmm7
533 vaesenc %xmm10,%xmm9,%xmm9
534 vmovdqu 160-128(%rdi),%xmm10
536 vpaddd %xmm7,%xmm2,%xmm2
541 vpaddd 64(%rbp),%xmm2,%xmm6
# Speculative last round for a 128-bit key (round key 10): keep the
# vaesenclast result in xmm11 while also continuing vaesenc for longer
# keys; xmm12/13/14 masks later blend the correct candidate.
563 vaesenclast %xmm10,%xmm9,%xmm11
564 vaesenc %xmm10,%xmm9,%xmm9
565 vmovdqu 176-128(%rdi),%xmm10
583 vmovdqa %xmm6,32(%rsp)
# Fourth schedule quarter (xmm3).
584 vpalignr $4,%xmm3,%xmm0,%xmm4
587 vpalignr $4,%xmm1,%xmm2,%xmm7
590 .byte 143,232,120,194,236,14
593 vpsrld $3,%xmm4,%xmm4
596 vpaddd %xmm7,%xmm3,%xmm3
598 vpand %xmm12,%xmm11,%xmm8
599 vaesenc %xmm10,%xmm9,%xmm9
600 vmovdqu 192-128(%rdi),%xmm10
604 .byte 143,232,120,194,245,11
607 vpxor %xmm5,%xmm4,%xmm4
612 .byte 143,232,120,194,250,13
615 vpxor %xmm6,%xmm4,%xmm4
618 vpsrld $10,%xmm2,%xmm6
621 vpaddd %xmm4,%xmm3,%xmm3
624 .byte 143,232,120,194,239,2
627 vpxor %xmm6,%xmm7,%xmm7
632 vpxor %xmm5,%xmm7,%xmm7
# Speculative last round for a 192-bit key (round key 12).
636 vaesenclast %xmm10,%xmm9,%xmm11
637 vaesenc %xmm10,%xmm9,%xmm9
638 vmovdqu 208-128(%rdi),%xmm10
640 vpsrldq $8,%xmm7,%xmm7
645 vpaddd %xmm7,%xmm3,%xmm3
650 .byte 143,232,120,194,251,13
653 vpsrld $10,%xmm3,%xmm6
656 .byte 143,232,120,194,239,2
659 vpxor %xmm6,%xmm7,%xmm7
664 vpxor %xmm5,%xmm7,%xmm7
669 vpslldq $8,%xmm7,%xmm7
673 vpand %xmm13,%xmm11,%xmm11
674 vaesenc %xmm10,%xmm9,%xmm9
675 vmovdqu 224-128(%rdi),%xmm10
677 vpaddd %xmm7,%xmm3,%xmm3
682 vpaddd 96(%rbp),%xmm3,%xmm6
# Blend the per-key-size vaesenclast candidates into xmm8 (the
# finished ciphertext block) and reload round key 0 for the next block.
704 vpor %xmm11,%xmm8,%xmm8
705 vaesenclast %xmm10,%xmm9,%xmm11
706 vmovdqu 0-128(%rdi),%xmm10
724 vmovdqa %xmm6,48(%rsp)
726 vpand %xmm14,%xmm11,%xmm11
728 vpor %xmm11,%xmm8,%xmm8
# Store the ciphertext block: out base in %r15, offset %r12.
729 vmovdqu %xmm8,(%r15,%r12,1)
# Steady-state 16-round tail: plain AES-CBC rounds interleaved with the
# remaining SHA-256 rounds (scalar SHA instructions elided in this
# excerpt). xmm9 ^= roundkey0 ^ previous ciphertext (xmm8).
744 vpxor %xmm10,%xmm9,%xmm9
745 vmovdqu 16-128(%rdi),%xmm10
772 vpxor %xmm8,%xmm9,%xmm9
799 vaesenc %xmm10,%xmm9,%xmm9
800 vmovdqu 32-128(%rdi),%xmm10
827 vaesenc %xmm10,%xmm9,%xmm9
828 vmovdqu 48-128(%rdi),%xmm10
855 vaesenc %xmm10,%xmm9,%xmm9
856 vmovdqu 64-128(%rdi),%xmm10
883 vaesenc %xmm10,%xmm9,%xmm9
884 vmovdqu 80-128(%rdi),%xmm10
911 vaesenc %xmm10,%xmm9,%xmm9
912 vmovdqu 96-128(%rdi),%xmm10
939 vaesenc %xmm10,%xmm9,%xmm9
940 vmovdqu 112-128(%rdi),%xmm10
967 vaesenc %xmm10,%xmm9,%xmm9
968 vmovdqu 128-128(%rdi),%xmm10
995 vaesenc %xmm10,%xmm9,%xmm9
996 vmovdqu 144-128(%rdi),%xmm10
1023 vaesenc %xmm10,%xmm9,%xmm9
1024 vmovdqu 160-128(%rdi),%xmm10
# Same speculative-last-round + mask-blend dance as above, for the
# final block of this iteration.
1051 vaesenclast %xmm10,%xmm9,%xmm11
1052 vaesenc %xmm10,%xmm9,%xmm9
1053 vmovdqu 176-128(%rdi),%xmm10
1080 vpand %xmm12,%xmm11,%xmm8
1081 vaesenc %xmm10,%xmm9,%xmm9
1082 vmovdqu 192-128(%rdi),%xmm10
1109 vaesenclast %xmm10,%xmm9,%xmm11
1110 vaesenc %xmm10,%xmm9,%xmm9
1111 vmovdqu 208-128(%rdi),%xmm10
1138 vpand %xmm13,%xmm11,%xmm11
1139 vaesenc %xmm10,%xmm9,%xmm9
1140 vmovdqu 224-128(%rdi),%xmm10
1167 vpor %xmm11,%xmm8,%xmm8
1168 vaesenclast %xmm10,%xmm9,%xmm11
1169 vmovdqu 0-128(%rdi),%xmm10
# Reload loop state saved in the frame: input offset, output pointer,
# end pointer, etc.
1187 movq 64+0(%rsp),%r12
1188 movq 64+8(%rsp),%r13
1189 movq 64+40(%rsp),%r15
1190 movq 64+48(%rsp),%rsi
1192 vpand %xmm14,%xmm11,%xmm11
1194 vpor %xmm11,%xmm8,%xmm8
1195 vmovdqu %xmm8,(%r12,%r13,1)
# Loop condition: compare current offset against saved input length /
# end marker (branch elided in this excerpt).
1207 cmpq 64+16(%rsp),%r12
# Epilogue: write back IV / hash state via %r8 (restored here); the
# register pops are elided in this excerpt.
1220 movq 64+32(%rsp),%r8
1238 .cfi_def_cfa_register %rsp
1242 .size aesni_cbc_sha256_enc_xop,.-aesni_cbc_sha256_enc_xop
# aesni_cbc_sha256_enc_avx — AVX(1) variant of the stitched
# AES-CBC encrypt + SHA-256 routine. Structure mirrors the XOP variant
# above, but sigma0/sigma1 rotations are built from vpsrld/vpslld/
# vpsrlq/vpxor instead of XOP rotates, and the scalar SHA rounds use
# `shrdl $n,%rX,%rX` — shrd with identical source and destination,
# which is a right-rotate idiom.
#
# NOTE(review): sampled excerpt — prologue pushes, labels, most scalar
# SHA round arithmetic, and loop branches are missing from this view.
# Register roles visible here: xmm9 = AES state (CBC chain), xmm10 =
# streamed round key (%rdi-128 biased key schedule), xmm8/xmm11 =
# vaesenclast candidates blended via xmm12/xmm13/xmm14 masks,
# xmm0-xmm3 = SHA-256 message schedule, 0..48(%rsp) = W+K spill,
# 64+N(%rsp) = saved arguments / loop state.
1243 .type aesni_cbc_sha256_enc_avx,@function
1245 aesni_cbc_sha256_enc_avx:
1250 .cfi_def_cfa_register %rax
1252 .cfi_offset %rbx,-16
1254 .cfi_offset %rbp,-24
1256 .cfi_offset %r12,-32
1258 .cfi_offset %r13,-40
1260 .cfi_offset %r14,-48
1262 .cfi_offset %r15,-56
# Stash arguments in the local frame.
1272 movq %rsi,64+8(%rsp)
1273 movq %rdx,64+16(%rsp)
1275 movq %r8,64+32(%rsp)
1276 movq %r9,64+40(%rsp)
1277 movq %r10,64+48(%rsp)
1279 .cfi_escape 0x0f,0x06,0x77,0xf8,0x00,0x06,0x23,0x08
# Blend-mask table and AES round count (see XOP variant for details).
1285 leaq K256+544(%rip),%r13
1286 movl 240-128(%rdi),%r14d
1301 vmovdqa 0(%r13,%r14,8),%xmm14
1302 vmovdqa 16(%r13,%r14,8),%xmm13
1303 vmovdqa 32(%r13,%r14,8),%xmm12
1304 vmovdqu 0-128(%rdi),%xmm10
# Load 64 bytes of message, byte-swap, add first round constants,
# spill W+K to stack.
1308 vmovdqa K256+512(%rip),%xmm7
1309 vmovdqu 0(%rsi,%r12,1),%xmm0
1310 vmovdqu 16(%rsi,%r12,1),%xmm1
1311 vmovdqu 32(%rsi,%r12,1),%xmm2
1312 vmovdqu 48(%rsi,%r12,1),%xmm3
1313 vpshufb %xmm7,%xmm0,%xmm0
1314 leaq K256(%rip),%rbp
1315 vpshufb %xmm7,%xmm1,%xmm1
1316 vpshufb %xmm7,%xmm2,%xmm2
1317 vpaddd 0(%rbp),%xmm0,%xmm4
1318 vpshufb %xmm7,%xmm3,%xmm3
1319 vpaddd 32(%rbp),%xmm1,%xmm5
1320 vpaddd 64(%rbp),%xmm2,%xmm6
1321 vpaddd 96(%rbp),%xmm3,%xmm7
1322 vmovdqa %xmm4,0(%rsp)
1324 vmovdqa %xmm5,16(%rsp)
1326 vmovdqa %xmm6,32(%rsp)
1328 vmovdqa %xmm7,48(%rsp)
# xmm9 = IV / previous ciphertext block at input offset %r12; the
# offset is saved for the store at the end of the 16-round group.
1335 vmovdqu (%r12),%xmm9
1336 movq %r12,64+0(%rsp)
# --- schedule quarter 1 (expands xmm0) interleaved with scalar SHA
# rounds (shrdl rotates) and AES rounds ---
1337 vpalignr $4,%xmm0,%xmm1,%xmm4
1338 shrdl $14,%r13d,%r13d
1341 vpalignr $4,%xmm2,%xmm3,%xmm7
1343 shrdl $9,%r14d,%r14d
1345 vpsrld $7,%xmm4,%xmm6
1346 shrdl $5,%r13d,%r13d
1349 vpaddd %xmm7,%xmm0,%xmm0
# CBC chaining: state ^= round key 0; stream round key 1.
1350 vpxor %xmm10,%xmm9,%xmm9
1351 vmovdqu 16-128(%rdi),%xmm10
1355 vpsrld $3,%xmm4,%xmm7
1356 shrdl $11,%r14d,%r14d
1359 vpslld $14,%xmm4,%xmm5
1360 shrdl $6,%r13d,%r13d
1363 vpxor %xmm6,%xmm7,%xmm4
1367 vpshufd $250,%xmm3,%xmm7
1369 shrdl $2,%r14d,%r14d
1371 vpsrld $11,%xmm6,%xmm6
1374 shrdl $14,%r13d,%r13d
1375 vpxor %xmm5,%xmm4,%xmm4
1379 vpslld $11,%xmm5,%xmm5
1380 shrdl $9,%r14d,%r14d
1382 shrdl $5,%r13d,%r13d
1383 vpxor %xmm6,%xmm4,%xmm4
# Mix plaintext block (xmm8) into the AES state — CBC input step.
1386 vpxor %xmm8,%xmm9,%xmm9
1388 vpsrld $10,%xmm7,%xmm6
1391 shrdl $11,%r14d,%r14d
1392 vpxor %xmm5,%xmm4,%xmm4
1395 shrdl $6,%r13d,%r13d
1396 vpsrlq $17,%xmm7,%xmm7
1400 vpaddd %xmm4,%xmm0,%xmm0
1404 vpxor %xmm7,%xmm6,%xmm6
1405 shrdl $2,%r14d,%r14d
1408 vpsrlq $2,%xmm7,%xmm7
1410 shrdl $14,%r13d,%r13d
1412 vpxor %xmm7,%xmm6,%xmm6
1415 shrdl $9,%r14d,%r14d
1416 vpshufd $132,%xmm6,%xmm6
1418 shrdl $5,%r13d,%r13d
1420 vpsrldq $8,%xmm6,%xmm6
1422 vaesenc %xmm10,%xmm9,%xmm9
1423 vmovdqu 32-128(%rdi),%xmm10
1426 vpaddd %xmm6,%xmm0,%xmm0
1428 shrdl $11,%r14d,%r14d
1430 vpshufd $80,%xmm0,%xmm7
1432 shrdl $6,%r13d,%r13d
1434 vpsrld $10,%xmm7,%xmm6
1438 vpsrlq $17,%xmm7,%xmm7
1441 shrdl $2,%r14d,%r14d
1442 vpxor %xmm7,%xmm6,%xmm6
1446 vpsrlq $2,%xmm7,%xmm7
1447 shrdl $14,%r13d,%r13d
1450 vpxor %xmm7,%xmm6,%xmm6
1452 shrdl $9,%r14d,%r14d
1454 vpshufd $232,%xmm6,%xmm6
1455 shrdl $5,%r13d,%r13d
1458 vpslldq $8,%xmm6,%xmm6
1459 vaesenc %xmm10,%xmm9,%xmm9
1460 vmovdqu 48-128(%rdi),%xmm10
1464 vpaddd %xmm6,%xmm0,%xmm0
1465 shrdl $11,%r14d,%r14d
1468 vpaddd 0(%rbp),%xmm0,%xmm6
1469 shrdl $6,%r13d,%r13d
1476 shrdl $2,%r14d,%r14d
1480 vmovdqa %xmm6,0(%rsp)
# --- schedule quarter 2 (expands xmm1) ---
1481 vpalignr $4,%xmm1,%xmm2,%xmm4
1482 shrdl $14,%r13d,%r13d
1485 vpalignr $4,%xmm3,%xmm0,%xmm7
1487 shrdl $9,%r14d,%r14d
1489 vpsrld $7,%xmm4,%xmm6
1490 shrdl $5,%r13d,%r13d
1493 vpaddd %xmm7,%xmm1,%xmm1
1494 vaesenc %xmm10,%xmm9,%xmm9
1495 vmovdqu 64-128(%rdi),%xmm10
1499 vpsrld $3,%xmm4,%xmm7
1500 shrdl $11,%r14d,%r14d
1503 vpslld $14,%xmm4,%xmm5
1504 shrdl $6,%r13d,%r13d
1507 vpxor %xmm6,%xmm7,%xmm4
1511 vpshufd $250,%xmm0,%xmm7
1513 shrdl $2,%r14d,%r14d
1515 vpsrld $11,%xmm6,%xmm6
1518 shrdl $14,%r13d,%r13d
1519 vpxor %xmm5,%xmm4,%xmm4
1523 vpslld $11,%xmm5,%xmm5
1524 shrdl $9,%r14d,%r14d
1526 shrdl $5,%r13d,%r13d
1527 vpxor %xmm6,%xmm4,%xmm4
1530 vaesenc %xmm10,%xmm9,%xmm9
1531 vmovdqu 80-128(%rdi),%xmm10
1533 vpsrld $10,%xmm7,%xmm6
1536 shrdl $11,%r14d,%r14d
1537 vpxor %xmm5,%xmm4,%xmm4
1540 shrdl $6,%r13d,%r13d
1541 vpsrlq $17,%xmm7,%xmm7
1545 vpaddd %xmm4,%xmm1,%xmm1
1549 vpxor %xmm7,%xmm6,%xmm6
1550 shrdl $2,%r14d,%r14d
1553 vpsrlq $2,%xmm7,%xmm7
1555 shrdl $14,%r13d,%r13d
1557 vpxor %xmm7,%xmm6,%xmm6
1560 shrdl $9,%r14d,%r14d
1561 vpshufd $132,%xmm6,%xmm6
1563 shrdl $5,%r13d,%r13d
1565 vpsrldq $8,%xmm6,%xmm6
1567 vaesenc %xmm10,%xmm9,%xmm9
1568 vmovdqu 96-128(%rdi),%xmm10
1571 vpaddd %xmm6,%xmm1,%xmm1
1573 shrdl $11,%r14d,%r14d
1575 vpshufd $80,%xmm1,%xmm7
1577 shrdl $6,%r13d,%r13d
1579 vpsrld $10,%xmm7,%xmm6
1583 vpsrlq $17,%xmm7,%xmm7
1586 shrdl $2,%r14d,%r14d
1587 vpxor %xmm7,%xmm6,%xmm6
1591 vpsrlq $2,%xmm7,%xmm7
1592 shrdl $14,%r13d,%r13d
1595 vpxor %xmm7,%xmm6,%xmm6
1597 shrdl $9,%r14d,%r14d
1599 vpshufd $232,%xmm6,%xmm6
1600 shrdl $5,%r13d,%r13d
1603 vpslldq $8,%xmm6,%xmm6
1604 vaesenc %xmm10,%xmm9,%xmm9
1605 vmovdqu 112-128(%rdi),%xmm10
1609 vpaddd %xmm6,%xmm1,%xmm1
1610 shrdl $11,%r14d,%r14d
1613 vpaddd 32(%rbp),%xmm1,%xmm6
1614 shrdl $6,%r13d,%r13d
1621 shrdl $2,%r14d,%r14d
1625 vmovdqa %xmm6,16(%rsp)
# --- schedule quarter 3 (expands xmm2) ---
1626 vpalignr $4,%xmm2,%xmm3,%xmm4
1627 shrdl $14,%r13d,%r13d
1630 vpalignr $4,%xmm0,%xmm1,%xmm7
1632 shrdl $9,%r14d,%r14d
1634 vpsrld $7,%xmm4,%xmm6
1635 shrdl $5,%r13d,%r13d
1638 vpaddd %xmm7,%xmm2,%xmm2
1639 vaesenc %xmm10,%xmm9,%xmm9
1640 vmovdqu 128-128(%rdi),%xmm10
1644 vpsrld $3,%xmm4,%xmm7
1645 shrdl $11,%r14d,%r14d
1648 vpslld $14,%xmm4,%xmm5
1649 shrdl $6,%r13d,%r13d
1652 vpxor %xmm6,%xmm7,%xmm4
1656 vpshufd $250,%xmm1,%xmm7
1658 shrdl $2,%r14d,%r14d
1660 vpsrld $11,%xmm6,%xmm6
1663 shrdl $14,%r13d,%r13d
1664 vpxor %xmm5,%xmm4,%xmm4
1668 vpslld $11,%xmm5,%xmm5
1669 shrdl $9,%r14d,%r14d
1671 shrdl $5,%r13d,%r13d
1672 vpxor %xmm6,%xmm4,%xmm4
1675 vaesenc %xmm10,%xmm9,%xmm9
1676 vmovdqu 144-128(%rdi),%xmm10
1678 vpsrld $10,%xmm7,%xmm6
1681 shrdl $11,%r14d,%r14d
1682 vpxor %xmm5,%xmm4,%xmm4
1685 shrdl $6,%r13d,%r13d
1686 vpsrlq $17,%xmm7,%xmm7
1690 vpaddd %xmm4,%xmm2,%xmm2
1694 vpxor %xmm7,%xmm6,%xmm6
1695 shrdl $2,%r14d,%r14d
1698 vpsrlq $2,%xmm7,%xmm7
1700 shrdl $14,%r13d,%r13d
1702 vpxor %xmm7,%xmm6,%xmm6
1705 shrdl $9,%r14d,%r14d
1706 vpshufd $132,%xmm6,%xmm6
1708 shrdl $5,%r13d,%r13d
1710 vpsrldq $8,%xmm6,%xmm6
1712 vaesenc %xmm10,%xmm9,%xmm9
1713 vmovdqu 160-128(%rdi),%xmm10
1716 vpaddd %xmm6,%xmm2,%xmm2
1718 shrdl $11,%r14d,%r14d
1720 vpshufd $80,%xmm2,%xmm7
1722 shrdl $6,%r13d,%r13d
1724 vpsrld $10,%xmm7,%xmm6
1728 vpsrlq $17,%xmm7,%xmm7
1731 shrdl $2,%r14d,%r14d
1732 vpxor %xmm7,%xmm6,%xmm6
1736 vpsrlq $2,%xmm7,%xmm7
1737 shrdl $14,%r13d,%r13d
1740 vpxor %xmm7,%xmm6,%xmm6
1742 shrdl $9,%r14d,%r14d
1744 vpshufd $232,%xmm6,%xmm6
1745 shrdl $5,%r13d,%r13d
1748 vpslldq $8,%xmm6,%xmm6
# Speculative last AES round for 128-bit keys (kept in xmm11); longer
# keys keep encrypting in xmm9. Masks blend the right result below.
1749 vaesenclast %xmm10,%xmm9,%xmm11
1750 vaesenc %xmm10,%xmm9,%xmm9
1751 vmovdqu 176-128(%rdi),%xmm10
1755 vpaddd %xmm6,%xmm2,%xmm2
1756 shrdl $11,%r14d,%r14d
1759 vpaddd 64(%rbp),%xmm2,%xmm6
1760 shrdl $6,%r13d,%r13d
1767 shrdl $2,%r14d,%r14d
1771 vmovdqa %xmm6,32(%rsp)
# --- schedule quarter 4 (expands xmm3) ---
1772 vpalignr $4,%xmm3,%xmm0,%xmm4
1773 shrdl $14,%r13d,%r13d
1776 vpalignr $4,%xmm1,%xmm2,%xmm7
1778 shrdl $9,%r14d,%r14d
1780 vpsrld $7,%xmm4,%xmm6
1781 shrdl $5,%r13d,%r13d
1784 vpaddd %xmm7,%xmm3,%xmm3
1785 vpand %xmm12,%xmm11,%xmm8
1786 vaesenc %xmm10,%xmm9,%xmm9
1787 vmovdqu 192-128(%rdi),%xmm10
1791 vpsrld $3,%xmm4,%xmm7
1792 shrdl $11,%r14d,%r14d
1795 vpslld $14,%xmm4,%xmm5
1796 shrdl $6,%r13d,%r13d
1799 vpxor %xmm6,%xmm7,%xmm4
1803 vpshufd $250,%xmm2,%xmm7
1805 shrdl $2,%r14d,%r14d
1807 vpsrld $11,%xmm6,%xmm6
1810 shrdl $14,%r13d,%r13d
1811 vpxor %xmm5,%xmm4,%xmm4
1815 vpslld $11,%xmm5,%xmm5
1816 shrdl $9,%r14d,%r14d
1818 shrdl $5,%r13d,%r13d
1819 vpxor %xmm6,%xmm4,%xmm4
# Speculative last AES round for 192-bit keys.
1822 vaesenclast %xmm10,%xmm9,%xmm11
1823 vaesenc %xmm10,%xmm9,%xmm9
1824 vmovdqu 208-128(%rdi),%xmm10
1826 vpsrld $10,%xmm7,%xmm6
1829 shrdl $11,%r14d,%r14d
1830 vpxor %xmm5,%xmm4,%xmm4
1833 shrdl $6,%r13d,%r13d
1834 vpsrlq $17,%xmm7,%xmm7
1838 vpaddd %xmm4,%xmm3,%xmm3
1842 vpxor %xmm7,%xmm6,%xmm6
1843 shrdl $2,%r14d,%r14d
1846 vpsrlq $2,%xmm7,%xmm7
1848 shrdl $14,%r13d,%r13d
1850 vpxor %xmm7,%xmm6,%xmm6
1853 shrdl $9,%r14d,%r14d
1854 vpshufd $132,%xmm6,%xmm6
1856 shrdl $5,%r13d,%r13d
1858 vpsrldq $8,%xmm6,%xmm6
1860 vpand %xmm13,%xmm11,%xmm11
1861 vaesenc %xmm10,%xmm9,%xmm9
1862 vmovdqu 224-128(%rdi),%xmm10
1865 vpaddd %xmm6,%xmm3,%xmm3
1867 shrdl $11,%r14d,%r14d
1869 vpshufd $80,%xmm3,%xmm7
1871 shrdl $6,%r13d,%r13d
1873 vpsrld $10,%xmm7,%xmm6
1877 vpsrlq $17,%xmm7,%xmm7
1880 shrdl $2,%r14d,%r14d
1881 vpxor %xmm7,%xmm6,%xmm6
1885 vpsrlq $2,%xmm7,%xmm7
1886 shrdl $14,%r13d,%r13d
1889 vpxor %xmm7,%xmm6,%xmm6
1891 shrdl $9,%r14d,%r14d
1893 vpshufd $232,%xmm6,%xmm6
1894 shrdl $5,%r13d,%r13d
1897 vpslldq $8,%xmm6,%xmm6
# Blend per-key-size last-round candidates into xmm8 and reload round
# key 0 for the next block.
1898 vpor %xmm11,%xmm8,%xmm8
1899 vaesenclast %xmm10,%xmm9,%xmm11
1900 vmovdqu 0-128(%rdi),%xmm10
1904 vpaddd %xmm6,%xmm3,%xmm3
1905 shrdl $11,%r14d,%r14d
1908 vpaddd 96(%rbp),%xmm3,%xmm6
1909 shrdl $6,%r13d,%r13d
1916 shrdl $2,%r14d,%r14d
1920 vmovdqa %xmm6,48(%rsp)
# Finish the blend and store the ciphertext block at (out, offset).
1921 movq 64+0(%rsp),%r12
1922 vpand %xmm14,%xmm11,%xmm11
1923 movq 64+8(%rsp),%r15
1924 vpor %xmm11,%xmm8,%xmm8
1925 vmovdqu %xmm8,(%r15,%r12,1)
# Steady-state tail: next input block into xmm9, offset saved; plain
# AES-CBC rounds interleaved with the remaining scalar SHA rounds
# (most scalar arithmetic elided in this excerpt).
1929 vmovdqu (%r12),%xmm9
1930 movq %r12,64+0(%rsp)
1931 shrdl $14,%r13d,%r13d
1935 shrdl $9,%r14d,%r14d
1937 shrdl $5,%r13d,%r13d
1940 vpxor %xmm10,%xmm9,%xmm9
1941 vmovdqu 16-128(%rdi),%xmm10
1945 shrdl $11,%r14d,%r14d
1948 shrdl $6,%r13d,%r13d
1955 shrdl $2,%r14d,%r14d
1959 shrdl $14,%r13d,%r13d
1963 shrdl $9,%r14d,%r14d
1965 shrdl $5,%r13d,%r13d
1968 vpxor %xmm8,%xmm9,%xmm9
1972 shrdl $11,%r14d,%r14d
1975 shrdl $6,%r13d,%r13d
1982 shrdl $2,%r14d,%r14d
1986 shrdl $14,%r13d,%r13d
1990 shrdl $9,%r14d,%r14d
1992 shrdl $5,%r13d,%r13d
1995 vaesenc %xmm10,%xmm9,%xmm9
1996 vmovdqu 32-128(%rdi),%xmm10
2000 shrdl $11,%r14d,%r14d
2003 shrdl $6,%r13d,%r13d
2010 shrdl $2,%r14d,%r14d
2014 shrdl $14,%r13d,%r13d
2018 shrdl $9,%r14d,%r14d
2020 shrdl $5,%r13d,%r13d
2023 vaesenc %xmm10,%xmm9,%xmm9
2024 vmovdqu 48-128(%rdi),%xmm10
2028 shrdl $11,%r14d,%r14d
2031 shrdl $6,%r13d,%r13d
2038 shrdl $2,%r14d,%r14d
2042 shrdl $14,%r13d,%r13d
2046 shrdl $9,%r14d,%r14d
2048 shrdl $5,%r13d,%r13d
2051 vaesenc %xmm10,%xmm9,%xmm9
2052 vmovdqu 64-128(%rdi),%xmm10
2056 shrdl $11,%r14d,%r14d
2059 shrdl $6,%r13d,%r13d
2066 shrdl $2,%r14d,%r14d
2070 shrdl $14,%r13d,%r13d
2074 shrdl $9,%r14d,%r14d
2076 shrdl $5,%r13d,%r13d
2079 vaesenc %xmm10,%xmm9,%xmm9
2080 vmovdqu 80-128(%rdi),%xmm10
2084 shrdl $11,%r14d,%r14d
2087 shrdl $6,%r13d,%r13d
2094 shrdl $2,%r14d,%r14d
2098 shrdl $14,%r13d,%r13d
2102 shrdl $9,%r14d,%r14d
2104 shrdl $5,%r13d,%r13d
2107 vaesenc %xmm10,%xmm9,%xmm9
2108 vmovdqu 96-128(%rdi),%xmm10
2112 shrdl $11,%r14d,%r14d
2115 shrdl $6,%r13d,%r13d
2122 shrdl $2,%r14d,%r14d
2126 shrdl $14,%r13d,%r13d
2130 shrdl $9,%r14d,%r14d
2132 shrdl $5,%r13d,%r13d
2135 vaesenc %xmm10,%xmm9,%xmm9
2136 vmovdqu 112-128(%rdi),%xmm10
2140 shrdl $11,%r14d,%r14d
2143 shrdl $6,%r13d,%r13d
2150 shrdl $2,%r14d,%r14d
2154 shrdl $14,%r13d,%r13d
2158 shrdl $9,%r14d,%r14d
2160 shrdl $5,%r13d,%r13d
2163 vaesenc %xmm10,%xmm9,%xmm9
2164 vmovdqu 128-128(%rdi),%xmm10
2168 shrdl $11,%r14d,%r14d
2171 shrdl $6,%r13d,%r13d
2178 shrdl $2,%r14d,%r14d
2182 shrdl $14,%r13d,%r13d
2186 shrdl $9,%r14d,%r14d
2188 shrdl $5,%r13d,%r13d
2191 vaesenc %xmm10,%xmm9,%xmm9
2192 vmovdqu 144-128(%rdi),%xmm10
2196 shrdl $11,%r14d,%r14d
2199 shrdl $6,%r13d,%r13d
2206 shrdl $2,%r14d,%r14d
2210 shrdl $14,%r13d,%r13d
2214 shrdl $9,%r14d,%r14d
2216 shrdl $5,%r13d,%r13d
2219 vaesenc %xmm10,%xmm9,%xmm9
2220 vmovdqu 160-128(%rdi),%xmm10
2224 shrdl $11,%r14d,%r14d
2227 shrdl $6,%r13d,%r13d
2234 shrdl $2,%r14d,%r14d
2238 shrdl $14,%r13d,%r13d
2242 shrdl $9,%r14d,%r14d
2244 shrdl $5,%r13d,%r13d
# Speculative-last-round + mask blend, as in the schedule phase above.
2247 vaesenclast %xmm10,%xmm9,%xmm11
2248 vaesenc %xmm10,%xmm9,%xmm9
2249 vmovdqu 176-128(%rdi),%xmm10
2253 shrdl $11,%r14d,%r14d
2256 shrdl $6,%r13d,%r13d
2263 shrdl $2,%r14d,%r14d
2267 shrdl $14,%r13d,%r13d
2271 shrdl $9,%r14d,%r14d
2273 shrdl $5,%r13d,%r13d
2276 vpand %xmm12,%xmm11,%xmm8
2277 vaesenc %xmm10,%xmm9,%xmm9
2278 vmovdqu 192-128(%rdi),%xmm10
2282 shrdl $11,%r14d,%r14d
2285 shrdl $6,%r13d,%r13d
2292 shrdl $2,%r14d,%r14d
2296 shrdl $14,%r13d,%r13d
2300 shrdl $9,%r14d,%r14d
2302 shrdl $5,%r13d,%r13d
2305 vaesenclast %xmm10,%xmm9,%xmm11
2306 vaesenc %xmm10,%xmm9,%xmm9
2307 vmovdqu 208-128(%rdi),%xmm10
2311 shrdl $11,%r14d,%r14d
2314 shrdl $6,%r13d,%r13d
2321 shrdl $2,%r14d,%r14d
2325 shrdl $14,%r13d,%r13d
2329 shrdl $9,%r14d,%r14d
2331 shrdl $5,%r13d,%r13d
2334 vpand %xmm13,%xmm11,%xmm11
2335 vaesenc %xmm10,%xmm9,%xmm9
2336 vmovdqu 224-128(%rdi),%xmm10
2340 shrdl $11,%r14d,%r14d
2343 shrdl $6,%r13d,%r13d
2350 shrdl $2,%r14d,%r14d
2354 shrdl $14,%r13d,%r13d
2358 shrdl $9,%r14d,%r14d
2360 shrdl $5,%r13d,%r13d
2363 vpor %xmm11,%xmm8,%xmm8
2364 vaesenclast %xmm10,%xmm9,%xmm11
2365 vmovdqu 0-128(%rdi),%xmm10
2369 shrdl $11,%r14d,%r14d
2372 shrdl $6,%r13d,%r13d
2379 shrdl $2,%r14d,%r14d
# Reload loop state, finish the blend, store ciphertext.
2383 movq 64+0(%rsp),%r12
2384 movq 64+8(%rsp),%r13
2385 movq 64+40(%rsp),%r15
2386 movq 64+48(%rsp),%rsi
2388 vpand %xmm14,%xmm11,%xmm11
2390 vpor %xmm11,%xmm8,%xmm8
2391 vmovdqu %xmm8,(%r12,%r13,1)
# Loop condition against saved end offset (branch elided in excerpt).
2403 cmpq 64+16(%rsp),%r12
# Epilogue: restore IV/state pointer; register pops elided in excerpt.
2415 movq 64+32(%rsp),%r8
2433 .cfi_def_cfa_register %rsp
2437 .size aesni_cbc_sha256_enc_avx,.-aesni_cbc_sha256_enc_avx
2438 .type aesni_cbc_sha256_enc_avx2,@function
2440 aesni_cbc_sha256_enc_avx2:
2445 .cfi_def_cfa_register %rax
2447 .cfi_offset %rbx,-16
2449 .cfi_offset %rbp,-24
2451 .cfi_offset %r12,-32
2453 .cfi_offset %r13,-40
2455 .cfi_offset %r14,-48
2457 .cfi_offset %r15,-56
2469 movq %rdx,64+16(%rsp)
2471 movq %r8,64+32(%rsp)
2472 movq %r9,64+40(%rsp)
2473 movq %r10,64+48(%rsp)
2475 .cfi_escape 0x0f,0x06,0x77,0xf8,0x00,0x06,0x23,0x08
2480 vpinsrq $1,%rsi,%xmm15,%xmm15
2482 leaq K256+544(%rip),%r12
2483 movl 240-128(%rdi),%r14d
2489 vmovdqa 0(%r12,%r14,8),%xmm14
2490 vmovdqa 16(%r12,%r14,8),%xmm13
2491 vmovdqa 32(%r12,%r14,8),%xmm12
2495 leaq (%rsi,%r13,1),%r12
2505 vmovdqu 0-128(%rdi),%xmm10
2509 vmovdqa K256+512(%rip),%ymm7
2510 vmovdqu -64+0(%rsi,%r13,1),%xmm0
2511 vmovdqu -64+16(%rsi,%r13,1),%xmm1
2512 vmovdqu -64+32(%rsi,%r13,1),%xmm2
2513 vmovdqu -64+48(%rsi,%r13,1),%xmm3
2515 vinserti128 $1,(%r12),%ymm0,%ymm0
2516 vinserti128 $1,16(%r12),%ymm1,%ymm1
2517 vpshufb %ymm7,%ymm0,%ymm0
2518 vinserti128 $1,32(%r12),%ymm2,%ymm2
2519 vpshufb %ymm7,%ymm1,%ymm1
2520 vinserti128 $1,48(%r12),%ymm3,%ymm3
2522 leaq K256(%rip),%rbp
2523 vpshufb %ymm7,%ymm2,%ymm2
2525 vpaddd 0(%rbp),%ymm0,%ymm4
2526 vpshufb %ymm7,%ymm3,%ymm3
2527 vpaddd 32(%rbp),%ymm1,%ymm5
2528 vpaddd 64(%rbp),%ymm2,%ymm6
2529 vpaddd 96(%rbp),%ymm3,%ymm7
2530 vmovdqa %ymm4,0(%rsp)
2532 vmovdqa %ymm5,32(%rsp)
2535 vmovdqa %ymm6,0(%rsp)
2537 vmovdqa %ymm7,32(%rsp)
2544 vmovdqu (%r13),%xmm9
2545 vpinsrq $0,%r13,%xmm15,%xmm15
2547 vpalignr $4,%ymm0,%ymm1,%ymm4
2548 addl 0+128(%rsp),%r11d
2550 rorxl $25,%r8d,%r13d
2551 vpalignr $4,%ymm2,%ymm3,%ymm7
2552 rorxl $11,%r8d,%r15d
2553 leal (%rax,%r14,1),%eax
2554 leal (%r11,%r12,1),%r11d
2555 vpsrld $7,%ymm4,%ymm6
2556 andnl %r10d,%r8d,%r12d
2559 vpaddd %ymm7,%ymm0,%ymm0
2560 leal (%r11,%r12,1),%r11d
2563 vpsrld $3,%ymm4,%ymm7
2564 rorxl $22,%eax,%r12d
2565 leal (%r11,%r13,1),%r11d
2567 vpslld $14,%ymm4,%ymm5
2568 rorxl $13,%eax,%r14d
2570 leal (%rdx,%r11,1),%edx
2571 vpxor %ymm6,%ymm7,%ymm4
2573 vpxor %xmm10,%xmm9,%xmm9
2574 vmovdqu 16-128(%rdi),%xmm10
2577 vpshufd $250,%ymm3,%ymm7
2579 leal (%r11,%rsi,1),%r11d
2581 vpsrld $11,%ymm6,%ymm6
2582 addl 4+128(%rsp),%r10d
2584 rorxl $25,%edx,%r13d
2585 vpxor %ymm5,%ymm4,%ymm4
2587 leal (%r11,%r14,1),%r11d
2588 leal (%r10,%r12,1),%r10d
2589 vpslld $11,%ymm5,%ymm5
2590 andnl %r9d,%edx,%r12d
2593 vpxor %ymm6,%ymm4,%ymm4
2594 leal (%r10,%r12,1),%r10d
2597 vpsrld $10,%ymm7,%ymm6
2598 rorxl $22,%r11d,%r12d
2599 leal (%r10,%r13,1),%r10d
2601 vpxor %ymm5,%ymm4,%ymm4
2602 rorxl $13,%r11d,%r14d
2603 rorxl $2,%r11d,%r13d
2604 leal (%rcx,%r10,1),%ecx
2605 vpsrlq $17,%ymm7,%ymm7
2607 vpxor %xmm8,%xmm9,%xmm9
2610 vpaddd %ymm4,%ymm0,%ymm0
2612 leal (%r10,%r15,1),%r10d
2614 vpxor %ymm7,%ymm6,%ymm6
2615 addl 8+128(%rsp),%r9d
2617 rorxl $25,%ecx,%r13d
2618 vpsrlq $2,%ymm7,%ymm7
2619 rorxl $11,%ecx,%r15d
2620 leal (%r10,%r14,1),%r10d
2621 leal (%r9,%r12,1),%r9d
2622 vpxor %ymm7,%ymm6,%ymm6
2623 andnl %r8d,%ecx,%r12d
2626 vpshufd $132,%ymm6,%ymm6
2627 leal (%r9,%r12,1),%r9d
2630 vpsrldq $8,%ymm6,%ymm6
2631 rorxl $22,%r10d,%r12d
2632 leal (%r9,%r13,1),%r9d
2634 vpaddd %ymm6,%ymm0,%ymm0
2635 rorxl $13,%r10d,%r14d
2636 rorxl $2,%r10d,%r13d
2637 leal (%rbx,%r9,1),%ebx
2638 vpshufd $80,%ymm0,%ymm7
2640 vaesenc %xmm10,%xmm9,%xmm9
2641 vmovdqu 32-128(%rdi),%xmm10
2644 vpsrld $10,%ymm7,%ymm6
2646 leal (%r9,%rsi,1),%r9d
2648 vpsrlq $17,%ymm7,%ymm7
2649 addl 12+128(%rsp),%r8d
2651 rorxl $25,%ebx,%r13d
2652 vpxor %ymm7,%ymm6,%ymm6
2654 leal (%r9,%r14,1),%r9d
2655 leal (%r8,%r12,1),%r8d
2656 vpsrlq $2,%ymm7,%ymm7
2657 andnl %edx,%ebx,%r12d
2660 vpxor %ymm7,%ymm6,%ymm6
2661 leal (%r8,%r12,1),%r8d
2664 vpshufd $232,%ymm6,%ymm6
2665 rorxl $22,%r9d,%r12d
2666 leal (%r8,%r13,1),%r8d
2668 vpslldq $8,%ymm6,%ymm6
2669 rorxl $13,%r9d,%r14d
2671 leal (%rax,%r8,1),%eax
2672 vpaddd %ymm6,%ymm0,%ymm0
2674 vaesenc %xmm10,%xmm9,%xmm9
2675 vmovdqu 48-128(%rdi),%xmm10
2678 vpaddd 0(%rbp),%ymm0,%ymm6
2680 leal (%r8,%r15,1),%r8d
2682 vmovdqa %ymm6,0(%rsp)
2683 vpalignr $4,%ymm1,%ymm2,%ymm4
2684 addl 32+128(%rsp),%edx
2686 rorxl $25,%eax,%r13d
2687 vpalignr $4,%ymm3,%ymm0,%ymm7
2688 rorxl $11,%eax,%r15d
2689 leal (%r8,%r14,1),%r8d
2690 leal (%rdx,%r12,1),%edx
2691 vpsrld $7,%ymm4,%ymm6
2692 andnl %ecx,%eax,%r12d
2695 vpaddd %ymm7,%ymm1,%ymm1
2696 leal (%rdx,%r12,1),%edx
2699 vpsrld $3,%ymm4,%ymm7
2700 rorxl $22,%r8d,%r12d
2701 leal (%rdx,%r13,1),%edx
2703 vpslld $14,%ymm4,%ymm5
2704 rorxl $13,%r8d,%r14d
2706 leal (%r11,%rdx,1),%r11d
2707 vpxor %ymm6,%ymm7,%ymm4
2709 vaesenc %xmm10,%xmm9,%xmm9
2710 vmovdqu 64-128(%rdi),%xmm10
2713 vpshufd $250,%ymm0,%ymm7
2715 leal (%rdx,%rsi,1),%edx
2717 vpsrld $11,%ymm6,%ymm6
2718 addl 36+128(%rsp),%ecx
2720 rorxl $25,%r11d,%r13d
2721 vpxor %ymm5,%ymm4,%ymm4
2722 rorxl $11,%r11d,%esi
2723 leal (%rdx,%r14,1),%edx
2724 leal (%rcx,%r12,1),%ecx
2725 vpslld $11,%ymm5,%ymm5
2726 andnl %ebx,%r11d,%r12d
2728 rorxl $6,%r11d,%r14d
2729 vpxor %ymm6,%ymm4,%ymm4
2730 leal (%rcx,%r12,1),%ecx
2733 vpsrld $10,%ymm7,%ymm6
2734 rorxl $22,%edx,%r12d
2735 leal (%rcx,%r13,1),%ecx
2737 vpxor %ymm5,%ymm4,%ymm4
2738 rorxl $13,%edx,%r14d
2740 leal (%r10,%rcx,1),%r10d
2741 vpsrlq $17,%ymm7,%ymm7
2743 vaesenc %xmm10,%xmm9,%xmm9
2744 vmovdqu 80-128(%rdi),%xmm10
2747 vpaddd %ymm4,%ymm1,%ymm1
2749 leal (%rcx,%r15,1),%ecx
2751 vpxor %ymm7,%ymm6,%ymm6
2752 addl 40+128(%rsp),%ebx
2754 rorxl $25,%r10d,%r13d
2755 vpsrlq $2,%ymm7,%ymm7
2756 rorxl $11,%r10d,%r15d
2757 leal (%rcx,%r14,1),%ecx
2758 leal (%rbx,%r12,1),%ebx
2759 vpxor %ymm7,%ymm6,%ymm6
2760 andnl %eax,%r10d,%r12d
2762 rorxl $6,%r10d,%r14d
2763 vpshufd $132,%ymm6,%ymm6
2764 leal (%rbx,%r12,1),%ebx
2767 vpsrldq $8,%ymm6,%ymm6
2768 rorxl $22,%ecx,%r12d
2769 leal (%rbx,%r13,1),%ebx
2771 vpaddd %ymm6,%ymm1,%ymm1
2772 rorxl $13,%ecx,%r14d
2774 leal (%r9,%rbx,1),%r9d
2775 vpshufd $80,%ymm1,%ymm7
2777 vaesenc %xmm10,%xmm9,%xmm9
2778 vmovdqu 96-128(%rdi),%xmm10
2781 vpsrld $10,%ymm7,%ymm6
2783 leal (%rbx,%rsi,1),%ebx
2785 vpsrlq $17,%ymm7,%ymm7
2786 addl 44+128(%rsp),%eax
2788 rorxl $25,%r9d,%r13d
2789 vpxor %ymm7,%ymm6,%ymm6
2791 leal (%rbx,%r14,1),%ebx
2792 leal (%rax,%r12,1),%eax
2793 vpsrlq $2,%ymm7,%ymm7
2794 andnl %r11d,%r9d,%r12d
2797 vpxor %ymm7,%ymm6,%ymm6
2798 leal (%rax,%r12,1),%eax
2801 vpshufd $232,%ymm6,%ymm6
2802 rorxl $22,%ebx,%r12d
2803 leal (%rax,%r13,1),%eax
2805 vpslldq $8,%ymm6,%ymm6
2806 rorxl $13,%ebx,%r14d
2808 leal (%r8,%rax,1),%r8d
2809 vpaddd %ymm6,%ymm1,%ymm1
2811 vaesenc %xmm10,%xmm9,%xmm9
2812 vmovdqu 112-128(%rdi),%xmm10
2815 vpaddd 32(%rbp),%ymm1,%ymm6
2817 leal (%rax,%r15,1),%eax
2819 vmovdqa %ymm6,32(%rsp)
2821 vpalignr $4,%ymm2,%ymm3,%ymm4
2822 addl 0+128(%rsp),%r11d
2824 rorxl $25,%r8d,%r13d
2825 vpalignr $4,%ymm0,%ymm1,%ymm7
2826 rorxl $11,%r8d,%r15d
2827 leal (%rax,%r14,1),%eax
2828 leal (%r11,%r12,1),%r11d
2829 vpsrld $7,%ymm4,%ymm6
2830 andnl %r10d,%r8d,%r12d
2833 vpaddd %ymm7,%ymm2,%ymm2
2834 leal (%r11,%r12,1),%r11d
2837 vpsrld $3,%ymm4,%ymm7
2838 rorxl $22,%eax,%r12d
2839 leal (%r11,%r13,1),%r11d
2841 vpslld $14,%ymm4,%ymm5
2842 rorxl $13,%eax,%r14d
2844 leal (%rdx,%r11,1),%edx
2845 vpxor %ymm6,%ymm7,%ymm4
2847 vaesenc %xmm10,%xmm9,%xmm9
2848 vmovdqu 128-128(%rdi),%xmm10
2851 vpshufd $250,%ymm1,%ymm7
2853 leal (%r11,%rsi,1),%r11d
2855 vpsrld $11,%ymm6,%ymm6
2856 addl 4+128(%rsp),%r10d
2858 rorxl $25,%edx,%r13d
2859 vpxor %ymm5,%ymm4,%ymm4
2861 leal (%r11,%r14,1),%r11d
2862 leal (%r10,%r12,1),%r10d
2863 vpslld $11,%ymm5,%ymm5
2864 andnl %r9d,%edx,%r12d
2867 vpxor %ymm6,%ymm4,%ymm4
2868 leal (%r10,%r12,1),%r10d
2871 vpsrld $10,%ymm7,%ymm6
2872 rorxl $22,%r11d,%r12d
2873 leal (%r10,%r13,1),%r10d
2875 vpxor %ymm5,%ymm4,%ymm4
2876 rorxl $13,%r11d,%r14d
2877 rorxl $2,%r11d,%r13d
2878 leal (%rcx,%r10,1),%ecx
2879 vpsrlq $17,%ymm7,%ymm7
2881 vaesenc %xmm10,%xmm9,%xmm9
2882 vmovdqu 144-128(%rdi),%xmm10
2885 vpaddd %ymm4,%ymm2,%ymm2
2887 leal (%r10,%r15,1),%r10d
2889 vpxor %ymm7,%ymm6,%ymm6
2890 addl 8+128(%rsp),%r9d
2892 rorxl $25,%ecx,%r13d
2893 vpsrlq $2,%ymm7,%ymm7
2894 rorxl $11,%ecx,%r15d
2895 leal (%r10,%r14,1),%r10d
2896 leal (%r9,%r12,1),%r9d
2897 vpxor %ymm7,%ymm6,%ymm6
2898 andnl %r8d,%ecx,%r12d
2901 vpshufd $132,%ymm6,%ymm6
2902 leal (%r9,%r12,1),%r9d
2905 vpsrldq $8,%ymm6,%ymm6
2906 rorxl $22,%r10d,%r12d
2907 leal (%r9,%r13,1),%r9d
2909 vpaddd %ymm6,%ymm2,%ymm2
2910 rorxl $13,%r10d,%r14d
2911 rorxl $2,%r10d,%r13d
2912 leal (%rbx,%r9,1),%ebx
2913 vpshufd $80,%ymm2,%ymm7
2915 vaesenc %xmm10,%xmm9,%xmm9
2916 vmovdqu 160-128(%rdi),%xmm10
2919 vpsrld $10,%ymm7,%ymm6
2921 leal (%r9,%rsi,1),%r9d
2923 vpsrlq $17,%ymm7,%ymm7
2924 addl 12+128(%rsp),%r8d
2926 rorxl $25,%ebx,%r13d
2927 vpxor %ymm7,%ymm6,%ymm6
2929 leal (%r9,%r14,1),%r9d
2930 leal (%r8,%r12,1),%r8d
2931 vpsrlq $2,%ymm7,%ymm7
2932 andnl %edx,%ebx,%r12d
2935 vpxor %ymm7,%ymm6,%ymm6
2936 leal (%r8,%r12,1),%r8d
2939 vpshufd $232,%ymm6,%ymm6
2940 rorxl $22,%r9d,%r12d
2941 leal (%r8,%r13,1),%r8d
2943 vpslldq $8,%ymm6,%ymm6
2944 rorxl $13,%r9d,%r14d
2946 leal (%rax,%r8,1),%eax
2947 vpaddd %ymm6,%ymm2,%ymm2
2949 vaesenclast %xmm10,%xmm9,%xmm11
2950 vaesenc %xmm10,%xmm9,%xmm9
2951 vmovdqu 176-128(%rdi),%xmm10
2954 vpaddd 64(%rbp),%ymm2,%ymm6
2956 leal (%r8,%r15,1),%r8d
2958 vmovdqa %ymm6,0(%rsp)
2959 vpalignr $4,%ymm3,%ymm0,%ymm4
2960 addl 32+128(%rsp),%edx
2962 rorxl $25,%eax,%r13d
2963 vpalignr $4,%ymm1,%ymm2,%ymm7
2964 rorxl $11,%eax,%r15d
2965 leal (%r8,%r14,1),%r8d
2966 leal (%rdx,%r12,1),%edx
2967 vpsrld $7,%ymm4,%ymm6
2968 andnl %ecx,%eax,%r12d
2971 vpaddd %ymm7,%ymm3,%ymm3
2972 leal (%rdx,%r12,1),%edx
2975 vpsrld $3,%ymm4,%ymm7
2976 rorxl $22,%r8d,%r12d
2977 leal (%rdx,%r13,1),%edx
2979 vpslld $14,%ymm4,%ymm5
2980 rorxl $13,%r8d,%r14d
2982 leal (%r11,%rdx,1),%r11d
2983 vpxor %ymm6,%ymm7,%ymm4
2985 vpand %xmm12,%xmm11,%xmm8
2986 vaesenc %xmm10,%xmm9,%xmm9
2987 vmovdqu 192-128(%rdi),%xmm10
2990 vpshufd $250,%ymm2,%ymm7
2992 leal (%rdx,%rsi,1),%edx
2994 vpsrld $11,%ymm6,%ymm6
2995 addl 36+128(%rsp),%ecx
2997 rorxl $25,%r11d,%r13d
2998 vpxor %ymm5,%ymm4,%ymm4
2999 rorxl $11,%r11d,%esi
3000 leal (%rdx,%r14,1),%edx
3001 leal (%rcx,%r12,1),%ecx
3002 vpslld $11,%ymm5,%ymm5
3003 andnl %ebx,%r11d,%r12d
3005 rorxl $6,%r11d,%r14d
3006 vpxor %ymm6,%ymm4,%ymm4
3007 leal (%rcx,%r12,1),%ecx
3010 vpsrld $10,%ymm7,%ymm6
3011 rorxl $22,%edx,%r12d
3012 leal (%rcx,%r13,1),%ecx
3014 vpxor %ymm5,%ymm4,%ymm4
3015 rorxl $13,%edx,%r14d
3017 leal (%r10,%rcx,1),%r10d
3018 vpsrlq $17,%ymm7,%ymm7
3020 vaesenclast %xmm10,%xmm9,%xmm11
3021 vaesenc %xmm10,%xmm9,%xmm9
3022 vmovdqu 208-128(%rdi),%xmm10
3025 vpaddd %ymm4,%ymm3,%ymm3
3027 leal (%rcx,%r15,1),%ecx
3029 vpxor %ymm7,%ymm6,%ymm6
3030 addl 40+128(%rsp),%ebx
3032 rorxl $25,%r10d,%r13d
3033 vpsrlq $2,%ymm7,%ymm7
3034 rorxl $11,%r10d,%r15d
3035 leal (%rcx,%r14,1),%ecx
3036 leal (%rbx,%r12,1),%ebx
3037 vpxor %ymm7,%ymm6,%ymm6
3038 andnl %eax,%r10d,%r12d
3040 rorxl $6,%r10d,%r14d
3041 vpshufd $132,%ymm6,%ymm6
3042 leal (%rbx,%r12,1),%ebx
3045 vpsrldq $8,%ymm6,%ymm6
3046 rorxl $22,%ecx,%r12d
3047 leal (%rbx,%r13,1),%ebx
3049 vpaddd %ymm6,%ymm3,%ymm3
3050 rorxl $13,%ecx,%r14d
3052 leal (%r9,%rbx,1),%r9d
3053 vpshufd $80,%ymm3,%ymm7
3055 vpand %xmm13,%xmm11,%xmm11
3056 vaesenc %xmm10,%xmm9,%xmm9
3057 vmovdqu 224-128(%rdi),%xmm10
3060 vpsrld $10,%ymm7,%ymm6
3062 leal (%rbx,%rsi,1),%ebx
3064 vpsrlq $17,%ymm7,%ymm7
3065 addl 44+128(%rsp),%eax
3067 rorxl $25,%r9d,%r13d
3068 vpxor %ymm7,%ymm6,%ymm6
3070 leal (%rbx,%r14,1),%ebx
3071 leal (%rax,%r12,1),%eax
3072 vpsrlq $2,%ymm7,%ymm7
3073 andnl %r11d,%r9d,%r12d
3076 vpxor %ymm7,%ymm6,%ymm6
3077 leal (%rax,%r12,1),%eax
3080 vpshufd $232,%ymm6,%ymm6
3081 rorxl $22,%ebx,%r12d
3082 leal (%rax,%r13,1),%eax
3084 vpslldq $8,%ymm6,%ymm6
3085 rorxl $13,%ebx,%r14d
3087 leal (%r8,%rax,1),%r8d
3088 vpaddd %ymm6,%ymm3,%ymm3
3090 vpor %xmm11,%xmm8,%xmm8
3091 vaesenclast %xmm10,%xmm9,%xmm11
3092 vmovdqu 0-128(%rdi),%xmm10
3095 vpaddd 96(%rbp),%ymm3,%ymm6
3097 leal (%rax,%r15,1),%eax
3099 vmovdqa %ymm6,32(%rsp)
3101 vpextrq $1,%xmm15,%r15
3102 vpand %xmm14,%xmm11,%xmm11
3103 vpor %xmm11,%xmm8,%xmm8
3104 vmovdqu %xmm8,(%r15,%r13,1)
3109 vmovdqu (%r13),%xmm9
3110 vpinsrq $0,%r13,%xmm15,%xmm15
3111 addl 0+64(%rsp),%r11d
3113 rorxl $25,%r8d,%r13d
3114 rorxl $11,%r8d,%r15d
3115 leal (%rax,%r14,1),%eax
3116 leal (%r11,%r12,1),%r11d
3117 andnl %r10d,%r8d,%r12d
3120 leal (%r11,%r12,1),%r11d
3123 rorxl $22,%eax,%r12d
3124 leal (%r11,%r13,1),%r11d
3126 rorxl $13,%eax,%r14d
3128 leal (%rdx,%r11,1),%edx
3130 vpxor %xmm10,%xmm9,%xmm9
3131 vmovdqu 16-128(%rdi),%xmm10
3135 leal (%r11,%rsi,1),%r11d
3137 addl 4+64(%rsp),%r10d
3139 rorxl $25,%edx,%r13d
3141 leal (%r11,%r14,1),%r11d
3142 leal (%r10,%r12,1),%r10d
3143 andnl %r9d,%edx,%r12d
3146 leal (%r10,%r12,1),%r10d
3149 rorxl $22,%r11d,%r12d
3150 leal (%r10,%r13,1),%r10d
3152 rorxl $13,%r11d,%r14d
3153 rorxl $2,%r11d,%r13d
3154 leal (%rcx,%r10,1),%ecx
3156 vpxor %xmm8,%xmm9,%xmm9
3160 leal (%r10,%r15,1),%r10d
3162 addl 8+64(%rsp),%r9d
3164 rorxl $25,%ecx,%r13d
3165 rorxl $11,%ecx,%r15d
3166 leal (%r10,%r14,1),%r10d
3167 leal (%r9,%r12,1),%r9d
3168 andnl %r8d,%ecx,%r12d
3171 leal (%r9,%r12,1),%r9d
3174 rorxl $22,%r10d,%r12d
3175 leal (%r9,%r13,1),%r9d
3177 rorxl $13,%r10d,%r14d
3178 rorxl $2,%r10d,%r13d
3179 leal (%rbx,%r9,1),%ebx
3181 vaesenc %xmm10,%xmm9,%xmm9
3182 vmovdqu 32-128(%rdi),%xmm10
3186 leal (%r9,%rsi,1),%r9d
3188 addl 12+64(%rsp),%r8d
3190 rorxl $25,%ebx,%r13d
3192 leal (%r9,%r14,1),%r9d
3193 leal (%r8,%r12,1),%r8d
3194 andnl %edx,%ebx,%r12d
3197 leal (%r8,%r12,1),%r8d
3200 rorxl $22,%r9d,%r12d
3201 leal (%r8,%r13,1),%r8d
3203 rorxl $13,%r9d,%r14d
3205 leal (%rax,%r8,1),%eax
3207 vaesenc %xmm10,%xmm9,%xmm9
3208 vmovdqu 48-128(%rdi),%xmm10
3212 leal (%r8,%r15,1),%r8d
3214 addl 32+64(%rsp),%edx
3216 rorxl $25,%eax,%r13d
3217 rorxl $11,%eax,%r15d
3218 leal (%r8,%r14,1),%r8d
3219 leal (%rdx,%r12,1),%edx
3220 andnl %ecx,%eax,%r12d
3223 leal (%rdx,%r12,1),%edx
3226 rorxl $22,%r8d,%r12d
3227 leal (%rdx,%r13,1),%edx
3229 rorxl $13,%r8d,%r14d
3231 leal (%r11,%rdx,1),%r11d
3233 vaesenc %xmm10,%xmm9,%xmm9
3234 vmovdqu 64-128(%rdi),%xmm10
3238 leal (%rdx,%rsi,1),%edx
3240 addl 36+64(%rsp),%ecx
3242 rorxl $25,%r11d,%r13d
3243 rorxl $11,%r11d,%esi
3244 leal (%rdx,%r14,1),%edx
3245 leal (%rcx,%r12,1),%ecx
3246 andnl %ebx,%r11d,%r12d
3248 rorxl $6,%r11d,%r14d
3249 leal (%rcx,%r12,1),%ecx
3252 rorxl $22,%edx,%r12d
3253 leal (%rcx,%r13,1),%ecx
3255 rorxl $13,%edx,%r14d
3257 leal (%r10,%rcx,1),%r10d
3259 vaesenc %xmm10,%xmm9,%xmm9
3260 vmovdqu 80-128(%rdi),%xmm10
3264 leal (%rcx,%r15,1),%ecx
3266 addl 40+64(%rsp),%ebx
3268 rorxl $25,%r10d,%r13d
3269 rorxl $11,%r10d,%r15d
3270 leal (%rcx,%r14,1),%ecx
3271 leal (%rbx,%r12,1),%ebx
3272 andnl %eax,%r10d,%r12d
3274 rorxl $6,%r10d,%r14d
3275 leal (%rbx,%r12,1),%ebx
3278 rorxl $22,%ecx,%r12d
3279 leal (%rbx,%r13,1),%ebx
3281 rorxl $13,%ecx,%r14d
3283 leal (%r9,%rbx,1),%r9d
3285 vaesenc %xmm10,%xmm9,%xmm9
3286 vmovdqu 96-128(%rdi),%xmm10
3290 leal (%rbx,%rsi,1),%ebx
3292 addl 44+64(%rsp),%eax
3294 rorxl $25,%r9d,%r13d
3296 leal (%rbx,%r14,1),%ebx
3297 leal (%rax,%r12,1),%eax
3298 andnl %r11d,%r9d,%r12d
3301 leal (%rax,%r12,1),%eax
3304 rorxl $22,%ebx,%r12d
3305 leal (%rax,%r13,1),%eax
3307 rorxl $13,%ebx,%r14d
3309 leal (%r8,%rax,1),%r8d
3311 vaesenc %xmm10,%xmm9,%xmm9
3312 vmovdqu 112-128(%rdi),%xmm10
3316 leal (%rax,%r15,1),%eax
3320 rorxl $25,%r8d,%r13d
3321 rorxl $11,%r8d,%r15d
3322 leal (%rax,%r14,1),%eax
3323 leal (%r11,%r12,1),%r11d
3324 andnl %r10d,%r8d,%r12d
3327 leal (%r11,%r12,1),%r11d
3330 rorxl $22,%eax,%r12d
3331 leal (%r11,%r13,1),%r11d
3333 rorxl $13,%eax,%r14d
3335 leal (%rdx,%r11,1),%edx
3337 vaesenc %xmm10,%xmm9,%xmm9
3338 vmovdqu 128-128(%rdi),%xmm10
3342 leal (%r11,%rsi,1),%r11d
3346 rorxl $25,%edx,%r13d
3348 leal (%r11,%r14,1),%r11d
3349 leal (%r10,%r12,1),%r10d
3350 andnl %r9d,%edx,%r12d
3353 leal (%r10,%r12,1),%r10d
3356 rorxl $22,%r11d,%r12d
3357 leal (%r10,%r13,1),%r10d
3359 rorxl $13,%r11d,%r14d
3360 rorxl $2,%r11d,%r13d
3361 leal (%rcx,%r10,1),%ecx
3363 vaesenc %xmm10,%xmm9,%xmm9
3364 vmovdqu 144-128(%rdi),%xmm10
3368 leal (%r10,%r15,1),%r10d
3372 rorxl $25,%ecx,%r13d
3373 rorxl $11,%ecx,%r15d
3374 leal (%r10,%r14,1),%r10d
3375 leal (%r9,%r12,1),%r9d
3376 andnl %r8d,%ecx,%r12d
3379 leal (%r9,%r12,1),%r9d
3382 rorxl $22,%r10d,%r12d
3383 leal (%r9,%r13,1),%r9d
3385 rorxl $13,%r10d,%r14d
3386 rorxl $2,%r10d,%r13d
3387 leal (%rbx,%r9,1),%ebx
3389 vaesenc %xmm10,%xmm9,%xmm9
3390 vmovdqu 160-128(%rdi),%xmm10
3394 leal (%r9,%rsi,1),%r9d
3398 rorxl $25,%ebx,%r13d
3400 leal (%r9,%r14,1),%r9d
3401 leal (%r8,%r12,1),%r8d
3402 andnl %edx,%ebx,%r12d
3405 leal (%r8,%r12,1),%r8d
3408 rorxl $22,%r9d,%r12d
3409 leal (%r8,%r13,1),%r8d
3411 rorxl $13,%r9d,%r14d
3413 leal (%rax,%r8,1),%eax
3415 vaesenclast %xmm10,%xmm9,%xmm11
3416 vaesenc %xmm10,%xmm9,%xmm9
3417 vmovdqu 176-128(%rdi),%xmm10
3421 leal (%r8,%r15,1),%r8d
3425 rorxl $25,%eax,%r13d
3426 rorxl $11,%eax,%r15d
3427 leal (%r8,%r14,1),%r8d
3428 leal (%rdx,%r12,1),%edx
3429 andnl %ecx,%eax,%r12d
3432 leal (%rdx,%r12,1),%edx
3435 rorxl $22,%r8d,%r12d
3436 leal (%rdx,%r13,1),%edx
3438 rorxl $13,%r8d,%r14d
3440 leal (%r11,%rdx,1),%r11d
3442 vpand %xmm12,%xmm11,%xmm8
3443 vaesenc %xmm10,%xmm9,%xmm9
3444 vmovdqu 192-128(%rdi),%xmm10
3448 leal (%rdx,%rsi,1),%edx
3452 rorxl $25,%r11d,%r13d
3453 rorxl $11,%r11d,%esi
3454 leal (%rdx,%r14,1),%edx
3455 leal (%rcx,%r12,1),%ecx
3456 andnl %ebx,%r11d,%r12d
3458 rorxl $6,%r11d,%r14d
3459 leal (%rcx,%r12,1),%ecx
3462 rorxl $22,%edx,%r12d
3463 leal (%rcx,%r13,1),%ecx
3465 rorxl $13,%edx,%r14d
3467 leal (%r10,%rcx,1),%r10d
3469 vaesenclast %xmm10,%xmm9,%xmm11
3470 vaesenc %xmm10,%xmm9,%xmm9
3471 vmovdqu 208-128(%rdi),%xmm10
3475 leal (%rcx,%r15,1),%ecx
3479 rorxl $25,%r10d,%r13d
3480 rorxl $11,%r10d,%r15d
3481 leal (%rcx,%r14,1),%ecx
3482 leal (%rbx,%r12,1),%ebx
3483 andnl %eax,%r10d,%r12d
3485 rorxl $6,%r10d,%r14d
3486 leal (%rbx,%r12,1),%ebx
3489 rorxl $22,%ecx,%r12d
3490 leal (%rbx,%r13,1),%ebx
3492 rorxl $13,%ecx,%r14d
3494 leal (%r9,%rbx,1),%r9d
3496 vpand %xmm13,%xmm11,%xmm11
3497 vaesenc %xmm10,%xmm9,%xmm9
3498 vmovdqu 224-128(%rdi),%xmm10
3502 leal (%rbx,%rsi,1),%ebx
3506 rorxl $25,%r9d,%r13d
3508 leal (%rbx,%r14,1),%ebx
3509 leal (%rax,%r12,1),%eax
3510 andnl %r11d,%r9d,%r12d
3513 leal (%rax,%r12,1),%eax
3516 rorxl $22,%ebx,%r12d
3517 leal (%rax,%r13,1),%eax
3519 rorxl $13,%ebx,%r14d
3521 leal (%r8,%rax,1),%r8d
3523 vpor %xmm11,%xmm8,%xmm8
3524 vaesenclast %xmm10,%xmm9,%xmm11
3525 vmovdqu 0-128(%rdi),%xmm10
3529 leal (%rax,%r15,1),%eax
3531 vpextrq $1,%xmm15,%r12
3537 vpand %xmm14,%xmm11,%xmm11
3538 vpor %xmm11,%xmm8,%xmm8
3539 vmovdqu %xmm8,(%r12,%r13,1)
3570 vmovdqu (%r13),%xmm9
3571 vpinsrq $0,%r13,%xmm15,%xmm15
3572 addl 0+16(%rbp),%r11d
3574 rorxl $25,%r8d,%r13d
3575 rorxl $11,%r8d,%r15d
3576 leal (%rax,%r14,1),%eax
3577 leal (%r11,%r12,1),%r11d
3578 andnl %r10d,%r8d,%r12d
3581 leal (%r11,%r12,1),%r11d
3584 rorxl $22,%eax,%r12d
3585 leal (%r11,%r13,1),%r11d
3587 rorxl $13,%eax,%r14d
3589 leal (%rdx,%r11,1),%edx
3591 vpxor %xmm10,%xmm9,%xmm9
3592 vmovdqu 16-128(%rdi),%xmm10
3596 leal (%r11,%rsi,1),%r11d
3598 addl 4+16(%rbp),%r10d
3600 rorxl $25,%edx,%r13d
3602 leal (%r11,%r14,1),%r11d
3603 leal (%r10,%r12,1),%r10d
3604 andnl %r9d,%edx,%r12d
3607 leal (%r10,%r12,1),%r10d
3610 rorxl $22,%r11d,%r12d
3611 leal (%r10,%r13,1),%r10d
3613 rorxl $13,%r11d,%r14d
3614 rorxl $2,%r11d,%r13d
3615 leal (%rcx,%r10,1),%ecx
3617 vpxor %xmm8,%xmm9,%xmm9
3621 leal (%r10,%r15,1),%r10d
3623 addl 8+16(%rbp),%r9d
3625 rorxl $25,%ecx,%r13d
3626 rorxl $11,%ecx,%r15d
3627 leal (%r10,%r14,1),%r10d
3628 leal (%r9,%r12,1),%r9d
3629 andnl %r8d,%ecx,%r12d
3632 leal (%r9,%r12,1),%r9d
3635 rorxl $22,%r10d,%r12d
3636 leal (%r9,%r13,1),%r9d
3638 rorxl $13,%r10d,%r14d
3639 rorxl $2,%r10d,%r13d
3640 leal (%rbx,%r9,1),%ebx
3642 vaesenc %xmm10,%xmm9,%xmm9
3643 vmovdqu 32-128(%rdi),%xmm10
3647 leal (%r9,%rsi,1),%r9d
3649 addl 12+16(%rbp),%r8d
3651 rorxl $25,%ebx,%r13d
3653 leal (%r9,%r14,1),%r9d
3654 leal (%r8,%r12,1),%r8d
3655 andnl %edx,%ebx,%r12d
3658 leal (%r8,%r12,1),%r8d
3661 rorxl $22,%r9d,%r12d
3662 leal (%r8,%r13,1),%r8d
3664 rorxl $13,%r9d,%r14d
3666 leal (%rax,%r8,1),%eax
3668 vaesenc %xmm10,%xmm9,%xmm9
3669 vmovdqu 48-128(%rdi),%xmm10
3673 leal (%r8,%r15,1),%r8d
3675 addl 32+16(%rbp),%edx
3677 rorxl $25,%eax,%r13d
3678 rorxl $11,%eax,%r15d
3679 leal (%r8,%r14,1),%r8d
3680 leal (%rdx,%r12,1),%edx
3681 andnl %ecx,%eax,%r12d
3684 leal (%rdx,%r12,1),%edx
3687 rorxl $22,%r8d,%r12d
3688 leal (%rdx,%r13,1),%edx
3690 rorxl $13,%r8d,%r14d
3692 leal (%r11,%rdx,1),%r11d
3694 vaesenc %xmm10,%xmm9,%xmm9
3695 vmovdqu 64-128(%rdi),%xmm10
3699 leal (%rdx,%rsi,1),%edx
3701 addl 36+16(%rbp),%ecx
3703 rorxl $25,%r11d,%r13d
3704 rorxl $11,%r11d,%esi
3705 leal (%rdx,%r14,1),%edx
3706 leal (%rcx,%r12,1),%ecx
3707 andnl %ebx,%r11d,%r12d
3709 rorxl $6,%r11d,%r14d
3710 leal (%rcx,%r12,1),%ecx
3713 rorxl $22,%edx,%r12d
3714 leal (%rcx,%r13,1),%ecx
3716 rorxl $13,%edx,%r14d
3718 leal (%r10,%rcx,1),%r10d
3720 vaesenc %xmm10,%xmm9,%xmm9
3721 vmovdqu 80-128(%rdi),%xmm10
3725 leal (%rcx,%r15,1),%ecx
3727 addl 40+16(%rbp),%ebx
3729 rorxl $25,%r10d,%r13d
3730 rorxl $11,%r10d,%r15d
3731 leal (%rcx,%r14,1),%ecx
3732 leal (%rbx,%r12,1),%ebx
3733 andnl %eax,%r10d,%r12d
3735 rorxl $6,%r10d,%r14d
3736 leal (%rbx,%r12,1),%ebx
3739 rorxl $22,%ecx,%r12d
3740 leal (%rbx,%r13,1),%ebx
3742 rorxl $13,%ecx,%r14d
3744 leal (%r9,%rbx,1),%r9d
3746 vaesenc %xmm10,%xmm9,%xmm9
3747 vmovdqu 96-128(%rdi),%xmm10
3751 leal (%rbx,%rsi,1),%ebx
3753 addl 44+16(%rbp),%eax
3755 rorxl $25,%r9d,%r13d
3757 leal (%rbx,%r14,1),%ebx
3758 leal (%rax,%r12,1),%eax
3759 andnl %r11d,%r9d,%r12d
3762 leal (%rax,%r12,1),%eax
3765 rorxl $22,%ebx,%r12d
3766 leal (%rax,%r13,1),%eax
3768 rorxl $13,%ebx,%r14d
3770 leal (%r8,%rax,1),%r8d
3772 vaesenc %xmm10,%xmm9,%xmm9
3773 vmovdqu 112-128(%rdi),%xmm10
3777 leal (%rax,%r15,1),%eax
3780 addl 0+16(%rbp),%r11d
3782 rorxl $25,%r8d,%r13d
3783 rorxl $11,%r8d,%r15d
3784 leal (%rax,%r14,1),%eax
3785 leal (%r11,%r12,1),%r11d
3786 andnl %r10d,%r8d,%r12d
3789 leal (%r11,%r12,1),%r11d
3792 rorxl $22,%eax,%r12d
3793 leal (%r11,%r13,1),%r11d
3795 rorxl $13,%eax,%r14d
3797 leal (%rdx,%r11,1),%edx
3799 vaesenc %xmm10,%xmm9,%xmm9
3800 vmovdqu 128-128(%rdi),%xmm10
3804 leal (%r11,%rsi,1),%r11d
3806 addl 4+16(%rbp),%r10d
3808 rorxl $25,%edx,%r13d
3810 leal (%r11,%r14,1),%r11d
3811 leal (%r10,%r12,1),%r10d
3812 andnl %r9d,%edx,%r12d
3815 leal (%r10,%r12,1),%r10d
3818 rorxl $22,%r11d,%r12d
3819 leal (%r10,%r13,1),%r10d
3821 rorxl $13,%r11d,%r14d
3822 rorxl $2,%r11d,%r13d
3823 leal (%rcx,%r10,1),%ecx
3825 vaesenc %xmm10,%xmm9,%xmm9
3826 vmovdqu 144-128(%rdi),%xmm10
3830 leal (%r10,%r15,1),%r10d
3832 addl 8+16(%rbp),%r9d
3834 rorxl $25,%ecx,%r13d
3835 rorxl $11,%ecx,%r15d
3836 leal (%r10,%r14,1),%r10d
3837 leal (%r9,%r12,1),%r9d
3838 andnl %r8d,%ecx,%r12d
3841 leal (%r9,%r12,1),%r9d
3844 rorxl $22,%r10d,%r12d
3845 leal (%r9,%r13,1),%r9d
3847 rorxl $13,%r10d,%r14d
3848 rorxl $2,%r10d,%r13d
3849 leal (%rbx,%r9,1),%ebx
3851 vaesenc %xmm10,%xmm9,%xmm9
3852 vmovdqu 160-128(%rdi),%xmm10
3856 leal (%r9,%rsi,1),%r9d
3858 addl 12+16(%rbp),%r8d
3860 rorxl $25,%ebx,%r13d
3862 leal (%r9,%r14,1),%r9d
3863 leal (%r8,%r12,1),%r8d
3864 andnl %edx,%ebx,%r12d
3867 leal (%r8,%r12,1),%r8d
3870 rorxl $22,%r9d,%r12d
3871 leal (%r8,%r13,1),%r8d
3873 rorxl $13,%r9d,%r14d
3875 leal (%rax,%r8,1),%eax
3877 vaesenclast %xmm10,%xmm9,%xmm11
3878 vaesenc %xmm10,%xmm9,%xmm9
3879 vmovdqu 176-128(%rdi),%xmm10
3883 leal (%r8,%r15,1),%r8d
3885 addl 32+16(%rbp),%edx
3887 rorxl $25,%eax,%r13d
3888 rorxl $11,%eax,%r15d
3889 leal (%r8,%r14,1),%r8d
3890 leal (%rdx,%r12,1),%edx
3891 andnl %ecx,%eax,%r12d
3894 leal (%rdx,%r12,1),%edx
3897 rorxl $22,%r8d,%r12d
3898 leal (%rdx,%r13,1),%edx
3900 rorxl $13,%r8d,%r14d
3902 leal (%r11,%rdx,1),%r11d
3904 vpand %xmm12,%xmm11,%xmm8
3905 vaesenc %xmm10,%xmm9,%xmm9
3906 vmovdqu 192-128(%rdi),%xmm10
3910 leal (%rdx,%rsi,1),%edx
3912 addl 36+16(%rbp),%ecx
3914 rorxl $25,%r11d,%r13d
3915 rorxl $11,%r11d,%esi
3916 leal (%rdx,%r14,1),%edx
3917 leal (%rcx,%r12,1),%ecx
3918 andnl %ebx,%r11d,%r12d
3920 rorxl $6,%r11d,%r14d
3921 leal (%rcx,%r12,1),%ecx
3924 rorxl $22,%edx,%r12d
3925 leal (%rcx,%r13,1),%ecx
3927 rorxl $13,%edx,%r14d
3929 leal (%r10,%rcx,1),%r10d
3931 vaesenclast %xmm10,%xmm9,%xmm11
3932 vaesenc %xmm10,%xmm9,%xmm9
3933 vmovdqu 208-128(%rdi),%xmm10
3937 leal (%rcx,%r15,1),%ecx
3939 addl 40+16(%rbp),%ebx
3941 rorxl $25,%r10d,%r13d
3942 rorxl $11,%r10d,%r15d
3943 leal (%rcx,%r14,1),%ecx
3944 leal (%rbx,%r12,1),%ebx
3945 andnl %eax,%r10d,%r12d
3947 rorxl $6,%r10d,%r14d
3948 leal (%rbx,%r12,1),%ebx
3951 rorxl $22,%ecx,%r12d
3952 leal (%rbx,%r13,1),%ebx
3954 rorxl $13,%ecx,%r14d
3956 leal (%r9,%rbx,1),%r9d
3958 vpand %xmm13,%xmm11,%xmm11
3959 vaesenc %xmm10,%xmm9,%xmm9
3960 vmovdqu 224-128(%rdi),%xmm10
3964 leal (%rbx,%rsi,1),%ebx
3966 addl 44+16(%rbp),%eax
3968 rorxl $25,%r9d,%r13d
3970 leal (%rbx,%r14,1),%ebx
3971 leal (%rax,%r12,1),%eax
3972 andnl %r11d,%r9d,%r12d
3975 leal (%rax,%r12,1),%eax
3978 rorxl $22,%ebx,%r12d
3979 leal (%rax,%r13,1),%eax
3981 rorxl $13,%ebx,%r14d
3983 leal (%r8,%rax,1),%r8d
3985 vpor %xmm11,%xmm8,%xmm8
3986 vaesenclast %xmm10,%xmm9,%xmm11
3987 vmovdqu 0-128(%rdi),%xmm10
3991 leal (%rax,%r15,1),%eax
3994 vpextrq $1,%xmm15,%r15
3995 vpand %xmm14,%xmm11,%xmm11
3996 vpor %xmm11,%xmm8,%xmm8
3998 vmovdqu %xmm8,(%r15,%r13,1)
4016 leaq (%rsi,%r13,1),%r12
4019 cmpq 64+16(%rsp),%r13
4036 movq 64+32(%rsp),%r8
4054 .cfi_def_cfa_register %rsp
4058 .size aesni_cbc_sha256_enc_avx2,.-aesni_cbc_sha256_enc_avx2
/*
 * aesni_cbc_sha256_enc_shaext — SHA-NI + AES-NI "stitched" AES-CBC-encrypt
 * and SHA-256 path, selected at runtime from aesni_cbc_sha256_enc.
 * NOTE(review): this listing is an elided excerpt of auto-generated perlasm
 * output — many interleaved instructions (notably the sha256rnds2 round ops
 * and the outer block loop) are not visible here; comments below describe
 * only what the visible lines establish.
 *
 * Visible register roles (grounded in the loads/stores below):
 *   %rax  -> K256+128 (SHA-256 round-constant table, biased by 128)
 *   %r9   -> SHA-256 state; low/high halves end up in %xmm1/%xmm2
 *   %r10  -> 64-byte message block (four movdqu loads into %xmm10..%xmm13)
 *   %rdi  -> plaintext input pointer (movups 0/16/32/48(%rdi))
 *   %rsi  -> output-minus-input displacement (stores at (%rsi,%rdi,1))
 *   %rcx  -> AES key schedule, pre-biased (negative offsets -80..-16 used)
 *            — presumably advanced by +112; confirm against elided prologue
 *   %r11d -> 240(%rcx), the AES round count per the standard AES_KEY layout
 *
 * .byte-encoded instructions (assembler-portability encodings):
 *   102,68,15,56,0,xx    = pshufb  (byte-swap message words via %xmm3 mask)
 *   102,6x,15,58,15,xx,4 = palignr $4 (message-schedule word shifts)
 *   102,15,58,15,xx,8    = palignr $8 (state-half repacking)
 *   69,15,56,204,xx      = sha256msg1 (message schedule, sigma0 part)
 *   69,15,56,205,xx      = sha256msg2 (message schedule, sigma1 part)
 */
4059 .type aesni_cbc_sha256_enc_shaext,@function
4061 aesni_cbc_sha256_enc_shaext:
4063 leaq K256+128(%rip),%rax	# %rax = K256 table, biased so 0-128(%rax) is K[0..3]
4065 movdqu 16(%r9),%xmm2	# load second half of SHA-256 state (first half elided)
4066 movdqa 512-128(%rax),%xmm3	# %xmm3 = pshufb byte-order mask stored past K256
4068 movl 240(%rcx),%r11d	# AES rounds from key schedule (AES_KEY.rounds)
4070 movups (%rcx),%xmm15	# NOTE(review): first key material load; exact role depends on elided bias setup
4072 movups 16(%rcx),%xmm4
	# Repack the two state halves into the ABEF/CDGH word order that
	# the SHA-NI round instructions expect.
4075 pshufd $0x1b,%xmm1,%xmm0
4076 pshufd $0xb1,%xmm1,%xmm1
4077 pshufd $0x1b,%xmm2,%xmm2
4079 .byte 102,15,58,15,202,8	# palignr $8,%xmm2,%xmm1
4080 punpcklqdq %xmm0,%xmm2
	# Load one 64-byte message block into %xmm10..%xmm13 and byte-swap
	# each 16-byte lane to big-endian word order (pshufb with %xmm3).
4086 movdqu (%r10),%xmm10
4087 movdqu 16(%r10),%xmm11
4088 movdqu 32(%r10),%xmm12
4089 .byte 102,68,15,56,0,211	# pshufb %xmm3,%xmm10
4090 movdqu 48(%r10),%xmm13
4092 movdqa 0-128(%rax),%xmm0	# %xmm0 = K[0..3] added to message words (round input)
4094 .byte 102,68,15,56,0,219	# pshufb %xmm3,%xmm11
4097 movups 0(%rdi),%xmm14	# first plaintext block for the AES-CBC lane
	# --- SHA-256 rounds 0..15 interleaved with AES round-key loads;
	#     the sha256rnds2 instructions between these lines are elided.
4100 movups -80(%rcx),%xmm5	# AES round key (negative bias off %rcx)
4103 pshufd $0x0e,%xmm0,%xmm0	# move high two K+W words down for the second rnds2
4104 movups -64(%rcx),%xmm4
4108 movdqa 32-128(%rax),%xmm0	# K[4..7]
4110 .byte 102,68,15,56,0,227	# pshufb %xmm3,%xmm12
4112 movups -48(%rcx),%xmm5
4115 pshufd $0x0e,%xmm0,%xmm0
4116 movups -32(%rcx),%xmm4
4120 movdqa 64-128(%rax),%xmm0	# K[8..11]
4122 .byte 102,68,15,56,0,235	# pshufb %xmm3,%xmm13
4123 .byte 69,15,56,204,211	# sha256msg1 %xmm11,%xmm10 — start W[16..] schedule
4124 movups -16(%rcx),%xmm5
4127 pshufd $0x0e,%xmm0,%xmm0
4129 .byte 102,65,15,58,15,220,4	# palignr $4,%xmm12,%xmm3
4131 movups 0(%rcx),%xmm4
4135 movdqa 96-128(%rax),%xmm0	# K[12..15]
4137 .byte 69,15,56,205,213	# sha256msg2 — finish W[16..19]
4138 .byte 69,15,56,204,220	# sha256msg1 — next schedule quad
4139 movups 16(%rcx),%xmm5
4142 pshufd $0x0e,%xmm0,%xmm0
4143 movups 32(%rcx),%xmm4
4146 .byte 102,65,15,58,15,221,4	# palignr $4,%xmm13,%xmm3
4149 movdqa 128-128(%rax),%xmm0	# K[16..19]
4151 .byte 69,15,56,205,218	# sha256msg2
4152 .byte 69,15,56,204,229	# sha256msg1
4153 movups 48(%rcx),%xmm5
4156 pshufd $0x0e,%xmm0,%xmm0
4158 .byte 102,65,15,58,15,218,4	# palignr $4,%xmm10,%xmm3
	# Tail of AES-128-CBC encryption of block 0 (aesenc rounds elided);
	# aesenclast finishes, the 16-112(%rcx) reload rewinds to round key 1.
4162 movups 64(%rcx),%xmm4
4164 movups 80(%rcx),%xmm5
4167 movups 96(%rcx),%xmm4
4169 movups 112(%rcx),%xmm5
4172 aesenclast %xmm5,%xmm6	# %xmm6 = ciphertext block 0
4173 movups 16-112(%rcx),%xmm4	# restart key schedule for the next block
4176 movups 16(%rdi),%xmm14	# plaintext block 1
4178 movups %xmm6,0(%rsi,%rdi,1)	# store ciphertext block 0 (out = in + %rsi)
	# --- SHA-256 rounds 20..31, message schedule continues; AES block 1.
4180 movups -80(%rcx),%xmm5
4182 movdqa 160-128(%rax),%xmm0	# K[20..23]
4184 .byte 69,15,56,205,227	# sha256msg2
4185 .byte 69,15,56,204,234	# sha256msg1
4186 movups -64(%rcx),%xmm4
4189 pshufd $0x0e,%xmm0,%xmm0
4191 .byte 102,65,15,58,15,219,4	# palignr $4,%xmm11,%xmm3
4193 movups -48(%rcx),%xmm5
4196 movdqa 192-128(%rax),%xmm0	# K[24..27]
4198 .byte 69,15,56,205,236	# sha256msg2
4199 .byte 69,15,56,204,211	# sha256msg1
4200 movups -32(%rcx),%xmm4
4203 pshufd $0x0e,%xmm0,%xmm0
4205 .byte 102,65,15,58,15,220,4	# palignr $4,%xmm12,%xmm3
4207 movups -16(%rcx),%xmm5
4210 movdqa 224-128(%rax),%xmm0	# K[28..31]
4212 .byte 69,15,56,205,213	# sha256msg2
4213 .byte 69,15,56,204,220	# sha256msg1
4214 movups 0(%rcx),%xmm4
4217 pshufd $0x0e,%xmm0,%xmm0
4219 .byte 102,65,15,58,15,221,4	# palignr $4,%xmm13,%xmm3
4221 movups 16(%rcx),%xmm5
4224 movdqa 256-128(%rax),%xmm0	# K[32..35]
4226 .byte 69,15,56,205,218	# sha256msg2
4227 .byte 69,15,56,204,229	# sha256msg1
4228 movups 32(%rcx),%xmm4
4231 pshufd $0x0e,%xmm0,%xmm0
4233 .byte 102,65,15,58,15,218,4	# palignr $4,%xmm10,%xmm3
4235 movups 48(%rcx),%xmm5
	# AES block 1 finishes and is stored; key schedule rewound again.
4239 movups 64(%rcx),%xmm4
4241 movups 80(%rcx),%xmm5
4244 movups 96(%rcx),%xmm4
4246 movups 112(%rcx),%xmm5
4249 aesenclast %xmm5,%xmm6	# %xmm6 = ciphertext block 1
4250 movups 16-112(%rcx),%xmm4
4253 movups 32(%rdi),%xmm14	# plaintext block 2
4255 movups %xmm6,16(%rsi,%rdi,1)	# store ciphertext block 1
	# --- SHA-256 rounds 36..51; AES block 2.
4257 movups -80(%rcx),%xmm5
4259 movdqa 288-128(%rax),%xmm0	# K[36..39]
4261 .byte 69,15,56,205,227	# sha256msg2
4262 .byte 69,15,56,204,234	# sha256msg1
4263 movups -64(%rcx),%xmm4
4266 pshufd $0x0e,%xmm0,%xmm0
4268 .byte 102,65,15,58,15,219,4	# palignr $4,%xmm11,%xmm3
4270 movups -48(%rcx),%xmm5
4273 movdqa 320-128(%rax),%xmm0	# K[40..43]
4275 .byte 69,15,56,205,236	# sha256msg2
4276 .byte 69,15,56,204,211	# sha256msg1
4277 movups -32(%rcx),%xmm4
4280 pshufd $0x0e,%xmm0,%xmm0
4282 .byte 102,65,15,58,15,220,4	# palignr $4,%xmm12,%xmm3
4284 movups -16(%rcx),%xmm5
4287 movdqa 352-128(%rax),%xmm0	# K[44..47]
4289 .byte 69,15,56,205,213	# sha256msg2
4290 .byte 69,15,56,204,220	# sha256msg1
4291 movups 0(%rcx),%xmm4
4294 pshufd $0x0e,%xmm0,%xmm0
4296 .byte 102,65,15,58,15,221,4	# palignr $4,%xmm13,%xmm3
4298 movups 16(%rcx),%xmm5
4301 movdqa 384-128(%rax),%xmm0	# K[48..51]
4303 .byte 69,15,56,205,218	# sha256msg2
4304 .byte 69,15,56,204,229	# sha256msg1
4305 movups 32(%rcx),%xmm4
4308 pshufd $0x0e,%xmm0,%xmm0
4310 .byte 102,65,15,58,15,218,4	# palignr $4,%xmm10,%xmm3
4312 movups 48(%rcx),%xmm5
4315 movdqa 416-128(%rax),%xmm0	# K[52..55]
4317 .byte 69,15,56,205,227	# sha256msg2
4318 .byte 69,15,56,204,234	# sha256msg1 (last schedule extension)
	# AES block 2 finishes and is stored.
4321 movups 64(%rcx),%xmm4
4323 movups 80(%rcx),%xmm5
4326 movups 96(%rcx),%xmm4
4328 movups 112(%rcx),%xmm5
4331 aesenclast %xmm5,%xmm6	# %xmm6 = ciphertext block 2
4332 movups 16-112(%rcx),%xmm4
4335 pshufd $0x0e,%xmm0,%xmm0
4337 .byte 102,65,15,58,15,219,4	# palignr $4,%xmm11,%xmm3
4339 movups 48(%rdi),%xmm14	# plaintext block 3
4341 movups %xmm6,32(%rsi,%rdi,1)	# store ciphertext block 2
	# --- final SHA-256 rounds 56..63 (no further message expansion);
	#     AES block 3 runs alongside.
4343 movups -80(%rcx),%xmm5
4345 movups -64(%rcx),%xmm4
4349 movdqa 448-128(%rax),%xmm0	# K[56..59]
4351 .byte 69,15,56,205,236	# sha256msg2 — final W quad
4353 movups -48(%rcx),%xmm5
4356 pshufd $0x0e,%xmm0,%xmm0
4357 movups -32(%rcx),%xmm4
4361 movdqa 480-128(%rax),%xmm0	# K[60..63]
4363 movups -16(%rcx),%xmm5
4365 movups 0(%rcx),%xmm4
4368 pshufd $0x0e,%xmm0,%xmm0
4369 movups 16(%rcx),%xmm5
4373 movups 32(%rcx),%xmm4
4375 movups 48(%rcx),%xmm5
4379 movups 64(%rcx),%xmm4
4381 movups 80(%rcx),%xmm5
4384 movups 96(%rcx),%xmm4
4386 movups 112(%rcx),%xmm5
4389 aesenclast %xmm5,%xmm6	# %xmm6 = ciphertext block 3
4390 movups 16-112(%rcx),%xmm4
4397 movups %xmm6,48(%rsi,%rdi,1)	# store ciphertext block 3
	# Convert the ABEF/CDGH working state back to the canonical
	# a..h word order before writing it to the context.
4401 pshufd $0xb1,%xmm2,%xmm2
4402 pshufd $0x1b,%xmm1,%xmm3
4403 pshufd $0xb1,%xmm1,%xmm1
4404 punpckhqdq %xmm2,%xmm1
4405 .byte 102,15,58,15,211,8	# palignr $8,%xmm3,%xmm2
4409 movdqu %xmm2,16(%r9)	# store second half of updated SHA-256 state (first half store elided)
4411 .size aesni_cbc_sha256_enc_shaext,.-aesni_cbc_sha256_enc_shaext