2 # Do not modify. This file is auto-generated from aesni-sha1-x86_64.pl.
# aesni_cbc_sha1_enc — public entry point / CPU-feature dispatcher for the
# stitched AES-CBC-encrypt + SHA-1 routine. Probes the OPENSSL_ia32cap_P
# capability vector and tail-jumps to the AVX or SSSE3 implementation below.
# NOTE(review): the first-column integers are residue of the generator's line
# numbering; gaps in them (12 -> 14 -> 16) mean instructions are missing from
# this excerpt (presumably the second mask and the or combining %r11d into
# %r10d — TODO confirm against the full generated file). The and/cmp pair is
# therefore NOT self-contained as shown here.
6 .globl aesni_cbc_sha1_enc
7 .type aesni_cbc_sha1_enc,@function
11 movl OPENSSL_ia32cap_P+0(%rip),%r10d    # capability word 0
12 movl OPENSSL_ia32cap_P+4(%rip),%r11d    # capability word 1
14 andl $1073741824,%r10d                  # keep bit 30 (0x40000000)
16 cmpl $1342177280,%r10d                  # == 0x50000000 ? (combined-feature test; combining step not visible in this excerpt)
17 je aesni_cbc_sha1_enc_avx               # AVX-capable CPU -> AVX path
18 jmp aesni_cbc_sha1_enc_ssse3            # otherwise -> SSSE3 path
20 .size aesni_cbc_sha1_enc,.-aesni_cbc_sha1_enc
# aesni_cbc_sha1_enc_ssse3 — SSSE3 path of the stitched AES-CBC encryption +
# SHA-1 hashing loop. One AES round (aesenc) is interleaved with each slice of
# SHA-1 message-schedule work so both units stay busy.
#
# Register roles as evidenced by the code below (this excerpt has gaps — the
# first-column generator line numbers are non-contiguous — so roles of
# registers set in missing lines are hedged):
#   %r15  = AES key schedule; round keys loaded from 16(%r15)..224(%r15)
#   %r12  = running input offset; ciphertext stored at (%r13,%r12,1),
#           so %r13 presumably holds out-minus-in — TODO confirm
#   %r11  = K_XX_XX constant table (leaq below)
#   %r10  = message block pointer (movdqu loads at generator lines 1101-1104)
#   %xmm11 = CBC chaining value / cipher block in flight
#   %xmm12 = next plaintext block; %xmm14/%xmm15 = alternating AES round keys
#   %xmm0-%xmm3 = SHA-1 message words W[0..15]; %xmm4-%xmm10 = schedule temps
#   0..48(%rsp) = staged W[t]+K values consumed by the scalar SHA-1 rounds
#   (scalar rounds themselves fall in the gaps of this excerpt)
# The .byte sequences are hand-encoded SSSE3/AES-NI instructions (emitted this
# way for old assemblers); each is decoded in its comment.
21 .type aesni_cbc_sha1_enc_ssse3,@function
23 aesni_cbc_sha1_enc_ssse3:
47 leaq K_XX_XX(%rip),%r11                 # %r11 -> SHA-1 round-constant table
76 movups 16(%r15),%xmm14                  # AES round key 1
85 .byte 102,69,15,56,220,222             # aesenc %xmm14,%xmm11
86 movups 32(%r15),%xmm15                  # AES round key 2
89 .byte 102,15,58,15,224,8               # palignr $8,%xmm0,%xmm4
109 movdqa %xmm9,48(%rsp)                  # stage W[t]+K for scalar rounds
111 .byte 102,69,15,56,220,223            # aesenc %xmm15,%xmm11
112 movups 48(%r15),%xmm14                 # AES round key 3
138 .byte 102,69,15,56,220,222            # aesenc %xmm14,%xmm11
139 movups 64(%r15),%xmm15                 # AES round key 4
144 movdqa 0(%r11),%xmm10                  # K constant (rounds 0-19)
154 .byte 102,15,58,15,233,8              # palignr $8,%xmm1,%xmm5
168 .byte 102,69,15,56,220,223            # aesenc %xmm15,%xmm11
169 movups 80(%r15),%xmm14                 # AES round key 5
176 movdqa %xmm10,0(%rsp)                  # stage W[t]+K
193 .byte 102,69,15,56,220,222            # aesenc %xmm14,%xmm11
194 movups 96(%r15),%xmm15                 # AES round key 6
209 movdqa 16(%r11),%xmm8                  # K constant (rounds 20-39)
219 .byte 102,15,58,15,242,8              # palignr $8,%xmm2,%xmm6
222 .byte 102,69,15,56,220,223            # aesenc %xmm15,%xmm11
223 movups 112(%r15),%xmm14                # AES round key 7
241 movdqa %xmm8,16(%rsp)                  # stage W[t]+K
249 .byte 102,69,15,56,220,222            # aesenc %xmm14,%xmm11
250 movups 128(%r15),%xmm15                # AES round key 8
274 movdqa 16(%r11),%xmm9                  # K constant (rounds 20-39)
276 .byte 102,69,15,56,220,223            # aesenc %xmm15,%xmm11
277 movups 144(%r15),%xmm14                # AES round key 9
286 .byte 102,15,58,15,251,8              # palignr $8,%xmm3,%xmm7
303 .byte 102,69,15,56,220,222            # aesenc %xmm14,%xmm11
304 movups 160(%r15),%xmm15                # AES round key 10
308 movdqa %xmm9,32(%rsp)                  # stage W[t]+K
# Tail rounds for 192/256-bit keys (round keys 11-14), then final round:
334 movups 176(%r15),%xmm14
335 .byte 102,69,15,56,220,223            # aesenc %xmm15,%xmm11
336 movups 192(%r15),%xmm15
337 .byte 102,69,15,56,220,222            # aesenc %xmm14,%xmm11
339 movups 208(%r15),%xmm14
340 .byte 102,69,15,56,220,223            # aesenc %xmm15,%xmm11
341 movups 224(%r15),%xmm15
342 .byte 102,69,15,56,220,222            # aesenc %xmm14,%xmm11
344 .byte 102,69,15,56,221,223            # aesenclast %xmm15,%xmm11 — block done
345 movups 16(%r15),%xmm14                 # reload round key 1 for next block
353 movdqa 16(%r11),%xmm10                 # K constant (rounds 20-39)
362 .byte 102,68,15,58,15,206,8           # palignr $8,%xmm6,%xmm9
372 movups 16(%r12),%xmm12                 # next plaintext block
374 movups %xmm11,0(%r13,%r12,1)          # store ciphertext block 0
376 .byte 102,69,15,56,220,222            # aesenc %xmm14,%xmm11 — block 1 starts
377 movups 32(%r15),%xmm15
385 movdqa %xmm10,48(%rsp)                 # stage W[t]+K
400 .byte 102,69,15,56,220,223            # aesenc %xmm15,%xmm11
401 movups 48(%r15),%xmm14
421 .byte 102,69,15,56,220,222            # aesenc %xmm14,%xmm11
422 movups 64(%r15),%xmm15
424 .byte 102,68,15,58,15,215,8           # palignr $8,%xmm7,%xmm10
453 .byte 102,69,15,56,220,223            # aesenc %xmm15,%xmm11
454 movups 80(%r15),%xmm14
470 .byte 102,68,15,58,15,192,8           # palignr $8,%xmm0,%xmm8
477 movdqa 32(%r11),%xmm10                 # K constant (rounds 40-59)
483 .byte 102,69,15,56,220,222            # aesenc %xmm14,%xmm11
484 movups 96(%r15),%xmm15
489 movdqa %xmm9,16(%rsp)                  # stage W[t]+K
511 .byte 102,69,15,56,220,223            # aesenc %xmm15,%xmm11
512 movups 112(%r15),%xmm14
518 .byte 102,68,15,58,15,201,8           # palignr $8,%xmm1,%xmm9
535 movdqa %xmm10,32(%rsp)                 # stage W[t]+K
542 .byte 102,69,15,56,220,222            # aesenc %xmm14,%xmm11
543 movups 128(%r15),%xmm15
564 .byte 102,68,15,58,15,210,8           # palignr $8,%xmm2,%xmm10
570 .byte 102,69,15,56,220,223            # aesenc %xmm15,%xmm11
571 movups 144(%r15),%xmm14
583 movdqa %xmm8,48(%rsp)                  # stage W[t]+K
600 .byte 102,69,15,56,220,222            # aesenc %xmm14,%xmm11
601 movups 160(%r15),%xmm15
612 .byte 102,68,15,58,15,195,8           # palignr $8,%xmm3,%xmm8
# Tail rounds + final round for block 1:
633 movups 176(%r15),%xmm14
634 .byte 102,69,15,56,220,223            # aesenc %xmm15,%xmm11
635 movups 192(%r15),%xmm15
636 .byte 102,69,15,56,220,222            # aesenc %xmm14,%xmm11
638 movups 208(%r15),%xmm14
639 .byte 102,69,15,56,220,223            # aesenc %xmm15,%xmm11
640 movups 224(%r15),%xmm15
641 .byte 102,69,15,56,220,222            # aesenc %xmm14,%xmm11
643 .byte 102,69,15,56,221,223            # aesenclast %xmm15,%xmm11
644 movups 16(%r15),%xmm14                 # reload round key 1
669 movups 32(%r12),%xmm12                 # next plaintext block
671 movups %xmm11,16(%r13,%r12,1)         # store ciphertext block 1
673 .byte 102,69,15,56,220,222            # aesenc %xmm14,%xmm11 — block 2 starts
674 movups 32(%r15),%xmm15
676 .byte 102,68,15,58,15,204,8           # palignr $8,%xmm4,%xmm9
693 movdqa %xmm10,16(%rsp)                 # stage W[t]+K
705 .byte 102,69,15,56,220,223            # aesenc %xmm15,%xmm11
706 movups 48(%r15),%xmm14
729 .byte 102,69,15,56,220,222            # aesenc %xmm14,%xmm11
730 movups 64(%r15),%xmm15
740 .byte 102,68,15,58,15,213,8           # palignr $8,%xmm5,%xmm10
747 movdqa 48(%r11),%xmm9                  # K constant (rounds 60-79)
757 movdqa %xmm8,32(%rsp)                  # stage W[t]+K
759 .byte 102,69,15,56,220,223            # aesenc %xmm15,%xmm11
760 movups 80(%r15),%xmm14
785 .byte 102,69,15,56,220,222            # aesenc %xmm14,%xmm11
786 movups 96(%r15),%xmm15
804 .byte 102,68,15,58,15,198,8           # palignr $8,%xmm6,%xmm8
810 .byte 102,69,15,56,220,223            # aesenc %xmm15,%xmm11
811 movups 112(%r15),%xmm14
823 movdqa %xmm9,48(%rsp)                  # stage W[t]+K
840 .byte 102,69,15,56,220,222            # aesenc %xmm14,%xmm11
841 movups 128(%r15),%xmm15
863 .byte 102,69,15,56,220,223            # aesenc %xmm15,%xmm11
864 movups 144(%r15),%xmm14
870 .byte 102,68,15,58,15,207,8           # palignr $8,%xmm7,%xmm9
887 movdqa %xmm10,0(%rsp)                  # stage W[t]+K
894 .byte 102,69,15,56,220,222            # aesenc %xmm14,%xmm11
895 movups 160(%r15),%xmm15
# Tail rounds + final round for block 2:
921 movups 176(%r15),%xmm14
922 .byte 102,69,15,56,220,223            # aesenc %xmm15,%xmm11
923 movups 192(%r15),%xmm15
924 .byte 102,69,15,56,220,222            # aesenc %xmm14,%xmm11
926 movups 208(%r15),%xmm14
927 .byte 102,69,15,56,220,223            # aesenc %xmm15,%xmm11
928 movups 224(%r15),%xmm15
929 .byte 102,69,15,56,220,222            # aesenc %xmm14,%xmm11
931 .byte 102,69,15,56,221,223            # aesenclast %xmm15,%xmm11
932 movups 16(%r15),%xmm14                 # reload round key 1
946 .byte 102,68,15,58,15,208,8           # palignr $8,%xmm0,%xmm10
959 movups 48(%r12),%xmm12                 # next plaintext block
961 movups %xmm11,32(%r13,%r12,1)         # store ciphertext block 2
963 .byte 102,69,15,56,220,222            # aesenc %xmm14,%xmm11 — block 3 starts
964 movups 32(%r15),%xmm15
969 movdqa %xmm8,16(%rsp)                  # stage W[t]+K
991 .byte 102,69,15,56,220,223            # aesenc %xmm15,%xmm11
992 movups 48(%r15),%xmm14
1013 .byte 102,69,15,56,220,222           # aesenc %xmm14,%xmm11
1014 movups 64(%r15),%xmm15
1016 .byte 102,68,15,58,15,193,8          # palignr $8,%xmm1,%xmm8
1033 movdqa %xmm9,32(%rsp)                 # stage W[t]+K
1045 .byte 102,69,15,56,220,223           # aesenc %xmm15,%xmm11
1046 movups 80(%r15),%xmm14
1065 movdqa %xmm10,48(%rsp)                # stage W[t]+K
1070 .byte 102,69,15,56,220,222           # aesenc %xmm14,%xmm11
1071 movups 96(%r15),%xmm15
1092 .byte 102,69,15,56,220,223           # aesenc %xmm15,%xmm11
1093 movups 112(%r15),%xmm14
# Load and byte-swap the next 64-byte SHA-1 message block:
1099 movdqa 64(%r11),%xmm6                 # pshufb byte-order mask
1100 movdqa 0(%r11),%xmm9                  # K constant (rounds 0-19)
1101 movdqu 0(%r10),%xmm0                  # W[0..3]
1102 movdqu 16(%r10),%xmm1                 # W[4..7]
1103 movdqu 32(%r10),%xmm2                 # W[8..11]
1104 movdqu 48(%r10),%xmm3                 # W[12..15]
1105 .byte 102,15,56,0,198                # pshufb %xmm6,%xmm0 — big-endian swap
1109 .byte 102,15,56,0,206                # pshufb %xmm6,%xmm1
1117 movdqa %xmm0,0(%rsp)                  # stage W[t]+K
1128 .byte 102,69,15,56,220,222           # aesenc %xmm14,%xmm11
1129 movups 128(%r15),%xmm15
1147 .byte 102,15,56,0,214                # pshufb %xmm6,%xmm2
1152 .byte 102,69,15,56,220,223           # aesenc %xmm15,%xmm11
1153 movups 144(%r15),%xmm14
1157 movdqa %xmm1,16(%rsp)                 # stage W[t]+K
1176 .byte 102,69,15,56,220,222           # aesenc %xmm14,%xmm11
1177 movups 160(%r15),%xmm15
1187 .byte 102,15,56,0,222                # pshufb %xmm6,%xmm3
1195 movdqa %xmm2,32(%rsp)                 # stage W[t]+K
# Tail rounds + final round for block 3:
1204 movups 176(%r15),%xmm14
1205 .byte 102,69,15,56,220,223           # aesenc %xmm15,%xmm11
1206 movups 192(%r15),%xmm15
1207 .byte 102,69,15,56,220,222           # aesenc %xmm14,%xmm11
1209 movups 208(%r15),%xmm14
1210 .byte 102,69,15,56,220,223           # aesenc %xmm15,%xmm11
1211 movups 224(%r15),%xmm15
1212 .byte 102,69,15,56,220,222           # aesenc %xmm14,%xmm11
1214 .byte 102,69,15,56,221,223           # aesenclast %xmm15,%xmm11
1215 movups 16(%r15),%xmm14                # reload round key 1
1235 movups %xmm11,48(%r13,%r12,1)        # store ciphertext block 3
# Drain path (last message block; no further schedule updates) — rounds 8-14
# plus final round, presumably for the loop exit — TODO confirm against the
# full generated file:
1270 .byte 102,69,15,56,220,222           # aesenc %xmm14,%xmm11
1271 movups 128(%r15),%xmm15
1292 .byte 102,69,15,56,220,223           # aesenc %xmm15,%xmm11
1293 movups 144(%r15),%xmm14
1314 .byte 102,69,15,56,220,222           # aesenc %xmm14,%xmm11
1315 movups 160(%r15),%xmm15
1338 movups 176(%r15),%xmm14
1339 .byte 102,69,15,56,220,223           # aesenc %xmm15,%xmm11
1340 movups 192(%r15),%xmm15
1341 .byte 102,69,15,56,220,222           # aesenc %xmm14,%xmm11
1343 movups 208(%r15),%xmm14
1344 .byte 102,69,15,56,220,223           # aesenc %xmm15,%xmm11
1345 movups 224(%r15),%xmm15
1346 .byte 102,69,15,56,220,222           # aesenc %xmm14,%xmm11
1348 .byte 102,69,15,56,221,223           # aesenclast %xmm15,%xmm11
1349 movups 16(%r15),%xmm14
1369 movups %xmm11,48(%r13,%r12,1)        # store final ciphertext block
1393 .size aesni_cbc_sha1_enc_ssse3,.-aesni_cbc_sha1_enc_ssse3
# aesni_cbc_sha1_enc_avx — AVX path of the stitched AES-CBC encryption +
# SHA-1 hashing loop. Same structure as the SSSE3 path, but all vector work
# uses 3-operand VEX (v-prefixed) instructions.
#
# Register roles as evidenced by the code below (this excerpt has gaps — the
# first-column generator line numbers are non-contiguous — so roles set in
# missing lines are hedged):
#   %r15  = AES key schedule, accessed at biased offsets -112(%r15)..112(%r15)
#   %r12  = running input offset; ciphertext stored at (%r13,%r12,1)
#   %r11  = K_XX_XX constant table; %r10 = message block pointer
#   %r8   = IV / chaining-value pointer (vmovdqu load at entry, store at exit)
#   %xmm11 = CBC chaining value; %xmm12 = plaintext block; %xmm13 = round key 0
#   %xmm14/%xmm15 = alternating AES round keys
#   %xmm0-%xmm3 = SHA-1 message words; %xmm4-%xmm10 = schedule temps
#   0..48(%rsp) = staged W[t]+K values for the scalar SHA-1 rounds (the scalar
#   rounds themselves fall in the gaps of this excerpt)
#
# FIX (see generator line 2404 below): a lone legacy-SSE "movdqa" sat in the
# middle of otherwise pure-VEX code; replaced with "vmovdqa". The 128-bit
# store is bit-identical, but the VEX form avoids a potential SSE/AVX
# state-transition penalty on Sandy Bridge-class CPUs.
1394 .type aesni_cbc_sha1_enc_avx,@function
1396 aesni_cbc_sha1_enc_avx:
1406 leaq -104(%rsp),%rsp                  # frame for staged W[t]+K etc.
1414 vmovdqu (%r8),%xmm11                  # load IV / chaining value
1422 leaq K_XX_XX(%rip),%r11               # %r11 -> SHA-1 constant table
1430 vmovdqa 64(%r11),%xmm6                # pshufb byte-order mask
1431 vmovdqa 0(%r11),%xmm9                 # K constant (rounds 0-19)
1432 vmovdqu 0(%r10),%xmm0                 # W[0..3]
1433 vmovdqu 16(%r10),%xmm1                # W[4..7]
1434 vmovdqu 32(%r10),%xmm2                # W[8..11]
1435 vmovdqu 48(%r10),%xmm3                # W[12..15]
1436 vpshufb %xmm6,%xmm0,%xmm0             # big-endian byte swap
1438 vpshufb %xmm6,%xmm1,%xmm1
1439 vpshufb %xmm6,%xmm2,%xmm2
1440 vpshufb %xmm6,%xmm3,%xmm3
1441 vpaddd %xmm9,%xmm0,%xmm4              # W + K
1442 vpaddd %xmm9,%xmm1,%xmm5
1443 vpaddd %xmm9,%xmm2,%xmm6
1444 vmovdqa %xmm4,0(%rsp)                 # stage W[t]+K for scalar rounds
1445 vmovdqa %xmm5,16(%rsp)
1446 vmovdqa %xmm6,32(%rsp)
1447 vmovups -112(%r15),%xmm13             # AES round key 0
1448 vmovups 16-112(%r15),%xmm14           # AES round key 1
1453 vmovups 0(%r12),%xmm12                # plaintext block 0
1454 vxorps %xmm13,%xmm12,%xmm12           # whiten plaintext with round key 0
1455 vxorps %xmm12,%xmm11,%xmm11           # CBC xor with chaining value
1456 vaesenc %xmm14,%xmm11,%xmm11          # AES round 1
1457 vmovups -80(%r15),%xmm15
# SHA-1 message-schedule update (Xupdate) interleaved with AES rounds:
1459 vpalignr $8,%xmm0,%xmm1,%xmm4
1462 vpaddd %xmm3,%xmm9,%xmm9
1465 vpsrldq $4,%xmm3,%xmm8
1468 vpxor %xmm0,%xmm4,%xmm4
1471 vpxor %xmm2,%xmm8,%xmm8
1476 vpxor %xmm8,%xmm4,%xmm4
1479 vmovdqa %xmm9,48(%rsp)                # stage W[t]+K
1481 vaesenc %xmm15,%xmm11,%xmm11          # AES round 2
1482 vmovups -64(%r15),%xmm14
1484 vpsrld $31,%xmm4,%xmm8
1489 vpslldq $12,%xmm4,%xmm10
1490 vpaddd %xmm4,%xmm4,%xmm4
1495 vpsrld $30,%xmm10,%xmm9
1496 vpor %xmm8,%xmm4,%xmm4                # rol(W,1)
1501 vpslld $2,%xmm10,%xmm10
1502 vpxor %xmm9,%xmm4,%xmm4
1507 vaesenc %xmm14,%xmm11,%xmm11          # AES round 3
1508 vmovups -48(%r15),%xmm15
1509 vpxor %xmm10,%xmm4,%xmm4
1512 vmovdqa 0(%r11),%xmm10                # K constant (rounds 0-19)
1519 vpalignr $8,%xmm1,%xmm2,%xmm5
1522 vpaddd %xmm4,%xmm10,%xmm10
1525 vpsrldq $4,%xmm4,%xmm9
1528 vpxor %xmm1,%xmm5,%xmm5
1531 vpxor %xmm3,%xmm9,%xmm9
1533 vaesenc %xmm15,%xmm11,%xmm11          # AES round 4
1534 vmovups -32(%r15),%xmm14
1538 vpxor %xmm9,%xmm5,%xmm5
1541 vmovdqa %xmm10,0(%rsp)                # stage W[t]+K
1544 vpsrld $31,%xmm5,%xmm9
1549 vpslldq $12,%xmm5,%xmm8
1550 vpaddd %xmm5,%xmm5,%xmm5
1555 vpsrld $30,%xmm8,%xmm10
1556 vpor %xmm9,%xmm5,%xmm5
1558 vaesenc %xmm14,%xmm11,%xmm11          # AES round 5
1559 vmovups -16(%r15),%xmm15
1563 vpslld $2,%xmm8,%xmm8
1564 vpxor %xmm10,%xmm5,%xmm5
1569 vpxor %xmm8,%xmm5,%xmm5
1572 vmovdqa 16(%r11),%xmm8                # K constant (rounds 20-39)
1579 vpalignr $8,%xmm2,%xmm3,%xmm6
1582 vaesenc %xmm15,%xmm11,%xmm11          # AES round 6
1583 vmovups 0(%r15),%xmm14
1584 vpaddd %xmm5,%xmm8,%xmm8
1587 vpsrldq $4,%xmm5,%xmm10
1590 vpxor %xmm2,%xmm6,%xmm6
1593 vpxor %xmm4,%xmm10,%xmm10
1598 vpxor %xmm10,%xmm6,%xmm6
1601 vmovdqa %xmm8,16(%rsp)                # stage W[t]+K
1604 vpsrld $31,%xmm6,%xmm10
1608 vaesenc %xmm14,%xmm11,%xmm11          # AES round 7
1609 vmovups 16(%r15),%xmm15
1611 vpslldq $12,%xmm6,%xmm9
1612 vpaddd %xmm6,%xmm6,%xmm6
1617 vpsrld $30,%xmm9,%xmm8
1618 vpor %xmm10,%xmm6,%xmm6
1623 vpslld $2,%xmm9,%xmm9
1624 vpxor %xmm8,%xmm6,%xmm6
1629 vpxor %xmm9,%xmm6,%xmm6
1632 vmovdqa 16(%r11),%xmm9                # K constant (rounds 20-39)
1634 vaesenc %xmm15,%xmm11,%xmm11          # AES round 8
1635 vmovups 32(%r15),%xmm14
1641 vpalignr $8,%xmm3,%xmm4,%xmm7
1644 vpaddd %xmm6,%xmm9,%xmm9
1647 vpsrldq $4,%xmm6,%xmm8
1650 vpxor %xmm3,%xmm7,%xmm7
1653 vpxor %xmm5,%xmm8,%xmm8
1658 vaesenc %xmm14,%xmm11,%xmm11          # AES round 9
1659 vmovups 48(%r15),%xmm15
1660 vpxor %xmm8,%xmm7,%xmm7
1663 vmovdqa %xmm9,32(%rsp)                # stage W[t]+K
1666 vpsrld $31,%xmm7,%xmm8
1671 vpslldq $12,%xmm7,%xmm10
1672 vpaddd %xmm7,%xmm7,%xmm7
1677 vpsrld $30,%xmm10,%xmm9
1678 vpor %xmm8,%xmm7,%xmm7
1683 vpslld $2,%xmm10,%xmm10
1684 vpxor %xmm9,%xmm7,%xmm7
# Tail rounds (192/256-bit keys) + final round for block 0:
1688 vaesenc %xmm15,%xmm11,%xmm11
1689 vmovups 64(%r15),%xmm14
1690 vaesenc %xmm14,%xmm11,%xmm11
1691 vmovups 80(%r15),%xmm15
1693 vaesenc %xmm15,%xmm11,%xmm11
1694 vmovups 96(%r15),%xmm14
1695 vaesenc %xmm14,%xmm11,%xmm11
1696 vmovups 112(%r15),%xmm15
1698 vaesenclast %xmm15,%xmm11,%xmm11      # ciphertext block 0 ready
1699 vmovups 16-112(%r15),%xmm14           # reload round key 1
1703 vpxor %xmm10,%xmm7,%xmm7
1706 vmovdqa 16(%r11),%xmm10               # K constant (rounds 20-39)
1711 vpalignr $8,%xmm6,%xmm7,%xmm9
1712 vpxor %xmm4,%xmm0,%xmm0
1717 vpxor %xmm1,%xmm0,%xmm0
1720 vmovdqa %xmm10,%xmm8
1721 vpaddd %xmm7,%xmm10,%xmm10
1723 vmovups 16(%r12),%xmm12               # next plaintext block
1724 vxorps %xmm13,%xmm12,%xmm12           # whiten with round key 0
1725 vmovups %xmm11,0(%r13,%r12,1)         # store ciphertext block 0
1726 vxorps %xmm12,%xmm11,%xmm11           # CBC chain into next block
1727 vaesenc %xmm14,%xmm11,%xmm11          # block 1, AES round 1
1728 vmovups -80(%r15),%xmm15
1730 vpxor %xmm9,%xmm0,%xmm0
1735 vpsrld $30,%xmm0,%xmm9
1736 vmovdqa %xmm10,48(%rsp)               # stage W[t]+K
1741 vpslld $2,%xmm0,%xmm0
1750 vaesenc %xmm15,%xmm11,%xmm11
1751 vmovups -64(%r15),%xmm14
1752 vpor %xmm9,%xmm0,%xmm0                # rol(W,2)
1755 vmovdqa %xmm0,%xmm10
1770 vpalignr $8,%xmm7,%xmm0,%xmm10
1771 vpxor %xmm5,%xmm1,%xmm1
1773 vaesenc %xmm14,%xmm11,%xmm11
1774 vmovups -48(%r15),%xmm15
1778 vpxor %xmm2,%xmm1,%xmm1
1782 vpaddd %xmm0,%xmm8,%xmm8
1785 vpxor %xmm10,%xmm1,%xmm1
1790 vpsrld $30,%xmm1,%xmm10
1791 vmovdqa %xmm8,0(%rsp)                 # stage W[t]+K
1796 vpslld $2,%xmm1,%xmm1
1802 vaesenc %xmm15,%xmm11,%xmm11
1803 vmovups -32(%r15),%xmm14
1807 vpor %xmm10,%xmm1,%xmm1
1817 vpalignr $8,%xmm0,%xmm1,%xmm8
1818 vpxor %xmm6,%xmm2,%xmm2
1823 vpxor %xmm3,%xmm2,%xmm2
1826 vmovdqa 32(%r11),%xmm10               # K constant (rounds 40-59)
1827 vpaddd %xmm1,%xmm9,%xmm9
1830 vpxor %xmm8,%xmm2,%xmm2
1832 vaesenc %xmm14,%xmm11,%xmm11
1833 vmovups -16(%r15),%xmm15
1837 vpsrld $30,%xmm2,%xmm8
1838 vmovdqa %xmm9,16(%rsp)                # stage W[t]+K
1843 vpslld $2,%xmm2,%xmm2
1852 vpor %xmm8,%xmm2,%xmm2
1859 vaesenc %xmm15,%xmm11,%xmm11
1860 vmovups 0(%r15),%xmm14
1864 vpalignr $8,%xmm1,%xmm2,%xmm9
1865 vpxor %xmm7,%xmm3,%xmm3
1870 vpxor %xmm4,%xmm3,%xmm3
1873 vmovdqa %xmm10,%xmm8
1874 vpaddd %xmm2,%xmm10,%xmm10
1877 vpxor %xmm9,%xmm3,%xmm3
1882 vpsrld $30,%xmm3,%xmm9
1883 vmovdqa %xmm10,32(%rsp)               # stage W[t]+K
1888 vpslld $2,%xmm3,%xmm3
1890 vaesenc %xmm14,%xmm11,%xmm11
1891 vmovups 16(%r15),%xmm15
1899 vpor %xmm9,%xmm3,%xmm3
1902 vmovdqa %xmm3,%xmm10
1909 vpalignr $8,%xmm2,%xmm3,%xmm10
1910 vpxor %xmm0,%xmm4,%xmm4
1915 vpxor %xmm5,%xmm4,%xmm4
1917 vaesenc %xmm15,%xmm11,%xmm11
1918 vmovups 32(%r15),%xmm14
1921 vpaddd %xmm3,%xmm8,%xmm8
1924 vpxor %xmm10,%xmm4,%xmm4
1929 vpsrld $30,%xmm4,%xmm10
1930 vmovdqa %xmm8,48(%rsp)                # stage W[t]+K
1935 vpslld $2,%xmm4,%xmm4
1944 vpor %xmm10,%xmm4,%xmm4
1946 vaesenc %xmm14,%xmm11,%xmm11
1947 vmovups 48(%r15),%xmm15
1956 vpalignr $8,%xmm3,%xmm4,%xmm8
1957 vpxor %xmm1,%xmm5,%xmm5
1962 vpxor %xmm6,%xmm5,%xmm5
1965 vmovdqa %xmm9,%xmm10
1966 vpaddd %xmm4,%xmm9,%xmm9
1969 vpxor %xmm8,%xmm5,%xmm5
1974 vpsrld $30,%xmm5,%xmm8
1975 vmovdqa %xmm9,0(%rsp)                 # stage W[t]+K
# Tail rounds + final round for block 1:
1979 vaesenc %xmm15,%xmm11,%xmm11
1980 vmovups 64(%r15),%xmm14
1981 vaesenc %xmm14,%xmm11,%xmm11
1982 vmovups 80(%r15),%xmm15
1984 vaesenc %xmm15,%xmm11,%xmm11
1985 vmovups 96(%r15),%xmm14
1986 vaesenc %xmm14,%xmm11,%xmm11
1987 vmovups 112(%r15),%xmm15
1989 vaesenclast %xmm15,%xmm11,%xmm11      # ciphertext block 1 ready
1990 vmovups 16-112(%r15),%xmm14           # reload round key 1
1994 vpslld $2,%xmm5,%xmm5
2003 vpor %xmm8,%xmm5,%xmm5
2013 vpalignr $8,%xmm4,%xmm5,%xmm9
2014 vpxor %xmm2,%xmm6,%xmm6
2016 vmovups 32(%r12),%xmm12               # next plaintext block
2017 vxorps %xmm13,%xmm12,%xmm12           # whiten with round key 0
2018 vmovups %xmm11,16(%r13,%r12,1)        # store ciphertext block 1
2019 vxorps %xmm12,%xmm11,%xmm11           # CBC chain into next block
2020 vaesenc %xmm14,%xmm11,%xmm11          # block 2, AES round 1
2021 vmovups -80(%r15),%xmm15
2025 vpxor %xmm7,%xmm6,%xmm6
2028 vmovdqa %xmm10,%xmm8
2029 vpaddd %xmm5,%xmm10,%xmm10
2032 vpxor %xmm9,%xmm6,%xmm6
2037 vpsrld $30,%xmm6,%xmm9
2038 vmovdqa %xmm10,16(%rsp)               # stage W[t]+K
2043 vpslld $2,%xmm6,%xmm6
2049 vaesenc %xmm15,%xmm11,%xmm11
2050 vmovups -64(%r15),%xmm14
2054 vpor %xmm9,%xmm6,%xmm6
2057 vmovdqa %xmm6,%xmm10
2073 vaesenc %xmm14,%xmm11,%xmm11
2074 vmovups -48(%r15),%xmm15
2082 vpalignr $8,%xmm5,%xmm6,%xmm10
2083 vpxor %xmm3,%xmm7,%xmm7
2088 vpxor %xmm0,%xmm7,%xmm7
2091 vmovdqa 48(%r11),%xmm9                # K constant (rounds 60-79)
2092 vpaddd %xmm6,%xmm8,%xmm8
2095 vpxor %xmm10,%xmm7,%xmm7
2100 vpsrld $30,%xmm7,%xmm10
2101 vmovdqa %xmm8,32(%rsp)                # stage W[t]+K
2103 vaesenc %xmm15,%xmm11,%xmm11
2104 vmovups -32(%r15),%xmm14
2108 vpslld $2,%xmm7,%xmm7
2117 vpor %xmm10,%xmm7,%xmm7
2128 vaesenc %xmm14,%xmm11,%xmm11
2129 vmovups -16(%r15),%xmm15
2145 vpalignr $8,%xmm6,%xmm7,%xmm8
2146 vpxor %xmm4,%xmm0,%xmm0
2151 vpxor %xmm1,%xmm0,%xmm0
2153 vaesenc %xmm15,%xmm11,%xmm11
2154 vmovups 0(%r15),%xmm14
2156 vmovdqa %xmm9,%xmm10
2157 vpaddd %xmm7,%xmm9,%xmm9
2160 vpxor %xmm8,%xmm0,%xmm0
2165 vpsrld $30,%xmm0,%xmm8
2166 vmovdqa %xmm9,48(%rsp)                # stage W[t]+K
2171 vpslld $2,%xmm0,%xmm0
2180 vpor %xmm8,%xmm0,%xmm0
2182 vaesenc %xmm14,%xmm11,%xmm11
2183 vmovups 16(%r15),%xmm15
2205 vaesenc %xmm15,%xmm11,%xmm11
2206 vmovups 32(%r15),%xmm14
2210 vpalignr $8,%xmm7,%xmm0,%xmm9
2211 vpxor %xmm5,%xmm1,%xmm1
2216 vpxor %xmm2,%xmm1,%xmm1
2219 vmovdqa %xmm10,%xmm8
2220 vpaddd %xmm0,%xmm10,%xmm10
2223 vpxor %xmm9,%xmm1,%xmm1
2228 vpsrld $30,%xmm1,%xmm9
2229 vmovdqa %xmm10,0(%rsp)                # stage W[t]+K
2234 vpslld $2,%xmm1,%xmm1
2236 vaesenc %xmm14,%xmm11,%xmm11
2237 vmovups 48(%r15),%xmm15
2245 vpor %xmm9,%xmm1,%xmm1
2248 vmovdqa %xmm1,%xmm10
# Tail rounds + final round for block 2:
2262 vaesenc %xmm15,%xmm11,%xmm11
2263 vmovups 64(%r15),%xmm14
2264 vaesenc %xmm14,%xmm11,%xmm11
2265 vmovups 80(%r15),%xmm15
2267 vaesenc %xmm15,%xmm11,%xmm11
2268 vmovups 96(%r15),%xmm14
2269 vaesenc %xmm14,%xmm11,%xmm11
2270 vmovups 112(%r15),%xmm15
2272 vaesenclast %xmm15,%xmm11,%xmm11      # ciphertext block 2 ready
2273 vmovups 16-112(%r15),%xmm14           # reload round key 1
2285 vpalignr $8,%xmm0,%xmm1,%xmm10
2286 vpxor %xmm6,%xmm2,%xmm2
2291 vpxor %xmm3,%xmm2,%xmm2
2295 vpaddd %xmm1,%xmm8,%xmm8
2298 vpxor %xmm10,%xmm2,%xmm2
2300 vmovups 48(%r12),%xmm12               # next plaintext block
2301 vxorps %xmm13,%xmm12,%xmm12           # whiten with round key 0
2302 vmovups %xmm11,32(%r13,%r12,1)        # store ciphertext block 2
2303 vxorps %xmm12,%xmm11,%xmm11           # CBC chain into next block
2304 vaesenc %xmm14,%xmm11,%xmm11          # block 3, AES round 1
2305 vmovups -80(%r15),%xmm15
2309 vpsrld $30,%xmm2,%xmm10
2310 vmovdqa %xmm8,16(%rsp)                # stage W[t]+K
2315 vpslld $2,%xmm2,%xmm2
2324 vpor %xmm10,%xmm2,%xmm2
2331 vaesenc %xmm15,%xmm11,%xmm11
2332 vmovups -64(%r15),%xmm14
2352 vpalignr $8,%xmm1,%xmm2,%xmm8
2353 vpxor %xmm7,%xmm3,%xmm3
2355 vaesenc %xmm14,%xmm11,%xmm11
2356 vmovups -48(%r15),%xmm15
2360 vpxor %xmm4,%xmm3,%xmm3
2363 vmovdqa %xmm9,%xmm10
2364 vpaddd %xmm2,%xmm9,%xmm9
2367 vpxor %xmm8,%xmm3,%xmm3
2372 vpsrld $30,%xmm3,%xmm8
2373 vmovdqa %xmm9,32(%rsp)                # stage W[t]+K
2378 vpslld $2,%xmm3,%xmm3
2384 vaesenc %xmm15,%xmm11,%xmm11
2385 vmovups -32(%r15),%xmm14
2389 vpor %xmm8,%xmm3,%xmm3
2399 vpaddd %xmm3,%xmm10,%xmm10
2404 vmovdqa %xmm10,48(%rsp)               # FIX: was legacy "movdqa" — lone non-VEX op in the AVX path; VEX form is an identical 128-bit store but avoids an SSE/AVX transition stall
2409 vaesenc %xmm14,%xmm11,%xmm11
2410 vmovups -16(%r15),%xmm15
2431 vaesenc %xmm15,%xmm11,%xmm11
2432 vmovups 0(%r15),%xmm14
# Load and byte-swap the next 64-byte SHA-1 message block:
2438 vmovdqa 64(%r11),%xmm6                # pshufb byte-order mask
2439 vmovdqa 0(%r11),%xmm9                 # K constant (rounds 0-19)
2440 vmovdqu 0(%r10),%xmm0                 # W[0..3]
2441 vmovdqu 16(%r10),%xmm1                # W[4..7]
2442 vmovdqu 32(%r10),%xmm2                # W[8..11]
2443 vmovdqu 48(%r10),%xmm3                # W[12..15]
2444 vpshufb %xmm6,%xmm0,%xmm0             # big-endian byte swap
2448 vpshufb %xmm6,%xmm1,%xmm1
2451 vpaddd %xmm9,%xmm0,%xmm4              # W + K
2456 vmovdqa %xmm4,0(%rsp)                 # stage W[t]+K
2466 vaesenc %xmm14,%xmm11,%xmm11
2467 vmovups 16(%r15),%xmm15
2485 vpshufb %xmm6,%xmm2,%xmm2
2488 vpaddd %xmm9,%xmm1,%xmm5
2490 vaesenc %xmm15,%xmm11,%xmm11
2491 vmovups 32(%r15),%xmm14
2495 vmovdqa %xmm5,16(%rsp)                # stage W[t]+K
2513 vaesenc %xmm14,%xmm11,%xmm11
2514 vmovups 48(%r15),%xmm15
2524 vpshufb %xmm6,%xmm3,%xmm3
2527 vpaddd %xmm9,%xmm2,%xmm6
2532 vmovdqa %xmm6,32(%rsp)                # stage W[t]+K
# Tail rounds + final round for block 3:
2540 vaesenc %xmm15,%xmm11,%xmm11
2541 vmovups 64(%r15),%xmm14
2542 vaesenc %xmm14,%xmm11,%xmm11
2543 vmovups 80(%r15),%xmm15
2545 vaesenc %xmm15,%xmm11,%xmm11
2546 vmovups 96(%r15),%xmm14
2547 vaesenc %xmm14,%xmm11,%xmm11
2548 vmovups 112(%r15),%xmm15
2550 vaesenclast %xmm15,%xmm11,%xmm11      # ciphertext block 3 ready
2551 vmovups 16-112(%r15),%xmm14           # reload round key 1
2571 vmovups %xmm11,48(%r13,%r12,1)        # store ciphertext block 3
# Drain path (final message block; no further schedule updates) — remaining
# AES rounds, presumably on loop exit — TODO confirm against full file:
2606 vaesenc %xmm14,%xmm11,%xmm11
2607 vmovups 16(%r15),%xmm15
2628 vaesenc %xmm15,%xmm11,%xmm11
2629 vmovups 32(%r15),%xmm14
2650 vaesenc %xmm14,%xmm11,%xmm11
2651 vmovups 48(%r15),%xmm15
2674 vaesenc %xmm15,%xmm11,%xmm11
2675 vmovups 64(%r15),%xmm14
2676 vaesenc %xmm14,%xmm11,%xmm11
2677 vmovups 80(%r15),%xmm15
2679 vaesenc %xmm15,%xmm11,%xmm11
2680 vmovups 96(%r15),%xmm14
2681 vaesenc %xmm14,%xmm11,%xmm11
2682 vmovups 112(%r15),%xmm15
2684 vaesenclast %xmm15,%xmm11,%xmm11      # final ciphertext block
2685 vmovups 16-112(%r15),%xmm14
2705 vmovups %xmm11,48(%r13,%r12,1)        # store final ciphertext block
2718 vmovups %xmm11,(%r8)                  # write back chaining value (IV out)
2730 .size aesni_cbc_sha1_enc_avx,.-aesni_cbc_sha1_enc_avx
# Constant pool referenced above via leaq K_XX_XX(%rip),%r11 (the K_XX_XX
# label itself falls in a gap of this excerpt — presumably immediately before
# these rows; TODO confirm). Offsets match the 0/16/32/48/64(%r11) loads:
2733 .long 0x5a827999,0x5a827999,0x5a827999,0x5a827999     # SHA-1 K, rounds 0-19
2734 .long 0x6ed9eba1,0x6ed9eba1,0x6ed9eba1,0x6ed9eba1     # SHA-1 K, rounds 20-39
2735 .long 0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc     # SHA-1 K, rounds 40-59
2736 .long 0xca62c1d6,0xca62c1d6,0xca62c1d6,0xca62c1d6     # SHA-1 K, rounds 60-79
2737 .long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f     # pshufb mask: byte-swap each dword (little- to big-endian)
# ASCII credit string: "AESNI-CBC+SHA1 stitch for x86_64, CRYPTOGAMS by <appro@openssl.org>\0"
2739 .byte 65,69,83,78,73,45,67,66,67,43,83,72,65,49,32,115,116,105,116,99,104,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0