5 * Copyright (c) 2009 David McCullough <david.mccullough@securecomputing.com>
7 * Copyright (c) 2003-2007 Cavium Networks (support@cavium.com). All rights
10 * Redistribution and use in source and binary forms, with or without
11 * modification, are permitted provided that the following conditions are met:
12 * 1. Redistributions of source code must retain the above copyright notice,
13 * this list of conditions and the following disclaimer.
14 * 2. Redistributions in binary form must reproduce the above copyright notice,
15 * this list of conditions and the following disclaimer in the documentation
16 * and/or other materials provided with the distribution.
17 * 3. All advertising materials mentioning features or use of this software
18 * must display the following acknowledgement:
19 * This product includes software developed by Cavium Networks
20 * 4. Cavium Networks' name may not be used to endorse or promote products
21 * derived from this software without specific prior written permission.
23 * This Software, including technical data, may be subject to U.S. export
24 * control laws, including the U.S. Export Administration Act and its
25 * associated regulations, and may be subject to export or import regulations
26 * in other countries. You warrant that You will comply strictly in all
27 * respects with all such regulations and acknowledge that you have the
28 * responsibility to obtain licenses to export, re-export or import the
31 * TO THE MAXIMUM EXTENT PERMITTED BY LAW, THE SOFTWARE IS PROVIDED "AS IS" AND
32 * WITH ALL FAULTS AND CAVIUM MAKES NO PROMISES, REPRESENTATIONS OR WARRANTIES,
33 * EITHER EXPRESS, IMPLIED, STATUTORY, OR OTHERWISE, WITH RESPECT TO THE
34 * SOFTWARE, INCLUDING ITS CONDITION, ITS CONFORMITY TO ANY REPRESENTATION OR
35 * DESCRIPTION, OR THE EXISTENCE OF ANY LATENT OR PATENT DEFECTS, AND CAVIUM
36 * SPECIFICALLY DISCLAIMS ALL IMPLIED (IF ANY) WARRANTIES OF TITLE,
37 * MERCHANTABILITY, NONINFRINGEMENT, FITNESS FOR A PARTICULAR PURPOSE, LACK OF
38 * VIRUSES, ACCURACY OR COMPLETENESS, QUIET ENJOYMENT, QUIET POSSESSION OR
39 * CORRESPONDENCE TO DESCRIPTION. THE ENTIRE RISK ARISING OUT OF USE OR
40 * PERFORMANCE OF THE SOFTWARE LIES WITH YOU.
42 /****************************************************************************/
44 #include <sys/cdefs.h>
45 __FBSDID("$FreeBSD$");
47 #include <sys/param.h>
48 #include <sys/systm.h>
49 #include <sys/kernel.h>
50 #include <sys/module.h>
51 #include <sys/malloc.h>
54 #include <opencrypto/cryptodev.h>
56 #include <contrib/octeon-sdk/cvmx.h>
58 #include <mips/cavium/cryptocteon/cryptocteonvar.h>
60 /****************************************************************************/
/*
 * IOV_INIT(iov, ptr, idx, len): point (ptr) and (len) at the base and
 * length of the current iovec segment.
 * NOTE(review): this listing is missing source lines here (original
 * numbering is non-contiguous); the do { ... } while (0) framing and
 * the (idx) initialization are not visible, so this excerpt is not the
 * complete macro.
 */
62 #define IOV_INIT(iov, ptr, idx, len) \
65 (ptr) = (iov)[(idx)].iov_base; \
66 (len) = (iov)[(idx)].iov_len; \
/*
 * IOV_CONSUME(iov, ptr, idx, len): consume one element's worth of the
 * current iovec segment; when the segment is exhausted, step (ptr)/(len)
 * to the next segment.  Panics if a consume would land past the end of
 * a segment mid-element (overflow detected after the fact -- see the
 * original author's note below).
 * NOTE(review): lines are missing from this listing -- the comment
 * opener/closer, do/while framing, the (ptr) increment, and the (idx)
 * advance are not visible.
 */
71 * It would be better if this were an IOV_READ/IOV_WRITE macro instead so
72 * that we could detect overflow before it happens rather than right after,
73 * which is especially bad since there is usually no IOV_CONSUME after the
74 * final read or write.
76 #define IOV_CONSUME(iov, ptr, idx, len) \
78 if ((len) > sizeof *(ptr)) { \
79 (len) -= sizeof *(ptr); \
82 if ((len) != sizeof *(ptr)) \
83 panic("%s: went past end of iovec.", __func__); \
85 (ptr) = (iov)[(idx)].iov_base; \
86 (len) = (iov)[(idx)].iov_len; \
/*
 * Enable access to the Octeon crypto coprocessor (COP2) by setting the
 * COP2-enable bit in the MIPS status register.  Presumably returns the
 * pre-modification status word -- callers store the result and hand it
 * to octeon_crypto_disable() -- but the return statement and braces are
 * not visible in this listing (missing lines).
 */
90 static inline unsigned long octeon_crypto_enable(void)
95 mips_wr_status(mips_rd_status() | MIPS_SR_COP_2_BIT);
/*
 * Counterpart of octeon_crypto_enable(): clear the COP2 (crypto unit)
 * enable bit in the MIPS status register.
 * NOTE(review): the visible body ignores 's' and unconditionally clears
 * the bit, and the parameter type (register_t) differs from the
 * unsigned long returned by octeon_crypto_enable() -- lines are missing
 * from this listing, so the full behavior cannot be confirmed here.
 */
100 static inline void octeon_crypto_disable(register_t s)
102 mips_wr_status(mips_rd_status() & ~MIPS_SR_COP_2_BIT);
/*
 * Protocol constants (bytes): ESP header size, cipher IV sizes, and the
 * truncated HMAC (ICV) length used by ESP.
 *
 * Fix: the original defined ESP_HEADER_LENGTH and DES_CBC_IV_LENGTH a
 * second time with identical values; the redundant redefinitions are
 * dropped.
 */
#define ESP_HEADER_LENGTH	8
#define DES_CBC_IV_LENGTH	8
#define AES_CBC_IV_LENGTH	16
#define ESP_HMAC_LEN		12
114 /****************************************************************************/
/*
 * CVM_LOAD_SHA_UNIT(dat, next): feed one 64-bit word to the hash unit.
 * Words 0..6 of a 512-bit block go to data registers HSH_DAT 0..6; the
 * eighth word is written via MT_HSH_STARTSHA, which triggers the SHA-1
 * block transform.  'next' tracks the fill position within the block.
 * NOTE(review): this listing is missing interleaved lines -- the
 * leading if (next == 0) test, the 'next' update statements, and the
 * closing braces are not visible.
 */
116 #define CVM_LOAD_SHA_UNIT(dat, next) { \
119 CVMX_MT_HSH_DAT (dat, 0); \
120 } else if (next == 1) { \
122 CVMX_MT_HSH_DAT (dat, 1); \
123 } else if (next == 2) { \
125 CVMX_MT_HSH_DAT (dat, 2); \
126 } else if (next == 3) { \
128 CVMX_MT_HSH_DAT (dat, 3); \
129 } else if (next == 4) { \
131 CVMX_MT_HSH_DAT (dat, 4); \
132 } else if (next == 5) { \
134 CVMX_MT_HSH_DAT (dat, 5); \
135 } else if (next == 6) { \
137 CVMX_MT_HSH_DAT (dat, 6); \
139 CVMX_MT_HSH_STARTSHA (dat); \
/*
 * CVM_LOAD2_SHA_UNIT(dat1, dat2, next): like CVM_LOAD_SHA_UNIT but
 * feeds two 64-bit words per invocation.  When the pair straddles a
 * 512-bit block boundary, the first word completes the block (via
 * MT_HSH_STARTSHA) and the second begins the next one.
 * NOTE(review): interleaved lines ('next' updates, closing braces, and
 * the leading if (next == 0) test) are missing from this listing.
 */
144 #define CVM_LOAD2_SHA_UNIT(dat1, dat2, next) { \
146 CVMX_MT_HSH_DAT (dat1, 0); \
147 CVMX_MT_HSH_DAT (dat2, 1); \
149 } else if (next == 1) { \
150 CVMX_MT_HSH_DAT (dat1, 1); \
151 CVMX_MT_HSH_DAT (dat2, 2); \
153 } else if (next == 2) { \
154 CVMX_MT_HSH_DAT (dat1, 2); \
155 CVMX_MT_HSH_DAT (dat2, 3); \
157 } else if (next == 3) { \
158 CVMX_MT_HSH_DAT (dat1, 3); \
159 CVMX_MT_HSH_DAT (dat2, 4); \
161 } else if (next == 4) { \
162 CVMX_MT_HSH_DAT (dat1, 4); \
163 CVMX_MT_HSH_DAT (dat2, 5); \
165 } else if (next == 5) { \
166 CVMX_MT_HSH_DAT (dat1, 5); \
167 CVMX_MT_HSH_DAT (dat2, 6); \
169 } else if (next == 6) { \
170 CVMX_MT_HSH_DAT (dat1, 6); \
171 CVMX_MT_HSH_STARTSHA (dat2); \
174 CVMX_MT_HSH_STARTSHA (dat1); \
175 CVMX_MT_HSH_DAT (dat2, 0); \
180 /****************************************************************************/
/*
 * CVM_LOAD_MD5_UNIT(dat, next): feed one 64-bit word to the hash unit
 * for MD5.  Identical structure to CVM_LOAD_SHA_UNIT, but the eighth
 * word of each 512-bit block is written via MT_HSH_STARTMD5, triggering
 * the MD5 block transform instead of SHA-1.
 * NOTE(review): the leading if (next == 0) test, 'next' updates, and
 * closing braces are missing from this listing.
 */
182 #define CVM_LOAD_MD5_UNIT(dat, next) { \
185 CVMX_MT_HSH_DAT (dat, 0); \
186 } else if (next == 1) { \
188 CVMX_MT_HSH_DAT (dat, 1); \
189 } else if (next == 2) { \
191 CVMX_MT_HSH_DAT (dat, 2); \
192 } else if (next == 3) { \
194 CVMX_MT_HSH_DAT (dat, 3); \
195 } else if (next == 4) { \
197 CVMX_MT_HSH_DAT (dat, 4); \
198 } else if (next == 5) { \
200 CVMX_MT_HSH_DAT (dat, 5); \
201 } else if (next == 6) { \
203 CVMX_MT_HSH_DAT (dat, 6); \
205 CVMX_MT_HSH_STARTMD5 (dat); \
/*
 * CVM_LOAD2_MD5_UNIT(dat1, dat2, next): two-word variant of
 * CVM_LOAD_MD5_UNIT.  When the pair straddles a 512-bit block boundary,
 * the first word completes the block (MT_HSH_STARTMD5) and the second
 * starts the next one.
 * NOTE(review): interleaved lines ('next' updates, closing braces, the
 * leading if (next == 0) test) are missing from this listing.
 */
210 #define CVM_LOAD2_MD5_UNIT(dat1, dat2, next) { \
212 CVMX_MT_HSH_DAT (dat1, 0); \
213 CVMX_MT_HSH_DAT (dat2, 1); \
215 } else if (next == 1) { \
216 CVMX_MT_HSH_DAT (dat1, 1); \
217 CVMX_MT_HSH_DAT (dat2, 2); \
219 } else if (next == 2) { \
220 CVMX_MT_HSH_DAT (dat1, 2); \
221 CVMX_MT_HSH_DAT (dat2, 3); \
223 } else if (next == 3) { \
224 CVMX_MT_HSH_DAT (dat1, 3); \
225 CVMX_MT_HSH_DAT (dat2, 4); \
227 } else if (next == 4) { \
228 CVMX_MT_HSH_DAT (dat1, 4); \
229 CVMX_MT_HSH_DAT (dat2, 5); \
231 } else if (next == 5) { \
232 CVMX_MT_HSH_DAT (dat1, 5); \
233 CVMX_MT_HSH_DAT (dat2, 6); \
235 } else if (next == 6) { \
236 CVMX_MT_HSH_DAT (dat1, 6); \
237 CVMX_MT_HSH_STARTMD5 (dat2); \
240 CVMX_MT_HSH_STARTMD5 (dat1); \
241 CVMX_MT_HSH_DAT (dat2, 0); \
246 /****************************************************************************/
/*
 * octo_calc_hash() -- precompute HMAC inner/outer chaining values.
 *
 * Zero-pads the HMAC key to a 64-byte block, XORs it with the HMAC
 * ipad (0x36..36) and opad (0x5c..5c) constants, runs one hash round
 * over each padded block through the Octeon hash unit, and stores the
 * resulting chaining state in 'inner' and 'outer' (RFC 2104 style
 * precomputation).  'auth' selects how much key material is copied
 * (20 vs. 16 bytes) and apparently selects SHA-1 vs. MD5 -- TODO
 * confirm against callers; the branch lines are not visible here.
 *
 * NOTE(review): original numbering is non-contiguous -- the opening
 * brace, declarations of 'key1' and 's', the if/else framing around
 * the IV loads and the STARTSHA/STARTMD5 choice, the key1 pointer
 * advances, and the closing brace are all missing from this excerpt.
 */
249 octo_calc_hash(uint8_t auth, unsigned char *key, uint64_t *inner, uint64_t *outer)
251 uint8_t hash_key[64];
253 register uint64_t xor1 = 0x3636363636363636ULL;
254 register uint64_t xor2 = 0x5c5c5c5c5c5c5c5cULL;
257 dprintf("%s()\n", __func__);
/* Zero-pad the key to the 64-byte HMAC block size. */
259 memset(hash_key, 0, sizeof(hash_key));
260 memcpy(hash_key, (uint8_t *) key, (auth ? 20 : 16));
261 key1 = (uint64_t *) hash_key;
262 s = octeon_crypto_enable();
/* SHA-1 initial chaining values ... */
264 CVMX_MT_HSH_IV(0x67452301EFCDAB89ULL, 0);
265 CVMX_MT_HSH_IV(0x98BADCFE10325476ULL, 1);
266 CVMX_MT_HSH_IV(0xC3D2E1F000000000ULL, 2);
/* ... or MD5 initial values (the selecting if/else is not visible). */
268 CVMX_MT_HSH_IV(0x0123456789ABCDEFULL, 0);
269 CVMX_MT_HSH_IV(0xFEDCBA9876543210ULL, 1);
/* Inner pass: hash (key XOR ipad); key1 advances are not visible. */
272 CVMX_MT_HSH_DAT((*key1 ^ xor1), 0);
274 CVMX_MT_HSH_DAT((*key1 ^ xor1), 1);
276 CVMX_MT_HSH_DAT((*key1 ^ xor1), 2);
278 CVMX_MT_HSH_DAT((*key1 ^ xor1), 3);
280 CVMX_MT_HSH_DAT((*key1 ^ xor1), 4);
282 CVMX_MT_HSH_DAT((*key1 ^ xor1), 5);
284 CVMX_MT_HSH_DAT((*key1 ^ xor1), 6);
287 CVMX_MT_HSH_STARTSHA((*key1 ^ xor1));
289 CVMX_MT_HSH_STARTMD5((*key1 ^ xor1));
/* Capture the inner chaining value. */
291 CVMX_MF_HSH_IV(inner[0], 0);
292 CVMX_MF_HSH_IV(inner[1], 1);
295 CVMX_MF_HSH_IV(((uint64_t *) inner)[2], 2);
/* Outer pass: rebuild the padded key and hash (key XOR opad). */
298 memset(hash_key, 0, sizeof(hash_key));
299 memcpy(hash_key, (uint8_t *) key, (auth ? 20 : 16));
300 key1 = (uint64_t *) hash_key;
302 CVMX_MT_HSH_IV(0x67452301EFCDAB89ULL, 0);
303 CVMX_MT_HSH_IV(0x98BADCFE10325476ULL, 1);
304 CVMX_MT_HSH_IV(0xC3D2E1F000000000ULL, 2);
306 CVMX_MT_HSH_IV(0x0123456789ABCDEFULL, 0);
307 CVMX_MT_HSH_IV(0xFEDCBA9876543210ULL, 1);
310 CVMX_MT_HSH_DAT((*key1 ^ xor2), 0);
312 CVMX_MT_HSH_DAT((*key1 ^ xor2), 1);
314 CVMX_MT_HSH_DAT((*key1 ^ xor2), 2);
316 CVMX_MT_HSH_DAT((*key1 ^ xor2), 3);
318 CVMX_MT_HSH_DAT((*key1 ^ xor2), 4);
320 CVMX_MT_HSH_DAT((*key1 ^ xor2), 5);
322 CVMX_MT_HSH_DAT((*key1 ^ xor2), 6);
325 CVMX_MT_HSH_STARTSHA((*key1 ^ xor2));
327 CVMX_MT_HSH_STARTMD5((*key1 ^ xor2));
/* Capture the outer chaining value. */
329 CVMX_MF_HSH_IV(outer[0], 0);
330 CVMX_MF_HSH_IV(outer[1], 1);
333 CVMX_MF_HSH_IV(outer[2], 2);
335 octeon_crypto_disable(s);
339 /****************************************************************************/
/*
 * octo_des_cbc_encrypt() -- DES/3DES-CBC encrypt 'crypt_len' bytes of
 * the iovec chain in place, starting 'crypt_off' bytes in.
 *
 * A 24-byte key fills all three 3DES key slots; an 8-byte single-DES
 * key is replicated into each slot (EDE with identical keys).  The
 * 64-bit IV comes from 'ivp'.  auth_off/auth_len/icv_off are unused in
 * this cipher-only path.  crypt_off must be 8-byte aligned and the
 * region must fit within iovlen.
 *
 * NOTE(review): lines are missing from this listing -- declarations of
 * 'data'/'data_i'/'data_l'/'s', the error-return statements, the
 * crypt_off/crypt_len decrements, and closing braces are not visible.
 */
343 octo_des_cbc_encrypt(
344 struct octo_sess *od,
345 struct iovec *iov, size_t iovcnt, size_t iovlen,
346 int auth_off, int auth_len,
347 int crypt_off, int crypt_len,
348 int icv_off, uint8_t *ivp)
354 dprintf("%s()\n", __func__);
356 if (__predict_false(od == NULL || iov==NULL || iovlen==0 || ivp==NULL ||
357 (crypt_off & 0x7) || (crypt_off + crypt_len > iovlen))) {
358 dprintf("%s: Bad parameters od=%p iov=%p iovlen=%jd "
359 "auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
360 "icv_off=%d ivp=%p\n", __func__, od, iov, iovlen,
361 auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
365 IOV_INIT(iov, data, data_i, data_l);
368 CVMX_PREFETCH0(od->octo_enckey);
370 s = octeon_crypto_enable();
/* Load the 3DES key registers; replicate an 8-byte DES key. */
373 CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 0);
374 if (od->octo_encklen == 24) {
375 CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[1], 1);
376 CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
377 } else if (od->octo_encklen == 8) {
378 CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 1);
379 CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 2);
381 octeon_crypto_disable(s);
382 dprintf("%s: Bad key length %d\n", __func__, od->octo_encklen);
386 CVMX_MT_3DES_IV(* (uint64_t *) ivp);
/* Skip over the leading crypt_off bytes. */
388 while (crypt_off > 0) {
389 IOV_CONSUME(iov, data, data_i, data_l);
/* Encrypt in place, one 64-bit block per coprocessor round trip. */
393 while (crypt_len > 0) {
394 CVMX_MT_3DES_ENC_CBC(*data);
395 CVMX_MF_3DES_RESULT(*data);
396 IOV_CONSUME(iov, data, data_i, data_l);
400 octeon_crypto_disable(s);
/*
 * octo_des_cbc_decrypt() -- DES/3DES-CBC decrypt 'crypt_len' bytes of
 * the iovec chain in place, starting at 'crypt_off'.  Mirror image of
 * octo_des_cbc_encrypt(): same key/IV setup, but drives the coprocessor
 * with MT_3DES_DEC_CBC.  auth_off/auth_len/icv_off are unused here.
 *
 * NOTE(review): lines are missing from this listing -- declarations,
 * error returns, loop-counter decrements, and closing braces are not
 * visible.
 */
406 octo_des_cbc_decrypt(
407 struct octo_sess *od,
408 struct iovec *iov, size_t iovcnt, size_t iovlen,
409 int auth_off, int auth_len,
410 int crypt_off, int crypt_len,
411 int icv_off, uint8_t *ivp)
417 dprintf("%s()\n", __func__);
419 if (__predict_false(od == NULL || iov==NULL || iovlen==0 || ivp==NULL ||
420 (crypt_off & 0x7) || (crypt_off + crypt_len > iovlen))) {
421 dprintf("%s: Bad parameters od=%p iov=%p iovlen=%jd "
422 "auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
423 "icv_off=%d ivp=%p\n", __func__, od, iov, iovlen,
424 auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
428 IOV_INIT(iov, data, data_i, data_l);
431 CVMX_PREFETCH0(od->octo_enckey);
433 s = octeon_crypto_enable();
/* Load the 3DES key registers; replicate an 8-byte DES key. */
436 CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 0);
437 if (od->octo_encklen == 24) {
438 CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[1], 1);
439 CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
440 } else if (od->octo_encklen == 8) {
441 CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 1);
442 CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 2);
444 octeon_crypto_disable(s);
445 dprintf("%s: Bad key length %d\n", __func__, od->octo_encklen);
449 CVMX_MT_3DES_IV(* (uint64_t *) ivp);
/* Skip over the leading crypt_off bytes. */
451 while (crypt_off > 0) {
452 IOV_CONSUME(iov, data, data_i, data_l);
/* Decrypt in place, one 64-bit block at a time. */
456 while (crypt_len > 0) {
457 CVMX_MT_3DES_DEC_CBC(*data);
458 CVMX_MF_3DES_RESULT(*data);
459 IOV_CONSUME(iov, data, data_i, data_l);
463 octeon_crypto_disable(s);
467 /****************************************************************************/
/*
 * octo_aes_cbc_encrypt() -- AES-CBC encrypt 'crypt_len' bytes of the
 * iovec chain in place, starting at 'crypt_off'.
 *
 * Handles 128/192/256-bit keys; unused key slots are zeroed and the
 * key size is programmed via MT_AES_KEYLENGTH (encklen/8 - 1).  Each
 * 128-bit AES block is processed as two 64-bit words (ENC_CBC0/ENC_CBC1
 * then two RESULT reads).  auth_off/auth_len/icv_off are unused here.
 *
 * NOTE(review): lines are missing from this listing -- declarations of
 * 'data_i'/'data_l'/'s', error returns, loop-counter decrements,
 * closing braces, and the assignment of 'pdata' (presumably
 * 'pdata = data' before the mid-block IOV_CONSUME, so the first result
 * word lands in the first half of the block -- confirm) are not
 * visible.
 */
471 octo_aes_cbc_encrypt(
472 struct octo_sess *od,
473 struct iovec *iov, size_t iovcnt, size_t iovlen,
474 int auth_off, int auth_len,
475 int crypt_off, int crypt_len,
476 int icv_off, uint8_t *ivp)
478 uint64_t *data, *pdata;
482 dprintf("%s()\n", __func__);
484 if (__predict_false(od == NULL || iov==NULL || iovlen==0 || ivp==NULL ||
485 (crypt_off & 0x7) || (crypt_off + crypt_len > iovlen))) {
486 dprintf("%s: Bad parameters od=%p iov=%p iovlen=%jd "
487 "auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
488 "icv_off=%d ivp=%p\n", __func__, od, iov, iovlen,
489 auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
493 IOV_INIT(iov, data, data_i, data_l);
496 CVMX_PREFETCH0(od->octo_enckey);
498 s = octeon_crypto_enable();
/* Load the AES key; zero-fill slots beyond the key length. */
501 CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[0], 0);
502 CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[1], 1);
504 if (od->octo_encklen == 16) {
505 CVMX_MT_AES_KEY(0x0, 2);
506 CVMX_MT_AES_KEY(0x0, 3);
507 } else if (od->octo_encklen == 24) {
508 CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
509 CVMX_MT_AES_KEY(0x0, 3);
510 } else if (od->octo_encklen == 32) {
511 CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
512 CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[3], 3);
514 octeon_crypto_disable(s);
515 dprintf("%s: Bad key length %d\n", __func__, od->octo_encklen);
518 CVMX_MT_AES_KEYLENGTH(od->octo_encklen / 8 - 1);
/* 128-bit IV as two 64-bit words. */
520 CVMX_MT_AES_IV(((uint64_t *) ivp)[0], 0);
521 CVMX_MT_AES_IV(((uint64_t *) ivp)[1], 1);
/* Skip over the leading crypt_off bytes. */
523 while (crypt_off > 0) {
524 IOV_CONSUME(iov, data, data_i, data_l);
/* Encrypt in place, one 128-bit block as two 64-bit word transfers. */
528 while (crypt_len > 0) {
530 CVMX_MT_AES_ENC_CBC0(*data);
531 IOV_CONSUME(iov, data, data_i, data_l);
532 CVMX_MT_AES_ENC_CBC1(*data);
533 CVMX_MF_AES_RESULT(*pdata, 0);
534 CVMX_MF_AES_RESULT(*data, 1);
535 IOV_CONSUME(iov, data, data_i, data_l);
539 octeon_crypto_disable(s);
/*
 * octo_aes_cbc_decrypt() -- AES-CBC decrypt 'crypt_len' bytes of the
 * iovec chain in place, starting at 'crypt_off'.  Mirror image of
 * octo_aes_cbc_encrypt(): same key/IV programming, but uses
 * MT_AES_DEC_CBC0/1.  auth_off/auth_len/icv_off are unused here.
 *
 * NOTE(review): lines are missing from this listing -- declarations,
 * error returns, loop-counter decrements, closing braces, and the
 * 'pdata' assignment (presumably 'pdata = data' before the mid-block
 * IOV_CONSUME -- confirm) are not visible.
 */
545 octo_aes_cbc_decrypt(
546 struct octo_sess *od,
547 struct iovec *iov, size_t iovcnt, size_t iovlen,
548 int auth_off, int auth_len,
549 int crypt_off, int crypt_len,
550 int icv_off, uint8_t *ivp)
552 uint64_t *data, *pdata;
556 dprintf("%s()\n", __func__);
558 if (__predict_false(od == NULL || iov==NULL || iovlen==0 || ivp==NULL ||
559 (crypt_off & 0x7) || (crypt_off + crypt_len > iovlen))) {
560 dprintf("%s: Bad parameters od=%p iov=%p iovlen=%jd "
561 "auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
562 "icv_off=%d ivp=%p\n", __func__, od, iov, iovlen,
563 auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
567 IOV_INIT(iov, data, data_i, data_l);
570 CVMX_PREFETCH0(od->octo_enckey);
572 s = octeon_crypto_enable();
/* Load the AES key; zero-fill slots beyond the key length. */
575 CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[0], 0);
576 CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[1], 1);
578 if (od->octo_encklen == 16) {
579 CVMX_MT_AES_KEY(0x0, 2);
580 CVMX_MT_AES_KEY(0x0, 3);
581 } else if (od->octo_encklen == 24) {
582 CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
583 CVMX_MT_AES_KEY(0x0, 3);
584 } else if (od->octo_encklen == 32) {
585 CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
586 CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[3], 3);
588 octeon_crypto_disable(s);
589 dprintf("%s: Bad key length %d\n", __func__, od->octo_encklen);
592 CVMX_MT_AES_KEYLENGTH(od->octo_encklen / 8 - 1);
/* 128-bit IV as two 64-bit words. */
594 CVMX_MT_AES_IV(((uint64_t *) ivp)[0], 0);
595 CVMX_MT_AES_IV(((uint64_t *) ivp)[1], 1);
/* Skip over the leading crypt_off bytes. */
597 while (crypt_off > 0) {
598 IOV_CONSUME(iov, data, data_i, data_l);
/* Decrypt in place, one 128-bit block as two 64-bit word transfers. */
602 while (crypt_len > 0) {
604 CVMX_MT_AES_DEC_CBC0(*data);
605 IOV_CONSUME(iov, data, data_i, data_l);
606 CVMX_MT_AES_DEC_CBC1(*data);
607 CVMX_MF_AES_RESULT(*pdata, 0);
608 CVMX_MF_AES_RESULT(*data, 1);
609 IOV_CONSUME(iov, data, data_i, data_l);
613 octeon_crypto_disable(s);
617 /****************************************************************************/
/*
 * octo_null_md5_encrypt() -- HMAC-MD5 authentication only (null cipher).
 *
 * Seeds the hash unit with the precomputed inner chaining value
 * (od->octo_hminner), hashes auth_len bytes starting at auth_off,
 * applies MD5 padding with a bit length of (auth_len + 64) -- the +64
 * accounts for the already-hashed ipad block -- then runs the outer
 * HMAC block from od->octo_hmouter and writes a 12-byte truncated ICV
 * into the iovec chain at icv_off (8 bytes + high 4 bytes of the next
 * word).  crypt_off/crypt_len/ivp are unused in this path.
 *
 * NOTE(review): lines are missing from this listing -- declarations
 * ('data', 'tmp', 'tmp1', 'tmp2', 's', 'inplen'), the error return,
 * loop-counter decrements, the byte-copy loop around the partial tail,
 * the HSH_DATZ fills before the outer STARTMD5, and closing braces are
 * not visible.
 */
621 octo_null_md5_encrypt(
622 struct octo_sess *od,
623 struct iovec *iov, size_t iovcnt, size_t iovlen,
624 int auth_off, int auth_len,
625 int crypt_off, int crypt_len,
626 int icv_off, uint8_t *ivp)
628 register int next = 0;
631 int data_i, data_l, alen = auth_len;
634 dprintf("%s()\n", __func__);
636 if (__predict_false(od == NULL || iov==NULL || iovlen==0 ||
637 (auth_off & 0x7) || (auth_off + auth_len > iovlen))) {
638 dprintf("%s: Bad parameters od=%p iov=%p iovlen=%jd "
639 "auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
640 "icv_off=%d ivp=%p\n", __func__, od, iov, iovlen,
641 auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
645 IOV_INIT(iov, data, data_i, data_l);
647 s = octeon_crypto_enable();
/* Start from the precomputed inner (key XOR ipad) state. */
650 CVMX_MT_HSH_IV(od->octo_hminner[0], 0);
651 CVMX_MT_HSH_IV(od->octo_hminner[1], 1);
/* Skip to the authenticated region. */
653 while (auth_off > 0) {
654 IOV_CONSUME(iov, data, data_i, data_l);
/* Hash 64 bits at a time. */
658 while (auth_len > 0) {
659 CVM_LOAD_MD5_UNIT(*data, next);
661 IOV_CONSUME(iov, data, data_i, data_l);
664 /* finish the hash */
665 CVMX_PREFETCH0(od->octo_hmouter);
/* Partial final word: copy the tail bytes and append the 0x80 pad. */
667 if (__predict_false(inplen)) {
669 uint8_t *p = (uint8_t *) & tmp;
673 p[inplen] = ((uint8_t *) data)[inplen];
675 CVM_LOAD_MD5_UNIT(tmp, next);
677 CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
680 CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
683 /* Finish Inner hash */
685 CVM_LOAD_MD5_UNIT(((uint64_t) 0x0ULL), next);
/* MD5 length field; ES64 presumably byte-swaps for MD5's
 * little-endian length encoding -- confirm against cvmx-asm.h. */
687 CVMX_ES64(tmp1, ((alen + 64) << 3));
688 CVM_LOAD_MD5_UNIT(tmp1, next);
690 /* Get the inner hash of HMAC */
691 CVMX_MF_HSH_IV(tmp1, 0);
692 CVMX_MF_HSH_IV(tmp2, 1);
694 /* Initialize hash unit */
695 CVMX_MT_HSH_IV(od->octo_hmouter[0], 0);
696 CVMX_MT_HSH_IV(od->octo_hmouter[1], 1);
/* Outer block: inner digest + pad + length (64+16 bytes total). */
698 CVMX_MT_HSH_DAT(tmp1, 0);
699 CVMX_MT_HSH_DAT(tmp2, 1);
700 CVMX_MT_HSH_DAT(0x8000000000000000ULL, 2);
705 CVMX_ES64(tmp1, ((64 + 16) << 3));
706 CVMX_MT_HSH_STARTMD5(tmp1);
/* Write the 96-bit truncated ICV at icv_off. */
709 IOV_INIT(iov, data, data_i, data_l);
710 while (icv_off > 0) {
711 IOV_CONSUME(iov, data, data_i, data_l);
714 CVMX_MF_HSH_IV(*data, 0);
715 IOV_CONSUME(iov, data, data_i, data_l);
716 CVMX_MF_HSH_IV(tmp1, 1);
717 *(uint32_t *)data = (uint32_t) (tmp1 >> 32);
719 octeon_crypto_disable(s);
723 /****************************************************************************/
/*
 * octo_null_sha1_encrypt() -- HMAC-SHA1 authentication only (null
 * cipher).
 *
 * Seeds the hash unit with the precomputed inner chaining value
 * (od->octo_hminner, 3 words for SHA-1), hashes auth_len bytes starting
 * at auth_off, applies SHA-1 padding with a bit length of
 * (auth_len + 64) -- the +64 accounts for the already-hashed ipad
 * block -- then runs the outer HMAC block from od->octo_hmouter and
 * writes a 12-byte truncated ICV at icv_off.  crypt_off/crypt_len/ivp
 * are unused in this path.
 *
 * Fix: the partial-tail path previously loaded words with
 * CVM_LOAD_MD5_UNIT (copy/paste from the MD5 variant).  The two macros
 * differ in the word that completes a 512-bit block: the MD5 variant
 * issues MT_HSH_STARTMD5, which would run an MD5 transform in the
 * middle of a SHA-1 computation and corrupt the digest.  Both loads
 * now use CVM_LOAD_SHA_UNIT, consistent with every other load in this
 * function.
 *
 * NOTE(review): lines are missing from this listing -- declarations
 * ('data', 'tmp', 's', 'inplen'), the error return, loop-counter
 * decrements, the tail byte-copy loop, the HSH_DATZ fills before the
 * outer STARTSHA, and closing braces are not visible.
 */
727 octo_null_sha1_encrypt(
728 struct octo_sess *od,
729 struct iovec *iov, size_t iovcnt, size_t iovlen,
730 int auth_off, int auth_len,
731 int crypt_off, int crypt_len,
732 int icv_off, uint8_t *ivp)
734 register int next = 0;
736 uint64_t tmp1, tmp2, tmp3;
737 int data_i, data_l, alen = auth_len;
740 dprintf("%s()\n", __func__);
742 if (__predict_false(od == NULL || iov==NULL || iovlen==0 ||
743 (auth_off & 0x7) || (auth_off + auth_len > iovlen))) {
744 dprintf("%s: Bad parameters od=%p iov=%p iovlen=%jd "
745 "auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
746 "icv_off=%d ivp=%p\n", __func__, od, iov, iovlen,
747 auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
751 IOV_INIT(iov, data, data_i, data_l);
753 s = octeon_crypto_enable();
/* Start from the precomputed inner (key XOR ipad) state. */
756 CVMX_MT_HSH_IV(od->octo_hminner[0], 0);
757 CVMX_MT_HSH_IV(od->octo_hminner[1], 1);
758 CVMX_MT_HSH_IV(od->octo_hminner[2], 2);
/* Skip to the authenticated region. */
760 while (auth_off > 0) {
761 IOV_CONSUME(iov, data, data_i, data_l);
/* Hash 64 bits at a time. */
765 while (auth_len > 0) {
766 CVM_LOAD_SHA_UNIT(*data, next);
768 IOV_CONSUME(iov, data, data_i, data_l);
771 /* finish the hash */
772 CVMX_PREFETCH0(od->octo_hmouter);
/* Partial final word: copy the tail bytes and append the 0x80 pad.
 * Fixed: use the SHA unit loader here, not the MD5 one. */
774 if (__predict_false(inplen)) {
776 uint8_t *p = (uint8_t *) & tmp;
780 p[inplen] = ((uint8_t *) data)[inplen];
782 CVM_LOAD_SHA_UNIT(tmp, next);
784 CVM_LOAD_SHA_UNIT(0x8000000000000000ULL, next);
787 CVM_LOAD_SHA_UNIT(0x8000000000000000ULL, next);
790 /* Finish Inner hash */
792 CVM_LOAD_SHA_UNIT(((uint64_t) 0x0ULL), next);
794 CVM_LOAD_SHA_UNIT((uint64_t) ((alen + 64) << 3), next);
796 /* Get the inner hash of HMAC */
797 CVMX_MF_HSH_IV(tmp1, 0);
798 CVMX_MF_HSH_IV(tmp2, 1);
800 CVMX_MF_HSH_IV(tmp3, 2);
802 /* Initialize hash unit */
803 CVMX_MT_HSH_IV(od->octo_hmouter[0], 0);
804 CVMX_MT_HSH_IV(od->octo_hmouter[1], 1);
805 CVMX_MT_HSH_IV(od->octo_hmouter[2], 2);
/* Outer block: 20-byte inner digest + 0x80 pad + length (64+20). */
807 CVMX_MT_HSH_DAT(tmp1, 0);
808 CVMX_MT_HSH_DAT(tmp2, 1);
809 tmp3 |= 0x0000000080000000;
810 CVMX_MT_HSH_DAT(tmp3, 2);
815 CVMX_MT_HSH_STARTSHA((uint64_t) ((64 + 20) << 3));
/* Write the 96-bit truncated ICV at icv_off. */
818 IOV_INIT(iov, data, data_i, data_l);
819 while (icv_off > 0) {
820 IOV_CONSUME(iov, data, data_i, data_l);
823 CVMX_MF_HSH_IV(*data, 0);
824 IOV_CONSUME(iov, data, data_i, data_l);
825 CVMX_MF_HSH_IV(tmp1, 1);
826 *(uint32_t *)data = (uint32_t) (tmp1 >> 32);
828 octeon_crypto_disable(s);
832 /****************************************************************************/
/*
 * octo_des_cbc_md5_encrypt() -- combined 3DES-CBC encryption and
 * HMAC-MD5 authentication in a single pass (encrypt-then-hash: each
 * 64-bit chunk is encrypted first, then fed to the hash unit).
 *
 * Offsets only need 4-byte alignment: each 64-bit unit is gathered
 * from two 32-bit loads into the 'mydata' scratch union ('data' is its
 * 64-bit view, 'data32' walks the iovecs), then scattered back after
 * processing.  The ICV is written as three 32-bit stores (12 bytes) at
 * icv_off.
 *
 * NOTE(review): lines are missing from this listing -- declarations
 * ('mydata', 'data32', 'tmp', 'tmp1', 'tmp2', 's', 'inplen'), error
 * returns, offset/length decrements, several if/else framing lines
 * (e.g. the crypt_len guard around the 3DES ops and the auth_off guard
 * around the MD5 load), the HSH_DATZ fills before the outer STARTMD5,
 * and closing braces are not visible.
 */
836 octo_des_cbc_md5_encrypt(
837 struct octo_sess *od,
838 struct iovec *iov, size_t iovcnt, size_t iovlen,
839 int auth_off, int auth_len,
840 int crypt_off, int crypt_len,
841 int icv_off, uint8_t *ivp)
843 register int next = 0;
848 uint64_t *data = &mydata.data64[0];
851 int data_i, data_l, alen = auth_len;
854 dprintf("%s()\n", __func__);
856 if (__predict_false(od == NULL || iov==NULL || iovlen==0 || ivp==NULL ||
857 (crypt_off & 0x3) || (crypt_off + crypt_len > iovlen) ||
860 (auth_off & 0x3) || (auth_off + auth_len > iovlen))) {
861 dprintf("%s: Bad parameters od=%p iov=%p iovlen=%jd "
862 "auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
863 "icv_off=%d ivp=%p\n", __func__, od, iov, iovlen,
864 auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
868 IOV_INIT(iov, data32, data_i, data_l);
871 CVMX_PREFETCH0(od->octo_enckey);
873 s = octeon_crypto_enable();
/* Load the 3DES key registers; replicate an 8-byte DES key. */
876 CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 0);
877 if (od->octo_encklen == 24) {
878 CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[1], 1);
879 CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
880 } else if (od->octo_encklen == 8) {
881 CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 1);
882 CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 2);
884 octeon_crypto_disable(s);
885 dprintf("%s: Bad key length %d\n", __func__, od->octo_encklen);
889 CVMX_MT_3DES_IV(* (uint64_t *) ivp);
/* Seed the hash unit with the precomputed inner HMAC state. */
892 CVMX_MT_HSH_IV(od->octo_hminner[0], 0);
893 CVMX_MT_HSH_IV(od->octo_hminner[1], 1);
/* Skip the region before both the cipher and auth start offsets. */
895 while (crypt_off > 0 && auth_off > 0) {
896 IOV_CONSUME(iov, data32, data_i, data_l);
/* Main loop: gather 2x32-bit words, encrypt and/or hash, scatter. */
901 while (crypt_len > 0 || auth_len > 0) {
902 uint32_t *first = data32;
903 mydata.data32[0] = *first;
904 IOV_CONSUME(iov, data32, data_i, data_l);
905 mydata.data32[1] = *data32;
906 if (crypt_off <= 0) {
908 CVMX_MT_3DES_ENC_CBC(*data);
909 CVMX_MF_3DES_RESULT(*data);
916 CVM_LOAD_MD5_UNIT(*data, next);
921 *first = mydata.data32[0];
922 *data32 = mydata.data32[1];
923 IOV_CONSUME(iov, data32, data_i, data_l);
926 /* finish the hash */
927 CVMX_PREFETCH0(od->octo_hmouter);
/* Partial final word: copy tail bytes, append the 0x80 pad. */
929 if (__predict_false(inplen)) {
931 uint8_t *p = (uint8_t *) & tmp;
935 p[inplen] = ((uint8_t *) data)[inplen];
937 CVM_LOAD_MD5_UNIT(tmp, next);
939 CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
942 CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
945 /* Finish Inner hash */
947 CVM_LOAD_MD5_UNIT(((uint64_t) 0x0ULL), next);
949 CVMX_ES64(tmp1, ((alen + 64) << 3));
950 CVM_LOAD_MD5_UNIT(tmp1, next);
952 /* Get the inner hash of HMAC */
953 CVMX_MF_HSH_IV(tmp1, 0);
954 CVMX_MF_HSH_IV(tmp2, 1);
956 /* Initialize hash unit */
957 CVMX_MT_HSH_IV(od->octo_hmouter[0], 0);
958 CVMX_MT_HSH_IV(od->octo_hmouter[1], 1);
/* Outer block: inner digest + pad + length (64+16 bytes total). */
960 CVMX_MT_HSH_DAT(tmp1, 0);
961 CVMX_MT_HSH_DAT(tmp2, 1);
962 CVMX_MT_HSH_DAT(0x8000000000000000ULL, 2);
967 CVMX_ES64(tmp1, ((64 + 16) << 3));
968 CVMX_MT_HSH_STARTMD5(tmp1);
/* Write the 96-bit truncated ICV as three 32-bit stores. */
971 IOV_INIT(iov, data32, data_i, data_l);
972 while (icv_off > 0) {
973 IOV_CONSUME(iov, data32, data_i, data_l);
976 CVMX_MF_HSH_IV(tmp1, 0);
977 *data32 = (uint32_t) (tmp1 >> 32);
978 IOV_CONSUME(iov, data32, data_i, data_l);
979 *data32 = (uint32_t) tmp1;
980 IOV_CONSUME(iov, data32, data_i, data_l);
981 CVMX_MF_HSH_IV(tmp1, 1);
982 *data32 = (uint32_t) (tmp1 >> 32);
984 octeon_crypto_disable(s);
/*
 * octo_des_cbc_md5_decrypt() -- combined HMAC-MD5 authentication and
 * 3DES-CBC decryption in a single pass.  Order within the main loop is
 * reversed relative to the encrypt variant: each chunk is hashed first
 * (over the ciphertext) and then decrypted.
 *
 * Offsets only need 4-byte alignment; 64-bit units are gathered from
 * two 32-bit loads via the 'mydata' scratch union and scattered back
 * after processing.  crypt_len must be a multiple of 8.  The 12-byte
 * ICV is written at icv_off as three 32-bit stores; comparison against
 * the received ICV is presumably done by the caller -- confirm.
 *
 * NOTE(review): lines are missing from this listing -- declarations
 * ('mydata', 'data32', 'tmp', 's', 'inplen'), error returns,
 * offset/length decrements, several if/else framing lines (e.g. the
 * auth_len guard on the MD5 load), and closing braces are not visible.
 */
989 octo_des_cbc_md5_decrypt(
990 struct octo_sess *od,
991 struct iovec *iov, size_t iovcnt, size_t iovlen,
992 int auth_off, int auth_len,
993 int crypt_off, int crypt_len,
994 int icv_off, uint8_t *ivp)
996 register int next = 0;
1001 uint64_t *data = &mydata.data64[0];
1003 uint64_t tmp1, tmp2;
1004 int data_i, data_l, alen = auth_len;
1007 dprintf("%s()\n", __func__);
1009 if (__predict_false(od == NULL || iov==NULL || iovlen==0 || ivp==NULL ||
1010 (crypt_off & 0x3) || (crypt_off + crypt_len > iovlen) ||
1011 (crypt_len & 0x7) ||
1013 (auth_off & 0x3) || (auth_off + auth_len > iovlen))) {
1014 dprintf("%s: Bad parameters od=%p iov=%p iovlen=%jd "
1015 "auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
1016 "icv_off=%d ivp=%p\n", __func__, od, iov, iovlen,
1017 auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
1021 IOV_INIT(iov, data32, data_i, data_l);
1023 CVMX_PREFETCH0(ivp);
1024 CVMX_PREFETCH0(od->octo_enckey);
1026 s = octeon_crypto_enable();
/* Load the 3DES key registers; replicate an 8-byte DES key. */
1029 CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 0);
1030 if (od->octo_encklen == 24) {
1031 CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[1], 1);
1032 CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
1033 } else if (od->octo_encklen == 8) {
1034 CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 1);
1035 CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 2);
1037 octeon_crypto_disable(s);
1038 dprintf("%s: Bad key length %d\n", __func__, od->octo_encklen);
1042 CVMX_MT_3DES_IV(* (uint64_t *) ivp);
/* Seed the hash unit with the precomputed inner HMAC state. */
1045 CVMX_MT_HSH_IV(od->octo_hminner[0], 0);
1046 CVMX_MT_HSH_IV(od->octo_hminner[1], 1);
/* Skip the region before both the cipher and auth start offsets. */
1048 while (crypt_off > 0 && auth_off > 0) {
1049 IOV_CONSUME(iov, data32, data_i, data_l);
/* Main loop: gather, hash ciphertext, then decrypt, then scatter. */
1054 while (crypt_len > 0 || auth_len > 0) {
1055 uint32_t *first = data32;
1056 mydata.data32[0] = *first;
1057 IOV_CONSUME(iov, data32, data_i, data_l);
1058 mydata.data32[1] = *data32;
1059 if (auth_off <= 0) {
1061 CVM_LOAD_MD5_UNIT(*data, next);
1066 if (crypt_off <= 0) {
1067 if (crypt_len > 0) {
1068 CVMX_MT_3DES_DEC_CBC(*data);
1069 CVMX_MF_3DES_RESULT(*data);
1074 *first = mydata.data32[0];
1075 *data32 = mydata.data32[1];
1076 IOV_CONSUME(iov, data32, data_i, data_l);
1079 /* finish the hash */
1080 CVMX_PREFETCH0(od->octo_hmouter);
/* Partial final word: copy tail bytes, append the 0x80 pad. */
1082 if (__predict_false(inplen)) {
1084 uint8_t *p = (uint8_t *) & tmp;
1088 p[inplen] = ((uint8_t *) data)[inplen];
1090 CVM_LOAD_MD5_UNIT(tmp, next);
1092 CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
1095 CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
1098 /* Finish Inner hash */
1100 CVM_LOAD_MD5_UNIT(((uint64_t) 0x0ULL), next);
1102 CVMX_ES64(tmp1, ((alen + 64) << 3));
1103 CVM_LOAD_MD5_UNIT(tmp1, next);
1105 /* Get the inner hash of HMAC */
1106 CVMX_MF_HSH_IV(tmp1, 0);
1107 CVMX_MF_HSH_IV(tmp2, 1);
1109 /* Initialize hash unit */
1110 CVMX_MT_HSH_IV(od->octo_hmouter[0], 0);
1111 CVMX_MT_HSH_IV(od->octo_hmouter[1], 1);
/* Outer block: inner digest + pad + zero fill + length (64+16). */
1113 CVMX_MT_HSH_DAT(tmp1, 0);
1114 CVMX_MT_HSH_DAT(tmp2, 1);
1115 CVMX_MT_HSH_DAT(0x8000000000000000ULL, 2);
1116 CVMX_MT_HSH_DATZ(3);
1117 CVMX_MT_HSH_DATZ(4);
1118 CVMX_MT_HSH_DATZ(5);
1119 CVMX_MT_HSH_DATZ(6);
1120 CVMX_ES64(tmp1, ((64 + 16) << 3));
1121 CVMX_MT_HSH_STARTMD5(tmp1);
/* Write the 96-bit truncated ICV as three 32-bit stores. */
1124 IOV_INIT(iov, data32, data_i, data_l);
1125 while (icv_off > 0) {
1126 IOV_CONSUME(iov, data32, data_i, data_l);
1129 CVMX_MF_HSH_IV(tmp1, 0);
1130 *data32 = (uint32_t) (tmp1 >> 32);
1131 IOV_CONSUME(iov, data32, data_i, data_l);
1132 *data32 = (uint32_t) tmp1;
1133 IOV_CONSUME(iov, data32, data_i, data_l);
1134 CVMX_MF_HSH_IV(tmp1, 1);
1135 *data32 = (uint32_t) (tmp1 >> 32);
1137 octeon_crypto_disable(s);
1141 /****************************************************************************/
/*
 * octo_des_cbc_sha1_encrypt() -- combined 3DES-CBC encryption and
 * HMAC-SHA1 authentication in a single pass (encrypt first, then hash
 * the ciphertext within each iteration of the main loop).
 *
 * Offsets only need 4-byte alignment; 64-bit units are gathered from
 * two 32-bit loads via the 'mydata' scratch union and scattered back.
 * crypt_len must be a multiple of 8.  SHA-1 uses three chaining-value
 * words; the outer block appends the 0x80 pad to the 20-byte inner
 * digest (tmp3 |= 0x80000000 in the low half of word 2) and a total
 * length of (64 + 20) bytes.  The 12-byte ICV is written at icv_off as
 * three 32-bit stores.
 *
 * NOTE(review): lines are missing from this listing -- declarations
 * ('mydata', 'data32', 'tmp', 's', 'inplen'), error returns,
 * offset/length decrements, some if/else framing (e.g. the auth_len
 * guard on the SHA load), and closing braces are not visible.
 */
1145 octo_des_cbc_sha1_encrypt(
1146 struct octo_sess *od,
1147 struct iovec *iov, size_t iovcnt, size_t iovlen,
1148 int auth_off, int auth_len,
1149 int crypt_off, int crypt_len,
1150 int icv_off, uint8_t *ivp)
1152 register int next = 0;
1157 uint64_t *data = &mydata.data64[0];
1159 uint64_t tmp1, tmp2, tmp3;
1160 int data_i, data_l, alen = auth_len;
1163 dprintf("%s()\n", __func__);
1165 if (__predict_false(od == NULL || iov==NULL || iovlen==0 || ivp==NULL ||
1166 (crypt_off & 0x3) || (crypt_off + crypt_len > iovlen) ||
1167 (crypt_len & 0x7) ||
1169 (auth_off & 0x3) || (auth_off + auth_len > iovlen))) {
1170 dprintf("%s: Bad parameters od=%p iov=%p iovlen=%jd "
1171 "auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
1172 "icv_off=%d ivp=%p\n", __func__, od, iov, iovlen,
1173 auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
1177 IOV_INIT(iov, data32, data_i, data_l);
1179 CVMX_PREFETCH0(ivp);
1180 CVMX_PREFETCH0(od->octo_enckey);
1182 s = octeon_crypto_enable();
/* Load the 3DES key registers; replicate an 8-byte DES key. */
1185 CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 0);
1186 if (od->octo_encklen == 24) {
1187 CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[1], 1);
1188 CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
1189 } else if (od->octo_encklen == 8) {
1190 CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 1);
1191 CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 2);
1193 octeon_crypto_disable(s);
1194 dprintf("%s: Bad key length %d\n", __func__, od->octo_encklen);
1198 CVMX_MT_3DES_IV(* (uint64_t *) ivp);
/* Seed the hash unit with the precomputed inner HMAC state. */
1201 CVMX_MT_HSH_IV(od->octo_hminner[0], 0);
1202 CVMX_MT_HSH_IV(od->octo_hminner[1], 1);
1203 CVMX_MT_HSH_IV(od->octo_hminner[2], 2);
/* Skip the region before both the cipher and auth start offsets. */
1205 while (crypt_off > 0 && auth_off > 0) {
1206 IOV_CONSUME(iov, data32, data_i, data_l);
/* Main loop: gather 2x32-bit words, encrypt, hash, scatter. */
1211 while (crypt_len > 0 || auth_len > 0) {
1212 uint32_t *first = data32;
1213 mydata.data32[0] = *first;
1214 IOV_CONSUME(iov, data32, data_i, data_l);
1215 mydata.data32[1] = *data32;
1216 if (crypt_off <= 0) {
1217 if (crypt_len > 0) {
1218 CVMX_MT_3DES_ENC_CBC(*data);
1219 CVMX_MF_3DES_RESULT(*data);
1224 if (auth_off <= 0) {
1226 CVM_LOAD_SHA_UNIT(*data, next);
1231 *first = mydata.data32[0];
1232 *data32 = mydata.data32[1];
1233 IOV_CONSUME(iov, data32, data_i, data_l);
1236 /* finish the hash */
1237 CVMX_PREFETCH0(od->octo_hmouter);
/* Partial final word: copy tail bytes, append the 0x80 pad. */
1239 if (__predict_false(inplen)) {
1241 uint8_t *p = (uint8_t *) & tmp;
1245 p[inplen] = ((uint8_t *) data)[inplen];
1247 CVM_LOAD_SHA_UNIT(tmp, next);
1249 CVM_LOAD_SHA_UNIT(0x8000000000000000ULL, next);
1252 CVM_LOAD_SHA_UNIT(0x8000000000000000ULL, next);
1255 /* Finish Inner hash */
1257 CVM_LOAD_SHA_UNIT(((uint64_t) 0x0ULL), next);
1259 CVM_LOAD_SHA_UNIT((uint64_t) ((alen + 64) << 3), next);
1261 /* Get the inner hash of HMAC */
1262 CVMX_MF_HSH_IV(tmp1, 0);
1263 CVMX_MF_HSH_IV(tmp2, 1);
1265 CVMX_MF_HSH_IV(tmp3, 2);
1267 /* Initialize hash unit */
1268 CVMX_MT_HSH_IV(od->octo_hmouter[0], 0);
1269 CVMX_MT_HSH_IV(od->octo_hmouter[1], 1);
1270 CVMX_MT_HSH_IV(od->octo_hmouter[2], 2);
/* Outer block: 20-byte inner digest + 0x80 pad + zero fill + length. */
1272 CVMX_MT_HSH_DAT(tmp1, 0);
1273 CVMX_MT_HSH_DAT(tmp2, 1);
1274 tmp3 |= 0x0000000080000000;
1275 CVMX_MT_HSH_DAT(tmp3, 2);
1276 CVMX_MT_HSH_DATZ(3);
1277 CVMX_MT_HSH_DATZ(4);
1278 CVMX_MT_HSH_DATZ(5);
1279 CVMX_MT_HSH_DATZ(6);
1280 CVMX_MT_HSH_STARTSHA((uint64_t) ((64 + 20) << 3));
/* Write the 96-bit truncated ICV as three 32-bit stores. */
1283 IOV_INIT(iov, data32, data_i, data_l);
1284 while (icv_off > 0) {
1285 IOV_CONSUME(iov, data32, data_i, data_l);
1288 CVMX_MF_HSH_IV(tmp1, 0);
1289 *data32 = (uint32_t) (tmp1 >> 32);
1290 IOV_CONSUME(iov, data32, data_i, data_l);
1291 *data32 = (uint32_t) tmp1;
1292 IOV_CONSUME(iov, data32, data_i, data_l);
1293 CVMX_MF_HSH_IV(tmp1, 1);
1294 *data32 = (uint32_t) (tmp1 >> 32);
1296 octeon_crypto_disable(s);
/*
 * octo_des_cbc_sha1_decrypt:
 *	Single-pass DES/3DES-CBC decryption combined with HMAC-SHA1
 *	authentication over an iovec chain, using the Octeon on-chip
 *	crypto coprocessor (the CVMX_MT_* / CVMX_MF_* macros move 64-bit
 *	operands into and out of the 3DES and HASH execution units).
 *	Because this is the decrypt side, each 8-byte block is fed to the
 *	SHA1 unit (authenticating the ciphertext) before it is decrypted —
 *	the reverse of the encrypt-side ordering.
 *	The truncated 96-bit HMAC is written back into the chain at
 *	icv_off.  Offsets/lengths are bytes from the start of the chain;
 *	crypt_off must be 4-byte aligned and crypt_len a multiple of 8.
 */
1301 octo_des_cbc_sha1_decrypt(
1302 struct octo_sess *od,
1303 struct iovec *iov, size_t iovcnt, size_t iovlen,
1304 int auth_off, int auth_len,
1305 int crypt_off, int crypt_len,
1306 int icv_off, uint8_t *ivp)
1308 register int next = 0;
1313 uint64_t *data = &mydata.data64[0];
1315 uint64_t tmp1, tmp2, tmp3;
1316 int data_i, data_l, alen = auth_len;
1319 dprintf("%s()\n", __func__);
/* Validate alignment and that both regions fit inside the chain. */
1321 if (__predict_false(od == NULL || iov==NULL || iovlen==0 || ivp==NULL ||
1322 (crypt_off & 0x3) || (crypt_off + crypt_len > iovlen) ||
1323 (crypt_len & 0x7) ||
1325 (auth_off & 0x3) || (auth_off + auth_len > iovlen))) {
1326 dprintf("%s: Bad parameters od=%p iov=%p iovlen=%jd "
1327 "auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
1328 "icv_off=%d ivp=%p\n", __func__, od, iov, iovlen,
1329 auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
1333 IOV_INIT(iov, data32, data_i, data_l);
1335 CVMX_PREFETCH0(ivp);
1336 CVMX_PREFETCH0(od->octo_enckey);
/* Claim the crypto coprocessor for the duration of the operation. */
1338 s = octeon_crypto_enable();
/*
 * Load the DES keys: a 24-byte key fills all three 3DES key slots;
 * an 8-byte (single DES) key is replicated into slots 1 and 2 so the
 * EDE sequence degenerates to plain DES.  Other lengths are rejected.
 */
1341 CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 0);
1342 if (od->octo_encklen == 24) {
1343 CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[1], 1);
1344 CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
1345 } else if (od->octo_encklen == 8) {
1346 CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 1);
1347 CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 2);
1349 octeon_crypto_disable(s);
1350 dprintf("%s: Bad key length %d\n", __func__, od->octo_encklen);
/* CBC IV for the cipher unit (8 bytes). */
1354 CVMX_MT_3DES_IV(* (uint64_t *) ivp);
/* Seed the hash unit with the precomputed inner-HMAC SHA1 state. */
1357 CVMX_MT_HSH_IV(od->octo_hminner[0], 0);
1358 CVMX_MT_HSH_IV(od->octo_hminner[1], 1);
1359 CVMX_MT_HSH_IV(od->octo_hminner[2], 2);
/* Skip leading bytes that precede both the auth and crypt regions. */
1361 while (crypt_off > 0 && auth_off > 0) {
1362 IOV_CONSUME(iov, data32, data_i, data_l);
/*
 * Main loop: gather 8 bytes (two 32-bit words, possibly split across
 * iovec segments) into mydata, authenticate then decrypt as the
 * respective offsets are reached, and scatter the result back.
 */
1367 while (crypt_len > 0 || auth_len > 0) {
1368 uint32_t *first = data32;
1369 mydata.data32[0] = *first;
1370 IOV_CONSUME(iov, data32, data_i, data_l);
1371 mydata.data32[1] = *data32;
/* Hash the ciphertext BEFORE decrypting it (decrypt direction). */
1372 if (auth_off <= 0) {
1374 CVM_LOAD_SHA_UNIT(*data, next);
1379 if (crypt_off <= 0) {
1380 if (crypt_len > 0) {
1381 CVMX_MT_3DES_DEC_CBC(*data);
1382 CVMX_MF_3DES_RESULT(*data);
1387 *first = mydata.data32[0];
1388 *data32 = mydata.data32[1];
1389 IOV_CONSUME(iov, data32, data_i, data_l);
1392 /* finish the hash */
1393 CVMX_PREFETCH0(od->octo_hmouter);
/* Handle a partial trailing block of auth data, then pad.
 * NOTE(review): the guard computing `inplen` is outside this view. */
1395 if (__predict_false(inplen)) {
1397 uint8_t *p = (uint8_t *) & tmp;
1401 p[inplen] = ((uint8_t *) data)[inplen];
1403 CVM_LOAD_SHA_UNIT(tmp, next);
/* SHA1 padding: 0x80 stop bit, zero fill, then 64-bit bit length. */
1405 CVM_LOAD_SHA_UNIT(0x8000000000000000ULL, next);
1408 CVM_LOAD_SHA_UNIT(0x8000000000000000ULL, next);
1411 /* Finish Inner hash */
1413 CVM_LOAD_SHA_UNIT(((uint64_t) 0x0ULL), next);
/* Inner-hash length includes the 64-byte ipad block; <<3 = bits. */
1415 CVM_LOAD_SHA_UNIT((uint64_t) ((alen + 64) << 3), next);
1417 /* Get the inner hash of HMAC */
1418 CVMX_MF_HSH_IV(tmp1, 0);
1419 CVMX_MF_HSH_IV(tmp2, 1);
1421 CVMX_MF_HSH_IV(tmp3, 2);
1423 /* Initialize hash unit */
/* Reseed with the precomputed outer-HMAC state and hash the 20-byte
 * inner digest: tmp1, tmp2, and the top half of tmp3 with the 0x80
 * padding bit OR'd directly into the low word. */
1424 CVMX_MT_HSH_IV(od->octo_hmouter[0], 0);
1425 CVMX_MT_HSH_IV(od->octo_hmouter[1], 1);
1426 CVMX_MT_HSH_IV(od->octo_hmouter[2], 2);
1428 CVMX_MT_HSH_DAT(tmp1, 0);
1429 CVMX_MT_HSH_DAT(tmp2, 1);
1430 tmp3 |= 0x0000000080000000;
1431 CVMX_MT_HSH_DAT(tmp3, 2);
1432 CVMX_MT_HSH_DATZ(3);
1433 CVMX_MT_HSH_DATZ(4);
1434 CVMX_MT_HSH_DATZ(5);
1435 CVMX_MT_HSH_DATZ(6);
/* Outer message = 64-byte opad block + 20-byte inner digest, in bits. */
1436 CVMX_MT_HSH_STARTSHA((uint64_t) ((64 + 20) << 3));
/* Walk the chain again and store the 96-bit (12-byte) truncated ICV. */
1438 IOV_INIT(iov, data32, data_i, data_l);
1439 while (icv_off > 0) {
1440 IOV_CONSUME(iov, data32, data_i, data_l);
1443 CVMX_MF_HSH_IV(tmp1, 0);
1444 *data32 = (uint32_t) (tmp1 >> 32);
1445 IOV_CONSUME(iov, data32, data_i, data_l);
1446 *data32 = (uint32_t) tmp1;
1447 IOV_CONSUME(iov, data32, data_i, data_l);
1448 CVMX_MF_HSH_IV(tmp1, 1);
1449 *data32 = (uint32_t) (tmp1 >> 32);
/* Release the coprocessor. */
1451 octeon_crypto_disable(s);
1455 /****************************************************************************/
/*
 * octo_aes_cbc_md5_encrypt:
 *	Single-pass AES-CBC encryption combined with HMAC-MD5
 *	authentication over an iovec chain on the Octeon crypto
 *	coprocessor.  Works on 16-byte AES blocks gathered as two 64-bit
 *	words (mydata[0]/mydata[1]); encrypt side hashes the CIPHERTEXT,
 *	so encryption runs before the MD5 unit is fed.  The truncated
 *	96-bit HMAC is written back into the chain at icv_off.
 */
1459 octo_aes_cbc_md5_encrypt(
1460 struct octo_sess *od,
1461 struct iovec *iov, size_t iovcnt, size_t iovlen,
1462 int auth_off, int auth_len,
1463 int crypt_off, int crypt_len,
1464 int icv_off, uint8_t *ivp)
1466 register int next = 0;
/* pdata/data alias the two 64-bit halves of the 16-byte AES block. */
1471 uint64_t *pdata = &mydata[0].data64[0];
1472 uint64_t *data = &mydata[1].data64[0];
1474 uint64_t tmp1, tmp2;
1475 int data_i, data_l, alen = auth_len;
1478 dprintf("%s()\n", __func__);
/* Validate alignment and that both regions fit inside the chain. */
1480 if (__predict_false(od == NULL || iov==NULL || iovlen==0 || ivp==NULL ||
1481 (crypt_off & 0x3) || (crypt_off + crypt_len > iovlen) ||
1482 (crypt_len & 0x7) ||
1484 (auth_off & 0x3) || (auth_off + auth_len > iovlen))) {
1485 dprintf("%s: Bad parameters od=%p iov=%p iovlen=%jd "
1486 "auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
1487 "icv_off=%d ivp=%p\n", __func__, od, iov, iovlen,
1488 auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
1492 IOV_INIT(iov, data32, data_i, data_l);
1494 CVMX_PREFETCH0(ivp);
1495 CVMX_PREFETCH0(od->octo_enckey);
/* Claim the crypto coprocessor. */
1497 s = octeon_crypto_enable();
/* Load the AES key: 16/24/32-byte keys fill 2/3/4 slots, unused
 * slots are zeroed; any other length is rejected. */
1500 CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[0], 0);
1501 CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[1], 1);
1503 if (od->octo_encklen == 16) {
1504 CVMX_MT_AES_KEY(0x0, 2);
1505 CVMX_MT_AES_KEY(0x0, 3);
1506 } else if (od->octo_encklen == 24) {
1507 CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
1508 CVMX_MT_AES_KEY(0x0, 3);
1509 } else if (od->octo_encklen == 32) {
1510 CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
1511 CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[3], 3);
1513 octeon_crypto_disable(s);
1514 dprintf("%s: Bad key length %d\n", __func__, od->octo_encklen);
/* Key length register encoding: (bytes / 8) - 1, i.e. 1/2/3. */
1517 CVMX_MT_AES_KEYLENGTH(od->octo_encklen / 8 - 1);
/* 16-byte CBC IV. */
1519 CVMX_MT_AES_IV(((uint64_t *) ivp)[0], 0);
1520 CVMX_MT_AES_IV(((uint64_t *) ivp)[1], 1);
/* Seed the hash unit with the precomputed inner-HMAC MD5 state
 * (MD5 state is 16 bytes -> only slots 0 and 1). */
1523 CVMX_MT_HSH_IV(od->octo_hminner[0], 0);
1524 CVMX_MT_HSH_IV(od->octo_hminner[1], 1);
/* Skip leading bytes that precede both regions. */
1526 while (crypt_off > 0 && auth_off > 0) {
1527 IOV_CONSUME(iov, data32, data_i, data_l);
/*
 * Main loop: gather a 16-byte block (four 32-bit words, possibly
 * straddling iovec segments, addresses remembered in pdata32[]),
 * encrypt then authenticate, and scatter the result back.
 */
1532 while (crypt_len > 0 || auth_len > 0) {
1533 uint32_t *pdata32[3];
1535 pdata32[0] = data32;
1536 mydata[0].data32[0] = *data32;
1537 IOV_CONSUME(iov, data32, data_i, data_l);
1539 pdata32[1] = data32;
1540 mydata[0].data32[1] = *data32;
1541 IOV_CONSUME(iov, data32, data_i, data_l);
1543 pdata32[2] = data32;
1544 mydata[1].data32[0] = *data32;
1545 IOV_CONSUME(iov, data32, data_i, data_l);
1547 mydata[1].data32[1] = *data32;
/* Encrypt first so the MD5 unit below hashes the ciphertext. */
1550 if (crypt_off <= 0) {
1551 if (crypt_len > 0) {
1552 CVMX_MT_AES_ENC_CBC0(*pdata);
1553 CVMX_MT_AES_ENC_CBC1(*data);
1554 CVMX_MF_AES_RESULT(*pdata, 0);
1555 CVMX_MF_AES_RESULT(*data, 1);
1561 if (auth_off <= 0) {
1563 CVM_LOAD_MD5_UNIT(*pdata, next);
1564 CVM_LOAD_MD5_UNIT(*data, next);
/* Scatter the (possibly encrypted) block back through the saved
 * per-word pointers. */
1570 *pdata32[0] = mydata[0].data32[0];
1571 *pdata32[1] = mydata[0].data32[1];
1572 *pdata32[2] = mydata[1].data32[0];
1573 *data32 = mydata[1].data32[1];
1575 IOV_CONSUME(iov, data32, data_i, data_l);
1578 /* finish the hash */
1579 CVMX_PREFETCH0(od->octo_hmouter);
/* Partial trailing auth bytes, then MD5 padding.
 * NOTE(review): the guard computing `inplen` is outside this view. */
1581 if (__predict_false(inplen)) {
1583 uint8_t *p = (uint8_t *) & tmp;
1587 p[inplen] = ((uint8_t *) data)[inplen];
1589 CVM_LOAD_MD5_UNIT(tmp, next);
1591 CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
1594 CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
1597 /* Finish Inner hash */
1599 CVM_LOAD_MD5_UNIT(((uint64_t) 0x0ULL), next);
/* CVMX_ES64 byte-swaps the bit length: MD5 length padding is
 * little-endian, unlike the SHA1 routines in this file. */
1601 CVMX_ES64(tmp1, ((alen + 64) << 3));
1602 CVM_LOAD_MD5_UNIT(tmp1, next);
1604 /* Get the inner hash of HMAC */
1605 CVMX_MF_HSH_IV(tmp1, 0);
1606 CVMX_MF_HSH_IV(tmp2, 1);
1608 /* Initialize hash unit */
/* Reseed with the outer-HMAC state and hash the 16-byte inner
 * digest followed by the MD5 stop bit and zero padding. */
1609 CVMX_MT_HSH_IV(od->octo_hmouter[0], 0);
1610 CVMX_MT_HSH_IV(od->octo_hmouter[1], 1);
1612 CVMX_MT_HSH_DAT(tmp1, 0);
1613 CVMX_MT_HSH_DAT(tmp2, 1);
1614 CVMX_MT_HSH_DAT(0x8000000000000000ULL, 2);
1615 CVMX_MT_HSH_DATZ(3);
1616 CVMX_MT_HSH_DATZ(4);
1617 CVMX_MT_HSH_DATZ(5);
1618 CVMX_MT_HSH_DATZ(6);
/* Outer message = 64-byte opad block + 16-byte digest (bits, LE). */
1619 CVMX_ES64(tmp1, ((64 + 16) << 3));
1620 CVMX_MT_HSH_STARTMD5(tmp1);
/* Walk the chain again and store the 96-bit truncated ICV. */
1623 IOV_INIT(iov, data32, data_i, data_l);
1624 while (icv_off > 0) {
1625 IOV_CONSUME(iov, data32, data_i, data_l);
1628 CVMX_MF_HSH_IV(tmp1, 0);
1629 *data32 = (uint32_t) (tmp1 >> 32);
1630 IOV_CONSUME(iov, data32, data_i, data_l);
1631 *data32 = (uint32_t) tmp1;
1632 IOV_CONSUME(iov, data32, data_i, data_l);
1633 CVMX_MF_HSH_IV(tmp1, 1);
1634 *data32 = (uint32_t) (tmp1 >> 32);
/* Release the coprocessor. */
1636 octeon_crypto_disable(s);
/*
 * octo_aes_cbc_md5_decrypt:
 *	Single-pass AES-CBC decryption combined with HMAC-MD5
 *	authentication over an iovec chain on the Octeon crypto
 *	coprocessor.  Mirror image of octo_aes_cbc_md5_encrypt: the MD5
 *	unit is fed BEFORE decryption so the ciphertext is what gets
 *	authenticated.  The truncated 96-bit HMAC is written back into
 *	the chain at icv_off.
 */
1641 octo_aes_cbc_md5_decrypt(
1642 struct octo_sess *od,
1643 struct iovec *iov, size_t iovcnt, size_t iovlen,
1644 int auth_off, int auth_len,
1645 int crypt_off, int crypt_len,
1646 int icv_off, uint8_t *ivp)
1648 register int next = 0;
/* pdata/data alias the two 64-bit halves of the 16-byte AES block. */
1653 uint64_t *pdata = &mydata[0].data64[0];
1654 uint64_t *data = &mydata[1].data64[0];
1656 uint64_t tmp1, tmp2;
1657 int data_i, data_l, alen = auth_len;
1660 dprintf("%s()\n", __func__);
/* Validate alignment and that both regions fit inside the chain. */
1662 if (__predict_false(od == NULL || iov==NULL || iovlen==0 || ivp==NULL ||
1663 (crypt_off & 0x3) || (crypt_off + crypt_len > iovlen) ||
1664 (crypt_len & 0x7) ||
1666 (auth_off & 0x3) || (auth_off + auth_len > iovlen))) {
1667 dprintf("%s: Bad parameters od=%p iov=%p iovlen=%jd "
1668 "auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
1669 "icv_off=%d ivp=%p\n", __func__, od, iov, iovlen,
1670 auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
1674 IOV_INIT(iov, data32, data_i, data_l);
1676 CVMX_PREFETCH0(ivp);
1677 CVMX_PREFETCH0(od->octo_enckey);
/* Claim the crypto coprocessor. */
1679 s = octeon_crypto_enable();
/* Load the AES key: 16/24/32-byte keys fill 2/3/4 slots, unused
 * slots are zeroed; any other length is rejected. */
1682 CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[0], 0);
1683 CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[1], 1);
1685 if (od->octo_encklen == 16) {
1686 CVMX_MT_AES_KEY(0x0, 2);
1687 CVMX_MT_AES_KEY(0x0, 3);
1688 } else if (od->octo_encklen == 24) {
1689 CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
1690 CVMX_MT_AES_KEY(0x0, 3);
1691 } else if (od->octo_encklen == 32) {
1692 CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
1693 CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[3], 3);
1695 octeon_crypto_disable(s);
1696 dprintf("%s: Bad key length %d\n", __func__, od->octo_encklen);
/* Key length register encoding: (bytes / 8) - 1, i.e. 1/2/3. */
1699 CVMX_MT_AES_KEYLENGTH(od->octo_encklen / 8 - 1);
/* 16-byte CBC IV. */
1701 CVMX_MT_AES_IV(((uint64_t *) ivp)[0], 0);
1702 CVMX_MT_AES_IV(((uint64_t *) ivp)[1], 1);
/* Seed the hash unit with the precomputed inner-HMAC MD5 state. */
1705 CVMX_MT_HSH_IV(od->octo_hminner[0], 0);
1706 CVMX_MT_HSH_IV(od->octo_hminner[1], 1);
/* Skip leading bytes that precede both regions. */
1708 while (crypt_off > 0 && auth_off > 0) {
1709 IOV_CONSUME(iov, data32, data_i, data_l);
/*
 * Main loop: gather a 16-byte block (word addresses remembered in
 * pdata32[]), authenticate the ciphertext, then decrypt, then
 * scatter the plaintext back.
 */
1714 while (crypt_len > 0 || auth_len > 0) {
1715 uint32_t *pdata32[3];
1717 pdata32[0] = data32;
1718 mydata[0].data32[0] = *data32;
1719 IOV_CONSUME(iov, data32, data_i, data_l);
1720 pdata32[1] = data32;
1721 mydata[0].data32[1] = *data32;
1722 IOV_CONSUME(iov, data32, data_i, data_l);
1723 pdata32[2] = data32;
1724 mydata[1].data32[0] = *data32;
1725 IOV_CONSUME(iov, data32, data_i, data_l);
1726 mydata[1].data32[1] = *data32;
/* Hash the ciphertext BEFORE decrypting it (decrypt direction). */
1728 if (auth_off <= 0) {
1730 CVM_LOAD_MD5_UNIT(*pdata, next);
1731 CVM_LOAD_MD5_UNIT(*data, next);
1737 if (crypt_off <= 0) {
1738 if (crypt_len > 0) {
1739 CVMX_MT_AES_DEC_CBC0(*pdata);
1740 CVMX_MT_AES_DEC_CBC1(*data);
1741 CVMX_MF_AES_RESULT(*pdata, 0);
1742 CVMX_MF_AES_RESULT(*data, 1);
/* Scatter the (possibly decrypted) block back in place. */
1748 *pdata32[0] = mydata[0].data32[0];
1749 *pdata32[1] = mydata[0].data32[1];
1750 *pdata32[2] = mydata[1].data32[0];
1751 *data32 = mydata[1].data32[1];
1753 IOV_CONSUME(iov, data32, data_i, data_l);
1756 /* finish the hash */
1757 CVMX_PREFETCH0(od->octo_hmouter);
/* Partial trailing auth bytes, then MD5 padding.
 * NOTE(review): the guard computing `inplen` is outside this view. */
1759 if (__predict_false(inplen)) {
1761 uint8_t *p = (uint8_t *) & tmp;
1765 p[inplen] = ((uint8_t *) data)[inplen];
1767 CVM_LOAD_MD5_UNIT(tmp, next);
1769 CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
1772 CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
1775 /* Finish Inner hash */
1777 CVM_LOAD_MD5_UNIT(((uint64_t) 0x0ULL), next);
/* CVMX_ES64 byte-swaps the bit length — MD5 padding is LE. */
1779 CVMX_ES64(tmp1, ((alen + 64) << 3));
1780 CVM_LOAD_MD5_UNIT(tmp1, next);
1782 /* Get the inner hash of HMAC */
1783 CVMX_MF_HSH_IV(tmp1, 0);
1784 CVMX_MF_HSH_IV(tmp2, 1);
1786 /* Initialize hash unit */
/* Reseed with the outer-HMAC state, hash the 16-byte inner digest
 * plus the MD5 stop bit and zero padding. */
1787 CVMX_MT_HSH_IV(od->octo_hmouter[0], 0);
1788 CVMX_MT_HSH_IV(od->octo_hmouter[1], 1);
1790 CVMX_MT_HSH_DAT(tmp1, 0);
1791 CVMX_MT_HSH_DAT(tmp2, 1);
1792 CVMX_MT_HSH_DAT(0x8000000000000000ULL, 2);
1793 CVMX_MT_HSH_DATZ(3);
1794 CVMX_MT_HSH_DATZ(4);
1795 CVMX_MT_HSH_DATZ(5);
1796 CVMX_MT_HSH_DATZ(6);
/* Outer message = 64-byte opad block + 16-byte digest (bits, LE). */
1797 CVMX_ES64(tmp1, ((64 + 16) << 3));
1798 CVMX_MT_HSH_STARTMD5(tmp1);
/* Walk the chain again and store the 96-bit truncated ICV. */
1801 IOV_INIT(iov, data32, data_i, data_l);
1802 while (icv_off > 0) {
1803 IOV_CONSUME(iov, data32, data_i, data_l);
1806 CVMX_MF_HSH_IV(tmp1, 0);
1807 *data32 = (uint32_t) (tmp1 >> 32);
1808 IOV_CONSUME(iov, data32, data_i, data_l);
1809 *data32 = (uint32_t) tmp1;
1810 IOV_CONSUME(iov, data32, data_i, data_l);
1811 CVMX_MF_HSH_IV(tmp1, 1);
1812 *data32 = (uint32_t) (tmp1 >> 32);
/* Release the coprocessor. */
1814 octeon_crypto_disable(s);
1818 /****************************************************************************/
/*
 * octo_aes_cbc_sha1_encrypt:
 *	Single-pass AES-CBC encryption combined with HMAC-SHA1
 *	authentication over an iovec chain on the Octeon crypto
 *	coprocessor.  Encrypt side: each 16-byte block is encrypted
 *	first, then the ciphertext is fed to the SHA1 unit.  The
 *	truncated 96-bit HMAC is written back into the chain at icv_off.
 */
1822 octo_aes_cbc_sha1_encrypt(
1823 struct octo_sess *od,
1824 struct iovec *iov, size_t iovcnt, size_t iovlen,
1825 int auth_off, int auth_len,
1826 int crypt_off, int crypt_len,
1827 int icv_off, uint8_t *ivp)
1829 register int next = 0;
/* pdata/data alias the two 64-bit halves of the 16-byte AES block. */
1834 uint64_t *pdata = &mydata[0].data64[0];
1835 uint64_t *data = &mydata[1].data64[0];
1837 uint64_t tmp1, tmp2, tmp3;
1838 int data_i, data_l, alen = auth_len;
1841 dprintf("%s()\n", __func__);
/* Validate alignment and that both regions fit inside the chain. */
1843 if (__predict_false(od == NULL || iov==NULL || iovlen==0 || ivp==NULL ||
1844 (crypt_off & 0x3) || (crypt_off + crypt_len > iovlen) ||
1845 (crypt_len & 0x7) ||
1847 (auth_off & 0x3) || (auth_off + auth_len > iovlen))) {
1848 dprintf("%s: Bad parameters od=%p iov=%p iovlen=%jd "
1849 "auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
1850 "icv_off=%d ivp=%p\n", __func__, od, iov, iovlen,
1851 auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
1855 IOV_INIT(iov, data32, data_i, data_l);
1857 CVMX_PREFETCH0(ivp);
1858 CVMX_PREFETCH0(od->octo_enckey);
/* Claim the crypto coprocessor. */
1860 s = octeon_crypto_enable();
/* Load the AES key: 16/24/32-byte keys fill 2/3/4 slots, unused
 * slots are zeroed; any other length is rejected. */
1863 CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[0], 0);
1864 CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[1], 1);
1866 if (od->octo_encklen == 16) {
1867 CVMX_MT_AES_KEY(0x0, 2);
1868 CVMX_MT_AES_KEY(0x0, 3);
1869 } else if (od->octo_encklen == 24) {
1870 CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
1871 CVMX_MT_AES_KEY(0x0, 3);
1872 } else if (od->octo_encklen == 32) {
1873 CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
1874 CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[3], 3);
1876 octeon_crypto_disable(s);
1877 dprintf("%s: Bad key length %d\n", __func__, od->octo_encklen);
/* Key length register encoding: (bytes / 8) - 1, i.e. 1/2/3. */
1880 CVMX_MT_AES_KEYLENGTH(od->octo_encklen / 8 - 1);
/* 16-byte CBC IV. */
1882 CVMX_MT_AES_IV(((uint64_t *) ivp)[0], 0);
1883 CVMX_MT_AES_IV(((uint64_t *) ivp)[1], 1);
/* Seed the hash unit with the precomputed inner-HMAC SHA1 state
 * (SHA1 state is 20 bytes -> slots 0-2). */
1886 CVMX_MT_HSH_IV(od->octo_hminner[0], 0);
1887 CVMX_MT_HSH_IV(od->octo_hminner[1], 1);
1888 CVMX_MT_HSH_IV(od->octo_hminner[2], 2);
/* Skip leading bytes that precede both regions. */
1890 while (crypt_off > 0 && auth_off > 0) {
1891 IOV_CONSUME(iov, data32, data_i, data_l);
/*
 * Main loop: gather a 16-byte block (word addresses remembered in
 * pdata32[]), encrypt, authenticate the ciphertext, scatter back.
 */
1896 while (crypt_len > 0 || auth_len > 0) {
1897 uint32_t *pdata32[3];
1899 pdata32[0] = data32;
1900 mydata[0].data32[0] = *data32;
1901 IOV_CONSUME(iov, data32, data_i, data_l);
1902 pdata32[1] = data32;
1903 mydata[0].data32[1] = *data32;
1904 IOV_CONSUME(iov, data32, data_i, data_l);
1905 pdata32[2] = data32;
1906 mydata[1].data32[0] = *data32;
1907 IOV_CONSUME(iov, data32, data_i, data_l);
1908 mydata[1].data32[1] = *data32;
/* Encrypt first so the SHA1 unit below hashes the ciphertext. */
1911 if (crypt_off <= 0) {
1912 if (crypt_len > 0) {
1913 CVMX_MT_AES_ENC_CBC0(*pdata);
1914 CVMX_MT_AES_ENC_CBC1(*data);
1915 CVMX_MF_AES_RESULT(*pdata, 0);
1916 CVMX_MF_AES_RESULT(*data, 1);
1922 if (auth_off <= 0) {
1924 CVM_LOAD_SHA_UNIT(*pdata, next);
1925 CVM_LOAD_SHA_UNIT(*data, next);
/* Scatter the (possibly encrypted) block back in place. */
1931 *pdata32[0] = mydata[0].data32[0];
1932 *pdata32[1] = mydata[0].data32[1];
1933 *pdata32[2] = mydata[1].data32[0];
1934 *data32 = mydata[1].data32[1];
1936 IOV_CONSUME(iov, data32, data_i, data_l);
1939 /* finish the hash */
1940 CVMX_PREFETCH0(od->octo_hmouter);
/* Partial trailing auth bytes, then SHA1 padding.
 * NOTE(review): the guard computing `inplen` is outside this view. */
1942 if (__predict_false(inplen)) {
1944 uint8_t *p = (uint8_t *) & tmp;
1948 p[inplen] = ((uint8_t *) data)[inplen];
1950 CVM_LOAD_SHA_UNIT(tmp, next);
1952 CVM_LOAD_SHA_UNIT(0x8000000000000000ULL, next);
1955 CVM_LOAD_SHA_UNIT(0x8000000000000000ULL, next);
1958 /* Finish Inner hash */
1960 CVM_LOAD_SHA_UNIT(((uint64_t) 0x0ULL), next);
/* Inner-hash length includes the 64-byte ipad block; <<3 = bits. */
1962 CVM_LOAD_SHA_UNIT((uint64_t) ((alen + 64) << 3), next);
1964 /* Get the inner hash of HMAC */
1965 CVMX_MF_HSH_IV(tmp1, 0);
1966 CVMX_MF_HSH_IV(tmp2, 1);
1968 CVMX_MF_HSH_IV(tmp3, 2);
1970 /* Initialize hash unit */
/* Reseed with the outer-HMAC state and hash the 20-byte inner
 * digest (0x80 stop bit OR'd into the low word of tmp3). */
1971 CVMX_MT_HSH_IV(od->octo_hmouter[0], 0);
1972 CVMX_MT_HSH_IV(od->octo_hmouter[1], 1);
1973 CVMX_MT_HSH_IV(od->octo_hmouter[2], 2);
1975 CVMX_MT_HSH_DAT(tmp1, 0);
1976 CVMX_MT_HSH_DAT(tmp2, 1);
1977 tmp3 |= 0x0000000080000000;
1978 CVMX_MT_HSH_DAT(tmp3, 2);
1979 CVMX_MT_HSH_DATZ(3);
1980 CVMX_MT_HSH_DATZ(4);
1981 CVMX_MT_HSH_DATZ(5);
1982 CVMX_MT_HSH_DATZ(6);
/* Outer message = 64-byte opad block + 20-byte digest, in bits. */
1983 CVMX_MT_HSH_STARTSHA((uint64_t) ((64 + 20) << 3));
/*
 * NOTE(review): the following second "finish the hash" section feeds
 * MD5-unit loads inside a SHA1 routine, after the outer SHA1 has
 * already been started.  It looks like copy-paste leftover from the
 * MD5 variant of this function — verify whether it perturbs the hash
 * state before the ICV is read below, and whether it can be removed.
 */
1985 /* finish the hash */
1986 CVMX_PREFETCH0(od->octo_hmouter);
1988 if (__predict_false(inplen)) {
1990 uint8_t *p = (uint8_t *) & tmp;
1994 p[inplen] = ((uint8_t *) data)[inplen];
1996 CVM_LOAD_MD5_UNIT(tmp, next);
1998 CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
2001 CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
/* Walk the chain again and store the 96-bit truncated ICV. */
2005 IOV_INIT(iov, data32, data_i, data_l);
2006 while (icv_off > 0) {
2007 IOV_CONSUME(iov, data32, data_i, data_l);
2010 CVMX_MF_HSH_IV(tmp1, 0);
2011 *data32 = (uint32_t) (tmp1 >> 32);
2012 IOV_CONSUME(iov, data32, data_i, data_l);
2013 *data32 = (uint32_t) tmp1;
2014 IOV_CONSUME(iov, data32, data_i, data_l);
2015 CVMX_MF_HSH_IV(tmp1, 1);
2016 *data32 = (uint32_t) (tmp1 >> 32);
/* Release the coprocessor. */
2018 octeon_crypto_disable(s);
/*
 * octo_aes_cbc_sha1_decrypt:
 *	Single-pass AES-CBC decryption combined with HMAC-SHA1
 *	authentication over an iovec chain on the Octeon crypto
 *	coprocessor.  Decrypt side: each 16-byte block is fed to the
 *	SHA1 unit (authenticating the ciphertext) before it is
 *	decrypted.  The truncated 96-bit HMAC is written back into the
 *	chain at icv_off.
 */
2023 octo_aes_cbc_sha1_decrypt(
2024 struct octo_sess *od,
2025 struct iovec *iov, size_t iovcnt, size_t iovlen,
2026 int auth_off, int auth_len,
2027 int crypt_off, int crypt_len,
2028 int icv_off, uint8_t *ivp)
2030 register int next = 0;
/* pdata/data alias the two 64-bit halves of the 16-byte AES block. */
2035 uint64_t *pdata = &mydata[0].data64[0];
2036 uint64_t *data = &mydata[1].data64[0];
2038 uint64_t tmp1, tmp2, tmp3;
2039 int data_i, data_l, alen = auth_len;
2042 dprintf("%s()\n", __func__);
/* Validate alignment and that both regions fit inside the chain. */
2044 if (__predict_false(od == NULL || iov==NULL || iovlen==0 || ivp==NULL ||
2045 (crypt_off & 0x3) || (crypt_off + crypt_len > iovlen) ||
2046 (crypt_len & 0x7) ||
2048 (auth_off & 0x3) || (auth_off + auth_len > iovlen))) {
2049 dprintf("%s: Bad parameters od=%p iov=%p iovlen=%jd "
2050 "auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
2051 "icv_off=%d ivp=%p\n", __func__, od, iov, iovlen,
2052 auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
2056 IOV_INIT(iov, data32, data_i, data_l);
2058 CVMX_PREFETCH0(ivp);
2059 CVMX_PREFETCH0(od->octo_enckey);
/* Claim the crypto coprocessor. */
2061 s = octeon_crypto_enable();
/* Load the AES key: 16/24/32-byte keys fill 2/3/4 slots, unused
 * slots are zeroed; any other length is rejected. */
2064 CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[0], 0);
2065 CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[1], 1);
2067 if (od->octo_encklen == 16) {
2068 CVMX_MT_AES_KEY(0x0, 2);
2069 CVMX_MT_AES_KEY(0x0, 3);
2070 } else if (od->octo_encklen == 24) {
2071 CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
2072 CVMX_MT_AES_KEY(0x0, 3);
2073 } else if (od->octo_encklen == 32) {
2074 CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
2075 CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[3], 3);
2077 octeon_crypto_disable(s);
2078 dprintf("%s: Bad key length %d\n", __func__, od->octo_encklen);
/* Key length register encoding: (bytes / 8) - 1, i.e. 1/2/3. */
2081 CVMX_MT_AES_KEYLENGTH(od->octo_encklen / 8 - 1);
/* 16-byte CBC IV. */
2083 CVMX_MT_AES_IV(((uint64_t *) ivp)[0], 0);
2084 CVMX_MT_AES_IV(((uint64_t *) ivp)[1], 1);
/* Seed the hash unit with the precomputed inner-HMAC SHA1 state. */
2087 CVMX_MT_HSH_IV(od->octo_hminner[0], 0);
2088 CVMX_MT_HSH_IV(od->octo_hminner[1], 1);
2089 CVMX_MT_HSH_IV(od->octo_hminner[2], 2);
/* Skip leading bytes that precede both regions. */
2091 while (crypt_off > 0 && auth_off > 0) {
2092 IOV_CONSUME(iov, data32, data_i, data_l);
/*
 * Main loop: gather a 16-byte block (word addresses remembered in
 * pdata32[]), authenticate the ciphertext, decrypt, scatter back.
 */
2097 while (crypt_len > 0 || auth_len > 0) {
2098 uint32_t *pdata32[3];
2100 pdata32[0] = data32;
2101 mydata[0].data32[0] = *data32;
2102 IOV_CONSUME(iov, data32, data_i, data_l);
2103 pdata32[1] = data32;
2104 mydata[0].data32[1] = *data32;
2105 IOV_CONSUME(iov, data32, data_i, data_l);
2106 pdata32[2] = data32;
2107 mydata[1].data32[0] = *data32;
2108 IOV_CONSUME(iov, data32, data_i, data_l);
2109 mydata[1].data32[1] = *data32;
/* Hash the ciphertext BEFORE decrypting it (decrypt direction). */
2111 if (auth_off <= 0) {
2113 CVM_LOAD_SHA_UNIT(*pdata, next);
2114 CVM_LOAD_SHA_UNIT(*data, next);
2120 if (crypt_off <= 0) {
2121 if (crypt_len > 0) {
2122 CVMX_MT_AES_DEC_CBC0(*pdata);
2123 CVMX_MT_AES_DEC_CBC1(*data);
2124 CVMX_MF_AES_RESULT(*pdata, 0);
2125 CVMX_MF_AES_RESULT(*data, 1);
/* Scatter the (possibly decrypted) block back in place. */
2131 *pdata32[0] = mydata[0].data32[0];
2132 *pdata32[1] = mydata[0].data32[1];
2133 *pdata32[2] = mydata[1].data32[0];
2134 *data32 = mydata[1].data32[1];
2136 IOV_CONSUME(iov, data32, data_i, data_l);
2139 /* finish the hash */
2140 CVMX_PREFETCH0(od->octo_hmouter);
/* Partial trailing auth bytes, then SHA1 padding.
 * NOTE(review): the guard computing `inplen` is outside this view. */
2142 if (__predict_false(inplen)) {
2144 uint8_t *p = (uint8_t *) & tmp;
2148 p[inplen] = ((uint8_t *) data)[inplen];
2150 CVM_LOAD_SHA_UNIT(tmp, next);
2152 CVM_LOAD_SHA_UNIT(0x8000000000000000ULL, next);
2155 CVM_LOAD_SHA_UNIT(0x8000000000000000ULL, next);
2158 /* Finish Inner hash */
2160 CVM_LOAD_SHA_UNIT(((uint64_t) 0x0ULL), next);
/* Inner-hash length includes the 64-byte ipad block; <<3 = bits. */
2162 CVM_LOAD_SHA_UNIT((uint64_t) ((alen + 64) << 3), next);
2164 /* Get the inner hash of HMAC */
2165 CVMX_MF_HSH_IV(tmp1, 0);
2166 CVMX_MF_HSH_IV(tmp2, 1);
2168 CVMX_MF_HSH_IV(tmp3, 2);
2170 /* Initialize hash unit */
/* Reseed with the outer-HMAC state and hash the 20-byte inner
 * digest (0x80 stop bit OR'd into the low word of tmp3). */
2171 CVMX_MT_HSH_IV(od->octo_hmouter[0], 0);
2172 CVMX_MT_HSH_IV(od->octo_hmouter[1], 1);
2173 CVMX_MT_HSH_IV(od->octo_hmouter[2], 2);
2175 CVMX_MT_HSH_DAT(tmp1, 0);
2176 CVMX_MT_HSH_DAT(tmp2, 1);
2177 tmp3 |= 0x0000000080000000;
2178 CVMX_MT_HSH_DAT(tmp3, 2);
2179 CVMX_MT_HSH_DATZ(3);
2180 CVMX_MT_HSH_DATZ(4);
2181 CVMX_MT_HSH_DATZ(5);
2182 CVMX_MT_HSH_DATZ(6);
/* Outer message = 64-byte opad block + 20-byte digest, in bits. */
2183 CVMX_MT_HSH_STARTSHA((uint64_t) ((64 + 20) << 3));
/*
 * NOTE(review): the following second "finish the hash" section feeds
 * MD5-unit loads inside a SHA1 routine, after the outer SHA1 has
 * already been started — apparently the same copy-paste leftover as
 * in octo_aes_cbc_sha1_encrypt.  Verify whether it perturbs the hash
 * state before the ICV is read below, and whether it can be removed.
 */
2185 /* finish the hash */
2186 CVMX_PREFETCH0(od->octo_hmouter);
2188 if (__predict_false(inplen)) {
2190 uint8_t *p = (uint8_t *) & tmp;
2194 p[inplen] = ((uint8_t *) data)[inplen];
2196 CVM_LOAD_MD5_UNIT(tmp, next);
2198 CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
2201 CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
/* Walk the chain again and store the 96-bit truncated ICV. */
2205 IOV_INIT(iov, data32, data_i, data_l);
2206 while (icv_off > 0) {
2207 IOV_CONSUME(iov, data32, data_i, data_l);
2210 CVMX_MF_HSH_IV(tmp1, 0);
2211 *data32 = (uint32_t) (tmp1 >> 32);
2212 IOV_CONSUME(iov, data32, data_i, data_l);
2213 *data32 = (uint32_t) tmp1;
2214 IOV_CONSUME(iov, data32, data_i, data_l);
2215 CVMX_MF_HSH_IV(tmp1, 1);
2216 *data32 = (uint32_t) (tmp1 >> 32);
/* Release the coprocessor. */
2218 octeon_crypto_disable(s);
2222 /****************************************************************************/