/* $OpenBSD: cryptosoft.c,v 1.35 2002/04/26 08:43:50 deraadt Exp $ */
 * The author of this code is Angelos D. Keromytis (angelos@cis.upenn.edu)
 * Copyright (c) 2002-2006 Sam Leffler, Errno Consulting
 * This code was written by Angelos D. Keromytis in Athens, Greece, in
 * February 2000. Network Security Technologies Inc. (NSTI) kindly
 * supported the development of this code.
 * Copyright (c) 2000, 2001 Angelos D. Keromytis
 * Copyright (c) 2014 The FreeBSD Foundation
 * All rights reserved.
 * Portions of this software were developed by John-Mark Gurney
 * under sponsorship of the FreeBSD Foundation and
 * Rubicon Communications, LLC (Netgate).
 * Permission to use, copy, and modify this software with or without fee
 * is hereby granted, provided that this entire notice is included in
 * all source code copies of any software which is or includes a copy or
 * modification of this software.
 * THIS SOFTWARE IS BEING PROVIDED "AS IS", WITHOUT ANY EXPRESS OR
 * IMPLIED WARRANTY. IN PARTICULAR, NONE OF THE AUTHORS MAKES ANY
 * REPRESENTATION OR WARRANTY OF ANY KIND CONCERNING THE
 * MERCHANTABILITY OF THIS SOFTWARE OR ITS FITNESS FOR ANY PARTICULAR
#include <sys/cdefs.h>
__FBSDID("$FreeBSD$");
#include <sys/param.h>
#include <sys/systm.h>
#include <sys/malloc.h>
#include <sys/module.h>
#include <sys/sysctl.h>
#include <sys/errno.h>
#include <sys/random.h>
#include <sys/kernel.h>
#include <sys/rwlock.h>
#include <sys/endian.h>
#include <sys/limits.h>
#include <crypto/blowfish/blowfish.h>
#include <crypto/sha1.h>
#include <opencrypto/rmd160.h>
#include <opencrypto/cast.h>
#include <opencrypto/skipjack.h>
#include <opencrypto/cryptodev.h>
#include <opencrypto/cryptosoft.h>
#include <opencrypto/xform.h>
#include "cryptodev_if.h"
static int32_t swcr_id;
u_int8_t hmac_ipad_buffer[HMAC_MAX_BLOCK_LEN];
u_int8_t hmac_opad_buffer[HMAC_MAX_BLOCK_LEN];
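/*
 * NB: these pad buffers are filled with HMAC_IPAD_VAL/HMAC_OPAD_VAL in
 * swcr_attach(); swcr_authprepare() uses them to pad keys shorter than
 * the hash block size out to a full block.
 */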
static int swcr_encdec(struct cryptodesc *, struct swcr_data *, caddr_t, int);
static int swcr_authcompute(struct cryptodesc *, struct swcr_data *, caddr_t, int);
static int swcr_authenc(struct cryptop *crp);
static int swcr_compdec(struct cryptodesc *, struct swcr_data *, caddr_t, int);
static void swcr_freesession(device_t dev, crypto_session_t cses);
 * Apply a symmetric encryption/decryption algorithm.
swcr_encdec(struct cryptodesc *crd, struct swcr_data *sw, caddr_t buf,
        unsigned char iv[EALG_MAX_BLOCK_LEN], blk[EALG_MAX_BLOCK_LEN];
        unsigned char *ivp, *nivp, iv2[EALG_MAX_BLOCK_LEN];
        struct enc_xform *exf;
        int i, j, k, blks, ind, count, ivlen;
        struct uio *uio, uiolcl;
        struct iovec iovlcl[4];
        blks = exf->blocksize;
        /* Check for non-padded data */
        if (crd->crd_len % blks)
        if (crd->crd_alg == CRYPTO_AES_ICM &&
            (crd->crd_flags & CRD_F_IV_EXPLICIT) == 0)
        /* Initialize the IV */
        if (crd->crd_flags & CRD_F_ENCRYPT) {
                /* IV explicitly provided ? */
                if (crd->crd_flags & CRD_F_IV_EXPLICIT)
                        bcopy(crd->crd_iv, iv, ivlen);
                        arc4rand(iv, ivlen, 0);
                /* Do we need to write the IV */
                if (!(crd->crd_flags & CRD_F_IV_PRESENT))
                        crypto_copyback(flags, buf, crd->crd_inject, ivlen, iv);
        } else {        /* Decryption */
                /* IV explicitly provided ? */
                if (crd->crd_flags & CRD_F_IV_EXPLICIT)
                        bcopy(crd->crd_iv, iv, ivlen);
                        crypto_copydata(flags, buf, crd->crd_inject, ivlen, iv);
        if (crd->crd_flags & CRD_F_KEY_EXPLICIT) {
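                /*
                 * Re-key on the fly: drop any previously expanded key
                 * schedule before installing the per-operation key.
                 */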
                if (sw->sw_kschedule)
                        exf->zerokey(&(sw->sw_kschedule));
                error = exf->setkey(&sw->sw_kschedule,
                    crd->crd_key, crd->crd_klen / 8);
        iovcnt = nitems(iovlcl);
        if ((flags & CRYPTO_F_IMBUF) != 0) {
                error = crypto_mbuftoiov((struct mbuf *)buf, &iov, &iovcnt,
                uio->uio_iovcnt = iovcnt;
        } else if ((flags & CRYPTO_F_IOV) != 0)
                uio = (struct uio *)buf;
                iov[0].iov_base = buf;
                iov[0].iov_len = crd->crd_skip + crd->crd_len;
                 * xforms that provide a reinit method perform all IV
                 * handling themselves.
                exf->reinit(sw->sw_kschedule, iv);
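        /*
         * For xforms without a reinit method, CBC chaining is done by hand
         * below: on encrypt each block is XORed with the previous ciphertext
         * (or the IV) before being encrypted; on decrypt the ciphertext block
         * is saved first, decrypted, and then XORed with the previous
         * ciphertext.  The iv/iv2 buffers alternate as scratch space for
         * that previous block.
         */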
        count = crd->crd_skip;
        ind = cuio_getptr(uio, count, &k);
                 * If there's insufficient data at the end of
                 * an iovec, we have to do some copying.
                if (uio->uio_iov[ind].iov_len < k + blks &&
                    uio->uio_iov[ind].iov_len != k) {
                        cuio_copydata(uio, count, blks, blk);
                        /* Actual encryption/decryption */
                        if (crd->crd_flags & CRD_F_ENCRYPT) {
                                exf->encrypt(sw->sw_kschedule,
                                exf->decrypt(sw->sw_kschedule,
                        } else if (crd->crd_flags & CRD_F_ENCRYPT) {
                                /* XOR with previous block */
                                for (j = 0; j < blks; j++)
                                exf->encrypt(sw->sw_kschedule, blk);
                                 * Keep encrypted block for XOR'ing
                                bcopy(blk, iv, blks);
                        } else {        /* decrypt */
                                 * Keep encrypted block for XOR'ing
                                nivp = (ivp == iv) ? iv2 : iv;
                                bcopy(blk, nivp, blks);
                                exf->decrypt(sw->sw_kschedule, blk);
                                /* XOR with previous block */
                                for (j = 0; j < blks; j++)
                        /* Copy back decrypted block */
                        cuio_copyback(uio, count, blks, blk);
                        /* Advance pointer */
                        ind = cuio_getptr(uio, count, &k);
                        /* Could be done... */
                while (uio->uio_iov[ind].iov_len >= k + blks && i > 0) {
                            uio->uio_iov[ind].iov_len - (size_t)k);
                        idat = (uint8_t *)uio->uio_iov[ind].iov_base + k;
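                        /*
                         * Process as many whole blocks as fit in this iovec
                         * in place; xforms providing encrypt_multi/
                         * decrypt_multi are handed a run of blocks at once,
                         * otherwise we go one block at a time.
                         */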
                        if ((crd->crd_flags & CRD_F_ENCRYPT) != 0 &&
                            exf->encrypt_multi == NULL)
                                exf->encrypt(sw->sw_kschedule,
                        else if ((crd->crd_flags & CRD_F_ENCRYPT) != 0) {
                                nb = rounddown(rem, blks);
                                exf->encrypt_multi(sw->sw_kschedule,
                        } else if (exf->decrypt_multi == NULL)
                                exf->decrypt(sw->sw_kschedule,
                                nb = rounddown(rem, blks);
                                exf->decrypt_multi(sw->sw_kschedule,
                        } else if (crd->crd_flags & CRD_F_ENCRYPT) {
                                /* XOR with previous block/IV */
                                for (j = 0; j < blks; j++)
                                exf->encrypt(sw->sw_kschedule, idat);
                        } else {        /* decrypt */
                                 * Keep encrypted block to be used
                                 * in next block's processing.
                                nivp = (ivp == iv) ? iv2 : iv;
                                bcopy(idat, nivp, blks);
                                exf->decrypt(sw->sw_kschedule, idat);
                                /* XOR with previous block/IV */
                                for (j = 0; j < blks; j++)
                 * Advance to the next iov if the end of the current iov
                 * is aligned with the end of a cipher block.
                 * Note that the code is equivalent to calling:
                 *      ind = cuio_getptr(uio, count, &k);
                if (i > 0 && k == uio->uio_iov[ind].iov_len) {
                        if (ind >= uio->uio_iovcnt) {
        free(iov, M_CRYPTO_DATA);
swcr_authprepare(struct auth_hash *axf, struct swcr_data *sw, u_char *key,
        case CRYPTO_MD5_HMAC:
        case CRYPTO_SHA1_HMAC:
        case CRYPTO_SHA2_224_HMAC:
        case CRYPTO_SHA2_256_HMAC:
        case CRYPTO_SHA2_384_HMAC:
        case CRYPTO_SHA2_512_HMAC:
        case CRYPTO_NULL_HMAC:
        case CRYPTO_RIPEMD160_HMAC:
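                /*
                 * Standard HMAC key schedule: seed sw_ictx with the hash of
                 * (key ^ ipad) padded to a full block and sw_octx with
                 * (key ^ opad).  The final XOR loop below restores the
                 * caller's key in place.
                 */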
                for (k = 0; k < klen; k++)
                        key[k] ^= HMAC_IPAD_VAL;
                axf->Init(sw->sw_ictx);
                axf->Update(sw->sw_ictx, key, klen);
                axf->Update(sw->sw_ictx, hmac_ipad_buffer, axf->blocksize - klen);
                for (k = 0; k < klen; k++)
                        key[k] ^= (HMAC_IPAD_VAL ^ HMAC_OPAD_VAL);
                axf->Init(sw->sw_octx);
                axf->Update(sw->sw_octx, key, klen);
                axf->Update(sw->sw_octx, hmac_opad_buffer, axf->blocksize - klen);
                for (k = 0; k < klen; k++)
                        key[k] ^= HMAC_OPAD_VAL;
        case CRYPTO_MD5_KPDK:
        case CRYPTO_SHA1_KPDK:
                 * We need a buffer that can hold an md5 and a sha1 result
                 * just to throw it away.
                 * What we do here is the initial part of:
                 *   ALGO( key, keyfill, .. )
                 * adding the key to sw_ictx and abusing Final() to get the
                 * In addition we abuse sw_octx to save the key so that it can
                 * be appended to the data later, in swcr_authcompute().
                u_char buf[SHA1_RESULTLEN];
                bcopy(key, sw->sw_octx, klen);
                axf->Init(sw->sw_ictx);
                axf->Update(sw->sw_ictx, key, klen);
                axf->Final(buf, sw->sw_ictx);
                axf->Setkey(sw->sw_ictx, key, klen);
                axf->Init(sw->sw_ictx);
                printf("%s: CRD_F_KEY_EXPLICIT flag given, but algorithm %d "
                    "doesn't use keys.\n", __func__, axf->type);
 * Compute keyed-hash authenticator.
swcr_authcompute(struct cryptodesc *crd, struct swcr_data *sw, caddr_t buf,
        unsigned char aalg[HASH_MAX_LEN];
        struct auth_hash *axf;
        if (sw->sw_ictx == NULL)
        if (crd->crd_flags & CRD_F_KEY_EXPLICIT)
                swcr_authprepare(axf, sw, crd->crd_key, crd->crd_klen);
        bcopy(sw->sw_ictx, &ctx, axf->ctxsize);
        err = crypto_apply(flags, buf, crd->crd_skip, crd->crd_len,
            (int (*)(void *, void *, unsigned int))axf->Update, (caddr_t)&ctx);
        switch (sw->sw_alg) {
        case CRYPTO_SHA2_224:
        case CRYPTO_SHA2_256:
        case CRYPTO_SHA2_384:
        case CRYPTO_SHA2_512:
                axf->Final(aalg, &ctx);
        case CRYPTO_MD5_HMAC:
        case CRYPTO_SHA1_HMAC:
        case CRYPTO_SHA2_224_HMAC:
        case CRYPTO_SHA2_256_HMAC:
        case CRYPTO_SHA2_384_HMAC:
        case CRYPTO_SHA2_512_HMAC:
        case CRYPTO_RIPEMD160_HMAC:
                if (sw->sw_octx == NULL)
                axf->Final(aalg, &ctx);
                bcopy(sw->sw_octx, &ctx, axf->ctxsize);
                axf->Update(&ctx, aalg, axf->hashsize);
                axf->Final(aalg, &ctx);
        case CRYPTO_MD5_KPDK:
        case CRYPTO_SHA1_KPDK:
                /* If we have no key saved, return error. */
                if (sw->sw_octx == NULL)
                 * Add the trailing copy of the key (see comment in
                 * swcr_authprepare()) after the data:
                 *   ALGO( .., key, algofill )
                 * and let Final() do the proper, natural "algofill"
                axf->Update(&ctx, sw->sw_octx, sw->sw_klen);
                axf->Final(aalg, &ctx);
        case CRYPTO_NULL_HMAC:
                axf->Final(aalg, &ctx);
        /* Inject the authentication data */
        crypto_copyback(flags, buf, crd->crd_inject,
            sw->sw_mlen == 0 ? axf->hashsize : sw->sw_mlen, aalg);
CTASSERT(INT_MAX <= (1ll<<39) - 256);   /* GCM: plain text < 2^39-256 */
CTASSERT(INT_MAX <= (uint64_t)-1);      /* GCM: associated data <= 2^64-1 */
 * Apply a combined encryption-authentication transformation
swcr_authenc(struct cryptop *crp)
        uint32_t blkbuf[howmany(EALG_MAX_BLOCK_LEN, sizeof(uint32_t))];
        u_char *blk = (u_char *)blkbuf;
        u_char aalg[AALG_MAX_RESULT_LEN];
        u_char uaalg[AALG_MAX_RESULT_LEN];
        u_char iv[EALG_MAX_BLOCK_LEN];
        struct cryptodesc *crd, *crda = NULL, *crde = NULL;
        struct swcr_data *sw, *swa, *swe = NULL;
        struct auth_hash *axf = NULL;
        struct enc_xform *exf = NULL;
        caddr_t buf = (caddr_t)crp->crp_buf;
        int aadlen, blksz, i, ivlen, len, iskip, oskip, r;
        ivlen = blksz = iskip = oskip = 0;
        for (crd = crp->crp_desc; crd; crd = crd->crd_next) {
                for (sw = crypto_get_driver_session(crp->crp_session);
                    sw && sw->sw_alg != crd->crd_alg;
                switch (sw->sw_alg) {
                case CRYPTO_AES_NIST_GCM_16:
                case CRYPTO_AES_NIST_GMAC:
                case CRYPTO_AES_128_NIST_GMAC:
                case CRYPTO_AES_192_NIST_GMAC:
                case CRYPTO_AES_256_NIST_GMAC:
                        if (swa->sw_ictx == NULL)
                        bcopy(swa->sw_ictx, &ctx, axf->ctxsize);
                        blksz = axf->blocksize;
        if (crde == NULL || crda == NULL)
        if (crde->crd_alg == CRYPTO_AES_NIST_GCM_16 &&
            (crde->crd_flags & CRD_F_IV_EXPLICIT) == 0)
        if (crde->crd_klen != crda->crd_klen)
        /* Initialize the IV */
        if (crde->crd_flags & CRD_F_ENCRYPT) {
                /* IV explicitly provided ? */
                if (crde->crd_flags & CRD_F_IV_EXPLICIT)
                        bcopy(crde->crd_iv, iv, ivlen);
                        arc4rand(iv, ivlen, 0);
                /* Do we need to write the IV */
                if (!(crde->crd_flags & CRD_F_IV_PRESENT))
                        crypto_copyback(crp->crp_flags, buf, crde->crd_inject,
        } else {        /* Decryption */
                /* IV explicitly provided ? */
                if (crde->crd_flags & CRD_F_IV_EXPLICIT)
                        bcopy(crde->crd_iv, iv, ivlen);
                        crypto_copydata(crp->crp_flags, buf, crde->crd_inject,
        /* Supply MAC with IV */
                axf->Reinit(&ctx, iv, ivlen);
        /* Supply MAC with AAD */
        aadlen = crda->crd_len;
        for (i = iskip; i < crda->crd_len; i += blksz) {
                len = MIN(crda->crd_len - i, blksz - oskip);
                crypto_copydata(crp->crp_flags, buf, crda->crd_skip + i, len,
                bzero(blk + len + oskip, blksz - len - oskip);
                axf->Update(&ctx, blk, blksz);
                oskip = 0; /* reset initial output offset */
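        /* Prime the cipher with the per-request IV/nonce before the payload. */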
        exf->reinit(swe->sw_kschedule, iv);
        /* Do encryption/decryption with MAC */
        for (i = 0; i < crde->crd_len; i += len) {
                if (exf->encrypt_multi != NULL) {
                        len = rounddown(crde->crd_len - i, blksz);
                        len = MIN(len, sizeof(blkbuf));
                len = MIN(crde->crd_len - i, len);
                crypto_copydata(crp->crp_flags, buf, crde->crd_skip + i, len,
                if (crde->crd_flags & CRD_F_ENCRYPT) {
                        if (exf->encrypt_multi != NULL)
                                exf->encrypt_multi(swe->sw_kschedule, blk,
                                exf->encrypt(swe->sw_kschedule, blk);
                        axf->Update(&ctx, blk, len);
                        crypto_copyback(crp->crp_flags, buf,
                            crde->crd_skip + i, len, blk);
                        axf->Update(&ctx, blk, len);
        /* Do any required special finalization */
        switch (crda->crd_alg) {
        case CRYPTO_AES_128_NIST_GMAC:
        case CRYPTO_AES_192_NIST_GMAC:
        case CRYPTO_AES_256_NIST_GMAC:
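                /*
                 * GCM/GMAC finalization: hash a final block carrying the bit
                 * lengths of the AAD and of the ciphertext in big-endian
                 * form.  Only the low 32 bits of each 64-bit length field
                 * need to be written here; lengths are ints, and the
                 * CTASSERTs above check them against the GCM limits.
                 */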
                blkp = (uint32_t *)blk + 1;
                *blkp = htobe32(aadlen * 8);
                blkp = (uint32_t *)blk + 3;
                *blkp = htobe32(crde->crd_len * 8);
                axf->Update(&ctx, blk, blksz);
        axf->Final(aalg, &ctx);
        if (!(crde->crd_flags & CRD_F_ENCRYPT)) {
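                /*
                 * Decrypting: verify the received tag (in constant time)
                 * before writing any plaintext back.
                 */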
                crypto_copydata(crp->crp_flags, buf, crda->crd_inject,
                    axf->hashsize, uaalg);
                r = timingsafe_bcmp(aalg, uaalg, axf->hashsize);
                /* tag matches, decrypt data */
                for (i = 0; i < crde->crd_len; i += blksz) {
                        len = MIN(crde->crd_len - i, blksz);
                        crypto_copydata(crp->crp_flags, buf,
                            crde->crd_skip + i, len, blk);
                        exf->decrypt(swe->sw_kschedule, blk);
                        crypto_copyback(crp->crp_flags, buf,
                            crde->crd_skip + i, len, blk);
        /* Inject the authentication data */
        crypto_copyback(crp->crp_flags, buf, crda->crd_inject,
            axf->hashsize, aalg);
 * Apply a compression/decompression algorithm
swcr_compdec(struct cryptodesc *crd, struct swcr_data *sw,
    caddr_t buf, int flags)
        u_int8_t *data, *out;
        struct comp_algo *cxf;
        /*
         * The (de)compression routines need the whole input in one
         * contiguous buffer, so copy the data out of the crypto buffer
         * first.
         */
        data = malloc(crd->crd_len, M_CRYPTO_DATA, M_NOWAIT);
        crypto_copydata(flags, buf, crd->crd_skip, crd->crd_len, data);
        if (crd->crd_flags & CRD_F_COMP)
                result = cxf->compress(data, crd->crd_len, &out);
                result = cxf->decompress(data, crd->crd_len, &out);
        free(data, M_CRYPTO_DATA);
        /*
         * Copy back the (de)compressed data; m_copyback will extend the
         * mbuf as necessary.
         */
        sw->sw_size = result;
        /* Check the compressed size when doing compression */
        if (crd->crd_flags & CRD_F_COMP) {
                if (result >= crd->crd_len) {
                        /* Compression was useless, we lost time */
                        free(out, M_CRYPTO_DATA);
        crypto_copyback(flags, buf, crd->crd_skip, result, out);
        if (result < crd->crd_len) {
                adj = result - crd->crd_len;
                if (flags & CRYPTO_F_IMBUF) {
                        adj = result - crd->crd_len;
                        m_adj((struct mbuf *)buf, adj);
                } else if (flags & CRYPTO_F_IOV) {
                        struct uio *uio = (struct uio *)buf;
                        adj = crd->crd_len - result;
                        ind = uio->uio_iovcnt - 1;
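                        /*
                         * Trim adj bytes off the end of the uio by shrinking
                         * (and, if needed, emptying) iovecs from the tail.
                         */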
                        while (adj > 0 && ind >= 0) {
                                if (adj < uio->uio_iov[ind].iov_len) {
                                        uio->uio_iov[ind].iov_len -= adj;
                                adj -= uio->uio_iov[ind].iov_len;
                                uio->uio_iov[ind].iov_len = 0;
        free(out, M_CRYPTO_DATA);
 * Generate a new software session.
swcr_newsession(device_t dev, crypto_session_t cses, struct cryptoini *cri)
        struct swcr_data **swd, *ses;
        struct auth_hash *axf;
        struct enc_xform *txf;
        struct comp_algo *cxf;
        if (cses == NULL || cri == NULL)
        ses = crypto_get_driver_session(cses);
                *swd = malloc(sizeof(struct swcr_data),
                    M_CRYPTO_DATA, M_WAITOK | M_ZERO);
                        swcr_freesession(dev, cses);
                switch (cri->cri_alg) {
                        txf = &enc_xform_des;
                case CRYPTO_3DES_CBC:
                        txf = &enc_xform_3des;
                        txf = &enc_xform_blf;
                case CRYPTO_CAST_CBC:
                        txf = &enc_xform_cast5;
                case CRYPTO_SKIPJACK_CBC:
                        txf = &enc_xform_skipjack;
                case CRYPTO_RIJNDAEL128_CBC:
                        txf = &enc_xform_rijndael128;
                        txf = &enc_xform_aes_xts;
                        txf = &enc_xform_aes_icm;
                case CRYPTO_AES_NIST_GCM_16:
                        txf = &enc_xform_aes_nist_gcm;
                case CRYPTO_AES_NIST_GMAC:
                        txf = &enc_xform_aes_nist_gmac;
                        (*swd)->sw_exf = txf;
                case CRYPTO_CAMELLIA_CBC:
                        txf = &enc_xform_camellia;
                case CRYPTO_NULL_CBC:
                        txf = &enc_xform_null;
                case CRYPTO_CHACHA20:
                        txf = &enc_xform_chacha20;
                        if (cri->cri_key != NULL) {
                                error = txf->setkey(&((*swd)->sw_kschedule),
                                    cri->cri_key, cri->cri_klen / 8);
                                swcr_freesession(dev, cses);
                        (*swd)->sw_exf = txf;
                case CRYPTO_MD5_HMAC:
                        axf = &auth_hash_hmac_md5;
                case CRYPTO_SHA1_HMAC:
                        axf = &auth_hash_hmac_sha1;
                case CRYPTO_SHA2_224_HMAC:
                        axf = &auth_hash_hmac_sha2_224;
                case CRYPTO_SHA2_256_HMAC:
                        axf = &auth_hash_hmac_sha2_256;
                case CRYPTO_SHA2_384_HMAC:
                        axf = &auth_hash_hmac_sha2_384;
                case CRYPTO_SHA2_512_HMAC:
                        axf = &auth_hash_hmac_sha2_512;
                case CRYPTO_NULL_HMAC:
                        axf = &auth_hash_null;
                case CRYPTO_RIPEMD160_HMAC:
                        axf = &auth_hash_hmac_ripemd_160;
                        (*swd)->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA,
                        if ((*swd)->sw_ictx == NULL) {
                                swcr_freesession(dev, cses);
                        (*swd)->sw_octx = malloc(axf->ctxsize, M_CRYPTO_DATA,
                        if ((*swd)->sw_octx == NULL) {
                                swcr_freesession(dev, cses);
                        if (cri->cri_key != NULL) {
                                swcr_authprepare(axf, *swd, cri->cri_key,
                        (*swd)->sw_mlen = cri->cri_mlen;
                        (*swd)->sw_axf = axf;
                case CRYPTO_MD5_KPDK:
                        axf = &auth_hash_key_md5;
                case CRYPTO_SHA1_KPDK:
                        axf = &auth_hash_key_sha1;
                        (*swd)->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA,
                        if ((*swd)->sw_ictx == NULL) {
                                swcr_freesession(dev, cses);
                        (*swd)->sw_octx = malloc(cri->cri_klen / 8,
                            M_CRYPTO_DATA, M_NOWAIT);
                        if ((*swd)->sw_octx == NULL) {
                                swcr_freesession(dev, cses);
                        /* Store the key so we can "append" it to the payload */
                        if (cri->cri_key != NULL) {
                                swcr_authprepare(axf, *swd, cri->cri_key,
                        (*swd)->sw_mlen = cri->cri_mlen;
                        (*swd)->sw_axf = axf;
                        axf = &auth_hash_md5;
                        axf = &auth_hash_sha1;
                case CRYPTO_SHA2_224:
                        axf = &auth_hash_sha2_224;
                case CRYPTO_SHA2_256:
                        axf = &auth_hash_sha2_256;
                case CRYPTO_SHA2_384:
                        axf = &auth_hash_sha2_384;
                case CRYPTO_SHA2_512:
                        axf = &auth_hash_sha2_512;
                        (*swd)->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA,
                        if ((*swd)->sw_ictx == NULL) {
                                swcr_freesession(dev, cses);
                        axf->Init((*swd)->sw_ictx);
                        (*swd)->sw_mlen = cri->cri_mlen;
                        (*swd)->sw_axf = axf;
                case CRYPTO_AES_128_NIST_GMAC:
                        axf = &auth_hash_nist_gmac_aes_128;
                case CRYPTO_AES_192_NIST_GMAC:
                        axf = &auth_hash_nist_gmac_aes_192;
                case CRYPTO_AES_256_NIST_GMAC:
                        axf = &auth_hash_nist_gmac_aes_256;
                        len = cri->cri_klen / 8;
                        if (len != 16 && len != 24 && len != 32) {
                                swcr_freesession(dev, cses);
                        (*swd)->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA,
                        if ((*swd)->sw_ictx == NULL) {
                                swcr_freesession(dev, cses);
                        axf->Init((*swd)->sw_ictx);
                        axf->Setkey((*swd)->sw_ictx, cri->cri_key, len);
                        (*swd)->sw_axf = axf;
                        axf = &auth_hash_blake2b;
                        axf = &auth_hash_blake2s;
                        (*swd)->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA,
                        if ((*swd)->sw_ictx == NULL) {
                                swcr_freesession(dev, cses);
                        axf->Setkey((*swd)->sw_ictx, cri->cri_key,
                        axf->Init((*swd)->sw_ictx);
                        (*swd)->sw_axf = axf;
                case CRYPTO_DEFLATE_COMP:
                        cxf = &comp_algo_deflate;
                        (*swd)->sw_cxf = cxf;
                        swcr_freesession(dev, cses);
                (*swd)->sw_alg = cri->cri_alg;
                swd = &((*swd)->sw_next);
swcr_freesession(device_t dev, crypto_session_t cses)
        struct swcr_data *ses, *swd, *next;
        struct enc_xform *txf;
        struct auth_hash *axf;
        ses = crypto_get_driver_session(cses);
        for (swd = ses; swd != NULL; swd = next) {
                switch (swd->sw_alg) {
                case CRYPTO_DES_CBC:
                case CRYPTO_3DES_CBC:
                case CRYPTO_BLF_CBC:
                case CRYPTO_CAST_CBC:
                case CRYPTO_SKIPJACK_CBC:
                case CRYPTO_RIJNDAEL128_CBC:
                case CRYPTO_AES_XTS:
                case CRYPTO_AES_ICM:
                case CRYPTO_AES_NIST_GCM_16:
                case CRYPTO_AES_NIST_GMAC:
                case CRYPTO_CAMELLIA_CBC:
                case CRYPTO_NULL_CBC:
                case CRYPTO_CHACHA20:
                        if (swd->sw_kschedule)
                                txf->zerokey(&(swd->sw_kschedule));
                case CRYPTO_MD5_HMAC:
                case CRYPTO_SHA1_HMAC:
                case CRYPTO_SHA2_224_HMAC:
                case CRYPTO_SHA2_256_HMAC:
                case CRYPTO_SHA2_384_HMAC:
                case CRYPTO_SHA2_512_HMAC:
                case CRYPTO_RIPEMD160_HMAC:
                case CRYPTO_NULL_HMAC:
                                bzero(swd->sw_ictx, axf->ctxsize);
                                free(swd->sw_ictx, M_CRYPTO_DATA);
                                bzero(swd->sw_octx, axf->ctxsize);
                                free(swd->sw_octx, M_CRYPTO_DATA);
                case CRYPTO_MD5_KPDK:
                case CRYPTO_SHA1_KPDK:
                                bzero(swd->sw_ictx, axf->ctxsize);
                                free(swd->sw_ictx, M_CRYPTO_DATA);
                                bzero(swd->sw_octx, swd->sw_klen);
                                free(swd->sw_octx, M_CRYPTO_DATA);
                case CRYPTO_BLAKE2B:
                case CRYPTO_BLAKE2S:
                case CRYPTO_SHA2_224:
                case CRYPTO_SHA2_256:
                case CRYPTO_SHA2_384:
                case CRYPTO_SHA2_512:
                                explicit_bzero(swd->sw_ictx, axf->ctxsize);
                                free(swd->sw_ictx, M_CRYPTO_DATA);
                case CRYPTO_DEFLATE_COMP:
                /* OCF owns and frees the primary session object */
                        free(swd, M_CRYPTO_DATA);
 * Process a software request.
swcr_process(device_t dev, struct cryptop *crp, int hint)
        struct cryptodesc *crd;
        struct swcr_data *sw, *ses;
        if (crp->crp_desc == NULL || crp->crp_buf == NULL) {
                crp->crp_etype = EINVAL;
        ses = crypto_get_driver_session(crp->crp_session);
        /* Go through crypto descriptors, processing as we go */
        for (crd = crp->crp_desc; crd; crd = crd->crd_next) {
                 * Find the crypto context.
                 * XXX Note that the logic here prevents us from having
                 * XXX the same algorithm multiple times in a session
                 * XXX (or rather, we can but it won't give us the right
                 * XXX results). To do that, we'd need some way of differentiating
                 * XXX between the various instances of an algorithm (so we can
                 * XXX locate the correct crypto context).
                for (sw = ses; sw && sw->sw_alg != crd->crd_alg;
                /* No such context ? */
                        crp->crp_etype = EINVAL;
                switch (sw->sw_alg) {
                case CRYPTO_DES_CBC:
                case CRYPTO_3DES_CBC:
                case CRYPTO_BLF_CBC:
                case CRYPTO_CAST_CBC:
                case CRYPTO_SKIPJACK_CBC:
                case CRYPTO_RIJNDAEL128_CBC:
                case CRYPTO_AES_XTS:
                case CRYPTO_AES_ICM:
                case CRYPTO_CAMELLIA_CBC:
                case CRYPTO_CHACHA20:
                        if ((crp->crp_etype = swcr_encdec(crd, sw,
                            crp->crp_buf, crp->crp_flags)) != 0)
                case CRYPTO_NULL_CBC:
                case CRYPTO_MD5_HMAC:
                case CRYPTO_SHA1_HMAC:
                case CRYPTO_SHA2_224_HMAC:
                case CRYPTO_SHA2_256_HMAC:
                case CRYPTO_SHA2_384_HMAC:
                case CRYPTO_SHA2_512_HMAC:
                case CRYPTO_RIPEMD160_HMAC:
                case CRYPTO_NULL_HMAC:
                case CRYPTO_MD5_KPDK:
                case CRYPTO_SHA1_KPDK:
                case CRYPTO_SHA2_224:
                case CRYPTO_SHA2_256:
                case CRYPTO_SHA2_384:
                case CRYPTO_SHA2_512:
                case CRYPTO_BLAKE2B:
                case CRYPTO_BLAKE2S:
                        if ((crp->crp_etype = swcr_authcompute(crd, sw,
                            crp->crp_buf, crp->crp_flags)) != 0)
                case CRYPTO_AES_NIST_GCM_16:
                case CRYPTO_AES_NIST_GMAC:
                case CRYPTO_AES_128_NIST_GMAC:
                case CRYPTO_AES_192_NIST_GMAC:
                case CRYPTO_AES_256_NIST_GMAC:
                        crp->crp_etype = swcr_authenc(crp);
                case CRYPTO_DEFLATE_COMP:
                        if ((crp->crp_etype = swcr_compdec(crd, sw,
                            crp->crp_buf, crp->crp_flags)) != 0)
                        crp->crp_olen = (int)sw->sw_size;
                        /* Unknown/unsupported algorithm */
                        crp->crp_etype = EINVAL;
swcr_identify(driver_t *drv, device_t parent)
        /* NB: order 10 is so we get attached after h/w devices */
        if (device_find_child(parent, "cryptosoft", -1) == NULL &&
            BUS_ADD_CHILD(parent, 10, "cryptosoft", 0) == 0)
                panic("cryptosoft: could not attach");
swcr_probe(device_t dev)
        device_set_desc(dev, "software crypto");
        return (BUS_PROBE_NOWILDCARD);
swcr_attach(device_t dev)
        memset(hmac_ipad_buffer, HMAC_IPAD_VAL, HMAC_MAX_BLOCK_LEN);
        memset(hmac_opad_buffer, HMAC_OPAD_VAL, HMAC_MAX_BLOCK_LEN);
        swcr_id = crypto_get_driverid(dev, sizeof(struct swcr_data),
            CRYPTOCAP_F_SOFTWARE | CRYPTOCAP_F_SYNC);
                device_printf(dev, "cannot initialize!\n");
#define REGISTER(alg) \
        crypto_register(swcr_id, alg, 0, 0)
        REGISTER(CRYPTO_DES_CBC);
        REGISTER(CRYPTO_3DES_CBC);
        REGISTER(CRYPTO_BLF_CBC);
        REGISTER(CRYPTO_CAST_CBC);
        REGISTER(CRYPTO_SKIPJACK_CBC);
        REGISTER(CRYPTO_NULL_CBC);
        REGISTER(CRYPTO_MD5_HMAC);
        REGISTER(CRYPTO_SHA1_HMAC);
        REGISTER(CRYPTO_SHA2_224_HMAC);
        REGISTER(CRYPTO_SHA2_256_HMAC);
        REGISTER(CRYPTO_SHA2_384_HMAC);
        REGISTER(CRYPTO_SHA2_512_HMAC);
        REGISTER(CRYPTO_RIPEMD160_HMAC);
        REGISTER(CRYPTO_NULL_HMAC);
        REGISTER(CRYPTO_MD5_KPDK);
        REGISTER(CRYPTO_SHA1_KPDK);
        REGISTER(CRYPTO_MD5);
        REGISTER(CRYPTO_SHA1);
        REGISTER(CRYPTO_SHA2_224);
        REGISTER(CRYPTO_SHA2_256);
        REGISTER(CRYPTO_SHA2_384);
        REGISTER(CRYPTO_SHA2_512);
        REGISTER(CRYPTO_RIJNDAEL128_CBC);
        REGISTER(CRYPTO_AES_XTS);
        REGISTER(CRYPTO_AES_ICM);
        REGISTER(CRYPTO_AES_NIST_GCM_16);
        REGISTER(CRYPTO_AES_NIST_GMAC);
        REGISTER(CRYPTO_AES_128_NIST_GMAC);
        REGISTER(CRYPTO_AES_192_NIST_GMAC);
        REGISTER(CRYPTO_AES_256_NIST_GMAC);
        REGISTER(CRYPTO_CAMELLIA_CBC);
        REGISTER(CRYPTO_DEFLATE_COMP);
        REGISTER(CRYPTO_BLAKE2B);
        REGISTER(CRYPTO_BLAKE2S);
        REGISTER(CRYPTO_CHACHA20);
swcr_detach(device_t dev)
        crypto_unregister_all(swcr_id);
static device_method_t swcr_methods[] = {
        DEVMETHOD(device_identify, swcr_identify),
        DEVMETHOD(device_probe, swcr_probe),
        DEVMETHOD(device_attach, swcr_attach),
        DEVMETHOD(device_detach, swcr_detach),
        DEVMETHOD(cryptodev_newsession, swcr_newsession),
        DEVMETHOD(cryptodev_freesession, swcr_freesession),
        DEVMETHOD(cryptodev_process, swcr_process),
static driver_t swcr_driver = {
        0,              /* NB: no softc */
static devclass_t swcr_devclass;
 * NB: We explicitly reference the crypto module so we
 * get the necessary ordering when built as a loadable
 * module. This is required because we bundle the crypto
 * module code together with the cryptosoft driver (otherwise
 * normal module dependencies would handle things).
extern int crypto_modevent(struct module *, int, void *);
/* XXX where to attach */
DRIVER_MODULE(cryptosoft, nexus, swcr_driver, swcr_devclass, crypto_modevent, 0);
MODULE_VERSION(cryptosoft, 1);
MODULE_DEPEND(cryptosoft, crypto, 1, 1, 1);