1 /* $OpenBSD: cryptosoft.c,v 1.35 2002/04/26 08:43:50 deraadt Exp $ */
4 * The author of this code is Angelos D. Keromytis (angelos@cis.upenn.edu)
5 * Copyright (c) 2002-2006 Sam Leffler, Errno Consulting
7 * This code was written by Angelos D. Keromytis in Athens, Greece, in
8 * February 2000. Network Security Technologies Inc. (NSTI) kindly
9 * supported the development of this code.
11 * Copyright (c) 2000, 2001 Angelos D. Keromytis
12 * Copyright (c) 2014 The FreeBSD Foundation
13 * All rights reserved.
15 * Portions of this software were developed by John-Mark Gurney
16 * under sponsorship of the FreeBSD Foundation and
17 * Rubicon Communications, LLC (Netgate).
19 * Permission to use, copy, and modify this software with or without fee
20 * is hereby granted, provided that this entire notice is included in
21 * all source code copies of any software which is or includes a copy or
22 * modification of this software.
24 * THIS SOFTWARE IS BEING PROVIDED "AS IS", WITHOUT ANY EXPRESS OR
25 * IMPLIED WARRANTY. IN PARTICULAR, NONE OF THE AUTHORS MAKES ANY
26 * REPRESENTATION OR WARRANTY OF ANY KIND CONCERNING THE
27 * MERCHANTABILITY OF THIS SOFTWARE OR ITS FITNESS FOR ANY PARTICULAR
31 #include <sys/cdefs.h>
32 __FBSDID("$FreeBSD$");
34 #include <sys/param.h>
35 #include <sys/systm.h>
36 #include <sys/malloc.h>
38 #include <sys/module.h>
39 #include <sys/sysctl.h>
40 #include <sys/errno.h>
41 #include <sys/random.h>
42 #include <sys/kernel.h>
45 #include <sys/rwlock.h>
46 #include <sys/endian.h>
47 #include <sys/limits.h>
48 #include <sys/mutex.h>
50 #include <crypto/blowfish/blowfish.h>
51 #include <crypto/sha1.h>
52 #include <opencrypto/rmd160.h>
53 #include <opencrypto/cast.h>
54 #include <opencrypto/skipjack.h>
57 #include <opencrypto/cryptodev.h>
58 #include <opencrypto/cryptosoft.h>
59 #include <opencrypto/xform.h>
63 #include "cryptodev_if.h"
65 _Static_assert(AES_CCM_IV_LEN == AES_GCM_IV_LEN,
66 "AES_GCM_IV_LEN must currently be the same as AES_CCM_IV_LEN");
/* Driver id handed back by crypto_get_driverid() in swcr_attach(). */
68 static int32_t swcr_id;

/*
 * HMAC pad-fill buffers, filled with HMAC_IPAD_VAL / HMAC_OPAD_VAL in
 * swcr_attach(); used by swcr_authprepare() to pad keys shorter than
 * the hash block size.
 */
70 u_int8_t hmac_ipad_buffer[HMAC_MAX_BLOCK_LEN];
71 u_int8_t hmac_opad_buffer[HMAC_MAX_BLOCK_LEN];

/* Forward declarations for the per-descriptor processing helpers. */
73 static int swcr_encdec(struct cryptodesc *, struct swcr_data *, caddr_t, int);
74 static int swcr_authcompute(struct cryptodesc *, struct swcr_data *, caddr_t, int);
75 static int swcr_authenc(struct cryptop *crp);
76 static int swcr_compdec(struct cryptodesc *, struct swcr_data *, caddr_t, int);
77 static void swcr_freesession(device_t dev, crypto_session_t cses);
/*
 * swcr_encdec() -- apply a symmetric cipher to the region described by
 * 'crd' within 'buf'; direction is selected by CRD_F_ENCRYPT.
 *
 * Visible responsibilities:
 *  - reject payloads not a multiple of the cipher block size;
 *  - obtain the IV: caller-supplied (CRD_F_IV_EXPLICIT), freshly random
 *    via arc4rand() on encrypt, or copied out of the buffer on decrypt;
 *  - honor per-request rekeying (CRD_F_KEY_EXPLICIT);
 *  - view mbuf chains, uios, and flat buffers uniformly as a struct uio
 *    and walk the payload block by block, falling back to a bounce copy
 *    when a cipher block straddles an iovec boundary.
 *
 * NOTE(review): this listing has elided source lines; comments below
 * describe only the statements that are visible here.
 */
80 * Apply a symmetric encryption/decryption algorithm.
83 swcr_encdec(struct cryptodesc *crd, struct swcr_data *sw, caddr_t buf,
86 unsigned char iv[EALG_MAX_BLOCK_LEN], blk[EALG_MAX_BLOCK_LEN];
87 unsigned char *ivp, *nivp, iv2[EALG_MAX_BLOCK_LEN];
88 struct enc_xform *exf;
89 int i, j, k, blks, ind, count, ivlen;
90 struct uio *uio, uiolcl;
/* Small on-stack iovec array used when no conversion is needed. */
91 struct iovec iovlcl[4];
99 blks = exf->blocksize;
/* Payload must be a whole number of cipher blocks. */
102 /* Check for non-padded data */
103 if (crd->crd_len % blks)
/* AES-ICM (CTR) requires the caller to supply the IV explicitly. */
106 if (crd->crd_alg == CRYPTO_AES_ICM &&
107 (crd->crd_flags & CRD_F_IV_EXPLICIT) == 0)
110 /* Initialize the IV */
111 if (crd->crd_flags & CRD_F_ENCRYPT) {
112 /* IV explicitly provided ? */
113 if (crd->crd_flags & CRD_F_IV_EXPLICIT)
114 bcopy(crd->crd_iv, iv, ivlen);
/* Otherwise generate a random IV for this operation. */
116 arc4rand(iv, ivlen, 0);
118 /* Do we need to write the IV */
119 if (!(crd->crd_flags & CRD_F_IV_PRESENT))
120 crypto_copyback(flags, buf, crd->crd_inject, ivlen, iv);
122 } else { /* Decryption */
123 /* IV explicitly provided ? */
124 if (crd->crd_flags & CRD_F_IV_EXPLICIT)
125 bcopy(crd->crd_iv, iv, ivlen);
/* Otherwise read the IV back out of the buffer at crd_inject. */
128 crypto_copydata(flags, buf, crd->crd_inject, ivlen, iv);
/* Per-request key: drop the old schedule and install the new key. */
132 if (crd->crd_flags & CRD_F_KEY_EXPLICIT) {
135 if (sw->sw_kschedule)
136 exf->zerokey(&(sw->sw_kschedule));
138 error = exf->setkey(&sw->sw_kschedule,
139 crd->crd_key, crd->crd_klen / 8);
/*
 * Normalize the buffer into a struct uio: mbuf chains are converted
 * to an iovec array, uios are used as-is, and flat buffers become a
 * single-entry iovec.
 */
145 iovcnt = nitems(iovlcl);
148 if ((flags & CRYPTO_F_IMBUF) != 0) {
149 error = crypto_mbuftoiov((struct mbuf *)buf, &iov, &iovcnt,
154 uio->uio_iovcnt = iovcnt;
155 } else if ((flags & CRYPTO_F_IOV) != 0)
156 uio = (struct uio *)buf;
158 iov[0].iov_base = buf;
159 iov[0].iov_len = crd->crd_skip + crd->crd_len;
168 * xforms that provide a reinit method perform all IV
169 * handling themselves.
171 exf->reinit(sw->sw_kschedule, iv);
/* Locate the starting iovec/offset of the payload. */
174 count = crd->crd_skip;
175 ind = cuio_getptr(uio, count, &k);
/* Slow path: a cipher block straddles an iovec boundary. */
185 * If there's insufficient data at the end of
186 * an iovec, we have to do some copying.
188 if (uio->uio_iov[ind].iov_len < k + blks &&
189 uio->uio_iov[ind].iov_len != k) {
190 cuio_copydata(uio, count, blks, blk);
192 /* Actual encryption/decryption */
194 if (crd->crd_flags & CRD_F_ENCRYPT) {
195 exf->encrypt(sw->sw_kschedule,
198 exf->decrypt(sw->sw_kschedule,
/* CBC-style encrypt on the bounce buffer: XOR, encrypt, save IV. */
201 } else if (crd->crd_flags & CRD_F_ENCRYPT) {
202 /* XOR with previous block */
203 for (j = 0; j < blks; j++)
206 exf->encrypt(sw->sw_kschedule, blk);
209 * Keep encrypted block for XOR'ing
212 bcopy(blk, iv, blks);
214 } else { /* decrypt */
216 * Keep encrypted block for XOR'ing
/* Alternate between the two IV buffers to keep the prior ciphertext. */
219 nivp = (ivp == iv) ? iv2 : iv;
220 bcopy(blk, nivp, blks);
222 exf->decrypt(sw->sw_kschedule, blk);
224 /* XOR with previous block */
225 for (j = 0; j < blks; j++)
231 /* Copy back decrypted block */
232 cuio_copyback(uio, count, blks, blk);
236 /* Advance pointer */
237 ind = cuio_getptr(uio, count, &k);
245 /* Could be done... */
/* Fast path: process whole blocks in place within the current iovec. */
250 while (uio->uio_iov[ind].iov_len >= k + blks && i > 0) {
256 uio->uio_iov[ind].iov_len - (size_t)k);
257 idat = (uint8_t *)uio->uio_iov[ind].iov_base + k;
/* reinit-style xforms: single-block or multi-block entry points. */
260 if ((crd->crd_flags & CRD_F_ENCRYPT) != 0 &&
261 exf->encrypt_multi == NULL)
262 exf->encrypt(sw->sw_kschedule,
264 else if ((crd->crd_flags & CRD_F_ENCRYPT) != 0) {
265 nb = rounddown(rem, blks);
266 exf->encrypt_multi(sw->sw_kschedule,
268 } else if (exf->decrypt_multi == NULL)
269 exf->decrypt(sw->sw_kschedule,
272 nb = rounddown(rem, blks);
273 exf->decrypt_multi(sw->sw_kschedule,
/* CBC-style in-place encrypt: XOR with previous block, then encrypt. */
276 } else if (crd->crd_flags & CRD_F_ENCRYPT) {
277 /* XOR with previous block/IV */
278 for (j = 0; j < blks; j++)
281 exf->encrypt(sw->sw_kschedule, idat);
283 } else { /* decrypt */
285 * Keep encrypted block to be used
286 * in next block's processing.
288 nivp = (ivp == iv) ? iv2 : iv;
289 bcopy(idat, nivp, blks);
291 exf->decrypt(sw->sw_kschedule, idat);
293 /* XOR with previous block/IV */
294 for (j = 0; j < blks; j++)
306 * Advance to the next iov if the end of the current iov
307 * is aligned with the end of a cipher block.
308 * Note that the code is equivalent to calling:
309 * ind = cuio_getptr(uio, count, &k);
311 if (i > 0 && k == uio->uio_iov[ind].iov_len) {
312 if (ind >= uio->uio_iovcnt) {
/* Release the iovec array allocated by crypto_mbuftoiov(), if any. */
323 free(iov, M_CRYPTO_DATA);
/*
 * swcr_authprepare() -- derive the per-session authentication state
 * from 'key'/'klen' for the algorithm described by 'axf'.
 *
 * Visible behavior per algorithm class:
 *  - HMAC variants: precompute the inner (sw_ictx) and outer (sw_octx)
 *    hash contexts over key^ipad and key^opad, padding short keys with
 *    the global ipad/opad fill buffers; the key bytes are XOR-scrambled
 *    in place and restored by the final loop.
 *  - KPDK (MD5/SHA1): seed sw_ictx with the key and stash a raw copy of
 *    the key in sw_octx so swcr_authcompute() can append it.
 *  - Poly1305: validate the key length, then Setkey + Init sw_ictx.
 *  - anything else: log that the algorithm takes no key.
 *
 * NOTE(review): elided lines in this listing; return-value semantics of
 * the error paths are not fully visible -- confirm against the full file.
 */
328 static int __result_use_check
329 swcr_authprepare(struct auth_hash *axf, struct swcr_data *sw, u_char *key,
337 case CRYPTO_MD5_HMAC:
338 case CRYPTO_SHA1_HMAC:
339 case CRYPTO_SHA2_224_HMAC:
340 case CRYPTO_SHA2_256_HMAC:
341 case CRYPTO_SHA2_384_HMAC:
342 case CRYPTO_SHA2_512_HMAC:
343 case CRYPTO_NULL_HMAC:
344 case CRYPTO_RIPEMD160_HMAC:
/* Inner context: hash (key ^ ipad) padded to the block size. */
345 for (k = 0; k < klen; k++)
346 key[k] ^= HMAC_IPAD_VAL;
348 axf->Init(sw->sw_ictx);
349 axf->Update(sw->sw_ictx, key, klen);
350 axf->Update(sw->sw_ictx, hmac_ipad_buffer, axf->blocksize - klen);
/* Flip ipad -> opad in place (single XOR of ipad^opad). */
352 for (k = 0; k < klen; k++)
353 key[k] ^= (HMAC_IPAD_VAL ^ HMAC_OPAD_VAL);
/* Outer context: hash (key ^ opad) padded to the block size. */
355 axf->Init(sw->sw_octx);
356 axf->Update(sw->sw_octx, key, klen);
357 axf->Update(sw->sw_octx, hmac_opad_buffer, axf->blocksize - klen);
/* Restore the caller's key bytes. */
359 for (k = 0; k < klen; k++)
360 key[k] ^= HMAC_OPAD_VAL;
362 case CRYPTO_MD5_KPDK:
363 case CRYPTO_SHA1_KPDK:
366 * We need a buffer that can hold an md5 and a sha1 result
367 * just to throw it away.
368 * What we do here is the initial part of:
369 * ALGO( key, keyfill, .. )
370 * adding the key to sw_ictx and abusing Final() to get the
372 * In addition we abuse the sw_octx to save the key to have
373 * it to be able to append it at the end in swcr_authcompute().
375 u_char buf[SHA1_RESULTLEN];
378 bcopy(key, sw->sw_octx, klen);
379 axf->Init(sw->sw_ictx);
380 axf->Update(sw->sw_ictx, key, klen);
/* Final() here is only used to apply the "keyfill" padding. */
381 axf->Final(buf, sw->sw_ictx);
384 case CRYPTO_POLY1305:
385 if (klen != POLY1305_KEY_LEN) {
386 CRYPTDEB("bad poly1305 key size %d", klen);
392 axf->Setkey(sw->sw_ictx, key, klen);
393 axf->Init(sw->sw_ictx);
/* Key supplied for a keyless algorithm: report and (presumably) fail. */
396 printf("%s: CRD_F_KEY_EXPLICIT flag given, but algorithm %d "
397 "doesn't use keys.\n", __func__, axf->type);
/*
 * swcr_authcompute() -- run the (keyed) hash over the payload described
 * by 'crd' and inject the resulting tag at crd_inject.
 *
 * Works on a stack copy of the precomputed sw_ictx so the session's
 * context is never consumed.  Finalization differs by class:
 *  - plain hashes: single Final();
 *  - HMAC: inner Final(), then outer context over the inner digest;
 *  - KPDK: append the saved key (sw_octx) before Final();
 *  - NULL-HMAC / Poly1305: single Final().
 *
 * NOTE(review): elided lines; error-return paths are only partially
 * visible here.
 */
404 * Compute keyed-hash authenticator.
407 swcr_authcompute(struct cryptodesc *crd, struct swcr_data *sw, caddr_t buf,
410 unsigned char aalg[HASH_MAX_LEN];
411 struct auth_hash *axf;
/* A session must have a prepared inner context. */
415 if (sw->sw_ictx == 0)
/* Re-derive contexts when the request carries its own key. */
420 if (crd->crd_flags & CRD_F_KEY_EXPLICIT) {
421 err = swcr_authprepare(axf, sw, crd->crd_key, crd->crd_klen);
/* Work on a private copy; sw_ictx stays reusable across requests. */
426 bcopy(sw->sw_ictx, &ctx, axf->ctxsize);
428 err = crypto_apply(flags, buf, crd->crd_skip, crd->crd_len,
429 (int (*)(void *, void *, unsigned int))axf->Update, (caddr_t)&ctx);
433 switch (sw->sw_alg) {
435 case CRYPTO_SHA2_224:
436 case CRYPTO_SHA2_256:
437 case CRYPTO_SHA2_384:
438 case CRYPTO_SHA2_512:
439 axf->Final(aalg, &ctx);
442 case CRYPTO_MD5_HMAC:
443 case CRYPTO_SHA1_HMAC:
444 case CRYPTO_SHA2_224_HMAC:
445 case CRYPTO_SHA2_256_HMAC:
446 case CRYPTO_SHA2_384_HMAC:
447 case CRYPTO_SHA2_512_HMAC:
448 case CRYPTO_RIPEMD160_HMAC:
449 if (sw->sw_octx == NULL)
/* HMAC: outer hash over the inner digest. */
452 axf->Final(aalg, &ctx);
453 bcopy(sw->sw_octx, &ctx, axf->ctxsize);
454 axf->Update(&ctx, aalg, axf->hashsize);
455 axf->Final(aalg, &ctx);
458 case CRYPTO_MD5_KPDK:
459 case CRYPTO_SHA1_KPDK:
460 /* If we have no key saved, return error. */
461 if (sw->sw_octx == NULL)
465 * Add the trailing copy of the key (see comment in
466 * swcr_authprepare()) after the data:
467 * ALGO( .., key, algofill )
468 * and let Final() do the proper, natural "algofill"
471 axf->Update(&ctx, sw->sw_octx, sw->sw_klen);
472 axf->Final(aalg, &ctx);
477 case CRYPTO_NULL_HMAC:
478 case CRYPTO_POLY1305:
479 axf->Final(aalg, &ctx);
483 /* Inject the authentication data */
/* Truncate to sw_mlen when the session asked for a shorter tag. */
484 crypto_copyback(flags, buf, crd->crd_inject,
485 sw->sw_mlen == 0 ? axf->hashsize : sw->sw_mlen, aalg);
/* Compile-time bounds: crd_len is an int, so it can never exceed the
 * GCM limits on plaintext and AAD size. */
489 CTASSERT(INT_MAX <= (1ll<<39) - 256); /* GCM: plain text < 2^39-256 */
490 CTASSERT(INT_MAX <= (uint64_t)-1); /* GCM: associated data <= 2^64-1 */
/*
 * swcr_authenc() -- combined AEAD processing (AES-GCM/GMAC and
 * AES-CCM) for a request whose descriptor chain holds one encryption
 * (crde) and one authentication (crda) descriptor.
 *
 * Visible flow: pair up crde/crda from the session's algorithm table,
 * enforce the legal cipher/MAC pairings, set up the IV (explicit
 * required for GCM-16/CCM-16 decrypt paths per the check below), feed
 * AAD then payload through the MAC, encrypt or decrypt alongside, add
 * the GMAC length block, and on decrypt verify the tag with
 * timingsafe_bcmp before releasing plaintext.
 *
 * NOTE(review): elided lines; comments describe only visible code.
 */
493 * Apply a combined encryption-authentication transformation
496 swcr_authenc(struct cryptop *crp)
498 uint32_t blkbuf[howmany(EALG_MAX_BLOCK_LEN, sizeof(uint32_t))];
499 u_char *blk = (u_char *)blkbuf;
500 u_char aalg[AALG_MAX_RESULT_LEN];
501 u_char uaalg[AALG_MAX_RESULT_LEN];
502 u_char iv[EALG_MAX_BLOCK_LEN];
504 struct swcr_session *ses;
505 struct cryptodesc *crd, *crda = NULL, *crde = NULL;
506 struct swcr_data *sw, *swa, *swe = NULL;
507 struct auth_hash *axf = NULL;
508 struct enc_xform *exf = NULL;
509 caddr_t buf = (caddr_t)crp->crp_buf;
511 int aadlen, blksz, i, ivlen, len, iskip, oskip, r;
514 ivlen = blksz = iskip = oskip = 0;
516 ses = crypto_get_driver_session(crp->crp_session);
/* Walk the descriptor chain, resolving each against the session. */
518 for (crd = crp->crp_desc; crd; crd = crd->crd_next) {
519 for (i = 0; i < nitems(ses->swcr_algorithms) &&
520 ses->swcr_algorithms[i].sw_alg != crd->crd_alg; i++)
522 if (i == nitems(ses->swcr_algorithms))
525 sw = &ses->swcr_algorithms[i];
526 switch (sw->sw_alg) {
527 case CRYPTO_AES_CCM_16:
528 case CRYPTO_AES_NIST_GCM_16:
529 case CRYPTO_AES_NIST_GMAC:
533 /* AES_CCM_IV_LEN and AES_GCM_IV_LEN are both 12 */
534 ivlen = AES_CCM_IV_LEN;
536 case CRYPTO_AES_CCM_CBC_MAC:
539 case CRYPTO_AES_128_NIST_GMAC:
540 case CRYPTO_AES_192_NIST_GMAC:
541 case CRYPTO_AES_256_NIST_GMAC:
545 if (swa->sw_ictx == 0)
/* Private MAC context copy for this request. */
547 bcopy(swa->sw_ictx, &ctx, axf->ctxsize);
548 blksz = axf->blocksize;
/* AEAD needs both halves of the pairing. */
554 if (crde == NULL || crda == NULL)
557 * We need to make sure that the auth algorithm matches the
558 * encr algorithm. Specifically, for AES-GCM must go with
559 * AES NIST GMAC, and AES-CCM must go with CBC-MAC.
561 if (crde->crd_alg == CRYPTO_AES_NIST_GCM_16) {
562 switch (crda->crd_alg) {
563 case CRYPTO_AES_128_NIST_GMAC:
564 case CRYPTO_AES_192_NIST_GMAC:
565 case CRYPTO_AES_256_NIST_GMAC:
568 return (EINVAL); /* Not good! */
570 } else if (crde->crd_alg == CRYPTO_AES_CCM_16 &&
571 crda->crd_alg != CRYPTO_AES_CCM_CBC_MAC)
/* GCM-16/CCM-16 require an explicit IV from the caller. */
574 if ((crde->crd_alg == CRYPTO_AES_NIST_GCM_16 ||
575 crde->crd_alg == CRYPTO_AES_CCM_16) &&
576 (crde->crd_flags & CRD_F_IV_EXPLICIT) == 0)
/* Cipher and MAC must have been keyed identically. */
579 if (crde->crd_klen != crda->crd_klen)
582 /* Initialize the IV */
583 if (crde->crd_flags & CRD_F_ENCRYPT) {
584 /* IV explicitly provided ? */
585 if (crde->crd_flags & CRD_F_IV_EXPLICIT)
586 bcopy(crde->crd_iv, iv, ivlen);
588 arc4rand(iv, ivlen, 0);
590 /* Do we need to write the IV */
591 if (!(crde->crd_flags & CRD_F_IV_PRESENT))
592 crypto_copyback(crp->crp_flags, buf, crde->crd_inject,
595 } else { /* Decryption */
596 /* IV explicitly provided ? */
597 if (crde->crd_flags & CRD_F_IV_EXPLICIT)
598 bcopy(crde->crd_iv, iv, ivlen);
601 crypto_copydata(crp->crp_flags, buf, crde->crd_inject,
606 if (swa->sw_alg == CRYPTO_AES_CCM_CBC_MAC) {
608 * AES CCM-CBC needs to know the length of
609 * both the auth data, and payload data, before
610 * doing the auth computation.
612 ctx.aes_cbc_mac_ctx.authDataLength = crda->crd_len;
613 ctx.aes_cbc_mac_ctx.cryptDataLength = crde->crd_len;
615 /* Supply MAC with IV */
617 axf->Reinit(&ctx, iv, ivlen);
619 /* Supply MAC with AAD */
620 aadlen = crda->crd_len;
/* Feed AAD a MAC-block at a time, zero-padding the final block. */
622 for (i = iskip; i < crda->crd_len; i += blksz) {
623 len = MIN(crda->crd_len - i, blksz - oskip);
624 crypto_copydata(crp->crp_flags, buf, crda->crd_skip + i, len,
626 bzero(blk + len + oskip, blksz - len - oskip);
627 axf->Update(&ctx, blk, blksz);
628 oskip = 0; /* reset initial output offset */
632 exf->reinit(swe->sw_kschedule, iv);
634 /* Do encryption/decryption with MAC */
635 for (i = 0; i < crde->crd_len; i += len) {
/* Multi-block xforms may consume up to a blkbuf's worth per pass. */
636 if (exf->encrypt_multi != NULL) {
637 len = rounddown(crde->crd_len - i, blksz);
641 len = MIN(len, sizeof(blkbuf));
644 len = MIN(crde->crd_len - i, len);
647 crypto_copydata(crp->crp_flags, buf, crde->crd_skip + i, len,
650 * One of the problems with CCM+CBC is that the authentication
651 * is done on the unecncrypted data. As a result, we have
652 * to do the authentication update at different times,
653 * depending on whether it's CCM or not.
655 if (crde->crd_flags & CRD_F_ENCRYPT) {
/* CCM: MAC the plaintext before encrypting it. */
657 axf->Update(&ctx, blk, len);
658 if (exf->encrypt_multi != NULL)
659 exf->encrypt_multi(swe->sw_kschedule, blk,
662 exf->encrypt(swe->sw_kschedule, blk);
/* GCM: MAC the ciphertext after encrypting. */
664 axf->Update(&ctx, blk, len);
665 crypto_copyback(crp->crp_flags, buf,
666 crde->crd_skip + i, len, blk);
669 KASSERT(exf->encrypt_multi == NULL,
670 ("assume CCM is single-block only"));
671 exf->decrypt(swe->sw_kschedule, blk);
673 axf->Update(&ctx, blk, len);
677 /* Do any required special finalization */
678 switch (crda->crd_alg) {
679 case CRYPTO_AES_128_NIST_GMAC:
680 case CRYPTO_AES_192_NIST_GMAC:
681 case CRYPTO_AES_256_NIST_GMAC:
/* GCM length block: 64-bit AAD and ciphertext bit lengths. */
684 blkp = (uint32_t *)blk + 1;
685 *blkp = htobe32(aadlen * 8);
686 blkp = (uint32_t *)blk + 3;
687 *blkp = htobe32(crde->crd_len * 8);
688 axf->Update(&ctx, blk, blksz);
693 axf->Final(aalg, &ctx);
/* Decrypt path: verify the tag (constant time) before decrypting. */
696 if (!(crde->crd_flags & CRD_F_ENCRYPT)) {
697 crypto_copydata(crp->crp_flags, buf, crda->crd_inject,
698 axf->hashsize, uaalg);
700 r = timingsafe_bcmp(aalg, uaalg, axf->hashsize);
702 /* tag matches, decrypt data */
704 KASSERT(exf->reinit != NULL,
705 ("AES-CCM reinit function must be set"));
706 exf->reinit(swe->sw_kschedule, iv);
708 for (i = 0; i < crde->crd_len; i += blksz) {
709 len = MIN(crde->crd_len - i, blksz);
712 crypto_copydata(crp->crp_flags, buf,
713 crde->crd_skip + i, len, blk);
714 exf->decrypt(swe->sw_kschedule, blk);
715 crypto_copyback(crp->crp_flags, buf,
716 crde->crd_skip + i, len, blk);
721 /* Inject the authentication data */
722 crypto_copyback(crp->crp_flags, buf, crda->crd_inject,
723 axf->hashsize, aalg);
/*
 * swcr_compdec() -- (de)compress the payload in one shot: copy it into
 * a temporary buffer, run the comp_algo, and copy the result back,
 * shrinking the mbuf chain / uio when compression reduced the size.
 * Compression that does not shrink the data is discarded as useless.
 *
 * NOTE(review): elided lines; the malloc failure path and final return
 * are not visible here.
 */
730 * Apply a compression/decompression algorithm
733 swcr_compdec(struct cryptodesc *crd, struct swcr_data *sw,
734 caddr_t buf, int flags)
736 u_int8_t *data, *out;
737 struct comp_algo *cxf;
743 /* We must handle the whole buffer of data in one time
744 * then if there is not all the data in the mbuf, we must
/* Temporary contiguous copy of the input. */
748 data = malloc(crd->crd_len, M_CRYPTO_DATA, M_NOWAIT);
751 crypto_copydata(flags, buf, crd->crd_skip, crd->crd_len, data);
753 if (crd->crd_flags & CRD_F_COMP)
754 result = cxf->compress(data, crd->crd_len, &out);
756 result = cxf->decompress(data, crd->crd_len, &out);
758 free(data, M_CRYPTO_DATA);
762 /* Copy back the (de)compressed data. m_copyback is
763 * extending the mbuf as necessary.
765 sw->sw_size = result;
766 /* Check the compressed size when doing compression */
767 if (crd->crd_flags & CRD_F_COMP) {
768 if (result >= crd->crd_len) {
769 /* Compression was useless, we lost time */
770 free(out, M_CRYPTO_DATA);
775 crypto_copyback(flags, buf, crd->crd_skip, result, out);
/* Output shrank: trim the trailing bytes from the buffer. */
776 if (result < crd->crd_len) {
777 adj = result - crd->crd_len;
778 if (flags & CRYPTO_F_IMBUF) {
779 adj = result - crd->crd_len;
/* m_adj() with a negative count trims from the tail. */
780 m_adj((struct mbuf *)buf, adj);
781 } else if (flags & CRYPTO_F_IOV) {
782 struct uio *uio = (struct uio *)buf;
/* Walk iovecs from the end, shortening/zeroing until adj is consumed. */
785 adj = crd->crd_len - result;
786 ind = uio->uio_iovcnt - 1;
788 while (adj > 0 && ind >= 0) {
789 if (adj < uio->uio_iov[ind].iov_len) {
790 uio->uio_iov[ind].iov_len -= adj;
794 adj -= uio->uio_iov[ind].iov_len;
795 uio->uio_iov[ind].iov_len = 0;
801 free(out, M_CRYPTO_DATA);
/*
 * swcr_newsession() -- build a software session from the cryptoini
 * chain: pick the xform/auth/comp descriptor for each requested
 * algorithm, allocate and key the per-algorithm state, and record the
 * algorithm in the session slot.  On any failure the partially built
 * session is torn down via swcr_freesession().
 *
 * NOTE(review): elided lines (break/return statements, some error
 * paths); comments describe only visible code.
 */
806 * Generate a new software session.
809 swcr_newsession(device_t dev, crypto_session_t cses, struct cryptoini *cri)
811 struct swcr_session *ses;
812 struct swcr_data *swd;
813 struct auth_hash *axf;
814 struct enc_xform *txf;
815 struct comp_algo *cxf;
820 if (cses == NULL || cri == NULL)
823 ses = crypto_get_driver_session(cses);
824 mtx_init(&ses->swcr_lock, "swcr session lock", NULL, MTX_DEF);
/* One session slot per cryptoini entry, bounded by the slot array. */
826 for (i = 0; cri != NULL && i < nitems(ses->swcr_algorithms); i++) {
827 swd = &ses->swcr_algorithms[i];
829 switch (cri->cri_alg) {
/* --- Symmetric ciphers: select the enc_xform, then key it. --- */
831 txf = &enc_xform_des;
833 case CRYPTO_3DES_CBC:
834 txf = &enc_xform_3des;
837 txf = &enc_xform_blf;
839 case CRYPTO_CAST_CBC:
840 txf = &enc_xform_cast5;
842 case CRYPTO_SKIPJACK_CBC:
843 txf = &enc_xform_skipjack;
845 case CRYPTO_RIJNDAEL128_CBC:
846 txf = &enc_xform_rijndael128;
849 txf = &enc_xform_aes_xts;
852 txf = &enc_xform_aes_icm;
854 case CRYPTO_AES_NIST_GCM_16:
855 txf = &enc_xform_aes_nist_gcm;
857 case CRYPTO_AES_CCM_16:
858 txf = &enc_xform_ccm;
860 case CRYPTO_AES_NIST_GMAC:
861 txf = &enc_xform_aes_nist_gmac;
864 case CRYPTO_CAMELLIA_CBC:
865 txf = &enc_xform_camellia;
867 case CRYPTO_NULL_CBC:
868 txf = &enc_xform_null;
870 case CRYPTO_CHACHA20:
871 txf = &enc_xform_chacha20;
/* Key now if provided; otherwise keying happens per-request. */
874 if (cri->cri_key != NULL) {
875 error = txf->setkey(&swd->sw_kschedule,
876 cri->cri_key, cri->cri_klen / 8);
878 swcr_freesession(dev, cses);
/* --- HMAC family: allocate both contexts, then prepare. --- */
885 case CRYPTO_MD5_HMAC:
886 axf = &auth_hash_hmac_md5;
888 case CRYPTO_SHA1_HMAC:
889 axf = &auth_hash_hmac_sha1;
891 case CRYPTO_SHA2_224_HMAC:
892 axf = &auth_hash_hmac_sha2_224;
894 case CRYPTO_SHA2_256_HMAC:
895 axf = &auth_hash_hmac_sha2_256;
897 case CRYPTO_SHA2_384_HMAC:
898 axf = &auth_hash_hmac_sha2_384;
900 case CRYPTO_SHA2_512_HMAC:
901 axf = &auth_hash_hmac_sha2_512;
903 case CRYPTO_NULL_HMAC:
904 axf = &auth_hash_null;
906 case CRYPTO_RIPEMD160_HMAC:
907 axf = &auth_hash_hmac_ripemd_160;
909 swd->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA,
911 if (swd->sw_ictx == NULL) {
912 swcr_freesession(dev, cses);
916 swd->sw_octx = malloc(axf->ctxsize, M_CRYPTO_DATA,
918 if (swd->sw_octx == NULL) {
919 swcr_freesession(dev, cses);
923 if (cri->cri_key != NULL) {
924 error = swcr_authprepare(axf, swd,
925 cri->cri_key, cri->cri_klen);
927 swcr_freesession(dev, cses);
932 swd->sw_mlen = cri->cri_mlen;
/* --- KPDK: sw_octx holds a raw key copy, not a hash context. --- */
936 case CRYPTO_MD5_KPDK:
937 axf = &auth_hash_key_md5;
940 case CRYPTO_SHA1_KPDK:
941 axf = &auth_hash_key_sha1;
943 swd->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA,
945 if (swd->sw_ictx == NULL) {
946 swcr_freesession(dev, cses);
950 swd->sw_octx = malloc(cri->cri_klen / 8,
951 M_CRYPTO_DATA, M_NOWAIT);
952 if (swd->sw_octx == NULL) {
953 swcr_freesession(dev, cses);
957 /* Store the key so we can "append" it to the payload */
958 if (cri->cri_key != NULL) {
959 error = swcr_authprepare(axf, swd,
960 cri->cri_key, cri->cri_klen);
962 swcr_freesession(dev, cses);
967 swd->sw_mlen = cri->cri_mlen;
/* --- Plain (unkeyed) hashes. --- */
972 axf = &auth_hash_md5;
977 axf = &auth_hash_sha1;
979 case CRYPTO_SHA2_224:
980 axf = &auth_hash_sha2_224;
982 case CRYPTO_SHA2_256:
983 axf = &auth_hash_sha2_256;
985 case CRYPTO_SHA2_384:
986 axf = &auth_hash_sha2_384;
988 case CRYPTO_SHA2_512:
989 axf = &auth_hash_sha2_512;
992 swd->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA,
994 if (swd->sw_ictx == NULL) {
995 swcr_freesession(dev, cses);
999 axf->Init(swd->sw_ictx);
1000 swd->sw_mlen = cri->cri_mlen;
/* --- AEAD MAC halves: CBC-MAC selected by key length, GMAC keyed. --- */
1004 case CRYPTO_AES_CCM_CBC_MAC:
1005 switch (cri->cri_klen) {
1007 axf = &auth_hash_ccm_cbc_mac_128;
1010 axf = &auth_hash_ccm_cbc_mac_192;
1013 axf = &auth_hash_ccm_cbc_mac_256;
1016 swcr_freesession(dev, cses);
1020 case CRYPTO_AES_128_NIST_GMAC:
1021 axf = &auth_hash_nist_gmac_aes_128;
1024 case CRYPTO_AES_192_NIST_GMAC:
1025 axf = &auth_hash_nist_gmac_aes_192;
1028 case CRYPTO_AES_256_NIST_GMAC:
1029 axf = &auth_hash_nist_gmac_aes_256;
1031 len = cri->cri_klen / 8;
1032 if (len != 16 && len != 24 && len != 32) {
1033 swcr_freesession(dev, cses);
1037 swd->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA,
1039 if (swd->sw_ictx == NULL) {
1040 swcr_freesession(dev, cses);
1043 axf->Init(swd->sw_ictx);
1044 axf->Setkey(swd->sw_ictx, cri->cri_key, len);
/* --- Keyed one-shot MACs (Blake2, Poly1305). --- */
1048 case CRYPTO_BLAKE2B:
1049 axf = &auth_hash_blake2b;
1051 case CRYPTO_BLAKE2S:
1052 axf = &auth_hash_blake2s;
1054 case CRYPTO_POLY1305:
1055 axf = &auth_hash_poly1305;
1057 swd->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA,
1059 if (swd->sw_ictx == NULL) {
1060 swcr_freesession(dev, cses);
1063 axf->Setkey(swd->sw_ictx, cri->cri_key,
1065 axf->Init(swd->sw_ictx);
/* --- Compression. --- */
1069 case CRYPTO_DEFLATE_COMP:
1070 cxf = &comp_algo_deflate;
/* Unknown algorithm: tear down and fail. */
1074 swcr_freesession(dev, cses);
1078 swd->sw_alg = cri->cri_alg;
1079 cri = cri->cri_next;
/* More cryptoini entries than session slots is unsupported. */
1084 CRYPTDEB("Bogus session request for three or more algorithms");
/*
 * swcr_freesession() -- release every per-algorithm resource held by a
 * session: zero+free key schedules and hash contexts, per the class of
 * each slot's algorithm.  Also called from swcr_newsession() error
 * paths, so every branch tolerates partially initialized slots.
 *
 * NOTE(review): the HMAC/KPDK branches use bzero() while the plain-hash
 * branch uses explicit_bzero(); plain bzero of key material can in
 * principle be optimized away -- worth confirming against the full
 * file / upstream history.
 */
1091 swcr_freesession(device_t dev, crypto_session_t cses)
1093 struct swcr_session *ses;
1094 struct swcr_data *swd;
1095 struct enc_xform *txf;
1096 struct auth_hash *axf;
1099 ses = crypto_get_driver_session(cses);
1101 mtx_destroy(&ses->swcr_lock);
1102 for (i = 0; i < nitems(ses->swcr_algorithms); i++) {
1103 swd = &ses->swcr_algorithms[i];
1105 switch (swd->sw_alg) {
/* Ciphers: scrub and release the key schedule. */
1106 case CRYPTO_DES_CBC:
1107 case CRYPTO_3DES_CBC:
1108 case CRYPTO_BLF_CBC:
1109 case CRYPTO_CAST_CBC:
1110 case CRYPTO_SKIPJACK_CBC:
1111 case CRYPTO_RIJNDAEL128_CBC:
1112 case CRYPTO_AES_XTS:
1113 case CRYPTO_AES_ICM:
1114 case CRYPTO_AES_NIST_GCM_16:
1115 case CRYPTO_AES_NIST_GMAC:
1116 case CRYPTO_CAMELLIA_CBC:
1117 case CRYPTO_NULL_CBC:
1118 case CRYPTO_CHACHA20:
1119 case CRYPTO_AES_CCM_16:
1122 if (swd->sw_kschedule)
1123 txf->zerokey(&(swd->sw_kschedule));
/* HMAC family: both inner and outer contexts are hash state. */
1126 case CRYPTO_MD5_HMAC:
1127 case CRYPTO_SHA1_HMAC:
1128 case CRYPTO_SHA2_224_HMAC:
1129 case CRYPTO_SHA2_256_HMAC:
1130 case CRYPTO_SHA2_384_HMAC:
1131 case CRYPTO_SHA2_512_HMAC:
1132 case CRYPTO_RIPEMD160_HMAC:
1133 case CRYPTO_NULL_HMAC:
1134 case CRYPTO_AES_CCM_CBC_MAC:
1138 bzero(swd->sw_ictx, axf->ctxsize);
1139 free(swd->sw_ictx, M_CRYPTO_DATA);
1142 bzero(swd->sw_octx, axf->ctxsize);
1143 free(swd->sw_octx, M_CRYPTO_DATA);
/* KPDK: sw_octx is a raw key copy of sw_klen bytes, not a context. */
1147 case CRYPTO_MD5_KPDK:
1148 case CRYPTO_SHA1_KPDK:
1152 bzero(swd->sw_ictx, axf->ctxsize);
1153 free(swd->sw_ictx, M_CRYPTO_DATA);
1156 bzero(swd->sw_octx, swd->sw_klen);
1157 free(swd->sw_octx, M_CRYPTO_DATA);
/* Single-context algorithms: only sw_ictx to scrub. */
1161 case CRYPTO_BLAKE2B:
1162 case CRYPTO_BLAKE2S:
1164 case CRYPTO_POLY1305:
1166 case CRYPTO_SHA2_224:
1167 case CRYPTO_SHA2_256:
1168 case CRYPTO_SHA2_384:
1169 case CRYPTO_SHA2_512:
1170 case CRYPTO_AES_128_NIST_GMAC:
1171 case CRYPTO_AES_192_NIST_GMAC:
1172 case CRYPTO_AES_256_NIST_GMAC:
1176 explicit_bzero(swd->sw_ictx, axf->ctxsize);
1177 free(swd->sw_ictx, M_CRYPTO_DATA);
/* Compression holds no per-session state to free here. */
1181 case CRYPTO_DEFLATE_COMP:
/*
 * swcr_process() -- cryptodev "process" entry point.  Validates the
 * request, takes the session lock, and dispatches each descriptor in
 * the chain to the matching helper (encdec / authcompute / authenc /
 * compdec) based on the session slot's algorithm.
 *
 * NOTE(review): elided lines; the crypto_done() call and 'goto done'
 * targets are not visible in this listing.
 */
1189 * Process a software request.
1192 swcr_process(device_t dev, struct cryptop *crp, int hint)
1194 struct swcr_session *ses = NULL;
1195 struct cryptodesc *crd;
1196 struct swcr_data *sw;
/* Sanity-check the request before touching the session. */
1203 if (crp->crp_desc == NULL || crp->crp_buf == NULL) {
1204 crp->crp_etype = EINVAL;
1208 ses = crypto_get_driver_session(crp->crp_session);
/* Serialize processing per session. */
1209 mtx_lock(&ses->swcr_lock);
1211 /* Go through crypto descriptors, processing as we go */
1212 for (crd = crp->crp_desc; crd; crd = crd->crd_next) {
1214 * Find the crypto context.
1216 * XXX Note that the logic here prevents us from having
1217 * XXX the same algorithm multiple times in a session
1218 * XXX (or rather, we can but it won't give us the right
1219 * XXX results). To do that, we'd need some way of differentiating
1220 * XXX between the various instances of an algorithm (so we can
1221 * XXX locate the correct crypto context).
1223 for (i = 0; i < nitems(ses->swcr_algorithms) &&
1224 ses->swcr_algorithms[i].sw_alg != crd->crd_alg; i++)
1227 /* No such context ? */
1228 if (i == nitems(ses->swcr_algorithms)) {
1229 crp->crp_etype = EINVAL;
1232 sw = &ses->swcr_algorithms[i];
1233 switch (sw->sw_alg) {
/* Symmetric ciphers -> swcr_encdec(). */
1234 case CRYPTO_DES_CBC:
1235 case CRYPTO_3DES_CBC:
1236 case CRYPTO_BLF_CBC:
1237 case CRYPTO_CAST_CBC:
1238 case CRYPTO_SKIPJACK_CBC:
1239 case CRYPTO_RIJNDAEL128_CBC:
1240 case CRYPTO_AES_XTS:
1241 case CRYPTO_AES_ICM:
1242 case CRYPTO_CAMELLIA_CBC:
1243 case CRYPTO_CHACHA20:
1244 if ((crp->crp_etype = swcr_encdec(crd, sw,
1245 crp->crp_buf, crp->crp_flags)) != 0)
/* NULL cipher: nothing to transform. */
1248 case CRYPTO_NULL_CBC:
/* Hashes and MACs -> swcr_authcompute(). */
1251 case CRYPTO_MD5_HMAC:
1252 case CRYPTO_SHA1_HMAC:
1253 case CRYPTO_SHA2_224_HMAC:
1254 case CRYPTO_SHA2_256_HMAC:
1255 case CRYPTO_SHA2_384_HMAC:
1256 case CRYPTO_SHA2_512_HMAC:
1257 case CRYPTO_RIPEMD160_HMAC:
1258 case CRYPTO_NULL_HMAC:
1259 case CRYPTO_MD5_KPDK:
1260 case CRYPTO_SHA1_KPDK:
1263 case CRYPTO_SHA2_224:
1264 case CRYPTO_SHA2_256:
1265 case CRYPTO_SHA2_384:
1266 case CRYPTO_SHA2_512:
1267 case CRYPTO_BLAKE2B:
1268 case CRYPTO_BLAKE2S:
1269 case CRYPTO_POLY1305:
1270 if ((crp->crp_etype = swcr_authcompute(crd, sw,
1271 crp->crp_buf, crp->crp_flags)) != 0)
/* AEAD combinations -> swcr_authenc() (handles the whole chain). */
1275 case CRYPTO_AES_NIST_GCM_16:
1276 case CRYPTO_AES_NIST_GMAC:
1277 case CRYPTO_AES_128_NIST_GMAC:
1278 case CRYPTO_AES_192_NIST_GMAC:
1279 case CRYPTO_AES_256_NIST_GMAC:
1280 case CRYPTO_AES_CCM_16:
1281 case CRYPTO_AES_CCM_CBC_MAC:
1282 crp->crp_etype = swcr_authenc(crp);
/* Compression -> swcr_compdec(); report the output size in crp_olen. */
1285 case CRYPTO_DEFLATE_COMP:
1286 if ((crp->crp_etype = swcr_compdec(crd, sw,
1287 crp->crp_buf, crp->crp_flags)) != 0)
1290 crp->crp_olen = (int)sw->sw_size;
1294 /* Unknown/unsupported algorithm */
1295 crp->crp_etype = EINVAL;
1302 mtx_unlock(&ses->swcr_lock);
/*
 * swcr_identify() -- bus identify hook: add a single "cryptosoft"
 * child under the parent if one does not already exist.
 */
1308 swcr_identify(driver_t *drv, device_t parent)
1310 /* NB: order 10 is so we get attached after h/w devices */
1311 if (device_find_child(parent, "cryptosoft", -1) == NULL &&
1312 BUS_ADD_CHILD(parent, 10, "cryptosoft", 0) == 0)
1313 panic("cryptosoft: could not attach");
/* Probe hook: always matches; sets the human-readable description. */
1317 swcr_probe(device_t dev)
1319 device_set_desc(dev, "software crypto");
/* NOWILDCARD: only attach to the explicitly added child. */
1320 return (BUS_PROBE_NOWILDCARD);
/*
 * swcr_attach() -- initialize the HMAC pad buffers, obtain a driver id
 * from the crypto framework (software + synchronous capability), and
 * register every algorithm this driver implements.
 */
1324 swcr_attach(device_t dev)
1326 memset(hmac_ipad_buffer, HMAC_IPAD_VAL, HMAC_MAX_BLOCK_LEN);
1327 memset(hmac_opad_buffer, HMAC_OPAD_VAL, HMAC_MAX_BLOCK_LEN);
1329 swcr_id = crypto_get_driverid(dev, sizeof(struct swcr_session),
1330 CRYPTOCAP_F_SOFTWARE | CRYPTOCAP_F_SYNC);
1332 device_printf(dev, "cannot initialize!");
/* Convenience wrapper; return values are intentionally ignored. */
1335 #define REGISTER(alg) \
1336 crypto_register(swcr_id, alg, 0,0)
1337 REGISTER(CRYPTO_DES_CBC);
1338 REGISTER(CRYPTO_3DES_CBC);
1339 REGISTER(CRYPTO_BLF_CBC);
1340 REGISTER(CRYPTO_CAST_CBC);
1341 REGISTER(CRYPTO_SKIPJACK_CBC);
1342 REGISTER(CRYPTO_NULL_CBC);
1343 REGISTER(CRYPTO_MD5_HMAC);
1344 REGISTER(CRYPTO_SHA1_HMAC);
1345 REGISTER(CRYPTO_SHA2_224_HMAC);
1346 REGISTER(CRYPTO_SHA2_256_HMAC);
1347 REGISTER(CRYPTO_SHA2_384_HMAC);
1348 REGISTER(CRYPTO_SHA2_512_HMAC);
1349 REGISTER(CRYPTO_RIPEMD160_HMAC);
1350 REGISTER(CRYPTO_NULL_HMAC);
1351 REGISTER(CRYPTO_MD5_KPDK);
1352 REGISTER(CRYPTO_SHA1_KPDK);
1353 REGISTER(CRYPTO_MD5);
1354 REGISTER(CRYPTO_SHA1);
1355 REGISTER(CRYPTO_SHA2_224);
1356 REGISTER(CRYPTO_SHA2_256);
1357 REGISTER(CRYPTO_SHA2_384);
1358 REGISTER(CRYPTO_SHA2_512);
1359 REGISTER(CRYPTO_RIJNDAEL128_CBC);
1360 REGISTER(CRYPTO_AES_XTS);
1361 REGISTER(CRYPTO_AES_ICM);
1362 REGISTER(CRYPTO_AES_NIST_GCM_16);
1363 REGISTER(CRYPTO_AES_NIST_GMAC);
1364 REGISTER(CRYPTO_AES_128_NIST_GMAC);
1365 REGISTER(CRYPTO_AES_192_NIST_GMAC);
1366 REGISTER(CRYPTO_AES_256_NIST_GMAC);
1367 REGISTER(CRYPTO_CAMELLIA_CBC);
1368 REGISTER(CRYPTO_DEFLATE_COMP);
1369 REGISTER(CRYPTO_BLAKE2B);
1370 REGISTER(CRYPTO_BLAKE2S);
1371 REGISTER(CRYPTO_CHACHA20);
1372 REGISTER(CRYPTO_AES_CCM_16);
1373 REGISTER(CRYPTO_AES_CCM_CBC_MAC);
1374 REGISTER(CRYPTO_POLY1305);
/* Detach hook: unregister every algorithm registered under swcr_id. */
1381 swcr_detach(device_t dev)
1383 crypto_unregister_all(swcr_id);
/* newbus method table: device lifecycle plus the cryptodev KPI hooks. */
1387 static device_method_t swcr_methods[] = {
1388 DEVMETHOD(device_identify, swcr_identify),
1389 DEVMETHOD(device_probe, swcr_probe),
1390 DEVMETHOD(device_attach, swcr_attach),
1391 DEVMETHOD(device_detach, swcr_detach),
1393 DEVMETHOD(cryptodev_newsession, swcr_newsession),
1394 DEVMETHOD(cryptodev_freesession,swcr_freesession),
1395 DEVMETHOD(cryptodev_process, swcr_process),
/* Driver/devclass declarations and module registration on nexus. */
1400 static driver_t swcr_driver = {
1403 0, /* NB: no softc */
1405 static devclass_t swcr_devclass;
1408 * NB: We explicitly reference the crypto module so we
1409 * get the necessary ordering when built as a loadable
1410 * module. This is required because we bundle the crypto
1411 * module code together with the cryptosoft driver (otherwise
1412 * normal module dependencies would handle things).
1414 extern int crypto_modevent(struct module *, int, void *);
1415 /* XXX where to attach */
1416 DRIVER_MODULE(cryptosoft, nexus, swcr_driver, swcr_devclass, crypto_modevent,0);
1417 MODULE_VERSION(cryptosoft, 1);
1418 MODULE_DEPEND(cryptosoft, crypto, 1, 1, 1);