1 /* $OpenBSD: cryptosoft.c,v 1.35 2002/04/26 08:43:50 deraadt Exp $ */
4 * The author of this code is Angelos D. Keromytis (angelos@cis.upenn.edu)
5 * Copyright (c) 2002-2006 Sam Leffler, Errno Consulting
7 * This code was written by Angelos D. Keromytis in Athens, Greece, in
8 * February 2000. Network Security Technologies Inc. (NSTI) kindly
9 * supported the development of this code.
11 * Copyright (c) 2000, 2001 Angelos D. Keromytis
12 * Copyright (c) 2014 The FreeBSD Foundation
13 * All rights reserved.
15 * Portions of this software were developed by John-Mark Gurney
16 * under sponsorship of the FreeBSD Foundation and
17 * Rubicon Communications, LLC (Netgate).
19 * Permission to use, copy, and modify this software with or without fee
20 * is hereby granted, provided that this entire notice is included in
21 * all source code copies of any software which is or includes a copy or
22 * modification of this software.
24 * THIS SOFTWARE IS BEING PROVIDED "AS IS", WITHOUT ANY EXPRESS OR
25 * IMPLIED WARRANTY. IN PARTICULAR, NONE OF THE AUTHORS MAKES ANY
26 * REPRESENTATION OR WARRANTY OF ANY KIND CONCERNING THE
27 * MERCHANTABILITY OF THIS SOFTWARE OR ITS FITNESS FOR ANY PARTICULAR
31 #include <sys/cdefs.h>
32 __FBSDID("$FreeBSD$");
34 #include <sys/param.h>
35 #include <sys/systm.h>
36 #include <sys/malloc.h>
38 #include <sys/module.h>
39 #include <sys/sysctl.h>
40 #include <sys/errno.h>
41 #include <sys/random.h>
42 #include <sys/kernel.h>
45 #include <sys/rwlock.h>
46 #include <sys/endian.h>
47 #include <sys/limits.h>
48 #include <sys/mutex.h>
50 #include <crypto/blowfish/blowfish.h>
51 #include <crypto/sha1.h>
52 #include <opencrypto/rmd160.h>
55 #include <opencrypto/cryptodev.h>
56 #include <opencrypto/xform.h>
60 #include "cryptodev_if.h"
/*
 * NOTE(review): fragments of the per-session state structures
 * (struct swcr_auth / swcr_encdec / swcr_compdec / swcr_session);
 * the enclosing struct declarations are not visible in this extract.
 */
65 struct auth_hash *sw_axf;
/* Opaque per-cipher key schedule, allocated by exf->setkey(). */
70 uint8_t *sw_kschedule;
71 struct enc_xform *sw_exf;
75 struct comp_algo *sw_cxf;
/* Per-session dispatch hook selected at newsession time. */
80 int (*swcr_process)(struct swcr_session *, struct cryptop *);
82 struct swcr_auth swcr_auth;
83 struct swcr_encdec swcr_encdec;
84 struct swcr_compdec swcr_compdec;
/* Driver id assigned by crypto_get_driverid() in swcr_attach(). */
87 static int32_t swcr_id;
89 static void swcr_freesession(device_t dev, crypto_session_t cses);
91 /* Used for CRYPTO_NULL_CBC. */
/* No-op handler for NULL-cipher sessions; payload passes through untouched. */
93 swcr_null(struct swcr_session *ses, struct cryptop *crp)
100 * Apply a symmetric encryption/decryption algorithm.
/*
 * Apply the session's symmetric cipher to the request payload in place.
 * Walks the request buffer as a struct uio, handling blocks that straddle
 * iovec boundaries by bounce-copying through blk[].
 * NOTE(review): interior source lines are missing from this extract;
 * comments describe only the visible code.
 */
103 swcr_encdec(struct swcr_session *ses, struct cryptop *crp)
105 unsigned char iv[EALG_MAX_BLOCK_LEN], blk[EALG_MAX_BLOCK_LEN];
106 unsigned char *ivp, *nivp, iv2[EALG_MAX_BLOCK_LEN];
107 const struct crypto_session_params *csp;
108 struct swcr_encdec *sw;
109 struct enc_xform *exf;
110 int i, j, k, blks, ind, count, ivlen;
111 struct uio *uio, uiolcl;
112 struct iovec iovlcl[4];
114 int iovcnt, iovalloc;
120 sw = &ses->swcr_encdec;
122 blks = exf->blocksize;
125 /* Check for non-padded data */
/* Payload must be a whole number of cipher blocks. */
126 if ((crp->crp_payload_length % blks) != 0)
/* AES-ICM (CTR) without a separate IV is rejected; see FreeBSD r361481 era code. */
129 if (exf == &enc_xform_aes_icm &&
130 (crp->crp_flags & CRYPTO_F_IV_SEPARATE) == 0)
133 crypto_read_iv(crp, iv);
/* Per-request key overrides the session key: rekey the schedule here. */
135 if (crp->crp_cipher_key != NULL) {
136 if (sw->sw_kschedule)
137 exf->zerokey(&(sw->sw_kschedule));
139 csp = crypto_get_params(crp->crp_session);
140 error = exf->setkey(&sw->sw_kschedule,
141 crp->crp_cipher_key, csp->csp_cipher_klen);
/* Start with the small on-stack iovec array; grow to a heap copy if needed. */
147 iovcnt = nitems(iovlcl);
150 switch (crp->crp_buf_type) {
151 case CRYPTO_BUF_MBUF:
152 error = crypto_mbuftoiov(crp->crp_mbuf, &iov, &iovcnt,
157 uio->uio_iovcnt = iovcnt;
162 case CRYPTO_BUF_CONTIG:
/* Contiguous buffer maps to a single iovec. */
163 iov[0].iov_base = crp->crp_buf;
164 iov[0].iov_len = crp->crp_ilen;
174 * xforms that provide a reinit method perform all IV
175 * handling themselves.
177 exf->reinit(sw->sw_kschedule, iv);
/* Locate the iovec (ind) and offset (k) of the payload start. */
180 count = crp->crp_payload_start;
181 ind = cuio_getptr(uio, count, &k);
187 i = crp->crp_payload_length;
188 encrypting = CRYPTO_OP_IS_ENCRYPT(crp->crp_op);
192 * If there's insufficient data at the end of
193 * an iovec, we have to do some copying.
195 if (uio->uio_iov[ind].iov_len < k + blks &&
196 uio->uio_iov[ind].iov_len != k) {
/* Bounce the split block through blk[]. */
197 cuio_copydata(uio, count, blks, blk);
199 /* Actual encryption/decryption */
202 exf->encrypt(sw->sw_kschedule,
205 exf->decrypt(sw->sw_kschedule,
208 } else if (encrypting) {
209 /* XOR with previous block */
/* Manual CBC chaining for xforms without reinit. */
210 for (j = 0; j < blks; j++)
213 exf->encrypt(sw->sw_kschedule, blk);
216 * Keep encrypted block for XOR'ing
219 bcopy(blk, iv, blks);
221 } else { /* decrypt */
223 * Keep encrypted block for XOR'ing
/* Ping-pong between iv and iv2 so the prior ciphertext survives decryption. */
226 nivp = (ivp == iv) ? iv2 : iv;
227 bcopy(blk, nivp, blks);
229 exf->decrypt(sw->sw_kschedule, blk);
231 /* XOR with previous block */
232 for (j = 0; j < blks; j++)
238 /* Copy back decrypted block */
239 cuio_copyback(uio, count, blks, blk);
243 /* Advance pointer */
244 ind = cuio_getptr(uio, count, &k);
252 /* Could be done... */
/* Fast path: operate directly on data that is contiguous within an iovec. */
257 while (uio->uio_iov[ind].iov_len >= k + blks && i > 0) {
263 uio->uio_iov[ind].iov_len - (size_t)k);
264 idat = (uint8_t *)uio->uio_iov[ind].iov_base + k;
267 if (encrypting && exf->encrypt_multi == NULL)
268 exf->encrypt(sw->sw_kschedule,
270 else if (encrypting) {
/* Multi-block xform: process as many whole blocks as fit in this iovec. */
271 nb = rounddown(rem, blks);
272 exf->encrypt_multi(sw->sw_kschedule,
274 } else if (exf->decrypt_multi == NULL)
275 exf->decrypt(sw->sw_kschedule,
278 nb = rounddown(rem, blks);
279 exf->decrypt_multi(sw->sw_kschedule,
282 } else if (encrypting) {
283 /* XOR with previous block/IV */
284 for (j = 0; j < blks; j++)
287 exf->encrypt(sw->sw_kschedule, idat);
289 } else { /* decrypt */
291 * Keep encrypted block to be used
292 * in next block's processing.
294 nivp = (ivp == iv) ? iv2 : iv;
295 bcopy(idat, nivp, blks);
297 exf->decrypt(sw->sw_kschedule, idat);
299 /* XOR with previous block/IV */
300 for (j = 0; j < blks; j++)
312 * Advance to the next iov if the end of the current iov
313 * is aligned with the end of a cipher block.
314 * Note that the code is equivalent to calling:
315 * ind = cuio_getptr(uio, count, &k);
317 if (i > 0 && k == uio->uio_iov[ind].iov_len) {
320 if (ind >= uio->uio_iovcnt) {
/* Release heap iovec array if crypto_mbuftoiov() allocated one. */
329 free(iov, M_CRYPTO_DATA);
/*
 * Initialize the session's inner/outer hash contexts for the given key.
 * HMAC-family algorithms precompute ipad/opad states; keyed one-shot
 * MACs (e.g. Poly1305, Blake2) set the key directly on the inner context.
 */
335 swcr_authprepare(struct auth_hash *axf, struct swcr_auth *sw,
336 const uint8_t *key, int klen)
340 case CRYPTO_MD5_HMAC:
341 case CRYPTO_SHA1_HMAC:
342 case CRYPTO_SHA2_224_HMAC:
343 case CRYPTO_SHA2_256_HMAC:
344 case CRYPTO_SHA2_384_HMAC:
345 case CRYPTO_SHA2_512_HMAC:
346 case CRYPTO_NULL_HMAC:
347 case CRYPTO_RIPEMD160_HMAC:
/* Precompute the two HMAC pad states so per-request work is two hashes. */
348 hmac_init_ipad(axf, key, klen, sw->sw_ictx);
349 hmac_init_opad(axf, key, klen, sw->sw_octx);
351 case CRYPTO_POLY1305:
354 axf->Setkey(sw->sw_ictx, key, klen);
355 axf->Init(sw->sw_ictx);
/* Reaching here with an unkeyed algorithm is a caller bug. */
358 panic("%s: algorithm %d doesn't use keys", __func__, axf->type);
363 * Compute or verify hash.
/*
 * Compute (or verify, under CRYPTO_OP_VERIFY_DIGEST) the request digest.
 * Hashes AAD then payload into a copy of the precomputed inner context,
 * finishing with the outer context for HMAC modes.
 */
366 swcr_authcompute(struct swcr_session *ses, struct cryptop *crp)
368 u_char aalg[HASH_MAX_LEN];
369 u_char uaalg[HASH_MAX_LEN];
370 const struct crypto_session_params *csp;
371 struct swcr_auth *sw;
372 struct auth_hash *axf;
376 sw = &ses->swcr_auth;
/* Per-request auth key overrides the session key. */
380 if (crp->crp_auth_key != NULL) {
381 csp = crypto_get_params(crp->crp_session);
382 swcr_authprepare(axf, sw, crp->crp_auth_key,
/* Work on a copy so the session's precomputed state stays pristine. */
386 bcopy(sw->sw_ictx, &ctx, axf->ctxsize);
388 err = crypto_apply(crp, crp->crp_aad_start, crp->crp_aad_length,
389 (int (*)(void *, void *, unsigned int))axf->Update, &ctx);
393 err = crypto_apply(crp, crp->crp_payload_start, crp->crp_payload_length,
394 (int (*)(void *, void *, unsigned int))axf->Update, &ctx);
400 case CRYPTO_SHA2_224:
401 case CRYPTO_SHA2_256:
402 case CRYPTO_SHA2_384:
403 case CRYPTO_SHA2_512:
/* Plain (unkeyed) digests finalize directly. */
404 axf->Final(aalg, &ctx);
407 case CRYPTO_MD5_HMAC:
408 case CRYPTO_SHA1_HMAC:
409 case CRYPTO_SHA2_224_HMAC:
410 case CRYPTO_SHA2_256_HMAC:
411 case CRYPTO_SHA2_384_HMAC:
412 case CRYPTO_SHA2_512_HMAC:
413 case CRYPTO_RIPEMD160_HMAC:
414 if (sw->sw_octx == NULL)
/* HMAC: inner hash, then hash that result under the outer pad state. */
417 axf->Final(aalg, &ctx);
418 bcopy(sw->sw_octx, &ctx, axf->ctxsize);
419 axf->Update(&ctx, aalg, axf->hashsize);
420 axf->Final(aalg, &ctx);
425 case CRYPTO_NULL_HMAC:
426 case CRYPTO_POLY1305:
427 axf->Final(aalg, &ctx);
431 if (crp->crp_op & CRYPTO_OP_VERIFY_DIGEST) {
/* Constant-time compare against the digest supplied in the request. */
432 crypto_copydata(crp, crp->crp_digest_start, sw->sw_mlen, uaalg);
433 if (timingsafe_bcmp(aalg, uaalg, sw->sw_mlen) != 0)
436 /* Inject the authentication data */
437 crypto_copyback(crp, crp->crp_digest_start, sw->sw_mlen, aalg);
/* Compile-time bounds from the GCM spec on payload and AAD lengths. */
442 CTASSERT(INT_MAX <= (1ll<<39) - 256); /* GCM: plain text < 2^39-256 */
443 CTASSERT(INT_MAX <= (uint64_t)-1); /* GCM: associated data <= 2^64-1 */
/*
 * AES-GMAC (authentication only): GHASH the payload as AAD, append the
 * bit-length block, and emit/verify the tag. No data is encrypted.
 * NOTE(review): interior source lines are elided in this extract.
 */
446 swcr_gmac(struct swcr_session *ses, struct cryptop *crp)
/* uint32_t-aligned block buffer so length words can be stored directly. */
448 uint32_t blkbuf[howmany(EALG_MAX_BLOCK_LEN, sizeof(uint32_t))];
449 u_char *blk = (u_char *)blkbuf;
450 u_char aalg[AALG_MAX_RESULT_LEN];
451 u_char uaalg[AALG_MAX_RESULT_LEN];
452 u_char iv[EALG_MAX_BLOCK_LEN];
454 struct swcr_auth *swa;
455 struct auth_hash *axf;
457 int blksz, i, ivlen, len;
459 swa = &ses->swcr_auth;
462 bcopy(swa->sw_ictx, &ctx, axf->ctxsize);
463 blksz = axf->blocksize;
465 /* Initialize the IV */
466 ivlen = AES_GCM_IV_LEN;
467 crypto_read_iv(crp, iv);
469 axf->Reinit(&ctx, iv, ivlen);
/* Feed payload in full blocks, zero-padding the tail. */
470 for (i = 0; i < crp->crp_payload_length; i += blksz) {
471 len = MIN(crp->crp_payload_length - i, blksz);
472 crypto_copydata(crp, crp->crp_payload_start + i, len, blk);
473 bzero(blk + len, blksz - len);
474 axf->Update(&ctx, blk, blksz);
/* length block: payload bit count stored big-endian in the second word. */
479 blkp = (uint32_t *)blk + 1;
480 *blkp = htobe32(crp->crp_payload_length * 8);
481 axf->Update(&ctx, blk, blksz);
484 axf->Final(aalg, &ctx);
486 if (crp->crp_op & CRYPTO_OP_VERIFY_DIGEST) {
487 crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
489 if (timingsafe_bcmp(aalg, uaalg, swa->sw_mlen) != 0)
492 /* Inject the authentication data */
493 crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen, aalg);
/*
 * AES-GCM AEAD: GHASH the AAD, encrypt-and-hash (or hash-then-decrypt)
 * the payload, append the AAD/payload bit-length block, then emit or
 * verify the tag. Decryption of data is deferred until the tag verifies.
 * NOTE(review): interior source lines are elided in this extract.
 */
499 swcr_gcm(struct swcr_session *ses, struct cryptop *crp)
501 uint32_t blkbuf[howmany(EALG_MAX_BLOCK_LEN, sizeof(uint32_t))];
502 u_char *blk = (u_char *)blkbuf;
503 u_char aalg[AALG_MAX_RESULT_LEN];
504 u_char uaalg[AALG_MAX_RESULT_LEN];
505 u_char iv[EALG_MAX_BLOCK_LEN];
507 struct swcr_auth *swa;
508 struct swcr_encdec *swe;
509 struct auth_hash *axf;
510 struct enc_xform *exf;
512 int blksz, i, ivlen, len, r;
514 swa = &ses->swcr_auth;
517 bcopy(swa->sw_ictx, &ctx, axf->ctxsize);
518 blksz = axf->blocksize;
520 swe = &ses->swcr_encdec;
/* GCM requires the IV to arrive via crp_iv (CRYPTO_F_IV_SEPARATE). */
523 if ((crp->crp_flags & CRYPTO_F_IV_SEPARATE) == 0)
526 /* Initialize the IV */
527 ivlen = AES_GCM_IV_LEN;
528 bcopy(crp->crp_iv, iv, ivlen);
530 /* Supply MAC with IV */
531 axf->Reinit(&ctx, iv, ivlen);
533 /* Supply MAC with AAD */
534 for (i = 0; i < crp->crp_aad_length; i += blksz) {
535 len = MIN(crp->crp_aad_length - i, blksz);
536 crypto_copydata(crp, crp->crp_aad_start + i, len, blk);
537 bzero(blk + len, blksz - len);
538 axf->Update(&ctx, blk, blksz);
541 exf->reinit(swe->sw_kschedule, iv);
543 /* Do encryption with MAC */
544 for (i = 0; i < crp->crp_payload_length; i += len) {
545 len = MIN(crp->crp_payload_length - i, blksz);
548 crypto_copydata(crp, crp->crp_payload_start + i, len, blk);
549 if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
/* GCM hashes ciphertext: encrypt first, then feed the MAC. */
550 exf->encrypt(swe->sw_kschedule, blk);
551 axf->Update(&ctx, blk, len);
552 crypto_copyback(crp, crp->crp_payload_start + i, len,
/* Decrypt path: hash the ciphertext now; plaintext recovery happens below. */
555 axf->Update(&ctx, blk, len);
/* length block: AAD bits in word 1, payload bits in word 3 (big-endian). */
561 blkp = (uint32_t *)blk + 1;
562 *blkp = htobe32(crp->crp_aad_length * 8);
563 blkp = (uint32_t *)blk + 3;
564 *blkp = htobe32(crp->crp_payload_length * 8);
565 axf->Update(&ctx, blk, blksz);
568 axf->Final(aalg, &ctx);
571 if (!CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
572 crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
575 r = timingsafe_bcmp(aalg, uaalg, swa->sw_mlen);
579 /* tag matches, decrypt data */
580 for (i = 0; i < crp->crp_payload_length; i += blksz) {
581 len = MIN(crp->crp_payload_length - i, blksz);
584 crypto_copydata(crp, crp->crp_payload_start + i, len,
586 exf->decrypt(swe->sw_kschedule, blk);
587 crypto_copyback(crp, crp->crp_payload_start + i, len,
591 /* Inject the authentication data */
592 crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen,
/*
 * AES-CCM CBC-MAC (authentication only): the payload is authenticated as
 * AAD (authDataLength = payload length, cryptDataLength = 0), then the
 * tag is emitted or verified.
 * NOTE(review): interior source lines are elided in this extract.
 */
600 swcr_ccm_cbc_mac(struct swcr_session *ses, struct cryptop *crp)
602 uint32_t blkbuf[howmany(EALG_MAX_BLOCK_LEN, sizeof(uint32_t))];
603 u_char *blk = (u_char *)blkbuf;
604 u_char aalg[AALG_MAX_RESULT_LEN];
605 u_char uaalg[AALG_MAX_RESULT_LEN];
606 u_char iv[EALG_MAX_BLOCK_LEN];
608 struct swcr_auth *swa;
609 struct auth_hash *axf;
610 int blksz, i, ivlen, len;
612 swa = &ses->swcr_auth;
615 bcopy(swa->sw_ictx, &ctx, axf->ctxsize);
616 blksz = axf->blocksize;
618 /* Initialize the IV */
619 ivlen = AES_CCM_IV_LEN;
620 crypto_read_iv(crp, iv);
623 * AES CCM-CBC-MAC needs to know the length of both the auth
624 * data and payload data before doing the auth computation.
/* MAC-only mode: everything is auth data, nothing is encrypted. */
626 ctx.aes_cbc_mac_ctx.authDataLength = crp->crp_payload_length;
627 ctx.aes_cbc_mac_ctx.cryptDataLength = 0;
629 axf->Reinit(&ctx, iv, ivlen);
630 for (i = 0; i < crp->crp_payload_length; i += blksz) {
631 len = MIN(crp->crp_payload_length - i, blksz);
632 crypto_copydata(crp, crp->crp_payload_start + i, len, blk);
633 bzero(blk + len, blksz - len);
634 axf->Update(&ctx, blk, blksz);
638 axf->Final(aalg, &ctx);
640 if (crp->crp_op & CRYPTO_OP_VERIFY_DIGEST) {
641 crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
643 if (timingsafe_bcmp(aalg, uaalg, swa->sw_mlen) != 0)
646 /* Inject the authentication data */
647 crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen, aalg);
/*
 * AES-CCM AEAD. Unlike GCM, CCM authenticates the *plaintext*, so on
 * decrypt the data must be decrypted once to compute the tag and, after
 * the tag verifies, decrypted again (cipher reinitialized) to produce
 * the output.
 * NOTE(review): interior source lines are elided in this extract.
 */
653 swcr_ccm(struct swcr_session *ses, struct cryptop *crp)
655 uint32_t blkbuf[howmany(EALG_MAX_BLOCK_LEN, sizeof(uint32_t))];
656 u_char *blk = (u_char *)blkbuf;
657 u_char aalg[AALG_MAX_RESULT_LEN];
658 u_char uaalg[AALG_MAX_RESULT_LEN];
659 u_char iv[EALG_MAX_BLOCK_LEN];
661 struct swcr_auth *swa;
662 struct swcr_encdec *swe;
663 struct auth_hash *axf;
664 struct enc_xform *exf;
665 int blksz, i, ivlen, len, r;
667 swa = &ses->swcr_auth;
670 bcopy(swa->sw_ictx, &ctx, axf->ctxsize);
671 blksz = axf->blocksize;
673 swe = &ses->swcr_encdec;
/* CCM requires the IV to arrive via crp_iv (CRYPTO_F_IV_SEPARATE). */
676 if ((crp->crp_flags & CRYPTO_F_IV_SEPARATE) == 0)
679 /* Initialize the IV */
680 ivlen = AES_CCM_IV_LEN;
681 bcopy(crp->crp_iv, iv, ivlen);
684 * AES CCM-CBC-MAC needs to know the length of both the auth
685 * data and payload data before doing the auth computation.
687 ctx.aes_cbc_mac_ctx.authDataLength = crp->crp_aad_length;
688 ctx.aes_cbc_mac_ctx.cryptDataLength = crp->crp_payload_length;
690 /* Supply MAC with IV */
691 axf->Reinit(&ctx, iv, ivlen);
693 /* Supply MAC with AAD */
694 for (i = 0; i < crp->crp_aad_length; i += blksz) {
695 len = MIN(crp->crp_aad_length - i, blksz);
696 crypto_copydata(crp, crp->crp_aad_start + i, len, blk);
697 bzero(blk + len, blksz - len);
698 axf->Update(&ctx, blk, blksz);
701 exf->reinit(swe->sw_kschedule, iv);
703 /* Do encryption/decryption with MAC */
704 for (i = 0; i < crp->crp_payload_length; i += len) {
705 len = MIN(crp->crp_payload_length - i, blksz);
708 crypto_copydata(crp, crp->crp_payload_start + i, len, blk);
709 if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
/* CCM hashes plaintext: feed the MAC first, then encrypt. */
710 axf->Update(&ctx, blk, len);
711 exf->encrypt(swe->sw_kschedule, blk);
712 crypto_copyback(crp, crp->crp_payload_start + i, len,
716 * One of the problems with CCM+CBC is that
717 * the authentication is done on the
718 * unecncrypted data. As a result, we have to
719 * decrypt the data twice: once to generate
720 * the tag and a second time after the tag is
723 exf->decrypt(swe->sw_kschedule, blk);
724 axf->Update(&ctx, blk, len);
729 axf->Final(aalg, &ctx);
732 if (!CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
733 crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
736 r = timingsafe_bcmp(aalg, uaalg, swa->sw_mlen);
740 /* tag matches, decrypt data */
/* Second decryption pass: restart the counter from the IV. */
741 exf->reinit(swe->sw_kschedule, iv);
742 for (i = 0; i < crp->crp_payload_length; i += blksz) {
743 len = MIN(crp->crp_payload_length - i, blksz);
746 crypto_copydata(crp, crp->crp_payload_start + i, len,
748 exf->decrypt(swe->sw_kschedule, blk);
749 crypto_copyback(crp, crp->crp_payload_start + i, len,
753 /* Inject the authentication data */
754 crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen,
762 * Apply a cipher and a digest to perform EtA.
/*
 * Encrypt-then-Authenticate composition: encrypt before hashing when
 * encrypting, verify the digest before decrypting when decrypting.
 */
765 swcr_eta(struct swcr_session *ses, struct cryptop *crp)
769 if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
770 error = swcr_encdec(ses, crp);
772 error = swcr_authcompute(ses, crp);
/* Decrypt direction: authenticate first, then decrypt. */
774 error = swcr_authcompute(ses, crp);
776 error = swcr_encdec(ses, crp);
782 * Apply a compression/decompression algorithm
/*
 * (De)compress the payload through the session's comp_algo. The whole
 * payload is linearized into a temporary buffer, transformed, copied
 * back, and the request buffer trimmed if the result shrank.
 * NOTE(review): interior source lines are elided in this extract.
 */
785 swcr_compdec(struct swcr_session *ses, struct cryptop *crp)
787 u_int8_t *data, *out;
788 struct comp_algo *cxf;
792 cxf = ses->swcr_compdec.sw_cxf;
794 /* We must handle the whole buffer of data in one time
795 * then if there is not all the data in the mbuf, we must
/* Linearize: the compressor needs one contiguous input buffer. */
799 data = malloc(crp->crp_payload_length, M_CRYPTO_DATA, M_NOWAIT);
802 crypto_copydata(crp, crp->crp_payload_start, crp->crp_payload_length,
805 if (CRYPTO_OP_IS_COMPRESS(crp->crp_op))
/* compress/decompress allocate *out; result is the output byte count. */
806 result = cxf->compress(data, crp->crp_payload_length, &out);
808 result = cxf->decompress(data, crp->crp_payload_length, &out);
810 free(data, M_CRYPTO_DATA);
813 crp->crp_olen = result;
815 /* Check the compressed size when doing compression */
816 if (CRYPTO_OP_IS_COMPRESS(crp->crp_op)) {
817 if (result >= crp->crp_payload_length) {
818 /* Compression was useless, we lost time */
819 free(out, M_CRYPTO_DATA);
824 /* Copy back the (de)compressed data. m_copyback is
825 * extending the mbuf as necessary.
827 crypto_copyback(crp, crp->crp_payload_start, result, out);
/* Shrink the buffer to the (smaller) result size. */
828 if (result < crp->crp_payload_length) {
829 switch (crp->crp_buf_type) {
830 case CRYPTO_BUF_MBUF:
/* negative adj trims bytes from the mbuf chain tail. */
831 adj = result - crp->crp_payload_length;
832 m_adj(crp->crp_mbuf, adj);
834 case CRYPTO_BUF_UIO: {
835 struct uio *uio = crp->crp_uio;
838 adj = crp->crp_payload_length - result;
839 ind = uio->uio_iovcnt - 1;
/* Walk iovecs from the tail, shortening/zeroing until adj is consumed. */
841 while (adj > 0 && ind >= 0) {
842 if (adj < uio->uio_iov[ind].iov_len) {
843 uio->uio_iov[ind].iov_len -= adj;
847 adj -= uio->uio_iov[ind].iov_len;
848 uio->uio_iov[ind].iov_len = 0;
856 free(out, M_CRYPTO_DATA);
/*
 * Session setup for a plain cipher: record the xform and, if the session
 * key is supplied now, build the key schedule (a per-request key may
 * also arrive later via crp_cipher_key).
 */
861 swcr_setup_encdec(struct swcr_session *ses,
862 const struct crypto_session_params *csp)
864 struct swcr_encdec *swe;
865 struct enc_xform *txf;
868 swe = &ses->swcr_encdec;
869 txf = crypto_cipher(csp);
/* probesession already validated the IV length; assert it here. */
870 MPASS(txf->ivsize == csp->csp_ivlen);
871 if (csp->csp_cipher_key != NULL) {
872 error = txf->setkey(&swe->sw_kschedule,
873 csp->csp_cipher_key, csp->csp_cipher_klen);
/*
 * Session setup for an auth algorithm: size the MAC, allocate the inner
 * (and for HMAC, outer) context, key it if a key is available, and pick
 * the digest-mode process hook.
 * NOTE(review): interior source lines are elided in this extract.
 */
882 swcr_setup_auth(struct swcr_session *ses,
883 const struct crypto_session_params *csp)
885 struct swcr_auth *swa;
886 struct auth_hash *axf;
888 swa = &ses->swcr_auth;
890 axf = crypto_auth_hash(csp);
/* mlen == 0 means "full digest"; larger than hashsize is invalid. */
892 if (csp->csp_auth_mlen < 0 || csp->csp_auth_mlen > axf->hashsize)
894 if (csp->csp_auth_mlen == 0)
895 swa->sw_mlen = axf->hashsize;
897 swa->sw_mlen = csp->csp_auth_mlen;
898 swa->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA, M_NOWAIT);
899 if (swa->sw_ictx == NULL)
902 switch (csp->csp_auth_alg) {
903 case CRYPTO_MD5_HMAC:
904 case CRYPTO_SHA1_HMAC:
905 case CRYPTO_SHA2_224_HMAC:
906 case CRYPTO_SHA2_256_HMAC:
907 case CRYPTO_SHA2_384_HMAC:
908 case CRYPTO_SHA2_512_HMAC:
909 case CRYPTO_NULL_HMAC:
910 case CRYPTO_RIPEMD160_HMAC:
/* HMAC additionally needs the outer-pad context. */
911 swa->sw_octx = malloc(axf->ctxsize, M_CRYPTO_DATA,
913 if (swa->sw_octx == NULL)
916 if (csp->csp_auth_key != NULL) {
917 swcr_authprepare(axf, swa, csp->csp_auth_key,
921 if (csp->csp_mode == CSP_MODE_DIGEST)
922 ses->swcr_process = swcr_authcompute;
925 case CRYPTO_SHA2_224:
926 case CRYPTO_SHA2_256:
927 case CRYPTO_SHA2_384:
928 case CRYPTO_SHA2_512:
/* Unkeyed digests: just initialize the context. */
929 axf->Init(swa->sw_ictx);
930 if (csp->csp_mode == CSP_MODE_DIGEST)
931 ses->swcr_process = swcr_authcompute;
933 case CRYPTO_AES_NIST_GMAC:
934 axf->Init(swa->sw_ictx);
935 axf->Setkey(swa->sw_ictx, csp->csp_auth_key,
937 if (csp->csp_mode == CSP_MODE_DIGEST)
938 ses->swcr_process = swcr_gmac;
940 case CRYPTO_POLY1305:
944 * Blake2b and Blake2s support an optional key but do
/* Key only if one was supplied (Blake2's key is optional). */
947 if (csp->csp_auth_klen == 0 || csp->csp_auth_key != NULL)
948 axf->Setkey(swa->sw_ictx, csp->csp_auth_key,
950 axf->Init(swa->sw_ictx);
951 if (csp->csp_mode == CSP_MODE_DIGEST)
952 ses->swcr_process = swcr_authcompute;
954 case CRYPTO_AES_CCM_CBC_MAC:
955 axf->Init(swa->sw_ictx);
956 axf->Setkey(swa->sw_ictx, csp->csp_auth_key,
958 if (csp->csp_mode == CSP_MODE_DIGEST)
959 ses->swcr_process = swcr_ccm_cbc_mac;
/*
 * AEAD session setup for AES-GCM: choose the GMAC flavor from the AES
 * key size, initialize/key the auth context, then key the GCM cipher.
 * NOTE(review): interior source lines are elided in this extract.
 */
967 swcr_setup_gcm(struct swcr_session *ses,
968 const struct crypto_session_params *csp)
970 struct swcr_encdec *swe;
971 struct swcr_auth *swa;
972 struct enc_xform *txf;
973 struct auth_hash *axf;
/* This vintage only supports the standard 12-byte GCM nonce. */
976 if (csp->csp_ivlen != AES_GCM_IV_LEN)
979 /* First, setup the auth side. */
980 swa = &ses->swcr_auth;
981 switch (csp->csp_cipher_klen * 8) {
983 axf = &auth_hash_nist_gmac_aes_128;
986 axf = &auth_hash_nist_gmac_aes_192;
989 axf = &auth_hash_nist_gmac_aes_256;
995 if (csp->csp_auth_mlen < 0 || csp->csp_auth_mlen > axf->hashsize)
997 if (csp->csp_auth_mlen == 0)
998 swa->sw_mlen = axf->hashsize;
1000 swa->sw_mlen = csp->csp_auth_mlen;
1001 swa->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA, M_NOWAIT);
1002 if (swa->sw_ictx == NULL)
1004 axf->Init(swa->sw_ictx);
/* GCM derives both MAC and cipher state from the one cipher key. */
1005 if (csp->csp_cipher_key != NULL)
1006 axf->Setkey(swa->sw_ictx, csp->csp_cipher_key,
1007 csp->csp_cipher_klen);
1009 /* Second, setup the cipher side. */
1010 swe = &ses->swcr_encdec;
1011 txf = &enc_xform_aes_nist_gcm;
1012 if (csp->csp_cipher_key != NULL) {
1013 error = txf->setkey(&swe->sw_kschedule,
1014 csp->csp_cipher_key, csp->csp_cipher_klen);
/*
 * AEAD session setup for AES-CCM: mirrors swcr_setup_gcm() with the
 * CBC-MAC flavors and the CCM cipher xform.
 * NOTE(review): interior source lines are elided in this extract.
 */
1024 swcr_setup_ccm(struct swcr_session *ses,
1025 const struct crypto_session_params *csp)
1027 struct swcr_encdec *swe;
1028 struct swcr_auth *swa;
1029 struct enc_xform *txf;
1030 struct auth_hash *axf;
1033 if (csp->csp_ivlen != AES_CCM_IV_LEN)
1036 /* First, setup the auth side. */
1037 swa = &ses->swcr_auth;
1038 switch (csp->csp_cipher_klen * 8) {
1040 axf = &auth_hash_ccm_cbc_mac_128;
1043 axf = &auth_hash_ccm_cbc_mac_192;
1046 axf = &auth_hash_ccm_cbc_mac_256;
1052 if (csp->csp_auth_mlen < 0 || csp->csp_auth_mlen > axf->hashsize)
1054 if (csp->csp_auth_mlen == 0)
1055 swa->sw_mlen = axf->hashsize;
1057 swa->sw_mlen = csp->csp_auth_mlen;
1058 swa->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA, M_NOWAIT);
1059 if (swa->sw_ictx == NULL)
1061 axf->Init(swa->sw_ictx);
/* CCM also keys both MAC and cipher from the single cipher key. */
1062 if (csp->csp_cipher_key != NULL)
1063 axf->Setkey(swa->sw_ictx, csp->csp_cipher_key,
1064 csp->csp_cipher_klen);
1066 /* Second, setup the cipher side. */
1067 swe = &ses->swcr_encdec;
1068 txf = &enc_xform_ccm;
1069 if (csp->csp_cipher_key != NULL) {
1070 error = txf->setkey(&swe->sw_kschedule,
1071 csp->csp_cipher_key, csp->csp_cipher_klen);
/*
 * Return whether this driver can service the requested auth algorithm,
 * validating key lengths (GMAC/CCM-CBC-MAC/Poly1305) and IV lengths.
 * NOTE(review): interior source lines are elided in this extract.
 */
1081 swcr_auth_supported(const struct crypto_session_params *csp)
1083 struct auth_hash *axf;
1085 axf = crypto_auth_hash(csp);
1088 switch (csp->csp_auth_alg) {
1089 case CRYPTO_MD5_HMAC:
1090 case CRYPTO_SHA1_HMAC:
1091 case CRYPTO_SHA2_224_HMAC:
1092 case CRYPTO_SHA2_256_HMAC:
1093 case CRYPTO_SHA2_384_HMAC:
1094 case CRYPTO_SHA2_512_HMAC:
1095 case CRYPTO_NULL_HMAC:
1096 case CRYPTO_RIPEMD160_HMAC:
1098 case CRYPTO_AES_NIST_GMAC:
/* GMAC requires an AES-sized key, a session key, and a 12-byte IV. */
1099 switch (csp->csp_auth_klen * 8) {
1107 if (csp->csp_auth_key == NULL)
1109 if (csp->csp_ivlen != AES_GCM_IV_LEN)
1112 case CRYPTO_POLY1305:
1113 if (csp->csp_auth_klen != POLY1305_KEY_LEN)
1116 case CRYPTO_AES_CCM_CBC_MAC:
1117 switch (csp->csp_auth_klen * 8) {
1125 if (csp->csp_auth_key == NULL)
1127 if (csp->csp_ivlen != AES_CCM_IV_LEN)
/*
 * Return whether this driver can service the requested cipher: a known
 * xform whose IV size matches, except NULL-CBC which ignores the IV.
 */
1135 swcr_cipher_supported(const struct crypto_session_params *csp)
1137 struct enc_xform *txf;
1139 txf = crypto_cipher(csp);
1142 if (csp->csp_cipher_alg != CRYPTO_NULL_CBC &&
1143 txf->ivsize != csp->csp_ivlen)
/*
 * cryptodev_probesession method: decide per-mode whether the requested
 * parameter combination is supported, rejecting AEAD algorithms in the
 * plain-cipher and ETA modes.
 * NOTE(review): interior source lines are elided in this extract.
 */
1149 swcr_probesession(device_t dev, const struct crypto_session_params *csp)
/* No session flags are supported by the software driver. */
1152 if (csp->csp_flags != 0)
1154 switch (csp->csp_mode) {
1155 case CSP_MODE_COMPRESS:
1156 switch (csp->csp_cipher_alg) {
1157 case CRYPTO_DEFLATE_COMP:
1163 case CSP_MODE_CIPHER:
1164 switch (csp->csp_cipher_alg) {
/* AEAD algorithms must use CSP_MODE_AEAD, not plain cipher mode. */
1165 case CRYPTO_AES_NIST_GCM_16:
1166 case CRYPTO_AES_CCM_16:
1169 if (!swcr_cipher_supported(csp))
1174 case CSP_MODE_DIGEST:
1175 if (!swcr_auth_supported(csp))
1179 switch (csp->csp_cipher_alg) {
1180 case CRYPTO_AES_NIST_GCM_16:
1181 case CRYPTO_AES_CCM_16:
1188 /* AEAD algorithms cannot be used for EtA. */
1189 switch (csp->csp_cipher_alg) {
1190 case CRYPTO_AES_NIST_GCM_16:
1191 case CRYPTO_AES_CCM_16:
1194 switch (csp->csp_auth_alg) {
1195 case CRYPTO_AES_NIST_GMAC:
1196 case CRYPTO_AES_CCM_CBC_MAC:
1200 if (!swcr_cipher_supported(csp) ||
1201 !swcr_auth_supported(csp))
/* Software fallback: lowest probe priority so h/w drivers win. */
1208 return (CRYPTODEV_PROBE_SOFTWARE);
1212 * Generate a new software session.
/*
 * cryptodev_newsession method: initialize per-session state and select
 * the swcr_process handler according to the session mode. On any setup
 * failure the partially built session is torn down via swcr_freesession.
 * NOTE(review): interior source lines are elided in this extract.
 */
1215 swcr_newsession(device_t dev, crypto_session_t cses,
1216 const struct crypto_session_params *csp)
1218 struct swcr_session *ses;
1219 struct swcr_encdec *swe;
1220 struct swcr_auth *swa;
1221 struct comp_algo *cxf;
1224 ses = crypto_get_driver_session(cses);
/* Per-session lock serializes swcr_process() calls on this session. */
1225 mtx_init(&ses->swcr_lock, "swcr session lock", NULL, MTX_DEF);
1228 swe = &ses->swcr_encdec;
1229 swa = &ses->swcr_auth;
1230 switch (csp->csp_mode) {
1231 case CSP_MODE_COMPRESS:
1232 switch (csp->csp_cipher_alg) {
1233 case CRYPTO_DEFLATE_COMP:
1234 cxf = &comp_algo_deflate;
/* probesession filtered algorithms, so anything else is a bug. */
1238 panic("bad compression algo");
1241 ses->swcr_compdec.sw_cxf = cxf;
1242 ses->swcr_process = swcr_compdec;
1244 case CSP_MODE_CIPHER:
1245 switch (csp->csp_cipher_alg) {
1246 case CRYPTO_NULL_CBC:
1247 ses->swcr_process = swcr_null;
1250 case CRYPTO_AES_NIST_GCM_16:
1251 case CRYPTO_AES_CCM_16:
1252 panic("bad cipher algo");
1255 error = swcr_setup_encdec(ses, csp);
1257 ses->swcr_process = swcr_encdec;
1260 case CSP_MODE_DIGEST:
1261 error = swcr_setup_auth(ses, csp);
1264 switch (csp->csp_cipher_alg) {
1265 case CRYPTO_AES_NIST_GCM_16:
1266 error = swcr_setup_gcm(ses, csp);
1268 ses->swcr_process = swcr_gcm;
1270 case CRYPTO_AES_CCM_16:
1271 error = swcr_setup_ccm(ses, csp);
1273 ses->swcr_process = swcr_ccm;
1277 panic("bad aead algo");
1283 switch (csp->csp_cipher_alg) {
1284 case CRYPTO_AES_NIST_GCM_16:
1285 case CRYPTO_AES_CCM_16:
1286 panic("bad eta cipher algo");
1288 switch (csp->csp_auth_alg) {
1289 case CRYPTO_AES_NIST_GMAC:
1290 case CRYPTO_AES_CCM_CBC_MAC:
1291 panic("bad eta auth algo");
1295 error = swcr_setup_auth(ses, csp);
1298 if (csp->csp_cipher_alg == CRYPTO_NULL_CBC) {
1299 /* Effectively degrade to digest mode. */
1300 ses->swcr_process = swcr_authcompute;
1304 error = swcr_setup_encdec(ses, csp);
1306 ses->swcr_process = swcr_eta;
/* Common error exit: release any state allocated above. */
1313 swcr_freesession(dev, cses);
/*
 * cryptodev_freesession method: destroy the session lock, zero and free
 * the cipher key schedule and the auth contexts. Hash contexts are
 * explicitly zeroed before free since they may hold key material.
 */
1318 swcr_freesession(device_t dev, crypto_session_t cses)
1320 struct swcr_session *ses;
1321 struct swcr_auth *swa;
1322 struct enc_xform *txf;
1323 struct auth_hash *axf;
1325 ses = crypto_get_driver_session(cses);
1327 mtx_destroy(&ses->swcr_lock);
1329 txf = ses->swcr_encdec.sw_exf;
/* zerokey() scrubs and releases the schedule, clearing the pointer. */
1331 if (ses->swcr_encdec.sw_kschedule != NULL)
1332 txf->zerokey(&(ses->swcr_encdec.sw_kschedule));
1335 axf = ses->swcr_auth.sw_axf;
1337 swa = &ses->swcr_auth;
1338 if (swa->sw_ictx != NULL) {
1339 explicit_bzero(swa->sw_ictx, axf->ctxsize);
1340 free(swa->sw_ictx, M_CRYPTO_DATA);
1342 if (swa->sw_octx != NULL) {
1343 explicit_bzero(swa->sw_octx, axf->ctxsize);
1344 free(swa->sw_octx, M_CRYPTO_DATA);
1350 * Process a software request.
/*
 * cryptodev_process method: dispatch the request to the handler chosen
 * at session creation, under the per-session lock (this driver is
 * synchronous: crp_etype is set before returning).
 */
1353 swcr_process(device_t dev, struct cryptop *crp, int hint)
1355 struct swcr_session *ses;
1357 ses = crypto_get_driver_session(crp->crp_session);
1358 mtx_lock(&ses->swcr_lock);
1360 crp->crp_etype = ses->swcr_process(ses, crp);
1362 mtx_unlock(&ses->swcr_lock);
/* device_identify: add a single "cryptosoft" child under nexus if absent. */
1368 swcr_identify(driver_t *drv, device_t parent)
1370 /* NB: order 10 is so we get attached after h/w devices */
1371 if (device_find_child(parent, "cryptosoft", -1) == NULL &&
1372 BUS_ADD_CHILD(parent, 10, "cryptosoft", 0) == 0)
1373 panic("cryptosoft: could not attach")
/* device_probe: always succeeds for our self-identified child. */
1377 swcr_probe(device_t dev)
1379 device_set_desc(dev, "software crypto");
1380 return (BUS_PROBE_NOWILDCARD);
/* device_attach: register with the crypto framework as a sync software driver. */
1384 swcr_attach(device_t dev)
1387 swcr_id = crypto_get_driverid(dev, sizeof(struct swcr_session),
1388 CRYPTOCAP_F_SOFTWARE | CRYPTOCAP_F_SYNC);
1390 device_printf(dev, "cannot initialize!");
/* device_detach: unregister all algorithms from the crypto framework. */
1398 swcr_detach(device_t dev)
1400 crypto_unregister_all(swcr_id);
/* newbus device methods plus the cryptodev interface implementation. */
1404 static device_method_t swcr_methods[] = {
1405 DEVMETHOD(device_identify, swcr_identify),
1406 DEVMETHOD(device_probe, swcr_probe),
1407 DEVMETHOD(device_attach, swcr_attach),
1408 DEVMETHOD(device_detach, swcr_detach),
1410 DEVMETHOD(cryptodev_probesession, swcr_probesession),
1411 DEVMETHOD(cryptodev_newsession, swcr_newsession),
1412 DEVMETHOD(cryptodev_freesession,swcr_freesession),
1413 DEVMETHOD(cryptodev_process, swcr_process),
1418 static driver_t swcr_driver = {
1421 0, /* NB: no softc */
1423 static devclass_t swcr_devclass;
1426 * NB: We explicitly reference the crypto module so we
1427 * get the necessary ordering when built as a loadable
1428 * module. This is required because we bundle the crypto
1429 * module code together with the cryptosoft driver (otherwise
1430 * normal module dependencies would handle things).
1432 extern int crypto_modevent(struct module *, int, void *);
1433 /* XXX where to attach */
1434 DRIVER_MODULE(cryptosoft, nexus, swcr_driver, swcr_devclass, crypto_modevent,0);
1435 MODULE_VERSION(cryptosoft, 1);
1436 MODULE_DEPEND(cryptosoft, crypto, 1, 1, 1);