/*	$OpenBSD: cryptosoft.c,v 1.35 2002/04/26 08:43:50 deraadt Exp $	*/

/*-
 * The author of this code is Angelos D. Keromytis (angelos@cis.upenn.edu)
 * Copyright (c) 2002-2006 Sam Leffler, Errno Consulting
 *
 * This code was written by Angelos D. Keromytis in Athens, Greece, in
 * February 2000. Network Security Technologies Inc. (NSTI) kindly
 * supported the development of this code.
 *
 * Copyright (c) 2000, 2001 Angelos D. Keromytis
 * Copyright (c) 2014-2021 The FreeBSD Foundation
 * All rights reserved.
 *
 * Portions of this software were developed by John-Mark Gurney
 * under sponsorship of the FreeBSD Foundation and
 * Rubicon Communications, LLC (Netgate).
 *
 * Portions of this software were developed by Ararat River
 * Consulting, LLC under sponsorship of the FreeBSD Foundation.
 *
 * Permission to use, copy, and modify this software with or without fee
 * is hereby granted, provided that this entire notice is included in
 * all source code copies of any software which is or includes a copy or
 * modification of this software.
 *
 * THIS SOFTWARE IS BEING PROVIDED "AS IS", WITHOUT ANY EXPRESS OR
 * IMPLIED WARRANTY. IN PARTICULAR, NONE OF THE AUTHORS MAKES ANY
 * REPRESENTATION OR WARRANTY OF ANY KIND CONCERNING THE
 * MERCHANTABILITY OF THIS SOFTWARE OR ITS FITNESS FOR ANY PARTICULAR
 * PURPOSE.
 */
#include <sys/cdefs.h>
__FBSDID("$FreeBSD$");

#include <sys/param.h>
#include <sys/systm.h>
#include <sys/malloc.h>
#include <sys/mbuf.h>		/* m_adj() in swcr_compdec() */
#include <sys/module.h>
#include <sys/sysctl.h>
#include <sys/errno.h>
#include <sys/random.h>
#include <sys/kernel.h>
#include <sys/uio.h>
#include <sys/lock.h>		/* required before <sys/rwlock.h> and <sys/mutex.h> */
#include <sys/rwlock.h>
#include <sys/endian.h>
#include <sys/limits.h>
#include <sys/mutex.h>
#include <crypto/sha1.h>
#include <opencrypto/rmd160.h>

#include <opencrypto/cryptodev.h>
#include <opencrypto/xform.h>

#include "cryptodev_if.h"
struct swcr_auth {
	void		*sw_ictx;
	void		*sw_octx;
	const struct auth_hash *sw_axf;
	uint16_t	sw_mlen;
};

struct swcr_encdec {
	void		*sw_kschedule;
	const struct enc_xform *sw_exf;
};

struct swcr_compdec {
	const struct comp_algo *sw_cxf;
};

struct swcr_session {
	struct mtx	swcr_lock;
	int		(*swcr_process)(struct swcr_session *, struct cryptop *);

	struct swcr_auth swcr_auth;
	struct swcr_encdec swcr_encdec;
	struct swcr_compdec swcr_compdec;
};
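/*
 * Each session selects a swcr_process handler when it is created;
 * swcr_process() below dispatches every request through that pointer
 * while holding the per-session lock.
 */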
static	int32_t swcr_id;

static	void swcr_freesession(device_t dev, crypto_session_t cses);

/* Used for CRYPTO_NULL_CBC. */
static int
swcr_null(struct swcr_session *ses, struct cryptop *crp)
{

	return (0);
}
/*
 * Apply a symmetric encryption/decryption algorithm.
 */
static int
swcr_encdec(struct swcr_session *ses, struct cryptop *crp)
{
	unsigned char iv[EALG_MAX_BLOCK_LEN], blk[EALG_MAX_BLOCK_LEN];
	unsigned char *ivp, *nivp, iv2[EALG_MAX_BLOCK_LEN];
	const struct crypto_session_params *csp;
	const struct enc_xform *exf;
	struct swcr_encdec *sw;
	size_t inlen, outlen;
	int blks, error, i, resid;
	struct crypto_buffer_cursor cc_in, cc_out;
	const unsigned char *inblk;
	unsigned char *outblk;
	bool encrypting;

	error = 0;
	sw = &ses->swcr_encdec;
	exf = sw->sw_exf;
	csp = crypto_get_params(crp->crp_session);

	if (exf->native_blocksize == 0) {
		/* Check for non-padded data */
		if ((crp->crp_payload_length % exf->blocksize) != 0)
			return (EINVAL);
		blks = exf->blocksize;
	} else
		blks = exf->native_blocksize;

	if (exf == &enc_xform_aes_icm &&
	    (crp->crp_flags & CRYPTO_F_IV_SEPARATE) == 0)
		return (EINVAL);

	if (crp->crp_cipher_key != NULL) {
		error = exf->setkey(sw->sw_kschedule,
		    crp->crp_cipher_key, csp->csp_cipher_klen);
		if (error)
			return (error);
	}

	crypto_read_iv(crp, iv);

	if (exf->reinit) {
		/*
		 * xforms that provide a reinit method perform all IV
		 * handling themselves.
		 */
		exf->reinit(sw->sw_kschedule, iv, csp->csp_ivlen);
	}

	ivp = iv;

	crypto_cursor_init(&cc_in, &crp->crp_buf);
	crypto_cursor_advance(&cc_in, crp->crp_payload_start);
	inblk = crypto_cursor_segment(&cc_in, &inlen);
	if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
		crypto_cursor_init(&cc_out, &crp->crp_obuf);
		crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
	} else
		cc_out = cc_in;
	outblk = crypto_cursor_segment(&cc_out, &outlen);

	resid = crp->crp_payload_length;
	encrypting = CRYPTO_OP_IS_ENCRYPT(crp->crp_op);

	/*
	 * Loop through encrypting blocks.  'inlen' is the remaining
	 * length of the current segment in the input buffer.
	 * 'outlen' is the remaining length of the current segment in
	 * the output buffer.
	 */
	while (resid >= blks) {
		/*
		 * If the current block is not contained within the
		 * current input/output segment, use 'blk' as a local
		 * buffer.
		 */
		if (inlen < blks) {
			crypto_cursor_copydata(&cc_in, blks, blk);
			inblk = blk;
		}
		if (outlen < blks)
			outblk = blk;

		/*
		 * Ciphers without a 'reinit' hook are assumed to be
		 * used in CBC mode where the chaining is done here.
		 */
		if (exf->reinit != NULL) {
			if (encrypting)
				exf->encrypt(sw->sw_kschedule, inblk, outblk);
			else
				exf->decrypt(sw->sw_kschedule, inblk, outblk);
		} else if (encrypting) {
			/* XOR with previous block */
			for (i = 0; i < blks; i++)
				outblk[i] = inblk[i] ^ ivp[i];

			exf->encrypt(sw->sw_kschedule, outblk, outblk);

			/*
			 * Keep encrypted block for XOR'ing
			 * with next block
			 */
			memcpy(iv, outblk, blks);
			ivp = iv;
		} else {	/* decrypt */
			/*
			 * Keep encrypted block for XOR'ing
			 * with next block
			 */
			nivp = (ivp == iv) ? iv2 : iv;
			memcpy(nivp, inblk, blks);

			exf->decrypt(sw->sw_kschedule, inblk, outblk);

			/* XOR with previous block */
			for (i = 0; i < blks; i++)
				outblk[i] ^= ivp[i];

			ivp = nivp;
		}

		if (inlen < blks) {
			inblk = crypto_cursor_segment(&cc_in, &inlen);
		} else {
			crypto_cursor_advance(&cc_in, blks);
			inlen -= blks;
			inblk += blks;
		}

		if (outlen < blks) {
			crypto_cursor_copyback(&cc_out, blks, blk);
			outblk = crypto_cursor_segment(&cc_out, &outlen);
		} else {
			crypto_cursor_advance(&cc_out, blks);
			outlen -= blks;
			outblk += blks;
		}

		resid -= blks;
	}

	/* Handle trailing partial block for stream ciphers. */
	if (resid > 0) {
		KASSERT(exf->native_blocksize != 0,
		    ("%s: partial block of %d bytes for cipher %s",
		    __func__, resid, exf->name));
		KASSERT(exf->reinit != NULL,
		    ("%s: partial block cipher %s without reinit hook",
		    __func__, exf->name));
		KASSERT(resid < blks, ("%s: partial block too big", __func__));

		inblk = crypto_cursor_segment(&cc_in, &inlen);
		outblk = crypto_cursor_segment(&cc_out, &outlen);
		if (inlen < resid) {
			crypto_cursor_copydata(&cc_in, resid, blk);
			inblk = blk;
		}
		if (outlen < resid)
			outblk = blk;
		if (encrypting)
			exf->encrypt_last(sw->sw_kschedule, inblk, outblk,
			    resid);
		else
			exf->decrypt_last(sw->sw_kschedule, inblk, outblk,
			    resid);
		if (outlen < resid)
			crypto_cursor_copyback(&cc_out, resid, blk);
	}

	explicit_bzero(blk, sizeof(blk));
	explicit_bzero(iv, sizeof(iv));
	explicit_bzero(iv2, sizeof(iv2));
	return (0);
}
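/*
 * Initialize the per-session authentication state from a key.  The
 * HMAC algorithms precompute the inner and outer pad contexts; keyed
 * digests (Poly1305, Blake2) are keyed directly.
 */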
static void
swcr_authprepare(const struct auth_hash *axf, struct swcr_auth *sw,
    const uint8_t *key, int klen)
{

	switch (axf->type) {
	case CRYPTO_SHA1_HMAC:
	case CRYPTO_SHA2_224_HMAC:
	case CRYPTO_SHA2_256_HMAC:
	case CRYPTO_SHA2_384_HMAC:
	case CRYPTO_SHA2_512_HMAC:
	case CRYPTO_NULL_HMAC:
	case CRYPTO_RIPEMD160_HMAC:
		hmac_init_ipad(axf, key, klen, sw->sw_ictx);
		hmac_init_opad(axf, key, klen, sw->sw_octx);
		break;
	case CRYPTO_POLY1305:
	case CRYPTO_BLAKE2B:
	case CRYPTO_BLAKE2S:
		axf->Setkey(sw->sw_ictx, key, klen);
		axf->Init(sw->sw_ictx);
		break;
	default:
		panic("%s: algorithm %d doesn't use keys", __func__, axf->type);
	}
}
/*
 * Compute or verify hash.
 */
static int
swcr_authcompute(struct swcr_session *ses, struct cryptop *crp)
{
	u_char aalg[HASH_MAX_LEN];
	const struct crypto_session_params *csp;
	struct swcr_auth *sw;
	const struct auth_hash *axf;
	union authctx ctx;
	int err;

	sw = &ses->swcr_auth;
	axf = sw->sw_axf;

	csp = crypto_get_params(crp->crp_session);
	if (crp->crp_auth_key != NULL) {
		swcr_authprepare(axf, sw, crp->crp_auth_key,
		    csp->csp_auth_klen);
	}

	bcopy(sw->sw_ictx, &ctx, axf->ctxsize);

	if (crp->crp_aad != NULL)
		err = axf->Update(&ctx, crp->crp_aad, crp->crp_aad_length);
	else
		err = crypto_apply(crp, crp->crp_aad_start, crp->crp_aad_length,
		    axf->Update, &ctx);
	if (err)
		goto out;

	if (CRYPTO_HAS_OUTPUT_BUFFER(crp) &&
	    CRYPTO_OP_IS_ENCRYPT(crp->crp_op))
		err = crypto_apply_buf(&crp->crp_obuf,
		    crp->crp_payload_output_start, crp->crp_payload_length,
		    axf->Update, &ctx);
	else
		err = crypto_apply(crp, crp->crp_payload_start,
		    crp->crp_payload_length, axf->Update, &ctx);
	if (err)
		goto out;

	if (csp->csp_flags & CSP_F_ESN)
		axf->Update(&ctx, crp->crp_esn, 4);

	axf->Final(aalg, &ctx);
	if (sw->sw_octx != NULL) {
		bcopy(sw->sw_octx, &ctx, axf->ctxsize);
		axf->Update(&ctx, aalg, axf->hashsize);
		axf->Final(aalg, &ctx);
	}

	if (crp->crp_op & CRYPTO_OP_VERIFY_DIGEST) {
		u_char uaalg[HASH_MAX_LEN];

		crypto_copydata(crp, crp->crp_digest_start, sw->sw_mlen, uaalg);
		if (timingsafe_bcmp(aalg, uaalg, sw->sw_mlen) != 0)
			err = EBADMSG;
		explicit_bzero(uaalg, sizeof(uaalg));
	} else {
		/* Inject the authentication data */
		crypto_copyback(crp, crp->crp_digest_start, sw->sw_mlen, aalg);
	}
	explicit_bzero(aalg, sizeof(aalg));
out:
	explicit_bzero(&ctx, sizeof(ctx));
	return (err);
}
CTASSERT(INT_MAX <= (1ll<<39) - 256);	/* GCM: plain text < 2^39-256 */
CTASSERT(INT_MAX <= (uint64_t)-1);	/* GCM: associated data <= 2^64-1 */
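/*
 * Compute or verify a GMAC tag: AES-GCM authentication with no
 * encrypted payload.  The request payload is hashed in place of AAD.
 */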
static int
swcr_gmac(struct swcr_session *ses, struct cryptop *crp)
{
	uint32_t blkbuf[howmany(AES_BLOCK_LEN, sizeof(uint32_t))];
	u_char *blk = (u_char *)blkbuf;
	u_char tag[GMAC_DIGEST_LEN];
	u_char iv[AES_BLOCK_LEN];
	struct crypto_buffer_cursor cc;
	const u_char *inblk;
	union authctx ctx;
	struct swcr_auth *swa;
	const struct auth_hash *axf;
	uint32_t *blkp;
	size_t len;
	int blksz, error, ivlen, resid;

	swa = &ses->swcr_auth;
	axf = swa->sw_axf;

	bcopy(swa->sw_ictx, &ctx, axf->ctxsize);
	blksz = GMAC_BLOCK_LEN;
	KASSERT(axf->blocksize == blksz, ("%s: axf block size mismatch",
	    __func__));

	/* Initialize the IV */
	ivlen = AES_GCM_IV_LEN;
	crypto_read_iv(crp, iv);

	axf->Reinit(&ctx, iv, ivlen);
	crypto_cursor_init(&cc, &crp->crp_buf);
	crypto_cursor_advance(&cc, crp->crp_payload_start);
	for (resid = crp->crp_payload_length; resid >= blksz; resid -= len) {
		inblk = crypto_cursor_segment(&cc, &len);
		if (len >= blksz) {
			len = rounddown(MIN(len, resid), blksz);
			crypto_cursor_advance(&cc, len);
		} else {
			len = blksz;
			crypto_cursor_copydata(&cc, len, blk);
			inblk = blk;
		}
		axf->Update(&ctx, inblk, len);
	}
	if (resid > 0) {
		memset(blk, 0, blksz);
		crypto_cursor_copydata(&cc, resid, blk);
		axf->Update(&ctx, blk, blksz);
	}

	/* length block */
	memset(blk, 0, blksz);
	blkp = (uint32_t *)blk + 1;
	*blkp = htobe32(crp->crp_payload_length * 8);
	axf->Update(&ctx, blk, blksz);

	/* Finalize MAC */
	axf->Final(tag, &ctx);

	error = 0;
	if (crp->crp_op & CRYPTO_OP_VERIFY_DIGEST) {
		u_char tag2[GMAC_DIGEST_LEN];

		crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
		    tag2);
		if (timingsafe_bcmp(tag, tag2, swa->sw_mlen) != 0)
			error = EBADMSG;
		explicit_bzero(tag2, sizeof(tag2));
	} else {
		/* Inject the authentication data */
		crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen, tag);
	}
	explicit_bzero(blkbuf, sizeof(blkbuf));
	explicit_bzero(tag, sizeof(tag));
	explicit_bzero(iv, sizeof(iv));
	return (error);
}
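/*
 * Perform AES-GCM as a single encrypt-and-MAC pass.  On decryption
 * the tag is computed over the ciphertext first; the payload is only
 * decrypted once the tag has been verified.
 */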
static int
swcr_gcm(struct swcr_session *ses, struct cryptop *crp)
{
	uint32_t blkbuf[howmany(AES_BLOCK_LEN, sizeof(uint32_t))];
	u_char *blk = (u_char *)blkbuf;
	u_char tag[GMAC_DIGEST_LEN];
	u_char iv[AES_BLOCK_LEN];
	struct crypto_buffer_cursor cc_in, cc_out;
	const u_char *inblk;
	u_char *outblk;
	union authctx ctx;
	struct swcr_auth *swa;
	struct swcr_encdec *swe;
	const struct auth_hash *axf;
	const struct enc_xform *exf;
	uint32_t *blkp;
	size_t len;
	int blksz, error, ivlen, r, resid;

	swa = &ses->swcr_auth;
	axf = swa->sw_axf;

	bcopy(swa->sw_ictx, &ctx, axf->ctxsize);
	blksz = GMAC_BLOCK_LEN;
	KASSERT(axf->blocksize == blksz, ("%s: axf block size mismatch",
	    __func__));

	swe = &ses->swcr_encdec;
	exf = swe->sw_exf;
	KASSERT(axf->blocksize == exf->native_blocksize,
	    ("%s: blocksize mismatch", __func__));

	if ((crp->crp_flags & CRYPTO_F_IV_SEPARATE) == 0)
		return (EINVAL);

	/* Initialize the IV */
	ivlen = AES_GCM_IV_LEN;
	bcopy(crp->crp_iv, iv, ivlen);

	/* Supply MAC with IV */
	axf->Reinit(&ctx, iv, ivlen);

	/* Supply MAC with AAD */
	if (crp->crp_aad != NULL) {
		len = rounddown(crp->crp_aad_length, blksz);
		if (len != 0)
			axf->Update(&ctx, crp->crp_aad, len);
		if (crp->crp_aad_length != len) {
			memset(blk, 0, blksz);
			memcpy(blk, (char *)crp->crp_aad + len,
			    crp->crp_aad_length - len);
			axf->Update(&ctx, blk, blksz);
		}
	} else {
		crypto_cursor_init(&cc_in, &crp->crp_buf);
		crypto_cursor_advance(&cc_in, crp->crp_aad_start);
		for (resid = crp->crp_aad_length; resid >= blksz;
		    resid -= len) {
			inblk = crypto_cursor_segment(&cc_in, &len);
			if (len >= blksz) {
				len = rounddown(MIN(len, resid), blksz);
				crypto_cursor_advance(&cc_in, len);
			} else {
				len = blksz;
				crypto_cursor_copydata(&cc_in, len, blk);
				inblk = blk;
			}
			axf->Update(&ctx, inblk, len);
		}
		if (resid > 0) {
			memset(blk, 0, blksz);
			crypto_cursor_copydata(&cc_in, resid, blk);
			axf->Update(&ctx, blk, blksz);
		}
	}

	if (crp->crp_cipher_key != NULL)
		exf->setkey(swe->sw_kschedule, crp->crp_cipher_key,
		    crypto_get_params(crp->crp_session)->csp_cipher_klen);
	exf->reinit(swe->sw_kschedule, iv, ivlen);

	/* Do encryption with MAC */
	crypto_cursor_init(&cc_in, &crp->crp_buf);
	crypto_cursor_advance(&cc_in, crp->crp_payload_start);
	if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
		crypto_cursor_init(&cc_out, &crp->crp_obuf);
		crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
	} else
		cc_out = cc_in;
	for (resid = crp->crp_payload_length; resid >= blksz; resid -= blksz) {
		inblk = crypto_cursor_segment(&cc_in, &len);
		if (len < blksz) {
			crypto_cursor_copydata(&cc_in, blksz, blk);
			inblk = blk;
		} else
			crypto_cursor_advance(&cc_in, blksz);
		if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
			outblk = crypto_cursor_segment(&cc_out, &len);
			if (len < blksz)
				outblk = blk;
			exf->encrypt(swe->sw_kschedule, inblk, outblk);
			axf->Update(&ctx, outblk, blksz);
			if (outblk == blk)
				crypto_cursor_copyback(&cc_out, blksz, blk);
			else
				crypto_cursor_advance(&cc_out, blksz);
		} else {
			axf->Update(&ctx, inblk, blksz);
		}
	}
	if (resid > 0) {
		crypto_cursor_copydata(&cc_in, resid, blk);
		if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
			exf->encrypt_last(swe->sw_kschedule, blk, blk, resid);
			crypto_cursor_copyback(&cc_out, resid, blk);
		}
		axf->Update(&ctx, blk, resid);
	}

	/* length block */
	memset(blk, 0, blksz);
	blkp = (uint32_t *)blk + 1;
	*blkp = htobe32(crp->crp_aad_length * 8);
	blkp = (uint32_t *)blk + 3;
	*blkp = htobe32(crp->crp_payload_length * 8);
	axf->Update(&ctx, blk, blksz);

	/* Finalize MAC */
	axf->Final(tag, &ctx);

	/* Validate tag */
	error = 0;
	if (!CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
		u_char tag2[GMAC_DIGEST_LEN];

		crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen, tag2);

		r = timingsafe_bcmp(tag, tag2, swa->sw_mlen);
		explicit_bzero(tag2, sizeof(tag2));
		if (r != 0) {
			error = EBADMSG;
			goto out;
		}

		/* tag matches, decrypt data */
		crypto_cursor_init(&cc_in, &crp->crp_buf);
		crypto_cursor_advance(&cc_in, crp->crp_payload_start);
		for (resid = crp->crp_payload_length; resid > blksz;
		    resid -= blksz) {
			inblk = crypto_cursor_segment(&cc_in, &len);
			if (len < blksz) {
				crypto_cursor_copydata(&cc_in, blksz, blk);
				inblk = blk;
			} else
				crypto_cursor_advance(&cc_in, blksz);
			outblk = crypto_cursor_segment(&cc_out, &len);
			if (len < blksz)
				outblk = blk;
			exf->decrypt(swe->sw_kschedule, inblk, outblk);
			if (outblk == blk)
				crypto_cursor_copyback(&cc_out, blksz, blk);
			else
				crypto_cursor_advance(&cc_out, blksz);
		}
		if (resid > 0) {
			crypto_cursor_copydata(&cc_in, resid, blk);
			exf->decrypt_last(swe->sw_kschedule, blk, blk, resid);
			crypto_cursor_copyback(&cc_out, resid, blk);
		}
	} else {
		/* Inject the authentication data */
		crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen, tag);
	}

out:
	explicit_bzero(blkbuf, sizeof(blkbuf));
	explicit_bzero(tag, sizeof(tag));
	explicit_bzero(iv, sizeof(iv));
	return (error);
}
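/*
 * Compute or verify the CBC-MAC tag for AES-CCM digest-only requests.
 * The payload is authenticated but nothing is encrypted.
 */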
static int
swcr_ccm_cbc_mac(struct swcr_session *ses, struct cryptop *crp)
{
	u_char tag[AES_CBC_MAC_HASH_LEN];
	u_char iv[AES_BLOCK_LEN];
	union authctx ctx;
	struct swcr_auth *swa;
	const struct auth_hash *axf;
	int error, ivlen;

	swa = &ses->swcr_auth;
	axf = swa->sw_axf;

	bcopy(swa->sw_ictx, &ctx, axf->ctxsize);

	/* Initialize the IV */
	ivlen = AES_CCM_IV_LEN;
	crypto_read_iv(crp, iv);

	/*
	 * AES CCM-CBC-MAC needs to know the length of both the auth
	 * data and payload data before doing the auth computation.
	 */
	ctx.aes_cbc_mac_ctx.authDataLength = crp->crp_payload_length;
	ctx.aes_cbc_mac_ctx.cryptDataLength = 0;

	axf->Reinit(&ctx, iv, ivlen);
	if (crp->crp_aad != NULL)
		error = axf->Update(&ctx, crp->crp_aad, crp->crp_aad_length);
	else
		error = crypto_apply(crp, crp->crp_payload_start,
		    crp->crp_payload_length, axf->Update, &ctx);
	if (error)
		return (error);

	/* Finalize MAC */
	axf->Final(tag, &ctx);

	error = 0;
	if (crp->crp_op & CRYPTO_OP_VERIFY_DIGEST) {
		u_char tag2[AES_CBC_MAC_HASH_LEN];

		crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
		    tag2);
		if (timingsafe_bcmp(tag, tag2, swa->sw_mlen) != 0)
			error = EBADMSG;
		explicit_bzero(tag2, sizeof(tag2));
	} else {
		/* Inject the authentication data */
		crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen, tag);
	}
	explicit_bzero(tag, sizeof(tag));
	explicit_bzero(iv, sizeof(iv));
	return (error);
}
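/*
 * Perform AES-CCM.  Because CBC-MAC authenticates the plaintext, a
 * decryption request must run the cipher twice: once to feed the MAC,
 * and again after the tag has been verified.
 */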
static int
swcr_ccm(struct swcr_session *ses, struct cryptop *crp)
{
	uint32_t blkbuf[howmany(AES_BLOCK_LEN, sizeof(uint32_t))];
	u_char *blk = (u_char *)blkbuf;
	u_char tag[AES_CBC_MAC_HASH_LEN];
	u_char iv[AES_BLOCK_LEN];
	struct crypto_buffer_cursor cc_in, cc_out;
	const u_char *inblk;
	u_char *outblk;
	union authctx ctx;
	struct swcr_auth *swa;
	struct swcr_encdec *swe;
	const struct auth_hash *axf;
	const struct enc_xform *exf;
	size_t len;
	int blksz, error, ivlen, r, resid;

	swa = &ses->swcr_auth;
	axf = swa->sw_axf;

	bcopy(swa->sw_ictx, &ctx, axf->ctxsize);
	blksz = AES_BLOCK_LEN;
	KASSERT(axf->blocksize == blksz, ("%s: axf block size mismatch",
	    __func__));

	swe = &ses->swcr_encdec;
	exf = swe->sw_exf;
	KASSERT(axf->blocksize == exf->native_blocksize,
	    ("%s: blocksize mismatch", __func__));

	if ((crp->crp_flags & CRYPTO_F_IV_SEPARATE) == 0)
		return (EINVAL);

	/* Initialize the IV */
	ivlen = AES_CCM_IV_LEN;
	bcopy(crp->crp_iv, iv, ivlen);

	/*
	 * AES CCM-CBC-MAC needs to know the length of both the auth
	 * data and payload data before doing the auth computation.
	 */
	ctx.aes_cbc_mac_ctx.authDataLength = crp->crp_aad_length;
	ctx.aes_cbc_mac_ctx.cryptDataLength = crp->crp_payload_length;

	/* Supply MAC with IV */
	axf->Reinit(&ctx, iv, ivlen);

	/* Supply MAC with AAD */
	if (crp->crp_aad != NULL)
		error = axf->Update(&ctx, crp->crp_aad, crp->crp_aad_length);
	else
		error = crypto_apply(crp, crp->crp_aad_start,
		    crp->crp_aad_length, axf->Update, &ctx);
	if (error)
		return (error);

	if (crp->crp_cipher_key != NULL)
		exf->setkey(swe->sw_kschedule, crp->crp_cipher_key,
		    crypto_get_params(crp->crp_session)->csp_cipher_klen);
	exf->reinit(swe->sw_kschedule, iv, ivlen);

	/* Do encryption/decryption with MAC */
	crypto_cursor_init(&cc_in, &crp->crp_buf);
	crypto_cursor_advance(&cc_in, crp->crp_payload_start);
	if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
		crypto_cursor_init(&cc_out, &crp->crp_obuf);
		crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
	} else
		cc_out = cc_in;
	for (resid = crp->crp_payload_length; resid >= blksz; resid -= blksz) {
		inblk = crypto_cursor_segment(&cc_in, &len);
		if (len < blksz) {
			crypto_cursor_copydata(&cc_in, blksz, blk);
			inblk = blk;
		} else
			crypto_cursor_advance(&cc_in, blksz);
		if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
			outblk = crypto_cursor_segment(&cc_out, &len);
			if (len < blksz)
				outblk = blk;
			axf->Update(&ctx, inblk, blksz);
			exf->encrypt(swe->sw_kschedule, inblk, outblk);
			if (outblk == blk)
				crypto_cursor_copyback(&cc_out, blksz, blk);
			else
				crypto_cursor_advance(&cc_out, blksz);
		} else {
			/*
			 * One of the problems with CCM+CBC is that
			 * the authentication is done on the
			 * unencrypted data.  As a result, we have to
			 * decrypt the data twice: once to generate
			 * the tag and a second time after the tag is
			 * verified.
			 */
			exf->decrypt(swe->sw_kschedule, inblk, blk);
			axf->Update(&ctx, blk, blksz);
		}
	}
	if (resid > 0) {
		crypto_cursor_copydata(&cc_in, resid, blk);
		if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
			axf->Update(&ctx, blk, resid);
			exf->encrypt_last(swe->sw_kschedule, blk, blk, resid);
			crypto_cursor_copyback(&cc_out, resid, blk);
		} else {
			exf->decrypt_last(swe->sw_kschedule, blk, blk, resid);
			axf->Update(&ctx, blk, resid);
		}
	}

	/* Finalize MAC */
	axf->Final(tag, &ctx);

	/* Validate tag */
	error = 0;
	if (!CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
		u_char tag2[AES_CBC_MAC_HASH_LEN];

		crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
		    tag2);

		r = timingsafe_bcmp(tag, tag2, swa->sw_mlen);
		explicit_bzero(tag2, sizeof(tag2));
		if (r != 0) {
			error = EBADMSG;
			goto out;
		}

		/* tag matches, decrypt data */
		exf->reinit(swe->sw_kschedule, iv, ivlen);
		crypto_cursor_init(&cc_in, &crp->crp_buf);
		crypto_cursor_advance(&cc_in, crp->crp_payload_start);
		for (resid = crp->crp_payload_length; resid > blksz;
		    resid -= blksz) {
			inblk = crypto_cursor_segment(&cc_in, &len);
			if (len < blksz) {
				crypto_cursor_copydata(&cc_in, blksz, blk);
				inblk = blk;
			} else
				crypto_cursor_advance(&cc_in, blksz);
			outblk = crypto_cursor_segment(&cc_out, &len);
			if (len < blksz)
				outblk = blk;
			exf->decrypt(swe->sw_kschedule, inblk, outblk);
			if (outblk == blk)
				crypto_cursor_copyback(&cc_out, blksz, blk);
			else
				crypto_cursor_advance(&cc_out, blksz);
		}
		if (resid > 0) {
			crypto_cursor_copydata(&cc_in, resid, blk);
			exf->decrypt_last(swe->sw_kschedule, blk, blk, resid);
			crypto_cursor_copyback(&cc_out, resid, blk);
		}
	} else {
		/* Inject the authentication data */
		crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen, tag);
	}

out:
	explicit_bzero(blkbuf, sizeof(blkbuf));
	explicit_bzero(tag, sizeof(tag));
	explicit_bzero(iv, sizeof(iv));
	return (error);
}
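/*
 * Perform the ChaCha20-Poly1305 AEAD construction from RFC 8439: the
 * one-time Poly1305 key is derived from the cipher key and nonce, the
 * AAD and ciphertext are each zero-padded to a 16-byte boundary for
 * the MAC, and the two lengths are appended as little-endian 64-bit
 * values.
 */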
static int
swcr_chacha20_poly1305(struct swcr_session *ses, struct cryptop *crp)
{
	const struct crypto_session_params *csp;
	uint64_t blkbuf[howmany(CHACHA20_NATIVE_BLOCK_LEN, sizeof(uint64_t))];
	u_char *blk = (u_char *)blkbuf;
	u_char tag[POLY1305_HASH_LEN];
	struct crypto_buffer_cursor cc_in, cc_out;
	const u_char *inblk;
	u_char *outblk;
	uint64_t *blkp;
	union authctx ctx;
	struct swcr_auth *swa;
	struct swcr_encdec *swe;
	const struct auth_hash *axf;
	const struct enc_xform *exf;
	size_t len;
	int blksz, error, r, resid;

	swa = &ses->swcr_auth;
	axf = swa->sw_axf;

	swe = &ses->swcr_encdec;
	exf = swe->sw_exf;
	blksz = exf->native_blocksize;
	KASSERT(blksz <= sizeof(blkbuf), ("%s: blocksize mismatch", __func__));

	if ((crp->crp_flags & CRYPTO_F_IV_SEPARATE) == 0)
		return (EINVAL);

	csp = crypto_get_params(crp->crp_session);

	/* Generate Poly1305 key. */
	if (crp->crp_cipher_key != NULL)
		axf->Setkey(&ctx, crp->crp_cipher_key, csp->csp_cipher_klen);
	else
		axf->Setkey(&ctx, csp->csp_cipher_key, csp->csp_cipher_klen);
	axf->Reinit(&ctx, crp->crp_iv, csp->csp_ivlen);

	/* Supply MAC with AAD */
	if (crp->crp_aad != NULL)
		axf->Update(&ctx, crp->crp_aad, crp->crp_aad_length);
	else
		crypto_apply(crp, crp->crp_aad_start,
		    crp->crp_aad_length, axf->Update, &ctx);
	if (crp->crp_aad_length % 16 != 0) {
		/* padding1 */
		memset(blk, 0, 16);
		axf->Update(&ctx, blk, 16 - crp->crp_aad_length % 16);
	}

	if (crp->crp_cipher_key != NULL)
		exf->setkey(swe->sw_kschedule, crp->crp_cipher_key,
		    csp->csp_cipher_klen);
	exf->reinit(swe->sw_kschedule, crp->crp_iv, csp->csp_ivlen);

	/* Do encryption with MAC */
	crypto_cursor_init(&cc_in, &crp->crp_buf);
	crypto_cursor_advance(&cc_in, crp->crp_payload_start);
	if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
		crypto_cursor_init(&cc_out, &crp->crp_obuf);
		crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
	} else
		cc_out = cc_in;
	for (resid = crp->crp_payload_length; resid >= blksz; resid -= blksz) {
		inblk = crypto_cursor_segment(&cc_in, &len);
		if (len < blksz) {
			crypto_cursor_copydata(&cc_in, blksz, blk);
			inblk = blk;
		} else
			crypto_cursor_advance(&cc_in, blksz);
		if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
			outblk = crypto_cursor_segment(&cc_out, &len);
			if (len < blksz)
				outblk = blk;
			exf->encrypt(swe->sw_kschedule, inblk, outblk);
			axf->Update(&ctx, outblk, blksz);
			if (outblk == blk)
				crypto_cursor_copyback(&cc_out, blksz, blk);
			else
				crypto_cursor_advance(&cc_out, blksz);
		} else {
			axf->Update(&ctx, inblk, blksz);
		}
	}
	if (resid > 0) {
		crypto_cursor_copydata(&cc_in, resid, blk);
		if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
			exf->encrypt_last(swe->sw_kschedule, blk, blk, resid);
			crypto_cursor_copyback(&cc_out, resid, blk);
		}
		axf->Update(&ctx, blk, resid);
		if (resid % 16 != 0) {
			/* padding2 */
			memset(blk, 0, 16);
			axf->Update(&ctx, blk, 16 - resid % 16);
		}
	}

	/* lengths */
	blkp = (uint64_t *)blk;
	blkp[0] = htole64(crp->crp_aad_length);
	blkp[1] = htole64(crp->crp_payload_length);
	axf->Update(&ctx, blk, sizeof(uint64_t) * 2);

	/* Finalize MAC */
	axf->Final(tag, &ctx);

	/* Validate tag */
	error = 0;
	if (!CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
		u_char tag2[POLY1305_HASH_LEN];

		crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen, tag2);

		r = timingsafe_bcmp(tag, tag2, swa->sw_mlen);
		explicit_bzero(tag2, sizeof(tag2));
		if (r != 0) {
			error = EBADMSG;
			goto out;
		}

		/* tag matches, decrypt data */
		crypto_cursor_init(&cc_in, &crp->crp_buf);
		crypto_cursor_advance(&cc_in, crp->crp_payload_start);
		for (resid = crp->crp_payload_length; resid > blksz;
		    resid -= blksz) {
			inblk = crypto_cursor_segment(&cc_in, &len);
			if (len < blksz) {
				crypto_cursor_copydata(&cc_in, blksz, blk);
				inblk = blk;
			} else
				crypto_cursor_advance(&cc_in, blksz);
			outblk = crypto_cursor_segment(&cc_out, &len);
			if (len < blksz)
				outblk = blk;
			exf->decrypt(swe->sw_kschedule, inblk, outblk);
			if (outblk == blk)
				crypto_cursor_copyback(&cc_out, blksz, blk);
			else
				crypto_cursor_advance(&cc_out, blksz);
		}
		if (resid > 0) {
			crypto_cursor_copydata(&cc_in, resid, blk);
			exf->decrypt_last(swe->sw_kschedule, blk, blk, resid);
			crypto_cursor_copyback(&cc_out, resid, blk);
		}
	} else {
		/* Inject the authentication data */
		crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen, tag);
	}

out:
	explicit_bzero(blkbuf, sizeof(blkbuf));
	explicit_bzero(tag, sizeof(tag));
	explicit_bzero(&ctx, sizeof(ctx));
	return (error);
}
/*
 * Apply a cipher and a digest to perform EtA.
 */
static int
swcr_eta(struct swcr_session *ses, struct cryptop *crp)
{
	int error;

	if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
		error = swcr_encdec(ses, crp);
		if (error == 0)
			error = swcr_authcompute(ses, crp);
	} else {
		error = swcr_authcompute(ses, crp);
		if (error == 0)
			error = swcr_encdec(ses, crp);
	}
	return (error);
}
/*
 * Apply a compression/decompression algorithm.
 */
static int
swcr_compdec(struct swcr_session *ses, struct cryptop *crp)
{
	const struct comp_algo *cxf;
	uint8_t *data, *out;
	int adj;
	uint32_t result;

	cxf = ses->swcr_compdec.sw_cxf;

	/*
	 * The (de)compressor must see the whole payload at once, so if
	 * the data is not contiguous in the request buffer, copy it
	 * into a flat buffer first.
	 */
	data = malloc(crp->crp_payload_length, M_CRYPTO_DATA, M_NOWAIT);
	if (data == NULL)
		return (EINVAL);
	crypto_copydata(crp, crp->crp_payload_start, crp->crp_payload_length,
	    data);

	if (CRYPTO_OP_IS_COMPRESS(crp->crp_op))
		result = cxf->compress(data, crp->crp_payload_length, &out);
	else
		result = cxf->decompress(data, crp->crp_payload_length, &out);

	free(data, M_CRYPTO_DATA);
	if (result == 0)
		return (EINVAL);
	crp->crp_olen = result;

	/* Check the compressed size when doing compression */
	if (CRYPTO_OP_IS_COMPRESS(crp->crp_op)) {
		if (result >= crp->crp_payload_length) {
			/* Compression was useless, we lost time */
			free(out, M_CRYPTO_DATA);
			return (0);
		}
	}

	/*
	 * Copy back the (de)compressed data.  m_copyback extends the
	 * mbuf as necessary.
	 */
	crypto_copyback(crp, crp->crp_payload_start, result, out);
	if (result < crp->crp_payload_length) {
		switch (crp->crp_buf.cb_type) {
		case CRYPTO_BUF_MBUF:
		case CRYPTO_BUF_SINGLE_MBUF:
			adj = result - crp->crp_payload_length;
			m_adj(crp->crp_buf.cb_mbuf, adj);
			break;
		case CRYPTO_BUF_UIO: {
			struct uio *uio = crp->crp_buf.cb_uio;
			int ind;

			adj = crp->crp_payload_length - result;
			ind = uio->uio_iovcnt - 1;

			while (adj > 0 && ind >= 0) {
				if (adj < uio->uio_iov[ind].iov_len) {
					uio->uio_iov[ind].iov_len -= adj;
					break;
				}

				adj -= uio->uio_iov[ind].iov_len;
				uio->uio_iov[ind].iov_len = 0;
				ind--;
				uio->uio_iovcnt--;
			}
			}
			break;
		case CRYPTO_BUF_VMPAGE:
			adj = crp->crp_payload_length - result;
			crp->crp_buf.cb_vm_page_len -= adj;
			break;
		default:
			break;
		}
	}
	free(out, M_CRYPTO_DATA);
	return (0);
}
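/*
 * Allocate the cipher's key schedule and program the key, if one was
 * supplied with the session parameters.
 */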
static int
swcr_setup_cipher(struct swcr_session *ses,
    const struct crypto_session_params *csp)
{
	struct swcr_encdec *swe;
	const struct enc_xform *txf;
	int error;

	swe = &ses->swcr_encdec;
	txf = crypto_cipher(csp);
	MPASS(txf->ivsize == csp->csp_ivlen);
	if (txf->ctxsize != 0) {
		swe->sw_kschedule = malloc(txf->ctxsize, M_CRYPTO_DATA,
		    M_NOWAIT);
		if (swe->sw_kschedule == NULL)
			return (ENOMEM);
	}
	if (csp->csp_cipher_key != NULL) {
		error = txf->setkey(swe->sw_kschedule,
		    csp->csp_cipher_key, csp->csp_cipher_klen);
		if (error)
			return (error);
	}
	swe->sw_exf = txf;
	return (0);
}
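/*
 * Set up the auth state for a session and, for digest-only modes,
 * select the matching process handler.
 */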
static int
swcr_setup_auth(struct swcr_session *ses,
    const struct crypto_session_params *csp)
{
	struct swcr_auth *swa;
	const struct auth_hash *axf;

	swa = &ses->swcr_auth;

	axf = crypto_auth_hash(csp);
	swa->sw_axf = axf;
	if (csp->csp_auth_mlen < 0 || csp->csp_auth_mlen > axf->hashsize)
		return (EINVAL);
	if (csp->csp_auth_mlen == 0)
		swa->sw_mlen = axf->hashsize;
	else
		swa->sw_mlen = csp->csp_auth_mlen;
	swa->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA, M_NOWAIT);
	if (swa->sw_ictx == NULL)
		return (ENOBUFS);

	switch (csp->csp_auth_alg) {
	case CRYPTO_SHA1_HMAC:
	case CRYPTO_SHA2_224_HMAC:
	case CRYPTO_SHA2_256_HMAC:
	case CRYPTO_SHA2_384_HMAC:
	case CRYPTO_SHA2_512_HMAC:
	case CRYPTO_NULL_HMAC:
	case CRYPTO_RIPEMD160_HMAC:
		swa->sw_octx = malloc(axf->ctxsize, M_CRYPTO_DATA,
		    M_NOWAIT);
		if (swa->sw_octx == NULL)
			return (ENOBUFS);

		if (csp->csp_auth_key != NULL) {
			swcr_authprepare(axf, swa, csp->csp_auth_key,
			    csp->csp_auth_klen);
		}

		if (csp->csp_mode == CSP_MODE_DIGEST)
			ses->swcr_process = swcr_authcompute;
		break;
	case CRYPTO_SHA1:
	case CRYPTO_SHA2_224:
	case CRYPTO_SHA2_256:
	case CRYPTO_SHA2_384:
	case CRYPTO_SHA2_512:
		axf->Init(swa->sw_ictx);
		if (csp->csp_mode == CSP_MODE_DIGEST)
			ses->swcr_process = swcr_authcompute;
		break;
	case CRYPTO_AES_NIST_GMAC:
		axf->Init(swa->sw_ictx);
		axf->Setkey(swa->sw_ictx, csp->csp_auth_key,
		    csp->csp_auth_klen);
		if (csp->csp_mode == CSP_MODE_DIGEST)
			ses->swcr_process = swcr_gmac;
		break;
	case CRYPTO_POLY1305:
	case CRYPTO_BLAKE2B:
	case CRYPTO_BLAKE2S:
		/*
		 * Blake2b and Blake2s support an optional key but do
		 * not require one.
		 */
		if (csp->csp_auth_klen == 0 || csp->csp_auth_key != NULL)
			axf->Setkey(swa->sw_ictx, csp->csp_auth_key,
			    csp->csp_auth_klen);
		axf->Init(swa->sw_ictx);
		if (csp->csp_mode == CSP_MODE_DIGEST)
			ses->swcr_process = swcr_authcompute;
		break;
	case CRYPTO_AES_CCM_CBC_MAC:
		axf->Init(swa->sw_ictx);
		axf->Setkey(swa->sw_ictx, csp->csp_auth_key,
		    csp->csp_auth_klen);
		if (csp->csp_mode == CSP_MODE_DIGEST)
			ses->swcr_process = swcr_ccm_cbc_mac;
		break;
	}

	return (0);
}
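/*
 * GCM sessions use a GMAC auth context keyed with the cipher key, so
 * the auth side here is derived entirely from the cipher parameters.
 */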
static int
swcr_setup_gcm(struct swcr_session *ses,
    const struct crypto_session_params *csp)
{
	struct swcr_auth *swa;
	const struct auth_hash *axf;

	if (csp->csp_ivlen != AES_GCM_IV_LEN)
		return (EINVAL);

	/* First, setup the auth side. */
	swa = &ses->swcr_auth;
	switch (csp->csp_cipher_klen * 8) {
	case 128:
		axf = &auth_hash_nist_gmac_aes_128;
		break;
	case 192:
		axf = &auth_hash_nist_gmac_aes_192;
		break;
	case 256:
		axf = &auth_hash_nist_gmac_aes_256;
		break;
	default:
		return (EINVAL);
	}
	swa->sw_axf = axf;
	if (csp->csp_auth_mlen < 0 || csp->csp_auth_mlen > axf->hashsize)
		return (EINVAL);
	if (csp->csp_auth_mlen == 0)
		swa->sw_mlen = axf->hashsize;
	else
		swa->sw_mlen = csp->csp_auth_mlen;
	swa->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA, M_NOWAIT);
	if (swa->sw_ictx == NULL)
		return (ENOBUFS);
	axf->Init(swa->sw_ictx);
	if (csp->csp_cipher_key != NULL)
		axf->Setkey(swa->sw_ictx, csp->csp_cipher_key,
		    csp->csp_cipher_klen);

	/* Second, setup the cipher side. */
	return (swcr_setup_cipher(ses, csp));
}
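/*
 * CCM sessions pair the cipher with a CBC-MAC auth context keyed with
 * the same key.
 */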
static int
swcr_setup_ccm(struct swcr_session *ses,
    const struct crypto_session_params *csp)
{
	struct swcr_auth *swa;
	const struct auth_hash *axf;

	if (csp->csp_ivlen != AES_CCM_IV_LEN)
		return (EINVAL);

	/* First, setup the auth side. */
	swa = &ses->swcr_auth;
	switch (csp->csp_cipher_klen * 8) {
	case 128:
		axf = &auth_hash_ccm_cbc_mac_128;
		break;
	case 192:
		axf = &auth_hash_ccm_cbc_mac_192;
		break;
	case 256:
		axf = &auth_hash_ccm_cbc_mac_256;
		break;
	default:
		return (EINVAL);
	}
	swa->sw_axf = axf;
	if (csp->csp_auth_mlen < 0 || csp->csp_auth_mlen > axf->hashsize)
		return (EINVAL);
	if (csp->csp_auth_mlen == 0)
		swa->sw_mlen = axf->hashsize;
	else
		swa->sw_mlen = csp->csp_auth_mlen;
	swa->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA, M_NOWAIT);
	if (swa->sw_ictx == NULL)
		return (ENOBUFS);
	axf->Init(swa->sw_ictx);
	if (csp->csp_cipher_key != NULL)
		axf->Setkey(swa->sw_ictx, csp->csp_cipher_key,
		    csp->csp_cipher_klen);

	/* Second, setup the cipher side. */
	return (swcr_setup_cipher(ses, csp));
}
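/*
 * ChaCha20-Poly1305 regenerates its Poly1305 state from the cipher
 * key and nonce on every request, so only the digest length is
 * recorded here.
 */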
static int
swcr_setup_chacha20_poly1305(struct swcr_session *ses,
    const struct crypto_session_params *csp)
{
	struct swcr_auth *swa;
	const struct auth_hash *axf;

	if (csp->csp_ivlen != CHACHA20_POLY1305_IV_LEN)
		return (EINVAL);

	/* First, setup the auth side. */
	swa = &ses->swcr_auth;
	axf = &auth_hash_chacha20_poly1305;
	swa->sw_axf = axf;
	if (csp->csp_auth_mlen < 0 || csp->csp_auth_mlen > axf->hashsize)
		return (EINVAL);
	if (csp->csp_auth_mlen == 0)
		swa->sw_mlen = axf->hashsize;
	else
		swa->sw_mlen = csp->csp_auth_mlen;

	/* The auth state is regenerated for each nonce. */

	/* Second, setup the cipher side. */
	return (swcr_setup_cipher(ses, csp));
}
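/*
 * Check whether this driver can service a requested auth algorithm,
 * key length, and IV length.
 */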
static bool
swcr_auth_supported(const struct crypto_session_params *csp)
{
	const struct auth_hash *axf;

	axf = crypto_auth_hash(csp);
	if (axf == NULL)
		return (false);
	switch (csp->csp_auth_alg) {
	case CRYPTO_SHA1_HMAC:
	case CRYPTO_SHA2_224_HMAC:
	case CRYPTO_SHA2_256_HMAC:
	case CRYPTO_SHA2_384_HMAC:
	case CRYPTO_SHA2_512_HMAC:
	case CRYPTO_NULL_HMAC:
	case CRYPTO_RIPEMD160_HMAC:
		break;
	case CRYPTO_AES_NIST_GMAC:
		switch (csp->csp_auth_klen * 8) {
		case 128:
		case 192:
		case 256:
			break;
		default:
			return (false);
		}
		if (csp->csp_auth_key == NULL)
			return (false);
		if (csp->csp_ivlen != AES_GCM_IV_LEN)
			return (false);
		break;
	case CRYPTO_POLY1305:
		if (csp->csp_auth_klen != POLY1305_KEY_LEN)
			return (false);
		break;
	case CRYPTO_AES_CCM_CBC_MAC:
		switch (csp->csp_auth_klen * 8) {
		case 128:
		case 192:
		case 256:
			break;
		default:
			return (false);
		}
		if (csp->csp_auth_key == NULL)
			return (false);
		if (csp->csp_ivlen != AES_CCM_IV_LEN)
			return (false);
		break;
	}
	return (true);
}
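/*
 * Check whether this driver can service a requested cipher algorithm;
 * the requested IV length must match the transform's, except for the
 * null cipher.
 */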
static bool
swcr_cipher_supported(const struct crypto_session_params *csp)
{
	const struct enc_xform *txf;

	txf = crypto_cipher(csp);
	if (txf == NULL)
		return (false);
	if (csp->csp_cipher_alg != CRYPTO_NULL_CBC &&
	    txf->ivsize != csp->csp_ivlen)
		return (false);
	return (true);
}
#define SUPPORTED_SES (CSP_F_SEPARATE_OUTPUT | CSP_F_SEPARATE_AAD | CSP_F_ESN)
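/*
 * Reject sessions that request flags beyond SUPPORTED_SES, then vet
 * the algorithm combination for each session mode.
 */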
static int
swcr_probesession(device_t dev, const struct crypto_session_params *csp)
{
	if ((csp->csp_flags & ~(SUPPORTED_SES)) != 0)
		return (EINVAL);
	switch (csp->csp_mode) {
	case CSP_MODE_COMPRESS:
		switch (csp->csp_cipher_alg) {
		case CRYPTO_DEFLATE_COMP:
			break;
		default:
			return (EINVAL);
		}
		break;
	case CSP_MODE_CIPHER:
		switch (csp->csp_cipher_alg) {
		case CRYPTO_AES_NIST_GCM_16:
		case CRYPTO_AES_CCM_16:
		case CRYPTO_CHACHA20_POLY1305:
			return (EINVAL);
		default:
			if (!swcr_cipher_supported(csp))
				return (EINVAL);
			break;
		}
		break;
	case CSP_MODE_DIGEST:
		if (!swcr_auth_supported(csp))
			return (EINVAL);
		break;
	case CSP_MODE_AEAD:
		switch (csp->csp_cipher_alg) {
		case CRYPTO_AES_NIST_GCM_16:
		case CRYPTO_AES_CCM_16:
		case CRYPTO_CHACHA20_POLY1305:
			break;
		default:
			return (EINVAL);
		}
		break;
	case CSP_MODE_ETA:
		/* AEAD algorithms cannot be used for EtA. */
		switch (csp->csp_cipher_alg) {
		case CRYPTO_AES_NIST_GCM_16:
		case CRYPTO_AES_CCM_16:
		case CRYPTO_CHACHA20_POLY1305:
			return (EINVAL);
		}
		switch (csp->csp_auth_alg) {
		case CRYPTO_AES_NIST_GMAC:
		case CRYPTO_AES_CCM_CBC_MAC:
			return (EINVAL);
		}

		if (!swcr_cipher_supported(csp) ||
		    !swcr_auth_supported(csp))
			return (EINVAL);
		break;
	default:
		return (EINVAL);
	}

	return (CRYPTODEV_PROBE_SOFTWARE);
}
/*
 * Generate a new software session.
 */
static int
swcr_newsession(device_t dev, crypto_session_t cses,
    const struct crypto_session_params *csp)
{
	struct swcr_session *ses;
	struct swcr_encdec *swe;
	struct swcr_auth *swa;
	const struct comp_algo *cxf;
	int error;

	ses = crypto_get_driver_session(cses);
	mtx_init(&ses->swcr_lock, "swcr session lock", NULL, MTX_DEF);

	error = 0;
	swe = &ses->swcr_encdec;
	swa = &ses->swcr_auth;
	switch (csp->csp_mode) {
	case CSP_MODE_COMPRESS:
		switch (csp->csp_cipher_alg) {
		case CRYPTO_DEFLATE_COMP:
			cxf = &comp_algo_deflate;
			break;
		default:
			panic("bad compression algo");
		}
		ses->swcr_compdec.sw_cxf = cxf;
		ses->swcr_process = swcr_compdec;
		break;
	case CSP_MODE_CIPHER:
		switch (csp->csp_cipher_alg) {
		case CRYPTO_NULL_CBC:
			ses->swcr_process = swcr_null;
			break;
		case CRYPTO_AES_NIST_GCM_16:
		case CRYPTO_AES_CCM_16:
		case CRYPTO_CHACHA20_POLY1305:
			panic("bad cipher algo");
		default:
			error = swcr_setup_cipher(ses, csp);
			if (error == 0)
				ses->swcr_process = swcr_encdec;
		}
		break;
	case CSP_MODE_DIGEST:
		error = swcr_setup_auth(ses, csp);
		break;
	case CSP_MODE_AEAD:
		switch (csp->csp_cipher_alg) {
		case CRYPTO_AES_NIST_GCM_16:
			error = swcr_setup_gcm(ses, csp);
			if (error == 0)
				ses->swcr_process = swcr_gcm;
			break;
		case CRYPTO_AES_CCM_16:
			error = swcr_setup_ccm(ses, csp);
			if (error == 0)
				ses->swcr_process = swcr_ccm;
			break;
		case CRYPTO_CHACHA20_POLY1305:
			error = swcr_setup_chacha20_poly1305(ses, csp);
			if (error == 0)
				ses->swcr_process = swcr_chacha20_poly1305;
			break;
		default:
			panic("bad aead algo");
		}
		break;
	case CSP_MODE_ETA:
		switch (csp->csp_cipher_alg) {
		case CRYPTO_AES_NIST_GCM_16:
		case CRYPTO_AES_CCM_16:
		case CRYPTO_CHACHA20_POLY1305:
			panic("bad eta cipher algo");
		}
		switch (csp->csp_auth_alg) {
		case CRYPTO_AES_NIST_GMAC:
		case CRYPTO_AES_CCM_CBC_MAC:
			panic("bad eta auth algo");
		}

		error = swcr_setup_auth(ses, csp);
		if (error)
			break;
		if (csp->csp_cipher_alg == CRYPTO_NULL_CBC) {
			/* Effectively degrade to digest mode. */
			ses->swcr_process = swcr_authcompute;
			break;
		}

		error = swcr_setup_cipher(ses, csp);
		if (error == 0)
			ses->swcr_process = swcr_eta;
		break;
	default:
		error = EINVAL;
	}

	if (error)
		swcr_freesession(dev, cses);
	return (error);
}
static void
swcr_freesession(device_t dev, crypto_session_t cses)
{
	struct swcr_session *ses;

	ses = crypto_get_driver_session(cses);

	mtx_destroy(&ses->swcr_lock);

	zfree(ses->swcr_encdec.sw_kschedule, M_CRYPTO_DATA);
	zfree(ses->swcr_auth.sw_ictx, M_CRYPTO_DATA);
	zfree(ses->swcr_auth.sw_octx, M_CRYPTO_DATA);
}
/*
 * Process a software request.
 */
static int
swcr_process(device_t dev, struct cryptop *crp, int hint)
{
	struct swcr_session *ses;

	ses = crypto_get_driver_session(crp->crp_session);
	mtx_lock(&ses->swcr_lock);

	crp->crp_etype = ses->swcr_process(ses, crp);

	mtx_unlock(&ses->swcr_lock);
	crypto_done(crp);
	return (0);
}
static void
swcr_identify(driver_t *drv, device_t parent)
{
	/* NB: order 10 is so we get attached after h/w devices */
	if (device_find_child(parent, "cryptosoft", -1) == NULL &&
	    BUS_ADD_CHILD(parent, 10, "cryptosoft", 0) == 0)
		panic("cryptosoft: could not attach");
}
static int
swcr_probe(device_t dev)
{
	device_set_desc(dev, "software crypto");

	return (BUS_PROBE_NOWILDCARD);
}
static int
swcr_attach(device_t dev)
{

	swcr_id = crypto_get_driverid(dev, sizeof(struct swcr_session),
	    CRYPTOCAP_F_SOFTWARE | CRYPTOCAP_F_SYNC);
	if (swcr_id < 0) {
		device_printf(dev, "cannot initialize!");
		return (ENXIO);
	}

	return (0);
}
static int
swcr_detach(device_t dev)
{
	crypto_unregister_all(swcr_id);
	return (0);
}
static device_method_t swcr_methods[] = {
	DEVMETHOD(device_identify,	swcr_identify),
	DEVMETHOD(device_probe,		swcr_probe),
	DEVMETHOD(device_attach,	swcr_attach),
	DEVMETHOD(device_detach,	swcr_detach),

	DEVMETHOD(cryptodev_probesession, swcr_probesession),
	DEVMETHOD(cryptodev_newsession,	swcr_newsession),
	DEVMETHOD(cryptodev_freesession, swcr_freesession),
	DEVMETHOD(cryptodev_process,	swcr_process),

	{0, 0},
};
static driver_t swcr_driver = {
	"cryptosoft",
	swcr_methods,
	0,		/* NB: no softc */
};
static devclass_t swcr_devclass;
/*
 * NB: We explicitly reference the crypto module so we
 * get the necessary ordering when built as a loadable
 * module.  This is required because we bundle the crypto
 * module code together with the cryptosoft driver (otherwise
 * normal module dependencies would handle things).
 */
extern int crypto_modevent(struct module *, int, void *);

/* XXX where to attach */
DRIVER_MODULE(cryptosoft, nexus, swcr_driver, swcr_devclass, crypto_modevent, 0);
MODULE_VERSION(cryptosoft, 1);
MODULE_DEPEND(cryptosoft, crypto, 1, 1, 1);