1 /* $OpenBSD: cryptosoft.c,v 1.35 2002/04/26 08:43:50 deraadt Exp $ */
4 * The author of this code is Angelos D. Keromytis (angelos@cis.upenn.edu)
5 * Copyright (c) 2002-2006 Sam Leffler, Errno Consulting
7 * This code was written by Angelos D. Keromytis in Athens, Greece, in
8 * February 2000. Network Security Technologies Inc. (NSTI) kindly
9 * supported the development of this code.
11 * Copyright (c) 2000, 2001 Angelos D. Keromytis
12 * Copyright (c) 2014-2021 The FreeBSD Foundation
13 * All rights reserved.
15 * Portions of this software were developed by John-Mark Gurney
16 * under sponsorship of the FreeBSD Foundation and
17 * Rubicon Communications, LLC (Netgate).
19 * Portions of this software were developed by Ararat River
20 * Consulting, LLC under sponsorship of the FreeBSD Foundation.
22 * Permission to use, copy, and modify this software with or without fee
23 * is hereby granted, provided that this entire notice is included in
24 * all source code copies of any software which is or includes a copy or
25 * modification of this software.
27 * THIS SOFTWARE IS BEING PROVIDED "AS IS", WITHOUT ANY EXPRESS OR
28 * IMPLIED WARRANTY. IN PARTICULAR, NONE OF THE AUTHORS MAKES ANY
29 * REPRESENTATION OR WARRANTY OF ANY KIND CONCERNING THE
30 * MERCHANTABILITY OF THIS SOFTWARE OR ITS FITNESS FOR ANY PARTICULAR
34 #include <sys/param.h>
35 #include <sys/systm.h>
36 #include <sys/malloc.h>
38 #include <sys/module.h>
39 #include <sys/sysctl.h>
40 #include <sys/errno.h>
41 #include <sys/random.h>
42 #include <sys/kernel.h>
44 #include <sys/endian.h>
45 #include <sys/limits.h>
47 #include <crypto/sha1.h>
48 #include <opencrypto/rmd160.h>
50 #include <opencrypto/cryptodev.h>
51 #include <opencrypto/xform.h>
55 #include "cryptodev_if.h"
60 const struct auth_hash *sw_axf;
67 const struct enc_xform *sw_exf;
71 const struct comp_algo *sw_cxf;
75 int (*swcr_process)(const struct swcr_session *, struct cryptop *);
77 struct swcr_auth swcr_auth;
78 struct swcr_encdec swcr_encdec;
79 struct swcr_compdec swcr_compdec;
82 static int32_t swcr_id;
84 static void swcr_freesession(device_t dev, crypto_session_t cses);
86 /* Used for CRYPTO_NULL_CBC. */
88 swcr_null(const struct swcr_session *ses, struct cryptop *crp)
95 * Apply a symmetric encryption/decryption algorithm.
/*
 * swcr_encdec() -- software symmetric encrypt/decrypt for one request.
 * Walks the request input (and optional separate output) buffer with
 * crypto cursors, handing maximal runs of whole cipher blocks to
 * encrypt_multi()/decrypt_multi(), and bouncing any block that straddles
 * a segment boundary through the local 'blk' staging buffer.
 *
 * NOTE(review): this extract is a sampled subset of the original file;
 * interior lines (braces, error returns, else-arms) are elided.  The
 * comments below describe only what the visible lines establish.
 */
98 swcr_encdec(const struct swcr_session *ses, struct cryptop *crp)
100 unsigned char blk[EALG_MAX_BLOCK_LEN];
101 const struct crypto_session_params *csp;
102 const struct enc_xform *exf;
103 const struct swcr_encdec *sw;
105 size_t inlen, outlen, todo;
107 struct crypto_buffer_cursor cc_in, cc_out;
108 const unsigned char *inblk;
109 unsigned char *outblk;
115 sw = &ses->swcr_encdec;
117 csp = crypto_get_params(crp->crp_session);
/*
 * Block ciphers (native_blocksize == 0) require the payload to be a
 * multiple of the transform's block size; stream ciphers use their
 * native block size and may have a trailing partial block.
 */
119 if (exf->native_blocksize == 0) {
120 /* Check for non-padded data */
121 if ((crp->crp_payload_length % exf->blocksize) != 0)
124 blksz = exf->blocksize;
126 blksz = exf->native_blocksize;
128 if (exf == &enc_xform_aes_icm &&
129 (crp->crp_flags & CRYPTO_F_IV_SEPARATE) == 0)
/* Per-request context on the stack; re-keyed if the request
 * carries its own cipher key, otherwise cloned from the session. */
132 ctx = __builtin_alloca(exf->ctxsize);
133 if (crp->crp_cipher_key != NULL) {
134 error = exf->setkey(ctx, crp->crp_cipher_key,
135 csp->csp_cipher_klen);
139 memcpy(ctx, sw->sw_ctx, exf->ctxsize);
141 crypto_read_iv(crp, blk);
142 exf->reinit(ctx, blk, csp->csp_ivlen);
144 crypto_cursor_init(&cc_in, &crp->crp_buf);
145 crypto_cursor_advance(&cc_in, crp->crp_payload_start);
146 if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
147 crypto_cursor_init(&cc_out, &crp->crp_obuf);
148 crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
152 encrypting = CRYPTO_OP_IS_ENCRYPT(crp->crp_op);
155 * Loop through encrypting blocks. 'inlen' is the remaining
156 * length of the current segment in the input buffer.
157 * 'outlen' is the remaining length of current segment in the
161 for (resid = crp->crp_payload_length; resid >= blksz; resid -= todo) {
163 inblk = crypto_cursor_segment(&cc_in, &inlen);
165 outblk = crypto_cursor_segment(&cc_out, &outlen);
168 * If the current block is not contained within the
169 * current input/output segment, use 'blk' as a local
173 crypto_cursor_copydata(&cc_in, blksz, blk);
177 if (outlen < blksz) {
/* Process as many whole blocks as fit in both segments at once. */
182 todo = rounddown2(MIN(resid, MIN(inlen, outlen)), blksz);
185 exf->encrypt_multi(ctx, inblk, outblk, todo);
187 exf->decrypt_multi(ctx, inblk, outblk, todo);
190 inblk = crypto_cursor_segment(&cc_in, &inlen);
192 crypto_cursor_advance(&cc_in, todo);
198 crypto_cursor_copyback(&cc_out, blksz, blk);
199 outblk = crypto_cursor_segment(&cc_out, &outlen);
201 crypto_cursor_advance(&cc_out, todo);
207 /* Handle trailing partial block for stream ciphers. */
209 KASSERT(exf->native_blocksize != 0,
210 ("%s: partial block of %d bytes for cipher %s",
211 __func__, resid, exf->name));
212 KASSERT(resid < blksz, ("%s: partial block too big", __func__));
214 inblk = crypto_cursor_segment(&cc_in, &inlen);
215 outblk = crypto_cursor_segment(&cc_out, &outlen);
217 crypto_cursor_copydata(&cc_in, resid, blk);
223 exf->encrypt_last(ctx, inblk, outblk,
226 exf->decrypt_last(ctx, inblk, outblk,
229 crypto_cursor_copyback(&cc_out, resid, blk);
/* Scrub key schedule and staging buffer from the stack. */
232 explicit_bzero(ctx, exf->ctxsize);
233 explicit_bzero(blk, sizeof(blk));
238 * Compute or verify hash.
/*
 * swcr_authcompute() -- compute (or verify) a plain or HMAC digest over
 * the request's AAD and payload.  The HMAC inner/outer pads are rebuilt
 * when the request carries its own auth key; otherwise the precomputed
 * session context (sw_ictx/sw_octx) is cloned.
 *
 * NOTE(review): sampled extract -- interior lines are elided; comments
 * reflect only the visible code.
 */
241 swcr_authcompute(const struct swcr_session *ses, struct cryptop *crp)
245 u_char aalg[HASH_MAX_LEN];
246 u_char uaalg[HASH_MAX_LEN];
248 const struct crypto_session_params *csp;
249 const struct swcr_auth *sw;
250 const struct auth_hash *axf;
253 sw = &ses->swcr_auth;
257 csp = crypto_get_params(crp->crp_session);
258 if (crp->crp_auth_key != NULL) {
260 hmac_init_ipad(axf, crp->crp_auth_key,
261 csp->csp_auth_klen, &s.ctx);
264 axf->Setkey(&s.ctx, crp->crp_auth_key,
268 memcpy(&s.ctx, sw->sw_ictx, axf->ctxsize);
/* AAD: either a flat separate-AAD buffer or a range in the request. */
270 if (crp->crp_aad != NULL)
271 err = axf->Update(&s.ctx, crp->crp_aad, crp->crp_aad_length);
273 err = crypto_apply(crp, crp->crp_aad_start, crp->crp_aad_length,
274 axf->Update, &s.ctx);
/* When encrypting into a separate output buffer, hash the output
 * (ciphertext) range rather than the input. */
278 if (CRYPTO_HAS_OUTPUT_BUFFER(crp) &&
279 CRYPTO_OP_IS_ENCRYPT(crp->crp_op))
280 err = crypto_apply_buf(&crp->crp_obuf,
281 crp->crp_payload_output_start, crp->crp_payload_length,
282 axf->Update, &s.ctx);
284 err = crypto_apply(crp, crp->crp_payload_start,
285 crp->crp_payload_length, axf->Update, &s.ctx);
/* Extended sequence numbers: fold 4 bytes of ESN state into the MAC. */
289 if (csp->csp_flags & CSP_F_ESN)
290 axf->Update(&s.ctx, crp->crp_esn, 4);
292 axf->Final(s.aalg, &s.ctx);
/* HMAC outer pass: hash the inner digest under the opad context. */
294 if (crp->crp_auth_key != NULL)
295 hmac_init_opad(axf, crp->crp_auth_key,
296 csp->csp_auth_klen, &s.ctx);
298 memcpy(&s.ctx, sw->sw_octx, axf->ctxsize);
299 axf->Update(&s.ctx, s.aalg, axf->hashsize);
300 axf->Final(s.aalg, &s.ctx);
/* Constant-time compare on verify to avoid a timing oracle. */
303 if (crp->crp_op & CRYPTO_OP_VERIFY_DIGEST) {
304 crypto_copydata(crp, crp->crp_digest_start, sw->sw_mlen, s.uaalg);
305 if (timingsafe_bcmp(s.aalg, s.uaalg, sw->sw_mlen) != 0)
308 /* Inject the authentication data */
309 crypto_copyback(crp, crp->crp_digest_start, sw->sw_mlen, s.aalg);
312 explicit_bzero(&s, sizeof(s));
/* Compile-time bounds: an OCF request length (int) must fit GCM's limits. */
316 CTASSERT(INT_MAX <= (1ll<<39) - 256); /* GCM: plain text < 2^39-256 */
317 CTASSERT(INT_MAX <= (uint64_t)-1); /* GCM: associated data <= 2^64-1 */
/*
 * swcr_gmac() -- GMAC (GCM authentication-only) over the payload.
 * Hashes whole 16-byte blocks directly from buffer segments, zero-pads
 * a trailing partial block, appends the bit-length block, then either
 * verifies the tag (constant-time) or writes it back.
 *
 * NOTE(review): sampled extract -- interior lines are elided.
 */
320 swcr_gmac(const struct swcr_session *ses, struct cryptop *crp)
324 uint32_t blkbuf[howmany(AES_BLOCK_LEN, sizeof(uint32_t))];
325 u_char tag[GMAC_DIGEST_LEN];
326 u_char tag2[GMAC_DIGEST_LEN];
328 u_char *blk = (u_char *)s.blkbuf;
329 struct crypto_buffer_cursor cc;
331 const struct swcr_auth *swa;
332 const struct auth_hash *axf;
335 int blksz, error, ivlen, resid;
337 swa = &ses->swcr_auth;
339 blksz = GMAC_BLOCK_LEN;
340 KASSERT(axf->blocksize == blksz, ("%s: axf block size mismatch",
343 if (crp->crp_auth_key != NULL) {
345 axf->Setkey(&s.ctx, crp->crp_auth_key,
346 crypto_get_params(crp->crp_session)->csp_auth_klen);
348 memcpy(&s.ctx, swa->sw_ictx, axf->ctxsize);
350 /* Initialize the IV */
351 ivlen = AES_GCM_IV_LEN;
352 crypto_read_iv(crp, blk);
354 axf->Reinit(&s.ctx, blk, ivlen);
355 crypto_cursor_init(&cc, &crp->crp_buf);
356 crypto_cursor_advance(&cc, crp->crp_payload_start);
357 for (resid = crp->crp_payload_length; resid >= blksz; resid -= len) {
358 inblk = crypto_cursor_segment(&cc, &len);
/* Hash in place when the segment holds at least one full block;
 * otherwise stage a block through 'blk'. */
360 len = rounddown(MIN(len, resid), blksz);
361 crypto_cursor_advance(&cc, len);
364 crypto_cursor_copydata(&cc, len, blk);
367 axf->Update(&s.ctx, inblk, len);
/* Trailing partial block: zero-pad to a full GMAC block. */
370 memset(blk, 0, blksz);
371 crypto_cursor_copydata(&cc, resid, blk);
372 axf->Update(&s.ctx, blk, blksz);
/* Final length block: payload length in bits, big-endian, in the
 * second 32-bit word (AAD length word stays zero for GMAC). */
376 memset(blk, 0, blksz);
377 blkp = (uint32_t *)blk + 1;
378 *blkp = htobe32(crp->crp_payload_length * 8);
379 axf->Update(&s.ctx, blk, blksz);
382 axf->Final(s.tag, &s.ctx);
385 if (crp->crp_op & CRYPTO_OP_VERIFY_DIGEST) {
386 crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
388 if (timingsafe_bcmp(s.tag, s.tag2, swa->sw_mlen) != 0)
391 /* Inject the authentication data */
392 crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen, s.tag);
394 explicit_bzero(&s, sizeof(s));
/*
 * swcr_gcm() -- AES-GCM AEAD.  Encrypt: single pass that encrypts and
 * folds the ciphertext into the MAC.  Decrypt: first pass MACs the
 * ciphertext and verifies the tag; only on a match is a second pass run
 * to decrypt, so no plaintext is released for a forged tag.
 *
 * NOTE(review): sampled extract -- interior lines are elided; comments
 * reflect only the visible code.
 */
399 swcr_gcm(const struct swcr_session *ses, struct cryptop *crp)
402 uint32_t blkbuf[howmany(AES_BLOCK_LEN, sizeof(uint32_t))];
403 u_char tag[GMAC_DIGEST_LEN];
404 u_char tag2[GMAC_DIGEST_LEN];
406 u_char *blk = (u_char *)s.blkbuf;
407 struct crypto_buffer_cursor cc_in, cc_out;
410 size_t inlen, outlen, todo;
411 const struct swcr_auth *swa;
412 const struct swcr_encdec *swe;
413 const struct enc_xform *exf;
416 int blksz, error, ivlen, r, resid;
418 swa = &ses->swcr_auth;
419 swe = &ses->swcr_encdec;
421 blksz = GMAC_BLOCK_LEN;
422 KASSERT(blksz == exf->native_blocksize,
423 ("%s: blocksize mismatch", __func__));
425 if ((crp->crp_flags & CRYPTO_F_IV_SEPARATE) == 0)
428 ivlen = AES_GCM_IV_LEN;
430 ctx = __builtin_alloca(exf->ctxsize);
431 if (crp->crp_cipher_key != NULL)
432 exf->setkey(ctx, crp->crp_cipher_key,
433 crypto_get_params(crp->crp_session)->csp_cipher_klen);
435 memcpy(ctx, swe->sw_ctx, exf->ctxsize);
436 exf->reinit(ctx, crp->crp_iv, ivlen);
438 /* Supply MAC with AAD */
439 if (crp->crp_aad != NULL) {
440 inlen = rounddown2(crp->crp_aad_length, blksz);
442 exf->update(ctx, crp->crp_aad, inlen);
443 if (crp->crp_aad_length != inlen) {
444 memset(blk, 0, blksz);
445 memcpy(blk, (char *)crp->crp_aad + inlen,
446 crp->crp_aad_length - inlen);
447 exf->update(ctx, blk, blksz);
/* AAD embedded in the request buffer: walk it with a cursor. */
450 crypto_cursor_init(&cc_in, &crp->crp_buf);
451 crypto_cursor_advance(&cc_in, crp->crp_aad_start);
452 for (resid = crp->crp_aad_length; resid >= blksz;
454 inblk = crypto_cursor_segment(&cc_in, &inlen);
455 if (inlen >= blksz) {
456 inlen = rounddown2(MIN(inlen, resid), blksz);
457 crypto_cursor_advance(&cc_in, inlen);
460 crypto_cursor_copydata(&cc_in, inlen, blk);
463 exf->update(ctx, inblk, inlen);
466 memset(blk, 0, blksz);
467 crypto_cursor_copydata(&cc_in, resid, blk);
468 exf->update(ctx, blk, blksz);
472 /* Do encryption with MAC */
473 crypto_cursor_init(&cc_in, &crp->crp_buf);
474 crypto_cursor_advance(&cc_in, crp->crp_payload_start);
475 if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
476 crypto_cursor_init(&cc_out, &crp->crp_obuf);
477 crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
482 for (resid = crp->crp_payload_length; resid >= blksz; resid -= todo) {
484 inblk = crypto_cursor_segment(&cc_in, &inlen);
486 outblk = crypto_cursor_segment(&cc_out, &outlen);
489 crypto_cursor_copydata(&cc_in, blksz, blk);
/* Encrypt pass MACs the just-produced ciphertext; the decrypt
 * pass here only MACs (actual decryption deferred until the tag
 * verifies below). */
494 if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
495 if (outlen < blksz) {
500 todo = rounddown2(MIN(resid, MIN(inlen, outlen)),
503 exf->encrypt_multi(ctx, inblk, outblk, todo);
504 exf->update(ctx, outblk, todo);
507 crypto_cursor_copyback(&cc_out, blksz, blk);
508 outblk = crypto_cursor_segment(&cc_out, &outlen);
510 crypto_cursor_advance(&cc_out, todo);
515 todo = rounddown2(MIN(resid, inlen), blksz);
516 exf->update(ctx, inblk, todo);
520 inblk = crypto_cursor_segment(&cc_in, &inlen);
522 crypto_cursor_advance(&cc_in, todo);
528 crypto_cursor_copydata(&cc_in, resid, blk);
529 if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
530 exf->encrypt_last(ctx, blk, blk, resid);
531 crypto_cursor_copyback(&cc_out, resid, blk);
533 exf->update(ctx, blk, resid);
/* GCM length block: AAD bits in word 1, payload bits in word 3. */
537 memset(blk, 0, blksz);
538 blkp = (uint32_t *)blk + 1;
539 *blkp = htobe32(crp->crp_aad_length * 8);
540 blkp = (uint32_t *)blk + 3;
541 *blkp = htobe32(crp->crp_payload_length * 8);
542 exf->update(ctx, blk, blksz);
545 exf->final(s.tag, ctx);
549 if (!CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
550 crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
552 r = timingsafe_bcmp(s.tag, s.tag2, swa->sw_mlen);
558 /* tag matches, decrypt data */
559 crypto_cursor_init(&cc_in, &crp->crp_buf);
560 crypto_cursor_advance(&cc_in, crp->crp_payload_start);
/* NOTE(review): guard is 'resid > blksz' (not '>=') -- the final
 * full-or-partial block appears to be finished by decrypt_last()
 * below; confirm against upstream before changing. */
563 for (resid = crp->crp_payload_length; resid > blksz;
566 inblk = crypto_cursor_segment(&cc_in, &inlen);
568 outblk = crypto_cursor_segment(&cc_out, &outlen);
570 crypto_cursor_copydata(&cc_in, blksz, blk);
574 if (outlen < blksz) {
579 todo = rounddown2(MIN(resid, MIN(inlen, outlen)),
582 exf->decrypt_multi(ctx, inblk, outblk, todo);
585 inblk = crypto_cursor_segment(&cc_in, &inlen);
587 crypto_cursor_advance(&cc_in, todo);
593 crypto_cursor_copyback(&cc_out, blksz, blk);
594 outblk = crypto_cursor_segment(&cc_out,
597 crypto_cursor_advance(&cc_out, todo);
603 crypto_cursor_copydata(&cc_in, resid, blk);
604 exf->decrypt_last(ctx, blk, blk, resid);
605 crypto_cursor_copyback(&cc_out, resid, blk);
608 /* Inject the authentication data */
609 crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen,
614 explicit_bzero(ctx, exf->ctxsize);
615 explicit_bzero(&s, sizeof(s));
/*
 * build_ccm_b0() -- construct the CCM B0 block (flags byte, nonce, and
 * big-endian message length) per the CCM construction: the flags byte
 * encodes AAD presence, (tag_length-2)/2, and L-1, where L = 15 - nonce
 * length is the number of bytes used for the message length field.
 *
 * NOTE(review): sampled extract -- the flags byte's low bits and the
 * length-serialization body are partially elided.
 */
621 build_ccm_b0(const char *nonce, u_int nonce_length, u_int aad_length,
622 u_int data_length, u_int tag_length, uint8_t *b0)
627 KASSERT(nonce_length >= 7 && nonce_length <= 13,
628 ("nonce_length must be between 7 and 13 bytes"));
631 * Need to determine the L field value. This is the number of
632 * bytes needed to specify the length of the message; the length
633 * is whatever is left in the 16 bytes after specifying flags and
636 L = 15 - nonce_length;
638 flags = ((aad_length > 0) << 6) +
639 (((tag_length - 2) / 2) << 3) +
643 * Now we need to set up the first block, which has flags, nonce,
644 * and the message length.
647 memcpy(b0 + 1, nonce, nonce_length);
648 bp = b0 + 1 + nonce_length;
650 /* Need to copy L' [aka L-1] bytes of data_length */
/* Serialize data_length big-endian from the tail of the block back
 * toward the nonce. */
651 for (uint8_t *dst = b0 + CCM_CBC_BLOCK_LEN - 1; dst >= bp; dst--) {
657 /* NB: OCF only supports AAD lengths < 2^32. */
/*
 * build_ccm_aad_length() -- serialize the CCM AAD length field into
 * 'blk': a 2-byte big-endian value for short AAD, otherwise a marker
 * prefix followed by a 4-byte big-endian length (6 bytes total).
 * Returns the number of bytes written.
 * NOTE(review): the 2-byte marker bytes written at blk[0..1] in the long
 * form are on elided lines -- confirm against upstream (0xff, 0xfe).
 */
659 build_ccm_aad_length(u_int aad_length, uint8_t *blk)
661 if (aad_length < ((1 << 16) - (1 << 8))) {
662 be16enc(blk, aad_length);
663 return (sizeof(uint16_t));
667 be32enc(blk + 2, aad_length);
668 return (2 + sizeof(uint32_t));
/*
 * swcr_ccm_cbc_mac() -- AES-CCM CBC-MAC (authentication-only).  The
 * payload is authenticated as CCM "AAD": B0 is built with the payload
 * length in the AAD slot and a zero data length, then the AAD length
 * field and payload are hashed, and the tag is verified or injected.
 *
 * NOTE(review): sampled extract -- interior lines are elided.
 */
673 swcr_ccm_cbc_mac(const struct swcr_session *ses, struct cryptop *crp)
677 u_char blk[CCM_CBC_BLOCK_LEN];
678 u_char tag[AES_CBC_MAC_HASH_LEN];
679 u_char tag2[AES_CBC_MAC_HASH_LEN];
681 const struct crypto_session_params *csp;
682 const struct swcr_auth *swa;
683 const struct auth_hash *axf;
684 int error, ivlen, len;
686 csp = crypto_get_params(crp->crp_session);
687 swa = &ses->swcr_auth;
690 if (crp->crp_auth_key != NULL) {
692 axf->Setkey(&s.ctx, crp->crp_auth_key, csp->csp_auth_klen);
694 memcpy(&s.ctx, swa->sw_ictx, axf->ctxsize);
696 /* Initialize the IV */
697 ivlen = csp->csp_ivlen;
699 /* Supply MAC with IV */
700 axf->Reinit(&s.ctx, crp->crp_iv, ivlen);
702 /* Supply MAC with b0. */
/* Note: payload length goes in B0's aad_length slot, data_length 0. */
703 build_ccm_b0(crp->crp_iv, ivlen, crp->crp_payload_length, 0,
704 swa->sw_mlen, s.blk);
705 axf->Update(&s.ctx, s.blk, CCM_CBC_BLOCK_LEN);
707 len = build_ccm_aad_length(crp->crp_payload_length, s.blk);
708 axf->Update(&s.ctx, s.blk, len);
710 crypto_apply(crp, crp->crp_payload_start, crp->crp_payload_length,
711 axf->Update, &s.ctx);
714 axf->Final(s.tag, &s.ctx);
717 if (crp->crp_op & CRYPTO_OP_VERIFY_DIGEST) {
718 crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
720 if (timingsafe_bcmp(s.tag, s.tag2, swa->sw_mlen) != 0)
723 /* Inject the authentication data */
724 crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen,
727 explicit_bzero(&s, sizeof(s));
/*
 * swcr_ccm() -- AES-CCM AEAD.  CCM authenticates the *plaintext*, so on
 * decrypt the data must be decrypted once (and discarded) to compute
 * the tag, then decrypted again for real only after the tag verifies.
 * Encrypt is a single pass: MAC the plaintext, then encrypt it.
 *
 * NOTE(review): sampled extract -- interior lines are elided; comments
 * reflect only the visible code.
 */
732 swcr_ccm(const struct swcr_session *ses, struct cryptop *crp)
734 const struct crypto_session_params *csp;
736 uint32_t blkbuf[howmany(AES_BLOCK_LEN, sizeof(uint32_t))];
737 u_char tag[AES_CBC_MAC_HASH_LEN];
738 u_char tag2[AES_CBC_MAC_HASH_LEN];
740 u_char *blk = (u_char *)s.blkbuf;
741 struct crypto_buffer_cursor cc_in, cc_out;
744 size_t inlen, outlen, todo;
745 const struct swcr_auth *swa;
746 const struct swcr_encdec *swe;
747 const struct enc_xform *exf;
750 int blksz, error, ivlen, r, resid;
752 csp = crypto_get_params(crp->crp_session);
753 swa = &ses->swcr_auth;
754 swe = &ses->swcr_encdec;
756 blksz = AES_BLOCK_LEN;
757 KASSERT(blksz == exf->native_blocksize,
758 ("%s: blocksize mismatch", __func__));
/* CCM's length field (15 - ivlen bytes) bounds the payload size. */
760 if (crp->crp_payload_length > ccm_max_payload_length(csp))
763 if ((crp->crp_flags & CRYPTO_F_IV_SEPARATE) == 0)
766 ivlen = csp->csp_ivlen;
768 ctx = __builtin_alloca(exf->ctxsize);
769 if (crp->crp_cipher_key != NULL)
770 exf->setkey(ctx, crp->crp_cipher_key,
771 crypto_get_params(crp->crp_session)->csp_cipher_klen);
773 memcpy(ctx, swe->sw_ctx, exf->ctxsize);
774 exf->reinit(ctx, crp->crp_iv, ivlen);
776 /* Supply MAC with b0. */
777 _Static_assert(sizeof(s.blkbuf) >= CCM_CBC_BLOCK_LEN,
778 "blkbuf too small for b0");
779 build_ccm_b0(crp->crp_iv, ivlen, crp->crp_aad_length,
780 crp->crp_payload_length, swa->sw_mlen, blk);
781 exf->update(ctx, blk, CCM_CBC_BLOCK_LEN);
783 /* Supply MAC with AAD */
784 if (crp->crp_aad_length != 0) {
785 len = build_ccm_aad_length(crp->crp_aad_length, blk);
786 exf->update(ctx, blk, len);
787 if (crp->crp_aad != NULL)
788 exf->update(ctx, crp->crp_aad, crp->crp_aad_length);
790 crypto_apply(crp, crp->crp_aad_start,
791 crp->crp_aad_length, exf->update, ctx);
793 /* Pad the AAD (including length field) to a full block. */
794 len = (len + crp->crp_aad_length) % CCM_CBC_BLOCK_LEN;
796 len = CCM_CBC_BLOCK_LEN - len;
797 memset(blk, 0, CCM_CBC_BLOCK_LEN);
798 exf->update(ctx, blk, len);
802 /* Do encryption/decryption with MAC */
803 crypto_cursor_init(&cc_in, &crp->crp_buf);
804 crypto_cursor_advance(&cc_in, crp->crp_payload_start);
805 if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
806 crypto_cursor_init(&cc_out, &crp->crp_obuf);
807 crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
812 for (resid = crp->crp_payload_length; resid >= blksz; resid -= todo) {
814 inblk = crypto_cursor_segment(&cc_in, &inlen);
816 outblk = crypto_cursor_segment(&cc_out, &outlen);
819 crypto_cursor_copydata(&cc_in, blksz, blk);
/* Encrypt: MAC plaintext first, then encrypt (CCM order). */
824 if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
825 if (outlen < blksz) {
830 todo = rounddown2(MIN(resid, MIN(inlen, outlen)),
833 exf->update(ctx, inblk, todo);
834 exf->encrypt_multi(ctx, inblk, outblk, todo);
837 crypto_cursor_copyback(&cc_out, blksz, blk);
838 outblk = crypto_cursor_segment(&cc_out, &outlen);
840 crypto_cursor_advance(&cc_out, todo);
846 * One of the problems with CCM+CBC is that
847 * the authentication is done on the
848 * unencrypted data. As a result, we have to
849 * decrypt the data twice: once to generate
850 * the tag and a second time after the tag is
854 exf->decrypt(ctx, inblk, blk);
855 exf->update(ctx, blk, todo);
859 inblk = crypto_cursor_segment(&cc_in, &inlen);
861 crypto_cursor_advance(&cc_in, todo);
867 crypto_cursor_copydata(&cc_in, resid, blk);
868 if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
869 exf->update(ctx, blk, resid);
870 exf->encrypt_last(ctx, blk, blk, resid);
871 crypto_cursor_copyback(&cc_out, resid, blk);
873 exf->decrypt_last(ctx, blk, blk, resid);
874 exf->update(ctx, blk, resid);
879 exf->final(s.tag, ctx);
883 if (!CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
884 crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
886 r = timingsafe_bcmp(s.tag, s.tag2, swa->sw_mlen);
892 /* tag matches, decrypt data */
/* Second pass: reinit the counter and decrypt for real. */
893 exf->reinit(ctx, crp->crp_iv, ivlen);
894 crypto_cursor_init(&cc_in, &crp->crp_buf);
895 crypto_cursor_advance(&cc_in, crp->crp_payload_start);
898 for (resid = crp->crp_payload_length; resid >= blksz;
901 inblk = crypto_cursor_segment(&cc_in, &inlen);
903 outblk = crypto_cursor_segment(&cc_out,
907 crypto_cursor_copydata(&cc_in, blksz, blk);
911 if (outlen < blksz) {
916 todo = rounddown2(MIN(resid, MIN(inlen, outlen)),
919 exf->decrypt_multi(ctx, inblk, outblk, todo);
922 inblk = crypto_cursor_segment(&cc_in, &inlen);
924 crypto_cursor_advance(&cc_in, todo);
930 crypto_cursor_copyback(&cc_out, blksz, blk);
931 outblk = crypto_cursor_segment(&cc_out,
934 crypto_cursor_advance(&cc_out, todo);
940 crypto_cursor_copydata(&cc_in, resid, blk);
941 exf->decrypt_last(ctx, blk, blk, resid);
942 crypto_cursor_copyback(&cc_out, resid, blk);
945 /* Inject the authentication data */
946 crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen,
951 explicit_bzero(ctx, exf->ctxsize);
952 explicit_bzero(&s, sizeof(s));
/*
 * swcr_chacha20_poly1305() -- ChaCha20-Poly1305 (and XChaCha20) AEAD.
 * AAD and ciphertext are each zero-padded to a Poly1305 block, followed
 * by the little-endian 64-bit AAD and payload lengths.  Decrypt verifies
 * the tag before releasing plaintext.
 *
 * NOTE(review): sampled extract -- interior lines are elided; comments
 * reflect only the visible code.
 */
957 swcr_chacha20_poly1305(const struct swcr_session *ses, struct cryptop *crp)
959 const struct crypto_session_params *csp;
961 uint64_t blkbuf[howmany(CHACHA20_NATIVE_BLOCK_LEN, sizeof(uint64_t))];
962 u_char tag[POLY1305_HASH_LEN];
963 u_char tag2[POLY1305_HASH_LEN];
965 u_char *blk = (u_char *)s.blkbuf;
966 struct crypto_buffer_cursor cc_in, cc_out;
969 size_t inlen, outlen, todo;
971 const struct swcr_auth *swa;
972 const struct swcr_encdec *swe;
973 const struct enc_xform *exf;
975 int blksz, error, r, resid;
977 swa = &ses->swcr_auth;
978 swe = &ses->swcr_encdec;
980 blksz = exf->native_blocksize;
981 KASSERT(blksz <= sizeof(s.blkbuf), ("%s: blocksize mismatch", __func__));
983 if ((crp->crp_flags & CRYPTO_F_IV_SEPARATE) == 0)
986 csp = crypto_get_params(crp->crp_session);
988 ctx = __builtin_alloca(exf->ctxsize);
989 if (crp->crp_cipher_key != NULL)
990 exf->setkey(ctx, crp->crp_cipher_key,
991 csp->csp_cipher_klen);
993 memcpy(ctx, swe->sw_ctx, exf->ctxsize);
994 exf->reinit(ctx, crp->crp_iv, csp->csp_ivlen);
996 /* Supply MAC with AAD */
997 if (crp->crp_aad != NULL)
998 exf->update(ctx, crp->crp_aad, crp->crp_aad_length);
1000 crypto_apply(crp, crp->crp_aad_start, crp->crp_aad_length,
/* Zero-pad the AAD to a full Poly1305 block. */
1002 if (crp->crp_aad_length % POLY1305_BLOCK_LEN != 0) {
1004 memset(blk, 0, POLY1305_BLOCK_LEN);
1005 exf->update(ctx, blk, POLY1305_BLOCK_LEN -
1006 crp->crp_aad_length % POLY1305_BLOCK_LEN);
1009 /* Do encryption with MAC */
1010 crypto_cursor_init(&cc_in, &crp->crp_buf);
1011 crypto_cursor_advance(&cc_in, crp->crp_payload_start);
1012 if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
1013 crypto_cursor_init(&cc_out, &crp->crp_obuf);
1014 crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
/* Encrypt pass: encrypt, then MAC the produced ciphertext. */
1019 if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
1020 for (resid = crp->crp_payload_length; resid >= blksz;
1023 inblk = crypto_cursor_segment(&cc_in, &inlen);
1025 outblk = crypto_cursor_segment(&cc_out,
1028 if (inlen < blksz) {
1029 crypto_cursor_copydata(&cc_in, blksz, blk);
1034 if (outlen < blksz) {
1039 todo = rounddown2(MIN(resid, MIN(inlen, outlen)),
1042 exf->encrypt_multi(ctx, inblk, outblk, todo);
1043 exf->update(ctx, outblk, todo);
1046 inblk = crypto_cursor_segment(&cc_in, &inlen);
1048 crypto_cursor_advance(&cc_in, todo);
1053 if (outblk == blk) {
1054 crypto_cursor_copyback(&cc_out, blksz, blk);
1055 outblk = crypto_cursor_segment(&cc_out, &outlen);
1057 crypto_cursor_advance(&cc_out, todo);
1063 crypto_cursor_copydata(&cc_in, resid, blk);
1064 exf->encrypt_last(ctx, blk, blk, resid);
1065 crypto_cursor_copyback(&cc_out, resid, blk);
1066 exf->update(ctx, blk, resid);
/* Decrypt request: MAC the ciphertext only on this first pass. */
1069 crypto_apply(crp, crp->crp_payload_start,
1070 crp->crp_payload_length, exf->update, ctx);
1071 if (crp->crp_payload_length % POLY1305_BLOCK_LEN != 0) {
1073 memset(blk, 0, POLY1305_BLOCK_LEN);
1074 exf->update(ctx, blk, POLY1305_BLOCK_LEN -
1075 crp->crp_payload_length % POLY1305_BLOCK_LEN);
/* Length block: LE64 AAD length then LE64 payload length (RFC 8439). */
1079 blkp = (uint64_t *)blk;
1080 blkp[0] = htole64(crp->crp_aad_length);
1081 blkp[1] = htole64(crp->crp_payload_length);
1082 exf->update(ctx, blk, sizeof(uint64_t) * 2);
1085 exf->final(s.tag, ctx);
1089 if (!CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
1090 crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
1092 r = timingsafe_bcmp(s.tag, s.tag2, swa->sw_mlen);
1098 /* tag matches, decrypt data */
1099 crypto_cursor_init(&cc_in, &crp->crp_buf);
1100 crypto_cursor_advance(&cc_in, crp->crp_payload_start);
/* NOTE(review): guard is 'resid > blksz'; the last block appears
 * to be finished by decrypt_last() -- confirm against upstream. */
1103 for (resid = crp->crp_payload_length; resid > blksz;
1106 inblk = crypto_cursor_segment(&cc_in, &inlen);
1108 outblk = crypto_cursor_segment(&cc_out,
1110 if (inlen < blksz) {
1111 crypto_cursor_copydata(&cc_in, blksz, blk);
1115 if (outlen < blksz) {
1120 todo = rounddown2(MIN(resid, MIN(inlen, outlen)),
1123 exf->decrypt_multi(ctx, inblk, outblk, todo);
1126 inblk = crypto_cursor_segment(&cc_in, &inlen);
1128 crypto_cursor_advance(&cc_in, todo);
1133 if (outblk == blk) {
1134 crypto_cursor_copyback(&cc_out, blksz, blk);
1135 outblk = crypto_cursor_segment(&cc_out,
1138 crypto_cursor_advance(&cc_out, todo);
1144 crypto_cursor_copydata(&cc_in, resid, blk);
1145 exf->decrypt_last(ctx, blk, blk, resid);
1146 crypto_cursor_copyback(&cc_out, resid, blk);
1149 /* Inject the authentication data */
1150 crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen,
1155 explicit_bzero(ctx, exf->ctxsize);
1156 explicit_bzero(&s, sizeof(s));
1161 * Apply a cipher and a digest to perform EtA.
/*
 * swcr_eta() -- encrypt-then-authenticate composition: on encrypt the
 * cipher runs first and the MAC covers the ciphertext; on decrypt the
 * MAC is verified before the ciphertext is decrypted.
 */
1164 swcr_eta(const struct swcr_session *ses, struct cryptop *crp)
1168 if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
1169 error = swcr_encdec(ses, crp);
1171 error = swcr_authcompute(ses, crp);
1173 error = swcr_authcompute(ses, crp);
1175 error = swcr_encdec(ses, crp);
1181 * Apply a compression/decompression algorithm
/*
 * swcr_compdec() -- (de)compress the payload through the session's
 * comp_algo.  The payload is flattened into a malloc'd buffer, run
 * through compress()/decompress() (which allocate 'out'), and copied
 * back; if the result shrank, the request buffer is trimmed in a
 * buffer-type-specific way (mbuf/uio/vm_page).
 *
 * NOTE(review): sampled extract -- error paths and frees on some
 * branches are elided.
 */
1184 swcr_compdec(const struct swcr_session *ses, struct cryptop *crp)
1186 const struct comp_algo *cxf;
1187 uint8_t *data, *out;
1191 cxf = ses->swcr_compdec.sw_cxf;
1193 /* We must handle the whole buffer of data in one time
1194 * then if there is not all the data in the mbuf, we must
1198 data = malloc(crp->crp_payload_length, M_CRYPTO_DATA, M_NOWAIT);
1201 crypto_copydata(crp, crp->crp_payload_start, crp->crp_payload_length,
1204 if (CRYPTO_OP_IS_COMPRESS(crp->crp_op))
1205 result = cxf->compress(data, crp->crp_payload_length, &out);
1207 result = cxf->decompress(data, crp->crp_payload_length, &out);
1209 free(data, M_CRYPTO_DATA);
1212 crp->crp_olen = result;
1214 /* Check the compressed size when doing compression */
1215 if (CRYPTO_OP_IS_COMPRESS(crp->crp_op)) {
1216 if (result >= crp->crp_payload_length) {
1217 /* Compression was useless, we lost time */
1218 free(out, M_CRYPTO_DATA);
1223 /* Copy back the (de)compressed data. m_copyback is
1224 * extending the mbuf as necessary.
1226 crypto_copyback(crp, crp->crp_payload_start, result, out);
1227 if (result < crp->crp_payload_length) {
1228 switch (crp->crp_buf.cb_type) {
1229 case CRYPTO_BUF_MBUF:
1230 case CRYPTO_BUF_SINGLE_MBUF:
/* Negative adjustment trims from the mbuf chain's tail. */
1231 adj = result - crp->crp_payload_length;
1232 m_adj(crp->crp_buf.cb_mbuf, adj);
1234 case CRYPTO_BUF_UIO: {
1235 struct uio *uio = crp->crp_buf.cb_uio;
1238 adj = crp->crp_payload_length - result;
1239 ind = uio->uio_iovcnt - 1;
/* Shrink iovecs from the end until 'adj' bytes are removed. */
1241 while (adj > 0 && ind >= 0) {
1242 if (adj < uio->uio_iov[ind].iov_len) {
1243 uio->uio_iov[ind].iov_len -= adj;
1247 adj -= uio->uio_iov[ind].iov_len;
1248 uio->uio_iov[ind].iov_len = 0;
1254 case CRYPTO_BUF_VMPAGE:
1255 adj = crp->crp_payload_length - result;
1256 crp->crp_buf.cb_vm_page_len -= adj;
1262 free(out, M_CRYPTO_DATA);
/*
 * swcr_setup_cipher() -- per-session cipher setup: when the session has
 * a fixed cipher key, allocate the transform context and schedule the
 * key now so per-request processing can just memcpy() it.
 * NOTE(review): sampled extract -- failure/return lines are elided.
 */
1267 swcr_setup_cipher(struct swcr_session *ses,
1268 const struct crypto_session_params *csp)
1270 struct swcr_encdec *swe;
1271 const struct enc_xform *txf;
1274 swe = &ses->swcr_encdec;
1275 txf = crypto_cipher(csp);
1276 if (csp->csp_cipher_key != NULL) {
1277 if (txf->ctxsize != 0) {
1278 swe->sw_ctx = malloc(txf->ctxsize, M_CRYPTO_DATA,
1280 if (swe->sw_ctx == NULL)
1283 error = txf->setkey(swe->sw_ctx,
1284 csp->csp_cipher_key, csp->csp_cipher_klen);
/*
 * swcr_setup_auth() -- per-session auth setup: validate/derive the MAC
 * length, precompute per-algorithm contexts (HMAC ipad/opad, keyed MAC
 * init, or plain hash init), and select the processing callback for
 * digest-mode sessions.
 * NOTE(review): sampled extract -- error paths and 'break's are elided.
 */
1293 swcr_setup_auth(struct swcr_session *ses,
1294 const struct crypto_session_params *csp)
1296 struct swcr_auth *swa;
1297 const struct auth_hash *axf;
1299 swa = &ses->swcr_auth;
1301 axf = crypto_auth_hash(csp);
/* Truncated MAC length must fit the hash; 0 means full-size. */
1303 if (csp->csp_auth_mlen < 0 || csp->csp_auth_mlen > axf->hashsize)
1305 if (csp->csp_auth_mlen == 0)
1306 swa->sw_mlen = axf->hashsize;
1308 swa->sw_mlen = csp->csp_auth_mlen;
1309 if (csp->csp_auth_klen == 0 || csp->csp_auth_key != NULL) {
1310 swa->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA,
1312 if (swa->sw_ictx == NULL)
1316 switch (csp->csp_auth_alg) {
1317 case CRYPTO_SHA1_HMAC:
1318 case CRYPTO_SHA2_224_HMAC:
1319 case CRYPTO_SHA2_256_HMAC:
1320 case CRYPTO_SHA2_384_HMAC:
1321 case CRYPTO_SHA2_512_HMAC:
1322 case CRYPTO_RIPEMD160_HMAC:
1323 swa->sw_hmac = true;
1324 if (csp->csp_auth_key != NULL) {
1325 swa->sw_octx = malloc(axf->ctxsize, M_CRYPTO_DATA,
1327 if (swa->sw_octx == NULL)
/* Precompute both HMAC pad contexts once per session. */
1329 hmac_init_ipad(axf, csp->csp_auth_key,
1330 csp->csp_auth_klen, swa->sw_ictx);
1331 hmac_init_opad(axf, csp->csp_auth_key,
1332 csp->csp_auth_klen, swa->sw_octx);
1335 case CRYPTO_RIPEMD160:
1337 case CRYPTO_SHA2_224:
1338 case CRYPTO_SHA2_256:
1339 case CRYPTO_SHA2_384:
1340 case CRYPTO_SHA2_512:
1341 case CRYPTO_NULL_HMAC:
1342 axf->Init(swa->sw_ictx);
1344 case CRYPTO_AES_NIST_GMAC:
1345 case CRYPTO_AES_CCM_CBC_MAC:
1346 case CRYPTO_POLY1305:
1347 if (csp->csp_auth_key != NULL) {
1348 axf->Init(swa->sw_ictx);
1349 axf->Setkey(swa->sw_ictx, csp->csp_auth_key,
1350 csp->csp_auth_klen);
1353 case CRYPTO_BLAKE2B:
1354 case CRYPTO_BLAKE2S:
1356 * Blake2b and Blake2s support an optional key but do
1359 if (csp->csp_auth_klen == 0)
1360 axf->Init(swa->sw_ictx);
1361 else if (csp->csp_auth_key != NULL)
1362 axf->Setkey(swa->sw_ictx, csp->csp_auth_key,
1363 csp->csp_auth_klen);
/* Digest-only sessions pick the algorithm-specific handler. */
1367 if (csp->csp_mode == CSP_MODE_DIGEST) {
1368 switch (csp->csp_auth_alg) {
1369 case CRYPTO_AES_NIST_GMAC:
1370 ses->swcr_process = swcr_gmac;
1372 case CRYPTO_AES_CCM_CBC_MAC:
1373 ses->swcr_process = swcr_ccm_cbc_mac;
1376 ses->swcr_process = swcr_authcompute;
/*
 * swcr_setup_aead() -- AEAD session setup: configure the cipher, then
 * derive the tag length from the transform's native MAC size when the
 * caller did not specify one.
 */
1384 swcr_setup_aead(struct swcr_session *ses,
1385 const struct crypto_session_params *csp)
1387 struct swcr_auth *swa;
1390 error = swcr_setup_cipher(ses, csp);
1394 swa = &ses->swcr_auth;
1395 if (csp->csp_auth_mlen == 0)
1396 swa->sw_mlen = ses->swcr_encdec.sw_exf->macsize;
1398 swa->sw_mlen = csp->csp_auth_mlen;
/*
 * swcr_auth_supported() -- can this driver service the session's auth
 * algorithm?  Checks per-algorithm key-length and IV-length constraints;
 * GMAC and CCM-CBC-MAC additionally require the key at session time.
 * NOTE(review): sampled extract -- the key-length case labels and
 * return statements are elided.
 */
1403 swcr_auth_supported(const struct crypto_session_params *csp)
1405 const struct auth_hash *axf;
1407 axf = crypto_auth_hash(csp);
1410 switch (csp->csp_auth_alg) {
1411 case CRYPTO_SHA1_HMAC:
1412 case CRYPTO_SHA2_224_HMAC:
1413 case CRYPTO_SHA2_256_HMAC:
1414 case CRYPTO_SHA2_384_HMAC:
1415 case CRYPTO_SHA2_512_HMAC:
1416 case CRYPTO_NULL_HMAC:
1417 case CRYPTO_RIPEMD160_HMAC:
1419 case CRYPTO_AES_NIST_GMAC:
1420 switch (csp->csp_auth_klen * 8) {
1428 if (csp->csp_auth_key == NULL)
1430 if (csp->csp_ivlen != AES_GCM_IV_LEN)
1433 case CRYPTO_POLY1305:
1434 if (csp->csp_auth_klen != POLY1305_KEY_LEN)
1437 case CRYPTO_AES_CCM_CBC_MAC:
1438 switch (csp->csp_auth_klen * 8) {
1446 if (csp->csp_auth_key == NULL)
/*
 * swcr_cipher_supported() -- accept the session's cipher only when a
 * transform exists and (except for NULL_CBC) the requested IV length
 * matches the transform's native IV size.
 */
1454 swcr_cipher_supported(const struct crypto_session_params *csp)
1456 const struct enc_xform *txf;
1458 txf = crypto_cipher(csp);
1461 if (csp->csp_cipher_alg != CRYPTO_NULL_CBC &&
1462 txf->ivsize != csp->csp_ivlen)
1467 #define SUPPORTED_SES (CSP_F_SEPARATE_OUTPUT | CSP_F_SEPARATE_AAD | CSP_F_ESN)
/*
 * swcr_probesession() -- crypto framework probe: reject unsupported
 * session flags, then vet the algorithm combination per mode.  Always
 * answers CRYPTODEV_PROBE_SOFTWARE so hardware drivers win ties.
 * NOTE(review): sampled extract -- AEAD key-length case labels, ETA
 * mode label, and several returns are elided.
 */
1470 swcr_probesession(device_t dev, const struct crypto_session_params *csp)
1472 if ((csp->csp_flags & ~(SUPPORTED_SES)) != 0)
1474 switch (csp->csp_mode) {
1475 case CSP_MODE_COMPRESS:
1476 switch (csp->csp_cipher_alg) {
1477 case CRYPTO_DEFLATE_COMP:
1483 case CSP_MODE_CIPHER:
/* AEAD algorithms may not be used in plain-cipher mode. */
1484 switch (csp->csp_cipher_alg) {
1485 case CRYPTO_AES_NIST_GCM_16:
1486 case CRYPTO_AES_CCM_16:
1487 case CRYPTO_CHACHA20_POLY1305:
1488 case CRYPTO_XCHACHA20_POLY1305:
1491 if (!swcr_cipher_supported(csp))
1496 case CSP_MODE_DIGEST:
1497 if (!swcr_auth_supported(csp))
1501 switch (csp->csp_cipher_alg) {
1502 case CRYPTO_AES_NIST_GCM_16:
1503 case CRYPTO_AES_CCM_16:
1504 switch (csp->csp_cipher_klen * 8) {
1513 case CRYPTO_CHACHA20_POLY1305:
1514 case CRYPTO_XCHACHA20_POLY1305:
1521 /* AEAD algorithms cannot be used for EtA. */
1522 switch (csp->csp_cipher_alg) {
1523 case CRYPTO_AES_NIST_GCM_16:
1524 case CRYPTO_AES_CCM_16:
1525 case CRYPTO_CHACHA20_POLY1305:
1526 case CRYPTO_XCHACHA20_POLY1305:
1529 switch (csp->csp_auth_alg) {
1530 case CRYPTO_AES_NIST_GMAC:
1531 case CRYPTO_AES_CCM_CBC_MAC:
1535 if (!swcr_cipher_supported(csp) ||
1536 !swcr_auth_supported(csp))
1543 return (CRYPTODEV_PROBE_SOFTWARE);
1547 * Generate a new software session.
/*
 * NOTE(review): listing is elided; error-check and break/return lines
 * between the visible statements are not shown.
 */
1550 swcr_newsession(device_t dev, crypto_session_t cses,
1551 const struct crypto_session_params *csp)
1553 struct swcr_session *ses;
1554 const struct comp_algo *cxf;
/* Driver-private per-session state allocated by the framework. */
1557 ses = crypto_get_driver_session(cses);
1560 switch (csp->csp_mode) {
1561 case CSP_MODE_COMPRESS:
1562 switch (csp->csp_cipher_alg) {
1563 case CRYPTO_DEFLATE_COMP:
1564 cxf = &comp_algo_deflate;
/* probesession already filtered algorithms, so this is unreachable. */
1568 panic("bad compression algo");
1571 ses->swcr_compdec.sw_cxf = cxf;
1572 ses->swcr_process = swcr_compdec;
1574 case CSP_MODE_CIPHER:
1575 switch (csp->csp_cipher_alg) {
/* NULL cipher needs no transform setup. */
1576 case CRYPTO_NULL_CBC:
1577 ses->swcr_process = swcr_null;
/* AEAD algorithms must not arrive in plain-cipher mode. */
1580 case CRYPTO_AES_NIST_GCM_16:
1581 case CRYPTO_AES_CCM_16:
1582 case CRYPTO_CHACHA20_POLY1305:
1583 case CRYPTO_XCHACHA20_POLY1305:
1584 panic("bad cipher algo")
1587 error = swcr_setup_cipher(ses, csp);
1589 ses->swcr_process = swcr_encdec;
1592 case CSP_MODE_DIGEST:
1593 error = swcr_setup_auth(ses, csp);
/* AEAD mode: pick the handler matching the algorithm. */
1596 switch (csp->csp_cipher_alg) {
1597 case CRYPTO_AES_NIST_GCM_16:
1598 error = swcr_setup_aead(ses, csp);
1600 ses->swcr_process = swcr_gcm;
1602 case CRYPTO_AES_CCM_16:
1603 error = swcr_setup_aead(ses, csp);
1605 ses->swcr_process = swcr_ccm;
1607 case CRYPTO_CHACHA20_POLY1305:
1608 case CRYPTO_XCHACHA20_POLY1305:
1609 error = swcr_setup_aead(ses, csp);
1611 ses->swcr_process = swcr_chacha20_poly1305;
1615 panic("bad aead algo");
/* EtA mode: AEAD ciphers/MACs were rejected in probesession. */
1621 switch (csp->csp_cipher_alg) {
1622 case CRYPTO_AES_NIST_GCM_16:
1623 case CRYPTO_AES_CCM_16:
1624 case CRYPTO_CHACHA20_POLY1305:
1625 case CRYPTO_XCHACHA20_POLY1305:
1626 panic("bad eta cipher algo");
1628 switch (csp->csp_auth_alg) {
1629 case CRYPTO_AES_NIST_GMAC:
1630 case CRYPTO_AES_CCM_CBC_MAC:
1631 panic("bad eta auth algo");
1635 error = swcr_setup_auth(ses, csp);
1638 if (csp->csp_cipher_alg == CRYPTO_NULL_CBC) {
1639 /* Effectively degrade to digest mode. */
1640 ses->swcr_process = swcr_authcompute;
1644 error = swcr_setup_cipher(ses, csp);
1646 ses->swcr_process = swcr_eta;
/* On error, tear down anything partially allocated above. */
1653 swcr_freesession(dev, cses);
/*
 * swcr_freesession() - release per-session state.  zfree() zeroes the
 * buffers before freeing, so key material does not linger in memory;
 * it also tolerates NULL, so partially-initialized sessions are safe.
 */
1658 swcr_freesession(device_t dev, crypto_session_t cses)
1660 struct swcr_session *ses;
1662 ses = crypto_get_driver_session(cses);
1664 zfree(ses->swcr_encdec.sw_ctx, M_CRYPTO_DATA);
1665 zfree(ses->swcr_auth.sw_ictx, M_CRYPTO_DATA);
1666 zfree(ses->swcr_auth.sw_octx, M_CRYPTO_DATA);
1670 * Process a software request.
/*
 * Dispatch through the per-session handler chosen at newsession time;
 * its result becomes the request's error status.
 */
1673 swcr_process(device_t dev, struct cryptop *crp, int hint)
1675 struct swcr_session *ses;
1677 ses = crypto_get_driver_session(crp->crp_session);
1679 crp->crp_etype = ses->swcr_process(ses, crp);
/*
 * swcr_identify() - add the cryptosoft pseudo-device under the parent
 * bus exactly once.
 */
1686 swcr_identify(driver_t *drv, device_t parent)
1688 /* NB: order 10 is so we get attached after h/w devices */
1689 if (device_find_child(parent, "cryptosoft", -1) == NULL &&
1690 BUS_ADD_CHILD(parent, 10, "cryptosoft", 0) == 0)
1691 panic("cryptosoft: could not attach");
/*
 * swcr_probe() - set the device description; NOWILDCARD keeps this
 * driver from claiming devices it did not itself identify.
 */
1695 swcr_probe(device_t dev)
1697 device_set_desc(dev, "software crypto");
1699 return (BUS_PROBE_NOWILDCARD);
/*
 * swcr_attach() - register with the crypto framework as a synchronous
 * software-only provider.
 */
1703 swcr_attach(device_t dev)
1706 swcr_id = crypto_get_driverid(dev, sizeof(struct swcr_session),
1707 CRYPTOCAP_F_SOFTWARE | CRYPTOCAP_F_SYNC);
/* NOTE(review): failure branch elided here; prints on registration error. */
1709 device_printf(dev, "cannot initialize!");
/* swcr_detach() - unregister all sessions/algorithms from the framework. */
1717 swcr_detach(device_t dev)
1719 crypto_unregister_all(swcr_id);
/* newbus device methods plus the cryptodev interface implementation. */
1723 static device_method_t swcr_methods[] = {
1724 DEVMETHOD(device_identify, swcr_identify),
1725 DEVMETHOD(device_probe, swcr_probe),
1726 DEVMETHOD(device_attach, swcr_attach),
1727 DEVMETHOD(device_detach, swcr_detach),
1729 DEVMETHOD(cryptodev_probesession, swcr_probesession),
1730 DEVMETHOD(cryptodev_newsession, swcr_newsession),
1731 DEVMETHOD(cryptodev_freesession,swcr_freesession),
1732 DEVMETHOD(cryptodev_process, swcr_process),
/* Driver definition; session state lives in crypto framework storage. */
1737 static driver_t swcr_driver = {
1740 0, /* NB: no softc */
1744 * NB: We explicitly reference the crypto module so we
1745 * get the necessary ordering when built as a loadable
1746 * module. This is required because we bundle the crypto
1747 * module code together with the cryptosoft driver (otherwise
1748 * normal module dependencies would handle things).
/* crypto_modevent doubles as this module's event handler (see NB above). */
1750 extern int crypto_modevent(struct module *, int, void *);
1751 /* XXX where to attach */
1752 DRIVER_MODULE(cryptosoft, nexus, swcr_driver, crypto_modevent, NULL);
1753 MODULE_VERSION(cryptosoft, 1);
1754 MODULE_DEPEND(cryptosoft, crypto, 1, 1, 1);