/* $OpenBSD: cryptosoft.c,v 1.35 2002/04/26 08:43:50 deraadt Exp $ */

/*-
 * The author of this code is Angelos D. Keromytis (angelos@cis.upenn.edu)
 * Copyright (c) 2002-2006 Sam Leffler, Errno Consulting
 *
 * This code was written by Angelos D. Keromytis in Athens, Greece, in
 * February 2000.  Network Security Technologies Inc. (NSTI) kindly
 * supported the development of this code.
 *
 * Copyright (c) 2000, 2001 Angelos D. Keromytis
 * Copyright (c) 2014 The FreeBSD Foundation
 * All rights reserved.
 *
 * Portions of this software were developed by John-Mark Gurney
 * under sponsorship of the FreeBSD Foundation and
 * Rubicon Communications, LLC (Netgate).
 *
 * Permission to use, copy, and modify this software with or without fee
 * is hereby granted, provided that this entire notice is included in
 * all source code copies of any software which is or includes a copy or
 * modification of this software.
 *
 * THIS SOFTWARE IS BEING PROVIDED "AS IS", WITHOUT ANY EXPRESS OR
 * IMPLIED WARRANTY. IN PARTICULAR, NONE OF THE AUTHORS MAKES ANY
 * REPRESENTATION OR WARRANTY OF ANY KIND CONCERNING THE
 * MERCHANTABILITY OF THIS SOFTWARE OR ITS FITNESS FOR ANY PARTICULAR
 * PURPOSE.
 */
#include <sys/cdefs.h>
__FBSDID("$FreeBSD$");

#include <sys/param.h>
#include <sys/systm.h>
#include <sys/malloc.h>
#include <sys/module.h>
#include <sys/sysctl.h>
#include <sys/errno.h>
#include <sys/random.h>
#include <sys/kernel.h>
#include <sys/rwlock.h>
#include <sys/endian.h>
#include <sys/limits.h>
#include <sys/mutex.h>

#include <crypto/sha1.h>
#include <opencrypto/rmd160.h>

#include <opencrypto/cryptodev.h>
#include <opencrypto/xform.h>

#include "cryptodev_if.h"
struct auth_hash *sw_axf;

struct enc_xform *sw_exf;

struct comp_algo *sw_cxf;

int	(*swcr_process)(struct swcr_session *, struct cryptop *);

struct swcr_auth swcr_auth;
struct swcr_encdec swcr_encdec;
struct swcr_compdec swcr_compdec;

static int32_t swcr_id;

static void swcr_freesession(device_t dev, crypto_session_t cses);
/* Used for CRYPTO_NULL_CBC. */
static int
swcr_null(struct swcr_session *ses, struct cryptop *crp)

/*
 * Apply a symmetric encryption/decryption algorithm.
 */
static int
swcr_encdec(struct swcr_session *ses, struct cryptop *crp)
	unsigned char iv[EALG_MAX_BLOCK_LEN], blk[EALG_MAX_BLOCK_LEN];
	unsigned char *ivp, *nivp, iv2[EALG_MAX_BLOCK_LEN];
	const struct crypto_session_params *csp;
	struct swcr_encdec *sw;
	struct enc_xform *exf;
	int i, blks, inlen, ivlen, outlen, resid;
	struct crypto_buffer_cursor cc_in, cc_out;
	const unsigned char *inblk;
	unsigned char *outblk;

	sw = &ses->swcr_encdec;

	if (exf->native_blocksize == 0) {
		/* Check for non-padded data */
		if ((crp->crp_payload_length % exf->blocksize) != 0)

		blks = exf->blocksize;

		blks = exf->native_blocksize;

	if (exf == &enc_xform_aes_icm &&
	    (crp->crp_flags & CRYPTO_F_IV_SEPARATE) == 0)

	if (crp->crp_cipher_key != NULL) {
		csp = crypto_get_params(crp->crp_session);
		error = exf->setkey(sw->sw_kschedule,
		    crp->crp_cipher_key, csp->csp_cipher_klen);

	crypto_read_iv(crp, iv);

		/*
		 * xforms that provide a reinit method perform all IV
		 * handling themselves.
		 */
		exf->reinit(sw->sw_kschedule, iv);
	crypto_cursor_init(&cc_in, &crp->crp_buf);
	crypto_cursor_advance(&cc_in, crp->crp_payload_start);
	inlen = crypto_cursor_seglen(&cc_in);
	inblk = crypto_cursor_segbase(&cc_in);
	if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
		crypto_cursor_init(&cc_out, &crp->crp_obuf);
		crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);

	outlen = crypto_cursor_seglen(&cc_out);
	outblk = crypto_cursor_segbase(&cc_out);

	resid = crp->crp_payload_length;
	encrypting = CRYPTO_OP_IS_ENCRYPT(crp->crp_op);

	/*
	 * Loop through encrypting blocks.  'inlen' is the remaining
	 * length of the current segment in the input buffer.
	 * 'outlen' is the remaining length of the current segment in
	 * the output buffer.
	 */
	while (resid >= blks) {
		/*
		 * If the current block is not contained within the
		 * current input/output segment, use 'blk' as a local
		 * buffer.
		 */
			crypto_cursor_copydata(&cc_in, blks, blk);

		/*
		 * Ciphers without a 'reinit' hook are assumed to be
		 * used in CBC mode where the chaining is done here.
		 */
		if (exf->reinit != NULL) {
				exf->encrypt(sw->sw_kschedule, inblk, outblk);

				exf->decrypt(sw->sw_kschedule, inblk, outblk);
		} else if (encrypting) {
			/* XOR with previous block */
			for (i = 0; i < blks; i++)
				outblk[i] = inblk[i] ^ ivp[i];

			exf->encrypt(sw->sw_kschedule, outblk, outblk);

			/*
			 * Keep encrypted block for XOR'ing
			 * with next block.
			 */
			memcpy(iv, outblk, blks);

		} else {	/* decrypt */
			/*
			 * Keep encrypted block for XOR'ing
			 * with next block.
			 */
			nivp = (ivp == iv) ? iv2 : iv;
			memcpy(nivp, inblk, blks);

			exf->decrypt(sw->sw_kschedule, inblk, outblk);

			/* XOR with previous block */
			for (i = 0; i < blks; i++)
				outblk[i] ^= ivp[i];

			inlen = crypto_cursor_seglen(&cc_in);
			inblk = crypto_cursor_segbase(&cc_in);

			crypto_cursor_advance(&cc_in, blks);

			crypto_cursor_copyback(&cc_out, blks, blk);
			outlen = crypto_cursor_seglen(&cc_out);
			outblk = crypto_cursor_segbase(&cc_out);

			crypto_cursor_advance(&cc_out, blks);
	/* Handle trailing partial block for stream ciphers. */

		KASSERT(exf->native_blocksize != 0,
		    ("%s: partial block of %d bytes for cipher %s",
		    __func__, resid, exf->name));
		KASSERT(exf->reinit != NULL,
		    ("%s: partial block cipher %s without reinit hook",
		    __func__, exf->name));
		KASSERT(resid < blks, ("%s: partial block too big", __func__));

		inlen = crypto_cursor_seglen(&cc_in);
		outlen = crypto_cursor_seglen(&cc_out);

			crypto_cursor_copydata(&cc_in, resid, blk);

			inblk = crypto_cursor_segbase(&cc_in);

			outblk = crypto_cursor_segbase(&cc_out);

			exf->encrypt_last(sw->sw_kschedule, inblk, outblk,
			    resid);

			exf->decrypt_last(sw->sw_kschedule, inblk, outblk,
			    resid);

			crypto_cursor_copyback(&cc_out, resid, blk);

	explicit_bzero(blk, sizeof(blk));
	explicit_bzero(iv, sizeof(iv));
	explicit_bzero(iv2, sizeof(iv2));
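
/*
 * Illustrative sketch (not part of the driver): for ciphers without a
 * reinit hook the loop above implements classic CBC chaining.  With
 * E_K/D_K the raw block cipher and C_0 = IV:
 *
 *	encrypt:	C_i = E_K(P_i ^ C_{i-1})
 *	decrypt:	P_i = D_K(C_i) ^ C_{i-1}
 *
 * The iv/iv2 ping-pong (nivp above) exists because decryption must
 * keep the previous *ciphertext* block around even when the output
 * overwrites the input in place.
 */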
static void
swcr_authprepare(struct auth_hash *axf, struct swcr_auth *sw,
    const uint8_t *key, int klen)

	case CRYPTO_SHA1_HMAC:
	case CRYPTO_SHA2_224_HMAC:
	case CRYPTO_SHA2_256_HMAC:
	case CRYPTO_SHA2_384_HMAC:
	case CRYPTO_SHA2_512_HMAC:
	case CRYPTO_NULL_HMAC:
	case CRYPTO_RIPEMD160_HMAC:
		hmac_init_ipad(axf, key, klen, sw->sw_ictx);
		hmac_init_opad(axf, key, klen, sw->sw_octx);

	case CRYPTO_POLY1305:

		axf->Setkey(sw->sw_ictx, key, klen);
		axf->Init(sw->sw_ictx);

		panic("%s: algorithm %d doesn't use keys", __func__, axf->type);
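
/*
 * Illustrative sketch (not part of the driver): hmac_init_ipad() and
 * hmac_init_opad() precompute the two partially-hashed HMAC states so
 * each request can start from a copy instead of rehashing the key:
 *
 *	HMAC(K, m) = H((K ^ opad) || H((K ^ ipad) || m))
 *
 * sw_ictx holds the digest state after absorbing (K ^ ipad), and
 * sw_octx the state after absorbing (K ^ opad).
 */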
/*
 * Compute or verify hash.
 */
static int
swcr_authcompute(struct swcr_session *ses, struct cryptop *crp)

	u_char aalg[HASH_MAX_LEN];
	const struct crypto_session_params *csp;
	struct swcr_auth *sw;
	struct auth_hash *axf;

	sw = &ses->swcr_auth;

	if (crp->crp_auth_key != NULL) {
		csp = crypto_get_params(crp->crp_session);
		swcr_authprepare(axf, sw, crp->crp_auth_key,
		    csp->csp_auth_klen);

	bcopy(sw->sw_ictx, &ctx, axf->ctxsize);

	if (crp->crp_aad != NULL)
		err = axf->Update(&ctx, crp->crp_aad, crp->crp_aad_length);
	else
		err = crypto_apply(crp, crp->crp_aad_start, crp->crp_aad_length,
		    axf->Update, &ctx);

	if (CRYPTO_HAS_OUTPUT_BUFFER(crp) &&
	    CRYPTO_OP_IS_ENCRYPT(crp->crp_op))
		err = crypto_apply_buf(&crp->crp_obuf,
		    crp->crp_payload_output_start, crp->crp_payload_length,
		    axf->Update, &ctx);
	else
		err = crypto_apply(crp, crp->crp_payload_start,
		    crp->crp_payload_length, axf->Update, &ctx);
	case CRYPTO_SHA2_224:
	case CRYPTO_SHA2_256:
	case CRYPTO_SHA2_384:
	case CRYPTO_SHA2_512:
		axf->Final(aalg, &ctx);

	case CRYPTO_SHA1_HMAC:
	case CRYPTO_SHA2_224_HMAC:
	case CRYPTO_SHA2_256_HMAC:
	case CRYPTO_SHA2_384_HMAC:
	case CRYPTO_SHA2_512_HMAC:
	case CRYPTO_RIPEMD160_HMAC:
		if (sw->sw_octx == NULL)

		axf->Final(aalg, &ctx);
		bcopy(sw->sw_octx, &ctx, axf->ctxsize);
		axf->Update(&ctx, aalg, axf->hashsize);
		axf->Final(aalg, &ctx);

	case CRYPTO_NULL_HMAC:
	case CRYPTO_POLY1305:
		axf->Final(aalg, &ctx);

	if (crp->crp_op & CRYPTO_OP_VERIFY_DIGEST) {
		u_char uaalg[HASH_MAX_LEN];

		crypto_copydata(crp, crp->crp_digest_start, sw->sw_mlen, uaalg);
		if (timingsafe_bcmp(aalg, uaalg, sw->sw_mlen) != 0)

		explicit_bzero(uaalg, sizeof(uaalg));

		/* Inject the authentication data */
		crypto_copyback(crp, crp->crp_digest_start, sw->sw_mlen, aalg);

	explicit_bzero(aalg, sizeof(aalg));
CTASSERT(INT_MAX <= (1ll << 39) - 256);	/* GCM: plain text < 2^39-256 */
CTASSERT(INT_MAX <= (uint64_t)-1);	/* GCM: associated data <= 2^64-1 */
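
/*
 * These compile-time bounds reflect NIST SP 800-38D, which caps a
 * single GCM invocation at 2^39 - 256 bits of plaintext and 2^64 - 1
 * bits of AAD.  Request lengths are ints, so INT_MAX bytes sits
 * comfortably inside both limits and no runtime length checks are
 * needed.
 */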
static int
swcr_gmac(struct swcr_session *ses, struct cryptop *crp)

	uint32_t blkbuf[howmany(AES_BLOCK_LEN, sizeof(uint32_t))];
	u_char *blk = (u_char *)blkbuf;
	u_char tag[GMAC_DIGEST_LEN];
	u_char iv[AES_BLOCK_LEN];
	struct crypto_buffer_cursor cc;

	struct swcr_auth *swa;
	struct auth_hash *axf;

	int blksz, error, ivlen, len, resid;

	swa = &ses->swcr_auth;

	bcopy(swa->sw_ictx, &ctx, axf->ctxsize);
	blksz = GMAC_BLOCK_LEN;
	KASSERT(axf->blocksize == blksz, ("%s: axf block size mismatch",
	    __func__));

	/* Initialize the IV */
	ivlen = AES_GCM_IV_LEN;
	crypto_read_iv(crp, iv);

	axf->Reinit(&ctx, iv, ivlen);
	crypto_cursor_init(&cc, &crp->crp_buf);
	crypto_cursor_advance(&cc, crp->crp_payload_start);
	for (resid = crp->crp_payload_length; resid >= blksz; resid -= len) {
		len = crypto_cursor_seglen(&cc);

			inblk = crypto_cursor_segbase(&cc);
			len = rounddown(MIN(len, resid), blksz);
			crypto_cursor_advance(&cc, len);

			crypto_cursor_copydata(&cc, len, blk);

		axf->Update(&ctx, inblk, len);

		memset(blk, 0, blksz);
		crypto_cursor_copydata(&cc, resid, blk);
		axf->Update(&ctx, blk, blksz);
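
	/*
	 * Illustrative note: GHASH finishes with a 16-byte length
	 * block, len(AAD) || len(C), each a 64-bit big-endian bit
	 * count.  GMAC authenticates the payload as AAD, so only the
	 * first quadword is nonzero, and writing its low 32 bits via
	 * (uint32_t *)blk + 1 suffices because request lengths are
	 * bounded by INT_MAX (see the CTASSERTs above).
	 */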
	memset(blk, 0, blksz);
	blkp = (uint32_t *)blk + 1;
	*blkp = htobe32(crp->crp_payload_length * 8);
	axf->Update(&ctx, blk, blksz);

	axf->Final(tag, &ctx);

	if (crp->crp_op & CRYPTO_OP_VERIFY_DIGEST) {
		u_char tag2[GMAC_DIGEST_LEN];

		crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
		    tag2);
		if (timingsafe_bcmp(tag, tag2, swa->sw_mlen) != 0)

		explicit_bzero(tag2, sizeof(tag2));

		/* Inject the authentication data */
		crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen, tag);

	explicit_bzero(blkbuf, sizeof(blkbuf));
	explicit_bzero(tag, sizeof(tag));
	explicit_bzero(iv, sizeof(iv));
static int
swcr_gcm(struct swcr_session *ses, struct cryptop *crp)

	uint32_t blkbuf[howmany(AES_BLOCK_LEN, sizeof(uint32_t))];
	u_char *blk = (u_char *)blkbuf;
	u_char tag[GMAC_DIGEST_LEN];
	u_char iv[AES_BLOCK_LEN];
	struct crypto_buffer_cursor cc_in, cc_out;

	struct swcr_auth *swa;
	struct swcr_encdec *swe;
	struct auth_hash *axf;
	struct enc_xform *exf;

	int blksz, error, ivlen, len, r, resid;

	swa = &ses->swcr_auth;

	bcopy(swa->sw_ictx, &ctx, axf->ctxsize);
	blksz = GMAC_BLOCK_LEN;
	KASSERT(axf->blocksize == blksz, ("%s: axf block size mismatch",
	    __func__));

	swe = &ses->swcr_encdec;

	KASSERT(axf->blocksize == exf->native_blocksize,
	    ("%s: blocksize mismatch", __func__));

	if ((crp->crp_flags & CRYPTO_F_IV_SEPARATE) == 0)

	/* Initialize the IV */
	ivlen = AES_GCM_IV_LEN;
	bcopy(crp->crp_iv, iv, ivlen);

	/* Supply MAC with IV */
	axf->Reinit(&ctx, iv, ivlen);

	/* Supply MAC with AAD */
	if (crp->crp_aad != NULL) {
		len = rounddown(crp->crp_aad_length, blksz);

			axf->Update(&ctx, crp->crp_aad, len);
		if (crp->crp_aad_length != len) {
			memset(blk, 0, blksz);
			memcpy(blk, (char *)crp->crp_aad + len,
			    crp->crp_aad_length - len);
			axf->Update(&ctx, blk, blksz);
		}

		crypto_cursor_init(&cc_in, &crp->crp_buf);
		crypto_cursor_advance(&cc_in, crp->crp_aad_start);
		for (resid = crp->crp_aad_length; resid >= blksz;
		    resid -= len) {
			len = crypto_cursor_seglen(&cc_in);

				inblk = crypto_cursor_segbase(&cc_in);
				len = rounddown(MIN(len, resid), blksz);
				crypto_cursor_advance(&cc_in, len);

				crypto_cursor_copydata(&cc_in, len, blk);

			axf->Update(&ctx, inblk, len);

			memset(blk, 0, blksz);
			crypto_cursor_copydata(&cc_in, resid, blk);
			axf->Update(&ctx, blk, blksz);

	exf->reinit(swe->sw_kschedule, iv);

	/* Do encryption with MAC */
	crypto_cursor_init(&cc_in, &crp->crp_buf);
	crypto_cursor_advance(&cc_in, crp->crp_payload_start);
	if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
		crypto_cursor_init(&cc_out, &crp->crp_obuf);
		crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
	}

	for (resid = crp->crp_payload_length; resid >= blksz; resid -= blksz) {
		if (crypto_cursor_seglen(&cc_in) < blksz) {
			crypto_cursor_copydata(&cc_in, blksz, blk);

			inblk = crypto_cursor_segbase(&cc_in);
			crypto_cursor_advance(&cc_in, blksz);

		if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
			if (crypto_cursor_seglen(&cc_out) < blksz)

				outblk = crypto_cursor_segbase(&cc_out);
			exf->encrypt(swe->sw_kschedule, inblk, outblk);
			axf->Update(&ctx, outblk, blksz);

				crypto_cursor_copyback(&cc_out, blksz, blk);

				crypto_cursor_advance(&cc_out, blksz);

			axf->Update(&ctx, inblk, blksz);

		crypto_cursor_copydata(&cc_in, resid, blk);
		if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
			exf->encrypt_last(swe->sw_kschedule, blk, blk, resid);
			crypto_cursor_copyback(&cc_out, resid, blk);

			axf->Update(&ctx, blk, resid);
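
	/*
	 * Illustrative note: the final GHASH block is
	 * len(AAD) || len(C), two 64-bit big-endian bit counts.  The
	 * stores below fill in words 1 and 3, i.e. the low 32 bits of
	 * each count; the high halves stay zero because request
	 * lengths are bounded by INT_MAX.
	 */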
	memset(blk, 0, blksz);
	blkp = (uint32_t *)blk + 1;
	*blkp = htobe32(crp->crp_aad_length * 8);
	blkp = (uint32_t *)blk + 3;
	*blkp = htobe32(crp->crp_payload_length * 8);
	axf->Update(&ctx, blk, blksz);

	axf->Final(tag, &ctx);

	if (!CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
		u_char tag2[GMAC_DIGEST_LEN];

		crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen, tag2);

		r = timingsafe_bcmp(tag, tag2, swa->sw_mlen);
		explicit_bzero(tag2, sizeof(tag2));

		/* tag matches, decrypt data */
		crypto_cursor_init(&cc_in, &crp->crp_buf);
		crypto_cursor_advance(&cc_in, crp->crp_payload_start);
		for (resid = crp->crp_payload_length; resid > blksz;
		    resid -= blksz) {
			if (crypto_cursor_seglen(&cc_in) < blksz) {
				crypto_cursor_copydata(&cc_in, blksz, blk);

				inblk = crypto_cursor_segbase(&cc_in);
				crypto_cursor_advance(&cc_in, blksz);

			if (crypto_cursor_seglen(&cc_out) < blksz)

				outblk = crypto_cursor_segbase(&cc_out);
			exf->decrypt(swe->sw_kschedule, inblk, outblk);

				crypto_cursor_copyback(&cc_out, blksz, blk);

				crypto_cursor_advance(&cc_out, blksz);

		crypto_cursor_copydata(&cc_in, resid, blk);
		exf->decrypt_last(swe->sw_kschedule, blk, blk, resid);
		crypto_cursor_copyback(&cc_out, resid, blk);

		/* Inject the authentication data */
		crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen, tag);

	explicit_bzero(blkbuf, sizeof(blkbuf));
	explicit_bzero(tag, sizeof(tag));
	explicit_bzero(iv, sizeof(iv));
static int
swcr_ccm_cbc_mac(struct swcr_session *ses, struct cryptop *crp)

	u_char tag[AES_CBC_MAC_HASH_LEN];
	u_char iv[AES_BLOCK_LEN];

	struct swcr_auth *swa;
	struct auth_hash *axf;

	swa = &ses->swcr_auth;

	bcopy(swa->sw_ictx, &ctx, axf->ctxsize);

	/* Initialize the IV */
	ivlen = AES_CCM_IV_LEN;
	crypto_read_iv(crp, iv);

	/*
	 * AES CCM-CBC-MAC needs to know the length of both the auth
	 * data and payload data before doing the auth computation.
	 */
	ctx.aes_cbc_mac_ctx.authDataLength = crp->crp_payload_length;
	ctx.aes_cbc_mac_ctx.cryptDataLength = 0;

	axf->Reinit(&ctx, iv, ivlen);
	if (crp->crp_aad != NULL)
		error = axf->Update(&ctx, crp->crp_aad, crp->crp_aad_length);
	else
		error = crypto_apply(crp, crp->crp_payload_start,
		    crp->crp_payload_length, axf->Update, &ctx);

	axf->Final(tag, &ctx);

	if (crp->crp_op & CRYPTO_OP_VERIFY_DIGEST) {
		u_char tag2[AES_CBC_MAC_HASH_LEN];

		crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
		    tag2);
		if (timingsafe_bcmp(tag, tag2, swa->sw_mlen) != 0)

		explicit_bzero(tag2, sizeof(tag2));

		/* Inject the authentication data */
		crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen, tag);

	explicit_bzero(tag, sizeof(tag));
	explicit_bzero(iv, sizeof(iv));
static int
swcr_ccm(struct swcr_session *ses, struct cryptop *crp)

	uint32_t blkbuf[howmany(AES_BLOCK_LEN, sizeof(uint32_t))];
	u_char *blk = (u_char *)blkbuf;
	u_char tag[AES_CBC_MAC_HASH_LEN];
	u_char iv[AES_BLOCK_LEN];
	struct crypto_buffer_cursor cc_in, cc_out;

	struct swcr_auth *swa;
	struct swcr_encdec *swe;
	struct auth_hash *axf;
	struct enc_xform *exf;
	int blksz, error, ivlen, r, resid;

	swa = &ses->swcr_auth;

	bcopy(swa->sw_ictx, &ctx, axf->ctxsize);
	blksz = AES_BLOCK_LEN;
	KASSERT(axf->blocksize == blksz, ("%s: axf block size mismatch",
	    __func__));

	swe = &ses->swcr_encdec;

	KASSERT(axf->blocksize == exf->native_blocksize,
	    ("%s: blocksize mismatch", __func__));

	if ((crp->crp_flags & CRYPTO_F_IV_SEPARATE) == 0)

	/* Initialize the IV */
	ivlen = AES_CCM_IV_LEN;
	bcopy(crp->crp_iv, iv, ivlen);

	/*
	 * AES CCM-CBC-MAC needs to know the length of both the auth
	 * data and payload data before doing the auth computation.
	 */
	ctx.aes_cbc_mac_ctx.authDataLength = crp->crp_aad_length;
	ctx.aes_cbc_mac_ctx.cryptDataLength = crp->crp_payload_length;

	/* Supply MAC with IV */
	axf->Reinit(&ctx, iv, ivlen);

	/* Supply MAC with AAD */
	if (crp->crp_aad != NULL)
		error = axf->Update(&ctx, crp->crp_aad, crp->crp_aad_length);
	else
		error = crypto_apply(crp, crp->crp_aad_start,
		    crp->crp_aad_length, axf->Update, &ctx);

	exf->reinit(swe->sw_kschedule, iv);

	/* Do encryption/decryption with MAC */
	crypto_cursor_init(&cc_in, &crp->crp_buf);
	crypto_cursor_advance(&cc_in, crp->crp_payload_start);
	if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
		crypto_cursor_init(&cc_out, &crp->crp_obuf);
		crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
	}

	for (resid = crp->crp_payload_length; resid >= blksz; resid -= blksz) {
		if (crypto_cursor_seglen(&cc_in) < blksz) {
			crypto_cursor_copydata(&cc_in, blksz, blk);

			inblk = crypto_cursor_segbase(&cc_in);
			crypto_cursor_advance(&cc_in, blksz);

		if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
			if (crypto_cursor_seglen(&cc_out) < blksz)

				outblk = crypto_cursor_segbase(&cc_out);
			axf->Update(&ctx, inblk, blksz);
			exf->encrypt(swe->sw_kschedule, inblk, outblk);

				crypto_cursor_copyback(&cc_out, blksz, blk);

				crypto_cursor_advance(&cc_out, blksz);

			/*
			 * One of the problems with CCM+CBC is that
			 * the authentication is done on the
			 * unencrypted data.  As a result, we have to
			 * decrypt the data twice: once to generate
			 * the tag and a second time after the tag is
			 * verified.
			 */
			exf->decrypt(swe->sw_kschedule, inblk, blk);
			axf->Update(&ctx, blk, blksz);

		crypto_cursor_copydata(&cc_in, resid, blk);
		if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
			axf->Update(&ctx, blk, resid);
			exf->encrypt_last(swe->sw_kschedule, blk, blk, resid);
			crypto_cursor_copyback(&cc_out, resid, blk);

			exf->decrypt_last(swe->sw_kschedule, blk, blk, resid);
			axf->Update(&ctx, blk, resid);

	axf->Final(tag, &ctx);

	if (!CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
		u_char tag2[AES_CBC_MAC_HASH_LEN];

		crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
		    tag2);

		r = timingsafe_bcmp(tag, tag2, swa->sw_mlen);
		explicit_bzero(tag2, sizeof(tag2));

		/* tag matches, decrypt data */
		exf->reinit(swe->sw_kschedule, iv);
		crypto_cursor_init(&cc_in, &crp->crp_buf);
		crypto_cursor_advance(&cc_in, crp->crp_payload_start);
		for (resid = crp->crp_payload_length; resid > blksz;
		    resid -= blksz) {
			if (crypto_cursor_seglen(&cc_in) < blksz) {
				crypto_cursor_copydata(&cc_in, blksz, blk);

				inblk = crypto_cursor_segbase(&cc_in);
				crypto_cursor_advance(&cc_in, blksz);

			if (crypto_cursor_seglen(&cc_out) < blksz)

				outblk = crypto_cursor_segbase(&cc_out);
			exf->decrypt(swe->sw_kschedule, inblk, outblk);

				crypto_cursor_copyback(&cc_out, blksz, blk);

				crypto_cursor_advance(&cc_out, blksz);

		crypto_cursor_copydata(&cc_in, resid, blk);
		exf->decrypt_last(swe->sw_kschedule, blk, blk, resid);
		crypto_cursor_copyback(&cc_out, resid, blk);

		/* Inject the authentication data */
		crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen, tag);

	explicit_bzero(blkbuf, sizeof(blkbuf));
	explicit_bzero(tag, sizeof(tag));
	explicit_bzero(iv, sizeof(iv));
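
/*
 * Illustrative sketch (not part of the driver): per RFC 3610, CCM is
 * a CBC-MAC over the *plaintext* followed by CTR-mode encryption,
 * roughly:
 *
 *	T = CBC-MAC_K(B_0 || AAD || P), truncated to sw_mlen bytes
 *	C = CTR_K(P), with the tag encrypted under the first counter
 *	    block
 *
 * That ordering is why swcr_ccm() MACs each block before encrypting
 * it, and why verification has to decrypt once to recompute the tag
 * and a second time, after the tag checks out, to release plaintext.
 */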
/*
 * Apply a cipher and a digest to perform EtA.
 */
static int
swcr_eta(struct swcr_session *ses, struct cryptop *crp)

	if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
		error = swcr_encdec(ses, crp);
		if (error == 0)
			error = swcr_authcompute(ses, crp);
	} else {
		error = swcr_authcompute(ses, crp);
		if (error == 0)
			error = swcr_encdec(ses, crp);
	}
/*
 * Apply a compression/decompression algorithm.
 */
static int
swcr_compdec(struct swcr_session *ses, struct cryptop *crp)

	u_int8_t *data, *out;
	struct comp_algo *cxf;

	cxf = ses->swcr_compdec.sw_cxf;

	/*
	 * We must handle the whole buffer of data at once; if the
	 * payload is not contiguous in the request buffer, copy it
	 * into a contiguous scratch buffer.
	 */
	data = malloc(crp->crp_payload_length, M_CRYPTO_DATA, M_NOWAIT);

	crypto_copydata(crp, crp->crp_payload_start, crp->crp_payload_length,
	    data);

	if (CRYPTO_OP_IS_COMPRESS(crp->crp_op))
		result = cxf->compress(data, crp->crp_payload_length, &out);
	else
		result = cxf->decompress(data, crp->crp_payload_length, &out);

	free(data, M_CRYPTO_DATA);

	crp->crp_olen = result;

	/* Check the compressed size when doing compression */
	if (CRYPTO_OP_IS_COMPRESS(crp->crp_op)) {
		if (result >= crp->crp_payload_length) {
			/* Compression was useless, we lost time */
			free(out, M_CRYPTO_DATA);

	/*
	 * Copy back the (de)compressed data.  m_copyback() extends
	 * the mbuf as necessary.
	 */
	crypto_copyback(crp, crp->crp_payload_start, result, out);
	if (result < crp->crp_payload_length) {
		switch (crp->crp_buf.cb_type) {
		case CRYPTO_BUF_MBUF:
			adj = result - crp->crp_payload_length;
			m_adj(crp->crp_buf.cb_mbuf, adj);

		case CRYPTO_BUF_UIO: {
			struct uio *uio = crp->crp_buf.cb_uio;

			adj = crp->crp_payload_length - result;
			ind = uio->uio_iovcnt - 1;

			while (adj > 0 && ind >= 0) {
				if (adj < uio->uio_iov[ind].iov_len) {
					uio->uio_iov[ind].iov_len -= adj;

				adj -= uio->uio_iov[ind].iov_len;
				uio->uio_iov[ind].iov_len = 0;

	free(out, M_CRYPTO_DATA);
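
/*
 * Worked example with hypothetical numbers: a 1000-byte payload that
 * deflates to 600 bytes leaves adj = 600 - 1000 = -400 in the mbuf
 * case, so m_adj() trims 400 stale bytes from the tail.  For a uio,
 * adj = 400 and the loop above walks the iovec array backwards,
 * zeroing or shrinking iov_len until those 400 bytes are gone.
 */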
static int
swcr_setup_cipher(struct swcr_session *ses,
    const struct crypto_session_params *csp)

	struct swcr_encdec *swe;
	struct enc_xform *txf;

	swe = &ses->swcr_encdec;
	txf = crypto_cipher(csp);
	MPASS(txf->ivsize == csp->csp_ivlen);
	if (txf->ctxsize != 0) {
		swe->sw_kschedule = malloc(txf->ctxsize, M_CRYPTO_DATA,
		    M_NOWAIT);
		if (swe->sw_kschedule == NULL)

	if (csp->csp_cipher_key != NULL) {
		error = txf->setkey(swe->sw_kschedule,
		    csp->csp_cipher_key, csp->csp_cipher_klen);
static int
swcr_setup_auth(struct swcr_session *ses,
    const struct crypto_session_params *csp)

	struct swcr_auth *swa;
	struct auth_hash *axf;

	swa = &ses->swcr_auth;

	axf = crypto_auth_hash(csp);

	if (csp->csp_auth_mlen < 0 || csp->csp_auth_mlen > axf->hashsize)

	if (csp->csp_auth_mlen == 0)
		swa->sw_mlen = axf->hashsize;
	else
		swa->sw_mlen = csp->csp_auth_mlen;
	swa->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA, M_NOWAIT);
	if (swa->sw_ictx == NULL)

	switch (csp->csp_auth_alg) {
	case CRYPTO_SHA1_HMAC:
	case CRYPTO_SHA2_224_HMAC:
	case CRYPTO_SHA2_256_HMAC:
	case CRYPTO_SHA2_384_HMAC:
	case CRYPTO_SHA2_512_HMAC:
	case CRYPTO_NULL_HMAC:
	case CRYPTO_RIPEMD160_HMAC:
		swa->sw_octx = malloc(axf->ctxsize, M_CRYPTO_DATA,
		    M_NOWAIT);
		if (swa->sw_octx == NULL)

		if (csp->csp_auth_key != NULL) {
			swcr_authprepare(axf, swa, csp->csp_auth_key,
			    csp->csp_auth_klen);

		if (csp->csp_mode == CSP_MODE_DIGEST)
			ses->swcr_process = swcr_authcompute;

	case CRYPTO_SHA2_224:
	case CRYPTO_SHA2_256:
	case CRYPTO_SHA2_384:
	case CRYPTO_SHA2_512:
		axf->Init(swa->sw_ictx);
		if (csp->csp_mode == CSP_MODE_DIGEST)
			ses->swcr_process = swcr_authcompute;

	case CRYPTO_AES_NIST_GMAC:
		axf->Init(swa->sw_ictx);
		axf->Setkey(swa->sw_ictx, csp->csp_auth_key,
		    csp->csp_auth_klen);
		if (csp->csp_mode == CSP_MODE_DIGEST)
			ses->swcr_process = swcr_gmac;

	case CRYPTO_POLY1305:
	case CRYPTO_BLAKE2B:
	case CRYPTO_BLAKE2S:
		/*
		 * Blake2b and Blake2s support an optional key but do
		 * not require one.
		 */
		if (csp->csp_auth_klen == 0 || csp->csp_auth_key != NULL)
			axf->Setkey(swa->sw_ictx, csp->csp_auth_key,
			    csp->csp_auth_klen);
		axf->Init(swa->sw_ictx);
		if (csp->csp_mode == CSP_MODE_DIGEST)
			ses->swcr_process = swcr_authcompute;

	case CRYPTO_AES_CCM_CBC_MAC:
		axf->Init(swa->sw_ictx);
		axf->Setkey(swa->sw_ictx, csp->csp_auth_key,
		    csp->csp_auth_klen);
		if (csp->csp_mode == CSP_MODE_DIGEST)
			ses->swcr_process = swcr_ccm_cbc_mac;
static int
swcr_setup_gcm(struct swcr_session *ses,
    const struct crypto_session_params *csp)

	struct swcr_auth *swa;
	struct auth_hash *axf;

	if (csp->csp_ivlen != AES_GCM_IV_LEN)

	/* First, setup the auth side. */
	swa = &ses->swcr_auth;
	switch (csp->csp_cipher_klen * 8) {

		axf = &auth_hash_nist_gmac_aes_128;

		axf = &auth_hash_nist_gmac_aes_192;

		axf = &auth_hash_nist_gmac_aes_256;

	if (csp->csp_auth_mlen < 0 || csp->csp_auth_mlen > axf->hashsize)

	if (csp->csp_auth_mlen == 0)
		swa->sw_mlen = axf->hashsize;
	else
		swa->sw_mlen = csp->csp_auth_mlen;
	swa->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA, M_NOWAIT);
	if (swa->sw_ictx == NULL)

	axf->Init(swa->sw_ictx);
	if (csp->csp_cipher_key != NULL)
		axf->Setkey(swa->sw_ictx, csp->csp_cipher_key,
		    csp->csp_cipher_klen);

	/* Second, setup the cipher side. */
	return (swcr_setup_cipher(ses, csp));
static int
swcr_setup_ccm(struct swcr_session *ses,
    const struct crypto_session_params *csp)

	struct swcr_auth *swa;
	struct auth_hash *axf;

	if (csp->csp_ivlen != AES_CCM_IV_LEN)

	/* First, setup the auth side. */
	swa = &ses->swcr_auth;
	switch (csp->csp_cipher_klen * 8) {

		axf = &auth_hash_ccm_cbc_mac_128;

		axf = &auth_hash_ccm_cbc_mac_192;

		axf = &auth_hash_ccm_cbc_mac_256;

	if (csp->csp_auth_mlen < 0 || csp->csp_auth_mlen > axf->hashsize)

	if (csp->csp_auth_mlen == 0)
		swa->sw_mlen = axf->hashsize;
	else
		swa->sw_mlen = csp->csp_auth_mlen;
	swa->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA, M_NOWAIT);
	if (swa->sw_ictx == NULL)

	axf->Init(swa->sw_ictx);
	if (csp->csp_cipher_key != NULL)
		axf->Setkey(swa->sw_ictx, csp->csp_cipher_key,
		    csp->csp_cipher_klen);

	/* Second, setup the cipher side. */
	return (swcr_setup_cipher(ses, csp));
static bool
swcr_auth_supported(const struct crypto_session_params *csp)

	struct auth_hash *axf;

	axf = crypto_auth_hash(csp);

	switch (csp->csp_auth_alg) {
	case CRYPTO_SHA1_HMAC:
	case CRYPTO_SHA2_224_HMAC:
	case CRYPTO_SHA2_256_HMAC:
	case CRYPTO_SHA2_384_HMAC:
	case CRYPTO_SHA2_512_HMAC:
	case CRYPTO_NULL_HMAC:
	case CRYPTO_RIPEMD160_HMAC:

	case CRYPTO_AES_NIST_GMAC:
		switch (csp->csp_auth_klen * 8) {

		if (csp->csp_auth_key == NULL)

		if (csp->csp_ivlen != AES_GCM_IV_LEN)

	case CRYPTO_POLY1305:
		if (csp->csp_auth_klen != POLY1305_KEY_LEN)

	case CRYPTO_AES_CCM_CBC_MAC:
		switch (csp->csp_auth_klen * 8) {

		if (csp->csp_auth_key == NULL)

		if (csp->csp_ivlen != AES_CCM_IV_LEN)
static bool
swcr_cipher_supported(const struct crypto_session_params *csp)

	struct enc_xform *txf;

	txf = crypto_cipher(csp);

	if (csp->csp_cipher_alg != CRYPTO_NULL_CBC &&
	    txf->ivsize != csp->csp_ivlen)
static int
swcr_probesession(device_t dev, const struct crypto_session_params *csp)

	if ((csp->csp_flags & ~(CSP_F_SEPARATE_OUTPUT | CSP_F_SEPARATE_AAD)) !=
	    0)

	switch (csp->csp_mode) {
	case CSP_MODE_COMPRESS:
		switch (csp->csp_cipher_alg) {
		case CRYPTO_DEFLATE_COMP:

	case CSP_MODE_CIPHER:
		switch (csp->csp_cipher_alg) {
		case CRYPTO_AES_NIST_GCM_16:
		case CRYPTO_AES_CCM_16:

			if (!swcr_cipher_supported(csp))

	case CSP_MODE_DIGEST:
		if (!swcr_auth_supported(csp))

		switch (csp->csp_cipher_alg) {
		case CRYPTO_AES_NIST_GCM_16:
		case CRYPTO_AES_CCM_16:

		/* AEAD algorithms cannot be used for EtA. */
		switch (csp->csp_cipher_alg) {
		case CRYPTO_AES_NIST_GCM_16:
		case CRYPTO_AES_CCM_16:

		switch (csp->csp_auth_alg) {
		case CRYPTO_AES_NIST_GMAC:
		case CRYPTO_AES_CCM_CBC_MAC:

		if (!swcr_cipher_supported(csp) ||
		    !swcr_auth_supported(csp))

	return (CRYPTODEV_PROBE_SOFTWARE);
/*
 * Generate a new software session.
 */
static int
swcr_newsession(device_t dev, crypto_session_t cses,
    const struct crypto_session_params *csp)

	struct swcr_session *ses;
	struct swcr_encdec *swe;
	struct swcr_auth *swa;
	struct comp_algo *cxf;

	ses = crypto_get_driver_session(cses);
	mtx_init(&ses->swcr_lock, "swcr session lock", NULL, MTX_DEF);

	swe = &ses->swcr_encdec;
	swa = &ses->swcr_auth;
	switch (csp->csp_mode) {
	case CSP_MODE_COMPRESS:
		switch (csp->csp_cipher_alg) {
		case CRYPTO_DEFLATE_COMP:
			cxf = &comp_algo_deflate;

			panic("bad compression algo");

		ses->swcr_compdec.sw_cxf = cxf;
		ses->swcr_process = swcr_compdec;

	case CSP_MODE_CIPHER:
		switch (csp->csp_cipher_alg) {
		case CRYPTO_NULL_CBC:
			ses->swcr_process = swcr_null;

		case CRYPTO_AES_NIST_GCM_16:
		case CRYPTO_AES_CCM_16:
			panic("bad cipher algo");

			error = swcr_setup_cipher(ses, csp);
			if (error == 0)
				ses->swcr_process = swcr_encdec;

	case CSP_MODE_DIGEST:
		error = swcr_setup_auth(ses, csp);

		switch (csp->csp_cipher_alg) {
		case CRYPTO_AES_NIST_GCM_16:
			error = swcr_setup_gcm(ses, csp);
			if (error == 0)
				ses->swcr_process = swcr_gcm;

		case CRYPTO_AES_CCM_16:
			error = swcr_setup_ccm(ses, csp);
			if (error == 0)
				ses->swcr_process = swcr_ccm;

			panic("bad aead algo");

		switch (csp->csp_cipher_alg) {
		case CRYPTO_AES_NIST_GCM_16:
		case CRYPTO_AES_CCM_16:
			panic("bad eta cipher algo");

		switch (csp->csp_auth_alg) {
		case CRYPTO_AES_NIST_GMAC:
		case CRYPTO_AES_CCM_CBC_MAC:
			panic("bad eta auth algo");

		error = swcr_setup_auth(ses, csp);

		if (csp->csp_cipher_alg == CRYPTO_NULL_CBC) {
			/* Effectively degrade to digest mode. */
			ses->swcr_process = swcr_authcompute;

		error = swcr_setup_cipher(ses, csp);
		if (error == 0)
			ses->swcr_process = swcr_eta;

		swcr_freesession(dev, cses);
static void
swcr_freesession(device_t dev, crypto_session_t cses)

	struct swcr_session *ses;

	ses = crypto_get_driver_session(cses);

	mtx_destroy(&ses->swcr_lock);

	zfree(ses->swcr_encdec.sw_kschedule, M_CRYPTO_DATA);
	zfree(ses->swcr_auth.sw_ictx, M_CRYPTO_DATA);
	zfree(ses->swcr_auth.sw_octx, M_CRYPTO_DATA);
/*
 * Process a software request.
 */
static int
swcr_process(device_t dev, struct cryptop *crp, int hint)

	struct swcr_session *ses;

	ses = crypto_get_driver_session(crp->crp_session);
	mtx_lock(&ses->swcr_lock);

	crp->crp_etype = ses->swcr_process(ses, crp);

	mtx_unlock(&ses->swcr_lock);
static void
swcr_identify(driver_t *drv, device_t parent)

	/* NB: order 10 is so we get attached after h/w devices */
	if (device_find_child(parent, "cryptosoft", -1) == NULL &&
	    BUS_ADD_CHILD(parent, 10, "cryptosoft", 0) == 0)
		panic("cryptosoft: could not attach");
static int
swcr_probe(device_t dev)

	device_set_desc(dev, "software crypto");
	return (BUS_PROBE_NOWILDCARD);

static int
swcr_attach(device_t dev)

	swcr_id = crypto_get_driverid(dev, sizeof(struct swcr_session),
	    CRYPTOCAP_F_SOFTWARE | CRYPTOCAP_F_SYNC);

		device_printf(dev, "cannot initialize!\n");

static int
swcr_detach(device_t dev)

	crypto_unregister_all(swcr_id);
static device_method_t swcr_methods[] = {
	DEVMETHOD(device_identify, swcr_identify),
	DEVMETHOD(device_probe, swcr_probe),
	DEVMETHOD(device_attach, swcr_attach),
	DEVMETHOD(device_detach, swcr_detach),

	DEVMETHOD(cryptodev_probesession, swcr_probesession),
	DEVMETHOD(cryptodev_newsession, swcr_newsession),
	DEVMETHOD(cryptodev_freesession, swcr_freesession),
	DEVMETHOD(cryptodev_process, swcr_process),

static driver_t swcr_driver = {

	0,		/* NB: no softc */

static devclass_t swcr_devclass;
/*
 * NB: We explicitly reference the crypto module so we
 * get the necessary ordering when built as a loadable
 * module.  This is required because we bundle the crypto
 * module code together with the cryptosoft driver (otherwise
 * normal module dependencies would handle things).
 */
extern int crypto_modevent(struct module *, int, void *);

/* XXX where to attach */
DRIVER_MODULE(cryptosoft, nexus, swcr_driver, swcr_devclass, crypto_modevent, 0);
MODULE_VERSION(cryptosoft, 1);
MODULE_DEPEND(cryptosoft, crypto, 1, 1, 1);
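
/*
 * Illustrative consumer-side sketch (assumptions: the crypto(9) API
 * of the same vintage as this file; error handling and buffer setup
 * omitted).  A kernel consumer reaches this driver roughly like so:
 *
 *	struct crypto_session_params csp = {
 *		.csp_mode = CSP_MODE_CIPHER,
 *		.csp_cipher_alg = CRYPTO_AES_CBC,
 *		.csp_cipher_key = key,
 *		.csp_cipher_klen = 32,
 *		.csp_ivlen = AES_BLOCK_LEN,
 *	};
 *	crypto_session_t cses;
 *	crypto_newsession(&cses, &csp, CRYPTOCAP_F_SOFTWARE);
 *
 *	struct cryptop *crp = crypto_getreq(cses, M_WAITOK);
 *	crp->crp_op = CRYPTO_OP_ENCRYPT;
 *	... point crp at a payload buffer and IV, set a callback ...
 *	crypto_dispatch(crp);
 *
 * The framework then invokes swcr_probesession()/swcr_newsession()
 * and routes each request to swcr_process() above, unless a hardware
 * driver claims the session first.
 */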