/*	$OpenBSD: cryptosoft.c,v 1.35 2002/04/26 08:43:50 deraadt Exp $	*/

/*-
 * The author of this code is Angelos D. Keromytis (angelos@cis.upenn.edu)
 * Copyright (c) 2002-2006 Sam Leffler, Errno Consulting
 *
 * This code was written by Angelos D. Keromytis in Athens, Greece, in
 * February 2000. Network Security Technologies Inc. (NSTI) kindly
 * supported the development of this code.
 *
 * Copyright (c) 2000, 2001 Angelos D. Keromytis
 * Copyright (c) 2014 The FreeBSD Foundation
 * All rights reserved.
 *
 * Portions of this software were developed by John-Mark Gurney
 * under sponsorship of the FreeBSD Foundation and
 * Rubicon Communications, LLC (Netgate).
 *
 * Permission to use, copy, and modify this software with or without fee
 * is hereby granted, provided that this entire notice is included in
 * all source code copies of any software which is or includes a copy or
 * modification of this software.
 *
 * THIS SOFTWARE IS BEING PROVIDED "AS IS", WITHOUT ANY EXPRESS OR
 * IMPLIED WARRANTY. IN PARTICULAR, NONE OF THE AUTHORS MAKES ANY
 * REPRESENTATION OR WARRANTY OF ANY KIND CONCERNING THE
 * MERCHANTABILITY OF THIS SOFTWARE OR ITS FITNESS FOR ANY PARTICULAR
 * PURPOSE.
 */
#include <sys/cdefs.h>
__FBSDID("$FreeBSD$");

#include <sys/param.h>
#include <sys/systm.h>
#include <sys/malloc.h>
#include <sys/mbuf.h>
#include <sys/module.h>
#include <sys/sysctl.h>
#include <sys/errno.h>
#include <sys/random.h>
#include <sys/kernel.h>
#include <sys/uio.h>
#include <sys/lock.h>
#include <sys/rwlock.h>
#include <sys/endian.h>
#include <sys/limits.h>
#include <sys/mutex.h>

#include <crypto/sha1.h>
#include <opencrypto/rmd160.h>

#include <opencrypto/cryptodev.h>
#include <opencrypto/xform.h>

#include <sys/kobj.h>
#include <sys/bus.h>
#include "cryptodev_if.h"

struct swcr_auth {
	void		*sw_ictx;
	void		*sw_octx;
	struct auth_hash *sw_axf;
	uint16_t	sw_mlen;
};

struct swcr_encdec {
	void		*sw_kschedule;
	struct enc_xform *sw_exf;
};

struct swcr_compdec {
	struct comp_algo *sw_cxf;
};

struct swcr_session {
	struct mtx	swcr_lock;
	int	(*swcr_process)(struct swcr_session *, struct cryptop *);

	struct swcr_auth swcr_auth;
	struct swcr_encdec swcr_encdec;
	struct swcr_compdec swcr_compdec;
};
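
/*
 * NB: each session dispatches requests through the swcr_process hook,
 * which swcr_newsession() points at the mode-specific handler (cipher,
 * digest, AEAD, EtA or compression), so the common swcr_process()
 * entry point below stays mode-agnostic.
 */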

static int32_t swcr_id;

static void swcr_freesession(device_t dev, crypto_session_t cses);

/* Used for CRYPTO_NULL_CBC. */
static int
swcr_null(struct swcr_session *ses, struct cryptop *crp)
{

	return (0);
}

/*
 * Apply a symmetric encryption/decryption algorithm.
 */
static int
swcr_encdec(struct swcr_session *ses, struct cryptop *crp)
{
	unsigned char iv[EALG_MAX_BLOCK_LEN], blk[EALG_MAX_BLOCK_LEN];
	unsigned char *ivp, *nivp, iv2[EALG_MAX_BLOCK_LEN];
	const struct crypto_session_params *csp;
	struct swcr_encdec *sw;
	struct enc_xform *exf;
	int i, blks, inlen, ivlen, outlen, resid;
	struct crypto_buffer_cursor cc_in, cc_out;
	const unsigned char *inblk;
	unsigned char *outblk;
	int error;
	bool encrypting;

	sw = &ses->swcr_encdec;
	exf = sw->sw_exf;

	if (exf->native_blocksize == 0) {
		/* Check for non-padded data */
		if ((crp->crp_payload_length % exf->blocksize) != 0)
			return (EINVAL);

		blks = exf->blocksize;
	} else
		blks = exf->native_blocksize;

	if (exf == &enc_xform_aes_icm &&
	    (crp->crp_flags & CRYPTO_F_IV_SEPARATE) == 0)
		return (EINVAL);

	if (crp->crp_cipher_key != NULL) {
		csp = crypto_get_params(crp->crp_session);
		error = exf->setkey(sw->sw_kschedule,
		    crp->crp_cipher_key, csp->csp_cipher_klen);
		if (error)
			return (error);
	}

	crypto_read_iv(crp, iv);

	if (exf->reinit != NULL) {
		/*
		 * xforms that provide a reinit method perform all IV
		 * handling themselves.
		 */
		exf->reinit(sw->sw_kschedule, iv);
	}

	ivp = iv;

	crypto_cursor_init(&cc_in, &crp->crp_buf);
	crypto_cursor_advance(&cc_in, crp->crp_payload_start);
	inlen = crypto_cursor_seglen(&cc_in);
	inblk = crypto_cursor_segbase(&cc_in);
	if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
		crypto_cursor_init(&cc_out, &crp->crp_obuf);
		crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
	} else
		cc_out = cc_in;
	outlen = crypto_cursor_seglen(&cc_out);
	outblk = crypto_cursor_segbase(&cc_out);

	resid = crp->crp_payload_length;
	encrypting = CRYPTO_OP_IS_ENCRYPT(crp->crp_op);

	/*
	 * Loop through encrypting blocks.  'inlen' is the remaining
	 * length of the current segment in the input buffer.
	 * 'outlen' is the remaining length of current segment in the
	 * output buffer.
	 */
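	/*
	 * Three cases below: ciphers that provide a reinit hook
	 * (e.g. CTR/ICM-style modes) keep their own chaining state, so
	 * encrypt/decrypt is called directly; otherwise CBC chaining is
	 * done in-line, XOR'ing each block with the previous ciphertext
	 * held in 'ivp'.
	 */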
	while (resid >= blks) {
		/*
		 * If the current block is not contained within the
		 * current input/output segment, use 'blk' as a local
		 * buffer.
		 */
		if (inlen < blks) {
			crypto_cursor_copydata(&cc_in, blks, blk);
			inblk = blk;
		}
		if (outlen < blks)
			outblk = blk;

		/*
		 * Ciphers without a 'reinit' hook are assumed to be
		 * used in CBC mode where the chaining is done here.
		 */
		if (exf->reinit != NULL) {
			if (encrypting)
				exf->encrypt(sw->sw_kschedule, inblk, outblk);
			else
				exf->decrypt(sw->sw_kschedule, inblk, outblk);
		} else if (encrypting) {
			/* XOR with previous block */
			for (i = 0; i < blks; i++)
				outblk[i] = inblk[i] ^ ivp[i];

			exf->encrypt(sw->sw_kschedule, outblk, outblk);

			/*
			 * Keep encrypted block for XOR'ing
			 * with next block
			 */
			memcpy(iv, outblk, blks);
			ivp = iv;
		} else {	/* decrypt */
			/*
			 * Keep encrypted block for XOR'ing
			 * with next block
			 */
			nivp = (ivp == iv) ? iv2 : iv;
			memcpy(nivp, inblk, blks);

			exf->decrypt(sw->sw_kschedule, inblk, outblk);

			/* XOR with previous block */
			for (i = 0; i < blks; i++)
				outblk[i] ^= ivp[i];

			ivp = nivp;
		}

		if (inblk == blk) {
			inlen = crypto_cursor_seglen(&cc_in);
			inblk = crypto_cursor_segbase(&cc_in);
		} else {
			crypto_cursor_advance(&cc_in, blks);
			inlen -= blks;
			inblk += blks;
		}

		if (outblk == blk) {
			crypto_cursor_copyback(&cc_out, blks, blk);
			outlen = crypto_cursor_seglen(&cc_out);
			outblk = crypto_cursor_segbase(&cc_out);
		} else {
			crypto_cursor_advance(&cc_out, blks);
			outlen -= blks;
			outblk += blks;
		}

		resid -= blks;
	}

	/* Handle trailing partial block for stream ciphers. */
	if (resid > 0) {
		KASSERT(exf->native_blocksize != 0,
		    ("%s: partial block of %d bytes for cipher %s",
		    __func__, resid, exf->name));
		KASSERT(exf->reinit != NULL,
		    ("%s: partial block cipher %s without reinit hook",
		    __func__, exf->name));
		KASSERT(resid < blks, ("%s: partial block too big", __func__));

		inlen = crypto_cursor_seglen(&cc_in);
		outlen = crypto_cursor_seglen(&cc_out);
		if (inlen < resid) {
			crypto_cursor_copydata(&cc_in, resid, blk);
			inblk = blk;
		} else
			inblk = crypto_cursor_segbase(&cc_in);
		if (outlen < resid)
			outblk = blk;
		else
			outblk = crypto_cursor_segbase(&cc_out);
		if (encrypting)
			exf->encrypt_last(sw->sw_kschedule, inblk, outblk,
			    resid);
		else
			exf->decrypt_last(sw->sw_kschedule, inblk, outblk,
			    resid);
		if (outlen < resid)
			crypto_cursor_copyback(&cc_out, resid, blk);
	}
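
	/*
	 * Scrub stack copies of plaintext/key material.  Unlike plain
	 * memset(), explicit_bzero() is not removed by dead-store
	 * optimization.
	 */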
	explicit_bzero(blk, sizeof(blk));
	explicit_bzero(iv, sizeof(iv));
	explicit_bzero(iv2, sizeof(iv2));

	return (0);
}

static void
swcr_authprepare(struct auth_hash *axf, struct swcr_auth *sw,
    const uint8_t *key, int klen)
{

	switch (axf->type) {
	case CRYPTO_SHA1_HMAC:
	case CRYPTO_SHA2_224_HMAC:
	case CRYPTO_SHA2_256_HMAC:
	case CRYPTO_SHA2_384_HMAC:
	case CRYPTO_SHA2_512_HMAC:
	case CRYPTO_NULL_HMAC:
	case CRYPTO_RIPEMD160_HMAC:
		hmac_init_ipad(axf, key, klen, sw->sw_ictx);
		hmac_init_opad(axf, key, klen, sw->sw_octx);
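		/*
		 * NB: the ipad/opad contexts are precomputed once per
		 * key change, so each request pays only HMAC's two
		 * extra compression calls instead of rehashing the key.
		 */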
		break;
	case CRYPTO_POLY1305:
	case CRYPTO_BLAKE2B:
	case CRYPTO_BLAKE2S:
		axf->Setkey(sw->sw_ictx, key, klen);
		axf->Init(sw->sw_ictx);
		break;
	default:
		panic("%s: algorithm %d doesn't use keys", __func__, axf->type);
	}
}

/*
 * Compute or verify hash.
 */
static int
swcr_authcompute(struct swcr_session *ses, struct cryptop *crp)
{
	u_char aalg[HASH_MAX_LEN];
	const struct crypto_session_params *csp;
	struct swcr_auth *sw;
	struct auth_hash *axf;
	union authctx ctx;
	int err;

	sw = &ses->swcr_auth;

	axf = sw->sw_axf;

	if (crp->crp_auth_key != NULL) {
		csp = crypto_get_params(crp->crp_session);
		swcr_authprepare(axf, sw, crp->crp_auth_key,
		    csp->csp_auth_klen);
	}

	bcopy(sw->sw_ictx, &ctx, axf->ctxsize);

	err = crypto_apply(crp, crp->crp_aad_start, crp->crp_aad_length,
	    axf->Update, &ctx);
	if (err)
		return (err);

	if (CRYPTO_HAS_OUTPUT_BUFFER(crp) &&
	    CRYPTO_OP_IS_ENCRYPT(crp->crp_op))
		err = crypto_apply_buf(&crp->crp_obuf,
		    crp->crp_payload_output_start, crp->crp_payload_length,
		    axf->Update, &ctx);
	else
		err = crypto_apply(crp, crp->crp_payload_start,
		    crp->crp_payload_length, axf->Update, &ctx);
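	/*
	 * For EtA requests with a separate output buffer the ciphertext
	 * just written to crp_obuf is what must be hashed, hence the
	 * crypto_apply_buf() path above; otherwise the digest is
	 * computed over the input buffer in place.
	 */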
	if (err)
		return (err);

	switch (axf->type) {
	case CRYPTO_SHA1:
	case CRYPTO_SHA2_224:
	case CRYPTO_SHA2_256:
	case CRYPTO_SHA2_384:
	case CRYPTO_SHA2_512:
		axf->Final(aalg, &ctx);
		break;

	case CRYPTO_SHA1_HMAC:
	case CRYPTO_SHA2_224_HMAC:
	case CRYPTO_SHA2_256_HMAC:
	case CRYPTO_SHA2_384_HMAC:
	case CRYPTO_SHA2_512_HMAC:
	case CRYPTO_RIPEMD160_HMAC:
		if (sw->sw_octx == NULL)
			return (EINVAL);

		axf->Final(aalg, &ctx);
		bcopy(sw->sw_octx, &ctx, axf->ctxsize);
		axf->Update(&ctx, aalg, axf->hashsize);
		axf->Final(aalg, &ctx);
		break;

	case CRYPTO_BLAKE2B:
	case CRYPTO_BLAKE2S:
	case CRYPTO_NULL_HMAC:
	case CRYPTO_POLY1305:
		axf->Final(aalg, &ctx);
		break;
	}

	if (crp->crp_op & CRYPTO_OP_VERIFY_DIGEST) {
		u_char uaalg[HASH_MAX_LEN];

		crypto_copydata(crp, crp->crp_digest_start, sw->sw_mlen, uaalg);
		if (timingsafe_bcmp(aalg, uaalg, sw->sw_mlen) != 0)
			err = EBADMSG;
		explicit_bzero(uaalg, sizeof(uaalg));
	} else {
		/* Inject the authentication data */
		crypto_copyback(crp, crp->crp_digest_start, sw->sw_mlen, aalg);
	}
	explicit_bzero(aalg, sizeof(aalg));

	return (err);
}

CTASSERT(INT_MAX <= (1ll<<39) - 256);	/* GCM: plain text < 2^39-256 */
CTASSERT(INT_MAX <= (uint64_t)-1);	/* GCM: associated data <= 2^64-1 */
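
/*
 * NB: payload and AAD lengths are carried in int fields, so the
 * asserts above record that any request the API can express already
 * sits within NIST's GCM limits (plaintext < 2^39-256 bits, AAD
 * <= 2^64-1 bits).
 */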
static int
swcr_gmac(struct swcr_session *ses, struct cryptop *crp)
{
	uint32_t blkbuf[howmany(AES_BLOCK_LEN, sizeof(uint32_t))];
	u_char *blk = (u_char *)blkbuf;
	u_char tag[GMAC_DIGEST_LEN];
	u_char iv[AES_BLOCK_LEN];
	struct crypto_buffer_cursor cc;
	const u_char *inblk;
	union authctx ctx;
	struct swcr_auth *swa;
	struct auth_hash *axf;
	uint32_t *blkp;
	int blksz, error, ivlen, len, resid;

	swa = &ses->swcr_auth;
	axf = swa->sw_axf;

	bcopy(swa->sw_ictx, &ctx, axf->ctxsize);
	blksz = GMAC_BLOCK_LEN;
	KASSERT(axf->blocksize == blksz, ("%s: axf block size mismatch",
	    __func__));

	/* Initialize the IV */
	ivlen = AES_GCM_IV_LEN;
	crypto_read_iv(crp, iv);

	axf->Reinit(&ctx, iv, ivlen);
	crypto_cursor_init(&cc, &crp->crp_buf);
	crypto_cursor_advance(&cc, crp->crp_payload_start);
	for (resid = crp->crp_payload_length; resid >= blksz; resid -= len) {
		len = crypto_cursor_seglen(&cc);
		if (len >= blksz) {
			inblk = crypto_cursor_segbase(&cc);
			len = rounddown(MIN(len, resid), blksz);
			crypto_cursor_advance(&cc, len);
		} else {
			len = blksz;
			crypto_cursor_copydata(&cc, len, blk);
			inblk = blk;
		}
		axf->Update(&ctx, inblk, len);
	}
	if (resid > 0) {
		memset(blk, 0, blksz);
		crypto_cursor_copydata(&cc, resid, blk);
		axf->Update(&ctx, blk, blksz);
	}
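
	/*
	 * The final GHASH block encodes the 64-bit big-endian
	 * bit-lengths of AAD and ciphertext.  For GMAC the payload is
	 * all AAD, so the code fills in only the low word of the AAD
	 * length and the ciphertext length stays zero.
	 */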
	/* length block */
	memset(blk, 0, blksz);
	blkp = (uint32_t *)blk + 1;
	*blkp = htobe32(crp->crp_payload_length * 8);
	axf->Update(&ctx, blk, blksz);

	/* Finalize MAC */
	axf->Final(tag, &ctx);

	error = 0;
	if (crp->crp_op & CRYPTO_OP_VERIFY_DIGEST) {
		u_char tag2[GMAC_DIGEST_LEN];

		crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
		    tag2);
		if (timingsafe_bcmp(tag, tag2, swa->sw_mlen) != 0)
			error = EBADMSG;
		explicit_bzero(tag2, sizeof(tag2));
	} else {
		/* Inject the authentication data */
		crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen, tag);
	}
	explicit_bzero(blkbuf, sizeof(blkbuf));
	explicit_bzero(tag, sizeof(tag));
	explicit_bzero(iv, sizeof(iv));

	return (error);
}

static int
swcr_gcm(struct swcr_session *ses, struct cryptop *crp)
{
	uint32_t blkbuf[howmany(AES_BLOCK_LEN, sizeof(uint32_t))];
	u_char *blk = (u_char *)blkbuf;
	u_char tag[GMAC_DIGEST_LEN];
	u_char iv[AES_BLOCK_LEN];
	struct crypto_buffer_cursor cc_in, cc_out;
	const u_char *inblk;
	u_char *outblk;
	union authctx ctx;
	struct swcr_auth *swa;
	struct swcr_encdec *swe;
	struct auth_hash *axf;
	struct enc_xform *exf;
	uint32_t *blkp;
	int blksz, error, ivlen, len, r, resid;

	swa = &ses->swcr_auth;
	axf = swa->sw_axf;

	bcopy(swa->sw_ictx, &ctx, axf->ctxsize);
	blksz = GMAC_BLOCK_LEN;
	KASSERT(axf->blocksize == blksz, ("%s: axf block size mismatch",
	    __func__));

	swe = &ses->swcr_encdec;
	exf = swe->sw_exf;
	KASSERT(axf->blocksize == exf->native_blocksize,
	    ("%s: blocksize mismatch", __func__));

	if ((crp->crp_flags & CRYPTO_F_IV_SEPARATE) == 0)
		return (EINVAL);

	/* Initialize the IV */
	ivlen = AES_GCM_IV_LEN;
	bcopy(crp->crp_iv, iv, ivlen);

	/* Supply MAC with IV */
	axf->Reinit(&ctx, iv, ivlen);

	/* Supply MAC with AAD */
	crypto_cursor_init(&cc_in, &crp->crp_buf);
	crypto_cursor_advance(&cc_in, crp->crp_aad_start);
	for (resid = crp->crp_aad_length; resid >= blksz; resid -= len) {
		len = crypto_cursor_seglen(&cc_in);
		if (len >= blksz) {
			inblk = crypto_cursor_segbase(&cc_in);
			len = rounddown(MIN(len, resid), blksz);
			crypto_cursor_advance(&cc_in, len);
		} else {
			len = blksz;
			crypto_cursor_copydata(&cc_in, len, blk);
			inblk = blk;
		}
		axf->Update(&ctx, inblk, len);
	}
	if (resid > 0) {
		memset(blk, 0, blksz);
		crypto_cursor_copydata(&cc_in, resid, blk);
		axf->Update(&ctx, blk, blksz);
	}

	exf->reinit(swe->sw_kschedule, iv);

	/* Do encryption with MAC */
	crypto_cursor_init(&cc_in, &crp->crp_buf);
	crypto_cursor_advance(&cc_in, crp->crp_payload_start);
	if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
		crypto_cursor_init(&cc_out, &crp->crp_obuf);
		crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
	} else
		cc_out = cc_in;
	for (resid = crp->crp_payload_length; resid >= blksz; resid -= blksz) {
		if (crypto_cursor_seglen(&cc_in) < blksz) {
			crypto_cursor_copydata(&cc_in, blksz, blk);
			inblk = blk;
		} else {
			inblk = crypto_cursor_segbase(&cc_in);
			crypto_cursor_advance(&cc_in, blksz);
		}
		if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
			if (crypto_cursor_seglen(&cc_out) < blksz)
				outblk = blk;
			else
				outblk = crypto_cursor_segbase(&cc_out);
			exf->encrypt(swe->sw_kschedule, inblk, outblk);
			axf->Update(&ctx, outblk, blksz);
			if (outblk == blk)
				crypto_cursor_copyback(&cc_out, blksz, blk);
			else
				crypto_cursor_advance(&cc_out, blksz);
		} else {
			axf->Update(&ctx, inblk, blksz);
		}
	}
	if (resid > 0) {
		crypto_cursor_copydata(&cc_in, resid, blk);
		if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
			exf->encrypt_last(swe->sw_kschedule, blk, blk, resid);
			crypto_cursor_copyback(&cc_out, resid, blk);
		}
		axf->Update(&ctx, blk, resid);
	}
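
	/*
	 * The final GHASH block is len(A) || len(C): the 64-bit
	 * big-endian bit-lengths of AAD and ciphertext.  The code fills
	 * in the low word of each, i.e. words 1 and 3 of the block.
	 */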
	/* length block */
	memset(blk, 0, blksz);
	blkp = (uint32_t *)blk + 1;
	*blkp = htobe32(crp->crp_aad_length * 8);
	blkp = (uint32_t *)blk + 3;
	*blkp = htobe32(crp->crp_payload_length * 8);
	axf->Update(&ctx, blk, blksz);

	/* Finalize MAC */
	axf->Final(tag, &ctx);

	/* Validate tag */
	error = 0;
	if (!CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
		u_char tag2[GMAC_DIGEST_LEN];

		crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen, tag2);

		r = timingsafe_bcmp(tag, tag2, swa->sw_mlen);
		explicit_bzero(tag2, sizeof(tag2));
		if (r != 0) {
			error = EBADMSG;
			goto out;
		}

		/* tag matches, decrypt data */
		crypto_cursor_init(&cc_in, &crp->crp_buf);
		crypto_cursor_advance(&cc_in, crp->crp_payload_start);
		for (resid = crp->crp_payload_length; resid > blksz;
		    resid -= blksz) {
			if (crypto_cursor_seglen(&cc_in) < blksz) {
				crypto_cursor_copydata(&cc_in, blksz, blk);
				inblk = blk;
			} else {
				inblk = crypto_cursor_segbase(&cc_in);
				crypto_cursor_advance(&cc_in, blksz);
			}
			if (crypto_cursor_seglen(&cc_out) < blksz)
				outblk = blk;
			else
				outblk = crypto_cursor_segbase(&cc_out);
			exf->decrypt(swe->sw_kschedule, inblk, outblk);
			if (outblk == blk)
				crypto_cursor_copyback(&cc_out, blksz, blk);
			else
				crypto_cursor_advance(&cc_out, blksz);
		}
		if (resid > 0) {
			crypto_cursor_copydata(&cc_in, resid, blk);
			exf->decrypt_last(swe->sw_kschedule, blk, blk, resid);
			crypto_cursor_copyback(&cc_out, resid, blk);
		}
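		/*
		 * NB: decryption is deliberately deferred until the tag
		 * has been verified, so unauthenticated plaintext is
		 * never released to the caller's output buffer.
		 */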
	} else {
		/* Inject the authentication data */
		crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen, tag);
	}

out:
	explicit_bzero(blkbuf, sizeof(blkbuf));
	explicit_bzero(tag, sizeof(tag));
	explicit_bzero(iv, sizeof(iv));

	return (error);
}

static int
swcr_ccm_cbc_mac(struct swcr_session *ses, struct cryptop *crp)
{
	u_char tag[AES_CBC_MAC_HASH_LEN];
	u_char iv[AES_BLOCK_LEN];
	union authctx ctx;
	struct swcr_auth *swa;
	struct auth_hash *axf;
	int error, ivlen;

	swa = &ses->swcr_auth;
	axf = swa->sw_axf;

	bcopy(swa->sw_ictx, &ctx, axf->ctxsize);

	/* Initialize the IV */
	ivlen = AES_CCM_IV_LEN;
	crypto_read_iv(crp, iv);

	/*
	 * AES CCM-CBC-MAC needs to know the length of both the auth
	 * data and payload data before doing the auth computation.
	 */
	ctx.aes_cbc_mac_ctx.authDataLength = crp->crp_payload_length;
	ctx.aes_cbc_mac_ctx.cryptDataLength = 0;
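	/*
	 * In digest-only mode the payload is authenticated but never
	 * encrypted, so it counts as auth data and the crypt-data
	 * length stays zero.
	 */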

	axf->Reinit(&ctx, iv, ivlen);
	error = crypto_apply(crp, crp->crp_payload_start,
	    crp->crp_payload_length, axf->Update, &ctx);
	if (error)
		return (error);

	/* Finalize MAC */
	axf->Final(tag, &ctx);

	if (crp->crp_op & CRYPTO_OP_VERIFY_DIGEST) {
		u_char tag2[AES_CBC_MAC_HASH_LEN];

		crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
		    tag2);
		if (timingsafe_bcmp(tag, tag2, swa->sw_mlen) != 0)
			error = EBADMSG;
		explicit_bzero(tag2, sizeof(tag2));
	} else {
		/* Inject the authentication data */
		crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen, tag);
	}
	explicit_bzero(tag, sizeof(tag));
	explicit_bzero(iv, sizeof(iv));
	return (error);
}

static int
swcr_ccm(struct swcr_session *ses, struct cryptop *crp)
{
	uint32_t blkbuf[howmany(AES_BLOCK_LEN, sizeof(uint32_t))];
	u_char *blk = (u_char *)blkbuf;
	u_char tag[AES_CBC_MAC_HASH_LEN];
	u_char iv[AES_BLOCK_LEN];
	struct crypto_buffer_cursor cc_in, cc_out;
	const u_char *inblk;
	u_char *outblk;
	union authctx ctx;
	struct swcr_auth *swa;
	struct swcr_encdec *swe;
	struct auth_hash *axf;
	struct enc_xform *exf;
	int blksz, error, ivlen, r, resid;

	swa = &ses->swcr_auth;
	axf = swa->sw_axf;

	bcopy(swa->sw_ictx, &ctx, axf->ctxsize);
	blksz = AES_BLOCK_LEN;
	KASSERT(axf->blocksize == blksz, ("%s: axf block size mismatch",
	    __func__));

	swe = &ses->swcr_encdec;
	exf = swe->sw_exf;
	KASSERT(axf->blocksize == exf->native_blocksize,
	    ("%s: blocksize mismatch", __func__));

	if ((crp->crp_flags & CRYPTO_F_IV_SEPARATE) == 0)
		return (EINVAL);

	/* Initialize the IV */
	ivlen = AES_CCM_IV_LEN;
	bcopy(crp->crp_iv, iv, ivlen);

	/*
	 * AES CCM-CBC-MAC needs to know the length of both the auth
	 * data and payload data before doing the auth computation.
	 */
	ctx.aes_cbc_mac_ctx.authDataLength = crp->crp_aad_length;
	ctx.aes_cbc_mac_ctx.cryptDataLength = crp->crp_payload_length;

	/* Supply MAC with IV */
	axf->Reinit(&ctx, iv, ivlen);

	/* Supply MAC with AAD */
	error = crypto_apply(crp, crp->crp_aad_start, crp->crp_aad_length,
	    axf->Update, &ctx);
	if (error)
		return (error);

	exf->reinit(swe->sw_kschedule, iv);

	/* Do encryption/decryption with MAC */
	crypto_cursor_init(&cc_in, &crp->crp_buf);
	crypto_cursor_advance(&cc_in, crp->crp_payload_start);
	if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
		crypto_cursor_init(&cc_out, &crp->crp_obuf);
		crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
	} else
		cc_out = cc_in;
	for (resid = crp->crp_payload_length; resid >= blksz; resid -= blksz) {
		if (crypto_cursor_seglen(&cc_in) < blksz) {
			crypto_cursor_copydata(&cc_in, blksz, blk);
			inblk = blk;
		} else {
			inblk = crypto_cursor_segbase(&cc_in);
			crypto_cursor_advance(&cc_in, blksz);
		}
		if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
			if (crypto_cursor_seglen(&cc_out) < blksz)
				outblk = blk;
			else
				outblk = crypto_cursor_segbase(&cc_out);
			axf->Update(&ctx, inblk, blksz);
			exf->encrypt(swe->sw_kschedule, inblk, outblk);
			if (outblk == blk)
				crypto_cursor_copyback(&cc_out, blksz, blk);
			else
				crypto_cursor_advance(&cc_out, blksz);
		} else {
			/*
			 * One of the problems with CCM+CBC is that
			 * the authentication is done on the
			 * unencrypted data.  As a result, we have to
			 * decrypt the data twice: once to generate
			 * the tag and a second time after the tag is
			 * verified.
			 */
			exf->decrypt(swe->sw_kschedule, inblk, blk);
			axf->Update(&ctx, blk, blksz);
		}
	}
	if (resid > 0) {
		crypto_cursor_copydata(&cc_in, resid, blk);
		if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
			axf->Update(&ctx, blk, resid);
			exf->encrypt_last(swe->sw_kschedule, blk, blk, resid);
			crypto_cursor_copyback(&cc_out, resid, blk);
		} else {
			exf->decrypt_last(swe->sw_kschedule, blk, blk, resid);
			axf->Update(&ctx, blk, resid);
		}
	}

	/* Finalize MAC */
	axf->Final(tag, &ctx);

	/* Validate tag */
	error = 0;
	if (!CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
		u_char tag2[AES_CBC_MAC_HASH_LEN];

		crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
		    tag2);

		r = timingsafe_bcmp(tag, tag2, swa->sw_mlen);
		explicit_bzero(tag2, sizeof(tag2));
		if (r != 0) {
			error = EBADMSG;
			goto out;
		}

		/* tag matches, decrypt data */
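		/*
		 * NB: the MAC pass above already consumed counter-mode
		 * key stream, so the cipher state must be re-seeded
		 * from the IV before the real decryption pass.
		 */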
		exf->reinit(swe->sw_kschedule, iv);
		crypto_cursor_init(&cc_in, &crp->crp_buf);
		crypto_cursor_advance(&cc_in, crp->crp_payload_start);
		for (resid = crp->crp_payload_length; resid > blksz;
		    resid -= blksz) {
			if (crypto_cursor_seglen(&cc_in) < blksz) {
				crypto_cursor_copydata(&cc_in, blksz, blk);
				inblk = blk;
			} else {
				inblk = crypto_cursor_segbase(&cc_in);
				crypto_cursor_advance(&cc_in, blksz);
			}
			if (crypto_cursor_seglen(&cc_out) < blksz)
				outblk = blk;
			else
				outblk = crypto_cursor_segbase(&cc_out);
			exf->decrypt(swe->sw_kschedule, inblk, outblk);
			if (outblk == blk)
				crypto_cursor_copyback(&cc_out, blksz, blk);
			else
				crypto_cursor_advance(&cc_out, blksz);
		}
		if (resid > 0) {
			crypto_cursor_copydata(&cc_in, resid, blk);
			exf->decrypt_last(swe->sw_kschedule, blk, blk, resid);
			crypto_cursor_copyback(&cc_out, resid, blk);
		}
	} else {
		/* Inject the authentication data */
		crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen, tag);
	}

out:
	explicit_bzero(blkbuf, sizeof(blkbuf));
	explicit_bzero(tag, sizeof(tag));
	explicit_bzero(iv, sizeof(iv));

	return (error);
}

/*
 * Apply a cipher and a digest to perform EtA.
 */
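/*
 * NB: encrypt-then-authenticate runs the cipher first and MACs the
 * resulting ciphertext; on decryption the MAC is checked before any
 * payload is decrypted, the usual argument for the EtA composition.
 */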
static int
swcr_eta(struct swcr_session *ses, struct cryptop *crp)
{
	int error;

	if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
		error = swcr_encdec(ses, crp);
		if (error == 0)
			error = swcr_authcompute(ses, crp);
	} else {
		error = swcr_authcompute(ses, crp);
		if (error == 0)
			error = swcr_encdec(ses, crp);
	}
	return (error);
}

/*
 * Apply a compression/decompression algorithm
 */
static int
swcr_compdec(struct swcr_session *ses, struct cryptop *crp)
{
	u_int8_t *data, *out;
	struct comp_algo *cxf;
	int adj;
	u_int32_t result;

	cxf = ses->swcr_compdec.sw_cxf;

	/*
	 * We must handle the whole buffer of data at once, so if the
	 * payload is not contiguous in the buffer, copy it into a
	 * local buffer first.
	 */
	data = malloc(crp->crp_payload_length, M_CRYPTO_DATA, M_NOWAIT);
	if (data == NULL)
		return (EINVAL);
	crypto_copydata(crp, crp->crp_payload_start, crp->crp_payload_length,
	    data);

	if (CRYPTO_OP_IS_COMPRESS(crp->crp_op))
		result = cxf->compress(data, crp->crp_payload_length, &out);
	else
		result = cxf->decompress(data, crp->crp_payload_length, &out);

	free(data, M_CRYPTO_DATA);
	if (result == 0)
		return (EINVAL);
	crp->crp_olen = result;

	/* Check the compressed size when doing compression */
	if (CRYPTO_OP_IS_COMPRESS(crp->crp_op)) {
		if (result >= crp->crp_payload_length) {
			/* Compression was useless, we lost time */
			free(out, M_CRYPTO_DATA);
			return (0);
		}
	}

	/*
	 * Copy back the (de)compressed data.  m_copyback extends the
	 * mbuf as necessary.
	 */
	crypto_copyback(crp, crp->crp_payload_start, result, out);
	if (result < crp->crp_payload_length) {
		switch (crp->crp_buf.cb_type) {
		case CRYPTO_BUF_MBUF:
			adj = result - crp->crp_payload_length;
			m_adj(crp->crp_buf.cb_mbuf, adj);
			break;
		case CRYPTO_BUF_UIO: {
			struct uio *uio = crp->crp_buf.cb_uio;
			int ind;

			adj = crp->crp_payload_length - result;
			ind = uio->uio_iovcnt - 1;
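
			/*
			 * Shrink the uio from the tail: whole trailing
			 * iovecs are zeroed and the last partial one is
			 * trimmed so the buffer length matches 'result'.
			 */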
			while (adj > 0 && ind >= 0) {
				if (adj < uio->uio_iov[ind].iov_len) {
					uio->uio_iov[ind].iov_len -= adj;
					break;
				}

				adj -= uio->uio_iov[ind].iov_len;
				uio->uio_iov[ind].iov_len = 0;
				ind--;
				uio->uio_iovcnt--;
			}
			break;
		}
		default:
			break;
		}
	}
	free(out, M_CRYPTO_DATA);
	return (0);
}

static int
swcr_setup_cipher(struct swcr_session *ses,
    const struct crypto_session_params *csp)
{
	struct swcr_encdec *swe;
	struct enc_xform *txf;
	int error;

	swe = &ses->swcr_encdec;
	txf = crypto_cipher(csp);
	MPASS(txf->ivsize == csp->csp_ivlen);
	if (txf->ctxsize != 0) {
		swe->sw_kschedule = malloc(txf->ctxsize, M_CRYPTO_DATA,
		    M_NOWAIT);
		if (swe->sw_kschedule == NULL)
			return (ENOMEM);
	}
	if (csp->csp_cipher_key != NULL) {
		error = txf->setkey(swe->sw_kschedule,
		    csp->csp_cipher_key, csp->csp_cipher_klen);
		if (error)
			return (error);
	}
	swe->sw_exf = txf;
	return (0);
}

static int
swcr_setup_auth(struct swcr_session *ses,
    const struct crypto_session_params *csp)
{
	struct swcr_auth *swa;
	struct auth_hash *axf;

	swa = &ses->swcr_auth;

	axf = crypto_auth_hash(csp);
	swa->sw_axf = axf;
	if (csp->csp_auth_mlen < 0 || csp->csp_auth_mlen > axf->hashsize)
		return (EINVAL);
	if (csp->csp_auth_mlen == 0)
		swa->sw_mlen = axf->hashsize;
	else
		swa->sw_mlen = csp->csp_auth_mlen;
	swa->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA, M_NOWAIT);
	if (swa->sw_ictx == NULL)
		return (ENOBUFS);

	switch (csp->csp_auth_alg) {
	case CRYPTO_SHA1_HMAC:
	case CRYPTO_SHA2_224_HMAC:
	case CRYPTO_SHA2_256_HMAC:
	case CRYPTO_SHA2_384_HMAC:
	case CRYPTO_SHA2_512_HMAC:
	case CRYPTO_NULL_HMAC:
	case CRYPTO_RIPEMD160_HMAC:
		swa->sw_octx = malloc(axf->ctxsize, M_CRYPTO_DATA,
		    M_NOWAIT);
		if (swa->sw_octx == NULL)
			return (ENOBUFS);

		if (csp->csp_auth_key != NULL) {
			swcr_authprepare(axf, swa, csp->csp_auth_key,
			    csp->csp_auth_klen);
		}

		if (csp->csp_mode == CSP_MODE_DIGEST)
			ses->swcr_process = swcr_authcompute;
		break;
	case CRYPTO_SHA1:
	case CRYPTO_SHA2_224:
	case CRYPTO_SHA2_256:
	case CRYPTO_SHA2_384:
	case CRYPTO_SHA2_512:
		axf->Init(swa->sw_ictx);
		if (csp->csp_mode == CSP_MODE_DIGEST)
			ses->swcr_process = swcr_authcompute;
		break;
	case CRYPTO_AES_NIST_GMAC:
		axf->Init(swa->sw_ictx);
		axf->Setkey(swa->sw_ictx, csp->csp_auth_key,
		    csp->csp_auth_klen);
		if (csp->csp_mode == CSP_MODE_DIGEST)
			ses->swcr_process = swcr_gmac;
		break;
	case CRYPTO_POLY1305:
	case CRYPTO_BLAKE2B:
	case CRYPTO_BLAKE2S:
		/*
		 * Blake2b and Blake2s support an optional key but do
		 * not require one.
		 */
		if (csp->csp_auth_klen == 0 || csp->csp_auth_key != NULL)
			axf->Setkey(swa->sw_ictx, csp->csp_auth_key,
			    csp->csp_auth_klen);
		axf->Init(swa->sw_ictx);
		if (csp->csp_mode == CSP_MODE_DIGEST)
			ses->swcr_process = swcr_authcompute;
		break;
	case CRYPTO_AES_CCM_CBC_MAC:
		axf->Init(swa->sw_ictx);
		axf->Setkey(swa->sw_ictx, csp->csp_auth_key,
		    csp->csp_auth_klen);
		if (csp->csp_mode == CSP_MODE_DIGEST)
			ses->swcr_process = swcr_ccm_cbc_mac;
		break;
	}

	return (0);
}

static int
swcr_setup_gcm(struct swcr_session *ses,
    const struct crypto_session_params *csp)
{
	struct swcr_auth *swa;
	struct auth_hash *axf;

	if (csp->csp_ivlen != AES_GCM_IV_LEN)
		return (EINVAL);

	/* First, setup the auth side. */
	swa = &ses->swcr_auth;
	switch (csp->csp_cipher_klen * 8) {
	case 128:
		axf = &auth_hash_nist_gmac_aes_128;
		break;
	case 192:
		axf = &auth_hash_nist_gmac_aes_192;
		break;
	case 256:
		axf = &auth_hash_nist_gmac_aes_256;
		break;
	default:
		return (EINVAL);
	}
	swa->sw_axf = axf;
	if (csp->csp_auth_mlen < 0 || csp->csp_auth_mlen > axf->hashsize)
		return (EINVAL);
	if (csp->csp_auth_mlen == 0)
		swa->sw_mlen = axf->hashsize;
	else
		swa->sw_mlen = csp->csp_auth_mlen;
	swa->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA, M_NOWAIT);
	if (swa->sw_ictx == NULL)
		return (ENOBUFS);
	axf->Init(swa->sw_ictx);
	if (csp->csp_cipher_key != NULL)
		axf->Setkey(swa->sw_ictx, csp->csp_cipher_key,
		    csp->csp_cipher_klen);

	/* Second, setup the cipher side. */
	return (swcr_setup_cipher(ses, csp));
}

static int
swcr_setup_ccm(struct swcr_session *ses,
    const struct crypto_session_params *csp)
{
	struct swcr_auth *swa;
	struct auth_hash *axf;

	if (csp->csp_ivlen != AES_CCM_IV_LEN)
		return (EINVAL);

	/* First, setup the auth side. */
	swa = &ses->swcr_auth;
	switch (csp->csp_cipher_klen * 8) {
	case 128:
		axf = &auth_hash_ccm_cbc_mac_128;
		break;
	case 192:
		axf = &auth_hash_ccm_cbc_mac_192;
		break;
	case 256:
		axf = &auth_hash_ccm_cbc_mac_256;
		break;
	default:
		return (EINVAL);
	}
	swa->sw_axf = axf;
	if (csp->csp_auth_mlen < 0 || csp->csp_auth_mlen > axf->hashsize)
		return (EINVAL);
	if (csp->csp_auth_mlen == 0)
		swa->sw_mlen = axf->hashsize;
	else
		swa->sw_mlen = csp->csp_auth_mlen;
	swa->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA, M_NOWAIT);
	if (swa->sw_ictx == NULL)
		return (ENOBUFS);
	axf->Init(swa->sw_ictx);
	if (csp->csp_cipher_key != NULL)
		axf->Setkey(swa->sw_ictx, csp->csp_cipher_key,
		    csp->csp_cipher_klen);

	/* Second, setup the cipher side. */
	return (swcr_setup_cipher(ses, csp));
}

static bool
swcr_auth_supported(const struct crypto_session_params *csp)
{
	struct auth_hash *axf;

	axf = crypto_auth_hash(csp);
	if (axf == NULL)
		return (false);
	switch (csp->csp_auth_alg) {
	case CRYPTO_SHA1_HMAC:
	case CRYPTO_SHA2_224_HMAC:
	case CRYPTO_SHA2_256_HMAC:
	case CRYPTO_SHA2_384_HMAC:
	case CRYPTO_SHA2_512_HMAC:
	case CRYPTO_NULL_HMAC:
	case CRYPTO_RIPEMD160_HMAC:
		break;
	case CRYPTO_AES_NIST_GMAC:
		switch (csp->csp_auth_klen * 8) {
		case 128:
		case 192:
		case 256:
			break;
		default:
			return (false);
		}
		if (csp->csp_auth_key == NULL)
			return (false);
		if (csp->csp_ivlen != AES_GCM_IV_LEN)
			return (false);
		break;
	case CRYPTO_POLY1305:
		if (csp->csp_auth_klen != POLY1305_KEY_LEN)
			return (false);
		break;
	case CRYPTO_AES_CCM_CBC_MAC:
		switch (csp->csp_auth_klen * 8) {
		case 128:
		case 192:
		case 256:
			break;
		default:
			return (false);
		}
		if (csp->csp_auth_key == NULL)
			return (false);
		if (csp->csp_ivlen != AES_CCM_IV_LEN)
			return (false);
		break;
	}
	return (true);
}

static bool
swcr_cipher_supported(const struct crypto_session_params *csp)
{
	struct enc_xform *txf;

	txf = crypto_cipher(csp);
	if (txf == NULL)
		return (false);
	if (csp->csp_cipher_alg != CRYPTO_NULL_CBC &&
	    txf->ivsize != csp->csp_ivlen)
		return (false);
	return (true);
}

static int
swcr_probesession(device_t dev, const struct crypto_session_params *csp)
{

	if ((csp->csp_flags & ~(CSP_F_SEPARATE_OUTPUT)) != 0)
		return (EINVAL);
	switch (csp->csp_mode) {
	case CSP_MODE_COMPRESS:
		switch (csp->csp_cipher_alg) {
		case CRYPTO_DEFLATE_COMP:
			break;
		default:
			return (EINVAL);
		}
		break;
	case CSP_MODE_CIPHER:
		switch (csp->csp_cipher_alg) {
		case CRYPTO_AES_NIST_GCM_16:
		case CRYPTO_AES_CCM_16:
			return (EINVAL);
		default:
			if (!swcr_cipher_supported(csp))
				return (EINVAL);
			break;
		}
		break;
	case CSP_MODE_DIGEST:
		if (!swcr_auth_supported(csp))
			return (EINVAL);
		break;
	case CSP_MODE_AEAD:
		switch (csp->csp_cipher_alg) {
		case CRYPTO_AES_NIST_GCM_16:
		case CRYPTO_AES_CCM_16:
			break;
		default:
			return (EINVAL);
		}
		break;
	case CSP_MODE_ETA:
		/* AEAD algorithms cannot be used for EtA. */
		switch (csp->csp_cipher_alg) {
		case CRYPTO_AES_NIST_GCM_16:
		case CRYPTO_AES_CCM_16:
			return (EINVAL);
		}
		switch (csp->csp_auth_alg) {
		case CRYPTO_AES_NIST_GMAC:
		case CRYPTO_AES_CCM_CBC_MAC:
			return (EINVAL);
		}

		if (!swcr_cipher_supported(csp) ||
		    !swcr_auth_supported(csp))
			return (EINVAL);
		break;
	default:
		return (EINVAL);
	}
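
	/*
	 * NB: CRYPTODEV_PROBE_SOFTWARE is the weakest probe priority,
	 * so any hardware driver that also claims these session
	 * parameters is preferred to this fallback.
	 */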
	return (CRYPTODEV_PROBE_SOFTWARE);
}

/*
 * Generate a new software session.
 */
static int
swcr_newsession(device_t dev, crypto_session_t cses,
    const struct crypto_session_params *csp)
{
	struct swcr_session *ses;
	struct swcr_encdec *swe;
	struct swcr_auth *swa;
	struct comp_algo *cxf;
	int error;

	ses = crypto_get_driver_session(cses);
	mtx_init(&ses->swcr_lock, "swcr session lock", NULL, MTX_DEF);

	error = 0;
	swe = &ses->swcr_encdec;
	swa = &ses->swcr_auth;
	switch (csp->csp_mode) {
	case CSP_MODE_COMPRESS:
		switch (csp->csp_cipher_alg) {
		case CRYPTO_DEFLATE_COMP:
			cxf = &comp_algo_deflate;
			break;
#ifdef INVARIANTS
		default:
			panic("bad compression algo");
#endif
		}
		ses->swcr_compdec.sw_cxf = cxf;
		ses->swcr_process = swcr_compdec;
		break;
	case CSP_MODE_CIPHER:
		switch (csp->csp_cipher_alg) {
		case CRYPTO_NULL_CBC:
			ses->swcr_process = swcr_null;
			break;
#ifdef INVARIANTS
		case CRYPTO_AES_NIST_GCM_16:
		case CRYPTO_AES_CCM_16:
			panic("bad cipher algo");
#endif
		default:
			error = swcr_setup_cipher(ses, csp);
			if (error == 0)
				ses->swcr_process = swcr_encdec;
		}
		break;
	case CSP_MODE_DIGEST:
		error = swcr_setup_auth(ses, csp);
		break;
	case CSP_MODE_AEAD:
		switch (csp->csp_cipher_alg) {
		case CRYPTO_AES_NIST_GCM_16:
			error = swcr_setup_gcm(ses, csp);
			if (error == 0)
				ses->swcr_process = swcr_gcm;
			break;
		case CRYPTO_AES_CCM_16:
			error = swcr_setup_ccm(ses, csp);
			if (error == 0)
				ses->swcr_process = swcr_ccm;
			break;
#ifdef INVARIANTS
		default:
			panic("bad aead algo");
#endif
		}
		break;
	case CSP_MODE_ETA:
#ifdef INVARIANTS
		switch (csp->csp_cipher_alg) {
		case CRYPTO_AES_NIST_GCM_16:
		case CRYPTO_AES_CCM_16:
			panic("bad eta cipher algo");
		}
		switch (csp->csp_auth_alg) {
		case CRYPTO_AES_NIST_GMAC:
		case CRYPTO_AES_CCM_CBC_MAC:
			panic("bad eta auth algo");
		}
#endif

		error = swcr_setup_auth(ses, csp);
		if (error)
			break;
		if (csp->csp_cipher_alg == CRYPTO_NULL_CBC) {
			/* Effectively degrade to digest mode. */
			ses->swcr_process = swcr_authcompute;
			break;
		}

		error = swcr_setup_cipher(ses, csp);
		if (error == 0)
			ses->swcr_process = swcr_eta;
		break;
	default:
		error = EINVAL;
	}

	if (error)
		swcr_freesession(dev, cses);
	return (error);
}

static void
swcr_freesession(device_t dev, crypto_session_t cses)
{
	struct swcr_session *ses;
	struct swcr_auth *swa;
	struct auth_hash *axf;

	ses = crypto_get_driver_session(cses);

	mtx_destroy(&ses->swcr_lock);

	zfree(ses->swcr_encdec.sw_kschedule, M_CRYPTO_DATA);

	axf = ses->swcr_auth.sw_axf;
	if (axf != NULL) {
		swa = &ses->swcr_auth;
		if (swa->sw_ictx != NULL) {
			explicit_bzero(swa->sw_ictx, axf->ctxsize);
			free(swa->sw_ictx, M_CRYPTO_DATA);
		}
		if (swa->sw_octx != NULL) {
			explicit_bzero(swa->sw_octx, axf->ctxsize);
			free(swa->sw_octx, M_CRYPTO_DATA);
		}
	}
}

/*
 * Process a software request.
 */
static int
swcr_process(device_t dev, struct cryptop *crp, int hint)
{
	struct swcr_session *ses;

	ses = crypto_get_driver_session(crp->crp_session);
	mtx_lock(&ses->swcr_lock);
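
	/*
	 * The per-session mutex serializes requests against a single
	 * session; independent sessions may run concurrently.
	 */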
	crp->crp_etype = ses->swcr_process(ses, crp);

	mtx_unlock(&ses->swcr_lock);
	crypto_done(crp);
	return (0);
}

static void
swcr_identify(driver_t *drv, device_t parent)
{

	/* NB: order 10 is so we get attached after h/w devices */
	if (device_find_child(parent, "cryptosoft", -1) == NULL &&
	    BUS_ADD_CHILD(parent, 10, "cryptosoft", 0) == 0)
		panic("cryptosoft: could not attach");
}

static int
swcr_probe(device_t dev)
{
	device_set_desc(dev, "software crypto");
	return (BUS_PROBE_NOWILDCARD);
}

static int
swcr_attach(device_t dev)
{

	swcr_id = crypto_get_driverid(dev, sizeof(struct swcr_session),
	    CRYPTOCAP_F_SOFTWARE | CRYPTOCAP_F_SYNC);
	if (swcr_id < 0) {
		device_printf(dev, "cannot initialize!\n");
		return (ENXIO);
	}

	return (0);
}

static int
swcr_detach(device_t dev)
{
	crypto_unregister_all(swcr_id);
	return (0);
}

static device_method_t swcr_methods[] = {
	DEVMETHOD(device_identify,	swcr_identify),
	DEVMETHOD(device_probe,		swcr_probe),
	DEVMETHOD(device_attach,	swcr_attach),
	DEVMETHOD(device_detach,	swcr_detach),

	DEVMETHOD(cryptodev_probesession, swcr_probesession),
	DEVMETHOD(cryptodev_newsession,	swcr_newsession),
	DEVMETHOD(cryptodev_freesession, swcr_freesession),
	DEVMETHOD(cryptodev_process,	swcr_process),

	{0, 0},
};

static driver_t swcr_driver = {
	"cryptosoft",
	swcr_methods,
	0,		/* NB: no softc */
};
static devclass_t swcr_devclass;

/*
 * NB: We explicitly reference the crypto module so we
 * get the necessary ordering when built as a loadable
 * module.  This is required because we bundle the crypto
 * module code together with the cryptosoft driver (otherwise
 * normal module dependencies would handle things).
 */
extern int crypto_modevent(struct module *, int, void *);
/* XXX where to attach */
DRIVER_MODULE(cryptosoft, nexus, swcr_driver, swcr_devclass, crypto_modevent, 0);
MODULE_VERSION(cryptosoft, 1);
MODULE_DEPEND(cryptosoft, crypto, 1, 1, 1);