/* $OpenBSD: cryptosoft.c,v 1.35 2002/04/26 08:43:50 deraadt Exp $ */

/*-
 * The author of this code is Angelos D. Keromytis (angelos@cis.upenn.edu)
 * Copyright (c) 2002-2006 Sam Leffler, Errno Consulting
 *
 * This code was written by Angelos D. Keromytis in Athens, Greece, in
 * February 2000. Network Security Technologies Inc. (NSTI) kindly
 * supported the development of this code.
 *
 * Copyright (c) 2000, 2001 Angelos D. Keromytis
 * Copyright (c) 2014 The FreeBSD Foundation
 * All rights reserved.
 *
 * Portions of this software were developed by John-Mark Gurney
 * under sponsorship of the FreeBSD Foundation and
 * Rubicon Communications, LLC (Netgate).
 *
 * Permission to use, copy, and modify this software with or without fee
 * is hereby granted, provided that this entire notice is included in
 * all source code copies of any software which is or includes a copy or
 * modification of this software.
 *
 * THIS SOFTWARE IS BEING PROVIDED "AS IS", WITHOUT ANY EXPRESS OR
 * IMPLIED WARRANTY. IN PARTICULAR, NONE OF THE AUTHORS MAKES ANY
 * REPRESENTATION OR WARRANTY OF ANY KIND CONCERNING THE
 * MERCHANTABILITY OF THIS SOFTWARE OR ITS FITNESS FOR ANY PARTICULAR
 * PURPOSE.
 */
#include <sys/cdefs.h>
__FBSDID("$FreeBSD$");

#include <sys/param.h>
#include <sys/systm.h>
#include <sys/malloc.h>
#include <sys/module.h>
#include <sys/sysctl.h>
#include <sys/errno.h>
#include <sys/random.h>
#include <sys/kernel.h>
#include <sys/rwlock.h>
#include <sys/endian.h>
#include <sys/limits.h>
#include <sys/mutex.h>

#include <crypto/sha1.h>
#include <opencrypto/rmd160.h>

#include <opencrypto/cryptodev.h>
#include <opencrypto/xform.h>

#include "cryptodev_if.h"
struct swcr_auth {
        void *sw_ictx;
        void *sw_octx;
        struct auth_hash *sw_axf;
        uint16_t sw_mlen;
};

struct swcr_encdec {
        void *sw_kschedule;
        struct enc_xform *sw_exf;
};

struct swcr_compdec {
        struct comp_algo *sw_cxf;
};

struct swcr_session {
        struct mtx swcr_lock;
        int (*swcr_process)(struct swcr_session *, struct cryptop *);

        struct swcr_auth swcr_auth;
        struct swcr_encdec swcr_encdec;
        struct swcr_compdec swcr_compdec;
};
static int32_t swcr_id;

static void swcr_freesession(device_t dev, crypto_session_t cses);

/* Used for CRYPTO_NULL_CBC. */
static int
swcr_null(struct swcr_session *ses, struct cryptop *crp)
{

        return (0);
}
/*
 * Apply a symmetric encryption/decryption algorithm.
 */
static int
swcr_encdec(struct swcr_session *ses, struct cryptop *crp)
{
        unsigned char iv[EALG_MAX_BLOCK_LEN], blk[EALG_MAX_BLOCK_LEN];
        unsigned char *ivp, *nivp, iv2[EALG_MAX_BLOCK_LEN];
        const struct crypto_session_params *csp;
        struct swcr_encdec *sw;
        struct enc_xform *exf;
        int i, blks, inlen, ivlen, outlen, resid;
        struct crypto_buffer_cursor cc_in, cc_out;
        const unsigned char *inblk;
        unsigned char *outblk;
        int error;
        bool encrypting;
        sw = &ses->swcr_encdec;
        exf = sw->sw_exf;

        if (exf->native_blocksize == 0) {
                /* Check for non-padded data */
                if ((crp->crp_payload_length % exf->blocksize) != 0)
                        return (EINVAL);

                blks = exf->blocksize;
        } else
                blks = exf->native_blocksize;

        if (exf == &enc_xform_aes_icm &&
            (crp->crp_flags & CRYPTO_F_IV_SEPARATE) == 0)
                return (EINVAL);

        if (crp->crp_cipher_key != NULL) {
                csp = crypto_get_params(crp->crp_session);
                error = exf->setkey(sw->sw_kschedule,
                    crp->crp_cipher_key, csp->csp_cipher_klen);
                if (error)
                        return (error);
        }

        crypto_read_iv(crp, iv);

        /*
         * xforms that provide a reinit method perform all IV
         * handling themselves.
         */
        if (exf->reinit)
                exf->reinit(sw->sw_kschedule, iv);

        ivp = iv;
        crypto_cursor_init(&cc_in, &crp->crp_buf);
        crypto_cursor_advance(&cc_in, crp->crp_payload_start);
        inlen = crypto_cursor_seglen(&cc_in);
        inblk = crypto_cursor_segbase(&cc_in);
        if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
                crypto_cursor_init(&cc_out, &crp->crp_obuf);
                crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
        } else
                cc_out = cc_in;
        outlen = crypto_cursor_seglen(&cc_out);
        outblk = crypto_cursor_segbase(&cc_out);

        resid = crp->crp_payload_length;
        encrypting = CRYPTO_OP_IS_ENCRYPT(crp->crp_op);
        /*
         * Loop through encrypting blocks.  'inlen' is the remaining
         * length of the current segment in the input buffer.
         * 'outlen' is the remaining length of the current segment in
         * the output buffer.
         */
        while (resid >= blks) {
                /*
                 * If the current block is not contained within the
                 * current input/output segment, use 'blk' as a local
                 * buffer.
                 */
                if (inlen < blks) {
                        crypto_cursor_copydata(&cc_in, blks, blk);
                        inblk = blk;
                }
                if (outlen < blks)
                        outblk = blk;

                /*
                 * Ciphers without a 'reinit' hook are assumed to be
                 * used in CBC mode where the chaining is done here
                 * (see the illustrative sketch after this function).
                 */
                if (exf->reinit != NULL) {
                        if (encrypting)
                                exf->encrypt(sw->sw_kschedule, inblk, outblk);
                        else
                                exf->decrypt(sw->sw_kschedule, inblk, outblk);
                } else if (encrypting) {
                        /* XOR with previous block */
                        for (i = 0; i < blks; i++)
                                outblk[i] = inblk[i] ^ ivp[i];

                        exf->encrypt(sw->sw_kschedule, outblk, outblk);

                        /*
                         * Keep encrypted block for XOR'ing
                         * with next block
                         */
                        memcpy(iv, outblk, blks);
                        ivp = iv;
                } else {        /* decrypt */
                        /*
                         * Keep encrypted block for XOR'ing
                         * with next block
                         */
                        nivp = (ivp == iv) ? iv2 : iv;
                        memcpy(nivp, inblk, blks);

                        exf->decrypt(sw->sw_kschedule, inblk, outblk);

                        /* XOR with previous block */
                        for (i = 0; i < blks; i++)
                                outblk[i] ^= ivp[i];

                        ivp = nivp;
                }

                if (inlen < blks) {
                        inlen = crypto_cursor_seglen(&cc_in);
                        inblk = crypto_cursor_segbase(&cc_in);
                } else {
                        crypto_cursor_advance(&cc_in, blks);
                        inlen -= blks;
                        inblk += blks;
                }

                if (outlen < blks) {
                        crypto_cursor_copyback(&cc_out, blks, blk);
                        outlen = crypto_cursor_seglen(&cc_out);
                        outblk = crypto_cursor_segbase(&cc_out);
                } else {
                        crypto_cursor_advance(&cc_out, blks);
                        outlen -= blks;
                        outblk += blks;
                }

                resid -= blks;
        }
        /* Handle trailing partial block for stream ciphers. */
        if (resid > 0) {
                KASSERT(exf->native_blocksize != 0,
                    ("%s: partial block of %d bytes for cipher %s",
                    __func__, i, exf->name));
                KASSERT(exf->reinit != NULL,
                    ("%s: partial block cipher %s without reinit hook",
                    __func__, exf->name));
                KASSERT(resid < blks, ("%s: partial block too big", __func__));

                inlen = crypto_cursor_seglen(&cc_in);
                outlen = crypto_cursor_seglen(&cc_out);
                if (inlen < resid) {
                        crypto_cursor_copydata(&cc_in, resid, blk);
                        inblk = blk;
                } else
                        inblk = crypto_cursor_segbase(&cc_in);
                if (outlen < resid)
                        outblk = blk;
                else
                        outblk = crypto_cursor_segbase(&cc_out);
                if (encrypting)
                        exf->encrypt_last(sw->sw_kschedule, inblk, outblk,
                            resid);
                else
                        exf->decrypt_last(sw->sw_kschedule, inblk, outblk,
                            resid);
                if (outlen < resid)
                        crypto_cursor_copyback(&cc_out, resid, blk);
        }

        explicit_bzero(blk, sizeof(blk));
        explicit_bzero(iv, sizeof(iv));
        explicit_bzero(iv2, sizeof(iv2));
        return (0);
}
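#ifdef CRYPTOSOFT_EXAMPLES
/*
 * Illustrative sketch only (CRYPTOSOFT_EXAMPLES is a hypothetical guard,
 * never defined by the build): the CBC chaining that swcr_encdec() does
 * above, restated for a single contiguous buffer.  'cbc_block_fn' and
 * 'example_cbc_encrypt' are made-up names; 'encrypt1' stands in for
 * exf->encrypt and processes exactly one 'blks'-byte block in place.
 */
typedef void (*cbc_block_fn)(void *ks, const uint8_t *in, uint8_t *out);

static void
example_cbc_encrypt(void *ks, cbc_block_fn encrypt1, uint8_t *buf,
    size_t len, const uint8_t *iv, size_t blks)
{
        const uint8_t *ivp = iv;
        size_t i, off;

        for (off = 0; off + blks <= len; off += blks) {
                /* XOR with the IV or the previous ciphertext block. */
                for (i = 0; i < blks; i++)
                        buf[off + i] ^= ivp[i];
                encrypt1(ks, buf + off, buf + off);
                /* The block just produced chains into the next one. */
                ivp = buf + off;
        }
}
#endif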
static void
swcr_authprepare(struct auth_hash *axf, struct swcr_auth *sw,
    const uint8_t *key, int klen)
{

        switch (axf->type) {
        case CRYPTO_SHA1_HMAC:
        case CRYPTO_SHA2_224_HMAC:
        case CRYPTO_SHA2_256_HMAC:
        case CRYPTO_SHA2_384_HMAC:
        case CRYPTO_SHA2_512_HMAC:
        case CRYPTO_NULL_HMAC:
        case CRYPTO_RIPEMD160_HMAC:
                hmac_init_ipad(axf, key, klen, sw->sw_ictx);
                hmac_init_opad(axf, key, klen, sw->sw_octx);
                break;
        case CRYPTO_POLY1305:
        case CRYPTO_BLAKE2B:
        case CRYPTO_BLAKE2S:
                axf->Setkey(sw->sw_ictx, key, klen);
                axf->Init(sw->sw_ictx);
                break;
        default:
                panic("%s: algorithm %d doesn't use keys", __func__, axf->type);
        }
}
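#ifdef CRYPTOSOFT_EXAMPLES
/*
 * Illustrative sketch only (CRYPTOSOFT_EXAMPLES is a hypothetical guard):
 * what the precomputed contexts above hold.  HMAC(K, m) is
 * H((K' ^ opad) || H((K' ^ ipad) || m)) with K' the key zero-padded (or
 * first hashed down) to the hash block size; hmac_init_ipad() and
 * hmac_init_opad() save the hash state after absorbing the two padded
 * keys, so each request only resumes from sw_ictx/sw_octx.  The helper
 * below is a made-up name showing just the pad derivation.
 */
static void
example_hmac_pads(const uint8_t *key, size_t klen, size_t blocklen,
    uint8_t *ipad, uint8_t *opad)
{
        size_t i;

        /* Assumes klen <= blocklen; longer keys are hashed down first. */
        for (i = 0; i < blocklen; i++) {
                uint8_t k = (i < klen) ? key[i] : 0;

                ipad[i] = k ^ 0x36;
                opad[i] = k ^ 0x5c;
        }
}
#endif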
/*
 * Compute or verify hash.
 */
static int
swcr_authcompute(struct swcr_session *ses, struct cryptop *crp)
{
        u_char aalg[HASH_MAX_LEN];
        const struct crypto_session_params *csp;
        struct swcr_auth *sw;
        struct auth_hash *axf;
        union authctx ctx;
        int err;

        sw = &ses->swcr_auth;

        axf = sw->sw_axf;

        csp = crypto_get_params(crp->crp_session);
        if (crp->crp_auth_key != NULL) {
                swcr_authprepare(axf, sw, crp->crp_auth_key,
                    csp->csp_auth_klen);
        }

        bcopy(sw->sw_ictx, &ctx, axf->ctxsize);

        if (crp->crp_aad != NULL)
                err = axf->Update(&ctx, crp->crp_aad, crp->crp_aad_length);
        else
                err = crypto_apply(crp, crp->crp_aad_start, crp->crp_aad_length,
                    axf->Update, &ctx);
        if (err)
                goto out;

        if (CRYPTO_HAS_OUTPUT_BUFFER(crp) &&
            CRYPTO_OP_IS_ENCRYPT(crp->crp_op))
                err = crypto_apply_buf(&crp->crp_obuf,
                    crp->crp_payload_output_start, crp->crp_payload_length,
                    axf->Update, &ctx);
        else
                err = crypto_apply(crp, crp->crp_payload_start,
                    crp->crp_payload_length, axf->Update, &ctx);
        if (err)
                goto out;

        if (csp->csp_flags & CSP_F_ESN)
                axf->Update(&ctx, crp->crp_esn, 4);

        axf->Final(aalg, &ctx);
        if (sw->sw_octx != NULL) {
                bcopy(sw->sw_octx, &ctx, axf->ctxsize);
                axf->Update(&ctx, aalg, axf->hashsize);
                axf->Final(aalg, &ctx);
        }

        if (crp->crp_op & CRYPTO_OP_VERIFY_DIGEST) {
                u_char uaalg[HASH_MAX_LEN];

                crypto_copydata(crp, crp->crp_digest_start, sw->sw_mlen, uaalg);
                if (timingsafe_bcmp(aalg, uaalg, sw->sw_mlen) != 0)
                        err = EBADMSG;
                explicit_bzero(uaalg, sizeof(uaalg));
        } else {
                /* Inject the authentication data */
                crypto_copyback(crp, crp->crp_digest_start, sw->sw_mlen, aalg);
        }
        explicit_bzero(aalg, sizeof(aalg));
out:
        explicit_bzero(&ctx, sizeof(ctx));
        return (err);
}
CTASSERT(INT_MAX <= (1ll<<39) - 256);   /* GCM: plain text < 2^39-256 */
CTASSERT(INT_MAX <= (uint64_t)-1);      /* GCM: associated data <= 2^64-1 */
static int
swcr_gmac(struct swcr_session *ses, struct cryptop *crp)
{
        uint32_t blkbuf[howmany(AES_BLOCK_LEN, sizeof(uint32_t))];
        u_char *blk = (u_char *)blkbuf;
        u_char tag[GMAC_DIGEST_LEN];
        u_char iv[AES_BLOCK_LEN];
        struct crypto_buffer_cursor cc;
        const u_char *inblk;
        union authctx ctx;
        struct swcr_auth *swa;
        struct auth_hash *axf;
        uint32_t *blkp;
        int blksz, error, ivlen, len, resid;

        swa = &ses->swcr_auth;
        axf = swa->sw_axf;

        bcopy(swa->sw_ictx, &ctx, axf->ctxsize);
        blksz = GMAC_BLOCK_LEN;
        KASSERT(axf->blocksize == blksz, ("%s: axf block size mismatch",
            __func__));

        /* Initialize the IV */
        ivlen = AES_GCM_IV_LEN;
        crypto_read_iv(crp, iv);

        axf->Reinit(&ctx, iv, ivlen);
        crypto_cursor_init(&cc, &crp->crp_buf);
        crypto_cursor_advance(&cc, crp->crp_payload_start);
        for (resid = crp->crp_payload_length; resid >= blksz; resid -= len) {
                len = crypto_cursor_seglen(&cc);
                if (len >= blksz) {
                        inblk = crypto_cursor_segbase(&cc);
                        len = rounddown(MIN(len, resid), blksz);
                        crypto_cursor_advance(&cc, len);
                } else {
                        len = blksz;
                        crypto_cursor_copydata(&cc, len, blk);
                        inblk = blk;
                }
                axf->Update(&ctx, inblk, len);
        }
        if (resid > 0) {
                memset(blk, 0, blksz);
                crypto_cursor_copydata(&cc, resid, blk);
                axf->Update(&ctx, blk, blksz);
        }

        /* length block (see the sketch after this function) */
        memset(blk, 0, blksz);
        blkp = (uint32_t *)blk + 1;
        *blkp = htobe32(crp->crp_payload_length * 8);
        axf->Update(&ctx, blk, blksz);

        /* Finalize MAC */
        axf->Final(tag, &ctx);

        error = 0;
        if (crp->crp_op & CRYPTO_OP_VERIFY_DIGEST) {
                u_char tag2[GMAC_DIGEST_LEN];

                crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
                    tag2);
                if (timingsafe_bcmp(tag, tag2, swa->sw_mlen) != 0)
                        error = EBADMSG;
                explicit_bzero(tag2, sizeof(tag2));
        } else {
                /* Inject the authentication data */
                crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen, tag);
        }
        explicit_bzero(blkbuf, sizeof(blkbuf));
        explicit_bzero(tag, sizeof(tag));
        explicit_bzero(iv, sizeof(iv));
        return (error);
}
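#ifdef CRYPTOSOFT_EXAMPLES
/*
 * Illustrative sketch only (CRYPTOSOFT_EXAMPLES is a hypothetical guard):
 * the GHASH "length block" built at the end of swcr_gmac() above and of
 * swcr_gcm() below.  GCM closes the hash with one 16-byte block holding
 * len(AAD) and len(ciphertext) as 64-bit big-endian bit counts.  The
 * driver assumes both counts fit in 32 bits, so it stores single
 * htobe32() words in the low halves (byte offsets 4 and 12); GMAC
 * authenticates the payload as AAD, leaving the second field zero.
 * 'example_ghash_length_block' is a made-up name.
 */
static void
example_ghash_length_block(uint8_t blk[16], uint64_t aad_bytes,
    uint64_t ct_bytes)
{
        uint64_t *p = (uint64_t *)blk;

        p[0] = htobe64(aad_bytes * 8);  /* bits of AAD */
        p[1] = htobe64(ct_bytes * 8);   /* bits of ciphertext */
}
#endif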
static int
swcr_gcm(struct swcr_session *ses, struct cryptop *crp)
{
        uint32_t blkbuf[howmany(AES_BLOCK_LEN, sizeof(uint32_t))];
        u_char *blk = (u_char *)blkbuf;
        u_char tag[GMAC_DIGEST_LEN];
        u_char iv[AES_BLOCK_LEN];
        struct crypto_buffer_cursor cc_in, cc_out;
        const u_char *inblk;
        u_char *outblk;
        union authctx ctx;
        struct swcr_auth *swa;
        struct swcr_encdec *swe;
        struct auth_hash *axf;
        struct enc_xform *exf;
        uint32_t *blkp;
        int blksz, error, ivlen, len, r, resid;

        swa = &ses->swcr_auth;
        axf = swa->sw_axf;

        bcopy(swa->sw_ictx, &ctx, axf->ctxsize);
        blksz = GMAC_BLOCK_LEN;
        KASSERT(axf->blocksize == blksz, ("%s: axf block size mismatch",
            __func__));

        swe = &ses->swcr_encdec;
        exf = swe->sw_exf;
        KASSERT(axf->blocksize == exf->native_blocksize,
            ("%s: blocksize mismatch", __func__));

        if ((crp->crp_flags & CRYPTO_F_IV_SEPARATE) == 0)
                return (EINVAL);

        /* Initialize the IV */
        ivlen = AES_GCM_IV_LEN;
        bcopy(crp->crp_iv, iv, ivlen);

        /* Supply MAC with IV */
        axf->Reinit(&ctx, iv, ivlen);

        /* Supply MAC with AAD */
        if (crp->crp_aad != NULL) {
                len = rounddown(crp->crp_aad_length, blksz);
                if (len != 0)
                        axf->Update(&ctx, crp->crp_aad, len);
                if (crp->crp_aad_length != len) {
                        memset(blk, 0, blksz);
                        memcpy(blk, (char *)crp->crp_aad + len,
                            crp->crp_aad_length - len);
                        axf->Update(&ctx, blk, blksz);
                }
        } else {
                crypto_cursor_init(&cc_in, &crp->crp_buf);
                crypto_cursor_advance(&cc_in, crp->crp_aad_start);
                for (resid = crp->crp_aad_length; resid >= blksz;
                     resid -= len) {
                        len = crypto_cursor_seglen(&cc_in);
                        if (len >= blksz) {
                                inblk = crypto_cursor_segbase(&cc_in);
                                len = rounddown(MIN(len, resid), blksz);
                                crypto_cursor_advance(&cc_in, len);
                        } else {
                                len = blksz;
                                crypto_cursor_copydata(&cc_in, len, blk);
                                inblk = blk;
                        }
                        axf->Update(&ctx, inblk, len);
                }
                if (resid > 0) {
                        memset(blk, 0, blksz);
                        crypto_cursor_copydata(&cc_in, resid, blk);
                        axf->Update(&ctx, blk, blksz);
                }
        }

        exf->reinit(swe->sw_kschedule, iv);

        /* Do encryption with MAC */
        crypto_cursor_init(&cc_in, &crp->crp_buf);
        crypto_cursor_advance(&cc_in, crp->crp_payload_start);
        if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
                crypto_cursor_init(&cc_out, &crp->crp_obuf);
                crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
        } else
                cc_out = cc_in;
        for (resid = crp->crp_payload_length; resid >= blksz; resid -= blksz) {
                if (crypto_cursor_seglen(&cc_in) < blksz) {
                        crypto_cursor_copydata(&cc_in, blksz, blk);
                        inblk = blk;
                } else {
                        inblk = crypto_cursor_segbase(&cc_in);
                        crypto_cursor_advance(&cc_in, blksz);
                }
                if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
                        if (crypto_cursor_seglen(&cc_out) < blksz)
                                outblk = blk;
                        else
                                outblk = crypto_cursor_segbase(&cc_out);
                        exf->encrypt(swe->sw_kschedule, inblk, outblk);
                        axf->Update(&ctx, outblk, blksz);
                        if (outblk == blk)
                                crypto_cursor_copyback(&cc_out, blksz, blk);
                        else
                                crypto_cursor_advance(&cc_out, blksz);
                } else {
                        axf->Update(&ctx, inblk, blksz);
                }
        }
        if (resid > 0) {
                crypto_cursor_copydata(&cc_in, resid, blk);
                if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
                        exf->encrypt_last(swe->sw_kschedule, blk, blk, resid);
                        crypto_cursor_copyback(&cc_out, resid, blk);
                }
                axf->Update(&ctx, blk, resid);
        }

        /* length block */
        memset(blk, 0, blksz);
        blkp = (uint32_t *)blk + 1;
        *blkp = htobe32(crp->crp_aad_length * 8);
        blkp = (uint32_t *)blk + 3;
        *blkp = htobe32(crp->crp_payload_length * 8);
        axf->Update(&ctx, blk, blksz);

        /* Finalize MAC */
        axf->Final(tag, &ctx);

        /* Validate tag */
        error = 0;
        if (!CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
                u_char tag2[GMAC_DIGEST_LEN];

                crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen, tag2);

                r = timingsafe_bcmp(tag, tag2, swa->sw_mlen);
                explicit_bzero(tag2, sizeof(tag2));
                if (r != 0) {
                        error = EBADMSG;
                        goto out;
                }

                /* tag matches, decrypt data */
                crypto_cursor_init(&cc_in, &crp->crp_buf);
                crypto_cursor_advance(&cc_in, crp->crp_payload_start);
                for (resid = crp->crp_payload_length; resid > blksz;
                     resid -= blksz) {
                        if (crypto_cursor_seglen(&cc_in) < blksz) {
                                crypto_cursor_copydata(&cc_in, blksz, blk);
                                inblk = blk;
                        } else {
                                inblk = crypto_cursor_segbase(&cc_in);
                                crypto_cursor_advance(&cc_in, blksz);
                        }
                        if (crypto_cursor_seglen(&cc_out) < blksz)
                                outblk = blk;
                        else
                                outblk = crypto_cursor_segbase(&cc_out);
                        exf->decrypt(swe->sw_kschedule, inblk, outblk);
                        if (outblk == blk)
                                crypto_cursor_copyback(&cc_out, blksz, blk);
                        else
                                crypto_cursor_advance(&cc_out, blksz);
                }
                if (resid > 0) {
                        crypto_cursor_copydata(&cc_in, resid, blk);
                        exf->decrypt_last(swe->sw_kschedule, blk, blk, resid);
                        crypto_cursor_copyback(&cc_out, resid, blk);
                }
        } else {
                /* Inject the authentication data */
                crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen, tag);
        }

out:
        explicit_bzero(blkbuf, sizeof(blkbuf));
        explicit_bzero(tag, sizeof(tag));
        explicit_bzero(iv, sizeof(iv));

        return (error);
}
static int
swcr_ccm_cbc_mac(struct swcr_session *ses, struct cryptop *crp)
{
        u_char tag[AES_CBC_MAC_HASH_LEN];
        u_char iv[AES_BLOCK_LEN];
        union authctx ctx;
        struct swcr_auth *swa;
        struct auth_hash *axf;
        int error, ivlen;

        swa = &ses->swcr_auth;
        axf = swa->sw_axf;

        bcopy(swa->sw_ictx, &ctx, axf->ctxsize);

        /* Initialize the IV */
        ivlen = AES_CCM_IV_LEN;
        crypto_read_iv(crp, iv);

        /*
         * AES CCM-CBC-MAC needs to know the length of both the auth
         * data and payload data before doing the auth computation
         * (see the B0 sketch after this function).
         */
        ctx.aes_cbc_mac_ctx.authDataLength = crp->crp_payload_length;
        ctx.aes_cbc_mac_ctx.cryptDataLength = 0;

        axf->Reinit(&ctx, iv, ivlen);
        if (crp->crp_aad != NULL)
                error = axf->Update(&ctx, crp->crp_aad, crp->crp_aad_length);
        else
                error = crypto_apply(crp, crp->crp_payload_start,
                    crp->crp_payload_length, axf->Update, &ctx);
        if (error)
                return (error);

        /* Finalize MAC */
        axf->Final(tag, &ctx);

        if (crp->crp_op & CRYPTO_OP_VERIFY_DIGEST) {
                u_char tag2[AES_CBC_MAC_HASH_LEN];

                crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
                    tag2);
                if (timingsafe_bcmp(tag, tag2, swa->sw_mlen) != 0)
                        error = EBADMSG;
                explicit_bzero(tag2, sizeof(tag2));
        } else {
                /* Inject the authentication data */
                crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen, tag);
        }
        explicit_bzero(tag, sizeof(tag));
        explicit_bzero(iv, sizeof(iv));
        return (error);
}
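#ifdef CRYPTOSOFT_EXAMPLES
/*
 * Illustrative sketch only (CRYPTOSOFT_EXAMPLES is a hypothetical guard):
 * why both lengths must be known up front.  CCM's first CBC-MAC block,
 * B0 (RFC 3610), encodes the tag length, the nonce, and the total
 * payload length, so no data can be MAC'd before both byte counts are
 * set.  Made-up builder for a 12-byte nonce, i.e. L = 3 length octets,
 * matching AES_CCM_IV_LEN above.
 */
static void
example_ccm_b0(uint8_t b0[16], const uint8_t nonce[12], size_t taglen,
    size_t payload_len, bool have_aad)
{
        /* Flags: Adata | M' = (M - 2) / 2 | L' = L - 1 */
        b0[0] = (have_aad ? 0x40 : 0x00) |
            (uint8_t)(((taglen - 2) / 2) << 3) | (3 - 1);
        memcpy(&b0[1], nonce, 12);
        /* l(m): payload length, big-endian, in the last L = 3 octets. */
        b0[13] = (uint8_t)(payload_len >> 16);
        b0[14] = (uint8_t)(payload_len >> 8);
        b0[15] = (uint8_t)payload_len;
}
#endif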
static int
swcr_ccm(struct swcr_session *ses, struct cryptop *crp)
{
        uint32_t blkbuf[howmany(AES_BLOCK_LEN, sizeof(uint32_t))];
        u_char *blk = (u_char *)blkbuf;
        u_char tag[AES_CBC_MAC_HASH_LEN];
        u_char iv[AES_BLOCK_LEN];
        struct crypto_buffer_cursor cc_in, cc_out;
        const u_char *inblk;
        u_char *outblk;
        union authctx ctx;
        struct swcr_auth *swa;
        struct swcr_encdec *swe;
        struct auth_hash *axf;
        struct enc_xform *exf;
        int blksz, error, ivlen, r, resid;

        swa = &ses->swcr_auth;
        axf = swa->sw_axf;

        bcopy(swa->sw_ictx, &ctx, axf->ctxsize);
        blksz = AES_BLOCK_LEN;
        KASSERT(axf->blocksize == blksz, ("%s: axf block size mismatch",
            __func__));

        swe = &ses->swcr_encdec;
        exf = swe->sw_exf;
        KASSERT(axf->blocksize == exf->native_blocksize,
            ("%s: blocksize mismatch", __func__));

        if ((crp->crp_flags & CRYPTO_F_IV_SEPARATE) == 0)
                return (EINVAL);

        /* Initialize the IV */
        ivlen = AES_CCM_IV_LEN;
        bcopy(crp->crp_iv, iv, ivlen);

        /*
         * AES CCM-CBC-MAC needs to know the length of both the auth
         * data and payload data before doing the auth computation.
         */
        ctx.aes_cbc_mac_ctx.authDataLength = crp->crp_aad_length;
        ctx.aes_cbc_mac_ctx.cryptDataLength = crp->crp_payload_length;

        /* Supply MAC with IV */
        axf->Reinit(&ctx, iv, ivlen);

        /* Supply MAC with AAD */
        if (crp->crp_aad != NULL)
                error = axf->Update(&ctx, crp->crp_aad, crp->crp_aad_length);
        else
                error = crypto_apply(crp, crp->crp_aad_start,
                    crp->crp_aad_length, axf->Update, &ctx);
        if (error)
                return (error);

        exf->reinit(swe->sw_kschedule, iv);

        /* Do encryption/decryption with MAC */
        crypto_cursor_init(&cc_in, &crp->crp_buf);
        crypto_cursor_advance(&cc_in, crp->crp_payload_start);
        if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
                crypto_cursor_init(&cc_out, &crp->crp_obuf);
                crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
        } else
                cc_out = cc_in;
        for (resid = crp->crp_payload_length; resid >= blksz; resid -= blksz) {
                if (crypto_cursor_seglen(&cc_in) < blksz) {
                        crypto_cursor_copydata(&cc_in, blksz, blk);
                        inblk = blk;
                } else {
                        inblk = crypto_cursor_segbase(&cc_in);
                        crypto_cursor_advance(&cc_in, blksz);
                }
                if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
                        if (crypto_cursor_seglen(&cc_out) < blksz)
                                outblk = blk;
                        else
                                outblk = crypto_cursor_segbase(&cc_out);
                        axf->Update(&ctx, inblk, blksz);
                        exf->encrypt(swe->sw_kschedule, inblk, outblk);
                        if (outblk == blk)
                                crypto_cursor_copyback(&cc_out, blksz, blk);
                        else
                                crypto_cursor_advance(&cc_out, blksz);
                } else {
                        /*
                         * One of the problems with CCM+CBC is that
                         * the authentication is done on the
                         * unencrypted data.  As a result, we have to
                         * decrypt the data twice: once to generate
                         * the tag and a second time after the tag is
                         * verified.
                         */
                        exf->decrypt(swe->sw_kschedule, inblk, blk);
                        axf->Update(&ctx, blk, blksz);
                }
        }
        if (resid > 0) {
                crypto_cursor_copydata(&cc_in, resid, blk);
                if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
                        axf->Update(&ctx, blk, resid);
                        exf->encrypt_last(swe->sw_kschedule, blk, blk, resid);
                        crypto_cursor_copyback(&cc_out, resid, blk);
                } else {
                        exf->decrypt_last(swe->sw_kschedule, blk, blk, resid);
                        axf->Update(&ctx, blk, resid);
                }
        }

        /* Finalize MAC */
        axf->Final(tag, &ctx);

        /* Validate tag */
        error = 0;
        if (!CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
                u_char tag2[AES_CBC_MAC_HASH_LEN];

                crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
                    tag2);

                r = timingsafe_bcmp(tag, tag2, swa->sw_mlen);
                explicit_bzero(tag2, sizeof(tag2));
                if (r != 0) {
                        error = EBADMSG;
                        goto out;
                }

                /* tag matches, decrypt data */
                exf->reinit(swe->sw_kschedule, iv);
                crypto_cursor_init(&cc_in, &crp->crp_buf);
                crypto_cursor_advance(&cc_in, crp->crp_payload_start);
                for (resid = crp->crp_payload_length; resid > blksz;
                     resid -= blksz) {
                        if (crypto_cursor_seglen(&cc_in) < blksz) {
                                crypto_cursor_copydata(&cc_in, blksz, blk);
                                inblk = blk;
                        } else {
                                inblk = crypto_cursor_segbase(&cc_in);
                                crypto_cursor_advance(&cc_in, blksz);
                        }
                        if (crypto_cursor_seglen(&cc_out) < blksz)
                                outblk = blk;
                        else
                                outblk = crypto_cursor_segbase(&cc_out);
                        exf->decrypt(swe->sw_kschedule, inblk, outblk);
                        if (outblk == blk)
                                crypto_cursor_copyback(&cc_out, blksz, blk);
                        else
                                crypto_cursor_advance(&cc_out, blksz);
                }
                if (resid > 0) {
                        crypto_cursor_copydata(&cc_in, resid, blk);
                        exf->decrypt_last(swe->sw_kschedule, blk, blk, resid);
                        crypto_cursor_copyback(&cc_out, resid, blk);
                }
        } else {
                /* Inject the authentication data */
                crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen, tag);
        }

out:
        explicit_bzero(blkbuf, sizeof(blkbuf));
        explicit_bzero(tag, sizeof(tag));
        explicit_bzero(iv, sizeof(iv));
        return (error);
}
/*
 * Apply a cipher and a digest to perform EtA.
 */
static int
swcr_eta(struct swcr_session *ses, struct cryptop *crp)
{
        int error;

        if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
                error = swcr_encdec(ses, crp);
                if (error == 0)
                        error = swcr_authcompute(ses, crp);
        } else {
                error = swcr_authcompute(ses, crp);
                if (error == 0)
                        error = swcr_encdec(ses, crp);
        }
        return (error);
}
/*
 * Apply a compression/decompression algorithm
 */
static int
swcr_compdec(struct swcr_session *ses, struct cryptop *crp)
{
        u_int8_t *data, *out;
        struct comp_algo *cxf;
        int adj;
        u_int32_t result;

        cxf = ses->swcr_compdec.sw_cxf;

        /*
         * We must process the whole buffer in one go, so if the data
         * is not contiguous in the mbuf, copy it into a local buffer.
         */
        data = malloc(crp->crp_payload_length, M_CRYPTO_DATA, M_NOWAIT);
        if (data == NULL)
                return (EINVAL);
        crypto_copydata(crp, crp->crp_payload_start, crp->crp_payload_length,
            data);

        if (CRYPTO_OP_IS_COMPRESS(crp->crp_op))
                result = cxf->compress(data, crp->crp_payload_length, &out);
        else
                result = cxf->decompress(data, crp->crp_payload_length, &out);

        free(data, M_CRYPTO_DATA);
        if (result == 0)
                return (EINVAL);
        crp->crp_olen = result;

        /* Check the compressed size when doing compression */
        if (CRYPTO_OP_IS_COMPRESS(crp->crp_op)) {
                if (result >= crp->crp_payload_length) {
                        /* Compression was useless, we lost time */
                        free(out, M_CRYPTO_DATA);
                        return (0);
                }
        }

        /*
         * Copy back the (de)compressed data.  m_copyback() extends
         * the mbuf as necessary.
         */
        crypto_copyback(crp, crp->crp_payload_start, result, out);
        if (result < crp->crp_payload_length) {
                switch (crp->crp_buf.cb_type) {
                case CRYPTO_BUF_MBUF:
                        adj = result - crp->crp_payload_length;
                        m_adj(crp->crp_buf.cb_mbuf, adj);
                        break;
                case CRYPTO_BUF_UIO: {
                        struct uio *uio = crp->crp_buf.cb_uio;
                        int ind;

                        adj = crp->crp_payload_length - result;
                        ind = uio->uio_iovcnt - 1;

                        while (adj > 0 && ind >= 0) {
                                if (adj < uio->uio_iov[ind].iov_len) {
                                        uio->uio_iov[ind].iov_len -= adj;
                                        break;
                                }

                                adj -= uio->uio_iov[ind].iov_len;
                                uio->uio_iov[ind].iov_len = 0;
                                ind--;
                                uio->uio_iovcnt--;
                        }
                        }
                        break;
                case CRYPTO_BUF_VMPAGE:
                        adj = crp->crp_payload_length - result;
                        crp->crp_buf.cb_vm_page_len -= adj;
                        break;
                default:
                        break;
                }
        }
        free(out, M_CRYPTO_DATA);
        return (0);
}
static int
swcr_setup_cipher(struct swcr_session *ses,
    const struct crypto_session_params *csp)
{
        struct swcr_encdec *swe;
        struct enc_xform *txf;
        int error;

        swe = &ses->swcr_encdec;
        txf = crypto_cipher(csp);
        MPASS(txf->ivsize == csp->csp_ivlen);
        if (txf->ctxsize != 0) {
                swe->sw_kschedule = malloc(txf->ctxsize, M_CRYPTO_DATA,
                    M_NOWAIT);
                if (swe->sw_kschedule == NULL)
                        return (ENOMEM);
        }
        if (csp->csp_cipher_key != NULL) {
                error = txf->setkey(swe->sw_kschedule,
                    csp->csp_cipher_key, csp->csp_cipher_klen);
                if (error)
                        return (error);
        }
        swe->sw_exf = txf;
        return (0);
}
static int
swcr_setup_auth(struct swcr_session *ses,
    const struct crypto_session_params *csp)
{
        struct swcr_auth *swa;
        struct auth_hash *axf;

        swa = &ses->swcr_auth;

        axf = crypto_auth_hash(csp);
        swa->sw_axf = axf;
        if (csp->csp_auth_mlen < 0 || csp->csp_auth_mlen > axf->hashsize)
                return (EINVAL);
        if (csp->csp_auth_mlen == 0)
                swa->sw_mlen = axf->hashsize;
        else
                swa->sw_mlen = csp->csp_auth_mlen;
        swa->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA, M_NOWAIT);
        if (swa->sw_ictx == NULL)
                return (ENOBUFS);

        switch (csp->csp_auth_alg) {
        case CRYPTO_SHA1_HMAC:
        case CRYPTO_SHA2_224_HMAC:
        case CRYPTO_SHA2_256_HMAC:
        case CRYPTO_SHA2_384_HMAC:
        case CRYPTO_SHA2_512_HMAC:
        case CRYPTO_NULL_HMAC:
        case CRYPTO_RIPEMD160_HMAC:
                swa->sw_octx = malloc(axf->ctxsize, M_CRYPTO_DATA,
                    M_NOWAIT);
                if (swa->sw_octx == NULL)
                        return (ENOBUFS);

                if (csp->csp_auth_key != NULL) {
                        swcr_authprepare(axf, swa, csp->csp_auth_key,
                            csp->csp_auth_klen);
                }

                if (csp->csp_mode == CSP_MODE_DIGEST)
                        ses->swcr_process = swcr_authcompute;
                break;
        case CRYPTO_SHA1:
        case CRYPTO_SHA2_224:
        case CRYPTO_SHA2_256:
        case CRYPTO_SHA2_384:
        case CRYPTO_SHA2_512:
                axf->Init(swa->sw_ictx);
                if (csp->csp_mode == CSP_MODE_DIGEST)
                        ses->swcr_process = swcr_authcompute;
                break;
        case CRYPTO_AES_NIST_GMAC:
                axf->Init(swa->sw_ictx);
                axf->Setkey(swa->sw_ictx, csp->csp_auth_key,
                    csp->csp_auth_klen);
                if (csp->csp_mode == CSP_MODE_DIGEST)
                        ses->swcr_process = swcr_gmac;
                break;
        case CRYPTO_POLY1305:
        case CRYPTO_BLAKE2B:
        case CRYPTO_BLAKE2S:
                /*
                 * Blake2b and Blake2s support an optional key but do
                 * not require one.
                 */
                if (csp->csp_auth_klen == 0 || csp->csp_auth_key != NULL)
                        axf->Setkey(swa->sw_ictx, csp->csp_auth_key,
                            csp->csp_auth_klen);
                axf->Init(swa->sw_ictx);
                if (csp->csp_mode == CSP_MODE_DIGEST)
                        ses->swcr_process = swcr_authcompute;
                break;
        case CRYPTO_AES_CCM_CBC_MAC:
                axf->Init(swa->sw_ictx);
                axf->Setkey(swa->sw_ictx, csp->csp_auth_key,
                    csp->csp_auth_klen);
                if (csp->csp_mode == CSP_MODE_DIGEST)
                        ses->swcr_process = swcr_ccm_cbc_mac;
                break;
        }

        return (0);
}
static int
swcr_setup_gcm(struct swcr_session *ses,
    const struct crypto_session_params *csp)
{
        struct swcr_auth *swa;
        struct auth_hash *axf;

        if (csp->csp_ivlen != AES_GCM_IV_LEN)
                return (EINVAL);

        /* First, setup the auth side. */
        swa = &ses->swcr_auth;
        switch (csp->csp_cipher_klen * 8) {
        case 128:
                axf = &auth_hash_nist_gmac_aes_128;
                break;
        case 192:
                axf = &auth_hash_nist_gmac_aes_192;
                break;
        case 256:
                axf = &auth_hash_nist_gmac_aes_256;
                break;
        default:
                return (EINVAL);
        }
        swa->sw_axf = axf;
        if (csp->csp_auth_mlen < 0 || csp->csp_auth_mlen > axf->hashsize)
                return (EINVAL);
        if (csp->csp_auth_mlen == 0)
                swa->sw_mlen = axf->hashsize;
        else
                swa->sw_mlen = csp->csp_auth_mlen;
        swa->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA, M_NOWAIT);
        if (swa->sw_ictx == NULL)
                return (ENOBUFS);
        axf->Init(swa->sw_ictx);
        if (csp->csp_cipher_key != NULL)
                axf->Setkey(swa->sw_ictx, csp->csp_cipher_key,
                    csp->csp_cipher_klen);

        /* Second, setup the cipher side. */
        return (swcr_setup_cipher(ses, csp));
}
static int
swcr_setup_ccm(struct swcr_session *ses,
    const struct crypto_session_params *csp)
{
        struct swcr_auth *swa;
        struct auth_hash *axf;

        if (csp->csp_ivlen != AES_CCM_IV_LEN)
                return (EINVAL);

        /* First, setup the auth side. */
        swa = &ses->swcr_auth;
        switch (csp->csp_cipher_klen * 8) {
        case 128:
                axf = &auth_hash_ccm_cbc_mac_128;
                break;
        case 192:
                axf = &auth_hash_ccm_cbc_mac_192;
                break;
        case 256:
                axf = &auth_hash_ccm_cbc_mac_256;
                break;
        default:
                return (EINVAL);
        }
        swa->sw_axf = axf;
        if (csp->csp_auth_mlen < 0 || csp->csp_auth_mlen > axf->hashsize)
                return (EINVAL);
        if (csp->csp_auth_mlen == 0)
                swa->sw_mlen = axf->hashsize;
        else
                swa->sw_mlen = csp->csp_auth_mlen;
        swa->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA, M_NOWAIT);
        if (swa->sw_ictx == NULL)
                return (ENOBUFS);
        axf->Init(swa->sw_ictx);
        if (csp->csp_cipher_key != NULL)
                axf->Setkey(swa->sw_ictx, csp->csp_cipher_key,
                    csp->csp_cipher_klen);

        /* Second, setup the cipher side. */
        return (swcr_setup_cipher(ses, csp));
}
static bool
swcr_auth_supported(const struct crypto_session_params *csp)
{
        struct auth_hash *axf;

        axf = crypto_auth_hash(csp);
        if (axf == NULL)
                return (false);
        switch (csp->csp_auth_alg) {
        case CRYPTO_SHA1_HMAC:
        case CRYPTO_SHA2_224_HMAC:
        case CRYPTO_SHA2_256_HMAC:
        case CRYPTO_SHA2_384_HMAC:
        case CRYPTO_SHA2_512_HMAC:
        case CRYPTO_NULL_HMAC:
        case CRYPTO_RIPEMD160_HMAC:
                break;
        case CRYPTO_AES_NIST_GMAC:
                switch (csp->csp_auth_klen * 8) {
                case 128:
                case 192:
                case 256:
                        break;
                default:
                        return (false);
                }
                if (csp->csp_auth_key == NULL)
                        return (false);
                if (csp->csp_ivlen != AES_GCM_IV_LEN)
                        return (false);
                break;
        case CRYPTO_POLY1305:
                if (csp->csp_auth_klen != POLY1305_KEY_LEN)
                        return (false);
                break;
        case CRYPTO_AES_CCM_CBC_MAC:
                switch (csp->csp_auth_klen * 8) {
                case 128:
                case 192:
                case 256:
                        break;
                default:
                        return (false);
                }
                if (csp->csp_auth_key == NULL)
                        return (false);
                if (csp->csp_ivlen != AES_CCM_IV_LEN)
                        return (false);
                break;
        }
        return (true);
}
static bool
swcr_cipher_supported(const struct crypto_session_params *csp)
{
        struct enc_xform *txf;

        txf = crypto_cipher(csp);
        if (txf == NULL)
                return (false);
        if (csp->csp_cipher_alg != CRYPTO_NULL_CBC &&
            txf->ivsize != csp->csp_ivlen)
                return (false);
        return (true);
}
#define SUPPORTED_SES (CSP_F_SEPARATE_OUTPUT | CSP_F_SEPARATE_AAD | CSP_F_ESN)

static int
swcr_probesession(device_t dev, const struct crypto_session_params *csp)
{

        if ((csp->csp_flags & ~(SUPPORTED_SES)) != 0)
                return (EINVAL);
        switch (csp->csp_mode) {
        case CSP_MODE_COMPRESS:
                switch (csp->csp_cipher_alg) {
                case CRYPTO_DEFLATE_COMP:
                        break;
                default:
                        return (EINVAL);
                }
                break;
        case CSP_MODE_CIPHER:
                switch (csp->csp_cipher_alg) {
                case CRYPTO_AES_NIST_GCM_16:
                case CRYPTO_AES_CCM_16:
                        return (EINVAL);
                default:
                        if (!swcr_cipher_supported(csp))
                                return (EINVAL);
                        break;
                }
                break;
        case CSP_MODE_DIGEST:
                if (!swcr_auth_supported(csp))
                        return (EINVAL);
                break;
        case CSP_MODE_AEAD:
                switch (csp->csp_cipher_alg) {
                case CRYPTO_AES_NIST_GCM_16:
                case CRYPTO_AES_CCM_16:
                        break;
                default:
                        return (EINVAL);
                }
                break;
        case CSP_MODE_ETA:
                /* AEAD algorithms cannot be used for EtA. */
                switch (csp->csp_cipher_alg) {
                case CRYPTO_AES_NIST_GCM_16:
                case CRYPTO_AES_CCM_16:
                        return (EINVAL);
                }
                switch (csp->csp_auth_alg) {
                case CRYPTO_AES_NIST_GMAC:
                case CRYPTO_AES_CCM_CBC_MAC:
                        return (EINVAL);
                }

                if (!swcr_cipher_supported(csp) ||
                    !swcr_auth_supported(csp))
                        return (EINVAL);
                break;
        default:
                return (EINVAL);
        }

        return (CRYPTODEV_PROBE_SOFTWARE);
}
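#ifdef CRYPTOSOFT_EXAMPLES
/*
 * Illustrative sketch only (CRYPTOSOFT_EXAMPLES is a hypothetical guard):
 * one way a consumer might describe an AES-128-GCM AEAD session that
 * swcr_probesession() above accepts.  Field usage follows crypto(9);
 * 'example_gcm_session' is a made-up name and error handling is elided.
 */
static int
example_gcm_session(crypto_session_t *csesp, const uint8_t key[16])
{
        struct crypto_session_params csp;

        memset(&csp, 0, sizeof(csp));
        csp.csp_mode = CSP_MODE_AEAD;   /* cipher and digest in one op */
        csp.csp_cipher_alg = CRYPTO_AES_NIST_GCM_16;
        csp.csp_cipher_key = key;
        csp.csp_cipher_klen = 16;       /* AES-128 */
        csp.csp_ivlen = AES_GCM_IV_LEN; /* 96-bit nonce */

        /*
         * A CRYPTOCAP_F_SOFTWARE crid steers the session to software
         * drivers such as cryptosoft.
         */
        return (crypto_newsession(csesp, &csp, CRYPTOCAP_F_SOFTWARE));
}
#endif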
/*
 * Generate a new software session.
 */
static int
swcr_newsession(device_t dev, crypto_session_t cses,
    const struct crypto_session_params *csp)
{
        struct swcr_session *ses;
        struct swcr_encdec *swe;
        struct swcr_auth *swa;
        struct comp_algo *cxf;
        int error;

        ses = crypto_get_driver_session(cses);
        mtx_init(&ses->swcr_lock, "swcr session lock", NULL, MTX_DEF);

        error = 0;
        swe = &ses->swcr_encdec;
        swa = &ses->swcr_auth;
        switch (csp->csp_mode) {
        case CSP_MODE_COMPRESS:
                switch (csp->csp_cipher_alg) {
                case CRYPTO_DEFLATE_COMP:
                        cxf = &comp_algo_deflate;
                        break;
#ifdef INVARIANTS
                default:
                        panic("bad compression algo");
#endif
                }
                ses->swcr_compdec.sw_cxf = cxf;
                ses->swcr_process = swcr_compdec;
                break;
        case CSP_MODE_CIPHER:
                switch (csp->csp_cipher_alg) {
                case CRYPTO_NULL_CBC:
                        ses->swcr_process = swcr_null;
                        break;
#ifdef INVARIANTS
                case CRYPTO_AES_NIST_GCM_16:
                case CRYPTO_AES_CCM_16:
                        panic("bad cipher algo");
#endif
                default:
                        error = swcr_setup_cipher(ses, csp);
                        if (error == 0)
                                ses->swcr_process = swcr_encdec;
                }
                break;
        case CSP_MODE_DIGEST:
                error = swcr_setup_auth(ses, csp);
                break;
        case CSP_MODE_AEAD:
                switch (csp->csp_cipher_alg) {
                case CRYPTO_AES_NIST_GCM_16:
                        error = swcr_setup_gcm(ses, csp);
                        if (error == 0)
                                ses->swcr_process = swcr_gcm;
                        break;
                case CRYPTO_AES_CCM_16:
                        error = swcr_setup_ccm(ses, csp);
                        if (error == 0)
                                ses->swcr_process = swcr_ccm;
                        break;
#ifdef INVARIANTS
                default:
                        panic("bad aead algo");
#endif
                }
                break;
        case CSP_MODE_ETA:
#ifdef INVARIANTS
                switch (csp->csp_cipher_alg) {
                case CRYPTO_AES_NIST_GCM_16:
                case CRYPTO_AES_CCM_16:
                        panic("bad eta cipher algo");
                }
                switch (csp->csp_auth_alg) {
                case CRYPTO_AES_NIST_GMAC:
                case CRYPTO_AES_CCM_CBC_MAC:
                        panic("bad eta auth algo");
                }
#endif

                error = swcr_setup_auth(ses, csp);
                if (error)
                        break;
                if (csp->csp_cipher_alg == CRYPTO_NULL_CBC) {
                        /* Effectively degrade to digest mode. */
                        ses->swcr_process = swcr_authcompute;
                        break;
                }

                error = swcr_setup_cipher(ses, csp);
                if (error == 0)
                        ses->swcr_process = swcr_eta;
                break;
        default:
                error = EINVAL;
        }

        if (error)
                swcr_freesession(dev, cses);
        return (error);
}
static void
swcr_freesession(device_t dev, crypto_session_t cses)
{
        struct swcr_session *ses;

        ses = crypto_get_driver_session(cses);

        mtx_destroy(&ses->swcr_lock);

        zfree(ses->swcr_encdec.sw_kschedule, M_CRYPTO_DATA);
        zfree(ses->swcr_auth.sw_ictx, M_CRYPTO_DATA);
        zfree(ses->swcr_auth.sw_octx, M_CRYPTO_DATA);
}
/*
 * Process a software request.
 */
static int
swcr_process(device_t dev, struct cryptop *crp, int hint)
{
        struct swcr_session *ses;

        ses = crypto_get_driver_session(crp->crp_session);
        mtx_lock(&ses->swcr_lock);

        crp->crp_etype = ses->swcr_process(ses, crp);

        mtx_unlock(&ses->swcr_lock);
        crypto_done(crp);
        return (0);
}
static void
swcr_identify(driver_t *drv, device_t parent)
{
        /* NB: order 10 is so we get attached after h/w devices */
        if (device_find_child(parent, "cryptosoft", -1) == NULL &&
            BUS_ADD_CHILD(parent, 10, "cryptosoft", 0) == 0)
                panic("cryptosoft: could not attach");
}

static int
swcr_probe(device_t dev)
{
        device_set_desc(dev, "software crypto");
        return (BUS_PROBE_NOWILDCARD);
}

static int
swcr_attach(device_t dev)
{

        swcr_id = crypto_get_driverid(dev, sizeof(struct swcr_session),
            CRYPTOCAP_F_SOFTWARE | CRYPTOCAP_F_SYNC);
        if (swcr_id < 0) {
                device_printf(dev, "cannot initialize!\n");
                return (ENXIO);
        }
        return (0);
}

static int
swcr_detach(device_t dev)
{
        crypto_unregister_all(swcr_id);
        return (0);
}
static device_method_t swcr_methods[] = {
        DEVMETHOD(device_identify,      swcr_identify),
        DEVMETHOD(device_probe,         swcr_probe),
        DEVMETHOD(device_attach,        swcr_attach),
        DEVMETHOD(device_detach,        swcr_detach),

        DEVMETHOD(cryptodev_probesession, swcr_probesession),
        DEVMETHOD(cryptodev_newsession, swcr_newsession),
        DEVMETHOD(cryptodev_freesession, swcr_freesession),
        DEVMETHOD(cryptodev_process,    swcr_process),

        {0, 0},
};

static driver_t swcr_driver = {
        "cryptosoft",
        swcr_methods,
        0,              /* NB: no softc */
};
static devclass_t swcr_devclass;

/*
 * NB: We explicitly reference the crypto module so we
 * get the necessary ordering when built as a loadable
 * module.  This is required because we bundle the crypto
 * module code together with the cryptosoft driver (otherwise
 * normal module dependencies would handle things).
 */
extern int crypto_modevent(struct module *, int, void *);

/* XXX where to attach */
DRIVER_MODULE(cryptosoft, nexus, swcr_driver, swcr_devclass, crypto_modevent, 0);
MODULE_VERSION(cryptosoft, 1);
MODULE_DEPEND(cryptosoft, crypto, 1, 1, 1);