1 /* $OpenBSD: cryptosoft.c,v 1.35 2002/04/26 08:43:50 deraadt Exp $ */
4 * The author of this code is Angelos D. Keromytis (angelos@cis.upenn.edu)
5 * Copyright (c) 2002-2006 Sam Leffler, Errno Consulting
7 * This code was written by Angelos D. Keromytis in Athens, Greece, in
8 * February 2000. Network Security Technologies Inc. (NSTI) kindly
9 * supported the development of this code.
11 * Copyright (c) 2000, 2001 Angelos D. Keromytis
12 * Copyright (c) 2014 The FreeBSD Foundation
13 * All rights reserved.
15 * Portions of this software were developed by John-Mark Gurney
16 * under sponsorship of the FreeBSD Foundation and
17 * Rubicon Communications, LLC (Netgate).
19 * Permission to use, copy, and modify this software with or without fee
20 * is hereby granted, provided that this entire notice is included in
21 * all source code copies of any software which is or includes a copy or
22 * modification of this software.
24 * THIS SOFTWARE IS BEING PROVIDED "AS IS", WITHOUT ANY EXPRESS OR
25 * IMPLIED WARRANTY. IN PARTICULAR, NONE OF THE AUTHORS MAKES ANY
26 * REPRESENTATION OR WARRANTY OF ANY KIND CONCERNING THE
27 * MERCHANTABILITY OF THIS SOFTWARE OR ITS FITNESS FOR ANY PARTICULAR
31 #include <sys/cdefs.h>
32 __FBSDID("$FreeBSD$");
34 #include <sys/param.h>
35 #include <sys/systm.h>
36 #include <sys/malloc.h>
38 #include <sys/module.h>
39 #include <sys/sysctl.h>
40 #include <sys/errno.h>
41 #include <sys/random.h>
42 #include <sys/kernel.h>
45 #include <sys/rwlock.h>
46 #include <sys/endian.h>
47 #include <sys/limits.h>
48 #include <sys/mutex.h>
50 #include <crypto/blowfish/blowfish.h>
51 #include <crypto/sha1.h>
52 #include <opencrypto/rmd160.h>
53 #include <opencrypto/cast.h>
54 #include <opencrypto/skipjack.h>
57 #include <opencrypto/cryptodev.h>
58 #include <opencrypto/xform.h>
62 #include "cryptodev_if.h"
/*
 * NOTE(review): this file is an elided excerpt of the cryptosoft driver;
 * original line numbers are fused into each line and many intermediate
 * lines are missing.  The fields below belong to several per-session
 * state structures (swcr_auth, swcr_encdec, swcr_compdec, swcr_session)
 * whose opening declarations are not visible here — confirm against the
 * full source.
 */
/* Auth transform descriptor for this session's hash algorithm. */
67 struct auth_hash *sw_axf;
/* Opaque expanded key schedule owned by the cipher transform. */
73 uint8_t *sw_kschedule;
/* Cipher transform descriptor (encrypt/decrypt/setkey/zerokey ops). */
74 struct enc_xform *sw_exf;
/* Compression transform descriptor (deflate). */
78 struct comp_algo *sw_cxf;
/* Per-session dispatch hook: set at newsession time to the handler
 * (swcr_encdec, swcr_authcompute, swcr_gcm, ...) for this mode. */
83 int (*swcr_process)(struct swcr_session *, struct cryptop *);
85 struct swcr_auth swcr_auth;
86 struct swcr_encdec swcr_encdec;
87 struct swcr_compdec swcr_compdec;
/* Driver id returned by crypto_get_driverid() in swcr_attach(). */
90 static int32_t swcr_id;
/* Forward declaration: newsession error path calls freesession. */
92 static void swcr_freesession(device_t dev, crypto_session_t cses);
94 /* Used for CRYPTO_NULL_CBC. */
/*
 * No-op cipher handler: CRYPTO_NULL_CBC passes data through unchanged.
 * (Body elided in this excerpt; presumably just returns 0 — confirm.)
 */
96 swcr_null(struct swcr_session *ses, struct cryptop *crp)
103  * Apply a symmetric encryption/decryption algorithm.
/*
 * Walks the request buffer as a uio/iovec list and applies the session
 * cipher block-by-block, handling CBC chaining in software for xforms
 * without a reinit method.  Returns 0 or an errno.
 * (Excerpt: several statements and the closing error/cleanup path are
 * elided; fused line numbers show the gaps.)
 */
106 swcr_encdec(struct swcr_session *ses, struct cryptop *crp)
/* iv/iv2 double-buffer the chaining vector; blk stages partial blocks. */
108 unsigned char iv[EALG_MAX_BLOCK_LEN], blk[EALG_MAX_BLOCK_LEN];
109 unsigned char *ivp, *nivp, iv2[EALG_MAX_BLOCK_LEN];
110 const struct crypto_session_params *csp;
111 struct swcr_encdec *sw;
112 struct enc_xform *exf;
113 int i, j, k, blks, ind, count, ivlen;
/* uiolcl/iovlcl: small stack-local uio used for contiguous buffers so
 * the loop below only has to deal with one buffer representation. */
114 struct uio *uio, uiolcl;
115 struct iovec iovlcl[4];
117 int iovcnt, iovalloc;
123 sw = &ses->swcr_encdec;
125 blks = exf->blocksize;
128 /* Check for non-padded data */
/* Payload must be a whole number of cipher blocks; otherwise EINVAL
 * (error return elided in this excerpt). */
129 if ((crp->crp_payload_length % blks) != 0)
/* AES-ICM (CTR) requires an explicitly supplied IV/counter. */
132 if (exf == &enc_xform_aes_icm &&
133 (crp->crp_flags & CRYPTO_F_IV_SEPARATE) == 0)
136 /* IV explicitly provided ? */
137 if (crp->crp_flags & CRYPTO_F_IV_SEPARATE)
138 bcopy(crp->crp_iv, iv, ivlen);
139 else if (crp->crp_flags & CRYPTO_F_IV_GENERATE) {
/* Generate a random IV and write it back into the buffer so the
 * caller/peer can read it. */
140 arc4rand(iv, ivlen, 0);
141 crypto_copyback(crp, crp->crp_iv_start, ivlen, iv);
/* else: IV lives in the request buffer at crp_iv_start. */
143 crypto_copydata(crp, crp->crp_iv_start, ivlen, iv);
/* Per-request key overrides the session key: rekey the schedule. */
145 if (crp->crp_cipher_key != NULL) {
146 if (sw->sw_kschedule)
147 exf->zerokey(&(sw->sw_kschedule));
149 csp = crypto_get_params(crp->crp_session);
150 error = exf->setkey(&sw->sw_kschedule,
151 crp->crp_cipher_key, csp->csp_cipher_klen);
157 iovcnt = nitems(iovlcl);
/* Normalize the request buffer (mbuf chain / uio / contiguous) into a
 * single uio so the block loop has one representation to walk. */
160 switch (crp->crp_buf_type) {
161 case CRYPTO_BUF_MBUF:
162 error = crypto_mbuftoiov(crp->crp_mbuf, &iov, &iovcnt,
167 uio->uio_iovcnt = iovcnt;
172 case CRYPTO_BUF_CONTIG:
173 iov[0].iov_base = crp->crp_buf;
174 iov[0].iov_len = crp->crp_ilen;
184  * xforms that provide a reinit method perform all IV
185  * handling themselves.
187 exf->reinit(sw->sw_kschedule, iv);
/* Seek to the start of the payload within the uio. */
190 count = crp->crp_payload_start;
191 ind = cuio_getptr(uio, count, &k);
/* i counts remaining payload bytes. */
197 i = crp->crp_payload_length;
198 encrypting = CRYPTO_OP_IS_ENCRYPT(crp->crp_op);
202  * If there's insufficient data at the end of
203  * an iovec, we have to do some copying.
/* Slow path: the next cipher block straddles an iovec boundary, so
 * copy it out to blk, transform it, and copy it back. */
205 if (uio->uio_iov[ind].iov_len < k + blks &&
206 uio->uio_iov[ind].iov_len != k) {
207 cuio_copydata(uio, count, blks, blk);
209 /* Actual encryption/decryption */
212 exf->encrypt(sw->sw_kschedule,
215 exf->decrypt(sw->sw_kschedule,
218 } else if (encrypting) {
219 /* XOR with previous block */
/* Software CBC: XOR plaintext with previous ciphertext/IV. */
220 for (j = 0; j < blks; j++)
223 exf->encrypt(sw->sw_kschedule, blk);
226  * Keep encrypted block for XOR'ing
229 bcopy(blk, iv, blks);
231 } else { /* decrypt */
233  * Keep encrypted block for XOR'ing
/* Ping-pong between iv and iv2 so the pre-decryption ciphertext
 * survives for chaining into the next block. */
236 nivp = (ivp == iv) ? iv2 : iv;
237 bcopy(blk, nivp, blks);
239 exf->decrypt(sw->sw_kschedule, blk);
241 /* XOR with previous block */
242 for (j = 0; j < blks; j++)
248 /* Copy back decrypted block */
249 cuio_copyback(uio, count, blks, blk);
253 /* Advance pointer */
254 ind = cuio_getptr(uio, count, &k);
262 /* Could be done... */
/* Fast path: transform blocks in place while they fit entirely
 * inside the current iovec. */
267 while (uio->uio_iov[ind].iov_len >= k + blks && i > 0) {
273 uio->uio_iov[ind].iov_len - (size_t)k);
274 idat = (uint8_t *)uio->uio_iov[ind].iov_base + k;
/* Prefer the xform's multi-block entry point when available
 * (amortizes per-call overhead over nb = rounddown(rem, blks)). */
277 if (encrypting && exf->encrypt_multi == NULL)
278 exf->encrypt(sw->sw_kschedule,
280 else if (encrypting) {
281 nb = rounddown(rem, blks);
282 exf->encrypt_multi(sw->sw_kschedule,
284 } else if (exf->decrypt_multi == NULL)
285 exf->decrypt(sw->sw_kschedule,
288 nb = rounddown(rem, blks);
289 exf->decrypt_multi(sw->sw_kschedule,
292 } else if (encrypting) {
293 /* XOR with previous block/IV */
294 for (j = 0; j < blks; j++)
297 exf->encrypt(sw->sw_kschedule, idat);
299 } else { /* decrypt */
301  * Keep encrypted block to be used
302  * in next block's processing.
304 nivp = (ivp == iv) ? iv2 : iv;
305 bcopy(idat, nivp, blks);
307 exf->decrypt(sw->sw_kschedule, idat);
309 /* XOR with previous block/IV */
310 for (j = 0; j < blks; j++)
322  * Advance to the next iov if the end of the current iov
323  * is aligned with the end of a cipher block.
324  * Note that the code is equivalent to calling:
325  * ind = cuio_getptr(uio, count, &k);
327 if (i > 0 && k == uio->uio_iov[ind].iov_len) {
330 if (ind >= uio->uio_iovcnt) {
/* free(NULL) is a no-op, so this is safe when no iov was allocated. */
339 free(iov, M_CRYPTO_DATA);
/*
 * Initialize the per-session hash contexts (sw_ictx/sw_octx) for the
 * given key, per-algorithm: HMAC ipad/opad precomputation, KPDK
 * key-prefix setup, or a plain Setkey for keyed digests.
 * (Excerpt: break statements and some branches are elided.)
 */
345 swcr_authprepare(struct auth_hash *axf, struct swcr_auth *sw,
346     const uint8_t *key, int klen)
350 case CRYPTO_MD5_HMAC:
351 case CRYPTO_SHA1_HMAC:
352 case CRYPTO_SHA2_224_HMAC:
353 case CRYPTO_SHA2_256_HMAC:
354 case CRYPTO_SHA2_384_HMAC:
355 case CRYPTO_SHA2_512_HMAC:
356 case CRYPTO_NULL_HMAC:
357 case CRYPTO_RIPEMD160_HMAC:
/* Precompute the inner/outer padded-key states once per key so each
 * request only clones the context instead of re-hashing the key. */
358 hmac_init_ipad(axf, key, klen, sw->sw_ictx);
359 hmac_init_opad(axf, key, klen, sw->sw_octx);
361 case CRYPTO_MD5_KPDK:
362 case CRYPTO_SHA1_KPDK:
365  * We need a buffer that can hold an md5 and a sha1 result
366  * just to throw it away.
367  * What we do here is the initial part of:
368  *   ALGO( key, keyfill, .. )
369  * adding the key to sw_ictx and abusing Final() to get the
371  * In addition we abuse the sw_octx to save the key to have
372  * it to be able to append it at the end in swcr_authcompute().
374 u_char buf[SHA1_RESULTLEN];
/* NOTE(review): sw_octx must have been sized >= klen by
 * swcr_setup_auth() for this bcopy to be safe — confirm. */
376 bcopy(key, sw->sw_octx, klen);
377 axf->Init(sw->sw_ictx);
378 axf->Update(sw->sw_ictx, key, klen);
/* Final() here is only used to force the key through the padding
 * logic; buf is discarded. */
379 axf->Final(buf, sw->sw_ictx);
382 case CRYPTO_POLY1305:
385 axf->Setkey(sw->sw_ictx, key, klen);
386 axf->Init(sw->sw_ictx);
/* Reaching here with any other algorithm is a driver bug. */
389 panic("%s: algorithm %d doesn't use keys", __func__, axf->type);
394  * Compute or verify hash.
/*
 * Digest handler: hashes AAD + payload with a clone of the session's
 * precomputed context, finishes per-algorithm (plain, HMAC outer pass,
 * or KPDK key-suffix), then either verifies against the digest in the
 * buffer (VERIFY_DIGEST) or writes the digest back.
 * Returns 0, EBADMSG on verify mismatch, or an errno (elided paths).
 */
397 swcr_authcompute(struct swcr_session *ses, struct cryptop *crp)
399 u_char aalg[HASH_MAX_LEN];
400 u_char uaalg[HASH_MAX_LEN];
401 const struct crypto_session_params *csp;
402 struct swcr_auth *sw;
403 struct auth_hash *axf;
407 sw = &ses->swcr_auth;
/* Per-request key overrides the session key: redo key setup. */
411 if (crp->crp_auth_key != NULL) {
412 csp = crypto_get_params(crp->crp_session);
413 swcr_authprepare(axf, sw, crp->crp_auth_key,
/* Work on a copy so the precomputed session context stays reusable. */
417 bcopy(sw->sw_ictx, &ctx, axf->ctxsize);
419 err = crypto_apply(crp, crp->crp_aad_start, crp->crp_aad_length,
420     (int (*)(void *, void *, unsigned int))axf->Update, &ctx);
424 err = crypto_apply(crp, crp->crp_payload_start, crp->crp_payload_length,
425     (int (*)(void *, void *, unsigned int))axf->Update, &ctx);
431 case CRYPTO_SHA2_224:
432 case CRYPTO_SHA2_256:
433 case CRYPTO_SHA2_384:
434 case CRYPTO_SHA2_512:
/* Unkeyed digests: just finalize. */
435 axf->Final(aalg, &ctx);
438 case CRYPTO_MD5_HMAC:
439 case CRYPTO_SHA1_HMAC:
440 case CRYPTO_SHA2_224_HMAC:
441 case CRYPTO_SHA2_256_HMAC:
442 case CRYPTO_SHA2_384_HMAC:
443 case CRYPTO_SHA2_512_HMAC:
444 case CRYPTO_RIPEMD160_HMAC:
445 if (sw->sw_octx == NULL)
/* HMAC outer pass: H(opad || inner-digest), using the precomputed
 * opad context saved in sw_octx. */
448 axf->Final(aalg, &ctx);
449 bcopy(sw->sw_octx, &ctx, axf->ctxsize);
450 axf->Update(&ctx, aalg, axf->hashsize);
451 axf->Final(aalg, &ctx);
454 case CRYPTO_MD5_KPDK:
455 case CRYPTO_SHA1_KPDK:
456 /* If we have no key saved, return error. */
457 if (sw->sw_octx == NULL)
461  * Add the trailing copy of the key (see comment in
462  * swcr_authprepare()) after the data:
463  *   ALGO( .., key, algofill )
464  * and let Final() do the proper, natural "algofill"
467 axf->Update(&ctx, sw->sw_octx, sw->sw_octx_len);
468 axf->Final(aalg, &ctx);
473 case CRYPTO_NULL_HMAC:
474 case CRYPTO_POLY1305:
475 axf->Final(aalg, &ctx);
479 if (crp->crp_op & CRYPTO_OP_VERIFY_DIGEST) {
480 crypto_copydata(crp, crp->crp_digest_start, sw->sw_mlen, uaalg);
/* Constant-time compare to avoid leaking digest bytes via timing. */
481 if (timingsafe_bcmp(aalg, uaalg, sw->sw_mlen) != 0)
484 /* Inject the authentication data */
485 crypto_copyback(crp, crp->crp_digest_start, sw->sw_mlen, aalg);
/* Compile-time guards: int-sized request lengths stay within GCM's
 * plaintext and AAD limits (NIST SP 800-38D). */
490 CTASSERT(INT_MAX <= (1ll<<39) - 256);	/* GCM: plain text < 2^39-256 */
491 CTASSERT(INT_MAX <= (uint64_t)-1);	/* GCM: associated data <= 2^64-1 */
/*
 * GMAC (authentication-only GCM) handler: feeds the payload to the
 * GHASH-based auth transform in zero-padded blocks, appends the
 * bit-length block, then verifies or injects the tag.
 */
494 swcr_gmac(struct swcr_session *ses, struct cryptop *crp)
/* blkbuf is uint32_t-backed to guarantee alignment for the blkp
 * stores below. */
496 uint32_t blkbuf[howmany(EALG_MAX_BLOCK_LEN, sizeof(uint32_t))];
497 u_char *blk = (u_char *)blkbuf;
498 u_char aalg[AALG_MAX_RESULT_LEN];
499 u_char uaalg[AALG_MAX_RESULT_LEN];
500 u_char iv[EALG_MAX_BLOCK_LEN];
502 struct swcr_auth *swa;
503 struct auth_hash *axf;
505 int blksz, i, ivlen, len;
507 swa = &ses->swcr_auth;
/* Clone the keyed session context; it must stay reusable. */
510 bcopy(swa->sw_ictx, &ctx, axf->ctxsize);
511 blksz = axf->blocksize;
/* Digest-only mode cannot generate an IV (error path elided). */
513 if (crp->crp_flags & CRYPTO_F_IV_GENERATE)
516 /* Initialize the IV */
517 ivlen = AES_GCM_IV_LEN;
518 if (crp->crp_flags & CRYPTO_F_IV_SEPARATE)
519 bcopy(crp->crp_iv, iv, ivlen);
521 crypto_copydata(crp, crp->crp_iv_start, ivlen, iv);
523 axf->Reinit(&ctx, iv, ivlen);
/* Hash payload in block-sized chunks, zero-padding the tail. */
524 for (i = 0; i < crp->crp_payload_length; i += blksz) {
525 len = MIN(crp->crp_payload_length - i, blksz);
526 crypto_copydata(crp, crp->crp_payload_start + i, len, blk);
527 bzero(blk + len, blksz - len);
528 axf->Update(&ctx, blk, blksz);
/* Final GHASH block: big-endian bit length of the data (length-of-AAD
 * word for GMAC, at 32-bit offset 1 within the 16-byte block). */
533 blkp = (uint32_t *)blk + 1;
534 *blkp = htobe32(crp->crp_payload_length * 8);
535 axf->Update(&ctx, blk, blksz);
538 axf->Final(aalg, &ctx);
540 if (crp->crp_op & CRYPTO_OP_VERIFY_DIGEST) {
541 crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
/* Constant-time tag comparison. */
543 if (timingsafe_bcmp(aalg, uaalg, swa->sw_mlen) != 0)
546 /* Inject the authentication data */
547 crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen, aalg);
/*
 * AES-GCM AEAD handler: GHASH the AAD, then encrypt-and-hash (or
 * hash-then-decrypt) the payload block-by-block, finish with the
 * AAD/payload bit-length block, and verify or inject the tag.
 * Decryption is decrypt-after-verify: data is only written back once
 * the tag matches (EBADMSG path elided in this excerpt).
 */
553 swcr_gcm(struct swcr_session *ses, struct cryptop *crp)
496-style aligned staging buffer:
555 uint32_t blkbuf[howmany(EALG_MAX_BLOCK_LEN, sizeof(uint32_t))];
556 u_char *blk = (u_char *)blkbuf;
557 u_char aalg[AALG_MAX_RESULT_LEN];
558 u_char uaalg[AALG_MAX_RESULT_LEN];
559 u_char iv[EALG_MAX_BLOCK_LEN];
561 struct swcr_auth *swa;
562 struct swcr_encdec *swe;
563 struct auth_hash *axf;
564 struct enc_xform *exf;
566 int blksz, i, ivlen, len, r;
568 swa = &ses->swcr_auth;
571 bcopy(swa->sw_ictx, &ctx, axf->ctxsize);
572 blksz = axf->blocksize;
574 swe = &ses->swcr_encdec;
/* AEAD requires a separately supplied IV (error path elided). */
577 if ((crp->crp_flags & CRYPTO_F_IV_SEPARATE) == 0)
580 /* Initialize the IV */
581 ivlen = AES_GCM_IV_LEN;
582 bcopy(crp->crp_iv, iv, ivlen);
584 /* Supply MAC with IV */
585 axf->Reinit(&ctx, iv, ivlen);
587 /* Supply MAC with AAD */
588 for (i = 0; i < crp->crp_aad_length; i += blksz) {
589 len = MIN(crp->crp_aad_length - i, blksz);
590 crypto_copydata(crp, crp->crp_aad_start + i, len, blk);
591 bzero(blk + len, blksz - len);
592 axf->Update(&ctx, blk, blksz);
/* Reset the CTR keystream to the start of this IV. */
595 exf->reinit(swe->sw_kschedule, iv);
597 /* Do encryption with MAC */
598 for (i = 0; i < crp->crp_payload_length; i += len) {
599 len = MIN(crp->crp_payload_length - i, blksz);
602 crypto_copydata(crp, crp->crp_payload_start + i, len, blk);
603 if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
/* GCM hashes the ciphertext: encrypt first, then Update. */
604 exf->encrypt(swe->sw_kschedule, blk);
605 axf->Update(&ctx, blk, len);
606 crypto_copyback(crp, crp->crp_payload_start + i, len,
/* Decrypt direction: hash the ciphertext now, decrypt later only
 * if the tag verifies. */
609 axf->Update(&ctx, blk, len);
/* Length block: AAD bits in the high half, payload bits in the low
 * half (32-bit words 1 and 3 of the 16-byte block). */
615 blkp = (uint32_t *)blk + 1;
616 *blkp = htobe32(crp->crp_aad_length * 8);
617 blkp = (uint32_t *)blk + 3;
618 *blkp = htobe32(crp->crp_payload_length * 8);
619 axf->Update(&ctx, blk, blksz);
622 axf->Final(aalg, &ctx);
625 if (!CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
626 crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
/* Constant-time tag check before releasing any plaintext. */
629 r = timingsafe_bcmp(aalg, uaalg, swa->sw_mlen);
633 /* tag matches, decrypt data */
634 for (i = 0; i < crp->crp_payload_length; i += blksz) {
635 len = MIN(crp->crp_payload_length - i, blksz);
638 crypto_copydata(crp, crp->crp_payload_start + i, len,
640 exf->decrypt(swe->sw_kschedule, blk);
641 crypto_copyback(crp, crp->crp_payload_start + i, len,
645 /* Inject the authentication data */
646 crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen,
/*
 * CCM CBC-MAC (authentication-only) handler: the payload is treated as
 * the auth data.  The CBC-MAC context must learn both lengths before
 * any Update, hence the explicit authDataLength/cryptDataLength setup.
 */
654 swcr_ccm_cbc_mac(struct swcr_session *ses, struct cryptop *crp)
656 uint32_t blkbuf[howmany(EALG_MAX_BLOCK_LEN, sizeof(uint32_t))];
657 u_char *blk = (u_char *)blkbuf;
658 u_char aalg[AALG_MAX_RESULT_LEN];
659 u_char uaalg[AALG_MAX_RESULT_LEN];
660 u_char iv[EALG_MAX_BLOCK_LEN];
662 struct swcr_auth *swa;
663 struct auth_hash *axf;
664 int blksz, i, ivlen, len;
666 swa = &ses->swcr_auth;
/* Clone the keyed session context; it must stay reusable. */
669 bcopy(swa->sw_ictx, &ctx, axf->ctxsize);
670 blksz = axf->blocksize;
/* Digest-only mode cannot generate an IV (error path elided). */
672 if (crp->crp_flags & CRYPTO_F_IV_GENERATE)
675 /* Initialize the IV */
676 ivlen = AES_CCM_IV_LEN;
677 if (crp->crp_flags & CRYPTO_F_IV_SEPARATE)
678 bcopy(crp->crp_iv, iv, ivlen);
680 crypto_copydata(crp, crp->crp_iv_start, ivlen, iv);
683  * AES CCM-CBC-MAC needs to know the length of both the auth
684  * data and payload data before doing the auth computation.
/* MAC-only: all input counts as auth data, no encrypted data. */
686 ctx.aes_cbc_mac_ctx.authDataLength = crp->crp_payload_length;
687 ctx.aes_cbc_mac_ctx.cryptDataLength = 0;
689 axf->Reinit(&ctx, iv, ivlen);
/* MAC the payload in zero-padded block-sized chunks. */
690 for (i = 0; i < crp->crp_payload_length; i += blksz) {
691 len = MIN(crp->crp_payload_length - i, blksz);
692 crypto_copydata(crp, crp->crp_payload_start + i, len, blk);
693 bzero(blk + len, blksz - len);
694 axf->Update(&ctx, blk, blksz);
698 axf->Final(aalg, &ctx);
700 if (crp->crp_op & CRYPTO_OP_VERIFY_DIGEST) {
701 crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
/* Constant-time tag comparison. */
703 if (timingsafe_bcmp(aalg, uaalg, swa->sw_mlen) != 0)
706 /* Inject the authentication data */
707 crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen, aalg);
/*
 * AES-CCM AEAD handler.  Unlike GCM, CCM authenticates the PLAINTEXT,
 * so on decrypt the data must be decrypted once to compute the tag and
 * a second time (after reinit) to produce the output once the tag
 * verifies — see the in-line comment below.
 */
713 swcr_ccm(struct swcr_session *ses, struct cryptop *crp)
715 uint32_t blkbuf[howmany(EALG_MAX_BLOCK_LEN, sizeof(uint32_t))];
716 u_char *blk = (u_char *)blkbuf;
717 u_char aalg[AALG_MAX_RESULT_LEN];
718 u_char uaalg[AALG_MAX_RESULT_LEN];
719 u_char iv[EALG_MAX_BLOCK_LEN];
721 struct swcr_auth *swa;
722 struct swcr_encdec *swe;
723 struct auth_hash *axf;
724 struct enc_xform *exf;
725 int blksz, i, ivlen, len, r;
727 swa = &ses->swcr_auth;
730 bcopy(swa->sw_ictx, &ctx, axf->ctxsize);
731 blksz = axf->blocksize;
733 swe = &ses->swcr_encdec;
/* AEAD requires a separately supplied IV (error path elided). */
736 if ((crp->crp_flags & CRYPTO_F_IV_SEPARATE) == 0)
739 /* Initialize the IV */
740 ivlen = AES_CCM_IV_LEN;
741 bcopy(crp->crp_iv, iv, ivlen);
744  * AES CCM-CBC-MAC needs to know the length of both the auth
745  * data and payload data before doing the auth computation.
747 ctx.aes_cbc_mac_ctx.authDataLength = crp->crp_aad_length;
748 ctx.aes_cbc_mac_ctx.cryptDataLength = crp->crp_payload_length;
750 /* Supply MAC with IV */
751 axf->Reinit(&ctx, iv, ivlen);
753 /* Supply MAC with AAD */
754 for (i = 0; i < crp->crp_aad_length; i += blksz) {
755 len = MIN(crp->crp_aad_length - i, blksz);
756 crypto_copydata(crp, crp->crp_aad_start + i, len, blk);
757 bzero(blk + len, blksz - len);
758 axf->Update(&ctx, blk, blksz);
/* Reset the CTR keystream to the start of this IV. */
761 exf->reinit(swe->sw_kschedule, iv);
763 /* Do encryption/decryption with MAC */
764 for (i = 0; i < crp->crp_payload_length; i += len) {
765 len = MIN(crp->crp_payload_length - i, blksz);
768 crypto_copydata(crp, crp->crp_payload_start + i, len, blk);
769 if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
/* CCM hashes the plaintext: Update first, then encrypt. */
770 axf->Update(&ctx, blk, len);
771 exf->encrypt(swe->sw_kschedule, blk);
772 crypto_copyback(crp, crp->crp_payload_start + i, len,
776  * One of the problems with CCM+CBC is that
777  * the authentication is done on the
778  * unecncrypted data.  As a result, we have to
779  * decrypt the data twice: once to generate
780  * the tag and a second time after the tag is
783 exf->decrypt(swe->sw_kschedule, blk);
784 axf->Update(&ctx, blk, len);
789 axf->Final(aalg, &ctx);
792 if (!CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
793 crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
/* Constant-time tag check before writing any plaintext back. */
796 r = timingsafe_bcmp(aalg, uaalg, swa->sw_mlen);
800 /* tag matches, decrypt data */
/* Second decryption pass: restart the keystream from the IV. */
801 exf->reinit(swe->sw_kschedule, iv);
802 for (i = 0; i < crp->crp_payload_length; i += blksz) {
803 len = MIN(crp->crp_payload_length - i, blksz);
806 crypto_copydata(crp, crp->crp_payload_start + i, len,
808 exf->decrypt(swe->sw_kschedule, blk);
809 crypto_copyback(crp, crp->crp_payload_start + i, len,
813 /* Inject the authentication data */
814 crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen,
822  * Apply a cipher and a digest to perform EtA.
/*
 * Encrypt-then-Authenticate composition: encrypt before hashing on the
 * way out, verify the digest before decrypting on the way in.  The
 * second step only runs if the first succeeded (error checks elided).
 */
825 swcr_eta(struct swcr_session *ses, struct cryptop *crp)
829 if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
830 error = swcr_encdec(ses, crp);
832 error = swcr_authcompute(ses, crp);
834 error = swcr_authcompute(ses, crp);
836 error = swcr_encdec(ses, crp);
842  * Apply a compression/decompression algorithm
/*
 * Copies the whole payload into a contiguous temporary buffer, runs
 * the (de)compressor, writes the result back, and shrinks the request
 * buffer when compression reduced the size.  crp_olen reports the
 * output length to the caller.
 */
845 swcr_compdec(struct swcr_session *ses, struct cryptop *crp)
847 u_int8_t *data, *out;
848 struct comp_algo *cxf;
852 cxf = ses->swcr_compdec.sw_cxf;
854 /* We must handle the whole buffer of data in one time
855  * then if there is not all the data in the mbuf, we must
/* M_NOWAIT: this can run from contexts that must not sleep; NULL
 * check/ENOMEM path elided in this excerpt. */
859 data = malloc(crp->crp_payload_length, M_CRYPTO_DATA, M_NOWAIT);
862 crypto_copydata(crp, crp->crp_payload_start, crp->crp_payload_length,
865 if (CRYPTO_OP_IS_COMPRESS(crp->crp_op))
/* compress/decompress allocate 'out' themselves; result is the
 * output byte count (0 on failure — check elided). */
866 result = cxf->compress(data, crp->crp_payload_length, &out);
868 result = cxf->decompress(data, crp->crp_payload_length, &out);
870 free(data, M_CRYPTO_DATA);
873 crp->crp_olen = result;
875 /* Check the compressed size when doing compression */
876 if (CRYPTO_OP_IS_COMPRESS(crp->crp_op)) {
877 if (result >= crp->crp_payload_length) {
878 /* Compression was useless, we lost time */
/* Leave the original data untouched in the buffer. */
879 free(out, M_CRYPTO_DATA);
884 /* Copy back the (de)compressed data. m_copyback is
885  * extending the mbuf as necessary.
887 crypto_copyback(crp, crp->crp_payload_start, result, out);
/* Output shrank: trim the request buffer down to the new length. */
888 if (result < crp->crp_payload_length) {
889 switch (crp->crp_buf_type) {
890 case CRYPTO_BUF_MBUF:
/* Negative adjustment trims from the mbuf chain's tail. */
891 adj = result - crp->crp_payload_length;
892 m_adj(crp->crp_mbuf, adj);
894 case CRYPTO_BUF_UIO: {
895 struct uio *uio = crp->crp_uio;
898 adj = crp->crp_payload_length - result;
899 ind = uio->uio_iovcnt - 1;
/* Walk iovecs backwards, shortening/zeroing until the excess is
 * consumed. */
901 while (adj > 0 && ind >= 0) {
902 if (adj < uio->uio_iov[ind].iov_len) {
903 uio->uio_iov[ind].iov_len -= adj;
907 adj -= uio->uio_iov[ind].iov_len;
908 uio->uio_iov[ind].iov_len = 0;
916 free(out, M_CRYPTO_DATA);
/*
 * Session setup for a plain cipher: look up the transform for the
 * session parameters and, when a session key was supplied, expand it
 * into sw_kschedule now.  Returns 0 or the transform's setkey error.
 */
921 swcr_setup_encdec(struct swcr_session *ses,
922     const struct crypto_session_params *csp)
924 struct swcr_encdec *swe;
925 struct enc_xform *txf;
928 swe = &ses->swcr_encdec;
929 txf = crypto_cipher(csp);
/* probesession already validated the IV length; assert it here. */
930 MPASS(txf->ivsize == csp->csp_ivlen);
/* Key may instead arrive per-request (crp_cipher_key); only expand
 * the schedule when a session key exists. */
931 if (csp->csp_cipher_key != NULL) {
932 error = txf->setkey(&swe->sw_kschedule,
933     csp->csp_cipher_key, csp->csp_cipher_klen);
/*
 * Session setup for an auth algorithm: validates/derives the MAC
 * length (sw_mlen), allocates the hash context(s), performs any key
 * precomputation, and — in pure digest mode — installs the request
 * handler.  Returns 0, EINVAL, or ENOMEM (error paths elided).
 */
942 swcr_setup_auth(struct swcr_session *ses,
943     const struct crypto_session_params *csp)
945 struct swcr_auth *swa;
946 struct auth_hash *axf;
948 swa = &ses->swcr_auth;
950 axf = crypto_auth_hash(csp);
/* mlen of 0 means "full hash"; anything beyond hashsize is invalid. */
952 if (csp->csp_auth_mlen < 0 || csp->csp_auth_mlen > axf->hashsize)
954 if (csp->csp_auth_mlen == 0)
955 swa->sw_mlen = axf->hashsize;
957 swa->sw_mlen = csp->csp_auth_mlen;
958 swa->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA, M_NOWAIT);
959 if (swa->sw_ictx == NULL)
962 switch (csp->csp_auth_alg) {
963 case CRYPTO_MD5_HMAC:
964 case CRYPTO_SHA1_HMAC:
965 case CRYPTO_SHA2_224_HMAC:
966 case CRYPTO_SHA2_256_HMAC:
967 case CRYPTO_SHA2_384_HMAC:
968 case CRYPTO_SHA2_512_HMAC:
969 case CRYPTO_NULL_HMAC:
970 case CRYPTO_RIPEMD160_HMAC:
/* HMAC: sw_octx holds the precomputed opad context. */
971 swa->sw_octx_len = axf->ctxsize;
972 swa->sw_octx = malloc(swa->sw_octx_len, M_CRYPTO_DATA,
974 if (swa->sw_octx == NULL)
/* Key may instead arrive per-request; prepare only if present. */
977 if (csp->csp_auth_key != NULL) {
978 swcr_authprepare(axf, swa, csp->csp_auth_key,
982 if (csp->csp_mode == CSP_MODE_DIGEST)
983 ses->swcr_process = swcr_authcompute;
985 case CRYPTO_MD5_KPDK:
986 case CRYPTO_SHA1_KPDK:
/* KPDK: sw_octx stores the raw key, appended after the data in
 * swcr_authcompute(). */
987 swa->sw_octx_len = csp->csp_auth_klen;
988 swa->sw_octx = malloc(swa->sw_octx_len, M_CRYPTO_DATA,
990 if (swa->sw_octx == NULL)
993 /* Store the key so we can "append" it to the payload */
994 if (csp->csp_auth_key != NULL) {
995 swcr_authprepare(axf, swa, csp->csp_auth_key,
999 if (csp->csp_mode == CSP_MODE_DIGEST)
1000 ses->swcr_process = swcr_authcompute;
1006 case CRYPTO_SHA2_224:
1007 case CRYPTO_SHA2_256:
1008 case CRYPTO_SHA2_384:
1009 case CRYPTO_SHA2_512:
/* Unkeyed digests need only an initialized context. */
1010 axf->Init(swa->sw_ictx);
1011 if (csp->csp_mode == CSP_MODE_DIGEST)
1012 ses->swcr_process = swcr_authcompute;
1014 case CRYPTO_AES_NIST_GMAC:
1015 axf->Init(swa->sw_ictx);
1016 axf->Setkey(swa->sw_ictx, csp->csp_auth_key,
1017     csp->csp_auth_klen);
1018 if (csp->csp_mode == CSP_MODE_DIGEST)
1019 ses->swcr_process = swcr_gmac;
1021 case CRYPTO_POLY1305:
1022 case CRYPTO_BLAKE2B:
1023 case CRYPTO_BLAKE2S:
1025  * Blake2b and Blake2s support an optional key but do
1028 if (csp->csp_auth_klen == 0 || csp->csp_auth_key != NULL)
1029 axf->Setkey(swa->sw_ictx, csp->csp_auth_key,
1030     csp->csp_auth_klen);
1031 axf->Init(swa->sw_ictx);
1032 if (csp->csp_mode == CSP_MODE_DIGEST)
1033 ses->swcr_process = swcr_authcompute;
1035 case CRYPTO_AES_CCM_CBC_MAC:
1036 axf->Init(swa->sw_ictx);
1037 axf->Setkey(swa->sw_ictx, csp->csp_auth_key,
1038     csp->csp_auth_klen);
1039 if (csp->csp_mode == CSP_MODE_DIGEST)
1040 ses->swcr_process = swcr_ccm_cbc_mac;
/*
 * AEAD session setup for AES-GCM: pick the GMAC auth transform by key
 * size, key both the auth and cipher sides from the single cipher key.
 * Returns 0, EINVAL, or ENOMEM (error paths elided).
 */
1048 swcr_setup_gcm(struct swcr_session *ses,
1049     const struct crypto_session_params *csp)
1051 struct swcr_encdec *swe;
1052 struct swcr_auth *swa;
1053 struct enc_xform *txf;
1054 struct auth_hash *axf;
/* GCM in this driver only accepts the standard 12-byte IV. */
1057 if (csp->csp_ivlen != AES_GCM_IV_LEN)
1060 /* First, setup the auth side. */
1061 swa = &ses->swcr_auth;
1062 switch (csp->csp_cipher_klen * 8) {
1064 axf = &auth_hash_nist_gmac_aes_128;
1067 axf = &auth_hash_nist_gmac_aes_192;
1070 axf = &auth_hash_nist_gmac_aes_256;
/* mlen of 0 means "full tag". */
1076 if (csp->csp_auth_mlen < 0 || csp->csp_auth_mlen > axf->hashsize)
1078 if (csp->csp_auth_mlen == 0)
1079 swa->sw_mlen = axf->hashsize;
1081 swa->sw_mlen = csp->csp_auth_mlen;
1082 swa->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA, M_NOWAIT);
1083 if (swa->sw_ictx == NULL)
1085 axf->Init(swa->sw_ictx);
/* AEAD: the GMAC side is keyed with the cipher key. */
1086 if (csp->csp_cipher_key != NULL)
1087 axf->Setkey(swa->sw_ictx, csp->csp_cipher_key,
1088     csp->csp_cipher_klen);
1090 /* Second, setup the cipher side. */
1091 swe = &ses->swcr_encdec;
1092 txf = &enc_xform_aes_nist_gcm;
1093 if (csp->csp_cipher_key != NULL) {
1094 error = txf->setkey(&swe->sw_kschedule,
1095     csp->csp_cipher_key, csp->csp_cipher_klen);
/*
 * AEAD session setup for AES-CCM: mirrors swcr_setup_gcm() with the
 * CBC-MAC auth transforms and the CCM cipher transform.
 * Returns 0, EINVAL, or ENOMEM (error paths elided).
 */
1105 swcr_setup_ccm(struct swcr_session *ses,
1106     const struct crypto_session_params *csp)
1108 struct swcr_encdec *swe;
1109 struct swcr_auth *swa;
1110 struct enc_xform *txf;
1111 struct auth_hash *axf;
/* CCM in this driver only accepts the fixed 12-byte nonce. */
1114 if (csp->csp_ivlen != AES_CCM_IV_LEN)
1117 /* First, setup the auth side. */
1118 swa = &ses->swcr_auth;
1119 switch (csp->csp_cipher_klen * 8) {
1121 axf = &auth_hash_ccm_cbc_mac_128;
1124 axf = &auth_hash_ccm_cbc_mac_192;
1127 axf = &auth_hash_ccm_cbc_mac_256;
1133 if (csp->csp_auth_mlen < 0 || csp->csp_auth_mlen > axf->hashsize)
1135 if (csp->csp_auth_mlen == 0)
1136 swa->sw_mlen = axf->hashsize;
1138 swa->sw_mlen = csp->csp_auth_mlen;
1139 swa->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA, M_NOWAIT);
1140 if (swa->sw_ictx == NULL)
1142 axf->Init(swa->sw_ictx);
/* AEAD: the CBC-MAC side is keyed with the cipher key. */
1143 if (csp->csp_cipher_key != NULL)
1144 axf->Setkey(swa->sw_ictx, csp->csp_cipher_key,
1145     csp->csp_cipher_klen);
1147 /* Second, setup the cipher side. */
1148 swe = &ses->swcr_encdec;
1149 txf = &enc_xform_ccm;
1150 if (csp->csp_cipher_key != NULL) {
1151 error = txf->setkey(&swe->sw_kschedule,
1152     csp->csp_cipher_key, csp->csp_cipher_klen);
/*
 * Probe helper: can this driver service the requested auth algorithm
 * with the given key length and IV length?  Returns true/false
 * (returns elided in this excerpt).
 */
1162 swcr_auth_supported(const struct crypto_session_params *csp)
1164 struct auth_hash *axf;
1166 axf = crypto_auth_hash(csp);
1169 switch (csp->csp_auth_alg) {
1170 case CRYPTO_MD5_HMAC:
1171 case CRYPTO_SHA1_HMAC:
1172 case CRYPTO_SHA2_224_HMAC:
1173 case CRYPTO_SHA2_256_HMAC:
1174 case CRYPTO_SHA2_384_HMAC:
1175 case CRYPTO_SHA2_512_HMAC:
1176 case CRYPTO_NULL_HMAC:
1177 case CRYPTO_RIPEMD160_HMAC:
1178 case CRYPTO_MD5_KPDK:
1179 case CRYPTO_SHA1_KPDK:
/* Always supported, any key length. */
1181 case CRYPTO_AES_NIST_GMAC:
/* GMAC: only AES key sizes, a session key is mandatory (per-request
 * rekey is not supported here), IV must be the GCM nonce size. */
1182 switch (csp->csp_auth_klen * 8) {
1190 if (csp->csp_auth_key == NULL)
1192 if (csp->csp_ivlen != AES_GCM_IV_LEN)
1195 case CRYPTO_POLY1305:
1196 if (csp->csp_auth_klen != POLY1305_KEY_LEN)
1199 case CRYPTO_AES_CCM_CBC_MAC:
/* Same constraints as GMAC, with the CCM nonce size. */
1200 switch (csp->csp_auth_klen * 8) {
1208 if (csp->csp_auth_key == NULL)
1210 if (csp->csp_ivlen != AES_CCM_IV_LEN)
/*
 * Probe helper: a cipher is supported when a transform exists and the
 * requested IV length matches it (NULL_CBC is exempt from the IV
 * check).  Returns true/false (returns elided in this excerpt).
 */
1218 swcr_cipher_supported(const struct crypto_session_params *csp)
1220 struct enc_xform *txf;
1222 txf = crypto_cipher(csp);
1225 if (csp->csp_cipher_alg != CRYPTO_NULL_CBC &&
1226     txf->ivsize != csp->csp_ivlen)
/*
 * cryptodev probesession method: accept any parameter combination the
 * software implementation can handle, advertising itself at software
 * priority so hardware drivers win ties.
 */
1232 swcr_probesession(device_t dev, const struct crypto_session_params *csp)
/* No session flags are supported. */
1235 if (csp->csp_flags != 0)
1237 switch (csp->csp_mode) {
1238 case CSP_MODE_COMPRESS:
1239 switch (csp->csp_cipher_alg) {
1240 case CRYPTO_DEFLATE_COMP:
1246 case CSP_MODE_CIPHER:
1247 switch (csp->csp_cipher_alg) {
/* GCM/CCM are AEAD-only; reject them as plain ciphers. */
1248 case CRYPTO_AES_NIST_GCM_16:
1249 case CRYPTO_AES_CCM_16:
1252 if (!swcr_cipher_supported(csp))
1257 case CSP_MODE_DIGEST:
1258 if (!swcr_auth_supported(csp))
/* AEAD mode: only the two combined algorithms qualify. */
1262 switch (csp->csp_cipher_alg) {
1263 case CRYPTO_AES_NIST_GCM_16:
1264 case CRYPTO_AES_CCM_16:
1271 /* AEAD algorithms cannot be used for EtA. */
1272 switch (csp->csp_cipher_alg) {
1273 case CRYPTO_AES_NIST_GCM_16:
1274 case CRYPTO_AES_CCM_16:
1277 switch (csp->csp_auth_alg) {
1278 case CRYPTO_AES_NIST_GMAC:
1279 case CRYPTO_AES_CCM_CBC_MAC:
1283 if (!swcr_cipher_supported(csp) ||
1284     !swcr_auth_supported(csp))
1291 return (CRYPTODEV_PROBE_SOFTWARE);
1295  * Generate a new software session.
/*
 * cryptodev newsession method: initialize the per-session lock, then
 * dispatch on csp_mode to set up cipher/auth/AEAD/compression state
 * and install the matching swcr_process handler.  The panic() cases
 * are combinations probesession already rejected.  On error the
 * partially built session is torn down via swcr_freesession().
 */
1298 swcr_newsession(device_t dev, crypto_session_t cses,
1299     const struct crypto_session_params *csp)
1301 struct swcr_session *ses;
1302 struct swcr_encdec *swe;
1303 struct swcr_auth *swa;
1304 struct comp_algo *cxf;
1307 ses = crypto_get_driver_session(cses);
/* Serializes all request processing for this session. */
1308 mtx_init(&ses->swcr_lock, "swcr session lock", NULL, MTX_DEF);
1311 swe = &ses->swcr_encdec;
1312 swa = &ses->swcr_auth;
1313 switch (csp->csp_mode) {
1314 case CSP_MODE_COMPRESS:
1315 switch (csp->csp_cipher_alg) {
1316 case CRYPTO_DEFLATE_COMP:
1317 cxf = &comp_algo_deflate;
/* probesession filtered algorithms; anything else is a bug. */
1321 panic("bad compression algo");
1324 ses->swcr_compdec.sw_cxf = cxf;
1325 ses->swcr_process = swcr_compdec;
1327 case CSP_MODE_CIPHER:
1328 switch (csp->csp_cipher_alg) {
1329 case CRYPTO_NULL_CBC:
1330 ses->swcr_process = swcr_null;
1333 case CRYPTO_AES_NIST_GCM_16:
1334 case CRYPTO_AES_CCM_16:
/* AEAD algorithms must arrive in CSP_MODE_AEAD. */
1335 panic("bad cipher algo");
1338 error = swcr_setup_encdec(ses, csp);
1340 ses->swcr_process = swcr_encdec;
1343 case CSP_MODE_DIGEST:
1344 error = swcr_setup_auth(ses, csp);
1347 switch (csp->csp_cipher_alg) {
1348 case CRYPTO_AES_NIST_GCM_16:
1349 error = swcr_setup_gcm(ses, csp);
1351 ses->swcr_process = swcr_gcm;
1353 case CRYPTO_AES_CCM_16:
1354 error = swcr_setup_ccm(ses, csp);
1356 ses->swcr_process = swcr_ccm;
1360 panic("bad aead algo");
/* CSP_MODE_ETA: set up both halves, cipher side second. */
1366 switch (csp->csp_cipher_alg) {
1367 case CRYPTO_AES_NIST_GCM_16:
1368 case CRYPTO_AES_CCM_16:
1369 panic("bad eta cipher algo");
1371 switch (csp->csp_auth_alg) {
1372 case CRYPTO_AES_NIST_GMAC:
1373 case CRYPTO_AES_CCM_CBC_MAC:
1374 panic("bad eta auth algo");
1378 error = swcr_setup_auth(ses, csp);
1381 if (csp->csp_cipher_alg == CRYPTO_NULL_CBC) {
1382 /* Effectively degrade to digest mode. */
1383 ses->swcr_process = swcr_authcompute;
1387 error = swcr_setup_encdec(ses, csp);
1389 ses->swcr_process = swcr_eta;
/* Error path: release whatever was allocated above. */
1396 swcr_freesession(dev, cses);
/*
 * cryptodev freesession method: destroy the session lock and release
 * cipher key schedules and auth contexts.  Secret-bearing auth state
 * is scrubbed with explicit_bzero() before freeing so the compiler
 * cannot elide the wipe.
 */
1401 swcr_freesession(device_t dev, crypto_session_t cses)
1403 struct swcr_session *ses;
1404 struct swcr_auth *swa;
1405 struct enc_xform *txf;
1406 struct auth_hash *axf;
1408 ses = crypto_get_driver_session(cses);
1410 mtx_destroy(&ses->swcr_lock);
1412 txf = ses->swcr_encdec.sw_exf;
/* zerokey() both wipes and frees the expanded key schedule. */
1414 if (ses->swcr_encdec.sw_kschedule != NULL)
1415 txf->zerokey(&(ses->swcr_encdec.sw_kschedule));
1418 axf = ses->swcr_auth.sw_axf;
1420 swa = &ses->swcr_auth;
1421 if (swa->sw_ictx != NULL) {
1422 explicit_bzero(swa->sw_ictx, axf->ctxsize);
1423 free(swa->sw_ictx, M_CRYPTO_DATA);
/* sw_octx holds either an opad context or a raw KPDK key. */
1425 if (swa->sw_octx != NULL) {
1426 explicit_bzero(swa->sw_octx, swa->sw_octx_len);
1427 free(swa->sw_octx, M_CRYPTO_DATA);
1433  * Process a software request.
/*
 * cryptodev process method: dispatch to the handler chosen at
 * newsession time under the session lock (requests on one session are
 * fully serialized), storing the result in crp_etype.
 */
1436 swcr_process(device_t dev, struct cryptop *crp, int hint)
1438 struct swcr_session *ses;
1440 ses = crypto_get_driver_session(crp->crp_session);
1441 mtx_lock(&ses->swcr_lock);
1443 crp->crp_etype = ses->swcr_process(ses, crp);
1445 mtx_unlock(&ses->swcr_lock);
/*
 * Newbus identify hook: add a single "cryptosoft" child under the
 * parent bus if one does not already exist.
 */
1451 swcr_identify(driver_t *drv, device_t parent)
1453 /* NB: order 10 is so we get attached after h/w devices */
1454 if (device_find_child(parent, "cryptosoft", -1) == NULL &&
1455     BUS_ADD_CHILD(parent, 10, "cryptosoft", 0) == 0)
1456 panic("cryptosoft: could not attach");
/* Newbus probe: always succeeds for the device we added ourselves. */
1460 swcr_probe(device_t dev)
1462 device_set_desc(dev, "software crypto");
1463 return (BUS_PROBE_NOWILDCARD);
/*
 * Newbus attach: register with the crypto framework as a synchronous
 * software driver; swcr_id identifies us for later unregistration.
 */
1467 swcr_attach(device_t dev)
1470 swcr_id = crypto_get_driverid(dev, sizeof(struct swcr_session),
1471     CRYPTOCAP_F_SOFTWARE | CRYPTOCAP_F_SYNC);
/* Negative id means registration failed (check elided). */
1473 device_printf(dev, "cannot initialize!");
/* Newbus detach: drop all crypto framework registrations. */
1481 swcr_detach(device_t dev)
1483 crypto_unregister_all(swcr_id);
/* Device method table: newbus lifecycle plus the cryptodev interface
 * methods declared in cryptodev_if.h. */
1487 static device_method_t swcr_methods[] = {
1488 DEVMETHOD(device_identify, swcr_identify),
1489 DEVMETHOD(device_probe, swcr_probe),
1490 DEVMETHOD(device_attach, swcr_attach),
1491 DEVMETHOD(device_detach, swcr_detach),
1493 DEVMETHOD(cryptodev_probesession, swcr_probesession),
1494 DEVMETHOD(cryptodev_newsession, swcr_newsession),
1495 DEVMETHOD(cryptodev_freesession,swcr_freesession),
1496 DEVMETHOD(cryptodev_process, swcr_process),
1501 static driver_t swcr_driver = {
1504 0,		/* NB: no softc */
1506 static devclass_t swcr_devclass;
1509  * NB: We explicitly reference the crypto module so we
1510  * get the necessary ordering when built as a loadable
1511  * module.  This is required because we bundle the crypto
1512  * module code together with the cryptosoft driver (otherwise
1513  * normal module dependencies would handle things).
1515 extern int crypto_modevent(struct module *, int, void *);
1516 /* XXX where to attach */
/* Attach under nexus; crypto_modevent handles load/unload ordering. */
1517 DRIVER_MODULE(cryptosoft, nexus, swcr_driver, swcr_devclass, crypto_modevent,0);
1518 MODULE_VERSION(cryptosoft, 1);
1519 MODULE_DEPEND(cryptosoft, crypto, 1, 1, 1);