/* $OpenBSD: cryptosoft.c,v 1.35 2002/04/26 08:43:50 deraadt Exp $ */
/*
 * The author of this code is Angelos D. Keromytis (angelos@cis.upenn.edu)
 * Copyright (c) 2002-2006 Sam Leffler, Errno Consulting
 *
 * This code was written by Angelos D. Keromytis in Athens, Greece, in
 * February 2000. Network Security Technologies Inc. (NSTI) kindly
 * supported the development of this code.
 *
 * Copyright (c) 2000, 2001 Angelos D. Keromytis
 * Copyright (c) 2014-2021 The FreeBSD Foundation
 * All rights reserved.
 *
 * Portions of this software were developed by John-Mark Gurney
 * under sponsorship of the FreeBSD Foundation and
 * Rubicon Communications, LLC (Netgate).
 *
 * Portions of this software were developed by Ararat River
 * Consulting, LLC under sponsorship of the FreeBSD Foundation.
 *
 * Permission to use, copy, and modify this software with or without fee
 * is hereby granted, provided that this entire notice is included in
 * all source code copies of any software which is or includes a copy or
 * modification of this software.
 *
 * THIS SOFTWARE IS BEING PROVIDED "AS IS", WITHOUT ANY EXPRESS OR
 * IMPLIED WARRANTY. IN PARTICULAR, NONE OF THE AUTHORS MAKES ANY
 * REPRESENTATION OR WARRANTY OF ANY KIND CONCERNING THE
 * MERCHANTABILITY OF THIS SOFTWARE OR ITS FITNESS FOR ANY PARTICULAR
 * PURPOSE.
 */
#include <sys/cdefs.h>
__FBSDID("$FreeBSD$");

#include <sys/param.h>
#include <sys/systm.h>
#include <sys/malloc.h>
#include <sys/module.h>
#include <sys/sysctl.h>
#include <sys/errno.h>
#include <sys/random.h>
#include <sys/kernel.h>
#include <sys/endian.h>
#include <sys/limits.h>

#include <crypto/sha1.h>
#include <opencrypto/rmd160.h>

#include <opencrypto/cryptodev.h>
#include <opencrypto/xform.h>

#include "cryptodev_if.h"
	const struct auth_hash *sw_axf;

	const struct enc_xform *sw_exf;

	const struct comp_algo *sw_cxf;

	int (*swcr_process)(const struct swcr_session *, struct cryptop *);
	struct swcr_auth swcr_auth;
	struct swcr_encdec swcr_encdec;
	struct swcr_compdec swcr_compdec;

static int32_t swcr_id;

static void swcr_freesession(device_t dev, crypto_session_t cses);
/* Used for CRYPTO_NULL_CBC. */
swcr_null(const struct swcr_session *ses, struct cryptop *crp)

/*
 * Apply a symmetric encryption/decryption algorithm.
 */
swcr_encdec(const struct swcr_session *ses, struct cryptop *crp)
	unsigned char blk[EALG_MAX_BLOCK_LEN];
	const struct crypto_session_params *csp;
	const struct enc_xform *exf;
	const struct swcr_encdec *sw;
	size_t inlen, outlen, todo;
	struct crypto_buffer_cursor cc_in, cc_out;
	const unsigned char *inblk;
	unsigned char *outblk;

	sw = &ses->swcr_encdec;
	csp = crypto_get_params(crp->crp_session);

	if (exf->native_blocksize == 0) {
		/* Check for non-padded data */
		if ((crp->crp_payload_length % exf->blocksize) != 0)

		blks = exf->blocksize;
		blks = exf->native_blocksize;

	if (exf == &enc_xform_aes_icm &&
	    (crp->crp_flags & CRYPTO_F_IV_SEPARATE) == 0)

	ctx = __builtin_alloca(exf->ctxsize);
	if (crp->crp_cipher_key != NULL) {
		error = exf->setkey(ctx, crp->crp_cipher_key,
		    csp->csp_cipher_klen);
		memcpy(ctx, sw->sw_ctx, exf->ctxsize);

	crypto_read_iv(crp, blk);
	exf->reinit(ctx, blk, csp->csp_ivlen);

	crypto_cursor_init(&cc_in, &crp->crp_buf);
	crypto_cursor_advance(&cc_in, crp->crp_payload_start);
	inblk = crypto_cursor_segment(&cc_in, &inlen);
	if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
		crypto_cursor_init(&cc_out, &crp->crp_obuf);
		crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
	outblk = crypto_cursor_segment(&cc_out, &outlen);

	encrypting = CRYPTO_OP_IS_ENCRYPT(crp->crp_op);
	/*
	 * Loop through encrypting blocks.  'inlen' is the remaining
	 * length of the current segment in the input buffer.
	 * 'outlen' is the remaining length of current segment in the
	 * output buffer.
	 */
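	/*
	 * Fast path: when the current input and output segments each
	 * hold at least one full block, whole runs of blocks are
	 * processed in place via encrypt_multi()/decrypt_multi();
	 * otherwise a single block is bounced through the local 'blk'
	 * buffer.
	 */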
	for (resid = crp->crp_payload_length; resid >= blks; resid -= todo) {
		/*
		 * If the current block is not contained within the
		 * current input/output segment, use 'blk' as a local
		 * buffer.
		 */
			crypto_cursor_copydata(&cc_in, blks, blk);
			todo = rounddown2(MIN(resid, MIN(inlen, outlen)), blks);

			exf->encrypt_multi(ctx, inblk, outblk, todo);
			exf->decrypt_multi(ctx, inblk, outblk, todo);

			inblk = crypto_cursor_segment(&cc_in, &inlen);
			crypto_cursor_advance(&cc_in, todo);
				inblk = crypto_cursor_segment(&cc_in, &inlen);

			crypto_cursor_copyback(&cc_out, blks, blk);
			outblk = crypto_cursor_segment(&cc_out, &outlen);
			crypto_cursor_advance(&cc_out, todo);
				outblk = crypto_cursor_segment(&cc_out,
				    &outlen);
	/* Handle trailing partial block for stream ciphers. */
		KASSERT(exf->native_blocksize != 0,
		    ("%s: partial block of %d bytes for cipher %s",
		    __func__, resid, exf->name));
		KASSERT(resid < blks, ("%s: partial block too big", __func__));
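		/*
		 * The trailing partial block is shorter than a native
		 * block; encrypt_last()/decrypt_last() process exactly
		 * 'resid' bytes, which only stream-style transforms
		 * (native_blocksize != 0) support.
		 */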
		inblk = crypto_cursor_segment(&cc_in, &inlen);
		outblk = crypto_cursor_segment(&cc_out, &outlen);

			crypto_cursor_copydata(&cc_in, resid, blk);

			exf->encrypt_last(ctx, inblk, outblk, resid);
			exf->decrypt_last(ctx, inblk, outblk, resid);

			crypto_cursor_copyback(&cc_out, resid, blk);

	explicit_bzero(ctx, exf->ctxsize);
	explicit_bzero(blk, sizeof(blk));
/*
 * Compute or verify hash.
 */
swcr_authcompute(const struct swcr_session *ses, struct cryptop *crp)
		u_char aalg[HASH_MAX_LEN];
		u_char uaalg[HASH_MAX_LEN];
	const struct crypto_session_params *csp;
	const struct swcr_auth *sw;
	const struct auth_hash *axf;

	sw = &ses->swcr_auth;

	csp = crypto_get_params(crp->crp_session);
	if (crp->crp_auth_key != NULL) {
			hmac_init_ipad(axf, crp->crp_auth_key,
			    csp->csp_auth_klen, &s.ctx);
			axf->Setkey(&s.ctx, crp->crp_auth_key,
			    csp->csp_auth_klen);
		memcpy(&s.ctx, sw->sw_ictx, axf->ctxsize);

	if (crp->crp_aad != NULL)
		err = axf->Update(&s.ctx, crp->crp_aad, crp->crp_aad_length);
		err = crypto_apply(crp, crp->crp_aad_start, crp->crp_aad_length,
		    axf->Update, &s.ctx);

	if (CRYPTO_HAS_OUTPUT_BUFFER(crp) &&
	    CRYPTO_OP_IS_ENCRYPT(crp->crp_op))
		err = crypto_apply_buf(&crp->crp_obuf,
		    crp->crp_payload_output_start, crp->crp_payload_length,
		    axf->Update, &s.ctx);
		err = crypto_apply(crp, crp->crp_payload_start,
		    crp->crp_payload_length, axf->Update, &s.ctx);

	if (csp->csp_flags & CSP_F_ESN)
		axf->Update(&s.ctx, crp->crp_esn, 4);

	axf->Final(s.aalg, &s.ctx);

		if (crp->crp_auth_key != NULL)
			hmac_init_opad(axf, crp->crp_auth_key,
			    csp->csp_auth_klen, &s.ctx);
			memcpy(&s.ctx, sw->sw_octx, axf->ctxsize);
		axf->Update(&s.ctx, s.aalg, axf->hashsize);
		axf->Final(s.aalg, &s.ctx);

	if (crp->crp_op & CRYPTO_OP_VERIFY_DIGEST) {
		crypto_copydata(crp, crp->crp_digest_start, sw->sw_mlen, s.uaalg);
		if (timingsafe_bcmp(s.aalg, s.uaalg, sw->sw_mlen) != 0)
		/* Inject the authentication data */
		crypto_copyback(crp, crp->crp_digest_start, sw->sw_mlen, s.aalg);

	explicit_bzero(&s, sizeof(s));
CTASSERT(INT_MAX <= (1ll<<39) - 256);	/* GCM: plain text < 2^39-256 */
CTASSERT(INT_MAX <= (uint64_t)-1);	/* GCM: associated data <= 2^64-1 */
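/*
 * The asserts above hold because OCF request lengths are carried in
 * ints, so a single request can never exceed INT_MAX bytes and thus
 * stays below the GCM limits noted in the comments.
 */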
swcr_gmac(const struct swcr_session *ses, struct cryptop *crp)
		uint32_t blkbuf[howmany(AES_BLOCK_LEN, sizeof(uint32_t))];
		u_char tag[GMAC_DIGEST_LEN];
		u_char tag2[GMAC_DIGEST_LEN];
	u_char *blk = (u_char *)s.blkbuf;
	struct crypto_buffer_cursor cc;
	const struct swcr_auth *swa;
	const struct auth_hash *axf;
	int blksz, error, ivlen, resid;

	swa = &ses->swcr_auth;
	blksz = GMAC_BLOCK_LEN;
	KASSERT(axf->blocksize == blksz, ("%s: axf block size mismatch",
	    __func__));

	if (crp->crp_auth_key != NULL) {
		axf->Setkey(&s.ctx, crp->crp_auth_key,
		    crypto_get_params(crp->crp_session)->csp_auth_klen);
		memcpy(&s.ctx, swa->sw_ictx, axf->ctxsize);

	/* Initialize the IV */
	ivlen = AES_GCM_IV_LEN;
	crypto_read_iv(crp, blk);

	axf->Reinit(&s.ctx, blk, ivlen);
	crypto_cursor_init(&cc, &crp->crp_buf);
	crypto_cursor_advance(&cc, crp->crp_payload_start);
	for (resid = crp->crp_payload_length; resid >= blksz; resid -= len) {
		inblk = crypto_cursor_segment(&cc, &len);
			len = rounddown(MIN(len, resid), blksz);
			crypto_cursor_advance(&cc, len);
			crypto_cursor_copydata(&cc, len, blk);
		axf->Update(&s.ctx, inblk, len);

		memset(blk, 0, blksz);
		crypto_cursor_copydata(&cc, resid, blk);
		axf->Update(&s.ctx, blk, blksz);

	memset(blk, 0, blksz);
	blkp = (uint32_t *)blk + 1;
	*blkp = htobe32(crp->crp_payload_length * 8);
	axf->Update(&s.ctx, blk, blksz);
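	/*
	 * The block hashed above is the GHASH length block: the bit
	 * lengths of the AAD and of the ciphertext as two 64-bit
	 * big-endian fields.  For GMAC all of the data is AAD, so the
	 * payload bit count is stored in the low word of the first
	 * field (offset 4) and the ciphertext length is left as zero.
	 */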
	axf->Final(s.tag, &s.ctx);

	if (crp->crp_op & CRYPTO_OP_VERIFY_DIGEST) {
		crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
		    s.tag2);
		if (timingsafe_bcmp(s.tag, s.tag2, swa->sw_mlen) != 0)
		/* Inject the authentication data */
		crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen, s.tag);

	explicit_bzero(&s, sizeof(s));
swcr_gcm(const struct swcr_session *ses, struct cryptop *crp)
		uint32_t blkbuf[howmany(AES_BLOCK_LEN, sizeof(uint32_t))];
		u_char tag[GMAC_DIGEST_LEN];
		u_char tag2[GMAC_DIGEST_LEN];
	u_char *blk = (u_char *)s.blkbuf;
	struct crypto_buffer_cursor cc_in, cc_out;
	const struct swcr_auth *swa;
	const struct swcr_encdec *swe;
	const struct enc_xform *exf;
	int blksz, error, ivlen, r, resid;

	swa = &ses->swcr_auth;
	swe = &ses->swcr_encdec;
	blksz = GMAC_BLOCK_LEN;
	KASSERT(blksz == exf->native_blocksize,
	    ("%s: blocksize mismatch", __func__));

	if ((crp->crp_flags & CRYPTO_F_IV_SEPARATE) == 0)

	ivlen = AES_GCM_IV_LEN;

	ctx = __builtin_alloca(exf->ctxsize);
	if (crp->crp_cipher_key != NULL)
		exf->setkey(ctx, crp->crp_cipher_key,
		    crypto_get_params(crp->crp_session)->csp_cipher_klen);
		memcpy(ctx, swe->sw_ctx, exf->ctxsize);
	exf->reinit(ctx, crp->crp_iv, ivlen);

	/* Supply MAC with AAD */
	if (crp->crp_aad != NULL) {
		len = rounddown(crp->crp_aad_length, blksz);
			exf->update(ctx, crp->crp_aad, len);
		if (crp->crp_aad_length != len) {
			memset(blk, 0, blksz);
			memcpy(blk, (char *)crp->crp_aad + len,
			    crp->crp_aad_length - len);
			exf->update(ctx, blk, blksz);
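			/*
			 * GHASH consumes full 16-byte blocks, so the
			 * tail of the AAD is zero-padded out to a block
			 * boundary before being fed to update().
			 */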
		crypto_cursor_init(&cc_in, &crp->crp_buf);
		crypto_cursor_advance(&cc_in, crp->crp_aad_start);
		for (resid = crp->crp_aad_length; resid >= blksz;
		    resid -= len) {
			inblk = crypto_cursor_segment(&cc_in, &len);
				len = rounddown(MIN(len, resid), blksz);
				crypto_cursor_advance(&cc_in, len);
				crypto_cursor_copydata(&cc_in, len, blk);
			exf->update(ctx, inblk, len);

			memset(blk, 0, blksz);
			crypto_cursor_copydata(&cc_in, resid, blk);
			exf->update(ctx, blk, blksz);

	/* Do encryption with MAC */
	crypto_cursor_init(&cc_in, &crp->crp_buf);
	crypto_cursor_advance(&cc_in, crp->crp_payload_start);
	if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
		crypto_cursor_init(&cc_out, &crp->crp_obuf);
		crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
	for (resid = crp->crp_payload_length; resid >= blksz; resid -= blksz) {
		inblk = crypto_cursor_segment(&cc_in, &len);
			crypto_cursor_copydata(&cc_in, blksz, blk);
			crypto_cursor_advance(&cc_in, blksz);
		if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
			outblk = crypto_cursor_segment(&cc_out, &len);
			exf->encrypt(ctx, inblk, outblk);
			exf->update(ctx, outblk, blksz);
				crypto_cursor_copyback(&cc_out, blksz, blk);
				crypto_cursor_advance(&cc_out, blksz);
			exf->update(ctx, inblk, blksz);

		crypto_cursor_copydata(&cc_in, resid, blk);
		if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
			exf->encrypt_last(ctx, blk, blk, resid);
			crypto_cursor_copyback(&cc_out, resid, blk);
			exf->update(ctx, blk, resid);

	memset(blk, 0, blksz);
	blkp = (uint32_t *)blk + 1;
	*blkp = htobe32(crp->crp_aad_length * 8);
	blkp = (uint32_t *)blk + 3;
	*blkp = htobe32(crp->crp_payload_length * 8);
	exf->update(ctx, blk, blksz);
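	/*
	 * Final GHASH block: the bit counts of the AAD and of the
	 * ciphertext, each occupying a 64-bit big-endian field (the low
	 * 32 bits are written at offsets 4 and 12 above).  For
	 * illustration, 20 bytes of AAD and a 64-byte payload give the
	 * block 00000000 000000a0 00000000 00000200.
	 */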
	exf->final(s.tag, ctx);

	if (!CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
		crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
		    s.tag2);
		r = timingsafe_bcmp(s.tag, s.tag2, swa->sw_mlen);

		/* tag matches, decrypt data */
		crypto_cursor_init(&cc_in, &crp->crp_buf);
		crypto_cursor_advance(&cc_in, crp->crp_payload_start);
		for (resid = crp->crp_payload_length; resid > blksz;
		    resid -= blksz) {
			inblk = crypto_cursor_segment(&cc_in, &len);
				crypto_cursor_copydata(&cc_in, blksz, blk);
				crypto_cursor_advance(&cc_in, blksz);
			outblk = crypto_cursor_segment(&cc_out, &len);
			exf->decrypt(ctx, inblk, outblk);
				crypto_cursor_copyback(&cc_out, blksz, blk);
				crypto_cursor_advance(&cc_out, blksz);

		crypto_cursor_copydata(&cc_in, resid, blk);
		exf->decrypt_last(ctx, blk, blk, resid);
		crypto_cursor_copyback(&cc_out, resid, blk);

		/* Inject the authentication data */
		crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen,
		    s.tag);

	explicit_bzero(ctx, exf->ctxsize);
	explicit_bzero(&s, sizeof(s));
build_ccm_b0(const char *nonce, u_int nonce_length, u_int aad_length,
    u_int data_length, u_int tag_length, uint8_t *b0)

	KASSERT(nonce_length >= 7 && nonce_length <= 13,
	    ("nonce_length must be between 7 and 13 bytes"));

	/*
	 * Need to determine the L field value. This is the number of
	 * bytes needed to specify the length of the message; the length
	 * is whatever is left in the 16 bytes after specifying flags and
	 * the nonce.
	 */
	L = 15 - nonce_length;

	flags = ((aad_length > 0) << 6) +
	    (((tag_length - 2) / 2) << 3) +
	    (L - 1);
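	/*
	 * Example (RFC 3610): with AAD present, a 16-byte tag and a
	 * 12-byte nonce, L = 15 - 12 = 3, so flags = (1 << 6) |
	 * (((16 - 2) / 2) << 3) | (3 - 1) = 0x7a, and the message
	 * length occupies the last 3 bytes of b0 in big-endian order.
	 */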
	/*
	 * Now we need to set up the first block, which has flags, nonce,
	 * and the message length.
	 */
	memcpy(b0 + 1, nonce, nonce_length);
	bp = b0 + 1 + nonce_length;

	/* Need to copy L' [aka L-1] bytes of data_length */
	for (uint8_t *dst = b0 + CCM_CBC_BLOCK_LEN - 1; dst >= bp; dst--) {
/* NB: OCF only supports AAD lengths < 2^32. */
build_ccm_aad_length(u_int aad_length, uint8_t *blk)
	if (aad_length < ((1 << 16) - (1 << 8))) {
		be16enc(blk, aad_length);
		return (sizeof(uint16_t));

		be32enc(blk + 2, aad_length);
		return (2 + sizeof(uint32_t));
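	/*
	 * Per RFC 3610 section 2.2 the AAD length is encoded either as a
	 * 2-byte big-endian value (for lengths below 0xff00) or as the
	 * marker 0xff 0xfe followed by a 4-byte big-endian value.  For
	 * example, 24 bytes of AAD encode as 00 18, while 65536 bytes
	 * encode as ff fe 00 01 00 00.
	 */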
swcr_ccm_cbc_mac(const struct swcr_session *ses, struct cryptop *crp)
		u_char blk[CCM_CBC_BLOCK_LEN];
		u_char tag[AES_CBC_MAC_HASH_LEN];
		u_char tag2[AES_CBC_MAC_HASH_LEN];
	const struct crypto_session_params *csp;
	const struct swcr_auth *swa;
	const struct auth_hash *axf;
	int error, ivlen, len;

	csp = crypto_get_params(crp->crp_session);
	swa = &ses->swcr_auth;

	if (crp->crp_auth_key != NULL) {
		axf->Setkey(&s.ctx, crp->crp_auth_key, csp->csp_auth_klen);
		memcpy(&s.ctx, swa->sw_ictx, axf->ctxsize);

	/* Initialize the IV */
	ivlen = csp->csp_ivlen;

	/* Supply MAC with IV */
	axf->Reinit(&s.ctx, crp->crp_iv, ivlen);

	/* Supply MAC with b0. */
	build_ccm_b0(crp->crp_iv, ivlen, crp->crp_payload_length, 0,
	    swa->sw_mlen, s.blk);
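	/*
	 * For a digest-only CBC-MAC session the payload is authenticated
	 * as AAD: it is passed as the aad_length argument above and the
	 * message-length field in b0 is left as zero.
	 */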
	axf->Update(&s.ctx, s.blk, CCM_CBC_BLOCK_LEN);

	len = build_ccm_aad_length(crp->crp_payload_length, s.blk);
	axf->Update(&s.ctx, s.blk, len);

	crypto_apply(crp, crp->crp_payload_start, crp->crp_payload_length,
	    axf->Update, &s.ctx);

	axf->Final(s.tag, &s.ctx);

	if (crp->crp_op & CRYPTO_OP_VERIFY_DIGEST) {
		crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
		    s.tag2);
		if (timingsafe_bcmp(s.tag, s.tag2, swa->sw_mlen) != 0)
		/* Inject the authentication data */
		crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen,
		    s.tag);

	explicit_bzero(&s, sizeof(s));
swcr_ccm(const struct swcr_session *ses, struct cryptop *crp)
	const struct crypto_session_params *csp;
		uint32_t blkbuf[howmany(AES_BLOCK_LEN, sizeof(uint32_t))];
		u_char tag[AES_CBC_MAC_HASH_LEN];
		u_char tag2[AES_CBC_MAC_HASH_LEN];
	u_char *blk = (u_char *)s.blkbuf;
	struct crypto_buffer_cursor cc_in, cc_out;
	const struct swcr_auth *swa;
	const struct swcr_encdec *swe;
	const struct enc_xform *exf;
	int blksz, error, ivlen, r, resid;

	csp = crypto_get_params(crp->crp_session);
	swa = &ses->swcr_auth;
	swe = &ses->swcr_encdec;
	blksz = AES_BLOCK_LEN;
	KASSERT(blksz == exf->native_blocksize,
	    ("%s: blocksize mismatch", __func__));

	if (crp->crp_payload_length > ccm_max_payload_length(csp))

	if ((crp->crp_flags & CRYPTO_F_IV_SEPARATE) == 0)

	ivlen = csp->csp_ivlen;

	ctx = __builtin_alloca(exf->ctxsize);
	if (crp->crp_cipher_key != NULL)
		exf->setkey(ctx, crp->crp_cipher_key,
		    crypto_get_params(crp->crp_session)->csp_cipher_klen);
		memcpy(ctx, swe->sw_ctx, exf->ctxsize);
	exf->reinit(ctx, crp->crp_iv, ivlen);

	/* Supply MAC with b0. */
	_Static_assert(sizeof(s.blkbuf) >= CCM_CBC_BLOCK_LEN,
	    "blkbuf too small for b0");
	build_ccm_b0(crp->crp_iv, ivlen, crp->crp_aad_length,
	    crp->crp_payload_length, swa->sw_mlen, blk);
	exf->update(ctx, blk, CCM_CBC_BLOCK_LEN);

	/* Supply MAC with AAD */
	if (crp->crp_aad_length != 0) {
		len = build_ccm_aad_length(crp->crp_aad_length, blk);
		exf->update(ctx, blk, len);
		if (crp->crp_aad != NULL)
			exf->update(ctx, crp->crp_aad, crp->crp_aad_length);
			crypto_apply(crp, crp->crp_aad_start,
			    crp->crp_aad_length, exf->update, ctx);

		/* Pad the AAD (including length field) to a full block. */
		len = (len + crp->crp_aad_length) % CCM_CBC_BLOCK_LEN;
			len = CCM_CBC_BLOCK_LEN - len;
			memset(blk, 0, CCM_CBC_BLOCK_LEN);
			exf->update(ctx, blk, len);
	/* Do encryption/decryption with MAC */
	crypto_cursor_init(&cc_in, &crp->crp_buf);
	crypto_cursor_advance(&cc_in, crp->crp_payload_start);
	if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
		crypto_cursor_init(&cc_out, &crp->crp_obuf);
		crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
	for (resid = crp->crp_payload_length; resid >= blksz; resid -= blksz) {
		inblk = crypto_cursor_segment(&cc_in, &len);
			crypto_cursor_copydata(&cc_in, blksz, blk);
			crypto_cursor_advance(&cc_in, blksz);
		if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
			outblk = crypto_cursor_segment(&cc_out, &len);
			exf->update(ctx, inblk, blksz);
			exf->encrypt(ctx, inblk, outblk);
				crypto_cursor_copyback(&cc_out, blksz, blk);
				crypto_cursor_advance(&cc_out, blksz);
			/*
			 * One of the problems with CCM+CBC is that
			 * the authentication is done on the
			 * unencrypted data. As a result, we have to
			 * decrypt the data twice: once to generate
			 * the tag and a second time after the tag is
			 * verified.
			 */
			exf->decrypt(ctx, inblk, blk);
			exf->update(ctx, blk, blksz);

		crypto_cursor_copydata(&cc_in, resid, blk);
		if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
			exf->update(ctx, blk, resid);
			exf->encrypt_last(ctx, blk, blk, resid);
			crypto_cursor_copyback(&cc_out, resid, blk);
			exf->decrypt_last(ctx, blk, blk, resid);
			exf->update(ctx, blk, resid);
	exf->final(s.tag, ctx);

	if (!CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
		crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
		    s.tag2);
		r = timingsafe_bcmp(s.tag, s.tag2, swa->sw_mlen);

		/* tag matches, decrypt data */
		exf->reinit(ctx, crp->crp_iv, ivlen);
		crypto_cursor_init(&cc_in, &crp->crp_buf);
		crypto_cursor_advance(&cc_in, crp->crp_payload_start);
		for (resid = crp->crp_payload_length; resid > blksz;
		    resid -= blksz) {
			inblk = crypto_cursor_segment(&cc_in, &len);
				crypto_cursor_copydata(&cc_in, blksz, blk);
				crypto_cursor_advance(&cc_in, blksz);
			outblk = crypto_cursor_segment(&cc_out, &len);
			exf->decrypt(ctx, inblk, outblk);
				crypto_cursor_copyback(&cc_out, blksz, blk);
				crypto_cursor_advance(&cc_out, blksz);

		crypto_cursor_copydata(&cc_in, resid, blk);
		exf->decrypt_last(ctx, blk, blk, resid);
		crypto_cursor_copyback(&cc_out, resid, blk);

		/* Inject the authentication data */
		crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen,
		    s.tag);

	explicit_bzero(ctx, exf->ctxsize);
	explicit_bzero(&s, sizeof(s));
swcr_chacha20_poly1305(const struct swcr_session *ses, struct cryptop *crp)
	const struct crypto_session_params *csp;
		uint64_t blkbuf[howmany(CHACHA20_NATIVE_BLOCK_LEN, sizeof(uint64_t))];
		u_char tag[POLY1305_HASH_LEN];
		u_char tag2[POLY1305_HASH_LEN];
	u_char *blk = (u_char *)s.blkbuf;
	struct crypto_buffer_cursor cc_in, cc_out;
	const struct swcr_auth *swa;
	const struct swcr_encdec *swe;
	const struct enc_xform *exf;
	int blksz, error, r, resid;

	swa = &ses->swcr_auth;
	swe = &ses->swcr_encdec;
	blksz = exf->native_blocksize;
	KASSERT(blksz <= sizeof(s.blkbuf), ("%s: blocksize mismatch", __func__));

	if ((crp->crp_flags & CRYPTO_F_IV_SEPARATE) == 0)

	csp = crypto_get_params(crp->crp_session);

	ctx = __builtin_alloca(exf->ctxsize);
	if (crp->crp_cipher_key != NULL)
		exf->setkey(ctx, crp->crp_cipher_key,
		    csp->csp_cipher_klen);
		memcpy(ctx, swe->sw_ctx, exf->ctxsize);
	exf->reinit(ctx, crp->crp_iv, csp->csp_ivlen);

	/* Supply MAC with AAD */
	if (crp->crp_aad != NULL)
		exf->update(ctx, crp->crp_aad, crp->crp_aad_length);
		crypto_apply(crp, crp->crp_aad_start, crp->crp_aad_length,
		    exf->update, ctx);
	if (crp->crp_aad_length % POLY1305_BLOCK_LEN != 0) {
		memset(blk, 0, POLY1305_BLOCK_LEN);
		exf->update(ctx, blk, POLY1305_BLOCK_LEN -
		    crp->crp_aad_length % POLY1305_BLOCK_LEN);
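		/*
		 * RFC 8439 zero-pads both the AAD and the ciphertext up
		 * to a 16-byte boundary before feeding them to Poly1305;
		 * this handles the AAD padding, and the payload tail is
		 * padded the same way further down.
		 */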
	/* Do encryption with MAC */
	crypto_cursor_init(&cc_in, &crp->crp_buf);
	crypto_cursor_advance(&cc_in, crp->crp_payload_start);
	if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
		crypto_cursor_init(&cc_out, &crp->crp_obuf);
		crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
	for (resid = crp->crp_payload_length; resid >= blksz; resid -= blksz) {
		inblk = crypto_cursor_segment(&cc_in, &len);
			crypto_cursor_copydata(&cc_in, blksz, blk);
			crypto_cursor_advance(&cc_in, blksz);
		if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
			outblk = crypto_cursor_segment(&cc_out, &len);
			exf->encrypt(ctx, inblk, outblk);
			exf->update(ctx, outblk, blksz);
				crypto_cursor_copyback(&cc_out, blksz, blk);
				crypto_cursor_advance(&cc_out, blksz);
			exf->update(ctx, inblk, blksz);

		crypto_cursor_copydata(&cc_in, resid, blk);
		if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
			exf->encrypt_last(ctx, blk, blk, resid);
			crypto_cursor_copyback(&cc_out, resid, blk);
			exf->update(ctx, blk, resid);
		if (resid % POLY1305_BLOCK_LEN != 0) {
			memset(blk, 0, POLY1305_BLOCK_LEN);
			exf->update(ctx, blk, POLY1305_BLOCK_LEN -
			    resid % POLY1305_BLOCK_LEN);

	blkp = (uint64_t *)blk;
	blkp[0] = htole64(crp->crp_aad_length);
	blkp[1] = htole64(crp->crp_payload_length);
	exf->update(ctx, blk, sizeof(uint64_t) * 2);
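	/*
	 * Final Poly1305 block per RFC 8439: the AAD byte count and the
	 * ciphertext byte count as two 64-bit little-endian values.  For
	 * example, 12 bytes of AAD and a 114-byte payload produce
	 * 0c 00 00 00 00 00 00 00 72 00 00 00 00 00 00 00.
	 */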
	exf->final(s.tag, ctx);

	if (!CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
		crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
		    s.tag2);
		r = timingsafe_bcmp(s.tag, s.tag2, swa->sw_mlen);

		/* tag matches, decrypt data */
		crypto_cursor_init(&cc_in, &crp->crp_buf);
		crypto_cursor_advance(&cc_in, crp->crp_payload_start);
		for (resid = crp->crp_payload_length; resid > blksz;
		    resid -= blksz) {
			inblk = crypto_cursor_segment(&cc_in, &len);
				crypto_cursor_copydata(&cc_in, blksz, blk);
				crypto_cursor_advance(&cc_in, blksz);
			outblk = crypto_cursor_segment(&cc_out, &len);
			exf->decrypt(ctx, inblk, outblk);
				crypto_cursor_copyback(&cc_out, blksz, blk);
				crypto_cursor_advance(&cc_out, blksz);

		crypto_cursor_copydata(&cc_in, resid, blk);
		exf->decrypt_last(ctx, blk, blk, resid);
		crypto_cursor_copyback(&cc_out, resid, blk);

		/* Inject the authentication data */
		crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen,
		    s.tag);

	explicit_bzero(ctx, exf->ctxsize);
	explicit_bzero(&s, sizeof(s));
/*
 * Apply a cipher and a digest to perform EtA.
 */
swcr_eta(const struct swcr_session *ses, struct cryptop *crp)
	if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
		error = swcr_encdec(ses, crp);
			error = swcr_authcompute(ses, crp);
		error = swcr_authcompute(ses, crp);
			error = swcr_encdec(ses, crp);
/*
 * Apply a compression/decompression algorithm
 */
swcr_compdec(const struct swcr_session *ses, struct cryptop *crp)
	const struct comp_algo *cxf;
	uint8_t *data, *out;

	cxf = ses->swcr_compdec.sw_cxf;

	/*
	 * We must process the whole buffer of data at once, so if the
	 * data is not contiguous in the request's buffer, copy it into
	 * a temporary buffer first.
	 */
	data = malloc(crp->crp_payload_length, M_CRYPTO_DATA, M_NOWAIT);
	crypto_copydata(crp, crp->crp_payload_start, crp->crp_payload_length,
	    data);

	if (CRYPTO_OP_IS_COMPRESS(crp->crp_op))
		result = cxf->compress(data, crp->crp_payload_length, &out);
		result = cxf->decompress(data, crp->crp_payload_length, &out);

	free(data, M_CRYPTO_DATA);
	crp->crp_olen = result;

	/* Check the compressed size when doing compression */
	if (CRYPTO_OP_IS_COMPRESS(crp->crp_op)) {
		if (result >= crp->crp_payload_length) {
			/* Compression was useless; we lost time */
			free(out, M_CRYPTO_DATA);

	/*
	 * Copy back the (de)compressed data.  m_copyback() will extend
	 * the mbuf as necessary.
	 */
	crypto_copyback(crp, crp->crp_payload_start, result, out);
	if (result < crp->crp_payload_length) {
		switch (crp->crp_buf.cb_type) {
		case CRYPTO_BUF_MBUF:
		case CRYPTO_BUF_SINGLE_MBUF:
			adj = result - crp->crp_payload_length;
			m_adj(crp->crp_buf.cb_mbuf, adj);
		case CRYPTO_BUF_UIO: {
			struct uio *uio = crp->crp_buf.cb_uio;

			adj = crp->crp_payload_length - result;
			ind = uio->uio_iovcnt - 1;

			while (adj > 0 && ind >= 0) {
				if (adj < uio->uio_iov[ind].iov_len) {
					uio->uio_iov[ind].iov_len -= adj;

				adj -= uio->uio_iov[ind].iov_len;
				uio->uio_iov[ind].iov_len = 0;
		case CRYPTO_BUF_VMPAGE:
			adj = crp->crp_payload_length - result;
			crp->crp_buf.cb_vm_page_len -= adj;

	free(out, M_CRYPTO_DATA);
swcr_setup_cipher(struct swcr_session *ses,
    const struct crypto_session_params *csp)
	struct swcr_encdec *swe;
	const struct enc_xform *txf;

	swe = &ses->swcr_encdec;
	txf = crypto_cipher(csp);
	if (csp->csp_cipher_key != NULL) {
		if (txf->ctxsize != 0) {
			swe->sw_ctx = malloc(txf->ctxsize, M_CRYPTO_DATA,
			if (swe->sw_ctx == NULL)
		error = txf->setkey(swe->sw_ctx,
		    csp->csp_cipher_key, csp->csp_cipher_klen);
swcr_setup_auth(struct swcr_session *ses,
    const struct crypto_session_params *csp)
	struct swcr_auth *swa;
	const struct auth_hash *axf;

	swa = &ses->swcr_auth;
	axf = crypto_auth_hash(csp);
	if (csp->csp_auth_mlen < 0 || csp->csp_auth_mlen > axf->hashsize)
	if (csp->csp_auth_mlen == 0)
		swa->sw_mlen = axf->hashsize;
		swa->sw_mlen = csp->csp_auth_mlen;
	if (csp->csp_auth_klen == 0 || csp->csp_auth_key != NULL) {
		swa->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA,
		if (swa->sw_ictx == NULL)

	switch (csp->csp_auth_alg) {
	case CRYPTO_SHA1_HMAC:
	case CRYPTO_SHA2_224_HMAC:
	case CRYPTO_SHA2_256_HMAC:
	case CRYPTO_SHA2_384_HMAC:
	case CRYPTO_SHA2_512_HMAC:
	case CRYPTO_RIPEMD160_HMAC:
		swa->sw_hmac = true;
		if (csp->csp_auth_key != NULL) {
			swa->sw_octx = malloc(axf->ctxsize, M_CRYPTO_DATA,
			if (swa->sw_octx == NULL)
			hmac_init_ipad(axf, csp->csp_auth_key,
			    csp->csp_auth_klen, swa->sw_ictx);
			hmac_init_opad(axf, csp->csp_auth_key,
			    csp->csp_auth_klen, swa->sw_octx);
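			/*
			 * HMAC(K, m) = H((K ^ opad) || H((K ^ ipad) || m)).
			 * Precomputing the two keyed initial states here
			 * lets each request start from a copied context
			 * instead of re-hashing the key.
			 */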
	case CRYPTO_RIPEMD160:
	case CRYPTO_SHA2_224:
	case CRYPTO_SHA2_256:
	case CRYPTO_SHA2_384:
	case CRYPTO_SHA2_512:
	case CRYPTO_NULL_HMAC:
		axf->Init(swa->sw_ictx);
	case CRYPTO_AES_NIST_GMAC:
	case CRYPTO_AES_CCM_CBC_MAC:
	case CRYPTO_POLY1305:
		if (csp->csp_auth_key != NULL) {
			axf->Init(swa->sw_ictx);
			axf->Setkey(swa->sw_ictx, csp->csp_auth_key,
			    csp->csp_auth_klen);
	case CRYPTO_BLAKE2B:
	case CRYPTO_BLAKE2S:
		/*
		 * Blake2b and Blake2s support an optional key but do
		 * not require one.
		 */
		if (csp->csp_auth_klen == 0)
			axf->Init(swa->sw_ictx);
		else if (csp->csp_auth_key != NULL)
			axf->Setkey(swa->sw_ictx, csp->csp_auth_key,
			    csp->csp_auth_klen);

	if (csp->csp_mode == CSP_MODE_DIGEST) {
		switch (csp->csp_auth_alg) {
		case CRYPTO_AES_NIST_GMAC:
			ses->swcr_process = swcr_gmac;
		case CRYPTO_AES_CCM_CBC_MAC:
			ses->swcr_process = swcr_ccm_cbc_mac;
			ses->swcr_process = swcr_authcompute;
swcr_setup_aead(struct swcr_session *ses,
    const struct crypto_session_params *csp)
	struct swcr_auth *swa;

	error = swcr_setup_cipher(ses, csp);

	swa = &ses->swcr_auth;
	if (csp->csp_auth_mlen == 0)
		swa->sw_mlen = ses->swcr_encdec.sw_exf->macsize;
		swa->sw_mlen = csp->csp_auth_mlen;
swcr_auth_supported(const struct crypto_session_params *csp)
	const struct auth_hash *axf;

	axf = crypto_auth_hash(csp);
	switch (csp->csp_auth_alg) {
	case CRYPTO_SHA1_HMAC:
	case CRYPTO_SHA2_224_HMAC:
	case CRYPTO_SHA2_256_HMAC:
	case CRYPTO_SHA2_384_HMAC:
	case CRYPTO_SHA2_512_HMAC:
	case CRYPTO_NULL_HMAC:
	case CRYPTO_RIPEMD160_HMAC:
	case CRYPTO_AES_NIST_GMAC:
		switch (csp->csp_auth_klen * 8) {
		if (csp->csp_auth_key == NULL)
		if (csp->csp_ivlen != AES_GCM_IV_LEN)
	case CRYPTO_POLY1305:
		if (csp->csp_auth_klen != POLY1305_KEY_LEN)
	case CRYPTO_AES_CCM_CBC_MAC:
		switch (csp->csp_auth_klen * 8) {
		if (csp->csp_auth_key == NULL)
swcr_cipher_supported(const struct crypto_session_params *csp)
	const struct enc_xform *txf;

	txf = crypto_cipher(csp);
	if (csp->csp_cipher_alg != CRYPTO_NULL_CBC &&
	    txf->ivsize != csp->csp_ivlen)

#define SUPPORTED_SES (CSP_F_SEPARATE_OUTPUT | CSP_F_SEPARATE_AAD | CSP_F_ESN)
swcr_probesession(device_t dev, const struct crypto_session_params *csp)
	if ((csp->csp_flags & ~(SUPPORTED_SES)) != 0)

	switch (csp->csp_mode) {
	case CSP_MODE_COMPRESS:
		switch (csp->csp_cipher_alg) {
		case CRYPTO_DEFLATE_COMP:
	case CSP_MODE_CIPHER:
		switch (csp->csp_cipher_alg) {
		case CRYPTO_AES_NIST_GCM_16:
		case CRYPTO_AES_CCM_16:
		case CRYPTO_CHACHA20_POLY1305:
		case CRYPTO_XCHACHA20_POLY1305:
			if (!swcr_cipher_supported(csp))
	case CSP_MODE_DIGEST:
		if (!swcr_auth_supported(csp))
		switch (csp->csp_cipher_alg) {
		case CRYPTO_AES_NIST_GCM_16:
		case CRYPTO_AES_CCM_16:
			switch (csp->csp_cipher_klen * 8) {
		case CRYPTO_CHACHA20_POLY1305:
		case CRYPTO_XCHACHA20_POLY1305:
		/* AEAD algorithms cannot be used for EtA. */
		switch (csp->csp_cipher_alg) {
		case CRYPTO_AES_NIST_GCM_16:
		case CRYPTO_AES_CCM_16:
		case CRYPTO_CHACHA20_POLY1305:
		case CRYPTO_XCHACHA20_POLY1305:
		switch (csp->csp_auth_alg) {
		case CRYPTO_AES_NIST_GMAC:
		case CRYPTO_AES_CCM_CBC_MAC:
		if (!swcr_cipher_supported(csp) ||
		    !swcr_auth_supported(csp))

	return (CRYPTODEV_PROBE_SOFTWARE);
/*
 * Generate a new software session.
 */
swcr_newsession(device_t dev, crypto_session_t cses,
    const struct crypto_session_params *csp)
	struct swcr_session *ses;
	const struct comp_algo *cxf;

	ses = crypto_get_driver_session(cses);

	switch (csp->csp_mode) {
	case CSP_MODE_COMPRESS:
		switch (csp->csp_cipher_alg) {
		case CRYPTO_DEFLATE_COMP:
			cxf = &comp_algo_deflate;
			panic("bad compression algo");
		ses->swcr_compdec.sw_cxf = cxf;
		ses->swcr_process = swcr_compdec;
	case CSP_MODE_CIPHER:
		switch (csp->csp_cipher_alg) {
		case CRYPTO_NULL_CBC:
			ses->swcr_process = swcr_null;
		case CRYPTO_AES_NIST_GCM_16:
		case CRYPTO_AES_CCM_16:
		case CRYPTO_CHACHA20_POLY1305:
		case CRYPTO_XCHACHA20_POLY1305:
			panic("bad cipher algo");
			error = swcr_setup_cipher(ses, csp);
				ses->swcr_process = swcr_encdec;
	case CSP_MODE_DIGEST:
		error = swcr_setup_auth(ses, csp);
		switch (csp->csp_cipher_alg) {
		case CRYPTO_AES_NIST_GCM_16:
			error = swcr_setup_aead(ses, csp);
				ses->swcr_process = swcr_gcm;
		case CRYPTO_AES_CCM_16:
			error = swcr_setup_aead(ses, csp);
				ses->swcr_process = swcr_ccm;
		case CRYPTO_CHACHA20_POLY1305:
		case CRYPTO_XCHACHA20_POLY1305:
			error = swcr_setup_aead(ses, csp);
				ses->swcr_process = swcr_chacha20_poly1305;
			panic("bad aead algo");
		switch (csp->csp_cipher_alg) {
		case CRYPTO_AES_NIST_GCM_16:
		case CRYPTO_AES_CCM_16:
		case CRYPTO_CHACHA20_POLY1305:
		case CRYPTO_XCHACHA20_POLY1305:
			panic("bad eta cipher algo");
		switch (csp->csp_auth_alg) {
		case CRYPTO_AES_NIST_GMAC:
		case CRYPTO_AES_CCM_CBC_MAC:
			panic("bad eta auth algo");

		error = swcr_setup_auth(ses, csp);
		if (csp->csp_cipher_alg == CRYPTO_NULL_CBC) {
			/* Effectively degrade to digest mode. */
			ses->swcr_process = swcr_authcompute;
		error = swcr_setup_cipher(ses, csp);
			ses->swcr_process = swcr_eta;

		swcr_freesession(dev, cses);
swcr_freesession(device_t dev, crypto_session_t cses)
	struct swcr_session *ses;

	ses = crypto_get_driver_session(cses);

	zfree(ses->swcr_encdec.sw_ctx, M_CRYPTO_DATA);
	zfree(ses->swcr_auth.sw_ictx, M_CRYPTO_DATA);
	zfree(ses->swcr_auth.sw_octx, M_CRYPTO_DATA);

/*
 * Process a software request.
 */
swcr_process(device_t dev, struct cryptop *crp, int hint)
	struct swcr_session *ses;

	ses = crypto_get_driver_session(crp->crp_session);

	crp->crp_etype = ses->swcr_process(ses, crp);
swcr_identify(driver_t *drv, device_t parent)
	/* NB: order 10 is so we get attached after h/w devices */
	if (device_find_child(parent, "cryptosoft", -1) == NULL &&
	    BUS_ADD_CHILD(parent, 10, "cryptosoft", 0) == 0)
		panic("cryptosoft: could not attach");

swcr_probe(device_t dev)
	device_set_desc(dev, "software crypto");
	return (BUS_PROBE_NOWILDCARD);
swcr_attach(device_t dev)
	swcr_id = crypto_get_driverid(dev, sizeof(struct swcr_session),
	    CRYPTOCAP_F_SOFTWARE | CRYPTOCAP_F_SYNC);
		device_printf(dev, "cannot initialize!\n");

swcr_detach(device_t dev)
	crypto_unregister_all(swcr_id);
static device_method_t swcr_methods[] = {
	DEVMETHOD(device_identify, swcr_identify),
	DEVMETHOD(device_probe, swcr_probe),
	DEVMETHOD(device_attach, swcr_attach),
	DEVMETHOD(device_detach, swcr_detach),

	DEVMETHOD(cryptodev_probesession, swcr_probesession),
	DEVMETHOD(cryptodev_newsession, swcr_newsession),
	DEVMETHOD(cryptodev_freesession, swcr_freesession),
	DEVMETHOD(cryptodev_process, swcr_process),

static driver_t swcr_driver = {
	0,			/* NB: no softc */

static devclass_t swcr_devclass;
/*
 * NB: We explicitly reference the crypto module so we
 * get the necessary ordering when built as a loadable
 * module. This is required because we bundle the crypto
 * module code together with the cryptosoft driver (otherwise
 * normal module dependencies would handle things).
 */
extern int crypto_modevent(struct module *, int, void *);

/* XXX where to attach */
DRIVER_MODULE(cryptosoft, nexus, swcr_driver, swcr_devclass, crypto_modevent, 0);
MODULE_VERSION(cryptosoft, 1);
MODULE_DEPEND(cryptosoft, crypto, 1, 1, 1);