1 /* $OpenBSD: cryptosoft.c,v 1.35 2002/04/26 08:43:50 deraadt Exp $ */
4 * The author of this code is Angelos D. Keromytis (angelos@cis.upenn.edu)
5 * Copyright (c) 2002-2006 Sam Leffler, Errno Consulting
7 * This code was written by Angelos D. Keromytis in Athens, Greece, in
8 * February 2000. Network Security Technologies Inc. (NSTI) kindly
9 * supported the development of this code.
11 * Copyright (c) 2000, 2001 Angelos D. Keromytis
12 * Copyright (c) 2014 The FreeBSD Foundation
13 * All rights reserved.
15 * Portions of this software were developed by John-Mark Gurney
16 * under sponsorship of the FreeBSD Foundation and
17 * Rubicon Communications, LLC (Netgate).
19 * Permission to use, copy, and modify this software with or without fee
20 * is hereby granted, provided that this entire notice is included in
21 * all source code copies of any software which is or includes a copy or
22 * modification of this software.
24 * THIS SOFTWARE IS BEING PROVIDED "AS IS", WITHOUT ANY EXPRESS OR
25 * IMPLIED WARRANTY. IN PARTICULAR, NONE OF THE AUTHORS MAKES ANY
26 * REPRESENTATION OR WARRANTY OF ANY KIND CONCERNING THE
27 * MERCHANTABILITY OF THIS SOFTWARE OR ITS FITNESS FOR ANY PARTICULAR
31 #include <sys/cdefs.h>
32 __FBSDID("$FreeBSD$");
34 #include <sys/param.h>
35 #include <sys/systm.h>
36 #include <sys/malloc.h>
38 #include <sys/module.h>
39 #include <sys/sysctl.h>
40 #include <sys/errno.h>
41 #include <sys/random.h>
42 #include <sys/kernel.h>
45 #include <sys/rwlock.h>
46 #include <sys/endian.h>
47 #include <sys/limits.h>
48 #include <sys/mutex.h>
50 #include <crypto/sha1.h>
51 #include <opencrypto/rmd160.h>
53 #include <opencrypto/cryptodev.h>
54 #include <opencrypto/xform.h>
58 #include "cryptodev_if.h"
63 struct auth_hash *sw_axf;
69 struct enc_xform *sw_exf;
73 struct comp_algo *sw_cxf;
78 int (*swcr_process)(struct swcr_session *, struct cryptop *);
80 struct swcr_auth swcr_auth;
81 struct swcr_encdec swcr_encdec;
82 struct swcr_compdec swcr_compdec;
85 static int32_t swcr_id;
87 static void swcr_freesession(device_t dev, crypto_session_t cses);
89 /* Used for CRYPTO_NULL_CBC. */
/*
 * No-op handler for the null cipher: the request's payload is left
 * untouched.  (Body elided in this excerpt.)
 */
91 swcr_null(struct swcr_session *ses, struct cryptop *crp)
98 * Apply a symmetric encryption/decryption algorithm.
/*
 * Processes the payload of 'crp' with the session's cipher transform,
 * walking the (possibly segmented) input/output buffers with cursors.
 * Handles three cases: ciphers with a reinit hook (IV handled by the
 * xform, e.g. counter modes), CBC-style chaining done here, and a
 * trailing partial block for stream ciphers.
 */
101 swcr_encdec(struct swcr_session *ses, struct cryptop *crp)
103 unsigned char iv[EALG_MAX_BLOCK_LEN], blk[EALG_MAX_BLOCK_LEN];
104 unsigned char *ivp, *nivp, iv2[EALG_MAX_BLOCK_LEN];
105 const struct crypto_session_params *csp;
106 struct swcr_encdec *sw;
107 struct enc_xform *exf;
108 int i, blks, inlen, ivlen, outlen, resid;
109 struct crypto_buffer_cursor cc_in, cc_out;
117 sw = &ses->swcr_encdec;
/*
 * native_blocksize == 0 means a pure block cipher: the payload must be
 * a whole number of cipher blocks.  Otherwise (stream ciphers) use the
 * native block size and allow a trailing partial block.
 */
121 if (exf->native_blocksize == 0) {
122 /* Check for non-padded data */
123 if ((crp->crp_payload_length % exf->blocksize) != 0)
126 blks = exf->blocksize;
128 blks = exf->native_blocksize;
130 if (exf == &enc_xform_aes_icm &&
131 (crp->crp_flags & CRYPTO_F_IV_SEPARATE) == 0)
134 crypto_read_iv(crp, iv);
/* Per-request key: rebuild the key schedule before processing. */
136 if (crp->crp_cipher_key != NULL) {
137 csp = crypto_get_params(crp->crp_session);
138 error = exf->setkey(sw->sw_kschedule,
139 crp->crp_cipher_key, csp->csp_cipher_klen);
146 * xforms that provide a reinit method perform all IV
147 * handling themselves.
149 exf->reinit(sw->sw_kschedule, iv);
154 crypto_cursor_init(&cc_in, &crp->crp_buf);
155 crypto_cursor_advance(&cc_in, crp->crp_payload_start);
156 inlen = crypto_cursor_seglen(&cc_in);
157 inblk = crypto_cursor_segbase(&cc_in);
/* Separate output buffer, if the caller supplied one. */
158 if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
159 crypto_cursor_init(&cc_out, &crp->crp_obuf);
160 crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
163 outlen = crypto_cursor_seglen(&cc_out);
164 outblk = crypto_cursor_segbase(&cc_out);
166 resid = crp->crp_payload_length;
167 encrypting = CRYPTO_OP_IS_ENCRYPT(crp->crp_op);
170 * Loop through encrypting blocks. 'inlen' is the remaining
171 * length of the current segment in the input buffer.
172 * 'outlen' is the remaining length of current segment in the
175 while (resid >= blks) {
177 * If the current block is not contained within the
178 * current input/output segment, use 'blk' as a local
182 crypto_cursor_copydata(&cc_in, blks, blk);
189 * Ciphers without a 'reinit' hook are assumed to be
190 * used in CBC mode where the chaining is done here.
192 if (exf->reinit != NULL) {
194 exf->encrypt(sw->sw_kschedule, inblk, outblk);
196 exf->decrypt(sw->sw_kschedule, inblk, outblk);
197 } else if (encrypting) {
198 /* XOR with previous block */
199 for (i = 0; i < blks; i++)
200 outblk[i] = inblk[i] ^ ivp[i];
202 exf->encrypt(sw->sw_kschedule, outblk, outblk);
205 * Keep encrypted block for XOR'ing
208 memcpy(iv, outblk, blks);
210 } else { /* decrypt */
212 * Keep encrypted block for XOR'ing
/*
 * CBC decrypt must remember the *ciphertext* block as the next IV;
 * 'iv' and 'iv2' are ping-ponged so the saved block survives an
 * in-place decrypt.
 */
215 nivp = (ivp == iv) ? iv2 : iv;
216 memcpy(nivp, inblk, blks);
218 exf->decrypt(sw->sw_kschedule, inblk, outblk);
220 /* XOR with previous block */
221 for (i = 0; i < blks; i++)
/* Refresh segment bookkeeping after consuming a block. */
228 inlen = crypto_cursor_seglen(&cc_in);
229 inblk = crypto_cursor_segbase(&cc_in);
231 crypto_cursor_advance(&cc_in, blks);
237 crypto_cursor_copyback(&cc_out, blks, blk);
238 outlen = crypto_cursor_seglen(&cc_out);
239 outblk = crypto_cursor_segbase(&cc_out);
241 crypto_cursor_advance(&cc_out, blks);
249 /* Handle trailing partial block for stream ciphers. */
251 KASSERT(exf->native_blocksize != 0,
252 ("%s: partial block of %d bytes for cipher %s",
253 __func__, i, exf->name));
254 KASSERT(exf->reinit != NULL,
255 ("%s: partial block cipher %s without reinit hook",
256 __func__, exf->name));
257 KASSERT(resid < blks, ("%s: partial block too big", __func__));
259 inlen = crypto_cursor_seglen(&cc_in);
260 outlen = crypto_cursor_seglen(&cc_out);
262 crypto_cursor_copydata(&cc_in, resid, blk);
265 inblk = crypto_cursor_segbase(&cc_in);
269 outblk = crypto_cursor_segbase(&cc_out);
/* Stream ciphers finish the final short block with *_last hooks. */
271 exf->encrypt_last(sw->sw_kschedule, inblk, outblk,
274 exf->decrypt_last(sw->sw_kschedule, inblk, outblk,
277 crypto_cursor_copyback(&cc_out, resid, blk);
/*
 * Initialize the per-session auth contexts from 'key'.  HMAC variants
 * precompute the inner (ipad) and outer (opad) contexts; keyed
 * non-HMAC algorithms (e.g. Poly1305, Blake2) just set the key and
 * (re)initialize the single context.  Panics for keyless algorithms.
 */
284 swcr_authprepare(struct auth_hash *axf, struct swcr_auth *sw,
285 const uint8_t *key, int klen)
289 case CRYPTO_SHA1_HMAC:
290 case CRYPTO_SHA2_224_HMAC:
291 case CRYPTO_SHA2_256_HMAC:
292 case CRYPTO_SHA2_384_HMAC:
293 case CRYPTO_SHA2_512_HMAC:
294 case CRYPTO_NULL_HMAC:
295 case CRYPTO_RIPEMD160_HMAC:
296 hmac_init_ipad(axf, key, klen, sw->sw_ictx);
297 hmac_init_opad(axf, key, klen, sw->sw_octx);
299 case CRYPTO_POLY1305:
302 axf->Setkey(sw->sw_ictx, key, klen);
303 axf->Init(sw->sw_ictx);
306 panic("%s: algorithm %d doesn't use keys", __func__, axf->type);
311 * Compute or verify hash.
/*
 * Runs the session digest over the AAD and payload regions of 'crp'.
 * On CRYPTO_OP_VERIFY_DIGEST the computed MAC is compared in constant
 * time against the one in the request; otherwise the MAC is written
 * back into the request at crp_digest_start.
 */
314 swcr_authcompute(struct swcr_session *ses, struct cryptop *crp)
316 u_char aalg[HASH_MAX_LEN];
317 u_char uaalg[HASH_MAX_LEN];
318 const struct crypto_session_params *csp;
319 struct swcr_auth *sw;
320 struct auth_hash *axf;
324 sw = &ses->swcr_auth;
/* Per-request key overrides the session key. */
328 if (crp->crp_auth_key != NULL) {
329 csp = crypto_get_params(crp->crp_session);
330 swcr_authprepare(axf, sw, crp->crp_auth_key,
/* Start from the precomputed inner (ipad) context. */
334 bcopy(sw->sw_ictx, &ctx, axf->ctxsize);
336 err = crypto_apply(crp, crp->crp_aad_start, crp->crp_aad_length,
337 (int (*)(void *, void *, unsigned int))axf->Update, &ctx);
/*
 * For encrypt with a separate output buffer, hash the payload from
 * the output buffer (EtA hashes ciphertext); otherwise hash in place.
 */
341 if (CRYPTO_HAS_OUTPUT_BUFFER(crp) &&
342 CRYPTO_OP_IS_ENCRYPT(crp->crp_op))
343 err = crypto_apply_buf(&crp->crp_obuf,
344 crp->crp_payload_output_start, crp->crp_payload_length,
345 (int (*)(void *, void *, unsigned int))axf->Update, &ctx);
347 err = crypto_apply(crp, crp->crp_payload_start,
348 crp->crp_payload_length,
349 (int (*)(void *, void *, unsigned int))axf->Update, &ctx);
355 case CRYPTO_SHA2_224:
356 case CRYPTO_SHA2_256:
357 case CRYPTO_SHA2_384:
358 case CRYPTO_SHA2_512:
359 axf->Final(aalg, &ctx);
362 case CRYPTO_SHA1_HMAC:
363 case CRYPTO_SHA2_224_HMAC:
364 case CRYPTO_SHA2_256_HMAC:
365 case CRYPTO_SHA2_384_HMAC:
366 case CRYPTO_SHA2_512_HMAC:
367 case CRYPTO_RIPEMD160_HMAC:
368 if (sw->sw_octx == NULL)
/* HMAC finalization: outer = H(opad || inner-digest). */
371 axf->Final(aalg, &ctx);
372 bcopy(sw->sw_octx, &ctx, axf->ctxsize);
373 axf->Update(&ctx, aalg, axf->hashsize);
374 axf->Final(aalg, &ctx);
379 case CRYPTO_NULL_HMAC:
380 case CRYPTO_POLY1305:
381 axf->Final(aalg, &ctx);
385 if (crp->crp_op & CRYPTO_OP_VERIFY_DIGEST) {
386 crypto_copydata(crp, crp->crp_digest_start, sw->sw_mlen, uaalg);
/* Constant-time compare to avoid leaking the MAC via timing. */
387 if (timingsafe_bcmp(aalg, uaalg, sw->sw_mlen) != 0)
390 /* Inject the authentication data */
391 crypto_copyback(crp, crp->crp_digest_start, sw->sw_mlen, aalg);
/* Compile-time checks that 'int' payload/AAD lengths fit GCM's limits. */
396 CTASSERT(INT_MAX <= (1ll<<39) - 256); /* GCM: plain text < 2^39-256 */
397 CTASSERT(INT_MAX <= (uint64_t)-1); /* GCM: associated data <= 2^64-1 */
/*
 * Digest-only AES-GMAC: authenticate the payload (no encryption).
 * Pads each partial block with zeroes, appends the bit-length block,
 * then verifies or injects the tag like swcr_authcompute().
 */
400 swcr_gmac(struct swcr_session *ses, struct cryptop *crp)
402 uint32_t blkbuf[howmany(EALG_MAX_BLOCK_LEN, sizeof(uint32_t))];
403 u_char *blk = (u_char *)blkbuf;
404 u_char aalg[AALG_MAX_RESULT_LEN];
405 u_char uaalg[AALG_MAX_RESULT_LEN];
406 u_char iv[EALG_MAX_BLOCK_LEN];
407 struct crypto_buffer_cursor cc;
409 struct swcr_auth *swa;
410 struct auth_hash *axf;
412 int blksz, ivlen, len, resid;
414 swa = &ses->swcr_auth;
417 bcopy(swa->sw_ictx, &ctx, axf->ctxsize);
418 blksz = axf->blocksize;
420 /* Initialize the IV */
421 ivlen = AES_GCM_IV_LEN;
422 crypto_read_iv(crp, iv);
424 axf->Reinit(&ctx, iv, ivlen);
425 crypto_cursor_init(&cc, &crp->crp_buf);
426 crypto_cursor_advance(&cc, crp->crp_payload_start);
427 for (resid = crp->crp_payload_length; resid > 0; resid -= len) {
428 len = MIN(resid, blksz);
429 crypto_cursor_copydata(&cc, len, blk);
/* Zero-pad the final short block before hashing. */
430 bzero(blk + len, blksz - len);
431 axf->Update(&ctx, blk, blksz);
/* GCM length block: payload length in bits, big-endian. */
436 blkp = (uint32_t *)blk + 1;
437 *blkp = htobe32(crp->crp_payload_length * 8);
438 axf->Update(&ctx, blk, blksz);
441 axf->Final(aalg, &ctx);
443 if (crp->crp_op & CRYPTO_OP_VERIFY_DIGEST) {
444 crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
446 if (timingsafe_bcmp(aalg, uaalg, swa->sw_mlen) != 0)
449 /* Inject the authentication data */
450 crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen, aalg);
/*
 * AES-GCM AEAD: authenticate the AAD, then encrypt-and-MAC (or
 * MAC-then-decrypt) the payload blockwise.  On decryption the payload
 * is only decrypted after the tag verifies, which requires a second
 * pass over the input.
 */
456 swcr_gcm(struct swcr_session *ses, struct cryptop *crp)
458 uint32_t blkbuf[howmany(EALG_MAX_BLOCK_LEN, sizeof(uint32_t))];
459 u_char *blk = (u_char *)blkbuf;
460 u_char aalg[AALG_MAX_RESULT_LEN];
461 u_char uaalg[AALG_MAX_RESULT_LEN];
462 u_char iv[EALG_MAX_BLOCK_LEN];
463 struct crypto_buffer_cursor cc_in, cc_out;
465 struct swcr_auth *swa;
466 struct swcr_encdec *swe;
467 struct auth_hash *axf;
468 struct enc_xform *exf;
470 int blksz, ivlen, len, r, resid;
472 swa = &ses->swcr_auth;
475 bcopy(swa->sw_ictx, &ctx, axf->ctxsize);
476 blksz = axf->blocksize;
478 swe = &ses->swcr_encdec;
480 KASSERT(axf->blocksize == exf->native_blocksize,
481 ("%s: blocksize mismatch", __func__));
483 if ((crp->crp_flags & CRYPTO_F_IV_SEPARATE) == 0)
486 /* Initialize the IV */
487 ivlen = AES_GCM_IV_LEN;
488 bcopy(crp->crp_iv, iv, ivlen);
490 /* Supply MAC with IV */
491 axf->Reinit(&ctx, iv, ivlen);
493 /* Supply MAC with AAD */
494 crypto_cursor_init(&cc_in, &crp->crp_buf);
495 crypto_cursor_advance(&cc_in, crp->crp_aad_start);
496 for (resid = crp->crp_aad_length; resid > 0; resid -= len) {
497 len = MIN(resid, blksz);
498 crypto_cursor_copydata(&cc_in, len, blk);
499 bzero(blk + len, blksz - len);
500 axf->Update(&ctx, blk, blksz);
503 exf->reinit(swe->sw_kschedule, iv);
505 /* Do encryption with MAC */
506 crypto_cursor_init(&cc_in, &crp->crp_buf);
507 crypto_cursor_advance(&cc_in, crp->crp_payload_start);
508 if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
509 crypto_cursor_init(&cc_out, &crp->crp_obuf);
510 crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
513 for (resid = crp->crp_payload_length; resid > 0; resid -= len) {
514 len = MIN(resid, blksz);
517 crypto_cursor_copydata(&cc_in, len, blk);
/* Encrypt path: GCM MACs the ciphertext, so encrypt first. */
518 if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
519 exf->encrypt(swe->sw_kschedule, blk, blk);
520 axf->Update(&ctx, blk, len);
521 crypto_cursor_copyback(&cc_out, len, blk);
/* Decrypt path: MAC only for now; decrypt after tag check below. */
523 axf->Update(&ctx, blk, len);
/* GCM length block: AAD bits at word 1, payload bits at word 3. */
529 blkp = (uint32_t *)blk + 1;
530 *blkp = htobe32(crp->crp_aad_length * 8);
531 blkp = (uint32_t *)blk + 3;
532 *blkp = htobe32(crp->crp_payload_length * 8);
533 axf->Update(&ctx, blk, blksz);
536 axf->Final(aalg, &ctx);
539 if (!CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
540 crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
543 r = timingsafe_bcmp(aalg, uaalg, swa->sw_mlen);
547 /* tag matches, decrypt data */
548 crypto_cursor_init(&cc_in, &crp->crp_buf);
549 crypto_cursor_advance(&cc_in, crp->crp_payload_start);
550 for (resid = crp->crp_payload_length; resid > 0;
552 len = MIN(resid, blksz);
555 crypto_cursor_copydata(&cc_in, len, blk);
556 exf->decrypt(swe->sw_kschedule, blk, blk);
557 crypto_cursor_copyback(&cc_out, len, blk);
560 /* Inject the authentication data */
561 crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen,
/*
 * Digest-only AES-CCM CBC-MAC: in this mode the request payload is
 * treated as the data to authenticate (cryptDataLength is 0).
 */
569 swcr_ccm_cbc_mac(struct swcr_session *ses, struct cryptop *crp)
571 uint32_t blkbuf[howmany(EALG_MAX_BLOCK_LEN, sizeof(uint32_t))];
572 u_char *blk = (u_char *)blkbuf;
573 u_char aalg[AALG_MAX_RESULT_LEN];
574 u_char uaalg[AALG_MAX_RESULT_LEN];
575 u_char iv[EALG_MAX_BLOCK_LEN];
576 struct crypto_buffer_cursor cc;
578 struct swcr_auth *swa;
579 struct auth_hash *axf;
580 int blksz, ivlen, len, resid;
582 swa = &ses->swcr_auth;
585 bcopy(swa->sw_ictx, &ctx, axf->ctxsize);
586 blksz = axf->blocksize;
588 /* Initialize the IV */
589 ivlen = AES_CCM_IV_LEN;
590 crypto_read_iv(crp, iv);
593 * AES CCM-CBC-MAC needs to know the length of both the auth
594 * data and payload data before doing the auth computation.
596 ctx.aes_cbc_mac_ctx.authDataLength = crp->crp_payload_length;
597 ctx.aes_cbc_mac_ctx.cryptDataLength = 0;
599 axf->Reinit(&ctx, iv, ivlen);
600 crypto_cursor_init(&cc, &crp->crp_buf);
/*
 * NOTE(review): the cursor advances past crp_aad_start while the loop
 * below walks crp_payload_length bytes; authDataLength above is also
 * the payload length.  Verify these offsets agree in the full source.
 */
601 crypto_cursor_advance(&cc, crp->crp_aad_start);
602 for (resid = crp->crp_payload_length; resid > 0; resid -= len) {
603 len = MIN(resid, blksz);
604 crypto_cursor_copydata(&cc, len, blk);
605 bzero(blk + len, blksz - len);
606 axf->Update(&ctx, blk, blksz);
610 axf->Final(aalg, &ctx);
612 if (crp->crp_op & CRYPTO_OP_VERIFY_DIGEST) {
613 crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
615 if (timingsafe_bcmp(aalg, uaalg, swa->sw_mlen) != 0)
618 /* Inject the authentication data */
619 crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen, aalg);
/*
 * AES-CCM AEAD.  CCM authenticates the *plaintext*, so encryption
 * MACs then encrypts, while decryption must decrypt once to feed the
 * MAC, verify the tag, and then decrypt again to produce output.
 */
625 swcr_ccm(struct swcr_session *ses, struct cryptop *crp)
627 uint32_t blkbuf[howmany(EALG_MAX_BLOCK_LEN, sizeof(uint32_t))];
628 u_char *blk = (u_char *)blkbuf;
629 u_char aalg[AALG_MAX_RESULT_LEN];
630 u_char uaalg[AALG_MAX_RESULT_LEN];
631 u_char iv[EALG_MAX_BLOCK_LEN];
632 struct crypto_buffer_cursor cc_in, cc_out;
634 struct swcr_auth *swa;
635 struct swcr_encdec *swe;
636 struct auth_hash *axf;
637 struct enc_xform *exf;
638 int blksz, ivlen, len, r, resid;
640 swa = &ses->swcr_auth;
643 bcopy(swa->sw_ictx, &ctx, axf->ctxsize);
644 blksz = axf->blocksize;
646 swe = &ses->swcr_encdec;
648 KASSERT(axf->blocksize == exf->native_blocksize,
649 ("%s: blocksize mismatch", __func__));
651 if ((crp->crp_flags & CRYPTO_F_IV_SEPARATE) == 0)
654 /* Initialize the IV */
655 ivlen = AES_CCM_IV_LEN;
656 bcopy(crp->crp_iv, iv, ivlen);
659 * AES CCM-CBC-MAC needs to know the length of both the auth
660 * data and payload data before doing the auth computation.
662 ctx.aes_cbc_mac_ctx.authDataLength = crp->crp_aad_length;
663 ctx.aes_cbc_mac_ctx.cryptDataLength = crp->crp_payload_length;
665 /* Supply MAC with IV */
666 axf->Reinit(&ctx, iv, ivlen);
668 /* Supply MAC with AAD */
669 crypto_cursor_init(&cc_in, &crp->crp_buf);
670 crypto_cursor_advance(&cc_in, crp->crp_aad_start);
671 for (resid = crp->crp_aad_length; resid > 0; resid -= len) {
672 len = MIN(resid, blksz);
673 crypto_cursor_copydata(&cc_in, len, blk);
674 bzero(blk + len, blksz - len);
675 axf->Update(&ctx, blk, blksz);
678 exf->reinit(swe->sw_kschedule, iv);
680 /* Do encryption/decryption with MAC */
681 crypto_cursor_init(&cc_in, &crp->crp_buf);
682 crypto_cursor_advance(&cc_in, crp->crp_payload_start);
683 if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
684 crypto_cursor_init(&cc_out, &crp->crp_obuf);
685 crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
688 for (resid = crp->crp_payload_length; resid > 0; resid -= len) {
689 len = MIN(resid, blksz);
692 crypto_cursor_copydata(&cc_in, len, blk);
/* Encrypt path: MAC the plaintext, then encrypt (CCM order). */
693 if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
694 axf->Update(&ctx, blk, len);
695 exf->encrypt(swe->sw_kschedule, blk, blk);
696 crypto_cursor_copyback(&cc_out, len, blk);
699 * One of the problems with CCM+CBC is that
700 * the authentication is done on the
701 * unencrypted data. As a result, we have to
702 * decrypt the data twice: once to generate
703 * the tag and a second time after the tag is
706 exf->decrypt(swe->sw_kschedule, blk, blk);
707 axf->Update(&ctx, blk, len);
712 axf->Final(aalg, &ctx);
715 if (!CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
716 crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
719 r = timingsafe_bcmp(aalg, uaalg, swa->sw_mlen);
723 /* tag matches, decrypt data */
/* Second decryption pass: reinit restores the keystream/counter. */
724 exf->reinit(swe->sw_kschedule, iv);
725 crypto_cursor_init(&cc_in, &crp->crp_buf);
726 crypto_cursor_advance(&cc_in, crp->crp_payload_start);
727 for (resid = crp->crp_payload_length; resid > 0;
729 len = MIN(resid, blksz);
732 crypto_cursor_copydata(&cc_in, len, blk);
733 exf->decrypt(swe->sw_kschedule, blk, blk);
734 crypto_cursor_copyback(&cc_out, len, blk);
737 /* Inject the authentication data */
738 crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen,
746 * Apply a cipher and a digest to perform EtA.
/*
 * Encrypt-then-Authenticate: on encryption, cipher first and MAC the
 * result; on decryption, verify the MAC before deciphering.
 */
749 swcr_eta(struct swcr_session *ses, struct cryptop *crp)
753 if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
754 error = swcr_encdec(ses, crp);
756 error = swcr_authcompute(ses, crp);
758 error = swcr_authcompute(ses, crp);
760 error = swcr_encdec(ses, crp);
766 * Apply a compression/decompression algorithm
/*
 * (De)compresses the request payload through the session's comp_algo.
 * The payload is linearized into a temporary buffer first, and the
 * result is copied back, shrinking the mbuf chain / uio as needed.
 */
769 swcr_compdec(struct swcr_session *ses, struct cryptop *crp)
771 u_int8_t *data, *out;
772 struct comp_algo *cxf;
776 cxf = ses->swcr_compdec.sw_cxf;
778 /* The compressor needs the whole payload contiguously, so if the
779 * data is scattered across mbufs we must first
783 data = malloc(crp->crp_payload_length, M_CRYPTO_DATA, M_NOWAIT);
786 crypto_copydata(crp, crp->crp_payload_start, crp->crp_payload_length,
789 if (CRYPTO_OP_IS_COMPRESS(crp->crp_op))
790 result = cxf->compress(data, crp->crp_payload_length, &out);
792 result = cxf->decompress(data, crp->crp_payload_length, &out);
794 free(data, M_CRYPTO_DATA);
797 crp->crp_olen = result;
799 /* Check the compressed size when doing compression */
800 if (CRYPTO_OP_IS_COMPRESS(crp->crp_op)) {
801 if (result >= crp->crp_payload_length) {
802 /* Compression was useless, we lost time */
803 free(out, M_CRYPTO_DATA);
808 /* Copy back the (de)compressed data. m_copyback is
809 * extending the mbuf as necessary.
811 crypto_copyback(crp, crp->crp_payload_start, result, out);
/* Result shrank: trim the excess from the underlying buffer. */
812 if (result < crp->crp_payload_length) {
813 switch (crp->crp_buf.cb_type) {
814 case CRYPTO_BUF_MBUF:
815 adj = result - crp->crp_payload_length;
816 m_adj(crp->crp_buf.cb_mbuf, adj);
818 case CRYPTO_BUF_UIO: {
819 struct uio *uio = crp->crp_buf.cb_uio;
822 adj = crp->crp_payload_length - result;
823 ind = uio->uio_iovcnt - 1;
/* Shorten iovecs from the tail until 'adj' bytes are dropped. */
825 while (adj > 0 && ind >= 0) {
826 if (adj < uio->uio_iov[ind].iov_len) {
827 uio->uio_iov[ind].iov_len -= adj;
831 adj -= uio->uio_iov[ind].iov_len;
832 uio->uio_iov[ind].iov_len = 0;
842 free(out, M_CRYPTO_DATA);
/*
 * Allocate the cipher key schedule for a new session and, if the key
 * is supplied at session-creation time, expand it now.
 */
847 swcr_setup_cipher(struct swcr_session *ses,
848 const struct crypto_session_params *csp)
850 struct swcr_encdec *swe;
851 struct enc_xform *txf;
854 swe = &ses->swcr_encdec;
855 txf = crypto_cipher(csp);
856 MPASS(txf->ivsize == csp->csp_ivlen);
/* Some transforms (e.g. null cipher) need no context. */
857 if (txf->ctxsize != 0) {
858 swe->sw_kschedule = malloc(txf->ctxsize, M_CRYPTO_DATA,
860 if (swe->sw_kschedule == NULL)
863 if (csp->csp_cipher_key != NULL) {
864 error = txf->setkey(swe->sw_kschedule,
865 csp->csp_cipher_key, csp->csp_cipher_klen);
/*
 * Set up the auth side of a session: validate the requested MAC
 * length, allocate context(s), key them if a key is available, and
 * select the digest-mode process handler.
 */
874 swcr_setup_auth(struct swcr_session *ses,
875 const struct crypto_session_params *csp)
877 struct swcr_auth *swa;
878 struct auth_hash *axf;
880 swa = &ses->swcr_auth;
882 axf = crypto_auth_hash(csp);
/* csp_auth_mlen == 0 means "full hash size". */
884 if (csp->csp_auth_mlen < 0 || csp->csp_auth_mlen > axf->hashsize)
886 if (csp->csp_auth_mlen == 0)
887 swa->sw_mlen = axf->hashsize;
889 swa->sw_mlen = csp->csp_auth_mlen;
890 swa->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA, M_NOWAIT);
891 if (swa->sw_ictx == NULL)
894 switch (csp->csp_auth_alg) {
895 case CRYPTO_SHA1_HMAC:
896 case CRYPTO_SHA2_224_HMAC:
897 case CRYPTO_SHA2_256_HMAC:
898 case CRYPTO_SHA2_384_HMAC:
899 case CRYPTO_SHA2_512_HMAC:
900 case CRYPTO_NULL_HMAC:
901 case CRYPTO_RIPEMD160_HMAC:
/* HMAC also needs an outer (opad) context. */
902 swa->sw_octx = malloc(axf->ctxsize, M_CRYPTO_DATA,
904 if (swa->sw_octx == NULL)
907 if (csp->csp_auth_key != NULL) {
908 swcr_authprepare(axf, swa, csp->csp_auth_key,
912 if (csp->csp_mode == CSP_MODE_DIGEST)
913 ses->swcr_process = swcr_authcompute;
916 case CRYPTO_SHA2_224:
917 case CRYPTO_SHA2_256:
918 case CRYPTO_SHA2_384:
919 case CRYPTO_SHA2_512:
920 axf->Init(swa->sw_ictx);
921 if (csp->csp_mode == CSP_MODE_DIGEST)
922 ses->swcr_process = swcr_authcompute;
924 case CRYPTO_AES_NIST_GMAC:
925 axf->Init(swa->sw_ictx);
926 axf->Setkey(swa->sw_ictx, csp->csp_auth_key,
928 if (csp->csp_mode == CSP_MODE_DIGEST)
929 ses->swcr_process = swcr_gmac;
931 case CRYPTO_POLY1305:
935 * Blake2b and Blake2s support an optional key but do
938 if (csp->csp_auth_klen == 0 || csp->csp_auth_key != NULL)
939 axf->Setkey(swa->sw_ictx, csp->csp_auth_key,
941 axf->Init(swa->sw_ictx);
942 if (csp->csp_mode == CSP_MODE_DIGEST)
943 ses->swcr_process = swcr_authcompute;
945 case CRYPTO_AES_CCM_CBC_MAC:
946 axf->Init(swa->sw_ictx);
947 axf->Setkey(swa->sw_ictx, csp->csp_auth_key,
949 if (csp->csp_mode == CSP_MODE_DIGEST)
950 ses->swcr_process = swcr_ccm_cbc_mac;
/*
 * AEAD (GCM) session setup: pick the GMAC auth transform matching the
 * AES key size, set up the auth context, then the cipher side.
 */
958 swcr_setup_gcm(struct swcr_session *ses,
959 const struct crypto_session_params *csp)
961 struct swcr_auth *swa;
962 struct auth_hash *axf;
964 if (csp->csp_ivlen != AES_GCM_IV_LEN)
967 /* First, setup the auth side. */
968 swa = &ses->swcr_auth;
969 switch (csp->csp_cipher_klen * 8) {
971 axf = &auth_hash_nist_gmac_aes_128;
974 axf = &auth_hash_nist_gmac_aes_192;
977 axf = &auth_hash_nist_gmac_aes_256;
983 if (csp->csp_auth_mlen < 0 || csp->csp_auth_mlen > axf->hashsize)
985 if (csp->csp_auth_mlen == 0)
986 swa->sw_mlen = axf->hashsize;
988 swa->sw_mlen = csp->csp_auth_mlen;
989 swa->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA, M_NOWAIT);
990 if (swa->sw_ictx == NULL)
992 axf->Init(swa->sw_ictx);
/* GMAC is keyed with the cipher key for AEAD sessions. */
993 if (csp->csp_cipher_key != NULL)
994 axf->Setkey(swa->sw_ictx, csp->csp_cipher_key,
995 csp->csp_cipher_klen);
997 /* Second, setup the cipher side. */
998 return (swcr_setup_cipher(ses, csp));
/*
 * AEAD (CCM) session setup: mirror of swcr_setup_gcm() using the
 * CBC-MAC auth transform for the AES key size in use.
 */
1002 swcr_setup_ccm(struct swcr_session *ses,
1003 const struct crypto_session_params *csp)
1005 struct swcr_auth *swa;
1006 struct auth_hash *axf;
1008 if (csp->csp_ivlen != AES_CCM_IV_LEN)
1011 /* First, setup the auth side. */
1012 swa = &ses->swcr_auth;
1013 switch (csp->csp_cipher_klen * 8) {
1015 axf = &auth_hash_ccm_cbc_mac_128;
1018 axf = &auth_hash_ccm_cbc_mac_192;
1021 axf = &auth_hash_ccm_cbc_mac_256;
1027 if (csp->csp_auth_mlen < 0 || csp->csp_auth_mlen > axf->hashsize)
1029 if (csp->csp_auth_mlen == 0)
1030 swa->sw_mlen = axf->hashsize;
1032 swa->sw_mlen = csp->csp_auth_mlen;
1033 swa->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA, M_NOWAIT);
1034 if (swa->sw_ictx == NULL)
1036 axf->Init(swa->sw_ictx);
/* CBC-MAC is keyed with the cipher key for AEAD sessions. */
1037 if (csp->csp_cipher_key != NULL)
1038 axf->Setkey(swa->sw_ictx, csp->csp_cipher_key,
1039 csp->csp_cipher_klen);
1041 /* Second, setup the cipher side. */
1042 return (swcr_setup_cipher(ses, csp));
/*
 * Check whether this driver can service the requested auth algorithm
 * with the given key length / IV length constraints.
 */
1046 swcr_auth_supported(const struct crypto_session_params *csp)
1048 struct auth_hash *axf;
1050 axf = crypto_auth_hash(csp);
1053 switch (csp->csp_auth_alg) {
1054 case CRYPTO_SHA1_HMAC:
1055 case CRYPTO_SHA2_224_HMAC:
1056 case CRYPTO_SHA2_256_HMAC:
1057 case CRYPTO_SHA2_384_HMAC:
1058 case CRYPTO_SHA2_512_HMAC:
1059 case CRYPTO_NULL_HMAC:
1060 case CRYPTO_RIPEMD160_HMAC:
1062 case CRYPTO_AES_NIST_GMAC:
/* GMAC: only AES key sizes, key required at session time. */
1063 switch (csp->csp_auth_klen * 8) {
1071 if (csp->csp_auth_key == NULL)
1073 if (csp->csp_ivlen != AES_GCM_IV_LEN)
1076 case CRYPTO_POLY1305:
1077 if (csp->csp_auth_klen != POLY1305_KEY_LEN)
1080 case CRYPTO_AES_CCM_CBC_MAC:
/* CBC-MAC: only AES key sizes, key required at session time. */
1081 switch (csp->csp_auth_klen * 8) {
1089 if (csp->csp_auth_key == NULL)
1091 if (csp->csp_ivlen != AES_CCM_IV_LEN)
/*
 * Check whether this driver can service the requested cipher; the
 * null cipher is exempt from the IV-length check.
 */
1099 swcr_cipher_supported(const struct crypto_session_params *csp)
1101 struct enc_xform *txf;
1103 txf = crypto_cipher(csp);
1106 if (csp->csp_cipher_alg != CRYPTO_NULL_CBC &&
1107 txf->ivsize != csp->csp_ivlen)
/*
 * cryptodev probesession hook: decide per session mode whether the
 * software driver supports the parameter combination.  AEAD-only
 * algorithms (GCM/CCM) are rejected in plain CIPHER and ETA modes.
 */
1113 swcr_probesession(device_t dev, const struct crypto_session_params *csp)
1116 if ((csp->csp_flags & ~(CSP_F_SEPARATE_OUTPUT)) != 0)
1118 switch (csp->csp_mode) {
1119 case CSP_MODE_COMPRESS:
1120 switch (csp->csp_cipher_alg) {
1121 case CRYPTO_DEFLATE_COMP:
1127 case CSP_MODE_CIPHER:
1128 switch (csp->csp_cipher_alg) {
1129 case CRYPTO_AES_NIST_GCM_16:
1130 case CRYPTO_AES_CCM_16:
1133 if (!swcr_cipher_supported(csp))
1138 case CSP_MODE_DIGEST:
1139 if (!swcr_auth_supported(csp))
1143 switch (csp->csp_cipher_alg) {
1144 case CRYPTO_AES_NIST_GCM_16:
1145 case CRYPTO_AES_CCM_16:
1152 /* AEAD algorithms cannot be used for EtA. */
1153 switch (csp->csp_cipher_alg) {
1154 case CRYPTO_AES_NIST_GCM_16:
1155 case CRYPTO_AES_CCM_16:
1158 switch (csp->csp_auth_alg) {
1159 case CRYPTO_AES_NIST_GMAC:
1160 case CRYPTO_AES_CCM_CBC_MAC:
1164 if (!swcr_cipher_supported(csp) ||
1165 !swcr_auth_supported(csp))
/* Software fallback: lowest probe priority. */
1172 return (CRYPTODEV_PROBE_SOFTWARE);
1176 * Generate a new software session.
/*
 * cryptodev newsession hook: initialize per-session state and bind
 * ses->swcr_process to the handler for the requested mode/algorithm.
 * On any setup error the partially built session is torn down via
 * swcr_freesession().
 */
1179 swcr_newsession(device_t dev, crypto_session_t cses,
1180 const struct crypto_session_params *csp)
1182 struct swcr_session *ses;
1183 struct swcr_encdec *swe;
1184 struct swcr_auth *swa;
1185 struct comp_algo *cxf;
1188 ses = crypto_get_driver_session(cses);
1189 mtx_init(&ses->swcr_lock, "swcr session lock", NULL, MTX_DEF);
1192 swe = &ses->swcr_encdec;
1193 swa = &ses->swcr_auth;
1194 switch (csp->csp_mode) {
1195 case CSP_MODE_COMPRESS:
1196 switch (csp->csp_cipher_alg) {
1197 case CRYPTO_DEFLATE_COMP:
1198 cxf = &comp_algo_deflate;
/* Unreachable: probesession already filtered the algorithms. */
1202 panic("bad compression algo");
1205 ses->swcr_compdec.sw_cxf = cxf;
1206 ses->swcr_process = swcr_compdec;
1208 case CSP_MODE_CIPHER:
1209 switch (csp->csp_cipher_alg) {
1210 case CRYPTO_NULL_CBC:
1211 ses->swcr_process = swcr_null;
1214 case CRYPTO_AES_NIST_GCM_16:
1215 case CRYPTO_AES_CCM_16:
1216 panic("bad cipher algo");
1219 error = swcr_setup_cipher(ses, csp);
1221 ses->swcr_process = swcr_encdec;
1224 case CSP_MODE_DIGEST:
1225 error = swcr_setup_auth(ses, csp);
1228 switch (csp->csp_cipher_alg) {
1229 case CRYPTO_AES_NIST_GCM_16:
1230 error = swcr_setup_gcm(ses, csp);
1232 ses->swcr_process = swcr_gcm;
1234 case CRYPTO_AES_CCM_16:
1235 error = swcr_setup_ccm(ses, csp);
1237 ses->swcr_process = swcr_ccm;
1241 panic("bad aead algo");
1247 switch (csp->csp_cipher_alg) {
1248 case CRYPTO_AES_NIST_GCM_16:
1249 case CRYPTO_AES_CCM_16:
1250 panic("bad eta cipher algo");
1252 switch (csp->csp_auth_alg) {
1253 case CRYPTO_AES_NIST_GMAC:
1254 case CRYPTO_AES_CCM_CBC_MAC:
1255 panic("bad eta auth algo");
1259 error = swcr_setup_auth(ses, csp);
1262 if (csp->csp_cipher_alg == CRYPTO_NULL_CBC) {
1263 /* Effectively degrade to digest mode. */
1264 ses->swcr_process = swcr_authcompute;
1268 error = swcr_setup_cipher(ses, csp);
1270 ses->swcr_process = swcr_eta;
/* Cleanup path: release anything allocated above. */
1277 swcr_freesession(dev, cses);
/*
 * cryptodev freesession hook: zero and free key material (key
 * schedule, inner/outer auth contexts) and destroy the session lock.
 */
1282 swcr_freesession(device_t dev, crypto_session_t cses)
1284 struct swcr_session *ses;
1285 struct swcr_auth *swa;
1286 struct auth_hash *axf;
1288 ses = crypto_get_driver_session(cses);
1290 mtx_destroy(&ses->swcr_lock);
/* zfree() scrubs the key schedule before freeing it. */
1292 zfree(ses->swcr_encdec.sw_kschedule, M_CRYPTO_DATA);
1294 axf = ses->swcr_auth.sw_axf;
1296 swa = &ses->swcr_auth;
1297 if (swa->sw_ictx != NULL) {
1298 explicit_bzero(swa->sw_ictx, axf->ctxsize);
1299 free(swa->sw_ictx, M_CRYPTO_DATA);
1301 if (swa->sw_octx != NULL) {
1302 explicit_bzero(swa->sw_octx, axf->ctxsize);
1303 free(swa->sw_octx, M_CRYPTO_DATA);
1309 * Process a software request.
/*
 * cryptodev process hook: dispatch to the handler chosen at session
 * setup, serialized by the per-session mutex.
 */
1312 swcr_process(device_t dev, struct cryptop *crp, int hint)
1314 struct swcr_session *ses;
1316 ses = crypto_get_driver_session(crp->crp_session);
1317 mtx_lock(&ses->swcr_lock);
1319 crp->crp_etype = ses->swcr_process(ses, crp);
1321 mtx_unlock(&ses->swcr_lock);
/* Bus identify hook: attach exactly one "cryptosoft" child. */
1327 swcr_identify(driver_t *drv, device_t parent)
1329 /* NB: order 10 is so we get attached after h/w devices */
1330 if (device_find_child(parent, "cryptosoft", -1) == NULL &&
1331 BUS_ADD_CHILD(parent, 10, "cryptosoft", 0) == 0)
1332 panic("cryptosoft: could not attach");
/* Bus probe hook: always matches (identify created the device). */
1336 swcr_probe(device_t dev)
1338 device_set_desc(dev, "software crypto");
1339 return (BUS_PROBE_NOWILDCARD);
/*
 * Bus attach hook: register with the opencrypto framework as a
 * synchronous software driver.
 */
1343 swcr_attach(device_t dev)
1346 swcr_id = crypto_get_driverid(dev, sizeof(struct swcr_session),
1347 CRYPTOCAP_F_SOFTWARE | CRYPTOCAP_F_SYNC);
1349 device_printf(dev, "cannot initialize!");
/* Bus detach hook: unregister all algorithms from the framework. */
1357 swcr_detach(device_t dev)
1359 crypto_unregister_all(swcr_id);
/* newbus device methods plus the cryptodev KPI entry points. */
1363 static device_method_t swcr_methods[] = {
1364 DEVMETHOD(device_identify, swcr_identify),
1365 DEVMETHOD(device_probe, swcr_probe),
1366 DEVMETHOD(device_attach, swcr_attach),
1367 DEVMETHOD(device_detach, swcr_detach),
1369 DEVMETHOD(cryptodev_probesession, swcr_probesession),
1370 DEVMETHOD(cryptodev_newsession, swcr_newsession),
1371 DEVMETHOD(cryptodev_freesession,swcr_freesession),
1372 DEVMETHOD(cryptodev_process, swcr_process),
/* Driver/module glue: attach under nexus with no per-device softc. */
1377 static driver_t swcr_driver = {
1380 0, /* NB: no softc */
1382 static devclass_t swcr_devclass;
1385 * NB: We explicitly reference the crypto module so we
1386 * get the necessary ordering when built as a loadable
1387 * module. This is required because we bundle the crypto
1388 * module code together with the cryptosoft driver (otherwise
1389 * normal module dependencies would handle things).
1391 extern int crypto_modevent(struct module *, int, void *);
1392 /* XXX where to attach */
1393 DRIVER_MODULE(cryptosoft, nexus, swcr_driver, swcr_devclass, crypto_modevent,0);
1394 MODULE_VERSION(cryptosoft, 1);
1395 MODULE_DEPEND(cryptosoft, crypto, 1, 1, 1);