1 /* $OpenBSD: cryptosoft.c,v 1.35 2002/04/26 08:43:50 deraadt Exp $ */
4 * The author of this code is Angelos D. Keromytis (angelos@cis.upenn.edu)
5 * Copyright (c) 2002-2006 Sam Leffler, Errno Consulting
7 * This code was written by Angelos D. Keromytis in Athens, Greece, in
8 * February 2000. Network Security Technologies Inc. (NSTI) kindly
9 * supported the development of this code.
11 * Copyright (c) 2000, 2001 Angelos D. Keromytis
12 * Copyright (c) 2014 The FreeBSD Foundation
13 * All rights reserved.
15 * Portions of this software were developed by John-Mark Gurney
16 * under sponsorship of the FreeBSD Foundation and
17 * Rubicon Communications, LLC (Netgate).
19 * Permission to use, copy, and modify this software with or without fee
20 * is hereby granted, provided that this entire notice is included in
21 * all source code copies of any software which is or includes a copy or
22 * modification of this software.
24 * THIS SOFTWARE IS BEING PROVIDED "AS IS", WITHOUT ANY EXPRESS OR
25 * IMPLIED WARRANTY. IN PARTICULAR, NONE OF THE AUTHORS MAKES ANY
26 * REPRESENTATION OR WARRANTY OF ANY KIND CONCERNING THE
27 * MERCHANTABILITY OF THIS SOFTWARE OR ITS FITNESS FOR ANY PARTICULAR
31 #include <sys/cdefs.h>
32 __FBSDID("$FreeBSD$");
34 #include <sys/param.h>
35 #include <sys/systm.h>
36 #include <sys/malloc.h>
38 #include <sys/module.h>
39 #include <sys/sysctl.h>
40 #include <sys/errno.h>
41 #include <sys/random.h>
42 #include <sys/kernel.h>
45 #include <sys/rwlock.h>
46 #include <sys/endian.h>
47 #include <sys/limits.h>
48 #include <sys/mutex.h>
50 #include <crypto/blowfish/blowfish.h>
51 #include <crypto/sha1.h>
52 #include <opencrypto/rmd160.h>
53 #include <opencrypto/cast.h>
54 #include <opencrypto/skipjack.h>
57 #include <opencrypto/cryptodev.h>
58 #include <opencrypto/cryptosoft.h>
59 #include <opencrypto/xform.h>
63 #include "cryptodev_if.h"
65 static int32_t swcr_id;
67 u_int8_t hmac_ipad_buffer[HMAC_MAX_BLOCK_LEN];
68 u_int8_t hmac_opad_buffer[HMAC_MAX_BLOCK_LEN];
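/*
 * The ipad/opad pad buffers are filled with HMAC_IPAD_VAL/HMAC_OPAD_VAL
 * in swcr_attach() and are used by swcr_authprepare() to pad HMAC keys
 * shorter than the hash block size.
 */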
70 static int swcr_encdec(struct cryptodesc *, struct swcr_data *, caddr_t, int);
71 static int swcr_authcompute(struct cryptodesc *, struct swcr_data *, caddr_t, int);
72 static int swcr_authenc(struct cryptop *crp);
73 static int swcr_compdec(struct cryptodesc *, struct swcr_data *, caddr_t, int);
74 static void swcr_freesession(device_t dev, crypto_session_t cses);
77 * Apply a symmetric encryption/decryption algorithm.
80 swcr_encdec(struct cryptodesc *crd, struct swcr_data *sw, caddr_t buf,
83 unsigned char iv[EALG_MAX_BLOCK_LEN], blk[EALG_MAX_BLOCK_LEN];
84 unsigned char *ivp, *nivp, iv2[EALG_MAX_BLOCK_LEN];
85 struct enc_xform *exf;
86 int i, j, k, blks, ind, count, ivlen;
87 struct uio *uio, uiolcl;
88 struct iovec iovlcl[4];
96 blks = exf->blocksize;
99 /* Check for non-padded data */
100 if (crd->crd_len % blks)
103 if (crd->crd_alg == CRYPTO_AES_ICM &&
104 (crd->crd_flags & CRD_F_IV_EXPLICIT) == 0)
107 /* Initialize the IV */
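/*
 * IV selection, in short: with CRD_F_IV_EXPLICIT the caller supplies the
 * IV in crd_iv; otherwise a random IV is generated when encrypting, or
 * the IV is read from the buffer at crd_inject when decrypting.  Unless
 * CRD_F_IV_PRESENT is set, a generated IV is also written back to the
 * buffer at crd_inject so the other side can find it.
 */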
108 if (crd->crd_flags & CRD_F_ENCRYPT) {
109 /* IV explicitly provided ? */
110 if (crd->crd_flags & CRD_F_IV_EXPLICIT)
111 bcopy(crd->crd_iv, iv, ivlen);
113 arc4rand(iv, ivlen, 0);
115 /* Do we need to write the IV */
116 if (!(crd->crd_flags & CRD_F_IV_PRESENT))
117 crypto_copyback(flags, buf, crd->crd_inject, ivlen, iv);
119 } else { /* Decryption */
120 /* IV explicitly provided ? */
121 if (crd->crd_flags & CRD_F_IV_EXPLICIT)
122 bcopy(crd->crd_iv, iv, ivlen);
125 crypto_copydata(flags, buf, crd->crd_inject, ivlen, iv);
129 if (crd->crd_flags & CRD_F_KEY_EXPLICIT) {
132 if (sw->sw_kschedule)
133 exf->zerokey(&(sw->sw_kschedule));
135 error = exf->setkey(&sw->sw_kschedule,
136 crd->crd_key, crd->crd_klen / 8);
142 iovcnt = nitems(iovlcl);
145 if ((flags & CRYPTO_F_IMBUF) != 0) {
146 error = crypto_mbuftoiov((struct mbuf *)buf, &iov, &iovcnt,
151 uio->uio_iovcnt = iovcnt;
152 } else if ((flags & CRYPTO_F_IOV) != 0)
153 uio = (struct uio *)buf;
155 iov[0].iov_base = buf;
156 iov[0].iov_len = crd->crd_skip + crd->crd_len;
165 * xforms that provide a reinit method perform all IV
166 * handling themselves.
168 exf->reinit(sw->sw_kschedule, iv);
171 count = crd->crd_skip;
172 ind = cuio_getptr(uio, count, &k);
182 * If there's insufficient data at the end of
183 * an iovec, we have to do some copying.
185 if (uio->uio_iov[ind].iov_len < k + blks &&
186 uio->uio_iov[ind].iov_len != k) {
187 cuio_copydata(uio, count, blks, blk);
189 /* Actual encryption/decryption */
191 if (crd->crd_flags & CRD_F_ENCRYPT) {
192 exf->encrypt(sw->sw_kschedule,
195 exf->decrypt(sw->sw_kschedule,
198 } else if (crd->crd_flags & CRD_F_ENCRYPT) {
199 /* XOR with previous block */
200 for (j = 0; j < blks; j++)
203 exf->encrypt(sw->sw_kschedule, blk);
206 * Keep encrypted block for XOR'ing
209 bcopy(blk, iv, blks);
211 } else { /* decrypt */
213 * Keep encrypted block for XOR'ing
216 nivp = (ivp == iv) ? iv2 : iv;
217 bcopy(blk, nivp, blks);
219 exf->decrypt(sw->sw_kschedule, blk);
221 /* XOR with previous block */
222 for (j = 0; j < blks; j++)
228 /* Copy back decrypted block */
229 cuio_copyback(uio, count, blks, blk);
233 /* Advance pointer */
234 ind = cuio_getptr(uio, count, &k);
242 /* Could be done... */
247 while (uio->uio_iov[ind].iov_len >= k + blks && i > 0) {
253 uio->uio_iov[ind].iov_len - (size_t)k);
254 idat = (uint8_t *)uio->uio_iov[ind].iov_base + k;
257 if ((crd->crd_flags & CRD_F_ENCRYPT) != 0 &&
258 exf->encrypt_multi == NULL)
259 exf->encrypt(sw->sw_kschedule,
261 else if ((crd->crd_flags & CRD_F_ENCRYPT) != 0) {
262 nb = rounddown(rem, blks);
263 exf->encrypt_multi(sw->sw_kschedule,
265 } else if (exf->decrypt_multi == NULL)
266 exf->decrypt(sw->sw_kschedule,
269 nb = rounddown(rem, blks);
270 exf->decrypt_multi(sw->sw_kschedule,
273 } else if (crd->crd_flags & CRD_F_ENCRYPT) {
274 /* XOR with previous block/IV */
275 for (j = 0; j < blks; j++)
278 exf->encrypt(sw->sw_kschedule, idat);
280 } else { /* decrypt */
282 * Keep encrypted block to be used
283 * in next block's processing.
285 nivp = (ivp == iv) ? iv2 : iv;
286 bcopy(idat, nivp, blks);
288 exf->decrypt(sw->sw_kschedule, idat);
290 /* XOR with previous block/IV */
291 for (j = 0; j < blks; j++)
303 * Advance to the next iov if the end of the current iov
304 * is aligned with the end of a cipher block.
305 * Note that the code is equivalent to calling:
306 * ind = cuio_getptr(uio, count, &k);
308 if (i > 0 && k == uio->uio_iov[ind].iov_len) {
311 if (ind >= uio->uio_iovcnt) {
320 free(iov, M_CRYPTO_DATA);
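/*
 * Illustrative sketch, not driver code: the CBC chaining performed by the
 * loops above, reduced to its essentials.  The name cbc_encrypt_sketch,
 * the block_encrypt callback and the fixed 16-byte block size are
 * assumptions for this example only; exf->encrypt plays the role of
 * block_encrypt above.
 */
#if 0
static void
cbc_encrypt_sketch(void *key, void (*block_encrypt)(void *, uint8_t *),
    uint8_t *data, size_t len, const uint8_t iv[16])
{
	uint8_t prev[16];
	size_t off, j;

	memcpy(prev, iv, sizeof(prev));
	for (off = 0; off + 16 <= len; off += 16) {
		for (j = 0; j < 16; j++)	/* XOR with previous block/IV */
			data[off + j] ^= prev[j];
		block_encrypt(key, &data[off]);	/* encrypt block in place */
		memcpy(prev, &data[off], 16);	/* ciphertext chains forward */
	}
}
#endif
/*
 * Decryption runs the chain in reverse: save the ciphertext block first,
 * decrypt it, then XOR with the previous ciphertext (or the IV), which is
 * what the iv/iv2 ping-pong buffers above implement.
 */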
325 static int __result_use_check
326 swcr_authprepare(struct auth_hash *axf, struct swcr_data *sw, u_char *key,
334 case CRYPTO_MD5_HMAC:
335 case CRYPTO_SHA1_HMAC:
336 case CRYPTO_SHA2_224_HMAC:
337 case CRYPTO_SHA2_256_HMAC:
338 case CRYPTO_SHA2_384_HMAC:
339 case CRYPTO_SHA2_512_HMAC:
340 case CRYPTO_NULL_HMAC:
341 case CRYPTO_RIPEMD160_HMAC:
342 for (k = 0; k < klen; k++)
343 key[k] ^= HMAC_IPAD_VAL;
345 axf->Init(sw->sw_ictx);
346 axf->Update(sw->sw_ictx, key, klen);
347 axf->Update(sw->sw_ictx, hmac_ipad_buffer, axf->blocksize - klen);
349 for (k = 0; k < klen; k++)
350 key[k] ^= (HMAC_IPAD_VAL ^ HMAC_OPAD_VAL);
352 axf->Init(sw->sw_octx);
353 axf->Update(sw->sw_octx, key, klen);
354 axf->Update(sw->sw_octx, hmac_opad_buffer, axf->blocksize - klen);
356 for (k = 0; k < klen; k++)
357 key[k] ^= HMAC_OPAD_VAL;
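/*
 * For reference, what the XOR dance above prepares is the standard HMAC
 * construction (RFC 2104):
 *	HMAC(K, m) = H((K ^ opad) || H((K ^ ipad) || m))
 * sw_ictx caches the hash state after absorbing K ^ ipad (padded to the
 * block size) and sw_octx the state after K ^ opad, so swcr_authcompute()
 * only has to continue the two saved contexts for each message.
 */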
359 case CRYPTO_MD5_KPDK:
360 case CRYPTO_SHA1_KPDK:
363 * We need a buffer that can hold an md5 and a sha1 result
364 * just to throw it away.
365 * What we do here is the initial part of:
366 * ALGO( key, keyfill, .. )
367 * adding the key to sw_ictx and abusing Final() to get the
368 * "keyfill" padding.
369 * In addition, we abuse sw_octx to save the key so that we can
370 * append it at the end in swcr_authcompute().
372 u_char buf[SHA1_RESULTLEN];
375 bcopy(key, sw->sw_octx, klen);
376 axf->Init(sw->sw_ictx);
377 axf->Update(sw->sw_ictx, key, klen);
378 axf->Final(buf, sw->sw_ictx);
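/*
 * Net effect of the KPDK scheme, spelled out: the MAC eventually emitted
 * by swcr_authcompute() is
 *	ALGO(key || keyfill || data || key || algofill)
 * The "key || keyfill" prefix is what was just folded into sw_ictx; the
 * trailing key (stashed in sw_octx) and the hash's natural padding are
 * appended at MAC time.
 */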
381 case CRYPTO_POLY1305:
382 if (klen != POLY1305_KEY_LEN) {
383 CRYPTDEB("bad poly1305 key size %d", klen);
389 axf->Setkey(sw->sw_ictx, key, klen);
390 axf->Init(sw->sw_ictx);
393 printf("%s: CRD_F_KEY_EXPLICIT flag given, but algorithm %d "
394 "doesn't use keys.\n", __func__, axf->type);
401 * Compute keyed-hash authenticator.
404 swcr_authcompute(struct cryptodesc *crd, struct swcr_data *sw, caddr_t buf,
407 unsigned char aalg[HASH_MAX_LEN];
408 struct auth_hash *axf;
412 if (sw->sw_ictx == NULL)
417 if (crd->crd_flags & CRD_F_KEY_EXPLICIT) {
418 err = swcr_authprepare(axf, sw, crd->crd_key, crd->crd_klen);
423 bcopy(sw->sw_ictx, &ctx, axf->ctxsize);
425 err = crypto_apply(flags, buf, crd->crd_skip, crd->crd_len,
426 (int (*)(void *, void *, unsigned int))axf->Update, (caddr_t)&ctx);
430 switch (sw->sw_alg) {
432 case CRYPTO_SHA2_224:
433 case CRYPTO_SHA2_256:
434 case CRYPTO_SHA2_384:
435 case CRYPTO_SHA2_512:
436 axf->Final(aalg, &ctx);
439 case CRYPTO_MD5_HMAC:
440 case CRYPTO_SHA1_HMAC:
441 case CRYPTO_SHA2_224_HMAC:
442 case CRYPTO_SHA2_256_HMAC:
443 case CRYPTO_SHA2_384_HMAC:
444 case CRYPTO_SHA2_512_HMAC:
445 case CRYPTO_RIPEMD160_HMAC:
446 if (sw->sw_octx == NULL)
449 axf->Final(aalg, &ctx);
450 bcopy(sw->sw_octx, &ctx, axf->ctxsize);
451 axf->Update(&ctx, aalg, axf->hashsize);
452 axf->Final(aalg, &ctx);
455 case CRYPTO_MD5_KPDK:
456 case CRYPTO_SHA1_KPDK:
457 /* If we have no key saved, return error. */
458 if (sw->sw_octx == NULL)
462 * Add the trailing copy of the key (see comment in
463 * swcr_authprepare()) after the data:
464 * ALGO( .., key, algofill )
465 * and let Final() do the proper, natural "algofill"
468 axf->Update(&ctx, sw->sw_octx, sw->sw_klen);
469 axf->Final(aalg, &ctx);
474 case CRYPTO_NULL_HMAC:
475 case CRYPTO_POLY1305:
476 axf->Final(aalg, &ctx);
480 /* Inject the authentication data */
481 crypto_copyback(flags, buf, crd->crd_inject,
482 sw->sw_mlen == 0 ? axf->hashsize : sw->sw_mlen, aalg);
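/*
 * Illustrative sketch, not driver code: finishing an HMAC from the cached
 * contexts, as the HMAC cases above do.  It assumes a contiguous message
 * buffer (the driver streams data through crypto_apply() instead) and a
 * hypothetical name; taglen corresponds to sw_mlen when a truncated MAC
 * was requested, or axf->hashsize otherwise.
 */
#if 0
static void
hmac_finish_sketch(struct auth_hash *axf, struct swcr_data *sw,
    u_int8_t *msg, u_int msglen, u_int8_t *tag, u_int taglen)
{
	union authctx ctx;
	u_char digest[HASH_MAX_LEN];

	bcopy(sw->sw_ictx, &ctx, axf->ctxsize);		/* inner hash */
	axf->Update(&ctx, msg, msglen);
	axf->Final(digest, &ctx);

	bcopy(sw->sw_octx, &ctx, axf->ctxsize);		/* outer hash */
	axf->Update(&ctx, digest, axf->hashsize);
	axf->Final(digest, &ctx);

	bcopy(digest, tag, taglen);
}
#endif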
486 CTASSERT(INT_MAX <= (1ll<<39) - 256); /* GCM: plain text < 2^39-256 */
487 CTASSERT(INT_MAX <= (uint64_t)-1); /* GCM: associated data <= 2^64-1 */
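/*
 * Put differently: crd_len is an int, so a single request can describe at
 * most INT_MAX = 2^31 - 1 bytes, which is comfortably below both limits
 * encoded above (2^39 - 256 for GCM plaintext and 2^64 - 1 for associated
 * data); the asserts merely pin that assumption down at compile time.
 */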
490 * Apply a combined encryption-authentication transformation
493 swcr_authenc(struct cryptop *crp)
495 uint32_t blkbuf[howmany(EALG_MAX_BLOCK_LEN, sizeof(uint32_t))];
496 u_char *blk = (u_char *)blkbuf;
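/*
 * blk overlays a uint32_t array so the word-sized stores through blkp in
 * the GMAC length-block finalization below stay naturally aligned.
 */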
497 u_char aalg[AALG_MAX_RESULT_LEN];
498 u_char uaalg[AALG_MAX_RESULT_LEN];
499 u_char iv[EALG_MAX_BLOCK_LEN];
501 struct swcr_session *ses;
502 struct cryptodesc *crd, *crda = NULL, *crde = NULL;
503 struct swcr_data *sw, *swa, *swe = NULL;
504 struct auth_hash *axf = NULL;
505 struct enc_xform *exf = NULL;
506 caddr_t buf = (caddr_t)crp->crp_buf;
508 int aadlen, blksz, i, ivlen, len, iskip, oskip, r;
510 ivlen = blksz = iskip = oskip = 0;
512 ses = crypto_get_driver_session(crp->crp_session);
514 for (crd = crp->crp_desc; crd; crd = crd->crd_next) {
515 for (i = 0; i < nitems(ses->swcr_algorithms) &&
516 ses->swcr_algorithms[i].sw_alg != crd->crd_alg; i++)
518 if (i == nitems(ses->swcr_algorithms))
521 sw = &ses->swcr_algorithms[i];
522 switch (sw->sw_alg) {
523 case CRYPTO_AES_NIST_GCM_16:
524 case CRYPTO_AES_NIST_GMAC:
530 case CRYPTO_AES_128_NIST_GMAC:
531 case CRYPTO_AES_192_NIST_GMAC:
532 case CRYPTO_AES_256_NIST_GMAC:
536 if (swa->sw_ictx == NULL)
538 bcopy(swa->sw_ictx, &ctx, axf->ctxsize);
539 blksz = axf->blocksize;
545 if (crde == NULL || crda == NULL)
548 if (crde->crd_alg == CRYPTO_AES_NIST_GCM_16 &&
549 (crde->crd_flags & CRD_F_IV_EXPLICIT) == 0)
552 if (crde->crd_klen != crda->crd_klen)
555 /* Initialize the IV */
556 if (crde->crd_flags & CRD_F_ENCRYPT) {
557 /* IV explicitly provided ? */
558 if (crde->crd_flags & CRD_F_IV_EXPLICIT)
559 bcopy(crde->crd_iv, iv, ivlen);
561 arc4rand(iv, ivlen, 0);
563 /* Do we need to write the IV */
564 if (!(crde->crd_flags & CRD_F_IV_PRESENT))
565 crypto_copyback(crp->crp_flags, buf, crde->crd_inject,
568 } else { /* Decryption */
569 /* IV explicitly provided ? */
570 if (crde->crd_flags & CRD_F_IV_EXPLICIT)
571 bcopy(crde->crd_iv, iv, ivlen);
574 crypto_copydata(crp->crp_flags, buf, crde->crd_inject,
579 /* Supply MAC with IV */
581 axf->Reinit(&ctx, iv, ivlen);
583 /* Supply MAC with AAD */
584 aadlen = crda->crd_len;
586 for (i = iskip; i < crda->crd_len; i += blksz) {
587 len = MIN(crda->crd_len - i, blksz - oskip);
588 crypto_copydata(crp->crp_flags, buf, crda->crd_skip + i, len,
590 bzero(blk + len + oskip, blksz - len - oskip);
591 axf->Update(&ctx, blk, blksz);
592 oskip = 0; /* reset initial output offset */
596 exf->reinit(swe->sw_kschedule, iv);
598 /* Do encryption/decryption with MAC */
599 for (i = 0; i < crde->crd_len; i += len) {
600 if (exf->encrypt_multi != NULL) {
601 len = rounddown(crde->crd_len - i, blksz);
605 len = MIN(len, sizeof(blkbuf));
608 len = MIN(crde->crd_len - i, len);
611 crypto_copydata(crp->crp_flags, buf, crde->crd_skip + i, len,
613 if (crde->crd_flags & CRD_F_ENCRYPT) {
614 if (exf->encrypt_multi != NULL)
615 exf->encrypt_multi(swe->sw_kschedule, blk,
618 exf->encrypt(swe->sw_kschedule, blk);
619 axf->Update(&ctx, blk, len);
620 crypto_copyback(crp->crp_flags, buf,
621 crde->crd_skip + i, len, blk);
623 axf->Update(&ctx, blk, len);
627 /* Do any required special finalization */
628 switch (crda->crd_alg) {
629 case CRYPTO_AES_128_NIST_GMAC:
630 case CRYPTO_AES_192_NIST_GMAC:
631 case CRYPTO_AES_256_NIST_GMAC:
634 blkp = (uint32_t *)blk + 1;
635 *blkp = htobe32(aadlen * 8);
636 blkp = (uint32_t *)blk + 3;
637 *blkp = htobe32(crde->crd_len * 8);
638 axf->Update(&ctx, blk, blksz);
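/*
 * The 16-byte block just hashed is GCM's final length block: the bit
 * length of the AAD followed by the bit length of the ciphertext, each as
 * a 64-bit big-endian value.  Only the low 32-bit word of each (words 1
 * and 3) is stored; the high words stay zero because the block is cleared
 * before the stores.  Example: aadlen = 20 and crd_len = 48 give
 * 00000000 000000a0 00000000 00000180.
 */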
643 axf->Final(aalg, &ctx);
646 if (!(crde->crd_flags & CRD_F_ENCRYPT)) {
647 crypto_copydata(crp->crp_flags, buf, crda->crd_inject,
648 axf->hashsize, uaalg);
650 r = timingsafe_bcmp(aalg, uaalg, axf->hashsize);
652 /* tag matches, decrypt data */
653 for (i = 0; i < crde->crd_len; i += blksz) {
654 len = MIN(crde->crd_len - i, blksz);
657 crypto_copydata(crp->crp_flags, buf,
658 crde->crd_skip + i, len, blk);
659 exf->decrypt(swe->sw_kschedule, blk);
660 crypto_copyback(crp->crp_flags, buf,
661 crde->crd_skip + i, len, blk);
666 /* Inject the authentication data */
667 crypto_copyback(crp->crp_flags, buf, crda->crd_inject,
668 axf->hashsize, aalg);
675 * Apply a compression/decompression algorithm
678 swcr_compdec(struct cryptodesc *crd, struct swcr_data *sw,
679 caddr_t buf, int flags)
681 u_int8_t *data, *out;
682 struct comp_algo *cxf;
688 /* We must process the whole buffer of data at once, so if the data
689 * is not contiguous in the mbuf we copy it into a temporary buffer. */
693 data = malloc(crd->crd_len, M_CRYPTO_DATA, M_NOWAIT);
696 crypto_copydata(flags, buf, crd->crd_skip, crd->crd_len, data);
698 if (crd->crd_flags & CRD_F_COMP)
699 result = cxf->compress(data, crd->crd_len, &out);
701 result = cxf->decompress(data, crd->crd_len, &out);
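/*
 * For reference, the comp_algo contract the checks below rely on: the
 * compress()/decompress() hooks allocate the output buffer themselves
 * (which is why "out" is later freed with M_CRYPTO_DATA) and return the
 * number of bytes produced, 0 meaning failure.
 */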
703 free(data, M_CRYPTO_DATA);
707 /* Copy back the (de)compressed data. m_copyback will extend
708 * the mbuf as necessary. */
710 sw->sw_size = result;
711 /* Check the compressed size when doing compression */
712 if (crd->crd_flags & CRD_F_COMP) {
713 if (result >= crd->crd_len) {
714 /* Compression was useless, we lost time */
715 free(out, M_CRYPTO_DATA);
720 crypto_copyback(flags, buf, crd->crd_skip, result, out);
721 if (result < crd->crd_len) {
722 adj = result - crd->crd_len;
723 if (flags & CRYPTO_F_IMBUF) {
725 m_adj((struct mbuf *)buf, adj);
726 } else if (flags & CRYPTO_F_IOV) {
727 struct uio *uio = (struct uio *)buf;
730 adj = crd->crd_len - result;
731 ind = uio->uio_iovcnt - 1;
733 while (adj > 0 && ind >= 0) {
734 if (adj < uio->uio_iov[ind].iov_len) {
735 uio->uio_iov[ind].iov_len -= adj;
739 adj -= uio->uio_iov[ind].iov_len;
740 uio->uio_iov[ind].iov_len = 0;
746 free(out, M_CRYPTO_DATA);
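/*
 * Worked example of the iovec trim loop above: shrinking by adj = 10 with
 * iovec lengths {8, 6} first zeroes the trailing 6-byte iovec (adj becomes
 * 4) and then shortens the previous one from 8 to 4 bytes.
 */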
751 * Generate a new software session.
754 swcr_newsession(device_t dev, crypto_session_t cses, struct cryptoini *cri)
756 struct swcr_session *ses;
757 struct swcr_data *swd;
758 struct auth_hash *axf;
759 struct enc_xform *txf;
760 struct comp_algo *cxf;
765 if (cses == NULL || cri == NULL)
768 ses = crypto_get_driver_session(cses);
769 mtx_init(&ses->swcr_lock, "swcr session lock", NULL, MTX_DEF);
771 for (i = 0; cri != NULL && i < nitems(ses->swcr_algorithms); i++) {
772 swd = &ses->swcr_algorithms[i];
774 switch (cri->cri_alg) {
776 txf = &enc_xform_des;
778 case CRYPTO_3DES_CBC:
779 txf = &enc_xform_3des;
782 txf = &enc_xform_blf;
784 case CRYPTO_CAST_CBC:
785 txf = &enc_xform_cast5;
787 case CRYPTO_SKIPJACK_CBC:
788 txf = &enc_xform_skipjack;
790 case CRYPTO_RIJNDAEL128_CBC:
791 txf = &enc_xform_rijndael128;
794 txf = &enc_xform_aes_xts;
797 txf = &enc_xform_aes_icm;
799 case CRYPTO_AES_NIST_GCM_16:
800 txf = &enc_xform_aes_nist_gcm;
802 case CRYPTO_AES_NIST_GMAC:
803 txf = &enc_xform_aes_nist_gmac;
806 case CRYPTO_CAMELLIA_CBC:
807 txf = &enc_xform_camellia;
809 case CRYPTO_NULL_CBC:
810 txf = &enc_xform_null;
812 case CRYPTO_CHACHA20:
813 txf = &enc_xform_chacha20;
816 if (cri->cri_key != NULL) {
817 error = txf->setkey(&swd->sw_kschedule,
818 cri->cri_key, cri->cri_klen / 8);
820 swcr_freesession(dev, cses);
827 case CRYPTO_MD5_HMAC:
828 axf = &auth_hash_hmac_md5;
830 case CRYPTO_SHA1_HMAC:
831 axf = &auth_hash_hmac_sha1;
833 case CRYPTO_SHA2_224_HMAC:
834 axf = &auth_hash_hmac_sha2_224;
836 case CRYPTO_SHA2_256_HMAC:
837 axf = &auth_hash_hmac_sha2_256;
839 case CRYPTO_SHA2_384_HMAC:
840 axf = &auth_hash_hmac_sha2_384;
842 case CRYPTO_SHA2_512_HMAC:
843 axf = &auth_hash_hmac_sha2_512;
845 case CRYPTO_NULL_HMAC:
846 axf = &auth_hash_null;
848 case CRYPTO_RIPEMD160_HMAC:
849 axf = &auth_hash_hmac_ripemd_160;
851 swd->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA,
853 if (swd->sw_ictx == NULL) {
854 swcr_freesession(dev, cses);
858 swd->sw_octx = malloc(axf->ctxsize, M_CRYPTO_DATA,
860 if (swd->sw_octx == NULL) {
861 swcr_freesession(dev, cses);
865 if (cri->cri_key != NULL) {
866 error = swcr_authprepare(axf, swd,
867 cri->cri_key, cri->cri_klen);
869 swcr_freesession(dev, cses);
874 swd->sw_mlen = cri->cri_mlen;
878 case CRYPTO_MD5_KPDK:
879 axf = &auth_hash_key_md5;
882 case CRYPTO_SHA1_KPDK:
883 axf = &auth_hash_key_sha1;
885 swd->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA,
887 if (swd->sw_ictx == NULL) {
888 swcr_freesession(dev, cses);
892 swd->sw_octx = malloc(cri->cri_klen / 8,
893 M_CRYPTO_DATA, M_NOWAIT);
894 if (swd->sw_octx == NULL) {
895 swcr_freesession(dev, cses);
899 /* Store the key so we can "append" it to the payload */
900 if (cri->cri_key != NULL) {
901 error = swcr_authprepare(axf, swd,
902 cri->cri_key, cri->cri_klen);
904 swcr_freesession(dev, cses);
909 swd->sw_mlen = cri->cri_mlen;
914 axf = &auth_hash_md5;
919 axf = &auth_hash_sha1;
921 case CRYPTO_SHA2_224:
922 axf = &auth_hash_sha2_224;
924 case CRYPTO_SHA2_256:
925 axf = &auth_hash_sha2_256;
927 case CRYPTO_SHA2_384:
928 axf = &auth_hash_sha2_384;
930 case CRYPTO_SHA2_512:
931 axf = &auth_hash_sha2_512;
934 swd->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA,
936 if (swd->sw_ictx == NULL) {
937 swcr_freesession(dev, cses);
941 axf->Init(swd->sw_ictx);
942 swd->sw_mlen = cri->cri_mlen;
946 case CRYPTO_AES_128_NIST_GMAC:
947 axf = &auth_hash_nist_gmac_aes_128;
950 case CRYPTO_AES_192_NIST_GMAC:
951 axf = &auth_hash_nist_gmac_aes_192;
954 case CRYPTO_AES_256_NIST_GMAC:
955 axf = &auth_hash_nist_gmac_aes_256;
957 len = cri->cri_klen / 8;
958 if (len != 16 && len != 24 && len != 32) {
959 swcr_freesession(dev, cses);
963 swd->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA,
965 if (swd->sw_ictx == NULL) {
966 swcr_freesession(dev, cses);
969 axf->Init(swd->sw_ictx);
970 axf->Setkey(swd->sw_ictx, cri->cri_key, len);
975 axf = &auth_hash_blake2b;
978 axf = &auth_hash_blake2s;
980 case CRYPTO_POLY1305:
981 axf = &auth_hash_poly1305;
983 swd->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA,
985 if (swd->sw_ictx == NULL) {
986 swcr_freesession(dev, cses);
989 axf->Setkey(swd->sw_ictx, cri->cri_key,
991 axf->Init(swd->sw_ictx);
995 case CRYPTO_DEFLATE_COMP:
996 cxf = &comp_algo_deflate;
1000 swcr_freesession(dev, cses);
1004 swd->sw_alg = cri->cri_alg;
1005 cri = cri->cri_next;
1010 CRYPTDEB("Bogus session request for three or more algorithms");
1017 swcr_freesession(device_t dev, crypto_session_t cses)
1019 struct swcr_session *ses;
1020 struct swcr_data *swd;
1021 struct enc_xform *txf;
1022 struct auth_hash *axf;
1025 ses = crypto_get_driver_session(cses);
1027 mtx_destroy(&ses->swcr_lock);
1028 for (i = 0; i < nitems(ses->swcr_algorithms); i++) {
1029 swd = &ses->swcr_algorithms[i];
1031 switch (swd->sw_alg) {
1032 case CRYPTO_DES_CBC:
1033 case CRYPTO_3DES_CBC:
1034 case CRYPTO_BLF_CBC:
1035 case CRYPTO_CAST_CBC:
1036 case CRYPTO_SKIPJACK_CBC:
1037 case CRYPTO_RIJNDAEL128_CBC:
1038 case CRYPTO_AES_XTS:
1039 case CRYPTO_AES_ICM:
1040 case CRYPTO_AES_NIST_GCM_16:
1041 case CRYPTO_AES_NIST_GMAC:
1042 case CRYPTO_CAMELLIA_CBC:
1043 case CRYPTO_NULL_CBC:
1044 case CRYPTO_CHACHA20:
1047 if (swd->sw_kschedule)
1048 txf->zerokey(&(swd->sw_kschedule));
1051 case CRYPTO_MD5_HMAC:
1052 case CRYPTO_SHA1_HMAC:
1053 case CRYPTO_SHA2_224_HMAC:
1054 case CRYPTO_SHA2_256_HMAC:
1055 case CRYPTO_SHA2_384_HMAC:
1056 case CRYPTO_SHA2_512_HMAC:
1057 case CRYPTO_RIPEMD160_HMAC:
1058 case CRYPTO_NULL_HMAC:
1062 bzero(swd->sw_ictx, axf->ctxsize);
1063 free(swd->sw_ictx, M_CRYPTO_DATA);
1066 bzero(swd->sw_octx, axf->ctxsize);
1067 free(swd->sw_octx, M_CRYPTO_DATA);
1071 case CRYPTO_MD5_KPDK:
1072 case CRYPTO_SHA1_KPDK:
1076 bzero(swd->sw_ictx, axf->ctxsize);
1077 free(swd->sw_ictx, M_CRYPTO_DATA);
1080 bzero(swd->sw_octx, swd->sw_klen);
1081 free(swd->sw_octx, M_CRYPTO_DATA);
1085 case CRYPTO_BLAKE2B:
1086 case CRYPTO_BLAKE2S:
1088 case CRYPTO_POLY1305:
1090 case CRYPTO_SHA2_224:
1091 case CRYPTO_SHA2_256:
1092 case CRYPTO_SHA2_384:
1093 case CRYPTO_SHA2_512:
1094 case CRYPTO_AES_128_NIST_GMAC:
1095 case CRYPTO_AES_192_NIST_GMAC:
1096 case CRYPTO_AES_256_NIST_GMAC:
1100 explicit_bzero(swd->sw_ictx, axf->ctxsize);
1101 free(swd->sw_ictx, M_CRYPTO_DATA);
1105 case CRYPTO_DEFLATE_COMP:
1113 * Process a software request.
1116 swcr_process(device_t dev, struct cryptop *crp, int hint)
1118 struct swcr_session *ses = NULL;
1119 struct cryptodesc *crd;
1120 struct swcr_data *sw;
1127 if (crp->crp_desc == NULL || crp->crp_buf == NULL) {
1128 crp->crp_etype = EINVAL;
1132 ses = crypto_get_driver_session(crp->crp_session);
1133 mtx_lock(&ses->swcr_lock);
1135 /* Go through crypto descriptors, processing as we go */
1136 for (crd = crp->crp_desc; crd; crd = crd->crd_next) {
1138 * Find the crypto context.
1140 * XXX Note that the logic here prevents us from having
1141 * XXX the same algorithm multiple times in a session
1142 * XXX (or rather, we can but it won't give us the right
1143 * XXX results). To do that, we'd need some way of differentiating
1144 * XXX between the various instances of an algorithm (so we can
1145 * XXX locate the correct crypto context).
1147 for (i = 0; i < nitems(ses->swcr_algorithms) &&
1148 ses->swcr_algorithms[i].sw_alg != crd->crd_alg; i++)
1151 /* No such context ? */
1152 if (i == nitems(ses->swcr_algorithms)) {
1153 crp->crp_etype = EINVAL;
1156 sw = &ses->swcr_algorithms[i];
1157 switch (sw->sw_alg) {
1158 case CRYPTO_DES_CBC:
1159 case CRYPTO_3DES_CBC:
1160 case CRYPTO_BLF_CBC:
1161 case CRYPTO_CAST_CBC:
1162 case CRYPTO_SKIPJACK_CBC:
1163 case CRYPTO_RIJNDAEL128_CBC:
1164 case CRYPTO_AES_XTS:
1165 case CRYPTO_AES_ICM:
1166 case CRYPTO_CAMELLIA_CBC:
1167 case CRYPTO_CHACHA20:
1168 if ((crp->crp_etype = swcr_encdec(crd, sw,
1169 crp->crp_buf, crp->crp_flags)) != 0)
1172 case CRYPTO_NULL_CBC:
1175 case CRYPTO_MD5_HMAC:
1176 case CRYPTO_SHA1_HMAC:
1177 case CRYPTO_SHA2_224_HMAC:
1178 case CRYPTO_SHA2_256_HMAC:
1179 case CRYPTO_SHA2_384_HMAC:
1180 case CRYPTO_SHA2_512_HMAC:
1181 case CRYPTO_RIPEMD160_HMAC:
1182 case CRYPTO_NULL_HMAC:
1183 case CRYPTO_MD5_KPDK:
1184 case CRYPTO_SHA1_KPDK:
1187 case CRYPTO_SHA2_224:
1188 case CRYPTO_SHA2_256:
1189 case CRYPTO_SHA2_384:
1190 case CRYPTO_SHA2_512:
1191 case CRYPTO_BLAKE2B:
1192 case CRYPTO_BLAKE2S:
1193 case CRYPTO_POLY1305:
1194 if ((crp->crp_etype = swcr_authcompute(crd, sw,
1195 crp->crp_buf, crp->crp_flags)) != 0)
1199 case CRYPTO_AES_NIST_GCM_16:
1200 case CRYPTO_AES_NIST_GMAC:
1201 case CRYPTO_AES_128_NIST_GMAC:
1202 case CRYPTO_AES_192_NIST_GMAC:
1203 case CRYPTO_AES_256_NIST_GMAC:
1204 crp->crp_etype = swcr_authenc(crp);
1207 case CRYPTO_DEFLATE_COMP:
1208 if ((crp->crp_etype = swcr_compdec(crd, sw,
1209 crp->crp_buf, crp->crp_flags)) != 0)
1212 crp->crp_olen = (int)sw->sw_size;
1216 /* Unknown/unsupported algorithm */
1217 crp->crp_etype = EINVAL;
1224 mtx_unlock(&ses->swcr_lock);
1230 swcr_identify(driver_t *drv, device_t parent)
1232 /* NB: order 10 is so we get attached after h/w devices */
1233 if (device_find_child(parent, "cryptosoft", -1) == NULL &&
1234 BUS_ADD_CHILD(parent, 10, "cryptosoft", 0) == 0)
1235 panic("cryptosoft: could not attach");
1239 swcr_probe(device_t dev)
1241 device_set_desc(dev, "software crypto");
1242 return (BUS_PROBE_NOWILDCARD);
1246 swcr_attach(device_t dev)
1248 memset(hmac_ipad_buffer, HMAC_IPAD_VAL, HMAC_MAX_BLOCK_LEN);
1249 memset(hmac_opad_buffer, HMAC_OPAD_VAL, HMAC_MAX_BLOCK_LEN);
1251 swcr_id = crypto_get_driverid(dev, sizeof(struct swcr_session),
1252 CRYPTOCAP_F_SOFTWARE | CRYPTOCAP_F_SYNC);
1254 device_printf(dev, "cannot initialize!\n");
1257 #define REGISTER(alg) \
1258 crypto_register(swcr_id, alg, 0, 0)
1259 REGISTER(CRYPTO_DES_CBC);
1260 REGISTER(CRYPTO_3DES_CBC);
1261 REGISTER(CRYPTO_BLF_CBC);
1262 REGISTER(CRYPTO_CAST_CBC);
1263 REGISTER(CRYPTO_SKIPJACK_CBC);
1264 REGISTER(CRYPTO_NULL_CBC);
1265 REGISTER(CRYPTO_MD5_HMAC);
1266 REGISTER(CRYPTO_SHA1_HMAC);
1267 REGISTER(CRYPTO_SHA2_224_HMAC);
1268 REGISTER(CRYPTO_SHA2_256_HMAC);
1269 REGISTER(CRYPTO_SHA2_384_HMAC);
1270 REGISTER(CRYPTO_SHA2_512_HMAC);
1271 REGISTER(CRYPTO_RIPEMD160_HMAC);
1272 REGISTER(CRYPTO_NULL_HMAC);
1273 REGISTER(CRYPTO_MD5_KPDK);
1274 REGISTER(CRYPTO_SHA1_KPDK);
1275 REGISTER(CRYPTO_MD5);
1276 REGISTER(CRYPTO_SHA1);
1277 REGISTER(CRYPTO_SHA2_224);
1278 REGISTER(CRYPTO_SHA2_256);
1279 REGISTER(CRYPTO_SHA2_384);
1280 REGISTER(CRYPTO_SHA2_512);
1281 REGISTER(CRYPTO_RIJNDAEL128_CBC);
1282 REGISTER(CRYPTO_AES_XTS);
1283 REGISTER(CRYPTO_AES_ICM);
1284 REGISTER(CRYPTO_AES_NIST_GCM_16);
1285 REGISTER(CRYPTO_AES_NIST_GMAC);
1286 REGISTER(CRYPTO_AES_128_NIST_GMAC);
1287 REGISTER(CRYPTO_AES_192_NIST_GMAC);
1288 REGISTER(CRYPTO_AES_256_NIST_GMAC);
1289 REGISTER(CRYPTO_CAMELLIA_CBC);
1290 REGISTER(CRYPTO_DEFLATE_COMP);
1291 REGISTER(CRYPTO_BLAKE2B);
1292 REGISTER(CRYPTO_BLAKE2S);
1293 REGISTER(CRYPTO_CHACHA20);
1294 REGISTER(CRYPTO_POLY1305);
1301 swcr_detach(device_t dev)
1303 crypto_unregister_all(swcr_id);
1307 static device_method_t swcr_methods[] = {
1308 DEVMETHOD(device_identify, swcr_identify),
1309 DEVMETHOD(device_probe, swcr_probe),
1310 DEVMETHOD(device_attach, swcr_attach),
1311 DEVMETHOD(device_detach, swcr_detach),
1313 DEVMETHOD(cryptodev_newsession, swcr_newsession),
1314 DEVMETHOD(cryptodev_freesession,swcr_freesession),
1315 DEVMETHOD(cryptodev_process, swcr_process),
1320 static driver_t swcr_driver = {
1323 0, /* NB: no softc */
1325 static devclass_t swcr_devclass;
1328 * NB: We explicitly reference the crypto module so we
1329 * get the necessary ordering when built as a loadable
1330 * module. This is required because we bundle the crypto
1331 * module code together with the cryptosoft driver (otherwise
1332 * normal module dependencies would handle things).
1334 extern int crypto_modevent(struct module *, int, void *);
1335 /* XXX where to attach */
1336 DRIVER_MODULE(cryptosoft, nexus, swcr_driver, swcr_devclass, crypto_modevent,0);
1337 MODULE_VERSION(cryptosoft, 1);
1338 MODULE_DEPEND(cryptosoft, crypto, 1, 1, 1);