/*      $OpenBSD: cryptosoft.c,v 1.35 2002/04/26 08:43:50 deraadt Exp $ */

/*-
 * The author of this code is Angelos D. Keromytis (angelos@cis.upenn.edu)
 * Copyright (c) 2002-2006 Sam Leffler, Errno Consulting
 *
 * This code was written by Angelos D. Keromytis in Athens, Greece, in
 * February 2000. Network Security Technologies Inc. (NSTI) kindly
 * supported the development of this code.
 *
 * Copyright (c) 2000, 2001 Angelos D. Keromytis
 * Copyright (c) 2014-2021 The FreeBSD Foundation
 * All rights reserved.
 *
 * Portions of this software were developed by John-Mark Gurney
 * under sponsorship of the FreeBSD Foundation and
 * Rubicon Communications, LLC (Netgate).
 *
 * Portions of this software were developed by Ararat River
 * Consulting, LLC under sponsorship of the FreeBSD Foundation.
 *
 * Permission to use, copy, and modify this software with or without fee
 * is hereby granted, provided that this entire notice is included in
 * all source code copies of any software which is or includes a copy or
 * modification of this software.
 *
 * THIS SOFTWARE IS BEING PROVIDED "AS IS", WITHOUT ANY EXPRESS OR
 * IMPLIED WARRANTY. IN PARTICULAR, NONE OF THE AUTHORS MAKES ANY
 * REPRESENTATION OR WARRANTY OF ANY KIND CONCERNING THE
 * MERCHANTABILITY OF THIS SOFTWARE OR ITS FITNESS FOR ANY PARTICULAR
 * PURPOSE.
 */

#include <sys/cdefs.h>
__FBSDID("$FreeBSD$");

#include <sys/param.h>
#include <sys/systm.h>
#include <sys/malloc.h>
#include <sys/mbuf.h>
#include <sys/module.h>
#include <sys/sysctl.h>
#include <sys/errno.h>
#include <sys/random.h>
#include <sys/kernel.h>
#include <sys/uio.h>
#include <sys/endian.h>
#include <sys/limits.h>

#include <crypto/sha1.h>
#include <opencrypto/rmd160.h>

#include <opencrypto/cryptodev.h>
#include <opencrypto/xform.h>

#include <sys/kobj.h>
#include <sys/bus.h>
#include "cryptodev_if.h"

struct swcr_auth {
        void            *sw_ictx;
        void            *sw_octx;
        const struct auth_hash *sw_axf;
        uint16_t        sw_mlen;
        bool            sw_hmac;
};

struct swcr_encdec {
        void            *sw_ctx;
        const struct enc_xform *sw_exf;
};

struct swcr_compdec {
        const struct comp_algo *sw_cxf;
};

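/*
 * Per-session software state.  swcr_process is chosen once at
 * session setup (cipher, digest, AEAD, EtA, or compression), so
 * dispatching a request is a single indirect call.
 */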
struct swcr_session {
        int     (*swcr_process)(const struct swcr_session *, struct cryptop *);

        struct swcr_auth swcr_auth;
        struct swcr_encdec swcr_encdec;
        struct swcr_compdec swcr_compdec;
};

static  int32_t swcr_id;

static  void swcr_freesession(device_t dev, crypto_session_t cses);

/* Used for CRYPTO_NULL_CBC. */
static int
swcr_null(const struct swcr_session *ses, struct cryptop *crp)
{

        return (0);
}

/*
 * Apply a symmetric encryption/decryption algorithm.
 */
static int
swcr_encdec(const struct swcr_session *ses, struct cryptop *crp)
{
        unsigned char blk[EALG_MAX_BLOCK_LEN];
        const struct crypto_session_params *csp;
        const struct enc_xform *exf;
        const struct swcr_encdec *sw;
        void *ctx;
        size_t inlen, outlen, todo;
        int blksz, resid;
        struct crypto_buffer_cursor cc_in, cc_out;
        const unsigned char *inblk;
        unsigned char *outblk;
        int error;
        bool encrypting;

        error = 0;

        sw = &ses->swcr_encdec;
        exf = sw->sw_exf;
        csp = crypto_get_params(crp->crp_session);

        if (exf->native_blocksize == 0) {
                /* Check for non-padded data */
                if ((crp->crp_payload_length % exf->blocksize) != 0)
                        return (EINVAL);

                blksz = exf->blocksize;
        } else
                blksz = exf->native_blocksize;

        if (exf == &enc_xform_aes_icm &&
            (crp->crp_flags & CRYPTO_F_IV_SEPARATE) == 0)
                return (EINVAL);

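        /*
         * A per-request key in the request overrides the session key;
         * either way the key schedule is expanded into a stack-local
         * context so concurrent requests on the same session each
         * work on their own copy.
         */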
        ctx = __builtin_alloca(exf->ctxsize);
        if (crp->crp_cipher_key != NULL) {
                error = exf->setkey(ctx, crp->crp_cipher_key,
                    csp->csp_cipher_klen);
                if (error)
                        return (error);
        } else
                memcpy(ctx, sw->sw_ctx, exf->ctxsize);

        crypto_read_iv(crp, blk);
        exf->reinit(ctx, blk, csp->csp_ivlen);

        crypto_cursor_init(&cc_in, &crp->crp_buf);
        crypto_cursor_advance(&cc_in, crp->crp_payload_start);
        if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
                crypto_cursor_init(&cc_out, &crp->crp_obuf);
                crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
        } else
                cc_out = cc_in;

        encrypting = CRYPTO_OP_IS_ENCRYPT(crp->crp_op);

        /*
         * Loop through encrypting blocks.  'inlen' is the remaining
         * length of the current segment in the input buffer.
         * 'outlen' is the remaining length of the current segment in
         * the output buffer.
         */
        inlen = outlen = 0;
        for (resid = crp->crp_payload_length; resid >= blksz; resid -= todo) {
                if (inlen == 0)
                        inblk = crypto_cursor_segment(&cc_in, &inlen);
                if (outlen == 0)
                        outblk = crypto_cursor_segment(&cc_out, &outlen);

                /*
                 * If the current block is not contained within the
                 * current input/output segment, use 'blk' as a local
                 * buffer.
                 */
                if (inlen < blksz) {
                        crypto_cursor_copydata(&cc_in, blksz, blk);
                        inblk = blk;
                        inlen = blksz;
                }
                if (outlen < blksz) {
                        outblk = blk;
                        outlen = blksz;
                }

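                /*
                 * 'todo' is the largest multiple of the block size
                 * that is contiguous in both the input and output
                 * segments and still within the remaining payload,
                 * letting each multi-block call below cover as much
                 * data as possible.
                 */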
                todo = rounddown2(MIN(resid, MIN(inlen, outlen)), blksz);

                if (encrypting)
                        exf->encrypt_multi(ctx, inblk, outblk, todo);
                else
                        exf->decrypt_multi(ctx, inblk, outblk, todo);

                if (inblk == blk) {
                        inblk = crypto_cursor_segment(&cc_in, &inlen);
                } else {
                        crypto_cursor_advance(&cc_in, todo);
                        inlen -= todo;
                        inblk += todo;
                }

                if (outblk == blk) {
                        crypto_cursor_copyback(&cc_out, blksz, blk);
                        outblk = crypto_cursor_segment(&cc_out, &outlen);
                } else {
                        crypto_cursor_advance(&cc_out, todo);
                        outlen -= todo;
                        outblk += todo;
                }
        }

        /* Handle trailing partial block for stream ciphers. */
        if (resid > 0) {
                KASSERT(exf->native_blocksize != 0,
                    ("%s: partial block of %d bytes for cipher %s",
                    __func__, resid, exf->name));
                KASSERT(resid < blksz, ("%s: partial block too big", __func__));

                inblk = crypto_cursor_segment(&cc_in, &inlen);
                outblk = crypto_cursor_segment(&cc_out, &outlen);
                if (inlen < resid) {
                        crypto_cursor_copydata(&cc_in, resid, blk);
                        inblk = blk;
                }
                if (outlen < resid)
                        outblk = blk;
                if (encrypting)
                        exf->encrypt_last(ctx, inblk, outblk,
                            resid);
                else
                        exf->decrypt_last(ctx, inblk, outblk,
                            resid);
                if (outlen < resid)
                        crypto_cursor_copyback(&cc_out, resid, blk);
        }

        explicit_bzero(ctx, exf->ctxsize);
        explicit_bzero(blk, sizeof(blk));
        return (0);
}

/*
 * Compute or verify hash.
 */
static int
swcr_authcompute(const struct swcr_session *ses, struct cryptop *crp)
{
        struct {
                union authctx ctx;
                u_char aalg[HASH_MAX_LEN];
                u_char uaalg[HASH_MAX_LEN];
        } s;
        const struct crypto_session_params *csp;
        const struct swcr_auth *sw;
        const struct auth_hash *axf;
        int err;

        sw = &ses->swcr_auth;

        axf = sw->sw_axf;

        csp = crypto_get_params(crp->crp_session);
        if (crp->crp_auth_key != NULL) {
                if (sw->sw_hmac) {
                        hmac_init_ipad(axf, crp->crp_auth_key,
                            csp->csp_auth_klen, &s.ctx);
                } else {
                        axf->Init(&s.ctx);
                        axf->Setkey(&s.ctx, crp->crp_auth_key,
                            csp->csp_auth_klen);
                }
        } else
                memcpy(&s.ctx, sw->sw_ictx, axf->ctxsize);

        if (crp->crp_aad != NULL)
                err = axf->Update(&s.ctx, crp->crp_aad, crp->crp_aad_length);
        else
                err = crypto_apply(crp, crp->crp_aad_start, crp->crp_aad_length,
                    axf->Update, &s.ctx);
        if (err)
                goto out;

        if (CRYPTO_HAS_OUTPUT_BUFFER(crp) &&
            CRYPTO_OP_IS_ENCRYPT(crp->crp_op))
                err = crypto_apply_buf(&crp->crp_obuf,
                    crp->crp_payload_output_start, crp->crp_payload_length,
                    axf->Update, &s.ctx);
        else
                err = crypto_apply(crp, crp->crp_payload_start,
                    crp->crp_payload_length, axf->Update, &s.ctx);
        if (err)
                goto out;

        if (csp->csp_flags & CSP_F_ESN)
                axf->Update(&s.ctx, crp->crp_esn, 4);

        axf->Final(s.aalg, &s.ctx);
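        /*
         * For HMAC, chain the inner digest into the outer (opad)
         * context: tag = H((K ^ opad) || H((K ^ ipad) || message)).
         */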
        if (sw->sw_hmac) {
                if (crp->crp_auth_key != NULL)
                        hmac_init_opad(axf, crp->crp_auth_key,
                            csp->csp_auth_klen, &s.ctx);
                else
                        memcpy(&s.ctx, sw->sw_octx, axf->ctxsize);
                axf->Update(&s.ctx, s.aalg, axf->hashsize);
                axf->Final(s.aalg, &s.ctx);
        }

        if (crp->crp_op & CRYPTO_OP_VERIFY_DIGEST) {
                crypto_copydata(crp, crp->crp_digest_start, sw->sw_mlen, s.uaalg);
                if (timingsafe_bcmp(s.aalg, s.uaalg, sw->sw_mlen) != 0)
                        err = EBADMSG;
        } else {
                /* Inject the authentication data */
                crypto_copyback(crp, crp->crp_digest_start, sw->sw_mlen, s.aalg);
        }
out:
        explicit_bzero(&s, sizeof(s));
        return (err);
}

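/*
 * OCF stores payload and AAD lengths in ints, so these assertions
 * verify that any int-sized request stays within AES-GCM's limits.
 */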
CTASSERT(INT_MAX <= (1ll<<39) - 256);   /* GCM: plain text < 2^39-256 */
CTASSERT(INT_MAX <= (uint64_t)-1);      /* GCM: associated data <= 2^64-1 */

static int
swcr_gmac(const struct swcr_session *ses, struct cryptop *crp)
{
        struct {
                union authctx ctx;
                uint32_t blkbuf[howmany(AES_BLOCK_LEN, sizeof(uint32_t))];
                u_char tag[GMAC_DIGEST_LEN];
                u_char tag2[GMAC_DIGEST_LEN];
        } s;
        u_char *blk = (u_char *)s.blkbuf;
        struct crypto_buffer_cursor cc;
        const u_char *inblk;
        const struct swcr_auth *swa;
        const struct auth_hash *axf;
        uint32_t *blkp;
        size_t len;
        int blksz, error, ivlen, resid;

        swa = &ses->swcr_auth;
        axf = swa->sw_axf;
        blksz = GMAC_BLOCK_LEN;
        KASSERT(axf->blocksize == blksz, ("%s: axf block size mismatch",
            __func__));

        if (crp->crp_auth_key != NULL) {
                axf->Init(&s.ctx);
                axf->Setkey(&s.ctx, crp->crp_auth_key,
                    crypto_get_params(crp->crp_session)->csp_auth_klen);
        } else
                memcpy(&s.ctx, swa->sw_ictx, axf->ctxsize);

        /* Initialize the IV */
        ivlen = AES_GCM_IV_LEN;
        crypto_read_iv(crp, blk);

        axf->Reinit(&s.ctx, blk, ivlen);
        crypto_cursor_init(&cc, &crp->crp_buf);
        crypto_cursor_advance(&cc, crp->crp_payload_start);
        for (resid = crp->crp_payload_length; resid >= blksz; resid -= len) {
                inblk = crypto_cursor_segment(&cc, &len);
                if (len >= blksz) {
                        len = rounddown(MIN(len, resid), blksz);
                        crypto_cursor_advance(&cc, len);
                } else {
                        len = blksz;
                        crypto_cursor_copydata(&cc, len, blk);
                        inblk = blk;
                }
                axf->Update(&s.ctx, inblk, len);
        }
        if (resid > 0) {
                memset(blk, 0, blksz);
                crypto_cursor_copydata(&cc, resid, blk);
                axf->Update(&s.ctx, blk, blksz);
        }

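        /*
         * GHASH length block: the 64-bit big-endian bit counts of
         * the AAD and of the ciphertext.  GMAC treats all input as
         * AAD, so only the low 32 bits of the first count are
         * written (lengths are below 2^32 per the assertions above)
         * and the ciphertext count stays zero.
         */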
        /* length block */
        memset(blk, 0, blksz);
        blkp = (uint32_t *)blk + 1;
        *blkp = htobe32(crp->crp_payload_length * 8);
        axf->Update(&s.ctx, blk, blksz);

        /* Finalize MAC */
        axf->Final(s.tag, &s.ctx);

        error = 0;
        if (crp->crp_op & CRYPTO_OP_VERIFY_DIGEST) {
                crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
                    s.tag2);
                if (timingsafe_bcmp(s.tag, s.tag2, swa->sw_mlen) != 0)
                        error = EBADMSG;
        } else {
                /* Inject the authentication data */
                crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen, s.tag);
        }
        explicit_bzero(&s, sizeof(s));
        return (error);
}

static int
swcr_gcm(const struct swcr_session *ses, struct cryptop *crp)
{
        struct {
                uint32_t blkbuf[howmany(AES_BLOCK_LEN, sizeof(uint32_t))];
                u_char tag[GMAC_DIGEST_LEN];
                u_char tag2[GMAC_DIGEST_LEN];
        } s;
        u_char *blk = (u_char *)s.blkbuf;
        struct crypto_buffer_cursor cc_in, cc_out;
        const u_char *inblk;
        u_char *outblk;
        size_t inlen, outlen, todo;
        const struct swcr_auth *swa;
        const struct swcr_encdec *swe;
        const struct enc_xform *exf;
        void *ctx;
        uint32_t *blkp;
        int blksz, error, ivlen, r, resid;

        swa = &ses->swcr_auth;
        swe = &ses->swcr_encdec;
        exf = swe->sw_exf;
        blksz = GMAC_BLOCK_LEN;
        KASSERT(blksz == exf->native_blocksize,
            ("%s: blocksize mismatch", __func__));

        if ((crp->crp_flags & CRYPTO_F_IV_SEPARATE) == 0)
                return (EINVAL);

        ivlen = AES_GCM_IV_LEN;

        ctx = __builtin_alloca(exf->ctxsize);
        if (crp->crp_cipher_key != NULL)
                exf->setkey(ctx, crp->crp_cipher_key,
                    crypto_get_params(crp->crp_session)->csp_cipher_klen);
        else
                memcpy(ctx, swe->sw_ctx, exf->ctxsize);
        exf->reinit(ctx, crp->crp_iv, ivlen);

        /* Supply MAC with AAD */
        if (crp->crp_aad != NULL) {
                inlen = rounddown2(crp->crp_aad_length, blksz);
                if (inlen != 0)
                        exf->update(ctx, crp->crp_aad, inlen);
                if (crp->crp_aad_length != inlen) {
                        memset(blk, 0, blksz);
                        memcpy(blk, (char *)crp->crp_aad + inlen,
                            crp->crp_aad_length - inlen);
                        exf->update(ctx, blk, blksz);
                }
        } else {
                crypto_cursor_init(&cc_in, &crp->crp_buf);
                crypto_cursor_advance(&cc_in, crp->crp_aad_start);
                for (resid = crp->crp_aad_length; resid >= blksz;
                     resid -= inlen) {
                        inblk = crypto_cursor_segment(&cc_in, &inlen);
                        if (inlen >= blksz) {
                                inlen = rounddown2(MIN(inlen, resid), blksz);
                                crypto_cursor_advance(&cc_in, inlen);
                        } else {
                                inlen = blksz;
                                crypto_cursor_copydata(&cc_in, inlen, blk);
                                inblk = blk;
                        }
                        exf->update(ctx, inblk, inlen);
                }
                if (resid > 0) {
                        memset(blk, 0, blksz);
                        crypto_cursor_copydata(&cc_in, resid, blk);
                        exf->update(ctx, blk, blksz);
                }
        }

        /* Do encryption with MAC */
        crypto_cursor_init(&cc_in, &crp->crp_buf);
        crypto_cursor_advance(&cc_in, crp->crp_payload_start);
        if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
                crypto_cursor_init(&cc_out, &crp->crp_obuf);
                crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
        } else
                cc_out = cc_in;

        inlen = outlen = 0;
        for (resid = crp->crp_payload_length; resid >= blksz; resid -= todo) {
                if (inlen == 0)
                        inblk = crypto_cursor_segment(&cc_in, &inlen);
                if (outlen == 0)
                        outblk = crypto_cursor_segment(&cc_out, &outlen);

                if (inlen < blksz) {
                        crypto_cursor_copydata(&cc_in, blksz, blk);
                        inblk = blk;
                        inlen = blksz;
                }

                if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
                        if (outlen < blksz) {
                                outblk = blk;
                                outlen = blksz;
                        }

                        todo = rounddown2(MIN(resid, MIN(inlen, outlen)),
                            blksz);

                        exf->encrypt_multi(ctx, inblk, outblk, todo);
                        exf->update(ctx, outblk, todo);

                        if (outblk == blk) {
                                crypto_cursor_copyback(&cc_out, blksz, blk);
                                outblk = crypto_cursor_segment(&cc_out, &outlen);
                        } else {
                                crypto_cursor_advance(&cc_out, todo);
                                outlen -= todo;
                                outblk += todo;
                        }
                } else {
                        todo = rounddown2(MIN(resid, inlen), blksz);
                        exf->update(ctx, inblk, todo);
                }

                if (inblk == blk) {
                        inblk = crypto_cursor_segment(&cc_in, &inlen);
                } else {
                        crypto_cursor_advance(&cc_in, todo);
                        inlen -= todo;
                        inblk += todo;
                }
        }
        if (resid > 0) {
                crypto_cursor_copydata(&cc_in, resid, blk);
                if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
                        exf->encrypt_last(ctx, blk, blk, resid);
                        crypto_cursor_copyback(&cc_out, resid, blk);
                }
                exf->update(ctx, blk, resid);
        }

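        /*
         * GHASH length block: [len(AAD)]64 || [len(C)]64, bit counts
         * in big-endian order.  Only the low 32 bits of each count
         * are written since OCF limits both lengths to int range.
         */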
        /* length block */
        memset(blk, 0, blksz);
        blkp = (uint32_t *)blk + 1;
        *blkp = htobe32(crp->crp_aad_length * 8);
        blkp = (uint32_t *)blk + 3;
        *blkp = htobe32(crp->crp_payload_length * 8);
        exf->update(ctx, blk, blksz);

        /* Finalize MAC */
        exf->final(s.tag, ctx);

        /* Validate tag */
        error = 0;
        if (!CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
                crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
                    s.tag2);
                r = timingsafe_bcmp(s.tag, s.tag2, swa->sw_mlen);
                if (r != 0) {
                        error = EBADMSG;
                        goto out;
                }

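                /*
                 * Decryption is deferred until the tag verifies so
                 * that unauthenticated plaintext is never written to
                 * the output buffer.
                 */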
                /* tag matches, decrypt data */
                crypto_cursor_init(&cc_in, &crp->crp_buf);
                crypto_cursor_advance(&cc_in, crp->crp_payload_start);

                inlen = 0;
                for (resid = crp->crp_payload_length; resid > blksz;
                     resid -= todo) {
                        if (inlen == 0)
                                inblk = crypto_cursor_segment(&cc_in, &inlen);
                        if (outlen == 0)
                                outblk = crypto_cursor_segment(&cc_out, &outlen);
                        if (inlen < blksz) {
                                crypto_cursor_copydata(&cc_in, blksz, blk);
                                inblk = blk;
                                inlen = blksz;
                        }
                        if (outlen < blksz) {
                                outblk = blk;
                                outlen = blksz;
                        }

                        todo = rounddown2(MIN(resid, MIN(inlen, outlen)),
                            blksz);

                        exf->decrypt_multi(ctx, inblk, outblk, todo);

                        if (inblk == blk) {
                                inblk = crypto_cursor_segment(&cc_in, &inlen);
                        } else {
                                crypto_cursor_advance(&cc_in, todo);
                                inlen -= todo;
                                inblk += todo;
                        }

                        if (outblk == blk) {
                                crypto_cursor_copyback(&cc_out, blksz, blk);
                                outblk = crypto_cursor_segment(&cc_out,
                                    &outlen);
                        } else {
                                crypto_cursor_advance(&cc_out, todo);
                                outlen -= todo;
                                outblk += todo;
                        }
                }
                if (resid > 0) {
                        crypto_cursor_copydata(&cc_in, resid, blk);
                        exf->decrypt_last(ctx, blk, blk, resid);
                        crypto_cursor_copyback(&cc_out, resid, blk);
                }
        } else {
                /* Inject the authentication data */
                crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen,
                    s.tag);
        }

out:
        explicit_bzero(ctx, exf->ctxsize);
        explicit_bzero(&s, sizeof(s));

        return (error);
}

static void
build_ccm_b0(const char *nonce, u_int nonce_length, u_int aad_length,
    u_int data_length, u_int tag_length, uint8_t *b0)
{
        uint8_t *bp;
        uint8_t flags, L;

        KASSERT(nonce_length >= 7 && nonce_length <= 13,
            ("nonce_length must be between 7 and 13 bytes"));

        /*
         * Need to determine the L field value.  This is the number of
         * bytes needed to specify the length of the message; the length
         * is whatever is left in the 16 bytes after specifying flags and
         * the nonce.
         */
        L = 15 - nonce_length;

        flags = ((aad_length > 0) << 6) +
            (((tag_length - 2) / 2) << 3) +
            L - 1;

        /*
         * Now we need to set up the first block, which has flags, nonce,
         * and the message length.
         */
        b0[0] = flags;
        memcpy(b0 + 1, nonce, nonce_length);
        bp = b0 + 1 + nonce_length;

        /* Copy the L bytes of data_length; the flags byte encodes L' = L - 1. */
        for (uint8_t *dst = b0 + CCM_CBC_BLOCK_LEN - 1; dst >= bp; dst--) {
                *dst = data_length;
                data_length >>= 8;
        }
}

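/*
 * CCM prefixes the AAD with an encoding of its length: lengths below
 * 0xff00 use a bare 16-bit big-endian count; larger values use the
 * 0xff 0xfe escape followed by a 32-bit big-endian count.
 */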
/* NB: OCF only supports AAD lengths < 2^32. */
static int
build_ccm_aad_length(u_int aad_length, uint8_t *blk)
{
        if (aad_length < ((1 << 16) - (1 << 8))) {
                be16enc(blk, aad_length);
                return (sizeof(uint16_t));
        } else {
                blk[0] = 0xff;
                blk[1] = 0xfe;
                be32enc(blk + 2, aad_length);
                return (2 + sizeof(uint32_t));
        }
}

static int
swcr_ccm_cbc_mac(const struct swcr_session *ses, struct cryptop *crp)
{
        struct {
                union authctx ctx;
                u_char blk[CCM_CBC_BLOCK_LEN];
                u_char tag[AES_CBC_MAC_HASH_LEN];
                u_char tag2[AES_CBC_MAC_HASH_LEN];
        } s;
        const struct crypto_session_params *csp;
        const struct swcr_auth *swa;
        const struct auth_hash *axf;
        int error, ivlen, len;

        csp = crypto_get_params(crp->crp_session);
        swa = &ses->swcr_auth;
        axf = swa->sw_axf;

        if (crp->crp_auth_key != NULL) {
                axf->Init(&s.ctx);
                axf->Setkey(&s.ctx, crp->crp_auth_key, csp->csp_auth_klen);
        } else
                memcpy(&s.ctx, swa->sw_ictx, axf->ctxsize);

        /* Initialize the IV */
        ivlen = csp->csp_ivlen;

        /* Supply MAC with IV */
        axf->Reinit(&s.ctx, crp->crp_iv, ivlen);

        /* Supply MAC with b0. */
        build_ccm_b0(crp->crp_iv, ivlen, crp->crp_payload_length, 0,
            swa->sw_mlen, s.blk);
        axf->Update(&s.ctx, s.blk, CCM_CBC_BLOCK_LEN);

        len = build_ccm_aad_length(crp->crp_payload_length, s.blk);
        axf->Update(&s.ctx, s.blk, len);

        crypto_apply(crp, crp->crp_payload_start, crp->crp_payload_length,
            axf->Update, &s.ctx);

        /* Finalize MAC */
        axf->Final(s.tag, &s.ctx);

        error = 0;
        if (crp->crp_op & CRYPTO_OP_VERIFY_DIGEST) {
                crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
                    s.tag2);
                if (timingsafe_bcmp(s.tag, s.tag2, swa->sw_mlen) != 0)
                        error = EBADMSG;
        } else {
                /* Inject the authentication data */
                crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen,
                    s.tag);
        }
        explicit_bzero(&s, sizeof(s));
        return (error);
}

static int
swcr_ccm(const struct swcr_session *ses, struct cryptop *crp)
{
        const struct crypto_session_params *csp;
        struct {
                uint32_t blkbuf[howmany(AES_BLOCK_LEN, sizeof(uint32_t))];
                u_char tag[AES_CBC_MAC_HASH_LEN];
                u_char tag2[AES_CBC_MAC_HASH_LEN];
        } s;
        u_char *blk = (u_char *)s.blkbuf;
        struct crypto_buffer_cursor cc_in, cc_out;
        const u_char *inblk;
        u_char *outblk;
        size_t inlen, outlen, todo;
        const struct swcr_auth *swa;
        const struct swcr_encdec *swe;
        const struct enc_xform *exf;
        void *ctx;
        size_t len;
        int blksz, error, ivlen, r, resid;

        csp = crypto_get_params(crp->crp_session);
        swa = &ses->swcr_auth;
        swe = &ses->swcr_encdec;
        exf = swe->sw_exf;
        blksz = AES_BLOCK_LEN;
        KASSERT(blksz == exf->native_blocksize,
            ("%s: blocksize mismatch", __func__));

        if (crp->crp_payload_length > ccm_max_payload_length(csp))
                return (EMSGSIZE);

        if ((crp->crp_flags & CRYPTO_F_IV_SEPARATE) == 0)
                return (EINVAL);

        ivlen = csp->csp_ivlen;

        ctx = __builtin_alloca(exf->ctxsize);
        if (crp->crp_cipher_key != NULL)
                exf->setkey(ctx, crp->crp_cipher_key,
                    crypto_get_params(crp->crp_session)->csp_cipher_klen);
        else
                memcpy(ctx, swe->sw_ctx, exf->ctxsize);
        exf->reinit(ctx, crp->crp_iv, ivlen);

        /* Supply MAC with b0. */
        _Static_assert(sizeof(s.blkbuf) >= CCM_CBC_BLOCK_LEN,
            "blkbuf too small for b0");
        build_ccm_b0(crp->crp_iv, ivlen, crp->crp_aad_length,
            crp->crp_payload_length, swa->sw_mlen, blk);
        exf->update(ctx, blk, CCM_CBC_BLOCK_LEN);

        /* Supply MAC with AAD */
        if (crp->crp_aad_length != 0) {
                len = build_ccm_aad_length(crp->crp_aad_length, blk);
                exf->update(ctx, blk, len);
                if (crp->crp_aad != NULL)
                        exf->update(ctx, crp->crp_aad, crp->crp_aad_length);
                else
                        crypto_apply(crp, crp->crp_aad_start,
                            crp->crp_aad_length, exf->update, ctx);

                /* Pad the AAD (including length field) to a full block. */
                len = (len + crp->crp_aad_length) % CCM_CBC_BLOCK_LEN;
                if (len != 0) {
                        len = CCM_CBC_BLOCK_LEN - len;
                        memset(blk, 0, CCM_CBC_BLOCK_LEN);
                        exf->update(ctx, blk, len);
                }
        }

        /* Do encryption/decryption with MAC */
        crypto_cursor_init(&cc_in, &crp->crp_buf);
        crypto_cursor_advance(&cc_in, crp->crp_payload_start);
        if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
                crypto_cursor_init(&cc_out, &crp->crp_obuf);
                crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
        } else
                cc_out = cc_in;

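        /*
         * CCM authenticates the plaintext, so on encryption the
         * CBC-MAC update runs over the input before it is encrypted;
         * GCM, by contrast, authenticates the ciphertext.
         */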
        inlen = outlen = 0;
        for (resid = crp->crp_payload_length; resid >= blksz; resid -= todo) {
                if (inlen == 0)
                        inblk = crypto_cursor_segment(&cc_in, &inlen);
                if (outlen == 0)
                        outblk = crypto_cursor_segment(&cc_out, &outlen);

                if (inlen < blksz) {
                        crypto_cursor_copydata(&cc_in, blksz, blk);
                        inblk = blk;
                        inlen = blksz;
                }

                if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
                        if (outlen < blksz) {
                                outblk = blk;
                                outlen = blksz;
                        }

                        todo = rounddown2(MIN(resid, MIN(inlen, outlen)),
                            blksz);

                        exf->update(ctx, inblk, todo);
                        exf->encrypt_multi(ctx, inblk, outblk, todo);

                        if (outblk == blk) {
                                crypto_cursor_copyback(&cc_out, blksz, blk);
                                outblk = crypto_cursor_segment(&cc_out, &outlen);
                        } else {
                                crypto_cursor_advance(&cc_out, todo);
                                outlen -= todo;
                                outblk += todo;
                        }
                } else {
                        /*
                         * One of the problems with CCM+CBC is that
                         * the authentication is done on the
                         * unencrypted data.  As a result, we have to
                         * decrypt the data twice: once to generate
                         * the tag and a second time after the tag is
                         * verified.
                         */
                        todo = blksz;
                        exf->decrypt(ctx, inblk, blk);
                        exf->update(ctx, blk, todo);
                }

                if (inblk == blk) {
                        inblk = crypto_cursor_segment(&cc_in, &inlen);
                } else {
                        crypto_cursor_advance(&cc_in, todo);
                        inlen -= todo;
                        inblk += todo;
                }
        }
        if (resid > 0) {
                crypto_cursor_copydata(&cc_in, resid, blk);
                if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
                        exf->update(ctx, blk, resid);
                        exf->encrypt_last(ctx, blk, blk, resid);
                        crypto_cursor_copyback(&cc_out, resid, blk);
                } else {
                        exf->decrypt_last(ctx, blk, blk, resid);
                        exf->update(ctx, blk, resid);
                }
        }

        /* Finalize MAC */
        exf->final(s.tag, ctx);

        /* Validate tag */
        error = 0;
        if (!CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
                crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
                    s.tag2);
                r = timingsafe_bcmp(s.tag, s.tag2, swa->sw_mlen);
                if (r != 0) {
                        error = EBADMSG;
                        goto out;
                }

                /* tag matches, decrypt data */
                exf->reinit(ctx, crp->crp_iv, ivlen);
                crypto_cursor_init(&cc_in, &crp->crp_buf);
                crypto_cursor_advance(&cc_in, crp->crp_payload_start);

                inlen = 0;
                for (resid = crp->crp_payload_length; resid >= blksz;
                     resid -= todo) {
                        if (inlen == 0)
                                inblk = crypto_cursor_segment(&cc_in, &inlen);
                        if (outlen == 0)
                                outblk = crypto_cursor_segment(&cc_out,
                                    &outlen);

                        if (inlen < blksz) {
                                crypto_cursor_copydata(&cc_in, blksz, blk);
                                inblk = blk;
                                inlen = blksz;
                        }
                        if (outlen < blksz) {
                                outblk = blk;
                                outlen = blksz;
                        }

                        todo = rounddown2(MIN(resid, MIN(inlen, outlen)),
                            blksz);

                        exf->decrypt_multi(ctx, inblk, outblk, todo);

                        if (inblk == blk) {
                                inblk = crypto_cursor_segment(&cc_in, &inlen);
                        } else {
                                crypto_cursor_advance(&cc_in, todo);
                                inlen -= todo;
                                inblk += todo;
                        }

                        if (outblk == blk) {
                                crypto_cursor_copyback(&cc_out, blksz, blk);
                                outblk = crypto_cursor_segment(&cc_out,
                                    &outlen);
                        } else {
                                crypto_cursor_advance(&cc_out, todo);
                                outlen -= todo;
                                outblk += todo;
                        }
                }
                if (resid > 0) {
                        crypto_cursor_copydata(&cc_in, resid, blk);
                        exf->decrypt_last(ctx, blk, blk, resid);
                        crypto_cursor_copyback(&cc_out, resid, blk);
                }
        } else {
                /* Inject the authentication data */
                crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen,
                    s.tag);
        }

out:
        explicit_bzero(ctx, exf->ctxsize);
        explicit_bzero(&s, sizeof(s));
        return (error);
}

static int
swcr_chacha20_poly1305(const struct swcr_session *ses, struct cryptop *crp)
{
        const struct crypto_session_params *csp;
        struct {
                uint64_t blkbuf[howmany(CHACHA20_NATIVE_BLOCK_LEN, sizeof(uint64_t))];
                u_char tag[POLY1305_HASH_LEN];
                u_char tag2[POLY1305_HASH_LEN];
        } s;
        u_char *blk = (u_char *)s.blkbuf;
        struct crypto_buffer_cursor cc_in, cc_out;
        const u_char *inblk;
        u_char *outblk;
        size_t inlen, outlen, todo;
        uint64_t *blkp;
        const struct swcr_auth *swa;
        const struct swcr_encdec *swe;
        const struct enc_xform *exf;
        void *ctx;
        int blksz, error, r, resid;

        swa = &ses->swcr_auth;
        swe = &ses->swcr_encdec;
        exf = swe->sw_exf;
        blksz = exf->native_blocksize;
        KASSERT(blksz <= sizeof(s.blkbuf), ("%s: blocksize mismatch", __func__));

        if ((crp->crp_flags & CRYPTO_F_IV_SEPARATE) == 0)
                return (EINVAL);

        csp = crypto_get_params(crp->crp_session);

        ctx = __builtin_alloca(exf->ctxsize);
        if (crp->crp_cipher_key != NULL)
                exf->setkey(ctx, crp->crp_cipher_key,
                    csp->csp_cipher_klen);
        else
                memcpy(ctx, swe->sw_ctx, exf->ctxsize);
        exf->reinit(ctx, crp->crp_iv, csp->csp_ivlen);

        /* Supply MAC with AAD */
        if (crp->crp_aad != NULL)
                exf->update(ctx, crp->crp_aad, crp->crp_aad_length);
        else
                crypto_apply(crp, crp->crp_aad_start, crp->crp_aad_length,
                    exf->update, ctx);
        if (crp->crp_aad_length % POLY1305_BLOCK_LEN != 0) {
                /* padding1 */
                memset(blk, 0, POLY1305_BLOCK_LEN);
                exf->update(ctx, blk, POLY1305_BLOCK_LEN -
                    crp->crp_aad_length % POLY1305_BLOCK_LEN);
        }

        /* Do encryption with MAC */
        crypto_cursor_init(&cc_in, &crp->crp_buf);
        crypto_cursor_advance(&cc_in, crp->crp_payload_start);
        if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
                crypto_cursor_init(&cc_out, &crp->crp_obuf);
                crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
        } else
                cc_out = cc_in;

        inlen = outlen = 0;
        if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
                for (resid = crp->crp_payload_length; resid >= blksz;
                     resid -= todo) {
                        if (inlen == 0)
                                inblk = crypto_cursor_segment(&cc_in, &inlen);
                        if (outlen == 0)
                                outblk = crypto_cursor_segment(&cc_out,
                                    &outlen);

                        if (inlen < blksz) {
                                crypto_cursor_copydata(&cc_in, blksz, blk);
                                inblk = blk;
                                inlen = blksz;
                        }

                        if (outlen < blksz) {
                                outblk = blk;
                                outlen = blksz;
                        }

                        todo = rounddown2(MIN(resid, MIN(inlen, outlen)),
                            blksz);

                        exf->encrypt_multi(ctx, inblk, outblk, todo);
                        exf->update(ctx, outblk, todo);

                        if (inblk == blk) {
                                inblk = crypto_cursor_segment(&cc_in, &inlen);
                        } else {
                                crypto_cursor_advance(&cc_in, todo);
                                inlen -= todo;
                                inblk += todo;
                        }

                        if (outblk == blk) {
                                crypto_cursor_copyback(&cc_out, blksz, blk);
                                outblk = crypto_cursor_segment(&cc_out, &outlen);
                        } else {
                                crypto_cursor_advance(&cc_out, todo);
                                outlen -= todo;
                                outblk += todo;
                        }
                }
                if (resid > 0) {
                        crypto_cursor_copydata(&cc_in, resid, blk);
                        exf->encrypt_last(ctx, blk, blk, resid);
                        crypto_cursor_copyback(&cc_out, resid, blk);
                        exf->update(ctx, blk, resid);
                }
        } else
                crypto_apply(crp, crp->crp_payload_start,
                    crp->crp_payload_length, exf->update, ctx);
        if (crp->crp_payload_length % POLY1305_BLOCK_LEN != 0) {
                /* padding2 */
                memset(blk, 0, POLY1305_BLOCK_LEN);
                exf->update(ctx, blk, POLY1305_BLOCK_LEN -
                    crp->crp_payload_length % POLY1305_BLOCK_LEN);
        }

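        /*
         * The Poly1305 input ends with the AAD and ciphertext
         * lengths as two little-endian 64-bit words, per the
         * ChaCha20-Poly1305 AEAD construction.
         */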
        /* lengths */
        blkp = (uint64_t *)blk;
        blkp[0] = htole64(crp->crp_aad_length);
        blkp[1] = htole64(crp->crp_payload_length);
        exf->update(ctx, blk, sizeof(uint64_t) * 2);

        /* Finalize MAC */
        exf->final(s.tag, ctx);

        /* Validate tag */
        error = 0;
        if (!CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
                crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen,
                    s.tag2);
                r = timingsafe_bcmp(s.tag, s.tag2, swa->sw_mlen);
                if (r != 0) {
                        error = EBADMSG;
                        goto out;
                }

                /* tag matches, decrypt data */
                crypto_cursor_init(&cc_in, &crp->crp_buf);
                crypto_cursor_advance(&cc_in, crp->crp_payload_start);

                inlen = 0;
                for (resid = crp->crp_payload_length; resid > blksz;
                     resid -= todo) {
                        if (inlen == 0)
                                inblk = crypto_cursor_segment(&cc_in, &inlen);
                        if (outlen == 0)
                                outblk = crypto_cursor_segment(&cc_out,
                                    &outlen);
                        if (inlen < blksz) {
                                crypto_cursor_copydata(&cc_in, blksz, blk);
                                inblk = blk;
                                inlen = blksz;
                        }
                        if (outlen < blksz) {
                                outblk = blk;
                                outlen = blksz;
                        }

                        todo = rounddown2(MIN(resid, MIN(inlen, outlen)),
                            blksz);

                        exf->decrypt_multi(ctx, inblk, outblk, todo);

                        if (inblk == blk) {
                                inblk = crypto_cursor_segment(&cc_in, &inlen);
                        } else {
                                crypto_cursor_advance(&cc_in, todo);
                                inlen -= todo;
                                inblk += todo;
                        }

                        if (outblk == blk) {
                                crypto_cursor_copyback(&cc_out, blksz, blk);
                                outblk = crypto_cursor_segment(&cc_out,
                                    &outlen);
                        } else {
                                crypto_cursor_advance(&cc_out, todo);
                                outlen -= todo;
                                outblk += todo;
                        }
                }
                if (resid > 0) {
                        crypto_cursor_copydata(&cc_in, resid, blk);
                        exf->decrypt_last(ctx, blk, blk, resid);
                        crypto_cursor_copyback(&cc_out, resid, blk);
                }
        } else {
                /* Inject the authentication data */
                crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen,
                    s.tag);
        }

out:
        explicit_bzero(ctx, exf->ctxsize);
        explicit_bzero(&s, sizeof(s));
        return (error);
}

/*
 * Apply a cipher and a digest to perform Encrypt-then-Authenticate
 * (EtA).  The digest covers the ciphertext, so encryption runs first
 * when encrypting and digest verification runs first when decrypting.
 */
static int
swcr_eta(const struct swcr_session *ses, struct cryptop *crp)
{
        int error;

        if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
                error = swcr_encdec(ses, crp);
                if (error == 0)
                        error = swcr_authcompute(ses, crp);
        } else {
                error = swcr_authcompute(ses, crp);
                if (error == 0)
                        error = swcr_encdec(ses, crp);
        }
        return (error);
}

/*
 * Apply a compression/decompression algorithm.
 */
static int
swcr_compdec(const struct swcr_session *ses, struct cryptop *crp)
{
        const struct comp_algo *cxf;
        uint8_t *data, *out;
        int adj;
        uint32_t result;

        cxf = ses->swcr_compdec.sw_cxf;

        /*
         * The compression routines operate on a single contiguous
         * buffer, so if the payload is not contiguous in the mbuf
         * chain, copy it into a temporary buffer first.
         */
        data = malloc(crp->crp_payload_length, M_CRYPTO_DATA, M_NOWAIT);
        if (data == NULL)
                return (EINVAL);
        crypto_copydata(crp, crp->crp_payload_start, crp->crp_payload_length,
            data);

        if (CRYPTO_OP_IS_COMPRESS(crp->crp_op))
                result = cxf->compress(data, crp->crp_payload_length, &out);
        else
                result = cxf->decompress(data, crp->crp_payload_length, &out);

        free(data, M_CRYPTO_DATA);
        if (result == 0)
                return (EINVAL);
        crp->crp_olen = result;

        /* Check the compressed size when doing compression */
        if (CRYPTO_OP_IS_COMPRESS(crp->crp_op)) {
                if (result >= crp->crp_payload_length) {
                        /* Compression was useless; we lost time */
                        free(out, M_CRYPTO_DATA);
                        return (0);
                }
        }

        /*
         * Copy back the (de)compressed data.  m_copyback extends the
         * mbuf as necessary.
         */
        crypto_copyback(crp, crp->crp_payload_start, result, out);
        if (result < crp->crp_payload_length) {
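                /*
                 * The result is shorter than the original payload,
                 * so trim the excess from the tail of the request
                 * buffer; how depends on the buffer type.
                 */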
                switch (crp->crp_buf.cb_type) {
                case CRYPTO_BUF_MBUF:
                case CRYPTO_BUF_SINGLE_MBUF:
                        adj = result - crp->crp_payload_length;
                        m_adj(crp->crp_buf.cb_mbuf, adj);
                        break;
                case CRYPTO_BUF_UIO: {
                        struct uio *uio = crp->crp_buf.cb_uio;
                        int ind;

                        adj = crp->crp_payload_length - result;
                        ind = uio->uio_iovcnt - 1;

                        while (adj > 0 && ind >= 0) {
                                if (adj < uio->uio_iov[ind].iov_len) {
                                        uio->uio_iov[ind].iov_len -= adj;
                                        break;
                                }

                                adj -= uio->uio_iov[ind].iov_len;
                                uio->uio_iov[ind].iov_len = 0;
                                ind--;
                                uio->uio_iovcnt--;
                        }
                        }
                        break;
                case CRYPTO_BUF_VMPAGE:
                        adj = crp->crp_payload_length - result;
                        crp->crp_buf.cb_vm_page_len -= adj;
                        break;
                default:
                        break;
                }
        }
        free(out, M_CRYPTO_DATA);
        return (0);
}

static int
swcr_setup_cipher(struct swcr_session *ses,
    const struct crypto_session_params *csp)
{
        struct swcr_encdec *swe;
        const struct enc_xform *txf;
        int error;

        swe = &ses->swcr_encdec;
        txf = crypto_cipher(csp);
        if (csp->csp_cipher_key != NULL) {
                if (txf->ctxsize != 0) {
                        swe->sw_ctx = malloc(txf->ctxsize, M_CRYPTO_DATA,
                            M_NOWAIT);
                        if (swe->sw_ctx == NULL)
                                return (ENOMEM);
                }
                error = txf->setkey(swe->sw_ctx,
                    csp->csp_cipher_key, csp->csp_cipher_klen);
                if (error)
                        return (error);
        }
        swe->sw_exf = txf;
        return (0);
}

static int
swcr_setup_auth(struct swcr_session *ses,
    const struct crypto_session_params *csp)
{
        struct swcr_auth *swa;
        const struct auth_hash *axf;

        swa = &ses->swcr_auth;

        axf = crypto_auth_hash(csp);
        swa->sw_axf = axf;
        if (csp->csp_auth_mlen < 0 || csp->csp_auth_mlen > axf->hashsize)
                return (EINVAL);
        if (csp->csp_auth_mlen == 0)
                swa->sw_mlen = axf->hashsize;
        else
                swa->sw_mlen = csp->csp_auth_mlen;
        if (csp->csp_auth_klen == 0 || csp->csp_auth_key != NULL) {
                swa->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA,
                    M_NOWAIT);
                if (swa->sw_ictx == NULL)
                        return (ENOBUFS);
        }

        switch (csp->csp_auth_alg) {
        case CRYPTO_SHA1_HMAC:
        case CRYPTO_SHA2_224_HMAC:
        case CRYPTO_SHA2_256_HMAC:
        case CRYPTO_SHA2_384_HMAC:
        case CRYPTO_SHA2_512_HMAC:
        case CRYPTO_RIPEMD160_HMAC:
                swa->sw_hmac = true;
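                /*
                 * Precompute the ipad and opad contexts once per
                 * session so each request resumes the saved states
                 * instead of rehashing the key.
                 */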
1327                 if (csp->csp_auth_key != NULL) {
1328                         swa->sw_octx = malloc(axf->ctxsize, M_CRYPTO_DATA,
1329                             M_NOWAIT);
1330                         if (swa->sw_octx == NULL)
1331                                 return (ENOBUFS);
1332                         hmac_init_ipad(axf, csp->csp_auth_key,
1333                             csp->csp_auth_klen, swa->sw_ictx);
1334                         hmac_init_opad(axf, csp->csp_auth_key,
1335                             csp->csp_auth_klen, swa->sw_octx);
1336                 }
1337                 break;
1338         case CRYPTO_RIPEMD160:
1339         case CRYPTO_SHA1:
1340         case CRYPTO_SHA2_224:
1341         case CRYPTO_SHA2_256:
1342         case CRYPTO_SHA2_384:
1343         case CRYPTO_SHA2_512:
1344         case CRYPTO_NULL_HMAC:
1345                 axf->Init(swa->sw_ictx);
1346                 break;
1347         case CRYPTO_AES_NIST_GMAC:
1348         case CRYPTO_AES_CCM_CBC_MAC:
1349         case CRYPTO_POLY1305:
1350                 if (csp->csp_auth_key != NULL) {
1351                         axf->Init(swa->sw_ictx);
1352                         axf->Setkey(swa->sw_ictx, csp->csp_auth_key,
1353                             csp->csp_auth_klen);
1354                 }
1355                 break;
1356         case CRYPTO_BLAKE2B:
1357         case CRYPTO_BLAKE2S:
1358                 /*
1359                  * Blake2b and Blake2s support an optional key but do
1360                  * not require one.
1361                  */
1362                 if (csp->csp_auth_klen == 0)
1363                         axf->Init(swa->sw_ictx);
1364                 else if (csp->csp_auth_key != NULL)
1365                         axf->Setkey(swa->sw_ictx, csp->csp_auth_key,
1366                             csp->csp_auth_klen);
1367                 break;
1368         }
1369
1370         if (csp->csp_mode == CSP_MODE_DIGEST) {
1371                 switch (csp->csp_auth_alg) {
1372                 case CRYPTO_AES_NIST_GMAC:
1373                         ses->swcr_process = swcr_gmac;
1374                         break;
1375                 case CRYPTO_AES_CCM_CBC_MAC:
1376                         ses->swcr_process = swcr_ccm_cbc_mac;
1377                         break;
1378                 default:
1379                         ses->swcr_process = swcr_authcompute;
1380                 }
1381         }
1382
1383         return (0);
1384 }
1385
1386 static int
1387 swcr_setup_aead(struct swcr_session *ses,
1388     const struct crypto_session_params *csp)
1389 {
1390         struct swcr_auth *swa;
1391         int error;
1392
1393         error = swcr_setup_cipher(ses, csp);
1394         if (error)
1395                 return (error);
1396
1397         swa = &ses->swcr_auth;
1398         if (csp->csp_auth_mlen == 0)
1399                 swa->sw_mlen = ses->swcr_encdec.sw_exf->macsize;
1400         else
1401                 swa->sw_mlen = csp->csp_auth_mlen;
1402         return (0);
1403 }
1404
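/*
 * Check whether this implementation supports the requested
 * authentication algorithm together with its key and IV parameters.
 */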
1405 static bool
1406 swcr_auth_supported(const struct crypto_session_params *csp)
1407 {
1408         const struct auth_hash *axf;
1409
1410         axf = crypto_auth_hash(csp);
1411         if (axf == NULL)
1412                 return (false);
1413         switch (csp->csp_auth_alg) {
1414         case CRYPTO_SHA1_HMAC:
1415         case CRYPTO_SHA2_224_HMAC:
1416         case CRYPTO_SHA2_256_HMAC:
1417         case CRYPTO_SHA2_384_HMAC:
1418         case CRYPTO_SHA2_512_HMAC:
1419         case CRYPTO_NULL_HMAC:
1420         case CRYPTO_RIPEMD160_HMAC:
1421                 break;
1422         case CRYPTO_AES_NIST_GMAC:
1423                 switch (csp->csp_auth_klen * 8) {
1424                 case 128:
1425                 case 192:
1426                 case 256:
1427                         break;
1428                 default:
1429                         return (false);
1430                 }
1431                 if (csp->csp_auth_key == NULL)
1432                         return (false);
1433                 if (csp->csp_ivlen != AES_GCM_IV_LEN)
1434                         return (false);
1435                 break;
1436         case CRYPTO_POLY1305:
1437                 if (csp->csp_auth_klen != POLY1305_KEY_LEN)
1438                         return (false);
1439                 break;
1440         case CRYPTO_AES_CCM_CBC_MAC:
1441                 switch (csp->csp_auth_klen * 8) {
1442                 case 128:
1443                 case 192:
1444                 case 256:
1445                         break;
1446                 default:
1447                         return (false);
1448                 }
1449                 if (csp->csp_auth_key == NULL)
1450                         return (false);
1451                 break;
1452         }
1453         return (true);
1454 }
1455
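/*
 * Check whether this implementation supports the requested cipher.
 * Except for the null transform, the supplied IV length must match
 * the transform's native IV size.
 */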
1456 static bool
1457 swcr_cipher_supported(const struct crypto_session_params *csp)
1458 {
1459         const struct enc_xform *txf;
1460
1461         txf = crypto_cipher(csp);
1462         if (txf == NULL)
1463                 return (false);
1464         if (csp->csp_cipher_alg != CRYPTO_NULL_CBC &&
1465             txf->ivsize != csp->csp_ivlen)
1466                 return (false);
1467         return (true);
1468 }
1469
1470 #define SUPPORTED_SES (CSP_F_SEPARATE_OUTPUT | CSP_F_SEPARATE_AAD | CSP_F_ESN)
1471
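/*
 * Validate a session request against the modes and algorithms
 * implemented here; accepted requests are claimed at software
 * probe priority.
 */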
1472 static int
1473 swcr_probesession(device_t dev, const struct crypto_session_params *csp)
1474 {
1475         if ((csp->csp_flags & ~(SUPPORTED_SES)) != 0)
1476                 return (EINVAL);
1477         switch (csp->csp_mode) {
1478         case CSP_MODE_COMPRESS:
1479                 switch (csp->csp_cipher_alg) {
1480                 case CRYPTO_DEFLATE_COMP:
1481                         break;
1482                 default:
1483                         return (EINVAL);
1484                 }
1485                 break;
1486         case CSP_MODE_CIPHER:
1487                 switch (csp->csp_cipher_alg) {
1488                 case CRYPTO_AES_NIST_GCM_16:
1489                 case CRYPTO_AES_CCM_16:
1490                 case CRYPTO_CHACHA20_POLY1305:
1491                 case CRYPTO_XCHACHA20_POLY1305:
1492                         return (EINVAL);
1493                 default:
1494                         if (!swcr_cipher_supported(csp))
1495                                 return (EINVAL);
1496                         break;
1497                 }
1498                 break;
1499         case CSP_MODE_DIGEST:
1500                 if (!swcr_auth_supported(csp))
1501                         return (EINVAL);
1502                 break;
1503         case CSP_MODE_AEAD:
1504                 switch (csp->csp_cipher_alg) {
1505                 case CRYPTO_AES_NIST_GCM_16:
1506                 case CRYPTO_AES_CCM_16:
1507                         switch (csp->csp_cipher_klen * 8) {
1508                         case 128:
1509                         case 192:
1510                         case 256:
1511                                 break;
1512                         default:
1513                                 return (EINVAL);
1514                         }
1515                         break;
1516                 case CRYPTO_CHACHA20_POLY1305:
1517                 case CRYPTO_XCHACHA20_POLY1305:
1518                         break;
1519                 default:
1520                         return (EINVAL);
1521                 }
1522                 break;
1523         case CSP_MODE_ETA:
1524                 /* AEAD algorithms cannot be used for EtA. */
1525                 switch (csp->csp_cipher_alg) {
1526                 case CRYPTO_AES_NIST_GCM_16:
1527                 case CRYPTO_AES_CCM_16:
1528                 case CRYPTO_CHACHA20_POLY1305:
1529                 case CRYPTO_XCHACHA20_POLY1305:
1530                         return (EINVAL);
1531                 }
1532                 switch (csp->csp_auth_alg) {
1533                 case CRYPTO_AES_NIST_GMAC:
1534                 case CRYPTO_AES_CCM_CBC_MAC:
1535                         return (EINVAL);
1536                 }
1537
1538                 if (!swcr_cipher_supported(csp) ||
1539                     !swcr_auth_supported(csp))
1540                         return (EINVAL);
1541                 break;
1542         default:
1543                 return (EINVAL);
1544         }
1545
1546         return (CRYPTODEV_PROBE_SOFTWARE);
1547 }
1548
1549 /*
1550  * Generate a new software session.
1551  */
1552 static int
1553 swcr_newsession(device_t dev, crypto_session_t cses,
1554     const struct crypto_session_params *csp)
1555 {
1556         struct swcr_session *ses;
1557         const struct comp_algo *cxf;
1558         int error;
1559
1560         ses = crypto_get_driver_session(cses);
1561
1562         error = 0;
1563         switch (csp->csp_mode) {
1564         case CSP_MODE_COMPRESS:
1565                 switch (csp->csp_cipher_alg) {
1566                 case CRYPTO_DEFLATE_COMP:
1567                         cxf = &comp_algo_deflate;
1568                         break;
1569 #ifdef INVARIANTS
1570                 default:
1571                         panic("bad compression algo");
1572 #endif
1573                 }
1574                 ses->swcr_compdec.sw_cxf = cxf;
1575                 ses->swcr_process = swcr_compdec;
1576                 break;
1577         case CSP_MODE_CIPHER:
1578                 switch (csp->csp_cipher_alg) {
1579                 case CRYPTO_NULL_CBC:
1580                         ses->swcr_process = swcr_null;
1581                         break;
1582 #ifdef INVARIANTS
1583                 case CRYPTO_AES_NIST_GCM_16:
1584                 case CRYPTO_AES_CCM_16:
1585                 case CRYPTO_CHACHA20_POLY1305:
1586                 case CRYPTO_XCHACHA20_POLY1305:
1587                         panic("bad cipher algo");
1588 #endif
1589                 default:
1590                         error = swcr_setup_cipher(ses, csp);
1591                         if (error == 0)
1592                                 ses->swcr_process = swcr_encdec;
1593                 }
1594                 break;
1595         case CSP_MODE_DIGEST:
1596                 error = swcr_setup_auth(ses, csp);
1597                 break;
1598         case CSP_MODE_AEAD:
1599                 switch (csp->csp_cipher_alg) {
1600                 case CRYPTO_AES_NIST_GCM_16:
1601                         error = swcr_setup_aead(ses, csp);
1602                         if (error == 0)
1603                                 ses->swcr_process = swcr_gcm;
1604                         break;
1605                 case CRYPTO_AES_CCM_16:
1606                         error = swcr_setup_aead(ses, csp);
1607                         if (error == 0)
1608                                 ses->swcr_process = swcr_ccm;
1609                         break;
1610                 case CRYPTO_CHACHA20_POLY1305:
1611                 case CRYPTO_XCHACHA20_POLY1305:
1612                         error = swcr_setup_aead(ses, csp);
1613                         if (error == 0)
1614                                 ses->swcr_process = swcr_chacha20_poly1305;
1615                         break;
1616 #ifdef INVARIANTS
1617                 default:
1618                         panic("bad aead algo");
1619 #endif
1620                 }
1621                 break;
1622         case CSP_MODE_ETA:
1623 #ifdef INVARIANTS
1624                 switch (csp->csp_cipher_alg) {
1625                 case CRYPTO_AES_NIST_GCM_16:
1626                 case CRYPTO_AES_CCM_16:
1627                 case CRYPTO_CHACHA20_POLY1305:
1628                 case CRYPTO_XCHACHA20_POLY1305:
1629                         panic("bad eta cipher algo");
1630                 }
1631                 switch (csp->csp_auth_alg) {
1632                 case CRYPTO_AES_NIST_GMAC:
1633                 case CRYPTO_AES_CCM_CBC_MAC:
1634                         panic("bad eta auth algo");
1635                 }
1636 #endif
1637
1638                 error = swcr_setup_auth(ses, csp);
1639                 if (error)
1640                         break;
1641                 if (csp->csp_cipher_alg == CRYPTO_NULL_CBC) {
1642                         /* Effectively degrade to digest mode. */
1643                         ses->swcr_process = swcr_authcompute;
1644                         break;
1645                 }
1646
1647                 error = swcr_setup_cipher(ses, csp);
1648                 if (error == 0)
1649                         ses->swcr_process = swcr_eta;
1650                 break;
1651         default:
1652                 error = EINVAL;
1653         }
1654
1655         if (error)
1656                 swcr_freesession(dev, cses);
1657         return (error);
1658 }
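
/*
 * Usage sketch (illustration only, not part of the driver): a
 * consumer does not call swcr_newsession() directly but goes
 * through the opencrypto framework, e.g. to open a SHA-256-HMAC
 * digest session that this driver can service ("key" and its
 * 32-byte length are assumed caller-supplied values):
 *
 *	struct crypto_session_params csp;
 *	crypto_session_t sid;
 *	int error;
 *
 *	memset(&csp, 0, sizeof(csp));
 *	csp.csp_mode = CSP_MODE_DIGEST;
 *	csp.csp_auth_alg = CRYPTO_SHA2_256_HMAC;
 *	csp.csp_auth_key = key;
 *	csp.csp_auth_klen = 32;
 *	error = crypto_newsession(&sid, &csp, CRYPTOCAP_F_SOFTWARE);
 *	...
 *	crypto_freesession(sid);
 */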
1659
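/*
 * Release a software session.  zfree() zeroes each context before
 * freeing it so that no key material is left behind.
 */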
1660 static void
1661 swcr_freesession(device_t dev, crypto_session_t cses)
1662 {
1663         struct swcr_session *ses;
1664
1665         ses = crypto_get_driver_session(cses);
1666
1667         zfree(ses->swcr_encdec.sw_ctx, M_CRYPTO_DATA);
1668         zfree(ses->swcr_auth.sw_ictx, M_CRYPTO_DATA);
1669         zfree(ses->swcr_auth.sw_octx, M_CRYPTO_DATA);
1670 }
1671
1672 /*
1673  * Process a software request.
1674  */
1675 static int
1676 swcr_process(device_t dev, struct cryptop *crp, int hint)
1677 {
1678         struct swcr_session *ses;
1679
1680         ses = crypto_get_driver_session(crp->crp_session);
1681
1682         crp->crp_etype = ses->swcr_process(ses, crp);
1683
1684         crypto_done(crp);
1685         return (0);
1686 }
1687
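/*
 * Add a single "cryptosoft" child below the parent bus if one is
 * not already present.
 */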
1688 static void
1689 swcr_identify(driver_t *drv, device_t parent)
1690 {
1691         /* NB: order 10 is so we get attached after h/w devices */
1692         if (device_find_child(parent, "cryptosoft", -1) == NULL &&
1693             BUS_ADD_CHILD(parent, 10, "cryptosoft", 0) == NULL)
1694                 panic("cryptosoft: could not attach");
1695 }
1696
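/*
 * Claim only the explicitly added device (no wildcard probing)
 * and keep the probe quiet.
 */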
1697 static int
1698 swcr_probe(device_t dev)
1699 {
1700         device_set_desc(dev, "software crypto");
1701         device_quiet(dev);
1702         return (BUS_PROBE_NOWILDCARD);
1703 }
1704
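/*
 * Register this driver with the crypto framework as a synchronous
 * software provider.
 */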
1705 static int
1706 swcr_attach(device_t dev)
1707 {
1708
1709         swcr_id = crypto_get_driverid(dev, sizeof(struct swcr_session),
1710                         CRYPTOCAP_F_SOFTWARE | CRYPTOCAP_F_SYNC);
1711         if (swcr_id < 0) {
1712                 device_printf(dev, "cannot initialize!\n");
1713                 return (ENXIO);
1714         }
1715
1716         return (0);
1717 }
1718
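/*
 * Unregister from the crypto framework on detach.
 */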
1719 static int
1720 swcr_detach(device_t dev)
1721 {
1722         crypto_unregister_all(swcr_id);
1723         return (0);
1724 }
1725
1726 static device_method_t swcr_methods[] = {
1727         DEVMETHOD(device_identify,      swcr_identify),
1728         DEVMETHOD(device_probe,         swcr_probe),
1729         DEVMETHOD(device_attach,        swcr_attach),
1730         DEVMETHOD(device_detach,        swcr_detach),
1731
1732         DEVMETHOD(cryptodev_probesession, swcr_probesession),
1733         DEVMETHOD(cryptodev_newsession, swcr_newsession),
1734         DEVMETHOD(cryptodev_freesession, swcr_freesession),
1735         DEVMETHOD(cryptodev_process,    swcr_process),
1736
1737         DEVMETHOD_END
1738 };
1739
1740 static driver_t swcr_driver = {
1741         "cryptosoft",
1742         swcr_methods,
1743         0,              /* NB: no softc */
1744 };
1745 static devclass_t swcr_devclass;
1746
1747 /*
1748  * NB: We explicitly reference the crypto module so we
1749  * get the necessary ordering when built as a loadable
1750  * module.  This is required because we bundle the crypto
1751  * module code together with the cryptosoft driver; otherwise
1752  * normal module dependencies would handle the ordering.
1753  */
1754 extern int crypto_modevent(struct module *, int, void *);
1755 /* XXX where to attach */
1756 DRIVER_MODULE(cryptosoft, nexus, swcr_driver, swcr_devclass, crypto_modevent, 0);
1757 MODULE_VERSION(cryptosoft, 1);
1758 MODULE_DEPEND(cryptosoft, crypto, 1, 1, 1);