2 * SPDX-License-Identifier: BSD-2-Clause
4 * Copyright (c) 2019 Netflix Inc.
6 * Redistribution and use in source and binary forms, with or without
7 * modification, are permitted provided that the following conditions
9 * 1. Redistributions of source code must retain the above copyright
10 * notice, this list of conditions and the following disclaimer.
11 * 2. Redistributions in binary form must reproduce the above copyright
12 * notice, this list of conditions and the following disclaimer in the
13 * documentation and/or other materials provided with the distribution.
15 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
16 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
17 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
18 * ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE
19 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
20 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
21 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
22 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
23 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
24 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
28 #include <sys/cdefs.h>
29 __FBSDID("$FreeBSD$");
31 #include <sys/param.h>
32 #include <sys/systm.h>
33 #include <sys/counter.h>
34 #include <sys/endian.h>
35 #include <sys/kernel.h>
38 #include <sys/malloc.h>
40 #include <sys/module.h>
41 #include <sys/mutex.h>
42 #include <sys/sysctl.h>
46 #include <vm/vm_param.h>
47 #include <opencrypto/cryptodev.h>
48 #include <opencrypto/ktls.h>
50 struct ktls_ocf_session {
52 crypto_session_t mac_sid;
57 /* Only used for TLS 1.0 with the implicit IV. */
62 char iv[AES_BLOCK_LEN];
65 struct ocf_operation {
66 struct ktls_ocf_session *os;
70 static MALLOC_DEFINE(M_KTLS_OCF, "ktls_ocf", "OCF KTLS");
72 SYSCTL_DECL(_kern_ipc_tls);
73 SYSCTL_DECL(_kern_ipc_tls_stats);
75 static SYSCTL_NODE(_kern_ipc_tls_stats, OID_AUTO, ocf,
76 CTLFLAG_RD | CTLFLAG_MPSAFE, 0,
77 "Kernel TLS offload via OCF stats");
79 static COUNTER_U64_DEFINE_EARLY(ocf_tls10_cbc_encrypts);
80 SYSCTL_COUNTER_U64(_kern_ipc_tls_stats_ocf, OID_AUTO, tls10_cbc_encrypts,
81 CTLFLAG_RD, &ocf_tls10_cbc_encrypts,
82 "Total number of OCF TLS 1.0 CBC encryption operations");
84 static COUNTER_U64_DEFINE_EARLY(ocf_tls11_cbc_encrypts);
85 SYSCTL_COUNTER_U64(_kern_ipc_tls_stats_ocf, OID_AUTO, tls11_cbc_encrypts,
86 CTLFLAG_RD, &ocf_tls11_cbc_encrypts,
87 "Total number of OCF TLS 1.1/1.2 CBC encryption operations");
89 static COUNTER_U64_DEFINE_EARLY(ocf_tls12_gcm_decrypts);
90 SYSCTL_COUNTER_U64(_kern_ipc_tls_stats_ocf, OID_AUTO, tls12_gcm_decrypts,
91 CTLFLAG_RD, &ocf_tls12_gcm_decrypts,
92 "Total number of OCF TLS 1.2 GCM decryption operations");
94 static COUNTER_U64_DEFINE_EARLY(ocf_tls12_gcm_encrypts);
95 SYSCTL_COUNTER_U64(_kern_ipc_tls_stats_ocf, OID_AUTO, tls12_gcm_encrypts,
96 CTLFLAG_RD, &ocf_tls12_gcm_encrypts,
97 "Total number of OCF TLS 1.2 GCM encryption operations");
99 static COUNTER_U64_DEFINE_EARLY(ocf_tls12_chacha20_decrypts);
100 SYSCTL_COUNTER_U64(_kern_ipc_tls_stats_ocf, OID_AUTO, tls12_chacha20_decrypts,
101 CTLFLAG_RD, &ocf_tls12_chacha20_decrypts,
102 "Total number of OCF TLS 1.2 Chacha20-Poly1305 decryption operations");
104 static COUNTER_U64_DEFINE_EARLY(ocf_tls12_chacha20_encrypts);
105 SYSCTL_COUNTER_U64(_kern_ipc_tls_stats_ocf, OID_AUTO, tls12_chacha20_encrypts,
106 CTLFLAG_RD, &ocf_tls12_chacha20_encrypts,
107 "Total number of OCF TLS 1.2 Chacha20-Poly1305 encryption operations");
109 static COUNTER_U64_DEFINE_EARLY(ocf_tls13_gcm_decrypts);
110 SYSCTL_COUNTER_U64(_kern_ipc_tls_stats_ocf, OID_AUTO, tls13_gcm_decrypts,
111 CTLFLAG_RD, &ocf_tls13_gcm_decrypts,
112 "Total number of OCF TLS 1.3 GCM decryption operations");
114 static COUNTER_U64_DEFINE_EARLY(ocf_tls13_gcm_encrypts);
115 SYSCTL_COUNTER_U64(_kern_ipc_tls_stats_ocf, OID_AUTO, tls13_gcm_encrypts,
116 CTLFLAG_RD, &ocf_tls13_gcm_encrypts,
117 "Total number of OCF TLS 1.3 GCM encryption operations");
119 static COUNTER_U64_DEFINE_EARLY(ocf_tls13_chacha20_decrypts);
120 SYSCTL_COUNTER_U64(_kern_ipc_tls_stats_ocf, OID_AUTO, tls13_chacha20_decrypts,
121 CTLFLAG_RD, &ocf_tls13_chacha20_decrypts,
122 "Total number of OCF TLS 1.3 Chacha20-Poly1305 decryption operations");
124 static COUNTER_U64_DEFINE_EARLY(ocf_tls13_chacha20_encrypts);
125 SYSCTL_COUNTER_U64(_kern_ipc_tls_stats_ocf, OID_AUTO, tls13_chacha20_encrypts,
126 CTLFLAG_RD, &ocf_tls13_chacha20_encrypts,
127 "Total number of OCF TLS 1.3 Chacha20-Poly1305 encryption operations");
129 static COUNTER_U64_DEFINE_EARLY(ocf_inplace);
130 SYSCTL_COUNTER_U64(_kern_ipc_tls_stats_ocf, OID_AUTO, inplace,
131 CTLFLAG_RD, &ocf_inplace,
132 "Total number of OCF in-place operations");
134 static COUNTER_U64_DEFINE_EARLY(ocf_separate_output);
135 SYSCTL_COUNTER_U64(_kern_ipc_tls_stats_ocf, OID_AUTO, separate_output,
136 CTLFLAG_RD, &ocf_separate_output,
137 "Total number of OCF operations with a separate output buffer");
139 static COUNTER_U64_DEFINE_EARLY(ocf_retries);
140 SYSCTL_COUNTER_U64(_kern_ipc_tls_stats_ocf, OID_AUTO, retries, CTLFLAG_RD,
142 "Number of OCF encryption operation retries");
145 ktls_ocf_callback_sync(struct cryptop *crp __unused)
151 ktls_ocf_callback_async(struct cryptop *crp)
153 struct ocf_operation *oo;
155 oo = crp->crp_opaque;
156 mtx_lock(&oo->os->lock);
158 mtx_unlock(&oo->os->lock);
164 ktls_ocf_dispatch(struct ktls_ocf_session *os, struct cryptop *crp)
166 struct ocf_operation oo;
173 crp->crp_opaque = &oo;
175 async = !CRYPTO_SESS_SYNC(crp->crp_session);
176 crp->crp_callback = async ? ktls_ocf_callback_async :
177 ktls_ocf_callback_sync;
179 error = crypto_dispatch(crp);
185 mtx_sleep(&oo, &os->lock, 0, "ocfktls", 0);
186 mtx_unlock(&os->lock);
189 if (crp->crp_etype != EAGAIN) {
190 error = crp->crp_etype;
195 crp->crp_flags &= ~CRYPTO_F_DONE;
197 counter_u64_add(ocf_retries, 1);
203 ktls_ocf_dispatch_async_cb(struct cryptop *crp)
205 struct ktls_ocf_encrypt_state *state;
208 state = crp->crp_opaque;
209 if (crp->crp_etype == EAGAIN) {
211 crp->crp_flags &= ~CRYPTO_F_DONE;
212 counter_u64_add(ocf_retries, 1);
213 error = crypto_dispatch(crp);
215 crypto_destroyreq(crp);
216 ktls_encrypt_cb(state, error);
221 error = crp->crp_etype;
222 crypto_destroyreq(crp);
223 ktls_encrypt_cb(state, error);
228 ktls_ocf_dispatch_async(struct ktls_ocf_encrypt_state *state,
233 crp->crp_opaque = state;
234 crp->crp_callback = ktls_ocf_dispatch_async_cb;
235 error = crypto_dispatch(crp);
237 crypto_destroyreq(crp);
/*
 * Encrypt one TLS MAC-then-encrypt (AES-CBC + HMAC) record: first an
 * HMAC is computed over a pseudo-header (struct tls_mac_data) plus the
 * payload, then CBC padding is appended and payload+MAC+padding is
 * encrypted, either in place in the mbuf or into 'outiov'.
 *
 * NOTE(review): this extraction has lost lines (each surviving line is
 * prefixed with its original line number), so declarations ('uio', 'crp',
 * 'pad', 'pgoff', 'i'), braces, #ifdef INVARIANTS guards, and return
 * statements are missing below — restore from upstream before compiling.
 */
242 ktls_ocf_tls_cbc_encrypt(struct ktls_ocf_encrypt_state *state,
243 struct ktls_session *tls, struct mbuf *m, struct iovec *outiov,
246 const struct tls_record_layer *hdr;
248 struct tls_mac_data *ad;
250 struct ktls_ocf_session *os;
251 struct iovec iov[m->m_epg_npgs + 2];
254 uint16_t tls_comp_len;
257 MPASS(outiovcnt + 1 <= nitems(iov));
259 os = tls->ocf_session;
260 hdr = (const struct tls_record_layer *)m->m_epg_hdr;
/* AES-CBC is only dispatched synchronously (see ktls_ocf_try()). */
263 MPASS(tls->sync_dispatch);
/*
 * TLS 1.0 chains the CBC IV from the previous record's last cipher
 * block, so records must be encrypted strictly in sequence and one at
 * a time; enforce both here.
 */
266 if (os->implicit_iv) {
268 KASSERT(!os->in_progress,
269 ("concurrent implicit IV encryptions"));
270 if (os->next_seqno != m->m_epg_seqno) {
271 printf("KTLS CBC: TLS records out of order. "
272 "Expected %ju, got %ju\n",
273 (uintmax_t)os->next_seqno,
274 (uintmax_t)m->m_epg_seqno);
275 mtx_unlock(&os->lock);
278 os->in_progress = true;
279 mtx_unlock(&os->lock);
283 /* Payload length. */
284 tls_comp_len = m->m_len - (m->m_epg_hdrlen + m->m_epg_trllen);
286 /* Initialize the AAD. */
288 ad->seq = htobe64(m->m_epg_seqno);
289 ad->type = hdr->tls_type;
290 ad->tls_vmajor = hdr->tls_vmajor;
291 ad->tls_vminor = hdr->tls_vminor;
292 ad->tls_length = htons(tls_comp_len);
294 /* First, compute the MAC. */
/* iov[0] = pseudo-header, iov[1..npgs] = payload pages, last = trailer. */
295 iov[0].iov_base = ad;
296 iov[0].iov_len = sizeof(*ad);
297 pgoff = m->m_epg_1st_off;
/* Only the first page has a non-zero starting offset. */
298 for (i = 0; i < m->m_epg_npgs; i++, pgoff = 0) {
299 iov[i + 1].iov_base = (void *)PHYS_TO_DMAP(m->m_epg_pa[i] +
301 iov[i + 1].iov_len = m_epg_pagelen(m, i, pgoff);
/* The digest is written into the start of the record trailer. */
303 iov[m->m_epg_npgs + 1].iov_base = m->m_epg_trail;
304 iov[m->m_epg_npgs + 1].iov_len = os->mac_len;
306 uio->uio_iovcnt = m->m_epg_npgs + 2;
308 uio->uio_segflg = UIO_SYSSPACE;
309 uio->uio_td = curthread;
310 uio->uio_resid = sizeof(*ad) + tls_comp_len + os->mac_len;
/* Digest request on the separate MAC session. */
312 crypto_initreq(crp, os->mac_sid);
313 crp->crp_payload_start = 0;
314 crp->crp_payload_length = sizeof(*ad) + tls_comp_len;
315 crp->crp_digest_start = crp->crp_payload_length;
316 crp->crp_op = CRYPTO_OP_COMPUTE_DIGEST;
317 crp->crp_flags = CRYPTO_F_CBIMM;
318 crypto_use_uio(crp, uio);
319 error = ktls_ocf_dispatch(os, crp);
321 crypto_destroyreq(crp);
/* On MAC failure, drop the in-progress claim before erroring out. */
324 if (os->implicit_iv) {
326 os->in_progress = false;
327 mtx_unlock(&os->lock);
333 /* Second, add the padding. */
/* TLS CBC padding: pad+1 bytes each holding the value 'pad'. */
334 pad = m->m_epg_trllen - os->mac_len - 1;
335 for (i = 0; i < pad + 1; i++)
336 m->m_epg_trail[os->mac_len + i] = pad;
338 /* Finally, encrypt the record. */
339 crypto_initreq(crp, os->sid);
340 crp->crp_payload_start = m->m_epg_hdrlen;
341 crp->crp_payload_length = tls_comp_len + m->m_epg_trllen;
342 KASSERT(crp->crp_payload_length % AES_BLOCK_LEN == 0,
343 ("invalid encryption size"));
344 crypto_use_single_mbuf(crp, m);
345 crp->crp_op = CRYPTO_OP_ENCRYPT;
346 crp->crp_flags = CRYPTO_F_CBIMM | CRYPTO_F_IV_SEPARATE;
/* TLS 1.0 uses the chained IV; TLS 1.1+ reads the explicit IV after the header. */
348 memcpy(crp->crp_iv, os->iv, AES_BLOCK_LEN);
350 memcpy(crp->crp_iv, hdr + 1, AES_BLOCK_LEN);
/* Optionally direct ciphertext to a separate output buffer. */
352 if (outiov != NULL) {
353 uio->uio_iov = outiov;
354 uio->uio_iovcnt = outiovcnt;
356 uio->uio_segflg = UIO_SYSSPACE;
357 uio->uio_td = curthread;
358 uio->uio_resid = crp->crp_payload_length;
359 crypto_use_output_uio(crp, uio);
363 counter_u64_add(ocf_tls10_cbc_encrypts, 1);
365 counter_u64_add(ocf_tls11_cbc_encrypts, 1);
367 counter_u64_add(ocf_separate_output, 1);
369 counter_u64_add(ocf_inplace, 1);
370 error = ktls_ocf_dispatch(os, crp);
372 crypto_destroyreq(crp);
/*
 * Save the last ciphertext block as the IV for the next TLS 1.0
 * record, and advance the expected sequence number.
 */
374 if (os->implicit_iv) {
375 KASSERT(os->mac_len + pad + 1 >= AES_BLOCK_LEN,
376 ("trailer too short to read IV"));
377 memcpy(os->iv, m->m_epg_trail + m->m_epg_trllen - AES_BLOCK_LEN,
381 os->next_seqno = m->m_epg_seqno + 1;
382 os->in_progress = false;
383 mtx_unlock(&os->lock);
/*
 * Encrypt one TLS 1.2 AEAD (AES-GCM or Chacha20-Poly1305) record in a
 * single pass: the AAD is the TLS 1.2 pseudo-header, the payload is
 * encrypted in place (or into 'outiov'), and the authentication tag is
 * written into the record trailer.
 *
 * NOTE(review): lines were lost in extraction (fused line-number
 * prefixes remain); declarations of 'uio'/'crp', braces, and the final
 * return are missing — restore from upstream before compiling.
 */
390 ktls_ocf_tls12_aead_encrypt(struct ktls_ocf_encrypt_state *state,
391 struct ktls_session *tls, struct mbuf *m, struct iovec *outiov,
394 const struct tls_record_layer *hdr;
396 struct tls_aead_data *ad;
398 struct ktls_ocf_session *os;
400 uint16_t tls_comp_len;
402 os = tls->ocf_session;
403 hdr = (const struct tls_record_layer *)m->m_epg_hdr;
407 crypto_initreq(crp, os->sid);
/*
 * GCM nonce: 4-byte implicit salt from the key block followed by the
 * 8-byte explicit nonce carried after the record header.
 */
410 if (tls->params.cipher_algorithm == CRYPTO_AES_NIST_GCM_16) {
411 memcpy(crp->crp_iv, tls->params.iv, TLS_AEAD_GCM_LEN);
412 memcpy(crp->crp_iv + TLS_AEAD_GCM_16, hdr + 1,
416 * Chacha20-Poly1305 constructs the IV for TLS 1.2
417 * identically to constructing the IV for AEAD in TLS
420 memcpy(crp->crp_iv, tls->params.iv, tls->params.iv_len);
/* XOR the record sequence number into the low 8 bytes of the nonce. */
421 *(uint64_t *)(crp->crp_iv + 4) ^= htobe64(m->m_epg_seqno);
/* Plaintext length and TLS 1.2 pseudo-header used as AAD. */
426 tls_comp_len = m->m_len - (m->m_epg_hdrlen + m->m_epg_trllen);
427 ad->seq = htobe64(m->m_epg_seqno);
428 ad->type = hdr->tls_type;
429 ad->tls_vmajor = hdr->tls_vmajor;
430 ad->tls_vminor = hdr->tls_vminor;
431 ad->tls_length = htons(tls_comp_len);
433 crp->crp_aad_length = sizeof(*ad);
435 /* Set fields for input payload. */
436 crypto_use_single_mbuf(crp, m);
437 crp->crp_payload_start = m->m_epg_hdrlen;
438 crp->crp_payload_length = tls_comp_len;
/* With a separate output buffer, offsets are relative to that buffer. */
440 if (outiov != NULL) {
441 crp->crp_digest_start = crp->crp_payload_length;
443 uio->uio_iov = outiov;
444 uio->uio_iovcnt = outiovcnt;
446 uio->uio_segflg = UIO_SYSSPACE;
447 uio->uio_td = curthread;
448 uio->uio_resid = crp->crp_payload_length + tls->params.tls_tlen;
449 crypto_use_output_uio(crp, uio);
451 crp->crp_digest_start = crp->crp_payload_start +
452 crp->crp_payload_length;
454 crp->crp_op = CRYPTO_OP_ENCRYPT | CRYPTO_OP_COMPUTE_DIGEST;
455 crp->crp_flags = CRYPTO_F_CBIMM | CRYPTO_F_IV_SEPARATE;
456 if (tls->params.cipher_algorithm == CRYPTO_AES_NIST_GCM_16)
457 counter_u64_add(ocf_tls12_gcm_encrypts, 1);
459 counter_u64_add(ocf_tls12_chacha20_encrypts, 1);
461 counter_u64_add(ocf_separate_output, 1);
463 counter_u64_add(ocf_inplace, 1);
/* Synchronous path waits here; async path completes via callback. */
464 if (tls->sync_dispatch) {
465 error = ktls_ocf_dispatch(os, crp);
466 crypto_destroyreq(crp);
468 error = ktls_ocf_dispatch_async(state, crp);
/*
 * Decrypt and verify one received TLS 1.2 AEAD record in place in the
 * mbuf chain.  The nonce and AAD are rebuilt exactly as the sender did,
 * and CRYPTO_OP_VERIFY_DIGEST makes OCF check the authentication tag.
 *
 * NOTE(review): lines were lost in extraction (fused line-number
 * prefixes remain); the declarations of 'crp'/'error', braces, and the
 * final return are missing — restore from upstream before compiling.
 */
473 ktls_ocf_tls12_aead_decrypt(struct ktls_session *tls,
474 const struct tls_record_layer *hdr, struct mbuf *m, uint64_t seqno,
477 struct tls_aead_data ad;
479 struct ktls_ocf_session *os;
481 uint16_t tls_comp_len;
483 os = tls->ocf_session;
485 crypto_initreq(&crp, os->sid);
/* Rebuild the nonce: GCM salt+explicit nonce, or Chacha20 seqno XOR. */
488 if (tls->params.cipher_algorithm == CRYPTO_AES_NIST_GCM_16) {
489 memcpy(crp.crp_iv, tls->params.iv, TLS_AEAD_GCM_LEN);
490 memcpy(crp.crp_iv + TLS_AEAD_GCM_LEN, hdr + 1,
494 * Chacha20-Poly1305 constructs the IV for TLS 1.2
495 * identically to constructing the IV for AEAD in TLS
498 memcpy(crp.crp_iv, tls->params.iv, tls->params.iv_len);
499 *(uint64_t *)(crp.crp_iv + 4) ^= htobe64(seqno);
/*
 * Recover the plaintext length: GCM records also carry an 8-byte
 * explicit nonce in addition to the tag; Chacha20 carries only the tag.
 */
503 if (tls->params.cipher_algorithm == CRYPTO_AES_NIST_GCM_16)
504 tls_comp_len = ntohs(hdr->tls_length) -
505 (AES_GMAC_HASH_LEN + sizeof(uint64_t));
507 tls_comp_len = ntohs(hdr->tls_length) - POLY1305_HASH_LEN;
/* TLS 1.2 pseudo-header used as AAD, as on the encrypt side. */
508 ad.seq = htobe64(seqno);
509 ad.type = hdr->tls_type;
510 ad.tls_vmajor = hdr->tls_vmajor;
511 ad.tls_vminor = hdr->tls_vminor;
512 ad.tls_length = htons(tls_comp_len);
514 crp.crp_aad_length = sizeof(ad);
516 crp.crp_payload_start = tls->params.tls_hlen;
517 crp.crp_payload_length = tls_comp_len;
518 crp.crp_digest_start = crp.crp_payload_start + crp.crp_payload_length;
520 crp.crp_op = CRYPTO_OP_DECRYPT | CRYPTO_OP_VERIFY_DIGEST;
521 crp.crp_flags = CRYPTO_F_CBIMM | CRYPTO_F_IV_SEPARATE;
522 crypto_use_mbuf(&crp, m);
524 if (tls->params.cipher_algorithm == CRYPTO_AES_NIST_GCM_16)
525 counter_u64_add(ocf_tls12_gcm_decrypts, 1);
527 counter_u64_add(ocf_tls12_chacha20_decrypts, 1);
528 error = ktls_ocf_dispatch(os, &crp);
530 crypto_destroyreq(&crp);
/* Tell the caller how many trailing bytes (tag etc.) to strip. */
531 *trailer_len = tls->params.tls_tlen;
/*
 * Encrypt one TLS 1.3 AEAD record.  Unlike TLS 1.2, the nonce is fully
 * implicit (IV XOR sequence number), the AAD is the on-the-wire record
 * header, and the real record type is appended as the last plaintext
 * byte before the tag.
 *
 * NOTE(review): lines were lost in extraction (fused line-number
 * prefixes remain); declarations of 'uio'/'crp'/'nonce', the 'ad'
 * initialization for 'ad->seq'-less TLS 1.3 AAD, braces, and the final
 * return are missing — restore from upstream before compiling.
 */
536 ktls_ocf_tls13_aead_encrypt(struct ktls_ocf_encrypt_state *state,
537 struct ktls_session *tls, struct mbuf *m, struct iovec *outiov,
540 const struct tls_record_layer *hdr;
542 struct tls_aead_data_13 *ad;
544 struct ktls_ocf_session *os;
548 os = tls->ocf_session;
549 hdr = (const struct tls_record_layer *)m->m_epg_hdr;
553 crypto_initreq(crp, os->sid);
555 /* Setup the nonce. */
556 memcpy(nonce, tls->params.iv, tls->params.iv_len);
/* XOR the record sequence number into the low 8 bytes (RFC 8446 5.3). */
557 *(uint64_t *)(nonce + 4) ^= htobe64(m->m_epg_seqno);
/* TLS 1.3 AAD is the record header as transmitted. */
561 ad->type = hdr->tls_type;
562 ad->tls_vmajor = hdr->tls_vmajor;
563 ad->tls_vminor = hdr->tls_vminor;
564 ad->tls_length = hdr->tls_length;
566 crp->crp_aad_length = sizeof(*ad);
568 /* Set fields for input payload. */
569 crypto_use_single_mbuf(crp, m);
570 crp->crp_payload_start = m->m_epg_hdrlen;
571 crp->crp_payload_length = m->m_len -
572 (m->m_epg_hdrlen + m->m_epg_trllen);
574 /* Store the record type as the first byte of the trailer. */
575 m->m_epg_trail[0] = m->m_epg_record_type;
/* That type byte is encrypted along with the payload. */
576 crp->crp_payload_length++;
/* With a separate output buffer, offsets are relative to that buffer. */
578 if (outiov != NULL) {
579 crp->crp_digest_start = crp->crp_payload_length;
581 uio->uio_iov = outiov;
582 uio->uio_iovcnt = outiovcnt;
584 uio->uio_segflg = UIO_SYSSPACE;
585 uio->uio_td = curthread;
586 uio->uio_resid = m->m_len - m->m_epg_hdrlen;
587 crypto_use_output_uio(crp, uio);
589 crp->crp_digest_start = crp->crp_payload_start +
590 crp->crp_payload_length;
592 crp->crp_op = CRYPTO_OP_ENCRYPT | CRYPTO_OP_COMPUTE_DIGEST;
593 crp->crp_flags = CRYPTO_F_CBIMM | CRYPTO_F_IV_SEPARATE;
595 memcpy(crp->crp_iv, nonce, sizeof(nonce));
597 if (tls->params.cipher_algorithm == CRYPTO_AES_NIST_GCM_16)
598 counter_u64_add(ocf_tls13_gcm_encrypts, 1);
600 counter_u64_add(ocf_tls13_chacha20_encrypts, 1);
602 counter_u64_add(ocf_separate_output, 1);
604 counter_u64_add(ocf_inplace, 1);
/* Synchronous path waits here; async path completes via callback. */
605 if (tls->sync_dispatch) {
606 error = ktls_ocf_dispatch(os, crp);
607 crypto_destroyreq(crp);
609 error = ktls_ocf_dispatch_async(state, crp);
/*
 * Decrypt and verify one received TLS 1.3 AEAD record in place.  The
 * nonce is IV XOR sequence number, the AAD is the raw record header,
 * and the tag is verified by OCF.  The decrypted trailer byte holding
 * the inner record type is left for the caller ('tag_len' excludes it).
 *
 * NOTE(review): lines were lost in extraction (fused line-number
 * prefixes remain); declarations of 'crp'/'error'/'tag_len', the error
 * return for short records, and the final return are missing — restore
 * from upstream before compiling.
 */
614 ktls_ocf_tls13_aead_decrypt(struct ktls_session *tls,
615 const struct tls_record_layer *hdr, struct mbuf *m, uint64_t seqno,
618 struct tls_aead_data_13 ad;
620 struct ktls_ocf_session *os;
624 os = tls->ocf_session;
/* tls_tlen counts the tag plus the 1-byte inner record type. */
626 tag_len = tls->params.tls_tlen - 1;
628 /* Payload must contain at least one byte for the record type. */
629 if (ntohs(hdr->tls_length) < tag_len + 1)
632 crypto_initreq(&crp, os->sid);
634 /* Setup the nonce. */
635 memcpy(crp.crp_iv, tls->params.iv, tls->params.iv_len);
636 *(uint64_t *)(crp.crp_iv + 4) ^= htobe64(seqno);
/* TLS 1.3 AAD is the record header as received. */
639 ad.type = hdr->tls_type;
640 ad.tls_vmajor = hdr->tls_vmajor;
641 ad.tls_vminor = hdr->tls_vminor;
642 ad.tls_length = hdr->tls_length;
644 crp.crp_aad_length = sizeof(ad);
646 crp.crp_payload_start = tls->params.tls_hlen;
647 crp.crp_payload_length = ntohs(hdr->tls_length) - tag_len;
648 crp.crp_digest_start = crp.crp_payload_start + crp.crp_payload_length;
650 crp.crp_op = CRYPTO_OP_DECRYPT | CRYPTO_OP_VERIFY_DIGEST;
651 crp.crp_flags = CRYPTO_F_CBIMM | CRYPTO_F_IV_SEPARATE;
652 crypto_use_mbuf(&crp, m);
654 if (tls->params.cipher_algorithm == CRYPTO_AES_NIST_GCM_16)
655 counter_u64_add(ocf_tls13_gcm_decrypts, 1);
657 counter_u64_add(ocf_tls13_chacha20_decrypts, 1);
658 error = ktls_ocf_dispatch(os, &crp);
660 crypto_destroyreq(&crp);
661 *trailer_len = tag_len;
666 ktls_ocf_free(struct ktls_session *tls)
668 struct ktls_ocf_session *os;
670 os = tls->ocf_session;
671 crypto_freesession(os->sid);
672 mtx_destroy(&os->lock);
673 zfree(os, M_KTLS_OCF);
/*
 * Probe whether this TLS session can be handled by the OCF backend.
 * Validates the cipher suite, key length, and TLS version, creates the
 * OCF session(s), and wires up the per-version encrypt/decrypt hooks.
 * Returns 0 on success or EPROTONOSUPPORT/ENOMEM-style errors.
 *
 * NOTE(review): lines were lost in extraction (fused line-number
 * prefixes remain): key-length case labels, 'break's, 'default' arms,
 * error checks after malloc/crypto_newsession, and closing braces are
 * missing — restore from upstream before compiling.
 */
677 ktls_ocf_try(struct socket *so, struct ktls_session *tls, int direction)
679 struct crypto_session_params csp, mac_csp;
680 struct ktls_ocf_session *os;
683 memset(&csp, 0, sizeof(csp));
684 memset(&mac_csp, 0, sizeof(mac_csp));
/* CSP_MODE_NONE marks "no separate MAC session needed" (AEAD suites). */
685 mac_csp.csp_mode = CSP_MODE_NONE;
688 switch (tls->params.cipher_algorithm) {
689 case CRYPTO_AES_NIST_GCM_16:
/* Key-length validation: case labels lost in extraction. */
690 switch (tls->params.cipher_key_len) {
698 /* Only TLS 1.2 and 1.3 are supported. */
699 if (tls->params.tls_vmajor != TLS_MAJOR_VER_ONE ||
700 tls->params.tls_vminor < TLS_MINOR_VER_TWO ||
701 tls->params.tls_vminor > TLS_MINOR_VER_THREE)
702 return (EPROTONOSUPPORT);
704 csp.csp_flags |= CSP_F_SEPARATE_OUTPUT | CSP_F_SEPARATE_AAD;
705 csp.csp_mode = CSP_MODE_AEAD;
706 csp.csp_cipher_alg = CRYPTO_AES_NIST_GCM_16;
707 csp.csp_cipher_key = tls->params.cipher_key;
708 csp.csp_cipher_klen = tls->params.cipher_key_len;
709 csp.csp_ivlen = AES_GCM_IV_LEN;
/* AES-CBC (MtE) arm: cipher session plus a separate digest session. */
712 switch (tls->params.cipher_key_len) {
720 switch (tls->params.auth_algorithm) {
721 case CRYPTO_SHA1_HMAC:
722 mac_len = SHA1_HASH_LEN;
724 case CRYPTO_SHA2_256_HMAC:
725 mac_len = SHA2_256_HASH_LEN;
727 case CRYPTO_SHA2_384_HMAC:
728 mac_len = SHA2_384_HASH_LEN;
734 /* Only TLS 1.0-1.2 are supported. */
735 if (tls->params.tls_vmajor != TLS_MAJOR_VER_ONE ||
736 tls->params.tls_vminor < TLS_MINOR_VER_ZERO ||
737 tls->params.tls_vminor > TLS_MINOR_VER_TWO)
738 return (EPROTONOSUPPORT);
740 /* AES-CBC is not supported for receive. */
741 if (direction == KTLS_RX)
742 return (EPROTONOSUPPORT);
744 csp.csp_flags |= CSP_F_SEPARATE_OUTPUT;
745 csp.csp_mode = CSP_MODE_CIPHER;
746 csp.csp_cipher_alg = CRYPTO_AES_CBC;
747 csp.csp_cipher_key = tls->params.cipher_key;
748 csp.csp_cipher_klen = tls->params.cipher_key_len;
749 csp.csp_ivlen = AES_BLOCK_LEN;
751 mac_csp.csp_flags |= CSP_F_SEPARATE_OUTPUT;
752 mac_csp.csp_mode = CSP_MODE_DIGEST;
753 mac_csp.csp_auth_alg = tls->params.auth_algorithm;
754 mac_csp.csp_auth_key = tls->params.auth_key;
755 mac_csp.csp_auth_klen = tls->params.auth_key_len;
757 case CRYPTO_CHACHA20_POLY1305:
758 switch (tls->params.cipher_key_len) {
765 /* Only TLS 1.2 and 1.3 are supported. */
766 if (tls->params.tls_vmajor != TLS_MAJOR_VER_ONE ||
767 tls->params.tls_vminor < TLS_MINOR_VER_TWO ||
768 tls->params.tls_vminor > TLS_MINOR_VER_THREE)
769 return (EPROTONOSUPPORT);
771 csp.csp_flags |= CSP_F_SEPARATE_OUTPUT | CSP_F_SEPARATE_AAD;
772 csp.csp_mode = CSP_MODE_AEAD;
773 csp.csp_cipher_alg = CRYPTO_CHACHA20_POLY1305;
774 csp.csp_cipher_key = tls->params.cipher_key;
775 csp.csp_cipher_klen = tls->params.cipher_key_len;
776 csp.csp_ivlen = CHACHA20_POLY1305_IV_LEN;
/* Unknown cipher algorithm. */
779 return (EPROTONOSUPPORT);
/* M_ZERO so unused fields (mac_sid, implicit_iv, ...) start cleared. */
782 os = malloc(sizeof(*os), M_KTLS_OCF, M_NOWAIT | M_ZERO);
786 error = crypto_newsession(&os->sid, &csp,
787 CRYPTO_FLAG_HARDWARE | CRYPTO_FLAG_SOFTWARE);
788 free(os, M_KTLS_OCF);
793 if (mac_csp.csp_mode != CSP_MODE_NONE) {
794 error = crypto_newsession(&os->mac_sid, &mac_csp,
795 CRYPTO_FLAG_HARDWARE | CRYPTO_FLAG_SOFTWARE);
/* On MAC-session failure, unwind the cipher session too. */
797 crypto_freesession(os->sid);
798 free(os, M_KTLS_OCF);
801 os->mac_len = mac_len;
804 mtx_init(&os->lock, "ktls_ocf", NULL, MTX_DEF);
805 tls->ocf_session = os;
/* Select per-version hooks: TLS 1.3 vs 1.2 AEAD, or CBC (TX only). */
806 if (tls->params.cipher_algorithm == CRYPTO_AES_NIST_GCM_16 ||
807 tls->params.cipher_algorithm == CRYPTO_CHACHA20_POLY1305) {
808 if (direction == KTLS_TX) {
809 if (tls->params.tls_vminor == TLS_MINOR_VER_THREE)
810 tls->sw_encrypt = ktls_ocf_tls13_aead_encrypt;
812 tls->sw_encrypt = ktls_ocf_tls12_aead_encrypt;
814 if (tls->params.tls_vminor == TLS_MINOR_VER_THREE)
815 tls->sw_decrypt = ktls_ocf_tls13_aead_decrypt;
817 tls->sw_decrypt = ktls_ocf_tls12_aead_decrypt;
820 tls->sw_encrypt = ktls_ocf_tls_cbc_encrypt;
/* TLS 1.0 chains IVs; seed the chain from the negotiated IV. */
821 if (tls->params.tls_vminor == TLS_MINOR_VER_ZERO) {
822 os->implicit_iv = true;
823 memcpy(os->iv, tls->params.iv, AES_BLOCK_LEN);
825 os->next_seqno = tls->next_seqno;
831 * AES-CBC is always synchronous currently. Asynchronous
832 * operation would require multiple callbacks and an additional
833 * iovec array in ktls_ocf_encrypt_state.
835 tls->sync_dispatch = CRYPTO_SESS_SYNC(os->sid) ||
836 tls->params.cipher_algorithm == CRYPTO_AES_CBC;