2 * SPDX-License-Identifier: BSD-2-Clause
4 * Copyright (c) 2021 Netflix Inc.
5 * Written by: John Baldwin <jhb@FreeBSD.org>
7 * Redistribution and use in source and binary forms, with or without
8 * modification, are permitted provided that the following conditions
10 * 1. Redistributions of source code must retain the above copyright
11 * notice, this list of conditions and the following disclaimer.
12 * 2. Redistributions in binary form must reproduce the above copyright
13 * notice, this list of conditions and the following disclaimer in the
14 * documentation and/or other materials provided with the distribution.
16 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
17 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
18 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
19 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
20 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
21 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
22 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
23 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
24 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
25 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
29 #include <sys/types.h>
30 #include <sys/endian.h>
31 #include <sys/event.h>
33 #include <sys/socket.h>
34 #include <sys/sysctl.h>
35 #include <netinet/in.h>
36 #include <netinet/tcp.h>
37 #include <crypto/cryptodev.h>
46 #include <openssl/err.h>
47 #include <openssl/evp.h>
48 #include <openssl/hmac.h>
/*
 * Skip the current test case unless the kernel supports and has enabled
 * kernel TLS offload (kern.ipc.tls.enable).  Any other sysctl failure is
 * reported as a hard error.
 */
static void
require_ktls(void)
{
	size_t len;
	bool enable;

	len = sizeof(enable);
	if (sysctlbyname("kern.ipc.tls.enable", &enable, &len, NULL, 0) == -1) {
		/* ENOENT means the kernel lacks KTLS entirely. */
		if (errno == ENOENT)
			atf_tc_skip("kernel does not support TLS offload");
		atf_libc_error(errno, "Failed to read kern.ipc.tls.enable");
	}

	if (!enable)
		atf_tc_skip("Kernel TLS is disabled");
}

#define	ATF_REQUIRE_KTLS()	require_ktls()
/*
 * Return a random printable ASCII character in [0x20, 0x7e].
 */
static char
rdigit(void)
{
	/* ASCII printable values between 0x20 and 0x7e */
	return (0x20 + random() % (0x7f - 0x20));
}

/*
 * Allocate a buffer of 'len' bytes filled with random printable
 * characters.  Returns NULL for a zero-length request; the caller owns
 * (and must free) the returned buffer.
 */
static char *
alloc_buffer(size_t len)
{
	char *buf;
	size_t i;

	if (len == 0)
		return (NULL);
	buf = malloc(len);
	for (i = 0; i < len; i++)
		buf[i] = rdigit();
	return (buf);
}
91 socketpair_tcp(int *sv)
94 struct sockaddr_in sin;
98 ls = socket(PF_INET, SOCK_STREAM, 0);
100 warn("socket() for listen");
104 memset(&sin, 0, sizeof(sin));
105 sin.sin_len = sizeof(sin);
106 sin.sin_family = AF_INET;
107 sin.sin_addr.s_addr = htonl(INADDR_LOOPBACK);
108 if (bind(ls, (struct sockaddr *)&sin, sizeof(sin)) == -1) {
114 if (listen(ls, 1) == -1) {
121 if (getsockname(ls, (struct sockaddr *)&sin, &len) == -1) {
127 cs = socket(PF_INET, SOCK_STREAM | SOCK_NONBLOCK, 0);
129 warn("socket() for connect");
134 if (connect(cs, (struct sockaddr *)&sin, sizeof(sin)) == -1) {
135 if (errno != EINPROGRESS) {
143 as = accept4(ls, NULL, NULL, SOCK_NONBLOCK);
154 pfd.events = POLLOUT;
156 ATF_REQUIRE(poll(&pfd, 1, INFTIM) == 1);
157 ATF_REQUIRE(pfd.revents == POLLOUT);
/*
 * Switch a file descriptor to blocking mode by clearing O_NONBLOCK.
 */
static void
fd_set_blocking(int fd)
{
	int flags;

	ATF_REQUIRE((flags = fcntl(fd, F_GETFL)) != -1);
	flags &= ~O_NONBLOCK;
	ATF_REQUIRE(fcntl(fd, F_SETFL, flags) != -1);
}
175 cbc_decrypt(const EVP_CIPHER *cipher, const char *key, const char *iv,
176 const char *input, char *output, size_t size)
181 ctx = EVP_CIPHER_CTX_new();
183 warnx("EVP_CIPHER_CTX_new failed: %s",
184 ERR_error_string(ERR_get_error(), NULL));
187 if (EVP_CipherInit_ex(ctx, cipher, NULL, (const u_char *)key,
188 (const u_char *)iv, 0) != 1) {
189 warnx("EVP_CipherInit_ex failed: %s",
190 ERR_error_string(ERR_get_error(), NULL));
191 EVP_CIPHER_CTX_free(ctx);
194 EVP_CIPHER_CTX_set_padding(ctx, 0);
195 if (EVP_CipherUpdate(ctx, (u_char *)output, &outl,
196 (const u_char *)input, size) != 1) {
197 warnx("EVP_CipherUpdate failed: %s",
198 ERR_error_string(ERR_get_error(), NULL));
199 EVP_CIPHER_CTX_free(ctx);
203 if (EVP_CipherFinal_ex(ctx, (u_char *)output + outl, &outl) != 1) {
204 warnx("EVP_CipherFinal_ex failed: %s",
205 ERR_error_string(ERR_get_error(), NULL));
206 EVP_CIPHER_CTX_free(ctx);
210 if ((size_t)total != size) {
211 warnx("decrypt size mismatch: %zu vs %d", size, total);
212 EVP_CIPHER_CTX_free(ctx);
215 EVP_CIPHER_CTX_free(ctx);
220 verify_hash(const EVP_MD *md, const void *key, size_t key_len, const void *aad,
221 size_t aad_len, const void *buffer, size_t len, const void *digest)
224 unsigned char digest2[EVP_MAX_MD_SIZE];
227 ctx = HMAC_CTX_new();
229 warnx("HMAC_CTX_new failed: %s",
230 ERR_error_string(ERR_get_error(), NULL));
233 if (HMAC_Init_ex(ctx, key, key_len, md, NULL) != 1) {
234 warnx("HMAC_Init_ex failed: %s",
235 ERR_error_string(ERR_get_error(), NULL));
239 if (HMAC_Update(ctx, aad, aad_len) != 1) {
240 warnx("HMAC_Update (aad) failed: %s",
241 ERR_error_string(ERR_get_error(), NULL));
245 if (HMAC_Update(ctx, buffer, len) != 1) {
246 warnx("HMAC_Update (payload) failed: %s",
247 ERR_error_string(ERR_get_error(), NULL));
251 if (HMAC_Final(ctx, digest2, &digest_len) != 1) {
252 warnx("HMAC_Final failed: %s",
253 ERR_error_string(ERR_get_error(), NULL));
258 if (memcmp(digest, digest2, digest_len) != 0) {
259 warnx("HMAC mismatch");
266 aead_encrypt(const EVP_CIPHER *cipher, const char *key, const char *nonce,
267 const void *aad, size_t aad_len, const char *input, char *output,
268 size_t size, char *tag, size_t tag_len)
273 ctx = EVP_CIPHER_CTX_new();
275 warnx("EVP_CIPHER_CTX_new failed: %s",
276 ERR_error_string(ERR_get_error(), NULL));
279 if (EVP_EncryptInit_ex(ctx, cipher, NULL, (const u_char *)key,
280 (const u_char *)nonce) != 1) {
281 warnx("EVP_EncryptInit_ex failed: %s",
282 ERR_error_string(ERR_get_error(), NULL));
283 EVP_CIPHER_CTX_free(ctx);
286 EVP_CIPHER_CTX_set_padding(ctx, 0);
288 if (EVP_EncryptUpdate(ctx, NULL, &outl, (const u_char *)aad,
290 warnx("EVP_EncryptUpdate for AAD failed: %s",
291 ERR_error_string(ERR_get_error(), NULL));
292 EVP_CIPHER_CTX_free(ctx);
296 if (EVP_EncryptUpdate(ctx, (u_char *)output, &outl,
297 (const u_char *)input, size) != 1) {
298 warnx("EVP_EncryptUpdate failed: %s",
299 ERR_error_string(ERR_get_error(), NULL));
300 EVP_CIPHER_CTX_free(ctx);
304 if (EVP_EncryptFinal_ex(ctx, (u_char *)output + outl, &outl) != 1) {
305 warnx("EVP_EncryptFinal_ex failed: %s",
306 ERR_error_string(ERR_get_error(), NULL));
307 EVP_CIPHER_CTX_free(ctx);
311 if ((size_t)total != size) {
312 warnx("encrypt size mismatch: %zu vs %d", size, total);
313 EVP_CIPHER_CTX_free(ctx);
316 if (EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_GET_TAG, tag_len, tag) !=
318 warnx("EVP_CIPHER_CTX_ctrl(EVP_CTRL_AEAD_GET_TAG) failed: %s",
319 ERR_error_string(ERR_get_error(), NULL));
320 EVP_CIPHER_CTX_free(ctx);
323 EVP_CIPHER_CTX_free(ctx);
328 aead_decrypt(const EVP_CIPHER *cipher, const char *key, const char *nonce,
329 const void *aad, size_t aad_len, const char *input, char *output,
330 size_t size, const char *tag, size_t tag_len)
336 ctx = EVP_CIPHER_CTX_new();
338 warnx("EVP_CIPHER_CTX_new failed: %s",
339 ERR_error_string(ERR_get_error(), NULL));
342 if (EVP_DecryptInit_ex(ctx, cipher, NULL, (const u_char *)key,
343 (const u_char *)nonce) != 1) {
344 warnx("EVP_DecryptInit_ex failed: %s",
345 ERR_error_string(ERR_get_error(), NULL));
346 EVP_CIPHER_CTX_free(ctx);
349 EVP_CIPHER_CTX_set_padding(ctx, 0);
351 if (EVP_DecryptUpdate(ctx, NULL, &outl, (const u_char *)aad,
353 warnx("EVP_DecryptUpdate for AAD failed: %s",
354 ERR_error_string(ERR_get_error(), NULL));
355 EVP_CIPHER_CTX_free(ctx);
359 if (EVP_DecryptUpdate(ctx, (u_char *)output, &outl,
360 (const u_char *)input, size) != 1) {
361 warnx("EVP_DecryptUpdate failed: %s",
362 ERR_error_string(ERR_get_error(), NULL));
363 EVP_CIPHER_CTX_free(ctx);
367 if (EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_TAG, tag_len,
368 __DECONST(char *, tag)) != 1) {
369 warnx("EVP_CIPHER_CTX_ctrl(EVP_CTRL_AEAD_SET_TAG) failed: %s",
370 ERR_error_string(ERR_get_error(), NULL));
371 EVP_CIPHER_CTX_free(ctx);
374 valid = (EVP_DecryptFinal_ex(ctx, (u_char *)output + outl, &outl) == 1);
376 if ((size_t)total != size) {
377 warnx("decrypt size mismatch: %zu vs %d", size, total);
378 EVP_CIPHER_CTX_free(ctx);
382 warnx("tag mismatch");
383 EVP_CIPHER_CTX_free(ctx);
/*
 * Populate a struct tls_enable for the requested cipher suite, TLS 1.x
 * minor version, and initial record sequence number.  Key, IV, and
 * authentication key buffers are allocated with random contents; the
 * caller releases them with free_tls_enable().
 */
static void
build_tls_enable(int cipher_alg, size_t cipher_key_len, int auth_alg,
    int minor, uint64_t seqno, struct tls_enable *en)
{
	u_int auth_key_len, iv_len;

	memset(en, 0, sizeof(*en));

	switch (cipher_alg) {
	case CRYPTO_AES_CBC:
		/* Only TLS 1.0 keeps a session IV; 1.1+ use per-record IVs. */
		if (minor == TLS_MINOR_VER_ZERO)
			iv_len = AES_BLOCK_LEN;
		else
			iv_len = 0;
		break;
	case CRYPTO_AES_NIST_GCM_16:
		if (minor == TLS_MINOR_VER_TWO)
			iv_len = TLS_AEAD_GCM_LEN;
		else
			iv_len = TLS_1_3_GCM_IV_LEN;
		break;
	case CRYPTO_CHACHA20_POLY1305:
		iv_len = TLS_CHACHA20_IV_LEN;
		break;
	default:
		iv_len = 0;
		break;
	}
	switch (auth_alg) {
	case CRYPTO_SHA1_HMAC:
		auth_key_len = SHA1_HASH_LEN;
		break;
	case CRYPTO_SHA2_256_HMAC:
		auth_key_len = SHA2_256_HASH_LEN;
		break;
	case CRYPTO_SHA2_384_HMAC:
		auth_key_len = SHA2_384_HASH_LEN;
		break;
	default:
		auth_key_len = 0;
		break;
	}
	en->cipher_key = alloc_buffer(cipher_key_len);
	en->iv = alloc_buffer(iv_len);
	en->auth_key = alloc_buffer(auth_key_len);
	en->cipher_algorithm = cipher_alg;
	en->cipher_key_len = cipher_key_len;
	en->iv_len = iv_len;
	en->auth_algorithm = auth_alg;
	en->auth_key_len = auth_key_len;
	en->tls_vmajor = TLS_MAJOR_VER_ONE;
	en->tls_vminor = minor;
	be64enc(en->rec_seq, seqno);
}
443 free_tls_enable(struct tls_enable *en)
445 free(__DECONST(void *, en->cipher_key));
446 free(__DECONST(void *, en->iv));
447 free(__DECONST(void *, en->auth_key));
450 static const EVP_CIPHER *
451 tls_EVP_CIPHER(const struct tls_enable *en)
453 switch (en->cipher_algorithm) {
455 switch (en->cipher_key_len) {
457 return (EVP_aes_128_cbc());
459 return (EVP_aes_256_cbc());
464 case CRYPTO_AES_NIST_GCM_16:
465 switch (en->cipher_key_len) {
467 return (EVP_aes_128_gcm());
469 return (EVP_aes_256_gcm());
474 case CRYPTO_CHACHA20_POLY1305:
475 return (EVP_chacha20_poly1305());
481 static const EVP_MD *
482 tls_EVP_MD(const struct tls_enable *en)
484 switch (en->auth_algorithm) {
485 case CRYPTO_SHA1_HMAC:
487 case CRYPTO_SHA2_256_HMAC:
488 return (EVP_sha256());
489 case CRYPTO_SHA2_384_HMAC:
490 return (EVP_sha384());
497 tls_header_len(struct tls_enable *en)
501 len = sizeof(struct tls_record_layer);
502 switch (en->cipher_algorithm) {
504 if (en->tls_vminor != TLS_MINOR_VER_ZERO)
505 len += AES_BLOCK_LEN;
507 case CRYPTO_AES_NIST_GCM_16:
508 if (en->tls_vminor == TLS_MINOR_VER_TWO)
509 len += sizeof(uint64_t);
511 case CRYPTO_CHACHA20_POLY1305:
519 tls_mac_len(struct tls_enable *en)
521 switch (en->cipher_algorithm) {
523 switch (en->auth_algorithm) {
524 case CRYPTO_SHA1_HMAC:
525 return (SHA1_HASH_LEN);
526 case CRYPTO_SHA2_256_HMAC:
527 return (SHA2_256_HASH_LEN);
528 case CRYPTO_SHA2_384_HMAC:
529 return (SHA2_384_HASH_LEN);
533 case CRYPTO_AES_NIST_GCM_16:
534 return (AES_GMAC_HASH_LEN);
535 case CRYPTO_CHACHA20_POLY1305:
536 return (POLY1305_HASH_LEN);
542 /* Includes maximum padding for MTE. */
544 tls_trailer_len(struct tls_enable *en)
548 len = tls_mac_len(en);
549 if (en->cipher_algorithm == CRYPTO_AES_CBC)
550 len += AES_BLOCK_LEN;
551 if (en->tls_vminor == TLS_MINOR_VER_THREE)
/* 'len' is the length of the payload application data. */
/*
 * Build the MAC-then-encrypt pseudo-header (sequence number plus
 * record header fields) that is HMACed along with the payload.
 */
static void
tls_mte_aad(struct tls_enable *en, size_t len,
    const struct tls_record_layer *hdr, uint64_t seqno, struct tls_mac_data *ad)
{
	ad->seq = htobe64(seqno);
	ad->type = hdr->tls_type;
	ad->tls_vmajor = hdr->tls_vmajor;
	ad->tls_vminor = hdr->tls_vminor;
	ad->tls_length = htons(len);
}
/*
 * Build the TLS 1.2 AEAD additional authenticated data: implicit
 * sequence number followed by the record header with the plaintext
 * length.
 */
static void
tls_12_aead_aad(struct tls_enable *en, size_t len,
    const struct tls_record_layer *hdr, uint64_t seqno,
    struct tls_aead_data *ad)
{
	ad->seq = htobe64(seqno);
	ad->type = hdr->tls_type;
	ad->tls_vmajor = hdr->tls_vmajor;
	ad->tls_vminor = hdr->tls_vminor;
	ad->tls_length = htons(len);
}
581 tls_13_aad(struct tls_enable *en, const struct tls_record_layer *hdr,
582 uint64_t seqno, struct tls_aead_data_13 *ad)
584 ad->type = hdr->tls_type;
585 ad->tls_vmajor = hdr->tls_vmajor;
586 ad->tls_vminor = hdr->tls_vminor;
587 ad->tls_length = hdr->tls_length;
591 tls_12_gcm_nonce(struct tls_enable *en, const struct tls_record_layer *hdr,
594 memcpy(nonce, en->iv, TLS_AEAD_GCM_LEN);
595 memcpy(nonce + TLS_AEAD_GCM_LEN, hdr + 1, sizeof(uint64_t));
/*
 * Build the TLS 1.3 per-record nonce: the 12-byte session IV with the
 * big-endian sequence number XORed into its trailing 8 bytes.
 * NOTE(review): the XOR through a uint64_t pointer assumes nonce is
 * suitably aligned — confirm callers pass an aligned buffer.
 */
static void
tls_13_nonce(struct tls_enable *en, uint64_t seqno, char *nonce)
{
	static_assert(TLS_1_3_GCM_IV_LEN == TLS_CHACHA20_IV_LEN,
	    "TLS 1.3 nonce length mismatch");
	memcpy(nonce, en->iv, TLS_1_3_GCM_IV_LEN);
	*(uint64_t *)(nonce + 4) ^= htobe64(seqno);
}
608 * Decrypt a TLS record 'len' bytes long at 'src' and store the result at
609 * 'dst'. If the TLS record header length doesn't match or 'dst' doesn't
610 * have sufficient room ('avail'), fail the test.
613 decrypt_tls_aes_cbc_mte(struct tls_enable *en, uint64_t seqno, const void *src,
614 size_t len, void *dst, size_t avail, uint8_t *record_type)
616 const struct tls_record_layer *hdr;
617 struct tls_mac_data aad;
620 size_t hdr_len, mac_len, payload_len;
624 hdr_len = tls_header_len(en);
625 mac_len = tls_mac_len(en);
626 ATF_REQUIRE(hdr->tls_vmajor == TLS_MAJOR_VER_ONE);
627 ATF_REQUIRE(hdr->tls_vminor == en->tls_vminor);
629 /* First, decrypt the outer payload into a temporary buffer. */
630 payload_len = len - hdr_len;
631 buf = malloc(payload_len);
632 if (en->tls_vminor == TLS_MINOR_VER_ZERO)
635 iv = (void *)(hdr + 1);
636 ATF_REQUIRE(cbc_decrypt(tls_EVP_CIPHER(en), en->cipher_key, iv,
637 (const u_char *)src + hdr_len, buf, payload_len));
640 * Copy the last encrypted block to use as the IV for the next
641 * record for TLS 1.0.
643 if (en->tls_vminor == TLS_MINOR_VER_ZERO)
644 memcpy(__DECONST(uint8_t *, en->iv), (const u_char *)src +
645 (len - AES_BLOCK_LEN), AES_BLOCK_LEN);
648 * Verify trailing padding and strip.
650 * The kernel always generates the smallest amount of padding.
652 padding = buf[payload_len - 1] + 1;
653 ATF_REQUIRE(padding > 0 && padding <= AES_BLOCK_LEN);
654 ATF_REQUIRE(payload_len >= mac_len + padding);
655 payload_len -= padding;
658 payload_len -= mac_len;
659 tls_mte_aad(en, payload_len, hdr, seqno, &aad);
660 ATF_REQUIRE(verify_hash(tls_EVP_MD(en), en->auth_key, en->auth_key_len,
661 &aad, sizeof(aad), buf, payload_len, buf + payload_len));
663 ATF_REQUIRE(payload_len <= avail);
664 memcpy(dst, buf, payload_len);
665 *record_type = hdr->tls_type;
666 return (payload_len);
670 decrypt_tls_12_aead(struct tls_enable *en, uint64_t seqno, const void *src,
671 size_t len, void *dst, uint8_t *record_type)
673 const struct tls_record_layer *hdr;
674 struct tls_aead_data aad;
676 size_t hdr_len, mac_len, payload_len;
680 hdr_len = tls_header_len(en);
681 mac_len = tls_mac_len(en);
682 payload_len = len - (hdr_len + mac_len);
683 ATF_REQUIRE(hdr->tls_vmajor == TLS_MAJOR_VER_ONE);
684 ATF_REQUIRE(hdr->tls_vminor == TLS_MINOR_VER_TWO);
686 tls_12_aead_aad(en, payload_len, hdr, seqno, &aad);
687 if (en->cipher_algorithm == CRYPTO_AES_NIST_GCM_16)
688 tls_12_gcm_nonce(en, hdr, nonce);
690 tls_13_nonce(en, seqno, nonce);
692 ATF_REQUIRE(aead_decrypt(tls_EVP_CIPHER(en), en->cipher_key, nonce,
693 &aad, sizeof(aad), (const char *)src + hdr_len, dst, payload_len,
694 (const char *)src + hdr_len + payload_len, mac_len));
696 *record_type = hdr->tls_type;
697 return (payload_len);
701 decrypt_tls_13_aead(struct tls_enable *en, uint64_t seqno, const void *src,
702 size_t len, void *dst, uint8_t *record_type)
704 const struct tls_record_layer *hdr;
705 struct tls_aead_data_13 aad;
708 size_t hdr_len, mac_len, payload_len;
712 hdr_len = tls_header_len(en);
713 mac_len = tls_mac_len(en);
714 payload_len = len - (hdr_len + mac_len);
715 ATF_REQUIRE(payload_len >= 1);
716 ATF_REQUIRE(hdr->tls_type == TLS_RLTYPE_APP);
717 ATF_REQUIRE(hdr->tls_vmajor == TLS_MAJOR_VER_ONE);
718 ATF_REQUIRE(hdr->tls_vminor == TLS_MINOR_VER_TWO);
720 tls_13_aad(en, hdr, seqno, &aad);
721 tls_13_nonce(en, seqno, nonce);
724 * Have to use a temporary buffer for the output due to the
725 * record type as the last byte of the trailer.
727 buf = malloc(payload_len);
729 ATF_REQUIRE(aead_decrypt(tls_EVP_CIPHER(en), en->cipher_key, nonce,
730 &aad, sizeof(aad), (const char *)src + hdr_len, buf, payload_len,
731 (const char *)src + hdr_len + payload_len, mac_len));
733 /* Trim record type. */
734 *record_type = buf[payload_len - 1];
737 memcpy(dst, buf, payload_len);
740 return (payload_len);
744 decrypt_tls_aead(struct tls_enable *en, uint64_t seqno, const void *src,
745 size_t len, void *dst, size_t avail, uint8_t *record_type)
747 const struct tls_record_layer *hdr;
751 ATF_REQUIRE(ntohs(hdr->tls_length) + sizeof(*hdr) == len);
753 payload_len = len - (tls_header_len(en) + tls_trailer_len(en));
754 ATF_REQUIRE(payload_len <= avail);
756 if (en->tls_vminor == TLS_MINOR_VER_TWO) {
757 ATF_REQUIRE(decrypt_tls_12_aead(en, seqno, src, len, dst,
758 record_type) == payload_len);
760 ATF_REQUIRE(decrypt_tls_13_aead(en, seqno, src, len, dst,
761 record_type) == payload_len);
764 return (payload_len);
768 decrypt_tls_record(struct tls_enable *en, uint64_t seqno, const void *src,
769 size_t len, void *dst, size_t avail, uint8_t *record_type)
771 if (en->cipher_algorithm == CRYPTO_AES_CBC)
772 return (decrypt_tls_aes_cbc_mte(en, seqno, src, len, dst, avail,
775 return (decrypt_tls_aead(en, seqno, src, len, dst, avail,
780 * Encrypt a TLS record of type 'record_type' with payload 'len' bytes
781 * long at 'src' and store the result at 'dst'. If 'dst' doesn't have
782 * sufficient room ('avail'), fail the test.
785 encrypt_tls_12_aead(struct tls_enable *en, uint8_t record_type, uint64_t seqno,
786 const void *src, size_t len, void *dst)
788 struct tls_record_layer *hdr;
789 struct tls_aead_data aad;
791 size_t hdr_len, mac_len, record_len;
795 hdr_len = tls_header_len(en);
796 mac_len = tls_mac_len(en);
797 record_len = hdr_len + len + mac_len;
799 hdr->tls_type = record_type;
800 hdr->tls_vmajor = TLS_MAJOR_VER_ONE;
801 hdr->tls_vminor = TLS_MINOR_VER_TWO;
802 hdr->tls_length = htons(record_len - sizeof(*hdr));
803 if (en->cipher_algorithm == CRYPTO_AES_NIST_GCM_16)
804 memcpy(hdr + 1, &seqno, sizeof(seqno));
806 tls_12_aead_aad(en, len, hdr, seqno, &aad);
807 if (en->cipher_algorithm == CRYPTO_AES_NIST_GCM_16)
808 tls_12_gcm_nonce(en, hdr, nonce);
810 tls_13_nonce(en, seqno, nonce);
812 ATF_REQUIRE(aead_encrypt(tls_EVP_CIPHER(en), en->cipher_key, nonce,
813 &aad, sizeof(aad), src, (char *)dst + hdr_len, len,
814 (char *)dst + hdr_len + len, mac_len));
820 encrypt_tls_13_aead(struct tls_enable *en, uint8_t record_type, uint64_t seqno,
821 const void *src, size_t len, void *dst, size_t padding)
823 struct tls_record_layer *hdr;
824 struct tls_aead_data_13 aad;
827 size_t hdr_len, mac_len, record_len;
831 hdr_len = tls_header_len(en);
832 mac_len = tls_mac_len(en);
833 record_len = hdr_len + len + 1 + padding + mac_len;
835 hdr->tls_type = TLS_RLTYPE_APP;
836 hdr->tls_vmajor = TLS_MAJOR_VER_ONE;
837 hdr->tls_vminor = TLS_MINOR_VER_TWO;
838 hdr->tls_length = htons(record_len - sizeof(*hdr));
840 tls_13_aad(en, hdr, seqno, &aad);
841 tls_13_nonce(en, seqno, nonce);
844 * Have to use a temporary buffer for the input so that the record
845 * type can be appended.
847 buf = malloc(len + 1 + padding);
848 memcpy(buf, src, len);
849 buf[len] = record_type;
850 memset(buf + len + 1, 0, padding);
852 ATF_REQUIRE(aead_encrypt(tls_EVP_CIPHER(en), en->cipher_key, nonce,
853 &aad, sizeof(aad), buf, (char *)dst + hdr_len, len + 1 + padding,
854 (char *)dst + hdr_len + len + 1 + padding, mac_len));
862 encrypt_tls_aead(struct tls_enable *en, uint8_t record_type, uint64_t seqno,
863 const void *src, size_t len, void *dst, size_t avail, size_t padding)
867 record_len = tls_header_len(en) + len + padding + tls_trailer_len(en);
868 ATF_REQUIRE(record_len <= avail);
870 if (en->tls_vminor == TLS_MINOR_VER_TWO) {
871 ATF_REQUIRE(padding == 0);
872 ATF_REQUIRE(encrypt_tls_12_aead(en, record_type, seqno, src,
873 len, dst) == record_len);
875 ATF_REQUIRE(encrypt_tls_13_aead(en, record_type, seqno, src,
876 len, dst, padding) == record_len);
/*
 * Encrypt a TLS record for the receive tests.  Only AEAD suites are
 * used on the RX path in this file, so this forwards to
 * encrypt_tls_aead().
 */
static size_t
encrypt_tls_record(struct tls_enable *en, uint8_t record_type, uint64_t seqno,
    const void *src, size_t len, void *dst, size_t avail, size_t padding)
{
	return (encrypt_tls_aead(en, record_type, seqno, src, len, dst, avail,
	    padding));
}
/*
 * Transmit 'len' bytes of application data through a KTLS TX socket,
 * read the encrypted records from the peer, decrypt them in userland,
 * and verify the plaintext round-trips.  Uses kqueue to interleave
 * non-blocking reads and writes on the socket pair.
 */
static void
test_ktls_transmit_app_data(struct tls_enable *en, uint64_t seqno, size_t len)
{
	struct kevent ev;
	struct tls_record_layer *hdr;
	char *plaintext, *decrypted, *outbuf;
	size_t decrypted_len, outbuf_len, outbuf_cap, record_len, written;
	ssize_t rv;
	int kq, sockets[2];
	uint8_t record_type;

	plaintext = alloc_buffer(len);
	decrypted = malloc(len);
	/* Large enough for one maximum-sized record. */
	outbuf_cap = tls_header_len(en) + TLS_MAX_MSG_SIZE_V10_2 +
	    tls_trailer_len(en);
	outbuf = malloc(outbuf_cap);
	hdr = (struct tls_record_layer *)outbuf;

	ATF_REQUIRE((kq = kqueue()) != -1);

	ATF_REQUIRE_MSG(socketpair_tcp(sockets), "failed to create sockets");

	ATF_REQUIRE(setsockopt(sockets[1], IPPROTO_TCP, TCP_TXTLS_ENABLE, en,
	    sizeof(*en)) == 0);

	EV_SET(&ev, sockets[0], EVFILT_READ, EV_ADD, 0, 0, NULL);
	ATF_REQUIRE(kevent(kq, &ev, 1, NULL, 0, NULL) == 0);
	EV_SET(&ev, sockets[1], EVFILT_WRITE, EV_ADD, 0, 0, NULL);
	ATF_REQUIRE(kevent(kq, &ev, 1, NULL, 0, NULL) == 0);

	decrypted_len = 0;
	outbuf_len = 0;
	written = 0;
	while (decrypted_len != len) {
		ATF_REQUIRE(kevent(kq, NULL, 0, &ev, 1, NULL) == 1);

		switch (ev.filter) {
		case EVFILT_WRITE:
			/* Try to write any remaining data. */
			rv = write(ev.ident, plaintext + written,
			    len - written);
			ATF_REQUIRE_MSG(rv > 0,
			    "failed to write to socket");
			written += rv;
			if (written == len) {
				/* Everything sent; stop write events. */
				ev.flags = EV_DISABLE;
				ATF_REQUIRE(kevent(kq, &ev, 1, NULL, 0,
				    NULL) == 0);
			}
			break;

		case EVFILT_READ:
			ATF_REQUIRE((ev.flags & EV_EOF) == 0);

			/*
			 * Try to read data for the next TLS record
			 * into outbuf.  Start by reading the header
			 * to determine how much additional data to
			 * read.
			 */
			if (outbuf_len < sizeof(struct tls_record_layer)) {
				rv = read(ev.ident, outbuf + outbuf_len,
				    sizeof(struct tls_record_layer) -
				    outbuf_len);
				ATF_REQUIRE_MSG(rv > 0,
				    "failed to read from socket");
				outbuf_len += rv;
			}

			if (outbuf_len < sizeof(struct tls_record_layer))
				break;

			record_len = sizeof(struct tls_record_layer) +
			    ntohs(hdr->tls_length);
			ATF_REQUIRE(record_len <= outbuf_cap);
			ATF_REQUIRE(record_len > outbuf_len);
			rv = read(ev.ident, outbuf + outbuf_len,
			    record_len - outbuf_len);
			if (rv == -1 && errno == EAGAIN)
				break;
			ATF_REQUIRE_MSG(rv > 0, "failed to read from socket");
			outbuf_len += rv;

			if (outbuf_len == record_len) {
				/* A full record arrived; decrypt it. */
				decrypted_len += decrypt_tls_record(en, seqno,
				    outbuf, outbuf_len,
				    decrypted + decrypted_len,
				    len - decrypted_len, &record_type);
				ATF_REQUIRE(record_type == TLS_RLTYPE_APP);

				seqno++;
				outbuf_len = 0;
			}
			break;
		}
	}

	ATF_REQUIRE_MSG(written == decrypted_len,
	    "read %zu decrypted bytes, but wrote %zu", decrypted_len, written);

	ATF_REQUIRE(memcmp(plaintext, decrypted, len) == 0);

	free(outbuf);
	free(decrypted);
	free(plaintext);

	ATF_REQUIRE(close(sockets[1]) == 0);
	ATF_REQUIRE(close(sockets[0]) == 0);
	ATF_REQUIRE(close(kq) == 0);
}
1002 ktls_send_control_message(int fd, uint8_t type, void *data, size_t len)
1005 struct cmsghdr *cmsg;
1006 char cbuf[CMSG_SPACE(sizeof(type))];
1009 memset(&msg, 0, sizeof(msg));
1011 msg.msg_control = cbuf;
1012 msg.msg_controllen = sizeof(cbuf);
1013 cmsg = CMSG_FIRSTHDR(&msg);
1014 cmsg->cmsg_level = IPPROTO_TCP;
1015 cmsg->cmsg_type = TLS_SET_RECORD_TYPE;
1016 cmsg->cmsg_len = CMSG_LEN(sizeof(type));
1017 *(uint8_t *)CMSG_DATA(cmsg) = type;
1019 iov.iov_base = data;
1024 ATF_REQUIRE(sendmsg(fd, &msg, 0) == (ssize_t)len);
1028 test_ktls_transmit_control(struct tls_enable *en, uint64_t seqno, uint8_t type,
1031 struct tls_record_layer *hdr;
1032 char *plaintext, *decrypted, *outbuf;
1033 size_t outbuf_cap, payload_len, record_len;
1036 uint8_t record_type;
1038 ATF_REQUIRE(len <= TLS_MAX_MSG_SIZE_V10_2);
1040 plaintext = alloc_buffer(len);
1041 decrypted = malloc(len);
1042 outbuf_cap = tls_header_len(en) + len + tls_trailer_len(en);
1043 outbuf = malloc(outbuf_cap);
1044 hdr = (struct tls_record_layer *)outbuf;
1046 ATF_REQUIRE_MSG(socketpair_tcp(sockets), "failed to create sockets");
1048 ATF_REQUIRE(setsockopt(sockets[1], IPPROTO_TCP, TCP_TXTLS_ENABLE, en,
1051 fd_set_blocking(sockets[0]);
1052 fd_set_blocking(sockets[1]);
1054 ktls_send_control_message(sockets[1], type, plaintext, len);
1057 * First read the header to determine how much additional data
1060 rv = read(sockets[0], outbuf, sizeof(struct tls_record_layer));
1061 ATF_REQUIRE(rv == sizeof(struct tls_record_layer));
1062 payload_len = ntohs(hdr->tls_length);
1063 record_len = payload_len + sizeof(struct tls_record_layer);
1064 ATF_REQUIRE(record_len <= outbuf_cap);
1065 rv = read(sockets[0], outbuf + sizeof(struct tls_record_layer),
1067 ATF_REQUIRE(rv == (ssize_t)payload_len);
1069 rv = decrypt_tls_record(en, seqno, outbuf, record_len, decrypted, len,
1072 ATF_REQUIRE_MSG((ssize_t)len == rv,
1073 "read %zd decrypted bytes, but wrote %zu", rv, len);
1074 ATF_REQUIRE(record_type == type);
1076 ATF_REQUIRE(memcmp(plaintext, decrypted, len) == 0);
1082 ATF_REQUIRE(close(sockets[1]) == 0);
1083 ATF_REQUIRE(close(sockets[0]) == 0);
1087 test_ktls_transmit_empty_fragment(struct tls_enable *en, uint64_t seqno)
1089 struct tls_record_layer *hdr;
1091 size_t outbuf_cap, payload_len, record_len;
1094 uint8_t record_type;
1096 outbuf_cap = tls_header_len(en) + tls_trailer_len(en);
1097 outbuf = malloc(outbuf_cap);
1098 hdr = (struct tls_record_layer *)outbuf;
1100 ATF_REQUIRE_MSG(socketpair_tcp(sockets), "failed to create sockets");
1102 ATF_REQUIRE(setsockopt(sockets[1], IPPROTO_TCP, TCP_TXTLS_ENABLE, en,
1105 fd_set_blocking(sockets[0]);
1106 fd_set_blocking(sockets[1]);
1109 * A write of zero bytes should send an empty fragment only for
1110 * TLS 1.0, otherwise an error should be raised.
1112 rv = write(sockets[1], NULL, 0);
1114 ATF_REQUIRE(en->cipher_algorithm == CRYPTO_AES_CBC);
1115 ATF_REQUIRE(en->tls_vminor == TLS_MINOR_VER_ZERO);
1117 ATF_REQUIRE(rv == -1);
1118 ATF_REQUIRE(errno == EINVAL);
1123 * First read the header to determine how much additional data
1126 rv = read(sockets[0], outbuf, sizeof(struct tls_record_layer));
1127 ATF_REQUIRE(rv == sizeof(struct tls_record_layer));
1128 payload_len = ntohs(hdr->tls_length);
1129 record_len = payload_len + sizeof(struct tls_record_layer);
1130 ATF_REQUIRE(record_len <= outbuf_cap);
1131 rv = read(sockets[0], outbuf + sizeof(struct tls_record_layer),
1133 ATF_REQUIRE(rv == (ssize_t)payload_len);
1135 rv = decrypt_tls_record(en, seqno, outbuf, record_len, NULL, 0,
1138 ATF_REQUIRE_MSG(rv == 0,
1139 "read %zd decrypted bytes for an empty fragment", rv);
1140 ATF_REQUIRE(record_type == TLS_RLTYPE_APP);
1145 ATF_REQUIRE(close(sockets[1]) == 0);
1146 ATF_REQUIRE(close(sockets[0]) == 0);
1150 ktls_receive_tls_record(struct tls_enable *en, int fd, uint8_t record_type,
1151 void *data, size_t len)
1154 struct cmsghdr *cmsg;
1155 struct tls_get_record *tgr;
1156 char cbuf[CMSG_SPACE(sizeof(*tgr))];
1160 memset(&msg, 0, sizeof(msg));
1162 msg.msg_control = cbuf;
1163 msg.msg_controllen = sizeof(cbuf);
1165 iov.iov_base = data;
1170 ATF_REQUIRE((rv = recvmsg(fd, &msg, 0)) > 0);
1172 ATF_REQUIRE((msg.msg_flags & (MSG_EOR | MSG_CTRUNC)) == MSG_EOR);
1174 cmsg = CMSG_FIRSTHDR(&msg);
1175 ATF_REQUIRE(cmsg != NULL);
1176 ATF_REQUIRE(cmsg->cmsg_level == IPPROTO_TCP);
1177 ATF_REQUIRE(cmsg->cmsg_type == TLS_GET_RECORD);
1178 ATF_REQUIRE(cmsg->cmsg_len == CMSG_LEN(sizeof(*tgr)));
1180 tgr = (struct tls_get_record *)CMSG_DATA(cmsg);
1181 ATF_REQUIRE(tgr->tls_type == record_type);
1182 ATF_REQUIRE(tgr->tls_vmajor == en->tls_vmajor);
1183 /* XXX: Not sure if this is what OpenSSL expects? */
1184 if (en->tls_vminor == TLS_MINOR_VER_THREE)
1185 ATF_REQUIRE(tgr->tls_vminor == TLS_MINOR_VER_TWO);
1187 ATF_REQUIRE(tgr->tls_vminor == en->tls_vminor);
1188 ATF_REQUIRE(tgr->tls_length == htons(rv));
/*
 * Encrypt 'len' bytes of application data in userland (optionally with
 * TLS 1.3 padding), feed the records into a KTLS RX socket, and verify
 * the kernel-decrypted plaintext matches.  Uses kqueue to interleave
 * non-blocking reads and writes on the socket pair.
 */
static void
test_ktls_receive_app_data(struct tls_enable *en, uint64_t seqno, size_t len,
    size_t padding)
{
	struct kevent ev;
	char *plaintext, *received, *outbuf;
	size_t outbuf_cap, outbuf_len, outbuf_sent, received_len, todo, written;
	ssize_t rv;
	int kq, sockets[2];

	plaintext = alloc_buffer(len);
	received = malloc(len);
	/* Large enough for one maximum-sized record. */
	outbuf_cap = tls_header_len(en) + TLS_MAX_MSG_SIZE_V10_2 +
	    tls_trailer_len(en);
	outbuf = malloc(outbuf_cap);

	ATF_REQUIRE((kq = kqueue()) != -1);

	ATF_REQUIRE_MSG(socketpair_tcp(sockets), "failed to create sockets");

	ATF_REQUIRE(setsockopt(sockets[0], IPPROTO_TCP, TCP_RXTLS_ENABLE, en,
	    sizeof(*en)) == 0);

	EV_SET(&ev, sockets[0], EVFILT_READ, EV_ADD, 0, 0, NULL);
	ATF_REQUIRE(kevent(kq, &ev, 1, NULL, 0, NULL) == 0);
	EV_SET(&ev, sockets[1], EVFILT_WRITE, EV_ADD, 0, 0, NULL);
	ATF_REQUIRE(kevent(kq, &ev, 1, NULL, 0, NULL) == 0);

	received_len = 0;
	outbuf_len = 0;
	outbuf_sent = 0;
	written = 0;
	while (received_len != len) {
		ATF_REQUIRE(kevent(kq, NULL, 0, &ev, 1, NULL) == 1);

		switch (ev.filter) {
		case EVFILT_WRITE:
			/*
			 * Compose the next TLS record to send.
			 */
			if (outbuf_len == 0) {
				ATF_REQUIRE(written < len);
				todo = len - written;
				if (todo > TLS_MAX_MSG_SIZE_V10_2 - padding)
					todo = TLS_MAX_MSG_SIZE_V10_2 - padding;
				outbuf_len = encrypt_tls_record(en,
				    TLS_RLTYPE_APP, seqno, plaintext + written,
				    todo, outbuf, outbuf_cap, padding);
				outbuf_sent = 0;
				written += todo;
				seqno++;
			}

			/*
			 * Try to write the remainder of the current
			 * TLS record.
			 */
			rv = write(ev.ident, outbuf + outbuf_sent,
			    outbuf_len - outbuf_sent);
			ATF_REQUIRE_MSG(rv > 0,
			    "failed to write to socket");
			outbuf_sent += rv;
			if (outbuf_sent == outbuf_len) {
				outbuf_len = 0;
				if (written == len) {
					/* All records sent. */
					ev.flags = EV_DISABLE;
					ATF_REQUIRE(kevent(kq, &ev, 1, NULL, 0,
					    NULL) == 0);
				}
			}
			break;

		case EVFILT_READ:
			ATF_REQUIRE((ev.flags & EV_EOF) == 0);

			rv = ktls_receive_tls_record(en, ev.ident,
			    TLS_RLTYPE_APP, received + received_len,
			    len - received_len);
			received_len += rv;
			break;
		}
	}

	ATF_REQUIRE_MSG(written == received_len,
	    "read %zu decrypted bytes, but wrote %zu", received_len, written);

	ATF_REQUIRE(memcmp(plaintext, received, len) == 0);

	free(outbuf);
	free(received);
	free(plaintext);

	ATF_REQUIRE(close(sockets[1]) == 0);
	ATF_REQUIRE(close(sockets[0]) == 0);
	ATF_REQUIRE(close(kq) == 0);
}
1290 #define TLS_10_TESTS(M) \
1291 M(aes128_cbc_1_0_sha1, CRYPTO_AES_CBC, 128 / 8, \
1293 M(aes256_cbc_1_0_sha1, CRYPTO_AES_CBC, 256 / 8, \
1296 #define TLS_13_TESTS(M) \
1297 M(aes128_gcm_1_3, CRYPTO_AES_NIST_GCM_16, 128 / 8, 0, \
1298 TLS_MINOR_VER_THREE) \
1299 M(aes256_gcm_1_3, CRYPTO_AES_NIST_GCM_16, 256 / 8, 0, \
1300 TLS_MINOR_VER_THREE) \
1301 M(chacha20_poly1305_1_3, CRYPTO_CHACHA20_POLY1305, 256 / 8, 0, \
1302 TLS_MINOR_VER_THREE)
1304 #define AES_CBC_TESTS(M) \
1305 M(aes128_cbc_1_0_sha1, CRYPTO_AES_CBC, 128 / 8, \
1306 CRYPTO_SHA1_HMAC, TLS_MINOR_VER_ZERO) \
1307 M(aes256_cbc_1_0_sha1, CRYPTO_AES_CBC, 256 / 8, \
1308 CRYPTO_SHA1_HMAC, TLS_MINOR_VER_ZERO) \
1309 M(aes128_cbc_1_1_sha1, CRYPTO_AES_CBC, 128 / 8, \
1310 CRYPTO_SHA1_HMAC, TLS_MINOR_VER_ONE) \
1311 M(aes256_cbc_1_1_sha1, CRYPTO_AES_CBC, 256 / 8, \
1312 CRYPTO_SHA1_HMAC, TLS_MINOR_VER_ONE) \
1313 M(aes128_cbc_1_2_sha1, CRYPTO_AES_CBC, 128 / 8, \
1314 CRYPTO_SHA1_HMAC, TLS_MINOR_VER_TWO) \
1315 M(aes256_cbc_1_2_sha1, CRYPTO_AES_CBC, 256 / 8, \
1316 CRYPTO_SHA1_HMAC, TLS_MINOR_VER_TWO) \
1317 M(aes128_cbc_1_2_sha256, CRYPTO_AES_CBC, 128 / 8, \
1318 CRYPTO_SHA2_256_HMAC, TLS_MINOR_VER_TWO) \
1319 M(aes256_cbc_1_2_sha256, CRYPTO_AES_CBC, 256 / 8, \
1320 CRYPTO_SHA2_256_HMAC, TLS_MINOR_VER_TWO) \
1321 M(aes128_cbc_1_2_sha384, CRYPTO_AES_CBC, 128 / 8, \
1322 CRYPTO_SHA2_384_HMAC, TLS_MINOR_VER_TWO) \
1323 M(aes256_cbc_1_2_sha384, CRYPTO_AES_CBC, 256 / 8, \
1324 CRYPTO_SHA2_384_HMAC, TLS_MINOR_VER_TWO) \
1326 #define AES_GCM_TESTS(M) \
1327 M(aes128_gcm_1_2, CRYPTO_AES_NIST_GCM_16, 128 / 8, 0, \
1328 TLS_MINOR_VER_TWO) \
1329 M(aes256_gcm_1_2, CRYPTO_AES_NIST_GCM_16, 256 / 8, 0, \
1330 TLS_MINOR_VER_TWO) \
1331 M(aes128_gcm_1_3, CRYPTO_AES_NIST_GCM_16, 128 / 8, 0, \
1332 TLS_MINOR_VER_THREE) \
1333 M(aes256_gcm_1_3, CRYPTO_AES_NIST_GCM_16, 256 / 8, 0, \
1334 TLS_MINOR_VER_THREE)
1336 #define CHACHA20_TESTS(M) \
1337 M(chacha20_poly1305_1_2, CRYPTO_CHACHA20_POLY1305, 256 / 8, 0, \
1338 TLS_MINOR_VER_TWO) \
1339 M(chacha20_poly1305_1_3, CRYPTO_CHACHA20_POLY1305, 256 / 8, 0, \
1340 TLS_MINOR_VER_THREE)
/*
 * Generate an ATF test case that transmits 'len' bytes of application
 * data over a KTLS socket using the given cipher suite.
 */
#define GEN_TRANSMIT_APP_DATA_TEST(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor, name, len) \
ATF_TC_WITHOUT_HEAD(ktls_transmit_##cipher_name##_##name); \
ATF_TC_BODY(ktls_transmit_##cipher_name##_##name, tc) \
{ \
	struct tls_enable en; \
	uint64_t seqno; \
	ATF_REQUIRE_KTLS(); \
	seqno = random(); \
	build_tls_enable(cipher_alg, key_size, auth_alg, minor, seqno, \
	    &en); \
	test_ktls_transmit_app_data(&en, seqno, len); \
	free_tls_enable(&en); \
}
/* Register a transmit app-data test case generated above. */
#define ADD_TRANSMIT_APP_DATA_TEST(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor, name) \
	ATF_TP_ADD_TC(tp, ktls_transmit_##cipher_name##_##name);
/*
 * Generate an ATF test case that transmits a single TLS record of the
 * given content 'type' and payload length via sendmsg().
 */
#define GEN_TRANSMIT_CONTROL_TEST(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor, name, type, len) \
ATF_TC_WITHOUT_HEAD(ktls_transmit_##cipher_name##_##name); \
ATF_TC_BODY(ktls_transmit_##cipher_name##_##name, tc) \
{ \
	struct tls_enable en; \
	uint64_t seqno; \
	ATF_REQUIRE_KTLS(); \
	seqno = random(); \
	build_tls_enable(cipher_alg, key_size, auth_alg, minor, seqno, \
	    &en); \
	test_ktls_transmit_control(&en, seqno, type, len); \
	free_tls_enable(&en); \
}
/* Register a transmit control-record test case generated above. */
#define ADD_TRANSMIT_CONTROL_TEST(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor, name) \
	ATF_TP_ADD_TC(tp, ktls_transmit_##cipher_name##_##name);
/*
 * Generate an ATF test case that transmits an "empty fragment" (a TLS
 * record with no payload) for the given cipher suite.
 */
#define GEN_TRANSMIT_EMPTY_FRAGMENT_TEST(cipher_name, cipher_alg, \
	    key_size, auth_alg, minor) \
ATF_TC_WITHOUT_HEAD(ktls_transmit_##cipher_name##_empty_fragment); \
ATF_TC_BODY(ktls_transmit_##cipher_name##_empty_fragment, tc) \
{ \
	struct tls_enable en; \
	uint64_t seqno; \
	ATF_REQUIRE_KTLS(); \
	seqno = random(); \
	build_tls_enable(cipher_alg, key_size, auth_alg, minor, seqno, \
	    &en); \
	test_ktls_transmit_empty_fragment(&en, seqno); \
	free_tls_enable(&en); \
}
/* Register an empty-fragment transmit test case generated above. */
#define ADD_TRANSMIT_EMPTY_FRAGMENT_TEST(cipher_name, cipher_alg, \
	    key_size, auth_alg, minor) \
	ATF_TP_ADD_TC(tp, ktls_transmit_##cipher_name##_empty_fragment);
/*
 * Generate the standard trio of transmit tests (short, long, control)
 * for one cipher suite.
 */
#define GEN_TRANSMIT_TESTS(cipher_name, cipher_alg, key_size, auth_alg, \
	    minor) \
	GEN_TRANSMIT_APP_DATA_TEST(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor, short, 64) \
	GEN_TRANSMIT_APP_DATA_TEST(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor, long, 64 * 1024) \
	GEN_TRANSMIT_CONTROL_TEST(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor, control, 0x21 /* Alert */, 32)
/* Register the standard trio of transmit tests for one cipher suite. */
#define ADD_TRANSMIT_TESTS(cipher_name, cipher_alg, key_size, auth_alg, \
	    minor) \
	ADD_TRANSMIT_APP_DATA_TEST(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor, short) \
	ADD_TRANSMIT_APP_DATA_TEST(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor, long) \
	ADD_TRANSMIT_CONTROL_TEST(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor, control)
/*
 * For each supported cipher suite, run three transmit tests:
 *
 * - a short test which sends 64 bytes of application data (likely as
 *   a single TLS record)
 *
 * - a long test which sends 64KB of application data (split across
 *   multiple TLS records)
 *
 * - a control test which sends a single record with a specific
 *   content type via sendmsg()
 */
AES_CBC_TESTS(GEN_TRANSMIT_TESTS);
AES_GCM_TESTS(GEN_TRANSMIT_TESTS);
CHACHA20_TESTS(GEN_TRANSMIT_TESTS);
/*
 * Generate sixteen control-record transmit tests with payload sizes 1
 * through 16 to cycle through all possible CBC padding lengths.
 */
#define GEN_TRANSMIT_PADDING_TESTS(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor) \
	GEN_TRANSMIT_CONTROL_TEST(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor, padding_1, 0x21 /* Alert */, 1) \
	GEN_TRANSMIT_CONTROL_TEST(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor, padding_2, 0x21 /* Alert */, 2) \
	GEN_TRANSMIT_CONTROL_TEST(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor, padding_3, 0x21 /* Alert */, 3) \
	GEN_TRANSMIT_CONTROL_TEST(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor, padding_4, 0x21 /* Alert */, 4) \
	GEN_TRANSMIT_CONTROL_TEST(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor, padding_5, 0x21 /* Alert */, 5) \
	GEN_TRANSMIT_CONTROL_TEST(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor, padding_6, 0x21 /* Alert */, 6) \
	GEN_TRANSMIT_CONTROL_TEST(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor, padding_7, 0x21 /* Alert */, 7) \
	GEN_TRANSMIT_CONTROL_TEST(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor, padding_8, 0x21 /* Alert */, 8) \
	GEN_TRANSMIT_CONTROL_TEST(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor, padding_9, 0x21 /* Alert */, 9) \
	GEN_TRANSMIT_CONTROL_TEST(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor, padding_10, 0x21 /* Alert */, 10) \
	GEN_TRANSMIT_CONTROL_TEST(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor, padding_11, 0x21 /* Alert */, 11) \
	GEN_TRANSMIT_CONTROL_TEST(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor, padding_12, 0x21 /* Alert */, 12) \
	GEN_TRANSMIT_CONTROL_TEST(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor, padding_13, 0x21 /* Alert */, 13) \
	GEN_TRANSMIT_CONTROL_TEST(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor, padding_14, 0x21 /* Alert */, 14) \
	GEN_TRANSMIT_CONTROL_TEST(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor, padding_15, 0x21 /* Alert */, 15) \
	GEN_TRANSMIT_CONTROL_TEST(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor, padding_16, 0x21 /* Alert */, 16)
/* Register the sixteen padding transmit tests generated above. */
#define ADD_TRANSMIT_PADDING_TESTS(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor) \
	ADD_TRANSMIT_CONTROL_TEST(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor, padding_1) \
	ADD_TRANSMIT_CONTROL_TEST(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor, padding_2) \
	ADD_TRANSMIT_CONTROL_TEST(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor, padding_3) \
	ADD_TRANSMIT_CONTROL_TEST(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor, padding_4) \
	ADD_TRANSMIT_CONTROL_TEST(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor, padding_5) \
	ADD_TRANSMIT_CONTROL_TEST(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor, padding_6) \
	ADD_TRANSMIT_CONTROL_TEST(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor, padding_7) \
	ADD_TRANSMIT_CONTROL_TEST(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor, padding_8) \
	ADD_TRANSMIT_CONTROL_TEST(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor, padding_9) \
	ADD_TRANSMIT_CONTROL_TEST(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor, padding_10) \
	ADD_TRANSMIT_CONTROL_TEST(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor, padding_11) \
	ADD_TRANSMIT_CONTROL_TEST(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor, padding_12) \
	ADD_TRANSMIT_CONTROL_TEST(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor, padding_13) \
	ADD_TRANSMIT_CONTROL_TEST(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor, padding_14) \
	ADD_TRANSMIT_CONTROL_TEST(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor, padding_15) \
	ADD_TRANSMIT_CONTROL_TEST(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor, padding_16)
/*
 * For AES-CBC MTE cipher suites using padding, add tests of messages
 * with each possible padding size.  Note that the padding_<N> tests
 * do not necessarily test <N> bytes of padding as the padding is a
 * function of the cipher suite's MAC length.  However, cycling
 * through all of the payload sizes from 1 to 16 should exercise all
 * of the possible padding lengths for each suite.
 */
AES_CBC_TESTS(GEN_TRANSMIT_PADDING_TESTS);

/*
 * Test "empty fragments" which are TLS records with no payload that
 * OpenSSL can send for TLS 1.0 connections.
 */
AES_CBC_TESTS(GEN_TRANSMIT_EMPTY_FRAGMENT_TEST);
AES_GCM_TESTS(GEN_TRANSMIT_EMPTY_FRAGMENT_TEST);
CHACHA20_TESTS(GEN_TRANSMIT_EMPTY_FRAGMENT_TEST);
1525 test_ktls_invalid_transmit_cipher_suite(struct tls_enable *en)
1529 ATF_REQUIRE_MSG(socketpair_tcp(sockets), "failed to create sockets");
1531 ATF_REQUIRE(setsockopt(sockets[1], IPPROTO_TCP, TCP_TXTLS_ENABLE, en,
1532 sizeof(*en)) == -1);
1533 ATF_REQUIRE(errno == EINVAL);
1535 ATF_REQUIRE(close(sockets[1]) == 0);
1536 ATF_REQUIRE(close(sockets[0]) == 0);
/*
 * Generate an ATF test case asserting that the given cipher suite is
 * rejected with EINVAL when enabled for transmit.
 */
#define GEN_INVALID_TRANSMIT_TEST(name, cipher_alg, key_size, auth_alg, \
	    minor) \
ATF_TC_WITHOUT_HEAD(ktls_transmit_invalid_##name); \
ATF_TC_BODY(ktls_transmit_invalid_##name, tc) \
{ \
	struct tls_enable en; \
	uint64_t seqno; \
	ATF_REQUIRE_KTLS(); \
	seqno = random(); \
	build_tls_enable(cipher_alg, key_size, auth_alg, minor, seqno, \
	    &en); \
	test_ktls_invalid_transmit_cipher_suite(&en); \
	free_tls_enable(&en); \
}
/* Register an invalid-transmit test case generated above. */
#define ADD_INVALID_TRANSMIT_TEST(name, cipher_alg, key_size, auth_alg, \
	    minor) \
	ATF_TP_ADD_TC(tp, ktls_transmit_invalid_##name);
/*
 * Cipher suite / TLS version combinations that the kernel should
 * reject: SHA-2 MACs and AEAD ciphers with TLS 1.0/1.1, and any CBC
 * suite with TLS 1.3.
 */
#define INVALID_CIPHER_SUITES(M) \
	M(aes128_cbc_1_0_sha256, CRYPTO_AES_CBC, 128 / 8, \
	    CRYPTO_SHA2_256_HMAC, TLS_MINOR_VER_ZERO) \
	M(aes128_cbc_1_0_sha384, CRYPTO_AES_CBC, 128 / 8, \
	    CRYPTO_SHA2_384_HMAC, TLS_MINOR_VER_ZERO) \
	M(aes128_gcm_1_0, CRYPTO_AES_NIST_GCM_16, 128 / 8, 0, \
	    TLS_MINOR_VER_ZERO) \
	M(chacha20_poly1305_1_0, CRYPTO_CHACHA20_POLY1305, 256 / 8, 0, \
	    TLS_MINOR_VER_ZERO) \
	M(aes128_cbc_1_1_sha256, CRYPTO_AES_CBC, 128 / 8, \
	    CRYPTO_SHA2_256_HMAC, TLS_MINOR_VER_ONE) \
	M(aes128_cbc_1_1_sha384, CRYPTO_AES_CBC, 128 / 8, \
	    CRYPTO_SHA2_384_HMAC, TLS_MINOR_VER_ONE) \
	M(aes128_gcm_1_1, CRYPTO_AES_NIST_GCM_16, 128 / 8, 0, \
	    TLS_MINOR_VER_ONE) \
	M(chacha20_poly1305_1_1, CRYPTO_CHACHA20_POLY1305, 256 / 8, 0, \
	    TLS_MINOR_VER_ONE) \
	M(aes128_cbc_1_3_sha1, CRYPTO_AES_CBC, 128 / 8, \
	    CRYPTO_SHA1_HMAC, TLS_MINOR_VER_THREE) \
	M(aes128_cbc_1_3_sha256, CRYPTO_AES_CBC, 128 / 8, \
	    CRYPTO_SHA2_256_HMAC, TLS_MINOR_VER_THREE) \
	M(aes128_cbc_1_3_sha384, CRYPTO_AES_CBC, 128 / 8, \
	    CRYPTO_SHA2_384_HMAC, TLS_MINOR_VER_THREE)
/*
 * Ensure that invalid cipher suites are rejected for transmit.
 */
INVALID_CIPHER_SUITES(GEN_INVALID_TRANSMIT_TEST);
/*
 * Generate an ATF test case that receives 'len' bytes of application
 * data over a KTLS socket, with 'padding' extra bytes of record padding.
 */
#define GEN_RECEIVE_APP_DATA_TEST(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor, name, len, padding) \
ATF_TC_WITHOUT_HEAD(ktls_receive_##cipher_name##_##name); \
ATF_TC_BODY(ktls_receive_##cipher_name##_##name, tc) \
{ \
	struct tls_enable en; \
	uint64_t seqno; \
	ATF_REQUIRE_KTLS(); \
	seqno = random(); \
	build_tls_enable(cipher_alg, key_size, auth_alg, minor, seqno, \
	    &en); \
	test_ktls_receive_app_data(&en, seqno, len, padding); \
	free_tls_enable(&en); \
}
/* Register a receive app-data test case generated above. */
#define ADD_RECEIVE_APP_DATA_TEST(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor, name) \
	ATF_TP_ADD_TC(tp, ktls_receive_##cipher_name##_##name);
/* Generate the short/long receive test pair for one cipher suite. */
#define GEN_RECEIVE_TESTS(cipher_name, cipher_alg, key_size, auth_alg, \
	    minor) \
	GEN_RECEIVE_APP_DATA_TEST(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor, short, 64, 0) \
	GEN_RECEIVE_APP_DATA_TEST(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor, long, 64 * 1024, 0)
/* Register the short/long receive test pair for one cipher suite. */
#define ADD_RECEIVE_TESTS(cipher_name, cipher_alg, key_size, auth_alg, \
	    minor) \
	ADD_RECEIVE_APP_DATA_TEST(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor, short) \
	ADD_RECEIVE_APP_DATA_TEST(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor, long)
/*
 * For each supported cipher suite, run two receive tests:
 *
 * - a short test which sends 64 bytes of application data (likely as
 *   a single TLS record)
 *
 * - a long test which sends 64KB of application data (split across
 *   multiple TLS records)
 *
 * Note that receive is currently only supported for TLS 1.2 AEAD
 * cipher suites.
 */
AES_GCM_TESTS(GEN_RECEIVE_TESTS);
CHACHA20_TESTS(GEN_RECEIVE_TESTS);
/* Generate receive tests with non-zero record padding (TLS 1.3 only). */
#define GEN_PADDING_RECEIVE_TESTS(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor) \
	GEN_RECEIVE_APP_DATA_TEST(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor, short_padded, 64, 16) \
	GEN_RECEIVE_APP_DATA_TEST(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor, long_padded, 64 * 1024, 15)
/* Register the padded receive test pair generated above. */
#define ADD_PADDING_RECEIVE_TESTS(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor) \
	ADD_RECEIVE_APP_DATA_TEST(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor, short_padded) \
	ADD_RECEIVE_APP_DATA_TEST(cipher_name, cipher_alg, key_size, \
	    auth_alg, minor, long_padded)
/*
 * For TLS 1.3 cipher suites, run two additional receive tests which
 * add padding to each record.
 */
TLS_13_TESTS(GEN_PADDING_RECEIVE_TESTS);
1658 test_ktls_invalid_receive_cipher_suite(struct tls_enable *en)
1662 ATF_REQUIRE_MSG(socketpair_tcp(sockets), "failed to create sockets");
1664 ATF_REQUIRE(setsockopt(sockets[1], IPPROTO_TCP, TCP_RXTLS_ENABLE, en,
1665 sizeof(*en)) == -1);
1666 ATF_REQUIRE(errno == EINVAL);
1668 ATF_REQUIRE(close(sockets[1]) == 0);
1669 ATF_REQUIRE(close(sockets[0]) == 0);
/*
 * Generate an ATF test case asserting that the given cipher suite is
 * rejected with EINVAL when enabled for receive.
 */
#define GEN_INVALID_RECEIVE_TEST(name, cipher_alg, key_size, auth_alg, \
	    minor) \
ATF_TC_WITHOUT_HEAD(ktls_receive_invalid_##name); \
ATF_TC_BODY(ktls_receive_invalid_##name, tc) \
{ \
	struct tls_enable en; \
	uint64_t seqno; \
	ATF_REQUIRE_KTLS(); \
	seqno = random(); \
	build_tls_enable(cipher_alg, key_size, auth_alg, minor, seqno, \
	    &en); \
	test_ktls_invalid_receive_cipher_suite(&en); \
	free_tls_enable(&en); \
}
/* Register an invalid-receive test case generated above. */
#define ADD_INVALID_RECEIVE_TEST(name, cipher_alg, key_size, auth_alg, \
	    minor) \
	ATF_TP_ADD_TC(tp, ktls_receive_invalid_##name);
/*
 * Ensure that invalid cipher suites are rejected for receive.
 */
INVALID_CIPHER_SUITES(GEN_INVALID_RECEIVE_TEST);
1698 test_ktls_unsupported_receive_cipher_suite(struct tls_enable *en)
1702 ATF_REQUIRE_MSG(socketpair_tcp(sockets), "failed to create sockets");
1704 ATF_REQUIRE(setsockopt(sockets[1], IPPROTO_TCP, TCP_RXTLS_ENABLE, en,
1705 sizeof(*en)) == -1);
1706 ATF_REQUIRE(errno == EPROTONOSUPPORT);
1708 ATF_REQUIRE(close(sockets[1]) == 0);
1709 ATF_REQUIRE(close(sockets[0]) == 0);
/*
 * Generate an ATF test case asserting that the given cipher suite is
 * rejected with EPROTONOSUPPORT when enabled for receive.
 */
#define GEN_UNSUPPORTED_RECEIVE_TEST(name, cipher_alg, key_size, \
	    auth_alg, minor) \
ATF_TC_WITHOUT_HEAD(ktls_receive_unsupported_##name); \
ATF_TC_BODY(ktls_receive_unsupported_##name, tc) \
{ \
	struct tls_enable en; \
	uint64_t seqno; \
	ATF_REQUIRE_KTLS(); \
	seqno = random(); \
	build_tls_enable(cipher_alg, key_size, auth_alg, minor, seqno, \
	    &en); \
	test_ktls_unsupported_receive_cipher_suite(&en); \
	free_tls_enable(&en); \
}
/* Register an unsupported-receive test case generated above. */
#define ADD_UNSUPPORTED_RECEIVE_TEST(name, cipher_alg, key_size, \
	    auth_alg, minor) \
	ATF_TP_ADD_TC(tp, ktls_receive_unsupported_##name);
/*
 * Ensure that valid cipher suites not supported for receive are
 * rejected.
 */
AES_CBC_TESTS(GEN_UNSUPPORTED_RECEIVE_TEST);
1739 * Try to perform an invalid sendto(2) on a TXTLS-enabled socket, to exercise
1740 * KTLS error handling in the socket layer.
1742 ATF_TC_WITHOUT_HEAD(ktls_sendto_baddst);
1743 ATF_TC_BODY(ktls_sendto_baddst, tc)
1746 struct sockaddr_in dst;
1747 struct tls_enable en;
1753 s = socket(AF_INET, SOCK_STREAM, IPPROTO_TCP);
1754 ATF_REQUIRE(s >= 0);
1756 build_tls_enable(CRYPTO_AES_NIST_GCM_16, 128 / 8, 0,
1757 TLS_MINOR_VER_THREE, (uint64_t)random(), &en);
1759 ATF_REQUIRE(setsockopt(s, IPPROTO_TCP, TCP_TXTLS_ENABLE, &en,
1762 memset(&dst, 0, sizeof(dst));
1763 dst.sin_family = AF_INET;
1764 dst.sin_len = sizeof(dst);
1765 dst.sin_addr.s_addr = htonl(INADDR_BROADCAST);
1766 dst.sin_port = htons(12345);
1768 memset(buf, 0, sizeof(buf));
1769 n = sendto(s, buf, sizeof(buf), 0, (struct sockaddr *)&dst,
1772 /* Can't transmit to the broadcast address over TCP. */
1773 ATF_REQUIRE_ERRNO(EACCES, n == -1);
1774 ATF_REQUIRE(close(s) == 0);
1779 /* Transmit tests */
1780 AES_CBC_TESTS(ADD_TRANSMIT_TESTS);
1781 AES_GCM_TESTS(ADD_TRANSMIT_TESTS);
1782 CHACHA20_TESTS(ADD_TRANSMIT_TESTS);
1783 AES_CBC_TESTS(ADD_TRANSMIT_PADDING_TESTS);
1784 AES_CBC_TESTS(ADD_TRANSMIT_EMPTY_FRAGMENT_TEST);
1785 AES_GCM_TESTS(ADD_TRANSMIT_EMPTY_FRAGMENT_TEST);
1786 CHACHA20_TESTS(ADD_TRANSMIT_EMPTY_FRAGMENT_TEST);
1787 INVALID_CIPHER_SUITES(ADD_INVALID_TRANSMIT_TEST);
1790 AES_CBC_TESTS(ADD_UNSUPPORTED_RECEIVE_TEST);
1791 AES_GCM_TESTS(ADD_RECEIVE_TESTS);
1792 CHACHA20_TESTS(ADD_RECEIVE_TESTS);
1793 TLS_13_TESTS(ADD_PADDING_RECEIVE_TESTS);
1794 INVALID_CIPHER_SUITES(ADD_INVALID_RECEIVE_TEST);
1797 ATF_TP_ADD_TC(tp, ktls_sendto_baddst);
1799 return (atf_no_error());