 * Copyright 2001-2020 The OpenSSL Project Authors. All Rights Reserved.
 * Licensed under the OpenSSL license (the "License"). You may not use
 * this file except in compliance with the License. You can obtain a copy
 * in the file LICENSE in the source distribution or at
 * https://www.openssl.org/source/license.html
#include <openssl/opensslconf.h>
#include <openssl/crypto.h>
#include <openssl/evp.h>
#include <openssl/err.h>
#include <openssl/aes.h>
#include "crypto/evp.h"
#include "modes_local.h"
#include <openssl/rand.h>
#include "evp_local.h"
} ks; /* AES key schedule to use */
int key_set; /* Set if key initialised */
int iv_set; /* Set if an iv is set */
unsigned char *iv; /* Temporary IV store */
int ivlen; /* IV length */
int iv_gen; /* It is OK to generate IVs */
int tls_aad_len; /* TLS AAD length */
} ks1, ks2; /* AES key schedules to use */
void (*stream) (const unsigned char *in,
unsigned char *out, size_t length,
const AES_KEY *key1, const AES_KEY *key2,
const unsigned char iv[16]);
} ks; /* AES key schedule to use */
int key_set; /* Set if key initialised */
int iv_set; /* Set if an iv is set */
int tag_set; /* Set if tag is valid */
int len_set; /* Set if message length set */
int L, M; /* L and M parameters from RFC3610 */
int tls_aad_len; /* TLS AAD length */
#ifndef OPENSSL_NO_OCB
} ksenc; /* AES key schedule to use for encryption */
} ksdec; /* AES key schedule to use for decryption */
int key_set; /* Set if key initialised */
int iv_set; /* Set if an iv is set */
unsigned char *iv; /* Temporary IV store */
unsigned char tag[16];
unsigned char data_buf[16]; /* Store partial data blocks */
unsigned char aad_buf[16]; /* Store partial AAD blocks */
int ivlen; /* IV length */
#define MAXBITCHUNK ((size_t)1<<(sizeof(size_t)*8-4))
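/*
 * Editorial note: MAXBITCHUNK is one sixteenth of the size_t range, i.e.
 * 2^60 on a 64-bit build and 2^28 on a 32-bit build. It is presumably used
 * to cap per-call chunk sizes where byte counts are converted to bit counts
 * (len * 8), so that the multiplication cannot overflow.
 */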
int vpaes_set_encrypt_key(const unsigned char *userKey, int bits,
int vpaes_set_decrypt_key(const unsigned char *userKey, int bits,
void vpaes_encrypt(const unsigned char *in, unsigned char *out,
void vpaes_decrypt(const unsigned char *in, unsigned char *out,
void vpaes_cbc_encrypt(const unsigned char *in,
const AES_KEY *key, unsigned char *ivec, int enc);
void bsaes_cbc_encrypt(const unsigned char *in, unsigned char *out,
size_t length, const AES_KEY *key,
unsigned char ivec[16], int enc);
void bsaes_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
size_t len, const AES_KEY *key,
const unsigned char ivec[16]);
void bsaes_xts_encrypt(const unsigned char *inp, unsigned char *out,
size_t len, const AES_KEY *key1,
const AES_KEY *key2, const unsigned char iv[16]);
void bsaes_xts_decrypt(const unsigned char *inp, unsigned char *out,
size_t len, const AES_KEY *key1,
const AES_KEY *key2, const unsigned char iv[16]);
#if !defined(AES_ASM) && !defined(AES_CTR_ASM) \
&& defined(OPENSSL_AES_CONST_TIME) \
&& !defined(OPENSSL_SMALL_FOOTPRINT)
void AES_ctr32_encrypt(const unsigned char *in, unsigned char *out,
size_t blocks, const AES_KEY *key,
const unsigned char ivec[AES_BLOCK_SIZE]);
void AES_xts_encrypt(const unsigned char *inp, unsigned char *out, size_t len,
const AES_KEY *key1, const AES_KEY *key2,
const unsigned char iv[16]);
void AES_xts_decrypt(const unsigned char *inp, unsigned char *out, size_t len,
const AES_KEY *key1, const AES_KEY *key2,
const unsigned char iv[16]);
/* increment counter (64-bit int) by 1 */
static void ctr64_inc(unsigned char *counter)
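{
    /*
     * Body not included in this excerpt; a minimal sketch of the usual
     * implementation: treat the 8 bytes at counter as a big-endian integer
     * and add one, propagating the carry upwards from the least-significant
     * byte and stopping as soon as a byte does not wrap to zero.
     */
    int n = 8;
    unsigned char c;

    do {
        --n;
        c = counter[n];
        ++c;
        counter[n] = c;
        if (c)
            return;
    } while (n);
}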
#if defined(OPENSSL_CPUID_OBJ) && (defined(__powerpc__) || defined(__ppc__) || defined(_ARCH_PPC))
# include "ppc_arch.h"
# define VPAES_CAPABLE (OPENSSL_ppccap_P & PPC_ALTIVEC)
# define HWAES_CAPABLE (OPENSSL_ppccap_P & PPC_CRYPTO207)
# define HWAES_set_encrypt_key aes_p8_set_encrypt_key
# define HWAES_set_decrypt_key aes_p8_set_decrypt_key
# define HWAES_encrypt aes_p8_encrypt
# define HWAES_decrypt aes_p8_decrypt
# define HWAES_cbc_encrypt aes_p8_cbc_encrypt
# define HWAES_ctr32_encrypt_blocks aes_p8_ctr32_encrypt_blocks
# define HWAES_xts_encrypt aes_p8_xts_encrypt
# define HWAES_xts_decrypt aes_p8_xts_decrypt
#if defined(OPENSSL_CPUID_OBJ) && ( \
((defined(__i386) || defined(__i386__) || \
defined(_M_IX86)) && defined(OPENSSL_IA32_SSE2))|| \
defined(__x86_64) || defined(__x86_64__) || \
defined(_M_AMD64) || defined(_M_X64) )
extern unsigned int OPENSSL_ia32cap_P[];
# define VPAES_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(41-32)))
# define BSAES_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(41-32)))
# define AESNI_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(57-32)))
int aesni_set_encrypt_key(const unsigned char *userKey, int bits,
int aesni_set_decrypt_key(const unsigned char *userKey, int bits,
void aesni_encrypt(const unsigned char *in, unsigned char *out,
void aesni_decrypt(const unsigned char *in, unsigned char *out,
void aesni_ecb_encrypt(const unsigned char *in,
size_t length, const AES_KEY *key, int enc);
void aesni_cbc_encrypt(const unsigned char *in,
const AES_KEY *key, unsigned char *ivec, int enc);
void aesni_ctr32_encrypt_blocks(const unsigned char *in,
const void *key, const unsigned char *ivec);
void aesni_xts_encrypt(const unsigned char *in,
const AES_KEY *key1, const AES_KEY *key2,
const unsigned char iv[16]);
void aesni_xts_decrypt(const unsigned char *in,
const AES_KEY *key1, const AES_KEY *key2,
const unsigned char iv[16]);
void aesni_ccm64_encrypt_blocks(const unsigned char *in,
const unsigned char ivec[16],
unsigned char cmac[16]);
void aesni_ccm64_decrypt_blocks(const unsigned char *in,
const unsigned char ivec[16],
unsigned char cmac[16]);
# if defined(__x86_64) || defined(__x86_64__) || defined(_M_AMD64) || defined(_M_X64)
size_t aesni_gcm_encrypt(const unsigned char *in,
const void *key, unsigned char ivec[16], u64 *Xi);
# define AES_gcm_encrypt aesni_gcm_encrypt
size_t aesni_gcm_decrypt(const unsigned char *in,
const void *key, unsigned char ivec[16], u64 *Xi);
# define AES_gcm_decrypt aesni_gcm_decrypt
void gcm_ghash_avx(u64 Xi[2], const u128 Htable[16], const u8 *in,
# define AES_GCM_ASM(gctx) (gctx->ctr==aesni_ctr32_encrypt_blocks && \
gctx->gcm.ghash==gcm_ghash_avx)
# define AES_GCM_ASM2(gctx) (gctx->gcm.block==(block128_f)aesni_encrypt && \
gctx->gcm.ghash==gcm_ghash_avx)
# undef AES_GCM_ASM2 /* minor size optimization */
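/*
 * Editorial note: AES_GCM_ASM(gctx) gates the stitched aesni_gcm_encrypt/
 * aesni_gcm_decrypt fast path; it only fires when both the CTR function and
 * the GHASH function installed in the context are the AES-NI/AVX ones set up
 * by aesni_gcm_init_key() below. AES_GCM_ASM2 would have keyed the same
 * decision off the generic block function, but it is #undef'd above as a
 * minor size optimization.
 */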
static int aesni_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
const unsigned char *iv, int enc)
EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
mode = EVP_CIPHER_CTX_mode(ctx);
if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
ret = aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
dat->block = (block128_f) aesni_decrypt;
dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
(cbc128_f) aesni_cbc_encrypt : NULL;
ret = aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
dat->block = (block128_f) aesni_encrypt;
if (mode == EVP_CIPH_CBC_MODE)
dat->stream.cbc = (cbc128_f) aesni_cbc_encrypt;
else if (mode == EVP_CIPH_CTR_MODE)
dat->stream.ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
dat->stream.cbc = NULL;
EVPerr(EVP_F_AESNI_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
static int aesni_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len)
aesni_cbc_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks,
EVP_CIPHER_CTX_iv_noconst(ctx),
EVP_CIPHER_CTX_encrypting(ctx));
static int aesni_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len)
size_t bl = EVP_CIPHER_CTX_block_size(ctx);
aesni_ecb_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks,
EVP_CIPHER_CTX_encrypting(ctx));
# define aesni_ofb_cipher aes_ofb_cipher
static int aesni_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len);
# define aesni_cfb_cipher aes_cfb_cipher
static int aesni_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len);
# define aesni_cfb8_cipher aes_cfb8_cipher
static int aesni_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len);
# define aesni_cfb1_cipher aes_cfb1_cipher
static int aesni_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len);
# define aesni_ctr_cipher aes_ctr_cipher
static int aesni_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len);
static int aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
const unsigned char *iv, int enc)
EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, (block128_f) aesni_encrypt);
gctx->ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
* If we have an IV, we can set it directly; otherwise use the saved IV.
if (iv == NULL && gctx->iv_set)
CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
/* If key set use IV, otherwise copy */
CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
memcpy(gctx->iv, iv, gctx->ivlen);
# define aesni_gcm_cipher aes_gcm_cipher
static int aesni_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len);
static int aesni_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
const unsigned char *iv, int enc)
EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
/* The key is two half length keys in reality */
const int bytes = EVP_CIPHER_CTX_key_length(ctx) / 2;
* Verify that the two keys are different.
* This addresses Rogaway's vulnerability.
* See comment in aes_xts_init_key() below.
if (enc && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
EVPerr(EVP_F_AESNI_XTS_INIT_KEY, EVP_R_XTS_DUPLICATED_KEYS);
/* key_len is two AES keys */
aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
xctx->xts.block1 = (block128_f) aesni_encrypt;
xctx->stream = aesni_xts_encrypt;
aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
xctx->xts.block1 = (block128_f) aesni_decrypt;
xctx->stream = aesni_xts_decrypt;
aesni_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
EVP_CIPHER_CTX_key_length(ctx) * 4,
xctx->xts.block2 = (block128_f) aesni_encrypt;
xctx->xts.key1 = &xctx->ks1;
xctx->xts.key2 = &xctx->ks2;
memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
# define aesni_xts_cipher aes_xts_cipher
static int aesni_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len);
static int aesni_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
const unsigned char *iv, int enc)
EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
&cctx->ks, (block128_f) aesni_encrypt);
cctx->str = enc ? (ccm128_f) aesni_ccm64_encrypt_blocks :
(ccm128_f) aesni_ccm64_decrypt_blocks;
memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
# define aesni_ccm_cipher aes_ccm_cipher
static int aesni_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len);
# ifndef OPENSSL_NO_OCB
void aesni_ocb_encrypt(const unsigned char *in, unsigned char *out,
size_t blocks, const void *key,
size_t start_block_num,
unsigned char offset_i[16],
const unsigned char L_[][16],
unsigned char checksum[16]);
void aesni_ocb_decrypt(const unsigned char *in, unsigned char *out,
size_t blocks, const void *key,
size_t start_block_num,
unsigned char offset_i[16],
const unsigned char L_[][16],
unsigned char checksum[16]);
static int aesni_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
const unsigned char *iv, int enc)
EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
* We set both the encrypt and decrypt key here because decrypt
* needs both. We could possibly optimise to remove setting the
* decrypt for an encryption operation.
aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
if (!CRYPTO_ocb128_init(&octx->ocb,
&octx->ksenc.ks, &octx->ksdec.ks,
(block128_f) aesni_encrypt,
(block128_f) aesni_decrypt,
enc ? aesni_ocb_encrypt
: aesni_ocb_decrypt))
* If we have an iv we can set it directly, otherwise use saved IV.
if (iv == NULL && octx->iv_set)
if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
/* If key set use IV, otherwise copy */
CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
memcpy(octx->iv, iv, octx->ivlen);
# define aesni_ocb_cipher aes_ocb_cipher
static int aesni_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len);
# endif /* OPENSSL_NO_OCB */
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aesni_##keylen##_##mode = { \
nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
flags|EVP_CIPH_##MODE##_MODE, \
aesni_##mode##_cipher, \
sizeof(EVP_AES_KEY), \
NULL,NULL,NULL,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
nid##_##keylen##_##nmode,blocksize, \
flags|EVP_CIPH_##MODE##_MODE, \
aes_##mode##_cipher, \
sizeof(EVP_AES_KEY), \
NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aesni_##keylen##_##mode = { \
nid##_##keylen##_##mode,blocksize, \
(EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
flags|EVP_CIPH_##MODE##_MODE, \
aesni_##mode##_init_key, \
aesni_##mode##_cipher, \
aes_##mode##_cleanup, \
sizeof(EVP_AES_##MODE##_CTX), \
NULL,NULL,aes_##mode##_ctrl,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
nid##_##keylen##_##mode,blocksize, \
(EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
flags|EVP_CIPH_##MODE##_MODE, \
aes_##mode##_init_key, \
aes_##mode##_cipher, \
aes_##mode##_cleanup, \
sizeof(EVP_AES_##MODE##_CTX), \
NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
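/*
 * Editorial sketch (not part of the original source): the EVP_aes_*()
 * functions generated by the two macros above are what applications reach
 * through the EVP interface. A minimal AES-256-GCM encryption using the
 * dispatcher defined here, assuming <openssl/evp.h>, a 32-byte key, a
 * 12-byte iv and suitably sized buffers, with error checks omitted:
 *
 *     EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
 *     int outl = 0, tmplen = 0;
 *
 *     EVP_EncryptInit_ex(c, EVP_aes_256_gcm(), NULL, key, iv);
 *     EVP_EncryptUpdate(c, NULL, &outl, aad, aadlen);
 *     EVP_EncryptUpdate(c, ct, &outl, pt, ptlen);
 *     EVP_EncryptFinal_ex(c, ct + outl, &tmplen);
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_GET_TAG, 16, tag);
 *     EVP_CIPHER_CTX_free(c);
 *
 * The first Update call, with a NULL output buffer, supplies the AAD only.
 * EVP_aes_256_gcm() resolves to the aesni_256_gcm table when AESNI_CAPABLE
 * is true and to the generic aes_256_gcm table otherwise.
 */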
#elif defined(AES_ASM) && (defined(__sparc) || defined(__sparc__))
# include "sparc_arch.h"
extern unsigned int OPENSSL_sparcv9cap_P[];
* Initial Fujitsu SPARC64 X support
# define HWAES_CAPABLE (OPENSSL_sparcv9cap_P[0] & SPARCV9_FJAESX)
# define HWAES_set_encrypt_key aes_fx_set_encrypt_key
# define HWAES_set_decrypt_key aes_fx_set_decrypt_key
# define HWAES_encrypt aes_fx_encrypt
# define HWAES_decrypt aes_fx_decrypt
# define HWAES_cbc_encrypt aes_fx_cbc_encrypt
# define HWAES_ctr32_encrypt_blocks aes_fx_ctr32_encrypt_blocks
# define SPARC_AES_CAPABLE (OPENSSL_sparcv9cap_P[1] & CFR_AES)
void aes_t4_set_encrypt_key(const unsigned char *key, int bits, AES_KEY *ks);
void aes_t4_set_decrypt_key(const unsigned char *key, int bits, AES_KEY *ks);
void aes_t4_encrypt(const unsigned char *in, unsigned char *out,
void aes_t4_decrypt(const unsigned char *in, unsigned char *out,
* Key-length-specific subroutines were chosen for the following reason.
* Each SPARC T4 core can execute up to 8 threads which share the core's
* resources. Loading as much key material as possible into registers
* minimizes references to the shared memory interface, as well as the
* number of instructions in the inner loops [much needed on T4]. But then
* non-key-length-specific routines would require conditional branches,
* either in the inner loops or on subroutine entry. The former is hardly
* acceptable, while the latter would grow the code to roughly the size
* occupied by multiple key-length-specific subroutines, so why fight?
void aes128_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
size_t len, const AES_KEY *key,
unsigned char *ivec);
void aes128_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
size_t len, const AES_KEY *key,
unsigned char *ivec);
void aes192_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
size_t len, const AES_KEY *key,
unsigned char *ivec);
void aes192_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
size_t len, const AES_KEY *key,
unsigned char *ivec);
void aes256_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
size_t len, const AES_KEY *key,
unsigned char *ivec);
void aes256_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
size_t len, const AES_KEY *key,
unsigned char *ivec);
void aes128_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
size_t blocks, const AES_KEY *key,
unsigned char *ivec);
void aes192_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
size_t blocks, const AES_KEY *key,
unsigned char *ivec);
void aes256_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
size_t blocks, const AES_KEY *key,
unsigned char *ivec);
void aes128_t4_xts_encrypt(const unsigned char *in, unsigned char *out,
size_t blocks, const AES_KEY *key1,
const AES_KEY *key2, const unsigned char *ivec);
void aes128_t4_xts_decrypt(const unsigned char *in, unsigned char *out,
size_t blocks, const AES_KEY *key1,
const AES_KEY *key2, const unsigned char *ivec);
void aes256_t4_xts_encrypt(const unsigned char *in, unsigned char *out,
size_t blocks, const AES_KEY *key1,
const AES_KEY *key2, const unsigned char *ivec);
void aes256_t4_xts_decrypt(const unsigned char *in, unsigned char *out,
size_t blocks, const AES_KEY *key1,
const AES_KEY *key2, const unsigned char *ivec);
static int aes_t4_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
const unsigned char *iv, int enc)
EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
mode = EVP_CIPHER_CTX_mode(ctx);
bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
aes_t4_set_decrypt_key(key, bits, &dat->ks.ks);
dat->block = (block128_f) aes_t4_decrypt;
dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
(cbc128_f) aes128_t4_cbc_decrypt : NULL;
dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
(cbc128_f) aes192_t4_cbc_decrypt : NULL;
dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
(cbc128_f) aes256_t4_cbc_decrypt : NULL;
aes_t4_set_encrypt_key(key, bits, &dat->ks.ks);
dat->block = (block128_f) aes_t4_encrypt;
if (mode == EVP_CIPH_CBC_MODE)
dat->stream.cbc = (cbc128_f) aes128_t4_cbc_encrypt;
else if (mode == EVP_CIPH_CTR_MODE)
dat->stream.ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
dat->stream.cbc = NULL;
if (mode == EVP_CIPH_CBC_MODE)
dat->stream.cbc = (cbc128_f) aes192_t4_cbc_encrypt;
else if (mode == EVP_CIPH_CTR_MODE)
dat->stream.ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
dat->stream.cbc = NULL;
if (mode == EVP_CIPH_CBC_MODE)
dat->stream.cbc = (cbc128_f) aes256_t4_cbc_encrypt;
else if (mode == EVP_CIPH_CTR_MODE)
dat->stream.ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
dat->stream.cbc = NULL;
EVPerr(EVP_F_AES_T4_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
# define aes_t4_cbc_cipher aes_cbc_cipher
static int aes_t4_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len);
# define aes_t4_ecb_cipher aes_ecb_cipher
static int aes_t4_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len);
# define aes_t4_ofb_cipher aes_ofb_cipher
static int aes_t4_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len);
# define aes_t4_cfb_cipher aes_cfb_cipher
static int aes_t4_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len);
# define aes_t4_cfb8_cipher aes_cfb8_cipher
static int aes_t4_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len);
# define aes_t4_cfb1_cipher aes_cfb1_cipher
static int aes_t4_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len);
# define aes_t4_ctr_cipher aes_ctr_cipher
static int aes_t4_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len);
static int aes_t4_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
const unsigned char *iv, int enc)
EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
int bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
aes_t4_set_encrypt_key(key, bits, &gctx->ks.ks);
CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
(block128_f) aes_t4_encrypt);
gctx->ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
gctx->ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
gctx->ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
* If we have an IV, we can set it directly; otherwise use the saved IV.
if (iv == NULL && gctx->iv_set)
CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
/* If key set use IV, otherwise copy */
CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
memcpy(gctx->iv, iv, gctx->ivlen);
# define aes_t4_gcm_cipher aes_gcm_cipher
static int aes_t4_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len);
static int aes_t4_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
const unsigned char *iv, int enc)
EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
/* The key is two half length keys in reality */
const int bytes = EVP_CIPHER_CTX_key_length(ctx) / 2;
const int bits = bytes * 8;
* Verify that the two keys are different.
* This addresses Rogaway's vulnerability.
* See comment in aes_xts_init_key() below.
if (enc && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
EVPerr(EVP_F_AES_T4_XTS_INIT_KEY, EVP_R_XTS_DUPLICATED_KEYS);
/* key_len is two AES keys */
aes_t4_set_encrypt_key(key, bits, &xctx->ks1.ks);
xctx->xts.block1 = (block128_f) aes_t4_encrypt;
xctx->stream = aes128_t4_xts_encrypt;
xctx->stream = aes256_t4_xts_encrypt;
aes_t4_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
xctx->xts.block1 = (block128_f) aes_t4_decrypt;
xctx->stream = aes128_t4_xts_decrypt;
xctx->stream = aes256_t4_xts_decrypt;
aes_t4_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
EVP_CIPHER_CTX_key_length(ctx) * 4,
xctx->xts.block2 = (block128_f) aes_t4_encrypt;
xctx->xts.key1 = &xctx->ks1;
xctx->xts.key2 = &xctx->ks2;
memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
# define aes_t4_xts_cipher aes_xts_cipher
static int aes_t4_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len);
static int aes_t4_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
const unsigned char *iv, int enc)
EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
int bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
aes_t4_set_encrypt_key(key, bits, &cctx->ks.ks);
CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
&cctx->ks, (block128_f) aes_t4_encrypt);
memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
# define aes_t4_ccm_cipher aes_ccm_cipher
static int aes_t4_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len);
# ifndef OPENSSL_NO_OCB
static int aes_t4_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
const unsigned char *iv, int enc)
EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
* We set both the encrypt and decrypt key here because decrypt
* needs both. We could possibly optimise to remove setting the
* decrypt for an encryption operation.
aes_t4_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
aes_t4_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
if (!CRYPTO_ocb128_init(&octx->ocb,
&octx->ksenc.ks, &octx->ksdec.ks,
(block128_f) aes_t4_encrypt,
(block128_f) aes_t4_decrypt,
* If we have an iv we can set it directly, otherwise use saved IV.
if (iv == NULL && octx->iv_set)
if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
/* If key set use IV, otherwise copy */
CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
memcpy(octx->iv, iv, octx->ivlen);
# define aes_t4_ocb_cipher aes_ocb_cipher
static int aes_t4_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len);
# endif /* OPENSSL_NO_OCB */
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
flags|EVP_CIPH_##MODE##_MODE, \
aes_t4_##mode##_cipher, \
sizeof(EVP_AES_KEY), \
NULL,NULL,NULL,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
nid##_##keylen##_##nmode,blocksize, \
flags|EVP_CIPH_##MODE##_MODE, \
aes_##mode##_cipher, \
sizeof(EVP_AES_KEY), \
NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }
# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
nid##_##keylen##_##mode,blocksize, \
(EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
flags|EVP_CIPH_##MODE##_MODE, \
aes_t4_##mode##_init_key, \
aes_t4_##mode##_cipher, \
aes_##mode##_cleanup, \
sizeof(EVP_AES_##MODE##_CTX), \
NULL,NULL,aes_##mode##_ctrl,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
nid##_##keylen##_##mode,blocksize, \
(EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
flags|EVP_CIPH_##MODE##_MODE, \
aes_##mode##_init_key, \
aes_##mode##_cipher, \
aes_##mode##_cleanup, \
sizeof(EVP_AES_##MODE##_CTX), \
NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }
#elif defined(OPENSSL_CPUID_OBJ) && defined(__s390__)
# include "s390x_arch.h"
* KM-AES parameter block - begin
* (see z/Architecture Principles of Operation >= SA22-7832-06)
unsigned char k[32];
/* KM-AES parameter block - end */
} S390X_AES_ECB_CTX;
* KMO-AES parameter block - begin
* (see z/Architecture Principles of Operation >= SA22-7832-08)
unsigned char cv[16];
unsigned char k[32];
/* KMO-AES parameter block - end */
} S390X_AES_OFB_CTX;
* KMF-AES parameter block - begin
* (see z/Architecture Principles of Operation >= SA22-7832-08)
unsigned char cv[16];
unsigned char k[32];
/* KMF-AES parameter block - end */
} S390X_AES_CFB_CTX;
* KMA-GCM-AES parameter block - begin
* (see z/Architecture Principles of Operation >= SA22-7832-11)
unsigned char reserved[12];
unsigned long long g[2];
unsigned char b[16];
unsigned char h[16];
unsigned long long taadl;
unsigned long long tpcl;
unsigned long long g[2];
unsigned char k[32];
/* KMA-GCM-AES parameter block - end */
unsigned char ares[16];
unsigned char mres[16];
unsigned char kres[16];
} S390X_AES_GCM_CTX;
* Padding is chosen so that ccm.kmac_param.k overlaps with key.k and
* ccm.fc with key.k.rounds. Remember that on s390x, an AES_KEY's
* rounds field is used to store the function code and that the key
* schedule is not stored (if aes hardware support is detected).
unsigned char pad[16];
* KMAC-AES parameter block - begin
* (see z/Architecture Principles of Operation >= SA22-7832-08)
unsigned long long g[2];
unsigned char b[16];
unsigned char k[32];
/* KMAC-AES parameter block - end */
unsigned long long g[2];
unsigned char b[16];
unsigned long long g[2];
unsigned char b[16];
unsigned long long blocks;
unsigned char pad[140];
} S390X_AES_CCM_CTX;
/* Convert key size to function code: [16,24,32] -> [18,19,20]. */
# define S390X_AES_FC(keylen) (S390X_AES_128 + ((((keylen) << 3) - 128) >> 6))
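/*
 * Worked example of the mapping above: keylen 16 gives ((128 - 128) >> 6) = 0,
 * keylen 24 gives (192 - 128) >> 6 = 1 and keylen 32 gives (256 - 128) >> 6 = 2,
 * i.e. S390X_AES_128 + {0, 1, 2}, the function codes 18, 19 and 20.
 */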
/* Most modes of operation need km for partial block processing. */
# define S390X_aes_128_CAPABLE (OPENSSL_s390xcap_P.km[0] & \
S390X_CAPBIT(S390X_AES_128))
# define S390X_aes_192_CAPABLE (OPENSSL_s390xcap_P.km[0] & \
S390X_CAPBIT(S390X_AES_192))
# define S390X_aes_256_CAPABLE (OPENSSL_s390xcap_P.km[0] & \
S390X_CAPBIT(S390X_AES_256))
# define s390x_aes_init_key aes_init_key
static int s390x_aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
const unsigned char *iv, int enc);
# define S390X_aes_128_cbc_CAPABLE 1 /* checked by callee */
# define S390X_aes_192_cbc_CAPABLE 1
# define S390X_aes_256_cbc_CAPABLE 1
# define S390X_AES_CBC_CTX EVP_AES_KEY
# define s390x_aes_cbc_init_key aes_init_key
# define s390x_aes_cbc_cipher aes_cbc_cipher
static int s390x_aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len);
# define S390X_aes_128_ecb_CAPABLE S390X_aes_128_CAPABLE
# define S390X_aes_192_ecb_CAPABLE S390X_aes_192_CAPABLE
# define S390X_aes_256_ecb_CAPABLE S390X_aes_256_CAPABLE
static int s390x_aes_ecb_init_key(EVP_CIPHER_CTX *ctx,
const unsigned char *key,
const unsigned char *iv, int enc)
S390X_AES_ECB_CTX *cctx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx);
const int keylen = EVP_CIPHER_CTX_key_length(ctx);
cctx->fc = S390X_AES_FC(keylen);
cctx->fc |= S390X_DECRYPT;
memcpy(cctx->km.param.k, key, keylen);
static int s390x_aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len)
S390X_AES_ECB_CTX *cctx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx);
s390x_km(in, len, out, cctx->fc, &cctx->km.param);
# define S390X_aes_128_ofb_CAPABLE (S390X_aes_128_CAPABLE && \
(OPENSSL_s390xcap_P.kmo[0] & \
S390X_CAPBIT(S390X_AES_128)))
# define S390X_aes_192_ofb_CAPABLE (S390X_aes_192_CAPABLE && \
(OPENSSL_s390xcap_P.kmo[0] & \
S390X_CAPBIT(S390X_AES_192)))
# define S390X_aes_256_ofb_CAPABLE (S390X_aes_256_CAPABLE && \
(OPENSSL_s390xcap_P.kmo[0] & \
S390X_CAPBIT(S390X_AES_256)))
static int s390x_aes_ofb_init_key(EVP_CIPHER_CTX *ctx,
const unsigned char *key,
const unsigned char *ivec, int enc)
S390X_AES_OFB_CTX *cctx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx);
const unsigned char *iv = EVP_CIPHER_CTX_original_iv(ctx);
const int keylen = EVP_CIPHER_CTX_key_length(ctx);
const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);
memcpy(cctx->kmo.param.cv, iv, ivlen);
memcpy(cctx->kmo.param.k, key, keylen);
cctx->fc = S390X_AES_FC(keylen);
static int s390x_aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len)
S390X_AES_OFB_CTX *cctx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx);
*out = *in ^ cctx->kmo.param.cv[n];
len &= ~(size_t)0xf;
s390x_kmo(in, len, out, cctx->fc, &cctx->kmo.param);
s390x_km(cctx->kmo.param.cv, 16, cctx->kmo.param.cv, cctx->fc,
out[n] = in[n] ^ cctx->kmo.param.cv[n];
# define S390X_aes_128_cfb_CAPABLE (S390X_aes_128_CAPABLE && \
(OPENSSL_s390xcap_P.kmf[0] & \
S390X_CAPBIT(S390X_AES_128)))
# define S390X_aes_192_cfb_CAPABLE (S390X_aes_192_CAPABLE && \
(OPENSSL_s390xcap_P.kmf[0] & \
S390X_CAPBIT(S390X_AES_192)))
# define S390X_aes_256_cfb_CAPABLE (S390X_aes_256_CAPABLE && \
(OPENSSL_s390xcap_P.kmf[0] & \
S390X_CAPBIT(S390X_AES_256)))
static int s390x_aes_cfb_init_key(EVP_CIPHER_CTX *ctx,
const unsigned char *key,
const unsigned char *ivec, int enc)
S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
const unsigned char *iv = EVP_CIPHER_CTX_original_iv(ctx);
const int keylen = EVP_CIPHER_CTX_key_length(ctx);
const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);
cctx->fc = S390X_AES_FC(keylen);
cctx->fc |= 16 << 24; /* 16 bytes cipher feedback */
cctx->fc |= S390X_DECRYPT;
memcpy(cctx->kmf.param.cv, iv, ivlen);
memcpy(cctx->kmf.param.k, key, keylen);
static int s390x_aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len)
S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
const int keylen = EVP_CIPHER_CTX_key_length(ctx);
const int enc = EVP_CIPHER_CTX_encrypting(ctx);
*out = cctx->kmf.param.cv[n] ^ tmp;
cctx->kmf.param.cv[n] = enc ? *out : tmp;
len &= ~(size_t)0xf;
s390x_kmf(in, len, out, cctx->fc, &cctx->kmf.param);
s390x_km(cctx->kmf.param.cv, 16, cctx->kmf.param.cv,
S390X_AES_FC(keylen), cctx->kmf.param.k);
out[n] = cctx->kmf.param.cv[n] ^ tmp;
cctx->kmf.param.cv[n] = enc ? out[n] : tmp;
# define S390X_aes_128_cfb8_CAPABLE (OPENSSL_s390xcap_P.kmf[0] & \
S390X_CAPBIT(S390X_AES_128))
# define S390X_aes_192_cfb8_CAPABLE (OPENSSL_s390xcap_P.kmf[0] & \
S390X_CAPBIT(S390X_AES_192))
# define S390X_aes_256_cfb8_CAPABLE (OPENSSL_s390xcap_P.kmf[0] & \
S390X_CAPBIT(S390X_AES_256))
static int s390x_aes_cfb8_init_key(EVP_CIPHER_CTX *ctx,
const unsigned char *key,
const unsigned char *ivec, int enc)
S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
const unsigned char *iv = EVP_CIPHER_CTX_original_iv(ctx);
const int keylen = EVP_CIPHER_CTX_key_length(ctx);
const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);
cctx->fc = S390X_AES_FC(keylen);
cctx->fc |= 1 << 24; /* 1 byte cipher feedback */
cctx->fc |= S390X_DECRYPT;
memcpy(cctx->kmf.param.cv, iv, ivlen);
memcpy(cctx->kmf.param.k, key, keylen);
static int s390x_aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len)
S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
s390x_kmf(in, len, out, cctx->fc, &cctx->kmf.param);
# define S390X_aes_128_cfb1_CAPABLE 0
# define S390X_aes_192_cfb1_CAPABLE 0
# define S390X_aes_256_cfb1_CAPABLE 0
# define s390x_aes_cfb1_init_key aes_init_key
# define s390x_aes_cfb1_cipher aes_cfb1_cipher
static int s390x_aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len);
# define S390X_aes_128_ctr_CAPABLE 1 /* checked by callee */
# define S390X_aes_192_ctr_CAPABLE 1
# define S390X_aes_256_ctr_CAPABLE 1
# define S390X_AES_CTR_CTX EVP_AES_KEY
# define s390x_aes_ctr_init_key aes_init_key
# define s390x_aes_ctr_cipher aes_ctr_cipher
static int s390x_aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len);
# define S390X_aes_128_gcm_CAPABLE (S390X_aes_128_CAPABLE && \
(OPENSSL_s390xcap_P.kma[0] & \
S390X_CAPBIT(S390X_AES_128)))
# define S390X_aes_192_gcm_CAPABLE (S390X_aes_192_CAPABLE && \
(OPENSSL_s390xcap_P.kma[0] & \
S390X_CAPBIT(S390X_AES_192)))
# define S390X_aes_256_gcm_CAPABLE (S390X_aes_256_CAPABLE && \
(OPENSSL_s390xcap_P.kma[0] & \
S390X_CAPBIT(S390X_AES_256)))
/* iv + padding length for iv lengths != 12 */
# define S390X_gcm_ivpadlen(i) ((((i) + 15) >> 4 << 4) + 16)
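/*
 * The macro rounds the IV length up to a whole number of 16-byte blocks and
 * adds one extra block for the length field that is hashed at the end, e.g.
 * S390X_gcm_ivpadlen(13) == 32 and S390X_gcm_ivpadlen(17) == 48.
 */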
* Process additional authenticated data. Returns 0 on success. Code is big-endian.
static int s390x_aes_gcm_aad(S390X_AES_GCM_CTX *ctx, const unsigned char *aad,
unsigned long long alen;
if (ctx->kma.param.tpcl)
alen = ctx->kma.param.taadl + len;
if (alen > (U64(1) << 61) || (sizeof(len) == 8 && alen < len))
ctx->kma.param.taadl = alen;
ctx->ares[n] = *aad;
/* ctx->ares contains a complete block if offset has wrapped around */
s390x_kma(ctx->ares, 16, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
ctx->fc |= S390X_KMA_HS;
len &= ~(size_t)0xf;
s390x_kma(aad, len, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
ctx->fc |= S390X_KMA_HS;
ctx->ares[rem] = aad[rem];
* En/de-crypt plain/cipher-text and authenticate ciphertext. Returns 0 for
* success. Code is big-endian.
static int s390x_aes_gcm(S390X_AES_GCM_CTX *ctx, const unsigned char *in,
unsigned char *out, size_t len)
const unsigned char *inptr;
unsigned long long mlen;
unsigned char b[16];
mlen = ctx->kma.param.tpcl + len;
if (mlen > ((U64(1) << 36) - 32) || (sizeof(len) == 8 && mlen < len))
ctx->kma.param.tpcl = mlen;
while (n && inlen) {
ctx->mres[n] = *inptr;
/* ctx->mres contains a complete block if offset has wrapped around */
s390x_kma(ctx->ares, ctx->areslen, ctx->mres, 16, buf.b,
ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
ctx->fc |= S390X_KMA_HS;
/* previous call already encrypted/decrypted its remainder,
* see comment below */
len &= ~(size_t)0xf;
s390x_kma(ctx->ares, ctx->areslen, in, len, out,
ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
ctx->fc |= S390X_KMA_HS;
* If there is a remainder, it has to be saved such that it can be
* processed by kma later. However, we also have to do the for-now
* unauthenticated encryption/decryption part here and now...
if (!ctx->mreslen) {
buf.w[0] = ctx->kma.param.j0.w[0];
buf.w[1] = ctx->kma.param.j0.w[1];
buf.w[2] = ctx->kma.param.j0.w[2];
buf.w[3] = ctx->kma.param.cv.w + 1;
s390x_km(buf.b, 16, ctx->kres, ctx->fc & 0x1f, &ctx->kma.param.k);
for (i = 0; i < rem; i++) {
ctx->mres[n + i] = in[i];
out[i] = in[i] ^ ctx->kres[n + i];
ctx->mreslen += rem;
* Initialize context structure. Code is big-endian.
static void s390x_aes_gcm_setiv(S390X_AES_GCM_CTX *ctx,
const unsigned char *iv)
ctx->kma.param.t.g[0] = 0;
ctx->kma.param.t.g[1] = 0;
ctx->kma.param.tpcl = 0;
ctx->kma.param.taadl = 0;
if (ctx->ivlen == 12) {
memcpy(&ctx->kma.param.j0, iv, ctx->ivlen);
ctx->kma.param.j0.w[3] = 1;
ctx->kma.param.cv.w = 1;
/* ctx->iv has the right size and is already padded. */
memcpy(ctx->iv, iv, ctx->ivlen);
s390x_kma(ctx->iv, S390X_gcm_ivpadlen(ctx->ivlen), NULL, 0, NULL,
ctx->fc, &ctx->kma.param);
ctx->fc |= S390X_KMA_HS;
ctx->kma.param.j0.g[0] = ctx->kma.param.t.g[0];
ctx->kma.param.j0.g[1] = ctx->kma.param.t.g[1];
ctx->kma.param.cv.w = ctx->kma.param.j0.w[3];
ctx->kma.param.t.g[0] = 0;
ctx->kma.param.t.g[1] = 0;
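/*
 * Editorial note: this mirrors the J0 construction of NIST SP 800-38D.
 * For a 96-bit IV, J0 = IV || 0^31 || 1 (the j0.w[3] = 1 above); for any
 * other length, J0 = GHASH(IV || padding || [len(IV)]_64), computed here
 * with KMA over the pre-padded buffer and then copied from the tag field
 * into j0 before the tag accumulator is cleared again.
 */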
* Performs various operations on the context structure depending on control
* type. Returns 1 for success, 0 for failure and -1 for unknown control type.
* Code is big-endian.
static int s390x_aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
S390X_AES_GCM_CTX *gctx_out;
EVP_CIPHER_CTX *out;
unsigned char *buf, *iv;
int ivlen, enc, len;
ivlen = EVP_CIPHER_iv_length(c->cipher);
iv = EVP_CIPHER_CTX_iv_noconst(c);
gctx->ivlen = ivlen;
gctx->tls_aad_len = -1;
case EVP_CTRL_GET_IVLEN:
*(int *)ptr = gctx->ivlen;
case EVP_CTRL_AEAD_SET_IVLEN:
iv = EVP_CIPHER_CTX_iv_noconst(c);
len = S390X_gcm_ivpadlen(arg);
/* Allocate memory for iv if needed. */
if (gctx->ivlen == 12 || len > S390X_gcm_ivpadlen(gctx->ivlen)) {
OPENSSL_free(gctx->iv);
if ((gctx->iv = OPENSSL_malloc(len)) == NULL) {
EVPerr(EVP_F_S390X_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
memset(gctx->iv + arg, 0, len - arg - 8);
*((unsigned long long *)(gctx->iv + len - 8)) = arg << 3;
case EVP_CTRL_AEAD_SET_TAG:
buf = EVP_CIPHER_CTX_buf_noconst(c);
enc = EVP_CIPHER_CTX_encrypting(c);
if (arg <= 0 || arg > 16 || enc)
memcpy(buf, ptr, arg);
case EVP_CTRL_AEAD_GET_TAG:
enc = EVP_CIPHER_CTX_encrypting(c);
if (arg <= 0 || arg > 16 || !enc || gctx->taglen < 0)
memcpy(ptr, gctx->kma.param.t.b, arg);
case EVP_CTRL_GCM_SET_IV_FIXED:
/* Special case: -1 length restores whole iv */
memcpy(gctx->iv, ptr, gctx->ivlen);
* Fixed field must be at least 4 bytes and invocation field at least 8 bytes.
if ((arg < 4) || (gctx->ivlen - arg) < 8)
memcpy(gctx->iv, ptr, arg);
enc = EVP_CIPHER_CTX_encrypting(c);
if (enc && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
case EVP_CTRL_GCM_IV_GEN:
if (gctx->iv_gen == 0 || gctx->key_set == 0)
s390x_aes_gcm_setiv(gctx, gctx->iv);
if (arg <= 0 || arg > gctx->ivlen)
memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
* Invocation field will be at least 8 bytes in size and so no need
* to check wrap around or increment more than last 8 bytes.
ctr64_inc(gctx->iv + gctx->ivlen - 8);
case EVP_CTRL_GCM_SET_IV_INV:
enc = EVP_CIPHER_CTX_encrypting(c);
if (gctx->iv_gen == 0 || gctx->key_set == 0 || enc)
memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
s390x_aes_gcm_setiv(gctx, gctx->iv);
case EVP_CTRL_AEAD_TLS1_AAD:
/* Save the aad for later use. */
if (arg != EVP_AEAD_TLS1_AAD_LEN)
buf = EVP_CIPHER_CTX_buf_noconst(c);
memcpy(buf, ptr, arg);
gctx->tls_aad_len = arg;
len = buf[arg - 2] << 8 | buf[arg - 1];
/* Correct length for explicit iv. */
if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
/* If decrypting correct for tag too. */
enc = EVP_CIPHER_CTX_encrypting(c);
if (len < EVP_GCM_TLS_TAG_LEN)
len -= EVP_GCM_TLS_TAG_LEN;
buf[arg - 2] = len >> 8;
buf[arg - 1] = len & 0xff;
/* Extra padding: tag appended to record. */
return EVP_GCM_TLS_TAG_LEN;
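/*
 * Editorial note: the 13-byte TLS AAD passed in here is laid out as
 * seq_num(8) || type(1) || version(2) || length(2). Only the trailing two
 * length bytes are rewritten above, so that they cover the plaintext rather
 * than explicit IV + payload + tag as carried in the record on the wire.
 */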
gctx_out = EVP_C_DATA(S390X_AES_GCM_CTX, out);
iv = EVP_CIPHER_CTX_iv_noconst(c);
if (gctx->iv == iv) {
gctx_out->iv = EVP_CIPHER_CTX_iv_noconst(out);
len = S390X_gcm_ivpadlen(gctx->ivlen);
if ((gctx_out->iv = OPENSSL_malloc(len)) == NULL) {
EVPerr(EVP_F_S390X_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
memcpy(gctx_out->iv, gctx->iv, len);
* Set key and/or iv. Returns 1 on success. Otherwise 0 is returned.
static int s390x_aes_gcm_init_key(EVP_CIPHER_CTX *ctx,
const unsigned char *key,
const unsigned char *iv, int enc)
S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
if (iv == NULL && key == NULL)
keylen = EVP_CIPHER_CTX_key_length(ctx);
memcpy(&gctx->kma.param.k, key, keylen);
gctx->fc = S390X_AES_FC(keylen);
gctx->fc |= S390X_DECRYPT;
if (iv == NULL && gctx->iv_set)
s390x_aes_gcm_setiv(gctx, iv);
s390x_aes_gcm_setiv(gctx, iv);
memcpy(gctx->iv, iv, gctx->ivlen);
* En/de-crypt and authenticate TLS packet. Returns the number of bytes written
* if successful. Otherwise -1 is returned. Code is big-endian.
static int s390x_aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len)
S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
const unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
const int enc = EVP_CIPHER_CTX_encrypting(ctx);
if (out != in || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
if (EVP_CIPHER_CTX_ctrl(ctx, enc ? EVP_CTRL_GCM_IV_GEN
: EVP_CTRL_GCM_SET_IV_INV,
EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
gctx->kma.param.taadl = gctx->tls_aad_len << 3;
gctx->kma.param.tpcl = len << 3;
s390x_kma(buf, gctx->tls_aad_len, in, len, out,
gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);
memcpy(out + len, gctx->kma.param.t.b, EVP_GCM_TLS_TAG_LEN);
rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
if (CRYPTO_memcmp(gctx->kma.param.t.b, in + len,
EVP_GCM_TLS_TAG_LEN)) {
OPENSSL_cleanse(out, len);
gctx->tls_aad_len = -1;
* Called from EVP layer to initialize context, process additional
* authenticated data, en/de-crypt plain/cipher-text and authenticate
* ciphertext or process a TLS packet, depending on context. Returns bytes
* written on success. Otherwise -1 is returned. Code is big-endian.
static int s390x_aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len)
S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
unsigned char *buf, tmp[16];
if (gctx->tls_aad_len >= 0)
return s390x_aes_gcm_tls_cipher(ctx, out, in, len);
if (s390x_aes_gcm_aad(gctx, in, len))
if (s390x_aes_gcm(gctx, in, out, len))
gctx->kma.param.taadl <<= 3;
gctx->kma.param.tpcl <<= 3;
s390x_kma(gctx->ares, gctx->areslen, gctx->mres, gctx->mreslen, tmp,
gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);
/* recall that we already did en-/decrypt gctx->mres
* and returned it to caller... */
OPENSSL_cleanse(tmp, gctx->mreslen);
enc = EVP_CIPHER_CTX_encrypting(ctx);
if (gctx->taglen < 0)
buf = EVP_CIPHER_CTX_buf_noconst(ctx);
if (CRYPTO_memcmp(buf, gctx->kma.param.t.b, gctx->taglen))
static int s390x_aes_gcm_cleanup(EVP_CIPHER_CTX *c)
S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
const unsigned char *iv;
iv = EVP_CIPHER_CTX_iv(c);
OPENSSL_free(gctx->iv);
OPENSSL_cleanse(gctx, sizeof(*gctx));
# define S390X_AES_XTS_CTX EVP_AES_XTS_CTX
# define S390X_aes_128_xts_CAPABLE 1 /* checked by callee */
# define S390X_aes_256_xts_CAPABLE 1
# define s390x_aes_xts_init_key aes_xts_init_key
static int s390x_aes_xts_init_key(EVP_CIPHER_CTX *ctx,
const unsigned char *key,
const unsigned char *iv, int enc);
# define s390x_aes_xts_cipher aes_xts_cipher
static int s390x_aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len);
# define s390x_aes_xts_ctrl aes_xts_ctrl
static int s390x_aes_xts_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
# define s390x_aes_xts_cleanup aes_xts_cleanup
# define S390X_aes_128_ccm_CAPABLE (S390X_aes_128_CAPABLE && \
(OPENSSL_s390xcap_P.kmac[0] & \
S390X_CAPBIT(S390X_AES_128)))
# define S390X_aes_192_ccm_CAPABLE (S390X_aes_192_CAPABLE && \
(OPENSSL_s390xcap_P.kmac[0] & \
S390X_CAPBIT(S390X_AES_192)))
# define S390X_aes_256_ccm_CAPABLE (S390X_aes_256_CAPABLE && \
(OPENSSL_s390xcap_P.kmac[0] & \
S390X_CAPBIT(S390X_AES_256)))
# define S390X_CCM_AAD_FLAG 0x40
* Set nonce and length fields. Code is big-endian.
static inline void s390x_aes_ccm_setiv(S390X_AES_CCM_CTX *ctx,
const unsigned char *nonce,
ctx->aes.ccm.nonce.b[0] &= ~S390X_CCM_AAD_FLAG;
ctx->aes.ccm.nonce.g[1] = mlen;
memcpy(ctx->aes.ccm.nonce.b + 1, nonce, 15 - ctx->aes.ccm.l);
* Process additional authenticated data. Code is big-endian.
static void s390x_aes_ccm_aad(S390X_AES_CCM_CTX *ctx, const unsigned char *aad,
ctx->aes.ccm.nonce.b[0] |= S390X_CCM_AAD_FLAG;
/* Suppress 'type-punned pointer dereference' warning. */
ptr = ctx->aes.ccm.buf.b;
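/*
 * Editorial note: the associated-data length prefix below follows RFC 3610:
 * lengths below 2^16 - 2^8 are encoded in 2 bytes, lengths of 2^32 and above
 * as 0xff 0xff followed by an 8-byte field, and everything in between as
 * 0xff 0xfe followed by a 4-byte field.
 */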
if (alen < ((1 << 16) - (1 << 8))) {
*(uint16_t *)ptr = alen;
} else if (sizeof(alen) == 8
&& alen >= (size_t)1 << (32 % (sizeof(alen) * 8))) {
*(uint16_t *)ptr = 0xffff;
*(uint64_t *)(ptr + 2) = alen;
*(uint16_t *)ptr = 0xfffe;
*(uint32_t *)(ptr + 2) = alen;
while (i < 16 && alen) {
ctx->aes.ccm.buf.b[i] = *aad;
ctx->aes.ccm.buf.b[i] = 0;
ctx->aes.ccm.kmac_param.icv.g[0] = 0;
ctx->aes.ccm.kmac_param.icv.g[1] = 0;
s390x_kmac(ctx->aes.ccm.nonce.b, 32, ctx->aes.ccm.fc,
&ctx->aes.ccm.kmac_param);
ctx->aes.ccm.blocks += 2;
alen &= ~(size_t)0xf;
s390x_kmac(aad, alen, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
ctx->aes.ccm.blocks += alen >> 4;
for (i = 0; i < rem; i++)
ctx->aes.ccm.kmac_param.icv.b[i] ^= aad[i];
s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
ctx->aes.ccm.kmac_param.k);
ctx->aes.ccm.blocks++;
* En/de-crypt plain/cipher-text. Compute tag from plaintext. Returns 0 for success.
static int s390x_aes_ccm(S390X_AES_CCM_CTX *ctx, const unsigned char *in,
unsigned char *out, size_t len, int enc)
unsigned int i, l, num;
unsigned char flags;
flags = ctx->aes.ccm.nonce.b[0];
if (!(flags & S390X_CCM_AAD_FLAG)) {
s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.kmac_param.icv.b,
ctx->aes.ccm.fc, ctx->aes.ccm.kmac_param.k);
ctx->aes.ccm.blocks++;
ctx->aes.ccm.nonce.b[0] = l;
* Reconstruct length from encoded length field
* and initialize it with counter value.
for (i = 15 - l; i < 15; i++) {
n |= ctx->aes.ccm.nonce.b[i];
ctx->aes.ccm.nonce.b[i] = 0;
n |= ctx->aes.ccm.nonce.b[15];
ctx->aes.ccm.nonce.b[15] = 1;
return -1; /* length mismatch */
/* Two operations per block plus one for tag encryption */
ctx->aes.ccm.blocks += (((len + 15) >> 4) << 1) + 1;
if (ctx->aes.ccm.blocks > (1ULL << 61))
return -2; /* too much data */
len &= ~(size_t)0xf;
/* mac-then-encrypt */
s390x_kmac(in, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
for (i = 0; i < rem; i++)
ctx->aes.ccm.kmac_param.icv.b[i] ^= in[len + i];
s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
ctx->aes.ccm.kmac_param.k);
CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
&num, (ctr128_f)AES_ctr32_encrypt);
/* decrypt-then-mac */
CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
&num, (ctr128_f)AES_ctr32_encrypt);
s390x_kmac(out, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
for (i = 0; i < rem; i++)
ctx->aes.ccm.kmac_param.icv.b[i] ^= out[len + i];
s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
ctx->aes.ccm.kmac_param.k);
for (i = 15 - l; i < 16; i++)
ctx->aes.ccm.nonce.b[i] = 0;
s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.buf.b, ctx->aes.ccm.fc,
ctx->aes.ccm.kmac_param.k);
ctx->aes.ccm.kmac_param.icv.g[0] ^= ctx->aes.ccm.buf.g[0];
ctx->aes.ccm.kmac_param.icv.g[1] ^= ctx->aes.ccm.buf.g[1];
ctx->aes.ccm.nonce.b[0] = flags; /* restore flags field */
* En/de-crypt and authenticate TLS packet. Returns the number of bytes written
* if successful. Otherwise -1 is returned.
static int s390x_aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len)
S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
unsigned char *ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
const int enc = EVP_CIPHER_CTX_encrypting(ctx);
|| len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->aes.ccm.m))
/* Set explicit iv (sequence number). */
memcpy(out, buf, EVP_CCM_TLS_EXPLICIT_IV_LEN);
len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;
* Get explicit iv (sequence number). We already have fixed iv
* (server/client_write_iv) here.
memcpy(ivec + EVP_CCM_TLS_FIXED_IV_LEN, in, EVP_CCM_TLS_EXPLICIT_IV_LEN);
s390x_aes_ccm_setiv(cctx, ivec, len);
/* Process aad (sequence number|type|version|length) */
s390x_aes_ccm_aad(cctx, buf, cctx->aes.ccm.tls_aad_len);
in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
if (s390x_aes_ccm(cctx, in, out, len, enc))
memcpy(out + len, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;
if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, in + len,
OPENSSL_cleanse(out, len);
* Set key and flag field and/or iv. Returns 1 if successful. Otherwise 0 is returned.
static int s390x_aes_ccm_init_key(EVP_CIPHER_CTX *ctx,
const unsigned char *key,
const unsigned char *iv, int enc)
S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
unsigned char *ivec;
if (iv == NULL && key == NULL)
keylen = EVP_CIPHER_CTX_key_length(ctx);
cctx->aes.ccm.fc = S390X_AES_FC(keylen);
memcpy(cctx->aes.ccm.kmac_param.k, key, keylen);
/* Store encoded m and l. */
cctx->aes.ccm.nonce.b[0] = ((cctx->aes.ccm.l - 1) & 0x7)
| (((cctx->aes.ccm.m - 2) >> 1) & 0x7) << 3;
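/*
 * Worked example: with the defaults l = 8 and m = 12 set by
 * s390x_aes_ccm_ctrl(EVP_CTRL_INIT), this flags byte is
 * ((8 - 1) & 0x7) | ((((12 - 2) >> 1) & 0x7) << 3) = 0x07 | 0x28 = 0x2f.
 */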
memset(cctx->aes.ccm.nonce.b + 1, 0,
sizeof(cctx->aes.ccm.nonce.b));
cctx->aes.ccm.blocks = 0;
cctx->aes.ccm.key_set = 1;
ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
memcpy(ivec, iv, 15 - cctx->aes.ccm.l);
cctx->aes.ccm.iv_set = 1;
* Called from EVP layer to initialize context, process additional
* authenticated data, en/de-crypt plain/cipher-text and authenticate
* plaintext or process a TLS packet, depending on context. Returns bytes
* written on success. Otherwise -1 is returned.
static int s390x_aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len)
S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
const int enc = EVP_CIPHER_CTX_encrypting(ctx);
unsigned char *buf, *ivec;
if (!cctx->aes.ccm.key_set)
if (cctx->aes.ccm.tls_aad_len >= 0)
return s390x_aes_ccm_tls_cipher(ctx, out, in, len);
* Final(): Does not return any data. Recall that ccm is mac-then-encrypt
* so integrity must be checked already at Update() i.e., before
* potentially corrupted data is output.
if (in == NULL && out != NULL)
if (!cctx->aes.ccm.iv_set)
/* Update(): Pass message length. */
ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
s390x_aes_ccm_setiv(cctx, ivec, len);
cctx->aes.ccm.len_set = 1;
/* Update(): Process aad. */
if (!cctx->aes.ccm.len_set && len)
s390x_aes_ccm_aad(cctx, in, len);
/* The tag must be set before actually decrypting data */
if (!enc && !cctx->aes.ccm.tag_set)
/* Update(): Process message. */
if (!cctx->aes.ccm.len_set) {
* In case message length was not previously set explicitly via
* Update(), set it now.
ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
s390x_aes_ccm_setiv(cctx, ivec, len);
cctx->aes.ccm.len_set = 1;
if (s390x_aes_ccm(cctx, in, out, len, enc))
cctx->aes.ccm.tag_set = 1;
if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
buf = EVP_CIPHER_CTX_buf_noconst(ctx);
if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, buf,
OPENSSL_cleanse(out, len);
cctx->aes.ccm.iv_set = 0;
cctx->aes.ccm.tag_set = 0;
cctx->aes.ccm.len_set = 0;
* Performs various operations on the context structure depending on control
* type. Returns 1 for success, 0 for failure and -1 for unknown control type.
* Code is big-endian.
static int s390x_aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, c);
unsigned char *buf, *iv;
cctx->aes.ccm.key_set = 0;
cctx->aes.ccm.iv_set = 0;
cctx->aes.ccm.l = 8;
cctx->aes.ccm.m = 12;
cctx->aes.ccm.tag_set = 0;
cctx->aes.ccm.len_set = 0;
cctx->aes.ccm.tls_aad_len = -1;
case EVP_CTRL_GET_IVLEN:
*(int *)ptr = 15 - cctx->aes.ccm.l;
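/*
 * Editorial note: in CCM the flags byte, the nonce and the encoded message
 * length share a single 16-byte block, so an l-octet length field leaves
 * 15 - l octets for the nonce; with the default l = 8 the IV length is 7.
 */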
2345 case EVP_CTRL_AEAD_TLS1_AAD:
2346 if (arg != EVP_AEAD_TLS1_AAD_LEN)
2349 /* Save the aad for later use. */
2350 buf = EVP_CIPHER_CTX_buf_noconst(c);
2351 memcpy(buf, ptr, arg);
2352 cctx->aes.ccm.tls_aad_len = arg;
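/*
 * The last two bytes of the TLS AAD hold the record length (big-endian).
 * Convert it to the plaintext length by stripping the explicit IV and,
 * when decrypting, the tag, then write the corrected value back.
 */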
2354 len = buf[arg - 2] << 8 | buf[arg - 1];
2355 if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
2358 /* Correct length for explicit iv. */
2359 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
2361 enc = EVP_CIPHER_CTX_encrypting(c);
2363 if (len < cctx->aes.ccm.m)
2366 /* Correct length for tag. */
2367 len -= cctx->aes.ccm.m;
2370 buf[arg - 2] = len >> 8;
2371 buf[arg - 1] = len & 0xff;
2373 /* Extra padding: tag appended to record. */
2374 return cctx->aes.ccm.m;
2376 case EVP_CTRL_CCM_SET_IV_FIXED:
2377 if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
2380 /* Copy to first part of the iv. */
2381 iv = EVP_CIPHER_CTX_iv_noconst(c);
2382 memcpy(iv, ptr, arg);
2385 case EVP_CTRL_AEAD_SET_IVLEN:
2389 case EVP_CTRL_CCM_SET_L:
2390 if (arg < 2 || arg > 8)
2393 cctx->aes.ccm.l = arg;
2396 case EVP_CTRL_AEAD_SET_TAG:
2397 if ((arg & 1) || arg < 4 || arg > 16)
2400 enc = EVP_CIPHER_CTX_encrypting(c);
2405 cctx->aes.ccm.tag_set = 1;
2406 buf = EVP_CIPHER_CTX_buf_noconst(c);
2407 memcpy(buf, ptr, arg);
2410 cctx->aes.ccm.m = arg;
2413 case EVP_CTRL_AEAD_GET_TAG:
2414 enc = EVP_CIPHER_CTX_encrypting(c);
2415 if (!enc || !cctx->aes.ccm.tag_set)
2418 if (arg < cctx->aes.ccm.m)
2421 memcpy(ptr, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
2422 cctx->aes.ccm.tag_set = 0;
2423 cctx->aes.ccm.iv_set = 0;
2424 cctx->aes.ccm.len_set = 0;
2435 # define s390x_aes_ccm_cleanup aes_ccm_cleanup
2437 # ifndef OPENSSL_NO_OCB
2438 # define S390X_AES_OCB_CTX EVP_AES_OCB_CTX
2439 # define S390X_aes_128_ocb_CAPABLE 0
2440 # define S390X_aes_192_ocb_CAPABLE 0
2441 # define S390X_aes_256_ocb_CAPABLE 0
2443 # define s390x_aes_ocb_init_key aes_ocb_init_key
2444 static int s390x_aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
2445 const unsigned char *iv, int enc);
2446 # define s390x_aes_ocb_cipher aes_ocb_cipher
2447 static int s390x_aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2448 const unsigned char *in, size_t len);
2449 # define s390x_aes_ocb_cleanup aes_ocb_cleanup
2450 static int s390x_aes_ocb_cleanup(EVP_CIPHER_CTX *);
2451 # define s390x_aes_ocb_ctrl aes_ocb_ctrl
2452 static int s390x_aes_ocb_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
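/*
 * There is no s390x-specific OCB implementation here: the s390x_* OCB names
 * are simply aliased to the generic aes_ocb_* routines and the *_CAPABLE
 * flags are 0, so the generic EVP_CIPHER tables are always selected.
 */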
2455 # define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode, \
2457 static const EVP_CIPHER s390x_aes_##keylen##_##mode = { \
2458 nid##_##keylen##_##nmode,blocksize, \
2461 flags | EVP_CIPH_##MODE##_MODE, \
2462 s390x_aes_##mode##_init_key, \
2463 s390x_aes_##mode##_cipher, \
2465 sizeof(S390X_AES_##MODE##_CTX), \
2471 static const EVP_CIPHER aes_##keylen##_##mode = { \
2472 nid##_##keylen##_##nmode, \
2476 flags | EVP_CIPH_##MODE##_MODE, \
2478 aes_##mode##_cipher, \
2480 sizeof(EVP_AES_KEY), \
2486 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2488 return S390X_aes_##keylen##_##mode##_CAPABLE ? \
2489 &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode; \
2492 # define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags)\
2493 static const EVP_CIPHER s390x_aes_##keylen##_##mode = { \
2494 nid##_##keylen##_##mode, \
2496 (EVP_CIPH_##MODE##_MODE == EVP_CIPH_XTS_MODE ? 2 : 1) * keylen / 8, \
2498 flags | EVP_CIPH_##MODE##_MODE, \
2499 s390x_aes_##mode##_init_key, \
2500 s390x_aes_##mode##_cipher, \
2501 s390x_aes_##mode##_cleanup, \
2502 sizeof(S390X_AES_##MODE##_CTX), \
2505 s390x_aes_##mode##_ctrl, \
2508 static const EVP_CIPHER aes_##keylen##_##mode = { \
2509 nid##_##keylen##_##mode,blocksize, \
2510 (EVP_CIPH_##MODE##_MODE == EVP_CIPH_XTS_MODE ? 2 : 1) * keylen / 8, \
2512 flags | EVP_CIPH_##MODE##_MODE, \
2513 aes_##mode##_init_key, \
2514 aes_##mode##_cipher, \
2515 aes_##mode##_cleanup, \
2516 sizeof(EVP_AES_##MODE##_CTX), \
2519 aes_##mode##_ctrl, \
2522 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2524 return S390X_aes_##keylen##_##mode##_CAPABLE ? \
2525 &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode; \
2530 # define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
2531 static const EVP_CIPHER aes_##keylen##_##mode = { \
2532 nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
2533 flags|EVP_CIPH_##MODE##_MODE, \
2535 aes_##mode##_cipher, \
2537 sizeof(EVP_AES_KEY), \
2538 NULL,NULL,NULL,NULL }; \
2539 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2540 { return &aes_##keylen##_##mode; }
2542 # define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
2543 static const EVP_CIPHER aes_##keylen##_##mode = { \
2544 nid##_##keylen##_##mode,blocksize, \
2545 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
2546 flags|EVP_CIPH_##MODE##_MODE, \
2547 aes_##mode##_init_key, \
2548 aes_##mode##_cipher, \
2549 aes_##mode##_cleanup, \
2550 sizeof(EVP_AES_##MODE##_CTX), \
2551 NULL,NULL,aes_##mode##_ctrl,NULL }; \
2552 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2553 { return &aes_##keylen##_##mode; }
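/*
 * BLOCK_CIPHER_generic and BLOCK_CIPHER_custom expand to the static
 * EVP_CIPHER tables and the EVP_aes_<keylen>_<mode>() getter for each key
 * size and mode; the s390x variants additionally pick between the s390x
 * table and the generic one at run time via the *_CAPABLE flags.
 */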
2557 #if defined(OPENSSL_CPUID_OBJ) && (defined(__arm__) || defined(__arm) || defined(__aarch64__))
2558 # include "arm_arch.h"
2559 # if __ARM_MAX_ARCH__>=7
2560 # if defined(BSAES_ASM)
2561 # define BSAES_CAPABLE (OPENSSL_armcap_P & ARMV7_NEON)
2563 # if defined(VPAES_ASM)
2564 # define VPAES_CAPABLE (OPENSSL_armcap_P & ARMV7_NEON)
2566 # define HWAES_CAPABLE (OPENSSL_armcap_P & ARMV8_AES)
2567 # define HWAES_set_encrypt_key aes_v8_set_encrypt_key
2568 # define HWAES_set_decrypt_key aes_v8_set_decrypt_key
2569 # define HWAES_encrypt aes_v8_encrypt
2570 # define HWAES_decrypt aes_v8_decrypt
2571 # define HWAES_cbc_encrypt aes_v8_cbc_encrypt
2572 # define HWAES_ctr32_encrypt_blocks aes_v8_ctr32_encrypt_blocks
2576 #if defined(HWAES_CAPABLE)
2577 int HWAES_set_encrypt_key(const unsigned char *userKey, const int bits,
2579 int HWAES_set_decrypt_key(const unsigned char *userKey, const int bits,
2581 void HWAES_encrypt(const unsigned char *in, unsigned char *out,
2582 const AES_KEY *key);
2583 void HWAES_decrypt(const unsigned char *in, unsigned char *out,
2584 const AES_KEY *key);
2585 void HWAES_cbc_encrypt(const unsigned char *in, unsigned char *out,
2586 size_t length, const AES_KEY *key,
2587 unsigned char *ivec, const int enc);
2588 void HWAES_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
2589 size_t len, const AES_KEY *key,
2590 const unsigned char ivec[16]);
2591 void HWAES_xts_encrypt(const unsigned char *inp, unsigned char *out,
2592 size_t len, const AES_KEY *key1,
2593 const AES_KEY *key2, const unsigned char iv[16]);
2594 void HWAES_xts_decrypt(const unsigned char *inp, unsigned char *out,
2595 size_t len, const AES_KEY *key1,
2596 const AES_KEY *key2, const unsigned char iv[16]);
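/*
 * HWAES_* are thin wrappers around platform AES instructions (for example
 * the aes_v8_* routines on ARMv8 above); the prototypes are only used when
 * HWAES_CAPABLE is defined for the target.
 */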
2599 #define BLOCK_CIPHER_generic_pack(nid,keylen,flags) \
2600 BLOCK_CIPHER_generic(nid,keylen,16,16,cbc,cbc,CBC,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2601 BLOCK_CIPHER_generic(nid,keylen,16,0,ecb,ecb,ECB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2602 BLOCK_CIPHER_generic(nid,keylen,1,16,ofb128,ofb,OFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2603 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb128,cfb,CFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2604 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb1,cfb1,CFB,flags) \
2605 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb8,cfb8,CFB,flags) \
2606 BLOCK_CIPHER_generic(nid,keylen,1,16,ctr,ctr,CTR,flags)
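/*
 * aes_init_key() picks the best available AES implementation for the
 * requested mode: hardware AES (HWAES_*), the bit-sliced code (bsaes_*,
 * CBC decrypt and CTR only), the vector-permutation code (vpaes_*) or the
 * generic C tables, and caches the block and stream function pointers in
 * the EVP_AES_KEY context.
 */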
2608 static int aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
2609 const unsigned char *iv, int enc)
2612 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2614 mode = EVP_CIPHER_CTX_mode(ctx);
2615 if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
2617 #ifdef HWAES_CAPABLE
2618 if (HWAES_CAPABLE) {
2619 ret = HWAES_set_decrypt_key(key,
2620 EVP_CIPHER_CTX_key_length(ctx) * 8,
2622 dat->block = (block128_f) HWAES_decrypt;
2623 dat->stream.cbc = NULL;
2624 # ifdef HWAES_cbc_encrypt
2625 if (mode == EVP_CIPH_CBC_MODE)
2626 dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
2630 #ifdef BSAES_CAPABLE
2631 if (BSAES_CAPABLE && mode == EVP_CIPH_CBC_MODE) {
2632 ret = AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2634 dat->block = (block128_f) AES_decrypt;
2635 dat->stream.cbc = (cbc128_f) bsaes_cbc_encrypt;
2638 #ifdef VPAES_CAPABLE
2639 if (VPAES_CAPABLE) {
2640 ret = vpaes_set_decrypt_key(key,
2641 EVP_CIPHER_CTX_key_length(ctx) * 8,
2643 dat->block = (block128_f) vpaes_decrypt;
2644 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2645 (cbc128_f) vpaes_cbc_encrypt : NULL;
2649 ret = AES_set_decrypt_key(key,
2650 EVP_CIPHER_CTX_key_length(ctx) * 8,
2652 dat->block = (block128_f) AES_decrypt;
2653 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2654 (cbc128_f) AES_cbc_encrypt : NULL;
2657 #ifdef HWAES_CAPABLE
2658 if (HWAES_CAPABLE) {
2659 ret = HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2661 dat->block = (block128_f) HWAES_encrypt;
2662 dat->stream.cbc = NULL;
2663 # ifdef HWAES_cbc_encrypt
2664 if (mode == EVP_CIPH_CBC_MODE)
2665 dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
2668 # ifdef HWAES_ctr32_encrypt_blocks
2669 if (mode == EVP_CIPH_CTR_MODE)
2670 dat->stream.ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
2673 (void)0; /* terminate potentially open 'else' */
2676 #ifdef BSAES_CAPABLE
2677 if (BSAES_CAPABLE && mode == EVP_CIPH_CTR_MODE) {
2678 ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2680 dat->block = (block128_f) AES_encrypt;
2681 dat->stream.ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
2684 #ifdef VPAES_CAPABLE
2685 if (VPAES_CAPABLE) {
2686 ret = vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2688 dat->block = (block128_f) vpaes_encrypt;
2689 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2690 (cbc128_f) vpaes_cbc_encrypt : NULL;
2694 ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2696 dat->block = (block128_f) AES_encrypt;
2697 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2698 (cbc128_f) AES_cbc_encrypt : NULL;
2700 if (mode == EVP_CIPH_CTR_MODE)
2701 dat->stream.ctr = (ctr128_f) AES_ctr32_encrypt;
2706 EVPerr(EVP_F_AES_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
2713 static int aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2714 const unsigned char *in, size_t len)
2716 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2718 if (dat->stream.cbc)
2719 (*dat->stream.cbc) (in, out, len, &dat->ks,
2720 EVP_CIPHER_CTX_iv_noconst(ctx),
2721 EVP_CIPHER_CTX_encrypting(ctx));
2722 else if (EVP_CIPHER_CTX_encrypting(ctx))
2723 CRYPTO_cbc128_encrypt(in, out, len, &dat->ks,
2724 EVP_CIPHER_CTX_iv_noconst(ctx), dat->block);
2726 CRYPTO_cbc128_decrypt(in, out, len, &dat->ks,
2727 EVP_CIPHER_CTX_iv_noconst(ctx), dat->block);
2732 static int aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2733 const unsigned char *in, size_t len)
2735 size_t bl = EVP_CIPHER_CTX_block_size(ctx);
2737 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2742 for (i = 0, len -= bl; i <= len; i += bl)
2743 (*dat->block) (in + i, out + i, &dat->ks);
2748 static int aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2749 const unsigned char *in, size_t len)
2751 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2753 int num = EVP_CIPHER_CTX_num(ctx);
2754 CRYPTO_ofb128_encrypt(in, out, len, &dat->ks,
2755 EVP_CIPHER_CTX_iv_noconst(ctx), &num, dat->block);
2756 EVP_CIPHER_CTX_set_num(ctx, num);
2760 static int aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2761 const unsigned char *in, size_t len)
2763 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2765 int num = EVP_CIPHER_CTX_num(ctx);
2766 CRYPTO_cfb128_encrypt(in, out, len, &dat->ks,
2767 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2768 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2769 EVP_CIPHER_CTX_set_num(ctx, num);
2773 static int aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2774 const unsigned char *in, size_t len)
2776 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2778 int num = EVP_CIPHER_CTX_num(ctx);
2779 CRYPTO_cfb128_8_encrypt(in, out, len, &dat->ks,
2780 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2781 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2782 EVP_CIPHER_CTX_set_num(ctx, num);
2786 static int aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2787 const unsigned char *in, size_t len)
2789 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2791 if (EVP_CIPHER_CTX_test_flags(ctx, EVP_CIPH_FLAG_LENGTH_BITS)) {
2792 int num = EVP_CIPHER_CTX_num(ctx);
2793 CRYPTO_cfb128_1_encrypt(in, out, len, &dat->ks,
2794 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2795 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2796 EVP_CIPHER_CTX_set_num(ctx, num);
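/*
 * For byte-oriented input, process the data in MAXBITCHUNK-sized chunks so
 * that the bit count passed to CRYPTO_cfb128_1_encrypt (len * 8) cannot
 * overflow a size_t.
 */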
2800 while (len >= MAXBITCHUNK) {
2801 int num = EVP_CIPHER_CTX_num(ctx);
2802 CRYPTO_cfb128_1_encrypt(in, out, MAXBITCHUNK * 8, &dat->ks,
2803 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2804 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2805 EVP_CIPHER_CTX_set_num(ctx, num);
2811 int num = EVP_CIPHER_CTX_num(ctx);
2812 CRYPTO_cfb128_1_encrypt(in, out, len * 8, &dat->ks,
2813 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2814 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2815 EVP_CIPHER_CTX_set_num(ctx, num);
2821 static int aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2822 const unsigned char *in, size_t len)
2824 unsigned int num = EVP_CIPHER_CTX_num(ctx);
2825 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
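/*
 * Use the optimised 32-bit counter stream routine when one was installed at
 * key setup time; otherwise fall back to the generic block-based CTR
 * implementation.
 */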
2827 if (dat->stream.ctr)
2828 CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks,
2829 EVP_CIPHER_CTX_iv_noconst(ctx),
2830 EVP_CIPHER_CTX_buf_noconst(ctx),
2831 &num, dat->stream.ctr);
2833 CRYPTO_ctr128_encrypt(in, out, len, &dat->ks,
2834 EVP_CIPHER_CTX_iv_noconst(ctx),
2835 EVP_CIPHER_CTX_buf_noconst(ctx), &num,
2837 EVP_CIPHER_CTX_set_num(ctx, num);
2841 BLOCK_CIPHER_generic_pack(NID_aes, 128, 0)
2842 BLOCK_CIPHER_generic_pack(NID_aes, 192, 0)
2843 BLOCK_CIPHER_generic_pack(NID_aes, 256, 0)
2845 static int aes_gcm_cleanup(EVP_CIPHER_CTX *c)
2847 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c);
2850 OPENSSL_cleanse(&gctx->gcm, sizeof(gctx->gcm));
2851 if (gctx->iv != EVP_CIPHER_CTX_iv_noconst(c))
2852 OPENSSL_free(gctx->iv);
2856 static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
2858 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c);
2863 gctx->ivlen = EVP_CIPHER_iv_length(c->cipher);
2867 gctx->tls_aad_len = -1;
2870 case EVP_CTRL_GET_IVLEN:
2871 *(int *)ptr = gctx->ivlen;
2874 case EVP_CTRL_AEAD_SET_IVLEN:
2877 /* Allocate memory for IV if needed */
2878 if ((arg > EVP_MAX_IV_LENGTH) && (arg > gctx->ivlen)) {
2879 if (gctx->iv != c->iv)
2880 OPENSSL_free(gctx->iv);
2881 if ((gctx->iv = OPENSSL_malloc(arg)) == NULL) {
2882 EVPerr(EVP_F_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
2889 case EVP_CTRL_AEAD_SET_TAG:
2890 if (arg <= 0 || arg > 16 || c->encrypt)
2892 memcpy(c->buf, ptr, arg);
2896 case EVP_CTRL_AEAD_GET_TAG:
2897 if (arg <= 0 || arg > 16 || !c->encrypt
2898 || gctx->taglen < 0)
2900 memcpy(ptr, c->buf, arg);
2903 case EVP_CTRL_GCM_SET_IV_FIXED:
2904 /* Special case: -1 length restores whole IV */
2906 memcpy(gctx->iv, ptr, gctx->ivlen);
2911 * Fixed field must be at least 4 bytes and invocation field at least 8 bytes.
2914 if ((arg < 4) || (gctx->ivlen - arg) < 8)
2917 memcpy(gctx->iv, ptr, arg);
2918 if (c->encrypt && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
2923 case EVP_CTRL_GCM_IV_GEN:
2924 if (gctx->iv_gen == 0 || gctx->key_set == 0)
2926 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
2927 if (arg <= 0 || arg > gctx->ivlen)
2929 memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
2931 * The invocation field will be at least 8 bytes in size, so there is no
2932 * need to check for wraparound or to increment more than the last 8 bytes.
2934 ctr64_inc(gctx->iv + gctx->ivlen - 8);
2938 case EVP_CTRL_GCM_SET_IV_INV:
2939 if (gctx->iv_gen == 0 || gctx->key_set == 0 || c->encrypt)
2941 memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
2942 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
2946 case EVP_CTRL_AEAD_TLS1_AAD:
2947 /* Save the AAD for later use */
2948 if (arg != EVP_AEAD_TLS1_AAD_LEN)
2950 memcpy(c->buf, ptr, arg);
2951 gctx->tls_aad_len = arg;
2953 unsigned int len = c->buf[arg - 2] << 8 | c->buf[arg - 1];
2954 /* Correct length for explicit IV */
2955 if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
2957 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
2958 /* If decrypting correct for tag too */
2960 if (len < EVP_GCM_TLS_TAG_LEN)
2962 len -= EVP_GCM_TLS_TAG_LEN;
2964 c->buf[arg - 2] = len >> 8;
2965 c->buf[arg - 1] = len & 0xff;
2967 /* Extra padding: tag appended to record */
2968 return EVP_GCM_TLS_TAG_LEN;
2972 EVP_CIPHER_CTX *out = ptr;
2973 EVP_AES_GCM_CTX *gctx_out = EVP_C_DATA(EVP_AES_GCM_CTX,out);
2974 if (gctx->gcm.key) {
2975 if (gctx->gcm.key != &gctx->ks)
2977 gctx_out->gcm.key = &gctx_out->ks;
2979 if (gctx->iv == c->iv)
2980 gctx_out->iv = out->iv;
2982 if ((gctx_out->iv = OPENSSL_malloc(gctx->ivlen)) == NULL) {
2983 EVPerr(EVP_F_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
2986 memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
2997 static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
2998 const unsigned char *iv, int enc)
3000 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
3005 #ifdef HWAES_CAPABLE
3006 if (HWAES_CAPABLE) {
3007 HWAES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
3008 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
3009 (block128_f) HWAES_encrypt);
3010 # ifdef HWAES_ctr32_encrypt_blocks
3011 gctx->ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
3018 #ifdef BSAES_CAPABLE
3019 if (BSAES_CAPABLE) {
3020 AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
3021 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
3022 (block128_f) AES_encrypt);
3023 gctx->ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
3027 #ifdef VPAES_CAPABLE
3028 if (VPAES_CAPABLE) {
3029 vpaes_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
3030 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
3031 (block128_f) vpaes_encrypt);
3036 (void)0; /* terminate potentially open 'else' */
3038 AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
3039 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
3040 (block128_f) AES_encrypt);
3042 gctx->ctr = (ctr128_f) AES_ctr32_encrypt;
3049 * If we have an IV we can set it directly; otherwise use the saved IV.
3051 if (iv == NULL && gctx->iv_set)
3054 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
3059 /* If key set use IV, otherwise copy */
3061 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
3063 memcpy(gctx->iv, iv, gctx->ivlen);
3071 * Handle TLS GCM packet format. This consists of the last portion of the IV
3072 * followed by the payload and finally the tag. On encrypt generate IV,
3073 * encrypt payload and write the tag. On verify retrieve IV, decrypt payload and verify the tag.
3077 static int aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3078 const unsigned char *in, size_t len)
3080 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
3082 /* Encrypt/decrypt must be performed in place */
3084 || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
3087 * Set IV from start of buffer or generate IV and write to start of the buffer.
3090 if (EVP_CIPHER_CTX_ctrl(ctx, ctx->encrypt ? EVP_CTRL_GCM_IV_GEN
3091 : EVP_CTRL_GCM_SET_IV_INV,
3092 EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
3095 if (CRYPTO_gcm128_aad(&gctx->gcm, ctx->buf, gctx->tls_aad_len))
3097 /* Fix buffer and length to point to payload */
3098 in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
3099 out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
3100 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
3102 /* Encrypt payload */
3105 #if defined(AES_GCM_ASM)
3106 if (len >= 32 && AES_GCM_ASM(gctx)) {
3107 if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
3110 bulk = AES_gcm_encrypt(in, out, len,
3112 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3113 gctx->gcm.len.u[1] += bulk;
3116 if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
3119 len - bulk, gctx->ctr))
3123 #if defined(AES_GCM_ASM2)
3124 if (len >= 32 && AES_GCM_ASM2(gctx)) {
3125 if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
3128 bulk = AES_gcm_encrypt(in, out, len,
3130 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3131 gctx->gcm.len.u[1] += bulk;
3134 if (CRYPTO_gcm128_encrypt(&gctx->gcm,
3135 in + bulk, out + bulk, len - bulk))
3139 /* Finally write tag */
3140 CRYPTO_gcm128_tag(&gctx->gcm, out, EVP_GCM_TLS_TAG_LEN);
3141 rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
3146 #if defined(AES_GCM_ASM)
3147 if (len >= 16 && AES_GCM_ASM(gctx)) {
3148 if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
3151 bulk = AES_gcm_decrypt(in, out, len,
3153 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3154 gctx->gcm.len.u[1] += bulk;
3157 if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
3160 len - bulk, gctx->ctr))
3164 #if defined(AES_GCM_ASM2)
3165 if (len >= 16 && AES_GCM_ASM2(gctx)) {
3166 if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
3169 bulk = AES_gcm_decrypt(in, out, len,
3171 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3172 gctx->gcm.len.u[1] += bulk;
3175 if (CRYPTO_gcm128_decrypt(&gctx->gcm,
3176 in + bulk, out + bulk, len - bulk))
3180 CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, EVP_GCM_TLS_TAG_LEN);
3181 /* If tag mismatch wipe buffer */
3182 if (CRYPTO_memcmp(ctx->buf, in + len, EVP_GCM_TLS_TAG_LEN)) {
3183 OPENSSL_cleanse(out, len);
3191 gctx->tls_aad_len = -1;
3195 static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3196 const unsigned char *in, size_t len)
3198 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
3199 /* If not set up, return error */
3203 if (gctx->tls_aad_len >= 0)
3204 return aes_gcm_tls_cipher(ctx, out, in, len);
3210 if (CRYPTO_gcm128_aad(&gctx->gcm, in, len))
3212 } else if (ctx->encrypt) {
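/*
 * When a stitched AES-GCM assembler routine is available (AES_GCM_ASM) and
 * the input is large enough, bulk-process whole blocks with AES_gcm_encrypt
 * and account for them in gcm.len manually; the remainder is handled by the
 * generic CRYPTO_gcm128_* code.
 */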
3215 #if defined(AES_GCM_ASM)
3216 if (len >= 32 && AES_GCM_ASM(gctx)) {
3217 size_t res = (16 - gctx->gcm.mres) % 16;
3219 if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
3222 bulk = AES_gcm_encrypt(in + res,
3223 out + res, len - res,
3224 gctx->gcm.key, gctx->gcm.Yi.c,
3226 gctx->gcm.len.u[1] += bulk;
3230 if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
3233 len - bulk, gctx->ctr))
3237 #if defined(AES_GCM_ASM2)
3238 if (len >= 32 && AES_GCM_ASM2(gctx)) {
3239 size_t res = (16 - gctx->gcm.mres) % 16;
3241 if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
3244 bulk = AES_gcm_encrypt(in + res,
3245 out + res, len - res,
3246 gctx->gcm.key, gctx->gcm.Yi.c,
3248 gctx->gcm.len.u[1] += bulk;
3252 if (CRYPTO_gcm128_encrypt(&gctx->gcm,
3253 in + bulk, out + bulk, len - bulk))
3259 #if defined(AES_GCM_ASM)
3260 if (len >= 16 && AES_GCM_ASM(gctx)) {
3261 size_t res = (16 - gctx->gcm.mres) % 16;
3263 if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
3266 bulk = AES_gcm_decrypt(in + res,
3267 out + res, len - res,
3269 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3270 gctx->gcm.len.u[1] += bulk;
3274 if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
3277 len - bulk, gctx->ctr))
3281 #if defined(AES_GCM_ASM2)
3282 if (len >= 16 && AES_GCM_ASM2(gctx)) {
3283 size_t res = (16 - gctx->gcm.mres) % 16;
3285 if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
3288 bulk = AES_gcm_decrypt(in + res,
3289 out + res, len - res,
3291 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3292 gctx->gcm.len.u[1] += bulk;
3296 if (CRYPTO_gcm128_decrypt(&gctx->gcm,
3297 in + bulk, out + bulk, len - bulk))
3303 if (!ctx->encrypt) {
3304 if (gctx->taglen < 0)
3306 if (CRYPTO_gcm128_finish(&gctx->gcm, ctx->buf, gctx->taglen) != 0)
3311 CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, 16);
3313 /* Don't reuse the IV */
3320 #define CUSTOM_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 \
3321 | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
3322 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
3323 | EVP_CIPH_CUSTOM_COPY | EVP_CIPH_CUSTOM_IV_LENGTH)
3325 BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, gcm, GCM,
3326 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3327 BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, gcm, GCM,
3328 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3329 BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, gcm, GCM,
3330 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
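/*
 * Note: a typical EVP-level GCM encryption sequence (standard public EVP
 * calls, sketched here for orientation only) is:
 *   EVP_EncryptInit_ex(ctx, EVP_aes_256_gcm(), NULL, NULL, NULL);
 *   EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_IVLEN, ivlen, NULL);
 *   EVP_EncryptInit_ex(ctx, NULL, NULL, key, iv);
 *   EVP_EncryptUpdate(ctx, NULL, &outl, aad, aadlen);    (AAD pass)
 *   EVP_EncryptUpdate(ctx, out, &outl, in, inlen);
 *   EVP_EncryptFinal_ex(ctx, out + outl, &tmplen);
 *   EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_GET_TAG, 16, tag);
 */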
3332 static int aes_xts_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
3334 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX, c);
3336 if (type == EVP_CTRL_COPY) {
3337 EVP_CIPHER_CTX *out = ptr;
3338 EVP_AES_XTS_CTX *xctx_out = EVP_C_DATA(EVP_AES_XTS_CTX,out);
3340 if (xctx->xts.key1) {
3341 if (xctx->xts.key1 != &xctx->ks1)
3343 xctx_out->xts.key1 = &xctx_out->ks1;
3345 if (xctx->xts.key2) {
3346 if (xctx->xts.key2 != &xctx->ks2)
3348 xctx_out->xts.key2 = &xctx_out->ks2;
3351 } else if (type != EVP_CTRL_INIT)
3353 /* key1 and key2 are used as indicators that both key and IV are set */
3354 xctx->xts.key1 = NULL;
3355 xctx->xts.key2 = NULL;
3359 static int aes_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3360 const unsigned char *iv, int enc)
3362 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
3369 /* The key is really two half-length keys concatenated */
3370 const int bytes = EVP_CIPHER_CTX_key_length(ctx) / 2;
3373 * Verify that the two keys are different.
3375 * This addresses the vulnerability described in Rogaway's
3376 * September 2004 paper:
3378 * "Efficient Instantiations of Tweakable Blockciphers and
3379 * Refinements to Modes OCB and PMAC".
3380 * (http://web.cs.ucdavis.edu/~rogaway/papers/offsets.pdf)
3382 * FIPS 140-2 IG A.9 XTS-AES Key Generation Requirements states
3384 * "The check for Key_1 != Key_2 shall be done at any place
3385 * BEFORE using the keys in the XTS-AES algorithm to process data with them."
3388 if (enc && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
3389 EVPerr(EVP_F_AES_XTS_INIT_KEY, EVP_R_XTS_DUPLICATED_KEYS);
3394 xctx->stream = enc ? AES_xts_encrypt : AES_xts_decrypt;
3396 xctx->stream = NULL;
3398 /* key_len is two AES keys */
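/*
 * EVP_CIPHER_CTX_key_length(ctx) covers both halves of the XTS key, so each
 * individual AES key below is scheduled with key_length * 8 / 2 =
 * key_length * 4 bits.
 */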
3399 #ifdef HWAES_CAPABLE
3400 if (HWAES_CAPABLE) {
3402 HWAES_set_encrypt_key(key,
3403 EVP_CIPHER_CTX_key_length(ctx) * 4,
3405 xctx->xts.block1 = (block128_f) HWAES_encrypt;
3406 # ifdef HWAES_xts_encrypt
3407 xctx->stream = HWAES_xts_encrypt;
3410 HWAES_set_decrypt_key(key,
3411 EVP_CIPHER_CTX_key_length(ctx) * 4,
3413 xctx->xts.block1 = (block128_f) HWAES_decrypt;
3414 # ifdef HWAES_xts_decrypt
3415 xctx->stream = HWAES_xts_decrypt;
3419 HWAES_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
3420 EVP_CIPHER_CTX_key_length(ctx) * 4,
3422 xctx->xts.block2 = (block128_f) HWAES_encrypt;
3424 xctx->xts.key1 = &xctx->ks1;
3428 #ifdef BSAES_CAPABLE
3430 xctx->stream = enc ? bsaes_xts_encrypt : bsaes_xts_decrypt;
3433 #ifdef VPAES_CAPABLE
3434 if (VPAES_CAPABLE) {
3436 vpaes_set_encrypt_key(key,
3437 EVP_CIPHER_CTX_key_length(ctx) * 4,
3439 xctx->xts.block1 = (block128_f) vpaes_encrypt;
3441 vpaes_set_decrypt_key(key,
3442 EVP_CIPHER_CTX_key_length(ctx) * 4,
3444 xctx->xts.block1 = (block128_f) vpaes_decrypt;
3447 vpaes_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
3448 EVP_CIPHER_CTX_key_length(ctx) * 4,
3450 xctx->xts.block2 = (block128_f) vpaes_encrypt;
3452 xctx->xts.key1 = &xctx->ks1;
3456 (void)0; /* terminate potentially open 'else' */
3459 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
3461 xctx->xts.block1 = (block128_f) AES_encrypt;
3463 AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
3465 xctx->xts.block1 = (block128_f) AES_decrypt;
3468 AES_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
3469 EVP_CIPHER_CTX_key_length(ctx) * 4,
3471 xctx->xts.block2 = (block128_f) AES_encrypt;
3473 xctx->xts.key1 = &xctx->ks1;
3477 xctx->xts.key2 = &xctx->ks2;
3478 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
3484 static int aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3485 const unsigned char *in, size_t len)
3487 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
3488 if (!xctx->xts.key1 || !xctx->xts.key2)
3490 if (!out || !in || len < AES_BLOCK_SIZE)
3493 (*xctx->stream) (in, out, len,
3494 xctx->xts.key1, xctx->xts.key2,
3495 EVP_CIPHER_CTX_iv_noconst(ctx));
3496 else if (CRYPTO_xts128_encrypt(&xctx->xts, EVP_CIPHER_CTX_iv_noconst(ctx),
3498 EVP_CIPHER_CTX_encrypting(ctx)))
3503 #define aes_xts_cleanup NULL
3505 #define XTS_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 | EVP_CIPH_CUSTOM_IV \
3506 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
3507 | EVP_CIPH_CUSTOM_COPY)
3509 BLOCK_CIPHER_custom(NID_aes, 128, 1, 16, xts, XTS, XTS_FLAGS)
3510 BLOCK_CIPHER_custom(NID_aes, 256, 1, 16, xts, XTS, XTS_FLAGS)
3512 static int aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
3514 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,c);
3523 cctx->tls_aad_len = -1;
3525 case EVP_CTRL_GET_IVLEN:
3526 *(int *)ptr = 15 - cctx->L;
3528 case EVP_CTRL_AEAD_TLS1_AAD:
3529 /* Save the AAD for later use */
3530 if (arg != EVP_AEAD_TLS1_AAD_LEN)
3532 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
3533 cctx->tls_aad_len = arg;
3536 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
3537 | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];
3538 /* Correct length for explicit IV */
3539 if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
3541 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
3542 /* If decrypting correct for tag too */
3543 if (!EVP_CIPHER_CTX_encrypting(c)) {
3548 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
3549 EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
3551 /* Extra padding: tag appended to record */
3554 case EVP_CTRL_CCM_SET_IV_FIXED:
3555 /* Sanity check length */
3556 if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
3558 /* Just copy to first part of IV */
3559 memcpy(EVP_CIPHER_CTX_iv_noconst(c), ptr, arg);
3562 case EVP_CTRL_AEAD_SET_IVLEN:
3565 case EVP_CTRL_CCM_SET_L:
3566 if (arg < 2 || arg > 8)
3571 case EVP_CTRL_AEAD_SET_TAG:
3572 if ((arg & 1) || arg < 4 || arg > 16)
3574 if (EVP_CIPHER_CTX_encrypting(c) && ptr)
3578 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
3583 case EVP_CTRL_AEAD_GET_TAG:
3584 if (!EVP_CIPHER_CTX_encrypting(c) || !cctx->tag_set)
3586 if (!CRYPTO_ccm128_tag(&cctx->ccm, ptr, (size_t)arg))
3595 EVP_CIPHER_CTX *out = ptr;
3596 EVP_AES_CCM_CTX *cctx_out = EVP_C_DATA(EVP_AES_CCM_CTX,out);
3597 if (cctx->ccm.key) {
3598 if (cctx->ccm.key != &cctx->ks)
3600 cctx_out->ccm.key = &cctx_out->ks;
3611 static int aes_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3612 const unsigned char *iv, int enc)
3614 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
3619 #ifdef HWAES_CAPABLE
3620 if (HWAES_CAPABLE) {
3621 HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3624 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3625 &cctx->ks, (block128_f) HWAES_encrypt);
3631 #ifdef VPAES_CAPABLE
3632 if (VPAES_CAPABLE) {
3633 vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3635 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3636 &cctx->ks, (block128_f) vpaes_encrypt);
3642 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3644 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3645 &cctx->ks, (block128_f) AES_encrypt);
3650 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
3656 static int aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3657 const unsigned char *in, size_t len)
3659 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
3660 CCM128_CONTEXT *ccm = &cctx->ccm;
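/*
 * TLS CCM records are laid out as: 8-byte explicit nonce part, payload,
 * M-byte tag; everything is processed in place in the single buffer.
 */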
3661 /* Encrypt/decrypt must be performed in place */
3662 if (out != in || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->M))
3664 /* If encrypting set explicit IV from sequence number (start of AAD) */
3665 if (EVP_CIPHER_CTX_encrypting(ctx))
3666 memcpy(out, EVP_CIPHER_CTX_buf_noconst(ctx),
3667 EVP_CCM_TLS_EXPLICIT_IV_LEN);
3668 /* Get rest of IV from explicit IV */
3669 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx) + EVP_CCM_TLS_FIXED_IV_LEN, in,
3670 EVP_CCM_TLS_EXPLICIT_IV_LEN);
3671 /* Correct length value */
3672 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
3673 if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx), 15 - cctx->L,
3677 CRYPTO_ccm128_aad(ccm, EVP_CIPHER_CTX_buf_noconst(ctx), cctx->tls_aad_len);
3678 /* Fix buffer to point to payload */
3679 in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
3680 out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
3681 if (EVP_CIPHER_CTX_encrypting(ctx)) {
3682 if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
3684 CRYPTO_ccm128_encrypt(ccm, in, out, len))
3686 if (!CRYPTO_ccm128_tag(ccm, out + len, cctx->M))
3688 return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
3690 if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
3692 !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
3693 unsigned char tag[16];
3694 if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
3695 if (!CRYPTO_memcmp(tag, in + len, cctx->M))
3699 OPENSSL_cleanse(out, len);
3704 static int aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3705 const unsigned char *in, size_t len)
3707 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
3708 CCM128_CONTEXT *ccm = &cctx->ccm;
3709 /* If not set up, return error */
3713 if (cctx->tls_aad_len >= 0)
3714 return aes_ccm_tls_cipher(ctx, out, in, len);
3716 /* EVP_*Final() doesn't return any data */
3717 if (in == NULL && out != NULL)
3725 if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx),
3731 /* If we have AAD we need the message length */
3732 if (!cctx->len_set && len)
3734 CRYPTO_ccm128_aad(ccm, in, len);
3738 /* The tag must be set before actually decrypting data */
3739 if (!EVP_CIPHER_CTX_encrypting(ctx) && !cctx->tag_set)
3742 /* If the length has not been set yet, set it now */
3743 if (!cctx->len_set) {
3744 if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx),
3749 if (EVP_CIPHER_CTX_encrypting(ctx)) {
3750 if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
3752 CRYPTO_ccm128_encrypt(ccm, in, out, len))
3758 if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
3760 !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
3761 unsigned char tag[16];
3762 if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
3763 if (!CRYPTO_memcmp(tag, EVP_CIPHER_CTX_buf_noconst(ctx),
3769 OPENSSL_cleanse(out, len);
3777 #define aes_ccm_cleanup NULL
3779 BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, ccm, CCM,
3780 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3781 BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, ccm, CCM,
3782 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3783 BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, ccm, CCM,
3784 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
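/*
 * Note on EVP-level CCM usage (a brief sketch, not specific to this file):
 * CCM needs the total plaintext length before any data is processed, so a
 * caller normally passes the length first with
 *   EVP_EncryptUpdate(ctx, NULL, &outl, NULL, msglen);
 * then any AAD, then the payload.
 */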
3791 /* Indicates if IV has been set */
3795 static int aes_wrap_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3796 const unsigned char *iv, int enc)
3798 EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx);
3802 if (EVP_CIPHER_CTX_encrypting(ctx))
3803 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3806 AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3812 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, EVP_CIPHER_CTX_iv_length(ctx));
3813 wctx->iv = EVP_CIPHER_CTX_iv_noconst(ctx);
3818 static int aes_wrap_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3819 const unsigned char *in, size_t inlen)
3821 EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx);
3823 /* AES wrap with padding has IV length of 4, without padding 8 */
3824 int pad = EVP_CIPHER_CTX_iv_length(ctx) == 4;
3825 /* No final operation so always return zero length */
3828 /* Input length must always be non-zero */
3831 /* If decrypting need at least 16 bytes and multiple of 8 */
3832 if (!EVP_CIPHER_CTX_encrypting(ctx) && (inlen < 16 || inlen & 0x7))
3834 /* If not padding input must be multiple of 8 */
3835 if (!pad && inlen & 0x7)
3837 if (is_partially_overlapping(out, in, inlen)) {
3838 EVPerr(EVP_F_AES_WRAP_CIPHER, EVP_R_PARTIALLY_OVERLAPPING);
3842 if (EVP_CIPHER_CTX_encrypting(ctx)) {
3843 /* If padding round up to multiple of 8 */
3845 inlen = (inlen + 7) / 8 * 8;
3850 * If not padding, the output will be exactly 8 bytes smaller than the
3851 * input. If padding, it will be at least 8 bytes smaller but we don't
3852 * know by how much.
3858 if (EVP_CIPHER_CTX_encrypting(ctx))
3859 rv = CRYPTO_128_wrap_pad(&wctx->ks.ks, wctx->iv,
3861 (block128_f) AES_encrypt);
3863 rv = CRYPTO_128_unwrap_pad(&wctx->ks.ks, wctx->iv,
3865 (block128_f) AES_decrypt);
3867 if (EVP_CIPHER_CTX_encrypting(ctx))
3868 rv = CRYPTO_128_wrap(&wctx->ks.ks, wctx->iv,
3869 out, in, inlen, (block128_f) AES_encrypt);
3871 rv = CRYPTO_128_unwrap(&wctx->ks.ks, wctx->iv,
3872 out, in, inlen, (block128_f) AES_decrypt);
3874 return rv ? (int)rv : -1;
3877 #define WRAP_FLAGS (EVP_CIPH_WRAP_MODE \
3878 | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
3879 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_FLAG_DEFAULT_ASN1)
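/*
 * The AES key wrap ciphers below implement RFC 3394 wrapping (8-byte default
 * IV, "wrap") and RFC 5649 wrapping with padding (4-byte AIV, "wrap_pad");
 * they are intended for wrapping key material rather than bulk data.
 */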
3881 static const EVP_CIPHER aes_128_wrap = {
3883 8, 16, 8, WRAP_FLAGS,
3884 aes_wrap_init_key, aes_wrap_cipher,
3886 sizeof(EVP_AES_WRAP_CTX),
3887 NULL, NULL, NULL, NULL
3890 const EVP_CIPHER *EVP_aes_128_wrap(void)
3892 return &aes_128_wrap;
3895 static const EVP_CIPHER aes_192_wrap = {
3897 8, 24, 8, WRAP_FLAGS,
3898 aes_wrap_init_key, aes_wrap_cipher,
3900 sizeof(EVP_AES_WRAP_CTX),
3901 NULL, NULL, NULL, NULL
3904 const EVP_CIPHER *EVP_aes_192_wrap(void)
3906 return &aes_192_wrap;
3909 static const EVP_CIPHER aes_256_wrap = {
3911 8, 32, 8, WRAP_FLAGS,
3912 aes_wrap_init_key, aes_wrap_cipher,
3914 sizeof(EVP_AES_WRAP_CTX),
3915 NULL, NULL, NULL, NULL
3918 const EVP_CIPHER *EVP_aes_256_wrap(void)
3920 return &aes_256_wrap;
3923 static const EVP_CIPHER aes_128_wrap_pad = {
3924 NID_id_aes128_wrap_pad,
3925 8, 16, 4, WRAP_FLAGS,
3926 aes_wrap_init_key, aes_wrap_cipher,
3928 sizeof(EVP_AES_WRAP_CTX),
3929 NULL, NULL, NULL, NULL
3932 const EVP_CIPHER *EVP_aes_128_wrap_pad(void)
3934 return &aes_128_wrap_pad;
3937 static const EVP_CIPHER aes_192_wrap_pad = {
3938 NID_id_aes192_wrap_pad,
3939 8, 24, 4, WRAP_FLAGS,
3940 aes_wrap_init_key, aes_wrap_cipher,
3942 sizeof(EVP_AES_WRAP_CTX),
3943 NULL, NULL, NULL, NULL
3946 const EVP_CIPHER *EVP_aes_192_wrap_pad(void)
3948 return &aes_192_wrap_pad;
3951 static const EVP_CIPHER aes_256_wrap_pad = {
3952 NID_id_aes256_wrap_pad,
3953 8, 32, 4, WRAP_FLAGS,
3954 aes_wrap_init_key, aes_wrap_cipher,
3956 sizeof(EVP_AES_WRAP_CTX),
3957 NULL, NULL, NULL, NULL
3960 const EVP_CIPHER *EVP_aes_256_wrap_pad(void)
3962 return &aes_256_wrap_pad;
3965 #ifndef OPENSSL_NO_OCB
3966 static int aes_ocb_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
3968 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
3969 EVP_CIPHER_CTX *newc;
3970 EVP_AES_OCB_CTX *new_octx;
3976 octx->ivlen = EVP_CIPHER_iv_length(c->cipher);
3977 octx->iv = EVP_CIPHER_CTX_iv_noconst(c);
3979 octx->data_buf_len = 0;
3980 octx->aad_buf_len = 0;
3983 case EVP_CTRL_GET_IVLEN:
3984 *(int *)ptr = octx->ivlen;
3987 case EVP_CTRL_AEAD_SET_IVLEN:
3988 /* IV len must be 1 to 15 */
3989 if (arg <= 0 || arg > 15)
3995 case EVP_CTRL_AEAD_SET_TAG:
3997 /* Tag len must be 0 to 16 */
3998 if (arg < 0 || arg > 16)
4004 if (arg != octx->taglen || EVP_CIPHER_CTX_encrypting(c))
4006 memcpy(octx->tag, ptr, arg);
4009 case EVP_CTRL_AEAD_GET_TAG:
4010 if (arg != octx->taglen || !EVP_CIPHER_CTX_encrypting(c))
4013 memcpy(ptr, octx->tag, arg);
4017 newc = (EVP_CIPHER_CTX *)ptr;
4018 new_octx = EVP_C_DATA(EVP_AES_OCB_CTX,newc);
4019 return CRYPTO_ocb128_copy_ctx(&new_octx->ocb, &octx->ocb,
4020 &new_octx->ksenc.ks,
4021 &new_octx->ksdec.ks);
4029 # ifdef HWAES_CAPABLE
4030 # ifdef HWAES_ocb_encrypt
4031 void HWAES_ocb_encrypt(const unsigned char *in, unsigned char *out,
4032 size_t blocks, const void *key,
4033 size_t start_block_num,
4034 unsigned char offset_i[16],
4035 const unsigned char L_[][16],
4036 unsigned char checksum[16]);
4038 # define HWAES_ocb_encrypt ((ocb128_f)NULL)
4040 # ifdef HWAES_ocb_decrypt
4041 void HWAES_ocb_decrypt(const unsigned char *in, unsigned char *out,
4042 size_t blocks, const void *key,
4043 size_t start_block_num,
4044 unsigned char offset_i[16],
4045 const unsigned char L_[][16],
4046 unsigned char checksum[16]);
4048 # define HWAES_ocb_decrypt ((ocb128_f)NULL)
4052 static int aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
4053 const unsigned char *iv, int enc)
4055 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
4061 * We set both the encrypt and decrypt key here because decryption
4062 * needs both. We could possibly optimise this by not setting the
4063 * decrypt key for an encryption operation.
4065 # ifdef HWAES_CAPABLE
4066 if (HWAES_CAPABLE) {
4067 HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
4069 HWAES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
4071 if (!CRYPTO_ocb128_init(&octx->ocb,
4072 &octx->ksenc.ks, &octx->ksdec.ks,
4073 (block128_f) HWAES_encrypt,
4074 (block128_f) HWAES_decrypt,
4075 enc ? HWAES_ocb_encrypt
4076 : HWAES_ocb_decrypt))
4081 # ifdef VPAES_CAPABLE
4082 if (VPAES_CAPABLE) {
4083 vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
4085 vpaes_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
4087 if (!CRYPTO_ocb128_init(&octx->ocb,
4088 &octx->ksenc.ks, &octx->ksdec.ks,
4089 (block128_f) vpaes_encrypt,
4090 (block128_f) vpaes_decrypt,
4096 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
4098 AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
4100 if (!CRYPTO_ocb128_init(&octx->ocb,
4101 &octx->ksenc.ks, &octx->ksdec.ks,
4102 (block128_f) AES_encrypt,
4103 (block128_f) AES_decrypt,
4110 * If we have an iv we can set it directly, otherwise use saved IV.
4112 if (iv == NULL && octx->iv_set)
4115 if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
4122 /* If key set use IV, otherwise copy */
4124 CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
4126 memcpy(octx->iv, iv, octx->ivlen);
4132 static int aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
4133 const unsigned char *in, size_t len)
4137 int written_len = 0;
4138 size_t trailing_len;
4139 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
4141 /* If IV or Key not set then return error */
4150 * We need to ensure we only pass full blocks to the low-level OCB
4151 * routines. We do it here rather than in EVP_EncryptUpdate/
4152 * EVP_DecryptUpdate because we need to pass full blocks of AAD too,
4153 * and those routines don't support that.
4156 /* Are we dealing with AAD or normal data here? */
4158 buf = octx->aad_buf;
4159 buf_len = &(octx->aad_buf_len);
4161 buf = octx->data_buf;
4162 buf_len = &(octx->data_buf_len);
4164 if (is_partially_overlapping(out + *buf_len, in, len)) {
4165 EVPerr(EVP_F_AES_OCB_CIPHER, EVP_R_PARTIALLY_OVERLAPPING);
4171 * If we've got a partially filled buffer from a previous call then
4172 * use that data first
4175 unsigned int remaining;
4177 remaining = AES_BLOCK_SIZE - (*buf_len);
4178 if (remaining > len) {
4179 memcpy(buf + (*buf_len), in, len);
4183 memcpy(buf + (*buf_len), in, remaining);
4186 * If we get here we've filled the buffer, so process it
4191 if (!CRYPTO_ocb128_aad(&octx->ocb, buf, AES_BLOCK_SIZE))
4193 } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
4194 if (!CRYPTO_ocb128_encrypt(&octx->ocb, buf, out,
4198 if (!CRYPTO_ocb128_decrypt(&octx->ocb, buf, out,
4202 written_len = AES_BLOCK_SIZE;
4205 out += AES_BLOCK_SIZE;
4208 /* Do we have a partial block to handle at the end? */
4209 trailing_len = len % AES_BLOCK_SIZE;
4212 * If we've got some full blocks to handle, then process these first
4214 if (len != trailing_len) {
4216 if (!CRYPTO_ocb128_aad(&octx->ocb, in, len - trailing_len))
4218 } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
4219 if (!CRYPTO_ocb128_encrypt
4220 (&octx->ocb, in, out, len - trailing_len))
4223 if (!CRYPTO_ocb128_decrypt
4224 (&octx->ocb, in, out, len - trailing_len))
4227 written_len += len - trailing_len;
4228 in += len - trailing_len;
4231 /* Handle any trailing partial block */
4232 if (trailing_len > 0) {
4233 memcpy(buf, in, trailing_len);
4234 *buf_len = trailing_len;
4240 * First of all empty the buffer of any partial block that we might
4241 * have been provided - both for data and AAD
4243 if (octx->data_buf_len > 0) {
4244 if (EVP_CIPHER_CTX_encrypting(ctx)) {
4245 if (!CRYPTO_ocb128_encrypt(&octx->ocb, octx->data_buf, out,
4246 octx->data_buf_len))
4249 if (!CRYPTO_ocb128_decrypt(&octx->ocb, octx->data_buf, out,
4250 octx->data_buf_len))
4253 written_len = octx->data_buf_len;
4254 octx->data_buf_len = 0;
4256 if (octx->aad_buf_len > 0) {
4257 if (!CRYPTO_ocb128_aad
4258 (&octx->ocb, octx->aad_buf, octx->aad_buf_len))
4260 octx->aad_buf_len = 0;
4262 /* If decrypting then verify */
4263 if (!EVP_CIPHER_CTX_encrypting(ctx)) {
4264 if (octx->taglen < 0)
4266 if (CRYPTO_ocb128_finish(&octx->ocb,
4267 octx->tag, octx->taglen) != 0)
4272 /* If encrypting then just get the tag */
4273 if (CRYPTO_ocb128_tag(&octx->ocb, octx->tag, 16) != 1)
4275 /* Don't reuse the IV */
4281 static int aes_ocb_cleanup(EVP_CIPHER_CTX *c)
4283 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
4284 CRYPTO_ocb128_cleanup(&octx->ocb);
4288 BLOCK_CIPHER_custom(NID_aes, 128, 16, 12, ocb, OCB,
4289 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
4290 BLOCK_CIPHER_custom(NID_aes, 192, 16, 12, ocb, OCB,
4291 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
4292 BLOCK_CIPHER_custom(NID_aes, 256, 16, 12, ocb, OCB,
4293 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
4294 #endif /* OPENSSL_NO_OCB */