/*-
 * Copyright (C) 2008 Damien Miller <djm@mindrot.org>
 * Copyright (c) 2010 Konstantin Belousov <kib@FreeBSD.org>
 * Copyright (c) 2010-2011 Pawel Jakub Dawidek <pawel@dawidek.net>
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */
29 #include <sys/cdefs.h>
30 __FBSDID("$FreeBSD$");
32 #include <sys/param.h>
33 #include <sys/libkern.h>
34 #include <sys/malloc.h>
36 #include <sys/systm.h>
37 #include <crypto/aesni/aesni.h>
/* Driver-wide malloc(9) type; the matching MALLOC_DEFINE lives elsewhere. */
39 MALLOC_DECLARE(M_AESNI);
/*
 * AES-CBC encryption over a contiguous buffer.  `iv' seeds the chain; the
 * declaration and advancing of the chaining pointer `ivp', and the advance
 * of `to', are on lines elided from this extract.
 * NOTE(review): the XTS path below converts `len' from bytes to blocks
 * before its loop -- confirm the same conversion happens on the elided
 * lines here, since the visible loop iterates `len' times.
 */
42 aesni_encrypt_cbc(int rounds, const void *key_schedule, size_t len,
43 const uint8_t *from, uint8_t *to, const uint8_t iv[AES_BLOCK_LEN])
50 for (i = 0; i < len; i++) {
/*
 * One block, chained through `ivp'.  NOTE(review): every call site in
 * this file passes rounds - 1; the off-by-one convention belongs to the
 * low-level aesni_enc/aesni_dec primitives -- confirm against them.
 */
51 aesni_enc(rounds - 1, key_schedule, from, to, ivp);
53 from += AES_BLOCK_LEN;
/*
 * AES-ECB encryption: each block is processed independently -- the final
 * argument that carries the CBC chaining pointer is NULL here.  The
 * advance of `to' is on an elided line.
 */
59 aesni_encrypt_ecb(int rounds, const void *key_schedule, size_t len,
60 const uint8_t from[AES_BLOCK_LEN], uint8_t to[AES_BLOCK_LEN])
65 for (i = 0; i < len; i++) {
66 aesni_enc(rounds - 1, key_schedule, from, to, NULL);
67 from += AES_BLOCK_LEN;
/*
 * AES-ECB decryption: mirror of aesni_encrypt_ecb() using the decryption
 * primitive and (per the setup code below) the decryption key schedule.
 * No chaining -- the final argument is NULL.
 */
73 aesni_decrypt_ecb(int rounds, const void *key_schedule, size_t len,
74 const uint8_t from[AES_BLOCK_LEN], uint8_t to[AES_BLOCK_LEN])
79 for (i = 0; i < len; i++) {
80 aesni_dec(rounds - 1, key_schedule, from, to, NULL);
81 from += AES_BLOCK_LEN;
/*
 * XTS constants: 128-bit (16-byte) cipher blocks; the IV carries a
 * 64-bit (8-byte) little-endian block number (see aesni_crypt_xts()).
 */
86 #define AES_XTS_BLOCKSIZE 16
87 #define AES_XTS_IVSIZE 8
88 #define AES_XTS_ALPHA 0x87 /* GF(2^128) generator polynomial */
/*
 * Process one XTS block: XOR the input with the current tweak, run the
 * block cipher in the direction selected by `do_encrypt', then multiply
 * the tweak by alpha in GF(2^128) for the next block.  The post-whitening
 * XOR, the do_encrypt branch keywords, and the 64-bit shifts of the tweak
 * are on lines elided from this extract.
 */
91 aesni_crypt_xts_block(int rounds, const void *key_schedule, uint64_t *tweak,
92 const uint64_t *from, uint64_t *to, uint64_t *block, int do_encrypt)
/* Pre-whiten: XOR the 128-bit input with the tweak, as two 64-bit lanes. */
96 block[0] = from[0] ^ tweak[0];
97 block[1] = from[1] ^ tweak[1];
/* Encrypt path ... decrypt path (the selecting if/else is elided). */
100 aesni_enc(rounds - 1, key_schedule, (uint8_t *)block, (uint8_t *)to, NULL);
102 aesni_dec(rounds - 1, key_schedule, (uint8_t *)block, (uint8_t *)to, NULL);
107 /* Exponentiate tweak. */
/* Carry out of the low 64-bit lane, to be shifted into the high lane. */
108 carry = ((tweak[0] & 0x8000000000000000ULL) > 0);
/*
 * If the 128-bit tweak's top bit is set, the doubling overflows the
 * field: fold the reduction polynomial (alpha) into the low byte.
 */
110 if (tweak[1] & 0x8000000000000000ULL) {
111 uint8_t *twk = (uint8_t *)tweak;
113 twk[0] ^= AES_XTS_ALPHA;
/*
 * Common XTS worker for both directions: derive the starting tweak by
 * encrypting the IV under the tweak key (E_k2), then walk the buffer one
 * 16-byte block at a time via aesni_crypt_xts_block().  Scrubs the tweak
 * and scratch block before returning so key-derived material does not
 * linger on the stack.
 */
121 aesni_crypt_xts(int rounds, const void *data_schedule,
122 const void *tweak_schedule, size_t len, const uint8_t *from, uint8_t *to,
123 const uint8_t iv[AES_BLOCK_LEN], int do_encrypt)
125 uint64_t block[AES_XTS_BLOCKSIZE / 8];
126 uint8_t tweak[AES_XTS_BLOCKSIZE];
130 * Prepare tweak as E_k2(IV). IV is specified as LE representation
131 * of a 64-bit block number which we allow to be passed in directly.
133 #if BYTE_ORDER == LITTLE_ENDIAN
134 bcopy(iv, tweak, AES_XTS_IVSIZE);
135 /* Last 64 bits of IV are always zero. */
136 bzero(tweak + AES_XTS_IVSIZE, AES_XTS_IVSIZE);
138 #error Only LITTLE_ENDIAN architectures are supported.
/* tweak = E_k2(tweak), computed in place under the tweak-key schedule. */
140 aesni_enc(rounds - 1, tweak_schedule, tweak, tweak, NULL);
/* Convert the byte count to a whole-block count. */
142 len /= AES_XTS_BLOCKSIZE;
143 for (i = 0; i < len; i++) {
144 aesni_crypt_xts_block(rounds, data_schedule, (uint64_t *)tweak,
145 (const uint64_t *)from, (uint64_t *)to, block, do_encrypt);
146 from += AES_XTS_BLOCKSIZE;
147 to += AES_XTS_BLOCKSIZE;
/* Scrub secret-derived state before leaving. */
150 bzero(tweak, sizeof(tweak));
151 bzero(block, sizeof(block));
/*
 * XTS encryption entry point: thin wrapper around aesni_crypt_xts().
 * The trailing iv/do_encrypt arguments of the call are on an elided line.
 */
155 aesni_encrypt_xts(int rounds, const void *data_schedule,
156 const void *tweak_schedule, size_t len, const uint8_t *from, uint8_t *to,
157 const uint8_t iv[AES_BLOCK_LEN])
160 aesni_crypt_xts(rounds, data_schedule, tweak_schedule, len, from, to,
/*
 * XTS decryption entry point: thin wrapper around aesni_crypt_xts().
 * The trailing iv/do_encrypt arguments of the call are on an elided line.
 */
165 aesni_decrypt_xts(int rounds, const void *data_schedule,
166 const void *tweak_schedule, size_t len, const uint8_t *from, uint8_t *to,
167 const uint8_t iv[AES_BLOCK_LEN])
170 aesni_crypt_xts(rounds, data_schedule, tweak_schedule, len, from, to,
/*
 * Populate a session's key schedules from the raw key.  The switch
 * scaffolding around the visible assignments is elided; the round counts
 * suggest 128/192/256-bit keys for one algorithm and a 128/256 pair for
 * the other (XTS keys are doubled) -- NOTE(review): inferred, confirm
 * against the full source.
 */
175 aesni_cipher_setup_common(struct aesni_session *ses, const uint8_t *key,
183 ses->rounds = AES128_ROUNDS;
186 ses->rounds = AES192_ROUNDS;
189 ses->rounds = AES256_ROUNDS;
198 ses->rounds = AES128_ROUNDS;
201 ses->rounds = AES256_ROUNDS;
/* Expand the encryption schedule, then derive the decryption one from it. */
211 aesni_set_enckey(key, ses->enc_schedule, ses->rounds);
212 aesni_set_deckey(ses->enc_schedule, ses->dec_schedule, ses->rounds);
/* CBC: random starting IV.  XTS: no stored IV; tweak comes per request. */
213 if (ses->algo == CRYPTO_AES_CBC)
214 arc4rand(ses->iv, sizeof(ses->iv), 0);
215 else /* if (ses->algo == CRYPTO_AES_XTS) */ {
/*
 * Second half of the doubled XTS key becomes the tweak schedule.
 * NOTE(review): keylen / 16 implies `keylen' is in bits
 * (bits / 8 / 2 bytes) -- confirm the caller's units.
 */
216 aesni_set_enckey(key + keylen / 16, ses->xts_schedule,
/*
 * Session setup wrapper: run aesni_cipher_setup_common() inside an
 * FPU-kernel section, since the AESNI primitives clobber FPU/SSE state.
 * A context is entered only when the current thread is not already an
 * FPU kernel thread; the matching leave is conditional on the elided
 * `saved_ctx' bookkeeping.
 */
224 aesni_cipher_setup(struct aesni_session *ses, struct cryptoini *encini)
227 int error, saved_ctx;
/* Borrow an FPU context unless this thread already owns one. */
230 if (!is_fpu_kern_thread(0)) {
231 error = fpu_kern_enter(td, ses->fpu_ctx, FPU_KERN_NORMAL);
238 error = aesni_cipher_setup_common(ses, encini->cri_key,
241 fpu_kern_leave(td, ses->fpu_ctx);
247 aesni_cipher_process(struct aesni_session *ses, struct cryptodesc *enccrd,
252 int error, allocated, saved_ctx;
254 buf = aesni_cipher_alloc(enccrd, crp, &allocated);
259 if (!is_fpu_kern_thread(0)) {
260 error = fpu_kern_enter(td, ses->fpu_ctx, FPU_KERN_NORMAL);
269 if ((enccrd->crd_flags & CRD_F_KEY_EXPLICIT) != 0) {
270 error = aesni_cipher_setup_common(ses, enccrd->crd_key,
276 if ((enccrd->crd_flags & CRD_F_ENCRYPT) != 0) {
277 if ((enccrd->crd_flags & CRD_F_IV_EXPLICIT) != 0)
278 bcopy(enccrd->crd_iv, ses->iv, AES_BLOCK_LEN);
279 if ((enccrd->crd_flags & CRD_F_IV_PRESENT) == 0)
280 crypto_copyback(crp->crp_flags, crp->crp_buf,
281 enccrd->crd_inject, AES_BLOCK_LEN, ses->iv);
282 if (ses->algo == CRYPTO_AES_CBC) {
283 aesni_encrypt_cbc(ses->rounds, ses->enc_schedule,
284 enccrd->crd_len, buf, buf, ses->iv);
285 } else /* if (ses->algo == CRYPTO_AES_XTS) */ {
286 aesni_encrypt_xts(ses->rounds, ses->enc_schedule,
287 ses->xts_schedule, enccrd->crd_len, buf, buf,
291 if ((enccrd->crd_flags & CRD_F_IV_EXPLICIT) != 0)
292 bcopy(enccrd->crd_iv, ses->iv, AES_BLOCK_LEN);
294 crypto_copydata(crp->crp_flags, crp->crp_buf,
295 enccrd->crd_inject, AES_BLOCK_LEN, ses->iv);
296 if (ses->algo == CRYPTO_AES_CBC) {
297 aesni_decrypt_cbc(ses->rounds, ses->dec_schedule,
298 enccrd->crd_len, buf, ses->iv);
299 } else /* if (ses->algo == CRYPTO_AES_XTS) */ {
300 aesni_decrypt_xts(ses->rounds, ses->dec_schedule,
301 ses->xts_schedule, enccrd->crd_len, buf, buf,
306 fpu_kern_leave(td, ses->fpu_ctx);
308 crypto_copyback(crp->crp_flags, crp->crp_buf, enccrd->crd_skip,
309 enccrd->crd_len, buf);
310 if ((enccrd->crd_flags & CRD_F_ENCRYPT) != 0)
311 crypto_copydata(crp->crp_flags, crp->crp_buf,
312 enccrd->crd_skip + enccrd->crd_len - AES_BLOCK_LEN,
313 AES_BLOCK_LEN, ses->iv);
316 bzero(buf, enccrd->crd_len);