2 * Copyright (c) 2010 Konstantin Belousov <kib@FreeBSD.org>
3 * Copyright (c) 2010 Pawel Jakub Dawidek <pjd@FreeBSD.org>
6 * Redistribution and use in source and binary forms, with or without
7 * modification, are permitted provided that the following conditions
9 * 1. Redistributions of source code must retain the above copyright
10 * notice, this list of conditions and the following disclaimer.
11 * 2. Redistributions in binary form must reproduce the above copyright
12 * notice, this list of conditions and the following disclaimer in the
13 * documentation and/or other materials provided with the distribution.
15 * THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ``AS IS'' AND
16 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
17 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
18 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE
19 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
20 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
21 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
22 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
23 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
24 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
28 #include <sys/cdefs.h>
29 __FBSDID("$FreeBSD$");
31 #include <sys/param.h>
32 #include <sys/libkern.h>
33 #include <sys/malloc.h>
35 #include <sys/systm.h>
36 #include <crypto/aesni/aesni.h>
38 MALLOC_DECLARE(M_AESNI);
/*
 * Encrypt `len` bytes from `from` into `to` in AES-CBC mode, chaining each
 * block through the running IV via aesni_enc().
 *
 * NOTE(review): several lines are missing from this view of the file
 * (the local declarations of `i`/`ivp`, a probable `len /= AES_BLOCK_LEN`,
 * the `to`/`ivp` advance inside the loop, and the closing braces) --
 * confirm against the complete source before trusting these comments.
 */
41 aesni_encrypt_cbc(int rounds, const void *key_schedule, size_t len,
42     const uint8_t *from, uint8_t *to, const uint8_t iv[AES_BLOCK_LEN])
49 	for (i = 0; i < len; i++) {
		/* rounds - 1: aesni_enc() presumably applies the final round itself. */
50 		aesni_enc(rounds - 1, key_schedule, from, to, ivp);
52 		from += AES_BLOCK_LEN;
/*
 * Encrypt `len` bytes in AES-ECB mode: each block is encrypted
 * independently (NULL chaining argument to aesni_enc()).
 *
 * NOTE(review): lines are missing from this view (local `i`, a probable
 * `len /= AES_BLOCK_LEN`, the `to` advance, closing brace) -- verify
 * against the full source.
 */
58 aesni_encrypt_ecb(int rounds, const void *key_schedule, size_t len,
59     const uint8_t from[AES_BLOCK_LEN], uint8_t to[AES_BLOCK_LEN])
64 	for (i = 0; i < len; i++) {
65 		aesni_enc(rounds - 1, key_schedule, from, to, NULL);
66 		from += AES_BLOCK_LEN;
/*
 * Decrypt `len` bytes in AES-ECB mode; mirror of aesni_encrypt_ecb()
 * using aesni_dec() with no chaining value.
 *
 * NOTE(review): same missing lines as the ECB encrypt path in this view
 * (local `i`, block-count conversion, `to` advance, closing brace).
 */
72 aesni_decrypt_ecb(int rounds, const void *key_schedule, size_t len,
73     const uint8_t from[AES_BLOCK_LEN], uint8_t to[AES_BLOCK_LEN])
78 	for (i = 0; i < len; i++) {
79 		aesni_dec(rounds - 1, key_schedule, from, to, NULL);
80 		from += AES_BLOCK_LEN;
85 #define AES_XTS_BLOCKSIZE 16
86 #define AES_XTS_IVSIZE 8
87 #define AES_XTS_ALPHA 0x87 /* GF(2^128) generator polynomial */
/*
 * Process one XTS block: XOR the input with the current tweak, run it
 * through AES (encrypt or decrypt per `do_encrypt`), then advance the
 * tweak by multiplying it by alpha in GF(2^128) (shift left by one bit
 * with carry propagation across the 16 bytes; a carry out of the top
 * byte folds back in as AES_XTS_ALPHA).
 *
 * NOTE(review): this view is missing lines -- the `if (do_encrypt)` /
 * `else` around the two cipher calls, the `to[i] ^= tweak[i]` body of
 * the post-whitening loop, the `carry_in = 0` initialization, and an
 * `if (carry_in)` guard that should condition the ALPHA reduction.
 * Confirm against the full source; as shown, the guard is not visible.
 */
90 aesni_crypt_xts_block(int rounds, const void *key_schedule, uint8_t *tweak,
91     const uint8_t *from, uint8_t *to, int do_encrypt)
93 	uint8_t block[AES_XTS_BLOCKSIZE];
94 	u_int i, carry_in, carry_out;
	/* Pre-whitening: XOR plaintext/ciphertext with the tweak. */
96 	for (i = 0; i < AES_XTS_BLOCKSIZE; i++)
97 		block[i] = from[i] ^ tweak[i];
100 		aesni_enc(rounds - 1, key_schedule, block, to, NULL);
102 		aesni_dec(rounds - 1, key_schedule, block, to, NULL);
	/* Post-whitening loop (body not visible in this view). */
104 	for (i = 0; i < AES_XTS_BLOCKSIZE; i++)
107 	/* Exponentiate tweak. */
109 	for (i = 0; i < AES_XTS_BLOCKSIZE; i++) {
110 		carry_out = tweak[i] & 0x80;
111 		tweak[i] = (tweak[i] << 1) | (carry_in ? 1 : 0);
112 		carry_in = carry_out;
	/* GF(2^128) reduction when the shift overflowed the top byte. */
115 	tweak[0] ^= AES_XTS_ALPHA;
	/* Scrub the intermediate plaintext/tweak mix from the stack. */
116 	bzero(block, sizeof(block));
/*
 * XTS bulk worker: derive the initial tweak as E_k2(IV), then run every
 * AES_XTS_BLOCKSIZE-byte block through aesni_crypt_xts_block(), which
 * also steps the tweak forward.
 *
 * NOTE(review): lines are missing from this view (locals such as `i` and
 * `blocknum`, the `blocknum >>= 8` step of the little-endian serialization
 * loop, the `do_encrypt` argument continuation, and closing braces) --
 * verify against the full source.
 */
120 aesni_crypt_xts(int rounds, const void *data_schedule,
121     const void *tweak_schedule, size_t len, const uint8_t *from, uint8_t *to,
122     const uint8_t iv[AES_BLOCK_LEN], int do_encrypt)
124 	uint8_t tweak[AES_XTS_BLOCKSIZE];
129 	 * Prepare tweak as E_k2(IV). IV is specified as LE representation
130 	 * of a 64-bit block number which we allow to be passed in directly.
	/* Serialize the 64-bit block number little-endian into tweak[0..7]. */
132 	bcopy(iv, &blocknum, AES_XTS_IVSIZE);
133 	for (i = 0; i < AES_XTS_IVSIZE; i++) {
134 		tweak[i] = blocknum & 0xff;
137 	/* Last 64 bits of IV are always zero. */
138 	bzero(tweak + AES_XTS_IVSIZE, AES_XTS_IVSIZE);
	/* Encrypt the padded block number under the tweak key (k2). */
139 	aesni_enc(rounds - 1, tweak_schedule, tweak, tweak, NULL);
	/* Convert byte count to a block count; caller supplies whole blocks. */
141 	len /= AES_XTS_BLOCKSIZE;
142 	for (i = 0; i < len; i++) {
143 		aesni_crypt_xts_block(rounds, data_schedule, tweak, from, to,
145 		from += AES_XTS_BLOCKSIZE;
146 		to += AES_XTS_BLOCKSIZE;
	/* Scrub the tweak key stream state from the stack. */
149 	bzero(tweak, sizeof(tweak));
/*
 * Public XTS encrypt entry point: forwards to aesni_crypt_xts() with the
 * encrypt flag (trailing arguments not visible in this view of the file).
 */
153 aesni_encrypt_xts(int rounds, const void *data_schedule,
154     const void *tweak_schedule, size_t len, const uint8_t *from, uint8_t *to,
155     const uint8_t iv[AES_BLOCK_LEN])
158 	aesni_crypt_xts(rounds, data_schedule, tweak_schedule, len, from, to,
/*
 * Public XTS decrypt entry point: forwards to aesni_crypt_xts() with the
 * decrypt flag (trailing arguments not visible in this view of the file).
 */
163 aesni_decrypt_xts(int rounds, const void *data_schedule,
164     const void *tweak_schedule, size_t len, const uint8_t *from, uint8_t *to,
165     const uint8_t iv[AES_BLOCK_LEN])
168 	aesni_crypt_xts(rounds, data_schedule, tweak_schedule, len, from, to,
/*
 * Validate the key length for the session's algorithm, select the round
 * count, expand the encryption and decryption key schedules, and do
 * per-algorithm IV/tweak-key setup (random IV for CBC; a second expanded
 * schedule from the upper key half for XTS).
 *
 * NOTE(review): most of the switch scaffolding (case labels, default /
 * EINVAL paths, `keylen` parameter line, closing braces) is missing from
 * this view -- the round assignments below are presumably inside
 * per-keylen switch cases. Verify against the full source.
 */
173 aesni_cipher_setup_common(struct aesni_session *ses, const uint8_t *key,
181 		ses->rounds = AES128_ROUNDS;
184 		ses->rounds = AES192_ROUNDS;
187 		ses->rounds = AES256_ROUNDS;
196 		ses->rounds = AES128_ROUNDS;
199 		ses->rounds = AES256_ROUNDS;
	/* Decryption schedule is derived from the expanded encryption schedule. */
209 	aesni_set_enckey(key, ses->enc_schedule, ses->rounds);
210 	aesni_set_deckey(ses->enc_schedule, ses->dec_schedule, ses->rounds);
211 	if (ses->algo == CRYPTO_AES_CBC)
212 		arc4rand(ses->iv, sizeof(ses->iv), 0);
213 	else /* if (ses->algo == CRYPTO_AES_XTS) */ {
		/* XTS: second half of the key material expands the tweak schedule. */
214 		aesni_set_enckey(key + keylen / 16, ses->xts_schedule,
/*
 * Session setup wrapper: enter the kernel FPU context (unless already on
 * an FPU kernel thread), run the common key-schedule setup, then leave
 * the FPU context. AESNI instructions require FPU/SSE state ownership.
 *
 * NOTE(review): lines missing from this view include the `td` local, the
 * `saved_ctx` bookkeeping, error checks, and closing braces -- verify
 * against the full source.
 */
222 aesni_cipher_setup(struct aesni_session *ses, struct cryptoini *encini)
225 	int error, saved_ctx;
228 	if (!is_fpu_kern_thread(0)) {
229 		error = fpu_kern_enter(td, &ses->fpu_ctx, FPU_KERN_NORMAL);
236 	error = aesni_cipher_setup_common(ses, encini->cri_key,
239 		fpu_kern_leave(td, &ses->fpu_ctx);
/*
 * Per-request processing: copy the payload into a linear buffer, enter
 * the FPU context, handle an explicit per-request key if supplied, run
 * CBC or XTS encryption/decryption over the buffer, copy the result back
 * into the crypto request, and maintain the session IV across requests.
 *
 * NOTE(review): many lines are dropped from this view (locals `td`/`buf`,
 * allocation-failure handling, error bail-outs, several closing braces,
 * the trailing free of `buf`) -- verify the control flow against the
 * full source before relying on these comments.
 */
245 aesni_cipher_process(struct aesni_session *ses, struct cryptodesc *enccrd,
250 	int error, allocated, saved_ctx;
	/* Linearize the (possibly mbuf/uio) request payload for the AESNI code. */
252 	buf = aesni_cipher_alloc(enccrd, crp, &allocated);
257 	if (!is_fpu_kern_thread(0)) {
258 		error = fpu_kern_enter(td, &ses->fpu_ctx, FPU_KERN_NORMAL);
	/* Re-key the session when the request carries its own key. */
267 	if ((enccrd->crd_flags & CRD_F_KEY_EXPLICIT) != 0) {
268 		error = aesni_cipher_setup_common(ses, enccrd->crd_key,
274 	if ((enccrd->crd_flags & CRD_F_ENCRYPT) != 0) {
		/* Encrypt: take caller's IV if explicit, else use the session IV,
		 * and publish the IV into the request unless the caller says it
		 * is already present. */
275 		if ((enccrd->crd_flags & CRD_F_IV_EXPLICIT) != 0)
276 			bcopy(enccrd->crd_iv, ses->iv, AES_BLOCK_LEN);
277 		if ((enccrd->crd_flags & CRD_F_IV_PRESENT) == 0)
278 			crypto_copyback(crp->crp_flags, crp->crp_buf,
279 			    enccrd->crd_inject, AES_BLOCK_LEN, ses->iv);
280 		if (ses->algo == CRYPTO_AES_CBC) {
281 			aesni_encrypt_cbc(ses->rounds, ses->enc_schedule,
282 			    enccrd->crd_len, buf, buf, ses->iv);
283 		} else /* if (ses->algo == CRYPTO_AES_XTS) */ {
284 			aesni_encrypt_xts(ses->rounds, ses->enc_schedule,
285 			    ses->xts_schedule, enccrd->crd_len, buf, buf,
		/* Decrypt: IV comes from the request (explicit or injected). */
289 		if ((enccrd->crd_flags & CRD_F_IV_EXPLICIT) != 0)
290 			bcopy(enccrd->crd_iv, ses->iv, AES_BLOCK_LEN);
292 			crypto_copydata(crp->crp_flags, crp->crp_buf,
293 			    enccrd->crd_inject, AES_BLOCK_LEN, ses->iv);
294 		if (ses->algo == CRYPTO_AES_CBC) {
295 			aesni_decrypt_cbc(ses->rounds, ses->dec_schedule,
296 			    enccrd->crd_len, buf, ses->iv);
297 		} else /* if (ses->algo == CRYPTO_AES_XTS) */ {
298 			aesni_decrypt_xts(ses->rounds, ses->dec_schedule,
299 			    ses->xts_schedule, enccrd->crd_len, buf, buf,
304 		fpu_kern_leave(td, &ses->fpu_ctx);
	/* Copy the transformed payload back into the request buffer. */
306 	crypto_copyback(crp->crp_flags, crp->crp_buf, enccrd->crd_skip,
307 	    enccrd->crd_len, buf);
	/* After encrypting, save the last ciphertext block as the next IV
	 * (CBC chaining across requests). */
308 	if ((enccrd->crd_flags & CRD_F_ENCRYPT) != 0)
309 		crypto_copydata(crp->crp_flags, crp->crp_buf,
310 		    enccrd->crd_skip + enccrd->crd_len - AES_BLOCK_LEN,
311 		    AES_BLOCK_LEN, ses->iv);
	/* Scrub plaintext from the temporary buffer before release. */
314 		bzero(buf, enccrd->crd_len);