/*
 * WPA Supplicant / wrapper functions for libgcrypt
 * Copyright (c) 2004-2017, Jouni Malinen <j@w1.fi>
 *
 * This software may be distributed under the terms of the BSD license.
 * See README for more details.
 */
20 static int gnutls_digest_vector(int algo, size_t num_elem,
21 const u8 *addr[], const size_t *len, u8 *mac)
30 if (gcry_md_open(&hd, algo, 0) != GPG_ERR_NO_ERROR)
32 for (i = 0; i < num_elem; i++)
33 gcry_md_write(hd, addr[i], len[i]);
34 p = gcry_md_read(hd, algo);
36 memcpy(mac, p, gcry_md_get_algo_dlen(algo));
42 int md4_vector(size_t num_elem, const u8 *addr[], const size_t *len, u8 *mac)
44 return gnutls_digest_vector(GCRY_MD_MD4, num_elem, addr, len, mac);
48 int des_encrypt(const u8 *clear, const u8 *key, u8 *cypher)
51 u8 pkey[8], next, tmp;
54 /* Add parity bits to the key */
56 for (i = 0; i < 7; i++) {
58 pkey[i] = (tmp >> i) | next | 1;
59 next = tmp << (7 - i);
63 gcry_cipher_open(&hd, GCRY_CIPHER_DES, GCRY_CIPHER_MODE_ECB, 0);
64 gcry_err_code(gcry_cipher_setkey(hd, pkey, 8));
65 gcry_cipher_encrypt(hd, cypher, 8, clear, 8);
66 gcry_cipher_close(hd);
71 int md5_vector(size_t num_elem, const u8 *addr[], const size_t *len, u8 *mac)
73 return gnutls_digest_vector(GCRY_MD_MD5, num_elem, addr, len, mac);
77 int sha1_vector(size_t num_elem, const u8 *addr[], const size_t *len, u8 *mac)
79 return gnutls_digest_vector(GCRY_MD_SHA1, num_elem, addr, len, mac);
83 int sha256_vector(size_t num_elem, const u8 *addr[], const size_t *len, u8 *mac)
85 return gnutls_digest_vector(GCRY_MD_SHA256, num_elem, addr, len, mac);
89 int sha384_vector(size_t num_elem, const u8 *addr[], const size_t *len, u8 *mac)
91 return gnutls_digest_vector(GCRY_MD_SHA384, num_elem, addr, len, mac);
95 int sha512_vector(size_t num_elem, const u8 *addr[], const size_t *len, u8 *mac)
97 return gnutls_digest_vector(GCRY_MD_SHA512, num_elem, addr, len, mac);
101 static int gnutls_hmac_vector(int algo, const u8 *key, size_t key_len,
102 size_t num_elem, const u8 *addr[],
103 const size_t *len, u8 *mac)
112 if (gcry_md_open(&hd, algo, GCRY_MD_FLAG_HMAC) != GPG_ERR_NO_ERROR)
114 if (gcry_md_setkey(hd, key, key_len) != GPG_ERR_NO_ERROR) {
118 for (i = 0; i < num_elem; i++)
119 gcry_md_write(hd, addr[i], len[i]);
120 p = gcry_md_read(hd, algo);
122 memcpy(mac, p, gcry_md_get_algo_dlen(algo));
128 int hmac_md5_vector(const u8 *key, size_t key_len, size_t num_elem,
129 const u8 *addr[], const size_t *len, u8 *mac)
131 return gnutls_hmac_vector(GCRY_MD_MD5, key, key_len, num_elem, addr,
136 int hmac_md5(const u8 *key, size_t key_len, const u8 *data, size_t data_len,
139 return hmac_md5_vector(key, key_len, 1, &data, &data_len, mac);
143 int hmac_sha1_vector(const u8 *key, size_t key_len, size_t num_elem,
144 const u8 *addr[], const size_t *len, u8 *mac)
146 return gnutls_hmac_vector(GCRY_MD_SHA1, key, key_len, num_elem, addr,
151 int hmac_sha1(const u8 *key, size_t key_len, const u8 *data, size_t data_len,
154 return hmac_sha1_vector(key, key_len, 1, &data, &data_len, mac);
160 int hmac_sha256_vector(const u8 *key, size_t key_len, size_t num_elem,
161 const u8 *addr[], const size_t *len, u8 *mac)
163 return gnutls_hmac_vector(GCRY_MD_SHA256, key, key_len, num_elem, addr,
168 int hmac_sha256(const u8 *key, size_t key_len, const u8 *data,
169 size_t data_len, u8 *mac)
171 return hmac_sha256_vector(key, key_len, 1, &data, &data_len, mac);
174 #endif /* CONFIG_SHA256 */
179 int hmac_sha384_vector(const u8 *key, size_t key_len, size_t num_elem,
180 const u8 *addr[], const size_t *len, u8 *mac)
182 return gnutls_hmac_vector(GCRY_MD_SHA384, key, key_len, num_elem, addr,
187 int hmac_sha384(const u8 *key, size_t key_len, const u8 *data,
188 size_t data_len, u8 *mac)
190 return hmac_sha384_vector(key, key_len, 1, &data, &data_len, mac);
193 #endif /* CONFIG_SHA384 */
198 int hmac_sha512_vector(const u8 *key, size_t key_len, size_t num_elem,
199 const u8 *addr[], const size_t *len, u8 *mac)
201 return gnutls_hmac_vector(GCRY_MD_SHA512, key, key_len, num_elem, addr,
206 int hmac_sha512(const u8 *key, size_t key_len, const u8 *data,
207 size_t data_len, u8 *mac)
209 return hmac_sha512_vector(key, key_len, 1, &data, &data_len, mac);
212 #endif /* CONFIG_SHA512 */
215 void * aes_encrypt_init(const u8 *key, size_t len)
222 if (gcry_cipher_open(&hd, GCRY_CIPHER_AES, GCRY_CIPHER_MODE_ECB, 0) !=
224 printf("cipher open failed\n");
227 if (gcry_cipher_setkey(hd, key, len) != GPG_ERR_NO_ERROR) {
228 printf("setkey failed\n");
229 gcry_cipher_close(hd);
237 int aes_encrypt(void *ctx, const u8 *plain, u8 *crypt)
239 gcry_cipher_hd_t hd = ctx;
240 gcry_cipher_encrypt(hd, crypt, 16, plain, 16);
245 void aes_encrypt_deinit(void *ctx)
247 gcry_cipher_hd_t hd = ctx;
248 gcry_cipher_close(hd);
252 void * aes_decrypt_init(const u8 *key, size_t len)
259 if (gcry_cipher_open(&hd, GCRY_CIPHER_AES, GCRY_CIPHER_MODE_ECB, 0) !=
262 if (gcry_cipher_setkey(hd, key, len) != GPG_ERR_NO_ERROR) {
263 gcry_cipher_close(hd);
271 int aes_decrypt(void *ctx, const u8 *crypt, u8 *plain)
273 gcry_cipher_hd_t hd = ctx;
274 gcry_cipher_decrypt(hd, plain, 16, crypt, 16);
279 void aes_decrypt_deinit(void *ctx)
281 gcry_cipher_hd_t hd = ctx;
282 gcry_cipher_close(hd);
286 int crypto_dh_init(u8 generator, const u8 *prime, size_t prime_len, u8 *privkey,
289 size_t pubkey_len, pad;
291 if (os_get_random(privkey, prime_len) < 0)
293 if (os_memcmp(privkey, prime, prime_len) > 0) {
294 /* Make sure private value is smaller than prime */
298 pubkey_len = prime_len;
299 if (crypto_mod_exp(&generator, 1, privkey, prime_len, prime, prime_len,
300 pubkey, &pubkey_len) < 0)
302 if (pubkey_len < prime_len) {
303 pad = prime_len - pubkey_len;
304 os_memmove(pubkey + pad, pubkey, pubkey_len);
305 os_memset(pubkey, 0, pad);
312 int crypto_dh_derive_secret(u8 generator, const u8 *prime, size_t prime_len,
313 const u8 *order, size_t order_len,
314 const u8 *privkey, size_t privkey_len,
315 const u8 *pubkey, size_t pubkey_len,
316 u8 *secret, size_t *len)
318 gcry_mpi_t pub = NULL;
321 if (pubkey_len > prime_len ||
322 (pubkey_len == prime_len &&
323 os_memcmp(pubkey, prime, prime_len) >= 0))
326 if (gcry_mpi_scan(&pub, GCRYMPI_FMT_USG, pubkey, pubkey_len, NULL) !=
328 gcry_mpi_cmp_ui(pub, 1) <= 0)
332 gcry_mpi_t p = NULL, q = NULL, tmp;
335 /* verify: pubkey^q == 1 mod p */
336 tmp = gcry_mpi_new(prime_len * 8);
338 gcry_mpi_scan(&p, GCRYMPI_FMT_USG, prime, prime_len,
339 NULL) != GPG_ERR_NO_ERROR ||
340 gcry_mpi_scan(&q, GCRYMPI_FMT_USG, order, order_len,
341 NULL) != GPG_ERR_NO_ERROR;
343 gcry_mpi_powm(tmp, pub, q, p);
344 failed = gcry_mpi_cmp_ui(tmp, 1) != 0;
348 gcry_mpi_release(tmp);
353 res = crypto_mod_exp(pubkey, pubkey_len, privkey, privkey_len,
354 prime, prime_len, secret, len);
356 gcry_mpi_release(pub);
361 int crypto_mod_exp(const u8 *base, size_t base_len,
362 const u8 *power, size_t power_len,
363 const u8 *modulus, size_t modulus_len,
364 u8 *result, size_t *result_len)
366 gcry_mpi_t bn_base = NULL, bn_exp = NULL, bn_modulus = NULL,
370 if (gcry_mpi_scan(&bn_base, GCRYMPI_FMT_USG, base, base_len, NULL) !=
372 gcry_mpi_scan(&bn_exp, GCRYMPI_FMT_USG, power, power_len, NULL) !=
374 gcry_mpi_scan(&bn_modulus, GCRYMPI_FMT_USG, modulus, modulus_len,
375 NULL) != GPG_ERR_NO_ERROR)
377 bn_result = gcry_mpi_new(modulus_len * 8);
379 gcry_mpi_powm(bn_result, bn_base, bn_exp, bn_modulus);
381 if (gcry_mpi_print(GCRYMPI_FMT_USG, result, *result_len, result_len,
382 bn_result) != GPG_ERR_NO_ERROR)
388 gcry_mpi_release(bn_base);
389 gcry_mpi_release(bn_exp);
390 gcry_mpi_release(bn_modulus);
391 gcry_mpi_release(bn_result);
396 struct crypto_cipher {
397 gcry_cipher_hd_t enc;
398 gcry_cipher_hd_t dec;
402 struct crypto_cipher * crypto_cipher_init(enum crypto_cipher_alg alg,
403 const u8 *iv, const u8 *key,
406 struct crypto_cipher *ctx;
408 enum gcry_cipher_algos a;
411 ctx = os_zalloc(sizeof(*ctx));
416 case CRYPTO_CIPHER_ALG_RC4:
417 a = GCRY_CIPHER_ARCFOUR;
418 res = gcry_cipher_open(&ctx->enc, a, GCRY_CIPHER_MODE_STREAM,
420 gcry_cipher_open(&ctx->dec, a, GCRY_CIPHER_MODE_STREAM, 0);
422 case CRYPTO_CIPHER_ALG_AES:
424 a = GCRY_CIPHER_AES192;
425 else if (key_len == 32)
426 a = GCRY_CIPHER_AES256;
429 res = gcry_cipher_open(&ctx->enc, a, GCRY_CIPHER_MODE_CBC, 0);
430 gcry_cipher_open(&ctx->dec, a, GCRY_CIPHER_MODE_CBC, 0);
432 case CRYPTO_CIPHER_ALG_3DES:
433 a = GCRY_CIPHER_3DES;
434 res = gcry_cipher_open(&ctx->enc, a, GCRY_CIPHER_MODE_CBC, 0);
435 gcry_cipher_open(&ctx->dec, a, GCRY_CIPHER_MODE_CBC, 0);
437 case CRYPTO_CIPHER_ALG_DES:
439 res = gcry_cipher_open(&ctx->enc, a, GCRY_CIPHER_MODE_CBC, 0);
440 gcry_cipher_open(&ctx->dec, a, GCRY_CIPHER_MODE_CBC, 0);
442 case CRYPTO_CIPHER_ALG_RC2:
444 a = GCRY_CIPHER_RFC2268_40;
446 a = GCRY_CIPHER_RFC2268_128;
447 res = gcry_cipher_open(&ctx->enc, a, GCRY_CIPHER_MODE_CBC, 0);
448 gcry_cipher_open(&ctx->dec, a, GCRY_CIPHER_MODE_CBC, 0);
455 if (res != GPG_ERR_NO_ERROR) {
460 if (gcry_cipher_setkey(ctx->enc, key, key_len) != GPG_ERR_NO_ERROR ||
461 gcry_cipher_setkey(ctx->dec, key, key_len) != GPG_ERR_NO_ERROR) {
462 gcry_cipher_close(ctx->enc);
463 gcry_cipher_close(ctx->dec);
468 ivlen = gcry_cipher_get_algo_blklen(a);
469 if (gcry_cipher_setiv(ctx->enc, iv, ivlen) != GPG_ERR_NO_ERROR ||
470 gcry_cipher_setiv(ctx->dec, iv, ivlen) != GPG_ERR_NO_ERROR) {
471 gcry_cipher_close(ctx->enc);
472 gcry_cipher_close(ctx->dec);
481 int crypto_cipher_encrypt(struct crypto_cipher *ctx, const u8 *plain,
482 u8 *crypt, size_t len)
484 if (gcry_cipher_encrypt(ctx->enc, crypt, len, plain, len) !=
491 int crypto_cipher_decrypt(struct crypto_cipher *ctx, const u8 *crypt,
492 u8 *plain, size_t len)
494 if (gcry_cipher_decrypt(ctx->dec, plain, len, crypt, len) !=
501 void crypto_cipher_deinit(struct crypto_cipher *ctx)
503 gcry_cipher_close(ctx->enc);
504 gcry_cipher_close(ctx->dec);