4 * The contents of this file are subject to the terms of the
5 * Common Development and Distribution License (the "License").
6 * You may not use this file except in compliance with the License.
8 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9 * or http://www.opensolaris.org/os/licensing.
10 * See the License for the specific language governing permissions
11 * and limitations under the License.
13 * When distributing Covered Code, include this CDDL HEADER in each
14 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15 * If applicable, add the following below this CDDL HEADER, with the
16 * fields enclosed by brackets "[]" replaced with your own identifying
17 * information: Portions Copyright [yyyy] [name of copyright owner]
23 * Copyright 2010 Sun Microsystems, Inc. All rights reserved.
24 * Use is subject to license terms.
27 #include <sys/zfs_context.h>
28 #include <sys/modctl.h>
29 #include <sys/crypto/common.h>
30 #include <sys/crypto/spi.h>
31 #include <sys/crypto/icp.h>
34 #include <sha2/sha2_impl.h>
37 * The sha2 module is created with two modlinkages:
38 * - a modlmisc that allows consumers to directly call the entry points
39 * SHA2Init, SHA2Update, and SHA2Final.
40 * - a modlcrypto that allows the module to register with the Kernel
41 * Cryptographic Framework (KCF) as a software provider for the SHA2
45 static struct modlcrypto modlcrypto = {
47 "SHA2 Kernel SW Provider"
50 static struct modlinkage modlinkage = {
51 MODREV_1, {&modlcrypto, NULL}
/*
 * Macros to access the SHA2 or SHA2-HMAC contexts from a context passed
 * by KCF to one of the entry points.
 */
#define	PROV_SHA2_CTX(ctx)	((sha2_ctx_t *)(ctx)->cc_provider_private)
#define	PROV_SHA2_HMAC_CTX(ctx)	((sha2_hmac_ctx_t *)(ctx)->cc_provider_private)
/*
 * Extract the digest length passed as a mechanism parameter.  The
 * parameter may not be naturally aligned for a ulong_t, in which case
 * it is copied to an aligned temporary before being read.
 */
#define	PROV_SHA2_GET_DIGEST_LEN(m, len) {				\
	if (IS_P2ALIGNED((m)->cm_param, sizeof (ulong_t)))		\
		(len) = (uint32_t)*((ulong_t *)(m)->cm_param);		\
	else {								\
		ulong_t tmp_ulong;					\
		bcopy((m)->cm_param, &tmp_ulong, sizeof (ulong_t));	\
		(len) = (uint32_t)tmp_ulong;				\
	}								\
}
/* One-shot digest of an HMAC key that is longer than the block size. */
#define	PROV_SHA2_DIGEST_KEY(mech, ctx, key, len, digest) {	\
	SHA2Init(mech, ctx);					\
	SHA2Update(ctx, key, len);				\
	SHA2Final(digest, ctx);					\
}
80 * Mechanism info structure passed to KCF during registration.
82 static crypto_mech_info_t sha2_mech_info_tab[] = {
84 {SUN_CKM_SHA256, SHA256_MECH_INFO_TYPE,
85 CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC,
86 0, 0, CRYPTO_KEYSIZE_UNIT_IN_BITS},
88 {SUN_CKM_SHA256_HMAC, SHA256_HMAC_MECH_INFO_TYPE,
89 CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
90 SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
91 CRYPTO_KEYSIZE_UNIT_IN_BYTES},
92 /* SHA256-HMAC GENERAL */
93 {SUN_CKM_SHA256_HMAC_GENERAL, SHA256_HMAC_GEN_MECH_INFO_TYPE,
94 CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
95 SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
96 CRYPTO_KEYSIZE_UNIT_IN_BYTES},
98 {SUN_CKM_SHA384, SHA384_MECH_INFO_TYPE,
99 CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC,
100 0, 0, CRYPTO_KEYSIZE_UNIT_IN_BITS},
102 {SUN_CKM_SHA384_HMAC, SHA384_HMAC_MECH_INFO_TYPE,
103 CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
104 SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
105 CRYPTO_KEYSIZE_UNIT_IN_BYTES},
106 /* SHA384-HMAC GENERAL */
107 {SUN_CKM_SHA384_HMAC_GENERAL, SHA384_HMAC_GEN_MECH_INFO_TYPE,
108 CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
109 SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
110 CRYPTO_KEYSIZE_UNIT_IN_BYTES},
112 {SUN_CKM_SHA512, SHA512_MECH_INFO_TYPE,
113 CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC,
114 0, 0, CRYPTO_KEYSIZE_UNIT_IN_BITS},
116 {SUN_CKM_SHA512_HMAC, SHA512_HMAC_MECH_INFO_TYPE,
117 CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
118 SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
119 CRYPTO_KEYSIZE_UNIT_IN_BYTES},
120 /* SHA512-HMAC GENERAL */
121 {SUN_CKM_SHA512_HMAC_GENERAL, SHA512_HMAC_GEN_MECH_INFO_TYPE,
122 CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
123 SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
124 CRYPTO_KEYSIZE_UNIT_IN_BYTES}
127 static void sha2_provider_status(crypto_provider_handle_t, uint_t *);
129 static crypto_control_ops_t sha2_control_ops = {
133 static int sha2_digest_init(crypto_ctx_t *, crypto_mechanism_t *,
134 crypto_req_handle_t);
135 static int sha2_digest(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
136 crypto_req_handle_t);
137 static int sha2_digest_update(crypto_ctx_t *, crypto_data_t *,
138 crypto_req_handle_t);
139 static int sha2_digest_final(crypto_ctx_t *, crypto_data_t *,
140 crypto_req_handle_t);
141 static int sha2_digest_atomic(crypto_provider_handle_t, crypto_session_id_t,
142 crypto_mechanism_t *, crypto_data_t *, crypto_data_t *,
143 crypto_req_handle_t);
145 static crypto_digest_ops_t sha2_digest_ops = {
146 .digest_init = sha2_digest_init,
147 .digest = sha2_digest,
148 .digest_update = sha2_digest_update,
150 .digest_final = sha2_digest_final,
151 .digest_atomic = sha2_digest_atomic
154 static int sha2_mac_init(crypto_ctx_t *, crypto_mechanism_t *, crypto_key_t *,
155 crypto_spi_ctx_template_t, crypto_req_handle_t);
156 static int sha2_mac_update(crypto_ctx_t *, crypto_data_t *,
157 crypto_req_handle_t);
158 static int sha2_mac_final(crypto_ctx_t *, crypto_data_t *, crypto_req_handle_t);
159 static int sha2_mac_atomic(crypto_provider_handle_t, crypto_session_id_t,
160 crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
161 crypto_spi_ctx_template_t, crypto_req_handle_t);
162 static int sha2_mac_verify_atomic(crypto_provider_handle_t, crypto_session_id_t,
163 crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
164 crypto_spi_ctx_template_t, crypto_req_handle_t);
166 static crypto_mac_ops_t sha2_mac_ops = {
167 .mac_init = sha2_mac_init,
169 .mac_update = sha2_mac_update,
170 .mac_final = sha2_mac_final,
171 .mac_atomic = sha2_mac_atomic,
172 .mac_verify_atomic = sha2_mac_verify_atomic
175 static int sha2_create_ctx_template(crypto_provider_handle_t,
176 crypto_mechanism_t *, crypto_key_t *, crypto_spi_ctx_template_t *,
177 size_t *, crypto_req_handle_t);
178 static int sha2_free_context(crypto_ctx_t *);
180 static crypto_ctx_ops_t sha2_ctx_ops = {
181 .create_ctx_template = sha2_create_ctx_template,
182 .free_context = sha2_free_context
185 static crypto_ops_t sha2_crypto_ops = {{{{{
202 static crypto_provider_info_t sha2_prov_info = {{{{
203 CRYPTO_SPI_VERSION_1,
204 "SHA2 Software Provider",
208 sizeof (sha2_mech_info_tab)/sizeof (crypto_mech_info_t),
212 static crypto_kcf_provider_handle_t sha2_prov_handle = 0;
219 if ((ret = mod_install(&modlinkage)) != 0)
223 * Register with KCF. If the registration fails, log an
224 * error but do not uninstall the module, since the functionality
225 * provided by misc/sha2 should still be available.
227 if ((ret = crypto_register_provider(&sha2_prov_info,
228 &sha2_prov_handle)) != CRYPTO_SUCCESS)
229 cmn_err(CE_WARN, "sha2 _init: "
230 "crypto_register_provider() failed (0x%x)", ret);
240 if (sha2_prov_handle != 0) {
241 if ((ret = crypto_unregister_provider(sha2_prov_handle)) !=
244 "sha2 _fini: crypto_unregister_provider() "
245 "failed (0x%x)", ret);
248 sha2_prov_handle = 0;
251 return (mod_remove(&modlinkage));
255 * KCF software provider control entry points.
259 sha2_provider_status(crypto_provider_handle_t provider, uint_t *status)
261 *status = CRYPTO_PROVIDER_READY;
265 * KCF software provider digest entry points.
269 sha2_digest_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
270 crypto_req_handle_t req)
274 * Allocate and initialize SHA2 context.
276 ctx->cc_provider_private = kmem_alloc(sizeof (sha2_ctx_t),
278 if (ctx->cc_provider_private == NULL)
279 return (CRYPTO_HOST_MEMORY);
281 PROV_SHA2_CTX(ctx)->sc_mech_type = mechanism->cm_type;
282 SHA2Init(mechanism->cm_type, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
284 return (CRYPTO_SUCCESS);
288 * Helper SHA2 digest update function for uio data.
291 sha2_digest_update_uio(SHA2_CTX *sha2_ctx, crypto_data_t *data)
293 off_t offset = data->cd_offset;
294 size_t length = data->cd_length;
298 /* we support only kernel buffer */
299 if (uio_segflg(data->cd_uio) != UIO_SYSSPACE)
300 return (CRYPTO_ARGUMENTS_BAD);
303 * Jump to the first iovec containing data to be
306 offset = uio_index_at_offset(data->cd_uio, offset, &vec_idx);
307 if (vec_idx == uio_iovcnt(data->cd_uio)) {
309 * The caller specified an offset that is larger than the
310 * total size of the buffers it provided.
312 return (CRYPTO_DATA_LEN_RANGE);
316 * Now do the digesting on the iovecs.
318 while (vec_idx < uio_iovcnt(data->cd_uio) && length > 0) {
319 cur_len = MIN(uio_iovlen(data->cd_uio, vec_idx) -
322 SHA2Update(sha2_ctx, (uint8_t *)uio_iovbase(data->cd_uio,
323 vec_idx) + offset, cur_len);
329 if (vec_idx == uio_iovcnt(data->cd_uio) && length > 0) {
331 * The end of the specified iovec's was reached but
332 * the length requested could not be processed, i.e.
333 * The caller requested to digest more data than it provided.
335 return (CRYPTO_DATA_LEN_RANGE);
338 return (CRYPTO_SUCCESS);
342 * Helper SHA2 digest final function for uio data.
343 * digest_len is the length of the desired digest. If digest_len
344 * is smaller than the default SHA2 digest length, the caller
345 * must pass a scratch buffer, digest_scratch, which must
346 * be at least the algorithm's digest length bytes.
349 sha2_digest_final_uio(SHA2_CTX *sha2_ctx, crypto_data_t *digest,
350 ulong_t digest_len, uchar_t *digest_scratch)
352 off_t offset = digest->cd_offset;
355 /* we support only kernel buffer */
356 if (uio_segflg(digest->cd_uio) != UIO_SYSSPACE)
357 return (CRYPTO_ARGUMENTS_BAD);
360 * Jump to the first iovec containing ptr to the digest to
363 offset = uio_index_at_offset(digest->cd_uio, offset, &vec_idx);
364 if (vec_idx == uio_iovcnt(digest->cd_uio)) {
366 * The caller specified an offset that is
367 * larger than the total size of the buffers
370 return (CRYPTO_DATA_LEN_RANGE);
373 if (offset + digest_len <=
374 uio_iovlen(digest->cd_uio, vec_idx)) {
376 * The computed SHA2 digest will fit in the current
379 if (((sha2_ctx->algotype <= SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
380 (digest_len != SHA256_DIGEST_LENGTH)) ||
381 ((sha2_ctx->algotype > SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
382 (digest_len != SHA512_DIGEST_LENGTH))) {
384 * The caller requested a short digest. Digest
385 * into a scratch buffer and return to
386 * the user only what was requested.
388 SHA2Final(digest_scratch, sha2_ctx);
390 bcopy(digest_scratch, (uchar_t *)uio_iovbase(digest->
391 cd_uio, vec_idx) + offset,
394 SHA2Final((uchar_t *)uio_iovbase(digest->
395 cd_uio, vec_idx) + offset,
401 * The computed digest will be crossing one or more iovec's.
402 * This is bad performance-wise but we need to support it.
403 * Allocate a small scratch buffer on the stack and
404 * copy it piece meal to the specified digest iovec's.
406 uchar_t digest_tmp[SHA512_DIGEST_LENGTH];
407 off_t scratch_offset = 0;
408 size_t length = digest_len;
411 SHA2Final(digest_tmp, sha2_ctx);
413 while (vec_idx < uio_iovcnt(digest->cd_uio) && length > 0) {
415 MIN(uio_iovlen(digest->cd_uio, vec_idx) -
417 bcopy(digest_tmp + scratch_offset,
418 uio_iovbase(digest->cd_uio, vec_idx) + offset,
423 scratch_offset += cur_len;
427 if (vec_idx == uio_iovcnt(digest->cd_uio) && length > 0) {
429 * The end of the specified iovec's was reached but
430 * the length requested could not be processed, i.e.
431 * The caller requested to digest more data than it
434 return (CRYPTO_DATA_LEN_RANGE);
438 return (CRYPTO_SUCCESS);
443 sha2_digest(crypto_ctx_t *ctx, crypto_data_t *data, crypto_data_t *digest,
444 crypto_req_handle_t req)
446 int ret = CRYPTO_SUCCESS;
447 uint_t sha_digest_len;
449 ASSERT(ctx->cc_provider_private != NULL);
451 switch (PROV_SHA2_CTX(ctx)->sc_mech_type) {
452 case SHA256_MECH_INFO_TYPE:
453 sha_digest_len = SHA256_DIGEST_LENGTH;
455 case SHA384_MECH_INFO_TYPE:
456 sha_digest_len = SHA384_DIGEST_LENGTH;
458 case SHA512_MECH_INFO_TYPE:
459 sha_digest_len = SHA512_DIGEST_LENGTH;
462 return (CRYPTO_MECHANISM_INVALID);
466 * We need to just return the length needed to store the output.
467 * We should not destroy the context for the following cases.
469 if ((digest->cd_length == 0) ||
470 (digest->cd_length < sha_digest_len)) {
471 digest->cd_length = sha_digest_len;
472 return (CRYPTO_BUFFER_TOO_SMALL);
476 * Do the SHA2 update on the specified input data.
478 switch (data->cd_format) {
479 case CRYPTO_DATA_RAW:
480 SHA2Update(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
481 (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
484 case CRYPTO_DATA_UIO:
485 ret = sha2_digest_update_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
489 ret = CRYPTO_ARGUMENTS_BAD;
492 if (ret != CRYPTO_SUCCESS) {
493 /* the update failed, free context and bail */
494 kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
495 ctx->cc_provider_private = NULL;
496 digest->cd_length = 0;
501 * Do a SHA2 final, must be done separately since the digest
502 * type can be different than the input data type.
504 switch (digest->cd_format) {
505 case CRYPTO_DATA_RAW:
506 SHA2Final((unsigned char *)digest->cd_raw.iov_base +
507 digest->cd_offset, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
509 case CRYPTO_DATA_UIO:
510 ret = sha2_digest_final_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
511 digest, sha_digest_len, NULL);
514 ret = CRYPTO_ARGUMENTS_BAD;
517 /* all done, free context and return */
519 if (ret == CRYPTO_SUCCESS)
520 digest->cd_length = sha_digest_len;
522 digest->cd_length = 0;
524 kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
525 ctx->cc_provider_private = NULL;
531 sha2_digest_update(crypto_ctx_t *ctx, crypto_data_t *data,
532 crypto_req_handle_t req)
534 int ret = CRYPTO_SUCCESS;
536 ASSERT(ctx->cc_provider_private != NULL);
539 * Do the SHA2 update on the specified input data.
541 switch (data->cd_format) {
542 case CRYPTO_DATA_RAW:
543 SHA2Update(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
544 (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
547 case CRYPTO_DATA_UIO:
548 ret = sha2_digest_update_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
552 ret = CRYPTO_ARGUMENTS_BAD;
560 sha2_digest_final(crypto_ctx_t *ctx, crypto_data_t *digest,
561 crypto_req_handle_t req)
563 int ret = CRYPTO_SUCCESS;
564 uint_t sha_digest_len;
566 ASSERT(ctx->cc_provider_private != NULL);
568 switch (PROV_SHA2_CTX(ctx)->sc_mech_type) {
569 case SHA256_MECH_INFO_TYPE:
570 sha_digest_len = SHA256_DIGEST_LENGTH;
572 case SHA384_MECH_INFO_TYPE:
573 sha_digest_len = SHA384_DIGEST_LENGTH;
575 case SHA512_MECH_INFO_TYPE:
576 sha_digest_len = SHA512_DIGEST_LENGTH;
579 return (CRYPTO_MECHANISM_INVALID);
583 * We need to just return the length needed to store the output.
584 * We should not destroy the context for the following cases.
586 if ((digest->cd_length == 0) ||
587 (digest->cd_length < sha_digest_len)) {
588 digest->cd_length = sha_digest_len;
589 return (CRYPTO_BUFFER_TOO_SMALL);
595 switch (digest->cd_format) {
596 case CRYPTO_DATA_RAW:
597 SHA2Final((unsigned char *)digest->cd_raw.iov_base +
598 digest->cd_offset, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
600 case CRYPTO_DATA_UIO:
601 ret = sha2_digest_final_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
602 digest, sha_digest_len, NULL);
605 ret = CRYPTO_ARGUMENTS_BAD;
608 /* all done, free context and return */
610 if (ret == CRYPTO_SUCCESS)
611 digest->cd_length = sha_digest_len;
613 digest->cd_length = 0;
615 kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
616 ctx->cc_provider_private = NULL;
623 sha2_digest_atomic(crypto_provider_handle_t provider,
624 crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
625 crypto_data_t *data, crypto_data_t *digest,
626 crypto_req_handle_t req)
628 int ret = CRYPTO_SUCCESS;
630 uint32_t sha_digest_len;
636 SHA2Init(mechanism->cm_type, &sha2_ctx);
638 switch (data->cd_format) {
639 case CRYPTO_DATA_RAW:
640 SHA2Update(&sha2_ctx, (uint8_t *)data->
641 cd_raw.iov_base + data->cd_offset, data->cd_length);
643 case CRYPTO_DATA_UIO:
644 ret = sha2_digest_update_uio(&sha2_ctx, data);
647 ret = CRYPTO_ARGUMENTS_BAD;
651 * Do the SHA updates on the specified input data.
654 if (ret != CRYPTO_SUCCESS) {
655 /* the update failed, bail */
656 digest->cd_length = 0;
660 if (mechanism->cm_type <= SHA256_HMAC_GEN_MECH_INFO_TYPE)
661 sha_digest_len = SHA256_DIGEST_LENGTH;
663 sha_digest_len = SHA512_DIGEST_LENGTH;
666 * Do a SHA2 final, must be done separately since the digest
667 * type can be different than the input data type.
669 switch (digest->cd_format) {
670 case CRYPTO_DATA_RAW:
671 SHA2Final((unsigned char *)digest->cd_raw.iov_base +
672 digest->cd_offset, &sha2_ctx);
674 case CRYPTO_DATA_UIO:
675 ret = sha2_digest_final_uio(&sha2_ctx, digest,
676 sha_digest_len, NULL);
679 ret = CRYPTO_ARGUMENTS_BAD;
682 if (ret == CRYPTO_SUCCESS)
683 digest->cd_length = sha_digest_len;
685 digest->cd_length = 0;
/*
 * KCF software provider mac entry points.
 *
 * SHA2 HMAC is: SHA2(key XOR opad, SHA2(key XOR ipad, text))
 *
 * Init:
 * The initialization routine initializes what we denote
 * as the inner and outer contexts by doing
 * - for inner context: SHA2(key XOR ipad)
 * - for outer context: SHA2(key XOR opad)
 *
 * Update:
 * Each subsequent SHA2 HMAC update will result in an
 * update of the inner context with the specified data.
 *
 * Final:
 * The SHA2 HMAC final will do a SHA2 final operation on the
 * inner context, and the resulting digest will be used
 * as the data for an update on the outer context. Last
 * but not least, a SHA2 final on the outer context will
 * be performed to obtain the SHA2 HMAC digest to return
 * to the user.
 */
715 * Initialize a SHA2-HMAC context.
718 sha2_mac_init_ctx(sha2_hmac_ctx_t *ctx, void *keyval, uint_t length_in_bytes)
720 uint64_t ipad[SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t)];
721 uint64_t opad[SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t)];
722 int i, block_size, blocks_per_int64;
724 /* Determine the block size */
725 if (ctx->hc_mech_type <= SHA256_HMAC_GEN_MECH_INFO_TYPE) {
726 block_size = SHA256_HMAC_BLOCK_SIZE;
727 blocks_per_int64 = SHA256_HMAC_BLOCK_SIZE / sizeof (uint64_t);
729 block_size = SHA512_HMAC_BLOCK_SIZE;
730 blocks_per_int64 = SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t);
733 (void) bzero(ipad, block_size);
734 (void) bzero(opad, block_size);
735 (void) bcopy(keyval, ipad, length_in_bytes);
736 (void) bcopy(keyval, opad, length_in_bytes);
738 /* XOR key with ipad (0x36) and opad (0x5c) */
739 for (i = 0; i < blocks_per_int64; i ++) {
740 ipad[i] ^= 0x3636363636363636;
741 opad[i] ^= 0x5c5c5c5c5c5c5c5c;
744 /* perform SHA2 on ipad */
745 SHA2Init(ctx->hc_mech_type, &ctx->hc_icontext);
746 SHA2Update(&ctx->hc_icontext, (uint8_t *)ipad, block_size);
748 /* perform SHA2 on opad */
749 SHA2Init(ctx->hc_mech_type, &ctx->hc_ocontext);
750 SHA2Update(&ctx->hc_ocontext, (uint8_t *)opad, block_size);
757 sha2_mac_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
758 crypto_key_t *key, crypto_spi_ctx_template_t ctx_template,
759 crypto_req_handle_t req)
761 int ret = CRYPTO_SUCCESS;
762 uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
763 uint_t sha_digest_len, sha_hmac_block_size;
766 * Set the digest length and block size to values appropriate to the
769 switch (mechanism->cm_type) {
770 case SHA256_HMAC_MECH_INFO_TYPE:
771 case SHA256_HMAC_GEN_MECH_INFO_TYPE:
772 sha_digest_len = SHA256_DIGEST_LENGTH;
773 sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
775 case SHA384_HMAC_MECH_INFO_TYPE:
776 case SHA384_HMAC_GEN_MECH_INFO_TYPE:
777 case SHA512_HMAC_MECH_INFO_TYPE:
778 case SHA512_HMAC_GEN_MECH_INFO_TYPE:
779 sha_digest_len = SHA512_DIGEST_LENGTH;
780 sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
783 return (CRYPTO_MECHANISM_INVALID);
786 if (key->ck_format != CRYPTO_KEY_RAW)
787 return (CRYPTO_ARGUMENTS_BAD);
789 ctx->cc_provider_private = kmem_alloc(sizeof (sha2_hmac_ctx_t),
791 if (ctx->cc_provider_private == NULL)
792 return (CRYPTO_HOST_MEMORY);
794 PROV_SHA2_HMAC_CTX(ctx)->hc_mech_type = mechanism->cm_type;
795 if (ctx_template != NULL) {
796 /* reuse context template */
797 bcopy(ctx_template, PROV_SHA2_HMAC_CTX(ctx),
798 sizeof (sha2_hmac_ctx_t));
800 /* no context template, compute context */
801 if (keylen_in_bytes > sha_hmac_block_size) {
802 uchar_t digested_key[SHA512_DIGEST_LENGTH];
803 sha2_hmac_ctx_t *hmac_ctx = ctx->cc_provider_private;
806 * Hash the passed-in key to get a smaller key.
807 * The inner context is used since it hasn't been
810 PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
811 &hmac_ctx->hc_icontext,
812 key->ck_data, keylen_in_bytes, digested_key);
813 sha2_mac_init_ctx(PROV_SHA2_HMAC_CTX(ctx),
814 digested_key, sha_digest_len);
816 sha2_mac_init_ctx(PROV_SHA2_HMAC_CTX(ctx),
817 key->ck_data, keylen_in_bytes);
822 * Get the mechanism parameters, if applicable.
824 if (mechanism->cm_type % 3 == 2) {
825 if (mechanism->cm_param == NULL ||
826 mechanism->cm_param_len != sizeof (ulong_t))
827 ret = CRYPTO_MECHANISM_PARAM_INVALID;
828 PROV_SHA2_GET_DIGEST_LEN(mechanism,
829 PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len);
830 if (PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len > sha_digest_len)
831 ret = CRYPTO_MECHANISM_PARAM_INVALID;
834 if (ret != CRYPTO_SUCCESS) {
835 bzero(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
836 kmem_free(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
837 ctx->cc_provider_private = NULL;
845 sha2_mac_update(crypto_ctx_t *ctx, crypto_data_t *data,
846 crypto_req_handle_t req)
848 int ret = CRYPTO_SUCCESS;
850 ASSERT(ctx->cc_provider_private != NULL);
853 * Do a SHA2 update of the inner context using the specified
856 switch (data->cd_format) {
857 case CRYPTO_DATA_RAW:
858 SHA2Update(&PROV_SHA2_HMAC_CTX(ctx)->hc_icontext,
859 (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
862 case CRYPTO_DATA_UIO:
863 ret = sha2_digest_update_uio(
864 &PROV_SHA2_HMAC_CTX(ctx)->hc_icontext, data);
867 ret = CRYPTO_ARGUMENTS_BAD;
875 sha2_mac_final(crypto_ctx_t *ctx, crypto_data_t *mac, crypto_req_handle_t req)
877 int ret = CRYPTO_SUCCESS;
878 uchar_t digest[SHA512_DIGEST_LENGTH];
879 uint32_t digest_len, sha_digest_len;
881 ASSERT(ctx->cc_provider_private != NULL);
883 /* Set the digest lengths to values appropriate to the mechanism */
884 switch (PROV_SHA2_HMAC_CTX(ctx)->hc_mech_type) {
885 case SHA256_HMAC_MECH_INFO_TYPE:
886 sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
888 case SHA384_HMAC_MECH_INFO_TYPE:
889 sha_digest_len = digest_len = SHA384_DIGEST_LENGTH;
891 case SHA512_HMAC_MECH_INFO_TYPE:
892 sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
894 case SHA256_HMAC_GEN_MECH_INFO_TYPE:
895 sha_digest_len = SHA256_DIGEST_LENGTH;
896 digest_len = PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len;
898 case SHA384_HMAC_GEN_MECH_INFO_TYPE:
899 case SHA512_HMAC_GEN_MECH_INFO_TYPE:
900 sha_digest_len = SHA512_DIGEST_LENGTH;
901 digest_len = PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len;
904 return (CRYPTO_ARGUMENTS_BAD);
908 * We need to just return the length needed to store the output.
909 * We should not destroy the context for the following cases.
911 if ((mac->cd_length == 0) || (mac->cd_length < digest_len)) {
912 mac->cd_length = digest_len;
913 return (CRYPTO_BUFFER_TOO_SMALL);
917 * Do a SHA2 final on the inner context.
919 SHA2Final(digest, &PROV_SHA2_HMAC_CTX(ctx)->hc_icontext);
922 * Do a SHA2 update on the outer context, feeding the inner
925 SHA2Update(&PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, digest,
929 * Do a SHA2 final on the outer context, storing the computing
930 * digest in the users buffer.
932 switch (mac->cd_format) {
933 case CRYPTO_DATA_RAW:
934 if (digest_len != sha_digest_len) {
936 * The caller requested a short digest. Digest
937 * into a scratch buffer and return to
938 * the user only what was requested.
941 &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext);
942 bcopy(digest, (unsigned char *)mac->cd_raw.iov_base +
943 mac->cd_offset, digest_len);
945 SHA2Final((unsigned char *)mac->cd_raw.iov_base +
947 &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext);
950 case CRYPTO_DATA_UIO:
951 ret = sha2_digest_final_uio(
952 &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, mac,
956 ret = CRYPTO_ARGUMENTS_BAD;
959 if (ret == CRYPTO_SUCCESS)
960 mac->cd_length = digest_len;
964 bzero(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
965 kmem_free(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
966 ctx->cc_provider_private = NULL;
/* Dispatch an inner-context update on raw or uio data; sets ret on error. */
#define	SHA2_MAC_UPDATE(data, ctx, ret) {				\
	switch (data->cd_format) {					\
	case CRYPTO_DATA_RAW:						\
		SHA2Update(&(ctx).hc_icontext,				\
		    (uint8_t *)data->cd_raw.iov_base +			\
		    data->cd_offset, data->cd_length);			\
		break;							\
	case CRYPTO_DATA_UIO:						\
		ret = sha2_digest_update_uio(&(ctx).hc_icontext, data);	\
		break;							\
	default:							\
		ret = CRYPTO_ARGUMENTS_BAD;				\
	}								\
}
988 sha2_mac_atomic(crypto_provider_handle_t provider,
989 crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
990 crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
991 crypto_spi_ctx_template_t ctx_template, crypto_req_handle_t req)
993 int ret = CRYPTO_SUCCESS;
994 uchar_t digest[SHA512_DIGEST_LENGTH];
995 sha2_hmac_ctx_t sha2_hmac_ctx;
996 uint32_t sha_digest_len, digest_len, sha_hmac_block_size;
997 uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
1000 * Set the digest length and block size to values appropriate to the
1003 switch (mechanism->cm_type) {
1004 case SHA256_HMAC_MECH_INFO_TYPE:
1005 case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1006 sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
1007 sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
1009 case SHA384_HMAC_MECH_INFO_TYPE:
1010 case SHA384_HMAC_GEN_MECH_INFO_TYPE:
1011 case SHA512_HMAC_MECH_INFO_TYPE:
1012 case SHA512_HMAC_GEN_MECH_INFO_TYPE:
1013 sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
1014 sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
1017 return (CRYPTO_MECHANISM_INVALID);
1020 /* Add support for key by attributes (RFE 4706552) */
1021 if (key->ck_format != CRYPTO_KEY_RAW)
1022 return (CRYPTO_ARGUMENTS_BAD);
1024 if (ctx_template != NULL) {
1025 /* reuse context template */
1026 bcopy(ctx_template, &sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
1028 sha2_hmac_ctx.hc_mech_type = mechanism->cm_type;
1029 /* no context template, initialize context */
1030 if (keylen_in_bytes > sha_hmac_block_size) {
1032 * Hash the passed-in key to get a smaller key.
1033 * The inner context is used since it hasn't been
1036 PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
1037 &sha2_hmac_ctx.hc_icontext,
1038 key->ck_data, keylen_in_bytes, digest);
1039 sha2_mac_init_ctx(&sha2_hmac_ctx, digest,
1042 sha2_mac_init_ctx(&sha2_hmac_ctx, key->ck_data,
1047 /* get the mechanism parameters, if applicable */
1048 if ((mechanism->cm_type % 3) == 2) {
1049 if (mechanism->cm_param == NULL ||
1050 mechanism->cm_param_len != sizeof (ulong_t)) {
1051 ret = CRYPTO_MECHANISM_PARAM_INVALID;
1054 PROV_SHA2_GET_DIGEST_LEN(mechanism, digest_len);
1055 if (digest_len > sha_digest_len) {
1056 ret = CRYPTO_MECHANISM_PARAM_INVALID;
1061 /* do a SHA2 update of the inner context using the specified data */
1062 SHA2_MAC_UPDATE(data, sha2_hmac_ctx, ret);
1063 if (ret != CRYPTO_SUCCESS)
1064 /* the update failed, free context and bail */
1068 * Do a SHA2 final on the inner context.
1070 SHA2Final(digest, &sha2_hmac_ctx.hc_icontext);
1073 * Do an SHA2 update on the outer context, feeding the inner
1076 * HMAC-SHA384 needs special handling as the outer hash needs only 48
1077 * bytes of the inner hash value.
1079 if (mechanism->cm_type == SHA384_HMAC_MECH_INFO_TYPE ||
1080 mechanism->cm_type == SHA384_HMAC_GEN_MECH_INFO_TYPE)
1081 SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest,
1082 SHA384_DIGEST_LENGTH);
1084 SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest, sha_digest_len);
1087 * Do a SHA2 final on the outer context, storing the computed
1088 * digest in the users buffer.
1090 switch (mac->cd_format) {
1091 case CRYPTO_DATA_RAW:
1092 if (digest_len != sha_digest_len) {
1094 * The caller requested a short digest. Digest
1095 * into a scratch buffer and return to
1096 * the user only what was requested.
1098 SHA2Final(digest, &sha2_hmac_ctx.hc_ocontext);
1099 bcopy(digest, (unsigned char *)mac->cd_raw.iov_base +
1100 mac->cd_offset, digest_len);
1102 SHA2Final((unsigned char *)mac->cd_raw.iov_base +
1103 mac->cd_offset, &sha2_hmac_ctx.hc_ocontext);
1106 case CRYPTO_DATA_UIO:
1107 ret = sha2_digest_final_uio(&sha2_hmac_ctx.hc_ocontext, mac,
1108 digest_len, digest);
1111 ret = CRYPTO_ARGUMENTS_BAD;
1114 if (ret == CRYPTO_SUCCESS) {
1115 mac->cd_length = digest_len;
1116 return (CRYPTO_SUCCESS);
1119 bzero(&sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
1126 sha2_mac_verify_atomic(crypto_provider_handle_t provider,
1127 crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1128 crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
1129 crypto_spi_ctx_template_t ctx_template, crypto_req_handle_t req)
1131 int ret = CRYPTO_SUCCESS;
1132 uchar_t digest[SHA512_DIGEST_LENGTH];
1133 sha2_hmac_ctx_t sha2_hmac_ctx;
1134 uint32_t sha_digest_len, digest_len, sha_hmac_block_size;
1135 uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
1138 * Set the digest length and block size to values appropriate to the
1141 switch (mechanism->cm_type) {
1142 case SHA256_HMAC_MECH_INFO_TYPE:
1143 case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1144 sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
1145 sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
1147 case SHA384_HMAC_MECH_INFO_TYPE:
1148 case SHA384_HMAC_GEN_MECH_INFO_TYPE:
1149 case SHA512_HMAC_MECH_INFO_TYPE:
1150 case SHA512_HMAC_GEN_MECH_INFO_TYPE:
1151 sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
1152 sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
1155 return (CRYPTO_MECHANISM_INVALID);
1158 /* Add support for key by attributes (RFE 4706552) */
1159 if (key->ck_format != CRYPTO_KEY_RAW)
1160 return (CRYPTO_ARGUMENTS_BAD);
1162 if (ctx_template != NULL) {
1163 /* reuse context template */
1164 bcopy(ctx_template, &sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
1166 sha2_hmac_ctx.hc_mech_type = mechanism->cm_type;
1167 /* no context template, initialize context */
1168 if (keylen_in_bytes > sha_hmac_block_size) {
1170 * Hash the passed-in key to get a smaller key.
1171 * The inner context is used since it hasn't been
1174 PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
1175 &sha2_hmac_ctx.hc_icontext,
1176 key->ck_data, keylen_in_bytes, digest);
1177 sha2_mac_init_ctx(&sha2_hmac_ctx, digest,
1180 sha2_mac_init_ctx(&sha2_hmac_ctx, key->ck_data,
1185 /* get the mechanism parameters, if applicable */
1186 if (mechanism->cm_type % 3 == 2) {
1187 if (mechanism->cm_param == NULL ||
1188 mechanism->cm_param_len != sizeof (ulong_t)) {
1189 ret = CRYPTO_MECHANISM_PARAM_INVALID;
1192 PROV_SHA2_GET_DIGEST_LEN(mechanism, digest_len);
1193 if (digest_len > sha_digest_len) {
1194 ret = CRYPTO_MECHANISM_PARAM_INVALID;
1199 if (mac->cd_length != digest_len) {
1200 ret = CRYPTO_INVALID_MAC;
1204 /* do a SHA2 update of the inner context using the specified data */
1205 SHA2_MAC_UPDATE(data, sha2_hmac_ctx, ret);
1206 if (ret != CRYPTO_SUCCESS)
1207 /* the update failed, free context and bail */
1210 /* do a SHA2 final on the inner context */
1211 SHA2Final(digest, &sha2_hmac_ctx.hc_icontext);
1214 * Do an SHA2 update on the outer context, feeding the inner
1217 * HMAC-SHA384 needs special handling as the outer hash needs only 48
1218 * bytes of the inner hash value.
1220 if (mechanism->cm_type == SHA384_HMAC_MECH_INFO_TYPE ||
1221 mechanism->cm_type == SHA384_HMAC_GEN_MECH_INFO_TYPE)
1222 SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest,
1223 SHA384_DIGEST_LENGTH);
1225 SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest, sha_digest_len);
1228 * Do a SHA2 final on the outer context, storing the computed
1229 * digest in the users buffer.
1231 SHA2Final(digest, &sha2_hmac_ctx.hc_ocontext);
1234 * Compare the computed digest against the expected digest passed
1238 switch (mac->cd_format) {
1240 case CRYPTO_DATA_RAW:
1241 if (bcmp(digest, (unsigned char *)mac->cd_raw.iov_base +
1242 mac->cd_offset, digest_len) != 0)
1243 ret = CRYPTO_INVALID_MAC;
1246 case CRYPTO_DATA_UIO: {
1247 off_t offset = mac->cd_offset;
1249 off_t scratch_offset = 0;
1250 size_t length = digest_len;
1253 /* we support only kernel buffer */
1254 if (uio_segflg(mac->cd_uio) != UIO_SYSSPACE)
1255 return (CRYPTO_ARGUMENTS_BAD);
1257 /* jump to the first iovec containing the expected digest */
1258 offset = uio_index_at_offset(mac->cd_uio, offset, &vec_idx);
1259 if (vec_idx == uio_iovcnt(mac->cd_uio)) {
1261 * The caller specified an offset that is
1262 * larger than the total size of the buffers
1265 ret = CRYPTO_DATA_LEN_RANGE;
1269 /* do the comparison of computed digest vs specified one */
1270 while (vec_idx < uio_iovcnt(mac->cd_uio) && length > 0) {
1271 cur_len = MIN(uio_iovlen(mac->cd_uio, vec_idx) -
1274 if (bcmp(digest + scratch_offset,
1275 uio_iovbase(mac->cd_uio, vec_idx) + offset,
1277 ret = CRYPTO_INVALID_MAC;
1283 scratch_offset += cur_len;
1290 ret = CRYPTO_ARGUMENTS_BAD;
1295 bzero(&sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
1301 * KCF software provider context management entry points.
1306 sha2_create_ctx_template(crypto_provider_handle_t provider,
1307 crypto_mechanism_t *mechanism, crypto_key_t *key,
1308 crypto_spi_ctx_template_t *ctx_template, size_t *ctx_template_size,
1309 crypto_req_handle_t req)
1311 sha2_hmac_ctx_t *sha2_hmac_ctx_tmpl;
1312 uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
1313 uint32_t sha_digest_len, sha_hmac_block_size;
1316 * Set the digest length and block size to values appropriate to the
1319 switch (mechanism->cm_type) {
1320 case SHA256_HMAC_MECH_INFO_TYPE:
1321 case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1322 sha_digest_len = SHA256_DIGEST_LENGTH;
1323 sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
1325 case SHA384_HMAC_MECH_INFO_TYPE:
1326 case SHA384_HMAC_GEN_MECH_INFO_TYPE:
1327 case SHA512_HMAC_MECH_INFO_TYPE:
1328 case SHA512_HMAC_GEN_MECH_INFO_TYPE:
1329 sha_digest_len = SHA512_DIGEST_LENGTH;
1330 sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
1333 return (CRYPTO_MECHANISM_INVALID);
1336 /* Add support for key by attributes (RFE 4706552) */
1337 if (key->ck_format != CRYPTO_KEY_RAW)
1338 return (CRYPTO_ARGUMENTS_BAD);
1341 * Allocate and initialize SHA2 context.
1343 sha2_hmac_ctx_tmpl = kmem_alloc(sizeof (sha2_hmac_ctx_t),
1344 crypto_kmflag(req));
1345 if (sha2_hmac_ctx_tmpl == NULL)
1346 return (CRYPTO_HOST_MEMORY);
1348 sha2_hmac_ctx_tmpl->hc_mech_type = mechanism->cm_type;
1350 if (keylen_in_bytes > sha_hmac_block_size) {
1351 uchar_t digested_key[SHA512_DIGEST_LENGTH];
1354 * Hash the passed-in key to get a smaller key.
1355 * The inner context is used since it hasn't been
1358 PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
1359 &sha2_hmac_ctx_tmpl->hc_icontext,
1360 key->ck_data, keylen_in_bytes, digested_key);
1361 sha2_mac_init_ctx(sha2_hmac_ctx_tmpl, digested_key,
1364 sha2_mac_init_ctx(sha2_hmac_ctx_tmpl, key->ck_data,
1368 *ctx_template = (crypto_spi_ctx_template_t)sha2_hmac_ctx_tmpl;
1369 *ctx_template_size = sizeof (sha2_hmac_ctx_t);
1371 return (CRYPTO_SUCCESS);
1375 sha2_free_context(crypto_ctx_t *ctx)
1379 if (ctx->cc_provider_private == NULL)
1380 return (CRYPTO_SUCCESS);
1383 * We have to free either SHA2 or SHA2-HMAC contexts, which
1384 * have different lengths.
1386 * Note: Below is dependent on the mechanism ordering.
1389 if (PROV_SHA2_CTX(ctx)->sc_mech_type % 3 == 0)
1390 ctx_len = sizeof (sha2_ctx_t);
1392 ctx_len = sizeof (sha2_hmac_ctx_t);
1394 bzero(ctx->cc_provider_private, ctx_len);
1395 kmem_free(ctx->cc_provider_private, ctx_len);
1396 ctx->cc_provider_private = NULL;
1398 return (CRYPTO_SUCCESS);