Print this page
4746 remove EXPORT_SRC leftovers
Split |
Close |
Expand all |
Collapse all |
--- old/usr/src/uts/common/crypto/io/aes.c
+++ new/usr/src/uts/common/crypto/io/aes.c
1 1 /*
2 2 * CDDL HEADER START
3 3 *
4 4 * The contents of this file are subject to the terms of the
5 5 * Common Development and Distribution License (the "License").
6 6 * You may not use this file except in compliance with the License.
7 7 *
8 8 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9 9 * or http://www.opensolaris.org/os/licensing.
10 10 * See the License for the specific language governing permissions
11 11 * and limitations under the License.
12 12 *
13 13 * When distributing Covered Code, include this CDDL HEADER in each
14 14 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15 15 * If applicable, add the following below this CDDL HEADER, with the
16 16 * fields enclosed by brackets "[]" replaced with your own identifying
17 17 * information: Portions Copyright [yyyy] [name of copyright owner]
18 18 *
19 19 * CDDL HEADER END
20 20 */
21 21 /*
22 22 * Copyright (c) 2003, 2010, Oracle and/or its affiliates. All rights reserved.
23 23 */
24 24
25 25 /*
26 26 * AES provider for the Kernel Cryptographic Framework (KCF)
27 27 */
28 28
29 29 #include <sys/types.h>
30 30 #include <sys/systm.h>
31 31 #include <sys/modctl.h>
32 32 #include <sys/cmn_err.h>
33 33 #include <sys/ddi.h>
34 34 #include <sys/crypto/common.h>
35 35 #include <sys/crypto/impl.h>
36 36 #include <sys/crypto/spi.h>
37 37 #include <sys/sysmacros.h>
38 38 #include <sys/strsun.h>
39 39 #include <modes/modes.h>
40 40 #define _AES_IMPL
41 41 #include <aes/aes_impl.h>
42 42
43 43 extern struct mod_ops mod_cryptoops;
44 44
45 45 /*
46 46 * Module linkage information for the kernel.
47 47 */
static struct modlcrypto modlcrypto = {
	&mod_cryptoops,			/* crypto module operations vector */
	"AES Kernel SW Provider"	/* name reported by modinfo */
};

static struct modlinkage modlinkage = {
	MODREV_1,			/* module linkage revision */
	(void *)&modlcrypto,		/* one linkage element: this module */
	NULL				/* NULL-terminated list */
};
58 58
/*
 * Mechanism info structure passed to KCF during registration.
 * Each entry names one mechanism, the SPI function groups it supports,
 * and its key-size range (expressed in bytes).
 */
static crypto_mech_info_t aes_mech_info_tab[] = {
	/* AES_ECB */
	{SUN_CKM_AES_ECB, AES_ECB_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
	/* AES_CBC */
	{SUN_CKM_AES_CBC, AES_CBC_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
	/* AES_CTR */
	{SUN_CKM_AES_CTR, AES_CTR_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
	/* AES_CCM */
	{SUN_CKM_AES_CCM, AES_CCM_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
	/* AES_GCM */
	{SUN_CKM_AES_GCM, AES_GCM_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
	/* AES_GMAC: also usable as a MAC and for sign/verify */
	{SUN_CKM_AES_GMAC, AES_GMAC_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC |
	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC |
	    CRYPTO_FG_SIGN | CRYPTO_FG_SIGN_ATOMIC |
	    CRYPTO_FG_VERIFY | CRYPTO_FG_VERIFY_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES}
};
108 97
/*
 * Operations are performed in-place when the output buffer is NULL:
 * redirect the output at the input.  Wrapped in do/while (0) so the
 * macro expands to exactly one statement and is safe inside
 * unbraced if/else bodies (the bare-if form had a dangling-else
 * hazard).
 */
#define	AES_ARG_INPLACE(input, output)				\
	do {							\
		if ((output) == NULL)				\
			(output) = (input);			\
	} while (0)
113 102
114 103 static void aes_provider_status(crypto_provider_handle_t, uint_t *);
115 104
116 105 static crypto_control_ops_t aes_control_ops = {
117 106 aes_provider_status
118 107 };
119 108
120 109 static int aes_encrypt_init(crypto_ctx_t *, crypto_mechanism_t *,
121 110 crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
122 111 static int aes_decrypt_init(crypto_ctx_t *, crypto_mechanism_t *,
123 112 crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
124 113 static int aes_common_init(crypto_ctx_t *, crypto_mechanism_t *,
125 114 crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t, boolean_t);
126 115 static int aes_common_init_ctx(aes_ctx_t *, crypto_spi_ctx_template_t *,
127 116 crypto_mechanism_t *, crypto_key_t *, int, boolean_t);
128 117 static int aes_encrypt_final(crypto_ctx_t *, crypto_data_t *,
129 118 crypto_req_handle_t);
130 119 static int aes_decrypt_final(crypto_ctx_t *, crypto_data_t *,
131 120 crypto_req_handle_t);
132 121
133 122 static int aes_encrypt(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
134 123 crypto_req_handle_t);
135 124 static int aes_encrypt_update(crypto_ctx_t *, crypto_data_t *,
136 125 crypto_data_t *, crypto_req_handle_t);
137 126 static int aes_encrypt_atomic(crypto_provider_handle_t, crypto_session_id_t,
138 127 crypto_mechanism_t *, crypto_key_t *, crypto_data_t *,
139 128 crypto_data_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
140 129
141 130 static int aes_decrypt(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
142 131 crypto_req_handle_t);
143 132 static int aes_decrypt_update(crypto_ctx_t *, crypto_data_t *,
144 133 crypto_data_t *, crypto_req_handle_t);
145 134 static int aes_decrypt_atomic(crypto_provider_handle_t, crypto_session_id_t,
146 135 crypto_mechanism_t *, crypto_key_t *, crypto_data_t *,
147 136 crypto_data_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
148 137
149 138 static crypto_cipher_ops_t aes_cipher_ops = {
150 139 aes_encrypt_init,
151 140 aes_encrypt,
152 141 aes_encrypt_update,
153 142 aes_encrypt_final,
154 143 aes_encrypt_atomic,
155 144 aes_decrypt_init,
156 145 aes_decrypt,
157 146 aes_decrypt_update,
158 147 aes_decrypt_final,
159 148 aes_decrypt_atomic
160 149 };
161 150
162 151 static int aes_mac_atomic(crypto_provider_handle_t, crypto_session_id_t,
163 152 crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
164 153 crypto_spi_ctx_template_t, crypto_req_handle_t);
165 154 static int aes_mac_verify_atomic(crypto_provider_handle_t, crypto_session_id_t,
166 155 crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
167 156 crypto_spi_ctx_template_t, crypto_req_handle_t);
168 157
169 158 static crypto_mac_ops_t aes_mac_ops = {
170 159 NULL,
171 160 NULL,
172 161 NULL,
173 162 NULL,
174 163 aes_mac_atomic,
175 164 aes_mac_verify_atomic
176 165 };
177 166
178 167 static int aes_create_ctx_template(crypto_provider_handle_t,
179 168 crypto_mechanism_t *, crypto_key_t *, crypto_spi_ctx_template_t *,
180 169 size_t *, crypto_req_handle_t);
181 170 static int aes_free_context(crypto_ctx_t *);
182 171
183 172 static crypto_ctx_ops_t aes_ctx_ops = {
184 173 aes_create_ctx_template,
185 174 aes_free_context
186 175 };
187 176
188 177 static crypto_ops_t aes_crypto_ops = {
189 178 &aes_control_ops,
190 179 NULL,
191 180 &aes_cipher_ops,
192 181 &aes_mac_ops,
193 182 NULL,
194 183 NULL,
195 184 NULL,
196 185 NULL,
197 186 NULL,
198 187 NULL,
199 188 NULL,
200 189 NULL,
201 190 NULL,
202 191 &aes_ctx_ops,
203 192 NULL,
204 193 NULL,
205 194 NULL,
206 195 };
207 196
208 197 static crypto_provider_info_t aes_prov_info = {
209 198 CRYPTO_SPI_VERSION_4,
210 199 "AES Software Provider",
211 200 CRYPTO_SW_PROVIDER,
212 201 {&modlinkage},
213 202 NULL,
214 203 &aes_crypto_ops,
215 204 sizeof (aes_mech_info_tab)/sizeof (crypto_mech_info_t),
216 205 aes_mech_info_tab
217 206 };
218 207
219 208 static crypto_kcf_provider_handle_t aes_prov_handle = NULL;
220 209 static crypto_data_t null_crypto_data = { CRYPTO_DATA_RAW };
221 210
222 211 int
223 212 _init(void)
224 213 {
225 214 int ret;
226 215
227 216 if ((ret = mod_install(&modlinkage)) != 0)
228 217 return (ret);
229 218
230 219 /* Register with KCF. If the registration fails, remove the module. */
231 220 if (crypto_register_provider(&aes_prov_info, &aes_prov_handle)) {
232 221 (void) mod_remove(&modlinkage);
233 222 return (EACCES);
234 223 }
235 224
236 225 return (0);
237 226 }
238 227
239 228 int
240 229 _fini(void)
241 230 {
242 231 /* Unregister from KCF if module is registered */
243 232 if (aes_prov_handle != NULL) {
244 233 if (crypto_unregister_provider(aes_prov_handle))
245 234 return (EBUSY);
246 235
247 236 aes_prov_handle = NULL;
248 237 }
249 238
250 239 return (mod_remove(&modlinkage));
251 240 }
252 241
253 242 int
254 243 _info(struct modinfo *modinfop)
255 244 {
256 245 return (mod_info(&modlinkage, modinfop));
257 246 }
258 247
259 248
/*
 * Validate the mechanism parameter length and, when `ctx' is non-NULL,
 * allocate a mode-specific context of the right kind for the mechanism.
 *
 * Returns CRYPTO_MECHANISM_INVALID for an unknown mechanism type and
 * CRYPTO_MECHANISM_PARAM_INVALID for a wrong-sized parameter; otherwise
 * CRYPTO_SUCCESS.  NOTE(review): a required-but-NULL cm_param is not
 * flagged here — presumably it is caught later during context init;
 * confirm against aes_common_init_ctx().  Also note *ctx may be set to
 * NULL if the allocator fails; callers must handle that.
 */
static int
aes_check_mech_param(crypto_mechanism_t *mechanism, aes_ctx_t **ctx, int kmflag)
{
	void *p = NULL;
	boolean_t param_required = B_TRUE;
	size_t param_len;
	void *(*alloc_fun)(int);	/* mode-specific context allocator */
	int rv = CRYPTO_SUCCESS;

	switch (mechanism->cm_type) {
	case AES_ECB_MECH_INFO_TYPE:
		/* ECB takes no IV/parameter */
		param_required = B_FALSE;
		alloc_fun = ecb_alloc_ctx;
		break;
	case AES_CBC_MECH_INFO_TYPE:
		/* CBC parameter is the IV, one AES block long */
		param_len = AES_BLOCK_LEN;
		alloc_fun = cbc_alloc_ctx;
		break;
	case AES_CTR_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_CTR_PARAMS);
		alloc_fun = ctr_alloc_ctx;
		break;
	case AES_CCM_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_CCM_PARAMS);
		alloc_fun = ccm_alloc_ctx;
		break;
	case AES_GCM_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_GCM_PARAMS);
		alloc_fun = gcm_alloc_ctx;
		break;
	case AES_GMAC_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_GMAC_PARAMS);
		alloc_fun = gmac_alloc_ctx;
		break;
	default:
		rv = CRYPTO_MECHANISM_INVALID;
		return (rv);
	}
	/* only check the length when a parameter was actually supplied */
	if (param_required && mechanism->cm_param != NULL &&
	    mechanism->cm_param_len != param_len) {
		rv = CRYPTO_MECHANISM_PARAM_INVALID;
	}
	if (ctx != NULL) {
		p = (alloc_fun)(kmflag);
		*ctx = p;
	}
	return (rv);
}
308 297
309 298 /*
310 299 * Initialize key schedules for AES
311 300 */
312 301 static int
313 302 init_keysched(crypto_key_t *key, void *newbie)
314 303 {
315 304 /*
316 305 * Only keys by value are supported by this module.
317 306 */
318 307 switch (key->ck_format) {
319 308 case CRYPTO_KEY_RAW:
320 309 if (key->ck_length < AES_MINBITS ||
321 310 key->ck_length > AES_MAXBITS) {
322 311 return (CRYPTO_KEY_SIZE_RANGE);
323 312 }
324 313
325 314 /* key length must be either 128, 192, or 256 */
326 315 if ((key->ck_length & 63) != 0)
327 316 return (CRYPTO_KEY_SIZE_RANGE);
328 317 break;
329 318 default:
330 319 return (CRYPTO_KEY_TYPE_INCONSISTENT);
331 320 }
332 321
333 322 aes_init_keysched(key->ck_data, key->ck_length, newbie);
334 323 return (CRYPTO_SUCCESS);
335 324 }
336 325
/*
 * KCF software provider control entry points.
 */
/* ARGSUSED */
static void
aes_provider_status(crypto_provider_handle_t provider, uint_t *status)
{
	/* A software provider is always ready to accept requests. */
	*status = CRYPTO_PROVIDER_READY;
}
346 335
347 336 static int
348 337 aes_encrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
349 338 crypto_key_t *key, crypto_spi_ctx_template_t template,
350 339 crypto_req_handle_t req) {
351 340 return (aes_common_init(ctx, mechanism, key, template, req, B_TRUE));
352 341 }
353 342
354 343 static int
355 344 aes_decrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
356 345 crypto_key_t *key, crypto_spi_ctx_template_t template,
357 346 crypto_req_handle_t req) {
358 347 return (aes_common_init(ctx, mechanism, key, template, req, B_FALSE));
359 348 }
360 349
361 350
362 351
/*
 * Shared initialization for encrypt and decrypt contexts.
 *
 * Validates that the key is a raw (by-value) key, checks the mechanism
 * parameter and allocates a mode context via aes_check_mech_param(),
 * then fills in that context from the template/mechanism/key.  On any
 * failure after allocation the mode context is freed before returning;
 * on success it is stashed in ctx->cc_provider_private.
 */
static int
aes_common_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_spi_ctx_template_t template,
    crypto_req_handle_t req, boolean_t is_encrypt_init)
{
	aes_ctx_t *aes_ctx;
	int rv;
	int kmflag;

	/*
	 * Only keys by value are supported by this module.
	 */
	if (key->ck_format != CRYPTO_KEY_RAW) {
		return (CRYPTO_KEY_TYPE_INCONSISTENT);
	}

	/* sleep vs. nosleep allocation depends on the request context */
	kmflag = crypto_kmflag(req);
	if ((rv = aes_check_mech_param(mechanism, &aes_ctx, kmflag))
	    != CRYPTO_SUCCESS)
		return (rv);

	rv = aes_common_init_ctx(aes_ctx, template, mechanism, key, kmflag,
	    is_encrypt_init);
	if (rv != CRYPTO_SUCCESS) {
		/* don't leak the mode context allocated above */
		crypto_free_mode_ctx(aes_ctx);
		return (rv);
	}

	ctx->cc_provider_private = aes_ctx;

	return (CRYPTO_SUCCESS);
}
398 387
/*
 * Copy one 16-byte AES block from `in' to the two 64-bit words at
 * `out'.  Uses direct 64-bit loads only when `in' is suitably aligned;
 * otherwise falls back to the byte-wise AES_COPY_BLOCK to avoid a
 * misaligned access.
 */
static void
aes_copy_block64(uint8_t *in, uint64_t *out)
{
	if (IS_P2ALIGNED(in, sizeof (uint64_t))) {
		/* LINTED: pointer alignment */
		out[0] = *(uint64_t *)&in[0];
		/* LINTED: pointer alignment */
		out[1] = *(uint64_t *)&in[8];
	} else {
		uint8_t *iv8 = (uint8_t *)&out[0];

		AES_COPY_BLOCK(in, iv8);
	}
}
413 402
414 403
/*
 * Single-part encrypt entry point.
 *
 * Computes the output space required (including the authentication tag
 * for CCM/GCM/GMAC), performs the bulk work via aes_encrypt_update(),
 * and for the authenticated modes runs the mode's final routine to
 * append the tag.  The context is freed only when the operation runs
 * to completion; the early length-query and validation returns leave
 * it intact so the caller can retry with a bigger buffer.
 */
static int
aes_encrypt(crypto_ctx_t *ctx, crypto_data_t *plaintext,
    crypto_data_t *ciphertext, crypto_req_handle_t req)
{
	int ret = CRYPTO_FAILED;

	aes_ctx_t *aes_ctx;
	size_t saved_length, saved_offset, length_needed;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	/*
	 * For block ciphers, plaintext must be a multiple of AES block size.
	 * This test is only valid for ciphers whose blocksize is a power of 2.
	 */
	if (((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE))
	    == 0) && (plaintext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
		return (CRYPTO_DATA_LEN_RANGE);

	AES_ARG_INPLACE(plaintext, ciphertext);

	/*
	 * We need to just return the length needed to store the output.
	 * We should not destroy the context for the following case.
	 */
	switch (aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) {
	case CCM_MODE:
		/* CCM output is ciphertext plus the MAC */
		length_needed = plaintext->cd_length + aes_ctx->ac_mac_len;
		break;
	case GCM_MODE:
		/* GCM output is ciphertext plus the tag */
		length_needed = plaintext->cd_length + aes_ctx->ac_tag_len;
		break;
	case GMAC_MODE:
		/* GMAC authenticates AAD only; no plaintext is allowed */
		if (plaintext->cd_length != 0)
			return (CRYPTO_ARGUMENTS_BAD);

		length_needed = aes_ctx->ac_tag_len;
		break;
	default:
		length_needed = plaintext->cd_length;
	}

	if (ciphertext->cd_length < length_needed) {
		/* report the required size to the caller */
		ciphertext->cd_length = length_needed;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	saved_length = ciphertext->cd_length;
	saved_offset = ciphertext->cd_offset;

	/*
	 * Do an update on the specified input data.
	 */
	ret = aes_encrypt_update(ctx, plaintext, ciphertext, req);
	if (ret != CRYPTO_SUCCESS) {
		return (ret);
	}

	/*
	 * For CCM mode, aes_ccm_encrypt_final() will take care of any
	 * left-over unprocessed data, and compute the MAC
	 */
	if (aes_ctx->ac_flags & CCM_MODE) {
		/*
		 * ccm_encrypt_final() will compute the MAC and append
		 * it to existing ciphertext. So, need to adjust the left over
		 * length value accordingly
		 */

		/* order of following 2 lines MUST not be reversed */
		ciphertext->cd_offset = ciphertext->cd_length;
		ciphertext->cd_length = saved_length - ciphertext->cd_length;
		ret = ccm_encrypt_final((ccm_ctx_t *)aes_ctx, ciphertext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}

		if (plaintext != ciphertext) {
			/* report total bytes produced (data + MAC) */
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
		}
		ciphertext->cd_offset = saved_offset;
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		/*
		 * gcm_encrypt_final() will compute the MAC and append
		 * it to existing ciphertext. So, need to adjust the left over
		 * length value accordingly
		 */

		/* order of following 2 lines MUST not be reversed */
		ciphertext->cd_offset = ciphertext->cd_length;
		ciphertext->cd_length = saved_length - ciphertext->cd_length;
		ret = gcm_encrypt_final((gcm_ctx_t *)aes_ctx, ciphertext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}

		if (plaintext != ciphertext) {
			/* report total bytes produced (data + tag) */
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
		}
		ciphertext->cd_offset = saved_offset;
	}

	/* single-part operation: nothing may be left unprocessed */
	ASSERT(aes_ctx->ac_remainder_len == 0);
	(void) aes_free_context(ctx);

	return (ret);
}
528 517
529 518
/*
 * Single-part decrypt entry point.
 *
 * Computes the plaintext space required (CCM/GCM outputs are smaller
 * than the ciphertext by the MAC/tag length; GMAC produces none),
 * performs the bulk work via aes_decrypt_update(), and for the
 * authenticated modes runs the mode's final routine which also
 * verifies the tag.  Unlike aes_encrypt(), the context IS freed on an
 * update failure (via the cleanup label); only the early length-query
 * and validation returns leave it intact.
 */
static int
aes_decrypt(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
    crypto_data_t *plaintext, crypto_req_handle_t req)
{
	int ret = CRYPTO_FAILED;

	aes_ctx_t *aes_ctx;
	off_t saved_offset;
	size_t saved_length, length_needed;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	/*
	 * For block ciphers, plaintext must be a multiple of AES block size.
	 * This test is only valid for ciphers whose blocksize is a power of 2.
	 */
	if (((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE))
	    == 0) && (ciphertext->cd_length & (AES_BLOCK_LEN - 1)) != 0) {
		return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
	}

	AES_ARG_INPLACE(ciphertext, plaintext);

	/*
	 * Return length needed to store the output.
	 * Do not destroy context when plaintext buffer is too small.
	 *
	 * CCM:  plaintext is MAC len smaller than cipher text
	 * GCM:  plaintext is TAG len smaller than cipher text
	 * GMAC: plaintext length must be zero
	 */
	switch (aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) {
	case CCM_MODE:
		length_needed = aes_ctx->ac_processed_data_len;
		break;
	case GCM_MODE:
		length_needed = ciphertext->cd_length - aes_ctx->ac_tag_len;
		break;
	case GMAC_MODE:
		if (plaintext->cd_length != 0)
			return (CRYPTO_ARGUMENTS_BAD);

		length_needed = 0;
		break;
	default:
		length_needed = ciphertext->cd_length;
	}

	if (plaintext->cd_length < length_needed) {
		/* report the required size to the caller */
		plaintext->cd_length = length_needed;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	saved_offset = plaintext->cd_offset;
	saved_length = plaintext->cd_length;

	/*
	 * Do an update on the specified input data.
	 */
	ret = aes_decrypt_update(ctx, ciphertext, plaintext, req);
	if (ret != CRYPTO_SUCCESS) {
		goto cleanup;
	}

	if (aes_ctx->ac_flags & CCM_MODE) {
		/* all data and the full MAC must have been consumed */
		ASSERT(aes_ctx->ac_processed_data_len == aes_ctx->ac_data_len);
		ASSERT(aes_ctx->ac_processed_mac_len == aes_ctx->ac_mac_len);

		/* order of following 2 lines MUST not be reversed */
		plaintext->cd_offset = plaintext->cd_length;
		plaintext->cd_length = saved_length - plaintext->cd_length;

		/* releases buffered plaintext and verifies the MAC */
		ret = ccm_decrypt_final((ccm_ctx_t *)aes_ctx, plaintext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			if (plaintext != ciphertext) {
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			}
		} else {
			/* restore the caller's length on failure */
			plaintext->cd_length = saved_length;
		}

		plaintext->cd_offset = saved_offset;
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		/* order of following 2 lines MUST not be reversed */
		plaintext->cd_offset = plaintext->cd_length;
		plaintext->cd_length = saved_length - plaintext->cd_length;

		/* releases buffered plaintext and verifies the tag */
		ret = gcm_decrypt_final((gcm_ctx_t *)aes_ctx, plaintext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			if (plaintext != ciphertext) {
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			}
		} else {
			/* restore the caller's length on failure */
			plaintext->cd_length = saved_length;
		}

		plaintext->cd_offset = saved_offset;
	}

	/* single-part operation: nothing may be left unprocessed */
	ASSERT(aes_ctx->ac_remainder_len == 0);

cleanup:
	(void) aes_free_context(ctx);

	return (ret);
}
642 631
643 632
/* ARGSUSED */
/*
 * Multi-part encrypt update.
 *
 * Processes as many whole AES blocks as the buffered remainder plus
 * the new plaintext provide, dispatching on the crypto_data_t format
 * (raw iovec, uio, or mblk chain).  For CTR mode — a stream cipher —
 * any trailing partial block is flushed immediately via
 * ctr_mode_final(), which does not destroy the context.
 * On success, ciphertext->cd_length is set to the bytes produced
 * (unless operating in place); on failure the caller's length is
 * restored.
 */
static int
aes_encrypt_update(crypto_ctx_t *ctx, crypto_data_t *plaintext,
    crypto_data_t *ciphertext, crypto_req_handle_t req)
{
	off_t saved_offset;
	size_t saved_length, out_len;
	int ret = CRYPTO_SUCCESS;
	aes_ctx_t *aes_ctx;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	AES_ARG_INPLACE(plaintext, ciphertext);

	/* compute number of bytes that will hold the ciphertext */
	out_len = aes_ctx->ac_remainder_len;
	out_len += plaintext->cd_length;
	out_len &= ~(AES_BLOCK_LEN - 1);	/* round down to whole blocks */

	/* return length needed to store the output */
	if (ciphertext->cd_length < out_len) {
		ciphertext->cd_length = out_len;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	saved_offset = ciphertext->cd_offset;
	saved_length = ciphertext->cd_length;

	/*
	 * Do the AES update on the specified input data.
	 */
	switch (plaintext->cd_format) {
	case CRYPTO_DATA_RAW:
		ret = crypto_update_iov(ctx->cc_provider_private,
		    plaintext, ciphertext, aes_encrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	case CRYPTO_DATA_UIO:
		ret = crypto_update_uio(ctx->cc_provider_private,
		    plaintext, ciphertext, aes_encrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	case CRYPTO_DATA_MBLK:
		ret = crypto_update_mp(ctx->cc_provider_private,
		    plaintext, ciphertext, aes_encrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	/*
	 * Since AES counter mode is a stream cipher, we call
	 * ctr_mode_final() to pick up any remaining bytes.
	 * It is an internal function that does not destroy
	 * the context like *normal* final routines.
	 */
	if ((aes_ctx->ac_flags & CTR_MODE) && (aes_ctx->ac_remainder_len > 0)) {
		ret = ctr_mode_final((ctr_ctx_t *)aes_ctx,
		    ciphertext, aes_encrypt_block);
	}

	if (ret == CRYPTO_SUCCESS) {
		if (plaintext != ciphertext)
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
	} else {
		ciphertext->cd_length = saved_length;
	}
	ciphertext->cd_offset = saved_offset;

	return (ret);
}
718 707
719 708
/*
 * Multi-part decrypt update.
 *
 * Mirrors aes_encrypt_update() for the decrypt direction.  The output
 * size pre-check is skipped for CCM/GCM/GMAC because those modes
 * buffer ciphertext internally and return no plaintext until the
 * final call.  For GCM/GMAC the kmem allocation flag is propagated to
 * the mode context before processing.  CTR-mode partial blocks are
 * flushed via ctr_mode_final(), with CRYPTO_DATA_LEN_RANGE mapped to
 * the decrypt-side CRYPTO_ENCRYPTED_DATA_LEN_RANGE.
 */
static int
aes_decrypt_update(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
    crypto_data_t *plaintext, crypto_req_handle_t req)
{
	off_t saved_offset;
	size_t saved_length, out_len;
	int ret = CRYPTO_SUCCESS;
	aes_ctx_t *aes_ctx;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	AES_ARG_INPLACE(ciphertext, plaintext);

	/*
	 * Compute number of bytes that will hold the plaintext.
	 * This is not necessary for CCM, GCM, and GMAC since these
	 * mechanisms never return plaintext for update operations.
	 */
	if ((aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) == 0) {
		out_len = aes_ctx->ac_remainder_len;
		out_len += ciphertext->cd_length;
		out_len &= ~(AES_BLOCK_LEN - 1);

		/* return length needed to store the output */
		if (plaintext->cd_length < out_len) {
			plaintext->cd_length = out_len;
			return (CRYPTO_BUFFER_TOO_SMALL);
		}
	}

	saved_offset = plaintext->cd_offset;
	saved_length = plaintext->cd_length;

	/* GCM buffers ciphertext internally; tell it how to allocate */
	if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE))
		gcm_set_kmflag((gcm_ctx_t *)aes_ctx, crypto_kmflag(req));

	/*
	 * Do the AES update on the specified input data.
	 */
	switch (ciphertext->cd_format) {
	case CRYPTO_DATA_RAW:
		ret = crypto_update_iov(ctx->cc_provider_private,
		    ciphertext, plaintext, aes_decrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	case CRYPTO_DATA_UIO:
		ret = crypto_update_uio(ctx->cc_provider_private,
		    ciphertext, plaintext, aes_decrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	case CRYPTO_DATA_MBLK:
		ret = crypto_update_mp(ctx->cc_provider_private,
		    ciphertext, plaintext, aes_decrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	/*
	 * Since AES counter mode is a stream cipher, we call
	 * ctr_mode_final() to pick up any remaining bytes.
	 * It is an internal function that does not destroy
	 * the context like *normal* final routines.
	 */
	if ((aes_ctx->ac_flags & CTR_MODE) && (aes_ctx->ac_remainder_len > 0)) {
		ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, plaintext,
		    aes_encrypt_block);
		if (ret == CRYPTO_DATA_LEN_RANGE)
			ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
	}

	if (ret == CRYPTO_SUCCESS) {
		if (ciphertext != plaintext)
			plaintext->cd_length =
			    plaintext->cd_offset - saved_offset;
	} else {
		plaintext->cd_length = saved_length;
	}
	plaintext->cd_offset = saved_offset;


	return (ret);
}
805 794
/* ARGSUSED */
/*
 * Multi-part encrypt final.
 *
 * CTR: flushes any buffered partial block.  CCM/GCM/GMAC: emits any
 * remaining ciphertext plus the authentication tag.  Plain block
 * modes (ECB/CBC): requires that no partial block remains, and
 * produces no further output.  The context is freed on success; on
 * failure it is left for the caller/framework to dispose of.
 */
static int
aes_encrypt_final(crypto_ctx_t *ctx, crypto_data_t *data,
    crypto_req_handle_t req)
{
	aes_ctx_t *aes_ctx;
	int ret;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	if (data->cd_format != CRYPTO_DATA_RAW &&
	    data->cd_format != CRYPTO_DATA_UIO &&
	    data->cd_format != CRYPTO_DATA_MBLK) {
		return (CRYPTO_ARGUMENTS_BAD);
	}

	if (aes_ctx->ac_flags & CTR_MODE) {
		/* stream cipher: flush the buffered partial block, if any */
		if (aes_ctx->ac_remainder_len > 0) {
			ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, data,
			    aes_encrypt_block);
			if (ret != CRYPTO_SUCCESS)
				return (ret);
		}
	} else if (aes_ctx->ac_flags & CCM_MODE) {
		ret = ccm_encrypt_final((ccm_ctx_t *)aes_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		size_t saved_offset = data->cd_offset;

		ret = gcm_encrypt_final((gcm_ctx_t *)aes_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
		/* report bytes produced by the final call */
		data->cd_length = data->cd_offset - saved_offset;
		data->cd_offset = saved_offset;
	} else {
		/*
		 * There must be no unprocessed plaintext.
		 * This happens if the length of the last data is
		 * not a multiple of the AES block length.
		 */
		if (aes_ctx->ac_remainder_len > 0) {
			return (CRYPTO_DATA_LEN_RANGE);
		}
		data->cd_length = 0;
	}

	(void) aes_free_context(ctx);

	return (CRYPTO_SUCCESS);
}
863 852
864 853 /* ARGSUSED */
865 854 static int
866 855 aes_decrypt_final(crypto_ctx_t *ctx, crypto_data_t *data,
867 856 crypto_req_handle_t req)
868 857 {
869 858 aes_ctx_t *aes_ctx;
870 859 int ret;
871 860 off_t saved_offset;
872 861 size_t saved_length;
873 862
874 863 ASSERT(ctx->cc_provider_private != NULL);
875 864 aes_ctx = ctx->cc_provider_private;
876 865
877 866 if (data->cd_format != CRYPTO_DATA_RAW &&
878 867 data->cd_format != CRYPTO_DATA_UIO &&
879 868 data->cd_format != CRYPTO_DATA_MBLK) {
880 869 return (CRYPTO_ARGUMENTS_BAD);
881 870 }
882 871
883 872 /*
884 873 * There must be no unprocessed ciphertext.
885 874 * This happens if the length of the last ciphertext is
886 875 * not a multiple of the AES block length.
887 876 */
888 877 if (aes_ctx->ac_remainder_len > 0) {
889 878 if ((aes_ctx->ac_flags & CTR_MODE) == 0)
890 879 return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
891 880 else {
892 881 ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, data,
893 882 aes_encrypt_block);
894 883 if (ret == CRYPTO_DATA_LEN_RANGE)
895 884 ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
896 885 if (ret != CRYPTO_SUCCESS)
897 886 return (ret);
898 887 }
899 888 }
900 889
901 890 if (aes_ctx->ac_flags & CCM_MODE) {
902 891 /*
903 892 * This is where all the plaintext is returned, make sure
904 893 * the plaintext buffer is big enough
905 894 */
906 895 size_t pt_len = aes_ctx->ac_data_len;
907 896 if (data->cd_length < pt_len) {
908 897 data->cd_length = pt_len;
909 898 return (CRYPTO_BUFFER_TOO_SMALL);
910 899 }
911 900
912 901 ASSERT(aes_ctx->ac_processed_data_len == pt_len);
913 902 ASSERT(aes_ctx->ac_processed_mac_len == aes_ctx->ac_mac_len);
914 903 saved_offset = data->cd_offset;
915 904 saved_length = data->cd_length;
916 905 ret = ccm_decrypt_final((ccm_ctx_t *)aes_ctx, data,
917 906 AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
918 907 aes_xor_block);
919 908 if (ret == CRYPTO_SUCCESS) {
920 909 data->cd_length = data->cd_offset - saved_offset;
921 910 } else {
922 911 data->cd_length = saved_length;
923 912 }
924 913
925 914 data->cd_offset = saved_offset;
926 915 if (ret != CRYPTO_SUCCESS) {
927 916 return (ret);
928 917 }
929 918 } else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
930 919 /*
931 920 * This is where all the plaintext is returned, make sure
932 921 * the plaintext buffer is big enough
933 922 */
934 923 gcm_ctx_t *ctx = (gcm_ctx_t *)aes_ctx;
935 924 size_t pt_len = ctx->gcm_processed_data_len - ctx->gcm_tag_len;
936 925
937 926 if (data->cd_length < pt_len) {
938 927 data->cd_length = pt_len;
939 928 return (CRYPTO_BUFFER_TOO_SMALL);
940 929 }
941 930
942 931 saved_offset = data->cd_offset;
943 932 saved_length = data->cd_length;
944 933 ret = gcm_decrypt_final((gcm_ctx_t *)aes_ctx, data,
945 934 AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
946 935 if (ret == CRYPTO_SUCCESS) {
947 936 data->cd_length = data->cd_offset - saved_offset;
948 937 } else {
949 938 data->cd_length = saved_length;
950 939 }
951 940
952 941 data->cd_offset = saved_offset;
953 942 if (ret != CRYPTO_SUCCESS) {
954 943 return (ret);
955 944 }
956 945 }
957 946
958 947
959 948 if ((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE)) == 0) {
960 949 data->cd_length = 0;
961 950 }
962 951
963 952 (void) aes_free_context(ctx);
964 953
965 954 return (CRYPTO_SUCCESS);
966 955 }
967 956
968 957 /* ARGSUSED */
969 958 static int
970 959 aes_encrypt_atomic(crypto_provider_handle_t provider,
971 960 crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
972 961 crypto_key_t *key, crypto_data_t *plaintext, crypto_data_t *ciphertext,
973 962 crypto_spi_ctx_template_t template, crypto_req_handle_t req)
974 963 {
975 964 aes_ctx_t aes_ctx; /* on the stack */
976 965 off_t saved_offset;
977 966 size_t saved_length;
978 967 size_t length_needed;
979 968 int ret;
980 969
981 970 AES_ARG_INPLACE(plaintext, ciphertext);
982 971
983 972 /*
984 973 * CTR, CCM, GCM, and GMAC modes do not require that plaintext
985 974 * be a multiple of AES block size.
986 975 */
987 976 switch (mechanism->cm_type) {
988 977 case AES_CTR_MECH_INFO_TYPE:
989 978 case AES_CCM_MECH_INFO_TYPE:
990 979 case AES_GCM_MECH_INFO_TYPE:
991 980 case AES_GMAC_MECH_INFO_TYPE:
992 981 break;
993 982 default:
994 983 if ((plaintext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
995 984 return (CRYPTO_DATA_LEN_RANGE);
996 985 }
997 986
998 987 if ((ret = aes_check_mech_param(mechanism, NULL, 0)) != CRYPTO_SUCCESS)
999 988 return (ret);
1000 989
1001 990 bzero(&aes_ctx, sizeof (aes_ctx_t));
1002 991
1003 992 ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key,
1004 993 crypto_kmflag(req), B_TRUE);
1005 994 if (ret != CRYPTO_SUCCESS)
1006 995 return (ret);
1007 996
1008 997 switch (mechanism->cm_type) {
1009 998 case AES_CCM_MECH_INFO_TYPE:
1010 999 length_needed = plaintext->cd_length + aes_ctx.ac_mac_len;
1011 1000 break;
1012 1001 case AES_GMAC_MECH_INFO_TYPE:
1013 1002 if (plaintext->cd_length != 0)
1014 1003 return (CRYPTO_ARGUMENTS_BAD);
1015 1004 /* FALLTHRU */
1016 1005 case AES_GCM_MECH_INFO_TYPE:
1017 1006 length_needed = plaintext->cd_length + aes_ctx.ac_tag_len;
1018 1007 break;
1019 1008 default:
1020 1009 length_needed = plaintext->cd_length;
1021 1010 }
1022 1011
1023 1012 /* return size of buffer needed to store output */
1024 1013 if (ciphertext->cd_length < length_needed) {
1025 1014 ciphertext->cd_length = length_needed;
1026 1015 ret = CRYPTO_BUFFER_TOO_SMALL;
1027 1016 goto out;
1028 1017 }
1029 1018
1030 1019 saved_offset = ciphertext->cd_offset;
1031 1020 saved_length = ciphertext->cd_length;
1032 1021
1033 1022 /*
1034 1023 * Do an update on the specified input data.
1035 1024 */
1036 1025 switch (plaintext->cd_format) {
1037 1026 case CRYPTO_DATA_RAW:
1038 1027 ret = crypto_update_iov(&aes_ctx, plaintext, ciphertext,
1039 1028 aes_encrypt_contiguous_blocks, aes_copy_block64);
1040 1029 break;
1041 1030 case CRYPTO_DATA_UIO:
1042 1031 ret = crypto_update_uio(&aes_ctx, plaintext, ciphertext,
1043 1032 aes_encrypt_contiguous_blocks, aes_copy_block64);
1044 1033 break;
1045 1034 case CRYPTO_DATA_MBLK:
1046 1035 ret = crypto_update_mp(&aes_ctx, plaintext, ciphertext,
1047 1036 aes_encrypt_contiguous_blocks, aes_copy_block64);
1048 1037 break;
1049 1038 default:
1050 1039 ret = CRYPTO_ARGUMENTS_BAD;
1051 1040 }
1052 1041
1053 1042 if (ret == CRYPTO_SUCCESS) {
1054 1043 if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
1055 1044 ret = ccm_encrypt_final((ccm_ctx_t *)&aes_ctx,
1056 1045 ciphertext, AES_BLOCK_LEN, aes_encrypt_block,
1057 1046 aes_xor_block);
1058 1047 if (ret != CRYPTO_SUCCESS)
1059 1048 goto out;
1060 1049 ASSERT(aes_ctx.ac_remainder_len == 0);
1061 1050 } else if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE ||
1062 1051 mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE) {
1063 1052 ret = gcm_encrypt_final((gcm_ctx_t *)&aes_ctx,
1064 1053 ciphertext, AES_BLOCK_LEN, aes_encrypt_block,
1065 1054 aes_copy_block, aes_xor_block);
1066 1055 if (ret != CRYPTO_SUCCESS)
1067 1056 goto out;
1068 1057 ASSERT(aes_ctx.ac_remainder_len == 0);
1069 1058 } else if (mechanism->cm_type == AES_CTR_MECH_INFO_TYPE) {
1070 1059 if (aes_ctx.ac_remainder_len > 0) {
1071 1060 ret = ctr_mode_final((ctr_ctx_t *)&aes_ctx,
1072 1061 ciphertext, aes_encrypt_block);
1073 1062 if (ret != CRYPTO_SUCCESS)
1074 1063 goto out;
1075 1064 }
1076 1065 } else {
1077 1066 ASSERT(aes_ctx.ac_remainder_len == 0);
1078 1067 }
1079 1068
1080 1069 if (plaintext != ciphertext) {
1081 1070 ciphertext->cd_length =
1082 1071 ciphertext->cd_offset - saved_offset;
1083 1072 }
1084 1073 } else {
1085 1074 ciphertext->cd_length = saved_length;
1086 1075 }
1087 1076 ciphertext->cd_offset = saved_offset;
1088 1077
1089 1078 out:
1090 1079 if (aes_ctx.ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
1091 1080 bzero(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
1092 1081 kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
1093 1082 }
1094 1083
1095 1084 return (ret);
1096 1085 }
1097 1086
1098 1087 /* ARGSUSED */
1099 1088 static int
1100 1089 aes_decrypt_atomic(crypto_provider_handle_t provider,
1101 1090 crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1102 1091 crypto_key_t *key, crypto_data_t *ciphertext, crypto_data_t *plaintext,
1103 1092 crypto_spi_ctx_template_t template, crypto_req_handle_t req)
1104 1093 {
1105 1094 aes_ctx_t aes_ctx; /* on the stack */
1106 1095 off_t saved_offset;
1107 1096 size_t saved_length;
1108 1097 size_t length_needed;
1109 1098 int ret;
1110 1099
1111 1100 AES_ARG_INPLACE(ciphertext, plaintext);
1112 1101
1113 1102 /*
1114 1103 * CCM, GCM, CTR, and GMAC modes do not require that ciphertext
1115 1104 * be a multiple of AES block size.
1116 1105 */
1117 1106 switch (mechanism->cm_type) {
1118 1107 case AES_CTR_MECH_INFO_TYPE:
1119 1108 case AES_CCM_MECH_INFO_TYPE:
1120 1109 case AES_GCM_MECH_INFO_TYPE:
1121 1110 case AES_GMAC_MECH_INFO_TYPE:
1122 1111 break;
1123 1112 default:
1124 1113 if ((ciphertext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
1125 1114 return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
1126 1115 }
1127 1116
1128 1117 if ((ret = aes_check_mech_param(mechanism, NULL, 0)) != CRYPTO_SUCCESS)
1129 1118 return (ret);
1130 1119
1131 1120 bzero(&aes_ctx, sizeof (aes_ctx_t));
1132 1121
1133 1122 ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key,
1134 1123 crypto_kmflag(req), B_FALSE);
1135 1124 if (ret != CRYPTO_SUCCESS)
1136 1125 return (ret);
1137 1126
1138 1127 switch (mechanism->cm_type) {
1139 1128 case AES_CCM_MECH_INFO_TYPE:
1140 1129 length_needed = aes_ctx.ac_data_len;
1141 1130 break;
1142 1131 case AES_GCM_MECH_INFO_TYPE:
1143 1132 length_needed = ciphertext->cd_length - aes_ctx.ac_tag_len;
1144 1133 break;
1145 1134 case AES_GMAC_MECH_INFO_TYPE:
1146 1135 if (plaintext->cd_length != 0)
1147 1136 return (CRYPTO_ARGUMENTS_BAD);
1148 1137 length_needed = 0;
1149 1138 break;
1150 1139 default:
1151 1140 length_needed = ciphertext->cd_length;
1152 1141 }
1153 1142
1154 1143 /* return size of buffer needed to store output */
1155 1144 if (plaintext->cd_length < length_needed) {
1156 1145 plaintext->cd_length = length_needed;
1157 1146 ret = CRYPTO_BUFFER_TOO_SMALL;
1158 1147 goto out;
1159 1148 }
1160 1149
1161 1150 saved_offset = plaintext->cd_offset;
1162 1151 saved_length = plaintext->cd_length;
1163 1152
1164 1153 if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE ||
1165 1154 mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE)
1166 1155 gcm_set_kmflag((gcm_ctx_t *)&aes_ctx, crypto_kmflag(req));
1167 1156
1168 1157 /*
1169 1158 * Do an update on the specified input data.
1170 1159 */
1171 1160 switch (ciphertext->cd_format) {
1172 1161 case CRYPTO_DATA_RAW:
1173 1162 ret = crypto_update_iov(&aes_ctx, ciphertext, plaintext,
1174 1163 aes_decrypt_contiguous_blocks, aes_copy_block64);
1175 1164 break;
1176 1165 case CRYPTO_DATA_UIO:
1177 1166 ret = crypto_update_uio(&aes_ctx, ciphertext, plaintext,
1178 1167 aes_decrypt_contiguous_blocks, aes_copy_block64);
1179 1168 break;
1180 1169 case CRYPTO_DATA_MBLK:
1181 1170 ret = crypto_update_mp(&aes_ctx, ciphertext, plaintext,
1182 1171 aes_decrypt_contiguous_blocks, aes_copy_block64);
1183 1172 break;
1184 1173 default:
1185 1174 ret = CRYPTO_ARGUMENTS_BAD;
1186 1175 }
1187 1176
1188 1177 if (ret == CRYPTO_SUCCESS) {
1189 1178 if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
1190 1179 ASSERT(aes_ctx.ac_processed_data_len
1191 1180 == aes_ctx.ac_data_len);
1192 1181 ASSERT(aes_ctx.ac_processed_mac_len
1193 1182 == aes_ctx.ac_mac_len);
1194 1183 ret = ccm_decrypt_final((ccm_ctx_t *)&aes_ctx,
1195 1184 plaintext, AES_BLOCK_LEN, aes_encrypt_block,
1196 1185 aes_copy_block, aes_xor_block);
1197 1186 ASSERT(aes_ctx.ac_remainder_len == 0);
1198 1187 if ((ret == CRYPTO_SUCCESS) &&
1199 1188 (ciphertext != plaintext)) {
1200 1189 plaintext->cd_length =
1201 1190 plaintext->cd_offset - saved_offset;
1202 1191 } else {
1203 1192 plaintext->cd_length = saved_length;
1204 1193 }
1205 1194 } else if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE ||
1206 1195 mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE) {
1207 1196 ret = gcm_decrypt_final((gcm_ctx_t *)&aes_ctx,
1208 1197 plaintext, AES_BLOCK_LEN, aes_encrypt_block,
1209 1198 aes_xor_block);
1210 1199 ASSERT(aes_ctx.ac_remainder_len == 0);
1211 1200 if ((ret == CRYPTO_SUCCESS) &&
1212 1201 (ciphertext != plaintext)) {
1213 1202 plaintext->cd_length =
1214 1203 plaintext->cd_offset - saved_offset;
1215 1204 } else {
1216 1205 plaintext->cd_length = saved_length;
1217 1206 }
1218 1207 } else if (mechanism->cm_type != AES_CTR_MECH_INFO_TYPE) {
1219 1208 ASSERT(aes_ctx.ac_remainder_len == 0);
1220 1209 if (ciphertext != plaintext)
1221 1210 plaintext->cd_length =
1222 1211 plaintext->cd_offset - saved_offset;
1223 1212 } else {
1224 1213 if (aes_ctx.ac_remainder_len > 0) {
1225 1214 ret = ctr_mode_final((ctr_ctx_t *)&aes_ctx,
1226 1215 plaintext, aes_encrypt_block);
1227 1216 if (ret == CRYPTO_DATA_LEN_RANGE)
1228 1217 ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
1229 1218 if (ret != CRYPTO_SUCCESS)
1230 1219 goto out;
1231 1220 }
1232 1221 if (ciphertext != plaintext)
1233 1222 plaintext->cd_length =
1234 1223 plaintext->cd_offset - saved_offset;
1235 1224 }
1236 1225 } else {
1237 1226 plaintext->cd_length = saved_length;
1238 1227 }
1239 1228 plaintext->cd_offset = saved_offset;
1240 1229
1241 1230 out:
1242 1231 if (aes_ctx.ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
1243 1232 bzero(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
1244 1233 kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
1245 1234 }
1246 1235
1247 1236 if (aes_ctx.ac_flags & CCM_MODE) {
1248 1237 if (aes_ctx.ac_pt_buf != NULL) {
1249 1238 kmem_free(aes_ctx.ac_pt_buf, aes_ctx.ac_data_len);
1250 1239 }
1251 1240 } else if (aes_ctx.ac_flags & (GCM_MODE|GMAC_MODE)) {
1252 1241 if (((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf != NULL) {
1253 1242 kmem_free(((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf,
1254 1243 ((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf_len);
1255 1244 }
1256 1245 }
1257 1246
1258 1247 return (ret);
1259 1248 }
1260 1249
1261 1250 /*
1262 1251 * KCF software provider context template entry points.
1263 1252 */
1264 1253 /* ARGSUSED */
1265 1254 static int
1266 1255 aes_create_ctx_template(crypto_provider_handle_t provider,
1267 1256 crypto_mechanism_t *mechanism, crypto_key_t *key,
1268 1257 crypto_spi_ctx_template_t *tmpl, size_t *tmpl_size, crypto_req_handle_t req)
1269 1258 {
1270 1259 void *keysched;
1271 1260 size_t size;
1272 1261 int rv;
1273 1262
1274 1263 if (mechanism->cm_type != AES_ECB_MECH_INFO_TYPE &&
1275 1264 mechanism->cm_type != AES_CBC_MECH_INFO_TYPE &&
1276 1265 mechanism->cm_type != AES_CTR_MECH_INFO_TYPE &&
1277 1266 mechanism->cm_type != AES_CCM_MECH_INFO_TYPE &&
1278 1267 mechanism->cm_type != AES_GCM_MECH_INFO_TYPE &&
1279 1268 mechanism->cm_type != AES_GMAC_MECH_INFO_TYPE)
1280 1269 return (CRYPTO_MECHANISM_INVALID);
1281 1270
1282 1271 if ((keysched = aes_alloc_keysched(&size,
1283 1272 crypto_kmflag(req))) == NULL) {
1284 1273 return (CRYPTO_HOST_MEMORY);
1285 1274 }
1286 1275
1287 1276 /*
1288 1277 * Initialize key schedule. Key length information is stored
1289 1278 * in the key.
1290 1279 */
1291 1280 if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) {
1292 1281 bzero(keysched, size);
1293 1282 kmem_free(keysched, size);
1294 1283 return (rv);
1295 1284 }
1296 1285
1297 1286 *tmpl = keysched;
1298 1287 *tmpl_size = size;
1299 1288
1300 1289 return (CRYPTO_SUCCESS);
1301 1290 }
1302 1291
1303 1292
1304 1293 static int
1305 1294 aes_free_context(crypto_ctx_t *ctx)
1306 1295 {
1307 1296 aes_ctx_t *aes_ctx = ctx->cc_provider_private;
1308 1297
1309 1298 if (aes_ctx != NULL) {
1310 1299 if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
1311 1300 ASSERT(aes_ctx->ac_keysched_len != 0);
1312 1301 bzero(aes_ctx->ac_keysched, aes_ctx->ac_keysched_len);
1313 1302 kmem_free(aes_ctx->ac_keysched,
1314 1303 aes_ctx->ac_keysched_len);
1315 1304 }
1316 1305 crypto_free_mode_ctx(aes_ctx);
1317 1306 ctx->cc_provider_private = NULL;
1318 1307 }
1319 1308
1320 1309 return (CRYPTO_SUCCESS);
1321 1310 }
1322 1311
1323 1312
1324 1313 static int
1325 1314 aes_common_init_ctx(aes_ctx_t *aes_ctx, crypto_spi_ctx_template_t *template,
1326 1315 crypto_mechanism_t *mechanism, crypto_key_t *key, int kmflag,
1327 1316 boolean_t is_encrypt_init)
1328 1317 {
1329 1318 int rv = CRYPTO_SUCCESS;
1330 1319 void *keysched;
1331 1320 size_t size;
1332 1321
1333 1322 if (template == NULL) {
1334 1323 if ((keysched = aes_alloc_keysched(&size, kmflag)) == NULL)
1335 1324 return (CRYPTO_HOST_MEMORY);
1336 1325 /*
1337 1326 * Initialize key schedule.
1338 1327 * Key length is stored in the key.
1339 1328 */
1340 1329 if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) {
1341 1330 kmem_free(keysched, size);
1342 1331 return (rv);
1343 1332 }
1344 1333
1345 1334 aes_ctx->ac_flags |= PROVIDER_OWNS_KEY_SCHEDULE;
1346 1335 aes_ctx->ac_keysched_len = size;
1347 1336 } else {
1348 1337 keysched = template;
1349 1338 }
1350 1339 aes_ctx->ac_keysched = keysched;
1351 1340
1352 1341 switch (mechanism->cm_type) {
1353 1342 case AES_CBC_MECH_INFO_TYPE:
1354 1343 rv = cbc_init_ctx((cbc_ctx_t *)aes_ctx, mechanism->cm_param,
1355 1344 mechanism->cm_param_len, AES_BLOCK_LEN, aes_copy_block64);
1356 1345 break;
1357 1346 case AES_CTR_MECH_INFO_TYPE: {
1358 1347 CK_AES_CTR_PARAMS *pp;
1359 1348
1360 1349 if (mechanism->cm_param == NULL ||
1361 1350 mechanism->cm_param_len != sizeof (CK_AES_CTR_PARAMS)) {
1362 1351 return (CRYPTO_MECHANISM_PARAM_INVALID);
1363 1352 }
1364 1353 pp = (CK_AES_CTR_PARAMS *)(void *)mechanism->cm_param;
1365 1354 rv = ctr_init_ctx((ctr_ctx_t *)aes_ctx, pp->ulCounterBits,
1366 1355 pp->cb, aes_copy_block);
1367 1356 break;
1368 1357 }
1369 1358 case AES_CCM_MECH_INFO_TYPE:
1370 1359 if (mechanism->cm_param == NULL ||
1371 1360 mechanism->cm_param_len != sizeof (CK_AES_CCM_PARAMS)) {
1372 1361 return (CRYPTO_MECHANISM_PARAM_INVALID);
1373 1362 }
1374 1363 rv = ccm_init_ctx((ccm_ctx_t *)aes_ctx, mechanism->cm_param,
1375 1364 kmflag, is_encrypt_init, AES_BLOCK_LEN, aes_encrypt_block,
1376 1365 aes_xor_block);
1377 1366 break;
1378 1367 case AES_GCM_MECH_INFO_TYPE:
1379 1368 if (mechanism->cm_param == NULL ||
1380 1369 mechanism->cm_param_len != sizeof (CK_AES_GCM_PARAMS)) {
1381 1370 return (CRYPTO_MECHANISM_PARAM_INVALID);
1382 1371 }
1383 1372 rv = gcm_init_ctx((gcm_ctx_t *)aes_ctx, mechanism->cm_param,
1384 1373 AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
1385 1374 aes_xor_block);
1386 1375 break;
1387 1376 case AES_GMAC_MECH_INFO_TYPE:
1388 1377 if (mechanism->cm_param == NULL ||
1389 1378 mechanism->cm_param_len != sizeof (CK_AES_GMAC_PARAMS)) {
1390 1379 return (CRYPTO_MECHANISM_PARAM_INVALID);
1391 1380 }
1392 1381 rv = gmac_init_ctx((gcm_ctx_t *)aes_ctx, mechanism->cm_param,
1393 1382 AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
1394 1383 aes_xor_block);
1395 1384 break;
1396 1385 case AES_ECB_MECH_INFO_TYPE:
1397 1386 aes_ctx->ac_flags |= ECB_MODE;
1398 1387 }
1399 1388
1400 1389 if (rv != CRYPTO_SUCCESS) {
1401 1390 if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
1402 1391 bzero(keysched, size);
1403 1392 kmem_free(keysched, size);
1404 1393 }
1405 1394 }
1406 1395
1407 1396 return (rv);
1408 1397 }
1409 1398
1410 1399 static int
1411 1400 process_gmac_mech(crypto_mechanism_t *mech, crypto_data_t *data,
1412 1401 CK_AES_GCM_PARAMS *gcm_params)
1413 1402 {
1414 1403 /* LINTED: pointer alignment */
1415 1404 CK_AES_GMAC_PARAMS *params = (CK_AES_GMAC_PARAMS *)mech->cm_param;
1416 1405
1417 1406 if (mech->cm_type != AES_GMAC_MECH_INFO_TYPE)
1418 1407 return (CRYPTO_MECHANISM_INVALID);
1419 1408
1420 1409 if (mech->cm_param_len != sizeof (CK_AES_GMAC_PARAMS))
1421 1410 return (CRYPTO_MECHANISM_PARAM_INVALID);
1422 1411
1423 1412 if (params->pIv == NULL)
1424 1413 return (CRYPTO_MECHANISM_PARAM_INVALID);
1425 1414
1426 1415 gcm_params->pIv = params->pIv;
1427 1416 gcm_params->ulIvLen = AES_GMAC_IV_LEN;
1428 1417 gcm_params->ulTagBits = AES_GMAC_TAG_BITS;
1429 1418
1430 1419 if (data == NULL)
1431 1420 return (CRYPTO_SUCCESS);
1432 1421
1433 1422 if (data->cd_format != CRYPTO_DATA_RAW)
1434 1423 return (CRYPTO_ARGUMENTS_BAD);
1435 1424
1436 1425 gcm_params->pAAD = (uchar_t *)data->cd_raw.iov_base;
1437 1426 gcm_params->ulAADLen = data->cd_length;
1438 1427 return (CRYPTO_SUCCESS);
1439 1428 }
1440 1429
1441 1430 static int
1442 1431 aes_mac_atomic(crypto_provider_handle_t provider,
1443 1432 crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1444 1433 crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
1445 1434 crypto_spi_ctx_template_t template, crypto_req_handle_t req)
1446 1435 {
1447 1436 CK_AES_GCM_PARAMS gcm_params;
1448 1437 crypto_mechanism_t gcm_mech;
1449 1438 int rv;
1450 1439
1451 1440 if ((rv = process_gmac_mech(mechanism, data, &gcm_params))
1452 1441 != CRYPTO_SUCCESS)
1453 1442 return (rv);
1454 1443
1455 1444 gcm_mech.cm_type = AES_GCM_MECH_INFO_TYPE;
1456 1445 gcm_mech.cm_param_len = sizeof (CK_AES_GCM_PARAMS);
1457 1446 gcm_mech.cm_param = (char *)&gcm_params;
1458 1447
1459 1448 return (aes_encrypt_atomic(provider, session_id, &gcm_mech,
1460 1449 key, &null_crypto_data, mac, template, req));
1461 1450 }
1462 1451
1463 1452 static int
1464 1453 aes_mac_verify_atomic(crypto_provider_handle_t provider,
1465 1454 crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1466 1455 crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
1467 1456 crypto_spi_ctx_template_t template, crypto_req_handle_t req)
1468 1457 {
1469 1458 CK_AES_GCM_PARAMS gcm_params;
1470 1459 crypto_mechanism_t gcm_mech;
1471 1460 int rv;
1472 1461
1473 1462 if ((rv = process_gmac_mech(mechanism, data, &gcm_params))
1474 1463 != CRYPTO_SUCCESS)
1475 1464 return (rv);
1476 1465
1477 1466 gcm_mech.cm_type = AES_GCM_MECH_INFO_TYPE;
1478 1467 gcm_mech.cm_param_len = sizeof (CK_AES_GCM_PARAMS);
1479 1468 gcm_mech.cm_param = (char *)&gcm_params;
1480 1469
1481 1470 return (aes_decrypt_atomic(provider, session_id, &gcm_mech,
1482 1471 key, mac, &null_crypto_data, template, req));
1483 1472 }
↓ open down ↓ |
1403 lines elided |
↑ open up ↑ |
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX