289 break;
290 case AES_GMAC_MECH_INFO_TYPE:
291 param_len = sizeof (CK_AES_GMAC_PARAMS);
292 alloc_fun = gmac_alloc_ctx;
293 break;
294 default:
295 rv = CRYPTO_MECHANISM_INVALID;
296 return (rv);
297 }
298 if (param_required && mechanism->cm_param != NULL &&
299 mechanism->cm_param_len != param_len) {
300 rv = CRYPTO_MECHANISM_PARAM_INVALID;
301 }
302 if (ctx != NULL) {
303 p = (alloc_fun)(kmflag);
304 *ctx = p;
305 }
306 return (rv);
307 }
308
309 /* EXPORT DELETE START */
310
311 /*
312 * Initialize key schedules for AES
313 */
314 static int
315 init_keysched(crypto_key_t *key, void *newbie)
316 {
317 /*
318 * Only keys by value are supported by this module.
319 */
320 switch (key->ck_format) {
321 case CRYPTO_KEY_RAW:
322 if (key->ck_length < AES_MINBITS ||
323 key->ck_length > AES_MAXBITS) {
324 return (CRYPTO_KEY_SIZE_RANGE);
325 }
326
327 /* key length must be either 128, 192, or 256 */
328 if ((key->ck_length & 63) != 0)
329 return (CRYPTO_KEY_SIZE_RANGE);
330 break;
331 default:
332 return (CRYPTO_KEY_TYPE_INCONSISTENT);
333 }
334
335 aes_init_keysched(key->ck_data, key->ck_length, newbie);
336 return (CRYPTO_SUCCESS);
337 }
338
339 /* EXPORT DELETE END */
340
341 /*
342 * KCF software provider control entry points.
343 */
/*
 * KCF provider-status callback.  This software provider has no hardware
 * dependencies, so it is unconditionally ready.
 */
/* ARGSUSED */
static void
aes_provider_status(crypto_provider_handle_t provider, uint_t *status)
{
	*status = CRYPTO_PROVIDER_READY;
}
350
351 static int
352 aes_encrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
353 crypto_key_t *key, crypto_spi_ctx_template_t template,
354 crypto_req_handle_t req) {
355 return (aes_common_init(ctx, mechanism, key, template, req, B_TRUE));
356 }
357
358 static int
359 aes_decrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
360 crypto_key_t *key, crypto_spi_ctx_template_t template,
361 crypto_req_handle_t req) {
362 return (aes_common_init(ctx, mechanism, key, template, req, B_FALSE));
363 }
364
365
366
367 /*
368 * KCF software provider encrypt entry points.
369 */
370 static int
371 aes_common_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
372 crypto_key_t *key, crypto_spi_ctx_template_t template,
373 crypto_req_handle_t req, boolean_t is_encrypt_init)
374 {
375
376 /* EXPORT DELETE START */
377
378 aes_ctx_t *aes_ctx;
379 int rv;
380 int kmflag;
381
382 /*
383 * Only keys by value are supported by this module.
384 */
385 if (key->ck_format != CRYPTO_KEY_RAW) {
386 return (CRYPTO_KEY_TYPE_INCONSISTENT);
387 }
388
389 kmflag = crypto_kmflag(req);
390 if ((rv = aes_check_mech_param(mechanism, &aes_ctx, kmflag))
391 != CRYPTO_SUCCESS)
392 return (rv);
393
394 rv = aes_common_init_ctx(aes_ctx, template, mechanism, key, kmflag,
395 is_encrypt_init);
396 if (rv != CRYPTO_SUCCESS) {
397 crypto_free_mode_ctx(aes_ctx);
398 return (rv);
399 }
400
401 ctx->cc_provider_private = aes_ctx;
402
403 /* EXPORT DELETE END */
404
405 return (CRYPTO_SUCCESS);
406 }
407
408 static void
409 aes_copy_block64(uint8_t *in, uint64_t *out)
410 {
411 if (IS_P2ALIGNED(in, sizeof (uint64_t))) {
412 /* LINTED: pointer alignment */
413 out[0] = *(uint64_t *)&in[0];
414 /* LINTED: pointer alignment */
415 out[1] = *(uint64_t *)&in[8];
416 } else {
417 uint8_t *iv8 = (uint8_t *)&out[0];
418
419 AES_COPY_BLOCK(in, iv8);
420 }
421 }
422
423
424 static int
425 aes_encrypt(crypto_ctx_t *ctx, crypto_data_t *plaintext,
426 crypto_data_t *ciphertext, crypto_req_handle_t req)
427 {
428 int ret = CRYPTO_FAILED;
429
430 /* EXPORT DELETE START */
431
432 aes_ctx_t *aes_ctx;
433 size_t saved_length, saved_offset, length_needed;
434
435 ASSERT(ctx->cc_provider_private != NULL);
436 aes_ctx = ctx->cc_provider_private;
437
438 /*
439 * For block ciphers, plaintext must be a multiple of AES block size.
440 * This test is only valid for ciphers whose blocksize is a power of 2.
441 */
442 if (((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE))
443 == 0) && (plaintext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
444 return (CRYPTO_DATA_LEN_RANGE);
445
446 AES_ARG_INPLACE(plaintext, ciphertext);
447
448 /*
449 * We need to just return the length needed to store the output.
450 * We should not destroy the context for the following case.
451 */
517 /* order of following 2 lines MUST not be reversed */
518 ciphertext->cd_offset = ciphertext->cd_length;
519 ciphertext->cd_length = saved_length - ciphertext->cd_length;
520 ret = gcm_encrypt_final((gcm_ctx_t *)aes_ctx, ciphertext,
521 AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
522 aes_xor_block);
523 if (ret != CRYPTO_SUCCESS) {
524 return (ret);
525 }
526
527 if (plaintext != ciphertext) {
528 ciphertext->cd_length =
529 ciphertext->cd_offset - saved_offset;
530 }
531 ciphertext->cd_offset = saved_offset;
532 }
533
534 ASSERT(aes_ctx->ac_remainder_len == 0);
535 (void) aes_free_context(ctx);
536
537 /* EXPORT DELETE END */
538
539 return (ret);
540 }
541
542
543 static int
544 aes_decrypt(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
545 crypto_data_t *plaintext, crypto_req_handle_t req)
546 {
547 int ret = CRYPTO_FAILED;
548
549 /* EXPORT DELETE START */
550
551 aes_ctx_t *aes_ctx;
552 off_t saved_offset;
553 size_t saved_length, length_needed;
554
555 ASSERT(ctx->cc_provider_private != NULL);
556 aes_ctx = ctx->cc_provider_private;
557
558 /*
559 * For block ciphers, plaintext must be a multiple of AES block size.
560 * This test is only valid for ciphers whose blocksize is a power of 2.
561 */
562 if (((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE))
563 == 0) && (ciphertext->cd_length & (AES_BLOCK_LEN - 1)) != 0) {
564 return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
565 }
566
567 AES_ARG_INPLACE(ciphertext, plaintext);
568
569 /*
570 * Return length needed to store the output.
635
636 ret = gcm_decrypt_final((gcm_ctx_t *)aes_ctx, plaintext,
637 AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
638 if (ret == CRYPTO_SUCCESS) {
639 if (plaintext != ciphertext) {
640 plaintext->cd_length =
641 plaintext->cd_offset - saved_offset;
642 }
643 } else {
644 plaintext->cd_length = saved_length;
645 }
646
647 plaintext->cd_offset = saved_offset;
648 }
649
650 ASSERT(aes_ctx->ac_remainder_len == 0);
651
652 cleanup:
653 (void) aes_free_context(ctx);
654
655 /* EXPORT DELETE END */
656
657 return (ret);
658 }
659
660
661 /* ARGSUSED */
662 static int
663 aes_encrypt_update(crypto_ctx_t *ctx, crypto_data_t *plaintext,
664 crypto_data_t *ciphertext, crypto_req_handle_t req)
665 {
666 off_t saved_offset;
667 size_t saved_length, out_len;
668 int ret = CRYPTO_SUCCESS;
669 aes_ctx_t *aes_ctx;
670
671 ASSERT(ctx->cc_provider_private != NULL);
672 aes_ctx = ctx->cc_provider_private;
673
674 AES_ARG_INPLACE(plaintext, ciphertext);
675
676 /* compute number of bytes that will hold the ciphertext */
808 }
809
810 if (ret == CRYPTO_SUCCESS) {
811 if (ciphertext != plaintext)
812 plaintext->cd_length =
813 plaintext->cd_offset - saved_offset;
814 } else {
815 plaintext->cd_length = saved_length;
816 }
817 plaintext->cd_offset = saved_offset;
818
819
820 return (ret);
821 }
822
823 /* ARGSUSED */
824 static int
825 aes_encrypt_final(crypto_ctx_t *ctx, crypto_data_t *data,
826 crypto_req_handle_t req)
827 {
828
829 /* EXPORT DELETE START */
830
831 aes_ctx_t *aes_ctx;
832 int ret;
833
834 ASSERT(ctx->cc_provider_private != NULL);
835 aes_ctx = ctx->cc_provider_private;
836
837 if (data->cd_format != CRYPTO_DATA_RAW &&
838 data->cd_format != CRYPTO_DATA_UIO &&
839 data->cd_format != CRYPTO_DATA_MBLK) {
840 return (CRYPTO_ARGUMENTS_BAD);
841 }
842
843 if (aes_ctx->ac_flags & CTR_MODE) {
844 if (aes_ctx->ac_remainder_len > 0) {
845 ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, data,
846 aes_encrypt_block);
847 if (ret != CRYPTO_SUCCESS)
848 return (ret);
849 }
850 } else if (aes_ctx->ac_flags & CCM_MODE) {
861 aes_xor_block);
862 if (ret != CRYPTO_SUCCESS) {
863 return (ret);
864 }
865 data->cd_length = data->cd_offset - saved_offset;
866 data->cd_offset = saved_offset;
867 } else {
868 /*
869 * There must be no unprocessed plaintext.
870 * This happens if the length of the last data is
871 * not a multiple of the AES block length.
872 */
873 if (aes_ctx->ac_remainder_len > 0) {
874 return (CRYPTO_DATA_LEN_RANGE);
875 }
876 data->cd_length = 0;
877 }
878
879 (void) aes_free_context(ctx);
880
881 /* EXPORT DELETE END */
882
883 return (CRYPTO_SUCCESS);
884 }
885
886 /* ARGSUSED */
887 static int
888 aes_decrypt_final(crypto_ctx_t *ctx, crypto_data_t *data,
889 crypto_req_handle_t req)
890 {
891
892 /* EXPORT DELETE START */
893
894 aes_ctx_t *aes_ctx;
895 int ret;
896 off_t saved_offset;
897 size_t saved_length;
898
899 ASSERT(ctx->cc_provider_private != NULL);
900 aes_ctx = ctx->cc_provider_private;
901
902 if (data->cd_format != CRYPTO_DATA_RAW &&
903 data->cd_format != CRYPTO_DATA_UIO &&
904 data->cd_format != CRYPTO_DATA_MBLK) {
905 return (CRYPTO_ARGUMENTS_BAD);
906 }
907
908 /*
909 * There must be no unprocessed ciphertext.
910 * This happens if the length of the last ciphertext is
911 * not a multiple of the AES block length.
912 */
913 if (aes_ctx->ac_remainder_len > 0) {
970 AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
971 if (ret == CRYPTO_SUCCESS) {
972 data->cd_length = data->cd_offset - saved_offset;
973 } else {
974 data->cd_length = saved_length;
975 }
976
977 data->cd_offset = saved_offset;
978 if (ret != CRYPTO_SUCCESS) {
979 return (ret);
980 }
981 }
982
983
984 if ((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE)) == 0) {
985 data->cd_length = 0;
986 }
987
988 (void) aes_free_context(ctx);
989
990 /* EXPORT DELETE END */
991
992 return (CRYPTO_SUCCESS);
993 }
994
995 /* ARGSUSED */
996 static int
997 aes_encrypt_atomic(crypto_provider_handle_t provider,
998 crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
999 crypto_key_t *key, crypto_data_t *plaintext, crypto_data_t *ciphertext,
1000 crypto_spi_ctx_template_t template, crypto_req_handle_t req)
1001 {
1002 aes_ctx_t aes_ctx; /* on the stack */
1003 off_t saved_offset;
1004 size_t saved_length;
1005 size_t length_needed;
1006 int ret;
1007
1008 AES_ARG_INPLACE(plaintext, ciphertext);
1009
1010 /*
1011 * CTR, CCM, GCM, and GMAC modes do not require that plaintext
1277 }
1278 } else if (aes_ctx.ac_flags & (GCM_MODE|GMAC_MODE)) {
1279 if (((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf != NULL) {
1280 kmem_free(((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf,
1281 ((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf_len);
1282 }
1283 }
1284
1285 return (ret);
1286 }
1287
1288 /*
1289 * KCF software provider context template entry points.
1290 */
1291 /* ARGSUSED */
1292 static int
1293 aes_create_ctx_template(crypto_provider_handle_t provider,
1294 crypto_mechanism_t *mechanism, crypto_key_t *key,
1295 crypto_spi_ctx_template_t *tmpl, size_t *tmpl_size, crypto_req_handle_t req)
1296 {
1297
1298 /* EXPORT DELETE START */
1299
1300 void *keysched;
1301 size_t size;
1302 int rv;
1303
1304 if (mechanism->cm_type != AES_ECB_MECH_INFO_TYPE &&
1305 mechanism->cm_type != AES_CBC_MECH_INFO_TYPE &&
1306 mechanism->cm_type != AES_CTR_MECH_INFO_TYPE &&
1307 mechanism->cm_type != AES_CCM_MECH_INFO_TYPE &&
1308 mechanism->cm_type != AES_GCM_MECH_INFO_TYPE &&
1309 mechanism->cm_type != AES_GMAC_MECH_INFO_TYPE)
1310 return (CRYPTO_MECHANISM_INVALID);
1311
1312 if ((keysched = aes_alloc_keysched(&size,
1313 crypto_kmflag(req))) == NULL) {
1314 return (CRYPTO_HOST_MEMORY);
1315 }
1316
1317 /*
1318 * Initialize key schedule. Key length information is stored
1319 * in the key.
1320 */
1321 if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) {
1322 bzero(keysched, size);
1323 kmem_free(keysched, size);
1324 return (rv);
1325 }
1326
1327 *tmpl = keysched;
1328 *tmpl_size = size;
1329
1330 /* EXPORT DELETE END */
1331
1332 return (CRYPTO_SUCCESS);
1333 }
1334
1335
1336 static int
1337 aes_free_context(crypto_ctx_t *ctx)
1338 {
1339
1340 /* EXPORT DELETE START */
1341
1342 aes_ctx_t *aes_ctx = ctx->cc_provider_private;
1343
1344 if (aes_ctx != NULL) {
1345 if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
1346 ASSERT(aes_ctx->ac_keysched_len != 0);
1347 bzero(aes_ctx->ac_keysched, aes_ctx->ac_keysched_len);
1348 kmem_free(aes_ctx->ac_keysched,
1349 aes_ctx->ac_keysched_len);
1350 }
1351 crypto_free_mode_ctx(aes_ctx);
1352 ctx->cc_provider_private = NULL;
1353 }
1354
1355 /* EXPORT DELETE END */
1356
1357 return (CRYPTO_SUCCESS);
1358 }
1359
1360
1361 static int
1362 aes_common_init_ctx(aes_ctx_t *aes_ctx, crypto_spi_ctx_template_t *template,
1363 crypto_mechanism_t *mechanism, crypto_key_t *key, int kmflag,
1364 boolean_t is_encrypt_init)
1365 {
1366 int rv = CRYPTO_SUCCESS;
1367
1368 /* EXPORT DELETE START */
1369
1370 void *keysched;
1371 size_t size;
1372
1373 if (template == NULL) {
1374 if ((keysched = aes_alloc_keysched(&size, kmflag)) == NULL)
1375 return (CRYPTO_HOST_MEMORY);
1376 /*
1377 * Initialize key schedule.
1378 * Key length is stored in the key.
1379 */
1380 if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) {
1381 kmem_free(keysched, size);
1382 return (rv);
1383 }
1384
1385 aes_ctx->ac_flags |= PROVIDER_OWNS_KEY_SCHEDULE;
1386 aes_ctx->ac_keysched_len = size;
1387 } else {
1388 keysched = template;
1389 }
1427 case AES_GMAC_MECH_INFO_TYPE:
1428 if (mechanism->cm_param == NULL ||
1429 mechanism->cm_param_len != sizeof (CK_AES_GMAC_PARAMS)) {
1430 return (CRYPTO_MECHANISM_PARAM_INVALID);
1431 }
1432 rv = gmac_init_ctx((gcm_ctx_t *)aes_ctx, mechanism->cm_param,
1433 AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
1434 aes_xor_block);
1435 break;
1436 case AES_ECB_MECH_INFO_TYPE:
1437 aes_ctx->ac_flags |= ECB_MODE;
1438 }
1439
1440 if (rv != CRYPTO_SUCCESS) {
1441 if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
1442 bzero(keysched, size);
1443 kmem_free(keysched, size);
1444 }
1445 }
1446
1447 /* EXPORT DELETE END */
1448
1449 return (rv);
1450 }
1451
1452 static int
1453 process_gmac_mech(crypto_mechanism_t *mech, crypto_data_t *data,
1454 CK_AES_GCM_PARAMS *gcm_params)
1455 {
1456 /* LINTED: pointer alignment */
1457 CK_AES_GMAC_PARAMS *params = (CK_AES_GMAC_PARAMS *)mech->cm_param;
1458
1459 if (mech->cm_type != AES_GMAC_MECH_INFO_TYPE)
1460 return (CRYPTO_MECHANISM_INVALID);
1461
1462 if (mech->cm_param_len != sizeof (CK_AES_GMAC_PARAMS))
1463 return (CRYPTO_MECHANISM_PARAM_INVALID);
1464
1465 if (params->pIv == NULL)
1466 return (CRYPTO_MECHANISM_PARAM_INVALID);
1467
1468 gcm_params->pIv = params->pIv;
|
289 break;
290 case AES_GMAC_MECH_INFO_TYPE:
291 param_len = sizeof (CK_AES_GMAC_PARAMS);
292 alloc_fun = gmac_alloc_ctx;
293 break;
294 default:
295 rv = CRYPTO_MECHANISM_INVALID;
296 return (rv);
297 }
298 if (param_required && mechanism->cm_param != NULL &&
299 mechanism->cm_param_len != param_len) {
300 rv = CRYPTO_MECHANISM_PARAM_INVALID;
301 }
302 if (ctx != NULL) {
303 p = (alloc_fun)(kmflag);
304 *ctx = p;
305 }
306 return (rv);
307 }
308
309 /*
310 * Initialize key schedules for AES
311 */
312 static int
313 init_keysched(crypto_key_t *key, void *newbie)
314 {
315 /*
316 * Only keys by value are supported by this module.
317 */
318 switch (key->ck_format) {
319 case CRYPTO_KEY_RAW:
320 if (key->ck_length < AES_MINBITS ||
321 key->ck_length > AES_MAXBITS) {
322 return (CRYPTO_KEY_SIZE_RANGE);
323 }
324
325 /* key length must be either 128, 192, or 256 */
326 if ((key->ck_length & 63) != 0)
327 return (CRYPTO_KEY_SIZE_RANGE);
328 break;
329 default:
330 return (CRYPTO_KEY_TYPE_INCONSISTENT);
331 }
332
333 aes_init_keysched(key->ck_data, key->ck_length, newbie);
334 return (CRYPTO_SUCCESS);
335 }
336
337 /*
338 * KCF software provider control entry points.
339 */
/*
 * KCF provider-status callback.  This software provider has no hardware
 * dependencies, so it is unconditionally ready.
 */
/* ARGSUSED */
static void
aes_provider_status(crypto_provider_handle_t provider, uint_t *status)
{
	*status = CRYPTO_PROVIDER_READY;
}
346
347 static int
348 aes_encrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
349 crypto_key_t *key, crypto_spi_ctx_template_t template,
350 crypto_req_handle_t req) {
351 return (aes_common_init(ctx, mechanism, key, template, req, B_TRUE));
352 }
353
354 static int
355 aes_decrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
356 crypto_key_t *key, crypto_spi_ctx_template_t template,
357 crypto_req_handle_t req) {
358 return (aes_common_init(ctx, mechanism, key, template, req, B_FALSE));
359 }
360
361
362
363 /*
364 * KCF software provider encrypt entry points.
365 */
366 static int
367 aes_common_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
368 crypto_key_t *key, crypto_spi_ctx_template_t template,
369 crypto_req_handle_t req, boolean_t is_encrypt_init)
370 {
371 aes_ctx_t *aes_ctx;
372 int rv;
373 int kmflag;
374
375 /*
376 * Only keys by value are supported by this module.
377 */
378 if (key->ck_format != CRYPTO_KEY_RAW) {
379 return (CRYPTO_KEY_TYPE_INCONSISTENT);
380 }
381
382 kmflag = crypto_kmflag(req);
383 if ((rv = aes_check_mech_param(mechanism, &aes_ctx, kmflag))
384 != CRYPTO_SUCCESS)
385 return (rv);
386
387 rv = aes_common_init_ctx(aes_ctx, template, mechanism, key, kmflag,
388 is_encrypt_init);
389 if (rv != CRYPTO_SUCCESS) {
390 crypto_free_mode_ctx(aes_ctx);
391 return (rv);
392 }
393
394 ctx->cc_provider_private = aes_ctx;
395
396 return (CRYPTO_SUCCESS);
397 }
398
399 static void
400 aes_copy_block64(uint8_t *in, uint64_t *out)
401 {
402 if (IS_P2ALIGNED(in, sizeof (uint64_t))) {
403 /* LINTED: pointer alignment */
404 out[0] = *(uint64_t *)&in[0];
405 /* LINTED: pointer alignment */
406 out[1] = *(uint64_t *)&in[8];
407 } else {
408 uint8_t *iv8 = (uint8_t *)&out[0];
409
410 AES_COPY_BLOCK(in, iv8);
411 }
412 }
413
414
415 static int
416 aes_encrypt(crypto_ctx_t *ctx, crypto_data_t *plaintext,
417 crypto_data_t *ciphertext, crypto_req_handle_t req)
418 {
419 int ret = CRYPTO_FAILED;
420
421 aes_ctx_t *aes_ctx;
422 size_t saved_length, saved_offset, length_needed;
423
424 ASSERT(ctx->cc_provider_private != NULL);
425 aes_ctx = ctx->cc_provider_private;
426
427 /*
428 * For block ciphers, plaintext must be a multiple of AES block size.
429 * This test is only valid for ciphers whose blocksize is a power of 2.
430 */
431 if (((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE))
432 == 0) && (plaintext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
433 return (CRYPTO_DATA_LEN_RANGE);
434
435 AES_ARG_INPLACE(plaintext, ciphertext);
436
437 /*
438 * We need to just return the length needed to store the output.
439 * We should not destroy the context for the following case.
440 */
506 /* order of following 2 lines MUST not be reversed */
507 ciphertext->cd_offset = ciphertext->cd_length;
508 ciphertext->cd_length = saved_length - ciphertext->cd_length;
509 ret = gcm_encrypt_final((gcm_ctx_t *)aes_ctx, ciphertext,
510 AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
511 aes_xor_block);
512 if (ret != CRYPTO_SUCCESS) {
513 return (ret);
514 }
515
516 if (plaintext != ciphertext) {
517 ciphertext->cd_length =
518 ciphertext->cd_offset - saved_offset;
519 }
520 ciphertext->cd_offset = saved_offset;
521 }
522
523 ASSERT(aes_ctx->ac_remainder_len == 0);
524 (void) aes_free_context(ctx);
525
526 return (ret);
527 }
528
529
530 static int
531 aes_decrypt(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
532 crypto_data_t *plaintext, crypto_req_handle_t req)
533 {
534 int ret = CRYPTO_FAILED;
535
536 aes_ctx_t *aes_ctx;
537 off_t saved_offset;
538 size_t saved_length, length_needed;
539
540 ASSERT(ctx->cc_provider_private != NULL);
541 aes_ctx = ctx->cc_provider_private;
542
543 /*
544 * For block ciphers, plaintext must be a multiple of AES block size.
545 * This test is only valid for ciphers whose blocksize is a power of 2.
546 */
547 if (((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE))
548 == 0) && (ciphertext->cd_length & (AES_BLOCK_LEN - 1)) != 0) {
549 return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
550 }
551
552 AES_ARG_INPLACE(ciphertext, plaintext);
553
554 /*
555 * Return length needed to store the output.
620
621 ret = gcm_decrypt_final((gcm_ctx_t *)aes_ctx, plaintext,
622 AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
623 if (ret == CRYPTO_SUCCESS) {
624 if (plaintext != ciphertext) {
625 plaintext->cd_length =
626 plaintext->cd_offset - saved_offset;
627 }
628 } else {
629 plaintext->cd_length = saved_length;
630 }
631
632 plaintext->cd_offset = saved_offset;
633 }
634
635 ASSERT(aes_ctx->ac_remainder_len == 0);
636
637 cleanup:
638 (void) aes_free_context(ctx);
639
640 return (ret);
641 }
642
643
644 /* ARGSUSED */
645 static int
646 aes_encrypt_update(crypto_ctx_t *ctx, crypto_data_t *plaintext,
647 crypto_data_t *ciphertext, crypto_req_handle_t req)
648 {
649 off_t saved_offset;
650 size_t saved_length, out_len;
651 int ret = CRYPTO_SUCCESS;
652 aes_ctx_t *aes_ctx;
653
654 ASSERT(ctx->cc_provider_private != NULL);
655 aes_ctx = ctx->cc_provider_private;
656
657 AES_ARG_INPLACE(plaintext, ciphertext);
658
659 /* compute number of bytes that will hold the ciphertext */
791 }
792
793 if (ret == CRYPTO_SUCCESS) {
794 if (ciphertext != plaintext)
795 plaintext->cd_length =
796 plaintext->cd_offset - saved_offset;
797 } else {
798 plaintext->cd_length = saved_length;
799 }
800 plaintext->cd_offset = saved_offset;
801
802
803 return (ret);
804 }
805
806 /* ARGSUSED */
807 static int
808 aes_encrypt_final(crypto_ctx_t *ctx, crypto_data_t *data,
809 crypto_req_handle_t req)
810 {
811 aes_ctx_t *aes_ctx;
812 int ret;
813
814 ASSERT(ctx->cc_provider_private != NULL);
815 aes_ctx = ctx->cc_provider_private;
816
817 if (data->cd_format != CRYPTO_DATA_RAW &&
818 data->cd_format != CRYPTO_DATA_UIO &&
819 data->cd_format != CRYPTO_DATA_MBLK) {
820 return (CRYPTO_ARGUMENTS_BAD);
821 }
822
823 if (aes_ctx->ac_flags & CTR_MODE) {
824 if (aes_ctx->ac_remainder_len > 0) {
825 ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, data,
826 aes_encrypt_block);
827 if (ret != CRYPTO_SUCCESS)
828 return (ret);
829 }
830 } else if (aes_ctx->ac_flags & CCM_MODE) {
841 aes_xor_block);
842 if (ret != CRYPTO_SUCCESS) {
843 return (ret);
844 }
845 data->cd_length = data->cd_offset - saved_offset;
846 data->cd_offset = saved_offset;
847 } else {
848 /*
849 * There must be no unprocessed plaintext.
850 * This happens if the length of the last data is
851 * not a multiple of the AES block length.
852 */
853 if (aes_ctx->ac_remainder_len > 0) {
854 return (CRYPTO_DATA_LEN_RANGE);
855 }
856 data->cd_length = 0;
857 }
858
859 (void) aes_free_context(ctx);
860
861 return (CRYPTO_SUCCESS);
862 }
863
864 /* ARGSUSED */
865 static int
866 aes_decrypt_final(crypto_ctx_t *ctx, crypto_data_t *data,
867 crypto_req_handle_t req)
868 {
869 aes_ctx_t *aes_ctx;
870 int ret;
871 off_t saved_offset;
872 size_t saved_length;
873
874 ASSERT(ctx->cc_provider_private != NULL);
875 aes_ctx = ctx->cc_provider_private;
876
877 if (data->cd_format != CRYPTO_DATA_RAW &&
878 data->cd_format != CRYPTO_DATA_UIO &&
879 data->cd_format != CRYPTO_DATA_MBLK) {
880 return (CRYPTO_ARGUMENTS_BAD);
881 }
882
883 /*
884 * There must be no unprocessed ciphertext.
885 * This happens if the length of the last ciphertext is
886 * not a multiple of the AES block length.
887 */
888 if (aes_ctx->ac_remainder_len > 0) {
945 AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
946 if (ret == CRYPTO_SUCCESS) {
947 data->cd_length = data->cd_offset - saved_offset;
948 } else {
949 data->cd_length = saved_length;
950 }
951
952 data->cd_offset = saved_offset;
953 if (ret != CRYPTO_SUCCESS) {
954 return (ret);
955 }
956 }
957
958
959 if ((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE)) == 0) {
960 data->cd_length = 0;
961 }
962
963 (void) aes_free_context(ctx);
964
965 return (CRYPTO_SUCCESS);
966 }
967
968 /* ARGSUSED */
969 static int
970 aes_encrypt_atomic(crypto_provider_handle_t provider,
971 crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
972 crypto_key_t *key, crypto_data_t *plaintext, crypto_data_t *ciphertext,
973 crypto_spi_ctx_template_t template, crypto_req_handle_t req)
974 {
975 aes_ctx_t aes_ctx; /* on the stack */
976 off_t saved_offset;
977 size_t saved_length;
978 size_t length_needed;
979 int ret;
980
981 AES_ARG_INPLACE(plaintext, ciphertext);
982
983 /*
984 * CTR, CCM, GCM, and GMAC modes do not require that plaintext
1250 }
1251 } else if (aes_ctx.ac_flags & (GCM_MODE|GMAC_MODE)) {
1252 if (((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf != NULL) {
1253 kmem_free(((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf,
1254 ((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf_len);
1255 }
1256 }
1257
1258 return (ret);
1259 }
1260
1261 /*
1262 * KCF software provider context template entry points.
1263 */
1264 /* ARGSUSED */
1265 static int
1266 aes_create_ctx_template(crypto_provider_handle_t provider,
1267 crypto_mechanism_t *mechanism, crypto_key_t *key,
1268 crypto_spi_ctx_template_t *tmpl, size_t *tmpl_size, crypto_req_handle_t req)
1269 {
1270 void *keysched;
1271 size_t size;
1272 int rv;
1273
1274 if (mechanism->cm_type != AES_ECB_MECH_INFO_TYPE &&
1275 mechanism->cm_type != AES_CBC_MECH_INFO_TYPE &&
1276 mechanism->cm_type != AES_CTR_MECH_INFO_TYPE &&
1277 mechanism->cm_type != AES_CCM_MECH_INFO_TYPE &&
1278 mechanism->cm_type != AES_GCM_MECH_INFO_TYPE &&
1279 mechanism->cm_type != AES_GMAC_MECH_INFO_TYPE)
1280 return (CRYPTO_MECHANISM_INVALID);
1281
1282 if ((keysched = aes_alloc_keysched(&size,
1283 crypto_kmflag(req))) == NULL) {
1284 return (CRYPTO_HOST_MEMORY);
1285 }
1286
1287 /*
1288 * Initialize key schedule. Key length information is stored
1289 * in the key.
1290 */
1291 if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) {
1292 bzero(keysched, size);
1293 kmem_free(keysched, size);
1294 return (rv);
1295 }
1296
1297 *tmpl = keysched;
1298 *tmpl_size = size;
1299
1300 return (CRYPTO_SUCCESS);
1301 }
1302
1303
1304 static int
1305 aes_free_context(crypto_ctx_t *ctx)
1306 {
1307 aes_ctx_t *aes_ctx = ctx->cc_provider_private;
1308
1309 if (aes_ctx != NULL) {
1310 if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
1311 ASSERT(aes_ctx->ac_keysched_len != 0);
1312 bzero(aes_ctx->ac_keysched, aes_ctx->ac_keysched_len);
1313 kmem_free(aes_ctx->ac_keysched,
1314 aes_ctx->ac_keysched_len);
1315 }
1316 crypto_free_mode_ctx(aes_ctx);
1317 ctx->cc_provider_private = NULL;
1318 }
1319
1320 return (CRYPTO_SUCCESS);
1321 }
1322
1323
1324 static int
1325 aes_common_init_ctx(aes_ctx_t *aes_ctx, crypto_spi_ctx_template_t *template,
1326 crypto_mechanism_t *mechanism, crypto_key_t *key, int kmflag,
1327 boolean_t is_encrypt_init)
1328 {
1329 int rv = CRYPTO_SUCCESS;
1330 void *keysched;
1331 size_t size;
1332
1333 if (template == NULL) {
1334 if ((keysched = aes_alloc_keysched(&size, kmflag)) == NULL)
1335 return (CRYPTO_HOST_MEMORY);
1336 /*
1337 * Initialize key schedule.
1338 * Key length is stored in the key.
1339 */
1340 if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) {
1341 kmem_free(keysched, size);
1342 return (rv);
1343 }
1344
1345 aes_ctx->ac_flags |= PROVIDER_OWNS_KEY_SCHEDULE;
1346 aes_ctx->ac_keysched_len = size;
1347 } else {
1348 keysched = template;
1349 }
1387 case AES_GMAC_MECH_INFO_TYPE:
1388 if (mechanism->cm_param == NULL ||
1389 mechanism->cm_param_len != sizeof (CK_AES_GMAC_PARAMS)) {
1390 return (CRYPTO_MECHANISM_PARAM_INVALID);
1391 }
1392 rv = gmac_init_ctx((gcm_ctx_t *)aes_ctx, mechanism->cm_param,
1393 AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
1394 aes_xor_block);
1395 break;
1396 case AES_ECB_MECH_INFO_TYPE:
1397 aes_ctx->ac_flags |= ECB_MODE;
1398 }
1399
1400 if (rv != CRYPTO_SUCCESS) {
1401 if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
1402 bzero(keysched, size);
1403 kmem_free(keysched, size);
1404 }
1405 }
1406
1407 return (rv);
1408 }
1409
1410 static int
1411 process_gmac_mech(crypto_mechanism_t *mech, crypto_data_t *data,
1412 CK_AES_GCM_PARAMS *gcm_params)
1413 {
1414 /* LINTED: pointer alignment */
1415 CK_AES_GMAC_PARAMS *params = (CK_AES_GMAC_PARAMS *)mech->cm_param;
1416
1417 if (mech->cm_type != AES_GMAC_MECH_INFO_TYPE)
1418 return (CRYPTO_MECHANISM_INVALID);
1419
1420 if (mech->cm_param_len != sizeof (CK_AES_GMAC_PARAMS))
1421 return (CRYPTO_MECHANISM_PARAM_INVALID);
1422
1423 if (params->pIv == NULL)
1424 return (CRYPTO_MECHANISM_PARAM_INVALID);
1425
1426 gcm_params->pIv = params->pIv;
|