/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */
/*
 * Copyright 2009 Sun Microsystems, Inc. All rights reserved.
 * Use is subject to license terms.
 * Copyright 2019 Joyent, Inc.
 */

#include <sys/types.h>
#include <sys/sysmacros.h>
#include <modes/modes.h>
#include "aes_impl.h"
#ifndef _KERNEL
#include <stdlib.h>
#endif /* !_KERNEL */


/* Copy a 16-byte AES block from "in" to "out" */
void
aes_copy_block(uint8_t *in, uint8_t *out)
{
	if (IS_P2ALIGNED2(in, out, sizeof (uint32_t))) {
		/* LINTED: pointer alignment */
		*(uint32_t *)&out[0] = *(uint32_t *)&in[0];
		/* LINTED: pointer alignment */
		*(uint32_t *)&out[4] = *(uint32_t *)&in[4];
		/* LINTED: pointer alignment */
		*(uint32_t *)&out[8] = *(uint32_t *)&in[8];
		/* LINTED: pointer alignment */
		*(uint32_t *)&out[12] = *(uint32_t *)&in[12];
	} else {
		AES_COPY_BLOCK(in, out);
	}
}

/*
 * Copy a 16-byte AES block in 64-bit chunks if the input address is aligned
 * to 64-bits
 */
void
aes_copy_block64(uint8_t *in, uint64_t *out)
{
	if (IS_P2ALIGNED(in, sizeof (uint64_t))) {
		/* LINTED: pointer alignment */
		out[0] = *(uint64_t *)&in[0];
		/* LINTED: pointer alignment */
		out[1] = *(uint64_t *)&in[8];
	} else {
		uint8_t *iv8 = (uint8_t *)&out[0];

		AES_COPY_BLOCK(in, iv8);
	}
}

/* XOR a 16-byte AES block of data into dst */
void
aes_xor_block(uint8_t *data, uint8_t *dst)
{
	if (IS_P2ALIGNED2(dst, data, sizeof (uint32_t))) {
		/* LINTED: pointer alignment */
		*(uint32_t *)&dst[0] ^= *(uint32_t *)&data[0];
		/* LINTED: pointer alignment */
		*(uint32_t *)&dst[4] ^= *(uint32_t *)&data[4];
		/* LINTED: pointer alignment */
		*(uint32_t *)&dst[8] ^= *(uint32_t *)&data[8];
		/* LINTED: pointer alignment */
		*(uint32_t *)&dst[12] ^= *(uint32_t *)&data[12];
	} else {
		AES_XOR_BLOCK(data, dst);
	}
}
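
/*
 * Illustrative usage sketch (not part of the original source): the helpers
 * above are the per-block primitives handed to the generic mode code in
 * modes/modes.c (see the dispatch functions below).  One CBC-style chaining
 * step built directly from them would look roughly like this, with the key
 * schedule ("sched") set up elsewhere and the block cipher call elided:
 *
 *	uint8_t prev[AES_BLOCK_LEN];	// previous ciphertext block (or IV)
 *	uint8_t blk[AES_BLOCK_LEN];	// current plaintext block
 *
 *	aes_xor_block(prev, blk);	// blk ^= prev
 *	// ... encrypt blk in place, e.g. aes_encrypt_block(sched, blk, blk)
 *	aes_copy_block(blk, prev);	// ciphertext chains into the next block
 */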

/*
 * Encrypt multiple blocks of data according to mode.
 */
int
aes_encrypt_contiguous_blocks(void *ctx, char *data, size_t length,
    crypto_data_t *out)
{
	aes_ctx_t *aes_ctx = ctx;
	int rv;

	if (aes_ctx->ac_flags & CTR_MODE) {
		rv = ctr_mode_contiguous_blocks(ctx, data, length, out,
		    AES_BLOCK_LEN, aes_encrypt_block);
	} else if (aes_ctx->ac_flags & CCM_MODE) {
		rv = ccm_mode_encrypt_contiguous_blocks(ctx, data, length,
		    out, AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		rv = gcm_mode_encrypt_contiguous_blocks(ctx, data, length,
		    out, AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
	} else if (aes_ctx->ac_flags & (CBC_MODE|CMAC_MODE)) {
		rv = cbc_encrypt_contiguous_blocks(ctx,
		    data, length, out, AES_BLOCK_LEN, aes_encrypt_block,
		    aes_copy_block, aes_xor_block);
	} else {
		rv = ecb_cipher_contiguous_blocks(ctx, data, length, out,
		    AES_BLOCK_LEN, aes_encrypt_block);
	}
	return (rv);
}


/*
 * Decrypt multiple blocks of data according to mode.
 */
int
aes_decrypt_contiguous_blocks(void *ctx, char *data, size_t length,
    crypto_data_t *out)
{
	aes_ctx_t *aes_ctx = ctx;
	int rv;

	if (aes_ctx->ac_flags & CTR_MODE) {
		rv = ctr_mode_contiguous_blocks(ctx, data, length, out,
		    AES_BLOCK_LEN, aes_encrypt_block);
		if (rv == CRYPTO_DATA_LEN_RANGE)
			rv = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
	} else if (aes_ctx->ac_flags & CCM_MODE) {
		rv = ccm_mode_decrypt_contiguous_blocks(ctx, data, length,
		    out, AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		rv = gcm_mode_decrypt_contiguous_blocks(ctx, data, length,
		    out, AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
	} else if (aes_ctx->ac_flags & CBC_MODE) {
		rv = cbc_decrypt_contiguous_blocks(ctx, data, length, out,
		    AES_BLOCK_LEN, aes_decrypt_block, aes_copy_block,
		    aes_xor_block);
	} else {
		rv = ecb_cipher_contiguous_blocks(ctx, data, length, out,
		    AES_BLOCK_LEN, aes_decrypt_block);
		if (rv == CRYPTO_DATA_LEN_RANGE)
			rv = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
	}
	return (rv);
}
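
/*
 * Illustrative usage sketch (not part of the original source): a caller that
 * already holds an initialized aes_ctx_t (context setup is done by the
 * mode-specific init code in modes/, not in this file) hands these
 * dispatchers a contiguous input buffer and a crypto_data_t describing the
 * output.  With a raw (CRYPTO_DATA_RAW) output buffer the call would look
 * roughly like:
 *
 *	uint8_t ct[512];
 *	crypto_data_t out;
 *
 *	out.cd_format = CRYPTO_DATA_RAW;
 *	out.cd_offset = 0;
 *	out.cd_length = sizeof (ct);
 *	out.cd_raw.iov_base = (char *)ct;
 *	out.cd_raw.iov_len = sizeof (ct);
 *
 *	if (aes_encrypt_contiguous_blocks(aes_ctx, (char *)pt, pt_len,
 *	    &out) != CRYPTO_SUCCESS)
 *		// handle failure
 *
 * "aes_ctx", "pt" and "pt_len" stand in for the caller's context and
 * plaintext; the crypto_data_t fields are the kernel crypto framework's
 * raw-buffer fields from <sys/crypto/common.h>.
 */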