1 /* SPDX-License-Identifier: BSD-3-Clause */ 2 /* Copyright(c) 2007-2022 Intel Corporation */ 3 /* $FreeBSD$ */ 4 5 /** 6 *************************************************************************** 7 * @file lac_sym_qat_cipher.c QAT-related support functions for Cipher 8 * 9 * @ingroup LacSymQat_Cipher 10 * 11 * @description Functions to support the QAT related operations for Cipher 12 ***************************************************************************/ 13 14 /* 15 ******************************************************************************* 16 * Include public/global header files 17 ******************************************************************************* 18 */ 19 20 #include "cpa.h" 21 #include "icp_accel_devices.h" 22 #include "icp_adf_debug.h" 23 #include "lac_sym_qat.h" 24 #include "lac_sym_qat_cipher.h" 25 #include "lac_mem.h" 26 #include "lac_common.h" 27 #include "cpa_cy_sym.h" 28 #include "lac_sym_qat.h" 29 #include "lac_sym_cipher_defs.h" 30 #include "icp_qat_hw.h" 31 #include "icp_qat_fw_la.h" 32 #include "sal_hw_gen.h" 33 34 #define LAC_UNUSED_POS_MASK 0x3 35 36 /***************************************************************************** 37 * Internal data 38 *****************************************************************************/ 39 40 typedef enum _icp_qat_hw_key_depend { 41 IS_KEY_DEP_NO = 0, 42 IS_KEY_DEP_YES, 43 } icp_qat_hw_key_depend; 44 45 /* LAC_CIPHER_IS_XTS_MODE */ 46 static const uint8_t key_size_xts[] = { 47 0, 48 0, 49 0, 50 0, 51 0, 52 0, 53 0, 54 0, 55 0, 56 0, 57 0, 58 0, 59 0, 60 0, 61 0, 62 0, 63 0, 64 0, 65 0, 66 0, 67 0, 68 0, 69 0, 70 0, 71 0, 72 0, 73 0, 74 0, 75 0, 76 0, 77 0, 78 0, 79 ICP_QAT_HW_CIPHER_ALGO_AES128, /* ICP_QAT_HW_AES_128_XTS_KEY_SZ */ 80 0, 81 0, 82 0, 83 0, 84 0, 85 0, 86 0, 87 0, 88 0, 89 0, 90 0, 91 0, 92 0, 93 0, 94 0, 95 0, 96 0, 97 0, 98 0, 99 0, 100 0, 101 0, 102 0, 103 0, 104 0, 105 0, 106 0, 107 0, 108 0, 109 0, 110 0, 111 ICP_QAT_HW_CIPHER_ALGO_AES256 /* 
ICP_QAT_HW_AES_256_XTS_KEY_SZ */ 112 }; 113 /* LAC_CIPHER_IS_AES */ 114 static const uint8_t key_size_aes[] = { 115 0, 116 0, 117 0, 118 0, 119 0, 120 0, 121 0, 122 0, 123 0, 124 0, 125 0, 126 0, 127 0, 128 0, 129 0, 130 0, 131 ICP_QAT_HW_CIPHER_ALGO_AES128, /* ICP_QAT_HW_AES_128_KEY_SZ */ 132 0, 133 0, 134 0, 135 0, 136 0, 137 0, 138 0, 139 ICP_QAT_HW_CIPHER_ALGO_AES192, /* ICP_QAT_HW_AES_192_KEY_SZ */ 140 0, 141 0, 142 0, 143 0, 144 0, 145 0, 146 0, 147 ICP_QAT_HW_CIPHER_ALGO_AES256 /* ICP_QAT_HW_AES_256_KEY_SZ */ 148 }; 149 /* LAC_CIPHER_IS_AES_F8 */ 150 static const uint8_t key_size_f8[] = { 151 0, 152 0, 153 0, 154 0, 155 0, 156 0, 157 0, 158 0, 159 0, 160 0, 161 0, 162 0, 163 0, 164 0, 165 0, 166 0, 167 0, 168 0, 169 0, 170 0, 171 0, 172 0, 173 0, 174 0, 175 0, 176 0, 177 0, 178 0, 179 0, 180 0, 181 0, 182 0, 183 ICP_QAT_HW_CIPHER_ALGO_AES128, /* ICP_QAT_HW_AES_128_F8_KEY_SZ */ 184 0, 185 0, 186 0, 187 0, 188 0, 189 0, 190 0, 191 0, 192 0, 193 0, 194 0, 195 0, 196 0, 197 0, 198 0, 199 ICP_QAT_HW_CIPHER_ALGO_AES192, /* ICP_QAT_HW_AES_192_F8_KEY_SZ */ 200 0, 201 0, 202 0, 203 0, 204 0, 205 0, 206 0, 207 0, 208 0, 209 0, 210 0, 211 0, 212 0, 213 0, 214 0, 215 ICP_QAT_HW_CIPHER_ALGO_AES256 /* ICP_QAT_HW_AES_256_F8_KEY_SZ */ 216 }; 217 218 typedef struct _icp_qat_hw_cipher_info { 219 icp_qat_hw_cipher_algo_t algorithm; 220 icp_qat_hw_cipher_mode_t mode; 221 icp_qat_hw_cipher_convert_t key_convert[2]; 222 icp_qat_hw_cipher_dir_t dir[2]; 223 icp_qat_hw_key_depend isKeyLenDepend; 224 const uint8_t *pAlgByKeySize; 225 } icp_qat_hw_cipher_info; 226 227 static const icp_qat_hw_cipher_info icp_qat_alg_info[] = { 228 /* CPA_CY_SYM_CIPHER_NULL */ 229 { 230 ICP_QAT_HW_CIPHER_ALGO_NULL, 231 ICP_QAT_HW_CIPHER_ECB_MODE, 232 { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_NO_CONVERT }, 233 { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_DECRYPT }, 234 IS_KEY_DEP_NO, 235 NULL, 236 }, 237 /* CPA_CY_SYM_CIPHER_ARC4 */ 238 { 239 ICP_QAT_HW_CIPHER_ALGO_ARC4, 240 
ICP_QAT_HW_CIPHER_ECB_MODE, 241 { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_NO_CONVERT }, 242 /* Streaming ciphers are a special case. Decrypt = encrypt */ 243 { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_ENCRYPT }, 244 IS_KEY_DEP_NO, 245 NULL, 246 }, 247 /* CPA_CY_SYM_CIPHER_AES_ECB */ 248 { 249 ICP_QAT_HW_CIPHER_ALGO_AES128, 250 ICP_QAT_HW_CIPHER_ECB_MODE, 251 /* AES decrypt key needs to be reversed. Instead of reversing the 252 * key at session registration, it is instead reversed on-the-fly by 253 * setting the KEY_CONVERT bit here 254 */ 255 { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_KEY_CONVERT }, 256 { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_DECRYPT }, 257 IS_KEY_DEP_YES, 258 key_size_aes, 259 }, 260 /* CPA_CY_SYM_CIPHER_AES_CBC */ 261 { 262 ICP_QAT_HW_CIPHER_ALGO_AES128, 263 ICP_QAT_HW_CIPHER_CBC_MODE, 264 /* AES decrypt key needs to be reversed. Instead of reversing the 265 * key at session registration, it is instead reversed on-the-fly by 266 * setting the KEY_CONVERT bit here 267 */ 268 { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_KEY_CONVERT }, 269 { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_DECRYPT }, 270 IS_KEY_DEP_YES, 271 key_size_aes, 272 }, 273 /* CPA_CY_SYM_CIPHER_AES_CTR */ 274 { 275 ICP_QAT_HW_CIPHER_ALGO_AES128, 276 ICP_QAT_HW_CIPHER_CTR_MODE, 277 /* AES decrypt key needs to be reversed. Instead of reversing the 278 * key at session registration, it is instead reversed on-the-fly by 279 * setting the KEY_CONVERT bit here 280 */ 281 { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_NO_CONVERT }, 282 /* Streaming ciphers are a special case. Decrypt = encrypt 283 * Overriding default values previously set for AES 284 */ 285 { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_ENCRYPT }, 286 IS_KEY_DEP_YES, 287 key_size_aes, 288 }, 289 /* CPA_CY_SYM_CIPHER_AES_CCM */ 290 { 291 ICP_QAT_HW_CIPHER_ALGO_AES128, 292 ICP_QAT_HW_CIPHER_CTR_MODE, 293 /* AES decrypt key needs to be reversed. 
Instead of reversing the 294 * key at session registration, it is instead reversed on-the-fly by 295 * setting the KEY_CONVERT bit here 296 */ 297 { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_NO_CONVERT }, 298 /* Streaming ciphers are a special case. Decrypt = encrypt 299 * Overriding default values previously set for AES 300 */ 301 { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_ENCRYPT }, 302 IS_KEY_DEP_YES, 303 key_size_aes, 304 }, 305 /* CPA_CY_SYM_CIPHER_AES_GCM */ 306 { 307 ICP_QAT_HW_CIPHER_ALGO_AES128, 308 ICP_QAT_HW_CIPHER_CTR_MODE, 309 /* AES decrypt key needs to be reversed. Instead of reversing the 310 * key at session registration, it is instead reversed on-the-fly by 311 * setting the KEY_CONVERT bit here 312 */ 313 { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_NO_CONVERT }, 314 /* Streaming ciphers are a special case. Decrypt = encrypt 315 * Overriding default values previously set for AES 316 */ 317 { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_ENCRYPT }, 318 IS_KEY_DEP_YES, 319 key_size_aes, 320 }, 321 /* CPA_CY_SYM_CIPHER_DES_ECB */ 322 { 323 ICP_QAT_HW_CIPHER_ALGO_DES, 324 ICP_QAT_HW_CIPHER_ECB_MODE, 325 { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_NO_CONVERT }, 326 { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_DECRYPT }, 327 IS_KEY_DEP_NO, 328 NULL, 329 }, 330 /* CPA_CY_SYM_CIPHER_DES_CBC */ 331 { 332 ICP_QAT_HW_CIPHER_ALGO_DES, 333 ICP_QAT_HW_CIPHER_CBC_MODE, 334 { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_NO_CONVERT }, 335 { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_DECRYPT }, 336 IS_KEY_DEP_NO, 337 NULL, 338 }, 339 /* CPA_CY_SYM_CIPHER_3DES_ECB */ 340 { 341 ICP_QAT_HW_CIPHER_ALGO_3DES, 342 ICP_QAT_HW_CIPHER_ECB_MODE, 343 { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_NO_CONVERT }, 344 { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_DECRYPT }, 345 IS_KEY_DEP_NO, 346 NULL, 347 }, 348 /* CPA_CY_SYM_CIPHER_3DES_CBC */ 349 { 350 ICP_QAT_HW_CIPHER_ALGO_3DES, 351 ICP_QAT_HW_CIPHER_CBC_MODE, 352 { 
ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_NO_CONVERT }, 353 { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_DECRYPT }, 354 IS_KEY_DEP_NO, 355 NULL, 356 }, 357 /* CPA_CY_SYM_CIPHER_3DES_CTR */ 358 { 359 ICP_QAT_HW_CIPHER_ALGO_3DES, 360 ICP_QAT_HW_CIPHER_CTR_MODE, 361 { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_NO_CONVERT }, 362 /* Streaming ciphers are a special case. Decrypt = encrypt 363 * Overriding default values previously set for AES 364 */ 365 { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_ENCRYPT }, 366 IS_KEY_DEP_NO, 367 NULL, 368 }, 369 /* CPA_CY_SYM_CIPHER_KASUMI_F8 */ 370 { 371 ICP_QAT_HW_CIPHER_ALGO_KASUMI, 372 ICP_QAT_HW_CIPHER_F8_MODE, 373 { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_NO_CONVERT }, 374 /* Streaming ciphers are a special case. Decrypt = encrypt */ 375 { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_ENCRYPT }, 376 IS_KEY_DEP_NO, 377 NULL, 378 }, 379 /* CPA_CY_SYM_CIPHER_SNOW3G_UEA2 */ 380 { 381 /* The KEY_CONVERT bit has to be set for Snow_3G operation */ 382 ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2, 383 ICP_QAT_HW_CIPHER_ECB_MODE, 384 { ICP_QAT_HW_CIPHER_KEY_CONVERT, ICP_QAT_HW_CIPHER_KEY_CONVERT }, 385 { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_DECRYPT }, 386 IS_KEY_DEP_NO, 387 NULL, 388 }, 389 /* CPA_CY_SYM_CIPHER_AES_F8 */ 390 { 391 ICP_QAT_HW_CIPHER_ALGO_AES128, 392 ICP_QAT_HW_CIPHER_F8_MODE, 393 { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_NO_CONVERT }, 394 /* Streaming ciphers are a special case. Decrypt = encrypt */ 395 { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_ENCRYPT }, 396 IS_KEY_DEP_YES, 397 key_size_f8, 398 }, 399 /* CPA_CY_SYM_CIPHER_AES_XTS */ 400 { 401 ICP_QAT_HW_CIPHER_ALGO_AES128, 402 ICP_QAT_HW_CIPHER_XTS_MODE, 403 /* AES decrypt key needs to be reversed. 
Instead of reversing the 404 * key at session registration, it is instead reversed on-the-fly by 405 * setting the KEY_CONVERT bit here 406 */ 407 { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_KEY_CONVERT }, 408 { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_DECRYPT }, 409 IS_KEY_DEP_YES, 410 key_size_xts, 411 }, 412 /* CPA_CY_SYM_CIPHER_ZUC_EEA3 */ 413 { 414 ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3, 415 ICP_QAT_HW_CIPHER_ECB_MODE, 416 { ICP_QAT_HW_CIPHER_KEY_CONVERT, ICP_QAT_HW_CIPHER_KEY_CONVERT }, 417 { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_DECRYPT }, 418 IS_KEY_DEP_NO, 419 NULL, 420 }, 421 /* CPA_CY_SYM_CIPHER_CHACHA */ 422 { 423 ICP_QAT_HW_CIPHER_ALGO_CHACHA20_POLY1305, 424 ICP_QAT_HW_CIPHER_CTR_MODE, 425 { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_NO_CONVERT }, 426 { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_DECRYPT }, 427 IS_KEY_DEP_NO, 428 NULL, 429 }, 430 /* CPA_CY_SYM_CIPHER_SM4_ECB */ 431 { 432 ICP_QAT_HW_CIPHER_ALGO_SM4, 433 ICP_QAT_HW_CIPHER_ECB_MODE, 434 { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_KEY_CONVERT }, 435 { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_DECRYPT }, 436 IS_KEY_DEP_NO, 437 NULL, 438 }, 439 /* CPA_CY_SYM_CIPHER_SM4_CBC */ 440 { 441 ICP_QAT_HW_CIPHER_ALGO_SM4, 442 ICP_QAT_HW_CIPHER_CBC_MODE, 443 { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_KEY_CONVERT }, 444 { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_DECRYPT }, 445 IS_KEY_DEP_NO, 446 NULL, 447 }, 448 /* CPA_CY_SYM_CIPHER_SM4_CTR */ 449 { 450 ICP_QAT_HW_CIPHER_ALGO_SM4, 451 ICP_QAT_HW_CIPHER_CTR_MODE, 452 { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_NO_CONVERT }, 453 { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_ENCRYPT }, 454 IS_KEY_DEP_NO, 455 NULL, 456 }, 457 }; 458 459 /***************************************************************************** 460 * Internal functions 461 *****************************************************************************/ 462 463 void 464 LacSymQat_CipherCtrlBlockWrite(icp_qat_la_bulk_req_ftr_t *pMsg, 465 
Cpa32U cipherAlgorithm, 466 Cpa32U targetKeyLenInBytes, 467 Cpa32U sliceType, 468 icp_qat_fw_slice_t nextSlice, 469 Cpa8U cipherCfgOffsetInQuadWord) 470 { 471 icp_qat_fw_cipher_cd_ctrl_hdr_t *cd_ctrl = 472 (icp_qat_fw_cipher_cd_ctrl_hdr_t *)&(pMsg->cd_ctrl); 473 474 /* state_padding_sz is nonzero for f8 mode only */ 475 cd_ctrl->cipher_padding_sz = 0; 476 477 /* Special handling of AES 192 key for UCS slice. 478 UCS requires it to have 32 bytes - set is as targetKeyLen 479 in this case, and add padding. It makes no sense 480 to force applications to provide such key length for couple reasons: 481 1. It won't be possible to distinguish between AES 192 and 256 based 482 on key lenght only 483 2. Only some modes of AES will use UCS slice, then application will 484 have to know which ones */ 485 if (ICP_QAT_FW_LA_USE_UCS_SLICE_TYPE == sliceType && 486 ICP_QAT_HW_AES_192_KEY_SZ == targetKeyLenInBytes) { 487 targetKeyLenInBytes = ICP_QAT_HW_UCS_AES_192_KEY_SZ; 488 } 489 490 switch (cipherAlgorithm) { 491 /* Base Key is not passed down to QAT in the case of ARC4 or NULL */ 492 case CPA_CY_SYM_CIPHER_ARC4: 493 case CPA_CY_SYM_CIPHER_NULL: 494 cd_ctrl->cipher_key_sz = 0; 495 break; 496 case CPA_CY_SYM_CIPHER_KASUMI_F8: 497 cd_ctrl->cipher_key_sz = 498 LAC_BYTES_TO_QUADWORDS(ICP_QAT_HW_KASUMI_F8_KEY_SZ); 499 cd_ctrl->cipher_padding_sz = 500 ICP_QAT_HW_MODE_F8_NUM_REG_TO_CLEAR; 501 break; 502 /* For Snow3G UEA2 content descriptor key size is 503 key size plus iv size */ 504 case CPA_CY_SYM_CIPHER_SNOW3G_UEA2: 505 cd_ctrl->cipher_key_sz = 506 LAC_BYTES_TO_QUADWORDS(ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ + 507 ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ); 508 break; 509 case CPA_CY_SYM_CIPHER_AES_F8: 510 cd_ctrl->cipher_key_sz = 511 LAC_BYTES_TO_QUADWORDS(targetKeyLenInBytes); 512 cd_ctrl->cipher_padding_sz = 513 (2 * ICP_QAT_HW_MODE_F8_NUM_REG_TO_CLEAR); 514 break; 515 /* For ZUC EEA3 content descriptor key size is 516 key size plus iv size */ 517 case CPA_CY_SYM_CIPHER_ZUC_EEA3: 518 
cd_ctrl->cipher_key_sz = 519 LAC_BYTES_TO_QUADWORDS(ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ + 520 ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ); 521 break; 522 default: 523 cd_ctrl->cipher_key_sz = 524 LAC_BYTES_TO_QUADWORDS(targetKeyLenInBytes); 525 } 526 527 cd_ctrl->cipher_state_sz = LAC_BYTES_TO_QUADWORDS( 528 LacSymQat_CipherIvSizeBytesGet(cipherAlgorithm)); 529 530 cd_ctrl->cipher_cfg_offset = cipherCfgOffsetInQuadWord; 531 532 ICP_QAT_FW_COMN_NEXT_ID_SET(cd_ctrl, nextSlice); 533 ICP_QAT_FW_COMN_CURR_ID_SET(cd_ctrl, ICP_QAT_FW_SLICE_CIPHER); 534 } 535 536 void 537 LacSymQat_CipherGetCfgData(lac_session_desc_t *pSession, 538 icp_qat_hw_cipher_algo_t *pAlgorithm, 539 icp_qat_hw_cipher_mode_t *pMode, 540 icp_qat_hw_cipher_dir_t *pDir, 541 icp_qat_hw_cipher_convert_t *pKey_convert) 542 { 543 sal_crypto_service_t *pService = 544 (sal_crypto_service_t *)pSession->pInstance; 545 546 CpaCySymCipherAlgorithm cipherAlgorithm = 0; 547 icp_qat_hw_cipher_dir_t cipherDirection = 0; 548 549 /* Set defaults */ 550 *pKey_convert = ICP_QAT_HW_CIPHER_NO_CONVERT; 551 *pAlgorithm = ICP_QAT_HW_CIPHER_ALGO_NULL; 552 *pMode = ICP_QAT_HW_CIPHER_ECB_MODE; 553 *pDir = ICP_QAT_HW_CIPHER_ENCRYPT; 554 555 /* decrease since it's numbered from 1 instead of 0 */ 556 cipherAlgorithm = pSession->cipherAlgorithm - 1; 557 cipherDirection = 558 pSession->cipherDirection == CPA_CY_SYM_CIPHER_DIRECTION_ENCRYPT ? 
559 ICP_QAT_HW_CIPHER_ENCRYPT : 560 ICP_QAT_HW_CIPHER_DECRYPT; 561 562 *pAlgorithm = icp_qat_alg_info[cipherAlgorithm].algorithm; 563 *pMode = icp_qat_alg_info[cipherAlgorithm].mode; 564 *pDir = icp_qat_alg_info[cipherAlgorithm].dir[cipherDirection]; 565 *pKey_convert = 566 icp_qat_alg_info[cipherAlgorithm].key_convert[cipherDirection]; 567 568 if (IS_KEY_DEP_NO != icp_qat_alg_info[cipherAlgorithm].isKeyLenDepend) { 569 *pAlgorithm = icp_qat_alg_info[cipherAlgorithm] 570 .pAlgByKeySize[pSession->cipherKeyLenInBytes]; 571 } 572 573 /* CCP and AES_GCM single pass, despite being limited to CTR/AEAD mode, 574 * support both Encrypt/Decrypt modes - this is because of the 575 * differences in the hash computation/verification paths in 576 * encrypt/decrypt modes respectively. 577 * By default CCP is set as CTR Mode.Set AEAD Mode for AES_GCM. 578 */ 579 if (SPC == pSession->singlePassState) { 580 if (LAC_CIPHER_IS_GCM(pSession->cipherAlgorithm)) 581 *pMode = ICP_QAT_HW_CIPHER_AEAD_MODE; 582 else if (isCyGen4x(pService) && 583 LAC_CIPHER_IS_CCM(pSession->cipherAlgorithm)) 584 *pMode = ICP_QAT_HW_CIPHER_CCM_MODE; 585 586 if (cipherDirection == ICP_QAT_HW_CIPHER_DECRYPT) 587 *pDir = ICP_QAT_HW_CIPHER_DECRYPT; 588 } 589 } 590 591 void 592 LacSymQat_CipherHwBlockPopulateCfgData(lac_session_desc_t *pSession, 593 const void *pCipherHwBlock, 594 Cpa32U *pSizeInBytes) 595 { 596 icp_qat_hw_cipher_algo_t algorithm = ICP_QAT_HW_CIPHER_ALGO_NULL; 597 icp_qat_hw_cipher_mode_t mode = ICP_QAT_HW_CIPHER_ECB_MODE; 598 icp_qat_hw_cipher_dir_t dir = ICP_QAT_HW_CIPHER_ENCRYPT; 599 icp_qat_hw_cipher_convert_t key_convert; 600 icp_qat_hw_cipher_config_t *pCipherConfig = 601 (icp_qat_hw_cipher_config_t *)pCipherHwBlock; 602 icp_qat_hw_ucs_cipher_config_t *pUCSCipherConfig = 603 (icp_qat_hw_ucs_cipher_config_t *)pCipherHwBlock; 604 605 Cpa32U val, reserved; 606 Cpa32U aed_hash_cmp_length = 0; 607 608 *pSizeInBytes = 0; 609 610 LacSymQat_CipherGetCfgData( 611 pSession, &algorithm, &mode, &dir, 
&key_convert); 612 613 /* Build the cipher config into the hardware setup block */ 614 if (SPC == pSession->singlePassState) { 615 aed_hash_cmp_length = pSession->hashResultSize; 616 reserved = ICP_QAT_HW_CIPHER_CONFIG_BUILD_UPPER( 617 pSession->aadLenInBytes); 618 } else { 619 reserved = 0; 620 } 621 622 val = ICP_QAT_HW_CIPHER_CONFIG_BUILD( 623 mode, algorithm, key_convert, dir, aed_hash_cmp_length); 624 625 /* UCS slice has 128-bit configuration register. 626 Leacy cipher slice has 64-bit config register */ 627 if (ICP_QAT_FW_LA_USE_UCS_SLICE_TYPE == pSession->cipherSliceType) { 628 pUCSCipherConfig->val = val; 629 pUCSCipherConfig->reserved[0] = reserved; 630 pUCSCipherConfig->reserved[1] = 0; 631 pUCSCipherConfig->reserved[2] = 0; 632 *pSizeInBytes = sizeof(icp_qat_hw_ucs_cipher_config_t); 633 } else { 634 pCipherConfig->val = val; 635 pCipherConfig->reserved = reserved; 636 *pSizeInBytes = sizeof(icp_qat_hw_cipher_config_t); 637 } 638 } 639 640 void 641 LacSymQat_CipherHwBlockPopulateKeySetup( 642 lac_session_desc_t *pSessionDesc, 643 const CpaCySymCipherSetupData *pCipherSetupData, 644 Cpa32U targetKeyLenInBytes, 645 Cpa32U sliceType, 646 const void *pCipherHwBlock, 647 Cpa32U *pSizeInBytes) 648 { 649 Cpa8U *pCipherKey = (Cpa8U *)pCipherHwBlock; 650 Cpa32U actualKeyLenInBytes = pCipherSetupData->cipherKeyLenInBytes; 651 652 *pSizeInBytes = 0; 653 654 /* Key is copied into content descriptor for all cases except for 655 * Arc4 and Null cipher */ 656 if (!(LAC_CIPHER_IS_ARC4(pCipherSetupData->cipherAlgorithm) || 657 LAC_CIPHER_IS_NULL(pCipherSetupData->cipherAlgorithm))) { 658 /* Special handling of AES 192 key for UCS slice. 659 UCS requires it to have 32 bytes - set is as targetKeyLen 660 in this case, and add padding. It makes no sense 661 to force applications to provide such key length for couple 662 reasons: 663 1. It won't be possible to distinguish between AES 192 and 664 256 based on key lenght only 665 2. 
Only some modes of AES will use UCS slice, then 666 application will have to know which ones */ 667 if (ICP_QAT_FW_LA_USE_UCS_SLICE_TYPE == sliceType && 668 ICP_QAT_HW_AES_192_KEY_SZ == targetKeyLenInBytes) { 669 targetKeyLenInBytes = ICP_QAT_HW_UCS_AES_192_KEY_SZ; 670 } 671 672 /* Set the Cipher key field in the cipher block */ 673 memcpy(pCipherKey, 674 pCipherSetupData->pCipherKey, 675 actualKeyLenInBytes); 676 /* Pad the key with 0's if required */ 677 if (0 < (targetKeyLenInBytes - actualKeyLenInBytes)) { 678 LAC_OS_BZERO(pCipherKey + actualKeyLenInBytes, 679 targetKeyLenInBytes - actualKeyLenInBytes); 680 } 681 *pSizeInBytes += targetKeyLenInBytes; 682 683 switch (pCipherSetupData->cipherAlgorithm) { 684 /* For Kasumi in F8 mode Cipher Key is concatenated with 685 * Cipher Key XOR-ed with Key Modifier (CK||CK^KM) */ 686 case CPA_CY_SYM_CIPHER_KASUMI_F8: { 687 Cpa32U wordIndex = 0; 688 Cpa32U *pu32CipherKey = 689 (Cpa32U *)pCipherSetupData->pCipherKey; 690 Cpa32U *pTempKey = 691 (Cpa32U *)(pCipherKey + targetKeyLenInBytes); 692 693 /* XOR Key with KASUMI F8 key modifier at 4 bytes level 694 */ 695 for (wordIndex = 0; wordIndex < 696 LAC_BYTES_TO_LONGWORDS(targetKeyLenInBytes); 697 wordIndex++) { 698 pTempKey[wordIndex] = pu32CipherKey[wordIndex] ^ 699 LAC_CIPHER_KASUMI_F8_KEY_MODIFIER_4_BYTES; 700 } 701 702 *pSizeInBytes += targetKeyLenInBytes; 703 704 /* also add padding for F8 */ 705 *pSizeInBytes += LAC_QUADWORDS_TO_BYTES( 706 ICP_QAT_HW_MODE_F8_NUM_REG_TO_CLEAR); 707 LAC_OS_BZERO((Cpa8U *)pTempKey + targetKeyLenInBytes, 708 LAC_QUADWORDS_TO_BYTES( 709 ICP_QAT_HW_MODE_F8_NUM_REG_TO_CLEAR)); 710 } break; 711 /* For AES in F8 mode Cipher Key is concatenated with 712 * Cipher Key XOR-ed with Key Mask (CK||CK^KM) */ 713 case CPA_CY_SYM_CIPHER_AES_F8: { 714 Cpa32U index = 0; 715 Cpa8U *pTempKey = 716 pCipherKey + (targetKeyLenInBytes / 2); 717 *pSizeInBytes += targetKeyLenInBytes; 718 /* XOR Key with key Mask */ 719 for (index = 0; index < targetKeyLenInBytes; 
index++) { 720 pTempKey[index] = 721 pCipherKey[index] ^ pTempKey[index]; 722 } 723 pTempKey = (pCipherKey + targetKeyLenInBytes); 724 /* also add padding for AES F8 */ 725 *pSizeInBytes += 2 * targetKeyLenInBytes; 726 LAC_OS_BZERO(pTempKey, 2 * targetKeyLenInBytes); 727 } break; 728 case CPA_CY_SYM_CIPHER_SNOW3G_UEA2: { 729 /* For Snow3G zero area after the key for FW */ 730 LAC_OS_BZERO(pCipherKey + targetKeyLenInBytes, 731 ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ); 732 733 *pSizeInBytes += ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ; 734 } break; 735 case CPA_CY_SYM_CIPHER_ZUC_EEA3: { 736 /* For ZUC zero area after the key for FW */ 737 LAC_OS_BZERO(pCipherKey + targetKeyLenInBytes, 738 ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ); 739 740 *pSizeInBytes += ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ; 741 } break; 742 case CPA_CY_SYM_CIPHER_AES_XTS: { 743 /* For AES in XTS mode Cipher Key is concatenated with 744 * second Cipher Key which is used for tweak calculation 745 * (CK1||CK2). For decryption Cipher Key needs to be 746 * converted to reverse key.*/ 747 if (ICP_QAT_FW_LA_USE_UCS_SLICE_TYPE == sliceType) { 748 Cpa32U key_len = 749 pCipherSetupData->cipherKeyLenInBytes / 2; 750 memcpy(pSessionDesc->cipherAesXtsKey1Forward, 751 pCipherSetupData->pCipherKey, 752 key_len); 753 754 qatUtilsAESKeyExpansionForward( 755 pSessionDesc->cipherAesXtsKey1Forward, 756 key_len, 757 (uint32_t *) 758 pSessionDesc->cipherAesXtsKey1Reverse); 759 760 memcpy(pSessionDesc->cipherAesXtsKey2, 761 pCipherSetupData->pCipherKey + key_len, 762 key_len); 763 764 if (CPA_CY_SYM_CIPHER_DIRECTION_DECRYPT == 765 pCipherSetupData->cipherDirection) { 766 memcpy(pCipherKey, 767 pSessionDesc 768 ->cipherAesXtsKey1Reverse, 769 key_len); 770 } else { 771 memcpy(pCipherKey, 772 pSessionDesc 773 ->cipherAesXtsKey1Forward, 774 key_len); 775 } 776 } 777 } break; 778 default: 779 break; 780 } 781 } 782 } 783 784 /***************************************************************************** 785 * External functions 786 
 *****************************************************************************/

/**
 * @ingroup LacSymQat_Cipher
 * @description
 *      Return the block size in bytes for the given cipher algorithm.
 *      Returns 0 and logs an error for unrecognised algorithms.
 */
Cpa8U
LacSymQat_CipherBlockSizeBytesGet(CpaCySymCipherAlgorithm cipherAlgorithm)
{
	Cpa8U blockSize = 0;
	switch (cipherAlgorithm) {
	case CPA_CY_SYM_CIPHER_ARC4:
		blockSize = LAC_CIPHER_ARC4_BLOCK_LEN_BYTES;
		break;
	/* Handle AES or AES_F8 */
	case CPA_CY_SYM_CIPHER_AES_ECB:
	case CPA_CY_SYM_CIPHER_AES_CBC:
	case CPA_CY_SYM_CIPHER_AES_CTR:
	case CPA_CY_SYM_CIPHER_AES_CCM:
	case CPA_CY_SYM_CIPHER_AES_GCM:
	case CPA_CY_SYM_CIPHER_AES_XTS:
	case CPA_CY_SYM_CIPHER_AES_F8:
		blockSize = ICP_QAT_HW_AES_BLK_SZ;
		break;
	/* Handle DES */
	case CPA_CY_SYM_CIPHER_DES_ECB:
	case CPA_CY_SYM_CIPHER_DES_CBC:
		blockSize = ICP_QAT_HW_DES_BLK_SZ;
		break;
	/* Handle TRIPLE DES */
	case CPA_CY_SYM_CIPHER_3DES_ECB:
	case CPA_CY_SYM_CIPHER_3DES_CBC:
	case CPA_CY_SYM_CIPHER_3DES_CTR:
		blockSize = ICP_QAT_HW_3DES_BLK_SZ;
		break;
	case CPA_CY_SYM_CIPHER_KASUMI_F8:
		blockSize = ICP_QAT_HW_KASUMI_BLK_SZ;
		break;
	case CPA_CY_SYM_CIPHER_SNOW3G_UEA2:
		blockSize = ICP_QAT_HW_SNOW_3G_BLK_SZ;
		break;
	case CPA_CY_SYM_CIPHER_ZUC_EEA3:
		blockSize = ICP_QAT_HW_ZUC_3G_BLK_SZ;
		break;
	case CPA_CY_SYM_CIPHER_NULL:
		blockSize = LAC_CIPHER_NULL_BLOCK_LEN_BYTES;
		break;
	case CPA_CY_SYM_CIPHER_CHACHA:
		blockSize = ICP_QAT_HW_CHACHAPOLY_BLK_SZ;
		break;
	case CPA_CY_SYM_CIPHER_SM4_ECB:
	case CPA_CY_SYM_CIPHER_SM4_CBC:
	case CPA_CY_SYM_CIPHER_SM4_CTR:
		blockSize = ICP_QAT_HW_SM4_BLK_SZ;
		break;
	default:
		QAT_UTILS_LOG("Algorithm not supported in Cipher");
	}
	return blockSize;
}

/**
 * @ingroup LacSymQat_Cipher
 * @description
 *      Return the IV (cipher state) size in bytes for the given cipher
 *      algorithm.  ECB-mode and NULL ciphers use no IV (returns 0); for
 *      algorithms without an explicit IV-size constant the block size is
 *      used as the IV size.
 */
Cpa32U
LacSymQat_CipherIvSizeBytesGet(CpaCySymCipherAlgorithm cipherAlgorithm)
{
	Cpa32U ivSize = 0;
	switch (cipherAlgorithm) {
	case CPA_CY_SYM_CIPHER_ARC4:
		ivSize = LAC_CIPHER_ARC4_STATE_LEN_BYTES;
		break;
	case CPA_CY_SYM_CIPHER_KASUMI_F8:
		ivSize = ICP_QAT_HW_KASUMI_BLK_SZ;
		break;
	case CPA_CY_SYM_CIPHER_SNOW3G_UEA2:
		ivSize = ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
		break;
	case CPA_CY_SYM_CIPHER_ZUC_EEA3:
		ivSize = ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
		break;
	case CPA_CY_SYM_CIPHER_CHACHA:
		ivSize = ICP_QAT_HW_CHACHAPOLY_IV_SZ;
		break;
	case CPA_CY_SYM_CIPHER_AES_ECB:
	case CPA_CY_SYM_CIPHER_DES_ECB:
	case CPA_CY_SYM_CIPHER_3DES_ECB:
	case CPA_CY_SYM_CIPHER_SM4_ECB:
	case CPA_CY_SYM_CIPHER_NULL:
		/* for all ECB Mode IV size is 0 */
		break;
	default:
		/* Fall back to the algorithm's block size */
		ivSize = LacSymQat_CipherBlockSizeBytesGet(cipherAlgorithm);
	}
	return ivSize;
}

/**
 * @ingroup LacSymQat_Cipher
 * @description
 *      Populate the cipher request parameters of a bulk request: cipher
 *      offset/length, and the IV either as a 64-bit physical pointer to
 *      the caller's flat buffer or embedded directly in the 16-byte
 *      cipher_IV_array, selecting the format via the service-specific
 *      flags.  For XTS on the UCS slice the embedded value is the initial
 *      tweak, computed by AES-encrypting the user IV with key 2.
 *      Always returns CPA_STATUS_SUCCESS.
 */
inline CpaStatus
LacSymQat_CipherRequestParamsPopulate(lac_session_desc_t *pSessionDesc,
				      icp_qat_fw_la_bulk_req_t *pReq,
				      Cpa32U cipherOffsetInBytes,
				      Cpa32U cipherLenInBytes,
				      Cpa64U ivBufferPhysAddr,
				      Cpa8U *pIvBufferVirt)
{
	icp_qat_fw_la_cipher_req_params_t *pCipherReqParams;
	icp_qat_fw_cipher_cd_ctrl_hdr_t *pCipherCdCtrlHdr;
	icp_qat_fw_serv_specif_flags *pCipherSpecificFlags;
	Cpa32U usedBufSize = 0;
	Cpa32U totalBufSize = 0;

	pCipherReqParams = (icp_qat_fw_la_cipher_req_params_t
				*)((Cpa8U *)&(pReq->serv_specif_rqpars) +
				   ICP_QAT_FW_CIPHER_REQUEST_PARAMETERS_OFFSET);
	pCipherCdCtrlHdr = (icp_qat_fw_cipher_cd_ctrl_hdr_t *)&(pReq->cd_ctrl);
	pCipherSpecificFlags = &(pReq->comn_hdr.serv_specif_flags);

	pCipherReqParams->cipher_offset = cipherOffsetInBytes;
	pCipherReqParams->cipher_length = cipherLenInBytes;

	/* Don't copy the buffer into the Msg if
	 * it's too big for the cipher_IV_array
	 * OR if the FW needs to update it
	 * OR if there's no buffer supplied
	 * OR if last partial
	 */
	if ((pCipherCdCtrlHdr->cipher_state_sz >
	     LAC_SYM_QAT_HASH_IV_REQ_MAX_SIZE_QW) ||
	    (ICP_QAT_FW_LA_UPDATE_STATE_GET(*pCipherSpecificFlags) ==
	     ICP_QAT_FW_LA_UPDATE_STATE) ||
	    (pIvBufferVirt == NULL) ||
	    (ICP_QAT_FW_LA_PARTIAL_GET(*pCipherSpecificFlags) ==
	     ICP_QAT_FW_LA_PARTIAL_END)) {
		/* Populate the field with a ptr to the flat buffer */
		pCipherReqParams->u.s.cipher_IV_ptr = ivBufferPhysAddr;
		pCipherReqParams->u.s.resrvd1 = 0;
		/* Set the flag indicating the field format */
		ICP_QAT_FW_LA_CIPH_IV_FLD_FLAG_SET(
		    *pCipherSpecificFlags, ICP_QAT_FW_CIPH_IV_64BIT_PTR);
	} else {
		/* Populate the field with the contents of the buffer,
		 * zero field first as data may be smaller than the field */

		/* In case of XTS mode using UCS slice always embed IV.
		 * IV provided by user needs to be encrypted to calculate
		 * initial tweak, use pCipherReqParams->u.cipher_IV_array as
		 * destination buffer for tweak value */
		if (ICP_QAT_FW_LA_USE_UCS_SLICE_TYPE ==
			pSessionDesc->cipherSliceType &&
		    LAC_CIPHER_IS_XTS_MODE(pSessionDesc->cipherAlgorithm)) {
			memset(pCipherReqParams->u.cipher_IV_array,
			       0,
			       LAC_LONGWORDS_TO_BYTES(
				   ICP_QAT_FW_NUM_LONGWORDS_4));
			qatUtilsAESEncrypt(
			    pSessionDesc->cipherAesXtsKey2,
			    pSessionDesc->cipherKeyLenInBytes / 2,
			    pIvBufferVirt,
			    (Cpa8U *)pCipherReqParams->u.cipher_IV_array);
		} else {
			totalBufSize =
			    LAC_LONGWORDS_TO_BYTES(ICP_QAT_FW_NUM_LONGWORDS_4);
			usedBufSize = LAC_QUADWORDS_TO_BYTES(
			    pCipherCdCtrlHdr->cipher_state_sz);
			/* Only initialise unused buffer if applicable*/
			/* NOTE(review): cipher_IV_array is indexed in Cpa32U
			 * elements while usedBufSize is a byte count; since
			 * usedBufSize is a quadword multiple here,
			 * (usedBufSize & LAC_UNUSED_POS_MASK) is 0 and the
			 * memset starts at the array base — verify this is
			 * the intended start position */
			if (usedBufSize < totalBufSize) {
				memset(
				    (&pCipherReqParams->u.cipher_IV_array
					 [usedBufSize & LAC_UNUSED_POS_MASK]),
				    0,
				    totalBufSize - usedBufSize);
			}
			memcpy(pCipherReqParams->u.cipher_IV_array,
			       pIvBufferVirt,
			       usedBufSize);
		}
		/* Set the flag indicating the field format */
		ICP_QAT_FW_LA_CIPH_IV_FLD_FLAG_SET(
		    *pCipherSpecificFlags, ICP_QAT_FW_CIPH_IV_16BYTE_DATA);
	}

	return CPA_STATUS_SUCCESS;
}

/**
 * @ingroup LacSymQat_Cipher
 * @description
 *      Initialise the 256-byte ARC4 state matrix from the key using the
 *      RC4 key-scheduling algorithm, then append the initial i and j
 *      registers (both 0) for the QAT hardware.  pArc4CipherState must be
 *      at least LAC_CIPHER_ARC4_KEY_MATRIX_LEN_BYTES + 2 bytes.
 */
void
LacSymQat_CipherArc4StateInit(const Cpa8U *pKey,
			      Cpa32U keyLenInBytes,
			      Cpa8U *pArc4CipherState)
{
	Cpa32U i = 0;
	Cpa32U j = 0;
	Cpa32U k = 0;

	/* Identity permutation: state[i] = i */
	for (i = 0; i < LAC_CIPHER_ARC4_KEY_MATRIX_LEN_BYTES; ++i) {
		pArc4CipherState[i] = (Cpa8U)i;
	}

	/* Key-scheduling pass; k cycles through the key bytes */
	for (i = 0, k = 0; i < LAC_CIPHER_ARC4_KEY_MATRIX_LEN_BYTES; ++i, ++k) {
		Cpa8U swap = 0;

		if (k >= keyLenInBytes)
			k -= keyLenInBytes;

		j = (j + pArc4CipherState[i] + pKey[k]);
		if (j >= LAC_CIPHER_ARC4_KEY_MATRIX_LEN_BYTES)
			j %= LAC_CIPHER_ARC4_KEY_MATRIX_LEN_BYTES;

		/* Swap state[i] & state[j] */
		swap = pArc4CipherState[i];
		pArc4CipherState[i] = pArc4CipherState[j];
		pArc4CipherState[j] = swap;
	}

	/* Initialise i & j values for QAT */
	pArc4CipherState[LAC_CIPHER_ARC4_KEY_MATRIX_LEN_BYTES] = 0;
	pArc4CipherState[LAC_CIPHER_ARC4_KEY_MATRIX_LEN_BYTES + 1] = 0;
}

/* Update the cipher_key_sz in the Request cache prepared and stored
 * in the session */
/**
 * @ingroup LacSymQat_Cipher
 * @description
 *      Rewrite the cipher key size (in quadwords) in the session's cached
 *      request footer, used when the effective XTS key length changes.
 */
void
LacSymQat_CipherXTSModeUpdateKeyLen(lac_session_desc_t *pSessionDesc,
				    Cpa32U newKeySizeInBytes)
{
	icp_qat_fw_cipher_cd_ctrl_hdr_t *pCipherControlBlock = NULL;

	pCipherControlBlock = (icp_qat_fw_cipher_cd_ctrl_hdr_t *)&(
	    pSessionDesc->reqCacheFtr.cd_ctrl);

	pCipherControlBlock->cipher_key_sz =
	    LAC_BYTES_TO_QUADWORDS(newKeySizeInBytes);
}