/* SPDX-License-Identifier: (BSD-3-Clause OR GPL-2.0-only) */
/* Copyright(c) 2014 - 2020 Intel Corporation */
#ifndef _ICP_QAT_HW_H_
#define _ICP_QAT_HW_H_

#include <linux/bits.h>

enum icp_qat_hw_ae_id {
	ICP_QAT_HW_AE_0 = 0,
	ICP_QAT_HW_AE_1 = 1,
	ICP_QAT_HW_AE_2 = 2,
	ICP_QAT_HW_AE_3 = 3,
	ICP_QAT_HW_AE_4 = 4,
	ICP_QAT_HW_AE_5 = 5,
	ICP_QAT_HW_AE_6 = 6,
	ICP_QAT_HW_AE_7 = 7,
	ICP_QAT_HW_AE_8 = 8,
	ICP_QAT_HW_AE_9 = 9,
	ICP_QAT_HW_AE_10 = 10,
	ICP_QAT_HW_AE_11 = 11,
	ICP_QAT_HW_AE_12 = 12,
	ICP_QAT_HW_AE_13 = 13,
	ICP_QAT_HW_AE_14 = 14,
	ICP_QAT_HW_AE_15 = 15,
	ICP_QAT_HW_AE_16 = 16,
	ICP_QAT_HW_AE_DELIMITER = 17
};

enum icp_qat_hw_qat_id {
	ICP_QAT_HW_QAT_0 = 0,
	ICP_QAT_HW_QAT_1 = 1,
	ICP_QAT_HW_QAT_2 = 2,
	ICP_QAT_HW_QAT_3 = 3,
	ICP_QAT_HW_QAT_4 = 4,
	ICP_QAT_HW_QAT_5 = 5,
	ICP_QAT_HW_QAT_DELIMITER = 6
};

enum icp_qat_hw_auth_algo {
	ICP_QAT_HW_AUTH_ALGO_NULL = 0,
	ICP_QAT_HW_AUTH_ALGO_SHA1 = 1,
	ICP_QAT_HW_AUTH_ALGO_MD5 = 2,
	ICP_QAT_HW_AUTH_ALGO_SHA224 = 3,
	ICP_QAT_HW_AUTH_ALGO_SHA256 = 4,
	ICP_QAT_HW_AUTH_ALGO_SHA384 = 5,
	ICP_QAT_HW_AUTH_ALGO_SHA512 = 6,
	ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC = 7,
	ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC = 8,
	ICP_QAT_HW_AUTH_ALGO_AES_F9 = 9,
	ICP_QAT_HW_AUTH_ALGO_GALOIS_128 = 10,
	ICP_QAT_HW_AUTH_ALGO_GALOIS_64 = 11,
	ICP_QAT_HW_AUTH_ALGO_KASUMI_F9 = 12,
	ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2 = 13,
	ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3 = 14,
	ICP_QAT_HW_AUTH_RESERVED_1 = 15,
	ICP_QAT_HW_AUTH_RESERVED_2 = 16,
	ICP_QAT_HW_AUTH_ALGO_SHA3_256 = 17,
	ICP_QAT_HW_AUTH_RESERVED_3 = 18,
	ICP_QAT_HW_AUTH_ALGO_SHA3_512 = 19,
	ICP_QAT_HW_AUTH_ALGO_DELIMITER = 20
};

enum icp_qat_hw_auth_mode {
	ICP_QAT_HW_AUTH_MODE0 = 0,
	ICP_QAT_HW_AUTH_MODE1 = 1,
	ICP_QAT_HW_AUTH_MODE2 = 2,
	ICP_QAT_HW_AUTH_MODE_DELIMITER = 3
};

struct icp_qat_hw_auth_config {
	__u32 config;
	__u32 reserved;
};

struct icp_qat_hw_ucs_cipher_config {
	__u32 val;
	__u32 reserved[3];
};

enum icp_qat_slice_mask {
	ICP_ACCEL_MASK_CIPHER_SLICE = BIT(0),
	ICP_ACCEL_MASK_AUTH_SLICE = BIT(1),
	ICP_ACCEL_MASK_PKE_SLICE = BIT(2),
	ICP_ACCEL_MASK_COMPRESS_SLICE = BIT(3),
	ICP_ACCEL_MASK_LZS_SLICE = BIT(4),
	ICP_ACCEL_MASK_EIA3_SLICE = BIT(5),
	ICP_ACCEL_MASK_SHA3_SLICE = BIT(6),
};

enum icp_qat_capabilities_mask {
	ICP_ACCEL_CAPABILITIES_CRYPTO_SYMMETRIC = BIT(0),
	ICP_ACCEL_CAPABILITIES_CRYPTO_ASYMMETRIC = BIT(1),
	ICP_ACCEL_CAPABILITIES_CIPHER = BIT(2),
	ICP_ACCEL_CAPABILITIES_AUTHENTICATION = BIT(3),
	ICP_ACCEL_CAPABILITIES_RESERVED_1 = BIT(4),
	ICP_ACCEL_CAPABILITIES_COMPRESSION = BIT(5),
	/* Bits 6-7 are currently reserved */
	ICP_ACCEL_CAPABILITIES_ZUC = BIT(8),
	ICP_ACCEL_CAPABILITIES_SHA3 = BIT(9),
	/* Bits 10-11 are currently reserved */
	ICP_ACCEL_CAPABILITIES_HKDF = BIT(12),
	ICP_ACCEL_CAPABILITIES_ECEDMONT = BIT(13),
	ICP_ACCEL_CAPABILITIES_EXT_ALGCHAIN = BIT(14),
	ICP_ACCEL_CAPABILITIES_SHA3_EXT = BIT(15),
	ICP_ACCEL_CAPABILITIES_AESGCM_SPC = BIT(16),
	ICP_ACCEL_CAPABILITIES_CHACHA_POLY = BIT(17),
	ICP_ACCEL_CAPABILITIES_SM2 = BIT(18),
	ICP_ACCEL_CAPABILITIES_SM3 = BIT(19),
	ICP_ACCEL_CAPABILITIES_SM4 = BIT(20),
	/* Bit 21 is currently reserved */
	ICP_ACCEL_CAPABILITIES_CNV_INTEGRITY = BIT(22),
	ICP_ACCEL_CAPABILITIES_CNV_INTEGRITY64 = BIT(23),
	ICP_ACCEL_CAPABILITIES_LZ4_COMPRESSION = BIT(24),
	ICP_ACCEL_CAPABILITIES_LZ4S_COMPRESSION = BIT(25),
	ICP_ACCEL_CAPABILITIES_AES_V2 = BIT(26),
	/* Bits 27-28 are currently reserved */
	ICP_ACCEL_CAPABILITIES_ZUC_256 = BIT(29),
	ICP_ACCEL_CAPABILITIES_WIRELESS_CRYPTO_EXT = BIT(30),
};

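/*
 * Layout of the 32-bit auth config word assembled by
 * ICP_QAT_HW_AUTH_CONFIG_BUILD(), as derived from the BITPOS/MASK
 * definitions below:
 *
 *   bits [3:0]   - low bits of the auth algorithm (icp_qat_hw_auth_algo)
 *   bits [7:4]   - auth mode (icp_qat_hw_auth_mode)
 *   bits [14:8]  - comparator (digest) length in bytes
 *   bit  16      - SHA-3 padding enable (set for SHA3-256 and SHA3-512)
 *   bits [23:22] - high bits of the auth algorithm (non-zero only for
 *                  the SHA-3 algorithms, whose enum values exceed 15)
 *
 * Illustrative example: ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE1,
 * ICP_QAT_HW_AUTH_ALGO_SHA256, 32) evaluates to 0x2014.
 */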
#define QAT_AUTH_MODE_BITPOS 4
#define QAT_AUTH_MODE_MASK 0xF
#define QAT_AUTH_ALGO_BITPOS 0
#define QAT_AUTH_ALGO_MASK 0xF
#define QAT_AUTH_CMP_BITPOS 8
#define QAT_AUTH_CMP_MASK 0x7F
#define QAT_AUTH_SHA3_PADDING_BITPOS 16
#define QAT_AUTH_SHA3_PADDING_MASK 0x1
#define QAT_AUTH_ALGO_SHA3_BITPOS 22
#define QAT_AUTH_ALGO_SHA3_MASK 0x3
#define ICP_QAT_HW_AUTH_CONFIG_BUILD(mode, algo, cmp_len) \
	(((mode & QAT_AUTH_MODE_MASK) << QAT_AUTH_MODE_BITPOS) | \
	((algo & QAT_AUTH_ALGO_MASK) << QAT_AUTH_ALGO_BITPOS) | \
	(((algo >> 4) & QAT_AUTH_ALGO_SHA3_MASK) << \
	 QAT_AUTH_ALGO_SHA3_BITPOS) | \
	(((((algo == ICP_QAT_HW_AUTH_ALGO_SHA3_256) || \
	    (algo == ICP_QAT_HW_AUTH_ALGO_SHA3_512)) ? 1 : 0) \
	  & QAT_AUTH_SHA3_PADDING_MASK) << QAT_AUTH_SHA3_PADDING_BITPOS) | \
	((cmp_len & QAT_AUTH_CMP_MASK) << QAT_AUTH_CMP_BITPOS))

struct icp_qat_hw_auth_counter {
	__be32 counter;
	__u32 reserved;
};

#define QAT_AUTH_COUNT_MASK 0xFFFFFFFF
#define QAT_AUTH_COUNT_BITPOS 0
#define ICP_QAT_HW_AUTH_COUNT_BUILD(val) \
	(((val) & QAT_AUTH_COUNT_MASK) << QAT_AUTH_COUNT_BITPOS)

struct icp_qat_hw_auth_setup {
	struct icp_qat_hw_auth_config auth_config;
	struct icp_qat_hw_auth_counter auth_counter;
};

#define QAT_HW_DEFAULT_ALIGNMENT 8
#define QAT_HW_ROUND_UP(val, n) (((val) + ((n) - 1)) & (~((n) - 1)))
#define ICP_QAT_HW_NULL_STATE1_SZ 32
#define ICP_QAT_HW_MD5_STATE1_SZ 16
#define ICP_QAT_HW_SHA1_STATE1_SZ 20
#define ICP_QAT_HW_SHA224_STATE1_SZ 32
#define ICP_QAT_HW_SHA256_STATE1_SZ 32
#define ICP_QAT_HW_SHA3_256_STATE1_SZ 32
#define ICP_QAT_HW_SHA384_STATE1_SZ 64
#define ICP_QAT_HW_SHA512_STATE1_SZ 64
#define ICP_QAT_HW_SHA3_512_STATE1_SZ 64
#define ICP_QAT_HW_SHA3_224_STATE1_SZ 28
#define ICP_QAT_HW_SHA3_384_STATE1_SZ 48
#define ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ 16
#define ICP_QAT_HW_AES_CBC_MAC_STATE1_SZ 16
#define ICP_QAT_HW_AES_F9_STATE1_SZ 32
#define ICP_QAT_HW_KASUMI_F9_STATE1_SZ 16
#define ICP_QAT_HW_GALOIS_128_STATE1_SZ 16
#define ICP_QAT_HW_SNOW_3G_UIA2_STATE1_SZ 8
#define ICP_QAT_HW_ZUC_3G_EIA3_STATE1_SZ 8
#define ICP_QAT_HW_NULL_STATE2_SZ 32
#define ICP_QAT_HW_MD5_STATE2_SZ 16
#define ICP_QAT_HW_SHA1_STATE2_SZ 20
#define ICP_QAT_HW_SHA224_STATE2_SZ 32
#define ICP_QAT_HW_SHA256_STATE2_SZ 32
#define ICP_QAT_HW_SHA3_256_STATE2_SZ 0
#define ICP_QAT_HW_SHA384_STATE2_SZ 64
#define ICP_QAT_HW_SHA512_STATE2_SZ 64
#define ICP_QAT_HW_SHA3_512_STATE2_SZ 0
#define ICP_QAT_HW_SHA3_224_STATE2_SZ 0
#define ICP_QAT_HW_SHA3_384_STATE2_SZ 0
#define ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ 16
#define ICP_QAT_HW_AES_CBC_MAC_KEY_SZ 16
#define ICP_QAT_HW_AES_CCM_CBC_E_CTR0_SZ 16
#define ICP_QAT_HW_F9_IK_SZ 16
#define ICP_QAT_HW_F9_FK_SZ 16
#define ICP_QAT_HW_KASUMI_F9_STATE2_SZ (ICP_QAT_HW_F9_IK_SZ + \
	ICP_QAT_HW_F9_FK_SZ)
#define ICP_QAT_HW_AES_F9_STATE2_SZ ICP_QAT_HW_KASUMI_F9_STATE2_SZ
#define ICP_QAT_HW_SNOW_3G_UIA2_STATE2_SZ 24
#define ICP_QAT_HW_ZUC_3G_EIA3_STATE2_SZ 32
#define ICP_QAT_HW_GALOIS_H_SZ 16
#define ICP_QAT_HW_GALOIS_LEN_A_SZ 8
#define ICP_QAT_HW_GALOIS_E_CTR0_SZ 16

struct icp_qat_hw_auth_sha512 {
	struct icp_qat_hw_auth_setup inner_setup;
	__u8 state1[ICP_QAT_HW_SHA512_STATE1_SZ];
	struct icp_qat_hw_auth_setup outer_setup;
	__u8 state2[ICP_QAT_HW_SHA512_STATE2_SZ];
};

struct icp_qat_hw_auth_algo_blk {
	struct icp_qat_hw_auth_sha512 sha;
};

#define ICP_QAT_HW_GALOIS_LEN_A_BITPOS 0
#define ICP_QAT_HW_GALOIS_LEN_A_MASK 0xFFFFFFFF

enum icp_qat_hw_cipher_algo {
	ICP_QAT_HW_CIPHER_ALGO_NULL = 0,
	ICP_QAT_HW_CIPHER_ALGO_DES = 1,
	ICP_QAT_HW_CIPHER_ALGO_3DES = 2,
	ICP_QAT_HW_CIPHER_ALGO_AES128 = 3,
	ICP_QAT_HW_CIPHER_ALGO_AES192 = 4,
	ICP_QAT_HW_CIPHER_ALGO_AES256 = 5,
	ICP_QAT_HW_CIPHER_ALGO_ARC4 = 6,
	ICP_QAT_HW_CIPHER_ALGO_KASUMI = 7,
	ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2 = 8,
	ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3 = 9,
	ICP_QAT_HW_CIPHER_DELIMITER = 10
};

enum icp_qat_hw_cipher_mode {
	ICP_QAT_HW_CIPHER_ECB_MODE = 0,
	ICP_QAT_HW_CIPHER_CBC_MODE = 1,
	ICP_QAT_HW_CIPHER_CTR_MODE = 2,
	ICP_QAT_HW_CIPHER_F8_MODE = 3,
	ICP_QAT_HW_CIPHER_XTS_MODE = 6,
	ICP_QAT_HW_CIPHER_MODE_DELIMITER = 7
};

struct icp_qat_hw_cipher_config {
	__u32 val;
	__u32 reserved;
};

enum icp_qat_hw_cipher_dir {
	ICP_QAT_HW_CIPHER_ENCRYPT = 0,
	ICP_QAT_HW_CIPHER_DECRYPT = 1,
};

enum icp_qat_hw_cipher_convert {
	ICP_QAT_HW_CIPHER_NO_CONVERT = 0,
	ICP_QAT_HW_CIPHER_KEY_CONVERT = 1,
};

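/*
 * Layout of the 32-bit cipher config word assembled by
 * ICP_QAT_HW_CIPHER_CONFIG_BUILD(), as derived from the BITPOS/MASK
 * definitions below:
 *
 *   bits [3:0] - cipher algorithm (icp_qat_hw_cipher_algo)
 *   bits [7:4] - cipher mode (icp_qat_hw_cipher_mode)
 *   bit  8     - direction (0 = encrypt, 1 = decrypt)
 *   bit  9     - key convert (0 = no convert, 1 = convert)
 *
 * Illustrative example:
 * ICP_QAT_HW_CIPHER_CONFIG_BUILD(ICP_QAT_HW_CIPHER_CBC_MODE,
 * ICP_QAT_HW_CIPHER_ALGO_AES256, ICP_QAT_HW_CIPHER_NO_CONVERT,
 * ICP_QAT_HW_CIPHER_ENCRYPT) evaluates to 0x15.
 */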
#define QAT_CIPHER_MODE_BITPOS 4
#define QAT_CIPHER_MODE_MASK 0xF
#define QAT_CIPHER_ALGO_BITPOS 0
#define QAT_CIPHER_ALGO_MASK 0xF
#define QAT_CIPHER_CONVERT_BITPOS 9
#define QAT_CIPHER_CONVERT_MASK 0x1
#define QAT_CIPHER_DIR_BITPOS 8
#define QAT_CIPHER_DIR_MASK 0x1
#define QAT_CIPHER_MODE_F8_KEY_SZ_MULT 2
#define QAT_CIPHER_MODE_XTS_KEY_SZ_MULT 2
#define ICP_QAT_HW_CIPHER_CONFIG_BUILD(mode, algo, convert, dir) \
	(((mode & QAT_CIPHER_MODE_MASK) << QAT_CIPHER_MODE_BITPOS) | \
	((algo & QAT_CIPHER_ALGO_MASK) << QAT_CIPHER_ALGO_BITPOS) | \
	((convert & QAT_CIPHER_CONVERT_MASK) << QAT_CIPHER_CONVERT_BITPOS) | \
	((dir & QAT_CIPHER_DIR_MASK) << QAT_CIPHER_DIR_BITPOS))
#define ICP_QAT_HW_DES_BLK_SZ 8
#define ICP_QAT_HW_3DES_BLK_SZ 8
#define ICP_QAT_HW_NULL_BLK_SZ 8
#define ICP_QAT_HW_AES_BLK_SZ 16
#define ICP_QAT_HW_KASUMI_BLK_SZ 8
#define ICP_QAT_HW_SNOW_3G_BLK_SZ 8
#define ICP_QAT_HW_ZUC_3G_BLK_SZ 8
#define ICP_QAT_HW_NULL_KEY_SZ 256
#define ICP_QAT_HW_DES_KEY_SZ 8
#define ICP_QAT_HW_3DES_KEY_SZ 24
#define ICP_QAT_HW_AES_128_KEY_SZ 16
#define ICP_QAT_HW_AES_192_KEY_SZ 24
#define ICP_QAT_HW_AES_256_KEY_SZ 32
#define ICP_QAT_HW_AES_128_F8_KEY_SZ (ICP_QAT_HW_AES_128_KEY_SZ * \
	QAT_CIPHER_MODE_F8_KEY_SZ_MULT)
#define ICP_QAT_HW_AES_192_F8_KEY_SZ (ICP_QAT_HW_AES_192_KEY_SZ * \
	QAT_CIPHER_MODE_F8_KEY_SZ_MULT)
#define ICP_QAT_HW_AES_256_F8_KEY_SZ (ICP_QAT_HW_AES_256_KEY_SZ * \
	QAT_CIPHER_MODE_F8_KEY_SZ_MULT)
#define ICP_QAT_HW_AES_128_XTS_KEY_SZ (ICP_QAT_HW_AES_128_KEY_SZ * \
	QAT_CIPHER_MODE_XTS_KEY_SZ_MULT)
#define ICP_QAT_HW_AES_256_XTS_KEY_SZ (ICP_QAT_HW_AES_256_KEY_SZ * \
	QAT_CIPHER_MODE_XTS_KEY_SZ_MULT)
#define ICP_QAT_HW_KASUMI_KEY_SZ 16
#define ICP_QAT_HW_KASUMI_F8_KEY_SZ (ICP_QAT_HW_KASUMI_KEY_SZ * \
	QAT_CIPHER_MODE_F8_KEY_SZ_MULT)
#define ICP_QAT_HW_ARC4_KEY_SZ 256
#define ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ 16
#define ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ 16
#define ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ 16
#define ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ 16
#define ICP_QAT_HW_MODE_F8_NUM_REG_TO_CLEAR 2
#define INIT_SHRAM_CONSTANTS_TABLE_SZ 1024

struct icp_qat_hw_cipher_aes256_f8 {
	struct icp_qat_hw_cipher_config cipher_config;
	__u8 key[ICP_QAT_HW_AES_256_F8_KEY_SZ];
};

struct icp_qat_hw_ucs_cipher_aes256_f8 {
	struct icp_qat_hw_ucs_cipher_config cipher_config;
	__u8 key[ICP_QAT_HW_AES_256_F8_KEY_SZ];
};

struct icp_qat_hw_cipher_algo_blk {
	union {
		struct icp_qat_hw_cipher_aes256_f8 aes;
		struct icp_qat_hw_ucs_cipher_aes256_f8 ucs_aes;
	};
} __aligned(64);

enum icp_qat_hw_compression_direction {
	ICP_QAT_HW_COMPRESSION_DIR_COMPRESS = 0,
	ICP_QAT_HW_COMPRESSION_DIR_DECOMPRESS = 1,
	ICP_QAT_HW_COMPRESSION_DIR_DELIMITER = 2
};

enum icp_qat_hw_compression_delayed_match {
	ICP_QAT_HW_COMPRESSION_DELAYED_MATCH_DISABLED = 0,
	ICP_QAT_HW_COMPRESSION_DELAYED_MATCH_ENABLED = 1,
	ICP_QAT_HW_COMPRESSION_DELAYED_MATCH_DELIMITER = 2
};

enum icp_qat_hw_compression_algo {
	ICP_QAT_HW_COMPRESSION_ALGO_DEFLATE = 0,
	ICP_QAT_HW_COMPRESSION_ALGO_LZS = 1,
	ICP_QAT_HW_COMPRESSION_ALGO_DELIMITER = 2
};

enum icp_qat_hw_compression_depth {
	ICP_QAT_HW_COMPRESSION_DEPTH_1 = 0,
	ICP_QAT_HW_COMPRESSION_DEPTH_4 = 1,
	ICP_QAT_HW_COMPRESSION_DEPTH_8 = 2,
	ICP_QAT_HW_COMPRESSION_DEPTH_16 = 3,
	ICP_QAT_HW_COMPRESSION_DEPTH_128 = 4,
	ICP_QAT_HW_COMPRESSION_DEPTH_DELIMITER = 5
};

enum icp_qat_hw_compression_file_type {
	ICP_QAT_HW_COMPRESSION_FILE_TYPE_0 = 0,
	ICP_QAT_HW_COMPRESSION_FILE_TYPE_1 = 1,
	ICP_QAT_HW_COMPRESSION_FILE_TYPE_2 = 2,
	ICP_QAT_HW_COMPRESSION_FILE_TYPE_3 = 3,
	ICP_QAT_HW_COMPRESSION_FILE_TYPE_4 = 4,
	ICP_QAT_HW_COMPRESSION_FILE_TYPE_DELIMITER = 5
};

struct icp_qat_hw_compression_config {
	__u32 lower_val;
	__u32 upper_val;
};

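/*
 * Layout of the 32-bit compression config word assembled by
 * ICP_QAT_HW_COMPRESSION_CONFIG_BUILD(), as derived from the BITPOS/MASK
 * definitions below:
 *
 *   bits [6:4]   - direction (icp_qat_hw_compression_direction)
 *   bit  16      - delayed match enable
 *   bits [27:24] - file type (icp_qat_hw_compression_file_type)
 *   bits [30:28] - search depth (icp_qat_hw_compression_depth)
 *   bit  31      - compression algorithm (icp_qat_hw_compression_algo)
 *
 * Illustrative example: ICP_QAT_HW_COMPRESSION_CONFIG_BUILD(
 * ICP_QAT_HW_COMPRESSION_DIR_COMPRESS,
 * ICP_QAT_HW_COMPRESSION_DELAYED_MATCH_ENABLED,
 * ICP_QAT_HW_COMPRESSION_ALGO_DEFLATE, ICP_QAT_HW_COMPRESSION_DEPTH_8,
 * ICP_QAT_HW_COMPRESSION_FILE_TYPE_0) evaluates to 0x20010000.
 */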
#define QAT_COMPRESSION_DIR_BITPOS 4
#define QAT_COMPRESSION_DIR_MASK 0x7
#define QAT_COMPRESSION_DELAYED_MATCH_BITPOS 16
#define QAT_COMPRESSION_DELAYED_MATCH_MASK 0x1
#define QAT_COMPRESSION_ALGO_BITPOS 31
#define QAT_COMPRESSION_ALGO_MASK 0x1
#define QAT_COMPRESSION_DEPTH_BITPOS 28
#define QAT_COMPRESSION_DEPTH_MASK 0x7
#define QAT_COMPRESSION_FILE_TYPE_BITPOS 24
#define QAT_COMPRESSION_FILE_TYPE_MASK 0xF

#define ICP_QAT_HW_COMPRESSION_CONFIG_BUILD(dir, delayed, \
	algo, depth, filetype) \
	((((dir) & QAT_COMPRESSION_DIR_MASK) << \
	QAT_COMPRESSION_DIR_BITPOS) | \
	(((delayed) & QAT_COMPRESSION_DELAYED_MATCH_MASK) << \
	QAT_COMPRESSION_DELAYED_MATCH_BITPOS) | \
	(((algo) & QAT_COMPRESSION_ALGO_MASK) << \
	QAT_COMPRESSION_ALGO_BITPOS) | \
	(((depth) & QAT_COMPRESSION_DEPTH_MASK) << \
	QAT_COMPRESSION_DEPTH_BITPOS) | \
	(((filetype) & QAT_COMPRESSION_FILE_TYPE_MASK) << \
	QAT_COMPRESSION_FILE_TYPE_BITPOS))

#endif