1091d81d1SSam Leffler /* $OpenBSD: cryptosoft.c,v 1.35 2002/04/26 08:43:50 deraadt Exp $ */ 2091d81d1SSam Leffler 360727d8bSWarner Losh /*- 4091d81d1SSam Leffler * The author of this code is Angelos D. Keromytis (angelos@cis.upenn.edu) 56810ad6fSSam Leffler * Copyright (c) 2002-2006 Sam Leffler, Errno Consulting 6091d81d1SSam Leffler * 7091d81d1SSam Leffler * This code was written by Angelos D. Keromytis in Athens, Greece, in 8091d81d1SSam Leffler * February 2000. Network Security Technologies Inc. (NSTI) kindly 9091d81d1SSam Leffler * supported the development of this code. 10091d81d1SSam Leffler * 11091d81d1SSam Leffler * Copyright (c) 2000, 2001 Angelos D. Keromytis 1208fca7a5SJohn-Mark Gurney * Copyright (c) 2014 The FreeBSD Foundation 1308fca7a5SJohn-Mark Gurney * All rights reserved. 1408fca7a5SJohn-Mark Gurney * 1508fca7a5SJohn-Mark Gurney * Portions of this software were developed by John-Mark Gurney 1608fca7a5SJohn-Mark Gurney * under sponsorship of the FreeBSD Foundation and 1708fca7a5SJohn-Mark Gurney * Rubicon Communications, LLC (Netgate). 18091d81d1SSam Leffler * 19091d81d1SSam Leffler * Permission to use, copy, and modify this software with or without fee 20091d81d1SSam Leffler * is hereby granted, provided that this entire notice is included in 21091d81d1SSam Leffler * all source code copies of any software which is or includes a copy or 22091d81d1SSam Leffler * modification of this software. 23091d81d1SSam Leffler * 24091d81d1SSam Leffler * THIS SOFTWARE IS BEING PROVIDED "AS IS", WITHOUT ANY EXPRESS OR 25091d81d1SSam Leffler * IMPLIED WARRANTY. IN PARTICULAR, NONE OF THE AUTHORS MAKES ANY 26091d81d1SSam Leffler * REPRESENTATION OR WARRANTY OF ANY KIND CONCERNING THE 27091d81d1SSam Leffler * MERCHANTABILITY OF THIS SOFTWARE OR ITS FITNESS FOR ANY PARTICULAR 28091d81d1SSam Leffler * PURPOSE. 29091d81d1SSam Leffler */ 30091d81d1SSam Leffler 312c446514SDavid E. O'Brien #include <sys/cdefs.h> 322c446514SDavid E. 
O'Brien __FBSDID("$FreeBSD$"); 332c446514SDavid E. O'Brien 34091d81d1SSam Leffler #include <sys/param.h> 35091d81d1SSam Leffler #include <sys/systm.h> 36091d81d1SSam Leffler #include <sys/malloc.h> 37091d81d1SSam Leffler #include <sys/mbuf.h> 386810ad6fSSam Leffler #include <sys/module.h> 39091d81d1SSam Leffler #include <sys/sysctl.h> 40091d81d1SSam Leffler #include <sys/errno.h> 41091d81d1SSam Leffler #include <sys/random.h> 42091d81d1SSam Leffler #include <sys/kernel.h> 43091d81d1SSam Leffler #include <sys/uio.h> 44109919c6SBenno Rice #include <sys/lock.h> 45109919c6SBenno Rice #include <sys/rwlock.h> 4608fca7a5SJohn-Mark Gurney #include <sys/endian.h> 4708fca7a5SJohn-Mark Gurney #include <sys/limits.h> 48a7fcb1afSSean Eric Fagan #include <sys/mutex.h> 49091d81d1SSam Leffler 50091d81d1SSam Leffler #include <crypto/sha1.h> 51091d81d1SSam Leffler #include <opencrypto/rmd160.h> 52091d81d1SSam Leffler 53091d81d1SSam Leffler #include <opencrypto/cryptodev.h> 54091d81d1SSam Leffler #include <opencrypto/xform.h> 55091d81d1SSam Leffler 566810ad6fSSam Leffler #include <sys/kobj.h> 576810ad6fSSam Leffler #include <sys/bus.h> 586810ad6fSSam Leffler #include "cryptodev_if.h" 59091d81d1SSam Leffler 60c0341432SJohn Baldwin struct swcr_auth { 61c0341432SJohn Baldwin void *sw_ictx; 62c0341432SJohn Baldwin void *sw_octx; 63c0341432SJohn Baldwin struct auth_hash *sw_axf; 64c0341432SJohn Baldwin uint16_t sw_mlen; 65c0341432SJohn Baldwin }; 66c0341432SJohn Baldwin 67c0341432SJohn Baldwin struct swcr_encdec { 683e947048SJohn Baldwin void *sw_kschedule; 69c0341432SJohn Baldwin struct enc_xform *sw_exf; 70c0341432SJohn Baldwin }; 71c0341432SJohn Baldwin 72c0341432SJohn Baldwin struct swcr_compdec { 73c0341432SJohn Baldwin struct comp_algo *sw_cxf; 74c0341432SJohn Baldwin }; 75c0341432SJohn Baldwin 76c0341432SJohn Baldwin struct swcr_session { 77c0341432SJohn Baldwin struct mtx swcr_lock; 78c0341432SJohn Baldwin int (*swcr_process)(struct swcr_session *, struct cryptop *); 
79c0341432SJohn Baldwin 80c0341432SJohn Baldwin struct swcr_auth swcr_auth; 81c0341432SJohn Baldwin struct swcr_encdec swcr_encdec; 82c0341432SJohn Baldwin struct swcr_compdec swcr_compdec; 83c0341432SJohn Baldwin }; 84507281e5SSean Eric Fagan 856810ad6fSSam Leffler static int32_t swcr_id; 866810ad6fSSam Leffler 871b0909d5SConrad Meyer static void swcr_freesession(device_t dev, crypto_session_t cses); 88091d81d1SSam Leffler 89c0341432SJohn Baldwin /* Used for CRYPTO_NULL_CBC. */ 90c0341432SJohn Baldwin static int 91c0341432SJohn Baldwin swcr_null(struct swcr_session *ses, struct cryptop *crp) 92c0341432SJohn Baldwin { 93c0341432SJohn Baldwin 94c0341432SJohn Baldwin return (0); 95c0341432SJohn Baldwin } 96c0341432SJohn Baldwin 97091d81d1SSam Leffler /* 98091d81d1SSam Leffler * Apply a symmetric encryption/decryption algorithm. 99091d81d1SSam Leffler */ 100091d81d1SSam Leffler static int 101c0341432SJohn Baldwin swcr_encdec(struct swcr_session *ses, struct cryptop *crp) 102091d81d1SSam Leffler { 1035d7ae54aSConrad Meyer unsigned char iv[EALG_MAX_BLOCK_LEN], blk[EALG_MAX_BLOCK_LEN]; 10408fca7a5SJohn-Mark Gurney unsigned char *ivp, *nivp, iv2[EALG_MAX_BLOCK_LEN]; 105c0341432SJohn Baldwin const struct crypto_session_params *csp; 106c0341432SJohn Baldwin struct swcr_encdec *sw; 107091d81d1SSam Leffler struct enc_xform *exf; 1089c0e3d3aSJohn Baldwin int i, blks, inlen, ivlen, outlen, resid; 1099c0e3d3aSJohn Baldwin struct crypto_buffer_cursor cc_in, cc_out; 11026d292d3SJohn Baldwin const unsigned char *inblk; 11126d292d3SJohn Baldwin unsigned char *outblk; 11208fca7a5SJohn-Mark Gurney int error; 113c0341432SJohn Baldwin bool encrypting; 11408fca7a5SJohn-Mark Gurney 11508fca7a5SJohn-Mark Gurney error = 0; 116091d81d1SSam Leffler 117c0341432SJohn Baldwin sw = &ses->swcr_encdec; 118091d81d1SSam Leffler exf = sw->sw_exf; 11908fca7a5SJohn-Mark Gurney ivlen = exf->ivsize; 120091d81d1SSam Leffler 121723d8764SJohn Baldwin if (exf->native_blocksize == 0) { 122091d81d1SSam Leffler 
/* Check for non-padded data */ 123723d8764SJohn Baldwin if ((crp->crp_payload_length % exf->blocksize) != 0) 124723d8764SJohn Baldwin return (EINVAL); 125723d8764SJohn Baldwin 126723d8764SJohn Baldwin blks = exf->blocksize; 127723d8764SJohn Baldwin } else 128723d8764SJohn Baldwin blks = exf->native_blocksize; 129091d81d1SSam Leffler 130c0341432SJohn Baldwin if (exf == &enc_xform_aes_icm && 131c0341432SJohn Baldwin (crp->crp_flags & CRYPTO_F_IV_SEPARATE) == 0) 13208fca7a5SJohn-Mark Gurney return (EINVAL); 13308fca7a5SJohn-Mark Gurney 134c0341432SJohn Baldwin if (crp->crp_cipher_key != NULL) { 135c0341432SJohn Baldwin csp = crypto_get_params(crp->crp_session); 1363e947048SJohn Baldwin error = exf->setkey(sw->sw_kschedule, 137c0341432SJohn Baldwin crp->crp_cipher_key, csp->csp_cipher_klen); 138c740ae4bSPoul-Henning Kamp if (error) 139c740ae4bSPoul-Henning Kamp return (error); 140c740ae4bSPoul-Henning Kamp } 141d295bdeeSPawel Jakub Dawidek 14220c128daSJohn Baldwin crypto_read_iv(crp, iv); 14320c128daSJohn Baldwin 14408fca7a5SJohn-Mark Gurney if (exf->reinit) { 145d295bdeeSPawel Jakub Dawidek /* 146d295bdeeSPawel Jakub Dawidek * xforms that provide a reinit method perform all IV 147d295bdeeSPawel Jakub Dawidek * handling themselves. 
148d295bdeeSPawel Jakub Dawidek */ 149d295bdeeSPawel Jakub Dawidek exf->reinit(sw->sw_kschedule, iv); 150091d81d1SSam Leffler } 151091d81d1SSam Leffler 1529c0e3d3aSJohn Baldwin ivp = iv; 153091d81d1SSam Leffler 1549c0e3d3aSJohn Baldwin crypto_cursor_init(&cc_in, &crp->crp_buf); 1559c0e3d3aSJohn Baldwin crypto_cursor_advance(&cc_in, crp->crp_payload_start); 1569c0e3d3aSJohn Baldwin inlen = crypto_cursor_seglen(&cc_in); 1579c0e3d3aSJohn Baldwin inblk = crypto_cursor_segbase(&cc_in); 1589c0e3d3aSJohn Baldwin if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) { 1599c0e3d3aSJohn Baldwin crypto_cursor_init(&cc_out, &crp->crp_obuf); 1609c0e3d3aSJohn Baldwin crypto_cursor_advance(&cc_out, crp->crp_payload_output_start); 1619c0e3d3aSJohn Baldwin } else 1629c0e3d3aSJohn Baldwin cc_out = cc_in; 1639c0e3d3aSJohn Baldwin outlen = crypto_cursor_seglen(&cc_out); 1649c0e3d3aSJohn Baldwin outblk = crypto_cursor_segbase(&cc_out); 1659c0e3d3aSJohn Baldwin 1669c0e3d3aSJohn Baldwin resid = crp->crp_payload_length; 167c0341432SJohn Baldwin encrypting = CRYPTO_OP_IS_ENCRYPT(crp->crp_op); 168091d81d1SSam Leffler 169091d81d1SSam Leffler /* 1709c0e3d3aSJohn Baldwin * Loop through encrypting blocks. 'inlen' is the remaining 1719c0e3d3aSJohn Baldwin * length of the current segment in the input buffer. 1729c0e3d3aSJohn Baldwin * 'outlen' is the remaining length of current segment in the 1739c0e3d3aSJohn Baldwin * output buffer. 174091d81d1SSam Leffler */ 1759c0e3d3aSJohn Baldwin while (resid >= blks) { 1769c0e3d3aSJohn Baldwin /* 1779c0e3d3aSJohn Baldwin * If the current block is not contained within the 1789c0e3d3aSJohn Baldwin * current input/output segment, use 'blk' as a local 1799c0e3d3aSJohn Baldwin * buffer. 
1809c0e3d3aSJohn Baldwin */ 1819c0e3d3aSJohn Baldwin if (inlen < blks) { 1829c0e3d3aSJohn Baldwin crypto_cursor_copydata(&cc_in, blks, blk); 1839c0e3d3aSJohn Baldwin inblk = blk; 184d295bdeeSPawel Jakub Dawidek } 1859c0e3d3aSJohn Baldwin if (outlen < blks) 1869c0e3d3aSJohn Baldwin outblk = blk; 1879c0e3d3aSJohn Baldwin 1889c0e3d3aSJohn Baldwin /* 1899c0e3d3aSJohn Baldwin * Ciphers without a 'reinit' hook are assumed to be 1909c0e3d3aSJohn Baldwin * used in CBC mode where the chaining is done here. 1919c0e3d3aSJohn Baldwin */ 1929c0e3d3aSJohn Baldwin if (exf->reinit != NULL) { 1939c0e3d3aSJohn Baldwin if (encrypting) 1949c0e3d3aSJohn Baldwin exf->encrypt(sw->sw_kschedule, inblk, outblk); 1959c0e3d3aSJohn Baldwin else 1969c0e3d3aSJohn Baldwin exf->decrypt(sw->sw_kschedule, inblk, outblk); 197c0341432SJohn Baldwin } else if (encrypting) { 198091d81d1SSam Leffler /* XOR with previous block */ 1999c0e3d3aSJohn Baldwin for (i = 0; i < blks; i++) 2009c0e3d3aSJohn Baldwin outblk[i] = inblk[i] ^ ivp[i]; 201091d81d1SSam Leffler 2029c0e3d3aSJohn Baldwin exf->encrypt(sw->sw_kschedule, outblk, outblk); 203091d81d1SSam Leffler 204091d81d1SSam Leffler /* 205091d81d1SSam Leffler * Keep encrypted block for XOR'ing 206091d81d1SSam Leffler * with next block 207091d81d1SSam Leffler */ 2089c0e3d3aSJohn Baldwin memcpy(iv, outblk, blks); 209091d81d1SSam Leffler ivp = iv; 210091d81d1SSam Leffler } else { /* decrypt */ 211091d81d1SSam Leffler /* 212091d81d1SSam Leffler * Keep encrypted block for XOR'ing 213091d81d1SSam Leffler * with next block 214091d81d1SSam Leffler */ 21508fca7a5SJohn-Mark Gurney nivp = (ivp == iv) ? 
iv2 : iv; 2169c0e3d3aSJohn Baldwin memcpy(nivp, inblk, blks); 217091d81d1SSam Leffler 2189c0e3d3aSJohn Baldwin exf->decrypt(sw->sw_kschedule, inblk, outblk); 219091d81d1SSam Leffler 220091d81d1SSam Leffler /* XOR with previous block */ 2219c0e3d3aSJohn Baldwin for (i = 0; i < blks; i++) 2229c0e3d3aSJohn Baldwin outblk[i] ^= ivp[i]; 223091d81d1SSam Leffler 22408fca7a5SJohn-Mark Gurney ivp = nivp; 225091d81d1SSam Leffler } 226091d81d1SSam Leffler 2279c0e3d3aSJohn Baldwin if (inlen < blks) { 2289c0e3d3aSJohn Baldwin inlen = crypto_cursor_seglen(&cc_in); 2299c0e3d3aSJohn Baldwin inblk = crypto_cursor_segbase(&cc_in); 2309c0e3d3aSJohn Baldwin } else { 2319c0e3d3aSJohn Baldwin crypto_cursor_advance(&cc_in, blks); 2329c0e3d3aSJohn Baldwin inlen -= blks; 2339c0e3d3aSJohn Baldwin inblk += blks; 23408fca7a5SJohn-Mark Gurney } 235091d81d1SSam Leffler 2369c0e3d3aSJohn Baldwin if (outlen < blks) { 2379c0e3d3aSJohn Baldwin crypto_cursor_copyback(&cc_out, blks, blk); 2389c0e3d3aSJohn Baldwin outlen = crypto_cursor_seglen(&cc_out); 2399c0e3d3aSJohn Baldwin outblk = crypto_cursor_segbase(&cc_out); 2409c0e3d3aSJohn Baldwin } else { 2419c0e3d3aSJohn Baldwin crypto_cursor_advance(&cc_out, blks); 2429c0e3d3aSJohn Baldwin outlen -= blks; 2439c0e3d3aSJohn Baldwin outblk += blks; 244091d81d1SSam Leffler } 245091d81d1SSam Leffler 2469c0e3d3aSJohn Baldwin resid -= blks; 247f34a967bSPawel Jakub Dawidek } 248f34a967bSPawel Jakub Dawidek 249723d8764SJohn Baldwin /* Handle trailing partial block for stream ciphers. 
*/ 2509c0e3d3aSJohn Baldwin if (resid > 0) { 251723d8764SJohn Baldwin KASSERT(exf->native_blocksize != 0, 252723d8764SJohn Baldwin ("%s: partial block of %d bytes for cipher %s", 253723d8764SJohn Baldwin __func__, i, exf->name)); 254723d8764SJohn Baldwin KASSERT(exf->reinit != NULL, 255723d8764SJohn Baldwin ("%s: partial block cipher %s without reinit hook", 256723d8764SJohn Baldwin __func__, exf->name)); 2579c0e3d3aSJohn Baldwin KASSERT(resid < blks, ("%s: partial block too big", __func__)); 258723d8764SJohn Baldwin 2599c0e3d3aSJohn Baldwin inlen = crypto_cursor_seglen(&cc_in); 2609c0e3d3aSJohn Baldwin outlen = crypto_cursor_seglen(&cc_out); 2619c0e3d3aSJohn Baldwin if (inlen < resid) { 2629c0e3d3aSJohn Baldwin crypto_cursor_copydata(&cc_in, resid, blk); 2639c0e3d3aSJohn Baldwin inblk = blk; 2649c0e3d3aSJohn Baldwin } else 2659c0e3d3aSJohn Baldwin inblk = crypto_cursor_segbase(&cc_in); 2669c0e3d3aSJohn Baldwin if (outlen < resid) 2679c0e3d3aSJohn Baldwin outblk = blk; 2689c0e3d3aSJohn Baldwin else 2699c0e3d3aSJohn Baldwin outblk = crypto_cursor_segbase(&cc_out); 2709c0e3d3aSJohn Baldwin if (encrypting) 2719c0e3d3aSJohn Baldwin exf->encrypt_last(sw->sw_kschedule, inblk, outblk, 2729c0e3d3aSJohn Baldwin resid); 2739c0e3d3aSJohn Baldwin else 2749c0e3d3aSJohn Baldwin exf->decrypt_last(sw->sw_kschedule, inblk, outblk, 2759c0e3d3aSJohn Baldwin resid); 2769c0e3d3aSJohn Baldwin if (outlen < resid) 2779c0e3d3aSJohn Baldwin crypto_cursor_copyback(&cc_out, resid, blk); 278723d8764SJohn Baldwin } 279723d8764SJohn Baldwin 28020c128daSJohn Baldwin explicit_bzero(blk, sizeof(blk)); 28120c128daSJohn Baldwin explicit_bzero(iv, sizeof(iv)); 28220c128daSJohn Baldwin explicit_bzero(iv2, sizeof(iv2)); 2839c0e3d3aSJohn Baldwin return (0); 284091d81d1SSam Leffler } 285091d81d1SSam Leffler 286c0341432SJohn Baldwin static void 287c0341432SJohn Baldwin swcr_authprepare(struct auth_hash *axf, struct swcr_auth *sw, 288c0341432SJohn Baldwin const uint8_t *key, int klen) 289f6c4bc3bSPawel 
Jakub Dawidek { 290f6c4bc3bSPawel Jakub Dawidek 291f6c4bc3bSPawel Jakub Dawidek switch (axf->type) { 292f6c4bc3bSPawel Jakub Dawidek case CRYPTO_SHA1_HMAC: 293c97f39ceSConrad Meyer case CRYPTO_SHA2_224_HMAC: 294f6c4bc3bSPawel Jakub Dawidek case CRYPTO_SHA2_256_HMAC: 295f6c4bc3bSPawel Jakub Dawidek case CRYPTO_SHA2_384_HMAC: 296f6c4bc3bSPawel Jakub Dawidek case CRYPTO_SHA2_512_HMAC: 297f6c4bc3bSPawel Jakub Dawidek case CRYPTO_NULL_HMAC: 298f6c4bc3bSPawel Jakub Dawidek case CRYPTO_RIPEMD160_HMAC: 299c0341432SJohn Baldwin hmac_init_ipad(axf, key, klen, sw->sw_ictx); 300c0341432SJohn Baldwin hmac_init_opad(axf, key, klen, sw->sw_octx); 301f6c4bc3bSPawel Jakub Dawidek break; 30225b7033bSConrad Meyer case CRYPTO_POLY1305: 3030e33efe4SConrad Meyer case CRYPTO_BLAKE2B: 3040e33efe4SConrad Meyer case CRYPTO_BLAKE2S: 3050e33efe4SConrad Meyer axf->Setkey(sw->sw_ictx, key, klen); 3060e33efe4SConrad Meyer axf->Init(sw->sw_ictx); 3070e33efe4SConrad Meyer break; 308f6c4bc3bSPawel Jakub Dawidek default: 309c0341432SJohn Baldwin panic("%s: algorithm %d doesn't use keys", __func__, axf->type); 310f6c4bc3bSPawel Jakub Dawidek } 311f6c4bc3bSPawel Jakub Dawidek } 312f6c4bc3bSPawel Jakub Dawidek 313091d81d1SSam Leffler /* 314c0341432SJohn Baldwin * Compute or verify hash. 
315091d81d1SSam Leffler */ 316091d81d1SSam Leffler static int 317c0341432SJohn Baldwin swcr_authcompute(struct swcr_session *ses, struct cryptop *crp) 318091d81d1SSam Leffler { 319c0341432SJohn Baldwin u_char aalg[HASH_MAX_LEN]; 320c0341432SJohn Baldwin const struct crypto_session_params *csp; 321c0341432SJohn Baldwin struct swcr_auth *sw; 322091d81d1SSam Leffler struct auth_hash *axf; 323091d81d1SSam Leffler union authctx ctx; 324091d81d1SSam Leffler int err; 325091d81d1SSam Leffler 326c0341432SJohn Baldwin sw = &ses->swcr_auth; 327091d81d1SSam Leffler 328091d81d1SSam Leffler axf = sw->sw_axf; 329091d81d1SSam Leffler 330c0341432SJohn Baldwin if (crp->crp_auth_key != NULL) { 331c0341432SJohn Baldwin csp = crypto_get_params(crp->crp_session); 332c0341432SJohn Baldwin swcr_authprepare(axf, sw, crp->crp_auth_key, 333c0341432SJohn Baldwin csp->csp_auth_klen); 33425b7033bSConrad Meyer } 335f6c4bc3bSPawel Jakub Dawidek 336091d81d1SSam Leffler bcopy(sw->sw_ictx, &ctx, axf->ctxsize); 337091d81d1SSam Leffler 3389b774dc0SJohn Baldwin if (crp->crp_aad != NULL) 3399b774dc0SJohn Baldwin err = axf->Update(&ctx, crp->crp_aad, crp->crp_aad_length); 3409b774dc0SJohn Baldwin else 341c0341432SJohn Baldwin err = crypto_apply(crp, crp->crp_aad_start, crp->crp_aad_length, 3429b6b2f86SJohn Baldwin axf->Update, &ctx); 343091d81d1SSam Leffler if (err) 344091d81d1SSam Leffler return err; 345091d81d1SSam Leffler 3469c0e3d3aSJohn Baldwin if (CRYPTO_HAS_OUTPUT_BUFFER(crp) && 3479c0e3d3aSJohn Baldwin CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) 3489c0e3d3aSJohn Baldwin err = crypto_apply_buf(&crp->crp_obuf, 3499c0e3d3aSJohn Baldwin crp->crp_payload_output_start, crp->crp_payload_length, 3509b6b2f86SJohn Baldwin axf->Update, &ctx); 3519c0e3d3aSJohn Baldwin else 3529c0e3d3aSJohn Baldwin err = crypto_apply(crp, crp->crp_payload_start, 3539b6b2f86SJohn Baldwin crp->crp_payload_length, axf->Update, &ctx); 354c0341432SJohn Baldwin if (err) 355c0341432SJohn Baldwin return err; 356c0341432SJohn Baldwin 
357c0341432SJohn Baldwin switch (axf->type) { 358c4729f6eSConrad Meyer case CRYPTO_SHA1: 359c4729f6eSConrad Meyer case CRYPTO_SHA2_224: 360c4729f6eSConrad Meyer case CRYPTO_SHA2_256: 361c4729f6eSConrad Meyer case CRYPTO_SHA2_384: 362c4729f6eSConrad Meyer case CRYPTO_SHA2_512: 363c4729f6eSConrad Meyer axf->Final(aalg, &ctx); 364c4729f6eSConrad Meyer break; 365c4729f6eSConrad Meyer 366091d81d1SSam Leffler case CRYPTO_SHA1_HMAC: 367c97f39ceSConrad Meyer case CRYPTO_SHA2_224_HMAC: 368f6c4bc3bSPawel Jakub Dawidek case CRYPTO_SHA2_256_HMAC: 369f6c4bc3bSPawel Jakub Dawidek case CRYPTO_SHA2_384_HMAC: 370f6c4bc3bSPawel Jakub Dawidek case CRYPTO_SHA2_512_HMAC: 371091d81d1SSam Leffler case CRYPTO_RIPEMD160_HMAC: 372091d81d1SSam Leffler if (sw->sw_octx == NULL) 373091d81d1SSam Leffler return EINVAL; 374091d81d1SSam Leffler 375091d81d1SSam Leffler axf->Final(aalg, &ctx); 376091d81d1SSam Leffler bcopy(sw->sw_octx, &ctx, axf->ctxsize); 377091d81d1SSam Leffler axf->Update(&ctx, aalg, axf->hashsize); 378091d81d1SSam Leffler axf->Final(aalg, &ctx); 379091d81d1SSam Leffler break; 380091d81d1SSam Leffler 3810e33efe4SConrad Meyer case CRYPTO_BLAKE2B: 3820e33efe4SConrad Meyer case CRYPTO_BLAKE2S: 383091d81d1SSam Leffler case CRYPTO_NULL_HMAC: 38425b7033bSConrad Meyer case CRYPTO_POLY1305: 385091d81d1SSam Leffler axf->Final(aalg, &ctx); 386091d81d1SSam Leffler break; 387091d81d1SSam Leffler } 388091d81d1SSam Leffler 389c0341432SJohn Baldwin if (crp->crp_op & CRYPTO_OP_VERIFY_DIGEST) { 39020c128daSJohn Baldwin u_char uaalg[HASH_MAX_LEN]; 39120c128daSJohn Baldwin 392c0341432SJohn Baldwin crypto_copydata(crp, crp->crp_digest_start, sw->sw_mlen, uaalg); 393c0341432SJohn Baldwin if (timingsafe_bcmp(aalg, uaalg, sw->sw_mlen) != 0) 39420c128daSJohn Baldwin err = EBADMSG; 39520c128daSJohn Baldwin explicit_bzero(uaalg, sizeof(uaalg)); 396c0341432SJohn Baldwin } else { 397091d81d1SSam Leffler /* Inject the authentication data */ 398c0341432SJohn Baldwin crypto_copyback(crp, crp->crp_digest_start, 
sw->sw_mlen, aalg); 399c0341432SJohn Baldwin } 40020c128daSJohn Baldwin explicit_bzero(aalg, sizeof(aalg)); 40120c128daSJohn Baldwin return (err); 402091d81d1SSam Leffler } 403091d81d1SSam Leffler 40408fca7a5SJohn-Mark Gurney CTASSERT(INT_MAX <= (1ll<<39) - 256); /* GCM: plain text < 2^39-256 */ 40508fca7a5SJohn-Mark Gurney CTASSERT(INT_MAX <= (uint64_t)-1); /* GCM: associated data <= 2^64-1 */ 40608fca7a5SJohn-Mark Gurney 40708fca7a5SJohn-Mark Gurney static int 408c0341432SJohn Baldwin swcr_gmac(struct swcr_session *ses, struct cryptop *crp) 40908fca7a5SJohn-Mark Gurney { 41026d292d3SJohn Baldwin uint32_t blkbuf[howmany(AES_BLOCK_LEN, sizeof(uint32_t))]; 41108fca7a5SJohn-Mark Gurney u_char *blk = (u_char *)blkbuf; 41226d292d3SJohn Baldwin u_char tag[GMAC_DIGEST_LEN]; 41326d292d3SJohn Baldwin u_char iv[AES_BLOCK_LEN]; 4149c0e3d3aSJohn Baldwin struct crypto_buffer_cursor cc; 41526d292d3SJohn Baldwin const u_char *inblk; 41608fca7a5SJohn-Mark Gurney union authctx ctx; 417c0341432SJohn Baldwin struct swcr_auth *swa; 418c0341432SJohn Baldwin struct auth_hash *axf; 41908fca7a5SJohn-Mark Gurney uint32_t *blkp; 42020c128daSJohn Baldwin int blksz, error, ivlen, len, resid; 42108fca7a5SJohn-Mark Gurney 422c0341432SJohn Baldwin swa = &ses->swcr_auth; 42308fca7a5SJohn-Mark Gurney axf = swa->sw_axf; 424c0341432SJohn Baldwin 42508fca7a5SJohn-Mark Gurney bcopy(swa->sw_ictx, &ctx, axf->ctxsize); 42626d292d3SJohn Baldwin blksz = GMAC_BLOCK_LEN; 42726d292d3SJohn Baldwin KASSERT(axf->blocksize == blksz, ("%s: axf block size mismatch", 42826d292d3SJohn Baldwin __func__)); 42908fca7a5SJohn-Mark Gurney 43008fca7a5SJohn-Mark Gurney /* Initialize the IV */ 431c0341432SJohn Baldwin ivlen = AES_GCM_IV_LEN; 43229fe41ddSJohn Baldwin crypto_read_iv(crp, iv); 43308fca7a5SJohn-Mark Gurney 43408fca7a5SJohn-Mark Gurney axf->Reinit(&ctx, iv, ivlen); 4359c0e3d3aSJohn Baldwin crypto_cursor_init(&cc, &crp->crp_buf); 4369c0e3d3aSJohn Baldwin crypto_cursor_advance(&cc, crp->crp_payload_start); 
43726d292d3SJohn Baldwin for (resid = crp->crp_payload_length; resid >= blksz; resid -= len) { 43826d292d3SJohn Baldwin len = crypto_cursor_seglen(&cc); 43926d292d3SJohn Baldwin if (len >= blksz) { 44026d292d3SJohn Baldwin inblk = crypto_cursor_segbase(&cc); 44126d292d3SJohn Baldwin len = rounddown(MIN(len, resid), blksz); 44226d292d3SJohn Baldwin crypto_cursor_advance(&cc, len); 44326d292d3SJohn Baldwin } else { 44426d292d3SJohn Baldwin len = blksz; 4459c0e3d3aSJohn Baldwin crypto_cursor_copydata(&cc, len, blk); 44626d292d3SJohn Baldwin inblk = blk; 44726d292d3SJohn Baldwin } 44826d292d3SJohn Baldwin axf->Update(&ctx, inblk, len); 44926d292d3SJohn Baldwin } 45026d292d3SJohn Baldwin if (resid > 0) { 45126d292d3SJohn Baldwin memset(blk, 0, blksz); 45226d292d3SJohn Baldwin crypto_cursor_copydata(&cc, resid, blk); 45308fca7a5SJohn-Mark Gurney axf->Update(&ctx, blk, blksz); 45408fca7a5SJohn-Mark Gurney } 45508fca7a5SJohn-Mark Gurney 45608fca7a5SJohn-Mark Gurney /* length block */ 45726d292d3SJohn Baldwin memset(blk, 0, blksz); 45808fca7a5SJohn-Mark Gurney blkp = (uint32_t *)blk + 1; 459c0341432SJohn Baldwin *blkp = htobe32(crp->crp_payload_length * 8); 46008fca7a5SJohn-Mark Gurney axf->Update(&ctx, blk, blksz); 461c0341432SJohn Baldwin 462c0341432SJohn Baldwin /* Finalize MAC */ 46326d292d3SJohn Baldwin axf->Final(tag, &ctx); 464c0341432SJohn Baldwin 46520c128daSJohn Baldwin error = 0; 466c0341432SJohn Baldwin if (crp->crp_op & CRYPTO_OP_VERIFY_DIGEST) { 46726d292d3SJohn Baldwin u_char tag2[GMAC_DIGEST_LEN]; 46820c128daSJohn Baldwin 469c0341432SJohn Baldwin crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen, 47026d292d3SJohn Baldwin tag2); 47126d292d3SJohn Baldwin if (timingsafe_bcmp(tag, tag2, swa->sw_mlen) != 0) 47220c128daSJohn Baldwin error = EBADMSG; 47326d292d3SJohn Baldwin explicit_bzero(tag2, sizeof(tag2)); 474c0341432SJohn Baldwin } else { 475c0341432SJohn Baldwin /* Inject the authentication data */ 47626d292d3SJohn Baldwin crypto_copyback(crp, 
crp->crp_digest_start, swa->sw_mlen, tag); 477c0341432SJohn Baldwin } 47820c128daSJohn Baldwin explicit_bzero(blkbuf, sizeof(blkbuf)); 47926d292d3SJohn Baldwin explicit_bzero(tag, sizeof(tag)); 48020c128daSJohn Baldwin explicit_bzero(iv, sizeof(iv)); 48120c128daSJohn Baldwin return (error); 482c0341432SJohn Baldwin } 483c0341432SJohn Baldwin 484c0341432SJohn Baldwin static int 485c0341432SJohn Baldwin swcr_gcm(struct swcr_session *ses, struct cryptop *crp) 486c0341432SJohn Baldwin { 48726d292d3SJohn Baldwin uint32_t blkbuf[howmany(AES_BLOCK_LEN, sizeof(uint32_t))]; 488c0341432SJohn Baldwin u_char *blk = (u_char *)blkbuf; 48926d292d3SJohn Baldwin u_char tag[GMAC_DIGEST_LEN]; 49026d292d3SJohn Baldwin u_char iv[AES_BLOCK_LEN]; 4919c0e3d3aSJohn Baldwin struct crypto_buffer_cursor cc_in, cc_out; 49226d292d3SJohn Baldwin const u_char *inblk; 49326d292d3SJohn Baldwin u_char *outblk; 494c0341432SJohn Baldwin union authctx ctx; 495c0341432SJohn Baldwin struct swcr_auth *swa; 496c0341432SJohn Baldwin struct swcr_encdec *swe; 497c0341432SJohn Baldwin struct auth_hash *axf; 498c0341432SJohn Baldwin struct enc_xform *exf; 499c0341432SJohn Baldwin uint32_t *blkp; 50020c128daSJohn Baldwin int blksz, error, ivlen, len, r, resid; 501c0341432SJohn Baldwin 502c0341432SJohn Baldwin swa = &ses->swcr_auth; 503c0341432SJohn Baldwin axf = swa->sw_axf; 504c0341432SJohn Baldwin 505c0341432SJohn Baldwin bcopy(swa->sw_ictx, &ctx, axf->ctxsize); 50626d292d3SJohn Baldwin blksz = GMAC_BLOCK_LEN; 50726d292d3SJohn Baldwin KASSERT(axf->blocksize == blksz, ("%s: axf block size mismatch", 50826d292d3SJohn Baldwin __func__)); 509c0341432SJohn Baldwin 510c0341432SJohn Baldwin swe = &ses->swcr_encdec; 511c0341432SJohn Baldwin exf = swe->sw_exf; 512723d8764SJohn Baldwin KASSERT(axf->blocksize == exf->native_blocksize, 513723d8764SJohn Baldwin ("%s: blocksize mismatch", __func__)); 514c0341432SJohn Baldwin 515c0341432SJohn Baldwin if ((crp->crp_flags & CRYPTO_F_IV_SEPARATE) == 0) 516c0341432SJohn Baldwin 
return (EINVAL); 517c0341432SJohn Baldwin 518c0341432SJohn Baldwin /* Initialize the IV */ 519c0341432SJohn Baldwin ivlen = AES_GCM_IV_LEN; 520c0341432SJohn Baldwin bcopy(crp->crp_iv, iv, ivlen); 521c0341432SJohn Baldwin 522c0341432SJohn Baldwin /* Supply MAC with IV */ 523c0341432SJohn Baldwin axf->Reinit(&ctx, iv, ivlen); 524c0341432SJohn Baldwin 525c0341432SJohn Baldwin /* Supply MAC with AAD */ 5269b774dc0SJohn Baldwin if (crp->crp_aad != NULL) { 5279b774dc0SJohn Baldwin len = rounddown(crp->crp_aad_length, blksz); 5289b774dc0SJohn Baldwin if (len != 0) 5299b774dc0SJohn Baldwin axf->Update(&ctx, crp->crp_aad, len); 5309b774dc0SJohn Baldwin if (crp->crp_aad_length != len) { 5319b774dc0SJohn Baldwin memset(blk, 0, blksz); 5329b774dc0SJohn Baldwin memcpy(blk, (char *)crp->crp_aad + len, 5339b774dc0SJohn Baldwin crp->crp_aad_length - len); 5349b774dc0SJohn Baldwin axf->Update(&ctx, blk, blksz); 5359b774dc0SJohn Baldwin } 5369b774dc0SJohn Baldwin } else { 5379c0e3d3aSJohn Baldwin crypto_cursor_init(&cc_in, &crp->crp_buf); 5389c0e3d3aSJohn Baldwin crypto_cursor_advance(&cc_in, crp->crp_aad_start); 5399b774dc0SJohn Baldwin for (resid = crp->crp_aad_length; resid >= blksz; 5409b774dc0SJohn Baldwin resid -= len) { 54126d292d3SJohn Baldwin len = crypto_cursor_seglen(&cc_in); 54226d292d3SJohn Baldwin if (len >= blksz) { 54326d292d3SJohn Baldwin inblk = crypto_cursor_segbase(&cc_in); 54426d292d3SJohn Baldwin len = rounddown(MIN(len, resid), blksz); 54526d292d3SJohn Baldwin crypto_cursor_advance(&cc_in, len); 54626d292d3SJohn Baldwin } else { 54726d292d3SJohn Baldwin len = blksz; 5489c0e3d3aSJohn Baldwin crypto_cursor_copydata(&cc_in, len, blk); 54926d292d3SJohn Baldwin inblk = blk; 55026d292d3SJohn Baldwin } 55126d292d3SJohn Baldwin axf->Update(&ctx, inblk, len); 55226d292d3SJohn Baldwin } 55326d292d3SJohn Baldwin if (resid > 0) { 55426d292d3SJohn Baldwin memset(blk, 0, blksz); 55526d292d3SJohn Baldwin crypto_cursor_copydata(&cc_in, resid, blk); 556c0341432SJohn Baldwin 
axf->Update(&ctx, blk, blksz); 557c0341432SJohn Baldwin } 5589b774dc0SJohn Baldwin } 559c0341432SJohn Baldwin 560c0341432SJohn Baldwin exf->reinit(swe->sw_kschedule, iv); 561c0341432SJohn Baldwin 562c0341432SJohn Baldwin /* Do encryption with MAC */ 5639c0e3d3aSJohn Baldwin crypto_cursor_init(&cc_in, &crp->crp_buf); 5649c0e3d3aSJohn Baldwin crypto_cursor_advance(&cc_in, crp->crp_payload_start); 5659c0e3d3aSJohn Baldwin if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) { 5669c0e3d3aSJohn Baldwin crypto_cursor_init(&cc_out, &crp->crp_obuf); 5679c0e3d3aSJohn Baldwin crypto_cursor_advance(&cc_out, crp->crp_payload_output_start); 5689c0e3d3aSJohn Baldwin } else 5699c0e3d3aSJohn Baldwin cc_out = cc_in; 57026d292d3SJohn Baldwin for (resid = crp->crp_payload_length; resid >= blksz; resid -= blksz) { 57126d292d3SJohn Baldwin if (crypto_cursor_seglen(&cc_in) < blksz) { 57226d292d3SJohn Baldwin crypto_cursor_copydata(&cc_in, blksz, blk); 57326d292d3SJohn Baldwin inblk = blk; 574c0341432SJohn Baldwin } else { 57526d292d3SJohn Baldwin inblk = crypto_cursor_segbase(&cc_in); 57626d292d3SJohn Baldwin crypto_cursor_advance(&cc_in, blksz); 577c0341432SJohn Baldwin } 57826d292d3SJohn Baldwin if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) { 57926d292d3SJohn Baldwin if (crypto_cursor_seglen(&cc_out) < blksz) 58026d292d3SJohn Baldwin outblk = blk; 58126d292d3SJohn Baldwin else 58226d292d3SJohn Baldwin outblk = crypto_cursor_segbase(&cc_out); 58326d292d3SJohn Baldwin exf->encrypt(swe->sw_kschedule, inblk, outblk); 58426d292d3SJohn Baldwin axf->Update(&ctx, outblk, blksz); 58526d292d3SJohn Baldwin if (outblk == blk) 58626d292d3SJohn Baldwin crypto_cursor_copyback(&cc_out, blksz, blk); 58726d292d3SJohn Baldwin else 58826d292d3SJohn Baldwin crypto_cursor_advance(&cc_out, blksz); 58926d292d3SJohn Baldwin } else { 59026d292d3SJohn Baldwin axf->Update(&ctx, inblk, blksz); 59126d292d3SJohn Baldwin } 59226d292d3SJohn Baldwin } 59326d292d3SJohn Baldwin if (resid > 0) { 59426d292d3SJohn Baldwin 
crypto_cursor_copydata(&cc_in, resid, blk); 59526d292d3SJohn Baldwin if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) { 59626d292d3SJohn Baldwin exf->encrypt_last(swe->sw_kschedule, blk, blk, resid); 59726d292d3SJohn Baldwin crypto_cursor_copyback(&cc_out, resid, blk); 59826d292d3SJohn Baldwin } 59926d292d3SJohn Baldwin axf->Update(&ctx, blk, resid); 600c0341432SJohn Baldwin } 601c0341432SJohn Baldwin 602c0341432SJohn Baldwin /* length block */ 60326d292d3SJohn Baldwin memset(blk, 0, blksz); 604c0341432SJohn Baldwin blkp = (uint32_t *)blk + 1; 605c0341432SJohn Baldwin *blkp = htobe32(crp->crp_aad_length * 8); 606c0341432SJohn Baldwin blkp = (uint32_t *)blk + 3; 607c0341432SJohn Baldwin *blkp = htobe32(crp->crp_payload_length * 8); 608c0341432SJohn Baldwin axf->Update(&ctx, blk, blksz); 609c0341432SJohn Baldwin 610c0341432SJohn Baldwin /* Finalize MAC */ 61126d292d3SJohn Baldwin axf->Final(tag, &ctx); 612c0341432SJohn Baldwin 613c0341432SJohn Baldwin /* Validate tag */ 61420c128daSJohn Baldwin error = 0; 615c0341432SJohn Baldwin if (!CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) { 61626d292d3SJohn Baldwin u_char tag2[GMAC_DIGEST_LEN]; 61720c128daSJohn Baldwin 61826d292d3SJohn Baldwin crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen, tag2); 619c0341432SJohn Baldwin 62026d292d3SJohn Baldwin r = timingsafe_bcmp(tag, tag2, swa->sw_mlen); 62126d292d3SJohn Baldwin explicit_bzero(tag2, sizeof(tag2)); 62220c128daSJohn Baldwin if (r != 0) { 62320c128daSJohn Baldwin error = EBADMSG; 62420c128daSJohn Baldwin goto out; 62520c128daSJohn Baldwin } 626c0341432SJohn Baldwin 627c0341432SJohn Baldwin /* tag matches, decrypt data */ 6289c0e3d3aSJohn Baldwin crypto_cursor_init(&cc_in, &crp->crp_buf); 6299c0e3d3aSJohn Baldwin crypto_cursor_advance(&cc_in, crp->crp_payload_start); 63026d292d3SJohn Baldwin for (resid = crp->crp_payload_length; resid > blksz; 63126d292d3SJohn Baldwin resid -= blksz) { 63226d292d3SJohn Baldwin if (crypto_cursor_seglen(&cc_in) < blksz) { 63326d292d3SJohn Baldwin 
crypto_cursor_copydata(&cc_in, blksz, blk); 63426d292d3SJohn Baldwin inblk = blk; 63526d292d3SJohn Baldwin } else { 63626d292d3SJohn Baldwin inblk = crypto_cursor_segbase(&cc_in); 63726d292d3SJohn Baldwin crypto_cursor_advance(&cc_in, blksz); 63826d292d3SJohn Baldwin } 63926d292d3SJohn Baldwin if (crypto_cursor_seglen(&cc_out) < blksz) 64026d292d3SJohn Baldwin outblk = blk; 64126d292d3SJohn Baldwin else 64226d292d3SJohn Baldwin outblk = crypto_cursor_segbase(&cc_out); 64326d292d3SJohn Baldwin exf->decrypt(swe->sw_kschedule, inblk, outblk); 64426d292d3SJohn Baldwin if (outblk == blk) 64526d292d3SJohn Baldwin crypto_cursor_copyback(&cc_out, blksz, blk); 64626d292d3SJohn Baldwin else 64726d292d3SJohn Baldwin crypto_cursor_advance(&cc_out, blksz); 64826d292d3SJohn Baldwin } 64926d292d3SJohn Baldwin if (resid > 0) { 65026d292d3SJohn Baldwin crypto_cursor_copydata(&cc_in, resid, blk); 65126d292d3SJohn Baldwin exf->decrypt_last(swe->sw_kschedule, blk, blk, resid); 65226d292d3SJohn Baldwin crypto_cursor_copyback(&cc_out, resid, blk); 653c0341432SJohn Baldwin } 654c0341432SJohn Baldwin } else { 655c0341432SJohn Baldwin /* Inject the authentication data */ 65626d292d3SJohn Baldwin crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen, tag); 657c0341432SJohn Baldwin } 658c0341432SJohn Baldwin 65920c128daSJohn Baldwin out: 66020c128daSJohn Baldwin explicit_bzero(blkbuf, sizeof(blkbuf)); 66126d292d3SJohn Baldwin explicit_bzero(tag, sizeof(tag)); 66220c128daSJohn Baldwin explicit_bzero(iv, sizeof(iv)); 66320c128daSJohn Baldwin 66420c128daSJohn Baldwin return (error); 665c0341432SJohn Baldwin } 666c0341432SJohn Baldwin 667c0341432SJohn Baldwin static int 668c0341432SJohn Baldwin swcr_ccm_cbc_mac(struct swcr_session *ses, struct cryptop *crp) 669c0341432SJohn Baldwin { 67026d292d3SJohn Baldwin u_char tag[AES_CBC_MAC_HASH_LEN]; 67126d292d3SJohn Baldwin u_char iv[AES_BLOCK_LEN]; 672c0341432SJohn Baldwin union authctx ctx; 673c0341432SJohn Baldwin struct swcr_auth *swa; 
674c0341432SJohn Baldwin struct auth_hash *axf; 67526d292d3SJohn Baldwin int error, ivlen; 676c0341432SJohn Baldwin 677c0341432SJohn Baldwin swa = &ses->swcr_auth; 678c0341432SJohn Baldwin axf = swa->sw_axf; 679c0341432SJohn Baldwin 680c0341432SJohn Baldwin bcopy(swa->sw_ictx, &ctx, axf->ctxsize); 681c0341432SJohn Baldwin 682c0341432SJohn Baldwin /* Initialize the IV */ 683c0341432SJohn Baldwin ivlen = AES_CCM_IV_LEN; 68429fe41ddSJohn Baldwin crypto_read_iv(crp, iv); 685c0341432SJohn Baldwin 686c0341432SJohn Baldwin /* 687c0341432SJohn Baldwin * AES CCM-CBC-MAC needs to know the length of both the auth 688c0341432SJohn Baldwin * data and payload data before doing the auth computation. 689c0341432SJohn Baldwin */ 690c0341432SJohn Baldwin ctx.aes_cbc_mac_ctx.authDataLength = crp->crp_payload_length; 691c0341432SJohn Baldwin ctx.aes_cbc_mac_ctx.cryptDataLength = 0; 692c0341432SJohn Baldwin 693c0341432SJohn Baldwin axf->Reinit(&ctx, iv, ivlen); 6949b774dc0SJohn Baldwin if (crp->crp_aad != NULL) 6959b774dc0SJohn Baldwin error = axf->Update(&ctx, crp->crp_aad, crp->crp_aad_length); 6969b774dc0SJohn Baldwin else 69726d292d3SJohn Baldwin error = crypto_apply(crp, crp->crp_payload_start, 69826d292d3SJohn Baldwin crp->crp_payload_length, axf->Update, &ctx); 69926d292d3SJohn Baldwin if (error) 70026d292d3SJohn Baldwin return (error); 701c0341432SJohn Baldwin 702c0341432SJohn Baldwin /* Finalize MAC */ 70326d292d3SJohn Baldwin axf->Final(tag, &ctx); 704c0341432SJohn Baldwin 705c0341432SJohn Baldwin if (crp->crp_op & CRYPTO_OP_VERIFY_DIGEST) { 70626d292d3SJohn Baldwin u_char tag2[AES_CBC_MAC_HASH_LEN]; 70720c128daSJohn Baldwin 708c0341432SJohn Baldwin crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen, 70926d292d3SJohn Baldwin tag2); 71026d292d3SJohn Baldwin if (timingsafe_bcmp(tag, tag2, swa->sw_mlen) != 0) 71120c128daSJohn Baldwin error = EBADMSG; 71226d292d3SJohn Baldwin explicit_bzero(tag2, sizeof(tag)); 713c0341432SJohn Baldwin } else { 714c0341432SJohn Baldwin /* 
Inject the authentication data */ 71526d292d3SJohn Baldwin crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen, tag); 716c0341432SJohn Baldwin } 71726d292d3SJohn Baldwin explicit_bzero(tag, sizeof(tag)); 71820c128daSJohn Baldwin explicit_bzero(iv, sizeof(iv)); 71920c128daSJohn Baldwin return (error); 720c0341432SJohn Baldwin } 721c0341432SJohn Baldwin 722c0341432SJohn Baldwin static int 723c0341432SJohn Baldwin swcr_ccm(struct swcr_session *ses, struct cryptop *crp) 724c0341432SJohn Baldwin { 72526d292d3SJohn Baldwin uint32_t blkbuf[howmany(AES_BLOCK_LEN, sizeof(uint32_t))]; 726c0341432SJohn Baldwin u_char *blk = (u_char *)blkbuf; 72726d292d3SJohn Baldwin u_char tag[AES_CBC_MAC_HASH_LEN]; 72826d292d3SJohn Baldwin u_char iv[AES_BLOCK_LEN]; 7299c0e3d3aSJohn Baldwin struct crypto_buffer_cursor cc_in, cc_out; 73026d292d3SJohn Baldwin const u_char *inblk; 73126d292d3SJohn Baldwin u_char *outblk; 732c0341432SJohn Baldwin union authctx ctx; 733c0341432SJohn Baldwin struct swcr_auth *swa; 734c0341432SJohn Baldwin struct swcr_encdec *swe; 735c0341432SJohn Baldwin struct auth_hash *axf; 736c0341432SJohn Baldwin struct enc_xform *exf; 73726d292d3SJohn Baldwin int blksz, error, ivlen, r, resid; 738c0341432SJohn Baldwin 739c0341432SJohn Baldwin swa = &ses->swcr_auth; 740c0341432SJohn Baldwin axf = swa->sw_axf; 741c0341432SJohn Baldwin 742c0341432SJohn Baldwin bcopy(swa->sw_ictx, &ctx, axf->ctxsize); 74326d292d3SJohn Baldwin blksz = AES_BLOCK_LEN; 74426d292d3SJohn Baldwin KASSERT(axf->blocksize == blksz, ("%s: axf block size mismatch", 74526d292d3SJohn Baldwin __func__)); 746c0341432SJohn Baldwin 747c0341432SJohn Baldwin swe = &ses->swcr_encdec; 748c0341432SJohn Baldwin exf = swe->sw_exf; 749723d8764SJohn Baldwin KASSERT(axf->blocksize == exf->native_blocksize, 750723d8764SJohn Baldwin ("%s: blocksize mismatch", __func__)); 751c0341432SJohn Baldwin 752c0341432SJohn Baldwin if ((crp->crp_flags & CRYPTO_F_IV_SEPARATE) == 0) 753c0341432SJohn Baldwin return (EINVAL); 
754c0341432SJohn Baldwin 755c0341432SJohn Baldwin /* Initialize the IV */ 756c0341432SJohn Baldwin ivlen = AES_CCM_IV_LEN; 757c0341432SJohn Baldwin bcopy(crp->crp_iv, iv, ivlen); 758c0341432SJohn Baldwin 759c0341432SJohn Baldwin /* 760c0341432SJohn Baldwin * AES CCM-CBC-MAC needs to know the length of both the auth 761c0341432SJohn Baldwin * data and payload data before doing the auth computation. 762c0341432SJohn Baldwin */ 763c0341432SJohn Baldwin ctx.aes_cbc_mac_ctx.authDataLength = crp->crp_aad_length; 764c0341432SJohn Baldwin ctx.aes_cbc_mac_ctx.cryptDataLength = crp->crp_payload_length; 765c0341432SJohn Baldwin 766c0341432SJohn Baldwin /* Supply MAC with IV */ 767c0341432SJohn Baldwin axf->Reinit(&ctx, iv, ivlen); 768c0341432SJohn Baldwin 769c0341432SJohn Baldwin /* Supply MAC with AAD */ 7709b774dc0SJohn Baldwin if (crp->crp_aad != NULL) 7719b774dc0SJohn Baldwin error = axf->Update(&ctx, crp->crp_aad, crp->crp_aad_length); 7729b774dc0SJohn Baldwin else 7739b774dc0SJohn Baldwin error = crypto_apply(crp, crp->crp_aad_start, 7749b774dc0SJohn Baldwin crp->crp_aad_length, axf->Update, &ctx); 77526d292d3SJohn Baldwin if (error) 77626d292d3SJohn Baldwin return (error); 777c0341432SJohn Baldwin 778c0341432SJohn Baldwin exf->reinit(swe->sw_kschedule, iv); 779c0341432SJohn Baldwin 780c0341432SJohn Baldwin /* Do encryption/decryption with MAC */ 7819c0e3d3aSJohn Baldwin crypto_cursor_init(&cc_in, &crp->crp_buf); 7829c0e3d3aSJohn Baldwin crypto_cursor_advance(&cc_in, crp->crp_payload_start); 7839c0e3d3aSJohn Baldwin if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) { 7849c0e3d3aSJohn Baldwin crypto_cursor_init(&cc_out, &crp->crp_obuf); 7859c0e3d3aSJohn Baldwin crypto_cursor_advance(&cc_out, crp->crp_payload_output_start); 7869c0e3d3aSJohn Baldwin } else 7879c0e3d3aSJohn Baldwin cc_out = cc_in; 78826d292d3SJohn Baldwin for (resid = crp->crp_payload_length; resid >= blksz; resid -= blksz) { 78926d292d3SJohn Baldwin if (crypto_cursor_seglen(&cc_in) < blksz) { 79026d292d3SJohn Baldwin 
crypto_cursor_copydata(&cc_in, blksz, blk); 79126d292d3SJohn Baldwin inblk = blk; 79226d292d3SJohn Baldwin } else { 79326d292d3SJohn Baldwin inblk = crypto_cursor_segbase(&cc_in); 79426d292d3SJohn Baldwin crypto_cursor_advance(&cc_in, blksz); 79526d292d3SJohn Baldwin } 796c0341432SJohn Baldwin if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) { 79726d292d3SJohn Baldwin if (crypto_cursor_seglen(&cc_out) < blksz) 79826d292d3SJohn Baldwin outblk = blk; 79926d292d3SJohn Baldwin else 80026d292d3SJohn Baldwin outblk = crypto_cursor_segbase(&cc_out); 80126d292d3SJohn Baldwin axf->Update(&ctx, inblk, blksz); 80226d292d3SJohn Baldwin exf->encrypt(swe->sw_kschedule, inblk, outblk); 80326d292d3SJohn Baldwin if (outblk == blk) 80426d292d3SJohn Baldwin crypto_cursor_copyback(&cc_out, blksz, blk); 80526d292d3SJohn Baldwin else 80626d292d3SJohn Baldwin crypto_cursor_advance(&cc_out, blksz); 807c0341432SJohn Baldwin } else { 808c0341432SJohn Baldwin /* 809c0341432SJohn Baldwin * One of the problems with CCM+CBC is that 810c0341432SJohn Baldwin * the authentication is done on the 81126d292d3SJohn Baldwin * unencrypted data. As a result, we have to 812c0341432SJohn Baldwin * decrypt the data twice: once to generate 813c0341432SJohn Baldwin * the tag and a second time after the tag is 814c0341432SJohn Baldwin * verified. 
815c0341432SJohn Baldwin */ 81626d292d3SJohn Baldwin exf->decrypt(swe->sw_kschedule, inblk, blk); 81726d292d3SJohn Baldwin axf->Update(&ctx, blk, blksz); 81826d292d3SJohn Baldwin } 81926d292d3SJohn Baldwin } 82026d292d3SJohn Baldwin if (resid > 0) { 82126d292d3SJohn Baldwin crypto_cursor_copydata(&cc_in, resid, blk); 82226d292d3SJohn Baldwin if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) { 82326d292d3SJohn Baldwin axf->Update(&ctx, blk, resid); 82426d292d3SJohn Baldwin exf->encrypt_last(swe->sw_kschedule, blk, blk, resid); 82526d292d3SJohn Baldwin crypto_cursor_copyback(&cc_out, resid, blk); 82626d292d3SJohn Baldwin } else { 82726d292d3SJohn Baldwin exf->decrypt_last(swe->sw_kschedule, blk, blk, resid); 82826d292d3SJohn Baldwin axf->Update(&ctx, blk, resid); 829c0341432SJohn Baldwin } 83008fca7a5SJohn-Mark Gurney } 83108fca7a5SJohn-Mark Gurney 83208fca7a5SJohn-Mark Gurney /* Finalize MAC */ 83326d292d3SJohn Baldwin axf->Final(tag, &ctx); 83408fca7a5SJohn-Mark Gurney 83508fca7a5SJohn-Mark Gurney /* Validate tag */ 83620c128daSJohn Baldwin error = 0; 837c0341432SJohn Baldwin if (!CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) { 83826d292d3SJohn Baldwin u_char tag2[AES_CBC_MAC_HASH_LEN]; 83920c128daSJohn Baldwin 840c0341432SJohn Baldwin crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen, 84126d292d3SJohn Baldwin tag2); 84208fca7a5SJohn-Mark Gurney 84326d292d3SJohn Baldwin r = timingsafe_bcmp(tag, tag2, swa->sw_mlen); 84426d292d3SJohn Baldwin explicit_bzero(tag2, sizeof(tag2)); 84520c128daSJohn Baldwin if (r != 0) { 84620c128daSJohn Baldwin error = EBADMSG; 84720c128daSJohn Baldwin goto out; 84820c128daSJohn Baldwin } 849c0341432SJohn Baldwin 85008fca7a5SJohn-Mark Gurney /* tag matches, decrypt data */ 851507281e5SSean Eric Fagan exf->reinit(swe->sw_kschedule, iv); 8529c0e3d3aSJohn Baldwin crypto_cursor_init(&cc_in, &crp->crp_buf); 8539c0e3d3aSJohn Baldwin crypto_cursor_advance(&cc_in, crp->crp_payload_start); 85426d292d3SJohn Baldwin for (resid = crp->crp_payload_length; resid > 
blksz; 85526d292d3SJohn Baldwin resid -= blksz) { 85626d292d3SJohn Baldwin if (crypto_cursor_seglen(&cc_in) < blksz) { 85726d292d3SJohn Baldwin crypto_cursor_copydata(&cc_in, blksz, blk); 85826d292d3SJohn Baldwin inblk = blk; 85926d292d3SJohn Baldwin } else { 86026d292d3SJohn Baldwin inblk = crypto_cursor_segbase(&cc_in); 86126d292d3SJohn Baldwin crypto_cursor_advance(&cc_in, blksz); 86226d292d3SJohn Baldwin } 86326d292d3SJohn Baldwin if (crypto_cursor_seglen(&cc_out) < blksz) 86426d292d3SJohn Baldwin outblk = blk; 86526d292d3SJohn Baldwin else 86626d292d3SJohn Baldwin outblk = crypto_cursor_segbase(&cc_out); 86726d292d3SJohn Baldwin exf->decrypt(swe->sw_kschedule, inblk, outblk); 86826d292d3SJohn Baldwin if (outblk == blk) 86926d292d3SJohn Baldwin crypto_cursor_copyback(&cc_out, blksz, blk); 87026d292d3SJohn Baldwin else 87126d292d3SJohn Baldwin crypto_cursor_advance(&cc_out, blksz); 87226d292d3SJohn Baldwin } 87326d292d3SJohn Baldwin if (resid > 0) { 87426d292d3SJohn Baldwin crypto_cursor_copydata(&cc_in, resid, blk); 87526d292d3SJohn Baldwin exf->decrypt_last(swe->sw_kschedule, blk, blk, resid); 87626d292d3SJohn Baldwin crypto_cursor_copyback(&cc_out, resid, blk); 87708fca7a5SJohn-Mark Gurney } 87808fca7a5SJohn-Mark Gurney } else { 87908fca7a5SJohn-Mark Gurney /* Inject the authentication data */ 88026d292d3SJohn Baldwin crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen, tag); 88108fca7a5SJohn-Mark Gurney } 88208fca7a5SJohn-Mark Gurney 88320c128daSJohn Baldwin out: 88420c128daSJohn Baldwin explicit_bzero(blkbuf, sizeof(blkbuf)); 88526d292d3SJohn Baldwin explicit_bzero(tag, sizeof(tag)); 88620c128daSJohn Baldwin explicit_bzero(iv, sizeof(iv)); 88720c128daSJohn Baldwin return (error); 88808fca7a5SJohn-Mark Gurney } 88908fca7a5SJohn-Mark Gurney 890091d81d1SSam Leffler /* 891c0341432SJohn Baldwin * Apply a cipher and a digest to perform EtA. 
/*
 * Apply a cipher and a digest to perform EtA.
 */
static int
swcr_eta(struct swcr_session *ses, struct cryptop *crp)
{
	int error;

	/* Encrypt-then-Authenticate: order depends on direction. */
	if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
		error = swcr_encdec(ses, crp);
		if (error == 0)
			error = swcr_authcompute(ses, crp);
	} else {
		/* On decrypt, verify the MAC before decrypting. */
		error = swcr_authcompute(ses, crp);
		if (error == 0)
			error = swcr_encdec(ses, crp);
	}
	return (error);
}

/*
 * Apply a compression/decompression algorithm
 */
static int
swcr_compdec(struct swcr_session *ses, struct cryptop *crp)
{
	u_int8_t *data, *out;
	struct comp_algo *cxf;
	int adj;
	u_int32_t result;

	cxf = ses->swcr_compdec.sw_cxf;

	/* We must handle the whole buffer of data in one time
	 * then if there is not all the data in the mbuf, we must
	 * copy in a buffer.
	 */

	/*
	 * NOTE(review): malloc failure returns EINVAL rather than
	 * ENOMEM here; preserved as historical behavior.
	 */
	data = malloc(crp->crp_payload_length, M_CRYPTO_DATA,  M_NOWAIT);
	if (data == NULL)
		return (EINVAL);
	crypto_copydata(crp, crp->crp_payload_start, crp->crp_payload_length,
	    data);

	if (CRYPTO_OP_IS_COMPRESS(crp->crp_op))
		result = cxf->compress(data, crp->crp_payload_length, &out);
	else
		result = cxf->decompress(data, crp->crp_payload_length, &out);

	free(data, M_CRYPTO_DATA);
	if (result == 0)
		return (EINVAL);
	/* Report the output length to the caller. */
	crp->crp_olen = result;

	/* Check the compressed size when doing compression */
	if (CRYPTO_OP_IS_COMPRESS(crp->crp_op)) {
		if (result >= crp->crp_payload_length) {
			/* Compression was useless, we lost time */
			free(out, M_CRYPTO_DATA);
			return (0);
		}
	}

	/* Copy back the (de)compressed data. m_copyback is
	 * extending the mbuf as necessary.
	 */
	crypto_copyback(crp, crp->crp_payload_start, result, out);
	if (result < crp->crp_payload_length) {
		/* Output shrank: trim the buffer to the new length. */
		switch (crp->crp_buf.cb_type) {
		case CRYPTO_BUF_MBUF:
			adj = result - crp->crp_payload_length;
			m_adj(crp->crp_buf.cb_mbuf, adj);
			break;
		case CRYPTO_BUF_UIO: {
			struct uio *uio = crp->crp_buf.cb_uio;
			int ind;

			adj = crp->crp_payload_length - result;
			ind = uio->uio_iovcnt - 1;

			/* Trim iovecs from the tail until adj is consumed. */
			while (adj > 0 && ind >= 0) {
				if (adj < uio->uio_iov[ind].iov_len) {
					uio->uio_iov[ind].iov_len -= adj;
					break;
				}

				adj -= uio->uio_iov[ind].iov_len;
				uio->uio_iov[ind].iov_len = 0;
				ind--;
				uio->uio_iovcnt--;
			}
		}
			break;
		case CRYPTO_BUF_VMPAGE:
			adj = crp->crp_payload_length - result;
			crp->crp_buf.cb_vm_page_len -= adj;
			break;
		default:
			break;
		}
	}
	free(out, M_CRYPTO_DATA);
	return 0;
}

/*
 * Allocate the cipher key schedule (when the transform needs one) and
 * program the session key if it was supplied at session-create time.
 * On failure the partially initialized state is released later by
 * swcr_freesession().
 */
static int
swcr_setup_cipher(struct swcr_session *ses,
    const struct crypto_session_params *csp)
{
	struct swcr_encdec *swe;
	struct enc_xform *txf;
	int error;

	swe = &ses->swcr_encdec;
	txf = crypto_cipher(csp);
	MPASS(txf->ivsize == csp->csp_ivlen);
	if (txf->ctxsize != 0) {
		swe->sw_kschedule = malloc(txf->ctxsize, M_CRYPTO_DATA,
		    M_NOWAIT);
		if (swe->sw_kschedule == NULL)
			return (ENOMEM);
	}
	if (csp->csp_cipher_key != NULL) {
		error = txf->setkey(swe->sw_kschedule,
		    csp->csp_cipher_key, csp->csp_cipher_klen);
		if (error)
			return (error);
	}
	swe->sw_exf = txf;
	return (0);
}

/*
 * Initialize the auth side of a session: pick the transform, validate
 * and record the MAC length, allocate/key the hash context(s), and in
 * digest-only mode install the per-session process handler.
 */
static int
swcr_setup_auth(struct swcr_session *ses,
    const struct crypto_session_params *csp)
{
	struct swcr_auth *swa;
	struct auth_hash *axf;

	swa = &ses->swcr_auth;

	axf = crypto_auth_hash(csp);
	swa->sw_axf = axf;
	if (csp->csp_auth_mlen < 0 || csp->csp_auth_mlen > axf->hashsize)
		return (EINVAL);
	/* mlen == 0 means "use the transform's full digest size". */
	if (csp->csp_auth_mlen == 0)
		swa->sw_mlen = axf->hashsize;
	else
		swa->sw_mlen = csp->csp_auth_mlen;
	swa->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA, M_NOWAIT);
	if (swa->sw_ictx == NULL)
		return (ENOBUFS);

	switch (csp->csp_auth_alg) {
	case CRYPTO_SHA1_HMAC:
	case CRYPTO_SHA2_224_HMAC:
	case CRYPTO_SHA2_256_HMAC:
	case CRYPTO_SHA2_384_HMAC:
	case CRYPTO_SHA2_512_HMAC:
	case CRYPTO_NULL_HMAC:
	case CRYPTO_RIPEMD160_HMAC:
		/* HMAC needs a second (outer) context. */
		swa->sw_octx = malloc(axf->ctxsize, M_CRYPTO_DATA,
		    M_NOWAIT);
		if (swa->sw_octx == NULL)
			return (ENOBUFS);

		if (csp->csp_auth_key != NULL) {
			swcr_authprepare(axf, swa, csp->csp_auth_key,
			    csp->csp_auth_klen);
		}

		if (csp->csp_mode == CSP_MODE_DIGEST)
			ses->swcr_process = swcr_authcompute;
		break;
	case CRYPTO_SHA1:
	case CRYPTO_SHA2_224:
	case CRYPTO_SHA2_256:
	case CRYPTO_SHA2_384:
	case CRYPTO_SHA2_512:
		/* Plain (unkeyed) hashes. */
		axf->Init(swa->sw_ictx);
		if (csp->csp_mode == CSP_MODE_DIGEST)
			ses->swcr_process = swcr_authcompute;
		break;
	case CRYPTO_AES_NIST_GMAC:
		axf->Init(swa->sw_ictx);
		axf->Setkey(swa->sw_ictx, csp->csp_auth_key,
		    csp->csp_auth_klen);
		if (csp->csp_mode == CSP_MODE_DIGEST)
			ses->swcr_process = swcr_gmac;
		break;
	case CRYPTO_POLY1305:
	case CRYPTO_BLAKE2B:
	case CRYPTO_BLAKE2S:
		/*
		 * Blake2b and Blake2s support an optional key but do
		 * not require one.
		 */
		if (csp->csp_auth_klen == 0 || csp->csp_auth_key != NULL)
			axf->Setkey(swa->sw_ictx, csp->csp_auth_key,
			    csp->csp_auth_klen);
		axf->Init(swa->sw_ictx);
		if (csp->csp_mode == CSP_MODE_DIGEST)
			ses->swcr_process = swcr_authcompute;
		break;
	case CRYPTO_AES_CCM_CBC_MAC:
		axf->Init(swa->sw_ictx);
		axf->Setkey(swa->sw_ictx, csp->csp_auth_key,
		    csp->csp_auth_klen);
		if (csp->csp_mode == CSP_MODE_DIGEST)
			ses->swcr_process = swcr_ccm_cbc_mac;
		break;
	}

	return (0);
}

/*
 * Set up an AES-GCM AEAD session: select the GMAC transform matching
 * the cipher key size, key the auth context, then set up the cipher.
 */
static int
swcr_setup_gcm(struct swcr_session *ses,
    const struct crypto_session_params *csp)
{
	struct swcr_auth *swa;
	struct auth_hash *axf;

	if (csp->csp_ivlen != AES_GCM_IV_LEN)
		return (EINVAL);

	/* First, setup the auth side. */
	swa = &ses->swcr_auth;
	switch (csp->csp_cipher_klen * 8) {
	case 128:
		axf = &auth_hash_nist_gmac_aes_128;
		break;
	case 192:
		axf = &auth_hash_nist_gmac_aes_192;
		break;
	case 256:
		axf = &auth_hash_nist_gmac_aes_256;
		break;
	default:
		return (EINVAL);
	}
	swa->sw_axf = axf;
	if (csp->csp_auth_mlen < 0 || csp->csp_auth_mlen > axf->hashsize)
		return (EINVAL);
	if (csp->csp_auth_mlen == 0)
		swa->sw_mlen = axf->hashsize;
	else
		swa->sw_mlen = csp->csp_auth_mlen;
	swa->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA, M_NOWAIT);
	if (swa->sw_ictx == NULL)
		return (ENOBUFS);
	axf->Init(swa->sw_ictx);
	/* GCM derives the MAC key from the cipher key. */
	if (csp->csp_cipher_key != NULL)
		axf->Setkey(swa->sw_ictx, csp->csp_cipher_key,
		    csp->csp_cipher_klen);

	/* Second, setup the cipher side. */
	return (swcr_setup_cipher(ses, csp));
}

/*
 * Set up an AES-CCM AEAD session: select the CBC-MAC transform
 * matching the cipher key size, key the auth context, then set up the
 * cipher.
 */
static int
swcr_setup_ccm(struct swcr_session *ses,
    const struct crypto_session_params *csp)
{
	struct swcr_auth *swa;
	struct auth_hash *axf;

	if (csp->csp_ivlen != AES_CCM_IV_LEN)
		return (EINVAL);

	/* First, setup the auth side. */
	swa = &ses->swcr_auth;
	switch (csp->csp_cipher_klen * 8) {
	case 128:
		axf = &auth_hash_ccm_cbc_mac_128;
		break;
	case 192:
		axf = &auth_hash_ccm_cbc_mac_192;
		break;
	case 256:
		axf = &auth_hash_ccm_cbc_mac_256;
		break;
	default:
		return (EINVAL);
	}
	swa->sw_axf = axf;
	if (csp->csp_auth_mlen < 0 || csp->csp_auth_mlen > axf->hashsize)
		return (EINVAL);
	if (csp->csp_auth_mlen == 0)
		swa->sw_mlen = axf->hashsize;
	else
		swa->sw_mlen = csp->csp_auth_mlen;
	swa->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA, M_NOWAIT);
	if (swa->sw_ictx == NULL)
		return (ENOBUFS);
	axf->Init(swa->sw_ictx);
	/* CCM derives the MAC key from the cipher key. */
	if (csp->csp_cipher_key != NULL)
		axf->Setkey(swa->sw_ictx, csp->csp_cipher_key,
		    csp->csp_cipher_klen);

	/* Second, setup the cipher side. */
	return (swcr_setup_cipher(ses, csp));
}

/*
 * Return true if this driver can service the requested auth
 * algorithm/parameters.  Algorithms not listed in the switch fall
 * through and are accepted if crypto_auth_hash() knew them.
 */
static bool
swcr_auth_supported(const struct crypto_session_params *csp)
{
	struct auth_hash *axf;

	axf = crypto_auth_hash(csp);
	if (axf == NULL)
		return (false);
	switch (csp->csp_auth_alg) {
	case CRYPTO_SHA1_HMAC:
	case CRYPTO_SHA2_224_HMAC:
	case CRYPTO_SHA2_256_HMAC:
	case CRYPTO_SHA2_384_HMAC:
	case CRYPTO_SHA2_512_HMAC:
	case CRYPTO_NULL_HMAC:
	case CRYPTO_RIPEMD160_HMAC:
		break;
	case CRYPTO_AES_NIST_GMAC:
		/* GMAC requires an AES key and the fixed GCM IV size. */
		switch (csp->csp_auth_klen * 8) {
		case 128:
		case 192:
		case 256:
			break;
		default:
			return (false);
		}
		if (csp->csp_auth_key == NULL)
			return (false);
		if (csp->csp_ivlen != AES_GCM_IV_LEN)
			return (false);
		break;
	case CRYPTO_POLY1305:
		if (csp->csp_auth_klen != POLY1305_KEY_LEN)
			return (false);
		break;
	case CRYPTO_AES_CCM_CBC_MAC:
		/* CBC-MAC requires an AES key and the fixed CCM IV size. */
		switch (csp->csp_auth_klen * 8) {
		case 128:
		case 192:
		case 256:
			break;
		default:
			return (false);
		}
		if (csp->csp_auth_key == NULL)
			return (false);
		if (csp->csp_ivlen != AES_CCM_IV_LEN)
			return (false);
		break;
	}
	return (true);
}

/*
 * Return true if this driver can service the requested cipher
 * algorithm/parameters (IV length must match, except for NULL_CBC).
 */
static bool
swcr_cipher_supported(const struct crypto_session_params *csp)
{
	struct enc_xform *txf;

	txf = crypto_cipher(csp);
	if (txf == NULL)
		return (false);
	if (csp->csp_cipher_alg != CRYPTO_NULL_CBC &&
	    txf->ivsize != csp->csp_ivlen)
		return (false);
	return (true);
}

/*
 * Session-probe entry point: decide whether the software driver can
 * handle the proposed session parameters.  Returns
 * CRYPTODEV_PROBE_SOFTWARE on success so hardware drivers win ties.
 */
static int
swcr_probesession(device_t dev, const struct crypto_session_params *csp)
{

	if ((csp->csp_flags & ~(CSP_F_SEPARATE_OUTPUT | CSP_F_SEPARATE_AAD)) !=
	    0)
		return (EINVAL);
	switch (csp->csp_mode) {
	case CSP_MODE_COMPRESS:
		switch (csp->csp_cipher_alg) {
		case CRYPTO_DEFLATE_COMP:
			break;
		default:
			return (EINVAL);
		}
		break;
	case CSP_MODE_CIPHER:
		switch (csp->csp_cipher_alg) {
		case CRYPTO_AES_NIST_GCM_16:
		case CRYPTO_AES_CCM_16:
			/* AEAD ciphers may not be used cipher-only. */
			return (EINVAL);
		default:
			if (!swcr_cipher_supported(csp))
				return (EINVAL);
			break;
		}
		break;
	case CSP_MODE_DIGEST:
		if (!swcr_auth_supported(csp))
			return (EINVAL);
		break;
	case CSP_MODE_AEAD:
		switch (csp->csp_cipher_alg) {
		case CRYPTO_AES_NIST_GCM_16:
		case CRYPTO_AES_CCM_16:
			break;
		default:
			return (EINVAL);
		}
		break;
	case CSP_MODE_ETA:
		/* AEAD algorithms cannot be used for EtA. */
		switch (csp->csp_cipher_alg) {
		case CRYPTO_AES_NIST_GCM_16:
		case CRYPTO_AES_CCM_16:
			return (EINVAL);
		}
		switch (csp->csp_auth_alg) {
		case CRYPTO_AES_NIST_GMAC:
		case CRYPTO_AES_CCM_CBC_MAC:
			return (EINVAL);
		}

		if (!swcr_cipher_supported(csp) ||
		    !swcr_auth_supported(csp))
			return (EINVAL);
		break;
	default:
		return (EINVAL);
	}

	return (CRYPTODEV_PROBE_SOFTWARE);
}
/*
 * Generate a new software session.
 */
static int
swcr_newsession(device_t dev, crypto_session_t cses,
    const struct crypto_session_params *csp)
{
	struct swcr_session *ses;
	struct swcr_encdec *swe;
	struct swcr_auth *swa;
	struct comp_algo *cxf;
	int error;

	ses = crypto_get_driver_session(cses);
	mtx_init(&ses->swcr_lock, "swcr session lock", NULL, MTX_DEF);

	error = 0;
	swe = &ses->swcr_encdec;
	swa = &ses->swcr_auth;
	/*
	 * Dispatch on session mode; each arm installs the matching
	 * per-session process handler.  Invalid combinations were
	 * already rejected by swcr_probesession(), so the INVARIANTS
	 * cases below only fire on internal inconsistency.
	 */
	switch (csp->csp_mode) {
	case CSP_MODE_COMPRESS:
		switch (csp->csp_cipher_alg) {
		case CRYPTO_DEFLATE_COMP:
			cxf = &comp_algo_deflate;
			break;
#ifdef INVARIANTS
		default:
			panic("bad compression algo");
#endif
		}
		ses->swcr_compdec.sw_cxf = cxf;
		ses->swcr_process = swcr_compdec;
		break;
	case CSP_MODE_CIPHER:
		switch (csp->csp_cipher_alg) {
		case CRYPTO_NULL_CBC:
			ses->swcr_process = swcr_null;
			break;
#ifdef INVARIANTS
		case CRYPTO_AES_NIST_GCM_16:
		case CRYPTO_AES_CCM_16:
			panic("bad cipher algo");
#endif
		default:
			error = swcr_setup_cipher(ses, csp);
			if (error == 0)
				ses->swcr_process = swcr_encdec;
		}
		break;
	case CSP_MODE_DIGEST:
		error = swcr_setup_auth(ses, csp);
		break;
	case CSP_MODE_AEAD:
		switch (csp->csp_cipher_alg) {
		case CRYPTO_AES_NIST_GCM_16:
			error = swcr_setup_gcm(ses, csp);
			if (error == 0)
				ses->swcr_process = swcr_gcm;
			break;
		case CRYPTO_AES_CCM_16:
			error = swcr_setup_ccm(ses, csp);
			if (error == 0)
				ses->swcr_process = swcr_ccm;
			break;
#ifdef INVARIANTS
		default:
			panic("bad aead algo");
#endif
		}
		break;
	case CSP_MODE_ETA:
#ifdef INVARIANTS
		switch (csp->csp_cipher_alg) {
		case CRYPTO_AES_NIST_GCM_16:
		case CRYPTO_AES_CCM_16:
			panic("bad eta cipher algo");
		}
		switch (csp->csp_auth_alg) {
		case CRYPTO_AES_NIST_GMAC:
		case CRYPTO_AES_CCM_CBC_MAC:
			panic("bad eta auth algo");
		}
#endif

		error = swcr_setup_auth(ses, csp);
		if (error)
			break;
		if (csp->csp_cipher_alg == CRYPTO_NULL_CBC) {
			/* Effectively degrade to digest mode. */
			ses->swcr_process = swcr_authcompute;
			break;
		}

		error = swcr_setup_cipher(ses, csp);
		if (error == 0)
			ses->swcr_process = swcr_eta;
		break;
	default:
		error = EINVAL;
	}

	/* On any setup error, tear down partially built state. */
	if (error)
		swcr_freesession(dev, cses);
	return (error);
}

/*
 * Release all session resources.  zfree() zeroizes key material
 * before freeing and tolerates NULL, so this is safe to call on a
 * partially constructed session from the swcr_newsession() error
 * path.
 */
static void
swcr_freesession(device_t dev, crypto_session_t cses)
{
	struct swcr_session *ses;

	ses = crypto_get_driver_session(cses);

	mtx_destroy(&ses->swcr_lock);

	zfree(ses->swcr_encdec.sw_kschedule, M_CRYPTO_DATA);
	zfree(ses->swcr_auth.sw_ictx, M_CRYPTO_DATA);
	zfree(ses->swcr_auth.sw_octx, M_CRYPTO_DATA);
}

/*
 * Process a software request.
1447091d81d1SSam Leffler */ 1448091d81d1SSam Leffler static int 14496810ad6fSSam Leffler swcr_process(device_t dev, struct cryptop *crp, int hint) 1450091d81d1SSam Leffler { 1451c0341432SJohn Baldwin struct swcr_session *ses; 1452091d81d1SSam Leffler 14531b0909d5SConrad Meyer ses = crypto_get_driver_session(crp->crp_session); 1454a7fcb1afSSean Eric Fagan mtx_lock(&ses->swcr_lock); 1455091d81d1SSam Leffler 1456c0341432SJohn Baldwin crp->crp_etype = ses->swcr_process(ses, crp); 1457091d81d1SSam Leffler 1458a7fcb1afSSean Eric Fagan mtx_unlock(&ses->swcr_lock); 1459091d81d1SSam Leffler crypto_done(crp); 1460c0341432SJohn Baldwin return (0); 1461091d81d1SSam Leffler } 1462091d81d1SSam Leffler 1463091d81d1SSam Leffler static void 14643f147ab2SWarner Losh swcr_identify(driver_t *drv, device_t parent) 1465091d81d1SSam Leffler { 14666810ad6fSSam Leffler /* NB: order 10 is so we get attached after h/w devices */ 14676810ad6fSSam Leffler if (device_find_child(parent, "cryptosoft", -1) == NULL && 146886c585d9SMarius Strobl BUS_ADD_CHILD(parent, 10, "cryptosoft", 0) == 0) 14696810ad6fSSam Leffler panic("cryptosoft: could not attach"); 14706810ad6fSSam Leffler } 1471f6c4bc3bSPawel Jakub Dawidek 14726810ad6fSSam Leffler static int 14736810ad6fSSam Leffler swcr_probe(device_t dev) 14746810ad6fSSam Leffler { 14756810ad6fSSam Leffler device_set_desc(dev, "software crypto"); 147686c585d9SMarius Strobl return (BUS_PROBE_NOWILDCARD); 14776810ad6fSSam Leffler } 1478f6c4bc3bSPawel Jakub Dawidek 14796810ad6fSSam Leffler static int 14806810ad6fSSam Leffler swcr_attach(device_t dev) 14816810ad6fSSam Leffler { 14826810ad6fSSam Leffler 14839ebbebe4SConrad Meyer swcr_id = crypto_get_driverid(dev, sizeof(struct swcr_session), 14846810ad6fSSam Leffler CRYPTOCAP_F_SOFTWARE | CRYPTOCAP_F_SYNC); 14856810ad6fSSam Leffler if (swcr_id < 0) { 14866810ad6fSSam Leffler device_printf(dev, "cannot initialize!"); 1487c0341432SJohn Baldwin return (ENXIO); 14886810ad6fSSam Leffler } 14896810ad6fSSam Leffler 
1490c0341432SJohn Baldwin return (0); 1491091d81d1SSam Leffler } 14924b465da2SPawel Jakub Dawidek 14933f147ab2SWarner Losh static int 14946810ad6fSSam Leffler swcr_detach(device_t dev) 14954b465da2SPawel Jakub Dawidek { 14966810ad6fSSam Leffler crypto_unregister_all(swcr_id); 14973f147ab2SWarner Losh return 0; 14984b465da2SPawel Jakub Dawidek } 14996810ad6fSSam Leffler 15006810ad6fSSam Leffler static device_method_t swcr_methods[] = { 15016810ad6fSSam Leffler DEVMETHOD(device_identify, swcr_identify), 15026810ad6fSSam Leffler DEVMETHOD(device_probe, swcr_probe), 15036810ad6fSSam Leffler DEVMETHOD(device_attach, swcr_attach), 15046810ad6fSSam Leffler DEVMETHOD(device_detach, swcr_detach), 15056810ad6fSSam Leffler 1506c0341432SJohn Baldwin DEVMETHOD(cryptodev_probesession, swcr_probesession), 15076810ad6fSSam Leffler DEVMETHOD(cryptodev_newsession, swcr_newsession), 15086810ad6fSSam Leffler DEVMETHOD(cryptodev_freesession,swcr_freesession), 15096810ad6fSSam Leffler DEVMETHOD(cryptodev_process, swcr_process), 15106810ad6fSSam Leffler 15116810ad6fSSam Leffler {0, 0}, 15126810ad6fSSam Leffler }; 15136810ad6fSSam Leffler 15146810ad6fSSam Leffler static driver_t swcr_driver = { 15156810ad6fSSam Leffler "cryptosoft", 15166810ad6fSSam Leffler swcr_methods, 15176810ad6fSSam Leffler 0, /* NB: no softc */ 15186810ad6fSSam Leffler }; 15196810ad6fSSam Leffler static devclass_t swcr_devclass; 15206810ad6fSSam Leffler 15216810ad6fSSam Leffler /* 15226810ad6fSSam Leffler * NB: We explicitly reference the crypto module so we 15236810ad6fSSam Leffler * get the necessary ordering when built as a loadable 15246810ad6fSSam Leffler * module. This is required because we bundle the crypto 15256810ad6fSSam Leffler * module code together with the cryptosoft driver (otherwise 15266810ad6fSSam Leffler * normal module dependencies would handle things). 
15276810ad6fSSam Leffler */ 15286810ad6fSSam Leffler extern int crypto_modevent(struct module *, int, void *); 15296810ad6fSSam Leffler /* XXX where to attach */ 15306810ad6fSSam Leffler DRIVER_MODULE(cryptosoft, nexus, swcr_driver, swcr_devclass, crypto_modevent,0); 15316810ad6fSSam Leffler MODULE_VERSION(cryptosoft, 1); 15326810ad6fSSam Leffler MODULE_DEPEND(cryptosoft, crypto, 1, 1, 1); 1533