crypto: Refactor software support for AEAD ciphers.

Extend struct enc_xform to add new members to handle auth operations for
AEAD ciphers.  In particular, AEAD operations in cryptosoft no longer use
a struct auth_hash.  Instead, the setkey and reinit methods of struct
enc_xform are responsible for initializing both the cipher and auth state.

Reviewed by:	markj
Sponsored by:	The FreeBSD Foundation
Differential Revision:	https://reviews.freebsd.org/D33196
parent 356c922f74
commit ab91fb6c21
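Not part of the commit: the sketch below illustrates the call sequence the
refactored interface implies for a software AEAD consumer such as cryptosoft.
The helper name and its flat-buffer parameters are hypothetical, and the
GCM/CCM length block that cryptosoft also hashes is omitted.

/*
 * Illustrative sketch only: drive an AEAD transform purely through the
 * extended struct enc_xform, with no struct auth_hash involved.
 */
#include <sys/types.h>
#include <opencrypto/xform_enc.h>

static void
aead_seal_sketch(const struct enc_xform *exf, void *kschedule,
    const uint8_t *key, int klen, const uint8_t *nonce, size_t noncelen,
    const uint8_t *aad, u_int aadlen, const uint8_t *pt, uint8_t *ct,
    u_int ptlen, uint8_t *tag)
{
	u_int off;

	/* setkey now initializes both the cipher and the auth state. */
	exf->setkey(kschedule, key, klen);

	/* reinit supplies the nonce to both halves as well. */
	exf->reinit(kschedule, nonce, noncelen);

	/* AAD is absorbed into the MAC via the new update hook. */
	exf->update(kschedule, aad, aadlen);

	/* Encrypt and authenticate full blocks of payload. */
	for (off = 0; off + exf->native_blocksize <= ptlen;
	    off += exf->native_blocksize) {
		exf->encrypt(kschedule, pt + off, ct + off);
		exf->update(kschedule, ct + off, exf->native_blocksize);
	}

	/* A partial trailing block goes through the *_last hooks. */
	if (off < ptlen) {
		exf->encrypt_last(kschedule, pt + off, ct + off, ptlen - off);
		exf->update(kschedule, ct + off, ptlen - off);
	}

	/* final emits the exf->macsize-byte tag. */
	exf->final(tag, kschedule);
}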
@@ -645,12 +645,8 @@ cse_create(struct fcrypt *fcr, struct session2_op *sop)
          cse->hashsize = sop->maclen;
      else if (thash != NULL)
          cse->hashsize = thash->hashsize;
-     else if (csp.csp_cipher_alg == CRYPTO_AES_NIST_GCM_16)
-         cse->hashsize = AES_GMAC_HASH_LEN;
-     else if (csp.csp_cipher_alg == CRYPTO_AES_CCM_16)
-         cse->hashsize = AES_CBC_MAC_HASH_LEN;
-     else if (csp.csp_cipher_alg == CRYPTO_CHACHA20_POLY1305)
-         cse->hashsize = POLY1305_HASH_LEN;
+     else if (csp.csp_mode == CSP_MODE_AEAD)
+         cse->hashsize = txform->macsize;
      cse->ivsize = csp.csp_ivlen;

      mtx_lock(&fcr->lock);
@@ -466,26 +466,18 @@ swcr_gcm(struct swcr_session *ses, struct cryptop *crp)
      struct crypto_buffer_cursor cc_in, cc_out;
      const u_char *inblk;
      u_char *outblk;
-     union authctx ctx;
      struct swcr_auth *swa;
      struct swcr_encdec *swe;
-     const struct auth_hash *axf;
      const struct enc_xform *exf;
      uint32_t *blkp;
      size_t len;
      int blksz, error, ivlen, r, resid;

      swa = &ses->swcr_auth;
-     axf = swa->sw_axf;
-
-     bcopy(swa->sw_ictx, &ctx, axf->ctxsize);
-     blksz = GMAC_BLOCK_LEN;
-     KASSERT(axf->blocksize == blksz, ("%s: axf block size mismatch",
-         __func__));
-
      swe = &ses->swcr_encdec;
      exf = swe->sw_exf;
-     KASSERT(axf->blocksize == exf->native_blocksize,
+     blksz = GMAC_BLOCK_LEN;
+     KASSERT(blksz == exf->native_blocksize,
          ("%s: blocksize mismatch", __func__));

      if ((crp->crp_flags & CRYPTO_F_IV_SEPARATE) == 0)
@@ -493,19 +485,22 @@ swcr_gcm(struct swcr_session *ses, struct cryptop *crp)

      ivlen = AES_GCM_IV_LEN;

-     /* Supply MAC with IV */
-     axf->Reinit(&ctx, crp->crp_iv, ivlen);
+     /* Supply cipher with nonce. */
+     if (crp->crp_cipher_key != NULL)
+         exf->setkey(swe->sw_kschedule, crp->crp_cipher_key,
+             crypto_get_params(crp->crp_session)->csp_cipher_klen);
+     exf->reinit(swe->sw_kschedule, crp->crp_iv, ivlen);

      /* Supply MAC with AAD */
      if (crp->crp_aad != NULL) {
          len = rounddown(crp->crp_aad_length, blksz);
          if (len != 0)
-             axf->Update(&ctx, crp->crp_aad, len);
+             exf->update(swe->sw_kschedule, crp->crp_aad, len);
          if (crp->crp_aad_length != len) {
              memset(blk, 0, blksz);
              memcpy(blk, (char *)crp->crp_aad + len,
                  crp->crp_aad_length - len);
-             axf->Update(&ctx, blk, blksz);
+             exf->update(swe->sw_kschedule, blk, blksz);
          }
      } else {
          crypto_cursor_init(&cc_in, &crp->crp_buf);
@@ -521,20 +516,15 @@ swcr_gcm(struct swcr_session *ses, struct cryptop *crp)
                  crypto_cursor_copydata(&cc_in, len, blk);
                  inblk = blk;
              }
-             axf->Update(&ctx, inblk, len);
+             exf->update(swe->sw_kschedule, inblk, len);
          }
          if (resid > 0) {
              memset(blk, 0, blksz);
              crypto_cursor_copydata(&cc_in, resid, blk);
-             axf->Update(&ctx, blk, blksz);
+             exf->update(swe->sw_kschedule, blk, blksz);
          }
      }

-     if (crp->crp_cipher_key != NULL)
-         exf->setkey(swe->sw_kschedule, crp->crp_cipher_key,
-             crypto_get_params(crp->crp_session)->csp_cipher_klen);
-     exf->reinit(swe->sw_kschedule, crp->crp_iv, ivlen);
-
      /* Do encryption with MAC */
      crypto_cursor_init(&cc_in, &crp->crp_buf);
      crypto_cursor_advance(&cc_in, crp->crp_payload_start);
@@ -556,13 +546,13 @@ swcr_gcm(struct swcr_session *ses, struct cryptop *crp)
              if (len < blksz)
                  outblk = blk;
              exf->encrypt(swe->sw_kschedule, inblk, outblk);
-             axf->Update(&ctx, outblk, blksz);
+             exf->update(swe->sw_kschedule, outblk, blksz);
              if (outblk == blk)
                  crypto_cursor_copyback(&cc_out, blksz, blk);
              else
                  crypto_cursor_advance(&cc_out, blksz);
          } else {
-             axf->Update(&ctx, inblk, blksz);
+             exf->update(swe->sw_kschedule, inblk, blksz);
          }
      }
      if (resid > 0) {
@@ -571,7 +561,7 @@ swcr_gcm(struct swcr_session *ses, struct cryptop *crp)
              exf->encrypt_last(swe->sw_kschedule, blk, blk, resid);
              crypto_cursor_copyback(&cc_out, resid, blk);
          }
-         axf->Update(&ctx, blk, resid);
+         exf->update(swe->sw_kschedule, blk, resid);
      }

      /* length block */
@@ -580,10 +570,10 @@ swcr_gcm(struct swcr_session *ses, struct cryptop *crp)
      *blkp = htobe32(crp->crp_aad_length * 8);
      blkp = (uint32_t *)blk + 3;
      *blkp = htobe32(crp->crp_payload_length * 8);
-     axf->Update(&ctx, blk, blksz);
+     exf->update(swe->sw_kschedule, blk, blksz);

      /* Finalize MAC */
-     axf->Final(tag, &ctx);
+     exf->final(tag, swe->sw_kschedule);

      /* Validate tag */
      error = 0;
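The tag comparison that follows exf->final() on the verify path is outside
the hunks shown here; as a rough sketch only (assuming the kernel's
timingsafe_bcmp() and tags of at most 16 bytes, which holds for GMAC,
CBC-MAC, and Poly1305):

#include <sys/types.h>
#include <sys/errno.h>
#include <sys/libkern.h>
#include <opencrypto/xform_enc.h>

static int
aead_check_tag_sketch(const struct enc_xform *exf, void *kschedule,
    const uint8_t *expected)
{
	uint8_t tag[16];

	/* Emit the computed tag and compare it in constant time. */
	exf->final(tag, kschedule);
	if (timingsafe_bcmp(tag, expected, exf->macsize) != 0)
		return (EBADMSG);
	return (0);
}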
@@ -756,26 +746,18 @@ swcr_ccm(struct swcr_session *ses, struct cryptop *crp)
      struct crypto_buffer_cursor cc_in, cc_out;
      const u_char *inblk;
      u_char *outblk;
-     union authctx ctx;
      struct swcr_auth *swa;
      struct swcr_encdec *swe;
-     const struct auth_hash *axf;
      const struct enc_xform *exf;
      size_t len;
      int blksz, error, ivlen, r, resid;

      csp = crypto_get_params(crp->crp_session);
      swa = &ses->swcr_auth;
-     axf = swa->sw_axf;
-
-     bcopy(swa->sw_ictx, &ctx, axf->ctxsize);
-     blksz = AES_BLOCK_LEN;
-     KASSERT(axf->blocksize == blksz, ("%s: axf block size mismatch",
-         __func__));
-
      swe = &ses->swcr_encdec;
      exf = swe->sw_exf;
-     KASSERT(axf->blocksize == exf->native_blocksize,
+     blksz = AES_BLOCK_LEN;
+     KASSERT(blksz == exf->native_blocksize,
          ("%s: blocksize mismatch", __func__));

      if (crp->crp_payload_length > ccm_max_payload_length(csp))
@@ -786,41 +768,39 @@ swcr_ccm(struct swcr_session *ses, struct cryptop *crp)

      ivlen = csp->csp_ivlen;

-     /* Supply MAC with IV */
-     axf->Reinit(&ctx, crp->crp_iv, ivlen);
+     if (crp->crp_cipher_key != NULL)
+         exf->setkey(swe->sw_kschedule, crp->crp_cipher_key,
+             crypto_get_params(crp->crp_session)->csp_cipher_klen);
+     exf->reinit(swe->sw_kschedule, crp->crp_iv, ivlen);

      /* Supply MAC with b0. */
      _Static_assert(sizeof(blkbuf) >= CCM_CBC_BLOCK_LEN,
          "blkbuf too small for b0");
      build_ccm_b0(crp->crp_iv, ivlen, crp->crp_aad_length,
          crp->crp_payload_length, swa->sw_mlen, blk);
-     axf->Update(&ctx, blk, CCM_CBC_BLOCK_LEN);
+     exf->update(swe->sw_kschedule, blk, CCM_CBC_BLOCK_LEN);

      /* Supply MAC with AAD */
      if (crp->crp_aad_length != 0) {
          len = build_ccm_aad_length(crp->crp_aad_length, blk);
-         axf->Update(&ctx, blk, len);
+         exf->update(swe->sw_kschedule, blk, len);
          if (crp->crp_aad != NULL)
-             axf->Update(&ctx, crp->crp_aad,
+             exf->update(swe->sw_kschedule, crp->crp_aad,
                  crp->crp_aad_length);
          else
              crypto_apply(crp, crp->crp_aad_start,
-                 crp->crp_aad_length, axf->Update, &ctx);
+                 crp->crp_aad_length, exf->update,
+                 swe->sw_kschedule);

          /* Pad the AAD (including length field) to a full block. */
          len = (len + crp->crp_aad_length) % CCM_CBC_BLOCK_LEN;
          if (len != 0) {
              len = CCM_CBC_BLOCK_LEN - len;
              memset(blk, 0, CCM_CBC_BLOCK_LEN);
-             axf->Update(&ctx, blk, len);
+             exf->update(swe->sw_kschedule, blk, len);
          }
      }

-     if (crp->crp_cipher_key != NULL)
-         exf->setkey(swe->sw_kschedule, crp->crp_cipher_key,
-             crypto_get_params(crp->crp_session)->csp_cipher_klen);
-     exf->reinit(swe->sw_kschedule, crp->crp_iv, ivlen);
-
      /* Do encryption/decryption with MAC */
      crypto_cursor_init(&cc_in, &crp->crp_buf);
      crypto_cursor_advance(&cc_in, crp->crp_payload_start);
@@ -840,7 +820,7 @@ swcr_ccm(struct swcr_session *ses, struct cryptop *crp)
              outblk = crypto_cursor_segment(&cc_out, &len);
              if (len < blksz)
                  outblk = blk;
-             axf->Update(&ctx, inblk, blksz);
+             exf->update(swe->sw_kschedule, inblk, blksz);
              exf->encrypt(swe->sw_kschedule, inblk, outblk);
              if (outblk == blk)
                  crypto_cursor_copyback(&cc_out, blksz, blk);
@@ -856,23 +836,23 @@ swcr_ccm(struct swcr_session *ses, struct cryptop *crp)
               * verified.
               */
              exf->decrypt(swe->sw_kschedule, inblk, blk);
-             axf->Update(&ctx, blk, blksz);
+             exf->update(swe->sw_kschedule, blk, blksz);
          }
      }
      if (resid > 0) {
          crypto_cursor_copydata(&cc_in, resid, blk);
          if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
-             axf->Update(&ctx, blk, resid);
+             exf->update(swe->sw_kschedule, blk, resid);
              exf->encrypt_last(swe->sw_kschedule, blk, blk, resid);
              crypto_cursor_copyback(&cc_out, resid, blk);
          } else {
              exf->decrypt_last(swe->sw_kschedule, blk, blk, resid);
-             axf->Update(&ctx, blk, resid);
+             exf->update(swe->sw_kschedule, blk, resid);
          }
      }

      /* Finalize MAC */
-     axf->Final(tag, &ctx);
+     exf->final(tag, swe->sw_kschedule);

      /* Validate tag */
      error = 0;
@@ -937,17 +917,13 @@ swcr_chacha20_poly1305(struct swcr_session *ses, struct cryptop *crp)
      const u_char *inblk;
      u_char *outblk;
      uint64_t *blkp;
-     union authctx ctx;
      struct swcr_auth *swa;
      struct swcr_encdec *swe;
-     const struct auth_hash *axf;
      const struct enc_xform *exf;
      size_t len;
      int blksz, error, r, resid;

      swa = &ses->swcr_auth;
-     axf = swa->sw_axf;
-
      swe = &ses->swcr_encdec;
      exf = swe->sw_exf;
      blksz = exf->native_blocksize;
@@ -958,30 +934,25 @@ swcr_chacha20_poly1305(struct swcr_session *ses, struct cryptop *crp)

      csp = crypto_get_params(crp->crp_session);

-     /* Generate Poly1305 key. */
-     if (crp->crp_cipher_key != NULL)
-         axf->Setkey(&ctx, crp->crp_cipher_key, csp->csp_cipher_klen);
-     else
-         axf->Setkey(&ctx, csp->csp_cipher_key, csp->csp_cipher_klen);
-     axf->Reinit(&ctx, crp->crp_iv, csp->csp_ivlen);
-
-     /* Supply MAC with AAD */
-     if (crp->crp_aad != NULL)
-         axf->Update(&ctx, crp->crp_aad, crp->crp_aad_length);
-     else
-         crypto_apply(crp, crp->crp_aad_start,
-             crp->crp_aad_length, axf->Update, &ctx);
-     if (crp->crp_aad_length % 16 != 0) {
-         /* padding1 */
-         memset(blk, 0, 16);
-         axf->Update(&ctx, blk, 16 - crp->crp_aad_length % 16);
-     }
-
      if (crp->crp_cipher_key != NULL)
          exf->setkey(swe->sw_kschedule, crp->crp_cipher_key,
              csp->csp_cipher_klen);
      exf->reinit(swe->sw_kschedule, crp->crp_iv, csp->csp_ivlen);

+     /* Supply MAC with AAD */
+     if (crp->crp_aad != NULL)
+         exf->update(swe->sw_kschedule, crp->crp_aad,
+             crp->crp_aad_length);
+     else
+         crypto_apply(crp, crp->crp_aad_start,
+             crp->crp_aad_length, exf->update, swe->sw_kschedule);
+     if (crp->crp_aad_length % 16 != 0) {
+         /* padding1 */
+         memset(blk, 0, 16);
+         exf->update(swe->sw_kschedule, blk,
+             16 - crp->crp_aad_length % 16);
+     }
+
      /* Do encryption with MAC */
      crypto_cursor_init(&cc_in, &crp->crp_buf);
      crypto_cursor_advance(&cc_in, crp->crp_payload_start);
@@ -1002,13 +973,13 @@ swcr_chacha20_poly1305(struct swcr_session *ses, struct cryptop *crp)
              if (len < blksz)
                  outblk = blk;
              exf->encrypt(swe->sw_kschedule, inblk, outblk);
-             axf->Update(&ctx, outblk, blksz);
+             exf->update(swe->sw_kschedule, outblk, blksz);
              if (outblk == blk)
                  crypto_cursor_copyback(&cc_out, blksz, blk);
              else
                  crypto_cursor_advance(&cc_out, blksz);
          } else {
-             axf->Update(&ctx, inblk, blksz);
+             exf->update(swe->sw_kschedule, inblk, blksz);
          }
      }
      if (resid > 0) {
@@ -1017,11 +988,11 @@ swcr_chacha20_poly1305(struct swcr_session *ses, struct cryptop *crp)
              exf->encrypt_last(swe->sw_kschedule, blk, blk, resid);
              crypto_cursor_copyback(&cc_out, resid, blk);
          }
-         axf->Update(&ctx, blk, resid);
+         exf->update(swe->sw_kschedule, blk, resid);
          if (resid % 16 != 0) {
              /* padding2 */
              memset(blk, 0, 16);
-             axf->Update(&ctx, blk, 16 - resid % 16);
+             exf->update(swe->sw_kschedule, blk, 16 - resid % 16);
          }
      }

@@ -1029,10 +1000,10 @@ swcr_chacha20_poly1305(struct swcr_session *ses, struct cryptop *crp)
      blkp = (uint64_t *)blk;
      blkp[0] = htole64(crp->crp_aad_length);
      blkp[1] = htole64(crp->crp_payload_length);
-     axf->Update(&ctx, blk, sizeof(uint64_t) * 2);
+     exf->update(swe->sw_kschedule, blk, sizeof(uint64_t) * 2);

      /* Finalize MAC */
-     axf->Final(tag, &ctx);
+     exf->final(tag, swe->sw_kschedule);

      /* Validate tag */
      error = 0;
@@ -1081,7 +1052,6 @@ swcr_chacha20_poly1305(struct swcr_session *ses, struct cryptop *crp)
  out:
      explicit_bzero(blkbuf, sizeof(blkbuf));
      explicit_bzero(tag, sizeof(tag));
-     explicit_bzero(&ctx, sizeof(ctx));
      return (error);
  }

@@ -1302,109 +1272,22 @@ swcr_setup_auth(struct swcr_session *ses,
  }

  static int
- swcr_setup_gcm(struct swcr_session *ses,
+ swcr_setup_aead(struct swcr_session *ses,
      const struct crypto_session_params *csp)
  {
      struct swcr_auth *swa;
-     const struct auth_hash *axf;
+     int error;

+     error = swcr_setup_cipher(ses, csp);
+     if (error)
+         return (error);
+
-     /* First, setup the auth side. */
      swa = &ses->swcr_auth;
-     switch (csp->csp_cipher_klen * 8) {
-     case 128:
-         axf = &auth_hash_nist_gmac_aes_128;
-         break;
-     case 192:
-         axf = &auth_hash_nist_gmac_aes_192;
-         break;
-     case 256:
-         axf = &auth_hash_nist_gmac_aes_256;
-         break;
-     default:
-         return (EINVAL);
-     }
-     swa->sw_axf = axf;
-     if (csp->csp_auth_mlen < 0 || csp->csp_auth_mlen > axf->hashsize)
-         return (EINVAL);
      if (csp->csp_auth_mlen == 0)
-         swa->sw_mlen = axf->hashsize;
+         swa->sw_mlen = ses->swcr_encdec.sw_exf->macsize;
      else
          swa->sw_mlen = csp->csp_auth_mlen;
-     swa->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA, M_NOWAIT);
-     if (swa->sw_ictx == NULL)
-         return (ENOBUFS);
-     axf->Init(swa->sw_ictx);
-     if (csp->csp_cipher_key != NULL)
-         axf->Setkey(swa->sw_ictx, csp->csp_cipher_key,
-             csp->csp_cipher_klen);
-
-     /* Second, setup the cipher side. */
-     return (swcr_setup_cipher(ses, csp));
- }
-
- static int
- swcr_setup_ccm(struct swcr_session *ses,
-     const struct crypto_session_params *csp)
- {
-     struct swcr_auth *swa;
-     const struct auth_hash *axf;
-
-     /* First, setup the auth side. */
-     swa = &ses->swcr_auth;
-     switch (csp->csp_cipher_klen * 8) {
-     case 128:
-         axf = &auth_hash_ccm_cbc_mac_128;
-         break;
-     case 192:
-         axf = &auth_hash_ccm_cbc_mac_192;
-         break;
-     case 256:
-         axf = &auth_hash_ccm_cbc_mac_256;
-         break;
-     default:
-         return (EINVAL);
-     }
-     swa->sw_axf = axf;
-     if (csp->csp_auth_mlen < 0 || csp->csp_auth_mlen > axf->hashsize)
-         return (EINVAL);
-     if (csp->csp_auth_mlen == 0)
-         swa->sw_mlen = axf->hashsize;
-     else
-         swa->sw_mlen = csp->csp_auth_mlen;
-     swa->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA, M_NOWAIT);
-     if (swa->sw_ictx == NULL)
-         return (ENOBUFS);
-     axf->Init(swa->sw_ictx);
-     if (csp->csp_cipher_key != NULL)
-         axf->Setkey(swa->sw_ictx, csp->csp_cipher_key,
-             csp->csp_cipher_klen);
-
-     /* Second, setup the cipher side. */
-     return (swcr_setup_cipher(ses, csp));
- }
-
- static int
- swcr_setup_chacha20_poly1305(struct swcr_session *ses,
-     const struct crypto_session_params *csp)
- {
-     struct swcr_auth *swa;
-     const struct auth_hash *axf;
-
-     /* First, setup the auth side. */
-     swa = &ses->swcr_auth;
-     axf = &auth_hash_chacha20_poly1305;
-     swa->sw_axf = axf;
-     if (csp->csp_auth_mlen < 0 || csp->csp_auth_mlen > axf->hashsize)
-         return (EINVAL);
-     if (csp->csp_auth_mlen == 0)
-         swa->sw_mlen = axf->hashsize;
-     else
-         swa->sw_mlen = csp->csp_auth_mlen;
-
-     /* The auth state is regenerated for each nonce. */
-
-     /* Second, setup the cipher side. */
-     return (swcr_setup_cipher(ses, csp));
+     return (0);
  }

  static bool
@@ -1600,17 +1483,17 @@ swcr_newsession(device_t dev, crypto_session_t cses,
      case CSP_MODE_AEAD:
          switch (csp->csp_cipher_alg) {
          case CRYPTO_AES_NIST_GCM_16:
-             error = swcr_setup_gcm(ses, csp);
+             error = swcr_setup_aead(ses, csp);
              if (error == 0)
                  ses->swcr_process = swcr_gcm;
              break;
          case CRYPTO_AES_CCM_16:
-             error = swcr_setup_ccm(ses, csp);
+             error = swcr_setup_aead(ses, csp);
              if (error == 0)
                  ses->swcr_process = swcr_ccm;
              break;
          case CRYPTO_CHACHA20_POLY1305:
-             error = swcr_setup_chacha20_poly1305(ses, csp);
+             error = swcr_setup_aead(ses, csp);
              if (error == 0)
                  ses->swcr_process = swcr_chacha20_poly1305;
              break;
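A hypothetical consumer-side sketch (not part of the commit): with
csp_auth_mlen left at zero, swcr_setup_aead() falls back to the transform's
macsize, so opening an AEAD session only needs cipher-side parameters.  The
function name below is illustrative.

#include <sys/param.h>
#include <sys/systm.h>
#include <opencrypto/cryptodev.h>

static int
example_gcm_session(crypto_session_t *sidp, const uint8_t *key, int klen)
{
	struct crypto_session_params csp;

	memset(&csp, 0, sizeof(csp));
	csp.csp_mode = CSP_MODE_AEAD;
	csp.csp_cipher_alg = CRYPTO_AES_NIST_GCM_16;
	csp.csp_cipher_key = key;
	csp.csp_cipher_klen = klen;
	csp.csp_ivlen = AES_GCM_IV_LEN;
	/* csp.csp_auth_mlen == 0: use the transform's full tag length. */

	return (crypto_newsession(sidp, &csp, CRYPTOCAP_F_SOFTWARE));
}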
@@ -50,14 +50,32 @@
  #include <sys/cdefs.h>
  __FBSDID("$FreeBSD$");

+ #include <opencrypto/cbc_mac.h>
+ #include <opencrypto/gmac.h>
  #include <opencrypto/xform_enc.h>

+ struct aes_gcm_ctx {
+     struct aes_icm_ctx cipher;
+     struct aes_gmac_ctx gmac;
+ };
+
+ struct aes_ccm_ctx {
+     struct aes_icm_ctx cipher;
+     struct aes_cbc_mac_ctx cbc_mac;
+ };
+
  static int aes_icm_setkey(void *, const uint8_t *, int);
  static void aes_icm_crypt(void *, const uint8_t *, uint8_t *);
  static void aes_icm_crypt_last(void *, const uint8_t *, uint8_t *, size_t);
  static void aes_icm_reinit(void *, const uint8_t *, size_t);
+ static int aes_gcm_setkey(void *, const uint8_t *, int);
  static void aes_gcm_reinit(void *, const uint8_t *, size_t);
+ static int aes_gcm_update(void *, const void *, u_int);
+ static void aes_gcm_final(uint8_t *, void *);
+ static int aes_ccm_setkey(void *, const uint8_t *, int);
  static void aes_ccm_reinit(void *, const uint8_t *, size_t);
+ static int aes_ccm_update(void *, const void *, u_int);
+ static void aes_ccm_final(uint8_t *, void *);

  /* Encryption instances */
  const struct enc_xform enc_xform_aes_icm = {
@@ -80,34 +98,40 @@ const struct enc_xform enc_xform_aes_icm = {
  const struct enc_xform enc_xform_aes_nist_gcm = {
      .type = CRYPTO_AES_NIST_GCM_16,
      .name = "AES-GCM",
-     .ctxsize = sizeof(struct aes_icm_ctx),
+     .ctxsize = sizeof(struct aes_gcm_ctx),
      .blocksize = 1,
      .native_blocksize = AES_BLOCK_LEN,
      .ivsize = AES_GCM_IV_LEN,
      .minkey = AES_MIN_KEY,
      .maxkey = AES_MAX_KEY,
+     .macsize = AES_GMAC_HASH_LEN,
      .encrypt = aes_icm_crypt,
      .decrypt = aes_icm_crypt,
-     .setkey = aes_icm_setkey,
+     .setkey = aes_gcm_setkey,
      .reinit = aes_gcm_reinit,
      .encrypt_last = aes_icm_crypt_last,
      .decrypt_last = aes_icm_crypt_last,
+     .update = aes_gcm_update,
+     .final = aes_gcm_final,
  };

  const struct enc_xform enc_xform_ccm = {
      .type = CRYPTO_AES_CCM_16,
      .name = "AES-CCM",
-     .ctxsize = sizeof(struct aes_icm_ctx),
+     .ctxsize = sizeof(struct aes_ccm_ctx),
      .blocksize = 1,
      .native_blocksize = AES_BLOCK_LEN,
      .ivsize = AES_CCM_IV_LEN,
      .minkey = AES_MIN_KEY, .maxkey = AES_MAX_KEY,
+     .macsize = AES_CBC_MAC_HASH_LEN,
      .encrypt = aes_icm_crypt,
      .decrypt = aes_icm_crypt,
-     .setkey = aes_icm_setkey,
+     .setkey = aes_ccm_setkey,
      .reinit = aes_ccm_reinit,
      .encrypt_last = aes_icm_crypt_last,
      .decrypt_last = aes_icm_crypt_last,
+     .update = aes_ccm_update,
+     .final = aes_ccm_final,
  };

  /*
@@ -125,34 +149,36 @@ aes_icm_reinit(void *key, const uint8_t *iv, size_t ivlen)
  }

  static void
- aes_gcm_reinit(void *key, const uint8_t *iv, size_t ivlen)
+ aes_gcm_reinit(void *vctx, const uint8_t *iv, size_t ivlen)
  {
-     struct aes_icm_ctx *ctx;
+     struct aes_gcm_ctx *ctx = vctx;

      KASSERT(ivlen == AES_GCM_IV_LEN,
          ("%s: invalid IV length", __func__));
-     aes_icm_reinit(key, iv, ivlen);
+     aes_icm_reinit(&ctx->cipher, iv, ivlen);

-     ctx = key;
      /* GCM starts with 2 as counter 1 is used for final xor of tag. */
-     bzero(&ctx->ac_block[AESICM_BLOCKSIZE - 4], 4);
-     ctx->ac_block[AESICM_BLOCKSIZE - 1] = 2;
+     bzero(&ctx->cipher.ac_block[AESICM_BLOCKSIZE - 4], 4);
+     ctx->cipher.ac_block[AESICM_BLOCKSIZE - 1] = 2;
+
+     AES_GMAC_Reinit(&ctx->gmac, iv, ivlen);
  }

  static void
- aes_ccm_reinit(void *key, const uint8_t *iv, size_t ivlen)
+ aes_ccm_reinit(void *vctx, const uint8_t *iv, size_t ivlen)
  {
-     struct aes_icm_ctx *ctx;
+     struct aes_ccm_ctx *ctx = vctx;

      KASSERT(ivlen >= 7 && ivlen <= 13,
          ("%s: invalid IV length", __func__));
-     ctx = key;

      /* CCM has flags, then the IV, then the counter, which starts at 1 */
-     bzero(ctx->ac_block, sizeof(ctx->ac_block));
-     ctx->ac_block[0] = (15 - ivlen) - 1;
-     bcopy(iv, ctx->ac_block + 1, ivlen);
-     ctx->ac_block[AESICM_BLOCKSIZE - 1] = 1;
+     bzero(ctx->cipher.ac_block, sizeof(ctx->cipher.ac_block));
+     ctx->cipher.ac_block[0] = (15 - ivlen) - 1;
+     bcopy(iv, ctx->cipher.ac_block + 1, ivlen);
+     ctx->cipher.ac_block[AESICM_BLOCKSIZE - 1] = 1;
+
+     AES_CBC_MAC_Reinit(&ctx->cbc_mac, iv, ivlen);
  }

  static void
@@ -197,3 +223,63 @@ aes_icm_setkey(void *sched, const uint8_t *key, int len)
      ctx->ac_nr = rijndaelKeySetupEnc(ctx->ac_ek, key, len * 8);
      return (0);
  }
+
+ static int
+ aes_gcm_setkey(void *vctx, const uint8_t *key, int len)
+ {
+     struct aes_gcm_ctx *ctx = vctx;
+     int error;
+
+     error = aes_icm_setkey(&ctx->cipher, key, len);
+     if (error != 0)
+         return (error);
+
+     AES_GMAC_Setkey(&ctx->gmac, key, len);
+     return (0);
+ }
+
+ static int
+ aes_ccm_setkey(void *vctx, const uint8_t *key, int len)
+ {
+     struct aes_ccm_ctx *ctx = vctx;
+     int error;
+
+     error = aes_icm_setkey(&ctx->cipher, key, len);
+     if (error != 0)
+         return (error);
+
+     AES_CBC_MAC_Setkey(&ctx->cbc_mac, key, len);
+     return (0);
+ }
+
+ static int
+ aes_gcm_update(void *vctx, const void *buf, u_int len)
+ {
+     struct aes_gcm_ctx *ctx = vctx;
+
+     return (AES_GMAC_Update(&ctx->gmac, buf, len));
+ }
+
+ static int
+ aes_ccm_update(void *vctx, const void *buf, u_int len)
+ {
+     struct aes_ccm_ctx *ctx = vctx;
+
+     return (AES_CBC_MAC_Update(&ctx->cbc_mac, buf, len));
+ }
+
+ static void
+ aes_gcm_final(uint8_t *tag, void *vctx)
+ {
+     struct aes_gcm_ctx *ctx = vctx;
+
+     AES_GMAC_Final(tag, &ctx->gmac);
+ }
+
+ static void
+ aes_ccm_final(uint8_t *tag, void *vctx)
+ {
+     struct aes_ccm_ctx *ctx = vctx;
+
+     AES_CBC_MAC_Final(tag, &ctx->cbc_mac);
+ }
@@ -84,7 +84,6 @@ extern const struct auth_hash auth_hash_poly1305;
  extern const struct auth_hash auth_hash_ccm_cbc_mac_128;
  extern const struct auth_hash auth_hash_ccm_cbc_mac_192;
  extern const struct auth_hash auth_hash_ccm_cbc_mac_256;
- extern const struct auth_hash auth_hash_chacha20_poly1305;

  union authctx {
      SHA1_CTX sha1ctx;
@@ -31,7 +31,8 @@
  #include <sodium/crypto_onetimeauth_poly1305.h>
  #include <sodium/crypto_stream_chacha20.h>

- struct chacha20_poly1305_cipher_ctx {
+ struct chacha20_poly1305_ctx {
+     struct crypto_onetimeauth_poly1305_state auth;
      const void *key;
      uint32_t ic;
      bool ietf;
@@ -41,7 +42,7 @@ struct chacha20_poly1305_cipher_ctx {
  static int
  chacha20_poly1305_setkey(void *vctx, const uint8_t *key, int len)
  {
-     struct chacha20_poly1305_cipher_ctx *ctx = vctx;
+     struct chacha20_poly1305_ctx *ctx = vctx;

      if (len != CHACHA20_POLY1305_KEY)
          return (EINVAL);
@@ -53,21 +54,31 @@ chacha20_poly1305_setkey(void *vctx, const uint8_t *key, int len)
  static void
  chacha20_poly1305_reinit(void *vctx, const uint8_t *iv, size_t ivlen)
  {
-     struct chacha20_poly1305_cipher_ctx *ctx = vctx;
+     struct chacha20_poly1305_ctx *ctx = vctx;
+     char block[CHACHA20_NATIVE_BLOCK_LEN];

      KASSERT(ivlen == 8 || ivlen == sizeof(ctx->nonce),
          ("%s: invalid nonce length", __func__));

-     /* Block 0 is used for the poly1305 key. */
      memcpy(ctx->nonce, iv, ivlen);
      ctx->ietf = (ivlen == CHACHA20_POLY1305_IV_LEN);
+
+     /* Block 0 is used for the poly1305 key. */
+     if (ctx->ietf)
+         crypto_stream_chacha20_ietf(block, sizeof(block), iv, ctx->key);
+     else
+         crypto_stream_chacha20(block, sizeof(block), iv, ctx->key);
+     crypto_onetimeauth_poly1305_init(&ctx->auth, block);
+     explicit_bzero(block, sizeof(block));
+
+     /* Start with block 1 for ciphertext. */
      ctx->ic = 1;
  }

  static void
  chacha20_poly1305_crypt(void *vctx, const uint8_t *in, uint8_t *out)
  {
-     struct chacha20_poly1305_cipher_ctx *ctx = vctx;
+     struct chacha20_poly1305_ctx *ctx = vctx;
      int error __diagused;

      if (ctx->ietf)
@@ -84,7 +95,7 @@ static void
  chacha20_poly1305_crypt_last(void *vctx, const uint8_t *in, uint8_t *out,
      size_t len)
  {
-     struct chacha20_poly1305_cipher_ctx *ctx = vctx;
+     struct chacha20_poly1305_ctx *ctx = vctx;

      int error __diagused;

@@ -97,89 +108,39 @@ chacha20_poly1305_crypt_last(void *vctx, const uint8_t *in, uint8_t *out,
      KASSERT(error == 0, ("%s failed: %d", __func__, error));
  }

+ static int
+ chacha20_poly1305_update(void *vctx, const void *data, u_int len)
+ {
+     struct chacha20_poly1305_ctx *ctx = vctx;
+
+     crypto_onetimeauth_poly1305_update(&ctx->auth, data, len);
+     return (0);
+ }
+
+ static void
+ chacha20_poly1305_final(uint8_t *digest, void *vctx)
+ {
+     struct chacha20_poly1305_ctx *ctx = vctx;
+
+     crypto_onetimeauth_poly1305_final(&ctx->auth, digest);
+ }
+
  const struct enc_xform enc_xform_chacha20_poly1305 = {
      .type = CRYPTO_CHACHA20_POLY1305,
      .name = "ChaCha20-Poly1305",
-     .ctxsize = sizeof(struct chacha20_poly1305_cipher_ctx),
+     .ctxsize = sizeof(struct chacha20_poly1305_ctx),
      .blocksize = 1,
      .native_blocksize = CHACHA20_NATIVE_BLOCK_LEN,
      .ivsize = CHACHA20_POLY1305_IV_LEN,
      .minkey = CHACHA20_POLY1305_KEY,
      .maxkey = CHACHA20_POLY1305_KEY,
+     .macsize = POLY1305_HASH_LEN,
      .encrypt = chacha20_poly1305_crypt,
      .decrypt = chacha20_poly1305_crypt,
      .setkey = chacha20_poly1305_setkey,
      .reinit = chacha20_poly1305_reinit,
      .encrypt_last = chacha20_poly1305_crypt_last,
      .decrypt_last = chacha20_poly1305_crypt_last,
- };
-
- struct chacha20_poly1305_auth_ctx {
-     struct crypto_onetimeauth_poly1305_state state;
-     const void *key;
- };
- CTASSERT(sizeof(union authctx) >= sizeof(struct chacha20_poly1305_auth_ctx));
-
- static void
- chacha20_poly1305_Init(void *vctx)
- {
- }
-
- static void
- chacha20_poly1305_Setkey(void *vctx, const uint8_t *key, u_int klen)
- {
-     struct chacha20_poly1305_auth_ctx *ctx = vctx;
-
-     ctx->key = key;
- }
-
- static void
- chacha20_poly1305_Reinit(void *vctx, const uint8_t *nonce, u_int noncelen)
- {
-     struct chacha20_poly1305_auth_ctx *ctx = vctx;
-     char block[CHACHA20_NATIVE_BLOCK_LEN];
-
-     switch (noncelen) {
-     case 8:
-         crypto_stream_chacha20(block, sizeof(block), nonce, ctx->key);
-         break;
-     case CHACHA20_POLY1305_IV_LEN:
-         crypto_stream_chacha20_ietf(block, sizeof(block), nonce, ctx->key);
-         break;
-     default:
-         __assert_unreachable();
-     }
-     crypto_onetimeauth_poly1305_init(&ctx->state, block);
-     explicit_bzero(block, sizeof(block));
- }
-
- static int
- chacha20_poly1305_Update(void *vctx, const void *data, u_int len)
- {
-     struct chacha20_poly1305_auth_ctx *ctx = vctx;
-
-     crypto_onetimeauth_poly1305_update(&ctx->state, data, len);
-     return (0);
- }
-
- static void
- chacha20_poly1305_Final(uint8_t *digest, void *vctx)
- {
-     struct chacha20_poly1305_auth_ctx *ctx = vctx;
-
-     crypto_onetimeauth_poly1305_final(&ctx->state, digest);
- }
-
- const struct auth_hash auth_hash_chacha20_poly1305 = {
-     .type = CRYPTO_POLY1305,
-     .name = "ChaCha20-Poly1305",
-     .keysize = POLY1305_KEY_LEN,
-     .hashsize = POLY1305_HASH_LEN,
-     .ctxsize = sizeof(struct chacha20_poly1305_auth_ctx),
-     .blocksize = crypto_onetimeauth_poly1305_BYTES,
-     .Init = chacha20_poly1305_Init,
-     .Setkey = chacha20_poly1305_Setkey,
-     .Reinit = chacha20_poly1305_Reinit,
-     .Update = chacha20_poly1305_Update,
-     .Final = chacha20_poly1305_Final,
+     .update = chacha20_poly1305_update,
+     .final = chacha20_poly1305_final,
  };
@@ -54,6 +54,7 @@ struct enc_xform {
      uint16_t native_blocksize;	/* Used for stream ciphers. */
      uint16_t ivsize;
      uint16_t minkey, maxkey;
+     uint16_t macsize;		/* For AEAD ciphers. */

      /*
       * Encrypt/decrypt a single block.  For stream ciphers this
@@ -70,6 +71,12 @@
       */
      void (*encrypt_last) (void *, const uint8_t *, uint8_t *, size_t len);
      void (*decrypt_last) (void *, const uint8_t *, uint8_t *, size_t len);
+
+     /*
+      * For AEAD ciphers, update and generate MAC/tag.
+      */
+     int (*update) (void *, const void *, u_int);
+     void (*final) (uint8_t *, void *);
  };