cryptosoft: Use crypto_cursor_segment().

Reviewed by:	markj
Sponsored by:	Netflix
Differential Revision:	https://reviews.freebsd.org/D30446
commit 86be314d09 (parent beb817edfe)
Author:	John Baldwin
Date:	2021-05-25 16:59:19 -07:00
sys/opencrypto/cryptosoft.c
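For context: the change collapses each crypto_cursor_seglen()/crypto_cursor_segbase() pair into a single crypto_cursor_segment() call, which returns the base of the current contiguous segment and reports its length through a size_t out-parameter; that is also why the length locals move from int to size_t. A rough sketch of the consumption pattern follows (not part of this commit; walk_segments is a hypothetical consumer of the cursor API):

	#include <sys/param.h>			/* MIN() */
	#include <opencrypto/cryptodev.h>	/* crypto_buffer_cursor API */

	/*
	 * Hypothetical helper: walk 'total' bytes of a crypto buffer one
	 * contiguous segment at a time.  crypto_cursor_segment() replaces
	 * the old crypto_cursor_seglen() + crypto_cursor_segbase() pair.
	 */
	static void
	walk_segments(struct crypto_buffer_cursor *cc, size_t total)
	{
		char *seg;
		size_t seglen, todo;

		while (total > 0) {
			/* Segment base; remaining length via out-parameter. */
			seg = crypto_cursor_segment(cc, &seglen);
			todo = MIN(seglen, total);
			/* ... process 'todo' bytes starting at 'seg' ... */
			crypto_cursor_advance(cc, todo);
			total -= todo;
		}
	}

The diff below applies the same consolidation at every segment-refresh site, still copying through the on-stack blk[] bounce buffer whenever the current segment is shorter than a cipher block.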

@@ -105,7 +105,8 @@ swcr_encdec(struct swcr_session *ses, struct cryptop *crp)
 	const struct crypto_session_params *csp;
 	struct swcr_encdec *sw;
 	struct enc_xform *exf;
-	int i, blks, inlen, ivlen, outlen, resid;
+	size_t inlen, outlen;
+	int i, blks, ivlen, resid;
 	struct crypto_buffer_cursor cc_in, cc_out;
 	const unsigned char *inblk;
 	unsigned char *outblk;
@@ -153,15 +154,13 @@ swcr_encdec(struct swcr_session *ses, struct cryptop *crp)
 
 	crypto_cursor_init(&cc_in, &crp->crp_buf);
 	crypto_cursor_advance(&cc_in, crp->crp_payload_start);
-	inlen = crypto_cursor_seglen(&cc_in);
-	inblk = crypto_cursor_segbase(&cc_in);
+	inblk = crypto_cursor_segment(&cc_in, &inlen);
 	if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
 		crypto_cursor_init(&cc_out, &crp->crp_obuf);
 		crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
 	} else
 		cc_out = cc_in;
-	outlen = crypto_cursor_seglen(&cc_out);
-	outblk = crypto_cursor_segbase(&cc_out);
+	outblk = crypto_cursor_segment(&cc_out, &outlen);
 
 	resid = crp->crp_payload_length;
 	encrypting = CRYPTO_OP_IS_ENCRYPT(crp->crp_op);
@@ -225,8 +224,7 @@ swcr_encdec(struct swcr_session *ses, struct cryptop *crp)
 		}
 
 		if (inlen < blks) {
-			inlen = crypto_cursor_seglen(&cc_in);
-			inblk = crypto_cursor_segbase(&cc_in);
+			inblk = crypto_cursor_segment(&cc_in, &inlen);
 		} else {
 			crypto_cursor_advance(&cc_in, blks);
 			inlen -= blks;
@@ -235,8 +233,7 @@ swcr_encdec(struct swcr_session *ses, struct cryptop *crp)
 
 		if (outlen < blks) {
 			crypto_cursor_copyback(&cc_out, blks, blk);
-			outlen = crypto_cursor_seglen(&cc_out);
-			outblk = crypto_cursor_segbase(&cc_out);
+			outblk = crypto_cursor_segment(&cc_out, &outlen);
 		} else {
 			crypto_cursor_advance(&cc_out, blks);
 			outlen -= blks;
@@ -256,17 +253,14 @@ swcr_encdec(struct swcr_session *ses, struct cryptop *crp)
 		    __func__, exf->name));
 		KASSERT(resid < blks, ("%s: partial block too big", __func__));
 
-		inlen = crypto_cursor_seglen(&cc_in);
-		outlen = crypto_cursor_seglen(&cc_out);
+		inblk = crypto_cursor_segment(&cc_in, &inlen);
+		outblk = crypto_cursor_segment(&cc_out, &outlen);
 		if (inlen < resid) {
 			crypto_cursor_copydata(&cc_in, resid, blk);
 			inblk = blk;
-		} else
-			inblk = crypto_cursor_segbase(&cc_in);
+		}
 		if (outlen < resid)
 			outblk = blk;
-		else
-			outblk = crypto_cursor_segbase(&cc_out);
 		if (encrypting)
 			exf->encrypt_last(sw->sw_kschedule, inblk, outblk,
 			    resid);
@@ -397,7 +391,8 @@ swcr_gmac(struct swcr_session *ses, struct cryptop *crp)
 	struct swcr_auth *swa;
 	struct auth_hash *axf;
 	uint32_t *blkp;
-	int blksz, error, ivlen, len, resid;
+	size_t len;
+	int blksz, error, ivlen, resid;
 
 	swa = &ses->swcr_auth;
 	axf = swa->sw_axf;
@@ -415,9 +410,8 @@ swcr_gmac(struct swcr_session *ses, struct cryptop *crp)
 	crypto_cursor_init(&cc, &crp->crp_buf);
 	crypto_cursor_advance(&cc, crp->crp_payload_start);
 	for (resid = crp->crp_payload_length; resid >= blksz; resid -= len) {
-		len = crypto_cursor_seglen(&cc);
+		inblk = crypto_cursor_segment(&cc, &len);
 		if (len >= blksz) {
-			inblk = crypto_cursor_segbase(&cc);
 			len = rounddown(MIN(len, resid), blksz);
 			crypto_cursor_advance(&cc, len);
 		} else {
@@ -477,7 +471,8 @@ swcr_gcm(struct swcr_session *ses, struct cryptop *crp)
 	struct auth_hash *axf;
 	struct enc_xform *exf;
 	uint32_t *blkp;
-	int blksz, error, ivlen, len, r, resid;
+	size_t len;
+	int blksz, error, ivlen, r, resid;
 
 	swa = &ses->swcr_auth;
 	axf = swa->sw_axf;
@@ -518,9 +513,8 @@ swcr_gcm(struct swcr_session *ses, struct cryptop *crp)
 		crypto_cursor_advance(&cc_in, crp->crp_aad_start);
 		for (resid = crp->crp_aad_length; resid >= blksz;
 		    resid -= len) {
-			len = crypto_cursor_seglen(&cc_in);
+			inblk = crypto_cursor_segment(&cc_in, &len);
 			if (len >= blksz) {
-				inblk = crypto_cursor_segbase(&cc_in);
 				len = rounddown(MIN(len, resid), blksz);
 				crypto_cursor_advance(&cc_in, len);
 			} else {
@@ -551,18 +545,17 @@ swcr_gcm(struct swcr_session *ses, struct cryptop *crp)
 	} else
 		cc_out = cc_in;
 	for (resid = crp->crp_payload_length; resid >= blksz; resid -= blksz) {
-		if (crypto_cursor_seglen(&cc_in) < blksz) {
+		inblk = crypto_cursor_segment(&cc_in, &len);
+		if (len < blksz) {
 			crypto_cursor_copydata(&cc_in, blksz, blk);
 			inblk = blk;
 		} else {
-			inblk = crypto_cursor_segbase(&cc_in);
 			crypto_cursor_advance(&cc_in, blksz);
 		}
 		if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
-			if (crypto_cursor_seglen(&cc_out) < blksz)
+			outblk = crypto_cursor_segment(&cc_out, &len);
+			if (len < blksz)
 				outblk = blk;
-			else
-				outblk = crypto_cursor_segbase(&cc_out);
 			exf->encrypt(swe->sw_kschedule, inblk, outblk);
 			axf->Update(&ctx, outblk, blksz);
 			if (outblk == blk)
@@ -612,17 +605,15 @@ swcr_gcm(struct swcr_session *ses, struct cryptop *crp)
 	crypto_cursor_advance(&cc_in, crp->crp_payload_start);
 	for (resid = crp->crp_payload_length; resid > blksz;
 	    resid -= blksz) {
-		if (crypto_cursor_seglen(&cc_in) < blksz) {
+		inblk = crypto_cursor_segment(&cc_in, &len);
+		if (len < blksz) {
 			crypto_cursor_copydata(&cc_in, blksz, blk);
 			inblk = blk;
-		} else {
-			inblk = crypto_cursor_segbase(&cc_in);
+		} else
 			crypto_cursor_advance(&cc_in, blksz);
-		}
-		if (crypto_cursor_seglen(&cc_out) < blksz)
+		outblk = crypto_cursor_segment(&cc_out, &len);
+		if (len < blksz)
 			outblk = blk;
-		else
-			outblk = crypto_cursor_segbase(&cc_out);
 		exf->decrypt(swe->sw_kschedule, inblk, outblk);
 		if (outblk == blk)
 			crypto_cursor_copyback(&cc_out, blksz, blk);
@@ -717,6 +708,7 @@ swcr_ccm(struct swcr_session *ses, struct cryptop *crp)
 	struct swcr_encdec *swe;
 	struct auth_hash *axf;
 	struct enc_xform *exf;
+	size_t len;
 	int blksz, error, ivlen, r, resid;
 
 	swa = &ses->swcr_auth;
@@ -772,18 +764,16 @@ swcr_ccm(struct swcr_session *ses, struct cryptop *crp)
 	} else
 		cc_out = cc_in;
 	for (resid = crp->crp_payload_length; resid >= blksz; resid -= blksz) {
-		if (crypto_cursor_seglen(&cc_in) < blksz) {
+		inblk = crypto_cursor_segment(&cc_in, &len);
+		if (len < blksz) {
 			crypto_cursor_copydata(&cc_in, blksz, blk);
 			inblk = blk;
-		} else {
-			inblk = crypto_cursor_segbase(&cc_in);
+		} else
 			crypto_cursor_advance(&cc_in, blksz);
-		}
 		if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
-			if (crypto_cursor_seglen(&cc_out) < blksz)
+			outblk = crypto_cursor_segment(&cc_out, &len);
+			if (len < blksz)
 				outblk = blk;
-			else
-				outblk = crypto_cursor_segbase(&cc_out);
 			axf->Update(&ctx, inblk, blksz);
 			exf->encrypt(swe->sw_kschedule, inblk, outblk);
 			if (outblk == blk)
@@ -839,17 +829,15 @@ swcr_ccm(struct swcr_session *ses, struct cryptop *crp)
 	crypto_cursor_advance(&cc_in, crp->crp_payload_start);
 	for (resid = crp->crp_payload_length; resid > blksz;
 	    resid -= blksz) {
-		if (crypto_cursor_seglen(&cc_in) < blksz) {
+		inblk = crypto_cursor_segment(&cc_in, &len);
+		if (len < blksz) {
 			crypto_cursor_copydata(&cc_in, blksz, blk);
 			inblk = blk;
-		} else {
-			inblk = crypto_cursor_segbase(&cc_in);
+		} else
 			crypto_cursor_advance(&cc_in, blksz);
-		}
-		if (crypto_cursor_seglen(&cc_out) < blksz)
+		outblk = crypto_cursor_segment(&cc_out, &len);
+		if (len < blksz)
 			outblk = blk;
-		else
-			outblk = crypto_cursor_segbase(&cc_out);
 		exf->decrypt(swe->sw_kschedule, inblk, outblk);
 		if (outblk == blk)
 			crypto_cursor_copyback(&cc_out, blksz, blk);
@@ -889,6 +877,7 @@ swcr_chacha20_poly1305(struct swcr_session *ses, struct cryptop *crp)
 	struct swcr_encdec *swe;
 	struct auth_hash *axf;
 	struct enc_xform *exf;
+	size_t len;
 	int blksz, error, r, resid;
 
 	swa = &ses->swcr_auth;
@@ -937,18 +926,16 @@ swcr_chacha20_poly1305(struct swcr_session *ses, struct cryptop *crp)
 	} else
 		cc_out = cc_in;
 	for (resid = crp->crp_payload_length; resid >= blksz; resid -= blksz) {
-		if (crypto_cursor_seglen(&cc_in) < blksz) {
+		inblk = crypto_cursor_segment(&cc_in, &len);
+		if (len < blksz) {
 			crypto_cursor_copydata(&cc_in, blksz, blk);
 			inblk = blk;
-		} else {
-			inblk = crypto_cursor_segbase(&cc_in);
+		} else
 			crypto_cursor_advance(&cc_in, blksz);
-		}
 		if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
-			if (crypto_cursor_seglen(&cc_out) < blksz)
+			outblk = crypto_cursor_segment(&cc_out, &len);
+			if (len < blksz)
 				outblk = blk;
-			else
-				outblk = crypto_cursor_segbase(&cc_out);
 			exf->encrypt(swe->sw_kschedule, inblk, outblk);
 			axf->Update(&ctx, outblk, blksz);
 			if (outblk == blk)
@@ -1001,17 +988,15 @@ swcr_chacha20_poly1305(struct swcr_session *ses, struct cryptop *crp)
 	crypto_cursor_advance(&cc_in, crp->crp_payload_start);
 	for (resid = crp->crp_payload_length; resid > blksz;
 	    resid -= blksz) {
-		if (crypto_cursor_seglen(&cc_in) < blksz) {
+		inblk = crypto_cursor_segment(&cc_in, &len);
+		if (len < blksz) {
 			crypto_cursor_copydata(&cc_in, blksz, blk);
 			inblk = blk;
-		} else {
-			inblk = crypto_cursor_segbase(&cc_in);
+		} else
 			crypto_cursor_advance(&cc_in, blksz);
-		}
-		if (crypto_cursor_seglen(&cc_out) < blksz)
+		outblk = crypto_cursor_segment(&cc_out, &len);
+		if (len < blksz)
 			outblk = blk;
-		else
-			outblk = crypto_cursor_segbase(&cc_out);
 		exf->decrypt(swe->sw_kschedule, inblk, outblk);
 		if (outblk == blk)
 			crypto_cursor_copyback(&cc_out, blksz, blk);