Add support for CRYPTO_AES_XTS.
MFC after: 1 week
parent d295bdee07
commit ac970319ff
@@ -74,7 +74,7 @@ aesni_probe(device_t dev)
                 device_printf(dev, "No AESNI support.\n");
                 return (EINVAL);
         }
-        device_set_desc_copy(dev, "AES-CBC");
+        device_set_desc_copy(dev, "AES-CBC,AES-XTS");
         return (0);
 }
 
@@ -94,6 +94,7 @@ aesni_attach(device_t dev)
 
         rw_init(&sc->lock, "aesni_lock");
         crypto_register(sc->cid, CRYPTO_AES_CBC, 0, 0);
+        crypto_register(sc->cid, CRYPTO_AES_XTS, 0, 0);
         return (0);
 }
 
@@ -140,6 +141,7 @@ aesni_newsession(device_t dev, uint32_t *sidp, struct cryptoini *cri)
         for (; cri != NULL; cri = cri->cri_next) {
                 switch (cri->cri_alg) {
                 case CRYPTO_AES_CBC:
+                case CRYPTO_AES_XTS:
                         if (encini != NULL)
                                 return (EINVAL);
                         encini = cri;
@@ -172,6 +174,7 @@ aesni_newsession(device_t dev, uint32_t *sidp, struct cryptoini *cri)
         ses->used = 1;
         TAILQ_INSERT_TAIL(&sc->sessions, ses, next);
         rw_wunlock(&sc->lock);
+        ses->algo = encini->cri_alg;
 
         error = aesni_cipher_setup(ses, encini);
         if (error != 0) {
@@ -243,6 +246,7 @@ aesni_process(device_t dev, struct cryptop *crp, int hint __unused)
         for (crd = crp->crp_desc; crd != NULL; crd = crd->crd_next) {
                 switch (crd->crd_alg) {
                 case CRYPTO_AES_CBC:
+                case CRYPTO_AES_XTS:
                         if (enccrd != NULL) {
                                 error = EINVAL;
                                 goto out;
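The aesni.c changes above are the driver-facing half of the feature: the new algorithm is advertised in the device description, registered with opencrypto at attach time, and accepted next to CRYPTO_AES_CBC in both aesni_newsession() and aesni_process(). As a hedged illustration of the consumer side (not part of this commit), a kernel client could now request an AES-XTS session roughly as sketched below; example_xts_newsession() and its arguments are hypothetical names, and passing 0 as the last crypto_newsession() argument assumes the caller accepts any capable driver.

/*
 * Hypothetical consumer-side sketch, not part of this commit: ask opencrypto
 * for an AES-XTS session now that aesni(4) registers CRYPTO_AES_XTS.
 * cri_klen is in bits and covers both XTS key halves, so 256 selects
 * AES-128-XTS and 512 selects AES-256-XTS.
 */
#include <sys/param.h>
#include <sys/systm.h>
#include <opencrypto/cryptodev.h>

static int
example_xts_newsession(uint64_t *sidp, uint8_t *key, int klen_bits)
{
        struct cryptoini cri;

        bzero(&cri, sizeof(cri));
        cri.cri_alg = CRYPTO_AES_XTS;
        cri.cri_klen = klen_bits;       /* 256 or 512 */
        cri.cri_key = (caddr_t)key;     /* K1 followed by K2 */
        return (crypto_newsession(sidp, &cri, 0));
}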
@@ -55,7 +55,9 @@
 struct aesni_session {
         uint8_t enc_schedule[AES_SCHED_LEN] __aligned(16);
         uint8_t dec_schedule[AES_SCHED_LEN] __aligned(16);
+        uint8_t xts_schedule[AES_SCHED_LEN] __aligned(16);
         uint8_t iv[AES_BLOCK_LEN];
+        int algo;
         int rounds;
         /* uint8_t *ses_ictx; */
         /* uint8_t *ses_octx; */
@@ -1,5 +1,6 @@
 /*-
  * Copyright (c) 2010 Konstantin Belousov <kib@FreeBSD.org>
+ * Copyright (c) 2010 Pawel Jakub Dawidek <pjd@FreeBSD.org>
  * All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
@@ -105,28 +106,138 @@ aesni_decrypt_ecb(int rounds, const void *key_schedule, size_t len,
         }
 }
 
+#define AES_XTS_BLOCKSIZE 16
+#define AES_XTS_IVSIZE 8
+#define AES_XTS_ALPHA 0x87 /* GF(2^128) generator polynomial */
+
+static void
+aesni_crypt_xts_block(int rounds, const void *key_schedule, uint8_t *tweak,
+    const uint8_t *from, uint8_t *to, int do_encrypt)
+{
+        uint8_t block[AES_XTS_BLOCKSIZE];
+        u_int i, carry_in, carry_out;
+
+        for (i = 0; i < AES_XTS_BLOCKSIZE; i++)
+                block[i] = from[i] ^ tweak[i];
+
+        if (do_encrypt)
+                aesni_enc(rounds - 1, key_schedule, block, to, NULL);
+        else
+                aesni_dec(rounds - 1, key_schedule, block, to, NULL);
+
+        for (i = 0; i < AES_XTS_BLOCKSIZE; i++)
+                to[i] ^= tweak[i];
+
+        /* Exponentiate tweak. */
+        carry_in = 0;
+        for (i = 0; i < AES_XTS_BLOCKSIZE; i++) {
+                carry_out = tweak[i] & 0x80;
+                tweak[i] = (tweak[i] << 1) | (carry_in ? 1 : 0);
+                carry_in = carry_out;
+        }
+        if (carry_in)
+                tweak[0] ^= AES_XTS_ALPHA;
+        bzero(block, sizeof(block));
+}
+
+static void
+aesni_crypt_xts(int rounds, const void *data_schedule,
+    const void *tweak_schedule, size_t len, const uint8_t *from, uint8_t *to,
+    const uint8_t iv[AES_BLOCK_LEN], int do_encrypt)
+{
+        uint8_t tweak[AES_XTS_BLOCKSIZE];
+        uint64_t blocknum;
+        size_t i;
+
+        /*
+         * Prepare tweak as E_k2(IV). IV is specified as LE representation
+         * of a 64-bit block number which we allow to be passed in directly.
+         */
+        bcopy(iv, &blocknum, AES_XTS_IVSIZE);
+        for (i = 0; i < AES_XTS_IVSIZE; i++) {
+                tweak[i] = blocknum & 0xff;
+                blocknum >>= 8;
+        }
+        /* Last 64 bits of IV are always zero. */
+        bzero(tweak + AES_XTS_IVSIZE, AES_XTS_IVSIZE);
+        aesni_enc(rounds - 1, tweak_schedule, tweak, tweak, NULL);
+
+        len /= AES_XTS_BLOCKSIZE;
+        for (i = 0; i < len; i++) {
+                aesni_crypt_xts_block(rounds, data_schedule, tweak, from, to,
+                    do_encrypt);
+                from += AES_XTS_BLOCKSIZE;
+                to += AES_XTS_BLOCKSIZE;
+        }
+
+        bzero(tweak, sizeof(tweak));
+}
+
+static void
+aesni_encrypt_xts(int rounds, const void *data_schedule,
+    const void *tweak_schedule, size_t len, const uint8_t *from, uint8_t *to,
+    const uint8_t iv[AES_BLOCK_LEN])
+{
+
+        aesni_crypt_xts(rounds, data_schedule, tweak_schedule, len, from, to,
+            iv, 1);
+}
+
+static void
+aesni_decrypt_xts(int rounds, const void *data_schedule,
+    const void *tweak_schedule, size_t len, const uint8_t *from, uint8_t *to,
+    const uint8_t iv[AES_BLOCK_LEN])
+{
+
+        aesni_crypt_xts(rounds, data_schedule, tweak_schedule, len, from, to,
+            iv, 0);
+}
+
 static int
 aesni_cipher_setup_common(struct aesni_session *ses, const uint8_t *key,
     int keylen)
 {
 
-        switch (keylen) {
-        case 128:
-                ses->rounds = AES128_ROUNDS;
+        switch (ses->algo) {
+        case CRYPTO_AES_CBC:
+                switch (keylen) {
+                case 128:
+                        ses->rounds = AES128_ROUNDS;
+                        break;
+                case 192:
+                        ses->rounds = AES192_ROUNDS;
+                        break;
+                case 256:
+                        ses->rounds = AES256_ROUNDS;
+                        break;
+                default:
+                        return (EINVAL);
+                }
                 break;
-        case 192:
-                ses->rounds = AES192_ROUNDS;
-                break;
-        case 256:
-                ses->rounds = AES256_ROUNDS;
+        case CRYPTO_AES_XTS:
+                switch (keylen) {
+                case 256:
+                        ses->rounds = AES128_ROUNDS;
+                        break;
+                case 512:
+                        ses->rounds = AES256_ROUNDS;
+                        break;
+                default:
+                        return (EINVAL);
+                }
                 break;
         default:
                 return (EINVAL);
         }
 
         aesni_set_enckey(key, ses->enc_schedule, ses->rounds);
         aesni_set_deckey(ses->enc_schedule, ses->dec_schedule, ses->rounds);
-        arc4rand(ses->iv, sizeof(ses->iv), 0);
+        if (ses->algo == CRYPTO_AES_CBC)
+                arc4rand(ses->iv, sizeof(ses->iv), 0);
+        else /* if (ses->algo == CRYPTO_AES_XTS) */ {
+                aesni_set_enckey(key + keylen / 16, ses->xts_schedule,
+                    ses->rounds);
+        }
 
         return (0);
 }
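The core of the new aesni_wrap.c code is aesni_crypt_xts_block() above: the tweak is XORed into the input before the AES call and into the result after it, then multiplied by x in GF(2^128) (the "exponentiate tweak" loop), with a carry out of the top byte folded back in through the 0x87 reduction polynomial. A minimal userland sketch of that doubling step, using a made-up test value rather than a real E_k2(IV) tweak, is shown here; gf128_double() is an illustrative name, not a driver function.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define XTS_BLOCK 16

/*
 * Multiply a 128-bit tweak by x in GF(2^128), least-significant byte first,
 * mirroring the carry loop in aesni_crypt_xts_block(): shift each byte left
 * by one, propagate carries upward, and reduce with 0x87 when the shift
 * carries out of the top byte.
 */
static void
gf128_double(uint8_t tweak[XTS_BLOCK])
{
        unsigned int i, carry_in, carry_out;

        carry_in = 0;
        for (i = 0; i < XTS_BLOCK; i++) {
                carry_out = tweak[i] & 0x80;
                tweak[i] = (tweak[i] << 1) | (carry_in ? 1 : 0);
                carry_in = carry_out;
        }
        if (carry_in)
                tweak[0] ^= 0x87;
}

int
main(void)
{
        uint8_t tweak[XTS_BLOCK];
        unsigned int i;

        memset(tweak, 0, sizeof(tweak));
        tweak[15] = 0x80;       /* high bit set, forces the 0x87 reduction */
        gf128_double(tweak);
        for (i = 0; i < XTS_BLOCK; i++)
                printf("%02x", tweak[i]);
        printf("\n");           /* prints 87000000000000000000000000000000 */
        return (0);
}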
@@ -137,14 +248,14 @@ aesni_cipher_setup(struct aesni_session *ses, struct cryptoini *encini)
         struct thread *td;
         int error;
 
-        td = curthread;
-        error = fpu_kern_enter(td, &ses->fpu_ctx, FPU_KERN_NORMAL);
-        if (error == 0) {
+        td = curthread;
+        error = fpu_kern_enter(td, &ses->fpu_ctx, FPU_KERN_NORMAL);
+        if (error == 0) {
                 error = aesni_cipher_setup_common(ses, encini->cri_key,
                     encini->cri_klen);
-                fpu_kern_leave(td, &ses->fpu_ctx);
-        }
-        return (error);
+                fpu_kern_leave(td, &ses->fpu_ctx);
+        }
+        return (error);
 }
 
 int
@@ -163,7 +274,7 @@ aesni_cipher_process(struct aesni_session *ses, struct cryptodesc *enccrd,
         error = fpu_kern_enter(td, &ses->fpu_ctx, FPU_KERN_NORMAL);
         if (error != 0)
                 goto out;
-
+
         if ((enccrd->crd_flags & CRD_F_KEY_EXPLICIT) != 0) {
                 error = aesni_cipher_setup_common(ses, enccrd->crd_key,
                     enccrd->crd_klen);
@@ -174,21 +285,31 @@ aesni_cipher_process(struct aesni_session *ses, struct cryptodesc *enccrd,
         if ((enccrd->crd_flags & CRD_F_ENCRYPT) != 0) {
                 if ((enccrd->crd_flags & CRD_F_IV_EXPLICIT) != 0)
                         bcopy(enccrd->crd_iv, ses->iv, AES_BLOCK_LEN);
-
                 if ((enccrd->crd_flags & CRD_F_IV_PRESENT) == 0)
                         crypto_copyback(crp->crp_flags, crp->crp_buf,
                             enccrd->crd_inject, AES_BLOCK_LEN, ses->iv);
-
-                aesni_encrypt_cbc(ses->rounds, ses->enc_schedule,
-                    enccrd->crd_len, buf, buf, ses->iv);
+                if (ses->algo == CRYPTO_AES_CBC) {
+                        aesni_encrypt_cbc(ses->rounds, ses->enc_schedule,
+                            enccrd->crd_len, buf, buf, ses->iv);
+                } else /* if (ses->algo == CRYPTO_AES_XTS) */ {
+                        aesni_encrypt_xts(ses->rounds, ses->enc_schedule,
+                            ses->xts_schedule, enccrd->crd_len, buf, buf,
+                            ses->iv);
+                }
         } else {
                 if ((enccrd->crd_flags & CRD_F_IV_EXPLICIT) != 0)
                         bcopy(enccrd->crd_iv, ses->iv, AES_BLOCK_LEN);
                 else
                         crypto_copydata(crp->crp_flags, crp->crp_buf,
                             enccrd->crd_inject, AES_BLOCK_LEN, ses->iv);
-                aesni_decrypt_cbc(ses->rounds, ses->dec_schedule,
-                    enccrd->crd_len, buf, ses->iv);
+                if (ses->algo == CRYPTO_AES_CBC) {
+                        aesni_decrypt_cbc(ses->rounds, ses->dec_schedule,
+                            enccrd->crd_len, buf, ses->iv);
+                } else /* if (ses->algo == CRYPTO_AES_XTS) */ {
+                        aesni_decrypt_xts(ses->rounds, ses->enc_schedule,
+                            ses->xts_schedule, enccrd->crd_len, buf, buf,
+                            ses->iv);
+                }
         }
         fpu_kern_leave(td, &ses->fpu_ctx);
         if (allocated)
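One detail of aesni_crypt_xts() worth calling out: the IV it receives is treated as the little-endian representation of a 64-bit block (sector) number, stored in the first 8 bytes of the tweak block with the upper 8 bytes zeroed, and only then encrypted with the tweak-key schedule to become the starting tweak. The short sketch below reproduces just that layout step in portable C; sector_to_tweak_input() is an illustrative name and the sample block number is made up.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define XTS_BLOCK  16
#define XTS_IVSIZE 8

/*
 * Mimic the tweak-input layout from aesni_crypt_xts(): the 64-bit block
 * number goes little-endian into the first 8 bytes, the rest is zero.
 * The driver then encrypts this block with the tweak key schedule to get
 * the actual starting tweak.
 */
static void
sector_to_tweak_input(uint64_t blocknum, uint8_t out[XTS_BLOCK])
{
        unsigned int i;

        for (i = 0; i < XTS_IVSIZE; i++) {
                out[i] = blocknum & 0xff;
                blocknum >>= 8;
        }
        memset(out + XTS_IVSIZE, 0, XTS_BLOCK - XTS_IVSIZE);
}

int
main(void)
{
        uint8_t buf[XTS_BLOCK];
        unsigned int i;

        sector_to_tweak_input(0x0102030405060708ULL, buf);
        for (i = 0; i < XTS_BLOCK; i++)
                printf("%02x", buf[i]);
        printf("\n");   /* prints 08070605040302010000000000000000 */
        return (0);
}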