crypto/aesni_gcm: migrate from MB library to ISA-L
The current cryptodev AES-NI GCM PMD is implemented using the Multi Buffer Crypto
library. This patch reimplements the device using the ISA-L Crypto library:
https://github.com/01org/isa-l_crypto.

The migration adds support for:

* GMAC algorithm.
* 256-bit cipher key.
* Session-less mode.
* Out-of-place processing.
* Scatter-gather support for chained mbufs (out-of-place only; the destination
  mbuf must be contiguous).

Signed-off-by: Piotr Azarewicz <piotrx.t.azarewicz@intel.com>
Acked-by: Declan Doherty <declan.doherty@intel.com>
commit 9c2a5775c0 (parent e4006b30ee)
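For orientation before the diff: the reworked session setup accepts exactly two
chained xforms (cipher plus auth). A minimal application-side sketch, assuming the
17.02-era rte_crypto_sym_xform field layout used throughout this patch, of the
AES-GCM authenticated-encryption chain with the newly supported 256-bit key:

/*
 * Sketch only, not part of the patch. Field names match the xforms the
 * patch parses in aesni_gcm_set_session_parameters() below.
 */
#include <stdint.h>
#include <rte_crypto.h>

static void
build_aes_gcm_256_enc_chain(struct rte_crypto_sym_xform *cipher_xform,
        struct rte_crypto_sym_xform *auth_xform, uint8_t *key_256)
{
    cipher_xform->type = RTE_CRYPTO_SYM_XFORM_CIPHER;
    cipher_xform->next = auth_xform;        /* CIPHER then AUTH = encrypt */
    cipher_xform->cipher.algo = RTE_CRYPTO_CIPHER_AES_GCM;
    cipher_xform->cipher.op = RTE_CRYPTO_CIPHER_OP_ENCRYPT;
    cipher_xform->cipher.key.data = key_256;
    cipher_xform->cipher.key.length = 32;   /* maps to AESNI_GCM_KEY_256 */

    auth_xform->type = RTE_CRYPTO_SYM_XFORM_AUTH;
    auth_xform->next = NULL;
    auth_xform->auth.algo = RTE_CRYPTO_AUTH_AES_GCM;
    auth_xform->auth.op = RTE_CRYPTO_AUTH_OP_GENERATE;
}

Chaining AUTH before CIPHER with DECRYPT/VERIFY selects authenticated decryption
instead, per the operation-selection logic in the patch.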
@@ -37,6 +37,7 @@ default_path=$PATH
 # - DPDK_BUILD_TEST_CONFIGS (defconfig1+option1+option2 defconfig2)
 # - DPDK_DEP_ARCHIVE
 # - DPDK_DEP_CFLAGS
+# - DPDK_DEP_ISAL_CRYPTO (y/[n])
 # - DPDK_DEP_LDFLAGS
 # - DPDK_DEP_MOFED (y/[n])
 # - DPDK_DEP_NUMA (y/[n])
@@ -119,6 +120,7 @@ reset_env ()
     unset CROSS
     unset DPDK_DEP_ARCHIVE
     unset DPDK_DEP_CFLAGS
+    unset DPDK_DEP_ISAL_CRYPTO
     unset DPDK_DEP_LDFLAGS
     unset DPDK_DEP_MOFED
     unset DPDK_DEP_NUMA
@@ -176,7 +178,7 @@ config () # <directory> <target> <options>
     sed -ri 's,(PCAP=)n,\1y,' $1/.config
     test -z "$AESNI_MULTI_BUFFER_LIB_PATH" || \
     sed -ri 's,(PMD_AESNI_MB=)n,\1y,' $1/.config
-    test -z "$AESNI_MULTI_BUFFER_LIB_PATH" || \
+    test "$DPDK_DEP_ISAL_CRYPTO" != y || \
     sed -ri 's,(PMD_AESNI_GCM=)n,\1y,' $1/.config
     test -z "$LIBSSO_SNOW3G_PATH" || \
     sed -ri 's,(PMD_SNOW3G=)n,\1y,' $1/.config
@@ -32,10 +32,8 @@ AES-NI GCM Crypto Poll Mode Driver
 
 The AES-NI GCM PMD (**librte_pmd_aesni_gcm**) provides poll mode crypto driver
-support for utilizing Intel multi buffer library (see AES-NI Multi-buffer PMD documentation
-to learn more about it, including installation).
-
-The AES-NI GCM PMD has current only been tested on Fedora 21 64-bit with gcc.
+support for utilizing Intel ISA-L crypto library, which provides operation acceleration
+through the AES-NI instruction sets for AES-GCM authenticated cipher algorithm.
 
 Features
 --------
@@ -49,16 +47,21 @@ Cipher algorithms:
 Authentication algorithms:
 
 * RTE_CRYPTO_AUTH_AES_GCM
+* RTE_CRYPTO_AUTH_AES_GMAC
 
 Installation
 ------------
 
+To build DPDK with the AESNI_GCM_PMD the user is required to install
+the ``libisal_crypto`` library in the build environment.
+For download and more details please visit `<https://github.com/01org/isa-l_crypto>`_.
+
 Initialization
 --------------
 
 In order to enable this virtual crypto PMD, user must:
 
-* Export the environmental variable AESNI_MULTI_BUFFER_LIB_PATH with the path where
-  the library was extracted.
-
-* Build the multi buffer library (go to Installation section in AES-NI MB PMD documentation).
+* Install the ISA-L crypto library (explained in Installation section).
 
 * Set CONFIG_RTE_LIBRTE_PMD_AESNI_GCM=y in config/common_base.
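The documented initialization normally happens via the EAL command line. A hedged
sketch of the runtime alternative; rte_eal_vdev_init() and the "crypto_aesni_gcm"
driver name are assumptions based on the 17.02-era DPDK API:

/* Sketch only: equivalent of passing --vdev="crypto_aesni_gcm" to the EAL. */
#include <rte_dev.h>

static int
create_gcm_vdev(void)
{
    /* name and args as a vdev init string; NULL args = defaults */
    return rte_eal_vdev_init("crypto_aesni_gcm", NULL);
}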
@@ -86,9 +89,6 @@ Example:
 Limitations
 -----------
 
-* Chained mbufs are not supported.
+* Chained mbufs are supported but only out-of-place (destination mbuf must be contiguous).
 * Hash only is not supported.
 * Cipher only is not supported.
-* Only in-place is currently supported (destination address is the same as source address).
-* Only supports session-oriented API implementation (session-less APIs are not supported).
 * Not performance tuned.
@@ -39,9 +39,9 @@ Supported Feature Flags
    "RTE_CRYPTODEV_FF_SYMMETRIC_CRYPTO",x,x,x,x,x,x
    "RTE_CRYPTODEV_FF_ASYMMETRIC_CRYPTO",,,,,,
    "RTE_CRYPTODEV_FF_SYM_OPERATION_CHAINING",x,x,x,x,x,x
-   "RTE_CRYPTODEV_FF_CPU_SSE",,,x,x,x,x
-   "RTE_CRYPTODEV_FF_CPU_AVX",,,x,x,x,x
-   "RTE_CRYPTODEV_FF_CPU_AVX2",,,x,x,,
+   "RTE_CRYPTODEV_FF_CPU_SSE",,,x,,x,x
+   "RTE_CRYPTODEV_FF_CPU_AVX",,,x,,x,x
+   "RTE_CRYPTODEV_FF_CPU_AVX2",,,x,,,
    "RTE_CRYPTODEV_FF_CPU_AVX512",,,x,,,
    "RTE_CRYPTODEV_FF_CPU_AESNI",,,x,x,,
    "RTE_CRYPTODEV_FF_HW_ACCELERATED",x,,,,,
@@ -83,6 +83,7 @@ Supported Authentication Algorithms
    "SHA512",,,,,,
    "SHA512_HMAC",x,,x,,,
    "AES_XCBC",x,,x,,,
+   "AES_GMAC",,,,x,,
    "SNOW3G_UIA2",x,,,,x,
    "KASUMI_F9",,,,,,x
@@ -92,6 +93,6 @@ Supported AEAD Algorithms
    :header: "AEAD Algorithms", "qat", "null", "aesni_mb", "aesni_gcm", "snow3g", "kasumi"
    :stub-columns: 1
 
-   "AES_GCM_128",x,,x,,,
+   "AES_GCM_128",x,,,x,,
    "AES_GCM_192",x,,,,,
-   "AES_GCM_256",x,,,,,
+   "AES_GCM_256",x,,,x,,
@@ -161,6 +161,18 @@ New Features
   AESNI MB PMD has been moved to a new repository, in GitHub.
   * Support for single operations (cipher only and authentication only).
 
+* **Updated the AES-NI GCM PMD.**
+
+  The AES-NI GCM PMD was migrated from the MB library to the ISA-L library.
+  The migration adds the following support:
+
+  * GMAC algorithm.
+  * 256-bit cipher key.
+  * Session-less mode.
+  * Out-of-place processing.
+  * Scatter-gather support for chained mbufs (out-of-place only; the
+    destination mbuf must be contiguous).
+
 * **Added Elastic Flow Distributor library (rte_efd).**
 
   This new library uses perfect hashing to determine a target/value for a
@@ -31,9 +31,6 @@
 include $(RTE_SDK)/mk/rte.vars.mk
 
-ifneq ($(MAKECMDGOALS),clean)
-ifeq ($(AESNI_MULTI_BUFFER_LIB_PATH),)
-$(error "Please define AESNI_MULTI_BUFFER_LIB_PATH environment variable")
-endif
-endif
 
 # library name
@@ -50,10 +47,7 @@ LIBABIVER := 1
 EXPORT_MAP := rte_pmd_aesni_gcm_version.map
 
 # external library dependencies
-CFLAGS += -I$(AESNI_MULTI_BUFFER_LIB_PATH)
-CFLAGS += -I$(AESNI_MULTI_BUFFER_LIB_PATH)/include
-LDLIBS += -L$(AESNI_MULTI_BUFFER_LIB_PATH) -lIPSec_MB
-LDLIBS += -lcrypto
+LDLIBS += -lisal_crypto
 
 # library source files
 SRCS-$(CONFIG_RTE_LIBRTE_PMD_AESNI_GCM) += aesni_gcm_pmd.c
@@ -37,91 +37,26 @@
 #define LINUX
 #endif
 
-#include <gcm_defines.h>
-#include <aux_funcs.h>
+#include <isa-l_crypto/aes_gcm.h>
 
-/** Supported vector modes */
-enum aesni_gcm_vector_mode {
-    RTE_AESNI_GCM_NOT_SUPPORTED = 0,
-    RTE_AESNI_GCM_SSE,
-    RTE_AESNI_GCM_AVX,
-    RTE_AESNI_GCM_AVX2
-};
+typedef void (*aesni_gcm_init_t)(struct gcm_data *my_ctx_data,
+        uint8_t *iv,
+        uint8_t const *aad,
+        uint64_t aad_len);
 
-typedef void (*aes_keyexp_128_enc_t)(void *key, void *enc_exp_keys);
+typedef void (*aesni_gcm_update_t)(struct gcm_data *my_ctx_data,
+        uint8_t *out,
+        const uint8_t *in,
+        uint64_t plaintext_len);
 
-typedef void (*aesni_gcm_t)(gcm_data *my_ctx_data, u8 *out, const u8 *in,
-        u64 plaintext_len, u8 *iv, const u8 *aad, u64 aad_len,
-        u8 *auth_tag, u64 auth_tag_len);
+typedef void (*aesni_gcm_finalize_t)(struct gcm_data *my_ctx_data,
+        uint8_t *auth_tag,
+        uint64_t auth_tag_len);
 
-typedef void (*aesni_gcm_precomp_t)(gcm_data *my_ctx_data, u8 *hash_subkey);
-
 /** GCM library function pointer table */
 struct aesni_gcm_ops {
-    struct {
-        struct {
-            aes_keyexp_128_enc_t aes128_enc;
-            /**< AES128 enc key expansion */
-        } keyexp;
-        /**< Key expansion functions */
-    } aux; /**< Auxiliary functions */
-
-    struct {
-        aesni_gcm_t enc;        /**< GCM encode function pointer */
-        aesni_gcm_t dec;        /**< GCM decode function pointer */
-        aesni_gcm_precomp_t precomp;    /**< GCM pre-compute */
-    } gcm;  /**< GCM functions */
+    aesni_gcm_init_t init;
+    aesni_gcm_update_t update;
+    aesni_gcm_finalize_t finalize;
 };
 
-
-static const struct aesni_gcm_ops gcm_ops[] = {
-    [RTE_AESNI_GCM_NOT_SUPPORTED] = {
-        .aux = {
-            .keyexp = {
-                NULL
-            }
-        },
-        .gcm = {
-            NULL
-        }
-    },
-    [RTE_AESNI_GCM_SSE] = {
-        .aux = {
-            .keyexp = {
-                aes_keyexp_128_enc_sse
-            }
-        },
-        .gcm = {
-            aesni_gcm_enc_sse,
-            aesni_gcm_dec_sse,
-            aesni_gcm_precomp_sse
-        }
-    },
-    [RTE_AESNI_GCM_AVX] = {
-        .aux = {
-            .keyexp = {
-                aes_keyexp_128_enc_avx,
-            }
-        },
-        .gcm = {
-            aesni_gcm_enc_avx_gen2,
-            aesni_gcm_dec_avx_gen2,
-            aesni_gcm_precomp_avx_gen2
-        }
-    },
-    [RTE_AESNI_GCM_AVX2] = {
-        .aux = {
-            .keyexp = {
-                aes_keyexp_128_enc_avx2,
-            }
-        },
-        .gcm = {
-            aesni_gcm_enc_avx_gen4,
-            aesni_gcm_dec_avx_gen4,
-            aesni_gcm_precomp_avx_gen4
-        }
-    }
-};
-
-
 #endif /* _AESNI_GCM_OPS_H_ */
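The slimmed-down ops table mirrors ISA-L's pre-compute/init/update/finalize flow.
A sketch of the underlying call sequence for a one-shot AES-128-GCM encrypt, using
only symbols that appear in this patch (buffer allocation elided):

/* Sketch only: the three per-operation calls the new ops table wraps. */
#include <stdint.h>
#include <isa-l_crypto/aes_gcm.h>

static void
gcm128_encrypt_one_shot(struct gcm_data *gdata, uint8_t *key,
        uint8_t *out, const uint8_t *in, uint64_t len,
        uint8_t *iv, uint8_t *aad, uint64_t aad_len,
        uint8_t *tag, uint64_t tag_len)
{
    aesni_gcm128_pre(key, gdata);                   /* key expansion + pre-compute */
    aesni_gcm128_init(gdata, iv, aad, aad_len);     /* start the AEAD operation */
    aesni_gcm128_enc_update(gdata, out, in, len);   /* callable once per segment */
    aesni_gcm128_enc_finalize(gdata, tag, tag_len); /* emit the auth tag */
}

The separate update step is what makes the chained-mbuf (scatter-gather) loop in
the PMD possible.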
@@ -30,8 +30,6 @@
  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
  */
 
-#include <openssl/aes.h>
-
 #include <rte_common.h>
 #include <rte_config.h>
 #include <rte_hexdump.h>
@@ -44,113 +42,97 @@
 
 #include "aesni_gcm_pmd_private.h"
 
-static int
-aesni_gcm_calculate_hash_sub_key(uint8_t *hsubkey, unsigned hsubkey_length,
-        uint8_t *aeskey, unsigned aeskey_length)
-{
-    uint8_t key[aeskey_length] __rte_aligned(16);
-    AES_KEY enc_key;
+/** GCM encode functions pointer table */
+static const struct aesni_gcm_ops aesni_gcm_enc[] = {
+    [AESNI_GCM_KEY_128] = {
+        aesni_gcm128_init,
+        aesni_gcm128_enc_update,
+        aesni_gcm128_enc_finalize
+    },
+    [AESNI_GCM_KEY_256] = {
+        aesni_gcm256_init,
+        aesni_gcm256_enc_update,
+        aesni_gcm256_enc_finalize
+    }
+};
 
-    if (hsubkey_length % 16 != 0 && aeskey_length % 16 != 0)
-        return -EFAULT;
-
-    memcpy(key, aeskey, aeskey_length);
-
-    if (AES_set_encrypt_key(key, aeskey_length << 3, &enc_key) != 0)
-        return -EFAULT;
-
-    AES_encrypt(hsubkey, hsubkey, &enc_key);
-
-    return 0;
-}
-
-/** Get xform chain order */
-static int
-aesni_gcm_get_mode(const struct rte_crypto_sym_xform *xform)
-{
-    /*
-     * GCM only supports authenticated encryption or authenticated
-     * decryption, all other options are invalid, so we must have exactly
-     * 2 xform structs chained together
-     */
-    if (xform->next == NULL || xform->next->next != NULL)
-        return -1;
-
-    if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
-            xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH) {
-        return AESNI_GCM_OP_AUTHENTICATED_ENCRYPTION;
-    }
-
-    if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH &&
-            xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER) {
-        return AESNI_GCM_OP_AUTHENTICATED_DECRYPTION;
-    }
-
-    return -1;
-}
+/** GCM decode functions pointer table */
+static const struct aesni_gcm_ops aesni_gcm_dec[] = {
+    [AESNI_GCM_KEY_128] = {
+        aesni_gcm128_init,
+        aesni_gcm128_dec_update,
+        aesni_gcm128_dec_finalize
+    },
+    [AESNI_GCM_KEY_256] = {
+        aesni_gcm256_init,
+        aesni_gcm256_dec_update,
+        aesni_gcm256_dec_finalize
+    }
+};
 
 /** Parse crypto xform chain and set private session parameters */
 int
-aesni_gcm_set_session_parameters(const struct aesni_gcm_ops *gcm_ops,
-        struct aesni_gcm_session *sess,
+aesni_gcm_set_session_parameters(struct aesni_gcm_session *sess,
         const struct rte_crypto_sym_xform *xform)
 {
-    const struct rte_crypto_sym_xform *auth_xform = NULL;
-    const struct rte_crypto_sym_xform *cipher_xform = NULL;
+    const struct rte_crypto_sym_xform *auth_xform;
+    const struct rte_crypto_sym_xform *cipher_xform;
 
-    uint8_t hsubkey[16] __rte_aligned(16) = { 0 };
+    if (xform->next == NULL || xform->next->next != NULL) {
+        GCM_LOG_ERR("Two and only two chained xform required");
+        return -EINVAL;
+    }
 
-    /* Select Crypto operation - hash then cipher / cipher then hash */
-    switch (aesni_gcm_get_mode(xform)) {
-    case AESNI_GCM_OP_AUTHENTICATED_ENCRYPTION:
-        sess->op = AESNI_GCM_OP_AUTHENTICATED_ENCRYPTION;
-
-        cipher_xform = xform;
+    if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
+            xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH) {
         auth_xform = xform->next;
-        break;
-    case AESNI_GCM_OP_AUTHENTICATED_DECRYPTION:
-        sess->op = AESNI_GCM_OP_AUTHENTICATED_DECRYPTION;
-
+        cipher_xform = xform;
+    } else if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH &&
+            xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER) {
         auth_xform = xform;
         cipher_xform = xform->next;
+    } else {
+        GCM_LOG_ERR("Cipher and auth xform required");
+        return -EINVAL;
+    }
 
+    if (!(cipher_xform->cipher.algo == RTE_CRYPTO_CIPHER_AES_GCM &&
+            (auth_xform->auth.algo == RTE_CRYPTO_AUTH_AES_GCM ||
+            auth_xform->auth.algo == RTE_CRYPTO_AUTH_AES_GMAC))) {
+        GCM_LOG_ERR("We only support AES GCM and AES GMAC");
+        return -EINVAL;
+    }
+
+    /* Select Crypto operation */
+    if (cipher_xform->cipher.op == RTE_CRYPTO_CIPHER_OP_ENCRYPT &&
+            auth_xform->auth.op == RTE_CRYPTO_AUTH_OP_GENERATE)
+        sess->op = AESNI_GCM_OP_AUTHENTICATED_ENCRYPTION;
+    else if (cipher_xform->cipher.op == RTE_CRYPTO_CIPHER_OP_DECRYPT &&
+            auth_xform->auth.op == RTE_CRYPTO_AUTH_OP_VERIFY)
+        sess->op = AESNI_GCM_OP_AUTHENTICATED_DECRYPTION;
+    else {
+        GCM_LOG_ERR("Cipher/Auth operations: Encrypt/Generate or"
+                " Decrypt/Verify are valid only");
+        return -EINVAL;
+    }
+
+    /* Check key length and calculate GCM pre-compute. */
+    switch (cipher_xform->cipher.key.length) {
+    case 16:
+        aesni_gcm128_pre(cipher_xform->cipher.key.data, &sess->gdata);
+        sess->key = AESNI_GCM_KEY_128;
+
+        break;
+    case 32:
+        aesni_gcm256_pre(cipher_xform->cipher.key.data, &sess->gdata);
+        sess->key = AESNI_GCM_KEY_256;
+
         break;
     default:
-        GCM_LOG_ERR("Unsupported operation chain order parameter");
+        GCM_LOG_ERR("Unsupported cipher key length");
         return -EINVAL;
     }
 
-    /* We only support AES GCM */
-    if (cipher_xform->cipher.algo != RTE_CRYPTO_CIPHER_AES_GCM &&
-            auth_xform->auth.algo != RTE_CRYPTO_AUTH_AES_GCM)
-        return -EINVAL;
-
-    /* Select cipher direction */
-    if (sess->op == AESNI_GCM_OP_AUTHENTICATED_ENCRYPTION &&
-            cipher_xform->cipher.op !=
-                    RTE_CRYPTO_CIPHER_OP_ENCRYPT) {
-        GCM_LOG_ERR("xform chain (CIPHER/AUTH) and cipher operation "
-                "(DECRYPT) specified are an invalid selection");
-        return -EINVAL;
-    } else if (sess->op == AESNI_GCM_OP_AUTHENTICATED_DECRYPTION &&
-            cipher_xform->cipher.op !=
-                    RTE_CRYPTO_CIPHER_OP_DECRYPT) {
-        GCM_LOG_ERR("xform chain (AUTH/CIPHER) and cipher operation "
-                "(ENCRYPT) specified are an invalid selection");
-        return -EINVAL;
-    }
-
-    /* Expand GCM AES128 key */
-    (*gcm_ops->aux.keyexp.aes128_enc)(cipher_xform->cipher.key.data,
-            sess->gdata.expanded_keys);
-
-    /* Calculate hash sub key here */
-    aesni_gcm_calculate_hash_sub_key(hsubkey, sizeof(hsubkey),
-            cipher_xform->cipher.key.data,
-            cipher_xform->cipher.key.length);
-
-    /* Calculate GCM pre-compute */
-    (*gcm_ops->gcm.precomp)(&sess->gdata, hsubkey);
-
     return 0;
 }
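Applications reach aesni_gcm_set_session_parameters() through the generic session
API. A sketch assuming the pre-17.08 rte_cryptodev_sym_session_create() signature
(device id plus xform chain):

/* Sketch only: session-oriented configuration against the function above. */
#include <rte_cryptodev.h>

static struct rte_cryptodev_sym_session *
create_gcm_session(uint8_t cdev_id, struct rte_crypto_sym_xform *cipher_xform)
{
    /* cipher_xform->next must point at the auth xform (see the sketch above) */
    return rte_cryptodev_sym_session_create(cdev_id, cipher_xform);
}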
@@ -173,10 +155,10 @@ aesni_gcm_get_session(struct aesni_gcm_qp *qp, struct rte_crypto_sym_op *op)
         return sess;
 
     sess = (struct aesni_gcm_session *)
-            ((struct rte_cryptodev_session *)_sess)->_private;
+            ((struct rte_cryptodev_sym_session *)_sess)->_private;
 
-    if (unlikely(aesni_gcm_set_session_parameters(qp->ops,
-            sess, op->xform) != 0)) {
+    if (unlikely(aesni_gcm_set_session_parameters(sess,
+            op->xform) != 0)) {
         rte_mempool_put(qp->sess_mp, _sess);
         sess = NULL;
     }
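With session-less mode now supported, an op can carry its own xform chain instead
of an attached session; the branch above then allocates a session from qp->sess_mp
on the fly. A sketch, assuming rte_crypto_op_alloc() and
rte_crypto_op_sym_xforms_alloc() from the same release's rte_crypto.h:

/* Sketch only: building a session-less op. */
#include <rte_crypto.h>

static struct rte_crypto_op *
build_sessionless_op(struct rte_mempool *op_pool)
{
    struct rte_crypto_op *op =
        rte_crypto_op_alloc(op_pool, RTE_CRYPTO_OP_TYPE_SYMMETRIC);

    if (op == NULL)
        return NULL;
    /* allocate a two-element xform chain in the op's private area;
     * op->sym->session stays NULL, which selects the session-less path */
    if (rte_crypto_op_sym_xforms_alloc(op, 2) == NULL) {
        rte_crypto_op_free(op);
        return NULL;
    }
    /* fill op->sym->xform with cipher+auth parameters as shown earlier */
    return op;
}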
@@ -196,19 +178,45 @@ aesni_gcm_get_session(struct aesni_gcm_qp *qp, struct rte_crypto_sym_op *op)
  *
  */
 static int
-process_gcm_crypto_op(struct aesni_gcm_qp *qp, struct rte_crypto_sym_op *op,
+process_gcm_crypto_op(struct rte_crypto_sym_op *op,
         struct aesni_gcm_session *session)
 {
     uint8_t *src, *dst;
-    struct rte_mbuf *m = op->m_src;
+    struct rte_mbuf *m_src = op->m_src;
+    uint32_t offset = op->cipher.data.offset;
+    uint32_t part_len, total_len, data_len;
+
+    RTE_ASSERT(m_src != NULL);
+
+    while (offset >= m_src->data_len) {
+        offset -= m_src->data_len;
+        m_src = m_src->next;
+
+        RTE_ASSERT(m_src != NULL);
+    }
+
+    data_len = m_src->data_len - offset;
+    part_len = (data_len < op->cipher.data.length) ? data_len :
+            op->cipher.data.length;
+
+    /* Destination buffer is required when segmented source buffer */
+    RTE_ASSERT((part_len == op->cipher.data.length) ||
+            ((part_len != op->cipher.data.length) &&
+                    (op->m_dst != NULL)));
+    /* Segmented destination buffer is not supported */
+    RTE_ASSERT((op->m_dst == NULL) ||
+            ((op->m_dst != NULL) &&
+                    rte_pktmbuf_is_contiguous(op->m_dst)));
 
-    src = rte_pktmbuf_mtod(m, uint8_t *) + op->cipher.data.offset;
     dst = op->m_dst ?
             rte_pktmbuf_mtod_offset(op->m_dst, uint8_t *,
                     op->cipher.data.offset) :
-            rte_pktmbuf_mtod_offset(m, uint8_t *,
+            rte_pktmbuf_mtod_offset(op->m_src, uint8_t *,
                     op->cipher.data.offset);
 
+    src = rte_pktmbuf_mtod_offset(m_src, uint8_t *, offset);
+
     /* sanity checks */
     if (op->cipher.iv.length != 16 && op->cipher.iv.length != 12 &&
             op->cipher.iv.length != 0) {
@@ -225,48 +233,81 @@ process_gcm_crypto_op(struct rte_crypto_sym_op *op,
         *iv_padd = rte_bswap32(1);
     }
 
-    if (op->auth.aad.length != 12 && op->auth.aad.length != 8 &&
-            op->auth.aad.length != 0) {
-        GCM_LOG_ERR("iv");
-        return -1;
-    }
-
     if (op->auth.digest.length != 16 &&
             op->auth.digest.length != 12 &&
-            op->auth.digest.length != 8 &&
-            op->auth.digest.length != 0) {
-        GCM_LOG_ERR("iv");
+            op->auth.digest.length != 8) {
+        GCM_LOG_ERR("digest");
         return -1;
     }
 
     if (session->op == AESNI_GCM_OP_AUTHENTICATED_ENCRYPTION) {
 
-        (*qp->ops->gcm.enc)(&session->gdata, dst, src,
-                (uint64_t)op->cipher.data.length,
+        aesni_gcm_enc[session->key].init(&session->gdata,
                 op->cipher.iv.data,
                 op->auth.aad.data,
-                (uint64_t)op->auth.aad.length,
+                (uint64_t)op->auth.aad.length);
+
+        aesni_gcm_enc[session->key].update(&session->gdata, dst, src,
+                (uint64_t)part_len);
+        total_len = op->cipher.data.length - part_len;
+
+        while (total_len) {
+            dst += part_len;
+            m_src = m_src->next;
+
+            RTE_ASSERT(m_src != NULL);
+
+            src = rte_pktmbuf_mtod(m_src, uint8_t *);
+            part_len = (m_src->data_len < total_len) ?
+                    m_src->data_len : total_len;
+
+            aesni_gcm_enc[session->key].update(&session->gdata,
+                    dst, src,
+                    (uint64_t)part_len);
+            total_len -= part_len;
+        }
+
+        aesni_gcm_enc[session->key].finalize(&session->gdata,
                 op->auth.digest.data,
                 (uint64_t)op->auth.digest.length);
-    } else if (session->op == AESNI_GCM_OP_AUTHENTICATED_DECRYPTION) {
-        uint8_t *auth_tag = (uint8_t *)rte_pktmbuf_append(m,
+    } else { /* session->op == AESNI_GCM_OP_AUTHENTICATED_DECRYPTION */
+        uint8_t *auth_tag = (uint8_t *)rte_pktmbuf_append(op->m_dst ?
+                op->m_dst : op->m_src,
                 op->auth.digest.length);
 
         if (!auth_tag) {
-            GCM_LOG_ERR("iv");
+            GCM_LOG_ERR("auth_tag");
             return -1;
         }
 
-        (*qp->ops->gcm.dec)(&session->gdata, dst, src,
-                (uint64_t)op->cipher.data.length,
+        aesni_gcm_dec[session->key].init(&session->gdata,
                 op->cipher.iv.data,
                 op->auth.aad.data,
-                (uint64_t)op->auth.aad.length,
+                (uint64_t)op->auth.aad.length);
+
+        aesni_gcm_dec[session->key].update(&session->gdata, dst, src,
+                (uint64_t)part_len);
+        total_len = op->cipher.data.length - part_len;
+
+        while (total_len) {
+            dst += part_len;
+            m_src = m_src->next;
+
+            RTE_ASSERT(m_src != NULL);
+
+            src = rte_pktmbuf_mtod(m_src, uint8_t *);
+            part_len = (m_src->data_len < total_len) ?
+                    m_src->data_len : total_len;
+
+            aesni_gcm_dec[session->key].update(&session->gdata,
+                    dst, src,
+                    (uint64_t)part_len);
+            total_len -= part_len;
+        }
+
+        aesni_gcm_dec[session->key].finalize(&session->gdata,
                 auth_tag,
                 (uint64_t)op->auth.digest.length);
-    } else {
-        GCM_LOG_ERR("iv");
-        return -1;
-    }
 
     return 0;
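To exercise the new scatter-gather path, the source mbuf may be segmented while
the destination stays contiguous, as the asserts above require. A sketch, assuming
rte_pktmbuf_chain() from librte_mbuf (pool setup and error cleanup elided):

/* Sketch only: a two-segment source for op->sym->m_src. */
#include <rte_mbuf.h>

static struct rte_mbuf *
build_segmented_source(struct rte_mempool *mp, uint16_t seg_len)
{
    struct rte_mbuf *head = rte_pktmbuf_alloc(mp);
    struct rte_mbuf *tail = rte_pktmbuf_alloc(mp);

    if (head == NULL || tail == NULL)
        return NULL;
    rte_pktmbuf_append(head, seg_len);      /* first data segment */
    rte_pktmbuf_append(tail, seg_len);      /* second data segment */
    if (rte_pktmbuf_chain(head, tail) != 0) /* links tail as head->next */
        return NULL;
    return head;    /* op->sym->m_dst must still be contiguous */
}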
@@ -356,21 +397,7 @@ aesni_gcm_pmd_enqueue_burst(void *queue_pair,
             break;
         }
 
-#ifdef RTE_LIBRTE_PMD_AESNI_GCM_DEBUG
-        if (!rte_pktmbuf_is_contiguous(ops[i]->sym->m_src) ||
-                (ops[i]->sym->m_dst != NULL &&
-                !rte_pktmbuf_is_contiguous(
-                        ops[i]->sym->m_dst))) {
-            ops[i]->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
-            GCM_LOG_ERR("PMD supports only contiguous mbufs, "
-                "op (%p) provides noncontiguous mbuf as "
-                "source/destination buffer.\n", ops[i]);
-            qp->qp_stats.enqueue_err_count++;
-            break;
-        }
-#endif
-
-        retval = process_gcm_crypto_op(qp, ops[i]->sym, sess);
+        retval = process_gcm_crypto_op(ops[i]->sym, sess);
         if (retval < 0) {
             ops[i]->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
             qp->qp_stats.enqueue_err_count++;
@@ -406,7 +433,6 @@ aesni_gcm_create(struct rte_crypto_vdev_init_params *init_params)
 {
     struct rte_cryptodev *dev;
     struct aesni_gcm_private *internals;
-    enum aesni_gcm_vector_mode vector_mode;
 
     if (init_params->name[0] == '\0') {
         int ret = rte_cryptodev_pmd_create_dev_name(
@@ -425,18 +451,6 @@ aesni_gcm_create(struct rte_crypto_vdev_init_params *init_params)
         return -EFAULT;
     }
 
-    /* Check CPU for supported vector instruction set */
-    if (rte_cpu_get_flag_enabled(RTE_CPUFLAG_AVX2))
-        vector_mode = RTE_AESNI_GCM_AVX2;
-    else if (rte_cpu_get_flag_enabled(RTE_CPUFLAG_AVX))
-        vector_mode = RTE_AESNI_GCM_AVX;
-    else if (rte_cpu_get_flag_enabled(RTE_CPUFLAG_SSE4_1))
-        vector_mode = RTE_AESNI_GCM_SSE;
-    else {
-        GCM_LOG_ERR("Vector instructions are not supported by CPU");
-        return -EFAULT;
-    }
-
     dev = rte_cryptodev_pmd_virtual_dev_init(init_params->name,
             sizeof(struct aesni_gcm_private), init_params->socket_id);
     if (dev == NULL) {
@@ -453,27 +467,11 @@ aesni_gcm_create(struct rte_crypto_vdev_init_params *init_params)
 
     dev->feature_flags = RTE_CRYPTODEV_FF_SYMMETRIC_CRYPTO |
             RTE_CRYPTODEV_FF_SYM_OPERATION_CHAINING |
-            RTE_CRYPTODEV_FF_CPU_AESNI;
+            RTE_CRYPTODEV_FF_CPU_AESNI |
+            RTE_CRYPTODEV_FF_MBUF_SCATTER_GATHER;
 
-    switch (vector_mode) {
-    case RTE_AESNI_GCM_SSE:
-        dev->feature_flags |= RTE_CRYPTODEV_FF_CPU_SSE;
-        break;
-    case RTE_AESNI_GCM_AVX:
-        dev->feature_flags |= RTE_CRYPTODEV_FF_CPU_AVX;
-        break;
-    case RTE_AESNI_GCM_AVX2:
-        dev->feature_flags |= RTE_CRYPTODEV_FF_CPU_AVX2;
-        break;
-    default:
-        break;
-    }
-
-    /* Set vector instructions mode supported */
     internals = dev->data->dev_private;
 
-    internals->vector_mode = vector_mode;
-
     internals->max_nb_queue_pairs = init_params->max_nb_queue_pairs;
     internals->max_nb_sessions = init_params->max_nb_sessions;
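With RTE_CRYPTODEV_FF_MBUF_SCATTER_GATHER now advertised, applications can gate
segmented inputs on the device's feature flags. A small sketch using
rte_cryptodev_info_get():

/* Sketch only: check the scatter-gather capability before chaining mbufs. */
#include <rte_cryptodev.h>

static int
dev_supports_sgl(uint8_t cdev_id)
{
    struct rte_cryptodev_info info;

    rte_cryptodev_info_get(cdev_id, &info);
    return (info.feature_flags & RTE_CRYPTODEV_FF_MBUF_SCATTER_GATHER) != 0;
}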
@@ -39,6 +39,31 @@
 #include "aesni_gcm_pmd_private.h"
 
 static const struct rte_cryptodev_capabilities aesni_gcm_pmd_capabilities[] = {
+    {    /* AES GMAC (AUTH) */
+        .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+        {.sym = {
+            .xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+            {.auth = {
+                .algo = RTE_CRYPTO_AUTH_AES_GMAC,
+                .block_size = 16,
+                .key_size = {
+                    .min = 16,
+                    .max = 32,
+                    .increment = 16
+                },
+                .digest_size = {
+                    .min = 8,
+                    .max = 16,
+                    .increment = 4
+                },
+                .aad_size = {
+                    .min = 0,
+                    .max = 65535,
+                    .increment = 1
+                }
+            }, }
+        }, }
+    },
     {    /* AES GCM (AUTH) */
         .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
         {.sym = {
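The new AES GMAC entry can be discovered at runtime by walking the device's
capability list. A sketch assuming the list is terminated by an entry whose op
type is RTE_CRYPTO_OP_TYPE_UNDEFINED, as the cryptodev API of this release does:

/* Sketch only: probe for the AES GMAC auth capability added above. */
#include <rte_cryptodev.h>

static int
dev_supports_aes_gmac(uint8_t cdev_id)
{
    struct rte_cryptodev_info info;
    const struct rte_cryptodev_capabilities *cap;

    rte_cryptodev_info_get(cdev_id, &info);
    for (cap = info.capabilities;
            cap->op != RTE_CRYPTO_OP_TYPE_UNDEFINED; cap++) {
        if (cap->sym.xform_type == RTE_CRYPTO_SYM_XFORM_AUTH &&
                cap->sym.auth.algo == RTE_CRYPTO_AUTH_AES_GMAC)
            return 1;
    }
    return 0;
}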
@@ -48,8 +73,8 @@ static const struct rte_cryptodev_capabilities aesni_gcm_pmd_capabilities[] = {
                 .block_size = 16,
                 .key_size = {
                     .min = 16,
-                    .max = 16,
-                    .increment = 0
+                    .max = 32,
+                    .increment = 16
                 },
                 .digest_size = {
                     .min = 8,
@@ -57,9 +82,9 @@ static const struct rte_cryptodev_capabilities aesni_gcm_pmd_capabilities[] = {
                     .increment = 4
                 },
                 .aad_size = {
-                    .min = 8,
-                    .max = 12,
-                    .increment = 4
+                    .min = 0,
+                    .max = 65535,
+                    .increment = 1
                 }
             }, }
         }, }
@@ -73,8 +98,8 @@ static const struct rte_cryptodev_capabilities aesni_gcm_pmd_capabilities[] = {
                 .block_size = 16,
                 .key_size = {
                     .min = 16,
-                    .max = 16,
-                    .increment = 0
+                    .max = 32,
+                    .increment = 16
                 },
                 .iv_size = {
                     .min = 12,
@@ -221,7 +246,6 @@ aesni_gcm_pmd_qp_setup(struct rte_cryptodev *dev, uint16_t qp_id,
         int socket_id)
 {
     struct aesni_gcm_qp *qp = NULL;
-    struct aesni_gcm_private *internals = dev->data->dev_private;
 
     /* Free memory prior to re-allocation if needed. */
     if (dev->data->queue_pairs[qp_id] != NULL)
@@ -239,8 +263,6 @@ aesni_gcm_pmd_qp_setup(struct rte_cryptodev *dev, uint16_t qp_id,
     if (aesni_gcm_pmd_qp_set_unique_name(dev, qp))
         goto qp_setup_cleanup;
 
-    qp->ops = &gcm_ops[internals->vector_mode];
-
     qp->processed_pkts = aesni_gcm_pmd_qp_create_processed_pkts_ring(qp,
             qp_conf->nb_descriptors, socket_id);
     if (qp->processed_pkts == NULL)
@@ -291,18 +313,15 @@ aesni_gcm_pmd_session_get_size(struct rte_cryptodev *dev __rte_unused)
 
 /** Configure a aesni gcm session from a crypto xform chain */
 static void *
-aesni_gcm_pmd_session_configure(struct rte_cryptodev *dev,
+aesni_gcm_pmd_session_configure(struct rte_cryptodev *dev __rte_unused,
         struct rte_crypto_sym_xform *xform, void *sess)
 {
-    struct aesni_gcm_private *internals = dev->data->dev_private;
-
     if (unlikely(sess == NULL)) {
         GCM_LOG_ERR("invalid session struct");
         return NULL;
     }
 
-    if (aesni_gcm_set_session_parameters(&gcm_ops[internals->vector_mode],
-            sess, xform) != 0) {
+    if (aesni_gcm_set_session_parameters(sess, xform) != 0) {
         GCM_LOG_ERR("failed configure session parameters");
         return NULL;
     }
@@ -58,8 +58,6 @@
 
 /** private data structure for each virtual AESNI GCM device */
 struct aesni_gcm_private {
-    enum aesni_gcm_vector_mode vector_mode;
-    /**< Vector mode */
     unsigned max_nb_queue_pairs;
     /**< Max number of queue pairs supported by device */
     unsigned max_nb_sessions;
@@ -71,8 +69,6 @@ struct aesni_gcm_qp {
     /**< Queue Pair Identifier */
     char name[RTE_CRYPTODEV_NAME_LEN];
    /**< Unique Queue Pair Name */
-    const struct aesni_gcm_ops *ops;
-    /**< Architecture dependent function pointer table of the gcm APIs */
     struct rte_ring *processed_pkts;
     /**< Ring for placing process packets */
     struct rte_mempool *sess_mp;
@@ -87,10 +83,17 @@ enum aesni_gcm_operation {
     AESNI_GCM_OP_AUTHENTICATED_DECRYPTION
 };
 
+enum aesni_gcm_key {
+    AESNI_GCM_KEY_128,
+    AESNI_GCM_KEY_256
+};
+
 /** AESNI GCM private session structure */
 struct aesni_gcm_session {
     enum aesni_gcm_operation op;
     /**< GCM operation type */
+    enum aesni_gcm_key key;
+    /**< GCM key type */
     struct gcm_data gdata __rte_cache_aligned;
     /**< GCM parameters */
 };
@@ -98,7 +101,6 @@ struct aesni_gcm_session {
 
 /**
  * Setup GCM session parameters
- * @param ops    gcm ops function pointer table
  * @param sess   aesni gcm session structure
  * @param xform  crypto transform chain
  *
@@ -107,8 +109,7 @@ struct aesni_gcm_session {
  * - On failure returns error code < 0
  */
 extern int
-aesni_gcm_set_session_parameters(const struct aesni_gcm_ops *ops,
-        struct aesni_gcm_session *sess,
+aesni_gcm_set_session_parameters(struct aesni_gcm_session *sess,
         const struct rte_crypto_sym_xform *xform);
@@ -137,8 +137,7 @@ _LDLIBS-$(CONFIG_RTE_LIBRTE_VMXNET3_PMD) += -lrte_pmd_vmxnet3_uio
 ifeq ($(CONFIG_RTE_LIBRTE_CRYPTODEV),y)
 _LDLIBS-$(CONFIG_RTE_LIBRTE_PMD_AESNI_MB) += -lrte_pmd_aesni_mb
 _LDLIBS-$(CONFIG_RTE_LIBRTE_PMD_AESNI_MB) += -L$(AESNI_MULTI_BUFFER_LIB_PATH) -lIPSec_MB
-_LDLIBS-$(CONFIG_RTE_LIBRTE_PMD_AESNI_GCM) += -lrte_pmd_aesni_gcm -lcrypto
-_LDLIBS-$(CONFIG_RTE_LIBRTE_PMD_AESNI_GCM) += -L$(AESNI_MULTI_BUFFER_LIB_PATH) -lIPSec_MB
+_LDLIBS-$(CONFIG_RTE_LIBRTE_PMD_AESNI_GCM) += -lrte_pmd_aesni_gcm -lisal_crypto
 _LDLIBS-$(CONFIG_RTE_LIBRTE_PMD_OPENSSL) += -lrte_pmd_openssl -lcrypto
 _LDLIBS-$(CONFIG_RTE_LIBRTE_PMD_NULL_CRYPTO) += -lrte_pmd_null_crypto
 _LDLIBS-$(CONFIG_RTE_LIBRTE_PMD_QAT) += -lrte_pmd_qat -lcrypto