aesni_gcm: add driver for AES-GCM crypto operations

This patch provides the implementation of an AES-NI accelerated crypto PMD
which depends on Intel's multi-buffer library; see the white paper
"Fast Multi-buffer IPsec Implementations on Intel® Architecture Processors".
This PMD supports AES-GCM authenticated encryption and authenticated
decryption using 128-bit AES keys. The patch also contains the related
unit test functions.

Signed-off-by: Declan Doherty <declan.doherty@intel.com>
Signed-off-by: Pablo de Lara <pablo.de.lara.guarch@intel.com>
Acked-by: John Griffin <john.griffin@intel.com>
commit eec136f3c5 (parent a59ffe7eb9)
@@ -349,6 +349,11 @@ Null PMD
M: Tetsuya Mukawa <mukawa@igel.co.jp>
F: drivers/net/null/

Intel AES-NI GCM PMD
M: Declan Doherty <declan.doherty@intel.com>
F: drivers/crypto/aesni_gcm/
F: doc/guides/cryptodevs/aesni_gcm.rst

Intel AES-NI Multi-Buffer
M: Declan Doherty <declan.doherty@intel.com>
F: drivers/crypto/aesni_mb/

@ -44,6 +44,8 @@
|
||||
#include "test_cryptodev.h"
|
||||
#include "test_cryptodev_snow3g_test_vectors.h"
|
||||
#include "test_cryptodev_snow3g_hash_test_vectors.h"
|
||||
#include "test_cryptodev_gcm_test_vectors.h"
|
||||
|
||||
static enum rte_cryptodev_type gbl_cryptodev_type;
|
||||
|
||||
struct crypto_testsuite_params {
|
||||
@ -195,6 +197,21 @@ testsuite_setup(void)
|
||||
}
|
||||
}
|
||||
|
||||
/* Create 2 AESNI GCM devices if required */
|
||||
if (gbl_cryptodev_type == RTE_CRYPTODEV_AESNI_GCM_PMD) {
|
||||
nb_devs = rte_cryptodev_count_devtype(
|
||||
RTE_CRYPTODEV_AESNI_GCM_PMD);
|
||||
if (nb_devs < 2) {
|
||||
for (i = nb_devs; i < 2; i++) {
|
||||
TEST_ASSERT_SUCCESS(rte_eal_vdev_init(
|
||||
CRYPTODEV_NAME_AESNI_GCM_PMD, NULL),
|
||||
"Failed to create instance %u of"
|
||||
" pmd : %s",
|
||||
i, CRYPTODEV_NAME_AESNI_GCM_PMD);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/* Create 2 Snow3G devices if required */
|
||||
if (gbl_cryptodev_type == RTE_CRYPTODEV_SNOW3G_PMD) {
|
||||
nb_devs = rte_cryptodev_count_devtype(RTE_CRYPTODEV_SNOW3G_PMD);
|
||||
@ -2779,6 +2796,400 @@ test_snow3g_encrypted_authentication_test_case_1(void)
|
||||
|
||||
/* ***** AES-GCM Tests ***** */
|
||||
|
||||
static int
|
||||
create_gcm_session(uint8_t dev_id, enum rte_crypto_cipher_operation op,
|
||||
const uint8_t *key, const uint8_t key_len,
|
||||
const uint8_t aad_len, const uint8_t auth_len)
|
||||
{
|
||||
uint8_t cipher_key[key_len];
|
||||
|
||||
struct crypto_unittest_params *ut_params = &unittest_params;
|
||||
|
||||
|
||||
memcpy(cipher_key, key, key_len);
|
||||
|
||||
/* Setup Cipher Parameters */
|
||||
ut_params->cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
|
||||
ut_params->cipher_xform.next = NULL;
|
||||
|
||||
ut_params->cipher_xform.cipher.algo = RTE_CRYPTO_CIPHER_AES_GCM;
|
||||
ut_params->cipher_xform.cipher.op = op;
|
||||
ut_params->cipher_xform.cipher.key.data = cipher_key;
|
||||
ut_params->cipher_xform.cipher.key.length = key_len;
|
||||
|
||||
#ifdef RTE_APP_TEST_DEBUG
|
||||
rte_hexdump(stdout, "key:", key, key_len);
|
||||
#endif
|
||||
/* Setup Authentication Parameters */
|
||||
ut_params->auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
|
||||
ut_params->auth_xform.next = NULL;
|
||||
|
||||
ut_params->auth_xform.auth.algo = RTE_CRYPTO_AUTH_AES_GCM;
|
||||
|
||||
ut_params->auth_xform.auth.digest_length = auth_len;
|
||||
ut_params->auth_xform.auth.add_auth_data_length = aad_len;
|
||||
ut_params->auth_xform.auth.key.length = 0;
|
||||
ut_params->auth_xform.auth.key.data = NULL;
|
||||
|
||||
if (op == RTE_CRYPTO_CIPHER_OP_ENCRYPT) {
|
||||
ut_params->cipher_xform.next = &ut_params->auth_xform;
|
||||
|
||||
/* Create Crypto session*/
|
||||
ut_params->sess = rte_cryptodev_sym_session_create(dev_id,
|
||||
&ut_params->cipher_xform);
|
||||
} else {/* Create Crypto session*/
|
||||
ut_params->auth_xform.next = &ut_params->cipher_xform;
|
||||
ut_params->sess = rte_cryptodev_sym_session_create(dev_id,
|
||||
&ut_params->auth_xform);
|
||||
}
|
||||
|
||||
TEST_ASSERT_NOT_NULL(ut_params->sess, "Session creation failed");
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
static int
|
||||
create_gcm_operation(enum rte_crypto_cipher_operation op,
|
||||
const uint8_t *auth_tag, const unsigned auth_tag_len,
|
||||
const uint8_t *iv, const unsigned iv_len,
|
||||
const uint8_t *aad, const unsigned aad_len,
|
||||
const unsigned data_len, unsigned data_pad_len)
|
||||
{
|
||||
struct crypto_testsuite_params *ts_params = &testsuite_params;
|
||||
struct crypto_unittest_params *ut_params = &unittest_params;
|
||||
|
||||
unsigned iv_pad_len = 0, aad_buffer_len;
|
||||
|
||||
/* Generate Crypto op data structure */
|
||||
ut_params->op = rte_crypto_op_alloc(ts_params->op_mpool,
|
||||
RTE_CRYPTO_OP_TYPE_SYMMETRIC);
|
||||
TEST_ASSERT_NOT_NULL(ut_params->op,
|
||||
"Failed to allocate symmetric crypto operation struct");
|
||||
|
||||
struct rte_crypto_sym_op *sym_op = ut_params->op->sym;
|
||||
|
||||
|
||||
|
||||
sym_op->auth.digest.data = (uint8_t *)rte_pktmbuf_append(
|
||||
ut_params->ibuf, auth_tag_len);
|
||||
TEST_ASSERT_NOT_NULL(sym_op->auth.digest.data,
|
||||
"no room to append digest");
|
||||
sym_op->auth.digest.phys_addr = rte_pktmbuf_mtophys_offset(
|
||||
ut_params->ibuf, data_pad_len);
|
||||
sym_op->auth.digest.length = auth_tag_len;
|
||||
|
||||
if (op == RTE_CRYPTO_CIPHER_OP_DECRYPT) {
|
||||
rte_memcpy(sym_op->auth.digest.data, auth_tag, auth_tag_len);
|
||||
#ifdef RTE_APP_TEST_DEBUG
|
||||
rte_hexdump(stdout, "digest:",
|
||||
ut_params->op->digest.data,
|
||||
ut_params->op->digest.length);
|
||||
#endif
|
||||
}
|
||||
|
||||
/* iv */
|
||||
iv_pad_len = RTE_ALIGN_CEIL(iv_len, 16);
|
||||
|
||||
sym_op->cipher.iv.data = (uint8_t *)rte_pktmbuf_prepend(
|
||||
ut_params->ibuf, iv_pad_len);
|
||||
TEST_ASSERT_NOT_NULL(sym_op->cipher.iv.data, "no room to prepend iv");
|
||||
|
||||
memset(sym_op->cipher.iv.data, 0, iv_pad_len);
|
||||
sym_op->cipher.iv.phys_addr = rte_pktmbuf_mtophys(ut_params->ibuf);
|
||||
sym_op->cipher.iv.length = iv_pad_len;
|
||||
|
||||
rte_memcpy(sym_op->cipher.iv.data, iv, iv_len);
|
||||
|
||||
/* CalcY0 */
|
||||
if (iv_len != 16)
|
||||
sym_op->cipher.iv.data[15] = 1;
|
||||
|
||||
/*
 * Always allocate the AAD buffer rounded up to the block size.
 * The cryptodev API requires that the array be big enough to hold
 * the AAD, plus any padding needed to round it up to the nearest
 * multiple of the block size (16 bytes).
 */
|
||||
aad_buffer_len = ALIGN_POW2_ROUNDUP(aad_len, 16);
|
||||
|
||||
sym_op->auth.aad.data = (uint8_t *)rte_pktmbuf_prepend(
|
||||
ut_params->ibuf, aad_buffer_len);
|
||||
TEST_ASSERT_NOT_NULL(sym_op->auth.aad.data,
|
||||
"no room to prepend aad");
|
||||
sym_op->auth.aad.phys_addr = rte_pktmbuf_mtophys(
|
||||
ut_params->ibuf);
|
||||
sym_op->auth.aad.length = aad_len;
|
||||
|
||||
memset(sym_op->auth.aad.data, 0, aad_buffer_len);
|
||||
rte_memcpy(sym_op->auth.aad.data, aad, aad_len);
|
||||
|
||||
#ifdef RTE_APP_TEST_DEBUG
|
||||
rte_hexdump(stdout, "iv:", ut_params->op->iv.data, iv_pad_len);
|
||||
rte_hexdump(stdout, "aad:",
|
||||
ut_params->op->additional_auth.data, aad_len);
|
||||
#endif
|
||||
sym_op->cipher.data.length = data_len;
|
||||
sym_op->cipher.data.offset = aad_buffer_len + iv_pad_len;
|
||||
|
||||
sym_op->auth.data.offset = aad_buffer_len + iv_pad_len;
|
||||
sym_op->auth.data.length = data_len;
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
static int
|
||||
test_mb_AES_GCM_authenticated_encryption(const struct gcm_test_data *tdata)
|
||||
{
|
||||
struct crypto_testsuite_params *ts_params = &testsuite_params;
|
||||
struct crypto_unittest_params *ut_params = &unittest_params;
|
||||
|
||||
int retval;
|
||||
|
||||
uint8_t *plaintext, *ciphertext, *auth_tag;
|
||||
uint16_t plaintext_pad_len;
|
||||
|
||||
/* Create GCM session */
|
||||
retval = create_gcm_session(ts_params->valid_devs[0],
|
||||
RTE_CRYPTO_CIPHER_OP_ENCRYPT,
|
||||
tdata->key.data, tdata->key.len,
|
||||
tdata->aad.len, tdata->auth_tag.len);
|
||||
if (retval < 0)
|
||||
return retval;
|
||||
|
||||
|
||||
ut_params->ibuf = rte_pktmbuf_alloc(ts_params->mbuf_pool);
|
||||
|
||||
/* clear mbuf payload */
|
||||
memset(rte_pktmbuf_mtod(ut_params->ibuf, uint8_t *), 0,
|
||||
rte_pktmbuf_tailroom(ut_params->ibuf));
|
||||
|
||||
/*
|
||||
* Append data which is padded to a multiple
|
||||
* of the algorithms block size
|
||||
*/
|
||||
plaintext_pad_len = RTE_ALIGN_CEIL(tdata->plaintext.len, 16);
|
||||
|
||||
plaintext = (uint8_t *)rte_pktmbuf_append(ut_params->ibuf,
|
||||
plaintext_pad_len);
|
||||
memcpy(plaintext, tdata->plaintext.data, tdata->plaintext.len);
|
||||
|
||||
#ifdef RTE_APP_TEST_DEBUG
|
||||
rte_hexdump(stdout, "plaintext:", plaintext, tdata->plaintext.len);
|
||||
#endif
|
||||
/* Create GCM operation */
|
||||
retval = create_gcm_operation(RTE_CRYPTO_CIPHER_OP_ENCRYPT,
|
||||
tdata->auth_tag.data, tdata->auth_tag.len,
|
||||
tdata->iv.data, tdata->iv.len,
|
||||
tdata->aad.data, tdata->aad.len,
|
||||
tdata->plaintext.len, plaintext_pad_len);
|
||||
if (retval < 0)
|
||||
return retval;
|
||||
|
||||
rte_crypto_op_attach_sym_session(ut_params->op, ut_params->sess);
|
||||
|
||||
ut_params->op->sym->m_src = ut_params->ibuf;
|
||||
|
||||
/* Process crypto operation */
|
||||
TEST_ASSERT_NOT_NULL(process_crypto_request(ts_params->valid_devs[0],
|
||||
ut_params->op), "failed to process sym crypto op");
|
||||
|
||||
TEST_ASSERT_EQUAL(ut_params->op->status, RTE_CRYPTO_OP_STATUS_SUCCESS,
|
||||
"crypto op processing failed");
|
||||
|
||||
if (ut_params->op->sym->m_dst) {
|
||||
ciphertext = rte_pktmbuf_mtod(ut_params->op->sym->m_dst,
|
||||
uint8_t *);
|
||||
auth_tag = rte_pktmbuf_mtod_offset(ut_params->op->sym->m_dst,
|
||||
uint8_t *, plaintext_pad_len);
|
||||
} else {
|
||||
ciphertext = plaintext;
|
||||
auth_tag = plaintext + plaintext_pad_len;
|
||||
}
|
||||
|
||||
#ifdef RTE_APP_TEST_DEBUG
|
||||
rte_hexdump(stdout, "ciphertext:", ciphertext, tdata->ciphertext.len);
|
||||
rte_hexdump(stdout, "auth tag:", auth_tag, tdata->auth_tag.len);
|
||||
#endif
|
||||
/* Validate obuf */
|
||||
TEST_ASSERT_BUFFERS_ARE_EQUAL(
|
||||
ciphertext,
|
||||
tdata->ciphertext.data,
|
||||
tdata->ciphertext.len,
|
||||
"GCM Ciphertext data not as expected");
|
||||
|
||||
TEST_ASSERT_BUFFERS_ARE_EQUAL(
|
||||
auth_tag,
|
||||
tdata->auth_tag.data,
|
||||
tdata->auth_tag.len,
|
||||
"GCM Generated auth tag not as expected");
|
||||
|
||||
return 0;
|
||||
|
||||
}
|
||||
|
||||
static int
|
||||
test_mb_AES_GCM_authenticated_encryption_test_case_1(void)
|
||||
{
|
||||
return test_mb_AES_GCM_authenticated_encryption(&gcm_test_case_1);
|
||||
}
|
||||
|
||||
static int
|
||||
test_mb_AES_GCM_authenticated_encryption_test_case_2(void)
|
||||
{
|
||||
return test_mb_AES_GCM_authenticated_encryption(&gcm_test_case_2);
|
||||
}
|
||||
|
||||
static int
|
||||
test_mb_AES_GCM_authenticated_encryption_test_case_3(void)
|
||||
{
|
||||
return test_mb_AES_GCM_authenticated_encryption(&gcm_test_case_3);
|
||||
}
|
||||
|
||||
static int
|
||||
test_mb_AES_GCM_authenticated_encryption_test_case_4(void)
|
||||
{
|
||||
return test_mb_AES_GCM_authenticated_encryption(&gcm_test_case_4);
|
||||
}
|
||||
|
||||
static int
|
||||
test_mb_AES_GCM_authenticated_encryption_test_case_5(void)
|
||||
{
|
||||
return test_mb_AES_GCM_authenticated_encryption(&gcm_test_case_5);
|
||||
}
|
||||
|
||||
static int
|
||||
test_mb_AES_GCM_authenticated_encryption_test_case_6(void)
|
||||
{
|
||||
return test_mb_AES_GCM_authenticated_encryption(&gcm_test_case_6);
|
||||
}
|
||||
|
||||
static int
|
||||
test_mb_AES_GCM_authenticated_encryption_test_case_7(void)
|
||||
{
|
||||
return test_mb_AES_GCM_authenticated_encryption(&gcm_test_case_7);
|
||||
}
|
||||
|
||||
static int
|
||||
test_mb_AES_GCM_authenticated_decryption(const struct gcm_test_data *tdata)
|
||||
{
|
||||
struct crypto_testsuite_params *ts_params = &testsuite_params;
|
||||
struct crypto_unittest_params *ut_params = &unittest_params;
|
||||
|
||||
int retval;
|
||||
|
||||
uint8_t *plaintext, *ciphertext;
|
||||
uint16_t ciphertext_pad_len;
|
||||
|
||||
/* Create GCM session */
|
||||
retval = create_gcm_session(ts_params->valid_devs[0],
|
||||
RTE_CRYPTO_CIPHER_OP_DECRYPT,
|
||||
tdata->key.data, tdata->key.len,
|
||||
tdata->aad.len, tdata->auth_tag.len);
|
||||
if (retval < 0)
|
||||
return retval;
|
||||
|
||||
|
||||
/* alloc mbuf and set payload */
|
||||
ut_params->ibuf = rte_pktmbuf_alloc(ts_params->mbuf_pool);
|
||||
|
||||
memset(rte_pktmbuf_mtod(ut_params->ibuf, uint8_t *), 0,
|
||||
rte_pktmbuf_tailroom(ut_params->ibuf));
|
||||
|
||||
ciphertext_pad_len = RTE_ALIGN_CEIL(tdata->ciphertext.len, 16);
|
||||
|
||||
ciphertext = (uint8_t *)rte_pktmbuf_append(ut_params->ibuf,
|
||||
ciphertext_pad_len);
|
||||
memcpy(ciphertext, tdata->ciphertext.data, tdata->ciphertext.len);
|
||||
|
||||
#ifdef RTE_APP_TEST_DEBUG
|
||||
rte_hexdump(stdout, "ciphertext:", ciphertext, tdata->ciphertext.len);
|
||||
#endif
|
||||
/* Create GCM operation */
|
||||
retval = create_gcm_operation(RTE_CRYPTO_CIPHER_OP_DECRYPT,
|
||||
tdata->auth_tag.data, tdata->auth_tag.len,
|
||||
tdata->iv.data, tdata->iv.len,
|
||||
tdata->aad.data, tdata->aad.len,
|
||||
tdata->ciphertext.len, ciphertext_pad_len);
|
||||
if (retval < 0)
|
||||
return retval;
|
||||
|
||||
|
||||
rte_crypto_op_attach_sym_session(ut_params->op, ut_params->sess);
|
||||
|
||||
ut_params->op->sym->m_src = ut_params->ibuf;
|
||||
|
||||
/* Process crypto operation */
|
||||
TEST_ASSERT_NOT_NULL(process_crypto_request(ts_params->valid_devs[0],
|
||||
ut_params->op), "failed to process sym crypto op");
|
||||
|
||||
TEST_ASSERT_EQUAL(ut_params->op->status, RTE_CRYPTO_OP_STATUS_SUCCESS,
|
||||
"crypto op processing failed");
|
||||
|
||||
if (ut_params->op->sym->m_dst)
|
||||
plaintext = rte_pktmbuf_mtod(ut_params->op->sym->m_dst,
|
||||
uint8_t *);
|
||||
else
|
||||
plaintext = ciphertext;
|
||||
|
||||
#ifdef RTE_APP_TEST_DEBUG
|
||||
rte_hexdump(stdout, "plaintext:", plaintext, tdata->ciphertext.len);
|
||||
#endif
|
||||
/* Validate obuf */
|
||||
TEST_ASSERT_BUFFERS_ARE_EQUAL(
|
||||
plaintext,
|
||||
tdata->plaintext.data,
|
||||
tdata->plaintext.len,
|
||||
"GCM plaintext data not as expected");
|
||||
|
||||
TEST_ASSERT_EQUAL(ut_params->op->status,
|
||||
RTE_CRYPTO_OP_STATUS_SUCCESS,
|
||||
"GCM authentication failed");
|
||||
return 0;
|
||||
}
|
||||
|
||||
static int
|
||||
test_mb_AES_GCM_authenticated_decryption_test_case_1(void)
|
||||
{
|
||||
return test_mb_AES_GCM_authenticated_decryption(&gcm_test_case_1);
|
||||
}
|
||||
|
||||
static int
|
||||
test_mb_AES_GCM_authenticated_decryption_test_case_2(void)
|
||||
{
|
||||
return test_mb_AES_GCM_authenticated_decryption(&gcm_test_case_2);
|
||||
}
|
||||
|
||||
static int
|
||||
test_mb_AES_GCM_authenticated_decryption_test_case_3(void)
|
||||
{
|
||||
return test_mb_AES_GCM_authenticated_decryption(&gcm_test_case_3);
|
||||
}
|
||||
|
||||
static int
|
||||
test_mb_AES_GCM_authenticated_decryption_test_case_4(void)
|
||||
{
|
||||
return test_mb_AES_GCM_authenticated_decryption(&gcm_test_case_4);
|
||||
}
|
||||
|
||||
static int
|
||||
test_mb_AES_GCM_authenticated_decryption_test_case_5(void)
|
||||
{
|
||||
return test_mb_AES_GCM_authenticated_decryption(&gcm_test_case_5);
|
||||
}
|
||||
|
||||
static int
|
||||
test_mb_AES_GCM_authenticated_decryption_test_case_6(void)
|
||||
{
|
||||
return test_mb_AES_GCM_authenticated_decryption(&gcm_test_case_6);
|
||||
}
|
||||
|
||||
static int
|
||||
test_mb_AES_GCM_authenticated_decryption_test_case_7(void)
|
||||
{
|
||||
return test_mb_AES_GCM_authenticated_decryption(&gcm_test_case_7);
|
||||
}
|
||||
|
||||
static int
|
||||
test_stats(void)
|
||||
{
|
||||
@ -3107,6 +3518,47 @@ static struct unit_test_suite cryptodev_aesni_mb_testsuite = {
|
||||
}
|
||||
};
|
||||
|
||||
static struct unit_test_suite cryptodev_aesni_gcm_testsuite = {
|
||||
.suite_name = "Crypto Device AESNI GCM Unit Test Suite",
|
||||
.setup = testsuite_setup,
|
||||
.teardown = testsuite_teardown,
|
||||
.unit_test_cases = {
|
||||
/** AES GCM Authenticated Encryption */
|
||||
TEST_CASE_ST(ut_setup, ut_teardown,
|
||||
test_mb_AES_GCM_authenticated_encryption_test_case_1),
|
||||
TEST_CASE_ST(ut_setup, ut_teardown,
|
||||
test_mb_AES_GCM_authenticated_encryption_test_case_2),
|
||||
TEST_CASE_ST(ut_setup, ut_teardown,
|
||||
test_mb_AES_GCM_authenticated_encryption_test_case_3),
|
||||
TEST_CASE_ST(ut_setup, ut_teardown,
|
||||
test_mb_AES_GCM_authenticated_encryption_test_case_4),
|
||||
TEST_CASE_ST(ut_setup, ut_teardown,
|
||||
test_mb_AES_GCM_authenticated_encryption_test_case_5),
|
||||
TEST_CASE_ST(ut_setup, ut_teardown,
|
||||
test_mb_AES_GCM_authenticated_encryption_test_case_6),
|
||||
TEST_CASE_ST(ut_setup, ut_teardown,
|
||||
test_mb_AES_GCM_authenticated_encryption_test_case_7),
|
||||
|
||||
/** AES GCM Authenticated Decryption */
|
||||
TEST_CASE_ST(ut_setup, ut_teardown,
|
||||
test_mb_AES_GCM_authenticated_decryption_test_case_1),
|
||||
TEST_CASE_ST(ut_setup, ut_teardown,
|
||||
test_mb_AES_GCM_authenticated_decryption_test_case_2),
|
||||
TEST_CASE_ST(ut_setup, ut_teardown,
|
||||
test_mb_AES_GCM_authenticated_decryption_test_case_3),
|
||||
TEST_CASE_ST(ut_setup, ut_teardown,
|
||||
test_mb_AES_GCM_authenticated_decryption_test_case_4),
|
||||
TEST_CASE_ST(ut_setup, ut_teardown,
|
||||
test_mb_AES_GCM_authenticated_decryption_test_case_5),
|
||||
TEST_CASE_ST(ut_setup, ut_teardown,
|
||||
test_mb_AES_GCM_authenticated_decryption_test_case_6),
|
||||
TEST_CASE_ST(ut_setup, ut_teardown,
|
||||
test_mb_AES_GCM_authenticated_decryption_test_case_7),
|
||||
|
||||
TEST_CASES_END() /**< NULL terminate unit test array */
|
||||
}
|
||||
};
|
||||
|
||||
static struct unit_test_suite cryptodev_sw_snow3g_testsuite = {
|
||||
.suite_name = "Crypto Device SW Snow3G Unit Test Suite",
|
||||
.setup = testsuite_setup,
|
||||
@ -3181,6 +3633,19 @@ static struct test_command cryptodev_aesni_mb_cmd = {
|
||||
.callback = test_cryptodev_aesni_mb,
|
||||
};
|
||||
|
||||
static int
|
||||
test_cryptodev_aesni_gcm(void)
|
||||
{
|
||||
gbl_cryptodev_type = RTE_CRYPTODEV_AESNI_GCM_PMD;
|
||||
|
||||
return unit_test_suite_runner(&cryptodev_aesni_gcm_testsuite);
|
||||
}
|
||||
|
||||
static struct test_command cryptodev_aesni_gcm_cmd = {
|
||||
.command = "cryptodev_aesni_gcm_autotest",
|
||||
.callback = test_cryptodev_aesni_gcm,
|
||||
};
|
||||
|
||||
static int
|
||||
test_cryptodev_sw_snow3g(void /*argv __rte_unused, int argc __rte_unused*/)
|
||||
{
|
||||
@ -3196,4 +3661,5 @@ static struct test_command cryptodev_sw_snow3g_cmd = {
|
||||
|
||||
REGISTER_TEST_COMMAND(cryptodev_qat_cmd);
|
||||
REGISTER_TEST_COMMAND(cryptodev_aesni_mb_cmd);
|
||||
REGISTER_TEST_COMMAND(cryptodev_aesni_gcm_cmd);
|
||||
REGISTER_TEST_COMMAND(cryptodev_sw_snow3g_cmd);
|
||||
|
423	app/test/test_cryptodev_gcm_test_vectors.h	(new file)
@@ -0,0 +1,423 @@
|
||||
/*-
|
||||
* BSD LICENSE
|
||||
*
|
||||
* Copyright(c) 2015 Intel Corporation. All rights reserved.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions
|
||||
* are met:
|
||||
*
|
||||
* * Redistributions of source code must retain the above copyright
|
||||
* notice, this list of conditions and the following disclaimer.
|
||||
* * Redistributions in binary form must reproduce the above copyright
|
||||
* notice, this list of conditions and the following disclaimer in
|
||||
* the documentation and/or other materials provided with the
|
||||
* distribution.
|
||||
* * Neither the name of Intel Corporation nor the names of its
|
||||
* contributors may be used to endorse or promote products derived
|
||||
* from this software without specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*/
|
||||
|
||||
#ifndef TEST_CRYPTODEV_GCM_TEST_VECTORS_H_
|
||||
#define TEST_CRYPTODEV_GCM_TEST_VECTORS_H_
|
||||
|
||||
struct gcm_test_data {
|
||||
struct {
|
||||
uint8_t data[64];
|
||||
unsigned len;
|
||||
} key;
|
||||
|
||||
struct {
|
||||
uint8_t data[64] __rte_aligned(16);
|
||||
unsigned len;
|
||||
} iv;
|
||||
|
||||
struct {
|
||||
uint8_t data[64];
|
||||
unsigned len;
|
||||
} aad;
|
||||
|
||||
struct {
|
||||
uint8_t data[1024];
|
||||
unsigned len;
|
||||
} plaintext;
|
||||
|
||||
struct {
|
||||
uint8_t data[1024];
|
||||
unsigned len;
|
||||
} ciphertext;
|
||||
|
||||
struct {
|
||||
uint8_t data[16];
|
||||
unsigned len;
|
||||
} auth_tag;
|
||||
};
|
||||
|
||||
/** AES-128 Test Vectors */
|
||||
static const struct gcm_test_data gcm_test_case_1 = {
|
||||
.key = {
|
||||
.data = {
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
|
||||
.len = 16
|
||||
},
|
||||
.iv = {
|
||||
.data = {
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00 },
|
||||
.len = 12
|
||||
},
|
||||
.aad = {
|
||||
.data = { 0 },
|
||||
.len = 0
|
||||
},
|
||||
.plaintext = {
|
||||
.data = {
|
||||
0x00 },
|
||||
.len = 0
|
||||
},
|
||||
.ciphertext = {
|
||||
.data = {
|
||||
0x00
|
||||
},
|
||||
.len = 0
|
||||
},
|
||||
.auth_tag = {
|
||||
.data = {
|
||||
0x58, 0xe2, 0xfc, 0xce, 0xfa, 0x7e, 0x30, 0x61,
|
||||
0x36, 0x7f, 0x1d, 0x57, 0xa4, 0xe7, 0x45, 0x5a },
|
||||
.len = 16
|
||||
}
|
||||
};
|
||||
|
||||
/** AES-128 Test Vectors */
|
||||
static const struct gcm_test_data gcm_test_case_2 = {
|
||||
.key = {
|
||||
.data = {
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
|
||||
.len = 16
|
||||
},
|
||||
.iv = {
|
||||
.data = {
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00 },
|
||||
.len = 12
|
||||
},
|
||||
.aad = {
|
||||
.data = { 0 },
|
||||
.len = 0
|
||||
},
|
||||
.plaintext = {
|
||||
.data = {
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
|
||||
.len = 16
|
||||
},
|
||||
.ciphertext = {
|
||||
.data = {
|
||||
0x03, 0x88, 0xda, 0xce, 0x60, 0xb6, 0xa3, 0x92,
|
||||
0xf3, 0x28, 0xc2, 0xb9, 0x71, 0xb2, 0xfe, 0x78 },
|
||||
.len = 16
|
||||
},
|
||||
.auth_tag = {
|
||||
.data = {
|
||||
0xab, 0x6e, 0x47, 0xd4, 0x2c, 0xec, 0x13, 0xbd,
|
||||
0xf5, 0x3a, 0x67, 0xb2, 0x12, 0x57, 0xbd, 0xdf },
|
||||
.len = 16
|
||||
}
|
||||
};
|
||||
|
||||
/** AES-128 Test Vectors */
|
||||
static const struct gcm_test_data gcm_test_case_3 = {
|
||||
.key = {
|
||||
.data = {
|
||||
0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c,
|
||||
0x6d, 0x6a, 0x8f, 0x94, 0x67, 0x30, 0x83, 0x08 },
|
||||
.len = 16
|
||||
},
|
||||
.iv = {
|
||||
.data = {
|
||||
0xca, 0xfe, 0xba, 0xbe, 0xfa, 0xce, 0xdb, 0xad,
|
||||
0xde, 0xca, 0xf8, 0x88 },
|
||||
.len = 12
|
||||
},
|
||||
.aad = {
|
||||
.data = { 0 },
|
||||
.len = 0
|
||||
},
|
||||
.plaintext = {
|
||||
.data = {
|
||||
0xd9, 0x31, 0x32, 0x25, 0xf8, 0x84, 0x06, 0xe5,
|
||||
0xa5, 0x59, 0x09, 0xc5, 0xaf, 0xf5, 0x26, 0x9a,
|
||||
0x86, 0xa7, 0xa9, 0x53, 0x15, 0x34, 0xf7, 0xda,
|
||||
0x2e, 0x4c, 0x30, 0x3d, 0x8a, 0x31, 0x8a, 0x72,
|
||||
0x1c, 0x3c, 0x0c, 0x95, 0x95, 0x68, 0x09, 0x53,
|
||||
0x2f, 0xcf, 0x0e, 0x24, 0x49, 0xa6, 0xb5, 0x25,
|
||||
0xb1, 0x6a, 0xed, 0xf5, 0xaa, 0x0d, 0xe6, 0x57,
|
||||
0xba, 0x63, 0x7b, 0x39, 0x1a, 0xaf, 0xd2, 0x55 },
|
||||
.len = 64
|
||||
},
|
||||
.ciphertext = {
|
||||
.data = {
|
||||
0x42, 0x83, 0x1e, 0xc2, 0x21, 0x77, 0x74, 0x24,
|
||||
0x4b, 0x72, 0x21, 0xb7, 0x84, 0xd0, 0xd4, 0x9c,
|
||||
0xe3, 0xaa, 0x21, 0x2f, 0x2c, 0x02, 0xa4, 0xe0,
|
||||
0x35, 0xc1, 0x7e, 0x23, 0x29, 0xac, 0xa1, 0x2e,
|
||||
0x21, 0xd5, 0x14, 0xb2, 0x54, 0x66, 0x93, 0x1c,
|
||||
0x7d, 0x8f, 0x6a, 0x5a, 0xac, 0x84, 0xaa, 0x05,
|
||||
0x1b, 0xa3, 0x0b, 0x39, 0x6a, 0x0a, 0xac, 0x97,
|
||||
0x3d, 0x58, 0xe0, 0x91, 0x47, 0x3f, 0x59, 0x85
|
||||
},
|
||||
.len = 64
|
||||
},
|
||||
.auth_tag = {
|
||||
.data = {
|
||||
0x4d, 0x5c, 0x2a, 0xf3, 0x27, 0xcd, 0x64, 0xa6,
|
||||
0x2c, 0xf3, 0x5a, 0xbd, 0x2b, 0xa6, 0xfa, 0xb4 },
|
||||
.len = 16
|
||||
}
|
||||
};
|
||||
|
||||
/** AES-128 Test Vectors */
|
||||
static const struct gcm_test_data gcm_test_case_4 = {
|
||||
.key = {
|
||||
.data = {
|
||||
0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c,
|
||||
0x6d, 0x6a, 0x8f, 0x94, 0x67, 0x30, 0x83, 0x08
|
||||
},
|
||||
.len = 16
|
||||
},
|
||||
.iv = {
|
||||
.data = {
|
||||
0xca, 0xfe, 0xba, 0xbe, 0xfa, 0xce, 0xdb, 0xad,
|
||||
0xde, 0xca, 0xf8, 0x88 },
|
||||
.len = 12
|
||||
},
|
||||
.aad = {
|
||||
.data = {
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
|
||||
.len = 8
|
||||
},
|
||||
.plaintext = {
|
||||
.data = {
|
||||
0xd9, 0x31, 0x32, 0x25, 0xf8, 0x84, 0x06, 0xe5,
|
||||
0xa5, 0x59, 0x09, 0xc5, 0xaf, 0xf5, 0x26, 0x9a,
|
||||
0x86, 0xa7, 0xa9, 0x53, 0x15, 0x34, 0xf7, 0xda,
|
||||
0x2e, 0x4c, 0x30, 0x3d, 0x8a, 0x31, 0x8a, 0x72,
|
||||
0x1c, 0x3c, 0x0c, 0x95, 0x95, 0x68, 0x09, 0x53,
|
||||
0x2f, 0xcf, 0x0e, 0x24, 0x49, 0xa6, 0xb5, 0x25,
|
||||
0xb1, 0x6a, 0xed, 0xf5, 0xaa, 0x0d, 0xe6, 0x57,
|
||||
0xba, 0x63, 0x7b, 0x39
|
||||
},
|
||||
.len = 60
|
||||
},
|
||||
.ciphertext = {
|
||||
.data = {
|
||||
0x42, 0x83, 0x1e, 0xc2, 0x21, 0x77, 0x74, 0x24,
|
||||
0x4b, 0x72, 0x21, 0xb7, 0x84, 0xd0, 0xd4, 0x9c,
|
||||
0xe3, 0xaa, 0x21, 0x2f, 0x2c, 0x02, 0xa4, 0xe0,
|
||||
0x35, 0xc1, 0x7e, 0x23, 0x29, 0xac, 0xa1, 0x2e,
|
||||
0x21, 0xd5, 0x14, 0xb2, 0x54, 0x66, 0x93, 0x1c,
|
||||
0x7d, 0x8f, 0x6a, 0x5a, 0xac, 0x84, 0xaa, 0x05,
|
||||
0x1b, 0xa3, 0x0b, 0x39, 0x6a, 0x0a, 0xac, 0x97,
|
||||
0x3d, 0x58, 0xe0, 0x91
|
||||
},
|
||||
.len = 60
|
||||
},
|
||||
.auth_tag = {
|
||||
.data = {
|
||||
0xA2, 0xA4, 0x35, 0x75, 0xDC, 0xB0, 0x57, 0x74,
|
||||
0x07, 0x02, 0x30, 0xC2, 0xE7, 0x52, 0x02, 0x00
|
||||
},
|
||||
.len = 16
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
/** AES-128 Test Vectors */
|
||||
static const struct gcm_test_data gcm_test_case_5 = {
|
||||
.key = {
|
||||
.data = {
|
||||
0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c,
|
||||
0x6d, 0x6a, 0x8f, 0x94, 0x67, 0x30, 0x83, 0x08
|
||||
},
|
||||
.len = 16
|
||||
},
|
||||
.iv = {
|
||||
.data = {
|
||||
0xca, 0xfe, 0xba, 0xbe, 0xfa, 0xce, 0xdb, 0xad,
|
||||
0xde, 0xca, 0xf8, 0x88 },
|
||||
.len = 12
|
||||
},
|
||||
.aad = {
|
||||
.data = {
|
||||
0xfe, 0xed, 0xfa, 0xce, 0xde, 0xad, 0xbe, 0xef },
|
||||
.len = 8
|
||||
},
|
||||
.plaintext = {
|
||||
.data = {
|
||||
0xd9, 0x31, 0x32, 0x25, 0xf8, 0x84, 0x06, 0xe5,
|
||||
0xa5, 0x59, 0x09, 0xc5, 0xaf, 0xf5, 0x26, 0x9a,
|
||||
0x86, 0xa7, 0xa9, 0x53, 0x15, 0x34, 0xf7, 0xda,
|
||||
0x2e, 0x4c, 0x30, 0x3d, 0x8a, 0x31, 0x8a, 0x72,
|
||||
0x1c, 0x3c, 0x0c, 0x95, 0x95, 0x68, 0x09, 0x53,
|
||||
0x2f, 0xcf, 0x0e, 0x24, 0x49, 0xa6, 0xb5, 0x25,
|
||||
0xb1, 0x6a, 0xed, 0xf5, 0xaa, 0x0d, 0xe6, 0x57,
|
||||
0xba, 0x63, 0x7b, 0x39
|
||||
},
|
||||
.len = 60
|
||||
},
|
||||
.ciphertext = {
|
||||
.data = {
|
||||
0x42, 0x83, 0x1e, 0xc2, 0x21, 0x77, 0x74, 0x24,
|
||||
0x4b, 0x72, 0x21, 0xb7, 0x84, 0xd0, 0xd4, 0x9c,
|
||||
0xe3, 0xaa, 0x21, 0x2f, 0x2c, 0x02, 0xa4, 0xe0,
|
||||
0x35, 0xc1, 0x7e, 0x23, 0x29, 0xac, 0xa1, 0x2e,
|
||||
0x21, 0xd5, 0x14, 0xb2, 0x54, 0x66, 0x93, 0x1c,
|
||||
0x7d, 0x8f, 0x6a, 0x5a, 0xac, 0x84, 0xaa, 0x05,
|
||||
0x1b, 0xa3, 0x0b, 0x39, 0x6a, 0x0a, 0xac, 0x97,
|
||||
0x3d, 0x58, 0xe0, 0x91
|
||||
},
|
||||
.len = 60
|
||||
},
|
||||
.auth_tag = {
|
||||
.data = {
|
||||
0xC5, 0x2D, 0xFB, 0x54, 0xAF, 0xBB, 0x07, 0xA1,
|
||||
0x9A, 0xFF, 0xBE, 0xE0, 0x61, 0x4C, 0xE7, 0xA5
|
||||
},
|
||||
.len = 16
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
/** AES-128 Test Vectors */
|
||||
static const struct gcm_test_data gcm_test_case_6 = {
|
||||
.key = {
|
||||
.data = {
|
||||
0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c,
|
||||
0x6d, 0x6a, 0x8f, 0x94, 0x67, 0x30, 0x83, 0x08
|
||||
},
|
||||
.len = 16
|
||||
},
|
||||
.iv = {
|
||||
.data = {
|
||||
0xca, 0xfe, 0xba, 0xbe, 0xfa, 0xce, 0xdb, 0xad,
|
||||
0xde, 0xca, 0xf8, 0x88
|
||||
},
|
||||
.len = 12
|
||||
},
|
||||
.aad = {
|
||||
.data = {
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00
|
||||
},
|
||||
.len = 12
|
||||
},
|
||||
.plaintext = {
|
||||
.data = {
|
||||
0xd9, 0x31, 0x32, 0x25, 0xf8, 0x84, 0x06, 0xe5,
|
||||
0xa5, 0x59, 0x09, 0xc5, 0xaf, 0xf5, 0x26, 0x9a,
|
||||
0x86, 0xa7, 0xa9, 0x53, 0x15, 0x34, 0xf7, 0xda,
|
||||
0x2e, 0x4c, 0x30, 0x3d, 0x8a, 0x31, 0x8a, 0x72,
|
||||
0x1c, 0x3c, 0x0c, 0x95, 0x95, 0x68, 0x09, 0x53,
|
||||
0x2f, 0xcf, 0x0e, 0x24, 0x49, 0xa6, 0xb5, 0x25,
|
||||
0xb1, 0x6a, 0xed, 0xf5, 0xaa, 0x0d, 0xe6, 0x57,
|
||||
0xba, 0x63, 0x7b, 0x39
|
||||
},
|
||||
.len = 60
|
||||
},
|
||||
.ciphertext = {
|
||||
.data = {
|
||||
0x42, 0x83, 0x1e, 0xc2, 0x21, 0x77, 0x74, 0x24,
|
||||
0x4b, 0x72, 0x21, 0xb7, 0x84, 0xd0, 0xd4, 0x9c,
|
||||
0xe3, 0xaa, 0x21, 0x2f, 0x2c, 0x02, 0xa4, 0xe0,
|
||||
0x35, 0xc1, 0x7e, 0x23, 0x29, 0xac, 0xa1, 0x2e,
|
||||
0x21, 0xd5, 0x14, 0xb2, 0x54, 0x66, 0x93, 0x1c,
|
||||
0x7d, 0x8f, 0x6a, 0x5a, 0xac, 0x84, 0xaa, 0x05,
|
||||
0x1b, 0xa3, 0x0b, 0x39, 0x6a, 0x0a, 0xac, 0x97,
|
||||
0x3d, 0x58, 0xe0, 0x91
|
||||
},
|
||||
.len = 60
|
||||
},
|
||||
.auth_tag = {
|
||||
.data = {
|
||||
0x74, 0xFC, 0xFA, 0x29, 0x3E, 0x60, 0xCC, 0x66,
|
||||
0x09, 0xD6, 0xFD, 0x00, 0xC8, 0x86, 0xD5, 0x42
|
||||
},
|
||||
.len = 16
|
||||
}
|
||||
};
|
||||
|
||||
/** AES-128 Test Vectors */
|
||||
static const struct gcm_test_data gcm_test_case_7 = {
|
||||
.key = {
|
||||
.data = {
|
||||
0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c,
|
||||
0x6d, 0x6a, 0x8f, 0x94, 0x67, 0x30, 0x83, 0x08
|
||||
},
|
||||
.len = 16
|
||||
},
|
||||
.iv = {
|
||||
.data = {
|
||||
0xca, 0xfe, 0xba, 0xbe, 0xfa, 0xce, 0xdb, 0xad,
|
||||
0xde, 0xca, 0xf8, 0x88
|
||||
},
|
||||
.len = 12
|
||||
},
|
||||
.aad = {
|
||||
.data = {
|
||||
0xfe, 0xed, 0xfa, 0xce, 0xde, 0xad, 0xbe, 0xef,
|
||||
0xfe, 0xed, 0xfa, 0xce
|
||||
},
|
||||
.len = 12
|
||||
},
|
||||
.plaintext = {
|
||||
.data = {
|
||||
0xd9, 0x31, 0x32, 0x25, 0xf8, 0x84, 0x06, 0xe5,
|
||||
0xa5, 0x59, 0x09, 0xc5, 0xaf, 0xf5, 0x26, 0x9a,
|
||||
0x86, 0xa7, 0xa9, 0x53, 0x15, 0x34, 0xf7, 0xda,
|
||||
0x2e, 0x4c, 0x30, 0x3d, 0x8a, 0x31, 0x8a, 0x72,
|
||||
0x1c, 0x3c, 0x0c, 0x95, 0x95, 0x68, 0x09, 0x53,
|
||||
0x2f, 0xcf, 0x0e, 0x24, 0x49, 0xa6, 0xb5, 0x25,
|
||||
0xb1, 0x6a, 0xed, 0xf5, 0xaa, 0x0d, 0xe6, 0x57,
|
||||
0xba, 0x63, 0x7b, 0x39
|
||||
},
|
||||
.len = 60
|
||||
},
|
||||
.ciphertext = {
|
||||
.data = {
|
||||
0x42, 0x83, 0x1e, 0xc2, 0x21, 0x77, 0x74, 0x24,
|
||||
0x4b, 0x72, 0x21, 0xb7, 0x84, 0xd0, 0xd4, 0x9c,
|
||||
0xe3, 0xaa, 0x21, 0x2f, 0x2c, 0x02, 0xa4, 0xe0,
|
||||
0x35, 0xc1, 0x7e, 0x23, 0x29, 0xac, 0xa1, 0x2e,
|
||||
0x21, 0xd5, 0x14, 0xb2, 0x54, 0x66, 0x93, 0x1c,
|
||||
0x7d, 0x8f, 0x6a, 0x5a, 0xac, 0x84, 0xaa, 0x05,
|
||||
0x1b, 0xa3, 0x0b, 0x39, 0x6a, 0x0a, 0xac, 0x97,
|
||||
0x3d, 0x58, 0xe0, 0x91
|
||||
},
|
||||
.len = 60
|
||||
},
|
||||
.auth_tag = {
|
||||
.data = {
|
||||
0xE9, 0xE4, 0xAB, 0x76, 0xB7, 0xFF, 0xEA, 0xDC,
|
||||
0x69, 0x79, 0x38, 0xA2, 0x0D, 0xCA, 0xF5, 0x92
|
||||
},
|
||||
.len = 16
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
#endif /* TEST_CRYPTODEV_GCM_TEST_VECTORS_H_ */
|
@@ -337,6 +337,12 @@ CONFIG_RTE_LIBRTE_PMD_AESNI_MB_DEBUG=n
CONFIG_RTE_AESNI_MB_PMD_MAX_NB_QUEUE_PAIRS=8
CONFIG_RTE_AESNI_MB_PMD_MAX_NB_SESSIONS=2048

#
# Compile PMD for AESNI GCM device
#
CONFIG_RTE_LIBRTE_PMD_AESNI_GCM=n
CONFIG_RTE_LIBRTE_PMD_AESNI_GCM_DEBUG=n

#
# Compile PMD for SNOW 3G device
#

@@ -50,3 +50,13 @@ CONFIG_RTE_LIBRTE_KNI=n
# Vectorized PMD is not supported on 32-bit
#
CONFIG_RTE_IXGBE_INC_VECTOR=n

#
# AES-NI multi-buffer PMD is not supported on 32-bit
#
CONFIG_RTE_LIBRTE_PMD_AESNI_MB=n

#
# AES-NI GCM PMD is not supported on 32-bit
#
CONFIG_RTE_LIBRTE_PMD_AESNI_GCM=n

@@ -50,3 +50,13 @@ CONFIG_RTE_LIBRTE_KNI=n
# Vectorized PMD is not supported on 32-bit
#
CONFIG_RTE_IXGBE_INC_VECTOR=n

#
# AES-NI multi-buffer PMD is not supported on 32-bit
#
CONFIG_RTE_LIBRTE_PMD_AESNI_MB=n

#
# AES-NI GCM PMD is not supported on 32-bit
#
CONFIG_RTE_LIBRTE_PMD_AESNI_GCM=n

66	doc/guides/cryptodevs/aesni_gcm.rst	(new file)
@@ -0,0 +1,66 @@
.. BSD LICENSE
    Copyright(c) 2016 Intel Corporation. All rights reserved.

    Redistribution and use in source and binary forms, with or without
    modification, are permitted provided that the following conditions
    are met:

    * Redistributions of source code must retain the above copyright
      notice, this list of conditions and the following disclaimer.
    * Redistributions in binary form must reproduce the above copyright
      notice, this list of conditions and the following disclaimer in
      the documentation and/or other materials provided with the
      distribution.
    * Neither the name of Intel Corporation nor the names of its
      contributors may be used to endorse or promote products derived
      from this software without specific prior written permission.

    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

AES-NI GCM Crypto Poll Mode Driver
==================================

The AES-NI GCM PMD (**librte_pmd_aesni_gcm**) provides poll mode crypto driver
support for utilizing the Intel multi-buffer library (see the AES-NI Multi-buffer
PMD documentation to learn more about it, including installation).

The AES-NI GCM PMD has currently only been tested on Fedora 21 64-bit with gcc.

Features
--------

The AESNI GCM PMD supports:

Cipher algorithms:

* RTE_CRYPTO_CIPHER_AES_GCM

Authentication algorithms:

* RTE_CRYPTO_AUTH_AES_GCM

Limitations
-----------

* Chained mbufs are not supported.
* Hash only is not supported.
* Cipher only is not supported.
* Only in-place is currently supported (destination address is the same as source address).
* Only the session-oriented API is supported (session-less APIs are not supported).
* Not performance tuned.

The environment variable AESNI_MULTI_BUFFER_LIB_PATH must be exported with the
path where you extracted and built the multi-buffer library. Finally, set
CONFIG_RTE_LIBRTE_PMD_AESNI_GCM=y in config/common_base.

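To make the documented constraints concrete, here is a minimal sketch of how an application could build the cipher-then-auth transform chain for AES-GCM authenticated encryption with this PMD, mirroring the create_gcm_session() unit-test helper earlier in this patch; dev_id, key and the AAD/digest lengths are illustrative assumptions, not values taken from the patch.

	/*
	 * Hedged sketch (not part of the patch): chain cipher -> auth transforms
	 * for AES-GCM authenticated encryption and create a session, in the same
	 * way as the create_gcm_session() test helper. dev_id, key and the
	 * AAD/digest lengths below are placeholder assumptions.
	 */
	struct rte_crypto_sym_xform cipher_xform, auth_xform;

	memset(&cipher_xform, 0, sizeof(cipher_xform));
	memset(&auth_xform, 0, sizeof(auth_xform));

	cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
	cipher_xform.cipher.algo = RTE_CRYPTO_CIPHER_AES_GCM;
	cipher_xform.cipher.op = RTE_CRYPTO_CIPHER_OP_ENCRYPT;
	cipher_xform.cipher.key.data = key;		/* 128-bit AES key */
	cipher_xform.cipher.key.length = 16;

	auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
	auth_xform.auth.algo = RTE_CRYPTO_AUTH_AES_GCM;
	auth_xform.auth.digest_length = 16;		/* GCM tag length */
	auth_xform.auth.add_auth_data_length = 8;	/* AAD length */
	auth_xform.auth.key.length = 0;
	auth_xform.auth.key.data = NULL;

	/* For encryption the cipher transform leads the chain. */
	cipher_xform.next = &auth_xform;
	auth_xform.next = NULL;

	struct rte_cryptodev_sym_session *sess =
		rte_cryptodev_sym_session_create(dev_id, &cipher_xform);

For authenticated decryption the chain order is reversed: the auth transform leads and points to the cipher transform, as the same helper shows.
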
@@ -36,5 +36,6 @@ Crypto Device Drivers
    :numbered:

    aesni_mb
    aesni_gcm
    snow3g
    qat

@@ -85,6 +85,11 @@ This section should contain new features added in this release. Sample format:
  A new Crypto PMD has been added, which provides SNOW 3G UEA2 ciphering
  and SNOW3G UIA2 hashing.

* **Added AES GCM PMD.**

  Added new Crypto PMD to support AES-GCM authenticated encryption and
  authenticated decryption in SW.


Resolved Issues
---------------

@@ -31,6 +31,7 @@

include $(RTE_SDK)/mk/rte.vars.mk

DIRS-$(CONFIG_RTE_LIBRTE_PMD_AESNI_GCM) += aesni_gcm
DIRS-$(CONFIG_RTE_LIBRTE_PMD_AESNI_MB) += aesni_mb
DIRS-$(CONFIG_RTE_LIBRTE_PMD_QAT) += qat
DIRS-$(CONFIG_RTE_LIBRTE_PMD_SNOW3G) += snow3g

67	drivers/crypto/aesni_gcm/Makefile	(new file)
@@ -0,0 +1,67 @@
#   BSD LICENSE
#
#   Copyright(c) 2016 Intel Corporation. All rights reserved.
#
#   Redistribution and use in source and binary forms, with or without
#   modification, are permitted provided that the following conditions
#   are met:
#
#     * Redistributions of source code must retain the above copyright
#       notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above copyright
#       notice, this list of conditions and the following disclaimer in
#       the documentation and/or other materials provided with the
#       distribution.
#     * Neither the name of Intel Corporation nor the names of its
#       contributors may be used to endorse or promote products derived
#       from this software without specific prior written permission.
#
#   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
#   "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
#   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
#   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
#   OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
#   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
#   LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
#   DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
#   THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
#   (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
#   OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

include $(RTE_SDK)/mk/rte.vars.mk

ifeq ($(AESNI_MULTI_BUFFER_LIB_PATH),)
$(error "Please define AESNI_MULTI_BUFFER_LIB_PATH environment variable")
endif

# library name
LIB = librte_pmd_aesni_gcm.a

# build flags
CFLAGS += -O3
CFLAGS += $(WERROR_FLAGS)

# library version
LIBABIVER := 1

# versioning export map
EXPORT_MAP := rte_pmd_aesni_gcm_version.map

# external library include paths
CFLAGS += -I$(AESNI_MULTI_BUFFER_LIB_PATH)
CFLAGS += -I$(AESNI_MULTI_BUFFER_LIB_PATH)/include
LDLIBS += -lcrypto

# library source files
SRCS-$(CONFIG_RTE_LIBRTE_PMD_AESNI_GCM) += aesni_gcm_pmd.c
SRCS-$(CONFIG_RTE_LIBRTE_PMD_AESNI_GCM) += aesni_gcm_pmd_ops.c

# export include files
SYMLINK-y-include +=

# library dependencies
DEPDIRS-$(CONFIG_RTE_LIBRTE_PMD_AESNI_GCM) += lib/librte_eal
DEPDIRS-$(CONFIG_RTE_LIBRTE_PMD_AESNI_GCM) += lib/librte_mbuf
DEPDIRS-$(CONFIG_RTE_LIBRTE_PMD_AESNI_GCM) += lib/librte_cryptodev

include $(RTE_SDK)/mk/rte.lib.mk

127	drivers/crypto/aesni_gcm/aesni_gcm_ops.h	(new file)
@@ -0,0 +1,127 @@
/*-
 *   BSD LICENSE
 *
 *   Copyright(c) 2016 Intel Corporation. All rights reserved.
 *
 *   Redistribution and use in source and binary forms, with or without
 *   modification, are permitted provided that the following conditions
 *   are met:
 *
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in
 *       the documentation and/or other materials provided with the
 *       distribution.
 *     * Neither the name of Intel Corporation nor the names of its
 *       contributors may be used to endorse or promote products derived
 *       from this software without specific prior written permission.
 *
 *   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 *   "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 *   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 *   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 *   OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 *   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 *   LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 *   DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 *   THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 *   (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 *   OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef _AESNI_GCM_OPS_H_
#define _AESNI_GCM_OPS_H_

#ifndef LINUX
#define LINUX
#endif

#include <gcm_defines.h>
#include <aux_funcs.h>

/** Supported vector modes */
enum aesni_gcm_vector_mode {
	RTE_AESNI_GCM_NOT_SUPPORTED = 0,
	RTE_AESNI_GCM_SSE,
	RTE_AESNI_GCM_AVX,
	RTE_AESNI_GCM_AVX2
};

typedef void (*aes_keyexp_128_enc_t)(void *key, void *enc_exp_keys);

typedef void (*aesni_gcm_t)(gcm_data *my_ctx_data, u8 *out, const u8 *in,
		u64 plaintext_len, u8 *iv, const u8 *aad, u64 aad_len,
		u8 *auth_tag, u64 auth_tag_len);

typedef void (*aesni_gcm_precomp_t)(gcm_data *my_ctx_data, u8 *hash_subkey);

/** GCM library function pointer table */
struct aesni_gcm_ops {
	struct {
		struct {
			aes_keyexp_128_enc_t aes128_enc;
			/**< AES128 enc key expansion */
		} keyexp;
		/**< Key expansion functions */
	} aux; /**< Auxiliary functions */

	struct {
		aesni_gcm_t enc;		/**< GCM encode function pointer */
		aesni_gcm_t dec;		/**< GCM decode function pointer */
		aesni_gcm_precomp_t precomp;	/**< GCM pre-compute */
	} gcm; /**< GCM functions */
};


static const struct aesni_gcm_ops gcm_ops[] = {
	[RTE_AESNI_GCM_NOT_SUPPORTED] = {
		.aux = {
			.keyexp = {
				NULL
			}
		},
		.gcm = {
			NULL
		}
	},
	[RTE_AESNI_GCM_SSE] = {
		.aux = {
			.keyexp = {
				aes_keyexp_128_enc_sse
			}
		},
		.gcm = {
			aesni_gcm_enc_sse,
			aesni_gcm_dec_sse,
			aesni_gcm_precomp_sse
		}
	},
	[RTE_AESNI_GCM_AVX] = {
		.aux = {
			.keyexp = {
				aes_keyexp_128_enc_avx,
			}
		},
		.gcm = {
			aesni_gcm_enc_avx_gen2,
			aesni_gcm_dec_avx_gen2,
			aesni_gcm_precomp_avx_gen2
		}
	},
	[RTE_AESNI_GCM_AVX2] = {
		.aux = {
			.keyexp = {
				aes_keyexp_128_enc_avx2,
			}
		},
		.gcm = {
			aesni_gcm_enc_avx_gen4,
			aesni_gcm_dec_avx_gen4,
			aesni_gcm_precomp_avx_gen4
		}
	}
};


#endif /* _AESNI_GCM_OPS_H_ */

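As a rough illustration of how this table is consumed, the sketch below shows the key-expansion, pre-computation and encryption calls being dispatched through gcm_ops for a chosen vector mode; it condenses what aesni_gcm_pmd.c does across session creation and operation processing, and the local variable names and buffer sizes are assumptions for the example only.

	/*
	 * Hedged sketch (not part of the patch): dispatch through gcm_ops once a
	 * vector mode has been selected. Buffers and sizes below are illustrative;
	 * the real flow lives in aesni_gcm_pmd.c.
	 */
	enum aesni_gcm_vector_mode mode = RTE_AESNI_GCM_SSE;	/* e.g. from CPU flags */
	const struct aesni_gcm_ops *ops = &gcm_ops[mode];

	uint8_t key[16] = { 0 };		/* assumed 128-bit AES key */
	uint8_t iv[16] = { 0 };			/* assumed 16-byte padded IV (J0) */
	uint8_t aad[8] = { 0 }, tag[16];
	uint8_t src[64] = { 0 }, dst[64];

	gcm_data gdata;
	uint8_t hsubkey[16] __rte_aligned(16) = { 0 };

	/*
	 * Expand the AES-128 key into the GCM context, then pre-compute the GHASH
	 * tables (aesni_gcm_pmd.c additionally AES-encrypts hsubkey first to
	 * derive the real hash subkey).
	 */
	(*ops->aux.keyexp.aes128_enc)(key, gdata.expanded_keys);
	(*ops->gcm.precomp)(&gdata, hsubkey);

	/* Authenticated encryption of one buffer. */
	(*ops->gcm.enc)(&gdata, dst, src, sizeof(src), iv,
			aad, sizeof(aad), tag, sizeof(tag));
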
505	drivers/crypto/aesni_gcm/aesni_gcm_pmd.c	(new file)
@@ -0,0 +1,505 @@
|
||||
/*-
|
||||
* BSD LICENSE
|
||||
*
|
||||
* Copyright(c) 2016 Intel Corporation. All rights reserved.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions
|
||||
* are met:
|
||||
*
|
||||
* * Redistributions of source code must retain the above copyright
|
||||
* notice, this list of conditions and the following disclaimer.
|
||||
* * Redistributions in binary form must reproduce the above copyright
|
||||
* notice, this list of conditions and the following disclaimer in
|
||||
* the documentation and/or other materials provided with the
|
||||
* distribution.
|
||||
* * Neither the name of Intel Corporation nor the names of its
|
||||
* contributors may be used to endorse or promote products derived
|
||||
* from this software without specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*/
|
||||
|
||||
#include <openssl/aes.h>
|
||||
|
||||
#include <rte_common.h>
|
||||
#include <rte_config.h>
|
||||
#include <rte_hexdump.h>
|
||||
#include <rte_cryptodev.h>
|
||||
#include <rte_cryptodev_pmd.h>
|
||||
#include <rte_dev.h>
|
||||
#include <rte_malloc.h>
|
||||
#include <rte_cpuflags.h>
|
||||
|
||||
#include "aesni_gcm_pmd_private.h"
|
||||
|
||||
/**
 * Global static parameter used to create a unique name for each AES-NI GCM
 * crypto device.
 */
|
||||
static unsigned unique_name_id;
|
||||
|
||||
static inline int
|
||||
create_unique_device_name(char *name, size_t size)
|
||||
{
|
||||
int ret;
|
||||
|
||||
if (name == NULL)
|
||||
return -EINVAL;
|
||||
|
||||
ret = snprintf(name, size, "%s_%u", CRYPTODEV_NAME_AESNI_GCM_PMD,
|
||||
unique_name_id++);
|
||||
if (ret < 0)
|
||||
return ret;
|
||||
return 0;
|
||||
}
|
||||
|
||||
static int
|
||||
aesni_gcm_calculate_hash_sub_key(uint8_t *hsubkey, unsigned hsubkey_length,
|
||||
uint8_t *aeskey, unsigned aeskey_length)
|
||||
{
|
||||
uint8_t key[aeskey_length] __rte_aligned(16);
|
||||
AES_KEY enc_key;
|
||||
|
||||
if (hsubkey_length % 16 != 0 && aeskey_length % 16 != 0)
|
||||
return -EFAULT;
|
||||
|
||||
memcpy(key, aeskey, aeskey_length);
|
||||
|
||||
if (AES_set_encrypt_key(key, aeskey_length << 3, &enc_key) != 0)
|
||||
return -EFAULT;
|
||||
|
||||
AES_encrypt(hsubkey, hsubkey, &enc_key);
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
/** Get xform chain order */
|
||||
static int
|
||||
aesni_gcm_get_mode(const struct rte_crypto_sym_xform *xform)
|
||||
{
|
||||
/*
|
||||
* GCM only supports authenticated encryption or authenticated
|
||||
* decryption, all other options are invalid, so we must have exactly
|
||||
* 2 xform structs chained together
|
||||
*/
|
||||
if (xform->next == NULL || xform->next->next != NULL)
|
||||
return -1;
|
||||
|
||||
if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
|
||||
xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH) {
|
||||
return AESNI_GCM_OP_AUTHENTICATED_ENCRYPTION;
|
||||
}
|
||||
|
||||
if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH &&
|
||||
xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER) {
|
||||
return AESNI_GCM_OP_AUTHENTICATED_DECRYPTION;
|
||||
}
|
||||
|
||||
return -1;
|
||||
}
|
||||
|
||||
/** Parse crypto xform chain and set private session parameters */
|
||||
int
|
||||
aesni_gcm_set_session_parameters(const struct aesni_gcm_ops *gcm_ops,
|
||||
struct aesni_gcm_session *sess,
|
||||
const struct rte_crypto_sym_xform *xform)
|
||||
{
|
||||
const struct rte_crypto_sym_xform *auth_xform = NULL;
|
||||
const struct rte_crypto_sym_xform *cipher_xform = NULL;
|
||||
|
||||
uint8_t hsubkey[16] __rte_aligned(16) = { 0 };
|
||||
|
||||
/* Select Crypto operation - hash then cipher / cipher then hash */
|
||||
switch (aesni_gcm_get_mode(xform)) {
|
||||
case AESNI_GCM_OP_AUTHENTICATED_ENCRYPTION:
|
||||
sess->op = AESNI_GCM_OP_AUTHENTICATED_ENCRYPTION;
|
||||
|
||||
cipher_xform = xform;
|
||||
auth_xform = xform->next;
|
||||
break;
|
||||
case AESNI_GCM_OP_AUTHENTICATED_DECRYPTION:
|
||||
sess->op = AESNI_GCM_OP_AUTHENTICATED_DECRYPTION;
|
||||
|
||||
auth_xform = xform;
|
||||
cipher_xform = xform->next;
|
||||
break;
|
||||
default:
|
||||
GCM_LOG_ERR("Unsupported operation chain order parameter");
|
||||
return -EINVAL;
|
||||
}
|
||||
|
||||
/* We only support AES GCM */
|
||||
if (cipher_xform->cipher.algo != RTE_CRYPTO_CIPHER_AES_GCM &&
|
||||
auth_xform->auth.algo != RTE_CRYPTO_AUTH_AES_GCM)
|
||||
return -EINVAL;
|
||||
|
||||
/* Select cipher direction */
|
||||
if (sess->op == AESNI_GCM_OP_AUTHENTICATED_ENCRYPTION &&
|
||||
cipher_xform->cipher.op !=
|
||||
RTE_CRYPTO_CIPHER_OP_ENCRYPT) {
|
||||
GCM_LOG_ERR("xform chain (CIPHER/AUTH) and cipher operation "
|
||||
"(DECRYPT) specified are an invalid selection");
|
||||
return -EINVAL;
|
||||
} else if (sess->op == AESNI_GCM_OP_AUTHENTICATED_DECRYPTION &&
|
||||
cipher_xform->cipher.op !=
|
||||
RTE_CRYPTO_CIPHER_OP_DECRYPT) {
|
||||
GCM_LOG_ERR("xform chain (AUTH/CIPHER) and cipher operation "
|
||||
"(ENCRYPT) specified are an invalid selection");
|
||||
return -EINVAL;
|
||||
}
|
||||
|
||||
/* Expand GCM AES128 key */
|
||||
(*gcm_ops->aux.keyexp.aes128_enc)(cipher_xform->cipher.key.data,
|
||||
sess->gdata.expanded_keys);
|
||||
|
||||
/* Calculate hash sub key here */
|
||||
aesni_gcm_calculate_hash_sub_key(hsubkey, sizeof(hsubkey),
|
||||
cipher_xform->cipher.key.data,
|
||||
cipher_xform->cipher.key.length);
|
||||
|
||||
/* Calculate GCM pre-compute */
|
||||
(*gcm_ops->gcm.precomp)(&sess->gdata, hsubkey);
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
/** Get gcm session */
|
||||
static struct aesni_gcm_session *
|
||||
aesni_gcm_get_session(struct aesni_gcm_qp *qp, struct rte_crypto_sym_op *op)
|
||||
{
|
||||
struct aesni_gcm_session *sess = NULL;
|
||||
|
||||
if (op->type == RTE_CRYPTO_SYM_OP_WITH_SESSION) {
|
||||
if (unlikely(op->session->type != RTE_CRYPTODEV_AESNI_GCM_PMD))
|
||||
return sess;
|
||||
|
||||
sess = (struct aesni_gcm_session *)op->session->_private;
|
||||
} else {
|
||||
void *_sess;
|
||||
|
||||
if (rte_mempool_get(qp->sess_mp, &_sess))
|
||||
return sess;
|
||||
|
||||
sess = (struct aesni_gcm_session *)
|
||||
((struct rte_cryptodev_session *)_sess)->_private;
|
||||
|
||||
if (unlikely(aesni_gcm_set_session_parameters(qp->ops,
|
||||
sess, op->xform) != 0)) {
|
||||
rte_mempool_put(qp->sess_mp, _sess);
|
||||
sess = NULL;
|
||||
}
|
||||
}
|
||||
return sess;
|
||||
}
|
||||
|
||||
/**
 * Process a GCM crypto operation using the GCM library functions, operating
 * in place on the source mbuf (or into m_dst when it is set).
 *
 * @param qp		queue pair
 * @param op		symmetric crypto operation
 * @param session	GCM session
 *
 * @return
 * - 0 on success
 * - negative value on failure
 */
|
||||
static int
|
||||
process_gcm_crypto_op(struct aesni_gcm_qp *qp, struct rte_crypto_sym_op *op,
|
||||
struct aesni_gcm_session *session)
|
||||
{
|
||||
uint8_t *src, *dst;
|
||||
struct rte_mbuf *m = op->m_src;
|
||||
|
||||
src = rte_pktmbuf_mtod(m, uint8_t *) + op->cipher.data.offset;
|
||||
dst = op->m_dst ?
|
||||
rte_pktmbuf_mtod_offset(op->m_dst, uint8_t *,
|
||||
op->cipher.data.offset) :
|
||||
rte_pktmbuf_mtod_offset(m, uint8_t *,
|
||||
op->cipher.data.offset);
|
||||
|
||||
/* sanity checks */
|
||||
if (op->cipher.iv.length != 16 && op->cipher.iv.length != 0) {
|
||||
GCM_LOG_ERR("iv");
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (op->auth.aad.length != 12 && op->auth.aad.length != 8 &&
|
||||
op->auth.aad.length != 0) {
|
||||
GCM_LOG_ERR("iv");
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (op->auth.digest.length != 16 &&
|
||||
op->auth.digest.length != 12 &&
|
||||
op->auth.digest.length != 8 &&
|
||||
op->auth.digest.length != 0) {
|
||||
GCM_LOG_ERR("iv");
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (session->op == AESNI_GCM_OP_AUTHENTICATED_ENCRYPTION) {
|
||||
|
||||
(*qp->ops->gcm.enc)(&session->gdata, dst, src,
|
||||
(uint64_t)op->cipher.data.length,
|
||||
op->cipher.iv.data,
|
||||
op->auth.aad.data,
|
||||
(uint64_t)op->auth.aad.length,
|
||||
op->auth.digest.data,
|
||||
(uint64_t)op->auth.digest.length);
|
||||
} else if (session->op == AESNI_GCM_OP_AUTHENTICATED_DECRYPTION) {
|
||||
uint8_t *auth_tag = (uint8_t *)rte_pktmbuf_append(m,
|
||||
op->auth.digest.length);
|
||||
|
||||
if (!auth_tag) {
|
||||
GCM_LOG_ERR("iv");
|
||||
return -1;
|
||||
}
|
||||
|
||||
(*qp->ops->gcm.dec)(&session->gdata, dst, src,
|
||||
(uint64_t)op->cipher.data.length,
|
||||
op->cipher.iv.data,
|
||||
op->auth.aad.data,
|
||||
(uint64_t)op->auth.aad.length,
|
||||
auth_tag,
|
||||
(uint64_t)op->auth.digest.length);
|
||||
} else {
|
||||
GCM_LOG_ERR("iv");
|
||||
return -1;
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
/**
 * Post-process a completed GCM crypto operation: on authenticated decryption,
 * compare the computed tag appended to the mbuf against the supplied digest,
 * set the operation status accordingly and trim the tag area from the mbuf.
 *
 * @param op	completed crypto operation
 */
|
||||
static void
|
||||
post_process_gcm_crypto_op(struct rte_crypto_op *op)
|
||||
{
|
||||
struct rte_mbuf *m = op->sym->m_dst ? op->sym->m_dst : op->sym->m_src;
|
||||
|
||||
struct aesni_gcm_session *session =
|
||||
(struct aesni_gcm_session *)op->sym->session->_private;
|
||||
|
||||
op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
|
||||
|
||||
/* Verify digest if required */
|
||||
if (session->op == AESNI_GCM_OP_AUTHENTICATED_DECRYPTION) {
|
||||
|
||||
uint8_t *tag = rte_pktmbuf_mtod_offset(m, uint8_t *,
|
||||
m->data_len - op->sym->auth.digest.length);
|
||||
|
||||
#ifdef RTE_LIBRTE_PMD_AESNI_GCM_DEBUG
|
||||
rte_hexdump(stdout, "auth tag (orig):",
|
||||
op->sym->auth.digest.data, op->sym->auth.digest.length);
|
||||
rte_hexdump(stdout, "auth tag (calc):",
|
||||
tag, op->sym->auth.digest.length);
|
||||
#endif
|
||||
|
||||
if (memcmp(tag, op->sym->auth.digest.data,
|
||||
op->sym->auth.digest.length) != 0)
|
||||
op->status = RTE_CRYPTO_OP_STATUS_AUTH_FAILED;
|
||||
|
||||
/* trim area used for digest from mbuf */
|
||||
rte_pktmbuf_trim(m, op->sym->auth.digest.length);
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Handle a completed GCM request: post-process the operation, release the
 * session of a session-less operation back to its mempool and enqueue the
 * operation on the processed-packets ring.
 *
 * @param qp	queue pair the operation was processed on
 * @param op	completed crypto operation
 */
|
||||
static void
|
||||
handle_completed_gcm_crypto_op(struct aesni_gcm_qp *qp,
|
||||
struct rte_crypto_op *op)
|
||||
{
|
||||
post_process_gcm_crypto_op(op);
|
||||
|
||||
/* Free session if a session-less crypto op */
|
||||
if (op->sym->type == RTE_CRYPTO_SYM_OP_SESSIONLESS) {
|
||||
rte_mempool_put(qp->sess_mp, op->sym->session);
|
||||
op->sym->session = NULL;
|
||||
}
|
||||
|
||||
rte_ring_enqueue(qp->processed_pkts, (void *)op);
|
||||
}
|
||||
|
||||
static uint16_t
aesni_gcm_pmd_enqueue_burst(void *queue_pair,
		struct rte_crypto_op **ops, uint16_t nb_ops)
{
	struct aesni_gcm_session *sess;
	struct aesni_gcm_qp *qp = queue_pair;

	int i, retval = 0;

	for (i = 0; i < nb_ops; i++) {

		sess = aesni_gcm_get_session(qp, ops[i]->sym);
		if (unlikely(sess == NULL)) {
			ops[i]->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
			qp->qp_stats.enqueue_err_count++;
			break;
		}

		retval = process_gcm_crypto_op(qp, ops[i]->sym, sess);
		if (retval < 0) {
			ops[i]->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
			qp->qp_stats.enqueue_err_count++;
			break;
		}

		handle_completed_gcm_crypto_op(qp, ops[i]);

		qp->qp_stats.enqueued_count++;
	}
	return i;
}

static uint16_t
aesni_gcm_pmd_dequeue_burst(void *queue_pair,
		struct rte_crypto_op **ops, uint16_t nb_ops)
{
	struct aesni_gcm_qp *qp = queue_pair;

	unsigned nb_dequeued;

	nb_dequeued = rte_ring_dequeue_burst(qp->processed_pkts,
			(void **)ops, nb_ops);
	qp->qp_stats.dequeued_count += nb_dequeued;

	return nb_dequeued;
}

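Usage note (illustrative, not part of this patch): the burst handlers above are reached through the generic cryptodev burst API; a minimal sketch, with dev_id and qp_id as placeholder values.

#include <rte_cryptodev.h>

/* Illustrative only: enqueue a burst of prepared GCM ops and poll for
 * the processed ops on the same queue pair. */
static void
run_gcm_burst(uint8_t dev_id, uint16_t qp_id,
		struct rte_crypto_op **ops, uint16_t nb_ops)
{
	uint16_t nb_enq, nb_deq = 0;

	/* dispatches to aesni_gcm_pmd_enqueue_burst() */
	nb_enq = rte_cryptodev_enqueue_burst(dev_id, qp_id, ops, nb_ops);

	/* drains the queue pair's processed ring via
	 * aesni_gcm_pmd_dequeue_burst() */
	while (nb_deq < nb_enq)
		nb_deq += rte_cryptodev_dequeue_burst(dev_id, qp_id,
				&ops[nb_deq], nb_enq - nb_deq);
}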
static int aesni_gcm_uninit(const char *name);

static int
aesni_gcm_create(const char *name,
		struct rte_crypto_vdev_init_params *init_params)
{
	struct rte_cryptodev *dev;
	char crypto_dev_name[RTE_CRYPTODEV_NAME_MAX_LEN];
	struct aesni_gcm_private *internals;
	enum aesni_gcm_vector_mode vector_mode;

	/* Check CPU for support for AES instruction set */
	if (!rte_cpu_get_flag_enabled(RTE_CPUFLAG_AES)) {
		GCM_LOG_ERR("AES instructions not supported by CPU");
		return -EFAULT;
	}

	/* Check CPU for supported vector instruction set */
	if (rte_cpu_get_flag_enabled(RTE_CPUFLAG_AVX2))
		vector_mode = RTE_AESNI_GCM_AVX2;
	else if (rte_cpu_get_flag_enabled(RTE_CPUFLAG_AVX))
		vector_mode = RTE_AESNI_GCM_AVX;
	else if (rte_cpu_get_flag_enabled(RTE_CPUFLAG_SSE4_1))
		vector_mode = RTE_AESNI_GCM_SSE;
	else {
		GCM_LOG_ERR("Vector instructions are not supported by CPU");
		return -EFAULT;
	}

	/* create a unique device name */
	if (create_unique_device_name(crypto_dev_name,
			RTE_CRYPTODEV_NAME_MAX_LEN) != 0) {
		GCM_LOG_ERR("failed to create unique cryptodev name");
		return -EINVAL;
	}

	dev = rte_cryptodev_pmd_virtual_dev_init(crypto_dev_name,
			sizeof(struct aesni_gcm_private), init_params->socket_id);
	if (dev == NULL) {
		GCM_LOG_ERR("failed to create cryptodev vdev");
		goto init_error;
	}

	dev->dev_type = RTE_CRYPTODEV_AESNI_GCM_PMD;
	dev->dev_ops = rte_aesni_gcm_pmd_ops;

	/* register rx/tx burst functions for data path */
	dev->dequeue_burst = aesni_gcm_pmd_dequeue_burst;
	dev->enqueue_burst = aesni_gcm_pmd_enqueue_burst;

	/* Set vector instructions mode supported */
	internals = dev->data->dev_private;

	internals->vector_mode = vector_mode;

	internals->max_nb_queue_pairs = init_params->max_nb_queue_pairs;
	internals->max_nb_sessions = init_params->max_nb_sessions;

	return 0;

init_error:
	GCM_LOG_ERR("driver %s: create failed", name);

	aesni_gcm_uninit(crypto_dev_name);
	return -EFAULT;
}

static int
aesni_gcm_init(const char *name, const char *input_args)
{
	struct rte_crypto_vdev_init_params init_params = {
		RTE_CRYPTODEV_VDEV_DEFAULT_MAX_NB_QUEUE_PAIRS,
		RTE_CRYPTODEV_VDEV_DEFAULT_MAX_NB_SESSIONS,
		rte_socket_id()
	};

	rte_cryptodev_parse_vdev_init_params(&init_params, input_args);

	RTE_LOG(INFO, PMD, "Initialising %s on NUMA node %d\n", name,
			init_params.socket_id);
	RTE_LOG(INFO, PMD, "  Max number of queue pairs = %d\n",
			init_params.max_nb_queue_pairs);
	RTE_LOG(INFO, PMD, "  Max number of sessions = %d\n",
			init_params.max_nb_sessions);

	return aesni_gcm_create(name, &init_params);
}

static int
aesni_gcm_uninit(const char *name)
{
	if (name == NULL)
		return -EINVAL;

	GCM_LOG_INFO("Closing AESNI crypto device %s on numa socket %u",
			name, rte_socket_id());

	return 0;
}

static struct rte_driver aesni_gcm_pmd_drv = {
	.name = CRYPTODEV_NAME_AESNI_GCM_PMD,
	.type = PMD_VDEV,
	.init = aesni_gcm_init,
	.uninit = aesni_gcm_uninit
};

PMD_REGISTER_DRIVER(aesni_gcm_pmd_drv);
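Usage note (illustrative, not part of this patch): once the driver is registered, an instance of the PMD can be created with the EAL --vdev option or programmatically; a minimal sketch.

#include <rte_dev.h>
#include <rte_cryptodev.h>

/* Illustrative only: instantiate the PMD from application code; passing
 * NULL args keeps the default max queue pair / session counts parsed by
 * rte_cryptodev_parse_vdev_init_params(). Equivalent to the EAL option
 * --vdev="cryptodev_aesni_gcm_pmd". */
static int
create_aesni_gcm_vdev(void)
{
	return rte_eal_vdev_init(CRYPTODEV_NAME_AESNI_GCM_PMD, NULL);
}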
drivers/crypto/aesni_gcm/aesni_gcm_pmd_ops.c (new file, 292 lines)
@ -0,0 +1,292 @@
/*-
 *   BSD LICENSE
 *
 *   Copyright(c) 2016 Intel Corporation. All rights reserved.
 *
 *   Redistribution and use in source and binary forms, with or without
 *   modification, are permitted provided that the following conditions
 *   are met:
 *
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in
 *       the documentation and/or other materials provided with the
 *       distribution.
 *     * Neither the name of Intel Corporation nor the names of its
 *       contributors may be used to endorse or promote products derived
 *       from this software without specific prior written permission.
 *
 *   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 *   "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 *   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 *   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 *   OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 *   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 *   LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 *   DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 *   THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 *   (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 *   OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include <string.h>

#include <rte_common.h>
#include <rte_malloc.h>
#include <rte_cryptodev_pmd.h>

#include "aesni_gcm_pmd_private.h"

/** Configure device */
static int
aesni_gcm_pmd_config(__rte_unused struct rte_cryptodev *dev)
{
	return 0;
}

/** Start device */
static int
aesni_gcm_pmd_start(__rte_unused struct rte_cryptodev *dev)
{
	return 0;
}

/** Stop device */
static void
aesni_gcm_pmd_stop(__rte_unused struct rte_cryptodev *dev)
{
}

/** Close device */
static int
aesni_gcm_pmd_close(__rte_unused struct rte_cryptodev *dev)
{
	return 0;
}


/** Get device statistics */
static void
aesni_gcm_pmd_stats_get(struct rte_cryptodev *dev,
		struct rte_cryptodev_stats *stats)
{
	int qp_id;

	for (qp_id = 0; qp_id < dev->data->nb_queue_pairs; qp_id++) {
		struct aesni_gcm_qp *qp = dev->data->queue_pairs[qp_id];

		stats->enqueued_count += qp->qp_stats.enqueued_count;
		stats->dequeued_count += qp->qp_stats.dequeued_count;

		stats->enqueue_err_count += qp->qp_stats.enqueue_err_count;
		stats->dequeue_err_count += qp->qp_stats.dequeue_err_count;
	}
}

/** Reset device statistics */
static void
aesni_gcm_pmd_stats_reset(struct rte_cryptodev *dev)
{
	int qp_id;

	for (qp_id = 0; qp_id < dev->data->nb_queue_pairs; qp_id++) {
		struct aesni_gcm_qp *qp = dev->data->queue_pairs[qp_id];

		memset(&qp->qp_stats, 0, sizeof(qp->qp_stats));
	}
}

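Usage note (illustrative, not part of this patch): the two callbacks above back the generic stats API; a minimal sketch, with dev_id as a placeholder.

#include <stdio.h>
#include <inttypes.h>
#include <rte_cryptodev.h>

/* Illustrative only: read and clear the per-queue-pair counters kept in
 * struct aesni_gcm_qp through the generic cryptodev stats API. */
static void
dump_and_reset_stats(uint8_t dev_id)
{
	struct rte_cryptodev_stats stats;

	rte_cryptodev_stats_get(dev_id, &stats);
	printf("enqueued %"PRIu64", dequeued %"PRIu64"\n",
			stats.enqueued_count, stats.dequeued_count);

	rte_cryptodev_stats_reset(dev_id);
}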
/** Get device info */
static void
aesni_gcm_pmd_info_get(struct rte_cryptodev *dev,
		struct rte_cryptodev_info *dev_info)
{
	struct aesni_gcm_private *internals = dev->data->dev_private;

	if (dev_info != NULL) {
		dev_info->dev_type = dev->dev_type;

		dev_info->max_nb_queue_pairs = internals->max_nb_queue_pairs;
		dev_info->sym.max_nb_sessions = internals->max_nb_sessions;
	}
}

/** Release queue pair */
static int
aesni_gcm_pmd_qp_release(struct rte_cryptodev *dev, uint16_t qp_id)
{
	if (dev->data->queue_pairs[qp_id] != NULL) {
		rte_free(dev->data->queue_pairs[qp_id]);
		dev->data->queue_pairs[qp_id] = NULL;
	}
	return 0;
}

/** Set a unique name for the queue pair based on the dev_id and qp_id */
static int
aesni_gcm_pmd_qp_set_unique_name(struct rte_cryptodev *dev,
		struct aesni_gcm_qp *qp)
{
	unsigned n = snprintf(qp->name, sizeof(qp->name),
			"aesni_gcm_pmd_%u_qp_%u",
			dev->data->dev_id, qp->id);

	/* snprintf returning sizeof(qp->name) or more means truncation */
	if (n >= sizeof(qp->name))
		return -1;

	return 0;
}

/** Create a ring to place processed packets on */
static struct rte_ring *
aesni_gcm_pmd_qp_create_processed_pkts_ring(struct aesni_gcm_qp *qp,
		unsigned ring_size, int socket_id)
{
	struct rte_ring *r;

	r = rte_ring_lookup(qp->name);
	if (r) {
		if (r->prod.size >= ring_size) {
			GCM_LOG_INFO("Reusing existing ring %s for processed"
					" packets", qp->name);
			return r;
		}

		GCM_LOG_ERR("Unable to reuse existing ring %s for processed"
				" packets", qp->name);
		return NULL;
	}

	return rte_ring_create(qp->name, ring_size, socket_id,
			RING_F_SP_ENQ | RING_F_SC_DEQ);
}

/** Setup a queue pair */
static int
aesni_gcm_pmd_qp_setup(struct rte_cryptodev *dev, uint16_t qp_id,
		const struct rte_cryptodev_qp_conf *qp_conf,
		int socket_id)
{
	struct aesni_gcm_qp *qp = NULL;
	struct aesni_gcm_private *internals = dev->data->dev_private;

	/* Free memory prior to re-allocation if needed. */
	if (dev->data->queue_pairs[qp_id] != NULL)
		aesni_gcm_pmd_qp_release(dev, qp_id);

	/* Allocate the queue pair data structure. */
	qp = rte_zmalloc_socket("AES-NI PMD Queue Pair", sizeof(*qp),
			RTE_CACHE_LINE_SIZE, socket_id);
	if (qp == NULL)
		return -ENOMEM;

	qp->id = qp_id;
	dev->data->queue_pairs[qp_id] = qp;

	if (aesni_gcm_pmd_qp_set_unique_name(dev, qp))
		goto qp_setup_cleanup;

	qp->ops = &gcm_ops[internals->vector_mode];

	qp->processed_pkts = aesni_gcm_pmd_qp_create_processed_pkts_ring(qp,
			qp_conf->nb_descriptors, socket_id);
	if (qp->processed_pkts == NULL)
		goto qp_setup_cleanup;

	qp->sess_mp = dev->data->session_pool;

	memset(&qp->qp_stats, 0, sizeof(qp->qp_stats));

	return 0;

qp_setup_cleanup:
	if (qp)
		rte_free(qp);

	return -1;
}

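Usage note (illustrative, not part of this patch): a minimal sketch of the application-side device and queue pair setup that ends up in aesni_gcm_pmd_qp_setup() above; the rte_cryptodev_config field names and the sizes used are assumptions made for the example.

#include <rte_cryptodev.h>

/* Illustrative only: configure the device, set up one queue pair (whose
 * nb_descriptors sizes the processed-pkts ring) and start the device. */
static int
setup_gcm_dev(uint8_t dev_id, int socket_id)
{
	struct rte_cryptodev_config conf = {
		.socket_id = socket_id,
		.nb_queue_pairs = 1,
		.session_mp = { .nb_objs = 128, .cache_size = 32 },
	};
	struct rte_cryptodev_qp_conf qp_conf = {
		.nb_descriptors = 2048,
	};

	if (rte_cryptodev_configure(dev_id, &conf) < 0)
		return -1;
	if (rte_cryptodev_queue_pair_setup(dev_id, 0, &qp_conf, socket_id) < 0)
		return -1;

	return rte_cryptodev_start(dev_id);
}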
/** Start queue pair */
static int
aesni_gcm_pmd_qp_start(__rte_unused struct rte_cryptodev *dev,
		__rte_unused uint16_t queue_pair_id)
{
	return -ENOTSUP;
}

/** Stop queue pair */
static int
aesni_gcm_pmd_qp_stop(__rte_unused struct rte_cryptodev *dev,
		__rte_unused uint16_t queue_pair_id)
{
	return -ENOTSUP;
}

/** Return the number of allocated queue pairs */
static uint32_t
aesni_gcm_pmd_qp_count(struct rte_cryptodev *dev)
{
	return dev->data->nb_queue_pairs;
}

/** Returns the size of the aesni gcm session structure */
static unsigned
aesni_gcm_pmd_session_get_size(struct rte_cryptodev *dev __rte_unused)
{
	return sizeof(struct aesni_gcm_session);
}

/** Configure an AES-NI GCM session from a crypto xform chain */
static void *
aesni_gcm_pmd_session_configure(struct rte_cryptodev *dev,
		struct rte_crypto_sym_xform *xform, void *sess)
{
	struct aesni_gcm_private *internals = dev->data->dev_private;

	if (unlikely(sess == NULL)) {
		GCM_LOG_ERR("invalid session struct");
		return NULL;
	}

	if (aesni_gcm_set_session_parameters(&gcm_ops[internals->vector_mode],
			sess, xform) != 0) {
		GCM_LOG_ERR("failed to configure session parameters");
		return NULL;
	}

	return sess;
}

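Usage note (illustrative, not part of this patch): this callback is reached through the generic session API; a minimal sketch, assuming a pre-built AES-GCM cipher/auth xform chain and the session-create signature of this release.

#include <rte_cryptodev.h>

/* Illustrative only: creating a session on dev_id invokes
 * aesni_gcm_pmd_session_configure(), which in turn calls
 * aesni_gcm_set_session_parameters() on the supplied xform chain. */
static struct rte_cryptodev_sym_session *
open_gcm_session(uint8_t dev_id, struct rte_crypto_sym_xform *gcm_xform)
{
	return rte_cryptodev_sym_session_create(dev_id, gcm_xform);
}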
/** Clear the session memory so it doesn't leave key material behind */
static void
aesni_gcm_pmd_session_clear(struct rte_cryptodev *dev __rte_unused, void *sess)
{
	if (sess)
		memset(sess, 0, sizeof(struct aesni_gcm_session));
}

struct rte_cryptodev_ops aesni_gcm_pmd_ops = {
	.dev_configure = aesni_gcm_pmd_config,
	.dev_start = aesni_gcm_pmd_start,
	.dev_stop = aesni_gcm_pmd_stop,
	.dev_close = aesni_gcm_pmd_close,

	.stats_get = aesni_gcm_pmd_stats_get,
	.stats_reset = aesni_gcm_pmd_stats_reset,

	.dev_infos_get = aesni_gcm_pmd_info_get,

	.queue_pair_setup = aesni_gcm_pmd_qp_setup,
	.queue_pair_release = aesni_gcm_pmd_qp_release,
	.queue_pair_start = aesni_gcm_pmd_qp_start,
	.queue_pair_stop = aesni_gcm_pmd_qp_stop,
	.queue_pair_count = aesni_gcm_pmd_qp_count,

	.session_get_size = aesni_gcm_pmd_session_get_size,
	.session_configure = aesni_gcm_pmd_session_configure,
	.session_clear = aesni_gcm_pmd_session_clear
};

struct rte_cryptodev_ops *rte_aesni_gcm_pmd_ops = &aesni_gcm_pmd_ops;
drivers/crypto/aesni_gcm/aesni_gcm_pmd_private.h (new file, 120 lines)
@ -0,0 +1,120 @@
/*-
 *   BSD LICENSE
 *
 *   Copyright(c) 2016 Intel Corporation. All rights reserved.
 *
 *   Redistribution and use in source and binary forms, with or without
 *   modification, are permitted provided that the following conditions
 *   are met:
 *
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in
 *       the documentation and/or other materials provided with the
 *       distribution.
 *     * Neither the name of Intel Corporation nor the names of its
 *       contributors may be used to endorse or promote products derived
 *       from this software without specific prior written permission.
 *
 *   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 *   "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 *   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 *   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 *   OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 *   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 *   LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 *   DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 *   THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 *   (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 *   OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef _RTE_AESNI_GCM_PMD_PRIVATE_H_
#define _RTE_AESNI_GCM_PMD_PRIVATE_H_

#include "aesni_gcm_ops.h"

#define GCM_LOG_ERR(fmt, args...) \
	RTE_LOG(ERR, CRYPTODEV, "[%s] %s() line %u: " fmt "\n", \
			CRYPTODEV_NAME_AESNI_GCM_PMD, \
			__func__, __LINE__, ## args)

#ifdef RTE_LIBRTE_PMD_AESNI_GCM_DEBUG
#define GCM_LOG_INFO(fmt, args...) \
	RTE_LOG(INFO, CRYPTODEV, "[%s] %s() line %u: " fmt "\n", \
			CRYPTODEV_NAME_AESNI_GCM_PMD, \
			__func__, __LINE__, ## args)

#define GCM_LOG_DBG(fmt, args...) \
	RTE_LOG(DEBUG, CRYPTODEV, "[%s] %s() line %u: " fmt "\n", \
			CRYPTODEV_NAME_AESNI_GCM_PMD, \
			__func__, __LINE__, ## args)
#else
#define GCM_LOG_INFO(fmt, args...)
#define GCM_LOG_DBG(fmt, args...)
#endif

/** private data structure for each virtual AESNI GCM device */
struct aesni_gcm_private {
	enum aesni_gcm_vector_mode vector_mode;
	/**< Vector mode */
	unsigned max_nb_queue_pairs;
	/**< Max number of queue pairs supported by device */
	unsigned max_nb_sessions;
	/**< Max number of sessions supported by device */
};

struct aesni_gcm_qp {
	uint16_t id;
	/**< Queue Pair Identifier */
	char name[RTE_CRYPTODEV_NAME_LEN];
	/**< Unique Queue Pair Name */
	const struct aesni_gcm_ops *ops;
	/**< Architecture dependent function pointer table of the gcm APIs */
	struct rte_ring *processed_pkts;
	/**< Ring for placing processed packets */
	struct rte_mempool *sess_mp;
	/**< Session Mempool */
	struct rte_cryptodev_stats qp_stats;
	/**< Queue pair statistics */
} __rte_cache_aligned;


enum aesni_gcm_operation {
	AESNI_GCM_OP_AUTHENTICATED_ENCRYPTION,
	AESNI_GCM_OP_AUTHENTICATED_DECRYPTION
};

/** AESNI GCM private session structure */
struct aesni_gcm_session {
	enum aesni_gcm_operation op;
	/**< GCM operation type */
	struct gcm_data gdata __rte_cache_aligned;
	/**< GCM parameters */
};

/**
 * Setup GCM session parameters
 *
 * @param	ops	gcm ops function pointer table
 * @param	sess	aesni gcm session structure
 * @param	xform	crypto transform chain
 *
 * @return
 * - On success returns 0
 * - On failure returns error code < 0
 */
extern int
aesni_gcm_set_session_parameters(const struct aesni_gcm_ops *ops,
		struct aesni_gcm_session *sess,
		const struct rte_crypto_sym_xform *xform);


/** Device specific operations function pointer structure */
extern struct rte_cryptodev_ops *rte_aesni_gcm_pmd_ops;


#endif /* _RTE_AESNI_GCM_PMD_PRIVATE_H_ */
drivers/crypto/aesni_gcm/rte_pmd_aesni_gcm_version.map (new file, 3 lines)
@ -0,0 +1,3 @@
DPDK_16.04 {
	local: *;
};
@ -56,6 +56,8 @@ extern "C" {
/**< Null crypto PMD device name */
#define CRYPTODEV_NAME_AESNI_MB_PMD	("cryptodev_aesni_mb_pmd")
/**< AES-NI Multi buffer PMD device name */
#define CRYPTODEV_NAME_AESNI_GCM_PMD	("cryptodev_aesni_gcm_pmd")
/**< AES-NI GCM PMD device name */
#define CRYPTODEV_NAME_QAT_SYM_PMD	("cryptodev_qat_sym_pmd")
/**< Intel QAT Symmetric Crypto PMD device name */
#define CRYPTODEV_NAME_SNOW3G_PMD	("cryptodev_snow3g_pmd")
@ -64,6 +66,7 @@ extern "C" {
/** Crypto device type */
enum rte_cryptodev_type {
	RTE_CRYPTODEV_NULL_PMD = 1,	/**< Null crypto PMD */
	RTE_CRYPTODEV_AESNI_GCM_PMD,	/**< AES-NI GCM PMD */
	RTE_CRYPTODEV_AESNI_MB_PMD,	/**< AES-NI multi buffer PMD */
	RTE_CRYPTODEV_QAT_SYM_PMD,	/**< QAT PMD Symmetric Crypto */
	RTE_CRYPTODEV_SNOW3G_PMD,	/**< SNOW 3G PMD */
@ -102,8 +102,13 @@ _LDLIBS-$(CONFIG_RTE_LIBRTE_MLX5_PMD) += -libverbs
_LDLIBS-$(CONFIG_RTE_LIBRTE_PMD_SZEDATA2) += -lsze2
_LDLIBS-$(CONFIG_RTE_LIBRTE_PMD_XENVIRT) += -lxenstore
_LDLIBS-$(CONFIG_RTE_LIBRTE_MPIPE_PMD) += -lgxio
# QAT PMD has a dependency on libcrypto (from openssl) for calculating HMAC precomputes
_LDLIBS-$(CONFIG_RTE_LIBRTE_PMD_QAT) += -lcrypto
# QAT / AESNI GCM PMDs are dependent on libcrypto (from openssl)
# for calculating HMAC precomputes
ifeq ($(CONFIG_RTE_LIBRTE_PMD_QAT),y)
_LDLIBS-y += -lcrypto
else ifeq ($(CONFIG_RTE_LIBRTE_PMD_AESNI_GCM),y)
_LDLIBS-y += -lcrypto
endif
endif # !CONFIG_RTE_BUILD_SHARED_LIBS

_LDLIBS-y += --start-group
@ -145,9 +150,15 @@ _LDLIBS-$(CONFIG_RTE_LIBRTE_PMD_AF_PACKET) += -lrte_pmd_af_packet
_LDLIBS-$(CONFIG_RTE_LIBRTE_PMD_NULL) += -lrte_pmd_null
_LDLIBS-$(CONFIG_RTE_LIBRTE_PMD_QAT) += -lrte_pmd_qat

# AESNI MULTI BUFFER is dependent on the IPSec_MB library
_LDLIBS-$(CONFIG_RTE_LIBRTE_PMD_AESNI_MB) += -lrte_pmd_aesni_mb
_LDLIBS-$(CONFIG_RTE_LIBRTE_PMD_AESNI_MB) += -L$(AESNI_MULTI_BUFFER_LIB_PATH) -lIPSec_MB
_LDLIBS-$(CONFIG_RTE_LIBRTE_PMD_AESNI_GCM) += -lrte_pmd_aesni_gcm

# AESNI MULTI BUFFER / GCM PMDs are dependent on the IPSec_MB library
ifeq ($(CONFIG_RTE_LIBRTE_PMD_AESNI_MB),y)
_LDLIBS-y += -L$(AESNI_MULTI_BUFFER_LIB_PATH) -lIPSec_MB
else ifeq ($(CONFIG_RTE_LIBRTE_PMD_AESNI_GCM),y)
_LDLIBS-y += -L$(AESNI_MULTI_BUFFER_LIB_PATH) -lIPSec_MB
endif

# SNOW3G PMD is dependent on the LIBSSO library
_LDLIBS-$(CONFIG_RTE_LIBRTE_PMD_SNOW3G) += -lrte_pmd_snow3g
@ -31,6 +31,7 @@
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

# Load config options:
# - AESNI_MULTI_BUFFER_LIB_PATH
# - DPDK_BUILD_TEST_CONFIGS (target1+option1+option2 target2)
# - DPDK_DEP_CFLAGS
# - DPDK_DEP_LDFLAGS
@ -118,8 +119,11 @@ config () # <directory> <target> <options>
	test "$DPDK_DEP_PCAP" != y || \
		sed -ri 's,(PCAP=)n,\1y,' $1/.config
	test -z "$AESNI_MULTI_BUFFER_LIB_PATH" || \
	echo $2 | grep -q '^i686' || \
	! echo $2 | grep -q '^x86_64' || \
		sed -ri 's,(PMD_AESNI_MB=)n,\1y,' $1/.config
	test -z "$AESNI_MULTI_BUFFER_LIB_PATH" || \
	! echo $2 | grep -q '^x86_64' || \
		sed -ri 's,(PMD_AESNI_GCM=)n,\1y,' $1/.config
	test "$DPDK_DEP_SSL" != y || \
		sed -ri 's,(PMD_QAT=)n,\1y,' $1/.config
	sed -ri 's,(KNI_VHOST.*=)n,\1y,' $1/.config