crypto/cnxk: support AES-CMAC

Add support for AES CMAC auth algorithm.

Signed-off-by: Anoob Joseph <anoobj@marvell.com>
Acked-by: Akhil Goyal <gakhil@marvell.com>
This commit is contained in:
Anoob Joseph 2021-12-17 14:50:08 +05:30 committed by Akhil Goyal
parent 705fe0bd09
commit 759b5e6535
7 changed files with 103 additions and 61 deletions

View File

@@ -61,6 +61,7 @@ Hash algorithms:
 * ``RTE_CRYPTO_AUTH_SHA512_HMAC``
 * ``RTE_CRYPTO_AUTH_SNOW3G_UIA2``
 * ``RTE_CRYPTO_AUTH_ZUC_EIA3``
+* ``RTE_CRYPTO_AUTH_AES_CMAC``
 
 AEAD algorithms:

View File

@@ -58,6 +58,9 @@ SHA512 = Y
 SHA512 HMAC = Y
 SNOW3G UIA2 = Y
 ZUC EIA3 = Y
+AES CMAC (128) = Y
+AES CMAC (192) = Y
+AES CMAC (256) = Y
 ;
 ; Supported AEAD algorithms of 'cn10k' crypto driver.

View File

@@ -57,6 +57,9 @@ SHA512 = Y
 SHA512 HMAC = Y
 SNOW3G UIA2 = Y
 ZUC EIA3 = Y
+AES CMAC (128) = Y
+AES CMAC (192) = Y
+AES CMAC (256) = Y
 ;
 ; Supported AEAD algorithms of 'cn9k' crypto driver.

View File

@@ -63,6 +63,7 @@ New Features
   * Added AES-CTR support in lookaside protocol (IPsec) for CN9K & CN10K.
   * Added NULL cipher support in lookaside protocol (IPsec) for CN9K & CN10K.
   * Added AES-XCBC support in lookaside protocol (IPsec) for CN9K & CN10K.
+  * Added AES-CMAC support in CN9K & CN10K.
 
 * **Added an API to retrieve event port id of ethdev Rx adapter.**

View File

@@ -13,7 +13,7 @@
 #define ROC_SE_MAJOR_OP_HASH	   0x34
 #define ROC_SE_MAJOR_OP_HMAC	   0x35
-#define ROC_SE_MAJOR_OP_ZUC_SNOW3G 0x37
+#define ROC_SE_MAJOR_OP_PDCP	   0x37
 #define ROC_SE_MAJOR_OP_KASUMI	   0x38
 #define ROC_SE_MAJOR_OP_MISC	   0x01

View File

@@ -568,6 +568,26 @@ static const struct rte_cryptodev_capabilities caps_aes[] = {
 			}, }
 		}, }
 	},
+	{	/* AES CMAC */
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+		{.sym = {
+			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			{.auth = {
+				.algo = RTE_CRYPTO_AUTH_AES_CMAC,
+				.block_size = 16,
+				.key_size = {
+					.min = 16,
+					.max = 32,
+					.increment = 8
+				},
+				.digest_size = {
+					.min = 4,
+					.max = 4,
+					.increment = 0
+				},
+			}, }
+		}, }
+	},
 };
 
 static const struct rte_cryptodev_capabilities caps_kasumi[] = {

View File

@@ -73,11 +73,15 @@ pdcp_iv_copy(uint8_t *iv_d, uint8_t *iv_s, const uint8_t pdcp_alg_type,
 		for (j = 0; j < 4; j++)
 			iv_temp[j] = iv_s_temp[3 - j];
 		memcpy(iv_d, iv_temp, 16);
-	} else {
+	} else if (pdcp_alg_type == ROC_SE_PDCP_ALG_TYPE_ZUC) {
 		/* ZUC doesn't need a swap */
 		memcpy(iv_d, iv_s, 16);
 		if (pack_iv)
 			cpt_pack_iv(iv_s, iv_d);
+	} else {
+		/* AES-CMAC EIA2, microcode expects 16B zeroized IV */
+		for (j = 0; j < 4; j++)
+			iv_d[j] = 0;
 	}
 }
@@ -992,7 +996,7 @@ cpt_dec_hmac_prep(uint32_t flags, uint64_t d_offs, uint64_t d_lens,
 }
 
 static __rte_always_inline int
-cpt_zuc_snow3g_prep(uint32_t req_flags, uint64_t d_offs, uint64_t d_lens,
-		    struct roc_se_fc_params *params, struct cpt_inst_s *inst)
+cpt_pdcp_alg_prep(uint32_t req_flags, uint64_t d_offs, uint64_t d_lens,
+		  struct roc_se_fc_params *params, struct cpt_inst_s *inst)
 {
 	uint32_t size;
@@ -1014,12 +1018,20 @@ cpt_zuc_snow3g_prep(uint32_t req_flags, uint64_t d_offs, uint64_t d_lens,
 	mac_len = se_ctx->mac_len;
 	pdcp_alg_type = se_ctx->pdcp_alg_type;
 
-	cpt_inst_w4.s.opcode_major = ROC_SE_MAJOR_OP_ZUC_SNOW3G;
+	cpt_inst_w4.s.opcode_major = ROC_SE_MAJOR_OP_PDCP;
 	cpt_inst_w4.s.opcode_minor = se_ctx->template_w4.s.opcode_minor;
 
 	if (flags == 0x1) {
 		iv_s = params->auth_iv_buf;
 
+		/*
+		 * Microcode expects offsets in bytes
+		 * TODO: Rounding off
+		 */
+		auth_data_len = ROC_SE_AUTH_DLEN(d_lens);
+		auth_offset = ROC_SE_AUTH_OFFSET(d_offs);
+
+		if (se_ctx->pdcp_alg_type != ROC_SE_PDCP_ALG_TYPE_AES_CTR) {
 			iv_len = params->auth_iv_len;
 
 			if (iv_len == 25) {
@@ -1027,20 +1039,22 @@ cpt_zuc_snow3g_prep(uint32_t req_flags, uint64_t d_offs, uint64_t d_lens,
 				pack_iv = 1;
 			}
 
-		/*
-		 * Microcode expects offsets in bytes
-		 * TODO: Rounding off
-		 */
-		auth_data_len = ROC_SE_AUTH_DLEN(d_lens);
-
-		/* EIA3 or UIA2 */
-		auth_offset = ROC_SE_AUTH_OFFSET(d_offs);
-		auth_offset = auth_offset / 8;
+			auth_offset = auth_offset / 8;
 
-		/* consider iv len */
-		auth_offset += iv_len;
+			/* consider iv len */
+			auth_offset += iv_len;
 
-		inputlen = auth_offset + (RTE_ALIGN(auth_data_len, 8) / 8);
+			inputlen =
+				auth_offset + (RTE_ALIGN(auth_data_len, 8) / 8);
+		} else {
+			iv_len = 16;
+
+			/* consider iv len */
+			auth_offset += iv_len;
+
+			inputlen = auth_offset + auth_data_len;
+		}
 
 		outputlen = mac_len;
 		offset_ctrl = rte_cpu_to_be_64((uint64_t)auth_offset);
@@ -1056,7 +1070,6 @@ cpt_zuc_snow3g_prep(uint32_t req_flags, uint64_t d_offs, uint64_t d_lens,
 			pack_iv = 1;
 		}
 
-		/* EEA3 or UEA2 */
 		/*
 		 * Microcode expects offsets in bytes
 		 * TODO: Rounding off
@@ -1589,8 +1602,7 @@ cpt_fc_dec_hmac_prep(uint32_t flags, uint64_t d_offs, uint64_t d_lens,
 	if (likely(fc_type == ROC_SE_FC_GEN)) {
 		ret = cpt_dec_hmac_prep(flags, d_offs, d_lens, fc_params, inst);
 	} else if (fc_type == ROC_SE_PDCP) {
-		ret = cpt_zuc_snow3g_prep(flags, d_offs, d_lens, fc_params,
-					  inst);
+		ret = cpt_pdcp_alg_prep(flags, d_offs, d_lens, fc_params, inst);
 	} else if (fc_type == ROC_SE_KASUMI) {
 		ret = cpt_kasumi_dec_prep(d_offs, d_lens, fc_params, inst);
 	}
@@ -1618,8 +1630,7 @@ cpt_fc_enc_hmac_prep(uint32_t flags, uint64_t d_offs, uint64_t d_lens,
 	if (likely(fc_type == ROC_SE_FC_GEN)) {
 		ret = cpt_enc_hmac_prep(flags, d_offs, d_lens, fc_params, inst);
 	} else if (fc_type == ROC_SE_PDCP) {
-		ret = cpt_zuc_snow3g_prep(flags, d_offs, d_lens, fc_params,
-					  inst);
+		ret = cpt_pdcp_alg_prep(flags, d_offs, d_lens, fc_params, inst);
 	} else if (fc_type == ROC_SE_KASUMI) {
 		ret = cpt_kasumi_enc_prep(flags, d_offs, d_lens, fc_params,
 					  inst);
@@ -1883,8 +1894,11 @@ fill_sess_auth(struct rte_crypto_sym_xform *xform, struct cnxk_se_sess *sess)
 		auth_type = 0;
 		is_null = 1;
 		break;
-	case RTE_CRYPTO_AUTH_AES_XCBC_MAC:
 	case RTE_CRYPTO_AUTH_AES_CMAC:
+		auth_type = ROC_SE_AES_CMAC_EIA2;
+		zsk_flag = ROC_SE_ZS_IA;
+		break;
+	case RTE_CRYPTO_AUTH_AES_XCBC_MAC:
 	case RTE_CRYPTO_AUTH_AES_CBC_MAC:
 		plt_dp_err("Crypto: Unsupported hash algo %u", a_form->algo);
 		return -1;