numam-dpdk/drivers/crypto/octeontx2/otx2_cryptodev_capabilities.c
Tejasree Kondoj ae9000ada9 crypto/octeontx2: add security capabilities
This patch adds lookaside IPsec capabilities for rte_security.

Signed-off-by: Vamsi Attunuru <vattunuru@marvell.com>
Signed-off-by: Tejasree Kondoj <ktejasree@marvell.com>
Acked-by: Anoob Joseph <anoobj@marvell.com>
Acked-by: Akhil Goyal <akhil.goyal@nxp.com>
2020-07-18 23:09:02 +02:00
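For reference, the security capability table defined in this file is what an application sees through the standard rte_security query path. The fragment below is a minimal sketch of such a lookup and is not part of the driver; the helper name, the device id, and the assumption of an already-probed OCTEON TX2 cryptodev are illustrative only.

/*
 * Illustrative sketch (not part of the upstream file): check whether a
 * cryptodev advertises lookaside IPsec ESP support via rte_security.
 */
#include <stdbool.h>
#include <rte_cryptodev.h>
#include <rte_security.h>

static bool
lookaside_esp_supported(uint8_t cdev_id)
{
	struct rte_security_ctx *ctx = rte_cryptodev_get_sec_ctx(cdev_id);
	const struct rte_security_capability *cap;

	if (ctx == NULL)
		return false;

	/* The driver terminates its list with RTE_SECURITY_ACTION_TYPE_NONE. */
	for (cap = rte_security_capabilities_get(ctx);
	     cap != NULL && cap->action != RTE_SECURITY_ACTION_TYPE_NONE;
	     cap++) {
		if (cap->action == RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL &&
		    cap->protocol == RTE_SECURITY_PROTOCOL_IPSEC &&
		    cap->ipsec.proto == RTE_SECURITY_IPSEC_SA_PROTO_ESP)
			return true;
	}

	return false;
}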


/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright (C) 2019 Marvell International Ltd.
 */

#include <rte_cryptodev.h>
#include <rte_security.h>

#include "otx2_cryptodev.h"
#include "otx2_cryptodev_capabilities.h"
#include "otx2_mbox.h"

#define CPT_EGRP_GET(hw_caps, name, egrp) do {	\
	if ((hw_caps[CPT_ENG_TYPE_SE].name) &&	\
	    (hw_caps[CPT_ENG_TYPE_IE].name))	\
		*egrp = OTX2_CPT_EGRP_SE_IE;	\
	else if (hw_caps[CPT_ENG_TYPE_SE].name)	\
		*egrp = OTX2_CPT_EGRP_SE;	\
	else if (hw_caps[CPT_ENG_TYPE_AE].name)	\
		*egrp = OTX2_CPT_EGRP_AE;	\
	else					\
		*egrp = OTX2_CPT_EGRP_MAX;	\
} while (0)

#define CPT_CAPS_ADD(hw_caps, name) do {	\
	enum otx2_cpt_egrp egrp;		\
	CPT_EGRP_GET(hw_caps, name, &egrp);	\
	if (egrp < OTX2_CPT_EGRP_MAX)		\
		cpt_caps_add(caps_##name, RTE_DIM(caps_##name));	\
} while (0)

#define SEC_CAPS_ADD(hw_caps, name) do {	\
	enum otx2_cpt_egrp egrp;		\
	CPT_EGRP_GET(hw_caps, name, &egrp);	\
	if (egrp < OTX2_CPT_EGRP_MAX)		\
		sec_caps_add(sec_caps_##name, RTE_DIM(sec_caps_##name));\
} while (0)

#define OTX2_CPT_MAX_CAPS 34
#define OTX2_SEC_MAX_CAPS 4

static struct rte_cryptodev_capabilities otx2_cpt_caps[OTX2_CPT_MAX_CAPS];
static struct rte_cryptodev_capabilities otx2_cpt_sec_caps[OTX2_SEC_MAX_CAPS];

static const struct rte_cryptodev_capabilities caps_mul[] = {
{ /* RSA */
.op = RTE_CRYPTO_OP_TYPE_ASYMMETRIC,
{.asym = {
.xform_capa = {
.xform_type = RTE_CRYPTO_ASYM_XFORM_RSA,
.op_types = ((1 << RTE_CRYPTO_ASYM_OP_SIGN) |
(1 << RTE_CRYPTO_ASYM_OP_VERIFY) |
(1 << RTE_CRYPTO_ASYM_OP_ENCRYPT) |
(1 << RTE_CRYPTO_ASYM_OP_DECRYPT)),
{.modlen = {
.min = 17,
.max = 1024,
.increment = 1
}, }
}
}, }
},
{ /* MOD_EXP */
.op = RTE_CRYPTO_OP_TYPE_ASYMMETRIC,
{.asym = {
.xform_capa = {
.xform_type = RTE_CRYPTO_ASYM_XFORM_MODEX,
.op_types = 0,
{.modlen = {
.min = 17,
.max = 1024,
.increment = 1
}, }
}
}, }
},
{ /* ECDSA */
.op = RTE_CRYPTO_OP_TYPE_ASYMMETRIC,
{.asym = {
.xform_capa = {
.xform_type = RTE_CRYPTO_ASYM_XFORM_ECDSA,
.op_types = ((1 << RTE_CRYPTO_ASYM_OP_SIGN) |
(1 << RTE_CRYPTO_ASYM_OP_VERIFY)),
}
},
}
},
{ /* ECPM */
.op = RTE_CRYPTO_OP_TYPE_ASYMMETRIC,
{.asym = {
.xform_capa = {
.xform_type = RTE_CRYPTO_ASYM_XFORM_ECPM,
.op_types = 0
}
},
}
},
};
static const struct rte_cryptodev_capabilities caps_sha1_sha2[] = {
{ /* SHA1 */
.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
{.sym = {
.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
{.auth = {
.algo = RTE_CRYPTO_AUTH_SHA1,
.block_size = 64,
.key_size = {
.min = 0,
.max = 0,
.increment = 0
},
.digest_size = {
.min = 20,
.max = 20,
.increment = 0
},
}, }
}, }
},
{ /* SHA1 HMAC */
.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
{.sym = {
.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
{.auth = {
.algo = RTE_CRYPTO_AUTH_SHA1_HMAC,
.block_size = 64,
.key_size = {
.min = 1,
.max = 1024,
.increment = 1
},
.digest_size = {
.min = 12,
.max = 20,
.increment = 8
},
}, }
}, }
},
{ /* SHA224 */
.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
{.sym = {
.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
{.auth = {
.algo = RTE_CRYPTO_AUTH_SHA224,
.block_size = 64,
.key_size = {
.min = 0,
.max = 0,
.increment = 0
},
.digest_size = {
.min = 28,
.max = 28,
.increment = 0
},
}, }
}, }
},
{ /* SHA224 HMAC */
.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
{.sym = {
.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
{.auth = {
.algo = RTE_CRYPTO_AUTH_SHA224_HMAC,
.block_size = 64,
.key_size = {
.min = 1,
.max = 1024,
.increment = 1
},
.digest_size = {
.min = 28,
.max = 28,
.increment = 0
},
}, }
}, }
},
{ /* SHA256 */
.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
{.sym = {
.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
{.auth = {
.algo = RTE_CRYPTO_AUTH_SHA256,
.block_size = 64,
.key_size = {
.min = 0,
.max = 0,
.increment = 0
},
.digest_size = {
.min = 32,
.max = 32,
.increment = 0
},
}, }
}, }
},
{ /* SHA256 HMAC */
.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
{.sym = {
.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
{.auth = {
.algo = RTE_CRYPTO_AUTH_SHA256_HMAC,
.block_size = 64,
.key_size = {
.min = 1,
.max = 1024,
.increment = 1
},
.digest_size = {
.min = 16,
.max = 32,
.increment = 16
},
}, }
}, }
},
{ /* SHA384 */
.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
{.sym = {
.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
{.auth = {
.algo = RTE_CRYPTO_AUTH_SHA384,
.block_size = 64,
.key_size = {
.min = 0,
.max = 0,
.increment = 0
},
.digest_size = {
.min = 48,
.max = 48,
.increment = 0
},
}, }
}, }
},
{ /* SHA384 HMAC */
.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
{.sym = {
.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
{.auth = {
.algo = RTE_CRYPTO_AUTH_SHA384_HMAC,
.block_size = 64,
.key_size = {
.min = 1,
.max = 1024,
.increment = 1
},
.digest_size = {
.min = 24,
.max = 48,
.increment = 24
},
}, }
}, }
},
{ /* SHA512 */
.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
{.sym = {
.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
{.auth = {
.algo = RTE_CRYPTO_AUTH_SHA512,
.block_size = 128,
.key_size = {
.min = 0,
.max = 0,
.increment = 0
},
.digest_size = {
.min = 64,
.max = 64,
.increment = 0
},
}, }
}, }
},
{ /* SHA512 HMAC */
.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
{.sym = {
.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
{.auth = {
.algo = RTE_CRYPTO_AUTH_SHA512_HMAC,
.block_size = 128,
.key_size = {
.min = 1,
.max = 1024,
.increment = 1
},
.digest_size = {
.min = 32,
.max = 64,
.increment = 32
},
}, }
}, }
},
{ /* MD5 */
.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
{.sym = {
.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
{.auth = {
.algo = RTE_CRYPTO_AUTH_MD5,
.block_size = 64,
.key_size = {
.min = 0,
.max = 0,
.increment = 0
},
.digest_size = {
.min = 16,
.max = 16,
.increment = 0
},
}, }
}, }
},
{ /* MD5 HMAC */
.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
{.sym = {
.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
{.auth = {
.algo = RTE_CRYPTO_AUTH_MD5_HMAC,
.block_size = 64,
.key_size = {
.min = 8,
.max = 64,
.increment = 8
},
.digest_size = {
.min = 12,
.max = 16,
.increment = 4
},
}, }
}, }
},
};
static const struct rte_cryptodev_capabilities caps_chacha20[] = {
{ /* Chacha20-Poly1305 */
.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
{.sym = {
.xform_type = RTE_CRYPTO_SYM_XFORM_AEAD,
{.aead = {
.algo = RTE_CRYPTO_AEAD_CHACHA20_POLY1305,
.block_size = 64,
.key_size = {
.min = 32,
.max = 32,
.increment = 0
},
.digest_size = {
.min = 16,
.max = 16,
.increment = 0
},
.aad_size = {
.min = 0,
.max = 1024,
.increment = 1
},
.iv_size = {
.min = 12,
.max = 12,
.increment = 0
},
}, }
}, }
}
};
static const struct rte_cryptodev_capabilities caps_zuc_snow3g[] = {
{ /* SNOW 3G (UEA2) */
.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
{.sym = {
.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
{.cipher = {
.algo = RTE_CRYPTO_CIPHER_SNOW3G_UEA2,
.block_size = 16,
.key_size = {
.min = 16,
.max = 16,
.increment = 0
},
.iv_size = {
.min = 16,
.max = 16,
.increment = 0
}
}, }
}, }
},
{ /* ZUC (EEA3) */
.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
{.sym = {
.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
{.cipher = {
.algo = RTE_CRYPTO_CIPHER_ZUC_EEA3,
.block_size = 16,
.key_size = {
.min = 16,
.max = 16,
.increment = 0
},
.iv_size = {
.min = 16,
.max = 16,
.increment = 0
}
}, }
}, }
},
{ /* SNOW 3G (UIA2) */
.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
{.sym = {
.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
{.auth = {
.algo = RTE_CRYPTO_AUTH_SNOW3G_UIA2,
.block_size = 16,
.key_size = {
.min = 16,
.max = 16,
.increment = 0
},
.digest_size = {
.min = 4,
.max = 4,
.increment = 0
},
.iv_size = {
.min = 16,
.max = 16,
.increment = 0
}
}, }
}, }
},
{ /* ZUC (EIA3) */
.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
{.sym = {
.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
{.auth = {
.algo = RTE_CRYPTO_AUTH_ZUC_EIA3,
.block_size = 16,
.key_size = {
.min = 16,
.max = 16,
.increment = 0
},
.digest_size = {
.min = 4,
.max = 4,
.increment = 0
},
.iv_size = {
.min = 16,
.max = 16,
.increment = 0
}
}, }
}, }
},
};
static const struct rte_cryptodev_capabilities caps_aes[] = {
{ /* AES GMAC (AUTH) */
.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
{.sym = {
.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
{.auth = {
.algo = RTE_CRYPTO_AUTH_AES_GMAC,
.block_size = 16,
.key_size = {
.min = 16,
.max = 32,
.increment = 8
},
.digest_size = {
.min = 8,
.max = 16,
.increment = 4
},
.iv_size = {
.min = 12,
.max = 12,
.increment = 0
}
}, }
}, }
},
{ /* AES CBC */
.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
{.sym = {
.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
{.cipher = {
.algo = RTE_CRYPTO_CIPHER_AES_CBC,
.block_size = 16,
.key_size = {
.min = 16,
.max = 32,
.increment = 8
},
.iv_size = {
.min = 16,
.max = 16,
.increment = 0
}
}, }
}, }
},
{ /* AES CTR */
.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
{.sym = {
.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
{.cipher = {
.algo = RTE_CRYPTO_CIPHER_AES_CTR,
.block_size = 16,
.key_size = {
.min = 16,
.max = 32,
.increment = 8
},
.iv_size = {
.min = 12,
.max = 16,
.increment = 4
}
}, }
}, }
},
{ /* AES XTS */
.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
{.sym = {
.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
{.cipher = {
.algo = RTE_CRYPTO_CIPHER_AES_XTS,
.block_size = 16,
.key_size = {
.min = 32,
.max = 64,
.increment = 0
},
.iv_size = {
.min = 16,
.max = 16,
.increment = 0
}
}, }
}, }
},
{ /* AES GCM */
.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
{.sym = {
.xform_type = RTE_CRYPTO_SYM_XFORM_AEAD,
{.aead = {
.algo = RTE_CRYPTO_AEAD_AES_GCM,
.block_size = 16,
.key_size = {
.min = 16,
.max = 32,
.increment = 8
},
.digest_size = {
.min = 4,
.max = 16,
.increment = 1
},
.aad_size = {
.min = 0,
.max = 1024,
.increment = 1
},
.iv_size = {
.min = 12,
.max = 12,
.increment = 0
}
}, }
}, }
},
};
static const struct rte_cryptodev_capabilities caps_kasumi[] = {
{ /* KASUMI (F8) */
.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
{.sym = {
.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
{.cipher = {
.algo = RTE_CRYPTO_CIPHER_KASUMI_F8,
.block_size = 8,
.key_size = {
.min = 16,
.max = 16,
.increment = 0
},
.iv_size = {
.min = 8,
.max = 8,
.increment = 0
}
}, }
}, }
},
{ /* KASUMI (F9) */
.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
{.sym = {
.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
{.auth = {
.algo = RTE_CRYPTO_AUTH_KASUMI_F9,
.block_size = 8,
.key_size = {
.min = 16,
.max = 16,
.increment = 0
},
.digest_size = {
.min = 4,
.max = 4,
.increment = 0
},
}, }
}, }
},
};
static const struct rte_cryptodev_capabilities caps_des[] = {
{ /* 3DES CBC */
.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
{.sym = {
.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
{.cipher = {
.algo = RTE_CRYPTO_CIPHER_3DES_CBC,
.block_size = 8,
.key_size = {
.min = 24,
.max = 24,
.increment = 0
},
.iv_size = {
.min = 8,
.max = 16,
.increment = 8
}
}, }
}, }
},
{ /* 3DES ECB */
.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
{.sym = {
.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
{.cipher = {
.algo = RTE_CRYPTO_CIPHER_3DES_ECB,
.block_size = 8,
.key_size = {
.min = 24,
.max = 24,
.increment = 0
},
.iv_size = {
.min = 0,
.max = 0,
.increment = 0
}
}, }
}, }
},
{ /* DES CBC */
.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
{.sym = {
.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
{.cipher = {
.algo = RTE_CRYPTO_CIPHER_DES_CBC,
.block_size = 8,
.key_size = {
.min = 8,
.max = 8,
.increment = 0
},
.iv_size = {
.min = 8,
.max = 8,
.increment = 0
}
}, }
}, }
},
};
static const struct rte_cryptodev_capabilities caps_null[] = {
{ /* NULL (AUTH) */
.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
{.sym = {
.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
{.auth = {
.algo = RTE_CRYPTO_AUTH_NULL,
.block_size = 1,
.key_size = {
.min = 0,
.max = 0,
.increment = 0
},
.digest_size = {
.min = 0,
.max = 0,
.increment = 0
},
}, },
}, },
},
{ /* NULL (CIPHER) */
.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
{.sym = {
.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
{.cipher = {
.algo = RTE_CRYPTO_CIPHER_NULL,
.block_size = 1,
.key_size = {
.min = 0,
.max = 0,
.increment = 0
},
.iv_size = {
.min = 0,
.max = 0,
.increment = 0
}
}, },
}, }
},
};
static const struct rte_cryptodev_capabilities caps_end[] = {
RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
};
static const struct rte_cryptodev_capabilities sec_caps_aes[] = {
{ /* AES GCM */
.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
{.sym = {
.xform_type = RTE_CRYPTO_SYM_XFORM_AEAD,
{.aead = {
.algo = RTE_CRYPTO_AEAD_AES_GCM,
.block_size = 16,
.key_size = {
.min = 16,
.max = 32,
.increment = 8
},
.digest_size = {
.min = 16,
.max = 16,
.increment = 0
},
.aad_size = {
.min = 8,
.max = 12,
.increment = 4
},
.iv_size = {
.min = 12,
.max = 12,
.increment = 0
}
}, }
}, }
},
};
static const struct rte_security_capability
otx2_crypto_sec_capabilities[] = {
{ /* IPsec Lookaside Protocol ESP Tunnel Ingress */
.action = RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL,
.protocol = RTE_SECURITY_PROTOCOL_IPSEC,
.ipsec = {
.proto = RTE_SECURITY_IPSEC_SA_PROTO_ESP,
.mode = RTE_SECURITY_IPSEC_SA_MODE_TUNNEL,
.direction = RTE_SECURITY_IPSEC_SA_DIR_INGRESS,
.options = { 0 }
},
.crypto_capabilities = otx2_cpt_sec_caps,
.ol_flags = RTE_SECURITY_TX_OLOAD_NEED_MDATA
},
{ /* IPsec Lookaside Protocol ESP Tunnel Egress */
.action = RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL,
.protocol = RTE_SECURITY_PROTOCOL_IPSEC,
.ipsec = {
.proto = RTE_SECURITY_IPSEC_SA_PROTO_ESP,
.mode = RTE_SECURITY_IPSEC_SA_MODE_TUNNEL,
.direction = RTE_SECURITY_IPSEC_SA_DIR_EGRESS,
.options = { 0 }
},
.crypto_capabilities = otx2_cpt_sec_caps,
.ol_flags = RTE_SECURITY_TX_OLOAD_NEED_MDATA
},
{
.action = RTE_SECURITY_ACTION_TYPE_NONE
}
};
static void
cpt_caps_add(const struct rte_cryptodev_capabilities *caps, int nb_caps)
{
	static int cur_pos;

	if (cur_pos + nb_caps > OTX2_CPT_MAX_CAPS)
		return;

	memcpy(&otx2_cpt_caps[cur_pos], caps, nb_caps * sizeof(caps[0]));
	cur_pos += nb_caps;
}

void
otx2_crypto_capabilities_init(union cpt_eng_caps *hw_caps)
{
	CPT_CAPS_ADD(hw_caps, mul);
	CPT_CAPS_ADD(hw_caps, sha1_sha2);
	CPT_CAPS_ADD(hw_caps, chacha20);
	CPT_CAPS_ADD(hw_caps, zuc_snow3g);
	CPT_CAPS_ADD(hw_caps, aes);
	CPT_CAPS_ADD(hw_caps, kasumi);
	CPT_CAPS_ADD(hw_caps, des);

	cpt_caps_add(caps_null, RTE_DIM(caps_null));
	cpt_caps_add(caps_end, RTE_DIM(caps_end));
}

const struct rte_cryptodev_capabilities *
otx2_cpt_capabilities_get(void)
{
	return otx2_cpt_caps;
}

static void
sec_caps_add(const struct rte_cryptodev_capabilities *caps, int nb_caps)
{
	static int cur_pos;

	if (cur_pos + nb_caps > OTX2_SEC_MAX_CAPS)
		return;

	memcpy(&otx2_cpt_sec_caps[cur_pos], caps, nb_caps * sizeof(caps[0]));
	cur_pos += nb_caps;
}

void
otx2_crypto_sec_capabilities_init(union cpt_eng_caps *hw_caps)
{
	SEC_CAPS_ADD(hw_caps, aes);
	sec_caps_add(caps_end, RTE_DIM(caps_end));
}

const struct rte_security_capability *
otx2_crypto_sec_capabilities_get(void *device __rte_unused)
{
	return otx2_crypto_sec_capabilities;
}
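
/*
 * Usage sketch (not part of the upstream file): the array filled in by
 * otx2_crypto_capabilities_init() is what the generic cryptodev query API
 * walks. A hypothetical application check for one of the symmetric entries
 * registered above could look like this; the helper name and device id are
 * assumptions for illustration.
 */
const struct rte_cryptodev_symmetric_capability *
example_find_aes_gcm(uint8_t cdev_id)
{
	const struct rte_cryptodev_sym_capability_idx idx = {
		.type = RTE_CRYPTO_SYM_XFORM_AEAD,
		.algo.aead = RTE_CRYPTO_AEAD_AES_GCM,
	};

	/* Matches against the entries added via CPT_CAPS_ADD(hw_caps, aes). */
	return rte_cryptodev_sym_capability_get(cdev_id, &idx);
}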