6bc987ecb8
Add support for inline crypto for IPsec, for ESP transport and tunnel
over IPv4 and IPv6, as well as the offload of ESP over UDP, and its use
in conjunction with TSO for UDP and TCP flows. Implement support for
rte_security packet metadata.

Add definitions for the IPsec descriptors and extend the data and
context descriptors to support the offload.

Add support to the virtual channel mailbox for IPsec Crypto request
operations. IPsec Crypto requests receive an initial acknowledgment
from the physical function driver confirming receipt of the request,
followed by an asynchronous response carrying the success/failure of
the request, including any response data.

Add enhanced descriptor debugging.

Refactor the scalar Tx burst function to support integration of the
offload.

Signed-off-by: Declan Doherty <declan.doherty@intel.com>
Signed-off-by: Abhijit Sinha <abhijit.sinha@intel.com>
Signed-off-by: Radu Nicolau <radu.nicolau@intel.com>
Reviewed-by: Jingjing Wu <jingjing.wu@intel.com>
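As a usage illustration of what this series enables, below is a minimal sketch of probing a port for the inline crypto ESP capability through the rte_security API of this DPDK era (exact rte_security prototypes changed in later releases, which use opaque void * contexts); port_supports_inline_esp_tunnel is a hypothetical helper and not part of the patch.

#include <stdbool.h>
#include <stdint.h>

#include <rte_ethdev.h>
#include <rte_security.h>

/* Hypothetical helper (illustration only): check whether a port
 * advertises inline crypto for ESP in tunnel mode through its
 * rte_security context. */
static bool
port_supports_inline_esp_tunnel(uint16_t port_id)
{
	struct rte_security_ctx *ctx = rte_eth_dev_get_sec_ctx(port_id);
	const struct rte_security_capability *cap;

	if (ctx == NULL)
		return false;

	cap = rte_security_capabilities_get(ctx);
	if (cap == NULL)
		return false;

	/* The capability list is terminated by an entry whose action is
	 * RTE_SECURITY_ACTION_TYPE_NONE. */
	for (; cap->action != RTE_SECURITY_ACTION_TYPE_NONE; cap++) {
		if (cap->action == RTE_SECURITY_ACTION_TYPE_INLINE_CRYPTO &&
		    cap->protocol == RTE_SECURITY_PROTOCOL_IPSEC &&
		    cap->ipsec.proto == RTE_SECURITY_IPSEC_SA_PROTO_ESP &&
		    cap->ipsec.mode == RTE_SECURITY_IPSEC_SA_MODE_TUNNEL)
			return true;
	}
	return false;
}

A matching capability's crypto_capabilities pointer refers to a table of rte_cryptodev_capabilities such as the one defined in the header below.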
384 lines
6.8 KiB
C
/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2020 Intel Corporation
 */

#ifndef _IAVF_IPSEC_CRYPTO_CAPABILITIES_H_
#define _IAVF_IPSEC_CRYPTO_CAPABILITIES_H_

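/* Symmetric crypto capabilities advertised for the IPsec Crypto offload;
 * the list is terminated by an entry with op == RTE_CRYPTO_OP_TYPE_UNDEFINED.
 */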
static const struct rte_cryptodev_capabilities iavf_crypto_capabilities[] = {
	{ /* SHA1 HMAC */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
			{.auth = {
				.algo = RTE_CRYPTO_AUTH_SHA1_HMAC,
				.block_size = 64,
				.key_size = {
					.min = 1,
					.max = 64,
					.increment = 1
				},
				.digest_size = {
					.min = 20,
					.max = 20,
					.increment = 0
				},
				.iv_size = { 0 }
			}, }
		}, }
	},
	{ /* SHA256 HMAC */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
			{.auth = {
				.algo = RTE_CRYPTO_AUTH_SHA256_HMAC,
				.block_size = 64,
				.key_size = {
					.min = 1,
					.max = 64,
					.increment = 1
				},
				.digest_size = {
					.min = 32,
					.max = 32,
					.increment = 0
				},
				.iv_size = { 0 }
			}, }
		}, }
	},
	{ /* SHA384 HMAC */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
			{.auth = {
				.algo = RTE_CRYPTO_AUTH_SHA384_HMAC,
				.block_size = 128,
				.key_size = {
					.min = 1,
					.max = 128,
					.increment = 1
				},
				.digest_size = {
					.min = 48,
					.max = 48,
					.increment = 0
				},
				.iv_size = { 0 }
			}, }
		}, }
	},
	{ /* SHA512 HMAC */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
			{.auth = {
				.algo = RTE_CRYPTO_AUTH_SHA512_HMAC,
				.block_size = 128,
				.key_size = {
					.min = 1,
					.max = 128,
					.increment = 1
				},
				.digest_size = {
					.min = 64,
					.max = 64,
					.increment = 0
				},
				.iv_size = { 0 }
			}, }
		}, }
	},
	{ /* MD5 HMAC */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
			{.auth = {
				.algo = RTE_CRYPTO_AUTH_MD5_HMAC,
				.block_size = 64,
				.key_size = {
					.min = 1,
					.max = 64,
					.increment = 1
				},
				.digest_size = {
					.min = 16,
					.max = 16,
					.increment = 0
				},
				.iv_size = { 0 }
			}, }
		}, }
	},
	{ /* AES XCBC MAC */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
			{.auth = {
				.algo = RTE_CRYPTO_AUTH_AES_XCBC_MAC,
				.block_size = 16,
				.key_size = {
					.min = 16,
					.max = 16,
					.increment = 0
				},
				.digest_size = {
					.min = 16,
					.max = 16,
					.increment = 0
				},
				.aad_size = { 0 },
				.iv_size = { 0 }
			}, }
		}, }
	},
	{ /* AES GCM */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_AEAD,
			{.aead = {
				.algo = RTE_CRYPTO_AEAD_AES_GCM,
				.block_size = 16,
				.key_size = {
					.min = 16,
					.max = 32,
					.increment = 8
				},
				.digest_size = {
					.min = 8,
					.max = 16,
					.increment = 4
				},
				.aad_size = {
					.min = 0,
					.max = 240,
					.increment = 1
				},
				.iv_size = {
					.min = 8,
					.max = 8,
					.increment = 0
				},
			}, }
		}, }
	},
	{ /* ChaCha20-Poly1305 */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_AEAD,
			{.aead = {
				.algo = RTE_CRYPTO_AEAD_CHACHA20_POLY1305,
				.block_size = 16,
				.key_size = {
					.min = 32,
					.max = 32,
					.increment = 0
				},
				.digest_size = {
					.min = 8,
					.max = 16,
					.increment = 4
				},
				.aad_size = {
					.min = 0,
					.max = 240,
					.increment = 1
				},
				.iv_size = {
					.min = 12,
					.max = 12,
					.increment = 0
				},
			}, }
		}, }
	},
	{ /* AES CCM */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_AEAD,
			{.aead = {
				.algo = RTE_CRYPTO_AEAD_AES_CCM,
				.block_size = 16,
				.key_size = {
					.min = 16,
					.max = 32,
					.increment = 8
				},
				.digest_size = {
					.min = 8,
					.max = 16,
					.increment = 4
				},
				.aad_size = {
					.min = 0,
					.max = 240,
					.increment = 1
				},
				.iv_size = {
					.min = 12,
					.max = 12,
					.increment = 0
				},
			}, }
		}, }
	},
	{ /* AES GMAC (AUTH) */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
			{.auth = {
				.algo = RTE_CRYPTO_AUTH_AES_GMAC,
				.block_size = 16,
				.key_size = {
					.min = 16,
					.max = 32,
					.increment = 8
				},
				.digest_size = {
					.min = 8,
					.max = 16,
					.increment = 4
				},
				.iv_size = {
					.min = 12,
					.max = 12,
					.increment = 0
				}
			}, }
		}, }
	},
	{ /* AES CMAC (AUTH) */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
			{.auth = {
				.algo = RTE_CRYPTO_AUTH_AES_CMAC,
				.block_size = 16,
				.key_size = {
					.min = 16,
					.max = 32,
					.increment = 8
				},
				.digest_size = {
					.min = 8,
					.max = 16,
					.increment = 4
				},
				.iv_size = {
					.min = 12,
					.max = 12,
					.increment = 0
				}
			}, }
		}, }
	},
	{ /* AES CBC */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
			{.cipher = {
				.algo = RTE_CRYPTO_CIPHER_AES_CBC,
				.block_size = 16,
				.key_size = {
					.min = 16,
					.max = 32,
					.increment = 8
				},
				.iv_size = {
					.min = 16,
					.max = 16,
					.increment = 0
				}
			}, }
		}, }
	},
	{ /* AES CTR */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
			{.cipher = {
				.algo = RTE_CRYPTO_CIPHER_AES_CTR,
				.block_size = 16,
				.key_size = {
					.min = 16,
					.max = 32,
					.increment = 8
				},
				.iv_size = {
					.min = 8,
					.max = 8,
					.increment = 0
				}
			}, }
		}, }
	},
	{ /* NULL (AUTH) */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
			{.auth = {
				.algo = RTE_CRYPTO_AUTH_NULL,
				.block_size = 1,
				.key_size = {
					.min = 0,
					.max = 0,
					.increment = 0
				},
				.digest_size = {
					.min = 0,
					.max = 0,
					.increment = 0
				},
				.iv_size = { 0 }
			}, },
		}, },
	},
	{ /* NULL (CIPHER) */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
			{.cipher = {
				.algo = RTE_CRYPTO_CIPHER_NULL,
				.block_size = 1,
				.key_size = {
					.min = 0,
					.max = 0,
					.increment = 0
				},
				.iv_size = {
					.min = 0,
					.max = 0,
					.increment = 0
				}
			}, },
		}, }
	},
	{ /* 3DES CBC */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
			{.cipher = {
				.algo = RTE_CRYPTO_CIPHER_3DES_CBC,
				.block_size = 8,
				.key_size = {
					.min = 24,
					.max = 24,
					.increment = 0
				},
				.iv_size = {
					.min = 8,
					.max = 8,
					.increment = 0
				}
			}, }
		}, }
	},
	{
		.op = RTE_CRYPTO_OP_TYPE_UNDEFINED,
	}
};

#endif /* _IAVF_IPSEC_CRYPTO_CAPABILITIES_H_ */
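For reference, a minimal sketch of walking the table above to check whether a given AEAD algorithm and key length are advertised, assuming this header is available as iavf_ipsec_crypto_capabilities.h; iavf_aead_supported is a hypothetical helper, not part of the driver.

#include <stdbool.h>
#include <stdint.h>

#include <rte_cryptodev.h>

#include "iavf_ipsec_crypto_capabilities.h"

/* Hypothetical helper (illustration only): check whether the capability
 * table advertises the given AEAD algorithm with the given key length. */
static bool
iavf_aead_supported(enum rte_crypto_aead_algorithm algo, uint16_t key_len)
{
	const struct rte_cryptodev_capabilities *cap;

	/* The table is terminated by RTE_CRYPTO_OP_TYPE_UNDEFINED. */
	for (cap = iavf_crypto_capabilities;
	     cap->op != RTE_CRYPTO_OP_TYPE_UNDEFINED; cap++) {
		const struct rte_crypto_param_range *ks;

		if (cap->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC ||
		    cap->sym.xform_type != RTE_CRYPTO_SYM_XFORM_AEAD ||
		    cap->sym.aead.algo != algo)
			continue;

		/* Key length must lie in [min, max] and, when increment is
		 * non-zero, be reachable from min in increment steps. */
		ks = &cap->sym.aead.key_size;
		if (key_len < ks->min || key_len > ks->max)
			return false;
		if (ks->increment == 0)
			return key_len == ks->min;
		return ((key_len - ks->min) % ks->increment) == 0;
	}
	return false;
}

The same walk applies to the auth and cipher entries by switching on sym.xform_type and checking digest_size or iv_size instead of the AEAD key range.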