crypto/qat: support ChaCha20-Poly1305

This patch adds a ChaCha20-Poly1305 AEAD implementation to the
Intel QuickAssist Technology (QAT) PMD.

Signed-off-by: Arek Kusztal <arkadiuszx.kusztal@intel.com>
Author:       Arek Kusztal <arkadiuszx.kusztal@intel.com>
Date:         2020-07-07 17:16:00 +02:00
Committed-by: Thomas Monjalon
Parent:       7788dceccb
Commit:       faa57df0b4
7 changed files with 124 additions and 75 deletions
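For orientation (an editor's aside, not part of the commit): with this support in place, an application requests the algorithm through the regular cryptodev AEAD transform. The sketch below uses only public DPDK API; the sizes match the capability entry added by this patch, and the session/mbuf plumbing is left out:

```c
#include <string.h>
#include <rte_crypto.h>

/* Minimal sketch: fill an AEAD transform for ChaCha20-Poly1305.
 * Per the capabilities added below: 32-byte key, 12-byte IV,
 * 16-byte digest, AAD of 0-240 bytes. */
static void
fill_chachapoly_xform(struct rte_crypto_sym_xform *xform,
        uint8_t *key, uint16_t aad_len)
{
    memset(xform, 0, sizeof(*xform));
    xform->type = RTE_CRYPTO_SYM_XFORM_AEAD;
    xform->aead.algo = RTE_CRYPTO_AEAD_CHACHA20_POLY1305;
    xform->aead.op = RTE_CRYPTO_AEAD_OP_ENCRYPT;
    xform->aead.key.data = key;
    xform->aead.key.length = 32;
    /* The IV travels in the crypto op; the usual convention places it
     * immediately after the symmetric op structure. */
    xform->aead.iv.offset = sizeof(struct rte_crypto_op) +
            sizeof(struct rte_crypto_sym_op);
    xform->aead.iv.length = 12;
    xform->aead.digest_length = 16;
    xform->aead.aad_length = aad_len;
}
```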

diff --git a/doc/guides/cryptodevs/features/qat.ini b/doc/guides/cryptodevs/features/qat.ini

@@ -66,12 +66,13 @@ AES CMAC (128) = Y
 ;
 ; Supported AEAD algorithms of the 'qat' crypto driver.
 ;
 [AEAD]
-AES GCM (128) = Y
-AES GCM (192) = Y
-AES GCM (256) = Y
-AES CCM (128) = Y
-AES CCM (192) = Y
-AES CCM (256) = Y
+AES GCM (128)     = Y
+AES GCM (192)     = Y
+AES GCM (256)     = Y
+AES CCM (128)     = Y
+AES CCM (192)     = Y
+AES CCM (256)     = Y
+CHACHA20-POLY1305 = Y
 
 ;
 ; Supported Asymmetric algorithms of the 'qat' crypto driver.

diff --git a/doc/guides/cryptodevs/qat.rst b/doc/guides/cryptodevs/qat.rst

@@ -75,6 +75,7 @@ Supported AEAD algorithms:
 * ``RTE_CRYPTO_AEAD_AES_GCM``
 * ``RTE_CRYPTO_AEAD_AES_CCM``
+* ``RTE_CRYPTO_AEAD_CHACHA20_POLY1305``
 
 Protocol offloads:

diff --git a/doc/guides/rel_notes/release_20_08.rst b/doc/guides/rel_notes/release_20_08.rst

@@ -130,6 +130,7 @@ New Features
   * Added support for lookaside protocol offload for DOCSIS through the
     ``rte_security`` API.
+  * Added Chacha20-Poly1305 AEAD algorithm.
   * Improved handling of multi process in QAT crypto and compression PMDs.
 
 * **Updated the OCTEON TX2 crypto PMD.**

diff --git a/drivers/common/qat/qat_adf/icp_qat_hw.h b/drivers/common/qat/qat_adf/icp_qat_hw.h

@@ -204,7 +204,9 @@ enum icp_qat_hw_cipher_algo {
     ICP_QAT_HW_CIPHER_ALGO_KASUMI = 7,
     ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2 = 8,
     ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3 = 9,
-    ICP_QAT_HW_CIPHER_DELIMITER = 10
+    ICP_QAT_HW_CIPHER_ALGO_SM4 = 10,
+    ICP_QAT_HW_CIPHER_ALGO_CHACHA20_POLY1305 = 11,
+    ICP_QAT_HW_CIPHER_DELIMITER = 12
 };
 
 enum icp_qat_hw_cipher_mode {
@@ -306,6 +308,12 @@ enum icp_qat_hw_cipher_convert {
 #define ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ 16
 #define ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ 16
 #define ICP_QAT_HW_MODE_F8_NUM_REG_TO_CLEAR 2
+#define ICP_QAT_HW_CHACHAPOLY_KEY_SZ 32
+#define ICP_QAT_HW_CHACHAPOLY_IV_SZ 12
+#define ICP_QAT_HW_CHACHAPOLY_BLK_SZ 64
+#define ICP_QAT_HW_SPC_CTR_SZ 16
+#define ICP_QAT_HW_CHACHAPOLY_ICV_SZ 16
+#define ICP_QAT_HW_CHACHAPOLY_AAD_MAX_LOG 14
 
 #define ICP_QAT_HW_CIPHER_MAX_KEY_SZ ICP_QAT_HW_AES_256_F8_KEY_SZ
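An aside from the editor, not the patch: these constants pin ChaCha20-Poly1305 to a single parameter set, so a front end can reject a bad transform before touching hardware. The helper below is hypothetical (the patch itself only checks the key length at session setup) and assumes this header plus rte_crypto_sym.h are included:

```c
/* Hypothetical pre-check built on the new constants: QAT's
 * ChaCha20-Poly1305 accepts exactly a 256-bit key, 96-bit IV
 * and 128-bit tag. */
static int
chachapoly_params_ok(const struct rte_crypto_aead_xform *aead)
{
    return aead->key.length == ICP_QAT_HW_CHACHAPOLY_KEY_SZ &&     /* 32 */
            aead->iv.length == ICP_QAT_HW_CHACHAPOLY_IV_SZ &&      /* 12 */
            aead->digest_length == ICP_QAT_HW_CHACHAPOLY_ICV_SZ;   /* 16 */
}
```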

diff --git a/drivers/crypto/qat/qat_sym_capabilities.h b/drivers/crypto/qat/qat_sym_capabilities.h

@@ -699,6 +699,38 @@
     }, } \
 }
 
+#define QAT_EXTRA_GEN3_SYM_CAPABILITIES \
+    { /* Chacha20-Poly1305 */ \
+        .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
+        {.sym = { \
+            .xform_type = RTE_CRYPTO_SYM_XFORM_AEAD, \
+            {.aead = { \
+                .algo = RTE_CRYPTO_AEAD_CHACHA20_POLY1305, \
+                .block_size = 64, \
+                .key_size = { \
+                    .min = 32, \
+                    .max = 32, \
+                    .increment = 0 \
+                }, \
+                .digest_size = { \
+                    .min = 16, \
+                    .max = 16, \
+                    .increment = 0 \
+                }, \
+                .aad_size = { \
+                    .min = 0, \
+                    .max = 240, \
+                    .increment = 1 \
+                }, \
+                .iv_size = { \
+                    .min = 12, \
+                    .max = 12, \
+                    .increment = 0 \
+                }, \
+            }, } \
+        }, } \
+    }
+
 #ifdef RTE_LIBRTE_SECURITY
 #define QAT_SECURITY_SYM_CAPABILITIES \
     { /* AES DOCSIS BPI */ \
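Again as an editor's note rather than part of the diff: once a GEN3 device exports this entry, an application can probe it at runtime with the standard capability API before building a session. A minimal check, assuming the device is already configured:

```c
#include <rte_cryptodev.h>

/* Return non-zero if dev_id advertises ChaCha20-Poly1305 with a
 * 32-byte key, 16-byte digest, 12-byte IV and the given AAD length. */
static int
dev_has_chachapoly(uint8_t dev_id, uint16_t aad_len)
{
    const struct rte_cryptodev_symmetric_capability *cap;
    struct rte_cryptodev_sym_capability_idx idx = {
        .type = RTE_CRYPTO_SYM_XFORM_AEAD,
        .algo.aead = RTE_CRYPTO_AEAD_CHACHA20_POLY1305,
    };

    cap = rte_cryptodev_sym_capability_get(dev_id, &idx);
    if (cap == NULL)
        return 0; /* algorithm not supported by this device */

    /* check_aead() returns 0 when all sizes fall within the
     * ranges the PMD advertises. */
    return rte_cryptodev_sym_capability_check_aead(cap,
            32, 16, aad_len, 12) == 0;
}
```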

diff --git a/drivers/crypto/qat/qat_sym_pmd.c b/drivers/crypto/qat/qat_sym_pmd.c

@@ -35,6 +35,7 @@ static const struct rte_cryptodev_capabilities qat_gen2_sym_capabilities[] = {
 static const struct rte_cryptodev_capabilities qat_gen3_sym_capabilities[] = {
     QAT_BASE_GEN1_SYM_CAPABILITIES,
     QAT_EXTRA_GEN2_SYM_CAPABILITIES,
+    QAT_EXTRA_GEN3_SYM_CAPABILITIES,
     RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
 };

diff --git a/drivers/crypto/qat/qat_sym_session.c b/drivers/crypto/qat/qat_sym_session.c

@@ -632,69 +632,68 @@ qat_sym_session_set_parameters(struct rte_cryptodev *dev,
 }
 
 static int
-qat_sym_session_handle_single_pass(struct qat_sym_dev_private *internals,
-        struct qat_sym_session *session,
+qat_sym_session_handle_single_pass(struct qat_sym_session *session,
         struct rte_crypto_aead_xform *aead_xform)
 {
-    enum qat_device_gen qat_dev_gen = internals->qat_dev->qat_dev_gen;
+    struct icp_qat_fw_la_cipher_req_params *cipher_param =
+            (void *) &session->fw_req.serv_specif_rqpars;
 
-    if (qat_dev_gen == QAT_GEN3 &&
-            aead_xform->iv.length == QAT_AES_GCM_SPC_IV_SIZE) {
-        /* Use faster Single-Pass GCM */
-        struct icp_qat_fw_la_cipher_req_params *cipher_param =
-                (void *) &session->fw_req.serv_specif_rqpars;
-
-        session->is_single_pass = 1;
-        session->min_qat_dev_gen = QAT_GEN3;
-        session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER;
+    session->is_single_pass = 1;
+    session->min_qat_dev_gen = QAT_GEN3;
+    session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER;
+    if (aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) {
         session->qat_mode = ICP_QAT_HW_CIPHER_AEAD_MODE;
-        session->cipher_iv.offset = aead_xform->iv.offset;
-        session->cipher_iv.length = aead_xform->iv.length;
-        if (qat_sym_session_aead_create_cd_cipher(session,
-                aead_xform->key.data, aead_xform->key.length))
-            return -EINVAL;
-        session->aad_len = aead_xform->aad_length;
-        session->digest_length = aead_xform->digest_length;
-        if (aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT) {
-            session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
-            session->auth_op = ICP_QAT_HW_AUTH_GENERATE;
-            ICP_QAT_FW_LA_RET_AUTH_SET(
-                session->fw_req.comn_hdr.serv_specif_flags,
-                ICP_QAT_FW_LA_RET_AUTH_RES);
-        } else {
-            session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
-            session->auth_op = ICP_QAT_HW_AUTH_VERIFY;
-            ICP_QAT_FW_LA_CMP_AUTH_SET(
-                session->fw_req.comn_hdr.serv_specif_flags,
-                ICP_QAT_FW_LA_CMP_AUTH_RES);
-        }
-        ICP_QAT_FW_LA_SINGLE_PASS_PROTO_FLAG_SET(
-            session->fw_req.comn_hdr.serv_specif_flags,
-            ICP_QAT_FW_LA_SINGLE_PASS_PROTO);
-        ICP_QAT_FW_LA_PROTO_SET(
-            session->fw_req.comn_hdr.serv_specif_flags,
-            ICP_QAT_FW_LA_NO_PROTO);
         ICP_QAT_FW_LA_GCM_IV_LEN_FLAG_SET(
-            session->fw_req.comn_hdr.serv_specif_flags,
-            ICP_QAT_FW_LA_GCM_IV_LEN_12_OCTETS);
-        session->fw_req.comn_hdr.service_cmd_id =
-                ICP_QAT_FW_LA_CMD_CIPHER;
-        session->cd.cipher.cipher_config.val =
-                ICP_QAT_HW_CIPHER_CONFIG_BUILD(
-                ICP_QAT_HW_CIPHER_AEAD_MODE,
-                session->qat_cipher_alg,
-                ICP_QAT_HW_CIPHER_NO_CONVERT,
-                session->qat_dir);
-        QAT_FIELD_SET(session->cd.cipher.cipher_config.val,
-                aead_xform->digest_length,
-                QAT_CIPHER_AEAD_HASH_CMP_LEN_BITPOS,
-                QAT_CIPHER_AEAD_HASH_CMP_LEN_MASK);
-        session->cd.cipher.cipher_config.reserved =
-                ICP_QAT_HW_CIPHER_CONFIG_BUILD_UPPER(
-                aead_xform->aad_length);
-        cipher_param->spc_aad_sz = aead_xform->aad_length;
-        cipher_param->spc_auth_res_sz = aead_xform->digest_length;
+            session->fw_req.comn_hdr.serv_specif_flags,
+            ICP_QAT_FW_LA_GCM_IV_LEN_12_OCTETS);
+    } else {
+        /* Chacha-Poly is a special case that uses QAT CTR mode */
+        session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
     }
+    session->cipher_iv.offset = aead_xform->iv.offset;
+    session->cipher_iv.length = aead_xform->iv.length;
+    if (qat_sym_session_aead_create_cd_cipher(session,
+            aead_xform->key.data, aead_xform->key.length))
+        return -EINVAL;
+    session->aad_len = aead_xform->aad_length;
+    session->digest_length = aead_xform->digest_length;
+    if (aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT) {
+        session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
+        session->auth_op = ICP_QAT_HW_AUTH_GENERATE;
+        ICP_QAT_FW_LA_RET_AUTH_SET(
+            session->fw_req.comn_hdr.serv_specif_flags,
+            ICP_QAT_FW_LA_RET_AUTH_RES);
+    } else {
+        session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
+        session->auth_op = ICP_QAT_HW_AUTH_VERIFY;
+        ICP_QAT_FW_LA_CMP_AUTH_SET(
+            session->fw_req.comn_hdr.serv_specif_flags,
+            ICP_QAT_FW_LA_CMP_AUTH_RES);
+    }
+    ICP_QAT_FW_LA_SINGLE_PASS_PROTO_FLAG_SET(
+        session->fw_req.comn_hdr.serv_specif_flags,
+        ICP_QAT_FW_LA_SINGLE_PASS_PROTO);
+    ICP_QAT_FW_LA_PROTO_SET(
+        session->fw_req.comn_hdr.serv_specif_flags,
+        ICP_QAT_FW_LA_NO_PROTO);
+    session->fw_req.comn_hdr.service_cmd_id =
+            ICP_QAT_FW_LA_CMD_CIPHER;
+    session->cd.cipher.cipher_config.val =
+            ICP_QAT_HW_CIPHER_CONFIG_BUILD(
+            ICP_QAT_HW_CIPHER_AEAD_MODE,
+            session->qat_cipher_alg,
+            ICP_QAT_HW_CIPHER_NO_CONVERT,
+            session->qat_dir);
+    QAT_FIELD_SET(session->cd.cipher.cipher_config.val,
+            aead_xform->digest_length,
+            QAT_CIPHER_AEAD_HASH_CMP_LEN_BITPOS,
+            QAT_CIPHER_AEAD_HASH_CMP_LEN_MASK);
+    session->cd.cipher.cipher_config.reserved =
+            ICP_QAT_HW_CIPHER_CONFIG_BUILD_UPPER(
+            aead_xform->aad_length);
+    cipher_param->spc_aad_sz = aead_xform->aad_length;
+    cipher_param->spc_auth_res_sz = aead_xform->digest_length;
 
     return 0;
 }
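Editor's summary (not in the patch): after this rewrite the single-pass decision reduces to two rules, restated here as a hypothetical helper:

```c
/* Hypothetical condensation of the dispatch logic above. */
static inline int
uses_single_pass(enum rte_crypto_aead_algorithm algo, uint16_t iv_len,
        enum qat_device_gen gen)
{
    /* AES-GCM goes single-pass only on GEN3 hardware, and only with
     * the 12-byte IV the single-pass (SPC) path requires. */
    if (algo == RTE_CRYPTO_AEAD_AES_GCM)
        return gen > QAT_GEN2 && iv_len == QAT_AES_GCM_SPC_IV_SIZE;
    /* ChaCha20-Poly1305 has no two-pass fallback; it always builds a
     * single-pass session, driven through QAT's CTR cipher mode. */
    return algo == RTE_CRYPTO_AEAD_CHACHA20_POLY1305;
}
```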
@@ -865,6 +864,10 @@ qat_sym_session_configure_aead(struct rte_cryptodev *dev,
 {
     struct rte_crypto_aead_xform *aead_xform = &xform->aead;
     enum rte_crypto_auth_operation crypto_operation;
+    struct qat_sym_dev_private *internals =
+            dev->data->dev_private;
+    enum qat_device_gen qat_dev_gen =
+            internals->qat_dev->qat_dev_gen;
 
     /*
      * Store AEAD IV parameters as cipher IV,
@@ -875,6 +878,7 @@ qat_sym_session_configure_aead(struct rte_cryptodev *dev,
     session->auth_mode = ICP_QAT_HW_AUTH_MODE1;
+    session->is_single_pass = 0;
     switch (aead_xform->algo) {
     case RTE_CRYPTO_AEAD_AES_GCM:
         if (qat_sym_validate_aes_key(aead_xform->key.length,
@@ -884,6 +888,11 @@ qat_sym_session_configure_aead(struct rte_cryptodev *dev,
         }
         session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
         session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
+        if (qat_dev_gen > QAT_GEN2 && aead_xform->iv.length ==
+                QAT_AES_GCM_SPC_IV_SIZE) {
+            return qat_sym_session_handle_single_pass(session,
+                    aead_xform);
+        }
 
         if (session->cipher_iv.length == 0)
             session->cipher_iv.length = AES_GCM_J0_LEN;
@@ -897,23 +906,19 @@ qat_sym_session_configure_aead(struct rte_cryptodev *dev,
         session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
         session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC;
         break;
+    case RTE_CRYPTO_AEAD_CHACHA20_POLY1305:
+        if (aead_xform->key.length != ICP_QAT_HW_CHACHAPOLY_KEY_SZ)
+            return -EINVAL;
+        session->qat_cipher_alg =
+                ICP_QAT_HW_CIPHER_ALGO_CHACHA20_POLY1305;
+        return qat_sym_session_handle_single_pass(session,
+                aead_xform);
     default:
         QAT_LOG(ERR, "Crypto: Undefined AEAD specified %u\n",
                 aead_xform->algo);
         return -EINVAL;
     }
 
-    session->is_single_pass = 0;
-    if (aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) {
-        /* Use faster Single-Pass GCM if possible */
-        int res = qat_sym_session_handle_single_pass(
-            dev->data->dev_private, session, aead_xform);
-        if (res < 0)
-            return res;
-        if (session->is_single_pass)
-            return 0;
-    }
-
     if ((aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT &&
             aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) ||
             (aead_xform->op == RTE_CRYPTO_AEAD_OP_DECRYPT &&