crypto/qat: support Chacha Poly
This patch adds a Chacha20-Poly1305 implementation to the Intel
QuickAssist Technology (QAT) PMD.

Signed-off-by: Arek Kusztal <arkadiuszx.kusztal@intel.com>
Acked-by: Fiona Trahe <fiona.trahe@intel.com>
parent c6c267a00a
commit 2c512e64d6
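For context, here is a minimal sketch of how an application could request the new algorithm through the DPDK symmetric crypto API once this patch is applied. The helper name, `dev_id`, `IV_OFFSET` and the session mempools are illustrative assumptions, not part of the patch, and error handling is reduced to the essentials.

#include <string.h>
#include <rte_cryptodev.h>
#include <rte_crypto_sym.h>

/* Assumption: the IV is copied into the op's private area right after the
 * symmetric op structure, so the xform points at that offset. */
#define IV_OFFSET (sizeof(struct rte_crypto_op) + sizeof(struct rte_crypto_sym_op))

static struct rte_cryptodev_sym_session *
create_chachapoly_session(uint8_t dev_id, uint8_t *key,
		struct rte_mempool *sess_mp, struct rte_mempool *sess_priv_mp)
{
	struct rte_crypto_sym_xform xform;
	struct rte_cryptodev_sym_session *sess;

	memset(&xform, 0, sizeof(xform));
	xform.type = RTE_CRYPTO_SYM_XFORM_AEAD;
	xform.aead.algo = RTE_CRYPTO_AEAD_CHACHA20_POLY1305;
	xform.aead.op = RTE_CRYPTO_AEAD_OP_ENCRYPT;
	xform.aead.key.data = key;
	xform.aead.key.length = 32;	/* only 256-bit keys are valid */
	xform.aead.iv.offset = IV_OFFSET;
	xform.aead.iv.length = 12;	/* 96-bit nonce */
	xform.aead.digest_length = 16;	/* Poly1305 tag */
	xform.aead.aad_length = 0;	/* this PMD advertises 0..240 bytes */

	sess = rte_cryptodev_sym_session_create(sess_mp);
	if (sess == NULL)
		return NULL;
	if (rte_cryptodev_sym_session_init(dev_id, sess, &xform,
			sess_priv_mp) < 0) {
		rte_cryptodev_sym_session_free(sess);
		return NULL;
	}
	return sess;
}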
@@ -60,12 +60,13 @@ AES CMAC (128) = Y
 ; Supported AEAD algorithms of the 'qat' crypto driver.
 ;
 [AEAD]
 AES GCM (128) = Y
 AES GCM (192) = Y
 AES GCM (256) = Y
 AES CCM (128) = Y
 AES CCM (192) = Y
 AES CCM (256) = Y
+CHACHA20-POLY1305 = Y
 
 ;
 ; Supported Asymmetric algorithms of the 'qat' crypto driver.
@@ -70,6 +70,7 @@ Supported AEAD algorithms:
 
 * ``RTE_CRYPTO_AEAD_AES_GCM``
 * ``RTE_CRYPTO_AEAD_AES_CCM``
+* ``RTE_CRYPTO_AEAD_CHACHA20_POLY1305``
 
 
 Supported Chains
@@ -77,6 +77,10 @@ New Features
   Such algorithm combinations are not supported on GEN1/GEN2 hardware
   and executing the request returns RTE_CRYPTO_OP_STATUS_INVALID_SESSION.
 
+* **Updated the Intel QuickAssist Technology (QAT) symmetric crypto PMD.**
+
+  Added Chacha20-Poly1305 AEAD algorithm.
+
 
 Removed Items
 -------------
@@ -204,7 +204,9 @@ enum icp_qat_hw_cipher_algo {
 	ICP_QAT_HW_CIPHER_ALGO_KASUMI = 7,
 	ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2 = 8,
 	ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3 = 9,
-	ICP_QAT_HW_CIPHER_DELIMITER = 10
+	ICP_QAT_HW_CIPHER_ALGO_SM4 = 10,
+	ICP_QAT_HW_CIPHER_ALGO_CHACHA20_POLY1305 = 11,
+	ICP_QAT_HW_CIPHER_DELIMITER = 12
 };
 
 enum icp_qat_hw_cipher_mode {
@@ -306,6 +308,12 @@ enum icp_qat_hw_cipher_convert {
 #define ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ 16
 #define ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ 16
 #define ICP_QAT_HW_MODE_F8_NUM_REG_TO_CLEAR 2
+#define ICP_QAT_HW_CHACHAPOLY_KEY_SZ 32
+#define ICP_QAT_HW_CHACHAPOLY_IV_SZ 12
+#define ICP_QAT_HW_CHACHAPOLY_BLK_SZ 64
+#define ICP_QAT_HW_SPC_CTR_SZ 16
+#define ICP_QAT_HW_CHACHAPOLY_ICV_SZ 16
+#define ICP_QAT_HW_CHACHAPOLY_AAD_MAX_LOG 14
 
 #define ICP_QAT_HW_CIPHER_MAX_KEY_SZ ICP_QAT_HW_AES_256_F8_KEY_SZ
@@ -594,4 +594,36 @@
 	}, } \
 }
 
+#define QAT_EXTRA_GEN3_SYM_CAPABILITIES \
+	{	/* Chacha20-Poly1305 */ \
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
+		{.sym = { \
+			.xform_type = RTE_CRYPTO_SYM_XFORM_AEAD, \
+			{.aead = { \
+				.algo = RTE_CRYPTO_AEAD_CHACHA20_POLY1305, \
+				.block_size = 64, \
+				.key_size = { \
+					.min = 32, \
+					.max = 32, \
+					.increment = 0 \
+				}, \
+				.digest_size = { \
+					.min = 16, \
+					.max = 16, \
+					.increment = 0 \
+				}, \
+				.aad_size = { \
+					.min = 0, \
+					.max = 240, \
+					.increment = 1 \
+				}, \
+				.iv_size = { \
+					.min = 12, \
+					.max = 12, \
+					.increment = 0 \
+				}, \
+			}, } \
+		}, } \
+	}
+
 #endif /* _QAT_SYM_CAPABILITIES_H_ */
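Outside the patch itself, this new capability entry can be discovered at runtime through the standard cryptodev capability helpers. Below is an illustrative check (the function name is an assumption), assuming `dev_id` refers to a configured QAT GEN3 device.

#include <rte_cryptodev.h>

static int
chachapoly_supported(uint8_t dev_id)
{
	const struct rte_cryptodev_symmetric_capability *cap;
	struct rte_cryptodev_sym_capability_idx idx = {
		.type = RTE_CRYPTO_SYM_XFORM_AEAD,
		.algo.aead = RTE_CRYPTO_AEAD_CHACHA20_POLY1305,
	};

	cap = rte_cryptodev_sym_capability_get(dev_id, &idx);
	if (cap == NULL)
		return 0;

	/* key 32 B, digest 16 B, AAD 64 B (any value 0..240), IV 12 B */
	return rte_cryptodev_sym_capability_check_aead(cap,
			32, 16, 64, 12) == 0;
}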
@@ -27,6 +27,13 @@ static const struct rte_cryptodev_capabilities qat_gen2_sym_capabilities[] = {
 	RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
 };
 
+static const struct rte_cryptodev_capabilities qat_gen3_sym_capabilities[] = {
+	QAT_BASE_GEN1_SYM_CAPABILITIES,
+	QAT_EXTRA_GEN2_SYM_CAPABILITIES,
+	QAT_EXTRA_GEN3_SYM_CAPABILITIES,
+	RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
+};
+
 static int qat_sym_qp_release(struct rte_cryptodev *dev,
 		uint16_t queue_pair_id);
@@ -294,9 +301,11 @@ qat_sym_dev_create(struct qat_pci_device *qat_pci_dev,
 		internals->qat_dev_capabilities = qat_gen1_sym_capabilities;
 		break;
 	case QAT_GEN2:
-	case QAT_GEN3:
 		internals->qat_dev_capabilities = qat_gen2_sym_capabilities;
 		break;
+	case QAT_GEN3:
+		internals->qat_dev_capabilities = qat_gen3_sym_capabilities;
+		break;
 	default:
 		internals->qat_dev_capabilities = qat_gen2_sym_capabilities;
 		QAT_LOG(DEBUG,
@@ -576,69 +576,68 @@ qat_sym_session_set_parameters(struct rte_cryptodev *dev,
 }
 
 static int
-qat_sym_session_handle_single_pass(struct qat_sym_dev_private *internals,
-		struct qat_sym_session *session,
+qat_sym_session_handle_single_pass(struct qat_sym_session *session,
 		struct rte_crypto_aead_xform *aead_xform)
 {
-	enum qat_device_gen qat_dev_gen = internals->qat_dev->qat_dev_gen;
+	struct icp_qat_fw_la_cipher_req_params *cipher_param =
+			(void *) &session->fw_req.serv_specif_rqpars;
 
-	if (qat_dev_gen == QAT_GEN3 &&
-			aead_xform->iv.length == QAT_AES_GCM_SPC_IV_SIZE) {
-		/* Use faster Single-Pass GCM */
-		struct icp_qat_fw_la_cipher_req_params *cipher_param =
-				(void *) &session->fw_req.serv_specif_rqpars;
-
-		session->is_single_pass = 1;
-		session->min_qat_dev_gen = QAT_GEN3;
-		session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER;
+	session->is_single_pass = 1;
+	session->min_qat_dev_gen = QAT_GEN3;
+	session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER;
+	if (aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) {
 		session->qat_mode = ICP_QAT_HW_CIPHER_AEAD_MODE;
-		session->cipher_iv.offset = aead_xform->iv.offset;
-		session->cipher_iv.length = aead_xform->iv.length;
-		if (qat_sym_session_aead_create_cd_cipher(session,
-				aead_xform->key.data, aead_xform->key.length))
-			return -EINVAL;
-		session->aad_len = aead_xform->aad_length;
-		session->digest_length = aead_xform->digest_length;
-		if (aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT) {
-			session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
-			session->auth_op = ICP_QAT_HW_AUTH_GENERATE;
-			ICP_QAT_FW_LA_RET_AUTH_SET(
-				session->fw_req.comn_hdr.serv_specif_flags,
-				ICP_QAT_FW_LA_RET_AUTH_RES);
-		} else {
-			session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
-			session->auth_op = ICP_QAT_HW_AUTH_VERIFY;
-			ICP_QAT_FW_LA_CMP_AUTH_SET(
-				session->fw_req.comn_hdr.serv_specif_flags,
-				ICP_QAT_FW_LA_CMP_AUTH_RES);
-		}
-		ICP_QAT_FW_LA_SINGLE_PASS_PROTO_FLAG_SET(
-			session->fw_req.comn_hdr.serv_specif_flags,
-			ICP_QAT_FW_LA_SINGLE_PASS_PROTO);
-		ICP_QAT_FW_LA_PROTO_SET(
-			session->fw_req.comn_hdr.serv_specif_flags,
-			ICP_QAT_FW_LA_NO_PROTO);
 		ICP_QAT_FW_LA_GCM_IV_LEN_FLAG_SET(
-			session->fw_req.comn_hdr.serv_specif_flags,
-			ICP_QAT_FW_LA_GCM_IV_LEN_12_OCTETS);
-		session->fw_req.comn_hdr.service_cmd_id =
-				ICP_QAT_FW_LA_CMD_CIPHER;
-		session->cd.cipher.cipher_config.val =
-				ICP_QAT_HW_CIPHER_CONFIG_BUILD(
-					ICP_QAT_HW_CIPHER_AEAD_MODE,
-					session->qat_cipher_alg,
-					ICP_QAT_HW_CIPHER_NO_CONVERT,
-					session->qat_dir);
-		QAT_FIELD_SET(session->cd.cipher.cipher_config.val,
-				aead_xform->digest_length,
-				QAT_CIPHER_AEAD_HASH_CMP_LEN_BITPOS,
-				QAT_CIPHER_AEAD_HASH_CMP_LEN_MASK);
-		session->cd.cipher.cipher_config.reserved =
-				ICP_QAT_HW_CIPHER_CONFIG_BUILD_UPPER(
-					aead_xform->aad_length);
-		cipher_param->spc_aad_sz = aead_xform->aad_length;
-		cipher_param->spc_auth_res_sz = aead_xform->digest_length;
+			session->fw_req.comn_hdr.serv_specif_flags,
+			ICP_QAT_FW_LA_GCM_IV_LEN_12_OCTETS);
+	} else {
+		/* Chacha-Poly is special case that use QAT CTR mode */
+		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
 	}
+	session->cipher_iv.offset = aead_xform->iv.offset;
+	session->cipher_iv.length = aead_xform->iv.length;
+	if (qat_sym_session_aead_create_cd_cipher(session,
+			aead_xform->key.data, aead_xform->key.length))
+		return -EINVAL;
+	session->aad_len = aead_xform->aad_length;
+	session->digest_length = aead_xform->digest_length;
+	if (aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT) {
+		session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
+		session->auth_op = ICP_QAT_HW_AUTH_GENERATE;
+		ICP_QAT_FW_LA_RET_AUTH_SET(
+			session->fw_req.comn_hdr.serv_specif_flags,
+			ICP_QAT_FW_LA_RET_AUTH_RES);
+	} else {
+		session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
+		session->auth_op = ICP_QAT_HW_AUTH_VERIFY;
+		ICP_QAT_FW_LA_CMP_AUTH_SET(
+			session->fw_req.comn_hdr.serv_specif_flags,
+			ICP_QAT_FW_LA_CMP_AUTH_RES);
+	}
+	ICP_QAT_FW_LA_SINGLE_PASS_PROTO_FLAG_SET(
+		session->fw_req.comn_hdr.serv_specif_flags,
+		ICP_QAT_FW_LA_SINGLE_PASS_PROTO);
+	ICP_QAT_FW_LA_PROTO_SET(
+		session->fw_req.comn_hdr.serv_specif_flags,
+		ICP_QAT_FW_LA_NO_PROTO);
+	session->fw_req.comn_hdr.service_cmd_id =
+			ICP_QAT_FW_LA_CMD_CIPHER;
+	session->cd.cipher.cipher_config.val =
+			ICP_QAT_HW_CIPHER_CONFIG_BUILD(
+				ICP_QAT_HW_CIPHER_AEAD_MODE,
+				session->qat_cipher_alg,
+				ICP_QAT_HW_CIPHER_NO_CONVERT,
+				session->qat_dir);
+	QAT_FIELD_SET(session->cd.cipher.cipher_config.val,
+			aead_xform->digest_length,
+			QAT_CIPHER_AEAD_HASH_CMP_LEN_BITPOS,
+			QAT_CIPHER_AEAD_HASH_CMP_LEN_MASK);
+	session->cd.cipher.cipher_config.reserved =
+			ICP_QAT_HW_CIPHER_CONFIG_BUILD_UPPER(
+				aead_xform->aad_length);
+	cipher_param->spc_aad_sz = aead_xform->aad_length;
+	cipher_param->spc_auth_res_sz = aead_xform->digest_length;
 
 	return 0;
 }
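To show where this single-pass path is exercised from the application side, here is a hedged sketch of building and enqueuing one in-place ChaCha20-Poly1305 encrypt operation. `IV_OFFSET`, the helper name and the surrounding setup (an operation mempool created with enough private space for the IV, mbuf tailroom reserved for the 16-byte tag, a configured queue pair `qp_id`) are assumptions for illustration only.

#include <string.h>
#include <rte_cryptodev.h>
#include <rte_mbuf.h>

/* Must match xform.aead.iv.offset used when the session was created. */
#define IV_OFFSET (sizeof(struct rte_crypto_op) + sizeof(struct rte_crypto_sym_op))

static int
chachapoly_encrypt(uint8_t dev_id, uint16_t qp_id, struct rte_mempool *op_mp,
		struct rte_cryptodev_sym_session *sess, struct rte_mbuf *m,
		uint32_t len, const uint8_t nonce[12])
{
	struct rte_crypto_op *op =
		rte_crypto_op_alloc(op_mp, RTE_CRYPTO_OP_TYPE_SYMMETRIC);
	if (op == NULL)
		return -1;

	rte_crypto_op_attach_sym_session(op, sess);
	op->sym->m_src = m;			/* in-place operation */
	op->sym->aead.data.offset = 0;
	op->sym->aead.data.length = len;
	/* 16-byte tag written right after the ciphertext; the mbuf is
	 * assumed to have that space already reserved. */
	op->sym->aead.digest.data = rte_pktmbuf_mtod_offset(m, uint8_t *, len);
	op->sym->aead.digest.phys_addr = rte_pktmbuf_iova_offset(m, len);

	/* copy the 96-bit nonce into the op's IV area */
	uint8_t *iv = rte_crypto_op_ctod_offset(op, uint8_t *, IV_OFFSET);
	memcpy(iv, nonce, 12);

	if (rte_cryptodev_enqueue_burst(dev_id, qp_id, &op, 1) != 1)
		return -1;
	while (rte_cryptodev_dequeue_burst(dev_id, qp_id, &op, 1) == 0)
		;	/* busy-wait is fine for a sketch, not for production */
	return op->status == RTE_CRYPTO_OP_STATUS_SUCCESS ? 0 : -1;
}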
@@ -791,6 +790,10 @@ qat_sym_session_configure_aead(struct rte_cryptodev *dev,
 {
 	struct rte_crypto_aead_xform *aead_xform = &xform->aead;
 	enum rte_crypto_auth_operation crypto_operation;
+	struct qat_sym_dev_private *internals =
+			dev->data->dev_private;
+	enum qat_device_gen qat_dev_gen =
+			internals->qat_dev->qat_dev_gen;
 
 	/*
 	 * Store AEAD IV parameters as cipher IV,
@@ -799,6 +802,7 @@ qat_sym_session_configure_aead(struct rte_cryptodev *dev,
 	session->cipher_iv.offset = xform->aead.iv.offset;
 	session->cipher_iv.length = xform->aead.iv.length;
 
+	session->is_single_pass = 0;
 	switch (aead_xform->algo) {
 	case RTE_CRYPTO_AEAD_AES_GCM:
 		if (qat_sym_validate_aes_key(aead_xform->key.length,
@@ -807,7 +811,13 @@ qat_sym_session_configure_aead(struct rte_cryptodev *dev,
 			return -EINVAL;
 		}
 		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
-		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
+		session->qat_hash_alg =
+				ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
+		if (qat_dev_gen > QAT_GEN2 && aead_xform->iv.length ==
+				QAT_AES_GCM_SPC_IV_SIZE) {
+			return qat_sym_session_handle_single_pass(session,
+					aead_xform);
+		}
 		break;
 	case RTE_CRYPTO_AEAD_AES_CCM:
 		if (qat_sym_validate_aes_key(aead_xform->key.length,
@@ -818,23 +828,19 @@ qat_sym_session_configure_aead(struct rte_cryptodev *dev,
 		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
 		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC;
 		break;
+	case RTE_CRYPTO_AEAD_CHACHA20_POLY1305:
+		if (aead_xform->key.length != ICP_QAT_HW_CHACHAPOLY_KEY_SZ)
+			return -EINVAL;
+		session->qat_cipher_alg =
+				ICP_QAT_HW_CIPHER_ALGO_CHACHA20_POLY1305;
+		return qat_sym_session_handle_single_pass(session,
+				aead_xform);
 	default:
 		QAT_LOG(ERR, "Crypto: Undefined AEAD specified %u\n",
 				aead_xform->algo);
 		return -EINVAL;
 	}
 
-	session->is_single_pass = 0;
-	if (aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) {
-		/* Use faster Single-Pass GCM if possible */
-		int res = qat_sym_session_handle_single_pass(
-			dev->data->dev_private, session, aead_xform);
-		if (res < 0)
-			return res;
-		if (session->is_single_pass)
-			return 0;
-	}
-
 	if ((aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT &&
 			aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) ||
 			(aead_xform->op == RTE_CRYPTO_AEAD_OP_DECRYPT &&