cryptodev: revert Chacha20-Poly1305 AEAD algorithm

The API implies that rte_cryptodev_info_get() cannot return
a value >= 3 (RTE_CRYPTO_AEAD_LIST_END in 19.11).
20.02-rc1 was returning 3 (RTE_CRYPTO_AEAD_CHACHA20_POLY1305).
So the ABI compatibility contract was broken.

It could be solved with some function versioning,
but because of a lack of time, the feature is reverted for now.

This reverts the following commits:
- 6c9f3b347e21 ("cryptodev: add Chacha20-Poly1305 AEAD algorithm")
- 2c512e64d600 ("crypto/qat: support Chacha Poly")
- d55e01f579e1 ("test/crypto: add Chacha Poly cases")

Signed-off-by: Thomas Monjalon <thomas@monjalon.net>
This commit is contained in:
Thomas Monjalon 2020-02-05 12:04:32 +01:00
parent 777014e56d
commit f5862ae99e
12 changed files with 83 additions and 268 deletions

View File

@ -7814,7 +7814,6 @@ test_authenticated_encryption(const struct aead_test_data *tdata)
{ {
struct crypto_testsuite_params *ts_params = &testsuite_params; struct crypto_testsuite_params *ts_params = &testsuite_params;
struct crypto_unittest_params *ut_params = &unittest_params; struct crypto_unittest_params *ut_params = &unittest_params;
struct rte_cryptodev_sym_capability_idx cap_idx;
int retval; int retval;
uint8_t *ciphertext, *auth_tag; uint8_t *ciphertext, *auth_tag;
@ -7828,15 +7827,6 @@ test_authenticated_encryption(const struct aead_test_data *tdata)
tdata->key.data, tdata->key.len, tdata->key.data, tdata->key.len,
tdata->aad.len, tdata->auth_tag.len, tdata->aad.len, tdata->auth_tag.len,
tdata->iv.len); tdata->iv.len);
cap_idx.type = RTE_CRYPTO_SYM_XFORM_AEAD;
cap_idx.algo.aead = tdata->algo;
if (rte_cryptodev_sym_capability_get(ts_params->valid_devs[0],
&cap_idx) == NULL) {
return -ENOTSUP;
}
if (retval < 0) if (retval < 0)
return retval; return retval;
@ -8714,7 +8704,6 @@ test_authenticated_decryption(const struct aead_test_data *tdata)
{ {
struct crypto_testsuite_params *ts_params = &testsuite_params; struct crypto_testsuite_params *ts_params = &testsuite_params;
struct crypto_unittest_params *ut_params = &unittest_params; struct crypto_unittest_params *ut_params = &unittest_params;
struct rte_cryptodev_sym_capability_idx cap_idx;
int retval; int retval;
uint8_t *plaintext; uint8_t *plaintext;
@ -8730,14 +8719,6 @@ test_authenticated_decryption(const struct aead_test_data *tdata)
if (retval < 0) if (retval < 0)
return retval; return retval;
cap_idx.type = RTE_CRYPTO_SYM_XFORM_AEAD;
cap_idx.algo.aead = tdata->algo;
if (rte_cryptodev_sym_capability_get(ts_params->valid_devs[0],
&cap_idx) == NULL) {
return -ENOTSUP;
}
/* alloc mbuf and set payload */ /* alloc mbuf and set payload */
if (tdata->aad.len > MBUF_SIZE) { if (tdata->aad.len > MBUF_SIZE) {
ut_params->ibuf = rte_pktmbuf_alloc(ts_params->large_mbuf_pool); ut_params->ibuf = rte_pktmbuf_alloc(ts_params->large_mbuf_pool);
@ -8790,18 +8771,6 @@ test_authenticated_decryption(const struct aead_test_data *tdata)
return 0; return 0;
} }
static int
test_chacha20_poly1305_encrypt_test_case_rfc8439(void)
{
return test_authenticated_encryption(&chacha20_poly1305_case_rfc8439);
}
static int
test_chacha20_poly1305_decrypt_test_case_rfc8439(void)
{
return test_authenticated_decryption(&chacha20_poly1305_case_rfc8439);
}
static int static int
test_AES_GCM_authenticated_decryption_test_case_1(void) test_AES_GCM_authenticated_decryption_test_case_1(void)
{ {
@ -12199,10 +12168,6 @@ static struct unit_test_suite cryptodev_qat_testsuite = {
TEST_CASE_ST(ut_setup, ut_teardown, TEST_CASE_ST(ut_setup, ut_teardown,
test_AES_CCM_authenticated_decryption_test_case_128_3), test_AES_CCM_authenticated_decryption_test_case_128_3),
TEST_CASE_ST(ut_setup, ut_teardown,
test_chacha20_poly1305_encrypt_test_case_rfc8439),
TEST_CASE_ST(ut_setup, ut_teardown,
test_chacha20_poly1305_decrypt_test_case_rfc8439),
/** AES GCM Authenticated Encryption */ /** AES GCM Authenticated Encryption */
TEST_CASE_ST(ut_setup, ut_teardown, TEST_CASE_ST(ut_setup, ut_teardown,
test_AES_GCM_auth_encrypt_SGL_in_place_1500B), test_AES_GCM_auth_encrypt_SGL_in_place_1500B),

View File

@ -84,83 +84,6 @@ struct gmac_test_data {
}; };
static uint8_t chacha_aad_rfc8439[] = {
0x50, 0x51, 0x52, 0x53, 0xc0, 0xc1, 0xc2, 0xc3,
0xc4, 0xc5, 0xc6, 0xc7
};
static const struct aead_test_data chacha20_poly1305_case_rfc8439 = {
.algo = RTE_CRYPTO_AEAD_CHACHA20_POLY1305,
.key = {
.data = {
0x80, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87,
0x88, 0x89, 0x8a, 0x8b, 0x8c, 0x8d, 0x8e, 0x8f,
0x90, 0x91, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97,
0x98, 0x99, 0x9a, 0x9b, 0x9c, 0x9d, 0x9e, 0x9f
},
.len = 32
},
.iv = {
.data = {
0x07, 0x00, 0x00, 0x00, 0x40, 0x41, 0x42, 0x43,
0x44, 0x45, 0x46, 0x47
},
.len = 12
},
.aad = {
.data = chacha_aad_rfc8439,
.len = 12
},
.plaintext = {
.data = {
0x4c, 0x61, 0x64, 0x69, 0x65, 0x73, 0x20, 0x61,
0x6e, 0x64, 0x20, 0x47, 0x65, 0x6e, 0x74, 0x6c,
0x65, 0x6d, 0x65, 0x6e, 0x20, 0x6f, 0x66, 0x20,
0x74, 0x68, 0x65, 0x20, 0x63, 0x6c, 0x61, 0x73,
0x73, 0x20, 0x6f, 0x66, 0x20, 0x27, 0x39, 0x39,
0x3a, 0x20, 0x49, 0x66, 0x20, 0x49, 0x20, 0x63,
0x6f, 0x75, 0x6c, 0x64, 0x20, 0x6f, 0x66, 0x66,
0x65, 0x72, 0x20, 0x79, 0x6f, 0x75, 0x20, 0x6f,
0x6e, 0x6c, 0x79, 0x20, 0x6f, 0x6e, 0x65, 0x20,
0x74, 0x69, 0x70, 0x20, 0x66, 0x6f, 0x72, 0x20,
0x74, 0x68, 0x65, 0x20, 0x66, 0x75, 0x74, 0x75,
0x72, 0x65, 0x2c, 0x20, 0x73, 0x75, 0x6e, 0x73,
0x63, 0x72, 0x65, 0x65, 0x6e, 0x20, 0x77, 0x6f,
0x75, 0x6c, 0x64, 0x20, 0x62, 0x65, 0x20, 0x69,
0x74, 0x2e
},
.len = 114
},
.ciphertext = {
.data = {
0xd3, 0x1a, 0x8d, 0x34, 0x64, 0x8e, 0x60, 0xdb,
0x7b, 0x86, 0xaf, 0xbc, 0x53, 0xef, 0x7e, 0xc2,
0xa4, 0xad, 0xed, 0x51, 0x29, 0x6e, 0x08, 0xfe,
0xa9, 0xe2, 0xb5, 0xa7, 0x36, 0xee, 0x62, 0xd6,
0x3d, 0xbe, 0xa4, 0x5e, 0x8c, 0xa9, 0x67, 0x12,
0x82, 0xfa, 0xfb, 0x69, 0xda, 0x92, 0x72, 0x8b,
0x1a, 0x71, 0xde, 0x0a, 0x9e, 0x06, 0x0b, 0x29,
0x05, 0xd6, 0xa5, 0xb6, 0x7e, 0xcd, 0x3b, 0x36,
0x92, 0xdd, 0xbd, 0x7f, 0x2d, 0x77, 0x8b, 0x8c,
0x98, 0x03, 0xae, 0xe3, 0x28, 0x09, 0x1b, 0x58,
0xfa, 0xb3, 0x24, 0xe4, 0xfa, 0xd6, 0x75, 0x94,
0x55, 0x85, 0x80, 0x8b, 0x48, 0x31, 0xd7, 0xbc,
0x3f, 0xf4, 0xde, 0xf0, 0x8e, 0x4b, 0x7a, 0x9d,
0xe5, 0x76, 0xd2, 0x65, 0x86, 0xce, 0xc6, 0x4b,
0x61, 0x16
},
.len = 114
},
.auth_tag = {
.data = {
0x1a, 0xe1, 0x0b, 0x59, 0x4f, 0x09, 0xe2, 0x6a,
0x7e, 0x90, 0x2e, 0xcb, 0xd0, 0x60, 0x06, 0x91
},
.len = 16
}
};
/** AES-GCM-128 Test Vectors */ /** AES-GCM-128 Test Vectors */
static const struct aead_test_data gcm_test_case_1 = { static const struct aead_test_data gcm_test_case_1 = {
.algo = RTE_CRYPTO_AEAD_AES_GCM, .algo = RTE_CRYPTO_AEAD_AES_GCM,

View File

@ -93,13 +93,12 @@ SHA3_512 HMAC =
; Supported AEAD algorithms of a default crypto driver. ; Supported AEAD algorithms of a default crypto driver.
; ;
[AEAD] [AEAD]
AES GCM (128) = AES GCM (128) =
AES GCM (192) = AES GCM (192) =
AES GCM (256) = AES GCM (256) =
AES CCM (128) = AES CCM (128) =
AES CCM (192) = AES CCM (192) =
AES CCM (256) = AES CCM (256) =
CHACHA20-POLY1305 =
; ;
; Supported Asymmetric algorithms of a default crypto driver. ; Supported Asymmetric algorithms of a default crypto driver.
; ;

View File

@ -60,13 +60,12 @@ AES CMAC (128) = Y
; Supported AEAD algorithms of the 'qat' crypto driver. ; Supported AEAD algorithms of the 'qat' crypto driver.
; ;
[AEAD] [AEAD]
AES GCM (128) = Y AES GCM (128) = Y
AES GCM (192) = Y AES GCM (192) = Y
AES GCM (256) = Y AES GCM (256) = Y
AES CCM (128) = Y AES CCM (128) = Y
AES CCM (192) = Y AES CCM (192) = Y
AES CCM (256) = Y AES CCM (256) = Y
CHACHA20-POLY1305 = Y
; ;
; Supported Asymmetric algorithms of the 'qat' crypto driver. ; Supported Asymmetric algorithms of the 'qat' crypto driver.

View File

@ -70,7 +70,6 @@ Supported AEAD algorithms:
* ``RTE_CRYPTO_AEAD_AES_GCM`` * ``RTE_CRYPTO_AEAD_AES_GCM``
* ``RTE_CRYPTO_AEAD_AES_CCM`` * ``RTE_CRYPTO_AEAD_AES_CCM``
* ``RTE_CRYPTO_AEAD_CHACHA20_POLY1305``
Supported Chains Supported Chains

View File

@ -120,7 +120,6 @@ New Features
* **Added algorithms to cryptodev API.** * **Added algorithms to cryptodev API.**
* Chacha20-Poly1305 AEAD algorithm can now be supported in cryptodev.
* ECDSA (Elliptic Curve Digital Signature Algorithm) is added to * ECDSA (Elliptic Curve Digital Signature Algorithm) is added to
asymmetric crypto library specifications. asymmetric crypto library specifications.
* ECPM (Elliptic Curve Point Multiplication) is added to * ECPM (Elliptic Curve Point Multiplication) is added to
@ -134,10 +133,6 @@ New Features
Such algorithm combinations are not supported on GEN1/GEN2 hardware Such algorithm combinations are not supported on GEN1/GEN2 hardware
and executing the request returns RTE_CRYPTO_OP_STATUS_INVALID_SESSION. and executing the request returns RTE_CRYPTO_OP_STATUS_INVALID_SESSION.
* **Updated the Intel QuickAssist Technology (QAT) symmetric crypto PMD.**
Added Chacha20-Poly1305 AEAD algorithm.
* **Added Marvell OCTEON TX2 End Point rawdev PMD.** * **Added Marvell OCTEON TX2 End Point rawdev PMD.**
Added a new OCTEON TX2 rawdev PMD for End Point mode of operation. Added a new OCTEON TX2 rawdev PMD for End Point mode of operation.

View File

@ -204,9 +204,7 @@ enum icp_qat_hw_cipher_algo {
ICP_QAT_HW_CIPHER_ALGO_KASUMI = 7, ICP_QAT_HW_CIPHER_ALGO_KASUMI = 7,
ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2 = 8, ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2 = 8,
ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3 = 9, ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3 = 9,
ICP_QAT_HW_CIPHER_ALGO_SM4 = 10, ICP_QAT_HW_CIPHER_DELIMITER = 10
ICP_QAT_HW_CIPHER_ALGO_CHACHA20_POLY1305 = 11,
ICP_QAT_HW_CIPHER_DELIMITER = 12
}; };
enum icp_qat_hw_cipher_mode { enum icp_qat_hw_cipher_mode {
@ -308,12 +306,6 @@ enum icp_qat_hw_cipher_convert {
#define ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ 16 #define ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ 16
#define ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ 16 #define ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ 16
#define ICP_QAT_HW_MODE_F8_NUM_REG_TO_CLEAR 2 #define ICP_QAT_HW_MODE_F8_NUM_REG_TO_CLEAR 2
#define ICP_QAT_HW_CHACHAPOLY_KEY_SZ 32
#define ICP_QAT_HW_CHACHAPOLY_IV_SZ 12
#define ICP_QAT_HW_CHACHAPOLY_BLK_SZ 64
#define ICP_QAT_HW_SPC_CTR_SZ 16
#define ICP_QAT_HW_CHACHAPOLY_ICV_SZ 16
#define ICP_QAT_HW_CHACHAPOLY_AAD_MAX_LOG 14
#define ICP_QAT_HW_CIPHER_MAX_KEY_SZ ICP_QAT_HW_AES_256_F8_KEY_SZ #define ICP_QAT_HW_CIPHER_MAX_KEY_SZ ICP_QAT_HW_AES_256_F8_KEY_SZ

View File

@ -594,36 +594,4 @@
}, } \ }, } \
} }
#define QAT_EXTRA_GEN3_SYM_CAPABILITIES \
{ /* Chacha20-Poly1305 */ \
.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
{.sym = { \
.xform_type = RTE_CRYPTO_SYM_XFORM_AEAD, \
{.aead = { \
.algo = RTE_CRYPTO_AEAD_CHACHA20_POLY1305, \
.block_size = 64, \
.key_size = { \
.min = 32, \
.max = 32, \
.increment = 0 \
}, \
.digest_size = { \
.min = 16, \
.max = 16, \
.increment = 0 \
}, \
.aad_size = { \
.min = 0, \
.max = 240, \
.increment = 1 \
}, \
.iv_size = { \
.min = 12, \
.max = 12, \
.increment = 0 \
}, \
}, } \
}, } \
}
#endif /* _QAT_SYM_CAPABILITIES_H_ */ #endif /* _QAT_SYM_CAPABILITIES_H_ */

View File

@ -27,13 +27,6 @@ static const struct rte_cryptodev_capabilities qat_gen2_sym_capabilities[] = {
RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST() RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
}; };
static const struct rte_cryptodev_capabilities qat_gen3_sym_capabilities[] = {
QAT_BASE_GEN1_SYM_CAPABILITIES,
QAT_EXTRA_GEN2_SYM_CAPABILITIES,
QAT_EXTRA_GEN3_SYM_CAPABILITIES,
RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
};
static int qat_sym_qp_release(struct rte_cryptodev *dev, static int qat_sym_qp_release(struct rte_cryptodev *dev,
uint16_t queue_pair_id); uint16_t queue_pair_id);
@ -301,10 +294,8 @@ qat_sym_dev_create(struct qat_pci_device *qat_pci_dev,
internals->qat_dev_capabilities = qat_gen1_sym_capabilities; internals->qat_dev_capabilities = qat_gen1_sym_capabilities;
break; break;
case QAT_GEN2: case QAT_GEN2:
internals->qat_dev_capabilities = qat_gen2_sym_capabilities;
break;
case QAT_GEN3: case QAT_GEN3:
internals->qat_dev_capabilities = qat_gen3_sym_capabilities; internals->qat_dev_capabilities = qat_gen2_sym_capabilities;
break; break;
default: default:
internals->qat_dev_capabilities = qat_gen2_sym_capabilities; internals->qat_dev_capabilities = qat_gen2_sym_capabilities;

View File

@ -576,68 +576,69 @@ qat_sym_session_set_parameters(struct rte_cryptodev *dev,
} }
static int static int
qat_sym_session_handle_single_pass(struct qat_sym_session *session, qat_sym_session_handle_single_pass(struct qat_sym_dev_private *internals,
struct qat_sym_session *session,
struct rte_crypto_aead_xform *aead_xform) struct rte_crypto_aead_xform *aead_xform)
{ {
struct icp_qat_fw_la_cipher_req_params *cipher_param = enum qat_device_gen qat_dev_gen = internals->qat_dev->qat_dev_gen;
(void *) &session->fw_req.serv_specif_rqpars;
session->is_single_pass = 1; if (qat_dev_gen == QAT_GEN3 &&
session->min_qat_dev_gen = QAT_GEN3; aead_xform->iv.length == QAT_AES_GCM_SPC_IV_SIZE) {
session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER; /* Use faster Single-Pass GCM */
if (aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) { struct icp_qat_fw_la_cipher_req_params *cipher_param =
(void *) &session->fw_req.serv_specif_rqpars;
session->is_single_pass = 1;
session->min_qat_dev_gen = QAT_GEN3;
session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER;
session->qat_mode = ICP_QAT_HW_CIPHER_AEAD_MODE; session->qat_mode = ICP_QAT_HW_CIPHER_AEAD_MODE;
session->cipher_iv.offset = aead_xform->iv.offset;
session->cipher_iv.length = aead_xform->iv.length;
if (qat_sym_session_aead_create_cd_cipher(session,
aead_xform->key.data, aead_xform->key.length))
return -EINVAL;
session->aad_len = aead_xform->aad_length;
session->digest_length = aead_xform->digest_length;
if (aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT) {
session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
session->auth_op = ICP_QAT_HW_AUTH_GENERATE;
ICP_QAT_FW_LA_RET_AUTH_SET(
session->fw_req.comn_hdr.serv_specif_flags,
ICP_QAT_FW_LA_RET_AUTH_RES);
} else {
session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
session->auth_op = ICP_QAT_HW_AUTH_VERIFY;
ICP_QAT_FW_LA_CMP_AUTH_SET(
session->fw_req.comn_hdr.serv_specif_flags,
ICP_QAT_FW_LA_CMP_AUTH_RES);
}
ICP_QAT_FW_LA_SINGLE_PASS_PROTO_FLAG_SET(
session->fw_req.comn_hdr.serv_specif_flags,
ICP_QAT_FW_LA_SINGLE_PASS_PROTO);
ICP_QAT_FW_LA_PROTO_SET(
session->fw_req.comn_hdr.serv_specif_flags,
ICP_QAT_FW_LA_NO_PROTO);
ICP_QAT_FW_LA_GCM_IV_LEN_FLAG_SET( ICP_QAT_FW_LA_GCM_IV_LEN_FLAG_SET(
session->fw_req.comn_hdr.serv_specif_flags, session->fw_req.comn_hdr.serv_specif_flags,
ICP_QAT_FW_LA_GCM_IV_LEN_12_OCTETS); ICP_QAT_FW_LA_GCM_IV_LEN_12_OCTETS);
} else { session->fw_req.comn_hdr.service_cmd_id =
/* Chacha-Poly is special case that use QAT CTR mode */ ICP_QAT_FW_LA_CMD_CIPHER;
session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE; session->cd.cipher.cipher_config.val =
ICP_QAT_HW_CIPHER_CONFIG_BUILD(
ICP_QAT_HW_CIPHER_AEAD_MODE,
session->qat_cipher_alg,
ICP_QAT_HW_CIPHER_NO_CONVERT,
session->qat_dir);
QAT_FIELD_SET(session->cd.cipher.cipher_config.val,
aead_xform->digest_length,
QAT_CIPHER_AEAD_HASH_CMP_LEN_BITPOS,
QAT_CIPHER_AEAD_HASH_CMP_LEN_MASK);
session->cd.cipher.cipher_config.reserved =
ICP_QAT_HW_CIPHER_CONFIG_BUILD_UPPER(
aead_xform->aad_length);
cipher_param->spc_aad_sz = aead_xform->aad_length;
cipher_param->spc_auth_res_sz = aead_xform->digest_length;
} }
session->cipher_iv.offset = aead_xform->iv.offset;
session->cipher_iv.length = aead_xform->iv.length;
if (qat_sym_session_aead_create_cd_cipher(session,
aead_xform->key.data, aead_xform->key.length))
return -EINVAL;
session->aad_len = aead_xform->aad_length;
session->digest_length = aead_xform->digest_length;
if (aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT) {
session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
session->auth_op = ICP_QAT_HW_AUTH_GENERATE;
ICP_QAT_FW_LA_RET_AUTH_SET(
session->fw_req.comn_hdr.serv_specif_flags,
ICP_QAT_FW_LA_RET_AUTH_RES);
} else {
session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
session->auth_op = ICP_QAT_HW_AUTH_VERIFY;
ICP_QAT_FW_LA_CMP_AUTH_SET(
session->fw_req.comn_hdr.serv_specif_flags,
ICP_QAT_FW_LA_CMP_AUTH_RES);
}
ICP_QAT_FW_LA_SINGLE_PASS_PROTO_FLAG_SET(
session->fw_req.comn_hdr.serv_specif_flags,
ICP_QAT_FW_LA_SINGLE_PASS_PROTO);
ICP_QAT_FW_LA_PROTO_SET(
session->fw_req.comn_hdr.serv_specif_flags,
ICP_QAT_FW_LA_NO_PROTO);
session->fw_req.comn_hdr.service_cmd_id =
ICP_QAT_FW_LA_CMD_CIPHER;
session->cd.cipher.cipher_config.val =
ICP_QAT_HW_CIPHER_CONFIG_BUILD(
ICP_QAT_HW_CIPHER_AEAD_MODE,
session->qat_cipher_alg,
ICP_QAT_HW_CIPHER_NO_CONVERT,
session->qat_dir);
QAT_FIELD_SET(session->cd.cipher.cipher_config.val,
aead_xform->digest_length,
QAT_CIPHER_AEAD_HASH_CMP_LEN_BITPOS,
QAT_CIPHER_AEAD_HASH_CMP_LEN_MASK);
session->cd.cipher.cipher_config.reserved =
ICP_QAT_HW_CIPHER_CONFIG_BUILD_UPPER(
aead_xform->aad_length);
cipher_param->spc_aad_sz = aead_xform->aad_length;
cipher_param->spc_auth_res_sz = aead_xform->digest_length;
return 0; return 0;
} }
@ -790,10 +791,6 @@ qat_sym_session_configure_aead(struct rte_cryptodev *dev,
{ {
struct rte_crypto_aead_xform *aead_xform = &xform->aead; struct rte_crypto_aead_xform *aead_xform = &xform->aead;
enum rte_crypto_auth_operation crypto_operation; enum rte_crypto_auth_operation crypto_operation;
struct qat_sym_dev_private *internals =
dev->data->dev_private;
enum qat_device_gen qat_dev_gen =
internals->qat_dev->qat_dev_gen;
/* /*
* Store AEAD IV parameters as cipher IV, * Store AEAD IV parameters as cipher IV,
@ -802,7 +799,6 @@ qat_sym_session_configure_aead(struct rte_cryptodev *dev,
session->cipher_iv.offset = xform->aead.iv.offset; session->cipher_iv.offset = xform->aead.iv.offset;
session->cipher_iv.length = xform->aead.iv.length; session->cipher_iv.length = xform->aead.iv.length;
session->is_single_pass = 0;
switch (aead_xform->algo) { switch (aead_xform->algo) {
case RTE_CRYPTO_AEAD_AES_GCM: case RTE_CRYPTO_AEAD_AES_GCM:
if (qat_sym_validate_aes_key(aead_xform->key.length, if (qat_sym_validate_aes_key(aead_xform->key.length,
@ -811,13 +807,7 @@ qat_sym_session_configure_aead(struct rte_cryptodev *dev,
return -EINVAL; return -EINVAL;
} }
session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE; session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
session->qat_hash_alg = session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
if (qat_dev_gen > QAT_GEN2 && aead_xform->iv.length ==
QAT_AES_GCM_SPC_IV_SIZE) {
return qat_sym_session_handle_single_pass(session,
aead_xform);
}
break; break;
case RTE_CRYPTO_AEAD_AES_CCM: case RTE_CRYPTO_AEAD_AES_CCM:
if (qat_sym_validate_aes_key(aead_xform->key.length, if (qat_sym_validate_aes_key(aead_xform->key.length,
@ -828,19 +818,23 @@ qat_sym_session_configure_aead(struct rte_cryptodev *dev,
session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE; session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC; session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC;
break; break;
case RTE_CRYPTO_AEAD_CHACHA20_POLY1305:
if (aead_xform->key.length != ICP_QAT_HW_CHACHAPOLY_KEY_SZ)
return -EINVAL;
session->qat_cipher_alg =
ICP_QAT_HW_CIPHER_ALGO_CHACHA20_POLY1305;
return qat_sym_session_handle_single_pass(session,
aead_xform);
default: default:
QAT_LOG(ERR, "Crypto: Undefined AEAD specified %u\n", QAT_LOG(ERR, "Crypto: Undefined AEAD specified %u\n",
aead_xform->algo); aead_xform->algo);
return -EINVAL; return -EINVAL;
} }
session->is_single_pass = 0;
if (aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) {
/* Use faster Single-Pass GCM if possible */
int res = qat_sym_session_handle_single_pass(
dev->data->dev_private, session, aead_xform);
if (res < 0)
return res;
if (session->is_single_pass)
return 0;
}
if ((aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT && if ((aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT &&
aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) || aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) ||
(aead_xform->op == RTE_CRYPTO_AEAD_OP_DECRYPT && (aead_xform->op == RTE_CRYPTO_AEAD_OP_DECRYPT &&

View File

@ -348,8 +348,6 @@ enum rte_crypto_aead_algorithm {
/**< AES algorithm in CCM mode. */ /**< AES algorithm in CCM mode. */
RTE_CRYPTO_AEAD_AES_GCM, RTE_CRYPTO_AEAD_AES_GCM,
/**< AES algorithm in GCM mode. */ /**< AES algorithm in GCM mode. */
RTE_CRYPTO_AEAD_CHACHA20_POLY1305,
/**< Chacha20 cipher with poly1305 authenticator */
RTE_CRYPTO_AEAD_LIST_END RTE_CRYPTO_AEAD_LIST_END
}; };
@ -393,11 +391,6 @@ struct rte_crypto_aead_xform {
* be allocated, even though the length field will * be allocated, even though the length field will
* have a value less than this. * have a value less than this.
* *
* - For Chacha20-Poly1305 it is 96-bit nonce.
* PMD sets initial counter for Poly1305 key generation
* part to 0 and for Chacha20 encryption to 1 as per
* rfc8439 2.8. AEAD construction.
*
* For optimum performance, the data pointed to SHOULD * For optimum performance, the data pointed to SHOULD
* be 8-byte aligned. * be 8-byte aligned.
*/ */
@ -414,8 +407,6 @@ struct rte_crypto_aead_xform {
* *
* - For CCM mode, this is the length of the nonce, * - For CCM mode, this is the length of the nonce,
* which can be in the range 7 to 13 inclusive. * which can be in the range 7 to 13 inclusive.
*
* - For Chacha20-Poly1305 this field is always 12.
*/ */
} iv; /**< Initialisation vector parameters */ } iv; /**< Initialisation vector parameters */

View File

@ -151,7 +151,6 @@ const char *
rte_crypto_aead_algorithm_strings[] = { rte_crypto_aead_algorithm_strings[] = {
[RTE_CRYPTO_AEAD_AES_CCM] = "aes-ccm", [RTE_CRYPTO_AEAD_AES_CCM] = "aes-ccm",
[RTE_CRYPTO_AEAD_AES_GCM] = "aes-gcm", [RTE_CRYPTO_AEAD_AES_GCM] = "aes-gcm",
[RTE_CRYPTO_AEAD_CHACHA20_POLY1305] = "chacha20-poly1305"
}; };
/** /**