crypto/qat: add ZUC EEA3/EIA3 capability

This commit adds ZUC EEA3 cipher and ZUC EIA3 MAC capability
to the Intel(R) QuickAssist Technology driver.

Signed-off-by: Arek Kusztal <arkadiuszx.kusztal@intel.com>
Acked-by: Fiona Trahe <fiona.trahe@intel.com>
Arek Kusztal authored on 2017-03-31 13:53:18 +01:00; committed by Pablo de Lara
parent 6a3c87bc6a
commit d9b7d5bbc8
7 changed files with 211 additions and 30 deletions
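
For context, a minimal caller-side sketch of how an application would request the new algorithms through the cryptodev API of this era (field names assume the DPDK 17.05 xform layout; key buffers, lengths, and the AAD-carried IV convention are stated assumptions, not taken from this diff):

#include <rte_crypto.h>

/* Placeholder 128-bit keys; ZUC EEA3/EIA3 both use 16-byte keys. */
static uint8_t cipher_key[16];
static uint8_t auth_key[16];

/* Hash (EIA3) xform, chained after the cipher (EEA3) xform. */
static struct rte_crypto_sym_xform auth_xform = {
	.type = RTE_CRYPTO_SYM_XFORM_AUTH,
	.auth = {
		.op = RTE_CRYPTO_AUTH_OP_GENERATE,
		.algo = RTE_CRYPTO_AUTH_ZUC_EIA3,
		.key = { .data = auth_key, .length = sizeof(auth_key) },
		.digest_length = 4,	/* EIA3 emits a 32-bit MAC-I */
		/* In this era the wireless-algo IV travels as AAD,
		 * matching the 16-byte aad_size in the capability table. */
		.add_auth_data_length = 16,
	},
};

static struct rte_crypto_sym_xform cipher_xform = {
	.type = RTE_CRYPTO_SYM_XFORM_CIPHER,
	.next = &auth_xform,
	.cipher = {
		.op = RTE_CRYPTO_CIPHER_OP_ENCRYPT,
		.algo = RTE_CRYPTO_CIPHER_ZUC_EEA3,
		.key = { .data = cipher_key, .length = sizeof(cipher_key) },
	},
};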


@@ -26,6 +26,7 @@ SNOW3G UEA2 = Y
KASUMI F8 = Y
AES DOCSIS BPI = Y
DES DOCSIS BPI = Y
ZUC EEA3 = Y
;
; Supported authentication algorithms of the 'qat' crypto driver.
;
@@ -41,6 +42,7 @@ AES GMAC = Y
SNOW3G UIA2 = Y
KASUMI F9 = Y
AES XCBC MAC = Y
ZUC EIA3 = Y
;
; Supported AEAD algorithms of the 'qat' crypto driver.


@@ -57,6 +57,7 @@ Cipher algorithms:
* ``RTE_CRYPTO_CIPHER_DES_CBC``
* ``RTE_CRYPTO_CIPHER_AES_DOCSISBPI``
* ``RTE_CRYPTO_CIPHER_DES_DOCSISBPI``
* ``RTE_CRYPTO_CIPHER_ZUC_EEA3``
Hash algorithms:
@@ -71,6 +72,7 @@ Hash algorithms:
* ``RTE_CRYPTO_AUTH_NULL``
* ``RTE_CRYPTO_AUTH_KASUMI_F9``
* ``RTE_CRYPTO_AUTH_AES_GMAC``
* ``RTE_CRYPTO_AUTH_ZUC_EIA3``
Limitations
@@ -81,6 +83,7 @@ Limitations
* SNOW 3G (UEA2) and KASUMI (F8) are supported only if the cipher length and cipher offset fields are byte-aligned.
* SNOW 3G (UIA2) and KASUMI (F9) are supported only if the hash length and hash offset fields are byte-aligned (see the sketch after this list).
* No BSD support, as the BSD QAT kernel driver is not available.
* ZUC EEA3/EIA3 is not supported by dh895xcc devices.
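
A hedged caller-side sketch of the byte-alignment rule above (DPDK 17.05-era op fields assumed; the 64-byte message size is a placeholder). For SNOW 3G, KASUMI, and ZUC the cryptodev API expresses data offsets and lengths in bits, and the QAT PMD accepts only multiples of 8:

/* 64-byte payload described in bits, as the wireless algos require;
 * both values are multiples of 8, so the QAT PMD will accept them. */
op->sym->cipher.data.offset = 0;       /* bits */
op->sym->cipher.data.length = 64 * 8;  /* bits */
op->sym->auth.data.offset   = 0;       /* bits */
op->sym->auth.data.length   = 64 * 8;  /* bits */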
Installation


@@ -251,6 +251,7 @@ New Features
* AES DOCSIS BPI algorithm.
* DES DOCSIS BPI algorithm.
* ZUC EEA3/EIA3 algorithms.
* **Updated the AESNI MB PMD.**


@@ -80,6 +80,14 @@ struct qat_alg_buf {
uint64_t addr;
} __rte_packed;
enum qat_crypto_proto_flag {
QAT_CRYPTO_PROTO_FLAG_NONE = 0,
QAT_CRYPTO_PROTO_FLAG_CCM = 1,
QAT_CRYPTO_PROTO_FLAG_GCM = 2,
QAT_CRYPTO_PROTO_FLAG_SNOW3G = 3,
QAT_CRYPTO_PROTO_FLAG_ZUC = 4
};
/*
* Maximum number of SGL entries
*/
@@ -144,7 +152,7 @@ int qat_alg_aead_session_create_content_desc_auth(struct qat_session *cdesc,
unsigned int operation);
void qat_alg_init_common_hdr(struct icp_qat_fw_comn_req_hdr *header,
-uint16_t proto);
+enum qat_crypto_proto_flag proto_flags);
void qat_alg_ablkcipher_init_enc(struct qat_alg_ablkcipher_cd *cd,
int alg, const uint8_t *key,
@@ -162,4 +170,5 @@ int qat_alg_validate_kasumi_key(int key_len, enum icp_qat_hw_cipher_algo *alg);
int qat_alg_validate_3des_key(int key_len, enum icp_qat_hw_cipher_algo *alg);
int qat_alg_validate_des_key(int key_len, enum icp_qat_hw_cipher_algo *alg);
int qat_cipher_get_block_size(enum icp_qat_hw_cipher_algo qat_cipher_alg);
int qat_alg_validate_zuc_key(int key_len, enum icp_qat_hw_cipher_algo *alg);
#endif
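
With the prototype change above, in-driver callers select the protocol symbolically instead of passing a raw firmware value; a one-line sketch of the new call style:

/* The helper maps the enum to the right flag-set macro; this matters
 * for ZUC because its proto flag lives in a separate bit field. */
qat_alg_init_common_hdr(header, QAT_CRYPTO_PROTO_FLAG_ZUC);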


@@ -109,6 +109,9 @@ static int qat_hash_get_state1_size(enum icp_qat_hw_auth_algo qat_hash_alg)
case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
return QAT_HW_ROUND_UP(ICP_QAT_HW_GALOIS_128_STATE1_SZ,
QAT_HW_DEFAULT_ALIGNMENT);
case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
return QAT_HW_ROUND_UP(ICP_QAT_HW_ZUC_3G_EIA3_STATE1_SZ,
QAT_HW_DEFAULT_ALIGNMENT);
case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
return QAT_HW_ROUND_UP(ICP_QAT_HW_SNOW_3G_UIA2_STATE1_SZ,
QAT_HW_DEFAULT_ALIGNMENT);
@@ -441,7 +444,7 @@ static int qat_alg_do_precomputes(enum icp_qat_hw_auth_algo hash_alg,
}
void qat_alg_init_common_hdr(struct icp_qat_fw_comn_req_hdr *header,
-uint16_t proto)
+enum qat_crypto_proto_flag proto_flags)
{
PMD_INIT_FUNC_TRACE();
header->hdr_flags =
@@ -454,14 +457,60 @@ void qat_alg_init_common_hdr(struct icp_qat_fw_comn_req_hdr *header,
ICP_QAT_FW_LA_PARTIAL_NONE);
ICP_QAT_FW_LA_CIPH_IV_FLD_FLAG_SET(header->serv_specif_flags,
ICP_QAT_FW_CIPH_IV_16BYTE_DATA);
-ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
-proto);
switch (proto_flags) {
case QAT_CRYPTO_PROTO_FLAG_NONE:
ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
ICP_QAT_FW_LA_NO_PROTO);
break;
case QAT_CRYPTO_PROTO_FLAG_CCM:
ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
ICP_QAT_FW_LA_CCM_PROTO);
break;
case QAT_CRYPTO_PROTO_FLAG_GCM:
ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
ICP_QAT_FW_LA_GCM_PROTO);
break;
case QAT_CRYPTO_PROTO_FLAG_SNOW3G:
ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
ICP_QAT_FW_LA_SNOW_3G_PROTO);
break;
case QAT_CRYPTO_PROTO_FLAG_ZUC:
ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(header->serv_specif_flags,
ICP_QAT_FW_LA_ZUC_3G_PROTO);
break;
}
ICP_QAT_FW_LA_UPDATE_STATE_SET(header->serv_specif_flags,
ICP_QAT_FW_LA_NO_UPDATE_STATE);
ICP_QAT_FW_LA_DIGEST_IN_BUFFER_SET(header->serv_specif_flags,
ICP_QAT_FW_LA_NO_DIGEST_IN_BUFFER);
}
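
Note that the ZUC case is the only one that bypasses ICP_QAT_FW_LA_PROTO_SET: the ZUC 3G proto flag occupies its own bit field within serv_specif_flags. The firmware headers implement such setters in the usual clear-then-OR style; an illustrative sketch with made-up names (the real bit positions and masks live in the QAT firmware headers, icp_qat_fw_la.h):

/* Illustrative only -- not the actual QAT macros or bit positions. */
#define EXAMPLE_FIELD_SET(flags, val, bitpos, mask) \
	((flags) = ((flags) & ~((mask) << (bitpos))) | \
		(((val) & (mask)) << (bitpos)))
#define EXAMPLE_FIELD_GET(flags, bitpos, mask) \
	(((flags) >> (bitpos)) & (mask))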
/*
 * SNOW 3G and ZUC should never use this function; they set their
 * protocol flag directly in both the cipher and the auth parts of
 * the content descriptor building functions.
 */
static enum qat_crypto_proto_flag
qat_get_crypto_proto_flag(uint16_t flags)
{
int proto = ICP_QAT_FW_LA_PROTO_GET(flags);
enum qat_crypto_proto_flag qat_proto_flag =
QAT_CRYPTO_PROTO_FLAG_NONE;
switch (proto) {
case ICP_QAT_FW_LA_GCM_PROTO:
qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
break;
case ICP_QAT_FW_LA_CCM_PROTO:
qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
break;
}
return qat_proto_flag;
}
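
A small sanity sketch of the helper's intended round-trip behaviour (hedged; assumes the SET and GET macros address the same generic proto field):

/* After setting the GCM proto, re-deriving the enum should round-trip.
 * SNOW 3G and ZUC deliberately never take this path. */
ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags, ICP_QAT_FW_LA_GCM_PROTO);
assert(qat_get_crypto_proto_flag(header->serv_specif_flags) ==
		QAT_CRYPTO_PROTO_FLAG_GCM);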
int qat_alg_aead_session_create_content_desc_cipher(struct qat_session *cdesc,
uint8_t *cipherkey,
uint32_t cipherkeylen)
@@ -474,8 +523,9 @@ int qat_alg_aead_session_create_content_desc_cipher(struct qat_session *cdesc,
struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
enum icp_qat_hw_cipher_convert key_convert;
+enum qat_crypto_proto_flag qat_proto_flag =
+QAT_CRYPTO_PROTO_FLAG_NONE;
uint32_t total_key_size;
-uint16_t proto = ICP_QAT_FW_LA_NO_PROTO; /* no CCM/GCM/SNOW 3G */
uint16_t cipher_offset, cd_size;
uint32_t wordIndex = 0;
uint32_t *temp_key = NULL;
@@ -515,7 +565,9 @@ int qat_alg_aead_session_create_content_desc_cipher(struct qat_session *cdesc,
*/
cdesc->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
-} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2)
+} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2
+|| cdesc->qat_cipher_alg ==
+ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3)
key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
else if (cdesc->qat_dir == ICP_QAT_HW_CIPHER_ENCRYPT)
key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
@@ -527,7 +579,8 @@ int qat_alg_aead_session_create_content_desc_cipher(struct qat_session *cdesc,
ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
cipher_cd_ctrl->cipher_state_sz =
ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
-proto = ICP_QAT_FW_LA_SNOW_3G_PROTO;
+qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
total_key_size = ICP_QAT_HW_KASUMI_F8_KEY_SZ;
cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_KASUMI_BLK_SZ >> 3;
@@ -536,25 +589,34 @@ int qat_alg_aead_session_create_content_desc_cipher(struct qat_session *cdesc,
} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES) {
total_key_size = ICP_QAT_HW_3DES_KEY_SZ;
cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_3DES_BLK_SZ >> 3;
-proto = ICP_QAT_FW_LA_PROTO_GET(header->serv_specif_flags);
+qat_proto_flag =
+qat_get_crypto_proto_flag(header->serv_specif_flags);
} else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_DES) {
total_key_size = ICP_QAT_HW_DES_KEY_SZ;
cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_DES_BLK_SZ >> 3;
-proto = ICP_QAT_FW_LA_PROTO_GET(header->serv_specif_flags);
+qat_proto_flag =
+qat_get_crypto_proto_flag(header->serv_specif_flags);
} else if (cdesc->qat_cipher_alg ==
ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3) {
total_key_size = ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ +
ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
cipher_cd_ctrl->cipher_state_sz =
ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
} else {
total_key_size = cipherkeylen;
cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_AES_BLK_SZ >> 3;
-proto = ICP_QAT_FW_LA_PROTO_GET(header->serv_specif_flags);
+qat_proto_flag =
+qat_get_crypto_proto_flag(header->serv_specif_flags);
}
cipher_cd_ctrl->cipher_key_sz = total_key_size >> 3;
cipher_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
cipher_cd_ctrl->cipher_cfg_offset = cipher_offset >> 3;
header->service_cmd_id = cdesc->qat_cmd;
-qat_alg_init_common_hdr(header, proto);
+qat_alg_init_common_hdr(header, qat_proto_flag);
cipher = (struct icp_qat_hw_cipher_algo_blk *)cdesc->cd_cur_ptr;
cipher->cipher_config.val =
ICP_QAT_HW_CIPHER_CONFIG_BUILD(cdesc->qat_mode,
cdesc->qat_cipher_alg, key_convert,
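
For ZUC EEA3 the content descriptor stores the 16-byte IV next to the 16-byte key, which is why total_key_size above is key size plus IV size. A back-of-the-envelope check using the standard ZUC sizes (assumed here, not read from the QAT headers):

/* Firmware size fields count 8-byte quad-words, hence the ">> 3". */
uint32_t total_key_size  = 16 + 16;             /* key + IV, bytes  */
uint16_t cipher_key_sz   = total_key_size >> 3; /* 4 quad-words     */
uint16_t cipher_state_sz = 16 >> 3;             /* IV: 2 quad-words */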
@@ -615,12 +677,13 @@ int qat_alg_aead_session_create_content_desc_auth(struct qat_session *cdesc,
(struct icp_qat_fw_la_auth_req_params *)
((char *)&req_tmpl->serv_specif_rqpars +
sizeof(struct icp_qat_fw_la_cipher_req_params));
-uint16_t proto = ICP_QAT_FW_LA_NO_PROTO; /* no CCM/GCM/SNOW 3G */
uint16_t state1_size = 0, state2_size = 0;
uint16_t hash_offset, cd_size;
uint32_t *aad_len = NULL;
uint32_t wordIndex = 0;
uint32_t *pTempKey;
+enum qat_crypto_proto_flag qat_proto_flag =
+QAT_CRYPTO_PROTO_FLAG_NONE;
PMD_INIT_FUNC_TRACE();
@@ -670,7 +733,8 @@ int qat_alg_aead_session_create_content_desc_auth(struct qat_session *cdesc,
cdesc->qat_hash_alg, digestsize);
if (cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2
-|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_KASUMI_F9)
+|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_KASUMI_F9
+|| cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3)
hash->auth_counter.counter = 0;
else
hash->auth_counter.counter = rte_bswap32(
@@ -733,7 +797,7 @@ int qat_alg_aead_session_create_content_desc_auth(struct qat_session *cdesc,
break;
case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
-proto = ICP_QAT_FW_LA_GCM_PROTO;
+qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
state1_size = ICP_QAT_HW_GALOIS_128_STATE1_SZ;
if (qat_alg_do_precomputes(cdesc->qat_hash_alg,
authkey, authkeylen, cdesc->cd_cur_ptr + state1_size,
@@ -755,7 +819,7 @@ int qat_alg_aead_session_create_content_desc_auth(struct qat_session *cdesc,
*aad_len = rte_bswap32(add_auth_data_length);
break;
case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
-proto = ICP_QAT_FW_LA_SNOW_3G_PROTO;
+qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
state1_size = qat_hash_get_state1_size(
ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2);
state2_size = ICP_QAT_HW_SNOW_3G_UIA2_STATE2_SZ;
@@ -776,6 +840,24 @@ int qat_alg_aead_session_create_content_desc_auth(struct qat_session *cdesc,
auth_param->hash_state_sz =
RTE_ALIGN_CEIL(add_auth_data_length, 16) >> 3;
break;
case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
hash->auth_config.config =
ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE0,
cdesc->qat_hash_alg, digestsize);
qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
state1_size = qat_hash_get_state1_size(
ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3);
state2_size = ICP_QAT_HW_ZUC_3G_EIA3_STATE2_SZ;
memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size
+ ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ);
memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
cdesc->cd_cur_ptr += state1_size + state2_size
+ ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
auth_param->hash_state_sz =
RTE_ALIGN_CEIL(add_auth_data_length, 16) >> 3;
break;
case ICP_QAT_HW_AUTH_ALGO_MD5:
if (qat_alg_do_precomputes(ICP_QAT_HW_AUTH_ALGO_MD5,
authkey, authkeylen, cdesc->cd_cur_ptr,
@@ -813,7 +895,7 @@ int qat_alg_aead_session_create_content_desc_auth(struct qat_session *cdesc,
}
/* Request template setup */
-qat_alg_init_common_hdr(header, proto);
+qat_alg_init_common_hdr(header, qat_proto_flag);
header->service_cmd_id = cdesc->qat_cmd;
/* Auth CD config setup */
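
The ZUC EIA3 case above copies the auth key into the content descriptor at the state1 offset and reserves an IV-sized slot after state2. A layout sketch using the standard ZUC sizes (illustrative constants, not taken from the QAT headers):

/* Hypothetical constants for illustration only. */
enum {
	ZUC_EIA3_KEY_SZ    = 16, /* 128-bit integrity key */
	ZUC_EIA3_IV_SZ     = 16, /* reserved IV slot      */
	ZUC_EIA3_DIGEST_SZ = 4   /* 32-bit MAC-I          */
};
/* cd_cur_ptr advances past [state1][state2][IV slot], with the key
 * memcpy'd at the state1 offset, mirroring the code above. */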
@@ -918,3 +1000,15 @@ int qat_alg_validate_3des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
}
return 0;
}
int qat_alg_validate_zuc_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
{
switch (key_len) {
case ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ:
*alg = ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3;
break;
default:
return -EINVAL;
}
return 0;
}
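
Usage sketch for the new validator (hedged; this mirrors how the session-setup code later in this commit calls it):

enum icp_qat_hw_cipher_algo alg;

/* A 16-byte (128-bit) key is accepted and selects the EEA3 algo... */
int ok  = qat_alg_validate_zuc_key(16, &alg); /* returns 0 */
/* ...any other length is rejected. */
int bad = qat_alg_validate_zuc_key(24, &alg); /* returns -EINVAL */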


@@ -69,7 +69,7 @@
#define BYTE_LENGTH 8
-static int __rte_unused
+static int
qat_is_cipher_alg_supported(enum rte_crypto_cipher_algorithm algo,
struct qat_pmd_private *internals) {
int i = 0;
@@ -89,7 +89,7 @@ qat_is_cipher_alg_supported(enum rte_crypto_cipher_algorithm algo,
return 0;
}
-static int __rte_unused
+static int
qat_is_auth_alg_supported(enum rte_crypto_auth_algorithm algo,
struct qat_pmd_private *internals) {
int i = 0;
@@ -287,11 +287,11 @@ qat_get_cipher_xform(struct rte_crypto_sym_xform *xform)
return NULL;
}
void *
-qat_crypto_sym_configure_session_cipher(struct rte_cryptodev *dev __rte_unused,
+qat_crypto_sym_configure_session_cipher(struct rte_cryptodev *dev,
struct rte_crypto_sym_xform *xform, void *session_private)
{
struct qat_session *session = session_private;
struct qat_pmd_private *internals = dev->data->dev_private;
struct rte_crypto_cipher_xform *cipher_xform = NULL;
/* Get cipher xform from crypto xform chain */
@@ -397,13 +397,27 @@ qat_crypto_sym_configure_session_cipher(struct rte_cryptodev *dev __rte_unused,
}
session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
break;
case RTE_CRYPTO_CIPHER_ZUC_EEA3:
if (!qat_is_cipher_alg_supported(
cipher_xform->algo, internals)) {
PMD_DRV_LOG(ERR, "%s not supported on this device",
rte_crypto_cipher_algorithm_strings
[cipher_xform->algo]);
goto error_out;
}
if (qat_alg_validate_zuc_key(cipher_xform->key.length,
&session->qat_cipher_alg) != 0) {
PMD_DRV_LOG(ERR, "Invalid ZUC cipher key size");
goto error_out;
}
session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
break;
case RTE_CRYPTO_CIPHER_3DES_ECB:
case RTE_CRYPTO_CIPHER_AES_ECB:
case RTE_CRYPTO_CIPHER_AES_CCM:
case RTE_CRYPTO_CIPHER_AES_F8:
case RTE_CRYPTO_CIPHER_AES_XTS:
case RTE_CRYPTO_CIPHER_ARC4:
-case RTE_CRYPTO_CIPHER_ZUC_EEA3:
PMD_DRV_LOG(ERR, "Crypto QAT PMD: Unsupported Cipher alg %u",
cipher_xform->algo);
goto error_out;
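
qat_is_cipher_alg_supported() and qat_is_auth_alg_supported(), now called unconditionally (hence dropping __rte_unused above), walk the per-device capability array so that ZUC is only accepted on devices whose table advertises it. A hedged sketch of that scan pattern (field names follow the rte_cryptodev capability layout; not the verbatim driver code):

static int
example_is_cipher_supported(enum rte_crypto_cipher_algorithm algo,
		const struct rte_cryptodev_capabilities *caps)
{
	int i;

	/* The capability array is terminated by an UNDEFINED op type. */
	for (i = 0; caps[i].op != RTE_CRYPTO_OP_TYPE_UNDEFINED; i++) {
		if (caps[i].op == RTE_CRYPTO_OP_TYPE_SYMMETRIC &&
				caps[i].sym.xform_type ==
				RTE_CRYPTO_SYM_XFORM_CIPHER &&
				caps[i].sym.cipher.algo == algo)
			return 1;
	}
	return 0;
}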
@@ -490,7 +504,7 @@ qat_crypto_sym_configure_session(struct rte_cryptodev *dev,
}
struct qat_session *
-qat_crypto_sym_configure_session_auth(struct rte_cryptodev *dev __rte_unused,
+qat_crypto_sym_configure_session_auth(struct rte_cryptodev *dev,
struct rte_crypto_sym_xform *xform,
struct qat_session *session_private)
{
@@ -498,6 +512,7 @@ qat_crypto_sym_configure_session_auth(struct rte_cryptodev *dev __rte_unused,
struct qat_session *session = session_private;
struct rte_crypto_auth_xform *auth_xform = NULL;
struct rte_crypto_cipher_xform *cipher_xform = NULL;
struct qat_pmd_private *internals = dev->data->dev_private;
auth_xform = qat_get_auth_xform(xform);
switch (auth_xform->algo) {
@@ -537,6 +552,15 @@ qat_crypto_sym_configure_session_auth(struct rte_cryptodev *dev __rte_unused,
case RTE_CRYPTO_AUTH_KASUMI_F9:
session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_KASUMI_F9;
break;
case RTE_CRYPTO_AUTH_ZUC_EIA3:
if (!qat_is_auth_alg_supported(auth_xform->algo, internals)) {
PMD_DRV_LOG(ERR, "%s not supported on this device",
rte_crypto_auth_algorithm_strings
[auth_xform->algo]);
goto error_out;
}
session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3;
break;
case RTE_CRYPTO_AUTH_SHA1:
case RTE_CRYPTO_AUTH_SHA256:
case RTE_CRYPTO_AUTH_SHA512:
@@ -546,7 +570,6 @@ qat_crypto_sym_configure_session_auth(struct rte_cryptodev *dev __rte_unused,
case RTE_CRYPTO_AUTH_AES_CCM:
case RTE_CRYPTO_AUTH_AES_CMAC:
case RTE_CRYPTO_AUTH_AES_CBC_MAC:
-case RTE_CRYPTO_AUTH_ZUC_EIA3:
PMD_DRV_LOG(ERR, "Crypto: Unsupported hash alg %u",
auth_xform->algo);
goto error_out;
@@ -777,6 +800,7 @@ qat_pmd_dequeue_op_burst(void *qp, struct rte_crypto_op **ops,
#ifdef RTE_LIBRTE_PMD_QAT_DEBUG_RX
rte_hexdump(stdout, "qat_response:", (uint8_t *)resp_msg,
sizeof(struct icp_qat_fw_comn_resp));
#endif
if (ICP_QAT_FW_COMN_STATUS_FLAG_OK !=
ICP_QAT_FW_COMN_RESP_CRYPTO_STAT_GET(
@@ -917,14 +941,16 @@ qat_write_hw_desc_entry(struct rte_crypto_op *op, uint8_t *out_msg,
if (ctx->qat_cipher_alg ==
ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2 ||
-ctx->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
+ctx->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI ||
+ctx->qat_cipher_alg ==
+ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3) {
if (unlikely(
(cipher_param->cipher_length % BYTE_LENGTH != 0)
|| (cipher_param->cipher_offset
% BYTE_LENGTH != 0))) {
PMD_DRV_LOG(ERR,
"SNOW3G/KASUMI in QAT PMD only supports byte aligned values");
"SNOW3G/KASUMI/ZUC in QAT PMD only supports byte aligned values");
op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
return -EINVAL;
}
@@ -963,11 +989,13 @@ qat_write_hw_desc_entry(struct rte_crypto_op *op, uint8_t *out_msg,
if (do_auth) {
if (ctx->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2 ||
-ctx->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_KASUMI_F9) {
+ctx->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_KASUMI_F9 ||
+ctx->qat_hash_alg ==
+ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3) {
if (unlikely((auth_param->auth_off % BYTE_LENGTH != 0)
|| (auth_param->auth_len % BYTE_LENGTH != 0))) {
PMD_DRV_LOG(ERR,
"For SNOW3G/KASUMI, QAT PMD only supports byte aligned values");
"For SNOW3G/KASUMI/ZUC, QAT PMD only supports byte aligned values");
op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
return -EINVAL;
}


@@ -504,7 +504,51 @@
}, } \
}
-#define QAT_EXTRA_CPM17_SYM_CAPABILITIES \
-{ }
#define QAT_EXTRA_CPM17_SYM_CAPABILITIES \
{ /* ZUC (EEA3) */ \
.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
{.sym = { \
.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER, \
{.cipher = { \
.algo = RTE_CRYPTO_CIPHER_ZUC_EEA3, \
.block_size = 16, \
.key_size = { \
.min = 16, \
.max = 16, \
.increment = 0 \
}, \
.iv_size = { \
.min = 16, \
.max = 16, \
.increment = 0 \
} \
}, } \
}, } \
}, \
{ /* ZUC (EIA3) */ \
.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
{.sym = { \
.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
{.auth = { \
.algo = RTE_CRYPTO_AUTH_ZUC_EIA3, \
.block_size = 16, \
.key_size = { \
.min = 16, \
.max = 16, \
.increment = 0 \
}, \
.digest_size = { \
.min = 4, \
.max = 4, \
.increment = 0 \
}, \
.aad_size = { \
.min = 16, \
.max = 16, \
.increment = 0 \
} \
}, } \
}, } \
}
#endif /* _QAT_CRYPTO_CAPABILITIES_H_ */
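
These extra entries exist so that second-generation (CPM1.7-class) devices can advertise ZUC while dh895xcc keeps the base list only. A hedged sketch of how such a device table would be composed (the base macro name and table name are assumptions; the end-of-list macro is the standard cryptodev one):

/* Assumed composition: base capabilities plus the CPM1.7 extras. */
static const struct rte_cryptodev_capabilities qat_cpm17_capabilities[] = {
	QAT_BASE_CPM16_SYM_CAPABILITIES,
	QAT_EXTRA_CPM17_SYM_CAPABILITIES,
	RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
};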