crypto/octeontx: support ECPM

Add support for the asymmetric operation EC Point Multiplication (ECPM)
in the crypto_octeontx PMD.

Signed-off-by: Anoob Joseph <anoobj@marvell.com>
Signed-off-by: Balakrishna Bhamidipati <bbhamidipati@marvell.com>
Signed-off-by: Sunila Sahu <ssahu@marvell.com>
Authored by Sunila Sahu on 2020-01-15 18:26:32 +05:30; committed by Akhil Goyal
parent e98dc331e9
commit 99faef8325
5 changed files with 121 additions and 0 deletions
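
The feature is exposed through the generic rte_cryptodev asymmetric API rather than a PMD-specific interface. As a rough, illustrative sketch (not part of the patch), an application selects the RTE_CRYPTO_ASYM_XFORM_ECPM transform, picks a curve, and supplies the input point and scalar through the per-op ecpm parameters; the helper below assumes 32-byte P-256 operands, and session setup plus the enqueue/dequeue loop are left out.

/*
 * Illustrative only, not part of the patch: filling the ECPM transform and
 * per-op parameters via the rte_cryptodev asymmetric API. Assumes 32-byte
 * (P-256) operands; session setup and enqueue/dequeue are omitted.
 */
#include <stdint.h>
#include <string.h>
#include <rte_crypto.h>
#include <rte_crypto_asym.h>

static void
prepare_ecpm_request(struct rte_crypto_asym_xform *xform,
		     struct rte_crypto_op *op,
		     uint8_t *px, uint8_t *py, uint8_t *scalar,
		     uint8_t *rx, uint8_t *ry, size_t len)
{
	/* Transform: EC point multiplication on the chosen curve */
	memset(xform, 0, sizeof(*xform));
	xform->xform_type = RTE_CRYPTO_ASYM_XFORM_ECPM;
	xform->ec.curve_id = RTE_CRYPTO_EC_GROUP_SECP256R1;

	/* Per-op parameters: input point P and scalar k for R = k * P */
	op->asym->ecpm.p.x.data = px;
	op->asym->ecpm.p.x.length = len;
	op->asym->ecpm.p.y.data = py;
	op->asym->ecpm.p.y.length = len;
	op->asym->ecpm.scalar.data = scalar;
	op->asym->ecpm.scalar.length = len;

	/* Result buffers; the PMD writes one prime-length coordinate each */
	op->asym->ecpm.r.x.data = rx;
	op->asym->ecpm.r.y.data = ry;
}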


@@ -73,3 +73,4 @@ Modular Exponentiation = Y
Modular Inversion =
Diffie-hellman =
ECDSA = Y
ECPM = Y


@@ -24,6 +24,7 @@
/* AE opcodes */
#define CPT_MAJOR_OP_MODEX 0x03
#define CPT_MAJOR_OP_ECDSA 0x04
#define CPT_MAJOR_OP_ECC 0x05
#define CPT_MINOR_OP_MODEX 0x01
#define CPT_MINOR_OP_PKCS_ENC 0x02
#define CPT_MINOR_OP_PKCS_ENC_CRT 0x03
@@ -32,6 +33,7 @@
#define CPT_MINOR_OP_MODEX_CRT 0x06
#define CPT_MINOR_OP_ECDSA_SIGN 0x01
#define CPT_MINOR_OP_ECDSA_VERIFY 0x02
#define CPT_MINOR_OP_ECC_UMP 0x03
#define CPT_BLOCK_TYPE1 0
#define CPT_BLOCK_TYPE2 1


@@ -172,6 +172,8 @@ cpt_fill_asym_session_parameters(struct cpt_asym_sess_misc *sess,
		ret = cpt_fill_modex_params(sess, xform);
		break;
	case RTE_CRYPTO_ASYM_XFORM_ECDSA:
		/* Fall through */
	case RTE_CRYPTO_ASYM_XFORM_ECPM:
		ret = cpt_fill_ec_params(sess, xform);
		break;
	default:
@@ -199,6 +201,8 @@ cpt_free_asym_session_parameters(struct cpt_asym_sess_misc *sess)
		rte_free(mod->modulus.data);
		break;
	case RTE_CRYPTO_ASYM_XFORM_ECDSA:
		/* Fall through */
	case RTE_CRYPTO_ASYM_XFORM_ECPM:
		break;
	default:
		CPT_LOG_DP_ERR("Invalid transform type");
@@ -828,4 +832,85 @@ cpt_enqueue_ecdsa_op(struct rte_crypto_op *op,
	return 0;
}

static __rte_always_inline int
cpt_ecpm_prep(struct rte_crypto_ecpm_op_param *ecpm,
	      struct asym_op_params *asym_params,
	      uint8_t curveid)
{
	struct cpt_request_info *req = asym_params->req;
	phys_addr_t mphys = asym_params->meta_buf;
	uint16_t x1_len = ecpm->p.x.length;
	uint16_t y1_len = ecpm->p.y.length;
	uint16_t scalar_align, p_align;
	uint16_t dlen, rlen, prime_len;
	uint16_t x1_offset, y1_offset;
	vq_cmd_word0_t vq_cmd_w0;
	opcode_info_t opcode;
	buf_ptr_t caddr;
	uint8_t *dptr;

	prime_len = ec_grp[curveid].prime.length;

	/* Input buffer */
	dptr = RTE_PTR_ADD(req, sizeof(struct cpt_request_info));

	p_align = ROUNDUP8(prime_len);
	scalar_align = ROUNDUP8(ecpm->scalar.length);

	/*
	 * Set dlen = sum of ROUNDUP8() of the input point (x and y
	 * coordinates), the prime and the scalar length.
	 * Note that the point coordinate length equals the prime length
	 * of the curve.
	 */
	dlen = 3 * p_align + scalar_align;

	x1_offset = prime_len - x1_len;
	y1_offset = prime_len - y1_len;

	memset(dptr, 0, dlen);

	/* Copy input point, scalar, prime */
	memcpy(dptr + x1_offset, ecpm->p.x.data, x1_len);
	dptr += p_align;
	memcpy(dptr + y1_offset, ecpm->p.y.data, y1_len);
	dptr += p_align;
	memcpy(dptr, ecpm->scalar.data, ecpm->scalar.length);
	dptr += scalar_align;
	memcpy(dptr, ec_grp[curveid].prime.data, ec_grp[curveid].prime.length);
	dptr += p_align;

	/* Setup opcodes */
	opcode.s.major = CPT_MAJOR_OP_ECC;
	opcode.s.minor = CPT_MINOR_OP_ECC_UMP;

	/* GP op header */
	vq_cmd_w0.s.opcode = opcode.flags;
	vq_cmd_w0.s.param1 = curveid;
	vq_cmd_w0.s.param2 = ecpm->scalar.length;
	vq_cmd_w0.s.dlen = dlen;
	vq_cmd_w0.u64 = vq_cmd_w0.u64;

	/* Filling cpt_request_info structure */
	req->ist.ei0 = vq_cmd_w0.u64;
	req->ist.ei1 = mphys;
	req->ist.ei2 = mphys + dlen;

	/* Result buffer will store output point where length of
	 * each coordinate will be of prime length, thus set
	 * rlen to twice of prime length.
	 */
	rlen = p_align << 1;
	req->rptr = dptr;

	/* alternate_caddr to write completion status by the microcode */
	req->alternate_caddr = (uint64_t *)(dptr + rlen);
	*req->alternate_caddr = ~((uint64_t)COMPLETION_CODE_INIT);

	/* Preparing completion addr, +1 for completion code */
	caddr.vaddr = dptr + rlen + 1;
	caddr.dma_addr = mphys + dlen + rlen + 1;

	cpt_fill_req_comp_addr(req, caddr);
	return 0;
}
#endif /* _CPT_UCODE_ASYM_H_ */
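
To make the sizing in cpt_ecpm_prep() concrete: for a 256-bit curve, the two input coordinates, the scalar and the prime each round up to 32 bytes, so the command carries a 128-byte input and expects a 64-byte result. A small standalone sketch of that arithmetic follows (illustrative only; ALIGN8 here is assumed to match the driver's ROUNDUP8).

#include <stdint.h>
#include <stdio.h>

/* 8-byte rounding applied to each component (assumed equivalent to the
 * PMD's ROUNDUP8()). */
#define ALIGN8(x) (((x) + 7U) & ~7U)

int main(void)
{
	uint16_t prime_len = 32;                    /* P-256: 256-bit prime */
	uint16_t scalar_len = 32;                   /* scalar k, up to prime size */

	uint16_t p_align = ALIGN8(prime_len);       /* 32 */
	uint16_t scalar_align = ALIGN8(scalar_len); /* 32 */

	/* Input: x and y of the point, the scalar, then the curve prime */
	uint16_t dlen = 3 * p_align + scalar_align; /* 128 bytes */

	/* Output: one prime-length buffer per result coordinate */
	uint16_t rlen = p_align << 1;               /* 64 bytes */

	printf("dlen = %u bytes, rlen = %u bytes\n",
	       (unsigned int)dlen, (unsigned int)rlen);
	return 0;
}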


@@ -641,6 +641,16 @@ static const struct rte_cryptodev_capabilities otx_asym_capabilities[] = {
			},
		}
	},
	{	/* ECPM */
		.op = RTE_CRYPTO_OP_TYPE_ASYMMETRIC,
		{.asym = {
			.xform_capa = {
				.xform_type = RTE_CRYPTO_ASYM_XFORM_ECPM,
				.op_types = 0
				}
			},
		}
	},
	/* End of asymmetric capabilities */
	RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
};
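
With this capability entry in place, an application can probe for ECPM support at runtime through the standard asymmetric capability query. A minimal sketch (dev_id is assumed to refer to an initialized octeontx cryptodev):

#include <stdio.h>
#include <rte_cryptodev.h>
#include <rte_crypto_asym.h>

/* Minimal runtime check for ECPM support on a given cryptodev. */
static int
check_ecpm_support(uint8_t dev_id)
{
	struct rte_cryptodev_asym_capability_idx idx = {
		.type = RTE_CRYPTO_ASYM_XFORM_ECPM,
	};
	const struct rte_cryptodev_asymmetric_xform_capability *capa;

	capa = rte_cryptodev_asym_capability_get(dev_id, &idx);
	if (capa == NULL) {
		printf("ECPM not supported on dev %u\n", dev_id);
		return -1;
	}
	printf("ECPM supported on dev %u\n", dev_id);
	return 0;
}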


@@ -443,6 +443,13 @@ otx_cpt_enq_single_asym(struct cpt_instance *instance,
		if (unlikely(ret))
			goto req_fail;
		break;
	case RTE_CRYPTO_ASYM_XFORM_ECPM:
		ret = cpt_ecpm_prep(&asym_op->ecpm, &params,
				    sess->ec_ctx.curveid);
		if (unlikely(ret))
			goto req_fail;
		break;
	default:
		op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
		ret = -EINVAL;
@@ -704,6 +711,19 @@ otx_cpt_asym_dequeue_ecdsa_op(struct rte_crypto_ecdsa_op_param *ecdsa,
	ecdsa->s.length = prime_len;
}

static __rte_always_inline void
otx_cpt_asym_dequeue_ecpm_op(struct rte_crypto_ecpm_op_param *ecpm,
			     struct cpt_request_info *req,
			     struct cpt_asym_ec_ctx *ec)
{
	int prime_len = ec_grp[ec->curveid].prime.length;

	memcpy(ecpm->r.x.data, req->rptr, prime_len);
	memcpy(ecpm->r.y.data, req->rptr + ROUNDUP8(prime_len), prime_len);
	ecpm->r.x.length = prime_len;
	ecpm->r.y.length = prime_len;
}

static __rte_always_inline void __hot
otx_cpt_asym_post_process(struct rte_crypto_op *cop,
			  struct cpt_request_info *req)
@@ -726,6 +746,9 @@ otx_cpt_asym_post_process(struct rte_crypto_op *cop,
	case RTE_CRYPTO_ASYM_XFORM_ECDSA:
		otx_cpt_asym_dequeue_ecdsa_op(&op->ecdsa, req, &sess->ec_ctx);
		break;
	case RTE_CRYPTO_ASYM_XFORM_ECPM:
		otx_cpt_asym_dequeue_ecpm_op(&op->ecpm, req, &sess->ec_ctx);
		break;
	default:
		CPT_LOG_DP_DEBUG("Invalid crypto xform type");
		cop->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
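
Tying the enqueue and dequeue halves together: below is a hedged sketch of pushing one prepared ECPM op through a queue pair and collecting the result that otx_cpt_asym_dequeue_ecpm_op() fills in. dev_id, qp_id and the prepared op are assumptions carried over from the earlier sketch; note that the r.x/r.y buffers must be at least prime-length, since the dequeue path copies a full coordinate into each.

#include <rte_crypto.h>
#include <rte_cryptodev.h>
#include <rte_pause.h>

/*
 * Illustrative only: run a single ECPM op to completion. On success the
 * driver has written the result point into op->asym->ecpm.r, one
 * prime-length coordinate per buffer.
 */
static int
run_ecpm_op(uint8_t dev_id, uint16_t qp_id, struct rte_crypto_op *op)
{
	struct rte_crypto_op *done = NULL;

	if (rte_cryptodev_enqueue_burst(dev_id, qp_id, &op, 1) != 1)
		return -1;

	/* Busy-poll until the hardware completes the request */
	while (rte_cryptodev_dequeue_burst(dev_id, qp_id, &done, 1) == 0)
		rte_pause();

	if (done->status != RTE_CRYPTO_OP_STATUS_SUCCESS)
		return -1;

	/* done->asym->ecpm.r.x / r.y now hold the coordinates of k * P */
	return 0;
}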