/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright (C) 2019 Marvell International Ltd.
 */

#ifndef _CPT_UCODE_ASYM_H_
#define _CPT_UCODE_ASYM_H_

#include <rte_common.h>
#include <rte_crypto_asym.h>
#include <rte_malloc.h>

#include "cpt_common.h"
#include "cpt_hw_types.h"
#include "cpt_mcode_defines.h"

static __rte_always_inline void
cpt_modex_param_normalize(uint8_t **data, size_t *len)
{
	size_t i;

	/* Strip leading NUL bytes */
	for (i = 0; i < *len; i++) {
		if ((*data)[i] != 0)
			break;
	}

	*data += i;
	*len -= i;
}

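/*
 * Copy mod-exp transform params into the session. The normalized modulus
 * (leading zero bytes stripped) and exponent are stored back to back in a
 * single rte_malloc'd buffer anchored at ctx->modulus.data, which
 * cpt_free_asym_session_parameters() later releases.
 */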
static __rte_always_inline int
cpt_fill_modex_params(struct cpt_asym_sess_misc *sess,
		      struct rte_crypto_asym_xform *xform)
{
	struct rte_crypto_modex_xform *ctx = &sess->mod_ctx;
	size_t exp_len = xform->modex.exponent.length;
	size_t mod_len = xform->modex.modulus.length;
	uint8_t *exp = xform->modex.exponent.data;
	uint8_t *mod = xform->modex.modulus.data;

	cpt_modex_param_normalize(&mod, &mod_len);
	cpt_modex_param_normalize(&exp, &exp_len);

	if (unlikely(exp_len == 0 || mod_len == 0))
		return -EINVAL;

	if (unlikely(exp_len > mod_len)) {
		CPT_LOG_DP_ERR("Exponent length greater than modulus length is not supported");
		return -ENOTSUP;
	}

	/* Allocate buffer to hold modexp params */
	ctx->modulus.data = rte_malloc(NULL, mod_len + exp_len, 0);
	if (ctx->modulus.data == NULL) {
		CPT_LOG_DP_ERR("Could not allocate buffer for modex params");
		return -ENOMEM;
	}

	/* Copy modulus and exponent into the session, back to back */
	memcpy(ctx->modulus.data, mod, mod_len);
	ctx->exponent.data = ctx->modulus.data + mod_len;
	memcpy(ctx->exponent.data, exp, exp_len);

	ctx->modulus.length = mod_len;
	ctx->exponent.length = exp_len;

	return 0;
}

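/*
 * Copy RSA transform params into the session. All key material lands in one
 * rte_malloc'd buffer laid out as n | e | q | dQ | p | dP | qInv. The CRT
 * quintuple is copied only when p fits within mod_len/2; otherwise only the
 * public (n, e) pair is stored.
 */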
static __rte_always_inline int
cpt_fill_rsa_params(struct cpt_asym_sess_misc *sess,
		    struct rte_crypto_asym_xform *xform)
{
	struct rte_crypto_rsa_priv_key_qt qt = xform->rsa.qt;
	struct rte_crypto_rsa_xform *xfrm_rsa = &xform->rsa;
	struct rte_crypto_rsa_xform *rsa = &sess->rsa_ctx;
	size_t mod_len = xfrm_rsa->n.length;
	size_t exp_len = xfrm_rsa->e.length;
	uint64_t total_size;
	size_t len = 0;

	/* Make sure key length used is not more than mod_len/2 */
	if (qt.p.data != NULL)
		len = (((mod_len / 2) < qt.p.length) ? 0 : qt.p.length);

	/* Total size required for RSA key params (n, e, (q, dQ, p, dP, qInv)) */
	total_size = mod_len + exp_len + 5 * len;

	/* Allocate buffer to hold all RSA keys */
	rsa->n.data = rte_malloc(NULL, total_size, 0);
	if (rsa->n.data == NULL) {
		CPT_LOG_DP_ERR("Could not allocate buffer for RSA keys");
		return -ENOMEM;
	}

	/* Set up RSA modulus and public key exponent */
	memcpy(rsa->n.data, xfrm_rsa->n.data, mod_len);
	rsa->e.data = rsa->n.data + mod_len;
	memcpy(rsa->e.data, xfrm_rsa->e.data, exp_len);

	/* Private key in quintuple format */
	if (len != 0) {
		rsa->qt.q.data = rsa->e.data + exp_len;
		memcpy(rsa->qt.q.data, qt.q.data, qt.q.length);
		rsa->qt.dQ.data = rsa->qt.q.data + qt.q.length;
		memcpy(rsa->qt.dQ.data, qt.dQ.data, qt.dQ.length);
		rsa->qt.p.data = rsa->qt.dQ.data + qt.dQ.length;
		memcpy(rsa->qt.p.data, qt.p.data, qt.p.length);
		rsa->qt.dP.data = rsa->qt.p.data + qt.p.length;
		memcpy(rsa->qt.dP.data, qt.dP.data, qt.dP.length);
		rsa->qt.qInv.data = rsa->qt.dP.data + qt.dP.length;
		memcpy(rsa->qt.qInv.data, qt.qInv.data, qt.qInv.length);

		rsa->qt.q.length = qt.q.length;
		rsa->qt.dQ.length = qt.dQ.length;
		rsa->qt.p.length = qt.p.length;
		rsa->qt.dP.length = qt.dP.length;
		rsa->qt.qInv.length = qt.qInv.length;
	}
	rsa->n.length = mod_len;
	rsa->e.length = exp_len;

	return 0;
}

static __rte_always_inline int
cpt_fill_ec_params(struct cpt_asym_sess_misc *sess,
		   struct rte_crypto_asym_xform *xform)
{
	struct cpt_asym_ec_ctx *ec = &sess->ec_ctx;

	switch (xform->ec.curve_id) {
	case RTE_CRYPTO_EC_GROUP_SECP192R1:
		ec->curveid = CPT_EC_ID_P192;
		break;
	case RTE_CRYPTO_EC_GROUP_SECP224R1:
		ec->curveid = CPT_EC_ID_P224;
		break;
	case RTE_CRYPTO_EC_GROUP_SECP256R1:
		ec->curveid = CPT_EC_ID_P256;
		break;
	case RTE_CRYPTO_EC_GROUP_SECP384R1:
		ec->curveid = CPT_EC_ID_P384;
		break;
	case RTE_CRYPTO_EC_GROUP_SECP521R1:
		ec->curveid = CPT_EC_ID_P521;
		break;
	default:
		/* Only NIST curves (FIPS 186-4) are supported */
		CPT_LOG_DP_ERR("Unsupported curve");
		return -EINVAL;
	}

	return 0;
}

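/*
 * Populate the asymmetric session from the given xform. On success the
 * session owns any key buffers allocated by the fill helpers above; they
 * must be released with cpt_free_asym_session_parameters().
 */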
static __rte_always_inline int
cpt_fill_asym_session_parameters(struct cpt_asym_sess_misc *sess,
				 struct rte_crypto_asym_xform *xform)
{
	int ret;

	sess->xfrm_type = xform->xform_type;

	switch (xform->xform_type) {
	case RTE_CRYPTO_ASYM_XFORM_RSA:
		ret = cpt_fill_rsa_params(sess, xform);
		break;
	case RTE_CRYPTO_ASYM_XFORM_MODEX:
		ret = cpt_fill_modex_params(sess, xform);
		break;
	case RTE_CRYPTO_ASYM_XFORM_ECDSA:
		/* Fall through */
	case RTE_CRYPTO_ASYM_XFORM_ECPM:
		ret = cpt_fill_ec_params(sess, xform);
		break;
	default:
		CPT_LOG_DP_ERR("Unsupported transform type");
		return -ENOTSUP;
	}
	return ret;
}

static __rte_always_inline void
cpt_free_asym_session_parameters(struct cpt_asym_sess_misc *sess)
{
	struct rte_crypto_modex_xform *mod;
	struct rte_crypto_rsa_xform *rsa;

	switch (sess->xfrm_type) {
	case RTE_CRYPTO_ASYM_XFORM_RSA:
		rsa = &sess->rsa_ctx;
		rte_free(rsa->n.data);
		break;
	case RTE_CRYPTO_ASYM_XFORM_MODEX:
		mod = &sess->mod_ctx;
		rte_free(mod->modulus.data);
		break;
	case RTE_CRYPTO_ASYM_XFORM_ECDSA:
		/* Fall through */
	case RTE_CRYPTO_ASYM_XFORM_ECPM:
		break;
	default:
		CPT_LOG_DP_ERR("Invalid transform type");
		break;
	}
}

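/*
 * Point the request at its completion word. The hardware writes the result
 * status (cpt_res_s) at a 16-byte aligned address, so the vaddr is aligned
 * up and the same offset is applied to the DMA address. The word is seeded
 * with COMPLETION_CODE_INIT so software can poll for the hardware update.
 */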
static __rte_always_inline void
cpt_fill_req_comp_addr(struct cpt_request_info *req, buf_ptr_t addr)
{
	void *completion_addr = RTE_PTR_ALIGN(addr.vaddr, 16);

	/* Pointer to cpt_res_s, updated by CPT */
	req->completion_addr = (volatile uint64_t *)completion_addr;
	req->comp_baddr = addr.dma_addr +
			  RTE_PTR_DIFF(completion_addr, addr.vaddr);
	*(req->completion_addr) = COMPLETION_CODE_INIT;
}

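/*
 * Build a mod-exp request for the MODEX microcode. The meta buffer is laid
 * out as: input (modulus | exponent | base), result (mod_len bytes at rptr),
 * an alternate completion word, then the 16-byte aligned completion word.
 * param1 and param2 of the GP op header carry the modulus and exponent
 * lengths.
 */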
static __rte_always_inline int
cpt_modex_prep(struct asym_op_params *modex_params,
	       struct rte_crypto_modex_xform *mod)
{
	struct cpt_request_info *req = modex_params->req;
	phys_addr_t mphys = modex_params->meta_buf;
	uint32_t exp_len = mod->exponent.length;
	uint32_t mod_len = mod->modulus.length;
	struct rte_crypto_mod_op_param mod_op;
	struct rte_crypto_op **op;
	vq_cmd_word0_t vq_cmd_w0;
	uint64_t total_key_len;
	uint32_t dlen, rlen;
	uint32_t base_len;
	buf_ptr_t caddr;
	uint8_t *dptr;

	/* Extracting modex op from params->req->op[1]->asym->modex */
	op = RTE_PTR_ADD(req->op, sizeof(uintptr_t));
	mod_op = ((struct rte_crypto_op *)*op)->asym->modex;

	base_len = mod_op.base.length;
	if (unlikely(base_len > mod_len)) {
		CPT_LOG_DP_ERR("Base length greater than modulus length is not supported");
		(*op)->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
		return -ENOTSUP;
	}

	total_key_len = mod_len + exp_len;

	/* Input buffer: modulus | exponent are contiguous in the session */
	dptr = RTE_PTR_ADD(req, sizeof(struct cpt_request_info));
	memcpy(dptr, mod->modulus.data, total_key_len);
	dptr += total_key_len;
	memcpy(dptr, mod_op.base.data, base_len);
	dptr += base_len;
	dlen = total_key_len + base_len;

	/* Result buffer */
	rlen = mod_len;

	/* Setup opcodes */
	vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_MODEX;
	vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_MODEX;

	/* GP op header */
	vq_cmd_w0.s.param1 = mod_len;
	vq_cmd_w0.s.param2 = exp_len;
	vq_cmd_w0.s.dlen = dlen;

	/* Filling cpt_request_info structure */
	req->ist.ei0 = vq_cmd_w0.u64;
	req->ist.ei1 = mphys;
	req->ist.ei2 = mphys + dlen;

	/* Result pointer to store result data */
	req->rptr = dptr;

	/* alternate_caddr to write completion status of the microcode */
	req->alternate_caddr = (uint64_t *)(dptr + rlen);
	*req->alternate_caddr = ~((uint64_t)COMPLETION_CODE_INIT);

	/* Preparing completion addr, +1 for completion code */
	caddr.vaddr = dptr + rlen + 1;
	caddr.dma_addr = mphys + dlen + rlen + 1;

	cpt_fill_req_comp_addr(req, caddr);
	return 0;
}

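/*
 * Build an RSA request that uses the public key path (n, e): encrypt and
 * verify, plus raw mod-exp for RTE_CRYPTO_RSA_PADDING_NONE. crypto_param
 * points at the input data (message, sign or cipher, depending on op type).
 */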
static __rte_always_inline void
cpt_rsa_prep(struct asym_op_params *rsa_params,
	     struct rte_crypto_rsa_xform *rsa,
	     rte_crypto_param *crypto_param)
{
	struct cpt_request_info *req = rsa_params->req;
	phys_addr_t mphys = rsa_params->meta_buf;
	struct rte_crypto_rsa_op_param rsa_op;
	uint32_t mod_len = rsa->n.length;
	uint32_t exp_len = rsa->e.length;
	struct rte_crypto_op **op;
	vq_cmd_word0_t vq_cmd_w0;
	uint64_t total_key_len;
	uint32_t dlen, rlen;
	uint32_t in_size;
	buf_ptr_t caddr;
	uint8_t *dptr;

	/* Extracting rsa op from params->req->op[1]->asym->rsa */
	op = RTE_PTR_ADD(req->op, sizeof(uintptr_t));
	rsa_op = ((struct rte_crypto_op *)*op)->asym->rsa;
	total_key_len = mod_len + exp_len;

	/* Input buffer: key material (n | e) is contiguous in the session */
	dptr = RTE_PTR_ADD(req, sizeof(struct cpt_request_info));
	memcpy(dptr, rsa->n.data, total_key_len);
	dptr += total_key_len;

	in_size = crypto_param->length;
	memcpy(dptr, crypto_param->data, in_size);

	dptr += in_size;
	dlen = total_key_len + in_size;

	/* Result buffer */
	rlen = mod_len;

	if (rsa_op.pad == RTE_CRYPTO_RSA_PADDING_NONE) {
		/* Use mod_exp operation for no_padding type */
		vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_MODEX;
		vq_cmd_w0.s.param2 = exp_len;
	} else {
		if (rsa_op.op_type == RTE_CRYPTO_ASYM_OP_ENCRYPT) {
			vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_PKCS_ENC;
			/* Public key encrypt, use BT2 */
			vq_cmd_w0.s.param2 = CPT_BLOCK_TYPE2 |
					     ((uint16_t)(exp_len) << 1);
		} else if (rsa_op.op_type == RTE_CRYPTO_ASYM_OP_VERIFY) {
			vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_PKCS_DEC;
			/* Public key decrypt, use BT1 */
			vq_cmd_w0.s.param2 = CPT_BLOCK_TYPE1;
			/* + 2 for decrypted len */
			rlen += 2;
		}
	}

	vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_MODEX;

	/* GP op header */
	vq_cmd_w0.s.param1 = mod_len;
	vq_cmd_w0.s.dlen = dlen;

	/* Filling cpt_request_info structure */
	req->ist.ei0 = vq_cmd_w0.u64;
	req->ist.ei1 = mphys;
	req->ist.ei2 = mphys + dlen;

	/* Result pointer to store result data */
	req->rptr = dptr;

	/* alternate_caddr to write completion status of the microcode */
	req->alternate_caddr = (uint64_t *)(dptr + rlen);
	*req->alternate_caddr = ~((uint64_t)COMPLETION_CODE_INIT);

	/* Preparing completion addr, +1 for completion code */
	caddr.vaddr = dptr + rlen + 1;
	caddr.dma_addr = mphys + dlen + rlen + 1;

	cpt_fill_req_comp_addr(req, caddr);
}

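/*
 * Build an RSA request that uses the private key in CRT (quintuple) form:
 * sign and decrypt, plus CRT mod-exp for RTE_CRYPTO_RSA_PADDING_NONE. The
 * single memcpy from qt.q.data relies on the q | dQ | p | dP | qInv layout
 * created by cpt_fill_rsa_params().
 */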
static __rte_always_inline void
cpt_rsa_crt_prep(struct asym_op_params *rsa_params,
		 struct rte_crypto_rsa_xform *rsa,
		 rte_crypto_param *crypto_param)
{
	struct cpt_request_info *req = rsa_params->req;
	phys_addr_t mphys = rsa_params->meta_buf;
	uint32_t qInv_len = rsa->qt.qInv.length;
	struct rte_crypto_rsa_op_param rsa_op;
	uint32_t dP_len = rsa->qt.dP.length;
	uint32_t dQ_len = rsa->qt.dQ.length;
	uint32_t p_len = rsa->qt.p.length;
	uint32_t q_len = rsa->qt.q.length;
	uint32_t mod_len = rsa->n.length;
	struct rte_crypto_op **op;
	vq_cmd_word0_t vq_cmd_w0;
	uint64_t total_key_len;
	uint32_t dlen, rlen;
	uint32_t in_size;
	buf_ptr_t caddr;
	uint8_t *dptr;

	/* Extracting rsa op from params->req->op[1]->asym->rsa */
	op = RTE_PTR_ADD(req->op, sizeof(uintptr_t));
	rsa_op = ((struct rte_crypto_op *)*op)->asym->rsa;
	total_key_len = p_len + q_len + dP_len + dQ_len + qInv_len;

	/* Input buffer: the CRT quintuple is contiguous in the session */
	dptr = RTE_PTR_ADD(req, sizeof(struct cpt_request_info));
	memcpy(dptr, rsa->qt.q.data, total_key_len);
	dptr += total_key_len;

	in_size = crypto_param->length;
	memcpy(dptr, crypto_param->data, in_size);

	dptr += in_size;
	dlen = total_key_len + in_size;

	/* Result buffer */
	rlen = mod_len;

	if (rsa_op.pad == RTE_CRYPTO_RSA_PADDING_NONE) {
		/* Use mod_exp operation for no_padding type */
		vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_MODEX_CRT;
	} else {
		if (rsa_op.op_type == RTE_CRYPTO_ASYM_OP_SIGN) {
			vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_PKCS_ENC_CRT;
			/* Private encrypt, use BT1 */
			vq_cmd_w0.s.param2 = CPT_BLOCK_TYPE1;
		} else if (rsa_op.op_type == RTE_CRYPTO_ASYM_OP_DECRYPT) {
			vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_PKCS_DEC_CRT;
			/* Private decrypt, use BT2 */
			vq_cmd_w0.s.param2 = CPT_BLOCK_TYPE2;
			/* + 2 for decrypted len */
			rlen += 2;
		}
	}

	vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_MODEX;

	/* GP op header */
	vq_cmd_w0.s.param1 = mod_len;
	vq_cmd_w0.s.dlen = dlen;

	/* Filling cpt_request_info structure */
	req->ist.ei0 = vq_cmd_w0.u64;
	req->ist.ei1 = mphys;
	req->ist.ei2 = mphys + dlen;

	/* Result pointer to store result data */
	req->rptr = dptr;

	/* alternate_caddr to write completion status of the microcode */
	req->alternate_caddr = (uint64_t *)(dptr + rlen);
	*req->alternate_caddr = ~((uint64_t)COMPLETION_CODE_INIT);

	/* Preparing completion addr, +1 for completion code */
	caddr.vaddr = dptr + rlen + 1;
	caddr.dma_addr = mphys + dlen + rlen + 1;

	cpt_fill_req_comp_addr(req, caddr);
}

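/*
 * Dispatch an RSA op: public key operations (encrypt, verify) take the
 * (n, e) path, private key operations (sign, decrypt) take the CRT path.
 */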
static __rte_always_inline int __rte_hot
cpt_enqueue_rsa_op(struct rte_crypto_op *op,
		   struct asym_op_params *params,
		   struct cpt_asym_sess_misc *sess)
{
	struct rte_crypto_rsa_op_param *rsa = &op->asym->rsa;

	switch (rsa->op_type) {
	case RTE_CRYPTO_ASYM_OP_VERIFY:
		cpt_rsa_prep(params, &sess->rsa_ctx, &rsa->sign);
		break;
	case RTE_CRYPTO_ASYM_OP_ENCRYPT:
		cpt_rsa_prep(params, &sess->rsa_ctx, &rsa->message);
		break;
	case RTE_CRYPTO_ASYM_OP_SIGN:
		cpt_rsa_crt_prep(params, &sess->rsa_ctx, &rsa->message);
		break;
	case RTE_CRYPTO_ASYM_OP_DECRYPT:
		cpt_rsa_crt_prep(params, &sess->rsa_ctx, &rsa->cipher);
		break;
	default:
		op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
		return -EINVAL;
	}
	return 0;
}

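/*
 * Curve parameters for the supported NIST prime curves, indexed by
 * CPT_EC_ID_P192 ... CPT_EC_ID_P521. Each entry holds the prime p, the
 * group order n and the curve constants a and b as big-endian byte strings.
 */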
static const struct cpt_ec_group ec_grp[CPT_EC_ID_PMAX] = {
	{
		.prime = {.data = {0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0xFF, 0xFF, 0xFF, 0xFE, 0xFF, 0xFF,
				   0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF},
			  .length = 24},
		.order = {.data = {0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0x99, 0xDE, 0xF8, 0x36, 0x14, 0x6B,
				   0xC9, 0xB1, 0xB4, 0xD2, 0x28, 0x31},
			  .length = 24},
		.consta = {.data = {0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				    0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				    0xFF, 0xFF, 0xFF, 0xFE, 0xFF, 0xFF,
				    0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFC},
			   .length = 24},
		.constb = {.data = {0x64, 0x21, 0x05, 0x19, 0xE5, 0x9C,
				    0x80, 0xE7, 0x0F, 0xA7, 0xE9, 0xAB,
				    0x72, 0x24, 0x30, 0x49, 0xFE, 0xB8,
				    0xDE, 0xEC, 0xC1, 0x46, 0xB9, 0xB1},
			   .length = 24},
	},
	{
		.prime = {.data = {0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00,
				   0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01},
			  .length = 28},
		.order = {.data = {0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0x16, 0xA2, 0xE0, 0xB8, 0xF0, 0x3E, 0x13,
				   0xDD, 0x29, 0x45, 0x5C, 0x5C, 0x2A, 0x3D},
			  .length = 28},
		.consta = {.data = {0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				    0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				    0xFF, 0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				    0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE},
			   .length = 28},
		.constb = {.data = {0xB4, 0x05, 0x0A, 0x85, 0x0C, 0x04, 0xB3,
				    0xAB, 0xF5, 0x41, 0x32, 0x56, 0x50, 0x44,
				    0xB0, 0xB7, 0xD7, 0xBF, 0xD8, 0xBA, 0x27,
				    0x0B, 0x39, 0x43, 0x23, 0x55, 0xFF, 0xB4},
			   .length = 28},
	},
	{
		.prime = {.data = {0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00,
				   0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
				   0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF,
				   0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0xFF, 0xFF, 0xFF, 0xFF},
			  .length = 32},
		.order = {.data = {0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00,
				   0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0xFF, 0xFF, 0xBC, 0xE6, 0xFA, 0xAD, 0xA7,
				   0x17, 0x9E, 0x84, 0xF3, 0xB9, 0xCA, 0xC2,
				   0xFC, 0x63, 0x25, 0x51},
			  .length = 32},
		.consta = {.data = {0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00,
				    0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
				    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF,
				    0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				    0xFF, 0xFF, 0xFF, 0xFC},
			   .length = 32},
		.constb = {.data = {0x5A, 0xC6, 0x35, 0xD8, 0xAA, 0x3A, 0x93,
				    0xE7, 0xB3, 0xEB, 0xBD, 0x55, 0x76, 0x98,
				    0x86, 0xBC, 0x65, 0x1D, 0x06, 0xB0, 0xCC,
				    0x53, 0xB0, 0xF6, 0x3B, 0xCE, 0x3C, 0x3E,
				    0x27, 0xD2, 0x60, 0x4B},
			   .length = 32},
	},
	{
		.prime = {.data = {0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0xFF, 0xFF, 0xFF, 0xFE, 0xFF, 0xFF, 0xFF,
				   0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
				   0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF},
			  .length = 48},
		.order = {.data = {0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0xFF, 0xFF, 0xFF, 0xC7, 0x63, 0x4D, 0x81,
				   0xF4, 0x37, 0x2D, 0xDF, 0x58, 0x1A, 0x0D,
				   0xB2, 0x48, 0xB0, 0xA7, 0x7A, 0xEC, 0xEC,
				   0x19, 0x6A, 0xCC, 0xC5, 0x29, 0x73},
			  .length = 48},
		.consta = {.data = {0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				    0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				    0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				    0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				    0xFF, 0xFF, 0xFF, 0xFE, 0xFF, 0xFF, 0xFF,
				    0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
				    0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFC},
			   .length = 48},
		.constb = {.data = {0xB3, 0x31, 0x2F, 0xA7, 0xE2, 0x3E, 0xE7,
				    0xE4, 0x98, 0x8E, 0x05, 0x6B, 0xE3, 0xF8,
				    0x2D, 0x19, 0x18, 0x1D, 0x9C, 0x6E, 0xFE,
				    0x81, 0x41, 0x12, 0x03, 0x14, 0x08, 0x8F,
				    0x50, 0x13, 0x87, 0x5A, 0xC6, 0x56, 0x39,
				    0x8D, 0x8A, 0x2E, 0xD1, 0x9D, 0x2A, 0x85,
				    0xC8, 0xED, 0xD3, 0xEC, 0x2A, 0xEF},
			   .length = 48},
	},
	{
		.prime = {.data = {0x01, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0xFF, 0xFF},
			  .length = 66},
		.order = {.data = {0x01, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0xFF, 0xFA, 0x51, 0x86, 0x87, 0x83, 0xBF, 0x2F,
				   0x96, 0x6B, 0x7F, 0xCC, 0x01, 0x48, 0xF7, 0x09,
				   0xA5, 0xD0, 0x3B, 0xB5, 0xC9, 0xB8, 0x89, 0x9C,
				   0x47, 0xAE, 0xBB, 0x6F, 0xB7, 0x1E, 0x91, 0x38,
				   0x64, 0x09},
			  .length = 66},
		.consta = {.data = {0x01, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				    0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				    0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				    0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				    0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				    0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				    0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				    0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				    0xFF, 0xFC},
			   .length = 66},
		.constb = {.data = {0x00, 0x51, 0x95, 0x3E, 0xB9, 0x61, 0x8E, 0x1C,
				    0x9A, 0x1F, 0x92, 0x9A, 0x21, 0xA0, 0xB6, 0x85,
				    0x40, 0xEE, 0xA2, 0xDA, 0x72, 0x5B, 0x99, 0xB3,
				    0x15, 0xF3, 0xB8, 0xB4, 0x89, 0x91, 0x8E, 0xF1,
				    0x09, 0xE1, 0x56, 0x19, 0x39, 0x51, 0xEC, 0x7E,
				    0x93, 0x7B, 0x16, 0x52, 0xC0, 0xBD, 0x3B, 0xB1,
				    0xBF, 0x07, 0x35, 0x73, 0xDF, 0x88, 0x3D, 0x2C,
				    0x34, 0xF1, 0xEF, 0x45, 0x1F, 0xD4, 0x6B, 0x50,
				    0x3F, 0x00},
			   .length = 66},
	},
};

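/*
 * Build an ECDSA sign request. Meta buffer input layout: fpm table address,
 * then scalar k, prime p, order n, private key and message, then curve
 * constants a and b; fields shorter than the prime are zero-padded at the
 * front. The result is the signature (r, s), two prime-length words.
 */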
static __rte_always_inline void
cpt_ecdsa_sign_prep(struct rte_crypto_ecdsa_op_param *ecdsa,
		    struct asym_op_params *ecdsa_params,
		    uint64_t fpm_table_iova,
		    uint8_t curveid)
{
	struct cpt_request_info *req = ecdsa_params->req;
	uint16_t message_len = ecdsa->message.length;
	phys_addr_t mphys = ecdsa_params->meta_buf;
	uint16_t pkey_len = ecdsa->pkey.length;
	uint16_t p_align, k_align, m_align;
	uint16_t k_len = ecdsa->k.length;
	uint16_t order_len, prime_len;
	uint16_t o_offset, pk_offset;
	vq_cmd_word0_t vq_cmd_w0;
	uint16_t rlen, dlen;
	buf_ptr_t caddr;
	uint8_t *dptr;

	prime_len = ec_grp[curveid].prime.length;
	order_len = ec_grp[curveid].order.length;

	/* Truncate input length to curve prime length */
	if (message_len > prime_len)
		message_len = prime_len;
	m_align = RTE_ALIGN_CEIL(message_len, 8);

	p_align = RTE_ALIGN_CEIL(prime_len, 8);
	k_align = RTE_ALIGN_CEIL(k_len, 8);

	/* Set write offset for order and private key */
	o_offset = prime_len - order_len;
	pk_offset = prime_len - pkey_len;

	/* Input buffer */
	dptr = RTE_PTR_ADD(req, sizeof(struct cpt_request_info));

	/*
	 * Set dlen = sum(sizeof(fpm address), ROUNDUP8(scalar len, input len),
	 * ROUNDUP8(priv key len, prime len, order len), ROUNDUP8(consta len,
	 * constb len)). Private key and order cannot exceed prime length,
	 * so the five prime-sized fields contribute 5 * p_align.
	 */
	dlen = sizeof(fpm_table_iova) + k_align + m_align + p_align * 5;

	memset(dptr, 0, dlen);

	*(uint64_t *)dptr = fpm_table_iova;
	dptr += sizeof(fpm_table_iova);

	memcpy(dptr, ecdsa->k.data, k_len);
	dptr += k_align;

	memcpy(dptr, ec_grp[curveid].prime.data, prime_len);
	dptr += p_align;

	memcpy(dptr + o_offset, ec_grp[curveid].order.data, order_len);
	dptr += p_align;

	memcpy(dptr + pk_offset, ecdsa->pkey.data, pkey_len);
	dptr += p_align;

	memcpy(dptr, ecdsa->message.data, message_len);
	dptr += m_align;

	memcpy(dptr, ec_grp[curveid].consta.data, prime_len);
	dptr += p_align;

	memcpy(dptr, ec_grp[curveid].constb.data, prime_len);
	dptr += p_align;

	/* 2 * prime length (for sign r and s) */
	rlen = 2 * p_align;

	/* Setup opcodes */
	vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_ECDSA;
	vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_ECDSA_SIGN;

	/* GP op header */
	vq_cmd_w0.s.param1 = curveid | (message_len << 8);
	vq_cmd_w0.s.param2 = (pkey_len << 8) | k_len;
	vq_cmd_w0.s.dlen = dlen;

	/* Filling cpt_request_info structure */
	req->ist.ei0 = vq_cmd_w0.u64;
	req->ist.ei1 = mphys;
	req->ist.ei2 = mphys + dlen;

	/* Result pointer to store result data */
	req->rptr = dptr;

	/* alternate_caddr to write completion status of the microcode */
	req->alternate_caddr = (uint64_t *)(dptr + rlen);
	*req->alternate_caddr = ~((uint64_t)COMPLETION_CODE_INIT);

	/* Preparing completion addr, +1 for completion code */
	caddr.vaddr = dptr + rlen + 1;
	caddr.dma_addr = mphys + dlen + rlen + 1;

	cpt_fill_req_comp_addr(req, caddr);
}

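/*
 * Build an ECDSA verify request. Meta buffer input layout: fpm table
 * address, then r, s, message, order n, prime p, public key coordinates
 * Qx and Qy, and curve constants a and b, each zero-padded to the prime
 * length. Verify produces no result data; the outcome is reported through
 * the completion code alone.
 */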
static __rte_always_inline void
cpt_ecdsa_verify_prep(struct rte_crypto_ecdsa_op_param *ecdsa,
		      struct asym_op_params *ecdsa_params,
		      uint64_t fpm_table_iova,
		      uint8_t curveid)
{
	struct cpt_request_info *req = ecdsa_params->req;
	uint32_t message_len = ecdsa->message.length;
	phys_addr_t mphys = ecdsa_params->meta_buf;
	uint16_t o_offset, r_offset, s_offset;
	uint16_t qx_len = ecdsa->q.x.length;
	uint16_t qy_len = ecdsa->q.y.length;
	uint16_t r_len = ecdsa->r.length;
	uint16_t s_len = ecdsa->s.length;
	uint16_t order_len, prime_len;
	uint16_t qx_offset, qy_offset;
	uint16_t p_align, m_align;
	vq_cmd_word0_t vq_cmd_w0;
	buf_ptr_t caddr;
	uint16_t dlen;
	uint8_t *dptr;

	prime_len = ec_grp[curveid].prime.length;
	order_len = ec_grp[curveid].order.length;

	/* Truncate input length to curve prime length */
	if (message_len > prime_len)
		message_len = prime_len;

	m_align = RTE_ALIGN_CEIL(message_len, 8);
	p_align = RTE_ALIGN_CEIL(prime_len, 8);

	/* Set write offset for sign, order and public key coordinates */
	o_offset = prime_len - order_len;
	qx_offset = prime_len - qx_len;
	qy_offset = prime_len - qy_len;
	r_offset = prime_len - r_len;
	s_offset = prime_len - s_len;

	/* Input buffer */
	dptr = RTE_PTR_ADD(req, sizeof(struct cpt_request_info));

	/*
	 * Set dlen = sum(sizeof(fpm address), ROUNDUP8(message len),
	 * ROUNDUP8(sign len (r and s), public key len (x and y coordinates),
	 * prime len, order len), ROUNDUP8(consta len, constb len)).
	 * Sign, public key and order cannot exceed prime length, so the
	 * eight prime-sized fields contribute 8 * p_align.
	 */
	dlen = sizeof(fpm_table_iova) + m_align + (8 * p_align);

	memset(dptr, 0, dlen);

	*(uint64_t *)dptr = fpm_table_iova;
	dptr += sizeof(fpm_table_iova);

	memcpy(dptr + r_offset, ecdsa->r.data, r_len);
	dptr += p_align;

	memcpy(dptr + s_offset, ecdsa->s.data, s_len);
	dptr += p_align;

	memcpy(dptr, ecdsa->message.data, message_len);
	dptr += m_align;

	memcpy(dptr + o_offset, ec_grp[curveid].order.data, order_len);
	dptr += p_align;

	memcpy(dptr, ec_grp[curveid].prime.data, prime_len);
	dptr += p_align;

	memcpy(dptr + qx_offset, ecdsa->q.x.data, qx_len);
	dptr += p_align;

	memcpy(dptr + qy_offset, ecdsa->q.y.data, qy_len);
	dptr += p_align;

	memcpy(dptr, ec_grp[curveid].consta.data, prime_len);
	dptr += p_align;

	memcpy(dptr, ec_grp[curveid].constb.data, prime_len);
	dptr += p_align;

	/* Setup opcodes */
	vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_ECDSA;
	vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_ECDSA_VERIFY;

	/* GP op header */
	vq_cmd_w0.s.param1 = curveid | (message_len << 8);
	vq_cmd_w0.s.param2 = 0;
	vq_cmd_w0.s.dlen = dlen;

	/* Filling cpt_request_info structure */
	req->ist.ei0 = vq_cmd_w0.u64;
	req->ist.ei1 = mphys;
	req->ist.ei2 = mphys + dlen;

	/* Result pointer to store result data */
	req->rptr = dptr;

	/* alternate_caddr to write completion status of the microcode */
	req->alternate_caddr = (uint64_t *)dptr;
	*req->alternate_caddr = ~((uint64_t)COMPLETION_CODE_INIT);

	/* Preparing completion addr, +1 for completion code */
	caddr.vaddr = dptr + 1;
	caddr.dma_addr = mphys + dlen + 1;

	cpt_fill_req_comp_addr(req, caddr);
}

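/*
 * Dispatch an ECDSA op. fpm_iova holds per-curve DMA addresses of the
 * microcode's precomputed point multiplication (FPM) tables, indexed by
 * curve id; the expansion of the "fpm" abbreviation is an assumption here.
 */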
static __rte_always_inline int __rte_hot
cpt_enqueue_ecdsa_op(struct rte_crypto_op *op,
		     struct asym_op_params *params,
		     struct cpt_asym_sess_misc *sess,
		     uint64_t *fpm_iova)
{
	struct rte_crypto_ecdsa_op_param *ecdsa = &op->asym->ecdsa;
	uint8_t curveid = sess->ec_ctx.curveid;

	if (ecdsa->op_type == RTE_CRYPTO_ASYM_OP_SIGN)
		cpt_ecdsa_sign_prep(ecdsa, params, fpm_iova[curveid], curveid);
	else if (ecdsa->op_type == RTE_CRYPTO_ASYM_OP_VERIFY)
		cpt_ecdsa_verify_prep(ecdsa, params, fpm_iova[curveid],
				      curveid);
	else {
		op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
		return -EINVAL;
	}
	return 0;
}

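/*
 * Build an EC point multiplication request (Q = scalar * P). Meta buffer
 * input layout: point coordinates x and y (zero-padded to prime length),
 * scalar, prime p, curve constants a and b. The result is the output point,
 * two prime-length coordinates.
 */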
static __rte_always_inline int
cpt_ecpm_prep(struct rte_crypto_ecpm_op_param *ecpm,
	      struct asym_op_params *asym_params,
	      uint8_t curveid)
{
	struct cpt_request_info *req = asym_params->req;
	phys_addr_t mphys = asym_params->meta_buf;
	uint16_t x1_len = ecpm->p.x.length;
	uint16_t y1_len = ecpm->p.y.length;
	uint16_t scalar_align, p_align;
	uint16_t dlen, rlen, prime_len;
	uint16_t x1_offset, y1_offset;
	vq_cmd_word0_t vq_cmd_w0;
	buf_ptr_t caddr;
	uint8_t *dptr;

	prime_len = ec_grp[curveid].prime.length;

	/* Input buffer */
	dptr = RTE_PTR_ADD(req, sizeof(struct cpt_request_info));

	p_align = RTE_ALIGN_CEIL(prime_len, 8);
	scalar_align = RTE_ALIGN_CEIL(ecpm->scalar.length, 8);

	/*
	 * Set dlen = sum(ROUNDUP8(input point (x and y coordinates), prime,
	 * consta, constb), ROUNDUP8(scalar length)). Point coordinate length
	 * is equivalent to the prime of the curve, so the five prime-sized
	 * fields contribute 5 * p_align.
	 */
	dlen = 5 * p_align + scalar_align;

	x1_offset = prime_len - x1_len;
	y1_offset = prime_len - y1_len;

	memset(dptr, 0, dlen);

	/* Copy input point, scalar, prime and curve constants */
	memcpy(dptr + x1_offset, ecpm->p.x.data, x1_len);
	dptr += p_align;
	memcpy(dptr + y1_offset, ecpm->p.y.data, y1_len);
	dptr += p_align;
	memcpy(dptr, ecpm->scalar.data, ecpm->scalar.length);
	dptr += scalar_align;
	memcpy(dptr, ec_grp[curveid].prime.data, ec_grp[curveid].prime.length);
	dptr += p_align;

	memcpy(dptr, ec_grp[curveid].consta.data,
	       ec_grp[curveid].consta.length);
	dptr += p_align;

	memcpy(dptr, ec_grp[curveid].constb.data,
	       ec_grp[curveid].constb.length);
	dptr += p_align;

	/* Setup opcodes */
	vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_ECC;
	vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_ECC_UMP;

	/* GP op header */
	vq_cmd_w0.s.param1 = curveid;
	vq_cmd_w0.s.param2 = ecpm->scalar.length;
	vq_cmd_w0.s.dlen = dlen;

	/* Filling cpt_request_info structure */
	req->ist.ei0 = vq_cmd_w0.u64;
	req->ist.ei1 = mphys;
	req->ist.ei2 = mphys + dlen;

	/* Result buffer will store the output point, where each coordinate
	 * is of prime length, so set rlen to twice the prime length.
	 */
	rlen = p_align << 1;
	req->rptr = dptr;

	/* alternate_caddr to write completion status by the microcode */
	req->alternate_caddr = (uint64_t *)(dptr + rlen);
	*req->alternate_caddr = ~((uint64_t)COMPLETION_CODE_INIT);

	/* Preparing completion addr, +1 for completion code */
	caddr.vaddr = dptr + rlen + 1;
	caddr.dma_addr = mphys + dlen + rlen + 1;

	cpt_fill_req_comp_addr(req, caddr);
	return 0;
}
#endif /* _CPT_UCODE_ASYM_H_ */