/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2017 Cavium, Inc
 */

#include <stdbool.h>

#include <rte_common.h>
#include <rte_hexdump.h>
#include <rte_cryptodev.h>
#include <rte_cryptodev_pmd.h>
#include <rte_bus_vdev.h>
#include <rte_malloc.h>
#include <rte_cpuflags.h>

#include "AArch64cryptolib.h"

#include "armv8_pmd_private.h"

static uint8_t cryptodev_driver_id;

static int cryptodev_armv8_crypto_uninit(struct rte_vdev_device *vdev);

/**
 * Pointers to the supported combined mode crypto functions are stored
 * in static tables. Each combined (chained) cryptographic operation
 * can be described by a set of numbers:
 * - order: order of operations (cipher, auth) or (auth, cipher)
 * - direction: encryption or decryption
 * - calg: cipher algorithm such as AES_CBC, AES_CTR, etc.
 * - aalg: authentication algorithm such as SHA1, SHA256, etc.
 * - keyl: cipher key length, for example 128, 192 or 256 bits
 *
 * In order to quickly acquire each function pointer based on those numbers,
 * a hierarchy of arrays is maintained. The final level, a 3D array, is
 * indexed by the combined mode function parameters only (cipher algorithm,
 * authentication algorithm and key length).
 *
 * This gives 3 memory accesses to obtain a function pointer, instead of
 * traversing the array manually and comparing function parameters on each
 * iteration.
 *
 *                   +--+CRYPTO_FUNC
 *            +--+ENC|
 *      +--+CA|
 *      |     +--+DEC
 * ORDER|
 *      |     +--+ENC
 *      +--+AC|
 *            +--+DEC
 *
 */

/**
 * 3D array type for ARM Combined Mode crypto function pointers.
 * CRYPTO_CIPHER_MAX: max cipher ID number
 * CRYPTO_AUTH_MAX: max auth ID number
 * CRYPTO_CIPHER_KEYLEN_MAX: max key length ID number
 */
typedef const crypto_func_t
crypto_func_tbl_t[CRYPTO_CIPHER_MAX][CRYPTO_AUTH_MAX][CRYPTO_CIPHER_KEYLEN_MAX];

/* Evaluate to key length definition */
#define KEYL(keyl)		(ARMV8_CRYPTO_CIPHER_KEYLEN_ ## keyl)

/* Local aliases for supported ciphers */
#define CIPH_AES_CBC		RTE_CRYPTO_CIPHER_AES_CBC
/* Local aliases for supported hashes */
#define AUTH_SHA1_HMAC		RTE_CRYPTO_AUTH_SHA1_HMAC
#define AUTH_SHA256_HMAC	RTE_CRYPTO_AUTH_SHA256_HMAC

/**
 * Arrays containing pointers to particular cryptographic
 * combined mode functions.
 * crypto_op_ca_encrypt: cipher (encrypt), authenticate
 * crypto_op_ca_decrypt: cipher (decrypt), authenticate
 * crypto_op_ac_encrypt: authenticate, cipher (encrypt)
 * crypto_op_ac_decrypt: authenticate, cipher (decrypt)
 */
static const crypto_func_tbl_t
crypto_op_ca_encrypt = {
	/* [cipher alg][auth alg][key length] = crypto_function, */
	[CIPH_AES_CBC][AUTH_SHA1_HMAC][KEYL(128)] =
		armv8_enc_aes_cbc_sha1_128,
	[CIPH_AES_CBC][AUTH_SHA256_HMAC][KEYL(128)] =
		armv8_enc_aes_cbc_sha256_128,
};

static const crypto_func_tbl_t
crypto_op_ca_decrypt = {
	{ {NULL} }
};

static const crypto_func_tbl_t
crypto_op_ac_encrypt = {
	{ {NULL} }
};

static const crypto_func_tbl_t
crypto_op_ac_decrypt = {
	/* [cipher alg][auth alg][key length] = crypto_function, */
	[CIPH_AES_CBC][AUTH_SHA1_HMAC][KEYL(128)] =
		armv8_dec_aes_cbc_sha1_128,
	[CIPH_AES_CBC][AUTH_SHA256_HMAC][KEYL(128)] =
		armv8_dec_aes_cbc_sha256_128,
};

/**
 * Arrays containing pointers to particular cryptographic function sets,
 * covering given cipher operation directions (encrypt, decrypt)
 * for each order of cipher and authentication pairs.
 */
static const crypto_func_tbl_t *
crypto_cipher_auth[] = {
	&crypto_op_ca_encrypt,
	&crypto_op_ca_decrypt,
	NULL
};

static const crypto_func_tbl_t *
crypto_auth_cipher[] = {
	&crypto_op_ac_encrypt,
	&crypto_op_ac_decrypt,
	NULL
};

/**
 * Top level array containing pointers to particular cryptographic
 * function sets, covering given order of chained operations.
 * crypto_cipher_auth: cipher first, authenticate after
 * crypto_auth_cipher: authenticate first, cipher after
 */
static const crypto_func_tbl_t **
crypto_chain_order[] = {
	crypto_cipher_auth,
	crypto_auth_cipher,
	NULL
};

/**
 * Extract a particular combined mode crypto function from the 3D array.
 */
#define CRYPTO_GET_ALGO(order, cop, calg, aalg, keyl)			\
({									\
	crypto_func_tbl_t *func_tbl =					\
				(crypto_chain_order[(order)])[(cop)];	\
									\
	((calg >= CRYPTO_CIPHER_MAX) || (aalg >= CRYPTO_AUTH_MAX)) ?	\
		NULL : ((*func_tbl)[(calg)][(aalg)][KEYL(keyl)]);	\
})
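
/*
 * Illustrative example (not part of the driver logic): for a
 * cipher-then-authenticate chain encrypting with AES-CBC (128-bit key)
 * and SHA1-HMAC, the lookup above resolves in three steps:
 *
 *   crypto_chain_order[ARMV8_CRYPTO_CHAIN_CIPHER_AUTH]
 *       -> crypto_cipher_auth
 *   crypto_cipher_auth[ARMV8_CRYPTO_CIPHER_OP_ENCRYPT]
 *       -> &crypto_op_ca_encrypt
 *   crypto_op_ca_encrypt[CIPH_AES_CBC][AUTH_SHA1_HMAC][KEYL(128)]
 *       -> armv8_enc_aes_cbc_sha1_128
 *
 * Any combination not filled in the tables yields NULL, which the session
 * setup code treats as "not supported".
 */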

/*----------------------------------------------------------------------------*/

/**
 * 2D array type for ARM key schedule function pointers.
 * CRYPTO_CIPHER_MAX: max cipher ID number
 * CRYPTO_CIPHER_KEYLEN_MAX: max key length ID number
 */
typedef const crypto_key_sched_t
crypto_key_sched_tbl_t[CRYPTO_CIPHER_MAX][CRYPTO_CIPHER_KEYLEN_MAX];

static const crypto_key_sched_tbl_t
crypto_key_sched_encrypt = {
	/* [cipher alg][key length] = key_expand_func, */
	[CIPH_AES_CBC][KEYL(128)] = armv8_expandkeys_enc_aes_cbc_128,
};

static const crypto_key_sched_tbl_t
crypto_key_sched_decrypt = {
	/* [cipher alg][key length] = key_expand_func, */
	[CIPH_AES_CBC][KEYL(128)] = armv8_expandkeys_dec_aes_cbc_128,
};

/**
 * Top level array containing pointers to particular key generation
 * function sets, covering given operation direction.
 * crypto_key_sched_encrypt: keys for encryption
 * crypto_key_sched_decrypt: keys for decryption
 */
static const crypto_key_sched_tbl_t *
crypto_key_sched_dir[] = {
	&crypto_key_sched_encrypt,
	&crypto_key_sched_decrypt,
	NULL
};

/**
 * Extract a particular key schedule function from the 2D array.
 */
#define CRYPTO_GET_KEY_SCHED(cop, calg, keyl)				\
({									\
	crypto_key_sched_tbl_t *ks_tbl = crypto_key_sched_dir[(cop)];	\
									\
	(calg >= CRYPTO_CIPHER_MAX) ?					\
		NULL : ((*ks_tbl)[(calg)][KEYL(keyl)]);			\
})
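
/*
 * Illustrative example:
 * CRYPTO_GET_KEY_SCHED(ARMV8_CRYPTO_CIPHER_OP_ENCRYPT, CIPH_AES_CBC, 128)
 * resolves to armv8_expandkeys_enc_aes_cbc_128; any cipher/key-length pair
 * absent from the tables above resolves to NULL.
 */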
195
196 /*----------------------------------------------------------------------------*/
197
198 /*
199 *------------------------------------------------------------------------------
200 * Session Prepare
201 *------------------------------------------------------------------------------
202 */
203
204 /** Get xform chain order */
205 static enum armv8_crypto_chain_order
armv8_crypto_get_chain_order(const struct rte_crypto_sym_xform * xform)206 armv8_crypto_get_chain_order(const struct rte_crypto_sym_xform *xform)
207 {
208
209 /*
210 * This driver currently covers only chained operations.
211 * Ignore only cipher or only authentication operations
212 * or chains longer than 2 xform structures.
213 */
214 if (xform->next == NULL || xform->next->next != NULL)
215 return ARMV8_CRYPTO_CHAIN_NOT_SUPPORTED;
216
217 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH) {
218 if (xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
219 return ARMV8_CRYPTO_CHAIN_AUTH_CIPHER;
220 }
221
222 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER) {
223 if (xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
224 return ARMV8_CRYPTO_CHAIN_CIPHER_AUTH;
225 }
226
227 return ARMV8_CRYPTO_CHAIN_NOT_SUPPORTED;
228 }
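
/*
 * Illustrative example (hypothetical application-side setup): a two-element
 * chain such as
 *
 *   struct rte_crypto_sym_xform cipher_xform = {
 *           .type = RTE_CRYPTO_SYM_XFORM_CIPHER,
 *           .next = &auth_xform,   // auth_xform.next == NULL
 *   };
 *
 * is classified above as ARMV8_CRYPTO_CHAIN_CIPHER_AUTH; reversing the two
 * xform types yields ARMV8_CRYPTO_CHAIN_AUTH_CIPHER.
 */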

static inline void
auth_hmac_pad_prepare(struct armv8_crypto_session *sess,
				const struct rte_crypto_sym_xform *xform)
{
	size_t i;

	/* Generate i_key_pad and o_key_pad */
	memset(sess->auth.hmac.i_key_pad, 0, sizeof(sess->auth.hmac.i_key_pad));
	rte_memcpy(sess->auth.hmac.i_key_pad, sess->auth.hmac.key,
							xform->auth.key.length);
	memset(sess->auth.hmac.o_key_pad, 0, sizeof(sess->auth.hmac.o_key_pad));
	rte_memcpy(sess->auth.hmac.o_key_pad, sess->auth.hmac.key,
							xform->auth.key.length);
	/*
	 * XOR the key with the IPAD/OPAD values to obtain i_key_pad
	 * and o_key_pad.
	 * A byte-by-byte operation may seem less efficient here,
	 * but in fact it is the opposite.
	 * The resulting assembly code is likely to operate on NEON
	 * registers (load the auth key into Qx, load IPAD/OPAD into
	 * multiple elements of Qy, then EOR 128 bits at once).
	 */
	for (i = 0; i < SHA_BLOCK_MAX; i++) {
		sess->auth.hmac.i_key_pad[i] ^= HMAC_IPAD_VALUE;
		sess->auth.hmac.o_key_pad[i] ^= HMAC_OPAD_VALUE;
	}
}
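
/*
 * Background note: the pads prepared above follow the standard HMAC
 * construction (RFC 2104),
 *
 *   HMAC(key, msg) = H((key ^ opad) || H((key ^ ipad) || msg))
 *
 * auth_set_prerequisites() below hashes each single pad block once and
 * stores the partial digests, so per-packet processing can start from
 * these precomputed states instead of re-hashing the pads every time.
 */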

static inline int
auth_set_prerequisites(struct armv8_crypto_session *sess,
			const struct rte_crypto_sym_xform *xform)
{
	uint8_t partial[64] = { 0 };
	int error;

	switch (xform->auth.algo) {
	case RTE_CRYPTO_AUTH_SHA1_HMAC:
		/*
		 * Generate authentication key, i_key_pad and o_key_pad.
		 */
		/* Zero the key memory */
		memset(sess->auth.hmac.key, 0, SHA1_BLOCK_SIZE);

		/*
		 * Now copy the given authentication key to the session
		 * key.
		 */
		rte_memcpy(sess->auth.hmac.key, xform->auth.key.data,
						xform->auth.key.length);

		/* Prepare HMAC padding: key|pattern */
		auth_hmac_pad_prepare(sess, xform);
		/*
		 * Calculate partial hash values for i_key_pad and o_key_pad.
		 * Will be used as an initialization state for the final HMAC.
		 */
		error = armv8_sha1_block_partial(NULL,
				sess->auth.hmac.i_key_pad,
				partial, SHA1_BLOCK_SIZE);
		if (error != 0)
			return -1;
		memcpy(sess->auth.hmac.i_key_pad, partial, SHA1_BLOCK_SIZE);

		error = armv8_sha1_block_partial(NULL,
				sess->auth.hmac.o_key_pad,
				partial, SHA1_BLOCK_SIZE);
		if (error != 0)
			return -1;
		memcpy(sess->auth.hmac.o_key_pad, partial, SHA1_BLOCK_SIZE);

		break;
	case RTE_CRYPTO_AUTH_SHA256_HMAC:
		/*
		 * Generate authentication key, i_key_pad and o_key_pad.
		 */
		/* Zero the key memory */
		memset(sess->auth.hmac.key, 0, SHA256_BLOCK_SIZE);

		/*
		 * Now copy the given authentication key to the session
		 * key.
		 */
		rte_memcpy(sess->auth.hmac.key, xform->auth.key.data,
						xform->auth.key.length);

		/* Prepare HMAC padding: key|pattern */
		auth_hmac_pad_prepare(sess, xform);
		/*
		 * Calculate partial hash values for i_key_pad and o_key_pad.
		 * Will be used as an initialization state for the final HMAC.
		 */
		error = armv8_sha256_block_partial(NULL,
				sess->auth.hmac.i_key_pad,
				partial, SHA256_BLOCK_SIZE);
		if (error != 0)
			return -1;
		memcpy(sess->auth.hmac.i_key_pad, partial, SHA256_BLOCK_SIZE);

		error = armv8_sha256_block_partial(NULL,
				sess->auth.hmac.o_key_pad,
				partial, SHA256_BLOCK_SIZE);
		if (error != 0)
			return -1;
		memcpy(sess->auth.hmac.o_key_pad, partial, SHA256_BLOCK_SIZE);

		break;
	default:
		break;
	}

	return 0;
}

static inline int
cipher_set_prerequisites(struct armv8_crypto_session *sess,
			const struct rte_crypto_sym_xform *xform)
{
	crypto_key_sched_t cipher_key_sched;

	cipher_key_sched = sess->cipher.key_sched;
	if (likely(cipher_key_sched != NULL)) {
		/* Set up cipher session key */
		cipher_key_sched(sess->cipher.key.data, xform->cipher.key.data);
	}

	return 0;
}

static int
armv8_crypto_set_session_chained_parameters(struct armv8_crypto_session *sess,
		const struct rte_crypto_sym_xform *cipher_xform,
		const struct rte_crypto_sym_xform *auth_xform)
{
	enum armv8_crypto_chain_order order;
	enum armv8_crypto_cipher_operation cop;
	enum rte_crypto_cipher_algorithm calg;
	enum rte_crypto_auth_algorithm aalg;

	/* Validate the chain order of the combined operations */
	switch (sess->chain_order) {
	case ARMV8_CRYPTO_CHAIN_CIPHER_AUTH:
	case ARMV8_CRYPTO_CHAIN_AUTH_CIPHER:
		order = sess->chain_order;
		break;
	default:
		return -ENOTSUP;
	}
	/* Select cipher direction */
	sess->cipher.direction = cipher_xform->cipher.op;
	/* Select cipher key */
	sess->cipher.key.length = cipher_xform->cipher.key.length;
	/* Set cipher direction */
	switch (sess->cipher.direction) {
	case RTE_CRYPTO_CIPHER_OP_ENCRYPT:
		cop = ARMV8_CRYPTO_CIPHER_OP_ENCRYPT;
		break;
	case RTE_CRYPTO_CIPHER_OP_DECRYPT:
		cop = ARMV8_CRYPTO_CIPHER_OP_DECRYPT;
		break;
	default:
		return -ENOTSUP;
	}
	/* Set cipher algorithm */
	calg = cipher_xform->cipher.algo;

	/* Select cipher algo */
	switch (calg) {
	/* Cover supported cipher algorithms */
	case RTE_CRYPTO_CIPHER_AES_CBC:
		sess->cipher.algo = calg;
		/* IV len is always 16 bytes (block size) for AES CBC */
		sess->cipher.iv.length = 16;
		break;
	default:
		return -ENOTSUP;
	}
	/* Select auth generate/verify */
	sess->auth.operation = auth_xform->auth.op;

	/* Select auth algo */
	switch (auth_xform->auth.algo) {
	/* Cover supported hash algorithms */
	case RTE_CRYPTO_AUTH_SHA1_HMAC:
	case RTE_CRYPTO_AUTH_SHA256_HMAC: /* Fall through */
		aalg = auth_xform->auth.algo;
		sess->auth.mode = ARMV8_CRYPTO_AUTH_AS_HMAC;
		break;
	default:
		return -ENOTSUP;
	}

	/* Set the digest length */
	sess->auth.digest_length = auth_xform->auth.digest_length;

	/* Verify supported key lengths and extract proper algorithm */
	switch (cipher_xform->cipher.key.length << 3) {
	case 128:
		sess->crypto_func =
				CRYPTO_GET_ALGO(order, cop, calg, aalg, 128);
		sess->cipher.key_sched =
				CRYPTO_GET_KEY_SCHED(cop, calg, 128);
		break;
	case 192:
	case 256:
		/* These key lengths are not supported yet */
	default: /* Fall through */
		sess->crypto_func = NULL;
		sess->cipher.key_sched = NULL;
		return -ENOTSUP;
	}

	if (unlikely(sess->crypto_func == NULL ||
		sess->cipher.key_sched == NULL)) {
		/*
		 * Reaching this point means there is a bug in the
		 * algorithm selection above. Nevertheless, keep this
		 * check to catch such bugs immediately and to avoid a
		 * NULL pointer dereference during op processing.
		 */
		ARMV8_CRYPTO_LOG_ERR(
			"No appropriate crypto function for given parameters");
		return -EINVAL;
	}

	/* Set up cipher session prerequisites */
	if (cipher_set_prerequisites(sess, cipher_xform) != 0)
		return -EINVAL;

	/* Set up authentication session prerequisites */
	if (auth_set_prerequisites(sess, auth_xform) != 0)
		return -EINVAL;

	return 0;
}

/** Parse crypto xform chain and set private session parameters */
int
armv8_crypto_set_session_parameters(struct armv8_crypto_session *sess,
		const struct rte_crypto_sym_xform *xform)
{
	const struct rte_crypto_sym_xform *cipher_xform = NULL;
	const struct rte_crypto_sym_xform *auth_xform = NULL;
	bool is_chained_op;
	int ret;

	/* Filter out spurious/broken requests */
	if (xform == NULL)
		return -EINVAL;

	sess->chain_order = armv8_crypto_get_chain_order(xform);
	switch (sess->chain_order) {
	case ARMV8_CRYPTO_CHAIN_CIPHER_AUTH:
		cipher_xform = xform;
		auth_xform = xform->next;
		is_chained_op = true;
		break;
	case ARMV8_CRYPTO_CHAIN_AUTH_CIPHER:
		auth_xform = xform;
		cipher_xform = xform->next;
		is_chained_op = true;
		break;
	default:
		is_chained_op = false;
		return -ENOTSUP;
	}

	/* Set IV offset */
	sess->cipher.iv.offset = cipher_xform->cipher.iv.offset;

	if (is_chained_op) {
		ret = armv8_crypto_set_session_chained_parameters(sess,
						cipher_xform, auth_xform);
		if (unlikely(ret != 0)) {
			ARMV8_CRYPTO_LOG_ERR(
			"Invalid/unsupported chained (cipher/auth) parameters");
			return ret;
		}
	} else {
		ARMV8_CRYPTO_LOG_ERR("Invalid/unsupported operation");
		return -ENOTSUP;
	}

	return 0;
}
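
/*
 * Illustrative example (hypothetical application-side values): a chain
 * accepted by the parser above, for AES-CBC-128 encryption followed by
 * SHA1-HMAC generation, could look like:
 *
 *   struct rte_crypto_sym_xform auth_xform = {
 *           .type = RTE_CRYPTO_SYM_XFORM_AUTH,
 *           .next = NULL,
 *           .auth = {
 *                   .op = RTE_CRYPTO_AUTH_OP_GENERATE,
 *                   .algo = RTE_CRYPTO_AUTH_SHA1_HMAC,
 *                   .key = { .data = hmac_key, .length = 20 },
 *                   .digest_length = 20,
 *           },
 *   };
 *   struct rte_crypto_sym_xform cipher_xform = {
 *           .type = RTE_CRYPTO_SYM_XFORM_CIPHER,
 *           .next = &auth_xform,
 *           .cipher = {
 *                   .op = RTE_CRYPTO_CIPHER_OP_ENCRYPT,
 *                   .algo = RTE_CRYPTO_CIPHER_AES_CBC,
 *                   .key = { .data = aes_key, .length = 16 },
 *                   .iv = { .offset = IV_OFFSET, .length = 16 },
 *           },
 *   };
 *
 * where hmac_key, aes_key and IV_OFFSET are placeholders supplied by the
 * application.
 */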

/** Provide session for operation */
static inline struct armv8_crypto_session *
get_session(struct armv8_crypto_qp *qp, struct rte_crypto_op *op)
{
	struct armv8_crypto_session *sess = NULL;

	if (op->sess_type == RTE_CRYPTO_OP_WITH_SESSION) {
		/* get existing session */
		if (likely(op->sym->session != NULL)) {
			sess = (struct armv8_crypto_session *)
					get_sym_session_private_data(
					op->sym->session,
					cryptodev_driver_id);
		}
	} else {
		/* provide internal session */
		void *_sess = NULL;
		void *_sess_private_data = NULL;

		if (rte_mempool_get(qp->sess_mp, (void **)&_sess))
			return NULL;

		if (rte_mempool_get(qp->sess_mp_priv,
				(void **)&_sess_private_data)) {
			/* Return the session header to avoid leaking it */
			rte_mempool_put(qp->sess_mp, _sess);
			return NULL;
		}

		sess = (struct armv8_crypto_session *)_sess_private_data;

		if (unlikely(armv8_crypto_set_session_parameters(sess,
				op->sym->xform) != 0)) {
			rte_mempool_put(qp->sess_mp, _sess);
			rte_mempool_put(qp->sess_mp_priv, _sess_private_data);
			sess = NULL;
		} else {
			/* Attach the session only after successful setup */
			op->sym->session =
				(struct rte_cryptodev_sym_session *)_sess;
			set_sym_session_private_data(op->sym->session,
					cryptodev_driver_id,
					_sess_private_data);
		}
	}

	if (unlikely(sess == NULL))
		op->status = RTE_CRYPTO_OP_STATUS_INVALID_SESSION;

	return sess;
}

/*
 *------------------------------------------------------------------------------
 * Process Operations
 *------------------------------------------------------------------------------
 */

/*----------------------------------------------------------------------------*/

/** Process chained (cipher + auth) operation */
static inline void
process_armv8_chained_op(struct armv8_crypto_qp *qp, struct rte_crypto_op *op,
		struct armv8_crypto_session *sess,
		struct rte_mbuf *mbuf_src, struct rte_mbuf *mbuf_dst)
{
	crypto_func_t crypto_func;
	armv8_cipher_digest_t arg;
	struct rte_mbuf *m_asrc, *m_adst;
	uint8_t *csrc, *cdst;
	uint8_t *adst, *asrc;
	uint64_t clen, alen;
	int error;

	clen = op->sym->cipher.data.length;
	alen = op->sym->auth.data.length;

	csrc = rte_pktmbuf_mtod_offset(mbuf_src, uint8_t *,
			op->sym->cipher.data.offset);
	cdst = rte_pktmbuf_mtod_offset(mbuf_dst, uint8_t *,
			op->sym->cipher.data.offset);

	switch (sess->chain_order) {
	case ARMV8_CRYPTO_CHAIN_CIPHER_AUTH:
		m_asrc = m_adst = mbuf_dst;
		break;
	case ARMV8_CRYPTO_CHAIN_AUTH_CIPHER:
		m_asrc = mbuf_src;
		m_adst = mbuf_dst;
		break;
	default:
		op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
		return;
	}
	asrc = rte_pktmbuf_mtod_offset(m_asrc, uint8_t *,
			op->sym->auth.data.offset);

	switch (sess->auth.mode) {
	case ARMV8_CRYPTO_AUTH_AS_AUTH:
		/* Nothing to do here, just verify correct option */
		break;
	case ARMV8_CRYPTO_AUTH_AS_HMAC:
		arg.digest.hmac.key = sess->auth.hmac.key;
		arg.digest.hmac.i_key_pad = sess->auth.hmac.i_key_pad;
		arg.digest.hmac.o_key_pad = sess->auth.hmac.o_key_pad;
		break;
	default:
		op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
		return;
	}

	if (sess->auth.operation == RTE_CRYPTO_AUTH_OP_GENERATE) {
		adst = op->sym->auth.digest.data;
		if (adst == NULL) {
			adst = rte_pktmbuf_mtod_offset(m_adst,
					uint8_t *,
					op->sym->auth.data.offset +
					op->sym->auth.data.length);
		}
	} else {
		adst = qp->temp_digest;
	}

	arg.cipher.iv = rte_crypto_op_ctod_offset(op, uint8_t *,
					sess->cipher.iv.offset);
	arg.cipher.key = sess->cipher.key.data;
	/* Acquire combined mode function */
	crypto_func = sess->crypto_func;
	RTE_VERIFY(crypto_func != NULL);
	error = crypto_func(csrc, cdst, clen, asrc, adst, alen, &arg);
	if (error != 0) {
		op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
		return;
	}

	op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
	if (sess->auth.operation == RTE_CRYPTO_AUTH_OP_VERIFY) {
		if (memcmp(adst, op->sym->auth.digest.data,
				sess->auth.digest_length) != 0) {
			op->status = RTE_CRYPTO_OP_STATUS_AUTH_FAILED;
		}
	}
}

/** Process crypto operation for mbuf */
static inline int
process_op(struct armv8_crypto_qp *qp, struct rte_crypto_op *op,
		struct armv8_crypto_session *sess)
{
	struct rte_mbuf *msrc, *mdst;

	msrc = op->sym->m_src;
	mdst = op->sym->m_dst ? op->sym->m_dst : op->sym->m_src;

	op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;

	switch (sess->chain_order) {
	case ARMV8_CRYPTO_CHAIN_CIPHER_AUTH:
	case ARMV8_CRYPTO_CHAIN_AUTH_CIPHER: /* Fall through */
		process_armv8_chained_op(qp, op, sess, msrc, mdst);
		break;
	default:
		op->status = RTE_CRYPTO_OP_STATUS_ERROR;
		break;
	}

	/* Free session if a session-less crypto op */
	if (op->sess_type == RTE_CRYPTO_OP_SESSIONLESS) {
		memset(sess, 0, sizeof(struct armv8_crypto_session));
		memset(op->sym->session, 0,
			rte_cryptodev_sym_get_existing_header_session_size(
				op->sym->session));
		rte_mempool_put(qp->sess_mp_priv, sess);
		rte_mempool_put(qp->sess_mp, op->sym->session);
		op->sym->session = NULL;
	}

	if (op->status == RTE_CRYPTO_OP_STATUS_NOT_PROCESSED)
		op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;

	if (unlikely(op->status == RTE_CRYPTO_OP_STATUS_ERROR))
		return -1;

	return 0;
}

/*
 *------------------------------------------------------------------------------
 * PMD Framework
 *------------------------------------------------------------------------------
 */

/** Enqueue burst */
static uint16_t
armv8_crypto_pmd_enqueue_burst(void *queue_pair, struct rte_crypto_op **ops,
		uint16_t nb_ops)
{
	struct armv8_crypto_session *sess;
	struct armv8_crypto_qp *qp = queue_pair;
	int i, retval;

	for (i = 0; i < nb_ops; i++) {
		sess = get_session(qp, ops[i]);
		if (unlikely(sess == NULL))
			goto enqueue_err;

		retval = process_op(qp, ops[i], sess);
		if (unlikely(retval < 0))
			goto enqueue_err;
	}

	retval = rte_ring_enqueue_burst(qp->processed_ops, (void *)ops, i,
			NULL);
	qp->stats.enqueued_count += retval;

	return retval;

enqueue_err:
	retval = rte_ring_enqueue_burst(qp->processed_ops, (void *)ops, i,
			NULL);
	if (ops[i] != NULL)
		ops[i]->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;

	qp->stats.enqueue_err_count++;
	return retval;
}

/** Dequeue burst */
static uint16_t
armv8_crypto_pmd_dequeue_burst(void *queue_pair, struct rte_crypto_op **ops,
		uint16_t nb_ops)
{
	struct armv8_crypto_qp *qp = queue_pair;

	unsigned int nb_dequeued = 0;

	nb_dequeued = rte_ring_dequeue_burst(qp->processed_ops,
			(void **)ops, nb_ops, NULL);
	qp->stats.dequeued_count += nb_dequeued;

	return nb_dequeued;
}
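
/*
 * Usage note (illustrative, application side): this PMD performs the crypto
 * work synchronously at enqueue time and only parks finished ops on a ring,
 * so the usual DPDK poll loop applies unchanged, e.g.:
 *
 *   uint16_t n = rte_cryptodev_enqueue_burst(dev_id, qp_id, ops, nb_ops);
 *   uint16_t m = rte_cryptodev_dequeue_burst(dev_id, qp_id, deq, n);
 *
 * where dev_id, qp_id, ops and deq are application-provided values.
 */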

/** Create ARMv8 crypto device */
static int
cryptodev_armv8_crypto_create(const char *name,
			struct rte_vdev_device *vdev,
			struct rte_cryptodev_pmd_init_params *init_params)
{
	struct rte_cryptodev *dev;
	struct armv8_crypto_private *internals;

	/* Check CPU support for the AES instruction set */
	if (!rte_cpu_get_flag_enabled(RTE_CPUFLAG_AES)) {
		ARMV8_CRYPTO_LOG_ERR(
			"AES instructions not supported by CPU");
		return -EFAULT;
	}

	/* Check CPU support for the SHA instruction sets */
	if (!rte_cpu_get_flag_enabled(RTE_CPUFLAG_SHA1) ||
	    !rte_cpu_get_flag_enabled(RTE_CPUFLAG_SHA2)) {
		ARMV8_CRYPTO_LOG_ERR(
			"SHA1/SHA2 instructions not supported by CPU");
		return -EFAULT;
	}

	/* Check CPU support for the Advanced SIMD instruction set */
	if (!rte_cpu_get_flag_enabled(RTE_CPUFLAG_NEON)) {
		ARMV8_CRYPTO_LOG_ERR(
			"Advanced SIMD instructions not supported by CPU");
		return -EFAULT;
	}

	dev = rte_cryptodev_pmd_create(name, &vdev->device, init_params);
	if (dev == NULL) {
		ARMV8_CRYPTO_LOG_ERR("failed to create cryptodev vdev");
		goto init_error;
	}

	dev->driver_id = cryptodev_driver_id;
	dev->dev_ops = rte_armv8_crypto_pmd_ops;

	/* register rx/tx burst functions for data path */
	dev->dequeue_burst = armv8_crypto_pmd_dequeue_burst;
	dev->enqueue_burst = armv8_crypto_pmd_enqueue_burst;

	dev->feature_flags = RTE_CRYPTODEV_FF_SYMMETRIC_CRYPTO |
			RTE_CRYPTODEV_FF_SYM_OPERATION_CHAINING |
			RTE_CRYPTODEV_FF_CPU_NEON |
			RTE_CRYPTODEV_FF_CPU_ARM_CE |
			RTE_CRYPTODEV_FF_SYM_SESSIONLESS;

	internals = dev->data->dev_private;

	internals->max_nb_qpairs = init_params->max_nb_queue_pairs;

	return 0;

init_error:
	ARMV8_CRYPTO_LOG_ERR(
		"driver %s: cryptodev_armv8_crypto_create failed",
		init_params->name);

	cryptodev_armv8_crypto_uninit(vdev);
	return -EFAULT;
}

/** Initialise ARMv8 crypto device */
static int
cryptodev_armv8_crypto_init(struct rte_vdev_device *vdev)
{
	struct rte_cryptodev_pmd_init_params init_params = {
		"",
		sizeof(struct armv8_crypto_private),
		rte_socket_id(),
		RTE_CRYPTODEV_PMD_DEFAULT_MAX_NB_QUEUE_PAIRS
	};
	const char *name;
	const char *input_args;

	name = rte_vdev_device_name(vdev);
	if (name == NULL)
		return -EINVAL;
	input_args = rte_vdev_device_args(vdev);
	rte_cryptodev_pmd_parse_input_args(&init_params, input_args);

	return cryptodev_armv8_crypto_create(name, vdev, &init_params);
}

/** Uninitialise ARMv8 crypto device */
static int
cryptodev_armv8_crypto_uninit(struct rte_vdev_device *vdev)
{
	struct rte_cryptodev *cryptodev;
	const char *name;

	name = rte_vdev_device_name(vdev);
	if (name == NULL)
		return -EINVAL;

	RTE_LOG(INFO, PMD,
		"Closing ARMv8 crypto device %s on numa socket %u\n",
		name, rte_socket_id());

	cryptodev = rte_cryptodev_pmd_get_named_dev(name);
	if (cryptodev == NULL)
		return -ENODEV;

	return rte_cryptodev_pmd_destroy(cryptodev);
}

static struct rte_vdev_driver armv8_crypto_pmd_drv = {
	.probe = cryptodev_armv8_crypto_init,
	.remove = cryptodev_armv8_crypto_uninit
};

static struct cryptodev_driver armv8_crypto_drv;

RTE_LOG_REGISTER(crypto_armv8_log_type, pmd.crypto.armv8, ERR);

RTE_PMD_REGISTER_VDEV(CRYPTODEV_NAME_ARMV8_PMD, armv8_crypto_pmd_drv);
RTE_PMD_REGISTER_ALIAS(CRYPTODEV_NAME_ARMV8_PMD, cryptodev_armv8_pmd);
RTE_PMD_REGISTER_PARAM_STRING(CRYPTODEV_NAME_ARMV8_PMD,
	"max_nb_queue_pairs=<int> "
	"socket_id=<int>");
RTE_PMD_REGISTER_CRYPTO_DRIVER(armv8_crypto_drv, armv8_crypto_pmd_drv.driver,
		cryptodev_driver_id);