1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2018 Advanced Micro Devices, Inc. All rights reserved.
3 */
4
5 #include <dirent.h>
6 #include <fcntl.h>
7 #include <stdio.h>
8 #include <string.h>
9 #include <sys/mman.h>
10 #include <sys/queue.h>
11 #include <sys/types.h>
12 #include <unistd.h>
13 #include <openssl/sha.h>
14 #include <openssl/cmac.h> /*sub key apis*/
15 #include <openssl/evp.h> /*sub key apis*/
16
17 #include <rte_hexdump.h>
18 #include <rte_memzone.h>
19 #include <rte_malloc.h>
20 #include <rte_memory.h>
21 #include <rte_spinlock.h>
22 #include <rte_string_fns.h>
23 #include <rte_cryptodev_pmd.h>
24
25 #include "ccp_dev.h"
26 #include "ccp_crypto.h"
27 #include "ccp_pci.h"
28 #include "ccp_pmd_private.h"
29
30 #include <openssl/conf.h>
31 #include <openssl/err.h>
32 #include <openssl/hmac.h>
33
/* SHA initial context values */
/*
 * Initial hash values H0..H7 from FIPS 180-4, stored with the
 * highest-index word first (H7..H0 / H4..H0) — presumably the
 * word layout the CCP SHA engine expects in its context buffer;
 * TODO confirm against the CCP hardware documentation.
 */
static uint32_t ccp_sha1_init[SHA_COMMON_DIGEST_SIZE / sizeof(uint32_t)] = {
	SHA1_H4, SHA1_H3,
	SHA1_H2, SHA1_H1,
	SHA1_H0, 0x0U,
	0x0U, 0x0U,
};

uint32_t ccp_sha224_init[SHA256_DIGEST_SIZE / sizeof(uint32_t)] = {
	SHA224_H7, SHA224_H6,
	SHA224_H5, SHA224_H4,
	SHA224_H3, SHA224_H2,
	SHA224_H1, SHA224_H0,
};

uint32_t ccp_sha256_init[SHA256_DIGEST_SIZE / sizeof(uint32_t)] = {
	SHA256_H7, SHA256_H6,
	SHA256_H5, SHA256_H4,
	SHA256_H3, SHA256_H2,
	SHA256_H1, SHA256_H0,
};

uint64_t ccp_sha384_init[SHA512_DIGEST_SIZE / sizeof(uint64_t)] = {
	SHA384_H7, SHA384_H6,
	SHA384_H5, SHA384_H4,
	SHA384_H3, SHA384_H2,
	SHA384_H1, SHA384_H0,
};

uint64_t ccp_sha512_init[SHA512_DIGEST_SIZE / sizeof(uint64_t)] = {
	SHA512_H7, SHA512_H6,
	SHA512_H5, SHA512_H4,
	SHA512_H3, SHA512_H2,
	SHA512_H1, SHA512_H0,
};
69
/* MSVC has no 'L' suffix issue handled here; elsewhere force long literals. */
#if defined(_MSC_VER)
#define SHA3_CONST(x) x
#else
#define SHA3_CONST(x) x##L
#endif

/** 'Words' here refers to uint64_t */
#define SHA3_KECCAK_SPONGE_WORDS \
	(((1600) / 8) / sizeof(uint64_t))

/*
 * Software SHA-3 sponge state (Keccak-f[1600]), used only to precompute
 * HMAC ipad/opad partial hashes for the CCP engine.
 */
typedef struct sha3_context_ {
	uint64_t saved;
	/**
	 * The portion of the input message that we
	 * didn't consume yet (accumulated little-endian, byteIndex bytes)
	 */
	union {
		uint64_t s[SHA3_KECCAK_SPONGE_WORDS];
		/* Keccak's state */
		uint8_t sb[SHA3_KECCAK_SPONGE_WORDS * 8];
		/**total 200 ctx size**/
	};
	unsigned int byteIndex;
	/**
	 * 0..7--the next byte after the set one
	 * (starts from 0; 0--none are buffered)
	 */
	unsigned int wordIndex;
	/**
	 * 0..24--the next word to integrate input
	 * (starts from 0)
	 */
	unsigned int capacityWords;
	/**
	 * the double size of the hash output in
	 * words (e.g. 16 for Keccak 512)
	 */
} sha3_context;

/* 64-bit rotate left; y must be 1..63 (y == 0 would shift by 64 => UB). */
#ifndef SHA3_ROTL64
#define SHA3_ROTL64(x, y) \
	(((x) << (y)) | ((x) >> ((sizeof(uint64_t)*8) - (y))))
#endif
112
/* Keccak-f[1600] round constants (iota step), FIPS 202 / Keccak reference. */
static const uint64_t keccakf_rndc[24] = {
	SHA3_CONST(0x0000000000000001UL), SHA3_CONST(0x0000000000008082UL),
	SHA3_CONST(0x800000000000808aUL), SHA3_CONST(0x8000000080008000UL),
	SHA3_CONST(0x000000000000808bUL), SHA3_CONST(0x0000000080000001UL),
	SHA3_CONST(0x8000000080008081UL), SHA3_CONST(0x8000000000008009UL),
	SHA3_CONST(0x000000000000008aUL), SHA3_CONST(0x0000000000000088UL),
	SHA3_CONST(0x0000000080008009UL), SHA3_CONST(0x000000008000000aUL),
	SHA3_CONST(0x000000008000808bUL), SHA3_CONST(0x800000000000008bUL),
	SHA3_CONST(0x8000000000008089UL), SHA3_CONST(0x8000000000008003UL),
	SHA3_CONST(0x8000000000008002UL), SHA3_CONST(0x8000000000000080UL),
	SHA3_CONST(0x000000000000800aUL), SHA3_CONST(0x800000008000000aUL),
	SHA3_CONST(0x8000000080008081UL), SHA3_CONST(0x8000000000008080UL),
	SHA3_CONST(0x0000000080000001UL), SHA3_CONST(0x8000000080008008UL)
};

/* Rotation offsets for the rho step, in pi-permutation order. */
static const unsigned int keccakf_rotc[24] = {
	1, 3, 6, 10, 15, 21, 28, 36, 45, 55, 2, 14, 27, 41, 56, 8, 25, 43, 62,
	18, 39, 61, 20, 44
};

/* Lane index sequence for the pi step. */
static const unsigned int keccakf_piln[24] = {
	10, 7, 11, 17, 18, 3, 5, 16, 8, 21, 24, 4, 15, 23, 19, 13, 12, 2, 20,
	14, 22, 9, 6, 1
};
137
138 static enum ccp_cmd_order
ccp_get_cmd_id(const struct rte_crypto_sym_xform * xform)139 ccp_get_cmd_id(const struct rte_crypto_sym_xform *xform)
140 {
141 enum ccp_cmd_order res = CCP_CMD_NOT_SUPPORTED;
142
143 if (xform == NULL)
144 return res;
145 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH) {
146 if (xform->next == NULL)
147 return CCP_CMD_AUTH;
148 else if (xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
149 return CCP_CMD_HASH_CIPHER;
150 }
151 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER) {
152 if (xform->next == NULL)
153 return CCP_CMD_CIPHER;
154 else if (xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
155 return CCP_CMD_CIPHER_HASH;
156 }
157 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD)
158 return CCP_CMD_COMBINED;
159 return res;
160 }
161
162 /* partial hash using openssl */
/*
 * Run one SHA1 compression round over a single input block and copy the
 * raw running state (no padding/finalization) into data_out.
 */
static int partial_hash_sha1(uint8_t *data_in, uint8_t *data_out)
{
	SHA_CTX sha_ctx;

	if (SHA1_Init(&sha_ctx) == 0)
		return -EFAULT;
	SHA1_Transform(&sha_ctx, data_in);
	/* The chaining state sits at the start of SHA_CTX. */
	rte_memcpy(data_out, &sha_ctx, SHA_DIGEST_LENGTH);
	return 0;
}
173
/*
 * Run one SHA224 compression round over a single input block.
 * Copies SHA256_DIGEST_LENGTH bytes: the full internal state is needed,
 * not just the truncated 224-bit digest.
 */
static int partial_hash_sha224(uint8_t *data_in, uint8_t *data_out)
{
	SHA256_CTX sha_ctx;

	if (SHA224_Init(&sha_ctx) == 0)
		return -EFAULT;
	SHA256_Transform(&sha_ctx, data_in);
	rte_memcpy(data_out, &sha_ctx, SHA256_DIGEST_LENGTH);
	return 0;
}
185
/* Run one SHA256 compression round over a single input block. */
static int partial_hash_sha256(uint8_t *data_in, uint8_t *data_out)
{
	SHA256_CTX sha_ctx;

	if (SHA256_Init(&sha_ctx) == 0)
		return -EFAULT;
	SHA256_Transform(&sha_ctx, data_in);
	/* Export the raw chaining state from the head of the context. */
	rte_memcpy(data_out, &sha_ctx, SHA256_DIGEST_LENGTH);
	return 0;
}
197
/*
 * Run one SHA384 compression round over a single input block.
 * Copies SHA512_DIGEST_LENGTH bytes: the full 512-bit internal state is
 * required for the CCP partial-hash precompute.
 */
static int partial_hash_sha384(uint8_t *data_in, uint8_t *data_out)
{
	SHA512_CTX sha_ctx;

	if (SHA384_Init(&sha_ctx) == 0)
		return -EFAULT;
	SHA512_Transform(&sha_ctx, data_in);
	rte_memcpy(data_out, &sha_ctx, SHA512_DIGEST_LENGTH);
	return 0;
}
209
/* Run one SHA512 compression round over a single input block. */
static int partial_hash_sha512(uint8_t *data_in, uint8_t *data_out)
{
	SHA512_CTX sha_ctx;

	if (SHA512_Init(&sha_ctx) == 0)
		return -EFAULT;
	SHA512_Transform(&sha_ctx, data_in);
	/* Export the raw chaining state from the head of the context. */
	rte_memcpy(data_out, &sha_ctx, SHA512_DIGEST_LENGTH);
	return 0;
}
221
/*
 * Keccak-f[1600] permutation over the 25-lane (uint64_t) state, applied
 * in place. Standard theta/rho-pi/chi/iota structure (FIPS 202).
 */
static void
keccakf(uint64_t s[25])
{
	int i, j, round;
	uint64_t t, bc[5];
#define KECCAK_ROUNDS 24

	for (round = 0; round < KECCAK_ROUNDS; round++) {

		/* Theta: column parities, then mix into every lane */
		for (i = 0; i < 5; i++)
			bc[i] = s[i] ^ s[i + 5] ^ s[i + 10] ^ s[i + 15] ^
				s[i + 20];

		for (i = 0; i < 5; i++) {
			t = bc[(i + 4) % 5] ^ SHA3_ROTL64(bc[(i + 1) % 5], 1);
			for (j = 0; j < 25; j += 5)
				s[j + i] ^= t;
		}

		/* Rho Pi: rotate each lane and permute lane positions */
		t = s[1];
		for (i = 0; i < 24; i++) {
			j = keccakf_piln[i];
			bc[0] = s[j];
			s[j] = SHA3_ROTL64(t, keccakf_rotc[i]);
			t = bc[0];
		}

		/* Chi: non-linear row mixing */
		for (j = 0; j < 25; j += 5) {
			for (i = 0; i < 5; i++)
				bc[i] = s[j + i];
			for (i = 0; i < 5; i++)
				s[j + i] ^= (~bc[(i + 1) % 5]) &
					    bc[(i + 2) % 5];
		}

		/* Iota: inject the round constant */
		s[0] ^= keccakf_rndc[round];
	}
}
264
265 static void
sha3_Init224(void * priv)266 sha3_Init224(void *priv)
267 {
268 sha3_context *ctx = (sha3_context *) priv;
269
270 memset(ctx, 0, sizeof(*ctx));
271 ctx->capacityWords = 2 * 224 / (8 * sizeof(uint64_t));
272 }
273
274 static void
sha3_Init256(void * priv)275 sha3_Init256(void *priv)
276 {
277 sha3_context *ctx = (sha3_context *) priv;
278
279 memset(ctx, 0, sizeof(*ctx));
280 ctx->capacityWords = 2 * 256 / (8 * sizeof(uint64_t));
281 }
282
283 static void
sha3_Init384(void * priv)284 sha3_Init384(void *priv)
285 {
286 sha3_context *ctx = (sha3_context *) priv;
287
288 memset(ctx, 0, sizeof(*ctx));
289 ctx->capacityWords = 2 * 384 / (8 * sizeof(uint64_t));
290 }
291
292 static void
sha3_Init512(void * priv)293 sha3_Init512(void *priv)
294 {
295 sha3_context *ctx = (sha3_context *) priv;
296
297 memset(ctx, 0, sizeof(*ctx));
298 ctx->capacityWords = 2 * 512 / (8 * sizeof(uint64_t));
299 }
300
301
/*
 * Absorb 'len' bytes of input into the sponge state.
 *
 * NOTE(review): the previous header comment here described the finalize
 * padding block (0x01 || 0x00* || 0x80); the code below is the plain
 * absorb/update step — partial bytes are accumulated little-endian in
 * ctx->saved, full 64-bit words are XORed into the state, and keccakf()
 * runs whenever the rate portion of the state is full.
 */
static void
sha3_Update(void *priv, void const *bufIn, size_t len)
{
	sha3_context *ctx = (sha3_context *) priv;
	/* bytes still needed to complete the currently-buffered word */
	unsigned int old_tail = (8 - ctx->byteIndex) & 7;
	size_t words;
	unsigned int tail;
	size_t i;
	const uint8_t *buf = bufIn;

	/* Not enough input to finish the buffered word: stash and return. */
	if (len < old_tail) {
		while (len--)
			ctx->saved |= (uint64_t) (*(buf++)) <<
				      ((ctx->byteIndex++) * 8);
		return;
	}

	/* Complete the partially-buffered word and absorb it. */
	if (old_tail) {
		len -= old_tail;
		while (old_tail--)
			ctx->saved |= (uint64_t) (*(buf++)) <<
				      ((ctx->byteIndex++) * 8);

		ctx->s[ctx->wordIndex] ^= ctx->saved;
		ctx->byteIndex = 0;
		ctx->saved = 0;
		if (++ctx->wordIndex ==
		    (SHA3_KECCAK_SPONGE_WORDS - ctx->capacityWords)) {
			keccakf(ctx->s);
			ctx->wordIndex = 0;
		}
	}

	words = len / sizeof(uint64_t);
	tail = len - words * sizeof(uint64_t);

	/* Absorb whole words, assembled little-endian byte by byte. */
	for (i = 0; i < words; i++, buf += sizeof(uint64_t)) {
		const uint64_t t = (uint64_t) (buf[0]) |
				   ((uint64_t) (buf[1]) << 8 * 1) |
				   ((uint64_t) (buf[2]) << 8 * 2) |
				   ((uint64_t) (buf[3]) << 8 * 3) |
				   ((uint64_t) (buf[4]) << 8 * 4) |
				   ((uint64_t) (buf[5]) << 8 * 5) |
				   ((uint64_t) (buf[6]) << 8 * 6) |
				   ((uint64_t) (buf[7]) << 8 * 7);
		ctx->s[ctx->wordIndex] ^= t;
		if (++ctx->wordIndex ==
		    (SHA3_KECCAK_SPONGE_WORDS - ctx->capacityWords)) {
			keccakf(ctx->s);
			ctx->wordIndex = 0;
		}
	}

	/* Buffer any remaining bytes for the next call. */
	while (tail--)
		ctx->saved |= (uint64_t) (*(buf++)) << ((ctx->byteIndex++) * 8);
}
362
/*
 * Absorb one SHA3-224 block and emit the 200-byte sponge state,
 * byte-reversed for the CCP engine context layout.
 */
int partial_hash_sha3_224(uint8_t *data_in, uint8_t *data_out)
{
	sha3_context *sha3_ctx;
	int idx;

	sha3_ctx = rte_zmalloc("sha3-ctx", sizeof(*sha3_ctx), 0);
	if (sha3_ctx == NULL) {
		CCP_LOG_ERR("sha3-ctx creation failed");
		return -ENOMEM;
	}
	sha3_Init224(sha3_ctx);
	sha3_Update(sha3_ctx, data_in, SHA3_224_BLOCK_SIZE);
	for (idx = 0; idx < CCP_SHA3_CTX_SIZE; idx++)
		data_out[idx] = sha3_ctx->sb[CCP_SHA3_CTX_SIZE - idx - 1];
	rte_free(sha3_ctx);

	return 0;
}
381
/*
 * Absorb one SHA3-256 block and emit the 200-byte sponge state,
 * byte-reversed for the CCP engine context layout.
 */
int partial_hash_sha3_256(uint8_t *data_in, uint8_t *data_out)
{
	sha3_context *sha3_ctx;
	int idx;

	sha3_ctx = rte_zmalloc("sha3-ctx", sizeof(*sha3_ctx), 0);
	if (sha3_ctx == NULL) {
		CCP_LOG_ERR("sha3-ctx creation failed");
		return -ENOMEM;
	}
	sha3_Init256(sha3_ctx);
	sha3_Update(sha3_ctx, data_in, SHA3_256_BLOCK_SIZE);
	for (idx = 0; idx < CCP_SHA3_CTX_SIZE; idx++)
		data_out[idx] = sha3_ctx->sb[CCP_SHA3_CTX_SIZE - idx - 1];
	rte_free(sha3_ctx);

	return 0;
}
400
/*
 * Absorb one SHA3-384 block and emit the 200-byte sponge state,
 * byte-reversed for the CCP engine context layout.
 */
int partial_hash_sha3_384(uint8_t *data_in, uint8_t *data_out)
{
	sha3_context *sha3_ctx;
	int idx;

	sha3_ctx = rte_zmalloc("sha3-ctx", sizeof(*sha3_ctx), 0);
	if (sha3_ctx == NULL) {
		CCP_LOG_ERR("sha3-ctx creation failed");
		return -ENOMEM;
	}
	sha3_Init384(sha3_ctx);
	sha3_Update(sha3_ctx, data_in, SHA3_384_BLOCK_SIZE);
	for (idx = 0; idx < CCP_SHA3_CTX_SIZE; idx++)
		data_out[idx] = sha3_ctx->sb[CCP_SHA3_CTX_SIZE - idx - 1];
	rte_free(sha3_ctx);

	return 0;
}
419
/*
 * Absorb one SHA3-512 block and emit the 200-byte sponge state,
 * byte-reversed for the CCP engine context layout.
 */
int partial_hash_sha3_512(uint8_t *data_in, uint8_t *data_out)
{
	sha3_context *sha3_ctx;
	int idx;

	sha3_ctx = rte_zmalloc("sha3-ctx", sizeof(*sha3_ctx), 0);
	if (sha3_ctx == NULL) {
		CCP_LOG_ERR("sha3-ctx creation failed");
		return -ENOMEM;
	}
	sha3_Init512(sha3_ctx);
	sha3_Update(sha3_ctx, data_in, SHA3_512_BLOCK_SIZE);
	for (idx = 0; idx < CCP_SHA3_CTX_SIZE; idx++)
		data_out[idx] = sha3_ctx->sb[CCP_SHA3_CTX_SIZE - idx - 1];
	rte_free(sha3_ctx);

	return 0;
}
438
/*
 * Precompute the HMAC inner/outer partial hashes for the session.
 *
 * Builds ipad/opad from the session key (key is zero-padded to block
 * size by the caller), runs one compression round over each, and stores
 * the resulting chaining states into sess->auth.pre_compute: the ipad
 * state at offset 0 and the opad state at offset ctx_len. For SHA1/2
 * the state words are stored in reverse word order — presumably the
 * layout the CCP engine expects; TODO confirm. SHA3 variants store the
 * full byte-reversed sponge state.
 *
 * Returns 0 on success, -1 on failure or unsupported algorithm.
 */
static int generate_partial_hash(struct ccp_session *sess)
{

	/* VLAs sized by block_size — bounded by the largest supported
	 * HMAC block (assumed small; TODO confirm upper bound). */
	uint8_t ipad[sess->auth.block_size];
	uint8_t opad[sess->auth.block_size];
	uint8_t *ipad_t, *opad_t;
	uint32_t *hash_value_be32, hash_temp32[8];
	uint64_t *hash_value_be64, hash_temp64[8];
	int i, count;
	uint8_t *hash_value_sha3;

	opad_t = ipad_t = (uint8_t *)sess->auth.key;

	hash_value_be32 = (uint32_t *)((uint8_t *)sess->auth.pre_compute);
	hash_value_be64 = (uint64_t *)((uint8_t *)sess->auth.pre_compute);

	/* considering key size is always equal to block size of algorithm */
	for (i = 0; i < sess->auth.block_size; i++) {
		ipad[i] = (ipad_t[i] ^ HMAC_IPAD_VALUE);
		opad[i] = (opad_t[i] ^ HMAC_OPAD_VALUE);
	}

	switch (sess->auth.algo) {
	case CCP_AUTH_ALGO_SHA1_HMAC:
		count = SHA1_DIGEST_SIZE >> 2;

		/* inner (ipad) partial hash, stored word-reversed */
		if (partial_hash_sha1(ipad, (uint8_t *)hash_temp32))
			return -1;
		for (i = 0; i < count; i++, hash_value_be32++)
			*hash_value_be32 = hash_temp32[count - 1 - i];

		/* outer (opad) partial hash at offset ctx_len */
		hash_value_be32 = (uint32_t *)((uint8_t *)sess->auth.pre_compute
					       + sess->auth.ctx_len);
		if (partial_hash_sha1(opad, (uint8_t *)hash_temp32))
			return -1;
		for (i = 0; i < count; i++, hash_value_be32++)
			*hash_value_be32 = hash_temp32[count - 1 - i];
		return 0;
	case CCP_AUTH_ALGO_SHA224_HMAC:
		count = SHA256_DIGEST_SIZE >> 2;

		if (partial_hash_sha224(ipad, (uint8_t *)hash_temp32))
			return -1;
		for (i = 0; i < count; i++, hash_value_be32++)
			*hash_value_be32 = hash_temp32[count - 1 - i];

		hash_value_be32 = (uint32_t *)((uint8_t *)sess->auth.pre_compute
					       + sess->auth.ctx_len);
		if (partial_hash_sha224(opad, (uint8_t *)hash_temp32))
			return -1;
		for (i = 0; i < count; i++, hash_value_be32++)
			*hash_value_be32 = hash_temp32[count - 1 - i];
		return 0;
	case CCP_AUTH_ALGO_SHA3_224_HMAC:
		/* SHA3: the helper writes the byte-reversed state directly */
		hash_value_sha3 = sess->auth.pre_compute;
		if (partial_hash_sha3_224(ipad, hash_value_sha3))
			return -1;

		hash_value_sha3 = (uint8_t *)(sess->auth.pre_compute
					      + sess->auth.ctx_len);
		if (partial_hash_sha3_224(opad, hash_value_sha3))
			return -1;
		return 0;
	case CCP_AUTH_ALGO_SHA256_HMAC:
		count = SHA256_DIGEST_SIZE >> 2;

		if (partial_hash_sha256(ipad, (uint8_t *)hash_temp32))
			return -1;
		for (i = 0; i < count; i++, hash_value_be32++)
			*hash_value_be32 = hash_temp32[count - 1 - i];

		hash_value_be32 = (uint32_t *)((uint8_t *)sess->auth.pre_compute
					       + sess->auth.ctx_len);
		if (partial_hash_sha256(opad, (uint8_t *)hash_temp32))
			return -1;
		for (i = 0; i < count; i++, hash_value_be32++)
			*hash_value_be32 = hash_temp32[count - 1 - i];
		return 0;
	case CCP_AUTH_ALGO_SHA3_256_HMAC:
		hash_value_sha3 = sess->auth.pre_compute;
		if (partial_hash_sha3_256(ipad, hash_value_sha3))
			return -1;

		hash_value_sha3 = (uint8_t *)(sess->auth.pre_compute
					      + sess->auth.ctx_len);
		if (partial_hash_sha3_256(opad, hash_value_sha3))
			return -1;
		return 0;
	case CCP_AUTH_ALGO_SHA384_HMAC:
		count = SHA512_DIGEST_SIZE >> 3;

		if (partial_hash_sha384(ipad, (uint8_t *)hash_temp64))
			return -1;
		for (i = 0; i < count; i++, hash_value_be64++)
			*hash_value_be64 = hash_temp64[count - 1 - i];

		hash_value_be64 = (uint64_t *)((uint8_t *)sess->auth.pre_compute
					       + sess->auth.ctx_len);
		if (partial_hash_sha384(opad, (uint8_t *)hash_temp64))
			return -1;
		for (i = 0; i < count; i++, hash_value_be64++)
			*hash_value_be64 = hash_temp64[count - 1 - i];
		return 0;
	case CCP_AUTH_ALGO_SHA3_384_HMAC:
		hash_value_sha3 = sess->auth.pre_compute;
		if (partial_hash_sha3_384(ipad, hash_value_sha3))
			return -1;

		hash_value_sha3 = (uint8_t *)(sess->auth.pre_compute
					      + sess->auth.ctx_len);
		if (partial_hash_sha3_384(opad, hash_value_sha3))
			return -1;
		return 0;
	case CCP_AUTH_ALGO_SHA512_HMAC:
		count = SHA512_DIGEST_SIZE >> 3;

		if (partial_hash_sha512(ipad, (uint8_t *)hash_temp64))
			return -1;
		for (i = 0; i < count; i++, hash_value_be64++)
			*hash_value_be64 = hash_temp64[count - 1 - i];

		hash_value_be64 = (uint64_t *)((uint8_t *)sess->auth.pre_compute
					       + sess->auth.ctx_len);
		if (partial_hash_sha512(opad, (uint8_t *)hash_temp64))
			return -1;
		for (i = 0; i < count; i++, hash_value_be64++)
			*hash_value_be64 = hash_temp64[count - 1 - i];
		return 0;
	case CCP_AUTH_ALGO_SHA3_512_HMAC:
		hash_value_sha3 = sess->auth.pre_compute;
		if (partial_hash_sha3_512(ipad, hash_value_sha3))
			return -1;

		hash_value_sha3 = (uint8_t *)(sess->auth.pre_compute
					      + sess->auth.ctx_len);
		if (partial_hash_sha3_512(opad, hash_value_sha3))
			return -1;
		return 0;
	default:
		CCP_LOG_ERR("Invalid auth algo");
		return -1;
	}
}
582
/* prepare temporary keys K1 and K2 */
/*
 * Derive a CMAC subkey per NIST SP 800-38B: k = (l << 1) as a bl-byte
 * big-endian value; when the MSB of l is set, reduce by XORing the last
 * byte with the field constant (0x87 for AES's 128-bit block, 0x1b
 * otherwise).
 */
static void prepare_key(unsigned char *k, unsigned char *l, int bl)
{
	int idx;

	/* Shift the whole block left one bit, carrying between bytes. */
	for (idx = 0; idx < bl; idx++) {
		k[idx] = (unsigned char)(l[idx] << 1);
		if (idx + 1 < bl && (l[idx + 1] & 0x80))
			k[idx] |= 1;
	}
	/* A bit was shifted out: fix up with the reduction constant R. */
	if (l[0] & 0x80)
		k[bl - 1] ^= (bl == 16) ? 0x87 : 0x1b;
}
597
598 /* subkeys K1 and K2 generation for CMAC */
599 static int
generate_cmac_subkeys(struct ccp_session * sess)600 generate_cmac_subkeys(struct ccp_session *sess)
601 {
602 const EVP_CIPHER *algo;
603 EVP_CIPHER_CTX *ctx;
604 unsigned char *ccp_ctx;
605 size_t i;
606 int dstlen, totlen;
607 unsigned char zero_iv[AES_BLOCK_SIZE] = {0};
608 unsigned char dst[2 * AES_BLOCK_SIZE] = {0};
609 unsigned char k1[AES_BLOCK_SIZE] = {0};
610 unsigned char k2[AES_BLOCK_SIZE] = {0};
611
612 if (sess->auth.ut.aes_type == CCP_AES_TYPE_128)
613 algo = EVP_aes_128_cbc();
614 else if (sess->auth.ut.aes_type == CCP_AES_TYPE_192)
615 algo = EVP_aes_192_cbc();
616 else if (sess->auth.ut.aes_type == CCP_AES_TYPE_256)
617 algo = EVP_aes_256_cbc();
618 else {
619 CCP_LOG_ERR("Invalid CMAC type length");
620 return -1;
621 }
622
623 ctx = EVP_CIPHER_CTX_new();
624 if (!ctx) {
625 CCP_LOG_ERR("ctx creation failed");
626 return -1;
627 }
628 if (EVP_EncryptInit(ctx, algo, (unsigned char *)sess->auth.key,
629 (unsigned char *)zero_iv) <= 0)
630 goto key_generate_err;
631 if (EVP_CIPHER_CTX_set_padding(ctx, 0) <= 0)
632 goto key_generate_err;
633 if (EVP_EncryptUpdate(ctx, dst, &dstlen, zero_iv,
634 AES_BLOCK_SIZE) <= 0)
635 goto key_generate_err;
636 if (EVP_EncryptFinal_ex(ctx, dst + dstlen, &totlen) <= 0)
637 goto key_generate_err;
638
639 memset(sess->auth.pre_compute, 0, CCP_SB_BYTES * 2);
640
641 ccp_ctx = (unsigned char *)(sess->auth.pre_compute + CCP_SB_BYTES - 1);
642 prepare_key(k1, dst, AES_BLOCK_SIZE);
643 for (i = 0; i < AES_BLOCK_SIZE; i++, ccp_ctx--)
644 *ccp_ctx = k1[i];
645
646 ccp_ctx = (unsigned char *)(sess->auth.pre_compute +
647 (2 * CCP_SB_BYTES) - 1);
648 prepare_key(k2, k1, AES_BLOCK_SIZE);
649 for (i = 0; i < AES_BLOCK_SIZE; i++, ccp_ctx--)
650 *ccp_ctx = k2[i];
651
652 EVP_CIPHER_CTX_free(ctx);
653
654 return 0;
655
656 key_generate_err:
657 CCP_LOG_ERR("CMAC Init failed");
658 return -1;
659 }
660
/* configure session */
/*
 * Fill in the cipher half of a CCP session from the xform: direction,
 * key, IV offsets, algorithm/engine selection, and the byte-reversed
 * key copy the CCP engine consumes.
 *
 * Returns 0 on success, -1 on unsupported algo/key length, -ENOTSUP on
 * an unknown engine.
 */
static int
ccp_configure_session_cipher(struct ccp_session *sess,
			     const struct rte_crypto_sym_xform *xform)
{
	const struct rte_crypto_cipher_xform *cipher_xform = NULL;
	size_t i, j, x;

	cipher_xform = &xform->cipher;

	/* set cipher direction */
	if (cipher_xform->op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
		sess->cipher.dir = CCP_CIPHER_DIR_ENCRYPT;
	else
		sess->cipher.dir = CCP_CIPHER_DIR_DECRYPT;

	/* set cipher key */
	sess->cipher.key_length = cipher_xform->key.length;
	rte_memcpy(sess->cipher.key, cipher_xform->key.data,
		   cipher_xform->key.length);

	/* set iv parameters */
	sess->iv.offset = cipher_xform->iv.offset;
	sess->iv.length = cipher_xform->iv.length;

	switch (cipher_xform->algo) {
	case RTE_CRYPTO_CIPHER_AES_CTR:
		sess->cipher.algo = CCP_CIPHER_ALGO_AES_CTR;
		sess->cipher.um.aes_mode = CCP_AES_MODE_CTR;
		sess->cipher.engine = CCP_ENGINE_AES;
		break;
	case RTE_CRYPTO_CIPHER_AES_ECB:
		/* NOTE(review): ECB reuses the CBC algo id and selects ECB
		 * via um.aes_mode — looks intentional, confirm against the
		 * CCP command encoding. */
		sess->cipher.algo = CCP_CIPHER_ALGO_AES_CBC;
		sess->cipher.um.aes_mode = CCP_AES_MODE_ECB;
		sess->cipher.engine = CCP_ENGINE_AES;
		break;
	case RTE_CRYPTO_CIPHER_AES_CBC:
		sess->cipher.algo = CCP_CIPHER_ALGO_AES_CBC;
		sess->cipher.um.aes_mode = CCP_AES_MODE_CBC;
		sess->cipher.engine = CCP_ENGINE_AES;
		break;
	case RTE_CRYPTO_CIPHER_3DES_CBC:
		sess->cipher.algo = CCP_CIPHER_ALGO_3DES_CBC;
		sess->cipher.um.des_mode = CCP_DES_MODE_CBC;
		sess->cipher.engine = CCP_ENGINE_3DES;
		break;
	default:
		CCP_LOG_ERR("Unsupported cipher algo");
		return -1;
	}


	switch (sess->cipher.engine) {
	case CCP_ENGINE_AES:
		if (sess->cipher.key_length == 16)
			sess->cipher.ut.aes_type = CCP_AES_TYPE_128;
		else if (sess->cipher.key_length == 24)
			sess->cipher.ut.aes_type = CCP_AES_TYPE_192;
		else if (sess->cipher.key_length == 32)
			sess->cipher.ut.aes_type = CCP_AES_TYPE_256;
		else {
			CCP_LOG_ERR("Invalid cipher key length");
			return -1;
		}
		/* AES key is stored fully byte-reversed for the engine */
		for (i = 0; i < sess->cipher.key_length ; i++)
			sess->cipher.key_ccp[sess->cipher.key_length - i - 1] =
				sess->cipher.key[i];
		break;
	case CCP_ENGINE_3DES:
		if (sess->cipher.key_length == 16)
			sess->cipher.ut.des_type = CCP_DES_TYPE_128;
		else if (sess->cipher.key_length == 24)
			sess->cipher.ut.des_type = CCP_DES_TYPE_192;
		else {
			CCP_LOG_ERR("Invalid cipher key length");
			return -1;
		}
		/* 3DES key is byte-reversed within each 8-byte DES subkey */
		for (j = 0, x = 0; j < sess->cipher.key_length/8; j++, x += 8)
			for (i = 0; i < 8; i++)
				sess->cipher.key_ccp[(8 + x) - i - 1] =
					sess->cipher.key[i + x];
		break;
	default:
		CCP_LOG_ERR("Invalid CCP Engine");
		return -ENOTSUP;
	}
	/* physical addresses handed to the CCP DMA engine */
	sess->cipher.nonce_phys = rte_mem_virt2phy(sess->cipher.nonce);
	sess->cipher.key_phys = rte_mem_virt2phy(sess->cipher.key_ccp);
	return 0;
}
751
752 static int
ccp_configure_session_auth(struct ccp_session * sess,const struct rte_crypto_sym_xform * xform)753 ccp_configure_session_auth(struct ccp_session *sess,
754 const struct rte_crypto_sym_xform *xform)
755 {
756 const struct rte_crypto_auth_xform *auth_xform = NULL;
757 size_t i;
758
759 auth_xform = &xform->auth;
760
761 sess->auth.digest_length = auth_xform->digest_length;
762 if (auth_xform->op == RTE_CRYPTO_AUTH_OP_GENERATE)
763 sess->auth.op = CCP_AUTH_OP_GENERATE;
764 else
765 sess->auth.op = CCP_AUTH_OP_VERIFY;
766 switch (auth_xform->algo) {
767 case RTE_CRYPTO_AUTH_MD5_HMAC:
768 if (sess->auth_opt) {
769 sess->auth.algo = CCP_AUTH_ALGO_MD5_HMAC;
770 sess->auth.offset = ((CCP_SB_BYTES << 1) -
771 MD5_DIGEST_SIZE);
772 sess->auth.key_length = auth_xform->key.length;
773 sess->auth.block_size = MD5_BLOCK_SIZE;
774 memset(sess->auth.key, 0, sess->auth.block_size);
775 rte_memcpy(sess->auth.key, auth_xform->key.data,
776 auth_xform->key.length);
777 } else
778 return -1; /* HMAC MD5 not supported on CCP */
779 break;
780 case RTE_CRYPTO_AUTH_SHA1:
781 sess->auth.engine = CCP_ENGINE_SHA;
782 sess->auth.algo = CCP_AUTH_ALGO_SHA1;
783 sess->auth.ut.sha_type = CCP_SHA_TYPE_1;
784 sess->auth.ctx = (void *)ccp_sha1_init;
785 sess->auth.ctx_len = CCP_SB_BYTES;
786 sess->auth.offset = CCP_SB_BYTES - SHA1_DIGEST_SIZE;
787 break;
788 case RTE_CRYPTO_AUTH_SHA1_HMAC:
789 if (sess->auth_opt) {
790 if (auth_xform->key.length > SHA1_BLOCK_SIZE)
791 return -1;
792 sess->auth.algo = CCP_AUTH_ALGO_SHA1_HMAC;
793 sess->auth.offset = CCP_SB_BYTES - SHA1_DIGEST_SIZE;
794 sess->auth.block_size = SHA1_BLOCK_SIZE;
795 sess->auth.key_length = auth_xform->key.length;
796 memset(sess->auth.key, 0, sess->auth.block_size);
797 rte_memcpy(sess->auth.key, auth_xform->key.data,
798 auth_xform->key.length);
799 } else {
800 if (auth_xform->key.length > SHA1_BLOCK_SIZE)
801 return -1;
802 sess->auth.engine = CCP_ENGINE_SHA;
803 sess->auth.algo = CCP_AUTH_ALGO_SHA1_HMAC;
804 sess->auth.ut.sha_type = CCP_SHA_TYPE_1;
805 sess->auth.ctx_len = CCP_SB_BYTES;
806 sess->auth.offset = CCP_SB_BYTES - SHA1_DIGEST_SIZE;
807 sess->auth.block_size = SHA1_BLOCK_SIZE;
808 sess->auth.key_length = auth_xform->key.length;
809 memset(sess->auth.key, 0, sess->auth.block_size);
810 memset(sess->auth.pre_compute, 0,
811 sess->auth.ctx_len << 1);
812 rte_memcpy(sess->auth.key, auth_xform->key.data,
813 auth_xform->key.length);
814 if (generate_partial_hash(sess))
815 return -1;
816 }
817 break;
818 case RTE_CRYPTO_AUTH_SHA224:
819 sess->auth.algo = CCP_AUTH_ALGO_SHA224;
820 sess->auth.engine = CCP_ENGINE_SHA;
821 sess->auth.ut.sha_type = CCP_SHA_TYPE_224;
822 sess->auth.ctx = (void *)ccp_sha224_init;
823 sess->auth.ctx_len = CCP_SB_BYTES;
824 sess->auth.offset = CCP_SB_BYTES - SHA224_DIGEST_SIZE;
825 break;
826 case RTE_CRYPTO_AUTH_SHA224_HMAC:
827 if (sess->auth_opt) {
828 if (auth_xform->key.length > SHA224_BLOCK_SIZE)
829 return -1;
830 sess->auth.algo = CCP_AUTH_ALGO_SHA224_HMAC;
831 sess->auth.offset = CCP_SB_BYTES - SHA224_DIGEST_SIZE;
832 sess->auth.block_size = SHA224_BLOCK_SIZE;
833 sess->auth.key_length = auth_xform->key.length;
834 memset(sess->auth.key, 0, sess->auth.block_size);
835 rte_memcpy(sess->auth.key, auth_xform->key.data,
836 auth_xform->key.length);
837 } else {
838 if (auth_xform->key.length > SHA224_BLOCK_SIZE)
839 return -1;
840 sess->auth.algo = CCP_AUTH_ALGO_SHA224_HMAC;
841 sess->auth.engine = CCP_ENGINE_SHA;
842 sess->auth.ut.sha_type = CCP_SHA_TYPE_224;
843 sess->auth.ctx_len = CCP_SB_BYTES;
844 sess->auth.offset = CCP_SB_BYTES - SHA224_DIGEST_SIZE;
845 sess->auth.block_size = SHA224_BLOCK_SIZE;
846 sess->auth.key_length = auth_xform->key.length;
847 memset(sess->auth.key, 0, sess->auth.block_size);
848 memset(sess->auth.pre_compute, 0,
849 sess->auth.ctx_len << 1);
850 rte_memcpy(sess->auth.key, auth_xform->key.data,
851 auth_xform->key.length);
852 if (generate_partial_hash(sess))
853 return -1;
854 }
855 break;
856 case RTE_CRYPTO_AUTH_SHA3_224:
857 sess->auth.algo = CCP_AUTH_ALGO_SHA3_224;
858 sess->auth.engine = CCP_ENGINE_SHA;
859 sess->auth.ut.sha_type = CCP_SHA3_TYPE_224;
860 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
861 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA224_DIGEST_SIZE;
862 break;
863 case RTE_CRYPTO_AUTH_SHA3_224_HMAC:
864 if (auth_xform->key.length > SHA3_224_BLOCK_SIZE)
865 return -1;
866 sess->auth.algo = CCP_AUTH_ALGO_SHA3_224_HMAC;
867 sess->auth.engine = CCP_ENGINE_SHA;
868 sess->auth.ut.sha_type = CCP_SHA3_TYPE_224;
869 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
870 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA224_DIGEST_SIZE;
871 sess->auth.block_size = SHA3_224_BLOCK_SIZE;
872 sess->auth.key_length = auth_xform->key.length;
873 memset(sess->auth.key, 0, sess->auth.block_size);
874 memset(sess->auth.pre_compute, 0, 2 * sess->auth.ctx_len);
875 rte_memcpy(sess->auth.key, auth_xform->key.data,
876 auth_xform->key.length);
877 if (generate_partial_hash(sess))
878 return -1;
879 break;
880 case RTE_CRYPTO_AUTH_SHA256:
881 sess->auth.algo = CCP_AUTH_ALGO_SHA256;
882 sess->auth.engine = CCP_ENGINE_SHA;
883 sess->auth.ut.sha_type = CCP_SHA_TYPE_256;
884 sess->auth.ctx = (void *)ccp_sha256_init;
885 sess->auth.ctx_len = CCP_SB_BYTES;
886 sess->auth.offset = CCP_SB_BYTES - SHA256_DIGEST_SIZE;
887 break;
888 case RTE_CRYPTO_AUTH_SHA256_HMAC:
889 if (sess->auth_opt) {
890 if (auth_xform->key.length > SHA256_BLOCK_SIZE)
891 return -1;
892 sess->auth.algo = CCP_AUTH_ALGO_SHA256_HMAC;
893 sess->auth.offset = CCP_SB_BYTES - SHA256_DIGEST_SIZE;
894 sess->auth.block_size = SHA256_BLOCK_SIZE;
895 sess->auth.key_length = auth_xform->key.length;
896 memset(sess->auth.key, 0, sess->auth.block_size);
897 rte_memcpy(sess->auth.key, auth_xform->key.data,
898 auth_xform->key.length);
899 } else {
900 if (auth_xform->key.length > SHA256_BLOCK_SIZE)
901 return -1;
902 sess->auth.algo = CCP_AUTH_ALGO_SHA256_HMAC;
903 sess->auth.engine = CCP_ENGINE_SHA;
904 sess->auth.ut.sha_type = CCP_SHA_TYPE_256;
905 sess->auth.ctx_len = CCP_SB_BYTES;
906 sess->auth.offset = CCP_SB_BYTES - SHA256_DIGEST_SIZE;
907 sess->auth.block_size = SHA256_BLOCK_SIZE;
908 sess->auth.key_length = auth_xform->key.length;
909 memset(sess->auth.key, 0, sess->auth.block_size);
910 memset(sess->auth.pre_compute, 0,
911 sess->auth.ctx_len << 1);
912 rte_memcpy(sess->auth.key, auth_xform->key.data,
913 auth_xform->key.length);
914 if (generate_partial_hash(sess))
915 return -1;
916 }
917 break;
918 case RTE_CRYPTO_AUTH_SHA3_256:
919 sess->auth.algo = CCP_AUTH_ALGO_SHA3_256;
920 sess->auth.engine = CCP_ENGINE_SHA;
921 sess->auth.ut.sha_type = CCP_SHA3_TYPE_256;
922 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
923 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA256_DIGEST_SIZE;
924 break;
925 case RTE_CRYPTO_AUTH_SHA3_256_HMAC:
926 if (auth_xform->key.length > SHA3_256_BLOCK_SIZE)
927 return -1;
928 sess->auth.algo = CCP_AUTH_ALGO_SHA3_256_HMAC;
929 sess->auth.engine = CCP_ENGINE_SHA;
930 sess->auth.ut.sha_type = CCP_SHA3_TYPE_256;
931 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
932 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA256_DIGEST_SIZE;
933 sess->auth.block_size = SHA3_256_BLOCK_SIZE;
934 sess->auth.key_length = auth_xform->key.length;
935 memset(sess->auth.key, 0, sess->auth.block_size);
936 memset(sess->auth.pre_compute, 0, 2 * sess->auth.ctx_len);
937 rte_memcpy(sess->auth.key, auth_xform->key.data,
938 auth_xform->key.length);
939 if (generate_partial_hash(sess))
940 return -1;
941 break;
942 case RTE_CRYPTO_AUTH_SHA384:
943 sess->auth.algo = CCP_AUTH_ALGO_SHA384;
944 sess->auth.engine = CCP_ENGINE_SHA;
945 sess->auth.ut.sha_type = CCP_SHA_TYPE_384;
946 sess->auth.ctx = (void *)ccp_sha384_init;
947 sess->auth.ctx_len = CCP_SB_BYTES << 1;
948 sess->auth.offset = (CCP_SB_BYTES << 1) - SHA384_DIGEST_SIZE;
949 break;
950 case RTE_CRYPTO_AUTH_SHA384_HMAC:
951 if (sess->auth_opt) {
952 if (auth_xform->key.length > SHA384_BLOCK_SIZE)
953 return -1;
954 sess->auth.algo = CCP_AUTH_ALGO_SHA384_HMAC;
955 sess->auth.offset = ((CCP_SB_BYTES << 1) -
956 SHA384_DIGEST_SIZE);
957 sess->auth.block_size = SHA384_BLOCK_SIZE;
958 sess->auth.key_length = auth_xform->key.length;
959 memset(sess->auth.key, 0, sess->auth.block_size);
960 rte_memcpy(sess->auth.key, auth_xform->key.data,
961 auth_xform->key.length);
962 } else {
963 if (auth_xform->key.length > SHA384_BLOCK_SIZE)
964 return -1;
965 sess->auth.algo = CCP_AUTH_ALGO_SHA384_HMAC;
966 sess->auth.engine = CCP_ENGINE_SHA;
967 sess->auth.ut.sha_type = CCP_SHA_TYPE_384;
968 sess->auth.ctx_len = CCP_SB_BYTES << 1;
969 sess->auth.offset = ((CCP_SB_BYTES << 1) -
970 SHA384_DIGEST_SIZE);
971 sess->auth.block_size = SHA384_BLOCK_SIZE;
972 sess->auth.key_length = auth_xform->key.length;
973 memset(sess->auth.key, 0, sess->auth.block_size);
974 memset(sess->auth.pre_compute, 0,
975 sess->auth.ctx_len << 1);
976 rte_memcpy(sess->auth.key, auth_xform->key.data,
977 auth_xform->key.length);
978 if (generate_partial_hash(sess))
979 return -1;
980 }
981 break;
982 case RTE_CRYPTO_AUTH_SHA3_384:
983 sess->auth.algo = CCP_AUTH_ALGO_SHA3_384;
984 sess->auth.engine = CCP_ENGINE_SHA;
985 sess->auth.ut.sha_type = CCP_SHA3_TYPE_384;
986 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
987 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA384_DIGEST_SIZE;
988 break;
989 case RTE_CRYPTO_AUTH_SHA3_384_HMAC:
990 if (auth_xform->key.length > SHA3_384_BLOCK_SIZE)
991 return -1;
992 sess->auth.algo = CCP_AUTH_ALGO_SHA3_384_HMAC;
993 sess->auth.engine = CCP_ENGINE_SHA;
994 sess->auth.ut.sha_type = CCP_SHA3_TYPE_384;
995 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
996 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA384_DIGEST_SIZE;
997 sess->auth.block_size = SHA3_384_BLOCK_SIZE;
998 sess->auth.key_length = auth_xform->key.length;
999 memset(sess->auth.key, 0, sess->auth.block_size);
1000 memset(sess->auth.pre_compute, 0, 2 * sess->auth.ctx_len);
1001 rte_memcpy(sess->auth.key, auth_xform->key.data,
1002 auth_xform->key.length);
1003 if (generate_partial_hash(sess))
1004 return -1;
1005 break;
1006 case RTE_CRYPTO_AUTH_SHA512:
1007 sess->auth.algo = CCP_AUTH_ALGO_SHA512;
1008 sess->auth.engine = CCP_ENGINE_SHA;
1009 sess->auth.ut.sha_type = CCP_SHA_TYPE_512;
1010 sess->auth.ctx = (void *)ccp_sha512_init;
1011 sess->auth.ctx_len = CCP_SB_BYTES << 1;
1012 sess->auth.offset = (CCP_SB_BYTES << 1) - SHA512_DIGEST_SIZE;
1013 break;
1014 case RTE_CRYPTO_AUTH_SHA512_HMAC:
1015 if (sess->auth_opt) {
1016 if (auth_xform->key.length > SHA512_BLOCK_SIZE)
1017 return -1;
1018 sess->auth.algo = CCP_AUTH_ALGO_SHA512_HMAC;
1019 sess->auth.offset = ((CCP_SB_BYTES << 1) -
1020 SHA512_DIGEST_SIZE);
1021 sess->auth.block_size = SHA512_BLOCK_SIZE;
1022 sess->auth.key_length = auth_xform->key.length;
1023 memset(sess->auth.key, 0, sess->auth.block_size);
1024 rte_memcpy(sess->auth.key, auth_xform->key.data,
1025 auth_xform->key.length);
1026 } else {
1027 if (auth_xform->key.length > SHA512_BLOCK_SIZE)
1028 return -1;
1029 sess->auth.algo = CCP_AUTH_ALGO_SHA512_HMAC;
1030 sess->auth.engine = CCP_ENGINE_SHA;
1031 sess->auth.ut.sha_type = CCP_SHA_TYPE_512;
1032 sess->auth.ctx_len = CCP_SB_BYTES << 1;
1033 sess->auth.offset = ((CCP_SB_BYTES << 1) -
1034 SHA512_DIGEST_SIZE);
1035 sess->auth.block_size = SHA512_BLOCK_SIZE;
1036 sess->auth.key_length = auth_xform->key.length;
1037 memset(sess->auth.key, 0, sess->auth.block_size);
1038 memset(sess->auth.pre_compute, 0,
1039 sess->auth.ctx_len << 1);
1040 rte_memcpy(sess->auth.key, auth_xform->key.data,
1041 auth_xform->key.length);
1042 if (generate_partial_hash(sess))
1043 return -1;
1044 }
1045 break;
1046 case RTE_CRYPTO_AUTH_SHA3_512:
1047 sess->auth.algo = CCP_AUTH_ALGO_SHA3_512;
1048 sess->auth.engine = CCP_ENGINE_SHA;
1049 sess->auth.ut.sha_type = CCP_SHA3_TYPE_512;
1050 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
1051 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA512_DIGEST_SIZE;
1052 break;
1053 case RTE_CRYPTO_AUTH_SHA3_512_HMAC:
1054 if (auth_xform->key.length > SHA3_512_BLOCK_SIZE)
1055 return -1;
1056 sess->auth.algo = CCP_AUTH_ALGO_SHA3_512_HMAC;
1057 sess->auth.engine = CCP_ENGINE_SHA;
1058 sess->auth.ut.sha_type = CCP_SHA3_TYPE_512;
1059 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
1060 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA512_DIGEST_SIZE;
1061 sess->auth.block_size = SHA3_512_BLOCK_SIZE;
1062 sess->auth.key_length = auth_xform->key.length;
1063 memset(sess->auth.key, 0, sess->auth.block_size);
1064 memset(sess->auth.pre_compute, 0, 2 * sess->auth.ctx_len);
1065 rte_memcpy(sess->auth.key, auth_xform->key.data,
1066 auth_xform->key.length);
1067 if (generate_partial_hash(sess))
1068 return -1;
1069 break;
1070 case RTE_CRYPTO_AUTH_AES_CMAC:
1071 sess->auth.algo = CCP_AUTH_ALGO_AES_CMAC;
1072 sess->auth.engine = CCP_ENGINE_AES;
1073 sess->auth.um.aes_mode = CCP_AES_MODE_CMAC;
1074 sess->auth.key_length = auth_xform->key.length;
1075 /* padding and hash result */
1076 sess->auth.ctx_len = CCP_SB_BYTES << 1;
1077 sess->auth.offset = AES_BLOCK_SIZE;
1078 sess->auth.block_size = AES_BLOCK_SIZE;
1079 if (sess->auth.key_length == 16)
1080 sess->auth.ut.aes_type = CCP_AES_TYPE_128;
1081 else if (sess->auth.key_length == 24)
1082 sess->auth.ut.aes_type = CCP_AES_TYPE_192;
1083 else if (sess->auth.key_length == 32)
1084 sess->auth.ut.aes_type = CCP_AES_TYPE_256;
1085 else {
1086 CCP_LOG_ERR("Invalid CMAC key length");
1087 return -1;
1088 }
1089 rte_memcpy(sess->auth.key, auth_xform->key.data,
1090 sess->auth.key_length);
1091 for (i = 0; i < sess->auth.key_length; i++)
1092 sess->auth.key_ccp[sess->auth.key_length - i - 1] =
1093 sess->auth.key[i];
1094 if (generate_cmac_subkeys(sess))
1095 return -1;
1096 break;
1097 default:
1098 CCP_LOG_ERR("Unsupported hash algo");
1099 return -ENOTSUP;
1100 }
1101 return 0;
1102 }
1103
1104 static int
ccp_configure_session_aead(struct ccp_session * sess,const struct rte_crypto_sym_xform * xform)1105 ccp_configure_session_aead(struct ccp_session *sess,
1106 const struct rte_crypto_sym_xform *xform)
1107 {
1108 const struct rte_crypto_aead_xform *aead_xform = NULL;
1109 size_t i;
1110
1111 aead_xform = &xform->aead;
1112
1113 sess->cipher.key_length = aead_xform->key.length;
1114 rte_memcpy(sess->cipher.key, aead_xform->key.data,
1115 aead_xform->key.length);
1116
1117 if (aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT) {
1118 sess->cipher.dir = CCP_CIPHER_DIR_ENCRYPT;
1119 sess->auth.op = CCP_AUTH_OP_GENERATE;
1120 } else {
1121 sess->cipher.dir = CCP_CIPHER_DIR_DECRYPT;
1122 sess->auth.op = CCP_AUTH_OP_VERIFY;
1123 }
1124 sess->aead_algo = aead_xform->algo;
1125 sess->auth.aad_length = aead_xform->aad_length;
1126 sess->auth.digest_length = aead_xform->digest_length;
1127
1128 /* set iv parameters */
1129 sess->iv.offset = aead_xform->iv.offset;
1130 sess->iv.length = aead_xform->iv.length;
1131
1132 switch (aead_xform->algo) {
1133 case RTE_CRYPTO_AEAD_AES_GCM:
1134 sess->cipher.algo = CCP_CIPHER_ALGO_AES_GCM;
1135 sess->cipher.um.aes_mode = CCP_AES_MODE_GCTR;
1136 sess->cipher.engine = CCP_ENGINE_AES;
1137 if (sess->cipher.key_length == 16)
1138 sess->cipher.ut.aes_type = CCP_AES_TYPE_128;
1139 else if (sess->cipher.key_length == 24)
1140 sess->cipher.ut.aes_type = CCP_AES_TYPE_192;
1141 else if (sess->cipher.key_length == 32)
1142 sess->cipher.ut.aes_type = CCP_AES_TYPE_256;
1143 else {
1144 CCP_LOG_ERR("Invalid aead key length");
1145 return -1;
1146 }
1147 for (i = 0; i < sess->cipher.key_length; i++)
1148 sess->cipher.key_ccp[sess->cipher.key_length - i - 1] =
1149 sess->cipher.key[i];
1150 sess->auth.algo = CCP_AUTH_ALGO_AES_GCM;
1151 sess->auth.engine = CCP_ENGINE_AES;
1152 sess->auth.um.aes_mode = CCP_AES_MODE_GHASH;
1153 sess->auth.ctx_len = CCP_SB_BYTES;
1154 sess->auth.offset = 0;
1155 sess->auth.block_size = AES_BLOCK_SIZE;
1156 sess->cmd_id = CCP_CMD_COMBINED;
1157 break;
1158 default:
1159 CCP_LOG_ERR("Unsupported aead algo");
1160 return -ENOTSUP;
1161 }
1162 sess->cipher.nonce_phys = rte_mem_virt2phy(sess->cipher.nonce);
1163 sess->cipher.key_phys = rte_mem_virt2phy(sess->cipher.key_ccp);
1164 return 0;
1165 }
1166
1167 int
ccp_set_session_parameters(struct ccp_session * sess,const struct rte_crypto_sym_xform * xform,struct ccp_private * internals)1168 ccp_set_session_parameters(struct ccp_session *sess,
1169 const struct rte_crypto_sym_xform *xform,
1170 struct ccp_private *internals)
1171 {
1172 const struct rte_crypto_sym_xform *cipher_xform = NULL;
1173 const struct rte_crypto_sym_xform *auth_xform = NULL;
1174 const struct rte_crypto_sym_xform *aead_xform = NULL;
1175 int ret = 0;
1176
1177 sess->auth_opt = internals->auth_opt;
1178 sess->cmd_id = ccp_get_cmd_id(xform);
1179
1180 switch (sess->cmd_id) {
1181 case CCP_CMD_CIPHER:
1182 cipher_xform = xform;
1183 break;
1184 case CCP_CMD_AUTH:
1185 auth_xform = xform;
1186 break;
1187 case CCP_CMD_CIPHER_HASH:
1188 cipher_xform = xform;
1189 auth_xform = xform->next;
1190 break;
1191 case CCP_CMD_HASH_CIPHER:
1192 auth_xform = xform;
1193 cipher_xform = xform->next;
1194 break;
1195 case CCP_CMD_COMBINED:
1196 aead_xform = xform;
1197 break;
1198 default:
1199 CCP_LOG_ERR("Unsupported cmd_id");
1200 return -1;
1201 }
1202
1203 /* Default IV length = 0 */
1204 sess->iv.length = 0;
1205 if (cipher_xform) {
1206 ret = ccp_configure_session_cipher(sess, cipher_xform);
1207 if (ret != 0) {
1208 CCP_LOG_ERR("Invalid/unsupported cipher parameters");
1209 return ret;
1210 }
1211 }
1212 if (auth_xform) {
1213 ret = ccp_configure_session_auth(sess, auth_xform);
1214 if (ret != 0) {
1215 CCP_LOG_ERR("Invalid/unsupported auth parameters");
1216 return ret;
1217 }
1218 }
1219 if (aead_xform) {
1220 ret = ccp_configure_session_aead(sess, aead_xform);
1221 if (ret != 0) {
1222 CCP_LOG_ERR("Invalid/unsupported aead parameters");
1223 return ret;
1224 }
1225 }
1226 return ret;
1227 }
1228
1229 /* calculate CCP descriptors requirement */
1230 static inline int
ccp_cipher_slot(struct ccp_session * session)1231 ccp_cipher_slot(struct ccp_session *session)
1232 {
1233 int count = 0;
1234
1235 switch (session->cipher.algo) {
1236 case CCP_CIPHER_ALGO_AES_CBC:
1237 count = 2;
1238 /**< op + passthrough for iv */
1239 break;
1240 case CCP_CIPHER_ALGO_AES_ECB:
1241 count = 1;
1242 /**<only op*/
1243 break;
1244 case CCP_CIPHER_ALGO_AES_CTR:
1245 count = 2;
1246 /**< op + passthrough for iv */
1247 break;
1248 case CCP_CIPHER_ALGO_3DES_CBC:
1249 count = 2;
1250 /**< op + passthrough for iv */
1251 break;
1252 default:
1253 CCP_LOG_ERR("Unsupported cipher algo %d",
1254 session->cipher.algo);
1255 }
1256 return count;
1257 }
1258
1259 static inline int
ccp_auth_slot(struct ccp_session * session)1260 ccp_auth_slot(struct ccp_session *session)
1261 {
1262 int count = 0;
1263
1264 switch (session->auth.algo) {
1265 case CCP_AUTH_ALGO_SHA1:
1266 case CCP_AUTH_ALGO_SHA224:
1267 case CCP_AUTH_ALGO_SHA256:
1268 case CCP_AUTH_ALGO_SHA384:
1269 case CCP_AUTH_ALGO_SHA512:
1270 count = 3;
1271 /**< op + lsb passthrough cpy to/from*/
1272 break;
1273 case CCP_AUTH_ALGO_MD5_HMAC:
1274 break;
1275 case CCP_AUTH_ALGO_SHA1_HMAC:
1276 case CCP_AUTH_ALGO_SHA224_HMAC:
1277 case CCP_AUTH_ALGO_SHA256_HMAC:
1278 if (session->auth_opt == 0)
1279 count = 6;
1280 break;
1281 case CCP_AUTH_ALGO_SHA384_HMAC:
1282 case CCP_AUTH_ALGO_SHA512_HMAC:
1283 /**
1284 * 1. Load PHash1 = H(k ^ ipad); to LSB
1285 * 2. generate IHash = H(hash on meassage with PHash1
1286 * as init values);
1287 * 3. Retrieve IHash 2 slots for 384/512
1288 * 4. Load Phash2 = H(k ^ opad); to LSB
1289 * 5. generate FHash = H(hash on Ihash with Phash2
1290 * as init value);
1291 * 6. Retrieve HMAC output from LSB to host memory
1292 */
1293 if (session->auth_opt == 0)
1294 count = 7;
1295 break;
1296 case CCP_AUTH_ALGO_SHA3_224:
1297 case CCP_AUTH_ALGO_SHA3_256:
1298 case CCP_AUTH_ALGO_SHA3_384:
1299 case CCP_AUTH_ALGO_SHA3_512:
1300 count = 1;
1301 /**< only op ctx and dst in host memory*/
1302 break;
1303 case CCP_AUTH_ALGO_SHA3_224_HMAC:
1304 case CCP_AUTH_ALGO_SHA3_256_HMAC:
1305 count = 3;
1306 break;
1307 case CCP_AUTH_ALGO_SHA3_384_HMAC:
1308 case CCP_AUTH_ALGO_SHA3_512_HMAC:
1309 count = 4;
1310 /**
1311 * 1. Op to Perform Ihash
1312 * 2. Retrieve result from LSB to host memory
1313 * 3. Perform final hash
1314 */
1315 break;
1316 case CCP_AUTH_ALGO_AES_CMAC:
1317 count = 4;
1318 /**
1319 * op
1320 * extra descriptor in padding case
1321 * (k1/k2(255:128) with iv(127:0))
1322 * Retrieve result
1323 */
1324 break;
1325 default:
1326 CCP_LOG_ERR("Unsupported auth algo %d",
1327 session->auth.algo);
1328 }
1329
1330 return count;
1331 }
1332
1333 static int
ccp_aead_slot(struct ccp_session * session)1334 ccp_aead_slot(struct ccp_session *session)
1335 {
1336 int count = 0;
1337
1338 switch (session->aead_algo) {
1339 case RTE_CRYPTO_AEAD_AES_GCM:
1340 break;
1341 default:
1342 CCP_LOG_ERR("Unsupported aead algo %d",
1343 session->aead_algo);
1344 }
1345 switch (session->auth.algo) {
1346 case CCP_AUTH_ALGO_AES_GCM:
1347 count = 5;
1348 /**
1349 * 1. Passthru iv
1350 * 2. Hash AAD
1351 * 3. GCTR
1352 * 4. Reload passthru
1353 * 5. Hash Final tag
1354 */
1355 break;
1356 default:
1357 CCP_LOG_ERR("Unsupported combined auth ALGO %d",
1358 session->auth.algo);
1359 }
1360 return count;
1361 }
1362
1363 int
ccp_compute_slot_count(struct ccp_session * session)1364 ccp_compute_slot_count(struct ccp_session *session)
1365 {
1366 int count = 0;
1367
1368 switch (session->cmd_id) {
1369 case CCP_CMD_CIPHER:
1370 count = ccp_cipher_slot(session);
1371 break;
1372 case CCP_CMD_AUTH:
1373 count = ccp_auth_slot(session);
1374 break;
1375 case CCP_CMD_CIPHER_HASH:
1376 case CCP_CMD_HASH_CIPHER:
1377 count = ccp_cipher_slot(session);
1378 count += ccp_auth_slot(session);
1379 break;
1380 case CCP_CMD_COMBINED:
1381 count = ccp_aead_slot(session);
1382 break;
1383 default:
1384 CCP_LOG_ERR("Unsupported cmd_id");
1385
1386 }
1387
1388 return count;
1389 }
1390
1391 static uint8_t
algo_select(int sessalgo,const EVP_MD ** algo)1392 algo_select(int sessalgo,
1393 const EVP_MD **algo)
1394 {
1395 int res = 0;
1396
1397 switch (sessalgo) {
1398 case CCP_AUTH_ALGO_MD5_HMAC:
1399 *algo = EVP_md5();
1400 break;
1401 case CCP_AUTH_ALGO_SHA1_HMAC:
1402 *algo = EVP_sha1();
1403 break;
1404 case CCP_AUTH_ALGO_SHA224_HMAC:
1405 *algo = EVP_sha224();
1406 break;
1407 case CCP_AUTH_ALGO_SHA256_HMAC:
1408 *algo = EVP_sha256();
1409 break;
1410 case CCP_AUTH_ALGO_SHA384_HMAC:
1411 *algo = EVP_sha384();
1412 break;
1413 case CCP_AUTH_ALGO_SHA512_HMAC:
1414 *algo = EVP_sha512();
1415 break;
1416 default:
1417 res = -EINVAL;
1418 break;
1419 }
1420 return res;
1421 }
1422
1423 static int
process_cpu_auth_hmac(uint8_t * src,uint8_t * dst,__rte_unused uint8_t * iv,EVP_PKEY * pkey,int srclen,EVP_MD_CTX * ctx,const EVP_MD * algo,uint16_t d_len)1424 process_cpu_auth_hmac(uint8_t *src, uint8_t *dst,
1425 __rte_unused uint8_t *iv,
1426 EVP_PKEY *pkey,
1427 int srclen,
1428 EVP_MD_CTX *ctx,
1429 const EVP_MD *algo,
1430 uint16_t d_len)
1431 {
1432 size_t dstlen;
1433 unsigned char temp_dst[64];
1434
1435 if (EVP_DigestSignInit(ctx, NULL, algo, NULL, pkey) <= 0)
1436 goto process_auth_err;
1437
1438 if (EVP_DigestSignUpdate(ctx, (char *)src, srclen) <= 0)
1439 goto process_auth_err;
1440
1441 if (EVP_DigestSignFinal(ctx, temp_dst, &dstlen) <= 0)
1442 goto process_auth_err;
1443
1444 memcpy(dst, temp_dst, d_len);
1445 return 0;
1446 process_auth_err:
1447 CCP_LOG_ERR("Process cpu auth failed");
1448 return -EINVAL;
1449 }
1450
/**
 * Perform the auth part of an op on the CPU (HMAC via OpenSSL).
 *
 * Sets op->status (SUCCESS, AUTH_FAILED or ERROR) and returns 0 on a
 * completed operation, or a negative errno on setup/compute failure.
 */
static int cpu_crypto_auth(struct ccp_qp *qp,
			   struct rte_crypto_op *op,
			   struct ccp_session *sess,
			   EVP_MD_CTX *ctx)
{
	uint8_t *src, *dst;
	int srclen, status;
	struct rte_mbuf *mbuf_src, *mbuf_dst;
	const EVP_MD *algo = NULL;
	EVP_PKEY *pkey;

	/*
	 * Fix: the algo_select() result was ignored, so an unsupported
	 * algo would hand a NULL EVP_MD to OpenSSL further down.
	 */
	if (algo_select(sess->auth.algo, &algo) != 0 || algo == NULL) {
		CCP_LOG_ERR("Invalid auth algo for cpu crypto");
		op->status = RTE_CRYPTO_OP_STATUS_ERROR;
		return -EINVAL;
	}
	pkey = EVP_PKEY_new_mac_key(EVP_PKEY_HMAC, NULL, sess->auth.key,
				    sess->auth.key_length);
	/* Fix: allocation failure was previously unchecked */
	if (pkey == NULL) {
		CCP_LOG_ERR("HMAC key allocation failed");
		op->status = RTE_CRYPTO_OP_STATUS_ERROR;
		return -ENOMEM;
	}
	mbuf_src = op->sym->m_src;
	mbuf_dst = op->sym->m_dst ? op->sym->m_dst : op->sym->m_src;
	srclen = op->sym->auth.data.length;
	src = rte_pktmbuf_mtod_offset(mbuf_src, uint8_t *,
				      op->sym->auth.data.offset);

	if (sess->auth.op == CCP_AUTH_OP_VERIFY) {
		/* Verify: compute into scratch space, compare below */
		dst = qp->temp_digest;
	} else {
		dst = op->sym->auth.digest.data;
		if (dst == NULL) {
			/* NOTE(review): offset + digest_length looks
			 * suspicious (data.length expected?); kept as-is to
			 * preserve behavior — confirm against callers. */
			dst = rte_pktmbuf_mtod_offset(mbuf_dst, uint8_t *,
					op->sym->auth.data.offset +
					sess->auth.digest_length);
		}
	}
	status = process_cpu_auth_hmac(src, dst, NULL,
				       pkey, srclen,
				       ctx,
				       algo,
				       sess->auth.digest_length);
	if (status) {
		op->status = RTE_CRYPTO_OP_STATUS_ERROR;
		/* Fix: pkey was leaked on this error path */
		EVP_PKEY_free(pkey);
		return status;
	}

	if (sess->auth.op == CCP_AUTH_OP_VERIFY) {
		if (memcmp(dst, op->sym->auth.digest.data,
			   sess->auth.digest_length) != 0) {
			op->status = RTE_CRYPTO_OP_STATUS_AUTH_FAILED;
		} else {
			op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
		}
	} else {
		op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
	}
	EVP_PKEY_free(pkey);
	return 0;
}
1504
1505 static void
ccp_perform_passthru(struct ccp_passthru * pst,struct ccp_queue * cmd_q)1506 ccp_perform_passthru(struct ccp_passthru *pst,
1507 struct ccp_queue *cmd_q)
1508 {
1509 struct ccp_desc *desc;
1510 union ccp_function function;
1511
1512 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1513
1514 CCP_CMD_ENGINE(desc) = CCP_ENGINE_PASSTHRU;
1515
1516 CCP_CMD_SOC(desc) = 0;
1517 CCP_CMD_IOC(desc) = 0;
1518 CCP_CMD_INIT(desc) = 0;
1519 CCP_CMD_EOM(desc) = 0;
1520 CCP_CMD_PROT(desc) = 0;
1521
1522 function.raw = 0;
1523 CCP_PT_BYTESWAP(&function) = pst->byte_swap;
1524 CCP_PT_BITWISE(&function) = pst->bit_mod;
1525 CCP_CMD_FUNCTION(desc) = function.raw;
1526
1527 CCP_CMD_LEN(desc) = pst->len;
1528
1529 if (pst->dir) {
1530 CCP_CMD_SRC_LO(desc) = (uint32_t)(pst->src_addr);
1531 CCP_CMD_SRC_HI(desc) = high32_value(pst->src_addr);
1532 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1533
1534 CCP_CMD_DST_LO(desc) = (uint32_t)(pst->dest_addr);
1535 CCP_CMD_DST_HI(desc) = 0;
1536 CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SB;
1537
1538 if (pst->bit_mod != CCP_PASSTHRU_BITWISE_NOOP)
1539 CCP_CMD_LSB_ID(desc) = cmd_q->sb_key;
1540 } else {
1541
1542 CCP_CMD_SRC_LO(desc) = (uint32_t)(pst->src_addr);
1543 CCP_CMD_SRC_HI(desc) = 0;
1544 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SB;
1545
1546 CCP_CMD_DST_LO(desc) = (uint32_t)(pst->dest_addr);
1547 CCP_CMD_DST_HI(desc) = high32_value(pst->dest_addr);
1548 CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1549 }
1550
1551 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
1552 }
1553
1554 static int
ccp_perform_hmac(struct rte_crypto_op * op,struct ccp_queue * cmd_q)1555 ccp_perform_hmac(struct rte_crypto_op *op,
1556 struct ccp_queue *cmd_q)
1557 {
1558
1559 struct ccp_session *session;
1560 union ccp_function function;
1561 struct ccp_desc *desc;
1562 uint32_t tail;
1563 phys_addr_t src_addr, dest_addr, dest_addr_t;
1564 struct ccp_passthru pst;
1565 uint64_t auth_msg_bits;
1566 void *append_ptr;
1567 uint8_t *addr;
1568
1569 session = (struct ccp_session *)get_sym_session_private_data(
1570 op->sym->session,
1571 ccp_cryptodev_driver_id);
1572 addr = session->auth.pre_compute;
1573
1574 src_addr = rte_pktmbuf_iova_offset(op->sym->m_src,
1575 op->sym->auth.data.offset);
1576 append_ptr = (void *)rte_pktmbuf_append(op->sym->m_src,
1577 session->auth.ctx_len);
1578 dest_addr = (phys_addr_t)rte_mem_virt2phy(append_ptr);
1579 dest_addr_t = dest_addr;
1580
1581 /** Load PHash1 to LSB*/
1582 pst.src_addr = (phys_addr_t)rte_mem_virt2phy((void *)addr);
1583 pst.dest_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1584 pst.len = session->auth.ctx_len;
1585 pst.dir = 1;
1586 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1587 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1588 ccp_perform_passthru(&pst, cmd_q);
1589
1590 /**sha engine command descriptor for IntermediateHash*/
1591
1592 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1593 memset(desc, 0, Q_DESC_SIZE);
1594
1595 CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
1596
1597 CCP_CMD_SOC(desc) = 0;
1598 CCP_CMD_IOC(desc) = 0;
1599 CCP_CMD_INIT(desc) = 1;
1600 CCP_CMD_EOM(desc) = 1;
1601 CCP_CMD_PROT(desc) = 0;
1602
1603 function.raw = 0;
1604 CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
1605 CCP_CMD_FUNCTION(desc) = function.raw;
1606
1607 CCP_CMD_LEN(desc) = op->sym->auth.data.length;
1608 auth_msg_bits = (op->sym->auth.data.length +
1609 session->auth.block_size) * 8;
1610
1611 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
1612 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
1613 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1614
1615 CCP_CMD_LSB_ID(desc) = cmd_q->sb_sha;
1616 CCP_CMD_SHA_LO(desc) = ((uint32_t)auth_msg_bits);
1617 CCP_CMD_SHA_HI(desc) = high32_value(auth_msg_bits);
1618
1619 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
1620
1621 rte_wmb();
1622
1623 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
1624 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
1625 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
1626 cmd_q->qcontrol | CMD_Q_RUN);
1627
1628 /* Intermediate Hash value retrieve */
1629 if ((session->auth.ut.sha_type == CCP_SHA_TYPE_384) ||
1630 (session->auth.ut.sha_type == CCP_SHA_TYPE_512)) {
1631
1632 pst.src_addr =
1633 (phys_addr_t)((cmd_q->sb_sha + 1) * CCP_SB_BYTES);
1634 pst.dest_addr = dest_addr_t;
1635 pst.len = CCP_SB_BYTES;
1636 pst.dir = 0;
1637 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1638 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1639 ccp_perform_passthru(&pst, cmd_q);
1640
1641 pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1642 pst.dest_addr = dest_addr_t + CCP_SB_BYTES;
1643 pst.len = CCP_SB_BYTES;
1644 pst.dir = 0;
1645 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1646 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1647 ccp_perform_passthru(&pst, cmd_q);
1648
1649 } else {
1650 pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1651 pst.dest_addr = dest_addr_t;
1652 pst.len = session->auth.ctx_len;
1653 pst.dir = 0;
1654 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1655 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1656 ccp_perform_passthru(&pst, cmd_q);
1657
1658 }
1659
1660 /** Load PHash2 to LSB*/
1661 addr += session->auth.ctx_len;
1662 pst.src_addr = (phys_addr_t)rte_mem_virt2phy((void *)addr);
1663 pst.dest_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1664 pst.len = session->auth.ctx_len;
1665 pst.dir = 1;
1666 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1667 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1668 ccp_perform_passthru(&pst, cmd_q);
1669
1670 /**sha engine command descriptor for FinalHash*/
1671 dest_addr_t += session->auth.offset;
1672
1673 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1674 memset(desc, 0, Q_DESC_SIZE);
1675
1676 CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
1677
1678 CCP_CMD_SOC(desc) = 0;
1679 CCP_CMD_IOC(desc) = 0;
1680 CCP_CMD_INIT(desc) = 1;
1681 CCP_CMD_EOM(desc) = 1;
1682 CCP_CMD_PROT(desc) = 0;
1683
1684 function.raw = 0;
1685 CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
1686 CCP_CMD_FUNCTION(desc) = function.raw;
1687
1688 CCP_CMD_LEN(desc) = (session->auth.ctx_len -
1689 session->auth.offset);
1690 auth_msg_bits = (session->auth.block_size +
1691 session->auth.ctx_len -
1692 session->auth.offset) * 8;
1693
1694 CCP_CMD_SRC_LO(desc) = (uint32_t)(dest_addr_t);
1695 CCP_CMD_SRC_HI(desc) = high32_value(dest_addr_t);
1696 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1697
1698 CCP_CMD_LSB_ID(desc) = cmd_q->sb_sha;
1699 CCP_CMD_SHA_LO(desc) = ((uint32_t)auth_msg_bits);
1700 CCP_CMD_SHA_HI(desc) = high32_value(auth_msg_bits);
1701
1702 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
1703
1704 rte_wmb();
1705
1706 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
1707 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
1708 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
1709 cmd_q->qcontrol | CMD_Q_RUN);
1710
1711 /* Retrieve hmac output */
1712 pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1713 pst.dest_addr = dest_addr;
1714 pst.len = session->auth.ctx_len;
1715 pst.dir = 0;
1716 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1717 if ((session->auth.ut.sha_type == CCP_SHA_TYPE_384) ||
1718 (session->auth.ut.sha_type == CCP_SHA_TYPE_512))
1719 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1720 else
1721 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1722 ccp_perform_passthru(&pst, cmd_q);
1723
1724 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
1725 return 0;
1726
1727 }
1728
1729 static int
ccp_perform_sha(struct rte_crypto_op * op,struct ccp_queue * cmd_q)1730 ccp_perform_sha(struct rte_crypto_op *op,
1731 struct ccp_queue *cmd_q)
1732 {
1733 struct ccp_session *session;
1734 union ccp_function function;
1735 struct ccp_desc *desc;
1736 uint32_t tail;
1737 phys_addr_t src_addr, dest_addr;
1738 struct ccp_passthru pst;
1739 void *append_ptr;
1740 uint64_t auth_msg_bits;
1741
1742 session = (struct ccp_session *)get_sym_session_private_data(
1743 op->sym->session,
1744 ccp_cryptodev_driver_id);
1745
1746 src_addr = rte_pktmbuf_iova_offset(op->sym->m_src,
1747 op->sym->auth.data.offset);
1748
1749 append_ptr = (void *)rte_pktmbuf_append(op->sym->m_src,
1750 session->auth.ctx_len);
1751 dest_addr = (phys_addr_t)rte_mem_virt2phy(append_ptr);
1752
1753 /** Passthru sha context*/
1754
1755 pst.src_addr = (phys_addr_t)rte_mem_virt2phy((void *)
1756 session->auth.ctx);
1757 pst.dest_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1758 pst.len = session->auth.ctx_len;
1759 pst.dir = 1;
1760 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1761 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1762 ccp_perform_passthru(&pst, cmd_q);
1763
1764 /**prepare sha command descriptor*/
1765
1766 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1767 memset(desc, 0, Q_DESC_SIZE);
1768
1769 CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
1770
1771 CCP_CMD_SOC(desc) = 0;
1772 CCP_CMD_IOC(desc) = 0;
1773 CCP_CMD_INIT(desc) = 1;
1774 CCP_CMD_EOM(desc) = 1;
1775 CCP_CMD_PROT(desc) = 0;
1776
1777 function.raw = 0;
1778 CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
1779 CCP_CMD_FUNCTION(desc) = function.raw;
1780
1781 CCP_CMD_LEN(desc) = op->sym->auth.data.length;
1782 auth_msg_bits = op->sym->auth.data.length * 8;
1783
1784 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
1785 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
1786 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1787
1788 CCP_CMD_LSB_ID(desc) = cmd_q->sb_sha;
1789 CCP_CMD_SHA_LO(desc) = ((uint32_t)auth_msg_bits);
1790 CCP_CMD_SHA_HI(desc) = high32_value(auth_msg_bits);
1791
1792 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
1793
1794 rte_wmb();
1795
1796 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
1797 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
1798 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
1799 cmd_q->qcontrol | CMD_Q_RUN);
1800
1801 /* Hash value retrieve */
1802 pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1803 pst.dest_addr = dest_addr;
1804 pst.len = session->auth.ctx_len;
1805 pst.dir = 0;
1806 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1807 if ((session->auth.ut.sha_type == CCP_SHA_TYPE_384) ||
1808 (session->auth.ut.sha_type == CCP_SHA_TYPE_512))
1809 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1810 else
1811 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1812 ccp_perform_passthru(&pst, cmd_q);
1813
1814 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
1815 return 0;
1816
1817 }
1818
/*
 * Queue the descriptors for an SHA3-based HMAC.
 *
 * Two SHA engine descriptors are issued: the first runs the inner hash
 * over the message keyed by the first precomputed context
 * (auth.pre_compute), the second runs the final hash over the retrieved
 * inner digest keyed by the second context (pre_compute + CCP_SHA3_CTX_SIZE).
 * The final digest lands in space appended to the source mbuf.
 * Returns 0 on success, -1 if the mbuf has no tailroom.
 */
static int
ccp_perform_sha3_hmac(struct rte_crypto_op *op,
		      struct ccp_queue *cmd_q)
{
	struct ccp_session *session;
	struct ccp_passthru pst;
	union ccp_function function;
	struct ccp_desc *desc;
	uint8_t *append_ptr;
	uint32_t tail;
	phys_addr_t src_addr, dest_addr, ctx_paddr, dest_addr_t;

	session = (struct ccp_session *)get_sym_session_private_data(
					 op->sym->session,
					ccp_cryptodev_driver_id);

	src_addr = rte_pktmbuf_iova_offset(op->sym->m_src,
					   op->sym->auth.data.offset);
	/* scratch + final digest area appended to the source mbuf */
	append_ptr = (uint8_t *)rte_pktmbuf_append(op->sym->m_src,
						   session->auth.ctx_len);
	if (!append_ptr) {
		CCP_LOG_ERR("CCP MBUF append failed\n");
		return -1;
	}
	dest_addr = (phys_addr_t)rte_mem_virt2phy((void *)append_ptr);
	/* inner digest is staged in the upper half of the appended area */
	dest_addr_t = dest_addr + (session->auth.ctx_len / 2);
	ctx_paddr = (phys_addr_t)rte_mem_virt2phy((void
						   *)session->auth.pre_compute);
	desc = &cmd_q->qbase_desc[cmd_q->qidx];
	memset(desc, 0, Q_DESC_SIZE);

	/*desc1 for SHA3-Ihash operation */
	CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
	CCP_CMD_INIT(desc) = 1;
	CCP_CMD_EOM(desc) = 1;

	function.raw = 0;
	CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
	CCP_CMD_FUNCTION(desc) = function.raw;
	CCP_CMD_LEN(desc) = op->sym->auth.data.length;

	CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
	CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
	CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;

	/* inner hash state lands in the queue's SHA storage block */
	CCP_CMD_DST_LO(desc) = (cmd_q->sb_sha * CCP_SB_BYTES);
	CCP_CMD_DST_HI(desc) = 0;
	CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SB;

	/* KEY field carries the precomputed inner SHA3 context */
	CCP_CMD_KEY_LO(desc) = ((uint32_t)ctx_paddr);
	CCP_CMD_KEY_HI(desc) = high32_value(ctx_paddr);
	CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;

	cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;

	rte_wmb();
	/* kick the queue so the inner hash runs */
	tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
		      cmd_q->qcontrol | CMD_Q_RUN);

	/* Intermediate Hash value retrieve */
	if ((session->auth.ut.sha_type == CCP_SHA3_TYPE_384) ||
	    (session->auth.ut.sha_type == CCP_SHA3_TYPE_512)) {
		/* 384/512-bit state spans two LSB slots */
		pst.src_addr =
			(phys_addr_t)((cmd_q->sb_sha + 1) * CCP_SB_BYTES);
		pst.dest_addr = dest_addr_t;
		pst.len = CCP_SB_BYTES;
		pst.dir = 0;
		pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
		pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
		ccp_perform_passthru(&pst, cmd_q);

		pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
		pst.dest_addr = dest_addr_t + CCP_SB_BYTES;
		pst.len = CCP_SB_BYTES;
		pst.dir = 0;
		pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
		pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
		ccp_perform_passthru(&pst, cmd_q);

	} else {
		pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
		pst.dest_addr = dest_addr_t;
		pst.len = CCP_SB_BYTES;
		pst.dir = 0;
		pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
		pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
		ccp_perform_passthru(&pst, cmd_q);
	}

	/**sha engine command descriptor for FinalHash*/
	/* second precomputed (outer) context follows the first */
	ctx_paddr += CCP_SHA3_CTX_SIZE;
	desc = &cmd_q->qbase_desc[cmd_q->qidx];
	memset(desc, 0, Q_DESC_SIZE);

	CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
	CCP_CMD_INIT(desc) = 1;
	CCP_CMD_EOM(desc) = 1;

	function.raw = 0;
	CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
	CCP_CMD_FUNCTION(desc) = function.raw;

	/* skip the retrieval padding so the final hash reads only the
	 * digest-sized inner hash */
	if (session->auth.ut.sha_type == CCP_SHA3_TYPE_224) {
		dest_addr_t += (CCP_SB_BYTES - SHA224_DIGEST_SIZE);
		CCP_CMD_LEN(desc) = SHA224_DIGEST_SIZE;
	} else if (session->auth.ut.sha_type == CCP_SHA3_TYPE_256) {
		CCP_CMD_LEN(desc) = SHA256_DIGEST_SIZE;
	} else if (session->auth.ut.sha_type == CCP_SHA3_TYPE_384) {
		dest_addr_t += (2 * CCP_SB_BYTES - SHA384_DIGEST_SIZE);
		CCP_CMD_LEN(desc) = SHA384_DIGEST_SIZE;
	} else {
		CCP_CMD_LEN(desc) = SHA512_DIGEST_SIZE;
	}

	CCP_CMD_SRC_LO(desc) = ((uint32_t)dest_addr_t);
	CCP_CMD_SRC_HI(desc) = high32_value(dest_addr_t);
	CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;

	/* final HMAC digest goes straight to host memory */
	CCP_CMD_DST_LO(desc) = (uint32_t)dest_addr;
	CCP_CMD_DST_HI(desc) = high32_value(dest_addr);
	CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM;

	CCP_CMD_KEY_LO(desc) = ((uint32_t)ctx_paddr);
	CCP_CMD_KEY_HI(desc) = high32_value(ctx_paddr);
	CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;

	cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;

	rte_wmb();
	tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
		      cmd_q->qcontrol | CMD_Q_RUN);

	op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
	return 0;
}
1959
static int
ccp_perform_sha3(struct rte_crypto_op *op,
		 struct ccp_queue *cmd_q)
{
	struct ccp_session *session;
	union ccp_function function;
	struct ccp_desc *desc;
	uint8_t *ctx_addr, *append_ptr;
	uint32_t tail;
	phys_addr_t src_addr, dest_addr, ctx_paddr;

	session = (struct ccp_session *)get_sym_session_private_data(
					 op->sym->session,
					ccp_cryptodev_driver_id);

	/* Source is the auth region of the mbuf; the digest is DMA'd into
	 * ctx_len bytes of tailroom appended to the same mbuf.
	 */
	src_addr = rte_pktmbuf_iova_offset(op->sym->m_src,
					      op->sym->auth.data.offset);
	append_ptr = (uint8_t *)rte_pktmbuf_append(op->sym->m_src,
						session->auth.ctx_len);
	if (!append_ptr) {
		CCP_LOG_ERR("CCP MBUF append failed\n");
		return -1;
	}
	dest_addr = (phys_addr_t)rte_mem_virt2phy((void *)append_ptr);
	/* Per-session SHA3 context, passed to the engine via the KEY field */
	ctx_addr = session->auth.sha3_ctx;
	ctx_paddr = (phys_addr_t)rte_mem_virt2phy((void *)ctx_addr);

	desc = &cmd_q->qbase_desc[cmd_q->qidx];
	memset(desc, 0, Q_DESC_SIZE);

	/* prepare desc for SHA3 operation: single descriptor, INIT+EOM */
	CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
	CCP_CMD_INIT(desc) = 1;
	CCP_CMD_EOM(desc) = 1;

	function.raw = 0;
	CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
	CCP_CMD_FUNCTION(desc) = function.raw;

	CCP_CMD_LEN(desc) = op->sym->auth.data.length;

	CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
	CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
	CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;

	CCP_CMD_DST_LO(desc) = ((uint32_t)dest_addr);
	CCP_CMD_DST_HI(desc) = high32_value(dest_addr);
	CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM;

	CCP_CMD_KEY_LO(desc) = ((uint32_t)ctx_paddr);
	CCP_CMD_KEY_HI(desc) = high32_value(ctx_paddr);
	CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;

	cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;

	/* Ensure the descriptor is visible before ringing the doorbell */
	rte_wmb();

	tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
		      cmd_q->qcontrol | CMD_Q_RUN);

	/* Completion is asynchronous; status resolved at dequeue time */
	op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
	return 0;
}
2025
2026 static int
ccp_perform_aes_cmac(struct rte_crypto_op * op,struct ccp_queue * cmd_q)2027 ccp_perform_aes_cmac(struct rte_crypto_op *op,
2028 struct ccp_queue *cmd_q)
2029 {
2030 struct ccp_session *session;
2031 union ccp_function function;
2032 struct ccp_passthru pst;
2033 struct ccp_desc *desc;
2034 uint32_t tail;
2035 uint8_t *src_tb, *append_ptr, *ctx_addr;
2036 phys_addr_t src_addr, dest_addr, key_addr;
2037 int length, non_align_len;
2038
2039 session = (struct ccp_session *)get_sym_session_private_data(
2040 op->sym->session,
2041 ccp_cryptodev_driver_id);
2042 key_addr = rte_mem_virt2phy(session->auth.key_ccp);
2043
2044 src_addr = rte_pktmbuf_iova_offset(op->sym->m_src,
2045 op->sym->auth.data.offset);
2046 append_ptr = (uint8_t *)rte_pktmbuf_append(op->sym->m_src,
2047 session->auth.ctx_len);
2048 dest_addr = (phys_addr_t)rte_mem_virt2phy((void *)append_ptr);
2049
2050 function.raw = 0;
2051 CCP_AES_ENCRYPT(&function) = CCP_CIPHER_DIR_ENCRYPT;
2052 CCP_AES_MODE(&function) = session->auth.um.aes_mode;
2053 CCP_AES_TYPE(&function) = session->auth.ut.aes_type;
2054
2055 if (op->sym->auth.data.length % session->auth.block_size == 0) {
2056
2057 ctx_addr = session->auth.pre_compute;
2058 memset(ctx_addr, 0, AES_BLOCK_SIZE);
2059 pst.src_addr = (phys_addr_t)rte_mem_virt2phy((void *)ctx_addr);
2060 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
2061 pst.len = CCP_SB_BYTES;
2062 pst.dir = 1;
2063 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
2064 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
2065 ccp_perform_passthru(&pst, cmd_q);
2066
2067 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2068 memset(desc, 0, Q_DESC_SIZE);
2069
2070 /* prepare desc for aes-cmac command */
2071 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2072 CCP_CMD_EOM(desc) = 1;
2073 CCP_CMD_FUNCTION(desc) = function.raw;
2074
2075 CCP_CMD_LEN(desc) = op->sym->auth.data.length;
2076 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
2077 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
2078 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2079
2080 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2081 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2082 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2083 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2084
2085 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2086
2087 rte_wmb();
2088
2089 tail =
2090 (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2091 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2092 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2093 cmd_q->qcontrol | CMD_Q_RUN);
2094 } else {
2095 ctx_addr = session->auth.pre_compute + CCP_SB_BYTES;
2096 memset(ctx_addr, 0, AES_BLOCK_SIZE);
2097 pst.src_addr = (phys_addr_t)rte_mem_virt2phy((void *)ctx_addr);
2098 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
2099 pst.len = CCP_SB_BYTES;
2100 pst.dir = 1;
2101 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
2102 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
2103 ccp_perform_passthru(&pst, cmd_q);
2104
2105 length = (op->sym->auth.data.length / AES_BLOCK_SIZE);
2106 length *= AES_BLOCK_SIZE;
2107 non_align_len = op->sym->auth.data.length - length;
2108 /* prepare desc for aes-cmac command */
2109 /*Command 1*/
2110 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2111 memset(desc, 0, Q_DESC_SIZE);
2112
2113 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2114 CCP_CMD_INIT(desc) = 1;
2115 CCP_CMD_FUNCTION(desc) = function.raw;
2116
2117 CCP_CMD_LEN(desc) = length;
2118 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
2119 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
2120 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2121
2122 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2123 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2124 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2125 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2126
2127 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2128
2129 /*Command 2*/
2130 append_ptr = append_ptr + CCP_SB_BYTES;
2131 memset(append_ptr, 0, AES_BLOCK_SIZE);
2132 src_tb = rte_pktmbuf_mtod_offset(op->sym->m_src,
2133 uint8_t *,
2134 op->sym->auth.data.offset +
2135 length);
2136 rte_memcpy(append_ptr, src_tb, non_align_len);
2137 append_ptr[non_align_len] = CMAC_PAD_VALUE;
2138
2139 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2140 memset(desc, 0, Q_DESC_SIZE);
2141
2142 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2143 CCP_CMD_EOM(desc) = 1;
2144 CCP_CMD_FUNCTION(desc) = function.raw;
2145 CCP_CMD_LEN(desc) = AES_BLOCK_SIZE;
2146
2147 CCP_CMD_SRC_LO(desc) = ((uint32_t)(dest_addr + CCP_SB_BYTES));
2148 CCP_CMD_SRC_HI(desc) = high32_value(dest_addr + CCP_SB_BYTES);
2149 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2150
2151 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2152 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2153 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2154 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2155
2156 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2157
2158 rte_wmb();
2159 tail =
2160 (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2161 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2162 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2163 cmd_q->qcontrol | CMD_Q_RUN);
2164 }
2165 /* Retrieve result */
2166 pst.dest_addr = dest_addr;
2167 pst.src_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
2168 pst.len = CCP_SB_BYTES;
2169 pst.dir = 0;
2170 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
2171 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
2172 ccp_perform_passthru(&pst, cmd_q);
2173
2174 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
2175 return 0;
2176 }
2177
static int
ccp_perform_aes(struct rte_crypto_op *op,
		struct ccp_queue *cmd_q,
		struct ccp_batch_info *b_info)
{
	struct ccp_session *session;
	union ccp_function function;
	uint8_t *lsb_buf;
	struct ccp_passthru pst = {0};
	struct ccp_desc *desc;
	phys_addr_t src_addr, dest_addr, key_addr;
	uint8_t *iv;

	session = (struct ccp_session *)get_sym_session_private_data(
					 op->sym->session,
					ccp_cryptodev_driver_id);
	function.raw = 0;

	iv = rte_crypto_op_ctod_offset(op, uint8_t *, session->iv.offset);
	/* ECB needs no IV; CBC and CTR stage the IV into an LSB slot first */
	if (session->cipher.um.aes_mode != CCP_AES_MODE_ECB) {
		if (session->cipher.um.aes_mode == CCP_AES_MODE_CTR) {
			/* CTR: IV is appended after the session nonce */
			rte_memcpy(session->cipher.nonce + AES_BLOCK_SIZE,
				   iv, session->iv.length);
			pst.src_addr = (phys_addr_t)session->cipher.nonce_phys;
			CCP_AES_SIZE(&function) = 0x1F;
		} else {
			/* CBC: right-align the IV in a per-batch LSB buffer */
			lsb_buf =
			&(b_info->lsb_buf[b_info->lsb_buf_idx*CCP_SB_BYTES]);
			rte_memcpy(lsb_buf +
				   (CCP_SB_BYTES - session->iv.length),
				   iv, session->iv.length);
			pst.src_addr = b_info->lsb_buf_phys +
				(b_info->lsb_buf_idx * CCP_SB_BYTES);
			b_info->lsb_buf_idx++;
		}

		pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
		pst.len = CCP_SB_BYTES;
		pst.dir = 1;
		pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
		pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
		ccp_perform_passthru(&pst, cmd_q);
	}

	desc = &cmd_q->qbase_desc[cmd_q->qidx];

	/* In-place operation when no m_dst is supplied */
	src_addr = rte_pktmbuf_iova_offset(op->sym->m_src,
					   op->sym->cipher.data.offset);
	if (likely(op->sym->m_dst != NULL))
		dest_addr = rte_pktmbuf_iova_offset(op->sym->m_dst,
						op->sym->cipher.data.offset);
	else
		dest_addr = src_addr;
	key_addr = session->cipher.key_phys;

	/* prepare desc for aes command */
	CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
	CCP_CMD_INIT(desc) = 1;
	CCP_CMD_EOM(desc) = 1;

	CCP_AES_ENCRYPT(&function) = session->cipher.dir;
	CCP_AES_MODE(&function) = session->cipher.um.aes_mode;
	CCP_AES_TYPE(&function) = session->cipher.ut.aes_type;
	CCP_CMD_FUNCTION(desc) = function.raw;

	CCP_CMD_LEN(desc) = op->sym->cipher.data.length;

	CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
	CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
	CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;

	CCP_CMD_DST_LO(desc) = ((uint32_t)dest_addr);
	CCP_CMD_DST_HI(desc) = high32_value(dest_addr);
	CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM;

	CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
	CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
	CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;

	if (session->cipher.um.aes_mode != CCP_AES_MODE_ECB)
		CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;

	/* Doorbell is not rung here; the caller batches and rings it once */
	cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
	op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
	return 0;
}
2264
static int
ccp_perform_3des(struct rte_crypto_op *op,
		struct ccp_queue *cmd_q,
		struct ccp_batch_info *b_info)
{
	struct ccp_session *session;
	union ccp_function function;
	unsigned char *lsb_buf;
	struct ccp_passthru pst;
	struct ccp_desc *desc;
	uint32_t tail;
	uint8_t *iv;
	phys_addr_t src_addr, dest_addr, key_addr;

	session = (struct ccp_session *)get_sym_session_private_data(
					op->sym->session,
					ccp_cryptodev_driver_id);

	iv = rte_crypto_op_ctod_offset(op, uint8_t *, session->iv.offset);
	switch (session->cipher.um.des_mode) {
	case CCP_DES_MODE_CBC:
		/* Right-align the IV in a per-batch LSB buffer and load it
		 * into the queue's IV LSB slot via passthru.
		 */
		lsb_buf = &(b_info->lsb_buf[b_info->lsb_buf_idx*CCP_SB_BYTES]);
		b_info->lsb_buf_idx++;

		rte_memcpy(lsb_buf + (CCP_SB_BYTES - session->iv.length),
			   iv, session->iv.length);

		pst.src_addr = (phys_addr_t)rte_mem_virt2phy((void *) lsb_buf);
		pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
		pst.len = CCP_SB_BYTES;
		pst.dir = 1;
		pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
		pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
		ccp_perform_passthru(&pst, cmd_q);
		break;
	case CCP_DES_MODE_CFB:
	case CCP_DES_MODE_ECB:
		CCP_LOG_ERR("Unsupported DES cipher mode");
		return -ENOTSUP;
	}

	/* In-place operation when no m_dst is supplied */
	src_addr = rte_pktmbuf_iova_offset(op->sym->m_src,
					   op->sym->cipher.data.offset);
	if (unlikely(op->sym->m_dst != NULL))
		dest_addr =
			rte_pktmbuf_iova_offset(op->sym->m_dst,
						op->sym->cipher.data.offset);
	else
		dest_addr = src_addr;

	key_addr = rte_mem_virt2phy(session->cipher.key_ccp);

	desc = &cmd_q->qbase_desc[cmd_q->qidx];

	memset(desc, 0, Q_DESC_SIZE);

	/* prepare desc for des command */
	CCP_CMD_ENGINE(desc) = CCP_ENGINE_3DES;

	CCP_CMD_SOC(desc) = 0;
	CCP_CMD_IOC(desc) = 0;
	CCP_CMD_INIT(desc) = 1;
	CCP_CMD_EOM(desc) = 1;
	CCP_CMD_PROT(desc) = 0;

	function.raw = 0;
	CCP_DES_ENCRYPT(&function) = session->cipher.dir;
	CCP_DES_MODE(&function) = session->cipher.um.des_mode;
	CCP_DES_TYPE(&function) = session->cipher.ut.des_type;
	CCP_CMD_FUNCTION(desc) = function.raw;

	CCP_CMD_LEN(desc) = op->sym->cipher.data.length;

	CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
	CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
	CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;

	CCP_CMD_DST_LO(desc) = ((uint32_t)dest_addr);
	CCP_CMD_DST_HI(desc) = high32_value(dest_addr);
	CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM;

	CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
	CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
	CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;

	/* Non-zero mode (i.e. not ECB) uses the staged IV LSB slot */
	if (session->cipher.um.des_mode)
		CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;

	cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;

	/* Ensure the descriptor is visible before ringing the doorbell */
	rte_wmb();

	/* Write the new tail address back to the queue register */
	tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
	/* Turn the queue back on using our cached control register */
	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
		      cmd_q->qcontrol | CMD_Q_RUN);

	op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
	return 0;
}
2367
2368 static int
ccp_perform_aes_gcm(struct rte_crypto_op * op,struct ccp_queue * cmd_q)2369 ccp_perform_aes_gcm(struct rte_crypto_op *op, struct ccp_queue *cmd_q)
2370 {
2371 struct ccp_session *session;
2372 union ccp_function function;
2373 uint8_t *iv;
2374 struct ccp_passthru pst;
2375 struct ccp_desc *desc;
2376 uint32_t tail;
2377 uint64_t *temp;
2378 phys_addr_t src_addr, dest_addr, key_addr, aad_addr;
2379 phys_addr_t digest_dest_addr;
2380 int length, non_align_len;
2381
2382 session = (struct ccp_session *)get_sym_session_private_data(
2383 op->sym->session,
2384 ccp_cryptodev_driver_id);
2385 iv = rte_crypto_op_ctod_offset(op, uint8_t *, session->iv.offset);
2386 key_addr = session->cipher.key_phys;
2387
2388 src_addr = rte_pktmbuf_iova_offset(op->sym->m_src,
2389 op->sym->aead.data.offset);
2390 if (unlikely(op->sym->m_dst != NULL))
2391 dest_addr = rte_pktmbuf_iova_offset(op->sym->m_dst,
2392 op->sym->aead.data.offset);
2393 else
2394 dest_addr = src_addr;
2395 rte_pktmbuf_append(op->sym->m_src, session->auth.ctx_len);
2396 digest_dest_addr = op->sym->aead.digest.phys_addr;
2397 temp = (uint64_t *)(op->sym->aead.digest.data + AES_BLOCK_SIZE);
2398 *temp++ = rte_bswap64(session->auth.aad_length << 3);
2399 *temp = rte_bswap64(op->sym->aead.data.length << 3);
2400
2401 non_align_len = op->sym->aead.data.length % AES_BLOCK_SIZE;
2402 length = CCP_ALIGN(op->sym->aead.data.length, AES_BLOCK_SIZE);
2403
2404 aad_addr = op->sym->aead.aad.phys_addr;
2405
2406 /* CMD1 IV Passthru */
2407 rte_memcpy(session->cipher.nonce + AES_BLOCK_SIZE, iv,
2408 session->iv.length);
2409 pst.src_addr = session->cipher.nonce_phys;
2410 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
2411 pst.len = CCP_SB_BYTES;
2412 pst.dir = 1;
2413 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
2414 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
2415 ccp_perform_passthru(&pst, cmd_q);
2416
2417 /* CMD2 GHASH-AAD */
2418 function.raw = 0;
2419 CCP_AES_ENCRYPT(&function) = CCP_AES_MODE_GHASH_AAD;
2420 CCP_AES_MODE(&function) = CCP_AES_MODE_GHASH;
2421 CCP_AES_TYPE(&function) = session->cipher.ut.aes_type;
2422
2423 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2424 memset(desc, 0, Q_DESC_SIZE);
2425
2426 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2427 CCP_CMD_INIT(desc) = 1;
2428 CCP_CMD_FUNCTION(desc) = function.raw;
2429
2430 CCP_CMD_LEN(desc) = session->auth.aad_length;
2431
2432 CCP_CMD_SRC_LO(desc) = ((uint32_t)aad_addr);
2433 CCP_CMD_SRC_HI(desc) = high32_value(aad_addr);
2434 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2435
2436 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2437 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2438 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2439
2440 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2441
2442 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2443 rte_wmb();
2444
2445 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2446 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2447 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2448 cmd_q->qcontrol | CMD_Q_RUN);
2449
2450 /* CMD3 : GCTR Plain text */
2451 function.raw = 0;
2452 CCP_AES_ENCRYPT(&function) = session->cipher.dir;
2453 CCP_AES_MODE(&function) = CCP_AES_MODE_GCTR;
2454 CCP_AES_TYPE(&function) = session->cipher.ut.aes_type;
2455 if (non_align_len == 0)
2456 CCP_AES_SIZE(&function) = (AES_BLOCK_SIZE << 3) - 1;
2457 else
2458 CCP_AES_SIZE(&function) = (non_align_len << 3) - 1;
2459
2460
2461 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2462 memset(desc, 0, Q_DESC_SIZE);
2463
2464 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2465 CCP_CMD_EOM(desc) = 1;
2466 CCP_CMD_FUNCTION(desc) = function.raw;
2467
2468 CCP_CMD_LEN(desc) = length;
2469
2470 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
2471 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
2472 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2473
2474 CCP_CMD_DST_LO(desc) = ((uint32_t)dest_addr);
2475 CCP_CMD_DST_HI(desc) = high32_value(dest_addr);
2476 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2477
2478 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2479 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2480 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2481
2482 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2483
2484 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2485 rte_wmb();
2486
2487 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2488 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2489 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2490 cmd_q->qcontrol | CMD_Q_RUN);
2491
2492 /* CMD4 : PT to copy IV */
2493 pst.src_addr = session->cipher.nonce_phys;
2494 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
2495 pst.len = AES_BLOCK_SIZE;
2496 pst.dir = 1;
2497 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
2498 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
2499 ccp_perform_passthru(&pst, cmd_q);
2500
2501 /* CMD5 : GHASH-Final */
2502 function.raw = 0;
2503 CCP_AES_ENCRYPT(&function) = CCP_AES_MODE_GHASH_FINAL;
2504 CCP_AES_MODE(&function) = CCP_AES_MODE_GHASH;
2505 CCP_AES_TYPE(&function) = session->cipher.ut.aes_type;
2506
2507 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2508 memset(desc, 0, Q_DESC_SIZE);
2509
2510 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2511 CCP_CMD_FUNCTION(desc) = function.raw;
2512 /* Last block (AAD_len || PT_len)*/
2513 CCP_CMD_LEN(desc) = AES_BLOCK_SIZE;
2514
2515 CCP_CMD_SRC_LO(desc) = ((uint32_t)digest_dest_addr + AES_BLOCK_SIZE);
2516 CCP_CMD_SRC_HI(desc) = high32_value(digest_dest_addr + AES_BLOCK_SIZE);
2517 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2518
2519 CCP_CMD_DST_LO(desc) = ((uint32_t)digest_dest_addr);
2520 CCP_CMD_DST_HI(desc) = high32_value(digest_dest_addr);
2521 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2522
2523 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2524 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2525 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2526
2527 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2528
2529 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2530 rte_wmb();
2531
2532 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2533 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2534 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2535 cmd_q->qcontrol | CMD_Q_RUN);
2536
2537 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
2538 return 0;
2539 }
2540
2541 static inline int
ccp_crypto_cipher(struct rte_crypto_op * op,struct ccp_queue * cmd_q,struct ccp_batch_info * b_info)2542 ccp_crypto_cipher(struct rte_crypto_op *op,
2543 struct ccp_queue *cmd_q,
2544 struct ccp_batch_info *b_info)
2545 {
2546 int result = 0;
2547 struct ccp_session *session;
2548
2549 session = (struct ccp_session *)get_sym_session_private_data(
2550 op->sym->session,
2551 ccp_cryptodev_driver_id);
2552
2553 switch (session->cipher.algo) {
2554 case CCP_CIPHER_ALGO_AES_CBC:
2555 result = ccp_perform_aes(op, cmd_q, b_info);
2556 b_info->desccnt += 2;
2557 break;
2558 case CCP_CIPHER_ALGO_AES_CTR:
2559 result = ccp_perform_aes(op, cmd_q, b_info);
2560 b_info->desccnt += 2;
2561 break;
2562 case CCP_CIPHER_ALGO_AES_ECB:
2563 result = ccp_perform_aes(op, cmd_q, b_info);
2564 b_info->desccnt += 1;
2565 break;
2566 case CCP_CIPHER_ALGO_3DES_CBC:
2567 result = ccp_perform_3des(op, cmd_q, b_info);
2568 b_info->desccnt += 2;
2569 break;
2570 default:
2571 CCP_LOG_ERR("Unsupported cipher algo %d",
2572 session->cipher.algo);
2573 return -ENOTSUP;
2574 }
2575 return result;
2576 }
2577
2578 static inline int
ccp_crypto_auth(struct rte_crypto_op * op,struct ccp_queue * cmd_q,struct ccp_batch_info * b_info)2579 ccp_crypto_auth(struct rte_crypto_op *op,
2580 struct ccp_queue *cmd_q,
2581 struct ccp_batch_info *b_info)
2582 {
2583
2584 int result = 0;
2585 struct ccp_session *session;
2586
2587 session = (struct ccp_session *)get_sym_session_private_data(
2588 op->sym->session,
2589 ccp_cryptodev_driver_id);
2590
2591 switch (session->auth.algo) {
2592 case CCP_AUTH_ALGO_SHA1:
2593 case CCP_AUTH_ALGO_SHA224:
2594 case CCP_AUTH_ALGO_SHA256:
2595 case CCP_AUTH_ALGO_SHA384:
2596 case CCP_AUTH_ALGO_SHA512:
2597 result = ccp_perform_sha(op, cmd_q);
2598 b_info->desccnt += 3;
2599 break;
2600 case CCP_AUTH_ALGO_MD5_HMAC:
2601 if (session->auth_opt == 0)
2602 result = -1;
2603 break;
2604 case CCP_AUTH_ALGO_SHA1_HMAC:
2605 case CCP_AUTH_ALGO_SHA224_HMAC:
2606 case CCP_AUTH_ALGO_SHA256_HMAC:
2607 if (session->auth_opt == 0) {
2608 result = ccp_perform_hmac(op, cmd_q);
2609 b_info->desccnt += 6;
2610 }
2611 break;
2612 case CCP_AUTH_ALGO_SHA384_HMAC:
2613 case CCP_AUTH_ALGO_SHA512_HMAC:
2614 if (session->auth_opt == 0) {
2615 result = ccp_perform_hmac(op, cmd_q);
2616 b_info->desccnt += 7;
2617 }
2618 break;
2619 case CCP_AUTH_ALGO_SHA3_224:
2620 case CCP_AUTH_ALGO_SHA3_256:
2621 case CCP_AUTH_ALGO_SHA3_384:
2622 case CCP_AUTH_ALGO_SHA3_512:
2623 result = ccp_perform_sha3(op, cmd_q);
2624 b_info->desccnt += 1;
2625 break;
2626 case CCP_AUTH_ALGO_SHA3_224_HMAC:
2627 case CCP_AUTH_ALGO_SHA3_256_HMAC:
2628 result = ccp_perform_sha3_hmac(op, cmd_q);
2629 b_info->desccnt += 3;
2630 break;
2631 case CCP_AUTH_ALGO_SHA3_384_HMAC:
2632 case CCP_AUTH_ALGO_SHA3_512_HMAC:
2633 result = ccp_perform_sha3_hmac(op, cmd_q);
2634 b_info->desccnt += 4;
2635 break;
2636 case CCP_AUTH_ALGO_AES_CMAC:
2637 result = ccp_perform_aes_cmac(op, cmd_q);
2638 b_info->desccnt += 4;
2639 break;
2640 default:
2641 CCP_LOG_ERR("Unsupported auth algo %d",
2642 session->auth.algo);
2643 return -ENOTSUP;
2644 }
2645
2646 return result;
2647 }
2648
2649 static inline int
ccp_crypto_aead(struct rte_crypto_op * op,struct ccp_queue * cmd_q,struct ccp_batch_info * b_info)2650 ccp_crypto_aead(struct rte_crypto_op *op,
2651 struct ccp_queue *cmd_q,
2652 struct ccp_batch_info *b_info)
2653 {
2654 int result = 0;
2655 struct ccp_session *session;
2656
2657 session = (struct ccp_session *)get_sym_session_private_data(
2658 op->sym->session,
2659 ccp_cryptodev_driver_id);
2660
2661 switch (session->auth.algo) {
2662 case CCP_AUTH_ALGO_AES_GCM:
2663 if (session->cipher.algo != CCP_CIPHER_ALGO_AES_GCM) {
2664 CCP_LOG_ERR("Incorrect chain order");
2665 return -1;
2666 }
2667 result = ccp_perform_aes_gcm(op, cmd_q);
2668 b_info->desccnt += 5;
2669 break;
2670 default:
2671 CCP_LOG_ERR("Unsupported aead algo %d",
2672 session->aead_algo);
2673 return -ENOTSUP;
2674 }
2675 return result;
2676 }
2677
2678 int
process_ops_to_enqueue(struct ccp_qp * qp,struct rte_crypto_op ** op,struct ccp_queue * cmd_q,uint16_t nb_ops,uint16_t total_nb_ops,int slots_req,uint16_t b_idx)2679 process_ops_to_enqueue(struct ccp_qp *qp,
2680 struct rte_crypto_op **op,
2681 struct ccp_queue *cmd_q,
2682 uint16_t nb_ops,
2683 uint16_t total_nb_ops,
2684 int slots_req,
2685 uint16_t b_idx)
2686 {
2687 int i, result = 0;
2688 struct ccp_batch_info *b_info;
2689 struct ccp_session *session;
2690 EVP_MD_CTX *auth_ctx = NULL;
2691
2692 if (rte_mempool_get(qp->batch_mp, (void **)&b_info)) {
2693 CCP_LOG_ERR("batch info allocation failed");
2694 return 0;
2695 }
2696
2697 auth_ctx = EVP_MD_CTX_create();
2698 if (unlikely(!auth_ctx)) {
2699 CCP_LOG_ERR("Unable to create auth ctx");
2700 return 0;
2701 }
2702 b_info->auth_ctr = 0;
2703
2704 /* populate batch info necessary for dequeue */
2705 b_info->op_idx = 0;
2706 b_info->b_idx = 0;
2707 b_info->lsb_buf_idx = 0;
2708 b_info->desccnt = 0;
2709 b_info->cmd_q = cmd_q;
2710 b_info->lsb_buf_phys =
2711 (phys_addr_t)rte_mem_virt2phy((void *)b_info->lsb_buf);
2712 rte_atomic64_sub(&b_info->cmd_q->free_slots, slots_req);
2713
2714 b_info->head_offset = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx *
2715 Q_DESC_SIZE);
2716 for (i = b_idx; i < (nb_ops+b_idx); i++) {
2717 session = (struct ccp_session *)get_sym_session_private_data(
2718 op[i]->sym->session,
2719 ccp_cryptodev_driver_id);
2720 switch (session->cmd_id) {
2721 case CCP_CMD_CIPHER:
2722 result = ccp_crypto_cipher(op[i], cmd_q, b_info);
2723 break;
2724 case CCP_CMD_AUTH:
2725 if (session->auth_opt) {
2726 b_info->auth_ctr++;
2727 result = cpu_crypto_auth(qp, op[i],
2728 session, auth_ctx);
2729 } else
2730 result = ccp_crypto_auth(op[i], cmd_q, b_info);
2731 break;
2732 case CCP_CMD_CIPHER_HASH:
2733 result = ccp_crypto_cipher(op[i], cmd_q, b_info);
2734 if (result)
2735 break;
2736 result = ccp_crypto_auth(op[i], cmd_q, b_info);
2737 break;
2738 case CCP_CMD_HASH_CIPHER:
2739 if (session->auth_opt) {
2740 result = cpu_crypto_auth(qp, op[i],
2741 session, auth_ctx);
2742 if (op[i]->status !=
2743 RTE_CRYPTO_OP_STATUS_SUCCESS)
2744 CCP_LOG_ERR("RTE_CRYPTO_OP_STATUS_AUTH_FAILED");
2745 } else
2746 result = ccp_crypto_auth(op[i], cmd_q, b_info);
2747
2748 if (result)
2749 break;
2750 result = ccp_crypto_cipher(op[i], cmd_q, b_info);
2751 break;
2752 case CCP_CMD_COMBINED:
2753 result = ccp_crypto_aead(op[i], cmd_q, b_info);
2754 break;
2755 default:
2756 CCP_LOG_ERR("Unsupported cmd_id");
2757 result = -1;
2758 }
2759 if (unlikely(result < 0)) {
2760 rte_atomic64_add(&b_info->cmd_q->free_slots,
2761 (slots_req - b_info->desccnt));
2762 break;
2763 }
2764 b_info->op[i] = op[i];
2765 }
2766
2767 b_info->opcnt = i;
2768 b_info->b_idx = b_idx;
2769 b_info->total_nb_ops = total_nb_ops;
2770 b_info->tail_offset = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx *
2771 Q_DESC_SIZE);
2772
2773 rte_wmb();
2774 /* Write the new tail address back to the queue register */
2775 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE,
2776 b_info->tail_offset);
2777 /* Turn the queue back on using our cached control register */
2778 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2779 cmd_q->qcontrol | CMD_Q_RUN);
2780
2781 rte_ring_enqueue(qp->processed_pkts, (void *)b_info);
2782
2783 EVP_MD_CTX_destroy(auth_ctx);
2784 return i-b_idx;
2785 }
2786
/* Finalize an auth/AEAD op at dequeue time: locate the digest the engine
 * wrote into the mbuf tailroom, byte-swap it on the host where required,
 * then either verify it against the supplied digest or copy it out, and
 * trim the scratch area from the mbuf.
 */
static inline void ccp_auth_dq_prepare(struct rte_crypto_op *op)
{
	struct ccp_session *session;
	uint8_t *digest_data, *addr;
	struct rte_mbuf *m_last;
	int offset, digest_offset;
	uint8_t digest_le[64];

	session = (struct ccp_session *)get_sym_session_private_data(
					 op->sym->session,
					ccp_cryptodev_driver_id);

	if (session->cmd_id == CCP_CMD_COMBINED) {
		digest_data = op->sym->aead.digest.data;
		digest_offset = op->sym->aead.data.offset +
					op->sym->aead.data.length;
	} else {
		digest_data = op->sym->auth.digest.data;
		digest_offset = op->sym->auth.data.offset +
					op->sym->auth.data.length;
	}
	/* The ctx_len scratch area was appended at enqueue; it sits at the
	 * end of the last segment.
	 */
	m_last = rte_pktmbuf_lastseg(op->sym->m_src);
	addr = (uint8_t *)((char *)m_last->buf_addr + m_last->data_off +
			   m_last->data_len - session->auth.ctx_len);

	/* Make sure the device's writes are observed before reading */
	rte_mb();
	offset = session->auth.offset;

	if (session->auth.engine == CCP_ENGINE_SHA)
		if ((session->auth.ut.sha_type != CCP_SHA_TYPE_1) &&
		    (session->auth.ut.sha_type != CCP_SHA_TYPE_224) &&
		    (session->auth.ut.sha_type != CCP_SHA_TYPE_256)) {
			/* All other algorithms require byte
			 * swap done by host
			 */
			unsigned int i;

			offset = session->auth.ctx_len -
				session->auth.offset - 1;
			for (i = 0; i < session->auth.digest_length; i++)
				digest_le[i] = addr[offset - i];
			offset = 0;
			addr = digest_le;
		}

	op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
	if (session->auth.op == CCP_AUTH_OP_VERIFY) {
		if (memcmp(addr + offset, digest_data,
			   session->auth.digest_length) != 0)
			op->status = RTE_CRYPTO_OP_STATUS_AUTH_FAILED;

	} else {
		/* Generate: fall back to an m_dst offset when no digest
		 * pointer was supplied in the op.
		 */
		if (unlikely(digest_data == 0))
			digest_data = rte_pktmbuf_mtod_offset(
					op->sym->m_dst, uint8_t *,
					digest_offset);
		rte_memcpy(digest_data, addr + offset,
			   session->auth.digest_length);
	}
	/* Trim area used for digest from mbuf. */
	rte_pktmbuf_trim(op->sym->m_src,
			 session->auth.ctx_len);
}
2850
/* Post-process up to nb_ops completed ops from a batch into op_d:
 * set final status for cipher-only ops and finalize digests for
 * auth/AEAD ops.  Consumes entries via b_info->op_idx so a batch can be
 * drained across multiple calls.  Returns the number of ops prepared.
 */
static int
ccp_prepare_ops(struct ccp_qp *qp,
		struct rte_crypto_op **op_d,
		struct ccp_batch_info *b_info,
		uint16_t nb_ops)
{
	int i, min_ops;
	struct ccp_session *session;

	/* Digest context for sessions using the CPU (auth_opt) path */
	EVP_MD_CTX *auth_ctx = NULL;

	auth_ctx = EVP_MD_CTX_create();
	if (unlikely(!auth_ctx)) {
		CCP_LOG_ERR("Unable to create auth ctx");
		return 0;
	}
	min_ops = RTE_MIN(nb_ops, b_info->opcnt);

	for (i = b_info->b_idx; i < min_ops; i++) {
		/* op_idx tracks how far into the batch we have drained */
		op_d[i] = b_info->op[b_info->b_idx + b_info->op_idx++];
		session = (struct ccp_session *)get_sym_session_private_data(
						 op_d[i]->sym->session,
						ccp_cryptodev_driver_id);
		switch (session->cmd_id) {
		case CCP_CMD_CIPHER:
			op_d[i]->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
			break;
		case CCP_CMD_AUTH:
			if (session->auth_opt == 0)
				ccp_auth_dq_prepare(op_d[i]);
			break;
		case CCP_CMD_CIPHER_HASH:
			if (session->auth_opt)
				cpu_crypto_auth(qp, op_d[i],
						session, auth_ctx);
			else
				ccp_auth_dq_prepare(op_d[i]);
			break;
		case CCP_CMD_HASH_CIPHER:
			if (session->auth_opt)
				op_d[i]->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
			else
				ccp_auth_dq_prepare(op_d[i]);
			break;
		case CCP_CMD_COMBINED:
			ccp_auth_dq_prepare(op_d[i]);
			break;
		default:
			CCP_LOG_ERR("Unsupported cmd_id");
		}
	}

	EVP_MD_CTX_destroy(auth_ctx);
	b_info->opcnt -= min_ops;
	return min_ops;
}
2907
/* Dequeue completed ops for the queue pair.  Picks up a partially
 * drained batch if one is pending, otherwise pulls the next batch from
 * the processed ring; checks hardware head progress against the batch's
 * head/tail span (with wraparound) before declaring it complete.
 * Returns the number of ops written into op[].
 */
int
process_ops_to_dequeue(struct ccp_qp *qp,
		       struct rte_crypto_op **op,
		       uint16_t nb_ops,
		       uint16_t *total_nb_ops)
{
	struct ccp_batch_info *b_info;
	uint32_t cur_head_offset;

	if (qp->b_info != NULL) {
		b_info = qp->b_info;
		/* Batch already confirmed complete on a previous call */
		if (unlikely(b_info->op_idx > 0))
			goto success;
	} else if (rte_ring_dequeue(qp->processed_pkts,
				    (void **)&b_info))
		return 0;

	/* All ops handled on the CPU path: nothing to wait for */
	if (b_info->auth_ctr == b_info->opcnt)
		goto success;
	*total_nb_ops = b_info->total_nb_ops;
	cur_head_offset = CCP_READ_REG(b_info->cmd_q->reg_base,
				       CMD_Q_HEAD_LO_BASE);

	/* If the hardware head is still inside [head, tail) the batch is
	 * in flight; park it on the qp and report nothing yet.  The two
	 * branches handle the descriptor ring wrapping around.
	 */
	if (b_info->head_offset < b_info->tail_offset) {
		if ((cur_head_offset >= b_info->head_offset) &&
		    (cur_head_offset < b_info->tail_offset)) {
			qp->b_info = b_info;
			return 0;
		}
	} else if (b_info->tail_offset != b_info->head_offset) {
		if ((cur_head_offset >= b_info->head_offset) ||
		    (cur_head_offset < b_info->tail_offset)) {
			qp->b_info = b_info;
			return 0;
		}
	}


success:
	*total_nb_ops = b_info->total_nb_ops;
	nb_ops = ccp_prepare_ops(qp, op, b_info, nb_ops);
	rte_atomic64_add(&b_info->cmd_q->free_slots, b_info->desccnt);
	b_info->desccnt = 0;
	if (b_info->opcnt > 0) {
		/* Batch not fully drained; keep it for the next call */
		qp->b_info = b_info;
	} else {
		rte_mempool_put(qp->batch_mp, (void *)b_info);
		qp->b_info = NULL;
	}

	return nb_ops;
}
2960