Searched refs:data_len (Results 1 – 25 of 232) sorted by relevance

/dpdk/lib/telemetry/
telemetry_data.c
20 d->data_len = 0; in rte_tel_data_start_array()
28 d->data_len = 0; in rte_tel_data_start_dict()
49 if (d->data_len >= RTE_TEL_MAX_ARRAY_ENTRIES) in rte_tel_data_add_array_string()
61 if (d->data_len >= RTE_TEL_MAX_ARRAY_ENTRIES) in rte_tel_data_add_array_int()
63 d->data.array[d->data_len++].ival = x; in rte_tel_data_add_array_int()
74 d->data.array[d->data_len++].u64val = x; in rte_tel_data_add_array_u64()
104 if (d->data_len >= RTE_TEL_MAX_DICT_ENTRIES) in rte_tel_data_add_dict_string()
107 d->data_len++; in rte_tel_data_add_dict_string()
126 d->data_len++; in rte_tel_data_add_dict_int()
143 d->data_len++; in rte_tel_data_add_dict_u64()
[all …]
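
The telemetry hits above all follow one pattern: data_len doubles as the entry count and as the bound check against a fixed-size array or dict. A minimal self-contained sketch of that pattern in C, with a simplified struct standing in for DPDK's rte_tel_data (MAX_ENTRIES and the field layout here are assumptions, not the real definitions):

#include <errno.h>
#include <stdint.h>

#define MAX_ENTRIES 512                 /* stand-in for RTE_TEL_MAX_ARRAY_ENTRIES */

struct tel_data {
        unsigned int data_len;          /* number of entries currently stored */
        union {
                int64_t ival;
                uint64_t u64val;
        } array[MAX_ENTRIES];
};

static int
tel_data_add_array_int(struct tel_data *d, int64_t x)
{
        if (d->data_len >= MAX_ENTRIES)
                return -ENOSPC;                 /* container full: reject the entry */
        d->array[d->data_len++].ival = x;       /* append, then advance the count */
        return 0;
}
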
/dpdk/lib/net/
rte_net_crc.c
123 uint32_t data_len, in crc32_eth_calc_lut() argument
127 while (data_len--) in crc32_eth_calc_lut()
148 data_len, in rte_crc16_ccitt_handler()
158 data_len, in rte_crc32_eth_handler()
266 return handlers[RTE_NET_CRC16_CCITT](data, data_len); in rte_crc16_ccitt_default_handler()
278 return handlers[RTE_NET_CRC32_ETH](data, data_len); in rte_crc32_eth_default_handler()
281 return handlers[RTE_NET_CRC32_ETH](data, data_len); in rte_crc32_eth_default_handler()
284 return handlers[RTE_NET_CRC32_ETH](data, data_len); in rte_crc32_eth_default_handler()
286 return handlers[RTE_NET_CRC32_ETH](data, data_len); in rte_crc32_eth_default_handler()
322 uint32_t data_len, in rte_net_crc_calc() argument
[all …]
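
The crc32_eth_calc_lut() hits show the classic byte-wise table-driven CRC loop: data_len counts the bytes left and each byte costs one table lookup. A self-contained sketch of that technique (the table generator and the reflected Ethernet polynomial below are standard textbook material, not copied from the DPDK source):

#include <stdint.h>

static uint32_t crc32_lut[256];

static void
crc32_lut_init(void)
{
        /* reflected CRC-32 (Ethernet) polynomial */
        for (uint32_t i = 0; i < 256; i++) {
                uint32_t c = i;
                for (int k = 0; k < 8; k++)
                        c = (c & 1) ? (c >> 1) ^ 0xEDB88320u : c >> 1;
                crc32_lut[i] = c;
        }
}

static uint32_t
crc32_calc_lut(const uint8_t *data, uint32_t data_len, uint32_t crc)
{
        while (data_len--)              /* one lookup per remaining byte */
                crc = crc32_lut[(crc ^ *data++) & 0xff] ^ (crc >> 8);
        return crc;
}
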
net_crc_avx512.c
101 offset = data_len - n; in last_two_xmm()
191 if (data_len > 255) { in crc32_eth_calc_vpclmulqdq()
229 if (n != data_len) in crc32_eth_calc_vpclmulqdq()
233 if (data_len > 31) { in crc32_eth_calc_vpclmulqdq()
245 if (n != data_len) in crc32_eth_calc_vpclmulqdq()
248 } else if (data_len > 16) { in crc32_eth_calc_vpclmulqdq()
254 if (n != data_len) in crc32_eth_calc_vpclmulqdq()
267 if (data_len > 3) { in crc32_eth_calc_vpclmulqdq()
269 &shf_table[data_len]); in crc32_eth_calc_vpclmulqdq()
407 data_len, in rte_crc16_ccitt_avx512_handler()
[all …]
net_crc_neon.c
127 uint32_t data_len, in crc32_eth_calc_pmull() argument
141 if (unlikely(data_len < 32)) { in crc32_eth_calc_pmull()
142 if (unlikely(data_len == 16)) { in crc32_eth_calc_pmull()
149 if (unlikely(data_len < 16)) { in crc32_eth_calc_pmull()
154 memcpy(buffer, data, data_len); in crc32_eth_calc_pmull()
158 if (unlikely(data_len < 4)) { in crc32_eth_calc_pmull()
180 for (n = 16; (n + 16) <= data_len; n += 16) { in crc32_eth_calc_pmull()
186 if (likely(n < data_len)) { in crc32_eth_calc_pmull()
188 uint32_t rem = data_len & 15; in crc32_eth_calc_pmull()
247 data_len, in rte_crc16_ccitt_neon_handler()
[all …]
net_crc_sse.c
153 uint32_t data_len, in crc32_eth_calc_pclmulqdq() argument
168 if (unlikely(data_len < 32)) { in crc32_eth_calc_pclmulqdq()
169 if (unlikely(data_len == 16)) { in crc32_eth_calc_pclmulqdq()
176 if (unlikely(data_len < 16)) { in crc32_eth_calc_pclmulqdq()
181 memcpy(buffer, data, data_len); in crc32_eth_calc_pclmulqdq()
185 if (unlikely(data_len < 4)) { in crc32_eth_calc_pclmulqdq()
186 fold = xmm_shift_left(fold, 8 - data_len); in crc32_eth_calc_pclmulqdq()
213 if (likely(n < data_len)) { in crc32_eth_calc_pclmulqdq()
231 &shf_table[data_len & 15]); in crc32_eth_calc_pclmulqdq()
309 data_len, in rte_crc16_ccitt_sse42_handler()
[all …]
net_crc.h
18 rte_crc16_ccitt_sse42_handler(const uint8_t *data, uint32_t data_len);
21 rte_crc32_eth_sse42_handler(const uint8_t *data, uint32_t data_len);
29 rte_crc16_ccitt_avx512_handler(const uint8_t *data, uint32_t data_len);
32 rte_crc32_eth_avx512_handler(const uint8_t *data, uint32_t data_len);
40 rte_crc16_ccitt_neon_handler(const uint8_t *data, uint32_t data_len);
43 rte_crc32_eth_neon_handler(const uint8_t *data, uint32_t data_len);
/dpdk/app/test/
test_cryptodev.h
120 for (m = mbuf; (m != NULL) && (offset > m->data_len); m = m->next) in pktmbuf_write()
121 offset -= m->data_len; in pktmbuf_write()
123 l = m->data_len - offset; in pktmbuf_write()
138 l = m->data_len; in pktmbuf_write()
154 offset -= m->data_len; in pktmbuf_mtod_offset()
168 offset -= m->data_len; in pktmbuf_iova_offset()
183 int data_len = 0; in create_segmented_mbuf() local
216 data_len = size > t_len ? t_len : size; in create_segmented_mbuf()
217 dst = (uint8_t *)rte_pktmbuf_append(m, data_len); in create_segmented_mbuf()
220 data_len); in create_segmented_mbuf()
[all …]
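
pktmbuf_write() above first skips whole segments until it reaches the one containing the requested offset, then copies across segment boundaries, taking at most data_len bytes from each segment. A reduced sketch of that chain walk over a hypothetical segment struct (only the fields the walk needs, not the real rte_mbuf):

#include <stddef.h>
#include <stdint.h>
#include <string.h>

struct seg {
        uint8_t *buf;                   /* start of this segment's data */
        uint16_t data_len;              /* bytes of data in this segment */
        struct seg *next;
};

/* Copy len bytes from src into the chain starting at byte offset.
 * Returns 0 on success, -1 if the chain is too short. */
static int
seg_chain_write(struct seg *m, size_t offset, const uint8_t *src, size_t len)
{
        /* skip whole segments that lie before the target offset */
        while (m != NULL && offset >= m->data_len) {
                offset -= m->data_len;
                m = m->next;
        }

        while (m != NULL && len > 0) {
                size_t l = m->data_len - offset;        /* room left in this segment */
                if (l > len)
                        l = len;
                memcpy(m->buf + offset, src, l);
                src += l;
                len -= l;
                offset = 0;                             /* later segments start at 0 */
                m = m->next;
        }
        return len == 0 ? 0 : -1;
}
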
/dpdk/examples/vhost_blk/
blk.c
75 if (unlikely(task->data_len > (bdev->blockcnt * bdev->blocklen))) { in vhost_bdev_process_blk_commands()
82 if (unlikely(task->data_len == 0 || in vhost_bdev_process_blk_commands()
83 (task->data_len & (512 - 1)) != 0)) { in vhost_bdev_process_blk_commands()
94 task->req->sector, task->data_len); in vhost_bdev_process_blk_commands()
97 if (unlikely(task->data_len == 0 || in vhost_bdev_process_blk_commands()
98 (task->data_len & (512 - 1)) != 0)) { in vhost_bdev_process_blk_commands()
109 task->req->sector, task->data_len); in vhost_bdev_process_blk_commands()
112 if (!task->iovs_cnt || task->data_len) in vhost_bdev_process_blk_commands()
114 used_len = RTE_MIN((size_t)VIRTIO_BLK_ID_BYTES, task->data_len); in vhost_bdev_process_blk_commands()
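
blk.c above validates task->data_len before issuing the block I/O: the length must be non-zero, a multiple of the 512-byte sector size, and no larger than the backing device. A condensed sketch of those checks (names simplified from the hits; not the actual vhost_blk code):

#include <stdint.h>

#define BLK_SECTOR_SIZE 512u

static int
blk_request_len_valid(uint64_t data_len, uint64_t blockcnt, uint32_t blocklen)
{
        if (data_len == 0 || (data_len & (BLK_SECTOR_SIZE - 1)) != 0)
                return 0;                       /* empty or not sector aligned */
        if (data_len > blockcnt * blocklen)
                return 0;                       /* larger than the device */
        return 1;
}
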
/dpdk/app/test-regex/
main.c
72 long data_len; member
317 int data_len = 0; in regex_create_segmented_mbuf() local
355 data_len); in regex_create_segmented_mbuf()
361 src += data_len; in regex_create_segmented_mbuf()
362 size -= data_len; in regex_create_segmented_mbuf()
383 long data_len = rgxc->data_len; in run_regex() local
462 remainder = data_len % nb_jobs; in run_regex()
709 long data_len; in main() local
739 if (data_len <= 0) in main()
742 job_len = data_len / nb_jobs; in main()
[all …]
/dpdk/drivers/net/mlx4/
mlx4_rxtx.c
480 uint16_t data_len = sbuf->data_len - tinfo->tso_header_size; in mlx4_tx_burst_fill_tso_dsegs() local
517 data_len : in mlx4_tx_burst_fill_tso_dsegs()
524 data_len = sbuf->data_len; in mlx4_tx_burst_fill_tso_dsegs()
533 data_len : in mlx4_tx_burst_fill_tso_dsegs()
540 data_len = sbuf->data_len; in mlx4_tx_burst_fill_tso_dsegs()
549 data_len : in mlx4_tx_burst_fill_tso_dsegs()
556 data_len = sbuf->data_len; in mlx4_tx_burst_fill_tso_dsegs()
565 data_len : in mlx4_tx_burst_fill_tso_dsegs()
572 data_len = sbuf->data_len; in mlx4_tx_burst_fill_tso_dsegs()
1296 rep->data_len = seg->data_len; in mlx4_rx_burst()
[all …]
/dpdk/app/test-crypto-perf/
cperf_test_common.c
15 uint16_t data_len; member
22 uint16_t headroom, uint16_t data_len) in fill_single_seg_mbuf() argument
32 m->data_len = data_len; in fill_single_seg_mbuf()
33 m->pkt_len = data_len; in fill_single_seg_mbuf()
49 uint16_t headroom, uint16_t data_len, uint16_t segments_nb) in fill_multi_seg_mbuf() argument
64 m->data_len = data_len; in fill_multi_seg_mbuf()
121 params->data_len); in mempool_obj_init()
125 params->data_len, params->segments_nb); in mempool_obj_init()
134 params->data_len); in mempool_obj_init()
216 .data_len = options->segment_sz - in cperf_alloc_common_memory()
/dpdk/drivers/crypto/qat/dev/
qat_crypto_pmd_gen3.c
194 data_len); in enqueue_one_auth_job_gen3()
416 int32_t data_len; in qat_sym_dp_enqueue_single_aead_gen3() local
427 if (unlikely(data_len < 0)) in qat_sym_dp_enqueue_single_aead_gen3()
431 (uint32_t)data_len); in qat_sym_dp_enqueue_single_aead_gen3()
455 int32_t data_len; in qat_sym_dp_enqueue_aead_jobs_gen3() local
486 if (unlikely(data_len < 0)) in qat_sym_dp_enqueue_aead_jobs_gen3()
491 (uint32_t)data_len); in qat_sym_dp_enqueue_aead_jobs_gen3()
526 int32_t data_len; in qat_sym_dp_enqueue_single_auth_gen3() local
538 if (unlikely(data_len < 0)) in qat_sym_dp_enqueue_single_auth_gen3()
542 (uint32_t)data_len); in qat_sym_dp_enqueue_single_auth_gen3()
[all …]
qat_sym_pmd_gen1.c
470 int32_t data_len; in qat_sym_dp_enqueue_single_cipher_gen1() local
510 int32_t data_len; in qat_sym_dp_enqueue_cipher_jobs_gen1() local
544 (uint32_t)data_len); in qat_sym_dp_enqueue_cipher_jobs_gen1()
578 int32_t data_len; in qat_sym_dp_enqueue_single_auth_gen1() local
594 (uint32_t)data_len); in qat_sym_dp_enqueue_single_auth_gen1()
618 int32_t data_len; in qat_sym_dp_enqueue_auth_jobs_gen1() local
686 int32_t data_len; in qat_sym_dp_enqueue_single_chain_gen1() local
728 int32_t data_len; in qat_sym_dp_enqueue_chain_jobs_gen1() local
804 int32_t data_len; in qat_sym_dp_enqueue_single_aead_gen1() local
819 (uint32_t)data_len); in qat_sym_dp_enqueue_single_aead_gen1()
[all …]
qat_crypto_pmd_gen4.c
112 union rte_crypto_sym_ofs ofs, uint32_t data_len) in enqueue_one_aead_job_gen4() argument
126 cipher_param->cipher_length = data_len - in enqueue_one_aead_job_gen4()
245 int32_t data_len; in qat_sym_dp_enqueue_single_aead_gen4() local
254 data_len = qat_sym_build_req_set_data(req, user_data, cookie, in qat_sym_dp_enqueue_single_aead_gen4()
256 if (unlikely(data_len < 0)) in qat_sym_dp_enqueue_single_aead_gen4()
260 (uint32_t)data_len); in qat_sym_dp_enqueue_single_aead_gen4()
284 int32_t data_len; in qat_sym_dp_enqueue_aead_jobs_gen4() local
304 data_len = qat_sym_build_req_set_data(req, in qat_sym_dp_enqueue_aead_jobs_gen4()
309 data_len = qat_sym_build_req_set_data(req, in qat_sym_dp_enqueue_aead_jobs_gen4()
315 if (unlikely(data_len < 0)) in qat_sym_dp_enqueue_aead_jobs_gen4()
[all …]
/dpdk/drivers/compress/isal/
isal_compress_pmd.c
247 while (remaining_offset >= src->data_len) { in chained_mbuf_compression()
248 remaining_offset -= src->data_len; in chained_mbuf_compression()
257 while (remaining_offset >= dst->data_len) { in chained_mbuf_compression()
258 remaining_offset -= dst->data_len; in chained_mbuf_compression()
273 if (remaining_data <= src->data_len) in chained_mbuf_compression()
294 RTE_MIN(remaining_data, src->data_len); in chained_mbuf_compression()
309 qp->stream->avail_out = dst->data_len; in chained_mbuf_compression()
338 src_remaining_offset -= src->data_len; in chained_mbuf_decompression()
348 dst_remaining_offset -= dst->data_len; in chained_mbuf_decompression()
363 consumed_data = src->data_len; in chained_mbuf_decompression()
[all …]
/dpdk/lib/hash/
rte_hash_crc.h
168 rte_hash_crc(const void *data, uint32_t data_len, uint32_t init_val) in rte_hash_crc() argument
173 for (i = 0; i < data_len / 8; i++) { in rte_hash_crc()
178 if (data_len & 0x4) { in rte_hash_crc()
183 if (data_len & 0x2) { in rte_hash_crc()
188 if (data_len & 0x1) in rte_hash_crc()
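
rte_hash_crc() above consumes the buffer eight bytes at a time and then finishes a possible 4-, 2- and 1-byte tail by testing individual bits of data_len. A portable sketch of that structure, with a trivial XOR mix standing in for the hardware CRC32 steps the real header uses (mix() is a placeholder, not the DPDK primitive):

#include <stdint.h>
#include <string.h>

/* placeholder mixing step; the real code uses CRC32 instructions/tables */
static uint32_t
mix(uint64_t v, uint32_t crc)
{
        return crc ^ (uint32_t)v ^ (uint32_t)(v >> 32);
}

static uint32_t
hash_crc_sketch(const void *data, uint32_t data_len, uint32_t init_val)
{
        const uint8_t *p = data;
        uint32_t i, crc = init_val;
        uint64_t v64;
        uint32_t v32;
        uint16_t v16;

        for (i = 0; i < data_len / 8; i++) {    /* bulk: 8 bytes per step */
                memcpy(&v64, p, 8);
                p += 8;
                crc = mix(v64, crc);
        }
        if (data_len & 0x4) {                   /* 4-byte tail */
                memcpy(&v32, p, 4);
                p += 4;
                crc = mix(v32, crc);
        }
        if (data_len & 0x2) {                   /* 2-byte tail */
                memcpy(&v16, p, 2);
                p += 2;
                crc = mix(v16, crc);
        }
        if (data_len & 0x1)                     /* final odd byte */
                crc = mix(*p, crc);
        return crc;
}
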
/dpdk/drivers/crypto/ipsec_mb/
pmd_chacha_poly.c
106 uint32_t part_len, data_len; in chacha20_poly1305_crypto_op() local
116 while (offset >= m_src->data_len && data_length != 0) { in chacha20_poly1305_crypto_op()
117 offset -= m_src->data_len; in chacha20_poly1305_crypto_op()
125 data_len = m_src->data_len - offset; in chacha20_poly1305_crypto_op()
126 part_len = (data_len < data_length) ? data_len : in chacha20_poly1305_crypto_op()
167 part_len = (m_src->data_len < total_len) ? in chacha20_poly1305_crypto_op()
168 m_src->data_len : total_len; in chacha20_poly1305_crypto_op()
208 part_len = (m_src->data_len < total_len) ? in chacha20_poly1305_crypto_op()
209 m_src->data_len : total_len; in chacha20_poly1305_crypto_op()
/dpdk/lib/mbuf/
rte_mbuf.h
854 m->data_len = 0; in rte_pktmbuf_reset()
1062 m->data_len = 0; in rte_pktmbuf_attach_extbuf()
1145 mi->data_len = m->data_len; in rte_pktmbuf_attach()
1151 mi->pkt_len = mi->data_len; in rte_pktmbuf_attach()
1251 m->data_len = 0; in rte_pktmbuf_detach()
1495 m->data_len); in rte_pktmbuf_tailroom()
1561 m->data_len = (uint16_t)(m->data_len + len); in rte_pktmbuf_prepend()
1594 m_last->data_len = (uint16_t)(m_last->data_len + len); in rte_pktmbuf_append()
1623 m->data_len = (uint16_t)(m->data_len - len); in rte_pktmbuf_adj()
1653 m_last->data_len = (uint16_t)(m_last->data_len - len); in rte_pktmbuf_trim()
[all …]
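
The rte_mbuf.h hits show append/prepend/trim keeping two counters in step: the owning segment's data_len and the head segment's pkt_len. A reduced sketch of the append side, with tailroom carried as an explicit field rather than derived from the real mbuf layout (an illustration, not the rte_pktmbuf_append() implementation):

#include <stddef.h>
#include <stdint.h>

struct mini_mbuf {
        uint32_t pkt_len;       /* total length, meaningful on the head segment */
        uint16_t data_len;      /* bytes used in this segment */
        uint16_t tailroom;      /* bytes still free at the end of this segment */
        struct mini_mbuf *next;
};

/* Append len bytes to the last segment; returns 0 on success, -1 if the
 * last segment lacks tailroom (the real API would return NULL). */
static int
mini_pktmbuf_append(struct mini_mbuf *head, uint16_t len)
{
        struct mini_mbuf *last = head;

        while (last->next != NULL)
                last = last->next;
        if (len > last->tailroom)
                return -1;
        last->tailroom -= len;
        last->data_len = (uint16_t)(last->data_len + len);
        head->pkt_len += len;
        return 0;
}
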
/dpdk/lib/gso/
gso_common.c
23 hdr_segment->data_len = pkt_hdr_offset; in hdr_segment_init()
95 if (pyld_len + pkt_in_data_pos > pkt_in->data_len) in gso_do_segment()
96 pyld_len = pkt_in->data_len - pkt_in_data_pos; in gso_do_segment()
100 pyld_segment->data_len = pyld_len; in gso_do_segment()
110 if (pkt_in_data_pos == pkt_in->data_len) { in gso_do_segment()
/dpdk/lib/ip_frag/
rte_ipv4_fragmentation.c
92 if (unlikely(pkt_in->data_len < header_len) || in rte_ipv4_fragment_packet()
133 out_pkt->data_len = header_len; in rte_ipv4_fragment_packet()
156 if (len > (in_seg->data_len - in_seg_data_pos)) { in rte_ipv4_fragment_packet()
157 len = in_seg->data_len - in_seg_data_pos; in rte_ipv4_fragment_packet()
160 out_seg->data_len = (uint16_t)len; in rte_ipv4_fragment_packet()
172 if (unlikely(in_seg_data_pos == in_seg->data_len)) { in rte_ipv4_fragment_packet()
rte_ipv6_fragmentation.c
126 out_pkt->data_len = sizeof(struct rte_ipv6_hdr) + in rte_ipv6_fragment_packet()
151 if (len > (in_seg->data_len - in_seg_data_pos)) { in rte_ipv6_fragment_packet()
152 len = in_seg->data_len - in_seg_data_pos; in rte_ipv6_fragment_packet()
155 out_seg->data_len = (uint16_t)len; in rte_ipv6_fragment_packet()
167 if (unlikely(in_seg_data_pos == in_seg->data_len)) { in rte_ipv6_fragment_packet()
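
Both fragmentation routines above size each copy step the same way: take at most what is left in the current fragment, clamped to what is left in the current input segment (in_seg->data_len - in_seg_data_pos). A small helper expressing just that bound (hypothetical name, not part of the library):

#include <stdint.h>

static uint32_t
frag_copy_len(uint32_t frag_space, uint16_t in_seg_data_len,
              uint16_t in_seg_data_pos)
{
        uint32_t len = frag_space;

        if (len > (uint32_t)(in_seg_data_len - in_seg_data_pos))
                len = (uint32_t)(in_seg_data_len - in_seg_data_pos);
        return len;
}
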
/dpdk/drivers/common/dpaax/caamflib/desc/
common.h
65 unsigned int *data_len, in rta_inline_query() argument
74 if (rem_bytes - (int)(data_len[i] + in rta_inline_query()
76 rem_bytes -= data_len[i]; in rta_inline_query()
/dpdk/drivers/net/i40e/
i40e_rxtx_vec_common.h
31 rx_bufs[buf_idx]->data_len += rxq->crc_len; in reassemble_packets()
34 start->pkt_len += rx_bufs[buf_idx]->data_len; in reassemble_packets()
44 if (end->data_len > rxq->crc_len) in reassemble_packets()
45 end->data_len -= rxq->crc_len; in reassemble_packets()
53 secondlast->data_len -= (rxq->crc_len - in reassemble_packets()
54 end->data_len); in reassemble_packets()
69 rx_bufs[buf_idx]->data_len += rxq->crc_len; in reassemble_packets()
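
reassemble_packets() above strips the 4-byte Ethernet FCS from a chained packet: if the final segment holds more than crc_len bytes it is simply shortened, otherwise that segment is freed and the leftover is taken off the second-to-last one. A sketch of that tail handling over a simplified two-field segment chain (free_seg() is a hypothetical stand-in for rte_pktmbuf_free_seg()):

#include <stddef.h>
#include <stdint.h>
#include <stdlib.h>

struct seg {
        uint16_t data_len;
        struct seg *next;
};

static void
free_seg(struct seg *s)                 /* stand-in for rte_pktmbuf_free_seg() */
{
        free(s);
}

/* Remove crc_len trailing bytes from the chain; assumes the chain has at
 * least two segments and carries more than crc_len bytes of data. */
static void
strip_crc(struct seg *head, uint16_t crc_len)
{
        struct seg *secondlast = head, *end = head->next;

        while (end->next != NULL) {             /* find the last two segments */
                secondlast = end;
                end = end->next;
        }

        if (end->data_len > crc_len) {
                end->data_len = (uint16_t)(end->data_len - crc_len);
        } else {
                /* CRC spans the segment boundary: drop the last segment and
                 * trim the remainder from the one before it. */
                secondlast->data_len = (uint16_t)(secondlast->data_len -
                                                  (crc_len - end->data_len));
                secondlast->next = NULL;
                free_seg(end);
        }
}
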
/dpdk/drivers/net/hns3/
hns3_rxtx_vec_sve.c
122 RTE_BUILD_BUG_ON(offsetof(struct rte_mbuf, data_len) != in hns3_recv_burst_vec_sve()
125 offsetof(struct rte_mbuf, data_len) + 2); in hns3_recv_burst_vec_sve()
390 svuint64_t base_addr, buf_iova, data_off, data_len, addr; in hns3_tx_fill_hw_ring_sve() local
404 data_len = svadd_n_u64_z(pg, base_addr, in hns3_tx_fill_hw_ring_sve()
405 offsetof(struct rte_mbuf, data_len)); in hns3_tx_fill_hw_ring_sve()
413 data_len = svld1_gather_u64base_u64(pg, data_len); in hns3_tx_fill_hw_ring_sve()
417 data_len = svand_n_u64_z(pg, data_len, DATA_OFF_LEN_VAL_MASK); in hns3_tx_fill_hw_ring_sve()
421 data_len = svlsl_n_u64_z(pg, data_len, HNS3_UINT16_BIT); in hns3_tx_fill_hw_ring_sve()
427 offsets, data_len); in hns3_tx_fill_hw_ring_sve()
/dpdk/drivers/crypto/dpaa_sec/
dpaa_sec_raw_dp.c
112 int data_len, data_offset, total_len = 0; in build_dpaa_raw_dp_auth_fd() local
152 in_sg->length = data_len; in build_dpaa_raw_dp_auth_fd()
183 sg->length = data_len; in build_dpaa_raw_dp_auth_fd()
192 if (data_len > (int)sgl->vec[i].len) in build_dpaa_raw_dp_auth_fd()
195 sg->length = data_len; in build_dpaa_raw_dp_auth_fd()
197 data_len = data_len - sg->length; in build_dpaa_raw_dp_auth_fd()
198 if (data_len < 1) in build_dpaa_raw_dp_auth_fd()
239 int data_len = 0, aead_len = 0; in build_raw_cipher_auth_gcm_sg() local
243 data_len += sgl->vec[i].len; in build_raw_cipher_auth_gcm_sg()
405 data_len += sgl->vec[i].len; in build_dpaa_raw_dp_chain_fd()
[all …]
