/dpdk/lib/mbuf/
rte_mbuf.h
    385   m->refcnt = (uint16_t)(m->refcnt + value);  in __rte_mbuf_refcnt_update()
    589   rte_mempool_put(m->pool, m);  in rte_mbuf_raw_free()
    1172  m->shinfo->free_cb(m->buf_addr, m->shinfo->fcb_opaque);  in __rte_pktmbuf_free_extbuf()
    1247  m->buf_addr = (char *)m + mbuf_size;  in rte_pktmbuf_detach()
    1362  m = rte_pktmbuf_prefree_seg(m);  in rte_pktmbuf_free_seg()
    1466  } while ((m = m->next) != NULL);  in rte_pktmbuf_refcnt_update()
    1510  m = m->next;  in rte_pktmbuf_lastseg()
    1562  m->pkt_len = (m->pkt_len + len);  in rte_pktmbuf_prepend()
    1595  m->pkt_len = (m->pkt_len + len);  in rte_pktmbuf_append()
    1625  m->pkt_len = (m->pkt_len - len);  in rte_pktmbuf_adj()
    [all …]
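The rte_mbuf.h hits above are all inside the packet data-manipulation helpers (refcount update, prepend, append, adj, free). A minimal caller-side sketch of those helpers, assuming a packet mempool `pool` created elsewhere with rte_pktmbuf_pool_create(); sizes are illustrative and error handling is trimmed:

    #include <string.h>

    #include <rte_mbuf.h>

    static int
    mbuf_resize_demo(struct rte_mempool *pool)
    {
        struct rte_mbuf *m = rte_pktmbuf_alloc(pool);
        char *p;

        if (m == NULL)
            return -1;

        p = rte_pktmbuf_append(m, 64);   /* grow the tail by 64 bytes */
        if (p != NULL)
            memset(p, 0, 64);

        p = rte_pktmbuf_prepend(m, 14);  /* grow the head, e.g. for an L2 header */
        if (p != NULL)
            memset(p, 0, 14);

        rte_pktmbuf_adj(m, 14);          /* strip those 14 bytes again */
        rte_pktmbuf_free(m);             /* frees every segment of the chain */
        return 0;
    }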
|
rte_mbuf.c
    91    m->buf_addr = (char *)m + mbuf_size;  in rte_pktmbuf_init()
    411   if (m->data_len > m->pkt_len) {  in rte_mbuf_check()
    420   if (m->data_off > m->buf_len) {  in rte_mbuf_check()
    424   if (m->data_off + m->data_len > m->buf_len) {  in rte_mbuf_check()
    430   } while ((m = m->next) != NULL);  in rte_mbuf_check()
    465   m = rte_pktmbuf_prefree_seg(m);  in __rte_pktmbuf_free_seg_via_array()
    673   m, m->buf_iova, m->buf_len);  in rte_pktmbuf_dump()
    675   m->pkt_len, m->ol_flags, m->nb_segs, m->port);  in rte_pktmbuf_dump()
    688   m, rte_pktmbuf_mtod(m, void *),  in rte_pktmbuf_dump()
    689   m->data_len, m->data_off, rte_mbuf_refcnt_read(m));  in rte_pktmbuf_dump()
    [all …]
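The rte_mbuf.c hits sit in the mbuf consistency checker and the debug dump routine. A short sketch of how the two are typically combined; the three-argument rte_mbuf_check() prototype shown here matches recent releases (older ones only offer rte_mbuf_sanity_check()):

    #include <stdio.h>

    #include <rte_mbuf.h>

    /* Validate an mbuf before handing it to a driver; dump it on failure. */
    static void
    mbuf_debug_demo(struct rte_mbuf *m)
    {
        const char *reason = NULL;

        if (rte_mbuf_check(m, 1 /* is_header */, &reason) != 0) {
            printf("bad mbuf: %s\n", reason);
            rte_pktmbuf_dump(stdout, m, rte_pktmbuf_data_len(m));
        }
    }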
|
/dpdk/lib/meter/
rte_meter.h
    428   te = m->te;  in rte_meter_srtcm_color_blind_check()
    439   m->te = te;  in rte_meter_srtcm_color_blind_check()
    449   m->tc = tc;  in rte_meter_srtcm_color_blind_check()
    450   m->te = te;  in rte_meter_srtcm_color_blind_check()
    470   te = m->te;  in rte_meter_srtcm_color_aware_check()
    491   m->tc = tc;  in rte_meter_srtcm_color_aware_check()
    492   m->te = te;  in rte_meter_srtcm_color_aware_check()
    619   m->tc = tc;  in rte_meter_trtcm_rfc4115_color_blind_check()
    620   m->te = te;  in rte_meter_trtcm_rfc4115_color_blind_check()
    664   m->tc = tc;  in rte_meter_trtcm_rfc4115_color_aware_check()
    [all …]
|
rte_meter.c
    68    if ((m == NULL) || (p == NULL))  in rte_meter_srtcm_config()
    72    m->time = rte_get_tsc_cycles();  in rte_meter_srtcm_config()
    73    m->tc = p->cbs;  in rte_meter_srtcm_config()
    74    m->te = p->ebs;  in rte_meter_srtcm_config()
    111   if ((m == NULL) || (p == NULL))  in rte_meter_trtcm_config()
    115   m->time_tc = m->time_tp = rte_get_tsc_cycles();  in rte_meter_trtcm_config()
    116   m->tc = p->cbs;  in rte_meter_trtcm_config()
    117   m->tp = p->pbs;  in rte_meter_trtcm_config()
    157   m->time_tc = m->time_te = rte_get_tsc_cycles();  in rte_meter_trtcm_rfc4115_config()
    158   m->tc = p->cbs;  in rte_meter_trtcm_rfc4115_config()
    [all …]
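The rte_meter.c hits are the run-time initialisation of the srTCM/trTCM token buckets from a profile. A hedged sketch of the profile-based srTCM flow those lines belong to; the rates and burst sizes are arbitrary example values, and the exact prototypes (which changed when profiles were introduced) should be checked against the rte_meter.h in your release:

    #include <rte_cycles.h>
    #include <rte_meter.h>

    static enum rte_color
    meter_demo(uint32_t pkt_len)
    {
        static struct rte_meter_srtcm_profile profile;
        static struct rte_meter_srtcm meter;
        static int configured;

        if (!configured) {
            struct rte_meter_srtcm_params params = {
                .cir = 1000000,  /* committed rate, bytes/s */
                .cbs = 2048,     /* committed burst, bytes  */
                .ebs = 2048,     /* excess burst, bytes     */
            };

            if (rte_meter_srtcm_profile_config(&profile, &params) != 0 ||
                rte_meter_srtcm_config(&meter, &profile) != 0)
                return RTE_COLOR_RED;
            configured = 1;
        }

        /* Color one packet against the committed/excess buckets. */
        return rte_meter_srtcm_color_blind_check(&meter, &profile,
                                                 rte_get_tsc_cycles(), pkt_len);
    }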
|
/dpdk/app/test/
test_cryptodev.h
    120   for (m = mbuf; (m != NULL) && (offset > m->data_len); m = m->next)  in pktmbuf_write()
    136   for (m = m->next; (m != NULL) && (n > 0); m = m->next) {  in pktmbuf_write()
    138   l = m->data_len;  in pktmbuf_write()
    153   for (m = mbuf; (m != NULL) && (offset > m->data_len); m = m->next)  in pktmbuf_mtod_offset()
    156   if (m == NULL) {  in pktmbuf_mtod_offset()
    167   for (m = mbuf; (m != NULL) && (offset > m->data_len); m = m->next)  in pktmbuf_iova_offset()
    170   if (m == NULL) {  in pktmbuf_iova_offset()
    206   mbuf = m;  in create_segmented_mbuf()
    208   if (m == NULL) {  in create_segmented_mbuf()
    214   memset(m->buf_addr, pattern, m->buf_len);  in create_segmented_mbuf()
    [all …]
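The loops above all walk a segmented mbuf chain until the requested byte offset falls inside the current segment. The same pattern as a stand-alone helper; this is purely illustrative and not part of the DPDK API:

    #include <rte_mbuf.h>

    /* Return a pointer to byte "offset" of a possibly multi-segment packet,
     * or NULL if the offset is past the end of the chain. */
    static void *
    seg_offset_demo(struct rte_mbuf *mbuf, uint32_t offset)
    {
        struct rte_mbuf *m;

        for (m = mbuf; m != NULL && offset >= m->data_len; m = m->next)
            offset -= m->data_len;

        if (m == NULL)
            return NULL;

        return rte_pktmbuf_mtod_offset(m, void *, offset);
    }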
|
test_mbuf.c
    157   if(m) {  in test_pktmbuf_with_non_ascii_data()
    303   m = NULL;  in test_one_pktmbuf()
    411   m = NULL;  in testclone_testupdate_testdetach()
    564   m = NULL;  in test_pktmbuf_copy()
    1738  m = NULL;  in test_mbuf_validate_tx_offload()
    1741  if (m) {  in test_mbuf_validate_tx_offload()
    1955  m = NULL;  in test_pktmbuf_read()
    1959  if (m) {  in test_pktmbuf_read()
    2086  m = NULL;  in test_pktmbuf_read_from_offset()
    2090  if (m) {  in test_pktmbuf_read_from_offset()
    [all …]
|
test_ring_st_peek_stress_zc.c
    13    uint32_t m;  in _st_ring_dequeue_bulk() local
    20    m = rte_ring_dequeue_zc_bulk_start(r, n, &zcd, avail);  in _st_ring_dequeue_bulk()
    21    if (m != 0) {  in _st_ring_dequeue_bulk()
    23    test_ring_copy_from(&zcd, obj, -1, m);  in _st_ring_dequeue_bulk()
    24    rte_ring_dequeue_zc_finish(r, m);  in _st_ring_dequeue_bulk()
    28    return m;  in _st_ring_dequeue_bulk()
    35    uint32_t m;  in _st_ring_enqueue_bulk() local
    43    if (m != 0) {  in _st_ring_enqueue_bulk()
    45    test_ring_copy_to(&zcd, obj, -1, m);  in _st_ring_enqueue_bulk()
    46    rte_ring_enqueue_zc_finish(r, m);  in _st_ring_enqueue_bulk()
    [all …]
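The stress test above exercises the zero-copy ring API: reserve slots, copy straight out of the ring, then publish the head update. A hedged sketch of that dequeue sequence for a pointer-element ring; the zcd field names (ptr1/n1/ptr2) and the rte_ring_peek_zc.h header name are as found in recent releases and may differ in older ones:

    #include <string.h>

    #include <rte_ring.h>
    #include <rte_ring_peek_zc.h>   /* zero-copy (ZC) ring API */

    static uint32_t
    ring_zc_dequeue_demo(struct rte_ring *r, void **obj, uint32_t n)
    {
        struct rte_ring_zc_data zcd;
        unsigned int avail;
        uint32_t m;

        /* Reserve n slots and get direct pointers into the ring storage. */
        m = rte_ring_dequeue_zc_bulk_start(r, n, &zcd, &avail);
        if (m != 0) {
            /* First contiguous area ... */
            memcpy(obj, zcd.ptr1, zcd.n1 * sizeof(*obj));
            /* ... plus the wrap-around remainder, if any. */
            if (m > zcd.n1)
                memcpy(obj + zcd.n1, zcd.ptr2,
                       (m - zcd.n1) * sizeof(*obj));
            /* Publish the consumer head update. */
            rte_ring_dequeue_zc_finish(r, m);
        }
        return m;
    }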
|
/dpdk/lib/table/
rte_table_hash_func.h
    79    k0 = k[0] & m[0];  in rte_table_hash_crc_key16()
    98    k0 = k[0] & m[0];  in rte_table_hash_crc_key24()
    99    k2 = k[2] & m[2];  in rte_table_hash_crc_key24()
    120   k0 = k[0] & m[0];  in rte_table_hash_crc_key32()
    121   k2 = k[2] & m[2];  in rte_table_hash_crc_key32()
    146   k0 = k[0] & m[0];  in rte_table_hash_crc_key40()
    147   k2 = k[2] & m[2];  in rte_table_hash_crc_key40()
    172   k0 = k[0] & m[0];  in rte_table_hash_crc_key48()
    173   k2 = k[2] & m[2];  in rte_table_hash_crc_key48()
    174   k5 = k[5] & m[5];  in rte_table_hash_crc_key48()
    [all …]
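rte_table_hash_crc_key16/24/.../48() all AND the key words with a per-table mask before feeding them to CRC, so "don't care" bytes never influence the bucket. A plain illustration of that idea built on rte_hash_crc(); this is not the unrolled 64-bit-lane code used by the table library itself:

    #include <stdint.h>

    #include <rte_hash_crc.h>

    /* Hash a 16-byte key after masking out the "don't care" bits. */
    static uint32_t
    masked_key16_hash_demo(const void *key, const void *key_mask, uint32_t seed)
    {
        const uint64_t *k = key;
        const uint64_t *m = key_mask;
        uint64_t masked[2];

        masked[0] = k[0] & m[0];
        masked[1] = k[1] & m[1];

        return rte_hash_crc(masked, sizeof(masked), seed);
    }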
|
rte_swx_table_learner.c
    107   k0 = k[0] & m[0];  in hash()
    483   m->state = 1;  in rte_swx_table_learner_lookup()
    510   m->hit = 1;  in rte_swx_table_learner_lookup()
    522   m->hit = 0;  in rte_swx_table_learner_lookup()
    523   m->state = 0;  in rte_swx_table_learner_lookup()
    531   m->hit = 0;  in rte_swx_table_learner_lookup()
    532   m->state = 0;  in rte_swx_table_learner_lookup()
    554   if (m->hit) {  in rte_swx_table_learner_add()
    590   m->hit = 1;  in rte_swx_table_learner_add()
    607   if (m->hit) {  in rte_swx_table_learner_delete()
    [all …]
|
rte_swx_table_selector.c
    93    uint64_t *m = key_mask;  in hash() local
    102   k0 = k[0] & m[0];  in hash()
    112   k0 = k[0] & m[0];  in hash()
    113   k2 = k[2] & m[2];  in hash()
    129   k0 = k[0] & m[0];  in hash()
    130   k2 = k[2] & m[2];  in hash()
    131   k5 = k[5] & m[5];  in hash()
    384   if (m->member_weight < min)  in members_min_weight_find()
    385   min = m->member_weight;  in members_min_weight_find()
    450   m->count = m->member_weight_normalized * multiplier;  in members_weight_scale()
    [all …]
|
/dpdk/drivers/common/octeontx/
octeontx_mbox.c
    139   m->tag_own++;  in mbox_wait_response()
    199   if (m->init_once)  in octeontx_mbox_set_ram_mbox_base()
    211   m->init_once = 1;  in octeontx_mbox_set_ram_mbox_base()
    223   if (m->init_once)  in octeontx_mbox_set_reg()
    231   m->reg = reg;  in octeontx_mbox_set_reg()
    235   m->init_once = 1;  in octeontx_mbox_set_reg()
    318   if (m->ready)  in octeontx_mbox_init()
    323   m->init_once = 0;  in octeontx_mbox_init()
    335   m->init_once = 0;  in octeontx_mbox_init()
    339   m->ready = 1;  in octeontx_mbox_init()
    [all …]
|
/dpdk/drivers/net/enetc/
enetc_rxtx.c
    27    struct rte_mbuf *m[ENETC_RXBD_BUNDLE];  in enetc_clean_tx_ring() local
    54    rte_pktmbuf_free_bulk(m, tx_frm_cnt);  in enetc_clean_tx_ring()
    58    m[tx_frm_cnt] = tx_swbd->buffer_addr;  in enetc_clean_tx_ring()
    72    rte_pktmbuf_free_bulk(m, tx_frm_cnt);  in enetc_clean_tx_ring()
    131   struct rte_mbuf *m[ENETC_RXBD_BUNDLE];  in enetc_refill_rx_ring() local
    148   rx_swbd->buffer_addr = m[k];  in enetc_refill_rx_ring()
    181   m->packet_type = RTE_PTYPE_L2_ETHER |  in enetc_slow_parsing()
    186   m->packet_type = RTE_PTYPE_L2_ETHER |  in enetc_slow_parsing()
    191   m->packet_type = RTE_PTYPE_L2_ETHER |  in enetc_slow_parsing()
    248   m->packet_type = RTE_PTYPE_UNKNOWN;  in enetc_slow_parsing()
    [all …]
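The enetc hits show the usual bulk alloc/free pattern around RX ring refill and TX completion reaping. A generic sketch of that pattern; ENETC_RXBD_BUNDLE is driver-internal, so 8 below is only a stand-in batch size:

    #include <rte_mbuf.h>

    static void
    mbuf_bulk_demo(struct rte_mempool *pool)
    {
        struct rte_mbuf *m[8];

        /* Allocate a whole batch in one mempool operation. */
        if (rte_pktmbuf_alloc_bulk(pool, m, 8) != 0)
            return;              /* pool exhausted, nothing was allocated */

        /* ... post the buffers to a descriptor ring here ... */

        /* Return the whole batch in one call as well. */
        rte_pktmbuf_free_bulk(m, 8);
    }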
|
/dpdk/lib/net/
rte_net.h
    117   uint64_t inner_l3_offset = m->l2_len;  in rte_net_intel_cksum_flags_prepare()
    129   inner_l3_offset += m->outer_l2_len + m->outer_l3_len;  in rte_net_intel_cksum_flags_prepare()
    146   if (unlikely(rte_pktmbuf_data_len(m) <  in rte_net_intel_cksum_flags_prepare()
    147   inner_l3_offset + m->l3_len + m->l4_len))  in rte_net_intel_cksum_flags_prepare()
    161   m->l3_len);  in rte_net_intel_cksum_flags_prepare()
    168   udp_hdr = rte_pktmbuf_mtod_offset(m,  in rte_net_intel_cksum_flags_prepare()
    170   inner_l3_offset + m->l3_len);  in rte_net_intel_cksum_flags_prepare()
    179   m->l3_len);  in rte_net_intel_cksum_flags_prepare()
    186   tcp_hdr = rte_pktmbuf_mtod_offset(m,  in rte_net_intel_cksum_flags_prepare()
    188   inner_l3_offset + m->l3_len);  in rte_net_intel_cksum_flags_prepare()
    [all …]
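rte_net_intel_cksum_flags_prepare() expects the caller to have filled in the header lengths and TX offload flags before it patches the pseudo-header checksums. A sketch of that caller side for a plain Ethernet/IPv4/TCP packet with no options; flag names follow the RTE_MBUF_F_* scheme already used elsewhere in this tree:

    #include <rte_ether.h>
    #include <rte_ip.h>
    #include <rte_mbuf.h>
    #include <rte_net.h>
    #include <rte_tcp.h>

    static int
    tx_cksum_prepare_demo(struct rte_mbuf *m)
    {
        /* Header lengths the helper uses to locate the L3/L4 headers. */
        m->l2_len = sizeof(struct rte_ether_hdr);
        m->l3_len = sizeof(struct rte_ipv4_hdr);
        m->l4_len = sizeof(struct rte_tcp_hdr);

        /* Request IP and TCP checksum offload from the PMD. */
        m->ol_flags |= RTE_MBUF_F_TX_IPV4 | RTE_MBUF_F_TX_IP_CKSUM |
                       RTE_MBUF_F_TX_TCP_CKSUM;

        /* Fill the pseudo-header checksum as Intel-style NICs expect. */
        return rte_net_intel_cksum_prepare(m);
    }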
|
rte_ether.h
    324   static inline int rte_vlan_strip(struct rte_mbuf *m)  in rte_vlan_strip() argument
    327   = rte_pktmbuf_mtod(m, struct rte_ether_hdr *);  in rte_vlan_strip()
    335   m->vlan_tci = rte_be_to_cpu_16(vh->vlan_tci);  in rte_vlan_strip()
    356   static inline int rte_vlan_insert(struct rte_mbuf **m)  in rte_vlan_insert() argument
    362   if (!RTE_MBUF_DIRECT(*m) || rte_mbuf_refcnt_read(*m) > 1)  in rte_vlan_insert()
    366   if (rte_pktmbuf_data_len(*m) < 2 * RTE_ETHER_ADDR_LEN)  in rte_vlan_insert()
    369   oh = rte_pktmbuf_mtod(*m, struct rte_ether_hdr *);  in rte_vlan_insert()
    379   vh->vlan_tci = rte_cpu_to_be_16((*m)->vlan_tci);  in rte_vlan_insert()
    383   if ((*m)->ol_flags & RTE_MBUF_F_TX_TUNNEL_MASK)  in rte_vlan_insert()
    384   (*m)->outer_l2_len += sizeof(struct rte_vlan_hdr);  in rte_vlan_insert()
    [all …]
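The rte_ether.h hits are the software VLAN helpers. A short usage sketch; both return 0 on success:

    #include <rte_ether.h>
    #include <rte_mbuf.h>

    static void
    vlan_sw_demo(struct rte_mbuf **m, uint16_t tci)
    {
        /* Software-strip an outer VLAN tag on receive; succeeds only if the
         * packet actually carried one. The TCI ends up in (*m)->vlan_tci and
         * the RX VLAN flags are set in (*m)->ol_flags. */
        if (rte_vlan_strip(*m) != 0)
            return;

        /* Re-insert a tag before transmit. Note the double pointer, and that
         * the helper refuses shared or indirect mbufs. */
        (*m)->vlan_tci = tci;
        if (rte_vlan_insert(m) != 0) {
            /* Failed: e.g. indirect mbuf, refcnt > 1, or no headroom. */
            return;
        }
    }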
|
/dpdk/drivers/net/sfc/
sfc_tso.c
    59    struct rte_mbuf *m = *in_seg;  in sfc_tso_prepare_header() local
    74    m = m->next;  in sfc_tso_prepare_header()
    75    SFC_ASSERT(m != NULL);  in sfc_tso_prepare_header()
    81    *in_seg = m->next;  in sfc_tso_prepare_header()
    85    *in_seg = m;  in sfc_tso_prepare_header()
    102   struct rte_mbuf *m = *in_seg;  in sfc_efx_tso_do() local
    104   size_t tcph_off = m->l2_len + m->l3_len; /* TCP header offset */  in sfc_efx_tso_do()
    105   size_t header_len = m->l2_len + m->l3_len + m->l4_len;  in sfc_efx_tso_do()
    118   if (m->data_len < header_len) {  in sfc_efx_tso_do()
    135   *in_seg = m->next;  in sfc_efx_tso_do()
    [all …]
|
sfc_ef100_tx.c
    104   struct rte_mbuf *m)  in sfc_ef100_tx_prepare_pkt_tso() argument
    107   m->outer_l2_len + m->outer_l3_len : 0) +  in sfc_ef100_tx_prepare_pkt_tso()
    108   m->l2_len + m->l3_len + m->l4_len;  in sfc_ef100_tx_prepare_pkt_tso()
    137   nb_payload_descs = m->nb_segs;  in sfc_ef100_tx_prepare_pkt_tso()
    282   struct rte_mbuf *m;  in sfc_ef100_tx_reap_num_descs() local
    289   if (m == NULL)  in sfc_ef100_tx_reap_num_descs()
    301   bulk[nb++] = m;  in sfc_ef100_tx_reap_num_descs()
    404   l4_offset_w = (m->outer_l2_len + m->outer_l3_len +  in sfc_ef100_tx_qdesc_send_create()
    405   m->l2_len + m->l3_len) >> 1;  in sfc_ef100_tx_qdesc_send_create()
    589   struct rte_mbuf *m_seg = *m;  in sfc_ef100_xmit_tso_pkt()
    [all …]
|
sfc_tso.h
    44    sfc_tso_outer_udp_fix_len(const struct rte_mbuf *m, uint8_t *tsoh)  in sfc_tso_outer_udp_fix_len() argument
    46    rte_be16_t len = rte_cpu_to_be_16(m->l2_len + m->l3_len + m->l4_len +  in sfc_tso_outer_udp_fix_len()
    47    m->tso_segsz);  in sfc_tso_outer_udp_fix_len()
    49    rte_memcpy(tsoh + m->outer_l2_len + m->outer_l3_len +  in sfc_tso_outer_udp_fix_len()
    55    sfc_tso_innermost_ip_fix_len(const struct rte_mbuf *m, uint8_t *tsoh,  in sfc_tso_innermost_ip_fix_len() argument
    58    size_t ip_payload_len = m->l4_len + m->tso_segsz;  in sfc_tso_innermost_ip_fix_len()
    62    if (m->ol_flags & RTE_MBUF_F_TX_IPV4) {  in sfc_tso_innermost_ip_fix_len()
    64    len = rte_cpu_to_be_16(m->l3_len + ip_payload_len);  in sfc_tso_innermost_ip_fix_len()
|
/dpdk/examples/ipsec-secgw/
esp.c
    37    RTE_ASSERT(m != NULL);  in esp_inbound()
    62    sym_cop->m_src = m;  in esp_inbound()
    74    icb = get_cnt_blk(m);  in esp_inbound()
    79    aad = get_aad(m);  in esp_inbound()
    109   icb = get_cnt_blk(m);  in esp_inbound()
    154   RTE_ASSERT(m != NULL);  in esp_inbound_post()
    234   RTE_ASSERT(m != NULL);  in esp_outbound()
    358   m->inner_esp_next_proto = nlp;  in esp_outbound()
    369   sym_cop->m_src = m;  in esp_outbound()
    389   aad = get_aad(m);  in esp_outbound()
    [all …]
|
ipip.h
    23    inip4 = rte_pktmbuf_mtod(m, struct ip *);  in ipip_outbound()
    71    outip4->ip_len = htons(rte_pktmbuf_data_len(m));  in ipip_outbound()
    81    m->packet_type &= ~RTE_PTYPE_L4_MASK;  in ipip_outbound()
    89    return ipip_outbound(m, offset, 0, src, dst);  in ip4ip_outbound()
    96    return ipip_outbound(m, offset, 1, src, dst);  in ip6ip_outbound()
    124   ipip_inbound(struct rte_mbuf *m, uint32_t offset)  in ipip_inbound() argument
    130   outip4 = rte_pktmbuf_mtod(m, struct ip*);  in ipip_inbound()
    148   RTE_ASSERT(rte_pktmbuf_pkt_len(m) > ip_len);  in ipip_inbound()
    160   m->packet_type &= ~RTE_PTYPE_L4_MASK;  in ipip_inbound()
    162   m->packet_type |= RTE_PTYPE_L4_UDP;  in ipip_inbound()
    [all …]
|
/dpdk/app/test-crypto-perf/
cperf_test_common.c
    27    m->priv_size = 0;  in fill_single_seg_mbuf()
    28    m->buf_addr = (char *)m + mbuf_hdr_size;  in fill_single_seg_mbuf()
    39    m->pool = mp;  in fill_single_seg_mbuf()
    40    m->nb_segs = 1;  in fill_single_seg_mbuf()
    41    m->port = 0xff;  in fill_single_seg_mbuf()
    43    m->next = NULL;  in fill_single_seg_mbuf()
    60    m->buf_addr = (char *)m + mbuf_hdr_size;  in fill_multi_seg_mbuf()
    70    m->pool = mp;  in fill_multi_seg_mbuf()
    72    m->port = 0xff;  in fill_multi_seg_mbuf()
    77    m = next_mbuf;  in fill_multi_seg_mbuf()
    [all …]
|
/dpdk/drivers/net/softnic/
rte_eth_softnic_meter.c
    37    if (m == NULL)  in softnic_mtr_free()
    41    free(m);  in softnic_mtr_free()
    367   return m;  in softnic_mtr_find()
    456   if (m == NULL)  in pmd_mtr_create()
    465   memcpy(&m->params, params, sizeof(m->params));  in pmd_mtr_create()
    491   if (m == NULL)  in pmd_mtr_destroy()
    530   free(m);  in pmd_mtr_destroy()
    549   if (m == NULL)  in pmd_mtr_meter_profile_update()
    570   if (m->flow) {  in pmd_mtr_meter_profile_update()
    653   if (m == NULL)  in pmd_mtr_meter_dscp_table_update()
    [all …]
|
/dpdk/examples/l3fwd/
l3fwd_em.h
    9     l3fwd_em_handle_ipv4(struct rte_mbuf *m, uint16_t portid,  in l3fwd_em_handle_ipv4() argument
    21    if (is_valid_ipv4_pkt(ipv4_hdr, m->pkt_len) < 0) {  in l3fwd_em_handle_ipv4()
    22    rte_pktmbuf_free(m);  in l3fwd_em_handle_ipv4()
    87    l3_ptypes = m->packet_type & RTE_PTYPE_L3_MASK;  in l3fwd_em_simple_forward()
    91    send_single_packet(qconf, m, dst_port);  in l3fwd_em_simple_forward()
    94    send_single_packet(qconf, m, dst_port);  in l3fwd_em_simple_forward()
    97    rte_pktmbuf_free(m);  in l3fwd_em_simple_forward()
    110   l3_ptypes = m->packet_type & RTE_PTYPE_L3_MASK;  in l3fwd_em_simple_process()
    113   m->port = l3fwd_em_handle_ipv4(m, m->port, eth_hdr, qconf);  in l3fwd_em_simple_process()
    115   m->port = l3fwd_em_handle_ipv6(m, m->port, eth_hdr, qconf);  in l3fwd_em_simple_process()
    [all …]
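l3fwd_em_simple_forward() and l3fwd_em_simple_process() branch on the L3 part of m->packet_type. The same dispatch reduced to a stand-alone sketch with placeholder handlers; like the l3fwd fast path, it only matches the plain IPv4/IPv6 types, not the _EXT variants:

    #include <stdint.h>

    #include <rte_mbuf.h>
    #include <rte_mbuf_ptype.h>

    static void
    l3_dispatch_demo(struct rte_mbuf *m,
                     void (*ipv4_handler)(struct rte_mbuf *),
                     void (*ipv6_handler)(struct rte_mbuf *))
    {
        /* Keep only the L3 bits of the packet type reported by the PMD. */
        uint32_t l3_ptypes = m->packet_type & RTE_PTYPE_L3_MASK;

        if (l3_ptypes == RTE_PTYPE_L3_IPV4)
            ipv4_handler(m);
        else if (l3_ptypes == RTE_PTYPE_L3_IPV6)
            ipv6_handler(m);
        else
            rte_pktmbuf_free(m);   /* neither family: drop */
    }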
|
/dpdk/lib/ip_frag/
rte_ipv6_reassembly.c
    37    struct rte_mbuf *m, *prev;  in ipv6_frag_reassemble() local
    46    m = fp->frags[IP_LAST_FRAG_IDX].mb;  in ipv6_frag_reassemble()
    55    prev = m;  in ipv6_frag_reassemble()
    65    rte_pktmbuf_adj(m,  in ipv6_frag_reassemble()
    66    (uint16_t)(m->l2_len + m->l3_len));  in ipv6_frag_reassemble()
    74    m = fp->frags[i].mb;  in ipv6_frag_reassemble()
    80    if (m == prev) {  in ipv6_frag_reassemble()
    86    rte_pktmbuf_adj(m, (uint16_t)(m->l2_len + m->l3_len));  in ipv6_frag_reassemble()
    93    ip_hdr = rte_pktmbuf_mtod_offset(m, struct rte_ipv6_hdr *, m->l2_len);  in ipv6_frag_reassemble()
    104   move_len = m->l2_len + m->l3_len - sizeof(*frag_hdr);  in ipv6_frag_reassemble()
    [all …]
|
rte_ipv4_reassembly.c
    18    struct rte_mbuf *m, *prev;  in ipv4_frag_reassemble() local
    26    m = fp->frags[IP_LAST_FRAG_IDX].mb;  in ipv4_frag_reassemble()
    32    prev = m;  in ipv4_frag_reassemble()
    42    rte_pktmbuf_adj(m,  in ipv4_frag_reassemble()
    43    (uint16_t)(m->l2_len + m->l3_len));  in ipv4_frag_reassemble()
    51    m = fp->frags[i].mb;  in ipv4_frag_reassemble()
    57    if (m == prev) {  in ipv4_frag_reassemble()
    63    rte_pktmbuf_adj(m, (uint16_t)(m->l2_len + m->l3_len));  in ipv4_frag_reassemble()
    70    ip_hdr = rte_pktmbuf_mtod_offset(m, struct rte_ipv4_hdr *, m->l2_len);  in ipv4_frag_reassemble()
    73    m->l3_len));  in ipv4_frag_reassemble()
    [all …]
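Both reassembly files trim l2_len + l3_len from every fragment and are driven through the public rte_ip_frag API. A hedged sketch of the IPv4 caller side, assuming `tbl` and `dr` were set up with rte_ip_frag_table_create() and a zeroed death row, and that the packet has no IP options; prototypes are as I recall them from rte_ip_frag.h, so double-check them against your release:

    #include <rte_ether.h>
    #include <rte_ip.h>
    #include <rte_ip_frag.h>
    #include <rte_mbuf.h>

    static struct rte_mbuf *
    reassemble_demo(struct rte_ip_frag_tbl *tbl, struct rte_ip_frag_death_row *dr,
                    struct rte_mbuf *m, uint64_t tms)
    {
        struct rte_ipv4_hdr *ip_hdr = rte_pktmbuf_mtod_offset(m,
                struct rte_ipv4_hdr *, sizeof(struct rte_ether_hdr));

        if (!rte_ipv4_frag_pkt_is_fragmented(ip_hdr))
            return m;               /* not a fragment, nothing to do */

        /* ipv4_frag_reassemble() trims exactly l2_len + l3_len from each
         * fragment, so these must be filled in first. */
        m->l2_len = sizeof(struct rte_ether_hdr);
        m->l3_len = sizeof(struct rte_ipv4_hdr);

        /* Returns the reassembled packet, or NULL while fragments are still
         * missing (the mbuf is then owned by the fragmentation table). */
        return rte_ipv4_frag_reassemble_packet(tbl, dr, m, tms, ip_hdr);
    }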
|
/dpdk/lib/eal/common/
rte_reciprocal.c
    17    uint64_t m;  in rte_reciprocal_value() local
    21    m = ((1ULL << 32) * ((1ULL << l) - d));  in rte_reciprocal_value()
    22    m /= d;  in rte_reciprocal_value()
    24    ++m;  in rte_reciprocal_value()
    25    R.m = m;  in rte_reciprocal_value()
    105   uint64_t m;  in rte_reciprocal_value_u64() local
    111   m = divide_128_div_64_to_64((1ULL << l), 0, d, &r) << 1;  in rte_reciprocal_value_u64()
    113   m++;  in rte_reciprocal_value_u64()
    114   m = (1ULL << l) - d ? m + 1 : 1;  in rte_reciprocal_value_u64()
    115   R.m = m;  in rte_reciprocal_value_u64()
    [all …]
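rte_reciprocal_value() precomputes a magic multiplier so that later divisions by the same runtime divisor become a multiply-and-shift in rte_reciprocal_divide(). A minimal usage sketch of the 32-bit variant:

    #include <stdint.h>

    #include <rte_reciprocal.h>

    /* Divide a batch of values by the same runtime divisor without issuing
     * a hardware divide per element. */
    static void
    reciprocal_demo(uint32_t divisor, const uint32_t *vals, uint32_t n,
                    uint32_t *out)
    {
        struct rte_reciprocal r = rte_reciprocal_value(divisor);
        uint32_t i;

        for (i = 0; i < n; i++)
            out[i] = rte_reciprocal_divide(vals[i], r);
    }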
|