
Searched refs:m0 (Results 1 – 13 of 13) sorted by relevance

/dpdk/examples/l3fwd/
l3fwd_em_hlm_neon.h
12 get_ipv4_5tuple(struct rte_mbuf *m0, int32x4_t mask0, in get_ipv4_5tuple() argument
15 int32x4_t tmpdata0 = vld1q_s32(rte_pktmbuf_mtod_offset(m0, int32_t *, in get_ipv4_5tuple()
23 get_ipv6_5tuple(struct rte_mbuf *m0, int32x4_t mask0, in get_ipv6_5tuple() argument
27 rte_pktmbuf_mtod_offset(m0, int *, in get_ipv6_5tuple()
32 rte_pktmbuf_mtod_offset(m0, int *, in get_ipv6_5tuple()
37 rte_pktmbuf_mtod_offset(m0, int *, in get_ipv6_5tuple()
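The helpers above build an exact-match lookup key directly from packet data. A minimal sketch of the IPv4 load-and-mask step, assuming an untagged Ethernet header immediately followed by IPv4 (load_ipv4_5tuple and the returned-vector layout are illustrative, not the l3fwd originals):

#include <stddef.h>
#include <arm_neon.h>
#include <rte_ether.h>
#include <rte_ip.h>
#include <rte_mbuf.h>

/* One 16-byte load starting at the IPv4 TTL field covers TTL, protocol,
 * checksum, source/destination addresses and the first four L4 bytes
 * (the ports); the mask is expected to clear TTL and checksum, which are
 * not part of the 5-tuple. */
static inline int32x4_t
load_ipv4_5tuple(struct rte_mbuf *m0, int32x4_t mask0)
{
	int32x4_t data = vld1q_s32(rte_pktmbuf_mtod_offset(m0, int32_t *,
			sizeof(struct rte_ether_hdr) +
			offsetof(struct rte_ipv4_hdr, time_to_live)));

	return vandq_s32(data, mask0);
}

In l3fwd the masked vector then serves as the key for the hash lookup.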
l3fwd_em_hlm_sse.h
11 get_ipv4_5tuple(struct rte_mbuf *m0, __m128i mask0, in get_ipv4_5tuple() argument
15 rte_pktmbuf_mtod_offset(m0, __m128i *, in get_ipv4_5tuple()
23 get_ipv6_5tuple(struct rte_mbuf *m0, __m128i mask0, in get_ipv6_5tuple() argument
27 rte_pktmbuf_mtod_offset(m0, __m128i *, in get_ipv6_5tuple()
32 rte_pktmbuf_mtod_offset(m0, __m128i *, in get_ipv6_5tuple()
38 rte_pktmbuf_mtod_offset(m0, __m128i *, in get_ipv6_5tuple()
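For IPv6 the key spans three 16-byte words, which is why the SSE helper above reads from three offsets. A sketch of that shape (load_ipv6_5tuple and the key array are illustrative names; the real helper writes into l3fwd's key union):

#include <stddef.h>
#include <emmintrin.h>
#include <rte_ether.h>
#include <rte_ip.h>
#include <rte_mbuf.h>

/* Three unaligned 16-byte loads starting at payload_len cover the proto
 * byte, both 128-bit addresses and the L4 ports. The first and last words
 * are masked to drop fields outside the 5-tuple; the middle word is
 * entirely address bytes and needs no mask. */
static inline void
load_ipv6_5tuple(struct rte_mbuf *m0, __m128i mask0, __m128i mask1,
		 __m128i key[3])
{
	const size_t off = sizeof(struct rte_ether_hdr) +
			   offsetof(struct rte_ipv6_hdr, payload_len);

	key[0] = _mm_and_si128(_mm_loadu_si128(
			rte_pktmbuf_mtod_offset(m0, __m128i *, off)), mask0);
	key[1] = _mm_loadu_si128(
			rte_pktmbuf_mtod_offset(m0, __m128i *, off + 16));
	key[2] = _mm_and_si128(_mm_loadu_si128(
			rte_pktmbuf_mtod_offset(m0, __m128i *, off + 32)),
			mask1);
}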
/dpdk/doc/guides/prog_guide/img/
rib_pic.svg
39 <path d="m0 595.28-62.71 27.91" class="st4" id="path56"/>
42 <path d="m0 595.28 52.87 27.52" class="st4" id="path61"/>
45 <path d="m0 595.28 62.71 27.91" class="st4" id="path66"/>
48 <path d="m0 595.28-56.94 27.69" class="st4" id="path71"/>
60 <path d="m0 595.28 52.87 27.52" class="st4" id="path91"/>
63 <path d="m0 595.28-151.53 40.3" class="st4" id="path96"/>
66 <path d="m0 595.28 151.53 40.3" class="st4" id="path101"/>
75 <path d="m0 595.28-62.71 27.91" class="st4" id="path116"/>
84 <path d="m0 595.28 52.87 27.52" class="st4" id="path131"/>
87 <path d="m0 595.28-62.51 27.9" class="st4" id="path136"/>
[all …]
rib_internals.svg
24 <path d="m0 595.28-143.05 56.53" class="st4" id="path768"/>
36 <path d="m0 595.28 137.65 56.44" class="st4" id="path788"/>
115 <path d="m0 595.28-62.71 27.91" class="st4" id="path942"/>
118 <path d="m0 595.28 52.87 27.52" class="st4" id="path947"/>
121 <path d="m0 595.28 62.71 27.91" class="st4" id="path952"/>
124 <path d="m0 595.28-56.94 27.69" class="st4" id="path957"/>
127 <path d="m0 595.28 55.35 27.62" class="st4" id="path962"/>
130 <path d="m0 595.28-64.01 27.95" class="st4" id="path967"/>
133 <path d="m0 595.28 66.5 28.03" class="st4" id="path972"/>
136 <path d="m0 595.28-67.1 28.05" class="st4" id="path977"/>
[all …]
dir_24_8_alg.svg
90 …<path d="m0 461.57 58.11-34.87 58.11 34.87-58.11 34.87z" class="st5" id="path124" style="fill:url(…
/dpdk/app/test/
test_mbuf.c
2713 struct rte_mbuf *m0 = NULL, *m1 = NULL, *m2 = NULL; in test_nb_segs_and_next_reset() local
2722 m0 = rte_pktmbuf_alloc(pool); in test_nb_segs_and_next_reset()
2725 if (m0 == NULL || m1 == NULL || m2 == NULL) in test_nb_segs_and_next_reset()
2729 if (rte_pktmbuf_append(m0, 500) == NULL || in test_nb_segs_and_next_reset()
2736 rte_pktmbuf_chain(m0, m1); in test_nb_segs_and_next_reset()
2737 if (m0->nb_segs != 3 || m0->next != m1 || m1->next != m2 || in test_nb_segs_and_next_reset()
2744 m0->nb_segs = 2; in test_nb_segs_and_next_reset()
2749 rte_pktmbuf_free(m0); in test_nb_segs_and_next_reset()
2753 m0 = rte_mbuf_raw_alloc(pool); in test_nb_segs_and_next_reset()
2756 if (m0 == NULL || m1 == NULL || m2 == NULL) in test_nb_segs_and_next_reset()
[all …]
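The test above allocates three mbufs, chains them and checks that the head's nb_segs and the next pointers stay consistent. A stand-alone sketch of that sequence, assuming an initialized pktmbuf pool with enough data room for the 500-byte appends (chain_three_segments is an illustrative name):

#include <rte_mbuf.h>
#include <rte_mempool.h>

static int
chain_three_segments(struct rte_mempool *pool)
{
	struct rte_mbuf *m0 = rte_pktmbuf_alloc(pool);
	struct rte_mbuf *m1 = rte_pktmbuf_alloc(pool);
	struct rte_mbuf *m2 = rte_pktmbuf_alloc(pool);
	int ret = -1;

	if (m0 == NULL || m1 == NULL || m2 == NULL)
		goto out;

	/* Give each segment some payload before chaining. */
	if (rte_pktmbuf_append(m0, 500) == NULL ||
	    rte_pktmbuf_append(m1, 500) == NULL ||
	    rte_pktmbuf_append(m2, 500) == NULL)
		goto out;

	/* rte_pktmbuf_chain() links the tail onto the head and updates
	 * nb_segs and pkt_len on the head segment only. */
	if (rte_pktmbuf_chain(m1, m2) != 0)
		goto out;
	m2 = NULL;	/* now owned by the m1 chain */
	if (rte_pktmbuf_chain(m0, m1) != 0)
		goto out;
	m1 = NULL;	/* now owned by the m0 chain */

	if (m0->nb_segs == 3 && m0->next != NULL)
		ret = 0;
out:
	rte_pktmbuf_free(m0);	/* frees the whole chain; NULL is a no-op */
	rte_pktmbuf_free(m1);
	rte_pktmbuf_free(m2);
	return ret;
}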
/dpdk/drivers/net/sfc/
sfc_ef10_essb_rx.c
269 struct rte_mbuf *m0; in sfc_ef10_essb_rx_process_ev() local
297 m0 = m; in sfc_ef10_essb_rx_process_ev()
300 m->ol_flags = m0->ol_flags; in sfc_ef10_essb_rx_process_ev()
301 m->packet_type = m0->packet_type; in sfc_ef10_essb_rx_process_ev()
sfc_ef10_rx.c
260 struct rte_mbuf *m0; in sfc_ef10_rx_process_event() local
367 m0 = m; in sfc_ef10_rx_process_event()
385 m->ol_flags = m0->ol_flags; in sfc_ef10_rx_process_event()
386 m->packet_type = m0->packet_type; in sfc_ef10_rx_process_event()
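Both sfc Rx paths above copy the offload flags and packet type filled in for the first mbuf of an event to the remaining mbufs of that event. The pattern, reduced to a hypothetical helper:

#include <stdint.h>
#include <rte_mbuf.h>

/* Copy the metadata parsed for pkts[0] to the rest of the batch, mirroring
 * the m->ol_flags = m0->ol_flags / m->packet_type = m0->packet_type lines
 * in the snippets above. */
static void
copy_batch_metadata(struct rte_mbuf **pkts, uint16_t n)
{
	const struct rte_mbuf *m0 = pkts[0];
	uint16_t i;

	for (i = 1; i < n; i++) {
		pkts[i]->ol_flags = m0->ol_flags;
		pkts[i]->packet_type = m0->packet_type;
	}
}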
/dpdk/drivers/event/dpaa/
dpaa_eventdev.c
897 struct rte_mbuf *m[DPAA_EVENT_MAX_PORT_ENQUEUE_DEPTH], *m0; in dpaa_eventdev_txa_enqueue_same_dest() local
902 m0 = (struct rte_mbuf *)ev[0].mbuf; in dpaa_eventdev_txa_enqueue_same_dest()
903 qid = rte_event_eth_tx_adapter_txq_get(m0); in dpaa_eventdev_txa_enqueue_same_dest()
908 return rte_eth_tx_burst(m0->port, qid, m, nb_events); in dpaa_eventdev_txa_enqueue_same_dest()
/dpdk/drivers/event/dpaa2/
dpaa2_eventdev.c
987 struct rte_mbuf *m[DPAA2_EVENT_MAX_PORT_ENQUEUE_DEPTH], *m0; in dpaa2_eventdev_txa_enqueue_same_dest() local
992 m0 = (struct rte_mbuf *)ev[0].mbuf; in dpaa2_eventdev_txa_enqueue_same_dest()
993 qid = rte_event_eth_tx_adapter_txq_get(m0); in dpaa2_eventdev_txa_enqueue_same_dest()
998 return rte_eth_tx_burst(m0->port, qid, m, nb_events); in dpaa2_eventdev_txa_enqueue_same_dest()
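dpaa and dpaa2 implement the same "single destination" Tx-adapter fast path: the queue id is read once from the first mbuf and the whole burst goes out with one rte_eth_tx_burst() call. A self-contained sketch (TXA_MAX_BURST and the function name are illustrative, standing in for the drivers' per-port depth limits):

#include <rte_common.h>
#include <rte_ethdev.h>
#include <rte_eventdev.h>
#include <rte_event_eth_tx_adapter.h>
#include <rte_mbuf.h>

#define TXA_MAX_BURST 64

static uint16_t
txa_enqueue_same_dest(const struct rte_event ev[], uint16_t nb_events)
{
	struct rte_mbuf *m[TXA_MAX_BURST];
	struct rte_mbuf *m0 = ev[0].mbuf;
	/* Every mbuf in the burst targets the same port/queue, so read the
	 * Tx-adapter queue from the first mbuf only. */
	uint16_t qid = rte_event_eth_tx_adapter_txq_get(m0);
	uint16_t i;

	if (nb_events > TXA_MAX_BURST)
		nb_events = TXA_MAX_BURST;

	for (i = 0; i < nb_events; i++)
		m[i] = ev[i].mbuf;

	return rte_eth_tx_burst(m0->port, qid, m, nb_events);
}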
/dpdk/drivers/net/softnic/
rte_eth_softnic_flow.c
598 uint64_t m0 = rte_be_to_cpu_64(m[0]); in ipv6_mask_to_depth() local
603 status = mask_to_depth(m0, &d0); in ipv6_mask_to_depth()
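mask_to_depth() is a static helper inside rte_eth_softnic_flow.c. A hypothetical sketch of the 64-bit half of that conversion (mask64_to_depth is an illustrative name), assuming a valid prefix mask is a contiguous run of leading 1-bits and the input has already been byte-swapped with rte_be_to_cpu_64() as in the snippet:

#include <stdint.h>

static int
mask64_to_depth(uint64_t mask, uint32_t *depth)
{
	uint32_t n;

	if (mask == 0) {
		*depth = 0;
		return 0;
	}
	if (mask == UINT64_MAX) {
		*depth = 64;
		return 0;
	}

	n = __builtin_clzll(~mask);		/* length of the leading run of 1s */
	if (mask != (UINT64_MAX << (64 - n)))
		return -1;			/* non-contiguous mask: invalid */

	*depth = n;
	return 0;
}

The full IPv6 conversion would run this on both 64-bit halves and accept a non-zero depth from the second half only when the first half is all ones.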
/dpdk/drivers/net/bnx2x/
bnx2x.c
2166 int bnx2x_tx_encap(struct bnx2x_tx_queue *txq, struct rte_mbuf *m0) in bnx2x_tx_encap() argument
2177 txq->sw_ring[TX_BD(pkt_prod, txq)] = m0; in bnx2x_tx_encap()
2182 rte_cpu_to_le_32(U64_LO(rte_mbuf_data_iova(m0))); in bnx2x_tx_encap()
2184 rte_cpu_to_le_32(U64_HI(rte_mbuf_data_iova(m0))); in bnx2x_tx_encap()
2185 tx_start_bd->nbytes = rte_cpu_to_le_16(m0->data_len); in bnx2x_tx_encap()
2192 if (m0->ol_flags & RTE_MBUF_F_TX_VLAN) { in bnx2x_tx_encap()
2194 rte_cpu_to_le_16(m0->vlan_tci); in bnx2x_tx_encap()
2207 rte_pktmbuf_mtod(m0, struct rte_ether_hdr *); in bnx2x_tx_encap()
2231 rte_pktmbuf_mtod(m0, struct rte_ether_hdr *); in bnx2x_tx_encap()
bnx2x.h
2007 int bnx2x_tx_encap(struct bnx2x_tx_queue *txq, struct rte_mbuf *m0);
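bnx2x_tx_encap() programs a hardware buffer descriptor from the mbuf: the DMA address split into low/high little-endian words, the segment length, and the VLAN tag when RTE_MBUF_F_TX_VLAN is set. A sketch of that pattern against a toy descriptor (struct toy_tx_bd is hypothetical; the real eth_tx_start_bd layout comes from the driver's HSI headers):

#include <rte_byteorder.h>
#include <rte_mbuf.h>

struct toy_tx_bd {
	rte_le32_t addr_lo;
	rte_le32_t addr_hi;
	rte_le16_t nbytes;
	rte_le16_t vlan_tci;
};

static void
fill_tx_bd(struct toy_tx_bd *bd, struct rte_mbuf *m0)
{
	rte_iova_t iova = rte_mbuf_data_iova(m0);

	/* Program the DMA address of the mbuf data and the segment length. */
	bd->addr_lo = rte_cpu_to_le_32((uint32_t)iova);
	bd->addr_hi = rte_cpu_to_le_32((uint32_t)(iova >> 32));
	bd->nbytes  = rte_cpu_to_le_16(m0->data_len);

	/* Pass the VLAN tag through when Tx VLAN insertion is requested. */
	if (m0->ol_flags & RTE_MBUF_F_TX_VLAN)
		bd->vlan_tci = rte_cpu_to_le_16(m0->vlan_tci);
}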