Searched refs:txm (Results 1 – 6 of 6) sorted by relevance
in ionic_tx_flush():
    112  while (txm != NULL) {
    113  next = txm->next;
    115  txm = next;
in ionic_tx_tcp_inner_pseudo_csum():
    258  txm->outer_l3_len + txm->l2_len;
in ionic_tx_tso():
    347  hdrlen = txm->outer_l2_len + txm->outer_l3_len +
    348  txm->l2_len + txm->l3_len + txm->l4_len;
    351  hdrlen = txm->l2_len + txm->l3_len + txm->l4_len;
    355  left = txm->data_len;
    386  txm_seg = txm->next;
in ionic_tx():
    477  txm_seg = txm->next;
[all …]
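
The ionic matches show two recurring Tx patterns: walking a completed mbuf chain through txm->next, and summing the mbuf's (outer_)l2/l3/l4 length fields to get the header length for TSO. Below is a minimal sketch of both, assuming only DPDK's <rte_mbuf.h>; the helper names tx_free_chain and tx_tso_hdrlen are invented here and are not part of the ionic driver.

#include <stdbool.h>
#include <stdint.h>
#include <rte_mbuf.h>

/* Hypothetical helper: free a transmitted mbuf chain segment by segment,
 * mirroring the txm/next walk seen in ionic_tx_flush(). */
static void
tx_free_chain(struct rte_mbuf *txm)
{
        struct rte_mbuf *next;

        while (txm != NULL) {
                next = txm->next;
                rte_pktmbuf_free_seg(txm);
                txm = next;
        }
}

/* Hypothetical helper: header bytes the TSO path must account for, with and
 * without an outer (tunnel) header, as in the two branches of ionic_tx_tso(). */
static uint32_t
tx_tso_hdrlen(const struct rte_mbuf *txm, bool encap)
{
        if (encap)
                return txm->outer_l2_len + txm->outer_l3_len +
                       txm->l2_len + txm->l3_len + txm->l4_len;
        return txm->l2_len + txm->l3_len + txm->l4_len;
}
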
in vmxnet3_xmit_pkts():
    417  struct rte_mbuf *m_seg = txm;
    421  unsigned count = txm->nb_segs;
    442  rte_pktmbuf_free(txm);
    453  rte_pktmbuf_free(txm);
    458  if (txm->nb_segs == 1 &&
    465  rte_pktmbuf_free(txm);
    517  tbi->m = txm;
    525  gdesc->txd.tci = txm->vlan_tci;
    529  uint16_t mss = txm->tso_segsz;
    533  gdesc->txd.hlen = txm->l2_len + txm->l3_len + txm->l4_len;
[all …]
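
The vmxnet3 hits are all inside one burst function: packets the device cannot describe are freed with rte_pktmbuf_free() and skipped, otherwise VLAN and TSO metadata (vlan_tci, tso_segsz, l2/l3/l4 lengths) is copied into the Tx descriptor. The sketch below shows that per-packet step only; fake_txd, tx_fill_desc and the max_segs parameter are stand-ins, and the RTE_MBUF_F_TX_* flag names are current DPDK spellings, not necessarily those of the release these results come from.

#include <stdint.h>
#include <rte_mbuf.h>

/* Hypothetical stand-in for the device's Tx descriptor offload fields
 * (vmxnet3 keeps them in gdesc->txd). */
struct fake_txd {
        uint16_t tci;   /* VLAN tag to insert */
        uint16_t mss;   /* TSO segment size */
        uint32_t hlen;  /* L2+L3+L4 header length for TSO */
};

/* Sketch of the per-packet pattern in vmxnet3_xmit_pkts(): drop what cannot
 * be described, otherwise copy offload metadata from the mbuf. */
static int
tx_fill_desc(struct rte_mbuf *txm, struct fake_txd *txd, uint16_t max_segs)
{
        if (txm->nb_segs > max_segs) {
                rte_pktmbuf_free(txm);  /* drop: too many segments */
                return -1;
        }

        if (txm->ol_flags & RTE_MBUF_F_TX_VLAN)
                txd->tci = txm->vlan_tci;

        if (txm->ol_flags & RTE_MBUF_F_TX_TCP_SEG) {
                txd->mss = txm->tso_segsz;
                txd->hlen = txm->l2_len + txm->l3_len + txm->l4_len;
        }
        return 0;
}
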
in virtio_xmit_pkts_packed():
    1759  struct rte_mbuf *txm = tx_pkts[nb_tx];  (local)
    1765  rte_mbuf_refcnt_read(txm) == 1 &&
    1766  RTE_MBUF_DIRECT(txm) &&
    1767  txm->nb_segs == 1 &&
in virtio_xmit_pkts():
    1846  rte_mbuf_refcnt_read(txm) == 1 &&
    1847  RTE_MBUF_DIRECT(txm) &&
    1848  txm->nb_segs == 1 &&
in virtio_xmit_pkts_inorder():
    1949  RTE_MBUF_DIRECT(txm) &&
    1950  txm->nb_segs == 1 &&
    1954  inorder_pkts[nb_inorder_pkts] = txm;
[all …]
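
All three virtio burst variants test the same condition on txm before deciding whether the virtio-net header can be prepended in place in the mbuf headroom. A sketch of that predicate, assuming the header size is passed in; can_push_hdr is an invented name for what the driver computes inline.

#include <stdbool.h>
#include <stddef.h>
#include <rte_mbuf.h>

/* Sketch of the fast-path test repeated in virtio_xmit_pkts*(): the header
 * can be pushed into the mbuf only when the mbuf is exclusively owned,
 * directly attached, single-segment, and has enough headroom. */
static bool
can_push_hdr(const struct rte_mbuf *txm, size_t hdr_size)
{
        return rte_mbuf_refcnt_read(txm) == 1 &&
               RTE_MBUF_DIRECT(txm) &&
               txm->nb_segs == 1 &&
               rte_pktmbuf_headroom(txm) >= hdr_size;
}
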
in virtqueue_enqueue_single_packed_vec():
    205  struct rte_mbuf *txm)  (argument)
    216  rte_mbuf_refcnt_read(txm) == 1 &&
    217  RTE_MBUF_DIRECT(txm) &&
    218  txm->nb_segs == 1 &&
    219  rte_pktmbuf_headroom(txm) >= hdr_size)
    222  txm->nb_segs < VIRTIO_MAX_TX_INDIRECT)
    229  slots = use_indirect ? 1 : (txm->nb_segs + !can_push);
    244  virtqueue_enqueue_xmit_packed(txvq, txm, slots, use_indirect,
    247  txvq->stats.bytes += txm->pkt_len;
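
This block adds the consequence of the test above: how many descriptor slots the packet occupies. An indirect table costs one slot, otherwise it is one slot per segment plus one for a separate header when it cannot be pushed. A sketch under those assumptions; MAX_TX_INDIRECT, indirect_supported and tx_slots_needed are stand-ins for the driver's VIRTIO_MAX_TX_INDIRECT, its feature check, and its inlined logic.

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>
#include <rte_mbuf.h>

#define MAX_TX_INDIRECT 8  /* assumed limit, mirroring VIRTIO_MAX_TX_INDIRECT */

/* Sketch of the slot accounting in virtqueue_enqueue_single_packed_vec(). */
static uint16_t
tx_slots_needed(const struct rte_mbuf *txm, size_t hdr_size,
                bool indirect_supported)
{
        bool can_push = rte_mbuf_refcnt_read(txm) == 1 &&
                        RTE_MBUF_DIRECT(txm) &&
                        txm->nb_segs == 1 &&
                        rte_pktmbuf_headroom(txm) >= hdr_size;
        bool use_indirect = indirect_supported && !can_push &&
                            txm->nb_segs < MAX_TX_INDIRECT;

        return use_indirect ? 1 : (txm->nb_segs + !can_push);
}
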
in ntb_enqueue_bufs():
    608  struct rte_mbuf *txm;  (local)
    636  if (txm == NULL || txq->nb_tx_free < txm->nb_segs)
    639  tx_last = (txq->last_used + txm->nb_segs - 1) &
    641  nb_segs = txm->nb_segs;
    646  sw_ring[txq->last_used].mbuf = txm;
    653  if (txm->data_len > tx_item->len) {
    657  txm->data_len = tx_item->len;
    668  txm->data_len);
    670  tx_used[nb_mbufs].len = txm->data_len;
    676  bytes += txm->data_len;
[all …]
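
The ntb matches copy mbuf payload into fixed-size ring entries, clamping data_len to the entry size and accounting the bytes actually queued. A rough sketch of just the copy step; ring_slot and slot_copy_mbuf are hypothetical, and the real driver's truncation and accounting details may differ.

#include <stdint.h>
#include <rte_mbuf.h>
#include <rte_memcpy.h>

/* Hypothetical ring slot of fixed capacity, standing in for an NTB
 * memory-window entry. */
struct ring_slot {
        void *addr;
        uint16_t len;  /* capacity of the slot in bytes */
};

/* Sketch of the copy step in ntb_enqueue_bufs(): place the mbuf payload in a
 * fixed-size slot, clamped to the slot capacity, and return the byte count
 * for the caller's accounting. */
static uint16_t
slot_copy_mbuf(struct ring_slot *slot, struct rte_mbuf *txm)
{
        uint16_t len = txm->data_len;

        if (len > slot->len)
                len = slot->len;  /* clamp to what the slot can hold */

        rte_memcpy(slot->addr, rte_pktmbuf_mtod(txm, void *), len);
        return len;
}
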
in virtio_crypto_pkt_tx_burst():
    477  struct rte_mbuf *txm = tx_pkts[nb_tx]->sym->m_src;  (local)
    479  int need = txm->nb_segs - txvq->vq_free_cnt;
    490  need = txm->nb_segs - txvq->vq_free_cnt;
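
The crypto PMD checks descriptor headroom per request: the shortfall is nb_segs minus the queue's free count, and a positive shortfall prompts a reclaim of used descriptors before the request is enqueued or the burst ends. A sketch of that control flow only; reclaim_fn and crypto_tx_room_ok are invented stand-ins for the driver's internals.

#include <stdint.h>
#include <rte_mbuf.h>

/* Hypothetical reclaim hook, standing in for the driver's used-descriptor
 * harvesting; present here only so the retry flow is visible. */
typedef uint16_t (*reclaim_fn)(void *vq, uint16_t want);

/* Sketch of the check in virtio_crypto_pkt_tx_burst(): compute the descriptor
 * shortfall for the source mbuf chain, reclaim, and retest. */
static int
crypto_tx_room_ok(void *vq, uint16_t *free_cnt,
                  const struct rte_mbuf *txm, reclaim_fn reclaim)
{
        int need = (int)txm->nb_segs - (int)*free_cnt;

        if (need > 0) {
                *free_cnt += reclaim(vq, (uint16_t)need);
                need = (int)txm->nb_segs - (int)*free_cnt;
        }
        return need <= 0;  /* nonzero: enough descriptors; 0: stop the burst */
}
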