/f-stack/dpdk/drivers/net/hns3/
hns3_rxtx_vec.h
    21: for (i = 0; i < txq->tx_rs_thresh; i++, tx_entry++) {    in hns3_tx_bulk_free_buffers()
    40: txq->tx_bd_ready += txq->tx_rs_thresh;    in hns3_tx_bulk_free_buffers()
    41: txq->next_to_clean += txq->tx_rs_thresh;    in hns3_tx_bulk_free_buffers()
    57: for (i = 0; i < txq->tx_rs_thresh; i++, tx_desc++) {    in hns3_tx_free_buffers()
|
hns3_rxtx.c
    2496: uint16_t nb_desc, uint16_t *tx_rs_thresh,    in hns3_tx_queue_conf_check() argument
    2509: rs_thresh = (conf->tx_rs_thresh > 0) ?    in hns3_tx_queue_conf_check()
    2510: conf->tx_rs_thresh : HNS3_DEFAULT_TX_RS_THRESH;    in hns3_tx_queue_conf_check()
    2533: *tx_rs_thresh = rs_thresh;    in hns3_tx_queue_conf_check()
    2543: uint16_t tx_rs_thresh, tx_free_thresh;    in hns3_tx_queue_setup() local
    2551: &tx_rs_thresh, &tx_free_thresh, idx);    in hns3_tx_queue_setup()
    2592: txq->tx_rs_thresh = tx_rs_thresh;    in hns3_tx_queue_setup()
    3421: for (i = 0; i < txq->tx_rs_thresh; i++) {    in hns3_tx_free_buffer_simple()
    3430: for (i = 0; i < txq->tx_rs_thresh; i++)    in hns3_tx_free_buffer_simple()
    3438: txq->tx_bd_ready += txq->tx_rs_thresh;    in hns3_tx_free_buffer_simple()
    [all …]
|
hns3_rxtx_vec.c
    37: new_burst = RTE_MIN(nb_pkts, txq->tx_rs_thresh);    in hns3_xmit_pkts_vec()
|
/f-stack/dpdk/drivers/net/ixgbe/
ixgbe_rxtx_vec_common.h
    90: n = txq->tx_rs_thresh;    in ixgbe_tx_free_bufs()
    124: txq->nb_tx_free = (uint16_t)(txq->nb_tx_free + txq->tx_rs_thresh);    in ixgbe_tx_free_bufs()
    125: txq->tx_next_dd = (uint16_t)(txq->tx_next_dd + txq->tx_rs_thresh);    in ixgbe_tx_free_bufs()
    127: txq->tx_next_dd = (uint16_t)(txq->tx_rs_thresh - 1);    in ixgbe_tx_free_bufs()
    129: return txq->tx_rs_thresh;    in ixgbe_tx_free_bufs()
    153: for (i = txq->tx_next_dd - (txq->tx_rs_thresh - 1);    in _ixgbe_tx_queue_release_mbufs_vec()
    229: txq->tx_next_dd = (uint16_t)(txq->tx_rs_thresh - 1);    in _ixgbe_reset_tx_queue_vec()
    230: txq->tx_next_rs = (uint16_t)(txq->tx_rs_thresh - 1);    in _ixgbe_reset_tx_queue_vec()
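The ixgbe_tx_free_bufs() hits above trace the bulk-free pattern that the vectorized TX paths in this listing share: once the descriptor at tx_next_dd reports completion, exactly tx_rs_thresh software-ring entries are released, nb_tx_free and tx_next_dd advance by tx_rs_thresh, and tx_next_dd wraps back to tx_rs_thresh - 1 at the end of the ring. The fragment below is a minimal, self-contained sketch of that bookkeeping; the txq_state struct and the desc_done() callback are invented stand-ins for the driver's queue structure and DD-bit test, not the driver's actual types.

```c
#include <stdbool.h>
#include <stdint.h>

/* Illustrative stand-in for the per-queue TX state the drivers keep. */
struct txq_state {
	uint16_t nb_tx_desc;   /* ring size */
	uint16_t tx_rs_thresh; /* descriptors between RS-marked writes */
	uint16_t nb_tx_free;   /* descriptors currently available to software */
	uint16_t tx_next_dd;   /* index whose DD bit gates the next bulk free */
};

/*
 * Reclaim a batch of descriptors, mirroring the bookkeeping visible in the
 * ixgbe/ice/i40e hits above. desc_done() stands in for the hardware DD-bit
 * test; mbuf release is elided. Returns 0 or tx_rs_thresh.
 */
static uint16_t
tx_bulk_free(struct txq_state *txq, bool (*desc_done)(uint16_t idx))
{
	if (!desc_done(txq->tx_next_dd))
		return 0; /* hardware has not finished this batch yet */

	/* ... release txq->tx_rs_thresh mbufs from the software ring ... */

	txq->nb_tx_free = (uint16_t)(txq->nb_tx_free + txq->tx_rs_thresh);
	txq->tx_next_dd = (uint16_t)(txq->tx_next_dd + txq->tx_rs_thresh);
	if (txq->tx_next_dd >= txq->nb_tx_desc)
		txq->tx_next_dd = (uint16_t)(txq->tx_rs_thresh - 1);

	return txq->tx_rs_thresh;
}
```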
|
ixgbe_rxtx.c
    148: return txq->tx_rs_thresh;    in ixgbe_tx_free_bufs()
    299: txq->tx_rs_thresh);    in tx_xmit_pkts()
    778: txq->tx_rs_thresh,    in ixgbe_xmit_pkts()
    2625: if (tx_conf->tx_rs_thresh > 0)    in ixgbe_dev_tx_queue_setup()
    2626: tx_rs_thresh = tx_conf->tx_rs_thresh;    in ixgbe_dev_tx_queue_setup()
    2631: (unsigned int)tx_rs_thresh,    in ixgbe_dev_tx_queue_setup()
    2666: (unsigned int)tx_rs_thresh,    in ixgbe_dev_tx_queue_setup()
    2719: txq->tx_rs_thresh = tx_rs_thresh;    in ixgbe_dev_tx_queue_setup()
    3285: desc = ((desc + txq->tx_rs_thresh - 1) / txq->tx_rs_thresh) *    in ixgbe_dev_tx_descriptor_status()
    3286: txq->tx_rs_thresh;    in ixgbe_dev_tx_descriptor_status()
    [all …]
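The ixgbe_dev_tx_descriptor_status() hit above rounds the queried descriptor offset up to the next tx_rs_thresh multiple, because the hardware only writes completion status back on descriptors that carry the RS bit. A small worked example of that round-up arithmetic, with purely illustrative values:

```c
#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uint16_t tx_rs_thresh = 32;
	uint16_t desc = 40; /* requested offset into the TX ring */

	/* Round up to the next RS boundary; with these values 40 becomes 64,
	 * the nearest descriptor whose DD bit the hardware will write back. */
	desc = (uint16_t)(((desc + tx_rs_thresh - 1) / tx_rs_thresh) * tx_rs_thresh);

	printf("status is checked at descriptor %u\n", (unsigned int)desc);
	return 0;
}
```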
|
ixgbe_rxtx_vec_neon.c
    495: nb_pkts = RTE_MIN(nb_pkts, txq->tx_rs_thresh);    in ixgbe_xmit_fixed_burst_vec()
    522: txq->tx_next_rs = (uint16_t)(txq->tx_rs_thresh - 1);    in ixgbe_xmit_fixed_burst_vec()
    538: txq->tx_rs_thresh);    in ixgbe_xmit_fixed_burst_vec()
|
ixgbe_rxtx_vec_sse.c
    668: nb_pkts = RTE_MIN(nb_pkts, txq->tx_rs_thresh);    in ixgbe_xmit_fixed_burst_vec()
    696: txq->tx_next_rs = (uint16_t)(txq->tx_rs_thresh - 1);    in ixgbe_xmit_fixed_burst_vec()
    712: txq->tx_rs_thresh);    in ixgbe_xmit_fixed_burst_vec()
|
ixgbe_rxtx.h
    212: uint16_t tx_rs_thresh;    member
|
/f-stack/dpdk/drivers/net/ice/
ice_rxtx_vec_common.h
    90: n = txq->tx_rs_thresh;    in ice_tx_free_bufs()
    124: txq->nb_tx_free = (uint16_t)(txq->nb_tx_free + txq->tx_rs_thresh);    in ice_tx_free_bufs()
    125: txq->tx_next_dd = (uint16_t)(txq->tx_next_dd + txq->tx_rs_thresh);    in ice_tx_free_bufs()
    127: txq->tx_next_dd = (uint16_t)(txq->tx_rs_thresh - 1);    in ice_tx_free_bufs()
    129: return txq->tx_rs_thresh;    in ice_tx_free_bufs()
    191: i = txq->tx_next_dd - txq->tx_rs_thresh + 1;    in _ice_tx_queue_release_mbufs_vec()
    283: if (txq->tx_rs_thresh < ICE_VPMD_TX_BURST ||    in ice_tx_vec_queue_default()
    284: txq->tx_rs_thresh > ICE_TX_MAX_FREE_BUF_SZ)    in ice_tx_vec_queue_default()
|
ice_rxtx.c
    1207: tx_rs_thresh =    in ice_tx_queue_setup()
    1210: if (tx_conf->tx_rs_thresh)    in ice_tx_queue_setup()
    1211: tx_rs_thresh = tx_conf->tx_rs_thresh;    in ice_tx_queue_setup()
    1216: (unsigned int)tx_rs_thresh,    in ice_tx_queue_setup()
    1227: (unsigned int)tx_rs_thresh,    in ice_tx_queue_setup()
    1247: (unsigned int)tx_rs_thresh,    in ice_tx_queue_setup()
    1301: txq->tx_rs_thresh = tx_rs_thresh;    in ice_tx_queue_setup()
    1385: qinfo->conf.tx_rs_thresh = txq->tx_rs_thresh;    in ice_txq_info_get()
    2009: desc = ((desc + txq->tx_rs_thresh - 1) / txq->tx_rs_thresh) *    in ice_tx_descriptor_status()
    2010: txq->tx_rs_thresh;    in ice_tx_descriptor_status()
    [all …]
|
ice_rxtx_vec_avx512.c
    836: n = txq->tx_rs_thresh;    in ice_tx_free_bufs_avx512()
    921: txq->nb_tx_free = (uint16_t)(txq->nb_tx_free + txq->tx_rs_thresh);    in ice_tx_free_bufs_avx512()
    922: txq->tx_next_dd = (uint16_t)(txq->tx_next_dd + txq->tx_rs_thresh);    in ice_tx_free_bufs_avx512()
    924: txq->tx_next_dd = (uint16_t)(txq->tx_rs_thresh - 1);    in ice_tx_free_bufs_avx512()
    926: return txq->tx_rs_thresh;    in ice_tx_free_bufs_avx512()
    1010: nb_pkts = RTE_MIN(nb_pkts, txq->tx_rs_thresh);    in ice_xmit_fixed_burst_vec_avx512()
    1039: txq->tx_next_rs = (uint16_t)(txq->tx_rs_thresh - 1);    in ice_xmit_fixed_burst_vec_avx512()
    1056: (uint16_t)(txq->tx_next_rs + txq->tx_rs_thresh);    in ice_xmit_fixed_burst_vec_avx512()
    1076: num = (uint16_t)RTE_MIN(nb_pkts, txq->tx_rs_thresh);    in ice_xmit_pkts_vec_avx512()
|
ice_rxtx_vec_sse.c
    666: nb_pkts = RTE_MIN(nb_pkts, txq->tx_rs_thresh);    in ice_xmit_fixed_burst_vec()
    694: txq->tx_next_rs = (uint16_t)(txq->tx_rs_thresh - 1);    in ice_xmit_fixed_burst_vec()
    711: (uint16_t)(txq->tx_next_rs + txq->tx_rs_thresh);    in ice_xmit_fixed_burst_vec()
    731: num = (uint16_t)RTE_MIN(nb_pkts, txq->tx_rs_thresh);    in ice_xmit_pkts_vec()
|
ice_rxtx_vec_avx2.c
    915: nb_pkts = RTE_MIN(nb_pkts, txq->tx_rs_thresh);    in ice_xmit_fixed_burst_vec_avx2()
    943: txq->tx_next_rs = (uint16_t)(txq->tx_rs_thresh - 1);    in ice_xmit_fixed_burst_vec_avx2()
    960: (uint16_t)(txq->tx_next_rs + txq->tx_rs_thresh);    in ice_xmit_fixed_burst_vec_avx2()
    980: num = (uint16_t)RTE_MIN(nb_pkts, txq->tx_rs_thresh);    in ice_xmit_pkts_vec_avx2()
|
/f-stack/dpdk/drivers/net/i40e/
i40e_rxtx.c
    1292: return txq->tx_rs_thresh;    in i40e_tx_free_bufs()
    2102: desc = ((desc + txq->tx_rs_thresh - 1) / txq->tx_rs_thresh) *    in i40e_dev_tx_descriptor_status()
    2103: txq->tx_rs_thresh;    in i40e_dev_tx_descriptor_status()
    2238: if (tx_conf->tx_rs_thresh > 0)    in i40e_dev_tx_queue_setup()
    2239: tx_rs_thresh = tx_conf->tx_rs_thresh;    in i40e_dev_tx_queue_setup()
    2244: (unsigned int)tx_rs_thresh,    in i40e_dev_tx_queue_setup()
    2255: (unsigned int)tx_rs_thresh,    in i40e_dev_tx_queue_setup()
    2274: (unsigned int)tx_rs_thresh,    in i40e_dev_tx_queue_setup()
    2283: (unsigned int)tx_rs_thresh,    in i40e_dev_tx_queue_setup()
    2327: txq->tx_rs_thresh = tx_rs_thresh;    in i40e_dev_tx_queue_setup()
    [all …]
|
i40e_rxtx_vec_common.h
    92: n = txq->tx_rs_thresh;    in i40e_tx_free_bufs()
    126: txq->nb_tx_free = (uint16_t)(txq->nb_tx_free + txq->tx_rs_thresh);    in i40e_tx_free_bufs()
    127: txq->tx_next_dd = (uint16_t)(txq->tx_next_dd + txq->tx_rs_thresh);    in i40e_tx_free_bufs()
    129: txq->tx_next_dd = (uint16_t)(txq->tx_rs_thresh - 1);    in i40e_tx_free_bufs()
    131: return txq->tx_rs_thresh;    in i40e_tx_free_bufs()
|
i40e_rxtx_vec_altivec.c
    564: nb_pkts = RTE_MIN(nb_pkts, txq->tx_rs_thresh);    in i40e_xmit_fixed_burst_vec()
    592: txq->tx_next_rs = (uint16_t)(txq->tx_rs_thresh - 1);    in i40e_xmit_fixed_burst_vec()
    609: (uint16_t)(txq->tx_next_rs + txq->tx_rs_thresh);    in i40e_xmit_fixed_burst_vec()
|
i40e_rxtx_vec_neon.c
    544: nb_pkts = RTE_MIN(nb_pkts, txq->tx_rs_thresh);    in i40e_xmit_fixed_burst_vec()
    571: txq->tx_next_rs = (uint16_t)(txq->tx_rs_thresh - 1);    in i40e_xmit_fixed_burst_vec()
    588: (uint16_t)(txq->tx_next_rs + txq->tx_rs_thresh);    in i40e_xmit_fixed_burst_vec()
|
i40e_rxtx_vec_avx2.c
    874: nb_pkts = RTE_MIN(nb_pkts, txq->tx_rs_thresh);    in i40e_xmit_fixed_burst_vec_avx2()
    902: txq->tx_next_rs = (uint16_t)(txq->tx_rs_thresh - 1);    in i40e_xmit_fixed_burst_vec_avx2()
    919: (uint16_t)(txq->tx_next_rs + txq->tx_rs_thresh);    in i40e_xmit_fixed_burst_vec_avx2()
    939: num = (uint16_t)RTE_MIN(nb_pkts, txq->tx_rs_thresh);    in i40e_xmit_pkts_vec_avx2()
|
i40e_rxtx_vec_sse.c
    711: nb_pkts = RTE_MIN(nb_pkts, txq->tx_rs_thresh);    in i40e_xmit_fixed_burst_vec()
    738: txq->tx_next_rs = (uint16_t)(txq->tx_rs_thresh - 1);    in i40e_xmit_fixed_burst_vec()
    755: (uint16_t)(txq->tx_next_rs + txq->tx_rs_thresh);    in i40e_xmit_fixed_burst_vec()
|
/f-stack/dpdk/drivers/net/e1000/
em_rxtx.c
    161: uint16_t tx_rs_thresh;    member
    581: if (txq->nb_tx_used >= txq->tx_rs_thresh) {    in eth_em_xmit_pkts()
    1206: uint16_t tx_rs_thresh, tx_free_thresh;    in eth_em_tx_queue_setup() local
    1229: tx_rs_thresh = tx_conf->tx_rs_thresh;    in eth_em_tx_queue_setup()
    1230: if (tx_rs_thresh == 0)    in eth_em_tx_queue_setup()
    1242: if (tx_rs_thresh > tx_free_thresh) {    in eth_em_tx_queue_setup()
    1247: (unsigned int)tx_rs_thresh,    in eth_em_tx_queue_setup()
    1299: txq->tx_rs_thresh = tx_rs_thresh;    in eth_em_tx_queue_setup()
    1567: desc = ((desc + txq->tx_rs_thresh - 1) / txq->tx_rs_thresh) *    in eth_em_tx_descriptor_status()
    1568: txq->tx_rs_thresh;    in eth_em_tx_descriptor_status()
    [all …]
|
/f-stack/dpdk/app/test/
test_pmd_perf.c
    91: .tx_rs_thresh = 32, /* Use PMD default values */
    821: tx_conf.tx_rs_thresh = 32;    in test_set_rxtx_conf()
    826: tx_conf.tx_rs_thresh = 32;    in test_set_rxtx_conf()
    835: tx_conf.tx_rs_thresh = 32;    in test_set_rxtx_conf()
    841: tx_conf.tx_rs_thresh = 32;    in test_set_rxtx_conf()
|
/f-stack/dpdk/drivers/net/iavf/
iavf_rxtx.c
    94: if (tx_rs_thresh >= (nb_desc - 2)) {    in check_tx_thresh()
    97: tx_rs_thresh, nb_desc);    in check_tx_thresh()
    106: if (tx_rs_thresh > tx_free_thresh) {    in check_tx_thresh()
    109: tx_rs_thresh, tx_free_thresh);    in check_tx_thresh()
    112: if ((nb_desc % tx_rs_thresh) != 0) {    in check_tx_thresh()
    115: tx_rs_thresh, nb_desc);    in check_tx_thresh()
    631: uint16_t tx_rs_thresh, tx_free_thresh;    in iavf_dev_tx_queue_setup() local
    646: tx_rs_thresh = (uint16_t)((tx_conf->tx_rs_thresh) ?    in iavf_dev_tx_queue_setup()
    650: check_tx_thresh(nb_desc, tx_rs_thresh, tx_rs_thresh);    in iavf_dev_tx_queue_setup()
    670: txq->rs_thresh = tx_rs_thresh;    in iavf_dev_tx_queue_setup()
    [all …]
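The check_tx_thresh() hits above show what iavf validates at TX queue setup: the RS threshold must fit within the ring (stay below nb_desc - 2), must not exceed the free threshold, and must divide the descriptor count evenly. Below is a condensed sketch of those checks, with the driver's error logging omitted and a greater-than-zero guard added up front; the ternary in iavf_dev_tx_queue_setup() above suggests a default is substituted before the real check is called, so zero never reaches it there.

```c
#include <stdint.h>

/*
 * Condensed version of the threshold checks visible above.
 * Returns 0 when the combination is acceptable, -1 otherwise.
 */
static int
check_tx_thresh_sketch(uint16_t nb_desc, uint16_t tx_rs_thresh,
		       uint16_t tx_free_thresh)
{
	if (tx_rs_thresh == 0)
		return -1; /* guide requires > 0; also avoids modulo by zero below */
	if (tx_rs_thresh >= (uint16_t)(nb_desc - 2))
		return -1; /* RS threshold too large for the ring */
	if (tx_rs_thresh > tx_free_thresh)
		return -1; /* must not exceed the free threshold */
	if ((nb_desc % tx_rs_thresh) != 0)
		return -1; /* ring size must be a multiple of the RS threshold */
	return 0;
}
```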
|
/f-stack/dpdk/app/test-pipeline/
init.c
    103: .tx_rs_thresh = 0,
|
/f-stack/dpdk/doc/guides/prog_guide/
poll_mode_drv.rst
    216: …first descriptor used to transmit the packet, exceeds the transmit RS bit threshold (tx_rs_thresh).
    219: The default value for tx_rs_thresh is 32.
    222: …that the TX Write-back threshold (TX wthresh) should be set to 0 when tx_rs_thresh is greater than…
    225: The following constraints must be satisfied for tx_free_thresh and tx_rs_thresh:
    227: * tx_rs_thresh must be greater than 0.
    229: * tx_rs_thresh must be less than the size of the ring minus 2.
    231: * tx_rs_thresh must be less than or equal to tx_free_thresh.
    237: * For optimal performance, TX wthresh should be set to 0 when tx_rs_thresh is greater than 1.
    251: either perform a bulk release when the ``tx_rs_thresh`` has been crossed
    256: independent of whether or not the ``tx_rs_thresh`` has been crossed.
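Taken together, these constraints describe how an application picks tx_rs_thresh when creating a TX queue. The sketch below shows one way to do that through struct rte_eth_txconf; it assumes the port has already been probed and configured with rte_eth_dev_configure(), and the port id, queue id, ring size, and threshold values are illustrative only.

```c
#include <rte_ethdev.h>

/*
 * Minimal sketch: one TX queue with an explicit tx_rs_thresh.
 * 512 descriptors with rs = free = 32 satisfy the constraints quoted above:
 * rs > 0, rs < ring size - 2, rs <= free (and 512 is a multiple of 32,
 * which some PMDs such as iavf additionally require).
 */
static int
setup_tx_queue_with_rs_thresh(uint16_t port_id)
{
	struct rte_eth_dev_info dev_info;
	struct rte_eth_txconf txconf;
	int ret;

	ret = rte_eth_dev_info_get(port_id, &dev_info);
	if (ret != 0)
		return ret;

	/* Start from the PMD's defaults, then override the thresholds. */
	txconf = dev_info.default_txconf;
	txconf.tx_rs_thresh = 32;     /* request an RS bit every 32 descriptors */
	txconf.tx_free_thresh = 32;   /* free completed mbufs 32 at a time */
	txconf.tx_thresh.wthresh = 0; /* write-back threshold 0, per the guide */

	return rte_eth_tx_queue_setup(port_id, 0 /* queue id */,
				      512 /* nb_tx_desc */,
				      rte_eth_dev_socket_id(port_id),
				      &txconf);
}
```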
|
/f-stack/dpdk/drivers/net/nfp/
nfp_net_pmd.h
    244: uint32_t tx_rs_thresh; /* not used by now. Future? */    member
|