| /f-stack/dpdk/drivers/net/hns3/ |
| H A D | hns3_rxtx_vec.c |
|    75  rxdp[0].addr = rte_cpu_to_le_64(dma_addr);  in hns3_rxq_rearm_mbuf()
|    76  rxdp[0].rx.bd_base_info = 0;  in hns3_rxq_rearm_mbuf()
|    79  rxdp[1].addr = rte_cpu_to_le_64(dma_addr);  in hns3_rxq_rearm_mbuf()
|    80  rxdp[1].rx.bd_base_info = 0;  in hns3_rxq_rearm_mbuf()
|    83  rxdp[2].addr = rte_cpu_to_le_64(dma_addr);  in hns3_rxq_rearm_mbuf()
|    84  rxdp[2].rx.bd_base_info = 0;  in hns3_rxq_rearm_mbuf()
|    87  rxdp[3].addr = rte_cpu_to_le_64(dma_addr);  in hns3_rxq_rearm_mbuf()
|    88  rxdp[3].rx.bd_base_info = 0;  in hns3_rxq_rearm_mbuf()
|   106  struct hns3_desc *rxdp = &rxq->rx_ring[rxq->next_to_use];  in hns3_recv_pkts_vec() local
|   113  rte_prefetch_non_temporal(rxdp);  in hns3_recv_pkts_vec()
|   [all …]
|
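The hns3_rxq_rearm_mbuf() hits above are the descriptor-refill (rearm) pattern: for every fresh mbuf the driver writes the buffer's DMA address into the BD and clears bd_base_info so the hardware sees the slot as empty again. A minimal stand-alone sketch of that unrolled loop, using simplified stand-in types rather than the real struct hns3_desc and software-ring entry:

```c
#include <stdint.h>

/* Simplified stand-ins for the driver's descriptor and software-ring entry. */
struct hns3_desc_sketch {
	uint64_t addr;          /* buffer DMA address */
	uint64_t bd_base_info;  /* valid/ownership bits, cleared on rearm */
};

struct rx_entry_sketch {
	uint64_t dma_addr;      /* DMA address of the freshly allocated buffer */
};

/* Refill n descriptors (n a multiple of 4), unrolled four at a time the way
 * hns3_rxq_rearm_mbuf() unrolls its inner loop. */
static void
rearm_sketch(struct hns3_desc_sketch *rxdp,
	     const struct rx_entry_sketch *rxep, unsigned int n)
{
	unsigned int i;

	for (i = 0; i < n; i += 4, rxdp += 4, rxep += 4) {
		rxdp[0].addr = rxep[0].dma_addr;
		rxdp[0].bd_base_info = 0;
		rxdp[1].addr = rxep[1].dma_addr;
		rxdp[1].bd_base_info = 0;
		rxdp[2].addr = rxep[2].dma_addr;
		rxdp[2].bd_base_info = 0;
		rxdp[3].addr = rxep[3].dma_addr;
		rxdp[3].bd_base_info = 0;
	}
}
```

The real code additionally passes the address through rte_cpu_to_le_64(), since the descriptor must stay little-endian even on big-endian hosts.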
| H A D | hns3_rxtx_vec_neon.h |
|    89  struct hns3_desc *rxdp,  in hns3_desc_parse_field() argument
|   105  l234_info = rxdp[i].rx.l234_info;  in hns3_desc_parse_field()
|   106  ol_info = rxdp[i].rx.ol_info;  in hns3_desc_parse_field()
|   107  bd_base_info = rxdp[i].rx.bd_base_info;  in hns3_desc_parse_field()
|   132  struct hns3_desc *rxdp = &rxq->rx_ring[rx_id];  in hns3_recv_burst_vec() local
|   156  rxdp += HNS3_DEFAULT_DESCS_PER_LOOP) {  in hns3_recv_burst_vec()
|   198  descs[0] = vld2q_u64((uint64_t *)(rxdp + offset));  in hns3_recv_burst_vec()
|   199  descs[1] = vld2q_u64((uint64_t *)(rxdp + offset + 1));  in hns3_recv_burst_vec()
|   205  descs[2] = vld2q_u64((uint64_t *)(rxdp + offset + 2));  in hns3_recv_burst_vec()
|   206  descs[3] = vld2q_u64((uint64_t *)(rxdp + offset + 3));  in hns3_recv_burst_vec()
|   [all …]
|
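The vld2q_u64() hits in hns3_recv_burst_vec() load 32-byte descriptors as de-interleaved 64-bit lanes, so address words and flag words (such as bd_base_info, later consumed by hns3_desc_parse_field()) land in separate NEON vectors. A hedged, AArch64-only sketch of just that load step, with a simplified four-qword descriptor standing in for struct hns3_desc:

```c
#include <stdint.h>
#include <arm_neon.h>

/* Simplified 32-byte RX descriptor: four 64-bit words; in this stand-in the
 * last word plays the role of bd_base_info (valid/ownership bits). */
struct desc_sketch {
	uint64_t qw[4];
};

/* vld2q_u64() reads four uint64 values and de-interleaves them:
 * val[0] = {qw[0], qw[2]}, val[1] = {qw[1], qw[3]}, so the flag words are
 * separated from the address words, mirroring the descs[] loads above. */
static inline uint64_t
load_bd_base_info_sketch(const struct desc_sketch *rxdp)
{
	uint64x2x2_t d = vld2q_u64(rxdp->qw);

	/* qw[3] (the odd word of the second pair) lands in lane 1 of val[1]. */
	return vgetq_lane_u64(d.val[1], 1);
}
```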
| H A D | hns3_rxtx_vec_sve.c |
|    86  struct hns3_desc *rxdp = &rxq->rx_ring[rx_id];  in hns3_recv_burst_vec_sve() local
|   123  rxdp += HNS3_SVE_DEFAULT_DESCS_PER_LOOP) {  in hns3_recv_burst_vec_sve()
|   130  vld = svld1_gather_u32offset_u32(pg32, (uint32_t *)rxdp,  in hns3_recv_burst_vec_sve()
|   153  rxdp2 = rxdp + offset;  in hns3_recv_burst_vec_sve()
|   211  rte_prefetch_non_temporal(rxdp +  in hns3_recv_burst_vec_sve()
|   240  struct hns3_desc *rxdp = rxq->rx_ring + rxq->rx_rearm_start;  in hns3_rxq_rearm_mbuf_sve() local
|   267  (uint64_t *)&rxdp[0].addr,  in hns3_rxq_rearm_mbuf_sve()
|   270  (uint64_t *)&rxdp[0].addr,  in hns3_rxq_rearm_mbuf_sve()
|   289  struct hns3_desc *rxdp = &rxq->rx_ring[rxq->next_to_use];  in hns3_recv_pkts_vec_sve() local
|   293  rte_prefetch_non_temporal(rxdp);  in hns3_recv_pkts_vec_sve()
|   [all …]
|
| /f-stack/dpdk/drivers/net/ice/ |
| H A D | ice_rxtx_vec_avx2.c |
|    18  volatile union ice_rx_flex_desc *rxdp;  in ice_rxq_rearm() local
|    21  rxdp = rxq->rx_ring + rxq->rxrearm_start;  in ice_rxq_rearm()
|    34  _mm_store_si128((__m128i *)&rxdp[i].read,  in ice_rxq_rearm()
|    79  i += 4, rxep += 4, rxdp += 4) {  in ice_rxq_rearm()
|   167  rte_prefetch0(rxdp);  in _ice_recv_raw_pkts_vec_avx2()
|   181  if (!(rxdp->wb.status_error0 &  in _ice_recv_raw_pkts_vec_avx2()
|   316  rxdp += ICE_DESCS_PER_LOOP_AVX) {  in _ice_recv_raw_pkts_vec_avx2()
|   342  _mm_load_si128((void *)(rxdp + 7));  in _ice_recv_raw_pkts_vec_avx2()
|   345  _mm_load_si128((void *)(rxdp + 6));  in _ice_recv_raw_pkts_vec_avx2()
|   348  _mm_load_si128((void *)(rxdp + 5));  in _ice_recv_raw_pkts_vec_avx2()
|   [all …]
|
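All the x86 rearm functions listed for ice (and, further down, for iavf, i40e, ixgbe and fm10k) share one idea: the 16-byte "read" half of a descriptor is programmed with a single 128-bit store, as in the _mm_store_si128((__m128i *)&rxdp[i].read, ...) hits. A stand-alone SSE2 sketch with a simplified descriptor union; the driver uses the aligned _mm_store_si128 because ring entries are 16-byte aligned, while the sketch uses the unaligned variant so it stays safe for arbitrary test buffers:

```c
#include <stdint.h>
#include <emmintrin.h>  /* SSE2 */

/* Simplified 16-byte "read" descriptor: the format the NIC consumes when the
 * driver hands it a fresh buffer (pkt_addr first, hdr_addr second). */
union rx_desc_sketch {
	struct {
		uint64_t pkt_addr;
		uint64_t hdr_addr;
	} read;
	uint64_t qword[2];   /* the NIC later reuses the slot for write-back */
};

/* Program one descriptor with a single 16-byte store; hdr_addr stays 0
 * because header split is not used on these paths. */
static inline void
rearm_one_sketch(union rx_desc_sketch *rxdp, uint64_t pkt_dma_addr)
{
	__m128i d = _mm_set_epi64x(0 /* hdr_addr */, (long long)pkt_dma_addr);

	_mm_storeu_si128((__m128i *)&rxdp->read, d);
}
```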
| H A D | ice_rxtx_vec_avx512.c |
|    20  volatile union ice_rx_flex_desc *rxdp;  in ice_rxq_rearm() local
|    25  rxdp = rxq->rx_ring + rxq->rxrearm_start;  in ice_rxq_rearm()
|    45  ((__m128i *)&rxdp[i].read,  in ice_rxq_rearm()
|   115  rxep += 8, rxdp += 8, cache->len -= 8;  in ice_rxq_rearm()
|   161  rte_prefetch0(rxdp);  in _ice_recv_raw_pkts_vec_avx512()
|   175  if (!(rxdp->wb.status_error0 &  in _ice_recv_raw_pkts_vec_avx512()
|   308  _mm_load_si128((void *)(rxdp + 7));  in _ice_recv_raw_pkts_vec_avx512()
|   311  _mm_load_si128((void *)(rxdp + 6));  in _ice_recv_raw_pkts_vec_avx512()
|   314  _mm_load_si128((void *)(rxdp + 5));  in _ice_recv_raw_pkts_vec_avx512()
|   317  _mm_load_si128((void *)(rxdp + 4));  in _ice_recv_raw_pkts_vec_avx512()
|   [all …]
|
| H A D | ice_rxtx_vec_sse.c |
|    37  volatile union ice_rx_flex_desc *rxdp;  in ice_rxq_rearm() local
|    44  rxdp = rxq->rx_ring + rxq->rxrearm_start;  in ice_rxq_rearm()
|    55  _mm_store_si128((__m128i *)&rxdp[i].read,  in ice_rxq_rearm()
|   267  volatile union ice_rx_flex_desc *rxdp;  in _ice_recv_raw_pkts_vec() local
|   324  rxdp = rxq->rx_ring + rxq->rx_tail;  in _ice_recv_raw_pkts_vec()
|   326  rte_prefetch0(rxdp);  in _ice_recv_raw_pkts_vec()
|   337  if (!(rxdp->wb.status_error0 &  in _ice_recv_raw_pkts_vec()
|   370  rxdp += ICE_DESCS_PER_LOOP) {  in _ice_recv_raw_pkts_vec()
|   450  ((void *)(&rxdp[3].wb.status_error1));  in _ice_recv_raw_pkts_vec()
|   454  ((void *)(&rxdp[2].wb.status_error1));  in _ice_recv_raw_pkts_vec()
|   [all …]
|
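_ice_recv_raw_pkts_vec() first gates on the DD ("descriptor done") bit of the descriptor at rx_tail (the `if (!(rxdp->wb.status_error0 & ...))` hit) and, inside the loop, loads each group of descriptors in reverse order (rxdp + 3 down to rxdp) so the NIC's in-order write-back cannot race the software's count of completed descriptors. A simplified sketch of that gate-and-load step; the write-back layout and DD bit position are stand-ins, not the real ice flex descriptor:

```c
#include <stdint.h>
#include <emmintrin.h>

/* Simplified 16-byte write-back descriptor with a status word carrying the
 * DD bit once the NIC has filled the slot. */
union rx_wb_desc_sketch {
	struct {
		uint32_t flow_id;
		uint32_t status_error0;   /* bit 0: DD (assumed position) */
		uint32_t length;
		uint32_t status_error1;
	} wb;
	uint64_t qword[2];
};

#define DD_BIT 0x1u

/* Gate on the first descriptor's DD bit, then load four descriptors in
 * reverse order (3..0), mirroring the rxdp + 3 .. rxdp loads above. */
static inline int
load_group_sketch(const union rx_wb_desc_sketch *rxdp, __m128i descs[4])
{
	if (!(rxdp->wb.status_error0 & DD_BIT))
		return 0;   /* nothing written back yet */

	descs[3] = _mm_loadu_si128((const __m128i *)(rxdp + 3));
	descs[2] = _mm_loadu_si128((const __m128i *)(rxdp + 2));
	descs[1] = _mm_loadu_si128((const __m128i *)(rxdp + 1));
	descs[0] = _mm_loadu_si128((const __m128i *)(rxdp + 0));
	return 1;
}
```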
| H A D | ice_rxtx.c |
|   1409  rxdp += ICE_RXQ_SCAN_INTERVAL;  in ice_rx_queue_count()
|   1609  rxdp = &rxq->rx_ring[alloc_idx];  in ice_rx_alloc_bufs()
|   1622  rxdp[i].read.hdr_addr = 0;  in ice_rx_alloc_bufs()
|   1623  rxdp[i].read.pkt_addr = dma_addr;  in ice_rx_alloc_bufs()
|   1736  rxdp = &rx_ring[rx_id];  in ice_recv_scattered_pkts()
|   1777  rxdp->read.hdr_addr = 0;  in ice_recv_scattered_pkts()
|   1778  rxdp->read.pkt_addr = dma_addr;  in ice_recv_scattered_pkts()
|   1988  rxdp = &rxq->rx_ring[desc];  in ice_rx_descriptor_status()
|   2199  rxdp = &rx_ring[rx_id];  in ice_recv_pkts()
|   2229  rxdp->read.hdr_addr = 0;  in ice_recv_pkts()
|   [all …]
|
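ice_rx_alloc_bufs() (hits 1609-1623) is the staged bulk refill used by the scalar fast path: one mempool call provides a whole burst of mbufs, then each descriptor gets hdr_addr cleared and pkt_addr pointed at the new buffer. A sketch of that loop against DPDK's public mbuf/mempool API; the queue structure below is a simplified stand-in, not struct ice_rx_queue, and the real code also re-initialises next/nb_segs/data_off on every mbuf:

```c
#include <errno.h>
#include <stdint.h>
#include <rte_byteorder.h>
#include <rte_mbuf.h>
#include <rte_mempool.h>

/* Simplified stand-ins for the ring entry and the "read" descriptor format. */
struct rx_desc_read_sketch {
	uint64_t pkt_addr;
	uint64_t hdr_addr;
};

struct rx_queue_sketch {
	struct rx_desc_read_sketch *rx_ring;  /* NIC-visible descriptor ring */
	struct rte_mbuf **sw_ring;            /* per-descriptor mbuf pointers */
	struct rte_mempool *mp;
	uint16_t rx_free_trigger;             /* first index to refill */
	uint16_t rx_free_thresh;              /* refill burst size */
};

/* Bulk-allocate rx_free_thresh mbufs and program their descriptors. */
static int
rx_alloc_bufs_sketch(struct rx_queue_sketch *rxq)
{
	struct rte_mbuf **rxep = &rxq->sw_ring[rxq->rx_free_trigger];
	struct rx_desc_read_sketch *rxdp = &rxq->rx_ring[rxq->rx_free_trigger];
	uint16_t i;

	if (rte_mempool_get_bulk(rxq->mp, (void **)rxep, rxq->rx_free_thresh) != 0)
		return -ENOMEM;   /* leave the ring untouched; retry later */

	for (i = 0; i < rxq->rx_free_thresh; i++) {
		struct rte_mbuf *mb = rxep[i];
		uint64_t dma_addr =
			rte_cpu_to_le_64(rte_mbuf_data_iova_default(mb));

		/* No header split: hdr_addr stays 0, pkt_addr gets the buffer. */
		rxdp[i].hdr_addr = 0;
		rxdp[i].pkt_addr = dma_addr;
	}
	return 0;
}
```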
| /f-stack/dpdk/drivers/net/iavf/ |
| H A D | iavf_rxtx_vec_avx2.c |
|    18  volatile union iavf_rx_desc *rxdp;  in iavf_rxq_rearm() local
|    21  rxdp = rxq->rx_ring + rxq->rxrearm_start;  in iavf_rxq_rearm()
|    34  _mm_store_si128((__m128i *)&rxdp[i].read,  in iavf_rxq_rearm()
|    79  i += 4, rxp += 4, rxdp += 4) {  in iavf_rxq_rearm()
|   154  rte_prefetch0(rxdp);  in _iavf_recv_raw_pkts_vec_avx2()
|   327  _mm_load_si128((void *)(rxdp + 7));  in _iavf_recv_raw_pkts_vec_avx2()
|   330  _mm_load_si128((void *)(rxdp + 6));  in _iavf_recv_raw_pkts_vec_avx2()
|   333  _mm_load_si128((void *)(rxdp + 5));  in _iavf_recv_raw_pkts_vec_avx2()
|   336  _mm_load_si128((void *)(rxdp + 4));  in _iavf_recv_raw_pkts_vec_avx2()
|   651  rte_prefetch0(rxdp);  in _iavf_recv_raw_pkts_vec_avx2_flex_rxd()
|   [all …]
|
| H A D | iavf_rxtx.c |
|   1132  rxdp = &rx_ring[rx_id];  in iavf_recv_pkts()
|   1151  rxd = *rxdp;  in iavf_recv_pkts()
|   1172  rxdp->read.hdr_addr = 0;  in iavf_recv_pkts()
|   1256  rxd = *rxdp;  in iavf_recv_pkts_flex_rxd()
|   1277  rxdp->read.hdr_addr = 0;  in iavf_recv_pkts_flex_rxd()
|   1347  rxd = *rxdp;  in iavf_recv_scattered_pkts_flex_rxd()
|   1371  rxdp->read.hdr_addr = 0;  in iavf_recv_scattered_pkts_flex_rxd()
|   1481  rxdp = &rx_ring[rx_id];  in iavf_recv_scattered_pkts()
|   1500  rxd = *rxdp;  in iavf_recv_scattered_pkts()
|   1524  rxdp->read.hdr_addr = 0;  in iavf_recv_scattered_pkts()
|   [all …]
|
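The `rxd = *rxdp` hits in iavf_recv_pkts() and its scattered/flex variants show the scalar receive idiom: the ring entry is volatile, so the driver snapshots it into a local copy once, parses the copy, and only then writes the replacement buffer back through the volatile pointer. A condensed sketch of one iteration; the descriptor layout and DD bit position are illustrative stand-ins:

```c
#include <stdbool.h>
#include <stdint.h>

/* Simplified 32-byte descriptor: the NIC reuses the same memory for the
 * "read" format (driver -> NIC) and the write-back format (NIC -> driver). */
union rx_desc_sketch {
	struct {
		uint64_t pkt_addr;
		uint64_t hdr_addr;
		uint64_t rsvd1;
		uint64_t rsvd2;
	} read;
	struct {
		uint64_t qword0;
		uint64_t status_error_len;   /* bit 0: DD (assumed position) */
		uint64_t qword2;
		uint64_t qword3;
	} wb;
};

#define DD_BIT 0x1ull

/* Process one ring slot: returns false if the NIC has not written it back
 * yet; new_dma is the DMA address of the replacement buffer. */
static bool
recv_one_sketch(volatile union rx_desc_sketch *rxdp, uint64_t new_dma,
		uint64_t *out_status_len)
{
	union rx_desc_sketch rxd;

	if (!(rxdp->wb.status_error_len & DD_BIT))
		return false;              /* slot still owned by the NIC */

	rxd = *rxdp;                       /* snapshot the volatile descriptor */
	*out_status_len = rxd.wb.status_error_len;   /* parse from the copy */

	/* Hand the slot back to the NIC with a fresh buffer. */
	rxdp->read.hdr_addr = 0;
	rxdp->read.pkt_addr = new_dma;
	return true;
}
```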
| H A D | iavf_rxtx_vec_avx512.c |
|    21  volatile union iavf_rx_desc *rxdp;  in iavf_rxq_rearm() local
|    26  rxdp = rxq->rx_ring + rxq->rxrearm_start;  in iavf_rxq_rearm()
|   126  rxdp += IAVF_DESCS_PER_LOOP_AVX;  in iavf_rxq_rearm()
|   156  rte_prefetch0(rxdp);  in _iavf_recv_raw_pkts_vec_avx512()
|   333  _mm_load_si128((void *)(rxdp + 7));  in _iavf_recv_raw_pkts_vec_avx512()
|   336  _mm_load_si128((void *)(rxdp + 6));  in _iavf_recv_raw_pkts_vec_avx512()
|   339  _mm_load_si128((void *)(rxdp + 5));  in _iavf_recv_raw_pkts_vec_avx512()
|   342  _mm_load_si128((void *)(rxdp + 4));  in _iavf_recv_raw_pkts_vec_avx512()
|   345  _mm_load_si128((void *)(rxdp + 3));  in _iavf_recv_raw_pkts_vec_avx512()
|   649  rte_prefetch0(rxdp);  in _iavf_recv_raw_pkts_vec_avx512_flex_rxd()
|   [all …]
|
| H A D | iavf_rxtx_vec_sse.c |
|    25  volatile union iavf_rx_desc *rxdp;  in iavf_rxq_rearm() local
|    32  rxdp = rxq->rx_ring + rxq->rxrearm_start;  in iavf_rxq_rearm()
|    41  _mm_store_si128((__m128i *)&rxdp[i].read,  in iavf_rxq_rearm()
|   393  volatile union iavf_rx_desc *rxdp;  in _recv_raw_pkts_vec() local
|   424  rxdp = rxq->rx_ring + rxq->rx_tail;  in _recv_raw_pkts_vec()
|   426  rte_prefetch0(rxdp);  in _recv_raw_pkts_vec()
|   437  if (!(rxdp->wb.qword1.status_error_len &  in _recv_raw_pkts_vec()
|   484  rxdp += IAVF_VPMD_DESCS_PER_LOOP) {  in _recv_raw_pkts_vec()
|   642  volatile union iavf_rx_flex_desc *rxdp;  in _recv_raw_pkts_vec_flex_rxd() local
|   701  rte_prefetch0(rxdp);  in _recv_raw_pkts_vec_flex_rxd()
|   [all …]
|
| /f-stack/dpdk/drivers/net/i40e/ |
| H A D | i40e_rxtx_vec_avx2.c |
|    26  volatile union i40e_rx_desc *rxdp;  in i40e_rxq_rearm() local
|    29  rxdp = rxq->rx_ring + rxq->rxrearm_start;  in i40e_rxq_rearm()
|    41  _mm_store_si128((__m128i *)&rxdp[i].read,  in i40e_rxq_rearm()
|    77  _mm_store_si128((__m128i *)&rxdp++->read, dma_addr0);  in i40e_rxq_rearm()
|    78  _mm_store_si128((__m128i *)&rxdp++->read, dma_addr1);  in i40e_rxq_rearm()
|    86  i += 4, rxep += 4, rxdp += 4) {  in i40e_rxq_rearm()
|   235  rte_prefetch0(rxdp);  in _recv_raw_pkts_vec_avx2()
|   249  if (!(rxdp->wb.qword1.status_error_len &  in _recv_raw_pkts_vec_avx2()
|   374  rxdp += RTE_I40E_DESCS_PER_LOOP_AVX) {  in _recv_raw_pkts_vec_avx2()
|   388  raw_desc6_7 = _mm256_load_si256((void *)(rxdp + 6));  in _recv_raw_pkts_vec_avx2()
|   [all …]
|
| H A D | i40e_rxtx_vec_sse.c |
|    26  volatile union i40e_rx_desc *rxdp;  in i40e_rxq_rearm() local
|    33  rxdp = rxq->rx_ring + rxq->rxrearm_start;  in i40e_rxq_rearm()
|    44  _mm_store_si128((__m128i *)&rxdp[i].read,  in i40e_rxq_rearm()
|    75  _mm_store_si128((__m128i *)&rxdp++->read, dma_addr0);  in i40e_rxq_rearm()
|    76  _mm_store_si128((__m128i *)&rxdp++->read, dma_addr1);  in i40e_rxq_rearm()
|   356  volatile union i40e_rx_desc *rxdp;  in _recv_raw_pkts_vec() local
|   388  rxdp = rxq->rx_ring + rxq->rx_tail;  in _recv_raw_pkts_vec()
|   390  rte_prefetch0(rxdp);  in _recv_raw_pkts_vec()
|   401  if (!(rxdp->wb.qword1.status_error_len &  in _recv_raw_pkts_vec()
|   450  rxdp += RTE_I40E_DESCS_PER_LOOP) {  in _recv_raw_pkts_vec()
|   [all …]
|
| H A D | i40e_rxtx_vec_neon.c |
|    25  volatile union i40e_rx_desc *rxdp;  in i40e_rxq_rearm() local
|    32  rxdp = rxq->rx_ring + rxq->rxrearm_start;  in i40e_rxq_rearm()
|    42  vst1q_u64((uint64_t *)&rxdp[i].read, zero);  in i40e_rxq_rearm()
|    59  vst1q_u64((uint64_t *)&rxdp++->read, dma_addr0);  in i40e_rxq_rearm()
|    63  vst1q_u64((uint64_t *)&rxdp++->read, dma_addr1);  in i40e_rxq_rearm()
|   202  volatile union i40e_rx_desc *rxdp;  in _recv_raw_pkts_vec() local
|   240  rxdp = rxq->rx_ring + rxq->rx_tail;  in _recv_raw_pkts_vec()
|   242  rte_prefetch_non_temporal(rxdp);  in _recv_raw_pkts_vec()
|   253  if (!(rxdp->wb.qword1.status_error_len &  in _recv_raw_pkts_vec()
|   272  rxdp += RTE_I40E_DESCS_PER_LOOP) {  in _recv_raw_pkts_vec()
|   [all …]
|
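i40e_rxq_rearm() on Arm performs the same refill as the SSE variant but with vst1q_u64(): a {pkt_addr, hdr_addr} pair is built in a uint64x2_t and stored in one 16-byte write, and when mbuf allocation fails the descriptors are parked with a zero vector (the line 42 hit). A stand-alone sketch of both stores, again with a simplified "read" descriptor standing in for the real union:

```c
#include <stdint.h>
#include <arm_neon.h>

/* Simplified 16-byte "read" descriptor: pkt_addr then hdr_addr. */
struct rx_desc_read_sketch {
	uint64_t pkt_addr;
	uint64_t hdr_addr;
};

/* Refill one descriptor with a single 16-byte NEON store. */
static inline void
rearm_one_neon_sketch(struct rx_desc_read_sketch *rxdp, uint64_t pkt_dma_addr)
{
	/* lane 0 -> pkt_addr, lane 1 -> hdr_addr (kept 0: no header split) */
	uint64x2_t dma_addr = vsetq_lane_u64(pkt_dma_addr, vdupq_n_u64(0), 0);

	vst1q_u64((uint64_t *)rxdp, dma_addr);
}

/* Allocation-failure path: park the descriptor with a zero address pair so
 * the NIC never sees a stale buffer pointer (the vst1q_u64(..., zero) hit). */
static inline void
neutralise_one_neon_sketch(struct rx_desc_read_sketch *rxdp)
{
	vst1q_u64((uint64_t *)rxdp, vdupq_n_u64(0));
}
```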
| H A D | i40e_rxtx_vec_altivec.c |
|    25  volatile union i40e_rx_desc *rxdp;  in i40e_rxq_rearm() local
|    35  rxdp = rxq->rx_ring + rxq->rxrearm_start;  in i40e_rxq_rearm()
|    47  (vector unsigned long *)&rxdp[i].read);  in i40e_rxq_rearm()
|   202  volatile union i40e_rx_desc *rxdp;  in _recv_raw_pkts_vec() local
|   225  rxdp = rxq->rx_ring + rxq->rx_tail;  in _recv_raw_pkts_vec()
|   227  rte_prefetch0(rxdp);  in _recv_raw_pkts_vec()
|   238  if (!(rxdp->wb.qword1.status_error_len &  in _recv_raw_pkts_vec()
|   276  rxdp += RTE_I40E_DESCS_PER_LOOP) {  in _recv_raw_pkts_vec()
|   288  descs[3] = *(vector unsigned long *)(rxdp + 3);  in _recv_raw_pkts_vec()
|   297  descs[2] = *(vector unsigned long *)(rxdp + 2);  in _recv_raw_pkts_vec()
|   [all …]
|
| H A D | i40e_rxtx.c |
|   543  rxdp = &rxq->rx_ring[alloc_idx];  in i40e_rx_alloc_bufs()
|   557  rxdp[i].read.hdr_addr = 0;  in i40e_rx_alloc_bufs()
|   680  rxdp = &rx_ring[rx_id];  in i40e_recv_pkts()
|   696  rxd = *rxdp;  in i40e_recv_pkts()
|   719  rxdp->read.hdr_addr = 0;  in i40e_recv_pkts()
|   720  rxdp->read.pkt_addr = dma_addr;  in i40e_recv_pkts()
|   796  rxdp = &rx_ring[rx_id];  in i40e_recv_scattered_pkts()
|   812  rxd = *rxdp;  in i40e_recv_scattered_pkts()
|   838  rxdp->read.hdr_addr = 0;  in i40e_recv_scattered_pkts()
|   839  rxdp->read.pkt_addr = dma_addr;  in i40e_recv_scattered_pkts()
|   [all …]
|
| /f-stack/dpdk/drivers/net/ixgbe/ |
| H A D | ixgbe_rxtx_vec_neon.c |
|    21  volatile union ixgbe_adv_rx_desc *rxdp;  in ixgbe_rxq_rearm() local
|    29  rxdp = rxq->rx_ring + rxq->rxrearm_start;  in ixgbe_rxq_rearm()
|    39  vst1q_u64((uint64_t *)&rxdp[i].read,  in ixgbe_rxq_rearm()
|    63  vst1q_u64((uint64_t *)&rxdp++->read, dma_addr0);  in ixgbe_rxq_rearm()
|    68  vst1q_u64((uint64_t *)&rxdp++->read, dma_addr1);  in ixgbe_rxq_rearm()
|   209  volatile union ixgbe_adv_rx_desc *rxdp;  in _recv_raw_pkts_vec() local
|   231  rxdp = rxq->rx_ring + rxq->rx_tail;  in _recv_raw_pkts_vec()
|   233  rte_prefetch_non_temporal(rxdp);  in _recv_raw_pkts_vec()
|   244  if (!(rxdp->wb.upper.status_error &  in _recv_raw_pkts_vec()
|   261  rxdp += RTE_IXGBE_DESCS_PER_LOOP) {  in _recv_raw_pkts_vec()
|   [all …]
|
| H A D | ixgbe_rxtx_vec_sse.c |
|    24  volatile union ixgbe_adv_rx_desc *rxdp;  in ixgbe_rxq_rearm() local
|    33  rxdp = rxq->rx_ring + rxq->rxrearm_start;  in ixgbe_rxq_rearm()
|    44  _mm_store_si128((__m128i *)&rxdp[i].read,  in ixgbe_rxq_rearm()
|    79  _mm_store_si128((__m128i *)&rxdp++->read, dma_addr0);  in ixgbe_rxq_rearm()
|    80  _mm_store_si128((__m128i *)&rxdp++->read, dma_addr1);  in ixgbe_rxq_rearm()
|   316  volatile union ixgbe_adv_rx_desc *rxdp;  in _recv_raw_pkts_vec() local
|   351  rxdp = rxq->rx_ring + rxq->rx_tail;  in _recv_raw_pkts_vec()
|   353  rte_prefetch0(rxdp);  in _recv_raw_pkts_vec()
|   364  if (!(rxdp->wb.upper.status_error &  in _recv_raw_pkts_vec()
|   418  rxdp += RTE_IXGBE_DESCS_PER_LOOP) {  in _recv_raw_pkts_vec()
|   [all …]
|
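Beyond the stores themselves, every vectorised rearm shown here (ixgbe included) keeps the same bookkeeping around rxq->rxrearm_start: advance it by the refill burst, wrap at the ring end, and ring the hardware tail doorbell one slot behind the next rearm position. That bookkeeping is only hinted at by the rxrearm_start hits above, so the sketch below should be read as the usual DPDK vector-PMD convention rather than a quote of ixgbe_rxq_rearm(); write_tail stands in for the driver's register-write macro:

```c
#include <stdint.h>

/* Stand-in queue bookkeeping fields used by the vectorised rearm paths. */
struct rxq_sketch {
	uint16_t nb_rx_desc;     /* ring size */
	uint16_t rxrearm_start;  /* first descriptor awaiting a new buffer */
	uint16_t rxrearm_nb;     /* number of descriptors needing rearm */
};

/* After refilling n descriptors starting at rxrearm_start, advance the
 * bookkeeping, wrap at the end of the ring, and compute the value written
 * to the hardware tail register (one slot behind the next rearm position). */
static void
rearm_finish_sketch(struct rxq_sketch *rxq, uint16_t n,
		    void (*write_tail)(uint16_t tail))
{
	uint16_t rx_id;

	rxq->rxrearm_start += n;
	if (rxq->rxrearm_start >= rxq->nb_rx_desc)
		rxq->rxrearm_start = 0;
	rxq->rxrearm_nb -= n;

	rx_id = (rxq->rxrearm_start == 0) ?
		(uint16_t)(rxq->nb_rx_desc - 1) :
		(uint16_t)(rxq->rxrearm_start - 1);

	write_tail(rx_id);   /* tell the NIC the new last valid descriptor */
}
```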
| /f-stack/dpdk/drivers/net/fm10k/ |
| H A D | fm10k_rxtx_vec.c |
|   260  volatile union fm10k_rx_desc *rxdp;  in fm10k_rxq_rearm() local
|   271  rxdp = rxq->hw_ring + rxq->rxrearm_start;  in fm10k_rxq_rearm()
|   281  _mm_store_si128((__m128i *)&rxdp[i].q,  in fm10k_rxq_rearm()
|   327  _mm_store_si128((__m128i *)&rxdp++->q, dma_addr0);  in fm10k_rxq_rearm()
|   328  _mm_store_si128((__m128i *)&rxdp++->q, dma_addr1);  in fm10k_rxq_rearm()
|   381  volatile union fm10k_rx_desc *rxdp;  in fm10k_recv_raw_pkts_vec() local
|   396  rxdp = rxq->hw_ring + next_dd;  in fm10k_recv_raw_pkts_vec()
|   398  rte_prefetch0(rxdp);  in fm10k_recv_raw_pkts_vec()
|   409  if (!(rxdp->d.staterr & FM10K_RXD_STATUS_DD))  in fm10k_recv_raw_pkts_vec()
|   461  rxdp += RTE_FM10K_DESCS_PER_LOOP) {  in fm10k_recv_raw_pkts_vec()
|   [all …]
|
| H A D | fm10k_rxtx.c |
|   373  volatile union fm10k_rx_desc *rxdp;  in fm10k_dev_rx_queue_count() local
|   378  rxdp = &rxq->hw_ring[rxq->next_dd];  in fm10k_dev_rx_queue_count()
|   380  rxdp->w.status & rte_cpu_to_le_16(FM10K_RXD_STATUS_DD)) {  in fm10k_dev_rx_queue_count()
|   387  rxdp += FM10K_RXQ_SCAN_INTERVAL;  in fm10k_dev_rx_queue_count()
|   389  rxdp = &rxq->hw_ring[rxq->next_dd + desc -  in fm10k_dev_rx_queue_count()
|   399  volatile union fm10k_rx_desc *rxdp;  in fm10k_dev_rx_descriptor_done() local
|   413  rxdp = &rxq->hw_ring[desc];  in fm10k_dev_rx_descriptor_done()
|   415  ret = !!(rxdp->w.status &  in fm10k_dev_rx_descriptor_done()
|   424  volatile union fm10k_rx_desc *rxdp;  in fm10k_dev_rx_descriptor_status() local
|   453  rxdp = &rxq->hw_ring[desc];  in fm10k_dev_rx_descriptor_status()
|   [all …]
|
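fm10k_dev_rx_queue_count() (hits 373-389) estimates how many descriptors are ready by walking the ring from next_dd in steps of FM10K_RXQ_SCAN_INTERVAL and stopping at the first sampled descriptor whose DD bit is clear, wrapping when the scan runs past the end of the ring. A simplified sketch of that scan; the interval, bit value and descriptor layout are stand-ins:

```c
#include <stdint.h>

#define SCAN_INTERVAL 4        /* stand-in for FM10K_RXQ_SCAN_INTERVAL */
#define STATUS_DD     0x0001   /* stand-in for the descriptor-done bit */

/* 16-byte write-back descriptor reduced to the status word we sample. */
struct rx_desc_w_sketch {
	uint16_t status;           /* DD bit set by the NIC on write-back */
	uint16_t pad[7];
};

struct rxq_count_sketch {
	const struct rx_desc_w_sketch *hw_ring;
	uint16_t next_dd;          /* next descriptor expected to complete */
	uint16_t nb_desc;
};

/* Count completed descriptors by sampling every SCAN_INTERVAL-th entry and
 * stopping at the first one the NIC has not written back; the result is an
 * approximation rounded to the scan interval, which is acceptable for
 * rte_eth_rx_queue_count(). */
static uint32_t
rx_queue_count_sketch(const struct rxq_count_sketch *rxq, uint32_t max_scan)
{
	const struct rx_desc_w_sketch *rxdp = &rxq->hw_ring[rxq->next_dd];
	uint32_t desc = 0;

	while (desc < max_scan && (rxdp->status & STATUS_DD)) {
		desc += SCAN_INTERVAL;
		rxdp += SCAN_INTERVAL;
		/* wrap back to the start of the ring if the scan runs past it */
		if (rxq->next_dd + desc >= rxq->nb_desc)
			rxdp = &rxq->hw_ring[rxq->next_dd + desc - rxq->nb_desc];
	}
	return desc;
}
```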
| /f-stack/dpdk/drivers/net/e1000/ |
| H A D | em_rxtx.c |
|   711  rxdp = &rx_ring[rx_id];  in eth_em_recv_pkts()
|   712  status = rxdp->status;  in eth_em_recv_pkts()
|   715  rxd = *rxdp;  in eth_em_recv_pkts()
|   784  rxdp->buffer_addr = dma_addr;  in eth_em_recv_pkts()
|   785  rxdp->status = 0;  in eth_em_recv_pkts()
|   895  rxdp = &rx_ring[rx_id];  in eth_em_recv_scattered_pkts()
|   896  status = rxdp->status;  in eth_em_recv_scattered_pkts()
|   899  rxd = *rxdp;  in eth_em_recv_scattered_pkts()
|   964  rxdp->buffer_addr = dma;  in eth_em_recv_scattered_pkts()
|   965  rxdp->status = 0;  in eth_em_recv_scattered_pkts()
|   [all …]
|
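eth_em_recv_pkts() works on the oldest descriptor format in this listing: a single status byte carries the DD bit, and recycling a slot is just rewriting buffer_addr and clearing status (hits 711-785). A compact sketch of one iteration with a stand-in layout; the DD value mirrors the usual e1000 convention but is treated here as an assumption:

```c
#include <stdbool.h>
#include <stdint.h>

#define STAT_DD 0x01   /* stand-in for E1000_RXD_STAT_DD */

/* Simplified legacy e1000-style RX descriptor. */
struct em_rx_desc_sketch {
	uint64_t buffer_addr;   /* DMA address of the receive buffer */
	uint16_t length;
	uint16_t csum;
	uint8_t  status;        /* DD bit set by the NIC when the slot is filled */
	uint8_t  errors;
	uint16_t special;
};

/* Returns false if the slot has not been filled yet; otherwise reports the
 * received length and recycles the slot with a fresh buffer. */
static bool
em_recv_one_sketch(volatile struct em_rx_desc_sketch *rxdp,
		   uint64_t new_dma, uint16_t *out_len)
{
	struct em_rx_desc_sketch rxd;
	uint8_t status = rxdp->status;

	if (!(status & STAT_DD))
		return false;

	rxd = *rxdp;               /* snapshot before the slot is reused */
	*out_len = rxd.length;

	/* Give the slot back to the NIC: new buffer, status cleared so the
	 * DD check above cannot fire again until the NIC writes it back. */
	rxdp->buffer_addr = new_dma;
	rxdp->status = 0;
	return true;
}
```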
| H A D | igb_rxtx.c |
|    839  rxdp = &rx_ring[rx_id];  in eth_igb_recv_pkts()
|    843  rxd = *rxdp;  in eth_igb_recv_pkts()
|    909  rxdp->read.hdr_addr = 0;  in eth_igb_recv_pkts()
|    910  rxdp->read.pkt_addr = dma_addr;  in eth_igb_recv_pkts()
|   1034  rxdp = &rx_ring[rx_id];  in eth_igb_recv_scattered_pkts()
|   1038  rxd = *rxdp;  in eth_igb_recv_scattered_pkts()
|   1103  rxdp->read.pkt_addr = dma;  in eth_igb_recv_scattered_pkts()
|   1104  rxdp->read.hdr_addr = 0;  in eth_igb_recv_scattered_pkts()
|   1780  rxdp = &(rxq->rx_ring[rxq->rx_tail]);  in eth_igb_rx_queue_count()
|   1785  rxdp += IGB_RXQ_SCAN_INTERVAL;  in eth_igb_rx_queue_count()
|   [all …]
|
| /f-stack/dpdk/drivers/net/igc/ |
| H A D | igc_txrx.c |
|   373  rxdp = &rx_ring[rx_id];  in igc_recv_pkts()
|   377  rxd = *rxdp;  in igc_recv_pkts()
|   446  rxdp->read.hdr_addr = 0;  in igc_recv_pkts()
|   447  rxdp->read.pkt_addr =  in igc_recv_pkts()
|   521  rxdp = &rx_ring[rx_id];  in igc_recv_scattered_pkts()
|   525  rxd = *rxdp;  in igc_recv_scattered_pkts()
|   590  rxdp->read.hdr_addr = 0;  in igc_recv_scattered_pkts()
|   591  rxdp->read.pkt_addr =  in igc_recv_scattered_pkts()
|   746  rxdp += IGC_RXQ_SCAN_INTERVAL;  in eth_igc_rx_queue_count()
|   753  rxdp += IGC_RXQ_SCAN_INTERVAL;  in eth_igc_rx_queue_count()
|   [all …]
|
| /f-stack/freebsd/contrib/dev/ath/ath_hal/ar9300/ |
| H A D | ar9300_recv.c |
|   44  ar9300_set_rx_dp(struct ath_hal *ah, u_int32_t rxdp, HAL_RX_QUEUE qtype)  in ar9300_set_rx_dp() argument
|   49  OS_REG_WRITE(ah, AR_HP_RXDP, rxdp);  in ar9300_set_rx_dp()
|   51  OS_REG_WRITE(ah, AR_LP_RXDP, rxdp);  in ar9300_set_rx_dp()
|
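ar9300_set_rx_dp() is the one non-DPDK hit: here `rxdp` is the value programmed into the chip's RX descriptor pointer register, and the HAL picks the high-priority or low-priority queue's RXDP register based on the queue type. A sketch mirroring the three lines shown; the register offsets and HAL types below are placeholders, not the real AR9300 definitions:

```c
#include <stdint.h>

/* Placeholder register offsets and HAL types; the real values live in the
 * ar9300 headers and are not reproduced here. */
#define AR_HP_RXDP_SKETCH 0x0000
#define AR_LP_RXDP_SKETCH 0x0004

typedef enum { RX_QUEUE_HP, RX_QUEUE_LP } rx_queue_sketch_t;

struct ath_hal_sketch {
	volatile uint32_t *regs;   /* memory-mapped register window */
};

static inline void
os_reg_write_sketch(struct ath_hal_sketch *ah, uint32_t reg, uint32_t val)
{
	ah->regs[reg / sizeof(uint32_t)] = val;
}

/* Point the MAC's RX DMA at a new descriptor chain: the high-priority and
 * low-priority RX queues each have their own RXDP register. */
static void
set_rx_dp_sketch(struct ath_hal_sketch *ah, uint32_t rxdp, rx_queue_sketch_t q)
{
	if (q == RX_QUEUE_HP)
		os_reg_write_sketch(ah, AR_HP_RXDP_SKETCH, rxdp);
	else
		os_reg_write_sketch(ah, AR_LP_RXDP_SKETCH, rxdp);
}
```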
| /f-stack/dpdk/drivers/net/txgbe/ |
| H A D | txgbe_rxtx.c |
|   1133  status = rxdp->qw1.lo.status;  in txgbe_rx_scan_hw_ring()
|   1226  rxdp = &rxq->rx_ring[alloc_idx];  in txgbe_rx_alloc_bufs()
|   1399  rxdp = &rx_ring[rx_id];  in txgbe_recv_pkts()
|   1400  staterr = rxdp->qw1.lo.status;  in txgbe_recv_pkts()
|   1403  rxd = *rxdp;  in txgbe_recv_pkts()
|   1468  TXGBE_RXD_HDRADDR(rxdp, 0);  in txgbe_recv_pkts()
|   1675  rxdp = &rx_ring[rx_id];  in txgbe_recv_pkts_lro()
|   1681  rxd = *rxdp;  in txgbe_recv_pkts_lro()
|   1749  TXGBE_RXD_HDRADDR(rxdp, 0);  in txgbe_recv_pkts_lro()
|   1750  TXGBE_RXD_PKTADDR(rxdp, dma);  in txgbe_recv_pkts_lro()
|   [all …]
|