
Searched refs: read (Results 1 – 25 of 221), sorted by relevance


/dpdk/drivers/net/avp/
rte_avp_fifo.h
41 fifo->read = 0; in avp_fifo_init()
55 unsigned int fifo_read = fifo->read; in avp_fifo_put()
78 unsigned int new_read = fifo->read; in avp_fifo_get()
92 fifo->read = new_read; in avp_fifo_get()
102 return (fifo->len + fifo->write - fifo->read) & (fifo->len - 1); in avp_fifo_count()
111 return (fifo->read - fifo->write - 1) & (fifo->len - 1); in avp_fifo_free_count()
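The avp_fifo_count() and avp_fifo_free_count() matches above use the classic power-of-two ring arithmetic: with len a power of two, the occupied and free slot counts reduce to a subtraction and a mask. A minimal standalone sketch of that arithmetic, with hypothetical struct and function names rather than the AVP driver's own:

    #include <stdint.h>

    struct fifo {
        unsigned int write;   /* next slot to write                */
        unsigned int read;    /* next slot to read                 */
        unsigned int len;     /* ring size, must be a power of two */
    };

    /* Entries currently stored: (len + write - read), masked to the ring size. */
    static inline unsigned int fifo_count(const struct fifo *f)
    {
        return (f->len + f->write - f->read) & (f->len - 1);
    }

    /* Free slots: one slot stays empty so a full ring is distinguishable
     * from an empty one, hence the extra "- 1". */
    static inline unsigned int fifo_free_count(const struct fifo *f)
    {
        return (f->read - f->write - 1) & (f->len - 1);
    }
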
/dpdk/drivers/raw/ioat/
rte_ioat_rawdev_fns.h
120 unsigned short read = ioat->next_read; in __ioat_burst_capacity() local
122 unsigned short space = size - (write - read); in __ioat_burst_capacity()
133 unsigned short read = ioat->next_read; in __ioat_write_desc() local
136 unsigned short space = mask + read - write; in __ioat_write_desc()
245 unsigned short read = ioat->next_read; in __ioat_completed_ops() local
251 count = (end_read - (read & mask)) & mask; in __ioat_completed_ops()
259 read += count; in __ioat_completed_ops()
266 for (; i < count - 1; i += 2, read += 2) { in __ioat_completed_ops()
275 for (; i < count; i++, read++) { in __ioat_completed_ops()
276 uintptr_t *hdls = (uintptr_t *)&ioat->hdls[read & mask]; in __ioat_completed_ops()
[all …]
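The __ioat_burst_capacity() and __ioat_completed_ops() hits follow a slightly different convention: next_read and next_write are free-running 16-bit counters that are only masked when used as array indexes, so unsigned wraparound keeps the difference write - read equal to the number of descriptors in flight. A hedged sketch of that convention with simplified names (not the driver's real structures):

    #include <stdint.h>

    #define RING_SIZE 1024u                 /* power of two, <= 65536 */

    struct ioat_like_ring {
        unsigned short next_write;          /* free-running producer counter */
        unsigned short next_read;           /* free-running consumer counter */
    };

    /* How many more descriptors can be submitted right now.
     * Unsigned subtraction handles the counters wrapping past 65535. */
    static inline unsigned short burst_capacity(const struct ioat_like_ring *r)
    {
        unsigned short used = (unsigned short)(r->next_write - r->next_read);
        return (unsigned short)(RING_SIZE - used);
    }

    /* When indexing the descriptor array, mask the free-running counter. */
    static inline unsigned int slot_of(unsigned short counter)
    {
        return counter & (RING_SIZE - 1);
    }
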
/dpdk/lib/kni/
rte_kni_fifo.h
48 fifo->read = 0; in kni_fifo_init()
62 unsigned fifo_read = __KNI_LOAD_ACQUIRE(&fifo->read); in kni_fifo_put()
83 unsigned new_read = fifo->read; in kni_fifo_get()
93 __KNI_STORE_RELEASE(&fifo->read, new_read); in kni_fifo_get()
104 unsigned fifo_read = __KNI_LOAD_ACQUIRE(&fifo->read); in kni_fifo_count()
115 uint32_t fifo_read = __KNI_LOAD_ACQUIRE(&fifo->read); in kni_fifo_free_count()
rte_kni_common.h
67 unsigned read; /**< Next position to be read */ member
70 volatile unsigned read; /**< Next position to be read */
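The KNI FIFO is shared between the userspace library and the kernel module, so the indexes are published with acquire/release semantics: __KNI_LOAD_ACQUIRE/__KNI_STORE_RELEASE above on the library side, and smp_load_acquire/smp_store_release in the kernel module further down. A minimal consumer-side sketch of the same single-producer/single-consumer pattern using the GCC/Clang __atomic builtins (hypothetical names, not the KNI structures):

    #include <stdint.h>

    struct spsc_fifo {
        unsigned int write;      /* owned by the producer, kept in [0, len) */
        unsigned int read;       /* owned by the consumer, kept in [0, len) */
        unsigned int len;        /* power of two */
        void *buffer[];
    };

    static unsigned int fifo_get(struct spsc_fifo *f, void **data, unsigned int num)
    {
        /* Acquire pairs with the producer's release-store of f->write, so the
         * slot contents written before it are visible here. */
        unsigned int write = __atomic_load_n(&f->write, __ATOMIC_ACQUIRE);
        unsigned int read  = f->read;
        unsigned int avail = (f->len + write - read) & (f->len - 1);
        unsigned int n = num < avail ? num : avail;
        unsigned int i;

        for (i = 0; i < n; i++)
            data[i] = f->buffer[(read + i) & (f->len - 1)];

        /* Release so the producer only reuses slots after we are done with them. */
        __atomic_store_n(&f->read, (read + n) & (f->len - 1), __ATOMIC_RELEASE);
        return n;
    }
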
/dpdk/drivers/event/sw/
event_ring.h
74 const uint32_t read = r->read_idx; in rob_ring_enqueue() local
76 const uint32_t space = read + size - write; in rob_ring_enqueue()
88 uint32_t read = r->read_idx; in rob_ring_dequeue() local
90 const uint32_t items = write - read; in rob_ring_dequeue()
93 *re = r->ring[read & mask]; in rob_ring_dequeue()
/dpdk/kernel/linux/kni/
kni_fifo.h
27 uint32_t fifo_read = smp_load_acquire(&fifo->read); in kni_fifo_put()
50 uint32_t new_read = fifo->read; in kni_fifo_get()
60 smp_store_release(&fifo->read, new_read); in kni_fifo_get()
72 uint32_t fifo_read = smp_load_acquire(&fifo->read); in kni_fifo_count()
83 uint32_t fifo_read = smp_load_acquire(&fifo->read); in kni_fifo_free_count()
/dpdk/drivers/net/e1000/base/
e1000_nvm.c
23 nvm->ops.read = e1000_null_read_nvm; in e1000_init_nvm_ops_generic()
822 ret_val = hw->nvm.ops.read(hw, pba_ptr, 1, &length); in e1000_read_pba_string_generic()
896 ret_val = hw->nvm.ops.read(hw, pba_ptr, 1, &length); in e1000_read_pba_length_generic()
1184 ret_val = hw->nvm.ops.read(hw, i, 1, &nvm_data); in e1000_validate_nvm_checksum_generic()
1217 ret_val = hw->nvm.ops.read(hw, i, 1, &nvm_data); in e1000_update_nvm_checksum_generic()
1283 hw->nvm.ops.read(hw, NVM_VERSION, 1, &fw_version); in e1000_get_fw_version()
1305 hw->nvm.ops.read(hw, (NVM_COMB_VER_OFF + comb_offset in e1000_get_fw_version()
1307 hw->nvm.ops.read(hw, (NVM_COMB_VER_OFF + comb_offset), in e1000_get_fw_version()
1330 hw->nvm.ops.read(hw, NVM_VERSION, 1, &fw_version); in e1000_get_fw_version()
1352 hw->nvm.ops.read(hw, NVM_ETRACK_WORD, 1, &eeprom_verl); in e1000_get_fw_version()
[all …]
e1000_mbx.c
59 if (mbx->ops.read) in e1000_read_mbx()
60 ret_val = mbx->ops.read(hw, msg, size, mbx_id); in e1000_read_mbx()
229 if (!mbx->ops.read) in e1000_read_posted_mbx()
236 ret_val = mbx->ops.read(hw, msg, size, mbx_id); in e1000_read_posted_mbx()
282 mbx->ops.read = e1000_null_mbx_transact; in e1000_init_mbx_ops_generic()
523 mbx->ops.read = e1000_read_mbx_vf; in e1000_init_mbx_params_vf()
749 mbx->ops.read = e1000_read_mbx_pf; in e1000_init_mbx_params_pf()
e1000_base.h
21 } read; member
89 } read; member
e1000_vf.h
58 } read; member
95 } read; member
200 s32 (*read)(struct e1000_hw *, u32 *, u16, u16); member
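The e1000 matches show the base code's ops-table pattern: nvm.ops.read and mbx.ops.read are function pointers, initialized to a harmless null stub and later overridden with the device-specific implementation, and callers check and dispatch through the pointer. A generic hedged sketch of that pattern with made-up type names (not the e1000 definitions):

    #include <stdint.h>
    #include <stddef.h>

    struct hw;                               /* opaque device handle */

    struct nvm_ops {
        /* read `words` 16-bit words starting at `offset` into `data` */
        int (*read)(struct hw *hw, uint16_t offset, uint16_t words, uint16_t *data);
    };

    /* Harmless default so an uninitialized ops table never dereferences NULL. */
    static int null_read_nvm(struct hw *hw, uint16_t offset, uint16_t words,
                             uint16_t *data)
    {
        (void)hw; (void)offset; (void)words; (void)data;
        return 0;
    }

    static void init_nvm_ops(struct nvm_ops *ops)
    {
        ops->read = null_read_nvm;           /* later replaced per device family */
    }

    static int read_nvm(struct hw *hw, struct nvm_ops *ops, uint16_t offset,
                        uint16_t words, uint16_t *data)
    {
        if (ops->read == NULL)
            return -1;                       /* no backend registered */
        return ops->read(hw, offset, words, data);
    }
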
/dpdk/drivers/net/igc/base/
igc_nvm.c
23 nvm->ops.read = igc_null_read_nvm; in igc_init_nvm_ops_generic()
813 ret_val = hw->nvm.ops.read(hw, pba_ptr, 1, &length); in igc_read_pba_string_generic()
887 ret_val = hw->nvm.ops.read(hw, pba_ptr, 1, &length); in igc_read_pba_length_generic()
1175 ret_val = hw->nvm.ops.read(hw, i, 1, &nvm_data); in igc_validate_nvm_checksum_generic()
1208 ret_val = hw->nvm.ops.read(hw, i, 1, &nvm_data); in igc_update_nvm_checksum_generic()
1264 hw->nvm.ops.read(hw, NVM_ETRACK_HIWORD, 1, &etrack_test); in igc_get_fw_version()
1266 hw->nvm.ops.read(hw, NVM_COMB_VER_PTR, 1, &comb_offset); in igc_get_fw_version()
1270 hw->nvm.ops.read(hw, NVM_COMB_VER_OFF + comb_offset, in igc_get_fw_version()
1293 hw->nvm.ops.read(hw, NVM_VERSION, 1, &fw_version); in igc_get_fw_version()
1314 hw->nvm.ops.read(hw, NVM_ETRACK_WORD, 1, &eeprom_verl); in igc_get_fw_version()
[all …]
igc_base.h
21 } read; member
89 } read; member
/dpdk/drivers/net/i40e/
i40e_rxtx_common_avx.h
39 _mm_store_si128((__m128i *)&rxdp[i].read, in i40e_rxq_rearm_common()
75 _mm_store_si128((__m128i *)&rxdp++->read, dma_addr0); in i40e_rxq_rearm_common()
76 _mm_store_si128((__m128i *)&rxdp++->read, dma_addr1); in i40e_rxq_rearm_common()
147 _mm512_store_si512((__m512i *)&rxdp->read, dma_addr0_3); in i40e_rxq_rearm_common()
148 _mm512_store_si512((__m512i *)&(rxdp + 4)->read, dma_addr4_7); in i40e_rxq_rearm_common()
193 _mm256_store_si256((__m256i *)&rxdp->read, dma_addr0_1); in i40e_rxq_rearm_common()
194 _mm256_store_si256((__m256i *)&(rxdp + 2)->read, dma_addr2_3); in i40e_rxq_rearm_common()
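The i40e rearm hits store freshly allocated buffer addresses into the "read" (submit-to-hardware) layout of each RX descriptor, packing one, two or four 16-byte descriptors per 128/256/512-bit store. A scalar sketch of what each of those stores does to a single descriptor, with simplified stand-in types (the real code assembles the address pair in vector registers and uses _mm_store_si128 and friends):

    #include <stdint.h>

    /* 16-byte "read" format: the addresses the NIC should DMA the packet into. */
    union rx_desc {
        struct {
            uint64_t pkt_addr;   /* packet buffer IOVA */
            uint64_t hdr_addr;   /* header buffer IOVA (unused here) */
        } read;
        /* ... the writeback format reuses the same 16 bytes ... */
    };

    struct mbuf_like {
        uint64_t buf_iova;       /* bus address of the data buffer */
        uint16_t data_off;       /* headroom before the packet data */
    };

    static void rearm_one(volatile union rx_desc *rxdp, const struct mbuf_like *mb)
    {
        uint64_t dma = mb->buf_iova + mb->data_off;
        rxdp->read.hdr_addr = 0;
        rxdp->read.pkt_addr = dma;
    }
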
/dpdk/drivers/net/ice/
ice_rxtx_common_avx.h
36 _mm_store_si128((__m128i *)&rxdp[i].read, in ice_rxq_rearm_common()
72 _mm_store_si128((__m128i *)&rxdp++->read, dma_addr0); in ice_rxq_rearm_common()
73 _mm_store_si128((__m128i *)&rxdp++->read, dma_addr1); in ice_rxq_rearm_common()
144 _mm512_store_si512((__m512i *)&rxdp->read, dma_addr0_3); in ice_rxq_rearm_common()
145 _mm512_store_si512((__m512i *)&(rxdp + 4)->read, dma_addr4_7); in ice_rxq_rearm_common()
192 _mm256_store_si256((__m256i *)&rxdp->read, dma_addr0_1); in ice_rxq_rearm_common()
193 _mm256_store_si256((__m256i *)&(rxdp + 2)->read, dma_addr2_3); in ice_rxq_rearm_common()
/dpdk/usertools/
cpu_layout.py
11 max_cpus = int(fd.read())
18 core = int(fd.read())
21 socket = int(fd.read())
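cpu_layout.py builds the core/socket map by reading small integer files from sysfs. The equivalent in C is a short helper around fopen()/fscanf(); a hedged sketch for one CPU (standard Linux sysfs paths, minimal error handling):

    #include <stdio.h>

    /* Read a single integer from a sysfs file; returns -1 on failure. */
    static long read_sysfs_long(const char *path)
    {
        long val = -1;
        FILE *f = fopen(path, "r");
        if (f == NULL)
            return -1;
        if (fscanf(f, "%ld", &val) != 1)
            val = -1;
        fclose(f);
        return val;
    }

    int main(void)
    {
        long core = read_sysfs_long(
            "/sys/devices/system/cpu/cpu0/topology/core_id");
        long socket = read_sysfs_long(
            "/sys/devices/system/cpu/cpu0/topology/physical_package_id");
        printf("cpu 0: core %ld, socket %ld\n", core, socket);
        return 0;
    }
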
/dpdk/drivers/dma/ioat/
ioat_dmadev.c
252 const unsigned short read = ioat->next_read; in __write_desc() local
424 const unsigned short read = ioat->next_read; in ioat_completed() local
436 count = (last_completed + 1 - read) & mask; in ioat_completed()
443 ioat->next_read = read + count; in ioat_completed()
449 ioat->next_read = read + count + 1; in ioat_completed()
474 const unsigned short read = ioat->next_read; in ioat_completed_status() local
480 count = (last_completed + 1 - read) & mask; in ioat_completed_status()
490 ioat->next_read = read + count; in ioat_completed_status()
495 ioat->next_read = read + count; in ioat_completed_status()
525 unsigned short read = ioat->next_read; in ioat_burst_capacity() local
[all …]
/dpdk/lib/eal/unix/
eal_unix_thread.c
27 n = read(w2m, &c, 1); in eal_thread_wake_worker()
43 n = read(m2w, &c, 1); in eal_thread_wait_command()
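eal_unix_thread.c uses a plain blocking read(2) on a pipe as the main/worker handshake: one side writes a single command byte, the other sleeps in read() until it arrives. A minimal sketch of that handshake (file descriptors assumed to come from pipe(); EINTR handling omitted):

    #include <unistd.h>

    /* Main side: wake the peer by writing one command byte into the pipe. */
    static int send_command(int wr_fd, char cmd)
    {
        return write(wr_fd, &cmd, 1) == 1 ? 0 : -1;
    }

    /* Worker side: block in read() until a command byte arrives. */
    static int wait_command(int rd_fd, char *cmd)
    {
        return read(rd_fd, cmd, 1) == 1 ? 0 : -1;
    }
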
/dpdk/drivers/net/iavf/
iavf_rxtx.h
87 } read; member
113 } read; member
708 rxq->queue_id, rx_id, rx_desc->read.pkt_addr, in iavf_dump_rx_descriptor()
709 rx_desc->read.hdr_addr); in iavf_dump_rx_descriptor()
715 rx_id, rx_desc->read.pkt_addr, rx_desc->read.hdr_addr, in iavf_dump_rx_descriptor()
716 rx_desc->read.rsvd1, rx_desc->read.rsvd2); in iavf_dump_rx_descriptor()
iavf_rxtx_vec_common.h
403 _mm_store_si128((__m128i *)&rxdp[i].read, in iavf_rxq_rearm_common()
439 _mm_store_si128((__m128i *)&rxdp++->read, dma_addr0); in iavf_rxq_rearm_common()
440 _mm_store_si128((__m128i *)&rxdp++->read, dma_addr1); in iavf_rxq_rearm_common()
511 _mm512_store_si512((__m512i *)&rxdp->read, dma_addr0_3); in iavf_rxq_rearm_common()
512 _mm512_store_si512((__m512i *)&(rxdp + 4)->read, dma_addr4_7); in iavf_rxq_rearm_common()
559 _mm256_store_si256((__m256i *)&rxdp->read, dma_addr0_1); in iavf_rxq_rearm_common()
560 _mm256_store_si256((__m256i *)&(rxdp + 2)->read, dma_addr2_3); in iavf_rxq_rearm_common()
/dpdk/drivers/net/ixgbe/base/
ixgbe_common.c
51 eeprom->ops.read = ixgbe_read_eerd_generic; in ixgbe_init_ops_generic()
54 eeprom->ops.read = ixgbe_read_eeprom_bit_bang_generic; in ixgbe_init_ops_generic()
2185 if (hw->eeprom.ops.read(hw, i, &word)) { in ixgbe_calc_eeprom_checksum_generic()
2194 if (hw->eeprom.ops.read(hw, i, &pointer)) { in ixgbe_calc_eeprom_checksum_generic()
2212 if (hw->eeprom.ops.read(hw, j, &word)) { in ixgbe_calc_eeprom_checksum_generic()
2246 status = hw->eeprom.ops.read(hw, 0, &checksum); in ixgbe_validate_eeprom_checksum_generic()
2292 status = hw->eeprom.ops.read(hw, 0, &checksum); in ixgbe_update_eeprom_checksum_generic()
4265 if (hw->eeprom.ops.read(hw, offset, &caps)) in ixgbe_get_wwn_prefix_generic()
4309 status = hw->eeprom.ops.read(hw, offset, &caps); in ixgbe_get_fcoe_boot_status_generic()
4939 if (hw->eeprom.ops.read(hw, offset, &ets_cfg)) in ixgbe_init_thermal_sensor_thresh_generic()
[all …]
ixgbe_mbx.c
28 if (mbx->ops.read) in ixgbe_read_mbx()
29 ret_val = mbx->ops.read(hw, msg, size, mbx_id); in ixgbe_read_mbx()
201 if (!mbx->ops.read) in ixgbe_read_posted_mbx()
208 ret_val = mbx->ops.read(hw, msg, size, mbx_id); in ixgbe_read_posted_mbx()
482 mbx->ops.read = ixgbe_read_mbx_vf; in ixgbe_init_mbx_params_vf()
727 mbx->ops.read = ixgbe_read_mbx_pf; in ixgbe_init_mbx_params_pf()
/dpdk/doc/guides/rawdevs/
ntb.rst
13 allocation for the peer to access and read/write allocated memory from peer.
93 Since read/write remote system's memory are through PCI bus, remote read
95 based on ntb ring should avoid remote read. The ring layout for ntb is
142 So in this way, only remote write happens and remote read can be avoid
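The ntb.rst lines quoted above make the point that reads across the NTB/PCI link are far more expensive than writes, so the ring is laid out so each side only ever writes to the peer's memory and reads its own local copy of the indexes. A conceptual, hedged sketch of that idea with hypothetical fields (not the ntb rawdev's actual layout; copy wrap-around and ordering barriers omitted):

    #include <stdint.h>
    #include <string.h>

    struct producer {
        volatile uint32_t *local_cons_idx;  /* peer mirrors its read index here (local memory) */
        uint8_t *remote_buf;                /* mapped peer memory for payload: write-only      */
        volatile uint32_t *remote_prod_idx; /* mapped peer memory where we publish our index   */
        uint32_t head;                      /* local free-running producer counter (bytes)     */
        uint32_t size;                      /* ring size in bytes, power of two                */
    };

    static int produce(struct producer *p, const void *data, uint32_t len)
    {
        /* Only local memory is read: the consumer index the peer wrote over to us. */
        uint32_t cons = *p->local_cons_idx;
        if (p->size - (p->head - cons) < len)
            return -1;                      /* ring full; learned without any remote read */

        /* All remote accesses are writes: payload first, then the published index. */
        memcpy(p->remote_buf + (p->head & (p->size - 1)), data, len);
        p->head += len;
        *p->remote_prod_idx = p->head;      /* the peer reads this from its own local memory */
        return 0;
    }
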
/dpdk/drivers/net/nfp/nfpcore/
nfp_cpp_pcie_ops.c
396 int read; member
420 priv->width.read = PUSH_WIDTH(pp); in nfp6000_area_init()
423 if (priv->width.read > 0 && in nfp6000_area_init()
424 priv->width.write > 0 && priv->width.read != priv->width.write) in nfp6000_area_init()
427 if (priv->width.read > 0) in nfp6000_area_init()
428 priv->width.bar = priv->width.read; in nfp6000_area_init()
524 width = priv->width.read; in nfp6000_area_read()
/dpdk/drivers/raw/ifpga/base/
opae_i2c.c
292 bool read, finish = false; in altera_handle_i2c_status() local
295 read = (dev->msg->flags & I2C_M_RD) != 0; in altera_handle_i2c_status()
307 } else if (read && (status & ALTERA_I2C_ISR_RXOF)) { in altera_handle_i2c_status()
314 } else if (read && (status & ALTERA_I2C_ISR_RXRDY)) { in altera_handle_i2c_status()
319 } else if (!read && (status & ALTERA_I2C_ISR_TXRDY)) { in altera_handle_i2c_status()
/dpdk/doc/guides/sample_app_ug/
l2_forward_job_stats.rst
213 * stats_read_pending and lock are used during job stats read phase.
270 …lled to mark loop end - no other jobs are ready to execute. By this time stats are ready to be read
271 and if stats_read_pending is set, loop breaks allowing stats to be read.
273 …dle stats counter). Its only purpose is monitoring if any job is ready or stats job read is pending
279 The main task of l2fwd_fwd_job() function is to read ingress packets from the RX queue of particula…
288 Packets are read in a burst of size MAX_PKT_BURST.
294 After first read second try is issued.
299 :end-before: >8 End of read second try.
302 This second read is important to give job stats library a feedback how many packets was processed.
310 To maximize performance exactly MAX_PKT_BURST is expected (the target value) to be read for each l2…
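The l2_forward_job_stats excerpt describes the RX job: read a burst of up to MAX_PKT_BURST packets, then issue a second read so the job stats library gets accurate feedback on how much work was really available. A hedged sketch of that double-read pattern around rte_eth_rx_burst() (the forwarding helper and the condition for the second read are assumptions; jobstats bookkeeping omitted):

    #include <rte_ethdev.h>
    #include <rte_mbuf.h>

    #define MAX_PKT_BURST 32

    /* Hypothetical stand-in for l2fwd's per-packet forwarding work. */
    static void forward_burst(struct rte_mbuf **pkts, uint16_t n)
    {
        uint16_t i;
        for (i = 0; i < n; i++)
            rte_pktmbuf_free(pkts[i]);      /* placeholder: the real app would TX these */
    }

    static uint16_t rx_job(uint16_t port_id, uint16_t queue_id)
    {
        struct rte_mbuf *pkts[MAX_PKT_BURST];
        uint16_t total = 0;

        /* First read: up to the target burst size. */
        uint16_t nb = rte_eth_rx_burst(port_id, queue_id, pkts, MAX_PKT_BURST);
        forward_burst(pkts, nb);
        total += nb;

        /* Second try: assumed to be issued only when the first burst came back
         * full, i.e. more packets were probably waiting in the RX queue. */
        if (nb == MAX_PKT_BURST) {
            nb = rte_eth_rx_burst(port_id, queue_id, pkts, MAX_PKT_BURST);
            forward_burst(pkts, nb);
            total += nb;
        }

        /* The returned count feeds the job stats period adjustment. */
        return total;
    }
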
