
Searched refs: RTE_PTR_ADD (Results 1 – 25 of 77) sorted by relevance


/f-stack/dpdk/drivers/baseband/fpga_5gnr_fec/
fpga_5gnr_fec.h
299 void *reg_addr = RTE_PTR_ADD(mmio_base, offset); in fpga_reg_write_8()
307 void *reg_addr = RTE_PTR_ADD(mmio_base, offset); in fpga_reg_write_16()
315 void *reg_addr = RTE_PTR_ADD(mmio_base, offset); in fpga_reg_write_32()
323 void *reg_addr = RTE_PTR_ADD(mmio_base, offset); in fpga_reg_write_64()
353 void *reg_addr = RTE_PTR_ADD(mmio_base, offset); in fpga_reg_read_32()
364 void *reg_addr = RTE_PTR_ADD(mmio_base, offset); in fpga_reg_read_16()
375 void *reg_addr = RTE_PTR_ADD(mmio_base, offset); in fpga_reg_read_8()
383 void *reg_addr = RTE_PTR_ADD(mmio_base, offset); in fpga_reg_read_64()
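Note: every hit in this file is the same MMIO accessor pattern: compute the register address as the BAR base plus a byte offset, then issue the access. A minimal sketch of that pattern, assuming DPDK's rte_io.h read/write helpers (the function names here are hypothetical, not the driver's own):

    #include <stdint.h>
    #include <rte_common.h> /* RTE_PTR_ADD */
    #include <rte_io.h>     /* rte_read32(), rte_write32() */

    /* Write a 32-bit register located at mmio_base + offset. */
    static inline void
    reg_write_32(void *mmio_base, uint32_t offset, uint32_t payload)
    {
        void *reg_addr = RTE_PTR_ADD(mmio_base, offset);

        rte_write32(payload, reg_addr);
    }

    /* Read a 32-bit register located at mmio_base + offset. */
    static inline uint32_t
    reg_read_32(void *mmio_base, uint32_t offset)
    {
        void *reg_addr = RTE_PTR_ADD(mmio_base, offset);

        return rte_read32(reg_addr);
    }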
/f-stack/dpdk/lib/librte_eal/common/
malloc_elem.c
50 data_start = RTE_PTR_ADD(elem, MALLOC_ELEM_HEADER_LEN); in malloc_elem_find_max_iova_contig()
74 page_end = RTE_PTR_ADD(cur_page, page_sz); in malloc_elem_find_max_iova_contig()
83 cur_page = RTE_PTR_ADD(cur_page, page_sz); in malloc_elem_find_max_iova_contig()
86 page_end = RTE_PTR_ADD(cur_page, page_sz); in malloc_elem_find_max_iova_contig()
315 elem = RTE_PTR_ADD(elem, elem->pad); in split_elem()
349 return elem->next == RTE_PTR_ADD(elem, elem->size) && in next_elem_is_adjacent()
361 return elem == RTE_PTR_ADD(elem->prev, elem->prev->size) && in prev_elem_is_adjacent()
447 RTE_PTR_ADD(new_elem, size + MALLOC_ELEM_OVERHEAD); in malloc_elem_alloc()
500 struct malloc_elem *inner = RTE_PTR_ADD(elem1, elem1->pad); in join_elem()
569 ptr = RTE_PTR_ADD(elem, MALLOC_ELEM_HEADER_LEN); in malloc_elem_free()
[all …]
eal_common_trace_ctf.c
44 memcpy(RTE_PTR_ADD(ptr, count), str, rc); in meta_copy()
316 str = RTE_PTR_ADD(meta, trace->ctf_meta_offset_freq); in meta_fix_freq()
340 str = RTE_PTR_ADD(meta, trace->ctf_meta_offset_freq_off_s); in meta_fix_freq_offset()
343 str = RTE_PTR_ADD(meta, trace->ctf_meta_offset_freq_off); in meta_fix_freq_offset()
eal_common_memory.c
109 next_baseaddr = RTE_PTR_ADD(next_baseaddr, page_sz); in eal_get_virtual_area()
147 next_baseaddr = RTE_PTR_ADD(aligned_addr, *size); in eal_get_virtual_area()
166 map_end = RTE_PTR_ADD(mapped_addr, (size_t)map_sz); in eal_get_virtual_area()
167 aligned_end = RTE_PTR_ADD(aligned_addr, *size); in eal_get_virtual_area()
279 addr = RTE_PTR_ADD(addr, page_sz); in eal_memseg_list_populate()
295 end = RTE_PTR_ADD(start, msl->len); in virt2memseg()
318 end = RTE_PTR_ADD(start, msl->len); in virt2memseg_list()
345 vi->virt = RTE_PTR_ADD(ms->addr, offset); in find_virt()
358 vi->virt = RTE_PTR_ADD(ms->addr, offset); in find_virt_legacy()
eal_common_memalloc.c
88 end = RTE_PTR_ADD(start, len); in eal_memalloc_is_contig()
104 aligned_start = RTE_PTR_ADD(aligned_start, pgsz); in eal_memalloc_is_contig()
110 aligned_start = RTE_PTR_ADD(aligned_start, pgsz); in eal_memalloc_is_contig()
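Note: the eal_common_memory.c and eal_common_memalloc.c hits above share one idiom: advance a cursor through a region one page at a time with RTE_PTR_ADD. A minimal sketch of that walk, assuming a hypothetical check_page() callback in place of the per-page IOVA comparison the real code performs:

    #include <stdbool.h>
    #include <stddef.h>
    #include <rte_common.h> /* RTE_PTR_ADD */

    /* Visit every page in [start, start + len); pgsz is the page size. */
    static bool
    walk_pages(void *start, size_t len, size_t pgsz,
               bool (*check_page)(const void *page))
    {
        const void *end = RTE_PTR_ADD(start, len);
        const void *cur;

        for (cur = start; cur < end; cur = RTE_PTR_ADD(cur, pgsz))
            if (!check_page(cur))
                return false;
        return true;
    }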
malloc_heap.c
207 void *data_start = RTE_PTR_ADD(elem, in find_biggest_element()
209 void *data_end = RTE_PTR_ADD(elem, elem->size - in find_biggest_element()
879 end = RTE_PTR_ADD(elem, len); in malloc_heap_free()
898 aligned_start = RTE_PTR_ADD(tmp->addr, tmp->len); in malloc_heap_free()
926 aligned_start = RTE_PTR_ADD(aligned_start, page_sz); in malloc_heap_free()
1146 ms->addr = RTE_PTR_ADD(va_addr, i * page_sz); in malloc_heap_create_external_seg()
eal_common_memzone.c
344 mz_end = RTE_PTR_ADD(cur_addr, mz->len); in dump_memzone()
357 cur_addr = RTE_PTR_ADD(cur_addr, page_sz); in dump_memzone()
malloc_elem.h
64 #define MALLOC_ELEM_TRAILER(elem) (*((uint64_t*)RTE_PTR_ADD(elem, \
/f-stack/dpdk/lib/librte_eal/include/
rte_trace_point.h
323 void *mem = RTE_PTR_ADD(&trace->mem[0], offset); in __rte_trace_mem_get()
343 return RTE_PTR_ADD(mem, __RTE_TRACE_EVENT_HEADER_SZ); in __rte_trace_point_emit_ev_header()
366 mem = RTE_PTR_ADD(mem, sizeof(in)); \
374 mem = RTE_PTR_ADD(mem, __RTE_TRACE_EMIT_STRING_LEN_MAX); \
/f-stack/dpdk/buildtools/pmdinfogen/
pmdinfogen.c
95 return RTE_PTR_ADD(info->hdr, in get_sym_value()
166 sechdrs = RTE_PTR_ADD(hdr, hdr->e_shoff); in parse_elf()
232 info->symtab_start = RTE_PTR_ADD(hdr, in parse_elf()
234 info->symtab_stop = RTE_PTR_ADD(hdr, in parse_elf()
237 info->strtab = RTE_PTR_ADD(hdr, in parse_elf()
244 info->symtab_shndx_start = RTE_PTR_ADD(hdr, in parse_elf()
246 info->symtab_shndx_stop = RTE_PTR_ADD(hdr, in parse_elf()
/f-stack/dpdk/drivers/raw/ioat/
idxd_pci.c
50 return RTE_PTR_ADD(pci->wq_regs_base, in idxd_get_wq_cfg()
145 pci->grp_regs = RTE_PTR_ADD(pci->regs, grp_offset * 0x100); in init_pci_device()
147 pci->wq_regs_base = RTE_PTR_ADD(pci->regs, wq_offset * 0x100); in init_pci_device()
264 idxd.public.portal = RTE_PTR_ADD(idxd.u.pci->portals, in idxd_rawdev_probe_pci()
/f-stack/dpdk/drivers/common/cpt/
cpt_ucode_asym.h
243 op = RTE_PTR_ADD(req->op, sizeof(uintptr_t)); in cpt_modex_prep()
256 dptr = RTE_PTR_ADD(req, sizeof(struct cpt_request_info)); in cpt_modex_prep()
314 op = RTE_PTR_ADD(req->op, sizeof(uintptr_t)); in cpt_rsa_prep()
319 dptr = RTE_PTR_ADD(req, sizeof(struct cpt_request_info)); in cpt_rsa_prep()
399 op = RTE_PTR_ADD(req->op, sizeof(uintptr_t)); in cpt_rsa_crt_prep()
404 dptr = RTE_PTR_ADD(req, sizeof(struct cpt_request_info)); in cpt_rsa_crt_prep()
636 dptr = RTE_PTR_ADD(req, sizeof(struct cpt_request_info)); in cpt_ecdsa_sign_prep()
737 dptr = RTE_PTR_ADD(req, sizeof(struct cpt_request_info)); in cpt_ecdsa_verify_prep()
842 dptr = RTE_PTR_ADD(req, sizeof(struct cpt_request_info)); in cpt_ecpm_prep()
cpt_fpm_tables.c
1115 data = RTE_PTR_ADD(data, fpm_table_scalar[i].len); in cpt_fpm_init()
/f-stack/dpdk/lib/librte_graph/
graph_stats.c
161 cluster = RTE_PTR_ADD(cluster, stats->cluster_node_size); in stats_mem_populate()
170 cluster = RTE_PTR_ADD(stats, stats->sz), in stats_mem_populate()
380 cluster = RTE_PTR_ADD(cluster, stat->cluster_node_size); in rte_graph_cluster_stats_get()
404 cluster = RTE_PTR_ADD(cluster, stat->cluster_node_size); in rte_graph_cluster_stats_reset()
rte_graph.h
345 node = RTE_PTR_ADD(graph, off); \
347 off = node->next, node = RTE_PTR_ADD(graph, off), count++)
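Note: the rte_graph.h hit is an offset-based traversal: nodes live inside one contiguous graph allocation and are linked by byte offsets rather than raw pointers, so the block stays valid even if it is mapped at a different address. A minimal sketch of the same walk (the node layout below is hypothetical, not the real struct rte_node):

    #include <stdint.h>
    #include <rte_common.h> /* RTE_PTR_ADD */

    struct node_hdr {
        uint32_t next; /* byte offset of next node from graph base; 0 ends */
    };

    /* Count the nodes reachable from first_off. */
    static unsigned int
    count_nodes(const void *graph, uint32_t first_off)
    {
        const struct node_hdr *node = NULL;
        unsigned int count = 0;
        uint32_t off;

        for (off = first_off; off != 0; off = node->next) {
            node = RTE_PTR_ADD(graph, off);
            count++;
        }
        return count;
    }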
graph_populate.c
57 graph->cir_start = RTE_PTR_ADD(graph, _graph->cir_start); in graph_header_popluate()
76 struct rte_node *node = RTE_PTR_ADD(graph, off); in graph_nodes_populate()
/f-stack/dpdk/drivers/net/ark/
ark_ethdev_rx.c
198 queue->udm = RTE_PTR_ADD(ark->udm.v, qidx * ARK_UDM_QOFFSET); in eth_ark_dev_rx_queue_setup()
199 queue->mpu = RTE_PTR_ADD(ark->mpurx.v, qidx * ARK_MPU_QOFFSET); in eth_ark_dev_rx_queue_setup()
270 meta = RTE_PTR_ADD(mbuf->buf_addr, ARK_RX_META_OFFSET); in eth_ark_recv_pkts()
476 *(uint16_t *)RTE_PTR_ADD(mbuf_init->buf_addr, 4) = in eth_ark_rx_seed_mbufs()
674 uint8_t *dp = RTE_PTR_ADD(mbuf->buf_addr, i); in dump_mbuf_data()
ark_ethdev.c
504 mpu = RTE_PTR_ADD(mpu, ARK_MPU_QOFFSET); in ark_config_device()
524 mpu = RTE_PTR_ADD(mpu, ARK_MPU_QOFFSET); in ark_config_device()
695 mpu = RTE_PTR_ADD(mpu, ARK_MPU_QOFFSET); in eth_ark_dev_stop()
714 mpu = RTE_PTR_ADD(mpu, ARK_MPU_QOFFSET); in eth_ark_dev_stop()
772 struct ark_mpu_t *tx_mpu = RTE_PTR_ADD(ark->bar0, ARK_MPU_TX_BASE); in eth_ark_dev_info_get()
773 struct ark_mpu_t *rx_mpu = RTE_PTR_ADD(ark->bar0, ARK_MPU_RX_BASE); in eth_ark_dev_info_get()
/f-stack/dpdk/drivers/bus/vmbus/linux/
vmbus_uio.c
112 void *end_va = RTE_PTR_ADD(msl->base_va, sz); in find_max_end_va()
166 vmbus_map_addr = RTE_PTR_ADD(mapaddr, size); in vmbus_uio_map_resource_by_index()
245 vmbus_map_addr = RTE_PTR_ADD(mapaddr, file_size); in vmbus_uio_map_subchan()
/f-stack/dpdk/drivers/event/dsw/
dsw_sort.h
13 RTE_PTR_ADD(_ary, (_idx) * (_elem_size))
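Note: the dsw_sort.h hit uses RTE_PTR_ADD as a type-erased array subscript: element i of a void * buffer whose element size is only known at run time starts at base + i * elem_size. A small sketch built on the same idea (the macro and function names here are hypothetical):

    #include <stddef.h>
    #include <string.h>
    #include <rte_common.h> /* RTE_PTR_ADD */

    #define ELEM_PTR(_ary, _idx, _elem_size) \
        RTE_PTR_ADD(_ary, (_idx) * (_elem_size))

    /* Swap elements i and j; tmp must be at least elem_size bytes. */
    static void
    elem_swap(void *ary, size_t i, size_t j, size_t elem_size, void *tmp)
    {
        memcpy(tmp, ELEM_PTR(ary, i, elem_size), elem_size);
        memcpy(ELEM_PTR(ary, i, elem_size),
               ELEM_PTR(ary, j, elem_size), elem_size);
        memcpy(ELEM_PTR(ary, j, elem_size), tmp, elem_size);
    }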
/f-stack/dpdk/drivers/net/avp/
avp_ethdev.c
353 return RTE_PTR_ADD(RTE_PTR_SUB(host_mbuf_address, in avp_dev_translate_buffer()
384 addr = RTE_PTR_ADD(addr, (uintptr_t)offset); in avp_dev_translate_address()
679 RTE_PTR_ADD(registers, in avp_dev_interrupt_handler()
685 RTE_PTR_ADD(registers, in avp_dev_interrupt_handler()
703 RTE_PTR_ADD(registers, in avp_dev_interrupt_handler()
802 value = AVP_READ32(RTE_PTR_ADD(registers, in avp_dev_migration_pending()
807 RTE_PTR_ADD(registers, in avp_dev_migration_pending()
1338 rte_memcpy(RTE_PTR_ADD(rte_pktmbuf_mtod(m, void *), in avp_dev_copy_from_buffers()
1340 RTE_PTR_ADD(pkt_data, src_offset), in avp_dev_copy_from_buffers()
1652 rte_memcpy(RTE_PTR_ADD(pkt_data, pkt_buf->data_len), in avp_dev_copy_to_buffers()
[all …]
/f-stack/dpdk/lib/librte_latencystats/
rte_latencystats.c
92 stats_ptr = RTE_PTR_ADD(glob_stats, in rte_latencystats_update()
114 stats_ptr = RTE_PTR_ADD(glob_stats, in rte_latencystats_fill_values()
/f-stack/dpdk/drivers/net/mlx5/
mlx5_txpp.c
215 qs = RTE_PTR_ADD(cs, sizeof(struct mlx5_wqe_cseg)); in mlx5_txpp_fill_wqe_rearm_queue()
227 qs = RTE_PTR_ADD(cs, sizeof(struct mlx5_wqe_cseg)); in mlx5_txpp_fill_wqe_rearm_queue()
291 wq->cq_dbrec = RTE_PTR_ADD(wq->cq_buf, umem_dbrec); in mlx5_txpp_create_rearm_queue()
345 wq->sq_dbrec = RTE_PTR_ADD(wq->sq_buf, umem_dbrec + in mlx5_txpp_create_rearm_queue()
531 wq->cq_dbrec = RTE_PTR_ADD(wq->cq_buf, umem_dbrec); in mlx5_txpp_create_clock_queue()
598 wq->sq_dbrec = RTE_PTR_ADD(wq->sq_buf, umem_dbrec + in mlx5_txpp_create_clock_queue()
630 uint32_t *addr = RTE_PTR_ADD(base_addr, MLX5_CQ_DOORBELL); in mlx5_txpp_cq_arm()
/f-stack/dpdk/app/test/
test_common.c
36 if ((uintptr_t)RTE_PTR_ADD(SMALLER, PTR_DIFF) != BIGGER) in test_macros()
37 FAIL_MACRO(RTE_PTR_ADD); in test_macros()
/f-stack/freebsd/contrib/dpdk_rte_lpm/
rte_common.h
209 #define RTE_PTR_ADD(ptr, x) ((void*)((uintptr_t)(ptr) + (x))) macro
257 RTE_PTR_ALIGN_FLOOR((typeof(ptr))RTE_PTR_ADD(ptr, (align) - 1), align)
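Note: the rte_common.h hit is the definition itself: the pointer is cast through uintptr_t so the addition is plain byte arithmetic, and the result comes back as void *, which avoids a (char *) cast at every call site. A self-contained usage sketch (the struct and buffer are illustrative only):

    #include <assert.h>
    #include <stdint.h>

    /* Definition as in rte_common.h above. */
    #define RTE_PTR_ADD(ptr, x) ((void *)((uintptr_t)(ptr) + (x)))

    struct header {
        uint32_t len; /* payload bytes follow the header */
    };

    int
    main(void)
    {
        char buf[64] = { 0 };
        struct header *h = (struct header *)buf;

        /* Advance past the header to the start of the payload. */
        void *payload = RTE_PTR_ADD(h, sizeof(*h));

        assert((uintptr_t)payload == (uintptr_t)buf + sizeof(*h));
        return 0;
    }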
