Searched refs:RTE_ALIGN (Results 1 – 25 of 84) sorted by relevance

/dpdk/drivers/crypto/octeontx/
otx_cryptodev_hw_access.c
538 len = chunks * RTE_ALIGN(sizeof(struct command_chunk), 8); in otx_cpt_get_resource()
541 len += qlen * RTE_ALIGN(sizeof(cptvf->pqueue.rid_queue[0]), 8); in otx_cpt_get_resource()
544 len = RTE_ALIGN(len, pg_sz); in otx_cpt_get_resource()
547 len += chunks * RTE_ALIGN(chunk_size, 128); in otx_cpt_get_resource()
550 len = RTE_ALIGN(len, pg_sz); in otx_cpt_get_resource()
578 mem += qlen * RTE_ALIGN(sizeof(cptvf->pqueue.rid_queue[0]), 8); in otx_cpt_get_resource()
579 len -= qlen * RTE_ALIGN(sizeof(cptvf->pqueue.rid_queue[0]), 8); in otx_cpt_get_resource()
584 mem += RTE_ALIGN(used_len, pg_sz) - used_len; in otx_cpt_get_resource()
585 len -= RTE_ALIGN(used_len, pg_sz) - used_len; in otx_cpt_get_resource()
586 dma_addr += RTE_ALIGN(used_len, pg_sz) - used_len; in otx_cpt_get_resource()
[all …]
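
The otx_cpt_get_resource() hits above show a two-pass layout: pass one sums the per-region lengths, rounding each element to 8 bytes and each region boundary up to the page size; pass two walks the same arithmetic to carve sub-region pointers out of a single allocation. A minimal stand-alone sketch of that pattern, assuming invented sizes (align_up() mirrors RTE_ALIGN's round-up-to-a-power-of-two semantics; the element and chunk sizes are made up):

    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>

    /* Round v up to a multiple of a; a must be a power of two (RTE_ALIGN semantics). */
    static inline size_t align_up(size_t v, size_t a) { return (v + a - 1) & ~(a - 1); }

    int main(void)
    {
        const size_t pg_sz = 4096, qlen = 256, chunks = 8, chunk_size = 1024;

        /* Pass 1: total length, page-aligning each region boundary. */
        size_t len = qlen * align_up(sizeof(uint64_t), 8); /* rid queue (element type made up) */
        len = align_up(len, pg_sz);
        len += chunks * align_up(chunk_size, 128);         /* command chunks, 128B slots */
        len = align_up(len, pg_sz);

        uint8_t *mem = aligned_alloc(pg_sz, len);          /* len is already a page multiple */
        if (mem == NULL)
            return 1;

        /* Pass 2: carve sub-regions with the same arithmetic. */
        size_t used = qlen * align_up(sizeof(uint64_t), 8);
        uint8_t *chunk_mem = mem + align_up(used, pg_sz);

        printf("total %zu bytes, chunks at offset %td\n", len, chunk_mem - mem);
        free(mem);
        return 0;
    }
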
/dpdk/drivers/net/bnxt/
bnxt_ring.c
124 stats_len = RTE_ALIGN(stats_len, 128); in bnxt_alloc_rings()
128 cp_vmem_len = RTE_ALIGN(cp_vmem_len, 128); in bnxt_alloc_rings()
132 nq_vmem_len = RTE_ALIGN(nq_vmem_len, 128); in bnxt_alloc_rings()
140 tx_vmem_len = RTE_ALIGN(tx_vmem_len, 128); in bnxt_alloc_rings()
146 rx_vmem_len = RTE_ALIGN(rx_vmem_len, 128); in bnxt_alloc_rings()
154 cp_ring_start = RTE_ALIGN(cp_ring_start, 4096); in bnxt_alloc_rings()
158 cp_ring_len = RTE_ALIGN(cp_ring_len, 128); in bnxt_alloc_rings()
160 nq_ring_start = RTE_ALIGN(nq_ring_start, 4096); in bnxt_alloc_rings()
169 tx_ring_len = RTE_ALIGN(tx_ring_len, 4096); in bnxt_alloc_rings()
176 rx_ring_len = RTE_ALIGN(rx_ring_len, 4096); in bnxt_alloc_rings()
[all …]
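
The bnxt_alloc_rings() hits pack stats, completion, NQ, Tx and Rx ring memory into one memzone, padding each length to 128 bytes and snapping each ring's start offset to a 4 KiB boundary. A rough sketch of that offset chain with invented sizes (align_up() as in the sketch above):

    #include <stddef.h>

    static inline size_t align_up(size_t v, size_t a) { return (v + a - 1) & ~(a - 1); }

    /* Illustrative sizes only; the real driver derives them from ring depths. */
    size_t bnxt_style_layout(void)
    {
        size_t stats_len = align_up(512, 128);                /* stats block, 128B padded  */
        size_t cp_start  = align_up(stats_len, 4096);         /* completion ring on a page */
        size_t cp_len    = align_up(16 * 1024, 128);
        size_t tx_start  = align_up(cp_start + cp_len, 4096); /* Tx ring on its own page   */
        size_t tx_len    = align_up(8 * 1024, 4096);
        return tx_start + tx_len;                             /* total memzone size        */
    }
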
/dpdk/lib/graph/
graph_populate.c
26 sz = RTE_ALIGN(sz, val); in graph_fp_mem_calc_size()
32 sz = RTE_ALIGN(sz, RTE_CACHE_LINE_SIZE); in graph_fp_mem_calc_size()
36 sz = RTE_ALIGN(sz, RTE_CACHE_LINE_SIZE); in graph_fp_mem_calc_size()
97 off = RTE_ALIGN(off, RTE_CACHE_LINE_SIZE); in graph_nodes_populate()
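
graph_fp_mem_calc_size() grows a running size and snaps it to RTE_CACHE_LINE_SIZE before each new sub-array, so per-node fast-path data never shares a cache line with the previous region. A minimal sketch of the accumulate-then-align idiom (CACHE_LINE stands in for RTE_CACHE_LINE_SIZE; the parameters are hypothetical):

    #include <stddef.h>

    #define CACHE_LINE 64 /* stand-in for RTE_CACHE_LINE_SIZE */
    static inline size_t align_up(size_t v, size_t a) { return (v + a - 1) & ~(a - 1); }

    size_t calc_size(size_t hdr_sz, size_t nb_nodes, size_t node_sz)
    {
        size_t sz = hdr_sz;
        sz = align_up(sz, CACHE_LINE);  /* node array starts on a cache line */
        sz += nb_nodes * node_sz;
        sz = align_up(sz, CACHE_LINE);  /* total is a cache-line multiple    */
        return sz;
    }
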
/dpdk/drivers/net/mlx5/
mlx5_rxtx_vec.h
44 RTE_ALIGN(offsetof(struct rte_mbuf, rearm_data), 16));
65 RTE_ALIGN(offsetof(struct mlx5_cqe, sop_drop_qpn), 8));
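
Both mlx5_rxtx_vec.h hits sit inside compile-time assertions: a field offset must equal its own RTE_ALIGN round-up, which holds exactly when the offset is already aligned, so the vectorized Rx/Tx paths can issue aligned 8- or 16-byte loads. A sketch of the same check with C11 static_assert on a hypothetical struct:

    #include <assert.h>
    #include <stddef.h>
    #include <stdint.h>

    struct example {           /* hypothetical layout */
        char pad[16];
        uint64_t rearm_data;   /* wants 16-byte alignment for vector ops */
    };

    /* offset == align_up(offset, 16) <=> offset is 16-byte aligned */
    static_assert(offsetof(struct example, rearm_data) ==
                  ((offsetof(struct example, rearm_data) + 15) & ~(size_t)15),
                  "rearm_data must be 16-byte aligned");
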
mlx5_txq.c
801 temp = RTE_ALIGN(temp, MLX5_WSEG_SIZE) + in txq_set_params()
836 temp = RTE_ALIGN(temp, MLX5_WQE_SIZE); in txq_set_params()
899 temp = RTE_ALIGN(temp, MLX5_WQE_SIZE); in txq_set_params()
mlx5_txpp.c
294 wqe_size = RTE_ALIGN(MLX5_TXPP_TEST_PKT_SIZE + in mlx5_txpp_fill_wqe_clock_queue()
309 wqe_size = RTE_ALIGN(wqe_size, MLX5_WQE_SIZE); in mlx5_txpp_fill_wqe_clock_queue()
417 wq->sq_size = RTE_ALIGN(MLX5_TXPP_TEST_PKT_SIZE + in mlx5_txpp_create_clock_queue()
mlx5_devx.c
1317 RTE_ALIGN(txq_ctrl->max_tso_header, MLX5_WSEG_SIZE) : 0; in mlx5_txq_devx_obj_new()
1324 RTE_ALIGN(txq_data->inlen_send + in mlx5_txq_devx_obj_new()
1327 wqe_size = RTE_ALIGN(wqe_size, MLX5_WQE_SIZE) / MLX5_WQE_SIZE; in mlx5_txq_devx_obj_new()
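
The mlx5 Tx hits across mlx5_txq.c, mlx5_txpp.c and mlx5_devx.c follow one rule: a descriptor's byte count is rounded up to MLX5_WQE_SIZE, the 64-byte basic block, and often divided by it afterwards to get a WQE count. A worked sketch (WQE_SIZE stands in for MLX5_WQE_SIZE):

    #include <stddef.h>

    #define WQE_SIZE 64u /* stand-in for MLX5_WQE_SIZE */
    static inline size_t align_up(size_t v, size_t a) { return (v + a - 1) & ~(a - 1); }

    /* e.g. a 146-byte descriptor: align_up(146, 64) = 192 bytes = 3 WQEs */
    unsigned int wqe_count(size_t bytes)
    {
        return (unsigned int)(align_up(bytes, WQE_SIZE) / WQE_SIZE);
    }
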
/dpdk/drivers/net/fm10k/
fm10k.h
227 ((uint64_t) RTE_ALIGN(((mb)->buf_iova + RTE_PKTMBUF_HEADROOM),\
300 if (RTE_ALIGN(addr, FM10K_RX_DATABUF_ALIGN) == addr) in fm10k_addr_alignment_valid()
304 if (RTE_ALIGN(addr, 8) == addr) { in fm10k_addr_alignment_valid()
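
fm10k_addr_alignment_valid() relies on the idiom RTE_ALIGN(addr, N) == addr: rounding up is a no-op exactly when addr is already an N-byte multiple. The same test appears in the rte_mbuf.c and sfc_ef10.h assertions further down. A tiny sketch:

    #include <stdbool.h>
    #include <stdint.h>

    static inline uint64_t align_up64(uint64_t v, uint64_t a) { return (v + a - 1) & ~(a - 1); }

    /* True iff addr is a multiple of align (align a power of two);
     * equivalent to (addr & (align - 1)) == 0. */
    bool addr_is_aligned(uint64_t addr, uint64_t align)
    {
        return align_up64(addr, align) == addr;
    }
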
/dpdk/drivers/common/mlx5/
mlx5_common_devx.c
106 umem_dbrec = RTE_ALIGN(umem_size, MLX5_DBR_SIZE); in mlx5_devx_cq_create()
224 umem_dbrec = RTE_ALIGN(umem_size, MLX5_DBR_SIZE); in mlx5_devx_sq_create()
369 umem_dbrec = RTE_ALIGN(umem_size, MLX5_DBR_SIZE); in mlx5_devx_qp_create()
475 umem_dbrec = RTE_ALIGN(umem_size, MLX5_DBR_SIZE); in mlx5_devx_wq_init()
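
Each mlx5_common_devx.c hit computes the same offset: the queue buffer fills the start of the umem, and the doorbell record is placed at the queue size rounded up to MLX5_DBR_SIZE, keeping the record naturally aligned within the same registration. A minimal sketch (DBR_SIZE is a stand-in; the real constant's value is not assumed here):

    #include <stddef.h>

    #define DBR_SIZE 64u /* stand-in for MLX5_DBR_SIZE */
    static inline size_t align_up(size_t v, size_t a) { return (v + a - 1) & ~(a - 1); }

    size_t dbrec_offset(size_t umem_size) { return align_up(umem_size, DBR_SIZE); }
    size_t umem_total(size_t umem_size) { return dbrec_offset(umem_size) + DBR_SIZE; }
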
/dpdk/drivers/crypto/mlx5/
mlx5_crypto.c
415 ds = RTE_ALIGN(ds, 4); in mlx5_crypto_wqe_set()
607 alloc_size = RTE_ALIGN(alloc_size, RTE_CACHE_LINE_SIZE); in mlx5_crypto_queue_pair_setup()
652 qp->mkey = (struct mlx5_devx_obj **)RTE_ALIGN((uintptr_t)(qp + 1), in mlx5_crypto_queue_pair_setup()
822 RTE_ALIGN(segs_num, 4) * in mlx5_crypto_get_wqe_sizes()
825 *umr_size = RTE_ALIGN(*umr_size, MLX5_SEND_WQE_BB); in mlx5_crypto_get_wqe_sizes()
829 RTE_ALIGN(segs_num - 2, 4)); in mlx5_crypto_get_wqe_sizes()
831 *rdmaw_size = RTE_ALIGN(*rdmaw_size, MLX5_SEND_WQE_BB); in mlx5_crypto_get_wqe_sizes()
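
The mlx5_crypto_queue_pair_setup() hits show RTE_ALIGN applied to a pointer value: the mkey array is placed at the first aligned address after the queue-pair struct inside one allocation. A sketch of that carve, with a hypothetical header type and a 64-byte boundary:

    #include <stdint.h>
    #include <stdlib.h>

    static inline uintptr_t align_up_ptr(uintptr_t v, uintptr_t a) { return (v + a - 1) & ~(a - 1); }

    struct qp { int state; }; /* hypothetical queue-pair header */

    /* One allocation: header, then a 64-byte-aligned array region. */
    void *carve_after(struct qp **out_qp, size_t array_bytes)
    {
        size_t sz = align_up_ptr(sizeof(struct qp), 64) + array_bytes;
        struct qp *qp = aligned_alloc(64, align_up_ptr(sz, 64));
        if (qp == NULL)
            return NULL;
        *out_qp = qp;
        /* array starts at the first 64-byte boundary after the header */
        return (void *)align_up_ptr((uintptr_t)(qp + 1), 64);
    }
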
/dpdk/drivers/regex/cn9k/
cn9k_regexdev.c
89 len = iq_len * RTE_ALIGN(sizeof(struct roc_ree_rid), 8); in ree_qp_create()
92 len = RTE_ALIGN(len, pg_sz); in ree_qp_create()
98 len = RTE_ALIGN(len, pg_sz); in ree_qp_create()
122 used_len = iq_len * RTE_ALIGN(sizeof(struct roc_ree_rid), 8); in ree_qp_create()
123 used_len = RTE_ALIGN(used_len, pg_sz); in ree_qp_create()
/dpdk/drivers/net/sfc/
sfc_ef10.h
108 SFC_ASSERT(RTE_ALIGN(added, SFC_EF10_RX_WPTR_ALIGN) == added); in sfc_ef10_rx_qpush()
/dpdk/app/test-pmd/
cmd_flex_item.c
284 base_size = RTE_ALIGN(sizeof(*conf), sizeof(uintptr_t)); in flex_item_init()
285 samples_size = RTE_ALIGN(FLEX_ITEM_MAX_SAMPLES_NUM * in flex_item_init()
288 links_size = RTE_ALIGN(FLEX_ITEM_MAX_LINKS_NUM * in flex_item_init()
/dpdk/lib/pcapng/
rte_pcapng.c
63 return RTE_ALIGN(sizeof(struct pcapng_option) + len, in pcapng_optlen()
337 + RTE_ALIGN(length, sizeof(uint32_t)) in rte_pcapng_mbuf_size()
456 padding = RTE_ALIGN(data_len, sizeof(uint32_t)) - data_len; in rte_pcapng_copy()
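
pcapng blocks and options are 32-bit aligned, so rte_pcapng_copy() computes the pad as the aligned length minus the raw length, always 0 to 3 bytes. The vmbus_channel.c hits below do the same dance with an 8-byte boundary. A worked sketch:

    #include <stdint.h>

    static inline uint32_t align_up32(uint32_t v, uint32_t a) { return (v + a - 1) & ~(a - 1); }

    /* data_len 13 -> align_up(13, 4) = 16 -> 3 bytes of zero padding */
    uint32_t pcapng_pad_bytes(uint32_t data_len)
    {
        return align_up32(data_len, sizeof(uint32_t)) - data_len;
    }
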
/dpdk/lib/mbuf/
rte_mbuf.c
84 RTE_ASSERT(RTE_ALIGN(priv_size, RTE_MBUF_PRIV_ALIGN) == priv_size); in rte_pktmbuf_init()
175 RTE_ASSERT(RTE_ALIGN(priv_size, RTE_MBUF_PRIV_ALIGN) == priv_size); in __rte_pktmbuf_init_extmem()
229 if (RTE_ALIGN(priv_size, RTE_MBUF_PRIV_ALIGN) != priv_size) { in rte_pktmbuf_pool_create_by_ops()
295 if (RTE_ALIGN(priv_size, RTE_MBUF_PRIV_ALIGN) != priv_size) { in rte_pktmbuf_pool_create_extbuf()
/dpdk/lib/acl/
acl_gen.c
439 indices->match_start = RTE_ALIGN(indices->match_start, in acl_calc_counts_indices()
467 total_size = RTE_ALIGN(data_index_sz, RTE_CACHE_LINE_SIZE) + in rte_acl_gen()
492 RTE_ALIGN(data_index_sz, RTE_CACHE_LINE_SIZE)); in rte_acl_gen()
/dpdk/drivers/net/ionic/
ionic_lif.c
615 total_size = RTE_ALIGN(q_size, rte_mem_page_size()) + in ionic_qcq_alloc()
616 RTE_ALIGN(cq_size, rte_mem_page_size()); in ionic_qcq_alloc()
625 total_size += RTE_ALIGN(sg_size, rte_mem_page_size()); in ionic_qcq_alloc()
676 cq_base = (void *)RTE_ALIGN((uintptr_t)q_base + q_size, in ionic_qcq_alloc()
678 cq_base_pa = RTE_ALIGN(q_base_pa + q_size, in ionic_qcq_alloc()
682 sg_base = (void *)RTE_ALIGN((uintptr_t)cq_base + cq_size, in ionic_qcq_alloc()
684 sg_base_pa = RTE_ALIGN(cq_base_pa + cq_size, in ionic_qcq_alloc()
1008 lif->info_sz = RTE_ALIGN(sizeof(*lif->info), rte_mem_page_size()); in ionic_lif_alloc()
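
ionic_qcq_alloc() sizes the queue, completion-queue and scatter-gather rings individually rounded to the page size, allocates the sum, then derives each base by aligning the previous base plus its raw size. A stand-alone sketch of that carve (PAGE_SZ stands in for rte_mem_page_size()):

    #include <stdint.h>
    #include <stdlib.h>

    #define PAGE_SZ 4096u /* stand-in for rte_mem_page_size() */
    static inline uintptr_t align_up_u(uintptr_t v, uintptr_t a) { return (v + a - 1) & ~(a - 1); }

    int carve_qcq(size_t q_size, size_t cq_size, size_t sg_size)
    {
        size_t total = align_up_u(q_size, PAGE_SZ) +
                       align_up_u(cq_size, PAGE_SZ) +
                       align_up_u(sg_size, PAGE_SZ);
        uint8_t *base = aligned_alloc(PAGE_SZ, total);
        if (base == NULL)
            return -1;

        uint8_t *q_base  = base;
        uint8_t *cq_base = (uint8_t *)align_up_u((uintptr_t)q_base + q_size, PAGE_SZ);
        uint8_t *sg_base = (uint8_t *)align_up_u((uintptr_t)cq_base + cq_size, PAGE_SZ);
        (void)sg_base; /* each region starts on its own page */
        free(base);
        return 0;
    }
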
/dpdk/drivers/bus/vmbus/
vmbus_channel.c
112 pad_pktlen = RTE_ALIGN(pktlen, sizeof(uint64_t)); in rte_vmbus_chan_send()
156 pad_pktlen = RTE_ALIGN(pktlen, sizeof(uint64_t)); in rte_vmbus_chan_send_sglist()
/dpdk/lib/ethdev/
rte_eth_ctrl.h
435 (RTE_ALIGN(RTE_ETH_FLOW_MAX, UINT64_BIT)/UINT64_BIT)
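
The rte_eth_ctrl.h hit sizes a bitmap array: round the bit count up to UINT64_BIT (64) and divide, giving the number of 64-bit words that hold one bit per flow type. A worked sketch of the same arithmetic (MASK_ARRAY_SIZE and the max of 24 are hypothetical):

    #include <stdint.h>

    #define UINT64_BIT 64u /* bits per uint64_t, as in DPDK */

    /* Words needed for one bit per entry: a hypothetical max of 24
     * gives align_up(24, 64) / 64 = 1 word; 70 would give 128 / 64 = 2. */
    #define MASK_ARRAY_SIZE(max) \
        ((((max) + UINT64_BIT - 1) & ~(uint64_t)(UINT64_BIT - 1)) / UINT64_BIT)

    uint64_t flow_mask[MASK_ARRAY_SIZE(24)]; /* one 64-bit word */
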
/dpdk/drivers/net/hinic/base/
hinic_compat.h
79 #define ALIGN(x, a) RTE_ALIGN(x, a)
/dpdk/drivers/net/mlx4/
mlx4_rxtx.c
427 tinfo->wqe_tso_seg_size = RTE_ALIGN(sizeof(struct mlx4_wqe_lso_seg) + in mlx4_tx_burst_tso_get_params()
434 RTE_ALIGN((uint32_t)(tinfo->fence_size << MLX4_SEG_SHIFT), in mlx4_tx_burst_tso_get_params()
753 wqe_size = RTE_ALIGN((uint32_t)(ctrl->fence_size << MLX4_SEG_SHIFT), in mlx4_tx_burst_segs()
/dpdk/drivers/net/atlantic/hw_atl/
hw_atl_utils_fw2x.c
720 RTE_ALIGN(sizeof(*req) / sizeof(u32), sizeof(u32))); in aq_fw2x_send_macsec_request()
742 RTE_ALIGN(sizeof(*response) / sizeof(u32), sizeof(u32))); in aq_fw2x_send_macsec_request()
/dpdk/drivers/net/netvsc/
hn_rxtx.c
99 #define HN_RNDIS_PKT_ALIGNED RTE_ALIGN(HN_RNDIS_PKT_LEN, RTE_CACHE_LINE_SIZE)
103 RTE_ALIGN(RTE_ETHER_MIN_LEN + HN_RNDIS_PKT_LEN, align)
1240 padding = RTE_ALIGN(olen, txq->agg_align) - olen; in hn_try_txagg()
1530 RTE_ALIGN(pkt_size, txq->agg_align) > txq->agg_szleft) { in hn_xmit_pkts()
/dpdk/drivers/common/cnxk/
roc_platform.h
53 #define PLT_ALIGN RTE_ALIGN
/dpdk/lib/table/
rte_table_lpm_ipv6.c
85 entry_size = RTE_ALIGN(entry_size, sizeof(uint64_t)); in rte_table_lpm_ipv6_create()
