Search hits for the identifier old_head across the DPDK tree. Each entry lists the file, the matched source line number and text, and the enclosing function; declaration hits are tagged (local) or (argument).

/dpdk/lib/stack/
rte_stack_lf_c11.h
     39  struct rte_stack_lf_head old_head;   in __rte_stack_lf_push_elems() (local)
     42  old_head = list->head;               in __rte_stack_lf_push_elems()
     51  new_head.cnt = old_head.cnt + 1;     in __rte_stack_lf_push_elems()
     53  last->next = old_head.top;           in __rte_stack_lf_push_elems()
     60  (rte_int128_t *)&old_head,           in __rte_stack_lf_push_elems()
    101  old_head = list->head;               in __rte_stack_lf_pop_elems()
    115  rte_prefetch0(old_head.top);         in __rte_stack_lf_pop_elems()
    117  tmp = old_head.top;                  in __rte_stack_lf_pop_elems()
    137  old_head = list->head;               in __rte_stack_lf_pop_elems()
    160  (rte_int128_t *)&old_head,           in __rte_stack_lf_pop_elems()
    [all …]
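The hits above outline the lock-free push: the stack head packs a top pointer and a modification counter, and the whole 16-byte pair is swapped with a 128-bit compare-and-exchange (hence the (rte_int128_t *)&old_head casts), the counter defending against ABA. A minimal sketch of that pattern, using simplified stand-in types and the GCC generic atomics rather than the rte_stack internals:

#include <stdbool.h>
#include <stdint.h>

/* Stand-ins for the stack element and the {top, counter} head pair. */
struct lf_elem {
        void *data;
        struct lf_elem *next;
};

struct lf_head {
        struct lf_elem *top;
        uint64_t cnt;                   /* ABA-protection counter */
} __attribute__((aligned(16)));

static void
lf_push(struct lf_head *list, struct lf_elem *first, struct lf_elem *last)
{
        struct lf_head old_head, new_head;
        bool ok;

        __atomic_load(list, &old_head, __ATOMIC_RELAXED);
        do {
                /* Chain the new elements in front of the current top. */
                new_head.top = first;
                new_head.cnt = old_head.cnt + 1;
                last->next = old_head.top;

                /* 16-byte CAS; on failure old_head is refreshed and the loop
                 * retries.  DPDK goes through rte_atomic128_cmp_exchange(). */
                ok = __atomic_compare_exchange(list, &old_head, &new_head,
                                               true /* weak */,
                                               __ATOMIC_RELEASE,
                                               __ATOMIC_RELAXED);
        } while (!ok);
}

Built with GCC, the 16-byte exchange needs libatomic (or -mcx16 on x86-64).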
|
rte_stack_lf_generic.h
     39  struct rte_stack_lf_head old_head;   in __rte_stack_lf_push_elems() (local)
     42  old_head = list->head;               in __rte_stack_lf_push_elems()
     58  new_head.cnt = old_head.cnt + 1;     in __rte_stack_lf_push_elems()
     60  last->next = old_head.top;           in __rte_stack_lf_push_elems()
     65  (rte_int128_t *)&old_head,           in __rte_stack_lf_push_elems()
     96  old_head = list->head;               in __rte_stack_lf_pop_elems()
    110  rte_prefetch0(old_head.top);         in __rte_stack_lf_pop_elems()
    112  tmp = old_head.top;                  in __rte_stack_lf_pop_elems()
    132  old_head = list->head;               in __rte_stack_lf_pop_elems()
    142  (rte_int128_t *)&old_head,           in __rte_stack_lf_pop_elems()
    [all …]
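rte_stack_lf_generic.h is the non-C11 build flavour of the same algorithm, and its hits also show the pop side of the pattern: snapshot the head, walk n elements from old_head.top (the rte_prefetch0 hit warms that first cache line), then try to install {new top, cnt + 1} with the same 128-bit CAS. A hedged sketch of the pop, again with stand-in types rather than the rte_stack_lf internals:

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

struct lf_elem {
        void *data;
        struct lf_elem *next;
};

struct lf_head {
        struct lf_elem *top;
        uint64_t cnt;
} __attribute__((aligned(16)));

/* Pop n elements; returns the detached chain, or NULL if the walk runs out.
 * Elements must stay mapped while the stack lives, since a racing pop may
 * still read tmp->next of a node it then loses to the CAS (the rte_stack
 * elements live inside the stack object for exactly this reason). */
static struct lf_elem *
lf_pop(struct lf_head *list, unsigned int n, void **obj_table)
{
        struct lf_head old_head, new_head;
        struct lf_elem *tmp;
        unsigned int i;
        bool ok;

        __atomic_load(list, &old_head, __ATOMIC_ACQUIRE);
        do {
                /* Walk to the element that becomes the new top, collecting
                 * the payloads on the way down. */
                tmp = old_head.top;
                for (i = 0; i < n; i++) {
                        if (tmp == NULL)
                                return NULL;
                        obj_table[i] = tmp->data;
                        tmp = tmp->next;
                }

                new_head.top = tmp;
                new_head.cnt = old_head.cnt + 1;

                ok = __atomic_compare_exchange(list, &old_head, &new_head,
                                               true, __ATOMIC_ACQUIRE,
                                               __ATOMIC_ACQUIRE);
        } while (!ok);

        return old_head.top;
}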
|
/dpdk/lib/ring/
rte_ring_generic_pvt.h
     57  uint32_t *old_head, uint32_t *new_head,                    in __rte_ring_move_prod_head() (argument)
     68  *old_head = r->prod.head;                                  in __rte_ring_move_prod_head()
     81  *free_entries = (capacity + r->cons.tail - *old_head);     in __rte_ring_move_prod_head()
     91  *new_head = *old_head + n;                                 in __rte_ring_move_prod_head()
     96  *old_head, *new_head);                                     in __rte_ring_move_prod_head()
    127  uint32_t *old_head, uint32_t *new_head,                    in __rte_ring_move_cons_head() (argument)
    138  *old_head = r->cons.head;                                  in __rte_ring_move_cons_head()
    150  *entries = (r->prod.tail - *old_head);                     in __rte_ring_move_cons_head()
    159  *new_head = *old_head + n;                                 in __rte_ring_move_cons_head()
    165  success = rte_atomic32_cmpset(&r->cons.head, *old_head,    in __rte_ring_move_cons_head()
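These hits are the multi-producer/multi-consumer claim step of the classic ring: read the head, measure the room left against the opposite tail (capacity + cons.tail - old_head for producers, prod.tail - old_head for consumers), then try to move the head by n with a 32-bit compare-and-set, looping while other lcores win the race. A sketch of the producer side under stand-in types; the real helper additionally has a single-producer fast path, a fixed/variable behavior switch, and uses rte_atomic32_cmpset() where this sketch uses a GCC builtin:

#include <stdint.h>

struct toy_ring {
        uint32_t capacity;
        struct { volatile uint32_t head, tail; } prod;
        struct { volatile uint32_t head, tail; } cons;
};

/* Claim up to n slots for a producer; returns how many were claimed and
 * reports the [old_head, new_head) window the caller may now write. */
static unsigned int
move_prod_head(struct toy_ring *r, unsigned int n,
               uint32_t *old_head, uint32_t *new_head)
{
        uint32_t free_entries;
        int ok;

        do {
                *old_head = r->prod.head;
                /* The real helper puts a read barrier here so cons.tail is
                 * not read ahead of prod.head on weakly ordered CPUs. */

                /* Free-running 32-bit indices: wrap-around subtraction still
                 * counts the free slots correctly. */
                free_entries = r->capacity + r->cons.tail - *old_head;
                if (n > free_entries)
                        n = free_entries;       /* "variable" behavior */
                if (n == 0)
                        return 0;

                *new_head = *old_head + n;

                /* Claim the window, or retry if another producer moved head. */
                ok = __sync_bool_compare_and_swap(&r->prod.head,
                                                  *old_head, *new_head);
        } while (!ok);

        return n;
}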
|
rte_ring_c11_pvt.h
     56  uint32_t *old_head, uint32_t *new_head,                         in __rte_ring_move_prod_head() (argument)
     64  *old_head = __atomic_load_n(&r->prod.head, __ATOMIC_RELAXED);   in __rte_ring_move_prod_head()
     83  *free_entries = (capacity + cons_tail - *old_head);             in __rte_ring_move_prod_head()
     93  *new_head = *old_head + n;                                      in __rte_ring_move_prod_head()
     99  old_head, *new_head,                                            in __rte_ring_move_prod_head()
    132  uint32_t *old_head, uint32_t *new_head,                         in __rte_ring_move_cons_head() (argument)
    140  *old_head = __atomic_load_n(&r->cons.head, __ATOMIC_RELAXED);   in __rte_ring_move_cons_head()
    159  *entries = (prod_tail - *old_head);                             in __rte_ring_move_cons_head()
    168  *new_head = *old_head + n;                                      in __rte_ring_move_cons_head()
    174  old_head, *new_head,                                            in __rte_ring_move_cons_head()
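rte_ring_c11_pvt.h does the same move with explicit memory ordering: the head is loaded relaxed, the opposing tail with acquire (pairing with the release store that published those entries), and the claim is a weak compare-exchange that only needs relaxed ordering. A sketch of the consumer side, again with a stand-in ring type:

#include <stdbool.h>
#include <stdint.h>

struct c11_ring {
        uint32_t prod_head, prod_tail;   /* producer indices */
        uint32_t cons_head, cons_tail;   /* consumer indices */
};

/* Claim up to n filled slots for a consumer. */
static unsigned int
move_cons_head(struct c11_ring *r, unsigned int n,
               uint32_t *old_head, uint32_t *new_head)
{
        uint32_t prod_tail, entries;
        bool ok;

        *old_head = __atomic_load_n(&r->cons_head, __ATOMIC_RELAXED);
        do {
                /* Acquire pairs with the producer's release of prod_tail,
                 * making the enqueued objects visible before they are read. */
                prod_tail = __atomic_load_n(&r->prod_tail, __ATOMIC_ACQUIRE);

                entries = prod_tail - *old_head;
                if (n > entries)
                        n = entries;
                if (n == 0)
                        return 0;

                *new_head = *old_head + n;

                /* Weak CAS; on failure *old_head is refreshed for the retry. */
                ok = __atomic_compare_exchange_n(&r->cons_head, old_head,
                                                 *new_head, true,
                                                 __ATOMIC_RELAXED,
                                                 __ATOMIC_RELAXED);
        } while (!ok);

        return n;
}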
|
rte_ring_rts_elem_pvt.h
     71  enum rte_ring_queue_behavior behavior, uint32_t *old_head,   in __rte_ring_rts_move_prod_head() (argument)
    120  *old_head = oh.val.pos;                                      in __rte_ring_rts_move_prod_head()
    129  enum rte_ring_queue_behavior behavior, uint32_t *old_head,   in __rte_ring_rts_move_cons_head() (argument)
    175  *old_head = oh.val.pos;                                      in __rte_ring_rts_move_cons_head()
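In the relaxed-tail-sync (RTS) ring the head is not a bare index: the oh.val.pos in these hits is one half of a {position, counter} pair kept in a single 64-bit word and advanced with a 64-bit CAS, which is what lets many producers move it while the tails lag behind. A stand-in sketch of that packed update; the field layout here is an assumption for illustration, and the real helper also throttles the head so it never runs too far ahead of the slowest tail:

#include <stdbool.h>
#include <stdint.h>

union poscnt {
        uint64_t raw;
        struct {
                uint32_t cnt;   /* update counter */
                uint32_t pos;   /* ring index */
        } val;
};

/* Advance the packed head by n slots; returns the old position. */
static uint32_t
rts_move_head(union poscnt *head, uint32_t n)
{
        union poscnt oh, nh;
        bool ok;

        oh.raw = __atomic_load_n(&head->raw, __ATOMIC_ACQUIRE);
        do {
                nh.val.pos = oh.val.pos + n;
                nh.val.cnt = oh.val.cnt + 1;

                /* Publish {pos, cnt} in one shot; retry on contention. */
                ok = __atomic_compare_exchange_n(&head->raw, &oh.raw, nh.raw,
                                                 false, __ATOMIC_ACQUIRE,
                                                 __ATOMIC_ACQUIRE);
        } while (!ok);

        return oh.val.pos;              /* i.e. *old_head = oh.val.pos */
}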
|
rte_ring_hts_elem_pvt.h
     56  enum rte_ring_queue_behavior behavior, uint32_t *old_head,   in __rte_ring_hts_move_prod_head() (argument)
    105  *old_head = op.pos.head;                                     in __rte_ring_hts_move_prod_head()
    114  enum rte_ring_queue_behavior behavior, uint32_t *old_head,   in __rte_ring_hts_move_cons_head() (argument)
    160  *old_head = op.pos.head;                                     in __rte_ring_hts_move_cons_head()
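The head/tail-sync (HTS) ring goes one step further and keeps head and tail together in one 64-bit word (op.pos.head in these hits): a new producer or consumer may only move the head once the previous one has dragged the tail up to it, so updates on each side are fully serialized. A stand-in sketch of that gated head move, with simplified types and no free-entry accounting:

#include <stdbool.h>
#include <stdint.h>

union ht_pos {
        uint64_t raw;
        struct {
                uint32_t head;
                uint32_t tail;
        } pos;
};

/* Advance the head by n once the previous update has completed. */
static uint32_t
hts_move_head(union ht_pos *p, uint32_t n)
{
        union ht_pos op, np;
        bool ok;

        op.raw = __atomic_load_n(&p->raw, __ATOMIC_ACQUIRE);
        do {
                /* Wait until the earlier claimant finished (head == tail);
                 * the real helper spins with rte_pause() here. */
                while (op.pos.head != op.pos.tail)
                        op.raw = __atomic_load_n(&p->raw, __ATOMIC_ACQUIRE);

                np.pos.head = op.pos.head + n;
                np.pos.tail = op.pos.tail;

                ok = __atomic_compare_exchange_n(&p->raw, &op.raw, np.raw,
                                                 false, __ATOMIC_ACQUIRE,
                                                 __ATOMIC_ACQUIRE);
        } while (!ok);

        return op.pos.head;             /* i.e. *old_head = op.pos.head */
}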
|
/dpdk/drivers/event/opdl/
opdl_ring.c
    414  uint32_t *old_head, bool block, bool claim_func)                 in move_head_atomically() (argument)
    445  *old_head + *num_entries,                                        in move_head_atomically()
    456  claim_mgr_add(disclaims, *old_head, *old_head + *num_entries);   in move_head_atomically()
    465  uint32_t old_head;                                               in opdl_ring_input_multithread() (local)
    471  copy_entries_in(t, old_head, entries, num_entries);              in opdl_ring_input_multithread()
    478  __atomic_store_n(&s->shared.tail, old_head + num_entries,        in opdl_ring_input_multithread()
    563  uint32_t old_head;                                               in opdl_stage_claim_multithread() (local)
    584  *seq = old_head;                                                 in opdl_stage_claim_multithread()
    609  uint32_t old_head;                                               in opdl_stage_claim_copy_multithread() (local)
    614  copy_entries_out(s->t, old_head, entries, num_entries);          in opdl_stage_claim_copy_multithread()
    [all …]
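The opdl_ring hits trace the usual multi-writer hand-off: move_head_atomically() claims a [old_head, old_head + num_entries) window of slots, the entries are copied into it, and the window is published by advancing the stage's shared tail with a release store once earlier claimants have published theirs. The claim_mgr_add()/copy_entries_in() helpers are opdl internals and are not reproduced; the sketch below only shows the final wait-and-publish step, with stand-in names:

#include <stdint.h>

struct toy_stage {
        uint32_t head;          /* next slot to claim */
        uint32_t tail;          /* everything below this is published */
};

/* Publish a window that was claimed at old_head and already filled in. */
static void
publish_block(struct toy_stage *s, uint32_t old_head, uint32_t num_entries)
{
        /* Wait for writers that claimed earlier windows to publish first,
         * so the tail never skips over entries still being written. */
        while (__atomic_load_n(&s->tail, __ATOMIC_ACQUIRE) != old_head)
                ;       /* spin; the real code can yield or block instead */

        /* Release pairs with the readers' acquire loads of the tail. */
        __atomic_store_n(&s->tail, old_head + num_entries, __ATOMIC_RELEASE);
}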
|
/dpdk/drivers/common/qat/
qat_qp.c
    497  uint32_t old_head, new_head;                                       in rxq_free_desc() (local)
    500  old_head = q->csr_head;                                            in rxq_free_desc()
    505  void *cur_desc = (uint8_t *)q->base_addr + old_head;               in rxq_free_desc()
    507  if (new_head < old_head) {                                         in rxq_free_desc()
    508  memset(cur_desc, ADF_RING_EMPTY_SIG_BYTE, max_head - old_head);    in rxq_free_desc()
    511  memset(cur_desc, ADF_RING_EMPTY_SIG_BYTE, new_head - old_head);    in rxq_free_desc()
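Here old_head is not a slot count but a byte offset into the QAT response ring, and "freeing" the consumed descriptors means stamping them back to the ring's empty-signature byte. Because the offset wraps, the clear is split in two when new_head has wrapped below old_head. A stand-in sketch of that wrap handling (the names and the 0x7F value are illustrative; the driver uses its ADF_RING_EMPTY_SIG_BYTE constant and the queue's real size):

#include <stdint.h>
#include <string.h>

#define EMPTY_SIG_BYTE 0x7F     /* placeholder for ADF_RING_EMPTY_SIG_BYTE */

static void
free_resp_desc(uint8_t *base_addr, uint32_t ring_bytes,
               uint32_t old_head, uint32_t new_head)
{
        uint8_t *cur_desc = base_addr + old_head;

        if (new_head < old_head) {
                /* Wrapped: clear to the end of the ring, then from the
                 * start of the ring up to the new head. */
                memset(cur_desc, EMPTY_SIG_BYTE, ring_bytes - old_head);
                memset(base_addr, EMPTY_SIG_BYTE, new_head);
        } else {
                /* No wrap: one contiguous range. */
                memset(cur_desc, EMPTY_SIG_BYTE, new_head - old_head);
        }
}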
|
/dpdk/drivers/crypto/qat/dev/
qat_sym_pmd_gen1.c
   1058  uint32_t old_head, new_head;                                   in qat_sym_dp_dequeue_done_gen1() (local)
   1061  old_head = rx_queue->csr_head;                                 in qat_sym_dp_dequeue_done_gen1()
   1066  void *cur_desc = (uint8_t *)rx_queue->base_addr + old_head;    in qat_sym_dp_dequeue_done_gen1()
   1068  if (new_head < old_head) {                                     in qat_sym_dp_dequeue_done_gen1()
   1070  max_head - old_head);                                          in qat_sym_dp_dequeue_done_gen1()
   1075  old_head);                                                     in qat_sym_dp_dequeue_done_gen1()
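qat_sym_dp_dequeue_done_gen1() repeats the same wrap-aware clearing of the [old_head, new_head) byte range on the crypto PMD's response ring, so the sketch above applies here unchanged.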
|