Searched refs:inflights (Results 1 – 12 of 12) sorted by relevance

/f-stack/dpdk/drivers/event/sw/
sw_evdev_worker.c
59 uint32_t sw_inflights = rte_atomic32_read(&sw->inflights); in sw_event_enqueue_burst()
77 rte_atomic32_add(&sw->inflights, credit_update_quanta); in sw_event_enqueue_burst()
127 rte_atomic32_sub(&sw->inflights, credit_update_quanta); in sw_event_enqueue_burst()
159 rte_atomic32_sub(&sw->inflights, credit_update_quanta); in sw_event_dequeue_burst()
sw_evdev_scheduler.c
79 sw->ports[cq].inflights == SW_PORT_HIST_LIST) { in sw_schedule_atomic_to_cq()
89 p->inflights++; in sw_schedule_atomic_to_cq()
151 } while (sw->ports[cq].inflights == SW_PORT_HIST_LIST || in sw_schedule_parallel_to_cq()
157 p->inflights == SW_PORT_HIST_LIST) in sw_schedule_parallel_to_cq()
176 p->inflights++; in sw_schedule_parallel_to_cq()
385 if ((flags & QE_FLAG_COMPLETE) && port->inflights > 0) { in __pull_port_lb()
412 port->inflights -= eop; in __pull_port_lb()
sw_evdev.c
154 int possible_inflights = p->inflight_credits + p->inflights; in sw_port_setup()
155 rte_atomic32_sub(&sw->inflights, possible_inflights); in sw_port_setup()
519 rte_atomic32_set(&sw->inflights, 0); in sw_dev_configure()
637 uint32_t inflights = rte_atomic32_read(&sw->inflights); in sw_dump() local
638 uint32_t credits = sw->nb_events_limit - inflights; in sw_dump()
639 fprintf(f, "\tinflight %d, credits: %d\n", inflights, credits); in sw_dump()
658 (p->inflights == p->inflight_max) ? in sw_dump()
660 sw->ports[i].inflights, COL_RESET); in sw_dump()
715 uint32_t inflights = 0; in sw_dump() local
736 inflights += qid->fids[flow].pcount; in sw_dump()
sw_evdev.h
196 uint16_t inflights; member
236 rte_atomic32_t inflights __rte_cache_aligned;
sw_evdev_xstats.c
74 case inflight: return p->inflights; in get_port_stat()
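
Across these sw_evdev hits the pattern is a single device-wide rte_atomic32_t counter (sw->inflights) that enqueue code draws credits from in batches of credit_update_quanta and that the scheduler and dequeue paths pay back, while each port keeps a small local inflight_credits cache. The following is a minimal sketch of that enqueue-side credit acquisition, not the driver code: the structs are reduced stand-ins for fields defined in sw_evdev.h, and the helper sw_acquire_credits() is a hypothetical name.

#include <errno.h>
#include <stdint.h>
#include <rte_atomic.h>

/* Reduced stand-ins for the fields used below (full definitions in sw_evdev.h). */
struct sw_evdev {
	rte_atomic32_t inflights;   /* events currently inside the device */
	uint32_t nb_events_limit;   /* configured ceiling on new events */
};

struct sw_port {
	uint16_t inflight_credits;  /* per-port cache of reserved credits */
};

/* Hypothetical helper condensing the enqueue-side credit check in
 * sw_event_enqueue_burst(): reserve a whole quantum of device-wide
 * credits at once so the shared counter is touched infrequently. */
static inline int
sw_acquire_credits(struct sw_evdev *sw, struct sw_port *p,
		uint32_t credit_update_quanta)
{
	uint32_t sw_inflights = rte_atomic32_read(&sw->inflights);

	if (sw_inflights + credit_update_quanta > sw->nb_events_limit)
		return -ENOSPC;  /* device saturated; caller drops or retries */

	/* As in the hits above, the check and the add are separate operations,
	 * so concurrent enqueuers can overshoot by a quantum or so; the
	 * credit scheme tolerates that. */
	rte_atomic32_add(&sw->inflights, credit_update_quanta);
	p->inflight_credits += credit_update_quanta;
	return 0;
}
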
/f-stack/dpdk/drivers/common/qat/
qat_qp.c
609 uint32_t inflights = in qat_enqueue_op_burst() local
612 if ((inflights + nb_ops) > tmp_qp->max_inflights) { in qat_enqueue_op_burst()
613 nb_ops_possible = tmp_qp->max_inflights - inflights; in qat_enqueue_op_burst()
621 if (tmp_qp->min_enq_burst_threshold && inflights > in qat_enqueue_op_burst()
710 uint32_t inflights = in qat_enqueue_comp_op_burst() local
714 overflow = (inflights + nb_ops) - tmp_qp->max_inflights; in qat_enqueue_comp_op_burst()
725 if (tmp_qp->min_enq_burst_threshold && inflights > in qat_enqueue_comp_op_burst()
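
In qat_qp.c the in-flight count is recomputed per burst rather than kept as a dedicated atomic: the hits at lines 609 and 710 are truncated, but the surrounding checks clamp the burst so that inflights plus nb_ops never exceeds max_inflights, with min_enq_burst_threshold optionally deferring undersized bursts. The sketch below illustrates only the clamp; the enqueued/dequeued counter pair used to derive inflights is an assumption, and both the struct and the helper name are simplified stand-ins.

#include <stdint.h>

/* Reduced stand-in for the qat_qp counters involved (full struct in qat_qp.h). */
struct qat_qp {
	uint32_t enqueued;                 /* ops placed on the TX ring (free-running, assumed) */
	uint32_t dequeued;                 /* ops collected from the RX ring (free-running, assumed) */
	uint32_t max_inflights;            /* capacity budget for the queue pair */
	uint32_t min_enq_burst_threshold;  /* optional minimum burst size */
};

/* Hypothetical helper mirroring the capacity check in qat_enqueue_op_burst():
 * clamp the requested burst to the space left on the queue pair. The real
 * driver additionally defers undersized bursts while min_enq_burst_threshold
 * is set and the ring is already busy. */
static inline uint16_t
qat_clamp_burst(const struct qat_qp *qp, uint16_t nb_ops)
{
	/* Unsigned subtraction of free-running counters yields the in-flight count. */
	uint32_t inflights = qp->enqueued - qp->dequeued;

	if (inflights + nb_ops > qp->max_inflights)
		nb_ops = (uint16_t)(qp->max_inflights - inflights);

	return nb_ops;
}
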
/f-stack/dpdk/drivers/event/dlb/
dlb_priv.h
438 uint32_t inflights; /* use __atomic builtins to access */ member
dlb.c
691 __atomic_store_n(&dlb->inflights, 0, __ATOMIC_SEQ_CST); in dlb_eventdev_configure()
2182 uint32_t sw_inflights = __atomic_load_n(&dlb->inflights, in dlb_check_enqueue_sw_credits()
2205 __atomic_fetch_add(&dlb->inflights, credit_update_quanta, in dlb_check_enqueue_sw_credits()
2231 __atomic_fetch_sub(&dlb->inflights, val, __ATOMIC_SEQ_CST); in dlb_replenish_sw_credits()
dlb_xstats.c
157 return __atomic_load_n(&dlb->inflights, __ATOMIC_SEQ_CST); in get_dev_stat()
/f-stack/dpdk/drivers/event/dlb2/
dlb2_priv.h
486 uint32_t inflights; /* use __atomic builtins */ member
dlb2.c
630 __atomic_store_n(&dlb2->inflights, 0, __ATOMIC_SEQ_CST); in dlb2_eventdev_configure()
2157 __atomic_fetch_sub(&dlb2->inflights, val, __ATOMIC_SEQ_CST); in dlb2_replenish_sw_credits()
2166 uint32_t sw_inflights = __atomic_load_n(&dlb2->inflights, in dlb2_check_enqueue_sw_credits()
2189 __atomic_fetch_add(&dlb2->inflights, credit_update_quanta, in dlb2_check_enqueue_sw_credits()
dlb2_xstats.c
169 return __atomic_load_n(&dlb2->inflights, __ATOMIC_SEQ_CST); in get_dev_stat()
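
The dlb and dlb2 drivers keep the same device-wide software-credit scheme as sw_evdev, but the counter is a plain uint32_t accessed only through GCC __atomic builtins, as the comments at dlb_priv.h:438 and dlb2_priv.h:486 note. Below is a hedged condensation of the acquire/replenish pair visible in dlb2_check_enqueue_sw_credits() and dlb2_replenish_sw_credits(); the structs are reduced stand-ins, the helper names are hypothetical, and the new_event_limit field name is assumed from context. The dlb.c hits follow the identical pattern on dlb->inflights.

#include <errno.h>
#include <stdint.h>

/* Reduced stand-ins for the fields the hits touch (full structs in dlb2_priv.h). */
struct dlb2_eventdev {
	uint32_t inflights;        /* accessed only with __atomic builtins */
	uint32_t new_event_limit;  /* assumed name for the device-wide event ceiling */
};

struct dlb2_eventdev_port {
	uint16_t inflight_credits;     /* per-port slice of the global budget (assumed field) */
	uint32_t credit_update_quanta; /* batch size for topping up that slice */
};

/* Hypothetical condensation of dlb2_check_enqueue_sw_credits(): reserve one
 * quantum of software credits, or fail when the device is saturated. */
static inline int
dlb2_take_sw_credits(struct dlb2_eventdev *dlb2, struct dlb2_eventdev_port *p)
{
	uint32_t sw_inflights = __atomic_load_n(&dlb2->inflights,
			__ATOMIC_SEQ_CST);

	if (sw_inflights + p->credit_update_quanta > dlb2->new_event_limit)
		return -ENOSPC;

	__atomic_fetch_add(&dlb2->inflights, p->credit_update_quanta,
			__ATOMIC_SEQ_CST);
	p->inflight_credits += p->credit_update_quanta;
	return 0;
}

/* Counterpart to dlb2_replenish_sw_credits(): hand unused credits back. */
static inline void
dlb2_return_sw_credits(struct dlb2_eventdev *dlb2, uint32_t val)
{
	__atomic_fetch_sub(&dlb2->inflights, val, __ATOMIC_SEQ_CST);
}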