| /dpdk/drivers/net/fm10k/base/ |
| H A D | fm10k_mbx.c |
|    18  fifo->tail = 0;  in fm10k_fifo_init()
|   138  if (len > tail)  in fm10k_mbx_index_len()
|   157  return (tail > mbx->tail) ? --tail : ++tail;  in fm10k_mbx_tail_add()
|   173  return (tail < mbx->tail) ? ++tail : --tail;  in fm10k_mbx_tail_sub()
|   352  tail++;  in fm10k_mbx_write_copy()
|   366  if (!tail)  in fm10k_mbx_write_copy()
|   367  tail++;  in fm10k_mbx_write_copy()
|   447  tail += end;  in fm10k_mbx_read_copy()
|   995  if (!tail || (tail == FM10K_MSG_HDR_MASK(TAIL)))  in fm10k_mbx_validate_msg_hdr()
|  1011  if (tail)  in fm10k_mbx_validate_msg_hdr()
|  [all …]
|
| /dpdk/drivers/crypto/qat/dev/ |
| H A D | qat_sym_pmd_gen1.c |
|   492  dp_ctx->tail = tail;  in qat_sym_dp_enqueue_single_cipher_gen1()
|   519  tail = dp_ctx->tail;  in qat_sym_dp_enqueue_cipher_jobs_gen1()
|   557  dp_ctx->tail = tail;  in qat_sym_dp_enqueue_cipher_jobs_gen1()
|   596  dp_ctx->tail = tail;  in qat_sym_dp_enqueue_single_auth_gen1()
|   627  tail = dp_ctx->tail;  in qat_sym_dp_enqueue_auth_jobs_gen1()
|   665  dp_ctx->tail = tail;  in qat_sym_dp_enqueue_auth_jobs_gen1()
|   705  dp_ctx->tail = tail;  in qat_sym_dp_enqueue_single_chain_gen1()
|   737  tail = dp_ctx->tail;  in qat_sym_dp_enqueue_chain_jobs_gen1()
|   782  dp_ctx->tail = tail;  in qat_sym_dp_enqueue_chain_jobs_gen1()
|   821  dp_ctx->tail = tail;  in qat_sym_dp_enqueue_single_aead_gen1()
|  [all …]
|
| H A D | qat_crypto_pmd_gen3.c |
|   417  uint32_t tail = dp_ctx->tail;  in qat_sym_dp_enqueue_single_aead_gen3() local
|   422  tail = (tail + tx_queue->msg_size) & tx_queue->modulo_mask;  in qat_sym_dp_enqueue_single_aead_gen3()
|   433  dp_ctx->tail = tail;  in qat_sym_dp_enqueue_single_aead_gen3()
|   453  uint32_t tail;  in qat_sym_dp_enqueue_aead_jobs_gen3() local
|   464  tail = dp_ctx->tail;  in qat_sym_dp_enqueue_aead_jobs_gen3()
|   505  dp_ctx->tail = tail;  in qat_sym_dp_enqueue_aead_jobs_gen3()
|   527  uint32_t tail = dp_ctx->tail;  in qat_sym_dp_enqueue_single_auth_gen3() local
|   532  tail = (tail + tx_queue->msg_size) & tx_queue->modulo_mask;  in qat_sym_dp_enqueue_single_auth_gen3()
|   544  dp_ctx->tail = tail;  in qat_sym_dp_enqueue_single_auth_gen3()
|   571  tail = dp_ctx->tail;  in qat_sym_dp_enqueue_auth_jobs_gen3()
|  [all …]
|
| H A D | qat_crypto_pmd_gen4.c |
|   127  ofs.ofs.cipher.head - ofs.ofs.cipher.tail;  in enqueue_one_aead_job_gen4()
|   246  uint32_t tail = dp_ctx->tail;  in qat_sym_dp_enqueue_single_aead_gen4() local
|   249  (uint8_t *)tx_queue->base_addr + tail);  in qat_sym_dp_enqueue_single_aead_gen4()
|   251  tail = (tail + tx_queue->msg_size) & tx_queue->modulo_mask;  in qat_sym_dp_enqueue_single_aead_gen4()
|   262  dp_ctx->tail = tail;  in qat_sym_dp_enqueue_single_aead_gen4()
|   282  uint32_t tail;  in qat_sym_dp_enqueue_aead_jobs_gen4() local
|   293  tail = dp_ctx->tail;  in qat_sym_dp_enqueue_aead_jobs_gen4()
|   297  qp->op_cookies[tail >> tx_queue->trailz];  in qat_sym_dp_enqueue_aead_jobs_gen4()
|   300  (uint8_t *)tx_queue->base_addr + tail);  in qat_sym_dp_enqueue_aead_jobs_gen4()
|   322  tail = (tail + tx_queue->msg_size) & tx_queue->modulo_mask;  in qat_sym_dp_enqueue_aead_jobs_gen4()
|  [all …]
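The gen1/gen3/gen4 enqueue paths above all advance the same kind of tail: a byte offset into the transmit queue that grows by msg_size and wraps with a power-of-two modulo_mask, with the data-path context's shadow tail written back once per burst. A minimal sketch of that arithmetic, using illustrative names rather than the real qat dp_ctx/tx_queue layouts:

    #include <stdint.h>
    #include <string.h>

    /* Illustrative descriptor ring: tail is a byte offset, size is a power of two. */
    struct desc_ring {
        uint8_t *base_addr;     /* start of descriptor memory */
        uint32_t msg_size;      /* bytes per descriptor */
        uint32_t modulo_mask;   /* ring size in bytes, minus one */
        uint32_t tail;          /* shadow tail, written back after a burst */
    };

    /* Copy n fixed-size descriptors in, publishing the shadow tail only at the end. */
    static uint32_t
    ring_enqueue_burst(struct desc_ring *r, const uint8_t *descs, uint32_t n)
    {
        uint32_t tail = r->tail;

        for (uint32_t i = 0; i < n; i++) {
            memcpy(r->base_addr + tail, descs + (size_t)i * r->msg_size, r->msg_size);
            tail = (tail + r->msg_size) & r->modulo_mask;
        }
        r->tail = tail;   /* the doorbell write that tells hardware is omitted */
        return n;
    }

The gen4 hits also index cookies with tail >> tx_queue->trailz, which suggests the byte offset is folded back into a descriptor index by shifting out the (power-of-two) message size rather than dividing.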
|
| /dpdk/drivers/net/tap/ |
| H A D | tap_netlink.c |
|    31  struct rtattr *tail;  member
|   371  struct nested_tail *tail;  in tap_nlattr_nested_start() local
|   374  if (!tail) {  in tap_nlattr_nested_start()
|   380  tail->tail = (struct rtattr *)NLMSG_TAIL(&msg->nh);  in tap_nlattr_nested_start()
|   384  tail->prev = msg->nested_tails;  in tap_nlattr_nested_start()
|   386  msg->nested_tails = tail;  in tap_nlattr_nested_start()
|   403  struct nested_tail *tail = msg->nested_tails;  in tap_nlattr_nested_finish() local
|   405  tail->tail->rta_len = (char *)NLMSG_TAIL(&msg->nh) - (char *)tail->tail;  in tap_nlattr_nested_finish()
|   407  if (tail->prev)  in tap_nlattr_nested_finish()
|   408  msg->nested_tails = tail->prev;  in tap_nlattr_nested_finish()
|  [all …]
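tap_netlink.c pushes a small record for every nested attribute it opens, so that when the nest is closed it can compute rta_len as the distance from the saved tail pointer to the current end of the message. A rough sketch of that open/close bookkeeping, assuming the usual NLMSG_TAIL-style helper (defined locally here, since it is not guaranteed by the uapi headers) and leaving out the per-nest stack the driver keeps:

    #include <linux/netlink.h>
    #include <linux/rtnetlink.h>

    /* First byte past the current payload of a netlink message. */
    #define MSG_TAIL(nh) \
        ((struct rtattr *)(((char *)(nh)) + NLMSG_ALIGN((nh)->nlmsg_len)))

    /* Open a nested attribute: emit an empty rtattr and remember where it starts. */
    static struct rtattr *
    nest_start(struct nlmsghdr *nh, unsigned short type)
    {
        struct rtattr *nest = MSG_TAIL(nh);

        nest->rta_type = type;
        nest->rta_len = RTA_LENGTH(0);
        nh->nlmsg_len = NLMSG_ALIGN(nh->nlmsg_len) + RTA_ALIGN(nest->rta_len);
        return nest;
    }

    /* Close it: the nest's length is everything appended since nest_start(). */
    static void
    nest_end(struct nlmsghdr *nh, struct rtattr *nest)
    {
        nest->rta_len = (char *)MSG_TAIL(nh) - (char *)nest;
    }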
|
| /dpdk/lib/ring/ |
| H A D | rte_ring_peek_elem_pvt.h |
|    31  __rte_ring_st_get_tail(struct rte_ring_headtail *ht, uint32_t *tail,  in __rte_ring_st_get_tail() argument
|    37  t = ht->tail;  in __rte_ring_st_get_tail()
|    43  *tail = t;  in __rte_ring_st_get_tail()
|    53  __rte_ring_st_set_head_tail(struct rte_ring_headtail *ht, uint32_t tail,  in __rte_ring_st_set_head_tail() argument
|    60  pos = tail + num;  in __rte_ring_st_set_head_tail()
|    62  __atomic_store_n(&ht->tail, pos, __ATOMIC_RELEASE);  in __rte_ring_st_set_head_tail()
|    75  __rte_ring_hts_get_tail(struct rte_ring_hts_headtail *ht, uint32_t *tail,  in __rte_ring_hts_get_tail() argument
|    82  n = p.pos.head - p.pos.tail;  in __rte_ring_hts_get_tail()
|    87  *tail = p.pos.tail;  in __rte_ring_hts_get_tail()
|   104  p.pos.head = tail + num;
|  [all …]
|
| H A D | rte_ring_peek.h |
|   170  uint32_t tail;  in rte_ring_enqueue_elem_finish() local
|   174  n = __rte_ring_st_get_tail(&r->prod, &tail, n);  in rte_ring_enqueue_elem_finish()
|   176  __rte_ring_enqueue_elems(r, tail, obj_table, esize, n);  in rte_ring_enqueue_elem_finish()
|   177  __rte_ring_st_set_head_tail(&r->prod, tail, n, 1);  in rte_ring_enqueue_elem_finish()
|   180  n = __rte_ring_hts_get_tail(&r->hts_prod, &tail, n);  in rte_ring_enqueue_elem_finish()
|   182  __rte_ring_enqueue_elems(r, tail, obj_table, esize, n);  in rte_ring_enqueue_elem_finish()
|   183  __rte_ring_hts_set_head_tail(&r->hts_prod, tail, n, 1);  in rte_ring_enqueue_elem_finish()
|   333  uint32_t tail;  in rte_ring_dequeue_elem_finish() local
|   337  n = __rte_ring_st_get_tail(&r->cons, &tail, n);  in rte_ring_dequeue_elem_finish()
|   338  __rte_ring_st_set_head_tail(&r->cons, tail, n, 0);  in rte_ring_dequeue_elem_finish()
|  [all …]
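The finish half of rte_ring's peek API shown above reads back where the matching _start() call left the tail (head minus tail is the outstanding reservation), performs the element copy at that tail, and then publishes head and tail together. A stripped-down single-threaded sketch of those two helpers with C11 atomics; the real code also covers the HTS sync mode and the zero-copy variants:

    #include <stdatomic.h>
    #include <stdint.h>

    /* One side (producer or consumer) of the ring, single-threaded ("ST") mode. */
    struct st_headtail {
        uint32_t head;            /* advanced by the earlier _start() call */
        _Atomic uint32_t tail;    /* published here, by the _finish() call */
    };

    /* Where the reserved slots begin and how many of them may be finished. */
    static uint32_t
    st_get_tail(struct st_headtail *ht, uint32_t *tail, uint32_t num)
    {
        uint32_t t = atomic_load_explicit(&ht->tail, memory_order_relaxed);
        uint32_t outstanding = ht->head - t;

        if (num > outstanding)
            num = outstanding;      /* cannot finish more than was started */
        *tail = t;
        return num;
    }

    /* Caller copies elements between the two steps; then publish the result. */
    static void
    st_set_head_tail(struct st_headtail *ht, uint32_t tail, uint32_t num)
    {
        uint32_t pos = tail + num;

        ht->head = pos;             /* any unused reservation is dropped */
        atomic_store_explicit(&ht->tail, pos, memory_order_release);
    }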
|
| H A D | rte_ring_hts_elem_pvt.h |
|    28  uint32_t tail;  in __rte_ring_hts_update_tail() local
|    32  tail = old_tail + num;  in __rte_ring_hts_update_tail()
|    33  __atomic_store_n(&ht->ht.pos.tail, tail, __ATOMIC_RELEASE);  in __rte_ring_hts_update_tail()
|    45  while (p->pos.head != p->pos.tail) {  in __rte_ring_hts_head_wait()
|    83  *free_entries = capacity + r->cons.tail - op.pos.head;  in __rte_ring_hts_move_prod_head()
|    93  np.pos.tail = op.pos.tail;  in __rte_ring_hts_move_prod_head()
|   139  *entries = r->prod.tail - op.pos.head;  in __rte_ring_hts_move_cons_head()
|   148  np.pos.tail = op.pos.tail;  in __rte_ring_hts_move_cons_head()
|
| H A D | rte_ring_peek_zc.h |
|   291  uint32_t tail;  in rte_ring_enqueue_zc_elem_finish() local
|   295  n = __rte_ring_st_get_tail(&r->prod, &tail, n);  in rte_ring_enqueue_zc_elem_finish()
|   296  __rte_ring_st_set_head_tail(&r->prod, tail, n, 1);  in rte_ring_enqueue_zc_elem_finish()
|   299  n = __rte_ring_hts_get_tail(&r->hts_prod, &tail, n);  in rte_ring_enqueue_zc_elem_finish()
|   300  __rte_ring_hts_set_head_tail(&r->hts_prod, tail, n, 1);  in rte_ring_enqueue_zc_elem_finish()
|   497  uint32_t tail;  in rte_ring_dequeue_zc_elem_finish() local
|   501  n = __rte_ring_st_get_tail(&r->cons, &tail, n);  in rte_ring_dequeue_zc_elem_finish()
|   502  __rte_ring_st_set_head_tail(&r->cons, tail, n, 0);  in rte_ring_dequeue_zc_elem_finish()
|   505  n = __rte_ring_hts_get_tail(&r->hts_cons, &tail, n);  in rte_ring_dequeue_zc_elem_finish()
|   506  __rte_ring_hts_set_head_tail(&r->hts_cons, tail, n, 0);  in rte_ring_dequeue_zc_elem_finish()
|
| H A D | rte_ring_generic_pvt.h |
|    26  rte_wait_until_equal_32(&ht->tail, old_val, __ATOMIC_RELAXED);  in __rte_ring_update_tail()
|    28  ht->tail = new_val;  in __rte_ring_update_tail()
|    81  *free_entries = (capacity + r->cons.tail - *old_head);  in __rte_ring_move_prod_head()
|   150  *entries = (r->prod.tail - *old_head);  in __rte_ring_move_cons_head()
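In the generic (multi-producer/multi-consumer) ring, each thread reserves a range by moving the head, copies its elements, and then must wait for the shared tail to reach the head value it started from before advancing the tail itself, so entries become visible strictly in reservation order. A compressed sketch with C11 atomics standing in for rte_wait_until_equal_32 and the DPDK memory-ordering wrappers:

    #include <stdatomic.h>
    #include <stdint.h>

    struct headtail {
        _Atomic uint32_t head;   /* next index to reserve */
        _Atomic uint32_t tail;   /* everything before this index is visible */
    };

    /* Publish the range [old_val, new_val) that this thread finished copying. */
    static void
    ring_update_tail(struct headtail *ht, uint32_t old_val, uint32_t new_val,
                     int single)
    {
        if (!single)
            while (atomic_load_explicit(&ht->tail, memory_order_relaxed) != old_val)
                ;   /* an earlier reservation is still being copied */

        /* release: ring entries written above become visible before the new tail */
        atomic_store_explicit(&ht->tail, new_val, memory_order_release);
    }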
|
| /dpdk/drivers/bus/dpaa/include/ |
| H A D | dpaa_rbtree.h |
|    37  struct rb_node *head, *tail;  member
|    43  tree->head = tree->tail = NULL;  in dpa_rbtree_init()
|    54  tree->head = tree->tail = &obj->node_field; \
|    74  obj->node_field.prev = tree->tail; \
|    76  tree->tail->next = &obj->node_field; \
|    77  tree->tail = &obj->node_field; \
|    83  if (tree->tail == &obj->node_field) \
|    85  tree->head = tree->tail = NULL; \
|    92  if (tree->tail == &obj->node_field) { \
|    94  tree->tail = tree->tail->prev; \
|  [all …]
|
| /dpdk/drivers/net/bnxt/tf_core/ |
| H A D | ll.c |
|    15  ll->tail = NULL;  in ll_init()
|    25  ll->tail = entry;  in ll_insert()
|    41  if (ll->head == entry && ll->tail == entry) {  in ll_delete()
|    43  ll->tail = NULL;  in ll_delete()
|    47  } else if (ll->tail == entry) {  in ll_delete()
|    48  ll->tail = entry->prev;  in ll_delete()
|    49  ll->tail->next = NULL;  in ll_delete()
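ll.c tracks both ends of a doubly linked list, so insertion updates the tail and deletion has to patch head or tail whenever the removed entry sits at either end. A self-contained sketch of the same bookkeeping with illustrative types (the tf_core structures differ in detail):

    #include <stddef.h>

    struct ll_entry {
        struct ll_entry *prev, *next;
    };

    struct ll {
        struct ll_entry *head, *tail;
    };

    static void ll_init(struct ll *ll)
    {
        ll->head = NULL;
        ll->tail = NULL;
    }

    /* Append at the tail. */
    static void ll_insert(struct ll *ll, struct ll_entry *e)
    {
        e->next = NULL;
        e->prev = ll->tail;
        if (ll->tail)
            ll->tail->next = e;
        else
            ll->head = e;        /* first element */
        ll->tail = e;
    }

    /* Unlink from anywhere, fixing up head/tail when the ends move. */
    static void ll_delete(struct ll *ll, struct ll_entry *e)
    {
        if (e->prev)
            e->prev->next = e->next;
        else
            ll->head = e->next;
        if (e->next)
            e->next->prev = e->prev;
        else
            ll->tail = e->prev;
    }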
|
| /dpdk/drivers/net/thunderx/ |
| H A D | nicvf_rxtx.c |
|   150  uint32_t tail;  in nicvf_xmit_pkts() local
|   157  tail = sq->tail;  in nicvf_xmit_pkts()
|   165  tail = (tail + 1) & qlen_mask;  in nicvf_xmit_pkts()
|   167  txbuffs[tail] = pkt;  in nicvf_xmit_pkts()
|   169  tail = (tail + 1) & qlen_mask;  in nicvf_xmit_pkts()
|   174  sq->tail = tail;  in nicvf_xmit_pkts()
|   197  tail = sq->tail;  in nicvf_xmit_pkts_multiseg()
|   216  tail = (tail + 1) & qlen_mask;  in nicvf_xmit_pkts_multiseg()
|   220  tail = (tail + 1) & qlen_mask;  in nicvf_xmit_pkts_multiseg()
|   226  tail = (tail + 1) & qlen_mask;  in nicvf_xmit_pkts_multiseg()
|  [all …]
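nicvf records every transmitted packet in a shadow array indexed by the same tail at which its descriptors are written, so the completion path can free the right buffer later; the & qlen_mask wrap works because the send-queue length is a power of two. A reduced sketch of that pattern with generic pointers (the real path writes a header descriptor plus gather descriptors per packet and advances the tail between them):

    #include <stdint.h>

    struct tx_queue {
        void   **txbuffs;     /* shadow copies of enqueued packets */
        uint32_t qlen_mask;   /* queue length (power of two) minus one */
        uint32_t tail;
    };

    /* Record each packet at the slot its descriptor occupies, then wrap the tail. */
    static uint16_t
    tx_enqueue(struct tx_queue *sq, void **pkts, uint16_t nb_pkts)
    {
        uint32_t tail = sq->tail;

        for (uint16_t i = 0; i < nb_pkts; i++) {
            sq->txbuffs[tail] = pkts[i];     /* freed when hardware completes it */
            /* descriptor write for pkts[i] would go here */
            tail = (tail + 1) & sq->qlen_mask;
        }
        sq->tail = tail;
        return nb_pkts;
    }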
|
| /dpdk/drivers/net/hns3/ |
| H A D | hns3_mbx.c |
|    99  hw->mbx_resp.tail + hw->mbx_resp.lost);  in hns3_get_mbx_resp()
|   215  return tail == hw->cmq.crq.next_to_use;  in hns3_cmd_crq_empty()
|   268  uint32_t tail = resp->tail + 1;  in hns3_update_resp_position() local
|   270  if (tail > resp->head)  in hns3_update_resp_position()
|   271  tail = resp->head;  in hns3_update_resp_position()
|   279  } else if (tail + resp->lost > resp->head) {  in hns3_update_resp_position()
|   283  resp->head, tail, resp->lost);  in hns3_update_resp_position()
|   286  resp->tail = tail;  in hns3_update_resp_position()
|   408  uint32_t tail, next_to_use;  in hns3_handle_mbx_msg_out_intr() local
|   412  tail = hns3_read_dev(hw, HNS3_CMDQ_RX_TAIL_REG);  in hns3_handle_mbx_msg_out_intr()
|  [all …]
|
| /dpdk/drivers/net/atlantic/ |
| H A D | atl_rxtx.c |
|   962  eop_tail = tail;  in atl_recv_pkts()
|   969  if (eop_tail == tail)  in atl_recv_pkts()
|  1058  tail = (tail + 1) % rxq->nb_rx_desc;  in atl_recv_pkts()
|  1089  rxq->rx_tail = tail;  in atl_recv_pkts()
|  1107  tail = (uint16_t)((tail == 0) ?  in atl_recv_pkts()
|  1223  int tail = 0;  in atl_xmit_pkt() local
|  1232  tail = txq->tx_tail;  in atl_xmit_pkt()
|  1243  tail = (tail + 1) % txq->nb_tx_desc;  in atl_xmit_pkt()
|  1244  txq->tx_tail = tail;  in atl_xmit_pkt()
|  1286  tail = (tail + 1) % txq->nb_tx_desc;  in atl_xmit_pkt()
|  [all …]
|
| /dpdk/drivers/event/opdl/ |
| H A D | opdl_ring.c |
|    73  uint32_t tail;  member
|   171  uint32_t this_tail = s->shared.tail;  in update_available_seq()
|   328  tail)) {  in claim_mgr_add()
|   333  mgr->claims[claim_mgr_index(mgr->mgr_head)].tail = tail;  in claim_mgr_add()
|   337  mgr->num_claimed += (head - tail);  in claim_mgr_add()
|   348  *tail = mgr->claims[claim_mgr_index(mgr->mgr_tail)].tail;  in claim_mgr_read()
|   375  uint32_t tail;  in opdl_stage_disclaim_multithread_n() local
|   386  tail) {  in opdl_stage_disclaim_multithread_n()
|   391  num_entries -= (head - tail);  in opdl_stage_disclaim_multithread_n()
|   395  num_entries + tail,  in opdl_stage_disclaim_multithread_n()
|  [all …]
|
| /dpdk/drivers/net/ena/base/ |
| H A D | ena_eth_com.h |
|    77  u16 tail, next_to_comp, cnt;  in ena_com_free_q_entries() local
|    80  tail = io_sq->tail;  in ena_com_free_q_entries()
|    81  cnt = tail - next_to_comp;  in ena_com_free_q_entries()
|   156  u16 tail = io_sq->tail;  in ena_com_write_sq_doorbell() local
|   160  io_sq->qid, tail);  in ena_com_write_sq_doorbell()
|   162  ENA_REG_WRITE32(io_sq->bus, tail, io_sq->db_addr);  in ena_com_write_sq_doorbell()
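ena_com_free_q_entries leans on unsigned 16-bit wraparound: tail and next_to_comp are free-running counters, so their difference is the number of in-flight entries even after either counter has wrapped, and the free count is the queue depth minus that. A small sketch with an illustrative queue type:

    #include <stdint.h>

    struct io_queue {
        uint16_t tail;           /* incremented per submitted entry, never masked */
        uint16_t next_to_comp;   /* incremented per completed entry */
        uint16_t q_depth;        /* total number of entries */
    };

    /* Entries still available to software: depth minus what is outstanding. */
    static uint16_t
    free_q_entries(const struct io_queue *q)
    {
        uint16_t cnt = q->tail - q->next_to_comp;  /* wraps correctly in 16 bits */

        return q->q_depth - cnt;
    }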
|
| /dpdk/drivers/common/qat/ |
| H A D | qat_qp.c |
|   244  queue->tail = 0;  in qat_queue_create()
|   559  register uint32_t tail;  in qat_enqueue_op_burst() local
|   567  tail = queue->tail;  in qat_enqueue_op_burst()
|   621  tail = adf_modulo(tail + queue->msg_size, queue->modulo_mask);  in qat_enqueue_op_burst()
|   626  queue->tail = tail;  in qat_enqueue_op_burst()
|   645  register uint32_t tail;  in qat_enqueue_comp_op_burst() local
|   657  tail = queue->tail;  in qat_enqueue_comp_op_burst()
|   743  *ops, tmp_qp, tail,  in qat_enqueue_comp_op_burst()
|   792  tail = adf_modulo(tail + (queue->msg_size * descriptors_built),  in qat_enqueue_comp_op_burst()
|   799  queue->tail = tail;  in qat_enqueue_comp_op_burst()
|  [all …]
|
| /dpdk/drivers/common/cpt/ |
| H A D | cpt_common.h |
|    39  unsigned int tail;  member
|    68  q->rid_queue[(q->tail + off) & (qsize - 1)] = rid;  in pending_queue_push()
|    78  q->tail = (q->tail + cnt) & (qsize - 1);  in pending_queue_commit()
|   107  return (q->tail - q->head) & (qsize - 1);  in pending_queue_level()
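The CPT pending queue stages entries at an offset past the current tail and only moves the tail once the whole burst is in place, so a consumer walking head..tail never reaches a slot that has not been filled; the level is the usual masked tail-minus-head. A condensed sketch of that indexing; the release fence is my assumption about the ordering the commit step needs and is not shown in the hits above:

    #include <stdatomic.h>
    #include <stdint.h>

    struct pending_queue {
        void   **rid_queue;     /* per-request bookkeeping slots */
        unsigned int tail;      /* producer index */
        unsigned int head;      /* consumer index */
    };

    /* Stage one entry 'off' slots past the tail without publishing it yet. */
    static inline void
    pq_push(struct pending_queue *q, void *rid, unsigned int off, unsigned int qsize)
    {
        q->rid_queue[(q->tail + off) & (qsize - 1)] = rid;
    }

    /* Publish a whole burst of 'cnt' staged entries at once. */
    static inline void
    pq_commit(struct pending_queue *q, unsigned int cnt, unsigned int qsize)
    {
        /* assumed: order the staged entries before the new tail */
        atomic_thread_fence(memory_order_release);
        q->tail = (q->tail + cnt) & (qsize - 1);
    }

    /* Entries submitted but not yet completed. */
    static inline unsigned int
    pq_level(const struct pending_queue *q, unsigned int qsize)
    {
        return (q->tail - q->head) & (qsize - 1);
    }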
|
| /dpdk/drivers/net/mvneta/ |
| H A D | mvneta_rxtx.c |
|   185  int tail = sq->tail;  in mvneta_sent_buffers_free() local
|   196  entry = &sq->ent[tail];  in mvneta_sent_buffers_free()
|   201  tail, (u64)entry->cookie,  in mvneta_sent_buffers_free()
|   203  tail = (tail + 1) & MRVL_NETA_TX_SHADOWQ_MASK;  in mvneta_sent_buffers_free()
|   212  tail = (tail + 1) & MRVL_NETA_TX_SHADOWQ_MASK;  in mvneta_sent_buffers_free()
|   215  sq->tail = tail;  in mvneta_sent_buffers_free()
|   462  int tail, tail_first;  in mvneta_tx_sg_pkt_burst() local
|   480  tail = 0;  in mvneta_tx_sg_pkt_burst()
|   517  tail_first = tail;  in mvneta_tx_sg_pkt_burst()
|   527  tail++;  in mvneta_tx_sg_pkt_burst()
|  [all …]
|
| /dpdk/drivers/crypto/nitrox/ |
| H A D | nitrox_qp.h |
|    30  uint32_t tail;  member
|    61  uint32_t tail = qp->tail % qp->count;  in nitrox_qp_get_softreq() local
|    64  return qp->ridq[tail].sr;  in nitrox_qp_get_softreq()
|    95  qp->tail++;  in nitrox_qp_dequeue()
|
| /dpdk/lib/graph/ |
| H A D | rte_graph_worker.h |
|    37  uint32_t tail;  /**< Tail of circular buffer. */  member
|   157  while (likely(head != graph->tail)) {  in rte_graph_walk()
|   175  graph->tail = 0;  in rte_graph_walk()
|   193  uint32_t tail;  in __rte_node_enqueue_tail_update() local
|   195  tail = graph->tail;  in __rte_node_enqueue_tail_update()
|   196  graph->cir_start[tail++] = node->off;  in __rte_node_enqueue_tail_update()
|   197  graph->tail = tail & graph->cir_mask;  in __rte_node_enqueue_tail_update()
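rte_graph_worker.h uses the tail of a circular buffer as a work list: the walk loop drains entries from head toward tail, and any node that receives packets during the walk is appended at the tail with a mask wrap, so it gets scheduled in the same pass. A toy version of that scheduling queue (plain node ids instead of node offsets, and none of the pending/source-stream handling the real walker has):

    #include <stdint.h>

    struct walk_state {
        uint32_t *cir_start;   /* circular buffer of runnable node ids */
        uint32_t  cir_mask;    /* buffer size (power of two) minus one */
        uint32_t  tail;        /* next free slot */
    };

    /* Append a node that just had work queued to it. */
    static void
    enqueue_tail(struct walk_state *g, uint32_t node_id)
    {
        uint32_t tail = g->tail;

        g->cir_start[tail++] = node_id;
        g->tail = tail & g->cir_mask;
    }

    /*
     * Drain everything between head and tail; nodes enqueued while draining
     * land after tail and are picked up before the loop exits.  Assumes the
     * buffer is large enough that tail never laps head within one walk.
     */
    static void
    walk(struct walk_state *g, void (*process)(uint32_t node_id))
    {
        uint32_t head = 0;

        while (head != g->tail) {
            process(g->cir_start[head]);          /* may call enqueue_tail() */
            head = (head + 1) & g->cir_mask;
        }
        g->tail = 0;   /* both indices restart from zero on the next walk */
    }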
|
| /dpdk/drivers/net/qede/base/ |
| H A D | bcm_osal.h |
|   198  osal_list_entry_t *head, *tail;  member
|   205  (list)->tail = NULL; \
|   214  (list)->tail = (entry); \
|   225  (entry)->prev = (list)->tail; \
|   226  if ((list)->tail) { \
|   227  (list)->tail->next = (entry); \
|   231  (list)->tail = (entry); \
|   250  } else if ((list)->tail == (entry)) { \
|   251  if ((list)->tail) { \
|   252  (list)->tail = (list)->tail->prev; \
|  [all …]
|
| /dpdk/drivers/crypto/cnxk/ |
| H A D | cnxk_cryptodev_ops.h |
|    53  uint64_t tail;  member
|   159  pending_queue_infl_cnt(uint64_t head, uint64_t tail, const uint64_t mask)  in pending_queue_infl_cnt() argument
|   165  return ((head + mask + 1) - tail) & mask;  in pending_queue_infl_cnt()
|   169  pending_queue_free_cnt(uint64_t head, uint64_t tail, const uint64_t mask)  in pending_queue_free_cnt() argument
|   172  return mask - pending_queue_infl_cnt(head, tail, mask);  in pending_queue_free_cnt()
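pending_queue_infl_cnt adds mask + 1 before subtracting so the unsigned difference stays in range when head, which is kept within the queue size, has wrapped around behind tail; the final & mask folds the result back, and pending_queue_free_cnt reports mask minus that, which keeps a full queue from aliasing an empty one. A short check of the arithmetic for a hypothetical 8-slot queue:

    #include <assert.h>
    #include <stdint.h>

    /* Entries submitted but not yet completed, for a queue of mask + 1 slots. */
    static inline uint64_t
    infl_cnt(uint64_t head, uint64_t tail, uint64_t mask)
    {
        /* Adding mask + 1 keeps the difference non-negative when head < tail. */
        return ((head + mask + 1) - tail) & mask;
    }

    /* Slots still available to the producer. */
    static inline uint64_t
    free_cnt(uint64_t head, uint64_t tail, uint64_t mask)
    {
        return mask - infl_cnt(head, tail, mask);
    }

    int main(void)
    {
        const uint64_t mask = 7;             /* queue of 8 slots */

        assert(infl_cnt(5, 2, mask) == 3);   /* no wrap: 5 - 2 */
        assert(infl_cnt(1, 6, mask) == 3);   /* head wrapped past the end */
        assert(free_cnt(1, 6, mask) == 4);   /* one slot is kept unusable */
        return 0;
    }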
|
| /dpdk/drivers/dma/cnxk/ |
| H A D | cnxk_dmadev.h |
|    10  #define STRM_INC(s) ((s).tail = ((s).tail + 1) % (s).max_cnt)
|    27  uint16_t tail;  member
|