/linux-6.15/drivers/net/ethernet/mellanox/mlx5/core/steering/hws/
debug.c
  243   struct mlx5hws_send_engine *send_queue;   in hws_debug_dump_context_send_engine() local
  250   send_queue = &ctx->send_queue[i];   in hws_debug_dump_context_send_engine()
  255   send_queue->used_entries,   in hws_debug_dump_context_send_engine()
  256   send_queue->num_entries,   in hws_debug_dump_context_send_engine()
  258   send_queue->num_entries,   in hws_debug_dump_context_send_engine()
  259   send_queue->err,   in hws_debug_dump_context_send_engine()
  260   send_queue->completed.ci,   in hws_debug_dump_context_send_engine()
  261   send_queue->completed.pi,   in hws_debug_dump_context_send_engine()
  262   send_queue->completed.mask);   in hws_debug_dump_context_send_engine()
  264   send_ring = &send_queue->send_ring;   in hws_debug_dump_context_send_engine()
  [all …]

send.c
  562    return hws_send_engine_poll(&ctx->send_queue[queue_id], res, res_nb);   in mlx5hws_send_queue_poll()
  951    mlx5hws_send_queue_close(&ctx->send_queue[queues]);   in __hws_send_queues_close()
  975    kfree(ctx->send_queue);   in mlx5hws_send_queues_close()
  1030   ctx->send_queue = kcalloc(ctx->queues, sizeof(*ctx->send_queue), GFP_KERNEL);   in mlx5hws_send_queues_open()
  1031   if (!ctx->send_queue) {   in mlx5hws_send_queues_open()
  1043   err = mlx5hws_send_queue_open(ctx, &ctx->send_queue[i], queue_size);   in mlx5hws_send_queues_open()
  1053   kfree(ctx->send_queue);   in mlx5hws_send_queues_open()
  1070   queue = &ctx->send_queue[queue_id];   in mlx5hws_send_queue_action()
  1192   queue_id = queue - ctx->send_queue;   in mlx5hws_send_stes_fw()

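The send.c hits above show the lifecycle of the per-context engine array: kcalloc() one mlx5hws_send_engine per queue, open each one, and unwind on failure. A minimal sketch of that open/close pattern follows; the hws_ctx/engine types and the queue_open()/queue_close() helpers are simplified stand-ins, not the real mlx5hws API.

#include <linux/slab.h>

struct engine { int num_entries; };

struct hws_ctx {
	struct engine *send_queue;	/* one engine per queue */
	size_t queues;
};

static int queue_open(struct engine *e, int size)
{
	e->num_entries = size;		/* stand-in for real HW setup */
	return 0;
}

static void queue_close(struct engine *e) { }

static int queues_open(struct hws_ctx *ctx, int queue_size)
{
	size_t i;
	int err;

	ctx->send_queue = kcalloc(ctx->queues, sizeof(*ctx->send_queue),
				  GFP_KERNEL);
	if (!ctx->send_queue)
		return -ENOMEM;

	for (i = 0; i < ctx->queues; i++) {
		err = queue_open(&ctx->send_queue[i], queue_size);
		if (err)
			goto close_opened;
	}
	return 0;

close_opened:
	while (i--)			/* close only what was opened */
		queue_close(&ctx->send_queue[i]);
	kfree(ctx->send_queue);
	return err;
}
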
rule.c
  334   queue = &ctx->send_queue[attr->queue_id];   in hws_rule_create_hws()
  432   queue = &ctx->send_queue[attr->queue_id];   in hws_rule_destroy_failed_hws()
  470   queue = &ctx->send_queue[attr->queue_id];   in hws_rule_destroy_hws()
  543   if (unlikely(mlx5hws_send_engine_full(&ctx->send_queue[attr->queue_id])))   in hws_rule_enqueue_precheck()
  637   queue = &ctx->send_queue[attr->queue_id];   in mlx5hws_rule_move_hws_add()

context.h
  47   struct mlx5hws_send_engine *send_queue;   member

bwc.c
  15    return min(ctx->send_queue[queue_id].num_entries / 2,   in hws_bwc_get_burst_th()
  236   queue_full = mlx5hws_send_engine_full(&ctx->send_queue[queue_id]);   in hws_bwc_queue_poll()
  657   mlx5hws_send_engine_flush_queue(&ctx->send_queue[queue_id]);   in hws_bwc_matcher_move_all_simple()

pat_arg.c
  376   queue = &ctx->send_queue[ctx->queues - 1];   in mlx5hws_arg_write_inline_arg_data()

action.c
  1713   queue = &ctx->send_queue[ctx->queues - 1];   in hws_action_create_dest_match_range_fill_table()

/linux-6.15/drivers/net/wireless/ath/ath6kl/
htc_pipe.c
  315   INIT_LIST_HEAD(&send_queue);   in htc_try_send()
  357   list_splice_tail_init(txq, &send_queue);   in htc_try_send()
  373   list_move_tail(&packet->list, &send_queue);   in htc_try_send()
  398   &send_queue);   in htc_try_send()
  402   if (list_empty(&send_queue)) {   in htc_try_send()
  418   if (!list_empty(&send_queue)) {   in htc_try_send()
  420   list_splice_tail_init(&send_queue, &ep->txq);   in htc_try_send()
  421   if (!list_empty(&send_queue)) {   in htc_try_send()
  426   INIT_LIST_HEAD(&send_queue);   in htc_try_send()
  470   if (get_queue_depth(&send_queue) == 0) {   in htc_try_send()
  [all …]

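Here send_queue is an on-stack staging list: htc_try_send() splices pending packets off the endpoint queue, works through them, and splices leftovers back. A minimal sketch of that idiom, with hypothetical packet/endpoint types and a submit callback standing in for the real ath6kl send path:

#include <linux/list.h>
#include <linux/types.h>

struct packet { struct list_head list; };
struct endpoint { struct list_head txq; };

static void try_send(struct endpoint *ep, bool (*submit)(struct packet *))
{
	struct list_head send_queue;	/* local staging list */
	struct packet *pkt, *tmp;

	INIT_LIST_HEAD(&send_queue);
	/* Stage everything currently queued on the endpoint. */
	list_splice_tail_init(&ep->txq, &send_queue);

	list_for_each_entry_safe(pkt, tmp, &send_queue, list) {
		list_del(&pkt->list);
		if (!submit(pkt)) {
			/* Could not send: requeue it and the rest. */
			list_add(&pkt->list, &send_queue);
			list_splice_tail_init(&send_queue, &ep->txq);
			return;
		}
	}
}
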
/linux-6.15/net/rxrpc/
sendmsg.c
  245   struct rxrpc_txqueue *sq = call->send_queue;   in rxrpc_queue_packet()
  271   call->send_queue = NULL;   in rxrpc_queue_packet()
  293   if (call->send_queue) {   in rxrpc_alloc_txqueue()
  295   call->send_queue->next = tq;   in rxrpc_alloc_txqueue()
  296   call->send_queue = tq;   in rxrpc_alloc_txqueue()
  306   call->send_queue = tq;   in rxrpc_alloc_txqueue()
  397   if (!call->send_queue || !((call->send_top + 1) & RXRPC_TXQ_MASK)) {   in rxrpc_send_data()

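In rxrpc, send_queue points at the txqueue buffer sendmsg is currently filling, and rxrpc_alloc_txqueue() links a fresh node behind it. A minimal sketch of that chaining, with a stand-in txqueue layout rather than the real rxrpc structures:

#include <linux/slab.h>

struct txqueue {
	struct txqueue *next;
	/* ... packet slots ... */
};

struct call {
	struct txqueue *send_queue;	/* buffer sendmsg is writing into */
};

static int alloc_txqueue(struct call *call)
{
	struct txqueue *tq = kzalloc(sizeof(*tq), GFP_KERNEL);

	if (!tq)
		return -ENOMEM;
	if (call->send_queue)
		/* Chain behind the buffer currently being filled. */
		call->send_queue->next = tq;
	call->send_queue = tq;
	return 0;
}
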
ar-internal.h
  701   struct rxrpc_txqueue *send_queue;   /* Queue that sendmsg is writing into */   member

/linux-6.15/drivers/infiniband/core/
mad.c
  1032   if (qp_info->send_queue.count < qp_info->send_queue.max_active) {   in ib_send_mad()
  1036   list = &qp_info->send_queue.list;   in ib_send_mad()
  1043   qp_info->send_queue.count++;   in ib_send_mad()
  2273   struct ib_mad_queue *send_queue;   in ib_mad_send_done() local
  2288   send_queue = mad_list->mad_queue;   in ib_mad_send_done()
  2289   qp_info = send_queue->qp_info;   in ib_mad_send_done()
  2302   spin_lock_irqsave(&send_queue->lock, flags);   in ib_mad_send_done()
  2306   if (send_queue->count-- > send_queue->max_active) {   in ib_mad_send_done()
  2314   spin_unlock_irqrestore(&send_queue->lock, flags);   in ib_mad_send_done()
  2886   init_mad_queue(qp_info, &qp_info->send_queue);   in init_mad_qp()
  [all …]

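The MAD layer throttles posting with a count/max_active pair updated under the queue spinlock: ib_send_mad() only posts while count is below max_active, and the completion path decrements count and checks for overflowed backlog. A minimal sketch of that flow control, with a simplified stand-in for struct ib_mad_queue:

#include <linux/list.h>
#include <linux/spinlock.h>
#include <linux/types.h>

struct mad_queue {
	spinlock_t lock;
	struct list_head list;
	unsigned long count;
	unsigned long max_active;
};

/* Post a work item only if the hardware queue still has room. */
static bool mad_try_post(struct mad_queue *q, struct list_head *entry)
{
	unsigned long flags;
	bool posted = false;

	spin_lock_irqsave(&q->lock, flags);
	if (q->count < q->max_active) {
		list_add_tail(entry, &q->list);
		q->count++;
		posted = true;	/* caller would now issue the send */
	}
	spin_unlock_irqrestore(&q->lock, flags);
	return posted;
}

/* Completion side: drop the count; report whether a backlog exists. */
static bool mad_complete(struct mad_queue *q, struct list_head *entry)
{
	unsigned long flags;
	bool had_backlog;

	spin_lock_irqsave(&q->lock, flags);
	list_del(entry);
	had_backlog = q->count-- > q->max_active;
	spin_unlock_irqrestore(&q->lock, flags);
	return had_backlog;
}
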
mad_priv.h
  189   struct ib_mad_queue send_queue;   member

/linux-6.15/fs/dlm/
midcomms.c
  164   struct list_head send_queue;   member
  310   list_for_each_entry_rcu(mh, &node->send_queue, list) {   in dlm_send_queue_flush()
  362   INIT_LIST_HEAD(&node->send_queue);   in dlm_midcomms_addr()
  455   list_for_each_entry_rcu(mh, &node->send_queue, list) {   in dlm_receive_ack()
  466   list_for_each_entry_rcu(mh, &node->send_queue, list) {   in dlm_receive_ack()
  941   list_for_each_entry_rcu(mh, &node->send_queue, list) {   in dlm_midcomms_unack_msg_resend()
  971   list_add_tail_rcu(&mh->list, &mh->node->send_queue);   in midcomms_new_msg_cb()

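Here send_queue holds unacked messages on an RCU-protected list: senders append with list_add_tail_rcu(), and the ack path walks it with list_for_each_entry_rcu() and releases acked entries after a grace period. A minimal sketch of that pattern, with hypothetical msg/node types and a per-node lock standing in for the real dlm locking:

#include <linux/list.h>
#include <linux/rcupdate.h>
#include <linux/slab.h>
#include <linux/spinlock.h>
#include <linux/types.h>

struct msg {
	struct list_head list;
	struct rcu_head rcu;
	u32 seq;
};

struct node {
	spinlock_t lock;
	struct list_head send_queue;	/* messages awaiting an ack */
};

static void queue_msg(struct node *n, struct msg *m)
{
	spin_lock(&n->lock);
	list_add_tail_rcu(&m->list, &n->send_queue);
	spin_unlock(&n->lock);
}

static void receive_ack(struct node *n, u32 acked_seq)
{
	struct msg *m;

	rcu_read_lock();
	list_for_each_entry_rcu(m, &n->send_queue, list) {
		if (m->seq > acked_seq)
			break;
		spin_lock(&n->lock);
		list_del_rcu(&m->list);		/* readers may still see m */
		spin_unlock(&n->lock);
		kfree_rcu(m, rcu);		/* freed after grace period */
	}
	rcu_read_unlock();
}
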
/linux-6.15/drivers/net/hamradio/
yam.c
  125    struct sk_buff_head send_queue;   /* Packets awaiting transmission */   member
  588    skb_queue_tail(&yp->send_queue, skb);   in yam_send_packet()
  608    skb_queue_empty(&yp->send_queue))   in yam_arbitrate()
  658    if (!(skb = skb_dequeue(&yp->send_queue))) {   in yam_tx_byte()
  702    if (skb_queue_empty(&yp->send_queue)) {   in yam_tx_byte()
  913    while ((skb = skb_dequeue(&yp->send_queue)))   in yam_close()
  1098   skb_queue_head_init(&yp->send_queue);   in yam_setup()

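yam.c uses the classic sk_buff_head lifecycle: initialize at setup, enqueue in the xmit hook, dequeue in the TX path, drain on close. A minimal sketch of that lifecycle; yam_port here is a hypothetical stand-in for the driver's private struct:

#include <linux/skbuff.h>

struct yam_port {
	struct sk_buff_head send_queue;	/* packets awaiting transmission */
};

static void port_setup(struct yam_port *yp)
{
	skb_queue_head_init(&yp->send_queue);
}

static void port_xmit(struct yam_port *yp, struct sk_buff *skb)
{
	skb_queue_tail(&yp->send_queue, skb);
}

static void port_tx_next(struct yam_port *yp)
{
	struct sk_buff *skb = skb_dequeue(&yp->send_queue);

	if (!skb)
		return;			/* nothing pending */
	/* ... push skb bytes to the modem ... */
	dev_kfree_skb(skb);
}

static void port_close(struct yam_port *yp)
{
	struct sk_buff *skb;

	while ((skb = skb_dequeue(&yp->send_queue)))
		dev_kfree_skb(skb);	/* drop anything still queued */
}
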
/linux-6.15/drivers/net/
virtio_net.c
  281    struct send_queue {   struct
  375    struct send_queue *sq;
  1071   struct send_queue *sq)   in tx_may_stop()
  1542   struct send_queue *sq;   in virtnet_xsk_wakeup()
  1659   struct send_queue *sq;   in virtnet_xdp_xmit()
  3038   struct send_queue *sq;   in virtnet_poll()
  3176   struct send_queue *sq = container_of(napi, struct send_queue, napi);   in virtnet_poll_tx()
  4095   struct send_queue *sq;   in virtnet_set_ringparam()
  5856   struct send_queue *sq;   in virtnet_xsk_pool_enable()
  5938   struct send_queue *sq;   in virtnet_xsk_pool_disable()
  [all …]

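In virtio_net, send_queue is a per-TX-queue struct with an embedded napi_struct, and the poll callback recovers the enclosing queue via container_of(), as at line 3176 above. A minimal sketch of that embedding; the struct members and the poll body are trimmed-down placeholders:

#include <linux/netdevice.h>

struct send_queue {
	struct napi_struct napi;	/* embedded, one per TX queue */
	/* ... virtqueue, stats, xdp state ... */
};

static int poll_tx(struct napi_struct *napi, int budget)
{
	/* Recover the per-queue state from the embedded napi member. */
	struct send_queue *sq = container_of(napi, struct send_queue, napi);
	int done = 0;

	/* ... reclaim completed TX buffers on sq ... */
	(void)sq;
	if (done < budget)
		napi_complete_done(napi, done);
	return done;
}
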