/linux-6.15/drivers/dma/amd/ptdma/
ptdma-dev.c
      72  u8 *q_desc = (u8 *)&cmd_q->qbase[cmd_q->qidx];  in pt_core_execute_cmd()
      84  cmd_q->qidx = (cmd_q->qidx + 1) % CMD_Q_LEN;  in pt_core_execute_cmd()
     128  struct pt_cmd_queue *cmd_q = &cmd->pt->cmd_q;  in pt_do_cmd_complete() local
     137  pt_log_error(cmd_q->pt, cmd_q->cmd_error);  in pt_do_cmd_complete()
     167  struct pt_cmd_queue *cmd_q = &pt->cmd_q;  in pt_core_irq_handler() local
     179  struct pt_cmd_queue *cmd_q = &pt->cmd_q;  in pt_core_init() local
     225  iowrite32(cmd_q->qcontrol, cmd_q->reg_control);  in pt_core_init()
     244  cmd_q->qdma_tail = cmd_q->qbase_dma;  in pt_core_init()
     251  iowrite32(cmd_q->qcontrol, cmd_q->reg_control);  in pt_core_init()
     269  dma_free_coherent(dev, cmd_q->qsize, cmd_q->qbase, cmd_q->qbase_dma);  in pt_core_init()
    [all …]
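The two matches in pt_core_execute_cmd() show the driver's circular descriptor ring: a slot is taken at cmd_q->qidx inside cmd_q->qbase, and the index then wraps modulo CMD_Q_LEN. A minimal user-space sketch of that wrap-around pattern follows; the ring size, the desc layout, and the submit_desc() helper are hypothetical stand-ins, not the driver's real types.

    #include <stdint.h>
    #include <string.h>
    #include <stdio.h>

    #define QLEN 32                          /* hypothetical ring size, plays the role of CMD_Q_LEN */

    struct desc { uint32_t words[8]; };      /* stand-in for a hardware descriptor */

    struct ring {
        struct desc slots[QLEN];
        unsigned int idx;                    /* next free slot, like cmd_q->qidx */
    };

    /* Copy a descriptor into the next slot, then advance the index with wrap-around. */
    static void submit_desc(struct ring *r, const struct desc *d)
    {
        memcpy(&r->slots[r->idx], d, sizeof(*d));
        r->idx = (r->idx + 1) % QLEN;        /* same wrap as cmd_q->qidx above */
    }

    int main(void)
    {
        struct ring r = { .idx = 0 };
        struct desc d = { .words = { 1 } };

        for (int i = 0; i < 40; i++)         /* more submissions than slots: the index wraps */
            submit_desc(&r, &d);
        printf("idx after 40 submissions: %u\n", r.idx);   /* prints 8, i.e. 40 % 32 */
        return 0;
    }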
|
ptdma-dmaengine.c
      51  struct pt_cmd_queue *cmd_q = &ae4cmd_q->cmd_q;  in ae4_check_status_error() local
      59  if (cmd_q->cmd_error)  in ae4_check_status_error()
      60  ae4_log_error(cmd_q->pt, cmd_q->cmd_error);  in ae4_check_status_error()
     106  cmd_q = &ae4cmd_q->cmd_q;  in pt_get_cmd_queue()
     108  cmd_q = &pt->cmd_q;  in pt_get_cmd_queue()
     111  return cmd_q;  in pt_get_cmd_queue()
     117  struct pt_cmd_queue *cmd_q = &ae4cmd_q->cmd_q;  in ae4_core_execute_cmd() local
     142  cmd_q->cmd_error = 0;  in pt_core_perform_passthru_ae4()
     143  cmd_q->total_pt_ops++;  in pt_core_perform_passthru_ae4()
     511  pt_stop_queue(cmd_q);  in pt_pause()
    [all …]
|
ptdma-debugfs.c
      66  struct pt_cmd_queue *cmd_q = s->private;  in pt_debugfs_queue_show() local
      70  if (!cmd_q)  in pt_debugfs_queue_show()
      73  seq_printf(s, " Pass-Thru: %ld\n", cmd_q->total_pt_ops);  in pt_debugfs_queue_show()
      75  pt = cmd_q->pt;  in pt_debugfs_queue_show()
      77  regval = readl(cmd_q->reg_control + 0x4);  in pt_debugfs_queue_show()
      80  regval = ioread32(cmd_q->reg_control + 0x000C);  in pt_debugfs_queue_show()
     105  struct pt_cmd_queue *cmd_q;  in ptdma_debugfs_setup() local
     124  cmd_q = &ae4cmd_q->cmd_q;  in ptdma_debugfs_setup()
     132  debugfs_create_file("stats", 0400, debugfs_q_instance, cmd_q,  in ptdma_debugfs_setup()
     138  cmd_q = &pt->cmd_q;  in ptdma_debugfs_setup()
    [all …]
|
ptdma.h
     252  struct pt_cmd_queue cmd_q;  member
     323  int pt_core_perform_passthru(struct pt_cmd_queue *cmd_q,
     326  void pt_check_status_trans(struct pt_device *pt, struct pt_cmd_queue *cmd_q);
     327  void pt_start_queue(struct pt_cmd_queue *cmd_q);
     328  void pt_stop_queue(struct pt_cmd_queue *cmd_q);
     332  iowrite32(0, pt->cmd_q.reg_control + 0x000C);  in pt_core_disable_queue_interrupts()
     337  iowrite32(SUPPORTED_INTERRUPTS, pt->cmd_q.reg_control + 0x000C);  in pt_core_enable_queue_interrupts()
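The helpers at lines 332 and 337 enable or disable the queue's interrupts with a single MMIO write to a fixed offset (0x000C) in the queue's control-register block. Below is a hedged user-space model of that pattern; the INT_MASK_OFF offset, the SUPPORTED_INTS value, and the array standing in for the ioremap()ed register block are all made-up placeholders.

    #include <stdint.h>
    #include <stdio.h>

    #define INT_MASK_OFF   0x0C              /* hypothetical offset of the interrupt-enable register */
    #define SUPPORTED_INTS 0x3               /* hypothetical "all interrupts we handle" mask */

    /* In the driver this would be an ioremap()ed region; an array models it here. */
    static volatile uint32_t regs[0x40 / 4];

    static inline void reg_write32(uint32_t off, uint32_t val)
    {
        regs[off / 4] = val;                 /* stands in for iowrite32(val, base + off) */
    }

    static void queue_irqs_disable(void) { reg_write32(INT_MASK_OFF, 0); }
    static void queue_irqs_enable(void)  { reg_write32(INT_MASK_OFF, SUPPORTED_INTS); }

    int main(void)
    {
        queue_irqs_enable();
        printf("mask after enable:  0x%x\n", (unsigned)regs[INT_MASK_OFF / 4]);
        queue_irqs_disable();
        printf("mask after disable: 0x%x\n", (unsigned)regs[INT_MASK_OFF / 4]);
        return 0;
    }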
|
/linux-6.15/drivers/crypto/ccp/
ccp-dev-v5.c
     238  mP = (__le32 *)&cmd_q->qbase[cmd_q->qidx];  in ccp5_do_cmd()
     628  struct ccp_cmd_queue *cmd_q = &ccp->cmd_q[i];  in ccp_find_and_assign_lsb_to_q() local
     820  cmd_q = &ccp->cmd_q[ccp->cmd_q_count];  in ccp5_init()
     848  cmd_q->reg_int_enable = cmd_q->reg_control +  in ccp5_init()
     853  cmd_q->reg_int_status = cmd_q->reg_control +  in ccp5_init()
     855  cmd_q->reg_dma_status = cmd_q->reg_control +  in ccp5_init()
     876  cmd_q = &ccp->cmd_q[i];  in ccp5_init()
     914  cmd_q = &ccp->cmd_q[i];  in ccp5_init()
     919  cmd_q->qdma_tail = cmd_q->qbase_dma;  in ccp5_init()
     951  cmd_q = &ccp->cmd_q[i];  in ccp5_init()
    [all …]
|
ccp-dev-v3.c
      76  struct ccp_cmd_queue *cmd_q = op->cmd_q;  in ccp_do_cmd() local
     140  cmd_q->free_slots = CMD_Q_DEPTH(cmd_q->q_status);  in ccp_do_cmd()
     334  cmd_q = &ccp->cmd_q[i];  in ccp_irq_bh()
     336  q_int = status & (cmd_q->int_ok | cmd_q->int_err);  in ccp_irq_bh()
     339  cmd_q->q_status = ioread32(cmd_q->reg_status);  in ccp_irq_bh()
     397  cmd_q = &ccp->cmd_q[ccp->cmd_q_count];  in ccp_init()
     419  cmd_q->free_slots = ccp_get_free_slots(cmd_q);  in ccp_init()
     424  ccp->qim |= cmd_q->int_ok | cmd_q->int_err;  in ccp_init()
     444  cmd_q = &ccp->cmd_q[i];  in ccp_init()
     468  cmd_q = &ccp->cmd_q[i];  in ccp_init()
    [all …]
|
ccp-debugfs.c
     115  struct ccp_cmd_queue *cmd_q = &ccp->cmd_q[i];  in ccp5_debugfs_stats_read() local
     160  cmd_q->total_ops = 0L;  in ccp5_debugfs_reset_queue_stats()
     161  cmd_q->total_aes_ops = 0L;  in ccp5_debugfs_reset_queue_stats()
     163  cmd_q->total_3des_ops = 0L;  in ccp5_debugfs_reset_queue_stats()
     164  cmd_q->total_sha_ops = 0L;  in ccp5_debugfs_reset_queue_stats()
     165  cmd_q->total_rsa_ops = 0L;  in ccp5_debugfs_reset_queue_stats()
     166  cmd_q->total_pt_ops = 0L;  in ccp5_debugfs_reset_queue_stats()
     167  cmd_q->total_ecc_ops = 0L;  in ccp5_debugfs_reset_queue_stats()
     200  if (!cmd_q)  in ccp5_debugfs_queue_read()
     208  cmd_q->total_ops);  in ccp5_debugfs_queue_read()
    [all …]
|
ccp-ops.c
     437  op.cmd_q = cmd_q;  in ccp_copy_to_from_sb()
     512  op.cmd_q = cmd_q;  in ccp_run_aes_cmac_cmd()
     699  op.cmd_q = cmd_q;  in ccp_run_aes_gcm_cmd()
     851  op.cmd_q = cmd_q;  in ccp_run_aes_gcm_cmd()
     952  op.cmd_q = cmd_q;  in ccp_run_aes_cmd()
    1147  op.cmd_q = cmd_q;  in ccp_run_xts_aes_cmd()
    1335  op.cmd_q = cmd_q;  in ccp_run_des3_cmd()
    1645  op.cmd_q = cmd_q;  in ccp_run_sha_cmd()
    1851  op.cmd_q = cmd_q;  in ccp_run_rsa_cmd()
    1993  op.cmd_q = cmd_q;  in ccp_run_passthru_cmd()
    [all …]
|
ccp-dev.c
     327  if (ccp->cmd_q[i].active)  in ccp_enqueue_cmd()
     339  wake_up_process(ccp->cmd_q[i].kthread);  in ccp_enqueue_cmd()
     361  if (ccp->cmd_q[i].active)  in ccp_do_cmd_backlog()
     376  struct ccp_device *ccp = cmd_q->ccp;  in ccp_dequeue_cmd()
     383  cmd_q->active = 0;  in ccp_dequeue_cmd()
     386  cmd_q->suspended = 1;  in ccp_dequeue_cmd()
     395  cmd_q->active = 1;  in ccp_dequeue_cmd()
     449  cmd = ccp_dequeue_cmd(cmd_q);  in ccp_cmd_queue_thread()
     456  cmd->ret = ccp_run_cmd(cmd_q, cmd);  in ccp_cmd_queue_thread()
     543  if (ccp->cmd_q[i].suspended)  in ccp_queues_suspended()
    [all …]
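ccp-dev.c shows the per-queue worker model: ccp_enqueue_cmd() queues work and wakes the queue's kthread with wake_up_process(), and ccp_cmd_queue_thread() loops, dequeuing each command and running it. The sketch below is a hedged user-space analogue using POSIX threads instead of a kthread; the cmd structure, run_cmd(), and the stop flag are hypothetical stand-ins for the driver's real plumbing (compile with -pthread).

    #include <pthread.h>
    #include <stdbool.h>
    #include <stdio.h>

    struct cmd { int id; struct cmd *next; };

    struct cmd_queue {
        pthread_mutex_t lock;
        pthread_cond_t  wake;                /* plays the role of wake_up_process() */
        struct cmd *head, *tail;
        bool stop;
    };

    /* Producer side: append a command and wake the worker (cf. ccp_enqueue_cmd). */
    static void enqueue_cmd(struct cmd_queue *q, struct cmd *c)
    {
        pthread_mutex_lock(&q->lock);
        c->next = NULL;
        if (q->tail)
            q->tail->next = c;
        else
            q->head = c;
        q->tail = c;
        pthread_cond_signal(&q->wake);
        pthread_mutex_unlock(&q->lock);
    }

    /* Worker side: block until a command is available (cf. ccp_dequeue_cmd). */
    static struct cmd *dequeue_cmd(struct cmd_queue *q)
    {
        struct cmd *c;

        pthread_mutex_lock(&q->lock);
        while (!q->head && !q->stop)
            pthread_cond_wait(&q->wake, &q->lock);
        c = q->head;
        if (c) {
            q->head = c->next;
            if (!q->head)
                q->tail = NULL;
        }
        pthread_mutex_unlock(&q->lock);
        return c;                            /* NULL once stopped and drained */
    }

    /* Hypothetical stand-in for ccp_run_cmd(): just report the command. */
    static void run_cmd(struct cmd *c) { printf("ran cmd %d\n", c->id); }

    /* Queue worker loop, modelled on ccp_cmd_queue_thread(). */
    static void *cmd_queue_thread(void *arg)
    {
        struct cmd_queue *q = arg;
        struct cmd *c;

        while ((c = dequeue_cmd(q)))
            run_cmd(c);
        return NULL;
    }

    int main(void)
    {
        struct cmd_queue q = {
            .lock = PTHREAD_MUTEX_INITIALIZER,
            .wake = PTHREAD_COND_INITIALIZER,
        };
        struct cmd cmds[3] = { { .id = 0 }, { .id = 1 }, { .id = 2 } };
        pthread_t tid;

        pthread_create(&tid, NULL, cmd_queue_thread, &q);
        for (int i = 0; i < 3; i++)
            enqueue_cmd(&q, &cmds[i]);

        pthread_mutex_lock(&q.lock);         /* tell the worker to exit once drained */
        q.stop = true;
        pthread_cond_signal(&q.wake);
        pthread_mutex_unlock(&q.lock);

        pthread_join(tid, NULL);
        return 0;
    }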
|
ccp-dev.h
     380  struct ccp_cmd_queue cmd_q[MAX_HW_QUEUES];  member
     532  struct ccp_cmd_queue *cmd_q;  member
     641  int ccp_run_cmd(struct ccp_cmd_queue *cmd_q, struct ccp_cmd *cmd);
|
/linux-6.15/drivers/dma/amd/ae4dma/
ae4dma-dev.c
      20  struct pt_cmd_queue *cmd_q = &ae4cmd_q->cmd_q;  in ae4_pending_work() local
      56  cmd_q = &ae4cmd_q->cmd_q;  in ae4_core_irq_handler()
      57  pt = cmd_q->pt;  in ae4_core_irq_handler()
     104  cmd_q = &ae4cmd_q->cmd_q;  in ae4_core_init()
     105  cmd_q->pt = pt;  in ae4_core_init()
     116  cmd_q->qbase = dmam_alloc_coherent(dev, cmd_q->qsize, &cmd_q->qbase_dma,  in ae4_core_init()
     118  if (!cmd_q->qbase)  in ae4_core_init()
     125  cmd_q = &ae4cmd_q->cmd_q;  in ae4_core_init()
     132  cmd_q->qdma_tail = cmd_q->qbase_dma;  in ae4_core_init()
     133  writel(lower_32_bits(cmd_q->qdma_tail), cmd_q->reg_control + AE4_Q_BASE_L_OFF);  in ae4_core_init()
    [all …]
|
ae4dma.h
      49  struct pt_cmd_queue cmd_q;  member
|
/linux-6.15/drivers/scsi/ibmvscsi_tgt/
ibmvscsi_tgt.c
     363  crq = ibmvscsis_cmd_q_dequeue(vscsi->cmd_q.mask, &vscsi->cmd_q.index,  in ibmvscsis_check_init_msg()
     982  crq = vscsi->cmd_q.base_addr + vscsi->cmd_q.index;  in ibmvscsis_ready_for_suspend()
    1179  crq = vscsi->cmd_q.base_addr + vscsi->cmd_q.index;  in ibmvscsis_poll_cmd_q()
    1185  vscsi->cmd_q.index =  in ibmvscsis_poll_cmd_q()
    1186  (vscsi->cmd_q.index + 1) & vscsi->cmd_q.mask;  in ibmvscsis_poll_cmd_q()
    1217  vscsi->cmd_q.index = vscsi->cmd_q.mask;  in ibmvscsis_poll_cmd_q()
    1224  crq = vscsi->cmd_q.base_addr + vscsi->cmd_q.index;  in ibmvscsis_poll_cmd_q()
    3345  crq = vscsi->cmd_q.base_addr + vscsi->cmd_q.index;  in ibmvscsis_handle_crq()
    3358  (vscsi->cmd_q.index + 1) & vscsi->cmd_q.mask;  in ibmvscsis_handle_crq()
    3386  vscsi->cmd_q.index = vscsi->cmd_q.mask;  in ibmvscsis_handle_crq()
    [all …]
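The ibmvscsis matches advance the CRQ cursor with (index + 1) & mask rather than a modulo: when the ring length is a power of two, masking with length - 1 gives the same wrap as the % used by ptdma above. A small self-contained sketch; the ring size and entry layout here are hypothetical.

    #include <stdint.h>
    #include <stdio.h>

    #define CRQ_ENTRIES 16                       /* hypothetical; must be a power of two */
    #define CRQ_MASK    (CRQ_ENTRIES - 1)

    struct crq_entry { uint8_t valid; uint8_t data[15]; };

    int main(void)
    {
        struct crq_entry ring[CRQ_ENTRIES] = { { 0 } };
        unsigned int index = 0;

        for (int i = 0; i < 20; i++) {
            struct crq_entry *crq = &ring[index];    /* cf. base_addr + index */
            crq->valid = 1;
            index = (index + 1) & CRQ_MASK;          /* same result as (index + 1) % CRQ_ENTRIES */
        }
        printf("index after 20 entries: %u\n", index);   /* prints 4, i.e. 20 & 15 */
        return 0;
    }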
|
ibmvscsi_tgt.h
     268  struct cmd_queue cmd_q;  member
|
/linux-6.15/drivers/platform/olpc/
olpc-ec.c
      43  struct list_head cmd_q;  member
      85  if (!list_empty(&ec->cmd_q)) {  in olpc_ec_worker()
      86  desc = list_first_entry(&ec->cmd_q, struct ec_cmd_desc, node);  in olpc_ec_worker()
     120  list_add_tail(&desc->node, &ec->cmd_q);  in queue_ec_descriptor()
     419  INIT_LIST_HEAD(&ec->cmd_q);  in olpc_ec_probe()
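olpc-ec.c keeps pending EC commands on a list_head FIFO: queue_ec_descriptor() appends with list_add_tail() and olpc_ec_worker() pops the oldest entry with list_first_entry(). The sketch below models that intrusive circular doubly-linked list in plain C; the node/list helpers and the ec_cmd_desc payload are simplified stand-ins, not the kernel's list.h.

    #include <stddef.h>
    #include <stdio.h>

    /* Minimal circular doubly-linked list, in the spirit of the kernel's list_head. */
    struct node { struct node *prev, *next; };

    static void list_init(struct node *h)        { h->prev = h->next = h; }
    static int  list_is_empty(const struct node *h) { return h->next == h; }

    static void list_append(struct node *n, struct node *h)   /* cf. list_add_tail() */
    {
        n->prev = h->prev;
        n->next = h;
        h->prev->next = n;
        h->prev = n;
    }

    static void list_remove(struct node *n)                   /* cf. list_del() */
    {
        n->prev->next = n->next;
        n->next->prev = n->prev;
    }

    #define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

    struct ec_cmd_desc { int cmd; struct node node; };         /* hypothetical descriptor */

    int main(void)
    {
        struct node cmd_q;
        struct ec_cmd_desc a = { .cmd = 1 }, b = { .cmd = 2 };

        list_init(&cmd_q);
        list_append(&a.node, &cmd_q);                          /* cf. queue_ec_descriptor() */
        list_append(&b.node, &cmd_q);

        while (!list_is_empty(&cmd_q)) {                       /* cf. olpc_ec_worker() */
            struct ec_cmd_desc *d =
                container_of(cmd_q.next, struct ec_cmd_desc, node);
            list_remove(&d->node);
            printf("processing cmd %d\n", d->cmd);             /* prints cmd 1 then cmd 2 */
        }
        return 0;
    }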
|
/linux-6.15/drivers/media/platform/amphion/
vpu_cmds.c
     150  list_for_each_entry_safe(cmd, tmp, &inst->cmd_q, list) {  in vpu_process_cmd_request()
     184  list_add_tail(&cmd->list, &inst->cmd_q);  in vpu_request_cmd()
     244  list_for_each_entry_safe(cmd, tmp, &inst->cmd_q, list) {  in vpu_clear_request()
|
vpu.h
     253  struct list_head cmd_q;  member
|
/linux-6.15/net/nfc/nci/
core.c
     541  skb_queue_purge(&ndev->cmd_q);  in nci_open_device()
     583  skb_queue_purge(&ndev->cmd_q);  in nci_close_device()
    1267  skb_queue_head_init(&ndev->cmd_q);  in nci_register_device()
    1396  skb_queue_tail(&ndev->cmd_q, skb);  in nci_send_cmd()
    1592  skb = skb_dequeue(&ndev->cmd_q);  in nci_cmd_work()
|
rsp.c
     426  if (!skb_queue_empty(&ndev->cmd_q))  in nci_rsp_packet()
|
/linux-6.15/net/bluetooth/
hci_core.c
     525  skb_queue_purge(&hdev->cmd_q);  in hci_dev_do_reset()
    2524  skb_queue_head_init(&hdev->cmd_q);  in hci_alloc_dev_priv()
    3058  skb_queue_tail(&hdev->cmd_q, skb);  in hci_send_cmd()
    3874  skb = skb_peek(&hdev->cmd_q);  in hci_req_is_complete()
    3899  skb_queue_head(&hdev->cmd_q, skb);  in hci_resend_last()
    3954  spin_lock_irqsave(&hdev->cmd_q.lock, flags);  in hci_req_cmd_complete()
    3955  while ((skb = __skb_dequeue(&hdev->cmd_q))) {  in hci_req_cmd_complete()
    3957  __skb_queue_head(&hdev->cmd_q, skb);  in hci_req_cmd_complete()
    3967  spin_unlock_irqrestore(&hdev->cmd_q.lock, flags);  in hci_req_cmd_complete()
    4055  skb_queue_head(&hdev->cmd_q, skb);  in hci_send_cmd_sync()
    [all …]
|
hci_sync.c
     107  if (skb_queue_empty(&req->cmd_q))  in hci_cmd_sync_add()
     112  skb_queue_tail(&req->cmd_q, skb);  in hci_cmd_sync_add()
     127  skb_queue_purge(&req->cmd_q);  in hci_req_sync_run()
     132  if (skb_queue_empty(&req->cmd_q))  in hci_req_sync_run()
     135  skb = skb_peek_tail(&req->cmd_q);  in hci_req_sync_run()
     139  spin_lock_irqsave(&hdev->cmd_q.lock, flags);  in hci_req_sync_run()
     140  skb_queue_splice_tail(&req->cmd_q, &hdev->cmd_q);  in hci_req_sync_run()
     150  skb_queue_head_init(&req->cmd_q);  in hci_request_init()
    5081  skb_queue_purge(&hdev->cmd_q);  in hci_dev_open_sync()
    5241  skb_queue_purge(&hdev->cmd_q);  in hci_dev_close_sync()
    [all …]
|
/linux-6.15/drivers/scsi/
sg.c
     153  char cmd_q; /* 1 -> allow command queuing, 0 -> don't */  member
     722  sfp->cmd_q = 1; /* when sg_io_hdr seen, set command queuing on */  in sg_new_write()
    1047  sfp->cmd_q = val ? 1 : 0;  in sg_ioctl_common()
    1050  return put_user((int) sfp->cmd_q, ip);  in sg_ioctl_common()
    1190  else if (!sfp->cmd_q) {  in sg_poll()
    2103  if (!sfp->cmd_q)  in sg_add_request()
    2171  sfp->cmd_q = SG_DEF_COMMAND_Q;  in sg_add_sfp()
    2541  (int) fp->cmd_q, (int) fp->force_packid,  in sg_proc_debug_helper()
|
/linux-6.15/drivers/net/ethernet/brocade/bna/
bfa_ioc.c
    2120  INIT_LIST_HEAD(&mod->cmd_q);  in bfa_ioc_mbox_attach()
    2140  if (list_empty(&mod->cmd_q))  in bfa_ioc_mbox_poll()
    2153  cmd = list_first_entry(&mod->cmd_q, struct bfa_mbox_cmd, qe);  in bfa_ioc_mbox_poll()
    2175  while (!list_empty(&mod->cmd_q)) {  in bfa_ioc_mbox_flush()
    2176  cmd = list_first_entry(&mod->cmd_q, struct bfa_mbox_cmd, qe);  in bfa_ioc_mbox_flush()
    2662  if (!list_empty(&mod->cmd_q)) {  in bfa_nw_ioc_mbox_queue()
    2663  list_add_tail(&cmd->qe, &mod->cmd_q);  in bfa_nw_ioc_mbox_queue()
    2672  list_add_tail(&cmd->qe, &mod->cmd_q);  in bfa_nw_ioc_mbox_queue()
|
bfa_ioc.h
     109  struct list_head cmd_q; /*!< pending mbox queue */  member
|
/linux-6.15/include/net/bluetooth/
hci_sync.h
      20  struct sk_buff_head cmd_q;  member
|