Lines matching refs:adev (uses of the struct amdgpu_device *adev argument or local in the xgpu_nv SR-IOV mailbox code)
36 static void xgpu_nv_mailbox_send_ack(struct amdgpu_device *adev) in xgpu_nv_mailbox_send_ack() argument
41 static void xgpu_nv_mailbox_set_valid(struct amdgpu_device *adev, bool val) in xgpu_nv_mailbox_set_valid() argument
55 static enum idh_event xgpu_nv_mailbox_peek_msg(struct amdgpu_device *adev) in xgpu_nv_mailbox_peek_msg() argument
61 static int xgpu_nv_mailbox_rcv_msg(struct amdgpu_device *adev, in xgpu_nv_mailbox_rcv_msg() argument
73 xgpu_nv_mailbox_send_ack(adev); in xgpu_nv_mailbox_rcv_msg()
78 static uint8_t xgpu_nv_peek_ack(struct amdgpu_device *adev) in xgpu_nv_peek_ack() argument
83 static int xgpu_nv_poll_ack(struct amdgpu_device *adev) in xgpu_nv_poll_ack() argument
97 dev_err(adev->dev, "Doesn't get TRN_MSG_ACK from pf in %d msec \n", NV_MAILBOX_POLL_ACK_TIMEDOUT); in xgpu_nv_poll_ack()
102 static int xgpu_nv_poll_msg(struct amdgpu_device *adev, enum idh_event event) in xgpu_nv_poll_msg() argument
111 r = xgpu_nv_mailbox_rcv_msg(adev, event); in xgpu_nv_poll_msg()
113 …dev_dbg(adev->dev, "rcv_msg 0x%x after %llu ms\n", event, NV_MAILBOX_POLL_MSG_TIMEDOUT - timeout +… in xgpu_nv_poll_msg()
121 dev_dbg(adev->dev, "nv_poll_msg timed out\n"); in xgpu_nv_poll_msg()
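The two poll helpers above follow the same pattern: retry until the PF responds or a timeout constant expires. Below is a minimal sketch of the message-polling loop implied by these hits; the 10 ms wait step and the -ETIME return value are assumptions, not taken from the source.

/* Hedged sketch of the polling loop implied by the xgpu_nv_poll_msg() hits;
 * the 10 ms step and -ETIME return are assumptions. */
static int poll_msg_sketch(struct amdgpu_device *adev, enum idh_event event)
{
	int timeout = NV_MAILBOX_POLL_MSG_TIMEDOUT;

	while (timeout > 0) {
		if (!xgpu_nv_mailbox_rcv_msg(adev, event)) {
			dev_dbg(adev->dev, "rcv_msg 0x%x after %llu ms\n", event,
				(u64)(NV_MAILBOX_POLL_MSG_TIMEDOUT - timeout));
			return 0;	/* expected event arrived and was acked */
		}
		msleep(10);		/* assumed wait step */
		timeout -= 10;
	}

	dev_dbg(adev->dev, "nv_poll_msg timed out\n");
	return -ETIME;
}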
126 static void xgpu_nv_mailbox_trans_msg (struct amdgpu_device *adev, in xgpu_nv_mailbox_trans_msg() argument
139 xgpu_nv_mailbox_set_valid(adev, false); in xgpu_nv_mailbox_trans_msg()
140 trn = xgpu_nv_peek_ack(adev); in xgpu_nv_mailbox_trans_msg()
142 dev_err_ratelimited(adev->dev, "trn=%x ACK should not assert! wait again !\n", trn); in xgpu_nv_mailbox_trans_msg()
147 dev_dbg(adev->dev, "trans_msg req = 0x%x, data1 = 0x%x\n", req, data1); in xgpu_nv_mailbox_trans_msg()
152 xgpu_nv_mailbox_set_valid(adev, true); in xgpu_nv_mailbox_trans_msg()
155 r = xgpu_nv_poll_ack(adev); in xgpu_nv_mailbox_trans_msg()
157 dev_err(adev->dev, "Doesn't get ack from pf, continue\n"); in xgpu_nv_mailbox_trans_msg()
159 xgpu_nv_mailbox_set_valid(adev, false); in xgpu_nv_mailbox_trans_msg()
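Taken together, the trans_msg hits sketch a VALID/ACK handshake: clear VALID, wait for any stale ACK to drop, write the message, raise VALID, then poll for the PF's ACK. A hedged reconstruction follows; the register writes for req/data1/data2/data3 are elided and the loop structure is assumed.

/* Hedged reconstruction of the send-side handshake implied by the
 * xgpu_nv_mailbox_trans_msg() hits above. */
static void trans_msg_sketch(struct amdgpu_device *adev, u32 req,
			     u32 data1, u32 data2, u32 data3)
{
	u8 trn;

	do {
		xgpu_nv_mailbox_set_valid(adev, false);	/* drop VALID before reusing the mailbox */
		trn = xgpu_nv_peek_ack(adev);		/* a lingering ACK means the PF is not done yet */
		if (trn)
			dev_err_ratelimited(adev->dev,
				"trn=%x ACK should not assert! wait again !\n", trn);
	} while (trn);

	/* ... write req, data1, data2, data3 into the mailbox message registers ... */
	dev_dbg(adev->dev, "trans_msg req = 0x%x, data1 = 0x%x\n", req, data1);

	xgpu_nv_mailbox_set_valid(adev, true);		/* tell the PF a message is pending */

	if (xgpu_nv_poll_ack(adev))
		dev_err(adev->dev, "Doesn't get ack from pf, continue\n");

	xgpu_nv_mailbox_set_valid(adev, false);		/* handshake complete */
}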
162 static int xgpu_nv_send_access_requests_with_param(struct amdgpu_device *adev, in xgpu_nv_send_access_requests_with_param() argument
169 xgpu_nv_mailbox_trans_msg(adev, req, data1, data2, data3); in xgpu_nv_send_access_requests_with_param()
195 r = xgpu_nv_poll_msg(adev, event); in xgpu_nv_send_access_requests_with_param()
201 dev_err(adev->dev, "Doesn't get msg:%d from pf, error=%d\n", event, r); in xgpu_nv_send_access_requests_with_param()
204 adev->virt.req_init_data_ver = 0; in xgpu_nv_send_access_requests_with_param()
207 adev->virt.req_init_data_ver = in xgpu_nv_send_access_requests_with_param()
211 if (adev->virt.req_init_data_ver < 1) in xgpu_nv_send_access_requests_with_param()
212 adev->virt.req_init_data_ver = 1; in xgpu_nv_send_access_requests_with_param()
218 adev->virt.fw_reserve.checksum_key = in xgpu_nv_send_access_requests_with_param()
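These hits suggest the access-request path layers on trans_msg: send the request, then, for requests that expect a reply, poll for the matching event and capture the returned data, including the init-data version (clamped to at least 1) and the fw_reserve checksum key. A hedged outline follows; the request-to-event mapping, the retry handling, and the mailbox read helper (read_msg_data) are hypothetical.

/* Hedged outline of the request/reply flow; event_for_request() and
 * read_msg_data() are hypothetical stand-ins for logic not shown here. */
static int send_access_sketch(struct amdgpu_device *adev, u32 req,
			      u32 data1, u32 data2, u32 data3)
{
	int event = event_for_request(req);	/* hypothetical: -1 means no reply expected */
	int r;

	xgpu_nv_mailbox_trans_msg(adev, req, data1, data2, data3);
	if (event < 0)
		return 0;			/* fire-and-forget request */

	r = xgpu_nv_poll_msg(adev, event);
	if (r) {
		dev_err(adev->dev, "Doesn't get msg:%d from pf, error=%d\n", event, r);
		adev->virt.req_init_data_ver = 0;	/* no usable reply: fall back to version 0 */
		return r;
	}

	if (req == IDH_REQ_GPU_INIT_DATA) {
		adev->virt.req_init_data_ver = read_msg_data(adev);	/* hypothetical read */
		if (adev->virt.req_init_data_ver < 1)
			adev->virt.req_init_data_ver = 1;	/* treat hosts that omit the version as v1 */
	} else {
		adev->virt.fw_reserve.checksum_key = read_msg_data(adev);	/* hypothetical read */
	}
	return 0;
}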
226 static int xgpu_nv_send_access_requests(struct amdgpu_device *adev, in xgpu_nv_send_access_requests() argument
229 return xgpu_nv_send_access_requests_with_param(adev, in xgpu_nv_send_access_requests()
233 static int xgpu_nv_request_reset(struct amdgpu_device *adev) in xgpu_nv_request_reset() argument
238 ret = xgpu_nv_send_access_requests(adev, IDH_REQ_GPU_RESET_ACCESS); in xgpu_nv_request_reset()
247 static int xgpu_nv_request_full_gpu_access(struct amdgpu_device *adev, in xgpu_nv_request_full_gpu_access() argument
253 return xgpu_nv_send_access_requests(adev, req); in xgpu_nv_request_full_gpu_access()
256 static int xgpu_nv_release_full_gpu_access(struct amdgpu_device *adev, in xgpu_nv_release_full_gpu_access() argument
263 r = xgpu_nv_send_access_requests(adev, req); in xgpu_nv_release_full_gpu_access()
268 static int xgpu_nv_request_init_data(struct amdgpu_device *adev) in xgpu_nv_request_init_data() argument
270 return xgpu_nv_send_access_requests(adev, IDH_REQ_GPU_INIT_DATA); in xgpu_nv_request_init_data()
273 static int xgpu_nv_mailbox_ack_irq(struct amdgpu_device *adev, in xgpu_nv_mailbox_ack_irq() argument
277 dev_dbg(adev->dev, "get ack intr and do nothing.\n"); in xgpu_nv_mailbox_ack_irq()
281 static int xgpu_nv_set_mailbox_ack_irq(struct amdgpu_device *adev, in xgpu_nv_set_mailbox_ack_irq() argument
298 static void xgpu_nv_ready_to_reset(struct amdgpu_device *adev) in xgpu_nv_ready_to_reset() argument
300 xgpu_nv_mailbox_trans_msg(adev, IDH_READY_TO_RESET, 0, 0, 0); in xgpu_nv_ready_to_reset()
303 static int xgpu_nv_wait_reset(struct amdgpu_device *adev) in xgpu_nv_wait_reset() argument
307 if (xgpu_nv_mailbox_peek_msg(adev) == IDH_FLR_NOTIFICATION_CMPL) { in xgpu_nv_wait_reset()
308 …dev_dbg(adev->dev, "Got NV IDH_FLR_NOTIFICATION_CMPL after %d ms\n", NV_MAILBOX_POLL_FLR_TIMEDOUT … in xgpu_nv_wait_reset()
315 dev_dbg(adev->dev, "waiting NV IDH_FLR_NOTIFICATION_CMPL timeout\n"); in xgpu_nv_wait_reset()
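Around a host-initiated FLR the VF first signals IDH_READY_TO_RESET (fire-and-forget, no ack wait in the trans_msg call above), then waits for IDH_FLR_NOTIFICATION_CMPL by polling peek_msg. A minimal sketch of that wait loop, with the poll interval and return codes assumed:

/* Hedged sketch of the FLR-completion wait; the 10 ms step and -ETIME are assumptions. */
static int wait_reset_sketch(struct amdgpu_device *adev)
{
	int timeout = NV_MAILBOX_POLL_FLR_TIMEDOUT;

	do {
		if (xgpu_nv_mailbox_peek_msg(adev) == IDH_FLR_NOTIFICATION_CMPL) {
			dev_dbg(adev->dev, "Got NV IDH_FLR_NOTIFICATION_CMPL\n");
			return 0;
		}
		msleep(10);
		timeout -= 10;
	} while (timeout > 0);

	dev_dbg(adev->dev, "waiting NV IDH_FLR_NOTIFICATION_CMPL timeout\n");
	return -ETIME;
}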
322 struct amdgpu_device *adev = container_of(virt, struct amdgpu_device, virt); in xgpu_nv_mailbox_flr_work() local
324 amdgpu_virt_fini_data_exchange(adev); in xgpu_nv_mailbox_flr_work()
327 if (amdgpu_device_should_recover_gpu(adev) in xgpu_nv_mailbox_flr_work()
328 && (!amdgpu_device_has_job_running(adev) || in xgpu_nv_mailbox_flr_work()
329 adev->sdma_timeout == MAX_SCHEDULE_TIMEOUT || in xgpu_nv_mailbox_flr_work()
330 adev->gfx_timeout == MAX_SCHEDULE_TIMEOUT || in xgpu_nv_mailbox_flr_work()
331 adev->compute_timeout == MAX_SCHEDULE_TIMEOUT || in xgpu_nv_mailbox_flr_work()
332 adev->video_timeout == MAX_SCHEDULE_TIMEOUT)) { in xgpu_nv_mailbox_flr_work()
337 reset_context.reset_req_dev = adev; in xgpu_nv_mailbox_flr_work()
341 amdgpu_device_gpu_recover(adev, NULL, &reset_context); in xgpu_nv_mailbox_flr_work()
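The FLR worker hits read as: tear down the PF/VF data exchange first, then enter full GPU recovery only when recovery is enabled and either no job is in flight or every ring timeout is effectively infinite. A condensed, hedged reconstruction; reset_context fields other than reset_req_dev are omitted.

/* Hedged reconstruction of the FLR work item from the hits above. */
static void flr_work_sketch(struct work_struct *work)
{
	struct amdgpu_virt *virt = container_of(work, struct amdgpu_virt, flr_work);
	struct amdgpu_device *adev = container_of(virt, struct amdgpu_device, virt);

	amdgpu_virt_fini_data_exchange(adev);	/* stop PF/VF data exchange before reset */

	if (amdgpu_device_should_recover_gpu(adev) &&
	    (!amdgpu_device_has_job_running(adev) ||
	     adev->sdma_timeout == MAX_SCHEDULE_TIMEOUT ||
	     adev->gfx_timeout == MAX_SCHEDULE_TIMEOUT ||
	     adev->compute_timeout == MAX_SCHEDULE_TIMEOUT ||
	     adev->video_timeout == MAX_SCHEDULE_TIMEOUT)) {
		struct amdgpu_reset_context reset_context;

		memset(&reset_context, 0, sizeof(reset_context));
		reset_context.reset_req_dev = adev;
		/* ... remaining reset_context fields elided ... */
		amdgpu_device_gpu_recover(adev, NULL, &reset_context);
	}
}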
345 static int xgpu_nv_set_mailbox_rcv_irq(struct amdgpu_device *adev, in xgpu_nv_set_mailbox_rcv_irq() argument
362 static int xgpu_nv_mailbox_rcv_irq(struct amdgpu_device *adev, in xgpu_nv_mailbox_rcv_irq() argument
366 enum idh_event event = xgpu_nv_mailbox_peek_msg(adev); in xgpu_nv_mailbox_rcv_irq()
370 if (amdgpu_sriov_runtime(adev)) in xgpu_nv_mailbox_rcv_irq()
371 WARN_ONCE(!amdgpu_reset_domain_schedule(adev->reset_domain, in xgpu_nv_mailbox_rcv_irq()
372 &adev->virt.flr_work), in xgpu_nv_mailbox_rcv_irq()
400 void xgpu_nv_mailbox_set_irq_funcs(struct amdgpu_device *adev) in xgpu_nv_mailbox_set_irq_funcs() argument
402 adev->virt.ack_irq.num_types = 1; in xgpu_nv_mailbox_set_irq_funcs()
403 adev->virt.ack_irq.funcs = &xgpu_nv_mailbox_ack_irq_funcs; in xgpu_nv_mailbox_set_irq_funcs()
404 adev->virt.rcv_irq.num_types = 1; in xgpu_nv_mailbox_set_irq_funcs()
405 adev->virt.rcv_irq.funcs = &xgpu_nv_mailbox_rcv_irq_funcs; in xgpu_nv_mailbox_set_irq_funcs()
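The irq_funcs assignments above point at two amdgpu_irq_src_funcs tables; under the usual amdgpu pattern each pairs a .set enable callback with a .process handler, presumably along these lines (the exact table contents are an assumption):

/* Assumed shape of the two irq-source tables referenced above. */
static const struct amdgpu_irq_src_funcs xgpu_nv_mailbox_ack_irq_funcs = {
	.set = xgpu_nv_set_mailbox_ack_irq,
	.process = xgpu_nv_mailbox_ack_irq,
};

static const struct amdgpu_irq_src_funcs xgpu_nv_mailbox_rcv_irq_funcs = {
	.set = xgpu_nv_set_mailbox_rcv_irq,
	.process = xgpu_nv_mailbox_rcv_irq,
};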
408 int xgpu_nv_mailbox_add_irq_id(struct amdgpu_device *adev) in xgpu_nv_mailbox_add_irq_id() argument
412 r = amdgpu_irq_add_id(adev, SOC15_IH_CLIENTID_BIF, 135, &adev->virt.rcv_irq); in xgpu_nv_mailbox_add_irq_id()
416 r = amdgpu_irq_add_id(adev, SOC15_IH_CLIENTID_BIF, 138, &adev->virt.ack_irq); in xgpu_nv_mailbox_add_irq_id()
418 amdgpu_irq_put(adev, &adev->virt.rcv_irq, 0); in xgpu_nv_mailbox_add_irq_id()
425 int xgpu_nv_mailbox_get_irq(struct amdgpu_device *adev) in xgpu_nv_mailbox_get_irq() argument
429 r = amdgpu_irq_get(adev, &adev->virt.rcv_irq, 0); in xgpu_nv_mailbox_get_irq()
432 r = amdgpu_irq_get(adev, &adev->virt.ack_irq, 0); in xgpu_nv_mailbox_get_irq()
434 amdgpu_irq_put(adev, &adev->virt.rcv_irq, 0); in xgpu_nv_mailbox_get_irq()
438 INIT_WORK(&adev->virt.flr_work, xgpu_nv_mailbox_flr_work); in xgpu_nv_mailbox_get_irq()
443 void xgpu_nv_mailbox_put_irq(struct amdgpu_device *adev) in xgpu_nv_mailbox_put_irq() argument
445 amdgpu_irq_put(adev, &adev->virt.ack_irq, 0); in xgpu_nv_mailbox_put_irq()
446 amdgpu_irq_put(adev, &adev->virt.rcv_irq, 0); in xgpu_nv_mailbox_put_irq()
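Setup and teardown mirror each other: add_irq_id registers BIF sources 135 (rcv) and 138 (ack), get_irq enables both and arms the FLR work item, and put_irq disables them; on failure the already-enabled rcv interrupt is dropped. A brief sketch of the enable path and its unwind ordering, with the exact return handling assumed:

/* Hedged sketch of the enable path implied by the xgpu_nv_mailbox_get_irq() hits. */
static int get_irq_sketch(struct amdgpu_device *adev)
{
	int r;

	r = amdgpu_irq_get(adev, &adev->virt.rcv_irq, 0);
	if (r)
		return r;

	r = amdgpu_irq_get(adev, &adev->virt.ack_irq, 0);
	if (r) {
		amdgpu_irq_put(adev, &adev->virt.rcv_irq, 0);	/* unwind the rcv enable */
		return r;
	}

	INIT_WORK(&adev->virt.flr_work, xgpu_nv_mailbox_flr_work);	/* armed once irqs are live */
	return 0;
}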
449 static void xgpu_nv_ras_poison_handler(struct amdgpu_device *adev, in xgpu_nv_ras_poison_handler() argument
452 if (amdgpu_ip_version(adev, UMC_HWIP, 0) < IP_VERSION(12, 0, 0)) { in xgpu_nv_ras_poison_handler()
453 xgpu_nv_send_access_requests(adev, IDH_RAS_POISON); in xgpu_nv_ras_poison_handler()
455 amdgpu_virt_fini_data_exchange(adev); in xgpu_nv_ras_poison_handler()
456 xgpu_nv_send_access_requests_with_param(adev, in xgpu_nv_ras_poison_handler()
461 static bool xgpu_nv_rcvd_ras_intr(struct amdgpu_device *adev) in xgpu_nv_rcvd_ras_intr() argument
463 enum idh_event msg = xgpu_nv_mailbox_peek_msg(adev); in xgpu_nv_rcvd_ras_intr()
468 static int xgpu_nv_req_ras_err_count(struct amdgpu_device *adev) in xgpu_nv_req_ras_err_count() argument
470 return xgpu_nv_send_access_requests(adev, IDH_REQ_RAS_ERROR_COUNT); in xgpu_nv_req_ras_err_count()
473 static int xgpu_nv_req_ras_cper_dump(struct amdgpu_device *adev, u64 vf_rptr) in xgpu_nv_req_ras_cper_dump() argument
480 adev, IDH_REQ_RAS_CPER_DUMP, vf_rptr_hi, vf_rptr_lo, 0); in xgpu_nv_req_ras_cper_dump()
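The last hits show the CPER dump request passing the VF read pointer through two 32-bit mailbox data words. A short sketch of that split, assuming vf_rptr_hi/vf_rptr_lo are plain u32 locals:

/* Hedged sketch: split the 64-bit VF read pointer across the two 32-bit
 * mailbox payload words used by IDH_REQ_RAS_CPER_DUMP. */
static int req_ras_cper_dump_sketch(struct amdgpu_device *adev, u64 vf_rptr)
{
	u32 vf_rptr_hi = (u32)(vf_rptr >> 32);
	u32 vf_rptr_lo = (u32)(vf_rptr & 0xFFFFFFFF);

	return xgpu_nv_send_access_requests_with_param(
		adev, IDH_REQ_RAS_CPER_DUMP, vf_rptr_hi, vf_rptr_lo, 0);
}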