Lines Matching refs: tmp_adev

142 struct amdgpu_device *tmp_adev = NULL; in aldebaran_mode2_perform_reset() local
156 list_for_each_entry(tmp_adev, reset_device_list, reset_list) { in aldebaran_mode2_perform_reset()
157 mutex_lock(&tmp_adev->reset_cntl->reset_lock); in aldebaran_mode2_perform_reset()
158 tmp_adev->reset_cntl->active_reset = AMD_RESET_METHOD_MODE2; in aldebaran_mode2_perform_reset()
164 list_for_each_entry(tmp_adev, reset_device_list, reset_list) { in aldebaran_mode2_perform_reset()
166 if (tmp_adev->gmc.xgmi.num_physical_nodes > 1) { in aldebaran_mode2_perform_reset()
168 &tmp_adev->reset_cntl->reset_work)) in aldebaran_mode2_perform_reset()
171 r = aldebaran_mode2_reset(tmp_adev); in aldebaran_mode2_perform_reset()
173 dev_err(tmp_adev->dev, in aldebaran_mode2_perform_reset()
175 r, adev_to_drm(tmp_adev)->unique); in aldebaran_mode2_perform_reset()
182 list_for_each_entry(tmp_adev, reset_device_list, reset_list) { in aldebaran_mode2_perform_reset()
183 if (tmp_adev->gmc.xgmi.num_physical_nodes > 1) { in aldebaran_mode2_perform_reset()
184 flush_work(&tmp_adev->reset_cntl->reset_work); in aldebaran_mode2_perform_reset()
185 r = tmp_adev->asic_reset_res; in aldebaran_mode2_perform_reset()
192 list_for_each_entry(tmp_adev, reset_device_list, reset_list) { in aldebaran_mode2_perform_reset()
193 mutex_unlock(&tmp_adev->reset_cntl->reset_lock); in aldebaran_mode2_perform_reset()
194 tmp_adev->reset_cntl->active_reset = AMD_RESET_METHOD_NONE; in aldebaran_mode2_perform_reset()
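Read in order, these references trace the four phases of aldebaran_mode2_perform_reset() (the function lives in drivers/gpu/drm/amd/amdgpu/aldebaran.c in the kernel tree): take each device's reset_lock and mark a mode2 reset active, launch the per-device resets (queued as asynchronous work so XGMI hive nodes reset in parallel, called synchronously for a single node), wait for the queued work and collect asic_reset_res, then unconditionally drop the locks. The sketch below stitches the listed fragments into that shape; only the tmp_adev statements come from the listing, while the declarations, the queue_work() failure value, and the early-exit check are assumptions about the surrounding code.

static int
aldebaran_mode2_perform_reset(struct amdgpu_reset_control *reset_ctl,
                              struct amdgpu_reset_context *reset_context)
{
        struct list_head *reset_device_list = reset_context->reset_device_list;
        struct amdgpu_device *tmp_adev = NULL;
        int r = 0;

        if (reset_device_list == NULL)
                return -EINVAL;

        /* Phase 1: serialize against concurrent resets, mark mode2 active. */
        list_for_each_entry(tmp_adev, reset_device_list, reset_list) {
                mutex_lock(&tmp_adev->reset_cntl->reset_lock);
                tmp_adev->reset_cntl->active_reset = AMD_RESET_METHOD_MODE2;
        }

        /* Phase 2: launch the resets; XGMI hive nodes run in parallel. */
        list_for_each_entry(tmp_adev, reset_device_list, reset_list) {
                if (tmp_adev->gmc.xgmi.num_physical_nodes > 1) {
                        /* Assumed failure value if work was already queued. */
                        if (!queue_work(system_unbound_wq,
                                        &tmp_adev->reset_cntl->reset_work))
                                r = -EALREADY;
                } else {
                        r = aldebaran_mode2_reset(tmp_adev);
                }
                if (r) {
                        dev_err(tmp_adev->dev,
                                "ASIC reset failed with error, %d for drm dev, %s",
                                r, adev_to_drm(tmp_adev)->unique);
                        break;
                }
        }

        /* Phase 3: for XGMI, wait for every queued reset to finish. */
        if (!r) {
                list_for_each_entry(tmp_adev, reset_device_list, reset_list) {
                        if (tmp_adev->gmc.xgmi.num_physical_nodes > 1) {
                                flush_work(&tmp_adev->reset_cntl->reset_work);
                                r = tmp_adev->asic_reset_res;
                                if (r)
                                        break;
                        }
                }
        }

        /* Phase 4: always release the locks and clear the active method. */
        list_for_each_entry(tmp_adev, reset_device_list, reset_list) {
                mutex_unlock(&tmp_adev->reset_cntl->reset_lock);
                tmp_adev->reset_cntl->active_reset = AMD_RESET_METHOD_NONE;
        }

        return r;
}

Note how the final unlock loop runs regardless of r, so a failed reset on one node never leaves another node's reset_lock held or its active_reset flag stuck at AMD_RESET_METHOD_MODE2.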
318 struct amdgpu_device *tmp_adev = NULL; in aldebaran_mode2_restore_hwcontext() local
332 list_for_each_entry(tmp_adev, reset_device_list, reset_list) { in aldebaran_mode2_restore_hwcontext()
333 amdgpu_set_init_level(tmp_adev, in aldebaran_mode2_restore_hwcontext()
335 dev_info(tmp_adev->dev, in aldebaran_mode2_restore_hwcontext()
338 amdgpu_ras_clear_err_state(tmp_adev); in aldebaran_mode2_restore_hwcontext()
339 r = aldebaran_mode2_restore_ip(tmp_adev); in aldebaran_mode2_restore_hwcontext()
347 amdgpu_register_gpu_instance(tmp_adev); in aldebaran_mode2_restore_hwcontext()
350 con = amdgpu_ras_get_context(tmp_adev); in aldebaran_mode2_restore_hwcontext()
351 if (!amdgpu_sriov_vf(tmp_adev) && con) { in aldebaran_mode2_restore_hwcontext()
352 if (tmp_adev->sdma.ras && in aldebaran_mode2_restore_hwcontext()
353 tmp_adev->sdma.ras->ras_block.ras_late_init) { in aldebaran_mode2_restore_hwcontext()
354 r = tmp_adev->sdma.ras->ras_block.ras_late_init(tmp_adev, in aldebaran_mode2_restore_hwcontext()
355 &tmp_adev->sdma.ras->ras_block.ras_comm); in aldebaran_mode2_restore_hwcontext()
357 dev_err(tmp_adev->dev, "SDMA failed to execute ras_late_init! ret:%d\n", r); in aldebaran_mode2_restore_hwcontext()
362 if (tmp_adev->gfx.ras && in aldebaran_mode2_restore_hwcontext()
363 tmp_adev->gfx.ras->ras_block.ras_late_init) { in aldebaran_mode2_restore_hwcontext()
364 r = tmp_adev->gfx.ras->ras_block.ras_late_init(tmp_adev, in aldebaran_mode2_restore_hwcontext()
365 &tmp_adev->gfx.ras->ras_block.ras_comm); in aldebaran_mode2_restore_hwcontext()
367 dev_err(tmp_adev->dev, "GFX failed to execute ras_late_init! ret:%d\n", r); in aldebaran_mode2_restore_hwcontext()
373 amdgpu_ras_resume(tmp_adev); in aldebaran_mode2_restore_hwcontext()
377 tmp_adev->gmc.xgmi.num_physical_nodes > 1) in aldebaran_mode2_restore_hwcontext()
379 tmp_adev); in aldebaran_mode2_restore_hwcontext()
382 amdgpu_set_init_level(tmp_adev, in aldebaran_mode2_restore_hwcontext()
384 amdgpu_irq_gpu_reset_resume_helper(tmp_adev); in aldebaran_mode2_restore_hwcontext()
386 r = amdgpu_ib_ring_tests(tmp_adev); in aldebaran_mode2_restore_hwcontext()
388 dev_err(tmp_adev->dev, in aldebaran_mode2_restore_hwcontext()
391 tmp_adev->asic_reset_res = r; in aldebaran_mode2_restore_hwcontext()
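The second group of references covers the recovery path in aldebaran_mode2_restore_hwcontext(): for each device in the list, drop the init level to reset-recovery, clear the RAS error state, restore the IP blocks, re-register the GPU instance, re-run the SDMA and GFX ras_late_init hooks (bare metal only, hence the amdgpu_sriov_vf() check), resume RAS, refresh the XGMI topology on multi-node hives, and finish with IRQ resume plus IB ring tests. Again a hedged reconstruction: the goto-based error handling, the exact init-level constants (the listing truncates the second argument of amdgpu_set_init_level()), and the -EAGAIN conversion after a failed ring test are assumptions around the listed tmp_adev lines.

static int
aldebaran_mode2_restore_hwcontext(struct amdgpu_reset_control *reset_ctl,
                                  struct amdgpu_reset_context *reset_context)
{
        struct list_head *reset_device_list = reset_context->reset_device_list;
        struct amdgpu_device *tmp_adev = NULL;
        struct amdgpu_ras *con;
        int r = 0;

        if (reset_device_list == NULL)
                return -EINVAL;

        list_for_each_entry(tmp_adev, reset_device_list, reset_list) {
                /* Assumed init-level constant; the listing truncates it. */
                amdgpu_set_init_level(tmp_adev,
                                      AMDGPU_INIT_LEVEL_RESET_RECOVERY);
                dev_info(tmp_adev->dev,
                         "GPU reset succeeded, trying to resume\n");
                amdgpu_ras_clear_err_state(tmp_adev);
                r = aldebaran_mode2_restore_ip(tmp_adev);
                if (r)
                        goto end;

                /* Track this ASIC again now that its reset completed. */
                amdgpu_register_gpu_instance(tmp_adev);

                /* Re-run RAS late init for SDMA/GFX on bare metal only. */
                con = amdgpu_ras_get_context(tmp_adev);
                if (!amdgpu_sriov_vf(tmp_adev) && con) {
                        if (tmp_adev->sdma.ras &&
                            tmp_adev->sdma.ras->ras_block.ras_late_init) {
                                r = tmp_adev->sdma.ras->ras_block.ras_late_init(tmp_adev,
                                        &tmp_adev->sdma.ras->ras_block.ras_comm);
                                if (r) {
                                        dev_err(tmp_adev->dev,
                                                "SDMA failed to execute ras_late_init! ret:%d\n",
                                                r);
                                        goto end;
                                }
                        }
                        if (tmp_adev->gfx.ras &&
                            tmp_adev->gfx.ras->ras_block.ras_late_init) {
                                r = tmp_adev->gfx.ras->ras_block.ras_late_init(tmp_adev,
                                        &tmp_adev->gfx.ras->ras_block.ras_comm);
                                if (r) {
                                        dev_err(tmp_adev->dev,
                                                "GFX failed to execute ras_late_init! ret:%d\n",
                                                r);
                                        goto end;
                                }
                        }
                }

                amdgpu_ras_resume(tmp_adev);

                /* Refresh PSP firmware topology on multi-node XGMI hives. */
                if (reset_context->hive &&
                    tmp_adev->gmc.xgmi.num_physical_nodes > 1)
                        r = amdgpu_xgmi_update_topology(reset_context->hive,
                                                        tmp_adev);

                if (!r) {
                        amdgpu_set_init_level(tmp_adev,
                                              AMDGPU_INIT_LEVEL_DEFAULT);
                        amdgpu_irq_gpu_reset_resume_helper(tmp_adev);
                        r = amdgpu_ib_ring_tests(tmp_adev);
                        if (r) {
                                dev_err(tmp_adev->dev,
                                        "ib ring test failed (%d).\n", r);
                                /* Assumed: record and propagate for retry. */
                                r = -EAGAIN;
                                tmp_adev->asic_reset_res = r;
                                goto end;
                        }
                }
        }

end:
        return r;
}

The last listed reference (file line 391) stores the IB ring test result in tmp_adev->asic_reset_res; the sketch assumes it is then propagated to the caller so the whole recovery can be retried rather than handing back a half-resumed device.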