Lines Matching refs:adev
36 static void amdgpu_jpeg_reg_dump_fini(struct amdgpu_device *adev);
38 int amdgpu_jpeg_sw_init(struct amdgpu_device *adev) in amdgpu_jpeg_sw_init() argument
42 INIT_DELAYED_WORK(&adev->jpeg.idle_work, amdgpu_jpeg_idle_work_handler); in amdgpu_jpeg_sw_init()
43 mutex_init(&adev->jpeg.jpeg_pg_lock); in amdgpu_jpeg_sw_init()
44 atomic_set(&adev->jpeg.total_submission_cnt, 0); in amdgpu_jpeg_sw_init()
46 if ((adev->firmware.load_type == AMDGPU_FW_LOAD_PSP) && in amdgpu_jpeg_sw_init()
47 (adev->pg_flags & AMD_PG_SUPPORT_JPEG_DPG)) in amdgpu_jpeg_sw_init()
48 adev->jpeg.indirect_sram = true; in amdgpu_jpeg_sw_init()
50 for (i = 0; i < adev->jpeg.num_jpeg_inst; i++) { in amdgpu_jpeg_sw_init()
51 if (adev->jpeg.harvest_config & (1U << i)) in amdgpu_jpeg_sw_init()
54 if (adev->jpeg.indirect_sram) { in amdgpu_jpeg_sw_init()
55 r = amdgpu_bo_create_kernel(adev, 64 * 2 * 4, PAGE_SIZE, in amdgpu_jpeg_sw_init()
58 &adev->jpeg.inst[i].dpg_sram_bo, in amdgpu_jpeg_sw_init()
59 &adev->jpeg.inst[i].dpg_sram_gpu_addr, in amdgpu_jpeg_sw_init()
60 &adev->jpeg.inst[i].dpg_sram_cpu_addr); in amdgpu_jpeg_sw_init()
62 dev_err(adev->dev, in amdgpu_jpeg_sw_init()
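
The sw_init fragments above (and most of the loops below) gate per-instance work on harvest_config: instance i is skipped whenever bit i of the mask is set. A minimal stand-alone sketch of that skip pattern, with made-up instance count and mask values:

#include <stdio.h>

int main(void)
{
	unsigned int num_jpeg_inst = 4;        /* example only */
	unsigned int harvest_config = 0x2;     /* example: instance 1 fused off */
	unsigned int i;

	for (i = 0; i < num_jpeg_inst; i++) {
		if (harvest_config & (1U << i))
			continue;              /* harvested instance: skip it */
		printf("setting up jpeg instance %u\n", i);
	}
	return 0;
}
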
72 int amdgpu_jpeg_sw_fini(struct amdgpu_device *adev) in amdgpu_jpeg_sw_fini() argument
76 for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) { in amdgpu_jpeg_sw_fini()
77 if (adev->jpeg.harvest_config & (1U << i)) in amdgpu_jpeg_sw_fini()
81 &adev->jpeg.inst[i].dpg_sram_bo, in amdgpu_jpeg_sw_fini()
82 &adev->jpeg.inst[i].dpg_sram_gpu_addr, in amdgpu_jpeg_sw_fini()
83 (void **)&adev->jpeg.inst[i].dpg_sram_cpu_addr); in amdgpu_jpeg_sw_fini()
85 for (j = 0; j < adev->jpeg.num_jpeg_rings; ++j) in amdgpu_jpeg_sw_fini()
86 amdgpu_ring_fini(&adev->jpeg.inst[i].ring_dec[j]); in amdgpu_jpeg_sw_fini()
89 if (adev->jpeg.reg_list) in amdgpu_jpeg_sw_fini()
90 amdgpu_jpeg_reg_dump_fini(adev); in amdgpu_jpeg_sw_fini()
92 mutex_destroy(&adev->jpeg.jpeg_pg_lock); in amdgpu_jpeg_sw_fini()
97 int amdgpu_jpeg_suspend(struct amdgpu_device *adev) in amdgpu_jpeg_suspend() argument
99 cancel_delayed_work_sync(&adev->jpeg.idle_work); in amdgpu_jpeg_suspend()
104 int amdgpu_jpeg_resume(struct amdgpu_device *adev) in amdgpu_jpeg_resume() argument
111 struct amdgpu_device *adev = in amdgpu_jpeg_idle_work_handler() local
116 for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) { in amdgpu_jpeg_idle_work_handler()
117 if (adev->jpeg.harvest_config & (1U << i)) in amdgpu_jpeg_idle_work_handler()
120 for (j = 0; j < adev->jpeg.num_jpeg_rings; ++j) in amdgpu_jpeg_idle_work_handler()
121 fences += amdgpu_fence_count_emitted(&adev->jpeg.inst[i].ring_dec[j]); in amdgpu_jpeg_idle_work_handler()
124 if (!fences && !atomic_read(&adev->jpeg.total_submission_cnt)) in amdgpu_jpeg_idle_work_handler()
125 amdgpu_device_ip_set_powergating_state(adev, AMD_IP_BLOCK_TYPE_JPEG, in amdgpu_jpeg_idle_work_handler()
128 schedule_delayed_work(&adev->jpeg.idle_work, JPEG_IDLE_TIMEOUT); in amdgpu_jpeg_idle_work_handler()
133 struct amdgpu_device *adev = ring->adev; in amdgpu_jpeg_ring_begin_use() local
135 atomic_inc(&adev->jpeg.total_submission_cnt); in amdgpu_jpeg_ring_begin_use()
136 cancel_delayed_work_sync(&adev->jpeg.idle_work); in amdgpu_jpeg_ring_begin_use()
138 mutex_lock(&adev->jpeg.jpeg_pg_lock); in amdgpu_jpeg_ring_begin_use()
139 amdgpu_device_ip_set_powergating_state(adev, AMD_IP_BLOCK_TYPE_JPEG, in amdgpu_jpeg_ring_begin_use()
141 mutex_unlock(&adev->jpeg.jpeg_pg_lock); in amdgpu_jpeg_ring_begin_use()
146 atomic_dec(&ring->adev->jpeg.total_submission_cnt); in amdgpu_jpeg_ring_end_use()
147 schedule_delayed_work(&ring->adev->jpeg.idle_work, JPEG_IDLE_TIMEOUT); in amdgpu_jpeg_ring_end_use()
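
Taken together, the idle_work_handler, ring_begin_use and ring_end_use fragments above implement reference-counted power gating: begin_use bumps total_submission_cnt, cancels the pending idle work and ungates the block; end_use drops the count and re-arms the idle work; the handler gates the block only when no fences are outstanding and the count is zero, otherwise it reschedules itself. A minimal user-space sketch of that pattern; the jpeg_begin_use/jpeg_end_use/jpeg_idle_check names are hypothetical stand-ins for the real functions, and plain calls replace delayed work and fence counting:

#include <stdatomic.h>
#include <stdbool.h>
#include <stdio.h>

static atomic_int total_submission_cnt = 0;
static int emitted_fences;      /* stand-in for amdgpu_fence_count_emitted() */
static bool powered_on;

static void jpeg_begin_use(void)
{
	atomic_fetch_add(&total_submission_cnt, 1);
	/* the real code also cancels the pending idle work here */
	powered_on = true;          /* AMD_PG_STATE_UNGATE */
}

static void jpeg_end_use(void)
{
	atomic_fetch_sub(&total_submission_cnt, 1);
	/* the real code schedules the idle work after JPEG_IDLE_TIMEOUT */
}

static void jpeg_idle_check(void)
{
	/* gate only when no fences are outstanding and nothing is in flight */
	if (!emitted_fences && !atomic_load(&total_submission_cnt))
		powered_on = false; /* AMD_PG_STATE_GATE */
	/* otherwise the real handler reschedules itself */
}

int main(void)
{
	jpeg_begin_use();
	jpeg_end_use();
	jpeg_idle_check();
	printf("powered_on = %d\n", powered_on);
	return 0;
}
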
152 struct amdgpu_device *adev = ring->adev; in amdgpu_jpeg_dec_ring_test_ring() local
158 if (amdgpu_sriov_vf(adev)) in amdgpu_jpeg_dec_ring_test_ring()
165 WREG32(adev->jpeg.inst[ring->me].external.jpeg_pitch[ring->pipe], 0xCAFEDEAD); in amdgpu_jpeg_dec_ring_test_ring()
167 RREG32(adev->jpeg.inst[ring->me].external.jpeg_pitch[ring->pipe]); in amdgpu_jpeg_dec_ring_test_ring()
169 amdgpu_ring_write(ring, PACKET0(adev->jpeg.internal.jpeg_pitch[ring->pipe], 0)); in amdgpu_jpeg_dec_ring_test_ring()
173 for (i = 0; i < adev->usec_timeout; i++) { in amdgpu_jpeg_dec_ring_test_ring()
174 tmp = RREG32(adev->jpeg.inst[ring->me].external.jpeg_pitch[ring->pipe]); in amdgpu_jpeg_dec_ring_test_ring()
180 if (i >= adev->usec_timeout) in amdgpu_jpeg_dec_ring_test_ring()
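
The dec_ring_test_ring fragments seed the external jpeg_pitch register with 0xCAFEDEAD, submit a PACKET0 write through the ring, and then poll the register for up to adev->usec_timeout iterations; reaching the timeout fails the test. A minimal sketch of that poll-with-timeout loop; read_scratch() and EXPECTED are hypothetical stand-ins, since the value the packet writes back is not visible in this listing:

#include <stdint.h>
#include <stdio.h>
#include <unistd.h>

#define EXPECTED 0x12345678u    /* placeholder, not the real magic value */

static volatile uint32_t scratch = 0xCAFEDEAD;  /* seeded before the test */

static uint32_t read_scratch(void)
{
	return scratch;
}

static int ring_test(unsigned int usec_timeout)
{
	unsigned int i;
	uint32_t tmp;

	for (i = 0; i < usec_timeout; i++) {
		tmp = read_scratch();
		if (tmp == EXPECTED)
			break;
		usleep(1);          /* the kernel code uses udelay(1) */
	}

	/* i reaching usec_timeout means the write never landed */
	return (i < usec_timeout) ? 0 : -1;
}

int main(void)
{
	scratch = EXPECTED;         /* pretend the ring packet completed */
	printf("test %s\n", ring_test(100) == 0 ? "passed" : "timed out");
	return 0;
}
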
189 struct amdgpu_device *adev = ring->adev; in amdgpu_jpeg_dec_set_reg() local
196 r = amdgpu_job_alloc_with_ib(ring->adev, NULL, NULL, ib_size_dw * 4, in amdgpu_jpeg_dec_set_reg()
203 ib->ptr[0] = PACKETJ(adev->jpeg.internal.jpeg_pitch[ring->pipe], 0, 0, PACKETJ_TYPE0); in amdgpu_jpeg_dec_set_reg()
228 struct amdgpu_device *adev = ring->adev; in amdgpu_jpeg_dec_ring_test_ib() local
248 if (!amdgpu_sriov_vf(adev)) { in amdgpu_jpeg_dec_ring_test_ib()
249 for (i = 0; i < adev->usec_timeout; i++) { in amdgpu_jpeg_dec_ring_test_ib()
250 tmp = RREG32(adev->jpeg.inst[ring->me].external.jpeg_pitch[ring->pipe]); in amdgpu_jpeg_dec_ring_test_ib()
258 if (i >= adev->usec_timeout) in amdgpu_jpeg_dec_ring_test_ib()
267 int amdgpu_jpeg_process_poison_irq(struct amdgpu_device *adev, in amdgpu_jpeg_process_poison_irq() argument
271 struct ras_common_if *ras_if = adev->jpeg.ras_if; in amdgpu_jpeg_process_poison_irq()
280 amdgpu_ras_interrupt_dispatch(adev, &ih_data); in amdgpu_jpeg_process_poison_irq()
285 int amdgpu_jpeg_ras_late_init(struct amdgpu_device *adev, struct ras_common_if *ras_block) in amdgpu_jpeg_ras_late_init() argument
289 r = amdgpu_ras_block_late_init(adev, ras_block); in amdgpu_jpeg_ras_late_init()
293 if (amdgpu_ras_is_supported(adev, ras_block->block)) { in amdgpu_jpeg_ras_late_init()
294 for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) { in amdgpu_jpeg_ras_late_init()
295 if (adev->jpeg.harvest_config & (1 << i) || in amdgpu_jpeg_ras_late_init()
296 !adev->jpeg.inst[i].ras_poison_irq.funcs) in amdgpu_jpeg_ras_late_init()
299 r = amdgpu_irq_get(adev, &adev->jpeg.inst[i].ras_poison_irq, 0); in amdgpu_jpeg_ras_late_init()
307 amdgpu_ras_block_late_fini(adev, ras_block); in amdgpu_jpeg_ras_late_init()
311 int amdgpu_jpeg_ras_sw_init(struct amdgpu_device *adev) in amdgpu_jpeg_ras_sw_init() argument
316 if (!adev->jpeg.ras) in amdgpu_jpeg_ras_sw_init()
319 ras = adev->jpeg.ras; in amdgpu_jpeg_ras_sw_init()
320 err = amdgpu_ras_register_ras_block(adev, &ras->ras_block); in amdgpu_jpeg_ras_sw_init()
322 dev_err(adev->dev, "Failed to register jpeg ras block!\n"); in amdgpu_jpeg_ras_sw_init()
329 adev->jpeg.ras_if = &ras->ras_block.ras_comm; in amdgpu_jpeg_ras_sw_init()
337 int amdgpu_jpeg_psp_update_sram(struct amdgpu_device *adev, int inst_idx, in amdgpu_jpeg_psp_update_sram() argument
342 .mc_addr = adev->jpeg.inst[inst_idx].dpg_sram_gpu_addr, in amdgpu_jpeg_psp_update_sram()
343 .ucode_size = ((uintptr_t)adev->jpeg.inst[inst_idx].dpg_sram_curr_addr - in amdgpu_jpeg_psp_update_sram()
344 (uintptr_t)adev->jpeg.inst[inst_idx].dpg_sram_cpu_addr), in amdgpu_jpeg_psp_update_sram()
347 return psp_execute_ip_fw_load(&adev->psp, &ucode); in amdgpu_jpeg_psp_update_sram()
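
amdgpu_jpeg_psp_update_sram derives the firmware size handed to PSP by subtracting the DPG SRAM buffer's base CPU address from its current write pointer, i.e. however much of sw_init's 64 * 2 * 4-byte allocation has been filled. A minimal sketch of that bump-pointer size calculation, assuming the buffer holds register/value dword pairs (the pairs below are invented):

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uint32_t sram[64 * 2];              /* 64 pairs of dwords, 512 bytes */
	uint32_t *cpu_addr = sram;          /* dpg_sram_cpu_addr  */
	uint32_t *curr_addr = sram;         /* dpg_sram_curr_addr */

	/* emit a few hypothetical (register, value) pairs */
	*curr_addr++ = 0x1000; *curr_addr++ = 0xdeadbeef;
	*curr_addr++ = 0x1004; *curr_addr++ = 0xcafebabe;

	size_t ucode_size = (uintptr_t)curr_addr - (uintptr_t)cpu_addr;
	printf("ucode_size = %zu bytes\n", ucode_size);
	return 0;
}
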
356 struct amdgpu_device *adev = (struct amdgpu_device *)data; in amdgpu_debugfs_jpeg_sched_mask_set() local
361 if (!adev) in amdgpu_debugfs_jpeg_sched_mask_set()
364 mask = (1ULL << (adev->jpeg.num_jpeg_inst * adev->jpeg.num_jpeg_rings)) - 1; in amdgpu_debugfs_jpeg_sched_mask_set()
368 for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) { in amdgpu_debugfs_jpeg_sched_mask_set()
369 for (j = 0; j < adev->jpeg.num_jpeg_rings; ++j) { in amdgpu_debugfs_jpeg_sched_mask_set()
370 ring = &adev->jpeg.inst[i].ring_dec[j]; in amdgpu_debugfs_jpeg_sched_mask_set()
371 if (val & (1 << ((i * adev->jpeg.num_jpeg_rings) + j))) in amdgpu_debugfs_jpeg_sched_mask_set()
384 struct amdgpu_device *adev = (struct amdgpu_device *)data; in amdgpu_debugfs_jpeg_sched_mask_get() local
389 if (!adev) in amdgpu_debugfs_jpeg_sched_mask_get()
391 for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) { in amdgpu_debugfs_jpeg_sched_mask_get()
392 for (j = 0; j < adev->jpeg.num_jpeg_rings; ++j) { in amdgpu_debugfs_jpeg_sched_mask_get()
393 ring = &adev->jpeg.inst[i].ring_dec[j]; in amdgpu_debugfs_jpeg_sched_mask_get()
395 mask |= 1ULL << ((i * adev->jpeg.num_jpeg_rings) + j); in amdgpu_debugfs_jpeg_sched_mask_get()
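
The jpeg_sched_mask debugfs set/get fragments map each (instance, ring) pair to bit i * num_jpeg_rings + j, and the full legal mask is the low num_jpeg_inst * num_jpeg_rings bits. A minimal sketch of that bit layout with example counts:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	unsigned int num_jpeg_inst = 2, num_jpeg_rings = 4;  /* example only */
	uint64_t full_mask = (1ULL << (num_jpeg_inst * num_jpeg_rings)) - 1;
	unsigned int i, j;

	printf("full mask: 0x%llx\n", (unsigned long long)full_mask);
	for (i = 0; i < num_jpeg_inst; i++)
		for (j = 0; j < num_jpeg_rings; j++)
			printf("inst %u ring %u -> bit %u\n",
			       i, j, i * num_jpeg_rings + j);
	return 0;
}
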
408 void amdgpu_debugfs_jpeg_sched_mask_init(struct amdgpu_device *adev) in amdgpu_debugfs_jpeg_sched_mask_init() argument
411 struct drm_minor *minor = adev_to_drm(adev)->primary; in amdgpu_debugfs_jpeg_sched_mask_init()
415 if (!(adev->jpeg.num_jpeg_inst > 1) && !(adev->jpeg.num_jpeg_rings > 1)) in amdgpu_debugfs_jpeg_sched_mask_init()
418 debugfs_create_file(name, 0600, root, adev, in amdgpu_debugfs_jpeg_sched_mask_init()
428 struct amdgpu_device *adev = drm_to_adev(ddev); in amdgpu_get_jpeg_reset_mask() local
430 if (!adev) in amdgpu_get_jpeg_reset_mask()
433 return amdgpu_show_reset_mask(buf, adev->jpeg.supported_reset); in amdgpu_get_jpeg_reset_mask()
439 int amdgpu_jpeg_sysfs_reset_mask_init(struct amdgpu_device *adev) in amdgpu_jpeg_sysfs_reset_mask_init() argument
443 if (adev->jpeg.num_jpeg_inst) { in amdgpu_jpeg_sysfs_reset_mask_init()
444 r = device_create_file(adev->dev, &dev_attr_jpeg_reset_mask); in amdgpu_jpeg_sysfs_reset_mask_init()
452 void amdgpu_jpeg_sysfs_reset_mask_fini(struct amdgpu_device *adev) in amdgpu_jpeg_sysfs_reset_mask_fini() argument
454 if (adev->dev->kobj.sd) { in amdgpu_jpeg_sysfs_reset_mask_fini()
455 if (adev->jpeg.num_jpeg_inst) in amdgpu_jpeg_sysfs_reset_mask_fini()
456 device_remove_file(adev->dev, &dev_attr_jpeg_reset_mask); in amdgpu_jpeg_sysfs_reset_mask_fini()
460 int amdgpu_jpeg_reg_dump_init(struct amdgpu_device *adev, in amdgpu_jpeg_reg_dump_init() argument
463 adev->jpeg.ip_dump = kcalloc(adev->jpeg.num_jpeg_inst * count, in amdgpu_jpeg_reg_dump_init()
465 if (!adev->jpeg.ip_dump) { in amdgpu_jpeg_reg_dump_init()
469 adev->jpeg.reg_list = reg; in amdgpu_jpeg_reg_dump_init()
470 adev->jpeg.reg_count = count; in amdgpu_jpeg_reg_dump_init()
475 static void amdgpu_jpeg_reg_dump_fini(struct amdgpu_device *adev) in amdgpu_jpeg_reg_dump_fini() argument
477 kfree(adev->jpeg.ip_dump); in amdgpu_jpeg_reg_dump_fini()
478 adev->jpeg.reg_list = NULL; in amdgpu_jpeg_reg_dump_fini()
479 adev->jpeg.reg_count = 0; in amdgpu_jpeg_reg_dump_fini()
484 struct amdgpu_device *adev = ip_block->adev; in amdgpu_jpeg_dump_ip_state() local
488 if (!adev->jpeg.ip_dump) in amdgpu_jpeg_dump_ip_state()
491 for (i = 0; i < adev->jpeg.num_jpeg_inst; i++) { in amdgpu_jpeg_dump_ip_state()
492 if (adev->jpeg.harvest_config & (1 << i)) in amdgpu_jpeg_dump_ip_state()
496 inst_off = i * adev->jpeg.reg_count; in amdgpu_jpeg_dump_ip_state()
498 adev->jpeg.ip_dump[inst_off] = in amdgpu_jpeg_dump_ip_state()
499 RREG32(SOC15_REG_ENTRY_OFFSET_INST(adev->jpeg.reg_list[0], in amdgpu_jpeg_dump_ip_state()
501 is_powered = ((adev->jpeg.ip_dump[inst_off] & 0x1) != 1); in amdgpu_jpeg_dump_ip_state()
504 for (j = 1; j < adev->jpeg.reg_count; j++) in amdgpu_jpeg_dump_ip_state()
505 adev->jpeg.ip_dump[inst_off + j] = in amdgpu_jpeg_dump_ip_state()
506 RREG32(SOC15_REG_ENTRY_OFFSET_INST(adev->jpeg.reg_list[j], in amdgpu_jpeg_dump_ip_state()
513 struct amdgpu_device *adev = ip_block->adev; in amdgpu_jpeg_print_ip_state() local
517 if (!adev->jpeg.ip_dump) in amdgpu_jpeg_print_ip_state()
520 drm_printf(p, "num_instances:%d\n", adev->jpeg.num_jpeg_inst); in amdgpu_jpeg_print_ip_state()
521 for (i = 0; i < adev->jpeg.num_jpeg_inst; i++) { in amdgpu_jpeg_print_ip_state()
522 if (adev->jpeg.harvest_config & (1 << i)) { in amdgpu_jpeg_print_ip_state()
527 inst_off = i * adev->jpeg.reg_count; in amdgpu_jpeg_print_ip_state()
528 is_powered = ((adev->jpeg.ip_dump[inst_off] & 0x1) != 1); in amdgpu_jpeg_print_ip_state()
532 for (j = 0; j < adev->jpeg.reg_count; j++) in amdgpu_jpeg_print_ip_state()
533 drm_printf(p, "%-50s \t 0x%08x\n", adev->jpeg.reg_list[j].reg_name, in amdgpu_jpeg_print_ip_state()
534 adev->jpeg.ip_dump[inst_off + j]); in amdgpu_jpeg_print_ip_state()
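
dump_ip_state and print_ip_state share one flat ip_dump array laid out as num_jpeg_inst blocks of reg_count dwords: instance i starts at offset i * reg_count, word 0 of each block holds the power-status register, and is_powered is derived from its low bit (bit set means gated, and the remaining registers appear to be read or printed only for powered instances). A minimal sketch of that layout with made-up counts and values:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

int main(void)
{
	unsigned int num_jpeg_inst = 2, reg_count = 3;       /* example only */
	uint32_t *ip_dump = calloc((size_t)num_jpeg_inst * reg_count,
				   sizeof(*ip_dump));
	unsigned int i, j;

	if (!ip_dump)
		return 1;

	ip_dump[0 * reg_count] = 0x0;   /* instance 0: bit 0 clear -> powered */
	ip_dump[1 * reg_count] = 0x1;   /* instance 1: bit 0 set   -> gated   */

	for (i = 0; i < num_jpeg_inst; i++) {
		unsigned int inst_off = i * reg_count;
		bool is_powered = (ip_dump[inst_off] & 0x1) != 1;

		printf("instance %u %s\n", i, is_powered ? "powered" : "gated");
		if (!is_powered)
			continue;       /* only the status word is meaningful */
		for (j = 0; j < reg_count; j++)
			printf("  reg[%u] = 0x%08x\n", j, ip_dump[inst_off + j]);
	}
	free(ip_dump);
	return 0;
}
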