1 /*
2  * Copyright 2021 Advanced Micro Devices, Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice shall be included in
12  * all copies or substantial portions of the Software.
13  *
14  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20  * OTHER DEALINGS IN THE SOFTWARE.
21  *
22  * Authors: AMD
23  *
24  */
25 
26 #include "amdgpu_dm_psr.h"
27 #include "dc_dmub_srv.h"
28 #include "dc.h"
29 #include "dm_helpers.h"
30 #include "amdgpu_dm.h"
31 #include "modules/power/power_helpers.h"
32 
33 static bool is_specific_oled_panel(struct dc_link *link)
34 {
35 	if (!link->dpcd_sink_ext_caps.bits.oled)
36 		return false;
37 
38 	/* Disable PSR-SU for some OLED panels to avoid glitches */
39 	if (link->dpcd_caps.sink_dev_id == 0xBA4159) {
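		/*
		 * Also require the start of the sink device ID string to match,
		 * so only this specific panel variant is excluded.
		 */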
40 		uint8_t sink_dev_id_str1[] = {'4', '0', 'C', 'U', '1'};
41 
42 		if (!memcmp(link->dpcd_caps.sink_dev_id_str, sink_dev_id_str1,
43 		    sizeof(sink_dev_id_str1)))
44 			return true;
45 	}
46 
47 	return false;
48 }
49 
50 static bool link_supports_psrsu(struct dc_link *link)
51 {
52 	struct dc *dc = link->ctx->dc;
53 
54 	if (!dc->caps.dmcub_support)
55 		return false;
56 
57 	if (dc->ctx->dce_version < DCN_VERSION_3_1)
58 		return false;
59 
60 	if (is_specific_oled_panel(link))
61 		return false;
62 
63 	if (!is_psr_su_specific_panel(link))
64 		return false;
65 
66 	if (!link->dpcd_caps.alpm_caps.bits.AUX_WAKE_ALPM_CAP ||
67 	    !link->dpcd_caps.psr_info.psr_dpcd_caps.bits.Y_COORDINATE_REQUIRED)
68 		return false;
69 
	/* A sink that requires SU granularity must also report a Y granularity */
	if (link->dpcd_caps.psr_info.psr_dpcd_caps.bits.SU_GRANULARITY_REQUIRED &&
	    !link->dpcd_caps.psr_info.psr2_su_y_granularity_cap)
		return false;
73 
74 	if (amdgpu_dc_debug_mask & DC_DISABLE_PSR_SU)
75 		return false;
76 
	/* The loaded DMUB firmware must also be new enough for PSR-SU */
	return dc_dmub_check_min_version(dc->ctx->dmub_srv->dmub);
78 }
79 
80 /*
81  * amdgpu_dm_set_psr_caps() - set link psr capabilities
82  * @link: link
83  *
84  */
85 void amdgpu_dm_set_psr_caps(struct dc_link *link)
86 {
87 	if (!(link->connector_signal & SIGNAL_TYPE_EDP)) {
88 		link->psr_settings.psr_feature_enabled = false;
89 		return;
90 	}
91 
92 	if (link->type == dc_connection_none) {
93 		link->psr_settings.psr_feature_enabled = false;
94 		return;
95 	}
96 
97 	if (link->dpcd_caps.psr_info.psr_version == 0) {
98 		link->psr_settings.psr_version = DC_PSR_VERSION_UNSUPPORTED;
99 		link->psr_settings.psr_feature_enabled = false;
100 
101 	} else {
102 		if (link_supports_psrsu(link))
103 			link->psr_settings.psr_version = DC_PSR_VERSION_SU_1;
104 		else
105 			link->psr_settings.psr_version = DC_PSR_VERSION_1;
106 
107 		link->psr_settings.psr_feature_enabled = true;
108 	}
109 
	DRM_INFO("PSR support %d, DC PSR ver %d, sink PSR ver %d, DPCD caps 0x%x, su_y_granularity %d\n",
111 		link->psr_settings.psr_feature_enabled,
112 		link->psr_settings.psr_version,
113 		link->dpcd_caps.psr_info.psr_version,
114 		link->dpcd_caps.psr_info.psr_dpcd_caps.raw,
		link->dpcd_caps.psr_info.psr2_su_y_granularity_cap);
117 }
118 
119 /*
120  * amdgpu_dm_link_setup_psr() - configure psr link
121  * @stream: stream state
122  *
123  * Return: true if success
124  */
125 bool amdgpu_dm_link_setup_psr(struct dc_stream_state *stream)
126 {
127 	struct dc_link *link = NULL;
128 	struct psr_config psr_config = {0};
129 	struct psr_context psr_context = {0};
130 	struct dc *dc = NULL;
131 	bool ret = false;
132 
133 	if (stream == NULL)
134 		return false;
135 
136 	link = stream->link;
137 	dc = link->ctx->dc;
138 
139 	if (link->psr_settings.psr_version != DC_PSR_VERSION_UNSUPPORTED) {
140 		mod_power_calc_psr_configs(&psr_config, link, stream);
141 
		/* Linux DM-specific updates to the PSR config fields */
143 		psr_config.allow_smu_optimizations =
144 			(amdgpu_dc_feature_mask & DC_PSR_ALLOW_SMU_OPT) &&
145 			mod_power_only_edp(dc->current_state, stream);
146 		psr_config.allow_multi_disp_optimizations =
147 			(amdgpu_dc_feature_mask & DC_PSR_ALLOW_MULTI_DISP_OPT);
148 
149 		if (!psr_su_set_dsc_slice_height(dc, link, stream, &psr_config))
150 			return false;
151 
		ret = dc_link_setup_psr(link, stream, &psr_config, &psr_context);
	}

	DRM_DEBUG_DRIVER("PSR link: %d\n", link->psr_settings.psr_feature_enabled);
156 
157 	return ret;
158 }
159 
160 /*
161  * amdgpu_dm_psr_enable() - enable psr f/w
162  * @stream: stream state
163  *
164  */
165 void amdgpu_dm_psr_enable(struct dc_stream_state *stream)
166 {
167 	struct dc_link *link = stream->link;
168 	unsigned int vsync_rate_hz = 0;
169 	struct dc_static_screen_params params = {0};
	/*
	 * Number of static frames before generating an interrupt to enter
	 * PSR; start from a fail-safe default of 2 frames.
	 */
	unsigned int num_frames_static = 2;
175 	unsigned int power_opt = 0;
176 	bool psr_enable = true;
177 
178 	DRM_DEBUG_DRIVER("Enabling psr...\n");
179 
	/* Refresh rate in Hz: pixel clock / (h_total * v_total) */
	vsync_rate_hz = div64_u64(div64_u64((
			stream->timing.pix_clk_100hz * (uint64_t)100),
			stream->timing.v_total),
			stream->timing.h_total);
184 
	/*
	 * Round up: calculate the number of frames needed for at least 30 ms
	 * of static time to have passed.
	 */
189 	if (vsync_rate_hz != 0) {
190 		unsigned int frame_time_microsec = 1000000 / vsync_rate_hz;
191 
192 		num_frames_static = (30000 / frame_time_microsec) + 1;
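		/* e.g. at 60 Hz: frame time 16666 us, 30000 / 16666 + 1 = 2 frames */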
193 	}
194 
195 	params.triggers.cursor_update = true;
196 	params.triggers.overlay_update = true;
197 	params.triggers.surface_update = true;
198 	params.num_frames = num_frames_static;
199 
200 	dc_stream_set_static_screen_params(link->ctx->dc,
201 					   &stream, 1,
202 					   &params);
203 
204 	/*
205 	 * Only enable static-screen optimizations for PSR1. For PSR SU, this
206 	 * causes vstartup interrupt issues, used by amdgpu_dm to send vblank
207 	 * events.
208 	 */
209 	if (link->psr_settings.psr_version < DC_PSR_VERSION_SU_1)
210 		power_opt |= psr_power_opt_z10_static_screen;
211 
212 	dc_link_set_psr_allow_active(link, &psr_enable, false, false, &power_opt);
213 
	/* With PSR enabled, allow DC idle optimizations (IPS) where supported */
	if (link->ctx->dc->caps.ips_support)
		dc_allow_idle_optimizations(link->ctx->dc, true);
216 }
217 
218 /*
219  * amdgpu_dm_psr_disable() - disable psr f/w
220  * @stream:  stream state
221  *
222  * Return: true if success
223  */
224 bool amdgpu_dm_psr_disable(struct dc_stream_state *stream, bool wait)
225 {
226 	bool psr_enable = false;
227 
228 	DRM_DEBUG_DRIVER("Disabling psr...\n");
229 
230 	return dc_link_set_psr_allow_active(stream->link, &psr_enable, wait, false, NULL);
231 }
232 
233 /*
234  * amdgpu_dm_psr_disable_all() - disable psr f/w for all streams
235  * if psr is enabled on any stream
236  *
237  * Return: true if success
238  */
239 bool amdgpu_dm_psr_disable_all(struct amdgpu_display_manager *dm)
240 {
241 	DRM_DEBUG_DRIVER("Disabling psr if psr is enabled on any stream\n");
242 	return dc_set_psr_allow_active(dm->dc, false);
243 }
244 
245 /*
246  * amdgpu_dm_psr_is_active_allowed() - check if psr is allowed on any stream
247  * @dm:  pointer to amdgpu_display_manager
248  *
249  * Return: true if allowed
250  */
251 
252 bool amdgpu_dm_psr_is_active_allowed(struct amdgpu_display_manager *dm)
253 {
254 	unsigned int i;
255 	bool allow_active = false;
256 
257 	for (i = 0; i < dm->dc->current_state->stream_count ; i++) {
258 		struct dc_link *link;
259 		struct dc_stream_state *stream = dm->dc->current_state->streams[i];
260 
261 		link = stream->link;
262 		if (!link)
263 			continue;
264 		if (link->psr_settings.psr_feature_enabled &&
265 		    link->psr_settings.psr_allow_active) {
266 			allow_active = true;
267 			break;
268 		}
269 	}
270 
271 	return allow_active;
272 }
273 
274 /**
275  * amdgpu_dm_psr_wait_disable() - Wait for eDP panel to exit PSR
276  * @stream: stream state attached to the eDP link
277  *
278  * Waits for a max of 500ms for the eDP panel to exit PSR.
279  *
280  * Return: true if panel exited PSR, false otherwise.
281  */
282 bool amdgpu_dm_psr_wait_disable(struct dc_stream_state *stream)
283 {
284 	enum dc_psr_state psr_state = PSR_STATE0;
285 	struct dc_link *link = stream->link;
286 	int retry_count;
287 
288 	if (link == NULL)
289 		return false;
290 
	/* Poll the PSR state every 500 us, up to 1000 times (500 ms total) */
	for (retry_count = 0; retry_count < 1000; retry_count++) {
292 		dc_link_get_psr_state(link, &psr_state);
293 		if (psr_state == PSR_STATE0)
294 			break;
295 		udelay(500);
296 	}
297 
298 	if (retry_count == 1000)
299 		return false;
300 
301 	return true;
302 }
303