2 * Copyright 2021 Advanced Micro Devices, Inc.
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
27 #include "clk_mgr_internal.h"
29 #include "dcn32/dcn32_clk_mgr_smu_msg.h"
30 #include "dcn20/dcn20_clk_mgr.h"
31 #include "dce100/dce_clk_mgr.h"
32 #include "dcn31/dcn31_clk_mgr.h"
33 #include "reg_helper.h"
34 #include "core_types.h"
35 #include "dm_helpers.h"
38 #include "atomfirmware.h"
39 #include "smu13_driver_if.h"
41 #include "dcn/dcn_3_2_0_offset.h"
42 #include "dcn/dcn_3_2_0_sh_mask.h"
44 #include "dcn32/dcn32_clk_mgr.h"
45 #include "dml/dcn32/dcn32_fpu.h"
#define DCN_BASE__INST0_SEG1 0x000000C0

/* CLK1 instance register offsets (DFS control per clock slice, PLL request). */
#define mmCLK1_CLK_PLL_REQ 0x16E37
#define mmCLK1_CLK0_DFS_CNTL 0x16E69
#define mmCLK1_CLK1_DFS_CNTL 0x16E6C
#define mmCLK1_CLK2_DFS_CNTL 0x16E6F
#define mmCLK1_CLK3_DFS_CNTL 0x16E72
#define mmCLK1_CLK4_DFS_CNTL 0x16E75

/* CLK1_CLK_PLL_REQ fields: integer/fractional feedback multiplier and spine divider. */
#define CLK1_CLK_PLL_REQ__FbMult_int_MASK 0x000001ffUL
#define CLK1_CLK_PLL_REQ__PllSpineDiv_MASK 0x0000f000UL
#define CLK1_CLK_PLL_REQ__FbMult_frac_MASK 0xffff0000UL
#define CLK1_CLK_PLL_REQ__FbMult_int__SHIFT 0x00000000
#define CLK1_CLK_PLL_REQ__PllSpineDiv__SHIFT 0x0000000c
#define CLK1_CLK_PLL_REQ__FbMult_frac__SHIFT 0x00000010

/* CLK01/CLK0 instance register offsets (GC 11.0.2 variant addressed via CLK0). */
#define mmCLK01_CLK0_CLK_PLL_REQ 0x16E37
#define mmCLK01_CLK0_CLK0_DFS_CNTL 0x16E64
#define mmCLK01_CLK0_CLK1_DFS_CNTL 0x16E67
#define mmCLK01_CLK0_CLK2_DFS_CNTL 0x16E6A
#define mmCLK01_CLK0_CLK3_DFS_CNTL 0x16E6D
#define mmCLK01_CLK0_CLK4_DFS_CNTL 0x16E70

/* CLK0_CLK_PLL_REQ fields — same layout as the CLK1 variant above. */
#define CLK0_CLK_PLL_REQ__FbMult_int_MASK 0x000001ffL
#define CLK0_CLK_PLL_REQ__PllSpineDiv_MASK 0x0000f000L
#define CLK0_CLK_PLL_REQ__FbMult_frac_MASK 0xffff0000L
#define CLK0_CLK_PLL_REQ__FbMult_int__SHIFT 0x00000000
#define CLK0_CLK_PLL_REQ__PllSpineDiv__SHIFT 0x0000000c
#define CLK0_CLK_PLL_REQ__FbMult_frac__SHIFT 0x00000010
/* Expand a register field name to its (shift, mask) pair for REG_* helpers. */
#define FN(reg_name, field_name) \
	clk_mgr->clk_mgr_shift->field_name, clk_mgr->clk_mgr_mask->field_name

#define BASE_INNER(seg) DCN_BASE__INST0_SEG ## seg

#define BASE(seg) BASE_INNER(seg)

/* NOTE(review): the '#define SR(reg_name)' header line was lost in extraction;
 * restored from the standard DCN clk_mgr pattern — confirm against upstream. */
#define SR(reg_name)\
		.reg_name = BASE(reg ## reg_name ## _BASE_IDX) + \
					reg ## reg_name

/* DCN32 uses raw mm-prefixed offsets rather than segment-relative addressing. */
#define CLK_SR_DCN32(reg_name)\
	.reg_name = mm ## reg_name
95 static const struct clk_mgr_registers clk_mgr_regs_dcn32
= {
99 static const struct clk_mgr_shift clk_mgr_shift_dcn32
= {
100 CLK_COMMON_MASK_SH_LIST_DCN32(__SHIFT
)
103 static const struct clk_mgr_mask clk_mgr_mask_dcn32
= {
104 CLK_COMMON_MASK_SH_LIST_DCN32(_MASK
)
108 #define CLK_SR_DCN321(reg_name, block, inst)\
109 .reg_name = mm ## block ## _ ## reg_name
111 static const struct clk_mgr_registers clk_mgr_regs_dcn321
= {
112 CLK_REG_LIST_DCN321()
115 static const struct clk_mgr_shift clk_mgr_shift_dcn321
= {
116 CLK_COMMON_MASK_SH_LIST_DCN321(__SHIFT
)
119 static const struct clk_mgr_mask clk_mgr_mask_dcn321
= {
120 CLK_COMMON_MASK_SH_LIST_DCN321(_MASK
)
124 /* Query SMU for all clock states for a particular clock */
125 static void dcn32_init_single_clock(struct clk_mgr_internal
*clk_mgr
, PPCLK_e clk
, unsigned int *entry_0
,
126 unsigned int *num_levels
)
129 char *entry_i
= (char *)entry_0
;
131 uint32_t ret
= dcn30_smu_get_dpm_freq_by_index(clk_mgr
, clk
, 0xFF);
134 /* fine-grained, only min and max */
137 /* discrete, a number of fixed states */
138 /* will set num_levels to 0 on failure */
139 *num_levels
= ret
& 0xFF;
141 /* if the initial message failed, num_levels will be 0 */
142 for (i
= 0; i
< *num_levels
; i
++) {
143 *((unsigned int *)entry_i
) = (dcn30_smu_get_dpm_freq_by_index(clk_mgr
, clk
, i
) & 0xFFFF);
144 entry_i
+= sizeof(clk_mgr
->base
.bw_params
->clk_table
.entries
[0]);
/* Build the watermark range table; the real work is floating-point code in DML,
 * so it must run inside a DC_FP_START()/DC_FP_END() kernel-FPU section.
 */
static void dcn32_build_wm_range_table(struct clk_mgr_internal *clk_mgr)
{
	DC_FP_START();
	dcn32_build_wm_range_table_fpu(clk_mgr);
	DC_FP_END();
}
155 void dcn32_init_clocks(struct clk_mgr
*clk_mgr_base
)
157 struct clk_mgr_internal
*clk_mgr
= TO_CLK_MGR_INTERNAL(clk_mgr_base
);
158 unsigned int num_levels
;
159 struct clk_limit_num_entries
*num_entries_per_clk
= &clk_mgr_base
->bw_params
->clk_table
.num_entries_per_clk
;
162 memset(&(clk_mgr_base
->clks
), 0, sizeof(struct dc_clocks
));
163 clk_mgr_base
->clks
.p_state_change_support
= true;
164 clk_mgr_base
->clks
.prev_p_state_change_support
= true;
165 clk_mgr_base
->clks
.fclk_prev_p_state_change_support
= true;
166 clk_mgr
->smu_present
= false;
167 clk_mgr
->dpm_present
= false;
169 if (!clk_mgr_base
->bw_params
)
172 if (!clk_mgr_base
->force_smu_not_present
&& dcn30_smu_get_smu_version(clk_mgr
, &clk_mgr
->smu_ver
))
173 clk_mgr
->smu_present
= true;
175 if (!clk_mgr
->smu_present
)
178 dcn30_smu_check_driver_if_version(clk_mgr
);
179 dcn30_smu_check_msg_header_version(clk_mgr
);
182 dcn32_init_single_clock(clk_mgr
, PPCLK_DCFCLK
,
183 &clk_mgr_base
->bw_params
->clk_table
.entries
[0].dcfclk_mhz
,
184 &num_entries_per_clk
->num_dcfclk_levels
);
185 clk_mgr_base
->bw_params
->dc_mode_limit
.dcfclk_mhz
= dcn30_smu_get_dc_mode_max_dpm_freq(clk_mgr
, PPCLK_DCFCLK
);
188 dcn32_init_single_clock(clk_mgr
, PPCLK_SOCCLK
,
189 &clk_mgr_base
->bw_params
->clk_table
.entries
[0].socclk_mhz
,
190 &num_entries_per_clk
->num_socclk_levels
);
191 clk_mgr_base
->bw_params
->dc_mode_limit
.socclk_mhz
= dcn30_smu_get_dc_mode_max_dpm_freq(clk_mgr
, PPCLK_SOCCLK
);
194 if (!clk_mgr
->base
.ctx
->dc
->debug
.disable_dtb_ref_clk_switch
) {
195 dcn32_init_single_clock(clk_mgr
, PPCLK_DTBCLK
,
196 &clk_mgr_base
->bw_params
->clk_table
.entries
[0].dtbclk_mhz
,
197 &num_entries_per_clk
->num_dtbclk_levels
);
198 clk_mgr_base
->bw_params
->dc_mode_limit
.dtbclk_mhz
=
199 dcn30_smu_get_dc_mode_max_dpm_freq(clk_mgr
, PPCLK_DTBCLK
);
203 dcn32_init_single_clock(clk_mgr
, PPCLK_DISPCLK
,
204 &clk_mgr_base
->bw_params
->clk_table
.entries
[0].dispclk_mhz
,
205 &num_entries_per_clk
->num_dispclk_levels
);
206 num_levels
= num_entries_per_clk
->num_dispclk_levels
;
207 clk_mgr_base
->bw_params
->dc_mode_limit
.dispclk_mhz
= dcn30_smu_get_dc_mode_max_dpm_freq(clk_mgr
, PPCLK_DISPCLK
);
208 //HW recommends limit of 1950 MHz in display clock for all DCN3.2.x
209 if (clk_mgr_base
->bw_params
->dc_mode_limit
.dispclk_mhz
> 1950)
210 clk_mgr_base
->bw_params
->dc_mode_limit
.dispclk_mhz
= 1950;
212 if (num_entries_per_clk
->num_dcfclk_levels
&&
213 num_entries_per_clk
->num_dtbclk_levels
&&
214 num_entries_per_clk
->num_dispclk_levels
)
215 clk_mgr
->dpm_present
= true;
217 if (clk_mgr_base
->ctx
->dc
->debug
.min_disp_clk_khz
) {
218 for (i
= 0; i
< num_levels
; i
++)
219 if (clk_mgr_base
->bw_params
->clk_table
.entries
[i
].dispclk_mhz
220 < khz_to_mhz_ceil(clk_mgr_base
->ctx
->dc
->debug
.min_disp_clk_khz
))
221 clk_mgr_base
->bw_params
->clk_table
.entries
[i
].dispclk_mhz
222 = khz_to_mhz_ceil(clk_mgr_base
->ctx
->dc
->debug
.min_disp_clk_khz
);
224 for (i
= 0; i
< num_levels
; i
++)
225 if (clk_mgr_base
->bw_params
->clk_table
.entries
[i
].dispclk_mhz
> 1950)
226 clk_mgr_base
->bw_params
->clk_table
.entries
[i
].dispclk_mhz
= 1950;
228 if (clk_mgr_base
->ctx
->dc
->debug
.min_dpp_clk_khz
) {
229 for (i
= 0; i
< num_levels
; i
++)
230 if (clk_mgr_base
->bw_params
->clk_table
.entries
[i
].dppclk_mhz
231 < khz_to_mhz_ceil(clk_mgr_base
->ctx
->dc
->debug
.min_dpp_clk_khz
))
232 clk_mgr_base
->bw_params
->clk_table
.entries
[i
].dppclk_mhz
233 = khz_to_mhz_ceil(clk_mgr_base
->ctx
->dc
->debug
.min_dpp_clk_khz
);
236 /* Get UCLK, update bounding box */
237 clk_mgr_base
->funcs
->get_memclk_states_from_smu(clk_mgr_base
);
241 dcn32_build_wm_range_table(clk_mgr
);
245 static void dcn32_update_clocks_update_dtb_dto(struct clk_mgr_internal
*clk_mgr
,
246 struct dc_state
*context
,
249 struct dccg
*dccg
= clk_mgr
->dccg
;
250 uint32_t tg_mask
= 0;
253 for (i
= 0; i
< clk_mgr
->base
.ctx
->dc
->res_pool
->pipe_count
; i
++) {
254 struct pipe_ctx
*pipe_ctx
= &context
->res_ctx
.pipe_ctx
[i
];
255 struct dtbclk_dto_params dto_params
= {0};
257 /* use mask to program DTO once per tg */
258 if (pipe_ctx
->stream_res
.tg
&&
259 !(tg_mask
& (1 << pipe_ctx
->stream_res
.tg
->inst
))) {
260 tg_mask
|= (1 << pipe_ctx
->stream_res
.tg
->inst
);
262 dto_params
.otg_inst
= pipe_ctx
->stream_res
.tg
->inst
;
263 dto_params
.ref_dtbclk_khz
= ref_dtbclk_khz
;
265 dccg
->funcs
->set_dtbclk_dto(clk_mgr
->dccg
, &dto_params
);
266 //dccg->funcs->set_audio_dtbclk_dto(clk_mgr->dccg, &dto_params);
271 /* Since DPPCLK request to PMFW needs to be exact (due to DPP DTO programming),
272 * update DPPCLK to be the exact frequency that will be set after the DPPCLK
273 * divider is updated. This will prevent rounding issues that could cause DPP
274 * refclk and DPP DTO to not match up.
276 static void dcn32_update_dppclk_dispclk_freq(struct clk_mgr_internal
*clk_mgr
, struct dc_clocks
*new_clocks
)
279 int disp_divider
= 0;
281 if (new_clocks
->dppclk_khz
) {
282 dpp_divider
= DENTIST_DIVIDER_RANGE_SCALE_FACTOR
283 * clk_mgr
->base
.dentist_vco_freq_khz
/ new_clocks
->dppclk_khz
;
284 new_clocks
->dppclk_khz
= (DENTIST_DIVIDER_RANGE_SCALE_FACTOR
* clk_mgr
->base
.dentist_vco_freq_khz
) / dpp_divider
;
286 if (new_clocks
->dispclk_khz
> 0) {
287 disp_divider
= DENTIST_DIVIDER_RANGE_SCALE_FACTOR
288 * clk_mgr
->base
.dentist_vco_freq_khz
/ new_clocks
->dispclk_khz
;
289 new_clocks
->dispclk_khz
= (DENTIST_DIVIDER_RANGE_SCALE_FACTOR
* clk_mgr
->base
.dentist_vco_freq_khz
) / disp_divider
;
293 void dcn32_update_clocks_update_dpp_dto(struct clk_mgr_internal
*clk_mgr
,
294 struct dc_state
*context
, bool safe_to_lower
)
298 clk_mgr
->dccg
->ref_dppclk
= clk_mgr
->base
.clks
.dppclk_khz
;
299 for (i
= 0; i
< clk_mgr
->base
.ctx
->dc
->res_pool
->pipe_count
; i
++) {
300 int dpp_inst
= 0, dppclk_khz
, prev_dppclk_khz
;
302 dppclk_khz
= context
->res_ctx
.pipe_ctx
[i
].plane_res
.bw
.dppclk_khz
;
304 if (context
->res_ctx
.pipe_ctx
[i
].plane_res
.dpp
)
305 dpp_inst
= context
->res_ctx
.pipe_ctx
[i
].plane_res
.dpp
->inst
;
306 else if (!context
->res_ctx
.pipe_ctx
[i
].plane_res
.dpp
&& dppclk_khz
== 0) {
307 /* dpp == NULL && dppclk_khz == 0 is valid because of pipe harvesting.
308 * In this case just continue in loop
311 } else if (!context
->res_ctx
.pipe_ctx
[i
].plane_res
.dpp
&& dppclk_khz
> 0) {
312 /* The software state is not valid if dpp resource is NULL and
319 prev_dppclk_khz
= clk_mgr
->dccg
->pipe_dppclk_khz
[i
];
321 if (safe_to_lower
|| prev_dppclk_khz
< dppclk_khz
)
322 clk_mgr
->dccg
->funcs
->update_dpp_dto(
323 clk_mgr
->dccg
, dpp_inst
, dppclk_khz
);
327 static void dcn32_update_clocks_update_dentist(
328 struct clk_mgr_internal
*clk_mgr
,
329 struct dc_state
*context
)
331 uint32_t new_disp_divider
= 0;
332 uint32_t new_dispclk_wdivider
= 0;
333 uint32_t old_dispclk_wdivider
= 0;
335 uint32_t dentist_dispclk_wdivider_readback
= 0;
336 struct dc
*dc
= clk_mgr
->base
.ctx
->dc
;
338 if (clk_mgr
->base
.clks
.dispclk_khz
== 0)
341 new_disp_divider
= DENTIST_DIVIDER_RANGE_SCALE_FACTOR
342 * clk_mgr
->base
.dentist_vco_freq_khz
/ clk_mgr
->base
.clks
.dispclk_khz
;
344 new_dispclk_wdivider
= dentist_get_did_from_divider(new_disp_divider
);
345 REG_GET(DENTIST_DISPCLK_CNTL
,
346 DENTIST_DISPCLK_WDIVIDER
, &old_dispclk_wdivider
);
348 /* When changing divider to or from 127, some extra programming is required to prevent corruption */
349 if (old_dispclk_wdivider
== 127 && new_dispclk_wdivider
!= 127) {
350 for (i
= 0; i
< clk_mgr
->base
.ctx
->dc
->res_pool
->pipe_count
; i
++) {
351 struct pipe_ctx
*pipe_ctx
= &context
->res_ctx
.pipe_ctx
[i
];
353 struct dccg
*dccg
= clk_mgr
->base
.ctx
->dc
->res_pool
->dccg
;
354 struct stream_encoder
*stream_enc
= pipe_ctx
->stream_res
.stream_enc
;
358 if (!pipe_ctx
->stream
)
360 /* Virtual encoders don't have this function */
361 if (!stream_enc
->funcs
->get_fifo_cal_average_level
)
363 fifo_level
= stream_enc
->funcs
->get_fifo_cal_average_level(
366 dccg
->funcs
->set_fifo_errdet_ovr_en(
369 for (j
= 0; j
< N
- 4; j
++)
370 dccg
->funcs
->otg_drop_pixel(
372 pipe_ctx
->stream_res
.tg
->inst
);
373 dccg
->funcs
->set_fifo_errdet_ovr_en(
377 } else if (new_dispclk_wdivider
== 127 && old_dispclk_wdivider
!= 127) {
378 /* request clock with 126 divider first */
379 uint32_t temp_disp_divider
= dentist_get_divider_from_did(126);
380 uint32_t temp_dispclk_khz
= (DENTIST_DIVIDER_RANGE_SCALE_FACTOR
* clk_mgr
->base
.dentist_vco_freq_khz
) / temp_disp_divider
;
382 if (clk_mgr
->smu_present
)
383 dcn32_smu_set_hard_min_by_freq(clk_mgr
, PPCLK_DISPCLK
, khz_to_mhz_ceil(temp_dispclk_khz
));
385 if (dc
->debug
.override_dispclk_programming
) {
386 REG_GET(DENTIST_DISPCLK_CNTL
,
387 DENTIST_DISPCLK_WDIVIDER
, &dentist_dispclk_wdivider_readback
);
389 if (dentist_dispclk_wdivider_readback
!= 126) {
390 REG_UPDATE(DENTIST_DISPCLK_CNTL
,
391 DENTIST_DISPCLK_WDIVIDER
, 126);
392 REG_WAIT(DENTIST_DISPCLK_CNTL
, DENTIST_DISPCLK_CHG_DONE
, 1, 50, 2000);
396 for (i
= 0; i
< clk_mgr
->base
.ctx
->dc
->res_pool
->pipe_count
; i
++) {
397 struct pipe_ctx
*pipe_ctx
= &context
->res_ctx
.pipe_ctx
[i
];
398 struct dccg
*dccg
= clk_mgr
->base
.ctx
->dc
->res_pool
->dccg
;
399 struct stream_encoder
*stream_enc
= pipe_ctx
->stream_res
.stream_enc
;
404 if (!pipe_ctx
->stream
)
406 /* Virtual encoders don't have this function */
407 if (!stream_enc
->funcs
->get_fifo_cal_average_level
)
409 fifo_level
= stream_enc
->funcs
->get_fifo_cal_average_level(
412 dccg
->funcs
->set_fifo_errdet_ovr_en(dccg
, true);
413 for (j
= 0; j
< 12 - N
; j
++)
414 dccg
->funcs
->otg_add_pixel(dccg
,
415 pipe_ctx
->stream_res
.tg
->inst
);
416 dccg
->funcs
->set_fifo_errdet_ovr_en(dccg
, false);
420 /* do requested DISPCLK updates*/
421 if (clk_mgr
->smu_present
)
422 dcn32_smu_set_hard_min_by_freq(clk_mgr
, PPCLK_DISPCLK
, khz_to_mhz_ceil(clk_mgr
->base
.clks
.dispclk_khz
));
424 if (dc
->debug
.override_dispclk_programming
) {
425 REG_GET(DENTIST_DISPCLK_CNTL
,
426 DENTIST_DISPCLK_WDIVIDER
, &dentist_dispclk_wdivider_readback
);
428 if (dentist_dispclk_wdivider_readback
> new_dispclk_wdivider
) {
429 REG_UPDATE(DENTIST_DISPCLK_CNTL
,
430 DENTIST_DISPCLK_WDIVIDER
, new_dispclk_wdivider
);
431 REG_WAIT(DENTIST_DISPCLK_CNTL
, DENTIST_DISPCLK_CHG_DONE
, 1, 50, 2000);
437 static int dcn32_get_dispclk_from_dentist(struct clk_mgr
*clk_mgr_base
)
439 struct clk_mgr_internal
*clk_mgr
= TO_CLK_MGR_INTERNAL(clk_mgr_base
);
440 uint32_t dispclk_wdivider
;
443 REG_GET(DENTIST_DISPCLK_CNTL
, DENTIST_DISPCLK_WDIVIDER
, &dispclk_wdivider
);
444 disp_divider
= dentist_get_divider_from_did(dispclk_wdivider
);
446 /* Return DISPCLK freq in Khz */
448 return (DENTIST_DIVIDER_RANGE_SCALE_FACTOR
* clk_mgr
->base
.dentist_vco_freq_khz
) / disp_divider
;
454 static void dcn32_update_clocks(struct clk_mgr
*clk_mgr_base
,
455 struct dc_state
*context
,
458 struct clk_mgr_internal
*clk_mgr
= TO_CLK_MGR_INTERNAL(clk_mgr_base
);
459 struct dc_clocks
*new_clocks
= &context
->bw_ctx
.bw
.dcn
.clk
;
460 struct dc
*dc
= clk_mgr_base
->ctx
->dc
;
462 bool update_dppclk
= false;
463 bool update_dispclk
= false;
464 bool enter_display_off
= false;
465 bool dpp_clock_lowered
= false;
466 struct dmcu
*dmcu
= clk_mgr_base
->ctx
->dc
->res_pool
->dmcu
;
467 bool force_reset
= false;
468 bool update_uclk
= false, update_fclk
= false;
469 bool p_state_change_support
;
470 bool fclk_p_state_change_support
;
472 if (clk_mgr_base
->clks
.dispclk_khz
== 0 ||
473 (dc
->debug
.force_clock_mode
& 0x1)) {
474 /* This is from resume or boot up, if forced_clock cfg option used,
475 * we bypass program dispclk and DPPCLK, but need set them for S3.
479 dcn2_read_clocks_from_hw_dentist(clk_mgr_base
);
481 /* Force_clock_mode 0x1: force reset the clock even it is the same clock
482 * as long as it is in Passive level.
485 display_count
= clk_mgr_helper_get_active_display_cnt(dc
, context
);
487 if (display_count
== 0)
488 enter_display_off
= true;
490 if (clk_mgr
->smu_present
) {
491 if (enter_display_off
== safe_to_lower
)
492 dcn30_smu_set_num_of_displays(clk_mgr
, display_count
);
494 clk_mgr_base
->clks
.fclk_prev_p_state_change_support
= clk_mgr_base
->clks
.fclk_p_state_change_support
;
496 fclk_p_state_change_support
= new_clocks
->fclk_p_state_change_support
;
498 if (should_update_pstate_support(safe_to_lower
, fclk_p_state_change_support
, clk_mgr_base
->clks
.fclk_p_state_change_support
) &&
499 !dc
->work_arounds
.clock_update_disable_mask
.fclk
) {
500 clk_mgr_base
->clks
.fclk_p_state_change_support
= fclk_p_state_change_support
;
502 /* To enable FCLK P-state switching, send FCLK_PSTATE_SUPPORTED message to PMFW */
503 if (clk_mgr_base
->ctx
->dce_version
!= DCN_VERSION_3_21
&& clk_mgr_base
->clks
.fclk_p_state_change_support
) {
504 /* Handle the code for sending a message to PMFW that FCLK P-state change is supported */
505 dcn32_smu_send_fclk_pstate_message(clk_mgr
, FCLK_PSTATE_SUPPORTED
);
509 if (dc
->debug
.force_min_dcfclk_mhz
> 0)
510 new_clocks
->dcfclk_khz
= (new_clocks
->dcfclk_khz
> (dc
->debug
.force_min_dcfclk_mhz
* 1000)) ?
511 new_clocks
->dcfclk_khz
: (dc
->debug
.force_min_dcfclk_mhz
* 1000);
513 if (should_set_clock(safe_to_lower
, new_clocks
->dcfclk_khz
, clk_mgr_base
->clks
.dcfclk_khz
) &&
514 !dc
->work_arounds
.clock_update_disable_mask
.dcfclk
) {
515 clk_mgr_base
->clks
.dcfclk_khz
= new_clocks
->dcfclk_khz
;
516 dcn32_smu_set_hard_min_by_freq(clk_mgr
, PPCLK_DCFCLK
, khz_to_mhz_ceil(clk_mgr_base
->clks
.dcfclk_khz
));
519 if (should_set_clock(safe_to_lower
, new_clocks
->dcfclk_deep_sleep_khz
, clk_mgr_base
->clks
.dcfclk_deep_sleep_khz
) &&
520 !dc
->work_arounds
.clock_update_disable_mask
.dcfclk_ds
) {
521 clk_mgr_base
->clks
.dcfclk_deep_sleep_khz
= new_clocks
->dcfclk_deep_sleep_khz
;
522 dcn30_smu_set_min_deep_sleep_dcef_clk(clk_mgr
, khz_to_mhz_ceil(clk_mgr_base
->clks
.dcfclk_deep_sleep_khz
));
525 if (should_set_clock(safe_to_lower
, new_clocks
->socclk_khz
, clk_mgr_base
->clks
.socclk_khz
))
526 /* We don't actually care about socclk, don't notify SMU of hard min */
527 clk_mgr_base
->clks
.socclk_khz
= new_clocks
->socclk_khz
;
529 clk_mgr_base
->clks
.prev_p_state_change_support
= clk_mgr_base
->clks
.p_state_change_support
;
530 clk_mgr_base
->clks
.prev_num_ways
= clk_mgr_base
->clks
.num_ways
;
532 if (clk_mgr_base
->clks
.num_ways
!= new_clocks
->num_ways
&&
533 clk_mgr_base
->clks
.num_ways
< new_clocks
->num_ways
) {
534 clk_mgr_base
->clks
.num_ways
= new_clocks
->num_ways
;
535 dcn32_smu_send_cab_for_uclk_message(clk_mgr
, clk_mgr_base
->clks
.num_ways
);
538 p_state_change_support
= new_clocks
->p_state_change_support
;
539 if (should_update_pstate_support(safe_to_lower
, p_state_change_support
, clk_mgr_base
->clks
.p_state_change_support
) &&
540 !dc
->work_arounds
.clock_update_disable_mask
.uclk
) {
541 clk_mgr_base
->clks
.p_state_change_support
= p_state_change_support
;
543 /* to disable P-State switching, set UCLK min = max */
544 if (!clk_mgr_base
->clks
.p_state_change_support
) {
545 if (dc
->clk_mgr
->dc_mode_softmax_enabled
) {
546 /* On DCN32x we will never have the functional UCLK min above the softmax
547 * since we calculate mode support based on softmax being the max UCLK
550 dcn32_smu_set_hard_min_by_freq(clk_mgr
, PPCLK_UCLK
,
551 dc
->clk_mgr
->bw_params
->dc_mode_softmax_memclk
);
553 dcn32_smu_set_hard_min_by_freq(clk_mgr
, PPCLK_UCLK
, dc
->clk_mgr
->bw_params
->max_memclk_mhz
);
558 if (context
->bw_ctx
.bw
.dcn
.clk
.fw_based_mclk_switching
)
559 dcn32_smu_wait_for_dmub_ack_mclk(clk_mgr
, true);
561 dcn32_smu_wait_for_dmub_ack_mclk(clk_mgr
, false);
563 /* Always update saved value, even if new value not set due to P-State switching unsupported. Also check safe_to_lower for FCLK */
564 if (safe_to_lower
&& (clk_mgr_base
->clks
.fclk_p_state_change_support
!= clk_mgr_base
->clks
.fclk_prev_p_state_change_support
)) {
568 if (clk_mgr_base
->ctx
->dce_version
!= DCN_VERSION_3_21
&& !clk_mgr_base
->clks
.fclk_p_state_change_support
&& update_fclk
&&
569 !dc
->work_arounds
.clock_update_disable_mask
.fclk
) {
570 /* Handle code for sending a message to PMFW that FCLK P-state change is not supported */
571 dcn32_smu_send_fclk_pstate_message(clk_mgr
, FCLK_PSTATE_NOTSUPPORTED
);
574 /* Always update saved value, even if new value not set due to P-State switching unsupported */
575 if (should_set_clock(safe_to_lower
, new_clocks
->dramclk_khz
, clk_mgr_base
->clks
.dramclk_khz
) &&
576 !dc
->work_arounds
.clock_update_disable_mask
.uclk
) {
577 clk_mgr_base
->clks
.dramclk_khz
= new_clocks
->dramclk_khz
;
581 /* set UCLK to requested value if P-State switching is supported, or to re-enable P-State switching */
582 if (clk_mgr_base
->clks
.p_state_change_support
&&
583 (update_uclk
|| !clk_mgr_base
->clks
.prev_p_state_change_support
) &&
584 !dc
->work_arounds
.clock_update_disable_mask
.uclk
)
585 dcn32_smu_set_hard_min_by_freq(clk_mgr
, PPCLK_UCLK
, khz_to_mhz_ceil(clk_mgr_base
->clks
.dramclk_khz
));
587 if (clk_mgr_base
->clks
.num_ways
!= new_clocks
->num_ways
&&
588 clk_mgr_base
->clks
.num_ways
> new_clocks
->num_ways
) {
589 clk_mgr_base
->clks
.num_ways
= new_clocks
->num_ways
;
590 dcn32_smu_send_cab_for_uclk_message(clk_mgr
, clk_mgr_base
->clks
.num_ways
);
594 dcn32_update_dppclk_dispclk_freq(clk_mgr
, new_clocks
);
595 if (should_set_clock(safe_to_lower
, new_clocks
->dppclk_khz
, clk_mgr_base
->clks
.dppclk_khz
)) {
596 if (clk_mgr_base
->clks
.dppclk_khz
> new_clocks
->dppclk_khz
)
597 dpp_clock_lowered
= true;
599 clk_mgr_base
->clks
.dppclk_khz
= new_clocks
->dppclk_khz
;
601 if (clk_mgr
->smu_present
&& !dpp_clock_lowered
)
602 dcn32_smu_set_hard_min_by_freq(clk_mgr
, PPCLK_DPPCLK
, khz_to_mhz_ceil(clk_mgr_base
->clks
.dppclk_khz
));
604 update_dppclk
= true;
607 if (should_set_clock(safe_to_lower
, new_clocks
->dispclk_khz
, clk_mgr_base
->clks
.dispclk_khz
)) {
608 clk_mgr_base
->clks
.dispclk_khz
= new_clocks
->dispclk_khz
;
610 update_dispclk
= true;
613 if (!new_clocks
->dtbclk_en
) {
614 new_clocks
->ref_dtbclk_khz
= clk_mgr_base
->bw_params
->clk_table
.entries
[0].dtbclk_mhz
* 1000;
617 /* clock limits are received with MHz precision, divide by 1000 to prevent setting clocks at every call */
618 if (!dc
->debug
.disable_dtb_ref_clk_switch
&&
619 should_set_clock(safe_to_lower
, new_clocks
->ref_dtbclk_khz
/ 1000, clk_mgr_base
->clks
.ref_dtbclk_khz
/ 1000)) {
620 /* DCCG requires KHz precision for DTBCLK */
621 clk_mgr_base
->clks
.ref_dtbclk_khz
=
622 dcn32_smu_set_hard_min_by_freq(clk_mgr
, PPCLK_DTBCLK
, khz_to_mhz_ceil(new_clocks
->ref_dtbclk_khz
));
623 dcn32_update_clocks_update_dtb_dto(clk_mgr
, context
, clk_mgr_base
->clks
.ref_dtbclk_khz
);
626 if (dc
->config
.forced_clocks
== false || (force_reset
&& safe_to_lower
)) {
627 if (dpp_clock_lowered
) {
628 /* if clock is being lowered, increase DTO before lowering refclk */
629 dcn32_update_clocks_update_dpp_dto(clk_mgr
, context
, safe_to_lower
);
630 dcn32_update_clocks_update_dentist(clk_mgr
, context
);
631 if (clk_mgr
->smu_present
)
632 dcn32_smu_set_hard_min_by_freq(clk_mgr
, PPCLK_DPPCLK
, khz_to_mhz_ceil(clk_mgr_base
->clks
.dppclk_khz
));
634 /* if clock is being raised, increase refclk before lowering DTO */
635 if (update_dppclk
|| update_dispclk
)
636 dcn32_update_clocks_update_dentist(clk_mgr
, context
);
637 /* There is a check inside dcn20_update_clocks_update_dpp_dto which ensures
638 * that we do not lower dto when it is not safe to lower. We do not need to
639 * compare the current and new dppclk before calling this function.
641 dcn32_update_clocks_update_dpp_dto(clk_mgr
, context
, safe_to_lower
);
645 if (update_dispclk
&& dmcu
&& dmcu
->funcs
->is_dmcu_initialized(dmcu
))
646 /*update dmcu for wait_loop count*/
647 dmcu
->funcs
->set_psr_wait_loop(dmcu
,
648 clk_mgr_base
->clks
.dispclk_khz
/ 1000 / 7);
651 static uint32_t dcn32_get_vco_frequency_from_reg(struct clk_mgr_internal
*clk_mgr
)
653 struct fixed31_32 pll_req
;
654 uint32_t pll_req_reg
= 0;
656 /* get FbMult value */
657 if (ASICREV_IS_GC_11_0_2(clk_mgr
->base
.ctx
->asic_id
.hw_internal_rev
))
658 pll_req_reg
= REG_READ(CLK0_CLK_PLL_REQ
);
660 pll_req_reg
= REG_READ(CLK1_CLK_PLL_REQ
);
662 /* set up a fixed-point number
663 * this works because the int part is on the right edge of the register
664 * and the frac part is on the left edge
666 pll_req
= dc_fixpt_from_int(pll_req_reg
& clk_mgr
->clk_mgr_mask
->FbMult_int
);
667 pll_req
.value
|= pll_req_reg
& clk_mgr
->clk_mgr_mask
->FbMult_frac
;
669 /* multiply by REFCLK period */
670 pll_req
= dc_fixpt_mul_int(pll_req
, clk_mgr
->dfs_ref_freq_khz
);
672 return dc_fixpt_floor(pll_req
);
675 static void dcn32_dump_clk_registers(struct clk_state_registers_and_bypass
*regs_and_bypass
,
676 struct clk_mgr
*clk_mgr_base
, struct clk_log_info
*log_info
)
678 struct clk_mgr_internal
*clk_mgr
= TO_CLK_MGR_INTERNAL(clk_mgr_base
);
679 uint32_t dprefclk_did
= 0;
680 uint32_t dcfclk_did
= 0;
681 uint32_t dtbclk_did
= 0;
682 uint32_t dispclk_did
= 0;
683 uint32_t dppclk_did
= 0;
684 uint32_t target_div
= 0;
686 if (ASICREV_IS_GC_11_0_2(clk_mgr
->base
.ctx
->asic_id
.hw_internal_rev
)) {
687 /* DFS Slice 0 is used for DISPCLK */
688 dispclk_did
= REG_READ(CLK0_CLK0_DFS_CNTL
);
689 /* DFS Slice 1 is used for DPPCLK */
690 dppclk_did
= REG_READ(CLK0_CLK1_DFS_CNTL
);
691 /* DFS Slice 2 is used for DPREFCLK */
692 dprefclk_did
= REG_READ(CLK0_CLK2_DFS_CNTL
);
693 /* DFS Slice 3 is used for DCFCLK */
694 dcfclk_did
= REG_READ(CLK0_CLK3_DFS_CNTL
);
695 /* DFS Slice 4 is used for DTBCLK */
696 dtbclk_did
= REG_READ(CLK0_CLK4_DFS_CNTL
);
698 /* DFS Slice 0 is used for DISPCLK */
699 dispclk_did
= REG_READ(CLK1_CLK0_DFS_CNTL
);
700 /* DFS Slice 1 is used for DPPCLK */
701 dppclk_did
= REG_READ(CLK1_CLK1_DFS_CNTL
);
702 /* DFS Slice 2 is used for DPREFCLK */
703 dprefclk_did
= REG_READ(CLK1_CLK2_DFS_CNTL
);
704 /* DFS Slice 3 is used for DCFCLK */
705 dcfclk_did
= REG_READ(CLK1_CLK3_DFS_CNTL
);
706 /* DFS Slice 4 is used for DTBCLK */
707 dtbclk_did
= REG_READ(CLK1_CLK4_DFS_CNTL
);
710 /* Convert DISPCLK DFS Slice DID to divider*/
711 target_div
= dentist_get_divider_from_did(dispclk_did
);
713 regs_and_bypass
->dispclk
= (DENTIST_DIVIDER_RANGE_SCALE_FACTOR
714 * clk_mgr
->base
.dentist_vco_freq_khz
) / target_div
;
716 /* Convert DISPCLK DFS Slice DID to divider*/
717 target_div
= dentist_get_divider_from_did(dppclk_did
);
719 regs_and_bypass
->dppclk
= (DENTIST_DIVIDER_RANGE_SCALE_FACTOR
720 * clk_mgr
->base
.dentist_vco_freq_khz
) / target_div
;
722 /* Convert DPREFCLK DFS Slice DID to divider*/
723 target_div
= dentist_get_divider_from_did(dprefclk_did
);
724 //Get dprefclk in khz
725 regs_and_bypass
->dprefclk
= (DENTIST_DIVIDER_RANGE_SCALE_FACTOR
726 * clk_mgr
->base
.dentist_vco_freq_khz
) / target_div
;
728 /* Convert DCFCLK DFS Slice DID to divider*/
729 target_div
= dentist_get_divider_from_did(dcfclk_did
);
731 regs_and_bypass
->dcfclk
= (DENTIST_DIVIDER_RANGE_SCALE_FACTOR
732 * clk_mgr
->base
.dentist_vco_freq_khz
) / target_div
;
734 /* Convert DTBCLK DFS Slice DID to divider*/
735 target_div
= dentist_get_divider_from_did(dtbclk_did
);
737 regs_and_bypass
->dtbclk
= (DENTIST_DIVIDER_RANGE_SCALE_FACTOR
738 * clk_mgr
->base
.dentist_vco_freq_khz
) / target_div
;
741 static void dcn32_clock_read_ss_info(struct clk_mgr_internal
*clk_mgr
)
743 struct dc_bios
*bp
= clk_mgr
->base
.ctx
->dc_bios
;
744 int ss_info_num
= bp
->funcs
->get_ss_entry_number(
745 bp
, AS_SIGNAL_TYPE_GPU_PLL
);
748 struct spread_spectrum_info info
= { { 0 } };
749 enum bp_result result
= bp
->funcs
->get_spread_spectrum_info(
750 bp
, AS_SIGNAL_TYPE_GPU_PLL
, 0, &info
);
752 /* SSInfo.spreadSpectrumPercentage !=0 would be sign
755 if (result
== BP_RESULT_OK
&&
756 info
.spread_spectrum_percentage
!= 0) {
757 clk_mgr
->ss_on_dprefclk
= true;
758 clk_mgr
->dprefclk_ss_divider
= info
.spread_percentage_divider
;
760 if (info
.type
.CENTER_MODE
== 0) {
761 /* Currently for DP Reference clock we
762 * need only SS percentage for
765 clk_mgr
->dprefclk_ss_percentage
=
766 info
.spread_spectrum_percentage
;
771 static void dcn32_notify_wm_ranges(struct clk_mgr
*clk_mgr_base
)
774 struct clk_mgr_internal
*clk_mgr
= TO_CLK_MGR_INTERNAL(clk_mgr_base
);
775 WatermarksExternal_t
*table
= (WatermarksExternal_t
*) clk_mgr
->wm_range_table
;
777 if (!clk_mgr
->smu_present
)
783 memset(table
, 0, sizeof(*table
));
785 /* collect valid ranges, place in pmfw table */
786 for (i
= 0; i
< WM_SET_COUNT
; i
++)
787 if (clk_mgr
->base
.bw_params
->wm_table
.nv_entries
[i
].valid
) {
788 table
->Watermarks
.WatermarkRow
[i
].WmSetting
= i
;
789 table
->Watermarks
.WatermarkRow
[i
].Flags
= clk_mgr
->base
.bw_params
->wm_table
.nv_entries
[i
].pmfw_breakdown
.wm_type
;
791 dcn30_smu_set_dram_addr_high(clk_mgr
, clk_mgr
->wm_range_table_addr
>> 32);
792 dcn30_smu_set_dram_addr_low(clk_mgr
, clk_mgr
->wm_range_table_addr
& 0xFFFFFFFF);
793 dcn32_smu_transfer_wm_table_dram_2_smu(clk_mgr
);
796 /* Set min memclk to minimum, either constrained by the current mode or DPM0 */
797 static void dcn32_set_hard_min_memclk(struct clk_mgr
*clk_mgr_base
, bool current_mode
)
799 struct clk_mgr_internal
*clk_mgr
= TO_CLK_MGR_INTERNAL(clk_mgr_base
);
801 if (!clk_mgr
->smu_present
)
805 if (clk_mgr_base
->clks
.p_state_change_support
)
806 dcn32_smu_set_hard_min_by_freq(clk_mgr
, PPCLK_UCLK
,
807 khz_to_mhz_ceil(clk_mgr_base
->clks
.dramclk_khz
));
809 dcn32_smu_set_hard_min_by_freq(clk_mgr
, PPCLK_UCLK
,
810 clk_mgr_base
->bw_params
->max_memclk_mhz
);
812 dcn32_smu_set_hard_min_by_freq(clk_mgr
, PPCLK_UCLK
,
813 clk_mgr_base
->bw_params
->clk_table
.entries
[0].memclk_mhz
);
817 /* Set max memclk to highest DPM value */
818 static void dcn32_set_hard_max_memclk(struct clk_mgr
*clk_mgr_base
)
820 struct clk_mgr_internal
*clk_mgr
= TO_CLK_MGR_INTERNAL(clk_mgr_base
);
822 if (!clk_mgr
->smu_present
)
825 dcn30_smu_set_hard_max_by_freq(clk_mgr
, PPCLK_UCLK
, clk_mgr_base
->bw_params
->max_memclk_mhz
);
828 /* Get current memclk states, update bounding box */
829 static void dcn32_get_memclk_states_from_smu(struct clk_mgr
*clk_mgr_base
)
831 struct clk_mgr_internal
*clk_mgr
= TO_CLK_MGR_INTERNAL(clk_mgr_base
);
832 struct clk_limit_num_entries
*num_entries_per_clk
= &clk_mgr_base
->bw_params
->clk_table
.num_entries_per_clk
;
833 unsigned int num_levels
;
835 if (!clk_mgr
->smu_present
)
838 /* Refresh memclk and fclk states */
839 dcn32_init_single_clock(clk_mgr
, PPCLK_UCLK
,
840 &clk_mgr_base
->bw_params
->clk_table
.entries
[0].memclk_mhz
,
841 &num_entries_per_clk
->num_memclk_levels
);
842 clk_mgr_base
->bw_params
->dc_mode_limit
.memclk_mhz
= dcn30_smu_get_dc_mode_max_dpm_freq(clk_mgr
, PPCLK_UCLK
);
843 clk_mgr_base
->bw_params
->dc_mode_softmax_memclk
= clk_mgr_base
->bw_params
->dc_mode_limit
.memclk_mhz
;
845 /* memclk must have at least one level */
846 num_entries_per_clk
->num_memclk_levels
= num_entries_per_clk
->num_memclk_levels
? num_entries_per_clk
->num_memclk_levels
: 1;
848 dcn32_init_single_clock(clk_mgr
, PPCLK_FCLK
,
849 &clk_mgr_base
->bw_params
->clk_table
.entries
[0].fclk_mhz
,
850 &num_entries_per_clk
->num_fclk_levels
);
851 clk_mgr_base
->bw_params
->dc_mode_limit
.fclk_mhz
= dcn30_smu_get_dc_mode_max_dpm_freq(clk_mgr
, PPCLK_FCLK
);
853 if (num_entries_per_clk
->num_memclk_levels
>= num_entries_per_clk
->num_fclk_levels
) {
854 num_levels
= num_entries_per_clk
->num_memclk_levels
;
856 num_levels
= num_entries_per_clk
->num_fclk_levels
;
858 clk_mgr_base
->bw_params
->max_memclk_mhz
=
859 clk_mgr_base
->bw_params
->clk_table
.entries
[num_entries_per_clk
->num_memclk_levels
- 1].memclk_mhz
;
860 clk_mgr_base
->bw_params
->clk_table
.num_entries
= num_levels
? num_levels
: 1;
862 if (clk_mgr
->dpm_present
&& !num_levels
)
863 clk_mgr
->dpm_present
= false;
865 if (!clk_mgr
->dpm_present
)
866 dcn32_patch_dpm_table(clk_mgr_base
->bw_params
);
869 /* Refresh bounding box */
870 clk_mgr_base
->ctx
->dc
->res_pool
->funcs
->update_bw_bounding_box(
871 clk_mgr
->base
.ctx
->dc
, clk_mgr_base
->bw_params
);
875 static bool dcn32_are_clock_states_equal(struct dc_clocks
*a
,
878 if (a
->dispclk_khz
!= b
->dispclk_khz
)
880 else if (a
->dppclk_khz
!= b
->dppclk_khz
)
882 else if (a
->dcfclk_khz
!= b
->dcfclk_khz
)
884 else if (a
->dcfclk_deep_sleep_khz
!= b
->dcfclk_deep_sleep_khz
)
886 else if (a
->dramclk_khz
!= b
->dramclk_khz
)
888 else if (a
->p_state_change_support
!= b
->p_state_change_support
)
890 else if (a
->fclk_p_state_change_support
!= b
->fclk_p_state_change_support
)
896 static void dcn32_enable_pme_wa(struct clk_mgr
*clk_mgr_base
)
898 struct clk_mgr_internal
*clk_mgr
= TO_CLK_MGR_INTERNAL(clk_mgr_base
);
900 if (!clk_mgr
->smu_present
)
903 dcn32_smu_set_pme_workaround(clk_mgr
);
906 static bool dcn32_is_smu_present(struct clk_mgr
*clk_mgr_base
)
908 struct clk_mgr_internal
*clk_mgr
= TO_CLK_MGR_INTERNAL(clk_mgr_base
);
909 return clk_mgr
->smu_present
;
912 static void dcn32_set_max_memclk(struct clk_mgr
*clk_mgr_base
, unsigned int memclk_mhz
)
914 struct clk_mgr_internal
*clk_mgr
= TO_CLK_MGR_INTERNAL(clk_mgr_base
);
916 if (!clk_mgr
->smu_present
)
919 dcn30_smu_set_hard_max_by_freq(clk_mgr
, PPCLK_UCLK
, memclk_mhz
);
922 static void dcn32_set_min_memclk(struct clk_mgr
*clk_mgr_base
, unsigned int memclk_mhz
)
924 struct clk_mgr_internal
*clk_mgr
= TO_CLK_MGR_INTERNAL(clk_mgr_base
);
926 if (!clk_mgr
->smu_present
)
929 dcn32_smu_set_hard_min_by_freq(clk_mgr
, PPCLK_UCLK
, memclk_mhz
);
932 static struct clk_mgr_funcs dcn32_funcs
= {
933 .get_dp_ref_clk_frequency
= dce12_get_dp_ref_freq_khz
,
934 .get_dtb_ref_clk_frequency
= dcn31_get_dtb_ref_freq_khz
,
935 .update_clocks
= dcn32_update_clocks
,
936 .dump_clk_registers
= dcn32_dump_clk_registers
,
937 .init_clocks
= dcn32_init_clocks
,
938 .notify_wm_ranges
= dcn32_notify_wm_ranges
,
939 .set_hard_min_memclk
= dcn32_set_hard_min_memclk
,
940 .set_hard_max_memclk
= dcn32_set_hard_max_memclk
,
941 .set_max_memclk
= dcn32_set_max_memclk
,
942 .set_min_memclk
= dcn32_set_min_memclk
,
943 .get_memclk_states_from_smu
= dcn32_get_memclk_states_from_smu
,
944 .are_clock_states_equal
= dcn32_are_clock_states_equal
,
945 .enable_pme_wa
= dcn32_enable_pme_wa
,
946 .is_smu_present
= dcn32_is_smu_present
,
947 .get_dispclk_from_dentist
= dcn32_get_dispclk_from_dentist
,
950 void dcn32_clk_mgr_construct(
951 struct dc_context
*ctx
,
952 struct clk_mgr_internal
*clk_mgr
,
953 struct pp_smu_funcs
*pp_smu
,
956 struct clk_log_info log_info
= {0};
958 clk_mgr
->base
.ctx
= ctx
;
959 clk_mgr
->base
.funcs
= &dcn32_funcs
;
960 if (ASICREV_IS_GC_11_0_2(clk_mgr
->base
.ctx
->asic_id
.hw_internal_rev
)) {
961 clk_mgr
->regs
= &clk_mgr_regs_dcn321
;
962 clk_mgr
->clk_mgr_shift
= &clk_mgr_shift_dcn321
;
963 clk_mgr
->clk_mgr_mask
= &clk_mgr_mask_dcn321
;
965 clk_mgr
->regs
= &clk_mgr_regs_dcn32
;
966 clk_mgr
->clk_mgr_shift
= &clk_mgr_shift_dcn32
;
967 clk_mgr
->clk_mgr_mask
= &clk_mgr_mask_dcn32
;
970 clk_mgr
->dccg
= dccg
;
971 clk_mgr
->dfs_bypass_disp_clk
= 0;
973 clk_mgr
->dprefclk_ss_percentage
= 0;
974 clk_mgr
->dprefclk_ss_divider
= 1000;
975 clk_mgr
->ss_on_dprefclk
= false;
976 clk_mgr
->dfs_ref_freq_khz
= 100000;
978 /* Changed from DCN3.2_clock_frequency doc to match
979 * dcn32_dump_clk_registers from 4 * dentist_vco_freq_khz /
980 * dprefclk DID divider
982 clk_mgr
->base
.dprefclk_khz
= 716666;
983 if (ctx
->dc
->debug
.disable_dtb_ref_clk_switch
) {
984 //initialize DTB ref clock value if DPM disabled
985 if (ctx
->dce_version
== DCN_VERSION_3_21
)
986 clk_mgr
->base
.clks
.ref_dtbclk_khz
= 477800;
988 clk_mgr
->base
.clks
.ref_dtbclk_khz
= 268750;
992 /* integer part is now VCO frequency in kHz */
993 clk_mgr
->base
.dentist_vco_freq_khz
= dcn32_get_vco_frequency_from_reg(clk_mgr
);
995 /* in case we don't get a value from the register, use default */
996 if (clk_mgr
->base
.dentist_vco_freq_khz
== 0)
997 clk_mgr
->base
.dentist_vco_freq_khz
= 4300000; /* Updated as per HW docs */
999 dcn32_dump_clk_registers(&clk_mgr
->base
.boot_snapshot
, &clk_mgr
->base
, &log_info
);
1001 if (ctx
->dc
->debug
.disable_dtb_ref_clk_switch
&&
1002 clk_mgr
->base
.clks
.ref_dtbclk_khz
!= clk_mgr
->base
.boot_snapshot
.dtbclk
) {
1003 clk_mgr
->base
.clks
.ref_dtbclk_khz
= clk_mgr
->base
.boot_snapshot
.dtbclk
;
1006 if (clk_mgr
->base
.boot_snapshot
.dprefclk
!= 0) {
1007 clk_mgr
->base
.dprefclk_khz
= clk_mgr
->base
.boot_snapshot
.dprefclk
;
1009 dcn32_clock_read_ss_info(clk_mgr
);
1011 clk_mgr
->dfs_bypass_enabled
= false;
1013 clk_mgr
->smu_present
= false;
1015 clk_mgr
->base
.bw_params
= kzalloc(sizeof(*clk_mgr
->base
.bw_params
), GFP_KERNEL
);
1017 /* need physical address of table to give to PMFW */
1018 clk_mgr
->wm_range_table
= dm_helpers_allocate_gpu_mem(clk_mgr
->base
.ctx
,
1019 DC_MEM_ALLOC_TYPE_GART
, sizeof(WatermarksExternal_t
),
1020 &clk_mgr
->wm_range_table_addr
);
1023 void dcn32_clk_mgr_destroy(struct clk_mgr_internal
*clk_mgr
)
1025 kfree(clk_mgr
->base
.bw_params
);
1027 if (clk_mgr
->wm_range_table
)
1028 dm_helpers_free_gpu_mem(clk_mgr
->base
.ctx
, DC_MEM_ALLOC_TYPE_GART
,
1029 clk_mgr
->wm_range_table
);