/*
 * Copyright 2021 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: AMD
 *
 */
29 #include "clk_mgr_internal.h"
31 // For dce12_get_dp_ref_freq_khz
32 #include "dce100/dce_clk_mgr.h"
33 // For dcn20_update_clocks_update_dpp_dto
34 #include "dcn20/dcn20_clk_mgr.h"
35 #include "dcn31/dcn31_clk_mgr.h"
36 #include "dcn315_clk_mgr.h"
38 #include "core_types.h"
39 #include "dcn315_smu.h"
40 #include "dm_helpers.h"
42 #include "dc_dmub_srv.h"
44 #include "logger_types.h"
/* Route DC_LOG_* output through the clk_mgr's DC context logger. */
#define DC_LOGGER \
	clk_mgr->base.base.ctx->logger

#define TO_CLK_MGR_DCN315(clk_mgr)\
	container_of(clk_mgr, struct clk_mgr_dcn315, base)

/* Requesting this DCFCLK effectively locks out p-state changes (see dcn315_update_clocks). */
#define UNSUPPORTED_DCFCLK 10000000
/* Floor for dppclk/dispclk in kHz; see underflow workaround in dcn315_update_clocks. */
#define MIN_DPP_DISP_CLK 100000
57 static int dcn315_get_active_display_cnt_wa(
59 struct dc_state
*context
)
62 bool tmds_present
= false;
65 for (i
= 0; i
< context
->stream_count
; i
++) {
66 const struct dc_stream_state
*stream
= context
->streams
[i
];
68 if (stream
->signal
== SIGNAL_TYPE_HDMI_TYPE_A
||
69 stream
->signal
== SIGNAL_TYPE_DVI_SINGLE_LINK
||
70 stream
->signal
== SIGNAL_TYPE_DVI_DUAL_LINK
)
74 for (i
= 0; i
< dc
->link_count
; i
++) {
75 const struct dc_link
*link
= dc
->links
[i
];
77 /* abusing the fact that the dig and phy are coupled to see if the phy is enabled */
78 if (link
->link_enc
&& link
->link_enc
->funcs
->is_dig_enabled
&&
79 link
->link_enc
->funcs
->is_dig_enabled(link
->link_enc
))
83 /* WA for hang on HDMI after display off back back on*/
84 if (display_count
== 0 && tmds_present
)
90 static bool should_disable_otg(struct pipe_ctx
*pipe
)
94 if (pipe
->stream
->link
->link_enc
&& pipe
->stream
->link
->link_enc
->funcs
->is_dig_enabled
&&
95 pipe
->stream
->link
->link_enc
->funcs
->is_dig_enabled(pipe
->stream
->link
->link_enc
))
100 static void dcn315_disable_otg_wa(struct clk_mgr
*clk_mgr_base
, struct dc_state
*context
, bool disable
)
102 struct dc
*dc
= clk_mgr_base
->ctx
->dc
;
105 for (i
= 0; i
< dc
->res_pool
->pipe_count
; ++i
) {
106 struct pipe_ctx
*pipe
= &dc
->current_state
->res_ctx
.pipe_ctx
[i
];
108 if (pipe
->top_pipe
|| pipe
->prev_odm_pipe
)
110 if (pipe
->stream
&& (pipe
->stream
->dpms_off
|| pipe
->plane_state
== NULL
||
111 dc_is_virtual_signal(pipe
->stream
->signal
))) {
113 /* This w/a should not trigger when we have a dig active */
114 if (should_disable_otg(pipe
)) {
116 pipe
->stream_res
.tg
->funcs
->immediate_disable_crtc(pipe
->stream_res
.tg
);
117 reset_sync_context_for_pipe(dc
, context
, i
);
119 pipe
->stream_res
.tg
->funcs
->enable_crtc(pipe
->stream_res
.tg
);
125 static void dcn315_update_clocks(struct clk_mgr
*clk_mgr_base
,
126 struct dc_state
*context
,
129 union dmub_rb_cmd cmd
;
130 struct clk_mgr_internal
*clk_mgr
= TO_CLK_MGR_INTERNAL(clk_mgr_base
);
131 struct dc_clocks
*new_clocks
= &context
->bw_ctx
.bw
.dcn
.clk
;
132 struct dc
*dc
= clk_mgr_base
->ctx
->dc
;
134 bool update_dppclk
= false;
135 bool update_dispclk
= false;
136 bool dpp_clock_lowered
= false;
138 if (dc
->work_arounds
.skip_clock_update
)
141 clk_mgr_base
->clks
.zstate_support
= new_clocks
->zstate_support
;
143 * if it is safe to lower, but we are already in the lower state, we don't have to do anything
144 * also if safe to lower is false, we just go in the higher state
146 clk_mgr_base
->clks
.zstate_support
= new_clocks
->zstate_support
;
148 /* check that we're not already in lower */
149 if (clk_mgr_base
->clks
.pwr_state
!= DCN_PWR_STATE_LOW_POWER
) {
150 display_count
= dcn315_get_active_display_cnt_wa(dc
, context
);
151 /* if we can go lower, go lower */
152 if (display_count
== 0) {
153 union display_idle_optimization_u idle_info
= { 0 };
154 idle_info
.idle_info
.df_request_disabled
= 1;
155 idle_info
.idle_info
.phy_ref_clk_off
= 1;
156 idle_info
.idle_info
.s0i2_rdy
= 1;
157 dcn315_smu_set_display_idle_optimization(clk_mgr
, idle_info
.data
);
158 /* update power state */
159 clk_mgr_base
->clks
.pwr_state
= DCN_PWR_STATE_LOW_POWER
;
163 /* check that we're not already in D0 */
164 if (clk_mgr_base
->clks
.pwr_state
!= DCN_PWR_STATE_MISSION_MODE
) {
165 union display_idle_optimization_u idle_info
= { 0 };
166 dcn315_smu_set_display_idle_optimization(clk_mgr
, idle_info
.data
);
167 /* update power state */
168 clk_mgr_base
->clks
.pwr_state
= DCN_PWR_STATE_MISSION_MODE
;
172 /* Lock pstate by requesting unsupported dcfclk if change is unsupported */
173 if (!new_clocks
->p_state_change_support
)
174 new_clocks
->dcfclk_khz
= UNSUPPORTED_DCFCLK
;
175 if (should_set_clock(safe_to_lower
, new_clocks
->dcfclk_khz
, clk_mgr_base
->clks
.dcfclk_khz
)) {
176 clk_mgr_base
->clks
.dcfclk_khz
= new_clocks
->dcfclk_khz
;
177 dcn315_smu_set_hard_min_dcfclk(clk_mgr
, clk_mgr_base
->clks
.dcfclk_khz
);
180 if (should_set_clock(safe_to_lower
,
181 new_clocks
->dcfclk_deep_sleep_khz
, clk_mgr_base
->clks
.dcfclk_deep_sleep_khz
)) {
182 clk_mgr_base
->clks
.dcfclk_deep_sleep_khz
= new_clocks
->dcfclk_deep_sleep_khz
;
183 dcn315_smu_set_min_deep_sleep_dcfclk(clk_mgr
, clk_mgr_base
->clks
.dcfclk_deep_sleep_khz
);
186 // workaround: Limit dppclk to 100Mhz to avoid lower eDP panel switch to plus 4K monitor underflow.
187 if (new_clocks
->dppclk_khz
< MIN_DPP_DISP_CLK
)
188 new_clocks
->dppclk_khz
= MIN_DPP_DISP_CLK
;
189 if (new_clocks
->dispclk_khz
< MIN_DPP_DISP_CLK
)
190 new_clocks
->dispclk_khz
= MIN_DPP_DISP_CLK
;
192 if (should_set_clock(safe_to_lower
, new_clocks
->dppclk_khz
, clk_mgr
->base
.clks
.dppclk_khz
)) {
193 if (clk_mgr
->base
.clks
.dppclk_khz
> new_clocks
->dppclk_khz
)
194 dpp_clock_lowered
= true;
195 clk_mgr_base
->clks
.dppclk_khz
= new_clocks
->dppclk_khz
;
196 update_dppclk
= true;
199 if (should_set_clock(safe_to_lower
, new_clocks
->dispclk_khz
, clk_mgr_base
->clks
.dispclk_khz
)) {
200 /* No need to apply the w/a if we haven't taken over from bios yet */
201 if (clk_mgr_base
->clks
.dispclk_khz
)
202 dcn315_disable_otg_wa(clk_mgr_base
, context
, true);
204 clk_mgr_base
->clks
.dispclk_khz
= new_clocks
->dispclk_khz
;
205 dcn315_smu_set_dispclk(clk_mgr
, clk_mgr_base
->clks
.dispclk_khz
);
206 if (clk_mgr_base
->clks
.dispclk_khz
)
207 dcn315_disable_otg_wa(clk_mgr_base
, context
, false);
209 update_dispclk
= true;
212 if (dpp_clock_lowered
) {
213 // increase per DPP DTO before lowering global dppclk
214 dcn20_update_clocks_update_dpp_dto(clk_mgr
, context
, safe_to_lower
);
215 dcn315_smu_set_dppclk(clk_mgr
, clk_mgr_base
->clks
.dppclk_khz
);
217 // increase global DPPCLK before lowering per DPP DTO
218 if (update_dppclk
|| update_dispclk
)
219 dcn315_smu_set_dppclk(clk_mgr
, clk_mgr_base
->clks
.dppclk_khz
);
220 // always update dtos unless clock is lowered and not safe to lower
221 if (new_clocks
->dppclk_khz
>= dc
->current_state
->bw_ctx
.bw
.dcn
.clk
.dppclk_khz
)
222 dcn20_update_clocks_update_dpp_dto(clk_mgr
, context
, safe_to_lower
);
225 // notify DMCUB of latest clocks
226 memset(&cmd
, 0, sizeof(cmd
));
227 cmd
.notify_clocks
.header
.type
= DMUB_CMD__CLK_MGR
;
228 cmd
.notify_clocks
.header
.sub_type
= DMUB_CMD__CLK_MGR_NOTIFY_CLOCKS
;
229 cmd
.notify_clocks
.clocks
.dcfclk_khz
= clk_mgr_base
->clks
.dcfclk_khz
;
230 cmd
.notify_clocks
.clocks
.dcfclk_deep_sleep_khz
=
231 clk_mgr_base
->clks
.dcfclk_deep_sleep_khz
;
232 cmd
.notify_clocks
.clocks
.dispclk_khz
= clk_mgr_base
->clks
.dispclk_khz
;
233 cmd
.notify_clocks
.clocks
.dppclk_khz
= clk_mgr_base
->clks
.dppclk_khz
;
235 dm_execute_dmub_cmd(dc
->ctx
, &cmd
, DM_DMUB_WAIT_TYPE_WAIT
);
/* Stub: clock register snapshot is not implemented for DCN 3.15. */
static void dcn315_dump_clk_registers(struct clk_state_registers_and_bypass *regs_and_bypass,
		struct clk_mgr *clk_mgr_base, struct clk_log_info *log_info)
{
}
244 static struct clk_bw_params dcn315_bw_params
= {
245 .vram_type
= Ddr4MemType
,
254 .phyclk_d18_mhz
= 667,
262 .phyclk_d18_mhz
= 667,
270 .phyclk_d18_mhz
= 667,
278 .phyclk_d18_mhz
= 667,
286 .phyclk_d18_mhz
= 667,
295 static struct wm_table ddr5_wm_table
= {
299 .wm_type
= WM_TYPE_PSTATE_CHG
,
300 .pstate_latency_us
= 129.0,
301 .sr_exit_time_us
= 11.5,
302 .sr_enter_plus_exit_time_us
= 14.5,
307 .wm_type
= WM_TYPE_PSTATE_CHG
,
308 .pstate_latency_us
= 129.0,
309 .sr_exit_time_us
= 11.5,
310 .sr_enter_plus_exit_time_us
= 14.5,
315 .wm_type
= WM_TYPE_PSTATE_CHG
,
316 .pstate_latency_us
= 129.0,
317 .sr_exit_time_us
= 11.5,
318 .sr_enter_plus_exit_time_us
= 14.5,
323 .wm_type
= WM_TYPE_PSTATE_CHG
,
324 .pstate_latency_us
= 129.0,
325 .sr_exit_time_us
= 11.5,
326 .sr_enter_plus_exit_time_us
= 14.5,
332 static struct wm_table lpddr5_wm_table
= {
336 .wm_type
= WM_TYPE_PSTATE_CHG
,
337 .pstate_latency_us
= 129.0,
338 .sr_exit_time_us
= 11.5,
339 .sr_enter_plus_exit_time_us
= 14.5,
344 .wm_type
= WM_TYPE_PSTATE_CHG
,
345 .pstate_latency_us
= 129.0,
346 .sr_exit_time_us
= 11.5,
347 .sr_enter_plus_exit_time_us
= 14.5,
352 .wm_type
= WM_TYPE_PSTATE_CHG
,
353 .pstate_latency_us
= 129.0,
354 .sr_exit_time_us
= 11.5,
355 .sr_enter_plus_exit_time_us
= 14.5,
360 .wm_type
= WM_TYPE_PSTATE_CHG
,
361 .pstate_latency_us
= 129.0,
362 .sr_exit_time_us
= 11.5,
363 .sr_enter_plus_exit_time_us
= 14.5,
369 /* Temporary Place holder until we can get them from fuse */
370 static DpmClocks_315_t dummy_clocks
= { 0 };
371 static struct dcn315_watermarks dummy_wms
= { 0 };
373 static void dcn315_build_watermark_ranges(struct clk_bw_params
*bw_params
, struct dcn315_watermarks
*table
)
375 int i
, num_valid_sets
;
379 for (i
= 0; i
< WM_SET_COUNT
; i
++) {
380 /* skip empty entries, the smu array has no holes*/
381 if (!bw_params
->wm_table
.entries
[i
].valid
)
384 table
->WatermarkRow
[WM_DCFCLK
][num_valid_sets
].WmSetting
= bw_params
->wm_table
.entries
[i
].wm_inst
;
385 table
->WatermarkRow
[WM_DCFCLK
][num_valid_sets
].WmType
= bw_params
->wm_table
.entries
[i
].wm_type
;
386 /* We will not select WM based on fclk, so leave it as unconstrained */
387 table
->WatermarkRow
[WM_DCFCLK
][num_valid_sets
].MinClock
= 0;
388 table
->WatermarkRow
[WM_DCFCLK
][num_valid_sets
].MaxClock
= 0xFFFF;
390 if (table
->WatermarkRow
[WM_DCFCLK
][num_valid_sets
].WmType
== WM_TYPE_PSTATE_CHG
) {
392 table
->WatermarkRow
[WM_DCFCLK
][num_valid_sets
].MinMclk
= 0;
394 /* add 1 to make it non-overlapping with next lvl */
395 table
->WatermarkRow
[WM_DCFCLK
][num_valid_sets
].MinMclk
=
396 bw_params
->clk_table
.entries
[i
- 1].dcfclk_mhz
+ 1;
398 table
->WatermarkRow
[WM_DCFCLK
][num_valid_sets
].MaxMclk
=
399 bw_params
->clk_table
.entries
[i
].dcfclk_mhz
;
402 /* unconstrained for memory retraining */
403 table
->WatermarkRow
[WM_DCFCLK
][num_valid_sets
].MinClock
= 0;
404 table
->WatermarkRow
[WM_DCFCLK
][num_valid_sets
].MaxClock
= 0xFFFF;
406 /* Modify previous watermark range to cover up to max */
407 table
->WatermarkRow
[WM_DCFCLK
][num_valid_sets
- 1].MaxClock
= 0xFFFF;
412 ASSERT(num_valid_sets
!= 0); /* Must have at least one set of valid watermarks */
414 /* modify the min and max to make sure we cover the whole range*/
415 table
->WatermarkRow
[WM_DCFCLK
][0].MinMclk
= 0;
416 table
->WatermarkRow
[WM_DCFCLK
][0].MinClock
= 0;
417 table
->WatermarkRow
[WM_DCFCLK
][num_valid_sets
- 1].MaxMclk
= 0xFFFF;
418 table
->WatermarkRow
[WM_DCFCLK
][num_valid_sets
- 1].MaxClock
= 0xFFFF;
420 /* This is for writeback only, does not matter currently as no writeback support*/
421 table
->WatermarkRow
[WM_SOCCLK
][0].WmSetting
= WM_A
;
422 table
->WatermarkRow
[WM_SOCCLK
][0].MinClock
= 0;
423 table
->WatermarkRow
[WM_SOCCLK
][0].MaxClock
= 0xFFFF;
424 table
->WatermarkRow
[WM_SOCCLK
][0].MinMclk
= 0;
425 table
->WatermarkRow
[WM_SOCCLK
][0].MaxMclk
= 0xFFFF;
428 static void dcn315_notify_wm_ranges(struct clk_mgr
*clk_mgr_base
)
430 struct clk_mgr_internal
*clk_mgr
= TO_CLK_MGR_INTERNAL(clk_mgr_base
);
431 struct clk_mgr_dcn315
*clk_mgr_dcn315
= TO_CLK_MGR_DCN315(clk_mgr
);
432 struct dcn315_watermarks
*table
= clk_mgr_dcn315
->smu_wm_set
.wm_set
;
434 if (!clk_mgr
->smu_ver
)
437 if (!table
|| clk_mgr_dcn315
->smu_wm_set
.mc_address
.quad_part
== 0)
440 memset(table
, 0, sizeof(*table
));
442 dcn315_build_watermark_ranges(clk_mgr_base
->bw_params
, table
);
444 dcn315_smu_set_dram_addr_high(clk_mgr
,
445 clk_mgr_dcn315
->smu_wm_set
.mc_address
.high_part
);
446 dcn315_smu_set_dram_addr_low(clk_mgr
,
447 clk_mgr_dcn315
->smu_wm_set
.mc_address
.low_part
);
448 dcn315_smu_transfer_wm_table_dram_2_smu(clk_mgr
);
451 static void dcn315_get_dpm_table_from_smu(struct clk_mgr_internal
*clk_mgr
,
452 struct dcn315_smu_dpm_clks
*smu_dpm_clks
)
454 DpmClocks_315_t
*table
= smu_dpm_clks
->dpm_clks
;
456 if (!clk_mgr
->smu_ver
)
459 if (!table
|| smu_dpm_clks
->mc_address
.quad_part
== 0)
462 memset(table
, 0, sizeof(*table
));
464 dcn315_smu_set_dram_addr_high(clk_mgr
,
465 smu_dpm_clks
->mc_address
.high_part
);
466 dcn315_smu_set_dram_addr_low(clk_mgr
,
467 smu_dpm_clks
->mc_address
.low_part
);
468 dcn315_smu_transfer_dpm_table_smu_2_dram(clk_mgr
);
471 static void dcn315_clk_mgr_helper_populate_bw_params(
472 struct clk_mgr_internal
*clk_mgr
,
473 struct integrated_info
*bios_info
,
474 const DpmClocks_315_t
*clock_table
)
477 struct clk_bw_params
*bw_params
= clk_mgr
->base
.bw_params
;
478 uint32_t max_pstate
= clock_table
->NumDfPstatesEnabled
- 1;
479 struct clk_limit_table_entry def_max
= bw_params
->clk_table
.entries
[bw_params
->clk_table
.num_entries
- 1];
481 /* For 315 we want to base clock table on dcfclk, need at least one entry regardless of pmfw table */
482 for (i
= 0; i
< clock_table
->NumDcfClkLevelsEnabled
; i
++) {
485 /* DF table is sorted with clocks decreasing */
486 for (j
= clock_table
->NumDfPstatesEnabled
- 2; j
>= 0; j
--) {
487 if (clock_table
->DfPstateTable
[j
].Voltage
<= clock_table
->SocVoltage
[i
])
490 /* Max DCFCLK should match up with max pstate */
491 if (i
== clock_table
->NumDcfClkLevelsEnabled
- 1)
494 /* First search defaults for the clocks we don't read using closest lower or equal default dcfclk */
495 for (j
= bw_params
->clk_table
.num_entries
- 1; j
> 0; j
--)
496 if (bw_params
->clk_table
.entries
[j
].dcfclk_mhz
<= clock_table
->DcfClocks
[i
])
498 bw_params
->clk_table
.entries
[i
].phyclk_mhz
= bw_params
->clk_table
.entries
[j
].phyclk_mhz
;
499 bw_params
->clk_table
.entries
[i
].phyclk_d18_mhz
= bw_params
->clk_table
.entries
[j
].phyclk_d18_mhz
;
500 bw_params
->clk_table
.entries
[i
].dtbclk_mhz
= bw_params
->clk_table
.entries
[j
].dtbclk_mhz
;
502 /* Now update clocks we do read */
503 bw_params
->clk_table
.entries
[i
].fclk_mhz
= clock_table
->DfPstateTable
[max_pstate
].FClk
;
504 bw_params
->clk_table
.entries
[i
].memclk_mhz
= clock_table
->DfPstateTable
[max_pstate
].MemClk
;
505 bw_params
->clk_table
.entries
[i
].voltage
= clock_table
->SocVoltage
[i
];
506 bw_params
->clk_table
.entries
[i
].dcfclk_mhz
= clock_table
->DcfClocks
[i
];
507 bw_params
->clk_table
.entries
[i
].socclk_mhz
= clock_table
->SocClocks
[i
];
508 bw_params
->clk_table
.entries
[i
].dispclk_mhz
= clock_table
->DispClocks
[i
];
509 bw_params
->clk_table
.entries
[i
].dppclk_mhz
= clock_table
->DppClocks
[i
];
510 bw_params
->clk_table
.entries
[i
].wck_ratio
= 1;
513 /* Make sure to include at least one entry */
515 bw_params
->clk_table
.entries
[i
].fclk_mhz
= clock_table
->DfPstateTable
[0].FClk
;
516 bw_params
->clk_table
.entries
[i
].memclk_mhz
= clock_table
->DfPstateTable
[0].MemClk
;
517 bw_params
->clk_table
.entries
[i
].voltage
= clock_table
->DfPstateTable
[0].Voltage
;
518 bw_params
->clk_table
.entries
[i
].dcfclk_mhz
= clock_table
->DcfClocks
[0];
519 bw_params
->clk_table
.entries
[i
].wck_ratio
= 1;
521 } else if (clock_table
->NumDcfClkLevelsEnabled
!= clock_table
->NumSocClkLevelsEnabled
) {
522 bw_params
->clk_table
.entries
[i
-1].voltage
= clock_table
->SocVoltage
[clock_table
->NumSocClkLevelsEnabled
- 1];
523 bw_params
->clk_table
.entries
[i
-1].socclk_mhz
= clock_table
->SocClocks
[clock_table
->NumSocClkLevelsEnabled
- 1];
524 bw_params
->clk_table
.entries
[i
-1].dispclk_mhz
= clock_table
->DispClocks
[clock_table
->NumDispClkLevelsEnabled
- 1];
525 bw_params
->clk_table
.entries
[i
-1].dppclk_mhz
= clock_table
->DppClocks
[clock_table
->NumDispClkLevelsEnabled
- 1];
527 bw_params
->clk_table
.num_entries
= i
;
529 /* Set any 0 clocks to max default setting. Not an issue for
530 * power since we aren't doing switching in such case anyway
532 for (i
= 0; i
< bw_params
->clk_table
.num_entries
; i
++) {
533 if (!bw_params
->clk_table
.entries
[i
].fclk_mhz
) {
534 bw_params
->clk_table
.entries
[i
].fclk_mhz
= def_max
.fclk_mhz
;
535 bw_params
->clk_table
.entries
[i
].memclk_mhz
= def_max
.memclk_mhz
;
536 bw_params
->clk_table
.entries
[i
].voltage
= def_max
.voltage
;
538 if (!bw_params
->clk_table
.entries
[i
].dcfclk_mhz
)
539 bw_params
->clk_table
.entries
[i
].dcfclk_mhz
= def_max
.dcfclk_mhz
;
540 if (!bw_params
->clk_table
.entries
[i
].socclk_mhz
)
541 bw_params
->clk_table
.entries
[i
].socclk_mhz
= def_max
.socclk_mhz
;
542 if (!bw_params
->clk_table
.entries
[i
].dispclk_mhz
)
543 bw_params
->clk_table
.entries
[i
].dispclk_mhz
= def_max
.dispclk_mhz
;
544 if (!bw_params
->clk_table
.entries
[i
].dppclk_mhz
)
545 bw_params
->clk_table
.entries
[i
].dppclk_mhz
= def_max
.dppclk_mhz
;
546 if (!bw_params
->clk_table
.entries
[i
].phyclk_mhz
)
547 bw_params
->clk_table
.entries
[i
].phyclk_mhz
= def_max
.phyclk_mhz
;
548 if (!bw_params
->clk_table
.entries
[i
].phyclk_d18_mhz
)
549 bw_params
->clk_table
.entries
[i
].phyclk_d18_mhz
= def_max
.phyclk_d18_mhz
;
550 if (!bw_params
->clk_table
.entries
[i
].dtbclk_mhz
)
551 bw_params
->clk_table
.entries
[i
].dtbclk_mhz
= def_max
.dtbclk_mhz
;
554 /* Make sure all highest default clocks are included*/
555 ASSERT(bw_params
->clk_table
.entries
[i
-1].phyclk_mhz
== def_max
.phyclk_mhz
);
556 ASSERT(bw_params
->clk_table
.entries
[i
-1].phyclk_d18_mhz
== def_max
.phyclk_d18_mhz
);
557 ASSERT(bw_params
->clk_table
.entries
[i
-1].dtbclk_mhz
== def_max
.dtbclk_mhz
);
558 ASSERT(bw_params
->clk_table
.entries
[i
-1].dcfclk_mhz
);
559 bw_params
->vram_type
= bios_info
->memory_type
;
560 bw_params
->num_channels
= bios_info
->ma_channel_number
;
561 bw_params
->dram_channel_width_bytes
= bios_info
->memory_type
== 0x22 ? 8 : 4;
563 for (i
= 0; i
< WM_SET_COUNT
; i
++) {
564 bw_params
->wm_table
.entries
[i
].wm_inst
= i
;
566 if (i
>= bw_params
->clk_table
.num_entries
) {
567 bw_params
->wm_table
.entries
[i
].valid
= false;
571 bw_params
->wm_table
.entries
[i
].wm_type
= WM_TYPE_PSTATE_CHG
;
572 bw_params
->wm_table
.entries
[i
].valid
= true;
/* Forward the PME workaround request to the DCN 3.15 SMU. */
static void dcn315_enable_pme_wa(struct clk_mgr *clk_mgr_base)
{
	struct clk_mgr_internal *clk_mgr = TO_CLK_MGR_INTERNAL(clk_mgr_base);

	dcn315_smu_enable_pme_wa(clk_mgr);
}
583 static struct clk_mgr_funcs dcn315_funcs
= {
584 .get_dp_ref_clk_frequency
= dce12_get_dp_ref_freq_khz
,
585 .get_dtb_ref_clk_frequency
= dcn31_get_dtb_ref_freq_khz
,
586 .update_clocks
= dcn315_update_clocks
,
587 .init_clocks
= dcn31_init_clocks
,
588 .enable_pme_wa
= dcn315_enable_pme_wa
,
589 .are_clock_states_equal
= dcn31_are_clock_states_equal
,
590 .notify_wm_ranges
= dcn315_notify_wm_ranges
592 extern struct clk_mgr_funcs dcn3_fpga_funcs
;
594 void dcn315_clk_mgr_construct(
595 struct dc_context
*ctx
,
596 struct clk_mgr_dcn315
*clk_mgr
,
597 struct pp_smu_funcs
*pp_smu
,
600 struct dcn315_smu_dpm_clks smu_dpm_clks
= { 0 };
601 struct clk_log_info log_info
= {0};
603 clk_mgr
->base
.base
.ctx
= ctx
;
604 clk_mgr
->base
.base
.funcs
= &dcn315_funcs
;
606 clk_mgr
->base
.pp_smu
= pp_smu
;
608 clk_mgr
->base
.dccg
= dccg
;
609 clk_mgr
->base
.dfs_bypass_disp_clk
= 0;
611 clk_mgr
->base
.dprefclk_ss_percentage
= 0;
612 clk_mgr
->base
.dprefclk_ss_divider
= 1000;
613 clk_mgr
->base
.ss_on_dprefclk
= false;
614 clk_mgr
->base
.dfs_ref_freq_khz
= 48000;
616 clk_mgr
->smu_wm_set
.wm_set
= (struct dcn315_watermarks
*)dm_helpers_allocate_gpu_mem(
617 clk_mgr
->base
.base
.ctx
,
618 DC_MEM_ALLOC_TYPE_FRAME_BUFFER
,
619 sizeof(struct dcn315_watermarks
),
620 &clk_mgr
->smu_wm_set
.mc_address
.quad_part
);
622 if (!clk_mgr
->smu_wm_set
.wm_set
) {
623 clk_mgr
->smu_wm_set
.wm_set
= &dummy_wms
;
624 clk_mgr
->smu_wm_set
.mc_address
.quad_part
= 0;
626 ASSERT(clk_mgr
->smu_wm_set
.wm_set
);
628 smu_dpm_clks
.dpm_clks
= (DpmClocks_315_t
*)dm_helpers_allocate_gpu_mem(
629 clk_mgr
->base
.base
.ctx
,
630 DC_MEM_ALLOC_TYPE_FRAME_BUFFER
,
631 sizeof(DpmClocks_315_t
),
632 &smu_dpm_clks
.mc_address
.quad_part
);
634 if (smu_dpm_clks
.dpm_clks
== NULL
) {
635 smu_dpm_clks
.dpm_clks
= &dummy_clocks
;
636 smu_dpm_clks
.mc_address
.quad_part
= 0;
639 ASSERT(smu_dpm_clks
.dpm_clks
);
641 clk_mgr
->base
.smu_ver
= dcn315_smu_get_smu_version(&clk_mgr
->base
);
643 if (clk_mgr
->base
.smu_ver
> 0)
644 clk_mgr
->base
.smu_present
= true;
646 if (ctx
->dc_bios
->integrated_info
->memory_type
== LpDdr5MemType
) {
647 dcn315_bw_params
.wm_table
= lpddr5_wm_table
;
649 dcn315_bw_params
.wm_table
= ddr5_wm_table
;
651 /* Saved clocks configured at boot for debug purposes */
652 dcn315_dump_clk_registers(&clk_mgr
->base
.base
.boot_snapshot
,
653 &clk_mgr
->base
.base
, &log_info
);
655 clk_mgr
->base
.base
.dprefclk_khz
= 600000;
656 clk_mgr
->base
.base
.dprefclk_khz
= dcn315_smu_get_dpref_clk(&clk_mgr
->base
);
657 clk_mgr
->base
.base
.clks
.ref_dtbclk_khz
= clk_mgr
->base
.base
.dprefclk_khz
;
658 dce_clock_read_ss_info(&clk_mgr
->base
);
659 clk_mgr
->base
.base
.clks
.ref_dtbclk_khz
= dce_adjust_dp_ref_freq_for_ss(&clk_mgr
->base
, clk_mgr
->base
.base
.dprefclk_khz
);
661 clk_mgr
->base
.base
.bw_params
= &dcn315_bw_params
;
663 if (clk_mgr
->base
.base
.ctx
->dc
->debug
.pstate_enabled
) {
666 dcn315_get_dpm_table_from_smu(&clk_mgr
->base
, &smu_dpm_clks
);
667 DC_LOG_SMU("NumDcfClkLevelsEnabled: %d\n"
668 "NumDispClkLevelsEnabled: %d\n"
669 "NumSocClkLevelsEnabled: %d\n"
670 "VcnClkLevelsEnabled: %d\n"
671 "NumDfPst atesEnabled: %d\n"
674 smu_dpm_clks
.dpm_clks
->NumDcfClkLevelsEnabled
,
675 smu_dpm_clks
.dpm_clks
->NumDispClkLevelsEnabled
,
676 smu_dpm_clks
.dpm_clks
->NumSocClkLevelsEnabled
,
677 smu_dpm_clks
.dpm_clks
->VcnClkLevelsEnabled
,
678 smu_dpm_clks
.dpm_clks
->NumDfPstatesEnabled
,
679 smu_dpm_clks
.dpm_clks
->MinGfxClk
,
680 smu_dpm_clks
.dpm_clks
->MaxGfxClk
);
681 for (i
= 0; i
< smu_dpm_clks
.dpm_clks
->NumDcfClkLevelsEnabled
; i
++) {
682 DC_LOG_SMU("smu_dpm_clks.dpm_clks->DcfClocks[%d] = %d\n",
684 smu_dpm_clks
.dpm_clks
->DcfClocks
[i
]);
686 for (i
= 0; i
< smu_dpm_clks
.dpm_clks
->NumDispClkLevelsEnabled
; i
++) {
687 DC_LOG_SMU("smu_dpm_clks.dpm_clks->DispClocks[%d] = %d\n",
688 i
, smu_dpm_clks
.dpm_clks
->DispClocks
[i
]);
690 for (i
= 0; i
< smu_dpm_clks
.dpm_clks
->NumSocClkLevelsEnabled
; i
++) {
691 DC_LOG_SMU("smu_dpm_clks.dpm_clks->SocClocks[%d] = %d\n",
692 i
, smu_dpm_clks
.dpm_clks
->SocClocks
[i
]);
694 for (i
= 0; i
< NUM_SOC_VOLTAGE_LEVELS
; i
++)
695 DC_LOG_SMU("smu_dpm_clks.dpm_clks->SocVoltage[%d] = %d\n",
696 i
, smu_dpm_clks
.dpm_clks
->SocVoltage
[i
]);
698 for (i
= 0; i
< NUM_DF_PSTATE_LEVELS
; i
++) {
699 DC_LOG_SMU("smu_dpm_clks.dpm_clks.DfPstateTable[%d].FClk = %d\n"
700 "smu_dpm_clks.dpm_clks->DfPstateTable[%d].MemClk= %d\n"
701 "smu_dpm_clks.dpm_clks->DfPstateTable[%d].Voltage = %d\n",
702 i
, smu_dpm_clks
.dpm_clks
->DfPstateTable
[i
].FClk
,
703 i
, smu_dpm_clks
.dpm_clks
->DfPstateTable
[i
].MemClk
,
704 i
, smu_dpm_clks
.dpm_clks
->DfPstateTable
[i
].Voltage
);
707 if (ctx
->dc_bios
&& ctx
->dc_bios
->integrated_info
) {
708 dcn315_clk_mgr_helper_populate_bw_params(
710 ctx
->dc_bios
->integrated_info
,
711 smu_dpm_clks
.dpm_clks
);
715 if (smu_dpm_clks
.dpm_clks
&& smu_dpm_clks
.mc_address
.quad_part
!= 0)
716 dm_helpers_free_gpu_mem(clk_mgr
->base
.base
.ctx
, DC_MEM_ALLOC_TYPE_FRAME_BUFFER
,
717 smu_dpm_clks
.dpm_clks
);
720 void dcn315_clk_mgr_destroy(struct clk_mgr_internal
*clk_mgr_int
)
722 struct clk_mgr_dcn315
*clk_mgr
= TO_CLK_MGR_DCN315(clk_mgr_int
);
724 if (clk_mgr
->smu_wm_set
.wm_set
&& clk_mgr
->smu_wm_set
.mc_address
.quad_part
!= 0)
725 dm_helpers_free_gpu_mem(clk_mgr_int
->base
.ctx
, DC_MEM_ALLOC_TYPE_FRAME_BUFFER
,
726 clk_mgr
->smu_wm_set
.wm_set
);