2 * Copyright © 2018 Intel Corporation
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
21 * DEALINGS IN THE SOFTWARE.
24 * Madhav Chauhan <madhav.chauhan@intel.com>
25 * Jani Nikula <jani.nikula@intel.com>
28 #include <drm/drm_mipi_dsi.h>
29 #include <drm/drm_atomic_helper.h>
30 #include "intel_dsi.h"
32 static inline int header_credits_available(struct drm_i915_private
*dev_priv
,
33 enum transcoder dsi_trans
)
35 return (I915_READ(DSI_CMD_TXCTL(dsi_trans
)) & FREE_HEADER_CREDIT_MASK
)
36 >> FREE_HEADER_CREDIT_SHIFT
;
39 static inline int payload_credits_available(struct drm_i915_private
*dev_priv
,
40 enum transcoder dsi_trans
)
42 return (I915_READ(DSI_CMD_TXCTL(dsi_trans
)) & FREE_PLOAD_CREDIT_MASK
)
43 >> FREE_PLOAD_CREDIT_SHIFT
;
46 static void wait_for_header_credits(struct drm_i915_private
*dev_priv
,
47 enum transcoder dsi_trans
)
49 if (wait_for_us(header_credits_available(dev_priv
, dsi_trans
) >=
50 MAX_HEADER_CREDIT
, 100))
51 DRM_ERROR("DSI header credits not released\n");
54 static void wait_for_payload_credits(struct drm_i915_private
*dev_priv
,
55 enum transcoder dsi_trans
)
57 if (wait_for_us(payload_credits_available(dev_priv
, dsi_trans
) >=
58 MAX_PLOAD_CREDIT
, 100))
59 DRM_ERROR("DSI payload credits not released\n");
62 static enum transcoder
dsi_port_to_transcoder(enum port port
)
65 return TRANSCODER_DSI_0
;
67 return TRANSCODER_DSI_1
;
70 static void wait_for_cmds_dispatched_to_panel(struct intel_encoder
*encoder
)
72 struct drm_i915_private
*dev_priv
= to_i915(encoder
->base
.dev
);
73 struct intel_dsi
*intel_dsi
= enc_to_intel_dsi(&encoder
->base
);
74 struct mipi_dsi_device
*dsi
;
76 enum transcoder dsi_trans
;
79 /* wait for header/payload credits to be released */
80 for_each_dsi_port(port
, intel_dsi
->ports
) {
81 dsi_trans
= dsi_port_to_transcoder(port
);
82 wait_for_header_credits(dev_priv
, dsi_trans
);
83 wait_for_payload_credits(dev_priv
, dsi_trans
);
86 /* send nop DCS command */
87 for_each_dsi_port(port
, intel_dsi
->ports
) {
88 dsi
= intel_dsi
->dsi_hosts
[port
]->device
;
89 dsi
->mode_flags
|= MIPI_DSI_MODE_LPM
;
91 ret
= mipi_dsi_dcs_nop(dsi
);
93 DRM_ERROR("error sending DCS NOP command\n");
96 /* wait for header credits to be released */
97 for_each_dsi_port(port
, intel_dsi
->ports
) {
98 dsi_trans
= dsi_port_to_transcoder(port
);
99 wait_for_header_credits(dev_priv
, dsi_trans
);
102 /* wait for LP TX in progress bit to be cleared */
103 for_each_dsi_port(port
, intel_dsi
->ports
) {
104 dsi_trans
= dsi_port_to_transcoder(port
);
105 if (wait_for_us(!(I915_READ(DSI_LP_MSG(dsi_trans
)) &
106 LPTX_IN_PROGRESS
), 20))
107 DRM_ERROR("LPTX bit not cleared\n");
111 static bool add_payld_to_queue(struct intel_dsi_host
*host
, const u8
*data
,
114 struct intel_dsi
*intel_dsi
= host
->intel_dsi
;
115 struct drm_i915_private
*dev_priv
= to_i915(intel_dsi
->base
.base
.dev
);
116 enum transcoder dsi_trans
= dsi_port_to_transcoder(host
->port
);
120 for (i
= 0; i
< len
; i
+= 4) {
123 free_credits
= payload_credits_available(dev_priv
, dsi_trans
);
124 if (free_credits
< 1) {
125 DRM_ERROR("Payload credit not available\n");
129 for (j
= 0; j
< min_t(u32
, len
- i
, 4); j
++)
130 tmp
|= *data
++ << 8 * j
;
132 I915_WRITE(DSI_CMD_TXPYLD(dsi_trans
), tmp
);
138 static int dsi_send_pkt_hdr(struct intel_dsi_host
*host
,
139 struct mipi_dsi_packet pkt
, bool enable_lpdt
)
141 struct intel_dsi
*intel_dsi
= host
->intel_dsi
;
142 struct drm_i915_private
*dev_priv
= to_i915(intel_dsi
->base
.base
.dev
);
143 enum transcoder dsi_trans
= dsi_port_to_transcoder(host
->port
);
147 /* check if header credit available */
148 free_credits
= header_credits_available(dev_priv
, dsi_trans
);
149 if (free_credits
< 1) {
150 DRM_ERROR("send pkt header failed, not enough hdr credits\n");
154 tmp
= I915_READ(DSI_CMD_TXHDR(dsi_trans
));
157 tmp
|= PAYLOAD_PRESENT
;
159 tmp
&= ~PAYLOAD_PRESENT
;
161 tmp
&= ~VBLANK_FENCE
;
164 tmp
|= LP_DATA_TRANSFER
;
166 tmp
&= ~(PARAM_WC_MASK
| VC_MASK
| DT_MASK
);
167 tmp
|= ((pkt
.header
[0] & VC_MASK
) << VC_SHIFT
);
168 tmp
|= ((pkt
.header
[0] & DT_MASK
) << DT_SHIFT
);
169 tmp
|= (pkt
.header
[1] << PARAM_WC_LOWER_SHIFT
);
170 tmp
|= (pkt
.header
[2] << PARAM_WC_UPPER_SHIFT
);
171 I915_WRITE(DSI_CMD_TXHDR(dsi_trans
), tmp
);
176 static int dsi_send_pkt_payld(struct intel_dsi_host
*host
,
177 struct mipi_dsi_packet pkt
)
179 /* payload queue can accept *256 bytes*, check limit */
180 if (pkt
.payload_length
> MAX_PLOAD_CREDIT
* 4) {
181 DRM_ERROR("payload size exceeds max queue limit\n");
185 /* load data into command payload queue */
186 if (!add_payld_to_queue(host
, pkt
.payload
,
187 pkt
.payload_length
)) {
188 DRM_ERROR("adding payload to queue failed\n");
195 static void dsi_program_swing_and_deemphasis(struct intel_encoder
*encoder
)
197 struct drm_i915_private
*dev_priv
= to_i915(encoder
->base
.dev
);
198 struct intel_dsi
*intel_dsi
= enc_to_intel_dsi(&encoder
->base
);
203 for_each_dsi_port(port
, intel_dsi
->ports
) {
206 * Program voltage swing and pre-emphasis level values as per
207 * table in BSPEC under DDI buffer programing
209 tmp
= I915_READ(ICL_PORT_TX_DW5_LN0(port
));
210 tmp
&= ~(SCALING_MODE_SEL_MASK
| RTERM_SELECT_MASK
);
211 tmp
|= SCALING_MODE_SEL(0x2);
212 tmp
|= TAP2_DISABLE
| TAP3_DISABLE
;
213 tmp
|= RTERM_SELECT(0x6);
214 I915_WRITE(ICL_PORT_TX_DW5_GRP(port
), tmp
);
216 tmp
= I915_READ(ICL_PORT_TX_DW5_AUX(port
));
217 tmp
&= ~(SCALING_MODE_SEL_MASK
| RTERM_SELECT_MASK
);
218 tmp
|= SCALING_MODE_SEL(0x2);
219 tmp
|= TAP2_DISABLE
| TAP3_DISABLE
;
220 tmp
|= RTERM_SELECT(0x6);
221 I915_WRITE(ICL_PORT_TX_DW5_AUX(port
), tmp
);
223 tmp
= I915_READ(ICL_PORT_TX_DW2_LN0(port
));
224 tmp
&= ~(SWING_SEL_LOWER_MASK
| SWING_SEL_UPPER_MASK
|
226 tmp
|= SWING_SEL_UPPER(0x2);
227 tmp
|= SWING_SEL_LOWER(0x2);
228 tmp
|= RCOMP_SCALAR(0x98);
229 I915_WRITE(ICL_PORT_TX_DW2_GRP(port
), tmp
);
231 tmp
= I915_READ(ICL_PORT_TX_DW2_AUX(port
));
232 tmp
&= ~(SWING_SEL_LOWER_MASK
| SWING_SEL_UPPER_MASK
|
234 tmp
|= SWING_SEL_UPPER(0x2);
235 tmp
|= SWING_SEL_LOWER(0x2);
236 tmp
|= RCOMP_SCALAR(0x98);
237 I915_WRITE(ICL_PORT_TX_DW2_AUX(port
), tmp
);
239 tmp
= I915_READ(ICL_PORT_TX_DW4_AUX(port
));
240 tmp
&= ~(POST_CURSOR_1_MASK
| POST_CURSOR_2_MASK
|
242 tmp
|= POST_CURSOR_1(0x0);
243 tmp
|= POST_CURSOR_2(0x0);
244 tmp
|= CURSOR_COEFF(0x3f);
245 I915_WRITE(ICL_PORT_TX_DW4_AUX(port
), tmp
);
247 for (lane
= 0; lane
<= 3; lane
++) {
248 /* Bspec: must not use GRP register for write */
249 tmp
= I915_READ(ICL_PORT_TX_DW4_LN(port
, lane
));
250 tmp
&= ~(POST_CURSOR_1_MASK
| POST_CURSOR_2_MASK
|
252 tmp
|= POST_CURSOR_1(0x0);
253 tmp
|= POST_CURSOR_2(0x0);
254 tmp
|= CURSOR_COEFF(0x3f);
255 I915_WRITE(ICL_PORT_TX_DW4_LN(port
, lane
), tmp
);
260 static void configure_dual_link_mode(struct intel_encoder
*encoder
,
261 const struct intel_crtc_state
*pipe_config
)
263 struct drm_i915_private
*dev_priv
= to_i915(encoder
->base
.dev
);
264 struct intel_dsi
*intel_dsi
= enc_to_intel_dsi(&encoder
->base
);
267 dss_ctl1
= I915_READ(DSS_CTL1
);
268 dss_ctl1
|= SPLITTER_ENABLE
;
269 dss_ctl1
&= ~OVERLAP_PIXELS_MASK
;
270 dss_ctl1
|= OVERLAP_PIXELS(intel_dsi
->pixel_overlap
);
272 if (intel_dsi
->dual_link
== DSI_DUAL_LINK_FRONT_BACK
) {
273 const struct drm_display_mode
*adjusted_mode
=
274 &pipe_config
->base
.adjusted_mode
;
276 u16 hactive
= adjusted_mode
->crtc_hdisplay
;
279 dss_ctl1
&= ~DUAL_LINK_MODE_INTERLEAVE
;
280 dl_buffer_depth
= hactive
/ 2 + intel_dsi
->pixel_overlap
;
282 if (dl_buffer_depth
> MAX_DL_BUFFER_TARGET_DEPTH
)
283 DRM_ERROR("DL buffer depth exceed max value\n");
285 dss_ctl1
&= ~LEFT_DL_BUF_TARGET_DEPTH_MASK
;
286 dss_ctl1
|= LEFT_DL_BUF_TARGET_DEPTH(dl_buffer_depth
);
287 dss_ctl2
= I915_READ(DSS_CTL2
);
288 dss_ctl2
&= ~RIGHT_DL_BUF_TARGET_DEPTH_MASK
;
289 dss_ctl2
|= RIGHT_DL_BUF_TARGET_DEPTH(dl_buffer_depth
);
290 I915_WRITE(DSS_CTL2
, dss_ctl2
);
293 dss_ctl1
|= DUAL_LINK_MODE_INTERLEAVE
;
296 I915_WRITE(DSS_CTL1
, dss_ctl1
);
299 static void gen11_dsi_program_esc_clk_div(struct intel_encoder
*encoder
)
301 struct drm_i915_private
*dev_priv
= to_i915(encoder
->base
.dev
);
302 struct intel_dsi
*intel_dsi
= enc_to_intel_dsi(&encoder
->base
);
304 u32 bpp
= mipi_dsi_pixel_format_to_bpp(intel_dsi
->pixel_format
);
305 u32 afe_clk_khz
; /* 8X Clock */
308 afe_clk_khz
= DIV_ROUND_CLOSEST(intel_dsi
->pclk
* bpp
,
309 intel_dsi
->lane_count
);
311 esc_clk_div_m
= DIV_ROUND_UP(afe_clk_khz
, DSI_MAX_ESC_CLK
);
313 for_each_dsi_port(port
, intel_dsi
->ports
) {
314 I915_WRITE(ICL_DSI_ESC_CLK_DIV(port
),
315 esc_clk_div_m
& ICL_ESC_CLK_DIV_MASK
);
316 POSTING_READ(ICL_DSI_ESC_CLK_DIV(port
));
319 for_each_dsi_port(port
, intel_dsi
->ports
) {
320 I915_WRITE(ICL_DPHY_ESC_CLK_DIV(port
),
321 esc_clk_div_m
& ICL_ESC_CLK_DIV_MASK
);
322 POSTING_READ(ICL_DPHY_ESC_CLK_DIV(port
));
326 static void gen11_dsi_enable_io_power(struct intel_encoder
*encoder
)
328 struct drm_i915_private
*dev_priv
= to_i915(encoder
->base
.dev
);
329 struct intel_dsi
*intel_dsi
= enc_to_intel_dsi(&encoder
->base
);
333 for_each_dsi_port(port
, intel_dsi
->ports
) {
334 tmp
= I915_READ(ICL_DSI_IO_MODECTL(port
));
335 tmp
|= COMBO_PHY_MODE_DSI
;
336 I915_WRITE(ICL_DSI_IO_MODECTL(port
), tmp
);
339 for_each_dsi_port(port
, intel_dsi
->ports
) {
340 intel_display_power_get(dev_priv
, port
== PORT_A
?
341 POWER_DOMAIN_PORT_DDI_A_IO
:
342 POWER_DOMAIN_PORT_DDI_B_IO
);
346 static void gen11_dsi_power_up_lanes(struct intel_encoder
*encoder
)
348 struct drm_i915_private
*dev_priv
= to_i915(encoder
->base
.dev
);
349 struct intel_dsi
*intel_dsi
= enc_to_intel_dsi(&encoder
->base
);
354 switch (intel_dsi
->lane_count
) {
356 lane_mask
= PWR_DOWN_LN_3_1_0
;
359 lane_mask
= PWR_DOWN_LN_3_1
;
362 lane_mask
= PWR_DOWN_LN_3
;
366 lane_mask
= PWR_UP_ALL_LANES
;
370 for_each_dsi_port(port
, intel_dsi
->ports
) {
371 tmp
= I915_READ(ICL_PORT_CL_DW10(port
));
372 tmp
&= ~PWR_DOWN_LN_MASK
;
373 I915_WRITE(ICL_PORT_CL_DW10(port
), tmp
| lane_mask
);
377 static void gen11_dsi_config_phy_lanes_sequence(struct intel_encoder
*encoder
)
379 struct drm_i915_private
*dev_priv
= to_i915(encoder
->base
.dev
);
380 struct intel_dsi
*intel_dsi
= enc_to_intel_dsi(&encoder
->base
);
385 /* Step 4b(i) set loadgen select for transmit and aux lanes */
386 for_each_dsi_port(port
, intel_dsi
->ports
) {
387 tmp
= I915_READ(ICL_PORT_TX_DW4_AUX(port
));
388 tmp
&= ~LOADGEN_SELECT
;
389 I915_WRITE(ICL_PORT_TX_DW4_AUX(port
), tmp
);
390 for (lane
= 0; lane
<= 3; lane
++) {
391 tmp
= I915_READ(ICL_PORT_TX_DW4_LN(port
, lane
));
392 tmp
&= ~LOADGEN_SELECT
;
394 tmp
|= LOADGEN_SELECT
;
395 I915_WRITE(ICL_PORT_TX_DW4_LN(port
, lane
), tmp
);
399 /* Step 4b(ii) set latency optimization for transmit and aux lanes */
400 for_each_dsi_port(port
, intel_dsi
->ports
) {
401 tmp
= I915_READ(ICL_PORT_TX_DW2_AUX(port
));
402 tmp
&= ~FRC_LATENCY_OPTIM_MASK
;
403 tmp
|= FRC_LATENCY_OPTIM_VAL(0x5);
404 I915_WRITE(ICL_PORT_TX_DW2_AUX(port
), tmp
);
405 tmp
= I915_READ(ICL_PORT_TX_DW2_LN0(port
));
406 tmp
&= ~FRC_LATENCY_OPTIM_MASK
;
407 tmp
|= FRC_LATENCY_OPTIM_VAL(0x5);
408 I915_WRITE(ICL_PORT_TX_DW2_GRP(port
), tmp
);
413 static void gen11_dsi_voltage_swing_program_seq(struct intel_encoder
*encoder
)
415 struct drm_i915_private
*dev_priv
= to_i915(encoder
->base
.dev
);
416 struct intel_dsi
*intel_dsi
= enc_to_intel_dsi(&encoder
->base
);
420 /* clear common keeper enable bit */
421 for_each_dsi_port(port
, intel_dsi
->ports
) {
422 tmp
= I915_READ(ICL_PORT_PCS_DW1_LN0(port
));
423 tmp
&= ~COMMON_KEEPER_EN
;
424 I915_WRITE(ICL_PORT_PCS_DW1_GRP(port
), tmp
);
425 tmp
= I915_READ(ICL_PORT_PCS_DW1_AUX(port
));
426 tmp
&= ~COMMON_KEEPER_EN
;
427 I915_WRITE(ICL_PORT_PCS_DW1_AUX(port
), tmp
);
431 * Set SUS Clock Config bitfield to 11b
432 * Note: loadgen select program is done
433 * as part of lane phy sequence configuration
435 for_each_dsi_port(port
, intel_dsi
->ports
) {
436 tmp
= I915_READ(ICL_PORT_CL_DW5(port
));
437 tmp
|= SUS_CLOCK_CONFIG
;
438 I915_WRITE(ICL_PORT_CL_DW5(port
), tmp
);
441 /* Clear training enable to change swing values */
442 for_each_dsi_port(port
, intel_dsi
->ports
) {
443 tmp
= I915_READ(ICL_PORT_TX_DW5_LN0(port
));
444 tmp
&= ~TX_TRAINING_EN
;
445 I915_WRITE(ICL_PORT_TX_DW5_GRP(port
), tmp
);
446 tmp
= I915_READ(ICL_PORT_TX_DW5_AUX(port
));
447 tmp
&= ~TX_TRAINING_EN
;
448 I915_WRITE(ICL_PORT_TX_DW5_AUX(port
), tmp
);
451 /* Program swing and de-emphasis */
452 dsi_program_swing_and_deemphasis(encoder
);
454 /* Set training enable to trigger update */
455 for_each_dsi_port(port
, intel_dsi
->ports
) {
456 tmp
= I915_READ(ICL_PORT_TX_DW5_LN0(port
));
457 tmp
|= TX_TRAINING_EN
;
458 I915_WRITE(ICL_PORT_TX_DW5_GRP(port
), tmp
);
459 tmp
= I915_READ(ICL_PORT_TX_DW5_AUX(port
));
460 tmp
|= TX_TRAINING_EN
;
461 I915_WRITE(ICL_PORT_TX_DW5_AUX(port
), tmp
);
465 static void gen11_dsi_enable_ddi_buffer(struct intel_encoder
*encoder
)
467 struct drm_i915_private
*dev_priv
= to_i915(encoder
->base
.dev
);
468 struct intel_dsi
*intel_dsi
= enc_to_intel_dsi(&encoder
->base
);
472 for_each_dsi_port(port
, intel_dsi
->ports
) {
473 tmp
= I915_READ(DDI_BUF_CTL(port
));
474 tmp
|= DDI_BUF_CTL_ENABLE
;
475 I915_WRITE(DDI_BUF_CTL(port
), tmp
);
477 if (wait_for_us(!(I915_READ(DDI_BUF_CTL(port
)) &
480 DRM_ERROR("DDI port:%c buffer idle\n", port_name(port
));
484 static void gen11_dsi_setup_dphy_timings(struct intel_encoder
*encoder
)
486 struct drm_i915_private
*dev_priv
= to_i915(encoder
->base
.dev
);
487 struct intel_dsi
*intel_dsi
= enc_to_intel_dsi(&encoder
->base
);
491 /* Program T-INIT master registers */
492 for_each_dsi_port(port
, intel_dsi
->ports
) {
493 tmp
= I915_READ(ICL_DSI_T_INIT_MASTER(port
));
494 tmp
&= ~MASTER_INIT_TIMER_MASK
;
495 tmp
|= intel_dsi
->init_count
;
496 I915_WRITE(ICL_DSI_T_INIT_MASTER(port
), tmp
);
499 /* Program DPHY clock lanes timings */
500 for_each_dsi_port(port
, intel_dsi
->ports
) {
501 I915_WRITE(DPHY_CLK_TIMING_PARAM(port
), intel_dsi
->dphy_reg
);
503 /* shadow register inside display core */
504 I915_WRITE(DSI_CLK_TIMING_PARAM(port
), intel_dsi
->dphy_reg
);
507 /* Program DPHY data lanes timings */
508 for_each_dsi_port(port
, intel_dsi
->ports
) {
509 I915_WRITE(DPHY_DATA_TIMING_PARAM(port
),
510 intel_dsi
->dphy_data_lane_reg
);
512 /* shadow register inside display core */
513 I915_WRITE(DSI_DATA_TIMING_PARAM(port
),
514 intel_dsi
->dphy_data_lane_reg
);
518 * If DSI link operating at or below an 800 MHz,
519 * TA_SURE should be override and programmed to
520 * a value '0' inside TA_PARAM_REGISTERS otherwise
521 * leave all fields at HW default values.
523 if (intel_dsi_bitrate(intel_dsi
) <= 800000) {
524 for_each_dsi_port(port
, intel_dsi
->ports
) {
525 tmp
= I915_READ(DPHY_TA_TIMING_PARAM(port
));
526 tmp
&= ~TA_SURE_MASK
;
527 tmp
|= TA_SURE_OVERRIDE
| TA_SURE(0);
528 I915_WRITE(DPHY_TA_TIMING_PARAM(port
), tmp
);
530 /* shadow register inside display core */
531 tmp
= I915_READ(DSI_TA_TIMING_PARAM(port
));
532 tmp
&= ~TA_SURE_MASK
;
533 tmp
|= TA_SURE_OVERRIDE
| TA_SURE(0);
534 I915_WRITE(DSI_TA_TIMING_PARAM(port
), tmp
);
539 static void gen11_dsi_gate_clocks(struct intel_encoder
*encoder
)
541 struct drm_i915_private
*dev_priv
= to_i915(encoder
->base
.dev
);
542 struct intel_dsi
*intel_dsi
= enc_to_intel_dsi(&encoder
->base
);
546 mutex_lock(&dev_priv
->dpll_lock
);
547 tmp
= I915_READ(DPCLKA_CFGCR0_ICL
);
548 for_each_dsi_port(port
, intel_dsi
->ports
) {
549 tmp
|= DPCLKA_CFGCR0_DDI_CLK_OFF(port
);
552 I915_WRITE(DPCLKA_CFGCR0_ICL
, tmp
);
553 mutex_unlock(&dev_priv
->dpll_lock
);
556 static void gen11_dsi_ungate_clocks(struct intel_encoder
*encoder
)
558 struct drm_i915_private
*dev_priv
= to_i915(encoder
->base
.dev
);
559 struct intel_dsi
*intel_dsi
= enc_to_intel_dsi(&encoder
->base
);
563 mutex_lock(&dev_priv
->dpll_lock
);
564 tmp
= I915_READ(DPCLKA_CFGCR0_ICL
);
565 for_each_dsi_port(port
, intel_dsi
->ports
) {
566 tmp
&= ~DPCLKA_CFGCR0_DDI_CLK_OFF(port
);
569 I915_WRITE(DPCLKA_CFGCR0_ICL
, tmp
);
570 mutex_unlock(&dev_priv
->dpll_lock
);
574 gen11_dsi_configure_transcoder(struct intel_encoder
*encoder
,
575 const struct intel_crtc_state
*pipe_config
)
577 struct drm_i915_private
*dev_priv
= to_i915(encoder
->base
.dev
);
578 struct intel_dsi
*intel_dsi
= enc_to_intel_dsi(&encoder
->base
);
579 struct intel_crtc
*intel_crtc
= to_intel_crtc(pipe_config
->base
.crtc
);
580 enum pipe pipe
= intel_crtc
->pipe
;
583 enum transcoder dsi_trans
;
585 for_each_dsi_port(port
, intel_dsi
->ports
) {
586 dsi_trans
= dsi_port_to_transcoder(port
);
587 tmp
= I915_READ(DSI_TRANS_FUNC_CONF(dsi_trans
));
589 if (intel_dsi
->eotp_pkt
)
590 tmp
&= ~EOTP_DISABLED
;
592 tmp
|= EOTP_DISABLED
;
594 /* enable link calibration if freq > 1.5Gbps */
595 if (intel_dsi_bitrate(intel_dsi
) >= 1500 * 1000) {
596 tmp
&= ~LINK_CALIBRATION_MASK
;
597 tmp
|= CALIBRATION_ENABLED_INITIAL_ONLY
;
600 /* configure continuous clock */
601 tmp
&= ~CONTINUOUS_CLK_MASK
;
602 if (intel_dsi
->clock_stop
)
603 tmp
|= CLK_ENTER_LP_AFTER_DATA
;
605 tmp
|= CLK_HS_CONTINUOUS
;
607 /* configure buffer threshold limit to minimum */
608 tmp
&= ~PIX_BUF_THRESHOLD_MASK
;
609 tmp
|= PIX_BUF_THRESHOLD_1_4
;
611 /* set virtual channel to '0' */
612 tmp
&= ~PIX_VIRT_CHAN_MASK
;
613 tmp
|= PIX_VIRT_CHAN(0);
615 /* program BGR transmission */
616 if (intel_dsi
->bgr_enabled
)
617 tmp
|= BGR_TRANSMISSION
;
619 /* select pixel format */
620 tmp
&= ~PIX_FMT_MASK
;
621 switch (intel_dsi
->pixel_format
) {
623 MISSING_CASE(intel_dsi
->pixel_format
);
625 case MIPI_DSI_FMT_RGB565
:
626 tmp
|= PIX_FMT_RGB565
;
628 case MIPI_DSI_FMT_RGB666_PACKED
:
629 tmp
|= PIX_FMT_RGB666_PACKED
;
631 case MIPI_DSI_FMT_RGB666
:
632 tmp
|= PIX_FMT_RGB666_LOOSE
;
634 case MIPI_DSI_FMT_RGB888
:
635 tmp
|= PIX_FMT_RGB888
;
639 /* program DSI operation mode */
640 if (is_vid_mode(intel_dsi
)) {
641 tmp
&= ~OP_MODE_MASK
;
642 switch (intel_dsi
->video_mode_format
) {
644 MISSING_CASE(intel_dsi
->video_mode_format
);
646 case VIDEO_MODE_NON_BURST_WITH_SYNC_EVENTS
:
647 tmp
|= VIDEO_MODE_SYNC_EVENT
;
649 case VIDEO_MODE_NON_BURST_WITH_SYNC_PULSE
:
650 tmp
|= VIDEO_MODE_SYNC_PULSE
;
655 I915_WRITE(DSI_TRANS_FUNC_CONF(dsi_trans
), tmp
);
658 /* enable port sync mode if dual link */
659 if (intel_dsi
->dual_link
) {
660 for_each_dsi_port(port
, intel_dsi
->ports
) {
661 dsi_trans
= dsi_port_to_transcoder(port
);
662 tmp
= I915_READ(TRANS_DDI_FUNC_CTL2(dsi_trans
));
663 tmp
|= PORT_SYNC_MODE_ENABLE
;
664 I915_WRITE(TRANS_DDI_FUNC_CTL2(dsi_trans
), tmp
);
667 /* configure stream splitting */
668 configure_dual_link_mode(encoder
, pipe_config
);
671 for_each_dsi_port(port
, intel_dsi
->ports
) {
672 dsi_trans
= dsi_port_to_transcoder(port
);
674 /* select data lane width */
675 tmp
= I915_READ(TRANS_DDI_FUNC_CTL(dsi_trans
));
676 tmp
&= ~DDI_PORT_WIDTH_MASK
;
677 tmp
|= DDI_PORT_WIDTH(intel_dsi
->lane_count
);
679 /* select input pipe */
680 tmp
&= ~TRANS_DDI_EDP_INPUT_MASK
;
686 tmp
|= TRANS_DDI_EDP_INPUT_A_ON
;
689 tmp
|= TRANS_DDI_EDP_INPUT_B_ONOFF
;
692 tmp
|= TRANS_DDI_EDP_INPUT_C_ONOFF
;
696 /* enable DDI buffer */
697 tmp
|= TRANS_DDI_FUNC_ENABLE
;
698 I915_WRITE(TRANS_DDI_FUNC_CTL(dsi_trans
), tmp
);
701 /* wait for link ready */
702 for_each_dsi_port(port
, intel_dsi
->ports
) {
703 dsi_trans
= dsi_port_to_transcoder(port
);
704 if (wait_for_us((I915_READ(DSI_TRANS_FUNC_CONF(dsi_trans
)) &
706 DRM_ERROR("DSI link not ready\n");
711 gen11_dsi_set_transcoder_timings(struct intel_encoder
*encoder
,
712 const struct intel_crtc_state
*pipe_config
)
714 struct drm_i915_private
*dev_priv
= to_i915(encoder
->base
.dev
);
715 struct intel_dsi
*intel_dsi
= enc_to_intel_dsi(&encoder
->base
);
716 const struct drm_display_mode
*adjusted_mode
=
717 &pipe_config
->base
.adjusted_mode
;
719 enum transcoder dsi_trans
;
720 /* horizontal timings */
721 u16 htotal
, hactive
, hsync_start
, hsync_end
, hsync_size
;
722 u16 hfront_porch
, hback_porch
;
723 /* vertical timings */
724 u16 vtotal
, vactive
, vsync_start
, vsync_end
, vsync_shift
;
726 hactive
= adjusted_mode
->crtc_hdisplay
;
727 htotal
= adjusted_mode
->crtc_htotal
;
728 hsync_start
= adjusted_mode
->crtc_hsync_start
;
729 hsync_end
= adjusted_mode
->crtc_hsync_end
;
730 hsync_size
= hsync_end
- hsync_start
;
731 hfront_porch
= (adjusted_mode
->crtc_hsync_start
-
732 adjusted_mode
->crtc_hdisplay
);
733 hback_porch
= (adjusted_mode
->crtc_htotal
-
734 adjusted_mode
->crtc_hsync_end
);
735 vactive
= adjusted_mode
->crtc_vdisplay
;
736 vtotal
= adjusted_mode
->crtc_vtotal
;
737 vsync_start
= adjusted_mode
->crtc_vsync_start
;
738 vsync_end
= adjusted_mode
->crtc_vsync_end
;
739 vsync_shift
= hsync_start
- htotal
/ 2;
741 if (intel_dsi
->dual_link
) {
743 if (intel_dsi
->dual_link
== DSI_DUAL_LINK_FRONT_BACK
)
744 hactive
+= intel_dsi
->pixel_overlap
;
748 /* minimum hactive as per bspec: 256 pixels */
749 if (adjusted_mode
->crtc_hdisplay
< 256)
750 DRM_ERROR("hactive is less then 256 pixels\n");
752 /* if RGB666 format, then hactive must be multiple of 4 pixels */
753 if (intel_dsi
->pixel_format
== MIPI_DSI_FMT_RGB666
&& hactive
% 4 != 0)
754 DRM_ERROR("hactive pixels are not multiple of 4\n");
756 /* program TRANS_HTOTAL register */
757 for_each_dsi_port(port
, intel_dsi
->ports
) {
758 dsi_trans
= dsi_port_to_transcoder(port
);
759 I915_WRITE(HTOTAL(dsi_trans
),
760 (hactive
- 1) | ((htotal
- 1) << 16));
763 /* TRANS_HSYNC register to be programmed only for video mode */
764 if (intel_dsi
->operation_mode
== INTEL_DSI_VIDEO_MODE
) {
765 if (intel_dsi
->video_mode_format
==
766 VIDEO_MODE_NON_BURST_WITH_SYNC_PULSE
) {
767 /* BSPEC: hsync size should be atleast 16 pixels */
769 DRM_ERROR("hsync size < 16 pixels\n");
772 if (hback_porch
< 16)
773 DRM_ERROR("hback porch < 16 pixels\n");
775 if (intel_dsi
->dual_link
) {
780 for_each_dsi_port(port
, intel_dsi
->ports
) {
781 dsi_trans
= dsi_port_to_transcoder(port
);
782 I915_WRITE(HSYNC(dsi_trans
),
783 (hsync_start
- 1) | ((hsync_end
- 1) << 16));
787 /* program TRANS_VTOTAL register */
788 for_each_dsi_port(port
, intel_dsi
->ports
) {
789 dsi_trans
= dsi_port_to_transcoder(port
);
791 * FIXME: Programing this by assuming progressive mode, since
792 * non-interlaced info from VBT is not saved inside
793 * struct drm_display_mode.
794 * For interlace mode: program required pixel minus 2
796 I915_WRITE(VTOTAL(dsi_trans
),
797 (vactive
- 1) | ((vtotal
- 1) << 16));
800 if (vsync_end
< vsync_start
|| vsync_end
> vtotal
)
801 DRM_ERROR("Invalid vsync_end value\n");
803 if (vsync_start
< vactive
)
804 DRM_ERROR("vsync_start less than vactive\n");
806 /* program TRANS_VSYNC register */
807 for_each_dsi_port(port
, intel_dsi
->ports
) {
808 dsi_trans
= dsi_port_to_transcoder(port
);
809 I915_WRITE(VSYNC(dsi_trans
),
810 (vsync_start
- 1) | ((vsync_end
- 1) << 16));
814 * FIXME: It has to be programmed only for interlaced
815 * modes. Put the check condition here once interlaced
816 * info available as described above.
817 * program TRANS_VSYNCSHIFT register
819 for_each_dsi_port(port
, intel_dsi
->ports
) {
820 dsi_trans
= dsi_port_to_transcoder(port
);
821 I915_WRITE(VSYNCSHIFT(dsi_trans
), vsync_shift
);
825 static void gen11_dsi_enable_transcoder(struct intel_encoder
*encoder
)
827 struct drm_i915_private
*dev_priv
= to_i915(encoder
->base
.dev
);
828 struct intel_dsi
*intel_dsi
= enc_to_intel_dsi(&encoder
->base
);
830 enum transcoder dsi_trans
;
833 for_each_dsi_port(port
, intel_dsi
->ports
) {
834 dsi_trans
= dsi_port_to_transcoder(port
);
835 tmp
= I915_READ(PIPECONF(dsi_trans
));
836 tmp
|= PIPECONF_ENABLE
;
837 I915_WRITE(PIPECONF(dsi_trans
), tmp
);
839 /* wait for transcoder to be enabled */
840 if (intel_wait_for_register(dev_priv
, PIPECONF(dsi_trans
),
841 I965_PIPECONF_ACTIVE
,
842 I965_PIPECONF_ACTIVE
, 10))
843 DRM_ERROR("DSI transcoder not enabled\n");
847 static void gen11_dsi_setup_timeouts(struct intel_encoder
*encoder
)
849 struct drm_i915_private
*dev_priv
= to_i915(encoder
->base
.dev
);
850 struct intel_dsi
*intel_dsi
= enc_to_intel_dsi(&encoder
->base
);
852 enum transcoder dsi_trans
;
853 u32 tmp
, hs_tx_timeout
, lp_rx_timeout
, ta_timeout
, divisor
, mul
;
856 * escape clock count calculation:
857 * BYTE_CLK_COUNT = TIME_NS/(8 * UI)
858 * UI (nsec) = (10^6)/Bitrate
859 * TIME_NS = (BYTE_CLK_COUNT * 8 * 10^6)/ Bitrate
860 * ESCAPE_CLK_COUNT = TIME_NS/ESC_CLK_NS
862 divisor
= intel_dsi_tlpx_ns(intel_dsi
) * intel_dsi_bitrate(intel_dsi
) * 1000;
864 hs_tx_timeout
= DIV_ROUND_UP(intel_dsi
->hs_tx_timeout
* mul
,
866 lp_rx_timeout
= DIV_ROUND_UP(intel_dsi
->lp_rx_timeout
* mul
, divisor
);
867 ta_timeout
= DIV_ROUND_UP(intel_dsi
->turn_arnd_val
* mul
, divisor
);
869 for_each_dsi_port(port
, intel_dsi
->ports
) {
870 dsi_trans
= dsi_port_to_transcoder(port
);
872 /* program hst_tx_timeout */
873 tmp
= I915_READ(DSI_HSTX_TO(dsi_trans
));
874 tmp
&= ~HSTX_TIMEOUT_VALUE_MASK
;
875 tmp
|= HSTX_TIMEOUT_VALUE(hs_tx_timeout
);
876 I915_WRITE(DSI_HSTX_TO(dsi_trans
), tmp
);
878 /* FIXME: DSI_CALIB_TO */
880 /* program lp_rx_host timeout */
881 tmp
= I915_READ(DSI_LPRX_HOST_TO(dsi_trans
));
882 tmp
&= ~LPRX_TIMEOUT_VALUE_MASK
;
883 tmp
|= LPRX_TIMEOUT_VALUE(lp_rx_timeout
);
884 I915_WRITE(DSI_LPRX_HOST_TO(dsi_trans
), tmp
);
886 /* FIXME: DSI_PWAIT_TO */
888 /* program turn around timeout */
889 tmp
= I915_READ(DSI_TA_TO(dsi_trans
));
890 tmp
&= ~TA_TIMEOUT_VALUE_MASK
;
891 tmp
|= TA_TIMEOUT_VALUE(ta_timeout
);
892 I915_WRITE(DSI_TA_TO(dsi_trans
), tmp
);
/*
 * Step 4 of the Icelake DSI enable sequence: bring up the DDI port and
 * D-PHY — lane power, lane sequencing, voltage swing, DDI buffer, D-PHY
 * timings, protocol timeouts, transcoder configuration, then gate the DDI
 * clocks again.
 */
static void
gen11_dsi_enable_port_and_phy(struct intel_encoder *encoder,
			      const struct intel_crtc_state *pipe_config)
{
	/* step 4a: power up all lanes of the DDI used by DSI */
	gen11_dsi_power_up_lanes(encoder);

	/* step 4b: configure lane sequencing of the Combo-PHY transmitters */
	gen11_dsi_config_phy_lanes_sequence(encoder);

	/* step 4c: configure voltage swing and skew */
	gen11_dsi_voltage_swing_program_seq(encoder);

	/* enable DDI buffer */
	gen11_dsi_enable_ddi_buffer(encoder);

	/* setup D-PHY timings */
	gen11_dsi_setup_dphy_timings(encoder);

	/* step 4h: setup DSI protocol timeouts */
	gen11_dsi_setup_timeouts(encoder);

	/* Step (4h, 4i, 4j, 4k): Configure transcoder */
	gen11_dsi_configure_transcoder(encoder, pipe_config);

	/* Step 4l: Gate DDI clocks */
	gen11_dsi_gate_clocks(encoder);
}
925 static void gen11_dsi_powerup_panel(struct intel_encoder
*encoder
)
927 struct drm_i915_private
*dev_priv
= to_i915(encoder
->base
.dev
);
928 struct intel_dsi
*intel_dsi
= enc_to_intel_dsi(&encoder
->base
);
929 struct mipi_dsi_device
*dsi
;
931 enum transcoder dsi_trans
;
935 /* set maximum return packet size */
936 for_each_dsi_port(port
, intel_dsi
->ports
) {
937 dsi_trans
= dsi_port_to_transcoder(port
);
940 * FIXME: This uses the number of DW's currently in the payload
941 * receive queue. This is probably not what we want here.
943 tmp
= I915_READ(DSI_CMD_RXCTL(dsi_trans
));
944 tmp
&= NUMBER_RX_PLOAD_DW_MASK
;
945 /* multiply "Number Rx Payload DW" by 4 to get max value */
947 dsi
= intel_dsi
->dsi_hosts
[port
]->device
;
948 ret
= mipi_dsi_set_maximum_return_packet_size(dsi
, tmp
);
950 DRM_ERROR("error setting max return pkt size%d\n", tmp
);
953 /* panel power on related mipi dsi vbt sequences */
954 intel_dsi_vbt_exec_sequence(intel_dsi
, MIPI_SEQ_POWER_ON
);
955 intel_dsi_msleep(intel_dsi
, intel_dsi
->panel_on_delay
);
956 intel_dsi_vbt_exec_sequence(intel_dsi
, MIPI_SEQ_DEASSERT_RESET
);
957 intel_dsi_vbt_exec_sequence(intel_dsi
, MIPI_SEQ_INIT_OTP
);
958 intel_dsi_vbt_exec_sequence(intel_dsi
, MIPI_SEQ_DISPLAY_ON
);
960 /* ensure all panel commands dispatched before enabling transcoder */
961 wait_for_cmds_dispatched_to_panel(encoder
);
/*
 * Pre-PLL-enable hook: steps 2 and 3 of the Icelake DSI enable sequence
 * (IO power, then the escape clock dividers for the DSI PLL).
 */
static void gen11_dsi_pre_pll_enable(struct intel_encoder *encoder,
				     const struct intel_crtc_state *pipe_config,
				     const struct drm_connector_state *conn_state)
{
	/* step2: enable IO power */
	gen11_dsi_enable_io_power(encoder);

	/* step3: enable DSI PLL */
	gen11_dsi_program_esc_clk_div(encoder);
}
975 static void gen11_dsi_pre_enable(struct intel_encoder
*encoder
,
976 const struct intel_crtc_state
*pipe_config
,
977 const struct drm_connector_state
*conn_state
)
979 struct intel_dsi
*intel_dsi
= enc_to_intel_dsi(&encoder
->base
);
981 /* step4: enable DSI port and DPHY */
982 gen11_dsi_enable_port_and_phy(encoder
, pipe_config
);
984 /* step5: program and powerup panel */
985 gen11_dsi_powerup_panel(encoder
);
987 /* step6c: configure transcoder timings */
988 gen11_dsi_set_transcoder_timings(encoder
, pipe_config
);
990 /* step6d: enable dsi transcoder */
991 gen11_dsi_enable_transcoder(encoder
);
993 /* step7: enable backlight */
994 intel_panel_enable_backlight(pipe_config
, conn_state
);
995 intel_dsi_vbt_exec_sequence(intel_dsi
, MIPI_SEQ_BACKLIGHT_ON
);
998 static void gen11_dsi_disable_transcoder(struct intel_encoder
*encoder
)
1000 struct drm_i915_private
*dev_priv
= to_i915(encoder
->base
.dev
);
1001 struct intel_dsi
*intel_dsi
= enc_to_intel_dsi(&encoder
->base
);
1003 enum transcoder dsi_trans
;
1006 for_each_dsi_port(port
, intel_dsi
->ports
) {
1007 dsi_trans
= dsi_port_to_transcoder(port
);
1009 /* disable transcoder */
1010 tmp
= I915_READ(PIPECONF(dsi_trans
));
1011 tmp
&= ~PIPECONF_ENABLE
;
1012 I915_WRITE(PIPECONF(dsi_trans
), tmp
);
1014 /* wait for transcoder to be disabled */
1015 if (intel_wait_for_register(dev_priv
, PIPECONF(dsi_trans
),
1016 I965_PIPECONF_ACTIVE
, 0, 50))
1017 DRM_ERROR("DSI trancoder not disabled\n");
1021 static void gen11_dsi_powerdown_panel(struct intel_encoder
*encoder
)
1023 struct intel_dsi
*intel_dsi
= enc_to_intel_dsi(&encoder
->base
);
1025 intel_dsi_vbt_exec_sequence(intel_dsi
, MIPI_SEQ_DISPLAY_OFF
);
1026 intel_dsi_vbt_exec_sequence(intel_dsi
, MIPI_SEQ_ASSERT_RESET
);
1027 intel_dsi_vbt_exec_sequence(intel_dsi
, MIPI_SEQ_POWER_OFF
);
1029 /* ensure cmds dispatched to panel */
1030 wait_for_cmds_dispatched_to_panel(encoder
);
1033 static void gen11_dsi_deconfigure_trancoder(struct intel_encoder
*encoder
)
1035 struct drm_i915_private
*dev_priv
= to_i915(encoder
->base
.dev
);
1036 struct intel_dsi
*intel_dsi
= enc_to_intel_dsi(&encoder
->base
);
1038 enum transcoder dsi_trans
;
1041 /* put dsi link in ULPS */
1042 for_each_dsi_port(port
, intel_dsi
->ports
) {
1043 dsi_trans
= dsi_port_to_transcoder(port
);
1044 tmp
= I915_READ(DSI_LP_MSG(dsi_trans
));
1045 tmp
|= LINK_ENTER_ULPS
;
1046 tmp
&= ~LINK_ULPS_TYPE_LP11
;
1047 I915_WRITE(DSI_LP_MSG(dsi_trans
), tmp
);
1049 if (wait_for_us((I915_READ(DSI_LP_MSG(dsi_trans
)) &
1052 DRM_ERROR("DSI link not in ULPS\n");
1055 /* disable ddi function */
1056 for_each_dsi_port(port
, intel_dsi
->ports
) {
1057 dsi_trans
= dsi_port_to_transcoder(port
);
1058 tmp
= I915_READ(TRANS_DDI_FUNC_CTL(dsi_trans
));
1059 tmp
&= ~TRANS_DDI_FUNC_ENABLE
;
1060 I915_WRITE(TRANS_DDI_FUNC_CTL(dsi_trans
), tmp
);
1063 /* disable port sync mode if dual link */
1064 if (intel_dsi
->dual_link
) {
1065 for_each_dsi_port(port
, intel_dsi
->ports
) {
1066 dsi_trans
= dsi_port_to_transcoder(port
);
1067 tmp
= I915_READ(TRANS_DDI_FUNC_CTL2(dsi_trans
));
1068 tmp
&= ~PORT_SYNC_MODE_ENABLE
;
1069 I915_WRITE(TRANS_DDI_FUNC_CTL2(dsi_trans
), tmp
);
1074 static void gen11_dsi_disable_port(struct intel_encoder
*encoder
)
1076 struct drm_i915_private
*dev_priv
= to_i915(encoder
->base
.dev
);
1077 struct intel_dsi
*intel_dsi
= enc_to_intel_dsi(&encoder
->base
);
1081 gen11_dsi_ungate_clocks(encoder
);
1082 for_each_dsi_port(port
, intel_dsi
->ports
) {
1083 tmp
= I915_READ(DDI_BUF_CTL(port
));
1084 tmp
&= ~DDI_BUF_CTL_ENABLE
;
1085 I915_WRITE(DDI_BUF_CTL(port
), tmp
);
1087 if (wait_for_us((I915_READ(DDI_BUF_CTL(port
)) &
1090 DRM_ERROR("DDI port:%c buffer not idle\n",
1093 gen11_dsi_ungate_clocks(encoder
);
1096 static void gen11_dsi_disable_io_power(struct intel_encoder
*encoder
)
1098 struct drm_i915_private
*dev_priv
= to_i915(encoder
->base
.dev
);
1099 struct intel_dsi
*intel_dsi
= enc_to_intel_dsi(&encoder
->base
);
1103 intel_display_power_put(dev_priv
, POWER_DOMAIN_PORT_DDI_A_IO
);
1105 if (intel_dsi
->dual_link
)
1106 intel_display_power_put(dev_priv
, POWER_DOMAIN_PORT_DDI_B_IO
);
1108 /* set mode to DDI */
1109 for_each_dsi_port(port
, intel_dsi
->ports
) {
1110 tmp
= I915_READ(ICL_DSI_IO_MODECTL(port
));
1111 tmp
&= ~COMBO_PHY_MODE_DSI
;
1112 I915_WRITE(ICL_DSI_IO_MODECTL(port
), tmp
);
1116 static void gen11_dsi_disable(struct intel_encoder
*encoder
,
1117 const struct intel_crtc_state
*old_crtc_state
,
1118 const struct drm_connector_state
*old_conn_state
)
1120 struct intel_dsi
*intel_dsi
= enc_to_intel_dsi(&encoder
->base
);
1122 /* step1: turn off backlight */
1123 intel_dsi_vbt_exec_sequence(intel_dsi
, MIPI_SEQ_BACKLIGHT_OFF
);
1124 intel_panel_disable_backlight(old_conn_state
);
1126 /* step2d,e: disable transcoder and wait */
1127 gen11_dsi_disable_transcoder(encoder
);
1129 /* step2f,g: powerdown panel */
1130 gen11_dsi_powerdown_panel(encoder
);
1132 /* step2h,i,j: deconfig trancoder */
1133 gen11_dsi_deconfigure_trancoder(encoder
);
1135 /* step3: disable port */
1136 gen11_dsi_disable_port(encoder
);
1138 /* step4: disable IO power */
1139 gen11_dsi_disable_io_power(encoder
);
1142 static void gen11_dsi_get_config(struct intel_encoder
*encoder
,
1143 struct intel_crtc_state
*pipe_config
)
1145 struct drm_i915_private
*dev_priv
= to_i915(encoder
->base
.dev
);
1146 struct intel_dsi
*intel_dsi
= enc_to_intel_dsi(&encoder
->base
);
1149 /* FIXME: adapt icl_ddi_clock_get() for DSI and use that? */
1150 pll_id
= intel_get_shared_dpll_id(dev_priv
, pipe_config
->shared_dpll
);
1151 pipe_config
->port_clock
= cnl_calc_wrpll_link(dev_priv
, pll_id
);
1152 pipe_config
->base
.adjusted_mode
.crtc_clock
= intel_dsi
->pclk
;
1153 pipe_config
->output_types
|= BIT(INTEL_OUTPUT_DSI
);
1156 static bool gen11_dsi_compute_config(struct intel_encoder
*encoder
,
1157 struct intel_crtc_state
*pipe_config
,
1158 struct drm_connector_state
*conn_state
)
1160 struct intel_dsi
*intel_dsi
= container_of(encoder
, struct intel_dsi
,
1162 struct intel_connector
*intel_connector
= intel_dsi
->attached_connector
;
1163 struct intel_crtc
*crtc
= to_intel_crtc(pipe_config
->base
.crtc
);
1164 const struct drm_display_mode
*fixed_mode
=
1165 intel_connector
->panel
.fixed_mode
;
1166 struct drm_display_mode
*adjusted_mode
=
1167 &pipe_config
->base
.adjusted_mode
;
1169 intel_fixed_panel_mode(fixed_mode
, adjusted_mode
);
1170 intel_pch_panel_fitting(crtc
, pipe_config
, conn_state
->scaling_mode
);
1172 adjusted_mode
->flags
= 0;
1174 /* Dual link goes to trancoder DSI'0' */
1175 if (intel_dsi
->ports
== BIT(PORT_B
))
1176 pipe_config
->cpu_transcoder
= TRANSCODER_DSI_1
;
1178 pipe_config
->cpu_transcoder
= TRANSCODER_DSI_0
;
1180 pipe_config
->clock_set
= true;
1181 pipe_config
->port_clock
= intel_dsi_bitrate(intel_dsi
) / 5;
1186 static u64
gen11_dsi_get_power_domains(struct intel_encoder
*encoder
,
1187 struct intel_crtc_state
*crtc_state
)
1189 struct intel_dsi
*intel_dsi
= enc_to_intel_dsi(&encoder
->base
);
1193 for_each_dsi_port(port
, intel_dsi
->ports
)
1195 domains
|= BIT_ULL(POWER_DOMAIN_PORT_DDI_A_IO
);
1197 domains
|= BIT_ULL(POWER_DOMAIN_PORT_DDI_B_IO
);
1202 static bool gen11_dsi_get_hw_state(struct intel_encoder
*encoder
,
1205 struct drm_i915_private
*dev_priv
= to_i915(encoder
->base
.dev
);
1206 struct intel_dsi
*intel_dsi
= enc_to_intel_dsi(&encoder
->base
);
1209 enum transcoder dsi_trans
;
1212 if (!intel_display_power_get_if_enabled(dev_priv
,
1213 encoder
->power_domain
))
1216 for_each_dsi_port(port
, intel_dsi
->ports
) {
1217 dsi_trans
= dsi_port_to_transcoder(port
);
1218 tmp
= I915_READ(TRANS_DDI_FUNC_CTL(dsi_trans
));
1219 switch (tmp
& TRANS_DDI_EDP_INPUT_MASK
) {
1220 case TRANS_DDI_EDP_INPUT_A_ON
:
1223 case TRANS_DDI_EDP_INPUT_B_ONOFF
:
1226 case TRANS_DDI_EDP_INPUT_C_ONOFF
:
1230 DRM_ERROR("Invalid PIPE input\n");
1234 tmp
= I915_READ(PIPECONF(dsi_trans
));
1235 ret
= tmp
& PIPECONF_ENABLE
;
1238 intel_display_power_put(dev_priv
, encoder
->power_domain
);
/* Destroy callback for drm_encoder_funcs: delegate to the shared helper. */
static void gen11_dsi_encoder_destroy(struct drm_encoder *encoder)
{
	intel_encoder_destroy(encoder);
}
1247 static const struct drm_encoder_funcs gen11_dsi_encoder_funcs
= {
1248 .destroy
= gen11_dsi_encoder_destroy
,
1251 static const struct drm_connector_funcs gen11_dsi_connector_funcs
= {
1252 .late_register
= intel_connector_register
,
1253 .early_unregister
= intel_connector_unregister
,
1254 .destroy
= intel_connector_destroy
,
1255 .fill_modes
= drm_helper_probe_single_connector_modes
,
1256 .atomic_get_property
= intel_digital_connector_atomic_get_property
,
1257 .atomic_set_property
= intel_digital_connector_atomic_set_property
,
1258 .atomic_destroy_state
= drm_atomic_helper_connector_destroy_state
,
1259 .atomic_duplicate_state
= intel_digital_connector_duplicate_state
,
1262 static const struct drm_connector_helper_funcs gen11_dsi_connector_helper_funcs
= {
1263 .get_modes
= intel_dsi_get_modes
,
1264 .mode_valid
= intel_dsi_mode_valid
,
1265 .atomic_check
= intel_digital_connector_atomic_check
,
/* mipi_dsi_host attach hook: nothing to do for this driver. */
static int gen11_dsi_host_attach(struct mipi_dsi_host *host,
				 struct mipi_dsi_device *dsi)
{
	return 0;
}
/* mipi_dsi_host detach hook: nothing to do for this driver. */
static int gen11_dsi_host_detach(struct mipi_dsi_host *host,
				 struct mipi_dsi_device *dsi)
{
	return 0;
}
1280 static ssize_t
gen11_dsi_host_transfer(struct mipi_dsi_host
*host
,
1281 const struct mipi_dsi_msg
*msg
)
1283 struct intel_dsi_host
*intel_dsi_host
= to_intel_dsi_host(host
);
1284 struct mipi_dsi_packet dsi_pkt
;
1286 bool enable_lpdt
= false;
1288 ret
= mipi_dsi_create_packet(&dsi_pkt
, msg
);
1292 if (msg
->flags
& MIPI_DSI_MSG_USE_LPM
)
1295 /* send packet header */
1296 ret
= dsi_send_pkt_hdr(intel_dsi_host
, dsi_pkt
, enable_lpdt
);
1300 /* only long packet contains payload */
1301 if (mipi_dsi_packet_format_is_long(msg
->type
)) {
1302 ret
= dsi_send_pkt_payld(intel_dsi_host
, dsi_pkt
);
1307 //TODO: add payload receive code if needed
1309 ret
= sizeof(dsi_pkt
.header
) + dsi_pkt
.payload_length
;
1314 static const struct mipi_dsi_host_ops gen11_dsi_host_ops
= {
1315 .attach
= gen11_dsi_host_attach
,
1316 .detach
= gen11_dsi_host_detach
,
1317 .transfer
= gen11_dsi_host_transfer
,
1320 void icl_dsi_init(struct drm_i915_private
*dev_priv
)
1322 struct drm_device
*dev
= &dev_priv
->drm
;
1323 struct intel_dsi
*intel_dsi
;
1324 struct intel_encoder
*encoder
;
1325 struct intel_connector
*intel_connector
;
1326 struct drm_connector
*connector
;
1327 struct drm_display_mode
*scan
, *fixed_mode
= NULL
;
1330 if (!intel_bios_is_dsi_present(dev_priv
, &port
))
1333 intel_dsi
= kzalloc(sizeof(*intel_dsi
), GFP_KERNEL
);
1337 intel_connector
= intel_connector_alloc();
1338 if (!intel_connector
) {
1343 encoder
= &intel_dsi
->base
;
1344 intel_dsi
->attached_connector
= intel_connector
;
1345 connector
= &intel_connector
->base
;
1347 /* register DSI encoder with DRM subsystem */
1348 drm_encoder_init(dev
, &encoder
->base
, &gen11_dsi_encoder_funcs
,
1349 DRM_MODE_ENCODER_DSI
, "DSI %c", port_name(port
));
1351 encoder
->pre_pll_enable
= gen11_dsi_pre_pll_enable
;
1352 encoder
->pre_enable
= gen11_dsi_pre_enable
;
1353 encoder
->disable
= gen11_dsi_disable
;
1354 encoder
->port
= port
;
1355 encoder
->get_config
= gen11_dsi_get_config
;
1356 encoder
->compute_config
= gen11_dsi_compute_config
;
1357 encoder
->get_hw_state
= gen11_dsi_get_hw_state
;
1358 encoder
->type
= INTEL_OUTPUT_DSI
;
1359 encoder
->cloneable
= 0;
1360 encoder
->crtc_mask
= BIT(PIPE_A
) | BIT(PIPE_B
) | BIT(PIPE_C
);
1361 encoder
->power_domain
= POWER_DOMAIN_PORT_DSI
;
1362 encoder
->get_power_domains
= gen11_dsi_get_power_domains
;
1364 /* register DSI connector with DRM subsystem */
1365 drm_connector_init(dev
, connector
, &gen11_dsi_connector_funcs
,
1366 DRM_MODE_CONNECTOR_DSI
);
1367 drm_connector_helper_add(connector
, &gen11_dsi_connector_helper_funcs
);
1368 connector
->display_info
.subpixel_order
= SubPixelHorizontalRGB
;
1369 connector
->interlace_allowed
= false;
1370 connector
->doublescan_allowed
= false;
1371 intel_connector
->get_hw_state
= intel_connector_get_hw_state
;
1373 /* attach connector to encoder */
1374 intel_connector_attach_encoder(intel_connector
, encoder
);
1376 /* fill mode info from VBT */
1377 mutex_lock(&dev
->mode_config
.mutex
);
1378 intel_dsi_vbt_get_modes(intel_dsi
);
1379 list_for_each_entry(scan
, &connector
->probed_modes
, head
) {
1380 if (scan
->type
& DRM_MODE_TYPE_PREFERRED
) {
1381 fixed_mode
= drm_mode_duplicate(dev
, scan
);
1385 mutex_unlock(&dev
->mode_config
.mutex
);
1388 DRM_ERROR("DSI fixed mode info missing\n");
1392 connector
->display_info
.width_mm
= fixed_mode
->width_mm
;
1393 connector
->display_info
.height_mm
= fixed_mode
->height_mm
;
1394 intel_panel_init(&intel_connector
->panel
, fixed_mode
, NULL
);
1395 intel_panel_setup_backlight(connector
, INVALID_PIPE
);
1398 if (dev_priv
->vbt
.dsi
.config
->dual_link
)
1399 intel_dsi
->ports
= BIT(PORT_A
) | BIT(PORT_B
);
1401 intel_dsi
->ports
= BIT(port
);
1403 intel_dsi
->dcs_backlight_ports
= dev_priv
->vbt
.dsi
.bl_ports
;
1404 intel_dsi
->dcs_cabc_ports
= dev_priv
->vbt
.dsi
.cabc_ports
;
1406 for_each_dsi_port(port
, intel_dsi
->ports
) {
1407 struct intel_dsi_host
*host
;
1409 host
= intel_dsi_host_init(intel_dsi
, &gen11_dsi_host_ops
, port
);
1413 intel_dsi
->dsi_hosts
[port
] = host
;
1416 if (!intel_dsi_vbt_init(intel_dsi
, MIPI_DSI_GENERIC_PANEL_ID
)) {
1417 DRM_DEBUG_KMS("no device found\n");
1424 drm_encoder_cleanup(&encoder
->base
);
1426 kfree(intel_connector
);