/*
 * Copyright © 2018 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 *
 * Authors:
 *   Madhav Chauhan <madhav.chauhan@intel.com>
 *   Jani Nikula <jani.nikula@intel.com>
 */
28 #include <drm/drm_mipi_dsi.h>
29 #include "intel_dsi.h"
31 static inline int header_credits_available(struct drm_i915_private
*dev_priv
,
32 enum transcoder dsi_trans
)
34 return (I915_READ(DSI_CMD_TXCTL(dsi_trans
)) & FREE_HEADER_CREDIT_MASK
)
35 >> FREE_HEADER_CREDIT_SHIFT
;
38 static inline int payload_credits_available(struct drm_i915_private
*dev_priv
,
39 enum transcoder dsi_trans
)
41 return (I915_READ(DSI_CMD_TXCTL(dsi_trans
)) & FREE_PLOAD_CREDIT_MASK
)
42 >> FREE_PLOAD_CREDIT_SHIFT
;
45 static void wait_for_header_credits(struct drm_i915_private
*dev_priv
,
46 enum transcoder dsi_trans
)
48 if (wait_for_us(header_credits_available(dev_priv
, dsi_trans
) >=
49 MAX_HEADER_CREDIT
, 100))
50 DRM_ERROR("DSI header credits not released\n");
53 static void wait_for_payload_credits(struct drm_i915_private
*dev_priv
,
54 enum transcoder dsi_trans
)
56 if (wait_for_us(payload_credits_available(dev_priv
, dsi_trans
) >=
57 MAX_PLOAD_CREDIT
, 100))
58 DRM_ERROR("DSI payload credits not released\n");
61 static enum transcoder
dsi_port_to_transcoder(enum port port
)
64 return TRANSCODER_DSI_0
;
66 return TRANSCODER_DSI_1
;
69 static void wait_for_cmds_dispatched_to_panel(struct intel_encoder
*encoder
)
71 struct drm_i915_private
*dev_priv
= to_i915(encoder
->base
.dev
);
72 struct intel_dsi
*intel_dsi
= enc_to_intel_dsi(&encoder
->base
);
73 struct mipi_dsi_device
*dsi
;
75 enum transcoder dsi_trans
;
78 /* wait for header/payload credits to be released */
79 for_each_dsi_port(port
, intel_dsi
->ports
) {
80 dsi_trans
= dsi_port_to_transcoder(port
);
81 wait_for_header_credits(dev_priv
, dsi_trans
);
82 wait_for_payload_credits(dev_priv
, dsi_trans
);
85 /* send nop DCS command */
86 for_each_dsi_port(port
, intel_dsi
->ports
) {
87 dsi
= intel_dsi
->dsi_hosts
[port
]->device
;
88 dsi
->mode_flags
|= MIPI_DSI_MODE_LPM
;
90 ret
= mipi_dsi_dcs_nop(dsi
);
92 DRM_ERROR("error sending DCS NOP command\n");
95 /* wait for header credits to be released */
96 for_each_dsi_port(port
, intel_dsi
->ports
) {
97 dsi_trans
= dsi_port_to_transcoder(port
);
98 wait_for_header_credits(dev_priv
, dsi_trans
);
101 /* wait for LP TX in progress bit to be cleared */
102 for_each_dsi_port(port
, intel_dsi
->ports
) {
103 dsi_trans
= dsi_port_to_transcoder(port
);
104 if (wait_for_us(!(I915_READ(DSI_LP_MSG(dsi_trans
)) &
105 LPTX_IN_PROGRESS
), 20))
106 DRM_ERROR("LPTX bit not cleared\n");
110 static void dsi_program_swing_and_deemphasis(struct intel_encoder
*encoder
)
112 struct drm_i915_private
*dev_priv
= to_i915(encoder
->base
.dev
);
113 struct intel_dsi
*intel_dsi
= enc_to_intel_dsi(&encoder
->base
);
118 for_each_dsi_port(port
, intel_dsi
->ports
) {
121 * Program voltage swing and pre-emphasis level values as per
122 * table in BSPEC under DDI buffer programing
124 tmp
= I915_READ(ICL_PORT_TX_DW5_LN0(port
));
125 tmp
&= ~(SCALING_MODE_SEL_MASK
| RTERM_SELECT_MASK
);
126 tmp
|= SCALING_MODE_SEL(0x2);
127 tmp
|= TAP2_DISABLE
| TAP3_DISABLE
;
128 tmp
|= RTERM_SELECT(0x6);
129 I915_WRITE(ICL_PORT_TX_DW5_GRP(port
), tmp
);
131 tmp
= I915_READ(ICL_PORT_TX_DW5_AUX(port
));
132 tmp
&= ~(SCALING_MODE_SEL_MASK
| RTERM_SELECT_MASK
);
133 tmp
|= SCALING_MODE_SEL(0x2);
134 tmp
|= TAP2_DISABLE
| TAP3_DISABLE
;
135 tmp
|= RTERM_SELECT(0x6);
136 I915_WRITE(ICL_PORT_TX_DW5_AUX(port
), tmp
);
138 tmp
= I915_READ(ICL_PORT_TX_DW2_LN0(port
));
139 tmp
&= ~(SWING_SEL_LOWER_MASK
| SWING_SEL_UPPER_MASK
|
141 tmp
|= SWING_SEL_UPPER(0x2);
142 tmp
|= SWING_SEL_LOWER(0x2);
143 tmp
|= RCOMP_SCALAR(0x98);
144 I915_WRITE(ICL_PORT_TX_DW2_GRP(port
), tmp
);
146 tmp
= I915_READ(ICL_PORT_TX_DW2_AUX(port
));
147 tmp
&= ~(SWING_SEL_LOWER_MASK
| SWING_SEL_UPPER_MASK
|
149 tmp
|= SWING_SEL_UPPER(0x2);
150 tmp
|= SWING_SEL_LOWER(0x2);
151 tmp
|= RCOMP_SCALAR(0x98);
152 I915_WRITE(ICL_PORT_TX_DW2_AUX(port
), tmp
);
154 tmp
= I915_READ(ICL_PORT_TX_DW4_AUX(port
));
155 tmp
&= ~(POST_CURSOR_1_MASK
| POST_CURSOR_2_MASK
|
157 tmp
|= POST_CURSOR_1(0x0);
158 tmp
|= POST_CURSOR_2(0x0);
159 tmp
|= CURSOR_COEFF(0x3f);
160 I915_WRITE(ICL_PORT_TX_DW4_AUX(port
), tmp
);
162 for (lane
= 0; lane
<= 3; lane
++) {
163 /* Bspec: must not use GRP register for write */
164 tmp
= I915_READ(ICL_PORT_TX_DW4_LN(port
, lane
));
165 tmp
&= ~(POST_CURSOR_1_MASK
| POST_CURSOR_2_MASK
|
167 tmp
|= POST_CURSOR_1(0x0);
168 tmp
|= POST_CURSOR_2(0x0);
169 tmp
|= CURSOR_COEFF(0x3f);
170 I915_WRITE(ICL_PORT_TX_DW4_LN(port
, lane
), tmp
);
175 static void gen11_dsi_program_esc_clk_div(struct intel_encoder
*encoder
)
177 struct drm_i915_private
*dev_priv
= to_i915(encoder
->base
.dev
);
178 struct intel_dsi
*intel_dsi
= enc_to_intel_dsi(&encoder
->base
);
180 u32 bpp
= mipi_dsi_pixel_format_to_bpp(intel_dsi
->pixel_format
);
181 u32 afe_clk_khz
; /* 8X Clock */
184 afe_clk_khz
= DIV_ROUND_CLOSEST(intel_dsi
->pclk
* bpp
,
185 intel_dsi
->lane_count
);
187 esc_clk_div_m
= DIV_ROUND_UP(afe_clk_khz
, DSI_MAX_ESC_CLK
);
189 for_each_dsi_port(port
, intel_dsi
->ports
) {
190 I915_WRITE(ICL_DSI_ESC_CLK_DIV(port
),
191 esc_clk_div_m
& ICL_ESC_CLK_DIV_MASK
);
192 POSTING_READ(ICL_DSI_ESC_CLK_DIV(port
));
195 for_each_dsi_port(port
, intel_dsi
->ports
) {
196 I915_WRITE(ICL_DPHY_ESC_CLK_DIV(port
),
197 esc_clk_div_m
& ICL_ESC_CLK_DIV_MASK
);
198 POSTING_READ(ICL_DPHY_ESC_CLK_DIV(port
));
202 static void gen11_dsi_enable_io_power(struct intel_encoder
*encoder
)
204 struct drm_i915_private
*dev_priv
= to_i915(encoder
->base
.dev
);
205 struct intel_dsi
*intel_dsi
= enc_to_intel_dsi(&encoder
->base
);
209 for_each_dsi_port(port
, intel_dsi
->ports
) {
210 tmp
= I915_READ(ICL_DSI_IO_MODECTL(port
));
211 tmp
|= COMBO_PHY_MODE_DSI
;
212 I915_WRITE(ICL_DSI_IO_MODECTL(port
), tmp
);
215 for_each_dsi_port(port
, intel_dsi
->ports
) {
216 intel_display_power_get(dev_priv
, port
== PORT_A
?
217 POWER_DOMAIN_PORT_DDI_A_IO
:
218 POWER_DOMAIN_PORT_DDI_B_IO
);
222 static void gen11_dsi_power_up_lanes(struct intel_encoder
*encoder
)
224 struct drm_i915_private
*dev_priv
= to_i915(encoder
->base
.dev
);
225 struct intel_dsi
*intel_dsi
= enc_to_intel_dsi(&encoder
->base
);
230 switch (intel_dsi
->lane_count
) {
232 lane_mask
= PWR_DOWN_LN_3_1_0
;
235 lane_mask
= PWR_DOWN_LN_3_1
;
238 lane_mask
= PWR_DOWN_LN_3
;
242 lane_mask
= PWR_UP_ALL_LANES
;
246 for_each_dsi_port(port
, intel_dsi
->ports
) {
247 tmp
= I915_READ(ICL_PORT_CL_DW10(port
));
248 tmp
&= ~PWR_DOWN_LN_MASK
;
249 I915_WRITE(ICL_PORT_CL_DW10(port
), tmp
| lane_mask
);
253 static void gen11_dsi_config_phy_lanes_sequence(struct intel_encoder
*encoder
)
255 struct drm_i915_private
*dev_priv
= to_i915(encoder
->base
.dev
);
256 struct intel_dsi
*intel_dsi
= enc_to_intel_dsi(&encoder
->base
);
261 /* Step 4b(i) set loadgen select for transmit and aux lanes */
262 for_each_dsi_port(port
, intel_dsi
->ports
) {
263 tmp
= I915_READ(ICL_PORT_TX_DW4_AUX(port
));
264 tmp
&= ~LOADGEN_SELECT
;
265 I915_WRITE(ICL_PORT_TX_DW4_AUX(port
), tmp
);
266 for (lane
= 0; lane
<= 3; lane
++) {
267 tmp
= I915_READ(ICL_PORT_TX_DW4_LN(port
, lane
));
268 tmp
&= ~LOADGEN_SELECT
;
270 tmp
|= LOADGEN_SELECT
;
271 I915_WRITE(ICL_PORT_TX_DW4_LN(port
, lane
), tmp
);
275 /* Step 4b(ii) set latency optimization for transmit and aux lanes */
276 for_each_dsi_port(port
, intel_dsi
->ports
) {
277 tmp
= I915_READ(ICL_PORT_TX_DW2_AUX(port
));
278 tmp
&= ~FRC_LATENCY_OPTIM_MASK
;
279 tmp
|= FRC_LATENCY_OPTIM_VAL(0x5);
280 I915_WRITE(ICL_PORT_TX_DW2_AUX(port
), tmp
);
281 tmp
= I915_READ(ICL_PORT_TX_DW2_LN0(port
));
282 tmp
&= ~FRC_LATENCY_OPTIM_MASK
;
283 tmp
|= FRC_LATENCY_OPTIM_VAL(0x5);
284 I915_WRITE(ICL_PORT_TX_DW2_GRP(port
), tmp
);
289 static void gen11_dsi_voltage_swing_program_seq(struct intel_encoder
*encoder
)
291 struct drm_i915_private
*dev_priv
= to_i915(encoder
->base
.dev
);
292 struct intel_dsi
*intel_dsi
= enc_to_intel_dsi(&encoder
->base
);
296 /* clear common keeper enable bit */
297 for_each_dsi_port(port
, intel_dsi
->ports
) {
298 tmp
= I915_READ(ICL_PORT_PCS_DW1_LN0(port
));
299 tmp
&= ~COMMON_KEEPER_EN
;
300 I915_WRITE(ICL_PORT_PCS_DW1_GRP(port
), tmp
);
301 tmp
= I915_READ(ICL_PORT_PCS_DW1_AUX(port
));
302 tmp
&= ~COMMON_KEEPER_EN
;
303 I915_WRITE(ICL_PORT_PCS_DW1_AUX(port
), tmp
);
307 * Set SUS Clock Config bitfield to 11b
308 * Note: loadgen select program is done
309 * as part of lane phy sequence configuration
311 for_each_dsi_port(port
, intel_dsi
->ports
) {
312 tmp
= I915_READ(ICL_PORT_CL_DW5(port
));
313 tmp
|= SUS_CLOCK_CONFIG
;
314 I915_WRITE(ICL_PORT_CL_DW5(port
), tmp
);
317 /* Clear training enable to change swing values */
318 for_each_dsi_port(port
, intel_dsi
->ports
) {
319 tmp
= I915_READ(ICL_PORT_TX_DW5_LN0(port
));
320 tmp
&= ~TX_TRAINING_EN
;
321 I915_WRITE(ICL_PORT_TX_DW5_GRP(port
), tmp
);
322 tmp
= I915_READ(ICL_PORT_TX_DW5_AUX(port
));
323 tmp
&= ~TX_TRAINING_EN
;
324 I915_WRITE(ICL_PORT_TX_DW5_AUX(port
), tmp
);
327 /* Program swing and de-emphasis */
328 dsi_program_swing_and_deemphasis(encoder
);
330 /* Set training enable to trigger update */
331 for_each_dsi_port(port
, intel_dsi
->ports
) {
332 tmp
= I915_READ(ICL_PORT_TX_DW5_LN0(port
));
333 tmp
|= TX_TRAINING_EN
;
334 I915_WRITE(ICL_PORT_TX_DW5_GRP(port
), tmp
);
335 tmp
= I915_READ(ICL_PORT_TX_DW5_AUX(port
));
336 tmp
|= TX_TRAINING_EN
;
337 I915_WRITE(ICL_PORT_TX_DW5_AUX(port
), tmp
);
341 static void gen11_dsi_enable_ddi_buffer(struct intel_encoder
*encoder
)
343 struct drm_i915_private
*dev_priv
= to_i915(encoder
->base
.dev
);
344 struct intel_dsi
*intel_dsi
= enc_to_intel_dsi(&encoder
->base
);
348 for_each_dsi_port(port
, intel_dsi
->ports
) {
349 tmp
= I915_READ(DDI_BUF_CTL(port
));
350 tmp
|= DDI_BUF_CTL_ENABLE
;
351 I915_WRITE(DDI_BUF_CTL(port
), tmp
);
353 if (wait_for_us(!(I915_READ(DDI_BUF_CTL(port
)) &
356 DRM_ERROR("DDI port:%c buffer idle\n", port_name(port
));
360 static void gen11_dsi_setup_dphy_timings(struct intel_encoder
*encoder
)
362 struct drm_i915_private
*dev_priv
= to_i915(encoder
->base
.dev
);
363 struct intel_dsi
*intel_dsi
= enc_to_intel_dsi(&encoder
->base
);
367 /* Program T-INIT master registers */
368 for_each_dsi_port(port
, intel_dsi
->ports
) {
369 tmp
= I915_READ(ICL_DSI_T_INIT_MASTER(port
));
370 tmp
&= ~MASTER_INIT_TIMER_MASK
;
371 tmp
|= intel_dsi
->init_count
;
372 I915_WRITE(ICL_DSI_T_INIT_MASTER(port
), tmp
);
375 /* Program DPHY clock lanes timings */
376 for_each_dsi_port(port
, intel_dsi
->ports
) {
377 I915_WRITE(DPHY_CLK_TIMING_PARAM(port
), intel_dsi
->dphy_reg
);
379 /* shadow register inside display core */
380 I915_WRITE(DSI_CLK_TIMING_PARAM(port
), intel_dsi
->dphy_reg
);
383 /* Program DPHY data lanes timings */
384 for_each_dsi_port(port
, intel_dsi
->ports
) {
385 I915_WRITE(DPHY_DATA_TIMING_PARAM(port
),
386 intel_dsi
->dphy_data_lane_reg
);
388 /* shadow register inside display core */
389 I915_WRITE(DSI_DATA_TIMING_PARAM(port
),
390 intel_dsi
->dphy_data_lane_reg
);
394 * If DSI link operating at or below an 800 MHz,
395 * TA_SURE should be override and programmed to
396 * a value '0' inside TA_PARAM_REGISTERS otherwise
397 * leave all fields at HW default values.
399 if (intel_dsi_bitrate(intel_dsi
) <= 800000) {
400 for_each_dsi_port(port
, intel_dsi
->ports
) {
401 tmp
= I915_READ(DPHY_TA_TIMING_PARAM(port
));
402 tmp
&= ~TA_SURE_MASK
;
403 tmp
|= TA_SURE_OVERRIDE
| TA_SURE(0);
404 I915_WRITE(DPHY_TA_TIMING_PARAM(port
), tmp
);
406 /* shadow register inside display core */
407 tmp
= I915_READ(DSI_TA_TIMING_PARAM(port
));
408 tmp
&= ~TA_SURE_MASK
;
409 tmp
|= TA_SURE_OVERRIDE
| TA_SURE(0);
410 I915_WRITE(DSI_TA_TIMING_PARAM(port
), tmp
);
416 gen11_dsi_configure_transcoder(struct intel_encoder
*encoder
,
417 const struct intel_crtc_state
*pipe_config
)
419 struct drm_i915_private
*dev_priv
= to_i915(encoder
->base
.dev
);
420 struct intel_dsi
*intel_dsi
= enc_to_intel_dsi(&encoder
->base
);
421 struct intel_crtc
*intel_crtc
= to_intel_crtc(pipe_config
->base
.crtc
);
422 enum pipe pipe
= intel_crtc
->pipe
;
425 enum transcoder dsi_trans
;
427 for_each_dsi_port(port
, intel_dsi
->ports
) {
428 dsi_trans
= dsi_port_to_transcoder(port
);
429 tmp
= I915_READ(DSI_TRANS_FUNC_CONF(dsi_trans
));
431 if (intel_dsi
->eotp_pkt
)
432 tmp
&= ~EOTP_DISABLED
;
434 tmp
|= EOTP_DISABLED
;
436 /* enable link calibration if freq > 1.5Gbps */
437 if (intel_dsi_bitrate(intel_dsi
) >= 1500 * 1000) {
438 tmp
&= ~LINK_CALIBRATION_MASK
;
439 tmp
|= CALIBRATION_ENABLED_INITIAL_ONLY
;
442 /* configure continuous clock */
443 tmp
&= ~CONTINUOUS_CLK_MASK
;
444 if (intel_dsi
->clock_stop
)
445 tmp
|= CLK_ENTER_LP_AFTER_DATA
;
447 tmp
|= CLK_HS_CONTINUOUS
;
449 /* configure buffer threshold limit to minimum */
450 tmp
&= ~PIX_BUF_THRESHOLD_MASK
;
451 tmp
|= PIX_BUF_THRESHOLD_1_4
;
453 /* set virtual channel to '0' */
454 tmp
&= ~PIX_VIRT_CHAN_MASK
;
455 tmp
|= PIX_VIRT_CHAN(0);
457 /* program BGR transmission */
458 if (intel_dsi
->bgr_enabled
)
459 tmp
|= BGR_TRANSMISSION
;
461 /* select pixel format */
462 tmp
&= ~PIX_FMT_MASK
;
463 switch (intel_dsi
->pixel_format
) {
465 MISSING_CASE(intel_dsi
->pixel_format
);
467 case MIPI_DSI_FMT_RGB565
:
468 tmp
|= PIX_FMT_RGB565
;
470 case MIPI_DSI_FMT_RGB666_PACKED
:
471 tmp
|= PIX_FMT_RGB666_PACKED
;
473 case MIPI_DSI_FMT_RGB666
:
474 tmp
|= PIX_FMT_RGB666_LOOSE
;
476 case MIPI_DSI_FMT_RGB888
:
477 tmp
|= PIX_FMT_RGB888
;
481 /* program DSI operation mode */
482 if (is_vid_mode(intel_dsi
)) {
483 tmp
&= ~OP_MODE_MASK
;
484 switch (intel_dsi
->video_mode_format
) {
486 MISSING_CASE(intel_dsi
->video_mode_format
);
488 case VIDEO_MODE_NON_BURST_WITH_SYNC_EVENTS
:
489 tmp
|= VIDEO_MODE_SYNC_EVENT
;
491 case VIDEO_MODE_NON_BURST_WITH_SYNC_PULSE
:
492 tmp
|= VIDEO_MODE_SYNC_PULSE
;
497 I915_WRITE(DSI_TRANS_FUNC_CONF(dsi_trans
), tmp
);
500 /* enable port sync mode if dual link */
501 if (intel_dsi
->dual_link
) {
502 for_each_dsi_port(port
, intel_dsi
->ports
) {
503 dsi_trans
= dsi_port_to_transcoder(port
);
504 tmp
= I915_READ(TRANS_DDI_FUNC_CTL2(dsi_trans
));
505 tmp
|= PORT_SYNC_MODE_ENABLE
;
506 I915_WRITE(TRANS_DDI_FUNC_CTL2(dsi_trans
), tmp
);
509 //TODO: configure DSS_CTL1
512 for_each_dsi_port(port
, intel_dsi
->ports
) {
513 dsi_trans
= dsi_port_to_transcoder(port
);
515 /* select data lane width */
516 tmp
= I915_READ(TRANS_DDI_FUNC_CTL(dsi_trans
));
517 tmp
&= ~DDI_PORT_WIDTH_MASK
;
518 tmp
|= DDI_PORT_WIDTH(intel_dsi
->lane_count
);
520 /* select input pipe */
521 tmp
&= ~TRANS_DDI_EDP_INPUT_MASK
;
527 tmp
|= TRANS_DDI_EDP_INPUT_A_ON
;
530 tmp
|= TRANS_DDI_EDP_INPUT_B_ONOFF
;
533 tmp
|= TRANS_DDI_EDP_INPUT_C_ONOFF
;
537 /* enable DDI buffer */
538 tmp
|= TRANS_DDI_FUNC_ENABLE
;
539 I915_WRITE(TRANS_DDI_FUNC_CTL(dsi_trans
), tmp
);
542 /* wait for link ready */
543 for_each_dsi_port(port
, intel_dsi
->ports
) {
544 dsi_trans
= dsi_port_to_transcoder(port
);
545 if (wait_for_us((I915_READ(DSI_TRANS_FUNC_CONF(dsi_trans
)) &
547 DRM_ERROR("DSI link not ready\n");
552 gen11_dsi_set_transcoder_timings(struct intel_encoder
*encoder
,
553 const struct intel_crtc_state
*pipe_config
)
555 struct drm_i915_private
*dev_priv
= to_i915(encoder
->base
.dev
);
556 struct intel_dsi
*intel_dsi
= enc_to_intel_dsi(&encoder
->base
);
557 const struct drm_display_mode
*adjusted_mode
=
558 &pipe_config
->base
.adjusted_mode
;
560 enum transcoder dsi_trans
;
561 /* horizontal timings */
562 u16 htotal
, hactive
, hsync_start
, hsync_end
, hsync_size
;
563 u16 hfront_porch
, hback_porch
;
564 /* vertical timings */
565 u16 vtotal
, vactive
, vsync_start
, vsync_end
, vsync_shift
;
567 hactive
= adjusted_mode
->crtc_hdisplay
;
568 htotal
= adjusted_mode
->crtc_htotal
;
569 hsync_start
= adjusted_mode
->crtc_hsync_start
;
570 hsync_end
= adjusted_mode
->crtc_hsync_end
;
571 hsync_size
= hsync_end
- hsync_start
;
572 hfront_porch
= (adjusted_mode
->crtc_hsync_start
-
573 adjusted_mode
->crtc_hdisplay
);
574 hback_porch
= (adjusted_mode
->crtc_htotal
-
575 adjusted_mode
->crtc_hsync_end
);
576 vactive
= adjusted_mode
->crtc_vdisplay
;
577 vtotal
= adjusted_mode
->crtc_vtotal
;
578 vsync_start
= adjusted_mode
->crtc_vsync_start
;
579 vsync_end
= adjusted_mode
->crtc_vsync_end
;
580 vsync_shift
= hsync_start
- htotal
/ 2;
582 if (intel_dsi
->dual_link
) {
584 if (intel_dsi
->dual_link
== DSI_DUAL_LINK_FRONT_BACK
)
585 hactive
+= intel_dsi
->pixel_overlap
;
589 /* minimum hactive as per bspec: 256 pixels */
590 if (adjusted_mode
->crtc_hdisplay
< 256)
591 DRM_ERROR("hactive is less then 256 pixels\n");
593 /* if RGB666 format, then hactive must be multiple of 4 pixels */
594 if (intel_dsi
->pixel_format
== MIPI_DSI_FMT_RGB666
&& hactive
% 4 != 0)
595 DRM_ERROR("hactive pixels are not multiple of 4\n");
597 /* program TRANS_HTOTAL register */
598 for_each_dsi_port(port
, intel_dsi
->ports
) {
599 dsi_trans
= dsi_port_to_transcoder(port
);
600 I915_WRITE(HTOTAL(dsi_trans
),
601 (hactive
- 1) | ((htotal
- 1) << 16));
604 /* TRANS_HSYNC register to be programmed only for video mode */
605 if (intel_dsi
->operation_mode
== INTEL_DSI_VIDEO_MODE
) {
606 if (intel_dsi
->video_mode_format
==
607 VIDEO_MODE_NON_BURST_WITH_SYNC_PULSE
) {
608 /* BSPEC: hsync size should be atleast 16 pixels */
610 DRM_ERROR("hsync size < 16 pixels\n");
613 if (hback_porch
< 16)
614 DRM_ERROR("hback porch < 16 pixels\n");
616 if (intel_dsi
->dual_link
) {
621 for_each_dsi_port(port
, intel_dsi
->ports
) {
622 dsi_trans
= dsi_port_to_transcoder(port
);
623 I915_WRITE(HSYNC(dsi_trans
),
624 (hsync_start
- 1) | ((hsync_end
- 1) << 16));
628 /* program TRANS_VTOTAL register */
629 for_each_dsi_port(port
, intel_dsi
->ports
) {
630 dsi_trans
= dsi_port_to_transcoder(port
);
632 * FIXME: Programing this by assuming progressive mode, since
633 * non-interlaced info from VBT is not saved inside
634 * struct drm_display_mode.
635 * For interlace mode: program required pixel minus 2
637 I915_WRITE(VTOTAL(dsi_trans
),
638 (vactive
- 1) | ((vtotal
- 1) << 16));
641 if (vsync_end
< vsync_start
|| vsync_end
> vtotal
)
642 DRM_ERROR("Invalid vsync_end value\n");
644 if (vsync_start
< vactive
)
645 DRM_ERROR("vsync_start less than vactive\n");
647 /* program TRANS_VSYNC register */
648 for_each_dsi_port(port
, intel_dsi
->ports
) {
649 dsi_trans
= dsi_port_to_transcoder(port
);
650 I915_WRITE(VSYNC(dsi_trans
),
651 (vsync_start
- 1) | ((vsync_end
- 1) << 16));
655 * FIXME: It has to be programmed only for interlaced
656 * modes. Put the check condition here once interlaced
657 * info available as described above.
658 * program TRANS_VSYNCSHIFT register
660 for_each_dsi_port(port
, intel_dsi
->ports
) {
661 dsi_trans
= dsi_port_to_transcoder(port
);
662 I915_WRITE(VSYNCSHIFT(dsi_trans
), vsync_shift
);
666 static void gen11_dsi_enable_transcoder(struct intel_encoder
*encoder
)
668 struct drm_i915_private
*dev_priv
= to_i915(encoder
->base
.dev
);
669 struct intel_dsi
*intel_dsi
= enc_to_intel_dsi(&encoder
->base
);
671 enum transcoder dsi_trans
;
674 for_each_dsi_port(port
, intel_dsi
->ports
) {
675 dsi_trans
= dsi_port_to_transcoder(port
);
676 tmp
= I915_READ(PIPECONF(dsi_trans
));
677 tmp
|= PIPECONF_ENABLE
;
678 I915_WRITE(PIPECONF(dsi_trans
), tmp
);
680 /* wait for transcoder to be enabled */
681 if (intel_wait_for_register(dev_priv
, PIPECONF(dsi_trans
),
682 I965_PIPECONF_ACTIVE
,
683 I965_PIPECONF_ACTIVE
, 10))
684 DRM_ERROR("DSI transcoder not enabled\n");
688 static void gen11_dsi_setup_timeouts(struct intel_encoder
*encoder
)
690 struct drm_i915_private
*dev_priv
= to_i915(encoder
->base
.dev
);
691 struct intel_dsi
*intel_dsi
= enc_to_intel_dsi(&encoder
->base
);
693 enum transcoder dsi_trans
;
694 u32 tmp
, hs_tx_timeout
, lp_rx_timeout
, ta_timeout
, divisor
, mul
;
697 * escape clock count calculation:
698 * BYTE_CLK_COUNT = TIME_NS/(8 * UI)
699 * UI (nsec) = (10^6)/Bitrate
700 * TIME_NS = (BYTE_CLK_COUNT * 8 * 10^6)/ Bitrate
701 * ESCAPE_CLK_COUNT = TIME_NS/ESC_CLK_NS
703 divisor
= intel_dsi_tlpx_ns(intel_dsi
) * intel_dsi_bitrate(intel_dsi
) * 1000;
705 hs_tx_timeout
= DIV_ROUND_UP(intel_dsi
->hs_tx_timeout
* mul
,
707 lp_rx_timeout
= DIV_ROUND_UP(intel_dsi
->lp_rx_timeout
* mul
, divisor
);
708 ta_timeout
= DIV_ROUND_UP(intel_dsi
->turn_arnd_val
* mul
, divisor
);
710 for_each_dsi_port(port
, intel_dsi
->ports
) {
711 dsi_trans
= dsi_port_to_transcoder(port
);
713 /* program hst_tx_timeout */
714 tmp
= I915_READ(DSI_HSTX_TO(dsi_trans
));
715 tmp
&= ~HSTX_TIMEOUT_VALUE_MASK
;
716 tmp
|= HSTX_TIMEOUT_VALUE(hs_tx_timeout
);
717 I915_WRITE(DSI_HSTX_TO(dsi_trans
), tmp
);
719 /* FIXME: DSI_CALIB_TO */
721 /* program lp_rx_host timeout */
722 tmp
= I915_READ(DSI_LPRX_HOST_TO(dsi_trans
));
723 tmp
&= ~LPRX_TIMEOUT_VALUE_MASK
;
724 tmp
|= LPRX_TIMEOUT_VALUE(lp_rx_timeout
);
725 I915_WRITE(DSI_LPRX_HOST_TO(dsi_trans
), tmp
);
727 /* FIXME: DSI_PWAIT_TO */
729 /* program turn around timeout */
730 tmp
= I915_READ(DSI_TA_TO(dsi_trans
));
731 tmp
&= ~TA_TIMEOUT_VALUE_MASK
;
732 tmp
|= TA_TIMEOUT_VALUE(ta_timeout
);
733 I915_WRITE(DSI_TA_TO(dsi_trans
), tmp
);
/* Step 4 of the DSI enable sequence: bring up the DSI port and D-PHY. */
static void
gen11_dsi_enable_port_and_phy(struct intel_encoder *encoder,
			      const struct intel_crtc_state *pipe_config)
{
	/* step 4a: power up all lanes of the DDI used by DSI */
	gen11_dsi_power_up_lanes(encoder);

	/* step 4b: configure lane sequencing of the Combo-PHY transmitters */
	gen11_dsi_config_phy_lanes_sequence(encoder);

	/* step 4c: configure voltage swing and skew */
	gen11_dsi_voltage_swing_program_seq(encoder);

	/* enable DDI buffer */
	gen11_dsi_enable_ddi_buffer(encoder);

	/* setup D-PHY timings */
	gen11_dsi_setup_dphy_timings(encoder);

	/* step 4h: setup DSI protocol timeouts */
	gen11_dsi_setup_timeouts(encoder);

	/* Step (4h, 4i, 4j, 4k): Configure transcoder */
	gen11_dsi_configure_transcoder(encoder, pipe_config);
}
763 static void gen11_dsi_powerup_panel(struct intel_encoder
*encoder
)
765 struct drm_i915_private
*dev_priv
= to_i915(encoder
->base
.dev
);
766 struct intel_dsi
*intel_dsi
= enc_to_intel_dsi(&encoder
->base
);
767 struct mipi_dsi_device
*dsi
;
769 enum transcoder dsi_trans
;
773 /* set maximum return packet size */
774 for_each_dsi_port(port
, intel_dsi
->ports
) {
775 dsi_trans
= dsi_port_to_transcoder(port
);
778 * FIXME: This uses the number of DW's currently in the payload
779 * receive queue. This is probably not what we want here.
781 tmp
= I915_READ(DSI_CMD_RXCTL(dsi_trans
));
782 tmp
&= NUMBER_RX_PLOAD_DW_MASK
;
783 /* multiply "Number Rx Payload DW" by 4 to get max value */
785 dsi
= intel_dsi
->dsi_hosts
[port
]->device
;
786 ret
= mipi_dsi_set_maximum_return_packet_size(dsi
, tmp
);
788 DRM_ERROR("error setting max return pkt size%d\n", tmp
);
791 /* panel power on related mipi dsi vbt sequences */
792 intel_dsi_vbt_exec_sequence(intel_dsi
, MIPI_SEQ_POWER_ON
);
793 intel_dsi_msleep(intel_dsi
, intel_dsi
->panel_on_delay
);
794 intel_dsi_vbt_exec_sequence(intel_dsi
, MIPI_SEQ_DEASSERT_RESET
);
795 intel_dsi_vbt_exec_sequence(intel_dsi
, MIPI_SEQ_INIT_OTP
);
796 intel_dsi_vbt_exec_sequence(intel_dsi
, MIPI_SEQ_DISPLAY_ON
);
798 /* ensure all panel commands dispatched before enabling transcoder */
799 wait_for_cmds_dispatched_to_panel(encoder
);
802 static void __attribute__((unused
))
803 gen11_dsi_pre_enable(struct intel_encoder
*encoder
,
804 const struct intel_crtc_state
*pipe_config
,
805 const struct drm_connector_state
*conn_state
)
807 struct intel_dsi
*intel_dsi
= enc_to_intel_dsi(&encoder
->base
);
809 /* step2: enable IO power */
810 gen11_dsi_enable_io_power(encoder
);
812 /* step3: enable DSI PLL */
813 gen11_dsi_program_esc_clk_div(encoder
);
815 /* step4: enable DSI port and DPHY */
816 gen11_dsi_enable_port_and_phy(encoder
, pipe_config
);
818 /* step5: program and powerup panel */
819 gen11_dsi_powerup_panel(encoder
);
821 /* step6c: configure transcoder timings */
822 gen11_dsi_set_transcoder_timings(encoder
, pipe_config
);
824 /* step6d: enable dsi transcoder */
825 gen11_dsi_enable_transcoder(encoder
);
827 /* step7: enable backlight */
828 intel_panel_enable_backlight(pipe_config
, conn_state
);
829 intel_dsi_vbt_exec_sequence(intel_dsi
, MIPI_SEQ_BACKLIGHT_ON
);
832 static void gen11_dsi_disable_transcoder(struct intel_encoder
*encoder
)
834 struct drm_i915_private
*dev_priv
= to_i915(encoder
->base
.dev
);
835 struct intel_dsi
*intel_dsi
= enc_to_intel_dsi(&encoder
->base
);
837 enum transcoder dsi_trans
;
840 for_each_dsi_port(port
, intel_dsi
->ports
) {
841 dsi_trans
= dsi_port_to_transcoder(port
);
843 /* disable transcoder */
844 tmp
= I915_READ(PIPECONF(dsi_trans
));
845 tmp
&= ~PIPECONF_ENABLE
;
846 I915_WRITE(PIPECONF(dsi_trans
), tmp
);
848 /* wait for transcoder to be disabled */
849 if (intel_wait_for_register(dev_priv
, PIPECONF(dsi_trans
),
850 I965_PIPECONF_ACTIVE
, 0, 50))
851 DRM_ERROR("DSI trancoder not disabled\n");
855 static void gen11_dsi_powerdown_panel(struct intel_encoder
*encoder
)
857 struct intel_dsi
*intel_dsi
= enc_to_intel_dsi(&encoder
->base
);
859 intel_dsi_vbt_exec_sequence(intel_dsi
, MIPI_SEQ_DISPLAY_OFF
);
860 intel_dsi_vbt_exec_sequence(intel_dsi
, MIPI_SEQ_ASSERT_RESET
);
861 intel_dsi_vbt_exec_sequence(intel_dsi
, MIPI_SEQ_POWER_OFF
);
863 /* ensure cmds dispatched to panel */
864 wait_for_cmds_dispatched_to_panel(encoder
);
867 static void gen11_dsi_deconfigure_trancoder(struct intel_encoder
*encoder
)
869 struct drm_i915_private
*dev_priv
= to_i915(encoder
->base
.dev
);
870 struct intel_dsi
*intel_dsi
= enc_to_intel_dsi(&encoder
->base
);
872 enum transcoder dsi_trans
;
875 /* put dsi link in ULPS */
876 for_each_dsi_port(port
, intel_dsi
->ports
) {
877 dsi_trans
= dsi_port_to_transcoder(port
);
878 tmp
= I915_READ(DSI_LP_MSG(dsi_trans
));
879 tmp
|= LINK_ENTER_ULPS
;
880 tmp
&= ~LINK_ULPS_TYPE_LP11
;
881 I915_WRITE(DSI_LP_MSG(dsi_trans
), tmp
);
883 if (wait_for_us((I915_READ(DSI_LP_MSG(dsi_trans
)) &
886 DRM_ERROR("DSI link not in ULPS\n");
889 /* disable ddi function */
890 for_each_dsi_port(port
, intel_dsi
->ports
) {
891 dsi_trans
= dsi_port_to_transcoder(port
);
892 tmp
= I915_READ(TRANS_DDI_FUNC_CTL(dsi_trans
));
893 tmp
&= ~TRANS_DDI_FUNC_ENABLE
;
894 I915_WRITE(TRANS_DDI_FUNC_CTL(dsi_trans
), tmp
);
897 /* disable port sync mode if dual link */
898 if (intel_dsi
->dual_link
) {
899 for_each_dsi_port(port
, intel_dsi
->ports
) {
900 dsi_trans
= dsi_port_to_transcoder(port
);
901 tmp
= I915_READ(TRANS_DDI_FUNC_CTL2(dsi_trans
));
902 tmp
&= ~PORT_SYNC_MODE_ENABLE
;
903 I915_WRITE(TRANS_DDI_FUNC_CTL2(dsi_trans
), tmp
);
908 static void gen11_dsi_disable_port(struct intel_encoder
*encoder
)
910 struct drm_i915_private
*dev_priv
= to_i915(encoder
->base
.dev
);
911 struct intel_dsi
*intel_dsi
= enc_to_intel_dsi(&encoder
->base
);
915 for_each_dsi_port(port
, intel_dsi
->ports
) {
916 tmp
= I915_READ(DDI_BUF_CTL(port
));
917 tmp
&= ~DDI_BUF_CTL_ENABLE
;
918 I915_WRITE(DDI_BUF_CTL(port
), tmp
);
920 if (wait_for_us((I915_READ(DDI_BUF_CTL(port
)) &
923 DRM_ERROR("DDI port:%c buffer not idle\n",
928 static void gen11_dsi_disable_io_power(struct intel_encoder
*encoder
)
930 struct drm_i915_private
*dev_priv
= to_i915(encoder
->base
.dev
);
931 struct intel_dsi
*intel_dsi
= enc_to_intel_dsi(&encoder
->base
);
935 intel_display_power_put(dev_priv
, POWER_DOMAIN_PORT_DDI_A_IO
);
937 if (intel_dsi
->dual_link
)
938 intel_display_power_put(dev_priv
, POWER_DOMAIN_PORT_DDI_B_IO
);
940 /* set mode to DDI */
941 for_each_dsi_port(port
, intel_dsi
->ports
) {
942 tmp
= I915_READ(ICL_DSI_IO_MODECTL(port
));
943 tmp
&= ~COMBO_PHY_MODE_DSI
;
944 I915_WRITE(ICL_DSI_IO_MODECTL(port
), tmp
);
948 static void __attribute__((unused
)) gen11_dsi_disable(
949 struct intel_encoder
*encoder
,
950 const struct intel_crtc_state
*old_crtc_state
,
951 const struct drm_connector_state
*old_conn_state
)
953 struct intel_dsi
*intel_dsi
= enc_to_intel_dsi(&encoder
->base
);
955 /* step1: turn off backlight */
956 intel_dsi_vbt_exec_sequence(intel_dsi
, MIPI_SEQ_BACKLIGHT_OFF
);
957 intel_panel_disable_backlight(old_conn_state
);
959 /* step2d,e: disable transcoder and wait */
960 gen11_dsi_disable_transcoder(encoder
);
962 /* step2f,g: powerdown panel */
963 gen11_dsi_powerdown_panel(encoder
);
965 /* step2h,i,j: deconfig trancoder */
966 gen11_dsi_deconfigure_trancoder(encoder
);
968 /* step3: disable port */
969 gen11_dsi_disable_port(encoder
);
971 /* step4: disable IO power */
972 gen11_dsi_disable_io_power(encoder
);