/* drivers/gpu/drm/i915/icl_dsi.c */
1 /*
2 * Copyright © 2018 Intel Corporation
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
21 * DEALINGS IN THE SOFTWARE.
22 *
23 * Authors:
24 * Madhav Chauhan <madhav.chauhan@intel.com>
25 * Jani Nikula <jani.nikula@intel.com>
26 */
27
28 #include <drm/drm_mipi_dsi.h>
29 #include "intel_dsi.h"
30
31 static inline int header_credits_available(struct drm_i915_private *dev_priv,
32 enum transcoder dsi_trans)
33 {
34 return (I915_READ(DSI_CMD_TXCTL(dsi_trans)) & FREE_HEADER_CREDIT_MASK)
35 >> FREE_HEADER_CREDIT_SHIFT;
36 }
37
38 static inline int payload_credits_available(struct drm_i915_private *dev_priv,
39 enum transcoder dsi_trans)
40 {
41 return (I915_READ(DSI_CMD_TXCTL(dsi_trans)) & FREE_PLOAD_CREDIT_MASK)
42 >> FREE_PLOAD_CREDIT_SHIFT;
43 }
44
/*
 * Poll (up to 100 us) until all header credits have been returned to
 * the transmit FIFO; on timeout only log an error, don't fail.  The
 * condition must live inside wait_for_us() so it is re-evaluated on
 * every polling iteration.
 */
static void wait_for_header_credits(struct drm_i915_private *dev_priv,
				    enum transcoder dsi_trans)
{
	if (wait_for_us(header_credits_available(dev_priv, dsi_trans) >=
			MAX_HEADER_CREDIT, 100))
		DRM_ERROR("DSI header credits not released\n");
}
52
/*
 * Poll (up to 100 us) until all payload credits have been returned to
 * the transmit FIFO; on timeout only log an error, don't fail.
 */
static void wait_for_payload_credits(struct drm_i915_private *dev_priv,
				     enum transcoder dsi_trans)
{
	if (wait_for_us(payload_credits_available(dev_priv, dsi_trans) >=
			MAX_PLOAD_CREDIT, 100))
		DRM_ERROR("DSI payload credits not released\n");
}
60
61 static enum transcoder dsi_port_to_transcoder(enum port port)
62 {
63 if (port == PORT_A)
64 return TRANSCODER_DSI_0;
65 else
66 return TRANSCODER_DSI_1;
67 }
68
/*
 * Flush all in-flight DSI commands to the panel: wait for the transmit
 * FIFO credits to be released, send a DCS NOP behind the earlier
 * commands, wait for its header credit to drain, then wait for the
 * LP TX engine to go idle.  Failures are logged, not propagated.
 */
static void wait_for_cmds_dispatched_to_panel(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	struct mipi_dsi_device *dsi;
	enum port port;
	enum transcoder dsi_trans;
	int ret;

	/* wait for header/payload credits to be released */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		wait_for_header_credits(dev_priv, dsi_trans);
		wait_for_payload_credits(dev_priv, dsi_trans);
	}

	/* send nop DCS command */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi = intel_dsi->dsi_hosts[port]->device;
		dsi->mode_flags |= MIPI_DSI_MODE_LPM;	/* NOP goes out in LP mode */
		dsi->channel = 0;			/* virtual channel 0 */
		ret = mipi_dsi_dcs_nop(dsi);
		if (ret < 0)
			DRM_ERROR("error sending DCS NOP command\n");
	}

	/* wait for header credits to be released */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		wait_for_header_credits(dev_priv, dsi_trans);
	}

	/* wait for LP TX in progress bit to be cleared */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		if (wait_for_us(!(I915_READ(DSI_LP_MSG(dsi_trans)) &
				  LPTX_IN_PROGRESS), 20))
			DRM_ERROR("LPTX bit not cleared\n");
	}
}
109
/*
 * Program voltage swing and de-emphasis for the combo PHY transmit and
 * AUX lanes.  The values (scaling mode 0x2, rterm 0x6, swing 0x2,
 * rcomp 0x98, cursor 0x3f, taps 2/3 disabled) come from the bspec DDI
 * buffer programming table; do not reorder the register writes.
 */
static void dsi_program_swing_and_deemphasis(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	enum port port;
	u32 tmp;
	int lane;

	for_each_dsi_port(port, intel_dsi->ports) {

		/*
		 * Program voltage swing and pre-emphasis level values as per
		 * table in BSPEC under DDI buffer programing
		 */
		/* TX_DW5: read LN0, broadcast to all lanes via the GRP write */
		tmp = I915_READ(ICL_PORT_TX_DW5_LN0(port));
		tmp &= ~(SCALING_MODE_SEL_MASK | RTERM_SELECT_MASK);
		tmp |= SCALING_MODE_SEL(0x2);
		tmp |= TAP2_DISABLE | TAP3_DISABLE;
		tmp |= RTERM_SELECT(0x6);
		I915_WRITE(ICL_PORT_TX_DW5_GRP(port), tmp);

		/* The AUX lane has its own copy of DW5 */
		tmp = I915_READ(ICL_PORT_TX_DW5_AUX(port));
		tmp &= ~(SCALING_MODE_SEL_MASK | RTERM_SELECT_MASK);
		tmp |= SCALING_MODE_SEL(0x2);
		tmp |= TAP2_DISABLE | TAP3_DISABLE;
		tmp |= RTERM_SELECT(0x6);
		I915_WRITE(ICL_PORT_TX_DW5_AUX(port), tmp);

		/* TX_DW2: swing select and resistor compensation scalar */
		tmp = I915_READ(ICL_PORT_TX_DW2_LN0(port));
		tmp &= ~(SWING_SEL_LOWER_MASK | SWING_SEL_UPPER_MASK |
			 RCOMP_SCALAR_MASK);
		tmp |= SWING_SEL_UPPER(0x2);
		tmp |= SWING_SEL_LOWER(0x2);
		tmp |= RCOMP_SCALAR(0x98);
		I915_WRITE(ICL_PORT_TX_DW2_GRP(port), tmp);

		tmp = I915_READ(ICL_PORT_TX_DW2_AUX(port));
		tmp &= ~(SWING_SEL_LOWER_MASK | SWING_SEL_UPPER_MASK |
			 RCOMP_SCALAR_MASK);
		tmp |= SWING_SEL_UPPER(0x2);
		tmp |= SWING_SEL_LOWER(0x2);
		tmp |= RCOMP_SCALAR(0x98);
		I915_WRITE(ICL_PORT_TX_DW2_AUX(port), tmp);

		/* TX_DW4: cursor coefficients (no post-cursor emphasis) */
		tmp = I915_READ(ICL_PORT_TX_DW4_AUX(port));
		tmp &= ~(POST_CURSOR_1_MASK | POST_CURSOR_2_MASK |
			 CURSOR_COEFF_MASK);
		tmp |= POST_CURSOR_1(0x0);
		tmp |= POST_CURSOR_2(0x0);
		tmp |= CURSOR_COEFF(0x3f);
		I915_WRITE(ICL_PORT_TX_DW4_AUX(port), tmp);

		for (lane = 0; lane <= 3; lane++) {
			/* Bspec: must not use GRP register for write */
			tmp = I915_READ(ICL_PORT_TX_DW4_LN(port, lane));
			tmp &= ~(POST_CURSOR_1_MASK | POST_CURSOR_2_MASK |
				 CURSOR_COEFF_MASK);
			tmp |= POST_CURSOR_1(0x0);
			tmp |= POST_CURSOR_2(0x0);
			tmp |= CURSOR_COEFF(0x3f);
			I915_WRITE(ICL_PORT_TX_DW4_LN(port, lane), tmp);
		}
	}
}
174
/*
 * Compute and program the escape clock divider for each DSI port.
 * The AFE (8X) clock is derived from the pixel clock, bpp and lane
 * count; the divider is chosen so the escape clock stays at or below
 * DSI_MAX_ESC_CLK.  The same value is written to both the DSI and the
 * DPHY divider registers, in that order.
 */
static void gen11_dsi_program_esc_clk_div(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	enum port port;
	u32 bpp = mipi_dsi_pixel_format_to_bpp(intel_dsi->pixel_format);
	u32 afe_clk_khz; /* 8X Clock */
	u32 esc_clk_div_m;

	afe_clk_khz = DIV_ROUND_CLOSEST(intel_dsi->pclk * bpp,
					intel_dsi->lane_count);

	esc_clk_div_m = DIV_ROUND_UP(afe_clk_khz, DSI_MAX_ESC_CLK);

	for_each_dsi_port(port, intel_dsi->ports) {
		I915_WRITE(ICL_DSI_ESC_CLK_DIV(port),
			   esc_clk_div_m & ICL_ESC_CLK_DIV_MASK);
		POSTING_READ(ICL_DSI_ESC_CLK_DIV(port));
	}

	for_each_dsi_port(port, intel_dsi->ports) {
		I915_WRITE(ICL_DPHY_ESC_CLK_DIV(port),
			   esc_clk_div_m & ICL_ESC_CLK_DIV_MASK);
		POSTING_READ(ICL_DPHY_ESC_CLK_DIV(port));
	}
}
201
/*
 * Switch the combo PHY of each enabled DSI port into DSI mode and grab
 * the corresponding DDI IO power domain.  The matching power_put is in
 * gen11_dsi_disable_io_power().
 */
static void gen11_dsi_enable_io_power(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	enum port port;
	u32 tmp;

	for_each_dsi_port(port, intel_dsi->ports) {
		tmp = I915_READ(ICL_DSI_IO_MODECTL(port));
		tmp |= COMBO_PHY_MODE_DSI;
		I915_WRITE(ICL_DSI_IO_MODECTL(port), tmp);
	}

	for_each_dsi_port(port, intel_dsi->ports) {
		intel_display_power_get(dev_priv, port == PORT_A ?
					POWER_DOMAIN_PORT_DDI_A_IO :
					POWER_DOMAIN_PORT_DDI_B_IO);
	}
}
221
222 static void gen11_dsi_power_up_lanes(struct intel_encoder *encoder)
223 {
224 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
225 struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
226 enum port port;
227 u32 tmp;
228 u32 lane_mask;
229
230 switch (intel_dsi->lane_count) {
231 case 1:
232 lane_mask = PWR_DOWN_LN_3_1_0;
233 break;
234 case 2:
235 lane_mask = PWR_DOWN_LN_3_1;
236 break;
237 case 3:
238 lane_mask = PWR_DOWN_LN_3;
239 break;
240 case 4:
241 default:
242 lane_mask = PWR_UP_ALL_LANES;
243 break;
244 }
245
246 for_each_dsi_port(port, intel_dsi->ports) {
247 tmp = I915_READ(ICL_PORT_CL_DW10(port));
248 tmp &= ~PWR_DOWN_LN_MASK;
249 I915_WRITE(ICL_PORT_CL_DW10(port), tmp | lane_mask);
250 }
251 }
252
/*
 * Step 4b of the DSI enable sequence: lane sequencing for the combo
 * PHY transmitters — loadgen select and FRC latency optimization.
 * The per-lane DW4 writes and the LN0-read/GRP-write pairs follow the
 * bspec programming model; keep the order as-is.
 */
static void gen11_dsi_config_phy_lanes_sequence(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	enum port port;
	u32 tmp;
	int lane;

	/* Step 4b(i) set loadgen select for transmit and aux lanes */
	for_each_dsi_port(port, intel_dsi->ports) {
		tmp = I915_READ(ICL_PORT_TX_DW4_AUX(port));
		tmp &= ~LOADGEN_SELECT;
		I915_WRITE(ICL_PORT_TX_DW4_AUX(port), tmp);
		for (lane = 0; lane <= 3; lane++) {
			tmp = I915_READ(ICL_PORT_TX_DW4_LN(port, lane));
			tmp &= ~LOADGEN_SELECT;
			/* loadgen is set on every transmit lane except lane 2 */
			if (lane != 2)
				tmp |= LOADGEN_SELECT;
			I915_WRITE(ICL_PORT_TX_DW4_LN(port, lane), tmp);
		}
	}

	/* Step 4b(ii) set latency optimization for transmit and aux lanes */
	for_each_dsi_port(port, intel_dsi->ports) {
		tmp = I915_READ(ICL_PORT_TX_DW2_AUX(port));
		tmp &= ~FRC_LATENCY_OPTIM_MASK;
		tmp |= FRC_LATENCY_OPTIM_VAL(0x5);
		I915_WRITE(ICL_PORT_TX_DW2_AUX(port), tmp);
		/* read LN0, broadcast the value to all lanes via GRP */
		tmp = I915_READ(ICL_PORT_TX_DW2_LN0(port));
		tmp &= ~FRC_LATENCY_OPTIM_MASK;
		tmp |= FRC_LATENCY_OPTIM_VAL(0x5);
		I915_WRITE(ICL_PORT_TX_DW2_GRP(port), tmp);
	}

}
288
/*
 * Step 4c of the DSI enable sequence: program voltage swing and skew.
 * Training enable must be cleared before the swing values are changed
 * and set again afterwards to latch them; keep this ordering intact.
 */
static void gen11_dsi_voltage_swing_program_seq(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	u32 tmp;
	enum port port;

	/* clear common keeper enable bit */
	for_each_dsi_port(port, intel_dsi->ports) {
		tmp = I915_READ(ICL_PORT_PCS_DW1_LN0(port));
		tmp &= ~COMMON_KEEPER_EN;
		I915_WRITE(ICL_PORT_PCS_DW1_GRP(port), tmp);
		tmp = I915_READ(ICL_PORT_PCS_DW1_AUX(port));
		tmp &= ~COMMON_KEEPER_EN;
		I915_WRITE(ICL_PORT_PCS_DW1_AUX(port), tmp);
	}

	/*
	 * Set SUS Clock Config bitfield to 11b
	 * Note: loadgen select program is done
	 * as part of lane phy sequence configuration
	 */
	for_each_dsi_port(port, intel_dsi->ports) {
		tmp = I915_READ(ICL_PORT_CL_DW5(port));
		tmp |= SUS_CLOCK_CONFIG;
		I915_WRITE(ICL_PORT_CL_DW5(port), tmp);
	}

	/* Clear training enable to change swing values */
	for_each_dsi_port(port, intel_dsi->ports) {
		tmp = I915_READ(ICL_PORT_TX_DW5_LN0(port));
		tmp &= ~TX_TRAINING_EN;
		I915_WRITE(ICL_PORT_TX_DW5_GRP(port), tmp);
		tmp = I915_READ(ICL_PORT_TX_DW5_AUX(port));
		tmp &= ~TX_TRAINING_EN;
		I915_WRITE(ICL_PORT_TX_DW5_AUX(port), tmp);
	}

	/* Program swing and de-emphasis */
	dsi_program_swing_and_deemphasis(encoder);

	/* Set training enable to trigger update */
	for_each_dsi_port(port, intel_dsi->ports) {
		tmp = I915_READ(ICL_PORT_TX_DW5_LN0(port));
		tmp |= TX_TRAINING_EN;
		I915_WRITE(ICL_PORT_TX_DW5_GRP(port), tmp);
		tmp = I915_READ(ICL_PORT_TX_DW5_AUX(port));
		tmp |= TX_TRAINING_EN;
		I915_WRITE(ICL_PORT_TX_DW5_AUX(port), tmp);
	}
}
340
/*
 * Enable the DDI buffer on each DSI port and wait (up to 500 us) for
 * it to leave the idle state; logs an error if it stays idle.
 */
static void gen11_dsi_enable_ddi_buffer(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	u32 tmp;
	enum port port;

	for_each_dsi_port(port, intel_dsi->ports) {
		tmp = I915_READ(DDI_BUF_CTL(port));
		tmp |= DDI_BUF_CTL_ENABLE;
		I915_WRITE(DDI_BUF_CTL(port), tmp);

		/* wait for the buffer to report non-idle */
		if (wait_for_us(!(I915_READ(DDI_BUF_CTL(port)) &
				  DDI_BUF_IS_IDLE),
				  500))
			DRM_ERROR("DDI port:%c buffer idle\n", port_name(port));
	}
}
359
/*
 * Program the D-PHY timing registers: T-INIT master, clock-lane and
 * data-lane timings.  Each DPHY register has a shadow copy inside the
 * display core (DSI_*) that must be written with the same value.
 */
static void gen11_dsi_setup_dphy_timings(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	u32 tmp;
	enum port port;

	/* Program T-INIT master registers */
	for_each_dsi_port(port, intel_dsi->ports) {
		tmp = I915_READ(ICL_DSI_T_INIT_MASTER(port));
		tmp &= ~MASTER_INIT_TIMER_MASK;
		tmp |= intel_dsi->init_count;	/* init count from VBT */
		I915_WRITE(ICL_DSI_T_INIT_MASTER(port), tmp);
	}

	/* Program DPHY clock lanes timings */
	for_each_dsi_port(port, intel_dsi->ports) {
		I915_WRITE(DPHY_CLK_TIMING_PARAM(port), intel_dsi->dphy_reg);

		/* shadow register inside display core */
		I915_WRITE(DSI_CLK_TIMING_PARAM(port), intel_dsi->dphy_reg);
	}

	/* Program DPHY data lanes timings */
	for_each_dsi_port(port, intel_dsi->ports) {
		I915_WRITE(DPHY_DATA_TIMING_PARAM(port),
			   intel_dsi->dphy_data_lane_reg);

		/* shadow register inside display core */
		I915_WRITE(DSI_DATA_TIMING_PARAM(port),
			   intel_dsi->dphy_data_lane_reg);
	}

	/*
	 * If DSI link operating at or below an 800 MHz,
	 * TA_SURE should be override and programmed to
	 * a value '0' inside TA_PARAM_REGISTERS otherwise
	 * leave all fields at HW default values.
	 */
	if (intel_dsi_bitrate(intel_dsi) <= 800000) {
		for_each_dsi_port(port, intel_dsi->ports) {
			tmp = I915_READ(DPHY_TA_TIMING_PARAM(port));
			tmp &= ~TA_SURE_MASK;
			tmp |= TA_SURE_OVERRIDE | TA_SURE(0);
			I915_WRITE(DPHY_TA_TIMING_PARAM(port), tmp);

			/* shadow register inside display core */
			tmp = I915_READ(DSI_TA_TIMING_PARAM(port));
			tmp &= ~TA_SURE_MASK;
			tmp |= TA_SURE_OVERRIDE | TA_SURE(0);
			I915_WRITE(DSI_TA_TIMING_PARAM(port), tmp);
		}
	}
}
414
/*
 * Configure the DSI transcoder(s): EOTP, link calibration, continuous
 * clock behavior, buffer threshold, virtual channel, pixel format and
 * operation mode; enable port sync for dual-link; select the input
 * pipe and enable the DDI function; finally wait for link ready.
 */
static void
gen11_dsi_configure_transcoder(struct intel_encoder *encoder,
			       const struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	struct intel_crtc *intel_crtc = to_intel_crtc(pipe_config->base.crtc);
	enum pipe pipe = intel_crtc->pipe;
	u32 tmp;
	enum port port;
	enum transcoder dsi_trans;

	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		tmp = I915_READ(DSI_TRANS_FUNC_CONF(dsi_trans));

		/* EOTP packet transmission per VBT */
		if (intel_dsi->eotp_pkt)
			tmp &= ~EOTP_DISABLED;
		else
			tmp |= EOTP_DISABLED;

		/* enable link calibration if freq > 1.5Gbps */
		if (intel_dsi_bitrate(intel_dsi) >= 1500 * 1000) {
			tmp &= ~LINK_CALIBRATION_MASK;
			tmp |= CALIBRATION_ENABLED_INITIAL_ONLY;
		}

		/* configure continuous clock */
		tmp &= ~CONTINUOUS_CLK_MASK;
		if (intel_dsi->clock_stop)
			tmp |= CLK_ENTER_LP_AFTER_DATA;
		else
			tmp |= CLK_HS_CONTINUOUS;

		/* configure buffer threshold limit to minimum */
		tmp &= ~PIX_BUF_THRESHOLD_MASK;
		tmp |= PIX_BUF_THRESHOLD_1_4;

		/* set virtual channel to '0' */
		tmp &= ~PIX_VIRT_CHAN_MASK;
		tmp |= PIX_VIRT_CHAN(0);

		/* program BGR transmission */
		if (intel_dsi->bgr_enabled)
			tmp |= BGR_TRANSMISSION;

		/* select pixel format */
		tmp &= ~PIX_FMT_MASK;
		switch (intel_dsi->pixel_format) {
		default:
			MISSING_CASE(intel_dsi->pixel_format);
			/* fallthrough */
		case MIPI_DSI_FMT_RGB565:
			tmp |= PIX_FMT_RGB565;
			break;
		case MIPI_DSI_FMT_RGB666_PACKED:
			tmp |= PIX_FMT_RGB666_PACKED;
			break;
		case MIPI_DSI_FMT_RGB666:
			tmp |= PIX_FMT_RGB666_LOOSE;
			break;
		case MIPI_DSI_FMT_RGB888:
			tmp |= PIX_FMT_RGB888;
			break;
		}

		/* program DSI operation mode (video modes only) */
		if (is_vid_mode(intel_dsi)) {
			tmp &= ~OP_MODE_MASK;
			switch (intel_dsi->video_mode_format) {
			default:
				MISSING_CASE(intel_dsi->video_mode_format);
				/* fallthrough */
			case VIDEO_MODE_NON_BURST_WITH_SYNC_EVENTS:
				tmp |= VIDEO_MODE_SYNC_EVENT;
				break;
			case VIDEO_MODE_NON_BURST_WITH_SYNC_PULSE:
				tmp |= VIDEO_MODE_SYNC_PULSE;
				break;
			}
		}

		I915_WRITE(DSI_TRANS_FUNC_CONF(dsi_trans), tmp);
	}

	/* enable port sync mode if dual link */
	if (intel_dsi->dual_link) {
		for_each_dsi_port(port, intel_dsi->ports) {
			dsi_trans = dsi_port_to_transcoder(port);
			tmp = I915_READ(TRANS_DDI_FUNC_CTL2(dsi_trans));
			tmp |= PORT_SYNC_MODE_ENABLE;
			I915_WRITE(TRANS_DDI_FUNC_CTL2(dsi_trans), tmp);
		}

		//TODO: configure DSS_CTL1
	}

	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);

		/* select data lane width */
		tmp = I915_READ(TRANS_DDI_FUNC_CTL(dsi_trans));
		tmp &= ~DDI_PORT_WIDTH_MASK;
		tmp |= DDI_PORT_WIDTH(intel_dsi->lane_count);

		/* select input pipe */
		tmp &= ~TRANS_DDI_EDP_INPUT_MASK;
		switch (pipe) {
		default:
			MISSING_CASE(pipe);
			/* fallthrough */
		case PIPE_A:
			tmp |= TRANS_DDI_EDP_INPUT_A_ON;
			break;
		case PIPE_B:
			tmp |= TRANS_DDI_EDP_INPUT_B_ONOFF;
			break;
		case PIPE_C:
			tmp |= TRANS_DDI_EDP_INPUT_C_ONOFF;
			break;
		}

		/* enable DDI buffer */
		tmp |= TRANS_DDI_FUNC_ENABLE;
		I915_WRITE(TRANS_DDI_FUNC_CTL(dsi_trans), tmp);
	}

	/* wait for link ready */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		if (wait_for_us((I915_READ(DSI_TRANS_FUNC_CONF(dsi_trans)) &
				LINK_READY), 2500))
			DRM_ERROR("DSI link not ready\n");
	}
}
550
/*
 * Program the transcoder timing registers (HTOTAL, HSYNC, VTOTAL,
 * VSYNC, VSYNCSHIFT) from the adjusted mode.  For dual-link, the
 * horizontal values are split between the two links; bspec minimums
 * are checked and violations logged (but not fatal).
 */
static void
gen11_dsi_set_transcoder_timings(struct intel_encoder *encoder,
				 const struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	const struct drm_display_mode *adjusted_mode =
					&pipe_config->base.adjusted_mode;
	enum port port;
	enum transcoder dsi_trans;
	/* horizontal timings */
	u16 htotal, hactive, hsync_start, hsync_end, hsync_size;
	u16 hfront_porch, hback_porch;
	/* vertical timings */
	u16 vtotal, vactive, vsync_start, vsync_end, vsync_shift;

	hactive = adjusted_mode->crtc_hdisplay;
	htotal = adjusted_mode->crtc_htotal;
	hsync_start = adjusted_mode->crtc_hsync_start;
	hsync_end = adjusted_mode->crtc_hsync_end;
	hsync_size  = hsync_end - hsync_start;
	hfront_porch = (adjusted_mode->crtc_hsync_start -
			adjusted_mode->crtc_hdisplay);
	hback_porch = (adjusted_mode->crtc_htotal -
		       adjusted_mode->crtc_hsync_end);
	vactive = adjusted_mode->crtc_vdisplay;
	vtotal = adjusted_mode->crtc_vtotal;
	vsync_start = adjusted_mode->crtc_vsync_start;
	vsync_end = adjusted_mode->crtc_vsync_end;
	/* computed from the full-mode values, before any dual-link split */
	vsync_shift = hsync_start - htotal / 2;

	/* each link carries half the active pixels (plus any overlap) */
	if (intel_dsi->dual_link) {
		hactive /= 2;
		if (intel_dsi->dual_link == DSI_DUAL_LINK_FRONT_BACK)
			hactive += intel_dsi->pixel_overlap;
		htotal /= 2;
	}

	/* minimum hactive as per bspec: 256 pixels */
	if (adjusted_mode->crtc_hdisplay < 256)
		DRM_ERROR("hactive is less then 256 pixels\n");

	/* if RGB666 format, then hactive must be multiple of 4 pixels */
	if (intel_dsi->pixel_format == MIPI_DSI_FMT_RGB666 && hactive % 4 != 0)
		DRM_ERROR("hactive pixels are not multiple of 4\n");

	/* program TRANS_HTOTAL register */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		I915_WRITE(HTOTAL(dsi_trans),
			   (hactive - 1) | ((htotal - 1) << 16));
	}

	/* TRANS_HSYNC register to be programmed only for video mode */
	if (intel_dsi->operation_mode == INTEL_DSI_VIDEO_MODE) {
		if (intel_dsi->video_mode_format ==
		    VIDEO_MODE_NON_BURST_WITH_SYNC_PULSE) {
			/* BSPEC: hsync size should be atleast 16 pixels */
			if (hsync_size < 16)
				DRM_ERROR("hsync size < 16 pixels\n");
		}

		if (hback_porch < 16)
			DRM_ERROR("hback porch < 16 pixels\n");

		if (intel_dsi->dual_link) {
			hsync_start /= 2;
			hsync_end /= 2;
		}

		for_each_dsi_port(port, intel_dsi->ports) {
			dsi_trans = dsi_port_to_transcoder(port);
			I915_WRITE(HSYNC(dsi_trans),
				   (hsync_start - 1) | ((hsync_end - 1) << 16));
		}
	}

	/* program TRANS_VTOTAL register */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		/*
		 * FIXME: Programing this by assuming progressive mode, since
		 * non-interlaced info from VBT is not saved inside
		 * struct drm_display_mode.
		 * For interlace mode: program required pixel minus 2
		 */
		I915_WRITE(VTOTAL(dsi_trans),
			   (vactive - 1) | ((vtotal - 1) << 16));
	}

	if (vsync_end < vsync_start || vsync_end > vtotal)
		DRM_ERROR("Invalid vsync_end value\n");

	if (vsync_start < vactive)
		DRM_ERROR("vsync_start less than vactive\n");

	/* program TRANS_VSYNC register */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		I915_WRITE(VSYNC(dsi_trans),
			   (vsync_start - 1) | ((vsync_end - 1) << 16));
	}

	/*
	 * FIXME: It has to be programmed only for interlaced
	 * modes. Put the check condition here once interlaced
	 * info available as described above.
	 * program TRANS_VSYNCSHIFT register
	 */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		I915_WRITE(VSYNCSHIFT(dsi_trans), vsync_shift);
	}
}
665
/*
 * Enable the DSI transcoder(s) via PIPECONF and wait (up to 10 ms)
 * for the hardware to report the transcoder active.
 */
static void gen11_dsi_enable_transcoder(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	enum port port;
	enum transcoder dsi_trans;
	u32 tmp;

	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		tmp = I915_READ(PIPECONF(dsi_trans));
		tmp |= PIPECONF_ENABLE;
		I915_WRITE(PIPECONF(dsi_trans), tmp);

		/* wait for transcoder to be enabled */
		if (intel_wait_for_register(dev_priv, PIPECONF(dsi_trans),
					    I965_PIPECONF_ACTIVE,
					    I965_PIPECONF_ACTIVE, 10))
			DRM_ERROR("DSI transcoder not enabled\n");
	}
}
687
/*
 * Step 4h: convert the VBT HS TX / LP RX / turnaround timeout values
 * (given in byte-clock counts) into escape-clock counts and program
 * the per-transcoder timeout registers.
 */
static void gen11_dsi_setup_timeouts(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	enum port port;
	enum transcoder dsi_trans;
	u32 tmp, hs_tx_timeout, lp_rx_timeout, ta_timeout, divisor, mul;

	/*
	 * escape clock count calculation:
	 * BYTE_CLK_COUNT = TIME_NS/(8 * UI)
	 * UI (nsec) = (10^6)/Bitrate
	 * TIME_NS = (BYTE_CLK_COUNT * 8 * 10^6)/ Bitrate
	 * ESCAPE_CLK_COUNT  = TIME_NS/ESC_CLK_NS
	 */
	divisor = intel_dsi_tlpx_ns(intel_dsi) * intel_dsi_bitrate(intel_dsi) * 1000;
	mul = 8 * 1000000;
	/*
	 * NOTE(review): timeout * mul is u32 arithmetic; presumably the VBT
	 * values are small enough not to overflow — confirm against bspec.
	 */
	hs_tx_timeout = DIV_ROUND_UP(intel_dsi->hs_tx_timeout * mul,
				     divisor);
	lp_rx_timeout = DIV_ROUND_UP(intel_dsi->lp_rx_timeout * mul, divisor);
	ta_timeout = DIV_ROUND_UP(intel_dsi->turn_arnd_val * mul, divisor);

	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);

		/* program hst_tx_timeout */
		tmp = I915_READ(DSI_HSTX_TO(dsi_trans));
		tmp &= ~HSTX_TIMEOUT_VALUE_MASK;
		tmp |= HSTX_TIMEOUT_VALUE(hs_tx_timeout);
		I915_WRITE(DSI_HSTX_TO(dsi_trans), tmp);

		/* FIXME: DSI_CALIB_TO */

		/* program lp_rx_host timeout */
		tmp = I915_READ(DSI_LPRX_HOST_TO(dsi_trans));
		tmp &= ~LPRX_TIMEOUT_VALUE_MASK;
		tmp |= LPRX_TIMEOUT_VALUE(lp_rx_timeout);
		I915_WRITE(DSI_LPRX_HOST_TO(dsi_trans), tmp);

		/* FIXME: DSI_PWAIT_TO */

		/* program turn around timeout */
		tmp = I915_READ(DSI_TA_TO(dsi_trans));
		tmp &= ~TA_TIMEOUT_VALUE_MASK;
		tmp |= TA_TIMEOUT_VALUE(ta_timeout);
		I915_WRITE(DSI_TA_TO(dsi_trans), tmp);
	}
}
736
/*
 * Step 4 of the gen11 DSI enable sequence: bring up the DDI port and
 * D-PHY.  Each helper implements the bspec sub-step noted beside it;
 * the call order is mandated by the enable sequence.
 */
static void
gen11_dsi_enable_port_and_phy(struct intel_encoder *encoder,
			      const struct intel_crtc_state *pipe_config)
{
	/* step 4a: power up all lanes of the DDI used by DSI */
	gen11_dsi_power_up_lanes(encoder);

	/* step 4b: configure lane sequencing of the Combo-PHY transmitters */
	gen11_dsi_config_phy_lanes_sequence(encoder);

	/* step 4c: configure voltage swing and skew */
	gen11_dsi_voltage_swing_program_seq(encoder);

	/* enable DDI buffer */
	gen11_dsi_enable_ddi_buffer(encoder);

	/* setup D-PHY timings */
	gen11_dsi_setup_dphy_timings(encoder);

	/* step 4h: setup DSI protocol timeouts */
	gen11_dsi_setup_timeouts(encoder);

	/* Step (4h, 4i, 4j, 4k): Configure transcoder */
	gen11_dsi_configure_transcoder(encoder, pipe_config);
}
762
/*
 * Power up and initialize the panel: set the maximum return packet
 * size, run the VBT power-on/init sequences, and make sure every
 * command reached the panel before the transcoder is enabled.
 */
static void gen11_dsi_powerup_panel(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	struct mipi_dsi_device *dsi;
	enum port port;
	enum transcoder dsi_trans;
	u32 tmp;
	int ret;

	/* set maximum return packet size */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);

		/*
		 * FIXME: This uses the number of DW's currently in the payload
		 * receive queue. This is probably not what we want here.
		 */
		tmp = I915_READ(DSI_CMD_RXCTL(dsi_trans));
		tmp &= NUMBER_RX_PLOAD_DW_MASK;
		/* multiply "Number Rx Payload DW" by 4 to get max value */
		tmp = tmp * 4;
		dsi = intel_dsi->dsi_hosts[port]->device;
		ret = mipi_dsi_set_maximum_return_packet_size(dsi, tmp);
		if (ret < 0)
			DRM_ERROR("error setting max return pkt size%d\n", tmp);
	}

	/* panel power on related mipi dsi vbt sequences */
	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_POWER_ON);
	intel_dsi_msleep(intel_dsi, intel_dsi->panel_on_delay);
	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_DEASSERT_RESET);
	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_INIT_OTP);
	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_DISPLAY_ON);

	/* ensure all panel commands dispatched before enabling transcoder */
	wait_for_cmds_dispatched_to_panel(encoder);
}
801
/*
 * Gen11 DSI pre-enable: steps 2-7 of the bspec enable sequence.
 * Marked unused — presumably not yet wired into the encoder hooks at
 * this stage of the driver bring-up.
 */
static void __attribute__((unused))
gen11_dsi_pre_enable(struct intel_encoder *encoder,
		     const struct intel_crtc_state *pipe_config,
		     const struct drm_connector_state *conn_state)
{
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);

	/* step2: enable IO power */
	gen11_dsi_enable_io_power(encoder);

	/* step3: enable DSI PLL */
	gen11_dsi_program_esc_clk_div(encoder);

	/* step4: enable DSI port and DPHY */
	gen11_dsi_enable_port_and_phy(encoder, pipe_config);

	/* step5: program and powerup panel */
	gen11_dsi_powerup_panel(encoder);

	/* step6c: configure transcoder timings */
	gen11_dsi_set_transcoder_timings(encoder, pipe_config);

	/* step6d: enable dsi transcoder */
	gen11_dsi_enable_transcoder(encoder);

	/* step7: enable backlight */
	intel_panel_enable_backlight(pipe_config, conn_state);
	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_BACKLIGHT_ON);
}
831
832 static void gen11_dsi_disable_transcoder(struct intel_encoder *encoder)
833 {
834 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
835 struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
836 enum port port;
837 enum transcoder dsi_trans;
838 u32 tmp;
839
840 for_each_dsi_port(port, intel_dsi->ports) {
841 dsi_trans = dsi_port_to_transcoder(port);
842
843 /* disable transcoder */
844 tmp = I915_READ(PIPECONF(dsi_trans));
845 tmp &= ~PIPECONF_ENABLE;
846 I915_WRITE(PIPECONF(dsi_trans), tmp);
847
848 /* wait for transcoder to be disabled */
849 if (intel_wait_for_register(dev_priv, PIPECONF(dsi_trans),
850 I965_PIPECONF_ACTIVE, 0, 50))
851 DRM_ERROR("DSI trancoder not disabled\n");
852 }
853 }
854
/*
 * Run the VBT panel power-down sequences (in the required order:
 * display off, assert reset, power off) and wait for all commands to
 * reach the panel.
 */
static void gen11_dsi_powerdown_panel(struct intel_encoder *encoder)
{
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);

	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_DISPLAY_OFF);
	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_ASSERT_RESET);
	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_POWER_OFF);

	/* ensure cmds dispatched to panel */
	wait_for_cmds_dispatched_to_panel(encoder);
}
866
/*
 * Tear down the transcoder configuration: put the link into ULPS,
 * disable the DDI function, and clear port sync for dual-link.
 * (Function name carries a historical typo — "trancoder"; renaming
 * would touch its caller, so it is kept as-is here.)
 */
static void gen11_dsi_deconfigure_trancoder(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	enum port port;
	enum transcoder dsi_trans;
	u32 tmp;

	/* put dsi link in ULPS */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		tmp = I915_READ(DSI_LP_MSG(dsi_trans));
		tmp |= LINK_ENTER_ULPS;
		tmp &= ~LINK_ULPS_TYPE_LP11;
		I915_WRITE(DSI_LP_MSG(dsi_trans), tmp);

		/* wait up to 10 us for the link to report ULPS */
		if (wait_for_us((I915_READ(DSI_LP_MSG(dsi_trans)) &
				LINK_IN_ULPS),
				10))
			DRM_ERROR("DSI link not in ULPS\n");
	}

	/* disable ddi function */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		tmp = I915_READ(TRANS_DDI_FUNC_CTL(dsi_trans));
		tmp &= ~TRANS_DDI_FUNC_ENABLE;
		I915_WRITE(TRANS_DDI_FUNC_CTL(dsi_trans), tmp);
	}

	/* disable port sync mode if dual link */
	if (intel_dsi->dual_link) {
		for_each_dsi_port(port, intel_dsi->ports) {
			dsi_trans = dsi_port_to_transcoder(port);
			tmp = I915_READ(TRANS_DDI_FUNC_CTL2(dsi_trans));
			tmp &= ~PORT_SYNC_MODE_ENABLE;
			I915_WRITE(TRANS_DDI_FUNC_CTL2(dsi_trans), tmp);
		}
	}
}
907
/*
 * Disable the DDI buffer on each DSI port and wait (up to 8 us) for
 * it to report idle; logs an error on timeout.
 */
static void gen11_dsi_disable_port(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	u32 tmp;
	enum port port;

	for_each_dsi_port(port, intel_dsi->ports) {
		tmp = I915_READ(DDI_BUF_CTL(port));
		tmp &= ~DDI_BUF_CTL_ENABLE;
		I915_WRITE(DDI_BUF_CTL(port), tmp);

		if (wait_for_us((I915_READ(DDI_BUF_CTL(port)) &
				 DDI_BUF_IS_IDLE),
				 8))
			DRM_ERROR("DDI port:%c buffer not idle\n",
				  port_name(port));
	}
}
927
928 static void gen11_dsi_disable_io_power(struct intel_encoder *encoder)
929 {
930 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
931 struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
932 enum port port;
933 u32 tmp;
934
935 intel_display_power_put(dev_priv, POWER_DOMAIN_PORT_DDI_A_IO);
936
937 if (intel_dsi->dual_link)
938 intel_display_power_put(dev_priv, POWER_DOMAIN_PORT_DDI_B_IO);
939
940 /* set mode to DDI */
941 for_each_dsi_port(port, intel_dsi->ports) {
942 tmp = I915_READ(ICL_DSI_IO_MODECTL(port));
943 tmp &= ~COMBO_PHY_MODE_DSI;
944 I915_WRITE(ICL_DSI_IO_MODECTL(port), tmp);
945 }
946 }
947
/*
 * Gen11 DSI disable sequence (reverse of pre-enable): backlight off,
 * transcoder off, panel power-down, transcoder deconfig, port off,
 * IO power off.  Marked unused — presumably not yet wired into the
 * encoder hooks at this stage of the driver bring-up.
 */
static void __attribute__((unused)) gen11_dsi_disable(
			struct intel_encoder *encoder,
			const struct intel_crtc_state *old_crtc_state,
			const struct drm_connector_state *old_conn_state)
{
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);

	/* step1: turn off backlight */
	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_BACKLIGHT_OFF);
	intel_panel_disable_backlight(old_conn_state);

	/* step2d,e: disable transcoder and wait */
	gen11_dsi_disable_transcoder(encoder);

	/* step2f,g: powerdown panel */
	gen11_dsi_powerdown_panel(encoder);

	/* step2h,i,j: deconfig trancoder */
	gen11_dsi_deconfigure_trancoder(encoder);

	/* step3: disable port */
	gen11_dsi_disable_port(encoder);

	/* step4: disable IO power */
	gen11_dsi_disable_io_power(encoder);
}