]>
Commit | Line | Data |
---|---|---|
fcfe0bdc MC |
1 | /* |
2 | * Copyright © 2018 Intel Corporation | |
3 | * | |
4 | * Permission is hereby granted, free of charge, to any person obtaining a | |
5 | * copy of this software and associated documentation files (the "Software"), | |
6 | * to deal in the Software without restriction, including without limitation | |
7 | * the rights to use, copy, modify, merge, publish, distribute, sublicense, | |
8 | * and/or sell copies of the Software, and to permit persons to whom the | |
9 | * Software is furnished to do so, subject to the following conditions: | |
10 | * | |
11 | * The above copyright notice and this permission notice (including the next | |
12 | * paragraph) shall be included in all copies or substantial portions of the | |
13 | * Software. | |
14 | * | |
15 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | |
16 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | |
17 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL | |
18 | * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | |
19 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING | |
20 | * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER | |
21 | * DEALINGS IN THE SOFTWARE. | |
22 | * | |
23 | * Authors: | |
24 | * Madhav Chauhan <madhav.chauhan@intel.com> | |
25 | * Jani Nikula <jani.nikula@intel.com> | |
26 | */ | |
27 | ||
#include <linux/errno.h>

#include <drm/drm_atomic_helper.h>
#include <drm/drm_mipi_dsi.h>

#include "intel_dsi.h"
31 | ||
32bbc3d4 MC |
32 | static inline int header_credits_available(struct drm_i915_private *dev_priv, |
33 | enum transcoder dsi_trans) | |
34 | { | |
35 | return (I915_READ(DSI_CMD_TXCTL(dsi_trans)) & FREE_HEADER_CREDIT_MASK) | |
36 | >> FREE_HEADER_CREDIT_SHIFT; | |
37 | } | |
38 | ||
39 | static inline int payload_credits_available(struct drm_i915_private *dev_priv, | |
40 | enum transcoder dsi_trans) | |
41 | { | |
42 | return (I915_READ(DSI_CMD_TXCTL(dsi_trans)) & FREE_PLOAD_CREDIT_MASK) | |
43 | >> FREE_PLOAD_CREDIT_SHIFT; | |
44 | } | |
45 | ||
/*
 * Poll until all header credits are reported free, i.e. the hardware has
 * drained every queued command header. The credit read must stay inside
 * the wait_for_us() condition so it is re-evaluated on each poll.
 * Best effort: a 100 us timeout only logs an error.
 */
static void wait_for_header_credits(struct drm_i915_private *dev_priv,
				    enum transcoder dsi_trans)
{
	if (wait_for_us(header_credits_available(dev_priv, dsi_trans) >=
			MAX_HEADER_CREDIT, 100))
		DRM_ERROR("DSI header credits not released\n");
}
53 | ||
/*
 * Poll until all payload credits are reported free, i.e. the hardware has
 * drained the command payload FIFO. The credit read must stay inside the
 * wait_for_us() condition so it is re-evaluated on each poll.
 * Best effort: a 100 us timeout only logs an error.
 */
static void wait_for_payload_credits(struct drm_i915_private *dev_priv,
				     enum transcoder dsi_trans)
{
	if (wait_for_us(payload_credits_available(dev_priv, dsi_trans) >=
			MAX_PLOAD_CREDIT, 100))
		DRM_ERROR("DSI payload credits not released\n");
}
61 | ||
d364dc66 | 62 | static enum transcoder dsi_port_to_transcoder(enum port port) |
ca8fc99f MC |
63 | { |
64 | if (port == PORT_A) | |
65 | return TRANSCODER_DSI_0; | |
66 | else | |
67 | return TRANSCODER_DSI_1; | |
68 | } | |
69 | ||
32bbc3d4 MC |
/*
 * Make sure all commands queued on the DSI link have actually been
 * dispatched to the panel: drain the credit FIFOs, send a DCS NOP as a
 * fence, then wait for the NOP's header credit and the LP TX busy bit to
 * clear. Each step covers every enabled DSI port. Failures are only
 * logged; this is a best-effort synchronization point.
 */
static void wait_for_cmds_dispatched_to_panel(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	struct mipi_dsi_device *dsi;
	enum port port;
	enum transcoder dsi_trans;
	int ret;

	/* wait for header/payload credits to be released */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		wait_for_header_credits(dev_priv, dsi_trans);
		wait_for_payload_credits(dev_priv, dsi_trans);
	}

	/* send nop DCS command, in LP mode on virtual channel 0 */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi = intel_dsi->dsi_hosts[port]->device;
		dsi->mode_flags |= MIPI_DSI_MODE_LPM;
		dsi->channel = 0;
		ret = mipi_dsi_dcs_nop(dsi);
		if (ret < 0)
			DRM_ERROR("error sending DCS NOP command\n");
	}

	/* wait for header credits to be released */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		wait_for_header_credits(dev_priv, dsi_trans);
	}

	/* wait for LP TX in progress bit to be cleared */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		if (wait_for_us(!(I915_READ(DSI_LP_MSG(dsi_trans)) &
				  LPTX_IN_PROGRESS), 20))
			DRM_ERROR("LPTX bit not cleared\n");
	}
}
110 | ||
c5f9c934 MC |
111 | static bool add_payld_to_queue(struct intel_dsi_host *host, const u8 *data, |
112 | u32 len) | |
113 | { | |
114 | struct intel_dsi *intel_dsi = host->intel_dsi; | |
115 | struct drm_i915_private *dev_priv = to_i915(intel_dsi->base.base.dev); | |
116 | enum transcoder dsi_trans = dsi_port_to_transcoder(host->port); | |
117 | int free_credits; | |
118 | int i, j; | |
119 | ||
120 | for (i = 0; i < len; i += 4) { | |
121 | u32 tmp = 0; | |
122 | ||
123 | free_credits = payload_credits_available(dev_priv, dsi_trans); | |
124 | if (free_credits < 1) { | |
125 | DRM_ERROR("Payload credit not available\n"); | |
126 | return false; | |
127 | } | |
128 | ||
129 | for (j = 0; j < min_t(u32, len - i, 4); j++) | |
130 | tmp |= *data++ << 8 * j; | |
131 | ||
132 | I915_WRITE(DSI_CMD_TXPYLD(dsi_trans), tmp); | |
133 | } | |
134 | ||
135 | return true; | |
136 | } | |
137 | ||
138 | static int dsi_send_pkt_hdr(struct intel_dsi_host *host, | |
139 | struct mipi_dsi_packet pkt, bool enable_lpdt) | |
140 | { | |
141 | struct intel_dsi *intel_dsi = host->intel_dsi; | |
142 | struct drm_i915_private *dev_priv = to_i915(intel_dsi->base.base.dev); | |
143 | enum transcoder dsi_trans = dsi_port_to_transcoder(host->port); | |
144 | u32 tmp; | |
145 | int free_credits; | |
146 | ||
147 | /* check if header credit available */ | |
148 | free_credits = header_credits_available(dev_priv, dsi_trans); | |
149 | if (free_credits < 1) { | |
150 | DRM_ERROR("send pkt header failed, not enough hdr credits\n"); | |
151 | return -1; | |
152 | } | |
153 | ||
154 | tmp = I915_READ(DSI_CMD_TXHDR(dsi_trans)); | |
155 | ||
156 | if (pkt.payload) | |
157 | tmp |= PAYLOAD_PRESENT; | |
158 | else | |
159 | tmp &= ~PAYLOAD_PRESENT; | |
160 | ||
161 | tmp &= ~VBLANK_FENCE; | |
162 | ||
163 | if (enable_lpdt) | |
164 | tmp |= LP_DATA_TRANSFER; | |
165 | ||
166 | tmp &= ~(PARAM_WC_MASK | VC_MASK | DT_MASK); | |
167 | tmp |= ((pkt.header[0] & VC_MASK) << VC_SHIFT); | |
168 | tmp |= ((pkt.header[0] & DT_MASK) << DT_SHIFT); | |
169 | tmp |= (pkt.header[1] << PARAM_WC_LOWER_SHIFT); | |
170 | tmp |= (pkt.header[2] << PARAM_WC_UPPER_SHIFT); | |
171 | I915_WRITE(DSI_CMD_TXHDR(dsi_trans), tmp); | |
172 | ||
173 | return 0; | |
174 | } | |
175 | ||
176 | static int dsi_send_pkt_payld(struct intel_dsi_host *host, | |
177 | struct mipi_dsi_packet pkt) | |
178 | { | |
179 | /* payload queue can accept *256 bytes*, check limit */ | |
180 | if (pkt.payload_length > MAX_PLOAD_CREDIT * 4) { | |
181 | DRM_ERROR("payload size exceeds max queue limit\n"); | |
182 | return -1; | |
183 | } | |
184 | ||
185 | /* load data into command payload queue */ | |
186 | if (!add_payld_to_queue(host, pkt.payload, | |
187 | pkt.payload_length)) { | |
188 | DRM_ERROR("adding payload to queue failed\n"); | |
189 | return -1; | |
190 | } | |
191 | ||
192 | return 0; | |
193 | } | |
194 | ||
3f4b9d9d MC |
195 | static void dsi_program_swing_and_deemphasis(struct intel_encoder *encoder) |
196 | { | |
197 | struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); | |
198 | struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base); | |
199 | enum port port; | |
200 | u32 tmp; | |
201 | int lane; | |
202 | ||
203 | for_each_dsi_port(port, intel_dsi->ports) { | |
204 | ||
205 | /* | |
206 | * Program voltage swing and pre-emphasis level values as per | |
207 | * table in BSPEC under DDI buffer programing | |
208 | */ | |
209 | tmp = I915_READ(ICL_PORT_TX_DW5_LN0(port)); | |
210 | tmp &= ~(SCALING_MODE_SEL_MASK | RTERM_SELECT_MASK); | |
211 | tmp |= SCALING_MODE_SEL(0x2); | |
212 | tmp |= TAP2_DISABLE | TAP3_DISABLE; | |
213 | tmp |= RTERM_SELECT(0x6); | |
214 | I915_WRITE(ICL_PORT_TX_DW5_GRP(port), tmp); | |
215 | ||
216 | tmp = I915_READ(ICL_PORT_TX_DW5_AUX(port)); | |
217 | tmp &= ~(SCALING_MODE_SEL_MASK | RTERM_SELECT_MASK); | |
218 | tmp |= SCALING_MODE_SEL(0x2); | |
219 | tmp |= TAP2_DISABLE | TAP3_DISABLE; | |
220 | tmp |= RTERM_SELECT(0x6); | |
221 | I915_WRITE(ICL_PORT_TX_DW5_AUX(port), tmp); | |
222 | ||
223 | tmp = I915_READ(ICL_PORT_TX_DW2_LN0(port)); | |
224 | tmp &= ~(SWING_SEL_LOWER_MASK | SWING_SEL_UPPER_MASK | | |
225 | RCOMP_SCALAR_MASK); | |
226 | tmp |= SWING_SEL_UPPER(0x2); | |
227 | tmp |= SWING_SEL_LOWER(0x2); | |
228 | tmp |= RCOMP_SCALAR(0x98); | |
229 | I915_WRITE(ICL_PORT_TX_DW2_GRP(port), tmp); | |
230 | ||
231 | tmp = I915_READ(ICL_PORT_TX_DW2_AUX(port)); | |
232 | tmp &= ~(SWING_SEL_LOWER_MASK | SWING_SEL_UPPER_MASK | | |
233 | RCOMP_SCALAR_MASK); | |
234 | tmp |= SWING_SEL_UPPER(0x2); | |
235 | tmp |= SWING_SEL_LOWER(0x2); | |
236 | tmp |= RCOMP_SCALAR(0x98); | |
237 | I915_WRITE(ICL_PORT_TX_DW2_AUX(port), tmp); | |
238 | ||
239 | tmp = I915_READ(ICL_PORT_TX_DW4_AUX(port)); | |
240 | tmp &= ~(POST_CURSOR_1_MASK | POST_CURSOR_2_MASK | | |
241 | CURSOR_COEFF_MASK); | |
242 | tmp |= POST_CURSOR_1(0x0); | |
243 | tmp |= POST_CURSOR_2(0x0); | |
244 | tmp |= CURSOR_COEFF(0x3f); | |
245 | I915_WRITE(ICL_PORT_TX_DW4_AUX(port), tmp); | |
246 | ||
247 | for (lane = 0; lane <= 3; lane++) { | |
248 | /* Bspec: must not use GRP register for write */ | |
249 | tmp = I915_READ(ICL_PORT_TX_DW4_LN(port, lane)); | |
250 | tmp &= ~(POST_CURSOR_1_MASK | POST_CURSOR_2_MASK | | |
251 | CURSOR_COEFF_MASK); | |
252 | tmp |= POST_CURSOR_1(0x0); | |
253 | tmp |= POST_CURSOR_2(0x0); | |
254 | tmp |= CURSOR_COEFF(0x3f); | |
255 | I915_WRITE(ICL_PORT_TX_DW4_LN(port, lane), tmp); | |
256 | } | |
257 | } | |
258 | } | |
259 | ||
5a8507b5 MC |
260 | static void configure_dual_link_mode(struct intel_encoder *encoder, |
261 | const struct intel_crtc_state *pipe_config) | |
262 | { | |
263 | struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); | |
264 | struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base); | |
265 | u32 dss_ctl1; | |
266 | ||
267 | dss_ctl1 = I915_READ(DSS_CTL1); | |
268 | dss_ctl1 |= SPLITTER_ENABLE; | |
269 | dss_ctl1 &= ~OVERLAP_PIXELS_MASK; | |
270 | dss_ctl1 |= OVERLAP_PIXELS(intel_dsi->pixel_overlap); | |
271 | ||
272 | if (intel_dsi->dual_link == DSI_DUAL_LINK_FRONT_BACK) { | |
273 | const struct drm_display_mode *adjusted_mode = | |
274 | &pipe_config->base.adjusted_mode; | |
275 | u32 dss_ctl2; | |
276 | u16 hactive = adjusted_mode->crtc_hdisplay; | |
277 | u16 dl_buffer_depth; | |
278 | ||
279 | dss_ctl1 &= ~DUAL_LINK_MODE_INTERLEAVE; | |
280 | dl_buffer_depth = hactive / 2 + intel_dsi->pixel_overlap; | |
281 | ||
282 | if (dl_buffer_depth > MAX_DL_BUFFER_TARGET_DEPTH) | |
283 | DRM_ERROR("DL buffer depth exceed max value\n"); | |
284 | ||
285 | dss_ctl1 &= ~LEFT_DL_BUF_TARGET_DEPTH_MASK; | |
286 | dss_ctl1 |= LEFT_DL_BUF_TARGET_DEPTH(dl_buffer_depth); | |
287 | dss_ctl2 = I915_READ(DSS_CTL2); | |
288 | dss_ctl2 &= ~RIGHT_DL_BUF_TARGET_DEPTH_MASK; | |
289 | dss_ctl2 |= RIGHT_DL_BUF_TARGET_DEPTH(dl_buffer_depth); | |
290 | I915_WRITE(DSS_CTL2, dss_ctl2); | |
291 | } else { | |
292 | /* Interleave */ | |
293 | dss_ctl1 |= DUAL_LINK_MODE_INTERLEAVE; | |
294 | } | |
295 | ||
296 | I915_WRITE(DSS_CTL1, dss_ctl1); | |
297 | } | |
298 | ||
fcfe0bdc MC |
/*
 * Program the escape clock dividers so the escape clock stays at or
 * below DSI_MAX_ESC_CLK. The divisor is derived from the AFE (8X) clock:
 * afe_clk = pclk * bpp / lane_count. Both the DSI and DPHY divider
 * registers get the same value; POSTING_READ flushes each write before
 * moving on.
 */
static void gen11_dsi_program_esc_clk_div(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	enum port port;
	u32 bpp = mipi_dsi_pixel_format_to_bpp(intel_dsi->pixel_format);
	u32 afe_clk_khz; /* 8X Clock */
	u32 esc_clk_div_m;

	afe_clk_khz = DIV_ROUND_CLOSEST(intel_dsi->pclk * bpp,
					intel_dsi->lane_count);

	esc_clk_div_m = DIV_ROUND_UP(afe_clk_khz, DSI_MAX_ESC_CLK);

	for_each_dsi_port(port, intel_dsi->ports) {
		I915_WRITE(ICL_DSI_ESC_CLK_DIV(port),
			   esc_clk_div_m & ICL_ESC_CLK_DIV_MASK);
		POSTING_READ(ICL_DSI_ESC_CLK_DIV(port));
	}

	for_each_dsi_port(port, intel_dsi->ports) {
		I915_WRITE(ICL_DPHY_ESC_CLK_DIV(port),
			   esc_clk_div_m & ICL_ESC_CLK_DIV_MASK);
		POSTING_READ(ICL_DPHY_ESC_CLK_DIV(port));
	}
}
325 | ||
b1cb21a5 MC |
326 | static void gen11_dsi_enable_io_power(struct intel_encoder *encoder) |
327 | { | |
328 | struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); | |
329 | struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base); | |
330 | enum port port; | |
331 | u32 tmp; | |
332 | ||
333 | for_each_dsi_port(port, intel_dsi->ports) { | |
334 | tmp = I915_READ(ICL_DSI_IO_MODECTL(port)); | |
335 | tmp |= COMBO_PHY_MODE_DSI; | |
336 | I915_WRITE(ICL_DSI_IO_MODECTL(port), tmp); | |
337 | } | |
338 | ||
339 | for_each_dsi_port(port, intel_dsi->ports) { | |
340 | intel_display_power_get(dev_priv, port == PORT_A ? | |
341 | POWER_DOMAIN_PORT_DDI_A_IO : | |
342 | POWER_DOMAIN_PORT_DDI_B_IO); | |
343 | } | |
344 | } | |
345 | ||
45f09f7a MC |
346 | static void gen11_dsi_power_up_lanes(struct intel_encoder *encoder) |
347 | { | |
348 | struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); | |
349 | struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base); | |
350 | enum port port; | |
351 | u32 tmp; | |
352 | u32 lane_mask; | |
353 | ||
354 | switch (intel_dsi->lane_count) { | |
355 | case 1: | |
356 | lane_mask = PWR_DOWN_LN_3_1_0; | |
357 | break; | |
358 | case 2: | |
359 | lane_mask = PWR_DOWN_LN_3_1; | |
360 | break; | |
361 | case 3: | |
362 | lane_mask = PWR_DOWN_LN_3; | |
363 | break; | |
364 | case 4: | |
365 | default: | |
366 | lane_mask = PWR_UP_ALL_LANES; | |
367 | break; | |
368 | } | |
369 | ||
370 | for_each_dsi_port(port, intel_dsi->ports) { | |
371 | tmp = I915_READ(ICL_PORT_CL_DW10(port)); | |
372 | tmp &= ~PWR_DOWN_LN_MASK; | |
373 | I915_WRITE(ICL_PORT_CL_DW10(port), tmp | lane_mask); | |
374 | } | |
375 | } | |
376 | ||
fc41001d MC |
377 | static void gen11_dsi_config_phy_lanes_sequence(struct intel_encoder *encoder) |
378 | { | |
379 | struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); | |
380 | struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base); | |
381 | enum port port; | |
382 | u32 tmp; | |
383 | int lane; | |
384 | ||
385 | /* Step 4b(i) set loadgen select for transmit and aux lanes */ | |
386 | for_each_dsi_port(port, intel_dsi->ports) { | |
387 | tmp = I915_READ(ICL_PORT_TX_DW4_AUX(port)); | |
388 | tmp &= ~LOADGEN_SELECT; | |
389 | I915_WRITE(ICL_PORT_TX_DW4_AUX(port), tmp); | |
390 | for (lane = 0; lane <= 3; lane++) { | |
391 | tmp = I915_READ(ICL_PORT_TX_DW4_LN(port, lane)); | |
392 | tmp &= ~LOADGEN_SELECT; | |
393 | if (lane != 2) | |
394 | tmp |= LOADGEN_SELECT; | |
395 | I915_WRITE(ICL_PORT_TX_DW4_LN(port, lane), tmp); | |
396 | } | |
397 | } | |
398 | ||
399 | /* Step 4b(ii) set latency optimization for transmit and aux lanes */ | |
400 | for_each_dsi_port(port, intel_dsi->ports) { | |
401 | tmp = I915_READ(ICL_PORT_TX_DW2_AUX(port)); | |
402 | tmp &= ~FRC_LATENCY_OPTIM_MASK; | |
403 | tmp |= FRC_LATENCY_OPTIM_VAL(0x5); | |
404 | I915_WRITE(ICL_PORT_TX_DW2_AUX(port), tmp); | |
405 | tmp = I915_READ(ICL_PORT_TX_DW2_LN0(port)); | |
406 | tmp &= ~FRC_LATENCY_OPTIM_MASK; | |
407 | tmp |= FRC_LATENCY_OPTIM_VAL(0x5); | |
408 | I915_WRITE(ICL_PORT_TX_DW2_GRP(port), tmp); | |
409 | } | |
410 | ||
411 | } | |
412 | ||
3f4b9d9d MC |
/*
 * Voltage swing programming sequence: disable the common keeper, set the
 * SUS clock config, then clear TX training enable, program the swing and
 * de-emphasis values, and finally set training enable again so the PHY
 * latches the new values. The order is significant — swing values may
 * only be changed while training is disabled.
 */
static void gen11_dsi_voltage_swing_program_seq(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	u32 tmp;
	enum port port;

	/* clear common keeper enable bit */
	for_each_dsi_port(port, intel_dsi->ports) {
		tmp = I915_READ(ICL_PORT_PCS_DW1_LN0(port));
		tmp &= ~COMMON_KEEPER_EN;
		I915_WRITE(ICL_PORT_PCS_DW1_GRP(port), tmp);
		tmp = I915_READ(ICL_PORT_PCS_DW1_AUX(port));
		tmp &= ~COMMON_KEEPER_EN;
		I915_WRITE(ICL_PORT_PCS_DW1_AUX(port), tmp);
	}

	/*
	 * Set SUS Clock Config bitfield to 11b
	 * Note: loadgen select program is done
	 * as part of lane phy sequence configuration
	 */
	for_each_dsi_port(port, intel_dsi->ports) {
		tmp = I915_READ(ICL_PORT_CL_DW5(port));
		tmp |= SUS_CLOCK_CONFIG;
		I915_WRITE(ICL_PORT_CL_DW5(port), tmp);
	}

	/* Clear training enable to change swing values */
	for_each_dsi_port(port, intel_dsi->ports) {
		tmp = I915_READ(ICL_PORT_TX_DW5_LN0(port));
		tmp &= ~TX_TRAINING_EN;
		I915_WRITE(ICL_PORT_TX_DW5_GRP(port), tmp);
		tmp = I915_READ(ICL_PORT_TX_DW5_AUX(port));
		tmp &= ~TX_TRAINING_EN;
		I915_WRITE(ICL_PORT_TX_DW5_AUX(port), tmp);
	}

	/* Program swing and de-emphasis */
	dsi_program_swing_and_deemphasis(encoder);

	/* Set training enable to trigger update */
	for_each_dsi_port(port, intel_dsi->ports) {
		tmp = I915_READ(ICL_PORT_TX_DW5_LN0(port));
		tmp |= TX_TRAINING_EN;
		I915_WRITE(ICL_PORT_TX_DW5_GRP(port), tmp);
		tmp = I915_READ(ICL_PORT_TX_DW5_AUX(port));
		tmp |= TX_TRAINING_EN;
		I915_WRITE(ICL_PORT_TX_DW5_AUX(port), tmp);
	}
}
464 | ||
ba3df888 MC |
465 | static void gen11_dsi_enable_ddi_buffer(struct intel_encoder *encoder) |
466 | { | |
467 | struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); | |
468 | struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base); | |
469 | u32 tmp; | |
470 | enum port port; | |
471 | ||
472 | for_each_dsi_port(port, intel_dsi->ports) { | |
473 | tmp = I915_READ(DDI_BUF_CTL(port)); | |
474 | tmp |= DDI_BUF_CTL_ENABLE; | |
475 | I915_WRITE(DDI_BUF_CTL(port), tmp); | |
476 | ||
477 | if (wait_for_us(!(I915_READ(DDI_BUF_CTL(port)) & | |
478 | DDI_BUF_IS_IDLE), | |
479 | 500)) | |
480 | DRM_ERROR("DDI port:%c buffer idle\n", port_name(port)); | |
481 | } | |
482 | } | |
483 | ||
70a7b836 MC |
484 | static void gen11_dsi_setup_dphy_timings(struct intel_encoder *encoder) |
485 | { | |
486 | struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); | |
487 | struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base); | |
488 | u32 tmp; | |
489 | enum port port; | |
490 | ||
491 | /* Program T-INIT master registers */ | |
492 | for_each_dsi_port(port, intel_dsi->ports) { | |
493 | tmp = I915_READ(ICL_DSI_T_INIT_MASTER(port)); | |
494 | tmp &= ~MASTER_INIT_TIMER_MASK; | |
495 | tmp |= intel_dsi->init_count; | |
496 | I915_WRITE(ICL_DSI_T_INIT_MASTER(port), tmp); | |
497 | } | |
e72cce53 MC |
498 | |
499 | /* Program DPHY clock lanes timings */ | |
500 | for_each_dsi_port(port, intel_dsi->ports) { | |
501 | I915_WRITE(DPHY_CLK_TIMING_PARAM(port), intel_dsi->dphy_reg); | |
502 | ||
503 | /* shadow register inside display core */ | |
504 | I915_WRITE(DSI_CLK_TIMING_PARAM(port), intel_dsi->dphy_reg); | |
505 | } | |
506 | ||
507 | /* Program DPHY data lanes timings */ | |
508 | for_each_dsi_port(port, intel_dsi->ports) { | |
509 | I915_WRITE(DPHY_DATA_TIMING_PARAM(port), | |
510 | intel_dsi->dphy_data_lane_reg); | |
511 | ||
512 | /* shadow register inside display core */ | |
513 | I915_WRITE(DSI_DATA_TIMING_PARAM(port), | |
514 | intel_dsi->dphy_data_lane_reg); | |
515 | } | |
5fea8645 MC |
516 | |
517 | /* | |
518 | * If DSI link operating at or below an 800 MHz, | |
519 | * TA_SURE should be override and programmed to | |
520 | * a value '0' inside TA_PARAM_REGISTERS otherwise | |
521 | * leave all fields at HW default values. | |
522 | */ | |
523 | if (intel_dsi_bitrate(intel_dsi) <= 800000) { | |
524 | for_each_dsi_port(port, intel_dsi->ports) { | |
525 | tmp = I915_READ(DPHY_TA_TIMING_PARAM(port)); | |
526 | tmp &= ~TA_SURE_MASK; | |
527 | tmp |= TA_SURE_OVERRIDE | TA_SURE(0); | |
528 | I915_WRITE(DPHY_TA_TIMING_PARAM(port), tmp); | |
529 | ||
530 | /* shadow register inside display core */ | |
531 | tmp = I915_READ(DSI_TA_TIMING_PARAM(port)); | |
532 | tmp &= ~TA_SURE_MASK; | |
533 | tmp |= TA_SURE_OVERRIDE | TA_SURE(0); | |
534 | I915_WRITE(DSI_TA_TIMING_PARAM(port), tmp); | |
535 | } | |
536 | } | |
70a7b836 MC |
537 | } |
538 | ||
32250c8e MC |
539 | static void gen11_dsi_gate_clocks(struct intel_encoder *encoder) |
540 | { | |
541 | struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); | |
542 | struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base); | |
543 | u32 tmp; | |
544 | enum port port; | |
545 | ||
546 | mutex_lock(&dev_priv->dpll_lock); | |
547 | tmp = I915_READ(DPCLKA_CFGCR0_ICL); | |
548 | for_each_dsi_port(port, intel_dsi->ports) { | |
549 | tmp |= DPCLKA_CFGCR0_DDI_CLK_OFF(port); | |
550 | } | |
551 | ||
552 | I915_WRITE(DPCLKA_CFGCR0_ICL, tmp); | |
553 | mutex_unlock(&dev_priv->dpll_lock); | |
554 | } | |
555 | ||
1026bea0 MC |
556 | static void gen11_dsi_ungate_clocks(struct intel_encoder *encoder) |
557 | { | |
558 | struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); | |
559 | struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base); | |
560 | u32 tmp; | |
561 | enum port port; | |
562 | ||
563 | mutex_lock(&dev_priv->dpll_lock); | |
564 | tmp = I915_READ(DPCLKA_CFGCR0_ICL); | |
565 | for_each_dsi_port(port, intel_dsi->ports) { | |
566 | tmp &= ~DPCLKA_CFGCR0_DDI_CLK_OFF(port); | |
567 | } | |
568 | ||
569 | I915_WRITE(DPCLKA_CFGCR0_ICL, tmp); | |
570 | mutex_unlock(&dev_priv->dpll_lock); | |
571 | } | |
572 | ||
70f4f502 MC |
573 | static void |
574 | gen11_dsi_configure_transcoder(struct intel_encoder *encoder, | |
575 | const struct intel_crtc_state *pipe_config) | |
d364dc66 MC |
576 | { |
577 | struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); | |
578 | struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base); | |
70f4f502 MC |
579 | struct intel_crtc *intel_crtc = to_intel_crtc(pipe_config->base.crtc); |
580 | enum pipe pipe = intel_crtc->pipe; | |
d364dc66 MC |
581 | u32 tmp; |
582 | enum port port; | |
583 | enum transcoder dsi_trans; | |
584 | ||
585 | for_each_dsi_port(port, intel_dsi->ports) { | |
586 | dsi_trans = dsi_port_to_transcoder(port); | |
587 | tmp = I915_READ(DSI_TRANS_FUNC_CONF(dsi_trans)); | |
588 | ||
589 | if (intel_dsi->eotp_pkt) | |
590 | tmp &= ~EOTP_DISABLED; | |
591 | else | |
592 | tmp |= EOTP_DISABLED; | |
593 | ||
594 | /* enable link calibration if freq > 1.5Gbps */ | |
595 | if (intel_dsi_bitrate(intel_dsi) >= 1500 * 1000) { | |
596 | tmp &= ~LINK_CALIBRATION_MASK; | |
597 | tmp |= CALIBRATION_ENABLED_INITIAL_ONLY; | |
598 | } | |
599 | ||
600 | /* configure continuous clock */ | |
601 | tmp &= ~CONTINUOUS_CLK_MASK; | |
602 | if (intel_dsi->clock_stop) | |
603 | tmp |= CLK_ENTER_LP_AFTER_DATA; | |
604 | else | |
605 | tmp |= CLK_HS_CONTINUOUS; | |
606 | ||
607 | /* configure buffer threshold limit to minimum */ | |
608 | tmp &= ~PIX_BUF_THRESHOLD_MASK; | |
609 | tmp |= PIX_BUF_THRESHOLD_1_4; | |
610 | ||
611 | /* set virtual channel to '0' */ | |
612 | tmp &= ~PIX_VIRT_CHAN_MASK; | |
613 | tmp |= PIX_VIRT_CHAN(0); | |
614 | ||
615 | /* program BGR transmission */ | |
616 | if (intel_dsi->bgr_enabled) | |
617 | tmp |= BGR_TRANSMISSION; | |
618 | ||
619 | /* select pixel format */ | |
620 | tmp &= ~PIX_FMT_MASK; | |
621 | switch (intel_dsi->pixel_format) { | |
622 | default: | |
623 | MISSING_CASE(intel_dsi->pixel_format); | |
624 | /* fallthrough */ | |
625 | case MIPI_DSI_FMT_RGB565: | |
626 | tmp |= PIX_FMT_RGB565; | |
627 | break; | |
628 | case MIPI_DSI_FMT_RGB666_PACKED: | |
629 | tmp |= PIX_FMT_RGB666_PACKED; | |
630 | break; | |
631 | case MIPI_DSI_FMT_RGB666: | |
632 | tmp |= PIX_FMT_RGB666_LOOSE; | |
633 | break; | |
634 | case MIPI_DSI_FMT_RGB888: | |
635 | tmp |= PIX_FMT_RGB888; | |
636 | break; | |
637 | } | |
638 | ||
639 | /* program DSI operation mode */ | |
640 | if (is_vid_mode(intel_dsi)) { | |
641 | tmp &= ~OP_MODE_MASK; | |
642 | switch (intel_dsi->video_mode_format) { | |
643 | default: | |
644 | MISSING_CASE(intel_dsi->video_mode_format); | |
645 | /* fallthrough */ | |
646 | case VIDEO_MODE_NON_BURST_WITH_SYNC_EVENTS: | |
647 | tmp |= VIDEO_MODE_SYNC_EVENT; | |
648 | break; | |
649 | case VIDEO_MODE_NON_BURST_WITH_SYNC_PULSE: | |
650 | tmp |= VIDEO_MODE_SYNC_PULSE; | |
651 | break; | |
652 | } | |
653 | } | |
654 | ||
655 | I915_WRITE(DSI_TRANS_FUNC_CONF(dsi_trans), tmp); | |
656 | } | |
70f4f502 MC |
657 | |
658 | /* enable port sync mode if dual link */ | |
659 | if (intel_dsi->dual_link) { | |
660 | for_each_dsi_port(port, intel_dsi->ports) { | |
661 | dsi_trans = dsi_port_to_transcoder(port); | |
662 | tmp = I915_READ(TRANS_DDI_FUNC_CTL2(dsi_trans)); | |
663 | tmp |= PORT_SYNC_MODE_ENABLE; | |
664 | I915_WRITE(TRANS_DDI_FUNC_CTL2(dsi_trans), tmp); | |
665 | } | |
666 | ||
5a8507b5 MC |
667 | /* configure stream splitting */ |
668 | configure_dual_link_mode(encoder, pipe_config); | |
70f4f502 MC |
669 | } |
670 | ||
671 | for_each_dsi_port(port, intel_dsi->ports) { | |
672 | dsi_trans = dsi_port_to_transcoder(port); | |
673 | ||
674 | /* select data lane width */ | |
675 | tmp = I915_READ(TRANS_DDI_FUNC_CTL(dsi_trans)); | |
676 | tmp &= ~DDI_PORT_WIDTH_MASK; | |
677 | tmp |= DDI_PORT_WIDTH(intel_dsi->lane_count); | |
678 | ||
679 | /* select input pipe */ | |
680 | tmp &= ~TRANS_DDI_EDP_INPUT_MASK; | |
681 | switch (pipe) { | |
682 | default: | |
683 | MISSING_CASE(pipe); | |
684 | /* fallthrough */ | |
685 | case PIPE_A: | |
686 | tmp |= TRANS_DDI_EDP_INPUT_A_ON; | |
687 | break; | |
688 | case PIPE_B: | |
689 | tmp |= TRANS_DDI_EDP_INPUT_B_ONOFF; | |
690 | break; | |
691 | case PIPE_C: | |
692 | tmp |= TRANS_DDI_EDP_INPUT_C_ONOFF; | |
693 | break; | |
694 | } | |
695 | ||
696 | /* enable DDI buffer */ | |
697 | tmp |= TRANS_DDI_FUNC_ENABLE; | |
698 | I915_WRITE(TRANS_DDI_FUNC_CTL(dsi_trans), tmp); | |
699 | } | |
700 | ||
701 | /* wait for link ready */ | |
702 | for_each_dsi_port(port, intel_dsi->ports) { | |
703 | dsi_trans = dsi_port_to_transcoder(port); | |
704 | if (wait_for_us((I915_READ(DSI_TRANS_FUNC_CONF(dsi_trans)) & | |
705 | LINK_READY), 2500)) | |
706 | DRM_ERROR("DSI link not ready\n"); | |
707 | } | |
d364dc66 MC |
708 | } |
709 | ||
d1aeb5f3 MC |
710 | static void |
711 | gen11_dsi_set_transcoder_timings(struct intel_encoder *encoder, | |
712 | const struct intel_crtc_state *pipe_config) | |
713 | { | |
714 | struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); | |
715 | struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base); | |
716 | const struct drm_display_mode *adjusted_mode = | |
717 | &pipe_config->base.adjusted_mode; | |
718 | enum port port; | |
719 | enum transcoder dsi_trans; | |
720 | /* horizontal timings */ | |
721 | u16 htotal, hactive, hsync_start, hsync_end, hsync_size; | |
722 | u16 hfront_porch, hback_porch; | |
723 | /* vertical timings */ | |
724 | u16 vtotal, vactive, vsync_start, vsync_end, vsync_shift; | |
725 | ||
726 | hactive = adjusted_mode->crtc_hdisplay; | |
727 | htotal = adjusted_mode->crtc_htotal; | |
728 | hsync_start = adjusted_mode->crtc_hsync_start; | |
729 | hsync_end = adjusted_mode->crtc_hsync_end; | |
730 | hsync_size = hsync_end - hsync_start; | |
731 | hfront_porch = (adjusted_mode->crtc_hsync_start - | |
732 | adjusted_mode->crtc_hdisplay); | |
733 | hback_porch = (adjusted_mode->crtc_htotal - | |
734 | adjusted_mode->crtc_hsync_end); | |
735 | vactive = adjusted_mode->crtc_vdisplay; | |
736 | vtotal = adjusted_mode->crtc_vtotal; | |
737 | vsync_start = adjusted_mode->crtc_vsync_start; | |
738 | vsync_end = adjusted_mode->crtc_vsync_end; | |
739 | vsync_shift = hsync_start - htotal / 2; | |
740 | ||
741 | if (intel_dsi->dual_link) { | |
742 | hactive /= 2; | |
743 | if (intel_dsi->dual_link == DSI_DUAL_LINK_FRONT_BACK) | |
744 | hactive += intel_dsi->pixel_overlap; | |
745 | htotal /= 2; | |
746 | } | |
747 | ||
748 | /* minimum hactive as per bspec: 256 pixels */ | |
749 | if (adjusted_mode->crtc_hdisplay < 256) | |
750 | DRM_ERROR("hactive is less then 256 pixels\n"); | |
751 | ||
752 | /* if RGB666 format, then hactive must be multiple of 4 pixels */ | |
753 | if (intel_dsi->pixel_format == MIPI_DSI_FMT_RGB666 && hactive % 4 != 0) | |
754 | DRM_ERROR("hactive pixels are not multiple of 4\n"); | |
755 | ||
756 | /* program TRANS_HTOTAL register */ | |
757 | for_each_dsi_port(port, intel_dsi->ports) { | |
758 | dsi_trans = dsi_port_to_transcoder(port); | |
759 | I915_WRITE(HTOTAL(dsi_trans), | |
760 | (hactive - 1) | ((htotal - 1) << 16)); | |
761 | } | |
762 | ||
763 | /* TRANS_HSYNC register to be programmed only for video mode */ | |
764 | if (intel_dsi->operation_mode == INTEL_DSI_VIDEO_MODE) { | |
765 | if (intel_dsi->video_mode_format == | |
766 | VIDEO_MODE_NON_BURST_WITH_SYNC_PULSE) { | |
767 | /* BSPEC: hsync size should be atleast 16 pixels */ | |
768 | if (hsync_size < 16) | |
769 | DRM_ERROR("hsync size < 16 pixels\n"); | |
770 | } | |
771 | ||
772 | if (hback_porch < 16) | |
773 | DRM_ERROR("hback porch < 16 pixels\n"); | |
774 | ||
775 | if (intel_dsi->dual_link) { | |
776 | hsync_start /= 2; | |
777 | hsync_end /= 2; | |
778 | } | |
779 | ||
780 | for_each_dsi_port(port, intel_dsi->ports) { | |
781 | dsi_trans = dsi_port_to_transcoder(port); | |
782 | I915_WRITE(HSYNC(dsi_trans), | |
783 | (hsync_start - 1) | ((hsync_end - 1) << 16)); | |
784 | } | |
785 | } | |
786 | ||
787 | /* program TRANS_VTOTAL register */ | |
788 | for_each_dsi_port(port, intel_dsi->ports) { | |
789 | dsi_trans = dsi_port_to_transcoder(port); | |
790 | /* | |
791 | * FIXME: Programing this by assuming progressive mode, since | |
792 | * non-interlaced info from VBT is not saved inside | |
793 | * struct drm_display_mode. | |
794 | * For interlace mode: program required pixel minus 2 | |
795 | */ | |
796 | I915_WRITE(VTOTAL(dsi_trans), | |
797 | (vactive - 1) | ((vtotal - 1) << 16)); | |
798 | } | |
799 | ||
800 | if (vsync_end < vsync_start || vsync_end > vtotal) | |
801 | DRM_ERROR("Invalid vsync_end value\n"); | |
802 | ||
803 | if (vsync_start < vactive) | |
804 | DRM_ERROR("vsync_start less than vactive\n"); | |
805 | ||
806 | /* program TRANS_VSYNC register */ | |
807 | for_each_dsi_port(port, intel_dsi->ports) { | |
808 | dsi_trans = dsi_port_to_transcoder(port); | |
809 | I915_WRITE(VSYNC(dsi_trans), | |
810 | (vsync_start - 1) | ((vsync_end - 1) << 16)); | |
811 | } | |
812 | ||
813 | /* | |
814 | * FIXME: It has to be programmed only for interlaced | |
815 | * modes. Put the check condition here once interlaced | |
816 | * info available as described above. | |
817 | * program TRANS_VSYNCSHIFT register | |
818 | */ | |
819 | for_each_dsi_port(port, intel_dsi->ports) { | |
820 | dsi_trans = dsi_port_to_transcoder(port); | |
821 | I915_WRITE(VSYNCSHIFT(dsi_trans), vsync_shift); | |
822 | } | |
823 | } | |
824 | ||
303e347c MC |
/*
 * Enable the DSI transcoder(s) backing each enabled DSI port and wait for
 * the hardware to report them active.
 */
static void gen11_dsi_enable_transcoder(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	enum port port;
	enum transcoder dsi_trans;
	u32 tmp;

	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		/* read-modify-write so other PIPECONF bits are preserved */
		tmp = I915_READ(PIPECONF(dsi_trans));
		tmp |= PIPECONF_ENABLE;
		I915_WRITE(PIPECONF(dsi_trans), tmp);

		/* wait for transcoder to be enabled */
		if (intel_wait_for_register(dev_priv, PIPECONF(dsi_trans),
					    I965_PIPECONF_ACTIVE,
					    I965_PIPECONF_ACTIVE, 10))
			DRM_ERROR("DSI transcoder not enabled\n");
	}
}
846 | ||
5a4712f4 MC |
/*
 * Program the DSI protocol timeout registers (HS TX, LP RX, turnaround)
 * for each enabled DSI transcoder.  The VBT supplies the timeouts in
 * byte-clock counts; they are converted to escape-clock counts here.
 */
static void gen11_dsi_setup_timeouts(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	enum port port;
	enum transcoder dsi_trans;
	u32 tmp, hs_tx_timeout, lp_rx_timeout, ta_timeout, divisor, mul;

	/*
	 * escape clock count calculation:
	 * BYTE_CLK_COUNT = TIME_NS/(8 * UI)
	 * UI (nsec) = (10^6)/Bitrate
	 * TIME_NS = (BYTE_CLK_COUNT * 8 * 10^6)/ Bitrate
	 * ESCAPE_CLK_COUNT = TIME_NS/ESC_CLK_NS
	 */
	divisor = intel_dsi_tlpx_ns(intel_dsi) * intel_dsi_bitrate(intel_dsi) * 1000;
	mul = 8 * 1000000;
	hs_tx_timeout = DIV_ROUND_UP(intel_dsi->hs_tx_timeout * mul,
				     divisor);
	lp_rx_timeout = DIV_ROUND_UP(intel_dsi->lp_rx_timeout * mul, divisor);
	ta_timeout = DIV_ROUND_UP(intel_dsi->turn_arnd_val * mul, divisor);

	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);

		/* program hst_tx_timeout */
		tmp = I915_READ(DSI_HSTX_TO(dsi_trans));
		tmp &= ~HSTX_TIMEOUT_VALUE_MASK;
		tmp |= HSTX_TIMEOUT_VALUE(hs_tx_timeout);
		I915_WRITE(DSI_HSTX_TO(dsi_trans), tmp);

		/* FIXME: DSI_CALIB_TO */

		/* program lp_rx_host timeout */
		tmp = I915_READ(DSI_LPRX_HOST_TO(dsi_trans));
		tmp &= ~LPRX_TIMEOUT_VALUE_MASK;
		tmp |= LPRX_TIMEOUT_VALUE(lp_rx_timeout);
		I915_WRITE(DSI_LPRX_HOST_TO(dsi_trans), tmp);

		/* FIXME: DSI_PWAIT_TO */

		/* program turn around timeout */
		tmp = I915_READ(DSI_TA_TO(dsi_trans));
		tmp &= ~TA_TIMEOUT_VALUE_MASK;
		tmp |= TA_TIMEOUT_VALUE(ta_timeout);
		I915_WRITE(DSI_TA_TO(dsi_trans), tmp);
	}
}
895 | ||
70f4f502 MC |
/*
 * Bring up the DDI port and combo PHY for DSI (bspec enable sequence
 * step 4).  The sub-steps below must run in this exact order.
 */
static void
gen11_dsi_enable_port_and_phy(struct intel_encoder *encoder,
			      const struct intel_crtc_state *pipe_config)
{
	/* step 4a: power up all lanes of the DDI used by DSI */
	gen11_dsi_power_up_lanes(encoder);

	/* step 4b: configure lane sequencing of the Combo-PHY transmitters */
	gen11_dsi_config_phy_lanes_sequence(encoder);

	/* step 4c: configure voltage swing and skew */
	gen11_dsi_voltage_swing_program_seq(encoder);

	/* enable DDI buffer */
	gen11_dsi_enable_ddi_buffer(encoder);

	/* setup D-PHY timings */
	gen11_dsi_setup_dphy_timings(encoder);

	/* step 4h: setup DSI protocol timeouts */
	gen11_dsi_setup_timeouts(encoder);

	/* Step (4h, 4i, 4j, 4k): Configure transcoder */
	gen11_dsi_configure_transcoder(encoder, pipe_config);

	/* Step 4l: Gate DDI clocks */
	gen11_dsi_gate_clocks(encoder);
}
924 | ||
bfee32bf MC |
/*
 * Power up the panel: program the maximum return packet size on each DSI
 * host and then run the VBT panel power-on / init sequences.  Must run
 * before the transcoder is enabled so all commands reach the panel first.
 */
static void gen11_dsi_powerup_panel(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	struct mipi_dsi_device *dsi;
	enum port port;
	enum transcoder dsi_trans;
	u32 tmp;
	int ret;

	/* set maximum return packet size */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);

		/*
		 * FIXME: This uses the number of DW's currently in the payload
		 * receive queue. This is probably not what we want here.
		 */
		tmp = I915_READ(DSI_CMD_RXCTL(dsi_trans));
		tmp &= NUMBER_RX_PLOAD_DW_MASK;
		/* multiply "Number Rx Payload DW" by 4 to get max value */
		tmp = tmp * 4;
		dsi = intel_dsi->dsi_hosts[port]->device;
		ret = mipi_dsi_set_maximum_return_packet_size(dsi, tmp);
		if (ret < 0)
			DRM_ERROR("error setting max return pkt size%d\n", tmp);
	}

	/* panel power on related mipi dsi vbt sequences */
	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_POWER_ON);
	intel_dsi_msleep(intel_dsi, intel_dsi->panel_on_delay);
	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_DEASSERT_RESET);
	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_INIT_OTP);
	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_DISPLAY_ON);

	/* ensure all panel commands dispatched before enabling transcoder */
	wait_for_cmds_dispatched_to_panel(encoder);
}
963 | ||
95f2f4db VK |
/*
 * Encoder hook run before the PLL is enabled: power up the DSI IO and
 * program the escape clock divider (bspec enable sequence steps 2-3).
 */
static void gen11_dsi_pre_pll_enable(struct intel_encoder *encoder,
				     const struct intel_crtc_state *pipe_config,
				     const struct drm_connector_state *conn_state)
{
	/* step2: enable IO power */
	gen11_dsi_enable_io_power(encoder);

	/* step3: enable DSI PLL */
	gen11_dsi_program_esc_clk_div(encoder);
}
974 | ||
/*
 * Encoder pre-enable hook: bring up port/PHY, power up the panel,
 * program transcoder timings, enable the transcoder and finally the
 * backlight (bspec enable sequence steps 4-7, in order).
 */
static void gen11_dsi_pre_enable(struct intel_encoder *encoder,
				 const struct intel_crtc_state *pipe_config,
				 const struct drm_connector_state *conn_state)
{
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);

	/* step4: enable DSI port and DPHY */
	gen11_dsi_enable_port_and_phy(encoder, pipe_config);

	/* step5: program and powerup panel */
	gen11_dsi_powerup_panel(encoder);

	/* step6c: configure transcoder timings */
	gen11_dsi_set_transcoder_timings(encoder, pipe_config);

	/* step6d: enable dsi transcoder */
	gen11_dsi_enable_transcoder(encoder);

	/* step7: enable backlight */
	intel_panel_enable_backlight(pipe_config, conn_state);
	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_BACKLIGHT_ON);
}
d9d996b6 | 997 | |
4e123bd3 MC |
998 | static void gen11_dsi_disable_transcoder(struct intel_encoder *encoder) |
999 | { | |
1000 | struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); | |
1001 | struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base); | |
1002 | enum port port; | |
1003 | enum transcoder dsi_trans; | |
1004 | u32 tmp; | |
1005 | ||
1006 | for_each_dsi_port(port, intel_dsi->ports) { | |
1007 | dsi_trans = dsi_port_to_transcoder(port); | |
1008 | ||
1009 | /* disable transcoder */ | |
1010 | tmp = I915_READ(PIPECONF(dsi_trans)); | |
1011 | tmp &= ~PIPECONF_ENABLE; | |
1012 | I915_WRITE(PIPECONF(dsi_trans), tmp); | |
1013 | ||
1014 | /* wait for transcoder to be disabled */ | |
1015 | if (intel_wait_for_register(dev_priv, PIPECONF(dsi_trans), | |
1016 | I965_PIPECONF_ACTIVE, 0, 50)) | |
1017 | DRM_ERROR("DSI trancoder not disabled\n"); | |
1018 | } | |
1019 | } | |
1020 | ||
522cc3f7 MC |
/*
 * Run the VBT panel power-off sequences and wait until every command has
 * actually been dispatched to the panel.
 */
static void gen11_dsi_powerdown_panel(struct intel_encoder *encoder)
{
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);

	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_DISPLAY_OFF);
	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_ASSERT_RESET);
	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_POWER_OFF);

	/* ensure cmds dispatched to panel */
	wait_for_cmds_dispatched_to_panel(encoder);
}
1032 | ||
4769b598 MC |
/*
 * Tear down the transcoder configuration: put the DSI link into ULPS,
 * disable the DDI function, and drop port sync mode for dual link.
 * NOTE(review): "trancoder" in the name is a long-standing typo; kept
 * because callers reference it.
 */
static void gen11_dsi_deconfigure_trancoder(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	enum port port;
	enum transcoder dsi_trans;
	u32 tmp;

	/* put dsi link in ULPS */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		tmp = I915_READ(DSI_LP_MSG(dsi_trans));
		tmp |= LINK_ENTER_ULPS;
		/* request ULPS entry rather than LP-11 escape mode */
		tmp &= ~LINK_ULPS_TYPE_LP11;
		I915_WRITE(DSI_LP_MSG(dsi_trans), tmp);

		if (wait_for_us((I915_READ(DSI_LP_MSG(dsi_trans)) &
				LINK_IN_ULPS),
				10))
			DRM_ERROR("DSI link not in ULPS\n");
	}

	/* disable ddi function */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		tmp = I915_READ(TRANS_DDI_FUNC_CTL(dsi_trans));
		tmp &= ~TRANS_DDI_FUNC_ENABLE;
		I915_WRITE(TRANS_DDI_FUNC_CTL(dsi_trans), tmp);
	}

	/* disable port sync mode if dual link */
	if (intel_dsi->dual_link) {
		for_each_dsi_port(port, intel_dsi->ports) {
			dsi_trans = dsi_port_to_transcoder(port);
			tmp = I915_READ(TRANS_DDI_FUNC_CTL2(dsi_trans));
			tmp &= ~PORT_SYNC_MODE_ENABLE;
			I915_WRITE(TRANS_DDI_FUNC_CTL2(dsi_trans), tmp);
		}
	}
}
1073 | ||
019cec36 MC |
1074 | static void gen11_dsi_disable_port(struct intel_encoder *encoder) |
1075 | { | |
1076 | struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); | |
1077 | struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base); | |
1078 | u32 tmp; | |
1079 | enum port port; | |
1080 | ||
1026bea0 | 1081 | gen11_dsi_ungate_clocks(encoder); |
019cec36 MC |
1082 | for_each_dsi_port(port, intel_dsi->ports) { |
1083 | tmp = I915_READ(DDI_BUF_CTL(port)); | |
1084 | tmp &= ~DDI_BUF_CTL_ENABLE; | |
1085 | I915_WRITE(DDI_BUF_CTL(port), tmp); | |
1086 | ||
1087 | if (wait_for_us((I915_READ(DDI_BUF_CTL(port)) & | |
1088 | DDI_BUF_IS_IDLE), | |
1089 | 8)) | |
1090 | DRM_ERROR("DDI port:%c buffer not idle\n", | |
1091 | port_name(port)); | |
1092 | } | |
1026bea0 | 1093 | gen11_dsi_ungate_clocks(encoder); |
019cec36 MC |
1094 | } |
1095 | ||
0f0fe849 MC |
/*
 * Release the DSI IO power well references taken at enable time and
 * switch the combo PHY pads back to DDI mode.
 */
static void gen11_dsi_disable_io_power(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	enum port port;
	u32 tmp;

	intel_display_power_put(dev_priv, POWER_DOMAIN_PORT_DDI_A_IO);

	/* the second IO reference is only held for dual link */
	if (intel_dsi->dual_link)
		intel_display_power_put(dev_priv, POWER_DOMAIN_PORT_DDI_B_IO);

	/* set mode to DDI */
	for_each_dsi_port(port, intel_dsi->ports) {
		tmp = I915_READ(ICL_DSI_IO_MODECTL(port));
		tmp &= ~COMBO_PHY_MODE_DSI;
		I915_WRITE(ICL_DSI_IO_MODECTL(port), tmp);
	}
}
1115 | ||
e2758048 MC |
/*
 * Encoder disable hook: full DSI teardown in bspec order — backlight off,
 * transcoder off, panel powerdown, transcoder deconfig, port off, IO
 * power off.
 */
static void gen11_dsi_disable(struct intel_encoder *encoder,
			      const struct intel_crtc_state *old_crtc_state,
			      const struct drm_connector_state *old_conn_state)
{
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);

	/* step1: turn off backlight */
	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_BACKLIGHT_OFF);
	intel_panel_disable_backlight(old_conn_state);

	/* step2d,e: disable transcoder and wait */
	gen11_dsi_disable_transcoder(encoder);

	/* step2f,g: powerdown panel */
	gen11_dsi_powerdown_panel(encoder);

	/* step2h,i,j: deconfig trancoder */
	gen11_dsi_deconfigure_trancoder(encoder);

	/* step3: disable port */
	gen11_dsi_disable_port(encoder);

	/* step4: disable IO power */
	gen11_dsi_disable_io_power(encoder);
}
bf4d57ff | 1141 | |
8327af28 VK |
/*
 * Read back pipe config state for DSI: derive the port clock from the
 * shared DPLL and report the panel pixel clock from VBT-derived state.
 */
static void gen11_dsi_get_config(struct intel_encoder *encoder,
				 struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	u32 pll_id;

	/* FIXME: adapt icl_ddi_clock_get() for DSI and use that? */
	pll_id = intel_get_shared_dpll_id(dev_priv, pipe_config->shared_dpll);
	pipe_config->port_clock = cnl_calc_wrpll_link(dev_priv, pll_id);
	pipe_config->base.adjusted_mode.crtc_clock = intel_dsi->pclk;
	pipe_config->output_types |= BIT(INTEL_OUTPUT_DSI);
}
1155 | ||
d04afb15 MC |
/*
 * Atomic compute_config hook: fix up the adjusted mode from the panel's
 * fixed mode, pick the CPU transcoder, and set the port clock.
 * Always returns true (the fixed-mode path cannot fail here).
 */
static bool gen11_dsi_compute_config(struct intel_encoder *encoder,
				     struct intel_crtc_state *pipe_config,
				     struct drm_connector_state *conn_state)
{
	struct intel_dsi *intel_dsi = container_of(encoder, struct intel_dsi,
						   base);
	struct intel_connector *intel_connector = intel_dsi->attached_connector;
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->base.crtc);
	const struct drm_display_mode *fixed_mode =
					intel_connector->panel.fixed_mode;
	struct drm_display_mode *adjusted_mode =
					&pipe_config->base.adjusted_mode;

	intel_fixed_panel_mode(fixed_mode, adjusted_mode);
	intel_pch_panel_fitting(crtc, pipe_config, conn_state->scaling_mode);

	adjusted_mode->flags = 0;

	/* Dual link (and port A) goes to transcoder DSI'0' */
	if (intel_dsi->ports == BIT(PORT_B))
		pipe_config->cpu_transcoder = TRANSCODER_DSI_1;
	else
		pipe_config->cpu_transcoder = TRANSCODER_DSI_0;

	pipe_config->clock_set = true;
	/* DDI clock is bitrate / 5 for DSI */
	pipe_config->port_clock = intel_dsi_bitrate(intel_dsi) / 5;

	return true;
}
1185 | ||
ab841148 MC |
1186 | static u64 gen11_dsi_get_power_domains(struct intel_encoder *encoder, |
1187 | struct intel_crtc_state *crtc_state) | |
1188 | { | |
1189 | struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base); | |
1190 | u64 domains = 0; | |
1191 | enum port port; | |
1192 | ||
1193 | for_each_dsi_port(port, intel_dsi->ports) | |
1194 | if (port == PORT_A) | |
1195 | domains |= BIT_ULL(POWER_DOMAIN_PORT_DDI_A_IO); | |
1196 | else | |
1197 | domains |= BIT_ULL(POWER_DOMAIN_PORT_DDI_B_IO); | |
1198 | ||
1199 | return domains; | |
1200 | } | |
1201 | ||
/*
 * Read back whether the DSI encoder is enabled and which pipe it drives.
 * Returns false if the power domain is off or the EDP input select is
 * invalid.  NOTE(review): with dual link both ports are read and the last
 * port's PIPECONF/pipe wins — presumably both agree; verify on hardware.
 */
static bool gen11_dsi_get_hw_state(struct intel_encoder *encoder,
				   enum pipe *pipe)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	u32 tmp;
	enum port port;
	enum transcoder dsi_trans;
	bool ret = false;

	if (!intel_display_power_get_if_enabled(dev_priv,
						encoder->power_domain))
		return false;

	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		tmp = I915_READ(TRANS_DDI_FUNC_CTL(dsi_trans));
		switch (tmp & TRANS_DDI_EDP_INPUT_MASK) {
		case TRANS_DDI_EDP_INPUT_A_ON:
			*pipe = PIPE_A;
			break;
		case TRANS_DDI_EDP_INPUT_B_ONOFF:
			*pipe = PIPE_B;
			break;
		case TRANS_DDI_EDP_INPUT_C_ONOFF:
			*pipe = PIPE_C;
			break;
		default:
			DRM_ERROR("Invalid PIPE input\n");
			goto out;
		}

		tmp = I915_READ(PIPECONF(dsi_trans));
		ret = tmp & PIPECONF_ENABLE;
	}
out:
	/* balance the power reference taken above */
	intel_display_power_put(dev_priv, encoder->power_domain);
	return ret;
}
1241 | ||
e2758048 MC |
/* drm_encoder_funcs.destroy: delegate to the common intel encoder destroy. */
static void gen11_dsi_encoder_destroy(struct drm_encoder *encoder)
{
	intel_encoder_destroy(encoder);
}
1246 | ||
/* DRM encoder vtable for the Gen11 DSI encoder. */
static const struct drm_encoder_funcs gen11_dsi_encoder_funcs = {
	.destroy = gen11_dsi_encoder_destroy,
};
1250 | ||
/* DRM connector vtable: standard intel/atomic helpers, nothing DSI-specific. */
static const struct drm_connector_funcs gen11_dsi_connector_funcs = {
	.late_register = intel_connector_register,
	.early_unregister = intel_connector_unregister,
	.destroy = intel_connector_destroy,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.atomic_get_property = intel_digital_connector_atomic_get_property,
	.atomic_set_property = intel_digital_connector_atomic_set_property,
	.atomic_destroy_state = drm_atomic_helper_connector_destroy_state,
	.atomic_duplicate_state = intel_digital_connector_duplicate_state,
};
1261 | ||
/* Connector helper vtable: mode enumeration/validation via common DSI code. */
static const struct drm_connector_helper_funcs gen11_dsi_connector_helper_funcs = {
	.get_modes = intel_dsi_get_modes,
	.mode_valid = intel_dsi_mode_valid,
	.atomic_check = intel_digital_connector_atomic_check,
};
1267 | ||
c5f9c934 MC |
/* mipi_dsi_host_ops.attach: nothing to do for this driver; always succeeds. */
static int gen11_dsi_host_attach(struct mipi_dsi_host *host,
				 struct mipi_dsi_device *dsi)
{
	return 0;
}
1273 | ||
/* mipi_dsi_host_ops.detach: nothing to do for this driver; always succeeds. */
static int gen11_dsi_host_detach(struct mipi_dsi_host *host,
				 struct mipi_dsi_device *dsi)
{
	return 0;
}
1279 | ||
1280 | static ssize_t gen11_dsi_host_transfer(struct mipi_dsi_host *host, | |
1281 | const struct mipi_dsi_msg *msg) | |
1282 | { | |
1283 | struct intel_dsi_host *intel_dsi_host = to_intel_dsi_host(host); | |
1284 | struct mipi_dsi_packet dsi_pkt; | |
1285 | ssize_t ret; | |
1286 | bool enable_lpdt = false; | |
1287 | ||
1288 | ret = mipi_dsi_create_packet(&dsi_pkt, msg); | |
1289 | if (ret < 0) | |
1290 | return ret; | |
1291 | ||
1292 | if (msg->flags & MIPI_DSI_MSG_USE_LPM) | |
1293 | enable_lpdt = true; | |
1294 | ||
1295 | /* send packet header */ | |
1296 | ret = dsi_send_pkt_hdr(intel_dsi_host, dsi_pkt, enable_lpdt); | |
1297 | if (ret < 0) | |
1298 | return ret; | |
1299 | ||
1300 | /* only long packet contains payload */ | |
1301 | if (mipi_dsi_packet_format_is_long(msg->type)) { | |
1302 | ret = dsi_send_pkt_payld(intel_dsi_host, dsi_pkt); | |
1303 | if (ret < 0) | |
1304 | return ret; | |
1305 | } | |
1306 | ||
1307 | //TODO: add payload receive code if needed | |
1308 | ||
1309 | ret = sizeof(dsi_pkt.header) + dsi_pkt.payload_length; | |
1310 | ||
1311 | return ret; | |
1312 | } | |
1313 | ||
/* MIPI DSI host vtable exposed to the drm_mipi_dsi core. */
static const struct mipi_dsi_host_ops gen11_dsi_host_ops = {
	.attach = gen11_dsi_host_attach,
	.detach = gen11_dsi_host_detach,
	.transfer = gen11_dsi_host_transfer,
};
1319 | ||
bf4d57ff MC |
/*
 * Probe-time entry point: if VBT reports a DSI panel, allocate and
 * register the Gen11 DSI encoder and connector, pull the fixed mode from
 * VBT, set up per-port DSI hosts and run VBT init.
 *
 * NOTE(review): the err: path kfree()s intel_connector after
 * drm_connector_init() without drm_connector_cleanup(), and a duplicated
 * fixed_mode is not freed on the late error paths — confirm and fix
 * separately; left untouched here.
 */
void icl_dsi_init(struct drm_i915_private *dev_priv)
{
	struct drm_device *dev = &dev_priv->drm;
	struct intel_dsi *intel_dsi;
	struct intel_encoder *encoder;
	struct intel_connector *intel_connector;
	struct drm_connector *connector;
	struct drm_display_mode *scan, *fixed_mode = NULL;
	enum port port;

	/* bail silently when VBT has no DSI panel */
	if (!intel_bios_is_dsi_present(dev_priv, &port))
		return;

	intel_dsi = kzalloc(sizeof(*intel_dsi), GFP_KERNEL);
	if (!intel_dsi)
		return;

	intel_connector = intel_connector_alloc();
	if (!intel_connector) {
		kfree(intel_dsi);
		return;
	}

	encoder = &intel_dsi->base;
	intel_dsi->attached_connector = intel_connector;
	connector = &intel_connector->base;

	/* register DSI encoder with DRM subsystem */
	drm_encoder_init(dev, &encoder->base, &gen11_dsi_encoder_funcs,
			 DRM_MODE_ENCODER_DSI, "DSI %c", port_name(port));

	encoder->pre_pll_enable = gen11_dsi_pre_pll_enable;
	encoder->pre_enable = gen11_dsi_pre_enable;
	encoder->disable = gen11_dsi_disable;
	encoder->port = port;
	encoder->get_config = gen11_dsi_get_config;
	encoder->compute_config = gen11_dsi_compute_config;
	encoder->get_hw_state = gen11_dsi_get_hw_state;
	encoder->type = INTEL_OUTPUT_DSI;
	encoder->cloneable = 0;
	encoder->crtc_mask = BIT(PIPE_A) | BIT(PIPE_B) | BIT(PIPE_C);
	encoder->power_domain = POWER_DOMAIN_PORT_DSI;
	encoder->get_power_domains = gen11_dsi_get_power_domains;

	/* register DSI connector with DRM subsystem */
	drm_connector_init(dev, connector, &gen11_dsi_connector_funcs,
			   DRM_MODE_CONNECTOR_DSI);
	drm_connector_helper_add(connector, &gen11_dsi_connector_helper_funcs);
	connector->display_info.subpixel_order = SubPixelHorizontalRGB;
	connector->interlace_allowed = false;
	connector->doublescan_allowed = false;
	intel_connector->get_hw_state = intel_connector_get_hw_state;

	/* attach connector to encoder */
	intel_connector_attach_encoder(intel_connector, encoder);

	/* fill mode info from VBT: take the first preferred probed mode */
	mutex_lock(&dev->mode_config.mutex);
	intel_dsi_vbt_get_modes(intel_dsi);
	list_for_each_entry(scan, &connector->probed_modes, head) {
		if (scan->type & DRM_MODE_TYPE_PREFERRED) {
			fixed_mode = drm_mode_duplicate(dev, scan);
			break;
		}
	}
	mutex_unlock(&dev->mode_config.mutex);

	if (!fixed_mode) {
		DRM_ERROR("DSI fixed mode info missing\n");
		goto err;
	}

	connector->display_info.width_mm = fixed_mode->width_mm;
	connector->display_info.height_mm = fixed_mode->height_mm;
	intel_panel_init(&intel_connector->panel, fixed_mode, NULL);
	intel_panel_setup_backlight(connector, INVALID_PIPE);


	/* dual link drives both ports A and B */
	if (dev_priv->vbt.dsi.config->dual_link)
		intel_dsi->ports = BIT(PORT_A) | BIT(PORT_B);
	else
		intel_dsi->ports = BIT(port);

	intel_dsi->dcs_backlight_ports = dev_priv->vbt.dsi.bl_ports;
	intel_dsi->dcs_cabc_ports = dev_priv->vbt.dsi.cabc_ports;

	/* one DSI host per enabled port */
	for_each_dsi_port(port, intel_dsi->ports) {
		struct intel_dsi_host *host;

		host = intel_dsi_host_init(intel_dsi, &gen11_dsi_host_ops, port);
		if (!host)
			goto err;

		intel_dsi->dsi_hosts[port] = host;
	}

	if (!intel_dsi_vbt_init(intel_dsi, MIPI_DSI_GENERIC_PANEL_ID)) {
		DRM_DEBUG_KMS("no device found\n");
		goto err;
	}

	return;

err:
	drm_encoder_cleanup(&encoder->base);
	kfree(intel_dsi);
	kfree(intel_connector);
}