]>
Commit | Line | Data |
---|---|---|
fcfe0bdc MC |
1 | /* |
2 | * Copyright © 2018 Intel Corporation | |
3 | * | |
4 | * Permission is hereby granted, free of charge, to any person obtaining a | |
5 | * copy of this software and associated documentation files (the "Software"), | |
6 | * to deal in the Software without restriction, including without limitation | |
7 | * the rights to use, copy, modify, merge, publish, distribute, sublicense, | |
8 | * and/or sell copies of the Software, and to permit persons to whom the | |
9 | * Software is furnished to do so, subject to the following conditions: | |
10 | * | |
11 | * The above copyright notice and this permission notice (including the next | |
12 | * paragraph) shall be included in all copies or substantial portions of the | |
13 | * Software. | |
14 | * | |
15 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | |
16 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | |
17 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL | |
18 | * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | |
19 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING | |
20 | * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER | |
21 | * DEALINGS IN THE SOFTWARE. | |
22 | * | |
23 | * Authors: | |
24 | * Madhav Chauhan <madhav.chauhan@intel.com> | |
25 | * Jani Nikula <jani.nikula@intel.com> | |
26 | */ | |
27 | ||
#include <linux/errno.h>

#include <drm/drm_atomic_helper.h>
#include <drm/drm_mipi_dsi.h>

#include "intel_dsi.h"
32bbc3d4 MC |
32 | static inline int header_credits_available(struct drm_i915_private *dev_priv, |
33 | enum transcoder dsi_trans) | |
34 | { | |
35 | return (I915_READ(DSI_CMD_TXCTL(dsi_trans)) & FREE_HEADER_CREDIT_MASK) | |
36 | >> FREE_HEADER_CREDIT_SHIFT; | |
37 | } | |
38 | ||
39 | static inline int payload_credits_available(struct drm_i915_private *dev_priv, | |
40 | enum transcoder dsi_trans) | |
41 | { | |
42 | return (I915_READ(DSI_CMD_TXCTL(dsi_trans)) & FREE_PLOAD_CREDIT_MASK) | |
43 | >> FREE_PLOAD_CREDIT_SHIFT; | |
44 | } | |
45 | ||
/*
 * Poll (up to 100 us) until every header credit has been returned by the
 * hardware, i.e. all queued command headers have been transmitted.
 * Logs an error on timeout; does not propagate it.
 */
static void wait_for_header_credits(struct drm_i915_private *dev_priv,
				    enum transcoder dsi_trans)
{
	if (wait_for_us(header_credits_available(dev_priv, dsi_trans) >=
			MAX_HEADER_CREDIT, 100))
		DRM_ERROR("DSI header credits not released\n");
}
53 | ||
/*
 * Poll (up to 100 us) until every payload credit has been returned by the
 * hardware, i.e. the payload FIFO has fully drained.
 * Logs an error on timeout; does not propagate it.
 */
static void wait_for_payload_credits(struct drm_i915_private *dev_priv,
				     enum transcoder dsi_trans)
{
	if (wait_for_us(payload_credits_available(dev_priv, dsi_trans) >=
			MAX_PLOAD_CREDIT, 100))
		DRM_ERROR("DSI payload credits not released\n");
}
61 | ||
d364dc66 | 62 | static enum transcoder dsi_port_to_transcoder(enum port port) |
ca8fc99f MC |
63 | { |
64 | if (port == PORT_A) | |
65 | return TRANSCODER_DSI_0; | |
66 | else | |
67 | return TRANSCODER_DSI_1; | |
68 | } | |
69 | ||
/*
 * Flush all queued DSI commands out to the panel and wait for the link
 * to go quiescent again.
 *
 * Sequence: wait for all header/payload credits to drain, send a DCS
 * NOP in LP mode on every DSI port, wait for the NOP's header credit to
 * return, then wait for the LP TX in-progress bit to clear.
 */
static void wait_for_cmds_dispatched_to_panel(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	struct mipi_dsi_device *dsi;
	enum port port;
	enum transcoder dsi_trans;
	int ret;

	/* wait for header/payload credits to be released */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		wait_for_header_credits(dev_priv, dsi_trans);
		wait_for_payload_credits(dev_priv, dsi_trans);
	}

	/* send nop DCS command, in LP mode on virtual channel 0 */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi = intel_dsi->dsi_hosts[port]->device;
		dsi->mode_flags |= MIPI_DSI_MODE_LPM;
		dsi->channel = 0;
		ret = mipi_dsi_dcs_nop(dsi);
		if (ret < 0)
			DRM_ERROR("error sending DCS NOP command\n");
	}

	/* wait for header credits to be released */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		wait_for_header_credits(dev_priv, dsi_trans);
	}

	/* wait for LP TX in progress bit to be cleared */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		if (wait_for_us(!(I915_READ(DSI_LP_MSG(dsi_trans)) &
				  LPTX_IN_PROGRESS), 20))
			DRM_ERROR("LPTX bit not cleared\n");
	}
}
110 | ||
c5f9c934 MC |
111 | static bool add_payld_to_queue(struct intel_dsi_host *host, const u8 *data, |
112 | u32 len) | |
113 | { | |
114 | struct intel_dsi *intel_dsi = host->intel_dsi; | |
115 | struct drm_i915_private *dev_priv = to_i915(intel_dsi->base.base.dev); | |
116 | enum transcoder dsi_trans = dsi_port_to_transcoder(host->port); | |
117 | int free_credits; | |
118 | int i, j; | |
119 | ||
120 | for (i = 0; i < len; i += 4) { | |
121 | u32 tmp = 0; | |
122 | ||
123 | free_credits = payload_credits_available(dev_priv, dsi_trans); | |
124 | if (free_credits < 1) { | |
125 | DRM_ERROR("Payload credit not available\n"); | |
126 | return false; | |
127 | } | |
128 | ||
129 | for (j = 0; j < min_t(u32, len - i, 4); j++) | |
130 | tmp |= *data++ << 8 * j; | |
131 | ||
132 | I915_WRITE(DSI_CMD_TXPYLD(dsi_trans), tmp); | |
133 | } | |
134 | ||
135 | return true; | |
136 | } | |
137 | ||
138 | static int dsi_send_pkt_hdr(struct intel_dsi_host *host, | |
139 | struct mipi_dsi_packet pkt, bool enable_lpdt) | |
140 | { | |
141 | struct intel_dsi *intel_dsi = host->intel_dsi; | |
142 | struct drm_i915_private *dev_priv = to_i915(intel_dsi->base.base.dev); | |
143 | enum transcoder dsi_trans = dsi_port_to_transcoder(host->port); | |
144 | u32 tmp; | |
145 | int free_credits; | |
146 | ||
147 | /* check if header credit available */ | |
148 | free_credits = header_credits_available(dev_priv, dsi_trans); | |
149 | if (free_credits < 1) { | |
150 | DRM_ERROR("send pkt header failed, not enough hdr credits\n"); | |
151 | return -1; | |
152 | } | |
153 | ||
154 | tmp = I915_READ(DSI_CMD_TXHDR(dsi_trans)); | |
155 | ||
156 | if (pkt.payload) | |
157 | tmp |= PAYLOAD_PRESENT; | |
158 | else | |
159 | tmp &= ~PAYLOAD_PRESENT; | |
160 | ||
161 | tmp &= ~VBLANK_FENCE; | |
162 | ||
163 | if (enable_lpdt) | |
164 | tmp |= LP_DATA_TRANSFER; | |
165 | ||
166 | tmp &= ~(PARAM_WC_MASK | VC_MASK | DT_MASK); | |
167 | tmp |= ((pkt.header[0] & VC_MASK) << VC_SHIFT); | |
168 | tmp |= ((pkt.header[0] & DT_MASK) << DT_SHIFT); | |
169 | tmp |= (pkt.header[1] << PARAM_WC_LOWER_SHIFT); | |
170 | tmp |= (pkt.header[2] << PARAM_WC_UPPER_SHIFT); | |
171 | I915_WRITE(DSI_CMD_TXHDR(dsi_trans), tmp); | |
172 | ||
173 | return 0; | |
174 | } | |
175 | ||
176 | static int dsi_send_pkt_payld(struct intel_dsi_host *host, | |
177 | struct mipi_dsi_packet pkt) | |
178 | { | |
179 | /* payload queue can accept *256 bytes*, check limit */ | |
180 | if (pkt.payload_length > MAX_PLOAD_CREDIT * 4) { | |
181 | DRM_ERROR("payload size exceeds max queue limit\n"); | |
182 | return -1; | |
183 | } | |
184 | ||
185 | /* load data into command payload queue */ | |
186 | if (!add_payld_to_queue(host, pkt.payload, | |
187 | pkt.payload_length)) { | |
188 | DRM_ERROR("adding payload to queue failed\n"); | |
189 | return -1; | |
190 | } | |
191 | ||
192 | return 0; | |
193 | } | |
194 | ||
/*
 * Program voltage swing and pre-emphasis for the DSI combo PHY per the
 * Bspec DDI buffer programming table.  The same values are applied to
 * the transmit lanes (via a LN0 read / GRP broadcast write pair) and to
 * the AUX channel registers; DW4 must be written per-lane.
 */
static void dsi_program_swing_and_deemphasis(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	enum port port;
	u32 tmp;
	int lane;

	for_each_dsi_port(port, intel_dsi->ports) {

		/*
		 * Program voltage swing and pre-emphasis level values as per
		 * table in BSPEC under DDI buffer programing
		 */
		tmp = I915_READ(ICL_PORT_TX_DW5_LN0(port));
		tmp &= ~(SCALING_MODE_SEL_MASK | RTERM_SELECT_MASK);
		tmp |= SCALING_MODE_SEL(0x2);
		tmp |= TAP2_DISABLE | TAP3_DISABLE;
		tmp |= RTERM_SELECT(0x6);
		I915_WRITE(ICL_PORT_TX_DW5_GRP(port), tmp);

		tmp = I915_READ(ICL_PORT_TX_DW5_AUX(port));
		tmp &= ~(SCALING_MODE_SEL_MASK | RTERM_SELECT_MASK);
		tmp |= SCALING_MODE_SEL(0x2);
		tmp |= TAP2_DISABLE | TAP3_DISABLE;
		tmp |= RTERM_SELECT(0x6);
		I915_WRITE(ICL_PORT_TX_DW5_AUX(port), tmp);

		tmp = I915_READ(ICL_PORT_TX_DW2_LN0(port));
		tmp &= ~(SWING_SEL_LOWER_MASK | SWING_SEL_UPPER_MASK |
			 RCOMP_SCALAR_MASK);
		tmp |= SWING_SEL_UPPER(0x2);
		tmp |= SWING_SEL_LOWER(0x2);
		tmp |= RCOMP_SCALAR(0x98);
		I915_WRITE(ICL_PORT_TX_DW2_GRP(port), tmp);

		tmp = I915_READ(ICL_PORT_TX_DW2_AUX(port));
		tmp &= ~(SWING_SEL_LOWER_MASK | SWING_SEL_UPPER_MASK |
			 RCOMP_SCALAR_MASK);
		tmp |= SWING_SEL_UPPER(0x2);
		tmp |= SWING_SEL_LOWER(0x2);
		tmp |= RCOMP_SCALAR(0x98);
		I915_WRITE(ICL_PORT_TX_DW2_AUX(port), tmp);

		tmp = I915_READ(ICL_PORT_TX_DW4_AUX(port));
		tmp &= ~(POST_CURSOR_1_MASK | POST_CURSOR_2_MASK |
			 CURSOR_COEFF_MASK);
		tmp |= POST_CURSOR_1(0x0);
		tmp |= POST_CURSOR_2(0x0);
		tmp |= CURSOR_COEFF(0x3f);
		I915_WRITE(ICL_PORT_TX_DW4_AUX(port), tmp);

		for (lane = 0; lane <= 3; lane++) {
			/* Bspec: must not use GRP register for write */
			tmp = I915_READ(ICL_PORT_TX_DW4_LN(port, lane));
			tmp &= ~(POST_CURSOR_1_MASK | POST_CURSOR_2_MASK |
				 CURSOR_COEFF_MASK);
			tmp |= POST_CURSOR_1(0x0);
			tmp |= POST_CURSOR_2(0x0);
			tmp |= CURSOR_COEFF(0x3f);
			I915_WRITE(ICL_PORT_TX_DW4_LN(port, lane), tmp);
		}
	}
}
259 | ||
/*
 * Program the escape clock dividers.  The escape clock is derived from
 * the 8X AFE (bit) clock; the divider M is chosen so the resulting
 * escape clock stays at or below DSI_MAX_ESC_CLK.
 */
static void gen11_dsi_program_esc_clk_div(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	enum port port;
	u32 bpp = mipi_dsi_pixel_format_to_bpp(intel_dsi->pixel_format);
	u32 afe_clk_khz; /* 8X Clock */
	u32 esc_clk_div_m;

	/* AFE clock (kHz) = pixel clock * bpp / lane count */
	afe_clk_khz = DIV_ROUND_CLOSEST(intel_dsi->pclk * bpp,
					intel_dsi->lane_count);

	esc_clk_div_m = DIV_ROUND_UP(afe_clk_khz, DSI_MAX_ESC_CLK);

	/* display core side divider; the POSTING_READ flushes the write */
	for_each_dsi_port(port, intel_dsi->ports) {
		I915_WRITE(ICL_DSI_ESC_CLK_DIV(port),
			   esc_clk_div_m & ICL_ESC_CLK_DIV_MASK);
		POSTING_READ(ICL_DSI_ESC_CLK_DIV(port));
	}

	/* same divider value on the DPHY side */
	for_each_dsi_port(port, intel_dsi->ports) {
		I915_WRITE(ICL_DPHY_ESC_CLK_DIV(port),
			   esc_clk_div_m & ICL_ESC_CLK_DIV_MASK);
		POSTING_READ(ICL_DPHY_ESC_CLK_DIV(port));
	}
}
286 | ||
/*
 * Switch each DSI port's combo PHY into DSI mode and take a reference
 * on the corresponding DDI IO power domain (A for port A, B otherwise).
 * NOTE(review): the references taken here must be released by the
 * matching power-down path — confirm against the disable sequence.
 */
static void gen11_dsi_enable_io_power(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	enum port port;
	u32 tmp;

	for_each_dsi_port(port, intel_dsi->ports) {
		tmp = I915_READ(ICL_DSI_IO_MODECTL(port));
		tmp |= COMBO_PHY_MODE_DSI;
		I915_WRITE(ICL_DSI_IO_MODECTL(port), tmp);
	}

	for_each_dsi_port(port, intel_dsi->ports) {
		intel_display_power_get(dev_priv, port == PORT_A ?
					POWER_DOMAIN_PORT_DDI_A_IO :
					POWER_DOMAIN_PORT_DDI_B_IO);
	}
}
306 | ||
45f09f7a MC |
307 | static void gen11_dsi_power_up_lanes(struct intel_encoder *encoder) |
308 | { | |
309 | struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); | |
310 | struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base); | |
311 | enum port port; | |
312 | u32 tmp; | |
313 | u32 lane_mask; | |
314 | ||
315 | switch (intel_dsi->lane_count) { | |
316 | case 1: | |
317 | lane_mask = PWR_DOWN_LN_3_1_0; | |
318 | break; | |
319 | case 2: | |
320 | lane_mask = PWR_DOWN_LN_3_1; | |
321 | break; | |
322 | case 3: | |
323 | lane_mask = PWR_DOWN_LN_3; | |
324 | break; | |
325 | case 4: | |
326 | default: | |
327 | lane_mask = PWR_UP_ALL_LANES; | |
328 | break; | |
329 | } | |
330 | ||
331 | for_each_dsi_port(port, intel_dsi->ports) { | |
332 | tmp = I915_READ(ICL_PORT_CL_DW10(port)); | |
333 | tmp &= ~PWR_DOWN_LN_MASK; | |
334 | I915_WRITE(ICL_PORT_CL_DW10(port), tmp | lane_mask); | |
335 | } | |
336 | } | |
337 | ||
/*
 * Combo PHY lane sequencing configuration (Bspec enable sequence step
 * 4b): loadgen select and latency optimization for transmit + AUX lanes.
 */
static void gen11_dsi_config_phy_lanes_sequence(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	enum port port;
	u32 tmp;
	int lane;

	/* Step 4b(i) set loadgen select for transmit and aux lanes */
	for_each_dsi_port(port, intel_dsi->ports) {
		tmp = I915_READ(ICL_PORT_TX_DW4_AUX(port));
		tmp &= ~LOADGEN_SELECT;
		I915_WRITE(ICL_PORT_TX_DW4_AUX(port), tmp);
		/* loadgen is set on every data lane except lane 2 */
		for (lane = 0; lane <= 3; lane++) {
			tmp = I915_READ(ICL_PORT_TX_DW4_LN(port, lane));
			tmp &= ~LOADGEN_SELECT;
			if (lane != 2)
				tmp |= LOADGEN_SELECT;
			I915_WRITE(ICL_PORT_TX_DW4_LN(port, lane), tmp);
		}
	}

	/* Step 4b(ii) set latency optimization for transmit and aux lanes */
	for_each_dsi_port(port, intel_dsi->ports) {
		tmp = I915_READ(ICL_PORT_TX_DW2_AUX(port));
		tmp &= ~FRC_LATENCY_OPTIM_MASK;
		tmp |= FRC_LATENCY_OPTIM_VAL(0x5);
		I915_WRITE(ICL_PORT_TX_DW2_AUX(port), tmp);
		/* read one lane, broadcast the updated value to the group */
		tmp = I915_READ(ICL_PORT_TX_DW2_LN0(port));
		tmp &= ~FRC_LATENCY_OPTIM_MASK;
		tmp |= FRC_LATENCY_OPTIM_VAL(0x5);
		I915_WRITE(ICL_PORT_TX_DW2_GRP(port), tmp);
	}
}
373 | ||
/*
 * Voltage swing programming sequence (Bspec step 4c): disable the
 * common keeper, set the SUS clock config, clear training enable so
 * the swing/de-emphasis values may be changed, program them, then
 * re-assert training enable to latch the update.
 */
static void gen11_dsi_voltage_swing_program_seq(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	u32 tmp;
	enum port port;

	/* clear common keeper enable bit */
	for_each_dsi_port(port, intel_dsi->ports) {
		tmp = I915_READ(ICL_PORT_PCS_DW1_LN0(port));
		tmp &= ~COMMON_KEEPER_EN;
		I915_WRITE(ICL_PORT_PCS_DW1_GRP(port), tmp);
		tmp = I915_READ(ICL_PORT_PCS_DW1_AUX(port));
		tmp &= ~COMMON_KEEPER_EN;
		I915_WRITE(ICL_PORT_PCS_DW1_AUX(port), tmp);
	}

	/*
	 * Set SUS Clock Config bitfield to 11b
	 * Note: loadgen select program is done
	 * as part of lane phy sequence configuration
	 */
	for_each_dsi_port(port, intel_dsi->ports) {
		tmp = I915_READ(ICL_PORT_CL_DW5(port));
		tmp |= SUS_CLOCK_CONFIG;
		I915_WRITE(ICL_PORT_CL_DW5(port), tmp);
	}

	/* Clear training enable to change swing values */
	for_each_dsi_port(port, intel_dsi->ports) {
		tmp = I915_READ(ICL_PORT_TX_DW5_LN0(port));
		tmp &= ~TX_TRAINING_EN;
		I915_WRITE(ICL_PORT_TX_DW5_GRP(port), tmp);
		tmp = I915_READ(ICL_PORT_TX_DW5_AUX(port));
		tmp &= ~TX_TRAINING_EN;
		I915_WRITE(ICL_PORT_TX_DW5_AUX(port), tmp);
	}

	/* Program swing and de-emphasis */
	dsi_program_swing_and_deemphasis(encoder);

	/* Set training enable to trigger update */
	for_each_dsi_port(port, intel_dsi->ports) {
		tmp = I915_READ(ICL_PORT_TX_DW5_LN0(port));
		tmp |= TX_TRAINING_EN;
		I915_WRITE(ICL_PORT_TX_DW5_GRP(port), tmp);
		tmp = I915_READ(ICL_PORT_TX_DW5_AUX(port));
		tmp |= TX_TRAINING_EN;
		I915_WRITE(ICL_PORT_TX_DW5_AUX(port), tmp);
	}
}
425 | ||
/*
 * Enable the DDI buffer for each DSI port and wait (up to 500 us) for
 * it to leave the idle state.
 */
static void gen11_dsi_enable_ddi_buffer(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	u32 tmp;
	enum port port;

	for_each_dsi_port(port, intel_dsi->ports) {
		tmp = I915_READ(DDI_BUF_CTL(port));
		tmp |= DDI_BUF_CTL_ENABLE;
		I915_WRITE(DDI_BUF_CTL(port), tmp);

		/* timeout here means the buffer is *still* idle */
		if (wait_for_us(!(I915_READ(DDI_BUF_CTL(port)) &
				  DDI_BUF_IS_IDLE),
				  500))
			DRM_ERROR("DDI port:%c buffer idle\n", port_name(port));
	}
}
444 | ||
/*
 * Program D-PHY timing parameters: T-INIT master, clock lane and data
 * lane timings (each mirrored into display-core shadow registers),
 * and — for links at or below 800 MHz — override TA_SURE to 0.
 */
static void gen11_dsi_setup_dphy_timings(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	u32 tmp;
	enum port port;

	/* Program T-INIT master registers */
	for_each_dsi_port(port, intel_dsi->ports) {
		tmp = I915_READ(ICL_DSI_T_INIT_MASTER(port));
		tmp &= ~MASTER_INIT_TIMER_MASK;
		tmp |= intel_dsi->init_count;
		I915_WRITE(ICL_DSI_T_INIT_MASTER(port), tmp);
	}

	/* Program DPHY clock lanes timings */
	for_each_dsi_port(port, intel_dsi->ports) {
		I915_WRITE(DPHY_CLK_TIMING_PARAM(port), intel_dsi->dphy_reg);

		/* shadow register inside display core */
		I915_WRITE(DSI_CLK_TIMING_PARAM(port), intel_dsi->dphy_reg);
	}

	/* Program DPHY data lanes timings */
	for_each_dsi_port(port, intel_dsi->ports) {
		I915_WRITE(DPHY_DATA_TIMING_PARAM(port),
			   intel_dsi->dphy_data_lane_reg);

		/* shadow register inside display core */
		I915_WRITE(DSI_DATA_TIMING_PARAM(port),
			   intel_dsi->dphy_data_lane_reg);
	}

	/*
	 * If DSI link operating at or below an 800 MHz,
	 * TA_SURE should be override and programmed to
	 * a value '0' inside TA_PARAM_REGISTERS otherwise
	 * leave all fields at HW default values.
	 */
	if (intel_dsi_bitrate(intel_dsi) <= 800000) {
		for_each_dsi_port(port, intel_dsi->ports) {
			tmp = I915_READ(DPHY_TA_TIMING_PARAM(port));
			tmp &= ~TA_SURE_MASK;
			tmp |= TA_SURE_OVERRIDE | TA_SURE(0);
			I915_WRITE(DPHY_TA_TIMING_PARAM(port), tmp);

			/* shadow register inside display core */
			tmp = I915_READ(DSI_TA_TIMING_PARAM(port));
			tmp &= ~TA_SURE_MASK;
			tmp |= TA_SURE_OVERRIDE | TA_SURE(0);
			I915_WRITE(DSI_TA_TIMING_PARAM(port), tmp);
		}
	}
}
499 | ||
/*
 * Configure the DSI transcoder(s) and the associated DDI function
 * (Bspec enable sequence steps 4h-4k): EOTP, link calibration,
 * continuous-clock behavior, pixel format, operation mode, port sync
 * for dual link, data lane width and input pipe selection; finally
 * wait for the link-ready indication.
 */
static void
gen11_dsi_configure_transcoder(struct intel_encoder *encoder,
			       const struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	struct intel_crtc *intel_crtc = to_intel_crtc(pipe_config->base.crtc);
	enum pipe pipe = intel_crtc->pipe;
	u32 tmp;
	enum port port;
	enum transcoder dsi_trans;

	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		tmp = I915_READ(DSI_TRANS_FUNC_CONF(dsi_trans));

		if (intel_dsi->eotp_pkt)
			tmp &= ~EOTP_DISABLED;
		else
			tmp |= EOTP_DISABLED;

		/* enable link calibration if freq > 1.5Gbps */
		if (intel_dsi_bitrate(intel_dsi) >= 1500 * 1000) {
			tmp &= ~LINK_CALIBRATION_MASK;
			tmp |= CALIBRATION_ENABLED_INITIAL_ONLY;
		}

		/* configure continuous clock */
		tmp &= ~CONTINUOUS_CLK_MASK;
		if (intel_dsi->clock_stop)
			tmp |= CLK_ENTER_LP_AFTER_DATA;
		else
			tmp |= CLK_HS_CONTINUOUS;

		/* configure buffer threshold limit to minimum */
		tmp &= ~PIX_BUF_THRESHOLD_MASK;
		tmp |= PIX_BUF_THRESHOLD_1_4;

		/* set virtual channel to '0' */
		tmp &= ~PIX_VIRT_CHAN_MASK;
		tmp |= PIX_VIRT_CHAN(0);

		/*
		 * program BGR transmission
		 * NOTE(review): the bit is only ever set here, never
		 * cleared, so a stale bit would survive when
		 * bgr_enabled is false — confirm the register's reset
		 * state makes this safe.
		 */
		if (intel_dsi->bgr_enabled)
			tmp |= BGR_TRANSMISSION;

		/* select pixel format */
		tmp &= ~PIX_FMT_MASK;
		switch (intel_dsi->pixel_format) {
		default:
			MISSING_CASE(intel_dsi->pixel_format);
			/* fallthrough */
		case MIPI_DSI_FMT_RGB565:
			tmp |= PIX_FMT_RGB565;
			break;
		case MIPI_DSI_FMT_RGB666_PACKED:
			tmp |= PIX_FMT_RGB666_PACKED;
			break;
		case MIPI_DSI_FMT_RGB666:
			tmp |= PIX_FMT_RGB666_LOOSE;
			break;
		case MIPI_DSI_FMT_RGB888:
			tmp |= PIX_FMT_RGB888;
			break;
		}

		/* program DSI operation mode */
		if (is_vid_mode(intel_dsi)) {
			tmp &= ~OP_MODE_MASK;
			switch (intel_dsi->video_mode_format) {
			default:
				MISSING_CASE(intel_dsi->video_mode_format);
				/* fallthrough */
			case VIDEO_MODE_NON_BURST_WITH_SYNC_EVENTS:
				tmp |= VIDEO_MODE_SYNC_EVENT;
				break;
			case VIDEO_MODE_NON_BURST_WITH_SYNC_PULSE:
				tmp |= VIDEO_MODE_SYNC_PULSE;
				break;
			}
		}

		I915_WRITE(DSI_TRANS_FUNC_CONF(dsi_trans), tmp);
	}

	/* enable port sync mode if dual link */
	if (intel_dsi->dual_link) {
		for_each_dsi_port(port, intel_dsi->ports) {
			dsi_trans = dsi_port_to_transcoder(port);
			tmp = I915_READ(TRANS_DDI_FUNC_CTL2(dsi_trans));
			tmp |= PORT_SYNC_MODE_ENABLE;
			I915_WRITE(TRANS_DDI_FUNC_CTL2(dsi_trans), tmp);
		}

		//TODO: configure DSS_CTL1
	}

	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);

		/* select data lane width */
		tmp = I915_READ(TRANS_DDI_FUNC_CTL(dsi_trans));
		tmp &= ~DDI_PORT_WIDTH_MASK;
		tmp |= DDI_PORT_WIDTH(intel_dsi->lane_count);

		/* select input pipe */
		tmp &= ~TRANS_DDI_EDP_INPUT_MASK;
		switch (pipe) {
		default:
			MISSING_CASE(pipe);
			/* fallthrough */
		case PIPE_A:
			tmp |= TRANS_DDI_EDP_INPUT_A_ON;
			break;
		case PIPE_B:
			tmp |= TRANS_DDI_EDP_INPUT_B_ONOFF;
			break;
		case PIPE_C:
			tmp |= TRANS_DDI_EDP_INPUT_C_ONOFF;
			break;
		}

		/* enable DDI buffer */
		tmp |= TRANS_DDI_FUNC_ENABLE;
		I915_WRITE(TRANS_DDI_FUNC_CTL(dsi_trans), tmp);
	}

	/* wait for link ready */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		if (wait_for_us((I915_READ(DSI_TRANS_FUNC_CONF(dsi_trans)) &
				 LINK_READY), 2500))
			DRM_ERROR("DSI link not ready\n");
	}
}
635 | ||
/*
 * Program the transcoder timing registers (HTOTAL/HSYNC/VTOTAL/VSYNC/
 * VSYNCSHIFT) from the adjusted mode, halving horizontal values for
 * dual link where applicable, and sanity-checking Bspec timing
 * constraints (violations are only logged, not rejected).
 */
static void
gen11_dsi_set_transcoder_timings(struct intel_encoder *encoder,
				 const struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	const struct drm_display_mode *adjusted_mode =
		&pipe_config->base.adjusted_mode;
	enum port port;
	enum transcoder dsi_trans;
	/* horizontal timings */
	u16 htotal, hactive, hsync_start, hsync_end, hsync_size;
	u16 hfront_porch, hback_porch;
	/* vertical timings */
	u16 vtotal, vactive, vsync_start, vsync_end, vsync_shift;

	hactive = adjusted_mode->crtc_hdisplay;
	htotal = adjusted_mode->crtc_htotal;
	hsync_start = adjusted_mode->crtc_hsync_start;
	hsync_end = adjusted_mode->crtc_hsync_end;
	hsync_size = hsync_end - hsync_start;
	hfront_porch = (adjusted_mode->crtc_hsync_start -
			adjusted_mode->crtc_hdisplay);
	hback_porch = (adjusted_mode->crtc_htotal -
		       adjusted_mode->crtc_hsync_end);
	vactive = adjusted_mode->crtc_vdisplay;
	vtotal = adjusted_mode->crtc_vtotal;
	vsync_start = adjusted_mode->crtc_vsync_start;
	vsync_end = adjusted_mode->crtc_vsync_end;
	vsync_shift = hsync_start - htotal / 2;

	/* each link drives half the active pixels in dual-link mode */
	if (intel_dsi->dual_link) {
		hactive /= 2;
		if (intel_dsi->dual_link == DSI_DUAL_LINK_FRONT_BACK)
			hactive += intel_dsi->pixel_overlap;
		htotal /= 2;
	}

	/* minimum hactive as per bspec: 256 pixels */
	if (adjusted_mode->crtc_hdisplay < 256)
		DRM_ERROR("hactive is less then 256 pixels\n");

	/* if RGB666 format, then hactive must be multiple of 4 pixels */
	if (intel_dsi->pixel_format == MIPI_DSI_FMT_RGB666 && hactive % 4 != 0)
		DRM_ERROR("hactive pixels are not multiple of 4\n");

	/* program TRANS_HTOTAL register */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		I915_WRITE(HTOTAL(dsi_trans),
			   (hactive - 1) | ((htotal - 1) << 16));
	}

	/* TRANS_HSYNC register to be programmed only for video mode */
	if (intel_dsi->operation_mode == INTEL_DSI_VIDEO_MODE) {
		if (intel_dsi->video_mode_format ==
		    VIDEO_MODE_NON_BURST_WITH_SYNC_PULSE) {
			/* BSPEC: hsync size should be atleast 16 pixels */
			if (hsync_size < 16)
				DRM_ERROR("hsync size < 16 pixels\n");
		}

		if (hback_porch < 16)
			DRM_ERROR("hback porch < 16 pixels\n");

		if (intel_dsi->dual_link) {
			hsync_start /= 2;
			hsync_end /= 2;
		}

		for_each_dsi_port(port, intel_dsi->ports) {
			dsi_trans = dsi_port_to_transcoder(port);
			I915_WRITE(HSYNC(dsi_trans),
				   (hsync_start - 1) | ((hsync_end - 1) << 16));
		}
	}

	/* program TRANS_VTOTAL register */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		/*
		 * FIXME: Programing this by assuming progressive mode, since
		 * non-interlaced info from VBT is not saved inside
		 * struct drm_display_mode.
		 * For interlace mode: program required pixel minus 2
		 */
		I915_WRITE(VTOTAL(dsi_trans),
			   (vactive - 1) | ((vtotal - 1) << 16));
	}

	if (vsync_end < vsync_start || vsync_end > vtotal)
		DRM_ERROR("Invalid vsync_end value\n");

	/* vsync must start after the active region ends */
	if (vsync_start < vactive)
		DRM_ERROR("vsync_start less than vactive\n");

	/* program TRANS_VSYNC register */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		I915_WRITE(VSYNC(dsi_trans),
			   (vsync_start - 1) | ((vsync_end - 1) << 16));
	}

	/*
	 * FIXME: It has to be programmed only for interlaced
	 * modes. Put the check condition here once interlaced
	 * info available as described above.
	 * program TRANS_VSYNCSHIFT register
	 */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		I915_WRITE(VSYNCSHIFT(dsi_trans), vsync_shift);
	}
}
750 | ||
/*
 * Enable each DSI transcoder and wait (10 ms) for its active bit to
 * assert; a timeout is only logged.
 */
static void gen11_dsi_enable_transcoder(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	enum port port;
	enum transcoder dsi_trans;
	u32 tmp;

	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		tmp = I915_READ(PIPECONF(dsi_trans));
		tmp |= PIPECONF_ENABLE;
		I915_WRITE(PIPECONF(dsi_trans), tmp);

		/* wait for transcoder to be enabled */
		if (intel_wait_for_register(dev_priv, PIPECONF(dsi_trans),
					    I965_PIPECONF_ACTIVE,
					    I965_PIPECONF_ACTIVE, 10))
			DRM_ERROR("DSI transcoder not enabled\n");
	}
}
772 | ||
/*
 * Program the DSI protocol timeouts (HS TX, LP RX, turnaround) for each
 * transcoder, converting the VBT byte-clock based counts into
 * escape-clock counts.
 */
static void gen11_dsi_setup_timeouts(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	enum port port;
	enum transcoder dsi_trans;
	u32 tmp, hs_tx_timeout, lp_rx_timeout, ta_timeout, divisor, mul;

	/*
	 * escape clock count calculation:
	 * BYTE_CLK_COUNT = TIME_NS/(8 * UI)
	 * UI (nsec) = (10^6)/Bitrate
	 * TIME_NS = (BYTE_CLK_COUNT * 8 * 10^6)/ Bitrate
	 * ESCAPE_CLK_COUNT = TIME_NS/ESC_CLK_NS
	 */
	divisor = intel_dsi_tlpx_ns(intel_dsi) * intel_dsi_bitrate(intel_dsi) * 1000;
	mul = 8 * 1000000;
	hs_tx_timeout = DIV_ROUND_UP(intel_dsi->hs_tx_timeout * mul,
				     divisor);
	lp_rx_timeout = DIV_ROUND_UP(intel_dsi->lp_rx_timeout * mul, divisor);
	ta_timeout = DIV_ROUND_UP(intel_dsi->turn_arnd_val * mul, divisor);

	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);

		/* program hst_tx_timeout */
		tmp = I915_READ(DSI_HSTX_TO(dsi_trans));
		tmp &= ~HSTX_TIMEOUT_VALUE_MASK;
		tmp |= HSTX_TIMEOUT_VALUE(hs_tx_timeout);
		I915_WRITE(DSI_HSTX_TO(dsi_trans), tmp);

		/* FIXME: DSI_CALIB_TO */

		/* program lp_rx_host timeout */
		tmp = I915_READ(DSI_LPRX_HOST_TO(dsi_trans));
		tmp &= ~LPRX_TIMEOUT_VALUE_MASK;
		tmp |= LPRX_TIMEOUT_VALUE(lp_rx_timeout);
		I915_WRITE(DSI_LPRX_HOST_TO(dsi_trans), tmp);

		/* FIXME: DSI_PWAIT_TO */

		/* program turn around timeout */
		tmp = I915_READ(DSI_TA_TO(dsi_trans));
		tmp &= ~TA_TIMEOUT_VALUE_MASK;
		tmp |= TA_TIMEOUT_VALUE(ta_timeout);
		I915_WRITE(DSI_TA_TO(dsi_trans), tmp);
	}
}
821 | ||
/*
 * Bspec gen11 DSI enable sequence, step 4: bring up the combo PHY and
 * port, then configure the transcoder.  The call order below is the
 * hardware-mandated sequence and must not be rearranged.
 */
static void
gen11_dsi_enable_port_and_phy(struct intel_encoder *encoder,
			      const struct intel_crtc_state *pipe_config)
{
	/* step 4a: power up all lanes of the DDI used by DSI */
	gen11_dsi_power_up_lanes(encoder);

	/* step 4b: configure lane sequencing of the Combo-PHY transmitters */
	gen11_dsi_config_phy_lanes_sequence(encoder);

	/* step 4c: configure voltage swing and skew */
	gen11_dsi_voltage_swing_program_seq(encoder);

	/* enable DDI buffer */
	gen11_dsi_enable_ddi_buffer(encoder);

	/* setup D-PHY timings */
	gen11_dsi_setup_dphy_timings(encoder);

	/* step 4h: setup DSI protocol timeouts */
	gen11_dsi_setup_timeouts(encoder);

	/* Step (4h, 4i, 4j, 4k): Configure transcoder */
	gen11_dsi_configure_transcoder(encoder, pipe_config);
}
847 | ||
bfee32bf MC |
848 | static void gen11_dsi_powerup_panel(struct intel_encoder *encoder) |
849 | { | |
850 | struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); | |
851 | struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base); | |
852 | struct mipi_dsi_device *dsi; | |
853 | enum port port; | |
854 | enum transcoder dsi_trans; | |
855 | u32 tmp; | |
856 | int ret; | |
857 | ||
858 | /* set maximum return packet size */ | |
859 | for_each_dsi_port(port, intel_dsi->ports) { | |
860 | dsi_trans = dsi_port_to_transcoder(port); | |
861 | ||
862 | /* | |
863 | * FIXME: This uses the number of DW's currently in the payload | |
864 | * receive queue. This is probably not what we want here. | |
865 | */ | |
866 | tmp = I915_READ(DSI_CMD_RXCTL(dsi_trans)); | |
867 | tmp &= NUMBER_RX_PLOAD_DW_MASK; | |
868 | /* multiply "Number Rx Payload DW" by 4 to get max value */ | |
869 | tmp = tmp * 4; | |
870 | dsi = intel_dsi->dsi_hosts[port]->device; | |
871 | ret = mipi_dsi_set_maximum_return_packet_size(dsi, tmp); | |
872 | if (ret < 0) | |
873 | DRM_ERROR("error setting max return pkt size%d\n", tmp); | |
874 | } | |
c2661638 MC |
875 | |
876 | /* panel power on related mipi dsi vbt sequences */ | |
877 | intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_POWER_ON); | |
878 | intel_dsi_msleep(intel_dsi, intel_dsi->panel_on_delay); | |
879 | intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_DEASSERT_RESET); | |
880 | intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_INIT_OTP); | |
881 | intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_DISPLAY_ON); | |
32bbc3d4 MC |
882 | |
883 | /* ensure all panel commands dispatched before enabling transcoder */ | |
884 | wait_for_cmds_dispatched_to_panel(encoder); | |
bfee32bf MC |
885 | } |
886 | ||
95f2f4db VK |
/*
 * Encoder ->pre_pll_enable hook: powers up the DSI IO (step 2) and
 * programs the escape clock divider.  The comment labels this "enable
 * DSI PLL" (step 3), but only the escape clock divider is programmed
 * here — NOTE(review): actual PLL enable presumably happens in common
 * PLL code; confirm against the caller.
 */
static void gen11_dsi_pre_pll_enable(struct intel_encoder *encoder,
				     const struct intel_crtc_state *pipe_config,
				     const struct drm_connector_state *conn_state)
{
	/* step2: enable IO power */
	gen11_dsi_enable_io_power(encoder);

	/* step3: enable DSI PLL */
	gen11_dsi_program_esc_clk_div(encoder);
}
897 | ||
/*
 * Encoder ->pre_enable hook: runs the DSI enable sequence from port/PHY
 * bring-up (step 4) through backlight enable (step 7).  The step order
 * below is mandated by the hardware enable sequence; do not reorder.
 */
static void gen11_dsi_pre_enable(struct intel_encoder *encoder,
				 const struct intel_crtc_state *pipe_config,
				 const struct drm_connector_state *conn_state)
{
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);

	/* step4: enable DSI port and DPHY */
	gen11_dsi_enable_port_and_phy(encoder, pipe_config);

	/* step5: program and powerup panel */
	gen11_dsi_powerup_panel(encoder);

	/* step6c: configure transcoder timings */
	gen11_dsi_set_transcoder_timings(encoder, pipe_config);

	/* step6d: enable dsi transcoder */
	gen11_dsi_enable_transcoder(encoder);

	/* step7: enable backlight */
	intel_panel_enable_backlight(pipe_config, conn_state);
	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_BACKLIGHT_ON);
}
d9d996b6 | 920 | |
4e123bd3 MC |
921 | static void gen11_dsi_disable_transcoder(struct intel_encoder *encoder) |
922 | { | |
923 | struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); | |
924 | struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base); | |
925 | enum port port; | |
926 | enum transcoder dsi_trans; | |
927 | u32 tmp; | |
928 | ||
929 | for_each_dsi_port(port, intel_dsi->ports) { | |
930 | dsi_trans = dsi_port_to_transcoder(port); | |
931 | ||
932 | /* disable transcoder */ | |
933 | tmp = I915_READ(PIPECONF(dsi_trans)); | |
934 | tmp &= ~PIPECONF_ENABLE; | |
935 | I915_WRITE(PIPECONF(dsi_trans), tmp); | |
936 | ||
937 | /* wait for transcoder to be disabled */ | |
938 | if (intel_wait_for_register(dev_priv, PIPECONF(dsi_trans), | |
939 | I965_PIPECONF_ACTIVE, 0, 50)) | |
940 | DRM_ERROR("DSI trancoder not disabled\n"); | |
941 | } | |
942 | } | |
943 | ||
522cc3f7 MC |
/*
 * Run the VBT panel power-down sequences (display off, reset assert,
 * power off) and wait until all the resulting DSI commands have been
 * dispatched to the panel.
 */
static void gen11_dsi_powerdown_panel(struct intel_encoder *encoder)
{
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);

	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_DISPLAY_OFF);
	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_ASSERT_RESET);
	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_POWER_OFF);

	/* ensure cmds dispatched to panel */
	wait_for_cmds_dispatched_to_panel(encoder);
}
955 | ||
4769b598 MC |
/*
 * Undo the transcoder configuration on disable, in three stages:
 *  1. put each DSI link into ULPS (poll LINK_IN_ULPS for up to 10 us),
 *  2. clear the TRANS_DDI function enable bit,
 *  3. for dual link, drop port sync mode on both transcoders.
 *
 * NOTE(review): "trancoder" in the name is a pre-existing typo; kept
 * because the caller references this symbol.
 */
static void gen11_dsi_deconfigure_trancoder(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	enum port port;
	enum transcoder dsi_trans;
	u32 tmp;

	/* put dsi link in ULPS */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		tmp = I915_READ(DSI_LP_MSG(dsi_trans));
		tmp |= LINK_ENTER_ULPS;
		tmp &= ~LINK_ULPS_TYPE_LP11;
		I915_WRITE(DSI_LP_MSG(dsi_trans), tmp);

		if (wait_for_us((I915_READ(DSI_LP_MSG(dsi_trans)) &
				LINK_IN_ULPS),
				10))
			DRM_ERROR("DSI link not in ULPS\n");
	}

	/* disable ddi function */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		tmp = I915_READ(TRANS_DDI_FUNC_CTL(dsi_trans));
		tmp &= ~TRANS_DDI_FUNC_ENABLE;
		I915_WRITE(TRANS_DDI_FUNC_CTL(dsi_trans), tmp);
	}

	/* disable port sync mode if dual link */
	if (intel_dsi->dual_link) {
		for_each_dsi_port(port, intel_dsi->ports) {
			dsi_trans = dsi_port_to_transcoder(port);
			tmp = I915_READ(TRANS_DDI_FUNC_CTL2(dsi_trans));
			tmp &= ~PORT_SYNC_MODE_ENABLE;
			I915_WRITE(TRANS_DDI_FUNC_CTL2(dsi_trans), tmp);
		}
	}
}
996 | ||
019cec36 MC |
/*
 * Disable the DDI buffer on each DSI port and wait (up to 8 us per
 * port) for the buffer to report idle.
 */
static void gen11_dsi_disable_port(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	u32 tmp;
	enum port port;

	for_each_dsi_port(port, intel_dsi->ports) {
		tmp = I915_READ(DDI_BUF_CTL(port));
		tmp &= ~DDI_BUF_CTL_ENABLE;
		I915_WRITE(DDI_BUF_CTL(port), tmp);

		if (wait_for_us((I915_READ(DDI_BUF_CTL(port)) &
				 DDI_BUF_IS_IDLE),
				 8))
			DRM_ERROR("DDI port:%c buffer not idle\n",
				  port_name(port));
	}
}
1016 | ||
0f0fe849 MC |
/*
 * Release the DSI IO power domains and switch the combo PHY IO mode
 * back from DSI to DDI on every enabled port.
 *
 * NOTE(review): the power_put calls assume port A is always present
 * and port B only for dual link, while the mode reset below iterates
 * intel_dsi->ports — confirm these always describe the same set
 * (see gen11_dsi_get_power_domains, which iterates ports instead).
 */
static void gen11_dsi_disable_io_power(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	enum port port;
	u32 tmp;

	intel_display_power_put(dev_priv, POWER_DOMAIN_PORT_DDI_A_IO);

	if (intel_dsi->dual_link)
		intel_display_power_put(dev_priv, POWER_DOMAIN_PORT_DDI_B_IO);

	/* set mode to DDI */
	for_each_dsi_port(port, intel_dsi->ports) {
		tmp = I915_READ(ICL_DSI_IO_MODECTL(port));
		tmp &= ~COMBO_PHY_MODE_DSI;
		I915_WRITE(ICL_DSI_IO_MODECTL(port), tmp);
	}
}
1036 | ||
e2758048 MC |
/*
 * Encoder ->disable hook: full DSI teardown, the reverse of the enable
 * path.  The step order (backlight -> transcoder -> panel -> transcoder
 * deconfig -> port -> IO power) follows the hardware disable sequence
 * and must not be reordered.
 */
static void gen11_dsi_disable(struct intel_encoder *encoder,
			      const struct intel_crtc_state *old_crtc_state,
			      const struct drm_connector_state *old_conn_state)
{
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);

	/* step1: turn off backlight */
	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_BACKLIGHT_OFF);
	intel_panel_disable_backlight(old_conn_state);

	/* step2d,e: disable transcoder and wait */
	gen11_dsi_disable_transcoder(encoder);

	/* step2f,g: powerdown panel */
	gen11_dsi_powerdown_panel(encoder);

	/* step2h,i,j: deconfig trancoder */
	gen11_dsi_deconfigure_trancoder(encoder);

	/* step3: disable port */
	gen11_dsi_disable_port(encoder);

	/* step4: disable IO power */
	gen11_dsi_disable_io_power(encoder);
}
bf4d57ff | 1062 | |
8327af28 VK |
/*
 * Encoder ->get_config hook: read back state into @pipe_config.
 * Derives port_clock from the shared DPLL via the CNL WRPLL helper and
 * takes the pixel clock from the VBT-provided intel_dsi->pclk.
 */
static void gen11_dsi_get_config(struct intel_encoder *encoder,
				 struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	u32 pll_id;

	/* FIXME: adapt icl_ddi_clock_get() for DSI and use that? */
	pll_id = intel_get_shared_dpll_id(dev_priv, pipe_config->shared_dpll);
	pipe_config->port_clock = cnl_calc_wrpll_link(dev_priv, pll_id);
	pipe_config->base.adjusted_mode.crtc_clock = intel_dsi->pclk;
	pipe_config->output_types |= BIT(INTEL_OUTPUT_DSI);
}
1076 | ||
d04afb15 MC |
1077 | static bool gen11_dsi_compute_config(struct intel_encoder *encoder, |
1078 | struct intel_crtc_state *pipe_config, | |
1079 | struct drm_connector_state *conn_state) | |
1080 | { | |
1081 | struct intel_dsi *intel_dsi = container_of(encoder, struct intel_dsi, | |
1082 | base); | |
1083 | struct intel_connector *intel_connector = intel_dsi->attached_connector; | |
1084 | struct intel_crtc *crtc = to_intel_crtc(pipe_config->base.crtc); | |
1085 | const struct drm_display_mode *fixed_mode = | |
1086 | intel_connector->panel.fixed_mode; | |
1087 | struct drm_display_mode *adjusted_mode = | |
1088 | &pipe_config->base.adjusted_mode; | |
1089 | ||
1090 | intel_fixed_panel_mode(fixed_mode, adjusted_mode); | |
1091 | intel_pch_panel_fitting(crtc, pipe_config, conn_state->scaling_mode); | |
1092 | ||
1093 | adjusted_mode->flags = 0; | |
1094 | ||
1095 | /* Dual link goes to trancoder DSI'0' */ | |
1096 | if (intel_dsi->ports == BIT(PORT_B)) | |
1097 | pipe_config->cpu_transcoder = TRANSCODER_DSI_1; | |
1098 | else | |
1099 | pipe_config->cpu_transcoder = TRANSCODER_DSI_0; | |
1100 | ||
1101 | pipe_config->clock_set = true; | |
1102 | pipe_config->port_clock = intel_dsi_bitrate(intel_dsi) / 5; | |
1103 | ||
1104 | return true; | |
1105 | } | |
1106 | ||
ab841148 MC |
1107 | static u64 gen11_dsi_get_power_domains(struct intel_encoder *encoder, |
1108 | struct intel_crtc_state *crtc_state) | |
1109 | { | |
1110 | struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base); | |
1111 | u64 domains = 0; | |
1112 | enum port port; | |
1113 | ||
1114 | for_each_dsi_port(port, intel_dsi->ports) | |
1115 | if (port == PORT_A) | |
1116 | domains |= BIT_ULL(POWER_DOMAIN_PORT_DDI_A_IO); | |
1117 | else | |
1118 | domains |= BIT_ULL(POWER_DOMAIN_PORT_DDI_B_IO); | |
1119 | ||
1120 | return domains; | |
1121 | } | |
1122 | ||
/*
 * Encoder ->get_hw_state hook: report whether the DSI transcoder is
 * enabled and which pipe it drives.  Only proceeds if the encoder's
 * power domain is already up (takes a reference, released on exit).
 *
 * NOTE(review): when both ports are enabled, *pipe and ret reflect the
 * last port iterated — presumably both transcoders always agree in
 * dual-link mode; confirm.
 */
static bool gen11_dsi_get_hw_state(struct intel_encoder *encoder,
				   enum pipe *pipe)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	u32 tmp;
	enum port port;
	enum transcoder dsi_trans;
	bool ret = false;

	if (!intel_display_power_get_if_enabled(dev_priv,
						encoder->power_domain))
		return false;

	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		/* decode which pipe the transcoder's EDP input feeds */
		tmp = I915_READ(TRANS_DDI_FUNC_CTL(dsi_trans));
		switch (tmp & TRANS_DDI_EDP_INPUT_MASK) {
		case TRANS_DDI_EDP_INPUT_A_ON:
			*pipe = PIPE_A;
			break;
		case TRANS_DDI_EDP_INPUT_B_ONOFF:
			*pipe = PIPE_B;
			break;
		case TRANS_DDI_EDP_INPUT_C_ONOFF:
			*pipe = PIPE_C;
			break;
		default:
			DRM_ERROR("Invalid PIPE input\n");
			goto out;
		}

		tmp = I915_READ(PIPECONF(dsi_trans));
		ret = tmp & PIPECONF_ENABLE;
	}
out:
	intel_display_power_put(dev_priv, encoder->power_domain);
	return ret;
}
1162 | ||
e2758048 MC |
/* drm_encoder_funcs ->destroy: delegate to the common encoder destroy. */
static void gen11_dsi_encoder_destroy(struct drm_encoder *encoder)
{
	intel_encoder_destroy(encoder);
}
1167 | ||
/* DRM encoder vtable for the gen11 DSI encoder. */
static const struct drm_encoder_funcs gen11_dsi_encoder_funcs = {
	.destroy = gen11_dsi_encoder_destroy,
};
1171 | ||
/* DRM connector vtable: standard intel/atomic-helper implementations. */
static const struct drm_connector_funcs gen11_dsi_connector_funcs = {
	.late_register = intel_connector_register,
	.early_unregister = intel_connector_unregister,
	.destroy = intel_connector_destroy,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.atomic_get_property = intel_digital_connector_atomic_get_property,
	.atomic_set_property = intel_digital_connector_atomic_set_property,
	.atomic_destroy_state = drm_atomic_helper_connector_destroy_state,
	.atomic_duplicate_state = intel_digital_connector_duplicate_state,
};
1182 | ||
/* Connector helper vtable: mode probing/validation via common DSI code. */
static const struct drm_connector_helper_funcs gen11_dsi_connector_helper_funcs = {
	.get_modes = intel_dsi_get_modes,
	.mode_valid = intel_dsi_mode_valid,
	.atomic_check = intel_digital_connector_atomic_check,
};
1188 | ||
c5f9c934 MC |
/* mipi_dsi_host ->attach: nothing to do for this driver; always 0. */
static int gen11_dsi_host_attach(struct mipi_dsi_host *host,
				 struct mipi_dsi_device *dsi)
{
	return 0;
}
1194 | ||
/* mipi_dsi_host ->detach: nothing to do for this driver; always 0. */
static int gen11_dsi_host_detach(struct mipi_dsi_host *host,
				 struct mipi_dsi_device *dsi)
{
	return 0;
}
1200 | ||
/*
 * mipi_dsi_host ->transfer: send one DSI message to the panel.
 *
 * Builds a mipi_dsi_packet from @msg, sends the header (in LP mode if
 * MIPI_DSI_MSG_USE_LPM is set), then the payload for long packets.
 * Returns header size + payload length on success, negative errno on
 * failure.  Note the packet struct is passed to the send helpers by
 * value.  Receiving a response payload is not implemented (TODO below).
 */
static ssize_t gen11_dsi_host_transfer(struct mipi_dsi_host *host,
				       const struct mipi_dsi_msg *msg)
{
	struct intel_dsi_host *intel_dsi_host = to_intel_dsi_host(host);
	struct mipi_dsi_packet dsi_pkt;
	ssize_t ret;
	bool enable_lpdt = false;

	ret = mipi_dsi_create_packet(&dsi_pkt, msg);
	if (ret < 0)
		return ret;

	if (msg->flags & MIPI_DSI_MSG_USE_LPM)
		enable_lpdt = true;

	/* send packet header */
	ret = dsi_send_pkt_hdr(intel_dsi_host, dsi_pkt, enable_lpdt);
	if (ret < 0)
		return ret;

	/* only long packet contains payload */
	if (mipi_dsi_packet_format_is_long(msg->type)) {
		ret = dsi_send_pkt_payld(intel_dsi_host, dsi_pkt);
		if (ret < 0)
			return ret;
	}

	//TODO: add payload receive code if needed

	ret = sizeof(dsi_pkt.header) + dsi_pkt.payload_length;

	return ret;
}
1234 | ||
/* MIPI DSI host vtable handed to intel_dsi_host_init() below. */
static const struct mipi_dsi_host_ops gen11_dsi_host_ops = {
	.attach = gen11_dsi_host_attach,
	.detach = gen11_dsi_host_detach,
	.transfer = gen11_dsi_host_transfer,
};
1240 | ||
bf4d57ff MC |
/*
 * Probe and register the ICL (gen11) DSI output: allocate the encoder
 * and connector, wire up the encoder hooks, read the fixed mode from
 * VBT, set up the panel/backlight, and create a DSI host per enabled
 * port.  Returns silently if VBT reports no DSI present or allocation
 * fails early.
 */
void icl_dsi_init(struct drm_i915_private *dev_priv)
{
	struct drm_device *dev = &dev_priv->drm;
	struct intel_dsi *intel_dsi;
	struct intel_encoder *encoder;
	struct intel_connector *intel_connector;
	struct drm_connector *connector;
	struct drm_display_mode *scan, *fixed_mode = NULL;
	enum port port;

	if (!intel_bios_is_dsi_present(dev_priv, &port))
		return;

	intel_dsi = kzalloc(sizeof(*intel_dsi), GFP_KERNEL);
	if (!intel_dsi)
		return;

	intel_connector = intel_connector_alloc();
	if (!intel_connector) {
		kfree(intel_dsi);
		return;
	}

	encoder = &intel_dsi->base;
	intel_dsi->attached_connector = intel_connector;
	connector = &intel_connector->base;

	/* register DSI encoder with DRM subsystem */
	drm_encoder_init(dev, &encoder->base, &gen11_dsi_encoder_funcs,
			 DRM_MODE_ENCODER_DSI, "DSI %c", port_name(port));

	encoder->pre_pll_enable = gen11_dsi_pre_pll_enable;
	encoder->pre_enable = gen11_dsi_pre_enable;
	encoder->disable = gen11_dsi_disable;
	encoder->port = port;
	encoder->get_config = gen11_dsi_get_config;
	encoder->compute_config = gen11_dsi_compute_config;
	encoder->get_hw_state = gen11_dsi_get_hw_state;
	encoder->type = INTEL_OUTPUT_DSI;
	encoder->cloneable = 0;
	encoder->crtc_mask = BIT(PIPE_A) | BIT(PIPE_B) | BIT(PIPE_C);
	encoder->power_domain = POWER_DOMAIN_PORT_DSI;
	encoder->get_power_domains = gen11_dsi_get_power_domains;

	/* register DSI connector with DRM subsystem */
	drm_connector_init(dev, connector, &gen11_dsi_connector_funcs,
			   DRM_MODE_CONNECTOR_DSI);
	drm_connector_helper_add(connector, &gen11_dsi_connector_helper_funcs);
	connector->display_info.subpixel_order = SubPixelHorizontalRGB;
	connector->interlace_allowed = false;
	connector->doublescan_allowed = false;
	intel_connector->get_hw_state = intel_connector_get_hw_state;

	/* attach connector to encoder */
	intel_connector_attach_encoder(intel_connector, encoder);

	/* fill mode info from VBT: take the first preferred probed mode */
	mutex_lock(&dev->mode_config.mutex);
	intel_dsi_vbt_get_modes(intel_dsi);
	list_for_each_entry(scan, &connector->probed_modes, head) {
		if (scan->type & DRM_MODE_TYPE_PREFERRED) {
			fixed_mode = drm_mode_duplicate(dev, scan);
			break;
		}
	}
	mutex_unlock(&dev->mode_config.mutex);

	if (!fixed_mode) {
		DRM_ERROR("DSI fixed mode info missing\n");
		goto err;
	}

	connector->display_info.width_mm = fixed_mode->width_mm;
	connector->display_info.height_mm = fixed_mode->height_mm;
	/* panel takes ownership of fixed_mode here */
	intel_panel_init(&intel_connector->panel, fixed_mode, NULL);
	intel_panel_setup_backlight(connector, INVALID_PIPE);


	if (dev_priv->vbt.dsi.config->dual_link)
		intel_dsi->ports = BIT(PORT_A) | BIT(PORT_B);
	else
		intel_dsi->ports = BIT(port);

	intel_dsi->dcs_backlight_ports = dev_priv->vbt.dsi.bl_ports;
	intel_dsi->dcs_cabc_ports = dev_priv->vbt.dsi.cabc_ports;

	/* one DSI host per enabled port */
	for_each_dsi_port(port, intel_dsi->ports) {
		struct intel_dsi_host *host;

		host = intel_dsi_host_init(intel_dsi, &gen11_dsi_host_ops, port);
		if (!host)
			goto err;

		intel_dsi->dsi_hosts[port] = host;
	}

	if (!intel_dsi_vbt_init(intel_dsi, MIPI_DSI_GENERIC_PANEL_ID)) {
		DRM_DEBUG_KMS("no device found\n");
		goto err;
	}

	return;

	/*
	 * NOTE(review): on the late error paths, kfree(intel_connector)
	 * does not run intel_panel destruction, so the duplicated
	 * fixed_mode and any host allocations may be leaked — confirm
	 * cleanup expectations with the panel/host helpers.
	 */
err:
	drm_encoder_cleanup(&encoder->base);
	kfree(intel_dsi);
	kfree(intel_connector);
}