/*
 * Copyright (c) 2014-2018, The Linux Foundation. All rights reserved.
 * Copyright (C) 2013 Red Hat
 * Author: Rob Clark <robdclark@gmail.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 as published by
 * the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License along with
 * this program. If not, see <http://www.gnu.org/licenses/>.
 */
#define pr_fmt(fmt)	"[drm:%s:%d] " fmt, __func__, __LINE__
#include <linux/kthread.h>
#include <linux/debugfs.h>
#include <linux/seq_file.h>

#include <drm/drm_crtc.h>
#include <drm/drm_crtc_helper.h>

#include "dpu_hw_catalog.h"
#include "dpu_hw_intf.h"
#include "dpu_hw_ctl.h"
#include "dpu_formats.h"
#include "dpu_encoder_phys.h"
#include "dpu_trace.h"
#include "dpu_core_irq.h"
#define DPU_DEBUG_ENC(e, fmt, ...) DPU_DEBUG("enc%d " fmt,\
		(e) ? (e)->base.base.id : -1, ##__VA_ARGS__)

#define DPU_ERROR_ENC(e, fmt, ...) DPU_ERROR("enc%d " fmt,\
		(e) ? (e)->base.base.id : -1, ##__VA_ARGS__)

#define DPU_DEBUG_PHYS(p, fmt, ...) DPU_DEBUG("enc%d intf%d pp%d " fmt,\
		(p) ? (p)->parent->base.id : -1, \
		(p) ? (p)->intf_idx - INTF_0 : -1, \
		(p) ? ((p)->hw_pp ? (p)->hw_pp->idx - PINGPONG_0 : -1) : -1, \
		##__VA_ARGS__)

#define DPU_ERROR_PHYS(p, fmt, ...) DPU_ERROR("enc%d intf%d pp%d " fmt,\
		(p) ? (p)->parent->base.id : -1, \
		(p) ? (p)->intf_idx - INTF_0 : -1, \
		(p) ? ((p)->hw_pp ? (p)->hw_pp->idx - PINGPONG_0 : -1) : -1, \
		##__VA_ARGS__)
/*
 * Two to anticipate panels that can do cmd/vid dynamic switching.
 * The plan is to create all possible physical encoder types, and switch
 * between them at runtime.
 */
#define NUM_PHYS_ENCODER_TYPES 2

#define MAX_PHYS_ENCODERS_PER_VIRTUAL \
	(MAX_H_TILES_PER_DISPLAY * NUM_PHYS_ENCODER_TYPES)

#define MAX_CHANNELS_PER_ENC 2

#define IDLE_SHORT_TIMEOUT	1

#define MAX_VDISPLAY_SPLIT 1080
/**
 * enum dpu_enc_rc_events - events for resource control state machine
 * @DPU_ENC_RC_EVENT_KICKOFF:
 *	This event happens at NORMAL priority.
 *	Event that signals the start of the transfer. When this event is
 *	received, enable MDP/DSI core clocks. Regardless of the previous
 *	state, the resource should be in ON state at the end of this event.
 * @DPU_ENC_RC_EVENT_FRAME_DONE:
 *	This event happens at INTERRUPT level.
 *	Event signals the end of the data transfer after the PP FRAME_DONE
 *	event. At the end of this event, a delayed work is scheduled to go to
 *	IDLE_PC state after IDLE_TIMEOUT time.
 * @DPU_ENC_RC_EVENT_PRE_STOP:
 *	This event happens at NORMAL priority.
 *	This event, when received during the ON state, leaves the RC STATE
 *	in the PRE_OFF state. It should be followed by the STOP event as
 *	part of encoder disable.
 *	If received during IDLE or OFF states, it will do nothing.
 * @DPU_ENC_RC_EVENT_STOP:
 *	This event happens at NORMAL priority.
 *	When this event is received, disable all the MDP/DSI core clocks, and
 *	disable IRQs. It should be called from the PRE_OFF or IDLE states.
 *	IDLE is expected when IDLE_PC has run, and PRE_OFF did nothing.
 *	PRE_OFF is expected when PRE_STOP was executed during the ON state.
 *	Resource state should be in OFF at the end of the event.
 * @DPU_ENC_RC_EVENT_ENTER_IDLE:
 *	This event happens at NORMAL priority from a work item.
 *	Event signals that there were no frame updates for IDLE_TIMEOUT time.
 *	This would disable MDP/DSI core clocks and change the resource state
 *	to IDLE.
 */
enum dpu_enc_rc_events {
	DPU_ENC_RC_EVENT_KICKOFF = 1,
	DPU_ENC_RC_EVENT_FRAME_DONE,
	DPU_ENC_RC_EVENT_PRE_STOP,
	DPU_ENC_RC_EVENT_STOP,
	DPU_ENC_RC_EVENT_ENTER_IDLE
};
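/*
 * Illustrative sequence (comment only, not driver code): for a command-mode
 * panel with idle power collapse, a frame typically walks the state machine
 * like this, assuming no errors along the way:
 *
 *	dpu_encoder_resource_control(enc, DPU_ENC_RC_EVENT_KICKOFF);     OFF/IDLE -> ON
 *	dpu_encoder_resource_control(enc, DPU_ENC_RC_EVENT_FRAME_DONE);  schedules idle work
 *	... no frame updates for IDLE_TIMEOUT ...
 *	dpu_encoder_resource_control(enc, DPU_ENC_RC_EVENT_ENTER_IDLE);  ON -> IDLE
 *	dpu_encoder_resource_control(enc, DPU_ENC_RC_EVENT_PRE_STOP);    -> PRE_OFF (no-op from IDLE/OFF)
 *	dpu_encoder_resource_control(enc, DPU_ENC_RC_EVENT_STOP);        PRE_OFF/IDLE -> OFF
 */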
/**
 * enum dpu_enc_rc_states - states that the resource control maintains
 * @DPU_ENC_RC_STATE_OFF: Resource is in OFF state
 * @DPU_ENC_RC_STATE_PRE_OFF: Resource is transitioning to OFF state
 * @DPU_ENC_RC_STATE_ON: Resource is in ON state
 * @DPU_ENC_RC_STATE_MODESET: Resource is in modeset state
 * @DPU_ENC_RC_STATE_IDLE: Resource is in IDLE state
 */
enum dpu_enc_rc_states {
	DPU_ENC_RC_STATE_OFF,
	DPU_ENC_RC_STATE_PRE_OFF,
	DPU_ENC_RC_STATE_ON,
	DPU_ENC_RC_STATE_IDLE
};
/**
 * struct dpu_encoder_virt - virtual encoder. Container of one or more physical
 *	encoders. Virtual encoder manages one "logical" display. Physical
 *	encoders manage one intf block, tied to a specific panel/sub-panel.
 *	Virtual encoder defers as much as possible to the physical encoders.
 *	Virtual encoder registers itself with the DRM Framework as the encoder.
 * @base:		drm_encoder base class for registration with DRM
 * @enc_spin_lock:	Virtual-Encoder-Wide Spin Lock for IRQ purposes
 * @bus_scaling_client:	Client handle to the bus scaling interface
 * @num_phys_encs:	Actual number of physical encoders contained.
 * @phys_encs:		Container of physical encoders managed.
 * @cur_master:		Pointer to the current master in this mode. Optimization
 *			Only valid after enable. Cleared at disable.
 * @hw_pp:		Handle to the pingpong blocks used for the display. The
 *			number of pingpong blocks can differ from num_phys_encs.
 * @intfs_swapped:	Whether or not the phys_enc interfaces have been swapped
 *			for partial update right-only cases, such as pingpong
 *			split where virtual pingpong does not generate IRQs
 * @crtc_vblank_cb:	Callback into the upper layer / CRTC for
 *			notification of the VBLANK
 * @crtc_vblank_cb_data:	Data from upper layer for VBLANK notification
 * @crtc_kickoff_cb:	Callback into CRTC that will flush & start
 * @crtc_kickoff_cb_data:	Opaque user data given to crtc_kickoff_cb
 * @debugfs_root:	Debug file system root file node
 * @enc_lock:		Lock around physical encoder create/destroy and
 *			access.
 * @frame_busy_mask:	Bitmask tracking which phys_encs we are still
 *			busy processing the current command.
 *			Bit0 = phys_encs[0] etc.
 * @crtc_frame_event_cb:	callback handler for frame event
 * @crtc_frame_event_cb_data:	callback handler private data
 * @frame_done_timeout:	frame done timeout in Hz
 * @frame_done_timer:	watchdog timer for frame done event
 * @vsync_event_timer:	vsync timer
 * @disp_info:		local copy of msm_display_info struct
 * @idle_pc_supported:	indicate if idle power collapse is supported
 * @rc_lock:		resource control mutex lock to protect
 *			virt encoder over various state changes
 * @rc_state:		resource controller state
 * @delayed_off_work:	delayed worker to schedule disabling of
 *			clks and resources after IDLE_TIMEOUT time.
 * @vsync_event_work:	worker to handle vsync event for autorefresh
 * @topology:		topology of the display
 * @mode_set_complete:	flag to indicate modeset completion
 * @idle_timeout:	idle timeout duration in milliseconds
 */
struct dpu_encoder_virt {
	struct drm_encoder base;
	spinlock_t enc_spinlock;
	uint32_t bus_scaling_client;

	unsigned int num_phys_encs;
	struct dpu_encoder_phys *phys_encs[MAX_PHYS_ENCODERS_PER_VIRTUAL];
	struct dpu_encoder_phys *cur_master;
	struct dpu_encoder_phys *cur_slave;
	struct dpu_hw_pingpong *hw_pp[MAX_CHANNELS_PER_ENC];

	bool intfs_swapped;

	void (*crtc_vblank_cb)(void *);
	void *crtc_vblank_cb_data;

	struct dentry *debugfs_root;
	struct mutex enc_lock;
	DECLARE_BITMAP(frame_busy_mask, MAX_PHYS_ENCODERS_PER_VIRTUAL);
	void (*crtc_frame_event_cb)(void *, u32 event);
	void *crtc_frame_event_cb_data;

	atomic_t frame_done_timeout;
	struct timer_list frame_done_timer;
	struct timer_list vsync_event_timer;

	struct msm_display_info disp_info;

	bool idle_pc_supported;
	struct mutex rc_lock;
	enum dpu_enc_rc_states rc_state;
	struct kthread_delayed_work delayed_off_work;
	struct kthread_work vsync_event_work;
	struct msm_display_topology topology;
	bool mode_set_complete;

	u32 idle_timeout;
};
#define to_dpu_encoder_virt(x) container_of(x, struct dpu_encoder_virt, base)

static inline int _dpu_encoder_power_enable(struct dpu_encoder_virt *dpu_enc,
								bool enable)
{
	struct drm_encoder *drm_enc;
	struct msm_drm_private *priv;
	struct dpu_kms *dpu_kms;

	if (!dpu_enc) {
		DPU_ERROR("invalid dpu enc\n");
		return -EINVAL;
	}

	drm_enc = &dpu_enc->base;
	if (!drm_enc->dev || !drm_enc->dev->dev_private) {
		DPU_ERROR("drm device invalid\n");
		return -EINVAL;
	}

	priv = drm_enc->dev->dev_private;
	if (!priv->kms) {
		DPU_ERROR("invalid kms\n");
		return -EINVAL;
	}

	dpu_kms = to_dpu_kms(priv->kms);

	if (enable)
		pm_runtime_get_sync(&dpu_kms->pdev->dev);
	else
		pm_runtime_put_sync(&dpu_kms->pdev->dev);

	return 0;
}
void dpu_encoder_helper_report_irq_timeout(struct dpu_encoder_phys *phys_enc,
		enum dpu_intr_idx intr_idx)
{
	DRM_ERROR("irq timeout id=%u, intf=%d, pp=%d, intr=%d\n",
		  DRMID(phys_enc->parent), phys_enc->intf_idx - INTF_0,
		  phys_enc->hw_pp->idx - PINGPONG_0, intr_idx);

	if (phys_enc->parent_ops->handle_frame_done)
		phys_enc->parent_ops->handle_frame_done(
				phys_enc->parent, phys_enc,
				DPU_ENCODER_FRAME_EVENT_ERROR);
}
static int dpu_encoder_helper_wait_event_timeout(int32_t drm_id,
		int32_t hw_id, struct dpu_encoder_wait_info *info);
int dpu_encoder_helper_wait_for_irq(struct dpu_encoder_phys *phys_enc,
		enum dpu_intr_idx intr_idx,
		struct dpu_encoder_wait_info *wait_info)
{
	struct dpu_encoder_irq *irq;
	u32 irq_status;
	int ret;

	if (!phys_enc || !wait_info || intr_idx >= INTR_IDX_MAX) {
		DPU_ERROR("invalid params\n");
		return -EINVAL;
	}
	irq = &phys_enc->irq[intr_idx];

	/* note: do master / slave checking outside */

	/* return EWOULDBLOCK since we know the wait isn't necessary */
	if (phys_enc->enable_state == DPU_ENC_DISABLED) {
		DRM_ERROR("encoder is disabled id=%u, intr=%d, hw=%d, irq=%d",
			  DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
			  irq->irq_idx);
		return -EWOULDBLOCK;
	}

	if (irq->irq_idx < 0) {
		DRM_DEBUG_KMS("skip irq wait id=%u, intr=%d, hw=%d, irq=%s",
			      DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
			      irq->name);
		return 0;
	}

	DRM_DEBUG_KMS("id=%u, intr=%d, hw=%d, irq=%d, pp=%d, pending_cnt=%d",
		      DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
		      irq->irq_idx, phys_enc->hw_pp->idx - PINGPONG_0,
		      atomic_read(wait_info->atomic_cnt));

	ret = dpu_encoder_helper_wait_event_timeout(
			DRMID(phys_enc->parent),
			irq->hw_idx,
			wait_info);

	if (ret <= 0) {
		irq_status = dpu_core_irq_read(phys_enc->dpu_kms,
				irq->irq_idx, true);
		if (irq_status) {
			unsigned long flags;

			DRM_DEBUG_KMS("irq not triggered id=%u, intr=%d, hw=%d, irq=%d, pp=%d, atomic_cnt=%d",
				      DRMID(phys_enc->parent), intr_idx,
				      irq->hw_idx, irq->irq_idx,
				      phys_enc->hw_pp->idx - PINGPONG_0,
				      atomic_read(wait_info->atomic_cnt));
			local_irq_save(flags);
			irq->cb.func(phys_enc, irq->irq_idx);
			local_irq_restore(flags);
			ret = 0;
		} else {
			ret = -ETIMEDOUT;
			DRM_DEBUG_KMS("irq timeout id=%u, intr=%d, hw=%d, irq=%d, pp=%d, atomic_cnt=%d",
				      DRMID(phys_enc->parent), intr_idx,
				      irq->hw_idx, irq->irq_idx,
				      phys_enc->hw_pp->idx - PINGPONG_0,
				      atomic_read(wait_info->atomic_cnt));
		}
	} else {
		ret = 0;
		trace_dpu_enc_irq_wait_success(DRMID(phys_enc->parent),
			intr_idx, irq->hw_idx, irq->irq_idx,
			phys_enc->hw_pp->idx - PINGPONG_0,
			atomic_read(wait_info->atomic_cnt));
	}

	return ret;
}
int dpu_encoder_helper_register_irq(struct dpu_encoder_phys *phys_enc,
		enum dpu_intr_idx intr_idx)
{
	struct dpu_encoder_irq *irq;
	int ret = 0;

	if (!phys_enc || intr_idx >= INTR_IDX_MAX) {
		DPU_ERROR("invalid params\n");
		return -EINVAL;
	}
	irq = &phys_enc->irq[intr_idx];

	if (irq->irq_idx >= 0) {
		DPU_DEBUG_PHYS(phys_enc,
				"skipping already registered irq %s type %d\n",
				irq->name, irq->intr_type);
		return 0;
	}

	irq->irq_idx = dpu_core_irq_idx_lookup(phys_enc->dpu_kms,
			irq->intr_type, irq->hw_idx);
	if (irq->irq_idx < 0) {
		DPU_ERROR_PHYS(phys_enc,
			"failed to lookup IRQ index for %s type:%d\n",
			irq->name, irq->intr_type);
		return -EINVAL;
	}

	ret = dpu_core_irq_register_callback(phys_enc->dpu_kms, irq->irq_idx,
			&irq->cb);
	if (ret) {
		DPU_ERROR_PHYS(phys_enc,
			"failed to register IRQ callback for %s\n",
			irq->name);
		irq->irq_idx = -EINVAL;
		return ret;
	}

	ret = dpu_core_irq_enable(phys_enc->dpu_kms, &irq->irq_idx, 1);
	if (ret) {
		DRM_ERROR("enable failed id=%u, intr=%d, hw=%d, irq=%d",
			  DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
			  irq->irq_idx);
		dpu_core_irq_unregister_callback(phys_enc->dpu_kms,
				irq->irq_idx, &irq->cb);
		irq->irq_idx = -EINVAL;
		return ret;
	}

	trace_dpu_enc_irq_register_success(DRMID(phys_enc->parent), intr_idx,
				irq->hw_idx, irq->irq_idx);

	return ret;
}
int dpu_encoder_helper_unregister_irq(struct dpu_encoder_phys *phys_enc,
		enum dpu_intr_idx intr_idx)
{
	struct dpu_encoder_irq *irq;
	int ret;

	if (!phys_enc) {
		DPU_ERROR("invalid encoder\n");
		return -EINVAL;
	}
	irq = &phys_enc->irq[intr_idx];

	/* silently skip irqs that weren't registered */
	if (irq->irq_idx < 0) {
		DRM_ERROR("duplicate unregister id=%u, intr=%d, hw=%d, irq=%d",
			  DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
			  irq->irq_idx);
		return 0;
	}

	ret = dpu_core_irq_disable(phys_enc->dpu_kms, &irq->irq_idx, 1);
	if (ret)
		DRM_ERROR("disable failed id=%u, intr=%d, hw=%d, irq=%d ret=%d",
			  DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
			  irq->irq_idx, ret);

	ret = dpu_core_irq_unregister_callback(phys_enc->dpu_kms, irq->irq_idx,
			&irq->cb);
	if (ret)
		DRM_ERROR("unreg cb fail id=%u, intr=%d, hw=%d, irq=%d ret=%d",
			  DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
			  irq->irq_idx, ret);

	trace_dpu_enc_irq_unregister_success(DRMID(phys_enc->parent), intr_idx,
					     irq->hw_idx, irq->irq_idx);

	irq->irq_idx = -EINVAL;

	return 0;
}
void dpu_encoder_get_hw_resources(struct drm_encoder *drm_enc,
		struct dpu_encoder_hw_resources *hw_res)
{
	struct dpu_encoder_virt *dpu_enc = NULL;
	int i = 0;

	dpu_enc = to_dpu_encoder_virt(drm_enc);
	DPU_DEBUG_ENC(dpu_enc, "\n");

	/* Query resources used by phys encs, expected to be without overlap */
	memset(hw_res, 0, sizeof(*hw_res));

	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (phys && phys->ops.get_hw_resources)
			phys->ops.get_hw_resources(phys, hw_res);
	}
}
static void dpu_encoder_destroy(struct drm_encoder *drm_enc)
{
	struct dpu_encoder_virt *dpu_enc = NULL;
	int i = 0;

	if (!drm_enc) {
		DPU_ERROR("invalid encoder\n");
		return;
	}

	dpu_enc = to_dpu_encoder_virt(drm_enc);
	DPU_DEBUG_ENC(dpu_enc, "\n");

	mutex_lock(&dpu_enc->enc_lock);

	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (phys && phys->ops.destroy) {
			phys->ops.destroy(phys);
			--dpu_enc->num_phys_encs;
			dpu_enc->phys_encs[i] = NULL;
		}
	}

	if (dpu_enc->num_phys_encs)
		DPU_ERROR_ENC(dpu_enc, "expected 0 num_phys_encs not %d\n",
				dpu_enc->num_phys_encs);
	dpu_enc->num_phys_encs = 0;
	mutex_unlock(&dpu_enc->enc_lock);

	drm_encoder_cleanup(drm_enc);
	mutex_destroy(&dpu_enc->enc_lock);
}
void dpu_encoder_helper_split_config(
		struct dpu_encoder_phys *phys_enc,
		enum dpu_intf interface)
{
	struct dpu_encoder_virt *dpu_enc;
	struct split_pipe_cfg cfg = { 0 };
	struct dpu_hw_mdp *hw_mdptop;
	struct msm_display_info *disp_info;

	if (!phys_enc || !phys_enc->hw_mdptop || !phys_enc->parent) {
		DPU_ERROR("invalid arg(s), encoder %d\n", phys_enc != 0);
		return;
	}

	dpu_enc = to_dpu_encoder_virt(phys_enc->parent);
	hw_mdptop = phys_enc->hw_mdptop;
	disp_info = &dpu_enc->disp_info;

	if (disp_info->intf_type != DRM_MODE_ENCODER_DSI)
		return;

	/*
	 * Disable split modes since the encoder will be operating as the only
	 * encoder, either for the entire use case in the case of, for example,
	 * single DSI, or for this frame in the case of left/right only partial
	 * update.
	 */
	if (phys_enc->split_role == ENC_ROLE_SOLO) {
		if (hw_mdptop->ops.setup_split_pipe)
			hw_mdptop->ops.setup_split_pipe(hw_mdptop, &cfg);
		return;
	}

	cfg.en = true;
	cfg.mode = phys_enc->intf_mode;
	cfg.intf = interface;

	if (cfg.en && phys_enc->ops.needs_single_flush &&
			phys_enc->ops.needs_single_flush(phys_enc))
		cfg.split_flush_en = true;

	if (phys_enc->split_role == ENC_ROLE_MASTER) {
		DPU_DEBUG_ENC(dpu_enc, "enable %d\n", cfg.en);

		if (hw_mdptop->ops.setup_split_pipe)
			hw_mdptop->ops.setup_split_pipe(hw_mdptop, &cfg);
	}
}
static void _dpu_encoder_adjust_mode(struct drm_connector *connector,
		struct drm_display_mode *adj_mode)
{
	struct drm_display_mode *cur_mode;

	if (!connector || !adj_mode)
		return;

	list_for_each_entry(cur_mode, &connector->modes, head) {
		if (cur_mode->vdisplay == adj_mode->vdisplay &&
		    cur_mode->hdisplay == adj_mode->hdisplay &&
		    cur_mode->vrefresh == adj_mode->vrefresh) {
			adj_mode->private = cur_mode->private;
			adj_mode->private_flags |= cur_mode->private_flags;
		}
	}
}
static struct msm_display_topology dpu_encoder_get_topology(
			struct dpu_encoder_virt *dpu_enc,
			struct dpu_kms *dpu_kms,
			struct drm_display_mode *mode)
{
	struct msm_display_topology topology;
	int i, intf_count = 0;

	for (i = 0; i < MAX_PHYS_ENCODERS_PER_VIRTUAL; i++)
		if (dpu_enc->phys_encs[i])
			intf_count++;

	/* Use split topology for vdisplay > 1080 */
	topology.num_lm = (mode->vdisplay > MAX_VDISPLAY_SPLIT) ? 2 : 1;
	topology.num_enc = 0;
	topology.num_intf = intf_count;

	return topology;
}
static int dpu_encoder_virt_atomic_check(
		struct drm_encoder *drm_enc,
		struct drm_crtc_state *crtc_state,
		struct drm_connector_state *conn_state)
{
	struct dpu_encoder_virt *dpu_enc;
	struct msm_drm_private *priv;
	struct dpu_kms *dpu_kms;
	const struct drm_display_mode *mode;
	struct drm_display_mode *adj_mode;
	struct msm_display_topology topology;
	int i = 0;
	int ret = 0;

	if (!drm_enc || !crtc_state || !conn_state) {
		DPU_ERROR("invalid arg(s), drm_enc %d, crtc/conn state %d/%d\n",
				drm_enc != 0, crtc_state != 0, conn_state != 0);
		return -EINVAL;
	}

	dpu_enc = to_dpu_encoder_virt(drm_enc);
	DPU_DEBUG_ENC(dpu_enc, "\n");

	priv = drm_enc->dev->dev_private;
	dpu_kms = to_dpu_kms(priv->kms);
	mode = &crtc_state->mode;
	adj_mode = &crtc_state->adjusted_mode;
	trace_dpu_enc_atomic_check(DRMID(drm_enc));

	/*
	 * display drivers may populate private fields of the drm display mode
	 * structure while registering possible modes of a connector with DRM.
	 * These private fields are not populated back while DRM invokes
	 * the mode_set callbacks. This module retrieves and populates the
	 * private fields of the given mode.
	 */
	_dpu_encoder_adjust_mode(conn_state->connector, adj_mode);

	/* perform atomic check on the first physical encoder (master) */
	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (phys && phys->ops.atomic_check)
			ret = phys->ops.atomic_check(phys, crtc_state,
					conn_state);
		else if (phys && phys->ops.mode_fixup)
			if (!phys->ops.mode_fixup(phys, mode, adj_mode))
				ret = -EINVAL;

		if (ret) {
			DPU_ERROR_ENC(dpu_enc,
					"mode unsupported, phys idx %d\n", i);
			break;
		}
	}

	topology = dpu_encoder_get_topology(dpu_enc, dpu_kms, adj_mode);

	/* Reserve dynamic resources now. Indicating AtomicTest phase */
	if (!ret) {
		/*
		 * Avoid reserving resources when mode set is pending. Topology
		 * info may not be available to complete reservation.
		 */
		if (drm_atomic_crtc_needs_modeset(crtc_state)
				&& dpu_enc->mode_set_complete) {
			ret = dpu_rm_reserve(&dpu_kms->rm, drm_enc, crtc_state,
					     topology, true);
			dpu_enc->mode_set_complete = false;
		}
	}

	if (!ret)
		drm_mode_set_crtcinfo(adj_mode, 0);

	trace_dpu_enc_atomic_check_flags(DRMID(drm_enc), adj_mode->flags,
			adj_mode->private_flags);

	return ret;
}
static void _dpu_encoder_update_vsync_source(struct dpu_encoder_virt *dpu_enc,
			struct msm_display_info *disp_info)
{
	struct dpu_vsync_source_cfg vsync_cfg = { 0 };
	struct msm_drm_private *priv;
	struct dpu_kms *dpu_kms;
	struct dpu_hw_mdp *hw_mdptop;
	struct drm_encoder *drm_enc;
	int i;

	if (!dpu_enc || !disp_info) {
		DPU_ERROR("invalid param dpu_enc:%d or disp_info:%d\n",
					dpu_enc != NULL, disp_info != NULL);
		return;
	} else if (dpu_enc->num_phys_encs > ARRAY_SIZE(dpu_enc->hw_pp)) {
		DPU_ERROR("invalid num phys enc %d/%d\n",
				dpu_enc->num_phys_encs,
				(int) ARRAY_SIZE(dpu_enc->hw_pp));
		return;
	}

	drm_enc = &dpu_enc->base;
	/* these pointers are checked in virt_enable_helper */
	priv = drm_enc->dev->dev_private;

	dpu_kms = to_dpu_kms(priv->kms);
	if (!dpu_kms) {
		DPU_ERROR("invalid dpu_kms\n");
		return;
	}

	hw_mdptop = dpu_kms->hw_mdp;
	if (!hw_mdptop) {
		DPU_ERROR("invalid mdptop\n");
		return;
	}

	if (hw_mdptop->ops.setup_vsync_source &&
			disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE) {
		for (i = 0; i < dpu_enc->num_phys_encs; i++)
			vsync_cfg.ppnumber[i] = dpu_enc->hw_pp[i]->idx;

		vsync_cfg.pp_count = dpu_enc->num_phys_encs;
		if (disp_info->is_te_using_watchdog_timer)
			vsync_cfg.vsync_source = DPU_VSYNC_SOURCE_WD_TIMER_0;
		else
			vsync_cfg.vsync_source = DPU_VSYNC0_SOURCE_GPIO;

		hw_mdptop->ops.setup_vsync_source(hw_mdptop, &vsync_cfg);
	}
}
static void _dpu_encoder_irq_control(struct drm_encoder *drm_enc, bool enable)
{
	struct dpu_encoder_virt *dpu_enc;
	int i;

	if (!drm_enc) {
		DPU_ERROR("invalid encoder\n");
		return;
	}

	dpu_enc = to_dpu_encoder_virt(drm_enc);

	DPU_DEBUG_ENC(dpu_enc, "enable:%d\n", enable);
	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (phys && phys->ops.irq_control)
			phys->ops.irq_control(phys, enable);
	}
}
static void _dpu_encoder_resource_control_helper(struct drm_encoder *drm_enc,
		bool enable)
{
	struct msm_drm_private *priv;
	struct dpu_kms *dpu_kms;
	struct dpu_encoder_virt *dpu_enc;

	dpu_enc = to_dpu_encoder_virt(drm_enc);
	priv = drm_enc->dev->dev_private;
	dpu_kms = to_dpu_kms(priv->kms);

	trace_dpu_enc_rc_helper(DRMID(drm_enc), enable);

	if (!dpu_enc->cur_master) {
		DPU_ERROR("encoder master not set\n");
		return;
	}

	if (enable) {
		/* enable DPU core clks */
		pm_runtime_get_sync(&dpu_kms->pdev->dev);

		/* enable all the irq */
		_dpu_encoder_irq_control(drm_enc, true);
	} else {
		/* disable all the irq */
		_dpu_encoder_irq_control(drm_enc, false);

		/* disable DPU core clks */
		pm_runtime_put_sync(&dpu_kms->pdev->dev);
	}
}
static int dpu_encoder_resource_control(struct drm_encoder *drm_enc,
		u32 sw_event)
{
	struct dpu_encoder_virt *dpu_enc;
	struct msm_drm_private *priv;
	struct msm_drm_thread *disp_thread;
	bool is_vid_mode = false;

	if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private ||
			!drm_enc->crtc) {
		DPU_ERROR("invalid parameters\n");
		return -EINVAL;
	}
	dpu_enc = to_dpu_encoder_virt(drm_enc);
	priv = drm_enc->dev->dev_private;
	is_vid_mode = dpu_enc->disp_info.capabilities &
						MSM_DISPLAY_CAP_VID_MODE;

	if (drm_enc->crtc->index >= ARRAY_SIZE(priv->disp_thread)) {
		DPU_ERROR("invalid crtc index\n");
		return -EINVAL;
	}
	disp_thread = &priv->disp_thread[drm_enc->crtc->index];

	/*
	 * when idle_pc is not supported, process only KICKOFF, STOP and MODESET
	 * events and return early for other events (ie wb display).
	 */
	if (!dpu_enc->idle_pc_supported &&
			(sw_event != DPU_ENC_RC_EVENT_KICKOFF &&
			sw_event != DPU_ENC_RC_EVENT_STOP &&
			sw_event != DPU_ENC_RC_EVENT_PRE_STOP))
		return 0;

	trace_dpu_enc_rc(DRMID(drm_enc), sw_event, dpu_enc->idle_pc_supported,
			 dpu_enc->rc_state, "begin");

	switch (sw_event) {
	case DPU_ENC_RC_EVENT_KICKOFF:
		/* cancel delayed off work, if any */
		if (kthread_cancel_delayed_work_sync(
				&dpu_enc->delayed_off_work))
			DPU_DEBUG_ENC(dpu_enc, "sw_event:%d, work cancelled\n",
					sw_event);

		mutex_lock(&dpu_enc->rc_lock);

		/* return if the resource control is already in ON state */
		if (dpu_enc->rc_state == DPU_ENC_RC_STATE_ON) {
			DRM_DEBUG_KMS("id;%u, sw_event:%d, rc in ON state\n",
				      DRMID(drm_enc), sw_event);
			mutex_unlock(&dpu_enc->rc_lock);
			return 0;
		} else if (dpu_enc->rc_state != DPU_ENC_RC_STATE_OFF &&
				dpu_enc->rc_state != DPU_ENC_RC_STATE_IDLE) {
			DRM_DEBUG_KMS("id;%u, sw_event:%d, rc in state %d\n",
				      DRMID(drm_enc), sw_event,
				      dpu_enc->rc_state);
			mutex_unlock(&dpu_enc->rc_lock);
			return -EINVAL;
		}

		if (is_vid_mode && dpu_enc->rc_state == DPU_ENC_RC_STATE_IDLE)
			_dpu_encoder_irq_control(drm_enc, true);
		else
			_dpu_encoder_resource_control_helper(drm_enc, true);

		dpu_enc->rc_state = DPU_ENC_RC_STATE_ON;

		trace_dpu_enc_rc(DRMID(drm_enc), sw_event,
				 dpu_enc->idle_pc_supported, dpu_enc->rc_state,
				 "kickoff");

		mutex_unlock(&dpu_enc->rc_lock);
		break;

	case DPU_ENC_RC_EVENT_FRAME_DONE:
		/*
		 * mutex lock is not used as this event happens at interrupt
		 * context. And locking is not required as, the other events
		 * like KICKOFF and STOP does a wait-for-idle before executing
		 * the resource_control
		 */
		if (dpu_enc->rc_state != DPU_ENC_RC_STATE_ON) {
			DRM_DEBUG_KMS("id:%d, sw_event:%d,rc:%d-unexpected\n",
				      DRMID(drm_enc), sw_event,
				      dpu_enc->rc_state);
			return -EINVAL;
		}

		/*
		 * schedule off work item only when there are no
		 * frames pending
		 */
		if (dpu_crtc_frame_pending(drm_enc->crtc) > 1) {
			DRM_DEBUG_KMS("id:%d skip schedule work\n",
				      DRMID(drm_enc));
			return 0;
		}

		kthread_queue_delayed_work(
			&disp_thread->worker,
			&dpu_enc->delayed_off_work,
			msecs_to_jiffies(dpu_enc->idle_timeout));

		trace_dpu_enc_rc(DRMID(drm_enc), sw_event,
				 dpu_enc->idle_pc_supported, dpu_enc->rc_state,
				 "frame done");
		break;

	case DPU_ENC_RC_EVENT_PRE_STOP:
		/* cancel delayed off work, if any */
		if (kthread_cancel_delayed_work_sync(
				&dpu_enc->delayed_off_work))
			DPU_DEBUG_ENC(dpu_enc, "sw_event:%d, work cancelled\n",
					sw_event);

		mutex_lock(&dpu_enc->rc_lock);

		if (is_vid_mode &&
			  dpu_enc->rc_state == DPU_ENC_RC_STATE_IDLE) {
			_dpu_encoder_irq_control(drm_enc, true);
		}
		/* skip if is already OFF or IDLE, resources are off already */
		else if (dpu_enc->rc_state == DPU_ENC_RC_STATE_OFF ||
				dpu_enc->rc_state == DPU_ENC_RC_STATE_IDLE) {
			DRM_DEBUG_KMS("id:%u, sw_event:%d, rc in %d state\n",
				      DRMID(drm_enc), sw_event,
				      dpu_enc->rc_state);
			mutex_unlock(&dpu_enc->rc_lock);
			return 0;
		}

		dpu_enc->rc_state = DPU_ENC_RC_STATE_PRE_OFF;

		trace_dpu_enc_rc(DRMID(drm_enc), sw_event,
				 dpu_enc->idle_pc_supported, dpu_enc->rc_state,
				 "pre stop");

		mutex_unlock(&dpu_enc->rc_lock);
		break;

	case DPU_ENC_RC_EVENT_STOP:
		mutex_lock(&dpu_enc->rc_lock);

		/* return if the resource control is already in OFF state */
		if (dpu_enc->rc_state == DPU_ENC_RC_STATE_OFF) {
			DRM_DEBUG_KMS("id: %u, sw_event:%d, rc in OFF state\n",
				      DRMID(drm_enc), sw_event);
			mutex_unlock(&dpu_enc->rc_lock);
			return 0;
		} else if (dpu_enc->rc_state == DPU_ENC_RC_STATE_ON) {
			DRM_ERROR("id: %u, sw_event:%d, rc in state %d\n",
				  DRMID(drm_enc), sw_event, dpu_enc->rc_state);
			mutex_unlock(&dpu_enc->rc_lock);
			return -EINVAL;
		}

		/*
		 * expect to arrive here only if in either idle state or pre-off
		 * and in IDLE state the resources are already disabled
		 */
		if (dpu_enc->rc_state == DPU_ENC_RC_STATE_PRE_OFF)
			_dpu_encoder_resource_control_helper(drm_enc, false);

		dpu_enc->rc_state = DPU_ENC_RC_STATE_OFF;

		trace_dpu_enc_rc(DRMID(drm_enc), sw_event,
				 dpu_enc->idle_pc_supported, dpu_enc->rc_state,
				 "stop");

		mutex_unlock(&dpu_enc->rc_lock);
		break;

	case DPU_ENC_RC_EVENT_ENTER_IDLE:
		mutex_lock(&dpu_enc->rc_lock);

		if (dpu_enc->rc_state != DPU_ENC_RC_STATE_ON) {
			DRM_ERROR("id: %u, sw_event:%d, rc:%d !ON state\n",
				  DRMID(drm_enc), sw_event, dpu_enc->rc_state);
			mutex_unlock(&dpu_enc->rc_lock);
			return 0;
		}

		/*
		 * if we are in ON but a frame was just kicked off,
		 * ignore the IDLE event, it's probably a stale timer event
		 */
		if (dpu_enc->frame_busy_mask[0]) {
			DRM_ERROR("id:%u, sw_event:%d, rc:%d frame pending\n",
				  DRMID(drm_enc), sw_event, dpu_enc->rc_state);
			mutex_unlock(&dpu_enc->rc_lock);
			return 0;
		}

		if (is_vid_mode)
			_dpu_encoder_irq_control(drm_enc, false);
		else
			_dpu_encoder_resource_control_helper(drm_enc, false);

		dpu_enc->rc_state = DPU_ENC_RC_STATE_IDLE;

		trace_dpu_enc_rc(DRMID(drm_enc), sw_event,
				 dpu_enc->idle_pc_supported, dpu_enc->rc_state,
				 "idle");

		mutex_unlock(&dpu_enc->rc_lock);
		break;

	default:
		DRM_ERROR("id:%u, unexpected sw_event: %d\n", DRMID(drm_enc),
			  sw_event);
		trace_dpu_enc_rc(DRMID(drm_enc), sw_event,
				 dpu_enc->idle_pc_supported, dpu_enc->rc_state,
				 "error");
		break;
	}

	trace_dpu_enc_rc(DRMID(drm_enc), sw_event,
			 dpu_enc->idle_pc_supported, dpu_enc->rc_state,
			 "end");
	return 0;
}
static void dpu_encoder_virt_mode_set(struct drm_encoder *drm_enc,
				      struct drm_display_mode *mode,
				      struct drm_display_mode *adj_mode)
{
	struct dpu_encoder_virt *dpu_enc;
	struct msm_drm_private *priv;
	struct dpu_kms *dpu_kms;
	struct list_head *connector_list;
	struct drm_connector *conn = NULL, *conn_iter;
	struct dpu_rm_hw_iter pp_iter, ctl_iter;
	struct msm_display_topology topology;
	struct dpu_hw_ctl *hw_ctl[MAX_CHANNELS_PER_ENC] = { NULL };
	int i = 0, ret;

	if (!drm_enc) {
		DPU_ERROR("invalid encoder\n");
		return;
	}

	dpu_enc = to_dpu_encoder_virt(drm_enc);
	DPU_DEBUG_ENC(dpu_enc, "\n");

	priv = drm_enc->dev->dev_private;
	dpu_kms = to_dpu_kms(priv->kms);
	connector_list = &dpu_kms->dev->mode_config.connector_list;

	trace_dpu_enc_mode_set(DRMID(drm_enc));

	list_for_each_entry(conn_iter, connector_list, head)
		if (conn_iter->encoder == drm_enc)
			conn = conn_iter;

	if (!conn) {
		DPU_ERROR_ENC(dpu_enc, "failed to find attached connector\n");
		return;
	} else if (!conn->state) {
		DPU_ERROR_ENC(dpu_enc, "invalid connector state\n");
		return;
	}

	topology = dpu_encoder_get_topology(dpu_enc, dpu_kms, adj_mode);

	/* Reserve dynamic resources now. Indicating non-AtomicTest phase */
	ret = dpu_rm_reserve(&dpu_kms->rm, drm_enc, drm_enc->crtc->state,
			     topology, false);
	if (ret) {
		DPU_ERROR_ENC(dpu_enc,
				"failed to reserve hw resources, %d\n", ret);
		return;
	}

	dpu_rm_init_hw_iter(&pp_iter, drm_enc->base.id, DPU_HW_BLK_PINGPONG);
	for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
		dpu_enc->hw_pp[i] = NULL;
		if (!dpu_rm_get_hw(&dpu_kms->rm, &pp_iter))
			break;
		dpu_enc->hw_pp[i] = (struct dpu_hw_pingpong *) pp_iter.hw;
	}

	dpu_rm_init_hw_iter(&ctl_iter, drm_enc->base.id, DPU_HW_BLK_CTL);
	for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
		if (!dpu_rm_get_hw(&dpu_kms->rm, &ctl_iter))
			break;
		hw_ctl[i] = (struct dpu_hw_ctl *)ctl_iter.hw;
	}

	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (phys) {
			if (!dpu_enc->hw_pp[i]) {
				DPU_ERROR_ENC(dpu_enc, "no pp block assigned"
					     "at idx: %d\n", i);
				return;
			}

			if (!hw_ctl[i]) {
				DPU_ERROR_ENC(dpu_enc, "no ctl block assigned"
					     "at idx: %d\n", i);
				return;
			}

			phys->hw_pp = dpu_enc->hw_pp[i];
			phys->hw_ctl = hw_ctl[i];

			phys->connector = conn->state->connector;
			if (phys->ops.mode_set)
				phys->ops.mode_set(phys, mode, adj_mode);
		}
	}

	dpu_enc->mode_set_complete = true;
}
static void _dpu_encoder_virt_enable_helper(struct drm_encoder *drm_enc)
{
	struct dpu_encoder_virt *dpu_enc = NULL;
	struct msm_drm_private *priv;
	struct dpu_kms *dpu_kms;

	if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private) {
		DPU_ERROR("invalid parameters\n");
		return;
	}

	priv = drm_enc->dev->dev_private;
	dpu_kms = to_dpu_kms(priv->kms);
	if (!dpu_kms) {
		DPU_ERROR("invalid dpu_kms\n");
		return;
	}

	dpu_enc = to_dpu_encoder_virt(drm_enc);
	if (!dpu_enc || !dpu_enc->cur_master) {
		DPU_ERROR("invalid dpu encoder/master\n");
		return;
	}

	if (dpu_enc->cur_master->hw_mdptop &&
			dpu_enc->cur_master->hw_mdptop->ops.reset_ubwc)
		dpu_enc->cur_master->hw_mdptop->ops.reset_ubwc(
				dpu_enc->cur_master->hw_mdptop,
				dpu_kms->catalog);

	_dpu_encoder_update_vsync_source(dpu_enc, &dpu_enc->disp_info);
}
void dpu_encoder_virt_restore(struct drm_encoder *drm_enc)
{
	struct dpu_encoder_virt *dpu_enc = NULL;
	int i;

	if (!drm_enc) {
		DPU_ERROR("invalid encoder\n");
		return;
	}
	dpu_enc = to_dpu_encoder_virt(drm_enc);

	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (phys && (phys != dpu_enc->cur_master) && phys->ops.restore)
			phys->ops.restore(phys);
	}

	if (dpu_enc->cur_master && dpu_enc->cur_master->ops.restore)
		dpu_enc->cur_master->ops.restore(dpu_enc->cur_master);

	_dpu_encoder_virt_enable_helper(drm_enc);
}
static void dpu_encoder_virt_enable(struct drm_encoder *drm_enc)
{
	struct dpu_encoder_virt *dpu_enc = NULL;
	int ret = 0;
	struct drm_display_mode *cur_mode = NULL;

	if (!drm_enc) {
		DPU_ERROR("invalid encoder\n");
		return;
	}
	dpu_enc = to_dpu_encoder_virt(drm_enc);
	cur_mode = &dpu_enc->base.crtc->state->adjusted_mode;

	trace_dpu_enc_enable(DRMID(drm_enc), cur_mode->hdisplay,
			     cur_mode->vdisplay);

	/* always enable slave encoder before master */
	if (dpu_enc->cur_slave && dpu_enc->cur_slave->ops.enable)
		dpu_enc->cur_slave->ops.enable(dpu_enc->cur_slave);

	if (dpu_enc->cur_master && dpu_enc->cur_master->ops.enable)
		dpu_enc->cur_master->ops.enable(dpu_enc->cur_master);

	ret = dpu_encoder_resource_control(drm_enc, DPU_ENC_RC_EVENT_KICKOFF);
	if (ret) {
		DPU_ERROR_ENC(dpu_enc, "dpu resource control failed: %d\n",
				ret);
		return;
	}

	_dpu_encoder_virt_enable_helper(drm_enc);
}
static void dpu_encoder_virt_disable(struct drm_encoder *drm_enc)
{
	struct dpu_encoder_virt *dpu_enc = NULL;
	struct msm_drm_private *priv;
	struct dpu_kms *dpu_kms;
	struct drm_display_mode *mode;
	int i = 0;

	if (!drm_enc) {
		DPU_ERROR("invalid encoder\n");
		return;
	} else if (!drm_enc->dev) {
		DPU_ERROR("invalid dev\n");
		return;
	} else if (!drm_enc->dev->dev_private) {
		DPU_ERROR("invalid dev_private\n");
		return;
	}

	mode = &drm_enc->crtc->state->adjusted_mode;

	dpu_enc = to_dpu_encoder_virt(drm_enc);
	DPU_DEBUG_ENC(dpu_enc, "\n");

	priv = drm_enc->dev->dev_private;
	dpu_kms = to_dpu_kms(priv->kms);

	trace_dpu_enc_disable(DRMID(drm_enc));

	/* wait for idle */
	dpu_encoder_wait_for_event(drm_enc, MSM_ENC_TX_COMPLETE);

	dpu_encoder_resource_control(drm_enc, DPU_ENC_RC_EVENT_PRE_STOP);

	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (phys && phys->ops.disable)
			phys->ops.disable(phys);
	}

	/* after phys waits for frame-done, should be no more frames pending */
	if (atomic_xchg(&dpu_enc->frame_done_timeout, 0)) {
		DPU_ERROR("enc%d timeout pending\n", drm_enc->base.id);
		del_timer_sync(&dpu_enc->frame_done_timer);
	}

	dpu_encoder_resource_control(drm_enc, DPU_ENC_RC_EVENT_STOP);

	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		if (dpu_enc->phys_encs[i])
			dpu_enc->phys_encs[i]->connector = NULL;
	}

	DPU_DEBUG_ENC(dpu_enc, "encoder disabled\n");

	dpu_rm_release(&dpu_kms->rm, drm_enc);
}
static enum dpu_intf dpu_encoder_get_intf(struct dpu_mdss_cfg *catalog,
		enum dpu_intf_type type, u32 controller_id)
{
	int i = 0;

	for (i = 0; i < catalog->intf_count; i++) {
		if (catalog->intf[i].type == type
		    && catalog->intf[i].controller_id == controller_id) {
			return catalog->intf[i].id;
		}
	}

	return INTF_MAX;
}
static void dpu_encoder_vblank_callback(struct drm_encoder *drm_enc,
		struct dpu_encoder_phys *phy_enc)
{
	struct dpu_encoder_virt *dpu_enc = NULL;
	unsigned long lock_flags;

	if (!drm_enc || !phy_enc)
		return;

	DPU_ATRACE_BEGIN("encoder_vblank_callback");
	dpu_enc = to_dpu_encoder_virt(drm_enc);

	spin_lock_irqsave(&dpu_enc->enc_spinlock, lock_flags);
	if (dpu_enc->crtc_vblank_cb)
		dpu_enc->crtc_vblank_cb(dpu_enc->crtc_vblank_cb_data);
	spin_unlock_irqrestore(&dpu_enc->enc_spinlock, lock_flags);

	atomic_inc(&phy_enc->vsync_cnt);
	DPU_ATRACE_END("encoder_vblank_callback");
}
static void dpu_encoder_underrun_callback(struct drm_encoder *drm_enc,
		struct dpu_encoder_phys *phy_enc)
{
	if (!drm_enc || !phy_enc)
		return;

	DPU_ATRACE_BEGIN("encoder_underrun_callback");
	atomic_inc(&phy_enc->underrun_cnt);
	trace_dpu_enc_underrun_cb(DRMID(drm_enc),
				  atomic_read(&phy_enc->underrun_cnt));
	DPU_ATRACE_END("encoder_underrun_callback");
}
void dpu_encoder_register_vblank_callback(struct drm_encoder *drm_enc,
		void (*vbl_cb)(void *), void *vbl_data)
{
	struct dpu_encoder_virt *dpu_enc = to_dpu_encoder_virt(drm_enc);
	unsigned long lock_flags;
	bool enable;
	int i;

	enable = vbl_cb ? true : false;

	if (!drm_enc) {
		DPU_ERROR("invalid encoder\n");
		return;
	}
	trace_dpu_enc_vblank_cb(DRMID(drm_enc), enable);

	spin_lock_irqsave(&dpu_enc->enc_spinlock, lock_flags);
	dpu_enc->crtc_vblank_cb = vbl_cb;
	dpu_enc->crtc_vblank_cb_data = vbl_data;
	spin_unlock_irqrestore(&dpu_enc->enc_spinlock, lock_flags);

	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (phys && phys->ops.control_vblank_irq)
			phys->ops.control_vblank_irq(phys, enable);
	}
}
void dpu_encoder_register_frame_event_callback(struct drm_encoder *drm_enc,
		void (*frame_event_cb)(void *, u32 event),
		void *frame_event_cb_data)
{
	struct dpu_encoder_virt *dpu_enc = to_dpu_encoder_virt(drm_enc);
	unsigned long lock_flags;
	bool enable;

	enable = frame_event_cb ? true : false;

	if (!drm_enc) {
		DPU_ERROR("invalid encoder\n");
		return;
	}
	trace_dpu_enc_frame_event_cb(DRMID(drm_enc), enable);

	spin_lock_irqsave(&dpu_enc->enc_spinlock, lock_flags);
	dpu_enc->crtc_frame_event_cb = frame_event_cb;
	dpu_enc->crtc_frame_event_cb_data = frame_event_cb_data;
	spin_unlock_irqrestore(&dpu_enc->enc_spinlock, lock_flags);
}
static void dpu_encoder_frame_done_callback(
		struct drm_encoder *drm_enc,
		struct dpu_encoder_phys *ready_phys, u32 event)
{
	struct dpu_encoder_virt *dpu_enc = to_dpu_encoder_virt(drm_enc);
	unsigned int i;

	if (event & (DPU_ENCODER_FRAME_EVENT_DONE
			| DPU_ENCODER_FRAME_EVENT_ERROR
			| DPU_ENCODER_FRAME_EVENT_PANEL_DEAD)) {

		if (!dpu_enc->frame_busy_mask[0]) {
			/*
			 * suppress frame_done without waiter,
			 * likely autorefresh
			 */
			trace_dpu_enc_frame_done_cb_not_busy(DRMID(drm_enc),
					event, ready_phys->intf_idx);
			return;
		}

		/* One of the physical encoders has become idle */
		for (i = 0; i < dpu_enc->num_phys_encs; i++) {
			if (dpu_enc->phys_encs[i] == ready_phys) {
				trace_dpu_enc_frame_done_cb(DRMID(drm_enc), i,
						dpu_enc->frame_busy_mask[0]);
				clear_bit(i, dpu_enc->frame_busy_mask);
			}
		}

		if (!dpu_enc->frame_busy_mask[0]) {
			atomic_set(&dpu_enc->frame_done_timeout, 0);
			del_timer(&dpu_enc->frame_done_timer);

			dpu_encoder_resource_control(drm_enc,
					DPU_ENC_RC_EVENT_FRAME_DONE);

			if (dpu_enc->crtc_frame_event_cb)
				dpu_enc->crtc_frame_event_cb(
					dpu_enc->crtc_frame_event_cb_data,
					event);
		}
	} else {
		if (dpu_enc->crtc_frame_event_cb)
			dpu_enc->crtc_frame_event_cb(
				dpu_enc->crtc_frame_event_cb_data, event);
	}
}
static void dpu_encoder_off_work(struct kthread_work *work)
{
	struct dpu_encoder_virt *dpu_enc = container_of(work,
			struct dpu_encoder_virt, delayed_off_work.work);

	if (!dpu_enc) {
		DPU_ERROR("invalid dpu encoder\n");
		return;
	}

	dpu_encoder_resource_control(&dpu_enc->base,
						DPU_ENC_RC_EVENT_ENTER_IDLE);

	dpu_encoder_frame_done_callback(&dpu_enc->base, NULL,
				DPU_ENCODER_FRAME_EVENT_IDLE);
}
/**
 * _dpu_encoder_trigger_flush - trigger flush for a physical encoder
 * drm_enc: Pointer to drm encoder structure
 * phys: Pointer to physical encoder structure
 * extra_flush_bits: Additional bit mask to include in flush trigger
 */
static inline void _dpu_encoder_trigger_flush(struct drm_encoder *drm_enc,
		struct dpu_encoder_phys *phys, uint32_t extra_flush_bits)
{
	struct dpu_hw_ctl *ctl;
	int pending_kickoff_cnt;
	u32 ret = UINT_MAX;

	if (!drm_enc || !phys) {
		DPU_ERROR("invalid argument(s), drm_enc %d, phys_enc %d\n",
				drm_enc != 0, phys != 0);
		return;
	}

	if (!phys->hw_pp) {
		DPU_ERROR("invalid pingpong hw\n");
		return;
	}

	ctl = phys->hw_ctl;
	if (!ctl || !ctl->ops.trigger_flush) {
		DPU_ERROR("missing trigger cb\n");
		return;
	}

	pending_kickoff_cnt = dpu_encoder_phys_inc_pending(phys);

	if (extra_flush_bits && ctl->ops.update_pending_flush)
		ctl->ops.update_pending_flush(ctl, extra_flush_bits);

	ctl->ops.trigger_flush(ctl);

	if (ctl->ops.get_pending_flush)
		ret = ctl->ops.get_pending_flush(ctl);

	trace_dpu_enc_trigger_flush(DRMID(drm_enc), phys->intf_idx,
				    pending_kickoff_cnt, ctl->idx,
				    extra_flush_bits, ret);
}
/**
 * _dpu_encoder_trigger_start - trigger start for a physical encoder
 * phys: Pointer to physical encoder structure
 */
static inline void _dpu_encoder_trigger_start(struct dpu_encoder_phys *phys)
{
	if (!phys) {
		DPU_ERROR("invalid argument(s)\n");
		return;
	}

	if (!phys->hw_pp) {
		DPU_ERROR("invalid pingpong hw\n");
		return;
	}

	if (phys->ops.trigger_start && phys->enable_state != DPU_ENC_DISABLED)
		phys->ops.trigger_start(phys);
}
void dpu_encoder_helper_trigger_start(struct dpu_encoder_phys *phys_enc)
{
	struct dpu_hw_ctl *ctl;

	if (!phys_enc) {
		DPU_ERROR("invalid encoder\n");
		return;
	}

	ctl = phys_enc->hw_ctl;
	if (ctl && ctl->ops.trigger_start) {
		ctl->ops.trigger_start(ctl);
		trace_dpu_enc_trigger_start(DRMID(phys_enc->parent), ctl->idx);
	}
}
static int dpu_encoder_helper_wait_event_timeout(
		int32_t drm_id,
		int32_t hw_id,
		struct dpu_encoder_wait_info *info)
{
	int rc = 0;
	s64 expected_time = ktime_to_ms(ktime_get()) + info->timeout_ms;
	s64 jiffies = msecs_to_jiffies(info->timeout_ms);
	s64 time;

	do {
		rc = wait_event_timeout(*(info->wq),
				atomic_read(info->atomic_cnt) == 0, jiffies);
		time = ktime_to_ms(ktime_get());

		trace_dpu_enc_wait_event_timeout(drm_id, hw_id, rc, time,
						 expected_time,
						 atomic_read(info->atomic_cnt));
	/* If we timed out, counter is valid and time is less, wait again */
	} while (atomic_read(info->atomic_cnt) && (rc == 0) &&
			(time < expected_time));

	return rc;
}
void dpu_encoder_helper_hw_reset(struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_virt *dpu_enc;
	struct dpu_hw_ctl *ctl;
	int rc;

	if (!phys_enc) {
		DPU_ERROR("invalid encoder\n");
		return;
	}
	dpu_enc = to_dpu_encoder_virt(phys_enc->parent);
	ctl = phys_enc->hw_ctl;

	if (!ctl || !ctl->ops.reset)
		return;

	DRM_DEBUG_KMS("id:%u ctl %d reset\n", DRMID(phys_enc->parent),
		      ctl->idx);

	rc = ctl->ops.reset(ctl);
	if (rc) {
		DPU_ERROR_ENC(dpu_enc, "ctl %d reset failure\n", ctl->idx);
		dpu_dbg_dump(false, __func__, true, true);
	}

	phys_enc->enable_state = DPU_ENC_ENABLED;
}
/**
 * _dpu_encoder_kickoff_phys - handle physical encoder kickoff
 *	Iterate through the physical encoders and perform consolidated flush
 *	and/or control start triggering as needed. This is done in the virtual
 *	encoder rather than the individual physical ones in order to handle
 *	use cases that require visibility into multiple physical encoders at
 *	a time.
 * dpu_enc: Pointer to virtual encoder structure
 */
static void _dpu_encoder_kickoff_phys(struct dpu_encoder_virt *dpu_enc)
{
	struct dpu_hw_ctl *ctl;
	uint32_t i, pending_flush;
	unsigned long lock_flags;

	if (!dpu_enc) {
		DPU_ERROR("invalid encoder\n");
		return;
	}

	pending_flush = 0x0;

	/* update pending counts and trigger kickoff ctl flush atomically */
	spin_lock_irqsave(&dpu_enc->enc_spinlock, lock_flags);

	/* don't perform flush/start operations for slave encoders */
	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (!phys || phys->enable_state == DPU_ENC_DISABLED)
			continue;

		ctl = phys->hw_ctl;
		if (!ctl)
			continue;

		if (phys->split_role != ENC_ROLE_SLAVE)
			set_bit(i, dpu_enc->frame_busy_mask);
		if (!phys->ops.needs_single_flush ||
		    !phys->ops.needs_single_flush(phys))
			_dpu_encoder_trigger_flush(&dpu_enc->base, phys, 0x0);
		else if (ctl->ops.get_pending_flush)
			pending_flush |= ctl->ops.get_pending_flush(ctl);
	}

	/* for split flush, combine pending flush masks and send to master */
	if (pending_flush && dpu_enc->cur_master) {
		_dpu_encoder_trigger_flush(
				&dpu_enc->base,
				dpu_enc->cur_master,
				pending_flush);
	}

	_dpu_encoder_trigger_start(dpu_enc->cur_master);

	spin_unlock_irqrestore(&dpu_enc->enc_spinlock, lock_flags);
}
void dpu_encoder_trigger_kickoff_pending(struct drm_encoder *drm_enc)
{
	struct dpu_encoder_virt *dpu_enc;
	struct dpu_encoder_phys *phys;
	unsigned int i;
	struct dpu_hw_ctl *ctl;
	struct msm_display_info *disp_info;

	if (!drm_enc) {
		DPU_ERROR("invalid encoder\n");
		return;
	}
	dpu_enc = to_dpu_encoder_virt(drm_enc);
	disp_info = &dpu_enc->disp_info;

	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		phys = dpu_enc->phys_encs[i];

		if (phys && phys->hw_ctl) {
			ctl = phys->hw_ctl;
			if (ctl->ops.clear_pending_flush)
				ctl->ops.clear_pending_flush(ctl);

			/* update only for command mode primary ctl */
			if ((phys == dpu_enc->cur_master) &&
			   (disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE)
			    && ctl->ops.trigger_pending)
				ctl->ops.trigger_pending(ctl);
		}
	}
}
static u32 _dpu_encoder_calculate_linetime(struct dpu_encoder_virt *dpu_enc,
		struct drm_display_mode *mode)
{
	u64 pclk_rate;
	u32 pclk_period;
	u32 line_time;

	/*
	 * For linetime calculation, only operate on master encoder.
	 */
	if (!dpu_enc->cur_master)
		return 0;

	if (!dpu_enc->cur_master->ops.get_line_count) {
		DPU_ERROR("get_line_count function not defined\n");
		return 0;
	}

	pclk_rate = mode->clock; /* pixel clock in kHz */
	if (pclk_rate == 0) {
		DPU_ERROR("pclk is 0, cannot calculate line time\n");
		return 0;
	}

	pclk_period = DIV_ROUND_UP_ULL(1000000000ull, pclk_rate);
	if (pclk_period == 0) {
		DPU_ERROR("pclk period is 0\n");
		return 0;
	}

	/*
	 * Line time calculation based on Pixel clock and HTOTAL.
	 * Final unit is in ns.
	 */
	line_time = (pclk_period * mode->htotal) / 1000;
	if (line_time == 0) {
		DPU_ERROR("line time calculation is 0\n");
		return 0;
	}

	DPU_DEBUG_ENC(dpu_enc,
			"clk_rate=%lldkHz, clk_period=%d, linetime=%dns\n",
			pclk_rate, pclk_period, line_time);

	return line_time;
}
static int _dpu_encoder_wakeup_time(struct drm_encoder *drm_enc,
		ktime_t *wakeup_time)
{
	struct drm_display_mode *mode;
	struct dpu_encoder_virt *dpu_enc;
	u32 cur_line;
	u32 line_time;
	u32 vtotal, time_to_vsync;
	ktime_t cur_time;

	dpu_enc = to_dpu_encoder_virt(drm_enc);

	if (!drm_enc->crtc || !drm_enc->crtc->state) {
		DPU_ERROR("crtc/crtc state object is NULL\n");
		return -EINVAL;
	}
	mode = &drm_enc->crtc->state->adjusted_mode;

	line_time = _dpu_encoder_calculate_linetime(dpu_enc, mode);
	if (!line_time)
		return -EINVAL;

	cur_line = dpu_enc->cur_master->ops.get_line_count(dpu_enc->cur_master);

	vtotal = mode->vtotal;
	if (cur_line >= vtotal)
		time_to_vsync = line_time * vtotal;
	else
		time_to_vsync = line_time * (vtotal - cur_line);

	if (time_to_vsync == 0) {
		DPU_ERROR("time to vsync should not be zero, vtotal=%d\n",
				vtotal);
		return -EINVAL;
	}

	cur_time = ktime_get();
	*wakeup_time = ktime_add_ns(cur_time, time_to_vsync);

	DPU_DEBUG_ENC(dpu_enc,
			"cur_line=%u vtotal=%u time_to_vsync=%u, cur_time=%lld, wakeup_time=%lld\n",
			cur_line, vtotal, time_to_vsync,
			ktime_to_ms(cur_time),
			ktime_to_ms(*wakeup_time));

	return 0;
}
static void dpu_encoder_vsync_event_handler(struct timer_list *t)
{
	struct dpu_encoder_virt *dpu_enc = from_timer(dpu_enc, t,
			vsync_event_timer);
	struct drm_encoder *drm_enc = &dpu_enc->base;
	struct msm_drm_private *priv;
	struct msm_drm_thread *event_thread;

	if (!drm_enc->dev || !drm_enc->dev->dev_private ||
			!drm_enc->crtc) {
		DPU_ERROR("invalid parameters\n");
		return;
	}

	priv = drm_enc->dev->dev_private;

	if (drm_enc->crtc->index >= ARRAY_SIZE(priv->event_thread)) {
		DPU_ERROR("invalid crtc index\n");
		return;
	}
	event_thread = &priv->event_thread[drm_enc->crtc->index];
	if (!event_thread) {
		DPU_ERROR("event_thread not found for crtc:%d\n",
				drm_enc->crtc->index);
		return;
	}

	del_timer(&dpu_enc->vsync_event_timer);
}
static void dpu_encoder_vsync_event_work_handler(struct kthread_work *work)
{
	struct dpu_encoder_virt *dpu_enc = container_of(work,
			struct dpu_encoder_virt, vsync_event_work);
	ktime_t wakeup_time;

	if (!dpu_enc) {
		DPU_ERROR("invalid dpu encoder\n");
		return;
	}

	if (_dpu_encoder_wakeup_time(&dpu_enc->base, &wakeup_time))
		return;

	trace_dpu_enc_vsync_event_work(DRMID(&dpu_enc->base), wakeup_time);
	mod_timer(&dpu_enc->vsync_event_timer,
			nsecs_to_jiffies(ktime_to_ns(wakeup_time)));
}
void dpu_encoder_prepare_for_kickoff(struct drm_encoder *drm_enc,
		struct dpu_encoder_kickoff_params *params)
{
	struct dpu_encoder_virt *dpu_enc;
	struct dpu_encoder_phys *phys;
	bool needs_hw_reset = false;
	unsigned int i;

	if (!drm_enc || !params) {
		DPU_ERROR("invalid args\n");
		return;
	}
	dpu_enc = to_dpu_encoder_virt(drm_enc);

	trace_dpu_enc_prepare_kickoff(DRMID(drm_enc));

	/* prepare for next kickoff, may include waiting on previous kickoff */
	DPU_ATRACE_BEGIN("enc_prepare_for_kickoff");
	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		phys = dpu_enc->phys_encs[i];
		if (phys) {
			if (phys->ops.prepare_for_kickoff)
				phys->ops.prepare_for_kickoff(phys, params);
			if (phys->enable_state == DPU_ENC_ERR_NEEDS_HW_RESET)
				needs_hw_reset = true;
		}
	}
	DPU_ATRACE_END("enc_prepare_for_kickoff");

	dpu_encoder_resource_control(drm_enc, DPU_ENC_RC_EVENT_KICKOFF);

	/* if any phys needs reset, reset all phys, in-order */
	if (needs_hw_reset) {
		trace_dpu_enc_prepare_kickoff_reset(DRMID(drm_enc));
		for (i = 0; i < dpu_enc->num_phys_encs; i++) {
			phys = dpu_enc->phys_encs[i];
			if (phys && phys->ops.hw_reset)
				phys->ops.hw_reset(phys);
		}
	}
}
void dpu_encoder_kickoff(struct drm_encoder *drm_enc)
{
	struct dpu_encoder_virt *dpu_enc;
	struct dpu_encoder_phys *phys;
	ktime_t wakeup_time;
	unsigned int i;

	if (!drm_enc) {
		DPU_ERROR("invalid encoder\n");
		return;
	}
	DPU_ATRACE_BEGIN("encoder_kickoff");
	dpu_enc = to_dpu_encoder_virt(drm_enc);

	trace_dpu_enc_kickoff(DRMID(drm_enc));

	atomic_set(&dpu_enc->frame_done_timeout,
			DPU_FRAME_DONE_TIMEOUT * 1000 /
			drm_enc->crtc->state->adjusted_mode.vrefresh);
	mod_timer(&dpu_enc->frame_done_timer, jiffies +
		((atomic_read(&dpu_enc->frame_done_timeout) * HZ) / 1000));
	/* All phys encs are ready to go, trigger the kickoff */
	_dpu_encoder_kickoff_phys(dpu_enc);

	/* allow phys encs to handle any post-kickoff business */
	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		phys = dpu_enc->phys_encs[i];
		if (phys && phys->ops.handle_post_kickoff)
			phys->ops.handle_post_kickoff(phys);
	}

	if (dpu_enc->disp_info.intf_type == DRM_MODE_ENCODER_DSI &&
			!_dpu_encoder_wakeup_time(drm_enc, &wakeup_time)) {
		trace_dpu_enc_early_kickoff(DRMID(drm_enc),
					    ktime_to_ms(wakeup_time));
		mod_timer(&dpu_enc->vsync_event_timer,
				nsecs_to_jiffies(ktime_to_ns(wakeup_time)));
	}

	DPU_ATRACE_END("encoder_kickoff");
}
void dpu_encoder_prepare_commit(struct drm_encoder *drm_enc)
{
	struct dpu_encoder_virt *dpu_enc;
	struct dpu_encoder_phys *phys;
	int i;

	if (!drm_enc) {
		DPU_ERROR("invalid encoder\n");
		return;
	}
	dpu_enc = to_dpu_encoder_virt(drm_enc);

	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		phys = dpu_enc->phys_encs[i];
		if (phys && phys->ops.prepare_commit)
			phys->ops.prepare_commit(phys);
	}
}
#ifdef CONFIG_DEBUG_FS
static int _dpu_encoder_status_show(struct seq_file *s, void *data)
{
	struct dpu_encoder_virt *dpu_enc;
	int i;

	if (!s || !s->private)
		return -EINVAL;

	dpu_enc = s->private;

	mutex_lock(&dpu_enc->enc_lock);
	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (!phys)
			continue;

		seq_printf(s, "intf:%d vsync:%8d underrun:%8d ",
				phys->intf_idx - INTF_0,
				atomic_read(&phys->vsync_cnt),
				atomic_read(&phys->underrun_cnt));

		switch (phys->intf_mode) {
		case INTF_MODE_VIDEO:
			seq_puts(s, "mode: video\n");
			break;
		case INTF_MODE_CMD:
			seq_puts(s, "mode: command\n");
			break;
		default:
			seq_puts(s, "mode: ???\n");
			break;
		}
	}
	mutex_unlock(&dpu_enc->enc_lock);

	return 0;
}
static int _dpu_encoder_debugfs_status_open(struct inode *inode,
		struct file *file)
{
	return single_open(file, _dpu_encoder_status_show, inode->i_private);
}
static int _dpu_encoder_init_debugfs(struct drm_encoder *drm_enc)
{
	struct dpu_encoder_virt *dpu_enc;
	struct msm_drm_private *priv;
	struct dpu_kms *dpu_kms;
	int i;

	static const struct file_operations debugfs_status_fops = {
		.open =		_dpu_encoder_debugfs_status_open,
		.read =		seq_read,
		.llseek =	seq_lseek,
		.release =	single_release,
	};

	char name[DPU_NAME_SIZE];

	if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private) {
		DPU_ERROR("invalid encoder or kms\n");
		return -EINVAL;
	}

	dpu_enc = to_dpu_encoder_virt(drm_enc);
	priv = drm_enc->dev->dev_private;
	dpu_kms = to_dpu_kms(priv->kms);

	snprintf(name, DPU_NAME_SIZE, "encoder%u", drm_enc->base.id);

	/* create overall sub-directory for the encoder */
	dpu_enc->debugfs_root = debugfs_create_dir(name,
			drm_enc->dev->primary->debugfs_root);
	if (!dpu_enc->debugfs_root)
		return -ENOMEM;

	/* don't error check these */
	debugfs_create_file("status", 0600,
		dpu_enc->debugfs_root, dpu_enc, &debugfs_status_fops);

	for (i = 0; i < dpu_enc->num_phys_encs; i++)
		if (dpu_enc->phys_encs[i] &&
				dpu_enc->phys_encs[i]->ops.late_register)
			dpu_enc->phys_encs[i]->ops.late_register(
					dpu_enc->phys_encs[i],
					dpu_enc->debugfs_root);

	return 0;
}
static void _dpu_encoder_destroy_debugfs(struct drm_encoder *drm_enc)
{
	struct dpu_encoder_virt *dpu_enc;

	if (!drm_enc)
		return;

	dpu_enc = to_dpu_encoder_virt(drm_enc);
	debugfs_remove_recursive(dpu_enc->debugfs_root);
}
#else
static int _dpu_encoder_init_debugfs(struct drm_encoder *drm_enc)
{
	return 0;
}

static void _dpu_encoder_destroy_debugfs(struct drm_encoder *drm_enc)
{
}
#endif

static int dpu_encoder_late_register(struct drm_encoder *encoder)
{
	return _dpu_encoder_init_debugfs(encoder);
}

static void dpu_encoder_early_unregister(struct drm_encoder *encoder)
{
	_dpu_encoder_destroy_debugfs(encoder);
}
static int dpu_encoder_virt_add_phys_encs(
		u32 display_caps,
		struct dpu_encoder_virt *dpu_enc,
		struct dpu_enc_phys_init_params *params)
{
	struct dpu_encoder_phys *enc = NULL;

	DPU_DEBUG_ENC(dpu_enc, "\n");

	/*
	 * We may create up to NUM_PHYS_ENCODER_TYPES physical encoder types
	 * in this function, check up-front.
	 */
	if (dpu_enc->num_phys_encs + NUM_PHYS_ENCODER_TYPES >=
			ARRAY_SIZE(dpu_enc->phys_encs)) {
		DPU_ERROR_ENC(dpu_enc, "too many physical encoders %d\n",
			  dpu_enc->num_phys_encs);
		return -EINVAL;
	}

	if (display_caps & MSM_DISPLAY_CAP_VID_MODE) {
		enc = dpu_encoder_phys_vid_init(params);

		if (IS_ERR_OR_NULL(enc)) {
			DPU_ERROR_ENC(dpu_enc, "failed to init vid enc: %ld\n",
				PTR_ERR(enc));
			return enc == 0 ? -EINVAL : PTR_ERR(enc);
		}

		dpu_enc->phys_encs[dpu_enc->num_phys_encs] = enc;
		++dpu_enc->num_phys_encs;
	}

	if (display_caps & MSM_DISPLAY_CAP_CMD_MODE) {
		enc = dpu_encoder_phys_cmd_init(params);

		if (IS_ERR_OR_NULL(enc)) {
			DPU_ERROR_ENC(dpu_enc, "failed to init cmd enc: %ld\n",
				PTR_ERR(enc));
			return enc == 0 ? -EINVAL : PTR_ERR(enc);
		}

		dpu_enc->phys_encs[dpu_enc->num_phys_encs] = enc;
		++dpu_enc->num_phys_encs;
	}

	if (params->split_role == ENC_ROLE_SLAVE)
		dpu_enc->cur_slave = enc;
	else
		dpu_enc->cur_master = enc;

	return 0;
}
static const struct dpu_encoder_virt_ops dpu_encoder_parent_ops = {
	.handle_vblank_virt = dpu_encoder_vblank_callback,
	.handle_underrun_virt = dpu_encoder_underrun_callback,
	.handle_frame_done = dpu_encoder_frame_done_callback,
};
static int dpu_encoder_setup_display(struct dpu_encoder_virt *dpu_enc,
				 struct dpu_kms *dpu_kms,
				 struct msm_display_info *disp_info)
{
	int ret = 0;
	int i = 0;
	enum dpu_intf_type intf_type;
	struct dpu_enc_phys_init_params phys_params;

	if (!dpu_enc || !dpu_kms) {
		DPU_ERROR("invalid arg(s), enc %d kms %d\n",
				dpu_enc != 0, dpu_kms != 0);
		return -EINVAL;
	}

	dpu_enc->cur_master = NULL;

	memset(&phys_params, 0, sizeof(phys_params));
	phys_params.dpu_kms = dpu_kms;
	phys_params.parent = &dpu_enc->base;
	phys_params.parent_ops = &dpu_encoder_parent_ops;
	phys_params.enc_spinlock = &dpu_enc->enc_spinlock;

	switch (disp_info->intf_type) {
	case DRM_MODE_ENCODER_DSI:
		intf_type = INTF_DSI;
		break;
	default:
		DPU_ERROR_ENC(dpu_enc, "unsupported display interface type\n");
		return -EINVAL;
	}

	WARN_ON(disp_info->num_of_h_tiles < 1);

	DPU_DEBUG("dsi_info->num_of_h_tiles %d\n", disp_info->num_of_h_tiles);

	if ((disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE) ||
	    (disp_info->capabilities & MSM_DISPLAY_CAP_VID_MODE))
		dpu_enc->idle_pc_supported =
				dpu_kms->catalog->caps->has_idle_pc;

	mutex_lock(&dpu_enc->enc_lock);
	for (i = 0; i < disp_info->num_of_h_tiles && !ret; i++) {
		/*
		 * Left-most tile is at index 0, content is controller id
		 * h_tile_instance_ids[2] = {0, 1}; DSI0 = left, DSI1 = right
		 * h_tile_instance_ids[2] = {1, 0}; DSI1 = left, DSI0 = right
		 */
		u32 controller_id = disp_info->h_tile_instance[i];

		if (disp_info->num_of_h_tiles > 1) {
			if (i == 0)
				phys_params.split_role = ENC_ROLE_MASTER;
			else
				phys_params.split_role = ENC_ROLE_SLAVE;
		} else {
			phys_params.split_role = ENC_ROLE_SOLO;
		}

		DPU_DEBUG("h_tile_instance %d = %d, split_role %d\n",
				i, controller_id, phys_params.split_role);

		phys_params.intf_idx = dpu_encoder_get_intf(dpu_kms->catalog,
				intf_type, controller_id);
		if (phys_params.intf_idx == INTF_MAX) {
			DPU_ERROR_ENC(dpu_enc, "could not get intf: type %d, id %d\n",
					intf_type, controller_id);
			ret = -EINVAL;
		}

		if (!ret) {
			ret = dpu_encoder_virt_add_phys_encs(
					disp_info->capabilities,
					dpu_enc, &phys_params);
			if (ret)
				DPU_ERROR_ENC(dpu_enc, "failed to add phys encs\n");
		}
	}

	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (phys) {
			atomic_set(&phys->vsync_cnt, 0);
			atomic_set(&phys->underrun_cnt, 0);
		}
	}
	mutex_unlock(&dpu_enc->enc_lock);

	return ret;
}
static void dpu_encoder_frame_done_timeout(struct timer_list *t)
{
	struct dpu_encoder_virt *dpu_enc = from_timer(dpu_enc, t,
			frame_done_timer);
	struct drm_encoder *drm_enc = &dpu_enc->base;
	struct msm_drm_private *priv;
	u32 event;

	if (!drm_enc->dev || !drm_enc->dev->dev_private) {
		DPU_ERROR("invalid parameters\n");
		return;
	}
	priv = drm_enc->dev->dev_private;

	if (!dpu_enc->frame_busy_mask[0] || !dpu_enc->crtc_frame_event_cb) {
		DRM_DEBUG_KMS("id:%u invalid timeout frame_busy_mask=%lu\n",
			      DRMID(drm_enc), dpu_enc->frame_busy_mask[0]);
		return;
	} else if (!atomic_xchg(&dpu_enc->frame_done_timeout, 0)) {
		DRM_DEBUG_KMS("id:%u invalid timeout\n", DRMID(drm_enc));
		return;
	}

	DPU_ERROR_ENC(dpu_enc, "frame done timeout\n");

	event = DPU_ENCODER_FRAME_EVENT_ERROR;
	trace_dpu_enc_frame_done_timeout(DRMID(drm_enc), event);
	dpu_enc->crtc_frame_event_cb(dpu_enc->crtc_frame_event_cb_data, event);
}
static const struct drm_encoder_helper_funcs dpu_encoder_helper_funcs = {
	.mode_set = dpu_encoder_virt_mode_set,
	.disable = dpu_encoder_virt_disable,
	.enable = dpu_kms_encoder_enable,
	.atomic_check = dpu_encoder_virt_atomic_check,

	/* This is called by dpu_kms_encoder_enable */
	.commit = dpu_encoder_virt_enable,
};

static const struct drm_encoder_funcs dpu_encoder_funcs = {
	.destroy = dpu_encoder_destroy,
	.late_register = dpu_encoder_late_register,
	.early_unregister = dpu_encoder_early_unregister,
};
int dpu_encoder_setup(struct drm_device *dev, struct drm_encoder *enc,
		struct msm_display_info *disp_info)
{
	struct msm_drm_private *priv = dev->dev_private;
	struct dpu_kms *dpu_kms = to_dpu_kms(priv->kms);
	struct drm_encoder *drm_enc = NULL;
	struct dpu_encoder_virt *dpu_enc = NULL;
	int ret = 0;

	dpu_enc = to_dpu_encoder_virt(enc);

	mutex_init(&dpu_enc->enc_lock);
	ret = dpu_encoder_setup_display(dpu_enc, dpu_kms, disp_info);
	if (ret)
		goto fail;

	spin_lock_init(&dpu_enc->enc_spinlock);

	atomic_set(&dpu_enc->frame_done_timeout, 0);
	timer_setup(&dpu_enc->frame_done_timer,
			dpu_encoder_frame_done_timeout, 0);

	if (disp_info->intf_type == DRM_MODE_ENCODER_DSI)
		timer_setup(&dpu_enc->vsync_event_timer,
				dpu_encoder_vsync_event_handler,
				0);

	mutex_init(&dpu_enc->rc_lock);
	kthread_init_delayed_work(&dpu_enc->delayed_off_work,
			dpu_encoder_off_work);
	dpu_enc->idle_timeout = IDLE_TIMEOUT;

	kthread_init_work(&dpu_enc->vsync_event_work,
			dpu_encoder_vsync_event_work_handler);

	memcpy(&dpu_enc->disp_info, disp_info, sizeof(*disp_info));

	DPU_DEBUG_ENC(dpu_enc, "created\n");

	return ret;

fail:
	DPU_ERROR("failed to create encoder\n");
	if (drm_enc)
		dpu_encoder_destroy(drm_enc);

	return ret;
}
struct drm_encoder *dpu_encoder_init(struct drm_device *dev,
		int drm_enc_mode)
{
	struct dpu_encoder_virt *dpu_enc = NULL;
	int rc = 0;

	dpu_enc = devm_kzalloc(dev->dev, sizeof(*dpu_enc), GFP_KERNEL);
	if (!dpu_enc)
		return ERR_PTR(-ENOMEM);

	rc = drm_encoder_init(dev, &dpu_enc->base, &dpu_encoder_funcs,
			drm_enc_mode, NULL);
	if (rc) {
		devm_kfree(dev->dev, dpu_enc);
		return ERR_PTR(rc);
	}

	drm_encoder_helper_add(&dpu_enc->base, &dpu_encoder_helper_funcs);

	return &dpu_enc->base;
}
int dpu_encoder_wait_for_event(struct drm_encoder *drm_enc,
	enum msm_event_wait event)
{
	int (*fn_wait)(struct dpu_encoder_phys *phys_enc) = NULL;
	struct dpu_encoder_virt *dpu_enc = NULL;
	int i, ret = 0;

	if (!drm_enc) {
		DPU_ERROR("invalid encoder\n");
		return -EINVAL;
	}
	dpu_enc = to_dpu_encoder_virt(drm_enc);
	DPU_DEBUG_ENC(dpu_enc, "\n");

	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		switch (event) {
		case MSM_ENC_COMMIT_DONE:
			fn_wait = phys->ops.wait_for_commit_done;
			break;
		case MSM_ENC_TX_COMPLETE:
			fn_wait = phys->ops.wait_for_tx_complete;
			break;
		case MSM_ENC_VBLANK:
			fn_wait = phys->ops.wait_for_vblank;
			break;
		default:
			DPU_ERROR_ENC(dpu_enc, "unknown wait event %d\n",
					event);
			return -EINVAL;
		}

		if (phys && fn_wait) {
			DPU_ATRACE_BEGIN("wait_for_completion_event");
			ret = fn_wait(phys);
			DPU_ATRACE_END("wait_for_completion_event");
			if (ret)
				return ret;
		}
	}

	return ret;
}
enum dpu_intf_mode dpu_encoder_get_intf_mode(struct drm_encoder *encoder)
{
	struct dpu_encoder_virt *dpu_enc = NULL;
	int i;

	if (!encoder) {
		DPU_ERROR("invalid encoder\n");
		return INTF_MODE_NONE;
	}
	dpu_enc = to_dpu_encoder_virt(encoder);

	if (dpu_enc->cur_master)
		return dpu_enc->cur_master->intf_mode;

	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (phys)
			return phys->intf_mode;
	}

	return INTF_MODE_NONE;
}