Diffstat (limited to 'drivers/gpu/drm/msm/disp/mdp5/mdp5_crtc.c')
 drivers/gpu/drm/msm/disp/mdp5/mdp5_crtc.c | 51 +++++++++++++++++++++++++++++------------------------
 1 file changed, 27 insertions(+), 24 deletions(-)
diff --git a/drivers/gpu/drm/msm/disp/mdp5/mdp5_crtc.c b/drivers/gpu/drm/msm/disp/mdp5/mdp5_crtc.c
index e86421c69bd1..373ae7d9bf01 100644
--- a/drivers/gpu/drm/msm/disp/mdp5/mdp5_crtc.c
+++ b/drivers/gpu/drm/msm/disp/mdp5/mdp5_crtc.c
@@ -13,6 +13,7 @@
#include <drm/drm_crtc.h>
#include <drm/drm_flip_work.h>
#include <drm/drm_fourcc.h>
+#include <drm/drm_managed.h>
#include <drm/drm_probe_helper.h>
#include <drm/drm_vblank.h>
@@ -168,18 +169,15 @@ static void unref_cursor_worker(struct drm_flip_work *work, void *val)
struct mdp5_kms *mdp5_kms = get_kms(&mdp5_crtc->base);
struct msm_kms *kms = &mdp5_kms->base.base;
- msm_gem_unpin_iova(val, kms->aspace);
+ msm_gem_unpin_iova(val, kms->vm);
drm_gem_object_put(val);
}
-static void mdp5_crtc_destroy(struct drm_crtc *crtc)
+static void mdp5_crtc_flip_cleanup(struct drm_device *dev, void *ptr)
{
- struct mdp5_crtc *mdp5_crtc = to_mdp5_crtc(crtc);
+ struct mdp5_crtc *mdp5_crtc = ptr;
- drm_crtc_cleanup(crtc);
drm_flip_work_cleanup(&mdp5_crtc->unref_cursor_work);
-
- kfree(mdp5_crtc);
}
static inline u32 mdp5_lm_use_fg_alpha_mask(enum mdp_mixer_stage_id stage)
@@ -218,7 +216,7 @@ static void blend_setup(struct drm_crtc *crtc)
struct mdp5_kms *mdp5_kms = get_kms(crtc);
struct drm_plane *plane;
struct mdp5_plane_state *pstate, *pstates[STAGE_MAX + 1] = {NULL};
- const struct mdp_format *format;
+ const struct msm_format *format;
struct mdp5_hw_mixer *mixer = pipeline->mixer;
uint32_t lm = mixer->lm;
struct mdp5_hw_mixer *r_mixer = pipeline->r_mixer;
@@ -276,7 +274,7 @@ static void blend_setup(struct drm_crtc *crtc)
ctl_blend_flags |= MDP5_CTL_BLEND_OP_FLAG_BORDER_OUT;
DBG("Border Color is enabled");
} else if (plane_cnt) {
- format = to_mdp_format(msm_framebuffer_format(pstates[STAGE_BASE]->base.fb));
+ format = msm_framebuffer_format(pstates[STAGE_BASE]->base.fb);
if (format->alpha_enable)
bg_alpha_enabled = true;
@@ -287,8 +285,7 @@ static void blend_setup(struct drm_crtc *crtc)
if (!pstates[i])
continue;
- format = to_mdp_format(
- msm_framebuffer_format(pstates[i]->base.fb));
+ format = msm_framebuffer_format(pstates[i]->base.fb);
plane = pstates[i]->base.plane;
blend_op = MDP5_LM_BLEND_OP_MODE_FG_ALPHA(FG_CONST) |
MDP5_LM_BLEND_OP_MODE_BG_ALPHA(BG_CONST);
@@ -996,7 +993,7 @@ static int mdp5_crtc_cursor_set(struct drm_crtc *crtc,
if (!cursor_bo)
return -ENOENT;
- ret = msm_gem_get_and_pin_iova(cursor_bo, kms->aspace,
+ ret = msm_gem_get_and_pin_iova(cursor_bo, kms->vm,
&mdp5_crtc->cursor.iova);
if (ret) {
drm_gem_object_put(cursor_bo);
@@ -1139,12 +1136,14 @@ static void mdp5_crtc_reset(struct drm_crtc *crtc)
if (crtc->state)
mdp5_crtc_destroy_state(crtc, crtc->state);
- __drm_atomic_helper_crtc_reset(crtc, &mdp5_cstate->base);
+ if (mdp5_cstate)
+ __drm_atomic_helper_crtc_reset(crtc, &mdp5_cstate->base);
+ else
+ __drm_atomic_helper_crtc_reset(crtc, NULL);
}
static const struct drm_crtc_funcs mdp5_crtc_no_lm_cursor_funcs = {
.set_config = drm_atomic_helper_set_config,
- .destroy = mdp5_crtc_destroy,
.page_flip = drm_atomic_helper_page_flip,
.reset = mdp5_crtc_reset,
.atomic_duplicate_state = mdp5_crtc_duplicate_state,
@@ -1158,7 +1157,6 @@ static const struct drm_crtc_funcs mdp5_crtc_no_lm_cursor_funcs = {
static const struct drm_crtc_funcs mdp5_crtc_funcs = {
.set_config = drm_atomic_helper_set_config,
- .destroy = mdp5_crtc_destroy,
.page_flip = drm_atomic_helper_page_flip,
.reset = mdp5_crtc_reset,
.atomic_duplicate_state = mdp5_crtc_duplicate_state,
@@ -1198,7 +1196,7 @@ static void mdp5_crtc_vblank_irq(struct mdp_irq *irq, uint32_t irqstatus)
}
if (pending & PENDING_CURSOR)
- drm_flip_work_commit(&mdp5_crtc->unref_cursor_work, priv->wq);
+ drm_flip_work_commit(&mdp5_crtc->unref_cursor_work, priv->kms->wq);
}
static void mdp5_crtc_err_irq(struct mdp_irq *irq, uint32_t irqstatus)
@@ -1236,6 +1234,7 @@ static void mdp5_crtc_wait_for_flush_done(struct drm_crtc *crtc)
struct mdp5_crtc *mdp5_crtc = to_mdp5_crtc(crtc);
struct mdp5_crtc_state *mdp5_cstate = to_mdp5_crtc_state(crtc->state);
struct mdp5_ctl *ctl = mdp5_cstate->ctl;
+ wait_queue_head_t *queue = drm_crtc_vblank_waitqueue(crtc);
int ret;
/* Should not call this function if crtc is disabled. */
@@ -1246,7 +1245,7 @@ static void mdp5_crtc_wait_for_flush_done(struct drm_crtc *crtc)
if (ret)
return;
- ret = wait_event_timeout(dev->vblank[drm_crtc_index(crtc)].queue,
+ ret = wait_event_timeout(*queue,
((mdp5_ctl_get_commit_status(ctl) &
mdp5_crtc->flushed_mask) == 0),
msecs_to_jiffies(50));
@@ -1324,10 +1323,16 @@ struct drm_crtc *mdp5_crtc_init(struct drm_device *dev,
{
struct drm_crtc *crtc = NULL;
struct mdp5_crtc *mdp5_crtc;
+ int ret;
- mdp5_crtc = kzalloc(sizeof(*mdp5_crtc), GFP_KERNEL);
- if (!mdp5_crtc)
- return ERR_PTR(-ENOMEM);
+ mdp5_crtc = drmm_crtc_alloc_with_planes(dev, struct mdp5_crtc, base,
+ plane, cursor_plane,
+ cursor_plane ?
+ &mdp5_crtc_no_lm_cursor_funcs :
+ &mdp5_crtc_funcs,
+ NULL);
+ if (IS_ERR(mdp5_crtc))
+ return ERR_CAST(mdp5_crtc);
crtc = &mdp5_crtc->base;
@@ -1343,13 +1348,11 @@ struct drm_crtc *mdp5_crtc_init(struct drm_device *dev,
mdp5_crtc->lm_cursor_enabled = cursor_plane ? false : true;
- drm_crtc_init_with_planes(dev, crtc, plane, cursor_plane,
- cursor_plane ?
- &mdp5_crtc_no_lm_cursor_funcs :
- &mdp5_crtc_funcs, NULL);
-
drm_flip_work_init(&mdp5_crtc->unref_cursor_work,
"unref cursor", unref_cursor_worker);
+ ret = drmm_add_action_or_reset(dev, mdp5_crtc_flip_cleanup, mdp5_crtc);
+ if (ret)
+ return ERR_PTR(ret);
drm_crtc_helper_add(crtc, &mdp5_crtc_helper_funcs);
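
For reference, a minimal sketch of the drm-managed lifetime pattern this patch switches to: drmm_crtc_alloc_with_planes() combines the allocation with the CRTC registration, so freeing the structure and drm_crtc_cleanup() happen automatically on device release and no .destroy callback is needed, while any extra teardown (here the cursor flip work) is registered with drmm_add_action_or_reset(). The foo_crtc names below are hypothetical and used only for illustration; this is not the mdp5 code itself.

/*
 * Sketch of drm-managed CRTC allocation plus a managed cleanup action.
 * All "foo_*" identifiers are illustrative placeholders.
 */
#include <linux/err.h>

#include <drm/drm_crtc.h>
#include <drm/drm_flip_work.h>
#include <drm/drm_managed.h>

struct foo_crtc {
	struct drm_crtc base;
	struct drm_flip_work unref_cursor_work;
};

/* Runs automatically when the drm_device is released. */
static void foo_crtc_flip_cleanup(struct drm_device *dev, void *ptr)
{
	struct foo_crtc *foo_crtc = ptr;

	drm_flip_work_cleanup(&foo_crtc->unref_cursor_work);
}

static struct drm_crtc *foo_crtc_init(struct drm_device *dev,
				      struct drm_plane *plane,
				      struct drm_plane *cursor_plane,
				      const struct drm_crtc_funcs *funcs,
				      drm_flip_func_t unref_worker)
{
	struct foo_crtc *foo_crtc;
	int ret;

	/*
	 * Allocation and CRTC registration in one step; the memory and
	 * drm_crtc_cleanup() are managed by the drm_device, so the
	 * drm_crtc_funcs need no .destroy hook.
	 */
	foo_crtc = drmm_crtc_alloc_with_planes(dev, struct foo_crtc, base,
					       plane, cursor_plane,
					       funcs, NULL);
	if (IS_ERR(foo_crtc))
		return ERR_CAST(foo_crtc);

	drm_flip_work_init(&foo_crtc->unref_cursor_work,
			   "unref cursor", unref_worker);

	/*
	 * Tie the flip-work teardown to the device lifetime; if registering
	 * the action fails, it is invoked immediately ("or_reset"), so the
	 * work is cleaned up on the error path as well.
	 */
	ret = drmm_add_action_or_reset(dev, foo_crtc_flip_cleanup, foo_crtc);
	if (ret)
		return ERR_PTR(ret);

	return &foo_crtc->base;
}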