Lines matching defs:dcrtc (every hit on the struct armada_crtc pointer dcrtc; each line is prefixed with its source line number)
81 armada_drm_crtc_update_regs(struct armada_crtc *dcrtc, struct armada_regs *regs)
84 void __iomem *reg = dcrtc->base + regs->offset;
95 static void armada_drm_crtc_update(struct armada_crtc *dcrtc, bool enable)
99 dumb_ctrl = dcrtc->cfg_dumb_ctrl;
117 dcrtc->base + LCD_SPU_DUMB_CTRL);
122 struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
129 dcrtc->event = event;
174 struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
186 if (!dcrtc->variant->has_spu_adv_reg &&
201 struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
219 ret = dcrtc->variant->compute_clock(dcrtc, adj, NULL);
227 static void armada_drm_crtc_disable_irq(struct armada_crtc *dcrtc, u32 mask)
229 if (dcrtc->irq_ena & mask) {
230 dcrtc->irq_ena &= ~mask;
231 writel(dcrtc->irq_ena, dcrtc->base + LCD_SPU_IRQ_ENA);
235 static void armada_drm_crtc_enable_irq(struct armada_crtc *dcrtc, u32 mask)
237 if ((dcrtc->irq_ena & mask) != mask) {
238 dcrtc->irq_ena |= mask;
239 writel(dcrtc->irq_ena, dcrtc->base + LCD_SPU_IRQ_ENA);
240 if (readl_relaxed(dcrtc->base + LCD_SPU_IRQ_ISR) & mask)
241 writel(0, dcrtc->base + LCD_SPU_IRQ_ISR);
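
The two helpers at 227-241 keep a software copy of the interrupt-enable register: bits are added to or removed from dcrtc->irq_ena, LCD_SPU_IRQ_ENA is rewritten only when the mask actually changes, and on enable any already-latched status bit is cleared so a stale interrupt does not fire immediately. A minimal, self-contained sketch of that cached-mask pattern follows; the irq_ctx struct and the IRQ_ENA/IRQ_ISR offsets are placeholders for this sketch, not the driver's layout, and as in the driver the caller is expected to hold the lock.

#include <linux/io.h>
#include <linux/spinlock.h>

struct irq_ctx {
        void __iomem *base;     /* MMIO base of the display block */
        spinlock_t lock;        /* serialises mask updates with the handler */
        u32 ena;                /* software copy of the enable register */
};

#define IRQ_ENA 0x1c0           /* placeholder offsets for this sketch */
#define IRQ_ISR 0x1c4

/* Callers hold ctx->lock, as the driver's callers hold dcrtc->irq_lock. */
static void ctx_enable_irq(struct irq_ctx *ctx, u32 mask)
{
        if ((ctx->ena & mask) != mask) {
                ctx->ena |= mask;
                writel(ctx->ena, ctx->base + IRQ_ENA);
                /* clear a latched status bit so we do not take a stale IRQ */
                if (readl_relaxed(ctx->base + IRQ_ISR) & mask)
                        writel(0, ctx->base + IRQ_ISR);
        }
}

static void ctx_disable_irq(struct irq_ctx *ctx, u32 mask)
{
        if (ctx->ena & mask) {
                ctx->ena &= ~mask;
                writel(ctx->ena, ctx->base + IRQ_ENA);
        }
}
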
245 static void armada_drm_crtc_irq(struct armada_crtc *dcrtc, u32 stat)
248 void __iomem *base = dcrtc->base;
251 DRM_ERROR("video underflow on crtc %u\n", dcrtc->num);
253 DRM_ERROR("graphics underflow on crtc %u\n", dcrtc->num);
256 drm_crtc_handle_vblank(&dcrtc->crtc);
258 spin_lock(&dcrtc->irq_lock);
259 if (stat & GRA_FRAME_IRQ && dcrtc->interlaced) {
263 writel_relaxed(dcrtc->v[i].spu_v_porch, base + LCD_SPU_V_PORCH);
264 writel_relaxed(dcrtc->v[i].spu_v_h_total,
269 val |= dcrtc->v[i].spu_adv_reg;
273 if (stat & dcrtc->irq_ena & DUMB_FRAMEDONE) {
274 if (dcrtc->update_pending) {
275 armada_drm_crtc_update_regs(dcrtc, dcrtc->regs);
276 dcrtc->update_pending = false;
278 if (dcrtc->cursor_update) {
279 writel_relaxed(dcrtc->cursor_hw_pos,
281 writel_relaxed(dcrtc->cursor_hw_sz,
287 dcrtc->cursor_update = false;
289 armada_drm_crtc_disable_irq(dcrtc, DUMB_FRAMEDONE_ENA);
291 spin_unlock(&dcrtc->irq_lock);
293 if (stat & VSYNC_IRQ && !dcrtc->update_pending) {
294 event = xchg(&dcrtc->event, NULL);
296 spin_lock(&dcrtc->crtc.dev->event_lock);
297 drm_crtc_send_vblank_event(&dcrtc->crtc, event);
298 spin_unlock(&dcrtc->crtc.dev->event_lock);
299 drm_crtc_vblank_put(&dcrtc->crtc);
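
Lines 293-299 are the deferred page-flip completion path: the pending event is claimed atomically with xchg() so it is delivered exactly once, sent under the device's event_lock, and the vblank reference taken when the flip was queued is dropped. A sketch of that delivery step, assuming the event pointer was stashed in a driver field (dcrtc->event in the hits above) and that a drm_crtc_vblank_get() was done when the event was armed:

#include <linux/atomic.h>
#include <drm/drm_crtc.h>
#include <drm/drm_device.h>
#include <drm/drm_vblank.h>

/* 'pending' would be something like &dcrtc->event in the driver. */
static void send_pending_flip_event(struct drm_crtc *crtc,
                                    struct drm_pending_vblank_event **pending)
{
        struct drm_pending_vblank_event *event;

        /* claim the event atomically so only one path sends it */
        event = xchg(pending, NULL);
        if (!event)
                return;

        spin_lock(&crtc->dev->event_lock);
        drm_crtc_send_vblank_event(crtc, event);
        spin_unlock(&crtc->dev->event_lock);

        /* balance the drm_crtc_vblank_get() taken when the flip was queued */
        drm_crtc_vblank_put(crtc);
}
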
306 struct armada_crtc *dcrtc = arg;
307 u32 v, stat = readl_relaxed(dcrtc->base + LCD_SPU_IRQ_ISR);
314 writel_relaxed(0, dcrtc->base + LCD_SPU_IRQ_ISR);
316 trace_armada_drm_irq(&dcrtc->crtc, stat);
319 v = stat & dcrtc->irq_ena;
322 armada_drm_crtc_irq(dcrtc, stat);
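
The hard interrupt handler at 306-322 reads the latched status, acknowledges everything it saw by writing zero back to LCD_SPU_IRQ_ISR, and only then dispatches the bits that are actually enabled. The same read, ack, dispatch shape, reusing the irq_ctx layout from the sketch above (handle_events stands in for the per-event work that armada_drm_crtc_irq does):

#include <linux/interrupt.h>
#include <linux/io.h>

static void handle_events(struct irq_ctx *ctx, u32 stat);  /* per-event work */

static irqreturn_t display_irq(int irq, void *arg)
{
        struct irq_ctx *ctx = arg;
        u32 stat = readl_relaxed(ctx->base + IRQ_ISR);

        /* acknowledge everything we observed before handling it */
        writel_relaxed(0, ctx->base + IRQ_ISR);

        /* only bits we enabled count as ours */
        if (!(stat & ctx->ena))
                return IRQ_NONE;

        handle_events(ctx, stat);
        return IRQ_HANDLED;
}
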
332 struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
350 dcrtc->variant->compute_clock(dcrtc, adj, &sclk);
354 spin_lock_irqsave(&dcrtc->irq_lock, flags);
356 dcrtc->interlaced = interlaced;
358 dcrtc->v[1].spu_v_h_total = adj->crtc_vtotal << 16 |
360 dcrtc->v[1].spu_v_porch = tm << 16 | bm;
362 dcrtc->v[1].spu_adv_reg = val << 20 | val | ADV_VSYNCOFFEN;
367 dcrtc->v[0].spu_adv_reg = val << 20 | val | ADV_VSYNCOFFEN;
368 dcrtc->v[0].spu_v_h_total = dcrtc->v[1].spu_v_h_total +
370 dcrtc->v[0].spu_v_porch = dcrtc->v[1].spu_v_porch + 1;
372 dcrtc->v[0] = dcrtc->v[1];
379 armada_reg_queue_set(regs, i, dcrtc->v[0].spu_v_porch, LCD_SPU_V_PORCH);
380 armada_reg_queue_set(regs, i, dcrtc->v[0].spu_v_h_total,
383 if (dcrtc->variant->has_spu_adv_reg)
384 armada_reg_queue_mod(regs, i, dcrtc->v[0].spu_adv_reg,
410 armada_drm_crtc_update_regs(dcrtc, regs);
411 spin_unlock_irqrestore(&dcrtc->irq_lock, flags);
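
The register-list machinery referenced throughout (armada_drm_crtc_update_regs at 81-84, the armada_reg_queue_set/mod calls at 379-384, the apply at 410) batches register writes into an array that is later replayed against dcrtc->base, either directly or from the frame-done interrupt. A stripped-down sketch of that batched-write pattern; the real struct armada_regs also carries a mask so that armada_reg_queue_mod can do read-modify-write, which this sketch omits:

#include <linux/io.h>

struct reg_write {
        u32 offset;                     /* REG_QUEUE_END terminates the list */
        u32 val;
};

#define REG_QUEUE_END   (~0u)

static void queue_reg(struct reg_write *q, unsigned int *i, u32 val, u32 offset)
{
        q[*i].offset = offset;
        q[*i].val = val;
        (*i)++;
}

static void queue_end(struct reg_write *q, unsigned int i)
{
        q[i].offset = REG_QUEUE_END;
}

static void apply_reg_queue(void __iomem *base, const struct reg_write *q)
{
        for (; q->offset != REG_QUEUE_END; q++)
                writel_relaxed(q->val, base + q->offset);
}
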
435 struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
442 dcrtc->regs_idx = 0;
443 dcrtc->regs = dcrtc->atomic_regs;
451 struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
455 armada_reg_queue_end(dcrtc->regs, dcrtc->regs_idx);
462 dcrtc->update_pending = true;
464 spin_lock_irq(&dcrtc->irq_lock);
465 armada_drm_crtc_enable_irq(dcrtc, DUMB_FRAMEDONE_ENA);
466 spin_unlock_irq(&dcrtc->irq_lock);
468 spin_lock_irq(&dcrtc->irq_lock);
469 armada_drm_crtc_update_regs(dcrtc, dcrtc->regs);
470 spin_unlock_irq(&dcrtc->irq_lock);
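
atomic_flush (451-470) terminates the register list built up during the commit and then takes one of two paths: if the update has to wait for the hardware, update_pending is set and the frame-done interrupt is enabled so the handler at 273-276 applies dcrtc->regs at the next frame boundary; otherwise the list is written immediately under the IRQ lock. A condensed sketch of that decision; must_defer stands in for the driver's actual condition, which is not visible in these hits:

#include <linux/spinlock.h>
#include <linux/types.h>

struct flush_ctx {
        spinlock_t irq_lock;
        bool update_pending;            /* consumed by the frame-done IRQ */
        struct reg_write *regs;         /* queued writes, as in the sketch above */
};

/* stand-ins for the driver helpers shown elsewhere in the listing */
static void apply_regs(struct flush_ctx *ctx, struct reg_write *regs);
static void enable_framedone_irq(struct flush_ctx *ctx);

static void commit_regs(struct flush_ctx *ctx, bool must_defer)
{
        if (must_defer) {
                /* let the interrupt handler pick the list up at frame done */
                ctx->update_pending = true;
                spin_lock_irq(&ctx->irq_lock);
                enable_framedone_irq(ctx);
                spin_unlock_irq(&ctx->irq_lock);
        } else {
                spin_lock_irq(&ctx->irq_lock);
                apply_regs(ctx, ctx->regs);
                spin_unlock_irq(&ctx->irq_lock);
        }
}
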
479 struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
488 armada_drm_crtc_update(dcrtc, false);
495 if (dcrtc->variant->disable)
496 dcrtc->variant->disable(dcrtc);
517 struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
527 if (dcrtc->variant->enable)
528 dcrtc->variant->enable(dcrtc, &crtc->state->adjusted_mode);
530 armada_drm_crtc_update(dcrtc, true);
601 static int armada_drm_crtc_cursor_update(struct armada_crtc *dcrtc, bool reload)
603 uint32_t xoff, xscr, w = dcrtc->cursor_w, s;
604 uint32_t yoff, yscr, h = dcrtc->cursor_h;
611 if (dcrtc->cursor_x < 0) {
612 xoff = -dcrtc->cursor_x;
615 } else if (dcrtc->cursor_x + w > dcrtc->crtc.mode.hdisplay) {
617 xscr = dcrtc->cursor_x;
618 w = max_t(int, dcrtc->crtc.mode.hdisplay - dcrtc->cursor_x, 0);
621 xscr = dcrtc->cursor_x;
624 if (dcrtc->cursor_y < 0) {
625 yoff = -dcrtc->cursor_y;
628 } else if (dcrtc->cursor_y + h > dcrtc->crtc.mode.vdisplay) {
630 yscr = dcrtc->cursor_y;
631 h = max_t(int, dcrtc->crtc.mode.vdisplay - dcrtc->cursor_y, 0);
634 yscr = dcrtc->cursor_y;
638 s = dcrtc->cursor_w;
639 if (dcrtc->interlaced) {
645 if (!dcrtc->cursor_obj || !h || !w) {
646 spin_lock_irq(&dcrtc->irq_lock);
647 dcrtc->cursor_update = false;
648 armada_updatel(0, CFG_HWC_ENA, dcrtc->base + LCD_SPU_DMA_CTRL0);
649 spin_unlock_irq(&dcrtc->irq_lock);
653 spin_lock_irq(&dcrtc->irq_lock);
654 para1 = readl_relaxed(dcrtc->base + LCD_SPU_SRAM_PARA1);
656 dcrtc->base + LCD_SPU_SRAM_PARA1);
657 spin_unlock_irq(&dcrtc->irq_lock);
664 armada_drm_crtc_cursor_tran(dcrtc->base);
668 if (dcrtc->cursor_hw_sz != (h << 16 | w)) {
669 spin_lock_irq(&dcrtc->irq_lock);
670 dcrtc->cursor_update = false;
671 armada_updatel(0, CFG_HWC_ENA, dcrtc->base + LCD_SPU_DMA_CTRL0);
672 spin_unlock_irq(&dcrtc->irq_lock);
676 struct armada_gem_object *obj = dcrtc->cursor_obj;
681 armada_load_cursor_argb(dcrtc->base, pix, s, w, h);
685 spin_lock_irq(&dcrtc->irq_lock);
686 dcrtc->cursor_hw_pos = yscr << 16 | xscr;
687 dcrtc->cursor_hw_sz = h << 16 | w;
688 dcrtc->cursor_update = true;
689 armada_drm_crtc_enable_irq(dcrtc, DUMB_FRAMEDONE_ENA);
690 spin_unlock_irq(&dcrtc->irq_lock);
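
The clipping arithmetic at 611-634 handles a cursor that hangs off any edge of the active area: a negative position becomes an offset into the cursor image with an on-screen position of zero, while overhang past hdisplay/vdisplay simply shrinks the visible extent, possibly to nothing. The same computation for a single axis as a standalone helper (the name and parameters are illustrative, not the driver's):

#include <linux/kernel.h>
#include <linux/types.h>

/*
 * Clip one axis of a cursor rectangle against the active display size.
 * pos:     signed cursor position on screen
 * size:    cursor width (or height)
 * disp:    mode hdisplay (or vdisplay)
 * *offset: first visible pixel within the cursor image
 * *screen: on-screen position of that pixel
 * Returns the visible extent, 0 if the cursor is fully off screen.
 */
static u32 clip_cursor_axis(int pos, u32 size, u32 disp,
                            u32 *offset, u32 *screen)
{
        if (pos < 0) {
                *offset = -pos;
                *screen = 0;
                return max_t(int, (int)size + pos, 0);
        } else if (pos + size > disp) {
                *offset = 0;
                *screen = pos;
                return max_t(int, (int)disp - pos, 0);
        }
        *offset = 0;
        *screen = pos;
        return size;
}
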
703 struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
708 if (!dcrtc->variant->has_spu_adv_reg)
733 if (dcrtc->cursor_obj) {
734 dcrtc->cursor_obj->update = NULL;
735 dcrtc->cursor_obj->update_data = NULL;
736 drm_gem_object_put(&dcrtc->cursor_obj->obj);
738 dcrtc->cursor_obj = obj;
739 dcrtc->cursor_w = w;
740 dcrtc->cursor_h = h;
741 ret = armada_drm_crtc_cursor_update(dcrtc, true);
743 obj->update_data = dcrtc;
752 struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
756 if (!dcrtc->variant->has_spu_adv_reg)
759 dcrtc->cursor_x = x;
760 dcrtc->cursor_y = y;
761 ret = armada_drm_crtc_cursor_update(dcrtc, false);
768 struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
771 if (dcrtc->cursor_obj)
772 drm_gem_object_put(&dcrtc->cursor_obj->obj);
774 priv->dcrtc[dcrtc->num] = NULL;
775 drm_crtc_cleanup(&dcrtc->crtc);
777 if (dcrtc->variant->disable)
778 dcrtc->variant->disable(dcrtc);
780 writel_relaxed(0, dcrtc->base + LCD_SPU_IRQ_ENA);
782 of_node_put(dcrtc->crtc.port);
784 kfree(dcrtc);
798 struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
801 spin_lock_irqsave(&dcrtc->irq_lock, flags);
802 armada_drm_crtc_enable_irq(dcrtc, VSYNC_IRQ_ENA);
803 spin_unlock_irqrestore(&dcrtc->irq_lock, flags);
809 struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
812 spin_lock_irqsave(&dcrtc->irq_lock, flags);
813 armada_drm_crtc_disable_irq(dcrtc, VSYNC_IRQ_ENA);
814 spin_unlock_irqrestore(&dcrtc->irq_lock, flags);
831 int armada_crtc_select_clock(struct armada_crtc *dcrtc,
847 dcrtc->crtc.base.id, dcrtc->crtc.name, desired_hz);
878 dcrtc->crtc.base.id, dcrtc->crtc.name,
898 dcrtc->crtc.base.id, dcrtc->crtc.name,
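
armada_crtc_select_clock() (831 onward) chooses a clock for the desired pixel rate, logging the attempt and the outcome against the CRTC id and name (847, 878, 898). The hits show only those debug lines, so the following is a generic illustration of a best-divider search against a single reference clock, not the driver's selection logic, which is not visible here:

#include <linux/kernel.h>

static unsigned long pick_divider(unsigned long ref_hz, unsigned long desired_hz,
                                  u32 *best_div)
{
        u32 div;

        if (!desired_hz)
                return 0;

        /* nearest integer divider, clamped to at least 1 */
        div = DIV_ROUND_CLOSEST(ref_hz, desired_hz);
        if (div < 1)
                div = 1;

        *best_div = div;
        return ref_hz / div;            /* rate actually achieved */
}
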
913 struct armada_crtc *dcrtc;
922 dcrtc = kzalloc(sizeof(*dcrtc), GFP_KERNEL);
923 if (!dcrtc) {
929 dev_set_drvdata(dev, dcrtc);
931 dcrtc->variant = variant;
932 dcrtc->base = base;
933 dcrtc->num = drm->mode_config.num_crtc;
934 dcrtc->cfg_dumb_ctrl = DUMB24_RGB888_0;
935 dcrtc->spu_iopad_ctrl = CFG_VSCALE_LN_EN | CFG_IOPAD_DUMB24;
936 spin_lock_init(&dcrtc->irq_lock);
937 dcrtc->irq_ena = CLEAN_SPU_IRQ_ISR;
940 writel_relaxed(0x00000001, dcrtc->base + LCD_CFG_SCLK_DIV);
941 writel_relaxed(0x00000000, dcrtc->base + LCD_SPU_BLANKCOLOR);
942 writel_relaxed(dcrtc->spu_iopad_ctrl,
943 dcrtc->base + LCD_SPU_IOPAD_CONTROL);
944 writel_relaxed(0x00000000, dcrtc->base + LCD_SPU_SRAM_PARA0);
947 CFG_PDWN64x66, dcrtc->base + LCD_SPU_SRAM_PARA1);
948 writel_relaxed(0x2032ff81, dcrtc->base + LCD_SPU_DMA_CTRL1);
949 writel_relaxed(dcrtc->irq_ena, dcrtc->base + LCD_SPU_IRQ_ENA);
950 readl_relaxed(dcrtc->base + LCD_SPU_IRQ_ISR);
951 writel_relaxed(0, dcrtc->base + LCD_SPU_IRQ_ISR);
954 dcrtc);
958 if (dcrtc->variant->init) {
959 ret = dcrtc->variant->init(dcrtc, dev);
965 armada_updatel(CFG_ARBFAST_ENA, 0, dcrtc->base + LCD_SPU_DMA_CTRL0);
967 priv->dcrtc[dcrtc->num] = dcrtc;
969 dcrtc->crtc.port = port;
983 ret = drm_crtc_init_with_planes(drm, &dcrtc->crtc, primary, NULL,
988 drm_crtc_helper_add(&dcrtc->crtc, &armada_crtc_helper_funcs);
990 ret = drm_mode_crtc_set_gamma_size(&dcrtc->crtc, 256);
994 drm_crtc_enable_color_mgmt(&dcrtc->crtc, 0, false, 256);
996 return armada_overlay_plane_create(drm, 1 << dcrtc->num);
1001 kfree(dcrtc);
1054 struct armada_crtc *dcrtc = dev_get_drvdata(dev);
1056 armada_drm_crtc_destroy(&dcrtc->crtc);