Lines Matching defs:dcrtc (drivers/gpu/drm/armada/armada_crtc.c); two illustrative sketches of recurring patterns follow the listing.
82 armada_drm_crtc_update_regs(struct armada_crtc *dcrtc, struct armada_regs *regs)
85 void __iomem *reg = dcrtc->base + regs->offset;
96 static void armada_drm_crtc_update(struct armada_crtc *dcrtc, bool enable)
100 dumb_ctrl = dcrtc->cfg_dumb_ctrl;
118 dcrtc->base + LCD_SPU_DUMB_CTRL);
123 struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
130 dcrtc->event = event;
175 struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
187 if (!dcrtc->variant->has_spu_adv_reg &&
202 struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
220 ret = dcrtc->variant->compute_clock(dcrtc, adj, NULL);
228 static void armada_drm_crtc_disable_irq(struct armada_crtc *dcrtc, u32 mask)
230 if (dcrtc->irq_ena & mask) {
231 dcrtc->irq_ena &= ~mask;
232 writel(dcrtc->irq_ena, dcrtc->base + LCD_SPU_IRQ_ENA);
236 static void armada_drm_crtc_enable_irq(struct armada_crtc *dcrtc, u32 mask)
238 if ((dcrtc->irq_ena & mask) != mask) {
239 dcrtc->irq_ena |= mask;
240 writel(dcrtc->irq_ena, dcrtc->base + LCD_SPU_IRQ_ENA);
241 if (readl_relaxed(dcrtc->base + LCD_SPU_IRQ_ISR) & mask)
242 writel(0, dcrtc->base + LCD_SPU_IRQ_ISR);
246 static void armada_drm_crtc_irq(struct armada_crtc *dcrtc, u32 stat)
249 void __iomem *base = dcrtc->base;
252 DRM_ERROR("video underflow on crtc %u\n", dcrtc->num);
254 DRM_ERROR("graphics underflow on crtc %u\n", dcrtc->num);
257 drm_crtc_handle_vblank(&dcrtc->crtc);
259 spin_lock(&dcrtc->irq_lock);
260 if (stat & GRA_FRAME_IRQ && dcrtc->interlaced) {
264 writel_relaxed(dcrtc->v[i].spu_v_porch, base + LCD_SPU_V_PORCH);
265 writel_relaxed(dcrtc->v[i].spu_v_h_total,
270 val |= dcrtc->v[i].spu_adv_reg;
274 if (stat & dcrtc->irq_ena & DUMB_FRAMEDONE) {
275 if (dcrtc->update_pending) {
276 armada_drm_crtc_update_regs(dcrtc, dcrtc->regs);
277 dcrtc->update_pending = false;
279 if (dcrtc->cursor_update) {
280 writel_relaxed(dcrtc->cursor_hw_pos,
282 writel_relaxed(dcrtc->cursor_hw_sz,
288 dcrtc->cursor_update = false;
290 armada_drm_crtc_disable_irq(dcrtc, DUMB_FRAMEDONE_ENA);
292 spin_unlock(&dcrtc->irq_lock);
294 if (stat & VSYNC_IRQ && !dcrtc->update_pending) {
295 event = xchg(&dcrtc->event, NULL);
297 spin_lock(&dcrtc->crtc.dev->event_lock);
298 drm_crtc_send_vblank_event(&dcrtc->crtc, event);
299 spin_unlock(&dcrtc->crtc.dev->event_lock);
300 drm_crtc_vblank_put(&dcrtc->crtc);
307 struct armada_crtc *dcrtc = arg;
308 u32 v, stat = readl_relaxed(dcrtc->base + LCD_SPU_IRQ_ISR);
315 writel_relaxed(0, dcrtc->base + LCD_SPU_IRQ_ISR);
317 trace_armada_drm_irq(&dcrtc->crtc, stat);
320 v = stat & dcrtc->irq_ena;
323 armada_drm_crtc_irq(dcrtc, stat);
333 struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
351 dcrtc->variant->compute_clock(dcrtc, adj, &sclk);
355 spin_lock_irqsave(&dcrtc->irq_lock, flags);
357 dcrtc->interlaced = interlaced;
359 dcrtc->v[1].spu_v_h_total = adj->crtc_vtotal << 16 |
361 dcrtc->v[1].spu_v_porch = tm << 16 | bm;
363 dcrtc->v[1].spu_adv_reg = val << 20 | val | ADV_VSYNCOFFEN;
368 dcrtc->v[0].spu_adv_reg = val << 20 | val | ADV_VSYNCOFFEN;
369 dcrtc->v[0].spu_v_h_total = dcrtc->v[1].spu_v_h_total +
371 dcrtc->v[0].spu_v_porch = dcrtc->v[1].spu_v_porch + 1;
373 dcrtc->v[0] = dcrtc->v[1];
380 armada_reg_queue_set(regs, i, dcrtc->v[0].spu_v_porch, LCD_SPU_V_PORCH);
381 armada_reg_queue_set(regs, i, dcrtc->v[0].spu_v_h_total,
384 if (dcrtc->variant->has_spu_adv_reg)
385 armada_reg_queue_mod(regs, i, dcrtc->v[0].spu_adv_reg,
411 armada_drm_crtc_update_regs(dcrtc, regs);
412 spin_unlock_irqrestore(&dcrtc->irq_lock, flags);
432 struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
439 dcrtc->regs_idx = 0;
440 dcrtc->regs = dcrtc->atomic_regs;
446 struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
450 armada_reg_queue_end(dcrtc->regs, dcrtc->regs_idx);
457 dcrtc->update_pending = true;
459 spin_lock_irq(&dcrtc->irq_lock);
460 armada_drm_crtc_enable_irq(dcrtc, DUMB_FRAMEDONE_ENA);
461 spin_unlock_irq(&dcrtc->irq_lock);
463 spin_lock_irq(&dcrtc->irq_lock);
464 armada_drm_crtc_update_regs(dcrtc, dcrtc->regs);
465 spin_unlock_irq(&dcrtc->irq_lock);
472 struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
481 armada_drm_crtc_update(dcrtc, false);
488 if (dcrtc->variant->disable)
489 dcrtc->variant->disable(dcrtc);
508 struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
518 if (dcrtc->variant->enable)
519 dcrtc->variant->enable(dcrtc, &crtc->state->adjusted_mode);
521 armada_drm_crtc_update(dcrtc, true);
592 static int armada_drm_crtc_cursor_update(struct armada_crtc *dcrtc, bool reload)
594 uint32_t xoff, xscr, w = dcrtc->cursor_w, s;
595 uint32_t yoff, yscr, h = dcrtc->cursor_h;
602 if (dcrtc->cursor_x < 0) {
603 xoff = -dcrtc->cursor_x;
606 } else if (dcrtc->cursor_x + w > dcrtc->crtc.mode.hdisplay) {
608 xscr = dcrtc->cursor_x;
609 w = max_t(int, dcrtc->crtc.mode.hdisplay - dcrtc->cursor_x, 0);
612 xscr = dcrtc->cursor_x;
615 if (dcrtc->cursor_y < 0) {
616 yoff = -dcrtc->cursor_y;
619 } else if (dcrtc->cursor_y + h > dcrtc->crtc.mode.vdisplay) {
621 yscr = dcrtc->cursor_y;
622 h = max_t(int, dcrtc->crtc.mode.vdisplay - dcrtc->cursor_y, 0);
625 yscr = dcrtc->cursor_y;
629 s = dcrtc->cursor_w;
630 if (dcrtc->interlaced) {
636 if (!dcrtc->cursor_obj || !h || !w) {
637 spin_lock_irq(&dcrtc->irq_lock);
638 dcrtc->cursor_update = false;
639 armada_updatel(0, CFG_HWC_ENA, dcrtc->base + LCD_SPU_DMA_CTRL0);
640 spin_unlock_irq(&dcrtc->irq_lock);
644 spin_lock_irq(&dcrtc->irq_lock);
645 para1 = readl_relaxed(dcrtc->base + LCD_SPU_SRAM_PARA1);
647 dcrtc->base + LCD_SPU_SRAM_PARA1);
648 spin_unlock_irq(&dcrtc->irq_lock);
655 armada_drm_crtc_cursor_tran(dcrtc->base);
659 if (dcrtc->cursor_hw_sz != (h << 16 | w)) {
660 spin_lock_irq(&dcrtc->irq_lock);
661 dcrtc->cursor_update = false;
662 armada_updatel(0, CFG_HWC_ENA, dcrtc->base + LCD_SPU_DMA_CTRL0);
663 spin_unlock_irq(&dcrtc->irq_lock);
667 struct armada_gem_object *obj = dcrtc->cursor_obj;
672 armada_load_cursor_argb(dcrtc->base, pix, s, w, h);
676 spin_lock_irq(&dcrtc->irq_lock);
677 dcrtc->cursor_hw_pos = yscr << 16 | xscr;
678 dcrtc->cursor_hw_sz = h << 16 | w;
679 dcrtc->cursor_update = true;
680 armada_drm_crtc_enable_irq(dcrtc, DUMB_FRAMEDONE_ENA);
681 spin_unlock_irq(&dcrtc->irq_lock);
694 struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
699 if (!dcrtc->variant->has_spu_adv_reg)
724 if (dcrtc->cursor_obj) {
725 dcrtc->cursor_obj->update = NULL;
726 dcrtc->cursor_obj->update_data = NULL;
727 drm_gem_object_put(&dcrtc->cursor_obj->obj);
729 dcrtc->cursor_obj = obj;
730 dcrtc->cursor_w = w;
731 dcrtc->cursor_h = h;
732 ret = armada_drm_crtc_cursor_update(dcrtc, true);
734 obj->update_data = dcrtc;
743 struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
747 if (!dcrtc->variant->has_spu_adv_reg)
750 dcrtc->cursor_x = x;
751 dcrtc->cursor_y = y;
752 ret = armada_drm_crtc_cursor_update(dcrtc, false);
759 struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
762 if (dcrtc->cursor_obj)
763 drm_gem_object_put(&dcrtc->cursor_obj->obj);
765 priv->dcrtc[dcrtc->num] = NULL;
766 drm_crtc_cleanup(&dcrtc->crtc);
768 if (dcrtc->variant->disable)
769 dcrtc->variant->disable(dcrtc);
771 writel_relaxed(0, dcrtc->base + LCD_SPU_IRQ_ENA);
773 of_node_put(dcrtc->crtc.port);
775 kfree(dcrtc);
789 struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
792 spin_lock_irqsave(&dcrtc->irq_lock, flags);
793 armada_drm_crtc_enable_irq(dcrtc, VSYNC_IRQ_ENA);
794 spin_unlock_irqrestore(&dcrtc->irq_lock, flags);
800 struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
803 spin_lock_irqsave(&dcrtc->irq_lock, flags);
804 armada_drm_crtc_disable_irq(dcrtc, VSYNC_IRQ_ENA);
805 spin_unlock_irqrestore(&dcrtc->irq_lock, flags);
823 int armada_crtc_select_clock(struct armada_crtc *dcrtc,
839 dcrtc->crtc.base.id, dcrtc->crtc.name, desired_hz);
870 dcrtc->crtc.base.id, dcrtc->crtc.name,
890 dcrtc->crtc.base.id, dcrtc->crtc.name,
905 struct armada_crtc *dcrtc;
914 dcrtc = kzalloc(sizeof(*dcrtc), GFP_KERNEL);
915 if (!dcrtc) {
921 dev_set_drvdata(dev, dcrtc);
923 dcrtc->variant = variant;
924 dcrtc->base = base;
925 dcrtc->num = drm->mode_config.num_crtc;
926 dcrtc->cfg_dumb_ctrl = DUMB24_RGB888_0;
927 dcrtc->spu_iopad_ctrl = CFG_VSCALE_LN_EN | CFG_IOPAD_DUMB24;
928 spin_lock_init(&dcrtc->irq_lock);
929 dcrtc->irq_ena = CLEAN_SPU_IRQ_ISR;
932 writel_relaxed(0x00000001, dcrtc->base + LCD_CFG_SCLK_DIV);
933 writel_relaxed(0x00000000, dcrtc->base + LCD_SPU_BLANKCOLOR);
934 writel_relaxed(dcrtc->spu_iopad_ctrl,
935 dcrtc->base + LCD_SPU_IOPAD_CONTROL);
936 writel_relaxed(0x00000000, dcrtc->base + LCD_SPU_SRAM_PARA0);
939 CFG_PDWN64x66, dcrtc->base + LCD_SPU_SRAM_PARA1);
940 writel_relaxed(0x2032ff81, dcrtc->base + LCD_SPU_DMA_CTRL1);
941 writel_relaxed(dcrtc->irq_ena, dcrtc->base + LCD_SPU_IRQ_ENA);
942 readl_relaxed(dcrtc->base + LCD_SPU_IRQ_ISR);
943 writel_relaxed(0, dcrtc->base + LCD_SPU_IRQ_ISR);
946 dcrtc);
950 if (dcrtc->variant->init) {
951 ret = dcrtc->variant->init(dcrtc, dev);
957 armada_updatel(CFG_ARBFAST_ENA, 0, dcrtc->base + LCD_SPU_DMA_CTRL0);
959 priv->dcrtc[dcrtc->num] = dcrtc;
961 dcrtc->crtc.port = port;
975 ret = drm_crtc_init_with_planes(drm, &dcrtc->crtc, primary, NULL,
980 drm_crtc_helper_add(&dcrtc->crtc, &armada_crtc_helper_funcs);
982 ret = drm_mode_crtc_set_gamma_size(&dcrtc->crtc, 256);
986 drm_crtc_enable_color_mgmt(&dcrtc->crtc, 0, false, 256);
988 return armada_overlay_plane_create(drm, 1 << dcrtc->num);
993 kfree(dcrtc);
1046 struct armada_crtc *dcrtc = dev_get_drvdata(dev);
1048 armada_drm_crtc_destroy(&dcrtc->crtc);
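
The matched lines from armada_drm_crtc_disable_irq() and armada_drm_crtc_enable_irq() (file lines 228-242 above) show the driver keeping a software copy of the interrupt-enable mask in dcrtc->irq_ena and writing LCD_SPU_IRQ_ENA only when that mask actually changes, clearing the status register on enable so a stale event cannot fire immediately. In the driver these helpers run under dcrtc->irq_lock (file lines 259, 459-461, 792-794). The following standalone C sketch models that pattern with ordinary variables standing in for the memory-mapped registers; the struct and function names are illustrative, not taken from the driver, and the locking is omitted.

#include <stdint.h>
#include <stdio.h>

/* Illustrative model: "hardware" enable/status registers as plain variables. */
struct fake_lcd {
	uint32_t irq_ena_reg;	/* stands in for LCD_SPU_IRQ_ENA */
	uint32_t irq_isr_reg;	/* stands in for LCD_SPU_IRQ_ISR */
	uint32_t irq_ena;	/* software copy of the enable mask */
};

static void fake_enable_irq(struct fake_lcd *lcd, uint32_t mask)
{
	/* Only touch the register when the cached mask actually changes. */
	if ((lcd->irq_ena & mask) != mask) {
		lcd->irq_ena |= mask;
		lcd->irq_ena_reg = lcd->irq_ena;
		/* If status is already pending for a bit just enabled,
		 * clear the whole status register, as the driver does. */
		if (lcd->irq_isr_reg & mask)
			lcd->irq_isr_reg = 0;
	}
}

static void fake_disable_irq(struct fake_lcd *lcd, uint32_t mask)
{
	if (lcd->irq_ena & mask) {
		lcd->irq_ena &= ~mask;
		lcd->irq_ena_reg = lcd->irq_ena;
	}
}

int main(void)
{
	struct fake_lcd lcd = { .irq_isr_reg = 0x8 };

	fake_enable_irq(&lcd, 0x8);	/* enables bit 3, clears stale status */
	fake_disable_irq(&lcd, 0x8);
	printf("ena=%#x isr=%#x\n", (unsigned)lcd.irq_ena_reg,
	       (unsigned)lcd.irq_isr_reg);
	return 0;
}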
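The cursor path (file lines 592-625 above) clamps the hardware cursor to the active display area: a cursor hanging off the left or top edge pins its on-screen position to 0 and skips the hidden part of the source image via an offset, while one hanging off the right or bottom edge keeps its position but shrinks the visible width/height, clamped at zero with max_t(). Below is a standalone sketch of that arithmetic for a single axis; the function and variable names are illustrative, not the driver's.

#include <stdio.h>
#include <stdint.h>

/*
 * Clamp a cursor of size 'size' placed at 'pos' (may be negative) to a
 * display that is 'display' pixels long on this axis.  Returns the visible
 * size and fills in the source offset and on-screen position.
 */
static uint32_t clamp_cursor_axis(int pos, uint32_t size, uint32_t display,
				  uint32_t *src_off, uint32_t *scr_pos)
{
	if (pos < 0) {
		/* Hanging off the low edge: skip the hidden pixels. */
		*src_off = -pos;
		*scr_pos = 0;
		return size > *src_off ? size - *src_off : 0;
	} else if (pos + size > display) {
		/* Hanging off the high edge: shrink the visible size. */
		*src_off = 0;
		*scr_pos = pos;
		return (uint32_t)pos < display ? display - pos : 0;
	}
	*src_off = 0;
	*scr_pos = pos;
	return size;
}

int main(void)
{
	uint32_t off, scr, vis;

	vis = clamp_cursor_axis(-10, 64, 1920, &off, &scr);
	printf("pos=-10:  visible=%u src_off=%u scr_pos=%u\n", vis, off, scr);

	vis = clamp_cursor_axis(1900, 64, 1920, &off, &scr);
	printf("pos=1900: visible=%u src_off=%u scr_pos=%u\n", vis, off, scr);
	return 0;
}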