/*
 * Copyright (C) 2012 Russell King
 * Rewritten from the dovefb driver, and Armada510 manuals.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
#include <linux/clk.h>
#include <linux/component.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>
#include <drm/drmP.h>
#include <drm/drm_crtc_helper.h>
#include <drm/drm_plane_helper.h>
#include <drm/drm_atomic_helper.h>
#include "armada_crtc.h"
#include "armada_drm.h"
#include "armada_fb.h"
#include "armada_gem.h"
#include "armada_hw.h"
#include "armada_trace.h"

enum csc_mode {
	CSC_AUTO = 0,
	CSC_YUV_CCIR601 = 1,
	CSC_YUV_CCIR709 = 2,
	CSC_RGB_COMPUTER = 1,
	CSC_RGB_STUDIO = 2,
};

static const uint32_t armada_primary_formats[] = {
	DRM_FORMAT_UYVY,
	DRM_FORMAT_YUYV,
	DRM_FORMAT_VYUY,
	DRM_FORMAT_YVYU,
	DRM_FORMAT_ARGB8888,
	DRM_FORMAT_ABGR8888,
	DRM_FORMAT_XRGB8888,
	DRM_FORMAT_XBGR8888,
	DRM_FORMAT_RGB888,
	DRM_FORMAT_BGR888,
	DRM_FORMAT_ARGB1555,
	DRM_FORMAT_ABGR1555,
	DRM_FORMAT_RGB565,
	DRM_FORMAT_BGR565,
};

/*
 * A note about interlacing.  Let's consider HDMI 1920x1080i.
 * The timing parameters we have from X are:
 *  Hact HsyA HsyI Htot  Vact VsyA VsyI Vtot
 *  1920 2448 2492 2640  1080 1084 1094 1125
 * Which get translated to:
 *  Hact HsyA HsyI Htot  Vact VsyA VsyI Vtot
 *  1920 2448 2492 2640   540  542  547  562
 *
 * This is how it is defined by CEA-861-D - line and pixel numbers are
 * referenced to the rising edge of VSYNC and HSYNC.  Total clocks per
 * line: 2640.  The odd frame, the first active line is at line 21, and
 * the even frame, the first active line is 584.
 *
 * LN:    560     561     562     563           567     568     569
 * DE:    ~~~|____________________________//__________________________
 * HSYNC: ____|~|_____|~|_____|~|_____|~|_//__|~|_____|~|_____|~|_____
 * VSYNC: _________________________|~~~~~~//~~~~~~~~~~~~~~~|__________
 *  22 blanking lines.  VSYNC at 1320 (referenced to the HSYNC rising edge).
 *
 * LN:    1123    1124    1125      1             5       6       7
 * DE:    ~~~|____________________________//__________________________
 * HSYNC: ____|~|_____|~|_____|~|_____|~|_//__|~|_____|~|_____|~|_____
 * VSYNC: ____________________|~~~~~~~~~~~//~~~~~~~~~~|_______________
 *  23 blanking lines
 *
 * The Armada LCD Controller line and pixel numbers are, like X timings,
 * referenced to the top left of the active frame.
 *
 * So, translating these to our LCD controller:
 *  Odd frame, 563 total lines, VSYNC at line 543-548, pixel 1128.
 *  Even frame, 562 total lines, VSYNC at line 542-547, pixel 2448.
 * Note: Vsync front porch remains constant!
 *
 * if (odd_frame) {
 *   vtotal = mode->crtc_vtotal + 1;
 *   vbackporch = mode->crtc_vsync_start - mode->crtc_vdisplay + 1;
 *   vhorizpos = mode->crtc_hsync_start - mode->crtc_htotal / 2
 * } else {
 *   vtotal = mode->crtc_vtotal;
 *   vbackporch = mode->crtc_vsync_start - mode->crtc_vdisplay;
 *   vhorizpos = mode->crtc_hsync_start;
 * }
 * vfrontporch = mode->crtc_vtotal - mode->crtc_vsync_end;
 *
 * So, we need to reprogram these registers on each vsync event:
 *  LCD_SPU_V_PORCH, LCD_SPU_ADV_REG, LCD_SPUT_V_H_TOTAL
 *
 * Note: we do not use the frame done interrupts because these appear
 * to happen too early, and lead to jitter on the display (presumably
 * they occur at the end of the last active line, before the vsync back
 * porch, which we're reprogramming.)
 */

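/*
 * Apply a queued list of register updates.  Each entry is a
 * read-modify-write when ->mask is non-zero (the current register bits
 * selected by ->mask are preserved and ->val is OR'd in); the list is
 * terminated by an entry whose ->offset is ~0, as built by
 * armada_reg_queue_end().
 */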
void
armada_drm_crtc_update_regs(struct armada_crtc *dcrtc, struct armada_regs *regs)
{
	while (regs->offset != ~0) {
		void __iomem *reg = dcrtc->base + regs->offset;
		uint32_t val;

		val = regs->mask;
		if (val != 0)
			val &= readl_relaxed(reg);
		writel_relaxed(val | regs->val, reg);
		++regs;
	}
}

#define dpms_blanked(dpms)	((dpms) != DRM_MODE_DPMS_ON)

static void armada_drm_crtc_update(struct armada_crtc *dcrtc)
{
	uint32_t dumb_ctrl;

	dumb_ctrl = dcrtc->cfg_dumb_ctrl;

	if (!dpms_blanked(dcrtc->dpms))
		dumb_ctrl |= CFG_DUMB_ENA;

	/*
	 * When the dumb interface isn't in DUMB24_RGB888_0 mode, it might
	 * be using SPI or GPIO.  If we set this to DUMB_BLANK, we will
	 * force LCD_D[23:0] to output blank color, overriding the GPIO or
	 * SPI usage.  So leave it as-is unless in DUMB24_RGB888_0 mode.
	 */
	if (dpms_blanked(dcrtc->dpms) &&
	    (dumb_ctrl & DUMB_MASK) == DUMB24_RGB888_0) {
		dumb_ctrl &= ~DUMB_MASK;
		dumb_ctrl |= DUMB_BLANK;
	}

	/*
	 * The documentation doesn't indicate what the normal state of
	 * the sync signals is.  Sebastian Hesselbart kindly probed
	 * these signals on his board to determine their state.
	 *
	 * The non-inverted state of the sync signals is active high.
	 * Setting these bits makes the appropriate signal active low.
	 */
	if (dcrtc->crtc.mode.flags & DRM_MODE_FLAG_NCSYNC)
		dumb_ctrl |= CFG_INV_CSYNC;
	if (dcrtc->crtc.mode.flags & DRM_MODE_FLAG_NHSYNC)
		dumb_ctrl |= CFG_INV_HSYNC;
	if (dcrtc->crtc.mode.flags & DRM_MODE_FLAG_NVSYNC)
		dumb_ctrl |= CFG_INV_VSYNC;

	if (dcrtc->dumb_ctrl != dumb_ctrl) {
		dcrtc->dumb_ctrl = dumb_ctrl;
		writel_relaxed(dumb_ctrl, dcrtc->base + LCD_SPU_DUMB_CTRL);
	}
}

void armada_drm_plane_calc_addrs(u32 *addrs, struct drm_framebuffer *fb,
	int x, int y)
{
	const struct drm_format_info *format = fb->format;
	unsigned int num_planes = format->num_planes;
	u32 addr = drm_fb_obj(fb)->dev_addr;
	int i;

	if (num_planes > 3)
		num_planes = 3;

	addrs[0] = addr + fb->offsets[0] + y * fb->pitches[0] +
		   x * format->cpp[0];

	y /= format->vsub;
	x /= format->hsub;

	for (i = 1; i < num_planes; i++)
		addrs[i] = addr + fb->offsets[i] + y * fb->pitches[i] +
			   x * format->cpp[i];
	for (; i < 3; i++)
		addrs[i] = 0;
}

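/*
 * Queue the framebuffer start addresses and pitch for the graphics
 * (primary) layer.  For interlaced scanout, the even field starts one
 * line into the buffer and the pitch is doubled so that each field
 * steps over the other field's lines.
 */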
static unsigned armada_drm_crtc_calc_fb(struct drm_framebuffer *fb,
	int x, int y, struct armada_regs *regs, bool interlaced)
{
	unsigned pitch = fb->pitches[0];
	u32 addrs[3], addr_odd, addr_even;
	unsigned i = 0;

	DRM_DEBUG_DRIVER("pitch %u x %d y %d bpp %d\n",
		pitch, x, y, fb->format->cpp[0] * 8);

	armada_drm_plane_calc_addrs(addrs, fb, x, y);

	addr_odd = addr_even = addrs[0];

	if (interlaced) {
		addr_even += pitch;
		pitch *= 2;
	}

	/* write offset, base, and pitch */
	armada_reg_queue_set(regs, i, addr_odd, LCD_CFG_GRA_START_ADDR0);
	armada_reg_queue_set(regs, i, addr_even, LCD_CFG_GRA_START_ADDR1);
	armada_reg_queue_mod(regs, i, pitch, 0xffff, LCD_CFG_GRA_PITCH);

	return i;
}

static void armada_drm_plane_work_call(struct armada_crtc *dcrtc,
	struct armada_plane_work *work,
	void (*fn)(struct armada_crtc *, struct armada_plane_work *))
{
	struct armada_plane *dplane = drm_to_armada_plane(work->plane);
	struct drm_pending_vblank_event *event;
	struct drm_framebuffer *fb;

	if (fn)
		fn(dcrtc, work);
	drm_crtc_vblank_put(&dcrtc->crtc);

	event = work->event;
	fb = work->old_fb;
	if (event || fb) {
		struct drm_device *dev = dcrtc->crtc.dev;
		unsigned long flags;

		spin_lock_irqsave(&dev->event_lock, flags);
		if (event)
			drm_crtc_send_vblank_event(&dcrtc->crtc, event);
		if (fb)
			__armada_drm_queue_unref_work(dev, fb);
		spin_unlock_irqrestore(&dev->event_lock, flags);
	}

	if (work->need_kfree)
		kfree(work);

	wake_up(&dplane->frame_wait);
}

static void armada_drm_plane_work_run(struct armada_crtc *dcrtc,
	struct drm_plane *plane)
{
	struct armada_plane *dplane = drm_to_armada_plane(plane);
	struct armada_plane_work *work = xchg(&dplane->work, NULL);

	/* Handle any pending frame work. */
	if (work)
		armada_drm_plane_work_call(dcrtc, work, work->fn);
}

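/*
 * Queue a unit of plane work to be completed by the interrupt handler.
 * A vblank reference is held until the work has run, and only one work
 * item may be pending per plane at a time (-EBUSY otherwise).
 */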
int armada_drm_plane_work_queue(struct armada_crtc *dcrtc,
	struct armada_plane_work *work)
{
	struct armada_plane *plane = drm_to_armada_plane(work->plane);
	int ret;

	ret = drm_crtc_vblank_get(&dcrtc->crtc);
	if (ret)
		return ret;

	ret = cmpxchg(&plane->work, NULL, work) ? -EBUSY : 0;
	if (ret)
		drm_crtc_vblank_put(&dcrtc->crtc);

	return ret;
}

int armada_drm_plane_work_wait(struct armada_plane *plane, long timeout)
{
	return wait_event_timeout(plane->frame_wait, !plane->work, timeout);
}

void armada_drm_plane_work_cancel(struct armada_crtc *dcrtc,
	struct armada_plane *dplane)
{
	struct armada_plane_work *work = xchg(&dplane->work, NULL);

	if (work)
		armada_drm_plane_work_call(dcrtc, work, work->cancel);
}

static void armada_drm_crtc_complete_frame_work(struct armada_crtc *dcrtc,
	struct armada_plane_work *work)
{
	unsigned long flags;

	spin_lock_irqsave(&dcrtc->irq_lock, flags);
	armada_drm_crtc_update_regs(dcrtc, work->regs);
	spin_unlock_irqrestore(&dcrtc->irq_lock, flags);
}

static void armada_drm_crtc_complete_disable_work(struct armada_crtc *dcrtc,
	struct armada_plane_work *work)
{
	unsigned long flags;

	if (dcrtc->plane == work->plane)
		dcrtc->plane = NULL;

	spin_lock_irqsave(&dcrtc->irq_lock, flags);
	armada_drm_crtc_update_regs(dcrtc, work->regs);
	spin_unlock_irqrestore(&dcrtc->irq_lock, flags);
}

static struct armada_plane_work *
armada_drm_crtc_alloc_plane_work(struct drm_plane *plane)
{
	struct armada_plane_work *work;
	int i = 0;

	work = kzalloc(sizeof(*work), GFP_KERNEL);
	if (!work)
		return NULL;

	work->plane = plane;
	work->fn = armada_drm_crtc_complete_frame_work;
	work->need_kfree = true;
	armada_reg_queue_end(work->regs, i);

	return work;
}

static void armada_drm_vblank_off(struct armada_crtc *dcrtc)
{
	/*
	 * Tell the DRM core that vblank IRQs aren't going to happen for
	 * a while.  This cleans up any pending vblank events for us.
	 */
	drm_crtc_vblank_off(&dcrtc->crtc);
	armada_drm_plane_work_run(dcrtc, dcrtc->crtc.primary);
}

/* The mode_config.mutex will be held for this call */
static void armada_drm_crtc_dpms(struct drm_crtc *crtc, int dpms)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);

	if (dpms_blanked(dcrtc->dpms) != dpms_blanked(dpms)) {
		if (dpms_blanked(dpms))
			armada_drm_vblank_off(dcrtc);
		else if (!IS_ERR(dcrtc->clk))
			WARN_ON(clk_prepare_enable(dcrtc->clk));
		dcrtc->dpms = dpms;
		armada_drm_crtc_update(dcrtc);
		if (!dpms_blanked(dpms))
			drm_crtc_vblank_on(&dcrtc->crtc);
		else if (!IS_ERR(dcrtc->clk))
			clk_disable_unprepare(dcrtc->clk);
	} else if (dcrtc->dpms != dpms) {
		dcrtc->dpms = dpms;
	}
}

/*
 * Prepare for a mode set.  Turn off overlay to ensure that we don't end
 * up with the overlay size being bigger than the active screen size.
 * We rely upon X refreshing this state after the mode set has completed.
 *
 * The mode_config.mutex will be held for this call
 */
static void armada_drm_crtc_prepare(struct drm_crtc *crtc)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	struct drm_plane *plane;
	u32 val;

	/*
	 * If we have an overlay plane associated with this CRTC, disable
	 * it before the modeset to avoid its coordinates being outside
	 * the new mode parameters.
	 */
	plane = dcrtc->plane;
	if (plane) {
		drm_plane_force_disable(plane);
		WARN_ON(!armada_drm_plane_work_wait(drm_to_armada_plane(plane),
						    HZ));
	}

	/* Wait for pending flips to complete */
	armada_drm_plane_work_wait(drm_to_armada_plane(dcrtc->crtc.primary),
				   MAX_SCHEDULE_TIMEOUT);

	drm_crtc_vblank_off(crtc);

	val = dcrtc->dumb_ctrl & ~CFG_DUMB_ENA;
	if (val != dcrtc->dumb_ctrl) {
		dcrtc->dumb_ctrl = val;
		writel_relaxed(val, dcrtc->base + LCD_SPU_DUMB_CTRL);
	}
}

/* The mode_config.mutex will be held for this call */
static void armada_drm_crtc_commit(struct drm_crtc *crtc)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);

	dcrtc->dpms = DRM_MODE_DPMS_ON;
	armada_drm_crtc_update(dcrtc);
	drm_crtc_vblank_on(crtc);
}

/* The mode_config.mutex will be held for this call */
static bool armada_drm_crtc_mode_fixup(struct drm_crtc *crtc,
	const struct drm_display_mode *mode, struct drm_display_mode *adj)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	int ret;

	/* We can't do interlaced modes if we don't have the SPU_ADV_REG */
	if (!dcrtc->variant->has_spu_adv_reg &&
	    adj->flags & DRM_MODE_FLAG_INTERLACE)
		return false;

	/* Check whether the display mode is possible */
	ret = dcrtc->variant->compute_clock(dcrtc, adj, NULL);
	if (ret)
		return false;

	return true;
}

/* These are locked by dev->vbl_lock */
static void armada_drm_crtc_disable_irq(struct armada_crtc *dcrtc, u32 mask)
{
	if (dcrtc->irq_ena & mask) {
		dcrtc->irq_ena &= ~mask;
		writel(dcrtc->irq_ena, dcrtc->base + LCD_SPU_IRQ_ENA);
	}
}

static void armada_drm_crtc_enable_irq(struct armada_crtc *dcrtc, u32 mask)
{
	if ((dcrtc->irq_ena & mask) != mask) {
		dcrtc->irq_ena |= mask;
		writel(dcrtc->irq_ena, dcrtc->base + LCD_SPU_IRQ_ENA);
		if (readl_relaxed(dcrtc->base + LCD_SPU_IRQ_ISR) & mask)
			writel(0, dcrtc->base + LCD_SPU_IRQ_ISR);
	}
}

static void armada_drm_crtc_irq(struct armada_crtc *dcrtc, u32 stat)
{
	void __iomem *base = dcrtc->base;
	struct drm_plane *ovl_plane;

	if (stat & DMA_FF_UNDERFLOW)
		DRM_ERROR("video underflow on crtc %u\n", dcrtc->num);
	if (stat & GRA_FF_UNDERFLOW)
		DRM_ERROR("graphics underflow on crtc %u\n", dcrtc->num);

	if (stat & VSYNC_IRQ)
		drm_crtc_handle_vblank(&dcrtc->crtc);

	ovl_plane = dcrtc->plane;
	if (ovl_plane)
		armada_drm_plane_work_run(dcrtc, ovl_plane);

	spin_lock(&dcrtc->irq_lock);
	if (stat & GRA_FRAME_IRQ && dcrtc->interlaced) {
		int i = stat & GRA_FRAME_IRQ0 ? 0 : 1;
		uint32_t val;

		writel_relaxed(dcrtc->v[i].spu_v_porch, base + LCD_SPU_V_PORCH);
		writel_relaxed(dcrtc->v[i].spu_v_h_total,
			       base + LCD_SPUT_V_H_TOTAL);

		val = readl_relaxed(base + LCD_SPU_ADV_REG);
		val &= ~(ADV_VSYNC_L_OFF | ADV_VSYNC_H_OFF | ADV_VSYNCOFFEN);
		val |= dcrtc->v[i].spu_adv_reg;
		writel_relaxed(val, base + LCD_SPU_ADV_REG);
	}

	if (stat & DUMB_FRAMEDONE && dcrtc->cursor_update) {
		writel_relaxed(dcrtc->cursor_hw_pos,
			       base + LCD_SPU_HWC_OVSA_HPXL_VLN);
		writel_relaxed(dcrtc->cursor_hw_sz,
			       base + LCD_SPU_HWC_HPXL_VLN);
		armada_updatel(CFG_HWC_ENA,
			       CFG_HWC_ENA | CFG_HWC_1BITMOD | CFG_HWC_1BITENA,
			       base + LCD_SPU_DMA_CTRL0);
		dcrtc->cursor_update = false;
		armada_drm_crtc_disable_irq(dcrtc, DUMB_FRAMEDONE_ENA);
	}

	spin_unlock(&dcrtc->irq_lock);

	if (stat & GRA_FRAME_IRQ)
		armada_drm_plane_work_run(dcrtc, dcrtc->crtc.primary);
}

static irqreturn_t armada_drm_irq(int irq, void *arg)
{
	struct armada_crtc *dcrtc = arg;
	u32 v, stat = readl_relaxed(dcrtc->base + LCD_SPU_IRQ_ISR);

	/*
	 * Reading the ISR appears to clear bits provided CLEAN_SPU_IRQ_ISR
	 * is set.  Writing has some other effect to acknowledge the IRQ -
	 * without this, we only get a single IRQ.
	 */
	writel_relaxed(0, dcrtc->base + LCD_SPU_IRQ_ISR);

	trace_armada_drm_irq(&dcrtc->crtc, stat);

	/* Mask out those interrupts we haven't enabled */
	v = stat & dcrtc->irq_ena;

	if (v & (VSYNC_IRQ|GRA_FRAME_IRQ|DUMB_FRAMEDONE)) {
		armada_drm_crtc_irq(dcrtc, stat);
		return IRQ_HANDLED;
	}
	return IRQ_NONE;
}

static uint32_t armada_drm_crtc_calculate_csc(struct armada_crtc *dcrtc)
{
	struct drm_display_mode *adj = &dcrtc->crtc.mode;
	uint32_t val = 0;

	if (dcrtc->csc_yuv_mode == CSC_YUV_CCIR709)
		val |= CFG_CSC_YUV_CCIR709;
	if (dcrtc->csc_rgb_mode == CSC_RGB_STUDIO)
		val |= CFG_CSC_RGB_STUDIO;

	/*
	 * In auto mode, set the colorimetry, based upon the HDMI spec.
	 * 1280x720p, 1920x1080p and 1920x1080i use ITU709, others use
	 * ITU601.  It may be more appropriate to set this depending on
	 * the source - but what if the graphic frame is YUV and the
	 * video frame is RGB?
	 */
	if ((adj->hdisplay == 1280 && adj->vdisplay == 720 &&
	     !(adj->flags & DRM_MODE_FLAG_INTERLACE)) ||
	    (adj->hdisplay == 1920 && adj->vdisplay == 1080)) {
		if (dcrtc->csc_yuv_mode == CSC_AUTO)
			val |= CFG_CSC_YUV_CCIR709;
	}

	/*
	 * We assume we're connected to a TV-like device, so the YUV->RGB
	 * conversion should produce a limited range.  We should set this
	 * depending on the connectors attached to this CRTC, and what
	 * kind of device they report being connected.
	 */
	if (dcrtc->csc_rgb_mode == CSC_AUTO)
		val |= CFG_CSC_RGB_STUDIO;

	return val;
}

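/*
 * Program the CRTC timings.  Besides the porches and totals, this also
 * precomputes the per-field values (dcrtc->v[0]/v[1]) that the interrupt
 * handler reloads on each field when the mode is interlaced.
 */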
/* The mode_config.mutex will be held for this call */
static void armada_drm_crtc_mode_set_nofb(struct drm_crtc *crtc)
{
	struct drm_display_mode *adj = &crtc->state->adjusted_mode;
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	struct armada_regs regs[17];
	uint32_t lm, rm, tm, bm, val, sclk;
	unsigned long flags;
	unsigned i;
	bool interlaced = !!(adj->flags & DRM_MODE_FLAG_INTERLACE);

	i = 0;
	rm = adj->crtc_hsync_start - adj->crtc_hdisplay;
	lm = adj->crtc_htotal - adj->crtc_hsync_end;
	bm = adj->crtc_vsync_start - adj->crtc_vdisplay;
	tm = adj->crtc_vtotal - adj->crtc_vsync_end;

	DRM_DEBUG_DRIVER("H: %d %d %d %d lm %d rm %d\n",
		adj->crtc_hdisplay,
		adj->crtc_hsync_start,
		adj->crtc_hsync_end,
		adj->crtc_htotal, lm, rm);
	DRM_DEBUG_DRIVER("V: %d %d %d %d tm %d bm %d\n",
		adj->crtc_vdisplay,
		adj->crtc_vsync_start,
		adj->crtc_vsync_end,
		adj->crtc_vtotal, tm, bm);

	/*
	 * If we are blanked, we would have disabled the clock.  Re-enable
	 * it so that compute_clock() does the right thing.
	 */
	if (!IS_ERR(dcrtc->clk) && dpms_blanked(dcrtc->dpms))
		WARN_ON(clk_prepare_enable(dcrtc->clk));

	/* Now compute the divider for real */
	dcrtc->variant->compute_clock(dcrtc, adj, &sclk);

	armada_reg_queue_set(regs, i, sclk, LCD_CFG_SCLK_DIV);

	if (interlaced ^ dcrtc->interlaced) {
		if (adj->flags & DRM_MODE_FLAG_INTERLACE)
			drm_crtc_vblank_get(&dcrtc->crtc);
		else
			drm_crtc_vblank_put(&dcrtc->crtc);
		dcrtc->interlaced = interlaced;
	}

	spin_lock_irqsave(&dcrtc->irq_lock, flags);

	/* Even interlaced/progressive frame */
	dcrtc->v[1].spu_v_h_total = adj->crtc_vtotal << 16 |
				    adj->crtc_htotal;
	dcrtc->v[1].spu_v_porch = tm << 16 | bm;
	val = adj->crtc_hsync_start;
	dcrtc->v[1].spu_adv_reg = val << 20 | val | ADV_VSYNCOFFEN |
		dcrtc->variant->spu_adv_reg;

	if (interlaced) {
		/* Odd interlaced frame */
		dcrtc->v[0].spu_v_h_total = dcrtc->v[1].spu_v_h_total +
					    (1 << 16);
		dcrtc->v[0].spu_v_porch = dcrtc->v[1].spu_v_porch + 1;
		val = adj->crtc_hsync_start - adj->crtc_htotal / 2;
		dcrtc->v[0].spu_adv_reg = val << 20 | val | ADV_VSYNCOFFEN |
			dcrtc->variant->spu_adv_reg;
	} else {
		dcrtc->v[0] = dcrtc->v[1];
	}

	val = adj->crtc_vdisplay << 16 | adj->crtc_hdisplay;

	armada_reg_queue_set(regs, i, val, LCD_SPU_V_H_ACTIVE);
	armada_reg_queue_set(regs, i, (lm << 16) | rm, LCD_SPU_H_PORCH);
	armada_reg_queue_set(regs, i, dcrtc->v[0].spu_v_porch, LCD_SPU_V_PORCH);
	armada_reg_queue_set(regs, i, dcrtc->v[0].spu_v_h_total,
			     LCD_SPUT_V_H_TOTAL);

	if (dcrtc->variant->has_spu_adv_reg) {
		armada_reg_queue_mod(regs, i, dcrtc->v[0].spu_adv_reg,
				     ADV_VSYNC_L_OFF | ADV_VSYNC_H_OFF |
				     ADV_VSYNCOFFEN, LCD_SPU_ADV_REG);
	}

	val = adj->flags & DRM_MODE_FLAG_NVSYNC ? CFG_VSYNC_INV : 0;
	armada_reg_queue_mod(regs, i, val, CFG_VSYNC_INV, LCD_SPU_DMA_CTRL1);

	val = dcrtc->spu_iopad_ctrl | armada_drm_crtc_calculate_csc(dcrtc);
	armada_reg_queue_set(regs, i, val, LCD_SPU_IOPAD_CONTROL);
	armada_reg_queue_end(regs, i);

	armada_drm_crtc_update_regs(dcrtc, regs);
	spin_unlock_irqrestore(&dcrtc->irq_lock, flags);
}

/* The mode_config.mutex will be held for this call */
static void armada_drm_crtc_disable(struct drm_crtc *crtc)
{
	armada_drm_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);

	/* Disable our primary plane when we disable the CRTC. */
	crtc->primary->funcs->disable_plane(crtc->primary, NULL);
}

static void armada_drm_crtc_atomic_begin(struct drm_crtc *crtc,
					 struct drm_crtc_state *old_crtc_state)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	struct armada_plane *dplane;

	DRM_DEBUG_KMS("[CRTC:%d:%s]\n", crtc->base.id, crtc->name);

	/* Wait 100ms for any plane works to complete */
	dplane = drm_to_armada_plane(crtc->primary);
	if (WARN_ON(armada_drm_plane_work_wait(dplane, HZ / 10) == 0))
		armada_drm_plane_work_cancel(dcrtc, dplane);

	dcrtc->regs_idx = 0;
	dcrtc->regs = dcrtc->atomic_regs;
}

static void armada_drm_crtc_atomic_flush(struct drm_crtc *crtc,
					 struct drm_crtc_state *old_crtc_state)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	unsigned long flags;

	DRM_DEBUG_KMS("[CRTC:%d:%s]\n", crtc->base.id, crtc->name);

	armada_reg_queue_end(dcrtc->regs, dcrtc->regs_idx);

	spin_lock_irqsave(&dcrtc->irq_lock, flags);
	armada_drm_crtc_update_regs(dcrtc, dcrtc->regs);
	spin_unlock_irqrestore(&dcrtc->irq_lock, flags);
}

static const struct drm_crtc_helper_funcs armada_crtc_helper_funcs = {
	.dpms = armada_drm_crtc_dpms,
	.prepare = armada_drm_crtc_prepare,
	.commit = armada_drm_crtc_commit,
	.mode_fixup = armada_drm_crtc_mode_fixup,
	.mode_set = drm_helper_crtc_mode_set,
	.mode_set_nofb = armada_drm_crtc_mode_set_nofb,
	.mode_set_base = drm_helper_crtc_mode_set_base,
	.disable = armada_drm_crtc_disable,
	.atomic_begin = armada_drm_crtc_atomic_begin,
	.atomic_flush = armada_drm_crtc_atomic_flush,
};

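/*
 * Load an ARGB cursor image into the hardware cursor SRAM, swapping the
 * red and blue channels on the way in and stepping over the gaps in the
 * SRAM address space between its banks.
 */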
static void armada_load_cursor_argb(void __iomem *base, uint32_t *pix,
	unsigned stride, unsigned width, unsigned height)
{
	uint32_t addr;
	unsigned y;

	addr = SRAM_HWC32_RAM1;
	for (y = 0; y < height; y++) {
		uint32_t *p = &pix[y * stride];
		unsigned x;

		for (x = 0; x < width; x++, p++) {
			uint32_t val = *p;

			val = (val & 0xff00ff00) |
			      (val & 0x000000ff) << 16 |
			      (val & 0x00ff0000) >> 16;

			writel_relaxed(val,
				       base + LCD_SPU_SRAM_WRDAT);
			writel_relaxed(addr | SRAM_WRITE,
				       base + LCD_SPU_SRAM_CTRL);
			readl_relaxed(base + LCD_SPU_HWC_OVSA_HPXL_VLN);
			addr += 1;
			if ((addr & 0x00ff) == 0)
				addr += 0xf00;
			if ((addr & 0x30ff) == 0)
				addr = SRAM_HWC32_RAM2;
		}
	}
}

static void armada_drm_crtc_cursor_tran(void __iomem *base)
{
	unsigned addr;

	for (addr = 0; addr < 256; addr++) {
		/* write the default value */
		writel_relaxed(0x55555555, base + LCD_SPU_SRAM_WRDAT);
		writel_relaxed(addr | SRAM_WRITE | SRAM_HWC32_TRAN,
			       base + LCD_SPU_SRAM_CTRL);
	}
}

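/*
 * Recalculate the visible cursor area, clipping it against the active
 * display, reload the image data if necessary, and hand the position,
 * size and enable over to the DUMB_FRAMEDONE interrupt so the update
 * lands between frames.
 */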
static int armada_drm_crtc_cursor_update(struct armada_crtc *dcrtc, bool reload)
{
	uint32_t xoff, xscr, w = dcrtc->cursor_w, s;
	uint32_t yoff, yscr, h = dcrtc->cursor_h;
	uint32_t para1;

	/*
	 * Calculate the visible width and height of the cursor,
	 * screen position, and the position in the cursor bitmap.
	 */
	if (dcrtc->cursor_x < 0) {
		xoff = -dcrtc->cursor_x;
		xscr = 0;
		w -= min(xoff, w);
	} else if (dcrtc->cursor_x + w > dcrtc->crtc.mode.hdisplay) {
		xoff = 0;
		xscr = dcrtc->cursor_x;
		w = max_t(int, dcrtc->crtc.mode.hdisplay - dcrtc->cursor_x, 0);
	} else {
		xoff = 0;
		xscr = dcrtc->cursor_x;
	}

	if (dcrtc->cursor_y < 0) {
		yoff = -dcrtc->cursor_y;
		yscr = 0;
		h -= min(yoff, h);
	} else if (dcrtc->cursor_y + h > dcrtc->crtc.mode.vdisplay) {
		yoff = 0;
		yscr = dcrtc->cursor_y;
		h = max_t(int, dcrtc->crtc.mode.vdisplay - dcrtc->cursor_y, 0);
	} else {
		yoff = 0;
		yscr = dcrtc->cursor_y;
	}

	/* On interlaced modes, the vertical cursor size must be halved */
	s = dcrtc->cursor_w;
	if (dcrtc->interlaced) {
		s *= 2;
		yscr /= 2;
		h /= 2;
	}

	if (!dcrtc->cursor_obj || !h || !w) {
		spin_lock_irq(&dcrtc->irq_lock);
		armada_drm_crtc_disable_irq(dcrtc, DUMB_FRAMEDONE_ENA);
		dcrtc->cursor_update = false;
		armada_updatel(0, CFG_HWC_ENA, dcrtc->base + LCD_SPU_DMA_CTRL0);
		spin_unlock_irq(&dcrtc->irq_lock);
		return 0;
	}

	spin_lock_irq(&dcrtc->irq_lock);
	para1 = readl_relaxed(dcrtc->base + LCD_SPU_SRAM_PARA1);
	armada_updatel(CFG_CSB_256x32, CFG_CSB_256x32 | CFG_PDWN256x32,
		       dcrtc->base + LCD_SPU_SRAM_PARA1);
	spin_unlock_irq(&dcrtc->irq_lock);

	/*
	 * Initialize the transparency if the SRAM was powered down.
	 * We must also reload the cursor data as well.
	 */
	if (!(para1 & CFG_CSB_256x32)) {
		armada_drm_crtc_cursor_tran(dcrtc->base);
		reload = true;
	}

	if (dcrtc->cursor_hw_sz != (h << 16 | w)) {
		spin_lock_irq(&dcrtc->irq_lock);
		armada_drm_crtc_disable_irq(dcrtc, DUMB_FRAMEDONE_ENA);
		dcrtc->cursor_update = false;
		armada_updatel(0, CFG_HWC_ENA, dcrtc->base + LCD_SPU_DMA_CTRL0);
		spin_unlock_irq(&dcrtc->irq_lock);
		reload = true;
	}
	if (reload) {
		struct armada_gem_object *obj = dcrtc->cursor_obj;
		uint32_t *pix;
		/* Set the top-left corner of the cursor image */
		pix = obj->addr;
		pix += yoff * s + xoff;
		armada_load_cursor_argb(dcrtc->base, pix, s, w, h);
	}

	/* Reload the cursor position, size and enable in the IRQ handler */
	spin_lock_irq(&dcrtc->irq_lock);
	dcrtc->cursor_hw_pos = yscr << 16 | xscr;
	dcrtc->cursor_hw_sz = h << 16 | w;
	dcrtc->cursor_update = true;
	armada_drm_crtc_enable_irq(dcrtc, DUMB_FRAMEDONE_ENA);
	spin_unlock_irq(&dcrtc->irq_lock);

	return 0;
}

static void cursor_update(void *data)
{
	armada_drm_crtc_cursor_update(data, true);
}

static int armada_drm_crtc_cursor_set(struct drm_crtc *crtc,
	struct drm_file *file, uint32_t handle, uint32_t w, uint32_t h)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	struct armada_gem_object *obj = NULL;
	int ret;

	/* If no cursor support, replicate drm's return value */
	if (!dcrtc->variant->has_spu_adv_reg)
		return -ENXIO;

	if (handle && w > 0 && h > 0) {
		/* maximum size is 64x32 or 32x64 */
		if (w > 64 || h > 64 || (w > 32 && h > 32))
			return -ENOMEM;

		obj = armada_gem_object_lookup(file, handle);
		if (!obj)
			return -ENOENT;

		/* Must be a kernel-mapped object */
		if (!obj->addr) {
			drm_gem_object_put_unlocked(&obj->obj);
			return -EINVAL;
		}

		if (obj->obj.size < w * h * 4) {
			DRM_ERROR("buffer is too small\n");
			drm_gem_object_put_unlocked(&obj->obj);
			return -ENOMEM;
		}
	}

	if (dcrtc->cursor_obj) {
		dcrtc->cursor_obj->update = NULL;
		dcrtc->cursor_obj->update_data = NULL;
		drm_gem_object_put_unlocked(&dcrtc->cursor_obj->obj);
	}
	dcrtc->cursor_obj = obj;
	dcrtc->cursor_w = w;
	dcrtc->cursor_h = h;
	ret = armada_drm_crtc_cursor_update(dcrtc, true);
	if (obj) {
		obj->update_data = dcrtc;
		obj->update = cursor_update;
	}

	return ret;
}

static int armada_drm_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	int ret;

	/* If no cursor support, replicate drm's return value */
	if (!dcrtc->variant->has_spu_adv_reg)
		return -EFAULT;

	dcrtc->cursor_x = x;
	dcrtc->cursor_y = y;
	ret = armada_drm_crtc_cursor_update(dcrtc, false);

	return ret;
}

static void armada_drm_crtc_destroy(struct drm_crtc *crtc)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	struct armada_private *priv = crtc->dev->dev_private;

	if (dcrtc->cursor_obj)
		drm_gem_object_put_unlocked(&dcrtc->cursor_obj->obj);

	priv->dcrtc[dcrtc->num] = NULL;
	drm_crtc_cleanup(&dcrtc->crtc);

	if (!IS_ERR(dcrtc->clk))
		clk_disable_unprepare(dcrtc->clk);

	writel_relaxed(0, dcrtc->base + LCD_SPU_IRQ_ENA);

	of_node_put(dcrtc->crtc.port);

	kfree(dcrtc);
}

/*
 * The mode_config lock is held here, to prevent races between this
 * and a mode_set.
 */
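/*
 * The flip is queued as plane work carrying the new framebuffer
 * addresses; the old framebuffer reference is dropped and any pending
 * vblank event is sent once that work has completed.
 */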
static int armada_drm_crtc_page_flip(struct drm_crtc *crtc,
	struct drm_framebuffer *fb, struct drm_pending_vblank_event *event,
	uint32_t page_flip_flags, struct drm_modeset_acquire_ctx *ctx)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	struct armada_plane_work *work;
	unsigned i;
	int ret;

	work = armada_drm_crtc_alloc_plane_work(dcrtc->crtc.primary);
	if (!work)
		return -ENOMEM;

	work->event = event;
	work->old_fb = dcrtc->crtc.primary->fb;

	i = armada_drm_crtc_calc_fb(fb, crtc->x, crtc->y, work->regs,
				    dcrtc->interlaced);
	armada_reg_queue_end(work->regs, i);

	/*
	 * Ensure that we hold a reference on the new framebuffer.
	 * This has to match the behaviour in mode_set.
	 */
	drm_framebuffer_get(fb);

	ret = armada_drm_plane_work_queue(dcrtc, work);
	if (ret) {
		/* Undo our reference above */
		drm_framebuffer_put(fb);
		kfree(work);
		return ret;
	}

	/*
	 * We are in transition to atomic modeset: update the atomic modeset
	 * state with the new framebuffer to keep the state consistent.
	 */
	drm_framebuffer_assign(&dcrtc->crtc.primary->state->fb, fb);

	/*
	 * Finally, if the display is blanked, we won't receive an
	 * interrupt, so complete it now.
	 */
	if (dpms_blanked(dcrtc->dpms))
		armada_drm_plane_work_run(dcrtc, dcrtc->crtc.primary);

	return 0;
}

static int
armada_drm_crtc_set_property(struct drm_crtc *crtc,
	struct drm_property *property, uint64_t val)
{
	struct armada_private *priv = crtc->dev->dev_private;
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	bool update_csc = false;

	if (property == priv->csc_yuv_prop) {
		dcrtc->csc_yuv_mode = val;
		update_csc = true;
	} else if (property == priv->csc_rgb_prop) {
		dcrtc->csc_rgb_mode = val;
		update_csc = true;
	}

	if (update_csc) {
		uint32_t val;

		val = dcrtc->spu_iopad_ctrl |
		      armada_drm_crtc_calculate_csc(dcrtc);
		writel_relaxed(val, dcrtc->base + LCD_SPU_IOPAD_CONTROL);
	}

	return 0;
}

/* These are called under the vbl_lock. */
static int armada_drm_crtc_enable_vblank(struct drm_crtc *crtc)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	unsigned long flags;

	spin_lock_irqsave(&dcrtc->irq_lock, flags);
	armada_drm_crtc_enable_irq(dcrtc, VSYNC_IRQ_ENA);
	spin_unlock_irqrestore(&dcrtc->irq_lock, flags);
	return 0;
}

static void armada_drm_crtc_disable_vblank(struct drm_crtc *crtc)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	unsigned long flags;

	spin_lock_irqsave(&dcrtc->irq_lock, flags);
	armada_drm_crtc_disable_irq(dcrtc, VSYNC_IRQ_ENA);
	spin_unlock_irqrestore(&dcrtc->irq_lock, flags);
}

static const struct drm_crtc_funcs armada_crtc_funcs = {
	.reset = drm_atomic_helper_crtc_reset,
	.cursor_set = armada_drm_crtc_cursor_set,
	.cursor_move = armada_drm_crtc_cursor_move,
	.destroy = armada_drm_crtc_destroy,
	.set_config = drm_crtc_helper_set_config,
	.page_flip = armada_drm_crtc_page_flip,
	.set_property = armada_drm_crtc_set_property,
	.atomic_duplicate_state = drm_atomic_helper_crtc_duplicate_state,
	.atomic_destroy_state = drm_atomic_helper_crtc_destroy_state,
	.enable_vblank = armada_drm_crtc_enable_vblank,
	.disable_vblank = armada_drm_crtc_disable_vblank,
};

static int armada_drm_plane_prepare_fb(struct drm_plane *plane,
	struct drm_plane_state *state)
{
	DRM_DEBUG_KMS("[PLANE:%d:%s] [FB:%d]\n",
		plane->base.id, plane->name,
		state->fb ? state->fb->base.id : 0);

	/*
	 * Take a reference on the new framebuffer - we want to
	 * hold on to it while the hardware is displaying it.
	 */
	if (state->fb)
		drm_framebuffer_get(state->fb);
	return 0;
}

static void armada_drm_plane_cleanup_fb(struct drm_plane *plane,
	struct drm_plane_state *old_state)
{
	DRM_DEBUG_KMS("[PLANE:%d:%s] [FB:%d]\n",
		plane->base.id, plane->name,
		old_state->fb ? old_state->fb->base.id : 0);

	if (old_state->fb)
		drm_framebuffer_put(old_state->fb);
}

static int armada_drm_plane_atomic_check(struct drm_plane *plane,
	struct drm_plane_state *state)
{
	if (state->fb && !WARN_ON(!state->crtc)) {
		struct drm_crtc *crtc = state->crtc;
		struct drm_crtc_state crtc_state = {
			.crtc = crtc,
			.enable = crtc->enabled,
			.mode = crtc->mode,
		};

		return drm_atomic_helper_check_plane_state(state, &crtc_state,
							   0, INT_MAX,
							   true, false);
	} else {
		state->visible = false;
	}
	return 0;
}

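/*
 * Build the register updates that bring the primary plane in line with
 * its new atomic state; returns the number of entries appended to regs.
 */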
Russell Kingecf25d22018-07-30 11:52:34 +01001101static unsigned int armada_drm_primary_update_state(
1102 struct drm_plane_state *state, struct armada_regs *regs)
Russell King950bc132017-07-08 10:22:37 +01001103{
1104 struct armada_plane *dplane = drm_to_armada_plane(state->plane);
1105 struct armada_crtc *dcrtc = drm_to_armada_crtc(state->crtc);
1106 struct armada_framebuffer *dfb = drm_fb_to_armada_fb(state->fb);
1107 bool was_disabled;
1108 unsigned int idx = 0;
1109 u32 val;
1110
1111 val = CFG_GRA_FMT(dfb->fmt) | CFG_GRA_MOD(dfb->mod);
1112 if (dfb->fmt > CFG_420)
1113 val |= CFG_PALETTE_ENA;
1114 if (state->visible)
1115 val |= CFG_GRA_ENA;
1116 if (drm_rect_width(&state->src) >> 16 != drm_rect_width(&state->dst))
1117 val |= CFG_GRA_HSMOOTH;
Russell Kingecf25d22018-07-30 11:52:34 +01001118 if (dcrtc->interlaced)
1119 val |= CFG_GRA_FTOGGLE;
Russell King950bc132017-07-08 10:22:37 +01001120
1121 was_disabled = !(dplane->state.ctrl0 & CFG_GRA_ENA);
1122 if (was_disabled)
1123 armada_reg_queue_mod(regs, idx,
1124 0, CFG_PDWN64x66, LCD_SPU_SRAM_PARA1);
1125
1126 dplane->state.ctrl0 = val;
Russell King02395202018-07-30 11:52:34 +01001127 dplane->state.src_hw = armada_rect_hw_fp(&state->src);
1128 dplane->state.dst_hw = armada_rect_hw(&state->dst);
1129 dplane->state.dst_yx = armada_rect_yx(&state->dst);
Russell King950bc132017-07-08 10:22:37 +01001130
Russell Kingecf25d22018-07-30 11:52:34 +01001131 idx += armada_drm_crtc_calc_fb(&dfb->fb, state->src.x1 >> 16,
1132 state->src.y1 >> 16, regs + idx,
1133 dcrtc->interlaced);
1134 armada_reg_queue_set(regs, idx, dplane->state.dst_yx,
1135 LCD_SPU_GRA_OVSA_HPXL_VLN);
1136 armada_reg_queue_set(regs, idx, dplane->state.src_hw,
1137 LCD_SPU_GRA_HPXL_VLN);
1138 armada_reg_queue_set(regs, idx, dplane->state.dst_hw,
1139 LCD_SPU_GZM_HPXL_VLN);
1140 armada_reg_queue_mod(regs, idx, dplane->state.ctrl0, CFG_GRAFORMAT |
1141 CFG_GRA_MOD(CFG_SWAPRB | CFG_SWAPUV |
1142 CFG_SWAPYU | CFG_YUV2RGB) |
1143 CFG_PALETTE_ENA | CFG_GRA_FTOGGLE |
1144 CFG_GRA_HSMOOTH | CFG_GRA_ENA,
1145 LCD_SPU_DMA_CTRL0);
Russell King950bc132017-07-08 10:22:37 +01001146
1147 dplane->state.vsync_update = !was_disabled;
1148 dplane->state.changed = true;
Russell Kingecf25d22018-07-30 11:52:34 +01001149
1150 return idx;
Russell King950bc132017-07-08 10:22:37 +01001151}
1152
Russell Kingc36045e2018-07-30 11:52:34 +01001153static void armada_drm_primary_plane_atomic_update(struct drm_plane *plane,
1154 struct drm_plane_state *old_state)
1155{
1156 struct drm_plane_state *state = plane->state;
1157 struct armada_crtc *dcrtc;
1158 struct armada_regs *regs;
1159
1160 DRM_DEBUG_KMS("[PLANE:%d:%s]\n", plane->base.id, plane->name);
1161
1162 if (!state->fb || WARN_ON(!state->crtc))
1163 return;
1164
1165 DRM_DEBUG_KMS("[PLANE:%d:%s] is on [CRTC:%d:%s] with [FB:%d] visible %u->%u\n",
1166 plane->base.id, plane->name,
1167 state->crtc->base.id, state->crtc->name,
1168 state->fb->base.id,
1169 old_state->visible, state->visible);
1170
1171 dcrtc = drm_to_armada_crtc(state->crtc);
1172 regs = dcrtc->regs + dcrtc->regs_idx;
1173
1174 dcrtc->regs_idx += armada_drm_primary_update_state(state, regs);
1175}
1176
1177static void armada_drm_primary_plane_atomic_disable(struct drm_plane *plane,
1178 struct drm_plane_state *old_state)
Russell King950bc132017-07-08 10:22:37 +01001179{
1180 struct armada_plane *dplane = drm_to_armada_plane(plane);
Russell Kingc36045e2018-07-30 11:52:34 +01001181 struct armada_crtc *dcrtc;
1182 struct armada_regs *regs;
1183 unsigned int idx = 0;
Russell King950bc132017-07-08 10:22:37 +01001184
Russell Kingc36045e2018-07-30 11:52:34 +01001185 DRM_DEBUG_KMS("[PLANE:%d:%s]\n", plane->base.id, plane->name);
Russell King950bc132017-07-08 10:22:37 +01001186
Russell Kingc36045e2018-07-30 11:52:34 +01001187 if (!old_state->crtc)
1188 return;
Russell King950bc132017-07-08 10:22:37 +01001189
Russell Kingc36045e2018-07-30 11:52:34 +01001190 DRM_DEBUG_KMS("[PLANE:%d:%s] was on [CRTC:%d:%s] with [FB:%d]\n",
1191 plane->base.id, plane->name,
1192 old_state->crtc->base.id, old_state->crtc->name,
1193 old_state->fb->base.id);
Russell King950bc132017-07-08 10:22:37 +01001194
Russell Kingc36045e2018-07-30 11:52:34 +01001195 dplane->state.ctrl0 &= ~CFG_GRA_ENA;
Russell King950bc132017-07-08 10:22:37 +01001196
Russell Kingc36045e2018-07-30 11:52:34 +01001197 dcrtc = drm_to_armada_crtc(old_state->crtc);
1198 regs = dcrtc->regs + dcrtc->regs_idx;
Russell King950bc132017-07-08 10:22:37 +01001199
Russell Kingc36045e2018-07-30 11:52:34 +01001200 /* Disable plane and power down most RAMs and FIFOs */
1201 armada_reg_queue_mod(regs, idx, 0, CFG_GRA_ENA, LCD_SPU_DMA_CTRL0);
1202 armada_reg_queue_mod(regs, idx, CFG_PDWN256x32 | CFG_PDWN256x24 |
1203 CFG_PDWN256x8 | CFG_PDWN32x32 | CFG_PDWN64x66,
1204 0, LCD_SPU_SRAM_PARA1);
Russell King950bc132017-07-08 10:22:37 +01001205
Russell Kingc36045e2018-07-30 11:52:34 +01001206 dcrtc->regs_idx += idx;
Russell King950bc132017-07-08 10:22:37 +01001207}
1208
Russell Kingc36045e2018-07-30 11:52:34 +01001209static const struct drm_plane_helper_funcs armada_primary_plane_helper_funcs = {
1210 .prepare_fb = armada_drm_plane_prepare_fb,
1211 .cleanup_fb = armada_drm_plane_cleanup_fb,
1212 .atomic_check = armada_drm_plane_atomic_check,
1213 .atomic_update = armada_drm_primary_plane_atomic_update,
1214 .atomic_disable = armada_drm_primary_plane_atomic_disable,
1215};
Russell Kingcfd1b632018-07-30 11:52:34 +01001216
Russell Kingf1f1bffc2017-07-08 10:16:42 +01001217int armada_drm_plane_disable(struct drm_plane *plane,
1218 struct drm_modeset_acquire_ctx *ctx)
Russell King28b30432017-07-08 10:16:40 +01001219{
1220 struct armada_plane *dplane = drm_to_armada_plane(plane);
Russell Kingf1f1bffc2017-07-08 10:16:42 +01001221 struct armada_crtc *dcrtc;
Russell King890ca8d2017-07-08 10:22:27 +01001222 struct armada_plane_work *work;
1223 unsigned int idx = 0;
Russell Kingd76dcc72017-07-08 10:16:47 +01001224 u32 sram_para1, enable_mask;
Russell King28b30432017-07-08 10:16:40 +01001225
Russell Kingf1f1bffc2017-07-08 10:16:42 +01001226 if (!plane->crtc)
1227 return 0;
1228
Russell King28b30432017-07-08 10:16:40 +01001229 /*
Russell King890ca8d2017-07-08 10:22:27 +01001230 * Arrange to power down most RAMs and FIFOs if this is the primary
1231 * plane, otherwise just the YUV FIFOs for the overlay plane.
Russell King28b30432017-07-08 10:16:40 +01001232 */
Russell King28b30432017-07-08 10:16:40 +01001233 if (plane->type == DRM_PLANE_TYPE_PRIMARY) {
1234 sram_para1 = CFG_PDWN256x32 | CFG_PDWN256x24 | CFG_PDWN256x8 |
1235 CFG_PDWN32x32 | CFG_PDWN64x66;
Russell Kingd76dcc72017-07-08 10:16:47 +01001236 enable_mask = CFG_GRA_ENA;
Russell King28b30432017-07-08 10:16:40 +01001237 } else {
Russell King28b30432017-07-08 10:16:40 +01001238 sram_para1 = CFG_PDWN16x66 | CFG_PDWN32x66;
Russell Kingd76dcc72017-07-08 10:16:47 +01001239 enable_mask = CFG_DMA_ENA;
Russell King28b30432017-07-08 10:16:40 +01001240 }
1241
Russell Kingd76dcc72017-07-08 10:16:47 +01001242 dplane->state.ctrl0 &= ~enable_mask;
1243
Russell Kingf1f1bffc2017-07-08 10:16:42 +01001244 dcrtc = drm_to_armada_crtc(plane->crtc);
1245
Russell King890ca8d2017-07-08 10:22:27 +01001246 /*
1247 * Try to disable the plane and drop our ref on the framebuffer
1248 * at the next frame update. If we fail for any reason, disable
1249 * the plane immediately.
1250 */
1251 work = &dplane->works[dplane->next_work];
1252 work->fn = armada_drm_crtc_complete_disable_work;
1253 work->cancel = armada_drm_crtc_complete_disable_work;
1254 work->old_fb = plane->fb;
1255
1256 armada_reg_queue_mod(work->regs, idx,
1257 0, enable_mask, LCD_SPU_DMA_CTRL0);
1258 armada_reg_queue_mod(work->regs, idx,
1259 sram_para1, 0, LCD_SPU_SRAM_PARA1);
1260 armada_reg_queue_end(work->regs, idx);
1261
Russell King28b30432017-07-08 10:16:40 +01001262 /* Wait for any preceding work to complete, but don't wedge */
1263 if (WARN_ON(!armada_drm_plane_work_wait(dplane, HZ)))
1264 armada_drm_plane_work_cancel(dcrtc, dplane);
1265
Russell King890ca8d2017-07-08 10:22:27 +01001266 if (armada_drm_plane_work_queue(dcrtc, work)) {
1267 work->fn(dcrtc, work);
1268 if (work->old_fb)
1269 drm_framebuffer_unreference(work->old_fb);
1270 }
1271
1272 dplane->next_work = !dplane->next_work;
Russell King28b30432017-07-08 10:16:40 +01001273
Russell King28b30432017-07-08 10:16:40 +01001274 return 0;
1275}
1276
Russell Kingde323012015-07-15 18:11:24 +01001277static const struct drm_plane_funcs armada_primary_plane_funcs = {
Russell Kingc36045e2018-07-30 11:52:34 +01001278 .update_plane = drm_plane_helper_update,
1279 .disable_plane = drm_plane_helper_disable,
Russell Kingde323012015-07-15 18:11:24 +01001280 .destroy = drm_primary_helper_destroy,
Russell Kingc36045e2018-07-30 11:52:34 +01001281 .reset = drm_atomic_helper_plane_reset,
1282 .atomic_duplicate_state = drm_atomic_helper_plane_duplicate_state,
1283 .atomic_destroy_state = drm_atomic_helper_plane_destroy_state,
Russell Kingde323012015-07-15 18:11:24 +01001284};
1285
Russell King5740d272015-07-15 18:11:25 +01001286int armada_drm_plane_init(struct armada_plane *plane)
1287{
Russell Kingd9241552017-07-08 10:22:25 +01001288 unsigned int i;
1289
1290 for (i = 0; i < ARRAY_SIZE(plane->works); i++)
1291 plane->works[i].plane = &plane->base;
1292
Russell King5740d272015-07-15 18:11:25 +01001293 init_waitqueue_head(&plane->frame_wait);
1294
1295 return 0;
1296}
1297
Arvind Yadavaaaf2f12017-07-01 15:30:15 +05301298static const struct drm_prop_enum_list armada_drm_csc_yuv_enum_list[] = {
Russell King96f60e32012-08-15 13:59:49 +01001299 { CSC_AUTO, "Auto" },
1300 { CSC_YUV_CCIR601, "CCIR601" },
1301 { CSC_YUV_CCIR709, "CCIR709" },
1302};
1303
Arvind Yadavaaaf2f12017-07-01 15:30:15 +05301304static const struct drm_prop_enum_list armada_drm_csc_rgb_enum_list[] = {
Russell King96f60e32012-08-15 13:59:49 +01001305 { CSC_AUTO, "Auto" },
1306 { CSC_RGB_COMPUTER, "Computer system" },
1307 { CSC_RGB_STUDIO, "Studio" },
1308};
1309
1310static int armada_drm_crtc_create_properties(struct drm_device *dev)
1311{
1312 struct armada_private *priv = dev->dev_private;
1313
1314 if (priv->csc_yuv_prop)
1315 return 0;
1316
1317 priv->csc_yuv_prop = drm_property_create_enum(dev, 0,
1318 "CSC_YUV", armada_drm_csc_yuv_enum_list,
1319 ARRAY_SIZE(armada_drm_csc_yuv_enum_list));
1320 priv->csc_rgb_prop = drm_property_create_enum(dev, 0,
1321 "CSC_RGB", armada_drm_csc_rgb_enum_list,
1322 ARRAY_SIZE(armada_drm_csc_rgb_enum_list));
1323
1324 if (!priv->csc_yuv_prop || !priv->csc_rgb_prop)
1325 return -ENOMEM;
1326
1327 return 0;
1328}
1329
static int armada_drm_crtc_create(struct drm_device *drm, struct device *dev,
	struct resource *res, int irq, const struct armada_variant *variant,
	struct device_node *port)
{
	struct armada_private *priv = drm->dev_private;
	struct armada_crtc *dcrtc;
	struct armada_plane *primary;
	void __iomem *base;
	int ret;

	ret = armada_drm_crtc_create_properties(drm);
	if (ret)
		return ret;

	base = devm_ioremap_resource(dev, res);
	if (IS_ERR(base))
		return PTR_ERR(base);

	dcrtc = kzalloc(sizeof(*dcrtc), GFP_KERNEL);
	if (!dcrtc) {
		DRM_ERROR("failed to allocate Armada crtc\n");
		return -ENOMEM;
	}

	if (dev != drm->dev)
		dev_set_drvdata(dev, dcrtc);

	dcrtc->variant = variant;
	dcrtc->base = base;
	dcrtc->num = drm->mode_config.num_crtc;
	dcrtc->clk = ERR_PTR(-EINVAL);
	dcrtc->csc_yuv_mode = CSC_AUTO;
	dcrtc->csc_rgb_mode = CSC_AUTO;
	dcrtc->cfg_dumb_ctrl = DUMB24_RGB888_0;
	dcrtc->spu_iopad_ctrl = CFG_VSCALE_LN_EN | CFG_IOPAD_DUMB24;
	spin_lock_init(&dcrtc->irq_lock);
	dcrtc->irq_ena = CLEAN_SPU_IRQ_ISR;

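	/*
	 * A note on the block below: judging by the CFG_PDWN* names, the
	 * LCD_SPU_SRAM_PARA1 write powers down the controller's internal
	 * SRAM banks until something actually needs them, and the
	 * LCD_SPU_DMA_CTRL1 value is a fixed default rather than anything
	 * derived from the current mode.
	 */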
	/* Initialize some registers which we don't otherwise set */
	writel_relaxed(0x00000001, dcrtc->base + LCD_CFG_SCLK_DIV);
	writel_relaxed(0x00000000, dcrtc->base + LCD_SPU_BLANKCOLOR);
	writel_relaxed(dcrtc->spu_iopad_ctrl,
		       dcrtc->base + LCD_SPU_IOPAD_CONTROL);
	writel_relaxed(0x00000000, dcrtc->base + LCD_SPU_SRAM_PARA0);
	writel_relaxed(CFG_PDWN256x32 | CFG_PDWN256x24 | CFG_PDWN256x8 |
		       CFG_PDWN32x32 | CFG_PDWN16x66 | CFG_PDWN32x66 |
		       CFG_PDWN64x66, dcrtc->base + LCD_SPU_SRAM_PARA1);
	writel_relaxed(0x2032ff81, dcrtc->base + LCD_SPU_DMA_CTRL1);
	writel_relaxed(dcrtc->irq_ena, dcrtc->base + LCD_SPU_IRQ_ENA);
	readl_relaxed(dcrtc->base + LCD_SPU_IRQ_ISR);
	writel_relaxed(0, dcrtc->base + LCD_SPU_IRQ_ISR);

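	/*
	 * Any latched interrupt status was cleared above (the ISR is read
	 * back and then written to zero), so the handler installed here
	 * does not start out with stale events pending.
	 */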
	ret = devm_request_irq(dev, irq, armada_drm_irq, 0, "armada_drm_crtc",
			       dcrtc);
	if (ret < 0)
		goto err_crtc;

	if (dcrtc->variant->init) {
		ret = dcrtc->variant->init(dcrtc, dev);
		if (ret)
			goto err_crtc;
	}

	/* Ensure AXI pipeline is enabled */
	armada_updatel(CFG_ARBFAST_ENA, 0, dcrtc->base + LCD_SPU_DMA_CTRL0);

	priv->dcrtc[dcrtc->num] = dcrtc;

	dcrtc->crtc.port = port;

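	/*
	 * Registration order: the primary plane is created first so it can
	 * be handed to drm_crtc_init_with_planes(), and the overlay plane is
	 * added last, restricted to this CRTC via the 1 << dcrtc->num mask
	 * passed to armada_overlay_plane_create().
	 */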
	primary = kzalloc(sizeof(*primary), GFP_KERNEL);
	if (!primary) {
		ret = -ENOMEM;
		goto err_crtc;
	}

	ret = armada_drm_plane_init(primary);
	if (ret) {
		kfree(primary);
		goto err_crtc;
	}

	drm_plane_helper_add(&primary->base,
			     &armada_primary_plane_helper_funcs);

	ret = drm_universal_plane_init(drm, &primary->base, 0,
				       &armada_primary_plane_funcs,
				       armada_primary_formats,
				       ARRAY_SIZE(armada_primary_formats),
				       NULL,
				       DRM_PLANE_TYPE_PRIMARY, NULL);
	if (ret) {
		kfree(primary);
		goto err_crtc;
	}

	ret = drm_crtc_init_with_planes(drm, &dcrtc->crtc, &primary->base, NULL,
					&armada_crtc_funcs, NULL);
	if (ret)
		goto err_crtc_init;

	drm_crtc_helper_add(&dcrtc->crtc, &armada_crtc_helper_funcs);

	drm_object_attach_property(&dcrtc->crtc.base, priv->csc_yuv_prop,
				   dcrtc->csc_yuv_mode);
	drm_object_attach_property(&dcrtc->crtc.base, priv->csc_rgb_prop,
				   dcrtc->csc_rgb_mode);

	return armada_overlay_plane_create(drm, 1 << dcrtc->num);

err_crtc_init:
	primary->base.funcs->destroy(&primary->base);
err_crtc:
	kfree(dcrtc);

	return ret;
}

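/*
 * Component bind callback: the variant is taken from the OF match data when
 * the device was instantiated from DT, or from the platform device ID table
 * on legacy (non-DT) platforms.  On the DT path the output port may either
 * be a direct "port" child or live under an intermediate "ports" node.
 */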
static int
armada_lcd_bind(struct device *dev, struct device *master, void *data)
{
	struct platform_device *pdev = to_platform_device(dev);
	struct drm_device *drm = data;
	struct resource *res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	int irq = platform_get_irq(pdev, 0);
	const struct armada_variant *variant;
	struct device_node *port = NULL;

	if (irq < 0)
		return irq;

	if (!dev->of_node) {
		const struct platform_device_id *id;

		id = platform_get_device_id(pdev);
		if (!id)
			return -ENXIO;

		variant = (const struct armada_variant *)id->driver_data;
	} else {
		const struct of_device_id *match;
		struct device_node *np, *parent = dev->of_node;

		match = of_match_device(dev->driver->of_match_table, dev);
		if (!match)
			return -ENXIO;

		np = of_get_child_by_name(parent, "ports");
		if (np)
			parent = np;
		port = of_get_child_by_name(parent, "port");
		of_node_put(np);
		if (!port) {
			dev_err(dev, "no port node found in %pOF\n", parent);
			return -ENXIO;
		}

		variant = match->data;
	}

	return armada_drm_crtc_create(drm, dev, res, irq, variant, port);
}

static void
armada_lcd_unbind(struct device *dev, struct device *master, void *data)
{
	struct armada_crtc *dcrtc = dev_get_drvdata(dev);

	armada_drm_crtc_destroy(&dcrtc->crtc);
}

static const struct component_ops armada_lcd_ops = {
	.bind = armada_lcd_bind,
	.unbind = armada_lcd_unbind,
};

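/*
 * The LCD controller is one component of the composite Armada DRM device:
 * probe/remove only (de)register with the component framework, and the real
 * setup and teardown happen in armada_lcd_bind()/armada_lcd_unbind() once
 * the master device has gathered all of its components.
 */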
static int armada_lcd_probe(struct platform_device *pdev)
{
	return component_add(&pdev->dev, &armada_lcd_ops);
}

static int armada_lcd_remove(struct platform_device *pdev)
{
	component_del(&pdev->dev, &armada_lcd_ops);
	return 0;
}

static const struct of_device_id armada_lcd_of_match[] = {
	{
		.compatible = "marvell,dove-lcd",
		.data = &armada510_ops,
	},
	{}
};
MODULE_DEVICE_TABLE(of, armada_lcd_of_match);
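/*
 * For reference only, a hypothetical sketch of the sort of DT node that the
 * armada_lcd_of_match[] table above matches -- the label, unit address,
 * register window, interrupt number and remote endpoint are illustrative,
 * not taken from any real board file:
 *
 *	lcd0: lcd-controller@820000 {
 *		compatible = "marvell,dove-lcd";
 *		reg = <0x820000 0x1000>;
 *		interrupts = <47>;
 *
 *		port {
 *			lcd0_out: endpoint {
 *				remote-endpoint = <&hdmi_in>;
 *			};
 *		};
 *	};
 *
 * armada_lcd_bind() also accepts the port wrapped in a "ports" node.
 */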

static const struct platform_device_id armada_lcd_platform_ids[] = {
	{
		.name = "armada-lcd",
		.driver_data = (unsigned long)&armada510_ops,
	}, {
		.name = "armada-510-lcd",
		.driver_data = (unsigned long)&armada510_ops,
	},
	{ },
};
MODULE_DEVICE_TABLE(platform, armada_lcd_platform_ids);

struct platform_driver armada_lcd_platform_driver = {
	.probe = armada_lcd_probe,
	.remove = armada_lcd_remove,
	.driver = {
		.name = "armada-lcd",
		.owner = THIS_MODULE,
		.of_match_table = armada_lcd_of_match,
	},
	.id_table = armada_lcd_platform_ids,
};