/*
 * Copyright (C) 2012 Russell King
 * Rewritten from the dovefb driver, and Armada510 manuals.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
#include <linux/clk.h>
#include <linux/component.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>
#include <drm/drmP.h>
#include <drm/drm_crtc_helper.h>
#include <drm/drm_plane_helper.h>
#include <drm/drm_atomic_helper.h>
#include "armada_crtc.h"
#include "armada_drm.h"
#include "armada_fb.h"
#include "armada_gem.h"
#include "armada_hw.h"
#include "armada_trace.h"

enum csc_mode {
	CSC_AUTO = 0,
	CSC_YUV_CCIR601 = 1,
	CSC_YUV_CCIR709 = 2,
	CSC_RGB_COMPUTER = 1,
	CSC_RGB_STUDIO = 2,
};

static const uint32_t armada_primary_formats[] = {
	DRM_FORMAT_UYVY,
	DRM_FORMAT_YUYV,
	DRM_FORMAT_VYUY,
	DRM_FORMAT_YVYU,
	DRM_FORMAT_ARGB8888,
	DRM_FORMAT_ABGR8888,
	DRM_FORMAT_XRGB8888,
	DRM_FORMAT_XBGR8888,
	DRM_FORMAT_RGB888,
	DRM_FORMAT_BGR888,
	DRM_FORMAT_ARGB1555,
	DRM_FORMAT_ABGR1555,
	DRM_FORMAT_RGB565,
	DRM_FORMAT_BGR565,
};

/*
 * A note about interlacing.  Let's consider HDMI 1920x1080i.
 * The timing parameters we have from X are:
 *  Hact HsyA HsyI Htot  Vact VsyA VsyI Vtot
 *  1920 2448 2492 2640  1080 1084 1094 1125
 * Which get translated to:
 *  Hact HsyA HsyI Htot  Vact VsyA VsyI Vtot
 *  1920 2448 2492 2640   540  542  547  562
 *
 * This is how it is defined by CEA-861-D - line and pixel numbers are
 * referenced to the rising edge of VSYNC and HSYNC.  Total clocks per
 * line: 2640.  The odd frame, the first active line is at line 21, and
 * the even frame, the first active line is 584.
 *
 * LN:    560     561     562     563             567     568     569
 * DE:    ~~~|____________________________//__________________________
 * HSYNC: ____|~|_____|~|_____|~|_____|~|_//__|~|_____|~|_____|~|_____
 * VSYNC: _________________________|~~~~~~//~~~~~~~~~~~~~~~|__________
 *  22 blanking lines.  VSYNC at 1320 (referenced to the HSYNC rising edge).
 *
 * LN:    1123    1124    1125       1               5       6       7
 * DE:    ~~~|____________________________//__________________________
 * HSYNC: ____|~|_____|~|_____|~|_____|~|_//__|~|_____|~|_____|~|_____
 * VSYNC: ____________________|~~~~~~~~~~~//~~~~~~~~~~|_______________
 *  23 blanking lines
 *
 * The Armada LCD Controller line and pixel numbers are, like X timings,
 * referenced to the top left of the active frame.
 *
 * So, translating these to our LCD controller:
 *  Odd frame, 563 total lines, VSYNC at line 543-548, pixel 1128.
 *  Even frame, 562 total lines, VSYNC at line 542-547, pixel 2448.
 * Note: Vsync front porch remains constant!
 *
 * if (odd_frame) {
 *   vtotal = mode->crtc_vtotal + 1;
 *   vbackporch = mode->crtc_vsync_start - mode->crtc_vdisplay + 1;
 *   vhorizpos = mode->crtc_hsync_start - mode->crtc_htotal / 2
 * } else {
 *   vtotal = mode->crtc_vtotal;
 *   vbackporch = mode->crtc_vsync_start - mode->crtc_vdisplay;
 *   vhorizpos = mode->crtc_hsync_start;
 * }
 * vfrontporch = mode->crtc_vtotal - mode->crtc_vsync_end;
 *
 * So, we need to reprogram these registers on each vsync event:
 *  LCD_SPU_V_PORCH, LCD_SPU_ADV_REG, LCD_SPUT_V_H_TOTAL
 *
 * Note: we do not use the frame done interrupts because these appear
 * to happen too early, and lead to jitter on the display (presumably
 * they occur at the end of the last active line, before the vsync back
 * porch, which we're reprogramming.)
 */

void
armada_drm_crtc_update_regs(struct armada_crtc *dcrtc, struct armada_regs *regs)
{
	while (regs->offset != ~0) {
		void __iomem *reg = dcrtc->base + regs->offset;
		uint32_t val;

		val = regs->mask;
		if (val != 0)
			val &= readl_relaxed(reg);
		writel_relaxed(val | regs->val, reg);
		++regs;
	}
}
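
/*
 * Illustrative sketch only (not part of the driver): callers of
 * armada_drm_crtc_update_regs() build a short armada_regs[] list with
 * the armada_reg_queue_*() helpers used throughout this file and
 * terminate it before handing it over, e.g.:
 *
 *	struct armada_regs regs[3];
 *	unsigned i = 0;
 *
 *	armada_reg_queue_set(regs, i, addr, LCD_CFG_GRA_START_ADDR0);
 *	armada_reg_queue_mod(regs, i, pitch, 0xffff, LCD_CFG_GRA_PITCH);
 *	armada_reg_queue_end(regs, i);
 *	armada_drm_crtc_update_regs(dcrtc, regs);
 *
 * As the loop above shows, an entry with a zero mask is written out
 * directly, while a non-zero mask keeps the masked bits of the current
 * register contents and ORs in the new value.
 */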

#define dpms_blanked(dpms)	((dpms) != DRM_MODE_DPMS_ON)

static void armada_drm_crtc_update(struct armada_crtc *dcrtc)
{
	uint32_t dumb_ctrl;

	dumb_ctrl = dcrtc->cfg_dumb_ctrl;

	if (!dpms_blanked(dcrtc->dpms))
		dumb_ctrl |= CFG_DUMB_ENA;

	/*
	 * When the dumb interface isn't in DUMB24_RGB888_0 mode, it might
	 * be using SPI or GPIO.  If we set this to DUMB_BLANK, we will
	 * force LCD_D[23:0] to output blank color, overriding the GPIO or
	 * SPI usage.  So leave it as-is unless in DUMB24_RGB888_0 mode.
	 */
	if (dpms_blanked(dcrtc->dpms) &&
	    (dumb_ctrl & DUMB_MASK) == DUMB24_RGB888_0) {
		dumb_ctrl &= ~DUMB_MASK;
		dumb_ctrl |= DUMB_BLANK;
	}

	/*
	 * The documentation doesn't indicate what the normal state of
	 * the sync signals is.  Sebastian Hesselbarth kindly probed
	 * these signals on his board to determine their state.
	 *
	 * The non-inverted state of the sync signals is active high.
	 * Setting these bits makes the appropriate signal active low.
	 */
149 if (dcrtc->crtc.mode.flags & DRM_MODE_FLAG_NCSYNC)
150 dumb_ctrl |= CFG_INV_CSYNC;
151 if (dcrtc->crtc.mode.flags & DRM_MODE_FLAG_NHSYNC)
152 dumb_ctrl |= CFG_INV_HSYNC;
153 if (dcrtc->crtc.mode.flags & DRM_MODE_FLAG_NVSYNC)
154 dumb_ctrl |= CFG_INV_VSYNC;
155
156 if (dcrtc->dumb_ctrl != dumb_ctrl) {
157 dcrtc->dumb_ctrl = dumb_ctrl;
158 writel_relaxed(dumb_ctrl, dcrtc->base + LCD_SPU_DUMB_CTRL);
159 }
160}
161
Russell Kingf0b24872016-08-16 22:09:11 +0100162void armada_drm_plane_calc_addrs(u32 *addrs, struct drm_framebuffer *fb,
163 int x, int y)
164{
Russell Kingd6a48962017-12-08 12:16:22 +0000165 const struct drm_format_info *format = fb->format;
166 unsigned int num_planes = format->num_planes;
Russell Kingf0b24872016-08-16 22:09:11 +0100167 u32 addr = drm_fb_obj(fb)->dev_addr;
Russell Kingf0b24872016-08-16 22:09:11 +0100168 int i;
169
170 if (num_planes > 3)
171 num_planes = 3;
172
Russell Kingde0ea9a2017-12-08 12:16:22 +0000173 addrs[0] = addr + fb->offsets[0] + y * fb->pitches[0] +
174 x * format->cpp[0];
175
176 y /= format->vsub;
177 x /= format->hsub;
178
179 for (i = 1; i < num_planes; i++)
Russell Kingf0b24872016-08-16 22:09:11 +0100180 addrs[i] = addr + fb->offsets[i] + y * fb->pitches[i] +
Russell Kingd6a48962017-12-08 12:16:22 +0000181 x * format->cpp[i];
Russell Kingf0b24872016-08-16 22:09:11 +0100182 for (; i < 3; i++)
183 addrs[i] = 0;
184}
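
/*
 * Worked example (illustrative only): for a hypothetical two-plane
 * framebuffer with 2x2 chroma subsampling (format->hsub = vsub = 2)
 * and x = 64, y = 32, armada_drm_plane_calc_addrs() yields
 *
 *	addrs[0] = dev_addr + offsets[0] + 32 * pitches[0] + 64 * cpp[0];
 *	addrs[1] = dev_addr + offsets[1] + 16 * pitches[1] + 32 * cpp[1];
 *	addrs[2] = 0;
 *
 * i.e. the x/y origin is divided by the subsampling factors before the
 * chroma plane address is computed.
 */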

static unsigned armada_drm_crtc_calc_fb(struct drm_framebuffer *fb,
	int x, int y, struct armada_regs *regs, bool interlaced)
{
	unsigned pitch = fb->pitches[0];
	u32 addrs[3], addr_odd, addr_even;
	unsigned i = 0;

	DRM_DEBUG_DRIVER("pitch %u x %d y %d bpp %d\n",
		pitch, x, y, fb->format->cpp[0] * 8);

	armada_drm_plane_calc_addrs(addrs, fb, x, y);

	addr_odd = addr_even = addrs[0];

	if (interlaced) {
		addr_even += pitch;
		pitch *= 2;
	}

	/* write offset, base, and pitch */
	armada_reg_queue_set(regs, i, addr_odd, LCD_CFG_GRA_START_ADDR0);
	armada_reg_queue_set(regs, i, addr_even, LCD_CFG_GRA_START_ADDR1);
	armada_reg_queue_mod(regs, i, pitch, 0xffff, LCD_CFG_GRA_PITCH);

	return i;
}

static void armada_drm_plane_work_call(struct armada_crtc *dcrtc,
	struct armada_plane_work *work,
	void (*fn)(struct armada_crtc *, struct armada_plane_work *))
{
	struct armada_plane *dplane = drm_to_armada_plane(work->plane);
	struct drm_pending_vblank_event *event;
	struct drm_framebuffer *fb;

	if (fn)
		fn(dcrtc, work);
	drm_crtc_vblank_put(&dcrtc->crtc);

	event = work->event;
	fb = work->old_fb;
	if (event || fb) {
		struct drm_device *dev = dcrtc->crtc.dev;
		unsigned long flags;

		spin_lock_irqsave(&dev->event_lock, flags);
		if (event)
			drm_crtc_send_vblank_event(&dcrtc->crtc, event);
		if (fb)
			__armada_drm_queue_unref_work(dev, fb);
		spin_unlock_irqrestore(&dev->event_lock, flags);
	}

	if (work->need_kfree)
		kfree(work);

	wake_up(&dplane->frame_wait);
}

static void armada_drm_plane_work_run(struct armada_crtc *dcrtc,
	struct drm_plane *plane)
{
	struct armada_plane *dplane = drm_to_armada_plane(plane);
	struct armada_plane_work *work = xchg(&dplane->work, NULL);

	/* Handle any pending frame work. */
	if (work)
		armada_drm_plane_work_call(dcrtc, work, work->fn);
}

int armada_drm_plane_work_queue(struct armada_crtc *dcrtc,
	struct armada_plane_work *work)
{
	struct armada_plane *plane = drm_to_armada_plane(work->plane);
	int ret;

	ret = drm_crtc_vblank_get(&dcrtc->crtc);
	if (ret)
		return ret;

	ret = cmpxchg(&plane->work, NULL, work) ? -EBUSY : 0;
	if (ret)
		drm_crtc_vblank_put(&dcrtc->crtc);

	return ret;
}

int armada_drm_plane_work_wait(struct armada_plane *plane, long timeout)
{
	return wait_event_timeout(plane->frame_wait, !plane->work, timeout);
}
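
/*
 * Illustrative usage sketch (not driver code): a caller that wants a
 * register update applied at the next vblank typically waits for any
 * pending work first, then queues its own item:
 *
 *	struct armada_plane_work *work;
 *
 *	work = armada_drm_crtc_alloc_plane_work(dcrtc->crtc.primary);
 *	if (!work)
 *		return -ENOMEM;
 *	armada_drm_plane_work_wait(drm_to_armada_plane(work->plane),
 *				   MAX_SCHEDULE_TIMEOUT);
 *	if (armada_drm_plane_work_queue(dcrtc, work))
 *		kfree(work);
 *
 * A non-zero return from armada_drm_plane_work_queue() means either the
 * vblank reference could not be taken or another work item is already
 * pending (-EBUSY).  While a work item is queued it holds a vblank
 * reference; armada_drm_plane_work_call() drops it when the work runs
 * or is cancelled.
 */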

void armada_drm_plane_work_cancel(struct armada_crtc *dcrtc,
	struct armada_plane *dplane)
{
	struct armada_plane_work *work = xchg(&dplane->work, NULL);

	if (work)
		armada_drm_plane_work_call(dcrtc, work, work->cancel);
}

static void armada_drm_crtc_complete_frame_work(struct armada_crtc *dcrtc,
	struct armada_plane_work *work)
{
	unsigned long flags;

	spin_lock_irqsave(&dcrtc->irq_lock, flags);
	armada_drm_crtc_update_regs(dcrtc, work->regs);
	spin_unlock_irqrestore(&dcrtc->irq_lock, flags);
}

static void armada_drm_crtc_complete_disable_work(struct armada_crtc *dcrtc,
	struct armada_plane_work *work)
{
	unsigned long flags;

	if (dcrtc->plane == work->plane)
		dcrtc->plane = NULL;

	spin_lock_irqsave(&dcrtc->irq_lock, flags);
	armada_drm_crtc_update_regs(dcrtc, work->regs);
	spin_unlock_irqrestore(&dcrtc->irq_lock, flags);
}

static struct armada_plane_work *
armada_drm_crtc_alloc_plane_work(struct drm_plane *plane)
{
	struct armada_plane_work *work;
	int i = 0;

	work = kzalloc(sizeof(*work), GFP_KERNEL);
	if (!work)
		return NULL;

	work->plane = plane;
	work->fn = armada_drm_crtc_complete_frame_work;
	work->need_kfree = true;
	armada_reg_queue_end(work->regs, i);

	return work;
}

static void armada_drm_crtc_finish_fb(struct armada_crtc *dcrtc,
	struct drm_framebuffer *fb, bool force)
{
	struct armada_plane_work *work;

	if (!fb)
		return;

	if (force) {
		/* Display is disabled, so just drop the old fb */
		drm_framebuffer_put(fb);
		return;
	}

	work = armada_drm_crtc_alloc_plane_work(dcrtc->crtc.primary);
	if (work) {
		work->old_fb = fb;

		if (armada_drm_plane_work_queue(dcrtc, work) == 0)
			return;

		kfree(work);
	}

	/*
	 * Oops - just drop the reference immediately and hope for
	 * the best.  The worst that will happen is the buffer gets
	 * reused before it has finished being displayed.
	 */
	drm_framebuffer_put(fb);
}

static void armada_drm_vblank_off(struct armada_crtc *dcrtc)
{
	/*
	 * Tell the DRM core that vblank IRQs aren't going to happen for
	 * a while.  This cleans up any pending vblank events for us.
	 */
	drm_crtc_vblank_off(&dcrtc->crtc);
	armada_drm_plane_work_run(dcrtc, dcrtc->crtc.primary);
}

/* The mode_config.mutex will be held for this call */
static void armada_drm_crtc_dpms(struct drm_crtc *crtc, int dpms)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);

	if (dpms_blanked(dcrtc->dpms) != dpms_blanked(dpms)) {
		if (dpms_blanked(dpms))
			armada_drm_vblank_off(dcrtc);
		else if (!IS_ERR(dcrtc->clk))
			WARN_ON(clk_prepare_enable(dcrtc->clk));
		dcrtc->dpms = dpms;
		armada_drm_crtc_update(dcrtc);
		if (!dpms_blanked(dpms))
			drm_crtc_vblank_on(&dcrtc->crtc);
		else if (!IS_ERR(dcrtc->clk))
			clk_disable_unprepare(dcrtc->clk);
	} else if (dcrtc->dpms != dpms) {
		dcrtc->dpms = dpms;
	}
}

/*
 * Prepare for a mode set.  Turn off overlay to ensure that we don't end
 * up with the overlay size being bigger than the active screen size.
 * We rely upon X refreshing this state after the mode set has completed.
 *
 * The mode_config.mutex will be held for this call
 */
static void armada_drm_crtc_prepare(struct drm_crtc *crtc)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	struct drm_plane *plane;

	/*
	 * If we have an overlay plane associated with this CRTC, disable
	 * it before the modeset to avoid its coordinates being outside
	 * the new mode parameters.
	 */
	plane = dcrtc->plane;
	if (plane) {
		drm_plane_force_disable(plane);
		WARN_ON(!armada_drm_plane_work_wait(drm_to_armada_plane(plane),
						    HZ));
	}
}

/* The mode_config.mutex will be held for this call */
static void armada_drm_crtc_commit(struct drm_crtc *crtc)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);

	if (dcrtc->dpms != DRM_MODE_DPMS_ON) {
		dcrtc->dpms = DRM_MODE_DPMS_ON;
		armada_drm_crtc_update(dcrtc);
	}
}

/* The mode_config.mutex will be held for this call */
static bool armada_drm_crtc_mode_fixup(struct drm_crtc *crtc,
	const struct drm_display_mode *mode, struct drm_display_mode *adj)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	int ret;

	/* We can't do interlaced modes if we don't have the SPU_ADV_REG */
	if (!dcrtc->variant->has_spu_adv_reg &&
	    adj->flags & DRM_MODE_FLAG_INTERLACE)
		return false;

	/* Check whether the display mode is possible */
	ret = dcrtc->variant->compute_clock(dcrtc, adj, NULL);
	if (ret)
		return false;

	return true;
}

/* These are locked by dev->vbl_lock */
static void armada_drm_crtc_disable_irq(struct armada_crtc *dcrtc, u32 mask)
{
	if (dcrtc->irq_ena & mask) {
		dcrtc->irq_ena &= ~mask;
		writel(dcrtc->irq_ena, dcrtc->base + LCD_SPU_IRQ_ENA);
	}
}

static void armada_drm_crtc_enable_irq(struct armada_crtc *dcrtc, u32 mask)
{
	if ((dcrtc->irq_ena & mask) != mask) {
		dcrtc->irq_ena |= mask;
		writel(dcrtc->irq_ena, dcrtc->base + LCD_SPU_IRQ_ENA);
		if (readl_relaxed(dcrtc->base + LCD_SPU_IRQ_ISR) & mask)
			writel(0, dcrtc->base + LCD_SPU_IRQ_ISR);
	}
}

static void armada_drm_crtc_irq(struct armada_crtc *dcrtc, u32 stat)
{
	void __iomem *base = dcrtc->base;
	struct drm_plane *ovl_plane;

	if (stat & DMA_FF_UNDERFLOW)
		DRM_ERROR("video underflow on crtc %u\n", dcrtc->num);
	if (stat & GRA_FF_UNDERFLOW)
		DRM_ERROR("graphics underflow on crtc %u\n", dcrtc->num);

	if (stat & VSYNC_IRQ)
		drm_crtc_handle_vblank(&dcrtc->crtc);

	ovl_plane = dcrtc->plane;
	if (ovl_plane)
		armada_drm_plane_work_run(dcrtc, ovl_plane);

	spin_lock(&dcrtc->irq_lock);
	if (stat & GRA_FRAME_IRQ && dcrtc->interlaced) {
		int i = stat & GRA_FRAME_IRQ0 ? 0 : 1;
		uint32_t val;

		writel_relaxed(dcrtc->v[i].spu_v_porch, base + LCD_SPU_V_PORCH);
		writel_relaxed(dcrtc->v[i].spu_v_h_total,
			       base + LCD_SPUT_V_H_TOTAL);

		val = readl_relaxed(base + LCD_SPU_ADV_REG);
		val &= ~(ADV_VSYNC_L_OFF | ADV_VSYNC_H_OFF | ADV_VSYNCOFFEN);
		val |= dcrtc->v[i].spu_adv_reg;
		writel_relaxed(val, base + LCD_SPU_ADV_REG);
	}

	if (stat & DUMB_FRAMEDONE && dcrtc->cursor_update) {
		writel_relaxed(dcrtc->cursor_hw_pos,
			       base + LCD_SPU_HWC_OVSA_HPXL_VLN);
		writel_relaxed(dcrtc->cursor_hw_sz,
			       base + LCD_SPU_HWC_HPXL_VLN);
		armada_updatel(CFG_HWC_ENA,
			       CFG_HWC_ENA | CFG_HWC_1BITMOD | CFG_HWC_1BITENA,
			       base + LCD_SPU_DMA_CTRL0);
		dcrtc->cursor_update = false;
		armada_drm_crtc_disable_irq(dcrtc, DUMB_FRAMEDONE_ENA);
	}

	spin_unlock(&dcrtc->irq_lock);

	if (stat & GRA_FRAME_IRQ)
		armada_drm_plane_work_run(dcrtc, dcrtc->crtc.primary);
}

static irqreturn_t armada_drm_irq(int irq, void *arg)
{
	struct armada_crtc *dcrtc = arg;
	u32 v, stat = readl_relaxed(dcrtc->base + LCD_SPU_IRQ_ISR);

	/*
	 * Reading the ISR appears to clear bits provided CLEAN_SPU_IRQ_ISR
	 * is set.  Writing has some other effect to acknowledge the IRQ -
	 * without this, we only get a single IRQ.
	 */
	writel_relaxed(0, dcrtc->base + LCD_SPU_IRQ_ISR);

	trace_armada_drm_irq(&dcrtc->crtc, stat);

	/* Mask out those interrupts we haven't enabled */
	v = stat & dcrtc->irq_ena;

	if (v & (VSYNC_IRQ|GRA_FRAME_IRQ|DUMB_FRAMEDONE)) {
		armada_drm_crtc_irq(dcrtc, stat);
		return IRQ_HANDLED;
	}
	return IRQ_NONE;
}

static uint32_t armada_drm_crtc_calculate_csc(struct armada_crtc *dcrtc)
{
	struct drm_display_mode *adj = &dcrtc->crtc.mode;
	uint32_t val = 0;

	if (dcrtc->csc_yuv_mode == CSC_YUV_CCIR709)
		val |= CFG_CSC_YUV_CCIR709;
	if (dcrtc->csc_rgb_mode == CSC_RGB_STUDIO)
		val |= CFG_CSC_RGB_STUDIO;

	/*
	 * In auto mode, set the colorimetry, based upon the HDMI spec.
	 * 1280x720p, 1920x1080p and 1920x1080i use ITU709, others use
	 * ITU601.  It may be more appropriate to set this depending on
	 * the source - but what if the graphic frame is YUV and the
	 * video frame is RGB?
	 */
	if ((adj->hdisplay == 1280 && adj->vdisplay == 720 &&
	     !(adj->flags & DRM_MODE_FLAG_INTERLACE)) ||
	    (adj->hdisplay == 1920 && adj->vdisplay == 1080)) {
		if (dcrtc->csc_yuv_mode == CSC_AUTO)
			val |= CFG_CSC_YUV_CCIR709;
	}

	/*
	 * We assume we're connected to a TV-like device, so the YUV->RGB
	 * conversion should produce a limited range.  We should set this
	 * depending on the connectors attached to this CRTC, and what
	 * kind of device they report being connected.
	 */
	if (dcrtc->csc_rgb_mode == CSC_AUTO)
		val |= CFG_CSC_RGB_STUDIO;

	return val;
}
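
/*
 * For example (illustrative only): with both CSC properties left at
 * CSC_AUTO and a 1920x1080 adjusted mode, the function above returns
 * CFG_CSC_YUV_CCIR709 | CFG_CSC_RGB_STUDIO, i.e. ITU-R BT.709 YUV
 * colorimetry and studio (limited range) RGB output.
 */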

static void armada_drm_gra_plane_regs(struct armada_regs *regs,
	struct drm_framebuffer *fb, struct armada_plane_state *state,
	int x, int y, bool interlaced)
{
	unsigned int i;
	u32 ctrl0;

	i = armada_drm_crtc_calc_fb(fb, x, y, regs, interlaced);
	armada_reg_queue_set(regs, i, state->dst_yx, LCD_SPU_GRA_OVSA_HPXL_VLN);
	armada_reg_queue_set(regs, i, state->src_hw, LCD_SPU_GRA_HPXL_VLN);
	armada_reg_queue_set(regs, i, state->dst_hw, LCD_SPU_GZM_HPXL_VLN);

	ctrl0 = state->ctrl0;
	if (interlaced)
		ctrl0 |= CFG_GRA_FTOGGLE;

	armada_reg_queue_mod(regs, i, ctrl0, CFG_GRAFORMAT |
			     CFG_GRA_MOD(CFG_SWAPRB | CFG_SWAPUV |
					 CFG_SWAPYU | CFG_YUV2RGB) |
			     CFG_PALETTE_ENA | CFG_GRA_FTOGGLE |
			     CFG_GRA_HSMOOTH | CFG_GRA_ENA,
			     LCD_SPU_DMA_CTRL0);
	armada_reg_queue_end(regs, i);
}

static void armada_drm_primary_set(struct drm_crtc *crtc,
	struct drm_plane *plane, int x, int y)
{
	struct armada_plane_state *state = &drm_to_armada_plane(plane)->state;
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	struct armada_regs regs[8];
	bool interlaced = dcrtc->interlaced;

	armada_drm_gra_plane_regs(regs, plane->fb, state, x, y, interlaced);
	armada_drm_crtc_update_regs(dcrtc, regs);
}

/* The mode_config.mutex will be held for this call */
static int armada_drm_crtc_mode_set(struct drm_crtc *crtc,
	struct drm_display_mode *mode, struct drm_display_mode *adj,
	int x, int y, struct drm_framebuffer *old_fb)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	struct armada_regs regs[17];
	uint32_t lm, rm, tm, bm, val, sclk;
	unsigned long flags;
	unsigned i;
	bool interlaced;

	drm_framebuffer_get(crtc->primary->fb);

	interlaced = !!(adj->flags & DRM_MODE_FLAG_INTERLACE);

	val = CFG_GRA_ENA;
	val |= CFG_GRA_FMT(drm_fb_to_armada_fb(dcrtc->crtc.primary->fb)->fmt);
	val |= CFG_GRA_MOD(drm_fb_to_armada_fb(dcrtc->crtc.primary->fb)->mod);

	if (drm_fb_to_armada_fb(dcrtc->crtc.primary->fb)->fmt > CFG_420)
		val |= CFG_PALETTE_ENA;

	drm_to_armada_plane(crtc->primary)->state.ctrl0 = val;
	drm_to_armada_plane(crtc->primary)->state.src_hw =
	drm_to_armada_plane(crtc->primary)->state.dst_hw =
		adj->crtc_vdisplay << 16 | adj->crtc_hdisplay;
	drm_to_armada_plane(crtc->primary)->state.dst_yx = 0;

	i = 0;
	rm = adj->crtc_hsync_start - adj->crtc_hdisplay;
	lm = adj->crtc_htotal - adj->crtc_hsync_end;
	bm = adj->crtc_vsync_start - adj->crtc_vdisplay;
	tm = adj->crtc_vtotal - adj->crtc_vsync_end;

	DRM_DEBUG_DRIVER("H: %d %d %d %d lm %d rm %d\n",
		adj->crtc_hdisplay,
		adj->crtc_hsync_start,
		adj->crtc_hsync_end,
		adj->crtc_htotal, lm, rm);
	DRM_DEBUG_DRIVER("V: %d %d %d %d tm %d bm %d\n",
		adj->crtc_vdisplay,
		adj->crtc_vsync_start,
		adj->crtc_vsync_end,
		adj->crtc_vtotal, tm, bm);

	/* Wait for pending flips to complete */
	armada_drm_plane_work_wait(drm_to_armada_plane(dcrtc->crtc.primary),
				   MAX_SCHEDULE_TIMEOUT);

	drm_crtc_vblank_off(crtc);

	val = dcrtc->dumb_ctrl & ~CFG_DUMB_ENA;
	if (val != dcrtc->dumb_ctrl) {
		dcrtc->dumb_ctrl = val;
		writel_relaxed(val, dcrtc->base + LCD_SPU_DUMB_CTRL);
	}

	/*
	 * If we are blanked, we would have disabled the clock.  Re-enable
	 * it so that compute_clock() does the right thing.
	 */
	if (!IS_ERR(dcrtc->clk) && dpms_blanked(dcrtc->dpms))
		WARN_ON(clk_prepare_enable(dcrtc->clk));

	/* Now compute the divider for real */
	dcrtc->variant->compute_clock(dcrtc, adj, &sclk);

	armada_reg_queue_set(regs, i, sclk, LCD_CFG_SCLK_DIV);

	if (interlaced ^ dcrtc->interlaced) {
		if (adj->flags & DRM_MODE_FLAG_INTERLACE)
			drm_crtc_vblank_get(&dcrtc->crtc);
		else
			drm_crtc_vblank_put(&dcrtc->crtc);
		dcrtc->interlaced = interlaced;
	}

	spin_lock_irqsave(&dcrtc->irq_lock, flags);

	/* Ensure graphic fifo is enabled */
	armada_reg_queue_mod(regs, i, 0, CFG_PDWN64x66, LCD_SPU_SRAM_PARA1);

	/* Even interlaced/progressive frame */
	dcrtc->v[1].spu_v_h_total = adj->crtc_vtotal << 16 |
				    adj->crtc_htotal;
	dcrtc->v[1].spu_v_porch = tm << 16 | bm;
	val = adj->crtc_hsync_start;
	dcrtc->v[1].spu_adv_reg = val << 20 | val | ADV_VSYNCOFFEN |
		dcrtc->variant->spu_adv_reg;

	if (interlaced) {
		/* Odd interlaced frame */
		dcrtc->v[0].spu_v_h_total = dcrtc->v[1].spu_v_h_total +
						(1 << 16);
		dcrtc->v[0].spu_v_porch = dcrtc->v[1].spu_v_porch + 1;
		val = adj->crtc_hsync_start - adj->crtc_htotal / 2;
		dcrtc->v[0].spu_adv_reg = val << 20 | val | ADV_VSYNCOFFEN |
			dcrtc->variant->spu_adv_reg;
	} else {
		dcrtc->v[0] = dcrtc->v[1];
	}

	val = adj->crtc_vdisplay << 16 | adj->crtc_hdisplay;

	armada_reg_queue_set(regs, i, val, LCD_SPU_V_H_ACTIVE);
	armada_reg_queue_set(regs, i, (lm << 16) | rm, LCD_SPU_H_PORCH);
	armada_reg_queue_set(regs, i, dcrtc->v[0].spu_v_porch, LCD_SPU_V_PORCH);
	armada_reg_queue_set(regs, i, dcrtc->v[0].spu_v_h_total,
			     LCD_SPUT_V_H_TOTAL);

	if (dcrtc->variant->has_spu_adv_reg) {
		armada_reg_queue_mod(regs, i, dcrtc->v[0].spu_adv_reg,
				     ADV_VSYNC_L_OFF | ADV_VSYNC_H_OFF |
				     ADV_VSYNCOFFEN, LCD_SPU_ADV_REG);
	}

	val = adj->flags & DRM_MODE_FLAG_NVSYNC ? CFG_VSYNC_INV : 0;
	armada_reg_queue_mod(regs, i, val, CFG_VSYNC_INV, LCD_SPU_DMA_CTRL1);

	val = dcrtc->spu_iopad_ctrl | armada_drm_crtc_calculate_csc(dcrtc);
	armada_reg_queue_set(regs, i, val, LCD_SPU_IOPAD_CONTROL);
	armada_reg_queue_end(regs, i);

	armada_drm_crtc_update_regs(dcrtc, regs);

	armada_drm_primary_set(crtc, crtc->primary, x, y);
	spin_unlock_irqrestore(&dcrtc->irq_lock, flags);

	armada_drm_crtc_update(dcrtc);

	drm_crtc_vblank_on(crtc);
	armada_drm_crtc_finish_fb(dcrtc, old_fb, dpms_blanked(dcrtc->dpms));

	return 0;
}

/* The mode_config.mutex will be held for this call */
static int armada_drm_crtc_mode_set_base(struct drm_crtc *crtc, int x, int y,
	struct drm_framebuffer *old_fb)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	struct armada_regs regs[4];
	unsigned i;

	i = armada_drm_crtc_calc_fb(crtc->primary->fb, crtc->x, crtc->y, regs,
				    dcrtc->interlaced);
	armada_reg_queue_end(regs, i);

	/* Wait for pending flips to complete */
	armada_drm_plane_work_wait(drm_to_armada_plane(dcrtc->crtc.primary),
				   MAX_SCHEDULE_TIMEOUT);

	/* Take a reference to the new fb as we're using it */
	drm_framebuffer_get(crtc->primary->fb);

	/* Update the base in the CRTC */
	armada_drm_crtc_update_regs(dcrtc, regs);

	/* Drop our previously held reference */
	armada_drm_crtc_finish_fb(dcrtc, old_fb, dpms_blanked(dcrtc->dpms));

	return 0;
}

/* The mode_config.mutex will be held for this call */
static void armada_drm_crtc_disable(struct drm_crtc *crtc)
{
	armada_drm_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);

	/* Disable our primary plane when we disable the CRTC. */
	crtc->primary->funcs->disable_plane(crtc->primary, NULL);
}

static const struct drm_crtc_helper_funcs armada_crtc_helper_funcs = {
	.dpms = armada_drm_crtc_dpms,
	.prepare = armada_drm_crtc_prepare,
	.commit = armada_drm_crtc_commit,
	.mode_fixup = armada_drm_crtc_mode_fixup,
	.mode_set = armada_drm_crtc_mode_set,
	.mode_set_base = armada_drm_crtc_mode_set_base,
	.disable = armada_drm_crtc_disable,
};

static void armada_load_cursor_argb(void __iomem *base, uint32_t *pix,
	unsigned stride, unsigned width, unsigned height)
{
	uint32_t addr;
	unsigned y;

	addr = SRAM_HWC32_RAM1;
	for (y = 0; y < height; y++) {
		uint32_t *p = &pix[y * stride];
		unsigned x;

		for (x = 0; x < width; x++, p++) {
			uint32_t val = *p;

			val = (val & 0xff00ff00) |
			      (val & 0x000000ff) << 16 |
			      (val & 0x00ff0000) >> 16;

			writel_relaxed(val,
				       base + LCD_SPU_SRAM_WRDAT);
			writel_relaxed(addr | SRAM_WRITE,
				       base + LCD_SPU_SRAM_CTRL);
			readl_relaxed(base + LCD_SPU_HWC_OVSA_HPXL_VLN);
			addr += 1;
			if ((addr & 0x00ff) == 0)
				addr += 0xf00;
			if ((addr & 0x30ff) == 0)
				addr = SRAM_HWC32_RAM2;
		}
	}
}

static void armada_drm_crtc_cursor_tran(void __iomem *base)
{
	unsigned addr;

	for (addr = 0; addr < 256; addr++) {
		/* write the default value */
		writel_relaxed(0x55555555, base + LCD_SPU_SRAM_WRDAT);
		writel_relaxed(addr | SRAM_WRITE | SRAM_HWC32_TRAN,
			       base + LCD_SPU_SRAM_CTRL);
	}
}

static int armada_drm_crtc_cursor_update(struct armada_crtc *dcrtc, bool reload)
{
	uint32_t xoff, xscr, w = dcrtc->cursor_w, s;
	uint32_t yoff, yscr, h = dcrtc->cursor_h;
	uint32_t para1;

	/*
	 * Calculate the visible width and height of the cursor,
	 * screen position, and the position in the cursor bitmap.
	 */
	if (dcrtc->cursor_x < 0) {
		xoff = -dcrtc->cursor_x;
		xscr = 0;
		w -= min(xoff, w);
	} else if (dcrtc->cursor_x + w > dcrtc->crtc.mode.hdisplay) {
		xoff = 0;
		xscr = dcrtc->cursor_x;
		w = max_t(int, dcrtc->crtc.mode.hdisplay - dcrtc->cursor_x, 0);
	} else {
		xoff = 0;
		xscr = dcrtc->cursor_x;
	}

	if (dcrtc->cursor_y < 0) {
		yoff = -dcrtc->cursor_y;
		yscr = 0;
		h -= min(yoff, h);
	} else if (dcrtc->cursor_y + h > dcrtc->crtc.mode.vdisplay) {
		yoff = 0;
		yscr = dcrtc->cursor_y;
		h = max_t(int, dcrtc->crtc.mode.vdisplay - dcrtc->cursor_y, 0);
	} else {
		yoff = 0;
		yscr = dcrtc->cursor_y;
	}

	/* On interlaced modes, the vertical cursor size must be halved */
	s = dcrtc->cursor_w;
	if (dcrtc->interlaced) {
		s *= 2;
		yscr /= 2;
		h /= 2;
	}

	if (!dcrtc->cursor_obj || !h || !w) {
		spin_lock_irq(&dcrtc->irq_lock);
		armada_drm_crtc_disable_irq(dcrtc, DUMB_FRAMEDONE_ENA);
		dcrtc->cursor_update = false;
		armada_updatel(0, CFG_HWC_ENA, dcrtc->base + LCD_SPU_DMA_CTRL0);
		spin_unlock_irq(&dcrtc->irq_lock);
		return 0;
	}

	spin_lock_irq(&dcrtc->irq_lock);
	para1 = readl_relaxed(dcrtc->base + LCD_SPU_SRAM_PARA1);
	armada_updatel(CFG_CSB_256x32, CFG_CSB_256x32 | CFG_PDWN256x32,
		       dcrtc->base + LCD_SPU_SRAM_PARA1);
	spin_unlock_irq(&dcrtc->irq_lock);

	/*
	 * Initialize the transparency if the SRAM was powered down.
	 * We must also reload the cursor data as well.
	 */
	if (!(para1 & CFG_CSB_256x32)) {
		armada_drm_crtc_cursor_tran(dcrtc->base);
		reload = true;
	}

	if (dcrtc->cursor_hw_sz != (h << 16 | w)) {
		spin_lock_irq(&dcrtc->irq_lock);
		armada_drm_crtc_disable_irq(dcrtc, DUMB_FRAMEDONE_ENA);
		dcrtc->cursor_update = false;
		armada_updatel(0, CFG_HWC_ENA, dcrtc->base + LCD_SPU_DMA_CTRL0);
		spin_unlock_irq(&dcrtc->irq_lock);
		reload = true;
	}
	if (reload) {
		struct armada_gem_object *obj = dcrtc->cursor_obj;
		uint32_t *pix;
		/* Set the top-left corner of the cursor image */
		pix = obj->addr;
		pix += yoff * s + xoff;
		armada_load_cursor_argb(dcrtc->base, pix, s, w, h);
	}

	/* Reload the cursor position, size and enable in the IRQ handler */
	spin_lock_irq(&dcrtc->irq_lock);
	dcrtc->cursor_hw_pos = yscr << 16 | xscr;
	dcrtc->cursor_hw_sz = h << 16 | w;
	dcrtc->cursor_update = true;
	armada_drm_crtc_enable_irq(dcrtc, DUMB_FRAMEDONE_ENA);
	spin_unlock_irq(&dcrtc->irq_lock);

	return 0;
}
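
/*
 * Worked example of the clipping above (illustrative only): a 64x32
 * cursor at cursor_x = -16, cursor_y = 0 on a progressive mode gives
 * xoff = 16, xscr = 0 and w = 48, so the bitmap load starts 16 pixels
 * into each line (pix += yoff * s + xoff) and the hardware is
 * programmed with cursor_hw_sz = (32 << 16) | 48 at screen position
 * 0,0 on the next DUMB_FRAMEDONE interrupt.
 */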
936
937static void cursor_update(void *data)
938{
939 armada_drm_crtc_cursor_update(data, true);
940}
941
942static int armada_drm_crtc_cursor_set(struct drm_crtc *crtc,
943 struct drm_file *file, uint32_t handle, uint32_t w, uint32_t h)
944{
Russell King662af0d2013-05-19 10:55:17 +0100945 struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
Russell King662af0d2013-05-19 10:55:17 +0100946 struct armada_gem_object *obj = NULL;
947 int ret;
948
949 /* If no cursor support, replicate drm's return value */
Russell King42e62ba2014-04-22 15:24:03 +0100950 if (!dcrtc->variant->has_spu_adv_reg)
Russell King662af0d2013-05-19 10:55:17 +0100951 return -ENXIO;
952
953 if (handle && w > 0 && h > 0) {
954 /* maximum size is 64x32 or 32x64 */
955 if (w > 64 || h > 64 || (w > 32 && h > 32))
956 return -ENOMEM;
957
Chris Wilsona8ad0bd2016-05-09 11:04:54 +0100958 obj = armada_gem_object_lookup(file, handle);
Russell King662af0d2013-05-19 10:55:17 +0100959 if (!obj)
960 return -ENOENT;
961
962 /* Must be a kernel-mapped object */
963 if (!obj->addr) {
Haneen Mohammed4c3cf372017-09-20 12:54:48 -0600964 drm_gem_object_put_unlocked(&obj->obj);
Russell King662af0d2013-05-19 10:55:17 +0100965 return -EINVAL;
966 }
967
968 if (obj->obj.size < w * h * 4) {
969 DRM_ERROR("buffer is too small\n");
Haneen Mohammed4c3cf372017-09-20 12:54:48 -0600970 drm_gem_object_put_unlocked(&obj->obj);
Russell King662af0d2013-05-19 10:55:17 +0100971 return -ENOMEM;
972 }
973 }
974
Russell King662af0d2013-05-19 10:55:17 +0100975 if (dcrtc->cursor_obj) {
976 dcrtc->cursor_obj->update = NULL;
977 dcrtc->cursor_obj->update_data = NULL;
Haneen Mohammed4c3cf372017-09-20 12:54:48 -0600978 drm_gem_object_put_unlocked(&dcrtc->cursor_obj->obj);
Russell King662af0d2013-05-19 10:55:17 +0100979 }
980 dcrtc->cursor_obj = obj;
981 dcrtc->cursor_w = w;
982 dcrtc->cursor_h = h;
983 ret = armada_drm_crtc_cursor_update(dcrtc, true);
984 if (obj) {
985 obj->update_data = dcrtc;
986 obj->update = cursor_update;
987 }
Russell King662af0d2013-05-19 10:55:17 +0100988
989 return ret;
990}
991
992static int armada_drm_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
993{
Russell King662af0d2013-05-19 10:55:17 +0100994 struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
Russell King662af0d2013-05-19 10:55:17 +0100995 int ret;
996
997 /* If no cursor support, replicate drm's return value */
Russell King42e62ba2014-04-22 15:24:03 +0100998 if (!dcrtc->variant->has_spu_adv_reg)
Russell King662af0d2013-05-19 10:55:17 +0100999 return -EFAULT;
1000
Russell King662af0d2013-05-19 10:55:17 +01001001 dcrtc->cursor_x = x;
1002 dcrtc->cursor_y = y;
1003 ret = armada_drm_crtc_cursor_update(dcrtc, false);
Russell King662af0d2013-05-19 10:55:17 +01001004
1005 return ret;
1006}
1007
Russell King96f60e32012-08-15 13:59:49 +01001008static void armada_drm_crtc_destroy(struct drm_crtc *crtc)
1009{
1010 struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
1011 struct armada_private *priv = crtc->dev->dev_private;
1012
Russell King662af0d2013-05-19 10:55:17 +01001013 if (dcrtc->cursor_obj)
Haneen Mohammed4c3cf372017-09-20 12:54:48 -06001014 drm_gem_object_put_unlocked(&dcrtc->cursor_obj->obj);
Russell King662af0d2013-05-19 10:55:17 +01001015
Russell King96f60e32012-08-15 13:59:49 +01001016 priv->dcrtc[dcrtc->num] = NULL;
1017 drm_crtc_cleanup(&dcrtc->crtc);
1018
1019 if (!IS_ERR(dcrtc->clk))
1020 clk_disable_unprepare(dcrtc->clk);
1021
Russell Kinge5d9ddf2014-04-26 15:19:38 +01001022 writel_relaxed(0, dcrtc->base + LCD_SPU_IRQ_ENA);
1023
Russell King9611cb92014-06-15 11:21:23 +01001024 of_node_put(dcrtc->crtc.port);
1025
Russell King96f60e32012-08-15 13:59:49 +01001026 kfree(dcrtc);
1027}
1028
1029/*
1030 * The mode_config lock is held here, to prevent races between this
1031 * and a mode_set.
1032 */
1033static int armada_drm_crtc_page_flip(struct drm_crtc *crtc,
Daniel Vetter41292b1f2017-03-22 22:50:50 +01001034 struct drm_framebuffer *fb, struct drm_pending_vblank_event *event, uint32_t page_flip_flags,
1035 struct drm_modeset_acquire_ctx *ctx)
Russell King96f60e32012-08-15 13:59:49 +01001036{
1037 struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
Russell Kingeaa66272017-07-08 10:22:10 +01001038 struct armada_plane_work *work;
Russell King96f60e32012-08-15 13:59:49 +01001039 unsigned i;
1040 int ret;
1041
Russell Kingeaa66272017-07-08 10:22:10 +01001042 work = armada_drm_crtc_alloc_plane_work(dcrtc->crtc.primary);
Russell King96f60e32012-08-15 13:59:49 +01001043 if (!work)
1044 return -ENOMEM;
1045
1046 work->event = event;
Matt Roperf4510a22014-04-01 15:22:40 -07001047 work->old_fb = dcrtc->crtc.primary->fb;
Russell King96f60e32012-08-15 13:59:49 +01001048
1049 i = armada_drm_crtc_calc_fb(fb, crtc->x, crtc->y, work->regs,
1050 dcrtc->interlaced);
1051 armada_reg_queue_end(work->regs, i);
1052
1053 /*
Russell Kingc5488302014-10-11 23:53:35 +01001054 * Ensure that we hold a reference on the new framebuffer.
1055 * This has to match the behaviour in mode_set.
Russell King96f60e32012-08-15 13:59:49 +01001056 */
Haneen Mohammeda52ff2a2017-09-20 12:57:16 -06001057 drm_framebuffer_get(fb);
Russell King96f60e32012-08-15 13:59:49 +01001058
Russell Kingeaa66272017-07-08 10:22:10 +01001059 ret = armada_drm_plane_work_queue(dcrtc, work);
Russell King96f60e32012-08-15 13:59:49 +01001060 if (ret) {
Russell Kingc5488302014-10-11 23:53:35 +01001061 /* Undo our reference above */
Haneen Mohammeda52ff2a2017-09-20 12:57:16 -06001062 drm_framebuffer_put(fb);
Russell King96f60e32012-08-15 13:59:49 +01001063 kfree(work);
1064 return ret;
1065 }
1066
1067 /*
Russell King96f60e32012-08-15 13:59:49 +01001068 * Finally, if the display is blanked, we won't receive an
1069 * interrupt, so complete it now.
1070 */
Russell King4b5dda82015-08-06 16:37:18 +01001071 if (dpms_blanked(dcrtc->dpms))
Russell Kingec6fb152016-07-25 15:16:11 +01001072 armada_drm_plane_work_run(dcrtc, dcrtc->crtc.primary);
Russell King96f60e32012-08-15 13:59:49 +01001073
1074 return 0;
1075}
1076
1077static int
1078armada_drm_crtc_set_property(struct drm_crtc *crtc,
1079 struct drm_property *property, uint64_t val)
1080{
1081 struct armada_private *priv = crtc->dev->dev_private;
1082 struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
1083 bool update_csc = false;
1084
1085 if (property == priv->csc_yuv_prop) {
1086 dcrtc->csc_yuv_mode = val;
1087 update_csc = true;
1088 } else if (property == priv->csc_rgb_prop) {
1089 dcrtc->csc_rgb_mode = val;
1090 update_csc = true;
1091 }
1092
1093 if (update_csc) {
1094 uint32_t val;
1095
1096 val = dcrtc->spu_iopad_ctrl |
1097 armada_drm_crtc_calculate_csc(dcrtc);
1098 writel_relaxed(val, dcrtc->base + LCD_SPU_IOPAD_CONTROL);
1099 }
1100
1101 return 0;
1102}
1103
Shawn Guo5922a7d2017-02-07 17:16:18 +08001104/* These are called under the vbl_lock. */
1105static int armada_drm_crtc_enable_vblank(struct drm_crtc *crtc)
1106{
1107 struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
Russell King92298c12018-06-26 17:06:06 +01001108 unsigned long flags;
Shawn Guo5922a7d2017-02-07 17:16:18 +08001109
Russell King92298c12018-06-26 17:06:06 +01001110 spin_lock_irqsave(&dcrtc->irq_lock, flags);
Shawn Guo5922a7d2017-02-07 17:16:18 +08001111 armada_drm_crtc_enable_irq(dcrtc, VSYNC_IRQ_ENA);
Russell King92298c12018-06-26 17:06:06 +01001112 spin_unlock_irqrestore(&dcrtc->irq_lock, flags);
Shawn Guo5922a7d2017-02-07 17:16:18 +08001113 return 0;
1114}
1115
1116static void armada_drm_crtc_disable_vblank(struct drm_crtc *crtc)
1117{
1118 struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
Russell King92298c12018-06-26 17:06:06 +01001119 unsigned long flags;
Shawn Guo5922a7d2017-02-07 17:16:18 +08001120
Russell King92298c12018-06-26 17:06:06 +01001121 spin_lock_irqsave(&dcrtc->irq_lock, flags);
Shawn Guo5922a7d2017-02-07 17:16:18 +08001122 armada_drm_crtc_disable_irq(dcrtc, VSYNC_IRQ_ENA);
Russell King92298c12018-06-26 17:06:06 +01001123 spin_unlock_irqrestore(&dcrtc->irq_lock, flags);
Shawn Guo5922a7d2017-02-07 17:16:18 +08001124}
1125
Ville Syrjäläa02fb902015-12-15 12:20:59 +01001126static const struct drm_crtc_funcs armada_crtc_funcs = {
Russell King662af0d2013-05-19 10:55:17 +01001127 .cursor_set = armada_drm_crtc_cursor_set,
1128 .cursor_move = armada_drm_crtc_cursor_move,
Russell King96f60e32012-08-15 13:59:49 +01001129 .destroy = armada_drm_crtc_destroy,
1130 .set_config = drm_crtc_helper_set_config,
1131 .page_flip = armada_drm_crtc_page_flip,
1132 .set_property = armada_drm_crtc_set_property,
Shawn Guo5922a7d2017-02-07 17:16:18 +08001133 .enable_vblank = armada_drm_crtc_enable_vblank,
1134 .disable_vblank = armada_drm_crtc_disable_vblank,
Russell King96f60e32012-08-15 13:59:49 +01001135};
1136
Russell King950bc132017-07-08 10:22:37 +01001137static void armada_drm_primary_update_state(struct drm_plane_state *state,
1138 struct armada_regs *regs)
1139{
1140 struct armada_plane *dplane = drm_to_armada_plane(state->plane);
1141 struct armada_crtc *dcrtc = drm_to_armada_crtc(state->crtc);
1142 struct armada_framebuffer *dfb = drm_fb_to_armada_fb(state->fb);
1143 bool was_disabled;
1144 unsigned int idx = 0;
1145 u32 val;
1146
1147 val = CFG_GRA_FMT(dfb->fmt) | CFG_GRA_MOD(dfb->mod);
1148 if (dfb->fmt > CFG_420)
1149 val |= CFG_PALETTE_ENA;
1150 if (state->visible)
1151 val |= CFG_GRA_ENA;
1152 if (drm_rect_width(&state->src) >> 16 != drm_rect_width(&state->dst))
1153 val |= CFG_GRA_HSMOOTH;
1154
1155 was_disabled = !(dplane->state.ctrl0 & CFG_GRA_ENA);
1156 if (was_disabled)
1157 armada_reg_queue_mod(regs, idx,
1158 0, CFG_PDWN64x66, LCD_SPU_SRAM_PARA1);
1159
1160 dplane->state.ctrl0 = val;
Russell King02395202018-07-30 11:52:34 +01001161 dplane->state.src_hw = armada_rect_hw_fp(&state->src);
1162 dplane->state.dst_hw = armada_rect_hw(&state->dst);
1163 dplane->state.dst_yx = armada_rect_yx(&state->dst);
Russell King950bc132017-07-08 10:22:37 +01001164
1165 armada_drm_gra_plane_regs(regs + idx, &dfb->fb, &dplane->state,
1166 state->src.x1 >> 16, state->src.y1 >> 16,
1167 dcrtc->interlaced);
1168
1169 dplane->state.vsync_update = !was_disabled;
1170 dplane->state.changed = true;
1171}
1172
1173static int armada_drm_primary_update(struct drm_plane *plane,
1174 struct drm_crtc *crtc, struct drm_framebuffer *fb,
1175 int crtc_x, int crtc_y, unsigned int crtc_w, unsigned int crtc_h,
1176 uint32_t src_x, uint32_t src_y, uint32_t src_w, uint32_t src_h,
1177 struct drm_modeset_acquire_ctx *ctx)
1178{
1179 struct armada_plane *dplane = drm_to_armada_plane(plane);
1180 struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
1181 struct armada_plane_work *work;
1182 struct drm_plane_state state = {
1183 .plane = plane,
1184 .crtc = crtc,
1185 .fb = fb,
1186 .src_x = src_x,
1187 .src_y = src_y,
1188 .src_w = src_w,
1189 .src_h = src_h,
1190 .crtc_x = crtc_x,
1191 .crtc_y = crtc_y,
1192 .crtc_w = crtc_w,
1193 .crtc_h = crtc_h,
1194 .rotation = DRM_MODE_ROTATE_0,
1195 };
Ville Syrjälä57270b82018-01-23 19:08:55 +02001196 struct drm_crtc_state crtc_state = {
1197 .crtc = crtc,
1198 .enable = crtc->enabled,
Ville Syrjälä81af63a2018-01-23 19:08:57 +02001199 .mode = crtc->mode,
Ville Syrjälä57270b82018-01-23 19:08:55 +02001200 };
Russell King950bc132017-07-08 10:22:37 +01001201 int ret;
1202
Ville Syrjälä81af63a2018-01-23 19:08:57 +02001203 ret = drm_atomic_helper_check_plane_state(&state, &crtc_state, 0,
Dave Airliebcd21a42018-01-05 09:43:46 +10001204 INT_MAX, true, false);
Russell King950bc132017-07-08 10:22:37 +01001205 if (ret)
1206 return ret;
1207
1208 work = &dplane->works[dplane->next_work];
1209 work->fn = armada_drm_crtc_complete_frame_work;
1210
1211 if (plane->fb != fb) {
1212 /*
1213 * Take a reference on the new framebuffer - we want to
1214 * hold on to it while the hardware is displaying it.
1215 */
1216 drm_framebuffer_reference(fb);
1217
1218 work->old_fb = plane->fb;
1219 } else {
1220 work->old_fb = NULL;
1221 }
1222
1223 armada_drm_primary_update_state(&state, work->regs);
1224
1225 if (!dplane->state.changed)
1226 return 0;
1227
1228 /* Wait for pending work to complete */
1229 if (armada_drm_plane_work_wait(dplane, HZ / 10) == 0)
1230 armada_drm_plane_work_cancel(dcrtc, dplane);
1231
1232 if (!dplane->state.vsync_update) {
1233 work->fn(dcrtc, work);
1234 if (work->old_fb)
1235 drm_framebuffer_unreference(work->old_fb);
1236 return 0;
1237 }
1238
1239 /* Queue it for update on the next interrupt if we are enabled */
1240 ret = armada_drm_plane_work_queue(dcrtc, work);
1241 if (ret) {
1242 work->fn(dcrtc, work);
1243 if (work->old_fb)
1244 drm_framebuffer_unreference(work->old_fb);
1245 }
1246
1247 dplane->next_work = !dplane->next_work;
1248
1249 return 0;
1250}
1251
Russell Kingf1f1bffc2017-07-08 10:16:42 +01001252int armada_drm_plane_disable(struct drm_plane *plane,
1253 struct drm_modeset_acquire_ctx *ctx)
Russell King28b30432017-07-08 10:16:40 +01001254{
1255 struct armada_plane *dplane = drm_to_armada_plane(plane);
Russell Kingf1f1bffc2017-07-08 10:16:42 +01001256 struct armada_crtc *dcrtc;
Russell King890ca8d2017-07-08 10:22:27 +01001257 struct armada_plane_work *work;
1258 unsigned int idx = 0;
Russell Kingd76dcc72017-07-08 10:16:47 +01001259 u32 sram_para1, enable_mask;
Russell King28b30432017-07-08 10:16:40 +01001260
Russell Kingf1f1bffc2017-07-08 10:16:42 +01001261 if (!plane->crtc)
1262 return 0;
1263
Russell King28b30432017-07-08 10:16:40 +01001264 /*
Russell King890ca8d2017-07-08 10:22:27 +01001265 * Arrange to power down most RAMs and FIFOs if this is the primary
1266 * plane, otherwise just the YUV FIFOs for the overlay plane.
Russell King28b30432017-07-08 10:16:40 +01001267 */
Russell King28b30432017-07-08 10:16:40 +01001268 if (plane->type == DRM_PLANE_TYPE_PRIMARY) {
1269 sram_para1 = CFG_PDWN256x32 | CFG_PDWN256x24 | CFG_PDWN256x8 |
1270 CFG_PDWN32x32 | CFG_PDWN64x66;
Russell Kingd76dcc72017-07-08 10:16:47 +01001271 enable_mask = CFG_GRA_ENA;
Russell King28b30432017-07-08 10:16:40 +01001272 } else {
Russell King28b30432017-07-08 10:16:40 +01001273 sram_para1 = CFG_PDWN16x66 | CFG_PDWN32x66;
Russell Kingd76dcc72017-07-08 10:16:47 +01001274 enable_mask = CFG_DMA_ENA;
Russell King28b30432017-07-08 10:16:40 +01001275 }
1276
Russell Kingd76dcc72017-07-08 10:16:47 +01001277 dplane->state.ctrl0 &= ~enable_mask;
1278
Russell Kingf1f1bffc2017-07-08 10:16:42 +01001279 dcrtc = drm_to_armada_crtc(plane->crtc);
1280
Russell King890ca8d2017-07-08 10:22:27 +01001281 /*
1282 * Try to disable the plane and drop our ref on the framebuffer
1283 * at the next frame update. If we fail for any reason, disable
1284 * the plane immediately.
1285 */
1286 work = &dplane->works[dplane->next_work];
1287 work->fn = armada_drm_crtc_complete_disable_work;
1288 work->cancel = armada_drm_crtc_complete_disable_work;
1289 work->old_fb = plane->fb;
1290
1291 armada_reg_queue_mod(work->regs, idx,
1292 0, enable_mask, LCD_SPU_DMA_CTRL0);
1293 armada_reg_queue_mod(work->regs, idx,
1294 sram_para1, 0, LCD_SPU_SRAM_PARA1);
1295 armada_reg_queue_end(work->regs, idx);
1296
Russell King28b30432017-07-08 10:16:40 +01001297 /* Wait for any preceding work to complete, but don't wedge */
1298 if (WARN_ON(!armada_drm_plane_work_wait(dplane, HZ)))
1299 armada_drm_plane_work_cancel(dcrtc, dplane);
1300
Russell King890ca8d2017-07-08 10:22:27 +01001301 if (armada_drm_plane_work_queue(dcrtc, work)) {
1302 work->fn(dcrtc, work);
1303 if (work->old_fb)
1304 drm_framebuffer_unreference(work->old_fb);
1305 }
1306
1307 dplane->next_work = !dplane->next_work;
Russell King28b30432017-07-08 10:16:40 +01001308
Russell King28b30432017-07-08 10:16:40 +01001309 return 0;
1310}
1311
Russell Kingde323012015-07-15 18:11:24 +01001312static const struct drm_plane_funcs armada_primary_plane_funcs = {
Russell King950bc132017-07-08 10:22:37 +01001313 .update_plane = armada_drm_primary_update,
Russell Kingf1f1bffc2017-07-08 10:16:42 +01001314 .disable_plane = armada_drm_plane_disable,
Russell Kingde323012015-07-15 18:11:24 +01001315 .destroy = drm_primary_helper_destroy,
1316};
1317
Russell King5740d272015-07-15 18:11:25 +01001318int armada_drm_plane_init(struct armada_plane *plane)
1319{
Russell Kingd9241552017-07-08 10:22:25 +01001320 unsigned int i;
1321
1322 for (i = 0; i < ARRAY_SIZE(plane->works); i++)
1323 plane->works[i].plane = &plane->base;
1324
Russell King5740d272015-07-15 18:11:25 +01001325 init_waitqueue_head(&plane->frame_wait);
1326
1327 return 0;
1328}
1329
Arvind Yadavaaaf2f12017-07-01 15:30:15 +05301330static const struct drm_prop_enum_list armada_drm_csc_yuv_enum_list[] = {
Russell King96f60e32012-08-15 13:59:49 +01001331 { CSC_AUTO, "Auto" },
1332 { CSC_YUV_CCIR601, "CCIR601" },
1333 { CSC_YUV_CCIR709, "CCIR709" },
1334};
1335
Arvind Yadavaaaf2f12017-07-01 15:30:15 +05301336static const struct drm_prop_enum_list armada_drm_csc_rgb_enum_list[] = {
Russell King96f60e32012-08-15 13:59:49 +01001337 { CSC_AUTO, "Auto" },
1338 { CSC_RGB_COMPUTER, "Computer system" },
1339 { CSC_RGB_STUDIO, "Studio" },
1340};
1341
1342static int armada_drm_crtc_create_properties(struct drm_device *dev)
1343{
1344 struct armada_private *priv = dev->dev_private;
1345
1346 if (priv->csc_yuv_prop)
1347 return 0;
1348
1349 priv->csc_yuv_prop = drm_property_create_enum(dev, 0,
1350 "CSC_YUV", armada_drm_csc_yuv_enum_list,
1351 ARRAY_SIZE(armada_drm_csc_yuv_enum_list));
1352 priv->csc_rgb_prop = drm_property_create_enum(dev, 0,
1353 "CSC_RGB", armada_drm_csc_rgb_enum_list,
1354 ARRAY_SIZE(armada_drm_csc_rgb_enum_list));
1355
1356 if (!priv->csc_yuv_prop || !priv->csc_rgb_prop)
1357 return -ENOMEM;
1358
1359 return 0;
1360}
1361
Russell King0fb29702015-06-06 21:46:53 +01001362static int armada_drm_crtc_create(struct drm_device *drm, struct device *dev,
Russell King9611cb92014-06-15 11:21:23 +01001363 struct resource *res, int irq, const struct armada_variant *variant,
1364 struct device_node *port)
Russell King96f60e32012-08-15 13:59:49 +01001365{
Russell Kingd8c96082014-04-22 11:10:15 +01001366 struct armada_private *priv = drm->dev_private;
Russell King96f60e32012-08-15 13:59:49 +01001367 struct armada_crtc *dcrtc;
Russell Kingde323012015-07-15 18:11:24 +01001368 struct armada_plane *primary;
Russell King96f60e32012-08-15 13:59:49 +01001369 void __iomem *base;
1370 int ret;
1371
Russell Kingd8c96082014-04-22 11:10:15 +01001372 ret = armada_drm_crtc_create_properties(drm);
Russell King96f60e32012-08-15 13:59:49 +01001373 if (ret)
1374 return ret;
1375
Linus Torvaldsa7d7a142014-08-07 17:36:12 -07001376 base = devm_ioremap_resource(dev, res);
Jingoo Hanc9d53c02014-06-11 14:00:05 +09001377 if (IS_ERR(base))
1378 return PTR_ERR(base);
Russell King96f60e32012-08-15 13:59:49 +01001379
1380 dcrtc = kzalloc(sizeof(*dcrtc), GFP_KERNEL);
1381 if (!dcrtc) {
1382 DRM_ERROR("failed to allocate Armada crtc\n");
1383 return -ENOMEM;
1384 }
1385
Russell Kingd8c96082014-04-22 11:10:15 +01001386 if (dev != drm->dev)
1387 dev_set_drvdata(dev, dcrtc);
1388
Russell King42e62ba2014-04-22 15:24:03 +01001389 dcrtc->variant = variant;
Russell King96f60e32012-08-15 13:59:49 +01001390 dcrtc->base = base;
Russell Kingd8c96082014-04-22 11:10:15 +01001391 dcrtc->num = drm->mode_config.num_crtc;
Russell King96f60e32012-08-15 13:59:49 +01001392 dcrtc->clk = ERR_PTR(-EINVAL);
1393 dcrtc->csc_yuv_mode = CSC_AUTO;
1394 dcrtc->csc_rgb_mode = CSC_AUTO;
1395 dcrtc->cfg_dumb_ctrl = DUMB24_RGB888_0;
1396 dcrtc->spu_iopad_ctrl = CFG_VSCALE_LN_EN | CFG_IOPAD_DUMB24;
1397 spin_lock_init(&dcrtc->irq_lock);
1398 dcrtc->irq_ena = CLEAN_SPU_IRQ_ISR;
Russell King96f60e32012-08-15 13:59:49 +01001399
1400 /* Initialize some registers which we don't otherwise set */
1401 writel_relaxed(0x00000001, dcrtc->base + LCD_CFG_SCLK_DIV);
1402 writel_relaxed(0x00000000, dcrtc->base + LCD_SPU_BLANKCOLOR);
1403 writel_relaxed(dcrtc->spu_iopad_ctrl,
1404 dcrtc->base + LCD_SPU_IOPAD_CONTROL);
1405 writel_relaxed(0x00000000, dcrtc->base + LCD_SPU_SRAM_PARA0);
1406 writel_relaxed(CFG_PDWN256x32 | CFG_PDWN256x24 | CFG_PDWN256x8 |
1407 CFG_PDWN32x32 | CFG_PDWN16x66 | CFG_PDWN32x66 |
1408 CFG_PDWN64x66, dcrtc->base + LCD_SPU_SRAM_PARA1);
1409 writel_relaxed(0x2032ff81, dcrtc->base + LCD_SPU_DMA_CTRL1);
Russell Kinge5d9ddf2014-04-26 15:19:38 +01001410 writel_relaxed(dcrtc->irq_ena, dcrtc->base + LCD_SPU_IRQ_ENA);
Russell King92298c12018-06-26 17:06:06 +01001411 readl_relaxed(dcrtc->base + LCD_SPU_IRQ_ISR);
Russell Kinge5d9ddf2014-04-26 15:19:38 +01001412 writel_relaxed(0, dcrtc->base + LCD_SPU_IRQ_ISR);
Russell King96f60e32012-08-15 13:59:49 +01001413
Russell Kinge5d9ddf2014-04-26 15:19:38 +01001414 ret = devm_request_irq(dev, irq, armada_drm_irq, 0, "armada_drm_crtc",
1415 dcrtc);
Russell King33cd3c02017-12-08 12:16:22 +00001416 if (ret < 0)
1417 goto err_crtc;
Russell King96f60e32012-08-15 13:59:49 +01001418
Russell King42e62ba2014-04-22 15:24:03 +01001419 if (dcrtc->variant->init) {
Russell Kingd8c96082014-04-22 11:10:15 +01001420 ret = dcrtc->variant->init(dcrtc, dev);
Russell King33cd3c02017-12-08 12:16:22 +00001421 if (ret)
1422 goto err_crtc;
Russell King96f60e32012-08-15 13:59:49 +01001423 }
1424
1425 /* Ensure AXI pipeline is enabled */
1426 armada_updatel(CFG_ARBFAST_ENA, 0, dcrtc->base + LCD_SPU_DMA_CTRL0);
1427
1428 priv->dcrtc[dcrtc->num] = dcrtc;
1429
Russell King9611cb92014-06-15 11:21:23 +01001430 dcrtc->crtc.port = port;
Russell King1c914ce2015-07-15 18:11:24 +01001431
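	/*
	 * The primary plane must be created before the CRTC, since
	 * drm_crtc_init_with_planes() takes it as an argument.
	 */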
Russell Kingde323012015-07-15 18:11:24 +01001432 primary = kzalloc(sizeof(*primary), GFP_KERNEL);
Russell King33cd3c02017-12-08 12:16:22 +00001433 if (!primary) {
1434 ret = -ENOMEM;
1435 goto err_crtc;
1436 }
Russell King1c914ce2015-07-15 18:11:24 +01001437
Russell King5740d272015-07-15 18:11:25 +01001438 ret = armada_drm_plane_init(primary);
1439 if (ret) {
1440 kfree(primary);
Russell King33cd3c02017-12-08 12:16:22 +00001441 goto err_crtc;
Russell King5740d272015-07-15 18:11:25 +01001442 }
1443
Russell Kingde323012015-07-15 18:11:24 +01001444 ret = drm_universal_plane_init(drm, &primary->base, 0,
1445 &armada_primary_plane_funcs,
1446 armada_primary_formats,
1447 ARRAY_SIZE(armada_primary_formats),
Ben Widawskye6fc3b62017-07-23 20:46:38 -07001448 NULL,
Ville Syrjäläb0b3b792015-12-09 16:19:55 +02001449 DRM_PLANE_TYPE_PRIMARY, NULL);
Russell Kingde323012015-07-15 18:11:24 +01001450 if (ret) {
1451 kfree(primary);
Russell King33cd3c02017-12-08 12:16:22 +00001452 goto err_crtc;
Russell Kingde323012015-07-15 18:11:24 +01001453 }
1454
1455 ret = drm_crtc_init_with_planes(drm, &dcrtc->crtc, &primary->base, NULL,
Ville Syrjäläf9882872015-12-09 16:19:31 +02001456 &armada_crtc_funcs, NULL);
Russell King1c914ce2015-07-15 18:11:24 +01001457 if (ret)
1458 goto err_crtc_init;
1459
Russell King96f60e32012-08-15 13:59:49 +01001460 drm_crtc_helper_add(&dcrtc->crtc, &armada_crtc_helper_funcs);
1461
1462 drm_object_attach_property(&dcrtc->crtc.base, priv->csc_yuv_prop,
1463 dcrtc->csc_yuv_mode);
1464 drm_object_attach_property(&dcrtc->crtc.base, priv->csc_rgb_prop,
1465 dcrtc->csc_rgb_mode);
1466
Russell Kingd8c96082014-04-22 11:10:15 +01001467 return armada_overlay_plane_create(drm, 1 << dcrtc->num);
Russell King1c914ce2015-07-15 18:11:24 +01001468
1469err_crtc_init:
Russell Kingde323012015-07-15 18:11:24 +01001470 primary->base.funcs->destroy(&primary->base);
Russell King33cd3c02017-12-08 12:16:22 +00001471err_crtc:
1472 kfree(dcrtc);
1473
Russell King1c914ce2015-07-15 18:11:24 +01001474 return ret;
Russell King96f60e32012-08-15 13:59:49 +01001475}
Russell Kingd8c96082014-04-22 11:10:15 +01001476
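/*
 * Component bind callback for one LCD controller: fetch the MMIO
 * resource and interrupt, pick the variant from either the platform
 * device ID table (legacy boards) or the OF match data (DT), locate
 * the "port" node (optionally wrapped in a "ports" node), and create
 * the CRTC.
 */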
1477static int
1478armada_lcd_bind(struct device *dev, struct device *master, void *data)
1479{
1480 struct platform_device *pdev = to_platform_device(dev);
1481 struct drm_device *drm = data;
1482 struct resource *res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
1483 int irq = platform_get_irq(pdev, 0);
1484 const struct armada_variant *variant;
Russell King9611cb92014-06-15 11:21:23 +01001485 struct device_node *port = NULL;
Russell Kingd8c96082014-04-22 11:10:15 +01001486
1487 if (irq < 0)
1488 return irq;
1489
1490 if (!dev->of_node) {
1491 const struct platform_device_id *id;
1492
1493 id = platform_get_device_id(pdev);
1494 if (!id)
1495 return -ENXIO;
1496
1497 variant = (const struct armada_variant *)id->driver_data;
1498 } else {
1499 const struct of_device_id *match;
Russell King9611cb92014-06-15 11:21:23 +01001500 struct device_node *np, *parent = dev->of_node;
Russell Kingd8c96082014-04-22 11:10:15 +01001501
1502 match = of_match_device(dev->driver->of_match_table, dev);
1503 if (!match)
1504 return -ENXIO;
1505
Russell King9611cb92014-06-15 11:21:23 +01001506 np = of_get_child_by_name(parent, "ports");
1507 if (np)
1508 parent = np;
1509 port = of_get_child_by_name(parent, "port");
1510 of_node_put(np);
1511 if (!port) {
Rob Herring4bf99142017-07-18 16:43:04 -05001512 dev_err(dev, "no port node found in %pOF\n", parent);
Russell King9611cb92014-06-15 11:21:23 +01001513 return -ENXIO;
1514 }
1515
Russell Kingd8c96082014-04-22 11:10:15 +01001516 variant = match->data;
1517 }
1518
Russell King9611cb92014-06-15 11:21:23 +01001519 return armada_drm_crtc_create(drm, dev, res, irq, variant, port);
Russell Kingd8c96082014-04-22 11:10:15 +01001520}
1521
1522static void
1523armada_lcd_unbind(struct device *dev, struct device *master, void *data)
1524{
1525 struct armada_crtc *dcrtc = dev_get_drvdata(dev);
1526
1527 armada_drm_crtc_destroy(&dcrtc->crtc);
1528}
1529
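/*
 * Each LCD controller is a component of the Armada DRM master device;
 * probe/remove below only register and unregister with the component
 * framework, and the real work happens at bind/unbind time.
 */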
1530static const struct component_ops armada_lcd_ops = {
1531 .bind = armada_lcd_bind,
1532 .unbind = armada_lcd_unbind,
1533};
1534
1535static int armada_lcd_probe(struct platform_device *pdev)
1536{
1537 return component_add(&pdev->dev, &armada_lcd_ops);
1538}
1539
1540static int armada_lcd_remove(struct platform_device *pdev)
1541{
1542 component_del(&pdev->dev, &armada_lcd_ops);
1543 return 0;
1544}
1545
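/* DT match: the Dove LCD controller uses the Armada 510 variant ops */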
Arvind Yadav85909712017-06-20 10:44:33 +05301546static const struct of_device_id armada_lcd_of_match[] = {
Russell Kingd8c96082014-04-22 11:10:15 +01001547 {
1548 .compatible = "marvell,dove-lcd",
1549 .data = &armada510_ops,
1550 },
1551 {}
1552};
1553MODULE_DEVICE_TABLE(of, armada_lcd_of_match);
1554
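/* Legacy (non-DT) platform device names; both map to the Armada 510 variant */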
1555static const struct platform_device_id armada_lcd_platform_ids[] = {
1556 {
1557 .name = "armada-lcd",
1558 .driver_data = (unsigned long)&armada510_ops,
1559 }, {
1560 .name = "armada-510-lcd",
1561 .driver_data = (unsigned long)&armada510_ops,
1562 },
1563 { },
1564};
1565MODULE_DEVICE_TABLE(platform, armada_lcd_platform_ids);
1566
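/*
 * Platform driver for the LCD controllers; left non-static so it can
 * be registered by the core Armada DRM driver alongside the component
 * master.
 */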
1567struct platform_driver armada_lcd_platform_driver = {
1568 .probe = armada_lcd_probe,
1569 .remove = armada_lcd_remove,
1570 .driver = {
1571 .name = "armada-lcd",
1572 .owner = THIS_MODULE,
1573 .of_match_table = armada_lcd_of_match,
1574 },
1575 .id_table = armada_lcd_platform_ids,
1576};