/*
 * Copyright (C) 2012 Russell King
 * Rewritten from the dovefb driver, and Armada510 manuals.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
#include <linux/clk.h>
#include <linux/component.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>
#include <drm/drmP.h>
#include <drm/drm_crtc_helper.h>
#include <drm/drm_plane_helper.h>
#include <drm/drm_atomic_helper.h>
#include "armada_crtc.h"
#include "armada_drm.h"
#include "armada_fb.h"
#include "armada_gem.h"
#include "armada_hw.h"
#include "armada_trace.h"

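/*
 * Note that the CSC_YUV_* and CSC_RGB_* values below overlap numerically:
 * they are independent selections, programmed via the separate "CSC_YUV"
 * and "CSC_RGB" properties created in armada_drm_crtc_create_properties().
 */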
enum csc_mode {
	CSC_AUTO = 0,
	CSC_YUV_CCIR601 = 1,
	CSC_YUV_CCIR709 = 2,
	CSC_RGB_COMPUTER = 1,
	CSC_RGB_STUDIO = 2,
};

static const uint32_t armada_primary_formats[] = {
	DRM_FORMAT_UYVY,
	DRM_FORMAT_YUYV,
	DRM_FORMAT_VYUY,
	DRM_FORMAT_YVYU,
	DRM_FORMAT_ARGB8888,
	DRM_FORMAT_ABGR8888,
	DRM_FORMAT_XRGB8888,
	DRM_FORMAT_XBGR8888,
	DRM_FORMAT_RGB888,
	DRM_FORMAT_BGR888,
	DRM_FORMAT_ARGB1555,
	DRM_FORMAT_ABGR1555,
	DRM_FORMAT_RGB565,
	DRM_FORMAT_BGR565,
};

/*
 * A note about interlacing.  Let's consider HDMI 1920x1080i.
 * The timing parameters we have from X are:
 *	Hact HsyA HsyI Htot Vact VsyA VsyI Vtot
 *	1920 2448 2492 2640 1080 1084 1094 1125
 * Which get translated to:
 *	Hact HsyA HsyI Htot Vact VsyA VsyI Vtot
 *	1920 2448 2492 2640  540  542  547  562
 *
 * This is how it is defined by CEA-861-D - line and pixel numbers are
 * referenced to the rising edge of VSYNC and HSYNC.  Total clocks per
 * line: 2640.  In the odd frame, the first active line is line 21; in
 * the even frame, the first active line is line 584.
 *
 * LN:    560  561  562  563        567  568  569
 * DE:    ~~~|____________________________//__________________________
 * HSYNC: ____|~|_____|~|_____|~|_____|~|_//__|~|_____|~|_____|~|_____
 * VSYNC: _________________________|~~~~~~//~~~~~~~~~~~~~~~|__________
 *  22 blanking lines.  VSYNC at 1320 (referenced to the HSYNC rising edge).
 *
 * LN:    1123 1124 1125  1          5    6    7
 * DE:    ~~~|____________________________//__________________________
 * HSYNC: ____|~|_____|~|_____|~|_____|~|_//__|~|_____|~|_____|~|_____
 * VSYNC: ____________________|~~~~~~~~~~~//~~~~~~~~~~|_______________
 *  23 blanking lines
 *
 * The Armada LCD Controller line and pixel numbers are, like X timings,
 * referenced to the top left of the active frame.
 *
 * So, translating these to our LCD controller:
 *  Odd frame, 563 total lines, VSYNC at line 543-548, pixel 1128.
 *  Even frame, 562 total lines, VSYNC at line 542-547, pixel 2448.
 * Note: Vsync front porch remains constant!
 *
 *  if (odd_frame) {
 *    vtotal = mode->crtc_vtotal + 1;
 *    vbackporch = mode->crtc_vsync_start - mode->crtc_vdisplay + 1;
 *    vhorizpos = mode->crtc_hsync_start - mode->crtc_htotal / 2
 *  } else {
 *    vtotal = mode->crtc_vtotal;
 *    vbackporch = mode->crtc_vsync_start - mode->crtc_vdisplay;
 *    vhorizpos = mode->crtc_hsync_start;
 *  }
 *  vfrontporch = mode->crtc_vtotal - mode->crtc_vsync_end;
 *
 * So, we need to reprogram these registers on each vsync event:
 *  LCD_SPU_V_PORCH, LCD_SPU_ADV_REG, LCD_SPUT_V_H_TOTAL
 *
 * Note: we do not use the frame done interrupts because these appear
 * to happen too early, and lead to jitter on the display (presumably
 * they occur at the end of the last active line, before the vsync back
 * porch, which we're reprogramming.)
 */
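/*
 * Worked example of the formulas above, using the 1080i timings quoted at
 * the top of this comment (crtc_vtotal = 562, crtc_vdisplay = 540,
 * crtc_vsync_start = 542, crtc_hsync_start = 2448, crtc_htotal = 2640):
 *
 *  odd frame:  vtotal = 562 + 1 = 563
 *              vbackporch = 542 - 540 + 1 = 3
 *              vhorizpos = 2448 - 2640 / 2 = 1128
 *  even frame: vtotal = 562
 *              vbackporch = 542 - 540 = 2
 *              vhorizpos = 2448
 *
 * which matches the "563 total lines ... pixel 1128" and "562 total lines
 * ... pixel 2448" figures given above.
 */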

void
armada_drm_crtc_update_regs(struct armada_crtc *dcrtc, struct armada_regs *regs)
{
	while (regs->offset != ~0) {
		void __iomem *reg = dcrtc->base + regs->offset;
		uint32_t val;

		val = regs->mask;
		if (val != 0)
			val &= readl_relaxed(reg);
		writel_relaxed(val | regs->val, reg);
		++regs;
	}
}
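/*
 * Illustrative sketch of how the register queue is used, following the
 * pattern of armada_drm_crtc_calc_fb() and armada_drm_crtc_mode_set()
 * below.  As used throughout this file, armada_reg_queue_set() overwrites
 * a register, armada_reg_queue_mod() modifies only the bits in its mask
 * argument, and armada_reg_queue_end() terminates the array (the ~0 offset
 * sentinel tested above):
 *
 *	struct armada_regs regs[4];
 *	unsigned i = 0;
 *
 *	armada_reg_queue_set(regs, i, addr, LCD_CFG_GRA_START_ADDR0);
 *	armada_reg_queue_mod(regs, i, pitch, 0xffff, LCD_CFG_GRA_PITCH);
 *	armada_reg_queue_end(regs, i);
 *	armada_drm_crtc_update_regs(dcrtc, regs);
 */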

#define dpms_blanked(dpms)	((dpms) != DRM_MODE_DPMS_ON)

static void armada_drm_crtc_update(struct armada_crtc *dcrtc)
{
	uint32_t dumb_ctrl;

	dumb_ctrl = dcrtc->cfg_dumb_ctrl;

	if (!dpms_blanked(dcrtc->dpms))
		dumb_ctrl |= CFG_DUMB_ENA;

	/*
	 * When the dumb interface isn't in DUMB24_RGB888_0 mode, it might
	 * be using SPI or GPIO.  If we set this to DUMB_BLANK, we will
	 * force LCD_D[23:0] to output blank color, overriding the GPIO or
	 * SPI usage.  So leave it as-is unless in DUMB24_RGB888_0 mode.
	 */
	if (dpms_blanked(dcrtc->dpms) &&
	    (dumb_ctrl & DUMB_MASK) == DUMB24_RGB888_0) {
		dumb_ctrl &= ~DUMB_MASK;
		dumb_ctrl |= DUMB_BLANK;
	}

	/*
	 * The documentation doesn't indicate what the normal state of
	 * the sync signals is.  Sebastian Hesselbarth kindly probed
	 * these signals on his board to determine their state.
	 *
	 * The non-inverted state of the sync signals is active high.
	 * Setting these bits makes the appropriate signal active low.
	 */
	if (dcrtc->crtc.mode.flags & DRM_MODE_FLAG_NCSYNC)
		dumb_ctrl |= CFG_INV_CSYNC;
	if (dcrtc->crtc.mode.flags & DRM_MODE_FLAG_NHSYNC)
		dumb_ctrl |= CFG_INV_HSYNC;
	if (dcrtc->crtc.mode.flags & DRM_MODE_FLAG_NVSYNC)
		dumb_ctrl |= CFG_INV_VSYNC;

	if (dcrtc->dumb_ctrl != dumb_ctrl) {
		dcrtc->dumb_ctrl = dumb_ctrl;
		writel_relaxed(dumb_ctrl, dcrtc->base + LCD_SPU_DUMB_CTRL);
	}
}

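/*
 * Compute the DMA start address of each framebuffer plane for the pixel at
 * (x, y): the GEM object's device address plus the plane offset, line
 * offset and byte offset within the line.  x and y are divided by the
 * format's subsampling factors for the chroma planes.  At most three
 * planes are supported; unused entries are zeroed.
 */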
void armada_drm_plane_calc_addrs(u32 *addrs, struct drm_framebuffer *fb,
	int x, int y)
{
	const struct drm_format_info *format = fb->format;
	unsigned int num_planes = format->num_planes;
	u32 addr = drm_fb_obj(fb)->dev_addr;
	int i;

	if (num_planes > 3)
		num_planes = 3;

	addrs[0] = addr + fb->offsets[0] + y * fb->pitches[0] +
		   x * format->cpp[0];

	y /= format->vsub;
	x /= format->hsub;

	for (i = 1; i < num_planes; i++)
		addrs[i] = addr + fb->offsets[i] + y * fb->pitches[i] +
			   x * format->cpp[i];
	for (; i < 3; i++)
		addrs[i] = 0;
}

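/*
 * Queue the graphic layer frame address and pitch registers for the given
 * framebuffer and panning offset.  For interlaced scanout the even field
 * starts one line further into the buffer and the pitch is doubled, so the
 * odd and even fields scan out alternate lines.
 */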
static unsigned armada_drm_crtc_calc_fb(struct drm_framebuffer *fb,
	int x, int y, struct armada_regs *regs, bool interlaced)
{
	unsigned pitch = fb->pitches[0];
	u32 addrs[3], addr_odd, addr_even;
	unsigned i = 0;

	DRM_DEBUG_DRIVER("pitch %u x %d y %d bpp %d\n",
		pitch, x, y, fb->format->cpp[0] * 8);

	armada_drm_plane_calc_addrs(addrs, fb, x, y);

	addr_odd = addr_even = addrs[0];

	if (interlaced) {
		addr_even += pitch;
		pitch *= 2;
	}

	/* write offset, base, and pitch */
	armada_reg_queue_set(regs, i, addr_odd, LCD_CFG_GRA_START_ADDR0);
	armada_reg_queue_set(regs, i, addr_even, LCD_CFG_GRA_START_ADDR1);
	armada_reg_queue_mod(regs, i, pitch, 0xffff, LCD_CFG_GRA_PITCH);

	return i;
}

static void armada_drm_plane_work_call(struct armada_crtc *dcrtc,
	struct armada_plane_work *work,
	void (*fn)(struct armada_crtc *, struct armada_plane_work *))
{
	struct armada_plane *dplane = drm_to_armada_plane(work->plane);
	struct drm_pending_vblank_event *event;
	struct drm_framebuffer *fb;

	if (fn)
		fn(dcrtc, work);
	drm_crtc_vblank_put(&dcrtc->crtc);

	event = work->event;
	fb = work->old_fb;
	if (event || fb) {
		struct drm_device *dev = dcrtc->crtc.dev;
		unsigned long flags;

		spin_lock_irqsave(&dev->event_lock, flags);
		if (event)
			drm_crtc_send_vblank_event(&dcrtc->crtc, event);
		if (fb)
			__armada_drm_queue_unref_work(dev, fb);
		spin_unlock_irqrestore(&dev->event_lock, flags);
	}

	if (work->need_kfree)
		kfree(work);

	wake_up(&dplane->frame_wait);
}

static void armada_drm_plane_work_run(struct armada_crtc *dcrtc,
	struct drm_plane *plane)
{
	struct armada_plane *dplane = drm_to_armada_plane(plane);
	struct armada_plane_work *work = xchg(&dplane->work, NULL);

	/* Handle any pending frame work. */
	if (work)
		armada_drm_plane_work_call(dcrtc, work, work->fn);
}

int armada_drm_plane_work_queue(struct armada_crtc *dcrtc,
	struct armada_plane_work *work)
{
	struct armada_plane *plane = drm_to_armada_plane(work->plane);
	int ret;

	ret = drm_crtc_vblank_get(&dcrtc->crtc);
	if (ret)
		return ret;

	ret = cmpxchg(&plane->work, NULL, work) ? -EBUSY : 0;
	if (ret)
		drm_crtc_vblank_put(&dcrtc->crtc);

	return ret;
}

int armada_drm_plane_work_wait(struct armada_plane *plane, long timeout)
{
	return wait_event_timeout(plane->frame_wait, !plane->work, timeout);
}
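/*
 * Plane work handling: each armada_plane has a single pending work slot.
 * armada_drm_plane_work_queue() claims it with cmpxchg() (returning -EBUSY
 * if something is already queued) and holds a vblank reference until the
 * work has run.  The frame/vblank interrupt takes the item with xchg() and
 * runs it via armada_drm_plane_work_call(), which sends any pending flip
 * event, queues the old framebuffer for unreferencing, and wakes anyone in
 * armada_drm_plane_work_wait().
 */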

void armada_drm_plane_work_cancel(struct armada_crtc *dcrtc,
	struct armada_plane *dplane)
{
	struct armada_plane_work *work = xchg(&dplane->work, NULL);

	if (work)
		armada_drm_plane_work_call(dcrtc, work, work->cancel);
}

static void armada_drm_crtc_complete_frame_work(struct armada_crtc *dcrtc,
	struct armada_plane_work *work)
{
	unsigned long flags;

	spin_lock_irqsave(&dcrtc->irq_lock, flags);
	armada_drm_crtc_update_regs(dcrtc, work->regs);
	spin_unlock_irqrestore(&dcrtc->irq_lock, flags);
}

static void armada_drm_crtc_complete_disable_work(struct armada_crtc *dcrtc,
	struct armada_plane_work *work)
{
	unsigned long flags;

	if (dcrtc->plane == work->plane)
		dcrtc->plane = NULL;

	spin_lock_irqsave(&dcrtc->irq_lock, flags);
	armada_drm_crtc_update_regs(dcrtc, work->regs);
	spin_unlock_irqrestore(&dcrtc->irq_lock, flags);
}

static struct armada_plane_work *
armada_drm_crtc_alloc_plane_work(struct drm_plane *plane)
{
	struct armada_plane_work *work;
	int i = 0;

	work = kzalloc(sizeof(*work), GFP_KERNEL);
	if (!work)
		return NULL;

	work->plane = plane;
	work->fn = armada_drm_crtc_complete_frame_work;
	work->need_kfree = true;
	armada_reg_queue_end(work->regs, i);

	return work;
}

static void armada_drm_crtc_finish_fb(struct armada_crtc *dcrtc,
	struct drm_framebuffer *fb, bool force)
{
	struct armada_plane_work *work;

	if (!fb)
		return;

	if (force) {
		/* Display is disabled, so just drop the old fb */
		drm_framebuffer_put(fb);
		return;
	}

	work = armada_drm_crtc_alloc_plane_work(dcrtc->crtc.primary);
	if (work) {
		work->old_fb = fb;

		if (armada_drm_plane_work_queue(dcrtc, work) == 0)
			return;

		kfree(work);
	}

	/*
	 * Oops - just drop the reference immediately and hope for
	 * the best.  The worst that will happen is the buffer gets
	 * reused before it has finished being displayed.
	 */
	drm_framebuffer_put(fb);
}

static void armada_drm_vblank_off(struct armada_crtc *dcrtc)
{
	/*
	 * Tell the DRM core that vblank IRQs aren't going to happen for
	 * a while.  This cleans up any pending vblank events for us.
	 */
	drm_crtc_vblank_off(&dcrtc->crtc);
	armada_drm_plane_work_run(dcrtc, dcrtc->crtc.primary);
}

/* The mode_config.mutex will be held for this call */
static void armada_drm_crtc_dpms(struct drm_crtc *crtc, int dpms)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);

	if (dpms_blanked(dcrtc->dpms) != dpms_blanked(dpms)) {
		if (dpms_blanked(dpms))
			armada_drm_vblank_off(dcrtc);
		else if (!IS_ERR(dcrtc->clk))
			WARN_ON(clk_prepare_enable(dcrtc->clk));
		dcrtc->dpms = dpms;
		armada_drm_crtc_update(dcrtc);
		if (!dpms_blanked(dpms))
			drm_crtc_vblank_on(&dcrtc->crtc);
		else if (!IS_ERR(dcrtc->clk))
			clk_disable_unprepare(dcrtc->clk);
	} else if (dcrtc->dpms != dpms) {
		dcrtc->dpms = dpms;
	}
}

/*
 * Prepare for a mode set.  Turn off overlay to ensure that we don't end
 * up with the overlay size being bigger than the active screen size.
 * We rely upon X refreshing this state after the mode set has completed.
 *
 * The mode_config.mutex will be held for this call
 */
static void armada_drm_crtc_prepare(struct drm_crtc *crtc)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	struct drm_plane *plane;

	/*
	 * If we have an overlay plane associated with this CRTC, disable
	 * it before the modeset to avoid its coordinates being outside
	 * the new mode parameters.
	 */
	plane = dcrtc->plane;
	if (plane) {
		drm_plane_force_disable(plane);
		WARN_ON(!armada_drm_plane_work_wait(drm_to_armada_plane(plane),
						    HZ));
	}
}

/* The mode_config.mutex will be held for this call */
static void armada_drm_crtc_commit(struct drm_crtc *crtc)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);

	if (dcrtc->dpms != DRM_MODE_DPMS_ON) {
		dcrtc->dpms = DRM_MODE_DPMS_ON;
		armada_drm_crtc_update(dcrtc);
	}
}

/* The mode_config.mutex will be held for this call */
static bool armada_drm_crtc_mode_fixup(struct drm_crtc *crtc,
	const struct drm_display_mode *mode, struct drm_display_mode *adj)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	int ret;

	/* We can't do interlaced modes if we don't have the SPU_ADV_REG */
	if (!dcrtc->variant->has_spu_adv_reg &&
	    adj->flags & DRM_MODE_FLAG_INTERLACE)
		return false;

	/* Check whether the display mode is possible */
	ret = dcrtc->variant->compute_clock(dcrtc, adj, NULL);
	if (ret)
		return false;

	return true;
}

/* These are locked by dev->vbl_lock */
static void armada_drm_crtc_disable_irq(struct armada_crtc *dcrtc, u32 mask)
{
	if (dcrtc->irq_ena & mask) {
		dcrtc->irq_ena &= ~mask;
		writel(dcrtc->irq_ena, dcrtc->base + LCD_SPU_IRQ_ENA);
	}
}

static void armada_drm_crtc_enable_irq(struct armada_crtc *dcrtc, u32 mask)
{
	if ((dcrtc->irq_ena & mask) != mask) {
		dcrtc->irq_ena |= mask;
		writel(dcrtc->irq_ena, dcrtc->base + LCD_SPU_IRQ_ENA);
		if (readl_relaxed(dcrtc->base + LCD_SPU_IRQ_ISR) & mask)
			writel(0, dcrtc->base + LCD_SPU_IRQ_ISR);
	}
}

static void armada_drm_crtc_irq(struct armada_crtc *dcrtc, u32 stat)
{
	void __iomem *base = dcrtc->base;
	struct drm_plane *ovl_plane;

	if (stat & DMA_FF_UNDERFLOW)
		DRM_ERROR("video underflow on crtc %u\n", dcrtc->num);
	if (stat & GRA_FF_UNDERFLOW)
		DRM_ERROR("graphics underflow on crtc %u\n", dcrtc->num);

	if (stat & VSYNC_IRQ)
		drm_crtc_handle_vblank(&dcrtc->crtc);

	ovl_plane = dcrtc->plane;
	if (ovl_plane)
		armada_drm_plane_work_run(dcrtc, ovl_plane);

	spin_lock(&dcrtc->irq_lock);
	if (stat & GRA_FRAME_IRQ && dcrtc->interlaced) {
		int i = stat & GRA_FRAME_IRQ0 ? 0 : 1;
		uint32_t val;

		writel_relaxed(dcrtc->v[i].spu_v_porch, base + LCD_SPU_V_PORCH);
		writel_relaxed(dcrtc->v[i].spu_v_h_total,
			       base + LCD_SPUT_V_H_TOTAL);

		val = readl_relaxed(base + LCD_SPU_ADV_REG);
		val &= ~(ADV_VSYNC_L_OFF | ADV_VSYNC_H_OFF | ADV_VSYNCOFFEN);
		val |= dcrtc->v[i].spu_adv_reg;
		writel_relaxed(val, base + LCD_SPU_ADV_REG);
	}

	if (stat & DUMB_FRAMEDONE && dcrtc->cursor_update) {
		writel_relaxed(dcrtc->cursor_hw_pos,
			       base + LCD_SPU_HWC_OVSA_HPXL_VLN);
		writel_relaxed(dcrtc->cursor_hw_sz,
			       base + LCD_SPU_HWC_HPXL_VLN);
		armada_updatel(CFG_HWC_ENA,
			       CFG_HWC_ENA | CFG_HWC_1BITMOD | CFG_HWC_1BITENA,
			       base + LCD_SPU_DMA_CTRL0);
		dcrtc->cursor_update = false;
		armada_drm_crtc_disable_irq(dcrtc, DUMB_FRAMEDONE_ENA);
	}

	spin_unlock(&dcrtc->irq_lock);

	if (stat & GRA_FRAME_IRQ)
		armada_drm_plane_work_run(dcrtc, dcrtc->crtc.primary);
}

static irqreturn_t armada_drm_irq(int irq, void *arg)
{
	struct armada_crtc *dcrtc = arg;
	u32 v, stat = readl_relaxed(dcrtc->base + LCD_SPU_IRQ_ISR);

	/*
	 * This is ridiculous - rather than writing bits to clear, we
	 * have to set the actual status register value.  This is racy.
	 */
	writel_relaxed(0, dcrtc->base + LCD_SPU_IRQ_ISR);

	trace_armada_drm_irq(&dcrtc->crtc, stat);

	/* Mask out those interrupts we haven't enabled */
	v = stat & dcrtc->irq_ena;

	if (v & (VSYNC_IRQ|GRA_FRAME_IRQ|DUMB_FRAMEDONE)) {
		armada_drm_crtc_irq(dcrtc, stat);
		return IRQ_HANDLED;
	}
	return IRQ_NONE;
}

static uint32_t armada_drm_crtc_calculate_csc(struct armada_crtc *dcrtc)
{
	struct drm_display_mode *adj = &dcrtc->crtc.mode;
	uint32_t val = 0;

	if (dcrtc->csc_yuv_mode == CSC_YUV_CCIR709)
		val |= CFG_CSC_YUV_CCIR709;
	if (dcrtc->csc_rgb_mode == CSC_RGB_STUDIO)
		val |= CFG_CSC_RGB_STUDIO;

	/*
	 * In auto mode, set the colorimetry, based upon the HDMI spec.
	 * 1280x720p, 1920x1080p and 1920x1080i use ITU709, others use
	 * ITU601.  It may be more appropriate to set this depending on
	 * the source - but what if the graphic frame is YUV and the
	 * video frame is RGB?
	 */
	if ((adj->hdisplay == 1280 && adj->vdisplay == 720 &&
	     !(adj->flags & DRM_MODE_FLAG_INTERLACE)) ||
	    (adj->hdisplay == 1920 && adj->vdisplay == 1080)) {
		if (dcrtc->csc_yuv_mode == CSC_AUTO)
			val |= CFG_CSC_YUV_CCIR709;
	}

	/*
	 * We assume we're connected to a TV-like device, so the YUV->RGB
	 * conversion should produce a limited range.  We should set this
	 * depending on the connectors attached to this CRTC, and what
	 * kind of device they report being connected.
	 */
	if (dcrtc->csc_rgb_mode == CSC_AUTO)
		val |= CFG_CSC_RGB_STUDIO;

	return val;
}

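/*
 * Queue the graphic (primary) layer register set for a framebuffer and
 * armada_plane_state: frame addresses and pitch, on-screen position,
 * source and destination sizes, and the format/enable bits in
 * LCD_SPU_DMA_CTRL0, with field toggling enabled for interlaced scanout.
 */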
static void armada_drm_gra_plane_regs(struct armada_regs *regs,
	struct drm_framebuffer *fb, struct armada_plane_state *state,
	int x, int y, bool interlaced)
{
	unsigned int i;
	u32 ctrl0;

	i = armada_drm_crtc_calc_fb(fb, x, y, regs, interlaced);
	armada_reg_queue_set(regs, i, state->dst_yx, LCD_SPU_GRA_OVSA_HPXL_VLN);
	armada_reg_queue_set(regs, i, state->src_hw, LCD_SPU_GRA_HPXL_VLN);
	armada_reg_queue_set(regs, i, state->dst_hw, LCD_SPU_GZM_HPXL_VLN);

	ctrl0 = state->ctrl0;
	if (interlaced)
		ctrl0 |= CFG_GRA_FTOGGLE;

	armada_reg_queue_mod(regs, i, ctrl0, CFG_GRAFORMAT |
			     CFG_GRA_MOD(CFG_SWAPRB | CFG_SWAPUV |
					 CFG_SWAPYU | CFG_YUV2RGB) |
			     CFG_PALETTE_ENA | CFG_GRA_FTOGGLE |
			     CFG_GRA_HSMOOTH | CFG_GRA_ENA,
			     LCD_SPU_DMA_CTRL0);
	armada_reg_queue_end(regs, i);
}

static void armada_drm_primary_set(struct drm_crtc *crtc,
	struct drm_plane *plane, int x, int y)
{
	struct armada_plane_state *state = &drm_to_armada_plane(plane)->state;
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	struct armada_regs regs[8];
	bool interlaced = dcrtc->interlaced;

	armada_drm_gra_plane_regs(regs, plane->fb, state, x, y, interlaced);
	armada_drm_crtc_update_regs(dcrtc, regs);
}

/* The mode_config.mutex will be held for this call */
static int armada_drm_crtc_mode_set(struct drm_crtc *crtc,
	struct drm_display_mode *mode, struct drm_display_mode *adj,
	int x, int y, struct drm_framebuffer *old_fb)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	struct armada_regs regs[17];
	uint32_t lm, rm, tm, bm, val, sclk;
	unsigned long flags;
	unsigned i;
	bool interlaced;

	drm_framebuffer_get(crtc->primary->fb);

	interlaced = !!(adj->flags & DRM_MODE_FLAG_INTERLACE);

	val = CFG_GRA_ENA;
	val |= CFG_GRA_FMT(drm_fb_to_armada_fb(dcrtc->crtc.primary->fb)->fmt);
	val |= CFG_GRA_MOD(drm_fb_to_armada_fb(dcrtc->crtc.primary->fb)->mod);

	if (drm_fb_to_armada_fb(dcrtc->crtc.primary->fb)->fmt > CFG_420)
		val |= CFG_PALETTE_ENA;

	drm_to_armada_plane(crtc->primary)->state.ctrl0 = val;
	drm_to_armada_plane(crtc->primary)->state.src_hw =
	drm_to_armada_plane(crtc->primary)->state.dst_hw =
		adj->crtc_vdisplay << 16 | adj->crtc_hdisplay;
	drm_to_armada_plane(crtc->primary)->state.dst_yx = 0;

	i = 0;
	rm = adj->crtc_hsync_start - adj->crtc_hdisplay;
	lm = adj->crtc_htotal - adj->crtc_hsync_end;
	bm = adj->crtc_vsync_start - adj->crtc_vdisplay;
	tm = adj->crtc_vtotal - adj->crtc_vsync_end;

	DRM_DEBUG_DRIVER("H: %d %d %d %d lm %d rm %d\n",
		adj->crtc_hdisplay,
		adj->crtc_hsync_start,
		adj->crtc_hsync_end,
		adj->crtc_htotal, lm, rm);
	DRM_DEBUG_DRIVER("V: %d %d %d %d tm %d bm %d\n",
		adj->crtc_vdisplay,
		adj->crtc_vsync_start,
		adj->crtc_vsync_end,
		adj->crtc_vtotal, tm, bm);

	/* Wait for pending flips to complete */
	armada_drm_plane_work_wait(drm_to_armada_plane(dcrtc->crtc.primary),
				   MAX_SCHEDULE_TIMEOUT);

	drm_crtc_vblank_off(crtc);

	val = dcrtc->dumb_ctrl & ~CFG_DUMB_ENA;
	if (val != dcrtc->dumb_ctrl) {
		dcrtc->dumb_ctrl = val;
		writel_relaxed(val, dcrtc->base + LCD_SPU_DUMB_CTRL);
	}

	/*
	 * If we are blanked, we would have disabled the clock.  Re-enable
	 * it so that compute_clock() does the right thing.
	 */
	if (!IS_ERR(dcrtc->clk) && dpms_blanked(dcrtc->dpms))
		WARN_ON(clk_prepare_enable(dcrtc->clk));

	/* Now compute the divider for real */
	dcrtc->variant->compute_clock(dcrtc, adj, &sclk);

	armada_reg_queue_set(regs, i, sclk, LCD_CFG_SCLK_DIV);

	if (interlaced ^ dcrtc->interlaced) {
		if (adj->flags & DRM_MODE_FLAG_INTERLACE)
			drm_crtc_vblank_get(&dcrtc->crtc);
		else
			drm_crtc_vblank_put(&dcrtc->crtc);
		dcrtc->interlaced = interlaced;
	}

	spin_lock_irqsave(&dcrtc->irq_lock, flags);

	/* Ensure graphic fifo is enabled */
	armada_reg_queue_mod(regs, i, 0, CFG_PDWN64x66, LCD_SPU_SRAM_PARA1);

	/* Even interlaced/progressive frame */
	dcrtc->v[1].spu_v_h_total = adj->crtc_vtotal << 16 |
				    adj->crtc_htotal;
	dcrtc->v[1].spu_v_porch = tm << 16 | bm;
	val = adj->crtc_hsync_start;
	dcrtc->v[1].spu_adv_reg = val << 20 | val | ADV_VSYNCOFFEN |
		dcrtc->variant->spu_adv_reg;

	if (interlaced) {
		/* Odd interlaced frame */
		dcrtc->v[0].spu_v_h_total = dcrtc->v[1].spu_v_h_total +
						(1 << 16);
		dcrtc->v[0].spu_v_porch = dcrtc->v[1].spu_v_porch + 1;
		val = adj->crtc_hsync_start - adj->crtc_htotal / 2;
		dcrtc->v[0].spu_adv_reg = val << 20 | val | ADV_VSYNCOFFEN |
			dcrtc->variant->spu_adv_reg;
	} else {
		dcrtc->v[0] = dcrtc->v[1];
	}

	val = adj->crtc_vdisplay << 16 | adj->crtc_hdisplay;

	armada_reg_queue_set(regs, i, val, LCD_SPU_V_H_ACTIVE);
	armada_reg_queue_set(regs, i, (lm << 16) | rm, LCD_SPU_H_PORCH);
	armada_reg_queue_set(regs, i, dcrtc->v[0].spu_v_porch, LCD_SPU_V_PORCH);
	armada_reg_queue_set(regs, i, dcrtc->v[0].spu_v_h_total,
			     LCD_SPUT_V_H_TOTAL);

	if (dcrtc->variant->has_spu_adv_reg) {
		armada_reg_queue_mod(regs, i, dcrtc->v[0].spu_adv_reg,
				     ADV_VSYNC_L_OFF | ADV_VSYNC_H_OFF |
				     ADV_VSYNCOFFEN, LCD_SPU_ADV_REG);
	}

	val = adj->flags & DRM_MODE_FLAG_NVSYNC ? CFG_VSYNC_INV : 0;
	armada_reg_queue_mod(regs, i, val, CFG_VSYNC_INV, LCD_SPU_DMA_CTRL1);

	val = dcrtc->spu_iopad_ctrl | armada_drm_crtc_calculate_csc(dcrtc);
	armada_reg_queue_set(regs, i, val, LCD_SPU_IOPAD_CONTROL);
	armada_reg_queue_end(regs, i);

	armada_drm_crtc_update_regs(dcrtc, regs);

	armada_drm_primary_set(crtc, crtc->primary, x, y);
	spin_unlock_irqrestore(&dcrtc->irq_lock, flags);

	armada_drm_crtc_update(dcrtc);

	drm_crtc_vblank_on(crtc);
	armada_drm_crtc_finish_fb(dcrtc, old_fb, dpms_blanked(dcrtc->dpms));

	return 0;
}

/* The mode_config.mutex will be held for this call */
static int armada_drm_crtc_mode_set_base(struct drm_crtc *crtc, int x, int y,
	struct drm_framebuffer *old_fb)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	struct armada_regs regs[4];
	unsigned i;

	i = armada_drm_crtc_calc_fb(crtc->primary->fb, crtc->x, crtc->y, regs,
				    dcrtc->interlaced);
	armada_reg_queue_end(regs, i);

	/* Wait for pending flips to complete */
	armada_drm_plane_work_wait(drm_to_armada_plane(dcrtc->crtc.primary),
				   MAX_SCHEDULE_TIMEOUT);

	/* Take a reference to the new fb as we're using it */
	drm_framebuffer_get(crtc->primary->fb);

	/* Update the base in the CRTC */
	armada_drm_crtc_update_regs(dcrtc, regs);

	/* Drop our previously held reference */
	armada_drm_crtc_finish_fb(dcrtc, old_fb, dpms_blanked(dcrtc->dpms));

	return 0;
}

/* The mode_config.mutex will be held for this call */
static void armada_drm_crtc_disable(struct drm_crtc *crtc)
{
	armada_drm_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);

	/* Disable our primary plane when we disable the CRTC. */
	crtc->primary->funcs->disable_plane(crtc->primary, NULL);
}

static const struct drm_crtc_helper_funcs armada_crtc_helper_funcs = {
	.dpms = armada_drm_crtc_dpms,
	.prepare = armada_drm_crtc_prepare,
	.commit = armada_drm_crtc_commit,
	.mode_fixup = armada_drm_crtc_mode_fixup,
	.mode_set = armada_drm_crtc_mode_set,
	.mode_set_base = armada_drm_crtc_mode_set_base,
	.disable = armada_drm_crtc_disable,
};

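/*
 * Load an ARGB8888 cursor image into the hardware cursor SRAM.  Each pixel
 * has its red and blue channels swapped on the way in, and the SRAM write
 * address is advanced past the holes in the RAM1/RAM2 cursor banks as the
 * low address byte wraps.
 */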
static void armada_load_cursor_argb(void __iomem *base, uint32_t *pix,
	unsigned stride, unsigned width, unsigned height)
{
	uint32_t addr;
	unsigned y;

	addr = SRAM_HWC32_RAM1;
	for (y = 0; y < height; y++) {
		uint32_t *p = &pix[y * stride];
		unsigned x;

		for (x = 0; x < width; x++, p++) {
			uint32_t val = *p;

			val = (val & 0xff00ff00) |
			      (val & 0x000000ff) << 16 |
			      (val & 0x00ff0000) >> 16;

			writel_relaxed(val,
				       base + LCD_SPU_SRAM_WRDAT);
			writel_relaxed(addr | SRAM_WRITE,
				       base + LCD_SPU_SRAM_CTRL);
			readl_relaxed(base + LCD_SPU_HWC_OVSA_HPXL_VLN);
			addr += 1;
			if ((addr & 0x00ff) == 0)
				addr += 0xf00;
			if ((addr & 0x30ff) == 0)
				addr = SRAM_HWC32_RAM2;
		}
	}
}

static void armada_drm_crtc_cursor_tran(void __iomem *base)
{
	unsigned addr;

	for (addr = 0; addr < 256; addr++) {
		/* write the default value */
		writel_relaxed(0x55555555, base + LCD_SPU_SRAM_WRDAT);
		writel_relaxed(addr | SRAM_WRITE | SRAM_HWC32_TRAN,
			       base + LCD_SPU_SRAM_CTRL);
	}
}

static int armada_drm_crtc_cursor_update(struct armada_crtc *dcrtc, bool reload)
{
	uint32_t xoff, xscr, w = dcrtc->cursor_w, s;
	uint32_t yoff, yscr, h = dcrtc->cursor_h;
	uint32_t para1;

	/*
	 * Calculate the visible width and height of the cursor,
	 * screen position, and the position in the cursor bitmap.
	 */
	if (dcrtc->cursor_x < 0) {
		xoff = -dcrtc->cursor_x;
		xscr = 0;
		w -= min(xoff, w);
	} else if (dcrtc->cursor_x + w > dcrtc->crtc.mode.hdisplay) {
		xoff = 0;
		xscr = dcrtc->cursor_x;
		w = max_t(int, dcrtc->crtc.mode.hdisplay - dcrtc->cursor_x, 0);
	} else {
		xoff = 0;
		xscr = dcrtc->cursor_x;
	}

	if (dcrtc->cursor_y < 0) {
		yoff = -dcrtc->cursor_y;
		yscr = 0;
		h -= min(yoff, h);
	} else if (dcrtc->cursor_y + h > dcrtc->crtc.mode.vdisplay) {
		yoff = 0;
		yscr = dcrtc->cursor_y;
		h = max_t(int, dcrtc->crtc.mode.vdisplay - dcrtc->cursor_y, 0);
	} else {
		yoff = 0;
		yscr = dcrtc->cursor_y;
	}

	/* On interlaced modes, the vertical cursor size must be halved */
	s = dcrtc->cursor_w;
	if (dcrtc->interlaced) {
		s *= 2;
		yscr /= 2;
		h /= 2;
	}

	if (!dcrtc->cursor_obj || !h || !w) {
		spin_lock_irq(&dcrtc->irq_lock);
		armada_drm_crtc_disable_irq(dcrtc, DUMB_FRAMEDONE_ENA);
		dcrtc->cursor_update = false;
		armada_updatel(0, CFG_HWC_ENA, dcrtc->base + LCD_SPU_DMA_CTRL0);
		spin_unlock_irq(&dcrtc->irq_lock);
		return 0;
	}

	spin_lock_irq(&dcrtc->irq_lock);
	para1 = readl_relaxed(dcrtc->base + LCD_SPU_SRAM_PARA1);
	armada_updatel(CFG_CSB_256x32, CFG_CSB_256x32 | CFG_PDWN256x32,
		       dcrtc->base + LCD_SPU_SRAM_PARA1);
	spin_unlock_irq(&dcrtc->irq_lock);

	/*
	 * Initialize the transparency if the SRAM was powered down.
	 * We must also reload the cursor data as well.
	 */
	if (!(para1 & CFG_CSB_256x32)) {
		armada_drm_crtc_cursor_tran(dcrtc->base);
		reload = true;
	}

	if (dcrtc->cursor_hw_sz != (h << 16 | w)) {
		spin_lock_irq(&dcrtc->irq_lock);
		armada_drm_crtc_disable_irq(dcrtc, DUMB_FRAMEDONE_ENA);
		dcrtc->cursor_update = false;
		armada_updatel(0, CFG_HWC_ENA, dcrtc->base + LCD_SPU_DMA_CTRL0);
		spin_unlock_irq(&dcrtc->irq_lock);
		reload = true;
	}
	if (reload) {
		struct armada_gem_object *obj = dcrtc->cursor_obj;
		uint32_t *pix;
		/* Set the top-left corner of the cursor image */
		pix = obj->addr;
		pix += yoff * s + xoff;
		armada_load_cursor_argb(dcrtc->base, pix, s, w, h);
	}

	/* Reload the cursor position, size and enable in the IRQ handler */
	spin_lock_irq(&dcrtc->irq_lock);
	dcrtc->cursor_hw_pos = yscr << 16 | xscr;
	dcrtc->cursor_hw_sz = h << 16 | w;
	dcrtc->cursor_update = true;
	armada_drm_crtc_enable_irq(dcrtc, DUMB_FRAMEDONE_ENA);
	spin_unlock_irq(&dcrtc->irq_lock);

	return 0;
}

static void cursor_update(void *data)
{
	armada_drm_crtc_cursor_update(data, true);
}

static int armada_drm_crtc_cursor_set(struct drm_crtc *crtc,
	struct drm_file *file, uint32_t handle, uint32_t w, uint32_t h)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	struct armada_gem_object *obj = NULL;
	int ret;

	/* If no cursor support, replicate drm's return value */
	if (!dcrtc->variant->has_spu_adv_reg)
		return -ENXIO;

	if (handle && w > 0 && h > 0) {
		/* maximum size is 64x32 or 32x64 */
		if (w > 64 || h > 64 || (w > 32 && h > 32))
			return -ENOMEM;

		obj = armada_gem_object_lookup(file, handle);
		if (!obj)
			return -ENOENT;

		/* Must be a kernel-mapped object */
		if (!obj->addr) {
			drm_gem_object_put_unlocked(&obj->obj);
			return -EINVAL;
		}

		if (obj->obj.size < w * h * 4) {
			DRM_ERROR("buffer is too small\n");
			drm_gem_object_put_unlocked(&obj->obj);
			return -ENOMEM;
		}
	}

	if (dcrtc->cursor_obj) {
		dcrtc->cursor_obj->update = NULL;
		dcrtc->cursor_obj->update_data = NULL;
		drm_gem_object_put_unlocked(&dcrtc->cursor_obj->obj);
	}
	dcrtc->cursor_obj = obj;
	dcrtc->cursor_w = w;
	dcrtc->cursor_h = h;
	ret = armada_drm_crtc_cursor_update(dcrtc, true);
	if (obj) {
		obj->update_data = dcrtc;
		obj->update = cursor_update;
	}

	return ret;
}

static int armada_drm_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	int ret;

	/* If no cursor support, replicate drm's return value */
	if (!dcrtc->variant->has_spu_adv_reg)
		return -EFAULT;

	dcrtc->cursor_x = x;
	dcrtc->cursor_y = y;
	ret = armada_drm_crtc_cursor_update(dcrtc, false);

	return ret;
}

static void armada_drm_crtc_destroy(struct drm_crtc *crtc)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	struct armada_private *priv = crtc->dev->dev_private;

	if (dcrtc->cursor_obj)
		drm_gem_object_put_unlocked(&dcrtc->cursor_obj->obj);

	priv->dcrtc[dcrtc->num] = NULL;
	drm_crtc_cleanup(&dcrtc->crtc);

	if (!IS_ERR(dcrtc->clk))
		clk_disable_unprepare(dcrtc->clk);

	writel_relaxed(0, dcrtc->base + LCD_SPU_IRQ_ENA);

	of_node_put(dcrtc->crtc.port);

	kfree(dcrtc);
}

/*
 * The mode_config lock is held here, to prevent races between this
 * and a mode_set.
 */
static int armada_drm_crtc_page_flip(struct drm_crtc *crtc,
	struct drm_framebuffer *fb, struct drm_pending_vblank_event *event,
	uint32_t page_flip_flags, struct drm_modeset_acquire_ctx *ctx)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	struct armada_plane_work *work;
	unsigned i;
	int ret;

	/* We don't support changing the pixel format */
	if (fb->format != crtc->primary->fb->format)
		return -EINVAL;

	work = armada_drm_crtc_alloc_plane_work(dcrtc->crtc.primary);
	if (!work)
		return -ENOMEM;

	work->event = event;
	work->old_fb = dcrtc->crtc.primary->fb;

	i = armada_drm_crtc_calc_fb(fb, crtc->x, crtc->y, work->regs,
				    dcrtc->interlaced);
	armada_reg_queue_end(work->regs, i);

	/*
	 * Ensure that we hold a reference on the new framebuffer.
	 * This has to match the behaviour in mode_set.
	 */
	drm_framebuffer_get(fb);

	ret = armada_drm_plane_work_queue(dcrtc, work);
	if (ret) {
		/* Undo our reference above */
		drm_framebuffer_put(fb);
		kfree(work);
		return ret;
	}

	/*
	 * Don't take a reference on the new framebuffer;
	 * drm_mode_page_flip_ioctl() has already grabbed a reference and
	 * will _not_ drop that reference on successful return from this
	 * function.  Simply mark this new framebuffer as the current one.
	 */
	dcrtc->crtc.primary->fb = fb;

	/*
	 * Finally, if the display is blanked, we won't receive an
	 * interrupt, so complete it now.
	 */
	if (dpms_blanked(dcrtc->dpms))
		armada_drm_plane_work_run(dcrtc, dcrtc->crtc.primary);

	return 0;
}

static int
armada_drm_crtc_set_property(struct drm_crtc *crtc,
	struct drm_property *property, uint64_t val)
{
	struct armada_private *priv = crtc->dev->dev_private;
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	bool update_csc = false;

	if (property == priv->csc_yuv_prop) {
		dcrtc->csc_yuv_mode = val;
		update_csc = true;
	} else if (property == priv->csc_rgb_prop) {
		dcrtc->csc_rgb_mode = val;
		update_csc = true;
	}

	if (update_csc) {
		uint32_t val;

		val = dcrtc->spu_iopad_ctrl |
		      armada_drm_crtc_calculate_csc(dcrtc);
		writel_relaxed(val, dcrtc->base + LCD_SPU_IOPAD_CONTROL);
	}

	return 0;
}

/* These are called under the vbl_lock. */
static int armada_drm_crtc_enable_vblank(struct drm_crtc *crtc)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);

	armada_drm_crtc_enable_irq(dcrtc, VSYNC_IRQ_ENA);
	return 0;
}

static void armada_drm_crtc_disable_vblank(struct drm_crtc *crtc)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);

	armada_drm_crtc_disable_irq(dcrtc, VSYNC_IRQ_ENA);
}

static const struct drm_crtc_funcs armada_crtc_funcs = {
	.cursor_set = armada_drm_crtc_cursor_set,
	.cursor_move = armada_drm_crtc_cursor_move,
	.destroy = armada_drm_crtc_destroy,
	.set_config = drm_crtc_helper_set_config,
	.page_flip = armada_drm_crtc_page_flip,
	.set_property = armada_drm_crtc_set_property,
	.enable_vblank = armada_drm_crtc_enable_vblank,
	.disable_vblank = armada_drm_crtc_disable_vblank,
};

static void armada_drm_primary_update_state(struct drm_plane_state *state,
	struct armada_regs *regs)
{
	struct armada_plane *dplane = drm_to_armada_plane(state->plane);
	struct armada_crtc *dcrtc = drm_to_armada_crtc(state->crtc);
	struct armada_framebuffer *dfb = drm_fb_to_armada_fb(state->fb);
	bool was_disabled;
	unsigned int idx = 0;
	u32 val;

	val = CFG_GRA_FMT(dfb->fmt) | CFG_GRA_MOD(dfb->mod);
	if (dfb->fmt > CFG_420)
		val |= CFG_PALETTE_ENA;
	if (state->visible)
		val |= CFG_GRA_ENA;
	if (drm_rect_width(&state->src) >> 16 != drm_rect_width(&state->dst))
		val |= CFG_GRA_HSMOOTH;

	was_disabled = !(dplane->state.ctrl0 & CFG_GRA_ENA);
	if (was_disabled)
		armada_reg_queue_mod(regs, idx,
				     0, CFG_PDWN64x66, LCD_SPU_SRAM_PARA1);

	dplane->state.ctrl0 = val;
	dplane->state.src_hw = (drm_rect_height(&state->src) & 0xffff0000) |
			       drm_rect_width(&state->src) >> 16;
	dplane->state.dst_hw = drm_rect_height(&state->dst) << 16 |
			       drm_rect_width(&state->dst);
	dplane->state.dst_yx = state->dst.y1 << 16 | state->dst.x1;

	armada_drm_gra_plane_regs(regs + idx, &dfb->fb, &dplane->state,
				  state->src.x1 >> 16, state->src.y1 >> 16,
				  dcrtc->interlaced);

	dplane->state.vsync_update = !was_disabled;
	dplane->state.changed = true;
}

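/*
 * Legacy (non-atomic) primary plane update: build a transitional
 * drm_plane_state on the stack, validate it with
 * drm_atomic_helper_check_plane_state(), and apply the resulting register
 * updates through the plane work mechanism so they take effect at the next
 * frame interrupt, or immediately if the plane was previously disabled.
 */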
static int armada_drm_primary_update(struct drm_plane *plane,
	struct drm_crtc *crtc, struct drm_framebuffer *fb,
	int crtc_x, int crtc_y, unsigned int crtc_w, unsigned int crtc_h,
	uint32_t src_x, uint32_t src_y, uint32_t src_w, uint32_t src_h,
	struct drm_modeset_acquire_ctx *ctx)
{
	struct armada_plane *dplane = drm_to_armada_plane(plane);
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	struct armada_plane_work *work;
	struct drm_plane_state state = {
		.plane = plane,
		.crtc = crtc,
		.fb = fb,
		.src_x = src_x,
		.src_y = src_y,
		.src_w = src_w,
		.src_h = src_h,
		.crtc_x = crtc_x,
		.crtc_y = crtc_y,
		.crtc_w = crtc_w,
		.crtc_h = crtc_h,
		.rotation = DRM_MODE_ROTATE_0,
	};
	const struct drm_rect clip = {
		.x2 = crtc->mode.hdisplay,
		.y2 = crtc->mode.vdisplay,
	};
	int ret;

	ret = drm_atomic_helper_check_plane_state(&state, crtc->state, &clip, 0,
						  INT_MAX, true, false);
	if (ret)
		return ret;

	work = &dplane->works[dplane->next_work];
	work->fn = armada_drm_crtc_complete_frame_work;

	if (plane->fb != fb) {
		/*
		 * Take a reference on the new framebuffer - we want to
		 * hold on to it while the hardware is displaying it.
		 */
		drm_framebuffer_reference(fb);

		work->old_fb = plane->fb;
	} else {
		work->old_fb = NULL;
	}

	armada_drm_primary_update_state(&state, work->regs);

	if (!dplane->state.changed)
		return 0;

	/* Wait for pending work to complete */
	if (armada_drm_plane_work_wait(dplane, HZ / 10) == 0)
		armada_drm_plane_work_cancel(dcrtc, dplane);

	if (!dplane->state.vsync_update) {
		work->fn(dcrtc, work);
		if (work->old_fb)
			drm_framebuffer_unreference(work->old_fb);
		return 0;
	}

	/* Queue it for update on the next interrupt if we are enabled */
	ret = armada_drm_plane_work_queue(dcrtc, work);
	if (ret) {
		work->fn(dcrtc, work);
		if (work->old_fb)
			drm_framebuffer_unreference(work->old_fb);
	}

	dplane->next_work = !dplane->next_work;

	return 0;
}

int armada_drm_plane_disable(struct drm_plane *plane,
	struct drm_modeset_acquire_ctx *ctx)
{
	struct armada_plane *dplane = drm_to_armada_plane(plane);
	struct armada_crtc *dcrtc;
	struct armada_plane_work *work;
	unsigned int idx = 0;
	u32 sram_para1, enable_mask;

	if (!plane->crtc)
		return 0;

	/*
	 * Arrange to power down most RAMs and FIFOs if this is the primary
	 * plane, otherwise just the YUV FIFOs for the overlay plane.
	 */
	if (plane->type == DRM_PLANE_TYPE_PRIMARY) {
		sram_para1 = CFG_PDWN256x32 | CFG_PDWN256x24 | CFG_PDWN256x8 |
			     CFG_PDWN32x32 | CFG_PDWN64x66;
		enable_mask = CFG_GRA_ENA;
	} else {
		sram_para1 = CFG_PDWN16x66 | CFG_PDWN32x66;
		enable_mask = CFG_DMA_ENA;
	}

	dplane->state.ctrl0 &= ~enable_mask;

	dcrtc = drm_to_armada_crtc(plane->crtc);

	/*
	 * Try to disable the plane and drop our ref on the framebuffer
	 * at the next frame update.  If we fail for any reason, disable
	 * the plane immediately.
	 */
	work = &dplane->works[dplane->next_work];
	work->fn = armada_drm_crtc_complete_disable_work;
	work->cancel = armada_drm_crtc_complete_disable_work;
	work->old_fb = plane->fb;

	armada_reg_queue_mod(work->regs, idx,
			     0, enable_mask, LCD_SPU_DMA_CTRL0);
	armada_reg_queue_mod(work->regs, idx,
			     sram_para1, 0, LCD_SPU_SRAM_PARA1);
	armada_reg_queue_end(work->regs, idx);

	/* Wait for any preceding work to complete, but don't wedge */
	if (WARN_ON(!armada_drm_plane_work_wait(dplane, HZ)))
		armada_drm_plane_work_cancel(dcrtc, dplane);

	if (armada_drm_plane_work_queue(dcrtc, work)) {
		work->fn(dcrtc, work);
		if (work->old_fb)
			drm_framebuffer_unreference(work->old_fb);
	}

	dplane->next_work = !dplane->next_work;

	return 0;
}

static const struct drm_plane_funcs armada_primary_plane_funcs = {
	.update_plane = armada_drm_primary_update,
	.disable_plane = armada_drm_plane_disable,
	.destroy = drm_primary_helper_destroy,
};

int armada_drm_plane_init(struct armada_plane *plane)
{
	unsigned int i;

	for (i = 0; i < ARRAY_SIZE(plane->works); i++)
		plane->works[i].plane = &plane->base;

	init_waitqueue_head(&plane->frame_wait);

	return 0;
}

static const struct drm_prop_enum_list armada_drm_csc_yuv_enum_list[] = {
	{ CSC_AUTO,        "Auto" },
	{ CSC_YUV_CCIR601, "CCIR601" },
	{ CSC_YUV_CCIR709, "CCIR709" },
};

static const struct drm_prop_enum_list armada_drm_csc_rgb_enum_list[] = {
	{ CSC_AUTO,         "Auto" },
	{ CSC_RGB_COMPUTER, "Computer system" },
	{ CSC_RGB_STUDIO,   "Studio" },
};

static int armada_drm_crtc_create_properties(struct drm_device *dev)
{
	struct armada_private *priv = dev->dev_private;

	if (priv->csc_yuv_prop)
		return 0;

	priv->csc_yuv_prop = drm_property_create_enum(dev, 0,
		"CSC_YUV", armada_drm_csc_yuv_enum_list,
		ARRAY_SIZE(armada_drm_csc_yuv_enum_list));
	priv->csc_rgb_prop = drm_property_create_enum(dev, 0,
		"CSC_RGB", armada_drm_csc_rgb_enum_list,
		ARRAY_SIZE(armada_drm_csc_rgb_enum_list));

	if (!priv->csc_yuv_prop || !priv->csc_rgb_prop)
		return -ENOMEM;

	return 0;
}

static int armada_drm_crtc_create(struct drm_device *drm, struct device *dev,
	struct resource *res, int irq, const struct armada_variant *variant,
	struct device_node *port)
{
	struct armada_private *priv = drm->dev_private;
	struct armada_crtc *dcrtc;
	struct armada_plane *primary;
	void __iomem *base;
	int ret;

	ret = armada_drm_crtc_create_properties(drm);
	if (ret)
		return ret;

	base = devm_ioremap_resource(dev, res);
	if (IS_ERR(base))
		return PTR_ERR(base);

	dcrtc = kzalloc(sizeof(*dcrtc), GFP_KERNEL);
	if (!dcrtc) {
		DRM_ERROR("failed to allocate Armada crtc\n");
		return -ENOMEM;
	}

	if (dev != drm->dev)
		dev_set_drvdata(dev, dcrtc);

	dcrtc->variant = variant;
	dcrtc->base = base;
	dcrtc->num = drm->mode_config.num_crtc;
	dcrtc->clk = ERR_PTR(-EINVAL);
	dcrtc->csc_yuv_mode = CSC_AUTO;
	dcrtc->csc_rgb_mode = CSC_AUTO;
	dcrtc->cfg_dumb_ctrl = DUMB24_RGB888_0;
	dcrtc->spu_iopad_ctrl = CFG_VSCALE_LN_EN | CFG_IOPAD_DUMB24;
	spin_lock_init(&dcrtc->irq_lock);
	dcrtc->irq_ena = CLEAN_SPU_IRQ_ISR;

	/* Initialize some registers which we don't otherwise set */
	writel_relaxed(0x00000001, dcrtc->base + LCD_CFG_SCLK_DIV);
	writel_relaxed(0x00000000, dcrtc->base + LCD_SPU_BLANKCOLOR);
	writel_relaxed(dcrtc->spu_iopad_ctrl,
		       dcrtc->base + LCD_SPU_IOPAD_CONTROL);
	writel_relaxed(0x00000000, dcrtc->base + LCD_SPU_SRAM_PARA0);
	writel_relaxed(CFG_PDWN256x32 | CFG_PDWN256x24 | CFG_PDWN256x8 |
		       CFG_PDWN32x32 | CFG_PDWN16x66 | CFG_PDWN32x66 |
		       CFG_PDWN64x66, dcrtc->base + LCD_SPU_SRAM_PARA1);
	writel_relaxed(0x2032ff81, dcrtc->base + LCD_SPU_DMA_CTRL1);
	writel_relaxed(dcrtc->irq_ena, dcrtc->base + LCD_SPU_IRQ_ENA);
	writel_relaxed(0, dcrtc->base + LCD_SPU_IRQ_ISR);

	ret = devm_request_irq(dev, irq, armada_drm_irq, 0, "armada_drm_crtc",
			       dcrtc);
	if (ret < 0)
		goto err_crtc;

	if (dcrtc->variant->init) {
		ret = dcrtc->variant->init(dcrtc, dev);
		if (ret)
			goto err_crtc;
	}

	/* Ensure AXI pipeline is enabled */
	armada_updatel(CFG_ARBFAST_ENA, 0, dcrtc->base + LCD_SPU_DMA_CTRL0);

	priv->dcrtc[dcrtc->num] = dcrtc;

	dcrtc->crtc.port = port;

	primary = kzalloc(sizeof(*primary), GFP_KERNEL);
	if (!primary) {
		ret = -ENOMEM;
		goto err_crtc;
	}

	ret = armada_drm_plane_init(primary);
	if (ret) {
		kfree(primary);
		goto err_crtc;
	}

	ret = drm_universal_plane_init(drm, &primary->base, 0,
				       &armada_primary_plane_funcs,
				       armada_primary_formats,
				       ARRAY_SIZE(armada_primary_formats),
				       NULL,
				       DRM_PLANE_TYPE_PRIMARY, NULL);
	if (ret) {
		kfree(primary);
		goto err_crtc;
	}

	ret = drm_crtc_init_with_planes(drm, &dcrtc->crtc, &primary->base, NULL,
					&armada_crtc_funcs, NULL);
Ville Syrjäläf9882872015-12-09 16:19:31 +02001461 &armada_crtc_funcs, NULL);
Russell King1c914ce2015-07-15 18:11:24 +01001462 if (ret)
1463 goto err_crtc_init;
1464
Russell King96f60e32012-08-15 13:59:49 +01001465 drm_crtc_helper_add(&dcrtc->crtc, &armada_crtc_helper_funcs);
1466
1467 drm_object_attach_property(&dcrtc->crtc.base, priv->csc_yuv_prop,
1468 dcrtc->csc_yuv_mode);
1469 drm_object_attach_property(&dcrtc->crtc.base, priv->csc_rgb_prop,
1470 dcrtc->csc_rgb_mode);
1471
Russell Kingd8c96082014-04-22 11:10:15 +01001472 return armada_overlay_plane_create(drm, 1 << dcrtc->num);
Russell King1c914ce2015-07-15 18:11:24 +01001473
1474err_crtc_init:
Russell Kingde323012015-07-15 18:11:24 +01001475 primary->base.funcs->destroy(&primary->base);
Russell King33cd3c02017-12-08 12:16:22 +00001476err_crtc:
1477 kfree(dcrtc);
1478
Russell King1c914ce2015-07-15 18:11:24 +01001479 return ret;
Russell King96f60e32012-08-15 13:59:49 +01001480}
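/*
 * The final call above creates the per-CRTC overlay plane; 1 << dcrtc->num
 * is the possible_crtcs bitmask restricting that overlay to this CRTC.
 */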
Russell Kingd8c96082014-04-22 11:10:15 +01001481
1482static int
1483armada_lcd_bind(struct device *dev, struct device *master, void *data)
1484{
1485 struct platform_device *pdev = to_platform_device(dev);
1486 struct drm_device *drm = data;
1487 struct resource *res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
1488 int irq = platform_get_irq(pdev, 0);
1489 const struct armada_variant *variant;
Russell King9611cb92014-06-15 11:21:23 +01001490 struct device_node *port = NULL;
Russell Kingd8c96082014-04-22 11:10:15 +01001491
1492 if (irq < 0)
1493 return irq;
1494
1495 if (!dev->of_node) {
1496 const struct platform_device_id *id;
1497
1498 id = platform_get_device_id(pdev);
1499 if (!id)
1500 return -ENXIO;
1501
1502 variant = (const struct armada_variant *)id->driver_data;
1503 } else {
1504 const struct of_device_id *match;
Russell King9611cb92014-06-15 11:21:23 +01001505 struct device_node *np, *parent = dev->of_node;
Russell Kingd8c96082014-04-22 11:10:15 +01001506
1507 match = of_match_device(dev->driver->of_match_table, dev);
1508 if (!match)
1509 return -ENXIO;
1510
Russell King9611cb92014-06-15 11:21:23 +01001511 np = of_get_child_by_name(parent, "ports");
1512 if (np)
1513 parent = np;
1514 port = of_get_child_by_name(parent, "port");
1515 of_node_put(np);
1516 if (!port) {
Rob Herring4bf99142017-07-18 16:43:04 -05001517 dev_err(dev, "no port node found in %pOF\n", parent);
Russell King9611cb92014-06-15 11:21:23 +01001518 return -ENXIO;
1519 }
1520
Russell Kingd8c96082014-04-22 11:10:15 +01001521 variant = match->data;
1522 }
1523
Russell King9611cb92014-06-15 11:21:23 +01001524 return armada_drm_crtc_create(drm, dev, res, irq, variant, port);
Russell Kingd8c96082014-04-22 11:10:15 +01001525}
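/*
 * For the DT path above the driver expects the usual of_graph layout:
 * either a "ports" container or a bare "port" child below the LCD
 * controller node.  A purely illustrative fragment (the register window,
 * interrupt number and remote endpoint are placeholders, not taken from a
 * real board file):
 *
 *	lcd0: lcd-controller@820000 {
 *		compatible = "marvell,dove-lcd";
 *		reg = <0x820000 0x1000>;
 *		interrupts = <47>;
 *		port {
 *			lcd0_out: endpoint {
 *				remote-endpoint = <&hdmi_in>;
 *			};
 *		};
 *	};
 */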
1526
1527static void
1528armada_lcd_unbind(struct device *dev, struct device *master, void *data)
1529{
1530 struct armada_crtc *dcrtc = dev_get_drvdata(dev);
1531
1532 armada_drm_crtc_destroy(&dcrtc->crtc);
1533}
1534
1535static const struct component_ops armada_lcd_ops = {
1536 .bind = armada_lcd_bind,
1537 .unbind = armada_lcd_unbind,
1538};
1539
1540static int armada_lcd_probe(struct platform_device *pdev)
1541{
1542 return component_add(&pdev->dev, &armada_lcd_ops);
1543}
1544
1545static int armada_lcd_remove(struct platform_device *pdev)
1546{
1547 component_del(&pdev->dev, &armada_lcd_ops);
1548 return 0;
1549}
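/*
 * The LCD device only registers itself as a component here; the DRM master
 * driver is what collects the components and eventually invokes
 * armada_lcd_bind() above.  A minimal sketch of that master side, with
 * compare_dev_name and armada_master_ops as hypothetical names used only
 * for illustration:
 *
 *	struct component_match *match = NULL;
 *
 *	component_match_add(dev, &match, compare_dev_name, "armada-lcd");
 *	return component_master_add_with_match(dev, &armada_master_ops, match);
 *
 * component_bind_all() in the master's .bind callback is then what calls
 * armada_lcd_bind() with the drm_device passed through as @data.
 */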
1550
Arvind Yadav85909712017-06-20 10:44:33 +05301551static const struct of_device_id armada_lcd_of_match[] = {
Russell Kingd8c96082014-04-22 11:10:15 +01001552 {
1553 .compatible = "marvell,dove-lcd",
1554 .data = &armada510_ops,
1555 },
1556 {}
1557};
1558MODULE_DEVICE_TABLE(of, armada_lcd_of_match);
1559
1560static const struct platform_device_id armada_lcd_platform_ids[] = {
1561 {
1562 .name = "armada-lcd",
1563 .driver_data = (unsigned long)&armada510_ops,
1564 }, {
1565 .name = "armada-510-lcd",
1566 .driver_data = (unsigned long)&armada510_ops,
1567 },
1568 { },
1569};
1570MODULE_DEVICE_TABLE(platform, armada_lcd_platform_ids);
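/*
 * The id table keeps the legacy, non-DT probe path alive: board code can
 * still instantiate the controller as a plain platform device.  A rough
 * sketch (lcd_resources is a hypothetical, board-specific resource array):
 *
 *	pdev = platform_device_register_simple("armada-510-lcd", 0,
 *					       lcd_resources,
 *					       ARRAY_SIZE(lcd_resources));
 */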
1571
1572struct platform_driver armada_lcd_platform_driver = {
1573 .probe = armada_lcd_probe,
1574 .remove = armada_lcd_remove,
1575 .driver = {
1576 .name = "armada-lcd",
1577 .owner = THIS_MODULE,
1578 .of_match_table = armada_lcd_of_match,
1579 },
1580 .id_table = armada_lcd_platform_ids,
1581};
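/*
 * Nothing registers this driver here; the main armada_drm module is
 * expected to call platform_driver_register(&armada_lcd_platform_driver)
 * during its init (with a matching platform_driver_unregister() on exit),
 * alongside registering the component master.
 */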