blob: 650d162e374bbd1633782b50055847814014186f [file] [log] [blame]
Thomas Gleixnercaab2772019-06-03 07:44:50 +02001// SPDX-License-Identifier: GPL-2.0-only
Rob Clark16ea9752013-01-08 15:04:28 -06002/*
3 * Copyright (C) 2012 Texas Instruments
4 * Author: Rob Clark <robdclark@gmail.com>
Rob Clark16ea9752013-01-08 15:04:28 -06005 */
6
Sean Paulce2f2c32016-09-21 06:14:53 -07007#include <drm/drm_atomic.h>
Jyri Sarha305198d2016-04-07 15:05:16 +03008#include <drm/drm_atomic_helper.h>
Sean Paulce2f2c32016-09-21 06:14:53 -07009#include <drm/drm_crtc.h>
10#include <drm/drm_flip_work.h>
11#include <drm/drm_plane_helper.h>
Jyri Sarha4e910c72016-09-06 22:55:33 +030012#include <linux/workqueue.h>
Bartosz Golaszewski93452352016-10-31 15:19:26 +010013#include <linux/completion.h>
14#include <linux/dma-mapping.h>
Rob Herring86418f92017-03-22 08:26:06 -050015#include <linux/of_graph.h>
Jyri Sarhace99f722017-10-12 12:19:46 +030016#include <linux/math64.h>
Rob Clark16ea9752013-01-08 15:04:28 -060017
18#include "tilcdc_drv.h"
19#include "tilcdc_regs.h"
20
Bartosz Golaszewski93452352016-10-31 15:19:26 +010021#define TILCDC_VBLANK_SAFETY_THRESHOLD_US 1000
Jyri Sarha55e165c2016-11-15 23:37:24 +020022#define TILCDC_PALETTE_SIZE 32
23#define TILCDC_PALETTE_FIRST_ENTRY 0x4000
Tomi Valkeinen2b3a8cd2015-11-03 12:00:51 +020024
/* Per-CRTC driver state for the TI LCDC display controller. */
struct tilcdc_crtc {
	struct drm_crtc base;

	struct drm_plane primary;		/* the one-and-only primary plane */
	const struct tilcdc_panel_info *info;	/* panel timing/config, set by attach */
	struct drm_pending_vblank_event *event;	/* pending page-flip event, if any */
	struct mutex enable_lock;		/* serializes enable/disable/shutdown */
	bool enabled;				/* raster is running (under enable_lock) */
	bool shutdown;				/* set once at shutdown; blocks re-enable */
	wait_queue_head_t frame_done_wq;	/* woken when frame_done goes true */
	bool frame_done;			/* last-frame-done irq has fired */
	spinlock_t irq_lock;			/* protects scanout/vblank state vs irq */

	unsigned int lcd_fck_rate;		/* effective LCD functional clock, Hz */

	ktime_t last_vblank;			/* timestamp of most recent vblank */
	unsigned int hvtotal_us;		/* one full frame period, microseconds */

	struct drm_framebuffer *next_fb;	/* fb deferred to the next vblank */

	/* Only set if an external encoder is connected */
	bool simulate_vesa_sync;

	int sync_lost_count;			/* consecutive SYNC_LOST irqs seen */
	bool frame_intact;			/* no sync loss during previous frame */
	struct work_struct recover_work;	/* resets CRTC after repeated sync loss */

	dma_addr_t palette_dma_handle;		/* DMA address of the dummy palette */
	u16 *palette_base;			/* CPU mapping of the dummy palette */
	struct completion palette_loaded;	/* completed when palette DMA finishes */
};
#define to_tilcdc_crtc(x) container_of(x, struct tilcdc_crtc, base)
57
Tomi Valkeinen2b2080d2015-10-20 09:37:27 +030058static void set_scanout(struct drm_crtc *crtc, struct drm_framebuffer *fb)
Rob Clark16ea9752013-01-08 15:04:28 -060059{
Rob Clark16ea9752013-01-08 15:04:28 -060060 struct drm_device *dev = crtc->dev;
Daniel Schultz4c268d62016-10-28 13:52:41 +020061 struct tilcdc_drm_private *priv = dev->dev_private;
Rob Clark16ea9752013-01-08 15:04:28 -060062 struct drm_gem_cma_object *gem;
Tomi Valkeinen2b2080d2015-10-20 09:37:27 +030063 dma_addr_t start, end;
Jyri Sarha7eb9f062016-08-26 15:10:14 +030064 u64 dma_base_and_ceiling;
Rob Clark16ea9752013-01-08 15:04:28 -060065
Rob Clark16ea9752013-01-08 15:04:28 -060066 gem = drm_fb_cma_get_gem_obj(fb, 0);
67
Tomi Valkeinen2b2080d2015-10-20 09:37:27 +030068 start = gem->paddr + fb->offsets[0] +
69 crtc->y * fb->pitches[0] +
Ville Syrjälä353c8592016-12-14 23:30:57 +020070 crtc->x * fb->format->cpp[0];
Rob Clark16ea9752013-01-08 15:04:28 -060071
Tomi Valkeinen2b2080d2015-10-20 09:37:27 +030072 end = start + (crtc->mode.vdisplay * fb->pitches[0]);
Rob Clark16ea9752013-01-08 15:04:28 -060073
Jyri Sarha7eb9f062016-08-26 15:10:14 +030074 /* Write LCDC_DMA_FB_BASE_ADDR_0_REG and LCDC_DMA_FB_CEILING_ADDR_0_REG
75 * with a single insruction, if available. This should make it more
76 * unlikely that LCDC would fetch the DMA addresses in the middle of
77 * an update.
78 */
Daniel Schultz4c268d62016-10-28 13:52:41 +020079 if (priv->rev == 1)
80 end -= 1;
81
82 dma_base_and_ceiling = (u64)end << 32 | start;
Jyri Sarha7eb9f062016-08-26 15:10:14 +030083 tilcdc_write64(dev, LCDC_DMA_FB_BASE_ADDR_0_REG, dma_base_and_ceiling);
Rob Clark16ea9752013-01-08 15:04:28 -060084}
85
/*
 * The driver currently only supports only true color formats. For
 * true color the palette block is bypassed, but a 32 byte palette
 * should still be loaded. The first 16-bit entry must be 0x4000 while
 * all other entries must be zeroed.
 */
static void tilcdc_crtc_load_palette(struct drm_crtc *crtc)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;
	int ret;

	/* Completion is signaled elsewhere (the PL irq handler — not in
	 * this chunk); re-arm it before starting the load. */
	reinit_completion(&tilcdc_crtc->palette_loaded);

	/* Tell the LCDC where the palette is located. */
	tilcdc_write(dev, LCDC_DMA_FB_BASE_ADDR_0_REG,
		     tilcdc_crtc->palette_dma_handle);
	tilcdc_write(dev, LCDC_DMA_FB_CEILING_ADDR_0_REG,
		     (u32) tilcdc_crtc->palette_dma_handle +
		     TILCDC_PALETTE_SIZE - 1);

	/* Set dma load mode for palette loading only. */
	tilcdc_write_mask(dev, LCDC_RASTER_CTRL_REG,
			  LCDC_PALETTE_LOAD_MODE(PALETTE_ONLY),
			  LCDC_PALETTE_LOAD_MODE_MASK);

	/* Enable DMA Palette Loaded Interrupt */
	if (priv->rev == 1)
		tilcdc_set(dev, LCDC_RASTER_CTRL_REG, LCDC_V1_PL_INT_ENA);
	else
		tilcdc_write(dev, LCDC_INT_ENABLE_SET_REG, LCDC_V2_PL_INT_ENA);

	/* Enable LCDC DMA and wait for palette to be loaded. */
	tilcdc_clear_irqstatus(dev, 0xffffffff);
	tilcdc_set(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);

	/* The palette is tiny; 50ms is generous, so a timeout here
	 * indicates a real hardware/irq problem. */
	ret = wait_for_completion_timeout(&tilcdc_crtc->palette_loaded,
					  msecs_to_jiffies(50));
	if (ret == 0)
		dev_err(dev->dev, "%s: Palette loading timeout", __func__);

	/* Disable LCDC DMA and DMA Palette Loaded Interrupt. */
	tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);
	if (priv->rev == 1)
		tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_V1_PL_INT_ENA);
	else
		tilcdc_write(dev, LCDC_INT_ENABLE_CLR_REG, LCDC_V2_PL_INT_ENA);
}
135
Jyri Sarhaafaf8332016-06-21 16:00:44 +0300136static void tilcdc_crtc_enable_irqs(struct drm_device *dev)
137{
138 struct tilcdc_drm_private *priv = dev->dev_private;
139
140 tilcdc_clear_irqstatus(dev, 0xffffffff);
141
142 if (priv->rev == 1) {
143 tilcdc_set(dev, LCDC_RASTER_CTRL_REG,
Jyri Sarha36725832016-11-21 18:30:19 +0200144 LCDC_V1_SYNC_LOST_INT_ENA | LCDC_V1_FRAME_DONE_INT_ENA |
Jyri Sarhaafaf8332016-06-21 16:00:44 +0300145 LCDC_V1_UNDERFLOW_INT_ENA);
Karl Beldan8d6c3f72016-08-23 12:57:00 +0000146 tilcdc_set(dev, LCDC_DMA_CTRL_REG,
147 LCDC_V1_END_OF_FRAME_INT_ENA);
Jyri Sarhaafaf8332016-06-21 16:00:44 +0300148 } else {
149 tilcdc_write(dev, LCDC_INT_ENABLE_SET_REG,
150 LCDC_V2_UNDERFLOW_INT_ENA |
151 LCDC_V2_END_OF_FRAME0_INT_ENA |
152 LCDC_FRAME_DONE | LCDC_SYNC_LOST);
153 }
154}
155
156static void tilcdc_crtc_disable_irqs(struct drm_device *dev)
157{
158 struct tilcdc_drm_private *priv = dev->dev_private;
159
160 /* disable irqs that we might have enabled: */
161 if (priv->rev == 1) {
162 tilcdc_clear(dev, LCDC_RASTER_CTRL_REG,
Jyri Sarha36725832016-11-21 18:30:19 +0200163 LCDC_V1_SYNC_LOST_INT_ENA | LCDC_V1_FRAME_DONE_INT_ENA |
Jyri Sarhaafaf8332016-06-21 16:00:44 +0300164 LCDC_V1_UNDERFLOW_INT_ENA | LCDC_V1_PL_INT_ENA);
165 tilcdc_clear(dev, LCDC_DMA_CTRL_REG,
166 LCDC_V1_END_OF_FRAME_INT_ENA);
167 } else {
168 tilcdc_write(dev, LCDC_INT_ENABLE_CLR_REG,
169 LCDC_V2_UNDERFLOW_INT_ENA | LCDC_V2_PL_INT_ENA |
170 LCDC_V2_END_OF_FRAME0_INT_ENA |
171 LCDC_FRAME_DONE | LCDC_SYNC_LOST);
172 }
173}
174
/*
 * Pulse the LCDC main software reset.  Only the rev 2 IP has this
 * reset bit; on rev 1 this is a no-op.
 */
static void reset(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;

	if (priv->rev != 2)
		return;

	tilcdc_set(dev, LCDC_CLK_RESET_REG, LCDC_CLK_MAIN_RESET);
	/* Hold reset briefly before releasing it. */
	usleep_range(250, 1000);
	tilcdc_clear(dev, LCDC_CLK_RESET_REG, LCDC_CLK_MAIN_RESET);
}
187
/*
 * Calculate the percentage difference between the requested pixel clock rate
 * and the effective rate resulting from calculating the clock divider value.
 *
 * Both rates are scaled down by 100 first so the intermediate product
 * cannot overflow for realistic pixel clocks.  Returns the (truncated)
 * absolute difference as a percentage of @rate.  If @rate is below 100 Hz
 * the scaled reference becomes zero; report a 100% difference instead of
 * dividing by zero (undefined behavior).
 */
static unsigned int tilcdc_pclk_diff(unsigned long rate,
				     unsigned long real_rate)
{
	int r = rate / 100, rr = real_rate / 100;

	if (r == 0)
		return 100;	/* degenerate request: treat as maximally off */

	return (unsigned int)(abs(((rr - r) * 100) / r));
}
199
/*
 * Program the LCD pixel clock for the current mode.
 *
 * Preferred path: run the functional clock at 2x the pixel rate and use
 * a fixed divider of 2.  If clk_set_rate() cannot deliver that (or the
 * result is >5% off), fall back to keeping the current functional clock
 * and computing the closest divider instead.
 */
static void tilcdc_crtc_set_clk(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	unsigned long clk_rate, real_rate, req_rate;
	unsigned int clkdiv;
	int ret;

	clkdiv = 2; /* first try using a standard divider of 2 */

	/* mode.clock is in KHz, set_rate wants parameter in Hz */
	req_rate = crtc->mode.clock * 1000;

	ret = clk_set_rate(priv->clk, req_rate * clkdiv);
	clk_rate = clk_get_rate(priv->clk);
	if (ret < 0 || tilcdc_pclk_diff(req_rate, clk_rate) > 5) {
		/*
		 * If we fail to set the clock rate (some architectures don't
		 * use the common clock framework yet and may not implement
		 * all the clk API calls for every clock), try the next best
		 * thing: adjusting the clock divider, unless clk_get_rate()
		 * failed as well.
		 */
		if (!clk_rate) {
			/* Nothing more we can do. Just bail out. */
			dev_err(dev->dev,
				"failed to set the pixel clock - unable to read current lcdc clock rate\n");
			return;
		}

		clkdiv = DIV_ROUND_CLOSEST(clk_rate, req_rate);

		/*
		 * Emit a warning if the real clock rate resulting from the
		 * calculated divider differs much from the requested rate.
		 *
		 * 5% is an arbitrary value - LCDs are usually quite tolerant
		 * about pixel clock rates.
		 */
		real_rate = clkdiv * req_rate;

		if (tilcdc_pclk_diff(clk_rate, real_rate) > 5) {
			dev_warn(dev->dev,
				 "effective pixel clock rate (%luHz) differs from the calculated rate (%luHz)\n",
				 clk_rate, real_rate);
		}
	}

	/* Remember the effective rate for vblank-time bookkeeping. */
	tilcdc_crtc->lcd_fck_rate = clk_rate;

	DBG("lcd_clk=%u, mode clock=%d, div=%u",
	    tilcdc_crtc->lcd_fck_rate, crtc->mode.clock, clkdiv);

	/* Configure the LCD clock divisor. */
	tilcdc_write(dev, LCDC_CTRL_REG, LCDC_CLK_DIVISOR(clkdiv) |
		     LCDC_RASTER_MODE);

	/* Rev 2 gates the core/DMA/LIDD clocks separately - turn them on. */
	if (priv->rev == 2)
		tilcdc_set(dev, LCDC_CLK_ENABLE_REG,
			   LCDC_V2_DMA_CLK_EN | LCDC_V2_LIDD_CLK_EN |
			   LCDC_V2_CORE_CLK_EN);
}
263
Xiongwei Song584d4ed2017-12-02 19:24:22 +0800264static uint tilcdc_mode_hvtotal(const struct drm_display_mode *mode)
Jyri Sarhace99f722017-10-12 12:19:46 +0300265{
266 return (uint) div_u64(1000llu * mode->htotal * mode->vtotal,
267 mode->clock);
268}
269
Jyri Sarha75d7f272016-11-24 23:25:08 +0200270static void tilcdc_crtc_set_mode(struct drm_crtc *crtc)
271{
272 struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
273 struct drm_device *dev = crtc->dev;
274 struct tilcdc_drm_private *priv = dev->dev_private;
275 const struct tilcdc_panel_info *info = tilcdc_crtc->info;
276 uint32_t reg, hbp, hfp, hsw, vbp, vfp, vsw;
277 struct drm_display_mode *mode = &crtc->state->adjusted_mode;
278 struct drm_framebuffer *fb = crtc->primary->state->fb;
279
280 if (WARN_ON(!info))
281 return;
282
283 if (WARN_ON(!fb))
284 return;
285
286 /* Configure the Burst Size and fifo threshold of DMA: */
287 reg = tilcdc_read(dev, LCDC_DMA_CTRL_REG) & ~0x00000770;
288 switch (info->dma_burst_sz) {
289 case 1:
290 reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_1);
291 break;
292 case 2:
293 reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_2);
294 break;
295 case 4:
296 reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_4);
297 break;
298 case 8:
299 reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_8);
300 break;
301 case 16:
302 reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_16);
303 break;
304 default:
305 dev_err(dev->dev, "invalid burst size\n");
306 return;
307 }
308 reg |= (info->fifo_th << 8);
309 tilcdc_write(dev, LCDC_DMA_CTRL_REG, reg);
310
311 /* Configure timings: */
312 hbp = mode->htotal - mode->hsync_end;
313 hfp = mode->hsync_start - mode->hdisplay;
314 hsw = mode->hsync_end - mode->hsync_start;
315 vbp = mode->vtotal - mode->vsync_end;
316 vfp = mode->vsync_start - mode->vdisplay;
317 vsw = mode->vsync_end - mode->vsync_start;
318
319 DBG("%dx%d, hbp=%u, hfp=%u, hsw=%u, vbp=%u, vfp=%u, vsw=%u",
320 mode->hdisplay, mode->vdisplay, hbp, hfp, hsw, vbp, vfp, vsw);
321
322 /* Set AC Bias Period and Number of Transitions per Interrupt: */
323 reg = tilcdc_read(dev, LCDC_RASTER_TIMING_2_REG) & ~0x000fff00;
324 reg |= LCDC_AC_BIAS_FREQUENCY(info->ac_bias) |
325 LCDC_AC_BIAS_TRANSITIONS_PER_INT(info->ac_bias_intrpt);
326
327 /*
328 * subtract one from hfp, hbp, hsw because the hardware uses
329 * a value of 0 as 1
330 */
331 if (priv->rev == 2) {
332 /* clear bits we're going to set */
333 reg &= ~0x78000033;
334 reg |= ((hfp-1) & 0x300) >> 8;
335 reg |= ((hbp-1) & 0x300) >> 4;
336 reg |= ((hsw-1) & 0x3c0) << 21;
337 }
338 tilcdc_write(dev, LCDC_RASTER_TIMING_2_REG, reg);
339
340 reg = (((mode->hdisplay >> 4) - 1) << 4) |
341 (((hbp-1) & 0xff) << 24) |
342 (((hfp-1) & 0xff) << 16) |
343 (((hsw-1) & 0x3f) << 10);
344 if (priv->rev == 2)
345 reg |= (((mode->hdisplay >> 4) - 1) & 0x40) >> 3;
346 tilcdc_write(dev, LCDC_RASTER_TIMING_0_REG, reg);
347
348 reg = ((mode->vdisplay - 1) & 0x3ff) |
349 ((vbp & 0xff) << 24) |
350 ((vfp & 0xff) << 16) |
351 (((vsw-1) & 0x3f) << 10);
352 tilcdc_write(dev, LCDC_RASTER_TIMING_1_REG, reg);
353
354 /*
355 * be sure to set Bit 10 for the V2 LCDC controller,
356 * otherwise limited to 1024 pixels width, stopping
357 * 1920x1080 being supported.
358 */
359 if (priv->rev == 2) {
360 if ((mode->vdisplay - 1) & 0x400) {
361 tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG,
362 LCDC_LPP_B10);
363 } else {
364 tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG,
365 LCDC_LPP_B10);
366 }
367 }
368
369 /* Configure display type: */
370 reg = tilcdc_read(dev, LCDC_RASTER_CTRL_REG) &
371 ~(LCDC_TFT_MODE | LCDC_MONO_8BIT_MODE | LCDC_MONOCHROME_MODE |
372 LCDC_V2_TFT_24BPP_MODE | LCDC_V2_TFT_24BPP_UNPACK |
373 0x000ff000 /* Palette Loading Delay bits */);
374 reg |= LCDC_TFT_MODE; /* no monochrome/passive support */
375 if (info->tft_alt_mode)
376 reg |= LCDC_TFT_ALT_ENABLE;
377 if (priv->rev == 2) {
Ville Syrjälä438b74a2016-12-14 23:32:55 +0200378 switch (fb->format->format) {
Jyri Sarha75d7f272016-11-24 23:25:08 +0200379 case DRM_FORMAT_BGR565:
380 case DRM_FORMAT_RGB565:
381 break;
382 case DRM_FORMAT_XBGR8888:
383 case DRM_FORMAT_XRGB8888:
384 reg |= LCDC_V2_TFT_24BPP_UNPACK;
385 /* fallthrough */
386 case DRM_FORMAT_BGR888:
387 case DRM_FORMAT_RGB888:
388 reg |= LCDC_V2_TFT_24BPP_MODE;
389 break;
390 default:
391 dev_err(dev->dev, "invalid pixel format\n");
392 return;
393 }
394 }
395 reg |= info->fdd < 12;
396 tilcdc_write(dev, LCDC_RASTER_CTRL_REG, reg);
397
398 if (info->invert_pxl_clk)
399 tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_PIXEL_CLOCK);
400 else
401 tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_PIXEL_CLOCK);
402
403 if (info->sync_ctrl)
404 tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_SYNC_CTRL);
405 else
406 tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_SYNC_CTRL);
407
408 if (info->sync_edge)
409 tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_SYNC_EDGE);
410 else
411 tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_SYNC_EDGE);
412
413 if (mode->flags & DRM_MODE_FLAG_NHSYNC)
414 tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_HSYNC);
415 else
416 tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_HSYNC);
417
418 if (mode->flags & DRM_MODE_FLAG_NVSYNC)
419 tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_VSYNC);
420 else
421 tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_VSYNC);
422
423 if (info->raster_order)
424 tilcdc_set(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ORDER);
425 else
426 tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ORDER);
427
428 tilcdc_crtc_set_clk(crtc);
429
430 tilcdc_crtc_load_palette(crtc);
431
432 set_scanout(crtc, fb);
433
Jyri Sarha75d7f272016-11-24 23:25:08 +0200434 crtc->hwmode = crtc->state->adjusted_mode;
Jyri Sarhace99f722017-10-12 12:19:46 +0300435
436 tilcdc_crtc->hvtotal_us =
437 tilcdc_mode_hvtotal(&crtc->hwmode);
Jyri Sarha75d7f272016-11-24 23:25:08 +0200438}
439
/*
 * Power up the controller, program the current mode and start the
 * raster.  No-op if already enabled or if shutdown has begun.
 */
static void tilcdc_crtc_enable(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	unsigned long flags;

	mutex_lock(&tilcdc_crtc->enable_lock);
	if (tilcdc_crtc->enabled || tilcdc_crtc->shutdown) {
		mutex_unlock(&tilcdc_crtc->enable_lock);
		return;
	}

	pm_runtime_get_sync(dev->dev);

	reset(crtc);

	tilcdc_crtc_set_mode(crtc);

	tilcdc_crtc_enable_irqs(dev);

	tilcdc_clear(dev, LCDC_DMA_CTRL_REG, LCDC_DUAL_FRAME_BUFFER_ENABLE);
	/* Palette was loaded in set_mode; switch DMA back to pixel data. */
	tilcdc_write_mask(dev, LCDC_RASTER_CTRL_REG,
			  LCDC_PALETTE_LOAD_MODE(DATA_ONLY),
			  LCDC_PALETTE_LOAD_MODE_MASK);

	/* There is no real chance for a race here as the time stamp
	 * is taken before the raster DMA is started. The spin-lock is
	 * taken to have a memory barrier after taking the time-stamp
	 * and to avoid a context switch between taking the stamp and
	 * enabling the raster.
	 */
	spin_lock_irqsave(&tilcdc_crtc->irq_lock, flags);
	tilcdc_crtc->last_vblank = ktime_get();
	tilcdc_set(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);
	spin_unlock_irqrestore(&tilcdc_crtc->irq_lock, flags);

	drm_crtc_vblank_on(crtc);

	tilcdc_crtc->enabled = true;
	mutex_unlock(&tilcdc_crtc->enable_lock);
}
481
/* Atomic-helper enable hook; old_state is unused. */
static void tilcdc_crtc_atomic_enable(struct drm_crtc *crtc,
				      struct drm_crtc_state *old_state)
{
	tilcdc_crtc_enable(crtc);
}
487
/*
 * Stop the raster and power the controller down.
 * @shutdown: when true, also latch the shutdown flag so the CRTC can
 *            never be re-enabled afterwards.
 */
static void tilcdc_crtc_off(struct drm_crtc *crtc, bool shutdown)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	int ret;

	mutex_lock(&tilcdc_crtc->enable_lock);
	if (shutdown)
		tilcdc_crtc->shutdown = true;
	if (!tilcdc_crtc->enabled) {
		mutex_unlock(&tilcdc_crtc->enable_lock);
		return;
	}
	tilcdc_crtc->frame_done = false;
	tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);

	/*
	 * Wait for framedone irq which will still come before putting
	 * things to sleep..
	 */
	ret = wait_event_timeout(tilcdc_crtc->frame_done_wq,
				 tilcdc_crtc->frame_done,
				 msecs_to_jiffies(500));
	if (ret == 0)
		dev_err(dev->dev, "%s: timeout waiting for framedone\n",
			__func__);

	drm_crtc_vblank_off(crtc);

	tilcdc_crtc_disable_irqs(dev);

	pm_runtime_put_sync(dev->dev);

	tilcdc_crtc->enabled = false;
	mutex_unlock(&tilcdc_crtc->enable_lock);
}
524
/* Regular disable: turn the CRTC off but allow later re-enabling. */
static void tilcdc_crtc_disable(struct drm_crtc *crtc)
{
	tilcdc_crtc_off(crtc, false);
}
529
/* Atomic-helper disable hook; old_state is unused. */
static void tilcdc_crtc_atomic_disable(struct drm_crtc *crtc,
				       struct drm_crtc_state *old_state)
{
	tilcdc_crtc_disable(crtc);
}
535
/* Final shutdown: disable the CRTC and block any further enables. */
void tilcdc_crtc_shutdown(struct drm_crtc *crtc)
{
	tilcdc_crtc_off(crtc, true);
}
540
Jyri Sarha47bfd6c2016-06-22 16:27:54 +0300541static bool tilcdc_crtc_is_on(struct drm_crtc *crtc)
542{
543 return crtc->state && crtc->state->enable && crtc->state->active;
Rob Clark16ea9752013-01-08 15:04:28 -0600544}
545
Jyri Sarha13b3d722016-04-06 14:02:38 +0300546static void tilcdc_crtc_recover_work(struct work_struct *work)
547{
548 struct tilcdc_crtc *tilcdc_crtc =
549 container_of(work, struct tilcdc_crtc, recover_work);
550 struct drm_crtc *crtc = &tilcdc_crtc->base;
551
552 dev_info(crtc->dev->dev, "%s: Reset CRTC", __func__);
553
Daniel Vetter33e5b662017-03-22 22:50:47 +0100554 drm_modeset_lock(&crtc->mutex, NULL);
Jyri Sarha13b3d722016-04-06 14:02:38 +0300555
556 if (!tilcdc_crtc_is_on(crtc))
557 goto out;
558
559 tilcdc_crtc_disable(crtc);
560 tilcdc_crtc_enable(crtc);
561out:
Daniel Vetter33e5b662017-03-22 22:50:47 +0100562 drm_modeset_unlock(&crtc->mutex);
Jyri Sarha13b3d722016-04-06 14:02:38 +0300563}
564
/*
 * Tear the CRTC down: stop the hardware first, drain any queued work
 * (e.g. the recover worker) and only then release the DRM object.
 */
static void tilcdc_crtc_destroy(struct drm_crtc *crtc)
{
	struct tilcdc_drm_private *priv = crtc->dev->dev_private;

	tilcdc_crtc_shutdown(crtc);

	/* Make sure no recover/flip work is still in flight. */
	flush_workqueue(priv->wq);

	/* Drop the OF node reference taken when the port was parsed. */
	of_node_put(crtc->port);
	drm_crtc_cleanup(crtc);
}
576
/*
 * Queue a page flip to @fb, sending @event on completion.
 *
 * If the next vblank is imminent (closer than the safety threshold) the
 * scanout-address update is deferred to the vblank irq via next_fb, so
 * the LCDC cannot latch a half-programmed address; otherwise the new
 * address is written immediately.
 *
 * Returns 0 on success or -EBUSY if a flip is already pending.
 */
int tilcdc_crtc_update_fb(struct drm_crtc *crtc,
		struct drm_framebuffer *fb,
		struct drm_pending_vblank_event *event)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	struct drm_device *dev = crtc->dev;

	if (tilcdc_crtc->event) {
		dev_err(dev->dev, "already pending page flip!\n");
		return -EBUSY;
	}

	tilcdc_crtc->event = event;

	mutex_lock(&tilcdc_crtc->enable_lock);

	if (tilcdc_crtc->enabled) {
		unsigned long flags;
		ktime_t next_vblank;
		s64 tdiff;

		spin_lock_irqsave(&tilcdc_crtc->irq_lock, flags);

		/* Estimate when the next vblank will occur from the last
		 * vblank timestamp plus one frame period. */
		next_vblank = ktime_add_us(tilcdc_crtc->last_vblank,
					   tilcdc_crtc->hvtotal_us);
		tdiff = ktime_to_us(ktime_sub(next_vblank, ktime_get()));

		if (tdiff < TILCDC_VBLANK_SAFETY_THRESHOLD_US)
			tilcdc_crtc->next_fb = fb;	/* too close: defer to irq */
		else
			set_scanout(crtc, fb);

		spin_unlock_irqrestore(&tilcdc_crtc->irq_lock, flags);
	}

	mutex_unlock(&tilcdc_crtc->enable_lock);

	return 0;
}
616
Rob Clark16ea9752013-01-08 15:04:28 -0600617static bool tilcdc_crtc_mode_fixup(struct drm_crtc *crtc,
618 const struct drm_display_mode *mode,
619 struct drm_display_mode *adjusted_mode)
620{
Jyri Sarha103cd8b2015-02-10 14:13:23 +0200621 struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
622
623 if (!tilcdc_crtc->simulate_vesa_sync)
624 return true;
625
626 /*
627 * tilcdc does not generate VESA-compliant sync but aligns
628 * VS on the second edge of HS instead of first edge.
629 * We use adjusted_mode, to fixup sync by aligning both rising
630 * edges and add HSKEW offset to fix the sync.
631 */
632 adjusted_mode->hskew = mode->hsync_end - mode->hsync_start;
633 adjusted_mode->flags |= DRM_MODE_FLAG_HSKEW;
634
635 if (mode->flags & DRM_MODE_FLAG_NHSYNC) {
636 adjusted_mode->flags |= DRM_MODE_FLAG_PHSYNC;
637 adjusted_mode->flags &= ~DRM_MODE_FLAG_NHSYNC;
638 } else {
639 adjusted_mode->flags |= DRM_MODE_FLAG_NHSYNC;
640 adjusted_mode->flags &= ~DRM_MODE_FLAG_PHSYNC;
641 }
642
Rob Clark16ea9752013-01-08 15:04:28 -0600643 return true;
644}
645
/*
 * Atomic check: an active CRTC must have its primary plane in the same
 * atomic state and a mode the hardware can actually display.
 */
static int tilcdc_crtc_atomic_check(struct drm_crtc *crtc,
				    struct drm_crtc_state *state)
{
	struct drm_display_mode *mode = &state->mode;
	int ret;

	/* If we are not active we don't care */
	if (!state->active)
		return 0;

	/* The primary plane is expected at slot 0 of the global state. */
	if (state->state->planes[0].ptr != crtc->primary ||
	    state->state->planes[0].state == NULL ||
	    state->state->planes[0].state->crtc != crtc) {
		dev_dbg(crtc->dev->dev, "CRTC primary plane must be present");
		return -EINVAL;
	}

	ret = tilcdc_crtc_mode_valid(crtc, mode);
	if (ret) {
		dev_dbg(crtc->dev->dev, "Mode \"%s\" not valid", mode->name);
		return -EINVAL;
	}

	return 0;
}
671
/*
 * Nothing to do: the end-of-frame interrupt is enabled unconditionally
 * in tilcdc_crtc_enable_irqs() whenever the CRTC runs.
 */
static int tilcdc_crtc_enable_vblank(struct drm_crtc *crtc)
{
	return 0;
}
676
/* Counterpart of enable_vblank: the irq stays on, so nothing to do. */
static void tilcdc_crtc_disable_vblank(struct drm_crtc *crtc)
{
}
680
/*
 * DRM reset hook: reset the software state and, if the bootloader (or a
 * previous driver instance) left the raster running, drain it cleanly
 * by waiting for the frame-done interrupt after disabling it.
 */
static void tilcdc_crtc_reset(struct drm_crtc *crtc)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	int ret;

	drm_atomic_helper_crtc_reset(crtc);

	/* Turn the raster off if it for some reason is on. */
	pm_runtime_get_sync(dev->dev);
	if (tilcdc_read(dev, LCDC_RASTER_CTRL_REG) & LCDC_RASTER_ENABLE) {
		/* Enable DMA Frame Done Interrupt */
		tilcdc_write(dev, LCDC_INT_ENABLE_SET_REG, LCDC_FRAME_DONE);
		tilcdc_clear_irqstatus(dev, 0xffffffff);

		tilcdc_crtc->frame_done = false;
		tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);

		ret = wait_event_timeout(tilcdc_crtc->frame_done_wq,
					 tilcdc_crtc->frame_done,
					 msecs_to_jiffies(500));
		if (ret == 0)
			dev_err(dev->dev, "%s: timeout waiting for framedone\n",
				__func__);
	}
	pm_runtime_put_sync(dev->dev);
}
708
/* CRTC vtable: atomic helpers plus the driver-specific reset/vblank hooks. */
static const struct drm_crtc_funcs tilcdc_crtc_funcs = {
	.destroy        = tilcdc_crtc_destroy,
	.set_config     = drm_atomic_helper_set_config,
	.page_flip      = drm_atomic_helper_page_flip,
	.reset		= tilcdc_crtc_reset,
	.atomic_duplicate_state = drm_atomic_helper_crtc_duplicate_state,
	.atomic_destroy_state = drm_atomic_helper_crtc_destroy_state,
	.enable_vblank	= tilcdc_crtc_enable_vblank,
	.disable_vblank	= tilcdc_crtc_disable_vblank,
};
719
/* Atomic modeset helper hooks for this CRTC. */
static const struct drm_crtc_helper_funcs tilcdc_crtc_helper_funcs = {
	.mode_fixup     = tilcdc_crtc_mode_fixup,
	.atomic_check	= tilcdc_crtc_atomic_check,
	.atomic_enable	= tilcdc_crtc_atomic_enable,
	.atomic_disable	= tilcdc_crtc_atomic_disable,
};
726
727int tilcdc_crtc_max_width(struct drm_crtc *crtc)
728{
729 struct drm_device *dev = crtc->dev;
730 struct tilcdc_drm_private *priv = dev->dev_private;
731 int max_width = 0;
732
733 if (priv->rev == 1)
734 max_width = 1024;
735 else if (priv->rev == 2)
736 max_width = 2048;
737
738 return max_width;
739}
740
741int tilcdc_crtc_mode_valid(struct drm_crtc *crtc, struct drm_display_mode *mode)
742{
743 struct tilcdc_drm_private *priv = crtc->dev->dev_private;
744 unsigned int bandwidth;
Darren Etheridgee1c5d0a2013-06-21 13:52:25 -0500745 uint32_t hbp, hfp, hsw, vbp, vfp, vsw;
Rob Clark16ea9752013-01-08 15:04:28 -0600746
Darren Etheridgee1c5d0a2013-06-21 13:52:25 -0500747 /*
748 * check to see if the width is within the range that
749 * the LCD Controller physically supports
750 */
Rob Clark16ea9752013-01-08 15:04:28 -0600751 if (mode->hdisplay > tilcdc_crtc_max_width(crtc))
752 return MODE_VIRTUAL_X;
753
754 /* width must be multiple of 16 */
755 if (mode->hdisplay & 0xf)
756 return MODE_VIRTUAL_X;
757
758 if (mode->vdisplay > 2048)
759 return MODE_VIRTUAL_Y;
760
Darren Etheridgee1c5d0a2013-06-21 13:52:25 -0500761 DBG("Processing mode %dx%d@%d with pixel clock %d",
762 mode->hdisplay, mode->vdisplay,
763 drm_mode_vrefresh(mode), mode->clock);
764
765 hbp = mode->htotal - mode->hsync_end;
766 hfp = mode->hsync_start - mode->hdisplay;
767 hsw = mode->hsync_end - mode->hsync_start;
768 vbp = mode->vtotal - mode->vsync_end;
769 vfp = mode->vsync_start - mode->vdisplay;
770 vsw = mode->vsync_end - mode->vsync_start;
771
772 if ((hbp-1) & ~0x3ff) {
773 DBG("Pruning mode: Horizontal Back Porch out of range");
774 return MODE_HBLANK_WIDE;
775 }
776
777 if ((hfp-1) & ~0x3ff) {
778 DBG("Pruning mode: Horizontal Front Porch out of range");
779 return MODE_HBLANK_WIDE;
780 }
781
782 if ((hsw-1) & ~0x3ff) {
783 DBG("Pruning mode: Horizontal Sync Width out of range");
784 return MODE_HSYNC_WIDE;
785 }
786
787 if (vbp & ~0xff) {
788 DBG("Pruning mode: Vertical Back Porch out of range");
789 return MODE_VBLANK_WIDE;
790 }
791
792 if (vfp & ~0xff) {
793 DBG("Pruning mode: Vertical Front Porch out of range");
794 return MODE_VBLANK_WIDE;
795 }
796
797 if ((vsw-1) & ~0x3f) {
798 DBG("Pruning mode: Vertical Sync Width out of range");
799 return MODE_VSYNC_WIDE;
800 }
801
Darren Etheridge4e564342013-06-21 13:52:23 -0500802 /*
803 * some devices have a maximum allowed pixel clock
804 * configured from the DT
805 */
806 if (mode->clock > priv->max_pixelclock) {
Darren Etheridgef7b45752013-06-21 13:52:26 -0500807 DBG("Pruning mode: pixel clock too high");
Darren Etheridge4e564342013-06-21 13:52:23 -0500808 return MODE_CLOCK_HIGH;
809 }
810
811 /*
812 * some devices further limit the max horizontal resolution
813 * configured from the DT
814 */
815 if (mode->hdisplay > priv->max_width)
816 return MODE_BAD_WIDTH;
817
Rob Clark16ea9752013-01-08 15:04:28 -0600818 /* filter out modes that would require too much memory bandwidth: */
Darren Etheridge4e564342013-06-21 13:52:23 -0500819 bandwidth = mode->hdisplay * mode->vdisplay *
820 drm_mode_vrefresh(mode);
821 if (bandwidth > priv->max_bandwidth) {
Darren Etheridgef7b45752013-06-21 13:52:26 -0500822 DBG("Pruning mode: exceeds defined bandwidth limit");
Rob Clark16ea9752013-01-08 15:04:28 -0600823 return MODE_BAD;
Darren Etheridge4e564342013-06-21 13:52:23 -0500824 }
Rob Clark16ea9752013-01-08 15:04:28 -0600825
826 return MODE_OK;
827}
828
829void tilcdc_crtc_set_panel_info(struct drm_crtc *crtc,
830 const struct tilcdc_panel_info *info)
831{
832 struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
833 tilcdc_crtc->info = info;
834}
835
Jyri Sarha103cd8b2015-02-10 14:13:23 +0200836void tilcdc_crtc_set_simulate_vesa_sync(struct drm_crtc *crtc,
837 bool simulate_vesa_sync)
838{
839 struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
840
841 tilcdc_crtc->simulate_vesa_sync = simulate_vesa_sync;
842}
843
/*
 * React to a change of the LCDC functional clock rate: if the cached
 * rate (lcd_fck_rate) no longer matches the current clk rate and the
 * CRTC is active, re-program the pixel clock.  The raster engine must
 * be disabled while the clock is changed, so the display is briefly
 * turned off and back on under the CRTC modeset lock.
 */
void tilcdc_crtc_update_clk(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);

	/* serialize against concurrent modesets on this CRTC */
	drm_modeset_lock(&crtc->mutex, NULL);
	if (tilcdc_crtc->lcd_fck_rate != clk_get_rate(priv->clk)) {
		if (tilcdc_crtc_is_on(crtc)) {
			/* keep the device powered across disable/enable */
			pm_runtime_get_sync(dev->dev);
			tilcdc_crtc_disable(crtc);

			tilcdc_crtc_set_clk(crtc);

			tilcdc_crtc_enable(crtc);
			pm_runtime_put_sync(dev->dev);
		}
	}
	drm_modeset_unlock(&crtc->mutex);
}
864
/*
 * Number of consecutive frames with SYNC_LOST tolerated (rev 2) before
 * the recover work is queued and the interrupt is masked.
 */
#define SYNC_LOST_COUNT_LIMIT 50

/*
 * LCDC interrupt handler.  Reads and acknowledges the raw interrupt
 * status, then services each condition that is flagged:
 *
 *  - END_OF_FRAME0: records the vblank timestamp, latches a pending
 *    page flip (next_fb) into the scanout registers, signals the vblank
 *    event, and maintains the sync-lost flood accounting.
 *  - FIFO_UNDERFLOW: rate-limited error log only.
 *  - PL_LOAD_DONE: completes palette_loaded and masks the palette
 *    interrupt (register layout differs between rev 1 and rev 2).
 *  - SYNC_LOST: rev 1 recovers by toggling RASTER_ENABLE; rev 2 counts
 *    occurrences and queues recover_work once SYNC_LOST_COUNT_LIMIT is
 *    exceeded, masking the interrupt to stop the flood.
 *  - FRAME_DONE: wakes waiters on frame_done_wq (used when disabling).
 *
 * Returns IRQ_HANDLED unconditionally.
 */
irqreturn_t tilcdc_crtc_irq(struct drm_crtc *crtc)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;
	uint32_t stat, reg;

	/* ack everything up front; 'stat' drives the handling below */
	stat = tilcdc_read_irqstatus(dev);
	tilcdc_clear_irqstatus(dev, stat);

	if (stat & LCDC_END_OF_FRAME0) {
		unsigned long flags;
		bool skip_event = false;
		ktime_t now;

		now = ktime_get();

		spin_lock_irqsave(&tilcdc_crtc->irq_lock, flags);

		tilcdc_crtc->last_vblank = now;

		/*
		 * A flip is pending: program the new framebuffer and defer
		 * the vblank event to the frame in which it takes effect.
		 */
		if (tilcdc_crtc->next_fb) {
			set_scanout(crtc, tilcdc_crtc->next_fb);
			tilcdc_crtc->next_fb = NULL;
			skip_event = true;
		}

		spin_unlock_irqrestore(&tilcdc_crtc->irq_lock, flags);

		drm_crtc_handle_vblank(crtc);

		if (!skip_event) {
			struct drm_pending_vblank_event *event;

			/* event_lock protects tilcdc_crtc->event */
			spin_lock_irqsave(&dev->event_lock, flags);

			event = tilcdc_crtc->event;
			tilcdc_crtc->event = NULL;
			if (event)
				drm_crtc_send_vblank_event(crtc, event);

			spin_unlock_irqrestore(&dev->event_lock, flags);
		}

		/* one clean frame resets the sync-lost flood counter */
		if (tilcdc_crtc->frame_intact)
			tilcdc_crtc->sync_lost_count = 0;
		else
			tilcdc_crtc->frame_intact = true;
	}

	if (stat & LCDC_FIFO_UNDERFLOW)
		dev_err_ratelimited(dev->dev, "%s(0x%08x): FIFO underflow",
				    __func__, stat);

	if (stat & LCDC_PL_LOAD_DONE) {
		complete(&tilcdc_crtc->palette_loaded);
		/* palette load is one-shot: mask its interrupt again */
		if (priv->rev == 1)
			tilcdc_clear(dev, LCDC_RASTER_CTRL_REG,
				     LCDC_V1_PL_INT_ENA);
		else
			tilcdc_write(dev, LCDC_INT_ENABLE_CLR_REG,
				     LCDC_V2_PL_INT_ENA);
	}

	if (stat & LCDC_SYNC_LOST) {
		dev_err_ratelimited(dev->dev, "%s(0x%08x): Sync lost",
				    __func__, stat);
		tilcdc_crtc->frame_intact = false;
		if (priv->rev == 1) {
			/* rev 1: bounce RASTER_ENABLE to resynchronize */
			reg = tilcdc_read(dev, LCDC_RASTER_CTRL_REG);
			if (reg & LCDC_RASTER_ENABLE) {
				tilcdc_clear(dev, LCDC_RASTER_CTRL_REG,
					     LCDC_RASTER_ENABLE);
				tilcdc_set(dev, LCDC_RASTER_CTRL_REG,
					   LCDC_RASTER_ENABLE);
			}
		} else {
			/*
			 * rev 2: a persistent flood means the controller
			 * needs a full disable/re-enable, done from process
			 * context via recover_work.
			 */
			if (tilcdc_crtc->sync_lost_count++ >
			    SYNC_LOST_COUNT_LIMIT) {
				dev_err(dev->dev,
					"%s(0x%08x): Sync lost flood detected, recovering",
					__func__, stat);
				queue_work(system_wq,
					   &tilcdc_crtc->recover_work);
				tilcdc_write(dev, LCDC_INT_ENABLE_CLR_REG,
					     LCDC_SYNC_LOST);
				tilcdc_crtc->sync_lost_count = 0;
			}
		}
	}

	if (stat & LCDC_FRAME_DONE) {
		tilcdc_crtc->frame_done = true;
		wake_up(&tilcdc_crtc->frame_done_wq);
		/* rev 1 lcdc appears to hang if irq is not disabled here */
		if (priv->rev == 1)
			tilcdc_clear(dev, LCDC_RASTER_CTRL_REG,
				     LCDC_V1_FRAME_DONE_INT_ENA);
	}

	/* For revision 2 only */
	if (priv->rev == 2) {
		/* Indicate to LCDC that the interrupt service routine has
		 * completed, see 13.3.6.1.6 in AM335x TRM.
		 */
		tilcdc_write(dev, LCDC_END_OF_INT_IND_REG, 0);
	}

	return IRQ_HANDLED;
}
977
/*
 * Allocate and register the tilcdc CRTC with its primary plane.
 *
 * Allocations use devm/dmam so they are released automatically with the
 * device; the DMA-coherent palette buffer is pre-seeded with the
 * magic first entry the hardware expects.  On success priv->crtc is
 * set and 0 is returned; on failure a negative errno is returned.
 *
 * NOTE(review): the fail path calls tilcdc_crtc_destroy() even when
 * tilcdc_plane_init() or drm_crtc_init_with_planes() failed, i.e.
 * before the crtc was (fully) initialized — verify that destroy copes
 * with a crtc that was never registered.
 */
int tilcdc_crtc_create(struct drm_device *dev)
{
	struct tilcdc_drm_private *priv = dev->dev_private;
	struct tilcdc_crtc *tilcdc_crtc;
	struct drm_crtc *crtc;
	int ret;

	tilcdc_crtc = devm_kzalloc(dev->dev, sizeof(*tilcdc_crtc), GFP_KERNEL);
	if (!tilcdc_crtc)
		return -ENOMEM;

	init_completion(&tilcdc_crtc->palette_loaded);
	/* DMA-visible palette buffer the controller loads at enable time */
	tilcdc_crtc->palette_base = dmam_alloc_coherent(dev->dev,
					TILCDC_PALETTE_SIZE,
					&tilcdc_crtc->palette_dma_handle,
					GFP_KERNEL | __GFP_ZERO);
	if (!tilcdc_crtc->palette_base)
		return -ENOMEM;
	*tilcdc_crtc->palette_base = TILCDC_PALETTE_FIRST_ENTRY;

	crtc = &tilcdc_crtc->base;

	ret = tilcdc_plane_init(dev, &tilcdc_crtc->primary);
	if (ret < 0)
		goto fail;

	mutex_init(&tilcdc_crtc->enable_lock);

	init_waitqueue_head(&tilcdc_crtc->frame_done_wq);

	spin_lock_init(&tilcdc_crtc->irq_lock);
	INIT_WORK(&tilcdc_crtc->recover_work, tilcdc_crtc_recover_work);

	ret = drm_crtc_init_with_planes(dev, crtc,
					&tilcdc_crtc->primary,
					NULL,
					&tilcdc_crtc_funcs,
					"tilcdc crtc");
	if (ret < 0)
		goto fail;

	drm_crtc_helper_add(crtc, &tilcdc_crtc_helper_funcs);

	/* componentized (e.g. tda998x) setups wire the encoder via OF graph */
	if (priv->is_componentized) {
		crtc->port = of_graph_get_port_by_id(dev->dev->of_node, 0);
		if (!crtc->port) { /* This should never happen */
			dev_err(dev->dev, "Port node not found in %pOF\n",
				dev->dev->of_node);
			ret = -EINVAL;
			goto fail;
		}
	}

	priv->crtc = crtc;
	return 0;

fail:
	tilcdc_crtc_destroy(crtc);
	return ret;
}
Rob Clark16ea9752013-01-08 15:04:28 -06001037}