2 * drivers/video/tegra/dc/dsi.c
4 * Copyright (c) 2011-2012, NVIDIA CORPORATION, All rights reserved.
6 * This software is licensed under the terms of the GNU General Public
7 * License version 2, as published by the Free Software Foundation, and
8 * may be copied, distributed, and modified under those terms.
10 * This program is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 * GNU General Public License for more details.
17 #include <linux/kernel.h>
18 #include <linux/clk.h>
19 #include <linux/delay.h>
20 #include <linux/err.h>
22 #include <linux/gpio.h>
23 #include <linux/interrupt.h>
24 #include <linux/slab.h>
25 #include <linux/spinlock.h>
26 #include <linux/workqueue.h>
27 #include <linux/moduleparam.h>
28 #include <linux/export.h>
29 #include <linux/debugfs.h>
30 #include <linux/seq_file.h>
31 #include <linux/nvhost.h>
32 #include <linux/lcm.h>
33 #include <linux/regulator/consumer.h>
39 #include <linux/nvhost.h>
/* NOTE(review): this listing carries the original file's line numbers as a
 * prefix on each line and has gaps where lines were dropped; treat it as a
 * reference dump, not compilable source. */
48 /* HACK! This needs to come from DT */
49 #include "../../../../arch/arm/mach-tegra/iomap.h"
/* MIPI pad control register lives in the APB_MISC aperture, not in DSI. */
51 #define APB_MISC_GP_MIPI_PAD_CTRL_0 (TEGRA_APB_MISC_BASE + 0x820)
52 #define DSIB_MODE_ENABLE 0x2
/* 0 = poll/delay instead of host1x sync points (see DSI_USE_SYNC_POINTS users). */
54 #define DSI_USE_SYNC_POINTS 0
/* Simple unit-scaling helpers: seconds->ms and ms->us (both are *1000). */
55 #define S_TO_MS(x) (1000 * (x))
56 #define MS_TO_US(x) (1000 * (x))
/* Software state encodings kept in struct tegra_dc_dsi_data (dsi->status.*).
 * Each group below is one state machine; 0x0 always means "not initialized". */
58 #define DSI_MODULE_NOT_INIT 0x0
59 #define DSI_MODULE_INIT 0x1
/* Low-power vs high-speed lane state. */
61 #define DSI_LPHS_NOT_INIT 0x0
62 #define DSI_LPHS_IN_LP_MODE 0x1
63 #define DSI_LPHS_IN_HS_MODE 0x2
/* Video-mode vs command-mode panel driving. */
65 #define DSI_VIDEO_TYPE_NOT_INIT 0x0
66 #define DSI_VIDEO_TYPE_VIDEO_MODE 0x1
67 #define DSI_VIDEO_TYPE_CMD_MODE 0x2
/* Who supplies pixel data: the display controller or the host CPU. */
69 #define DSI_DRIVEN_MODE_NOT_INIT 0x0
70 #define DSI_DRIVEN_MODE_DC 0x1
71 #define DSI_DRIVEN_MODE_HOST 0x2
/* PHY clock output gate. */
73 #define DSI_PHYCLK_OUT_DIS 0x0
74 #define DSI_PHYCLK_OUT_EN 0x1
/* PHY clock behavior: always running vs only during transmit. */
76 #define DSI_PHYCLK_NOT_INIT 0x0
77 #define DSI_PHYCLK_CONTINUOUS 0x1
78 #define DSI_PHYCLK_TX_ONLY 0x2
/* Clock burst configuration. */
80 #define DSI_CLK_BURST_NOT_INIT 0x0
81 #define DSI_CLK_BURST_NONE_BURST 0x1
82 #define DSI_CLK_BURST_BURST_MODE 0x2
/* DC video stream enable state. */
84 #define DSI_DC_STREAM_DISABLE 0x0
85 #define DSI_DC_STREAM_ENABLE 0x1
/* Pending low-power transaction type. */
87 #define DSI_LP_OP_NOT_INIT 0x0
88 #define DSI_LP_OP_WRITE 0x1
89 #define DSI_LP_OP_READ 0x2
/* Idle time (ms) before the host-suspend delayed work fires. */
91 #define DSI_HOST_IDLE_PERIOD 1000
/* One-shot guard so the DSI sync point is only reset once across instances. */
92 static atomic_t dsi_syncpt_rst = ATOMIC_INIT(0);
/* Debug knob (module parameter, rw for root): dump read-FIFO contents and
 * the returned packet type during LP reads. */
94 static bool enable_read_debug;
95 module_param(enable_read_debug, bool, 0644);
96 MODULE_PARM_DESC(enable_read_debug,
97 "Enable to print read fifo and return packet type");
99 /* source of video data */
/* NOTE(review): the enum's opening/closing lines (orig. 100/103) were lost
 * in extraction; these are its two enumerators. */
101 TEGRA_DSI_DRIVEN_BY_DC,
102 TEGRA_DSI_DRIVEN_BY_HOST,
/* Per-controller driver data, indexed by DSI instance number. */
105 static struct tegra_dc_dsi_data *tegra_dsi_instance[MAX_DSI_INSTANCE];
/* Packet-sequencer programming tables. Each table has NUMOF_PKT_SEQ entries
 * that are written into the DSI_PKT_SEQ_*_LO/HI registers (register list in
 * dsi_pkt_seq_reg). PKT_IDn/PKT_LENn encode up to six packets per sequence
 * slot; PKT_LP marks a return to low-power mode at end of line.
 * NOTE(review): original line numbers jump inside every initializer below -
 * some entries and the closing "};" lines are missing from this listing. */
107 const u32 dsi_pkt_seq_reg[NUMOF_PKT_SEQ] = {
/* Non-burst video mode with sync *events* plus explicit sync-end packets. */
122 const u32 dsi_pkt_seq_video_non_burst_syne[NUMOF_PKT_SEQ] = {
123 PKT_ID0(CMD_VS) | PKT_LEN0(0) | PKT_ID1(CMD_EOT) | PKT_LEN1(7) | PKT_LP,
125 PKT_ID0(CMD_VE) | PKT_LEN0(0) | PKT_ID1(CMD_EOT) | PKT_LEN1(7) | PKT_LP,
127 PKT_ID0(CMD_HS) | PKT_LEN0(0) | PKT_ID1(CMD_EOT) | PKT_LEN1(7) | PKT_LP,
129 PKT_ID0(CMD_HS) | PKT_LEN0(0) | PKT_ID1(CMD_BLNK) | PKT_LEN1(1) |
130 PKT_ID2(CMD_HE) | PKT_LEN2(0),
131 PKT_ID3(CMD_BLNK) | PKT_LEN3(2) | PKT_ID4(CMD_RGB) | PKT_LEN4(3) |
132 PKT_ID5(CMD_BLNK) | PKT_LEN5(4),
133 PKT_ID0(CMD_HS) | PKT_LEN0(0) | PKT_ID1(CMD_EOT) | PKT_LEN1(7) | PKT_LP,
135 PKT_ID0(CMD_HS) | PKT_LEN0(0) | PKT_ID1(CMD_BLNK) | PKT_LEN1(1) |
136 PKT_ID2(CMD_HE) | PKT_LEN2(0),
137 PKT_ID3(CMD_BLNK) | PKT_LEN3(2) | PKT_ID4(CMD_RGB) | PKT_LEN4(3) |
138 PKT_ID5(CMD_BLNK) | PKT_LEN5(4),
/* Non-burst video mode, sync events only (no CMD_VE/CMD_HE packets). */
141 const u32 dsi_pkt_seq_video_non_burst[NUMOF_PKT_SEQ] = {
142 PKT_ID0(CMD_VS) | PKT_LEN0(0) | PKT_ID1(CMD_EOT) | PKT_LEN1(7) | PKT_LP,
144 PKT_ID0(CMD_HS) | PKT_LEN0(0) | PKT_ID1(CMD_EOT) | PKT_LEN1(7) | PKT_LP,
146 PKT_ID0(CMD_HS) | PKT_LEN0(0) | PKT_ID1(CMD_EOT) | PKT_LEN1(7) | PKT_LP,
148 PKT_ID0(CMD_HS) | PKT_LEN0(0) | PKT_ID1(CMD_BLNK) | PKT_LEN1(2) |
149 PKT_ID2(CMD_RGB) | PKT_LEN2(3),
150 PKT_ID3(CMD_BLNK) | PKT_LEN3(4),
151 PKT_ID0(CMD_HS) | PKT_LEN0(0) | PKT_ID1(CMD_EOT) | PKT_LEN1(7) | PKT_LP,
153 PKT_ID0(CMD_HS) | PKT_LEN0(0) | PKT_ID1(CMD_BLNK) | PKT_LEN1(2) |
154 PKT_ID2(CMD_RGB) | PKT_LEN2(3),
155 PKT_ID3(CMD_BLNK) | PKT_LEN3(4),
/* Non-burst variant with no EOT packets, no LP transitions and no HBP blank. */
158 const u32 dsi_pkt_seq_video_non_burst_no_eot_no_lp_no_hbp[NUMOF_PKT_SEQ] = {
159 PKT_ID0(CMD_VS) | PKT_LEN0(0),
161 PKT_ID0(CMD_HS) | PKT_LEN0(0),
163 PKT_ID0(CMD_HS) | PKT_LEN0(0),
165 PKT_ID0(CMD_HS) | PKT_LEN0(0) | PKT_ID1(CMD_RGB) | PKT_LEN1(3) |
166 PKT_ID2(CMD_BLNK) | PKT_LEN2(4),
168 PKT_ID0(CMD_HS) | PKT_LEN0(0),
170 PKT_ID0(CMD_HS) | PKT_LEN0(0) | PKT_ID1(CMD_RGB) | PKT_LEN1(3) |
171 PKT_ID2(CMD_BLNK) | PKT_LEN2(4),
/* Burst video mode: RGB line ends with LP; EOT sent as separate packet. */
175 static const u32 dsi_pkt_seq_video_burst[NUMOF_PKT_SEQ] = {
176 PKT_ID0(CMD_VS) | PKT_LEN0(0) | PKT_ID1(CMD_EOT) | PKT_LEN1(7) | PKT_LP,
178 PKT_ID0(CMD_HS) | PKT_LEN0(0) | PKT_ID1(CMD_EOT) | PKT_LEN1(7) | PKT_LP,
180 PKT_ID0(CMD_HS) | PKT_LEN0(0) | PKT_ID1(CMD_EOT) | PKT_LEN1(7) | PKT_LP,
182 PKT_ID0(CMD_HS) | PKT_LEN0(0) | PKT_ID1(CMD_BLNK) | PKT_LEN1(2)|
183 PKT_ID2(CMD_RGB) | PKT_LEN2(3) | PKT_LP,
184 PKT_ID0(CMD_EOT) | PKT_LEN0(7),
185 PKT_ID0(CMD_HS) | PKT_LEN0(0) | PKT_ID1(CMD_EOT) | PKT_LEN1(7) | PKT_LP,
187 PKT_ID0(CMD_HS) | PKT_LEN0(0) | PKT_ID1(CMD_BLNK) | PKT_LEN1(2)|
188 PKT_ID2(CMD_RGB) | PKT_LEN2(3) | PKT_LP,
189 PKT_ID0(CMD_EOT) | PKT_LEN0(7),
/* Burst video mode without EOT packets. */
192 static const u32 dsi_pkt_seq_video_burst_no_eot[NUMOF_PKT_SEQ] = {
193 PKT_ID0(CMD_VS) | PKT_LEN0(0) | PKT_LP,
195 PKT_ID0(CMD_HS) | PKT_LEN0(0) | PKT_LP,
197 PKT_ID0(CMD_HS) | PKT_LEN0(0) | PKT_LP,
199 PKT_ID0(CMD_HS) | PKT_LEN0(0) | PKT_ID1(CMD_BLNK) | PKT_LEN1(2)|
200 PKT_ID2(CMD_RGB) | PKT_LEN2(3) | PKT_LP,
202 PKT_ID0(CMD_HS) | PKT_LEN0(0) | PKT_LP,
204 PKT_ID0(CMD_HS) | PKT_LEN0(0) | PKT_ID1(CMD_BLNK) | PKT_LEN1(2)|
205 PKT_ID2(CMD_RGB) | PKT_LEN2(3) | PKT_LP,
209 /* TODO: verify with hw about this format */
/* Command-mode (one-shot) sequence: DCS long-write packets carry the frame. */
210 const u32 dsi_pkt_seq_cmd_mode[NUMOF_PKT_SEQ] = {
217 PKT_ID0(CMD_LONGW) | PKT_LEN0(3) | PKT_ID1(CMD_EOT) | PKT_LEN1(7),
221 PKT_ID0(CMD_LONGW) | PKT_LEN0(3) | PKT_ID1(CMD_EOT) | PKT_LEN1(7),
/* Registers cleared/initialized on controller init. The VS1 list below adds
 * the pad-control and ganged-mode registers that only exist on newer
 * (version-1) DSI hardware.
 * NOTE(review): most init_reg[] entries (orig. lines 226-264) are missing
 * from this listing. */
225 const u32 init_reg[] = {
251 DSI_HOST_DSI_CONTROL,
258 DSI_INIT_SEQ_CONTROL,
265 const u32 init_reg_vs1_ext[] = {
266 DSI_PAD_CONTROL_0_VS1,
267 DSI_PAD_CONTROL_CD_VS1,
268 DSI_PAD_CD_STATUS_VS1,
269 DSI_PAD_CONTROL_1_VS1,
270 DSI_PAD_CONTROL_2_VS1,
271 DSI_PAD_CONTROL_3_VS1,
272 DSI_PAD_CONTROL_4_VS1,
273 DSI_GANGED_MODE_CONTROL,
274 DSI_GANGED_MODE_START,
275 DSI_GANGED_MODE_SIZE,
/* Forward declarations for the host suspend/resume and idle-work paths. */
278 static int tegra_dsi_host_suspend(struct tegra_dc *dc);
279 static int tegra_dsi_host_resume(struct tegra_dc *dc);
280 static void tegra_dc_dsi_idle_work(struct work_struct *work);
281 static void tegra_dsi_send_dc_frames(struct tegra_dc *dc,
282 struct tegra_dc_dsi_data *dsi,
/*
 * tegra_dsi_readl - read a DSI register.
 * @reg is a 32-bit word index; byte offset into the aperture is reg * 4.
 * BUG_ON fires if the nvhost module is not powered (register access would hang).
 * NOTE(review): orig. lines 286-288/292-293 (opening brace, 'ret' declaration,
 * return statement) were dropped by extraction.
 */
285 inline unsigned long tegra_dsi_readl(struct tegra_dc_dsi_data *dsi, u32 reg)
289 BUG_ON(!nvhost_module_powered_ext(dsi->dc->ndev));
290 ret = readl(dsi->base + reg * 4);
291 trace_display_readl(dsi->dc, ret, dsi->base + reg * 4);
294 EXPORT_SYMBOL(tegra_dsi_readl);
/*
 * tegra_dsi_writel - write a DSI register (word index, byte offset = reg * 4).
 * Traces the write before issuing it; requires nvhost power like the read path.
 */
296 inline void tegra_dsi_writel(struct tegra_dc_dsi_data *dsi, u32 val, u32 reg)
298 BUG_ON(!nvhost_module_powered_ext(dsi->dc->ndev));
299 trace_display_writel(dsi->dc, val, dsi->base + reg * 4);
300 writel(val, dsi->base + reg * 4);
302 EXPORT_SYMBOL(tegra_dsi_writel);
304 #ifdef CONFIG_DEBUG_FS
/*
 * dbg_dsi_show - seq_file dump of all interesting DSI registers.
 * Enables DC I/O and the DSI clock around the reads so the aperture is live,
 * then prints "<name> <offset> <value>" per register via DUMP_REG.
 * NOTE(review): several DUMP_REG lines and the trailing return are missing
 * from this listing (original line numbers jump at 319, 366-367, 370+).
 */
305 static int dbg_dsi_show(struct seq_file *s, void *unused)
307 struct tegra_dc_dsi_data *dsi = s->private;
309 #define DUMP_REG(a) do { \
310 seq_printf(s, "%-32s\t%03x\t%08lx\n", \
311 #a, a, tegra_dsi_readl(dsi, a)); \
314 tegra_dc_io_start(dsi->dc);
315 clk_prepare_enable(dsi->dsi_clk);
317 DUMP_REG(DSI_INCR_SYNCPT_CNTRL);
318 DUMP_REG(DSI_INCR_SYNCPT_ERROR);
320 DUMP_REG(DSI_POWER_CONTROL);
321 DUMP_REG(DSI_INT_ENABLE);
322 DUMP_REG(DSI_HOST_DSI_CONTROL);
323 DUMP_REG(DSI_CONTROL);
324 DUMP_REG(DSI_SOL_DELAY);
325 DUMP_REG(DSI_MAX_THRESHOLD);
326 DUMP_REG(DSI_TRIGGER);
327 DUMP_REG(DSI_TX_CRC);
328 DUMP_REG(DSI_STATUS);
329 DUMP_REG(DSI_INIT_SEQ_CONTROL);
330 DUMP_REG(DSI_INIT_SEQ_DATA_0);
331 DUMP_REG(DSI_INIT_SEQ_DATA_1);
332 DUMP_REG(DSI_INIT_SEQ_DATA_2);
333 DUMP_REG(DSI_INIT_SEQ_DATA_3);
334 DUMP_REG(DSI_INIT_SEQ_DATA_4);
335 DUMP_REG(DSI_INIT_SEQ_DATA_5);
336 DUMP_REG(DSI_INIT_SEQ_DATA_6);
337 DUMP_REG(DSI_INIT_SEQ_DATA_7);
338 DUMP_REG(DSI_PKT_SEQ_0_LO);
339 DUMP_REG(DSI_PKT_SEQ_0_HI);
340 DUMP_REG(DSI_PKT_SEQ_1_LO);
341 DUMP_REG(DSI_PKT_SEQ_1_HI);
342 DUMP_REG(DSI_PKT_SEQ_2_LO);
343 DUMP_REG(DSI_PKT_SEQ_2_HI);
344 DUMP_REG(DSI_PKT_SEQ_3_LO);
345 DUMP_REG(DSI_PKT_SEQ_3_HI);
346 DUMP_REG(DSI_PKT_SEQ_4_LO);
347 DUMP_REG(DSI_PKT_SEQ_4_HI);
348 DUMP_REG(DSI_PKT_SEQ_5_LO);
349 DUMP_REG(DSI_PKT_SEQ_5_HI);
350 DUMP_REG(DSI_DCS_CMDS);
351 DUMP_REG(DSI_PKT_LEN_0_1);
352 DUMP_REG(DSI_PKT_LEN_2_3);
353 DUMP_REG(DSI_PKT_LEN_4_5);
354 DUMP_REG(DSI_PKT_LEN_6_7);
355 DUMP_REG(DSI_PHY_TIMING_0);
356 DUMP_REG(DSI_PHY_TIMING_1);
357 DUMP_REG(DSI_PHY_TIMING_2);
358 DUMP_REG(DSI_BTA_TIMING);
359 DUMP_REG(DSI_TIMEOUT_0);
360 DUMP_REG(DSI_TIMEOUT_1);
361 DUMP_REG(DSI_TO_TALLY);
362 DUMP_REG(DSI_PAD_CONTROL);
363 DUMP_REG(DSI_PAD_CONTROL_CD);
364 DUMP_REG(DSI_PAD_CD_STATUS);
365 DUMP_REG(DSI_VID_MODE_CONTROL);
368 clk_disable_unprepare(dsi->dsi_clk);
369 tegra_dc_io_end(dsi->dc);
/* Standard single_open boilerplate for the "regs" debugfs file. */
374 static int dbg_dsi_open(struct inode *inode, struct file *file)
376 return single_open(file, dbg_dsi_show, inode->i_private);
379 static const struct file_operations dbg_fops = {
380 .open = dbg_dsi_open,
383 .release = single_release,
386 static struct dentry *dsidir;
/*
 * tegra_dc_dsi_debug_create - create /sys/kernel/debug/tegra_dsi/regs.
 * NOTE(review): the NULL-check / error-unwind lines (orig. 393-400, 402-404)
 * are missing here; debugfs_remove_recursive is presumably the failure path.
 */
388 static void tegra_dc_dsi_debug_create(struct tegra_dc_dsi_data *dsi)
390 struct dentry *retval;
392 dsidir = debugfs_create_dir("tegra_dsi", NULL);
395 retval = debugfs_create_file("regs", S_IRUGO, dsidir, dsi,
401 debugfs_remove_recursive(dsidir);
/* !CONFIG_DEBUG_FS stub (the #else/#endif lines are missing from the dump). */
406 static inline void tegra_dc_dsi_debug_create(struct tegra_dc_dsi_data *dsi)
/* Enable the DSI clock only if it is not already on (avoids double prepare). */
410 static inline void tegra_dsi_clk_enable(struct tegra_dc_dsi_data *dsi)
412 if (!tegra_is_clk_enabled(dsi->dsi_clk))
413 clk_prepare_enable(dsi->dsi_clk);
/* Mirror of tegra_dsi_clk_enable: disable only if currently enabled. */
416 static inline void tegra_dsi_clk_disable(struct tegra_dc_dsi_data *dsi)
418 if (tegra_is_clk_enabled(dsi->dsi_clk))
419 clk_disable_unprepare(dsi->dsi_clk);
/*
 * Pulse the sync point control bit (1 then 0) to reset the DSI sync point.
 * NOTE(review): the delay calls after each "stabilization delay" comment
 * (orig. lines 427/430) are missing from this listing.
 */
422 static void __maybe_unused tegra_dsi_syncpt_reset(
423 struct tegra_dc_dsi_data *dsi)
425 tegra_dsi_writel(dsi, 0x1, DSI_INCR_SYNCPT_CNTRL);
426 /* stabilization delay */
428 tegra_dsi_writel(dsi, 0x0, DSI_INCR_SYNCPT_CNTRL);
429 /* stabilization delay */
/*
 * Arm an OP_DONE sync-point increment and block until host1x reports the
 * condition (current value + 1). Logs on failure; return path not visible
 * in this listing (orig. lines 446, 448+ missing).
 */
433 static int __maybe_unused tegra_dsi_syncpt(struct tegra_dc_dsi_data *dsi)
438 dsi->syncpt_val = nvhost_syncpt_read_ext(dsi->dc->ndev, dsi->syncpt_id);
440 val = DSI_INCR_SYNCPT_COND(OP_DONE) |
441 DSI_INCR_SYNCPT_INDX(dsi->syncpt_id);
442 tegra_dsi_writel(dsi, val, DSI_INCR_SYNCPT);
444 ret = nvhost_syncpt_wait_timeout_ext(dsi->dc->ndev, dsi->syncpt_id,
445 dsi->syncpt_val + 1, MAX_SCHEDULE_TIMEOUT, NULL);
447 dev_err(&dsi->dc->ndev->dev, "DSI sync point failure\n");
/*
 * Pick the high-speed clock rate (kHz): scale the default pixel clock by
 * shift_clk_div for burst modes, otherwise use the default HS clock.
 * NOTE(review): the break statements / default label between cases
 * (orig. 470, 474, 477-479) are missing from this listing.
 */
457 static u32 tegra_dsi_get_hs_clk_rate(struct tegra_dc_dsi_data *dsi)
459 u32 dsi_clock_rate_khz;
461 switch (dsi->info.video_burst_mode) {
462 case TEGRA_DSI_VIDEO_BURST_MODE_LOW_SPEED:
463 case TEGRA_DSI_VIDEO_BURST_MODE_MEDIUM_SPEED:
464 case TEGRA_DSI_VIDEO_BURST_MODE_FAST_SPEED:
465 case TEGRA_DSI_VIDEO_BURST_MODE_FASTEST_SPEED:
466 /* Calculate DSI HS clock rate for DSI burst mode */
467 dsi_clock_rate_khz = dsi->default_pixel_clk_khz *
468 dsi->shift_clk_div.mul /
469 dsi->shift_clk_div.div;
471 case TEGRA_DSI_VIDEO_NONE_BURST_MODE:
472 case TEGRA_DSI_VIDEO_NONE_BURST_MODE_WITH_SYNC_END:
473 case TEGRA_DSI_VIDEO_BURST_MODE_LOWEST_SPEED:
475 /* Clock rate is default DSI clock rate for non-burst mode */
476 dsi_clock_rate_khz = dsi->default_hs_clk_khz;
480 return dsi_clock_rate_khz;
/*
 * Pick the low-power command-mode clock rate (kHz). When HS clock must stay
 * on in LP command mode, use the platform override or fall back to the HS
 * rate; otherwise choose the LP read or LP command frequency by @lp_op.
 * NOTE(review): the assignment left-hand sides / else lines (orig. 489, 491,
 * 493, 495, 497-498, 500) are missing from this listing.
 */
483 static u32 tegra_dsi_get_lp_clk_rate(struct tegra_dc_dsi_data *dsi, u8 lp_op)
485 u32 dsi_clock_rate_khz;
487 if (dsi->info.enable_hs_clock_on_lp_cmd_mode)
488 if (dsi->info.hs_clk_in_lp_cmd_mode_freq_khz)
490 dsi->info.hs_clk_in_lp_cmd_mode_freq_khz;
492 dsi_clock_rate_khz = tegra_dsi_get_hs_clk_rate(dsi);
494 if (lp_op == DSI_LP_OP_READ)
496 dsi->info.lp_read_cmd_mode_freq_khz;
499 dsi->info.lp_cmd_mode_freq_khz;
501 return dsi_clock_rate_khz;
/*
 * Compute the effective shift clock divider as a mul/div pair.
 * For burst modes the divider is linearly interpolated between the default
 * divider and the maximum divider allowed by max_panel_freq_khz, weighted
 * by how far video_burst_mode sits between LOWEST and FASTEST speed.
 * lcm() of the two denominators is used so the subtraction of the two
 * fractions stays in integer arithmetic.
 */
504 static struct tegra_dc_shift_clk_div tegra_dsi_get_shift_clk_div(
505 struct tegra_dc_dsi_data *dsi)
507 struct tegra_dc_shift_clk_div shift_clk_div;
508 struct tegra_dc_shift_clk_div max_shift_clk_div;
513 /* Get the real value of default shift_clk_div. default_shift_clk_div
514 * holds the real value of shift_clk_div.
516 shift_clk_div = dsi->default_shift_clk_div;
518 /* Calculate shift_clk_div which can match the video_burst_mode. */
519 if (dsi->info.video_burst_mode >=
520 TEGRA_DSI_VIDEO_BURST_MODE_LOWEST_SPEED) {
521 if (dsi->info.max_panel_freq_khz >= dsi->default_hs_clk_khz) {
523 * dsi->info.max_panel_freq_khz * shift_clk_div /
524 * dsi->default_hs_clk_khz
526 max_shift_clk_div.mul = dsi->info.max_panel_freq_khz *
528 max_shift_clk_div.div = dsi->default_hs_clk_khz *
529 dsi->default_shift_clk_div.div;
531 max_shift_clk_div = shift_clk_div;
534 burst_width = dsi->info.video_burst_mode
535 - TEGRA_DSI_VIDEO_BURST_MODE_LOWEST_SPEED;
536 burst_width_max = TEGRA_DSI_VIDEO_BURST_MODE_FASTEST_SPEED
537 - TEGRA_DSI_VIDEO_BURST_MODE_LOWEST_SPEED;
540 * (max_shift_clk_div - shift_clk_div) *
541 * burst_width / burst_width_max
543 temp_lcm = lcm(max_shift_clk_div.div, shift_clk_div.div);
544 shift_clk_div.mul = (max_shift_clk_div.mul * temp_lcm /
545 max_shift_clk_div.div -
546 shift_clk_div.mul * temp_lcm /
549 shift_clk_div.div = temp_lcm * burst_width_max;
552 return shift_clk_div;
/*
 * Pad horizontal timings so ganged-mode constraints are met: h_active (for
 * symmetric even/odd pixel split) and total line width must be multiples of
 * n_data_lanes, and the line's byte count (after pixel_scaler_mul/div
 * scaling) must also divide evenly across lanes. Corrections are absorbed
 * into h_front_porch and h_active, mutating dc->mode in place.
 * NOTE(review): the if/while guards around the corrections (orig. 568, 575,
 * 583, 586-590) are missing from this listing.
 */
555 static void tegra_dsi_pix_correction(struct tegra_dc *dc,
556 struct tegra_dc_dsi_data *dsi)
563 h_width_pixels = dc->mode.h_back_porch + dc->mode.h_front_porch +
564 dc->mode.h_sync_width + dc->mode.h_active;
566 if (dsi->info.ganged_type == TEGRA_DSI_GANGED_SYMMETRIC_EVEN_ODD) {
567 temp = dc->mode.h_active % dsi->info.n_data_lanes;
569 h_act_corr = dsi->info.n_data_lanes - temp;
570 h_width_pixels += h_act_corr;
574 temp = h_width_pixels % dsi->info.n_data_lanes;
576 hfp_corr = dsi->info.n_data_lanes - temp;
577 h_width_pixels += hfp_corr;
581 temp = (h_width_pixels * dsi->pixel_scaler_mul /
582 dsi->pixel_scaler_div) % dsi->info.n_data_lanes;
584 hfp_corr += dsi->info.n_data_lanes;
585 h_width_pixels += dsi->info.n_data_lanes;
591 dc->mode.h_front_porch += hfp_corr;
592 dc->mode.h_active += h_act_corr;
/*
 * One-time software-state initialization for a DSI instance:
 *  - derive bytes-per-pixel as a mul/div pair from the pixel format
 *    (18bpp packed is 2.25 B/px, hence the 9/4 ratio);
 *  - build the cached DSI_CONTROL value;
 *  - apply ganged-mode pixel corrections;
 *  - compute required pixel/byte/PLLD clock rates from the mode timings and
 *    refresh rate, then the default and burst-adjusted shift clock dividers
 *    and target LP/HS clock rates;
 *  - set up host-suspend bookkeeping (refcount, lock, idle delayed work).
 */
595 static void tegra_dsi_init_sw(struct tegra_dc *dc,
596 struct tegra_dc_dsi_data *dsi)
605 switch (dsi->info.pixel_format) {
606 case TEGRA_DSI_PIXEL_FORMAT_16BIT_P:
607 /* 2 bytes per pixel */
608 dsi->pixel_scaler_mul = 2;
609 dsi->pixel_scaler_div = 1;
611 case TEGRA_DSI_PIXEL_FORMAT_18BIT_P:
612 /* 2.25 bytes per pixel */
613 dsi->pixel_scaler_mul = 9;
614 dsi->pixel_scaler_div = 4;
616 case TEGRA_DSI_PIXEL_FORMAT_18BIT_NP:
617 case TEGRA_DSI_PIXEL_FORMAT_24BIT_P:
618 /* 3 bytes per pixel */
619 dsi->pixel_scaler_mul = 3;
620 dsi->pixel_scaler_div = 1;
627 dsi->enabled = false;
628 dsi->clk_ref = false;
/* Ganged mode splits the lanes between two controllers. */
630 if (dsi->info.ganged_type == TEGRA_DSI_GANGED_SYMMETRIC_LEFT_RIGHT ||
631 dsi->info.ganged_type == TEGRA_DSI_GANGED_SYMMETRIC_EVEN_ODD)
632 n_data_lanes = dsi->info.n_data_lanes / 2;
634 dsi->dsi_control_val =
635 DSI_CONTROL_VIRTUAL_CHANNEL(dsi->info.virtual_channel) |
636 DSI_CONTROL_NUM_DATA_LANES(dsi->info.n_data_lanes - 1) |
637 DSI_CONTROL_VID_SOURCE(dc->ndev->id) |
638 DSI_CONTROL_DATA_FORMAT(dsi->info.pixel_format);
640 if (dsi->info.ganged_type)
641 tegra_dsi_pix_correction(dc, dsi);
643 /* Below we are going to calculate dsi and dc clock rate.
644 * Calcuate the horizontal and vertical width.
646 h_width_pixels = dc->mode.h_back_porch + dc->mode.h_front_porch +
647 dc->mode.h_sync_width + dc->mode.h_active;
649 v_width_lines = dc->mode.v_back_porch + dc->mode.v_front_porch +
650 dc->mode.v_sync_width + dc->mode.v_active;
652 /* Calculate minimum required pixel rate. */
653 pixel_clk_hz = h_width_pixels * v_width_lines * dsi->info.refresh_rate;
654 if (dc->out->flags & TEGRA_DC_OUT_ONE_SHOT_MODE) {
655 if (dsi->info.rated_refresh_rate >= dsi->info.refresh_rate)
656 dev_info(&dc->ndev->dev, "DSI: measured refresh rate "
657 "should be larger than rated refresh rate.\n");
658 dc->mode.rated_pclk = h_width_pixels * v_width_lines *
659 dsi->info.rated_refresh_rate;
662 /* Calculate minimum byte rate on DSI interface. */
663 byte_clk_hz = (pixel_clk_hz * dsi->pixel_scaler_mul) /
664 (dsi->pixel_scaler_div * dsi->info.n_data_lanes);
666 /* Round up to multiple of mega hz. */
667 plld_clk_mhz = DIV_ROUND_UP((byte_clk_hz * NUMOF_BIT_PER_BYTE),
670 /* Calculate default real shift_clk_div. */
671 dsi->default_shift_clk_div.mul = NUMOF_BIT_PER_BYTE *
672 dsi->pixel_scaler_mul;
673 dsi->default_shift_clk_div.div = 2 * dsi->pixel_scaler_div *
674 dsi->info.n_data_lanes;
676 /* Calculate default DSI hs clock. DSI interface is double data rate.
677 * Data is transferred on both rising and falling edge of clk, div by 2
678 * to get the actual clock rate.
680 dsi->default_hs_clk_khz = plld_clk_mhz * 1000 / 2;
682 dsi->default_pixel_clk_khz = (plld_clk_mhz * 1000 *
683 dsi->default_shift_clk_div.div) /
684 (2 * dsi->default_shift_clk_div.mul);
686 /* Get the actual shift_clk_div and clock rates. */
687 dsi->shift_clk_div = tegra_dsi_get_shift_clk_div(dsi);
688 dsi->target_lp_clk_khz =
689 tegra_dsi_get_lp_clk_rate(dsi, DSI_LP_OP_WRITE);
690 dsi->target_hs_clk_khz = tegra_dsi_get_hs_clk_rate(dsi);
692 dev_info(&dc->ndev->dev, "DSI: HS clock rate is %d\n",
693 dsi->target_hs_clk_khz);
695 #if DSI_USE_SYNC_POINTS
696 dsi->syncpt_id = NVSYNCPT_DSI;
700 * Force video clock to be continuous mode if
701 * enable_hs_clock_on_lp_cmd_mode is set
703 if (dsi->info.enable_hs_clock_on_lp_cmd_mode) {
704 if (dsi->info.video_clock_mode !=
705 TEGRA_DSI_VIDEO_CLOCK_CONTINUOUS)
706 dev_warn(&dc->ndev->dev,
707 "Force clock continuous mode\n");
709 dsi->info.video_clock_mode = TEGRA_DSI_VIDEO_CLOCK_CONTINUOUS;
/* Host-suspend bookkeeping: idle work fires after DSI_HOST_IDLE_PERIOD ms. */
712 atomic_set(&dsi->host_ref, 0);
713 dsi->host_suspended = false;
714 mutex_init(&dsi->host_lock);
715 init_completion(&dc->out->user_vblank_comp);
716 INIT_DELAYED_WORK(&dsi->idle_work, tegra_dc_dsi_idle_work);
717 dsi->idle_delay = msecs_to_jiffies(DSI_HOST_IDLE_PERIOD);
/* Choose a PHY timing value: if the platform supplied a nanosecond value,
 * convert it to hardware units (clamped at 0 from below); otherwise use the
 * precomputed default, also clamped at 0. The conversion macro is evaluated
 * twice, so arguments should be side-effect free. */
720 #define SELECT_T_PHY(platform_t_phy_ns, default_phy, clk_ns, hw_inc) ( \
721 (platform_t_phy_ns) ? ( \
722 ((DSI_CONVERT_T_PHY_NS_TO_T_PHY(platform_t_phy_ns, clk_ns, hw_inc)) < 0 ? 0 : \
723 (DSI_CONVERT_T_PHY_NS_TO_T_PHY(platform_t_phy_ns, clk_ns, hw_inc)))) : \
724 ((default_phy) < 0 ? 0 : (default_phy)))
/*
 * Fill the clock-lane PHY timings (TLPX, TCLK-TRAIL/-POST/-ZERO/-PREPARE/-PRE)
 * from platform overrides or per-clock defaults, via SELECT_T_PHY.
 */
726 static void tegra_dsi_get_clk_phy_timing(struct tegra_dc_dsi_data *dsi,
727 struct dsi_phy_timing_inclk *phy_timing_clk, u32 clk_ns)
729 phy_timing_clk->t_tlpx = SELECT_T_PHY(
730 dsi->info.phy_timing.t_tlpx_ns,
731 T_TLPX_DEFAULT(clk_ns), clk_ns, T_TLPX_HW_INC);
733 phy_timing_clk->t_clktrail = SELECT_T_PHY(
734 dsi->info.phy_timing.t_clktrail_ns,
735 T_CLKTRAIL_DEFAULT(clk_ns), clk_ns, T_CLKTRAIL_HW_INC);
737 phy_timing_clk->t_clkpost = SELECT_T_PHY(
738 dsi->info.phy_timing.t_clkpost_ns,
739 T_CLKPOST_DEFAULT(clk_ns), clk_ns, T_CLKPOST_HW_INC);
741 phy_timing_clk->t_clkzero = SELECT_T_PHY(
742 dsi->info.phy_timing.t_clkzero_ns,
743 T_CLKZERO_DEFAULT(clk_ns), clk_ns, T_CLKZERO_HW_INC);
745 phy_timing_clk->t_clkprepare = SELECT_T_PHY(
746 dsi->info.phy_timing.t_clkprepare_ns,
747 T_CLKPREPARE_DEFAULT(clk_ns), clk_ns, T_CLKPREPARE_HW_INC);
749 phy_timing_clk->t_clkpre = SELECT_T_PHY(
750 dsi->info.phy_timing.t_clkpre_ns,
751 T_CLKPRE_DEFAULT, clk_ns, T_CLKPRE_HW_INC);
/*
 * Fill the data-lane high-speed PHY timings (TLPX, THS-EXIT/-TRAIL,
 * THS-ZERO, THS-PREPARE) from platform overrides or defaults.
 */
754 static void tegra_dsi_get_hs_phy_timing(struct tegra_dc_dsi_data *dsi,
755 struct dsi_phy_timing_inclk *phy_timing_clk, u32 clk_ns)
757 phy_timing_clk->t_tlpx = SELECT_T_PHY(
758 dsi->info.phy_timing.t_tlpx_ns,
759 T_TLPX_DEFAULT(clk_ns), clk_ns, T_TLPX_HW_INC);
761 phy_timing_clk->t_hsdexit = SELECT_T_PHY(
762 dsi->info.phy_timing.t_hsdexit_ns,
763 T_HSEXIT_DEFAULT(clk_ns), clk_ns, T_HSEXIT_HW_INC);
765 phy_timing_clk->t_hstrail = SELECT_T_PHY(
766 dsi->info.phy_timing.t_hstrail_ns,
767 T_HSTRAIL_DEFAULT(clk_ns), clk_ns, T_HSTRAIL_HW_INC);
769 phy_timing_clk->t_datzero = SELECT_T_PHY(
770 dsi->info.phy_timing.t_datzero_ns,
771 T_DATZERO_DEFAULT(clk_ns), clk_ns, T_DATZERO_HW_INC);
773 phy_timing_clk->t_hsprepare = SELECT_T_PHY(
774 dsi->info.phy_timing.t_hsprepare_ns,
775 T_HSPREPARE_DEFAULT(clk_ns), clk_ns, T_HSPREPARE_HW_INC);
/* Escape-mode needs only TLPX; platform override or default. */
778 static void tegra_dsi_get_escape_phy_timing(struct tegra_dc_dsi_data *dsi,
779 struct dsi_phy_timing_inclk *phy_timing_clk, u32 clk_ns)
781 phy_timing_clk->t_tlpx = SELECT_T_PHY(
782 dsi->info.phy_timing.t_tlpx_ns,
783 T_TLPX_DEFAULT(clk_ns), clk_ns, T_TLPX_HW_INC);
/*
 * Fill bus-turnaround (BTA) timings: TLPX plus TA-GET/-SURE/-GO.
 */
786 static void tegra_dsi_get_bta_phy_timing(struct tegra_dc_dsi_data *dsi,
787 struct dsi_phy_timing_inclk *phy_timing_clk, u32 clk_ns)
789 phy_timing_clk->t_tlpx = SELECT_T_PHY(
790 dsi->info.phy_timing.t_tlpx_ns,
791 T_TLPX_DEFAULT(clk_ns), clk_ns, T_TLPX_HW_INC);
793 phy_timing_clk->t_taget = SELECT_T_PHY(
794 dsi->info.phy_timing.t_taget_ns,
795 T_TAGET_DEFAULT(clk_ns), clk_ns, T_TAGET_HW_INC);
797 phy_timing_clk->t_tasure = SELECT_T_PHY(
798 dsi->info.phy_timing.t_tasure_ns,
799 T_TASURE_DEFAULT(clk_ns), clk_ns, T_TASURE_HW_INC);
801 phy_timing_clk->t_tago = SELECT_T_PHY(
802 dsi->info.phy_timing.t_tago_ns,
803 T_TAGO_DEFAULT(clk_ns), clk_ns, T_TAGO_HW_INC);
/*
 * Fill ultra-low-power-state timings: TLPX and the ULPS wakeup time.
 * T_WAKEUP_DEFAULT takes no clk_ns argument (fixed default).
 */
806 static void tegra_dsi_get_ulps_phy_timing(struct tegra_dc_dsi_data *dsi,
807 struct dsi_phy_timing_inclk *phy_timing_clk, u32 clk_ns)
809 phy_timing_clk->t_tlpx = SELECT_T_PHY(
810 dsi->info.phy_timing.t_tlpx_ns,
811 T_TLPX_DEFAULT(clk_ns), clk_ns, T_TLPX_HW_INC);
813 phy_timing_clk->t_wakeup = SELECT_T_PHY(
814 dsi->info.phy_timing.t_wakeup_ns,
815 T_WAKEUP_DEFAULT, clk_ns, T_WAKEUP_HW_INC);
/*
 * Top-level PHY timing fill. On non-silicon (FPGA) builds with no ganged
 * mode, clk_ns is recomputed from the FPGA frequency. HS mode gets clock +
 * HS-data timings; LP mode gets escape/BTA/ULPS timings (plus clock timings
 * when the HS clock must run during LP commands).
 * NOTE(review): the #endif and t_hsdexit guard lines around orig. 828-830
 * are missing from this listing.
 */
820 static void tegra_dsi_get_phy_timing(struct tegra_dc_dsi_data *dsi,
821 struct dsi_phy_timing_inclk *phy_timing_clk,
824 if (!(dsi->info.ganged_type)) {
825 #ifndef CONFIG_TEGRA_SILICON_PLATFORM
826 clk_ns = (1000 * 1000) / (dsi->info.fpga_freq_khz ?
827 dsi->info.fpga_freq_khz : DEFAULT_FPGA_FREQ_KHZ);
831 phy_timing_clk->t_hsdexit = dsi->info.phy_timing.t_hsdexit_ns ?
832 (dsi->info.phy_timing.t_hsdexit_ns / clk_ns) :
833 (T_HSEXIT_DEFAULT(clk_ns));
835 if (lphs == DSI_LPHS_IN_HS_MODE) {
836 tegra_dsi_get_clk_phy_timing(dsi, phy_timing_clk, clk_ns);
837 tegra_dsi_get_hs_phy_timing(dsi, phy_timing_clk, clk_ns);
839 /* default is LP mode */
840 tegra_dsi_get_escape_phy_timing(dsi, phy_timing_clk, clk_ns);
841 tegra_dsi_get_bta_phy_timing(dsi, phy_timing_clk, clk_ns);
842 tegra_dsi_get_ulps_phy_timing(dsi, phy_timing_clk, clk_ns);
843 if (dsi->info.enable_hs_clock_on_lp_cmd_mode)
844 tegra_dsi_get_clk_phy_timing(dsi, phy_timing_clk, clk_ns);
/*
 * Validate every programmed PHY timing against the MIPI D-PHY min/max
 * ranges (converted back to nanoseconds first). Each violation only emits a
 * dev_warn; the err accumulation / return lines are not visible in this
 * listing (original line numbers jump after each warning).
 * CHECK_RANGE yields -EINVAL when val is outside [min, max]; NOT_DEFINED
 * bounds are skipped.
 */
848 static int tegra_dsi_mipi_phy_timing_range(struct tegra_dc_dsi_data *dsi,
849 struct dsi_phy_timing_inclk *phy_timing,
854 #define CHECK_RANGE(val, min, max) ( \
855 ((min) == NOT_DEFINED ? 0 : (val) < (min)) || \
856 ((max) == NOT_DEFINED ? 0 : (val) > (max)) ? -EINVAL : 0)
858 #ifndef CONFIG_TEGRA_SILICON_PLATFORM
859 clk_ns = dsi->info.fpga_freq_khz ?
860 ((1000 * 1000) / dsi->info.fpga_freq_khz) :
861 DEFAULT_FPGA_FREQ_KHZ;
/* TLPX is common to LP and HS operation. */
865 DSI_CONVERT_T_PHY_TO_T_PHY_NS(
866 phy_timing->t_tlpx, clk_ns, T_TLPX_HW_INC),
867 MIPI_T_TLPX_NS_MIN, MIPI_T_TLPX_NS_MAX);
869 dev_warn(&dsi->dc->ndev->dev,
870 "dsi: Tlpx mipi range violated\n");
/* HS-mode data-lane checks. */
874 if (lphs == DSI_LPHS_IN_HS_MODE) {
876 DSI_CONVERT_T_PHY_TO_T_PHY_NS(
877 phy_timing->t_hsdexit, clk_ns, T_HSEXIT_HW_INC),
878 MIPI_T_HSEXIT_NS_MIN, MIPI_T_HSEXIT_NS_MAX);
880 dev_warn(&dsi->dc->ndev->dev,
881 "dsi: HsExit mipi range violated\n");
886 DSI_CONVERT_T_PHY_TO_T_PHY_NS(
887 phy_timing->t_hstrail, clk_ns, T_HSTRAIL_HW_INC),
888 MIPI_T_HSTRAIL_NS_MIN(clk_ns), MIPI_T_HSTRAIL_NS_MAX);
890 dev_warn(&dsi->dc->ndev->dev,
891 "dsi: HsTrail mipi range violated\n");
896 DSI_CONVERT_T_PHY_TO_T_PHY_NS(
897 phy_timing->t_datzero, clk_ns, T_DATZERO_HW_INC),
898 MIPI_T_HSZERO_NS_MIN, MIPI_T_HSZERO_NS_MAX);
900 dev_warn(&dsi->dc->ndev->dev,
901 "dsi: HsZero mipi range violated\n");
906 DSI_CONVERT_T_PHY_TO_T_PHY_NS(
907 phy_timing->t_hsprepare, clk_ns, T_HSPREPARE_HW_INC),
908 MIPI_T_HSPREPARE_NS_MIN(clk_ns),
909 MIPI_T_HSPREPARE_NS_MAX(clk_ns));
911 dev_warn(&dsi->dc->ndev->dev,
912 "dsi: HsPrepare mipi range violated\n");
/* The spec also bounds the SUM of Ths-prepare and Ths-zero. */
917 DSI_CONVERT_T_PHY_TO_T_PHY_NS(
918 phy_timing->t_hsprepare, clk_ns, T_HSPREPARE_HW_INC) +
919 DSI_CONVERT_T_PHY_TO_T_PHY_NS(
920 phy_timing->t_datzero, clk_ns, T_DATZERO_HW_INC),
921 MIPI_T_HSPREPARE_ADD_HSZERO_NS_MIN(clk_ns),
922 MIPI_T_HSPREPARE_ADD_HSZERO_NS_MAX);
924 dev_warn(&dsi->dc->ndev->dev,
925 "dsi: HsPrepare + HsZero mipi range violated\n");
929 /* default is LP mode */
931 DSI_CONVERT_T_PHY_TO_T_PHY_NS(
932 phy_timing->t_wakeup, clk_ns, T_WAKEUP_HW_INC),
933 MIPI_T_WAKEUP_NS_MIN, MIPI_T_WAKEUP_NS_MAX);
935 dev_warn(&dsi->dc->ndev->dev,
936 "dsi: WakeUp mipi range violated\n");
/* Ta-sure bounds depend on the programmed TLPX value. */
941 DSI_CONVERT_T_PHY_TO_T_PHY_NS(
942 phy_timing->t_tasure, clk_ns, T_TASURE_HW_INC),
943 MIPI_T_TASURE_NS_MIN(DSI_CONVERT_T_PHY_TO_T_PHY_NS(
944 phy_timing->t_tlpx, clk_ns, T_TLPX_HW_INC)),
945 MIPI_T_TASURE_NS_MAX(DSI_CONVERT_T_PHY_TO_T_PHY_NS(
946 phy_timing->t_tlpx, clk_ns, T_TLPX_HW_INC)));
948 dev_warn(&dsi->dc->ndev->dev,
949 "dsi: TaSure mipi range violated\n");
/* Clock-lane checks: HS mode, or LP with HS clock forced on. */
954 if (lphs == DSI_LPHS_IN_HS_MODE ||
955 dsi->info.enable_hs_clock_on_lp_cmd_mode) {
957 DSI_CONVERT_T_PHY_TO_T_PHY_NS(
958 phy_timing->t_clktrail, clk_ns, T_CLKTRAIL_HW_INC),
959 MIPI_T_CLKTRAIL_NS_MIN, MIPI_T_CLKTRAIL_NS_MAX);
961 dev_warn(&dsi->dc->ndev->dev,
962 "dsi: ClkTrail mipi range violated\n");
967 DSI_CONVERT_T_PHY_TO_T_PHY_NS(
968 phy_timing->t_clkpost, clk_ns, T_CLKPOST_HW_INC),
969 MIPI_T_CLKPOST_NS_MIN(clk_ns), MIPI_T_CLKPOST_NS_MAX);
971 dev_warn(&dsi->dc->ndev->dev,
972 "dsi: ClkPost mipi range violated\n");
977 DSI_CONVERT_T_PHY_TO_T_PHY_NS(
978 phy_timing->t_clkzero, clk_ns, T_CLKZERO_HW_INC),
979 MIPI_T_CLKZERO_NS_MIN, MIPI_T_CLKZERO_NS_MAX);
981 dev_warn(&dsi->dc->ndev->dev,
982 "dsi: ClkZero mipi range violated\n");
987 DSI_CONVERT_T_PHY_TO_T_PHY_NS(
988 phy_timing->t_clkprepare, clk_ns, T_CLKPREPARE_HW_INC),
989 MIPI_T_CLKPREPARE_NS_MIN, MIPI_T_CLKPREPARE_NS_MAX);
991 dev_warn(&dsi->dc->ndev->dev,
992 "dsi: ClkPrepare mipi range violated\n");
997 DSI_CONVERT_T_PHY_TO_T_PHY_NS(
998 phy_timing->t_clkpre, clk_ns, T_CLKPRE_HW_INC),
999 MIPI_T_CLKPRE_NS_MIN, MIPI_T_CLKPRE_NS_MAX);
1001 dev_warn(&dsi->dc->ndev->dev,
1002 "dsi: ClkPre mipi range violated\n");
/* Combined Tclk-prepare + Tclk-zero bound. */
1007 DSI_CONVERT_T_PHY_TO_T_PHY_NS(
1008 phy_timing->t_clkprepare, clk_ns, T_CLKPREPARE_HW_INC) +
1009 DSI_CONVERT_T_PHY_TO_T_PHY_NS(
1010 phy_timing->t_clkzero, clk_ns, T_CLKZERO_HW_INC),
1011 MIPI_T_CLKPREPARE_ADD_CLKZERO_NS_MIN,
1012 MIPI_T_CLKPREPARE_ADD_CLKZERO_NS_MAX);
1014 dev_warn(&dsi->dc->ndev->dev,
1015 "dsi: ClkPrepare + ClkZero mipi range violated\n");
/*
 * Verify the horizontal blanking interval is long enough for the HS entry/
 * exit PHY overhead. Sums the data-lane (hs_t_phy_ns) and clock-lane
 * (clk_t_phy_ns) transition times in nanoseconds, adds the extra TLPX +
 * one byte-clock the hardware requires, scales by lane count, and compares
 * against the blanking time at the pixel rate. Only applies in HS mode.
 * NOTE(review): error/return lines after the dev_err are not visible here.
 */
1024 static int tegra_dsi_hs_phy_len(struct tegra_dc_dsi_data *dsi,
1025 struct dsi_phy_timing_inclk *phy_timing,
1026 u32 clk_ns, u8 lphs)
1032 struct tegra_dc_mode *modes;
1036 if (!(lphs == DSI_LPHS_IN_HS_MODE))
1039 modes = dsi->dc->out->modes;
/* Time to clock one pixel out of one lane, in ns. */
1040 t_pix_ns = clk_ns * BITS_PER_BYTE *
1041 dsi->pixel_scaler_mul / dsi->pixel_scaler_div;
/* Data-lane LP->HS->LP transition time. */
1044 DSI_CONVERT_T_PHY_TO_T_PHY_NS(
1045 phy_timing->t_tlpx, clk_ns, T_TLPX_HW_INC) +
1046 DSI_CONVERT_T_PHY_TO_T_PHY_NS(
1047 phy_timing->t_hsprepare, clk_ns, T_HSPREPARE_HW_INC) +
1048 DSI_CONVERT_T_PHY_TO_T_PHY_NS(
1049 phy_timing->t_datzero, clk_ns, T_DATZERO_HW_INC) +
1050 DSI_CONVERT_T_PHY_TO_T_PHY_NS(
1051 phy_timing->t_hstrail, clk_ns, T_HSTRAIL_HW_INC) +
1052 DSI_CONVERT_T_PHY_TO_T_PHY_NS(
1053 phy_timing->t_hsdexit, clk_ns, T_HSEXIT_HW_INC);
/* Clock-lane transition time. */
1056 DSI_CONVERT_T_PHY_TO_T_PHY_NS(
1057 phy_timing->t_clkpost, clk_ns, T_CLKPOST_HW_INC) +
1058 DSI_CONVERT_T_PHY_TO_T_PHY_NS(
1059 phy_timing->t_clktrail, clk_ns, T_CLKTRAIL_HW_INC) +
1060 DSI_CONVERT_T_PHY_TO_T_PHY_NS(
1061 phy_timing->t_hsdexit, clk_ns, T_HSEXIT_HW_INC) +
1062 DSI_CONVERT_T_PHY_TO_T_PHY_NS(
1063 phy_timing->t_tlpx, clk_ns, T_TLPX_HW_INC) +
1064 DSI_CONVERT_T_PHY_TO_T_PHY_NS(
1065 phy_timing->t_clkprepare, clk_ns, T_CLKPREPARE_HW_INC) +
1066 DSI_CONVERT_T_PHY_TO_T_PHY_NS(
1067 phy_timing->t_clkzero, clk_ns, T_CLKZERO_HW_INC) +
1068 DSI_CONVERT_T_PHY_TO_T_PHY_NS(
1069 phy_timing->t_clkpre, clk_ns, T_CLKPRE_HW_INC);
1071 h_blank_ns = t_pix_ns * (modes->h_sync_width + modes->h_back_porch +
1072 modes->h_front_porch);
1074 /* Extra tlpx and byte cycle required by dsi HW */
1075 t_phy_ns = dsi->info.n_data_lanes * (hs_t_phy_ns + clk_t_phy_ns +
1076 DSI_CONVERT_T_PHY_TO_T_PHY_NS(
1077 phy_timing->t_tlpx, clk_ns, T_TLPX_HW_INC) +
1078 clk_ns * BITS_PER_BYTE);
1080 if (h_blank_ns < t_phy_ns) {
1082 dev_err(&dsi->dc->ndev->dev,
1083 "dsi: Hblank is smaller than HS trans phy timing\n");
/*
 * Run all PHY-timing sanity checks: MIPI min/max ranges (warn only) and the
 * Hblank-vs-HS-transition length check (hard error). Return path not
 * visible in this listing.
 */
1092 static int tegra_dsi_constraint_phy_timing(struct tegra_dc_dsi_data *dsi,
1093 struct dsi_phy_timing_inclk *phy_timing,
1094 u32 clk_ns, u8 lphs)
1098 err = tegra_dsi_mipi_phy_timing_range(dsi, phy_timing, clk_ns, lphs);
1100 dev_warn(&dsi->dc->ndev->dev, "dsi: mipi range violated\n");
1104 err = tegra_dsi_hs_phy_len(dsi, phy_timing, clk_ns, lphs);
1106 dev_err(&dsi->dc->ndev->dev, "dsi: Hblank too short\n");
1110 /* TODO: add more contraints */
/*
 * Compute, validate and program the PHY timing registers
 * (DSI_PHY_TIMING_0/1/2 and DSI_BTA_TIMING), then cache the result in
 * dsi->phy_timing. In ganged mode on non-silicon builds, every timing gets
 * one extra HW increment (t_hstrail gets +3 more) to compensate the FPGA.
 */
1115 static void tegra_dsi_set_phy_timing(struct tegra_dc_dsi_data *dsi, u8 lphs)
1118 struct dsi_phy_timing_inclk phy_timing = dsi->phy_timing;
1120 tegra_dsi_get_phy_timing
1121 (dsi, &phy_timing, dsi->current_bit_clk_ns, lphs);
1123 tegra_dsi_constraint_phy_timing(dsi, &phy_timing,
1124 dsi->current_bit_clk_ns, lphs);
1126 if (dsi->info.ganged_type) {
1127 #ifndef CONFIG_TEGRA_SILICON_PLATFORM
1128 phy_timing.t_hsdexit += T_HSEXIT_HW_INC;
1129 phy_timing.t_hstrail += T_HSTRAIL_HW_INC + 3;
1130 phy_timing.t_datzero += T_DATZERO_HW_INC;
1131 phy_timing.t_hsprepare += T_HSPREPARE_HW_INC;
1133 phy_timing.t_clktrail += T_CLKTRAIL_HW_INC;
1134 phy_timing.t_clkpost += T_CLKPOST_HW_INC;
1135 phy_timing.t_clkzero += T_CLKZERO_HW_INC;
1136 phy_timing.t_tlpx += T_TLPX_HW_INC;
1138 phy_timing.t_clkprepare += T_CLKPREPARE_HW_INC;
1139 phy_timing.t_clkpre += T_CLKPRE_HW_INC;
1140 phy_timing.t_wakeup += T_WAKEUP_HW_INC;
1142 phy_timing.t_taget += T_TAGET_HW_INC;
1143 phy_timing.t_tasure += T_TASURE_HW_INC;
1144 phy_timing.t_tago += T_TAGO_HW_INC;
/* Pack the four timing registers and write them out. */
1147 val = DSI_PHY_TIMING_0_THSDEXIT(phy_timing.t_hsdexit) |
1148 DSI_PHY_TIMING_0_THSTRAIL(phy_timing.t_hstrail) |
1149 DSI_PHY_TIMING_0_TDATZERO(phy_timing.t_datzero) |
1150 DSI_PHY_TIMING_0_THSPREPR(phy_timing.t_hsprepare);
1151 tegra_dsi_writel(dsi, val, DSI_PHY_TIMING_0);
1153 val = DSI_PHY_TIMING_1_TCLKTRAIL(phy_timing.t_clktrail) |
1154 DSI_PHY_TIMING_1_TCLKPOST(phy_timing.t_clkpost) |
1155 DSI_PHY_TIMING_1_TCLKZERO(phy_timing.t_clkzero) |
1156 DSI_PHY_TIMING_1_TTLPX(phy_timing.t_tlpx);
1157 tegra_dsi_writel(dsi, val, DSI_PHY_TIMING_1);
1159 val = DSI_PHY_TIMING_2_TCLKPREPARE(phy_timing.t_clkprepare) |
1160 DSI_PHY_TIMING_2_TCLKPRE(phy_timing.t_clkpre) |
1161 DSI_PHY_TIMING_2_TWAKEUP(phy_timing.t_wakeup);
1162 tegra_dsi_writel(dsi, val, DSI_PHY_TIMING_2);
1164 val = DSI_BTA_TIMING_TTAGET(phy_timing.t_taget) |
1165 DSI_BTA_TIMING_TTASURE(phy_timing.t_tasure) |
1166 DSI_BTA_TIMING_TTAGO(phy_timing.t_tago);
1167 tegra_dsi_writel(dsi, val, DSI_BTA_TIMING);
1169 dsi->phy_timing = phy_timing;
/*
 * Compute the start-of-line (SOL) delay for burst mode: the DSI-to-pixel
 * clock ratio (in byte-clock terms, scaled by 1000 to keep fractions) times
 * the line width up to active start, minus the active data time. If the
 * result exceeds the 480*4 FIFO limit it is clamped and the target HS clock
 * is re-derived (mipi_clk_adj_kHz) to fit. Return statement not visible in
 * this listing.
 */
1172 static u32 tegra_dsi_sol_delay_burst(struct tegra_dc *dc,
1173 struct tegra_dc_dsi_data *dsi)
1175 u32 dsi_to_pixel_clk_ratio;
1178 u32 mipi_clk_adj_kHz = 0;
1180 struct tegra_dc_mode *dc_modes = &dc->mode;
1182 /* Get Fdsi/Fpixel ration (note: Fdsi is in bit format) */
1183 dsi_to_pixel_clk_ratio = (dsi->current_dsi_clk_khz * 2 +
1184 dsi->default_pixel_clk_khz - 1) / dsi->default_pixel_clk_khz;
1186 /* Convert Fdsi to byte format */
1187 dsi_to_pixel_clk_ratio *= 1000/8;
1189 /* Multiplying by 1000 so that we don't loose the fraction part */
1190 temp = dc_modes->h_active * 1000;
1191 temp1 = dc_modes->h_active + dc_modes->h_back_porch +
1192 dc_modes->h_sync_width;
1194 sol_delay = temp1 * dsi_to_pixel_clk_ratio -
1195 temp * dsi->pixel_scaler_mul /
1196 (dsi->pixel_scaler_div * dsi->info.n_data_lanes);
1198 /* Do rounding on sol delay */
1199 sol_delay = (sol_delay + 1000 - 1)/1000;
1202 * 1. find out the correct sol fifo depth to use
1203 * 2. verify with hw about the clamping function
1205 if (sol_delay > (480 * 4)) {
1206 sol_delay = (480 * 4);
1207 mipi_clk_adj_kHz = sol_delay +
1208 (dc_modes->h_active * dsi->pixel_scaler_mul) /
1209 (dsi->info.n_data_lanes * dsi->pixel_scaler_div);
1211 mipi_clk_adj_kHz *= (dsi->default_pixel_clk_khz / temp1);
1213 mipi_clk_adj_kHz *= 4;
1216 dsi->target_hs_clk_khz = mipi_clk_adj_kHz;
/*
 * Program the DSI_SOL_DELAY register.
 * Non-ganged: use a fixed FIFO-latency value for non-burst modes, or
 * tegra_dsi_sol_delay_burst() for burst modes. Ganged: account for the
 * internal pixel-pipeline delays and the per-controller share of the line.
 * NOTE(review): several source lines (declarations, braces, else branches)
 * are missing from this listing.
 */
1221 static void tegra_dsi_set_sol_delay(struct tegra_dc *dc,
1222 struct tegra_dc_dsi_data *dsi)
1226 u32 h_width_byte_clk;
1228 u32 h_width_ganged_byte_clk;
1229 u8 n_data_lanes_this_cont = 0;
1230 u8 n_data_lanes_ganged = 0;
1232 if (!(dsi->info.ganged_type)) {
1233 if (dsi->info.video_burst_mode ==
1234 TEGRA_DSI_VIDEO_NONE_BURST_MODE ||
1235 dsi->info.video_burst_mode ==
1236 TEGRA_DSI_VIDEO_NONE_BURST_MODE_WITH_SYNC_END) {
/* Fixed latency (in pixel clocks) converted to byte clocks. */
1237 #define VIDEO_FIFO_LATENCY_PIXEL_CLK 8
1238 sol_delay = VIDEO_FIFO_LATENCY_PIXEL_CLK *
1239 dsi->pixel_scaler_mul / dsi->pixel_scaler_div;
1240 #undef VIDEO_FIFO_LATENCY_PIXEL_CLK
1241 dsi->status.clk_burst = DSI_CLK_BURST_NONE_BURST;
1243 sol_delay = tegra_dsi_sol_delay_burst(dc, dsi);
1244 dsi->status.clk_burst = DSI_CLK_BURST_BURST_MODE;
/* Ganged-mode path: sum of internal pipeline delays, in pixel clocks. */
1247 #define SOL_TO_VALID_PIX_CLK_DELAY 4
1248 #define VALID_TO_FIFO_PIX_CLK_DELAY 4
1249 #define FIFO_WR_PIX_CLK_DELAY 2
1250 #define FIFO_RD_BYTE_CLK_DELAY 6
1251 #define TOT_INTERNAL_PIX_DELAY (SOL_TO_VALID_PIX_CLK_DELAY + \
1252 VALID_TO_FIFO_PIX_CLK_DELAY + \
1253 FIFO_WR_PIX_CLK_DELAY)
/* Convert the pixel-clock delays to byte clocks across all data lanes. */
1255 internal_delay = DIV_ROUND_UP(
1256 TOT_INTERNAL_PIX_DELAY * dsi->pixel_scaler_mul,
1257 dsi->pixel_scaler_div * dsi->info.n_data_lanes)
1258 + FIFO_RD_BYTE_CLK_DELAY;
1260 h_width_pixels = dc->mode.h_sync_width +
1261 dc->mode.h_back_porch +
1263 dc->mode.h_front_porch;
1265 h_width_byte_clk = DIV_ROUND_UP(h_width_pixels *
1266 dsi->pixel_scaler_mul,
1267 dsi->pixel_scaler_div *
1268 dsi->info.n_data_lanes);
/* Symmetric ganging: each controller drives half of the data lanes. */
1270 if (dsi->info.ganged_type ==
1271 TEGRA_DSI_GANGED_SYMMETRIC_LEFT_RIGHT ||
1272 dsi->info.ganged_type ==
1273 TEGRA_DSI_GANGED_SYMMETRIC_EVEN_ODD) {
1274 n_data_lanes_this_cont = dsi->info.n_data_lanes / 2;
1275 n_data_lanes_ganged = dsi->info.n_data_lanes;
1278 h_width_ganged_byte_clk = DIV_ROUND_UP(
1279 n_data_lanes_this_cont *
1281 n_data_lanes_ganged);
1283 sol_delay = h_width_byte_clk - h_width_ganged_byte_clk +
/* Command-mode panels get an extra fixed margin of 20 byte clocks. */
1285 sol_delay = (dsi->info.video_data_type ==
1286 TEGRA_DSI_VIDEO_TYPE_COMMAND_MODE) ?
1287 sol_delay + 20: sol_delay;
1289 #undef SOL_TO_VALID_PIX_CLK_DELAY
1290 #undef VALID_TO_FIFO_PIX_CLK_DELAY
1291 #undef FIFO_WR_PIX_CLK_DELAY
1292 #undef FIFO_RD_BYTE_CLK_DELAY
1293 #undef TOT_INTERNAL_PIX_DELAY
1296 tegra_dsi_writel(dsi, DSI_SOL_DELAY_SOL_DELAY(sol_delay),
/*
 * Program the DSI timeout registers (DSI_TIMEOUT_0/1, DSI_TO_TALLY).
 * The HTX timeout is scaled from the per-frame byte count; the panel-reset
 * timeout comes from platform data when provided, otherwise a default.
 */
1300 static void tegra_dsi_set_timeout(struct tegra_dc_dsi_data *dsi)
1303 u32 bytes_per_frame;
/* Bytes per frame = bit clock (2x DDR) / (refresh * 8 bits per byte). */
1306 /* TODO: verify the following equation */
1307 bytes_per_frame = dsi->current_dsi_clk_khz * 1000 * 2 /
1308 (dsi->info.refresh_rate * 8);
1309 timeout = bytes_per_frame / DSI_CYCLE_COUNTER_VALUE;
/* Add margin and truncate to the 16-bit HTX_TO field. */
1310 timeout = (timeout + DSI_HTX_TO_MARGIN) & 0xffff;
1312 val = DSI_TIMEOUT_0_LRXH_TO(DSI_LRXH_TO_VALUE) |
1313 DSI_TIMEOUT_0_HTX_TO(timeout);
1314 tegra_dsi_writel(dsi, val, DSI_TIMEOUT_0);
/* Panel reset timeout: board-specified msec converted to bit clocks. */
1316 if (dsi->info.panel_reset_timeout_msec)
1317 timeout = (dsi->info.panel_reset_timeout_msec * 1000*1000)
1318 / dsi->current_bit_clk_ns;
1320 timeout = DSI_PR_TO_VALUE;
1322 val = DSI_TIMEOUT_1_PR_TO(timeout) |
1323 DSI_TIMEOUT_1_TA_TO(DSI_TA_TO_VALUE);
1324 tegra_dsi_writel(dsi, val, DSI_TIMEOUT_1);
1326 val = DSI_TO_TALLY_P_RESET_STATUS(IN_RESET) |
1327 DSI_TO_TALLY_TA_TALLY(DSI_TA_TALLY_VALUE)|
1328 DSI_TO_TALLY_LRXH_TALLY(DSI_LRXH_TALLY_VALUE)|
1329 DSI_TO_TALLY_HTX_TALLY(DSI_HTX_TALLY_VALUE);
1330 tegra_dsi_writel(dsi, val, DSI_TO_TALLY);
/*
 * Program packet lengths (DSI_PKT_LEN_0_1 .. 6_7) for ganged mode.
 * Each controller transmits roughly half of the active pixels; the second
 * controller (controller_index != 0) takes the remainder so odd widths are
 * handled. The HFP packet absorbs the rest of the line minus header bytes.
 */
1333 static void tegra_dsi_setup_ganged_mode_pkt_length(struct tegra_dc *dc,
1334 struct tegra_dc_dsi_data *dsi)
1336 u32 hact_pkt_len_pix_orig = dc->mode.h_active;
1337 u32 hact_pkt_len_pix = 0;
1338 u32 hact_pkt_len_bytes = 0;
1339 u32 hfp_pkt_len_bytes = 0;
1340 u32 pix_per_line_orig = 0;
1341 u32 pix_per_line = 0;
1344 /* hsync + hact + hfp = (4) + (4+2) + (4+2) */
1345 #define HEADER_OVERHEAD 16
1347 pix_per_line_orig = dc->mode.h_sync_width + dc->mode.h_back_porch +
1348 dc->mode.h_active + dc->mode.h_front_porch;
1350 if (dsi->info.ganged_type == TEGRA_DSI_GANGED_SYMMETRIC_LEFT_RIGHT ||
1351 dsi->info.ganged_type == TEGRA_DSI_GANGED_SYMMETRIC_EVEN_ODD) {
/* First controller rounds up; the second takes what remains. */
1352 hact_pkt_len_pix = DIV_ROUND_UP(hact_pkt_len_pix_orig, 2);
1353 pix_per_line = DIV_ROUND_UP(pix_per_line_orig, 2);
1354 if (dsi->controller_index) {
1356 hact_pkt_len_pix_orig - hact_pkt_len_pix;
1357 pix_per_line = pix_per_line_orig - pix_per_line;
1359 hact_pkt_len_bytes = hact_pkt_len_pix *
1360 dsi->pixel_scaler_mul / dsi->pixel_scaler_div;
1361 hfp_pkt_len_bytes = pix_per_line *
1362 dsi->pixel_scaler_mul / dsi->pixel_scaler_div -
1363 hact_pkt_len_bytes - HEADER_OVERHEAD;
/* Only LENGTH_3 (active) and LENGTH_4 (front porch) carry payload. */
1366 val = DSI_PKT_LEN_0_1_LENGTH_0(0) |
1367 DSI_PKT_LEN_0_1_LENGTH_1(0);
1368 tegra_dsi_writel(dsi, val, DSI_PKT_LEN_0_1);
1370 val = DSI_PKT_LEN_2_3_LENGTH_2(0x0) |
1371 DSI_PKT_LEN_2_3_LENGTH_3(hact_pkt_len_bytes);
1372 tegra_dsi_writel(dsi, val, DSI_PKT_LEN_2_3);
1374 val = DSI_PKT_LEN_4_5_LENGTH_4(hfp_pkt_len_bytes) |
1375 DSI_PKT_LEN_4_5_LENGTH_5(0);
1376 tegra_dsi_writel(dsi, val, DSI_PKT_LEN_4_5);
1378 val = DSI_PKT_LEN_6_7_LENGTH_6(0) |
1379 DSI_PKT_LEN_6_7_LENGTH_7(0);
1380 tegra_dsi_writel(dsi, val, DSI_PKT_LEN_6_7);
1382 #undef HEADER_OVERHEAD
/*
 * Program packet lengths for (non-ganged) video mode.
 * Horizontal timing fields are converted from pixels to bytes via
 * pixel_scaler_mul/div, then trimmed by the per-packet header overheads.
 */
1385 static void tegra_dsi_setup_video_mode_pkt_length(struct tegra_dc *dc,
1386 struct tegra_dc_dsi_data *dsi)
1394 hact_pkt_len = dc->mode.h_active * dsi->pixel_scaler_mul /
1395 dsi->pixel_scaler_div;
1396 hsa_pkt_len = dc->mode.h_sync_width * dsi->pixel_scaler_mul /
1397 dsi->pixel_scaler_div;
1398 hbp_pkt_len = dc->mode.h_back_porch * dsi->pixel_scaler_mul /
1399 dsi->pixel_scaler_div;
1400 hfp_pkt_len = dc->mode.h_front_porch * dsi->pixel_scaler_mul /
1401 dsi->pixel_scaler_div;
/* Without a sync-end pulse, HSA time is folded into the back porch. */
1403 if (dsi->info.video_burst_mode !=
1404 TEGRA_DSI_VIDEO_NONE_BURST_MODE_WITH_SYNC_END)
1405 hbp_pkt_len += hsa_pkt_len;
1407 hsa_pkt_len -= DSI_HSYNC_BLNK_PKT_OVERHEAD;
1408 hbp_pkt_len -= DSI_HBACK_PORCH_PKT_OVERHEAD;
1409 hfp_pkt_len -= DSI_HFRONT_PORCH_PKT_OVERHEAD;
1411 val = DSI_PKT_LEN_0_1_LENGTH_0(0) |
1412 DSI_PKT_LEN_0_1_LENGTH_1(hsa_pkt_len);
1413 tegra_dsi_writel(dsi, val, DSI_PKT_LEN_0_1);
1415 val = DSI_PKT_LEN_2_3_LENGTH_2(hbp_pkt_len) |
1416 DSI_PKT_LEN_2_3_LENGTH_3(hact_pkt_len);
1417 tegra_dsi_writel(dsi, val, DSI_PKT_LEN_2_3);
1419 val = DSI_PKT_LEN_4_5_LENGTH_4(hfp_pkt_len) |
1420 DSI_PKT_LEN_4_5_LENGTH_5(0);
1421 tegra_dsi_writel(dsi, val, DSI_PKT_LEN_4_5);
/* LENGTH_7 = 0x0f0f: meaning not evident from this file — TODO confirm. */
1423 val = DSI_PKT_LEN_6_7_LENGTH_6(0) | DSI_PKT_LEN_6_7_LENGTH_7(0x0f0f);
1424 tegra_dsi_writel(dsi, val, DSI_PKT_LEN_6_7);
/*
 * Program packet lengths for command (DCS write-memory) mode.
 * act_bytes = active line in bytes + 1 (presumably for the DCS command
 * byte — TODO confirm against the TRM).
 */
1427 static void tegra_dsi_setup_cmd_mode_pkt_length(struct tegra_dc *dc,
1428 struct tegra_dc_dsi_data *dsi)
1431 unsigned long act_bytes;
1433 act_bytes = dc->mode.h_active * dsi->pixel_scaler_mul /
1434 dsi->pixel_scaler_div + 1;
1436 val = DSI_PKT_LEN_0_1_LENGTH_0(0) | DSI_PKT_LEN_0_1_LENGTH_1(0);
1437 tegra_dsi_writel(dsi, val, DSI_PKT_LEN_0_1);
1439 val = DSI_PKT_LEN_2_3_LENGTH_2(0) | DSI_PKT_LEN_2_3_LENGTH_3(act_bytes);
1440 tegra_dsi_writel(dsi, val, DSI_PKT_LEN_2_3);
1442 val = DSI_PKT_LEN_4_5_LENGTH_4(0) | DSI_PKT_LEN_4_5_LENGTH_5(act_bytes);
1443 tegra_dsi_writel(dsi, val, DSI_PKT_LEN_4_5);
1445 val = DSI_PKT_LEN_6_7_LENGTH_6(0) | DSI_PKT_LEN_6_7_LENGTH_7(0x0f0f);
1446 tegra_dsi_writel(dsi, val, DSI_PKT_LEN_6_7);
/*
 * Dispatch packet-length programming: skipped entirely for host-driven
 * mode; otherwise select ganged / video / command-mode setup.
 */
1449 static void tegra_dsi_set_pkt_length(struct tegra_dc *dc,
1450 struct tegra_dc_dsi_data *dsi)
1452 if (dsi->driven_mode == TEGRA_DSI_DRIVEN_BY_HOST)
1455 if (dsi->info.video_data_type == TEGRA_DSI_VIDEO_TYPE_VIDEO_MODE) {
1456 if (dsi->info.ganged_type)
1457 tegra_dsi_setup_ganged_mode_pkt_length(dc, dsi);
1459 tegra_dsi_setup_video_mode_pkt_length(dc, dsi);
1461 tegra_dsi_setup_cmd_mode_pkt_length(dc, dsi);
/*
 * Select and program the DSI packet sequence.
 * The sequence table is chosen from the board-supplied override, the video
 * data type, and the burst mode; the RGB packet ID (per pixel format) is
 * OR'd into the PKT_SEQ_3/5 LO or HI registers as appropriate.
 * NOTE(review): break statements and some assignments are missing from
 * this listing.
 */
1465 static void tegra_dsi_set_pkt_seq(struct tegra_dc *dc,
1466 struct tegra_dc_dsi_data *dsi)
1470 u32 pkt_seq_3_5_rgb_lo;
1471 u32 pkt_seq_3_5_rgb_hi;
/* Host-driven transmission does not use the DC packet sequencer. */
1476 if (dsi->driven_mode == TEGRA_DSI_DRIVEN_BY_HOST)
/* Map the panel pixel format to the DSI RGB packet data ID. */
1479 switch (dsi->info.pixel_format) {
1480 case TEGRA_DSI_PIXEL_FORMAT_16BIT_P:
1481 rgb_info = CMD_RGB_16BPP;
1483 case TEGRA_DSI_PIXEL_FORMAT_18BIT_P:
1484 rgb_info = CMD_RGB_18BPP;
1486 case TEGRA_DSI_PIXEL_FORMAT_18BIT_NP:
1487 rgb_info = CMD_RGB_18BPPNP;
1489 case TEGRA_DSI_PIXEL_FORMAT_24BIT_P:
1491 rgb_info = CMD_RGB_24BPP;
1495 pkt_seq_3_5_rgb_lo = 0;
1496 pkt_seq_3_5_rgb_hi = 0;
/* Board data may override the packet sequence completely. */
1497 if (dsi->info.pkt_seq)
1498 pkt_seq = dsi->info.pkt_seq;
1499 else if (dsi->info.video_data_type == TEGRA_DSI_VIDEO_TYPE_COMMAND_MODE)
1500 pkt_seq = dsi_pkt_seq_cmd_mode;
1502 switch (dsi->info.video_burst_mode) {
1503 case TEGRA_DSI_VIDEO_BURST_MODE_LOWEST_SPEED:
1504 case TEGRA_DSI_VIDEO_BURST_MODE_LOW_SPEED:
1505 case TEGRA_DSI_VIDEO_BURST_MODE_MEDIUM_SPEED:
1506 case TEGRA_DSI_VIDEO_BURST_MODE_FAST_SPEED:
1507 case TEGRA_DSI_VIDEO_BURST_MODE_FASTEST_SPEED:
1508 pkt_seq_3_5_rgb_lo =
1509 DSI_PKT_SEQ_3_LO_PKT_32_ID(rgb_info);
1510 if (!dsi->info.no_pkt_seq_eot)
1511 pkt_seq = dsi_pkt_seq_video_burst;
1513 pkt_seq = dsi_pkt_seq_video_burst_no_eot;
1515 case TEGRA_DSI_VIDEO_NONE_BURST_MODE_WITH_SYNC_END:
1516 pkt_seq_3_5_rgb_hi =
1517 DSI_PKT_SEQ_3_HI_PKT_34_ID(rgb_info);
1518 pkt_seq = dsi_pkt_seq_video_non_burst_syne;
1520 case TEGRA_DSI_VIDEO_NONE_BURST_MODE:
1522 if (dsi->info.ganged_type) {
1523 pkt_seq_3_5_rgb_lo =
1524 DSI_PKT_SEQ_3_LO_PKT_31_ID(rgb_info);
1526 dsi_pkt_seq_video_non_burst_no_eot_no_lp_no_hbp;
1528 pkt_seq_3_5_rgb_lo =
1529 DSI_PKT_SEQ_3_LO_PKT_32_ID(rgb_info);
1530 pkt_seq = dsi_pkt_seq_video_non_burst;
/* Write the chosen sequence, patching in the RGB packet ID fields. */
1536 for (i = 0; i < NUMOF_PKT_SEQ; i++) {
1538 reg = dsi_pkt_seq_reg[i];
1539 if ((reg == DSI_PKT_SEQ_3_LO) || (reg == DSI_PKT_SEQ_5_LO))
1540 val |= pkt_seq_3_5_rgb_lo;
1541 if ((reg == DSI_PKT_SEQ_3_HI) || (reg == DSI_PKT_SEQ_5_HI))
1542 val |= pkt_seq_3_5_rgb_hi;
1543 tegra_dsi_writel(dsi, val, reg);
/*
 * Check DSI_STATUS for video-FIFO overflow/underflow, warn if either flag
 * is set, and reset the FIFO status via DSI_HOST_DSI_CONTROL.
 */
1547 static void tegra_dsi_reset_underflow_overflow
1548 (struct tegra_dc_dsi_data *dsi)
1552 val = tegra_dsi_readl(dsi, DSI_STATUS);
1553 val &= (DSI_STATUS_LB_OVERFLOW(0x1) | DSI_STATUS_LB_UNDERFLOW(0x1));
1555 if (val & DSI_STATUS_LB_OVERFLOW(0x1))
1556 dev_warn(&dsi->dc->ndev->dev,
1557 "dsi: video fifo overflow. Resetting flag\n");
1558 if (val & DSI_STATUS_LB_UNDERFLOW(0x1))
1559 dev_warn(&dsi->dc->ndev->dev,
1560 "dsi: video fifo underflow. Resetting flag\n");
/* Self-clearing FIFO status reset — see TRM for exact semantics. */
1561 val = tegra_dsi_readl(dsi, DSI_HOST_DSI_CONTROL);
1562 val |= DSI_HOST_CONTROL_FIFO_STAT_RESET(0x1);
1563 tegra_dsi_writel(dsi, val, DSI_HOST_DSI_CONTROL);
/*
 * Soft-reset the DSI block: wait (up to DSI_IDLE_TIMEOUT line periods)
 * for the interface to go idle, toggle the legacy DSI enable bit, and
 * clear a stale host trigger that HW does not clear on its own.
 * NOTE(review): some lines (udelay calls, closing braces) are missing
 * from this listing.
 */
1568 static void tegra_dsi_soft_reset(struct tegra_dc_dsi_data *dsi)
1572 u32 frame_period = DIV_ROUND_UP(S_TO_MS(1), dsi->info.refresh_rate);
1573 struct tegra_dc_mode mode = dsi->dc->mode;
/* One scan-line period in microseconds, derived from the vertical total. */
1574 u32 line_period = DIV_ROUND_UP(
1575 MS_TO_US(frame_period),
1576 mode.v_sync_width + mode.v_back_porch +
1577 mode.v_active + mode.v_front_porch);
1578 u32 timeout_cnt = 0;
1580 #define DSI_IDLE_TIMEOUT 1000
1582 val = tegra_dsi_readl(dsi, DSI_STATUS);
1583 while (!(val & DSI_STATUS_IDLE(0x1))) {
1585 udelay(line_period);
1586 val = tegra_dsi_readl(dsi, DSI_STATUS);
1587 if (timeout_cnt++ > DSI_IDLE_TIMEOUT) {
1588 dev_warn(&dsi->dc->ndev->dev, "dsi not idle when soft reset\n");
/* Disable, then re-enable the legacy DSI power control. */
1593 tegra_dsi_writel(dsi,
1594 DSI_POWER_CONTROL_LEG_DSI_ENABLE(TEGRA_DSI_DISABLE),
1596 /* stabilization delay */
1599 tegra_dsi_writel(dsi,
1600 DSI_POWER_CONTROL_LEG_DSI_ENABLE(TEGRA_DSI_ENABLE),
1602 /* stabilization delay */
1605 /* dsi HW does not clear host trigger bit automatically
1606 * on dsi interface disable if host fifo is empty or in mid
1607 * of host transmission
1609 trigger = tegra_dsi_readl(dsi, DSI_TRIGGER);
1611 tegra_dsi_writel(dsi, 0x0, DSI_TRIGGER);
1613 #undef DSI_IDLE_TIMEOUT
/*
 * Stop the DC pixel stream into DSI: mask the MSF interrupt (one-shot
 * mode only), put DC into STOP mode, clear the window options, and latch
 * the change. DC clock is held for the duration of the register writes.
 */
1616 static void tegra_dsi_stop_dc_stream(struct tegra_dc *dc,
1617 struct tegra_dc_dsi_data *dsi)
1619 /* extra reference to dc clk */
1620 clk_prepare_enable(dc->clk);
1622 /* Mask the MSF interrupt. */
1623 if (dc->out->flags & TEGRA_DC_OUT_ONE_SHOT_MODE)
1624 tegra_dc_mask_interrupt(dc, MSF_INT);
1626 tegra_dc_writel(dc, DISP_CTRL_MODE_STOP, DC_CMD_DISPLAY_COMMAND);
1627 tegra_dc_writel(dc, 0, DC_DISP_DISP_WIN_OPTIONS);
1628 tegra_dc_writel(dc, GENERAL_UPDATE, DC_CMD_STATE_CONTROL);
1629 tegra_dc_writel(dc, GENERAL_ACT_REQ , DC_CMD_STATE_CONTROL);
1631 /* balance extra dc clk reference */
1632 clk_disable_unprepare(dc->clk);
1634 dsi->status.dc_stream = DSI_DC_STREAM_DISABLE;
/*
 * Block until the next frame-end interrupt (or timeout after
 * timeout_n_frames frame periods), then wait v_ref_to_sync extra lines.
 * Returns the wait_for_completion result (0 on timeout) — presumably;
 * the return statement is not visible in this listing.
 */
1637 static int tegra_dsi_wait_frame_end(struct tegra_dc *dc,
1638 struct tegra_dc_dsi_data *dsi,
1639 u32 timeout_n_frames)
1643 u32 frame_period = DIV_ROUND_UP(S_TO_MS(1), dsi->info.refresh_rate);
1644 struct tegra_dc_mode mode = dc->mode;
1645 u32 line_period = DIV_ROUND_UP(
1646 MS_TO_US(frame_period),
1647 mode.v_sync_width + mode.v_back_porch +
1648 mode.v_active + mode.v_front_porch);
/* Caller must allow >= 2 frames: the stop only takes effect next frame. */
1650 if (timeout_n_frames < 2)
1651 dev_WARN(&dc->ndev->dev,
1652 "dsi: to stop at next frame give at least 2 frame delay\n");
1654 INIT_COMPLETION(dc->frame_end_complete);
1656 /* unmask frame end interrupt */
1657 val = tegra_dc_unmask_interrupt(dc, FRAME_END_INT);
1659 timeout = wait_for_completion_interruptible_timeout(
1660 &dc->frame_end_complete,
1661 msecs_to_jiffies(timeout_n_frames * frame_period));
1663 /* reinstate interrupt mask */
1664 tegra_dc_writel(dc, val, DC_CMD_INT_MASK);
1666 /* wait for v_ref_to_sync no. of lines after frame end interrupt */
1667 udelay(mode.v_ref_to_sync * line_period);
/*
 * Stop the DC stream and wait for the frame to finish, then soft-reset
 * the DSI block and clear any FIFO underflow/overflow status. Warns if
 * the frame-end wait timed out.
 */
1672 static void tegra_dsi_stop_dc_stream_at_frame_end(struct tegra_dc *dc,
1673 struct tegra_dc_dsi_data *dsi,
1674 u32 timeout_n_frames)
1678 tegra_dsi_stop_dc_stream(dc, dsi);
1680 timeout = tegra_dsi_wait_frame_end(dc, dsi, timeout_n_frames);
1682 tegra_dsi_soft_reset(dsi);
1685 dev_warn(&dc->ndev->dev,
1686 "DC doesn't stop at end of frame.\n");
1688 tegra_dsi_reset_underflow_overflow(dsi);
/*
 * Start the DC pixel stream into DSI. Enables the DSI output window and
 * display power, then configures either one-shot (non-continuous, MSF
 * triggered) or continuous display mode and latches the state.
 */
1691 static void tegra_dsi_start_dc_stream(struct tegra_dc *dc,
1692 struct tegra_dc_dsi_data *dsi)
1696 /* take extra reference to dc clk */
1697 clk_prepare_enable(dc->clk);
1699 tegra_dc_writel(dc, DSI_ENABLE, DC_DISP_DISP_WIN_OPTIONS);
1701 /* TODO: clean up */
1702 tegra_dc_writel(dc, PW0_ENABLE | PW1_ENABLE | PW2_ENABLE | PW3_ENABLE |
1703 PW4_ENABLE | PM0_ENABLE | PM1_ENABLE,
1704 DC_CMD_DISPLAY_POWER_CONTROL);
1706 /* Configure one-shot mode or continuous mode */
1707 if (dc->out->flags & TEGRA_DC_OUT_ONE_SHOT_MODE) {
1708 /* disable LSPI/LCD_DE output */
1709 val = PIN_OUTPUT_LSPI_OUTPUT_DIS;
1710 tegra_dc_writel(dc, val, DC_COM_PIN_OUTPUT_ENABLE3);
1712 /* enable MSF & set MSF polarity */
1713 val = MSF_ENABLE | MSF_LSPI;
1714 if (!dsi->info.te_polarity_low)
1715 val |= MSF_POLARITY_HIGH;
1717 val |= MSF_POLARITY_LOW;
1718 tegra_dc_writel(dc, val, DC_CMD_DISPLAY_COMMAND_OPTION0);
1720 /* set non-continuous mode */
1721 tegra_dc_writel(dc, DISP_CTRL_MODE_NC_DISPLAY,
1722 DC_CMD_DISPLAY_COMMAND);
1723 tegra_dc_writel(dc, GENERAL_UPDATE, DC_CMD_STATE_CONTROL);
/* NC_HOST_TRIG kicks the first (host-triggered) frame. */
1724 tegra_dc_writel(dc, GENERAL_ACT_REQ | NC_HOST_TRIG,
1725 DC_CMD_STATE_CONTROL);
1727 /* Unmask the MSF interrupt. */
1728 tegra_dc_unmask_interrupt(dc, MSF_INT);
1730 /* set continuous mode */
1731 tegra_dc_writel(dc, DISP_CTRL_MODE_C_DISPLAY,
1732 DC_CMD_DISPLAY_COMMAND);
1733 tegra_dc_writel(dc, GENERAL_UPDATE, DC_CMD_STATE_CONTROL);
1734 tegra_dc_writel(dc, GENERAL_ACT_REQ, DC_CMD_STATE_CONTROL);
1737 /* balance extra dc clk reference */
1738 clk_disable_unprepare(dc->clk);
1740 dsi->status.dc_stream = DSI_DC_STREAM_ENABLE;
/*
 * Program the DC shift-clock divider for DSI. Includes the HW WAR for
 * bug 1045373: write N+2 first, activate, then write the target divider
 * (Tegra 14x only). On non-silicon platforms a fixed divider is forced.
 */
1743 static void tegra_dsi_set_dc_clk(struct tegra_dc *dc,
1744 struct tegra_dc_dsi_data *dsi)
1746 u32 shift_clk_div_register;
/* Register encoding of the rational divider: (mul/div - 1) * 2. */
1749 /* formula: (dsi->shift_clk_div - 1) * 2 */
1750 shift_clk_div_register = (dsi->shift_clk_div.mul -
1751 dsi->shift_clk_div.div) * 2 /
1752 dsi->shift_clk_div.div;
1754 #ifndef CONFIG_TEGRA_SILICON_PLATFORM
1755 shift_clk_div_register = 1;
1756 if (dsi->info.ganged_type)
1757 shift_clk_div_register = 0;
1760 val = PIXEL_CLK_DIVIDER_PCD1 |
1761 SHIFT_CLK_DIVIDER(shift_clk_div_register + 2);
1763 /* SW WAR for bug 1045373. To make the shift clk dividor effect under
1764 * all circumstances, write N+2 to SHIFT_CLK_DIVIDER and activate it.
1765 * After 2us delay, write the target values to it. */
1766 #if defined(CONFIG_ARCH_TEGRA_14x_SOC)
1767 tegra_dc_writel(dc, val, DC_DISP_DISP_CLOCK_CONTROL);
1768 tegra_dc_writel(dc, GENERAL_UPDATE, DC_CMD_STATE_CONTROL);
1769 tegra_dc_writel(dc, GENERAL_ACT_REQ, DC_CMD_STATE_CONTROL);
1774 /* TODO: find out if PCD3 option is required */
1775 val = PIXEL_CLK_DIVIDER_PCD1 |
1776 SHIFT_CLK_DIVIDER(shift_clk_div_register);
1778 tegra_dc_writel(dc, val, DC_DISP_DISP_CLOCK_CONTROL);
/*
 * Set the DSI clock to 'clk' kHz and derive the DC pixel clock from the
 * shift-clock divider. Also sets the one-shot work delay (~4 frame
 * periods) and caches the resulting clock rate / bit period.
 */
1781 static void tegra_dsi_set_dsi_clk(struct tegra_dc *dc,
1782 struct tegra_dc_dsi_data *dsi, u32 clk)
1787 /* Round up to MHz */
1792 /* Set up pixel clock */
1793 pclk_khz = (clk * dsi->shift_clk_div.div) /
1794 dsi->shift_clk_div.mul;
1796 dc->mode.pclk = pclk_khz * 1000;
1798 dc->shift_clk_div.mul = dsi->shift_clk_div.mul;
1799 dc->shift_clk_div.div = dsi->shift_clk_div.div;
1801 /* TODO: Define one shot work delay in board file. */
1802 /* Since for one-shot mode, refresh rate is usually set larger than
1803 * expected refresh rate, it needs at least 3 frame period. Less
1804 * delay one shot work is, more powering saving we have. */
1805 dc->one_shot_delay_ms = 4 *
1806 DIV_ROUND_UP(S_TO_MS(1), dsi->info.refresh_rate);
1808 /* Enable DSI clock */
1809 tegra_dc_setup_clk(dc, dsi->dsi_clk);
1810 tegra_dsi_clk_enable(dsi);
1811 tegra_periph_reset_deassert(dsi->dsi_clk);
/* Bit clock is DDR: 2 bits per clock cycle. */
1813 dsi->current_dsi_clk_khz = clk_get_rate(dsi->dsi_clk) / 1000;
1814 dsi->current_bit_clk_ns = 1000*1000 / (dsi->current_dsi_clk_khz * 2);
/*
 * Enable the HS clock lane output, either continuous or TX-only per the
 * board configuration, and switch host transmission to high speed.
 */
1817 static void tegra_dsi_hs_clk_out_enable(struct tegra_dc_dsi_data *dsi)
1821 val = tegra_dsi_readl(dsi, DSI_CONTROL);
1822 val &= ~DSI_CONTROL_HS_CLK_CTRL(1);
1824 if (dsi->info.video_clock_mode == TEGRA_DSI_VIDEO_CLOCK_CONTINUOUS) {
1825 val |= DSI_CONTROL_HS_CLK_CTRL(CONTINUOUS);
1826 dsi->status.clk_mode = DSI_PHYCLK_CONTINUOUS;
1828 val |= DSI_CONTROL_HS_CLK_CTRL(TX_ONLY);
1829 dsi->status.clk_mode = DSI_PHYCLK_TX_ONLY;
1831 tegra_dsi_writel(dsi, val, DSI_CONTROL);
1833 val = tegra_dsi_readl(dsi, DSI_HOST_DSI_CONTROL);
1834 val &= ~DSI_HOST_DSI_CONTROL_HIGH_SPEED_TRANS(1);
1835 val |= DSI_HOST_DSI_CONTROL_HIGH_SPEED_TRANS(TEGRA_DSI_HIGH);
1836 tegra_dsi_writel(dsi, val, DSI_HOST_DSI_CONTROL);
1838 dsi->status.clk_out = DSI_PHYCLK_OUT_EN;
/*
 * Enable the HS clock lane but keep host transmission in low-power mode
 * (undoes the HIGH_SPEED_TRANS setting made by hs_clk_out_enable).
 */
1841 static void tegra_dsi_hs_clk_out_enable_in_lp(struct tegra_dc_dsi_data *dsi)
1844 tegra_dsi_hs_clk_out_enable(dsi);
1846 val = tegra_dsi_readl(dsi, DSI_HOST_DSI_CONTROL);
1847 val &= ~DSI_HOST_DSI_CONTROL_HIGH_SPEED_TRANS(1);
1848 val |= DSI_HOST_DSI_CONTROL_HIGH_SPEED_TRANS(TEGRA_DSI_LOW);
1849 tegra_dsi_writel(dsi, val, DSI_HOST_DSI_CONTROL);
/*
 * Disable the HS clock lane: stop the DC stream if active, power-cycle
 * the DSI interface while dropping host transmission to low power, then
 * mark the PHY clock state as uninitialized/disabled.
 */
1852 static void tegra_dsi_hs_clk_out_disable(struct tegra_dc *dc,
1853 struct tegra_dc_dsi_data *dsi)
1857 if (dsi->status.dc_stream == DSI_DC_STREAM_ENABLE)
1858 tegra_dsi_stop_dc_stream_at_frame_end(dc, dsi, 2);
1860 tegra_dsi_writel(dsi, TEGRA_DSI_DISABLE, DSI_POWER_CONTROL);
1861 /* stabilization delay */
1864 val = tegra_dsi_readl(dsi, DSI_HOST_DSI_CONTROL);
1865 val &= ~DSI_HOST_DSI_CONTROL_HIGH_SPEED_TRANS(1);
1866 val |= DSI_HOST_DSI_CONTROL_HIGH_SPEED_TRANS(TEGRA_DSI_LOW);
1867 tegra_dsi_writel(dsi, val, DSI_HOST_DSI_CONTROL);
1869 tegra_dsi_writel(dsi, TEGRA_DSI_ENABLE, DSI_POWER_CONTROL);
1870 /* stabilization delay */
1873 dsi->status.clk_mode = DSI_PHYCLK_NOT_INIT;
1874 dsi->status.clk_out = DSI_PHYCLK_OUT_DIS;
/*
 * Configure the DSI control registers for host-driven low-power (LP)
 * command mode and update the cached status accordingly.
 */
1877 static void tegra_dsi_set_control_reg_lp(struct tegra_dc_dsi_data *dsi)
1880 u32 host_dsi_control;
1883 dsi_control = dsi->dsi_control_val | DSI_CTRL_HOST_DRIVEN;
1884 host_dsi_control = HOST_DSI_CTRL_COMMON |
1885 HOST_DSI_CTRL_HOST_DRIVEN |
1886 DSI_HOST_DSI_CONTROL_HIGH_SPEED_TRANS(TEGRA_DSI_LOW);
1887 max_threshold = DSI_MAX_THRESHOLD_MAX_THRESHOLD(DSI_HOST_FIFO_DEPTH);
1889 tegra_dsi_writel(dsi, max_threshold, DSI_MAX_THRESHOLD);
1890 tegra_dsi_writel(dsi, dsi_control, DSI_CONTROL);
1891 tegra_dsi_writel(dsi, host_dsi_control, DSI_HOST_DSI_CONTROL);
1893 dsi->status.driven = DSI_DRIVEN_MODE_HOST;
1894 dsi->status.clk_burst = DSI_CLK_BURST_NOT_INIT;
1895 dsi->status.vtype = DSI_VIDEO_TYPE_NOT_INIT;
/*
 * Configure the DSI control registers for high-speed operation, either
 * host-driven or DC-driven. DC-driven command mode additionally programs
 * the DCS write-memory start/continue commands.
 */
1898 static void tegra_dsi_set_control_reg_hs(struct tegra_dc_dsi_data *dsi,
1902 u32 host_dsi_control;
1906 dsi_control = dsi->dsi_control_val;
1907 host_dsi_control = HOST_DSI_CTRL_COMMON;
1911 if (driven_mode == TEGRA_DSI_DRIVEN_BY_HOST) {
1912 dsi_control |= DSI_CTRL_HOST_DRIVEN;
1913 host_dsi_control |= HOST_DSI_CTRL_HOST_DRIVEN;
1915 DSI_MAX_THRESHOLD_MAX_THRESHOLD(DSI_HOST_FIFO_DEPTH);
1916 dsi->status.driven = DSI_DRIVEN_MODE_HOST;
1918 dsi_control |= DSI_CTRL_DC_DRIVEN;
1919 host_dsi_control |= HOST_DSI_CTRL_DC_DRIVEN;
1921 DSI_MAX_THRESHOLD_MAX_THRESHOLD(DSI_VIDEO_FIFO_DEPTH);
1922 dsi->status.driven = DSI_DRIVEN_MODE_DC;
1924 if (dsi->info.video_data_type ==
1925 TEGRA_DSI_VIDEO_TYPE_COMMAND_MODE) {
1926 dsi_control |= DSI_CTRL_CMD_MODE;
/* DCS commands used by the HW to push framebuffer lines. */
1927 dcs_cmd = DSI_DCS_CMDS_LT5_DCS_CMD(
1928 DSI_WRITE_MEMORY_START)|
1929 DSI_DCS_CMDS_LT3_DCS_CMD(
1930 DSI_WRITE_MEMORY_CONTINUE);
1931 dsi->status.vtype = DSI_VIDEO_TYPE_CMD_MODE;
1933 dsi_control |= DSI_CTRL_VIDEO_MODE;
1934 dsi->status.vtype = DSI_VIDEO_TYPE_VIDEO_MODE;
1938 tegra_dsi_writel(dsi, max_threshold, DSI_MAX_THRESHOLD);
1939 tegra_dsi_writel(dsi, dcs_cmd, DSI_DCS_CMDS);
1940 tegra_dsi_writel(dsi, dsi_control, DSI_CONTROL);
1941 tegra_dsi_writel(dsi, host_dsi_control, DSI_HOST_DSI_CONTROL);
/*
 * Power down the DSI pads (data + clock lanes). Uses the VS1 pad control
 * register layout on newer controllers, the legacy layout otherwise.
 */
1944 static void tegra_dsi_pad_disable(struct tegra_dc_dsi_data *dsi)
1948 if (dsi->info.controller_vs == DSI_VS_1) {
1949 val = tegra_dsi_readl(dsi, DSI_PAD_CONTROL_0_VS1);
1950 val &= ~(DSI_PAD_CONTROL_0_VS1_PAD_PDIO(0xf) |
1951 DSI_PAD_CONTROL_0_VS1_PAD_PDIO_CLK(0x1) |
1952 DSI_PAD_CONTROL_0_VS1_PAD_PULLDN_ENAB(0xf) |
1953 DSI_PAD_CONTROL_0_VS1_PAD_PULLDN_CLK_ENAB(0x1));
1954 val |= DSI_PAD_CONTROL_0_VS1_PAD_PDIO(0xf) |
1955 DSI_PAD_CONTROL_0_VS1_PAD_PDIO_CLK
1956 (TEGRA_DSI_PAD_DISABLE) |
1957 DSI_PAD_CONTROL_0_VS1_PAD_PULLDN_ENAB(0xf) |
1958 DSI_PAD_CONTROL_0_VS1_PAD_PULLDN_CLK_ENAB
1959 (TEGRA_DSI_PAD_DISABLE);
1960 tegra_dsi_writel(dsi, val, DSI_PAD_CONTROL_0_VS1);
1962 val = tegra_dsi_readl(dsi, DSI_PAD_CONTROL);
1963 val &= ~(DSI_PAD_CONTROL_PAD_PDIO(0x3) |
1964 DSI_PAD_CONTROL_PAD_PDIO_CLK(0x1) |
1965 DSI_PAD_CONTROL_PAD_PULLDN_ENAB(0x1));
1966 val |= DSI_PAD_CONTROL_PAD_PDIO(0x3) |
1967 DSI_PAD_CONTROL_PAD_PDIO_CLK(TEGRA_DSI_PAD_DISABLE) |
1968 DSI_PAD_CONTROL_PAD_PULLDN_ENAB(TEGRA_DSI_PAD_DISABLE);
1969 tegra_dsi_writel(dsi, val, DSI_PAD_CONTROL);
/*
 * Power up the DSI pads (data + clock lanes); mirror image of
 * tegra_dsi_pad_disable() for both VS1 and legacy register layouts.
 */
1973 static void tegra_dsi_pad_enable(struct tegra_dc_dsi_data *dsi)
1977 if (dsi->info.controller_vs == DSI_VS_1) {
1978 val = tegra_dsi_readl(dsi, DSI_PAD_CONTROL_0_VS1);
1979 val &= ~(DSI_PAD_CONTROL_0_VS1_PAD_PDIO(0xf) |
1980 DSI_PAD_CONTROL_0_VS1_PAD_PDIO_CLK(0x1) |
1981 DSI_PAD_CONTROL_0_VS1_PAD_PULLDN_ENAB(0xf) |
1982 DSI_PAD_CONTROL_0_VS1_PAD_PULLDN_CLK_ENAB(0x1));
1983 val |= DSI_PAD_CONTROL_0_VS1_PAD_PDIO(TEGRA_DSI_PAD_ENABLE) |
1984 DSI_PAD_CONTROL_0_VS1_PAD_PDIO_CLK(
1985 TEGRA_DSI_PAD_ENABLE) |
1986 DSI_PAD_CONTROL_0_VS1_PAD_PULLDN_ENAB(
1987 TEGRA_DSI_PAD_ENABLE) |
1988 DSI_PAD_CONTROL_0_VS1_PAD_PULLDN_CLK_ENAB(
1989 TEGRA_DSI_PAD_ENABLE);
1990 tegra_dsi_writel(dsi, val, DSI_PAD_CONTROL_0_VS1);
1992 val = tegra_dsi_readl(dsi, DSI_PAD_CONTROL);
1993 val &= ~(DSI_PAD_CONTROL_PAD_PDIO(0x3) |
1994 DSI_PAD_CONTROL_PAD_PDIO_CLK(0x1) |
1995 DSI_PAD_CONTROL_PAD_PULLDN_ENAB(0x1));
1996 val |= DSI_PAD_CONTROL_PAD_PDIO(TEGRA_DSI_PAD_ENABLE) |
1997 DSI_PAD_CONTROL_PAD_PDIO_CLK(TEGRA_DSI_PAD_ENABLE) |
1998 DSI_PAD_CONTROL_PAD_PULLDN_ENAB(TEGRA_DSI_PAD_ENABLE);
1999 tegra_dsi_writel(dsi, val, DSI_PAD_CONTROL);
/*
 * Run DSI pad calibration. VS1 controllers use the MIPI cal block;
 * legacy controllers program drive-strength/slew values directly in
 * DSI_PAD_CONTROL and the VI/CSI MIPI cal config registers.
 * The magic values are characterization data — see the TRM.
 */
2003 static void tegra_dsi_pad_calibration(struct tegra_dc_dsi_data *dsi)
2008 tegra_dsi_pad_enable(dsi);
2010 tegra_dsi_pad_disable(dsi);
2012 if (dsi->info.controller_vs == DSI_VS_1) {
2013 /* TODO: characterization parameters */
2014 tegra_mipi_cal_clk_enable(dsi->mipi_cal);
2016 tegra_mipi_cal_init_hw(dsi->mipi_cal);
2018 tegra_mipi_cal_write(dsi->mipi_cal,
2019 MIPI_BIAS_PAD_E_VCLAMP_REF(0x1),
2020 MIPI_CAL_MIPI_BIAS_PAD_CFG0_0);
2021 tegra_mipi_cal_write(dsi->mipi_cal,
2023 MIPI_CAL_MIPI_BIAS_PAD_CFG2_0);
2025 tegra_mipi_cal_clk_disable(dsi->mipi_cal);
2027 val = tegra_dsi_readl(dsi, DSI_PAD_CONTROL);
2028 val &= ~(DSI_PAD_CONTROL_PAD_LPUPADJ(0x3) |
2029 DSI_PAD_CONTROL_PAD_LPDNADJ(0x3) |
2030 DSI_PAD_CONTROL_PAD_PREEMP_EN(0x1) |
2031 DSI_PAD_CONTROL_PAD_SLEWDNADJ(0x7) |
2032 DSI_PAD_CONTROL_PAD_SLEWUPADJ(0x7));
2034 val |= DSI_PAD_CONTROL_PAD_LPUPADJ(0x1) |
2035 DSI_PAD_CONTROL_PAD_LPDNADJ(0x1) |
2036 DSI_PAD_CONTROL_PAD_PREEMP_EN(0x1) |
2037 DSI_PAD_CONTROL_PAD_SLEWDNADJ(0x6) |
2038 DSI_PAD_CONTROL_PAD_SLEWUPADJ(0x6);
2040 tegra_dsi_writel(dsi, val, DSI_PAD_CONTROL);
2042 val = MIPI_CAL_TERMOSA(0x4);
2043 tegra_vi_csi_writel(val, CSI_CILA_MIPI_CAL_CONFIG_0);
2045 val = MIPI_CAL_TERMOSB(0x4);
2046 tegra_vi_csi_writel(val, CSI_CILB_MIPI_CAL_CONFIG_0);
2048 val = MIPI_CAL_HSPUOSD(0x3) | MIPI_CAL_HSPDOSD(0x4);
2049 tegra_vi_csi_writel(val, CSI_DSI_MIPI_CAL_CONFIG);
2051 val = PAD_DRIV_DN_REF(0x5) | PAD_DRIV_UP_REF(0x7);
2052 tegra_vi_csi_writel(val, CSI_MIPIBIAS_PAD_CONFIG);
2055 val = PAD_CIL_PDVREG(0x0);
2056 tegra_vi_csi_writel(val, CSI_CIL_PAD_CONFIG);
/*
 * Route the second DSI instance (DSI-B) through the MIPI pads by setting
 * DSIB_MODE_ENABLE in APB_MISC_GP_MIPI_PAD_CTRL_0.
 */
2059 static void tegra_dsi_panelB_enable(void)
2063 val = readl(IO_ADDRESS(APB_MISC_GP_MIPI_PAD_CTRL_0));
2064 val |= DSIB_MODE_ENABLE;
2065 writel(val, (IO_ADDRESS(APB_MISC_GP_MIPI_PAD_CTRL_0)));
/*
 * One-time DSI hardware initialization: enable the AVDD regulator, set
 * the LP clock, stop any bootloader-started DC stream, zero the init
 * register lists, calibrate pads, and re-enable the interface. Resets
 * all cached status fields to their NOT_INIT/disabled values.
 * NOTE(review): regulator_enable() return value is ignored here — the
 * missing lines may or may not check it; flagging for follow-up.
 */
2068 static int tegra_dsi_init_hw(struct tegra_dc *dc,
2069 struct tegra_dc_dsi_data *dsi)
2073 regulator_enable(dsi->avdd_dsi_csi);
2074 /* stablization delay */
2077 tegra_dsi_set_dsi_clk(dc, dsi, dsi->target_lp_clk_khz);
2079 /* Stop DC stream before configuring DSI registers
2080 * to avoid visible glitches on panel during transition
2081 * from bootloader to kernel driver
2083 tegra_dsi_stop_dc_stream_at_frame_end(dc, dsi, 2);
2085 tegra_dsi_writel(dsi,
2086 DSI_POWER_CONTROL_LEG_DSI_ENABLE(TEGRA_DSI_DISABLE),
2088 /* stabilization delay */
2091 if (dsi->info.dsi_instance)
2092 tegra_dsi_panelB_enable();
2094 tegra_dsi_set_phy_timing(dsi, DSI_LPHS_IN_LP_MODE);
2096 /* Initialize DSI registers */
2097 for (i = 0; i < ARRAY_SIZE(init_reg); i++)
2098 tegra_dsi_writel(dsi, 0, init_reg[i]);
2099 if (dsi->info.controller_vs == DSI_VS_1) {
2100 for (i = 0; i < ARRAY_SIZE(init_reg_vs1_ext); i++)
2101 tegra_dsi_writel(dsi, 0, init_reg_vs1_ext[i]);
2104 tegra_dsi_pad_calibration(dsi);
2106 tegra_dsi_writel(dsi,
2107 DSI_POWER_CONTROL_LEG_DSI_ENABLE(TEGRA_DSI_ENABLE),
2109 /* stabilization delay */
2112 dsi->status.init = DSI_MODULE_INIT;
2113 dsi->status.lphs = DSI_LPHS_NOT_INIT;
2114 dsi->status.vtype = DSI_VIDEO_TYPE_NOT_INIT;
2115 dsi->status.driven = DSI_DRIVEN_MODE_NOT_INIT;
2116 dsi->status.clk_out = DSI_PHYCLK_OUT_DIS;
2117 dsi->status.clk_mode = DSI_PHYCLK_NOT_INIT;
2118 dsi->status.clk_burst = DSI_CLK_BURST_NOT_INIT;
2119 dsi->status.dc_stream = DSI_DC_STREAM_DISABLE;
2120 dsi->status.lp_op = DSI_LP_OP_NOT_INIT;
/*
 * Transition the DSI link to low-power (LP) mode for the requested
 * operation (lp_op: read or write). No-op if already in LP mode with the
 * same operation. Stops the DC stream, reprograms clock/timeouts/PHY
 * timing if the LP clock differs, and updates the cached status.
 */
2125 static int tegra_dsi_set_to_lp_mode(struct tegra_dc *dc,
2126 struct tegra_dc_dsi_data *dsi, u8 lp_op)
2130 if (dsi->status.init != DSI_MODULE_INIT) {
2135 if (dsi->status.lphs == DSI_LPHS_IN_LP_MODE &&
2136 dsi->status.lp_op == lp_op)
2139 if (dsi->status.dc_stream == DSI_DC_STREAM_ENABLE)
2140 tegra_dsi_stop_dc_stream_at_frame_end(dc, dsi, 2);
2142 /* disable/enable hs clk according to enable_hs_clock_on_lp_cmd_mode */
2143 if ((dsi->status.clk_out == DSI_PHYCLK_OUT_EN) &&
2144 (!dsi->info.enable_hs_clock_on_lp_cmd_mode))
2145 tegra_dsi_hs_clk_out_disable(dc, dsi);
2147 dsi->target_lp_clk_khz = tegra_dsi_get_lp_clk_rate(dsi, lp_op);
2148 if (dsi->current_dsi_clk_khz != dsi->target_lp_clk_khz) {
2149 tegra_dsi_set_dsi_clk(dc, dsi, dsi->target_lp_clk_khz);
2150 tegra_dsi_set_timeout(dsi);
2153 tegra_dsi_set_phy_timing(dsi, DSI_LPHS_IN_LP_MODE);
2155 tegra_dsi_set_control_reg_lp(dsi);
2157 if ((dsi->status.clk_out == DSI_PHYCLK_OUT_DIS) &&
2158 (dsi->info.enable_hs_clock_on_lp_cmd_mode))
2159 tegra_dsi_hs_clk_out_enable_in_lp(dsi);
2161 dsi->status.lphs = DSI_LPHS_IN_LP_MODE;
2162 dsi->status.lp_op = lp_op;
2163 dsi->driven_mode = TEGRA_DSI_DRIVEN_BY_HOST;
/*
 * Configure ganged mode: split the active width between the two DSI
 * controllers (left/right halves, or even/odd pixels) and program the
 * start pointer, valid low/high widths, and the ganged-mode enable.
 * Requires controller version >= DSI_VS_1.
 */
2170 static void tegra_dsi_ganged(struct tegra_dc *dc,
2171 struct tegra_dc_dsi_data *dsi)
2176 u32 h_active = dc->out->modes->h_active;
2179 if (dsi->info.controller_vs < DSI_VS_1) {
2180 dev_err(&dc->ndev->dev, "dsi: ganged mode not"
2181 "supported with current controller version\n");
2185 if (dsi->info.ganged_type ==
2186 TEGRA_DSI_GANGED_SYMMETRIC_LEFT_RIGHT) {
2187 low_width = DIV_ROUND_UP(h_active, 2);
2188 high_width = h_active - low_width;
/* Second controller starts at the midpoint of the line. */
2189 if (dsi->controller_index)
2190 start = h_active / 2;
2193 } else if (dsi->info.ganged_type ==
2194 TEGRA_DSI_GANGED_SYMMETRIC_EVEN_ODD) {
2197 if (dsi->controller_index)
2203 tegra_dsi_writel(dsi, DSI_GANGED_MODE_START_POINTER(start),
2204 DSI_GANGED_MODE_START);
2206 val = DSI_GANGED_MODE_SIZE_VALID_LOW_WIDTH(low_width) |
2207 DSI_GANGED_MODE_SIZE_VALID_HIGH_WIDTH(high_width);
2208 tegra_dsi_writel(dsi, val, DSI_GANGED_MODE_SIZE);
2210 tegra_dsi_writel(dsi, DSI_GANGED_MODE_CONTROL_EN(TEGRA_DSI_ENABLE),
2211 DSI_GANGED_MODE_CONTROL);
/*
 * Transition the DSI link to high-speed (HS) mode, driven either by the
 * DC (video stream) or by the host (HS command mode). No-op if already
 * in HS mode with the same driver. DC-driven mode additionally programs
 * the packet sequence, packet lengths, SOL delay and DC clocks.
 */
2214 static int tegra_dsi_set_to_hs_mode(struct tegra_dc *dc,
2215 struct tegra_dc_dsi_data *dsi,
2220 if (dsi->status.init != DSI_MODULE_INIT) {
2225 if (dsi->status.lphs == DSI_LPHS_IN_HS_MODE &&
2226 dsi->driven_mode == driven_mode)
2229 dsi->driven_mode = driven_mode;
2231 if (dsi->status.dc_stream == DSI_DC_STREAM_ENABLE)
2232 tegra_dsi_stop_dc_stream_at_frame_end(dc, dsi, 2);
2234 if ((dsi->status.clk_out == DSI_PHYCLK_OUT_EN) &&
2235 (!dsi->info.enable_hs_clock_on_lp_cmd_mode))
2236 tegra_dsi_hs_clk_out_disable(dc, dsi);
2238 if (dsi->current_dsi_clk_khz != dsi->target_hs_clk_khz) {
2239 tegra_dsi_set_dsi_clk(dc, dsi, dsi->target_hs_clk_khz);
2240 tegra_dsi_set_timeout(dsi);
2243 tegra_dsi_set_phy_timing(dsi, DSI_LPHS_IN_HS_MODE);
2245 if (driven_mode == TEGRA_DSI_DRIVEN_BY_DC) {
2246 tegra_dsi_set_pkt_seq(dc, dsi);
2247 tegra_dsi_set_pkt_length(dc, dsi);
2248 tegra_dsi_set_sol_delay(dc, dsi);
2249 tegra_dsi_set_dc_clk(dc, dsi);
2252 tegra_dsi_set_control_reg_hs(dsi, driven_mode);
2254 if (dsi->info.ganged_type)
2255 tegra_dsi_ganged(dc, dsi);
2257 if (dsi->status.clk_out == DSI_PHYCLK_OUT_DIS ||
2258 dsi->info.enable_hs_clock_on_lp_cmd_mode)
2259 tegra_dsi_hs_clk_out_enable(dsi);
2261 dsi->status.lphs = DSI_LPHS_IN_HS_MODE;
2263 dsi->status.lp_op = DSI_LP_OP_NOT_INIT;
/*
 * Poll the host trigger bit for up to DSI_MAX_COMMAND_DELAY_USEC.
 * Returns true while a host write is still in flight (trigger set).
 */
2269 static bool tegra_dsi_write_busy(struct tegra_dc_dsi_data *dsi)
2274 while (timeout <= DSI_MAX_COMMAND_DELAY_USEC) {
2275 if (!(DSI_TRIGGER_HOST_TRIGGER(0x1) &
2276 tegra_dsi_readl(dsi, DSI_TRIGGER))) {
2280 udelay(DSI_COMMAND_DELAY_STEPS_USEC);
2281 timeout += DSI_COMMAND_DELAY_STEPS_USEC;
/*
 * Poll the immediate-BTA bit for up to DSI_STATUS_POLLING_DURATION_USEC.
 * Returns true while a host read (bus turnaround) is still pending.
 */
2287 static bool tegra_dsi_read_busy(struct tegra_dc_dsi_data *dsi)
2292 while (timeout < DSI_STATUS_POLLING_DURATION_USEC) {
2293 if (!(DSI_HOST_DSI_CONTROL_IMM_BTA(0x1) &
2294 tegra_dsi_readl(dsi, DSI_HOST_DSI_CONTROL))) {
2298 udelay(DSI_STATUS_POLLING_DELAY_USEC);
2299 timeout += DSI_STATUS_POLLING_DELAY_USEC;
/*
 * Check whether the host interface is busy with either a pending write
 * (trigger bit) or a pending read (immediate BTA). Logs which condition
 * is stuck; returns true if busy.
 */
2305 static bool tegra_dsi_host_busy(struct tegra_dc_dsi_data *dsi)
2309 if (tegra_dsi_write_busy(dsi)) {
2311 dev_err(&dsi->dc->ndev->dev,
2312 "DSI trigger bit already set\n");
2316 if (tegra_dsi_read_busy(dsi)) {
2318 dev_err(&dsi->dc->ndev->dev,
2319 "DSI immediate bta bit already set\n");
2323 return (err < 0 ? true : false);
/*
 * If the read FIFO count in DSI_STATUS is non-zero (stale data from a
 * previous read), warn and soft-reset the DSI block to clear it.
 */
2326 static void tegra_dsi_reset_read_count(struct tegra_dc_dsi_data *dsi)
2330 val = tegra_dsi_readl(dsi, DSI_STATUS);
2331 val &= DSI_STATUS_RD_FIFO_COUNT(0x1f);
2333 dev_warn(&dsi->dc->ndev->dev,
2334 "DSI read count not zero, resetting\n");
2335 tegra_dsi_soft_reset(dsi);
/*
 * Snapshot the current DSI status into a kzalloc'd dsi_status and switch
 * the link to host-driven command mode: HS if the panel supports HS
 * commands, otherwise LP with the requested lp_op. Returns the saved
 * state (caller owns/frees it) or an ERR_PTR on failure.
 */
2339 static struct dsi_status *tegra_dsi_save_state_switch_to_host_cmd_mode(
2340 struct tegra_dc_dsi_data *dsi,
2341 struct tegra_dc *dc,
2344 struct dsi_status *init_status = NULL;
2347 if (dsi->status.init != DSI_MODULE_INIT ||
2348 dsi->status.lphs == DSI_LPHS_NOT_INIT) {
2353 init_status = kzalloc(sizeof(*init_status), GFP_KERNEL);
2355 return ERR_PTR(-ENOMEM);
2357 *init_status = dsi->status;
2359 if (dsi->info.hs_cmd_mode_supported) {
2360 err = tegra_dsi_set_to_hs_mode(dc, dsi,
2361 TEGRA_DSI_DRIVEN_BY_HOST);
2363 dev_err(&dc->ndev->dev,
2364 "Switch to HS host mode failed\n");
2371 if (dsi->status.lp_op != lp_op) {
2372 err = tegra_dsi_set_to_lp_mode(dc, dsi, lp_op);
2374 dev_err(&dc->ndev->dev,
2375 "DSI failed to go to LP mode\n");
2383 return ERR_PTR(err);
/*
 * Prepare the controller for a host-driven transaction (lp_op is read or
 * write): pause the DC pixel stream if it is running, make sure the host
 * is idle (soft-resetting once if not), clear stale read-FIFO data for
 * reads, and save the current state for later restore.
 *
 * Returns the saved dsi_status (with dc_stream marked for restart if the
 * stream was active), or an ERR_PTR on failure.
 */
2386 static struct dsi_status *tegra_dsi_prepare_host_transmission(
2387 struct tegra_dc *dc,
2388 struct tegra_dc_dsi_data *dsi,
2392 struct dsi_status *init_status;
2393 bool restart_dc_stream = false;
2395 if (dsi->status.init != DSI_MODULE_INIT ||
/* Stop the display stream at a frame boundary to avoid mid-frame cuts. */
2401 if (dsi->status.dc_stream == DSI_DC_STREAM_ENABLE) {
2402 restart_dc_stream = true;
2403 tegra_dsi_stop_dc_stream_at_frame_end(dc, dsi, 2);
/* One recovery attempt: soft reset, then re-check the busy state. */
2406 if (tegra_dsi_host_busy(dsi)) {
2407 tegra_dsi_soft_reset(dsi);
2408 if (tegra_dsi_host_busy(dsi)) {
2410 dev_err(&dc->ndev->dev, "DSI host busy\n");
2415 if (lp_op == DSI_LP_OP_READ)
2416 tegra_dsi_reset_read_count(dsi);
2418 if (dsi->status.lphs == DSI_LPHS_NOT_INIT) {
2419 err = tegra_dsi_set_to_lp_mode(dc, dsi, lp_op);
2421 dev_err(&dc->ndev->dev, "Failed to config LP write\n");
2426 init_status = tegra_dsi_save_state_switch_to_host_cmd_mode
2428 if (IS_ERR_OR_NULL(init_status)) {
2429 err = PTR_ERR(init_status);
2430 dev_err(&dc->ndev->dev, "DSI state saving failed\n");
/* Record that restore must restart the DC stream. */
2434 if (restart_dc_stream)
2435 init_status->dc_stream = DSI_DC_STREAM_ENABLE;
2437 if (atomic_read(&dsi_syncpt_rst))
2438 tegra_dsi_syncpt_reset(dsi);
2442 return ERR_PTR(err);
/*
 * Undo tegra_dsi_prepare_host_transmission(): put the link back into the
 * LP or HS mode recorded in init_status and restart the DC pixel stream
 * if it was running before.  Returns 0 on success, negative error code
 * otherwise.
 */
2445 static int tegra_dsi_restore_state(struct tegra_dc *dc,
2446 struct tegra_dc_dsi_data *dsi,
2447 struct dsi_status *init_status)
2451 if (init_status->lphs == DSI_LPHS_IN_LP_MODE) {
2452 err = tegra_dsi_set_to_lp_mode(dc, dsi, init_status->lp_op);
2454 dev_err(&dc->ndev->dev,
2455 "Failed to config LP mode\n");
2461 if (init_status->lphs == DSI_LPHS_IN_HS_MODE) {
/* Map the saved internal driven-mode back to the public enum. */
2462 u8 driven = (init_status->driven == DSI_DRIVEN_MODE_DC) ?
2463 TEGRA_DSI_DRIVEN_BY_DC : TEGRA_DSI_DRIVEN_BY_HOST;
2464 err = tegra_dsi_set_to_hs_mode(dc, dsi, driven);
2466 dev_err(&dc->ndev->dev, "Failed to config HS mode\n");
2471 if (init_status->dc_stream == DSI_DC_STREAM_ENABLE)
2472 tegra_dsi_start_dc_stream(dc, dsi);
/*
 * Fire the host trigger to transmit the packet queued in the write FIFO,
 * then wait for completion either via sync points (when enabled at build
 * time) or by polling the trigger bit.  Returns 0 on success, negative
 * error on timeout or if a trigger was already pending.
 */
2479 static int tegra_dsi_host_trigger(struct tegra_dc_dsi_data *dsi)
/* A non-zero DSI_TRIGGER means a previous transmission is still pending. */
2483 if (tegra_dsi_readl(dsi, DSI_TRIGGER)) {
2488 tegra_dsi_writel(dsi,
2489 DSI_TRIGGER_HOST_TRIGGER(TEGRA_DSI_ENABLE), DSI_TRIGGER);
2491 #if DSI_USE_SYNC_POINTS
2492 status = tegra_dsi_syncpt(dsi);
2494 dev_err(&dsi->dc->ndev->dev,
2495 "DSI syncpt for host trigger failed\n");
/* Polling fallback when sync points are compiled out. */
2499 if (tegra_dsi_write_busy(dsi)) {
2501 dev_err(&dsi->dc->ndev->dev,
2502 "Timeout waiting on write completion\n");
/*
 * Low-level packet write: compose the packet header (virtual channel |
 * data id, length) into DSI_WR_DATA, append long-packet payload words
 * when pdata is non-NULL, then trigger the host transmission.
 * Assumes the host interface was already prepared by the caller.
 */
2510 static int _tegra_dsi_write_data(struct tegra_dc_dsi_data *dsi,
2511 u8 *pdata, u8 data_id, u16 data_len)
2519 virtual_channel = dsi->info.virtual_channel <<
2520 DSI_VIR_CHANNEL_BIT_POSITION;
2522 /* always use hw for ecc */
2523 val = (virtual_channel | data_id) << 0 |
2525 tegra_dsi_writel(dsi, val, DSI_WR_DATA);
2527 /* if pdata != NULL, pkt type is long pkt */
2528 if (pdata != NULL) {
/* Payload is pushed 32 bits at a time; a short tail is zero-padded
 * via memcpy into val (val presumably cleared in a line not shown). */
2530 if (data_len >= 4) {
2531 val = ((u32 *) pdata)[0];
2536 memcpy(&val, pdata, data_len);
2540 tegra_dsi_writel(dsi, val, DSI_WR_DATA);
2544 err = tegra_dsi_host_trigger(dsi);
2546 dev_err(&dsi->dc->ndev->dev, "DSI host trigger failed\n");
/*
 * Take a reference on the DSI host for one-shot LP-mode panels: grab an
 * extra clock reference, bump the host refcount, and resume the host if
 * it was idle-suspended.  Balanced by tegra_dc_dsi_release_host().
 */
2551 static void tegra_dc_dsi_hold_host(struct tegra_dc *dc)
2553 struct tegra_dc_dsi_data *dsi = tegra_dc_get_outdata(dc);
2555 if (dc->out->flags & TEGRA_DC_OUT_ONE_SHOT_LP_MODE) {
2556 /* extra reference to dsi clk */
2557 clk_prepare_enable(dsi->dsi_clk);
2558 atomic_inc(&dsi->host_ref);
2559 tegra_dsi_host_resume(dc);
/*
 * Drop the host reference taken by tegra_dc_dsi_hold_host().  When the
 * last reference is released while the DC stream is running, schedule
 * delayed idle work that will host-suspend the link, then drop the extra
 * clock reference.
 */
2563 static void tegra_dc_dsi_release_host(struct tegra_dc *dc)
2565 struct tegra_dc_dsi_data *dsi = tegra_dc_get_outdata(dc);
2567 if (dc->out->flags & TEGRA_DC_OUT_ONE_SHOT_LP_MODE) {
2568 atomic_dec(&dsi->host_ref);
2570 if (!atomic_read(&dsi->host_ref) &&
2571 (dsi->status.dc_stream == DSI_DC_STREAM_ENABLE))
2572 schedule_delayed_work(&dsi->idle_work, dsi->idle_delay);
2574 /* balance extra dsi clk reference */
2575 clk_disable_unprepare(dsi->dsi_clk);
/*
 * Delayed-work handler scheduled by tegra_dc_dsi_release_host(): suspend
 * the DSI host once the link has been idle for dsi->idle_delay, but only
 * for one-shot LP-mode panels.
 */
2579 static void tegra_dc_dsi_idle_work(struct work_struct *work)
2581 struct tegra_dc_dsi_data *dsi = container_of(
2582 to_delayed_work(work), struct tegra_dc_dsi_data, idle_work);
2584 if (dsi->dc->out->flags & TEGRA_DC_OUT_ONE_SHOT_LP_MODE)
2585 tegra_dsi_host_suspend(dsi->dc);
/*
 * Write one DSI packet without taking the io-start/hold-host wrappers:
 * prepare the host for an LP write, perform the write, then restore the
 * previous link state.  Callers must hold the required locks/references
 * (see tegra_dsi_write_data() for the wrapped variant).
 */
2588 static int tegra_dsi_write_data_nosync(struct tegra_dc *dc,
2589 struct tegra_dc_dsi_data *dsi,
2590 u8 *pdata, u8 data_id, u16 data_len)
2593 struct dsi_status *init_status;
2595 init_status = tegra_dsi_prepare_host_transmission(
2596 dc, dsi, DSI_LP_OP_WRITE);
2597 if (IS_ERR_OR_NULL(init_status)) {
2598 err = PTR_ERR(init_status);
2599 dev_err(&dc->ndev->dev, "DSI host config failed\n");
2603 err = _tegra_dsi_write_data(dsi, pdata, data_id, data_len);
/* Restore runs even after a failed write so the link state is sane. */
2605 err = tegra_dsi_restore_state(dc, dsi, init_status);
2607 dev_err(&dc->ndev->dev, "Failed to restore prev state\n");
/*
 * Public entry point for writing a DSI packet: brackets the nosync write
 * with tegra_dc_io_start/end and hold/release of the host so the
 * controller is guaranteed powered and resumed for the transaction.
 */
2612 int tegra_dsi_write_data(struct tegra_dc *dc,
2613 struct tegra_dc_dsi_data *dsi,
2614 u8 *pdata, u8 data_id, u16 data_len)
2618 tegra_dc_io_start(dc);
2619 tegra_dc_dsi_hold_host(dc);
2621 err = tegra_dsi_write_data_nosync(dc, dsi, pdata, data_id, data_len);
2623 tegra_dc_dsi_release_host(dc);
2624 tegra_dc_io_end(dc);
2629 EXPORT_SYMBOL(tegra_dsi_write_data);
/*
 * Play back an array of n_cmd panel command descriptors.  Each entry is
 * dispatched on its cmd_type: toggle a GPIO, delay in milliseconds, send
 * DC frames, or transmit a DSI packet via the nosync write path.
 */
2631 static int tegra_dsi_send_panel_cmd(struct tegra_dc *dc,
2632 struct tegra_dc_dsi_data *dsi,
2633 struct tegra_dsi_cmd *cmd,
2640 for (i = 0; i < n_cmd; i++) {
2641 struct tegra_dsi_cmd *cur_cmd;
2645 * Some Panels need reset midway in the command sequence.
2647 if (cur_cmd->cmd_type == TEGRA_DSI_GPIO_SET) {
2648 gpio_set_value(cur_cmd->sp_len_dly.gpio,
2650 } else if (cur_cmd->cmd_type == TEGRA_DSI_DELAY_MS) {
/* mdelay (not msleep): busy-wait, keeps command timing deterministic. */
2651 mdelay(cur_cmd->sp_len_dly.delay_ms);
2652 } else if (cur_cmd->cmd_type == TEGRA_DSI_SEND_FRAME) {
2653 tegra_dsi_send_dc_frames(dc,
2655 cur_cmd->sp_len_dly.frame_cnt);
2657 err = tegra_dsi_write_data_nosync(dc, dsi,
2660 cur_cmd->sp_len_dly.data_len);
/*
 * Compute the MIPI DSI ECC byte for a 24-bit packet header: XOR together
 * the parity-table entry of every header bit that is set.
 */
2669 static u8 tegra_dsi_ecc(u32 header)
/* Per-bit parity contributions defined by the DSI spec's Hamming code. */
2671 char ecc_parity[24] = {
2672 0x07, 0x0b, 0x0d, 0x0e, 0x13, 0x15, 0x16, 0x19,
2673 0x1a, 0x1c, 0x23, 0x25, 0x26, 0x29, 0x2a, 0x2c,
2674 0x31, 0x32, 0x34, 0x38, 0x1f, 0x2f, 0x37, 0x3b
2680 for (i = 0; i < 24; i++)
2681 ecc_byte ^= ((header >> i) & 1) ? ecc_parity[i] : 0x00;
/*
 * Compute the 16-bit CRC (DSI long-packet checksum) over data_len bytes,
 * bit-serial LSB-first with the polynomial held in `poly` (declared in a
 * line not shown).
 */
2686 static u16 tegra_dsi_cs(char *pdata, u16 data_len)
2695 for (byte_cnt = 0; byte_cnt < data_len; byte_cnt++) {
2696 curr_byte = pdata[byte_cnt];
2697 for (bit_cnt = 0; bit_cnt < 8; bit_cnt++) {
/* If LSB of crc differs from LSB of data bit: shift and XOR poly. */
2698 if (((crc & 0x0001 ) ^
2699 (curr_byte & 0x0001)) > 0)
2700 crc = ((crc >> 1) & 0x7FFF) ^ poly;
2702 crc = (crc >> 1) & 0x7FFF;
2704 curr_byte = (curr_byte >> 1 ) & 0x7F;
/*
 * Program the DSI_INIT_SEQ_DATA registers with one DCS command packet so
 * the hardware can replay it automatically during vertical blanking:
 * header word (with software-computed ECC) first, then for long packets
 * the payload plus a trailing 16-bit checksum.
 */
2711 static int tegra_dsi_dcs_pkt_seq_ctrl_init(struct tegra_dc_dsi_data *dsi,
2712 struct tegra_dsi_cmd *cmd)
2716 u16 data_len = cmd->sp_len_dly.data_len;
2717 u8 seq_ctrl_reg = 0;
2719 virtual_channel = dsi->info.virtual_channel <<
2720 DSI_VIR_CHANNEL_BIT_POSITION;
2722 val = (virtual_channel | cmd->data_id) << 0 |
/* Unlike _tegra_dsi_write_data(), ECC is computed in software here. */
2725 val |= tegra_dsi_ecc(val) << 24;
2727 tegra_dsi_writel(dsi, val, DSI_INIT_SEQ_DATA_0 + seq_ctrl_reg++);
2729 /* if pdata != NULL, pkt type is long pkt */
2730 if (cmd->pdata != NULL) {
2733 /* allocate memory for pdata + 2 bytes checksum */
2734 pdata_mem = kzalloc(sizeof(u8) * data_len + 2, GFP_KERNEL);
2736 dev_err(&dsi->dc->ndev->dev, "dsi: memory err\n");
2737 tegra_dsi_soft_reset(dsi);
2741 memcpy(pdata_mem, cmd->pdata, data_len);
/* NOTE(review): copies into pdata_mem but checksums through `pdata` —
 * presumably pdata is aliased to pdata_mem in a line not shown; verify. */
2743 *((u16 *)(pdata + data_len)) = tegra_dsi_cs(pdata, data_len);
2745 /* data_len = length of pdata + 2 byte checksum */
2749 if (data_len >= 4) {
2750 val = ((u32 *) pdata)[0];
2755 memcpy(&val, pdata, data_len);
2759 tegra_dsi_writel(dsi, val, DSI_INIT_SEQ_DATA_0 +
/*
 * Arm the hardware init sequencer to transmit a DCS command packet during
 * every vertical blank: load the packet into the INIT_SEQ_DATA registers,
 * then program INIT_SEQ_CONTROL with the total byte count and the send
 * enable.  Only TEGRA_DSI_PACKET_CMD command types are accepted.
 * Stopped again by tegra_dsi_stop_host_cmd_v_blank_dcs().
 */
2768 int tegra_dsi_start_host_cmd_v_blank_dcs(struct tegra_dc_dsi_data * dsi,
2769 struct tegra_dsi_cmd *cmd)
2771 #define PKT_HEADER_LEN_BYTE 4
2772 #define CHECKSUM_LEN_BYTE 2
2776 u16 tot_pkt_len = PKT_HEADER_LEN_BYTE;
2777 struct tegra_dc *dc = dsi->dc;
2779 if (cmd->cmd_type != TEGRA_DSI_PACKET_CMD)
2782 mutex_lock(&dsi->lock);
2783 tegra_dc_io_start(dc);
2784 tegra_dc_dsi_hold_host(dc);
2786 #if DSI_USE_SYNC_POINTS
/* Request a syncpt reset on the next stop (see the stop routine). */
2787 atomic_set(&dsi_syncpt_rst, 1);
2790 err = tegra_dsi_dcs_pkt_seq_ctrl_init(dsi, cmd);
2792 dev_err(&dsi->dc->ndev->dev,
2793 "dsi: dcs pkt seq ctrl init failed\n");
/* Long packets add payload plus the 2-byte checksum to the total. */
2798 u16 data_len = cmd->sp_len_dly.data_len;
2799 tot_pkt_len += data_len + CHECKSUM_LEN_BYTE;
2802 val = DSI_INIT_SEQ_CONTROL_DSI_FRAME_INIT_BYTE_COUNT(tot_pkt_len) |
2803 DSI_INIT_SEQ_CONTROL_DSI_SEND_INIT_SEQUENCE(
2805 tegra_dsi_writel(dsi, val, DSI_INIT_SEQ_CONTROL);
2808 tegra_dc_dsi_release_host(dc);
2809 tegra_dc_io_end(dc);
2810 mutex_unlock(&dsi->lock);
2813 #undef PKT_HEADER_LEN_BYTE
2814 #undef CHECKSUM_LEN_BYTE
2816 EXPORT_SYMBOL(tegra_dsi_start_host_cmd_v_blank_dcs);
/*
 * Disarm the v-blank init sequencer started by
 * tegra_dsi_start_host_cmd_v_blank_dcs(): wait for frame end and reset
 * sync points if requested, disable INIT_SEQ_CONTROL, and wipe all eight
 * INIT_SEQ_DATA registers.
 */
2818 void tegra_dsi_stop_host_cmd_v_blank_dcs(struct tegra_dc_dsi_data * dsi)
2820 struct tegra_dc *dc = dsi->dc;
2823 mutex_lock(&dsi->lock);
2824 tegra_dc_io_start(dc);
2825 tegra_dc_dsi_hold_host(dc);
2827 if (atomic_read(&dsi_syncpt_rst)) {
/* Let the in-flight frame finish before resetting sync points. */
2828 tegra_dsi_wait_frame_end(dc, dsi, 2);
2829 tegra_dsi_syncpt_reset(dsi);
2830 atomic_set(&dsi_syncpt_rst, 0);
2833 tegra_dsi_writel(dsi, TEGRA_DSI_DISABLE, DSI_INIT_SEQ_CONTROL);
2835 /* clear seq data registers */
2836 for (cnt = 0; cnt < 8; cnt++)
2837 tegra_dsi_writel(dsi, 0, DSI_INIT_SEQ_DATA_0 + cnt);
2839 tegra_dc_dsi_release_host(dc);
2840 tegra_dc_io_end(dc);
2842 mutex_unlock(&dsi->lock);
2844 EXPORT_SYMBOL(tegra_dsi_stop_host_cmd_v_blank_dcs);
/*
 * Issue an immediate Bus Turn-Around so the panel can send its read
 * response, then wait for completion via sync points (when compiled in)
 * or by polling the imm-BTA bit.
 */
2846 static int tegra_dsi_bta(struct tegra_dc_dsi_data *dsi)
2851 val = tegra_dsi_readl(dsi, DSI_HOST_DSI_CONTROL);
2852 val |= DSI_HOST_DSI_CONTROL_IMM_BTA(TEGRA_DSI_ENABLE);
2853 tegra_dsi_writel(dsi, val, DSI_HOST_DSI_CONTROL);
2855 #if DSI_USE_SYNC_POINTS
2856 err = tegra_dsi_syncpt(dsi);
2858 dev_err(&dsi->dc->ndev->dev,
2859 "DSI syncpt for bta failed\n");
2862 if (tegra_dsi_read_busy(dsi)) {
2864 dev_err(&dsi->dc->ndev->dev,
/* NOTE(review): "wating" is a typo for "waiting" in this log string. */
2865 "Timeout wating on read completion\n");
/*
 * Decode and log the packet returned by a DSI read: first byte selects
 * escape/ack handling, then byte 4 is the response data-id selecting
 * long-read (payload length in bytes 5-6), 1- or 2-byte short reads, or
 * an acknowledge-and-error report.  Used for debug output only.
 */
2872 static int tegra_dsi_parse_read_response(struct tegra_dc *dc,
2873 u32 rd_fifo_cnt, u8 *read_fifo)
2881 switch (read_fifo[0]) {
2882 case DSI_ESCAPE_CMD:
2883 dev_info(&dc->ndev->dev, "escape cmd[0x%x]\n", read_fifo[0]);
2885 case DSI_ACK_NO_ERR:
2886 dev_info(&dc->ndev->dev,
2887 "Panel ack, no err[0x%x]\n", read_fifo[0]);
2890 dev_info(&dc->ndev->dev, "Invalid read response\n");
/* Response header starts at byte 4 (after the 4-byte FIFO preamble). */
2894 switch (read_fifo[4] & 0xff) {
2895 case GEN_LONG_RD_RES:
2897 case DCS_LONG_RD_RES:
/* Long read: 16-bit little-endian payload size in bytes 5 and 6. */
2898 payload_size = (read_fifo[5] |
2899 (read_fifo[6] << 8)) & 0xFFFF;
2900 dev_info(&dc->ndev->dev, "Long read response Packet\n"
2901 "payload_size[0x%x]\n", payload_size);
2903 case GEN_1_BYTE_SHORT_RD_RES:
2905 case DCS_1_BYTE_SHORT_RD_RES:
2907 dev_info(&dc->ndev->dev, "Short read response Packet\n"
2908 "payload_size[0x%x]\n", payload_size);
2910 case GEN_2_BYTE_SHORT_RD_RES:
2912 case DCS_2_BYTE_SHORT_RD_RES:
2914 dev_info(&dc->ndev->dev, "Short read response Packet\n"
2915 "payload_size[0x%x]\n", payload_size);
2919 dev_info(&dc->ndev->dev, "Acknowledge error report response\n"
2920 "Packet payload_size[0x%x]\n", payload_size);
2923 dev_info(&dc->ndev->dev, "Invalid response packet\n");
/*
 * Drain the DSI read FIFO into read_fifo, polling DSI_STATUS up to
 * DSI_DELAY_FOR_READ_FIFO.  Validates that the reported word count fits
 * the FIFO depth, flags line-buffer under/overflow, and verifies the
 * FIFO really is empty after reading.  Optionally dumps and parses the
 * data when enable_read_debug is set.
 */
2930 static int tegra_dsi_read_fifo(struct tegra_dc *dc,
2931 struct tegra_dc_dsi_data *dsi,
/* Keep a copy of the buffer start for the debug parse at the end. */
2939 u8 *read_fifo_cp = read_fifo;
2941 while (poll_time < DSI_DELAY_FOR_READ_FIFO) {
2943 val = tegra_dsi_readl(dsi, DSI_STATUS);
2944 rd_fifo_cnt = val & DSI_STATUS_RD_FIFO_COUNT(0x1f);
/* Count is in 32-bit words, hence the << 2 byte conversion. */
2945 if (rd_fifo_cnt << 2 > DSI_READ_FIFO_DEPTH) {
2946 dev_err(&dc->ndev->dev,
2947 "DSI RD_FIFO_CNT is greater than RD_FIFO_DEPTH\n");
2953 if (rd_fifo_cnt == 0) {
2954 dev_info(&dc->ndev->dev,
2955 "DSI RD_FIFO_CNT is zero\n");
2960 if (val & (DSI_STATUS_LB_UNDERFLOW(0x1) |
2961 DSI_STATUS_LB_OVERFLOW(0x1))) {
2962 dev_warn(&dc->ndev->dev,
2963 "DSI overflow/underflow error\n");
2966 /* Read data from FIFO */
2967 for (i = 0; i < rd_fifo_cnt; i++) {
2968 val = tegra_dsi_readl(dsi, DSI_RD_DATA);
2969 if (enable_read_debug)
2970 dev_info(&dc->ndev->dev,
2971 "Read data[%d]: 0x%x\n", i, val);
2972 memcpy(read_fifo, &val, 4);
2976 /* Make sure all the data is read from the FIFO */
2977 val = tegra_dsi_readl(dsi, DSI_STATUS);
2978 val &= DSI_STATUS_RD_FIFO_COUNT(0x1f);
2980 dev_err(&dc->ndev->dev, "DSI FIFO_RD_CNT not zero"
2981 " even after reading FIFO_RD_CNT words from read fifo\n");
2983 if (enable_read_debug) {
2985 tegra_dsi_parse_read_response(dc, rd_fifo_cnt, read_fifo_cp);
2987 dev_warn(&dc->ndev->dev, "Unexpected read data\n");
/*
 * Read a panel register over DSI: set the maximum return payload size,
 * send a no-parameter DCS read for panel_reg_addr, switch the link to LP
 * read mode if needed, issue a BTA, and drain the response FIFO into
 * read_data.  The previous link state is restored before returning.
 * Serialized by dsi->lock; exported for panel drivers.
 */
2993 int tegra_dsi_read_data(struct tegra_dc *dc,
2994 struct tegra_dc_dsi_data *dsi,
2995 u32 max_ret_payload_size,
2996 u32 panel_reg_addr, u8 *read_data)
2999 struct dsi_status *init_status;
3001 mutex_lock(&dsi->lock);
3002 tegra_dc_io_start(dc);
3003 clk_prepare_enable(dsi->dsi_fixed_clk);
3005 init_status = tegra_dsi_prepare_host_transmission(
3006 dc, dsi, DSI_LP_OP_WRITE);
3007 if (IS_ERR_OR_NULL(init_status)) {
3008 err = PTR_ERR(init_status);
3009 dev_err(&dc->ndev->dev, "DSI host config failed\n");
3013 /* Set max return payload size in words */
3014 err = _tegra_dsi_write_data(dsi, NULL,
3015 dsi_command_max_return_pkt_size,
3016 max_ret_payload_size);
3018 dev_err(&dc->ndev->dev,
3019 "DSI write failed\n");
3023 /* DCS to read given panel register */
3024 err = _tegra_dsi_write_data(dsi, NULL,
3025 dsi_command_dcs_read_with_no_params,
3028 dev_err(&dc->ndev->dev,
3029 "DSI write failed\n");
/* Discard any stale FIFO contents before the actual read. */
3033 tegra_dsi_reset_read_count(dsi);
3035 if (dsi->status.lp_op == DSI_LP_OP_WRITE) {
3036 err = tegra_dsi_set_to_lp_mode(dc, dsi, DSI_LP_OP_READ);
3038 dev_err(&dc->ndev->dev,
3039 "DSI failed to go to LP read mode\n");
3044 err = tegra_dsi_bta(dsi);
3046 dev_err(&dc->ndev->dev,
3047 "DSI IMM BTA timeout\n");
3051 err = tegra_dsi_read_fifo(dc, dsi, read_data);
3053 dev_err(&dc->ndev->dev, "DSI read fifo failure\n");
3057 err = tegra_dsi_restore_state(dc, dsi, init_status);
3059 dev_err(&dc->ndev->dev, "Failed to restore prev state\n");
3060 clk_disable_unprepare(dsi->dsi_fixed_clk);
3061 tegra_dc_io_end(dc);
3062 mutex_unlock(&dsi->lock);
3065 EXPORT_SYMBOL(tegra_dsi_read_data);
/*
 * Verify the panel responds on the DSI link: send a DCS NOP, trigger a
 * BTA, and expect an "acknowledge, no error" trigger message back.  The
 * previous link state is restored before returning.  Exported for panel
 * drivers to probe panel presence.
 */
3067 int tegra_dsi_panel_sanity_check(struct tegra_dc *dc,
3068 struct tegra_dc_dsi_data *dsi)
3071 u8 read_fifo[DSI_READ_FIFO_DEPTH];
3072 struct dsi_status *init_status;
/* 0x05 is the DCS short-write data id used here as a NOP probe. */
3073 static struct tegra_dsi_cmd dsi_nop_cmd =
3074 DSI_CMD_SHORT(0x05, 0x0, 0x0);
3076 tegra_dc_io_start(dc);
3077 clk_prepare_enable(dsi->dsi_fixed_clk);
3079 init_status = tegra_dsi_prepare_host_transmission(
3080 dc, dsi, DSI_LP_OP_WRITE);
3081 if (IS_ERR_OR_NULL(init_status)) {
3082 err = PTR_ERR(init_status);
3083 dev_err(&dc->ndev->dev, "DSI host config failed\n");
3087 err = _tegra_dsi_write_data(dsi, NULL, dsi_nop_cmd.data_id, 0x0);
3089 dev_err(&dc->ndev->dev, "DSI nop write failed\n");
3093 tegra_dsi_reset_read_count(dsi);
3095 if (dsi->status.lp_op == DSI_LP_OP_WRITE) {
3096 err = tegra_dsi_set_to_lp_mode(dc, dsi, DSI_LP_OP_READ);
3098 dev_err(&dc->ndev->dev,
3099 "DSI failed to go to LP read mode\n");
3104 err = tegra_dsi_bta(dsi);
3106 dev_err(&dc->ndev->dev, "DSI BTA failed\n");
3110 err = tegra_dsi_read_fifo(dc, dsi, read_fifo);
3112 dev_err(&dc->ndev->dev, "DSI read fifo failure\n");
/* A healthy panel replies with the ack-no-error trigger message. */
3116 if (read_fifo[0] != DSI_ACK_NO_ERR) {
3117 dev_warn(&dc->ndev->dev,
3118 "Ack no error trigger message not received\n");
3122 err = tegra_dsi_restore_state(dc, dsi, init_status);
3124 dev_err(&dc->ndev->dev, "Failed to restore prev state\n");
3125 clk_disable_unprepare(dsi->dsi_fixed_clk);
3126 tegra_dc_io_end(dc);
3129 EXPORT_SYMBOL(tegra_dsi_panel_sanity_check);
/*
 * Put the DSI lanes into Ultra Low Power Mode: program the ULTRA_LOW_POWER
 * field of DSI_HOST_DSI_CONTROL to ENTER_ULPM, then (when sync points are
 * compiled in) wait for the transition via a syncpt.
 */
3131 static int tegra_dsi_enter_ulpm(struct tegra_dc_dsi_data *dsi)
3136 if (atomic_read(&dsi_syncpt_rst))
3137 tegra_dsi_syncpt_reset(dsi);
/* Read-modify-write: clear the 2-bit ULP field, then set ENTER_ULPM. */
3139 val = tegra_dsi_readl(dsi, DSI_HOST_DSI_CONTROL);
3140 val &= ~DSI_HOST_DSI_CONTROL_ULTRA_LOW_POWER(3);
3141 val |= DSI_HOST_DSI_CONTROL_ULTRA_LOW_POWER(ENTER_ULPM);
3142 tegra_dsi_writel(dsi, val, DSI_HOST_DSI_CONTROL);
3144 #if DSI_USE_SYNC_POINTS
3145 ret = tegra_dsi_syncpt(dsi);
3147 dev_err(&dsi->dc->ndev->dev,
3148 "DSI syncpt for ulpm enter failed\n");
3152 /* TODO: Find exact delay required */
/*
 * Bring the DSI lanes out of Ultra Low Power Mode: program EXIT_ULPM,
 * wait for the transition (syncpt or fixed delay), then return the ULP
 * field to NORMAL.
 */
3160 static int tegra_dsi_exit_ulpm(struct tegra_dc_dsi_data *dsi)
3165 if (atomic_read(&dsi_syncpt_rst))
3166 tegra_dsi_syncpt_reset(dsi);
3168 val = tegra_dsi_readl(dsi, DSI_HOST_DSI_CONTROL);
3169 val &= ~DSI_HOST_DSI_CONTROL_ULTRA_LOW_POWER(3);
3170 val |= DSI_HOST_DSI_CONTROL_ULTRA_LOW_POWER(EXIT_ULPM);
3171 tegra_dsi_writel(dsi, val, DSI_HOST_DSI_CONTROL);
3173 #if DSI_USE_SYNC_POINTS
3174 ret = tegra_dsi_syncpt(dsi);
3176 dev_err(&dsi->dc->ndev->dev,
3177 "DSI syncpt for ulpm exit failed\n");
3181 /* TODO: Find exact delay required */
/* Clear the exit request back to NORMAL once the lanes are awake. */
3186 val = tegra_dsi_readl(dsi, DSI_HOST_DSI_CONTROL);
3187 val &= ~DSI_HOST_DSI_CONTROL_ULTRA_LOW_POWER(0x3);
3188 val |= DSI_HOST_DSI_CONTROL_ULTRA_LOW_POWER(NORMAL);
3189 tegra_dsi_writel(dsi, val, DSI_HOST_DSI_CONTROL);
/*
 * Push a number of display-controller frames out over DSI, switching to
 * DC-driven HS mode first if needed.  In one-shot mode each frame is
 * triggered individually via NC_HOST_TRIG; in continuous mode we simply
 * wait for the frames to elapse.  Afterwards the stream is stopped and,
 * if the link was in LP mode on entry, LP mode is restored.
 */
3194 static void tegra_dsi_send_dc_frames(struct tegra_dc *dc,
3195 struct tegra_dc_dsi_data *dsi,
/* Frame period in ms, rounded up so we never wait less than a frame. */
3199 u32 frame_period = DIV_ROUND_UP(S_TO_MS(1), dsi->info.refresh_rate);
3200 u8 lp_op = dsi->status.lp_op;
3201 bool switch_to_lp = (dsi->status.lphs == DSI_LPHS_IN_LP_MODE);
3203 if (dsi->status.lphs != DSI_LPHS_IN_HS_MODE) {
3204 err = tegra_dsi_set_to_hs_mode(dc, dsi,
3205 TEGRA_DSI_DRIVEN_BY_DC);
3207 dev_err(&dc->ndev->dev,
3208 "Switch to HS host mode failed\n");
3214 * Some panels need DC frames be sent under certain
3215 * conditions. We are working on the right fix for this
3216 * requirement, while using this current fix.
3218 tegra_dsi_start_dc_stream(dc, dsi);
3221 * Send frames in Continuous or One-shot mode.
3223 if (dc->out->flags & TEGRA_DC_OUT_ONE_SHOT_MODE) {
3224 while (no_of_frames--) {
3225 tegra_dc_writel(dc, GENERAL_ACT_REQ | NC_HOST_TRIG,
3226 DC_CMD_STATE_CONTROL);
3227 mdelay(frame_period);
3230 mdelay(no_of_frames * frame_period);
3232 tegra_dsi_stop_dc_stream_at_frame_end(dc, dsi, 2);
3235 err = tegra_dsi_set_to_lp_mode(dc, dsi, lp_op);
3237 dev_err(&dc->ndev->dev,
3238 "DSI failed to go to LP mode\n");
/*
 * Main enable path for the DSI output.  Two branches are visible:
 *  - already-enabled / resume path: exit ULPM, optionally send DC frames,
 *    then send the panel init or late-resume command sequence;
 *  - cold-start path: init the DSI hardware, cycle ULPM around pad
 *    enable, send init commands in LP mode, and switch to DC-driven HS
 *    mode before starting the pixel stream.
 * Serialized by dsi->lock and bracketed by tegra_dc_io_start/end.
 */
3242 static void _tegra_dc_dsi_enable(struct tegra_dc *dc)
3244 struct tegra_dc_dsi_data *dsi = tegra_dc_get_outdata(dc);
3247 mutex_lock(&dsi->lock);
3248 tegra_dc_io_start(dc);
3250 /* Stop DC stream before configuring DSI registers
3251 * to avoid visible glitches on panel during transition
3252 * from bootloader to kernel driver
3254 tegra_dsi_stop_dc_stream(dc, dsi);
3258 if (tegra_dsi_exit_ulpm(dsi) < 0) {
3259 dev_err(&dc->ndev->dev,
3260 "DSI failed to exit ulpm\n");
3265 if (dsi->info.panel_reset) {
3267 * Certain panels need dc frames be sent before
3270 if (dsi->info.panel_send_dc_frames)
3271 tegra_dsi_send_dc_frames(dc, dsi, 2);
3273 err = tegra_dsi_send_panel_cmd(dc, dsi,
3274 dsi->info.dsi_init_cmd,
3275 dsi->info.n_init_cmd);
3277 dev_err(&dc->ndev->dev,
3278 "dsi: error sending dsi init cmd\n");
3281 } else if (dsi->info.dsi_late_resume_cmd) {
3282 err = tegra_dsi_send_panel_cmd(dc, dsi,
3283 dsi->info.dsi_late_resume_cmd,
3284 dsi->info.n_late_resume_cmd);
3286 dev_err(&dc->ndev->dev,
3287 "dsi: error sending late resume cmd\n");
3292 err = tegra_dsi_init_hw(dc, dsi);
3294 dev_err(&dc->ndev->dev,
3295 "dsi: not able to init dsi hardware\n");
/* Enter ULPM, enable the pads, then exit — sequencing required by hw. */
3300 if (tegra_dsi_enter_ulpm(dsi) < 0) {
3301 dev_err(&dc->ndev->dev,
3302 "DSI failed to enter ulpm\n");
3306 tegra_dsi_pad_enable(dsi);
3308 if (tegra_dsi_exit_ulpm(dsi) < 0) {
3309 dev_err(&dc->ndev->dev,
3310 "DSI failed to exit ulpm\n");
3316 * Certain panels need dc frames be sent before
3319 if (dsi->info.panel_send_dc_frames)
3320 tegra_dsi_send_dc_frames(dc, dsi, 2);
3322 err = tegra_dsi_set_to_lp_mode(dc, dsi, DSI_LP_OP_WRITE);
3324 dev_err(&dc->ndev->dev,
3325 "dsi: not able to set to lp mode\n");
3329 err = tegra_dsi_send_panel_cmd(dc, dsi, dsi->info.dsi_init_cmd,
3330 dsi->info.n_init_cmd);
3332 dev_err(&dc->ndev->dev,
3333 "dsi: error while sending dsi init cmd\n");
3337 err = tegra_dsi_set_to_hs_mode(dc, dsi,
3338 TEGRA_DSI_DRIVEN_BY_DC);
3340 dev_err(&dc->ndev->dev,
3341 "dsi: not able to set to hs mode\n");
3345 dsi->enabled = true;
/* Notify the bridge (dsi2lvds/dsi2edp) if one is attached. */
3348 if (dsi->out_ops && dsi->out_ops->enable)
3349 dsi->out_ops->enable(dsi);
3351 if (dsi->status.driven == DSI_DRIVEN_MODE_DC)
3352 tegra_dsi_start_dc_stream(dc, dsi);
3354 tegra_dc_io_end(dc);
3355 mutex_unlock(&dsi->lock);
/*
 * One-time software init: create debugfs entries, select the output-ops
 * table based on whether a DSI-to-LVDS or DSI-to-eDP bridge is enabled,
 * run the bridge's init hook, and initialize the driver's software state.
 */
3358 static void __tegra_dc_dsi_init(struct tegra_dc *dc)
3360 struct tegra_dc_dsi_data *dsi = tegra_dc_get_outdata(dc);
3362 tegra_dc_dsi_debug_create(dsi);
3364 if (dsi->info.dsi2lvds_bridge_enable)
3365 dsi->out_ops = &tegra_dsi2lvds_ops;
3366 else if (dsi->info.dsi2edp_bridge_enable)
3367 dsi->out_ops = &tegra_dsi2edp_ops;
3369 dsi->out_ops = NULL;
3371 if (dsi->out_ops && dsi->out_ops->init)
3372 dsi->out_ops->init(dsi);
3374 tegra_dsi_init_sw(dc, dsi);
/*
 * Deep-copy an array of n_cmd command descriptors from src to dst: the
 * descriptor array is copied wholesale, then each long-packet payload
 * (pdata) is duplicated into its own allocation.  On allocation failure
 * the already-duplicated payloads are freed (free_cmd_pdata label).
 */
3377 static int tegra_dc_dsi_cp_p_cmd(struct tegra_dsi_cmd *src,
3378 struct tegra_dsi_cmd *dst, u16 n_cmd)
3383 memcpy(dst, src, sizeof(*dst) * n_cmd);
3385 for (i = 0; i < n_cmd; i++)
3387 len = sizeof(*src[i].pdata) *
3388 src[i].sp_len_dly.data_len;
3389 dst[i].pdata = kzalloc(len, GFP_KERNEL);
3391 goto free_cmd_pdata;
3392 memcpy(dst[i].pdata, src[i].pdata, len);
/* Error unwind: release every payload duplicated so far. */
3400 kfree(dst[i].pdata);
/*
 * Copy the platform-supplied tegra_dsi_out into the driver's private
 * info, deep-copying the init / early-suspend / late-resume / suspend
 * command sequences, then fill in defaults for any unset fields (panel
 * reset timeout, buffer size, max panel frequency, LP command-mode
 * clocks).  Goto-based unwind frees the command buffers on failure.
 */
3404 static int tegra_dc_dsi_cp_info(struct tegra_dc_dsi_data *dsi,
3405 struct tegra_dsi_out *p_dsi)
3407 struct tegra_dsi_cmd *p_init_cmd;
3408 struct tegra_dsi_cmd *p_early_suspend_cmd = NULL;
3409 struct tegra_dsi_cmd *p_late_resume_cmd = NULL;
3410 struct tegra_dsi_cmd *p_suspend_cmd;
3413 if (p_dsi->n_data_lanes > MAX_DSI_DATA_LANES)
3416 p_init_cmd = kzalloc(sizeof(*p_init_cmd) *
3417 p_dsi->n_init_cmd, GFP_KERNEL);
3421 if (p_dsi->dsi_early_suspend_cmd) {
3422 p_early_suspend_cmd = kzalloc(sizeof(*p_early_suspend_cmd) *
3423 p_dsi->n_early_suspend_cmd,
3425 if (!p_early_suspend_cmd) {
3427 goto err_free_init_cmd;
3431 if (p_dsi->dsi_late_resume_cmd) {
3432 p_late_resume_cmd = kzalloc(sizeof(*p_late_resume_cmd) *
3433 p_dsi->n_late_resume_cmd,
3435 if (!p_late_resume_cmd) {
3437 goto err_free_p_early_suspend_cmd;
3441 p_suspend_cmd = kzalloc(sizeof(*p_suspend_cmd) * p_dsi->n_suspend_cmd,
3443 if (!p_suspend_cmd) {
3445 goto err_free_p_late_resume_cmd;
/* Shallow struct copy first; command pointers are fixed up below. */
3448 memcpy(&dsi->info, p_dsi, sizeof(dsi->info));
3450 /* Copy panel init cmd */
3451 err = tegra_dc_dsi_cp_p_cmd(p_dsi->dsi_init_cmd,
3452 p_init_cmd, p_dsi->n_init_cmd);
3455 dsi->info.dsi_init_cmd = p_init_cmd;
3457 /* Copy panel early suspend cmd */
3458 if (p_dsi->dsi_early_suspend_cmd) {
3459 err = tegra_dc_dsi_cp_p_cmd(p_dsi->dsi_early_suspend_cmd,
3460 p_early_suspend_cmd,
3461 p_dsi->n_early_suspend_cmd);
3464 dsi->info.dsi_early_suspend_cmd = p_early_suspend_cmd;
3467 /* Copy panel late resume cmd */
3468 if (p_dsi->dsi_late_resume_cmd) {
3469 err = tegra_dc_dsi_cp_p_cmd(p_dsi->dsi_late_resume_cmd,
3471 p_dsi->n_late_resume_cmd);
3474 dsi->info.dsi_late_resume_cmd = p_late_resume_cmd;
3477 /* Copy panel suspend cmd */
3478 err = tegra_dc_dsi_cp_p_cmd(p_dsi->dsi_suspend_cmd, p_suspend_cmd,
3479 p_dsi->n_suspend_cmd);
3482 dsi->info.dsi_suspend_cmd = p_suspend_cmd;
3484 if (!dsi->info.panel_reset_timeout_msec)
3485 dsi->info.panel_reset_timeout_msec =
3486 DEFAULT_PANEL_RESET_TIMEOUT;
3488 if (!dsi->info.panel_buffer_size_byte)
3489 dsi->info.panel_buffer_size_byte = DEFAULT_PANEL_BUFFER_BYTE;
3491 if (!dsi->info.max_panel_freq_khz) {
3492 dsi->info.max_panel_freq_khz = DEFAULT_MAX_DSI_PHY_CLK_KHZ;
/* Burst mode needs a real max frequency; fall back to lowest speed. */
3494 if (dsi->info.video_burst_mode >
3495 TEGRA_DSI_VIDEO_NONE_BURST_MODE_WITH_SYNC_END){
/* NOTE(review): adjacent string literals concatenate without a space:
 * "...freq_khzis not set..." — the first literal should end with ' '. */
3496 dev_err(&dsi->dc->ndev->dev, "DSI: max_panel_freq_khz"
3497 "is not set for DSI burst mode.\n");
3498 dsi->info.video_burst_mode =
3499 TEGRA_DSI_VIDEO_BURST_MODE_LOWEST_SPEED;
3503 if (!dsi->info.lp_cmd_mode_freq_khz)
3504 dsi->info.lp_cmd_mode_freq_khz = DEFAULT_LP_CMD_MODE_CLK_KHZ;
3506 if (!dsi->info.chip_id || !dsi->info.chip_rev)
3507 dev_warn(&dsi->dc->ndev->dev,
3508 "DSI: Failed to get chip info\n");
3510 if (!dsi->info.lp_read_cmd_mode_freq_khz)
3511 dsi->info.lp_read_cmd_mode_freq_khz =
3512 dsi->info.lp_cmd_mode_freq_khz;
3514 /* host mode is for testing only */
3515 dsi->driven_mode = TEGRA_DSI_DRIVEN_BY_DC;
3519 kfree(p_suspend_cmd);
3520 err_free_p_late_resume_cmd:
3521 kfree(p_late_resume_cmd);
3522 err_free_p_early_suspend_cmd:
3523 kfree(p_early_suspend_cmd);
3529 /* returns next null enumeration from tegra_dsi_instance */
/*
 * Scan the tegra_dsi_instance[] table and return the index of the first
 * unused (NULL) slot, i.e. the next free controller number.
 */
3530 static inline int tegra_dsi_get_enumeration(void)
3533 for (; i < MAX_DSI_INSTANCE; i++) {
3534 if (!tegra_dsi_instance[i])
/*
 * Probe-time init for one DSI controller: pick a free instance slot,
 * allocate driver state, map the register aperture (ganged-mode DSI-A/B
 * resources when ganged), acquire the dsi/dsi-fixed/dc clocks, copy the
 * platform data, and run __tegra_dc_dsi_init().  Goto-based unwind
 * releases clocks and the memory region on failure.
 */
3540 static int _tegra_dc_dsi_init(struct tegra_dc *dc)
3542 struct tegra_dc_dsi_data *dsi;
3543 struct resource *res;
3544 struct resource *base_res;
3546 struct clk *dc_clk = NULL;
3547 struct clk *dsi_clk = NULL;
3548 struct clk *dsi_fixed_clk = NULL;
3549 struct tegra_dsi_out *dsi_pdata;
3553 if (dc->pdata->default_out->dsi->dsi_instance)
3556 dsi_enum = tegra_dsi_get_enumeration();
3559 dev_err(&dc->ndev->dev, "dsi: invalid enum retured\n");
/* NOTE(review): "retured" is a typo for "returned" in the log above. */
3563 dsi = kzalloc(sizeof(*dsi), GFP_KERNEL);
3565 dev_err(&dc->ndev->dev, "dsi: memory allocation failed\n");
3568 tegra_dsi_instance[dsi_enum] = dsi;
/* Ganged mode uses per-link register banks (dsia/dsib). */
3570 if (dc->out->dsi->ganged_type) {
3572 res = platform_get_resource_byname(dc->ndev,
3574 "ganged_dsib_regs");
3576 res = platform_get_resource_byname(dc->ndev,
3578 "ganged_dsia_regs");
3580 res = platform_get_resource_byname(dc->ndev,
3586 dev_err(&dc->ndev->dev, "dsi: no mem resource\n");
3591 base_res = request_mem_region(res->start, resource_size(res),
3594 dev_err(&dc->ndev->dev, "dsi: request_mem_region failed\n");
3599 base = ioremap(res->start, resource_size(res));
3601 dev_err(&dc->ndev->dev, "dsi: registers can't be mapped\n");
3603 goto err_release_regs;
3606 dsi_pdata = dc->pdata->default_out->dsi;
3608 dev_err(&dc->ndev->dev, "dsi: dsi data not available\n");
3609 goto err_release_regs;
3613 dsi_clk = clk_get(&dc->ndev->dev, "dsib");
3615 dsi_clk = clk_get(&dc->ndev->dev, "dsia");
3617 dsi_fixed_clk = clk_get(&dc->ndev->dev, "dsi-fixed");
3619 if (IS_ERR_OR_NULL(dsi_clk) || IS_ERR_OR_NULL(dsi_fixed_clk)) {
3620 dev_err(&dc->ndev->dev, "dsi: can't get clock\n");
3622 goto err_release_regs;
3625 dc_clk = clk_get_sys(dev_name(&dc->ndev->dev), NULL);
3626 if (IS_ERR_OR_NULL(dc_clk)) {
3627 dev_err(&dc->ndev->dev, "dsi: dc clock %s unavailable\n",
3628 dev_name(&dc->ndev->dev));
3630 goto err_dsi_clk_put;
3633 mutex_init(&dsi->lock);
3634 dsi->controller_index = dsi_enum;
3637 dsi->base_res = base_res;
3638 dsi->dc_clk = dc_clk;
3639 dsi->dsi_clk = dsi_clk;
3640 dsi->dsi_fixed_clk = dsi_fixed_clk;
3642 err = tegra_dc_dsi_cp_info(dsi, dsi_pdata);
3644 goto err_dc_clk_put;
3646 tegra_dc_set_outdata(dc, dsi);
3647 __tegra_dc_dsi_init(dc);
/* Error unwind labels (partially visible): put clocks, release region. */
3655 clk_put(dsi_fixed_clk);
3657 release_resource(base_res);
/*
 * Teardown for one DSI controller: run the bridge destroy hook, free the
 * deep-copied init-command payloads, stop the DC stream and HS clock if
 * active, power down the core logic, release the register region and
 * clocks, and destroy the lock.
 */
3664 static void _tegra_dc_dsi_destroy(struct tegra_dc *dc)
3666 struct tegra_dc_dsi_data *dsi = tegra_dc_get_outdata(dc);
3670 mutex_lock(&dsi->lock);
3672 if (dsi->out_ops && dsi->out_ops->destroy)
3673 dsi->out_ops->destroy(dsi);
3675 /* free up the pdata */
3676 for (i = 0; i < dsi->info.n_init_cmd; i++) {
3677 if (dsi->info.dsi_init_cmd[i].pdata)
3678 kfree(dsi->info.dsi_init_cmd[i].pdata);
3680 kfree(dsi->info.dsi_init_cmd);
3682 /* Disable dc stream */
3683 if (dsi->status.dc_stream == DSI_DC_STREAM_ENABLE)
3684 tegra_dsi_stop_dc_stream_at_frame_end(dc, dsi, 2);
3686 /* Disable dsi phy clock */
3687 if (dsi->status.clk_out == DSI_PHYCLK_OUT_EN)
3688 tegra_dsi_hs_clk_out_disable(dc, dsi);
/* Power down the DSI core logic before releasing resources. */
3690 val = DSI_POWER_CONTROL_LEG_DSI_ENABLE(TEGRA_DSI_DISABLE);
3691 tegra_dsi_writel(dsi, val, DSI_POWER_CONTROL);
3694 release_resource(dsi->base_res);
3696 clk_put(dsi->dc_clk);
3697 clk_put(dsi->dsi_clk);
3699 mutex_unlock(&dsi->lock);
3701 mutex_destroy(&dsi->lock);
/*
 * Gate or ungate the PLLD output that feeds this controller: the CSI
 * output for the second DSI instance, the DSI output otherwise.  The
 * base PLL is found two levels up the dsi_clk parent chain.
 */
3705 static void tegra_dsi_config_phy_clk(struct tegra_dc_dsi_data *dsi,
3708 struct clk *parent_clk = NULL;
3709 struct clk *base_clk = NULL;
3711 parent_clk = clk_get_parent(dsi->dsi_clk);
3712 base_clk = clk_get_parent(parent_clk);
3713 if (dsi->info.dsi_instance)
3714 tegra_clk_cfg_ex(base_clk,
3715 TEGRA_CLK_PLLD_CSI_OUT_ENB,
3718 tegra_clk_cfg_ex(base_clk,
3719 TEGRA_CLK_PLLD_DSI_OUT_ENB,
/*
 * Suspend the DSI host at the requested aggressiveness level.  The
 * switch cases appear to cascade (no break visible between levels), so
 * LV2 performs ULPM entry + pad power-down + core power-down and then
 * also the LV1 (phy clock off) and LV0 (source clock off) actions.
 */
3723 static int _tegra_dsi_host_suspend(struct tegra_dc *dc,
3724 struct tegra_dc_dsi_data *dsi,
3730 switch (suspend_aggr) {
3731 case DSI_HOST_SUSPEND_LV2:
3733 err = tegra_dsi_enter_ulpm(dsi);
3735 dev_err(&dc->ndev->dev,
3736 "DSI failed to enter ulpm\n");
/* Power down the data, clock, and pull-down pads. */
3741 val = tegra_dsi_readl(dsi, DSI_PAD_CONTROL);
3742 val |= DSI_PAD_CONTROL_PAD_PDIO(0x3) |
3743 DSI_PAD_CONTROL_PAD_PDIO_CLK(0x1) |
3744 DSI_PAD_CONTROL_PAD_PULLDN_ENAB(0x1);
3745 tegra_dsi_writel(dsi, val, DSI_PAD_CONTROL);
3747 /* Suspend core-logic */
3748 val = DSI_POWER_CONTROL_LEG_DSI_ENABLE(TEGRA_DSI_DISABLE);
3749 tegra_dsi_writel(dsi, val, DSI_POWER_CONTROL);
3751 case DSI_HOST_SUSPEND_LV1:
3752 /* Disable dsi fast and slow clock */
3753 tegra_dsi_config_phy_clk(dsi, TEGRA_DSI_DISABLE);
3755 case DSI_HOST_SUSPEND_LV0:
3756 /* Disable dsi source clock */
3757 tegra_dsi_clk_disable(dsi);
3759 case DSI_NO_SUSPEND:
/* NOTE(review): adjacent literals concatenate as "aggressivenessis";
 * the first string should end with a space. */
3762 dev_err(&dc->ndev->dev, "DSI suspend aggressiveness"
3763 "is not supported.\n");
/*
 * Resume the DSI host, mirroring _tegra_dsi_host_suspend() level by
 * level: re-enable clocks (LV0/LV1), and for LV2 additionally re-power
 * the core, cycle ULPM, and re-enable the pads so the lanes come back up
 * cleanly.
 */
3771 static int _tegra_dsi_host_resume(struct tegra_dc *dc,
3772 struct tegra_dc_dsi_data *dsi,
3778 switch (dsi->info.suspend_aggr) {
3779 case DSI_HOST_SUSPEND_LV0:
3780 tegra_dsi_clk_enable(dsi);
3782 case DSI_HOST_SUSPEND_LV1:
3783 tegra_dsi_config_phy_clk(dsi, TEGRA_DSI_ENABLE);
3784 tegra_dsi_clk_enable(dsi);
3786 case DSI_HOST_SUSPEND_LV2:
3787 tegra_dsi_config_phy_clk(dsi, TEGRA_DSI_ENABLE);
3788 tegra_dsi_clk_enable(dsi);
/* Re-enable the core logic powered down during LV2 suspend. */
3790 tegra_dsi_writel(dsi,
3791 DSI_POWER_CONTROL_LEG_DSI_ENABLE(TEGRA_DSI_ENABLE),
3795 err = tegra_dsi_enter_ulpm(dsi);
3797 dev_err(&dc->ndev->dev,
3798 "DSI failed to enter ulpm\n");
/* Un-power-down the pads before exiting ULPM. */
3802 val = tegra_dsi_readl(dsi, DSI_PAD_CONTROL);
3803 val &= ~(DSI_PAD_CONTROL_PAD_PDIO(0x3) |
3804 DSI_PAD_CONTROL_PAD_PDIO_CLK(0x1) |
3805 DSI_PAD_CONTROL_PAD_PULLDN_ENAB(0x1));
3806 tegra_dsi_writel(dsi, val, DSI_PAD_CONTROL);
3808 if (tegra_dsi_exit_ulpm(dsi) < 0) {
3809 dev_err(&dc->ndev->dev,
3810 "DSI failed to exit ulpm\n");
3815 case DSI_NO_SUSPEND:
/* NOTE(review): "aggressivenes" typo and missing space between the
 * concatenated string literals. */
3818 dev_err(&dc->ndev->dev, "DSI suspend aggressivenes"
3819 "is not supported.\n");
/*
 * Public host-suspend entry: no-op if already suspended, otherwise mark
 * the host suspended, stop the DC stream at frame end, and delegate to
 * _tegra_dsi_host_suspend() at the platform-configured aggressiveness.
 * Serialized by dsi->host_lock.
 */
3827 static int tegra_dsi_host_suspend(struct tegra_dc *dc)
3830 struct tegra_dc_dsi_data *dsi = tegra_dc_get_outdata(dc);
3835 if (dsi->host_suspended)
3838 mutex_lock(&dsi->host_lock);
3839 tegra_dc_io_start(dc);
3840 dsi->host_suspended = true;
3842 tegra_dsi_stop_dc_stream_at_frame_end(dc, dsi, 2);
3844 err = _tegra_dsi_host_suspend(dc, dsi, dsi->info.suspend_aggr);
3846 dev_err(&dc->ndev->dev,
3847 "DSI host suspend failed\n");
3849 tegra_dc_io_end(dc);
3850 mutex_unlock(&dsi->host_lock);
/*
 * Full power-down path: switch to LP mode, send the panel's suspend
 * command sequence (with optional trailing DC frames some panels need),
 * enter ULPM, power down pads and core logic, disable all DSI clocks,
 * and drop the AVDD regulator.  Marks the controller disabled and
 * host-suspended.
 */
3854 static int tegra_dsi_deep_sleep(struct tegra_dc *dc,
3855 struct tegra_dc_dsi_data *dsi)
3863 err = tegra_dsi_set_to_lp_mode(dc, dsi, DSI_LP_OP_WRITE);
3865 dev_err(&dc->ndev->dev,
3866 "DSI failed to go to LP mode\n");
3870 /* Suspend DSI panel */
3871 err = tegra_dsi_send_panel_cmd(dc, dsi,
3872 dsi->info.dsi_suspend_cmd,
3873 dsi->info.n_suspend_cmd);
3876 * Certain panels need dc frames be sent after
3877 * putting panel to sleep.
3879 if (dsi->info.panel_send_dc_frames)
3880 tegra_dsi_send_dc_frames(dc, dsi, 2);
3883 dev_err(&dc->ndev->dev,
3884 "dsi: Error sending suspend cmd\n");
3889 err = tegra_dsi_enter_ulpm(dsi);
3891 dev_err(&dc->ndev->dev,
3892 "DSI failed to enter ulpm\n");
/* Power down data/clock pads and enable pull-downs. */
3897 val = DSI_PAD_CONTROL_PAD_PDIO(0x3) |
3898 DSI_PAD_CONTROL_PAD_PDIO_CLK(0x1) |
3899 DSI_PAD_CONTROL_PAD_PULLDN_ENAB(TEGRA_DSI_ENABLE);
3900 tegra_dsi_writel(dsi, val, DSI_PAD_CONTROL);
3902 /* Suspend core-logic */
3903 val = DSI_POWER_CONTROL_LEG_DSI_ENABLE(TEGRA_DSI_DISABLE);
3904 tegra_dsi_writel(dsi, val, DSI_POWER_CONTROL);
3906 /* Disable dsi fast and slow clock */
3907 tegra_dsi_config_phy_clk(dsi, TEGRA_DSI_DISABLE);
3909 /* Disable dsi source clock */
3910 tegra_dsi_clk_disable(dsi);
3912 regulator_disable(dsi->avdd_dsi_csi);
3914 dsi->enabled = false;
3915 dsi->host_suspended = true;
/*
 * Resume the DSI host interface after tegra_dsi_host_suspend().
 * Cancels any pending idle work, re-applies the configuration matching
 * the suspend aggressiveness level, and restarts the DC pixel stream.
 * Returns early (with host_lock dropped) if not actually suspended.
 * NOTE(review): lines are missing from this extract (the `{`, `err`
 * declaration, the early `return`, the `if (err < 0)` guard, the final
 * return and `}`) — verify against the full file.
 */
3922 static int tegra_dsi_host_resume(struct tegra_dc *dc)
3925 struct tegra_dc_dsi_data *dsi = tegra_dc_get_outdata(dc);
3930 mutex_lock(&dsi->host_lock);
/* Make sure a queued idle-suspend can't race with this resume. */
3931 cancel_delayed_work_sync(&dsi->idle_work);
3932 if (!dsi->host_suspended) {
3933 mutex_unlock(&dsi->host_lock);
3937 tegra_dc_io_start(dc);
3939 err = _tegra_dsi_host_resume(dc, dsi, dsi->info.suspend_aggr);
3941 dev_err(&dc->ndev->dev,
3942 "DSI host resume failed\n");
3946 tegra_dsi_start_dc_stream(dc, dsi);
3947 dsi->host_suspended = false;
3949 tegra_dc_io_end(dc);
3950 mutex_unlock(&dsi->host_lock);
/*
 * Disable one DSI controller instance. The host must be fully resumed
 * first so register state is coherent; then the DC stream is stopped
 * and either a full deep-sleep (power_saving_suspend) or a lighter
 * early-suspend-command + ULPM path is taken.
 * NOTE(review): this extract is missing the `{`, an `err` declaration,
 * early returns, some `else`/closing braces — verify against the full
 * file. Also note tegra_dc_io_end() at the end appears to run after
 * mutex_unlock(), the reverse of the io_start/mutex_lock order above.
 */
3954 static void _tegra_dc_dsi_disable(struct tegra_dc *dc)
3957 struct tegra_dc_dsi_data *dsi = tegra_dc_get_outdata(dc);
/* Can't touch the controller while host-suspended; bring it back up. */
3959 if (dsi->host_suspended)
3960 tegra_dsi_host_resume(dc);
3962 mutex_lock(&dsi->lock);
3963 tegra_dc_io_start(dc);
3965 if (dsi->status.dc_stream == DSI_DC_STREAM_ENABLE)
3966 tegra_dsi_stop_dc_stream_at_frame_end(dc, dsi, 2);
/* Board/panel-specific disable hook, if provided. */
3968 if (dsi->out_ops && dsi->out_ops->disable)
3969 dsi->out_ops->disable(dsi);
3971 if (dsi->info.power_saving_suspend) {
3972 if (tegra_dsi_deep_sleep(dc, dsi) < 0) {
3973 dev_err(&dc->ndev->dev,
3974 "DSI failed to enter deep sleep\n");
/* Non-power-saving path: optional early-suspend cmds, then ULPM. */
3978 if (dsi->info.dsi_early_suspend_cmd) {
3979 err = tegra_dsi_send_panel_cmd(dc, dsi,
3980 dsi->info.dsi_early_suspend_cmd,
3981 dsi->info.n_early_suspend_cmd);
3983 dev_err(&dc->ndev->dev,
3984 "dsi: Error sending early suspend cmd\n");
3990 if (tegra_dsi_enter_ulpm(dsi) < 0) {
3991 dev_err(&dc->ndev->dev,
3992 "DSI failed to enter ulpm\n");
3998 mutex_unlock(&dsi->lock);
3999 tegra_dc_io_end(dc);
/*
 * DC out-ops disable entry point. Disables instance A; for ganged
 * (dual-link) DSI it temporarily swaps outdata to instance B, disables
 * it too, then restores instance A as the active outdata.
 * NOTE(review): braces appear truncated in this extract.
 */
4002 static void tegra_dc_dsi_disable(struct tegra_dc *dc)
4004 _tegra_dc_dsi_disable(dc);
4006 if (dc->out->dsi->ganged_type) {
4007 tegra_dc_set_outdata(dc, tegra_dsi_instance[DSI_INSTANCE_1]);
4008 _tegra_dc_dsi_disable(dc);
4009 tegra_dc_set_outdata(dc, tegra_dsi_instance[DSI_INSTANCE_0]);
/*
 * System-suspend one DSI instance. Resumes the host first if it was
 * host-suspended, invokes the board suspend hook, and — unless the
 * panel is already in power-saving deep sleep — exits ULPM and then
 * enters deep sleep for the suspend.
 * NOTE(review): missing lines in this extract (braces, early returns,
 * likely a `dsi->enabled` check) — verify against the full file. Also
 * note tegra_dc_io_start() here is taken BEFORE dsi->lock, while
 * _tegra_dc_dsi_disable() locks first; confirm the intended order.
 */
4015 static void _tegra_dc_dsi_suspend(struct tegra_dc *dc)
4017 struct tegra_dc_dsi_data *dsi;
4019 dsi = tegra_dc_get_outdata(dc);
4024 if (dsi->host_suspended)
4025 tegra_dsi_host_resume(dc);
4027 tegra_dc_io_start(dc);
4028 mutex_lock(&dsi->lock);
4030 if (dsi->out_ops && dsi->out_ops->suspend)
4031 dsi->out_ops->suspend(dsi);
/* If not already in power-saving sleep, transition to deep sleep now. */
4033 if (!dsi->info.power_saving_suspend) {
4035 if (tegra_dsi_exit_ulpm(dsi) < 0) {
4036 dev_err(&dc->ndev->dev,
4037 "DSI failed to exit ulpm");
4042 if (tegra_dsi_deep_sleep(dc, dsi) < 0) {
4043 dev_err(&dc->ndev->dev,
4044 "DSI failed to enter deep sleep\n");
4049 mutex_unlock(&dsi->lock);
4050 tegra_dc_io_end(dc);
/*
 * System-resume one DSI instance. Intentionally minimal: only the
 * board resume hook runs here, because (per the original comment)
 * tegra_dc_dsi_enable() reprograms the controller from scratch.
 */
4053 static void _tegra_dc_dsi_resume(struct tegra_dc *dc)
4055 struct tegra_dc_dsi_data *dsi;
4057 dsi = tegra_dc_get_outdata(dc);
4059 /* No dsi config required since tegra_dc_dsi_enable
4060 * will reconfigure the controller from scratch
4063 if (dsi->out_ops && dsi->out_ops->resume)
4064 dsi->out_ops->resume(dsi);
/*
 * DC out-ops suspend entry point. Suspends instance A; for ganged DSI
 * it swaps outdata to instance B, suspends it, then restores A.
 * NOTE(review): braces appear truncated in this extract.
 */
4067 static void tegra_dc_dsi_suspend(struct tegra_dc *dc)
4069 _tegra_dc_dsi_suspend(dc);
4071 if (dc->out->dsi->ganged_type) {
4072 tegra_dc_set_outdata(dc, tegra_dsi_instance[DSI_INSTANCE_1]);
4073 _tegra_dc_dsi_suspend(dc);
4074 tegra_dc_set_outdata(dc, tegra_dsi_instance[DSI_INSTANCE_0]);
/*
 * DC out-ops resume entry point. Resumes instance A; for ganged DSI
 * it swaps outdata to instance B, resumes it, then restores A.
 * NOTE(review): braces appear truncated in this extract.
 */
4078 static void tegra_dc_dsi_resume(struct tegra_dc *dc)
4080 _tegra_dc_dsi_resume(dc);
4082 if (dc->out->dsi->ganged_type) {
4083 tegra_dc_set_outdata(dc, tegra_dsi_instance[DSI_INSTANCE_1]);
4084 _tegra_dc_dsi_resume(dc);
4085 tegra_dc_set_outdata(dc, tegra_dsi_instance[DSI_INSTANCE_0]);
/*
 * DC out-ops init entry point. Initializes DSI instance A, acquires
 * the avdd_dsi_csi regulator and the MIPI calibration handle, and for
 * ganged DSI also initializes instance B (sharing the same regulator
 * and mipi_cal), restoring instance A as the active outdata.
 * Error paths unwind in reverse: mipi_cal, regulator, instance A.
 * NOTE(review): the goto statements/labels, `if (err < 0)` guards and
 * braces are missing from this extract — verify the unwind ordering
 * against the full file.
 */
4090 static int tegra_dc_dsi_init(struct tegra_dc *dc)
4092 struct tegra_dc_dsi_data *dsi;
4095 err = _tegra_dc_dsi_init(dc);
4097 dev_err(&dc->ndev->dev,
4098 "dsi: Instance A init failed\n");
4102 dsi = tegra_dc_get_outdata(dc);
4104 dsi->avdd_dsi_csi = regulator_get(&dc->ndev->dev, "avdd_dsi_csi");
4105 if (IS_ERR_OR_NULL(dsi->avdd_dsi_csi)) {
/* NOTE(review): PTR_ERR() on a NULL regulator yields 0, not an error
 * code — the IS_ERR_OR_NULL/PTR_ERR pairing is worth double-checking. */
4106 dev_err(&dc->ndev->dev, "dsi: avdd_dsi_csi reg get failed\n");
4107 err = PTR_ERR(dsi->avdd_dsi_csi);
4111 dsi->mipi_cal = tegra_mipi_cal_init_sw(dc);
4112 if (IS_ERR(dsi->mipi_cal)) {
4113 dev_err(&dc->ndev->dev, "dsi: mipi_cal sw init failed\n");
4114 err = PTR_ERR(dsi->mipi_cal);
/* Ganged (dual-link) panels need the second controller brought up too. */
4118 if (dc->out->dsi->ganged_type) {
4119 err = _tegra_dc_dsi_init(dc);
4121 dev_err(&dc->ndev->dev,
4122 "dsi: Instance B init failed\n");
/* Instance B shares instance A's regulator and mipi_cal handles. */
4125 tegra_dsi_instance[DSI_INSTANCE_1]->avdd_dsi_csi =
4127 tegra_dsi_instance[DSI_INSTANCE_1]->mipi_cal =
4129 tegra_dc_set_outdata(dc, tegra_dsi_instance[DSI_INSTANCE_0]);
/* Error unwind (labels truncated in this extract). */
4133 tegra_mipi_cal_destroy(dc);
4135 regulator_put(dsi->avdd_dsi_csi);
4137 _tegra_dc_dsi_destroy(dc);
/*
 * DC out-ops destroy entry point. Caches the shared regulator pointer
 * before tearing down instance A (and instance B for ganged DSI), then
 * releases the regulator and the MIPI calibration handle.
 * NOTE(review): braces appear truncated in this extract.
 */
4142 static void tegra_dc_dsi_destroy(struct tegra_dc *dc)
4144 struct regulator *avdd_dsi_csi;
4145 struct tegra_dc_dsi_data *dsi = tegra_dc_get_outdata(dc);
/* Save the regulator now — _tegra_dc_dsi_destroy frees dsi state. */
4147 avdd_dsi_csi = dsi->avdd_dsi_csi;
4149 _tegra_dc_dsi_destroy(dc);
4151 if (dc->out->dsi->ganged_type) {
4152 tegra_dc_set_outdata(dc, tegra_dsi_instance[DSI_INSTANCE_1]);
4153 _tegra_dc_dsi_destroy(dc);
4154 tegra_dc_set_outdata(dc, tegra_dsi_instance[DSI_INSTANCE_0]);
4157 regulator_put(avdd_dsi_csi);
4158 tegra_mipi_cal_destroy(dc);
/*
 * DC out-ops enable entry point. Enables instance A; for ganged DSI
 * it swaps outdata to instance B, enables it, then restores A.
 * NOTE(review): braces appear truncated in this extract.
 */
4161 static void tegra_dc_dsi_enable(struct tegra_dc *dc)
4163 _tegra_dc_dsi_enable(dc);
4165 if (dc->out->dsi->ganged_type) {
4166 tegra_dc_set_outdata(dc, tegra_dsi_instance[DSI_INSTANCE_1]);
4167 _tegra_dc_dsi_enable(dc);
4168 tegra_dc_set_outdata(dc, tegra_dsi_instance[DSI_INSTANCE_0]);
/*
 * DC out-ops setup_clk entry point. Selects the PLL parent for the
 * display clock (pll_d_out0 for the DC clock or DSI instance 0,
 * pll_d2_out0 for DSI instance 1, unless overridden by
 * dc->out->parent_clk), enables the matching PLLD output, programs the
 * base PLL rate derived from the pixel clock and shift_clk_div, and
 * returns the rounded pixel clock rate.
 * NOTE(review): lines are missing from this extract (the `{`, the
 * declaration of `rate`, `else` lines between the clk==dc->clk /
 * dsi_instance branches, closing braces) — verify against the full
 * file. Also note clk_get_sys() results are never clk_put().
 */
4172 static long tegra_dc_dsi_setup_clk(struct tegra_dc *dc, struct clk *clk)
4175 struct clk *parent_clk;
4176 struct clk *base_clk;
/* DC clock path: parent on pll_d_out0 (or the board-chosen parent). */
4178 if (clk == dc->clk) {
4179 parent_clk = clk_get_sys(NULL,
4180 dc->out->parent_clk ? : "pll_d_out0");
4181 base_clk = clk_get_parent(parent_clk);
4182 tegra_clk_cfg_ex(base_clk,
4183 TEGRA_CLK_PLLD_DSI_OUT_ENB, 1);
/* DSI clock path: instance 1 uses pll_d2_out0, instance 0 pll_d_out0. */
4185 if (dc->pdata->default_out->dsi->dsi_instance) {
4186 parent_clk = clk_get_sys(NULL,
4187 dc->out->parent_clk ? : "pll_d2_out0");
4188 base_clk = clk_get_parent(parent_clk);
/* NOTE(review): CSI_OUT_ENB (not DSI) on the instance-1 path — this
 * matches how the pad bricks are muxed on Tegra, but confirm. */
4189 tegra_clk_cfg_ex(base_clk,
4190 TEGRA_CLK_PLLD_CSI_OUT_ENB, 1);
4192 parent_clk = clk_get_sys(NULL,
4193 dc->out->parent_clk ? : "pll_d_out0");
4194 base_clk = clk_get_parent(parent_clk);
4195 tegra_clk_cfg_ex(base_clk,
4196 TEGRA_CLK_PLLD_DSI_OUT_ENB, 1);
4200 /* divide by 1000 to avoid overflow */
4201 dc->mode.pclk /= 1000;
/* rate is therefore in kHz here; x2 because the PLL output is halved
 * by the fixed out0 divider — TODO confirm the factor's origin. */
4202 rate = (dc->mode.pclk * dc->shift_clk_div.mul * 2)
4203 / dc->shift_clk_div.div;
/* Restore pclk — the /1000 above must not leak out of this function. */
4205 dc->mode.pclk *= 1000;
4207 if (rate != clk_get_rate(base_clk))
4208 clk_set_rate(base_clk, rate);
4210 if (clk_get_parent(clk) != parent_clk)
4211 clk_set_parent(clk, parent_clk);
4213 return tegra_dc_pclk_round_rate(dc, dc->mode.pclk);
4216 struct tegra_dc_out_ops tegra_dc_dsi_ops = {
4217 .init = tegra_dc_dsi_init,
4218 .destroy = tegra_dc_dsi_destroy,
4219 .enable = tegra_dc_dsi_enable,
4220 .disable = tegra_dc_dsi_disable,
4221 .hold = tegra_dc_dsi_hold_host,
4222 .release = tegra_dc_dsi_release_host,
4224 .suspend = tegra_dc_dsi_suspend,
4225 .resume = tegra_dc_dsi_resume,
4227 .setup_clk = tegra_dc_dsi_setup_clk,