2 * drivers/platform/tegra/tegra21_emc_cc_r21012.c
4 * Copyright (c) 2014-2015, NVIDIA CORPORATION. All rights reserved.
6 * This program is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License version 2 as
8 * published by the Free Software Foundation.
10 * This program is distributed in the hope that it will be useful, but WITHOUT
11 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
12 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
16 #include <linux/kernel.h>
18 #include <linux/clk.h>
19 #include <linux/delay.h>
20 #include <linux/platform_data/tegra_emc_pdata.h>
22 /* Select v21015 versions of some functions. */
23 #define __TEGRA_EMC_V21015
25 #include <linux/platform/tegra/tegra21_emc.h>
26 #include <linux/platform/tegra/mc-regs-t21x.h>
31 * This clock change is actually equivalent to 21018 now.
33 #define DVFS_CLOCK_CHANGE_VERSION 21019
34 #define EMC_PRELOCK_VERSION 2101
/*
 * Disable the EMC digital DLL.
 *
 * Clears CFG_DLL_EN in EMC_CFG_DIG_DLL, latches the change with a timing
 * update, then polls until the enable bit reads back 0 on channel 0 (and on
 * channel 1 when @channel_mode == DUAL_CHANNEL).
 *
 * NOTE(review): this excerpt is missing interleaved source lines (the opening
 * brace and the local declaration of emc_cfg_dig_dll, orig lines 37-39).
 */
36 void dll_disable(int channel_mode)
40 emc_cfg_dig_dll = emc_readl(EMC_CFG_DIG_DLL);
41 emc_cfg_dig_dll &= ~EMC_CFG_DIG_DLL_CFG_DLL_EN;
42 emc_writel(emc_cfg_dig_dll, EMC_CFG_DIG_DLL);
/* Latch the register write into the active configuration. */
43 emc_timing_update(channel_mode);
/* Wait for DLL_EN to read back 0; last arg is presumably the channel - confirm. */
45 wait_for_update(EMC_CFG_DIG_DLL, EMC_CFG_DIG_DLL_CFG_DLL_EN, 0, 0);
46 if (channel_mode == DUAL_CHANNEL)
47 wait_for_update(EMC_CFG_DIG_DLL,
48 EMC_CFG_DIG_DLL_CFG_DLL_EN, 0, 1);
/*
 * Enable the EMC digital DLL. Mirror image of dll_disable(): sets CFG_DLL_EN,
 * issues a timing update, and polls until the bit reads back 1 on each active
 * channel.
 *
 * NOTE(review): excerpt is missing the opening brace and the local
 * declaration of emc_cfg_dig_dll (orig lines 52-54).
 */
51 void dll_enable(int channel_mode)
55 emc_cfg_dig_dll = emc_readl(EMC_CFG_DIG_DLL);
56 emc_cfg_dig_dll |= EMC_CFG_DIG_DLL_CFG_DLL_EN;
57 emc_writel(emc_cfg_dig_dll, EMC_CFG_DIG_DLL);
58 emc_timing_update(channel_mode);
/* Poll channel 0, then channel 1 in dual-channel mode. */
60 wait_for_update(EMC_CFG_DIG_DLL, EMC_CFG_DIG_DLL_CFG_DLL_EN, 1, 0);
61 if (channel_mode == DUAL_CHANNEL)
62 wait_for_update(EMC_CFG_DIG_DLL,
63 EMC_CFG_DIG_DLL_CFG_DLL_EN, 1, 1);
67 * When derating is enabled, periodic training needs to update both sets of
68 * tables. This function copies the necessary periodic training settings from
69 * the current timing into its alternate (derated/normal) timing.
/*
 * Propagate the periodic-training fields of @current_timing into the entry at
 * the same index in the "other" table (derated <-> non-derated), keeping both
 * tables consistent while derating is in use. No-op when no derated table
 * exists.
 */
71 void __update_emc_alt_timing(struct tegra21_emc_table *current_timing)
73 struct tegra21_emc_table *current_table, *alt_timing;
76 /* Only have alternate timings when there are derated tables present. */
77 if (!tegra_emc_table_derated)
/* Index of @current_timing within the table it belongs to (pointer diff). */
80 current_table = emc_get_table(dram_over_temp_state);
81 i = current_timing - current_table;
83 BUG_ON(i < 0 || i > tegra_emc_table_size);
/* If currently throttled we came from the derated table, so the alternate
 * is the normal table - and vice versa. */
85 if (dram_over_temp_state == DRAM_OVER_TEMP_THROTTLE)
86 alt_timing = &tegra_emc_table[i];
88 alt_timing = &tegra_emc_table_derated[i];
/* Copy only the periodic-training and PTFV fields, nothing else. */
90 __emc_copy_table_params(current_timing, alt_timing,
91 EMC_COPY_TABLE_PARAM_PERIODIC_FIELDS |
92 EMC_COPY_TABLE_PARAM_PTFV_FIELDS);
96 * It is possible for periodic training to be skipped during the DVFS change. As
97 * an example: suppose the DRAM is trained at 20C - the trained_dram_clktree_*
98 * values will reflect this. Now, supposing the EMC goes to 1600MHz and runs for
99 * a while. If the EMC swaps to some other freq, say 204MHz, while the DRAM is
100 * very hot the current_dram_clktree_* values will reflect this. Why is this a
101 * problem? If we go back to 1600MHz and the temp is still very hot then there
102 * will not be a large difference in the osc reading from the DRAM and we won't
103 * do any periodic training during DVFS. Thus we write the 20C trimmers when in
104 * reality we needed to compute new trimmers based on the current temp.
106 * This function avoids the above mess by simply making the
107 * current_dram_clktree_* fields the same as trained_dram_clktree_* so that we
108 * always do the periodic calibration if needed.
/*
 * Copy trained_dram_clktree_* into current_dram_clktree_* for every
 * channel (c0/c1), device (d0/d1), update (u0/u1) combination in @table.
 * See the block comment above for why: this forces the next DVFS pass to
 * re-evaluate periodic calibration instead of trusting stale readings.
 */
110 void __reset_dram_clktree_values(struct tegra21_emc_table *table)
/* Helper: current_dram_clktree_c<C>d<D>u<U> = trained_dram_clktree_c<C>d<D>u<U>.
 * Presumably #undef'd after use in the omitted lines - confirm. */
112 #define __RESET_CLKTREE(TBL, C, D, U) \
113 TBL->current_dram_clktree_c ## C ## d ## D ## u ## U = \
114 TBL->trained_dram_clktree_c ## C ## d ## D ## u ## U
116 __RESET_CLKTREE(table, 0, 0, 0);
117 __RESET_CLKTREE(table, 0, 0, 1);
118 __RESET_CLKTREE(table, 0, 1, 0);
119 __RESET_CLKTREE(table, 0, 1, 1);
120 __RESET_CLKTREE(table, 1, 0, 0);
121 __RESET_CLKTREE(table, 1, 0, 1);
122 __RESET_CLKTREE(table, 1, 1, 0);
123 __RESET_CLKTREE(table, 1, 1, 1);
126 u32 actual_osc_clocks(u32 in)
/*
 * Read the DRAM oscillator counts via mode-register reads (MR19 = high bits,
 * MR18 = low bits) for each device and channel, convert them into clock-tree
 * delay values (cval), and update next_timing->current_dram_clktree_* for any
 * value whose drift, expressed in DLL taps at the next rate, exceeds
 * next_timing->tree_margin.
 *
 * NOTE(review): excerpt is missing interleaved lines, including the
 * declaration of cval, the shift amount completing each
 * "(emc_readl(EMC_MRR) & EMC_MRR_DATA_MASK) <<" expression, updates of the
 * accumulated delta (adel), closing braces, and the final return (adel is
 * presumably the returned max delta - confirm against the full source).
 */
138 static u32 update_clock_tree_delay(struct tegra21_emc_table *last_timing,
139 struct tegra21_emc_table *next_timing,
140 u32 dram_dev_num, u32 channel_mode)
142 u32 mrr_req = 0, mrr_data = 0;
143 u32 temp0_0 = 0, temp0_1 = 0, temp1_0 = 0, temp1_1 = 0;
144 s32 tdel = 0, tmdel = 0, adel = 0;
146 u32 last_timing_rate_mhz = last_timing->rate / 1000;
147 u32 next_timing_rate_mhz = next_timing->rate / 1000;
/* --- Device 0: MR19 read (upper byte of each oscillator count). --- */
152 mrr_req = (2 << EMC_MRR_DEV_SEL_SHIFT) |
153 (19 << EMC_MRR_MA_SHIFT);
154 emc_writel(mrr_req, EMC_MRR);
156 WARN(wait_for_update(EMC_EMC_STATUS,
157 EMC_EMC_STATUS_MRR_DIVLD, 1, 0),
158 "Timed out waiting for MRR 19 (ch=0)\n");
159 if (channel_mode == DUAL_CHANNEL)
160 WARN(wait_for_update(EMC_EMC_STATUS,
161 EMC_EMC_STATUS_MRR_DIVLD, 1, 1),
162 "Timed out waiting for MRR 19 (ch=1)\n");
164 mrr_data = (emc_readl(EMC_MRR) & EMC_MRR_DATA_MASK) <<
/* tempX_Y: X = channel, Y = byte lane pair; MR19 supplies the high byte. */
167 temp0_0 = (mrr_data & 0xff) << 8;
168 temp0_1 = mrr_data & 0xff00;
170 if (channel_mode == DUAL_CHANNEL) {
171 mrr_data = (emc1_readl(EMC_MRR) & EMC_MRR_DATA_MASK) <<
173 temp1_0 = (mrr_data & 0xff) << 8;
174 temp1_1 = mrr_data & 0xff00;
/* --- Device 0: MR18 read (lower byte), OR'd into the same temps. --- */
180 mrr_req = (mrr_req & ~EMC_MRR_MA_MASK) | (18 << EMC_MRR_MA_SHIFT);
181 emc_writel(mrr_req, EMC_MRR);
183 WARN(wait_for_update(EMC_EMC_STATUS,
184 EMC_EMC_STATUS_MRR_DIVLD, 1, 0),
185 "Timed out waiting for MRR 18 (ch=0)\n");
186 if (channel_mode == DUAL_CHANNEL)
187 WARN(wait_for_update(EMC_EMC_STATUS,
188 EMC_EMC_STATUS_MRR_DIVLD, 1, 1),
189 "Timed out waiting for MRR 18 (ch=1)\n");
191 mrr_data = (emc_readl(EMC_MRR) & EMC_MRR_DATA_MASK) <<
194 temp0_0 |= mrr_data & 0xff;
195 temp0_1 |= (mrr_data & 0xff00) >> 8;
197 if (channel_mode == DUAL_CHANNEL) {
198 mrr_data = (emc1_readl(EMC_MRR) & EMC_MRR_DATA_MASK) <<
200 temp1_0 |= (mrr_data & 0xff);
201 temp1_1 |= (mrr_data & 0xff00) >> 8;
/* Convert osc count to a delay value; update current_* only when the drift
 * in taps (tmdel * 128 * rate_mhz / 1e6) exceeds the table's tree_margin. */
204 cval = (1000000 * actual_osc_clocks(last_timing->run_clocks)) /
205 (last_timing_rate_mhz * 2 * temp0_0);
206 tdel = next_timing->current_dram_clktree_c0d0u0 - cval;
207 tmdel = (tdel < 0) ? -1 * tdel : tdel;
210 if (tmdel * 128 * next_timing_rate_mhz / 1000000 >
211 next_timing->tree_margin)
212 next_timing->current_dram_clktree_c0d0u0 = cval;
214 cval = (1000000 * actual_osc_clocks(last_timing->run_clocks)) /
215 (last_timing_rate_mhz * 2 * temp0_1);
216 tdel = next_timing->current_dram_clktree_c0d0u1 - cval;
217 tmdel = (tdel < 0) ? -1 * tdel : tdel;
222 if (tmdel * 128 * next_timing_rate_mhz / 1000000 >
223 next_timing->tree_margin)
224 next_timing->current_dram_clktree_c0d0u1 = cval;
226 if (channel_mode == DUAL_CHANNEL) {
227 cval = (1000000 * actual_osc_clocks(last_timing->run_clocks)) /
228 (last_timing_rate_mhz * 2 * temp1_0);
229 tdel = next_timing->current_dram_clktree_c1d0u0 - cval;
230 tmdel = (tdel < 0) ? -1 * tdel : tdel;
234 if (tmdel * 128 * next_timing_rate_mhz / 1000000 >
235 next_timing->tree_margin)
236 next_timing->current_dram_clktree_c1d0u0 = cval;
238 cval = (1000000 * actual_osc_clocks(last_timing->run_clocks)) /
239 (last_timing_rate_mhz * 2 * temp1_1);
240 tdel = next_timing->current_dram_clktree_c1d0u1 - cval;
241 tmdel = (tdel < 0) ? -1 * tdel : tdel;
246 if (tmdel * 128 * next_timing_rate_mhz / 1000000 >
247 next_timing->tree_margin)
248 next_timing->current_dram_clktree_c1d0u1 = cval;
/* Single-rank configs are done here; dev1 pass below is TWO_RANK only. */
252 if (dram_dev_num != TWO_RANK)
/* --- Device 1: identical MR19/MR18 sequence, DEV_SEL = 1. --- */
258 mrr_req = (1 << EMC_MRR_DEV_SEL_SHIFT) |
259 (19 << EMC_MRR_MA_SHIFT);
260 emc_writel(mrr_req, EMC_MRR);
262 WARN(wait_for_update(EMC_EMC_STATUS,
263 EMC_EMC_STATUS_MRR_DIVLD, 1, 0),
264 "Timed out waiting for MRR 19 (ch=0)\n");
265 if (channel_mode == DUAL_CHANNEL)
266 WARN(wait_for_update(EMC_EMC_STATUS,
267 EMC_EMC_STATUS_MRR_DIVLD, 1, 1),
268 "Timed out waiting for MRR 19 (ch=1)\n");
270 mrr_data = (emc_readl(EMC_MRR) & EMC_MRR_DATA_MASK) <<
273 temp0_0 = (mrr_data & 0xff) << 8;
274 temp0_1 = mrr_data & 0xff00;
276 if (channel_mode == DUAL_CHANNEL) {
277 mrr_data = (emc1_readl(EMC_MRR) & EMC_MRR_DATA_MASK) <<
279 temp1_0 = (mrr_data & 0xff) << 8;
280 temp1_1 = mrr_data & 0xff00;
286 mrr_req = (mrr_req & ~EMC_MRR_MA_MASK) | (18 << EMC_MRR_MA_SHIFT);
287 emc_writel(mrr_req, EMC_MRR);
289 WARN(wait_for_update(EMC_EMC_STATUS,
290 EMC_EMC_STATUS_MRR_DIVLD, 1, 0),
291 "Timed out waiting for MRR 18 (ch=0)\n");
292 if (channel_mode == DUAL_CHANNEL)
293 WARN(wait_for_update(EMC_EMC_STATUS,
294 EMC_EMC_STATUS_MRR_DIVLD, 1, 1),
295 "Timed out waiting for MRR 18 (ch=1)\n");
297 mrr_data = (emc_readl(EMC_MRR) & EMC_MRR_DATA_MASK) <<
300 temp0_0 |= mrr_data & 0xff;
301 temp0_1 |= (mrr_data & 0xff00) >> 8;
303 if (channel_mode == DUAL_CHANNEL) {
304 mrr_data = (emc1_readl(EMC_MRR) & EMC_MRR_DATA_MASK) <<
306 temp1_0 |= (mrr_data & 0xff);
307 temp1_1 |= (mrr_data & 0xff00) >> 8;
/* Device 1 drift checks, same margin rule as device 0. */
310 cval = (1000000 * actual_osc_clocks(last_timing->run_clocks)) /
311 (last_timing_rate_mhz * 2 * temp0_0);
312 tdel = next_timing->current_dram_clktree_c0d1u0 - cval;
313 tmdel = (tdel < 0) ? -1 * tdel : tdel;
317 if (tmdel * 128 * next_timing_rate_mhz / 1000000 >
318 next_timing->tree_margin)
319 next_timing->current_dram_clktree_c0d1u0 = cval;
321 cval = (1000000 * actual_osc_clocks(last_timing->run_clocks)) /
322 (last_timing_rate_mhz * 2 * temp0_1);
323 tdel = next_timing->current_dram_clktree_c0d1u1 - cval;
324 tmdel = (tdel < 0) ? -1 * tdel : tdel;
328 if (tmdel * 128 * next_timing_rate_mhz / 1000000 >
329 next_timing->tree_margin)
330 next_timing->current_dram_clktree_c0d1u1 = cval;
332 if (channel_mode == DUAL_CHANNEL){
333 cval = (1000000 * actual_osc_clocks(last_timing->run_clocks)) /
334 (last_timing_rate_mhz * 2 * temp1_0);
335 tdel = next_timing->current_dram_clktree_c1d1u0 - cval;
336 tmdel = (tdel < 0) ? -1 * tdel : tdel;
340 if (tmdel * 128 * next_timing_rate_mhz / 1000000 >
341 next_timing->tree_margin)
342 next_timing->current_dram_clktree_c1d1u0 = cval;
344 cval = (1000000 * actual_osc_clocks(last_timing->run_clocks)) /
345 (last_timing_rate_mhz * 2 * temp1_1);
346 tdel = next_timing->current_dram_clktree_c1d1u1 - cval;
347 tmdel = (tdel < 0) ? -1 * tdel : tdel;
351 if (tmdel * 128 * next_timing_rate_mhz / 1000000 >
352 next_timing->tree_margin)
353 next_timing->current_dram_clktree_c1d1u1 = cval;
/*
 * Kick off a periodic-compensation (DQS oscillator) run by writing the MPC
 * request to EMC_MPC. The value of mpc_req is set on lines omitted from this
 * excerpt (orig lines 361-363). The read-back of EMC_MPC presumably just
 * flushes/orders the posted write - confirm against the full source.
 */
360 void start_periodic_compensation(void)
364 emc_writel(mpc_req, EMC_MPC);
365 mpc_req = emc_readl(EMC_MPC);
369 * The per-channel registers don't fit in with the normal set up for making
370 * *_INDEX style enum fields because there are two identical register names
371 * but two channels. Here we define some _INDEX macros to deal with the one
372 * place we need per-channel distinctions.
/*
 * Per-channel indices into trim_regs_per_ch[] (see TRIM_REG below, which
 * indexes that array with EMC<chan>_EMC_DATA_BRLSHFT_<rank>_INDEX).
 */
374 #define EMC0_EMC_CMD_BRLSHFT_0_INDEX 0
375 #define EMC1_EMC_CMD_BRLSHFT_1_INDEX 1
376 #define EMC0_EMC_DATA_BRLSHFT_0_INDEX 2
377 #define EMC1_EMC_DATA_BRLSHFT_0_INDEX 3
378 #define EMC0_EMC_DATA_BRLSHFT_1_INDEX 4
379 #define EMC1_EMC_DATA_BRLSHFT_1_INDEX 5
380 #define EMC0_EMC_QUSE_BRLSHFT_0_INDEX 6
381 #define EMC1_EMC_QUSE_BRLSHFT_1_INDEX 7
382 #define EMC0_EMC_QUSE_BRLSHFT_2_INDEX 8
383 #define EMC1_EMC_QUSE_BRLSHFT_3_INDEX 9
386 * Complicated table of registers and fields! Yikes. Essentially this
387 * boils down to (reg_field + (reg_field * 64)).
/*
 * TRIM_REG(chan, rank, reg, byte): extract the long-DQ DDLL trim for one byte
 * lane from trim_regs[], plus 64x the coarse BRLSHFT value for the same lane
 * from trim_regs_per_ch[]. Expects `next_timing` in the expansion scope.
 * NOTE(review): the `+` continuation joining the two halves (orig line 396)
 * is missing from this excerpt, per the "(a + (b * 64))" comment above.
 */
389 #define TRIM_REG(chan, rank, reg, byte) \
390 ((EMC_PMACRO_OB_DDLL_LONG_DQ_RANK ## rank ## _ ## reg ## \
391 _OB_DDLL_LONG_DQ_RANK ## rank ## _BYTE ## byte ## _MASK & \
392 next_timing->trim_regs[EMC_PMACRO_OB_DDLL_LONG_DQ_RANK ## \
393 rank ## _ ## reg ## _INDEX]) >> \
394 EMC_PMACRO_OB_DDLL_LONG_DQ_RANK ## rank ## _ ## reg ## \
395 _OB_DDLL_LONG_DQ_RANK ## rank ## _BYTE ## byte ## _SHIFT) \
397 (((EMC_DATA_BRLSHFT_ ## rank ## _RANK ## rank ## _BYTE ## \
398 byte ## _DATA_BRLSHFT_MASK & \
399 next_timing->trim_regs_per_ch[EMC ## chan ## \
400 _EMC_DATA_BRLSHFT_ ## rank ## _INDEX]) >> \
401 EMC_DATA_BRLSHFT_ ## rank ## _RANK ## rank ## _BYTE ## \
402 byte ## _DATA_BRLSHFT_SHIFT) * 64)
405 * Compute the temp variable in apply_periodic_compensation_trimmer(). It
406 * reduces to (reg_field | reg_field).
/* CALC_TEMP(rank, reg, byte1, byte2, n): pack new[n] and new[n+1] into the
 * two byte-lane fields of one OB_DDLL_LONG_DQ register. The `|` joining the
 * halves (orig line 413) is missing from this excerpt. */
408 #define CALC_TEMP(rank, reg, byte1, byte2, n) \
409 ((new[n] << EMC_PMACRO_OB_DDLL_LONG_DQ_RANK ## rank ## _ ## \
410 reg ## _OB_DDLL_LONG_DQ_RANK ## rank ## _BYTE ## byte1 ## _SHIFT) & \
411 EMC_PMACRO_OB_DDLL_LONG_DQ_RANK ## rank ## _ ## reg ## \
412 _OB_DDLL_LONG_DQ_RANK ## rank ## _BYTE ## byte1 ## _MASK) \
414 ((new[n + 1] << EMC_PMACRO_OB_DDLL_LONG_DQ_RANK ## rank ## _ ## \
415 reg ## _OB_DDLL_LONG_DQ_RANK ## rank ## _BYTE ## byte2 ## _SHIFT) & \
416 EMC_PMACRO_OB_DDLL_LONG_DQ_RANK ## rank ## _ ## reg ## \
417 _OB_DDLL_LONG_DQ_RANK ## rank ## _BYTE ## byte2 ## _MASK) \
/*
 * Compute the new value of the trimmer register @offset after clock-tree
 * drift compensation. Builds a 16-entry new[] array of per-rank, per-byte
 * trims (via TRIM_REG), adds the drift (current - trained clktree values,
 * converted to DLL taps at the next rate) to the lanes of the affected rank
 * when it exceeds tree_margin, then re-packs the lanes for the requested
 * register: fine taps for OB_DDLL_LONG_DQ registers, coarse /64 values for
 * the DATA_BRLSHFT registers.
 *
 * NOTE(review): excerpt is missing interleaved lines - local declarations
 * (temp, i, tree_delta[], new[]), the switch statement heads, break
 * statements, closing braces, and the final `return temp;`.
 */
419 u32 apply_periodic_compensation_trimmer(
420 struct tegra21_emc_table *next_timing, u32 offset)
423 u32 next_timing_rate_mhz = next_timing->rate / 1000;
425 s32 tree_delta_taps[4];
/* new[0..7]: rank 0 byte lanes 0-7; new[8..15]: rank 1 byte lanes 0-7.
 * TRIM_REG args: (chan, rank, reg, byte). */
427 TRIM_REG(0, 0, 0, 0),
428 TRIM_REG(0, 0, 0, 1),
429 TRIM_REG(0, 0, 1, 2),
430 TRIM_REG(0, 0, 1, 3),
432 TRIM_REG(1, 0, 2, 4),
433 TRIM_REG(1, 0, 2, 5),
434 TRIM_REG(1, 0, 3, 6),
435 TRIM_REG(1, 0, 3, 7),
437 TRIM_REG(0, 1, 0, 0),
438 TRIM_REG(0, 1, 0, 1),
439 TRIM_REG(0, 1, 1, 2),
440 TRIM_REG(0, 1, 1, 3),
442 TRIM_REG(1, 1, 2, 4),
443 TRIM_REG(1, 1, 2, 5),
444 TRIM_REG(1, 1, 3, 6),
/* --- Rank 0 (d0) drift: adjust new[0..7]. --- */
449 case EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_0:
450 case EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_1:
451 case EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_2:
452 case EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_3:
453 case EMC_DATA_BRLSHFT_0:
454 tree_delta[0] = 128 *
455 (next_timing->current_dram_clktree_c0d0u0 -
456 next_timing->trained_dram_clktree_c0d0u0);
457 tree_delta[1] = 128 *
458 (next_timing->current_dram_clktree_c0d0u1 -
459 next_timing->trained_dram_clktree_c0d0u1);
460 tree_delta[2] = 128 *
461 (next_timing->current_dram_clktree_c1d0u0 -
462 next_timing->trained_dram_clktree_c1d0u0);
463 tree_delta[3] = 128 *
464 (next_timing->current_dram_clktree_c1d0u1 -
465 next_timing->trained_dram_clktree_c1d0u1);
/* Convert each delta to taps at the next rate (lhs lines omitted). */
468 (tree_delta[0] * (s32)next_timing_rate_mhz) / 1000000;
470 (tree_delta[1] * (s32)next_timing_rate_mhz) / 1000000;
472 (tree_delta[2] * (s32)next_timing_rate_mhz) / 1000000;
474 (tree_delta[3] * (s32)next_timing_rate_mhz) / 1000000;
/* Apply drift to both byte lanes fed by each clock-tree branch, but only
 * when it exceeds the +/- tree_margin window. */
476 for(i = 0; i < 4; i++) {
477 if ((tree_delta_taps[i] > next_timing->tree_margin) ||
478 (tree_delta_taps[i] <
479 (-1 * next_timing->tree_margin))) {
480 new[i * 2] = new[i * 2] + tree_delta_taps[i];
481 new[i * 2 + 1] = new[i * 2 + 1]
482 + tree_delta_taps[i];
/* BRLSHFT wants the coarse /64 component; DDLL wants the %64 remainder. */
486 if (offset == EMC_DATA_BRLSHFT_0) {
487 for (i = 0; i < 8; i++)
488 new[i] = new[i] / 64;
490 for (i = 0; i < 8; i++)
491 new[i] = new[i] % 64;
/* --- Rank 1 (d1) drift: same computation on new[8..15]. --- */
495 case EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_0:
496 case EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_1:
497 case EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_2:
498 case EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_3:
499 case EMC_DATA_BRLSHFT_1:
500 tree_delta[0] = 128 *
501 (next_timing->current_dram_clktree_c0d1u0 -
502 next_timing->trained_dram_clktree_c0d1u0);
503 tree_delta[1] = 128 *
504 (next_timing->current_dram_clktree_c0d1u1 -
505 next_timing->trained_dram_clktree_c0d1u1);
506 tree_delta[2] = 128 *
507 (next_timing->current_dram_clktree_c1d1u0 -
508 next_timing->trained_dram_clktree_c1d1u0);
509 tree_delta[3] = 128 *
510 (next_timing->current_dram_clktree_c1d1u1 -
511 next_timing->trained_dram_clktree_c1d1u1);
514 (tree_delta[0] * (s32)next_timing_rate_mhz) / 1000000;
516 (tree_delta[1] * (s32)next_timing_rate_mhz) / 1000000;
518 (tree_delta[2] * (s32)next_timing_rate_mhz) / 1000000;
520 (tree_delta[3] * (s32)next_timing_rate_mhz) / 1000000;
522 for(i = 0; i < 4; i++){
523 if ((tree_delta_taps[i] > next_timing->tree_margin) ||
524 (tree_delta_taps[i] <
525 (-1 * next_timing->tree_margin))){
526 new[8 + i * 2] = new[8 + i * 2] +
528 new[8 + i * 2 + 1] = new[8 + i * 2 + 1] +
533 if (offset == EMC_DATA_BRLSHFT_1) {
534 for (i = 0; i < 8; i++)
535 new[i + 8] = new[i + 8] / 64;
537 for(i = 0; i < 8; i++)
538 new[i + 8] = new[i + 8] % 64;
/* --- Re-pack the adjusted lanes into the requested register layout. --- */
544 case EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_0:
545 /* rank, reg, byte1, byte2, n */
546 temp = CALC_TEMP(0, 0, 0, 1, 0);
548 case EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_1:
549 temp = CALC_TEMP(0, 1, 2, 3, 2);
551 case EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_2:
552 temp = CALC_TEMP(0, 2, 4, 5, 4);
554 case EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_3:
555 temp = CALC_TEMP(0, 3, 6, 7, 6);
557 case EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_0:
558 temp = CALC_TEMP(1, 0, 0, 1, 8);
560 case EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_1:
561 temp = CALC_TEMP(1, 1, 2, 3, 10);
563 case EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_2:
564 temp = CALC_TEMP(1, 2, 4, 5, 12);
566 case EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_3:
567 temp = CALC_TEMP(1, 3, 6, 7, 14);
569 case EMC_DATA_BRLSHFT_0:
571 ((new[0] << EMC_DATA_BRLSHFT_0_RANK0_BYTE0_DATA_BRLSHFT_SHIFT) &
572 EMC_DATA_BRLSHFT_0_RANK0_BYTE0_DATA_BRLSHFT_MASK) |
573 ((new[1] << EMC_DATA_BRLSHFT_0_RANK0_BYTE1_DATA_BRLSHFT_SHIFT) &
574 EMC_DATA_BRLSHFT_0_RANK0_BYTE1_DATA_BRLSHFT_MASK) |
575 ((new[2] << EMC_DATA_BRLSHFT_0_RANK0_BYTE2_DATA_BRLSHFT_SHIFT) &
576 EMC_DATA_BRLSHFT_0_RANK0_BYTE2_DATA_BRLSHFT_MASK) |
577 ((new[3] << EMC_DATA_BRLSHFT_0_RANK0_BYTE3_DATA_BRLSHFT_SHIFT) &
578 EMC_DATA_BRLSHFT_0_RANK0_BYTE3_DATA_BRLSHFT_MASK) |
579 ((new[4] << EMC_DATA_BRLSHFT_0_RANK0_BYTE4_DATA_BRLSHFT_SHIFT) &
580 EMC_DATA_BRLSHFT_0_RANK0_BYTE4_DATA_BRLSHFT_MASK) |
581 ((new[5] << EMC_DATA_BRLSHFT_0_RANK0_BYTE5_DATA_BRLSHFT_SHIFT) &
582 EMC_DATA_BRLSHFT_0_RANK0_BYTE5_DATA_BRLSHFT_MASK) |
583 ((new[6] << EMC_DATA_BRLSHFT_0_RANK0_BYTE6_DATA_BRLSHFT_SHIFT) &
584 EMC_DATA_BRLSHFT_0_RANK0_BYTE6_DATA_BRLSHFT_MASK) |
585 ((new[7] << EMC_DATA_BRLSHFT_0_RANK0_BYTE7_DATA_BRLSHFT_SHIFT) &
586 EMC_DATA_BRLSHFT_0_RANK0_BYTE7_DATA_BRLSHFT_MASK);
588 case EMC_DATA_BRLSHFT_1:
590 ((new[8] << EMC_DATA_BRLSHFT_1_RANK1_BYTE0_DATA_BRLSHFT_SHIFT) &
591 EMC_DATA_BRLSHFT_1_RANK1_BYTE0_DATA_BRLSHFT_MASK) |
592 ((new[9] << EMC_DATA_BRLSHFT_1_RANK1_BYTE1_DATA_BRLSHFT_SHIFT) &
593 EMC_DATA_BRLSHFT_1_RANK1_BYTE1_DATA_BRLSHFT_MASK) |
595 EMC_DATA_BRLSHFT_1_RANK1_BYTE2_DATA_BRLSHFT_SHIFT) &
596 EMC_DATA_BRLSHFT_1_RANK1_BYTE2_DATA_BRLSHFT_MASK) |
598 EMC_DATA_BRLSHFT_1_RANK1_BYTE3_DATA_BRLSHFT_SHIFT) &
599 EMC_DATA_BRLSHFT_1_RANK1_BYTE3_DATA_BRLSHFT_MASK) |
601 EMC_DATA_BRLSHFT_1_RANK1_BYTE4_DATA_BRLSHFT_SHIFT) &
602 EMC_DATA_BRLSHFT_1_RANK1_BYTE4_DATA_BRLSHFT_MASK) |
604 EMC_DATA_BRLSHFT_1_RANK1_BYTE5_DATA_BRLSHFT_SHIFT) &
605 EMC_DATA_BRLSHFT_1_RANK1_BYTE5_DATA_BRLSHFT_MASK) |
607 EMC_DATA_BRLSHFT_1_RANK1_BYTE6_DATA_BRLSHFT_SHIFT) &
608 EMC_DATA_BRLSHFT_1_RANK1_BYTE6_DATA_BRLSHFT_MASK) |
610 EMC_DATA_BRLSHFT_1_RANK1_BYTE7_DATA_BRLSHFT_SHIFT) &
611 EMC_DATA_BRLSHFT_1_RANK1_BYTE7_DATA_BRLSHFT_MASK);
/*
 * Periodic clock-tree compensation entry point (r21015 flavor). When
 * periodic training is enabled for @current_timing: disable power-saving
 * features and the DLL, run a DQS oscillator measurement, recompute the
 * clock-tree drift, and - if it exceeds tree_margin - rewrite the trimmer
 * registers in list[], then restore EMC_CFG, EMC_CFG_UPDATE and the DLL.
 *
 * NOTE(review): excerpt is missing interleaved lines (local declarations of
 * i/del/channel_mode/dram_dev_num/emc_cfg_update/emc_dbg_o, the list[]
 * declaration head, several closing braces, and the return value).
 */
620 u32 __do_periodic_emc_compensation_r21015(
621 struct tegra21_emc_table *current_timing)
625 u32 emc_cfg,emc_cfg_o;
/* Trimmer registers rewritten in step 5 (plus BRLSHFT entries on omitted
 * lines 637-639, presumably - confirm). */
629 EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_0,
630 EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_1,
631 EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_2,
632 EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_3,
633 EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_0,
634 EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_1,
635 EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_2,
636 EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_3,
640 u32 items = ARRAY_SIZE(list);
643 if (current_timing->periodic_training) {
/* Dual-channel if CH1 is enabled in FBIO_CFG7; ranks from MC_EMEM_ADR_CFG. */
644 channel_mode = !!(current_timing->burst_regs[EMC_FBIO_CFG7_INDEX] &
646 dram_dev_num = 1 + (mc_readl(MC_EMEM_ADR_CFG) & 0x1);
648 emc_cc_dbg(PER_TRAIN, "Periodic training starting\n");
650 emc_dbg_o = emc_readl(EMC_DBG);
651 emc_cfg_o = emc_readl(EMC_CFG);
/* NOTE(review): EMC_CFG_DRAM_CLKSTOP_PD appears twice in this mask; the
 * second occurrence is redundant and is likely meant to be
 * EMC_CFG_DRAM_CLKSTOP_SR - verify against the TRM / full source. */
652 emc_cfg = emc_cfg_o & ~(EMC_CFG_DYN_SELF_REF | EMC_CFG_DRAM_ACPD |
653 EMC_CFG_DRAM_CLKSTOP_PD |
654 EMC_CFG_DRAM_CLKSTOP_PD);
657 * 1. Power optimizations should be off.
659 emc_writel(emc_cfg, EMC_CFG);
661 /* Does emc_timing_update() for above changes. */
662 dll_disable(channel_mode);
/* Make sure DRAM is out of powerdown and self-refresh on both channels. */
664 wait_for_update(EMC_EMC_STATUS,
665 EMC_EMC_STATUS_DRAM_IN_POWERDOWN_MASK, 0, 0);
667 wait_for_update(EMC_EMC_STATUS,
668 EMC_EMC_STATUS_DRAM_IN_POWERDOWN_MASK, 0, 1);
670 wait_for_update(EMC_EMC_STATUS,
671 EMC_EMC_STATUS_DRAM_IN_SELF_REFRESH_MASK, 0, 0);
673 wait_for_update(EMC_EMC_STATUS,
674 EMC_EMC_STATUS_DRAM_IN_SELF_REFRESH_MASK, 0, 1);
/* Save EMC_CFG_UPDATE and force UPDATE_DLL_IN_UPDATE = 2 for the run. */
676 emc_cfg_update = emc_readl(EMC_CFG_UPDATE);
677 emc_writel((emc_cfg_update &
678 ~EMC_CFG_UPDATE_UPDATE_DLL_IN_UPDATE_MASK) |
679 (2 << EMC_CFG_UPDATE_UPDATE_DLL_IN_UPDATE_SHIFT),
683 * 2. osc kick off - this assumes training and dvfs have set
686 start_periodic_compensation();
689 * 3. Let dram capture its clock tree delays.
691 udelay((actual_osc_clocks(current_timing->run_clocks) * 1000) /
692 current_timing->rate + 1);
695 * 4. Check delta wrt previous values (save value if margin
696 * exceeds what is set in table).
698 del = update_clock_tree_delay(current_timing, current_timing,
699 dram_dev_num, channel_mode);
702 * 5. Apply compensation w.r.t. trained values (if clock tree
703 * has drifted more than the set margin).
705 if (current_timing->tree_margin <
706 ((del * 128 * (current_timing->rate / 1000)) / 1000000)) {
707 for (i = 0; i < items; i++) {
708 u32 tmp = apply_periodic_compensation_trimmer(
709 current_timing, list[i]);
710 emc_writel(tmp, list[i]);
/* Restore original EMC_CFG (power features back on). */
714 emc_writel(emc_cfg_o, EMC_CFG);
717 * 6. Timing update actually applies the new trimmers.
719 emc_timing_update(channel_mode);
721 /* 6.1. Restore the UPDATE_DLL_IN_UPDATE field. */
722 emc_writel(emc_cfg_update, EMC_CFG_UPDATE);
724 /* 6.2. Restore the DLL. */
725 dll_enable(channel_mode);
728 * 7. Copy over the periodic training registers that we updated
729 * here to the corresponding derated/non-derated table.
731 __update_emc_alt_timing(current_timing);
738 * Source clock period is in picoseconds. Returns the ramp down wait time in
/*
 * Queue the pad/brick power ramp-DOWN sequence into the CCFIFO for a DVFS
 * clock change. @clk is the source clock period (picoseconds, per the header
 * comment above); @flip_backward selects whether the pad settings are taken
 * from @next_timing or @last_timing. Writes are staged with ccfifo_writel()
 * and executed later by the clock-change state machine; the return value is
 * the accumulated wait time of the staged sequence.
 *
 * NOTE(review): excerpt is missing interleaved lines (declarations of
 * pmacro_cmd_pad/dq_pad/rfu1/cfg5 and seq_wait, the if/else heads around the
 * flip_backward selection, several assignment left-hand sides in the DCC
 * sections, and closing braces).
 */
741 u32 do_dvfs_power_ramp_down(u32 clk, int flip_backward,
742 struct tegra21_emc_table *last_timing,
743 struct tegra21_emc_table *next_timing)
745 u32 ramp_down_wait = 0;
750 u32 pmacro_common_tx;
753 emc_cc_dbg(PRAMP_DN, "flip_backward = %d\n", flip_backward);
/* Select the pad-control source values: next_timing when flipping backward,
 * otherwise last_timing (branch heads omitted from this excerpt). */
756 pmacro_cmd_pad = next_timing->
757 burst_regs[EMC_PMACRO_CMD_PAD_TX_CTRL_INDEX];
758 pmacro_dq_pad = next_timing->
759 burst_regs[EMC_PMACRO_DATA_PAD_TX_CTRL_INDEX];
760 pmacro_rfu1 = next_timing->
761 burst_regs[EMC_PMACRO_BRICK_CTRL_RFU1_INDEX];
762 pmacro_cfg5 = next_timing->
763 burst_regs[EMC_FBIO_CFG5_INDEX];
764 pmacro_common_tx = next_timing->
765 burst_regs[EMC_PMACRO_COMMON_PAD_TX_CTRL_INDEX];
767 pmacro_cmd_pad = last_timing->
768 burst_regs[EMC_PMACRO_CMD_PAD_TX_CTRL_INDEX];
769 pmacro_dq_pad = last_timing->
770 burst_regs[EMC_PMACRO_DATA_PAD_TX_CTRL_INDEX];
771 pmacro_rfu1 = last_timing->
772 burst_regs[EMC_PMACRO_BRICK_CTRL_RFU1_INDEX];
773 pmacro_cfg5 = last_timing->
774 burst_regs[EMC_FBIO_CFG5_INDEX];
775 pmacro_common_tx = last_timing->
776 burst_regs[EMC_PMACRO_COMMON_PAD_TX_CTRL_INDEX];
/* Keep command DQ drivers forced on while ramping, then gate command TX. */
779 pmacro_cmd_pad |= EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_DQ_TX_DRVFORCEON;
781 ccfifo_writel(pmacro_cmd_pad, EMC_PMACRO_CMD_PAD_TX_CTRL, 0);
782 ccfifo_writel(pmacro_cfg5 | EMC_FBIO_CFG5_CMD_TX_DIS, EMC_FBIO_CFG5,
784 ramp_down_wait = 12 * clk;
/* ~100ns expressed in cycles of the (picosecond) clk period. */
786 seq_wait = (100000 / clk) + 1;
788 if (clk < (1000000 / DVFS_FGCG_HIGH_SPEED_THRESHOLD)) {
789 emc_cc_dbg(PRAMP_DN, "clk < FGCG_HIGH_SPEED_THRESHOLD;\n");
790 emc_cc_dbg(PRAMP_DN, " %u vs %u\n", clk,
791 1000000 / DVFS_FGCG_HIGH_SPEED_THRESHOLD);
793 if (clk < (1000000 / IOBRICK_DCC_THRESHOLD)) {
794 emc_cc_dbg(PRAMP_DN, "clk < IOBRICK_DCC_THRESHOLD;\n");
795 emc_cc_dbg(PRAMP_DN, " %u vs %u\n", clk,
796 1000000 / IOBRICK_DCC_THRESHOLD);
/* Disable DCC on CMD pads (assignment LHS lines omitted). */
799 ~(EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_DQ_TX_E_DCC |
800 EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_CMD_TX_E_DCC);
802 EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_DQSP_TX_E_DCC |
803 EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_DQSN_TX_E_DCC;
804 ccfifo_writel(pmacro_cmd_pad,
805 EMC_PMACRO_CMD_PAD_TX_CTRL, seq_wait);
806 ramp_down_wait += 100000;
/* Disable DCC on DATA pads, then start turning bricks off via RFU1. */
809 ~(EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQ_TX_E_DCC |
810 EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_CMD_TX_E_DCC);
812 EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQSP_TX_E_DCC |
813 EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQSN_TX_E_DCC;
814 ccfifo_writel(pmacro_dq_pad,
815 EMC_PMACRO_DATA_PAD_TX_CTRL, 0);
816 ccfifo_writel(pmacro_rfu1 & ~0x01120112,
817 EMC_PMACRO_BRICK_CTRL_RFU1, 0);
819 emc_cc_dbg(PRAMP_DN, "clk > IOBRICK_DCC_THRESHOLD\n");
820 ccfifo_writel(pmacro_rfu1 & ~0x01120112,
821 EMC_PMACRO_BRICK_CTRL_RFU1, seq_wait);
822 ramp_down_wait += 100000;
825 ccfifo_writel(pmacro_rfu1 & ~0x01bf01bf,
826 EMC_PMACRO_BRICK_CTRL_RFU1, seq_wait);
827 ramp_down_wait += 100000;
829 if (clk < (1000000 / IOBRICK_DCC_THRESHOLD)) {
831 ~(EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_DQ_TX_E_DCC |
832 EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_CMD_TX_E_DCC |
833 EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_DQSP_TX_E_DCC |
834 EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_DQSN_TX_E_DCC);
835 ccfifo_writel(pmacro_cmd_pad,
836 EMC_PMACRO_CMD_PAD_TX_CTRL, seq_wait);
837 ramp_down_wait += 100000;
840 ~(EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQ_TX_E_DCC |
841 EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_CMD_TX_E_DCC |
842 EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQSP_TX_E_DCC |
843 EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQSN_TX_E_DCC);
844 ccfifo_writel(pmacro_dq_pad,
845 EMC_PMACRO_DATA_PAD_TX_CTRL, 0);
846 ccfifo_writel(pmacro_rfu1 & ~0x07ff07ff,
847 EMC_PMACRO_BRICK_CTRL_RFU1, 0);
849 ccfifo_writel(pmacro_rfu1 & ~0x07ff07ff,
850 EMC_PMACRO_BRICK_CTRL_RFU1, seq_wait);
851 ramp_down_wait += 100000;
/* High-speed path: single RFU1 write with a longer settle. */
854 emc_cc_dbg(PRAMP_DN, "clk > FGCG_HIGH_SPEED_THRESHOLD\n");
855 ccfifo_writel(pmacro_rfu1 & ~0xffff07ff,
856 EMC_PMACRO_BRICK_CTRL_RFU1, seq_wait + 19);
857 ramp_down_wait += 100000 + (20 * clk);
860 if (clk < (1000000 / DVFS_FGCG_MID_SPEED_THRESHOLD)) {
861 emc_cc_dbg(PRAMP_DN, "clk < FGCG_MID_SPEED_THRESHOLD;\n");
862 emc_cc_dbg(PRAMP_DN, " %u vs %u\n", clk,
863 1000000 / DVFS_FGCG_MID_SPEED_THRESHOLD);
/* Step COMMON_PAD_TX_CTRL down in two stages, then a plain delay slot. */
865 ramp_down_wait += 100000;
866 ccfifo_writel(pmacro_common_tx & ~0x5,
867 EMC_PMACRO_COMMON_PAD_TX_CTRL, seq_wait);
868 ramp_down_wait += 100000;
869 ccfifo_writel(pmacro_common_tx & ~0xf,
870 EMC_PMACRO_COMMON_PAD_TX_CTRL, seq_wait);
871 ramp_down_wait += 100000;
872 ccfifo_writel(0, 0, seq_wait);
873 ramp_down_wait += 100000;
875 emc_cc_dbg(PRAMP_DN, "clk > FGCG_MID_SPEED_THRESHOLD\n");
876 ccfifo_writel(pmacro_common_tx & ~0xf,
877 EMC_PMACRO_COMMON_PAD_TX_CTRL, seq_wait);
880 return ramp_down_wait;
884 * Similar to do_dvfs_power_ramp_down() except this does the power ramp up.
/*
 * Queue the pad/brick power ramp-UP sequence into the CCFIFO - the inverse
 * of do_dvfs_power_ramp_down(). Note the source-selection is mirrored: the
 * normal (non-flipped) path reads pad settings from @last_timing here.
 * Returns the accumulated wait time of the staged sequence.
 *
 * NOTE(review): excerpt is missing interleaved lines (local declarations,
 * the flip_backward branch heads, assignment left-hand sides in the DCC
 * sections, several ccfifo_writel() delay arguments, and closing braces).
 */
886 noinline u32 do_dvfs_power_ramp_up(u32 clk, int flip_backward,
887 struct tegra21_emc_table *last_timing,
888 struct tegra21_emc_table *next_timing)
894 u32 pmacro_common_tx;
895 u32 ramp_up_wait = 0;
/* Pad settings come from last_timing normally, next_timing when flipped
 * (branch heads omitted from this excerpt). */
898 pmacro_cmd_pad = last_timing->
899 burst_regs[EMC_PMACRO_CMD_PAD_TX_CTRL_INDEX];
900 pmacro_dq_pad = last_timing->
901 burst_regs[EMC_PMACRO_DATA_PAD_TX_CTRL_INDEX];
902 pmacro_rfu1 = last_timing->
903 burst_regs[EMC_PMACRO_BRICK_CTRL_RFU1_INDEX];
904 pmacro_cfg5 = last_timing->burst_regs[EMC_FBIO_CFG5_INDEX];
905 pmacro_common_tx = last_timing->
906 burst_regs[EMC_PMACRO_COMMON_PAD_TX_CTRL_INDEX];
908 pmacro_cmd_pad = next_timing->
909 burst_regs[EMC_PMACRO_CMD_PAD_TX_CTRL_INDEX];
910 pmacro_dq_pad = next_timing->
911 burst_regs[EMC_PMACRO_DATA_PAD_TX_CTRL_INDEX];
912 pmacro_rfu1 = next_timing->
913 burst_regs[EMC_PMACRO_BRICK_CTRL_RFU1_INDEX];
914 pmacro_cfg5 = next_timing->
915 burst_regs[EMC_FBIO_CFG5_INDEX];
916 pmacro_common_tx = next_timing->
917 burst_regs[EMC_PMACRO_COMMON_PAD_TX_CTRL_INDEX];
/* Force command DQ drivers on for the duration of the ramp. */
919 pmacro_cmd_pad |= EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_DQ_TX_DRVFORCEON;
/* Bring COMMON_PAD_TX_CTRL back up in stages below mid-speed. */
921 if (clk < 1000000 / DVFS_FGCG_MID_SPEED_THRESHOLD) {
922 ccfifo_writel(pmacro_common_tx & 0xa,
923 EMC_PMACRO_COMMON_PAD_TX_CTRL, 0);
924 ccfifo_writel(pmacro_common_tx & 0xf,
925 EMC_PMACRO_COMMON_PAD_TX_CTRL,
927 ramp_up_wait += 100000;
929 ccfifo_writel(pmacro_common_tx | 0x8,
930 EMC_PMACRO_COMMON_PAD_TX_CTRL, 0);
933 if (clk < 1000000 / DVFS_FGCG_HIGH_SPEED_THRESHOLD) {
934 if (clk < 1000000 / IOBRICK_DCC_THRESHOLD) {
/* Re-enable DCC on CMD pads (LHS lines omitted). */
936 EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_DQSP_TX_E_DCC |
937 EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_DQSN_TX_E_DCC;
939 ~(EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_DQ_TX_E_DCC |
940 EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_CMD_TX_E_DCC);
941 ccfifo_writel(pmacro_cmd_pad,
942 EMC_PMACRO_CMD_PAD_TX_CTRL,
944 ramp_up_wait += 100000;
/* Re-enable DCC on DATA pads, partially re-enable bricks via RFU1. */
947 EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQSP_TX_E_DCC |
948 EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQSN_TX_E_DCC;
950 ~(EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQ_TX_E_DCC |
951 EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_CMD_TX_E_DCC);
952 ccfifo_writel(pmacro_dq_pad,
953 EMC_PMACRO_DATA_PAD_TX_CTRL, 0);
954 ccfifo_writel(pmacro_rfu1 & 0xfe40fe40,
955 EMC_PMACRO_BRICK_CTRL_RFU1, 0);
957 ccfifo_writel(pmacro_rfu1 & 0xfe40fe40,
958 EMC_PMACRO_BRICK_CTRL_RFU1,
960 ramp_up_wait += 100000;
963 ccfifo_writel(pmacro_rfu1 & 0xfeedfeed,
964 EMC_PMACRO_BRICK_CTRL_RFU1, (100000 / clk) + 1);
965 ramp_up_wait += 100000;
967 if (clk < 1000000 / IOBRICK_DCC_THRESHOLD) {
/* Full DCC re-enable on CMD and DATA pads (LHS lines omitted). */
969 EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_DQSP_TX_E_DCC |
970 EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_DQSN_TX_E_DCC |
971 EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_DQ_TX_E_DCC |
972 EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_CMD_TX_E_DCC;
973 ccfifo_writel(pmacro_cmd_pad,
974 EMC_PMACRO_CMD_PAD_TX_CTRL,
976 ramp_up_wait += 100000;
979 EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQSP_TX_E_DCC |
980 EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQSN_TX_E_DCC |
981 EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQ_TX_E_DCC |
982 EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_CMD_TX_E_DCC;
983 ccfifo_writel(pmacro_dq_pad,
984 EMC_PMACRO_DATA_PAD_TX_CTRL, 0);
985 ccfifo_writel(pmacro_rfu1,
986 EMC_PMACRO_BRICK_CTRL_RFU1, 0);
988 ccfifo_writel(pmacro_rfu1,
989 EMC_PMACRO_BRICK_CTRL_RFU1,
991 ramp_up_wait += 100000;
/* Re-enable command TX last. */
994 ccfifo_writel(pmacro_cfg5 & ~EMC_FBIO_CFG5_CMD_TX_DIS,
995 EMC_FBIO_CFG5, (100000 / clk) + 10);
996 ramp_up_wait += 100000 + (10 * clk);
997 } else if (clk < 1000000 / DVFS_FGCG_MID_SPEED_THRESHOLD) {
998 ccfifo_writel(pmacro_rfu1 | 0x06000600,
999 EMC_PMACRO_BRICK_CTRL_RFU1, (100000 / clk) + 1);
1000 ccfifo_writel(pmacro_cfg5 & ~EMC_FBIO_CFG5_CMD_TX_DIS,
1001 EMC_FBIO_CFG5, (100000 / clk) + 10);
1002 ramp_up_wait += 100000 + 10 * clk;
1004 ccfifo_writel(pmacro_rfu1 | 0x00000600,
1005 EMC_PMACRO_BRICK_CTRL_RFU1, 0);
1006 ccfifo_writel(pmacro_cfg5 & ~EMC_FBIO_CFG5_CMD_TX_DIS,
1008 ramp_up_wait += 12 * clk;
/* Release the forced-on command DQ drivers. */
1011 pmacro_cmd_pad &= ~EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_DQ_TX_DRVFORCEON;
1012 ccfifo_writel(pmacro_cmd_pad, EMC_PMACRO_CMD_PAD_TX_CTRL, 5);
1014 return ramp_up_wait;
1018 * Change the DLL's input clock. Used during the DLL prelock sequence.
/*
 * Program the DLL's input clock from @clksrc (the EMC 2x clock source
 * selection) during the DLL prelock sequence: copy the source/divisor
 * fields from @clksrc into the table's dll_clk_src setting, pick the
 * DDLL_CLK_SEL mux to match (PLLM VCO A/B for the low-jitter sources,
 * switched output otherwise), write CLK_SOURCE_EMC_DLL, then set or clear
 * the EMC_DLL clock enable per the table flag.
 *
 * NOTE(review): excerpt is missing interleaved lines (declarations of
 * emc_clk_src/emc_clk_div, the else head before the CLK_ENB clear path,
 * and closing braces).
 */
1020 void change_dll_src(struct tegra21_emc_table *next_timing, u32 clksrc)
1023 u32 dll_setting = next_timing->dll_clk_src;
1028 emc_clk_src = (clksrc & EMC_CLK_EMC_2X_CLK_SRC_MASK) >>
1029 EMC_CLK_EMC_2X_CLK_SRC_SHIFT;
1030 emc_clk_div = (clksrc & EMC_CLK_EMC_2X_CLK_DIVISOR_MASK) >>
1031 EMC_CLK_EMC_2X_CLK_DIVISOR_SHIFT;
/* Mirror the EMC clock source/divisor into the DLL clock fields. */
1033 dll_setting &= ~(DLL_CLK_EMC_DLL_CLK_SRC_MASK |
1034 DLL_CLK_EMC_DLL_CLK_DIVISOR_MASK);
1035 dll_setting |= emc_clk_src << DLL_CLK_EMC_DLL_CLK_SRC_SHIFT;
1036 dll_setting |= emc_clk_div << DLL_CLK_EMC_DLL_CLK_DIVISOR_SHIFT;
1038 /* Low jitter and undivided are the same thing. */
1039 dll_setting &= ~DLL_CLK_EMC_DLL_DDLL_CLK_SEL_MASK;
1040 if (emc_clk_src == EMC_CLK_SOURCE_PLLMB_LJ)
1041 dll_setting |= (PLLM_VCOB <<
1042 DLL_CLK_EMC_DLL_DDLL_CLK_SEL_SHIFT);
1043 else if (emc_clk_src == EMC_CLK_SOURCE_PLLM_LJ)
1044 dll_setting |= (PLLM_VCOA <<
1045 DLL_CLK_EMC_DLL_DDLL_CLK_SEL_SHIFT);
1047 dll_setting |= (EMC_DLL_SWITCH_OUT <<
1048 DLL_CLK_EMC_DLL_DDLL_CLK_SEL_SHIFT);
1050 /* Now program the clock source. */
1051 emc_cc_dbg(REGS, "clk source: 0x%08x => 0x%p\n", dll_setting,
1052 clk_base + CLK_RST_CONTROLLER_CLK_SOURCE_EMC_DLL);
1053 writel(dll_setting, clk_base + CLK_RST_CONTROLLER_CLK_SOURCE_EMC_DLL);
/* Enable or disable the EMC_DLL clock via the SET/CLR enable registers. */
1055 if (next_timing->clk_out_enb_x_0_clk_enb_emc_dll) {
1056 writel(CLK_OUT_ENB_X_CLK_ENB_EMC_DLL,
1057 clk_base + CLK_RST_CONTROLLER_CLK_OUT_ENB_X_SET);
1058 emc_cc_dbg(REGS, "out_enb_x_set: 0x%08x => 0x%p\n",
1059 CLK_OUT_ENB_X_CLK_ENB_EMC_DLL,
1060 clk_base + CLK_RST_CONTROLLER_CLK_OUT_ENB_X_SET);
1062 writel(CLK_OUT_ENB_X_CLK_ENB_EMC_DLL,
1063 clk_base + CLK_RST_CONTROLLER_CLK_OUT_ENB_X_CLR);
1064 emc_cc_dbg(REGS, "out_enb_x_clr: 0x%08x => 0x%p\n",
1065 CLK_OUT_ENB_X_CLK_ENB_EMC_DLL,
1066 clk_base + CLK_RST_CONTROLLER_CLK_OUT_ENB_X_CLR);
1073 u32 dll_prelock(struct tegra21_emc_table *next_timing,
1074 int dvfs_with_training, u32 clksrc)
/*
 * Run the DLL prelock state machine ahead of a clock change: disable the
 * DLL, stall traffic, program the calibration filter and start trim for
 * the target rate, retarget the DLL input clock via change_dll_src(),
 * re-enable the DLL, and spin until it reports locked. Returns the
 * locked DLL output value (EMC_DIG_DLL_STATUS_DLL_OUT_MASK bits).
 *
 * NOTE(review): this extract elides lines (the inline numbering skips
 * values) — the opening brace, several local declarations (dll_en,
 * dll_locked, emc_dll_cfg_0/1, ...) and the `do {` openers of the
 * polling loops below are not visible here.
 */
1076 u32 emc_dig_dll_status;
1079 u32 emc_cfg_dig_dll;
1082 u32 ddllcal_ctrl_start_trim_val;
1084 u32 dual_channel_lpddr4_case;
1085 u32 dll_priv_updated;
1087 emc_cc_dbg(PRELOCK, "Prelock starting; version: %d\n",
1088 EMC_PRELOCK_VERSION);
/*
 * Dual channel iff both CH0 and CH1 are enabled in EMC_FBIO_CFG7.
 * Bitwise '&' of two '!!'-normalized values is a valid logical AND here
 * since each operand is exactly 0 or 1.
 */
1090 dual_channel_lpddr4_case =
1091 !!(emc_readl(EMC_FBIO_CFG7) & EMC_FBIO_CFG7_CH1_ENABLE) &
1092 !!(emc_readl(EMC_FBIO_CFG7) & EMC_FBIO_CFG7_CH0_ENABLE);
1094 emc_dig_dll_status = 0;
1097 emc_cfg_dig_dll = 0;
1100 ddllcal_ctrl_start_trim_val = 0;
1103 emc_cc_dbg(PRELOCK, "Dual channel LPDDR4: %s\n",
1104 dual_channel_lpddr4_case ? "yes" : "no");
1105 emc_cc_dbg(PRELOCK, "DLL clksrc: 0x%08x\n", clksrc);
1108 * Configure the DLL for prelock.
/*
 * Step 1: lock limit = 3, DLL disabled, mode = 3, stall all traffic,
 * no RW/all stall-until-lock; then latch via a timing update.
 */
1110 emc_cc_dbg(PRELOCK_STEPS, "Step 1\n");
1111 emc_cfg_dig_dll = emc_readl(EMC_CFG_DIG_DLL) &
1112 ~EMC_CFG_DIG_DLL_CFG_DLL_LOCK_LIMIT_MASK;
1113 emc_cfg_dig_dll |= (3 << EMC_CFG_DIG_DLL_CFG_DLL_LOCK_LIMIT_SHIFT);
1114 emc_cfg_dig_dll &= ~EMC_CFG_DIG_DLL_CFG_DLL_EN;
1115 emc_cfg_dig_dll &= ~EMC_CFG_DIG_DLL_CFG_DLL_MODE_MASK;
1116 emc_cfg_dig_dll |= (3 << EMC_CFG_DIG_DLL_CFG_DLL_MODE_SHIFT);
1117 emc_cfg_dig_dll |= EMC_CFG_DIG_DLL_CFG_DLL_STALL_ALL_TRAFFIC;
1118 emc_cfg_dig_dll &= ~EMC_CFG_DIG_DLL_CFG_DLL_STALL_RW_UNTIL_LOCK;
1119 emc_cfg_dig_dll &= ~EMC_CFG_DIG_DLL_CFG_DLL_STALL_ALL_UNTIL_LOCK;
1121 emc_writel(emc_cfg_dig_dll, EMC_CFG_DIG_DLL);
/* Direct timing update trigger (same effect as emc_timing_update()). */
1122 emc_writel(1, EMC_TIMING_CONTROL);
/* Step 2: wait until the timing update is no longer stalled, per channel. */
1127 emc_cc_dbg(PRELOCK_STEPS, "Step 2\n");
1128 wait_for_update(EMC_EMC_STATUS,
1129 EMC_EMC_STATUS_TIMING_UPDATE_STALLED, 0, 0);
1130 if (dual_channel_lpddr4_case)
1131 wait_for_update(EMC_EMC_STATUS,
1132 EMC_EMC_STATUS_TIMING_UPDATE_STALLED, 0, 1);
1135 * Poll channel(s) until DLL_EN is true.
/*
 * Step 3: despite the comment above, these loops exit when DLL_EN reads
 * back as CLEAR (loop while dll_en == 1) — i.e. wait for the disable
 * written in step 1 to take effect on each channel. The `do {` openers
 * are elided from this extract.
 */
1137 emc_cc_dbg(PRELOCK_STEPS, "Step 3\n");
1139 emc_cfg_dig_dll = emc_readl(EMC_CFG_DIG_DLL);
1140 dll_en = emc_cfg_dig_dll & EMC_CFG_DIG_DLL_CFG_DLL_EN;
1141 } while (dll_en == 1);
1143 if (dual_channel_lpddr4_case) {
1145 emc_cfg_dig_dll = emc1_readl(EMC_CFG_DIG_DLL);
1146 dll_en = emc_cfg_dig_dll & EMC_CFG_DIG_DLL_CFG_DLL_EN;
1147 } while (dll_en == 1);
1151 * Update DLL calibration filter.
/* Step 4: program the DLL calibration filter from the target timing. */
1153 emc_cc_dbg(PRELOCK_STEPS, "Step 4\n");
1154 emc_dll_cfg_0 = next_timing->burst_regs[EMC_DLL_CFG_0_INDEX];
1156 emc_writel(emc_dll_cfg_0, EMC_DLL_CFG_0);
/* Rate-banded start trim (rates in kHz); default 20 above 1.2 GHz band. */
1158 if (next_timing->rate >= 400000 && next_timing->rate < 600000)
1159 ddllcal_ctrl_start_trim_val = 150;
1160 else if (next_timing->rate >= 600000 && next_timing->rate < 800000)
1161 ddllcal_ctrl_start_trim_val = 100;
1162 else if (next_timing->rate >= 800000 && next_timing->rate < 1000000)
1163 ddllcal_ctrl_start_trim_val = 70;
1164 else if (next_timing->rate >= 1000000 && next_timing->rate < 1200000)
1165 ddllcal_ctrl_start_trim_val = 30;
1167 ddllcal_ctrl_start_trim_val = 20;
1169 emc_dll_cfg_1 = emc_readl(EMC_DLL_CFG_1);
/*
 * NOTE(review): this keeps ONLY the trim field of EMC_DLL_CFG_1 and
 * clears every other bit before OR-ing in the new trim; the usual
 * read-modify-write idiom would be `&= ~MASK` (clear the field, keep
 * the rest). Confirm intended behavior against the TRM / reference
 * sequence before changing.
 */
1170 emc_dll_cfg_1 &= EMC_DLL_CFG_1_DDLLCAL_CTRL_START_TRIM_MASK;
1171 emc_dll_cfg_1 |= ddllcal_ctrl_start_trim_val;
1172 emc_writel(emc_dll_cfg_1, EMC_DLL_CFG_1);
1175 * (Skipping some steps to get back inline with reference.)
1176 * Change the DLL clock source.
1178 emc_cc_dbg(PRELOCK_STEPS, "Step 8\n");
1179 change_dll_src(next_timing, clksrc);
1182 * Enable the DLL and start the prelock state machine.
1184 emc_cc_dbg(PRELOCK_STEPS, "Step 9\n");
1185 emc_cfg_dig_dll = emc_readl(EMC_CFG_DIG_DLL);
1186 emc_cfg_dig_dll |= EMC_CFG_DIG_DLL_CFG_DLL_EN;
1187 emc_writel(emc_cfg_dig_dll, EMC_CFG_DIG_DLL);
1189 emc_timing_update(dual_channel_lpddr4_case ?
1190 DUAL_CHANNEL : SINGLE_CHANNEL);
/* Poll each channel until DLL_EN reads back set (enable took effect). */
1193 emc_cfg_dig_dll = emc_readl(EMC_CFG_DIG_DLL);
1194 dll_en = emc_cfg_dig_dll & EMC_CFG_DIG_DLL_CFG_DLL_EN;
1195 } while (dll_en == 0);
1197 if (dual_channel_lpddr4_case) {
1199 emc_cfg_dig_dll = emc1_readl(EMC_CFG_DIG_DLL);
1200 dll_en = emc_cfg_dig_dll & EMC_CFG_DIG_DLL_CFG_DLL_EN;
1201 } while (dll_en == 0);
1205 * Wait for the DLL to lock.
/*
 * Step 10: spin until both LOCK and PRIV_UPDATED are set on channel 0.
 * NOTE(review): no timeout is visible in this loop — a DLL that never
 * locks would hang here.
 */
1207 emc_cc_dbg(PRELOCK_STEPS, "Step 10\n");
1209 emc_dig_dll_status = emc_readl(EMC_DIG_DLL_STATUS);
1210 dll_locked = emc_dig_dll_status & EMC_DIG_DLL_STATUS_DLL_LOCK;
1211 dll_priv_updated = emc_dig_dll_status &
1212 EMC_DIG_DLL_STATUS_DLL_PRIV_UPDATED;
1213 } while (!dll_locked || !dll_priv_updated);
1216 * Prelock training specific code - removed. Should it be ??
1220 * Done! Return the dll prelock value.
1222 emc_cc_dbg(PRELOCK_STEPS, "Step 12\n");
1223 emc_dig_dll_status = emc_readl(EMC_DIG_DLL_STATUS);
1224 return emc_dig_dll_status & EMC_DIG_DLL_STATUS_DLL_OUT_MASK;
1228 * Do the clock change sequence.
1230 void emc_set_clock_r21015(struct tegra21_emc_table *next_timing,
1231 struct tegra21_emc_table *last_timing,
1232 int training, u32 clksrc)
1235 * This is the timing table for the source frequency. It does _not_
1236 * necessarily correspond to the actual timing values in the EMC at the
1237 * moment. If the boot BCT differs from the table then this can happen.
1238 * However, we need it for accessing the dram_timing_regs (which are not
1239 * really registers) array for the current frequency.
1241 struct tegra21_emc_table *fake_timing;
1245 u32 cya_allow_ref_cc = 0, ref_b4_sref_en = 0, cya_issue_pc_ref = 0;
1247 u32 zqcal_before_cc_cutoff = 2400; /* In picoseconds */
1250 s32 zq_latch_dvfs_wait_time;
1251 s32 tZQCAL_lpddr4_fc_adj;
1252 /* Scaled by x1000 */
1253 u32 tFC_lpddr4 = 1000 * next_timing->dram_timing_regs[T_FC_LPDDR4];
1254 /* u32 tVRCG_lpddr4 = next_timing->dram_timing_regs[T_FC_LPDDR4]; */
1255 u32 tZQCAL_lpddr4 = 1000000;
1257 u32 dram_type, dram_dev_num, shared_zq_resistor;
1261 u32 emc_cfg, emc_sel_dpd_ctrl, emc_cfg_reg;
1264 u32 emc_zcal_interval;
1265 u32 emc_zcal_wait_cnt_old;
1266 u32 emc_zcal_wait_cnt_new;
1269 u32 zcal_wait_time_clocks;
1270 u32 zcal_wait_time_ps;
1272 u32 emc_auto_cal_config;
1275 u32 mr13_catr_enable;
1277 u32 ramp_up_wait = 0, ramp_down_wait = 0;
1279 /* In picoseconds. */
1280 u32 source_clock_period;
1281 u32 destination_clock_period;
1284 u32 emc_cfg_pipe_clk_o;
1287 u32 mr13_flip_fspwr;
1288 u32 mr13_flip_fspop;
1291 u32 opt_do_sw_qrst = 1;
1294 u32 opt_cc_short_zcal = 1;
1295 u32 opt_short_zcal = 1;
1296 u32 save_restore_clkstop_pd = 1;
1298 u32 prelock_dll_en = 0, dll_out;
1300 int next_push, next_dq_e_ivref, next_dqs_e_ivref;
1302 u64 emc_mrw6_ab = (u64)IO_ADDRESS(TEGRA_EMC_BASE) + EMC_MRW6;
1303 u64 emc_mrw7_ab = (u64)IO_ADDRESS(TEGRA_EMC_BASE) + EMC_MRW7;
1304 u64 emc_mrw8_ab = (u64)IO_ADDRESS(TEGRA_EMC_BASE) + EMC_MRW8;
1305 u64 emc_mrw9_ab = (u64)IO_ADDRESS(TEGRA_EMC_BASE) + EMC_MRW9;
1306 u64 emc_mrw10_ch0_ab = (u64)IO_ADDRESS(TEGRA_EMC0_BASE) + EMC_MRW10;
1307 u64 emc_mrw10_ch1_ab = (u64)IO_ADDRESS(TEGRA_EMC1_BASE) + EMC_MRW10;
1308 u64 emc_mrw11_ch0_ab = (u64)IO_ADDRESS(TEGRA_EMC0_BASE) + EMC_MRW11;
1309 u64 emc_mrw11_ch1_ab = (u64)IO_ADDRESS(TEGRA_EMC1_BASE) + EMC_MRW11;
1310 u64 emc_mrw12_ch0_ab = (u64)IO_ADDRESS(TEGRA_EMC0_BASE) + EMC_MRW12;
1311 u64 emc_mrw12_ch1_ab = (u64)IO_ADDRESS(TEGRA_EMC1_BASE) + EMC_MRW12;
1312 u64 emc_mrw13_ch0_ab = (u64)IO_ADDRESS(TEGRA_EMC0_BASE) + EMC_MRW13;
1313 u64 emc_mrw13_ch1_ab = (u64)IO_ADDRESS(TEGRA_EMC1_BASE) + EMC_MRW13;
1314 u64 emc_mrw14_ab = (u64)IO_ADDRESS(TEGRA_EMC_BASE) + EMC_MRW14;
1315 u64 emc_mrw15_ab = (u64)IO_ADDRESS(TEGRA_EMC_BASE) + EMC_MRW15;
1317 u64 emc_training_ctrl_ab =
1318 (u64)IO_ADDRESS(TEGRA_EMC_BASE) + EMC_TRAINING_CTRL;
1319 u64 emc_cfg_ab = (u64)IO_ADDRESS(TEGRA_EMC_BASE) + EMC_CFG;
1320 u64 emc_mrs_wait_cnt_ab =
1321 (u64)IO_ADDRESS(TEGRA_EMC_BASE) + EMC_MRS_WAIT_CNT;
1322 u64 emc_zcal_wait_cnt_ab =
1323 (u64)IO_ADDRESS(TEGRA_EMC_BASE) + EMC_ZCAL_INTERVAL;
1324 u64 emc_zcal_interval_ab =
1325 (u64)IO_ADDRESS(TEGRA_EMC_BASE) + EMC_ZCAL_INTERVAL;
1326 u64 emc_pmacro_autocal_cfg_common_ab =
1327 (u64)IO_ADDRESS(TEGRA_EMC_BASE) + EMC_PMACRO_AUTOCAL_CFG_COMMON;
1328 u64 emc_pmacro_data_pad_tx_ctrl_ab =
1329 (u64)IO_ADDRESS(TEGRA_EMC_BASE) + EMC_PMACRO_DATA_PAD_TX_CTRL;
1330 u64 emc_pmacro_cmd_pad_tx_ctrl_ab =
1331 (u64)IO_ADDRESS(TEGRA_EMC_BASE) + EMC_PMACRO_CMD_PAD_TX_CTRL;
1332 u64 emc_pmacro_brick_ctrl_rfu1_ab =
1333 (u64)IO_ADDRESS(TEGRA_EMC_BASE) + EMC_PMACRO_BRICK_CTRL_RFU1;
1334 u64 emc_pmacro_common_pad_tx_ctrl_ab =
1335 (u64)IO_ADDRESS(TEGRA_EMC_BASE) + EMC_PMACRO_COMMON_PAD_TX_CTRL;
1336 u32 opt_war_200024907;
1340 u32 bg_regulator_switch_complete_wait_clks;
1341 u32 bg_regulator_mode_change;
1342 u32 enable_bglp_regulator;
1343 u32 enable_bg_regulator;
1355 u32 adel = 0, compensate_trimmer_applicable = 0;
1356 u32 next_timing_rate_mhz = next_timing->rate / 1000;
1358 static u32 fsp_for_next_freq;
1360 emc_cc_dbg(INFO, "Running clock change.\n");
1363 fake_timing = get_timing_from_freq(last_timing->rate);
1365 fsp_for_next_freq = !fsp_for_next_freq;
1367 dram_type = emc_readl(EMC_FBIO_CFG5) &
1368 EMC_FBIO_CFG5_DRAM_TYPE_MASK >> EMC_FBIO_CFG5_DRAM_TYPE_SHIFT;
1369 shared_zq_resistor = last_timing->burst_regs[EMC_ZCAL_WAIT_CNT_INDEX] &
1370 1 << 31; /* needs def */
1371 channel_mode = !!(last_timing->burst_regs[EMC_FBIO_CFG7_INDEX] &
1372 1 << 2); /* needs def */
1373 opt_zcal_en_cc = (next_timing->burst_regs[EMC_ZCAL_INTERVAL_INDEX] &&
1374 !last_timing->burst_regs[EMC_ZCAL_INTERVAL_INDEX]) ||
1375 dram_type == DRAM_TYPE_LPDDR4;
1376 opt_dll_mode = (dram_type == DRAM_TYPE_DDR3) ?
1377 get_dll_state(next_timing) : DLL_OFF;
1378 is_lpddr3 = (dram_type == DRAM_TYPE_LPDDR2) &&
1379 next_timing->burst_regs[EMC_FBIO_CFG5_INDEX] &
1380 1 << 25; /* needs def */
1381 opt_war_200024907 = (dram_type == DRAM_TYPE_LPDDR4);
1382 opt_dvfs_mode = MAN_SR;
1383 dram_dev_num = (mc_readl(MC_EMEM_ADR_CFG) & 0x1) + 1;
1385 emc_cfg_reg = emc_readl(EMC_CFG);
1386 emc_auto_cal_config = emc_readl(EMC_AUTO_CAL_CONFIG);
1388 source_clock_period = 1000000000 / last_timing->rate;
1389 destination_clock_period = 1000000000 / next_timing->rate;
1391 tZQCAL_lpddr4_fc_adj = (source_clock_period > zqcal_before_cc_cutoff) ?
1392 tZQCAL_lpddr4 / destination_clock_period :
1393 (tZQCAL_lpddr4 - tFC_lpddr4) / destination_clock_period;
1394 emc_dbg_o = emc_readl(EMC_DBG);
1395 emc_pin_o = emc_readl(EMC_PIN);
1396 emc_cfg_pipe_clk_o = emc_readl(EMC_CFG_PIPE_CLK);
1397 emc_dbg = emc_dbg_o;
1399 emc_cfg = next_timing->burst_regs[EMC_CFG_INDEX];
1400 emc_cfg &= ~(EMC_CFG_DYN_SELF_REF | EMC_CFG_DRAM_ACPD |
1401 EMC_CFG_DRAM_CLKSTOP_SR | EMC_CFG_DRAM_CLKSTOP_PD);
1402 emc_sel_dpd_ctrl = next_timing->emc_sel_dpd_ctrl;
1403 emc_sel_dpd_ctrl &= ~(EMC_SEL_DPD_CTRL_CLK_SEL_DPD_EN |
1404 EMC_SEL_DPD_CTRL_CA_SEL_DPD_EN |
1405 EMC_SEL_DPD_CTRL_RESET_SEL_DPD_EN |
1406 EMC_SEL_DPD_CTRL_ODT_SEL_DPD_EN |
1407 EMC_SEL_DPD_CTRL_DATA_SEL_DPD_EN);
1409 emc_cc_dbg(INFO, "Clock change version: %d\n",
1410 DVFS_CLOCK_CHANGE_VERSION);
1411 emc_cc_dbg(INFO, "DRAM type = %d\n", dram_type);
1412 emc_cc_dbg(INFO, "DRAM dev #: %d\n", dram_dev_num);
1413 emc_cc_dbg(INFO, "Next EMC clksrc: 0x%08x\n", clksrc);
1414 emc_cc_dbg(INFO, "DLL clksrc: 0x%08x\n", next_timing->dll_clk_src);
1415 emc_cc_dbg(INFO, "last rate: %lu, next rate %lu\n", last_timing->rate,
1417 emc_cc_dbg(INFO, "last period: %u, next period: %u\n",
1418 source_clock_period, destination_clock_period);
1419 emc_cc_dbg(INFO, " shared_zq_resistor: %d\n", !!shared_zq_resistor);
1420 emc_cc_dbg(INFO, " channel_mode: %d\n", channel_mode);
1421 emc_cc_dbg(INFO, " opt_dll_mode: %d\n", opt_dll_mode);
1424 * Pre DVFS SW sequence.
1426 emc_cc_dbg(STEPS, "Step 1\n");
1427 emc_cc_dbg(STEPS, "Step 1.1: Disable DLL temporarily.\n");
1428 tmp = emc_readl(EMC_CFG_DIG_DLL);
1429 tmp &= ~EMC_CFG_DIG_DLL_CFG_DLL_EN;
1430 emc_writel(tmp, EMC_CFG_DIG_DLL);
1432 emc_timing_update(channel_mode);
1433 wait_for_update(EMC_CFG_DIG_DLL,
1434 EMC_CFG_DIG_DLL_CFG_DLL_EN, 0, 0);
1436 wait_for_update(EMC_CFG_DIG_DLL,
1437 EMC_CFG_DIG_DLL_CFG_DLL_EN, 0, 1);
1439 emc_cc_dbg(STEPS, "Step 1.2: Disable AUTOCAL temporarily.\n");
1440 emc_auto_cal_config = next_timing->emc_auto_cal_config;
1441 auto_cal_en = emc_auto_cal_config & EMC_AUTO_CAL_CONFIG_AUTO_CAL_ENABLE;
1442 emc_auto_cal_config &= ~EMC_AUTO_CAL_CONFIG_AUTO_CAL_START;
1443 emc_auto_cal_config |= EMC_AUTO_CAL_CONFIG_AUTO_CAL_MEASURE_STALL;
1444 emc_auto_cal_config |= EMC_AUTO_CAL_CONFIG_AUTO_CAL_UPDATE_STALL;
1445 emc_auto_cal_config |= auto_cal_en;
1446 emc_writel(emc_auto_cal_config, EMC_AUTO_CAL_CONFIG);
1447 emc_readl(EMC_AUTO_CAL_CONFIG); /* Flush write. */
1449 emc_cc_dbg(STEPS, "Step 1.3: Disable other power features.\n");
1450 emc_set_shadow_bypass(ACTIVE);
1451 emc_writel(emc_cfg, EMC_CFG);
1452 emc_writel(emc_sel_dpd_ctrl, EMC_SEL_DPD_CTRL);
1453 emc_set_shadow_bypass(ASSEMBLY);
1455 if (next_timing->periodic_training) {
1456 __reset_dram_clktree_values(next_timing);
1458 wait_for_update(EMC_EMC_STATUS,
1459 EMC_EMC_STATUS_DRAM_IN_POWERDOWN_MASK, 0, 0);
1461 wait_for_update(EMC_EMC_STATUS,
1462 EMC_EMC_STATUS_DRAM_IN_POWERDOWN_MASK, 0, 1);
1464 wait_for_update(EMC_EMC_STATUS,
1465 EMC_EMC_STATUS_DRAM_IN_SELF_REFRESH_MASK, 0, 0);
1467 wait_for_update(EMC_EMC_STATUS,
1468 EMC_EMC_STATUS_DRAM_IN_SELF_REFRESH_MASK, 0, 1);
1470 start_periodic_compensation();
1472 udelay(((1000 * actual_osc_clocks(last_timing->run_clocks)) /
1473 last_timing->rate) + 2);
1474 adel = update_clock_tree_delay(fake_timing, next_timing,
1475 dram_dev_num, channel_mode);
1476 compensate_trimmer_applicable =
1477 next_timing->periodic_training &&
1478 ((adel * 128 * next_timing_rate_mhz) / 1000000) >
1479 next_timing->tree_margin;
1482 emc_cc_dbg(SUB_STEPS, "Step 1.1: Bug 200024907 - Patch RP R2P");
1483 if (opt_war_200024907) {
1485 if (source_clock_period >= 1000000/1866) /* 535.91 ps */
1487 if (source_clock_period >= 1000000/1600) /* 625.00 ps */
1489 if (source_clock_period >= 1000000/1333) /* 750.19 ps */
1491 if (source_clock_period >= 1000000/1066) /* 938.09 ps */
1494 deltaTWATM = max_t(u32, div_o3(7500, source_clock_period), 8);
1497 * Originally there was a + .5 in the tRPST calculation.
1498 * However since we can't do FP in the kernel and the tRTM
1499 * computation was in a floating point ceiling function, adding
1500 * one to tRTP should be ok. There is no other source of non
1501 * integer values, so the result was always going to be
1502 * something for the form: f_ceil(N + .5) = N + 1;
1504 tRPST = ((last_timing->emc_mrw & 0x80) >> 7);
1505 tRTM = fake_timing->dram_timing_regs[RL] +
1506 div_o3(3600, source_clock_period) +
1507 max_t(u32, div_o3(7500, source_clock_period), 8) +
1510 emc_cc_dbg(INFO, "tRTM = %u, EMC_RP = %u\n", tRTM,
1511 next_timing->burst_regs[EMC_RP_INDEX]);
1513 if (last_timing->burst_regs[EMC_RP_INDEX] < tRTM) {
1514 if (tRTM > (last_timing->burst_regs[EMC_R2P_INDEX] +
1515 last_timing->burst_regs[EMC_RP_INDEX])) {
1517 last_timing->burst_regs[EMC_RP_INDEX];
1518 RP_war = last_timing->burst_regs[EMC_RP_INDEX];
1520 last_timing->burst_regs[EMC_TRPAB_INDEX];
1523 last_timing->burst_regs
1524 [EMC_RP_INDEX] - 63;
1525 if (TRPab_war < RP_war)
1530 R2P_war = last_timing->
1531 burst_regs[EMC_R2P_INDEX];
1532 RP_war = last_timing->burst_regs[EMC_RP_INDEX];
1534 last_timing->burst_regs[EMC_TRPAB_INDEX];
1537 if (RP_war < deltaTWATM) {
1538 W2P_war = last_timing->burst_regs[EMC_W2P_INDEX]
1539 + deltaTWATM - RP_war;
1541 RP_war = RP_war + W2P_war - 63;
1542 if (TRPab_war < RP_war)
1548 last_timing->burst_regs[EMC_W2P_INDEX];
1551 if((last_timing->burst_regs[EMC_W2P_INDEX] ^ W2P_war) ||
1552 (last_timing->burst_regs[EMC_R2P_INDEX] ^ R2P_war) ||
1553 (last_timing->burst_regs[EMC_RP_INDEX] ^ RP_war) ||
1554 (last_timing->burst_regs[EMC_TRPAB_INDEX] ^
1556 emc_writel(RP_war, EMC_RP);
1557 emc_writel(R2P_war, EMC_R2P);
1558 emc_writel(W2P_war, EMC_W2P);
1559 emc_writel(TRPab_war, EMC_TRPAB);
1561 emc_timing_update(DUAL_CHANNEL);
1563 emc_cc_dbg(INFO, "Skipped WAR for bug 200024907\n");
1567 emc_writel(EMC_INTSTATUS_CLKCHANGE_COMPLETE, EMC_INTSTATUS);
1568 emc_set_shadow_bypass(ACTIVE);
1569 emc_writel(emc_cfg, EMC_CFG);
1570 emc_writel(emc_sel_dpd_ctrl, EMC_SEL_DPD_CTRL);
1571 emc_writel(emc_cfg_pipe_clk_o | EMC_CFG_PIPE_CLK_CLK_ALWAYS_ON,
1573 emc_writel(next_timing->emc_fdpd_ctrl_cmd_no_ramp &
1574 ~EMC_FDPD_CTRL_CMD_NO_RAMP_CMD_DPD_NO_RAMP_ENABLE,
1575 EMC_FDPD_CTRL_CMD_NO_RAMP);
1577 bg_regulator_mode_change =
1578 ((next_timing->burst_regs[EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
1579 EMC_PMACRO_BG_BIAS_CTRL_0_BGLP_E_PWRD) ^
1580 (last_timing->burst_regs[EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
1581 EMC_PMACRO_BG_BIAS_CTRL_0_BGLP_E_PWRD)) ||
1582 ((next_timing->burst_regs[EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
1583 EMC_PMACRO_BG_BIAS_CTRL_0_BG_E_PWRD) ^
1584 (last_timing->burst_regs[EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
1585 EMC_PMACRO_BG_BIAS_CTRL_0_BG_E_PWRD));
1586 enable_bglp_regulator =
1587 (next_timing->burst_regs[EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
1588 EMC_PMACRO_BG_BIAS_CTRL_0_BGLP_E_PWRD) == 0;
1589 enable_bg_regulator =
1590 (next_timing->burst_regs[EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
1591 EMC_PMACRO_BG_BIAS_CTRL_0_BG_E_PWRD) == 0;
1593 if (bg_regulator_mode_change) {
1594 if (enable_bg_regulator)
1595 emc_writel(last_timing->burst_regs
1596 [EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
1597 ~EMC_PMACRO_BG_BIAS_CTRL_0_BG_E_PWRD,
1598 EMC_PMACRO_BG_BIAS_CTRL_0);
1600 emc_writel(last_timing->burst_regs
1601 [EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
1602 ~EMC_PMACRO_BG_BIAS_CTRL_0_BGLP_E_PWRD,
1603 EMC_PMACRO_BG_BIAS_CTRL_0);
1607 /* Check if we need to turn on VREF generator. */
1608 if ((!(last_timing->burst_regs[EMC_PMACRO_DATA_PAD_TX_CTRL_INDEX] &
1609 EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQ_E_IVREF) &&
1610 (next_timing->burst_regs[EMC_PMACRO_DATA_PAD_TX_CTRL_INDEX] &
1611 EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQ_E_IVREF)) ||
1612 (!(last_timing->burst_regs[EMC_PMACRO_DATA_PAD_TX_CTRL_INDEX] &
1613 EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQS_E_IVREF) &&
1614 (next_timing->burst_regs[EMC_PMACRO_DATA_PAD_TX_CTRL_INDEX] &
1615 EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQS_E_IVREF))) {
1617 next_timing->burst_regs[EMC_PMACRO_DATA_PAD_TX_CTRL_INDEX];
1618 u32 last_pad_tx_ctrl =
1619 last_timing->burst_regs[EMC_PMACRO_DATA_PAD_TX_CTRL_INDEX];
1621 next_dqs_e_ivref = pad_tx_ctrl &
1622 EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQS_E_IVREF;
1623 next_dq_e_ivref = pad_tx_ctrl &
1624 EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQ_E_IVREF;
1625 next_push = (last_pad_tx_ctrl &
1626 ~EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQ_E_IVREF &
1627 ~EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQS_E_IVREF) |
1628 next_dq_e_ivref | next_dqs_e_ivref;
1629 emc_writel(next_push, EMC_PMACRO_DATA_PAD_TX_CTRL);
1631 } else if (bg_regulator_mode_change) {
1635 emc_set_shadow_bypass(ASSEMBLY);
1640 emc_cc_dbg(STEPS, "Step 2\n");
1641 if (next_timing->burst_regs[EMC_CFG_DIG_DLL_INDEX] &
1642 EMC_CFG_DIG_DLL_CFG_DLL_EN) {
1643 emc_cc_dbg(INFO, "Prelock enabled for target frequency.\n");
1644 dll_out = dll_prelock(next_timing, 0, clksrc);
1645 emc_cc_dbg(INFO, "DLL out: 0x%03x\n", dll_out);
1648 emc_cc_dbg(INFO, "Disabling DLL for target frequency.\n");
1649 dll_disable(channel_mode);
1653 * Prepare autocal for the clock change.
1655 emc_cc_dbg(STEPS, "Step 3\n");
1656 emc_set_shadow_bypass(ACTIVE);
1657 emc_writel(next_timing->emc_auto_cal_config2, EMC_AUTO_CAL_CONFIG2);
1658 emc_writel(next_timing->emc_auto_cal_config3, EMC_AUTO_CAL_CONFIG3);
1659 emc_writel(next_timing->emc_auto_cal_config4, EMC_AUTO_CAL_CONFIG4);
1660 emc_writel(next_timing->emc_auto_cal_config5, EMC_AUTO_CAL_CONFIG5);
1661 emc_writel(next_timing->emc_auto_cal_config6, EMC_AUTO_CAL_CONFIG6);
1662 emc_writel(next_timing->emc_auto_cal_config7, EMC_AUTO_CAL_CONFIG7);
1663 emc_writel(next_timing->emc_auto_cal_config8, EMC_AUTO_CAL_CONFIG8);
1664 emc_set_shadow_bypass(ASSEMBLY);
1666 emc_auto_cal_config |= (EMC_AUTO_CAL_CONFIG_AUTO_CAL_COMPUTE_START |
1668 emc_writel(emc_auto_cal_config, EMC_AUTO_CAL_CONFIG);
1671 * Update EMC_CFG. (??)
1673 emc_cc_dbg(STEPS, "Step 4\n");
1674 if (source_clock_period > 50000 && dram_type == DRAM_TYPE_LPDDR4)
1675 ccfifo_writel(1, EMC_SELF_REF, 0);
1677 emc_writel(next_timing->emc_cfg_2, EMC_CFG_2);
1680 * Prepare reference variables for ZQCAL regs.
1682 emc_cc_dbg(STEPS, "Step 5\n");
1683 emc_zcal_interval = 0;
1684 emc_zcal_wait_cnt_old =
1685 last_timing->burst_regs[EMC_ZCAL_WAIT_CNT_INDEX];
1686 emc_zcal_wait_cnt_new =
1687 next_timing->burst_regs[EMC_ZCAL_WAIT_CNT_INDEX];
1688 emc_zcal_wait_cnt_old &= ~EMC_ZCAL_WAIT_CNT_ZCAL_WAIT_CNT_MASK;
1689 emc_zcal_wait_cnt_new &= ~EMC_ZCAL_WAIT_CNT_ZCAL_WAIT_CNT_MASK;
1691 if (dram_type == DRAM_TYPE_LPDDR4)
1692 zq_wait_long = max((u32)1,
1693 div_o3(1000000, destination_clock_period));
1694 else if (dram_type == DRAM_TYPE_LPDDR2 || is_lpddr3)
1695 zq_wait_long = max(next_timing->min_mrs_wait,
1696 div_o3(360000, destination_clock_period)) + 4;
1697 else if (dram_type == DRAM_TYPE_DDR3)
1698 zq_wait_long = max((u32)256,
1699 div_o3(320000, destination_clock_period) + 2);
1703 if (dram_type == DRAM_TYPE_LPDDR2 || is_lpddr3)
1704 zq_wait_short = max(max(next_timing->min_mrs_wait, (u32)6),
1705 div_o3(90000, destination_clock_period)) + 4;
1706 else if (dram_type == DRAM_TYPE_DDR3)
1707 zq_wait_short = max((u32)64,
1708 div_o3(80000, destination_clock_period)) + 2;
1713 * Training code - removed.
1715 emc_cc_dbg(STEPS, "Step 6\n");
1718 * Program FSP reference registers and send MRWs to new FSPWR.
1720 emc_cc_dbg(STEPS, "Step 7\n");
1721 if (!fsp_for_next_freq) {
1722 mr13_flip_fspwr = (next_timing->emc_mrw3 & 0xffffff3f) | 0x80;
1723 mr13_flip_fspop = (next_timing->emc_mrw3 & 0xffffff3f) | 0x00;
1725 mr13_flip_fspwr = (next_timing->emc_mrw3 & 0xffffff3f) | 0x40;
1726 mr13_flip_fspop = (next_timing->emc_mrw3 & 0xffffff3f) | 0xc0;
1729 mr13_catr_enable = (mr13_flip_fspwr & 0xFFFFFFFE) | 0x01;
1730 if (dram_dev_num == TWO_RANK)
1732 (mr13_catr_enable & 0x3fffffff) | 0x80000000;
1734 if (dram_type == DRAM_TYPE_LPDDR4) {
1735 emc_writel(mr13_flip_fspwr, EMC_MRW3);
1736 emc_writel(next_timing->emc_mrw, EMC_MRW);
1737 emc_writel(next_timing->emc_mrw2, EMC_MRW2);
1741 * Program the shadow registers.
1743 emc_cc_dbg(STEPS, "Step 8\n");
1744 emc_cc_dbg(SUB_STEPS, "Writing burst_regs\n");
1745 for (i = 0; i < next_timing->burst_regs_num; i++) {
1749 if (!burst_reg_off[i])
1752 var = (u64)burst_reg_off[i];
1753 wval = next_timing->burst_regs[i];
1755 if (dram_type != DRAM_TYPE_LPDDR4 &&
1756 (var == emc_mrw6_ab || var == emc_mrw7_ab ||
1757 var == emc_mrw8_ab || var == emc_mrw9_ab ||
1758 var == emc_mrw10_ch0_ab || var == emc_mrw10_ch1_ab ||
1759 var == emc_mrw11_ch0_ab || var == emc_mrw11_ch1_ab ||
1760 var == emc_mrw12_ch0_ab || var == emc_mrw12_ch1_ab ||
1761 var == emc_mrw13_ch0_ab || var == emc_mrw13_ch1_ab ||
1762 var == emc_mrw14_ab || var == emc_mrw15_ab ||
1763 var == emc_training_ctrl_ab))
1766 /* Pain... And suffering. */
1767 if (var == emc_cfg_ab) {
1768 wval &= ~EMC_CFG_DRAM_ACPD;
1769 wval &= ~EMC_CFG_DYN_SELF_REF;
1770 if (dram_type == DRAM_TYPE_LPDDR4) {
1771 wval &= ~EMC_CFG_DRAM_CLKSTOP_SR;
1772 wval &= ~EMC_CFG_DRAM_CLKSTOP_PD;
1774 } else if (var == emc_mrs_wait_cnt_ab &&
1775 dram_type == DRAM_TYPE_LPDDR2 &&
1776 opt_zcal_en_cc && !opt_cc_short_zcal &&
1778 wval = (wval & ~(EMC_MRS_WAIT_CNT_SHORT_WAIT_MASK <<
1779 EMC_MRS_WAIT_CNT_SHORT_WAIT_SHIFT)) |
1780 ((zq_wait_long & EMC_MRS_WAIT_CNT_SHORT_WAIT_MASK) <<
1781 EMC_MRS_WAIT_CNT_SHORT_WAIT_SHIFT);
1782 } else if (var == emc_zcal_wait_cnt_ab &&
1783 dram_type == DRAM_TYPE_DDR3 && opt_zcal_en_cc &&
1784 !opt_cc_short_zcal && opt_short_zcal) {
1785 wval = (wval & ~(EMC_ZCAL_WAIT_CNT_ZCAL_WAIT_CNT_MASK <<
1786 EMC_ZCAL_WAIT_CNT_ZCAL_WAIT_CNT_SHIFT)) |
1788 EMC_ZCAL_WAIT_CNT_ZCAL_WAIT_CNT_MASK) <<
1789 EMC_MRS_WAIT_CNT_SHORT_WAIT_SHIFT);
1790 } else if (var == emc_zcal_interval_ab && opt_zcal_en_cc) {
1791 wval = 0; /* EMC_ZCAL_INTERVAL reset value. */
1792 } else if (var == emc_pmacro_autocal_cfg_common_ab) {
1793 wval |= EMC_PMACRO_AUTOCAL_CFG_COMMON_E_CAL_BYPASS_DVFS;
1794 } else if (var == emc_pmacro_data_pad_tx_ctrl_ab) {
1796 ~(EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQSP_TX_E_DCC |
1797 EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQSN_TX_E_DCC |
1798 EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQ_TX_E_DCC |
1799 EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_CMD_TX_E_DCC);
1800 } else if (var == emc_pmacro_cmd_pad_tx_ctrl_ab) {
1801 wval |= EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_DQ_TX_DRVFORCEON;
1802 wval &= ~(EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_DQSP_TX_E_DCC |
1803 EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_DQSN_TX_E_DCC |
1804 EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_DQ_TX_E_DCC |
1805 EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_CMD_TX_E_DCC);
1806 } else if (var == emc_pmacro_brick_ctrl_rfu1_ab) {
1808 } else if (var == emc_pmacro_common_pad_tx_ctrl_ab) {
1812 emc_cc_dbg(REG_LISTS, "(%u) 0x%08x => 0x%p\n",
1813 i, wval, (void *)var);
1814 __raw_writel(wval, (void __iomem *)var);
1817 /* SW addition: do EMC refresh adjustment here. */
1818 set_over_temp_timing(next_timing, dram_over_temp_state);
1820 if (dram_type == DRAM_TYPE_LPDDR4) {
1821 mrw_req = (23 << EMC_MRW_MRW_MA_SHIFT) |
1822 (next_timing->run_clocks & EMC_MRW_MRW_OP_MASK);
1823 emc_writel(mrw_req, EMC_MRW);
1826 /* Per channel burst registers. */
1827 emc_cc_dbg(SUB_STEPS, "Writing burst_regs_per_ch\n");
1828 for (i = 0; i < next_timing->burst_regs_per_ch_num; i++) {
1829 if (!burst_perch_reg_off[i])
1832 if (dram_type != DRAM_TYPE_LPDDR4 &&
1833 ((u64)burst_perch_reg_off[i] == emc_mrw6_ab ||
1834 (u64)burst_perch_reg_off[i] == emc_mrw7_ab ||
1835 (u64)burst_perch_reg_off[i] == emc_mrw8_ab ||
1836 (u64)burst_perch_reg_off[i] == emc_mrw9_ab ||
1837 (u64)burst_perch_reg_off[i] == emc_mrw10_ch0_ab ||
1838 (u64)burst_perch_reg_off[i] == emc_mrw10_ch1_ab ||
1839 (u64)burst_perch_reg_off[i] == emc_mrw11_ch0_ab ||
1840 (u64)burst_perch_reg_off[i] == emc_mrw11_ch1_ab ||
1841 (u64)burst_perch_reg_off[i] == emc_mrw12_ch0_ab ||
1842 (u64)burst_perch_reg_off[i] == emc_mrw12_ch1_ab ||
1843 (u64)burst_perch_reg_off[i] == emc_mrw13_ch0_ab ||
1844 (u64)burst_perch_reg_off[i] == emc_mrw13_ch1_ab ||
1845 (u64)burst_perch_reg_off[i] == emc_mrw14_ab ||
1846 (u64)burst_perch_reg_off[i] == emc_mrw15_ab))
1849 /* Filter out second channel if not in DUAL_CHANNEL mode. */
1850 if (channel_mode != DUAL_CHANNEL &&
1851 (u64)burst_perch_reg_off[i] >=
1852 (u64)IO_ADDRESS(TEGRA_EMC1_BASE))
1855 emc_cc_dbg(REG_LISTS, "(%u) 0x%08x => 0x%p\n",
1856 i, next_timing->burst_regs_per_ch[i],
1857 burst_perch_reg_off[i]);
1858 __raw_writel(next_timing->burst_regs_per_ch[i],
1859 burst_perch_reg_off[i]);
1863 emc_cc_dbg(SUB_STEPS, "Writing vref_regs\n");
1864 for (i = 0; i < next_timing->vref_regs_num; i++) {
1865 if (!vref_reg_off[i])
1868 if (channel_mode != DUAL_CHANNEL &&
1869 (u64)vref_reg_off[i] >= (u64)IO_ADDRESS(TEGRA_EMC1_BASE))
1872 emc_cc_dbg(REG_LISTS, "(%u) 0x%08x => 0x%p\n",
1873 i, next_timing->vref_regs[i], vref_reg_off[i]);
1874 __raw_writel(next_timing->vref_regs[i], vref_reg_off[i]);
1878 emc_cc_dbg(SUB_STEPS, "Writing trim_regs\n");
1879 for (i = 0; i < next_timing->trim_regs_num; i++) {
1882 if (!trim_reg_off[i])
1885 trim_reg = (u64)trim_reg_off[i] & 0xfff;
1886 if (compensate_trimmer_applicable &&
1887 (trim_reg == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_0 ||
1888 trim_reg == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_1 ||
1889 trim_reg == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_2 ||
1890 trim_reg == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_3 ||
1891 trim_reg == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_0 ||
1892 trim_reg == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_1 ||
1893 trim_reg == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_2 ||
1894 trim_reg == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_3 ||
1895 trim_reg == EMC_DATA_BRLSHFT_0 ||
1896 trim_reg == EMC_DATA_BRLSHFT_1)) {
1898 apply_periodic_compensation_trimmer(next_timing,
1900 emc_cc_dbg(REG_LISTS, "(%u) 0x%08x => 0x%p\n", i, reg,
1902 __raw_writel(reg, trim_reg_off[i]);
1904 emc_cc_dbg(REG_LISTS, "(%u) 0x%08x => 0x%p\n",
1905 i, next_timing->trim_regs[i],
1907 __raw_writel(next_timing->trim_regs[i],
1913 /* Per channel trimmers. */
1914 emc_cc_dbg(SUB_STEPS, "Writing trim_regs_per_ch\n");
1915 for (i = 0; i < next_timing->trim_regs_per_ch_num; i++) {
1918 if (!trim_perch_reg_off[i])
1921 if (channel_mode != DUAL_CHANNEL &&
1922 (u64)vref_reg_off[i] >=
1923 (u64)IO_ADDRESS(TEGRA_EMC1_BASE))
1926 trim_reg = (u64)trim_perch_reg_off[i] & 0xfff;
1927 if (compensate_trimmer_applicable &&
1928 (trim_reg == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_0 ||
1929 trim_reg == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_1 ||
1930 trim_reg == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_2 ||
1931 trim_reg == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_3 ||
1932 trim_reg == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_0 ||
1933 trim_reg == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_1 ||
1934 trim_reg == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_2 ||
1935 trim_reg == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_3 ||
1936 trim_reg == EMC_DATA_BRLSHFT_0 ||
1937 trim_reg == EMC_DATA_BRLSHFT_1)) {
1939 apply_periodic_compensation_trimmer(next_timing,
1941 emc_cc_dbg(REG_LISTS, "(%u) 0x%08x => 0x%p\n",
1942 i, reg, trim_perch_reg_off[i]);
1944 trim_perch_reg_off[i]);
1946 emc_cc_dbg(REG_LISTS, "(%u) 0x%08x => 0x%p\n",
1947 i, next_timing->trim_regs_per_ch[i],
1948 trim_perch_reg_off[i]);
1949 __raw_writel(next_timing->trim_regs_per_ch[i],
1950 trim_perch_reg_off[i]);
1954 emc_cc_dbg(SUB_STEPS, "Writing burst_mc_regs\n");
1955 for (i = 0; i < next_timing->burst_mc_regs_num; i++) {
1956 emc_cc_dbg(REG_LISTS, "(%u) 0x%08x => 0x%p\n",
1957 i, next_timing->burst_mc_regs[i],
1958 burst_mc_reg_off[i]);
1959 __raw_writel(next_timing->burst_mc_regs[i],
1960 burst_mc_reg_off[i]);
1963 /* Registers to be programmed on the faster clock. */
1964 if (next_timing->rate < last_timing->rate) {
1965 emc_cc_dbg(SUB_STEPS, "Writing la_scale_regs\n");
1966 for (i = 0; i < next_timing->la_scale_regs_num; i++) {
1967 emc_cc_dbg(REG_LISTS, "(%u) 0x%08x => 0x%p\n",
1968 i, next_timing->la_scale_regs[i],
1969 la_scale_off_regs[i]);
1970 __raw_writel(next_timing->la_scale_regs[i],
1971 la_scale_off_regs[i]);
1975 /* Flush all the burst register writes. */
1981 emc_cc_dbg(STEPS, "Step 9\n");
1982 if (dram_type == DRAM_TYPE_LPDDR4) {
1983 emc_writel(emc_zcal_interval, EMC_ZCAL_INTERVAL);
1984 emc_writel(emc_zcal_wait_cnt_new, EMC_ZCAL_WAIT_CNT);
1986 emc_dbg |= (EMC_DBG_WRITE_MUX_ACTIVE |
1987 EMC_DBG_WRITE_ACTIVE_ONLY);
1989 emc_writel(emc_dbg, EMC_DBG);
1990 emc_writel(emc_zcal_interval, EMC_ZCAL_INTERVAL);
1991 emc_writel(emc_dbg_o, EMC_DBG);
1995 * LPDDR4 and DDR3 common section.
1997 emc_cc_dbg(STEPS, "Step 10\n");
1998 if (opt_dvfs_mode == MAN_SR || dram_type == DRAM_TYPE_LPDDR4) {
1999 if (dram_type == DRAM_TYPE_LPDDR4)
2000 ccfifo_writel(0x101, EMC_SELF_REF, 0);
2002 ccfifo_writel(0x1, EMC_SELF_REF, 0);
2004 if (dram_type == DRAM_TYPE_LPDDR4 &&
2005 source_clock_period <= zqcal_before_cc_cutoff) {
2006 ccfifo_writel(mr13_flip_fspwr ^ 0x40, EMC_MRW3, 0);
2007 ccfifo_writel((next_timing->burst_regs[EMC_MRW6_INDEX] &
2009 (last_timing->burst_regs[EMC_MRW6_INDEX] &
2010 0x0000C0C0), EMC_MRW6, 0);
2012 (next_timing->burst_regs[EMC_MRW14_INDEX] &
2014 (last_timing->burst_regs[EMC_MRW14_INDEX] &
2015 0x00003838), EMC_MRW14, 0);
2017 if (dram_dev_num == TWO_RANK) {
2019 (next_timing->burst_regs[EMC_MRW7_INDEX] &
2021 (last_timing->burst_regs[EMC_MRW7_INDEX] &
2022 0x0000C0C0), EMC_MRW7, 0);
2024 (next_timing->burst_regs[EMC_MRW15_INDEX] &
2026 (last_timing->burst_regs[EMC_MRW15_INDEX] &
2027 0x00003838), EMC_MRW15, 0);
2029 if (opt_zcal_en_cc) {
2030 if (dram_dev_num == ONE_RANK)
2032 2 << EMC_ZQ_CAL_DEV_SEL_SHIFT |
2033 EMC_ZQ_CAL_ZQ_CAL_CMD,
2035 else if (shared_zq_resistor)
2037 2 << EMC_ZQ_CAL_DEV_SEL_SHIFT |
2038 EMC_ZQ_CAL_ZQ_CAL_CMD,
2041 ccfifo_writel(EMC_ZQ_CAL_ZQ_CAL_CMD,
2047 emc_dbg = emc_dbg_o;
2048 if (dram_type == DRAM_TYPE_LPDDR4) {
2049 ccfifo_writel(mr13_flip_fspop | 0x8, EMC_MRW3,
2050 (1000 * fake_timing->dram_timing_regs[T_RP]) /
2051 source_clock_period);
2052 ccfifo_writel(0, 0, tFC_lpddr4 / source_clock_period);
2055 if (dram_type == DRAM_TYPE_LPDDR4 || opt_dvfs_mode != MAN_SR) {
2056 u32 t = 30 + (cya_allow_ref_cc ?
2057 (4000 * fake_timing->dram_timing_regs[T_RFC]) +
2058 ((1000 * fake_timing->dram_timing_regs[T_RP]) /
2059 source_clock_period) : 0);
2061 ccfifo_writel(emc_pin_o & ~(EMC_PIN_PIN_CKE_PER_DEV |
2062 EMC_PIN_PIN_CKEB | EMC_PIN_PIN_CKE),
2068 cya_issue_pc_ref = 0;
2070 ref_delay_mult += ref_b4_sref_en ? 1 : 0;
2071 ref_delay_mult += cya_allow_ref_cc ? 1 : 0;
2072 ref_delay_mult += cya_issue_pc_ref ? 1 : 0;
2073 ref_delay = ref_delay_mult *
2074 ((1000 * fake_timing->dram_timing_regs[T_RP]
2075 / source_clock_period) +
2076 (1000 * fake_timing->dram_timing_regs[T_RFC] /
2077 source_clock_period)) + 20;
2082 emc_cc_dbg(STEPS, "Step 11\n");
2083 ccfifo_writel(0x0, EMC_CFG_SYNC,
2084 dram_type == DRAM_TYPE_LPDDR4 ? 0 : ref_delay);
2086 emc_dbg_active = emc_dbg | (EMC_DBG_WRITE_MUX_ACTIVE | /* Redundant. */
2087 EMC_DBG_WRITE_ACTIVE_ONLY);
2088 ccfifo_writel(emc_dbg_active, EMC_DBG, 0);
2090 /* Todo: implement do_dvfs_power_ramp_down */
2091 ramp_down_wait = do_dvfs_power_ramp_down(source_clock_period, 0,
2092 last_timing, next_timing);
2095 * And finally - trigger the clock change.
2097 emc_cc_dbg(STEPS, "Step 12\n");
2098 ccfifo_writel(1, EMC_STALL_THEN_EXE_AFTER_CLKCHANGE, 0);
2099 emc_dbg_active &= ~EMC_DBG_WRITE_ACTIVE_ONLY;
2100 ccfifo_writel(emc_dbg_active, EMC_DBG, 0);
2105 /* Todo: implement do_dvfs_power_ramp_up(). */
2106 emc_cc_dbg(STEPS, "Step 13\n");
2107 ramp_up_wait = do_dvfs_power_ramp_up(destination_clock_period, 0,
2108 last_timing, next_timing);
2109 ccfifo_writel(emc_dbg, EMC_DBG, 0);
2114 emc_cc_dbg(STEPS, "Step 14\n");
2115 if (dram_type == DRAM_TYPE_LPDDR4) {
2116 u32 r = emc_pin_o | EMC_PIN_PIN_CKE;
2117 if (dram_dev_num == TWO_RANK)
2118 ccfifo_writel(r | EMC_PIN_PIN_CKEB |
2119 EMC_PIN_PIN_CKE_PER_DEV, EMC_PIN,
2122 ccfifo_writel(r & ~(EMC_PIN_PIN_CKEB |
2123 EMC_PIN_PIN_CKE_PER_DEV),
2127 /* Step 15: (two step 15s ??)
2128 * Calculate zqlatch wait time; has dependency on ramping times.
2130 emc_cc_dbg(STEPS, "Step 15\n");
2132 if (source_clock_period <= zqcal_before_cc_cutoff) {
2133 s32 t = (s32)(ramp_up_wait + ramp_down_wait) /
2134 (s32)destination_clock_period;
2135 zq_latch_dvfs_wait_time = (s32)tZQCAL_lpddr4_fc_adj - t;
2137 zq_latch_dvfs_wait_time = tZQCAL_lpddr4_fc_adj -
2138 div_o3(1000 * next_timing->dram_timing_regs[T_PDEX],
2139 destination_clock_period);
2142 emc_cc_dbg(INFO, "tZQCAL_lpddr4_fc_adj = %u\n", tZQCAL_lpddr4_fc_adj);
2143 emc_cc_dbg(INFO, "destination_clock_period = %u\n",
2144 destination_clock_period);
2145 emc_cc_dbg(INFO, "next_timing->dram_timing_regs[T_PDEX] = %u\n",
2146 next_timing->dram_timing_regs[T_PDEX]);
2147 emc_cc_dbg(INFO, "zq_latch_dvfs_wait_time = %d\n",
2148 max_t(s32, 0, zq_latch_dvfs_wait_time));
2150 if (dram_type == DRAM_TYPE_LPDDR4 && opt_zcal_en_cc) {
2151 if (dram_dev_num == ONE_RANK) {
2152 if (source_clock_period > zqcal_before_cc_cutoff)
2153 ccfifo_writel(2 << EMC_ZQ_CAL_DEV_SEL_SHIFT |
2154 EMC_ZQ_CAL_ZQ_CAL_CMD, EMC_ZQ_CAL,
2156 next_timing->dram_timing_regs[T_PDEX],
2157 destination_clock_period));
2158 ccfifo_writel((mr13_flip_fspop & 0xFFFFFFF7) |
2159 0x0C000000, EMC_MRW3,
2161 next_timing->dram_timing_regs[T_PDEX],
2162 destination_clock_period));
2163 ccfifo_writel(0, EMC_SELF_REF, 0);
2164 ccfifo_writel(0, EMC_REF, 0);
2165 ccfifo_writel(2 << EMC_ZQ_CAL_DEV_SEL_SHIFT |
2166 EMC_ZQ_CAL_ZQ_LATCH_CMD,
2168 max_t(s32, 0, zq_latch_dvfs_wait_time));
2169 } else if (shared_zq_resistor) {
2170 if (source_clock_period > zqcal_before_cc_cutoff)
2171 ccfifo_writel(2 << EMC_ZQ_CAL_DEV_SEL_SHIFT |
2172 EMC_ZQ_CAL_ZQ_CAL_CMD, EMC_ZQ_CAL,
2174 next_timing->dram_timing_regs[T_PDEX],
2175 destination_clock_period));
2177 ccfifo_writel(2 << EMC_ZQ_CAL_DEV_SEL_SHIFT |
2178 EMC_ZQ_CAL_ZQ_LATCH_CMD, EMC_ZQ_CAL,
2179 max_t(s32, 0, zq_latch_dvfs_wait_time) +
2181 next_timing->dram_timing_regs[T_PDEX],
2182 destination_clock_period));
2183 ccfifo_writel(1 << EMC_ZQ_CAL_DEV_SEL_SHIFT |
2184 EMC_ZQ_CAL_ZQ_LATCH_CMD,
2187 ccfifo_writel((mr13_flip_fspop & 0xfffffff7) |
2188 0x0c000000, EMC_MRW3, 0);
2189 ccfifo_writel(0, EMC_SELF_REF, 0);
2190 ccfifo_writel(0, EMC_REF, 0);
2192 ccfifo_writel(1 << EMC_ZQ_CAL_DEV_SEL_SHIFT |
2193 EMC_ZQ_CAL_ZQ_LATCH_CMD, EMC_ZQ_CAL,
2194 tZQCAL_lpddr4 / destination_clock_period);
2196 if (source_clock_period > zqcal_before_cc_cutoff) {
2197 ccfifo_writel(EMC_ZQ_CAL_ZQ_CAL_CMD, EMC_ZQ_CAL,
2199 next_timing->dram_timing_regs[T_PDEX],
2200 destination_clock_period));
2203 ccfifo_writel((mr13_flip_fspop & 0xfffffff7) |
2204 0x0c000000, EMC_MRW3,
2206 next_timing->dram_timing_regs[T_PDEX],
2207 destination_clock_period));
2208 ccfifo_writel(0, EMC_SELF_REF, 0);
2209 ccfifo_writel(0, EMC_REF, 0);
2211 ccfifo_writel(EMC_ZQ_CAL_ZQ_LATCH_CMD, EMC_ZQ_CAL,
2212 max_t(s32, 0, zq_latch_dvfs_wait_time));
2216 /* WAR: delay for zqlatch */
2217 ccfifo_writel(0, 0, 10);
2220 * LPDDR4 Conditional Training Kickoff. Removed.
2224 * MANSR exit self refresh.
2226 emc_cc_dbg(STEPS, "Step 17\n");
2227 if (opt_dvfs_mode == MAN_SR && dram_type != DRAM_TYPE_LPDDR4)
2228 ccfifo_writel(0, EMC_SELF_REF, 0);
2231 * Send MRWs to LPDDR3/DDR3.
2233 emc_cc_dbg(STEPS, "Step 18\n");
2234 if (dram_type == DRAM_TYPE_LPDDR2) {
2235 ccfifo_writel(next_timing->emc_mrw2, EMC_MRW2, 0);
2236 ccfifo_writel(next_timing->emc_mrw, EMC_MRW, 0);
2238 ccfifo_writel(next_timing->emc_mrw4, EMC_MRW4, 0);
2239 } else if (dram_type == DRAM_TYPE_DDR3) {
2240 if (opt_dll_mode == DLL_ON)
2241 ccfifo_writel(next_timing->emc_emrs &
2242 ~EMC_EMRS_USE_EMRS_LONG_CNT, EMC_EMRS, 0);
2243 ccfifo_writel(next_timing->emc_emrs2 &
2244 ~EMC_EMRS2_USE_EMRS2_LONG_CNT, EMC_EMRS2, 0);
2245 ccfifo_writel(next_timing->emc_mrs |
2246 EMC_EMRS_USE_EMRS_LONG_CNT, EMC_MRS, 0);
2250 * ZQCAL for LPDDR3/DDR3
2252 emc_cc_dbg(STEPS, "Step 19\n");
2253 if (opt_zcal_en_cc) {
2254 if (dram_type == DRAM_TYPE_LPDDR2) {
2257 zq_op = opt_cc_short_zcal ? 0x56 : 0xAB;
2258 zcal_wait_time_ps = opt_cc_short_zcal ? 90000 : 360000;
2259 zcal_wait_time_clocks = div_o3(zcal_wait_time_ps,
2260 destination_clock_period);
2261 r = zcal_wait_time_clocks <<
2262 EMC_MRS_WAIT_CNT2_MRS_EXT2_WAIT_CNT_SHIFT |
2263 zcal_wait_time_clocks <<
2264 EMC_MRS_WAIT_CNT2_MRS_EXT1_WAIT_CNT_SHIFT;
2265 ccfifo_writel(r, EMC_MRS_WAIT_CNT2, 0);
2266 ccfifo_writel(2 << EMC_MRW_MRW_DEV_SELECTN_SHIFT |
2267 EMC_MRW_USE_MRW_EXT_CNT |
2268 10 << EMC_MRW_MRW_MA_SHIFT |
2269 zq_op << EMC_MRW_MRW_OP_SHIFT,
2271 if (dram_dev_num == TWO_RANK) {
2272 r = 1 << EMC_MRW_MRW_DEV_SELECTN_SHIFT |
2273 EMC_MRW_USE_MRW_EXT_CNT |
2274 10 << EMC_MRW_MRW_MA_SHIFT |
2275 zq_op << EMC_MRW_MRW_OP_SHIFT;
2276 ccfifo_writel(r, EMC_MRW, 0);
2278 } else if (dram_type == DRAM_TYPE_DDR3) {
2279 zq_op = opt_cc_short_zcal ? 0 : EMC_ZQ_CAL_LONG;
2280 ccfifo_writel(zq_op | 2 << EMC_ZQ_CAL_DEV_SEL_SHIFT |
2281 EMC_ZQ_CAL_ZQ_CAL_CMD, EMC_ZQ_CAL, 0);
2282 if (dram_dev_num == TWO_RANK)
2283 ccfifo_writel(zq_op |
2284 1 << EMC_ZQ_CAL_DEV_SEL_SHIFT |
2285 EMC_ZQ_CAL_ZQ_CAL_CMD,
2290 if (bg_regulator_mode_change) {
2291 emc_set_shadow_bypass(ACTIVE);
2292 bg_regulator_switch_complete_wait_clks =
2293 ramp_up_wait > 1250000 ? 0 :
2294 (1250000 - ramp_up_wait) / destination_clock_period;
2295 ccfifo_writel(next_timing->burst_regs
2296 [EMC_PMACRO_BG_BIAS_CTRL_0_INDEX],
2297 EMC_PMACRO_BG_BIAS_CTRL_0,
2298 bg_regulator_switch_complete_wait_clks);
2299 emc_set_shadow_bypass(ASSEMBLY);
2303 * Issue ref and optional QRST.
2305 emc_cc_dbg(STEPS, "Step 20\n");
2306 if (dram_type != DRAM_TYPE_LPDDR4)
2307 ccfifo_writel(0, EMC_REF, 0);
2309 if (opt_do_sw_qrst) {
2310 ccfifo_writel(1, EMC_ISSUE_QRST, 0);
2311 ccfifo_writel(0, EMC_ISSUE_QRST, 2);
2315 * Restore ZCAL and ZCAL interval.
2317 emc_cc_dbg(STEPS, "Step 21\n");
2318 if (save_restore_clkstop_pd || opt_zcal_en_cc) {
2319 ccfifo_writel(emc_dbg_o | EMC_DBG_WRITE_MUX_ACTIVE, EMC_DBG, 0);
2320 if (opt_zcal_en_cc && dram_type != DRAM_TYPE_LPDDR4)
2321 ccfifo_writel(next_timing->
2322 burst_regs[EMC_ZCAL_INTERVAL_INDEX],
2323 EMC_ZCAL_INTERVAL, 0);
2325 if (save_restore_clkstop_pd)
2326 ccfifo_writel(next_timing->burst_regs[EMC_CFG_INDEX] &
2327 ~EMC_CFG_DYN_SELF_REF, EMC_CFG, 0);
2328 ccfifo_writel(emc_dbg_o, EMC_DBG, 0);
2332 * Restore EMC_CFG_PIPE_CLK.
2334 emc_cc_dbg(STEPS, "Step 22\n");
2335 ccfifo_writel(emc_cfg_pipe_clk_o, EMC_CFG_PIPE_CLK, 0);
2337 if (bg_regulator_mode_change) {
2338 if (enable_bg_regulator)
2339 emc_writel(next_timing->burst_regs
2340 [EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
2341 ~EMC_PMACRO_BG_BIAS_CTRL_0_BGLP_E_PWRD,
2342 EMC_PMACRO_BG_BIAS_CTRL_0);
2344 emc_writel(next_timing->burst_regs
2345 [EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
2346 ~EMC_PMACRO_BG_BIAS_CTRL_0_BG_E_PWRD,
2347 EMC_PMACRO_BG_BIAS_CTRL_0);
2352 emc_cc_dbg(STEPS, "Step 23\n");
2354 /* Fix: rename tmp to something meaningful. */
2355 tmp = emc_readl(EMC_CFG_DIG_DLL);
2356 tmp |= EMC_CFG_DIG_DLL_CFG_DLL_STALL_ALL_TRAFFIC;
2357 tmp &= ~EMC_CFG_DIG_DLL_CFG_DLL_STALL_RW_UNTIL_LOCK;
2358 tmp &= ~EMC_CFG_DIG_DLL_CFG_DLL_STALL_ALL_UNTIL_LOCK;
2359 tmp &= ~EMC_CFG_DIG_DLL_CFG_DLL_EN;
2360 tmp = (tmp & ~EMC_CFG_DIG_DLL_CFG_DLL_MODE_MASK) |
2361 (2 << EMC_CFG_DIG_DLL_CFG_DLL_MODE_SHIFT);
2362 emc_writel(tmp, EMC_CFG_DIG_DLL);
2364 /* Clock change. Woot. BUG()s out if something fails. */
2365 do_clock_change(clksrc);
2368 * Save training results. Removed.
2372 * Program MC updown registers.
2374 emc_cc_dbg(STEPS, "Step 25\n");
2376 if (next_timing->rate > last_timing->rate) {
2377 for (i = 0; i < next_timing->la_scale_regs_num; i++)
2378 __raw_writel(next_timing->la_scale_regs[i],
2379 la_scale_off_regs[i]);
2380 emc_timing_update(0);
2384 * Restore ZCAL registers.
2386 emc_cc_dbg(STEPS, "Step 26\n");
2387 if (dram_type == DRAM_TYPE_LPDDR4) {
2388 emc_set_shadow_bypass(ACTIVE);
2389 emc_writel(next_timing->burst_regs[EMC_ZCAL_WAIT_CNT_INDEX],
2391 emc_writel(next_timing->burst_regs[EMC_ZCAL_INTERVAL_INDEX],
2393 emc_set_shadow_bypass(ASSEMBLY);
2396 if (dram_type != DRAM_TYPE_LPDDR4 &&
2397 opt_zcal_en_cc && !opt_short_zcal && opt_cc_short_zcal) {
2400 emc_set_shadow_bypass(ACTIVE);
2401 if (dram_type == DRAM_TYPE_LPDDR2)
2402 emc_writel(next_timing->
2403 burst_regs[EMC_MRS_WAIT_CNT_INDEX],
2405 else if (dram_type == DRAM_TYPE_DDR3)
2406 emc_writel(next_timing->
2407 burst_regs[EMC_ZCAL_WAIT_CNT_INDEX],
2409 emc_set_shadow_bypass(ASSEMBLY);
2413 * Restore EMC_CFG, FDPD registers.
2415 emc_cc_dbg(STEPS, "Step 27\n");
2416 emc_set_shadow_bypass(ACTIVE);
2417 emc_writel(next_timing->burst_regs[EMC_CFG_INDEX], EMC_CFG);
2418 emc_set_shadow_bypass(ASSEMBLY);
2419 emc_writel(next_timing->emc_fdpd_ctrl_cmd_no_ramp,
2420 EMC_FDPD_CTRL_CMD_NO_RAMP);
2421 emc_writel(next_timing->emc_sel_dpd_ctrl, EMC_SEL_DPD_CTRL);
2424 * Training recover. Removed.
2426 emc_cc_dbg(STEPS, "Step 28\n");
2428 emc_set_shadow_bypass(ACTIVE);
2429 emc_writel(next_timing->burst_regs[EMC_PMACRO_AUTOCAL_CFG_COMMON_INDEX],
2430 EMC_PMACRO_AUTOCAL_CFG_COMMON);
2431 emc_set_shadow_bypass(ASSEMBLY);
2436 emc_cc_dbg(STEPS, "Step 29\n");
2437 emc_writel(EMC_PMACRO_CFG_PM_GLOBAL_0_DISABLE_CFG_BYTE0 |
2438 EMC_PMACRO_CFG_PM_GLOBAL_0_DISABLE_CFG_BYTE1 |
2439 EMC_PMACRO_CFG_PM_GLOBAL_0_DISABLE_CFG_BYTE2 |
2440 EMC_PMACRO_CFG_PM_GLOBAL_0_DISABLE_CFG_BYTE3 |
2441 EMC_PMACRO_CFG_PM_GLOBAL_0_DISABLE_CFG_BYTE4 |
2442 EMC_PMACRO_CFG_PM_GLOBAL_0_DISABLE_CFG_BYTE5 |
2443 EMC_PMACRO_CFG_PM_GLOBAL_0_DISABLE_CFG_BYTE6 |
2444 EMC_PMACRO_CFG_PM_GLOBAL_0_DISABLE_CFG_BYTE7,
2445 EMC_PMACRO_CFG_PM_GLOBAL_0);
2446 emc_writel(EMC_PMACRO_TRAINING_CTRL_0_CH0_TRAINING_E_WRPTR,
2447 EMC_PMACRO_TRAINING_CTRL_0);
2448 emc_writel(EMC_PMACRO_TRAINING_CTRL_1_CH1_TRAINING_E_WRPTR,
2449 EMC_PMACRO_TRAINING_CTRL_1);
2450 emc_writel(0, EMC_PMACRO_CFG_PM_GLOBAL_0);
2453 * Re-enable autocal.
2455 emc_cc_dbg(STEPS, "Step 30: Re-enable DLL and AUTOCAL\n");
2456 if (next_timing->burst_regs[EMC_CFG_DIG_DLL_INDEX] &
2457 EMC_CFG_DIG_DLL_CFG_DLL_EN) {
2458 tmp = emc_readl(EMC_CFG_DIG_DLL);
2459 tmp |= EMC_CFG_DIG_DLL_CFG_DLL_STALL_ALL_TRAFFIC;
2460 tmp |= EMC_CFG_DIG_DLL_CFG_DLL_EN;
2461 tmp &= ~EMC_CFG_DIG_DLL_CFG_DLL_STALL_RW_UNTIL_LOCK;
2462 tmp &= ~EMC_CFG_DIG_DLL_CFG_DLL_STALL_ALL_UNTIL_LOCK;
2463 tmp = (tmp & ~EMC_CFG_DIG_DLL_CFG_DLL_MODE_MASK) |
2464 (2 << EMC_CFG_DIG_DLL_CFG_DLL_MODE_SHIFT);
2465 emc_writel(tmp, EMC_CFG_DIG_DLL);
2466 emc_timing_update(channel_mode);
2469 emc_auto_cal_config = next_timing->emc_auto_cal_config;
2470 emc_writel(emc_auto_cal_config, EMC_AUTO_CAL_CONFIG);
2473 * Restore FSP to account for switch back. Only needed in training.
2475 emc_cc_dbg(STEPS, "Step 31\n");
2478 * [SW] Update the alternative timing (derated vs normal) table with
2479 * the periodic training values computed during the clock change
2482 emc_cc_dbg(STEPS, "Step 32: Update alt timing\n");
2483 __update_emc_alt_timing(next_timing);