1 // SPDX-License-Identifier: GPL-2.0+
2 /*
3  * DDR3 mem setup file for board based on EXYNOS5
4  *
5  * Copyright (C) 2012 Samsung Electronics
6  */
7 
8 #include <config.h>
9 #include <asm/io.h>
10 #include <asm/arch/clock.h>
11 #include <asm/arch/cpu.h>
12 #include <asm/arch/dmc.h>
13 #include <asm/arch/power.h>
14 #include "common_setup.h"
15 #include "exynos5_setup.h"
16 #include "clock_init.h"
17 
#define TIMEOUT_US		10000
#define NUM_BYTE_LANES		4
#define DEFAULT_DQS		8
/*
 * Default read-offset word: DEFAULT_DQS replicated into all four byte
 * lanes of the 32-bit PHY_CON4 register (one byte per lane).
 *
 * These must be bitwise ORs ('|'); the previous logical ORs ('||')
 * collapsed the whole expression to 1 instead of 0x08080808.
 */
#define DEFAULT_DQS_X4		((DEFAULT_DQS << 24) | (DEFAULT_DQS << 16) \
				| (DEFAULT_DQS << 8) | (DEFAULT_DQS << 0))
23 
24 #ifdef CONFIG_EXYNOS5250
reset_phy_ctrl(void)25 static void reset_phy_ctrl(void)
26 {
27 	struct exynos5_clock *clk =
28 		(struct exynos5_clock *)samsung_get_base_clock();
29 
30 	writel(DDR3PHY_CTRL_PHY_RESET_OFF, &clk->lpddr3phy_ctrl);
31 	writel(DDR3PHY_CTRL_PHY_RESET, &clk->lpddr3phy_ctrl);
32 }
33 
/**
 * DDR3 memory controller and PHY initialization for Exynos5250.
 *
 * Programs driver strength, read latency/burst length, runs ZQ
 * calibration, starts DLL locking, sets DMC timings, issues the DRAM
 * init command sequence and (optionally) performs hardware gate
 * leveling.
 *
 * @param mem	Memory timings to program
 * @param reset	Non-zero to pulse the DDR3 PHY reset before setup
 * Return:	0 on success, or a SETUP_ERR_* code on ZQ-calibration or
 *		read-leveling timeout failure
 */
int ddr3_mem_ctrl_init(struct mem_timings *mem, int reset)
{
	unsigned int val;
	struct exynos5_phy_control *phy0_ctrl, *phy1_ctrl;
	struct exynos5_dmc *dmc;
	int i;

	/* PHY1 and the second register bank sit DMC_OFFSET above PHY0 */
	phy0_ctrl = (struct exynos5_phy_control *)samsung_get_base_dmc_phy();
	phy1_ctrl = (struct exynos5_phy_control *)(samsung_get_base_dmc_phy()
							+ DMC_OFFSET);
	dmc = (struct exynos5_dmc *)samsung_get_base_dmc_ctrl();

	if (reset)
		reset_phy_ctrl();

	/* Set Impedance Output Driver */
	val = (mem->impedance << CA_CK_DRVR_DS_OFFSET) |
		(mem->impedance << CA_CKE_DRVR_DS_OFFSET) |
		(mem->impedance << CA_CS_DRVR_DS_OFFSET) |
		(mem->impedance << CA_ADR_DRVR_DS_OFFSET);
	writel(val, &phy0_ctrl->phy_con39);
	writel(val, &phy1_ctrl->phy_con39);

	/* Set Read Latency and Burst Length for PHY0 and PHY1 */
	val = (mem->ctrl_bstlen << PHY_CON42_CTRL_BSTLEN_SHIFT) |
		(mem->ctrl_rdlat << PHY_CON42_CTRL_RDLAT_SHIFT);
	writel(val, &phy0_ctrl->phy_con42);
	writel(val, &phy1_ctrl->phy_con42);

	/* ZQ Calibration */
	if (dmc_config_zq(mem, &phy0_ctrl->phy_con16, &phy1_ctrl->phy_con16,
			  &phy0_ctrl->phy_con17, &phy1_ctrl->phy_con17))
		return SETUP_ERR_ZQ_CALIBRATION_FAILURE;

	/* DQ Signal */
	writel(mem->phy0_pulld_dqs, &phy0_ctrl->phy_con14);
	writel(mem->phy1_pulld_dqs, &phy1_ctrl->phy_con14);

	/* Start DFI initialization along with the base concontrol setup */
	writel(mem->concontrol | (mem->rd_fetch << CONCONTROL_RD_FETCH_SHIFT)
		| (mem->dfi_init_start << CONCONTROL_DFI_INIT_START_SHIFT),
		&dmc->concontrol);

	update_reset_dll(&dmc->phycontrol0, DDR_MODE_DDR3);

	/* DQS Signal */
	writel(mem->phy0_dqs, &phy0_ctrl->phy_con4);
	writel(mem->phy1_dqs, &phy1_ctrl->phy_con4);

	writel(mem->phy0_dq, &phy0_ctrl->phy_con6);
	writel(mem->phy1_dq, &phy1_ctrl->phy_con6);

	writel(mem->phy0_tFS, &phy0_ctrl->phy_con10);
	writel(mem->phy1_tFS, &phy1_ctrl->phy_con10);

	val = (mem->ctrl_start_point << PHY_CON12_CTRL_START_POINT_SHIFT) |
		(mem->ctrl_inc << PHY_CON12_CTRL_INC_SHIFT) |
		(mem->ctrl_dll_on << PHY_CON12_CTRL_DLL_ON_SHIFT) |
		(mem->ctrl_ref << PHY_CON12_CTRL_REF_SHIFT);
	writel(val, &phy0_ctrl->phy_con12);
	writel(val, &phy1_ctrl->phy_con12);

	/* Start DLL locking */
	writel(val | (mem->ctrl_start << PHY_CON12_CTRL_START_SHIFT),
	       &phy0_ctrl->phy_con12);
	writel(val | (mem->ctrl_start << PHY_CON12_CTRL_START_SHIFT),
	       &phy1_ctrl->phy_con12);

	update_reset_dll(&dmc->phycontrol0, DDR_MODE_DDR3);

	/* Rewrite concontrol without the DFI init start bit */
	writel(mem->concontrol | (mem->rd_fetch << CONCONTROL_RD_FETCH_SHIFT),
	       &dmc->concontrol);

	/* Memory Channel Interleaving Size */
	writel(mem->iv_size, &dmc->ivcontrol);

	writel(mem->memconfig, &dmc->memconfig0);
	writel(mem->memconfig, &dmc->memconfig1);
	writel(mem->membaseconfig0, &dmc->membaseconfig0);
	writel(mem->membaseconfig1, &dmc->membaseconfig1);

	/* Precharge Configuration */
	writel(mem->prechconfig_tp_cnt << PRECHCONFIG_TP_CNT_SHIFT,
	       &dmc->prechconfig);

	/* Power Down mode Configuration */
	writel(mem->dpwrdn_cyc << PWRDNCONFIG_DPWRDN_CYC_SHIFT |
		mem->dsref_cyc << PWRDNCONFIG_DSREF_CYC_SHIFT,
		&dmc->pwrdnconfig);

	/* TimingRow, TimingData, TimingPower and Timingaref
	 * values as per Memory AC parameters
	 */
	writel(mem->timing_ref, &dmc->timingref);
	writel(mem->timing_row, &dmc->timingrow);
	writel(mem->timing_data, &dmc->timingdata);
	writel(mem->timing_power, &dmc->timingpower);

	/* Send PALL command */
	dmc_config_prech(mem, &dmc->directcmd);

	/* Send NOP, MRS and ZQINIT commands */
	dmc_config_mrs(mem, &dmc->directcmd);

	/* Optional hardware gate (read DQS) leveling sequence */
	if (mem->gate_leveling_enable) {
		val = PHY_CON0_RESET_VAL;
		val |= P0_CMD_EN;
		writel(val, &phy0_ctrl->phy_con0);
		writel(val, &phy1_ctrl->phy_con0);

		val = PHY_CON2_RESET_VAL;
		val |= INIT_DESKEW_EN;
		writel(val, &phy0_ctrl->phy_con2);
		writel(val, &phy1_ctrl->phy_con2);

		val = PHY_CON0_RESET_VAL;
		val |= P0_CMD_EN;
		val |= BYTE_RDLVL_EN;
		writel(val, &phy0_ctrl->phy_con0);
		writel(val, &phy1_ctrl->phy_con0);

		val = (mem->ctrl_start_point <<
				PHY_CON12_CTRL_START_POINT_SHIFT) |
			(mem->ctrl_inc << PHY_CON12_CTRL_INC_SHIFT) |
			(mem->ctrl_force << PHY_CON12_CTRL_FORCE_SHIFT) |
			(mem->ctrl_start << PHY_CON12_CTRL_START_SHIFT) |
			(mem->ctrl_ref << PHY_CON12_CTRL_REF_SHIFT);
		writel(val, &phy0_ctrl->phy_con12);
		writel(val, &phy1_ctrl->phy_con12);

		val = PHY_CON2_RESET_VAL;
		val |= INIT_DESKEW_EN;
		val |= RDLVL_GATE_EN;
		writel(val, &phy0_ctrl->phy_con2);
		writel(val, &phy1_ctrl->phy_con2);

		val = PHY_CON0_RESET_VAL;
		val |= P0_CMD_EN;
		val |= BYTE_RDLVL_EN;
		val |= CTRL_SHGATE;
		writel(val, &phy0_ctrl->phy_con0);
		writel(val, &phy1_ctrl->phy_con0);

		val = PHY_CON1_RESET_VAL;
		val &= ~(CTRL_GATEDURADJ_MASK);
		writel(val, &phy0_ctrl->phy_con1);
		writel(val, &phy1_ctrl->phy_con1);

		/* Kick off gate leveling and poll both channels for
		 * completion, giving up after TIMEOUT_US iterations.
		 */
		writel(CTRL_RDLVL_GATE_ENABLE, &dmc->rdlvl_config);
		i = TIMEOUT_US;
		while ((readl(&dmc->phystatus) &
			(RDLVL_COMPLETE_CHO | RDLVL_COMPLETE_CH1)) !=
			(RDLVL_COMPLETE_CHO | RDLVL_COMPLETE_CH1) && i > 0) {
			/*
			 * TODO(waihong): Comment on how long this take to
			 * timeout
			 */
			sdelay(100);
			i--;
		}
		if (!i)
			return SETUP_ERR_RDLV_COMPLETE_TIMEOUT;
		writel(CTRL_RDLVL_GATE_DISABLE, &dmc->rdlvl_config);

		writel(0, &phy0_ctrl->phy_con14);
		writel(0, &phy1_ctrl->phy_con14);

		val = (mem->ctrl_start_point <<
				PHY_CON12_CTRL_START_POINT_SHIFT) |
			(mem->ctrl_inc << PHY_CON12_CTRL_INC_SHIFT) |
			(mem->ctrl_force << PHY_CON12_CTRL_FORCE_SHIFT) |
			(mem->ctrl_start << PHY_CON12_CTRL_START_SHIFT) |
			(mem->ctrl_dll_on << PHY_CON12_CTRL_DLL_ON_SHIFT) |
			(mem->ctrl_ref << PHY_CON12_CTRL_REF_SHIFT);
		writel(val, &phy0_ctrl->phy_con12);
		writel(val, &phy1_ctrl->phy_con12);

		update_reset_dll(&dmc->phycontrol0, DDR_MODE_DDR3);
	}

	/* Send PALL command */
	dmc_config_prech(mem, &dmc->directcmd);

	writel(mem->memcontrol, &dmc->memcontrol);

	/* Set DMC Concontrol and enable auto-refresh counter */
	writel(mem->concontrol | (mem->rd_fetch << CONCONTROL_RD_FETCH_SHIFT)
		| (mem->aref_en << CONCONTROL_AREF_EN_SHIFT), &dmc->concontrol);
	return 0;
}
223 #endif
224 
225 #ifdef CONFIG_EXYNOS5420
226 /**
227  * RAM address to use in the test.
228  *
229  * We'll use 4 words at this address and 4 at this address + 0x80 (Ares
230  * interleaves channels every 128 bytes).  This will allow us to evaluate all of
231  * the chips in a 1 chip per channel (2GB) system and half the chips in a 2
232  * chip per channel (4GB) system.  We can't test the 2nd chip since we need to
233  * do tests before the 2nd chip is enabled.  Looking at the 2nd chip isn't
234  * critical because the 1st and 2nd chip have very similar timings (they'd
235  * better have similar timings, since there's only a single adjustment that is
236  * shared by both chips).
237  */
const unsigned int test_addr = CFG_SYS_SDRAM_BASE;

/* Test pattern with which RAM will be tested */
static const unsigned int test_pattern[] = {
	0x5a5a5a5a,
	0xa5a5a5a5,	/* bitwise complement of the previous word */
	0xf0f0f0f0,
	0x0f0f0f0f,	/* bitwise complement of the previous word */
};
247 
248 /**
249  * This function is a test vector for sw read leveling,
250  * it compares the read data with the written data.
251  *
252  * @param ch			DMC channel number
253  * @param byte_lane		which DQS byte offset,
254  *				possible values are 0,1,2,3
255  * Return:			TRUE if memory was good, FALSE if not.
256  */
dmc_valid_window_test_vector(int ch,int byte_lane)257 static bool dmc_valid_window_test_vector(int ch, int byte_lane)
258 {
259 	unsigned int read_data;
260 	unsigned int mask;
261 	int i;
262 
263 	mask = 0xFF << (8 * byte_lane);
264 
265 	for (i = 0; i < ARRAY_SIZE(test_pattern); i++) {
266 		read_data = readl(test_addr + i * 4 + ch * 0x80);
267 		if ((read_data & mask) != (test_pattern[i] & mask))
268 			return false;
269 	}
270 
271 	return true;
272 }
273 
274 /**
275  * This function returns current read offset value.
276  *
277  * @param phy_ctrl	pointer to the current phy controller
278  */
dmc_get_read_offset_value(struct exynos5420_phy_control * phy_ctrl)279 static unsigned int dmc_get_read_offset_value(struct exynos5420_phy_control
280 					       *phy_ctrl)
281 {
282 	return readl(&phy_ctrl->phy_con4);
283 }
284 
285 /**
286  * This function performs resync, so that slave DLL is updated.
287  *
288  * @param phy_ctrl	pointer to the current phy controller
289  */
static void ddr_phy_set_do_resync(struct exynos5420_phy_control *phy_ctrl)
{
	/* Pulse the OFFSETR3 bit in PHY_CON10: set it, then clear it */
	setbits_le32(&phy_ctrl->phy_con10, PHY_CON10_CTRL_OFFSETR3);
	clrbits_le32(&phy_ctrl->phy_con10, PHY_CON10_CTRL_OFFSETR3);
}
295 
296 /**
297  * This function sets read offset value register with 'offset'.
298  *
 * ...we also call ddr_phy_set_do_resync().
300  *
301  * @param phy_ctrl	pointer to the current phy controller
302  * @param offset	offset to read DQS
303  */
static void dmc_set_read_offset_value(struct exynos5420_phy_control *phy_ctrl,
				      unsigned int offset)
{
	writel(offset, &phy_ctrl->phy_con4);
	/* Resync so the slave DLL picks up the new offsets */
	ddr_phy_set_do_resync(phy_ctrl);
}
310 
311 /**
312  * Convert a 2s complement byte to a byte with a sign bit.
313  *
314  * NOTE: you shouldn't use normal math on the number returned by this function.
315  *   As an example, -10 = 0xf6.  After this function -10 = 0x8a.  If you wanted
316  *   to do math and get the average of 10 and -10 (should be 0):
317  *     0x8a + 0xa = 0x94 (-108)
318  *     0x94 / 2   = 0xca (-54)
319  *   ...and 0xca = sign bit plus 0x4a, or -74
320  *
321  * Also note that you lose the ability to represent -128 since there are two
322  * representations of 0.
323  *
324  * @param b	The byte to convert in two's complement.
325  * Return:	The 7-bit value + sign bit.
326  */
327 
/*
 * Convert a two's-complement byte into sign-magnitude form: bit 7 is
 * the sign, bits 6:0 the magnitude.  Non-negative values pass through
 * unchanged.  See the block comment above for the arithmetic caveats.
 */
unsigned char make_signed_byte(signed char b)
{
	return (b < 0) ? (0x80 | -b) : b;
}
335 
336 /**
337  * Test various shifts starting at 'start' and going to 'end'.
338  *
339  * For each byte lane, we'll walk through shift starting at 'start' and going
340  * to 'end' (inclusive).  When we are finally able to read the test pattern
341  * we'll store the value in the results array.
342  *
343  * @param phy_ctrl		pointer to the current phy controller
344  * @param ch			channel number
345  * @param start			the start shift.  -127 to 127
346  * @param end			the end shift.  -127 to 127
347  * @param results		we'll store results for each byte lane.
348  */
349 
test_shifts(struct exynos5420_phy_control * phy_ctrl,int ch,int start,int end,int results[NUM_BYTE_LANES])350 void test_shifts(struct exynos5420_phy_control *phy_ctrl, int ch,
351 		 int start, int end, int results[NUM_BYTE_LANES])
352 {
353 	int incr = (start < end) ? 1 : -1;
354 	int byte_lane;
355 
356 	for (byte_lane = 0; byte_lane < NUM_BYTE_LANES; byte_lane++) {
357 		int shift;
358 
359 		dmc_set_read_offset_value(phy_ctrl, DEFAULT_DQS_X4);
360 		results[byte_lane] = DEFAULT_DQS;
361 
362 		for (shift = start; shift != (end + incr); shift += incr) {
363 			unsigned int byte_offsetr;
364 			unsigned int offsetr;
365 
366 			byte_offsetr = make_signed_byte(shift);
367 
368 			offsetr = dmc_get_read_offset_value(phy_ctrl);
369 			offsetr &= ~(0xFF << (8 * byte_lane));
370 			offsetr |= (byte_offsetr << (8 * byte_lane));
371 			dmc_set_read_offset_value(phy_ctrl, offsetr);
372 
373 			if (dmc_valid_window_test_vector(ch, byte_lane)) {
374 				results[byte_lane] = shift;
375 				break;
376 			}
377 		}
378 	}
379 }
380 
381 /**
382  * This function performs SW read leveling to compensate DQ-DQS skew at
383  * receiver it first finds the optimal read offset value on each DQS
384  * then applies the value to PHY.
385  *
386  * Read offset value has its min margin and max margin. If read offset
387  * value exceeds its min or max margin, read data will have corruption.
388  * To avoid this we are doing sw read leveling.
389  *
390  * SW read leveling is:
391  * 1> Finding offset value's left_limit and right_limit
392  * 2> and calculate its center value
393  * 3> finally programs that center value to PHY
394  * 4> then PHY gets its optimal offset value.
395  *
396  * @param phy_ctrl		pointer to the current phy controller
397  * @param ch			channel number
398  * @param coarse_lock_val	The coarse lock value read from PHY_CON13.
399  *				(0 - 0x7f)
400  */
static void software_find_read_offset(struct exynos5420_phy_control *phy_ctrl,
				      int ch, unsigned int coarse_lock_val)
{
	unsigned int offsetr_cent;
	int byte_lane;
	int left_limit;
	int right_limit;
	int left[NUM_BYTE_LANES];
	int right[NUM_BYTE_LANES];
	int i;

	/* Fill the memory with test patterns */
	for (i = 0; i < ARRAY_SIZE(test_pattern); i++)
		writel(test_pattern[i], test_addr + i * 4 + ch * 0x80);

	/* Figure out the limits we'll test with; keep -127 < limit < 127 */
	/*
	 * NOTE(review): coarse_lock_val is unsigned, so the subtraction
	 * below is done in unsigned arithmetic and relies on the
	 * conversion back to int wrapping to the intended negative value
	 * when coarse_lock_val > DEFAULT_DQS — confirm this is intended.
	 */
	left_limit = DEFAULT_DQS - coarse_lock_val;
	right_limit = DEFAULT_DQS + coarse_lock_val;
	if (right_limit > 127)
		right_limit = 127;

	/* Fill in the location where reads were OK from left and right */
	test_shifts(phy_ctrl, ch, left_limit, right_limit, left);
	test_shifts(phy_ctrl, ch, right_limit, left_limit, right);

	/* Make a final value by taking the center between the left and right */
	offsetr_cent = 0;
	for (byte_lane = 0; byte_lane < NUM_BYTE_LANES; byte_lane++) {
		int temp_center;
		unsigned int vmwc;

		/* Average the raw shifts first, then convert the center
		 * to the sign-magnitude form the PHY register expects.
		 */
		temp_center = (left[byte_lane] + right[byte_lane]) / 2;
		vmwc = make_signed_byte(temp_center);
		offsetr_cent |= vmwc << (8 * byte_lane);
	}
	dmc_set_read_offset_value(phy_ctrl, offsetr_cent);
}
438 
/**
 * DDR3 memory controller and PHY initialization for Exynos5420/5800.
 *
 * Brings up both DREX channels: clock/BPLL setup, PHY mode and drive
 * strength, ZQ calibration, DFI init, address map, AC timings, optional
 * hardware gate leveling and software read leveling, then enables
 * auto-refresh and clock gating.
 *
 * @param mem	Memory timings to program
 * @param reset	Non-zero on a cold boot (DRAM may be reset); zero on
 *		resume, when DRAM contents must be preserved
 * Return:	0 on success, or a SETUP_ERR_* code on ZQ-calibration or
 *		read-leveling timeout failure
 */
int ddr3_mem_ctrl_init(struct mem_timings *mem, int reset)
{
	struct exynos5420_clock *clk =
		(struct exynos5420_clock *)samsung_get_base_clock();
	struct exynos5420_power *power =
		(struct exynos5420_power *)samsung_get_base_power();
	struct exynos5420_phy_control *phy0_ctrl, *phy1_ctrl;
	struct exynos5420_dmc *drex0, *drex1;
	struct exynos5420_tzasc *tzasc0, *tzasc1;
	struct exynos5_power *pmu;
	uint32_t val, n_lock_r, n_lock_w_phy0, n_lock_w_phy1;
	uint32_t lock0_info, lock1_info;
	int chip;
	int i;

	/* Second instance of each block sits DMC_OFFSET above the first */
	phy0_ctrl = (struct exynos5420_phy_control *)samsung_get_base_dmc_phy();
	phy1_ctrl = (struct exynos5420_phy_control *)(samsung_get_base_dmc_phy()
							+ DMC_OFFSET);
	drex0 = (struct exynos5420_dmc *)samsung_get_base_dmc_ctrl();
	drex1 = (struct exynos5420_dmc *)(samsung_get_base_dmc_ctrl()
							+ DMC_OFFSET);
	tzasc0 = (struct exynos5420_tzasc *)samsung_get_base_dmc_tzasc();
	tzasc1 = (struct exynos5420_tzasc *)(samsung_get_base_dmc_tzasc()
							+ DMC_OFFSET);
	pmu = (struct exynos5_power *)EXYNOS5420_POWER_BASE;

	if (CONFIG_NR_DRAM_BANKS > 4) {
		/* Need both controllers. */
		mem->memcontrol |= DMC_MEMCONTROL_NUM_CHIP_2;
		mem->chips_per_channel = 2;
		mem->chips_to_configure = 2;
	} else {
		/* 2GB requires a single controller */
		mem->memcontrol |= DMC_MEMCONTROL_NUM_CHIP_1;
	}

	/* Enable PAUSE for DREX */
	setbits_le32(&clk->pause, ENABLE_BIT);

	/* Enable BYPASS mode */
	setbits_le32(&clk->bpll_con1, BYPASS_EN);

	/* Switch the CDREX mux to BPLL output and wait for it to settle */
	writel(MUX_BPLL_SEL_FOUTBPLL, &clk->src_cdrex);
	do {
		val = readl(&clk->mux_stat_cdrex);
		val &= BPLL_SEL_MASK;
	} while (val != FOUTBPLL);

	clrbits_le32(&clk->bpll_con1, BYPASS_EN);

	/* Specify the DDR memory type as DDR3 */
	val = readl(&phy0_ctrl->phy_con0);
	val &= ~(PHY_CON0_CTRL_DDR_MODE_MASK << PHY_CON0_CTRL_DDR_MODE_SHIFT);
	val |= (DDR_MODE_DDR3 << PHY_CON0_CTRL_DDR_MODE_SHIFT);
	writel(val, &phy0_ctrl->phy_con0);

	val = readl(&phy1_ctrl->phy_con0);
	val &= ~(PHY_CON0_CTRL_DDR_MODE_MASK << PHY_CON0_CTRL_DDR_MODE_SHIFT);
	val |= (DDR_MODE_DDR3 << PHY_CON0_CTRL_DDR_MODE_SHIFT);
	writel(val, &phy1_ctrl->phy_con0);

	/* Set Read Latency and Burst Length for PHY0 and PHY1 */
	val = (mem->ctrl_bstlen << PHY_CON42_CTRL_BSTLEN_SHIFT) |
		(mem->ctrl_rdlat << PHY_CON42_CTRL_RDLAT_SHIFT);
	writel(val, &phy0_ctrl->phy_con42);
	writel(val, &phy1_ctrl->phy_con42);

	/* Program the DDR3 write-data enable timing on both PHYs */
	val = readl(&phy0_ctrl->phy_con26);
	val &= ~(T_WRDATA_EN_MASK << T_WRDATA_EN_OFFSET);
	val |= (T_WRDATA_EN_DDR3 << T_WRDATA_EN_OFFSET);
	writel(val, &phy0_ctrl->phy_con26);

	val = readl(&phy1_ctrl->phy_con26);
	val &= ~(T_WRDATA_EN_MASK << T_WRDATA_EN_OFFSET);
	val |= (T_WRDATA_EN_DDR3 << T_WRDATA_EN_OFFSET);
	writel(val, &phy1_ctrl->phy_con26);

	/*
	 * Set Driver strength for CK, CKE, CS & CA to 0x7
	 * Set Driver strength for Data Slice 0~3 to 0x7
	 */
	val = (0x7 << CA_CK_DRVR_DS_OFFSET) | (0x7 << CA_CKE_DRVR_DS_OFFSET) |
		(0x7 << CA_CS_DRVR_DS_OFFSET) | (0x7 << CA_ADR_DRVR_DS_OFFSET);
	val |= (0x7 << DA_3_DS_OFFSET) | (0x7 << DA_2_DS_OFFSET) |
		(0x7 << DA_1_DS_OFFSET) | (0x7 << DA_0_DS_OFFSET);
	writel(val, &phy0_ctrl->phy_con39);
	writel(val, &phy1_ctrl->phy_con39);

	/* ZQ Calibration */
	if (dmc_config_zq(mem, &phy0_ctrl->phy_con16, &phy1_ctrl->phy_con16,
			  &phy0_ctrl->phy_con17, &phy1_ctrl->phy_con17))
		return SETUP_ERR_ZQ_CALIBRATION_FAILURE;

	clrbits_le32(&phy0_ctrl->phy_con16, ZQ_CLK_DIV_EN);
	clrbits_le32(&phy1_ctrl->phy_con16, ZQ_CLK_DIV_EN);

	/* DQ Signal */
	val = readl(&phy0_ctrl->phy_con14);
	val |= mem->phy0_pulld_dqs;
	writel(val, &phy0_ctrl->phy_con14);
	val = readl(&phy1_ctrl->phy_con14);
	val |= mem->phy1_pulld_dqs;
	writel(val, &phy1_ctrl->phy_con14);

	val = MEM_TERM_EN | PHY_TERM_EN;
	writel(val, &drex0->phycontrol0);
	writel(val, &drex1->phycontrol0);

	/* Start DFI initialization on both channels */
	writel(mem->concontrol |
		(mem->dfi_init_start << CONCONTROL_DFI_INIT_START_SHIFT) |
		(mem->rd_fetch << CONCONTROL_RD_FETCH_SHIFT),
		&drex0->concontrol);
	writel(mem->concontrol |
		(mem->dfi_init_start << CONCONTROL_DFI_INIT_START_SHIFT) |
		(mem->rd_fetch << CONCONTROL_RD_FETCH_SHIFT),
		&drex1->concontrol);

	/* Wait for DFI initialization to complete on both channels */
	do {
		val = readl(&drex0->phystatus);
	} while ((val & DFI_INIT_COMPLETE) != DFI_INIT_COMPLETE);
	do {
		val = readl(&drex1->phystatus);
	} while ((val & DFI_INIT_COMPLETE) != DFI_INIT_COMPLETE);

	clrbits_le32(&drex0->concontrol, DFI_INIT_START);
	clrbits_le32(&drex1->concontrol, DFI_INIT_START);

	update_reset_dll(&drex0->phycontrol0, DDR_MODE_DDR3);
	update_reset_dll(&drex1->phycontrol0, DDR_MODE_DDR3);

	/*
	 * Set Base Address:
	 * 0x2000_0000 ~ 0x5FFF_FFFF
	 * 0x6000_0000 ~ 0x9FFF_FFFF
	 */
	/* MEMBASECONFIG0 */
	val = DMC_MEMBASECONFIGX_CHIP_BASE(DMC_CHIP_BASE_0) |
		DMC_MEMBASECONFIGX_CHIP_MASK(DMC_CHIP_MASK);
	writel(val, &tzasc0->membaseconfig0);
	writel(val, &tzasc1->membaseconfig0);

	/* MEMBASECONFIG1 */
	val = DMC_MEMBASECONFIGX_CHIP_BASE(DMC_CHIP_BASE_1) |
		DMC_MEMBASECONFIGX_CHIP_MASK(DMC_CHIP_MASK);
	writel(val, &tzasc0->membaseconfig1);
	writel(val, &tzasc1->membaseconfig1);

	/*
	 * Memory Channel Interleaving Size
	 * Ares Channel interleaving = 128 bytes
	 */
	/* MEMCONFIG0/1 */
	writel(mem->memconfig, &tzasc0->memconfig0);
	writel(mem->memconfig, &tzasc1->memconfig0);
	writel(mem->memconfig, &tzasc0->memconfig1);
	writel(mem->memconfig, &tzasc1->memconfig1);

	/* Precharge Configuration */
	writel(mem->prechconfig_tp_cnt << PRECHCONFIG_TP_CNT_SHIFT,
	       &drex0->prechconfig0);
	writel(mem->prechconfig_tp_cnt << PRECHCONFIG_TP_CNT_SHIFT,
	       &drex1->prechconfig0);

	/*
	 * TimingRow, TimingData, TimingPower and Timingaref
	 * values as per Memory AC parameters
	 */
	writel(mem->timing_ref, &drex0->timingref);
	writel(mem->timing_ref, &drex1->timingref);
	writel(mem->timing_row, &drex0->timingrow0);
	writel(mem->timing_row, &drex1->timingrow0);
	writel(mem->timing_data, &drex0->timingdata0);
	writel(mem->timing_data, &drex1->timingdata0);
	writel(mem->timing_power, &drex0->timingpower0);
	writel(mem->timing_power, &drex1->timingpower0);

	if (reset) {
		/*
		 * Send NOP, MRS and ZQINIT commands
		 * Sending MRS command will reset the DRAM. We should not be
		 * resetting the DRAM after resume, this will lead to memory
		 * corruption as DRAM content is lost after DRAM reset
		 */
		dmc_config_mrs(mem, &drex0->directcmd);
		dmc_config_mrs(mem, &drex1->directcmd);
	}

	/*
	 * Get PHY_CON13 from both phys.  Gate CLKM around reading since
	 * PHY_CON13 is glitchy when CLKM is running.  We're paranoid and
	 * wait until we get a "fine lock", though a coarse lock is probably
	 * OK (we only use the coarse numbers below).  We try to gate the
	 * clock for as short a time as possible in case SDRAM is somehow
	 * sensitive.  sdelay(10) in the loop is arbitrary to make sure
	 * there is some time for PHY_CON13 to get updated.  In practice
	 * no delay appears to be needed.
	 */
	val = readl(&clk->gate_bus_cdrex);
	while (true) {
		writel(val & ~0x1, &clk->gate_bus_cdrex);
		lock0_info = readl(&phy0_ctrl->phy_con13);
		writel(val, &clk->gate_bus_cdrex);

		if ((lock0_info & CTRL_FINE_LOCKED) == CTRL_FINE_LOCKED)
			break;

		sdelay(10);
	}
	while (true) {
		writel(val & ~0x2, &clk->gate_bus_cdrex);
		lock1_info = readl(&phy1_ctrl->phy_con13);
		writel(val, &clk->gate_bus_cdrex);

		if ((lock1_info & CTRL_FINE_LOCKED) == CTRL_FINE_LOCKED)
			break;

		sdelay(10);
	}

	if (!reset) {
		/*
		 * During Suspend-Resume & S/W-Reset, as soon as PMU releases
		 * pad retention, CKE goes high. This causes memory contents
		 * not to be retained during DRAM initialization. Therefore,
		 * there is a new control register(0x100431e8[28]) which lets us
		 * release pad retention and retain the memory content until the
		 * initialization is complete.
		 */
		writel(PAD_RETENTION_DRAM_COREBLK_VAL,
		       &power->pad_retention_dram_coreblk_option);
		do {
			val = readl(&power->pad_retention_dram_status);
		} while (val != 0x1);

		/*
		 * CKE PAD retention disables DRAM self-refresh mode.
		 * Send auto refresh command for DRAM refresh.
		 */
		for (i = 0; i < 128; i++) {
			for (chip = 0; chip < mem->chips_to_configure; chip++) {
				writel(DIRECT_CMD_REFA |
				       (chip << DIRECT_CMD_CHIP_SHIFT),
				       &drex0->directcmd);
				writel(DIRECT_CMD_REFA |
				       (chip << DIRECT_CMD_CHIP_SHIFT),
				       &drex1->directcmd);
			}
		}
	}

	/* Optional hardware gate (read DQS) leveling sequence */
	if (mem->gate_leveling_enable) {
		writel(PHY_CON0_RESET_VAL, &phy0_ctrl->phy_con0);
		writel(PHY_CON0_RESET_VAL, &phy1_ctrl->phy_con0);

		setbits_le32(&phy0_ctrl->phy_con0, P0_CMD_EN);
		setbits_le32(&phy1_ctrl->phy_con0, P0_CMD_EN);

		val = PHY_CON2_RESET_VAL;
		val |= INIT_DESKEW_EN;
		writel(val, &phy0_ctrl->phy_con2);
		writel(val, &phy1_ctrl->phy_con2);

		val =  readl(&phy0_ctrl->phy_con1);
		val |= (RDLVL_PASS_ADJ_VAL << RDLVL_PASS_ADJ_OFFSET);
		writel(val, &phy0_ctrl->phy_con1);

		val =  readl(&phy1_ctrl->phy_con1);
		val |= (RDLVL_PASS_ADJ_VAL << RDLVL_PASS_ADJ_OFFSET);
		writel(val, &phy1_ctrl->phy_con1);

		/* Seed PHY_CON12 with the coarse lock value read above */
		n_lock_w_phy0 = (lock0_info & CTRL_LOCK_COARSE_MASK) >> 2;
		n_lock_r = readl(&phy0_ctrl->phy_con12);
		n_lock_r &= ~CTRL_DLL_ON;
		n_lock_r |= n_lock_w_phy0;
		writel(n_lock_r, &phy0_ctrl->phy_con12);

		n_lock_w_phy1 = (lock1_info & CTRL_LOCK_COARSE_MASK) >> 2;
		n_lock_r = readl(&phy1_ctrl->phy_con12);
		n_lock_r &= ~CTRL_DLL_ON;
		n_lock_r |= n_lock_w_phy1;
		writel(n_lock_r, &phy1_ctrl->phy_con12);

		/*
		 * NOTE(review): direct command with bank 0x3, opcode 0x4 —
		 * presumably an MRS for leveling; verify against the DREX
		 * DIRECTCMD encoding in the SoC manual.
		 */
		val = (0x3 << DIRECT_CMD_BANK_SHIFT) | 0x4;
		for (chip = 0; chip < mem->chips_to_configure; chip++) {
			writel(val | (chip << DIRECT_CMD_CHIP_SHIFT),
			       &drex0->directcmd);
			writel(val | (chip << DIRECT_CMD_CHIP_SHIFT),
			       &drex1->directcmd);
		}

		setbits_le32(&phy0_ctrl->phy_con2, RDLVL_GATE_EN);
		setbits_le32(&phy1_ctrl->phy_con2, RDLVL_GATE_EN);

		setbits_le32(&phy0_ctrl->phy_con0, CTRL_SHGATE);
		setbits_le32(&phy1_ctrl->phy_con0, CTRL_SHGATE);

		val = readl(&phy0_ctrl->phy_con1);
		val &= ~(CTRL_GATEDURADJ_MASK);
		writel(val, &phy0_ctrl->phy_con1);

		val = readl(&phy1_ctrl->phy_con1);
		val &= ~(CTRL_GATEDURADJ_MASK);
		writel(val, &phy1_ctrl->phy_con1);

		/* Run gate leveling on channel 0, with a bounded poll */
		writel(CTRL_RDLVL_GATE_ENABLE, &drex0->rdlvl_config);
		i = TIMEOUT_US;
		while (((readl(&drex0->phystatus) & RDLVL_COMPLETE_CHO) !=
			RDLVL_COMPLETE_CHO) && (i > 0)) {
			/*
			 * TODO(waihong): Comment on how long this take to
			 * timeout
			 */
			sdelay(100);
			i--;
		}
		if (!i)
			return SETUP_ERR_RDLV_COMPLETE_TIMEOUT;
		writel(CTRL_RDLVL_GATE_DISABLE, &drex0->rdlvl_config);

		/* Run gate leveling on channel 1, with a bounded poll */
		writel(CTRL_RDLVL_GATE_ENABLE, &drex1->rdlvl_config);
		i = TIMEOUT_US;
		while (((readl(&drex1->phystatus) & RDLVL_COMPLETE_CHO) !=
			RDLVL_COMPLETE_CHO) && (i > 0)) {
			/*
			 * TODO(waihong): Comment on how long this take to
			 * timeout
			 */
			sdelay(100);
			i--;
		}
		if (!i)
			return SETUP_ERR_RDLV_COMPLETE_TIMEOUT;
		writel(CTRL_RDLVL_GATE_DISABLE, &drex1->rdlvl_config);

		writel(0, &phy0_ctrl->phy_con14);
		writel(0, &phy1_ctrl->phy_con14);

		val = (0x3 << DIRECT_CMD_BANK_SHIFT);
		for (chip = 0; chip < mem->chips_to_configure; chip++) {
			writel(val | (chip << DIRECT_CMD_CHIP_SHIFT),
			       &drex0->directcmd);
			writel(val | (chip << DIRECT_CMD_CHIP_SHIFT),
			       &drex1->directcmd);
		}

		/* Common Settings for Leveling */
		val = PHY_CON12_RESET_VAL;
		writel((val + n_lock_w_phy0), &phy0_ctrl->phy_con12);
		writel((val + n_lock_w_phy1), &phy1_ctrl->phy_con12);

		setbits_le32(&phy0_ctrl->phy_con2, DLL_DESKEW_EN);
		setbits_le32(&phy1_ctrl->phy_con2, DLL_DESKEW_EN);
	}

	/*
	 * Do software read leveling
	 *
	 * Do this before we turn on auto refresh since the auto refresh can
	 * be in conflict with the resync operation that's part of setting
	 * read leveling.
	 */
	if (!reset) {
		/* restore calibrated value after resume */
		dmc_set_read_offset_value(phy0_ctrl, readl(&pmu->pmu_spare1));
		dmc_set_read_offset_value(phy1_ctrl, readl(&pmu->pmu_spare2));
	} else {
		software_find_read_offset(phy0_ctrl, 0,
					  CTRL_LOCK_COARSE(lock0_info));
		software_find_read_offset(phy1_ctrl, 1,
					  CTRL_LOCK_COARSE(lock1_info));
		/* save calibrated value to restore after resume */
		writel(dmc_get_read_offset_value(phy0_ctrl), &pmu->pmu_spare1);
		writel(dmc_get_read_offset_value(phy1_ctrl), &pmu->pmu_spare2);
	}

	/* Send PALL command */
	dmc_config_prech(mem, &drex0->directcmd);
	dmc_config_prech(mem, &drex1->directcmd);

	writel(mem->memcontrol, &drex0->memcontrol);
	writel(mem->memcontrol, &drex1->memcontrol);

	/*
	 * Set DMC Concontrol: Enable auto-refresh counter, provide
	 * read data fetch cycles and enable DREX auto set powerdown
	 * for input buffer of I/O in none read memory state.
	 */
	writel(mem->concontrol | (mem->aref_en << CONCONTROL_AREF_EN_SHIFT) |
		(mem->rd_fetch << CONCONTROL_RD_FETCH_SHIFT)|
		DMC_CONCONTROL_IO_PD_CON(0x2),
		&drex0->concontrol);
	writel(mem->concontrol | (mem->aref_en << CONCONTROL_AREF_EN_SHIFT) |
		(mem->rd_fetch << CONCONTROL_RD_FETCH_SHIFT)|
		DMC_CONCONTROL_IO_PD_CON(0x2),
		&drex1->concontrol);

	/*
	 * Enable Clock Gating Control for DMC
	 * this saves around 25 mw dmc power as compared to the power
	 * consumption without these bits enabled
	 */
	setbits_le32(&drex0->cgcontrol, DMC_INTERNAL_CG);
	setbits_le32(&drex1->cgcontrol, DMC_INTERNAL_CG);

	/*
	 * As per Exynos5800 UM ver 0.00 section 17.13.2.1
	 * CONCONTROL register bit 3 [update_mode], Exynos5800 does not
	 * support the PHY initiated update. And it is recommended to set
	 * this field to 1'b1 during initialization
	 *
	 * When we apply PHY-initiated mode, DLL lock value is determined
	 * once at DMC init time and not updated later when we change the MIF
	 * voltage based on ASV group in kernel. Applying MC-initiated mode
	 * makes sure that DLL tracing is ON so that silicon is able to
	 * compensate the voltage variation.
	 */
	val = readl(&drex0->concontrol);
	val |= CONCONTROL_UPDATE_MODE;
	writel(val, &drex0->concontrol);
	val = readl(&drex1->concontrol);
	val |= CONCONTROL_UPDATE_MODE;
	writel(val, &drex1->concontrol);

	return 0;
}
864 #endif
865