// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright 2008-2014 Freescale Semiconductor, Inc.
 * Copyright 2021 NXP
 */

#include <common.h>
#ifdef CONFIG_PPC
#include <asm/fsl_law.h>
#endif
#include <div64.h>
#include <linux/delay.h>

#include <fsl_ddr.h>
#include <fsl_immap.h>
#include <log.h>
#include <asm/io.h>
#if defined(CONFIG_FSL_LSCH2) || defined(CONFIG_FSL_LSCH3) || \
	defined(CONFIG_ARM)
#include <asm/arch/clock.h>
#endif

/* To avoid 64-bit full-divides, we factor this here */
#define ULL_2E12 2000000000000ULL
#define UL_5POW12 244140625UL
#define UL_2POW13 (1UL << 13)

#define ULL_8FS 0xFFFFFFFFULL
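
/*
 * Note on the constants above (a descriptive aside, not new behavior):
 * 2,000,000,000,000 = 2^13 * 5^12, i.e. ULL_2E12 == UL_2POW13 * UL_5POW12
 * (8192 * 244140625).  picos_to_mclk() relies on this so the 64-bit
 * division by 2e12 can be done as a divide by 5^12 followed by a 13-bit
 * right shift.
 */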

u32 fsl_ddr_get_version(unsigned int ctrl_num)
{
	struct ccsr_ddr __iomem *ddr;
	u32 ver_major_minor_errata;

	switch (ctrl_num) {
	case 0:
		ddr = (void *)CFG_SYS_FSL_DDR_ADDR;
		break;
#if defined(CFG_SYS_FSL_DDR2_ADDR) && (CONFIG_SYS_NUM_DDR_CTLRS > 1)
	case 1:
		ddr = (void *)CFG_SYS_FSL_DDR2_ADDR;
		break;
#endif
#if defined(CFG_SYS_FSL_DDR3_ADDR) && (CONFIG_SYS_NUM_DDR_CTLRS > 2)
	case 2:
		ddr = (void *)CFG_SYS_FSL_DDR3_ADDR;
		break;
#endif
#if defined(CONFIG_SYS_FSL_DDR4_ADDR) && (CONFIG_SYS_NUM_DDR_CTLRS > 3)
	case 3:
		ddr = (void *)CONFIG_SYS_FSL_DDR4_ADDR;
		break;
#endif
	default:
		printf("%s unexpected ctrl_num = %u\n", __func__, ctrl_num);
		return 0;
	}
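
	/*
	 * Pack the version as 0x00MMmmee: the controller's major/minor
	 * revision from ip_rev1 and the errata level from the high byte of
	 * ip_rev2.  For example, IP rev 4.4, errata 0 reads back as
	 * 0x040400, the threshold print_ddr_info() compares against below.
	 */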
	ver_major_minor_errata = (ddr_in32(&ddr->ip_rev1) & 0xFFFF) << 8;
	ver_major_minor_errata |= (ddr_in32(&ddr->ip_rev2) & 0xFF00) >> 8;

	return ver_major_minor_errata;
}

/*
 * Round up mclk_ps to nearest 1 ps in memory controller code
 * if the error is 0.5ps or more.
 *
 * If an imprecise data rate is too high due to rounding error
 * propagation, compute a suitably rounded mclk_ps to compute
 * a working memory controller configuration.
 */
unsigned int get_memory_clk_period_ps(const unsigned int ctrl_num)
{
	unsigned int data_rate = get_ddr_freq(ctrl_num);
	unsigned int result;

	/* Round to nearest 1 ps, being careful about 64-bit multiply/divide */
	unsigned long long rem, mclk_ps = ULL_2E12;
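
	/*
	 * Worked example (illustrative): for a 1600 MT/s data rate the
	 * clock period is 2,000,000,000,000 / 1,600,000,000 = 1250 ps.
	 * The factor of 2e12 rather than 1e12 accounts for DDR transferring
	 * data on both clock edges, so the clock runs at half the data rate.
	 */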
	if (data_rate) {
		/* Now perform the big divide, the result fits in 32-bits */
		rem = do_div(mclk_ps, data_rate);
		result = (rem >= (data_rate >> 1)) ? mclk_ps + 1 : mclk_ps;
	} else {
		result = 0;
	}

	return result;
}

/* Convert picoseconds into DRAM clock cycles (rounding up if needed). */
unsigned int picos_to_mclk(const unsigned int ctrl_num, unsigned int picos)
{
	unsigned long long clks, clks_rem;
	unsigned long data_rate = get_ddr_freq(ctrl_num);

	/* Short circuit for zero picos */
	if (!picos)
		return 0;

	/* First multiply the time by the data rate (32x32 => 64) */
	clks = picos * (unsigned long long)data_rate;
	/*
	 * Now divide by 5^12 and track the 32-bit remainder, then divide
	 * by 2*(2^12) using shifts (and updating the remainder).
	 */
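	/*
	 * Illustrative example (hypothetical numbers): a 13750 ps delay at
	 * a 1600 MT/s data rate gives clks = 13750 * 1,600,000,000 = 2.2e13,
	 * and 2.2e13 / 2e12 = 11 clock cycles (11 * 1250 ps = 13750 ps
	 * exactly, so no round-up is needed in this case).
	 */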
	clks_rem = do_div(clks, UL_5POW12);
	clks_rem += (clks & (UL_2POW13 - 1)) * UL_5POW12;
	clks >>= 13;

	/* If we had a remainder greater than the 1ps error, then round up */
	if (clks_rem > data_rate)
		clks++;

	/* Clamp to the maximum representable value */
	if (clks > ULL_8FS)
		clks = ULL_8FS;
	return (unsigned int) clks;
}

unsigned int mclk_to_picos(const unsigned int ctrl_num, unsigned int mclk)
{
	return get_memory_clk_period_ps(ctrl_num) * mclk;
}

#ifdef CONFIG_PPC
void
__fsl_ddr_set_lawbar(const common_timing_params_t *memctl_common_params,
		     unsigned int law_memctl,
		     unsigned int ctrl_num)
{
	unsigned long long base = memctl_common_params->base_address;
	unsigned long long size = memctl_common_params->total_mem;

	/*
	 * If no DIMMs on this controller, do not proceed any further.
	 */
	if (!memctl_common_params->ndimms_present) {
		return;
	}

#if !defined(CONFIG_PHYS_64BIT)
	if (base >= CFG_MAX_MEM_MAPPED)
		return;
	if ((base + size) >= CFG_MAX_MEM_MAPPED)
		size = CFG_MAX_MEM_MAPPED - base;
#endif
	if (set_ddr_laws(base, size, law_memctl) < 0) {
		printf("%s: ERROR (ctrl #%d, TRGT ID=%x)\n", __func__, ctrl_num,
			law_memctl);
		return;
	}
	debug("setup ddr law base = 0x%llx, size 0x%llx, TRGT_ID 0x%x\n",
	      base, size, law_memctl);
}

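/*
 * fsl_ddr_set_lawbar() is a weak alias for the default implementation
 * above; a board or SoC file can provide its own strong definition to
 * override how the DDR LAW (local access window) is programmed.
 */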
__attribute__((weak, alias("__fsl_ddr_set_lawbar"))) void
fsl_ddr_set_lawbar(const common_timing_params_t *memctl_common_params,
		   unsigned int memctl_interleaved,
		   unsigned int ctrl_num);
#endif

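/*
 * MCINTL3R configures 3-way DDR controller interleaving on e6500-based
 * SoCs (register at IMMR offset 0x18004, per the accesses below): bit 31
 * enables interleaving and the low five bits select the granule size.
 */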
void fsl_ddr_set_intl3r(const unsigned int granule_size)
{
#ifdef CONFIG_E6500
	u32 *mcintl3r = (void *) (CONFIG_SYS_IMMR + 0x18004);
	*mcintl3r = 0x80000000 | (granule_size & 0x1f);
	debug("Enable MCINTL3R with granule size 0x%x\n", granule_size);
#endif
}

u32 fsl_ddr_get_intl3r(void)
{
	u32 val = 0;
#ifdef CONFIG_E6500
	u32 *mcintl3r = (void *) (CONFIG_SYS_IMMR + 0x18004);
	val = *mcintl3r;
#endif
	return val;
}

void print_ddr_info(unsigned int start_ctrl)
{
	struct ccsr_ddr __iomem *ddr =
		(struct ccsr_ddr __iomem *)(CFG_SYS_FSL_DDR_ADDR);

#if defined(CONFIG_E6500) && (CONFIG_SYS_NUM_DDR_CTLRS == 3)
	u32 *mcintl3r = (void *) (CONFIG_SYS_IMMR + 0x18004);
#endif
#if (CONFIG_SYS_NUM_DDR_CTLRS > 1)
	uint32_t cs0_config = ddr_in32(&ddr->cs0_config);
#endif
	uint32_t sdram_cfg = ddr_in32(&ddr->sdram_cfg);
	int cas_lat;

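	/*
	 * If the first controller is not enabled (or the caller asks to
	 * start at a later controller), switch to the next controller's
	 * register block so the banner reflects an active controller.
	 */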
#if CONFIG_SYS_NUM_DDR_CTLRS >= 2
	if ((!(sdram_cfg & SDRAM_CFG_MEM_EN)) ||
	    (start_ctrl == 1)) {
		ddr = (void __iomem *)CFG_SYS_FSL_DDR2_ADDR;
		sdram_cfg = ddr_in32(&ddr->sdram_cfg);
	}
#endif
#if CONFIG_SYS_NUM_DDR_CTLRS >= 3
	if ((!(sdram_cfg & SDRAM_CFG_MEM_EN)) ||
	    (start_ctrl == 2)) {
		ddr = (void __iomem *)CFG_SYS_FSL_DDR3_ADDR;
		sdram_cfg = ddr_in32(&ddr->sdram_cfg);
	}
#endif

	if (!(sdram_cfg & SDRAM_CFG_MEM_EN)) {
		puts(" (DDR not enabled)\n");
		return;
	}

	puts(" (DDR");
	switch ((sdram_cfg & SDRAM_CFG_SDRAM_TYPE_MASK) >>
		SDRAM_CFG_SDRAM_TYPE_SHIFT) {
	case SDRAM_TYPE_DDR1:
		puts("1");
		break;
	case SDRAM_TYPE_DDR2:
		puts("2");
		break;
	case SDRAM_TYPE_DDR3:
		puts("3");
		break;
	case SDRAM_TYPE_DDR4:
		puts("4");
		break;
	default:
		puts("?");
		break;
	}

	if (sdram_cfg & SDRAM_CFG_32_BE)
		puts(", 32-bit");
	else if (sdram_cfg & SDRAM_CFG_16_BE)
		puts(", 16-bit");
	else
		puts(", 64-bit");

	/* Calculate CAS latency based on timing cfg values */
	cas_lat = ((ddr_in32(&ddr->timing_cfg_1) >> 16) & 0xf);
	if (fsl_ddr_get_version(0) <= 0x40400)
		cas_lat += 1;
	else
		cas_lat += 2;
	cas_lat += ((ddr_in32(&ddr->timing_cfg_3) >> 12) & 3) << 4;
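
	/*
	 * cas_lat is accumulated in half-clock units (the two bits taken
	 * from timing_cfg_3 extend the 4-bit field from timing_cfg_1), so
	 * print the integer part and append ".5" when the low bit is set.
	 */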
	printf(", CL=%d", cas_lat >> 1);
	if (cas_lat & 0x1)
		puts(".5");

	if (sdram_cfg & SDRAM_CFG_ECC_EN)
		puts(", ECC on)");
	else
		puts(", ECC off)");

#if (CONFIG_SYS_NUM_DDR_CTLRS == 3)
#ifdef CONFIG_E6500
	if (*mcintl3r & 0x80000000) {
		puts("\n");
		puts(" DDR Controller Interleaving Mode: ");
		switch (*mcintl3r & 0x1f) {
		case FSL_DDR_3WAY_1KB_INTERLEAVING:
			puts("3-way 1KB");
			break;
		case FSL_DDR_3WAY_4KB_INTERLEAVING:
			puts("3-way 4KB");
			break;
		case FSL_DDR_3WAY_8KB_INTERLEAVING:
			puts("3-way 8KB");
			break;
		default:
			puts("3-way UNKNOWN");
			break;
		}
	}
#endif
#endif
#if (CONFIG_SYS_NUM_DDR_CTLRS >= 2)
	if ((cs0_config & 0x20000000) && (start_ctrl == 0)) {
		puts("\n");
		puts(" DDR Controller Interleaving Mode: ");

		switch ((cs0_config >> 24) & 0xf) {
		case FSL_DDR_256B_INTERLEAVING:
			puts("256B");
			break;
		case FSL_DDR_CACHE_LINE_INTERLEAVING:
			puts("cache line");
			break;
		case FSL_DDR_PAGE_INTERLEAVING:
			puts("page");
			break;
		case FSL_DDR_BANK_INTERLEAVING:
			puts("bank");
			break;
		case FSL_DDR_SUPERBANK_INTERLEAVING:
			puts("super-bank");
			break;
		default:
			puts("invalid");
			break;
		}
	}
#endif

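	/*
	 * Bits 14:8 of sdram_cfg hold the chip-select interleaving
	 * configuration within a single controller (decoded below).
	 */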
	if ((sdram_cfg >> 8) & 0x7f) {
		puts("\n");
		puts(" DDR Chip-Select Interleaving Mode: ");
		switch (sdram_cfg >> 8 & 0x7f) {
		case FSL_DDR_CS0_CS1_CS2_CS3:
			puts("CS0+CS1+CS2+CS3");
			break;
		case FSL_DDR_CS0_CS1:
			puts("CS0+CS1");
			break;
		case FSL_DDR_CS2_CS3:
			puts("CS2+CS3");
			break;
		case FSL_DDR_CS0_CS1_AND_CS2_CS3:
			puts("CS0+CS1 and CS2+CS3");
			break;
		default:
			puts("invalid");
			break;
		}
	}
}

void __weak detail_board_ddr_info(void)
{
	print_ddr_info(0);
}

void board_add_ram_info(int use_default)
{
	detail_board_ddr_info();
}

#ifdef CONFIG_FSL_DDR_SYNC_REFRESH
#define DDRC_DEBUG20_INIT_DONE	0x80000000
#define DDRC_DEBUG2_RF		0x00000040
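/*
 * Note: the debug[] array below is zero-indexed, so debug[19] corresponds
 * to the DDRC DEBUG_20 register (init-done status) and debug[1] to DEBUG_2
 * (the manual refresh trigger controlled by DDRC_DEBUG2_RF).
 */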
void fsl_ddr_sync_memctl_refresh(unsigned int first_ctrl,
				 unsigned int last_ctrl)
{
	unsigned int i;
	u32 ddrc_debug20;
	u32 ddrc_debug2[CONFIG_SYS_NUM_DDR_CTLRS] = {};
	u32 *ddrc_debug2_p[CONFIG_SYS_NUM_DDR_CTLRS] = {};
	struct ccsr_ddr __iomem *ddr;

	for (i = first_ctrl; i <= last_ctrl; i++) {
		switch (i) {
		case 0:
			ddr = (void *)CFG_SYS_FSL_DDR_ADDR;
			break;
#if defined(CFG_SYS_FSL_DDR2_ADDR) && (CONFIG_SYS_NUM_DDR_CTLRS > 1)
		case 1:
			ddr = (void *)CFG_SYS_FSL_DDR2_ADDR;
			break;
#endif
#if defined(CFG_SYS_FSL_DDR3_ADDR) && (CONFIG_SYS_NUM_DDR_CTLRS > 2)
		case 2:
			ddr = (void *)CFG_SYS_FSL_DDR3_ADDR;
			break;
#endif
#if defined(CONFIG_SYS_FSL_DDR4_ADDR) && (CONFIG_SYS_NUM_DDR_CTLRS > 3)
		case 3:
			ddr = (void *)CONFIG_SYS_FSL_DDR4_ADDR;
			break;
#endif
		default:
			printf("%s unexpected ctrl = %u\n", __func__, i);
			return;
		}
		ddrc_debug20 = ddr_in32(&ddr->debug[19]);
		ddrc_debug2_p[i] = &ddr->debug[1];
		while (!(ddrc_debug20 & DDRC_DEBUG20_INIT_DONE)) {
			/* keep polling until DDRC init is done */
			udelay(100);
			ddrc_debug20 = ddr_in32(&ddr->debug[19]);
		}
		ddrc_debug2[i] = ddr_in32(&ddr->debug[1]) | DDRC_DEBUG2_RF;
	}
	/*
	 * Sync refresh
	 * This is put together to make sure the refresh requests are sent
	 * as close to each other as possible.
	 */
	for (i = first_ctrl; i <= last_ctrl; i++)
		ddr_out32(ddrc_debug2_p[i], ddrc_debug2[i]);
}
#endif /* CONFIG_FSL_DDR_SYNC_REFRESH */

void remove_unused_controllers(fsl_ddr_info_t *info)
{
#ifdef CONFIG_SYS_FSL_HAS_CCN504
	int i;
	u64 nodeid;
	void *hnf_sam_ctrl = (void *)(CCI_HN_F_0_BASE + CCN_HN_F_SAM_CTL);
	bool ddr0_used = false;
	bool ddr1_used = false;

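	/*
	 * Walk the eight HN-F (home node) SAM control registers in the
	 * CCN-504 and note which DDR controller each one targets; a
	 * controller that no HN-F points at is then dropped from the
	 * info structure below.
	 */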
	for (i = 0; i < 8; i++) {
		nodeid = in_le64(hnf_sam_ctrl) & CCN_HN_F_SAM_NODEID_MASK;
		if (nodeid == CCN_HN_F_SAM_NODEID_DDR0) {
			ddr0_used = true;
		} else if (nodeid == CCN_HN_F_SAM_NODEID_DDR1) {
			ddr1_used = true;
		} else {
			printf("Unknown nodeid in HN-F SAM control: 0x%llx\n",
			       nodeid);
		}
		hnf_sam_ctrl += (CCI_HN_F_1_BASE - CCI_HN_F_0_BASE);
	}
	if (!ddr0_used && !ddr1_used) {
		printf("Invalid configuration in HN-F SAM control\n");
		return;
	}

	if (!ddr0_used && info->first_ctrl == 0) {
		info->first_ctrl = 1;
		info->num_ctrls = 1;
		debug("First DDR controller disabled\n");
		return;
	}

	if (!ddr1_used && info->first_ctrl + info->num_ctrls > 1) {
		info->num_ctrls = 1;
		debug("Second DDR controller disabled\n");
	}
#endif
}