// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) Marvell International Ltd. and its affiliates
 */

#if defined(CONFIG_DDR4)

#include "ddr3_init.h"
#include "mv_ddr_regs.h"

static int mv_ddr4_dynamic_pb_wl_supp(u32 dev_num, enum mv_wl_supp_mode ecc_mode);

/* compare test for ddr4 write leveling supplementary */
#define MV_DDR4_COMP_TEST_NO_RESULT	0
#define MV_DDR4_COMP_TEST_RESULT_0	1
#define MV_DDR4_XSB_COMP_PATTERNS_NUM	8

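/*
 * Run the compare test on a single subphy and classify the result.
 * The test pattern is written to memory through the odpg, read back, and
 * each bit of the subphy is folded into an 8-bit key (one bit per pattern
 * word). A key matching the expected pattern (or one of its accepted
 * shifted variants) counts the bit as passing; the subphy passes only if
 * all of its bits pass. Returns MV_DDR4_COMP_TEST_RESULT_0 on pass,
 * MV_DDR4_COMP_TEST_NO_RESULT otherwise.
 */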
static u8 mv_ddr4_xsb_comp_test(u32 dev_num, u32 subphy_num, u32 if_id,
				enum mv_wl_supp_mode ecc_mode)
{
	u32 wl_invert;
	u8 pb_key, bit, bit_max, word;
	struct pattern_info *pattern_table = ddr3_tip_get_pattern_table();
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
	u32 subphy_max = ddr3_tip_dev_attr_get(0, MV_ATTR_OCTET_PER_INTERFACE);
	uint64_t read_pattern_64[MV_DDR4_XSB_COMP_PATTERNS_NUM] = {0};
	/*
	 * FIXME: the pattern below was used for writing to the memory
	 * by the cpu; as a workaround it is now written through the odpg.
	 * uint64_t pattern_test_table_64[MV_DDR4_XSB_COMP_PATTERNS_NUM] = {
	 *	0xffffffffffffffff,
	 *	0xffffffffffffffff,
	 *	0x0000000000000000,
	 *	0x0000000000000000,
	 *	0x0000000000000000,
	 *	0x0000000000000000,
	 *	0xffffffffffffffff,
	 *	0xffffffffffffffff};
	 */
	u32 read_pattern[MV_DDR4_XSB_COMP_PATTERNS_NUM];
	/*
	 * u32 pattern_test_table[MV_DDR4_XSB_COMP_PATTERNS_NUM] = {
	 *	0xffffffff,
	 *	0xffffffff,
	 *	0x00000000,
	 *	0x00000000,
	 *	0x00000000,
	 *	0x00000000,
	 *	0xffffffff,
	 *	0xffffffff};
	 * TODO: use pattern_table_get_word
	 */
	int i, status;
	uint64_t data64;
	uintptr_t addr64;
	int ecc_running = 0;
	u32 ecc_read_subphy_num = 0; /* FIXME: make the ecc read subphy num configurable */
	u8 bit_counter = 0;
	int edge = 0;
	/* write and read data */
	if (MV_DDR_IS_64BIT_DRAM_MODE(tm->bus_act_mask)) {
		status = ddr3_tip_if_write(0, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, ODPG_DATA_CTRL_REG,
					   effective_cs << ODPG_DATA_CS_OFFS,
					   ODPG_DATA_CS_MASK << ODPG_DATA_CS_OFFS);
		if (status != MV_OK)
			return status;

		addr64 = (uintptr_t)pattern_table[PATTERN_TEST].start_addr;
		/*
		 * FIXME: the pattern load was changed to go through the odpg
		 * because the dm is not calibrated at this stage; this change
		 * still needs to be validated.
		 * the code below loads the pattern directly to the memory:
		 *
		 * for (i = 0; i < MV_DDR4_XSB_COMP_PATTERNS_NUM; i++) {
		 *	data64 = pattern_test_table_64[i];
		 *	writeq(addr64, data64);
		 *	addr64 += sizeof(uint64_t);
		 * }
		 *
		 * FIXME: the code below loads the pattern to the memory through
		 * the odpg; it loads it twice due to a supplementary failure,
		 * which still needs to be checked.
		 */
		int j;
		for (j = 0; j < 2; j++)
			ddr3_tip_load_pattern_to_mem(dev_num, PATTERN_TEST);

	} else if (MV_DDR_IS_32BIT_IN_64BIT_DRAM_MODE(tm->bus_act_mask, subphy_max)) {
		status = ddr3_tip_if_write(0, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, ODPG_DATA_CTRL_REG,
					   effective_cs << ODPG_DATA_CS_OFFS,
					   ODPG_DATA_CS_MASK << ODPG_DATA_CS_OFFS);
		if (status != MV_OK)
			return status;

		/*
		 * FIXME: the pattern load was changed to go through the odpg
		 * because the dm is not calibrated at this stage; this change
		 * still needs to be validated (the removed code loaded the
		 * pattern directly to the memory).
		 */
		int j;
		for (j = 0; j < 2; j++)
			ddr3_tip_load_pattern_to_mem(dev_num, PATTERN_TEST);
	} else {
		/*
		 * FIXME: the pattern load was changed to go through the odpg
		 * because the dm is not calibrated at this stage; this change
		 * still needs to be validated (the removed code loaded the
		 * pattern directly to the memory).
		 */
		int j;
		for (j = 0; j < 2; j++)
			ddr3_tip_load_pattern_to_mem(dev_num, PATTERN_TEST);
	}
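
	/*
	 * note: all three bus-width branches above currently apply the same
	 * workaround - loading PATTERN_TEST twice through the odpg - and
	 * differ only in the odpg chip-select setup performed first.
	 */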

	if (ecc_mode == WRITE_LEVELING_SUPP_ECC_MODE_ECC_PUP4 ||
	    ecc_mode == WRITE_LEVELING_SUPP_ECC_MODE_ECC_PUP3 ||
	    ecc_mode == WRITE_LEVELING_SUPP_ECC_MODE_ECC_PUP8) {
		/* disable ecc write mux */
		status = ddr3_tip_if_write(dev_num, ACCESS_TYPE_UNICAST, PARAM_NOT_CARE,
					   TRAINING_SW_2_REG, 0x0, 0x100);
		if (status != MV_OK)
			return status;

		/* enable read data ecc mux */
		status = ddr3_tip_if_write(dev_num, ACCESS_TYPE_UNICAST, PARAM_NOT_CARE,
					   TRAINING_SW_2_REG, 0x3, 0x3);
		if (status != MV_OK)
			return status;

		/* set training start bit */
		status = ddr3_tip_if_write(dev_num, ACCESS_TYPE_UNICAST, PARAM_NOT_CARE,
					   TRAINING_REG, 0x80000000, 0x80000000);
		if (status != MV_OK)
			return status;

		ecc_running = 1;
		ecc_read_subphy_num = ECC_READ_BUS_0;
	}
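
	/*
	 * in the ecc modes, reads come back through the ecc read mux, so the
	 * data is sampled from a dedicated read subphy (ECC_READ_BUS_0)
	 * rather than from the subphy under test; subphy_num is switched to
	 * it before the per-bit loop below.
	 */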

	if (MV_DDR_IS_64BIT_DRAM_MODE(tm->bus_act_mask)) {
		status = ddr3_tip_if_write(0, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, ODPG_DATA_CTRL_REG,
					   effective_cs << ODPG_DATA_CS_OFFS,
					   ODPG_DATA_CS_MASK << ODPG_DATA_CS_OFFS);
		if (status != MV_OK)
			return status;
		/*
		 * when reading the pattern back, read from the address
		 * multiplied by 8: the odpg multiplies the read address by 8
		 */
		addr64 = ((uintptr_t)pattern_table[PATTERN_TEST].start_addr) << 3;
		for (i = 0; i < MV_DDR4_XSB_COMP_PATTERNS_NUM; i++) {
			data64 = readq(addr64);
			addr64 += sizeof(uint64_t);
			read_pattern_64[i] = data64;
		}

		DEBUG_LEVELING(DEBUG_LEVEL_INFO, ("xsb comp: if %d bus id %d\n", 0, subphy_num));
		for (edge = 0; edge < 8; edge++)
			DEBUG_LEVELING(DEBUG_LEVEL_INFO, ("0x%16llx\n", (unsigned long long)read_pattern_64[edge]));
		DEBUG_LEVELING(DEBUG_LEVEL_INFO, ("\n"));
	} else if (MV_DDR_IS_32BIT_IN_64BIT_DRAM_MODE(tm->bus_act_mask, subphy_max)) {
		status = ddr3_tip_if_write(0, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, ODPG_DATA_CTRL_REG,
					   effective_cs << ODPG_DATA_CS_OFFS,
					   ODPG_DATA_CS_MASK << ODPG_DATA_CS_OFFS);
		if (status != MV_OK)
			return status;

		status = ddr3_tip_ext_read(dev_num, if_id, pattern_table[PATTERN_TEST].start_addr << 3,
					   1, read_pattern);
		if (status != MV_OK)
			return status;

		DEBUG_LEVELING(DEBUG_LEVEL_INFO, ("xsb comp: if %d bus id %d\n", 0, subphy_num));
		for (edge = 0; edge < 8; edge++)
			DEBUG_LEVELING(DEBUG_LEVEL_INFO, ("0x%16x\n", read_pattern[edge]));
		DEBUG_LEVELING(DEBUG_LEVEL_INFO, ("\n"));
	} else {
		status = ddr3_tip_ext_read(dev_num, if_id, ((pattern_table[PATTERN_TEST].start_addr << 3) +
					   ((SDRAM_CS_SIZE + 1) * effective_cs)), 1, read_pattern);
		if (status != MV_OK)
			return status;

		DEBUG_LEVELING(DEBUG_LEVEL_INFO, ("xsb comp: if %d bus id %d\n", 0, subphy_num));
		for (edge = 0; edge < 8; edge++)
			DEBUG_LEVELING(DEBUG_LEVEL_INFO, ("0x%16x\n", read_pattern[edge]));
		DEBUG_LEVELING(DEBUG_LEVEL_INFO, ("\n"));
	}

	/* read centralization result to decide on half phase by the inverse bit */
	status = ddr3_tip_bus_read(dev_num, if_id, ACCESS_TYPE_UNICAST, subphy_num, DDR_PHY_DATA,
				   CTX_PHY_REG(0), &wl_invert);
	if (status != MV_OK)
		return status;

	if ((wl_invert & 0x20) != 0)
		wl_invert = 1;
	else
		wl_invert = 0;
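
	/*
	 * FIXME: wl_invert is computed here but not used by the decision
	 * logic below; it is presumably intended for the half-phase decision
	 * mentioned in the comment above.
	 */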

	/* for ecc, read from the "read" subphy (usually subphy 0) */
	if (ecc_running)
		subphy_num = ecc_read_subphy_num;

	/* per-bit loop */
	bit_max = subphy_num * BUS_WIDTH_IN_BITS + BUS_WIDTH_IN_BITS;
	for (bit = subphy_num * BUS_WIDTH_IN_BITS; bit < bit_max; bit++) {
		/* get per-bit pattern key (value of the same bit in the pattern) */
		pb_key = 0;
		for (word = 0; word < MV_DDR4_XSB_COMP_PATTERNS_NUM; word++) {
			if (MV_DDR_IS_64BIT_DRAM_MODE(tm->bus_act_mask)) {
				if ((read_pattern_64[word] & ((uint64_t)1 << bit)) != 0)
					pb_key |= (1 << word);
			} else {
				if ((read_pattern[word] & (1 << bit)) != 0)
					pb_key |= (1 << word);
			}
		}
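
		/*
		 * with PATTERN_TEST laid out as ff/ff/00/00/00/00/ff/ff (see
		 * the commented-out tables above), a correctly sampled bit
		 * yields the nominal key 0b11000011; the other accepted keys
		 * below are presumably that pattern sampled one edge early or
		 * late around the dq vref threshold.
		 */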

		/* find the key value and make decision */
		switch (pb_key) {
		/* case(s) for result 0 */
		case 0b11000011: /* nominal */
		case 0b10000011: /* sample at start of UI, sample at the dqvref TH */
		case 0b10000111: /* sample at start of UI, sample at the dqvref TH */
		case 0b11000001: /* sample at start of UI, sample at the dqvref TH */
		case 0b11100001: /* sample at start of UI, sample at the dqvref TH */
		case 0b11100011: /* sample at start of UI, sample at the dqvref TH */
		case 0b11000111: /* sample at start of UI, sample at the dqvref TH */
			bit_counter++;
			break;
		} /* end of switch */
	} /* end of per-bit loop */

	/* check that all bits in the current subphy met the switch condition above */
	if (bit_counter == BUS_WIDTH_IN_BITS)
		return MV_DDR4_COMP_TEST_RESULT_0;

	DEBUG_LEVELING(DEBUG_LEVEL_INFO,
		       ("different supplementary results (%d -> %d)\n",
			MV_DDR4_COMP_TEST_NO_RESULT, MV_DDR4_COMP_TEST_RESULT_0));
	return MV_DDR4_COMP_TEST_NO_RESULT;
}

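/*
 * Entry point for the ddr4 write leveling supplementary stage: in ecc
 * modes it first runs the per-bit algorithm on the ecc subphy (pup 3, 4,
 * or 8, depending on the board's ecc placement) and then on the data
 * subphys; without ecc it runs the regular data-subphy flow only.
 */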
int mv_ddr4_dynamic_wl_supp(u32 dev_num)
{
	int status = MV_OK;
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	if (DDR3_IS_ECC_PUP4_MODE(tm->bus_act_mask) ||
	    DDR3_IS_ECC_PUP3_MODE(tm->bus_act_mask) ||
	    DDR3_IS_ECC_PUP8_MODE(tm->bus_act_mask)) {
		if (DDR3_IS_ECC_PUP4_MODE(tm->bus_act_mask))
			status = mv_ddr4_dynamic_pb_wl_supp(dev_num, WRITE_LEVELING_SUPP_ECC_MODE_ECC_PUP4);
		else if (DDR3_IS_ECC_PUP3_MODE(tm->bus_act_mask))
			status = mv_ddr4_dynamic_pb_wl_supp(dev_num, WRITE_LEVELING_SUPP_ECC_MODE_ECC_PUP3);
		else /* WRITE_LEVELING_SUPP_ECC_MODE_ECC_PUP8 */
			status = mv_ddr4_dynamic_pb_wl_supp(dev_num, WRITE_LEVELING_SUPP_ECC_MODE_ECC_PUP8);
		if (status != MV_OK)
			return status;
		status = mv_ddr4_dynamic_pb_wl_supp(dev_num, WRITE_LEVELING_SUPP_ECC_MODE_DATA_PUPS);
	} else { /* regular supplementary for data subphys in non-ecc mode */
		status = mv_ddr4_dynamic_pb_wl_supp(dev_num, WRITE_LEVELING_SUPP_REG_MODE);
	}

	return status;
}

/* dynamic per-bit write leveling supplementary */
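/*
 * For each active subphy, this appears to work as follows: run the
 * compare test at the current write-leveling phase; if it fails, retry
 * with the phase shifted by -2, then +2, then +4 taps relative to the
 * original value (register 0x0, bits [8:6]), stopping at the first phase
 * that passes. If no candidate passes, the stage is marked TEST_FAILED
 * for the interface.
 */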
static int mv_ddr4_dynamic_pb_wl_supp(u32 dev_num, enum mv_wl_supp_mode ecc_mode)
{
	u32 if_id;
	u32 subphy_start, subphy_end;
	u32 subphy_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	u8 compare_result = 0;
	u32 orig_phase;
	u32 rd_data, wr_data = 0;
	u32 flag, step;
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
	u32 ecc_phy_access_id;
	int status;

	if (ecc_mode == WRITE_LEVELING_SUPP_ECC_MODE_ECC_PUP4 ||
	    ecc_mode == WRITE_LEVELING_SUPP_ECC_MODE_ECC_PUP3 ||
	    ecc_mode == WRITE_LEVELING_SUPP_ECC_MODE_ECC_PUP8) {
		/* enable ecc write mux */
		status = ddr3_tip_if_write(dev_num, ACCESS_TYPE_UNICAST, PARAM_NOT_CARE,
					   TRAINING_SW_2_REG, 0x100, 0x100);
		if (status != MV_OK)
			return status;

		/* disable read data ecc mux */
		status = ddr3_tip_if_write(dev_num, ACCESS_TYPE_UNICAST, PARAM_NOT_CARE,
					   TRAINING_SW_2_REG, 0x0, 0x3);
		if (status != MV_OK)
			return status;

		/* unset training start bit */
		status = ddr3_tip_if_write(dev_num, ACCESS_TYPE_UNICAST, PARAM_NOT_CARE,
					   TRAINING_REG, 0x0, 0x80000000);
		if (status != MV_OK)
			return status;

		if (ecc_mode == WRITE_LEVELING_SUPP_ECC_MODE_ECC_PUP3)
			ecc_phy_access_id = ECC_PHY_ACCESS_3;
		else if (ecc_mode == WRITE_LEVELING_SUPP_ECC_MODE_ECC_PUP4)
			ecc_phy_access_id = ECC_PHY_ACCESS_4;
		else /* ecc_mode == WRITE_LEVELING_SUPP_ECC_MODE_ECC_PUP8 */
			ecc_phy_access_id = ECC_PHY_ACCESS_8;

		subphy_start = ecc_phy_access_id;
		subphy_end = subphy_start + 1;
	} else if (ecc_mode == WRITE_LEVELING_SUPP_ECC_MODE_DATA_PUPS) {
		/* disable ecc write mux */
		status = ddr3_tip_if_write(dev_num, ACCESS_TYPE_UNICAST, PARAM_NOT_CARE,
					   TRAINING_SW_2_REG, 0x0, 0x100);
		if (status != MV_OK)
			return status;

		/* disable ecc mode */
		status = ddr3_tip_if_write(dev_num, ACCESS_TYPE_UNICAST, PARAM_NOT_CARE,
					   SDRAM_CFG_REG, 0, 0x40000);
		if (status != MV_OK)
			return status;

		subphy_start = 0;
		if (MV_DDR_IS_HALF_BUS_DRAM_MODE(tm->bus_act_mask, subphy_num))
			subphy_end = (subphy_num - 1) / 2;
		else
			subphy_end = subphy_num - 1;
	} else { /* ecc_mode == WRITE_LEVELING_SUPP_REG_MODE */
		subphy_start = 0;
		/* remove ecc subphy prior to algorithm's start */
		subphy_end = subphy_num - 1; /* TODO: check it */
	}
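
	/*
	 * at this point [subphy_start, subphy_end) selects what gets tested:
	 * only the ecc subphy in the ecc-pup modes, or the data subphys
	 * (with the ecc subphy excluded from the range) otherwise.
	 */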

	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		for (subphy_num = subphy_start; subphy_num < subphy_end; subphy_num++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, subphy_num);
			flag = 1;
			step = 0;
			status = ddr3_tip_bus_read(dev_num, if_id, ACCESS_TYPE_UNICAST, subphy_num, DDR_PHY_DATA,
						   WL_PHY_REG(effective_cs), &rd_data);
			if (status != MV_OK)
				return status;
			orig_phase = (rd_data >> 6) & 0x7;
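			/*
			 * mask 0x1c0 covers the 3-bit phase field (bits
			 * [8:6]); the orig_phase == 1 case below uses mask
			 * 0x1df to also clear the 5-bit delay in bits [4:0],
			 * presumably because the phase cannot go below zero.
			 */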
			while (flag != 0) {
				/* get decision for subphy */
				compare_result = mv_ddr4_xsb_comp_test(dev_num, subphy_num, if_id, ecc_mode);
				if (compare_result == MV_DDR4_COMP_TEST_RESULT_0) {
					flag = 0;
				} else { /* retry with a shifted phase */
					step++;
					if (step == 1) { /* shift phase (0x0[8-6]) by -2 */
						if (orig_phase > 1)
							wr_data = (rd_data & ~0x1c0) | ((orig_phase - 2) << 6);
						else if (orig_phase == 1)
							wr_data = (rd_data & ~0x1df);
						if (orig_phase >= 1)
							ddr3_tip_bus_write(dev_num, ACCESS_TYPE_UNICAST, if_id,
									   ACCESS_TYPE_UNICAST, subphy_num,
									   DDR_PHY_DATA,
									   WL_PHY_REG(effective_cs), wr_data);
					} else if (step == 2) { /* shift phase by +2 */
						if (orig_phase <= 5) {
							wr_data = (rd_data & ~0x1c0) | ((orig_phase + 2) << 6);
							ddr3_tip_bus_write(dev_num, ACCESS_TYPE_UNICAST, if_id,
									   ACCESS_TYPE_UNICAST, subphy_num,
									   DDR_PHY_DATA,
									   WL_PHY_REG(effective_cs), wr_data);
						}
					} else if (step == 3) { /* shift phase by +4 */
						if (orig_phase <= 3) {
							wr_data = (rd_data & ~0x1c0) | ((orig_phase + 4) << 6);
							ddr3_tip_bus_write(dev_num, ACCESS_TYPE_UNICAST, if_id,
									   ACCESS_TYPE_UNICAST, subphy_num,
									   DDR_PHY_DATA,
									   WL_PHY_REG(effective_cs), wr_data);
						}
					} else { /* error */
						flag = 0;
						compare_result = MV_DDR4_COMP_TEST_NO_RESULT;
						training_result[training_stage][if_id] = TEST_FAILED;
					}
				}
			}
		}
		if ((training_result[training_stage][if_id] == NO_TEST_DONE) ||
		    (training_result[training_stage][if_id] == TEST_SUCCESS))
			training_result[training_stage][if_id] = TEST_SUCCESS;
	}

	if (ecc_mode == WRITE_LEVELING_SUPP_ECC_MODE_DATA_PUPS) {
		/* enable ecc write mux */
		status = ddr3_tip_if_write(dev_num, ACCESS_TYPE_UNICAST, PARAM_NOT_CARE,
					   TRAINING_SW_2_REG, 0x100, 0x100);
		if (status != MV_OK)
			return status;

		/* enable ecc mode */
		status = ddr3_tip_if_write(dev_num, ACCESS_TYPE_UNICAST, PARAM_NOT_CARE,
					   SDRAM_CFG_REG, 0x40000, 0x40000);
		if (status != MV_OK)
			return status;
	} else if (ecc_mode == WRITE_LEVELING_SUPP_ECC_MODE_ECC_PUP4 ||
		   ecc_mode == WRITE_LEVELING_SUPP_ECC_MODE_ECC_PUP3 ||
		   ecc_mode == WRITE_LEVELING_SUPP_ECC_MODE_ECC_PUP8) {
		/* enable ecc write mux */
		status = ddr3_tip_if_write(dev_num, ACCESS_TYPE_UNICAST, PARAM_NOT_CARE,
					   TRAINING_SW_2_REG, 0x100, 0x100);
		if (status != MV_OK)
			return status;

		/* disable read data ecc mux */
		status = ddr3_tip_if_write(dev_num, ACCESS_TYPE_UNICAST, PARAM_NOT_CARE,
					   TRAINING_SW_2_REG, 0x0, 0x3);
		if (status != MV_OK)
			return status;

		/* unset training start bit */
		status = ddr3_tip_if_write(dev_num, ACCESS_TYPE_UNICAST, PARAM_NOT_CARE,
					   TRAINING_REG, 0x0, 0x80000000);
		if (status != MV_OK)
			return status;

		status = ddr3_tip_if_write(dev_num, ACCESS_TYPE_UNICAST, PARAM_NOT_CARE,
					   TRAINING_SW_1_REG, 0x1 << 16, 0x1 << 16);
		if (status != MV_OK)
			return status;
	} else {
		/* do nothing for WRITE_LEVELING_SUPP_REG_MODE */
	}
	if (training_result[training_stage][0] == TEST_SUCCESS)
		return MV_OK;
	else
		return MV_FAIL;
}
#endif /* CONFIG_DDR4 */