1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * Copyright (C) Marvell International Ltd. and its affiliates
4  */
5 
6 #include "ddr3_init.h"
7 #include "mv_ddr_training_db.h"
8 #include "mv_ddr_regs.h"
9 
/* when nonzero, ddr3_tip_reg_dump() output is produced after training */
u8 is_reg_dump = 0;
/* verbosity of the PBS (per-bit skew) debug prints; errors only by default */
u8 debug_pbs = DEBUG_LEVEL_ERROR;
12 
13 /*
14  * API to change flags outside of the lib
15  */
16 #if defined(SILENT_LIB)
/*
 * Silent-library build: debug verbosity is fixed at compile time,
 * so runtime log-level changes are intentionally ignored.
 */
void ddr3_hws_set_log_level(enum ddr_lib_debug_block block, u8 level)
{
	/* do nothing */
}
21 #else /* SILENT_LIB */
/* Debug flags for the other training modules (error-only verbosity by default) */
u8 debug_training_static = DEBUG_LEVEL_ERROR;
u8 debug_training = DEBUG_LEVEL_ERROR;
u8 debug_leveling = DEBUG_LEVEL_ERROR;
u8 debug_centralization = DEBUG_LEVEL_ERROR;
u8 debug_training_ip = DEBUG_LEVEL_ERROR;
u8 debug_training_bist = DEBUG_LEVEL_ERROR;
u8 debug_training_hw_alg = DEBUG_LEVEL_ERROR;
u8 debug_training_access = DEBUG_LEVEL_ERROR;
u8 debug_training_device = DEBUG_LEVEL_ERROR;

/* DDR4-only training stages get their own verbosity knobs */
#if defined(CONFIG_DDR4)
u8 debug_tap_tuning = DEBUG_LEVEL_ERROR;
u8 debug_calibration = DEBUG_LEVEL_ERROR;
u8 debug_ddr4_centralization = DEBUG_LEVEL_ERROR;
u8 debug_dm_tuning = DEBUG_LEVEL_ERROR;
#endif /* CONFIG_DDR4 */
39 
mv_ddr_user_log_level_set(enum ddr_lib_debug_block block)40 void mv_ddr_user_log_level_set(enum ddr_lib_debug_block block)
41 {
42 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
43 	ddr3_hws_set_log_level(block, tm->debug_level);
44 };
45 
/*
 * Set the verbosity of one debug block, or of all of them at once.
 *
 * @block: which training sub-block to configure (DEBUG_BLOCK_ALL or an
 *	   unknown value updates every flag handled below)
 * @level: new debug level (DEBUG_LEVEL_*)
 */
void ddr3_hws_set_log_level(enum ddr_lib_debug_block block, u8 level)
{
	switch (block) {
	case DEBUG_BLOCK_STATIC:
		debug_training_static = level;
		break;
	case DEBUG_BLOCK_TRAINING_MAIN:
		debug_training = level;
		break;
	case DEBUG_BLOCK_LEVELING:
		debug_leveling = level;
		break;
	case DEBUG_BLOCK_CENTRALIZATION:
		debug_centralization = level;
		break;
	case DEBUG_BLOCK_PBS:
		debug_pbs = level;
		break;
	case DEBUG_BLOCK_ALG:
		debug_training_hw_alg = level;
		break;
	case DEBUG_BLOCK_DEVICE:
		debug_training_device = level;
		break;
	case DEBUG_BLOCK_ACCESS:
		debug_training_access = level;
		break;
	case DEBUG_STAGES_REG_DUMP:
		/* register dump is on/off only: enabled iff level is TRACE */
		if (level == DEBUG_LEVEL_TRACE)
			is_reg_dump = 1;
		else
			is_reg_dump = 0;
		break;
#if defined(CONFIG_DDR4)
	case DEBUG_TAP_TUNING_ENGINE:
		debug_tap_tuning = level;
		break;
	case DEBUG_BLOCK_CALIBRATION:
		debug_calibration = level;
		break;
	case DEBUG_BLOCK_DDR4_CENTRALIZATION:
		debug_ddr4_centralization = level;
		break;
#endif /* CONFIG_DDR4 */
	case DEBUG_BLOCK_ALL:
	default:
		/*
		 * NOTE(review): debug_training_ip and debug_training_bist are
		 * not updated here (and have no dedicated case above) —
		 * confirm this is intentional.
		 */
		debug_training_static = level;
		debug_training = level;
		debug_leveling = level;
		debug_centralization = level;
		debug_pbs = level;
		debug_training_hw_alg = level;
		debug_training_access = level;
		debug_training_device = level;
#if defined(CONFIG_DDR4)
		debug_tap_tuning = level;
		debug_calibration = level;
		debug_ddr4_centralization = level;
#endif /* CONFIG_DDR4 */
	}
}
107 #endif /* SILENT_LIB */
108 
109 #if defined(DDR_VIEWER_TOOL)
110 static char *convert_freq(enum mv_ddr_freq freq);
111 #if defined(EXCLUDE_SWITCH_DEBUG)
112 u32 ctrl_sweepres[ADLL_LENGTH][MAX_INTERFACE_NUM][MAX_BUS_NUM];
113 u32 ctrl_adll[MAX_CS_NUM * MAX_INTERFACE_NUM * MAX_BUS_NUM];
114 u32 ctrl_adll1[MAX_CS_NUM * MAX_INTERFACE_NUM * MAX_BUS_NUM];
115 u32 ctrl_level_phase[MAX_CS_NUM * MAX_INTERFACE_NUM * MAX_BUS_NUM];
116 #endif /* EXCLUDE_SWITCH_DEBUG */
117 #endif /* DDR_VIEWER_TOOL */
118 
/* per-device register-access callback tables (see ddr3_tip_init_config_func) */
struct hws_tip_config_func_db config_func_info[MAX_DEVICE_NUM];
u8 is_default_centralization = 0;
u8 is_tune_result = 0;
/* sweep-log enables, toggled by hws_ddr3_tip_sweep_test() */
u8 is_validate_window_per_if = 0;
u8 is_validate_window_per_pup = 0;
u8 sweep_cnt = 1;
u32 is_bist_reset_bit = 1;
u8 is_run_leveling_sweep_tests;

/* per-device XSB info registered via ddr3_tip_register_xsb_info() */
static struct hws_xsb_info xsb_info[MAX_DEVICE_NUM];
129 
130 /*
131  * Dump Dunit & Phy registers
132  */
/*
 * Dump Dunit & Phy registers
 *
 * Prints, for every active interface, the dunit registers 0x1400..0x19ec
 * (step 4) and then the DATA and CONTROL phy register files (0x00..0xff)
 * of every active octet.  Output goes to the console via printf.
 *
 * Return: MV_OK (CHECK_STATUS returns early on a read failure)
 */
int ddr3_tip_reg_dump(u32 dev_num)
{
	u32 if_id, reg_addr, data_value, bus_id;
	u32 read_data[MAX_INTERFACE_NUM];
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	printf("-- dunit registers --\n");
	for (reg_addr = 0x1400; reg_addr < 0x19f0; reg_addr += 4) {
		printf("0x%x ", reg_addr);
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
			CHECK_STATUS(ddr3_tip_if_read
				     (dev_num, ACCESS_TYPE_UNICAST,
				      if_id, reg_addr, read_data,
				      MASK_ALL_BITS));
			printf("0x%x ", read_data[if_id]);
		}
		printf("\n");
	}

	printf("-- Phy registers --\n");
	for (reg_addr = 0; reg_addr <= 0xff; reg_addr++) {
		printf("0x%x ", reg_addr);
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
			/* DATA phy registers of all active octets first ... */
			for (bus_id = 0;
			     bus_id < octets_per_if_num;
			     bus_id++) {
				VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
				CHECK_STATUS(ddr3_tip_bus_read
					     (dev_num, if_id,
					      ACCESS_TYPE_UNICAST, bus_id,
					      DDR_PHY_DATA, reg_addr,
					      &data_value));
				printf("0x%x ", data_value);
			}
			/* ... then the CONTROL phy registers */
			for (bus_id = 0;
			     bus_id < octets_per_if_num;
			     bus_id++) {
				VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
				CHECK_STATUS(ddr3_tip_bus_read
					     (dev_num, if_id,
					      ACCESS_TYPE_UNICAST, bus_id,
					      DDR_PHY_CONTROL, reg_addr,
					      &data_value));
				printf("0x%x ", data_value);
			}
		}
		printf("\n");
	}

	return MV_OK;
}
187 
188 /*
189  * Register access func registration
190  */
/*
 * Register access func registration
 *
 * Copies the platform-specific register-access callback table into the
 * per-device slot.
 *
 * @dev_num:	 index into config_func_info[]; must be < MAX_DEVICE_NUM
 * @config_func: callback table to copy; must not be NULL
 *
 * Return: MV_OK on success, MV_BAD_PARAM on invalid argument
 */
int ddr3_tip_init_config_func(u32 dev_num,
			      struct hws_tip_config_func_db *config_func)
{
	/* an out-of-range index would write past config_func_info[] */
	if (config_func == NULL || dev_num >= MAX_DEVICE_NUM)
		return MV_BAD_PARAM;

	memcpy(&config_func_info[dev_num], config_func,
	       sizeof(struct hws_tip_config_func_db));

	return MV_OK;
}
202 
203 /*
204  * Get training result info pointer
205  */
/*
 * Get training result info pointer
 *
 * Return the per-interface result array of the given training stage.
 * NOTE: stage is not range-checked against the result table here.
 */
enum hws_result *ddr3_tip_get_result_ptr(u32 stage)
{
	return training_result[stage];
}
210 
211 /*
212  * Device info read
213  */
/*
 * Device info read
 *
 * Forward the request to the device-info callback registered for this
 * device, if one was installed via ddr3_tip_init_config_func().
 *
 * Return: callback's status, or MV_FAIL when no callback is registered.
 */
int ddr3_tip_get_device_info(u32 dev_num, struct ddr3_device_info *info_ptr)
{
	struct hws_tip_config_func_db *cfg = &config_func_info[dev_num];

	if (cfg->tip_get_device_info_func == NULL)
		return MV_FAIL;

	return cfg->tip_get_device_info_func((u8)dev_num, info_ptr);
}
223 
224 #if defined(DDR_VIEWER_TOOL)
225 /*
226  * Convert freq to character string
227  */
/*
 * Convert freq to character string
 *
 * Maps an mv_ddr_freq enumerator to a printable string for the DDR
 * viewer-tool logs.  Most values print as the raw MHz number; LOW_FREQ,
 * 360 and 1000 print their symbolic names instead (presumably historic
 * output format — confirm before changing).
 */
static char *convert_freq(enum mv_ddr_freq freq)
{
	switch (freq) {
	case MV_DDR_FREQ_LOW_FREQ:
		return "MV_DDR_FREQ_LOW_FREQ";

#if !defined(CONFIG_DDR4)
	case MV_DDR_FREQ_400:
		return "400";

	case MV_DDR_FREQ_533:
		return "533";
#endif /* CONFIG_DDR4 */

	case MV_DDR_FREQ_667:
		return "667";

	case MV_DDR_FREQ_800:
		return "800";

	case MV_DDR_FREQ_933:
		return "933";

	case MV_DDR_FREQ_1066:
		return "1066";

#if !defined(CONFIG_DDR4)
	case MV_DDR_FREQ_311:
		return "311";

	case MV_DDR_FREQ_333:
		return "333";

	case MV_DDR_FREQ_467:
		return "467";

	case MV_DDR_FREQ_850:
		return "850";

	case MV_DDR_FREQ_900:
		return "900";

	case MV_DDR_FREQ_360:
		return "MV_DDR_FREQ_360";

	case MV_DDR_FREQ_1000:
		return "MV_DDR_FREQ_1000";
#endif /* CONFIG_DDR4 */

	default:
		return "Unknown Frequency";
	}
}
281 
282 /*
283  * Convert device ID to character string
284  */
/*
 * Convert device ID to character string
 *
 * Maps a known Marvell device ID to its marketing name.
 */
static char *convert_dev_id(u32 dev_id)
{
	if (dev_id == 0x6800)
		return "A38xx";
	if (dev_id == 0x6900)
		return "A39XX";
	if (dev_id == 0xf400)
		return "AC3";
	if (dev_id == 0xfc00)
		return "BC2";

	return "Unknown Device";
}
301 
302 /*
303  * Convert device ID to character string
304  */
/*
 * Convert memory-size code to character string
 *
 * Codes 0..4 map to 512 MB .. 8 GB (each step doubles the size).
 */
static char *convert_mem_size(u32 dev_id)
{
	static char * const size_str[] = {
		"512 MB", "1 GB", "2 GB", "4 GB", "8 GB",
	};

	if (dev_id < sizeof(size_str) / sizeof(size_str[0]))
		return size_str[dev_id];

	return "wrong mem size";
}
323 
/*
 * Print a DDR setup summary: device ID, CK delay and the topology map.
 *
 * Return: MV_OK on success (CHECK_STATUS returns early if the device
 * info callback fails)
 */
int print_device_info(u8 dev_num)
{
	struct ddr3_device_info info_ptr;
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	CHECK_STATUS(ddr3_tip_get_device_info(dev_num, &info_ptr));
	printf("=== DDR setup START===\n");
	printf("\tDevice ID: %s\n", convert_dev_id(info_ptr.device_id));
	printf("\tDDR3  CK delay: %d\n", info_ptr.ck_delay);
	print_topology(tm);
	printf("=== DDR setup END===\n");

	return MV_OK;
}
338 
hws_ddr3_tip_sweep_test(int enable)339 void hws_ddr3_tip_sweep_test(int enable)
340 {
341 	if (enable) {
342 		is_validate_window_per_if = 1;
343 		is_validate_window_per_pup = 1;
344 		debug_training = DEBUG_LEVEL_TRACE;
345 	} else {
346 		is_validate_window_per_if = 0;
347 		is_validate_window_per_pup = 0;
348 	}
349 }
350 #endif /* DDR_VIEWER_TOOL */
351 
ddr3_tip_convert_tune_result(enum hws_result tune_result)352 char *ddr3_tip_convert_tune_result(enum hws_result tune_result)
353 {
354 	switch (tune_result) {
355 	case TEST_FAILED:
356 		return "FAILED";
357 	case TEST_SUCCESS:
358 		return "PASS";
359 	case NO_TEST_DONE:
360 		return "NOT COMPLETED";
361 	default:
362 		return "Un-KNOWN";
363 	}
364 }
365 
366 /*
367  * Print log info
368  */
/*
 * Print log info
 *
 * Emits the per-interface pass/fail status of every training stage that
 * was enabled in mask_tune_func.  Under DDR_VIEWER_TOOL it additionally
 * runs and prints the sweep-window validation logs first.
 *
 * @dev_num:  device index
 * @mem_addr: unused here — kept for API compatibility
 *
 * Return: MV_OK
 */
int ddr3_tip_print_log(u32 dev_num, u32 mem_addr)
{
	u32 if_id = 0;
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

#if defined(DDR_VIEWER_TOOL)
	if ((is_validate_window_per_if != 0) ||
	    (is_validate_window_per_pup != 0)) {
		u32 is_pup_log = 0;
		enum mv_ddr_freq freq;

		freq = tm->interface_params[first_active_if].memory_freq;

		is_pup_log = (is_validate_window_per_pup != 0) ? 1 : 0;
		printf("===VALIDATE WINDOW LOG START===\n");
		printf("DDR Frequency: %s   ======\n", convert_freq(freq));
		/* print sweep windows */
		ddr3_tip_run_sweep_test(dev_num, sweep_cnt, 1, is_pup_log);
		ddr3_tip_run_sweep_test(dev_num, sweep_cnt, 0, is_pup_log);
#if defined(EXCLUDE_SWITCH_DEBUG)
		if (is_run_leveling_sweep_tests == 1) {
			ddr3_tip_run_leveling_sweep_test(dev_num, sweep_cnt, 0, is_pup_log);
			ddr3_tip_run_leveling_sweep_test(dev_num, sweep_cnt, 1, is_pup_log);
		}
#endif /* EXCLUDE_SWITCH_DEBUG */
		ddr3_tip_print_all_pbs_result(dev_num);
		ddr3_tip_print_wl_supp_result(dev_num);
		printf("===VALIDATE WINDOW LOG END ===\n");
		CHECK_STATUS(ddr3_tip_restore_dunit_regs(dev_num));
		ddr3_tip_reg_dump(dev_num);
	}
#endif /* DDR_VIEWER_TOOL */

	/* one status section per active interface */
	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);

		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
				  ("IF %d Status:\n", if_id));

		if (mask_tune_func & INIT_CONTROLLER_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tInit Controller: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[INIT_CONTROLLER]
					    [if_id])));
		}
		if (mask_tune_func & SET_LOW_FREQ_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tLow freq Config: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[SET_LOW_FREQ]
					    [if_id])));
		}
		if (mask_tune_func & LOAD_PATTERN_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tLoad Pattern: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[LOAD_PATTERN]
					    [if_id])));
		}
		if (mask_tune_func & SET_MEDIUM_FREQ_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tMedium freq Config: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[SET_MEDIUM_FREQ]
					    [if_id])));
		}
		if (mask_tune_func & WRITE_LEVELING_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tWL: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[WRITE_LEVELING]
					    [if_id])));
		}
		if (mask_tune_func & LOAD_PATTERN_2_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tLoad Pattern: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[LOAD_PATTERN_2]
					    [if_id])));
		}
		if (mask_tune_func & READ_LEVELING_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tRL: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[READ_LEVELING]
					    [if_id])));
		}
		if (mask_tune_func & WRITE_LEVELING_SUPP_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tWL Supp: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[WRITE_LEVELING_SUPP]
					    [if_id])));
		}
		if (mask_tune_func & PBS_RX_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tPBS RX: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[PBS_RX]
					    [if_id])));
		}
		if (mask_tune_func & PBS_TX_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tPBS TX: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[PBS_TX]
					    [if_id])));
		}
		if (mask_tune_func & SET_TARGET_FREQ_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tTarget freq Config: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[SET_TARGET_FREQ]
					    [if_id])));
		}
		if (mask_tune_func & WRITE_LEVELING_TF_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tWL TF: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[WRITE_LEVELING_TF]
					    [if_id])));
		}
#if !defined(CONFIG_DDR4)
		if (mask_tune_func & READ_LEVELING_TF_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tRL TF: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[READ_LEVELING_TF]
					    [if_id])));
		}
#endif /* CONFIG_DDR4 */
		if (mask_tune_func & WRITE_LEVELING_SUPP_TF_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tWL TF Supp: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result
					    [WRITE_LEVELING_SUPP_TF]
					    [if_id])));
		}
		if (mask_tune_func & CENTRALIZATION_RX_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tCentr RX: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[CENTRALIZATION_RX]
					    [if_id])));
		}
		if (mask_tune_func & VREF_CALIBRATION_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tVREF_CALIBRATION: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[VREF_CALIBRATION]
					    [if_id])));
		}
		if (mask_tune_func & CENTRALIZATION_TX_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tCentr TX: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[CENTRALIZATION_TX]
					    [if_id])));
		}
#if defined(CONFIG_DDR4)
		if (mask_tune_func & SW_READ_LEVELING_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tSW RL TF: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[SW_READ_LEVELING]
					    [if_id])));
		}
		if (mask_tune_func & RECEIVER_CALIBRATION_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tRX CAL: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[RECEIVER_CALIBRATION]
					    [if_id])));
		}
		if (mask_tune_func & WL_PHASE_CORRECTION_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tWL PHASE CORRECT: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[WL_PHASE_CORRECTION]
					    [if_id])));
		}
		if (mask_tune_func & DQ_VREF_CALIBRATION_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tDQ VREF CAL: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[DQ_VREF_CALIBRATION]
					    [if_id])));
		}
		if (mask_tune_func & DQ_MAPPING_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tDQ MAP: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[DQ_MAPPING]
					    [if_id])));
		}
#endif /* CONFIG_DDR4 */
	}

	return MV_OK;
}
571 
572 #if !defined(EXCLUDE_DEBUG_PRINTS)
573 /*
574  * Print stability log info
575  */
/*
 * Print stability log info
 *
 * Emits a CSV-style report: a title row per active interface, then one
 * data section per active interface with temperature, calibration
 * values, and per-CS / per-octet WL, RL, centralization, vref and PBS
 * readings taken live from the dunit and phy registers.
 *
 * Return: MV_OK (CHECK_STATUS returns early on a read failure)
 */
int ddr3_tip_print_stability_log(u32 dev_num)
{
	u8 if_id = 0, csindex = 0, bus_id = 0, idx = 0;
	u32 reg_data;
#if defined(CONFIG_DDR4)
	u32 reg_data1;
#endif /* CONFIG_DDR4 */
	u32 read_data[MAX_INTERFACE_NUM];
	unsigned int max_cs = mv_ddr_cs_num_get();
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	/* Title print */
	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		printf("Title: I/F# , Tj, Calibration_n0, Calibration_p0, Calibration_n1, Calibration_p1, Calibration_n2, Calibration_p2,");
		for (csindex = 0; csindex < max_cs; csindex++) {
			printf("CS%d , ", csindex);
			printf("\n");
			/*
			 * NOTE(review): bus_id is still 0 here, so only
			 * octet 0's activity is checked — confirm intended.
			 */
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
#if defined(CONFIG_DDR4)
			printf("DminTx, AreaTx, DminRx, AreaRx, WL_tot, WL_ADLL, WL_PH, RL_Tot, RL_ADLL, RL_PH, RL_Smp, CenTx, CenRx, Vref, DQVref,");
			for (idx = 0; idx < 11; idx++)
				printf("DC-Pad%d,", idx);
#else /* CONFIG_DDR4 */
			printf("VWTx, VWRx, WL_tot, WL_ADLL, WL_PH, RL_Tot, RL_ADLL, RL_PH, RL_Smp, Cen_tx, Cen_rx, Vref, DQVref,");
#endif /* CONFIG_DDR4 */
			printf("\t\t");
			for (idx = 0; idx < 11; idx++)
				printf("PBSTx-Pad%d,", idx);
			printf("\t\t");
			for (idx = 0; idx < 11; idx++)
				printf("PBSRx-Pad%d,", idx);
		}
	}
	printf("\n");

	/* Data print */
	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);

		/* Tj via the registered temperature callback, or 0 if none */
		printf("Data: %d,%d,", if_id,
		       (config_func_info[dev_num].tip_get_temperature != NULL)
		       ? (config_func_info[dev_num].
			  tip_get_temperature(dev_num)) : (0));

		/* calibration n/p fields from 0x14c8, 0x17c8 and 0x1dc8 */
		CHECK_STATUS(ddr3_tip_if_read
			     (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x14c8,
			      read_data, MASK_ALL_BITS));
		printf("%d,%d,", ((read_data[if_id] & 0x3f0) >> 4),
		       ((read_data[if_id] & 0xfc00) >> 10));
		CHECK_STATUS(ddr3_tip_if_read
			     (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x17c8,
			      read_data, MASK_ALL_BITS));
		printf("%d,%d,", ((read_data[if_id] & 0x3f0) >> 4),
		       ((read_data[if_id] & 0xfc00) >> 10));
		CHECK_STATUS(ddr3_tip_if_read
			     (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x1dc8,
			      read_data, MASK_ALL_BITS));
		printf("%d,%d,", ((read_data[if_id] & 0x3f0000) >> 16),
		       ((read_data[if_id] & 0xfc00000) >> 22));

		for (csindex = 0; csindex < max_cs; csindex++) {
			printf("CS%d , ", csindex);
			for (bus_id = 0; bus_id < MAX_BUS_NUM; bus_id++) {
				printf("\n");
				VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
#if defined(CONFIG_DDR4)
				/* DminTx, areaTX */
				ddr3_tip_bus_read(dev_num, if_id,
						  ACCESS_TYPE_UNICAST,
						  bus_id, DDR_PHY_DATA,
						  RESULT_PHY_REG +
						  csindex, &reg_data);
				ddr3_tip_bus_read(dev_num, if_id,
						  ACCESS_TYPE_UNICAST,
						  dmin_phy_reg_table
						  [csindex * 5 + bus_id][0],
						  DDR_PHY_CONTROL,
						  dmin_phy_reg_table
						  [csindex * 5 + bus_id][1],
						  &reg_data1);
				printf("%d,%d,", 2 * (reg_data1 & 0xFF),
				       reg_data);
				/* DminRx, areaRX */
				ddr3_tip_bus_read(dev_num, if_id,
						  ACCESS_TYPE_UNICAST,
						  bus_id, DDR_PHY_DATA,
						  RESULT_PHY_REG +
						  csindex + 4, &reg_data);
				ddr3_tip_bus_read(dev_num, if_id,
						  ACCESS_TYPE_UNICAST,
						  dmin_phy_reg_table
						  [csindex * 5 + bus_id][0],
						  DDR_PHY_CONTROL,
						  dmin_phy_reg_table
						  [csindex * 5 + bus_id][1],
						  &reg_data1);
				printf("%d,%d,", 2 * (reg_data1 >> 8),
				       reg_data);
#else /* CONFIG_DDR4 */
				/* valid-window TX/RX from the result reg */
				ddr3_tip_bus_read(dev_num, if_id,
						  ACCESS_TYPE_UNICAST,
						  bus_id, DDR_PHY_DATA,
						  RESULT_PHY_REG +
						  csindex, &reg_data);
				printf("%d,%d,", (reg_data & 0x1f),
				       ((reg_data & 0x3e0) >> 5));
#endif /* CONFIG_DDR4 */
				/* WL */
				ddr3_tip_bus_read(dev_num, if_id,
						  ACCESS_TYPE_UNICAST,
						  bus_id, DDR_PHY_DATA,
						  WL_PHY_REG(csindex),
						  &reg_data);
				printf("%d,%d,%d,",
				       (reg_data & 0x1f) +
				       ((reg_data & 0x1c0) >> 6) * 32,
				       (reg_data & 0x1f),
				       (reg_data & 0x1c0) >> 6);
				/* RL */
				CHECK_STATUS(ddr3_tip_if_read
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id,
					      RD_DATA_SMPL_DLYS_REG,
					      read_data, MASK_ALL_BITS));
				read_data[if_id] =
					(read_data[if_id] &
					 (0x1f << (8 * csindex))) >>
					(8 * csindex);
				ddr3_tip_bus_read(dev_num, if_id,
						  ACCESS_TYPE_UNICAST, bus_id,
						  DDR_PHY_DATA,
						  RL_PHY_REG(csindex),
						  &reg_data);
				printf("%d,%d,%d,%d,",
				       (reg_data & 0x1f) +
				       ((reg_data & 0x1c0) >> 6) * 32 +
				       read_data[if_id] * 64,
				       (reg_data & 0x1f),
				       ((reg_data & 0x1c0) >> 6),
				       read_data[if_id]);
				/* Centralization */
				ddr3_tip_bus_read(dev_num, if_id,
						  ACCESS_TYPE_UNICAST, bus_id,
						  DDR_PHY_DATA,
						  CTX_PHY_REG(csindex),
						  &reg_data);
				printf("%d,", (reg_data & 0x3f));
				ddr3_tip_bus_read(dev_num, if_id,
						  ACCESS_TYPE_UNICAST, bus_id,
						  DDR_PHY_DATA,
						  CRX_PHY_REG(csindex),
						   &reg_data);
				printf("%d,", (reg_data & 0x1f));
				/* Vref */
				ddr3_tip_bus_read(dev_num, if_id,
						  ACCESS_TYPE_UNICAST, bus_id,
						  DDR_PHY_DATA,
						  PAD_CFG_PHY_REG,
						  &reg_data);
				printf("%d,", (reg_data & 0x7));
				/* DQVref */
				/* Need to add the Read Function from device */
				printf("%d,", 0);
#if defined(CONFIG_DDR4)
				printf("\t\t");
				for (idx = 0; idx < 11; idx++) {
					ddr3_tip_bus_read(dev_num, if_id,
							  ACCESS_TYPE_UNICAST,
							  bus_id, DDR_PHY_DATA,
							  0xd0 + 12 * csindex +
							  idx, &reg_data);
					printf("%d,", (reg_data & 0x3f));
				}
#endif /* CONFIG_DDR4 */
				/* PBS TX per-pad values */
				printf("\t\t");
				for (idx = 0; idx < 11; idx++) {
					ddr3_tip_bus_read(dev_num, if_id,
							  ACCESS_TYPE_UNICAST,
							  bus_id, DDR_PHY_DATA,
							  0x10 +
							  16 * csindex +
							  idx, &reg_data);
					printf("%d,", (reg_data & 0x3f));
				}
				/* PBS RX per-pad values */
				printf("\t\t");
				for (idx = 0; idx < 11; idx++) {
					ddr3_tip_bus_read(dev_num, if_id,
							  ACCESS_TYPE_UNICAST,
							  bus_id, DDR_PHY_DATA,
							  0x50 +
							  16 * csindex +
							  idx, &reg_data);
					printf("%d,", (reg_data & 0x3f));
				}
			}
		}
	}
	printf("\n");

	return MV_OK;
}
778 #endif /* EXCLUDE_DEBUG_PRINTS */
779 
780 /*
781  * Register XSB information
782  */
/*
 * Register XSB information
 *
 * Store a copy of the caller's XSB info table in the per-device slot.
 */
int ddr3_tip_register_xsb_info(u32 dev_num, struct hws_xsb_info *xsb_info_table)
{
	/* struct assignment copies the whole table */
	xsb_info[dev_num] = *xsb_info_table;

	return MV_OK;
}
788 
789 /*
790  * Read ADLL Value
791  */
/*
 * Read ADLL Value
 *
 * Read the phy DATA register at reg_addr from every active octet of
 * every active interface and store (value & mask) in pup_values[],
 * indexed by if_id * octets_per_if_num + bus_id.
 *
 * Return: 0 (CHECK_STATUS returns early on a read failure)
 */
int ddr3_tip_read_adll_value(u32 dev_num, u32 pup_values[MAX_INTERFACE_NUM * MAX_BUS_NUM],
			     u32 reg_addr, u32 mask)
{
	u32 data_value;
	u32 if_id = 0, bus_id = 0;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	/*
	 * multi CS support - reg_addr is calculated in calling function
	 * with CS offset
	 */
	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		for (bus_id = 0; bus_id < octets_per_if_num;
		     bus_id++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
			CHECK_STATUS(ddr3_tip_bus_read(dev_num, if_id,
						       ACCESS_TYPE_UNICAST,
						       bus_id,
						       DDR_PHY_DATA, reg_addr,
						       &data_value));
			pup_values[if_id *
				   octets_per_if_num + bus_id] =
				data_value & mask;
		}
	}

	return 0;
}
822 
823 /*
824  * Write ADLL Value
825  */
/*
 * Write ADLL Value
 *
 * Write pup_values[if_id * octets_per_if_num + bus_id] to the phy DATA
 * register at reg_addr for every active octet of every active interface.
 *
 * Return: 0 (CHECK_STATUS returns early on a write failure)
 */
int ddr3_tip_write_adll_value(u32 dev_num, u32 pup_values[MAX_INTERFACE_NUM * MAX_BUS_NUM],
			      u32 reg_addr)
{
	u32 if_id = 0, bus_id = 0;
	u32 data;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	/*
	 * multi CS support - reg_addr is calculated in calling function
	 * with CS offset
	 */
	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		for (bus_id = 0; bus_id < octets_per_if_num;
		     bus_id++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
			data = pup_values[if_id *
					  octets_per_if_num +
					  bus_id];
			CHECK_STATUS(ddr3_tip_bus_write(dev_num,
							ACCESS_TYPE_UNICAST,
							if_id,
							ACCESS_TYPE_UNICAST,
							bus_id, DDR_PHY_DATA,
							reg_addr, data));
		}
	}

	return 0;
}
857 
858 /**
859  * Read Phase Value
860  */
/**
 * Read Phase Value
 *
 * Same walk as ddr3_tip_read_adll_value(): read the phy DATA register
 * at reg_addr from every active octet/interface and store the masked
 * value in pup_values[].
 *
 * Return: 0 (CHECK_STATUS returns early on a read failure)
 */
int read_phase_value(u32 dev_num, u32 pup_values[MAX_INTERFACE_NUM * MAX_BUS_NUM],
		     int reg_addr, u32 mask)
{
	u32  data_value;
	u32 if_id = 0, bus_id = 0;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	/* multi CS support - reg_addr is calculated in calling function with CS offset */
	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		for (bus_id = 0; bus_id < octets_per_if_num; bus_id++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
			CHECK_STATUS(ddr3_tip_bus_read(dev_num, if_id,
						       ACCESS_TYPE_UNICAST,
						       bus_id,
						       DDR_PHY_DATA, reg_addr,
						       &data_value));
			pup_values[if_id * octets_per_if_num + bus_id] = data_value & mask;
		}
	}

	return 0;
}
885 
886 /**
887  * Write Leveling Value
888  */
/**
 * Write Leveling Value
 *
 * For every active octet/interface, write the sum of the ADLL value and
 * its phase value (pup_values[] + pup_ph_values[]) to the phy DATA
 * register at reg_addr.
 *
 * Return: 0 (CHECK_STATUS returns early on a write failure)
 */
int write_leveling_value(u32 dev_num, u32 pup_values[MAX_INTERFACE_NUM * MAX_BUS_NUM],
			 u32 pup_ph_values[MAX_INTERFACE_NUM * MAX_BUS_NUM], int reg_addr)
{
	u32 if_id = 0, bus_id = 0;
	u32 data;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	/* multi CS support - reg_addr is calculated in calling function with CS offset */
	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		for (bus_id = 0 ; bus_id < octets_per_if_num ; bus_id++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
			data = pup_values[if_id * octets_per_if_num + bus_id] +
			       pup_ph_values[if_id * octets_per_if_num + bus_id];
			CHECK_STATUS(ddr3_tip_bus_write(dev_num,
							ACCESS_TYPE_UNICAST,
							if_id,
							ACCESS_TYPE_UNICAST,
							bus_id,
							DDR_PHY_DATA,
							reg_addr,
							data));
		}
	}

	return 0;
}
917 
918 #if !defined(EXCLUDE_SWITCH_DEBUG)
/*
 * NOTE(review): config_func_info is also defined unconditionally earlier
 * in this file; this second tentative definition is redundant (though
 * legal C) when EXCLUDE_SWITCH_DEBUG is not set — confirm and consider
 * removing one of the two.
 */
struct hws_tip_config_func_db config_func_info[MAX_DEVICE_NUM];
u32 start_xsb_offset = 0;
u8 is_rl_old = 0;
u8 is_freq_old = 0;
u8 is_dfs_disabled = 0;
u32 default_centrlization_value = 0x12;
u32 activate_select_before_run_alg = 1, activate_deselect_after_run_alg = 1,
	rl_test = 0, reset_read_fifo = 0;
int debug_acc = 0;
u32 ctrl_sweepres[ADLL_LENGTH][MAX_INTERFACE_NUM][MAX_BUS_NUM];
u32 ctrl_adll[MAX_CS_NUM * MAX_INTERFACE_NUM * MAX_BUS_NUM];

/* XSB test patterns: nibble counts, alternating words and solid fills */
u32 xsb_test_table[][8] = {
	{0x00000000, 0x11111111, 0x22222222, 0x33333333, 0x44444444, 0x55555555,
	 0x66666666, 0x77777777},
	{0x88888888, 0x99999999, 0xaaaaaaaa, 0xbbbbbbbb, 0xcccccccc, 0xdddddddd,
	 0xeeeeeeee, 0xffffffff},
	{0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
	 0x00000000, 0xffffffff},
	{0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
	 0x00000000, 0xffffffff},
	{0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
	 0x00000000, 0xffffffff},
	{0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
	 0x00000000, 0xffffffff},
	{0x00000000, 0x00000000, 0xffffffff, 0xffffffff, 0x00000000, 0x00000000,
	 0xffffffff, 0xffffffff},
	{0x00000000, 0x00000000, 0x00000000, 0xffffffff, 0x00000000, 0x00000000,
	 0x00000000, 0x00000000},
	{0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0x00000000, 0xffffffff,
	 0xffffffff, 0xffffffff}
};
951 
ddr3_tip_print_adll(void)952 int ddr3_tip_print_adll(void)
953 {
954 	u32 bus_cnt = 0, if_id, data_p1, data_p2, ui_data3, dev_num = 0;
955 	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
956 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
957 
958 	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
959 		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
960 		for (bus_cnt = 0; bus_cnt < octets_per_if_num;
961 		     bus_cnt++) {
962 			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_cnt);
963 			CHECK_STATUS(ddr3_tip_bus_read
964 				     (dev_num, if_id,
965 				      ACCESS_TYPE_UNICAST, bus_cnt,
966 				      DDR_PHY_DATA, 0x1, &data_p1));
967 			CHECK_STATUS(ddr3_tip_bus_read
968 				     (dev_num, if_id, ACCESS_TYPE_UNICAST,
969 				      bus_cnt, DDR_PHY_DATA, 0x2, &data_p2));
970 			CHECK_STATUS(ddr3_tip_bus_read
971 				     (dev_num, if_id, ACCESS_TYPE_UNICAST,
972 				      bus_cnt, DDR_PHY_DATA, 0x3, &ui_data3));
973 			DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
974 					  (" IF %d bus_cnt %d  phy_reg_1_data 0x%x phy_reg_2_data 0x%x phy_reg_3_data 0x%x\n",
975 					   if_id, bus_cnt, data_p1, data_p2,
976 					   ui_data3));
977 			}
978 	}
979 
980 	return MV_OK;
981 }
982 
983 #endif /* EXCLUDE_SWITCH_DEBUG */
984 
985 #if defined(DDR_VIEWER_TOOL)
986 /*
987  * Print ADLL
988  */
/*
 * Print one comma-separated ADLL value per active octet per interface
 * (octet-major order), followed by a newline. Always returns MV_OK.
 */
int print_adll(u32 dev_num, u32 adll[MAX_INTERFACE_NUM * MAX_BUS_NUM])
{
	u32 octet, iface;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	for (octet = 0; octet < octets_per_if_num; octet++) {
		VALIDATE_BUS_ACTIVE(tm->bus_act_mask, octet);
		for (iface = 0; iface < MAX_INTERFACE_NUM; iface++)
			printf("%d ,", adll[iface * octets_per_if_num + octet]);
	}
	printf("\n");

	return MV_OK;
}
1004 
/*
 * Same layout as print_adll(), but prints only the phase portion of each
 * entry (bits above bit 5, i.e. value >> 6). Always returns MV_OK.
 */
int print_ph(u32 dev_num, u32 adll[MAX_INTERFACE_NUM * MAX_BUS_NUM])
{
	u32 octet, iface;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	for (octet = 0; octet < octets_per_if_num; octet++) {
		VALIDATE_BUS_ACTIVE(tm->bus_act_mask, octet);
		for (iface = 0; iface < MAX_INTERFACE_NUM; iface++)
			printf("%d ,", adll[iface * octets_per_if_num + octet] >> 6);
	}
	printf("\n");

	return MV_OK;
}
1020 #endif /* DDR_VIEWER_TOOL */
1021 
1022 #if !defined(EXCLUDE_SWITCH_DEBUG)
1023 /* byte_index - only byte 0, 1, 2, or 3, oxff - test all bytes */
/*
 * Compare expected burst data against received data under a byte mask.
 *
 * if_id      - interface whose received word is checked and reported
 * p_src      - expected data (EXT_ACCESS_BURST_LENGTH words)
 * p_dst      - received data, one word per interface
 * byte_index - byte lane to compare (0..3), or 0xff to compare all bytes
 *
 * Returns 1 on mismatch (after dumping expected/received words at error
 * level), 0 on match.
 */
static u32 ddr3_tip_compare(u32 if_id, u32 *p_src, u32 *p_dst,
			    u32 byte_index)
{
	u32 burst_cnt = 0, addr_offset, i_id;
	int b_is_fail = 0;

	/*
	 * Build the byte-lane mask. Shift an unsigned constant: the old
	 * '0xff << (byte_index * 8)' left-shifted a signed int into the
	 * sign bit for byte_index == 3, which is undefined behavior.
	 */
	addr_offset = (byte_index == 0xff) ?
		0xffffffffu : ((u32)0xffu << (byte_index * 8));
	for (burst_cnt = 0; burst_cnt < EXT_ACCESS_BURST_LENGTH; burst_cnt++) {
		/*
		 * NOTE(review): p_dst is indexed by the fixed if_id while
		 * p_src advances with burst_cnt; p_dst[burst_cnt] looks like
		 * the intended comparison. Behavior kept as-is -- confirm
		 * against callers before changing.
		 */
		if ((p_src[burst_cnt] & addr_offset) !=
		    (p_dst[if_id] & addr_offset))
			b_is_fail = 1;
	}

	if (b_is_fail == 1) {
		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
				  ("IF %d exp: ", if_id));
		for (i_id = 0; i_id <= MAX_INTERFACE_NUM - 1; i_id++) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("0x%8x ", p_src[i_id]));
		}
		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
				  ("\n_i_f %d rcv: ", if_id));
		for (i_id = 0; i_id <= MAX_INTERFACE_NUM - 1; i_id++) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("(0x%8x ", p_dst[i_id]));
		}
		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, ("\n "));
	}

	return b_is_fail;
}
1057 #endif /* EXCLUDE_SWITCH_DEBUG */
1058 
1059 #if defined(DDR_VIEWER_TOOL)
1060 /*
1061  * Sweep validation
1062  */
/*
 * Sweep a TX or RX ADLL over its full range on all active interfaces and
 * PHYs, run the BIST patterns at every step, and print the accumulated
 * error counts as CSV lines (DDR viewer tool output).
 *
 * dev_num    - device number, forwarded to the register-access layer
 * repeat_num - requested repetitions; NOTE(review): overridden to 2 below
 * direction  - 0 = TX sweep (CTX_PHY_REG), else RX sweep (CRX_PHY_REG)
 * mode       - 1 = sweep each PHY (pup) separately via unicast access,
 *              else sweep all PHYs together via multicast
 *
 * The pre-sweep ADLL values are saved and restored afterwards.
 * Always returns 0; results are only printed.
 */
int ddr3_tip_run_sweep_test(int dev_num, u32 repeat_num, u32 direction,
			    u32 mode)
{
	u32 pup = 0, start_pup = 0, end_pup = 0;
	u32 adll = 0, rep = 0, pattern_idx = 0;
	u32 res[MAX_INTERFACE_NUM] = { 0 };	/* per-interface BIST result */
	int if_id = 0;
	u32 adll_value = 0;
	u32 reg;				/* PHY register being swept */
	enum hws_access_type pup_access;
	u32 cs;
	unsigned int max_cs = mv_ddr_cs_num_get();
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	/* NOTE(review): the caller's repeat_num is ignored and forced to 2 */
	repeat_num = 2;

	if (mode == 1) {
		/* per pup */
		start_pup = 0;
		end_pup = octets_per_if_num - 1;
		pup_access = ACCESS_TYPE_UNICAST;
	} else {
		/* single multicast pass covering all pups at once */
		start_pup = 0;
		end_pup = 0;
		pup_access = ACCESS_TYPE_MULTICAST;
	}

	for (cs = 0; cs < max_cs; cs++) {
		reg = (direction == 0) ? CTX_PHY_REG(cs) : CRX_PHY_REG(cs);
		/* clear the per-step result accumulators */
		for (adll = 0; adll < ADLL_LENGTH; adll++) {
			for (if_id = 0;
			     if_id <= MAX_INTERFACE_NUM - 1;
			     if_id++) {
				VALIDATE_IF_ACTIVE
					(tm->if_act_mask,
					 if_id);
				for (pup = start_pup; pup <= end_pup; pup++) {
					ctrl_sweepres[adll][if_id][pup] =
						0;
				}
			}
		}

		for (adll = 0; adll < (MAX_INTERFACE_NUM * MAX_BUS_NUM); adll++)
			ctrl_adll[adll] = 0;
		/*
		 * NOTE(review): the indentation below is misleading -- the
		 * read_adll_value() call is NOT part of the loop above; it
		 * executes exactly once per chip select.
		 */
			/* Save DQS value(after algorithm run) */
			ddr3_tip_read_adll_value(dev_num, ctrl_adll,
						 reg, MASK_ALL_BITS);

		/*
		 * Sweep ADLL  from 0:31 on all I/F on all Pup and perform
		 * BIST on each stage.
		 */
		for (pup = start_pup; pup <= end_pup; pup++) {
			for (adll = 0; adll < ADLL_LENGTH; adll++) {
				for (rep = 0; rep < repeat_num; rep++) {
					for (pattern_idx = PATTERN_KILLER_DQ0;
					     pattern_idx < PATTERN_LAST;
					     pattern_idx++) {
						/* TX steps by 2, RX by 1 */
						adll_value =
							(direction == 0) ? (adll * 2) : adll;
						CHECK_STATUS(ddr3_tip_bus_write
							     (dev_num, ACCESS_TYPE_MULTICAST, 0,
							      pup_access, pup, DDR_PHY_DATA,
							      reg, adll_value));
						hws_ddr3_run_bist(dev_num, sweep_pattern, res,
								  cs);
						/* ddr3_tip_reset_fifo_ptr(dev_num); */
						for (if_id = 0;
						     if_id < MAX_INTERFACE_NUM;
						     if_id++) {
							VALIDATE_IF_ACTIVE
								(tm->if_act_mask,
								 if_id);
							/* accumulate this step's BIST errors */
							ctrl_sweepres[adll][if_id][pup]
								+= res[if_id];
							if (mode == 1) {
								/*
								 * per-pup mode: restore the saved ADLL
								 * value for this pup before moving on.
								 * NOTE(review): 'if_id * cs * octets_per_if_num'
								 * collapses to 0 when cs == 0 regardless of
								 * if_id -- index looks suspicious; confirm
								 * the intended ctrl_adll layout.
								 */
								CHECK_STATUS
									(ddr3_tip_bus_write
									 (dev_num,
									  ACCESS_TYPE_UNICAST,
									  if_id,
									  ACCESS_TYPE_UNICAST,
									  pup,
									  DDR_PHY_DATA,
									  reg,
									  ctrl_adll[if_id *
										    cs *
										    octets_per_if_num
										    + pup]));
							}
						}
					}
				}
			}
		}
		/* CSV header: one column per active interface (or per pup) */
		printf("Final, CS %d,%s, Sweep, Result, Adll,", cs,
		       ((direction == 0) ? "TX" : "RX"));
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
			if (mode == 1) {
				for (pup = start_pup; pup <= end_pup; pup++) {
					VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
					printf("I/F%d-PHY%d , ", if_id, pup);
				}
			} else {
				printf("I/F%d , ", if_id);
			}
		}
		printf("\n");

		/* CSV body: one row per ADLL step with accumulated errors */
		for (adll = 0; adll < ADLL_LENGTH; adll++) {
			adll_value = (direction == 0) ? (adll * 2) : adll;
			printf("Final,%s, Sweep, Result, %d ,",
			       ((direction == 0) ? "TX" : "RX"), adll_value);

			for (if_id = 0;
			     if_id <= MAX_INTERFACE_NUM - 1;
			     if_id++) {
				VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
				for (pup = start_pup; pup <= end_pup; pup++) {
					printf("%8d , ",
					       ctrl_sweepres[adll][if_id]
					       [pup]);
				}
			}
			printf("\n");
		}

		/*
		 * Write back to the phy the Rx DQS value, we store in
		 * the beginning.
		 */
		ddr3_tip_write_adll_value(dev_num, ctrl_adll, reg);
		/* print adll results */
		ddr3_tip_read_adll_value(dev_num, ctrl_adll, reg, MASK_ALL_BITS);
		printf("%s, DQS, ADLL,,,", (direction == 0) ? "Tx" : "Rx");
		print_adll(dev_num, ctrl_adll);
	}
	ddr3_tip_reset_fifo_ptr(dev_num);

	return 0;
}
1207 
1208 #if defined(EXCLUDE_SWITCH_DEBUG)
/*
 * Sweep the write-leveling (TX) or read-leveling (RX) delay around the
 * trained value on all active interfaces/PHYs, run BIST at each step, and
 * print the accumulated error counts as CSV (DDR viewer tool output).
 *
 * dev_num    - device number, forwarded to the register-access layer
 * repeat_num - number of BIST repetitions per step
 * direction  - 0 = TX sweep (WL_PHY_REG, with matched CTX adjustment),
 *              else RX sweep (RL_PHY_REG)
 * mode       - 1 = sweep each PHY (pup) separately via unicast,
 *              else all PHYs together via multicast
 *
 * Pre-sweep leveling values (ADLL, phase, and for TX the centralization
 * value) are saved first and written back afterwards.
 * Always returns 0; results are only printed.
 */
int ddr3_tip_run_leveling_sweep_test(int dev_num, u32 repeat_num,
				     u32 direction, u32 mode)
{
	u32 pup = 0, start_pup = 0, end_pup = 0, start_adll = 0;
	u32 adll = 0, rep = 0, pattern_idx = 0;
	u32 read_data[MAX_INTERFACE_NUM];
	u32 res[MAX_INTERFACE_NUM] = { 0 };	/* per-interface BIST result */
	int if_id = 0, gap = 0;			/* gap: centralization-vs-leveling delta (TX only) */
	u32 adll_value = 0;
	u32 reg;				/* leveling PHY register being swept */
	enum hws_access_type pup_access;
	u32 cs;
	unsigned int max_cs = mv_ddr_cs_num_get();
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	if (mode == 1) { /* per pup */
		start_pup = 0;
		end_pup = octets_per_if_num - 1;
		pup_access = ACCESS_TYPE_UNICAST;
	} else {
		/* single multicast pass covering all pups at once */
		start_pup = 0;
		end_pup = 0;
		pup_access = ACCESS_TYPE_MULTICAST;
	}

	for (cs = 0; cs < max_cs; cs++) {
		reg = (direction == 0) ? WL_PHY_REG(cs) : RL_PHY_REG(cs);
		/* clear the per-step result accumulators */
		for (adll = 0; adll < ADLL_LENGTH; adll++) {
			for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
				VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
				for (pup = start_pup; pup <= end_pup; pup++)
					ctrl_sweepres[adll][if_id][pup] = 0;
			}
		}

		/* clear the saved-value buffers before reading them back */
		for (adll = 0; adll < MAX_INTERFACE_NUM * MAX_BUS_NUM; adll++) {
			ctrl_adll[adll] = 0;
			ctrl_level_phase[adll] = 0;
			ctrl_adll1[adll] = 0;
		}

		/* save leveling value after running algorithm */
		ddr3_tip_read_adll_value(dev_num, ctrl_adll, reg, 0x1f);
		read_phase_value(dev_num, ctrl_level_phase, reg, 0x7 << 6);

		/* TX also needs the centralization value to keep CTX in step */
		if (direction == 0)
			ddr3_tip_read_adll_value(dev_num, ctrl_adll1,
						 CTX_PHY_REG(cs), MASK_ALL_BITS);

		/* Sweep ADLL from 0 to 31 on all interfaces, all pups,
		 * and perform BIST on each stage
		 */
		for (pup = start_pup; pup <= end_pup; pup++) {
			for (adll = 0; adll < ADLL_LENGTH; adll++) {
				for (rep = 0; rep < repeat_num; rep++) {
					/* TX steps by 2, RX by 3 */
					adll_value = (direction == 0) ? (adll * 2) : (adll * 3);
					for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
						/*
						 * Sweep start point: trained ADLL plus 32 per phase
						 * step, backed off by half the sweep span (32 for TX,
						 * 48 for RX), clamped at 0.
						 * NOTE(review): 'if_id * cs * octets_per_if_num'
						 * collapses to 0 when cs == 0 regardless of if_id --
						 * confirm the intended buffer layout.
						 */
						start_adll = ctrl_adll[if_id * cs * octets_per_if_num + pup] +
							     (ctrl_level_phase[if_id * cs *
									     octets_per_if_num +
									     pup] >> 6) * 32;

						if (direction == 0)
							start_adll = (start_adll > 32) ? (start_adll - 32) : 0;
						else
							start_adll = (start_adll > 48) ? (start_adll - 48) : 0;

						adll_value += start_adll;

						/* keep centralization at a fixed offset from leveling */
						gap = ctrl_adll1[if_id * cs * octets_per_if_num + pup] -
						      ctrl_adll[if_id * cs * octets_per_if_num + pup];
						gap = (((adll_value % 32) + gap) % 64);

						/* re-pack: low 5 bits = ADLL, bits 6+ = phase */
						adll_value = ((adll_value % 32) +
							       (((adll_value - (adll_value % 32)) / 32) << 6));

						CHECK_STATUS(ddr3_tip_bus_write(dev_num,
										ACCESS_TYPE_UNICAST,
										if_id,
										pup_access,
										pup,
										DDR_PHY_DATA,
										reg,
										adll_value));
						if (direction == 0)
							CHECK_STATUS(ddr3_tip_bus_write(dev_num,
											ACCESS_TYPE_UNICAST,
											if_id,
											pup_access,
											pup,
											DDR_PHY_DATA,
											CTX_PHY_REG(cs),
											gap));
					}

					/* run all BIST patterns and accumulate errors */
					for (pattern_idx = PATTERN_KILLER_DQ0;
					     pattern_idx < PATTERN_LAST;
					     pattern_idx++) {
						hws_ddr3_run_bist(dev_num, sweep_pattern, res, cs);
						ddr3_tip_reset_fifo_ptr(dev_num);
						for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
							VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
							if (pup != 4) { /* TODO: remove literal */
								ctrl_sweepres[adll][if_id][pup] += res[if_id];
							} else {
								/*
								 * pup 4 (ECC byte, presumably -- confirm):
								 * read the error counter at 0x1458 instead,
								 * then clear 0x1458/0x145c.
								 */
								CHECK_STATUS(ddr3_tip_if_read(dev_num,
											      ACCESS_TYPE_UNICAST,
											      if_id,
											      0x1458,
											      read_data,
											      MASK_ALL_BITS));
								ctrl_sweepres[adll][if_id][pup] += read_data[if_id];
								CHECK_STATUS(ddr3_tip_if_write(dev_num,
											       ACCESS_TYPE_UNICAST,
											       if_id,
											       0x1458,
											       0x0,
											       0xFFFFFFFF));
								CHECK_STATUS(ddr3_tip_if_write(dev_num,
											       ACCESS_TYPE_UNICAST,
											       if_id,
											       0x145C,
											       0x0,
											       0xFFFFFFFF));
							}
						}
					}
				}
			}

			/* restore this pup's trained leveling (and TX centralization) value */
			for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
				start_adll = ctrl_adll[if_id * cs * octets_per_if_num + pup] +
					     ctrl_level_phase[if_id * cs * octets_per_if_num + pup];
				CHECK_STATUS(ddr3_tip_bus_write(dev_num, ACCESS_TYPE_UNICAST, if_id, pup_access, pup,
								DDR_PHY_DATA, reg, start_adll));
				if (direction == 0)
					CHECK_STATUS(ddr3_tip_bus_write(dev_num,
									ACCESS_TYPE_UNICAST,
									if_id,
									pup_access,
									pup,
									DDR_PHY_DATA,
									CTX_PHY_REG(cs),
									ctrl_adll1[if_id *
										   cs *
										   octets_per_if_num +
										   pup]));
			}
		}

		/* CSV header: one column per active interface (or per pup) */
		printf("Final,CS %d,%s,Leveling,Result,Adll,", cs, ((direction == 0) ? "TX" : "RX"));

		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
			if (mode == 1) {
				for (pup = start_pup; pup <= end_pup; pup++) {
					VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
					printf("I/F%d-PHY%d , ", if_id, pup);
				}
			} else {
				printf("I/F%d , ", if_id);
			}
		}
		printf("\n");

		/*
		 * CSV body: the step column is printed relative to the trained
		 * value (adll*2 - 32 / adll*3 - 48, printed via %d so early
		 * steps show as negative offsets).
		 */
		for (adll = 0; adll < ADLL_LENGTH; adll++) {
			adll_value = (direction == 0) ? ((adll * 2) - 32) : ((adll * 3) - 48);
			printf("Final,%s,LevelingSweep,Result, %d ,", ((direction == 0) ? "TX" : "RX"), adll_value);

			for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
				VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
				for (pup = start_pup; pup <= end_pup; pup++)
					printf("%8d , ", ctrl_sweepres[adll][if_id][pup]);
			}
			printf("\n");
		}

		/* write back to the phy the Rx DQS value, we store in the beginning */
		write_leveling_value(dev_num, ctrl_adll, ctrl_level_phase, reg);
		if (direction == 0)
			ddr3_tip_write_adll_value(dev_num, ctrl_adll1, CTX_PHY_REG(cs));

		/* print adll results */
		ddr3_tip_read_adll_value(dev_num, ctrl_adll, reg, MASK_ALL_BITS);
		printf("%s,DQS,Leveling,,,", (direction == 0) ? "Tx" : "Rx");
		print_adll(dev_num, ctrl_adll);
		print_ph(dev_num, ctrl_level_phase);
	}
	ddr3_tip_reset_fifo_ptr(dev_num);

	return 0;
}
1402 #endif /* EXCLUDE_SWITCH_DEBUG */
1403 
print_topology(struct mv_ddr_topology_map * topology_db)1404 void print_topology(struct mv_ddr_topology_map *topology_db)
1405 {
1406 	u32 ui, uj;
1407 	u32 dev_num = 0;
1408 
1409 	printf("\tinterface_mask: 0x%x\n", topology_db->if_act_mask);
1410 	printf("\tNumber of buses: 0x%x\n",
1411 	       ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE));
1412 	printf("\tbus_act_mask: 0x%x\n", topology_db->bus_act_mask);
1413 
1414 	for (ui = 0; ui < MAX_INTERFACE_NUM; ui++) {
1415 		VALIDATE_IF_ACTIVE(topology_db->if_act_mask, ui);
1416 		printf("\n\tInterface ID: %d\n", ui);
1417 		printf("\t\tDDR Frequency: %s\n",
1418 		       convert_freq(topology_db->
1419 				    interface_params[ui].memory_freq));
1420 		printf("\t\tSpeed_bin: %d\n",
1421 		       topology_db->interface_params[ui].speed_bin_index);
1422 		printf("\t\tBus_width: %d\n",
1423 		       (4 << topology_db->interface_params[ui].bus_width));
1424 		printf("\t\tMem_size: %s\n",
1425 		       convert_mem_size(topology_db->
1426 					interface_params[ui].memory_size));
1427 		printf("\t\tCAS-WL: %d\n",
1428 		       topology_db->interface_params[ui].cas_wl);
1429 		printf("\t\tCAS-L: %d\n",
1430 		       topology_db->interface_params[ui].cas_l);
1431 		printf("\t\tTemperature: %d\n",
1432 		       topology_db->interface_params[ui].interface_temp);
1433 		printf("\n");
1434 		for (uj = 0; uj < 4; uj++) {
1435 			printf("\t\tBus %d parameters- CS Mask: 0x%x\t", uj,
1436 			       topology_db->interface_params[ui].
1437 			       as_bus_params[uj].cs_bitmask);
1438 			printf("Mirror: 0x%x\t",
1439 			       topology_db->interface_params[ui].
1440 			       as_bus_params[uj].mirror_enable_bitmask);
1441 			printf("DQS Swap is %s \t",
1442 			       (topology_db->
1443 				interface_params[ui].as_bus_params[uj].
1444 				is_dqs_swap == 1) ? "enabled" : "disabled");
1445 			printf("Ck Swap:%s\t",
1446 			       (topology_db->
1447 				interface_params[ui].as_bus_params[uj].
1448 				is_ck_swap == 1) ? "enabled" : "disabled");
1449 			printf("\n");
1450 		}
1451 	}
1452 }
1453 #endif /* DDR_VIEWER_TOOL */
1454 
1455 #if !defined(EXCLUDE_SWITCH_DEBUG)
1456 /*
1457  * Execute XSB Test transaction (rd/wr/both)
1458  */
/*
 * Execute XSB Test transaction (rd/wr/both): for every active interface,
 * walk 'burst_length + 1' bursts starting at mem_addr, optionally writing
 * a pattern row, optionally reading it back, and (when doing both)
 * comparing the two. Returns MV_OK, or the first miscompare result.
 */
int run_xsb_test(u32 dev_num, u32 mem_addr, u32 write_type,
		 u32 read_type, u32 burst_length)
{
	u32 seq = 0, if_id, addr, cnt;
	int ret = MV_OK;
	u32 data_read[MAX_INTERFACE_NUM];
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		addr = mem_addr;
		for (cnt = 0; cnt <= burst_length; cnt++) {
			/* cycle through pattern rows 0..7 of xsb_test_table */
			seq = (seq + 1) % 8;
			if (write_type != 0)
				CHECK_STATUS(ddr3_tip_ext_write(dev_num, if_id,
								addr, 1,
								xsb_test_table[seq]));
			if (read_type != 0)
				CHECK_STATUS(ddr3_tip_ext_read(dev_num, if_id,
							       addr, 1,
							       data_read));
			if (read_type != 0 && write_type != 0) {
				int cmp = ddr3_tip_compare(if_id,
							   xsb_test_table[seq],
							   data_read, 0xff);
				/* address only advances in compare mode (as before) */
				addr += (EXT_ACCESS_BURST_LENGTH * 4);
				/* keep the first non-OK result */
				if (ret == MV_OK)
					ret = cmp;
			}
		}
	}

	return ret;
}
1496 
1497 #else /*EXCLUDE_SWITCH_DEBUG */
u32 start_xsb_offset = 0;

/*
 * XSB test stub: used when EXCLUDE_SWITCH_DEBUG is defined (the real
 * implementation above is compiled out); always reports success.
 */
int run_xsb_test(u32 dev_num, u32 mem_addr, u32 write_type,
		 u32 read_type, u32 burst_length)
{
	return MV_OK;
}
1505 
1506 #endif /* EXCLUDE_SWITCH_DEBUG */
1507