1 // SPDX-License-Identifier: GPL-2.0
2 /*
3 * Copyright (C) Marvell International Ltd. and its affiliates
4 */
5
6 #include "ddr3_init.h"
7 #include "mv_ddr_regs.h"
8 #include "ddr_training_ip_db.h"
9
/* alternating data patterns (0101... / 1010...) used by the training engine */
#define PATTERN_1 0x55555555
#define PATTERN_2 0xaaaaaaaa

/*
 * Sanity check for a training search window [e1, e2]:
 * the window must be wider than 33 taps and must start below tap 67.
 */
#define VALIDATE_TRAINING_LIMIT(e1, e2) \
((((e2) - (e1) + 1) > 33) && ((e1) < 67))
15
/* backup storage for PHY register values, per interface / subphy / bit */
u32 phy_reg_bk[MAX_INTERFACE_NUM][MAX_BUS_NUM][BUS_WIDTH_IN_BITS];

/*
 * Flat per-bit training result buffer; indexed per search direction,
 * then per interface, then per (subphy, bit) — see ddr3_tip_get_buf_ptr().
 */
u32 training_res[MAX_INTERFACE_NUM * MAX_BUS_NUM * BUS_WIDTH_IN_BITS *
	HWS_SEARCH_DIR_LIMIT];
u8 byte_status[MAX_INTERFACE_NUM][MAX_BUS_NUM]; /* holds the bit status in the byte in wrapper function*/
21
/*
 * Per-bit training result/mask registers, indexed as (pup * 8 + bit).
 * Entries for PUPs 5..8 are compiled in only for 9-subphy configurations
 * (MAX_BUS_NUM == 9); the list is terminated by 0xffff.
 */
u16 mask_results_dq_reg_map[] = {
	RESULT_CONTROL_PUP_0_BIT_0_REG, RESULT_CONTROL_PUP_0_BIT_1_REG,
	RESULT_CONTROL_PUP_0_BIT_2_REG, RESULT_CONTROL_PUP_0_BIT_3_REG,
	RESULT_CONTROL_PUP_0_BIT_4_REG, RESULT_CONTROL_PUP_0_BIT_5_REG,
	RESULT_CONTROL_PUP_0_BIT_6_REG, RESULT_CONTROL_PUP_0_BIT_7_REG,
	RESULT_CONTROL_PUP_1_BIT_0_REG, RESULT_CONTROL_PUP_1_BIT_1_REG,
	RESULT_CONTROL_PUP_1_BIT_2_REG, RESULT_CONTROL_PUP_1_BIT_3_REG,
	RESULT_CONTROL_PUP_1_BIT_4_REG, RESULT_CONTROL_PUP_1_BIT_5_REG,
	RESULT_CONTROL_PUP_1_BIT_6_REG, RESULT_CONTROL_PUP_1_BIT_7_REG,
	RESULT_CONTROL_PUP_2_BIT_0_REG, RESULT_CONTROL_PUP_2_BIT_1_REG,
	RESULT_CONTROL_PUP_2_BIT_2_REG, RESULT_CONTROL_PUP_2_BIT_3_REG,
	RESULT_CONTROL_PUP_2_BIT_4_REG, RESULT_CONTROL_PUP_2_BIT_5_REG,
	RESULT_CONTROL_PUP_2_BIT_6_REG, RESULT_CONTROL_PUP_2_BIT_7_REG,
	RESULT_CONTROL_PUP_3_BIT_0_REG, RESULT_CONTROL_PUP_3_BIT_1_REG,
	RESULT_CONTROL_PUP_3_BIT_2_REG, RESULT_CONTROL_PUP_3_BIT_3_REG,
	RESULT_CONTROL_PUP_3_BIT_4_REG, RESULT_CONTROL_PUP_3_BIT_5_REG,
	RESULT_CONTROL_PUP_3_BIT_6_REG, RESULT_CONTROL_PUP_3_BIT_7_REG,
	RESULT_CONTROL_PUP_4_BIT_0_REG, RESULT_CONTROL_PUP_4_BIT_1_REG,
	RESULT_CONTROL_PUP_4_BIT_2_REG, RESULT_CONTROL_PUP_4_BIT_3_REG,
	RESULT_CONTROL_PUP_4_BIT_4_REG, RESULT_CONTROL_PUP_4_BIT_5_REG,
	RESULT_CONTROL_PUP_4_BIT_6_REG, RESULT_CONTROL_PUP_4_BIT_7_REG,
#if MAX_BUS_NUM == 9
	RESULT_CONTROL_PUP_5_BIT_0_REG, RESULT_CONTROL_PUP_5_BIT_1_REG,
	RESULT_CONTROL_PUP_5_BIT_2_REG, RESULT_CONTROL_PUP_5_BIT_3_REG,
	RESULT_CONTROL_PUP_5_BIT_4_REG, RESULT_CONTROL_PUP_5_BIT_5_REG,
	RESULT_CONTROL_PUP_5_BIT_6_REG, RESULT_CONTROL_PUP_5_BIT_7_REG,
	RESULT_CONTROL_PUP_6_BIT_0_REG, RESULT_CONTROL_PUP_6_BIT_1_REG,
	RESULT_CONTROL_PUP_6_BIT_2_REG, RESULT_CONTROL_PUP_6_BIT_3_REG,
	RESULT_CONTROL_PUP_6_BIT_4_REG, RESULT_CONTROL_PUP_6_BIT_5_REG,
	RESULT_CONTROL_PUP_6_BIT_6_REG, RESULT_CONTROL_PUP_6_BIT_7_REG,
	RESULT_CONTROL_PUP_7_BIT_0_REG, RESULT_CONTROL_PUP_7_BIT_1_REG,
	RESULT_CONTROL_PUP_7_BIT_2_REG, RESULT_CONTROL_PUP_7_BIT_3_REG,
	RESULT_CONTROL_PUP_7_BIT_4_REG, RESULT_CONTROL_PUP_7_BIT_5_REG,
	RESULT_CONTROL_PUP_7_BIT_6_REG, RESULT_CONTROL_PUP_7_BIT_7_REG,
	RESULT_CONTROL_PUP_8_BIT_0_REG, RESULT_CONTROL_PUP_8_BIT_1_REG,
	RESULT_CONTROL_PUP_8_BIT_2_REG, RESULT_CONTROL_PUP_8_BIT_3_REG,
	RESULT_CONTROL_PUP_8_BIT_4_REG, RESULT_CONTROL_PUP_8_BIT_5_REG,
	RESULT_CONTROL_PUP_8_BIT_6_REG, RESULT_CONTROL_PUP_8_BIT_7_REG,
#endif
	0xffff
};
63
/*
 * Per-byte (subphy) training result/mask registers, indexed by pup
 * number; terminated by 0xffff.  PUPs 5..8 exist only when
 * MAX_BUS_NUM == 9.
 */
u16 mask_results_pup_reg_map[] = {
	RESULT_CONTROL_BYTE_PUP_0_REG, RESULT_CONTROL_BYTE_PUP_1_REG,
	RESULT_CONTROL_BYTE_PUP_2_REG, RESULT_CONTROL_BYTE_PUP_3_REG,
	RESULT_CONTROL_BYTE_PUP_4_REG,
#if MAX_BUS_NUM == 9
	RESULT_CONTROL_BYTE_PUP_5_REG, RESULT_CONTROL_BYTE_PUP_6_REG,
	RESULT_CONTROL_BYTE_PUP_7_REG, RESULT_CONTROL_BYTE_PUP_8_REG,
#endif
	0xffff
};
74
#if MAX_BUS_NUM == 5
/*
 * Per-bit result register map for boards where the ECC byte sits on
 * PUP 3: positions 3 and 4 are intentionally swapped relative to
 * mask_results_dq_reg_map[] (byte 3 maps to the PUP 4 registers and
 * the ECC byte maps to PUP 3).
 */
u16 mask_results_dq_reg_map_pup3_ecc[] = {
	RESULT_CONTROL_PUP_0_BIT_0_REG, RESULT_CONTROL_PUP_0_BIT_1_REG,
	RESULT_CONTROL_PUP_0_BIT_2_REG, RESULT_CONTROL_PUP_0_BIT_3_REG,
	RESULT_CONTROL_PUP_0_BIT_4_REG, RESULT_CONTROL_PUP_0_BIT_5_REG,
	RESULT_CONTROL_PUP_0_BIT_6_REG, RESULT_CONTROL_PUP_0_BIT_7_REG,
	RESULT_CONTROL_PUP_1_BIT_0_REG, RESULT_CONTROL_PUP_1_BIT_1_REG,
	RESULT_CONTROL_PUP_1_BIT_2_REG, RESULT_CONTROL_PUP_1_BIT_3_REG,
	RESULT_CONTROL_PUP_1_BIT_4_REG, RESULT_CONTROL_PUP_1_BIT_5_REG,
	RESULT_CONTROL_PUP_1_BIT_6_REG, RESULT_CONTROL_PUP_1_BIT_7_REG,
	RESULT_CONTROL_PUP_2_BIT_0_REG, RESULT_CONTROL_PUP_2_BIT_1_REG,
	RESULT_CONTROL_PUP_2_BIT_2_REG, RESULT_CONTROL_PUP_2_BIT_3_REG,
	RESULT_CONTROL_PUP_2_BIT_4_REG, RESULT_CONTROL_PUP_2_BIT_5_REG,
	RESULT_CONTROL_PUP_2_BIT_6_REG, RESULT_CONTROL_PUP_2_BIT_7_REG,
	RESULT_CONTROL_PUP_4_BIT_0_REG, RESULT_CONTROL_PUP_4_BIT_1_REG,
	RESULT_CONTROL_PUP_4_BIT_2_REG, RESULT_CONTROL_PUP_4_BIT_3_REG,
	RESULT_CONTROL_PUP_4_BIT_4_REG, RESULT_CONTROL_PUP_4_BIT_5_REG,
	RESULT_CONTROL_PUP_4_BIT_6_REG, RESULT_CONTROL_PUP_4_BIT_7_REG,
	RESULT_CONTROL_PUP_3_BIT_0_REG, RESULT_CONTROL_PUP_3_BIT_1_REG,
	RESULT_CONTROL_PUP_3_BIT_2_REG, RESULT_CONTROL_PUP_3_BIT_3_REG,
	RESULT_CONTROL_PUP_3_BIT_4_REG, RESULT_CONTROL_PUP_3_BIT_5_REG,
	RESULT_CONTROL_PUP_3_BIT_6_REG, RESULT_CONTROL_PUP_3_BIT_7_REG
};
#endif
99
#if MAX_BUS_NUM == 5
/*
 * Per-byte result register map for the PUP3-as-ECC layout.
 * NOTE(review): BYTE_PUP_4 appears twice (positions 3 and 4) while the
 * per-bit map above swaps PUP 3 and PUP 4 — looks deliberate (byte 3 is
 * redirected to PUP 4 when ECC occupies PUP 3), but confirm against the
 * board's subphy wiring before relying on position 4.
 */
u16 mask_results_pup_reg_map_pup3_ecc[] = {
	RESULT_CONTROL_BYTE_PUP_0_REG, RESULT_CONTROL_BYTE_PUP_1_REG,
	RESULT_CONTROL_BYTE_PUP_2_REG, RESULT_CONTROL_BYTE_PUP_4_REG,
	RESULT_CONTROL_BYTE_PUP_4_REG
};
#endif
107
/*
 * ODPG pattern descriptors for 64-bit interfaces, indexed by
 * enum hws_pattern.  Each entry gives the ODPG phase/burst programming
 * and the SRAM start address of the pattern data.
 */
struct pattern_info pattern_table_64[] = {
	/*
	 * num_of_phases_tx, tx_burst_size;
	 * delay_between_bursts, num_of_phases_rx,
	 * start_addr, pattern_len
	 */
	{0x7, 0x7, 2, 0x7, 0x00000, 8},		/* PATTERN_PBS1 */
	{0x7, 0x7, 2, 0x7, 0x00080, 8},		/* PATTERN_PBS2 */
	{0x7, 0x7, 2, 0x7, 0x00100, 8},		/* PATTERN_PBS3 */
	{0x7, 0x7, 2, 0x7, 0x00030, 8},		/* PATTERN_TEST */
	{0x7, 0x7, 2, 0x7, 0x00100, 8},		/* PATTERN_RL */
	{0x7, 0x7, 2, 0x7, 0x00100, 8},		/* PATTERN_RL2 */
	{0x1f, 0xf, 2, 0xf, 0x00680, 32},	/* PATTERN_STATIC_PBS */
	{0x1f, 0xf, 2, 0xf, 0x00a80, 32},	/* PATTERN_KILLER_DQ0 */
	{0x1f, 0xf, 2, 0xf, 0x01280, 32},	/* PATTERN_KILLER_DQ1 */
	{0x1f, 0xf, 2, 0xf, 0x01a80, 32},	/* PATTERN_KILLER_DQ2 */
	{0x1f, 0xf, 2, 0xf, 0x02280, 32},	/* PATTERN_KILLER_DQ3 */
	{0x1f, 0xf, 2, 0xf, 0x02a80, 32},	/* PATTERN_KILLER_DQ4 */
	{0x1f, 0xf, 2, 0xf, 0x03280, 32},	/* PATTERN_KILLER_DQ5 */
	{0x1f, 0xf, 2, 0xf, 0x03a80, 32},	/* PATTERN_KILLER_DQ6 */
	{0x1f, 0xf, 2, 0xf, 0x04280, 32},	/* PATTERN_KILLER_DQ7 */
	{0x1f, 0xf, 2, 0xf, 0x00e80, 32},	/* PATTERN_KILLER_DQ0_64 */
	{0x1f, 0xf, 2, 0xf, 0x01680, 32},	/* PATTERN_KILLER_DQ1_64 */
	{0x1f, 0xf, 2, 0xf, 0x01e80, 32},	/* PATTERN_KILLER_DQ2_64 */
	{0x1f, 0xf, 2, 0xf, 0x02680, 32},	/* PATTERN_KILLER_DQ3_64 */
	{0x1f, 0xf, 2, 0xf, 0x02e80, 32},	/* PATTERN_KILLER_DQ4_64 */
	{0x1f, 0xf, 2, 0xf, 0x03680, 32},	/* PATTERN_KILLER_DQ5_64 */
	{0x1f, 0xf, 2, 0xf, 0x03e80, 32},	/* PATTERN_KILLER_DQ6_64 */
	{0x1f, 0xf, 2, 0xf, 0x04680, 32},	/* PATTERN_KILLER_DQ7_64 */
	{0x1f, 0xf, 2, 0xf, 0x04a80, 32},	/* PATTERN_KILLER_DQ0_INV */
	{0x1f, 0xf, 2, 0xf, 0x05280, 32},	/* PATTERN_KILLER_DQ1_INV */
	{0x1f, 0xf, 2, 0xf, 0x05a80, 32},	/* PATTERN_KILLER_DQ2_INV */
	{0x1f, 0xf, 2, 0xf, 0x06280, 32},	/* PATTERN_KILLER_DQ3_INV */
	{0x1f, 0xf, 2, 0xf, 0x06a80, 32},	/* PATTERN_KILLER_DQ4_INV */
	{0x1f, 0xf, 2, 0xf, 0x07280, 32},	/* PATTERN_KILLER_DQ5_INV */
	{0x1f, 0xf, 2, 0xf, 0x07a80, 32},	/* PATTERN_KILLER_DQ6_INV */
	{0x1f, 0xf, 2, 0xf, 0x08280, 32},	/* PATTERN_KILLER_DQ7_INV */
	{0x1f, 0xf, 2, 0xf, 0x04e80, 32},	/* PATTERN_KILLER_DQ0_INV_64 */
	{0x1f, 0xf, 2, 0xf, 0x05680, 32},	/* PATTERN_KILLER_DQ1_INV_64 */
	{0x1f, 0xf, 2, 0xf, 0x05e80, 32},	/* PATTERN_KILLER_DQ2_INV_64 */
	{0x1f, 0xf, 2, 0xf, 0x06680, 32},	/* PATTERN_KILLER_DQ3_INV_64 */
	{0x1f, 0xf, 2, 0xf, 0x06e80, 32},	/* PATTERN_KILLER_DQ4_INV_64 */
	{0x1f, 0xf, 2, 0xf, 0x07680, 32},	/* PATTERN_KILLER_DQ5_INV_64 */
	{0x1f, 0xf, 2, 0xf, 0x07e80, 32},	/* PATTERN_KILLER_DQ6_INV_64 */
	{0x1f, 0xf, 2, 0xf, 0x08680, 32},	/* PATTERN_KILLER_DQ7_INV_64 */
	{0x1f, 0xf, 2, 0xf, 0x08a80, 32},	/* PATTERN_SSO_FULL_XTALK_DQ0 */
	{0x1f, 0xf, 2, 0xf, 0x09280, 32},	/* PATTERN_SSO_FULL_XTALK_DQ1 */
	{0x1f, 0xf, 2, 0xf, 0x09a80, 32},	/* PATTERN_SSO_FULL_XTALK_DQ2 */
	{0x1f, 0xf, 2, 0xf, 0x0a280, 32},	/* PATTERN_SSO_FULL_XTALK_DQ3 */
	{0x1f, 0xf, 2, 0xf, 0x0aa80, 32},	/* PATTERN_SSO_FULL_XTALK_DQ4 */
	{0x1f, 0xf, 2, 0xf, 0x0b280, 32},	/* PATTERN_SSO_FULL_XTALK_DQ5 */
	{0x1f, 0xf, 2, 0xf, 0x0ba80, 32},	/* PATTERN_SSO_FULL_XTALK_DQ6 */
	{0x1f, 0xf, 2, 0xf, 0x0c280, 32},	/* PATTERN_SSO_FULL_XTALK_DQ7 */
	{0x1f, 0xf, 2, 0xf, 0x08e80, 32},	/* PATTERN_SSO_FULL_XTALK_DQ0_64 */
	{0x1f, 0xf, 2, 0xf, 0x09680, 32},	/* PATTERN_SSO_FULL_XTALK_DQ1_64 */
	{0x1f, 0xf, 2, 0xf, 0x09e80, 32},	/* PATTERN_SSO_FULL_XTALK_DQ2_64 */
	{0x1f, 0xf, 2, 0xf, 0x0a680, 32},	/* PATTERN_SSO_FULL_XTALK_DQ3_64 */
	{0x1f, 0xf, 2, 0xf, 0x0ae80, 32},	/* PATTERN_SSO_FULL_XTALK_DQ4_64 */
	{0x1f, 0xf, 2, 0xf, 0x0b680, 32},	/* PATTERN_SSO_FULL_XTALK_DQ5_64 */
	{0x1f, 0xf, 2, 0xf, 0x0be80, 32},	/* PATTERN_SSO_FULL_XTALK_DQ6_64 */
	{0x1f, 0xf, 2, 0xf, 0x0c680, 32},	/* PATTERN_SSO_FULL_XTALK_DQ7_64 */
	{0x1f, 0xf, 2, 0xf, 0x0ca80, 32},	/* PATTERN_SSO_XTALK_FREE_DQ0 */
	{0x1f, 0xf, 2, 0xf, 0x0d280, 32},	/* PATTERN_SSO_XTALK_FREE_DQ1 */
	{0x1f, 0xf, 2, 0xf, 0x0da80, 32},	/* PATTERN_SSO_XTALK_FREE_DQ2 */
	{0x1f, 0xf, 2, 0xf, 0x0e280, 32},	/* PATTERN_SSO_XTALK_FREE_DQ3 */
	{0x1f, 0xf, 2, 0xf, 0x0ea80, 32},	/* PATTERN_SSO_XTALK_FREE_DQ4 */
	{0x1f, 0xf, 2, 0xf, 0x0f280, 32},	/* PATTERN_SSO_XTALK_FREE_DQ5 */
	{0x1f, 0xf, 2, 0xf, 0x0fa80, 32},	/* PATTERN_SSO_XTALK_FREE_DQ6 */
	{0x1f, 0xf, 2, 0xf, 0x10280, 32},	/* PATTERN_SSO_XTALK_FREE_DQ7 */
	{0x1f, 0xf, 2, 0xf, 0x0ce80, 32},	/* PATTERN_SSO_XTALK_FREE_DQ0_64 */
	{0x1f, 0xf, 2, 0xf, 0x0d680, 32},	/* PATTERN_SSO_XTALK_FREE_DQ1_64 */
	{0x1f, 0xf, 2, 0xf, 0x0de80, 32},	/* PATTERN_SSO_XTALK_FREE_DQ2_64 */
	{0x1f, 0xf, 2, 0xf, 0x0e680, 32},	/* PATTERN_SSO_XTALK_FREE_DQ3_64 */
	{0x1f, 0xf, 2, 0xf, 0x0ee80, 32},	/* PATTERN_SSO_XTALK_FREE_DQ4_64 */
	{0x1f, 0xf, 2, 0xf, 0x0f680, 32},	/* PATTERN_SSO_XTALK_FREE_DQ5_64 */
	{0x1f, 0xf, 2, 0xf, 0x0fe80, 32},	/* PATTERN_SSO_XTALK_FREE_DQ6_64 */
	{0x1f, 0xf, 2, 0xf, 0x10680, 32},	/* PATTERN_SSO_XTALK_FREE_DQ7_64 */
	{0x1f, 0xf, 2, 0xf, 0x10a80, 32},	/* PATTERN_ISI_XTALK_FREE */
	{0x1f, 0xf, 2, 0xf, 0x10e80, 32},	/* PATTERN_ISI_XTALK_FREE_64 */
	{0x1f, 0xf, 2, 0xf, 0x11280, 32},	/* PATTERN_VREF */
	{0x1f, 0xf, 2, 0xf, 0x11680, 32},	/* PATTERN_VREF_64 */
	{0x1f, 0xf, 2, 0xf, 0x11a80, 32},	/* PATTERN_VREF_INV */
	{0x1f, 0xf, 2, 0xf, 0x11e80, 32},	/* PATTERN_FULL_SSO_0T */
	{0x1f, 0xf, 2, 0xf, 0x12280, 32},	/* PATTERN_FULL_SSO_1T */
	{0x1f, 0xf, 2, 0xf, 0x12680, 32},	/* PATTERN_FULL_SSO_2T */
	{0x1f, 0xf, 2, 0xf, 0x12a80, 32},	/* PATTERN_FULL_SSO_3T */
	{0x1f, 0xf, 2, 0xf, 0x12e80, 32},	/* PATTERN_RESONANCE_1T */
	{0x1f, 0xf, 2, 0xf, 0x13280, 32},	/* PATTERN_RESONANCE_2T */
	{0x1f, 0xf, 2, 0xf, 0x13680, 32},	/* PATTERN_RESONANCE_3T */
	{0x1f, 0xf, 2, 0xf, 0x13a80, 32},	/* PATTERN_RESONANCE_4T */
	{0x1f, 0xf, 2, 0xf, 0x13e80, 32},	/* PATTERN_RESONANCE_5T */
	{0x1f, 0xf, 2, 0xf, 0x14280, 32},	/* PATTERN_RESONANCE_6T */
	{0x1f, 0xf, 2, 0xf, 0x14680, 32},	/* PATTERN_RESONANCE_7T */
	{0x1f, 0xf, 2, 0xf, 0x14a80, 32},	/* PATTERN_RESONANCE_8T */
	{0x1f, 0xf, 2, 0xf, 0x14e80, 32},	/* PATTERN_RESONANCE_9T */
	{0x1f, 0xf, 2, 0xf, 0x15280, 32},	/* PATTERN_ZERO */
	{0x1f, 0xf, 2, 0xf, 0x15680, 32}	/* PATTERN_ONE */
	/* Note: actual start_address is "<< 3" of defined address */
};
207
208 #if defined(CONFIG_DDR4)
/*
 * ODPG pattern descriptors for 16-bit interfaces (DDR4 build), indexed
 * by enum hws_pattern.
 */
struct pattern_info pattern_table_16[] = {
	/*
	 * num tx phases, tx burst, delay between, rx pattern,
	 * start_address, pattern_len
	 */
	{0x1, 0x1, 2, 0x1, 0x0000, 2},	/* PATTERN_PBS1*/
	{0x1, 0x1, 2, 0x1, 0x0080, 2},	/* PATTERN_PBS2*/
	{0x1, 0x1, 2, 0x1, 0x0100, 2},	/* PATTERN_PBS3*/
	{0x1, 0x1, 2, 0x1, 0x0180, 2},	/* PATTERN_TEST*/
	{0x1, 0x1, 2, 0x1, 0x0200, 2},	/* PATTERN_RL*/
	{0x1, 0x1, 2, 0x1, 0x0280, 2},	/* PATTERN_RL2*/
	{0xf, 0x7, 2, 0x7, 0x0680, 16},	/* PATTERN_STATIC_PBS*/
	{0xf, 0x7, 2, 0x7, 0x0A80, 16},	/* PATTERN_KILLER_DQ0*/
	{0xf, 0x7, 2, 0x7, 0x0E80, 16},	/* PATTERN_KILLER_DQ1*/
	{0xf, 0x7, 2, 0x7, 0x1280, 16},	/* PATTERN_KILLER_DQ2*/
	{0xf, 0x7, 2, 0x7, 0x1680, 16},	/* PATTERN_KILLER_DQ3*/
	{0xf, 0x7, 2, 0x7, 0x1A80, 16},	/* PATTERN_KILLER_DQ4*/
	{0xf, 0x7, 2, 0x7, 0x1E80, 16},	/* PATTERN_KILLER_DQ5*/
	{0xf, 0x7, 2, 0x7, 0x2280, 16},	/* PATTERN_KILLER_DQ6*/
	{0xf, 0x7, 2, 0x7, 0x2680, 16},	/* PATTERN_KILLER_DQ7*/
	{0xf, 0x7, 2, 0x7, 0x2A80, 16},	/* PATTERN_KILLER_DQ0_INV*/
	{0xf, 0x7, 2, 0x7, 0x2E80, 16},	/* PATTERN_KILLER_DQ1_INV*/
	{0xf, 0x7, 2, 0x7, 0x3280, 16},	/* PATTERN_KILLER_DQ2_INV*/
	{0xf, 0x7, 2, 0x7, 0x3680, 16},	/* PATTERN_KILLER_DQ3_INV*/
	{0xf, 0x7, 2, 0x7, 0x3A80, 16},	/* PATTERN_KILLER_DQ4_INV*/
	{0xf, 0x7, 2, 0x7, 0x3E80, 16},	/* PATTERN_KILLER_DQ5_INV*/
	{0xf, 0x7, 2, 0x7, 0x4280, 16},	/* PATTERN_KILLER_DQ6_INV*/
	{0xf, 0x7, 2, 0x7, 0x4680, 16},	/* PATTERN_KILLER_DQ7_INV*/
	{0xf, 0x7, 2, 0x7, 0x4A80, 16},	/* PATTERN_VREF*/
	{0xf, 0x7, 2, 0x7, 0x4E80, 16},	/* PATTERN_VREF_INV*/
	{0xf, 0x7, 2, 0x7, 0x5280, 16},	/* PATTERN_FULL_SSO_0T*/
	{0xf, 0x7, 2, 0x7, 0x5680, 16},	/* PATTERN_FULL_SSO_1T*/
	{0xf, 0x7, 2, 0x7, 0x5A80, 16},	/* PATTERN_FULL_SSO_2T*/
	{0xf, 0x7, 2, 0x7, 0x5E80, 16},	/* PATTERN_FULL_SSO_3T*/
	{0xf, 0x7, 2, 0x7, 0x6280, 16},	/* PATTERN_ZERO */
	{0xf, 0x7, 2, 0x7, 0x6680, 16},	/* PATTERN_ONE */
	{0xf, 0x7, 2, 0x7, 0x6A80, 16},	/* PATTERN_SSO_FULL_XTALK_DQ0*/
	{0xf, 0x7, 2, 0x7, 0x6E80, 16},	/* PATTERN_SSO_FULL_XTALK_DQ1*/
	{0xf, 0x7, 2, 0x7, 0x7280, 16},	/* PATTERN_SSO_FULL_XTALK_DQ2*/
	{0xf, 0x7, 2, 0x7, 0x7680, 16},	/* PATTERN_SSO_FULL_XTALK_DQ3*/
	{0xf, 0x7, 2, 0x7, 0x7A80, 16},	/* PATTERN_SSO_FULL_XTALK_DQ4*/
	{0xf, 0x7, 2, 0x7, 0x7E80, 16},	/* PATTERN_SSO_FULL_XTALK_DQ5*/
	{0xf, 0x7, 2, 0x7, 0x8280, 16},	/* PATTERN_SSO_FULL_XTALK_DQ6*/
	{0xf, 0x7, 2, 0x7, 0x8680, 16},	/* PATTERN_SSO_FULL_XTALK_DQ7*/
	{0xf, 0x7, 2, 0x7, 0x8A80, 16},	/* PATTERN_SSO_XTALK_FREE_DQ0*/
	{0xf, 0x7, 2, 0x7, 0x8E80, 16},	/* PATTERN_SSO_XTALK_FREE_DQ1*/
	{0xf, 0x7, 2, 0x7, 0x9280, 16},	/* PATTERN_SSO_XTALK_FREE_DQ2*/
	{0xf, 0x7, 2, 0x7, 0x9680, 16},	/* PATTERN_SSO_XTALK_FREE_DQ3*/
	{0xf, 0x7, 2, 0x7, 0x9A80, 16},	/* PATTERN_SSO_XTALK_FREE_DQ4*/
	{0xf, 0x7, 2, 0x7, 0x9E80, 16},	/* PATTERN_SSO_XTALK_FREE_DQ5*/
	{0xf, 0x7, 2, 0x7, 0xA280, 16},	/* PATTERN_SSO_XTALK_FREE_DQ6*/
	{0xf, 0x7, 2, 0x7, 0xA680, 16},	/* PATTERN_SSO_XTALK_FREE_DQ7*/
	{0xf, 0x7, 2, 0x7, 0xAA80, 16},	/* PATTERN_ISI_XTALK_FREE*/
	{0xf, 0x7, 2, 0x7, 0xAE80, 16},	/* PATTERN_RESONANCE_1T*/
	{0xf, 0x7, 2, 0x7, 0xB280, 16},	/* PATTERN_RESONANCE_2T*/
	{0xf, 0x7, 2, 0x7, 0xB680, 16},	/* PATTERN_RESONANCE_3T*/
	{0xf, 0x7, 2, 0x7, 0xBA80, 16},	/* PATTERN_RESONANCE_4T*/
	{0xf, 0x7, 2, 0x7, 0xBE80, 16},	/* PATTERN_RESONANCE_5T*/
	{0xf, 0x7, 2, 0x7, 0xC280, 16},	/* PATTERN_RESONANCE_6T*/
	{0xf, 0x7, 2, 0x7, 0xC680, 16},	/* PATTERN_RESONANCE_7T*/
	{0xf, 0x7, 2, 0x7, 0xca80, 16},	/* PATTERN_RESONANCE_8T*/
	{0xf, 0x7, 2, 0x7, 0xce80, 16}	/* PATTERN_RESONANCE_9T*/
	/* Note: actual start_address is "<< 3" of defined address */
};
273
/*
 * ODPG pattern descriptors for 32-bit interfaces (DDR4 build), indexed
 * by enum hws_pattern.
 */
struct pattern_info pattern_table_32[] = {
	/*
	 * num tx phases, tx burst, delay between, rx pattern,
	 * start_address, pattern_len
	 */
	{0x3, 0x3, 2, 0x3, 0x0000, 4},	/* PATTERN_PBS1*/
	{0x3, 0x3, 2, 0x3, 0x0020, 4},	/* PATTERN_PBS2*/
	{0x3, 0x3, 2, 0x3, 0x0040, 4},	/* PATTERN_PBS3*/
	{0x3, 0x3, 2, 0x3, 0x0060, 4},	/* PATTERN_TEST*/
	{0x3, 0x3, 2, 0x3, 0x0080, 4},	/* PATTERN_RL*/
	{0x3, 0x3, 2, 0x3, 0x00a0, 4},	/* PATTERN_RL2*/
	{0x1f, 0xf, 2, 0xf, 0x00c0, 32},	/* PATTERN_STATIC_PBS*/
	{0x1f, 0xf, 2, 0xf, 0x00e0, 32},	/* PATTERN_KILLER_DQ0*/
	{0x1f, 0xf, 2, 0xf, 0x0100, 32},	/* PATTERN_KILLER_DQ1*/
	{0x1f, 0xf, 2, 0xf, 0x0120, 32},	/* PATTERN_KILLER_DQ2*/
	{0x1f, 0xf, 2, 0xf, 0x0140, 32},	/* PATTERN_KILLER_DQ3*/
	{0x1f, 0xf, 2, 0xf, 0x0160, 32},	/* PATTERN_KILLER_DQ4*/
	{0x1f, 0xf, 2, 0xf, 0x0180, 32},	/* PATTERN_KILLER_DQ5*/
	{0x1f, 0xf, 2, 0xf, 0x01a0, 32},	/* PATTERN_KILLER_DQ6*/
	{0x1f, 0xf, 2, 0xf, 0x01c0, 32},	/* PATTERN_KILLER_DQ7*/
	{0x1f, 0xf, 2, 0xf, 0x01e0, 32},	/* PATTERN_KILLER_DQ0_INV*/
	{0x1f, 0xf, 2, 0xf, 0x0200, 32},	/* PATTERN_KILLER_DQ1_INV*/
	{0x1f, 0xf, 2, 0xf, 0x0220, 32},	/* PATTERN_KILLER_DQ2_INV*/
	{0x1f, 0xf, 2, 0xf, 0x0240, 32},	/* PATTERN_KILLER_DQ3_INV*/
	{0x1f, 0xf, 2, 0xf, 0x0260, 32},	/* PATTERN_KILLER_DQ4_INV*/
	{0x1f, 0xf, 2, 0xf, 0x0280, 32},	/* PATTERN_KILLER_DQ5_INV*/
	{0x1f, 0xf, 2, 0xf, 0x02a0, 32},	/* PATTERN_KILLER_DQ6_INV*/
	{0x1f, 0xf, 2, 0xf, 0x02c0, 32},	/* PATTERN_KILLER_DQ7_INV*/
	{0x1f, 0xf, 2, 0xf, 0x02e0, 32},	/* PATTERN_VREF*/
	{0x1f, 0xf, 2, 0xf, 0x0300, 32},	/* PATTERN_VREF_INV*/
	{0x1f, 0xf, 2, 0xf, 0x0320, 32},	/* PATTERN_FULL_SSO_0T*/
	{0x1f, 0xf, 2, 0xf, 0x0340, 32},	/* PATTERN_FULL_SSO_1T*/
	{0x1f, 0xf, 2, 0xf, 0x0360, 32},	/* PATTERN_FULL_SSO_2T*/
	{0x1f, 0xf, 2, 0xf, 0x0380, 32},	/* PATTERN_FULL_SSO_3T*/
	{0x1f, 0xf, 2, 0xf, 0x6280, 32},	/* PATTERN_ZERO */
	{0x1f, 0xf, 2, 0xf, 0x6680, 32},	/* PATTERN_ONE */
	{0x1f, 0xf, 2, 0xf, 0x6A80, 32},	/* PATTERN_SSO_FULL_XTALK_DQ0*/
	{0x1f, 0xf, 2, 0xf, 0x6E80, 32},	/* PATTERN_SSO_FULL_XTALK_DQ1*/
	{0x1f, 0xf, 2, 0xf, 0x7280, 32},	/* PATTERN_SSO_FULL_XTALK_DQ2*/
	{0x1f, 0xf, 2, 0xf, 0x7680, 32},	/* PATTERN_SSO_FULL_XTALK_DQ3*/
	{0x1f, 0xf, 2, 0xf, 0x7A80, 32},	/* PATTERN_SSO_FULL_XTALK_DQ4*/
	{0x1f, 0xf, 2, 0xf, 0x7E80, 32},	/* PATTERN_SSO_FULL_XTALK_DQ5*/
	{0x1f, 0xf, 2, 0xf, 0x8280, 32},	/* PATTERN_SSO_FULL_XTALK_DQ6*/
	{0x1f, 0xf, 2, 0xf, 0x8680, 32},	/* PATTERN_SSO_FULL_XTALK_DQ7*/
	{0x1f, 0xf, 2, 0xf, 0x8A80, 32},	/* PATTERN_SSO_XTALK_FREE_DQ0*/
	{0x1f, 0xf, 2, 0xf, 0x8E80, 32},	/* PATTERN_SSO_XTALK_FREE_DQ1*/
	{0x1f, 0xf, 2, 0xf, 0x9280, 32},	/* PATTERN_SSO_XTALK_FREE_DQ2*/
	{0x1f, 0xf, 2, 0xf, 0x9680, 32},	/* PATTERN_SSO_XTALK_FREE_DQ3*/
	{0x1f, 0xf, 2, 0xf, 0x9A80, 32},	/* PATTERN_SSO_XTALK_FREE_DQ4*/
	{0x1f, 0xf, 2, 0xf, 0x9E80, 32},	/* PATTERN_SSO_XTALK_FREE_DQ5*/
	{0x1f, 0xf, 2, 0xf, 0xA280, 32},	/* PATTERN_SSO_XTALK_FREE_DQ6*/
	{0x1f, 0xf, 2, 0xf, 0xA680, 32},	/* PATTERN_SSO_XTALK_FREE_DQ7*/
	{0x1f, 0xf, 2, 0xf, 0xAA80, 32},	/* PATTERN_ISI_XTALK_FREE*/
	{0x1f, 0xf, 2, 0xf, 0xAE80, 32},	/* PATTERN_RESONANCE_1T*/
	{0x1f, 0xf, 2, 0xf, 0xB280, 32},	/* PATTERN_RESONANCE_2T*/
	{0x1f, 0xf, 2, 0xf, 0xB680, 32},	/* PATTERN_RESONANCE_3T*/
	{0x1f, 0xf, 2, 0xf, 0xBA80, 32},	/* PATTERN_RESONANCE_4T*/
	{0x1f, 0xf, 2, 0xf, 0xBE80, 32},	/* PATTERN_RESONANCE_5T*/
	{0x1f, 0xf, 2, 0xf, 0xC280, 32},	/* PATTERN_RESONANCE_6T*/
	{0x1f, 0xf, 2, 0xf, 0xC680, 32},	/* PATTERN_RESONANCE_7T*/
	{0x1f, 0xf, 2, 0xf, 0xca80, 32},	/* PATTERN_RESONANCE_8T*/
	{0x1f, 0xf, 2, 0xf, 0xce80, 32}	/* PATTERN_RESONANCE_9T*/
	/* Note: actual start_address is "<< 3" of defined address */
};
338 #else /* CONFIG_DDR4 */
/*
 * ODPG pattern descriptors for 16-bit interfaces (DDR3 build), indexed
 * by enum hws_pattern.
 */
struct pattern_info pattern_table_16[] = {
	/*
	 * num tx phases, tx burst, delay between, rx pattern,
	 * start_address, pattern_len
	 */
	{1, 1, 2, 1, 0x0080, 2},	/* PATTERN_PBS1 */
	{1, 1, 2, 1, 0x00c0, 2},	/* PATTERN_PBS2 */
	{1, 1, 2, 1, 0x0380, 2},	/* PATTERN_PBS3 */
	{1, 1, 2, 1, 0x0040, 2},	/* PATTERN_TEST */
	{1, 1, 2, 1, 0x0100, 2},	/* PATTERN_RL */
	{1, 1, 2, 1, 0x0000, 2},	/* PATTERN_RL2 */
	{0xf, 0x7, 2, 0x7, 0x0140, 16},	/* PATTERN_STATIC_PBS */
	{0xf, 0x7, 2, 0x7, 0x0190, 16},	/* PATTERN_KILLER_DQ0 */
	{0xf, 0x7, 2, 0x7, 0x01d0, 16},	/* PATTERN_KILLER_DQ1 */
	{0xf, 0x7, 2, 0x7, 0x0210, 16},	/* PATTERN_KILLER_DQ2 */
	{0xf, 0x7, 2, 0x7, 0x0250, 16},	/* PATTERN_KILLER_DQ3 */
	{0xf, 0x7, 2, 0x7, 0x0290, 16},	/* PATTERN_KILLER_DQ4 */
	{0xf, 0x7, 2, 0x7, 0x02d0, 16},	/* PATTERN_KILLER_DQ5 */
	{0xf, 0x7, 2, 0x7, 0x0310, 16},	/* PATTERN_KILLER_DQ6 */
	{0xf, 0x7, 2, 0x7, 0x0350, 16},	/* PATTERN_KILLER_DQ7 */
	{0xf, 0x7, 2, 0x7, 0x04c0, 16},	/* PATTERN_VREF */
	{0xf, 0x7, 2, 0x7, 0x03c0, 16},	/* PATTERN_FULL_SSO_1T */
	{0xf, 0x7, 2, 0x7, 0x0400, 16},	/* PATTERN_FULL_SSO_2T */
	{0xf, 0x7, 2, 0x7, 0x0440, 16},	/* PATTERN_FULL_SSO_3T */
	{0xf, 0x7, 2, 0x7, 0x0480, 16},	/* PATTERN_FULL_SSO_4T */
	{0xf, 7, 2, 7, 0x6280, 16},	/* PATTERN_SSO_FULL_XTALK_DQ0 */
	{0xf, 7, 2, 7, 0x6680, 16},	/* PATTERN_SSO_FULL_XTALK_DQ1 */
	{0xf, 7, 2, 7, 0x6A80, 16},	/* PATTERN_SSO_FULL_XTALK_DQ2 */
	{0xf, 7, 2, 7, 0x6E80, 16},	/* PATTERN_SSO_FULL_XTALK_DQ3 */
	{0xf, 7, 2, 7, 0x7280, 16},	/* PATTERN_SSO_FULL_XTALK_DQ4 */
	{0xf, 7, 2, 7, 0x7680, 16},	/* PATTERN_SSO_FULL_XTALK_DQ5 */
	{0xf, 7, 2, 7, 0x7A80, 16},	/* PATTERN_SSO_FULL_XTALK_DQ6 */
	{0xf, 7, 2, 7, 0x7E80, 16},	/* PATTERN_SSO_FULL_XTALK_DQ7 */
	{0xf, 7, 2, 7, 0x8280, 16},	/* PATTERN_SSO_XTALK_FREE_DQ0 */
	{0xf, 7, 2, 7, 0x8680, 16},	/* PATTERN_SSO_XTALK_FREE_DQ1 */
	{0xf, 7, 2, 7, 0x8A80, 16},	/* PATTERN_SSO_XTALK_FREE_DQ2 */
	{0xf, 7, 2, 7, 0x8E80, 16},	/* PATTERN_SSO_XTALK_FREE_DQ3 */
	{0xf, 7, 2, 7, 0x9280, 16},	/* PATTERN_SSO_XTALK_FREE_DQ4 */
	{0xf, 7, 2, 7, 0x9680, 16},	/* PATTERN_SSO_XTALK_FREE_DQ5 */
	{0xf, 7, 2, 7, 0x9A80, 16},	/* PATTERN_SSO_XTALK_FREE_DQ6 */
	{0xf, 7, 2, 7, 0x9E80, 16},	/* PATTERN_SSO_XTALK_FREE_DQ7 */
	{0xf, 7, 2, 7, 0xA280, 16}	/* PATTERN_ISI_XTALK_FREE */
	/* Note: actual start_address is "<< 3" of defined address */
};
383
/*
 * ODPG pattern descriptors for 32-bit interfaces (DDR3 build), indexed
 * by enum hws_pattern.
 */
struct pattern_info pattern_table_32[] = {
	/*
	 * num tx phases, tx burst, delay between, rx pattern,
	 * start_address, pattern_len
	 */
	{3, 3, 2, 3, 0x0080, 4},	/* PATTERN_PBS1 */
	{3, 3, 2, 3, 0x00c0, 4},	/* PATTERN_PBS2 */
	{3, 3, 2, 3, 0x0380, 4},	/* PATTERN_PBS3 */
	{3, 3, 2, 3, 0x0040, 4},	/* PATTERN_TEST */
	{3, 3, 2, 3, 0x0100, 4},	/* PATTERN_RL */
	{3, 3, 2, 3, 0x0000, 4},	/* PATTERN_RL2 */
	{0x1f, 0xf, 2, 0xf, 0x0140, 32},	/* PATTERN_STATIC_PBS */
	{0x1f, 0xf, 2, 0xf, 0x0190, 32},	/* PATTERN_KILLER_DQ0 */
	{0x1f, 0xf, 2, 0xf, 0x01d0, 32},	/* PATTERN_KILLER_DQ1 */
	{0x1f, 0xf, 2, 0xf, 0x0210, 32},	/* PATTERN_KILLER_DQ2 */
	{0x1f, 0xf, 2, 0xf, 0x0250, 32},	/* PATTERN_KILLER_DQ3 */
	{0x1f, 0xf, 2, 0xf, 0x0290, 32},	/* PATTERN_KILLER_DQ4 */
	{0x1f, 0xf, 2, 0xf, 0x02d0, 32},	/* PATTERN_KILLER_DQ5 */
	{0x1f, 0xf, 2, 0xf, 0x0310, 32},	/* PATTERN_KILLER_DQ6 */
	{0x1f, 0xf, 2, 0xf, 0x0350, 32},	/* PATTERN_KILLER_DQ7 */
	{0x1f, 0xf, 2, 0xf, 0x04c0, 32},	/* PATTERN_VREF */
	{0x1f, 0xf, 2, 0xf, 0x03c0, 32},	/* PATTERN_FULL_SSO_1T */
	{0x1f, 0xf, 2, 0xf, 0x0400, 32},	/* PATTERN_FULL_SSO_2T */
	{0x1f, 0xf, 2, 0xf, 0x0440, 32},	/* PATTERN_FULL_SSO_3T */
	{0x1f, 0xf, 2, 0xf, 0x0480, 32},	/* PATTERN_FULL_SSO_4T */
	{0x1f, 0xF, 2, 0xf, 0x6280, 32},	/* PATTERN_SSO_FULL_XTALK_DQ0 */
	{0x1f, 0xF, 2, 0xf, 0x6680, 32},	/* PATTERN_SSO_FULL_XTALK_DQ1 */
	{0x1f, 0xF, 2, 0xf, 0x6A80, 32},	/* PATTERN_SSO_FULL_XTALK_DQ2 */
	{0x1f, 0xF, 2, 0xf, 0x6E80, 32},	/* PATTERN_SSO_FULL_XTALK_DQ3 */
	{0x1f, 0xF, 2, 0xf, 0x7280, 32},	/* PATTERN_SSO_FULL_XTALK_DQ4 */
	{0x1f, 0xF, 2, 0xf, 0x7680, 32},	/* PATTERN_SSO_FULL_XTALK_DQ5 */
	{0x1f, 0xF, 2, 0xf, 0x7A80, 32},	/* PATTERN_SSO_FULL_XTALK_DQ6 */
	{0x1f, 0xF, 2, 0xf, 0x7E80, 32},	/* PATTERN_SSO_FULL_XTALK_DQ7 */
	{0x1f, 0xF, 2, 0xf, 0x8280, 32},	/* PATTERN_SSO_XTALK_FREE_DQ0 */
	{0x1f, 0xF, 2, 0xf, 0x8680, 32},	/* PATTERN_SSO_XTALK_FREE_DQ1 */
	{0x1f, 0xF, 2, 0xf, 0x8A80, 32},	/* PATTERN_SSO_XTALK_FREE_DQ2 */
	{0x1f, 0xF, 2, 0xf, 0x8E80, 32},	/* PATTERN_SSO_XTALK_FREE_DQ3 */
	{0x1f, 0xF, 2, 0xf, 0x9280, 32},	/* PATTERN_SSO_XTALK_FREE_DQ4 */
	{0x1f, 0xF, 2, 0xf, 0x9680, 32},	/* PATTERN_SSO_XTALK_FREE_DQ5 */
	{0x1f, 0xF, 2, 0xf, 0x9A80, 32},	/* PATTERN_SSO_XTALK_FREE_DQ6 */
	{0x1f, 0xF, 2, 0xf, 0x9E80, 32},	/* PATTERN_SSO_XTALK_FREE_DQ7 */
	{0x1f, 0xF, 2, 0xf, 0xA280, 32}	/* PATTERN_ISI_XTALK_FREE */
	/* Note: actual start_address is "<< 3" of defined address */
};
428 #endif /* CONFIG_DDR4 */
429
/* state of the most recent training command (kept for debug/inspection) */
u32 train_dev_num;
/* NOTE(review): odd identifier — likely meant train_cs_type; renaming
 * would break any external references, so confirm against the header
 * before changing.
 */
enum hws_ddr_cs traintrain_cs_type;
u32 train_pup_num;
enum hws_training_result train_result_type;
enum hws_control_element train_control_element;
/* NOTE(review): odd identifier — likely meant train_search_dir; same
 * caveat as above.
 */
enum hws_search_dir traine_search_dir;
enum hws_dir train_direction;
u32 train_if_select;
u32 train_init_value;
u32 train_number_iterations;
enum hws_pattern train_pattern;
enum hws_edge_compare train_edge_compare;
u32 train_cs_num;
u32 train_if_acess, train_if_id, train_pup_access;
/* upper bound on polling iterations when waiting for training done */
#if defined(CONFIG_DDR4)
/* The counter was increased for DDR4 because of A390 DB-GP DDR4 failure */
u32 max_polling_for_done = 100000000;
#else /* CONFIG_DDR4 */
u32 max_polling_for_done = 1000000;
#endif /* CONFIG_DDR4 */
450
/*
 * Return a pointer into the global training_res[] buffer for the given
 * search direction and interface.  Results are laid out per search
 * direction, then per interface, then per (subphy, bit); result_type
 * does not affect the offset.
 */
u32 *ddr3_tip_get_buf_ptr(u32 dev_num, enum hws_search_dir search,
			  enum hws_training_result result_type,
			  u32 interface_num)
{
	u32 per_if = MAX_BUS_NUM * BUS_WIDTH_IN_BITS;
	u32 search_offs = MAX_INTERFACE_NUM * per_if * search;

	return &training_res[search_offs + interface_num * per_if];
}
463
/* result codes reported by the training-done check (see data == PASS use) */
enum {
	PASS,
	FAIL
};
/*
 * IP Training search
 * Note: for a single-edge search, only search from fail to pass;
 * otherwise jitter can be entered into the solution.
 */
ddr3_tip_ip_training(u32 dev_num,enum hws_access_type access_type,u32 interface_num,enum hws_access_type pup_access_type,u32 pup_num,enum hws_training_result result_type,enum hws_control_element control_element,enum hws_search_dir search_dir,enum hws_dir direction,u32 interface_mask,u32 init_value,u32 num_iter,enum hws_pattern pattern,enum hws_edge_compare edge_comp,enum hws_ddr_cs cs_type,u32 cs_num,enum hws_training_ip_stat * train_status)473 int ddr3_tip_ip_training(u32 dev_num, enum hws_access_type access_type,
474 u32 interface_num,
475 enum hws_access_type pup_access_type,
476 u32 pup_num, enum hws_training_result result_type,
477 enum hws_control_element control_element,
478 enum hws_search_dir search_dir, enum hws_dir direction,
479 u32 interface_mask, u32 init_value, u32 num_iter,
480 enum hws_pattern pattern,
481 enum hws_edge_compare edge_comp,
482 enum hws_ddr_cs cs_type, u32 cs_num,
483 enum hws_training_ip_stat *train_status)
484 {
485 u32 mask_dq_num_of_regs, mask_pup_num_of_regs, index_cnt,
486 reg_data, pup_id;
487 u32 tx_burst_size;
488 u32 delay_between_burst;
489 u32 rd_mode;
490 u32 data;
491 struct pattern_info *pattern_table = ddr3_tip_get_pattern_table();
492 u16 *mask_results_pup_reg_map = ddr3_tip_get_mask_results_pup_reg_map();
493 u16 *mask_results_dq_reg_map = ddr3_tip_get_mask_results_dq_reg();
494 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
495 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
496
497 if (pup_num >= octets_per_if_num) {
498 DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
499 ("pup_num %d not valid\n", pup_num));
500 }
501 if (interface_num >= MAX_INTERFACE_NUM) {
502 DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
503 ("if_id %d not valid\n",
504 interface_num));
505 }
506 if (train_status == NULL) {
507 DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
508 ("error param 4\n"));
509 return MV_BAD_PARAM;
510 }
511
512 /* load pattern */
513 if (cs_type == CS_SINGLE) {
514 /* All CSs to CS0 */
515 CHECK_STATUS(ddr3_tip_if_write
516 (dev_num, access_type, interface_num,
517 DUAL_DUNIT_CFG_REG, 1 << 3, 1 << 3));
518 /* All CSs to CS0 */
519 CHECK_STATUS(ddr3_tip_if_write
520 (dev_num, access_type, interface_num,
521 ODPG_DATA_CTRL_REG,
522 (0x3 | (effective_cs << 26)), 0xc000003));
523 } else {
524 CHECK_STATUS(ddr3_tip_if_write
525 (dev_num, access_type, interface_num,
526 DUAL_DUNIT_CFG_REG, 0, 1 << 3));
527 /* CS select */
528 CHECK_STATUS(ddr3_tip_if_write
529 (dev_num, access_type, interface_num,
530 ODPG_DATA_CTRL_REG, 0x3 | cs_num << 26,
531 0x3 | 3 << 26));
532 }
533
534 /* load pattern to ODPG */
535 ddr3_tip_load_pattern_to_odpg(dev_num, access_type, interface_num,
536 pattern,
537 pattern_table[pattern].start_addr);
538 tx_burst_size = (direction == OPER_WRITE) ?
539 pattern_table[pattern].tx_burst_size : 0;
540 delay_between_burst = (direction == OPER_WRITE) ? 2 : 0;
541 rd_mode = (direction == OPER_WRITE) ? 1 : 0;
542 CHECK_STATUS(ddr3_tip_configure_odpg
543 (dev_num, access_type, interface_num, direction,
544 pattern_table[pattern].num_of_phases_tx, tx_burst_size,
545 pattern_table[pattern].num_of_phases_rx,
546 delay_between_burst, rd_mode, effective_cs, STRESS_NONE,
547 DURATION_SINGLE));
548 reg_data = (direction == OPER_READ) ? 0 : (0x3 << 30);
549 reg_data |= (direction == OPER_READ) ? 0x60 : 0xfa;
550 CHECK_STATUS(ddr3_tip_if_write
551 (dev_num, access_type, interface_num,
552 ODPG_WR_RD_MODE_ENA_REG, reg_data,
553 MASK_ALL_BITS));
554 reg_data = (edge_comp == EDGE_PF || edge_comp == EDGE_FP) ? 0 : 1 << 6;
555 reg_data |= (edge_comp == EDGE_PF || edge_comp == EDGE_PFP) ?
556 (1 << 7) : 0;
557
558 /* change from Pass to Fail will lock the result */
559 if (pup_access_type == ACCESS_TYPE_MULTICAST)
560 reg_data |= 0xe << 14;
561 else
562 reg_data |= pup_num << 14;
563
564 if (edge_comp == EDGE_FP) {
565 /* don't search for readl edge change, only the state */
566 reg_data |= (0 << 20);
567 } else if (edge_comp == EDGE_FPF) {
568 reg_data |= (0 << 20);
569 } else {
570 reg_data |= (3 << 20);
571 }
572
573 CHECK_STATUS(ddr3_tip_if_write
574 (dev_num, access_type, interface_num,
575 GENERAL_TRAINING_OPCODE_REG,
576 reg_data | (0x7 << 8) | (0x7 << 11),
577 (0x3 | (0x3 << 2) | (0x3 << 6) | (1 << 5) | (0x7 << 8) |
578 (0x7 << 11) | (0xf << 14) | (0x3 << 18) | (3 << 20))));
579 reg_data = (search_dir == HWS_LOW2HIGH) ? 0 : (1 << 8);
580 CHECK_STATUS(ddr3_tip_if_write
581 (dev_num, access_type, interface_num, OPCODE_REG0_REG(1),
582 1 | reg_data | init_value << 9 | (1 << 25) | (1 << 26),
583 0xff | (1 << 8) | (0xffff << 9) | (1 << 25) | (1 << 26)));
584
585 /*
586 * Write2_dunit(0x10b4, Number_iteration , [15:0])
587 * Max number of iterations
588 */
589 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, interface_num,
590 OPCODE_REG1_REG(1), num_iter,
591 0xffff));
592 if (control_element == HWS_CONTROL_ELEMENT_DQ_SKEW &&
593 direction == OPER_READ) {
594 /*
595 * Write2_dunit(0x10c0, 0x5f , [7:0])
596 * MC PBS Reg Address at DDR PHY
597 */
598 reg_data = PBS_RX_BCAST_PHY_REG(effective_cs);
599 } else if (control_element == HWS_CONTROL_ELEMENT_DQ_SKEW &&
600 direction == OPER_WRITE) {
601 reg_data = PBS_TX_BCAST_PHY_REG(effective_cs);
602 } else if (control_element == HWS_CONTROL_ELEMENT_ADLL &&
603 direction == OPER_WRITE) {
604 /*
605 * LOOP 0x00000001 + 4*n:
606 * where n (0-3) represents M_CS number
607 */
608 /*
609 * Write2_dunit(0x10c0, 0x1 , [7:0])
610 * ADLL WR Reg Address at DDR PHY
611 */
612 reg_data = CTX_PHY_REG(effective_cs);
613 } else if (control_element == HWS_CONTROL_ELEMENT_ADLL &&
614 direction == OPER_READ) {
615 /* ADLL RD Reg Address at DDR PHY */
616 reg_data = CRX_PHY_REG(effective_cs);
617 } else if (control_element == HWS_CONTROL_ELEMENT_DQS_SKEW &&
618 direction == OPER_WRITE) {
619 /* TBD not defined in 0.5.0 requirement */
620 } else if (control_element == HWS_CONTROL_ELEMENT_DQS_SKEW &&
621 direction == OPER_READ) {
622 /* TBD not defined in 0.5.0 requirement */
623 }
624
625 reg_data |= (0x6 << 28);
626 CHECK_STATUS(ddr3_tip_if_write
627 (dev_num, access_type, interface_num, CAL_PHY_REG(1),
628 reg_data | (init_value << 8),
629 0xff | (0xffff << 8) | (0xf << 24) | (u32) (0xf << 28)));
630
631 mask_dq_num_of_regs = octets_per_if_num * BUS_WIDTH_IN_BITS;
632 mask_pup_num_of_regs = octets_per_if_num;
633
634 if (result_type == RESULT_PER_BIT) {
635 for (index_cnt = 0; index_cnt < mask_dq_num_of_regs;
636 index_cnt++) {
637 CHECK_STATUS(ddr3_tip_if_write
638 (dev_num, access_type, interface_num,
639 mask_results_dq_reg_map[index_cnt], 0,
640 1 << 24));
641 }
642
643 /* Mask disabled buses */
644 for (pup_id = 0; pup_id < octets_per_if_num;
645 pup_id++) {
646 if (IS_BUS_ACTIVE(tm->bus_act_mask, pup_id) == 1)
647 continue;
648
649 for (index_cnt = (pup_id * 8); index_cnt < (pup_id + 1) * 8; index_cnt++) {
650 CHECK_STATUS(ddr3_tip_if_write
651 (dev_num, access_type,
652 interface_num,
653 mask_results_dq_reg_map
654 [index_cnt], (1 << 24), 1 << 24));
655 }
656 }
657
658 for (index_cnt = 0; index_cnt < mask_pup_num_of_regs;
659 index_cnt++) {
660 CHECK_STATUS(ddr3_tip_if_write
661 (dev_num, access_type, interface_num,
662 mask_results_pup_reg_map[index_cnt],
663 (1 << 24), 1 << 24));
664 }
665 } else if (result_type == RESULT_PER_BYTE) {
666 /* write to adll */
667 for (index_cnt = 0; index_cnt < mask_pup_num_of_regs;
668 index_cnt++) {
669 CHECK_STATUS(ddr3_tip_if_write
670 (dev_num, access_type, interface_num,
671 mask_results_pup_reg_map[index_cnt], 0,
672 1 << 24));
673 }
674 for (index_cnt = 0; index_cnt < mask_dq_num_of_regs;
675 index_cnt++) {
676 CHECK_STATUS(ddr3_tip_if_write
677 (dev_num, access_type, interface_num,
678 mask_results_dq_reg_map[index_cnt],
679 (1 << 24), (1 << 24)));
680 }
681 }
682
683 /* trigger training */
684 mv_ddr_training_enable();
685
686 /* wa for 16-bit mode: wait for all rfu tests to finish or timeout */
687 mdelay(1);
688
689 /* check for training done */
690 if (mv_ddr_is_training_done(MAX_POLLING_ITERATIONS, &data) != MV_OK) {
691 train_status[0] = HWS_TRAINING_IP_STATUS_TIMEOUT;
692 } else { /* training done; check for pass */
693 if (data == PASS)
694 train_status[0] = HWS_TRAINING_IP_STATUS_SUCCESS;
695 else
696 train_status[0] = HWS_TRAINING_IP_STATUS_FAIL;
697 }
698
699 ddr3_tip_if_write(0, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
700 ODPG_DATA_CTRL_REG, 0, MASK_ALL_BITS);
701 #if defined(CONFIG_DDR4)
702 if (tm->debug_level != DEBUG_LEVEL_ERROR)
703 refresh();
704 #endif
705
706 return MV_OK;
707 }
708
709 /*
710 * Load expected Pattern to ODPG
711 */
ddr3_tip_load_pattern_to_odpg(u32 dev_num,enum hws_access_type access_type,u32 if_id,enum hws_pattern pattern,u32 load_addr)712 int ddr3_tip_load_pattern_to_odpg(u32 dev_num, enum hws_access_type access_type,
713 u32 if_id, enum hws_pattern pattern,
714 u32 load_addr)
715 {
716 u32 pattern_length_cnt = 0;
717 struct pattern_info *pattern_table = ddr3_tip_get_pattern_table();
718 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
719
720 for (pattern_length_cnt = 0;
721 pattern_length_cnt < pattern_table[pattern].pattern_len;
722 pattern_length_cnt++) { /* FIXME: the ecc patch below is only for a7040 A0 */
723 if (MV_DDR_IS_64BIT_DRAM_MODE(tm->bus_act_mask)/* || tm->bus_act_mask == MV_DDR_32BIT_ECC_PUP8_BUS_MASK*/) {
724 CHECK_STATUS(ddr3_tip_if_write
725 (dev_num, access_type, if_id,
726 ODPG_DATA_WR_DATA_LOW_REG,
727 pattern_table_get_word(dev_num, pattern,
728 (u8) (pattern_length_cnt)),
729 MASK_ALL_BITS));
730 CHECK_STATUS(ddr3_tip_if_write
731 (dev_num, access_type, if_id,
732 ODPG_DATA_WR_DATA_HIGH_REG,
733 pattern_table_get_word(dev_num, pattern,
734 (u8) (pattern_length_cnt)),
735 MASK_ALL_BITS));
736 } else {
737 CHECK_STATUS(ddr3_tip_if_write
738 (dev_num, access_type, if_id,
739 ODPG_DATA_WR_DATA_LOW_REG,
740 pattern_table_get_word(dev_num, pattern,
741 (u8) (pattern_length_cnt * 2)),
742 MASK_ALL_BITS));
743 CHECK_STATUS(ddr3_tip_if_write
744 (dev_num, access_type, if_id,
745 ODPG_DATA_WR_DATA_HIGH_REG,
746 pattern_table_get_word(dev_num, pattern,
747 (u8) (pattern_length_cnt * 2 + 1)),
748 MASK_ALL_BITS));
749 }
750 CHECK_STATUS(ddr3_tip_if_write
751 (dev_num, access_type, if_id,
752 ODPG_DATA_WR_ADDR_REG, pattern_length_cnt,
753 MASK_ALL_BITS));
754 }
755
756 CHECK_STATUS(ddr3_tip_if_write
757 (dev_num, access_type, if_id,
758 ODPG_DATA_BUFFER_OFFS_REG, load_addr, MASK_ALL_BITS));
759
760 return MV_OK;
761 }
762
763 /*
764 * Configure ODPG
765 */
ddr3_tip_configure_odpg(u32 dev_num,enum hws_access_type access_type,u32 if_id,enum hws_dir direction,u32 tx_phases,u32 tx_burst_size,u32 rx_phases,u32 delay_between_burst,u32 rd_mode,u32 cs_num,u32 addr_stress_jump,u32 single_pattern)766 int ddr3_tip_configure_odpg(u32 dev_num, enum hws_access_type access_type,
767 u32 if_id, enum hws_dir direction, u32 tx_phases,
768 u32 tx_burst_size, u32 rx_phases,
769 u32 delay_between_burst, u32 rd_mode, u32 cs_num,
770 u32 addr_stress_jump, u32 single_pattern)
771 {
772 u32 data_value = 0;
773 int ret;
774
775 data_value = ((single_pattern << 2) | (tx_phases << 5) |
776 (tx_burst_size << 11) | (delay_between_burst << 15) |
777 (rx_phases << 21) | (rd_mode << 25) | (cs_num << 26) |
778 (addr_stress_jump << 29));
779 ret = ddr3_tip_if_write(dev_num, access_type, if_id,
780 ODPG_DATA_CTRL_REG, data_value, 0xaffffffc);
781 if (ret != MV_OK)
782 return ret;
783
784 return MV_OK;
785 }
786
/*
 * Reduce a per-bit search result array to a single edge value.
 *
 * Scans all BUS_WIDTH_IN_BITS results; if any bit failed to lock the
 * set cannot be processed and MV_FAIL is returned. Otherwise the
 * maximum or minimum tap value (selected by e_edge_search) across all
 * bits is written to *edge_result and MV_OK is returned.
 */
int ddr3_tip_process_result(u32 *ar_result, enum hws_edge e_edge,
			    enum hws_edge_search e_edge_search,
			    u32 *edge_result)
{
	u32 i, res;
	int tap_val, max_val = -10000, min_val = 10000;
	int lock_success = 1;

	for (i = 0; i < BUS_WIDTH_IN_BITS; i++) {
		res = GET_LOCK_RESULT(ar_result[i]);
		if (res == 0) {
			/*
			 * Log the bit that actually failed to lock.
			 * (Previously this message was emitted on the
			 * success path, i.e. for every bit that locked,
			 * and never for the failing bit.)
			 */
			DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
						 ("lock failed for bit %d\n",
						  i));
			lock_success = 0;
			break;
		}
	}

	if (lock_success == 0)
		return MV_FAIL;

	for (i = 0; i < BUS_WIDTH_IN_BITS; i++) {
		tap_val = GET_TAP_RESULT(ar_result[i], e_edge);
		if (tap_val > max_val)
			max_val = tap_val;
		if (tap_val < min_val)
			min_val = tap_val;
		/* keep *edge_result current for the trace print below */
		if (e_edge_search == TRAINING_EDGE_MAX)
			*edge_result = (u32)max_val;
		else
			*edge_result = (u32)min_val;

		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("i %d ar_result[i] 0x%x tap_val %d max_val %d min_val %d Edge_result %d\n",
					  i, ar_result[i], tap_val,
					  max_val, min_val,
					  *edge_result));
	}

	return MV_OK;
}
829
/*
 * Read training search result
 *
 * Unloads per-bit or per-byte search results for one interface.
 * With is_read_from_db == 0 the results are read from the training IP
 * result registers into the result database; with is_read_from_db == 1
 * a pointer into the database is returned through *load_res instead.
 */
int ddr3_tip_read_training_result(u32 dev_num, u32 if_id,
				  enum hws_access_type pup_access_type,
				  u32 pup_num, u32 bit_num,
				  enum hws_search_dir search,
				  enum hws_dir direction,
				  enum hws_training_result result_type,
				  enum hws_training_load_op operation,
				  u32 cs_num_type, u32 **load_res,
				  int is_read_from_db, u8 cons_tap,
				  int is_check_result_validity)
{
	u32 reg_offset, pup_cnt, start_pup, end_pup, start_reg, end_reg;
	u32 *interface_train_res = NULL;
	u16 *reg_addr = NULL;
	u32 read_data[MAX_INTERFACE_NUM];
	u16 *mask_results_pup_reg_map = ddr3_tip_get_mask_results_pup_reg_map();
	u16 *mask_results_dq_reg_map = ddr3_tip_get_mask_results_dq_reg();
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	/*
	 * Agreed assumption: all CS mask contain same number of bits,
	 * i.e. in multi CS, the number of CS per memory is the same for
	 * all pups
	 */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_UNICAST, if_id, DUAL_DUNIT_CFG_REG,
		      (cs_num_type == 0) ? 1 << 3 : 0, (1 << 3)));
	/* select the chip select whose results are being read */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_UNICAST, if_id,
		      ODPG_DATA_CTRL_REG, (cs_num_type << 26), (3 << 26)));
	DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_TRACE,
				 ("Read_from_d_b %d cs_type %d oper %d result_type %d direction %d search %d pup_num %d if_id %d pup_access_type %d\n",
				  is_read_from_db, cs_num_type, operation,
				  result_type, direction, search, pup_num,
				  if_id, pup_access_type));

	/* a database read needs somewhere to return the pointer */
	if ((load_res == NULL) && (is_read_from_db == 1)) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("ddr3_tip_read_training_result load_res = NULL"));
		return MV_FAIL;
	}
	/* NOTE(review): out-of-range values below are logged, not rejected */
	if (pup_num >= octets_per_if_num) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("pup_num %d not valid\n", pup_num));
	}
	if (if_id >= MAX_INTERFACE_NUM) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("if_id %d not valid\n", if_id));
	}
	/* pick the register map matching the result granularity */
	if (result_type == RESULT_PER_BIT)
		reg_addr = mask_results_dq_reg_map;
	else
		reg_addr = mask_results_pup_reg_map;
	if (pup_access_type == ACCESS_TYPE_UNICAST) {
		start_pup = pup_num;
		end_pup = pup_num;
	} else { /*pup_access_type == ACCESS_TYPE_MULTICAST) */

		start_pup = 0;
		end_pup = octets_per_if_num - 1;
	}

	for (pup_cnt = start_pup; pup_cnt <= end_pup; pup_cnt++) {
		VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup_cnt);
		DEBUG_TRAINING_IP_ENGINE(
			DEBUG_LEVEL_TRACE,
			("if_id %d start_pup %d end_pup %d pup_cnt %d\n",
			 if_id, start_pup, end_pup, pup_cnt));
		/* compute the result-register index range for this pup */
		if (result_type == RESULT_PER_BIT) {
			if (bit_num == ALL_BITS_PER_PUP) {
				start_reg = pup_cnt * BUS_WIDTH_IN_BITS;
				end_reg = (pup_cnt + 1) * BUS_WIDTH_IN_BITS - 1;
			} else {
				start_reg =
					pup_cnt * BUS_WIDTH_IN_BITS + bit_num;
				end_reg = pup_cnt * BUS_WIDTH_IN_BITS + bit_num;
			}
		} else {
			start_reg = pup_cnt;
			end_reg = pup_cnt;
		}

		interface_train_res =
			ddr3_tip_get_buf_ptr(dev_num, search, result_type,
					     if_id);
		DEBUG_TRAINING_IP_ENGINE(
			DEBUG_LEVEL_TRACE,
			("start_reg %d end_reg %d interface %p\n",
			 start_reg, end_reg, interface_train_res));
		if (interface_train_res == NULL) {
			DEBUG_TRAINING_IP_ENGINE(
				DEBUG_LEVEL_ERROR,
				("interface_train_res is NULL\n"));
			return MV_FAIL;
		}

		for (reg_offset = start_reg; reg_offset <= end_reg;
		     reg_offset++) {
			if (operation == TRAINING_LOAD_OPERATION_UNLOAD) {
				if (is_read_from_db == 0) {
					/* unload result register into db */
					CHECK_STATUS(ddr3_tip_if_read
						     (dev_num,
						      ACCESS_TYPE_UNICAST,
						      if_id,
						      reg_addr[reg_offset],
						      read_data,
						      MASK_ALL_BITS));
					if (is_check_result_validity == 1) {
						/*
						 * replace an unlocked result
						 * with a max-range sentinel
						 */
						if ((read_data[if_id] &
						     TIP_ENG_LOCK) == 0) {
							interface_train_res
								[reg_offset] =
								TIP_ENG_LOCK +
								TIP_TX_DLL_RANGE_MAX;
						} else {
							interface_train_res
								[reg_offset] =
								read_data
								[if_id] +
								cons_tap;
						}
					} else {
						interface_train_res[reg_offset]
							= read_data[if_id] +
							cons_tap;
					}
					DEBUG_TRAINING_IP_ENGINE
						(DEBUG_LEVEL_TRACE,
						 ("reg_offset %d value 0x%x addr %p\n",
						  reg_offset,
						  interface_train_res
						  [reg_offset],
						  &interface_train_res
						  [reg_offset]));
				} else {
					/* return a pointer into the db */
					*load_res =
						&interface_train_res[start_reg];
					DEBUG_TRAINING_IP_ENGINE
						(DEBUG_LEVEL_TRACE,
						 ("*load_res %p\n", *load_res));
				}
			} else {
				DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_TRACE,
							 ("not supported\n"));
			}
		}
	}
#if defined(CONFIG_DDR4)
	if (tm->debug_level != DEBUG_LEVEL_ERROR)
		refresh();
#endif

	return MV_OK;
}
988
989 /*
990 * Load all pattern to memory using ODPG
991 */
ddr3_tip_load_all_pattern_to_mem(u32 dev_num)992 int ddr3_tip_load_all_pattern_to_mem(u32 dev_num)
993 {
994 u32 pattern = 0, if_id;
995 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
996
997 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
998 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
999 training_result[training_stage][if_id] = TEST_SUCCESS;
1000 }
1001
1002 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1003 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
1004 /* enable single cs */
1005 CHECK_STATUS(ddr3_tip_if_write
1006 (dev_num, ACCESS_TYPE_UNICAST, if_id,
1007 DUAL_DUNIT_CFG_REG, (1 << 3), (1 << 3)));
1008 }
1009
1010 for (pattern = 0; pattern < PATTERN_LAST; pattern++) {
1011 if (pattern == PATTERN_TEST)
1012 continue;
1013 ddr3_tip_load_pattern_to_mem(dev_num, pattern);
1014 }
1015
1016 return MV_OK;
1017 }
1018
/*
 * Load specific pattern to memory using ODPG
 */
int ddr3_tip_load_pattern_to_mem(u32 dev_num, enum hws_pattern pattern)
{
	u32 reg_data, if_id;
	struct pattern_info *pattern_table = ddr3_tip_get_pattern_table();
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	/* load pattern to memory */
	/*
	 * Write Tx mode, CS0, phases, Tx burst size, delay between burst,
	 * rx pattern phases
	 */
	/* NOTE(review): effective_cs is a file-scope global holding the
	 * currently trained chip select - defined outside this chunk
	 */
	reg_data =
		0x1 | (pattern_table[pattern].num_of_phases_tx << 5) |
		(pattern_table[pattern].tx_burst_size << 11) |
		(pattern_table[pattern].delay_between_bursts << 15) |
		(pattern_table[pattern].num_of_phases_rx << 21) | (0x1 << 25) |
		(effective_cs << 26);
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_DATA_CTRL_REG, reg_data, MASK_ALL_BITS));
	/* ODPG Write enable from BIST */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_DATA_CTRL_REG, (0x1 | (effective_cs << 26)),
		      0xc000003));
	/* disable error injection */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_DATA_WR_DATA_ERR_REG, 0, 0x1));
	/* load pattern to ODPG */
	ddr3_tip_load_pattern_to_odpg(dev_num, ACCESS_TYPE_MULTICAST,
				      PARAM_NOT_CARE, pattern,
				      pattern_table[pattern].start_addr);

	/* trigger the ODPG; the mechanism depends on the TIP revision */
	if (ddr3_tip_dev_attr_get(dev_num, MV_ATTR_TIP_REV) >= MV_TIP_REV_3) {
		for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);

			/* enable ODT while the pattern is written */
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
				      SDRAM_ODT_CTRL_HIGH_REG,
				      0x3, 0xf));
		}

		mv_ddr_odpg_enable();
	} else {
		/* older revisions: kick the ODPG via control-reg bit 31 */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      ODPG_DATA_CTRL_REG, (u32)(0x1 << 31),
			      (u32)(0x1 << 31)));
	}
	mdelay(1);

	/* wait for the ODPG to finish writing the pattern */
	if (mv_ddr_is_odpg_done(MAX_POLLING_ITERATIONS) != MV_OK)
		return MV_FAIL;

	/* Disable ODPG and stop write to memory */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_DATA_CTRL_REG, (0x1 << 30), (u32) (0x3 << 30)));

	/* return to default */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_DATA_CTRL_REG, 0, MASK_ALL_BITS));

	if (ddr3_tip_dev_attr_get(dev_num, MV_ATTR_TIP_REV) >= MV_TIP_REV_3) {
		/* Disable odt0 for CS0 training - need to adjust for multy CS */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      SDRAM_ODT_CTRL_HIGH_REG, 0x0, 0xf));
	}
	/* temporary added */
	mdelay(1);

	return MV_OK;
}
1099
/*
 * Training search routine
 *
 * Validates the parameters, runs the requested search (or both l2h and
 * h2l when edge_comp == EDGE_FPF) via ddr3_tip_ip_training(), then
 * unloads the results for every affected interface into the result
 * database.
 */
int ddr3_tip_ip_training_wrapper_int(u32 dev_num,
				     enum hws_access_type access_type,
				     u32 if_id,
				     enum hws_access_type pup_access_type,
				     u32 pup_num, u32 bit_num,
				     enum hws_training_result result_type,
				     enum hws_control_element control_element,
				     enum hws_search_dir search_dir,
				     enum hws_dir direction,
				     u32 interface_mask, u32 init_value_l2h,
				     u32 init_value_h2l, u32 num_iter,
				     enum hws_pattern pattern,
				     enum hws_edge_compare edge_comp,
				     enum hws_ddr_cs train_cs_type, u32 cs_num,
				     enum hws_training_ip_stat *train_status)
{
	u32 interface_num = 0, start_if, end_if, init_value_used;
	enum hws_search_dir search_dir_id, start_search, end_search;
	enum hws_edge_compare edge_comp_used;
	u8 cons_tap = 0;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	if (train_status == NULL) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("train_status is NULL\n"));
		return MV_FAIL;
	}

	/* reject any out-of-range parameter before touching the hardware */
	if ((train_cs_type > CS_NON_SINGLE) ||
	    (edge_comp >= EDGE_PFP) ||
	    (pattern >= PATTERN_LAST) ||
	    (direction > OPER_WRITE_AND_READ) ||
	    (search_dir > HWS_HIGH2LOW) ||
	    (control_element > HWS_CONTROL_ELEMENT_DQS_SKEW) ||
	    (result_type > RESULT_PER_BYTE) ||
	    (pup_num >= octets_per_if_num) ||
	    (pup_access_type > ACCESS_TYPE_MULTICAST) ||
	    (if_id > 11) || (access_type > ACCESS_TYPE_MULTICAST)) {
		DEBUG_TRAINING_IP_ENGINE(
			DEBUG_LEVEL_ERROR,
			("wrong parameter train_cs_type %d edge_comp %d pattern %d direction %d search_dir %d control_element %d result_type %d pup_num %d pup_access_type %d if_id %d access_type %d\n",
			 train_cs_type, edge_comp, pattern, direction,
			 search_dir, control_element, result_type, pup_num,
			 pup_access_type, if_id, access_type));
		return MV_FAIL;
	}

	/* EDGE_FPF expands to both search directions with FP compare */
	if (edge_comp == EDGE_FPF) {
		start_search = HWS_LOW2HIGH;
		end_search = HWS_HIGH2LOW;
		edge_comp_used = EDGE_FP;
	} else {
		start_search = search_dir;
		end_search = search_dir;
		edge_comp_used = edge_comp;
	}

	for (search_dir_id = start_search; search_dir_id <= end_search;
	     search_dir_id++) {
		/* each direction starts from its own initial tap value */
		init_value_used = (search_dir_id == HWS_LOW2HIGH) ?
			init_value_l2h : init_value_h2l;
		DEBUG_TRAINING_IP_ENGINE(
			DEBUG_LEVEL_TRACE,
			("dev_num %d, access_type %d, if_id %d, pup_access_type %d,pup_num %d, result_type %d, control_element %d search_dir_id %d, direction %d, interface_mask %d,init_value_used %d, num_iter %d, pattern %d, edge_comp_used %d, train_cs_type %d, cs_num %d\n",
			 dev_num, access_type, if_id, pup_access_type, pup_num,
			 result_type, control_element, search_dir_id,
			 direction, interface_mask, init_value_used, num_iter,
			 pattern, edge_comp_used, train_cs_type, cs_num));

		ddr3_tip_ip_training(dev_num, access_type, if_id,
				     pup_access_type, pup_num, result_type,
				     control_element, search_dir_id, direction,
				     interface_mask, init_value_used, num_iter,
				     pattern, edge_comp_used, train_cs_type,
				     cs_num, train_status);
		if (access_type == ACCESS_TYPE_MULTICAST) {
			start_if = 0;
			end_if = MAX_INTERFACE_NUM - 1;
		} else {
			start_if = if_id;
			end_if = if_id;
		}

		/* unload the search results for every affected interface */
		for (interface_num = start_if; interface_num <= end_if;
		     interface_num++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, interface_num);
			/* NOTE(review): cs_num parameter is clobbered here */
			cs_num = 0;
			CHECK_STATUS(ddr3_tip_read_training_result
				     (dev_num, interface_num, pup_access_type,
				      pup_num, bit_num, search_dir_id,
				      direction, result_type,
				      TRAINING_LOAD_OPERATION_UNLOAD,
				      train_cs_type, NULL, 0, cons_tap,
				      0));
		}
	}

	return MV_OK;
}
1203 /*
1204 * Training search & read result routine
1205 * This function implements the search algorithm
1206 * first it calls the function ddr3_tip_ip_training_wrapper_int which triggers the search from l2h and h2l
1207 * this function handles rx and tx search cases
1208 * in case of rx it only triggers the search (l2h and h2l)
1209 * in case of tx there are 3 optional algorithm phases:
1210 * phase 1:
1211 * it first triggers the search and handles the results as following (phase 1):
 * each bit, which is defined by the two search edges (e1 or VW_L and e2 or VW_H), matches one of these cases:
1213 * 1. BIT_LOW_UI 0 =< VW =< 31 in case of jitter use: VW_L <= 31, VW_H <= 31
1214 * 2. BIT_HIGH_UI 32 =< VW =< 63 in case of jitter use: VW_L >= 32, VW_H >= 32
1215 * 3. BIT_SPLIT_IN VW_L <= 31 & VW_H >= 32
1216 * 4. BIT_SPLIT_OUT* VW_H < 32 & VW_L > 32
1217 * note: the VW units is adll taps
1218 * phase 2:
1219 * only bit case BIT_SPLIT_OUT requires another search (phase 2) from the middle range in two directions h2l and l2h
1220 * because only this case is not locked by the search engine in the first search trigger (phase 1).
1221 * phase 3:
1222 * each subphy is categorized according to its bits definition.
1223 * the sub-phy cases are as follows:
1224 * 1.BYTE_NOT_DEFINED the byte has not yet been categorized
1225 * 2.BYTE_HOMOGENEOUS_LOW 0 =< VW =< 31
1226 * 3.BYTE_HOMOGENEOUS_HIGH 32 =< VW =< 63
1227 * 4.BYTE_HOMOGENEOUS_SPLIT_IN VW_L <= 31 & VW_H >= 32
1228 * or the center of all bits in the byte =< 31
1229 * 5.BYTE_HOMOGENEOUS_SPLIT_OUT VW_H < 32 & VW_L > 32
 * 6.BYTE_SPLIT_OUT_MIX at least one bit is in the split out state and one bit is in another state
1231 * or the center of all bits in the byte => 32
1232 * after the two phases above a center valid window for each subphy is calculated accordingly:
1233 * center valid window = maximum center of all bits in the subphy - minimum center of all bits in the subphy.
1234 * now decisions are made in each subphy as following:
1235 * all subphys which are homogeneous remains as is
1236 * all subphys which are homogeneous low | homogeneous high and the subphy center valid window is less than 32
1237 * mark this subphy as homogeneous split in.
 * now the bits in the bytes which are BYTE_SPLIT_OUT_MIX need to be reorganized and handled as follows:
 * all bits which are BIT_LOW_UI will have 64 adll taps added;
 * this should ensure that all the bits in the subphy can be sampled by the dqs
1241 */
ddr3_tip_ip_training_wrapper(u32 dev_num,enum hws_access_type access_type,u32 if_id,enum hws_access_type pup_access_type,u32 pup_num,enum hws_training_result result_type,enum hws_control_element control_element,enum hws_search_dir search_dir,enum hws_dir direction,u32 interface_mask,u32 init_value_l2h,u32 init_value_h2l,u32 num_iter,enum hws_pattern pattern,enum hws_edge_compare edge_comp,enum hws_ddr_cs train_cs_type,u32 cs_num,enum hws_training_ip_stat * train_status)1242 int ddr3_tip_ip_training_wrapper(u32 dev_num, enum hws_access_type access_type,
1243 u32 if_id,
1244 enum hws_access_type pup_access_type,
1245 u32 pup_num,
1246 enum hws_training_result result_type,
1247 enum hws_control_element control_element,
1248 enum hws_search_dir search_dir,
1249 enum hws_dir direction, u32 interface_mask,
1250 u32 init_value_l2h, u32 init_value_h2l,
1251 u32 num_iter, enum hws_pattern pattern,
1252 enum hws_edge_compare edge_comp,
1253 enum hws_ddr_cs train_cs_type, u32 cs_num,
1254 enum hws_training_ip_stat *train_status)
1255 {
1256 u8 e1, e2;
1257 u32 bit_id, start_if, end_if, bit_end = 0;
1258 u32 *result[HWS_SEARCH_DIR_LIMIT] = { 0 };
1259 u8 cons_tap = (direction == OPER_WRITE) ? (64) : (0);
1260 u8 bit_bit_mask[MAX_BUS_NUM] = { 0 }, bit_bit_mask_active = 0;
1261 u8 bit_state[MAX_BUS_NUM * BUS_WIDTH_IN_BITS] = {0};
1262 u8 h2l_adll_value[MAX_BUS_NUM][BUS_WIDTH_IN_BITS];
1263 u8 l2h_adll_value[MAX_BUS_NUM][BUS_WIDTH_IN_BITS];
1264 u8 center_subphy_adll_window[MAX_BUS_NUM];
1265 u8 min_center_subphy_adll[MAX_BUS_NUM];
1266 u8 max_center_subphy_adll[MAX_BUS_NUM];
1267 u32 *l2h_if_train_res = NULL;
1268 u32 *h2l_if_train_res = NULL;
1269 enum hws_search_dir search_dir_id;
1270 int status;
1271 u32 bit_lock_result;
1272
1273 u8 sybphy_id;
1274 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
1275 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
1276
1277 if (pup_num >= octets_per_if_num) {
1278 DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
1279 ("pup_num %d not valid\n", pup_num));
1280 }
1281
1282 if (if_id >= MAX_INTERFACE_NUM) {
1283 DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
1284 ("if_id %d not valid\n", if_id));
1285 }
1286
1287 status = ddr3_tip_ip_training_wrapper_int
1288 (dev_num, access_type, if_id, pup_access_type, pup_num,
1289 ALL_BITS_PER_PUP, result_type, control_element,
1290 search_dir, direction, interface_mask, init_value_l2h,
1291 init_value_h2l, num_iter, pattern, edge_comp,
1292 train_cs_type, cs_num, train_status);
1293
1294 if (MV_OK != status)
1295 return status;
1296
1297 if (access_type == ACCESS_TYPE_MULTICAST) {
1298 start_if = 0;
1299 end_if = MAX_INTERFACE_NUM - 1;
1300 } else {
1301 start_if = if_id;
1302 end_if = if_id;
1303 }
1304
1305 for (if_id = start_if; if_id <= end_if; if_id++) {
1306 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
1307 /* zero the database */
1308 bit_bit_mask_active = 0; /* clean the flag for level2 search */
1309 memset(bit_state, 0, sizeof(bit_state));
1310 /* phase 1 */
1311 for (sybphy_id = 0; sybphy_id < octets_per_if_num; sybphy_id++) {
1312 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, sybphy_id);
1313 if (result_type == RESULT_PER_BIT)
1314 bit_end = BUS_WIDTH_IN_BITS;
1315 else
1316 bit_end = 0;
1317
1318 /* zero the data base */
1319 bit_bit_mask[sybphy_id] = 0;
1320 byte_status[if_id][sybphy_id] = BYTE_NOT_DEFINED;
1321 for (bit_id = 0; bit_id < bit_end; bit_id++) {
1322 h2l_adll_value[sybphy_id][bit_id] = 64;
1323 l2h_adll_value[sybphy_id][bit_id] = 0;
1324 for (search_dir_id = HWS_LOW2HIGH; search_dir_id <= HWS_HIGH2LOW;
1325 search_dir_id++) {
1326 status = ddr3_tip_read_training_result
1327 (dev_num, if_id,
1328 ACCESS_TYPE_UNICAST, sybphy_id, bit_id,
1329 search_dir_id, direction, result_type,
1330 TRAINING_LOAD_OPERATION_UNLOAD, CS_SINGLE,
1331 &result[search_dir_id], 1, 0, 0);
1332
1333 if (MV_OK != status)
1334 return status;
1335 }
1336
1337 e1 = GET_TAP_RESULT(result[HWS_LOW2HIGH][0], EDGE_1);
1338 e2 = GET_TAP_RESULT(result[HWS_HIGH2LOW][0], EDGE_1);
1339 DEBUG_TRAINING_IP_ENGINE
1340 (DEBUG_LEVEL_INFO,
1341 ("if_id %d sybphy_id %d bit %d l2h 0x%x (e1 0x%x) h2l 0x%x (e2 0x%x)\n",
1342 if_id, sybphy_id, bit_id, result[HWS_LOW2HIGH][0], e1,
1343 result[HWS_HIGH2LOW][0], e2));
1344 bit_lock_result =
1345 (GET_LOCK_RESULT(result[HWS_LOW2HIGH][0]) &&
1346 GET_LOCK_RESULT(result[HWS_HIGH2LOW][0]));
1347
1348 if (bit_lock_result) {
1349 /* in case of read operation set the byte status as homogeneous low */
1350 if (direction == OPER_READ) {
1351 byte_status[if_id][sybphy_id] |= BYTE_HOMOGENEOUS_LOW;
1352 } else if ((e2 - e1) > 32) { /* oper_write */
1353 /* split out */
1354 bit_state[sybphy_id * BUS_WIDTH_IN_BITS + bit_id] =
1355 BIT_SPLIT_OUT;
1356 byte_status[if_id][sybphy_id] |= BYTE_HOMOGENEOUS_SPLIT_OUT;
1357 /* mark problem bits */
1358 bit_bit_mask[sybphy_id] |= (1 << bit_id);
1359 bit_bit_mask_active = 1;
1360 DEBUG_TRAINING_IP_ENGINE
1361 (DEBUG_LEVEL_TRACE,
1362 ("if_id %d sybphy_id %d bit %d BIT_SPLIT_OUT\n",
1363 if_id, sybphy_id, bit_id));
1364 } else {
1365 /* low ui */
1366 if (e1 <= 31 && e2 <= 31) {
1367 bit_state[sybphy_id * BUS_WIDTH_IN_BITS + bit_id] =
1368 BIT_LOW_UI;
1369 byte_status[if_id][sybphy_id] |= BYTE_HOMOGENEOUS_LOW;
1370 l2h_adll_value[sybphy_id][bit_id] = e1;
1371 h2l_adll_value[sybphy_id][bit_id] = e2;
1372 DEBUG_TRAINING_IP_ENGINE
1373 (DEBUG_LEVEL_TRACE,
1374 ("if_id %d sybphy_id %d bit %d BIT_LOW_UI\n",
1375 if_id, sybphy_id, bit_id));
1376 }
1377 /* high ui */
1378 if (e1 >= 32 && e2 >= 32) {
1379 bit_state[sybphy_id * BUS_WIDTH_IN_BITS + bit_id] =
1380 BIT_HIGH_UI;
1381 byte_status[if_id][sybphy_id] |= BYTE_HOMOGENEOUS_HIGH;
1382 l2h_adll_value[sybphy_id][bit_id] = e1;
1383 h2l_adll_value[sybphy_id][bit_id] = e2;
1384 DEBUG_TRAINING_IP_ENGINE
1385 (DEBUG_LEVEL_TRACE,
1386 ("if_id %d sybphy_id %d bit %d BIT_HIGH_UI\n",
1387 if_id, sybphy_id, bit_id));
1388 }
1389 /* split in */
1390 if (e1 <= 31 && e2 >= 32) {
1391 bit_state[sybphy_id * BUS_WIDTH_IN_BITS + bit_id] =
1392 BIT_SPLIT_IN;
1393 byte_status[if_id][sybphy_id] |=
1394 BYTE_HOMOGENEOUS_SPLIT_IN;
1395 l2h_adll_value[sybphy_id][bit_id] = e1;
1396 h2l_adll_value[sybphy_id][bit_id] = e2;
1397 DEBUG_TRAINING_IP_ENGINE
1398 (DEBUG_LEVEL_TRACE,
1399 ("if_id %d sybphy_id %d bit %d BIT_SPLIT_IN\n",
1400 if_id, sybphy_id, bit_id));
1401 }
1402 }
1403 } else {
1404 DEBUG_TRAINING_IP_ENGINE
1405 (DEBUG_LEVEL_INFO,
1406 ("if_id %d sybphy_id %d bit %d l2h 0x%x (e1 0x%x)"
1407 "h2l 0x%x (e2 0x%x): bit cannot be categorized\n",
1408 if_id, sybphy_id, bit_id, result[HWS_LOW2HIGH][0], e1,
1409 result[HWS_HIGH2LOW][0], e2));
1410 /* mark the byte as not defined */
1411 byte_status[if_id][sybphy_id] = BYTE_NOT_DEFINED;
1412 break; /* continue to next pup - no reason to analyze this byte */
1413 }
1414 } /* for all bits */
1415 } /* for all PUPs */
1416
1417 /* phase 2 will occur only in write operation */
1418 if (bit_bit_mask_active != 0) {
1419 l2h_if_train_res = ddr3_tip_get_buf_ptr(dev_num, HWS_LOW2HIGH, result_type, if_id);
1420 h2l_if_train_res = ddr3_tip_get_buf_ptr(dev_num, HWS_HIGH2LOW, result_type, if_id);
1421 /* search from middle to end */
1422 ddr3_tip_ip_training
1423 (dev_num, ACCESS_TYPE_UNICAST,
1424 if_id, ACCESS_TYPE_MULTICAST,
1425 PARAM_NOT_CARE, result_type,
1426 control_element, HWS_LOW2HIGH,
1427 direction, interface_mask,
1428 num_iter / 2, num_iter / 2,
1429 pattern, EDGE_FP, train_cs_type,
1430 cs_num, train_status);
1431
1432 for (sybphy_id = 0; sybphy_id < octets_per_if_num; sybphy_id++) {
1433 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, sybphy_id);
1434 if (byte_status[if_id][sybphy_id] != BYTE_NOT_DEFINED) {
1435 if (bit_bit_mask[sybphy_id] == 0)
1436 continue; /* this byte bits have no split out state */
1437
1438 for (bit_id = 0; bit_id < bit_end; bit_id++) {
1439 if ((bit_bit_mask[sybphy_id] & (1 << bit_id)) == 0)
1440 continue; /* this bit is non split goto next bit */
1441
1442 /* enter the result to the data base */
1443 status = ddr3_tip_read_training_result
1444 (dev_num, if_id, ACCESS_TYPE_UNICAST, sybphy_id,
1445 bit_id, HWS_LOW2HIGH, direction, result_type,
1446 TRAINING_LOAD_OPERATION_UNLOAD, CS_SINGLE,
1447 &l2h_if_train_res, 0, 0, 1);
1448
1449 if (MV_OK != status)
1450 return status;
1451
1452 l2h_adll_value[sybphy_id][bit_id] =
1453 l2h_if_train_res[sybphy_id *
1454 BUS_WIDTH_IN_BITS + bit_id] & PUP_RESULT_EDGE_1_MASK;
1455 }
1456 }
1457 }
1458 /* Search from middle to start */
1459 ddr3_tip_ip_training
1460 (dev_num, ACCESS_TYPE_UNICAST,
1461 if_id, ACCESS_TYPE_MULTICAST,
1462 PARAM_NOT_CARE, result_type,
1463 control_element, HWS_HIGH2LOW,
1464 direction, interface_mask,
1465 num_iter / 2, num_iter / 2,
1466 pattern, EDGE_FP, train_cs_type,
1467 cs_num, train_status);
1468
1469 for (sybphy_id = 0; sybphy_id < octets_per_if_num; sybphy_id++) {
1470 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, sybphy_id);
1471 if (byte_status[if_id][sybphy_id] != BYTE_NOT_DEFINED) {
1472 if (bit_bit_mask[sybphy_id] == 0)
1473 continue;
1474
1475 for (bit_id = 0; bit_id < bit_end; bit_id++) {
1476 if ((bit_bit_mask[sybphy_id] & (1 << bit_id)) == 0)
1477 continue;
1478
1479 status = ddr3_tip_read_training_result
1480 (dev_num, if_id, ACCESS_TYPE_UNICAST, sybphy_id,
1481 bit_id, HWS_HIGH2LOW, direction, result_type,
1482 TRAINING_LOAD_OPERATION_UNLOAD, CS_SINGLE,
1483 &h2l_if_train_res, 0, cons_tap, 1);
1484
1485 if (MV_OK != status)
1486 return status;
1487
1488 h2l_adll_value[sybphy_id][bit_id] =
1489 h2l_if_train_res[sybphy_id *
1490 BUS_WIDTH_IN_BITS + bit_id] & PUP_RESULT_EDGE_1_MASK;
1491 }
1492 }
1493 }
1494 } /* end if bit_bit_mask_active */
1495 /*
1496 * phase 3 will occur only in write operation
1497 * find the maximum and the minimum center of each subphy
1498 */
1499 for (sybphy_id = 0; sybphy_id < octets_per_if_num; sybphy_id++) {
1500 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, sybphy_id);
1501
1502 if ((byte_status[if_id][sybphy_id] != BYTE_NOT_DEFINED) && (direction == OPER_WRITE)) {
1503 /* clear the arrays and parameters */
1504 center_subphy_adll_window[sybphy_id] = 0;
1505 max_center_subphy_adll[sybphy_id] = 0;
1506 min_center_subphy_adll[sybphy_id] = 64;
1507 /* find the max and min center adll value in the current subphy */
1508 for (bit_id = 0; bit_id < bit_end; bit_id++) {
1509 /* debug print all the bit edges after alignment */
1510 DEBUG_TRAINING_IP_ENGINE
1511 (DEBUG_LEVEL_TRACE,
1512 ("if_id %d sybphy_id %d bit %d l2h %d h2l %d\n",
1513 if_id, sybphy_id, bit_id, l2h_adll_value[sybphy_id][bit_id],
1514 h2l_adll_value[sybphy_id][bit_id]));
1515
1516 if (((l2h_adll_value[sybphy_id][bit_id] +
1517 h2l_adll_value[sybphy_id][bit_id]) / 2) >
1518 max_center_subphy_adll[sybphy_id])
1519 max_center_subphy_adll[sybphy_id] =
1520 (l2h_adll_value[sybphy_id][bit_id] +
1521 h2l_adll_value[sybphy_id][bit_id]) / 2;
1522 if (((l2h_adll_value[sybphy_id][bit_id] +
1523 h2l_adll_value[sybphy_id][bit_id]) / 2) <
1524 min_center_subphy_adll[sybphy_id])
1525 min_center_subphy_adll[sybphy_id] =
1526 (l2h_adll_value[sybphy_id][bit_id] +
1527 h2l_adll_value[sybphy_id][bit_id]) / 2;
1528 }
1529
1530 /* calculate the center of the current subphy */
1531 center_subphy_adll_window[sybphy_id] =
1532 max_center_subphy_adll[sybphy_id] -
1533 min_center_subphy_adll[sybphy_id];
1534 DEBUG_TRAINING_IP_ENGINE
1535 (DEBUG_LEVEL_TRACE,
1536 ("if_id %d sybphy_id %d min center %d max center %d center %d\n",
1537 if_id, sybphy_id, min_center_subphy_adll[sybphy_id],
1538 max_center_subphy_adll[sybphy_id],
1539 center_subphy_adll_window[sybphy_id]));
1540 }
1541 }
1542 /*
1543 * check byte state and fix bits state if needed
1544 * in case the level 1 and 2 above subphy results are
1545 * homogeneous continue to the next subphy
1546 */
1547 for (sybphy_id = 0; sybphy_id < octets_per_if_num; sybphy_id++) {
1548 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, sybphy_id);
1549 if ((byte_status[if_id][sybphy_id] == BYTE_HOMOGENEOUS_LOW) ||
1550 (byte_status[if_id][sybphy_id] == BYTE_HOMOGENEOUS_HIGH) ||
1551 (byte_status[if_id][sybphy_id] == BYTE_HOMOGENEOUS_SPLIT_IN) ||
1552 (byte_status[if_id][sybphy_id] == BYTE_HOMOGENEOUS_SPLIT_OUT) ||
1553 (byte_status[if_id][sybphy_id] == BYTE_NOT_DEFINED))
1554 continue;
1555
1556 /*
1557 * in case all of the bits in the current subphy are
1558 * less than 32 which will find alignment in the subphy bits
1559 * mark this subphy as homogeneous split in
1560 */
1561 if (center_subphy_adll_window[sybphy_id] <= 31)
1562 byte_status[if_id][sybphy_id] = BYTE_HOMOGENEOUS_SPLIT_IN;
1563
1564 /*
1565 * in case the current byte is split_out and the center is bigger than 31
1566 * the byte can be aligned. in this case add 64 to the the low ui bits aligning it
1567 * to the other ui bits
1568 */
1569 if (center_subphy_adll_window[sybphy_id] >= 32) {
1570 byte_status[if_id][sybphy_id] = BYTE_SPLIT_OUT_MIX;
1571
1572 DEBUG_TRAINING_IP_ENGINE
1573 (DEBUG_LEVEL_TRACE,
1574 ("if_id %d sybphy_id %d byte state 0x%x\n",
1575 if_id, sybphy_id, byte_status[if_id][sybphy_id]));
1576 for (bit_id = 0; bit_id < bit_end; bit_id++) {
1577 if (bit_state[sybphy_id * BUS_WIDTH_IN_BITS + bit_id] == BIT_LOW_UI) {
1578 l2h_if_train_res[sybphy_id * BUS_WIDTH_IN_BITS + bit_id] += 64;
1579 h2l_if_train_res[sybphy_id * BUS_WIDTH_IN_BITS + bit_id] += 64;
1580 }
1581 DEBUG_TRAINING_IP_ENGINE
1582 (DEBUG_LEVEL_TRACE,
1583 ("if_id %d sybphy_id %d bit_id %d added 64 adlls\n",
1584 if_id, sybphy_id, bit_id));
1585 }
1586 }
1587 }
1588 } /* for all interfaces */
1589
1590 return MV_OK;
1591 }
1592
mv_ddr_tip_sub_phy_byte_status_get(u32 if_id,u32 subphy_id)1593 u8 mv_ddr_tip_sub_phy_byte_status_get(u32 if_id, u32 subphy_id)
1594 {
1595 return byte_status[if_id][subphy_id];
1596 }
1597
/* Record a new training byte status for one subphy of an interface */
void mv_ddr_tip_sub_phy_byte_status_set(u32 if_id, u32 subphy_id, u8 byte_status_data)
{
	u8 *entry = &byte_status[if_id][subphy_id];

	*entry = byte_status_data;
}
1602
1603 /*
1604 * Load phy values
1605 */
ddr3_tip_load_phy_values(int b_load)1606 int ddr3_tip_load_phy_values(int b_load)
1607 {
1608 u32 bus_cnt = 0, if_id, dev_num = 0;
1609 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
1610 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
1611
1612 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1613 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
1614 for (bus_cnt = 0; bus_cnt < octets_per_if_num; bus_cnt++) {
1615 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_cnt);
1616 if (b_load == 1) {
1617 CHECK_STATUS(ddr3_tip_bus_read
1618 (dev_num, if_id,
1619 ACCESS_TYPE_UNICAST, bus_cnt,
1620 DDR_PHY_DATA,
1621 CTX_PHY_REG(effective_cs),
1622 &phy_reg_bk[if_id][bus_cnt]
1623 [0]));
1624 CHECK_STATUS(ddr3_tip_bus_read
1625 (dev_num, if_id,
1626 ACCESS_TYPE_UNICAST, bus_cnt,
1627 DDR_PHY_DATA,
1628 RL_PHY_REG(effective_cs),
1629 &phy_reg_bk[if_id][bus_cnt]
1630 [1]));
1631 CHECK_STATUS(ddr3_tip_bus_read
1632 (dev_num, if_id,
1633 ACCESS_TYPE_UNICAST, bus_cnt,
1634 DDR_PHY_DATA,
1635 CRX_PHY_REG(effective_cs),
1636 &phy_reg_bk[if_id][bus_cnt]
1637 [2]));
1638 } else {
1639 CHECK_STATUS(ddr3_tip_bus_write
1640 (dev_num, ACCESS_TYPE_UNICAST,
1641 if_id, ACCESS_TYPE_UNICAST,
1642 bus_cnt, DDR_PHY_DATA,
1643 CTX_PHY_REG(effective_cs),
1644 phy_reg_bk[if_id][bus_cnt]
1645 [0]));
1646 CHECK_STATUS(ddr3_tip_bus_write
1647 (dev_num, ACCESS_TYPE_UNICAST,
1648 if_id, ACCESS_TYPE_UNICAST,
1649 bus_cnt, DDR_PHY_DATA,
1650 RL_PHY_REG(effective_cs),
1651 phy_reg_bk[if_id][bus_cnt]
1652 [1]));
1653 CHECK_STATUS(ddr3_tip_bus_write
1654 (dev_num, ACCESS_TYPE_UNICAST,
1655 if_id, ACCESS_TYPE_UNICAST,
1656 bus_cnt, DDR_PHY_DATA,
1657 CRX_PHY_REG(effective_cs),
1658 phy_reg_bk[if_id][bus_cnt]
1659 [2]));
1660 }
1661 }
1662 }
1663
1664 return MV_OK;
1665 }
1666
/*
 * Run the training ip over a range of patterns in every search
 * direction, printing the per-subphy training results.
 * The phy registers are saved before the sweep and restored afterwards,
 * so the test leaves the phy state untouched.
 */
int ddr3_tip_training_ip_test(u32 dev_num, enum hws_training_result result_type,
			      enum hws_search_dir search_dir,
			      enum hws_dir direction,
			      enum hws_edge_compare edge,
			      u32 init_val1, u32 init_val2,
			      u32 num_of_iterations,
			      u32 start_pattern, u32 end_pattern)
{
	u32 pattern, if_id, subphy, search_state;
	enum hws_training_ip_stat train_status[MAX_INTERFACE_NUM];
	u32 *res = NULL;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	/* save phy registers; restored at the end of the sweep */
	ddr3_tip_load_phy_values(1);

	for (pattern = start_pattern; pattern <= end_pattern; pattern++) {
		for (search_state = 0; search_state < HWS_SEARCH_DIR_LIMIT; search_state++) {
			ddr3_tip_ip_training_wrapper(dev_num,
						     ACCESS_TYPE_MULTICAST, 0,
						     ACCESS_TYPE_MULTICAST, 0,
						     result_type,
						     HWS_CONTROL_ELEMENT_ADLL,
						     search_dir, direction,
						     0xfff, init_val1,
						     init_val2,
						     num_of_iterations, pattern,
						     edge, CS_SINGLE,
						     PARAM_NOT_CARE,
						     train_status);

			for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
				VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
				for (subphy = 0; subphy < octets_per_if_num; subphy++) {
					VALIDATE_BUS_ACTIVE(tm->bus_act_mask, subphy);
					CHECK_STATUS
						(ddr3_tip_read_training_result
						 (dev_num, if_id,
						  ACCESS_TYPE_UNICAST, subphy,
						  ALL_BITS_PER_PUP,
						  search_state,
						  direction, result_type,
						  TRAINING_LOAD_OPERATION_UNLOAD,
						  CS_SINGLE, &res, 1, 0,
						  0));
					if (result_type == RESULT_PER_BYTE) {
						DEBUG_TRAINING_IP_ENGINE
							(DEBUG_LEVEL_INFO,
							 ("search_state %d if_id %d pup_id %d 0x%x\n",
							  search_state, if_id,
							  subphy, res[0]));
					} else {
						DEBUG_TRAINING_IP_ENGINE
							(DEBUG_LEVEL_INFO,
							 ("search_state %d if_id %d pup_id %d 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x\n",
							  search_state, if_id,
							  subphy, res[0],
							  res[1], res[2],
							  res[3], res[4],
							  res[5], res[6],
							  res[7]));
					}
				}
			} /* interface */
		} /* search */
	} /* pattern */

	/* restore the phy registers saved above */
	ddr3_tip_load_phy_values(0);

	return MV_OK;
}
1744
/* Set the odpg start address of one entry in the pattern table */
int mv_ddr_pattern_start_addr_set(struct pattern_info *pattern_tbl, enum hws_pattern pattern, u32 addr)
{
	struct pattern_info *entry = &pattern_tbl[pattern];

	entry->start_addr = addr;

	return 0;
}
1751
ddr3_tip_get_pattern_table()1752 struct pattern_info *ddr3_tip_get_pattern_table()
1753 {
1754 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
1755
1756 if (MV_DDR_IS_64BIT_DRAM_MODE(tm->bus_act_mask))
1757 return pattern_table_64;
1758 else if (DDR3_IS_16BIT_DRAM_MODE(tm->bus_act_mask) == 0)
1759 return pattern_table_32;
1760 else
1761 return pattern_table_16;
1762 }
1763
ddr3_tip_get_mask_results_dq_reg()1764 u16 *ddr3_tip_get_mask_results_dq_reg()
1765 {
1766 #if MAX_BUS_NUM == 5
1767 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
1768
1769 if (DDR3_IS_ECC_PUP3_MODE(tm->bus_act_mask))
1770 return mask_results_dq_reg_map_pup3_ecc;
1771 else
1772 #endif
1773 return mask_results_dq_reg_map;
1774 }
1775
ddr3_tip_get_mask_results_pup_reg_map()1776 u16 *ddr3_tip_get_mask_results_pup_reg_map()
1777 {
1778 #if MAX_BUS_NUM == 5
1779 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
1780
1781 if (DDR3_IS_ECC_PUP3_MODE(tm->bus_act_mask))
1782 return mask_results_pup_reg_map_pup3_ecc;
1783 else
1784 #endif
1785 return mask_results_pup_reg_map;
1786 }
1787
1788 /* load expected dm pattern to odpg */
1789 #define LOW_NIBBLE_BYTE_MASK 0xf
1790 #define HIGH_NIBBLE_BYTE_MASK 0xf0
mv_ddr_load_dm_pattern_to_odpg(enum hws_access_type access_type,enum hws_pattern pattern,enum dm_direction dm_dir)1791 int mv_ddr_load_dm_pattern_to_odpg(enum hws_access_type access_type, enum hws_pattern pattern,
1792 enum dm_direction dm_dir)
1793 {
1794 struct pattern_info *pattern_table = ddr3_tip_get_pattern_table();
1795 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
1796 u32 pattern_len = 0;
1797 u32 data_low, data_high;
1798 u8 dm_data;
1799
1800 for (pattern_len = 0;
1801 pattern_len < pattern_table[pattern].pattern_len;
1802 pattern_len++) {
1803 if (MV_DDR_IS_64BIT_DRAM_MODE(tm->bus_act_mask)) {
1804 data_low = pattern_table_get_word(0, pattern, (u8)pattern_len);
1805 data_high = data_low;
1806 } else {
1807 data_low = pattern_table_get_word(0, pattern, (u8)(pattern_len * 2));
1808 data_high = pattern_table_get_word(0, pattern, (u8)(pattern_len * 2 + 1));
1809 }
1810
1811 /* odpg mbus dm definition is opposite to ddr4 protocol */
1812 if (dm_dir == DM_DIR_INVERSE)
1813 dm_data = ~((data_low & LOW_NIBBLE_BYTE_MASK) | (data_high & HIGH_NIBBLE_BYTE_MASK));
1814 else
1815 dm_data = (data_low & LOW_NIBBLE_BYTE_MASK) | (data_high & HIGH_NIBBLE_BYTE_MASK);
1816
1817 ddr3_tip_if_write(0, access_type, 0, ODPG_DATA_WR_DATA_LOW_REG, data_low, MASK_ALL_BITS);
1818 ddr3_tip_if_write(0, access_type, 0, ODPG_DATA_WR_DATA_HIGH_REG, data_high, MASK_ALL_BITS);
1819 ddr3_tip_if_write(0, access_type, 0, ODPG_DATA_WR_ADDR_REG,
1820 pattern_len | ((dm_data & ODPG_DATA_WR_DATA_MASK) << ODPG_DATA_WR_DATA_OFFS),
1821 MASK_ALL_BITS);
1822 }
1823
1824 return MV_OK;
1825 }
1826