/*
 * Copyright Altera Corporation (C) 2012-2015
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <common.h>
#include <asm/io.h>
#include <asm/arch/sdram.h>
#include <errno.h>
#include "sequencer.h"
#include "sequencer_auto.h"
#include "sequencer_auto_ac_init.h"
#include "sequencer_auto_inst_init.h"
#include "sequencer_defines.h"

static struct socfpga_sdr_rw_load_manager *sdr_rw_load_mgr_regs =
	(struct socfpga_sdr_rw_load_manager *)(SDR_PHYGRP_RWMGRGRP_ADDRESS | 0x800);

static struct socfpga_sdr_rw_load_jump_manager *sdr_rw_load_jump_mgr_regs =
	(struct socfpga_sdr_rw_load_jump_manager *)(SDR_PHYGRP_RWMGRGRP_ADDRESS | 0xC00);

static struct socfpga_sdr_reg_file *sdr_reg_file =
	(struct socfpga_sdr_reg_file *)SDR_PHYGRP_REGFILEGRP_ADDRESS;

static struct socfpga_sdr_scc_mgr *sdr_scc_mgr =
	(struct socfpga_sdr_scc_mgr *)(SDR_PHYGRP_SCCGRP_ADDRESS | 0xe00);

static struct socfpga_phy_mgr_cmd *phy_mgr_cmd =
	(struct socfpga_phy_mgr_cmd *)SDR_PHYGRP_PHYMGRGRP_ADDRESS;

static struct socfpga_phy_mgr_cfg *phy_mgr_cfg =
	(struct socfpga_phy_mgr_cfg *)(SDR_PHYGRP_PHYMGRGRP_ADDRESS | 0x40);

static struct socfpga_data_mgr *data_mgr =
	(struct socfpga_data_mgr *)SDR_PHYGRP_DATAMGRGRP_ADDRESS;

static struct socfpga_sdr_ctrl *sdr_ctrl =
	(struct socfpga_sdr_ctrl *)SDR_CTRLGRP_ADDRESS;

#define DELTA_D		1
/*
 * In order to reduce ROM size, most of the selectable calibration steps are
 * decided at compile time based on the user's calibration mode selection,
 * as captured by the STATIC_CALIB_STEPS selection below.
 *
 * However, to support simulation-time selection of fast simulation mode, where
 * we skip everything except the bare minimum, we need a few of the steps to
 * be dynamic. In those cases, we either use the DYNAMIC_CALIB_STEPS for the
 * check, which is based on the rtl-supplied value, or we dynamically compute
 * the value to use based on the dynamically-chosen calibration mode.
 */

#define DLEVEL 0
#define STATIC_IN_RTL_SIM 0
#define STATIC_SKIP_DELAY_LOOPS 0

#define STATIC_CALIB_STEPS (STATIC_IN_RTL_SIM | CALIB_SKIP_FULL_TEST | \
	STATIC_SKIP_DELAY_LOOPS)

/* calibration steps requested by the rtl */
uint16_t dyn_calib_steps;

/*
 * To make CALIB_SKIP_DELAY_LOOPS a dynamic conditional option
 * instead of static, we use boolean logic to select between
 * non-skip and skip values
 *
 * The mask is set to include all bits when not-skipping, but is
 * zero when skipping
 */

uint16_t skip_delay_mask;	/* mask off bits when skipping/not-skipping */

#define SKIP_DELAY_LOOP_VALUE_OR_ZERO(non_skip_value) \
	((non_skip_value) & skip_delay_mask)
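
/*
 * For example, when delay loops are skipped, skip_delay_mask is set to 0,
 * so SKIP_DELAY_LOOP_VALUE_OR_ZERO(0xff) evaluates to 0; when they are not
 * skipped, the mask is all-ones and the macro returns 0xff unchanged.
 */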

struct gbl_type *gbl;
struct param_type *param;
uint32_t curr_shadow_reg;

static uint32_t rw_mgr_mem_calibrate_write_test(uint32_t rank_bgn,
	uint32_t write_group, uint32_t use_dm,
	uint32_t all_correct, uint32_t *bit_chk, uint32_t all_ranks);

static void set_failing_group_stage(uint32_t group, uint32_t stage,
	uint32_t substage)
{
	/*
	 * Only set the global stage if there has not been any other
	 * failing group
	 */
	if (gbl->error_stage == CAL_STAGE_NIL)	{
		gbl->error_substage = substage;
		gbl->error_stage = stage;
		gbl->error_group = group;
	}
}

static void reg_file_set_group(u16 set_group)
{
	clrsetbits_le32(&sdr_reg_file->cur_stage, 0xffff0000, set_group << 16);
}

static void reg_file_set_stage(u8 set_stage)
{
	clrsetbits_le32(&sdr_reg_file->cur_stage, 0xffff, set_stage & 0xff);
}

static void reg_file_set_sub_stage(u8 set_sub_stage)
{
	set_sub_stage &= 0xff;
	clrsetbits_le32(&sdr_reg_file->cur_stage, 0xff00, set_sub_stage << 8);
}

/**
 * phy_mgr_initialize() - Initialize PHY Manager
 *
 * Initialize PHY Manager.
 */
static void phy_mgr_initialize(void)
{
	u32 ratio;

	debug("%s:%d\n", __func__, __LINE__);
	/* Calibration has control over path to memory */
	/*
	 * In Hard PHY this is a 2-bit control:
	 * 0: AFI Mux Select
	 * 1: DDIO Mux Select
	 */
	writel(0x3, &phy_mgr_cfg->mux_sel);

	/* USER memory clock is not stable, we begin initialization */
	writel(0, &phy_mgr_cfg->reset_mem_stbl);

	/* USER calibration status all set to zero */
	writel(0, &phy_mgr_cfg->cal_status);

	writel(0, &phy_mgr_cfg->cal_debug_info);

	/* Init params only if we do NOT skip calibration. */
	if ((dyn_calib_steps & CALIB_SKIP_ALL) == CALIB_SKIP_ALL)
		return;

	ratio = RW_MGR_MEM_DQ_PER_READ_DQS /
		RW_MGR_MEM_VIRTUAL_GROUPS_PER_READ_DQS;
	param->read_correct_mask_vg = (1 << ratio) - 1;
	param->write_correct_mask_vg = (1 << ratio) - 1;
	param->read_correct_mask = (1 << RW_MGR_MEM_DQ_PER_READ_DQS) - 1;
	param->write_correct_mask = (1 << RW_MGR_MEM_DQ_PER_WRITE_DQS) - 1;
	ratio = RW_MGR_MEM_DATA_WIDTH /
		RW_MGR_MEM_DATA_MASK_WIDTH;
	param->dm_correct_mask = (1 << ratio) - 1;
}
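
/*
 * For illustration, with RW_MGR_MEM_DQ_PER_READ_DQS = 8 and one virtual
 * group per read DQS (actual values come from the generated
 * sequencer_defines.h), ratio = 8 and read_correct_mask_vg =
 * (1 << 8) - 1 = 0xff: one mask bit per DQ pin that must read back
 * correctly.
 */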

/**
 * set_rank_and_odt_mask() - Set Rank and ODT mask
 * @rank:	Rank mask
 * @odt_mode:	ODT mode, OFF or READ_WRITE
 *
 * Set Rank and ODT mask (On-Die Termination).
 */
static void set_rank_and_odt_mask(const u32 rank, const u32 odt_mode)
{
	u32 odt_mask_0 = 0;
	u32 odt_mask_1 = 0;
	u32 cs_and_odt_mask;

	if (odt_mode == RW_MGR_ODT_MODE_OFF) {
		odt_mask_0 = 0x0;
		odt_mask_1 = 0x0;
	} else {	/* RW_MGR_ODT_MODE_READ_WRITE */
		switch (RW_MGR_MEM_NUMBER_OF_RANKS) {
		case 1:	/* 1 Rank */
			/* Read: ODT = 0 ; Write: ODT = 1 */
			odt_mask_0 = 0x0;
			odt_mask_1 = 0x1;
			break;
		case 2:	/* 2 Ranks */
			if (RW_MGR_MEM_NUMBER_OF_CS_PER_DIMM == 1) {
				/*
				 * - Dual-Slot , Single-Rank (1 CS per DIMM)
				 *   OR
				 * - RDIMM, 4 total CS (2 CS per DIMM, 2 DIMM)
				 *
				 * Since MEM_NUMBER_OF_RANKS is 2, they
				 * are both single rank with 2 CS each
				 * (special for RDIMM).
				 *
				 * Read: Turn on ODT on the opposite rank
				 * Write: Turn on ODT on all ranks
				 */
				odt_mask_0 = 0x3 & ~(1 << rank);
				odt_mask_1 = 0x3;
			} else {
				/*
				 * - Single-Slot , Dual-Rank (2 CS per DIMM)
				 *
				 * Read: Turn off ODT on all ranks
				 * Write: Turn on ODT on active rank
				 */
				odt_mask_0 = 0x0;
				odt_mask_1 = 0x3 & (1 << rank);
			}
			break;
		case 4:	/* 4 Ranks */
			/* Read:
			 * ----------+-----------------------+
			 *           |          ODT          |
			 * Read From +-----------------------+
			 *   Rank    |  3  |  2  |  1  |  0  |
			 * ----------+-----+-----+-----+-----+
			 *     0     |  0  |  1  |  0  |  0  |
			 *     1     |  1  |  0  |  0  |  0  |
			 *     2     |  0  |  0  |  0  |  1  |
			 *     3     |  0  |  0  |  1  |  0  |
			 * ----------+-----+-----+-----+-----+
			 *
			 * Write:
			 * ----------+-----------------------+
			 *           |          ODT          |
			 * Write To  +-----------------------+
			 *   Rank    |  3  |  2  |  1  |  0  |
			 * ----------+-----+-----+-----+-----+
			 *     0     |  0  |  1  |  0  |  1  |
			 *     1     |  1  |  0  |  1  |  0  |
			 *     2     |  0  |  1  |  0  |  1  |
			 *     3     |  1  |  0  |  1  |  0  |
			 * ----------+-----+-----+-----+-----+
			 */
			switch (rank) {
			case 0:
				odt_mask_0 = 0x4;
				odt_mask_1 = 0x5;
				break;
			case 1:
				odt_mask_0 = 0x8;
				odt_mask_1 = 0xA;
				break;
			case 2:
				odt_mask_0 = 0x1;
				odt_mask_1 = 0x5;
				break;
			case 3:
				odt_mask_0 = 0x2;
				odt_mask_1 = 0xA;
				break;
			}
			break;
		}
	}

	cs_and_odt_mask = (0xFF & ~(1 << rank)) |
			  ((0xFF & odt_mask_0) << 8) |
			  ((0xFF & odt_mask_1) << 16);
	writel(cs_and_odt_mask, SDR_PHYGRP_RWMGRGRP_ADDRESS |
				RW_MGR_SET_CS_AND_ODT_MASK_OFFSET);
}
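
/*
 * Layout of the register written above, for illustration: bits [7:0] hold
 * the chip-select mask (the addressed rank's bit is cleared), bits [15:8]
 * the read ODT mask (odt_mask_0) and bits [23:16] the write ODT mask
 * (odt_mask_1). E.g. rank 1 of 4 in READ_WRITE mode yields
 * 0xFD | (0x8 << 8) | (0xA << 16) = 0x0A08FD.
 */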

/**
 * scc_mgr_set() - Set SCC Manager register
 * @off:	Base offset in SCC Manager space
 * @grp:	Read/Write group
 * @val:	Value to be set
 *
 * This function sets the SCC Manager (Scan Chain Control Manager) register.
 */
static void scc_mgr_set(u32 off, u32 grp, u32 val)
{
	writel(val, SDR_PHYGRP_SCCGRP_ADDRESS | off | (grp << 2));
}
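
/*
 * Note on the addressing above: the per-group registers are word-sized, so
 * the group index is shifted left by 2 to form a byte offset. For example,
 * scc_mgr_set(SCC_MGR_DQS_IN_DELAY_OFFSET, 3, d) writes d to
 * SDR_PHYGRP_SCCGRP_ADDRESS | SCC_MGR_DQS_IN_DELAY_OFFSET | 0xc.
 */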

/**
 * scc_mgr_initialize() - Initialize SCC Manager registers
 *
 * Initialize SCC Manager registers.
 */
static void scc_mgr_initialize(void)
{
	/*
	 * Clear register file for HPS. 16 (2^4) is the size of the
	 * full register file in the scc mgr:
	 *	RFILE_DEPTH = 1 + log2(MEM_DQ_PER_DQS + 1 + MEM_DM_PER_DQS +
	 *			       MEM_IF_READ_DQS_WIDTH - 1);
	 */
	int i;

	for (i = 0; i < 16; i++) {
		debug_cond(DLEVEL == 1, "%s:%d: Clearing SCC RFILE index %u\n",
			   __func__, __LINE__, i);
		scc_mgr_set(SCC_MGR_HHP_RFILE_OFFSET, 0, i);
	}
}

static void scc_mgr_set_dqdqs_output_phase(uint32_t write_group, uint32_t phase)
{
	scc_mgr_set(SCC_MGR_DQDQS_OUT_PHASE_OFFSET, write_group, phase);
}

static void scc_mgr_set_dqs_bus_in_delay(uint32_t read_group, uint32_t delay)
{
	scc_mgr_set(SCC_MGR_DQS_IN_DELAY_OFFSET, read_group, delay);
}

static void scc_mgr_set_dqs_en_phase(uint32_t read_group, uint32_t phase)
{
	scc_mgr_set(SCC_MGR_DQS_EN_PHASE_OFFSET, read_group, phase);
}

static void scc_mgr_set_dqs_en_delay(uint32_t read_group, uint32_t delay)
{
	scc_mgr_set(SCC_MGR_DQS_EN_DELAY_OFFSET, read_group, delay);
}

static void scc_mgr_set_dqs_io_in_delay(uint32_t delay)
{
	scc_mgr_set(SCC_MGR_IO_IN_DELAY_OFFSET, RW_MGR_MEM_DQ_PER_WRITE_DQS,
		    delay);
}

static void scc_mgr_set_dq_in_delay(uint32_t dq_in_group, uint32_t delay)
{
	scc_mgr_set(SCC_MGR_IO_IN_DELAY_OFFSET, dq_in_group, delay);
}

static void scc_mgr_set_dq_out1_delay(uint32_t dq_in_group, uint32_t delay)
{
	scc_mgr_set(SCC_MGR_IO_OUT1_DELAY_OFFSET, dq_in_group, delay);
}

static void scc_mgr_set_dqs_out1_delay(uint32_t delay)
{
	scc_mgr_set(SCC_MGR_IO_OUT1_DELAY_OFFSET, RW_MGR_MEM_DQ_PER_WRITE_DQS,
		    delay);
}

static void scc_mgr_set_dm_out1_delay(uint32_t dm, uint32_t delay)
{
	scc_mgr_set(SCC_MGR_IO_OUT1_DELAY_OFFSET,
		    RW_MGR_MEM_DQ_PER_WRITE_DQS + 1 + dm,
		    delay);
}

/* load up dqs config settings */
static void scc_mgr_load_dqs(uint32_t dqs)
{
	writel(dqs, &sdr_scc_mgr->dqs_ena);
}

/* load up dqs io config settings */
static void scc_mgr_load_dqs_io(void)
{
	writel(0, &sdr_scc_mgr->dqs_io_ena);
}

/* load up dq config settings */
static void scc_mgr_load_dq(uint32_t dq_in_group)
{
	writel(dq_in_group, &sdr_scc_mgr->dq_ena);
}

/* load up dm config settings */
static void scc_mgr_load_dm(uint32_t dm)
{
	writel(dm, &sdr_scc_mgr->dm_ena);
}

/**
 * scc_mgr_set_all_ranks() - Set SCC Manager register for all ranks
 * @off:	Base offset in SCC Manager space
 * @grp:	Read/Write group
 * @val:	Value to be set
 * @update:	If non-zero, trigger SCC Manager update for all ranks
 *
 * This function sets the SCC Manager (Scan Chain Control Manager) register
 * and optionally triggers the SCC update for all ranks.
 */
static void scc_mgr_set_all_ranks(const u32 off, const u32 grp, const u32 val,
				  const int update)
{
	u32 r;

	for (r = 0; r < RW_MGR_MEM_NUMBER_OF_RANKS;
	     r += NUM_RANKS_PER_SHADOW_REG) {
		scc_mgr_set(off, grp, val);

		if (update || (r == 0)) {
			writel(grp, &sdr_scc_mgr->dqs_ena);
			writel(0, &sdr_scc_mgr->update);
		}
	}
}

static void scc_mgr_set_dqs_en_phase_all_ranks(u32 read_group, u32 phase)
{
	/*
	 * USER although the h/w doesn't support different phases per
	 * shadow register, for simplicity our scc manager modeling
	 * keeps different phase settings per shadow reg, and it's
	 * important for us to keep them in sync to match h/w.
	 * for efficiency, the scan chain update should occur only
	 * once to sr0.
	 */
	scc_mgr_set_all_ranks(SCC_MGR_DQS_EN_PHASE_OFFSET,
			      read_group, phase, 0);
}

static void scc_mgr_set_dqdqs_output_phase_all_ranks(uint32_t write_group,
						     uint32_t phase)
{
	/*
	 * USER although the h/w doesn't support different phases per
	 * shadow register, for simplicity our scc manager modeling
	 * keeps different phase settings per shadow reg, and it's
	 * important for us to keep them in sync to match h/w.
	 * for efficiency, the scan chain update should occur only
	 * once to sr0.
	 */
	scc_mgr_set_all_ranks(SCC_MGR_DQDQS_OUT_PHASE_OFFSET,
			      write_group, phase, 0);
}

static void scc_mgr_set_dqs_en_delay_all_ranks(uint32_t read_group,
					       uint32_t delay)
{
	/*
	 * In shadow register mode, the T11 settings are stored in
	 * registers in the core, which are updated by the DQS_ENA
	 * signals. Not issuing the SCC_MGR_UPD command allows us to
	 * save lots of rank switching overhead, by calling
	 * select_shadow_regs_for_update with update_scan_chains
	 * set to 0.
	 */
	scc_mgr_set_all_ranks(SCC_MGR_DQS_EN_DELAY_OFFSET,
			      read_group, delay, 1);
	writel(0, &sdr_scc_mgr->update);
}

/**
 * scc_mgr_set_oct_out1_delay() - Set OCT output delay
 * @write_group:	Write group
 * @delay:		Delay value
 *
 * This function sets the OCT output delay in SCC manager.
 */
static void scc_mgr_set_oct_out1_delay(const u32 write_group, const u32 delay)
{
	const int ratio = RW_MGR_MEM_IF_READ_DQS_WIDTH /
			  RW_MGR_MEM_IF_WRITE_DQS_WIDTH;
	const int base = write_group * ratio;
	int i;
	/*
	 * Load the setting in the SCC manager
	 * Although OCT affects only write data, the OCT delay is controlled
	 * by the DQS logic block which is instantiated once per read group.
	 * For protocols where a write group consists of multiple read groups,
	 * the setting must be set multiple times.
	 */
	for (i = 0; i < ratio; i++)
		scc_mgr_set(SCC_MGR_OCT_OUT1_DELAY_OFFSET, base + i, delay);
}

/**
 * scc_mgr_set_hhp_extras() - Set HHP extras.
 *
 * Load the fixed setting in the SCC manager HHP extras.
 */
static void scc_mgr_set_hhp_extras(void)
{
	/*
	 * Load the fixed setting in the SCC manager
	 * bits: 0:0 = 1'b1	- DQS bypass
	 * bits: 1:1 = 1'b1	- DQ bypass
	 * bits: 4:2 = 3'b001	- rfifo_mode
	 * bits: 6:5 = 2'b01	- rfifo clock_select
	 * bits: 7:7 = 1'b0	- separate gating from ungating setting
	 * bits: 8:8 = 1'b0	- separate OE from Output delay setting
	 */
	const u32 value = (0 << 8) | (0 << 7) | (1 << 5) |
			  (1 << 2) | (1 << 1) | (1 << 0);
	const u32 addr = SDR_PHYGRP_SCCGRP_ADDRESS |
			 SCC_MGR_HHP_GLOBALS_OFFSET |
			 SCC_MGR_HHP_EXTRAS_OFFSET;

	debug_cond(DLEVEL == 1, "%s:%d Setting HHP Extras\n",
		   __func__, __LINE__);
	writel(value, addr);
	debug_cond(DLEVEL == 1, "%s:%d Done Setting HHP Extras\n",
		   __func__, __LINE__);
}

/**
 * scc_mgr_zero_all() - Zero all DQS config
 *
 * Zero all DQS config.
 */
static void scc_mgr_zero_all(void)
{
	int i, r;

	/*
	 * USER Zero all DQS config settings, across all groups and all
	 * shadow registers
	 */
	for (r = 0; r < RW_MGR_MEM_NUMBER_OF_RANKS;
	     r += NUM_RANKS_PER_SHADOW_REG) {
		for (i = 0; i < RW_MGR_MEM_IF_READ_DQS_WIDTH; i++) {
			/*
			 * The phases actually don't exist on a per-rank basis,
			 * but there's no harm updating them several times, so
			 * let's keep the code simple.
			 */
			scc_mgr_set_dqs_bus_in_delay(i, IO_DQS_IN_RESERVE);
			scc_mgr_set_dqs_en_phase(i, 0);
			scc_mgr_set_dqs_en_delay(i, 0);
		}

		for (i = 0; i < RW_MGR_MEM_IF_WRITE_DQS_WIDTH; i++) {
			scc_mgr_set_dqdqs_output_phase(i, 0);
			/* Arria V/Cyclone V don't have out2. */
			scc_mgr_set_oct_out1_delay(i, IO_DQS_OUT_RESERVE);
		}
	}

	/* Multicast to all DQS group enables. */
	writel(0xff, &sdr_scc_mgr->dqs_ena);
	writel(0, &sdr_scc_mgr->update);
}

/**
 * scc_set_bypass_mode() - Set bypass mode and trigger SCC update
 * @write_group:	Write group
 *
 * Set bypass mode and trigger SCC update.
 */
static void scc_set_bypass_mode(const u32 write_group)
{
	/* Multicast to all DQ enables. */
	writel(0xff, &sdr_scc_mgr->dq_ena);
	writel(0xff, &sdr_scc_mgr->dm_ena);

	/* Update current DQS IO enable. */
	writel(0, &sdr_scc_mgr->dqs_io_ena);

	/* Update the DQS logic. */
	writel(write_group, &sdr_scc_mgr->dqs_ena);

	/* Hit update. */
	writel(0, &sdr_scc_mgr->update);
}

/**
 * scc_mgr_load_dqs_for_write_group() - Load DQS settings for Write Group
 * @write_group:	Write group
 *
 * Load DQS settings for Write Group, do not trigger SCC update.
 */
static void scc_mgr_load_dqs_for_write_group(const u32 write_group)
{
	const int ratio = RW_MGR_MEM_IF_READ_DQS_WIDTH /
			  RW_MGR_MEM_IF_WRITE_DQS_WIDTH;
	const int base = write_group * ratio;
	int i;
	/*
	 * Load the setting in the SCC manager
	 * Although OCT affects only write data, the OCT delay is controlled
	 * by the DQS logic block which is instantiated once per read group.
	 * For protocols where a write group consists of multiple read groups,
	 * the setting must be set multiple times.
	 */
	for (i = 0; i < ratio; i++)
		writel(base + i, &sdr_scc_mgr->dqs_ena);
}

/**
 * scc_mgr_zero_group() - Zero all configs for a group
 *
 * Zero DQ, DM, DQS and OCT configs for a group.
 */
static void scc_mgr_zero_group(const u32 write_group, const int out_only)
{
	int i, r;

	for (r = 0; r < RW_MGR_MEM_NUMBER_OF_RANKS;
	     r += NUM_RANKS_PER_SHADOW_REG) {
		/* Zero all DQ config settings. */
		for (i = 0; i < RW_MGR_MEM_DQ_PER_WRITE_DQS; i++) {
			scc_mgr_set_dq_out1_delay(i, 0);
			if (!out_only)
				scc_mgr_set_dq_in_delay(i, 0);
		}

		/* Multicast to all DQ enables. */
		writel(0xff, &sdr_scc_mgr->dq_ena);

		/* Zero all DM config settings. */
		for (i = 0; i < RW_MGR_NUM_DM_PER_WRITE_GROUP; i++)
			scc_mgr_set_dm_out1_delay(i, 0);

		/* Multicast to all DM enables. */
		writel(0xff, &sdr_scc_mgr->dm_ena);

		/* Zero all DQS IO settings. */
		if (!out_only)
			scc_mgr_set_dqs_io_in_delay(0);

		/* Arria V/Cyclone V don't have out2. */
		scc_mgr_set_dqs_out1_delay(IO_DQS_OUT_RESERVE);
		scc_mgr_set_oct_out1_delay(write_group, IO_DQS_OUT_RESERVE);
		scc_mgr_load_dqs_for_write_group(write_group);

		/* Multicast to all DQS IO enables (only 1 in total). */
		writel(0, &sdr_scc_mgr->dqs_io_ena);

		/* Hit update to zero everything. */
		writel(0, &sdr_scc_mgr->update);
	}
}

/*
 * apply and load a particular input delay for the DQ pins in a group
 * group_bgn is the index of the first dq pin (in the write group)
 */
static void scc_mgr_apply_group_dq_in_delay(uint32_t group_bgn, uint32_t delay)
{
	uint32_t i, p;

	for (i = 0, p = group_bgn; i < RW_MGR_MEM_DQ_PER_READ_DQS; i++, p++) {
		scc_mgr_set_dq_in_delay(p, delay);
		scc_mgr_load_dq(p);
	}
}

/**
 * scc_mgr_apply_group_dq_out1_delay() - Apply and load an output delay for the DQ pins in a group
 * @delay:		Delay value
 *
 * Apply and load a particular output delay for the DQ pins in a group.
 */
static void scc_mgr_apply_group_dq_out1_delay(const u32 delay)
{
	int i;

	for (i = 0; i < RW_MGR_MEM_DQ_PER_WRITE_DQS; i++) {
		scc_mgr_set_dq_out1_delay(i, delay);
		scc_mgr_load_dq(i);
	}
}

/* apply and load a particular output delay for the DM pins in a group */
static void scc_mgr_apply_group_dm_out1_delay(uint32_t delay1)
{
	uint32_t i;

	for (i = 0; i < RW_MGR_NUM_DM_PER_WRITE_GROUP; i++) {
		scc_mgr_set_dm_out1_delay(i, delay1);
		scc_mgr_load_dm(i);
	}
}


/* apply and load delay on both DQS and OCT out1 */
static void scc_mgr_apply_group_dqs_io_and_oct_out1(uint32_t write_group,
						    uint32_t delay)
{
	scc_mgr_set_dqs_out1_delay(delay);
	scc_mgr_load_dqs_io();

	scc_mgr_set_oct_out1_delay(write_group, delay);
	scc_mgr_load_dqs_for_write_group(write_group);
}

/**
 * scc_mgr_apply_group_all_out_delay_add() - Apply a delay to the entire output side: DQ, DM, DQS, OCT
 * @write_group:	Write group
 * @delay:		Delay value
 *
 * Apply a delay to the entire output side: DQ, DM, DQS, OCT.
 */
static void scc_mgr_apply_group_all_out_delay_add(const u32 write_group,
						  const u32 delay)
{
	u32 i, new_delay;

	/* DQ shift */
	for (i = 0; i < RW_MGR_MEM_DQ_PER_WRITE_DQS; i++)
		scc_mgr_load_dq(i);

	/* DM shift */
	for (i = 0; i < RW_MGR_NUM_DM_PER_WRITE_GROUP; i++)
		scc_mgr_load_dm(i);

	/* DQS shift */
	new_delay = READ_SCC_DQS_IO_OUT2_DELAY + delay;
	if (new_delay > IO_IO_OUT2_DELAY_MAX) {
		debug_cond(DLEVEL == 1,
			   "%s:%d (%u, %u) DQS: %u > %d; adding %u to OUT1\n",
			   __func__, __LINE__, write_group, delay, new_delay,
			   IO_IO_OUT2_DELAY_MAX,
			   new_delay - IO_IO_OUT2_DELAY_MAX);
		new_delay -= IO_IO_OUT2_DELAY_MAX;
		scc_mgr_set_dqs_out1_delay(new_delay);
	}

	scc_mgr_load_dqs_io();

	/* OCT shift */
	new_delay = READ_SCC_OCT_OUT2_DELAY + delay;
	if (new_delay > IO_IO_OUT2_DELAY_MAX) {
		debug_cond(DLEVEL == 1,
			   "%s:%d (%u, %u) DQS: %u > %d; adding %u to OUT1\n",
			   __func__, __LINE__, write_group, delay,
			   new_delay, IO_IO_OUT2_DELAY_MAX,
			   new_delay - IO_IO_OUT2_DELAY_MAX);
		new_delay -= IO_IO_OUT2_DELAY_MAX;
		scc_mgr_set_oct_out1_delay(write_group, new_delay);
	}

	scc_mgr_load_dqs_for_write_group(write_group);
}
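
/*
 * Worked example for the OUT2 -> OUT1 spill-over above, with illustrative
 * (not device-accurate) numbers: if READ_SCC_DQS_IO_OUT2_DELAY = 10,
 * delay = 5 and IO_IO_OUT2_DELAY_MAX = 12, then new_delay = 15 exceeds the
 * OUT2 range, so the remainder 15 - 12 = 3 is programmed into the OUT1
 * delay register instead.
 */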

/**
 * scc_mgr_apply_group_all_out_delay_add_all_ranks() - Apply a delay to the entire output side to all ranks
 * @write_group:	Write group
 * @delay:		Delay value
 *
 * Apply a delay to the entire output side (DQ, DM, DQS, OCT) to all ranks.
 */
static void
scc_mgr_apply_group_all_out_delay_add_all_ranks(const u32 write_group,
						const u32 delay)
{
	int r;

	for (r = 0; r < RW_MGR_MEM_NUMBER_OF_RANKS;
	     r += NUM_RANKS_PER_SHADOW_REG) {
		scc_mgr_apply_group_all_out_delay_add(write_group, delay);
		writel(0, &sdr_scc_mgr->update);
	}
}

/**
 * set_jump_as_return() - Return instruction optimization
 *
 * This optimization recovers some slots in the DDR3 inst_rom; it could be
 * applied to other protocols as well if we wanted to.
 */
static void set_jump_as_return(void)
{
	/*
	 * To save space, we replace return with a jump to a special shared
	 * RETURN instruction, and set the counter to a large value so that
	 * we always jump.
	 */
	writel(0xff, &sdr_rw_load_mgr_regs->load_cntr0);
	writel(RW_MGR_RETURN, &sdr_rw_load_jump_mgr_regs->load_jump_add0);
}

/*
 * Should always be called with constants as the argument, to ensure all
 * computations are performed at compile time.
 */
static void delay_for_n_mem_clocks(const uint32_t clocks)
{
	uint32_t afi_clocks;
	uint8_t inner = 0;
	uint8_t outer = 0;
	uint16_t c_loop = 0;

	debug("%s:%d: clocks=%u ... start\n", __func__, __LINE__, clocks);

	/* Scale (rounding up) to get afi clocks. */
	afi_clocks = (clocks + AFI_RATE_RATIO - 1) / AFI_RATE_RATIO;

	/*
	 * Note, we don't bother accounting for being off a little bit
	 * because of a few extra instructions in outer loops
	 * Note, the loops have a test at the end, and do the test before
	 * the decrement, and so always perform the loop
	 * 1 time more than the counter value
	 */
	if (afi_clocks == 0) {
		;
	} else if (afi_clocks <= 0x100) {
		inner = afi_clocks - 1;
		outer = 0;
		c_loop = 0;
	} else if (afi_clocks <= 0x10000) {
		inner = 0xff;
		outer = (afi_clocks - 1) >> 8;
		c_loop = 0;
	} else {
		inner = 0xff;
		outer = 0xff;
		c_loop = (afi_clocks - 1) >> 16;
	}

	/*
	 * rom instructions are structured as follows:
	 *
	 *    IDLE_LOOP2: jnz cntr0, TARGET_A
	 *    IDLE_LOOP1: jnz cntr1, TARGET_B
	 *                return
	 *
	 * so, when doing nested loops, TARGET_A is set to IDLE_LOOP2, and
	 * TARGET_B is set to IDLE_LOOP2 as well
	 *
	 * if we have no outer loop, though, then we can use IDLE_LOOP1 only,
	 * and set TARGET_B to IDLE_LOOP1 and we skip IDLE_LOOP2 entirely
	 *
	 * a little confusing, but it helps save precious space in the inst_rom
	 * and sequencer rom and keeps the delays more accurate and reduces
	 * overhead
	 */
	if (afi_clocks <= 0x100) {
		writel(SKIP_DELAY_LOOP_VALUE_OR_ZERO(inner),
		       &sdr_rw_load_mgr_regs->load_cntr1);

		writel(RW_MGR_IDLE_LOOP1,
		       &sdr_rw_load_jump_mgr_regs->load_jump_add1);

		writel(RW_MGR_IDLE_LOOP1, SDR_PHYGRP_RWMGRGRP_ADDRESS |
					  RW_MGR_RUN_SINGLE_GROUP_OFFSET);
	} else {
		writel(SKIP_DELAY_LOOP_VALUE_OR_ZERO(inner),
		       &sdr_rw_load_mgr_regs->load_cntr0);

		writel(SKIP_DELAY_LOOP_VALUE_OR_ZERO(outer),
		       &sdr_rw_load_mgr_regs->load_cntr1);

		writel(RW_MGR_IDLE_LOOP2,
		       &sdr_rw_load_jump_mgr_regs->load_jump_add0);

		writel(RW_MGR_IDLE_LOOP2,
		       &sdr_rw_load_jump_mgr_regs->load_jump_add1);

		/* hack to get around compiler not being smart enough */
		if (afi_clocks <= 0x10000) {
			/* only need to run once */
			writel(RW_MGR_IDLE_LOOP2, SDR_PHYGRP_RWMGRGRP_ADDRESS |
						  RW_MGR_RUN_SINGLE_GROUP_OFFSET);
		} else {
			do {
				writel(RW_MGR_IDLE_LOOP2,
				       SDR_PHYGRP_RWMGRGRP_ADDRESS |
				       RW_MGR_RUN_SINGLE_GROUP_OFFSET);
			} while (c_loop-- != 0);
		}
	}
	debug("%s:%d clocks=%u ... end\n", __func__, __LINE__, clocks);
}
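
/*
 * Worked example of the counter decomposition above, assuming
 * AFI_RATE_RATIO == 2 (half-rate AFI interface):
 * delay_for_n_mem_clocks(512) gives afi_clocks = 256, which still fits the
 * single-loop case, so inner = 255 and only IDLE_LOOP1 runs. For
 * afi_clocks = 0x10001 and above, both hardware counters saturate at 0xff
 * and the software loop re-issues the IDLE_LOOP2 run c_loop + 1 times.
 */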

/**
 * rw_mgr_mem_init_load_regs() - Load instruction registers
 * @cntr0:	Counter 0 value
 * @cntr1:	Counter 1 value
 * @cntr2:	Counter 2 value
 * @jump:	Jump instruction value
 *
 * Load instruction registers.
 */
static void rw_mgr_mem_init_load_regs(u32 cntr0, u32 cntr1, u32 cntr2, u32 jump)
{
	uint32_t grpaddr = SDR_PHYGRP_RWMGRGRP_ADDRESS |
			   RW_MGR_RUN_SINGLE_GROUP_OFFSET;

	/* Load counters */
	writel(SKIP_DELAY_LOOP_VALUE_OR_ZERO(cntr0),
	       &sdr_rw_load_mgr_regs->load_cntr0);
	writel(SKIP_DELAY_LOOP_VALUE_OR_ZERO(cntr1),
	       &sdr_rw_load_mgr_regs->load_cntr1);
	writel(SKIP_DELAY_LOOP_VALUE_OR_ZERO(cntr2),
	       &sdr_rw_load_mgr_regs->load_cntr2);

	/* Load jump address */
	writel(jump, &sdr_rw_load_jump_mgr_regs->load_jump_add0);
	writel(jump, &sdr_rw_load_jump_mgr_regs->load_jump_add1);
	writel(jump, &sdr_rw_load_jump_mgr_regs->load_jump_add2);

	/* Execute count instruction */
	writel(jump, grpaddr);
}

/**
 * rw_mgr_mem_load_user() - Load user calibration values
 * @fin1:	Final instruction 1
 * @fin2:	Final instruction 2
 * @precharge:	If 1, precharge the banks at the end
 *
 * Load user calibration values and optionally precharge the banks.
 */
static void rw_mgr_mem_load_user(const u32 fin1, const u32 fin2,
				 const int precharge)
{
	u32 grpaddr = SDR_PHYGRP_RWMGRGRP_ADDRESS |
		      RW_MGR_RUN_SINGLE_GROUP_OFFSET;
	u32 r;

	for (r = 0; r < RW_MGR_MEM_NUMBER_OF_RANKS; r++) {
		if (param->skip_ranks[r]) {
			/* request to skip the rank */
			continue;
		}

		/* set rank */
		set_rank_and_odt_mask(r, RW_MGR_ODT_MODE_OFF);

		/* precharge all banks ... */
		if (precharge)
			writel(RW_MGR_PRECHARGE_ALL, grpaddr);

		/*
		 * USER Use mirrored commands for odd ranks if address
		 * mirroring is on
		 */
		if ((RW_MGR_MEM_ADDRESS_MIRRORING >> r) & 0x1) {
			set_jump_as_return();
			writel(RW_MGR_MRS2_MIRR, grpaddr);
			delay_for_n_mem_clocks(4);
			set_jump_as_return();
			writel(RW_MGR_MRS3_MIRR, grpaddr);
			delay_for_n_mem_clocks(4);
			set_jump_as_return();
			writel(RW_MGR_MRS1_MIRR, grpaddr);
			delay_for_n_mem_clocks(4);
			set_jump_as_return();
			writel(fin1, grpaddr);
		} else {
			set_jump_as_return();
			writel(RW_MGR_MRS2, grpaddr);
			delay_for_n_mem_clocks(4);
			set_jump_as_return();
			writel(RW_MGR_MRS3, grpaddr);
			delay_for_n_mem_clocks(4);
			set_jump_as_return();
			writel(RW_MGR_MRS1, grpaddr);
			set_jump_as_return();
			writel(fin2, grpaddr);
		}

		if (precharge)
			continue;

		set_jump_as_return();
		writel(RW_MGR_ZQCL, grpaddr);

		/* tZQinit = tDLLK = 512 ck cycles */
		delay_for_n_mem_clocks(512);
	}
}

/**
 * rw_mgr_mem_initialize() - Initialize RW Manager
 *
 * Initialize RW Manager.
 */
static void rw_mgr_mem_initialize(void)
{
	debug("%s:%d\n", __func__, __LINE__);

	/* The reset / cke part of initialization is broadcasted to all ranks */
	writel(RW_MGR_RANK_ALL, SDR_PHYGRP_RWMGRGRP_ADDRESS |
				RW_MGR_SET_CS_AND_ODT_MASK_OFFSET);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -0500967
968 /*
969 * Here's how you load register for a loop
970 * Counters are located @ 0x800
971 * Jump address are located @ 0xC00
972 * For both, registers 0 to 3 are selected using bits 3 and 2, like
973 * in 0x800, 0x804, 0x808, 0x80C and 0xC00, 0xC04, 0xC08, 0xC0C
974 * I know this ain't pretty, but Avalon bus throws away the 2 least
975 * significant bits
976 */
977
	/* Start with memory RESET activated */

	/* tINIT = 200us */

	/*
	 * 200us @ 266MHz (3.75 ns) ~ 54000 clock cycles
	 * If a and b are the number of iteration in 2 nested loops
	 * it takes the following number of cycles to complete the operation:
	 *	number_of_cycles = ((2 + n) * a + 2) * b
	 * where n is the number of instruction in the inner loop
	 * One possible solution is n = 0 , a = 256 , b = 106 => a = FF,
	 * b = 6A
	 */
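	/*
	 * Quick arithmetic check of the solution above:
	 * ((2 + 0) * 256 + 2) * 106 = 54484 cycles >= 54000, so the tINIT
	 * requirement is met (0xFF and 0x6A are the hex encodings of a and b).
	 */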
	rw_mgr_mem_init_load_regs(SEQ_TINIT_CNTR0_VAL, SEQ_TINIT_CNTR1_VAL,
				  SEQ_TINIT_CNTR2_VAL,
				  RW_MGR_INIT_RESET_0_CKE_0);

	/* Indicate that memory is stable. */
	writel(1, &phy_mgr_cfg->reset_mem_stbl);

	/*
	 * transition the RESET to high
	 * Wait for 500us
	 */

	/*
	 * 500us @ 266MHz (3.75 ns) ~ 134000 clock cycles
	 * If a and b are the number of iteration in 2 nested loops
	 * it takes the following number of cycles to complete the operation
	 *	number_of_cycles = ((2 + n) * a + 2) * b
	 * where n is the number of instruction in the inner loop
	 * One possible solution is n = 2 , a = 131 , b = 256 => a = 83,
	 * b = FF
	 */
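	/*
	 * Again checking the arithmetic: ((2 + 2) * 131 + 2) * 256 =
	 * 134656 cycles >= 134000, so the 500us requirement is met
	 * (0x83 and 0xFF are the hex encodings of a and b).
	 */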
	rw_mgr_mem_init_load_regs(SEQ_TRESET_CNTR0_VAL, SEQ_TRESET_CNTR1_VAL,
				  SEQ_TRESET_CNTR2_VAL,
				  RW_MGR_INIT_RESET_1_CKE_0);

	/* Bring up clock enable. */

	/* tXRP < 250 ck cycles */
	delay_for_n_mem_clocks(250);

	rw_mgr_mem_load_user(RW_MGR_MRS0_DLL_RESET_MIRR, RW_MGR_MRS0_DLL_RESET,
			     0);
}

/*
 * At the end of calibration we have to program the user settings in, and
 * USER hand off the memory to the user.
 */
static void rw_mgr_mem_handoff(void)
{
	rw_mgr_mem_load_user(RW_MGR_MRS0_USER_MIRR, RW_MGR_MRS0_USER, 1);
	/*
	 * USER need to wait tMOD (12CK or 15ns) time before issuing
	 * other commands, but we will have plenty of NIOS cycles before
	 * actual handoff so it's okay.
	 */
}

/**
 * rw_mgr_mem_calibrate_read_test_patterns() - Read back test patterns
 * @rank_bgn:	Rank number
 * @group:	Read/Write Group
 * @all_ranks:	Test all ranks
 *
 * Performs a guaranteed read on the patterns we are going to use during a
 * read test to ensure memory works.
 */
static int
rw_mgr_mem_calibrate_read_test_patterns(const u32 rank_bgn, const u32 group,
					const u32 all_ranks)
{
	const u32 addr = SDR_PHYGRP_RWMGRGRP_ADDRESS |
			 RW_MGR_RUN_SINGLE_GROUP_OFFSET;
	const u32 addr_offset =
		(group * RW_MGR_MEM_VIRTUAL_GROUPS_PER_READ_DQS) << 2;
	const u32 rank_end = all_ranks ?
				RW_MGR_MEM_NUMBER_OF_RANKS :
				(rank_bgn + NUM_RANKS_PER_SHADOW_REG);
	const u32 shift_ratio = RW_MGR_MEM_DQ_PER_READ_DQS /
				RW_MGR_MEM_VIRTUAL_GROUPS_PER_READ_DQS;
	const u32 correct_mask_vg = param->read_correct_mask_vg;

	u32 tmp_bit_chk, base_rw_mgr, bit_chk;
	int vg, r;
	int ret = 0;

	bit_chk = param->read_correct_mask;

	for (r = rank_bgn; r < rank_end; r++) {
		/* Request to skip the rank */
		if (param->skip_ranks[r])
			continue;

		/* Set rank */
		set_rank_and_odt_mask(r, RW_MGR_ODT_MODE_READ_WRITE);

		/* Load up a constant bursts of read commands */
		writel(0x20, &sdr_rw_load_mgr_regs->load_cntr0);
		writel(RW_MGR_GUARANTEED_READ,
		       &sdr_rw_load_jump_mgr_regs->load_jump_add0);

		writel(0x20, &sdr_rw_load_mgr_regs->load_cntr1);
		writel(RW_MGR_GUARANTEED_READ_CONT,
		       &sdr_rw_load_jump_mgr_regs->load_jump_add1);

		tmp_bit_chk = 0;
		for (vg = RW_MGR_MEM_VIRTUAL_GROUPS_PER_READ_DQS - 1;
		     vg >= 0; vg--) {
			/* Reset the FIFOs to get pointers to known state. */
			writel(0, &phy_mgr_cmd->fifo_reset);
			writel(0, SDR_PHYGRP_RWMGRGRP_ADDRESS |
				  RW_MGR_RESET_READ_DATAPATH_OFFSET);
			writel(RW_MGR_GUARANTEED_READ,
			       addr + addr_offset + (vg << 2));

			base_rw_mgr = readl(SDR_PHYGRP_RWMGRGRP_ADDRESS);
			tmp_bit_chk <<= shift_ratio;
			tmp_bit_chk |= correct_mask_vg & ~base_rw_mgr;
		}

		bit_chk &= tmp_bit_chk;
	}

	writel(RW_MGR_CLEAR_DQS_ENABLE, addr + (group << 2));

	set_rank_and_odt_mask(0, RW_MGR_ODT_MODE_OFF);

	if (bit_chk != param->read_correct_mask)
		ret = -EIO;

	debug_cond(DLEVEL == 1,
		   "%s:%d test_load_patterns(%u,ALL) => (%u == %u) => %i\n",
		   __func__, __LINE__, group, bit_chk,
		   param->read_correct_mask, ret);

	return ret;
}
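
/*
 * Reading the accumulation above, for illustration: with 8 DQ per read DQS
 * and 2 virtual groups, shift_ratio = 4, so after the vg loop tmp_bit_chk
 * holds one 4-bit field per virtual group, and a bit stays set in bit_chk
 * only if the corresponding DQ pin read back correctly on every rank tested.
 */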

/**
 * rw_mgr_mem_calibrate_read_load_patterns() - Load up the patterns for read test
 * @rank_bgn:	Rank number
 * @all_ranks:	Test all ranks
 *
 * Load up the patterns we are going to use during a read test.
 */
static void rw_mgr_mem_calibrate_read_load_patterns(const u32 rank_bgn,
						    const int all_ranks)
{
	const u32 rank_end = all_ranks ?
			RW_MGR_MEM_NUMBER_OF_RANKS :
			(rank_bgn + NUM_RANKS_PER_SHADOW_REG);
	u32 r;

	debug("%s:%d\n", __func__, __LINE__);

	for (r = rank_bgn; r < rank_end; r++) {
		if (param->skip_ranks[r])
			/* request to skip the rank */
			continue;

		/* set rank */
		set_rank_and_odt_mask(r, RW_MGR_ODT_MODE_READ_WRITE);

		/* Load up a constant bursts */
		writel(0x20, &sdr_rw_load_mgr_regs->load_cntr0);

		writel(RW_MGR_GUARANTEED_WRITE_WAIT0,
		       &sdr_rw_load_jump_mgr_regs->load_jump_add0);

		writel(0x20, &sdr_rw_load_mgr_regs->load_cntr1);

		writel(RW_MGR_GUARANTEED_WRITE_WAIT1,
		       &sdr_rw_load_jump_mgr_regs->load_jump_add1);

		writel(0x04, &sdr_rw_load_mgr_regs->load_cntr2);

		writel(RW_MGR_GUARANTEED_WRITE_WAIT2,
		       &sdr_rw_load_jump_mgr_regs->load_jump_add2);

		writel(0x04, &sdr_rw_load_mgr_regs->load_cntr3);

		writel(RW_MGR_GUARANTEED_WRITE_WAIT3,
		       &sdr_rw_load_jump_mgr_regs->load_jump_add3);

		writel(RW_MGR_GUARANTEED_WRITE, SDR_PHYGRP_RWMGRGRP_ADDRESS |
						RW_MGR_RUN_SINGLE_GROUP_OFFSET);
	}

	set_rank_and_odt_mask(0, RW_MGR_ODT_MODE_OFF);
}

/*
 * Try a read and see if it returns correct data back. Has dummy reads
 * inserted into the mix, used to align DQS enable. Has more thorough checks
 * than the regular read test.
 */
static uint32_t rw_mgr_mem_calibrate_read_test(uint32_t rank_bgn, uint32_t group,
	uint32_t num_tries, uint32_t all_correct, uint32_t *bit_chk,
	uint32_t all_groups, uint32_t all_ranks)
{
	uint32_t r, vg;
	uint32_t correct_mask_vg;
	uint32_t tmp_bit_chk;
	uint32_t rank_end = all_ranks ? RW_MGR_MEM_NUMBER_OF_RANKS :
		(rank_bgn + NUM_RANKS_PER_SHADOW_REG);
	uint32_t addr;
	uint32_t base_rw_mgr;

	*bit_chk = param->read_correct_mask;
	correct_mask_vg = param->read_correct_mask_vg;

	uint32_t quick_read_mode = (((STATIC_CALIB_STEPS) &
		CALIB_SKIP_DELAY_SWEEPS) && ENABLE_SUPER_QUICK_CALIBRATION);

	for (r = rank_bgn; r < rank_end; r++) {
		if (param->skip_ranks[r])
			/* request to skip the rank */
			continue;

		/* set rank */
		set_rank_and_odt_mask(r, RW_MGR_ODT_MODE_READ_WRITE);

		writel(0x10, &sdr_rw_load_mgr_regs->load_cntr1);

		writel(RW_MGR_READ_B2B_WAIT1,
		       &sdr_rw_load_jump_mgr_regs->load_jump_add1);

		writel(0x10, &sdr_rw_load_mgr_regs->load_cntr2);
		writel(RW_MGR_READ_B2B_WAIT2,
		       &sdr_rw_load_jump_mgr_regs->load_jump_add2);

		if (quick_read_mode)
			writel(0x1, &sdr_rw_load_mgr_regs->load_cntr0);
			/* need at least two (1+1) reads to capture failures */
		else if (all_groups)
			writel(0x06, &sdr_rw_load_mgr_regs->load_cntr0);
		else
			writel(0x32, &sdr_rw_load_mgr_regs->load_cntr0);

		writel(RW_MGR_READ_B2B,
		       &sdr_rw_load_jump_mgr_regs->load_jump_add0);
		if (all_groups)
			writel(RW_MGR_MEM_IF_READ_DQS_WIDTH *
			       RW_MGR_MEM_VIRTUAL_GROUPS_PER_READ_DQS - 1,
			       &sdr_rw_load_mgr_regs->load_cntr3);
		else
			writel(0x0, &sdr_rw_load_mgr_regs->load_cntr3);

		writel(RW_MGR_READ_B2B,
		       &sdr_rw_load_jump_mgr_regs->load_jump_add3);

		tmp_bit_chk = 0;
		for (vg = RW_MGR_MEM_VIRTUAL_GROUPS_PER_READ_DQS - 1; ; vg--) {
			/* reset the fifos to get pointers to known state */
			writel(0, &phy_mgr_cmd->fifo_reset);
			writel(0, SDR_PHYGRP_RWMGRGRP_ADDRESS |
				  RW_MGR_RESET_READ_DATAPATH_OFFSET);

			tmp_bit_chk = tmp_bit_chk << (RW_MGR_MEM_DQ_PER_READ_DQS
				/ RW_MGR_MEM_VIRTUAL_GROUPS_PER_READ_DQS);

			if (all_groups)
				addr = SDR_PHYGRP_RWMGRGRP_ADDRESS |
				       RW_MGR_RUN_ALL_GROUPS_OFFSET;
			else
				addr = SDR_PHYGRP_RWMGRGRP_ADDRESS |
				       RW_MGR_RUN_SINGLE_GROUP_OFFSET;

			writel(RW_MGR_READ_B2B, addr +
			       ((group * RW_MGR_MEM_VIRTUAL_GROUPS_PER_READ_DQS +
				 vg) << 2));

			base_rw_mgr = readl(SDR_PHYGRP_RWMGRGRP_ADDRESS);
			tmp_bit_chk = tmp_bit_chk | (correct_mask_vg & ~(base_rw_mgr));

			if (vg == 0)
				break;
		}
		*bit_chk &= tmp_bit_chk;
	}

	addr = SDR_PHYGRP_RWMGRGRP_ADDRESS | RW_MGR_RUN_SINGLE_GROUP_OFFSET;
	writel(RW_MGR_CLEAR_DQS_ENABLE, addr + (group << 2));

	if (all_correct) {
		set_rank_and_odt_mask(0, RW_MGR_ODT_MODE_OFF);
		debug_cond(DLEVEL == 2,
			   "%s:%d read_test(%u,ALL,%u) => (%u == %u) => %lu",
			   __func__, __LINE__, group, all_groups, *bit_chk,
			   param->read_correct_mask,
			   (long unsigned int)(*bit_chk ==
			   param->read_correct_mask));
		return *bit_chk == param->read_correct_mask;
	} else {
		set_rank_and_odt_mask(0, RW_MGR_ODT_MODE_OFF);
		debug_cond(DLEVEL == 2,
			   "%s:%d read_test(%u,ONE,%u) => (%u != %lu) => %lu\n",
			   __func__, __LINE__, group, all_groups, *bit_chk,
			   (long unsigned int)0,
			   (long unsigned int)(*bit_chk != 0x00));
		return *bit_chk != 0x00;
	}
}

static uint32_t rw_mgr_mem_calibrate_read_test_all_ranks(uint32_t group,
	uint32_t num_tries, uint32_t all_correct, uint32_t *bit_chk,
	uint32_t all_groups)
{
	return rw_mgr_mem_calibrate_read_test(0, group, num_tries, all_correct,
					      bit_chk, all_groups, 1);
}

/**
 * rw_mgr_incr_vfifo() - Increase VFIFO value
 * @grp:	Read/Write group
 *
 * Increase VFIFO value.
 */
static void rw_mgr_incr_vfifo(const u32 grp)
{
	writel(grp, &phy_mgr_cmd->inc_vfifo_hard_phy);
}

/**
 * rw_mgr_decr_vfifo() - Decrease VFIFO value
 * @grp:	Read/Write group
 *
 * Decrease VFIFO value.
 */
static void rw_mgr_decr_vfifo(const u32 grp)
{
	u32 i;

	for (i = 0; i < VFIFO_SIZE - 1; i++)
		rw_mgr_incr_vfifo(grp);
}

/**
 * find_vfifo_failing_read() - Push VFIFO to get a failing read
 * @grp:	Read/Write group
 *
 * Push VFIFO until a failing read happens.
 */
static int find_vfifo_failing_read(const u32 grp)
{
	u32 v, ret, bit_chk, fail_cnt = 0;

	for (v = 0; v < VFIFO_SIZE; v++) {
		debug_cond(DLEVEL == 2, "%s:%d: vfifo %u\n",
			   __func__, __LINE__, v);
		ret = rw_mgr_mem_calibrate_read_test_all_ranks(grp, 1,
				PASS_ONE_BIT, &bit_chk, 0);
		if (!ret) {
			fail_cnt++;

			if (fail_cnt == 2)
				return v;
		}

		/* Fiddle with FIFO. */
		rw_mgr_incr_vfifo(grp);
	}

	/* No failing read found! Something must have gone wrong. */
	debug_cond(DLEVEL == 2, "%s:%d: vfifo failed\n", __func__, __LINE__);
	return 0;
}

/**
 * sdr_find_phase() - Find DQS enable phase
 * @working:	If 1, look for working phase, if 0, look for non-working phase
 * @grp:	Read/Write group
 * @work:	Working window position
 * @i:		Iterator
 * @p:		DQS Phase Iterator
 *
 * Find working or non-working DQS enable phase setting.
 */
static int sdr_find_phase(int working, const u32 grp, u32 *work,
			  u32 *i, u32 *p)
{
	u32 ret, bit_chk;
	const u32 end = VFIFO_SIZE + (working ? 0 : 1);

	for (; *i < end; (*i)++) {
1362 if (working)
1363 *p = 0;
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001364
Marek Vasutf2b02d42015-07-19 05:26:49 +02001365 for (; *p <= IO_DQS_EN_PHASE_MAX; (*p)++) {
1366 scc_mgr_set_dqs_en_phase_all_ranks(grp, *p);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001367
Marek Vasutf2b02d42015-07-19 05:26:49 +02001368 ret = rw_mgr_mem_calibrate_read_test_all_ranks(grp, 1,
1369 PASS_ONE_BIT, &bit_chk, 0);
Marek Vasutf2b02d42015-07-19 05:26:49 +02001370 if (!working)
1371 ret = !ret;
1372
1373 if (ret)
1374 return 0;
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001375
Marek Vasutf2b02d42015-07-19 05:26:49 +02001376 *work += IO_DELAY_PER_OPA_TAP;
1377 }
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001378
Marek Vasutf2b02d42015-07-19 05:26:49 +02001379 if (*p > IO_DQS_EN_PHASE_MAX) {
1380 /* Fiddle with FIFO. */
Marek Vasut42e43ab2015-07-19 06:37:51 +02001381 rw_mgr_incr_vfifo(grp);
Marek Vasutf2b02d42015-07-19 05:26:49 +02001382 if (!working)
1383 *p = 0;
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001384 }
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001385 }
1386
Marek Vasutf2b02d42015-07-19 05:26:49 +02001387 return -EINVAL;
1388}
1389
Marek Vasut6394ef52015-07-19 06:04:00 +02001390/**
1391 * sdr_working_phase() - Find working DQS enable phase
1392 * @grp: Read/Write group
1393 * @work_bgn: Working window start position
Marek Vasut6394ef52015-07-19 06:04:00 +02001394 * @d: dtaps output value
1395 * @p: DQS Phase Iterator
1396 * @i: Iterator
1397 *
1398 * Find working DQS enable phase setting.
1399 */
Marek Vasut42e43ab2015-07-19 06:37:51 +02001400static int sdr_working_phase(const u32 grp, u32 *work_bgn, u32 *d,
Marek Vasut6394ef52015-07-19 06:04:00 +02001401 u32 *p, u32 *i)
Marek Vasutf2b02d42015-07-19 05:26:49 +02001402{
Marek Vasut2700b9c2015-07-19 05:40:06 +02001403 const u32 dtaps_per_ptap = IO_DELAY_PER_OPA_TAP /
1404 IO_DELAY_PER_DQS_EN_DCHAIN_TAP;
Marek Vasutf2b02d42015-07-19 05:26:49 +02001405 int ret;
1406
1407 *work_bgn = 0;
1408
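	/*
	 * Sketch of the sweep, assuming IO_DELAY_PER_OPA_TAP = 416 ps and
	 * IO_DELAY_PER_DQS_EN_DCHAIN_TAP = 25 ps (actual values are
	 * device-specific): dtaps_per_ptap = 416 / 25 = 16, so up to 17
	 * delay-chain settings are tried, each with a full phase sweep
	 * inside sdr_find_phase().
	 */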
1409 for (*d = 0; *d <= dtaps_per_ptap; (*d)++) {
1410 *i = 0;
1411 scc_mgr_set_dqs_en_delay_all_ranks(grp, *d);
Marek Vasut42e43ab2015-07-19 06:37:51 +02001412 ret = sdr_find_phase(1, grp, work_bgn, i, p);
Marek Vasutf2b02d42015-07-19 05:26:49 +02001413 if (!ret)
1414 return 0;
1415 *work_bgn += IO_DELAY_PER_DQS_EN_DCHAIN_TAP;
1416 }
1417
Marek Vasutb148ebe2015-07-19 05:01:12 +02001418 /* Cannot find working solution */
Marek Vasutf2b02d42015-07-19 05:26:49 +02001419 debug_cond(DLEVEL == 2, "%s:%d find_dqs_en_phase: no vfifo/ptap/dtap\n",
1420 __func__, __LINE__);
1421 return -EINVAL;
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001422}
1423
Marek Vasut6394ef52015-07-19 06:04:00 +02001424/**
1425 * sdr_backup_phase() - Find DQS enable backup phase
1426 * @grp: Read/Write group
1427 * @work_bgn: Working window start position
Marek Vasut6394ef52015-07-19 06:04:00 +02001428 * @p: DQS Phase Iterator
1429 *
1430 * Find DQS enable backup phase setting.
1431 */
Marek Vasut42e43ab2015-07-19 06:37:51 +02001432static void sdr_backup_phase(const u32 grp, u32 *work_bgn, u32 *p)
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001433{
Marek Vasut6394ef52015-07-19 06:04:00 +02001434 u32 tmp_delay, bit_chk, d;
1435 int ret;
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001436
1437 /* Special case code for backing up a phase */
1438 if (*p == 0) {
1439 *p = IO_DQS_EN_PHASE_MAX;
Marek Vasut42e43ab2015-07-19 06:37:51 +02001440 rw_mgr_decr_vfifo(grp);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001441 } else {
1442 (*p)--;
1443 }
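	/*
	 * Note: phase 0 of the current VFIFO cycle is adjacent to phase
	 * IO_DQS_EN_PHASE_MAX of the previous cycle, which is why backing
	 * up from *p == 0 wraps the phase and decrements the VFIFO.
	 */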
1444 tmp_delay = *work_bgn - IO_DELAY_PER_OPA_TAP;
Marek Vasut6ff1c852015-07-19 04:34:12 +02001445 scc_mgr_set_dqs_en_phase_all_ranks(grp, *p);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001446
Marek Vasut6eff8032015-07-19 05:48:30 +02001447 for (d = 0; d <= IO_DQS_EN_DELAY_MAX && tmp_delay < *work_bgn; d++) {
1448 scc_mgr_set_dqs_en_delay_all_ranks(grp, d);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001449
Marek Vasut6394ef52015-07-19 06:04:00 +02001450 ret = rw_mgr_mem_calibrate_read_test_all_ranks(grp, 1,
1451 PASS_ONE_BIT, &bit_chk, 0);
1452 if (ret) {
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001453 *work_bgn = tmp_delay;
1454 break;
1455 }
Marek Vasut6eff8032015-07-19 05:48:30 +02001456
1457 tmp_delay += IO_DELAY_PER_DQS_EN_DCHAIN_TAP;
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001458 }
1459
Marek Vasut6394ef52015-07-19 06:04:00 +02001460 /* Restore VFIFO to old state before we decremented it (if needed). */
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001461 (*p)++;
1462 if (*p > IO_DQS_EN_PHASE_MAX) {
1463 *p = 0;
Marek Vasut42e43ab2015-07-19 06:37:51 +02001464 rw_mgr_incr_vfifo(grp);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001465 }
1466
Marek Vasut6ff1c852015-07-19 04:34:12 +02001467 scc_mgr_set_dqs_en_delay_all_ranks(grp, 0);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001468}
1469
Marek Vasut6394ef52015-07-19 06:04:00 +02001470/**
1471 * sdr_nonworking_phase() - Find non-working DQS enable phase
1472 * @grp: Read/Write group
1473 * @work_end: Working window end position
Marek Vasut6394ef52015-07-19 06:04:00 +02001474 * @p: DQS Phase Iterator
1475 * @i: Iterator
1476 *
1477 * Find non-working DQS enable phase setting.
1478 */
Marek Vasut42e43ab2015-07-19 06:37:51 +02001479static int sdr_nonworking_phase(const u32 grp, u32 *work_end, u32 *p, u32 *i)
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001480{
Marek Vasutf2b02d42015-07-19 05:26:49 +02001481 int ret;
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001482
1483 (*p)++;
1484 *work_end += IO_DELAY_PER_OPA_TAP;
1485 if (*p > IO_DQS_EN_PHASE_MAX) {
Marek Vasutf2b02d42015-07-19 05:26:49 +02001486 /* Fiddle with FIFO. */
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001487 *p = 0;
Marek Vasut42e43ab2015-07-19 06:37:51 +02001488 rw_mgr_incr_vfifo(grp);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001489 }
1490
Marek Vasut42e43ab2015-07-19 06:37:51 +02001491 ret = sdr_find_phase(0, grp, work_end, i, p);
Marek Vasutf2b02d42015-07-19 05:26:49 +02001492 if (ret) {
1493 /* Cannot see edge of failing read. */
1494 debug_cond(DLEVEL == 2, "%s:%d: end: failed\n",
1495 __func__, __LINE__);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001496 }
1497
Marek Vasutf2b02d42015-07-19 05:26:49 +02001498 return ret;
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001499}
1500
Marek Vasutfea03c32015-07-19 04:14:32 +02001501/**
1502 * sdr_find_window_center() - Find center of the working DQS window.
1503 * @grp: Read/Write group
1504 * @work_bgn: First working settings
1505 * @work_end: Last working settings
Marek Vasutfea03c32015-07-19 04:14:32 +02001506 *
1507 * Find center of the working DQS enable window.
1508 */
1509static int sdr_find_window_center(const u32 grp, const u32 work_bgn,
Marek Vasut42e43ab2015-07-19 06:37:51 +02001510 const u32 work_end)
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001511{
Marek Vasut42e43ab2015-07-19 06:37:51 +02001512 u32 bit_chk, work_mid;
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001513 int tmp_delay = 0;
Marek Vasutd996e802015-07-19 02:56:59 +02001514 int i, p, d;
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001515
Marek Vasutd996e802015-07-19 02:56:59 +02001516 work_mid = (work_bgn + work_end) / 2;
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001517
1518 debug_cond(DLEVEL == 2, "work_bgn=%d work_end=%d work_mid=%d\n",
Marek Vasutd996e802015-07-19 02:56:59 +02001519 work_bgn, work_end, work_mid);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001520 /* Get the middle delay to be less than a VFIFO delay */
Marek Vasutea4c4bb2015-07-19 04:04:33 +02001521 tmp_delay = (IO_DQS_EN_PHASE_MAX + 1) * IO_DELAY_PER_OPA_TAP;
Marek Vasutd996e802015-07-19 02:56:59 +02001522
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001523 debug_cond(DLEVEL == 2, "vfifo ptap delay %d\n", tmp_delay);
Marek Vasutea4c4bb2015-07-19 04:04:33 +02001524 work_mid %= tmp_delay;
Marek Vasutd996e802015-07-19 02:56:59 +02001525 debug_cond(DLEVEL == 2, "new work_mid %d\n", work_mid);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001526
Marek Vasutea4c4bb2015-07-19 04:04:33 +02001527 tmp_delay = rounddown(work_mid, IO_DELAY_PER_OPA_TAP);
1528 if (tmp_delay > IO_DQS_EN_PHASE_MAX * IO_DELAY_PER_OPA_TAP)
1529 tmp_delay = IO_DQS_EN_PHASE_MAX * IO_DELAY_PER_OPA_TAP;
1530 p = tmp_delay / IO_DELAY_PER_OPA_TAP;
Marek Vasutd996e802015-07-19 02:56:59 +02001531
Marek Vasutea4c4bb2015-07-19 04:04:33 +02001532 debug_cond(DLEVEL == 2, "new p %d, tmp_delay=%d\n", p, tmp_delay);
1533
1534 d = DIV_ROUND_UP(work_mid - tmp_delay, IO_DELAY_PER_DQS_EN_DCHAIN_TAP);
1535 if (d > IO_DQS_EN_DELAY_MAX)
1536 d = IO_DQS_EN_DELAY_MAX;
1537 tmp_delay += d * IO_DELAY_PER_DQS_EN_DCHAIN_TAP;
1538
Marek Vasutd996e802015-07-19 02:56:59 +02001539 debug_cond(DLEVEL == 2, "new d %d, tmp_delay=%d\n", d, tmp_delay);
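	/*
	 * Worked example with assumed, device-specific numbers: for
	 * IO_DELAY_PER_OPA_TAP = 416 ps, IO_DELAY_PER_DQS_EN_DCHAIN_TAP =
	 * 25 ps, IO_DQS_EN_PHASE_MAX = 7, work_bgn = 1000 and
	 * work_end = 4000: work_mid = 2500, one VFIFO cycle is
	 * 8 * 416 = 3328, so work_mid stays 2500 after the modulo;
	 * p = rounddown(2500, 416) / 416 = 6 (2496 ps) and
	 * d = DIV_ROUND_UP(2500 - 2496, 25) = 1.
	 */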
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001540
Marek Vasutea4c4bb2015-07-19 04:04:33 +02001541 scc_mgr_set_dqs_en_phase_all_ranks(grp, p);
Marek Vasutd996e802015-07-19 02:56:59 +02001542 scc_mgr_set_dqs_en_delay_all_ranks(grp, d);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001543
1544 /*
1545	 * Push VFIFO until we can successfully calibrate. We can do this
1546	 * because the largest possible margin is 1 VFIFO cycle.
1547 */
1548 for (i = 0; i < VFIFO_SIZE; i++) {
Marek Vasut42e43ab2015-07-19 06:37:51 +02001549 debug_cond(DLEVEL == 2, "find_dqs_en_phase: center\n");
Marek Vasutd996e802015-07-19 02:56:59 +02001550 if (rw_mgr_mem_calibrate_read_test_all_ranks(grp, 1,
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001551 PASS_ONE_BIT,
Marek Vasutd996e802015-07-19 02:56:59 +02001552 &bit_chk, 0)) {
Marek Vasutfea03c32015-07-19 04:14:32 +02001553 debug_cond(DLEVEL == 2,
Marek Vasut42e43ab2015-07-19 06:37:51 +02001554 "%s:%d center: found: ptap=%u dtap=%u\n",
1555 __func__, __LINE__, p, d);
Marek Vasutfea03c32015-07-19 04:14:32 +02001556 return 0;
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001557 }
1558
Marek Vasutfea03c32015-07-19 04:14:32 +02001559 /* Fiddle with FIFO. */
Marek Vasut42e43ab2015-07-19 06:37:51 +02001560 rw_mgr_incr_vfifo(grp);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001561 }
1562
Marek Vasutfea03c32015-07-19 04:14:32 +02001563 debug_cond(DLEVEL == 2, "%s:%d center: failed.\n",
1564 __func__, __LINE__);
1565 return -EINVAL;
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001566}
1567
1568/* find a good dqs enable to use */
Marek Vasut4896bcc2015-07-19 02:42:21 +02001569static uint32_t rw_mgr_mem_calibrate_vfifo_find_dqs_en_phase(u32 grp)
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001570{
Marek Vasut42e43ab2015-07-19 06:37:51 +02001571 uint32_t d, p, i;
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001572 uint32_t bit_chk;
1573 uint32_t dtaps_per_ptap;
Marek Vasutd996e802015-07-19 02:56:59 +02001574 uint32_t work_bgn, work_end;
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001575 uint32_t found_passing_read, found_failing_read, initial_failing_dtap;
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001576
1577 debug("%s:%d %u\n", __func__, __LINE__, grp);
1578
1579 reg_file_set_sub_stage(CAL_SUBSTAGE_VFIFO_CENTER);
1580
1581 scc_mgr_set_dqs_en_delay_all_ranks(grp, 0);
1582 scc_mgr_set_dqs_en_phase_all_ranks(grp, 0);
1583
Marek Vasut4896bcc2015-07-19 02:42:21 +02001584 /* Step 0: Determine number of delay taps for each phase tap. */
1585 dtaps_per_ptap = IO_DELAY_PER_OPA_TAP / IO_DELAY_PER_DQS_EN_DCHAIN_TAP;
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001586
Marek Vasut4896bcc2015-07-19 02:42:21 +02001587 /* Step 1: First push vfifo until we get a failing read. */
Marek Vasut088eb212015-07-19 06:45:43 +02001588 find_vfifo_failing_read(grp);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001589
Marek Vasut4896bcc2015-07-19 02:42:21 +02001590 /* Step 2: Find first working phase, increment in ptaps. */
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001591 work_bgn = 0;
Marek Vasut42e43ab2015-07-19 06:37:51 +02001592 if (sdr_working_phase(grp, &work_bgn, &d, &p, &i))
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001593 return 0;
1594
1595 work_end = work_bgn;
1596
1597 /*
Marek Vasut4896bcc2015-07-19 02:42:21 +02001598 * If d is 0 then the working window covers a phase tap and we can
1599 * follow the old procedure. Otherwise, we've found the beginning
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001600 * and we need to increment the dtaps until we find the end.
1601 */
1602 if (d == 0) {
Marek Vasut4896bcc2015-07-19 02:42:21 +02001603 /*
1604 * Step 3a: If we have room, back off by one and
1605 * increment in dtaps.
1606 */
Marek Vasut42e43ab2015-07-19 06:37:51 +02001607 sdr_backup_phase(grp, &work_bgn, &p);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001608
Marek Vasut4896bcc2015-07-19 02:42:21 +02001609 /*
1610 * Step 4a: go forward from working phase to non working
1611 * phase, increment in ptaps.
1612 */
Marek Vasut42e43ab2015-07-19 06:37:51 +02001613 if (sdr_nonworking_phase(grp, &work_end, &p, &i))
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001614 return 0;
1615
Marek Vasut4896bcc2015-07-19 02:42:21 +02001616 /* Step 5a: Back off one from last, increment in dtaps. */
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001617
1618 /* Special case code for backing up a phase */
1619 if (p == 0) {
1620 p = IO_DQS_EN_PHASE_MAX;
Marek Vasut42e43ab2015-07-19 06:37:51 +02001621 rw_mgr_decr_vfifo(grp);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001622 } else {
1623 p = p - 1;
1624 }
1625
1626 work_end -= IO_DELAY_PER_OPA_TAP;
1627 scc_mgr_set_dqs_en_phase_all_ranks(grp, p);
1628
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001629 d = 0;
1630
Marek Vasut4896bcc2015-07-19 02:42:21 +02001631 debug_cond(DLEVEL == 2, "%s:%d p: ptap=%u\n",
1632 __func__, __LINE__, p);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001633 }
1634
Marek Vasut4896bcc2015-07-19 02:42:21 +02001635 /* The dtap increment to find the failing edge is done here. */
1636 for (; d <= IO_DQS_EN_DELAY_MAX;
1637 d++, work_end += IO_DELAY_PER_DQS_EN_DCHAIN_TAP) {
1638 debug_cond(DLEVEL == 2, "%s:%d end-2: dtap=%u\n",
1639 __func__, __LINE__, d);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001640
Marek Vasut4896bcc2015-07-19 02:42:21 +02001641 scc_mgr_set_dqs_en_delay_all_ranks(grp, d);
1642
1643 if (!rw_mgr_mem_calibrate_read_test_all_ranks(grp, 1,
1644 PASS_ONE_BIT,
1645 &bit_chk, 0)) {
1646 break;
1647 }
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001648 }
1649
1650 /* Go back to working dtap */
1651 if (d != 0)
1652 work_end -= IO_DELAY_PER_DQS_EN_DCHAIN_TAP;
1653
Marek Vasut4896bcc2015-07-19 02:42:21 +02001654 debug_cond(DLEVEL == 2,
1655 "%s:%d p/d: ptap=%u dtap=%u end=%u\n",
1656 __func__, __LINE__, p, d - 1, work_end);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001657
1658 if (work_end < work_bgn) {
1659 /* nil range */
Marek Vasut4896bcc2015-07-19 02:42:21 +02001660 debug_cond(DLEVEL == 2, "%s:%d end-2: failed\n",
1661 __func__, __LINE__);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001662 return 0;
1663 }
1664
Marek Vasut4896bcc2015-07-19 02:42:21 +02001665 debug_cond(DLEVEL == 2, "%s:%d found range [%u,%u]\n",
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001666 __func__, __LINE__, work_bgn, work_end);
1667
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001668 /*
Marek Vasut4896bcc2015-07-19 02:42:21 +02001669 * We need to calculate the number of dtaps that equal a ptap.
1670 * To do that we'll back up a ptap and re-find the edge of the
1671 * window using dtaps
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001672 */
Marek Vasut4896bcc2015-07-19 02:42:21 +02001673 debug_cond(DLEVEL == 2, "%s:%d calculate dtaps_per_ptap for tracking\n",
1674 __func__, __LINE__);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001675
1676 /* Special case code for backing up a phase */
1677 if (p == 0) {
1678 p = IO_DQS_EN_PHASE_MAX;
Marek Vasut42e43ab2015-07-19 06:37:51 +02001679 rw_mgr_decr_vfifo(grp);
Marek Vasut4896bcc2015-07-19 02:42:21 +02001680 debug_cond(DLEVEL == 2, "%s:%d backed up cycle/phase: p=%u\n",
1681 __func__, __LINE__, p);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001682 } else {
1683 p = p - 1;
Marek Vasut4896bcc2015-07-19 02:42:21 +02001684 debug_cond(DLEVEL == 2, "%s:%d backedup phase only: p=%u",
1685 __func__, __LINE__, p);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001686 }
1687
1688 scc_mgr_set_dqs_en_phase_all_ranks(grp, p);
1689
1690 /*
1691 * Increase dtap until we first see a passing read (in case the
Marek Vasut4896bcc2015-07-19 02:42:21 +02001692 * window is smaller than a ptap), and then a failing read to
1693 * mark the edge of the window again.
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001694 */
1695
Marek Vasut4896bcc2015-07-19 02:42:21 +02001696 /* Find a passing read. */
1697 debug_cond(DLEVEL == 2, "%s:%d find passing read\n",
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001698 __func__, __LINE__);
1699 found_passing_read = 0;
1700 found_failing_read = 0;
1701 initial_failing_dtap = d;
1702 for (; d <= IO_DQS_EN_DELAY_MAX; d++) {
Marek Vasut4896bcc2015-07-19 02:42:21 +02001703 debug_cond(DLEVEL == 2, "%s:%d testing read d=%u\n",
1704 __func__, __LINE__, d);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001705 scc_mgr_set_dqs_en_delay_all_ranks(grp, d);
1706
1707 if (rw_mgr_mem_calibrate_read_test_all_ranks(grp, 1,
1708 PASS_ONE_BIT,
1709 &bit_chk, 0)) {
1710 found_passing_read = 1;
1711 break;
1712 }
1713 }
1714
1715 if (found_passing_read) {
Marek Vasut4896bcc2015-07-19 02:42:21 +02001716 /* Find a failing read. */
1717 debug_cond(DLEVEL == 2, "%s:%d find failing read\n",
1718 __func__, __LINE__);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001719 for (d = d + 1; d <= IO_DQS_EN_DELAY_MAX; d++) {
Marek Vasut4896bcc2015-07-19 02:42:21 +02001720 debug_cond(DLEVEL == 2, "%s:%d testing read d=%u\n",
1721 __func__, __LINE__, d);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001722 scc_mgr_set_dqs_en_delay_all_ranks(grp, d);
1723
1724 if (!rw_mgr_mem_calibrate_read_test_all_ranks
1725 (grp, 1, PASS_ONE_BIT, &bit_chk, 0)) {
1726 found_failing_read = 1;
1727 break;
1728 }
1729 }
1730 } else {
Marek Vasut4896bcc2015-07-19 02:42:21 +02001731 debug_cond(DLEVEL == 1,
1732 "%s:%d failed to calculate dtaps per ptap. Fall back on static value\n",
1733 __func__, __LINE__);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001734 }
1735
1736 /*
1737 * The dynamically calculated dtaps_per_ptap is only valid if we
1738 * found a passing/failing read. If we didn't, it means d hit the max
1739 * (IO_DQS_EN_DELAY_MAX). Otherwise, dtaps_per_ptap retains its
1740 * statically calculated value.
1741 */
1742 if (found_passing_read && found_failing_read)
1743 dtaps_per_ptap = d - initial_failing_dtap;
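	/*
	 * Illustrative numbers only: if the initial failing dtap was 3 and
	 * the re-found failing edge sits at d = 19, then
	 * dtaps_per_ptap = 19 - 3 = 16 dtaps span one ptap.
	 */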
1744
Marek Vasutb5450962015-07-12 21:05:08 +02001745 writel(dtaps_per_ptap, &sdr_reg_file->dtaps_per_ptap);
Marek Vasut4896bcc2015-07-19 02:42:21 +02001746 debug_cond(DLEVEL == 2, "%s:%d dtaps_per_ptap=%u - %u = %u",
1747 __func__, __LINE__, d, initial_failing_dtap, dtaps_per_ptap);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001748
Marek Vasut4896bcc2015-07-19 02:42:21 +02001749 /* Step 6: Find the centre of the window. */
Marek Vasut42e43ab2015-07-19 06:37:51 +02001750 if (sdr_find_window_center(grp, work_bgn, work_end))
Marek Vasut4896bcc2015-07-19 02:42:21 +02001751 return 0;
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001752
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001753 return 1;
1754}
1755
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001756/* per-bit deskew DQ and center */
1757static uint32_t rw_mgr_mem_calibrate_vfifo_center(uint32_t rank_bgn,
1758 uint32_t write_group, uint32_t read_group, uint32_t test_bgn,
1759 uint32_t use_read_test, uint32_t update_fom)
1760{
1761 uint32_t i, p, d, min_index;
1762 /*
1763 * Store these as signed since there are comparisons with
1764 * signed numbers.
1765 */
1766 uint32_t bit_chk;
1767 uint32_t sticky_bit_chk;
1768 int32_t left_edge[RW_MGR_MEM_DQ_PER_READ_DQS];
1769 int32_t right_edge[RW_MGR_MEM_DQ_PER_READ_DQS];
1770 int32_t final_dq[RW_MGR_MEM_DQ_PER_READ_DQS];
1771 int32_t mid;
1772 int32_t orig_mid_min, mid_min;
1773 int32_t new_dqs, start_dqs, start_dqs_en, shift_dq, final_dqs,
1774 final_dqs_en;
1775 int32_t dq_margin, dqs_margin;
1776 uint32_t stop;
1777 uint32_t temp_dq_in_delay1;
1778 uint32_t addr;
1779
1780 debug("%s:%d: %u %u", __func__, __LINE__, read_group, test_bgn);
1781
Marek Vasuta3340102015-07-12 19:03:33 +02001782 addr = SDR_PHYGRP_SCCGRP_ADDRESS | SCC_MGR_DQS_IN_DELAY_OFFSET;
Marek Vasut33acf0f2015-07-12 20:05:54 +02001783 start_dqs = readl(addr + (read_group << 2));
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001784 if (IO_SHIFT_DQS_EN_WHEN_SHIFT_DQS)
Marek Vasut33acf0f2015-07-12 20:05:54 +02001785 start_dqs_en = readl(addr + ((read_group << 2)
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001786 - IO_DQS_EN_DELAY_OFFSET));
1787
1788 /* set the left and right edge of each bit to an illegal value */
1789 /* use (IO_IO_IN_DELAY_MAX + 1) as an illegal value */
1790 sticky_bit_chk = 0;
1791 for (i = 0; i < RW_MGR_MEM_DQ_PER_READ_DQS; i++) {
1792 left_edge[i] = IO_IO_IN_DELAY_MAX + 1;
1793 right_edge[i] = IO_IO_IN_DELAY_MAX + 1;
1794 }
1795
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001796 /* Search for the left edge of the window for each bit */
1797 for (d = 0; d <= IO_IO_IN_DELAY_MAX; d++) {
1798 scc_mgr_apply_group_dq_in_delay(write_group, test_bgn, d);
1799
Marek Vasutb5450962015-07-12 21:05:08 +02001800 writel(0, &sdr_scc_mgr->update);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001801
1802 /*
1803 * Stop searching when the read test doesn't pass AND when
1804 * we've seen a passing read on every bit.
1805 */
1806 if (use_read_test) {
1807 stop = !rw_mgr_mem_calibrate_read_test(rank_bgn,
1808 read_group, NUM_READ_PB_TESTS, PASS_ONE_BIT,
1809 &bit_chk, 0, 0);
1810 } else {
1811 rw_mgr_mem_calibrate_write_test(rank_bgn, write_group,
1812 0, PASS_ONE_BIT,
1813 &bit_chk, 0);
1814 bit_chk = bit_chk >> (RW_MGR_MEM_DQ_PER_READ_DQS *
1815 (read_group - (write_group *
1816 RW_MGR_MEM_IF_READ_DQS_WIDTH /
1817 RW_MGR_MEM_IF_WRITE_DQS_WIDTH)));
1818 stop = (bit_chk == 0);
1819 }
1820 sticky_bit_chk = sticky_bit_chk | bit_chk;
1821 stop = stop && (sticky_bit_chk == param->read_correct_mask);
1822 debug_cond(DLEVEL == 2, "%s:%d vfifo_center(left): dtap=%u => %u == %u \
1823 && %u", __func__, __LINE__, d,
1824 sticky_bit_chk,
1825 param->read_correct_mask, stop);
1826
1827 if (stop == 1) {
1828 break;
1829 } else {
1830 for (i = 0; i < RW_MGR_MEM_DQ_PER_READ_DQS; i++) {
1831 if (bit_chk & 1) {
1832 /* Remember a passing test as the
1833 left_edge */
1834 left_edge[i] = d;
1835 } else {
1836 /* If a left edge has not been seen yet,
1837 then a future passing test will mark
1838 this edge as the right edge */
1839 if (left_edge[i] ==
1840 IO_IO_IN_DELAY_MAX + 1) {
1841 right_edge[i] = -(d + 1);
1842 }
1843 }
1844 bit_chk = bit_chk >> 1;
1845 }
1846 }
1847 }
1848
1849 /* Reset DQ delay chains to 0 */
Marek Vasut122e1f32015-07-17 06:07:13 +02001850 scc_mgr_apply_group_dq_in_delay(test_bgn, 0);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001851 sticky_bit_chk = 0;
1852 for (i = RW_MGR_MEM_DQ_PER_READ_DQS - 1;; i--) {
1853 debug_cond(DLEVEL == 2, "%s:%d vfifo_center: left_edge[%u]: \
1854 %d right_edge[%u]: %d\n", __func__, __LINE__,
1855 i, left_edge[i], i, right_edge[i]);
1856
1857 /*
1858 * Check for cases where we haven't found the left edge,
1859 * which makes our assignment of the right edge invalid.
1860 * Reset it to the illegal value.
1861 */
1862 if ((left_edge[i] == IO_IO_IN_DELAY_MAX + 1) && (
1863 right_edge[i] != IO_IO_IN_DELAY_MAX + 1)) {
1864 right_edge[i] = IO_IO_IN_DELAY_MAX + 1;
1865 debug_cond(DLEVEL == 2, "%s:%d vfifo_center: reset \
1866 right_edge[%u]: %d\n", __func__, __LINE__,
1867 i, right_edge[i]);
1868 }
1869
1870 /*
1871 * Reset sticky bit (except for bits where we have seen
1872 * both the left and right edge).
1873 */
1874 sticky_bit_chk = sticky_bit_chk << 1;
1875 if ((left_edge[i] != IO_IO_IN_DELAY_MAX + 1) &&
1876 (right_edge[i] != IO_IO_IN_DELAY_MAX + 1)) {
1877 sticky_bit_chk = sticky_bit_chk | 1;
1878 }
1879
1880 if (i == 0)
1881 break;
1882 }
1883
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001884 /* Search for the right edge of the window for each bit */
1885 for (d = 0; d <= IO_DQS_IN_DELAY_MAX - start_dqs; d++) {
1886 scc_mgr_set_dqs_bus_in_delay(read_group, d + start_dqs);
1887 if (IO_SHIFT_DQS_EN_WHEN_SHIFT_DQS) {
1888 uint32_t delay = d + start_dqs_en;
1889 if (delay > IO_DQS_EN_DELAY_MAX)
1890 delay = IO_DQS_EN_DELAY_MAX;
1891 scc_mgr_set_dqs_en_delay(read_group, delay);
1892 }
1893 scc_mgr_load_dqs(read_group);
1894
Marek Vasutb5450962015-07-12 21:05:08 +02001895 writel(0, &sdr_scc_mgr->update);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001896
1897 /*
1898 * Stop searching when the read test doesn't pass AND when
1899 * we've seen a passing read on every bit.
1900 */
1901 if (use_read_test) {
1902 stop = !rw_mgr_mem_calibrate_read_test(rank_bgn,
1903 read_group, NUM_READ_PB_TESTS, PASS_ONE_BIT,
1904 &bit_chk, 0, 0);
1905 } else {
1906 rw_mgr_mem_calibrate_write_test(rank_bgn, write_group,
1907 0, PASS_ONE_BIT,
1908 &bit_chk, 0);
1909 bit_chk = bit_chk >> (RW_MGR_MEM_DQ_PER_READ_DQS *
1910 (read_group - (write_group *
1911 RW_MGR_MEM_IF_READ_DQS_WIDTH /
1912 RW_MGR_MEM_IF_WRITE_DQS_WIDTH)));
1913 stop = (bit_chk == 0);
1914 }
1915 sticky_bit_chk = sticky_bit_chk | bit_chk;
1916 stop = stop && (sticky_bit_chk == param->read_correct_mask);
1917
1918 debug_cond(DLEVEL == 2, "%s:%d vfifo_center(right): dtap=%u => %u == \
1919 %u && %u", __func__, __LINE__, d,
1920 sticky_bit_chk, param->read_correct_mask, stop);
1921
1922 if (stop == 1) {
1923 break;
1924 } else {
1925 for (i = 0; i < RW_MGR_MEM_DQ_PER_READ_DQS; i++) {
1926 if (bit_chk & 1) {
1927 /* Remember a passing test as
1928 the right_edge */
1929 right_edge[i] = d;
1930 } else {
1931 if (d != 0) {
1932 /* If a right edge has not been
1933 seen yet, then a future passing
1934 test will mark this edge as the
1935 left edge */
1936 if (right_edge[i] ==
1937 IO_IO_IN_DELAY_MAX + 1) {
1938 left_edge[i] = -(d + 1);
1939 }
1940 } else {
1941 /* d = 0 failed, but it passed
1942 when testing the left edge,
1943 so it must be marginal,
1944 set it to -1 */
1945 if (right_edge[i] ==
1946 IO_IO_IN_DELAY_MAX + 1 &&
1947 left_edge[i] !=
1948 IO_IO_IN_DELAY_MAX
1949 + 1) {
1950 right_edge[i] = -1;
1951 }
1952 /* If a right edge has not been
1953 seen yet, then a future passing
1954 test will mark this edge as the
1955 left edge */
1956 else if (right_edge[i] ==
1957 IO_IO_IN_DELAY_MAX +
1958 1) {
1959 left_edge[i] = -(d + 1);
1960 }
1961 }
1962 }
1963
1964 debug_cond(DLEVEL == 2, "%s:%d vfifo_center[r,\
1965 d=%u]: ", __func__, __LINE__, d);
1966 debug_cond(DLEVEL == 2, "bit_chk_test=%d left_edge[%u]: %d ",
1967 (int)(bit_chk & 1), i, left_edge[i]);
1968 debug_cond(DLEVEL == 2, "right_edge[%u]: %d\n", i,
1969 right_edge[i]);
1970 bit_chk = bit_chk >> 1;
1971 }
1972 }
1973 }
1974
1975 /* Check that all bits have a window */
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001976 for (i = 0; i < RW_MGR_MEM_DQ_PER_READ_DQS; i++) {
1977 debug_cond(DLEVEL == 2, "%s:%d vfifo_center: left_edge[%u]: \
1978 %d right_edge[%u]: %d", __func__, __LINE__,
1979 i, left_edge[i], i, right_edge[i]);
1980 if ((left_edge[i] == IO_IO_IN_DELAY_MAX + 1) || (right_edge[i]
1981 == IO_IO_IN_DELAY_MAX + 1)) {
1982 /*
1983 * Restore delay chain settings before letting the loop
1984 * in rw_mgr_mem_calibrate_vfifo to retry different
1985 * dqs/ck relationships.
1986 */
1987 scc_mgr_set_dqs_bus_in_delay(read_group, start_dqs);
1988 if (IO_SHIFT_DQS_EN_WHEN_SHIFT_DQS) {
1989 scc_mgr_set_dqs_en_delay(read_group,
1990 start_dqs_en);
1991 }
1992 scc_mgr_load_dqs(read_group);
Marek Vasutb5450962015-07-12 21:05:08 +02001993 writel(0, &sdr_scc_mgr->update);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05001994
1995 debug_cond(DLEVEL == 1, "%s:%d vfifo_center: failed to \
1996 find edge [%u]: %d %d", __func__, __LINE__,
1997 i, left_edge[i], right_edge[i]);
1998 if (use_read_test) {
1999 set_failing_group_stage(read_group *
2000 RW_MGR_MEM_DQ_PER_READ_DQS + i,
2001 CAL_STAGE_VFIFO,
2002 CAL_SUBSTAGE_VFIFO_CENTER);
2003 } else {
2004 set_failing_group_stage(read_group *
2005 RW_MGR_MEM_DQ_PER_READ_DQS + i,
2006 CAL_STAGE_VFIFO_AFTER_WRITES,
2007 CAL_SUBSTAGE_VFIFO_CENTER);
2008 }
2009 return 0;
2010 }
2011 }
2012
2013 /* Find middle of window for each DQ bit */
2014 mid_min = left_edge[0] - right_edge[0];
2015 min_index = 0;
2016 for (i = 1; i < RW_MGR_MEM_DQ_PER_READ_DQS; i++) {
2017 mid = left_edge[i] - right_edge[i];
2018 if (mid < mid_min) {
2019 mid_min = mid;
2020 min_index = i;
2021 }
2022 }
2023
2024 /*
2025 * -mid_min/2 represents the amount that we need to move DQS.
2026 * If mid_min is odd and positive we'll need to add one to
2027 * make sure the rounding in further calculations is correct
2028 * (always bias to the right), so just add 1 for all positive values.
2029 */
2030 if (mid_min > 0)
2031 mid_min++;
2032
2033 mid_min = mid_min / 2;
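	/*
	 * Example of the bias: mid_min = 5 becomes (5 + 1) / 2 = 3, while
	 * mid_min = -5 truncates to -2, so both signs round toward the
	 * right edge as intended.
	 */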
2034
2035 debug_cond(DLEVEL == 1, "%s:%d vfifo_center: mid_min=%d (index=%u)\n",
2036 __func__, __LINE__, mid_min, min_index);
2037
2038 /* Determine the amount we can change DQS (which is -mid_min) */
2039 orig_mid_min = mid_min;
2040 new_dqs = start_dqs - mid_min;
2041 if (new_dqs > IO_DQS_IN_DELAY_MAX)
2042 new_dqs = IO_DQS_IN_DELAY_MAX;
2043 else if (new_dqs < 0)
2044 new_dqs = 0;
2045
2046 mid_min = start_dqs - new_dqs;
2047 debug_cond(DLEVEL == 1, "vfifo_center: new mid_min=%d new_dqs=%d\n",
2048 mid_min, new_dqs);
2049
2050 if (IO_SHIFT_DQS_EN_WHEN_SHIFT_DQS) {
2051 if (start_dqs_en - mid_min > IO_DQS_EN_DELAY_MAX)
2052 mid_min += start_dqs_en - mid_min - IO_DQS_EN_DELAY_MAX;
2053 else if (start_dqs_en - mid_min < 0)
2054 mid_min += start_dqs_en - mid_min;
2055 }
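	/*
	 * The adjustments above clamp mid_min so that the shifted
	 * DQS-enable delay, start_dqs_en - mid_min, stays within
	 * [0, IO_DQS_EN_DELAY_MAX].
	 */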
2056 new_dqs = start_dqs - mid_min;
2057
2058 debug_cond(DLEVEL == 1, "vfifo_center: start_dqs=%d start_dqs_en=%d \
2059 new_dqs=%d mid_min=%d\n", start_dqs,
2060 IO_SHIFT_DQS_EN_WHEN_SHIFT_DQS ? start_dqs_en : -1,
2061 new_dqs, mid_min);
2062
2063 /* Initialize data for export structures */
2064 dqs_margin = IO_IO_IN_DELAY_MAX + 1;
2065 dq_margin = IO_IO_IN_DELAY_MAX + 1;
2066
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05002067 /* add delay to bring centre of all DQ windows to the same "level" */
2068 for (i = 0, p = test_bgn; i < RW_MGR_MEM_DQ_PER_READ_DQS; i++, p++) {
2069 /* Use values before divide by 2 to reduce round off error */
2070 shift_dq = (left_edge[i] - right_edge[i] -
2071 (left_edge[min_index] - right_edge[min_index]))/2 +
2072 (orig_mid_min - mid_min);
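		/*
		 * Illustrative arithmetic: if this bit's window is
		 * left_edge[i] = 12, right_edge[i] = 4, the narrowest
		 * window is left_edge[min_index] - right_edge[min_index]
		 * = 2, and orig_mid_min - mid_min = 0, then
		 * shift_dq = (12 - 4 - 2) / 2 + 0 = 3 extra taps of DQ
		 * delay to line this bit's centre up with the rest.
		 */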
2073
2074 debug_cond(DLEVEL == 2, "vfifo_center: before: \
2075 shift_dq[%u]=%d\n", i, shift_dq);
2076
Marek Vasutb5450962015-07-12 21:05:08 +02002077 addr = SDR_PHYGRP_SCCGRP_ADDRESS | SCC_MGR_IO_IN_DELAY_OFFSET;
Marek Vasut33acf0f2015-07-12 20:05:54 +02002078 temp_dq_in_delay1 = readl(addr + (p << 2));
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05002080
2081 if (shift_dq + (int32_t)temp_dq_in_delay1 >
2082 (int32_t)IO_IO_IN_DELAY_MAX) {
2083 shift_dq = (int32_t)IO_IO_IN_DELAY_MAX - temp_dq_in_delay1;
2084 } else if (shift_dq + (int32_t)temp_dq_in_delay1 < 0) {
2085 shift_dq = -(int32_t)temp_dq_in_delay1;
2086 }
2087 debug_cond(DLEVEL == 2, "vfifo_center: after: \
2088 shift_dq[%u]=%d\n", i, shift_dq);
2089 final_dq[i] = temp_dq_in_delay1 + shift_dq;
Marek Vasutcab80792015-07-12 22:07:33 +02002090 scc_mgr_set_dq_in_delay(p, final_dq[i]);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05002091 scc_mgr_load_dq(p);
2092
2093 debug_cond(DLEVEL == 2, "vfifo_center: margin[%u]=[%d,%d]\n", i,
2094 left_edge[i] - shift_dq + (-mid_min),
2095 right_edge[i] + shift_dq - (-mid_min));
2096 /* To determine values for export structures */
2097 if (left_edge[i] - shift_dq + (-mid_min) < dq_margin)
2098 dq_margin = left_edge[i] - shift_dq + (-mid_min);
2099
2100 if (right_edge[i] + shift_dq - (-mid_min) < dqs_margin)
2101 dqs_margin = right_edge[i] + shift_dq - (-mid_min);
2102 }
2103
2104 final_dqs = new_dqs;
2105 if (IO_SHIFT_DQS_EN_WHEN_SHIFT_DQS)
2106 final_dqs_en = start_dqs_en - mid_min;
2107
2108 /* Move DQS-en */
2109 if (IO_SHIFT_DQS_EN_WHEN_SHIFT_DQS) {
2110 scc_mgr_set_dqs_en_delay(read_group, final_dqs_en);
2111 scc_mgr_load_dqs(read_group);
2112 }
2113
2114 /* Move DQS */
2115 scc_mgr_set_dqs_bus_in_delay(read_group, final_dqs);
2116 scc_mgr_load_dqs(read_group);
2117 debug_cond(DLEVEL == 2, "%s:%d vfifo_center: dq_margin=%d \
2118 dqs_margin=%d", __func__, __LINE__,
2119 dq_margin, dqs_margin);
2120
2121 /*
2122 * Do not remove this line as it makes sure all of our decisions
2123 * have been applied. Apply the update bit.
2124 */
Marek Vasutb5450962015-07-12 21:05:08 +02002125 writel(0, &sdr_scc_mgr->update);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05002126
2127 return (dq_margin >= 0) && (dqs_margin >= 0);
2128}
2129
Marek Vasutc27ea622015-07-17 03:16:45 +02002130/**
Marek Vasut6ca5b962015-07-18 02:46:56 +02002131 * rw_mgr_mem_calibrate_guaranteed_write() - Perform guaranteed write into the device
2132 * @rw_group: Read/Write Group
2133 * @phase: DQ/DQS phase
2134 *
2135 * Because initially no communication can be reliably performed with the memory
2136 * device, the sequencer uses a guaranteed write mechanism to write data into
2137 * the memory device.
2138 */
2139static int rw_mgr_mem_calibrate_guaranteed_write(const u32 rw_group,
2140 const u32 phase)
2141{
Marek Vasut6ca5b962015-07-18 02:46:56 +02002142 int ret;
2143
2144 /* Set a particular DQ/DQS phase. */
2145 scc_mgr_set_dqdqs_output_phase_all_ranks(rw_group, phase);
2146
2147 debug_cond(DLEVEL == 1, "%s:%d guaranteed write: g=%u p=%u\n",
2148 __func__, __LINE__, rw_group, phase);
2149
2150 /*
2151 * Altera EMI_RM 2015.05.04 :: Figure 1-25
2152 * Load up the patterns used by read calibration using the
2153 * current DQDQS phase.
2154 */
2155 rw_mgr_mem_calibrate_read_load_patterns(0, 1);
2156
2157 if (gbl->phy_debug_mode_flags & PHY_DEBUG_DISABLE_GUARANTEED_READ)
2158 return 0;
2159
2160 /*
2161 * Altera EMI_RM 2015.05.04 :: Figure 1-26
2162 * Back-to-Back reads of the patterns used for calibration.
2163 */
Marek Vasut55c4d692015-07-18 03:55:07 +02002164 ret = rw_mgr_mem_calibrate_read_test_patterns(0, rw_group, 1);
2165 if (ret)
Marek Vasut6ca5b962015-07-18 02:46:56 +02002166 debug_cond(DLEVEL == 1,
2167 "%s:%d Guaranteed read test failed: g=%u p=%u\n",
2168 __func__, __LINE__, rw_group, phase);
Marek Vasut55c4d692015-07-18 03:55:07 +02002169 return ret;
Marek Vasut6ca5b962015-07-18 02:46:56 +02002170}
2171
2172/**
Marek Vasutfeb5e652015-07-18 02:57:32 +02002173 * rw_mgr_mem_calibrate_dqs_enable_calibration() - DQS Enable Calibration
2174 * @rw_group: Read/Write Group
2175 * @test_bgn: Rank at which the test begins
2176 *
2177 * DQS enable calibration ensures reliable capture of the DQ signal without
2178 * glitches on the DQS line.
2179 */
2180static int rw_mgr_mem_calibrate_dqs_enable_calibration(const u32 rw_group,
2181 const u32 test_bgn)
2182{
Marek Vasutfeb5e652015-07-18 02:57:32 +02002183 /*
2184 * Altera EMI_RM 2015.05.04 :: Figure 1-27
2185 * DQS and DQS Enable Signal Relationships.
2186 */
Marek Vasut3aa19dc2015-07-18 04:28:42 +02002187
2188 /* We start at zero, so have one less dq to divide among. */
2189 const u32 delay_step = IO_IO_IN_DELAY_MAX /
2190 (RW_MGR_MEM_DQ_PER_READ_DQS - 1);
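	/*
	 * Illustrative: with assumed IO_IO_IN_DELAY_MAX = 31 and
	 * RW_MGR_MEM_DQ_PER_READ_DQS = 8, delay_step = 31 / 7 = 4, so the
	 * DQ pins of a group get staggered input delays 0, 4, ..., 28.
	 */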
2191 int found;
2192 u32 i, p, d, r;
2193
2194 debug("%s:%d (%u,%u)\n", __func__, __LINE__, rw_group, test_bgn);
2195
2196 /* Try different dq_in_delays since the DQ path is shorter than DQS. */
2197 for (r = 0; r < RW_MGR_MEM_NUMBER_OF_RANKS;
2198 r += NUM_RANKS_PER_SHADOW_REG) {
2199 for (i = 0, p = test_bgn, d = 0;
2200 i < RW_MGR_MEM_DQ_PER_READ_DQS;
2201 i++, p++, d += delay_step) {
2202 debug_cond(DLEVEL == 1,
2203 "%s:%d: g=%u r=%u i=%u p=%u d=%u\n",
2204 __func__, __LINE__, rw_group, r, i, p, d);
2205
2206 scc_mgr_set_dq_in_delay(p, d);
2207 scc_mgr_load_dq(p);
2208 }
2209
2210 writel(0, &sdr_scc_mgr->update);
2211 }
2212
2213 /*
2214 * Try rw_mgr_mem_calibrate_vfifo_find_dqs_en_phase across different
2215 * dq_in_delay values
2216 */
2217 found = rw_mgr_mem_calibrate_vfifo_find_dqs_en_phase(rw_group);
2218
2219 debug_cond(DLEVEL == 1,
2220 "%s:%d: g=%u found=%u; Resetting delay chain to zero\n",
2221 __func__, __LINE__, rw_group, found);
2222
2223 for (r = 0; r < RW_MGR_MEM_NUMBER_OF_RANKS;
2224 r += NUM_RANKS_PER_SHADOW_REG) {
2225 scc_mgr_apply_group_dq_in_delay(test_bgn, 0);
2226 writel(0, &sdr_scc_mgr->update);
2227 }
2228
2229 if (!found)
2230 return -EINVAL;
2231
2232 return 0;
Marek Vasutfeb5e652015-07-18 02:57:32 +02002234}
2235
2236/**
Marek Vasut349ea3e2015-07-18 03:10:31 +02002237 * rw_mgr_mem_calibrate_dq_dqs_centering() - Centering DQ/DQS
2238 * @rw_group: Read/Write Group
2239 * @test_bgn: Rank at which the test begins
2240 * @use_read_test: Perform a read test
2241 * @update_fom: Update FOM
2242 *
2243 * The centering DQ/DQS stage attempts to align DQ and DQS signals on reads
2244 * within a group.
2245 */
2246static int
2247rw_mgr_mem_calibrate_dq_dqs_centering(const u32 rw_group, const u32 test_bgn,
2248 const int use_read_test,
2249 const int update_fom)
2250
2251{
2252 int ret, grp_calibrated;
2253 u32 rank_bgn, sr;
2254
2255 /*
2256 * Altera EMI_RM 2015.05.04 :: Figure 1-28
2257 * Read per-bit deskew can be done on a per shadow register basis.
2258 */
2259 grp_calibrated = 1;
2260 for (rank_bgn = 0, sr = 0;
2261 rank_bgn < RW_MGR_MEM_NUMBER_OF_RANKS;
2262 rank_bgn += NUM_RANKS_PER_SHADOW_REG, sr++) {
2263 /* Check if this set of ranks should be skipped entirely. */
2264 if (param->skip_shadow_regs[sr])
2265 continue;
2266
2267 ret = rw_mgr_mem_calibrate_vfifo_center(rank_bgn, rw_group,
2268 rw_group, test_bgn,
2269 use_read_test,
2270 update_fom);
2271 if (ret)
2272 continue;
2273
2274 grp_calibrated = 0;
2275 }
2276
2277 if (!grp_calibrated)
2278 return -EIO;
2279
2280 return 0;
2281}
2282
2283/**
Marek Vasutc27ea622015-07-17 03:16:45 +02002284 * rw_mgr_mem_calibrate_vfifo() - Calibrate the read valid prediction FIFO
2285 * @rw_group: Read/Write Group
2286 * @test_bgn: Rank at which the test begins
2287 *
2288 * Stage 1: Calibrate the read valid prediction FIFO.
2289 *
2290 * This function implements UniPHY calibration Stage 1, as explained in
2291 * detail in Altera EMI_RM 2015.05.04 , "UniPHY Calibration Stages".
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05002292 *
Marek Vasutc27ea622015-07-17 03:16:45 +02002293 * - read valid prediction will consist of finding:
2294 * - DQS enable phase and DQS enable delay (DQS Enable Calibration)
2295 * - DQS input phase and DQS input delay (DQ/DQS Centering)
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05002296 * - we also do a per-bit deskew on the DQ lines.
2297 */
Marek Vasute42fcea2015-07-17 04:24:18 +02002298static int rw_mgr_mem_calibrate_vfifo(const u32 rw_group, const u32 test_bgn)
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05002299{
Marek Vasut349ea3e2015-07-18 03:10:31 +02002300 uint32_t p, d;
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05002301 uint32_t dtaps_per_ptap;
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05002302 uint32_t failed_substage;
2303
Marek Vasut6ca5b962015-07-18 02:46:56 +02002304 int ret;
2305
Marek Vasute42fcea2015-07-17 04:24:18 +02002306 debug("%s:%d: %u %u\n", __func__, __LINE__, rw_group, test_bgn);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05002307
Marek Vasut912d43e2015-07-18 03:15:34 +02002308 /* Update info for sims */
2309 reg_file_set_group(rw_group);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05002310 reg_file_set_stage(CAL_STAGE_VFIFO);
Marek Vasut912d43e2015-07-18 03:15:34 +02002311 reg_file_set_sub_stage(CAL_SUBSTAGE_GUARANTEED_READ);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05002312
Marek Vasut912d43e2015-07-18 03:15:34 +02002313 failed_substage = CAL_SUBSTAGE_GUARANTEED_READ;
2314
2315 /* Determine number of delay taps for each phase tap. */
Marek Vasute5f2cf72015-07-17 03:11:06 +02002316 dtaps_per_ptap = DIV_ROUND_UP(IO_DELAY_PER_OPA_TAP,
2317 IO_DELAY_PER_DQS_EN_DCHAIN_TAP) - 1;
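	/*
	 * With the same assumed delays as above (416 ps per ptap, 25 ps
	 * per dtap), dtaps_per_ptap = DIV_ROUND_UP(416, 25) - 1 = 16, so
	 * the loop below tries d = 0, 2, ..., 16.
	 */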
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05002318
Marek Vasutf2a4bda2015-07-17 03:50:17 +02002319 for (d = 0; d <= dtaps_per_ptap; d += 2) {
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05002320 /*
2321 * In RLDRAMX we may be messing with the delay of pins in
Marek Vasute42fcea2015-07-17 04:24:18 +02002322 * the same write rw_group but outside of the current read
2323 * rw_group, but that's OK because we haven't calibrated the
Marek Vasutd7f49152015-07-17 03:44:26 +02002324 * output side yet.
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05002325 */
2326 if (d > 0) {
Marek Vasut788870f2015-07-19 02:18:21 +02002327 scc_mgr_apply_group_all_out_delay_add_all_ranks(
Marek Vasute42fcea2015-07-17 04:24:18 +02002328 rw_group, d);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05002329 }
2330
Marek Vasutf2a4bda2015-07-17 03:50:17 +02002331 for (p = 0; p <= IO_DQDQS_OUT_PHASE_MAX; p++) {
Marek Vasut6ca5b962015-07-18 02:46:56 +02002332 /* 1) Guaranteed Write */
2333 ret = rw_mgr_mem_calibrate_guaranteed_write(rw_group, p);
2334 if (ret)
2335 break;
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05002336
Marek Vasutfeb5e652015-07-18 02:57:32 +02002337 /* 2) DQS Enable Calibration */
2338 ret = rw_mgr_mem_calibrate_dqs_enable_calibration(rw_group,
2339 test_bgn);
2340 if (ret) {
Marek Vasutf2a4bda2015-07-17 03:50:17 +02002341 failed_substage = CAL_SUBSTAGE_DQS_EN_PHASE;
2342 continue;
2343 }
2344
Marek Vasut349ea3e2015-07-18 03:10:31 +02002345 /* 3) Centering DQ/DQS */
Marek Vasutf2a4bda2015-07-17 03:50:17 +02002346 /*
Marek Vasut349ea3e2015-07-18 03:10:31 +02002347 * If doing read-after-write calibration, do not update
2348 * FOM now; it will be updated during that later stage.
Marek Vasutf2a4bda2015-07-17 03:50:17 +02002349 */
Marek Vasut349ea3e2015-07-18 03:10:31 +02002350 ret = rw_mgr_mem_calibrate_dq_dqs_centering(rw_group,
2351 test_bgn, 1, 0);
2352 if (ret) {
Marek Vasutf2a4bda2015-07-17 03:50:17 +02002353 failed_substage = CAL_SUBSTAGE_VFIFO_CENTER;
Marek Vasut349ea3e2015-07-18 03:10:31 +02002354 continue;
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05002355 }
Marek Vasutf2a4bda2015-07-17 03:50:17 +02002356
Marek Vasut349ea3e2015-07-18 03:10:31 +02002357 /* All done. */
2358 goto cal_done_ok;
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05002359 }
2360 }
2361
Marek Vasutf2a4bda2015-07-17 03:50:17 +02002362 /* Calibration Stage 1 failed. */
Marek Vasute42fcea2015-07-17 04:24:18 +02002363 set_failing_group_stage(rw_group, CAL_STAGE_VFIFO, failed_substage);
Marek Vasutf2a4bda2015-07-17 03:50:17 +02002364 return 0;
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05002365
Marek Vasutf2a4bda2015-07-17 03:50:17 +02002366 /* Calibration Stage 1 completed OK. */
2367cal_done_ok:
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05002368 /*
2369 * Reset the delay chains back to zero if they have moved > 1
2370 * (check for > 1 because loop will increase d even when pass in
2371 * first case).
2372 */
2373 if (d > 2)
Marek Vasute42fcea2015-07-17 04:24:18 +02002374 scc_mgr_zero_group(rw_group, 1);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05002375
2376 return 1;
2377}
2378
2379/* VFIFO Calibration -- Read Deskew Calibration after write deskew */
2380static uint32_t rw_mgr_mem_calibrate_vfifo_end(uint32_t read_group,
2381 uint32_t test_bgn)
2382{
2383 uint32_t rank_bgn, sr;
2384 uint32_t grp_calibrated;
2385 uint32_t write_group;
2386
2387 debug("%s:%d %u %u", __func__, __LINE__, read_group, test_bgn);
2388
2389 /* update info for sims */
2390
2391 reg_file_set_stage(CAL_STAGE_VFIFO_AFTER_WRITES);
2392 reg_file_set_sub_stage(CAL_SUBSTAGE_VFIFO_CENTER);
2393
2394 write_group = read_group;
2395
2396 /* update info for sims */
2397 reg_file_set_group(read_group);
2398
2399 grp_calibrated = 1;
2400 /* Read per-bit deskew can be done on a per shadow register basis */
2401 for (rank_bgn = 0, sr = 0; rank_bgn < RW_MGR_MEM_NUMBER_OF_RANKS;
2402 rank_bgn += NUM_RANKS_PER_SHADOW_REG, ++sr) {
2403 /* Determine if this set of ranks should be skipped entirely */
2404 if (!param->skip_shadow_regs[sr]) {
2405 /* This is the last calibration round, update FOM here */
2406 if (!rw_mgr_mem_calibrate_vfifo_center(rank_bgn,
2407 write_group,
2408 read_group,
2409 test_bgn, 0,
2410 1)) {
2411 grp_calibrated = 0;
2412 }
2413 }
2414 }
2415
2416
2417 if (grp_calibrated == 0) {
2418 set_failing_group_stage(write_group,
2419 CAL_STAGE_VFIFO_AFTER_WRITES,
2420 CAL_SUBSTAGE_VFIFO_CENTER);
2421 return 0;
2422 }
2423
2424 return 1;
2425}
2426
2427/* Calibrate LFIFO to find smallest read latency */
2428static uint32_t rw_mgr_mem_calibrate_lfifo(void)
2429{
2430 uint32_t found_one;
2431 uint32_t bit_chk;
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05002432
2433 debug("%s:%d\n", __func__, __LINE__);
2434
2435 /* update info for sims */
2436 reg_file_set_stage(CAL_STAGE_LFIFO);
2437 reg_file_set_sub_stage(CAL_SUBSTAGE_READ_LATENCY);
2438
2439 /* Load up the patterns used by read calibration for all ranks */
2440 rw_mgr_mem_calibrate_read_load_patterns(0, 1);
2441 found_one = 0;
2442
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05002443 do {
Marek Vasutb5450962015-07-12 21:05:08 +02002444 writel(gbl->curr_read_lat, &phy_mgr_cfg->phy_rlat);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05002445 debug_cond(DLEVEL == 2, "%s:%d lfifo: read_lat=%u",
2446 __func__, __LINE__, gbl->curr_read_lat);
2447
2448 if (!rw_mgr_mem_calibrate_read_test_all_ranks(0,
2449 NUM_READ_TESTS,
2450 PASS_ALL_BITS,
2451 &bit_chk, 1)) {
2452 break;
2453 }
2454
2455 found_one = 1;
2456 /* reduce read latency and see if things are working */
2457 /* correctly */
2458 gbl->curr_read_lat--;
2459 } while (gbl->curr_read_lat > 0);
2460
2461 /* reset the fifos to get pointers to known state */
2462
Marek Vasutb5450962015-07-12 21:05:08 +02002463 writel(0, &phy_mgr_cmd->fifo_reset);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05002464
2465 if (found_one) {
2466 /* add a fudge factor to the read latency that was determined */
2467 gbl->curr_read_lat += 2;
Marek Vasutb5450962015-07-12 21:05:08 +02002468 writel(gbl->curr_read_lat, &phy_mgr_cfg->phy_rlat);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05002469 debug_cond(DLEVEL == 2, "%s:%d lfifo: success: using \
2470 read_lat=%u\n", __func__, __LINE__,
2471 gbl->curr_read_lat);
2472 return 1;
2473 } else {
2474 set_failing_group_stage(0xff, CAL_STAGE_LFIFO,
2475 CAL_SUBSTAGE_READ_LATENCY);
2476
2477 debug_cond(DLEVEL == 2, "%s:%d lfifo: failed at initial \
2478 read_lat=%u\n", __func__, __LINE__,
2479 gbl->curr_read_lat);
2480 return 0;
2481 }
2482}
2483
2484/*
2485 * issue write test command.
2486 * two variants are provided. one that just tests a write pattern and
2487 * another that tests datamask functionality.
2488 */
2489static void rw_mgr_mem_calibrate_write_test_issue(uint32_t group,
2490 uint32_t test_dm)
2491{
2492 uint32_t mcc_instruction;
2493 uint32_t quick_write_mode = (((STATIC_CALIB_STEPS) & CALIB_SKIP_WRITES) &&
2494 ENABLE_SUPER_QUICK_CALIBRATION);
2495 uint32_t rw_wl_nop_cycles;
2496 uint32_t addr;
2497
2498 /*
2499 * Set counter and jump addresses for the right
2500 * number of NOP cycles.
2501 * The number of supported NOP cycles can range from -1 to infinity
2502 * Three different cases are handled:
2503 *
2504 * 1. For a number of NOP cycles greater than 0, the RW Mgr looping
2505 * mechanism will be used to insert the right number of NOPs
2506 *
2507 * 2. For a number of NOP cycles equals to 0, the micro-instruction
2508 * issuing the write command will jump straight to the
2509 * micro-instruction that turns on DQS (for DDRx), or outputs write
2510 * data (for RLD), skipping
2511 * the NOP micro-instruction all together
2512 *
2513 * 3. A number of NOP cycles equal to -1 indicates that DQS must be
2514 * turned on in the same micro-instruction that issues the write
2515 * command. Then we need
2516 * to directly jump to the micro-instruction that sends out the data
2517 *
2518 * NOTE: Implementing this mechanism uses 2 RW Mgr jump-counters
2519 * (2 and 3). One jump-counter (0) is used to perform multiple
2520 * write-read operations.
2521 * one counter left to issue this command in "multiple-group" mode
2522 */
2523
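	/*
	 * Summary of the three cases handled below:
	 *   rw_wl_nop_cycles == -1: CNTR 2 = 0xFF, jump from the write
	 *     command straight to the data-out micro-instruction;
	 *   rw_wl_nop_cycles == 0:  CNTR 2 = 0xFF, jump straight to the
	 *     DQS-enable micro-instruction;
	 *   rw_wl_nop_cycles > 0:   CNTR 2 = 0 (jump not taken), CNTR 3
	 *     loops the NOP micro-instruction rw_wl_nop_cycles - 1 times.
	 */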
2524 rw_wl_nop_cycles = gbl->rw_wl_nop_cycles;
2525
2526 if (rw_wl_nop_cycles == -1) {
2527 /*
2528 * CNTR 2 - We want to execute the special write operation that
2529 * turns on DQS right away and then skip directly to the
2530 * instruction that sends out the data. We set the counter to a
2531 * large number so that the jump is always taken.
2532 */
Marek Vasutb5450962015-07-12 21:05:08 +02002533 writel(0xFF, &sdr_rw_load_mgr_regs->load_cntr2);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05002534
2535 /* CNTR 3 - Not used */
2536 if (test_dm) {
2537 mcc_instruction = RW_MGR_LFSR_WR_RD_DM_BANK_0_WL_1;
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05002538 writel(RW_MGR_LFSR_WR_RD_DM_BANK_0_DATA,
Marek Vasutb5450962015-07-12 21:05:08 +02002539 &sdr_rw_load_jump_mgr_regs->load_jump_add2);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05002540 writel(RW_MGR_LFSR_WR_RD_DM_BANK_0_NOP,
Marek Vasutb5450962015-07-12 21:05:08 +02002541 &sdr_rw_load_jump_mgr_regs->load_jump_add3);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05002542 } else {
2543 mcc_instruction = RW_MGR_LFSR_WR_RD_BANK_0_WL_1;
Marek Vasutb5450962015-07-12 21:05:08 +02002544 writel(RW_MGR_LFSR_WR_RD_BANK_0_DATA,
2545 &sdr_rw_load_jump_mgr_regs->load_jump_add2);
2546 writel(RW_MGR_LFSR_WR_RD_BANK_0_NOP,
2547 &sdr_rw_load_jump_mgr_regs->load_jump_add3);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05002548 }
2549 } else if (rw_wl_nop_cycles == 0) {
2550 /*
2551 * CNTR 2 - We want to skip the NOP operation and go straight
2552 * to the DQS enable instruction. We set the counter to a large
2553 * number so that the jump is always taken.
2554 */
Marek Vasutb5450962015-07-12 21:05:08 +02002555 writel(0xFF, &sdr_rw_load_mgr_regs->load_cntr2);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05002556
2557 /* CNTR 3 - Not used */
2558 if (test_dm) {
2559 mcc_instruction = RW_MGR_LFSR_WR_RD_DM_BANK_0;
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05002560 writel(RW_MGR_LFSR_WR_RD_DM_BANK_0_DQS,
Marek Vasutb5450962015-07-12 21:05:08 +02002561 &sdr_rw_load_jump_mgr_regs->load_jump_add2);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05002562 } else {
2563 mcc_instruction = RW_MGR_LFSR_WR_RD_BANK_0;
Marek Vasutb5450962015-07-12 21:05:08 +02002564 writel(RW_MGR_LFSR_WR_RD_BANK_0_DQS,
2565 &sdr_rw_load_jump_mgr_regs->load_jump_add2);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05002566 }
2567 } else {
2568 /*
2569 * CNTR 2 - In this case we want to execute the next instruction
2570 * and NOT take the jump. So we set the counter to 0. The jump
2571 * address doesn't count.
2572 */
Marek Vasutb5450962015-07-12 21:05:08 +02002573 writel(0x0, &sdr_rw_load_mgr_regs->load_cntr2);
2574 writel(0x0, &sdr_rw_load_jump_mgr_regs->load_jump_add2);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05002575
2576 /*
2577 * CNTR 3 - Set the nop counter to the number of cycles we
2578 * need to loop for, minus 1.
2579 */
Marek Vasutb5450962015-07-12 21:05:08 +02002580 writel(rw_wl_nop_cycles - 1, &sdr_rw_load_mgr_regs->load_cntr3);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05002581 if (test_dm) {
2582 mcc_instruction = RW_MGR_LFSR_WR_RD_DM_BANK_0;
Marek Vasutb5450962015-07-12 21:05:08 +02002583 writel(RW_MGR_LFSR_WR_RD_DM_BANK_0_NOP,
2584 &sdr_rw_load_jump_mgr_regs->load_jump_add3);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05002585 } else {
2586 mcc_instruction = RW_MGR_LFSR_WR_RD_BANK_0;
Marek Vasutb5450962015-07-12 21:05:08 +02002587 writel(RW_MGR_LFSR_WR_RD_BANK_0_NOP,
2588 &sdr_rw_load_jump_mgr_regs->load_jump_add3);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05002589 }
2590 }
2591
Marek Vasutb5450962015-07-12 21:05:08 +02002592 writel(0, SDR_PHYGRP_RWMGRGRP_ADDRESS |
2593 RW_MGR_RESET_READ_DATAPATH_OFFSET);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05002594
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05002595 if (quick_write_mode)
Marek Vasutb5450962015-07-12 21:05:08 +02002596 writel(0x08, &sdr_rw_load_mgr_regs->load_cntr0);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05002597 else
Marek Vasutb5450962015-07-12 21:05:08 +02002598 writel(0x40, &sdr_rw_load_mgr_regs->load_cntr0);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05002599
Marek Vasutb5450962015-07-12 21:05:08 +02002600 writel(mcc_instruction, &sdr_rw_load_jump_mgr_regs->load_jump_add0);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05002601
2602 /*
2603 * CNTR 1 - This is used to ensure enough time elapses
2604 * for read data to come back.
2605 */
Marek Vasutb5450962015-07-12 21:05:08 +02002606 writel(0x30, &sdr_rw_load_mgr_regs->load_cntr1);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05002607
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05002608 if (test_dm) {
Marek Vasutb5450962015-07-12 21:05:08 +02002609 writel(RW_MGR_LFSR_WR_RD_DM_BANK_0_WAIT,
2610 &sdr_rw_load_jump_mgr_regs->load_jump_add1);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05002611 } else {
Marek Vasutb5450962015-07-12 21:05:08 +02002612 writel(RW_MGR_LFSR_WR_RD_BANK_0_WAIT,
2613 &sdr_rw_load_jump_mgr_regs->load_jump_add1);
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05002614 }
2615
Marek Vasuta3340102015-07-12 19:03:33 +02002616 addr = SDR_PHYGRP_RWMGRGRP_ADDRESS | RW_MGR_RUN_SINGLE_GROUP_OFFSET;
Marek Vasut33acf0f2015-07-12 20:05:54 +02002617 writel(mcc_instruction, addr + (group << 2));
Dinh Nguyen135cc7f2015-06-02 22:52:49 -05002618}
2619
2620/* Test writes, can check for a single bit pass or multiple bit pass */
2621static uint32_t rw_mgr_mem_calibrate_write_test(uint32_t rank_bgn,
2622 uint32_t write_group, uint32_t use_dm, uint32_t all_correct,
2623 uint32_t *bit_chk, uint32_t all_ranks)
2624{
	uint32_t r;
	uint32_t correct_mask_vg;
	uint32_t tmp_bit_chk;
	uint32_t vg;
	uint32_t rank_end = all_ranks ? RW_MGR_MEM_NUMBER_OF_RANKS :
		(rank_bgn + NUM_RANKS_PER_SHADOW_REG);
	uint32_t addr_rw_mgr;
	uint32_t base_rw_mgr;

	*bit_chk = param->write_correct_mask;
	correct_mask_vg = param->write_correct_mask_vg;

	for (r = rank_bgn; r < rank_end; r++) {
		/* Request to skip the rank. */
		if (param->skip_ranks[r])
			continue;

		/* Set rank. */
		set_rank_and_odt_mask(r, RW_MGR_ODT_MODE_READ_WRITE);

		tmp_bit_chk = 0;
		addr_rw_mgr = SDR_PHYGRP_RWMGRGRP_ADDRESS;
		for (vg = RW_MGR_MEM_VIRTUAL_GROUPS_PER_WRITE_DQS - 1; ; vg--) {
			/* Reset the FIFOs to get pointers to known state. */
			writel(0, &phy_mgr_cmd->fifo_reset);

			tmp_bit_chk = tmp_bit_chk <<
				(RW_MGR_MEM_DQ_PER_WRITE_DQS /
				RW_MGR_MEM_VIRTUAL_GROUPS_PER_WRITE_DQS);
			rw_mgr_mem_calibrate_write_test_issue(write_group *
				RW_MGR_MEM_VIRTUAL_GROUPS_PER_WRITE_DQS + vg,
				use_dm);

			base_rw_mgr = readl(addr_rw_mgr);
			tmp_bit_chk = tmp_bit_chk | (correct_mask_vg & ~base_rw_mgr);
			if (vg == 0)
				break;
		}
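		/*
		 * Note: tmp_bit_chk now holds one pass/fail bit per DQ pin of
		 * this write group, assembled from the highest-numbered
		 * virtual group down; AND-ing it into *bit_chk below keeps
		 * only the pins that passed on every rank tested so far.
		 */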
		*bit_chk &= tmp_bit_chk;
	}

	if (all_correct) {
		set_rank_and_odt_mask(0, RW_MGR_ODT_MODE_OFF);
		debug_cond(DLEVEL == 2,
			   "write_test(%u,%u,ALL) : %u == %u => %lu",
			   write_group, use_dm, *bit_chk,
			   param->write_correct_mask,
			   (long unsigned int)(*bit_chk ==
			   param->write_correct_mask));
		return *bit_chk == param->write_correct_mask;
	} else {
		set_rank_and_odt_mask(0, RW_MGR_ODT_MODE_OFF);
		debug_cond(DLEVEL == 2,
			   "write_test(%u,%u,ONE) : %u != %lu => %lu",
			   write_group, use_dm, *bit_chk,
			   (long unsigned int)0,
			   (long unsigned int)(*bit_chk != 0));
		return *bit_chk != 0x00;
	}
}

/**
 * rw_mgr_mem_calibrate_writes_center() - Center all write windows
 * @rank_bgn:		Rank number at which the test begins
 * @write_group:	Write group
 * @test_bgn:		Test begin
 *
 * Center all windows. Do per-bit deskew to possibly increase the size
 * of certain windows.
 */
static uint32_t rw_mgr_mem_calibrate_writes_center(uint32_t rank_bgn,
	uint32_t write_group, uint32_t test_bgn)
{
	uint32_t i, p, min_index;
	int32_t d;
	uint32_t bit_chk;
	uint32_t sticky_bit_chk;
	/*
	 * Store the edges as signed, since there are comparisons with
	 * signed numbers.
	 */
	int32_t left_edge[RW_MGR_MEM_DQ_PER_WRITE_DQS];
	int32_t right_edge[RW_MGR_MEM_DQ_PER_WRITE_DQS];
	int32_t mid;
	int32_t mid_min, orig_mid_min;
	int32_t new_dqs, start_dqs, shift_dq;
	int32_t dq_margin, dqs_margin, dm_margin;
	uint32_t stop;
	uint32_t temp_dq_out1_delay;
	uint32_t addr;

	debug("%s:%d %u %u\n", __func__, __LINE__, write_group, test_bgn);

	dm_margin = 0;

	addr = SDR_PHYGRP_SCCGRP_ADDRESS | SCC_MGR_IO_OUT1_DELAY_OFFSET;
	start_dqs = readl(addr + (RW_MGR_MEM_DQ_PER_WRITE_DQS << 2));

	/* Per-bit deskew. */

	/*
	 * Set the left and right edge of each bit to an illegal value.
	 * Use (IO_IO_OUT1_DELAY_MAX + 1) as an illegal value.
	 */
	sticky_bit_chk = 0;
	for (i = 0; i < RW_MGR_MEM_DQ_PER_WRITE_DQS; i++) {
		left_edge[i] = IO_IO_OUT1_DELAY_MAX + 1;
		right_edge[i] = IO_IO_OUT1_DELAY_MAX + 1;
	}

	/* Search for the left edge of the window for each bit. */
	for (d = 0; d <= IO_IO_OUT1_DELAY_MAX; d++) {
		scc_mgr_apply_group_dq_out1_delay(write_group, d);

		writel(0, &sdr_scc_mgr->update);

		/*
		 * Stop searching when the write test doesn't pass AND when
		 * we've seen a passing write on every bit.
		 */
		stop = !rw_mgr_mem_calibrate_write_test(rank_bgn, write_group,
			0, PASS_ONE_BIT, &bit_chk, 0);
		sticky_bit_chk = sticky_bit_chk | bit_chk;
		stop = stop && (sticky_bit_chk == param->write_correct_mask);
		debug_cond(DLEVEL == 2,
			   "write_center(left): dtap=%d => %u == %u && %u [bit_chk=%u]\n",
			   d, sticky_bit_chk, param->write_correct_mask,
			   stop, bit_chk);

		if (stop == 1) {
			break;
		} else {
			for (i = 0; i < RW_MGR_MEM_DQ_PER_WRITE_DQS; i++) {
				if (bit_chk & 1) {
					/*
					 * Remember a passing test as the
					 * left_edge.
					 */
					left_edge[i] = d;
				} else {
					/*
					 * If a left edge has not been seen
					 * yet, then a future passing test will
					 * mark this edge as the right edge.
					 */
					if (left_edge[i] ==
					    IO_IO_OUT1_DELAY_MAX + 1)
						right_edge[i] = -(d + 1);
				}
				debug_cond(DLEVEL == 2,
					   "write_center[l,d=%d): bit_chk_test=%d left_edge[%u]: %d right_edge[%u]: %d\n",
					   d, (int)(bit_chk & 1), i,
					   left_edge[i], i, right_edge[i]);
				bit_chk = bit_chk >> 1;
			}
		}
	}

	/* Reset DQ delay chains to 0. */
	scc_mgr_apply_group_dq_out1_delay(0);
	sticky_bit_chk = 0;
	for (i = RW_MGR_MEM_DQ_PER_WRITE_DQS - 1;; i--) {
		debug_cond(DLEVEL == 2,
			   "%s:%d write_center: left_edge[%u]: %d right_edge[%u]: %d\n",
			   __func__, __LINE__, i, left_edge[i], i,
			   right_edge[i]);

		/*
		 * Check for cases where we haven't found the left edge,
		 * which makes our assignment of the right edge invalid.
		 * Reset it to the illegal value.
		 */
		if ((left_edge[i] == IO_IO_OUT1_DELAY_MAX + 1) &&
		    (right_edge[i] != IO_IO_OUT1_DELAY_MAX + 1)) {
			right_edge[i] = IO_IO_OUT1_DELAY_MAX + 1;
			debug_cond(DLEVEL == 2,
				   "%s:%d write_center: reset right_edge[%u]: %d\n",
				   __func__, __LINE__, i, right_edge[i]);
		}

		/*
		 * Reset sticky bit (except for bits where we have
		 * seen the left edge).
		 */
		sticky_bit_chk = sticky_bit_chk << 1;
		if (left_edge[i] != IO_IO_OUT1_DELAY_MAX + 1)
			sticky_bit_chk = sticky_bit_chk | 1;

		if (i == 0)
			break;
	}

	/* Search for the right edge of the window for each bit. */
	for (d = 0; d <= IO_IO_OUT1_DELAY_MAX - start_dqs; d++) {
		scc_mgr_apply_group_dqs_io_and_oct_out1(write_group,
							d + start_dqs);

		writel(0, &sdr_scc_mgr->update);

		/*
		 * Stop searching when the write test doesn't pass AND when
		 * we've seen a passing write on every bit.
		 */
		stop = !rw_mgr_mem_calibrate_write_test(rank_bgn, write_group,
			0, PASS_ONE_BIT, &bit_chk, 0);

		sticky_bit_chk = sticky_bit_chk | bit_chk;
		stop = stop && (sticky_bit_chk == param->write_correct_mask);

		debug_cond(DLEVEL == 2,
			   "write_center (right): dtap=%u => %u == %u && %u\n",
			   d, sticky_bit_chk, param->write_correct_mask, stop);

		if (stop == 1) {
			if (d == 0) {
				for (i = 0; i < RW_MGR_MEM_DQ_PER_WRITE_DQS;
				     i++) {
					/*
					 * d = 0 failed, but it passed when
					 * testing the left edge, so it must
					 * be marginal; set it to -1.
					 */
					if (right_edge[i] ==
					    IO_IO_OUT1_DELAY_MAX + 1 &&
					    left_edge[i] !=
					    IO_IO_OUT1_DELAY_MAX + 1)
						right_edge[i] = -1;
				}
			}
			break;
		} else {
			for (i = 0; i < RW_MGR_MEM_DQ_PER_WRITE_DQS; i++) {
				if (bit_chk & 1) {
					/*
					 * Remember a passing test as
					 * the right_edge.
					 */
					right_edge[i] = d;
				} else {
					if (d != 0) {
						/*
						 * If a right edge has not
						 * been seen yet, then a future
						 * passing test will mark this
						 * edge as the left edge.
						 */
						if (right_edge[i] ==
						    IO_IO_OUT1_DELAY_MAX + 1)
							left_edge[i] = -(d + 1);
					} else {
						/*
						 * d = 0 failed, but it passed
						 * when testing the left edge,
						 * so it must be marginal; set
						 * it to -1.
						 */
						if (right_edge[i] ==
						    IO_IO_OUT1_DELAY_MAX + 1 &&
						    left_edge[i] !=
						    IO_IO_OUT1_DELAY_MAX + 1)
							right_edge[i] = -1;
						/*
						 * If a right edge has not been
						 * seen yet, then a future
						 * passing test will mark this
						 * edge as the left edge.
						 */
						else if (right_edge[i] ==
							 IO_IO_OUT1_DELAY_MAX + 1)
							left_edge[i] = -(d + 1);
					}
				}
				debug_cond(DLEVEL == 2,
					   "write_center[r,d=%d): bit_chk_test=%d left_edge[%u]: %d right_edge[%u]: %d\n",
					   d, (int)(bit_chk & 1), i,
					   left_edge[i], i, right_edge[i]);
				bit_chk = bit_chk >> 1;
			}
		}
	}

	/* Check that all bits have a window. */
	for (i = 0; i < RW_MGR_MEM_DQ_PER_WRITE_DQS; i++) {
		debug_cond(DLEVEL == 2,
			   "%s:%d write_center: left_edge[%u]: %d right_edge[%u]: %d",
			   __func__, __LINE__, i, left_edge[i], i,
			   right_edge[i]);
		if ((left_edge[i] == IO_IO_OUT1_DELAY_MAX + 1) ||
		    (right_edge[i] == IO_IO_OUT1_DELAY_MAX + 1)) {
			set_failing_group_stage(test_bgn + i,
						CAL_STAGE_WRITES,
						CAL_SUBSTAGE_WRITES_CENTER);
			return 0;
		}
	}

	/* Find middle of window for each DQ bit. */
	mid_min = left_edge[0] - right_edge[0];
	min_index = 0;
	for (i = 1; i < RW_MGR_MEM_DQ_PER_WRITE_DQS; i++) {
		mid = left_edge[i] - right_edge[i];
		if (mid < mid_min) {
			mid_min = mid;
			min_index = i;
		}
	}

	/*
	 * -mid_min/2 represents the amount that we need to move DQS.
	 * If mid_min is odd and positive we'll need to add one to
	 * make sure the rounding in further calculations is correct
	 * (always bias to the right), so just add 1 for all positive values.
	 */
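	/*
	 * Worked example (illustrative): mid_min = 5 becomes (5 + 1) / 2 = 3,
	 * while mid_min = -5 becomes -5 / 2 = -2 because C integer division
	 * truncates toward zero; both results are rounded toward the right
	 * edge, which is the bias the comment above asks for.
	 */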
	if (mid_min > 0)
		mid_min++;
	mid_min = mid_min / 2;
	debug_cond(DLEVEL == 1, "%s:%d write_center: mid_min=%d\n",
		   __func__, __LINE__, mid_min);

	/* Determine the amount we can change DQS (which is -mid_min). */
	orig_mid_min = mid_min;
	new_dqs = start_dqs;
	mid_min = 0;
	debug_cond(DLEVEL == 1,
		   "%s:%d write_center: start_dqs=%d new_dqs=%d mid_min=%d\n",
		   __func__, __LINE__, start_dqs, new_dqs, mid_min);
	/* Initialize data for export structures. */
	dqs_margin = IO_IO_OUT1_DELAY_MAX + 1;
	dq_margin = IO_IO_OUT1_DELAY_MAX + 1;

	/* Add delay to bring centre of all DQ windows to the same "level". */
	for (i = 0, p = test_bgn; i < RW_MGR_MEM_DQ_PER_WRITE_DQS; i++, p++) {
		/* Use values before divide by 2 to reduce round off error. */
		shift_dq = (left_edge[i] - right_edge[i] -
			(left_edge[min_index] - right_edge[min_index]))/2 +
			(orig_mid_min - mid_min);

		debug_cond(DLEVEL == 2,
			   "%s:%d write_center: before: shift_dq[%u]=%d\n",
			   __func__, __LINE__, i, shift_dq);

		addr = SDR_PHYGRP_SCCGRP_ADDRESS | SCC_MGR_IO_OUT1_DELAY_OFFSET;
		temp_dq_out1_delay = readl(addr + (i << 2));
		if (shift_dq + (int32_t)temp_dq_out1_delay >
		    (int32_t)IO_IO_OUT1_DELAY_MAX) {
			shift_dq = (int32_t)IO_IO_OUT1_DELAY_MAX -
				   temp_dq_out1_delay;
		} else if (shift_dq + (int32_t)temp_dq_out1_delay < 0) {
			shift_dq = -(int32_t)temp_dq_out1_delay;
		}
		debug_cond(DLEVEL == 2, "write_center: after: shift_dq[%u]=%d\n",
			   i, shift_dq);
		scc_mgr_set_dq_out1_delay(i, temp_dq_out1_delay + shift_dq);
		scc_mgr_load_dq(i);

		debug_cond(DLEVEL == 2, "write_center: margin[%u]=[%d,%d]\n", i,
			   left_edge[i] - shift_dq + (-mid_min),
			   right_edge[i] + shift_dq - (-mid_min));
		/* To determine values for export structures. */
		if (left_edge[i] - shift_dq + (-mid_min) < dq_margin)
			dq_margin = left_edge[i] - shift_dq + (-mid_min);

		if (right_edge[i] + shift_dq - (-mid_min) < dqs_margin)
			dqs_margin = right_edge[i] + shift_dq - (-mid_min);
	}

	/* Move DQS. */
	scc_mgr_apply_group_dqs_io_and_oct_out1(write_group, new_dqs);
	writel(0, &sdr_scc_mgr->update);

	/* Centre DM. */
	debug_cond(DLEVEL == 2, "%s:%d write_center: DM\n", __func__, __LINE__);

	/*
	 * Set the left and right edge of each bit to an illegal value.
	 * Use (IO_IO_OUT1_DELAY_MAX + 1) as an illegal value.
	 */
	left_edge[0] = IO_IO_OUT1_DELAY_MAX + 1;
	right_edge[0] = IO_IO_OUT1_DELAY_MAX + 1;
	int32_t bgn_curr = IO_IO_OUT1_DELAY_MAX + 1;
	int32_t end_curr = IO_IO_OUT1_DELAY_MAX + 1;
	int32_t bgn_best = IO_IO_OUT1_DELAY_MAX + 1;
	int32_t end_best = IO_IO_OUT1_DELAY_MAX + 1;
	int32_t win_best = 0;

	/* Search for the portion of the window reachable by shifting DM. */
	for (d = IO_IO_OUT1_DELAY_MAX; d >= 0; d -= DELTA_D) {
		scc_mgr_apply_group_dm_out1_delay(d);
		writel(0, &sdr_scc_mgr->update);

		if (rw_mgr_mem_calibrate_write_test(rank_bgn, write_group, 1,
						    PASS_ALL_BITS, &bit_chk,
						    0)) {
			/* Set current end of the window. */
			end_curr = -d;
			/*
			 * If a starting edge of our window has not been seen,
			 * this is our current start of the DM window.
			 */
			if (bgn_curr == IO_IO_OUT1_DELAY_MAX + 1)
				bgn_curr = -d;

			/*
			 * If current window is bigger than best seen,
			 * set best seen to be current window.
			 */
			if ((end_curr - bgn_curr + 1) > win_best) {
				win_best = end_curr - bgn_curr + 1;
				bgn_best = bgn_curr;
				end_best = end_curr;
			}
		} else {
			/* We just saw a failing test. Reset temp edge. */
			bgn_curr = IO_IO_OUT1_DELAY_MAX + 1;
			end_curr = IO_IO_OUT1_DELAY_MAX + 1;
		}
	}
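	/*
	 * Note: the edges recorded above are negative (-d) because here the
	 * DM signal itself was delayed; the loop further below delays DQS
	 * instead and records positive values, so the two searches describe
	 * one window on a shared axis around the current DM/DQS alignment.
	 */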

	/* Reset DM delay chains to 0. */
	scc_mgr_apply_group_dm_out1_delay(0);

	/*
	 * Check to see if the current window nudges up against 0 delay.
	 * If so we need to continue the search by shifting DQS, otherwise
	 * the DQS search begins as a new search.
	 */
	if (end_curr != 0) {
		bgn_curr = IO_IO_OUT1_DELAY_MAX + 1;
		end_curr = IO_IO_OUT1_DELAY_MAX + 1;
	}

	/* Search for the portion of the window reachable by shifting DQS. */
	for (d = 0; d <= IO_IO_OUT1_DELAY_MAX - new_dqs; d += DELTA_D) {
		/*
		 * Note: this only shifts DQS, so we may be limiting ourselves
		 * to the width of DQ unnecessarily.
		 */
		scc_mgr_apply_group_dqs_io_and_oct_out1(write_group,
							d + new_dqs);

		writel(0, &sdr_scc_mgr->update);
		if (rw_mgr_mem_calibrate_write_test(rank_bgn, write_group, 1,
						    PASS_ALL_BITS, &bit_chk,
						    0)) {
			/* Set current end of the window. */
			end_curr = d;
			/*
			 * If a beginning edge of our window has not been seen,
			 * this is our current beginning of the DM window.
			 */
			if (bgn_curr == IO_IO_OUT1_DELAY_MAX + 1)
				bgn_curr = d;

			/*
			 * If current window is bigger than best seen,
			 * set best seen to be current window.
			 */
			if ((end_curr - bgn_curr + 1) > win_best) {
				win_best = end_curr - bgn_curr + 1;
				bgn_best = bgn_curr;
				end_best = end_curr;
			}
		} else {
			/* We just saw a failing test. Reset temp edge. */
			bgn_curr = IO_IO_OUT1_DELAY_MAX + 1;
			end_curr = IO_IO_OUT1_DELAY_MAX + 1;

			/*
			 * Early exit optimization: if the remaining delay
			 * chain space is less than the largest window we
			 * have already seen, we can exit.
			 */
			if ((win_best - 1) >
			    (IO_IO_OUT1_DELAY_MAX - new_dqs - d))
				break;
		}
	}

	/* Assign left and right edge for cal and reporting. */
	left_edge[0] = -1 * bgn_best;
	right_edge[0] = end_best;

	debug_cond(DLEVEL == 2, "%s:%d dm_calib: left=%d right=%d\n", __func__,
		   __LINE__, left_edge[0], right_edge[0]);

	/* Move DQS (back to orig). */
	scc_mgr_apply_group_dqs_io_and_oct_out1(write_group, new_dqs);

	/* Move DM. */

	/* Find middle of window for the DM bit. */
	mid = (left_edge[0] - right_edge[0]) / 2;

	/* Only move right, since we are not moving DQS/DQ. */
	if (mid < 0)
		mid = 0;

	/* dm_margin should fail if we never find a window. */
	if (win_best == 0)
		dm_margin = -1;
	else
		dm_margin = left_edge[0] - mid;

	scc_mgr_apply_group_dm_out1_delay(mid);
	writel(0, &sdr_scc_mgr->update);

	debug_cond(DLEVEL == 2,
		   "%s:%d dm_calib: left=%d right=%d mid=%d dm_margin=%d\n",
		   __func__, __LINE__, left_edge[0], right_edge[0], mid,
		   dm_margin);
	/* Export values. */
	gbl->fom_out += dq_margin + dqs_margin;

	debug_cond(DLEVEL == 2,
		   "%s:%d write_center: dq_margin=%d dqs_margin=%d dm_margin=%d\n",
		   __func__, __LINE__, dq_margin, dqs_margin, dm_margin);

	/*
	 * Do not remove this line as it makes sure all of our
	 * decisions have been applied.
	 */
	writel(0, &sdr_scc_mgr->update);
	return (dq_margin >= 0) && (dqs_margin >= 0) && (dm_margin >= 0);
}

/**
 * rw_mgr_mem_calibrate_writes() - Calibrate the write operations
 * @rank_bgn:	Rank number at which the test begins
 * @g:		Write group
 * @test_bgn:	Test begin
 *
 * Calibrate the write operations by centering the write windows.
 */
static uint32_t rw_mgr_mem_calibrate_writes(uint32_t rank_bgn, uint32_t g,
	uint32_t test_bgn)
{
	/* Update info for sims. */
	debug("%s:%d %u %u\n", __func__, __LINE__, g, test_bgn);

	reg_file_set_stage(CAL_STAGE_WRITES);
	reg_file_set_sub_stage(CAL_SUBSTAGE_WRITES_CENTER);

	reg_file_set_group(g);

	if (!rw_mgr_mem_calibrate_writes_center(rank_bgn, g, test_bgn)) {
		set_failing_group_stage(g, CAL_STAGE_WRITES,
					CAL_SUBSTAGE_WRITES_CENTER);
		return 0;
	}

	return 1;
}

/**
 * mem_precharge_and_activate() - Precharge all banks and activate
 *
 * Precharge all banks and activate row 0 in bank "000..." and bank "111...".
 */
static void mem_precharge_and_activate(void)
{
	int r;

	for (r = 0; r < RW_MGR_MEM_NUMBER_OF_RANKS; r++) {
		/* Test if the rank should be skipped. */
		if (param->skip_ranks[r])
			continue;

		/* Set rank. */
		set_rank_and_odt_mask(r, RW_MGR_ODT_MODE_OFF);

		/* Precharge all banks. */
		writel(RW_MGR_PRECHARGE_ALL, SDR_PHYGRP_RWMGRGRP_ADDRESS |
					     RW_MGR_RUN_SINGLE_GROUP_OFFSET);

		writel(0x0F, &sdr_rw_load_mgr_regs->load_cntr0);
		writel(RW_MGR_ACTIVATE_0_AND_1_WAIT1,
		       &sdr_rw_load_jump_mgr_regs->load_jump_add0);

		writel(0x0F, &sdr_rw_load_mgr_regs->load_cntr1);
		writel(RW_MGR_ACTIVATE_0_AND_1_WAIT2,
		       &sdr_rw_load_jump_mgr_regs->load_jump_add1);

		/* Activate rows. */
		writel(RW_MGR_ACTIVATE_0_AND_1, SDR_PHYGRP_RWMGRGRP_ADDRESS |
						RW_MGR_RUN_SINGLE_GROUP_OFFSET);
	}
}

/**
 * mem_init_latency() - Configure memory RLAT and WLAT settings
 *
 * Configure memory RLAT and WLAT parameters.
 */
static void mem_init_latency(void)
{
	/*
	 * For AV/CV, LFIFO is hardened and always runs at full rate,
	 * so max latency in AFI clocks, used here, is correspondingly
	 * smaller.
	 */
	const u32 max_latency = (1 << MAX_LATENCY_COUNT_WIDTH) - 1;
	u32 rlat, wlat;

	debug("%s:%d\n", __func__, __LINE__);

	/*
	 * Read in write latency.
	 * WL for Hard PHY does not include additive latency.
	 */
	wlat = readl(&data_mgr->t_wl_add);
	wlat += readl(&data_mgr->mem_t_add);

	gbl->rw_wl_nop_cycles = wlat - 1;

	/* Read in read latency. */
	rlat = readl(&data_mgr->t_rl_add);

	/* Set a pretty high read latency initially. */
	gbl->curr_read_lat = rlat + 16;
	if (gbl->curr_read_lat > max_latency)
		gbl->curr_read_lat = max_latency;

	writel(gbl->curr_read_lat, &phy_mgr_cfg->phy_rlat);

	/* Advertise write latency. */
	writel(wlat, &phy_mgr_cfg->afi_wlat);
}

/**
 * mem_skip_calibrate() - Set VFIFO and LFIFO to instant-on settings
 *
 * Set VFIFO and LFIFO to instant-on settings in skip calibration mode.
 */
static void mem_skip_calibrate(void)
{
	uint32_t vfifo_offset;
	uint32_t i, j, r;

	debug("%s:%d\n", __func__, __LINE__);
	/* Need to update every shadow register set used by the interface. */
	for (r = 0; r < RW_MGR_MEM_NUMBER_OF_RANKS;
	     r += NUM_RANKS_PER_SHADOW_REG) {
		/*
		 * Set output phase alignment settings appropriate for
		 * skip calibration.
		 */
		for (i = 0; i < RW_MGR_MEM_IF_READ_DQS_WIDTH; i++) {
			scc_mgr_set_dqs_en_phase(i, 0);
#if IO_DLL_CHAIN_LENGTH == 6
			scc_mgr_set_dqdqs_output_phase(i, 6);
#else
			scc_mgr_set_dqdqs_output_phase(i, 7);
#endif
			/*
			 * Case:33398
			 *
			 * Write data arrives to the I/O two cycles before write
			 * latency is reached (720 deg).
			 *   -> due to bit-slip in a/c bus
			 *   -> to allow board skew where dqs is longer than ck
			 *      -> how often can this happen!?
			 *      -> can claim back some ptaps for high freq
			 *         support if we can relax this, but i digress...
			 *
			 * The write_clk leads mem_ck by 90 deg.
			 * The minimum ptap of the OPA is 180 deg.
			 * Each ptap has (360 / IO_DLL_CHAIN_LENGTH) deg of delay.
			 * The write_clk is always delayed by 2 ptaps.
			 *
			 * Hence, to make DQS aligned to CK, we need to delay
			 * DQS by:
			 *   (720 - 90 - 180 - 2 * (360 / IO_DLL_CHAIN_LENGTH))
			 *
			 * Dividing the above by (360 / IO_DLL_CHAIN_LENGTH)
			 * gives us the number of ptaps, which simplifies to:
			 *   (1.25 * IO_DLL_CHAIN_LENGTH - 2)
			 */
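			/*
			 * Worked example: with IO_DLL_CHAIN_LENGTH = 8
			 * (i.e. 45 deg per ptap), the delay above is
			 * (720 - 90 - 180 - 2 * 45) / 45 = 8 ptaps, matching
			 * 1.25 * 8 - 2 = 8. Note that this call supersedes
			 * the #if-selected phase written above.
			 */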
			scc_mgr_set_dqdqs_output_phase(i,
					1.25 * IO_DLL_CHAIN_LENGTH - 2);
		}
		writel(0xff, &sdr_scc_mgr->dqs_ena);
		writel(0xff, &sdr_scc_mgr->dqs_io_ena);

		for (i = 0; i < RW_MGR_MEM_IF_WRITE_DQS_WIDTH; i++) {
			writel(i, SDR_PHYGRP_SCCGRP_ADDRESS |
				  SCC_MGR_GROUP_COUNTER_OFFSET);
		}
		writel(0xff, &sdr_scc_mgr->dq_ena);
		writel(0xff, &sdr_scc_mgr->dm_ena);
		writel(0, &sdr_scc_mgr->update);
	}

	/* Compensate for simulation model behaviour. */
	for (i = 0; i < RW_MGR_MEM_IF_READ_DQS_WIDTH; i++) {
		scc_mgr_set_dqs_bus_in_delay(i, 10);
		scc_mgr_load_dqs(i);
	}
	writel(0, &sdr_scc_mgr->update);

	/*
	 * Arria V has hard FIFOs that can only be initialized by incrementing
	 * in sequencer.
	 */
	vfifo_offset = CALIB_VFIFO_OFFSET;
	for (j = 0; j < vfifo_offset; j++)
		writel(0xff, &phy_mgr_cmd->inc_vfifo_hard_phy);
	writel(0, &phy_mgr_cmd->fifo_reset);

	/*
	 * For Arria V and Cyclone V with hard LFIFO, we get the skip-cal
	 * setting from generation-time constant.
	 */
	gbl->curr_read_lat = CALIB_LFIFO_OFFSET;
	writel(gbl->curr_read_lat, &phy_mgr_cfg->phy_rlat);
}

/**
 * mem_calibrate() - Memory calibration entry point.
 *
 * Perform memory calibration.
 */
static uint32_t mem_calibrate(void)
{
	uint32_t i;
	uint32_t rank_bgn, sr;
	uint32_t write_group, write_test_bgn;
	uint32_t read_group, read_test_bgn;
	uint32_t run_groups, current_run;
	uint32_t failing_groups = 0;
	uint32_t group_failed = 0;

	const u32 rwdqs_ratio = RW_MGR_MEM_IF_READ_DQS_WIDTH /
				RW_MGR_MEM_IF_WRITE_DQS_WIDTH;
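	/*
	 * rwdqs_ratio is the number of read DQS groups served by one write
	 * DQS group; e.g. a design with read DQS width 8 and write DQS
	 * width 4 (values vary per configuration) calibrates two read
	 * groups per write group in the loops below.
	 */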

	debug("%s:%d\n", __func__, __LINE__);

	/* Initialize the data settings. */
	gbl->error_substage = CAL_SUBSTAGE_NIL;
	gbl->error_stage = CAL_STAGE_NIL;
	gbl->error_group = 0xff;
	gbl->fom_in = 0;
	gbl->fom_out = 0;

	/* Initialize WLAT and RLAT. */
	mem_init_latency();

	/* Initialize bit slips. */
	mem_precharge_and_activate();

	for (i = 0; i < RW_MGR_MEM_IF_READ_DQS_WIDTH; i++) {
		writel(i, SDR_PHYGRP_SCCGRP_ADDRESS |
			  SCC_MGR_GROUP_COUNTER_OFFSET);
		/* Only needed once to set all groups, pins, DQ, DQS, DM. */
		if (i == 0)
			scc_mgr_set_hhp_extras();

		scc_set_bypass_mode(i);
	}

	/* Calibration is skipped. */
	if ((dyn_calib_steps & CALIB_SKIP_ALL) == CALIB_SKIP_ALL) {
		/*
		 * Set VFIFO and LFIFO to instant-on settings in skip
		 * calibration mode.
		 */
		mem_skip_calibrate();

		/*
		 * Do not remove this line as it makes sure all of our
		 * decisions have been applied.
		 */
		writel(0, &sdr_scc_mgr->update);
		return 1;
	}

	/* Calibration is not skipped. */
	for (i = 0; i < NUM_CALIB_REPEAT; i++) {
		/*
		 * Zero all delay chain/phase settings for all
		 * groups and all shadow register sets.
		 */
		scc_mgr_zero_all();

		run_groups = ~param->skip_groups;

		for (write_group = 0, write_test_bgn = 0; write_group
		     < RW_MGR_MEM_IF_WRITE_DQS_WIDTH; write_group++,
		     write_test_bgn += RW_MGR_MEM_DQ_PER_WRITE_DQS) {

			/* Initialize the group failure. */
			group_failed = 0;

			current_run = run_groups & ((1 <<
					RW_MGR_NUM_DQS_PER_WRITE_GROUP) - 1);
			run_groups = run_groups >>
					RW_MGR_NUM_DQS_PER_WRITE_GROUP;

			if (current_run == 0)
				continue;

			writel(write_group, SDR_PHYGRP_SCCGRP_ADDRESS |
					    SCC_MGR_GROUP_COUNTER_OFFSET);
			scc_mgr_zero_group(write_group, 0);

			for (read_group = write_group * rwdqs_ratio,
			     read_test_bgn = 0;
			     read_group < (write_group + 1) * rwdqs_ratio;
			     read_group++,
			     read_test_bgn += RW_MGR_MEM_DQ_PER_READ_DQS) {
				if (STATIC_CALIB_STEPS & CALIB_SKIP_VFIFO)
					continue;

				/* Calibrate the VFIFO. */
				if (rw_mgr_mem_calibrate_vfifo(read_group,
							       read_test_bgn))
					continue;

				if (!(gbl->phy_debug_mode_flags &
				      PHY_DEBUG_SWEEP_ALL_GROUPS))
					return 0;

				/* The group failed, we're done. */
				goto grp_failed;
			}

			/* Calibrate the output side. */
			for (rank_bgn = 0, sr = 0;
			     rank_bgn < RW_MGR_MEM_NUMBER_OF_RANKS;
			     rank_bgn += NUM_RANKS_PER_SHADOW_REG, sr++) {
				if (STATIC_CALIB_STEPS & CALIB_SKIP_WRITES)
					continue;

				/* Not needed in quick mode! */
				if (STATIC_CALIB_STEPS & CALIB_SKIP_DELAY_SWEEPS)
					continue;

				/*
				 * Determine if this set of ranks
				 * should be skipped entirely.
				 */
				if (param->skip_shadow_regs[sr])
					continue;

				/* Calibrate WRITEs. */
				if (rw_mgr_mem_calibrate_writes(rank_bgn,
						write_group, write_test_bgn))
					continue;

				group_failed = 1;
				if (!(gbl->phy_debug_mode_flags &
				      PHY_DEBUG_SWEEP_ALL_GROUPS))
					return 0;
			}

			/* Some group failed, we're done. */
			if (group_failed)
				goto grp_failed;

			for (read_group = write_group * rwdqs_ratio,
			     read_test_bgn = 0;
			     read_group < (write_group + 1) * rwdqs_ratio;
			     read_group++,
			     read_test_bgn += RW_MGR_MEM_DQ_PER_READ_DQS) {
				if (STATIC_CALIB_STEPS & CALIB_SKIP_WRITES)
					continue;

				if (rw_mgr_mem_calibrate_vfifo_end(read_group,
								   read_test_bgn))
					continue;

				if (!(gbl->phy_debug_mode_flags &
				      PHY_DEBUG_SWEEP_ALL_GROUPS))
					return 0;

				/* The group failed, we're done. */
				goto grp_failed;
			}

			/* No group failed, continue as usual. */
			continue;

grp_failed:		/* A group failed, increment the counter. */
			failing_groups++;
		}

		/* If there are any failing groups then report the failure. */
		if (failing_groups != 0)
			return 0;

		if (STATIC_CALIB_STEPS & CALIB_SKIP_LFIFO)
			continue;

		/*
		 * If we're skipping groups as part of debug,
		 * don't calibrate LFIFO.
		 */
		if (param->skip_groups != 0)
			continue;

		/* Calibrate the LFIFO. */
		if (!rw_mgr_mem_calibrate_lfifo())
			return 0;
	}

	/*
	 * Do not remove this line as it makes sure all of our decisions
	 * have been applied.
	 */
	writel(0, &sdr_scc_mgr->update);
	return 1;
}

/**
 * run_mem_calibrate() - Perform memory calibration
 *
 * This function triggers the entire memory calibration procedure.
 */
static int run_mem_calibrate(void)
{
	int pass;

	debug("%s:%d\n", __func__, __LINE__);

	/* Reset pass/fail status shown on afi_cal_success/fail. */
	writel(PHY_MGR_CAL_RESET, &phy_mgr_cfg->cal_status);

	/* Stop tracking manager. */
	clrbits_le32(&sdr_ctrl->ctrl_cfg, 1 << 22);

	phy_mgr_initialize();
	rw_mgr_mem_initialize();

	/* Perform the actual memory calibration. */
	pass = mem_calibrate();

	mem_precharge_and_activate();
	writel(0, &phy_mgr_cmd->fifo_reset);

	/* Handoff. */
	rw_mgr_mem_handoff();
	/*
	 * In Hard PHY this is a 2-bit control:
	 * 0: AFI Mux Select
	 * 1: DDIO Mux Select
	 */
	writel(0x2, &phy_mgr_cfg->mux_sel);

	/* Start tracking manager. */
	setbits_le32(&sdr_ctrl->ctrl_cfg, 1 << 22);

	return pass;
}

/**
 * debug_mem_calibrate() - Report result of memory calibration
 * @pass:	Value indicating whether calibration passed or failed
 *
 * This function reports the results of the memory calibration
 * and writes debug information into the register file.
 */
static void debug_mem_calibrate(int pass)
{
	uint32_t debug_info;

	if (pass) {
		printf("%s: CALIBRATION PASSED\n", __FILE__);

		gbl->fom_in /= 2;
		gbl->fom_out /= 2;

		if (gbl->fom_in > 0xff)
			gbl->fom_in = 0xff;

		if (gbl->fom_out > 0xff)
			gbl->fom_out = 0xff;

		/* Update the FOM in the register file. */
		debug_info = gbl->fom_in;
		debug_info |= gbl->fom_out << 8;
		writel(debug_info, &sdr_reg_file->fom);

		writel(debug_info, &phy_mgr_cfg->cal_debug_info);
		writel(PHY_MGR_CAL_SUCCESS, &phy_mgr_cfg->cal_status);
	} else {
		printf("%s: CALIBRATION FAILED\n", __FILE__);

		/* Update the failing group/stage in the register file. */
		debug_info = gbl->error_stage;
		debug_info |= gbl->error_substage << 8;
		debug_info |= gbl->error_group << 16;

		writel(debug_info, &sdr_reg_file->failing_stage);
		writel(debug_info, &phy_mgr_cfg->cal_debug_info);
		writel(PHY_MGR_CAL_FAIL, &phy_mgr_cfg->cal_status);
	}

	printf("%s: Calibration complete\n", __FILE__);
}

/**
 * hc_initialize_rom_data() - Initialize ROM data
 *
 * Initialize ROM data.
 */
static void hc_initialize_rom_data(void)
{
	u32 i, addr;

	addr = SDR_PHYGRP_RWMGRGRP_ADDRESS | RW_MGR_INST_ROM_WRITE_OFFSET;
	for (i = 0; i < ARRAY_SIZE(inst_rom_init); i++)
		writel(inst_rom_init[i], addr + (i << 2));

	addr = SDR_PHYGRP_RWMGRGRP_ADDRESS | RW_MGR_AC_ROM_WRITE_OFFSET;
	for (i = 0; i < ARRAY_SIZE(ac_rom_init); i++)
		writel(ac_rom_init[i], addr + (i << 2));
}

/**
 * initialize_reg_file() - Initialize SDR register file
 *
 * Initialize SDR register file.
 */
static void initialize_reg_file(void)
{
	/* Initialize the register file with the correct data. */
	writel(REG_FILE_INIT_SEQ_SIGNATURE, &sdr_reg_file->signature);
	writel(0, &sdr_reg_file->debug_data_addr);
	writel(0, &sdr_reg_file->cur_stage);
	writel(0, &sdr_reg_file->fom);
	writel(0, &sdr_reg_file->failing_stage);
	writel(0, &sdr_reg_file->debug1);
	writel(0, &sdr_reg_file->debug2);
}

/**
 * initialize_hps_phy() - Initialize HPS PHY
 *
 * Initialize HPS PHY.
 */
static void initialize_hps_phy(void)
{
	uint32_t reg;
	/*
	 * Tracking also gets configured here because it's in the
	 * same register.
	 */
	uint32_t trk_sample_count = 7500;
	/*
	 * Format is number of outer loops in the 16 MSB, sample
	 * count in 16 LSB.
	 */
	uint32_t trk_long_idle_sample_count = (10 << 16) | 100;

	reg = 0;
	reg |= SDR_CTRLGRP_PHYCTRL_PHYCTRL_0_ACDELAYEN_SET(2);
	reg |= SDR_CTRLGRP_PHYCTRL_PHYCTRL_0_DQDELAYEN_SET(1);
	reg |= SDR_CTRLGRP_PHYCTRL_PHYCTRL_0_DQSDELAYEN_SET(1);
	reg |= SDR_CTRLGRP_PHYCTRL_PHYCTRL_0_DQSLOGICDELAYEN_SET(1);
	reg |= SDR_CTRLGRP_PHYCTRL_PHYCTRL_0_RESETDELAYEN_SET(0);
	reg |= SDR_CTRLGRP_PHYCTRL_PHYCTRL_0_LPDDRDIS_SET(1);
	/*
	 * This field selects the intrinsic latency to RDATA_EN/FULL path.
	 * 00-bypass, 01-add 5 cycles, 10-add 10 cycles, 11-add 15 cycles.
	 */
	reg |= SDR_CTRLGRP_PHYCTRL_PHYCTRL_0_ADDLATSEL_SET(0);
	reg |= SDR_CTRLGRP_PHYCTRL_PHYCTRL_0_SAMPLECOUNT_19_0_SET(
		trk_sample_count);
	writel(reg, &sdr_ctrl->phy_ctrl0);

	reg = 0;
	reg |= SDR_CTRLGRP_PHYCTRL_PHYCTRL_1_SAMPLECOUNT_31_20_SET(
		trk_sample_count >>
		SDR_CTRLGRP_PHYCTRL_PHYCTRL_0_SAMPLECOUNT_19_0_WIDTH);
	reg |= SDR_CTRLGRP_PHYCTRL_PHYCTRL_1_LONGIDLESAMPLECOUNT_19_0_SET(
		trk_long_idle_sample_count);
	writel(reg, &sdr_ctrl->phy_ctrl1);

	reg = 0;
	reg |= SDR_CTRLGRP_PHYCTRL_PHYCTRL_2_LONGIDLESAMPLECOUNT_31_20_SET(
		trk_long_idle_sample_count >>
		SDR_CTRLGRP_PHYCTRL_PHYCTRL_1_LONGIDLESAMPLECOUNT_19_0_WIDTH);
	writel(reg, &sdr_ctrl->phy_ctrl2);
}

/**
 * initialize_tracking() - Initialize tracking
 *
 * Initialize the register file with usable initial data.
 */
static void initialize_tracking(void)
{
	/*
	 * Initialize the register file with the correct data.
	 * Compute usable version of value in case we skip full
	 * computation later.
	 */
	writel(DIV_ROUND_UP(IO_DELAY_PER_OPA_TAP, IO_DELAY_PER_DCHAIN_TAP) - 1,
	       &sdr_reg_file->dtaps_per_ptap);

	/* trk_sample_count */
	writel(7500, &sdr_reg_file->trk_sample_count);

	/* longidle outer loop [15:0] */
	writel((10 << 16) | (100 << 0), &sdr_reg_file->trk_longidle);

	/*
	 * longidle sample count [31:24]
	 * trfc, worst case of 933MHz 4Gb [23:16]
	 * trcd, worst case [15:8]
	 * vfifo wait [7:0]
	 */
	writel((243 << 24) | (14 << 16) | (10 << 8) | (4 << 0),
	       &sdr_reg_file->delays);

	/* mux delay */
	writel((RW_MGR_IDLE << 24) | (RW_MGR_ACTIVATE_1 << 16) |
	       (RW_MGR_SGLE_READ << 8) | (RW_MGR_PRECHARGE_ALL << 0),
	       &sdr_reg_file->trk_rw_mgr_addr);

	writel(RW_MGR_MEM_IF_READ_DQS_WIDTH,
	       &sdr_reg_file->trk_read_dqs_width);

	/* trefi [7:0] */
	writel((RW_MGR_REFRESH_ALL << 24) | (1000 << 0),
	       &sdr_reg_file->trk_rfsh);
}

int sdram_calibration_full(void)
{
	struct param_type my_param;
	struct gbl_type my_gbl;
	uint32_t pass;

	memset(&my_param, 0, sizeof(my_param));
	memset(&my_gbl, 0, sizeof(my_gbl));

	param = &my_param;
	gbl = &my_gbl;

	/* Set the calibration report enabled by default. */
	gbl->phy_debug_mode_flags |= PHY_DEBUG_ENABLE_CAL_RPT;
	/*
	 * By default, sweep all groups (regardless of fail state) and
	 * leave the guaranteed read test enabled.
	 */
#if DISABLE_GUARANTEED_READ
	gbl->phy_debug_mode_flags |= PHY_DEBUG_DISABLE_GUARANTEED_READ;
#endif
	/* Initialize the register file. */
	initialize_reg_file();

	/* Initialize any PHY CSR. */
	initialize_hps_phy();

	scc_mgr_initialize();

	initialize_tracking();

	printf("%s: Preparing to start memory calibration\n", __FILE__);

	debug("%s:%d\n", __func__, __LINE__);
	debug_cond(DLEVEL == 1,
		   "DDR3 FULL_RATE ranks=%u cs/dimm=%u dq/dqs=%u,%u vg/dqs=%u,%u ",
		   RW_MGR_MEM_NUMBER_OF_RANKS, RW_MGR_MEM_NUMBER_OF_CS_PER_DIMM,
		   RW_MGR_MEM_DQ_PER_READ_DQS, RW_MGR_MEM_DQ_PER_WRITE_DQS,
		   RW_MGR_MEM_VIRTUAL_GROUPS_PER_READ_DQS,
		   RW_MGR_MEM_VIRTUAL_GROUPS_PER_WRITE_DQS);
	debug_cond(DLEVEL == 1,
		   "dqs=%u,%u dq=%u dm=%u ptap_delay=%u dtap_delay=%u ",
		   RW_MGR_MEM_IF_READ_DQS_WIDTH, RW_MGR_MEM_IF_WRITE_DQS_WIDTH,
		   RW_MGR_MEM_DATA_WIDTH, RW_MGR_MEM_DATA_MASK_WIDTH,
		   IO_DELAY_PER_OPA_TAP, IO_DELAY_PER_DCHAIN_TAP);
	debug_cond(DLEVEL == 1, "dtap_dqsen_delay=%u, dll=%u",
		   IO_DELAY_PER_DQS_EN_DCHAIN_TAP, IO_DLL_CHAIN_LENGTH);
	debug_cond(DLEVEL == 1,
		   "max values: en_p=%u dqdqs_p=%u en_d=%u dqs_in_d=%u ",
		   IO_DQS_EN_PHASE_MAX, IO_DQDQS_OUT_PHASE_MAX,
		   IO_DQS_EN_DELAY_MAX, IO_DQS_IN_DELAY_MAX);
	debug_cond(DLEVEL == 1, "io_in_d=%u io_out1_d=%u io_out2_d=%u ",
		   IO_IO_IN_DELAY_MAX, IO_IO_OUT1_DELAY_MAX,
		   IO_IO_OUT2_DELAY_MAX);
	debug_cond(DLEVEL == 1, "dqs_in_reserve=%u dqs_out_reserve=%u\n",
		   IO_DQS_IN_RESERVE, IO_DQS_OUT_RESERVE);

	hc_initialize_rom_data();

	/* Update info for sims. */
	reg_file_set_stage(CAL_STAGE_NIL);
	reg_file_set_group(0);

	/*
	 * Load global needed for those actions that require
	 * some dynamic calibration support.
	 */
	dyn_calib_steps = STATIC_CALIB_STEPS;
	/*
	 * Load global to allow dynamic selection of delay loop settings
	 * based on calibration mode.
	 */
	if (!(dyn_calib_steps & CALIB_SKIP_DELAY_LOOPS))
		skip_delay_mask = 0xff;
	else
		skip_delay_mask = 0x0;

	pass = run_mem_calibrate();
	debug_mem_calibrate(pass);
	return pass;
}