Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 1 | /* |
| 2 | * Copyright Altera Corporation (C) 2012-2015 |
| 3 | * |
| 4 | * SPDX-License-Identifier: BSD-3-Clause |
| 5 | */ |
| 6 | |
| 7 | #include <common.h> |
| 8 | #include <asm/io.h> |
| 9 | #include <asm/arch/sdram.h> |
| 10 | #include "sequencer.h" |
| 11 | #include "sequencer_auto.h" |
| 12 | #include "sequencer_auto_ac_init.h" |
| 13 | #include "sequencer_auto_inst_init.h" |
| 14 | #include "sequencer_defines.h" |
| 15 | |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 16 | static struct socfpga_sdr_rw_load_manager *sdr_rw_load_mgr_regs = |
Marek Vasut | 0dcb9e8 | 2015-07-12 18:46:52 +0200 | [diff] [blame] | 17 | (struct socfpga_sdr_rw_load_manager *)(SDR_PHYGRP_RWMGRGRP_ADDRESS | 0x800); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 18 | |
| 19 | static struct socfpga_sdr_rw_load_jump_manager *sdr_rw_load_jump_mgr_regs = |
Marek Vasut | 0dcb9e8 | 2015-07-12 18:46:52 +0200 | [diff] [blame] | 20 | (struct socfpga_sdr_rw_load_jump_manager *)(SDR_PHYGRP_RWMGRGRP_ADDRESS | 0xC00); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 21 | |
| 22 | static struct socfpga_sdr_reg_file *sdr_reg_file = |
Marek Vasut | 341ceec | 2015-07-12 18:31:05 +0200 | [diff] [blame] | 23 | (struct socfpga_sdr_reg_file *)SDR_PHYGRP_REGFILEGRP_ADDRESS; |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 24 | |
| 25 | static struct socfpga_sdr_scc_mgr *sdr_scc_mgr = |
Marek Vasut | 81df0a2 | 2015-07-12 18:42:34 +0200 | [diff] [blame] | 26 | (struct socfpga_sdr_scc_mgr *)(SDR_PHYGRP_SCCGRP_ADDRESS | 0xe00); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 27 | |
| 28 | static struct socfpga_phy_mgr_cmd *phy_mgr_cmd = |
Marek Vasut | c3b9b0f | 2015-07-12 18:54:37 +0200 | [diff] [blame] | 29 | (struct socfpga_phy_mgr_cmd *)SDR_PHYGRP_PHYMGRGRP_ADDRESS; |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 30 | |
| 31 | static struct socfpga_phy_mgr_cfg *phy_mgr_cfg = |
Marek Vasut | c3b9b0f | 2015-07-12 18:54:37 +0200 | [diff] [blame] | 32 | (struct socfpga_phy_mgr_cfg *)(SDR_PHYGRP_PHYMGRGRP_ADDRESS | 0x40); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 33 | |
| 34 | static struct socfpga_data_mgr *data_mgr = |
Marek Vasut | a334010 | 2015-07-12 19:03:33 +0200 | [diff] [blame] | 35 | (struct socfpga_data_mgr *)SDR_PHYGRP_DATAMGRGRP_ADDRESS; |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 36 | |
Marek Vasut | cd5d38e | 2015-07-12 20:49:39 +0200 | [diff] [blame] | 37 | static struct socfpga_sdr_ctrl *sdr_ctrl = |
| 38 | (struct socfpga_sdr_ctrl *)SDR_CTRLGRP_ADDRESS; |
| 39 | |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 40 | #define DELTA_D 1 |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 41 | |
| 42 | /* |
| 43 | * In order to reduce ROM size, most of the selectable calibration steps are |
| 44 | * decided at compile time based on the user's calibration mode selection, |
| 45 | * as captured by the STATIC_CALIB_STEPS selection below. |
| 46 | * |
| 47 | * However, to support simulation-time selection of fast simulation mode, where |
| 48 | * we skip everything except the bare minimum, we need a few of the steps to |
| 49 | * be dynamic. In those cases, we either use the DYNAMIC_CALIB_STEPS for the |
| 50 | * check, which is based on the rtl-supplied value, or we dynamically compute |
| 51 | * the value to use based on the dynamically-chosen calibration mode |
| 52 | */ |
| 53 | |
| 54 | #define DLEVEL 0 |
| 55 | #define STATIC_IN_RTL_SIM 0 |
| 56 | #define STATIC_SKIP_DELAY_LOOPS 0 |
| 57 | |
| 58 | #define STATIC_CALIB_STEPS (STATIC_IN_RTL_SIM | CALIB_SKIP_FULL_TEST | \ |
| 59 | STATIC_SKIP_DELAY_LOOPS) |
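/*
 * Illustration (derived from the settings above): with STATIC_IN_RTL_SIM
 * and STATIC_SKIP_DELAY_LOOPS both 0, STATIC_CALIB_STEPS reduces to just
 * CALIB_SKIP_FULL_TEST, i.e. only the full test is skipped at compile time.
 */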
| 60 | |
| 61 | /* calibration steps requested by the rtl */ |
| 62 | uint16_t dyn_calib_steps; |
| 63 | |
| 64 | /* |
| 65 | * To make CALIB_SKIP_DELAY_LOOPS a dynamic conditional option |
| 66 | * instead of static, we use boolean logic to select between |
| 67 | * non-skip and skip values |
| 68 | * |
| 69 | * The mask is set to include all bits when not-skipping, but is |
| 70 | * zero when skipping |
| 71 | */ |
| 72 | |
| 73 | uint16_t skip_delay_mask; /* mask off bits when skipping/not-skipping */ |
| 74 | |
| 75 | #define SKIP_DELAY_LOOP_VALUE_OR_ZERO(non_skip_value) \ |
| 76 | ((non_skip_value) & skip_delay_mask) |
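/*
 * Worked example (illustrative values): with skip_delay_mask = 0xffff
 * (not skipping), SKIP_DELAY_LOOP_VALUE_OR_ZERO(0x6a) yields 0x6a; with
 * skip_delay_mask = 0 (skipping), the same call yields 0, so the delay
 * loops collapse to nothing.
 */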
| 77 | |
| 78 | struct gbl_type *gbl; |
| 79 | struct param_type *param; |
| 80 | uint32_t curr_shadow_reg; |
| 81 | |
| 82 | static uint32_t rw_mgr_mem_calibrate_write_test(uint32_t rank_bgn, |
| 83 | uint32_t write_group, uint32_t use_dm, |
| 84 | uint32_t all_correct, uint32_t *bit_chk, uint32_t all_ranks); |
| 85 | |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 86 | static void set_failing_group_stage(uint32_t group, uint32_t stage, |
| 87 | uint32_t substage) |
| 88 | { |
| 89 | /* |
 | 90 | * Only set the global stage if there has not been any other |
| 91 | * failing group |
| 92 | */ |
| 93 | if (gbl->error_stage == CAL_STAGE_NIL) { |
| 94 | gbl->error_substage = substage; |
| 95 | gbl->error_stage = stage; |
| 96 | gbl->error_group = group; |
| 97 | } |
| 98 | } |
| 99 | |
Marek Vasut | 6eeb747 | 2015-07-12 21:10:24 +0200 | [diff] [blame] | 100 | static void reg_file_set_group(u16 set_group) |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 101 | { |
Marek Vasut | 6eeb747 | 2015-07-12 21:10:24 +0200 | [diff] [blame] | 102 | clrsetbits_le32(&sdr_reg_file->cur_stage, 0xffff0000, set_group << 16); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 103 | } |
| 104 | |
Marek Vasut | 6eeb747 | 2015-07-12 21:10:24 +0200 | [diff] [blame] | 105 | static void reg_file_set_stage(u8 set_stage) |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 106 | { |
Marek Vasut | 6eeb747 | 2015-07-12 21:10:24 +0200 | [diff] [blame] | 107 | clrsetbits_le32(&sdr_reg_file->cur_stage, 0xffff, set_stage & 0xff); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 108 | } |
| 109 | |
Marek Vasut | 6eeb747 | 2015-07-12 21:10:24 +0200 | [diff] [blame] | 110 | static void reg_file_set_sub_stage(u8 set_sub_stage) |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 111 | { |
Marek Vasut | 6eeb747 | 2015-07-12 21:10:24 +0200 | [diff] [blame] | 112 | set_sub_stage &= 0xff; |
| 113 | clrsetbits_le32(&sdr_reg_file->cur_stage, 0xff00, set_sub_stage << 8); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 114 | } |
| 115 | |
| 116 | static void initialize(void) |
| 117 | { |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 118 | debug("%s:%d\n", __func__, __LINE__); |
| 119 | /* USER calibration has control over path to memory */ |
| 120 | /* |
| 121 | * In Hard PHY this is a 2-bit control: |
| 122 | * 0: AFI Mux Select |
| 123 | * 1: DDIO Mux Select |
| 124 | */ |
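	/* Writing 0x3 sets both mux bits, giving calibration control of the path. */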
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 125 | writel(0x3, &phy_mgr_cfg->mux_sel); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 126 | |
 | 127 | /* USER memory clock is not stable as we begin initialization */ |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 128 | writel(0, &phy_mgr_cfg->reset_mem_stbl); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 129 | |
| 130 | /* USER calibration status all set to zero */ |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 131 | writel(0, &phy_mgr_cfg->cal_status); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 132 | |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 133 | writel(0, &phy_mgr_cfg->cal_debug_info); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 134 | |
| 135 | if ((dyn_calib_steps & CALIB_SKIP_ALL) != CALIB_SKIP_ALL) { |
| 136 | param->read_correct_mask_vg = ((uint32_t)1 << |
| 137 | (RW_MGR_MEM_DQ_PER_READ_DQS / |
| 138 | RW_MGR_MEM_VIRTUAL_GROUPS_PER_READ_DQS)) - 1; |
| 139 | param->write_correct_mask_vg = ((uint32_t)1 << |
| 140 | (RW_MGR_MEM_DQ_PER_READ_DQS / |
| 141 | RW_MGR_MEM_VIRTUAL_GROUPS_PER_READ_DQS)) - 1; |
| 142 | param->read_correct_mask = ((uint32_t)1 << |
| 143 | RW_MGR_MEM_DQ_PER_READ_DQS) - 1; |
| 144 | param->write_correct_mask = ((uint32_t)1 << |
| 145 | RW_MGR_MEM_DQ_PER_WRITE_DQS) - 1; |
| 146 | param->dm_correct_mask = ((uint32_t)1 << |
| 147 | (RW_MGR_MEM_DATA_WIDTH / RW_MGR_MEM_DATA_MASK_WIDTH)) |
| 148 | - 1; |
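		/*
		 * Example with illustrative widths (e.g. 8 DQ per DQS and
		 * 2 virtual groups): the per-VG masks become 0xf, the
		 * full-group masks become 0xff, and dm_correct_mask covers
		 * DATA_WIDTH / DATA_MASK_WIDTH bits (e.g. 32/4 -> 0xff).
		 */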
| 149 | } |
| 150 | } |
| 151 | |
| 152 | static void set_rank_and_odt_mask(uint32_t rank, uint32_t odt_mode) |
| 153 | { |
| 154 | uint32_t odt_mask_0 = 0; |
| 155 | uint32_t odt_mask_1 = 0; |
| 156 | uint32_t cs_and_odt_mask; |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 157 | |
| 158 | if (odt_mode == RW_MGR_ODT_MODE_READ_WRITE) { |
| 159 | if (RW_MGR_MEM_NUMBER_OF_RANKS == 1) { |
| 160 | /* |
| 161 | * 1 Rank |
| 162 | * Read: ODT = 0 |
| 163 | * Write: ODT = 1 |
| 164 | */ |
| 165 | odt_mask_0 = 0x0; |
| 166 | odt_mask_1 = 0x1; |
| 167 | } else if (RW_MGR_MEM_NUMBER_OF_RANKS == 2) { |
| 168 | /* 2 Ranks */ |
| 169 | if (RW_MGR_MEM_NUMBER_OF_CS_PER_DIMM == 1) { |
 | 170 | /* - Dual-Slot, Single-Rank |
 | 171 | * (1 chip-select per DIMM) |
 | 172 | * OR |
 | 173 | * - RDIMM, 4 total CS (2 CS per DIMM) |
 | 174 | * means 2 DIMMs |
 | 175 | * Since MEM_NUMBER_OF_RANKS is 2, they are |
| 176 | * both single rank |
| 177 | * with 2 CS each (special for RDIMM) |
| 178 | * Read: Turn on ODT on the opposite rank |
| 179 | * Write: Turn on ODT on all ranks |
| 180 | */ |
| 181 | odt_mask_0 = 0x3 & ~(1 << rank); |
| 182 | odt_mask_1 = 0x3; |
| 183 | } else { |
| 184 | /* |
 | 185 | * USER - Single-Slot, Dual-Rank DIMMs |
 | 186 | * (2 chip-selects per DIMM) |
 | 187 | * USER Read: Turn off ODT on all ranks |
 | 188 | * USER Write: Turn on ODT on active rank |
| 189 | */ |
| 190 | odt_mask_0 = 0x0; |
| 191 | odt_mask_1 = 0x3 & (1 << rank); |
| 192 | } |
Marek Vasut | f84348d | 2015-07-18 02:23:29 +0200 | [diff] [blame] | 193 | } else { |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 194 | /* 4 Ranks |
| 195 | * Read: |
| 196 | * ----------+-----------------------+ |
| 197 | * | | |
| 198 | * | ODT | |
| 199 | * Read From +-----------------------+ |
| 200 | * Rank | 3 | 2 | 1 | 0 | |
| 201 | * ----------+-----+-----+-----+-----+ |
| 202 | * 0 | 0 | 1 | 0 | 0 | |
| 203 | * 1 | 1 | 0 | 0 | 0 | |
| 204 | * 2 | 0 | 0 | 0 | 1 | |
| 205 | * 3 | 0 | 0 | 1 | 0 | |
| 206 | * ----------+-----+-----+-----+-----+ |
| 207 | * |
| 208 | * Write: |
| 209 | * ----------+-----------------------+ |
| 210 | * | | |
| 211 | * | ODT | |
| 212 | * Write To +-----------------------+ |
| 213 | * Rank | 3 | 2 | 1 | 0 | |
| 214 | * ----------+-----+-----+-----+-----+ |
| 215 | * 0 | 0 | 1 | 0 | 1 | |
| 216 | * 1 | 1 | 0 | 1 | 0 | |
| 217 | * 2 | 0 | 1 | 0 | 1 | |
| 218 | * 3 | 1 | 0 | 1 | 0 | |
| 219 | * ----------+-----+-----+-----+-----+ |
| 220 | */ |
| 221 | switch (rank) { |
| 222 | case 0: |
| 223 | odt_mask_0 = 0x4; |
| 224 | odt_mask_1 = 0x5; |
| 225 | break; |
| 226 | case 1: |
| 227 | odt_mask_0 = 0x8; |
| 228 | odt_mask_1 = 0xA; |
| 229 | break; |
| 230 | case 2: |
| 231 | odt_mask_0 = 0x1; |
| 232 | odt_mask_1 = 0x5; |
| 233 | break; |
| 234 | case 3: |
| 235 | odt_mask_0 = 0x2; |
| 236 | odt_mask_1 = 0xA; |
| 237 | break; |
| 238 | } |
| 239 | } |
| 240 | } else { |
| 241 | odt_mask_0 = 0x0; |
| 242 | odt_mask_1 = 0x0; |
| 243 | } |
| 244 | |
| 245 | cs_and_odt_mask = |
| 246 | (0xFF & ~(1 << rank)) | |
| 247 | ((0xFF & odt_mask_0) << 8) | |
| 248 | ((0xFF & odt_mask_1) << 16); |
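	/*
	 * Layout of cs_and_odt_mask as assembled above: bits [7:0] hold the
	 * CS bits (cleared for the rank being addressed), bits [15:8] the
	 * read ODT mask and bits [23:16] the write ODT mask. For example,
	 * rank 1 of 4 (odt_mask_0 = 0x8, odt_mask_1 = 0xA) yields 0x0A08FD.
	 */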
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 249 | writel(cs_and_odt_mask, SDR_PHYGRP_RWMGRGRP_ADDRESS | |
| 250 | RW_MGR_SET_CS_AND_ODT_MASK_OFFSET); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 251 | } |
| 252 | |
Marek Vasut | 303a3dc | 2015-07-12 22:28:33 +0200 | [diff] [blame] | 253 | /** |
| 254 | * scc_mgr_set() - Set SCC Manager register |
| 255 | * @off: Base offset in SCC Manager space |
| 256 | * @grp: Read/Write group |
| 257 | * @val: Value to be set |
| 258 | * |
| 259 | * This function sets the SCC Manager (Scan Chain Control Manager) register. |
| 260 | */ |
| 261 | static void scc_mgr_set(u32 off, u32 grp, u32 val) |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 262 | { |
Marek Vasut | 303a3dc | 2015-07-12 22:28:33 +0200 | [diff] [blame] | 263 | writel(val, SDR_PHYGRP_SCCGRP_ADDRESS | off | (grp << 2)); |
| 264 | } |
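/*
 * For example, scc_mgr_set(SCC_MGR_DQS_EN_PHASE_OFFSET, 2, 1) writes 1 to
 * SDR_PHYGRP_SCCGRP_ADDRESS | SCC_MGR_DQS_EN_PHASE_OFFSET | (2 << 2), i.e.
 * each group's register occupies its own 32-bit word within the block.
 */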
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 265 | |
Marek Vasut | 8957b49 | 2015-07-20 07:16:42 +0200 | [diff] [blame] | 266 | /** |
| 267 | * scc_mgr_initialize() - Initialize SCC Manager registers |
| 268 | * |
| 269 | * Initialize SCC Manager registers. |
| 270 | */ |
Marek Vasut | 303a3dc | 2015-07-12 22:28:33 +0200 | [diff] [blame] | 271 | static void scc_mgr_initialize(void) |
| 272 | { |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 273 | /* |
Marek Vasut | 8957b49 | 2015-07-20 07:16:42 +0200 | [diff] [blame] | 274 | * Clear register file for HPS. 16 (2^4) is the size of the |
| 275 | * full register file in the scc mgr: |
| 276 | * RFILE_DEPTH = 1 + log2(MEM_DQ_PER_DQS + 1 + MEM_DM_PER_DQS + |
| 277 | * MEM_IF_READ_DQS_WIDTH - 1); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 278 | */ |
Marek Vasut | 303a3dc | 2015-07-12 22:28:33 +0200 | [diff] [blame] | 279 | int i; |
Marek Vasut | 8957b49 | 2015-07-20 07:16:42 +0200 | [diff] [blame] | 280 | |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 281 | for (i = 0; i < 16; i++) { |
Marek Vasut | 0eacf7e | 2015-06-26 18:56:54 +0200 | [diff] [blame] | 282 | debug_cond(DLEVEL == 1, "%s:%d: Clearing SCC RFILE index %u\n", |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 283 | __func__, __LINE__, i); |
Marek Vasut | 303a3dc | 2015-07-12 22:28:33 +0200 | [diff] [blame] | 284 | scc_mgr_set(SCC_MGR_HHP_RFILE_OFFSET, 0, i); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 285 | } |
| 286 | } |
| 287 | |
Marek Vasut | 7481b69 | 2015-07-12 22:11:55 +0200 | [diff] [blame] | 288 | static void scc_mgr_set_dqdqs_output_phase(uint32_t write_group, uint32_t phase) |
| 289 | { |
Marek Vasut | 303a3dc | 2015-07-12 22:28:33 +0200 | [diff] [blame] | 290 | scc_mgr_set(SCC_MGR_DQDQS_OUT_PHASE_OFFSET, write_group, phase); |
Marek Vasut | 7481b69 | 2015-07-12 22:11:55 +0200 | [diff] [blame] | 291 | } |
| 292 | |
| 293 | static void scc_mgr_set_dqs_bus_in_delay(uint32_t read_group, uint32_t delay) |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 294 | { |
Marek Vasut | 303a3dc | 2015-07-12 22:28:33 +0200 | [diff] [blame] | 295 | scc_mgr_set(SCC_MGR_DQS_IN_DELAY_OFFSET, read_group, delay); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 296 | } |
| 297 | |
Marek Vasut | 7481b69 | 2015-07-12 22:11:55 +0200 | [diff] [blame] | 298 | static void scc_mgr_set_dqs_en_phase(uint32_t read_group, uint32_t phase) |
| 299 | { |
Marek Vasut | 303a3dc | 2015-07-12 22:28:33 +0200 | [diff] [blame] | 300 | scc_mgr_set(SCC_MGR_DQS_EN_PHASE_OFFSET, read_group, phase); |
Marek Vasut | 7481b69 | 2015-07-12 22:11:55 +0200 | [diff] [blame] | 301 | } |
| 302 | |
| 303 | static void scc_mgr_set_dqs_en_delay(uint32_t read_group, uint32_t delay) |
| 304 | { |
Marek Vasut | 303a3dc | 2015-07-12 22:28:33 +0200 | [diff] [blame] | 305 | scc_mgr_set(SCC_MGR_DQS_EN_DELAY_OFFSET, read_group, delay); |
Marek Vasut | 7481b69 | 2015-07-12 22:11:55 +0200 | [diff] [blame] | 306 | } |
| 307 | |
Marek Vasut | 122e1f3 | 2015-07-17 06:07:13 +0200 | [diff] [blame] | 308 | static void scc_mgr_set_dqs_io_in_delay(uint32_t delay) |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 309 | { |
Marek Vasut | 303a3dc | 2015-07-12 22:28:33 +0200 | [diff] [blame] | 310 | scc_mgr_set(SCC_MGR_IO_IN_DELAY_OFFSET, RW_MGR_MEM_DQ_PER_WRITE_DQS, |
| 311 | delay); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 312 | } |
| 313 | |
Marek Vasut | 7481b69 | 2015-07-12 22:11:55 +0200 | [diff] [blame] | 314 | static void scc_mgr_set_dq_in_delay(uint32_t dq_in_group, uint32_t delay) |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 315 | { |
Marek Vasut | 303a3dc | 2015-07-12 22:28:33 +0200 | [diff] [blame] | 316 | scc_mgr_set(SCC_MGR_IO_IN_DELAY_OFFSET, dq_in_group, delay); |
Marek Vasut | 7481b69 | 2015-07-12 22:11:55 +0200 | [diff] [blame] | 317 | } |
| 318 | |
| 319 | static void scc_mgr_set_dq_out1_delay(uint32_t dq_in_group, uint32_t delay) |
| 320 | { |
Marek Vasut | 303a3dc | 2015-07-12 22:28:33 +0200 | [diff] [blame] | 321 | scc_mgr_set(SCC_MGR_IO_OUT1_DELAY_OFFSET, dq_in_group, delay); |
Marek Vasut | 7481b69 | 2015-07-12 22:11:55 +0200 | [diff] [blame] | 322 | } |
| 323 | |
Marek Vasut | 122e1f3 | 2015-07-17 06:07:13 +0200 | [diff] [blame] | 324 | static void scc_mgr_set_dqs_out1_delay(uint32_t delay) |
Marek Vasut | 7481b69 | 2015-07-12 22:11:55 +0200 | [diff] [blame] | 325 | { |
Marek Vasut | 303a3dc | 2015-07-12 22:28:33 +0200 | [diff] [blame] | 326 | scc_mgr_set(SCC_MGR_IO_OUT1_DELAY_OFFSET, RW_MGR_MEM_DQ_PER_WRITE_DQS, |
| 327 | delay); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 328 | } |
| 329 | |
Marek Vasut | 7481b69 | 2015-07-12 22:11:55 +0200 | [diff] [blame] | 330 | static void scc_mgr_set_dm_out1_delay(uint32_t dm, uint32_t delay) |
| 331 | { |
Marek Vasut | 303a3dc | 2015-07-12 22:28:33 +0200 | [diff] [blame] | 332 | scc_mgr_set(SCC_MGR_IO_OUT1_DELAY_OFFSET, |
| 333 | RW_MGR_MEM_DQ_PER_WRITE_DQS + 1 + dm, |
| 334 | delay); |
Marek Vasut | 7481b69 | 2015-07-12 22:11:55 +0200 | [diff] [blame] | 335 | } |
| 336 | |
| 337 | /* load up dqs config settings */ |
| 338 | static void scc_mgr_load_dqs(uint32_t dqs) |
| 339 | { |
| 340 | writel(dqs, &sdr_scc_mgr->dqs_ena); |
| 341 | } |
| 342 | |
| 343 | /* load up dqs io config settings */ |
| 344 | static void scc_mgr_load_dqs_io(void) |
| 345 | { |
| 346 | writel(0, &sdr_scc_mgr->dqs_io_ena); |
| 347 | } |
| 348 | |
| 349 | /* load up dq config settings */ |
| 350 | static void scc_mgr_load_dq(uint32_t dq_in_group) |
| 351 | { |
| 352 | writel(dq_in_group, &sdr_scc_mgr->dq_ena); |
| 353 | } |
| 354 | |
| 355 | /* load up dm config settings */ |
| 356 | static void scc_mgr_load_dm(uint32_t dm) |
| 357 | { |
| 358 | writel(dm, &sdr_scc_mgr->dm_ena); |
| 359 | } |
| 360 | |
Marek Vasut | 1d3cde3 | 2015-07-12 23:25:21 +0200 | [diff] [blame] | 361 | /** |
| 362 | * scc_mgr_set_all_ranks() - Set SCC Manager register for all ranks |
| 363 | * @off: Base offset in SCC Manager space |
| 364 | * @grp: Read/Write group |
| 365 | * @val: Value to be set |
| 366 | * @update: If non-zero, trigger SCC Manager update for all ranks |
| 367 | * |
| 368 | * This function sets the SCC Manager (Scan Chain Control Manager) register |
| 369 | * and optionally triggers the SCC update for all ranks. |
| 370 | */ |
| 371 | static void scc_mgr_set_all_ranks(const u32 off, const u32 grp, const u32 val, |
| 372 | const int update) |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 373 | { |
Marek Vasut | 1d3cde3 | 2015-07-12 23:25:21 +0200 | [diff] [blame] | 374 | u32 r; |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 375 | |
| 376 | for (r = 0; r < RW_MGR_MEM_NUMBER_OF_RANKS; |
| 377 | r += NUM_RANKS_PER_SHADOW_REG) { |
Marek Vasut | 1d3cde3 | 2015-07-12 23:25:21 +0200 | [diff] [blame] | 378 | scc_mgr_set(off, grp, val); |
Marek Vasut | 4972282 | 2015-07-12 23:14:33 +0200 | [diff] [blame] | 379 | |
Marek Vasut | 1d3cde3 | 2015-07-12 23:25:21 +0200 | [diff] [blame] | 380 | if (update || (r == 0)) { |
| 381 | writel(grp, &sdr_scc_mgr->dqs_ena); |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 382 | writel(0, &sdr_scc_mgr->update); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 383 | } |
| 384 | } |
| 385 | } |
| 386 | |
Marek Vasut | 1d3cde3 | 2015-07-12 23:25:21 +0200 | [diff] [blame] | 387 | static void scc_mgr_set_dqs_en_phase_all_ranks(u32 read_group, u32 phase) |
| 388 | { |
| 389 | /* |
| 390 | * USER although the h/w doesn't support different phases per |
| 391 | * shadow register, for simplicity our scc manager modeling |
| 392 | * keeps different phase settings per shadow reg, and it's |
| 393 | * important for us to keep them in sync to match h/w. |
| 394 | * for efficiency, the scan chain update should occur only |
| 395 | * once to sr0. |
| 396 | */ |
| 397 | scc_mgr_set_all_ranks(SCC_MGR_DQS_EN_PHASE_OFFSET, |
| 398 | read_group, phase, 0); |
| 399 | } |
| 400 | |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 401 | static void scc_mgr_set_dqdqs_output_phase_all_ranks(uint32_t write_group, |
| 402 | uint32_t phase) |
| 403 | { |
Marek Vasut | 1d3cde3 | 2015-07-12 23:25:21 +0200 | [diff] [blame] | 404 | /* |
| 405 | * USER although the h/w doesn't support different phases per |
| 406 | * shadow register, for simplicity our scc manager modeling |
| 407 | * keeps different phase settings per shadow reg, and it's |
| 408 | * important for us to keep them in sync to match h/w. |
| 409 | * for efficiency, the scan chain update should occur only |
| 410 | * once to sr0. |
| 411 | */ |
| 412 | scc_mgr_set_all_ranks(SCC_MGR_DQDQS_OUT_PHASE_OFFSET, |
| 413 | write_group, phase, 0); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 414 | } |
| 415 | |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 416 | static void scc_mgr_set_dqs_en_delay_all_ranks(uint32_t read_group, |
| 417 | uint32_t delay) |
| 418 | { |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 419 | /* |
| 420 | * In shadow register mode, the T11 settings are stored in |
| 421 | * registers in the core, which are updated by the DQS_ENA |
| 422 | * signals. Not issuing the SCC_MGR_UPD command allows us to |
| 423 | * save lots of rank switching overhead, by calling |
| 424 | * select_shadow_regs_for_update with update_scan_chains |
| 425 | * set to 0. |
| 426 | */ |
Marek Vasut | 1d3cde3 | 2015-07-12 23:25:21 +0200 | [diff] [blame] | 427 | scc_mgr_set_all_ranks(SCC_MGR_DQS_EN_DELAY_OFFSET, |
| 428 | read_group, delay, 1); |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 429 | writel(0, &sdr_scc_mgr->update); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 430 | } |
| 431 | |
Marek Vasut | e62f691 | 2015-07-12 23:39:06 +0200 | [diff] [blame] | 432 | /** |
| 433 | * scc_mgr_set_oct_out1_delay() - Set OCT output delay |
| 434 | * @write_group: Write group |
| 435 | * @delay: Delay value |
| 436 | * |
| 437 | * This function sets the OCT output delay in SCC manager. |
| 438 | */ |
| 439 | static void scc_mgr_set_oct_out1_delay(const u32 write_group, const u32 delay) |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 440 | { |
Marek Vasut | e62f691 | 2015-07-12 23:39:06 +0200 | [diff] [blame] | 441 | const int ratio = RW_MGR_MEM_IF_READ_DQS_WIDTH / |
| 442 | RW_MGR_MEM_IF_WRITE_DQS_WIDTH; |
| 443 | const int base = write_group * ratio; |
| 444 | int i; |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 445 | /* |
| 446 | * Load the setting in the SCC manager |
| 447 | * Although OCT affects only write data, the OCT delay is controlled |
| 448 | * by the DQS logic block which is instantiated once per read group. |
| 449 | * For protocols where a write group consists of multiple read groups, |
| 450 | * the setting must be set multiple times. |
| 451 | */ |
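	/*
	 * Example with illustrative widths: if there are 8 read groups and
	 * 4 write groups, ratio = 2 and write_group 3 programs the OCT delay
	 * into read groups 6 and 7 (base = 6).
	 */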
Marek Vasut | e62f691 | 2015-07-12 23:39:06 +0200 | [diff] [blame] | 452 | for (i = 0; i < ratio; i++) |
| 453 | scc_mgr_set(SCC_MGR_OCT_OUT1_DELAY_OFFSET, base + i, delay); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 454 | } |
| 455 | |
Marek Vasut | 3b8e5b0 | 2015-07-19 01:32:55 +0200 | [diff] [blame] | 456 | /** |
| 457 | * scc_mgr_set_hhp_extras() - Set HHP extras. |
| 458 | * |
| 459 | * Load the fixed setting in the SCC manager HHP extras. |
| 460 | */ |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 461 | static void scc_mgr_set_hhp_extras(void) |
| 462 | { |
| 463 | /* |
| 464 | * Load the fixed setting in the SCC manager |
Marek Vasut | 3b8e5b0 | 2015-07-19 01:32:55 +0200 | [diff] [blame] | 465 | * bits: 0:0 = 1'b1 - DQS bypass |
| 466 | * bits: 1:1 = 1'b1 - DQ bypass |
| 467 | * bits: 4:2 = 3'b001 - rfifo_mode |
| 468 | * bits: 6:5 = 2'b01 - rfifo clock_select |
| 469 | * bits: 7:7 = 1'b0 - separate gating from ungating setting |
| 470 | * bits: 8:8 = 1'b0 - separate OE from Output delay setting |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 471 | */ |
Marek Vasut | 3b8e5b0 | 2015-07-19 01:32:55 +0200 | [diff] [blame] | 472 | const u32 value = (0 << 8) | (0 << 7) | (1 << 5) | |
| 473 | (1 << 2) | (1 << 1) | (1 << 0); |
| 474 | const u32 addr = SDR_PHYGRP_SCCGRP_ADDRESS | |
| 475 | SCC_MGR_HHP_GLOBALS_OFFSET | |
| 476 | SCC_MGR_HHP_EXTRAS_OFFSET; |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 477 | |
Marek Vasut | 3b8e5b0 | 2015-07-19 01:32:55 +0200 | [diff] [blame] | 478 | debug_cond(DLEVEL == 1, "%s:%d Setting HHP Extras\n", |
| 479 | __func__, __LINE__); |
| 480 | writel(value, addr); |
| 481 | debug_cond(DLEVEL == 1, "%s:%d Done Setting HHP Extras\n", |
| 482 | __func__, __LINE__); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 483 | } |
| 484 | |
Marek Vasut | 08bcb98 | 2015-07-20 04:41:53 +0200 | [diff] [blame] | 485 | /** |
| 486 | * scc_mgr_zero_all() - Zero all DQS config |
| 487 | * |
| 488 | * Zero all DQS config. |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 489 | */ |
| 490 | static void scc_mgr_zero_all(void) |
| 491 | { |
Marek Vasut | 08bcb98 | 2015-07-20 04:41:53 +0200 | [diff] [blame] | 492 | int i, r; |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 493 | |
| 494 | /* |
| 495 | * USER Zero all DQS config settings, across all groups and all |
| 496 | * shadow registers |
| 497 | */ |
Marek Vasut | 08bcb98 | 2015-07-20 04:41:53 +0200 | [diff] [blame] | 498 | for (r = 0; r < RW_MGR_MEM_NUMBER_OF_RANKS; |
| 499 | r += NUM_RANKS_PER_SHADOW_REG) { |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 500 | for (i = 0; i < RW_MGR_MEM_IF_READ_DQS_WIDTH; i++) { |
| 501 | /* |
| 502 | * The phases actually don't exist on a per-rank basis, |
| 503 | * but there's no harm updating them several times, so |
| 504 | * let's keep the code simple. |
| 505 | */ |
| 506 | scc_mgr_set_dqs_bus_in_delay(i, IO_DQS_IN_RESERVE); |
| 507 | scc_mgr_set_dqs_en_phase(i, 0); |
| 508 | scc_mgr_set_dqs_en_delay(i, 0); |
| 509 | } |
| 510 | |
| 511 | for (i = 0; i < RW_MGR_MEM_IF_WRITE_DQS_WIDTH; i++) { |
| 512 | scc_mgr_set_dqdqs_output_phase(i, 0); |
Marek Vasut | 08bcb98 | 2015-07-20 04:41:53 +0200 | [diff] [blame] | 513 | /* Arria V/Cyclone V don't have out2. */ |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 514 | scc_mgr_set_oct_out1_delay(i, IO_DQS_OUT_RESERVE); |
| 515 | } |
| 516 | } |
| 517 | |
Marek Vasut | 08bcb98 | 2015-07-20 04:41:53 +0200 | [diff] [blame] | 518 | /* Multicast to all DQS group enables. */ |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 519 | writel(0xff, &sdr_scc_mgr->dqs_ena); |
| 520 | writel(0, &sdr_scc_mgr->update); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 521 | } |
| 522 | |
Marek Vasut | 0341de4 | 2015-07-17 02:06:20 +0200 | [diff] [blame] | 523 | /** |
| 524 | * scc_set_bypass_mode() - Set bypass mode and trigger SCC update |
| 525 | * @write_group: Write group |
| 526 | * |
| 527 | * Set bypass mode and trigger SCC update. |
| 528 | */ |
| 529 | static void scc_set_bypass_mode(const u32 write_group) |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 530 | { |
Marek Vasut | 0341de4 | 2015-07-17 02:06:20 +0200 | [diff] [blame] | 531 | /* Multicast to all DQ enables. */ |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 532 | writel(0xff, &sdr_scc_mgr->dq_ena); |
| 533 | writel(0xff, &sdr_scc_mgr->dm_ena); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 534 | |
Marek Vasut | 0341de4 | 2015-07-17 02:06:20 +0200 | [diff] [blame] | 535 | /* Update current DQS IO enable. */ |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 536 | writel(0, &sdr_scc_mgr->dqs_io_ena); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 537 | |
Marek Vasut | 0341de4 | 2015-07-17 02:06:20 +0200 | [diff] [blame] | 538 | /* Update the DQS logic. */ |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 539 | writel(write_group, &sdr_scc_mgr->dqs_ena); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 540 | |
Marek Vasut | 0341de4 | 2015-07-17 02:06:20 +0200 | [diff] [blame] | 541 | /* Hit update. */ |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 542 | writel(0, &sdr_scc_mgr->update); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 543 | } |
| 544 | |
Marek Vasut | 5a4379e | 2015-07-13 00:30:09 +0200 | [diff] [blame] | 545 | /** |
| 546 | * scc_mgr_load_dqs_for_write_group() - Load DQS settings for Write Group |
| 547 | * @write_group: Write group |
| 548 | * |
| 549 | * Load DQS settings for Write Group, do not trigger SCC update. |
| 550 | */ |
| 551 | static void scc_mgr_load_dqs_for_write_group(const u32 write_group) |
Marek Vasut | 7481b69 | 2015-07-12 22:11:55 +0200 | [diff] [blame] | 552 | { |
Marek Vasut | 5a4379e | 2015-07-13 00:30:09 +0200 | [diff] [blame] | 553 | const int ratio = RW_MGR_MEM_IF_READ_DQS_WIDTH / |
| 554 | RW_MGR_MEM_IF_WRITE_DQS_WIDTH; |
| 555 | const int base = write_group * ratio; |
| 556 | int i; |
Marek Vasut | 7481b69 | 2015-07-12 22:11:55 +0200 | [diff] [blame] | 557 | /* |
Marek Vasut | 5a4379e | 2015-07-13 00:30:09 +0200 | [diff] [blame] | 558 | * Load the setting in the SCC manager |
Marek Vasut | 7481b69 | 2015-07-12 22:11:55 +0200 | [diff] [blame] | 559 | * Although OCT affects only write data, the OCT delay is controlled |
| 560 | * by the DQS logic block which is instantiated once per read group. |
| 561 | * For protocols where a write group consists of multiple read groups, |
Marek Vasut | 5a4379e | 2015-07-13 00:30:09 +0200 | [diff] [blame] | 562 | * the setting must be set multiple times. |
Marek Vasut | 7481b69 | 2015-07-12 22:11:55 +0200 | [diff] [blame] | 563 | */ |
Marek Vasut | 5a4379e | 2015-07-13 00:30:09 +0200 | [diff] [blame] | 564 | for (i = 0; i < ratio; i++) |
| 565 | writel(base + i, &sdr_scc_mgr->dqs_ena); |
Marek Vasut | 7481b69 | 2015-07-12 22:11:55 +0200 | [diff] [blame] | 566 | } |
| 567 | |
Marek Vasut | 62d3c69 | 2015-07-20 08:41:04 +0200 | [diff] [blame] | 568 | /** |
| 569 | * scc_mgr_zero_group() - Zero all configs for a group |
| 570 | * |
| 571 | * Zero DQ, DM, DQS and OCT configs for a group. |
| 572 | */ |
| 573 | static void scc_mgr_zero_group(const u32 write_group, const int out_only) |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 574 | { |
Marek Vasut | 62d3c69 | 2015-07-20 08:41:04 +0200 | [diff] [blame] | 575 | int i, r; |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 576 | |
Marek Vasut | 62d3c69 | 2015-07-20 08:41:04 +0200 | [diff] [blame] | 577 | for (r = 0; r < RW_MGR_MEM_NUMBER_OF_RANKS; |
| 578 | r += NUM_RANKS_PER_SHADOW_REG) { |
| 579 | /* Zero all DQ config settings. */ |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 580 | for (i = 0; i < RW_MGR_MEM_DQ_PER_WRITE_DQS; i++) { |
Marek Vasut | cab8079 | 2015-07-12 22:07:33 +0200 | [diff] [blame] | 581 | scc_mgr_set_dq_out1_delay(i, 0); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 582 | if (!out_only) |
Marek Vasut | cab8079 | 2015-07-12 22:07:33 +0200 | [diff] [blame] | 583 | scc_mgr_set_dq_in_delay(i, 0); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 584 | } |
| 585 | |
Marek Vasut | 62d3c69 | 2015-07-20 08:41:04 +0200 | [diff] [blame] | 586 | /* Multicast to all DQ enables. */ |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 587 | writel(0xff, &sdr_scc_mgr->dq_ena); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 588 | |
Marek Vasut | 62d3c69 | 2015-07-20 08:41:04 +0200 | [diff] [blame] | 589 | /* Zero all DM config settings. */ |
| 590 | for (i = 0; i < RW_MGR_NUM_DM_PER_WRITE_GROUP; i++) |
Marek Vasut | cab8079 | 2015-07-12 22:07:33 +0200 | [diff] [blame] | 591 | scc_mgr_set_dm_out1_delay(i, 0); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 592 | |
Marek Vasut | 62d3c69 | 2015-07-20 08:41:04 +0200 | [diff] [blame] | 593 | /* Multicast to all DM enables. */ |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 594 | writel(0xff, &sdr_scc_mgr->dm_ena); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 595 | |
Marek Vasut | 62d3c69 | 2015-07-20 08:41:04 +0200 | [diff] [blame] | 596 | /* Zero all DQS IO settings. */ |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 597 | if (!out_only) |
Marek Vasut | 122e1f3 | 2015-07-17 06:07:13 +0200 | [diff] [blame] | 598 | scc_mgr_set_dqs_io_in_delay(0); |
Marek Vasut | 62d3c69 | 2015-07-20 08:41:04 +0200 | [diff] [blame] | 599 | |
| 600 | /* Arria V/Cyclone V don't have out2. */ |
Marek Vasut | 122e1f3 | 2015-07-17 06:07:13 +0200 | [diff] [blame] | 601 | scc_mgr_set_dqs_out1_delay(IO_DQS_OUT_RESERVE); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 602 | scc_mgr_set_oct_out1_delay(write_group, IO_DQS_OUT_RESERVE); |
| 603 | scc_mgr_load_dqs_for_write_group(write_group); |
| 604 | |
Marek Vasut | 62d3c69 | 2015-07-20 08:41:04 +0200 | [diff] [blame] | 605 | /* Multicast to all DQS IO enables (only 1 in total). */ |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 606 | writel(0, &sdr_scc_mgr->dqs_io_ena); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 607 | |
Marek Vasut | 62d3c69 | 2015-07-20 08:41:04 +0200 | [diff] [blame] | 608 | /* Hit update to zero everything. */ |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 609 | writel(0, &sdr_scc_mgr->update); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 610 | } |
| 611 | } |
| 612 | |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 613 | /* |
| 614 | * apply and load a particular input delay for the DQ pins in a group |
| 615 | * group_bgn is the index of the first dq pin (in the write group) |
| 616 | */ |
Marek Vasut | 122e1f3 | 2015-07-17 06:07:13 +0200 | [diff] [blame] | 617 | static void scc_mgr_apply_group_dq_in_delay(uint32_t group_bgn, uint32_t delay) |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 618 | { |
| 619 | uint32_t i, p; |
| 620 | |
| 621 | for (i = 0, p = group_bgn; i < RW_MGR_MEM_DQ_PER_READ_DQS; i++, p++) { |
Marek Vasut | cab8079 | 2015-07-12 22:07:33 +0200 | [diff] [blame] | 622 | scc_mgr_set_dq_in_delay(p, delay); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 623 | scc_mgr_load_dq(p); |
| 624 | } |
| 625 | } |
| 626 | |
Marek Vasut | cd64950 | 2015-07-17 05:42:49 +0200 | [diff] [blame] | 627 | /** |
| 628 | * scc_mgr_apply_group_dq_out1_delay() - Apply and load an output delay for the DQ pins in a group |
| 629 | * @delay: Delay value |
| 630 | * |
| 631 | * Apply and load a particular output delay for the DQ pins in a group. |
| 632 | */ |
| 633 | static void scc_mgr_apply_group_dq_out1_delay(const u32 delay) |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 634 | { |
Marek Vasut | cd64950 | 2015-07-17 05:42:49 +0200 | [diff] [blame] | 635 | int i; |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 636 | |
Marek Vasut | cd64950 | 2015-07-17 05:42:49 +0200 | [diff] [blame] | 637 | for (i = 0; i < RW_MGR_MEM_DQ_PER_WRITE_DQS; i++) { |
| 638 | scc_mgr_set_dq_out1_delay(i, delay); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 639 | scc_mgr_load_dq(i); |
| 640 | } |
| 641 | } |
| 642 | |
| 643 | /* apply and load a particular output delay for the DM pins in a group */ |
Marek Vasut | 122e1f3 | 2015-07-17 06:07:13 +0200 | [diff] [blame] | 644 | static void scc_mgr_apply_group_dm_out1_delay(uint32_t delay1) |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 645 | { |
| 646 | uint32_t i; |
| 647 | |
| 648 | for (i = 0; i < RW_MGR_NUM_DM_PER_WRITE_GROUP; i++) { |
Marek Vasut | cab8079 | 2015-07-12 22:07:33 +0200 | [diff] [blame] | 649 | scc_mgr_set_dm_out1_delay(i, delay1); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 650 | scc_mgr_load_dm(i); |
| 651 | } |
| 652 | } |
| 653 | |
| 654 | |
| 655 | /* apply and load delay on both DQS and OCT out1 */ |
| 656 | static void scc_mgr_apply_group_dqs_io_and_oct_out1(uint32_t write_group, |
| 657 | uint32_t delay) |
| 658 | { |
Marek Vasut | 122e1f3 | 2015-07-17 06:07:13 +0200 | [diff] [blame] | 659 | scc_mgr_set_dqs_out1_delay(delay); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 660 | scc_mgr_load_dqs_io(); |
| 661 | |
| 662 | scc_mgr_set_oct_out1_delay(write_group, delay); |
| 663 | scc_mgr_load_dqs_for_write_group(write_group); |
| 664 | } |
| 665 | |
Marek Vasut | 484fb3b | 2015-07-17 05:33:28 +0200 | [diff] [blame] | 666 | /** |
| 667 | * scc_mgr_apply_group_all_out_delay_add() - Apply a delay to the entire output side: DQ, DM, DQS, OCT |
| 668 | * @write_group: Write group |
| 669 | * @delay: Delay value |
| 670 | * |
| 671 | * Apply a delay to the entire output side: DQ, DM, DQS, OCT. |
| 672 | */ |
Marek Vasut | 20bfb9d | 2015-07-17 05:30:14 +0200 | [diff] [blame] | 673 | static void scc_mgr_apply_group_all_out_delay_add(const u32 write_group, |
Marek Vasut | 20bfb9d | 2015-07-17 05:30:14 +0200 | [diff] [blame] | 674 | const u32 delay) |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 675 | { |
Marek Vasut | 20bfb9d | 2015-07-17 05:30:14 +0200 | [diff] [blame] | 676 | u32 i, new_delay; |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 677 | |
Marek Vasut | 20bfb9d | 2015-07-17 05:30:14 +0200 | [diff] [blame] | 678 | /* DQ shift */ |
| 679 | for (i = 0; i < RW_MGR_MEM_DQ_PER_WRITE_DQS; i++) |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 680 | scc_mgr_load_dq(i); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 681 | |
Marek Vasut | 20bfb9d | 2015-07-17 05:30:14 +0200 | [diff] [blame] | 682 | /* DM shift */ |
| 683 | for (i = 0; i < RW_MGR_NUM_DM_PER_WRITE_GROUP; i++) |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 684 | scc_mgr_load_dm(i); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 685 | |
Marek Vasut | 484fb3b | 2015-07-17 05:33:28 +0200 | [diff] [blame] | 686 | /* DQS shift */ |
| 687 | new_delay = READ_SCC_DQS_IO_OUT2_DELAY + delay; |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 688 | if (new_delay > IO_IO_OUT2_DELAY_MAX) { |
Marek Vasut | 484fb3b | 2015-07-17 05:33:28 +0200 | [diff] [blame] | 689 | debug_cond(DLEVEL == 1, |
| 690 | "%s:%d (%u, %u) DQS: %u > %d; adding %u to OUT1\n", |
| 691 | __func__, __LINE__, write_group, delay, new_delay, |
| 692 | IO_IO_OUT2_DELAY_MAX, |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 693 | new_delay - IO_IO_OUT2_DELAY_MAX); |
Marek Vasut | 484fb3b | 2015-07-17 05:33:28 +0200 | [diff] [blame] | 694 | new_delay -= IO_IO_OUT2_DELAY_MAX; |
| 695 | scc_mgr_set_dqs_out1_delay(new_delay); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 696 | } |
| 697 | |
| 698 | scc_mgr_load_dqs_io(); |
| 699 | |
Marek Vasut | 484fb3b | 2015-07-17 05:33:28 +0200 | [diff] [blame] | 700 | /* OCT shift */ |
| 701 | new_delay = READ_SCC_OCT_OUT2_DELAY + delay; |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 702 | if (new_delay > IO_IO_OUT2_DELAY_MAX) { |
Marek Vasut | 484fb3b | 2015-07-17 05:33:28 +0200 | [diff] [blame] | 703 | debug_cond(DLEVEL == 1, |
| 704 | "%s:%d (%u, %u) DQS: %u > %d; adding %u to OUT1\n", |
| 705 | __func__, __LINE__, write_group, delay, |
| 706 | new_delay, IO_IO_OUT2_DELAY_MAX, |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 707 | new_delay - IO_IO_OUT2_DELAY_MAX); |
Marek Vasut | 484fb3b | 2015-07-17 05:33:28 +0200 | [diff] [blame] | 708 | new_delay -= IO_IO_OUT2_DELAY_MAX; |
| 709 | scc_mgr_set_oct_out1_delay(write_group, new_delay); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 710 | } |
| 711 | |
| 712 | scc_mgr_load_dqs_for_write_group(write_group); |
| 713 | } |
| 714 | |
Marek Vasut | 788870f | 2015-07-19 02:18:21 +0200 | [diff] [blame] | 715 | /** |
 | 716 | * scc_mgr_apply_group_all_out_delay_add_all_ranks() - Apply a delay to the entire output side to all ranks |
| 717 | * @write_group: Write group |
| 718 | * @delay: Delay value |
| 719 | * |
| 720 | * Apply a delay to the entire output side (DQ, DM, DQS, OCT) to all ranks. |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 721 | */ |
Marek Vasut | 788870f | 2015-07-19 02:18:21 +0200 | [diff] [blame] | 722 | static void |
| 723 | scc_mgr_apply_group_all_out_delay_add_all_ranks(const u32 write_group, |
| 724 | const u32 delay) |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 725 | { |
Marek Vasut | 788870f | 2015-07-19 02:18:21 +0200 | [diff] [blame] | 726 | int r; |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 727 | |
| 728 | for (r = 0; r < RW_MGR_MEM_NUMBER_OF_RANKS; |
Marek Vasut | 788870f | 2015-07-19 02:18:21 +0200 | [diff] [blame] | 729 | r += NUM_RANKS_PER_SHADOW_REG) { |
Marek Vasut | 484fb3b | 2015-07-17 05:33:28 +0200 | [diff] [blame] | 730 | scc_mgr_apply_group_all_out_delay_add(write_group, delay); |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 731 | writel(0, &sdr_scc_mgr->update); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 732 | } |
| 733 | } |
| 734 | |
Marek Vasut | 42e7860 | 2015-07-26 11:07:19 +0200 | [diff] [blame^] | 735 | /** |
| 736 | * set_jump_as_return() - Return instruction optimization |
| 737 | * |
 | 738 | * Optimization used to recover some slots in the ddr3 inst_rom; could be |
 | 739 | * applied to other protocols if we wanted to. |
| 740 | */ |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 741 | static void set_jump_as_return(void) |
| 742 | { |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 743 | /* |
Marek Vasut | 42e7860 | 2015-07-26 11:07:19 +0200 | [diff] [blame^] | 744 | * To save space, we replace return with a jump to a special shared |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 745 | * RETURN instruction; we set the counter to a large value so that |
Marek Vasut | 42e7860 | 2015-07-26 11:07:19 +0200 | [diff] [blame^] | 746 | * we always jump. |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 747 | */ |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 748 | writel(0xff, &sdr_rw_load_mgr_regs->load_cntr0); |
| 749 | writel(RW_MGR_RETURN, &sdr_rw_load_jump_mgr_regs->load_jump_add0); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 750 | } |
| 751 | |
| 752 | /* |
 | 753 | * Should always be called with constant arguments to ensure all |
 | 754 | * computations are performed at compile time. |
| 755 | */ |
| 756 | static void delay_for_n_mem_clocks(const uint32_t clocks) |
| 757 | { |
| 758 | uint32_t afi_clocks; |
| 759 | uint8_t inner = 0; |
| 760 | uint8_t outer = 0; |
| 761 | uint16_t c_loop = 0; |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 762 | |
| 763 | debug("%s:%d: clocks=%u ... start\n", __func__, __LINE__, clocks); |
| 764 | |
| 765 | |
 | 766 | /* Scale (rounding up) to get afi clocks. */ |
 | 767 | afi_clocks = (clocks + AFI_RATE_RATIO - 1) / AFI_RATE_RATIO; |
| 768 | |
| 769 | /* |
| 770 | * Note, we don't bother accounting for being off a little bit |
| 771 | * because of a few extra instructions in outer loops |
| 772 | * Note, the loops have a test at the end, and do the test before |
| 773 | * the decrement, and so always perform the loop |
| 774 | * 1 time more than the counter value |
| 775 | */ |
| 776 | if (afi_clocks == 0) { |
| 777 | ; |
| 778 | } else if (afi_clocks <= 0x100) { |
| 779 | inner = afi_clocks-1; |
| 780 | outer = 0; |
| 781 | c_loop = 0; |
| 782 | } else if (afi_clocks <= 0x10000) { |
| 783 | inner = 0xff; |
| 784 | outer = (afi_clocks-1) >> 8; |
| 785 | c_loop = 0; |
| 786 | } else { |
| 787 | inner = 0xff; |
| 788 | outer = 0xff; |
| 789 | c_loop = (afi_clocks-1) >> 16; |
| 790 | } |
| 791 | |
| 792 | /* |
| 793 | * rom instructions are structured as follows: |
| 794 | * |
| 795 | * IDLE_LOOP2: jnz cntr0, TARGET_A |
| 796 | * IDLE_LOOP1: jnz cntr1, TARGET_B |
| 797 | * return |
| 798 | * |
| 799 | * so, when doing nested loops, TARGET_A is set to IDLE_LOOP2, and |
| 800 | * TARGET_B is set to IDLE_LOOP2 as well |
| 801 | * |
| 802 | * if we have no outer loop, though, then we can use IDLE_LOOP1 only, |
| 803 | * and set TARGET_B to IDLE_LOOP1 and we skip IDLE_LOOP2 entirely |
| 804 | * |
| 805 | * a little confusing, but it helps save precious space in the inst_rom |
| 806 | * and sequencer rom and keeps the delays more accurate and reduces |
| 807 | * overhead |
| 808 | */ |
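	/*
	 * Worked example (AFI_RATE_RATIO of 2 assumed): clocks = 54000 gives
	 * afi_clocks = 27000, which fits the two-counter case below:
	 * inner = 0xff, outer = (27000 - 1) >> 8 = 0x69, c_loop = 0, so
	 * IDLE_LOOP2 is issued only once.
	 */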
| 809 | if (afi_clocks <= 0x100) { |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 810 | writel(SKIP_DELAY_LOOP_VALUE_OR_ZERO(inner), |
| 811 | &sdr_rw_load_mgr_regs->load_cntr1); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 812 | |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 813 | writel(RW_MGR_IDLE_LOOP1, |
| 814 | &sdr_rw_load_jump_mgr_regs->load_jump_add1); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 815 | |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 816 | writel(RW_MGR_IDLE_LOOP1, SDR_PHYGRP_RWMGRGRP_ADDRESS | |
| 817 | RW_MGR_RUN_SINGLE_GROUP_OFFSET); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 818 | } else { |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 819 | writel(SKIP_DELAY_LOOP_VALUE_OR_ZERO(inner), |
| 820 | &sdr_rw_load_mgr_regs->load_cntr0); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 821 | |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 822 | writel(SKIP_DELAY_LOOP_VALUE_OR_ZERO(outer), |
| 823 | &sdr_rw_load_mgr_regs->load_cntr1); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 824 | |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 825 | writel(RW_MGR_IDLE_LOOP2, |
| 826 | &sdr_rw_load_jump_mgr_regs->load_jump_add0); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 827 | |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 828 | writel(RW_MGR_IDLE_LOOP2, |
| 829 | &sdr_rw_load_jump_mgr_regs->load_jump_add1); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 830 | |
| 831 | /* hack to get around compiler not being smart enough */ |
| 832 | if (afi_clocks <= 0x10000) { |
| 833 | /* only need to run once */ |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 834 | writel(RW_MGR_IDLE_LOOP2, SDR_PHYGRP_RWMGRGRP_ADDRESS | |
| 835 | RW_MGR_RUN_SINGLE_GROUP_OFFSET); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 836 | } else { |
| 837 | do { |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 838 | writel(RW_MGR_IDLE_LOOP2, |
| 839 | SDR_PHYGRP_RWMGRGRP_ADDRESS | |
| 840 | RW_MGR_RUN_SINGLE_GROUP_OFFSET); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 841 | } while (c_loop-- != 0); |
| 842 | } |
| 843 | } |
| 844 | debug("%s:%d clocks=%u ... end\n", __func__, __LINE__, clocks); |
| 845 | } |
| 846 | |
Marek Vasut | 8bf9227 | 2015-07-13 00:44:30 +0200 | [diff] [blame] | 847 | /** |
| 848 | * rw_mgr_mem_init_load_regs() - Load instruction registers |
| 849 | * @cntr0: Counter 0 value |
| 850 | * @cntr1: Counter 1 value |
| 851 | * @cntr2: Counter 2 value |
| 852 | * @jump: Jump instruction value |
| 853 | * |
| 854 | * Load instruction registers. |
| 855 | */ |
| 856 | static void rw_mgr_mem_init_load_regs(u32 cntr0, u32 cntr1, u32 cntr2, u32 jump) |
| 857 | { |
| 858 | uint32_t grpaddr = SDR_PHYGRP_RWMGRGRP_ADDRESS | |
| 859 | RW_MGR_RUN_SINGLE_GROUP_OFFSET; |
| 860 | |
| 861 | /* Load counters */ |
| 862 | writel(SKIP_DELAY_LOOP_VALUE_OR_ZERO(cntr0), |
| 863 | &sdr_rw_load_mgr_regs->load_cntr0); |
| 864 | writel(SKIP_DELAY_LOOP_VALUE_OR_ZERO(cntr1), |
| 865 | &sdr_rw_load_mgr_regs->load_cntr1); |
| 866 | writel(SKIP_DELAY_LOOP_VALUE_OR_ZERO(cntr2), |
| 867 | &sdr_rw_load_mgr_regs->load_cntr2); |
| 868 | |
| 869 | /* Load jump address */ |
| 870 | writel(jump, &sdr_rw_load_jump_mgr_regs->load_jump_add0); |
| 871 | writel(jump, &sdr_rw_load_jump_mgr_regs->load_jump_add1); |
| 872 | writel(jump, &sdr_rw_load_jump_mgr_regs->load_jump_add2); |
| 873 | |
| 874 | /* Execute count instruction */ |
| 875 | writel(jump, grpaddr); |
| 876 | } |
| 877 | |
Marek Vasut | c577ab5 | 2015-07-13 00:51:05 +0200 | [diff] [blame] | 878 | /** |
| 879 | * rw_mgr_mem_load_user() - Load user calibration values |
| 880 | * @fin1: Final instruction 1 |
| 881 | * @fin2: Final instruction 2 |
| 882 | * @precharge: If 1, precharge the banks at the end |
| 883 | * |
| 884 | * Load user calibration values and optionally precharge the banks. |
| 885 | */ |
| 886 | static void rw_mgr_mem_load_user(const u32 fin1, const u32 fin2, |
| 887 | const int precharge) |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 888 | { |
Marek Vasut | c577ab5 | 2015-07-13 00:51:05 +0200 | [diff] [blame] | 889 | u32 grpaddr = SDR_PHYGRP_RWMGRGRP_ADDRESS | |
| 890 | RW_MGR_RUN_SINGLE_GROUP_OFFSET; |
| 891 | u32 r; |
| 892 | |
| 893 | for (r = 0; r < RW_MGR_MEM_NUMBER_OF_RANKS; r++) { |
| 894 | if (param->skip_ranks[r]) { |
| 895 | /* request to skip the rank */ |
| 896 | continue; |
| 897 | } |
| 898 | |
| 899 | /* set rank */ |
| 900 | set_rank_and_odt_mask(r, RW_MGR_ODT_MODE_OFF); |
| 901 | |
| 902 | /* precharge all banks ... */ |
| 903 | if (precharge) |
| 904 | writel(RW_MGR_PRECHARGE_ALL, grpaddr); |
| 905 | |
| 906 | /* |
 | 907 | * USER Use mirrored commands for odd ranks if address |
 | 908 | * mirroring is on |
| 909 | */ |
| 910 | if ((RW_MGR_MEM_ADDRESS_MIRRORING >> r) & 0x1) { |
| 911 | set_jump_as_return(); |
| 912 | writel(RW_MGR_MRS2_MIRR, grpaddr); |
| 913 | delay_for_n_mem_clocks(4); |
| 914 | set_jump_as_return(); |
| 915 | writel(RW_MGR_MRS3_MIRR, grpaddr); |
| 916 | delay_for_n_mem_clocks(4); |
| 917 | set_jump_as_return(); |
| 918 | writel(RW_MGR_MRS1_MIRR, grpaddr); |
| 919 | delay_for_n_mem_clocks(4); |
| 920 | set_jump_as_return(); |
| 921 | writel(fin1, grpaddr); |
| 922 | } else { |
| 923 | set_jump_as_return(); |
| 924 | writel(RW_MGR_MRS2, grpaddr); |
| 925 | delay_for_n_mem_clocks(4); |
| 926 | set_jump_as_return(); |
| 927 | writel(RW_MGR_MRS3, grpaddr); |
| 928 | delay_for_n_mem_clocks(4); |
| 929 | set_jump_as_return(); |
| 930 | writel(RW_MGR_MRS1, grpaddr); |
| 931 | set_jump_as_return(); |
| 932 | writel(fin2, grpaddr); |
| 933 | } |
| 934 | |
| 935 | if (precharge) |
| 936 | continue; |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 937 | |
Marek Vasut | c577ab5 | 2015-07-13 00:51:05 +0200 | [diff] [blame] | 938 | set_jump_as_return(); |
| 939 | writel(RW_MGR_ZQCL, grpaddr); |
| 940 | |
| 941 | /* tZQinit = tDLLK = 512 ck cycles */ |
| 942 | delay_for_n_mem_clocks(512); |
| 943 | } |
| 944 | } |
| 945 | |
| 946 | static void rw_mgr_mem_initialize(void) |
| 947 | { |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 948 | debug("%s:%d\n", __func__, __LINE__); |
| 949 | |
 | 950 | /* The reset / CKE part of initialization is broadcast to all ranks */ |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 951 | writel(RW_MGR_RANK_ALL, SDR_PHYGRP_RWMGRGRP_ADDRESS | |
| 952 | RW_MGR_SET_CS_AND_ODT_MASK_OFFSET); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 953 | |
| 954 | /* |
 | 955 | * Here's how you load registers for a loop |
 | 956 | * Counters are located @ 0x800 |
 | 957 | * Jump addresses are located @ 0xC00 |
| 958 | * For both, registers 0 to 3 are selected using bits 3 and 2, like |
| 959 | * in 0x800, 0x804, 0x808, 0x80C and 0xC00, 0xC04, 0xC08, 0xC0C |
| 960 | * I know this ain't pretty, but Avalon bus throws away the 2 least |
| 961 | * significant bits |
| 962 | */ |
| 963 | |
| 964 | /* start with memory RESET activated */ |
| 965 | |
| 966 | /* tINIT = 200us */ |
| 967 | |
| 968 | /* |
| 969 | * 200us @ 266MHz (3.75 ns) ~ 54000 clock cycles |
 | 970 | * If a and b are the number of iterations in 2 nested loops |
 | 971 | * it takes the following number of cycles to complete the operation: |
 | 972 | * number_of_cycles = ((2 + n) * a + 2) * b |
 | 973 | * where n is the number of instructions in the inner loop |
| 974 | * One possible solution is n = 0 , a = 256 , b = 106 => a = FF, |
| 975 | * b = 6A |
| 976 | */ |
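	/*
	 * Sanity check of the chosen values: ((2 + 0) * 256 + 2) * 106
	 * = 514 * 106 = 54484 cycles, comfortably above the ~54000 needed.
	 */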
Marek Vasut | 8bf9227 | 2015-07-13 00:44:30 +0200 | [diff] [blame] | 977 | rw_mgr_mem_init_load_regs(SEQ_TINIT_CNTR0_VAL, SEQ_TINIT_CNTR1_VAL, |
| 978 | SEQ_TINIT_CNTR2_VAL, |
| 979 | RW_MGR_INIT_RESET_0_CKE_0); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 980 | |
| 981 | /* indicate that memory is stable */ |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 982 | writel(1, &phy_mgr_cfg->reset_mem_stbl); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 983 | |
| 984 | /* |
| 985 | * transition the RESET to high |
| 986 | * Wait for 500us |
| 987 | */ |
| 988 | |
| 989 | /* |
| 990 | * 500us @ 266MHz (3.75 ns) ~ 134000 clock cycles |
 | 991 | * If a and b are the number of iterations in 2 nested loops |
 | 992 | * it takes the following number of cycles to complete the operation: |
 | 993 | * number_of_cycles = ((2 + n) * a + 2) * b |
 | 994 | * where n is the number of instructions in the inner loop |
| 995 | * One possible solution is n = 2 , a = 131 , b = 256 => a = 83, |
| 996 | * b = FF |
| 997 | */ |
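	/*
	 * Sanity check: ((2 + 2) * 131 + 2) * 256 = 526 * 256 = 134656
	 * cycles, just above the ~134000 needed for 500us.
	 */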
Marek Vasut | 8bf9227 | 2015-07-13 00:44:30 +0200 | [diff] [blame] | 998 | rw_mgr_mem_init_load_regs(SEQ_TRESET_CNTR0_VAL, SEQ_TRESET_CNTR1_VAL, |
| 999 | SEQ_TRESET_CNTR2_VAL, |
| 1000 | RW_MGR_INIT_RESET_1_CKE_0); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 1001 | |
| 1002 | /* bring up clock enable */ |
| 1003 | |
| 1004 | /* tXRP < 250 ck cycles */ |
| 1005 | delay_for_n_mem_clocks(250); |
| 1006 | |
Marek Vasut | c577ab5 | 2015-07-13 00:51:05 +0200 | [diff] [blame] | 1007 | rw_mgr_mem_load_user(RW_MGR_MRS0_DLL_RESET_MIRR, RW_MGR_MRS0_DLL_RESET, |
| 1008 | 0); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 1009 | } |
| 1010 | |
| 1011 | /* |
 | 1012 | * At the end of calibration we have to program the user settings in |
 | 1013 | * and hand off the memory to the user. |
| 1014 | */ |
| 1015 | static void rw_mgr_mem_handoff(void) |
| 1016 | { |
Marek Vasut | c577ab5 | 2015-07-13 00:51:05 +0200 | [diff] [blame] | 1017 | rw_mgr_mem_load_user(RW_MGR_MRS0_USER_MIRR, RW_MGR_MRS0_USER, 1); |
| 1018 | /* |
| 1019 | * USER need to wait tMOD (12CK or 15ns) time before issuing |
| 1020 | * other commands, but we will have plenty of NIOS cycles before |
 | 1021 | * actual handoff so it's okay. |
| 1022 | */ |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 1023 | } |
| 1024 | |
| 1025 | /* |
| 1026 | * performs a guaranteed read on the patterns we are going to use during a |
| 1027 | * read test to ensure memory works |
| 1028 | */ |
| 1029 | static uint32_t rw_mgr_mem_calibrate_read_test_patterns(uint32_t rank_bgn, |
| 1030 | uint32_t group, uint32_t num_tries, uint32_t *bit_chk, |
| 1031 | uint32_t all_ranks) |
| 1032 | { |
| 1033 | uint32_t r, vg; |
| 1034 | uint32_t correct_mask_vg; |
| 1035 | uint32_t tmp_bit_chk; |
| 1036 | uint32_t rank_end = all_ranks ? RW_MGR_MEM_NUMBER_OF_RANKS : |
| 1037 | (rank_bgn + NUM_RANKS_PER_SHADOW_REG); |
| 1038 | uint32_t addr; |
| 1039 | uint32_t base_rw_mgr; |
| 1040 | |
| 1041 | *bit_chk = param->read_correct_mask; |
| 1042 | correct_mask_vg = param->read_correct_mask_vg; |
| 1043 | |
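| | /* |
| | * Descriptive note (not in the original source): depending on all_ranks, |
| | * the loop below sweeps either every rank in the interface or only the |
| | * ranks covered by the current shadow register. |
| | */ |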
| 1044 | for (r = rank_bgn; r < rank_end; r++) { |
| 1045 | if (param->skip_ranks[r]) |
| 1046 | /* request to skip the rank */ |
| 1047 | continue; |
| 1048 | |
| 1049 | /* set rank */ |
| 1050 | set_rank_and_odt_mask(r, RW_MGR_ODT_MODE_READ_WRITE); |
| 1051 | |
| 1052 | /* Load up a constant burst of read commands */ |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 1053 | writel(0x20, &sdr_rw_load_mgr_regs->load_cntr0); |
| 1054 | writel(RW_MGR_GUARANTEED_READ, |
| 1055 | &sdr_rw_load_jump_mgr_regs->load_jump_add0); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 1056 | |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 1057 | writel(0x20, &sdr_rw_load_mgr_regs->load_cntr1); |
| 1058 | writel(RW_MGR_GUARANTEED_READ_CONT, |
| 1059 | &sdr_rw_load_jump_mgr_regs->load_jump_add1); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 1060 | |
| 1061 | tmp_bit_chk = 0; |
| 1062 | for (vg = RW_MGR_MEM_VIRTUAL_GROUPS_PER_READ_DQS-1; ; vg--) { |
| 1063 | /* reset the fifos to get pointers to known state */ |
| 1064 | |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 1065 | writel(0, &phy_mgr_cmd->fifo_reset); |
| 1066 | writel(0, SDR_PHYGRP_RWMGRGRP_ADDRESS | |
| 1067 | RW_MGR_RESET_READ_DATAPATH_OFFSET); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 1068 | |
| 1069 | tmp_bit_chk = tmp_bit_chk << (RW_MGR_MEM_DQ_PER_READ_DQS |
| 1070 | / RW_MGR_MEM_VIRTUAL_GROUPS_PER_READ_DQS); |
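| | /* |
| | * Descriptive note (not in the original source): each pass of this loop |
| | * shifts the accumulated result left by the number of DQ bits per |
| | * virtual group and then ORs in the pass bits of the current virtual |
| | * group read back below; since vg counts down, virtual group 0 ends up |
| | * in the least significant bits. |
| | */ |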
| 1071 | |
Marek Vasut | a334010 | 2015-07-12 19:03:33 +0200 | [diff] [blame] | 1072 | addr = SDR_PHYGRP_RWMGRGRP_ADDRESS | RW_MGR_RUN_SINGLE_GROUP_OFFSET; |
Marek Vasut | 33acf0f | 2015-07-12 20:05:54 +0200 | [diff] [blame] | 1073 | writel(RW_MGR_GUARANTEED_READ, addr + |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 1074 | ((group * RW_MGR_MEM_VIRTUAL_GROUPS_PER_READ_DQS + |
| 1075 | vg) << 2)); |
| 1076 | |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 1077 | base_rw_mgr = readl(SDR_PHYGRP_RWMGRGRP_ADDRESS); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 1078 | tmp_bit_chk = tmp_bit_chk | (correct_mask_vg & (~base_rw_mgr)); |
| 1079 | |
| 1080 | if (vg == 0) |
| 1081 | break; |
| 1082 | } |
| 1083 | *bit_chk &= tmp_bit_chk; |
| 1084 | } |
| 1085 | |
Marek Vasut | a334010 | 2015-07-12 19:03:33 +0200 | [diff] [blame] | 1086 | addr = SDR_PHYGRP_RWMGRGRP_ADDRESS | RW_MGR_RUN_SINGLE_GROUP_OFFSET; |
Marek Vasut | 33acf0f | 2015-07-12 20:05:54 +0200 | [diff] [blame] | 1087 | writel(RW_MGR_CLEAR_DQS_ENABLE, addr + (group << 2)); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 1088 | |
| 1089 | set_rank_and_odt_mask(0, RW_MGR_ODT_MODE_OFF); |
| 1090 | debug_cond(DLEVEL == 1, "%s:%d test_load_patterns(%u,ALL) => (%u == %u) =>\ |
| 1091 | %lu\n", __func__, __LINE__, group, *bit_chk, param->read_correct_mask, |
| 1092 | (long unsigned int)(*bit_chk == param->read_correct_mask)); |
| 1093 | return *bit_chk == param->read_correct_mask; |
| 1094 | } |
| 1095 | |
| 1096 | static uint32_t rw_mgr_mem_calibrate_read_test_patterns_all_ranks |
| 1097 | (uint32_t group, uint32_t num_tries, uint32_t *bit_chk) |
| 1098 | { |
| 1099 | return rw_mgr_mem_calibrate_read_test_patterns(0, group, |
| 1100 | num_tries, bit_chk, 1); |
| 1101 | } |
| 1102 | |
| 1103 | /* load up the patterns we are going to use during a read test */ |
| 1104 | static void rw_mgr_mem_calibrate_read_load_patterns(uint32_t rank_bgn, |
| 1105 | uint32_t all_ranks) |
| 1106 | { |
| 1107 | uint32_t r; |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 1108 | uint32_t rank_end = all_ranks ? RW_MGR_MEM_NUMBER_OF_RANKS : |
| 1109 | (rank_bgn + NUM_RANKS_PER_SHADOW_REG); |
| 1110 | |
| 1111 | debug("%s:%d\n", __func__, __LINE__); |
| 1112 | for (r = rank_bgn; r < rank_end; r++) { |
| 1113 | if (param->skip_ranks[r]) |
| 1114 | /* request to skip the rank */ |
| 1115 | continue; |
| 1116 | |
| 1117 | /* set rank */ |
| 1118 | set_rank_and_odt_mask(r, RW_MGR_ODT_MODE_READ_WRITE); |
| 1119 | |
| 1120 | /* Load up a constant burst */ |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 1121 | writel(0x20, &sdr_rw_load_mgr_regs->load_cntr0); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 1122 | |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 1123 | writel(RW_MGR_GUARANTEED_WRITE_WAIT0, |
| 1124 | &sdr_rw_load_jump_mgr_regs->load_jump_add0); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 1125 | |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 1126 | writel(0x20, &sdr_rw_load_mgr_regs->load_cntr1); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 1127 | |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 1128 | writel(RW_MGR_GUARANTEED_WRITE_WAIT1, |
| 1129 | &sdr_rw_load_jump_mgr_regs->load_jump_add1); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 1130 | |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 1131 | writel(0x04, &sdr_rw_load_mgr_regs->load_cntr2); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 1132 | |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 1133 | writel(RW_MGR_GUARANTEED_WRITE_WAIT2, |
| 1134 | &sdr_rw_load_jump_mgr_regs->load_jump_add2); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 1135 | |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 1136 | writel(0x04, &sdr_rw_load_mgr_regs->load_cntr3); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 1137 | |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 1138 | writel(RW_MGR_GUARANTEED_WRITE_WAIT3, |
| 1139 | &sdr_rw_load_jump_mgr_regs->load_jump_add3); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 1140 | |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 1141 | writel(RW_MGR_GUARANTEED_WRITE, SDR_PHYGRP_RWMGRGRP_ADDRESS | |
| 1142 | RW_MGR_RUN_SINGLE_GROUP_OFFSET); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 1143 | } |
| 1144 | |
| 1145 | set_rank_and_odt_mask(0, RW_MGR_ODT_MODE_OFF); |
| 1146 | } |
| 1147 | |
| 1148 | /* |
| 1149 | * Try a read and see if it returns correct data back. Has dummy reads |
| 1150 | * inserted into the mix, used to align the DQS enable. Has more thorough |
| 1151 | * checks than the regular read test. |
| 1152 | */ |
| 1153 | static uint32_t rw_mgr_mem_calibrate_read_test(uint32_t rank_bgn, uint32_t group, |
| 1154 | uint32_t num_tries, uint32_t all_correct, uint32_t *bit_chk, |
| 1155 | uint32_t all_groups, uint32_t all_ranks) |
| 1156 | { |
| 1157 | uint32_t r, vg; |
| 1158 | uint32_t correct_mask_vg; |
| 1159 | uint32_t tmp_bit_chk; |
| 1160 | uint32_t rank_end = all_ranks ? RW_MGR_MEM_NUMBER_OF_RANKS : |
| 1161 | (rank_bgn + NUM_RANKS_PER_SHADOW_REG); |
| 1162 | uint32_t addr; |
| 1163 | uint32_t base_rw_mgr; |
| 1164 | |
| 1165 | *bit_chk = param->read_correct_mask; |
| 1166 | correct_mask_vg = param->read_correct_mask_vg; |
| 1167 | |
| 1168 | uint32_t quick_read_mode = (((STATIC_CALIB_STEPS) & |
| 1169 | CALIB_SKIP_DELAY_SWEEPS) && ENABLE_SUPER_QUICK_CALIBRATION); |
| 1170 | |
| 1171 | for (r = rank_bgn; r < rank_end; r++) { |
| 1172 | if (param->skip_ranks[r]) |
| 1173 | /* request to skip the rank */ |
| 1174 | continue; |
| 1175 | |
| 1176 | /* set rank */ |
| 1177 | set_rank_and_odt_mask(r, RW_MGR_ODT_MODE_READ_WRITE); |
| 1178 | |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 1179 | writel(0x10, &sdr_rw_load_mgr_regs->load_cntr1); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 1180 | |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 1181 | writel(RW_MGR_READ_B2B_WAIT1, |
| 1182 | &sdr_rw_load_jump_mgr_regs->load_jump_add1); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 1183 | |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 1184 | writel(0x10, &sdr_rw_load_mgr_regs->load_cntr2); |
| 1185 | writel(RW_MGR_READ_B2B_WAIT2, |
| 1186 | &sdr_rw_load_jump_mgr_regs->load_jump_add2); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 1187 | |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 1188 | if (quick_read_mode) |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 1189 | writel(0x1, &sdr_rw_load_mgr_regs->load_cntr0); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 1190 | /* need at least two (1+1) reads to capture failures */ |
| 1191 | else if (all_groups) |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 1192 | writel(0x06, &sdr_rw_load_mgr_regs->load_cntr0); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 1193 | else |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 1194 | writel(0x32, &sdr_rw_load_mgr_regs->load_cntr0); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 1195 | |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 1196 | writel(RW_MGR_READ_B2B, |
| 1197 | &sdr_rw_load_jump_mgr_regs->load_jump_add0); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 1198 | if (all_groups) |
| 1199 | writel(RW_MGR_MEM_IF_READ_DQS_WIDTH * |
| 1200 | RW_MGR_MEM_VIRTUAL_GROUPS_PER_READ_DQS - 1, |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 1201 | &sdr_rw_load_mgr_regs->load_cntr3); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 1202 | else |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 1203 | writel(0x0, &sdr_rw_load_mgr_regs->load_cntr3); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 1204 | |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 1205 | writel(RW_MGR_READ_B2B, |
| 1206 | &sdr_rw_load_jump_mgr_regs->load_jump_add3); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 1207 | |
| 1208 | tmp_bit_chk = 0; |
| 1209 | for (vg = RW_MGR_MEM_VIRTUAL_GROUPS_PER_READ_DQS-1; ; vg--) { |
| 1210 | /* reset the fifos to get pointers to known state */ |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 1211 | writel(0, &phy_mgr_cmd->fifo_reset); |
| 1212 | writel(0, SDR_PHYGRP_RWMGRGRP_ADDRESS | |
| 1213 | RW_MGR_RESET_READ_DATAPATH_OFFSET); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 1214 | |
| 1215 | tmp_bit_chk = tmp_bit_chk << (RW_MGR_MEM_DQ_PER_READ_DQS |
| 1216 | / RW_MGR_MEM_VIRTUAL_GROUPS_PER_READ_DQS); |
| 1217 | |
Marek Vasut | a334010 | 2015-07-12 19:03:33 +0200 | [diff] [blame] | 1218 | if (all_groups) |
| 1219 | addr = SDR_PHYGRP_RWMGRGRP_ADDRESS | RW_MGR_RUN_ALL_GROUPS_OFFSET; |
| 1220 | else |
| 1221 | addr = SDR_PHYGRP_RWMGRGRP_ADDRESS | RW_MGR_RUN_SINGLE_GROUP_OFFSET; |
| 1222 | |
Marek Vasut | 33acf0f | 2015-07-12 20:05:54 +0200 | [diff] [blame] | 1223 | writel(RW_MGR_READ_B2B, addr + |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 1224 | ((group * RW_MGR_MEM_VIRTUAL_GROUPS_PER_READ_DQS + |
| 1225 | vg) << 2)); |
| 1226 | |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 1227 | base_rw_mgr = readl(SDR_PHYGRP_RWMGRGRP_ADDRESS); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 1228 | tmp_bit_chk = tmp_bit_chk | (correct_mask_vg & ~(base_rw_mgr)); |
| 1229 | |
| 1230 | if (vg == 0) |
| 1231 | break; |
| 1232 | } |
| 1233 | *bit_chk &= tmp_bit_chk; |
| 1234 | } |
| 1235 | |
Marek Vasut | a334010 | 2015-07-12 19:03:33 +0200 | [diff] [blame] | 1236 | addr = SDR_PHYGRP_RWMGRGRP_ADDRESS | RW_MGR_RUN_SINGLE_GROUP_OFFSET; |
Marek Vasut | 33acf0f | 2015-07-12 20:05:54 +0200 | [diff] [blame] | 1237 | writel(RW_MGR_CLEAR_DQS_ENABLE, addr + (group << 2)); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 1238 | |
| 1239 | if (all_correct) { |
| 1240 | set_rank_and_odt_mask(0, RW_MGR_ODT_MODE_OFF); |
| 1241 | debug_cond(DLEVEL == 2, "%s:%d read_test(%u,ALL,%u) =>\ |
| 1242 | (%u == %u) => %lu", __func__, __LINE__, group, |
| 1243 | all_groups, *bit_chk, param->read_correct_mask, |
| 1244 | (long unsigned int)(*bit_chk == |
| 1245 | param->read_correct_mask)); |
| 1246 | return *bit_chk == param->read_correct_mask; |
| 1247 | } else { |
| 1248 | set_rank_and_odt_mask(0, RW_MGR_ODT_MODE_OFF); |
| 1249 | debug_cond(DLEVEL == 2, "%s:%d read_test(%u,ONE,%u) =>\ |
| 1250 | (%u != %lu) => %lu\n", __func__, __LINE__, |
| 1251 | group, all_groups, *bit_chk, (long unsigned int)0, |
| 1252 | (long unsigned int)(*bit_chk != 0x00)); |
| 1253 | return *bit_chk != 0x00; |
| 1254 | } |
| 1255 | } |
| 1256 | |
| 1257 | static uint32_t rw_mgr_mem_calibrate_read_test_all_ranks(uint32_t group, |
| 1258 | uint32_t num_tries, uint32_t all_correct, uint32_t *bit_chk, |
| 1259 | uint32_t all_groups) |
| 1260 | { |
| 1261 | return rw_mgr_mem_calibrate_read_test(0, group, num_tries, all_correct, |
| 1262 | bit_chk, all_groups, 1); |
| 1263 | } |
| 1264 | |
| 1265 | static void rw_mgr_incr_vfifo(uint32_t grp, uint32_t *v) |
| 1266 | { |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 1267 | writel(grp, &phy_mgr_cmd->inc_vfifo_hard_phy); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 1268 | (*v)++; |
| 1269 | } |
| 1270 | |
| 1271 | static void rw_mgr_decr_vfifo(uint32_t grp, uint32_t *v) |
| 1272 | { |
| 1273 | uint32_t i; |
| 1274 | |
| 1275 | for (i = 0; i < VFIFO_SIZE-1; i++) |
| 1276 | rw_mgr_incr_vfifo(grp, v); |
| 1277 | } |
| 1278 | |
| 1279 | static int find_vfifo_read(uint32_t grp, uint32_t *bit_chk) |
| 1280 | { |
| 1281 | uint32_t v; |
| 1282 | uint32_t fail_cnt = 0; |
| 1283 | uint32_t test_status; |
| 1284 | |
| 1285 | for (v = 0; v < VFIFO_SIZE; ) { |
| 1286 | debug_cond(DLEVEL == 2, "%s:%d find_dqs_en_phase: vfifo %u\n", |
| 1287 | __func__, __LINE__, v); |
| 1288 | test_status = rw_mgr_mem_calibrate_read_test_all_ranks |
| 1289 | (grp, 1, PASS_ONE_BIT, bit_chk, 0); |
| 1290 | if (!test_status) { |
| 1291 | fail_cnt++; |
| 1292 | |
| 1293 | if (fail_cnt == 2) |
| 1294 | break; |
| 1295 | } |
| 1296 | |
| 1297 | /* fiddle with FIFO */ |
| 1298 | rw_mgr_incr_vfifo(grp, &v); |
| 1299 | } |
| 1300 | |
| 1301 | if (v >= VFIFO_SIZE) { |
| 1302 | /* no failing read found!! Something must have gone wrong */ |
| 1303 | debug_cond(DLEVEL == 2, "%s:%d find_dqs_en_phase: vfifo failed\n", |
| 1304 | __func__, __LINE__); |
| 1305 | return 0; |
| 1306 | } else { |
| 1307 | return v; |
| 1308 | } |
| 1309 | } |
| 1310 | |
| 1311 | static int find_working_phase(uint32_t *grp, uint32_t *bit_chk, |
| 1312 | uint32_t dtaps_per_ptap, uint32_t *work_bgn, |
| 1313 | uint32_t *v, uint32_t *d, uint32_t *p, |
| 1314 | uint32_t *i, uint32_t *max_working_cnt) |
| 1315 | { |
| 1316 | uint32_t found_begin = 0; |
| 1317 | uint32_t tmp_delay = 0; |
| 1318 | uint32_t test_status; |
| 1319 | |
| 1320 | for (*d = 0; *d <= dtaps_per_ptap; (*d)++, tmp_delay += |
| 1321 | IO_DELAY_PER_DQS_EN_DCHAIN_TAP) { |
| 1322 | *work_bgn = tmp_delay; |
| 1323 | scc_mgr_set_dqs_en_delay_all_ranks(*grp, *d); |
| 1324 | |
| 1325 | for (*i = 0; *i < VFIFO_SIZE; (*i)++) { |
| 1326 | for (*p = 0; *p <= IO_DQS_EN_PHASE_MAX; (*p)++, *work_bgn += |
| 1327 | IO_DELAY_PER_OPA_TAP) { |
| 1328 | scc_mgr_set_dqs_en_phase_all_ranks(*grp, *p); |
| 1329 | |
| 1330 | test_status = |
| 1331 | rw_mgr_mem_calibrate_read_test_all_ranks |
| 1332 | (*grp, 1, PASS_ONE_BIT, bit_chk, 0); |
| 1333 | |
| 1334 | if (test_status) { |
| 1335 | *max_working_cnt = 1; |
| 1336 | found_begin = 1; |
| 1337 | break; |
| 1338 | } |
| 1339 | } |
| 1340 | |
| 1341 | if (found_begin) |
| 1342 | break; |
| 1343 | |
| 1344 | if (*p > IO_DQS_EN_PHASE_MAX) |
| 1345 | /* fiddle with FIFO */ |
| 1346 | rw_mgr_incr_vfifo(*grp, v); |
| 1347 | } |
| 1348 | |
| 1349 | if (found_begin) |
| 1350 | break; |
| 1351 | } |
| 1352 | |
| 1353 | if (*i >= VFIFO_SIZE) { |
| 1354 | /* cannot find working solution */ |
| 1355 | debug_cond(DLEVEL == 2, "%s:%d find_dqs_en_phase: no vfifo/\ |
| 1356 | ptap/dtap\n", __func__, __LINE__); |
| 1357 | return 0; |
| 1358 | } else { |
| 1359 | return 1; |
| 1360 | } |
| 1361 | } |
| 1362 | |
| 1363 | static void sdr_backup_phase(uint32_t *grp, uint32_t *bit_chk, |
| 1364 | uint32_t *work_bgn, uint32_t *v, uint32_t *d, |
| 1365 | uint32_t *p, uint32_t *max_working_cnt) |
| 1366 | { |
| 1367 | uint32_t found_begin = 0; |
| 1368 | uint32_t tmp_delay; |
| 1369 | |
| 1370 | /* Special case code for backing up a phase */ |
| 1371 | if (*p == 0) { |
| 1372 | *p = IO_DQS_EN_PHASE_MAX; |
| 1373 | rw_mgr_decr_vfifo(*grp, v); |
| 1374 | } else { |
| 1375 | (*p)--; |
| 1376 | } |
| 1377 | tmp_delay = *work_bgn - IO_DELAY_PER_OPA_TAP; |
| 1378 | scc_mgr_set_dqs_en_phase_all_ranks(*grp, *p); |
| 1379 | |
| 1380 | for (*d = 0; *d <= IO_DQS_EN_DELAY_MAX && tmp_delay < *work_bgn; |
| 1381 | (*d)++, tmp_delay += IO_DELAY_PER_DQS_EN_DCHAIN_TAP) { |
| 1382 | scc_mgr_set_dqs_en_delay_all_ranks(*grp, *d); |
| 1383 | |
| 1384 | if (rw_mgr_mem_calibrate_read_test_all_ranks(*grp, 1, |
| 1385 | PASS_ONE_BIT, |
| 1386 | bit_chk, 0)) { |
| 1387 | found_begin = 1; |
| 1388 | *work_bgn = tmp_delay; |
| 1389 | break; |
| 1390 | } |
| 1391 | } |
| 1392 | |
| 1393 | /* We have found a working dtap before the ptap found above */ |
| 1394 | if (found_begin == 1) |
| 1395 | (*max_working_cnt)++; |
| 1396 | |
| 1397 | /* |
| 1398 | * Restore VFIFO to old state before we decremented it |
| 1399 | * (if needed). |
| 1400 | */ |
| 1401 | (*p)++; |
| 1402 | if (*p > IO_DQS_EN_PHASE_MAX) { |
| 1403 | *p = 0; |
| 1404 | rw_mgr_incr_vfifo(*grp, v); |
| 1405 | } |
| 1406 | |
| 1407 | scc_mgr_set_dqs_en_delay_all_ranks(*grp, 0); |
| 1408 | } |
| 1409 | |
| 1410 | static int sdr_nonworking_phase(uint32_t *grp, uint32_t *bit_chk, |
| 1411 | uint32_t *work_bgn, uint32_t *v, uint32_t *d, |
| 1412 | uint32_t *p, uint32_t *i, uint32_t *max_working_cnt, |
| 1413 | uint32_t *work_end) |
| 1414 | { |
| 1415 | uint32_t found_end = 0; |
| 1416 | |
| 1417 | (*p)++; |
| 1418 | *work_end += IO_DELAY_PER_OPA_TAP; |
| 1419 | if (*p > IO_DQS_EN_PHASE_MAX) { |
| 1420 | /* fiddle with FIFO */ |
| 1421 | *p = 0; |
| 1422 | rw_mgr_incr_vfifo(*grp, v); |
| 1423 | } |
| 1424 | |
| 1425 | for (; *i < VFIFO_SIZE + 1; (*i)++) { |
| 1426 | for (; *p <= IO_DQS_EN_PHASE_MAX; (*p)++, *work_end |
| 1427 | += IO_DELAY_PER_OPA_TAP) { |
| 1428 | scc_mgr_set_dqs_en_phase_all_ranks(*grp, *p); |
| 1429 | |
| 1430 | if (!rw_mgr_mem_calibrate_read_test_all_ranks |
| 1431 | (*grp, 1, PASS_ONE_BIT, bit_chk, 0)) { |
| 1432 | found_end = 1; |
| 1433 | break; |
| 1434 | } else { |
| 1435 | (*max_working_cnt)++; |
| 1436 | } |
| 1437 | } |
| 1438 | |
| 1439 | if (found_end) |
| 1440 | break; |
| 1441 | |
| 1442 | if (*p > IO_DQS_EN_PHASE_MAX) { |
| 1443 | /* fiddle with FIFO */ |
| 1444 | rw_mgr_incr_vfifo(*grp, v); |
| 1445 | *p = 0; |
| 1446 | } |
| 1447 | } |
| 1448 | |
| 1449 | if (*i >= VFIFO_SIZE + 1) { |
| 1450 | /* cannot see edge of failing read */ |
| 1451 | debug_cond(DLEVEL == 2, "%s:%d sdr_nonworking_phase: end:\ |
| 1452 | failed\n", __func__, __LINE__); |
| 1453 | return 0; |
| 1454 | } else { |
| 1455 | return 1; |
| 1456 | } |
| 1457 | } |
| 1458 | |
| 1459 | static int sdr_find_window_centre(uint32_t *grp, uint32_t *bit_chk, |
| 1460 | uint32_t *work_bgn, uint32_t *v, uint32_t *d, |
| 1461 | uint32_t *p, uint32_t *work_mid, |
| 1462 | uint32_t *work_end) |
| 1463 | { |
| 1464 | int i; |
| 1465 | int tmp_delay = 0; |
| 1466 | |
| 1467 | *work_mid = (*work_bgn + *work_end) / 2; |
| 1468 | |
| 1469 | debug_cond(DLEVEL == 2, "work_bgn=%d work_end=%d work_mid=%d\n", |
| 1470 | *work_bgn, *work_end, *work_mid); |
| 1471 | /* Get the middle delay to be less than a VFIFO delay */ |
| 1472 | for (*p = 0; *p <= IO_DQS_EN_PHASE_MAX; |
| 1473 | (*p)++, tmp_delay += IO_DELAY_PER_OPA_TAP) |
| 1474 | ; |
| 1475 | debug_cond(DLEVEL == 2, "vfifo ptap delay %d\n", tmp_delay); |
| 1476 | while (*work_mid > tmp_delay) |
| 1477 | *work_mid -= tmp_delay; |
| 1478 | debug_cond(DLEVEL == 2, "new work_mid %d\n", *work_mid); |
| 1479 | |
| 1480 | tmp_delay = 0; |
| 1481 | for (*p = 0; *p <= IO_DQS_EN_PHASE_MAX && tmp_delay < *work_mid; |
| 1482 | (*p)++, tmp_delay += IO_DELAY_PER_OPA_TAP) |
| 1483 | ; |
| 1484 | tmp_delay -= IO_DELAY_PER_OPA_TAP; |
| 1485 | debug_cond(DLEVEL == 2, "new p %d, tmp_delay=%d\n", (*p) - 1, tmp_delay); |
| 1486 | for (*d = 0; *d <= IO_DQS_EN_DELAY_MAX && tmp_delay < *work_mid; (*d)++, |
| 1487 | tmp_delay += IO_DELAY_PER_DQS_EN_DCHAIN_TAP) |
| 1488 | ; |
| 1489 | debug_cond(DLEVEL == 2, "new d %d, tmp_delay=%d\n", *d, tmp_delay); |
| 1490 | |
| 1491 | scc_mgr_set_dqs_en_phase_all_ranks(*grp, (*p) - 1); |
| 1492 | scc_mgr_set_dqs_en_delay_all_ranks(*grp, *d); |
| 1493 | |
| 1494 | /* |
| 1495 | * Push the VFIFO until we can successfully calibrate. We can do this |
| 1496 | * because the largest possible margin is 1 VFIFO cycle. |
| 1497 | */ |
| 1498 | for (i = 0; i < VFIFO_SIZE; i++) { |
| 1499 | debug_cond(DLEVEL == 2, "find_dqs_en_phase: center: vfifo=%u\n", |
| 1500 | *v); |
| 1501 | if (rw_mgr_mem_calibrate_read_test_all_ranks(*grp, 1, |
| 1502 | PASS_ONE_BIT, |
| 1503 | bit_chk, 0)) { |
| 1504 | break; |
| 1505 | } |
| 1506 | |
| 1507 | /* fiddle with FIFO */ |
| 1508 | rw_mgr_incr_vfifo(*grp, v); |
| 1509 | } |
| 1510 | |
| 1511 | if (i >= VFIFO_SIZE) { |
| 1512 | debug_cond(DLEVEL == 2, "%s:%d find_dqs_en_phase: center: \ |
| 1513 | failed\n", __func__, __LINE__); |
| 1514 | return 0; |
| 1515 | } else { |
| 1516 | return 1; |
| 1517 | } |
| 1518 | } |
| 1519 | |
| 1520 | /* find a good dqs enable to use */ |
| 1521 | static uint32_t rw_mgr_mem_calibrate_vfifo_find_dqs_en_phase(uint32_t grp) |
| 1522 | { |
| 1523 | uint32_t v, d, p, i; |
| 1524 | uint32_t max_working_cnt; |
| 1525 | uint32_t bit_chk; |
| 1526 | uint32_t dtaps_per_ptap; |
| 1527 | uint32_t work_bgn, work_mid, work_end; |
| 1528 | uint32_t found_passing_read, found_failing_read, initial_failing_dtap; |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 1529 | |
| 1530 | debug("%s:%d %u\n", __func__, __LINE__, grp); |
| 1531 | |
| 1532 | reg_file_set_sub_stage(CAL_SUBSTAGE_VFIFO_CENTER); |
| 1533 | |
| 1534 | scc_mgr_set_dqs_en_delay_all_ranks(grp, 0); |
| 1535 | scc_mgr_set_dqs_en_phase_all_ranks(grp, 0); |
| 1536 | |
| 1537 | /* ************************************************************** */ |
| 1538 | /* * Step 0 : Determine number of delay taps for each phase tap * */ |
| 1539 | dtaps_per_ptap = IO_DELAY_PER_OPA_TAP/IO_DELAY_PER_DQS_EN_DCHAIN_TAP; |
| 1540 | |
| 1541 | /* ********************************************************* */ |
| 1542 | /* * Step 1 : First push vfifo until we get a failing read * */ |
| 1543 | v = find_vfifo_read(grp, &bit_chk); |
| 1544 | |
| 1545 | max_working_cnt = 0; |
| 1546 | |
| 1547 | /* ******************************************************** */ |
| 1548 | /* * step 2: find first working phase, increment in ptaps * */ |
| 1549 | work_bgn = 0; |
| 1550 | if (find_working_phase(&grp, &bit_chk, dtaps_per_ptap, &work_bgn, &v, &d, |
| 1551 | &p, &i, &max_working_cnt) == 0) |
| 1552 | return 0; |
| 1553 | |
| 1554 | work_end = work_bgn; |
| 1555 | |
| 1556 | /* |
| 1557 | * If d is 0 then the working window covers a phase tap and we can |
| 1558 | * follow the old procedure. Otherwise, we've found the beginning |
| 1559 | * and we need to increment the dtaps until we find the end. |
| 1560 | */ |
| 1561 | if (d == 0) { |
| 1562 | /* ********************************************************* */ |
| 1563 | /* * step 3a: if we have room, back off by one and |
| 1564 | increment in dtaps * */ |
| 1565 | |
| 1566 | sdr_backup_phase(&grp, &bit_chk, &work_bgn, &v, &d, &p, |
| 1567 | &max_working_cnt); |
| 1568 | |
| 1569 | /* ********************************************************* */ |
| 1570 | /* * step 4a: go forward from working phase to non working |
| 1571 | phase, increment in ptaps * */ |
| 1572 | if (sdr_nonworking_phase(&grp, &bit_chk, &work_bgn, &v, &d, &p, |
| 1573 | &i, &max_working_cnt, &work_end) == 0) |
| 1574 | return 0; |
| 1575 | |
| 1576 | /* ********************************************************* */ |
| 1577 | /* * step 5a: back off one from last, increment in dtaps * */ |
| 1578 | |
| 1579 | /* Special case code for backing up a phase */ |
| 1580 | if (p == 0) { |
| 1581 | p = IO_DQS_EN_PHASE_MAX; |
| 1582 | rw_mgr_decr_vfifo(grp, &v); |
| 1583 | } else { |
| 1584 | p = p - 1; |
| 1585 | } |
| 1586 | |
| 1587 | work_end -= IO_DELAY_PER_OPA_TAP; |
| 1588 | scc_mgr_set_dqs_en_phase_all_ranks(grp, p); |
| 1589 | |
| 1590 | /* * The actual increment of dtaps is done outside of |
| 1591 | the if/else loop to share code */ |
| 1592 | d = 0; |
| 1593 | |
| 1594 | debug_cond(DLEVEL == 2, "%s:%d find_dqs_en_phase: v/p: \ |
| 1595 | vfifo=%u ptap=%u\n", __func__, __LINE__, |
| 1596 | v, p); |
| 1597 | } else { |
| 1598 | /* ******************************************************* */ |
| 1599 | /* * step 3-5b: Find the right edge of the window using |
| 1600 | delay taps * */ |
| 1601 | debug_cond(DLEVEL == 2, "%s:%d find_dqs_en_phase:vfifo=%u \ |
| 1602 | ptap=%u dtap=%u bgn=%u\n", __func__, __LINE__, |
| 1603 | v, p, d, work_bgn); |
| 1604 | |
| 1605 | work_end = work_bgn; |
| 1606 | |
| 1607 | /* * The actual increment of dtaps is done outside of the |
| 1608 | if/else loop to share code */ |
| 1609 | |
| 1610 | /* Only here to counterbalance a subtract later on which is |
| 1611 | not needed if this branch of the algorithm is taken */ |
| 1612 | max_working_cnt++; |
| 1613 | } |
| 1614 | |
| 1615 | /* The dtap increment to find the failing edge is done here */ |
| 1616 | for (; d <= IO_DQS_EN_DELAY_MAX; d++, work_end += |
| 1617 | IO_DELAY_PER_DQS_EN_DCHAIN_TAP) { |
| 1618 | debug_cond(DLEVEL == 2, "%s:%d find_dqs_en_phase: \ |
| 1619 | end-2: dtap=%u\n", __func__, __LINE__, d); |
| 1620 | scc_mgr_set_dqs_en_delay_all_ranks(grp, d); |
| 1621 | |
| 1622 | if (!rw_mgr_mem_calibrate_read_test_all_ranks(grp, 1, |
| 1623 | PASS_ONE_BIT, |
| 1624 | &bit_chk, 0)) { |
| 1625 | break; |
| 1626 | } |
| 1627 | } |
| 1628 | |
| 1629 | /* Go back to working dtap */ |
| 1630 | if (d != 0) |
| 1631 | work_end -= IO_DELAY_PER_DQS_EN_DCHAIN_TAP; |
| 1632 | |
| 1633 | debug_cond(DLEVEL == 2, "%s:%d find_dqs_en_phase: v/p/d: vfifo=%u \ |
| 1634 | ptap=%u dtap=%u end=%u\n", __func__, __LINE__, |
| 1635 | v, p, d-1, work_end); |
| 1636 | |
| 1637 | if (work_end < work_bgn) { |
| 1638 | /* nil range */ |
| 1639 | debug_cond(DLEVEL == 2, "%s:%d find_dqs_en_phase: end-2: \ |
| 1640 | failed\n", __func__, __LINE__); |
| 1641 | return 0; |
| 1642 | } |
| 1643 | |
| 1644 | debug_cond(DLEVEL == 2, "%s:%d find_dqs_en_phase: found range [%u,%u]\n", |
| 1645 | __func__, __LINE__, work_bgn, work_end); |
| 1646 | |
| 1647 | /* *************************************************************** */ |
| 1648 | /* |
| 1649 | * * We need to calculate the number of dtaps that equal a ptap |
| 1650 | * * To do that we'll back up a ptap and re-find the edge of the |
| 1651 | * * window using dtaps |
| 1652 | */ |
| 1653 | |
| 1654 | debug_cond(DLEVEL == 2, "%s:%d find_dqs_en_phase: calculate dtaps_per_ptap \ |
| 1655 | for tracking\n", __func__, __LINE__); |
| 1656 | |
| 1657 | /* Special case code for backing up a phase */ |
| 1658 | if (p == 0) { |
| 1659 | p = IO_DQS_EN_PHASE_MAX; |
| 1660 | rw_mgr_decr_vfifo(grp, &v); |
| 1661 | debug_cond(DLEVEL == 2, "%s:%d find_dqs_en_phase: backedup \ |
| 1662 | cycle/phase: v=%u p=%u\n", __func__, __LINE__, |
| 1663 | v, p); |
| 1664 | } else { |
| 1665 | p = p - 1; |
| 1666 | debug_cond(DLEVEL == 2, "%s:%d find_dqs_en_phase: backedup \ |
| 1667 | phase only: v=%u p=%u", __func__, __LINE__, |
| 1668 | v, p); |
| 1669 | } |
| 1670 | |
| 1671 | scc_mgr_set_dqs_en_phase_all_ranks(grp, p); |
| 1672 | |
| 1673 | /* |
| 1674 | * Increase dtap until we first see a passing read (in case the |
| 1675 | * window is smaller than a ptap), |
| 1676 | * and then a failing read to mark the edge of the window again |
| 1677 | */ |
| 1678 | |
| 1679 | /* Find a passing read */ |
| 1680 | debug_cond(DLEVEL == 2, "%s:%d find_dqs_en_phase: find passing read\n", |
| 1681 | __func__, __LINE__); |
| 1682 | found_passing_read = 0; |
| 1683 | found_failing_read = 0; |
| 1684 | initial_failing_dtap = d; |
| 1685 | for (; d <= IO_DQS_EN_DELAY_MAX; d++) { |
| 1686 | debug_cond(DLEVEL == 2, "%s:%d find_dqs_en_phase: testing \ |
| 1687 | read d=%u\n", __func__, __LINE__, d); |
| 1688 | scc_mgr_set_dqs_en_delay_all_ranks(grp, d); |
| 1689 | |
| 1690 | if (rw_mgr_mem_calibrate_read_test_all_ranks(grp, 1, |
| 1691 | PASS_ONE_BIT, |
| 1692 | &bit_chk, 0)) { |
| 1693 | found_passing_read = 1; |
| 1694 | break; |
| 1695 | } |
| 1696 | } |
| 1697 | |
| 1698 | if (found_passing_read) { |
| 1699 | /* Find a failing read */ |
| 1700 | debug_cond(DLEVEL == 2, "%s:%d find_dqs_en_phase: find failing \ |
| 1701 | read\n", __func__, __LINE__); |
| 1702 | for (d = d + 1; d <= IO_DQS_EN_DELAY_MAX; d++) { |
| 1703 | debug_cond(DLEVEL == 2, "%s:%d find_dqs_en_phase: \ |
| 1704 | testing read d=%u\n", __func__, __LINE__, d); |
| 1705 | scc_mgr_set_dqs_en_delay_all_ranks(grp, d); |
| 1706 | |
| 1707 | if (!rw_mgr_mem_calibrate_read_test_all_ranks |
| 1708 | (grp, 1, PASS_ONE_BIT, &bit_chk, 0)) { |
| 1709 | found_failing_read = 1; |
| 1710 | break; |
| 1711 | } |
| 1712 | } |
| 1713 | } else { |
| 1714 | debug_cond(DLEVEL == 1, "%s:%d find_dqs_en_phase: failed to \ |
| 1715 | calculate dtaps", __func__, __LINE__); |
| 1716 | debug_cond(DLEVEL == 1, "per ptap. Fall back on static value\n"); |
| 1717 | } |
| 1718 | |
| 1719 | /* |
| 1720 | * The dynamically calculated dtaps_per_ptap is only valid if we |
| 1721 | * found both a passing and a failing read. If we didn't, it means |
| 1722 | * d hit the max (IO_DQS_EN_DELAY_MAX) and dtaps_per_ptap retains |
| 1723 | * its statically calculated value. |
| 1724 | */ |
| 1725 | if (found_passing_read && found_failing_read) |
| 1726 | dtaps_per_ptap = d - initial_failing_dtap; |
| 1727 | |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 1728 | writel(dtaps_per_ptap, &sdr_reg_file->dtaps_per_ptap); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 1729 | debug_cond(DLEVEL == 2, "%s:%d find_dqs_en_phase: dtaps_per_ptap=%u \ |
| 1730 | - %u = %u", __func__, __LINE__, d, |
| 1731 | initial_failing_dtap, dtaps_per_ptap); |
| 1732 | |
| 1733 | /* ******************************************** */ |
| 1734 | /* * step 6: Find the centre of the window * */ |
| 1735 | if (sdr_find_window_centre(&grp, &bit_chk, &work_bgn, &v, &d, &p, |
| 1736 | &work_mid, &work_end) == 0) |
| 1737 | return 0; |
| 1738 | |
| 1739 | debug_cond(DLEVEL == 2, "%s:%d find_dqs_en_phase: center found: \ |
| 1740 | vfifo=%u ptap=%u dtap=%u\n", __func__, __LINE__, |
| 1741 | v, p-1, d); |
| 1742 | return 1; |
| 1743 | } |
| 1744 | |
| 1745 | /* |
| 1746 | * Try rw_mgr_mem_calibrate_vfifo_find_dqs_en_phase across different |
| 1747 | * dq_in_delay values |
| 1748 | */ |
| 1749 | static uint32_t |
| 1750 | rw_mgr_mem_calibrate_vfifo_find_dqs_en_phase_sweep_dq_in_delay |
| 1751 | (uint32_t write_group, uint32_t read_group, uint32_t test_bgn) |
| 1752 | { |
| 1753 | uint32_t found; |
| 1754 | uint32_t i; |
| 1755 | uint32_t p; |
| 1756 | uint32_t d; |
| 1757 | uint32_t r; |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 1758 | |
| 1759 | const uint32_t delay_step = IO_IO_IN_DELAY_MAX / |
| 1760 | (RW_MGR_MEM_DQ_PER_READ_DQS-1); |
| 1761 | /* we start at zero, so have one less dq to divide among */ |
| 1762 | |
| 1763 | debug("%s:%d (%u,%u,%u)", __func__, __LINE__, write_group, read_group, |
| 1764 | test_bgn); |
| 1765 | |
| 1766 | /* try different dq_in_delays since the dq path is shorter than dqs */ |
| 1767 | |
| 1768 | for (r = 0; r < RW_MGR_MEM_NUMBER_OF_RANKS; |
| 1769 | r += NUM_RANKS_PER_SHADOW_REG) { |
Marek Vasut | 122e1f3 | 2015-07-17 06:07:13 +0200 | [diff] [blame] | 1770 | for (i = 0, p = test_bgn, d = 0; i < RW_MGR_MEM_DQ_PER_READ_DQS; i++, p++, d += delay_step) { |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 1771 | debug_cond(DLEVEL == 1, "%s:%d rw_mgr_mem_calibrate_\ |
| 1772 | vfifo_find_dqs_", __func__, __LINE__); |
| 1773 | debug_cond(DLEVEL == 1, "en_phase_sweep_dq_in_delay: g=%u/%u ", |
| 1774 | write_group, read_group); |
| 1775 | debug_cond(DLEVEL == 1, "r=%u, i=%u p=%u d=%u\n", r, i , p, d); |
Marek Vasut | cab8079 | 2015-07-12 22:07:33 +0200 | [diff] [blame] | 1776 | scc_mgr_set_dq_in_delay(p, d); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 1777 | scc_mgr_load_dq(p); |
| 1778 | } |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 1779 | writel(0, &sdr_scc_mgr->update); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 1780 | } |
| 1781 | |
| 1782 | found = rw_mgr_mem_calibrate_vfifo_find_dqs_en_phase(read_group); |
| 1783 | |
| 1784 | debug_cond(DLEVEL == 1, "%s:%d rw_mgr_mem_calibrate_vfifo_find_dqs_\ |
| 1785 | en_phase_sweep_dq", __func__, __LINE__); |
| 1786 | debug_cond(DLEVEL == 1, "_in_delay: g=%u/%u found=%u; Resetting delay \ |
| 1787 | chain to zero\n", write_group, read_group, found); |
| 1788 | |
| 1789 | for (r = 0; r < RW_MGR_MEM_NUMBER_OF_RANKS; |
| 1790 | r += NUM_RANKS_PER_SHADOW_REG) { |
| 1791 | for (i = 0, p = test_bgn; i < RW_MGR_MEM_DQ_PER_READ_DQS; |
| 1792 | i++, p++) { |
Marek Vasut | cab8079 | 2015-07-12 22:07:33 +0200 | [diff] [blame] | 1793 | scc_mgr_set_dq_in_delay(p, 0); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 1794 | scc_mgr_load_dq(p); |
| 1795 | } |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 1796 | writel(0, &sdr_scc_mgr->update); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 1797 | } |
| 1798 | |
| 1799 | return found; |
| 1800 | } |
| 1801 | |
| 1802 | /* per-bit deskew DQ and center */ |
| 1803 | static uint32_t rw_mgr_mem_calibrate_vfifo_center(uint32_t rank_bgn, |
| 1804 | uint32_t write_group, uint32_t read_group, uint32_t test_bgn, |
| 1805 | uint32_t use_read_test, uint32_t update_fom) |
| 1806 | { |
| 1807 | uint32_t i, p, d, min_index; |
| 1808 | /* |
| 1809 | * Store these as signed since there are comparisons with |
| 1810 | * signed numbers. |
| 1811 | */ |
| 1812 | uint32_t bit_chk; |
| 1813 | uint32_t sticky_bit_chk; |
| 1814 | int32_t left_edge[RW_MGR_MEM_DQ_PER_READ_DQS]; |
| 1815 | int32_t right_edge[RW_MGR_MEM_DQ_PER_READ_DQS]; |
| 1816 | int32_t final_dq[RW_MGR_MEM_DQ_PER_READ_DQS]; |
| 1817 | int32_t mid; |
| 1818 | int32_t orig_mid_min, mid_min; |
| 1819 | int32_t new_dqs, start_dqs, start_dqs_en, shift_dq, final_dqs, |
| 1820 | final_dqs_en; |
| 1821 | int32_t dq_margin, dqs_margin; |
| 1822 | uint32_t stop; |
| 1823 | uint32_t temp_dq_in_delay1, temp_dq_in_delay2; |
| 1824 | uint32_t addr; |
| 1825 | |
| 1826 | debug("%s:%d: %u %u", __func__, __LINE__, read_group, test_bgn); |
| 1827 | |
Marek Vasut | a334010 | 2015-07-12 19:03:33 +0200 | [diff] [blame] | 1828 | addr = SDR_PHYGRP_SCCGRP_ADDRESS | SCC_MGR_DQS_IN_DELAY_OFFSET; |
Marek Vasut | 33acf0f | 2015-07-12 20:05:54 +0200 | [diff] [blame] | 1829 | start_dqs = readl(addr + (read_group << 2)); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 1830 | if (IO_SHIFT_DQS_EN_WHEN_SHIFT_DQS) |
Marek Vasut | 33acf0f | 2015-07-12 20:05:54 +0200 | [diff] [blame] | 1831 | start_dqs_en = readl(addr + ((read_group << 2) |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 1832 | - IO_DQS_EN_DELAY_OFFSET)); |
| 1833 | |
| 1834 | /* set the left and right edge of each bit to an illegal value */ |
| 1835 | /* use (IO_IO_IN_DELAY_MAX + 1) as an illegal value */ |
| 1836 | sticky_bit_chk = 0; |
| 1837 | for (i = 0; i < RW_MGR_MEM_DQ_PER_READ_DQS; i++) { |
| 1838 | left_edge[i] = IO_IO_IN_DELAY_MAX + 1; |
| 1839 | right_edge[i] = IO_IO_IN_DELAY_MAX + 1; |
| 1840 | } |
| 1841 | |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 1842 | /* Search for the left edge of the window for each bit */ |
| 1843 | for (d = 0; d <= IO_IO_IN_DELAY_MAX; d++) { |
| 1844 | scc_mgr_apply_group_dq_in_delay(test_bgn, d); |
| 1845 | |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 1846 | writel(0, &sdr_scc_mgr->update); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 1847 | |
| 1848 | /* |
| 1849 | * Stop searching when the read test doesn't pass AND when |
| 1850 | * we've seen a passing read on every bit. |
| 1851 | */ |
| 1852 | if (use_read_test) { |
| 1853 | stop = !rw_mgr_mem_calibrate_read_test(rank_bgn, |
| 1854 | read_group, NUM_READ_PB_TESTS, PASS_ONE_BIT, |
| 1855 | &bit_chk, 0, 0); |
| 1856 | } else { |
| 1857 | rw_mgr_mem_calibrate_write_test(rank_bgn, write_group, |
| 1858 | 0, PASS_ONE_BIT, |
| 1859 | &bit_chk, 0); |
| 1860 | bit_chk = bit_chk >> (RW_MGR_MEM_DQ_PER_READ_DQS * |
| 1861 | (read_group - (write_group * |
| 1862 | RW_MGR_MEM_IF_READ_DQS_WIDTH / |
| 1863 | RW_MGR_MEM_IF_WRITE_DQS_WIDTH))); |
| 1864 | stop = (bit_chk == 0); |
| 1865 | } |
| 1866 | sticky_bit_chk = sticky_bit_chk | bit_chk; |
| 1867 | stop = stop && (sticky_bit_chk == param->read_correct_mask); |
| 1868 | debug_cond(DLEVEL == 2, "%s:%d vfifo_center(left): dtap=%u => %u == %u \ |
| 1869 | && %u", __func__, __LINE__, d, |
| 1870 | sticky_bit_chk, |
| 1871 | param->read_correct_mask, stop); |
| 1872 | |
| 1873 | if (stop == 1) { |
| 1874 | break; |
| 1875 | } else { |
| 1876 | for (i = 0; i < RW_MGR_MEM_DQ_PER_READ_DQS; i++) { |
| 1877 | if (bit_chk & 1) { |
| 1878 | /* Remember a passing test as the |
| 1879 | left_edge */ |
| 1880 | left_edge[i] = d; |
| 1881 | } else { |
| 1882 | /* If a left edge has not been seen yet, |
| 1883 | then a future passing test will mark |
| 1884 | this edge as the right edge */ |
| 1885 | if (left_edge[i] == |
| 1886 | IO_IO_IN_DELAY_MAX + 1) { |
| 1887 | right_edge[i] = -(d + 1); |
| 1888 | } |
| 1889 | } |
| 1890 | bit_chk = bit_chk >> 1; |
| 1891 | } |
| 1892 | } |
| 1893 | } |
| 1894 | |
| 1895 | /* Reset DQ delay chains to 0 */ |
Marek Vasut | 122e1f3 | 2015-07-17 06:07:13 +0200 | [diff] [blame] | 1896 | scc_mgr_apply_group_dq_in_delay(test_bgn, 0); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 1897 | sticky_bit_chk = 0; |
| 1898 | for (i = RW_MGR_MEM_DQ_PER_READ_DQS - 1;; i--) { |
| 1899 | debug_cond(DLEVEL == 2, "%s:%d vfifo_center: left_edge[%u]: \ |
| 1900 | %d right_edge[%u]: %d\n", __func__, __LINE__, |
| 1901 | i, left_edge[i], i, right_edge[i]); |
| 1902 | |
| 1903 | /* |
| 1904 | * Check for cases where we haven't found the left edge, |
| 1905 | * which makes our assignment of the right edge invalid. |
| 1906 | * Reset it to the illegal value. |
| 1907 | */ |
| 1908 | if ((left_edge[i] == IO_IO_IN_DELAY_MAX + 1) && ( |
| 1909 | right_edge[i] != IO_IO_IN_DELAY_MAX + 1)) { |
| 1910 | right_edge[i] = IO_IO_IN_DELAY_MAX + 1; |
| 1911 | debug_cond(DLEVEL == 2, "%s:%d vfifo_center: reset \ |
| 1912 | right_edge[%u]: %d\n", __func__, __LINE__, |
| 1913 | i, right_edge[i]); |
| 1914 | } |
| 1915 | |
| 1916 | /* |
| 1917 | * Reset sticky bit (except for bits where we have seen |
| 1918 | * both the left and right edge). |
| 1919 | */ |
| 1920 | sticky_bit_chk = sticky_bit_chk << 1; |
| 1921 | if ((left_edge[i] != IO_IO_IN_DELAY_MAX + 1) && |
| 1922 | (right_edge[i] != IO_IO_IN_DELAY_MAX + 1)) { |
| 1923 | sticky_bit_chk = sticky_bit_chk | 1; |
| 1924 | } |
| 1925 | |
| 1926 | if (i == 0) |
| 1927 | break; |
| 1928 | } |
| 1929 | |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 1930 | /* Search for the right edge of the window for each bit */ |
| 1931 | for (d = 0; d <= IO_DQS_IN_DELAY_MAX - start_dqs; d++) { |
| 1932 | scc_mgr_set_dqs_bus_in_delay(read_group, d + start_dqs); |
| 1933 | if (IO_SHIFT_DQS_EN_WHEN_SHIFT_DQS) { |
| 1934 | uint32_t delay = d + start_dqs_en; |
| 1935 | if (delay > IO_DQS_EN_DELAY_MAX) |
| 1936 | delay = IO_DQS_EN_DELAY_MAX; |
| 1937 | scc_mgr_set_dqs_en_delay(read_group, delay); |
| 1938 | } |
| 1939 | scc_mgr_load_dqs(read_group); |
| 1940 | |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 1941 | writel(0, &sdr_scc_mgr->update); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 1942 | |
| 1943 | /* |
| 1944 | * Stop searching when the read test doesn't pass AND when |
| 1945 | * we've seen a passing read on every bit. |
| 1946 | */ |
| 1947 | if (use_read_test) { |
| 1948 | stop = !rw_mgr_mem_calibrate_read_test(rank_bgn, |
| 1949 | read_group, NUM_READ_PB_TESTS, PASS_ONE_BIT, |
| 1950 | &bit_chk, 0, 0); |
| 1951 | } else { |
| 1952 | rw_mgr_mem_calibrate_write_test(rank_bgn, write_group, |
| 1953 | 0, PASS_ONE_BIT, |
| 1954 | &bit_chk, 0); |
| 1955 | bit_chk = bit_chk >> (RW_MGR_MEM_DQ_PER_READ_DQS * |
| 1956 | (read_group - (write_group * |
| 1957 | RW_MGR_MEM_IF_READ_DQS_WIDTH / |
| 1958 | RW_MGR_MEM_IF_WRITE_DQS_WIDTH))); |
| 1959 | stop = (bit_chk == 0); |
| 1960 | } |
| 1961 | sticky_bit_chk = sticky_bit_chk | bit_chk; |
| 1962 | stop = stop && (sticky_bit_chk == param->read_correct_mask); |
| 1963 | |
| 1964 | debug_cond(DLEVEL == 2, "%s:%d vfifo_center(right): dtap=%u => %u == \ |
| 1965 | %u && %u", __func__, __LINE__, d, |
| 1966 | sticky_bit_chk, param->read_correct_mask, stop); |
| 1967 | |
| 1968 | if (stop == 1) { |
| 1969 | break; |
| 1970 | } else { |
| 1971 | for (i = 0; i < RW_MGR_MEM_DQ_PER_READ_DQS; i++) { |
| 1972 | if (bit_chk & 1) { |
| 1973 | /* Remember a passing test as |
| 1974 | the right_edge */ |
| 1975 | right_edge[i] = d; |
| 1976 | } else { |
| 1977 | if (d != 0) { |
| 1978 | /* If a right edge has not been |
| 1979 | seen yet, then a future passing |
| 1980 | test will mark this edge as the |
| 1981 | left edge */ |
| 1982 | if (right_edge[i] == |
| 1983 | IO_IO_IN_DELAY_MAX + 1) { |
| 1984 | left_edge[i] = -(d + 1); |
| 1985 | } |
| 1986 | } else { |
| 1987 | /* d = 0 failed, but it passed |
| 1988 | when testing the left edge, |
| 1989 | so it must be marginal, |
| 1990 | set it to -1 */ |
| 1991 | if (right_edge[i] == |
| 1992 | IO_IO_IN_DELAY_MAX + 1 && |
| 1993 | left_edge[i] != |
| 1994 | IO_IO_IN_DELAY_MAX |
| 1995 | + 1) { |
| 1996 | right_edge[i] = -1; |
| 1997 | } |
| 1998 | /* If a right edge has not been |
| 1999 | seen yet, then a future passing |
| 2000 | test will mark this edge as the |
| 2001 | left edge */ |
| 2002 | else if (right_edge[i] == |
| 2003 | IO_IO_IN_DELAY_MAX + |
| 2004 | 1) { |
| 2005 | left_edge[i] = -(d + 1); |
| 2006 | } |
| 2007 | } |
| 2008 | } |
| 2009 | |
| 2010 | debug_cond(DLEVEL == 2, "%s:%d vfifo_center[r,\ |
| 2011 | d=%u]: ", __func__, __LINE__, d); |
| 2012 | debug_cond(DLEVEL == 2, "bit_chk_test=%d left_edge[%u]: %d ", |
| 2013 | (int)(bit_chk & 1), i, left_edge[i]); |
| 2014 | debug_cond(DLEVEL == 2, "right_edge[%u]: %d\n", i, |
| 2015 | right_edge[i]); |
| 2016 | bit_chk = bit_chk >> 1; |
| 2017 | } |
| 2018 | } |
| 2019 | } |
| 2020 | |
| 2021 | /* Check that all bits have a window */ |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2022 | for (i = 0; i < RW_MGR_MEM_DQ_PER_READ_DQS; i++) { |
| 2023 | debug_cond(DLEVEL == 2, "%s:%d vfifo_center: left_edge[%u]: \ |
| 2024 | %d right_edge[%u]: %d", __func__, __LINE__, |
| 2025 | i, left_edge[i], i, right_edge[i]); |
| 2026 | if ((left_edge[i] == IO_IO_IN_DELAY_MAX + 1) || (right_edge[i] |
| 2027 | == IO_IO_IN_DELAY_MAX + 1)) { |
| 2028 | /* |
| 2029 | * Restore delay chain settings before letting the loop |
| 2030 | * in rw_mgr_mem_calibrate_vfifo retry different |
| 2031 | * dqs/ck relationships. |
| 2032 | */ |
| 2033 | scc_mgr_set_dqs_bus_in_delay(read_group, start_dqs); |
| 2034 | if (IO_SHIFT_DQS_EN_WHEN_SHIFT_DQS) { |
| 2035 | scc_mgr_set_dqs_en_delay(read_group, |
| 2036 | start_dqs_en); |
| 2037 | } |
| 2038 | scc_mgr_load_dqs(read_group); |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 2039 | writel(0, &sdr_scc_mgr->update); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2040 | |
| 2041 | debug_cond(DLEVEL == 1, "%s:%d vfifo_center: failed to \ |
| 2042 | find edge [%u]: %d %d", __func__, __LINE__, |
| 2043 | i, left_edge[i], right_edge[i]); |
| 2044 | if (use_read_test) { |
| 2045 | set_failing_group_stage(read_group * |
| 2046 | RW_MGR_MEM_DQ_PER_READ_DQS + i, |
| 2047 | CAL_STAGE_VFIFO, |
| 2048 | CAL_SUBSTAGE_VFIFO_CENTER); |
| 2049 | } else { |
| 2050 | set_failing_group_stage(read_group * |
| 2051 | RW_MGR_MEM_DQ_PER_READ_DQS + i, |
| 2052 | CAL_STAGE_VFIFO_AFTER_WRITES, |
| 2053 | CAL_SUBSTAGE_VFIFO_CENTER); |
| 2054 | } |
| 2055 | return 0; |
| 2056 | } |
| 2057 | } |
| 2058 | |
| 2059 | /* Find middle of window for each DQ bit */ |
| 2060 | mid_min = left_edge[0] - right_edge[0]; |
| 2061 | min_index = 0; |
| 2062 | for (i = 1; i < RW_MGR_MEM_DQ_PER_READ_DQS; i++) { |
| 2063 | mid = left_edge[i] - right_edge[i]; |
| 2064 | if (mid < mid_min) { |
| 2065 | mid_min = mid; |
| 2066 | min_index = i; |
| 2067 | } |
| 2068 | } |
| 2069 | |
| 2070 | /* |
| 2071 | * -mid_min/2 represents the amount that we need to move DQS. |
| 2072 | * If mid_min is odd and positive we'll need to add one to |
| 2073 | * make sure the rounding in further calculations is correct |
| 2074 | * (always bias to the right), so just add 1 for all positive values. |
| 2075 | */ |
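| | /* |
| | * Worked example (illustrative, not in the original source): mid_min = 5 |
| | * becomes (5 + 1) / 2 = 3 rather than 5 / 2 = 2, biasing the move to the |
| | * right; a negative value such as -5 is left alone and truncates toward |
| | * zero, -5 / 2 = -2. |
| | */ |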
| 2076 | if (mid_min > 0) |
| 2077 | mid_min++; |
| 2078 | |
| 2079 | mid_min = mid_min / 2; |
| 2080 | |
| 2081 | debug_cond(DLEVEL == 1, "%s:%d vfifo_center: mid_min=%d (index=%u)\n", |
| 2082 | __func__, __LINE__, mid_min, min_index); |
| 2083 | |
| 2084 | /* Determine the amount we can change DQS (which is -mid_min) */ |
| 2085 | orig_mid_min = mid_min; |
| 2086 | new_dqs = start_dqs - mid_min; |
| 2087 | if (new_dqs > IO_DQS_IN_DELAY_MAX) |
| 2088 | new_dqs = IO_DQS_IN_DELAY_MAX; |
| 2089 | else if (new_dqs < 0) |
| 2090 | new_dqs = 0; |
| 2091 | |
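| | /* |
| | * Descriptive note (not in the original source): after clamping new_dqs |
| | * to the valid delay range, recompute mid_min as the DQS shift that will |
| | * actually be applied. |
| | */ |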
| 2092 | mid_min = start_dqs - new_dqs; |
| 2093 | debug_cond(DLEVEL == 1, "vfifo_center: new mid_min=%d new_dqs=%d\n", |
| 2094 | mid_min, new_dqs); |
| 2095 | |
| 2096 | if (IO_SHIFT_DQS_EN_WHEN_SHIFT_DQS) { |
| 2097 | if (start_dqs_en - mid_min > IO_DQS_EN_DELAY_MAX) |
| 2098 | mid_min += start_dqs_en - mid_min - IO_DQS_EN_DELAY_MAX; |
| 2099 | else if (start_dqs_en - mid_min < 0) |
| 2100 | mid_min += start_dqs_en - mid_min; |
| 2101 | } |
| 2102 | new_dqs = start_dqs - mid_min; |
| 2103 | |
| 2104 | debug_cond(DLEVEL == 1, "vfifo_center: start_dqs=%d start_dqs_en=%d \ |
| 2105 | new_dqs=%d mid_min=%d\n", start_dqs, |
| 2106 | IO_SHIFT_DQS_EN_WHEN_SHIFT_DQS ? start_dqs_en : -1, |
| 2107 | new_dqs, mid_min); |
| 2108 | |
| 2109 | /* Initialize data for export structures */ |
| 2110 | dqs_margin = IO_IO_IN_DELAY_MAX + 1; |
| 2111 | dq_margin = IO_IO_IN_DELAY_MAX + 1; |
| 2112 | |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2113 | /* add delay to bring centre of all DQ windows to the same "level" */ |
| 2114 | for (i = 0, p = test_bgn; i < RW_MGR_MEM_DQ_PER_READ_DQS; i++, p++) { |
| 2115 | /* Use values before dividing by 2 to reduce round-off error */ |
| 2116 | shift_dq = (left_edge[i] - right_edge[i] - |
| 2117 | (left_edge[min_index] - right_edge[min_index]))/2 + |
| 2118 | (orig_mid_min - mid_min); |
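| | /* |
| | * Explanatory note (not in the original source): the first term aligns |
| | * this bit's window centre with that of the min_index bit, while the |
| | * (orig_mid_min - mid_min) term moves the DQ delay instead of DQS |
| | * whenever the common DQS move had to be clamped above. |
| | */ |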
| 2119 | |
| 2120 | debug_cond(DLEVEL == 2, "vfifo_center: before: \ |
| 2121 | shift_dq[%u]=%d\n", i, shift_dq); |
| 2122 | |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 2123 | addr = SDR_PHYGRP_SCCGRP_ADDRESS | SCC_MGR_IO_IN_DELAY_OFFSET; |
Marek Vasut | 33acf0f | 2015-07-12 20:05:54 +0200 | [diff] [blame] | 2124 | temp_dq_in_delay1 = readl(addr + (p << 2)); |
| 2125 | temp_dq_in_delay2 = readl(addr + (i << 2)); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2126 | |
| 2127 | if (shift_dq + (int32_t)temp_dq_in_delay1 > |
| 2128 | (int32_t)IO_IO_IN_DELAY_MAX) { |
| 2129 | shift_dq = (int32_t)IO_IO_IN_DELAY_MAX - temp_dq_in_delay2; |
| 2130 | } else if (shift_dq + (int32_t)temp_dq_in_delay1 < 0) { |
| 2131 | shift_dq = -(int32_t)temp_dq_in_delay1; |
| 2132 | } |
| 2133 | debug_cond(DLEVEL == 2, "vfifo_center: after: \ |
| 2134 | shift_dq[%u]=%d\n", i, shift_dq); |
| 2135 | final_dq[i] = temp_dq_in_delay1 + shift_dq; |
Marek Vasut | cab8079 | 2015-07-12 22:07:33 +0200 | [diff] [blame] | 2136 | scc_mgr_set_dq_in_delay(p, final_dq[i]); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2137 | scc_mgr_load_dq(p); |
| 2138 | |
| 2139 | debug_cond(DLEVEL == 2, "vfifo_center: margin[%u]=[%d,%d]\n", i, |
| 2140 | left_edge[i] - shift_dq + (-mid_min), |
| 2141 | right_edge[i] + shift_dq - (-mid_min)); |
| 2142 | /* To determine values for export structures */ |
| 2143 | if (left_edge[i] - shift_dq + (-mid_min) < dq_margin) |
| 2144 | dq_margin = left_edge[i] - shift_dq + (-mid_min); |
| 2145 | |
| 2146 | if (right_edge[i] + shift_dq - (-mid_min) < dqs_margin) |
| 2147 | dqs_margin = right_edge[i] + shift_dq - (-mid_min); |
| 2148 | } |
| 2149 | |
| 2150 | final_dqs = new_dqs; |
| 2151 | if (IO_SHIFT_DQS_EN_WHEN_SHIFT_DQS) |
| 2152 | final_dqs_en = start_dqs_en - mid_min; |
| 2153 | |
| 2154 | /* Move DQS-en */ |
| 2155 | if (IO_SHIFT_DQS_EN_WHEN_SHIFT_DQS) { |
| 2156 | scc_mgr_set_dqs_en_delay(read_group, final_dqs_en); |
| 2157 | scc_mgr_load_dqs(read_group); |
| 2158 | } |
| 2159 | |
| 2160 | /* Move DQS */ |
| 2161 | scc_mgr_set_dqs_bus_in_delay(read_group, final_dqs); |
| 2162 | scc_mgr_load_dqs(read_group); |
| 2163 | debug_cond(DLEVEL == 2, "%s:%d vfifo_center: dq_margin=%d \ |
| 2164 | dqs_margin=%d", __func__, __LINE__, |
| 2165 | dq_margin, dqs_margin); |
| 2166 | |
| 2167 | /* |
| 2168 | * Do not remove this line as it makes sure all of our decisions |
| 2169 | * have been applied. Apply the update bit. |
| 2170 | */ |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 2171 | writel(0, &sdr_scc_mgr->update); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2172 | |
| 2173 | return (dq_margin >= 0) && (dqs_margin >= 0); |
| 2174 | } |
| 2175 | |
| 2176 | /* |
| 2177 | * Calibrate the read valid prediction FIFO. |
| 2178 | * |
| 2179 | * - Read valid prediction consists of finding a good DQS enable phase, |
| 2180 | * DQS enable delay, DQS input phase, and DQS input delay. |
| 2181 | * - We also do a per-bit deskew on the DQ lines. |
| 2182 | */ |
| 2183 | static uint32_t rw_mgr_mem_calibrate_vfifo(uint32_t read_group, |
| 2184 | uint32_t test_bgn) |
| 2185 | { |
| 2186 | uint32_t p, d, rank_bgn, sr; |
| 2187 | uint32_t dtaps_per_ptap; |
| 2188 | uint32_t tmp_delay; |
| 2189 | uint32_t bit_chk; |
| 2190 | uint32_t grp_calibrated; |
| 2191 | uint32_t write_group, write_test_bgn; |
| 2192 | uint32_t failed_substage; |
| 2193 | |
Marek Vasut | 0eacf7e | 2015-06-26 18:56:54 +0200 | [diff] [blame] | 2194 | debug("%s:%d: %u %u\n", __func__, __LINE__, read_group, test_bgn); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2195 | |
| 2196 | /* update info for sims */ |
| 2197 | reg_file_set_stage(CAL_STAGE_VFIFO); |
| 2198 | |
| 2199 | write_group = read_group; |
| 2200 | write_test_bgn = test_bgn; |
| 2201 | |
| 2202 | /* Determine the number of delay taps for each phase tap */ |
| 2203 | dtaps_per_ptap = 0; |
| 2204 | tmp_delay = 0; |
| 2205 | while (tmp_delay < IO_DELAY_PER_OPA_TAP) { |
| 2206 | dtaps_per_ptap++; |
| 2207 | tmp_delay += IO_DELAY_PER_DQS_EN_DCHAIN_TAP; |
| 2208 | } |
| 2209 | dtaps_per_ptap--; |
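| | /* |
| | * Explanatory note (not in the original source): assuming |
| | * IO_DELAY_PER_DQS_EN_DCHAIN_TAP > 0, the loop above is equivalent to |
| | * dtaps_per_ptap = |
| | * (IO_DELAY_PER_OPA_TAP - 1) / IO_DELAY_PER_DQS_EN_DCHAIN_TAP, |
| | * i.e. the largest number of delay-chain taps whose total delay is |
| | * strictly shorter than one phase tap. |
| | */ |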
| 2210 | tmp_delay = 0; |
| 2211 | |
| 2212 | /* update info for sims */ |
| 2213 | reg_file_set_group(read_group); |
| 2214 | |
| 2215 | grp_calibrated = 0; |
| 2216 | |
| 2217 | reg_file_set_sub_stage(CAL_SUBSTAGE_GUARANTEED_READ); |
| 2218 | failed_substage = CAL_SUBSTAGE_GUARANTEED_READ; |
| 2219 | |
| 2220 | for (d = 0; d <= dtaps_per_ptap && grp_calibrated == 0; d += 2) { |
| 2221 | /* |
| 2222 | * In RLDRAMX we may be changing the delay of pins in |
| 2223 | * the same write group but outside of the current |
| 2224 | * read group, but that's OK because we haven't |
| 2225 | * calibrated the output side yet. |
| 2226 | */ |
| 2227 | if (d > 0) { |
Marek Vasut | 788870f | 2015-07-19 02:18:21 +0200 | [diff] [blame] | 2228 | scc_mgr_apply_group_all_out_delay_add_all_ranks( |
| 2229 | write_group, d); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2230 | } |
| 2231 | |
| 2232 | for (p = 0; p <= IO_DQDQS_OUT_PHASE_MAX && grp_calibrated == 0; |
| 2233 | p++) { |
| 2234 | /* set a particular dqdqs phase */ |
| 2235 | scc_mgr_set_dqdqs_output_phase_all_ranks(read_group, p); |
| 2236 | |
| 2237 | debug_cond(DLEVEL == 1, "%s:%d calibrate_vfifo: g=%u \ |
| 2238 | p=%u d=%u\n", __func__, __LINE__, |
| 2239 | read_group, p, d); |
| 2240 | |
| 2241 | /* |
| 2242 | * Load up the patterns used by read calibration |
| 2243 | * using current DQDQS phase. |
| 2244 | */ |
| 2245 | rw_mgr_mem_calibrate_read_load_patterns(0, 1); |
| 2246 | if (!(gbl->phy_debug_mode_flags & |
| 2247 | PHY_DEBUG_DISABLE_GUARANTEED_READ)) { |
| 2248 | if (!rw_mgr_mem_calibrate_read_test_patterns_all_ranks |
| 2249 | (read_group, 1, &bit_chk)) { |
| 2250 | debug_cond(DLEVEL == 1, "%s:%d Guaranteed read test failed:", |
| 2251 | __func__, __LINE__); |
| 2252 | debug_cond(DLEVEL == 1, " g=%u p=%u d=%u\n", |
| 2253 | read_group, p, d); |
| 2254 | break; |
| 2255 | } |
| 2256 | } |
| 2257 | |
| 2258 | /* case:56390 */ |
| 2259 | grp_calibrated = 1; |
| 2260 | if (rw_mgr_mem_calibrate_vfifo_find_dqs_en_phase_sweep_dq_in_delay |
| 2261 | (write_group, read_group, test_bgn)) { |
| 2262 | /* |
| 2263 | * Read per-bit deskew can be done on a |
| 2264 | * per-shadow-register basis. |
| 2265 | */ |
| 2266 | for (rank_bgn = 0, sr = 0; |
| 2267 | rank_bgn < RW_MGR_MEM_NUMBER_OF_RANKS; |
| 2268 | rank_bgn += NUM_RANKS_PER_SHADOW_REG, |
| 2269 | ++sr) { |
| 2270 | /* |
| 2271 | * Determine if this set of ranks |
| 2272 | * should be skipped entirely. |
| 2273 | */ |
| 2274 | if (!param->skip_shadow_regs[sr]) { |
| 2275 | /* |
| 2276 | * If doing read-after-write |
| 2277 | * calibration, do not update |
| 2278 | * the FOM now - do it then. |
| 2279 | */ |
| 2280 | if (!rw_mgr_mem_calibrate_vfifo_center |
| 2281 | (rank_bgn, write_group, |
| 2282 | read_group, test_bgn, 1, 0)) { |
| 2283 | grp_calibrated = 0; |
| 2284 | failed_substage = |
| 2285 | CAL_SUBSTAGE_VFIFO_CENTER; |
| 2286 | } |
| 2287 | } |
| 2288 | } |
| 2289 | } else { |
| 2290 | grp_calibrated = 0; |
| 2291 | failed_substage = CAL_SUBSTAGE_DQS_EN_PHASE; |
| 2292 | } |
| 2293 | } |
| 2294 | } |
| 2295 | |
| 2296 | if (grp_calibrated == 0) { |
| 2297 | set_failing_group_stage(write_group, CAL_STAGE_VFIFO, |
| 2298 | failed_substage); |
| 2299 | return 0; |
| 2300 | } |
| 2301 | |
| 2302 | /* |
| 2303 | * Reset the delay chains back to zero if they have moved > 1 |
| 2304 | * (check for > 2 because the loop increments d by 2 even when |
| 2305 | * the first pass succeeds). |
| 2306 | */ |
| 2307 | if (d > 2) |
Marek Vasut | 62d3c69 | 2015-07-20 08:41:04 +0200 | [diff] [blame] | 2308 | scc_mgr_zero_group(write_group, 1); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2309 | |
| 2310 | return 1; |
| 2311 | } |
| 2312 | |
| 2313 | /* VFIFO Calibration -- Read Deskew Calibration after write deskew */ |
| 2314 | static uint32_t rw_mgr_mem_calibrate_vfifo_end(uint32_t read_group, |
| 2315 | uint32_t test_bgn) |
| 2316 | { |
| 2317 | uint32_t rank_bgn, sr; |
| 2318 | uint32_t grp_calibrated; |
| 2319 | uint32_t write_group; |
| 2320 | |
| 2321 | debug("%s:%d %u %u", __func__, __LINE__, read_group, test_bgn); |
| 2322 | |
| 2323 | /* update info for sims */ |
| 2324 | |
| 2325 | reg_file_set_stage(CAL_STAGE_VFIFO_AFTER_WRITES); |
| 2326 | reg_file_set_sub_stage(CAL_SUBSTAGE_VFIFO_CENTER); |
| 2327 | |
| 2328 | write_group = read_group; |
| 2329 | |
| 2330 | /* update info for sims */ |
| 2331 | reg_file_set_group(read_group); |
| 2332 | |
| 2333 | grp_calibrated = 1; |
| 2334 | /* Read per-bit deskew can be done on a per shadow register basis */ |
| 2335 | for (rank_bgn = 0, sr = 0; rank_bgn < RW_MGR_MEM_NUMBER_OF_RANKS; |
| 2336 | rank_bgn += NUM_RANKS_PER_SHADOW_REG, ++sr) { |
| 2337 | /* Determine if this set of ranks should be skipped entirely */ |
| 2338 | if (!param->skip_shadow_regs[sr]) { |
| 2339 | /* This is the last calibration round, update FOM here */ |
| 2340 | if (!rw_mgr_mem_calibrate_vfifo_center(rank_bgn, |
| 2341 | write_group, |
| 2342 | read_group, |
| 2343 | test_bgn, 0, |
| 2344 | 1)) { |
| 2345 | grp_calibrated = 0; |
| 2346 | } |
| 2347 | } |
| 2348 | } |
| 2349 | |
| 2350 | |
| 2351 | if (grp_calibrated == 0) { |
| 2352 | set_failing_group_stage(write_group, |
| 2353 | CAL_STAGE_VFIFO_AFTER_WRITES, |
| 2354 | CAL_SUBSTAGE_VFIFO_CENTER); |
| 2355 | return 0; |
| 2356 | } |
| 2357 | |
| 2358 | return 1; |
| 2359 | } |
| 2360 | |
| 2361 | /* Calibrate LFIFO to find smallest read latency */ |
| 2362 | static uint32_t rw_mgr_mem_calibrate_lfifo(void) |
| 2363 | { |
| 2364 | uint32_t found_one; |
| 2365 | uint32_t bit_chk; |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2366 | |
| 2367 | debug("%s:%d\n", __func__, __LINE__); |
| 2368 | |
| 2369 | /* update info for sims */ |
| 2370 | reg_file_set_stage(CAL_STAGE_LFIFO); |
| 2371 | reg_file_set_sub_stage(CAL_SUBSTAGE_READ_LATENCY); |
| 2372 | |
| 2373 | /* Load up the patterns used by read calibration for all ranks */ |
| 2374 | rw_mgr_mem_calibrate_read_load_patterns(0, 1); |
| 2375 | found_one = 0; |
| 2376 | |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2377 | do { |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 2378 | writel(gbl->curr_read_lat, &phy_mgr_cfg->phy_rlat); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2379 | debug_cond(DLEVEL == 2, "%s:%d lfifo: read_lat=%u", |
| 2380 | __func__, __LINE__, gbl->curr_read_lat); |
| 2381 | |
| 2382 | if (!rw_mgr_mem_calibrate_read_test_all_ranks(0, |
| 2383 | NUM_READ_TESTS, |
| 2384 | PASS_ALL_BITS, |
| 2385 | &bit_chk, 1)) { |
| 2386 | break; |
| 2387 | } |
| 2388 | |
| 2389 | found_one = 1; |
| 2390 | 		/* Reduce read latency and see if things are working correctly. */ |
| 2392 | gbl->curr_read_lat--; |
| 2393 | } while (gbl->curr_read_lat > 0); |
| 2394 | |
| 2395 | /* reset the fifos to get pointers to known state */ |
| 2396 | |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 2397 | writel(0, &phy_mgr_cmd->fifo_reset); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2398 | |
| 2399 | if (found_one) { |
| 2400 | /* add a fudge factor to the read latency that was determined */ |
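| | 		/*
| | 		 * At this point curr_read_lat holds either the first latency
| | 		 * that failed or 0, so adding 2 restores the last passing
| | 		 * value plus one extra cycle of safety margin.
| | 		 */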
| 2401 | gbl->curr_read_lat += 2; |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 2402 | writel(gbl->curr_read_lat, &phy_mgr_cfg->phy_rlat); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2403 | debug_cond(DLEVEL == 2, "%s:%d lfifo: success: using \ |
| 2404 | read_lat=%u\n", __func__, __LINE__, |
| 2405 | gbl->curr_read_lat); |
| 2406 | return 1; |
| 2407 | } else { |
| 2408 | set_failing_group_stage(0xff, CAL_STAGE_LFIFO, |
| 2409 | CAL_SUBSTAGE_READ_LATENCY); |
| 2410 | |
| 2411 | debug_cond(DLEVEL == 2, "%s:%d lfifo: failed at initial \ |
| 2412 | read_lat=%u\n", __func__, __LINE__, |
| 2413 | gbl->curr_read_lat); |
| 2414 | return 0; |
| 2415 | } |
| 2416 | } |
| 2417 | |
| 2418 | /* |
| 2419 |  * Issue a write test command. |
| 2420 |  * Two variants are provided: one that just tests a write pattern and |
| 2421 |  * another that tests datamask functionality. |
| 2422 | */ |
| 2423 | static void rw_mgr_mem_calibrate_write_test_issue(uint32_t group, |
| 2424 | uint32_t test_dm) |
| 2425 | { |
| 2426 | uint32_t mcc_instruction; |
| 2427 | uint32_t quick_write_mode = (((STATIC_CALIB_STEPS) & CALIB_SKIP_WRITES) && |
| 2428 | ENABLE_SUPER_QUICK_CALIBRATION); |
| 2429 | uint32_t rw_wl_nop_cycles; |
| 2430 | uint32_t addr; |
| 2431 | |
| 2432 | /* |
| 2433 | * Set counter and jump addresses for the right |
| 2434 | * number of NOP cycles. |
| 2435 | * The number of supported NOP cycles can range from -1 to infinity |
| 2436 | * Three different cases are handled: |
| 2437 | * |
| 2438 | * 1. For a number of NOP cycles greater than 0, the RW Mgr looping |
| 2439 | * mechanism will be used to insert the right number of NOPs |
| 2440 | * |
| 2441 | 	 * 2. For a number of NOP cycles equal to 0, the micro-instruction |
| 2442 | 	 * issuing the write command will jump straight to the |
| 2443 | 	 * micro-instruction that turns on DQS (for DDRx), or outputs write |
| 2444 | 	 * data (for RLD), skipping the NOP micro-instruction |
| 2445 | 	 * altogether. |
| 2446 | 	 * |
| 2447 | 	 * 3. A number of NOP cycles equal to -1 indicates that DQS must be |
| 2448 | 	 * turned on in the same micro-instruction that issues the write |
| 2449 | 	 * command. We then need to jump directly to the |
| 2450 | 	 * micro-instruction that sends out the data. |
| 2451 | 	 * |
| 2452 | 	 * NOTE: Implementing this mechanism uses 2 RW Mgr jump-counters |
| 2453 | 	 * (2 and 3). One jump-counter (0) is used to perform multiple |
| 2454 | 	 * write-read operations. |
| 2455 | 	 * One counter is left to issue this command in "multiple-group" mode. |
| 2456 | */ |
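| | 	/*
| | 	 * Illustrative example (case 1 above): with rw_wl_nop_cycles == 3,
| | 	 * CNTR 2 is programmed to 0 so its jump is never taken, and CNTR 3
| | 	 * is programmed to 3 - 1 = 2 so the NOP micro-instruction loops for
| | 	 * the required number of cycles before the data is sent.
| | 	 */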
| 2457 | |
| 2458 | rw_wl_nop_cycles = gbl->rw_wl_nop_cycles; |
| 2459 | |
| 2460 | if (rw_wl_nop_cycles == -1) { |
| 2461 | /* |
| 2462 | * CNTR 2 - We want to execute the special write operation that |
| 2463 | * turns on DQS right away and then skip directly to the |
| 2464 | * instruction that sends out the data. We set the counter to a |
| 2465 | * large number so that the jump is always taken. |
| 2466 | */ |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 2467 | writel(0xFF, &sdr_rw_load_mgr_regs->load_cntr2); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2468 | |
| 2469 | /* CNTR 3 - Not used */ |
| 2470 | if (test_dm) { |
| 2471 | mcc_instruction = RW_MGR_LFSR_WR_RD_DM_BANK_0_WL_1; |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2472 | writel(RW_MGR_LFSR_WR_RD_DM_BANK_0_DATA, |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 2473 | &sdr_rw_load_jump_mgr_regs->load_jump_add2); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2474 | writel(RW_MGR_LFSR_WR_RD_DM_BANK_0_NOP, |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 2475 | &sdr_rw_load_jump_mgr_regs->load_jump_add3); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2476 | } else { |
| 2477 | mcc_instruction = RW_MGR_LFSR_WR_RD_BANK_0_WL_1; |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 2478 | writel(RW_MGR_LFSR_WR_RD_BANK_0_DATA, |
| 2479 | &sdr_rw_load_jump_mgr_regs->load_jump_add2); |
| 2480 | writel(RW_MGR_LFSR_WR_RD_BANK_0_NOP, |
| 2481 | &sdr_rw_load_jump_mgr_regs->load_jump_add3); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2482 | } |
| 2483 | } else if (rw_wl_nop_cycles == 0) { |
| 2484 | /* |
| 2485 | * CNTR 2 - We want to skip the NOP operation and go straight |
| 2486 | * to the DQS enable instruction. We set the counter to a large |
| 2487 | * number so that the jump is always taken. |
| 2488 | */ |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 2489 | writel(0xFF, &sdr_rw_load_mgr_regs->load_cntr2); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2490 | |
| 2491 | /* CNTR 3 - Not used */ |
| 2492 | if (test_dm) { |
| 2493 | mcc_instruction = RW_MGR_LFSR_WR_RD_DM_BANK_0; |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2494 | writel(RW_MGR_LFSR_WR_RD_DM_BANK_0_DQS, |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 2495 | &sdr_rw_load_jump_mgr_regs->load_jump_add2); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2496 | } else { |
| 2497 | mcc_instruction = RW_MGR_LFSR_WR_RD_BANK_0; |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 2498 | writel(RW_MGR_LFSR_WR_RD_BANK_0_DQS, |
| 2499 | &sdr_rw_load_jump_mgr_regs->load_jump_add2); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2500 | } |
| 2501 | } else { |
| 2502 | /* |
| 2503 | * CNTR 2 - In this case we want to execute the next instruction |
| 2504 | * and NOT take the jump. So we set the counter to 0. The jump |
| 2505 | * address doesn't count. |
| 2506 | */ |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 2507 | writel(0x0, &sdr_rw_load_mgr_regs->load_cntr2); |
| 2508 | writel(0x0, &sdr_rw_load_jump_mgr_regs->load_jump_add2); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2509 | |
| 2510 | /* |
| 2511 | * CNTR 3 - Set the nop counter to the number of cycles we |
| 2512 | * need to loop for, minus 1. |
| 2513 | */ |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 2514 | writel(rw_wl_nop_cycles - 1, &sdr_rw_load_mgr_regs->load_cntr3); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2515 | if (test_dm) { |
| 2516 | mcc_instruction = RW_MGR_LFSR_WR_RD_DM_BANK_0; |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 2517 | writel(RW_MGR_LFSR_WR_RD_DM_BANK_0_NOP, |
| 2518 | &sdr_rw_load_jump_mgr_regs->load_jump_add3); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2519 | } else { |
| 2520 | mcc_instruction = RW_MGR_LFSR_WR_RD_BANK_0; |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 2521 | writel(RW_MGR_LFSR_WR_RD_BANK_0_NOP, |
| 2522 | &sdr_rw_load_jump_mgr_regs->load_jump_add3); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2523 | } |
| 2524 | } |
| 2525 | |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 2526 | writel(0, SDR_PHYGRP_RWMGRGRP_ADDRESS | |
| 2527 | RW_MGR_RESET_READ_DATAPATH_OFFSET); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2528 | |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2529 | if (quick_write_mode) |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 2530 | writel(0x08, &sdr_rw_load_mgr_regs->load_cntr0); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2531 | else |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 2532 | writel(0x40, &sdr_rw_load_mgr_regs->load_cntr0); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2533 | |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 2534 | writel(mcc_instruction, &sdr_rw_load_jump_mgr_regs->load_jump_add0); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2535 | |
| 2536 | /* |
| 2537 | * CNTR 1 - This is used to ensure enough time elapses |
| 2538 | * for read data to come back. |
| 2539 | */ |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 2540 | writel(0x30, &sdr_rw_load_mgr_regs->load_cntr1); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2541 | |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2542 | if (test_dm) { |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 2543 | writel(RW_MGR_LFSR_WR_RD_DM_BANK_0_WAIT, |
| 2544 | &sdr_rw_load_jump_mgr_regs->load_jump_add1); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2545 | } else { |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 2546 | writel(RW_MGR_LFSR_WR_RD_BANK_0_WAIT, |
| 2547 | &sdr_rw_load_jump_mgr_regs->load_jump_add1); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2548 | } |
| 2549 | |
Marek Vasut | a334010 | 2015-07-12 19:03:33 +0200 | [diff] [blame] | 2550 | addr = SDR_PHYGRP_RWMGRGRP_ADDRESS | RW_MGR_RUN_SINGLE_GROUP_OFFSET; |
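| | 	/*
| | 	 * Kick off the sequence: writing the selected instruction to the
| | 	 * per-group RUN_SINGLE_GROUP register (one 32-bit slot per group,
| | 	 * hence the group << 2 offset) starts the write test for this group.
| | 	 */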
Marek Vasut | 33acf0f | 2015-07-12 20:05:54 +0200 | [diff] [blame] | 2551 | writel(mcc_instruction, addr + (group << 2)); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2552 | } |
| 2553 | |
| 2554 | /* Test writes, can check for a single bit pass or multiple bit pass */ |
| 2555 | static uint32_t rw_mgr_mem_calibrate_write_test(uint32_t rank_bgn, |
| 2556 | uint32_t write_group, uint32_t use_dm, uint32_t all_correct, |
| 2557 | uint32_t *bit_chk, uint32_t all_ranks) |
| 2558 | { |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2559 | uint32_t r; |
| 2560 | uint32_t correct_mask_vg; |
| 2561 | uint32_t tmp_bit_chk; |
| 2562 | uint32_t vg; |
| 2563 | uint32_t rank_end = all_ranks ? RW_MGR_MEM_NUMBER_OF_RANKS : |
| 2564 | (rank_bgn + NUM_RANKS_PER_SHADOW_REG); |
| 2565 | uint32_t addr_rw_mgr; |
| 2566 | uint32_t base_rw_mgr; |
| 2567 | |
| 2568 | *bit_chk = param->write_correct_mask; |
| 2569 | correct_mask_vg = param->write_correct_mask_vg; |
| 2570 | |
| 2571 | for (r = rank_bgn; r < rank_end; r++) { |
| 2572 | if (param->skip_ranks[r]) { |
| 2573 | /* request to skip the rank */ |
| 2574 | continue; |
| 2575 | } |
| 2576 | |
| 2577 | /* set rank */ |
| 2578 | set_rank_and_odt_mask(r, RW_MGR_ODT_MODE_READ_WRITE); |
| 2579 | |
| 2580 | tmp_bit_chk = 0; |
Marek Vasut | 1fa9589 | 2015-07-12 17:52:36 +0200 | [diff] [blame] | 2581 | addr_rw_mgr = SDR_PHYGRP_RWMGRGRP_ADDRESS; |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2582 | for (vg = RW_MGR_MEM_VIRTUAL_GROUPS_PER_WRITE_DQS-1; ; vg--) { |
| 2583 | /* reset the fifos to get pointers to known state */ |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 2584 | writel(0, &phy_mgr_cmd->fifo_reset); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2585 | |
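| | 			/*
| | 			 * Make room for this virtual group's result, then OR in
| | 			 * its per-bit pass mask: the RW manager status read below
| | 			 * is inverted and masked, so a set bit in tmp_bit_chk
| | 			 * marks a DQ bit that passed the write test.
| | 			 */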
| 2586 | tmp_bit_chk = tmp_bit_chk << |
| 2587 | (RW_MGR_MEM_DQ_PER_WRITE_DQS / |
| 2588 | RW_MGR_MEM_VIRTUAL_GROUPS_PER_WRITE_DQS); |
| 2589 | rw_mgr_mem_calibrate_write_test_issue(write_group * |
| 2590 | RW_MGR_MEM_VIRTUAL_GROUPS_PER_WRITE_DQS+vg, |
| 2591 | use_dm); |
| 2592 | |
Marek Vasut | 33acf0f | 2015-07-12 20:05:54 +0200 | [diff] [blame] | 2593 | base_rw_mgr = readl(addr_rw_mgr); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2594 | tmp_bit_chk = tmp_bit_chk | (correct_mask_vg & ~(base_rw_mgr)); |
| 2595 | if (vg == 0) |
| 2596 | break; |
| 2597 | } |
| 2598 | *bit_chk &= tmp_bit_chk; |
| 2599 | } |
| 2600 | |
| 2601 | if (all_correct) { |
| 2602 | set_rank_and_odt_mask(0, RW_MGR_ODT_MODE_OFF); |
| 2603 | debug_cond(DLEVEL == 2, "write_test(%u,%u,ALL) : %u == \ |
| 2604 | %u => %lu", write_group, use_dm, |
| 2605 | *bit_chk, param->write_correct_mask, |
| 2606 | (long unsigned int)(*bit_chk == |
| 2607 | param->write_correct_mask)); |
| 2608 | return *bit_chk == param->write_correct_mask; |
| 2609 | } else { |
| 2610 | set_rank_and_odt_mask(0, RW_MGR_ODT_MODE_OFF); |
| 2611 | debug_cond(DLEVEL == 2, "write_test(%u,%u,ONE) : %u != ", |
| 2612 | write_group, use_dm, *bit_chk); |
| 2613 | debug_cond(DLEVEL == 2, "%lu" " => %lu", (long unsigned int)0, |
| 2614 | (long unsigned int)(*bit_chk != 0)); |
| 2615 | return *bit_chk != 0x00; |
| 2616 | } |
| 2617 | } |
| 2618 | |
| 2619 | /* |
| 2620 |  * Center all windows. Do per-bit deskew to possibly increase the size of |
| 2621 |  * certain windows. |
| 2622 | */ |
| 2623 | static uint32_t rw_mgr_mem_calibrate_writes_center(uint32_t rank_bgn, |
| 2624 | uint32_t write_group, uint32_t test_bgn) |
| 2625 | { |
| 2626 | uint32_t i, p, min_index; |
| 2627 | int32_t d; |
| 2628 | /* |
| 2629 | * Store these as signed since there are comparisons with |
| 2630 | * signed numbers. |
| 2631 | */ |
| 2632 | uint32_t bit_chk; |
| 2633 | uint32_t sticky_bit_chk; |
| 2634 | int32_t left_edge[RW_MGR_MEM_DQ_PER_WRITE_DQS]; |
| 2635 | int32_t right_edge[RW_MGR_MEM_DQ_PER_WRITE_DQS]; |
| 2636 | int32_t mid; |
| 2637 | int32_t mid_min, orig_mid_min; |
| 2638 | int32_t new_dqs, start_dqs, shift_dq; |
| 2639 | int32_t dq_margin, dqs_margin, dm_margin; |
| 2640 | uint32_t stop; |
| 2641 | uint32_t temp_dq_out1_delay; |
| 2642 | uint32_t addr; |
| 2643 | |
| 2644 | debug("%s:%d %u %u", __func__, __LINE__, write_group, test_bgn); |
| 2645 | |
| 2646 | dm_margin = 0; |
| 2647 | |
Marek Vasut | a334010 | 2015-07-12 19:03:33 +0200 | [diff] [blame] | 2648 | addr = SDR_PHYGRP_SCCGRP_ADDRESS | SCC_MGR_IO_OUT1_DELAY_OFFSET; |
Marek Vasut | 33acf0f | 2015-07-12 20:05:54 +0200 | [diff] [blame] | 2649 | start_dqs = readl(addr + |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2650 | (RW_MGR_MEM_DQ_PER_WRITE_DQS << 2)); |
| 2651 | |
| 2652 | /* per-bit deskew */ |
| 2653 | |
| 2654 | /* |
| 2655 | * set the left and right edge of each bit to an illegal value |
| 2656 | * use (IO_IO_OUT1_DELAY_MAX + 1) as an illegal value. |
| 2657 | */ |
| 2658 | sticky_bit_chk = 0; |
| 2659 | for (i = 0; i < RW_MGR_MEM_DQ_PER_WRITE_DQS; i++) { |
| 2660 | left_edge[i] = IO_IO_OUT1_DELAY_MAX + 1; |
| 2661 | right_edge[i] = IO_IO_OUT1_DELAY_MAX + 1; |
| 2662 | } |
| 2663 | |
| 2664 | /* Search for the left edge of the window for each bit */ |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2665 | for (d = 0; d <= IO_IO_OUT1_DELAY_MAX; d++) { |
Marek Vasut | cd64950 | 2015-07-17 05:42:49 +0200 | [diff] [blame] | 2666 | scc_mgr_apply_group_dq_out1_delay(write_group, d); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2667 | |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 2668 | writel(0, &sdr_scc_mgr->update); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2669 | |
| 2670 | /* |
| 2671 | * Stop searching when the read test doesn't pass AND when |
| 2672 | * we've seen a passing read on every bit. |
| 2673 | */ |
| 2674 | stop = !rw_mgr_mem_calibrate_write_test(rank_bgn, write_group, |
| 2675 | 0, PASS_ONE_BIT, &bit_chk, 0); |
| 2676 | sticky_bit_chk = sticky_bit_chk | bit_chk; |
| 2677 | stop = stop && (sticky_bit_chk == param->write_correct_mask); |
| 2678 | debug_cond(DLEVEL == 2, "write_center(left): dtap=%d => %u \ |
| 2679 | == %u && %u [bit_chk= %u ]\n", |
| 2680 | d, sticky_bit_chk, param->write_correct_mask, |
| 2681 | stop, bit_chk); |
| 2682 | |
| 2683 | if (stop == 1) { |
| 2684 | break; |
| 2685 | } else { |
| 2686 | for (i = 0; i < RW_MGR_MEM_DQ_PER_WRITE_DQS; i++) { |
| 2687 | if (bit_chk & 1) { |
| 2688 | /* |
| 2689 | * Remember a passing test as the |
| 2690 | * left_edge. |
| 2691 | */ |
| 2692 | left_edge[i] = d; |
| 2693 | } else { |
| 2694 | /* |
| 2695 | * If a left edge has not been seen |
| 2696 | * yet, then a future passing test will |
| 2697 | * mark this edge as the right edge. |
| 2698 | */ |
| 2699 | if (left_edge[i] == |
| 2700 | IO_IO_OUT1_DELAY_MAX + 1) { |
| 2701 | right_edge[i] = -(d + 1); |
| 2702 | } |
| 2703 | } |
| 2704 | 				debug_cond(DLEVEL == 2, "write_center(l,d=%d):", d); |
| 2705 | debug_cond(DLEVEL == 2, "bit_chk_test=%d left_edge[%u]: %d", |
| 2706 | (int)(bit_chk & 1), i, left_edge[i]); |
| 2707 | debug_cond(DLEVEL == 2, "right_edge[%u]: %d\n", i, |
| 2708 | right_edge[i]); |
| 2709 | bit_chk = bit_chk >> 1; |
| 2710 | } |
| 2711 | } |
| 2712 | } |
| 2713 | |
| 2714 | /* Reset DQ delay chains to 0 */ |
Marek Vasut | 122e1f3 | 2015-07-17 06:07:13 +0200 | [diff] [blame] | 2715 | scc_mgr_apply_group_dq_out1_delay(0); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2716 | sticky_bit_chk = 0; |
| 2717 | for (i = RW_MGR_MEM_DQ_PER_WRITE_DQS - 1;; i--) { |
| 2718 | debug_cond(DLEVEL == 2, "%s:%d write_center: left_edge[%u]: \ |
| 2719 | %d right_edge[%u]: %d\n", __func__, __LINE__, |
| 2720 | i, left_edge[i], i, right_edge[i]); |
| 2721 | |
| 2722 | /* |
| 2723 | * Check for cases where we haven't found the left edge, |
| 2724 | 		 * which makes our assignment of the right edge invalid. |
| 2725 | * Reset it to the illegal value. |
| 2726 | */ |
| 2727 | if ((left_edge[i] == IO_IO_OUT1_DELAY_MAX + 1) && |
| 2728 | (right_edge[i] != IO_IO_OUT1_DELAY_MAX + 1)) { |
| 2729 | right_edge[i] = IO_IO_OUT1_DELAY_MAX + 1; |
| 2730 | debug_cond(DLEVEL == 2, "%s:%d write_center: reset \ |
| 2731 | right_edge[%u]: %d\n", __func__, __LINE__, |
| 2732 | i, right_edge[i]); |
| 2733 | } |
| 2734 | |
| 2735 | /* |
| 2736 | * Reset sticky bit (except for bits where we have |
| 2737 | * seen the left edge). |
| 2738 | */ |
| 2739 | sticky_bit_chk = sticky_bit_chk << 1; |
| 2740 | if ((left_edge[i] != IO_IO_OUT1_DELAY_MAX + 1)) |
| 2741 | sticky_bit_chk = sticky_bit_chk | 1; |
| 2742 | |
| 2743 | if (i == 0) |
| 2744 | break; |
| 2745 | } |
| 2746 | |
| 2747 | /* Search for the right edge of the window for each bit */ |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2748 | for (d = 0; d <= IO_IO_OUT1_DELAY_MAX - start_dqs; d++) { |
| 2749 | scc_mgr_apply_group_dqs_io_and_oct_out1(write_group, |
| 2750 | d + start_dqs); |
| 2751 | |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 2752 | writel(0, &sdr_scc_mgr->update); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2753 | |
| 2754 | /* |
| 2755 | * Stop searching when the read test doesn't pass AND when |
| 2756 | * we've seen a passing read on every bit. |
| 2757 | */ |
| 2758 | stop = !rw_mgr_mem_calibrate_write_test(rank_bgn, write_group, |
| 2759 | 0, PASS_ONE_BIT, &bit_chk, 0); |
| 2760 | |
| 2761 | sticky_bit_chk = sticky_bit_chk | bit_chk; |
| 2762 | stop = stop && (sticky_bit_chk == param->write_correct_mask); |
| 2763 | |
| 2764 | debug_cond(DLEVEL == 2, "write_center (right): dtap=%u => %u == \ |
| 2765 | %u && %u\n", d, sticky_bit_chk, |
| 2766 | param->write_correct_mask, stop); |
| 2767 | |
| 2768 | if (stop == 1) { |
| 2769 | if (d == 0) { |
| 2770 | for (i = 0; i < RW_MGR_MEM_DQ_PER_WRITE_DQS; |
| 2771 | i++) { |
| 2772 | 					/* d = 0 failed, but it passed when |
| 2773 | 					 * testing the left edge, so it must be |
| 2774 | 					 * marginal; set it to -1. */ |
| 2775 | if (right_edge[i] == |
| 2776 | IO_IO_OUT1_DELAY_MAX + 1 && |
| 2777 | left_edge[i] != |
| 2778 | IO_IO_OUT1_DELAY_MAX + 1) { |
| 2779 | right_edge[i] = -1; |
| 2780 | } |
| 2781 | } |
| 2782 | } |
| 2783 | break; |
| 2784 | } else { |
| 2785 | for (i = 0; i < RW_MGR_MEM_DQ_PER_WRITE_DQS; i++) { |
| 2786 | if (bit_chk & 1) { |
| 2787 | /* |
| 2788 | * Remember a passing test as |
| 2789 | * the right_edge. |
| 2790 | */ |
| 2791 | right_edge[i] = d; |
| 2792 | } else { |
| 2793 | if (d != 0) { |
| 2794 | /* |
| 2795 | * If a right edge has not |
| 2796 | * been seen yet, then a future |
| 2797 | * passing test will mark this |
| 2798 | * edge as the left edge. |
| 2799 | */ |
| 2800 | if (right_edge[i] == |
| 2801 | IO_IO_OUT1_DELAY_MAX + 1) |
| 2802 | left_edge[i] = -(d + 1); |
| 2803 | } else { |
| 2804 | /* |
| 2805 | * d = 0 failed, but it passed |
| 2806 | * when testing the left edge, |
| 2807 | * so it must be marginal, set |
| 2808 | * it to -1. |
| 2809 | */ |
| 2810 | if (right_edge[i] == |
| 2811 | IO_IO_OUT1_DELAY_MAX + 1 && |
| 2812 | left_edge[i] != |
| 2813 | IO_IO_OUT1_DELAY_MAX + 1) |
| 2814 | right_edge[i] = -1; |
| 2815 | /* |
| 2816 | * If a right edge has not been |
| 2817 | * seen yet, then a future |
| 2818 | * passing test will mark this |
| 2819 | * edge as the left edge. |
| 2820 | */ |
| 2821 | else if (right_edge[i] == |
| 2822 | IO_IO_OUT1_DELAY_MAX + |
| 2823 | 1) |
| 2824 | left_edge[i] = -(d + 1); |
| 2825 | } |
| 2826 | } |
| 2827 | 				debug_cond(DLEVEL == 2, "write_center(r,d=%d):", d); |
| 2828 | debug_cond(DLEVEL == 2, "bit_chk_test=%d left_edge[%u]: %d", |
| 2829 | (int)(bit_chk & 1), i, left_edge[i]); |
| 2830 | debug_cond(DLEVEL == 2, "right_edge[%u]: %d\n", i, |
| 2831 | right_edge[i]); |
| 2832 | bit_chk = bit_chk >> 1; |
| 2833 | } |
| 2834 | } |
| 2835 | } |
| 2836 | |
| 2837 | /* Check that all bits have a window */ |
| 2838 | for (i = 0; i < RW_MGR_MEM_DQ_PER_WRITE_DQS; i++) { |
| 2839 | debug_cond(DLEVEL == 2, "%s:%d write_center: left_edge[%u]: \ |
| 2840 | %d right_edge[%u]: %d", __func__, __LINE__, |
| 2841 | i, left_edge[i], i, right_edge[i]); |
| 2842 | if ((left_edge[i] == IO_IO_OUT1_DELAY_MAX + 1) || |
| 2843 | (right_edge[i] == IO_IO_OUT1_DELAY_MAX + 1)) { |
| 2844 | set_failing_group_stage(test_bgn + i, |
| 2845 | CAL_STAGE_WRITES, |
| 2846 | CAL_SUBSTAGE_WRITES_CENTER); |
| 2847 | return 0; |
| 2848 | } |
| 2849 | } |
| 2850 | |
| 2851 | /* Find middle of window for each DQ bit */ |
| 2852 | mid_min = left_edge[0] - right_edge[0]; |
| 2853 | min_index = 0; |
| 2854 | for (i = 1; i < RW_MGR_MEM_DQ_PER_WRITE_DQS; i++) { |
| 2855 | mid = left_edge[i] - right_edge[i]; |
| 2856 | if (mid < mid_min) { |
| 2857 | mid_min = mid; |
| 2858 | min_index = i; |
| 2859 | } |
| 2860 | } |
| 2861 | |
| 2862 | /* |
| 2863 | * -mid_min/2 represents the amount that we need to move DQS. |
| 2864 | * If mid_min is odd and positive we'll need to add one to |
| 2865 | * make sure the rounding in further calculations is correct |
| 2866 | * (always bias to the right), so just add 1 for all positive values. |
| 2867 | */ |
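| | 	/*
| | 	 * Worked example: left_edge - right_edge == 5 gives mid_min = 5;
| | 	 * adding 1 before the divide yields 6 / 2 = 3, i.e. the rounding is
| | 	 * biased to the right rather than truncated down to 2.
| | 	 */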
| 2868 | if (mid_min > 0) |
| 2869 | mid_min++; |
| 2870 | mid_min = mid_min / 2; |
| 2871 | debug_cond(DLEVEL == 1, "%s:%d write_center: mid_min=%d\n", __func__, |
| 2872 | __LINE__, mid_min); |
| 2873 | |
| 2874 | /* Determine the amount we can change DQS (which is -mid_min) */ |
| 2875 | orig_mid_min = mid_min; |
| 2876 | new_dqs = start_dqs; |
| 2877 | mid_min = 0; |
| 2878 | debug_cond(DLEVEL == 1, "%s:%d write_center: start_dqs=%d new_dqs=%d \ |
| 2879 | mid_min=%d\n", __func__, __LINE__, start_dqs, new_dqs, mid_min); |
| 2880 | /* Initialize data for export structures */ |
| 2881 | dqs_margin = IO_IO_OUT1_DELAY_MAX + 1; |
| 2882 | dq_margin = IO_IO_OUT1_DELAY_MAX + 1; |
| 2883 | |
| 2884 | /* add delay to bring centre of all DQ windows to the same "level" */ |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2885 | for (i = 0, p = test_bgn; i < RW_MGR_MEM_DQ_PER_WRITE_DQS; i++, p++) { |
| 2886 | /* Use values before divide by 2 to reduce round off error */ |
| 2887 | shift_dq = (left_edge[i] - right_edge[i] - |
| 2888 | (left_edge[min_index] - right_edge[min_index]))/2 + |
| 2889 | (orig_mid_min - mid_min); |
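| | 		/*
| | 		 * shift_dq moves this bit's output delay so that its window
| | 		 * centre lines up with the centre of the narrowest window
| | 		 * (min_index), plus whatever DQS adjustment was not applied
| | 		 * (orig_mid_min - mid_min). It is clamped to the legal
| | 		 * delay-chain range just below.
| | 		 */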
| 2890 | |
| 2891 | debug_cond(DLEVEL == 2, "%s:%d write_center: before: shift_dq \ |
| 2892 | [%u]=%d\n", __func__, __LINE__, i, shift_dq); |
| 2893 | |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 2894 | addr = SDR_PHYGRP_SCCGRP_ADDRESS | SCC_MGR_IO_OUT1_DELAY_OFFSET; |
Marek Vasut | 33acf0f | 2015-07-12 20:05:54 +0200 | [diff] [blame] | 2895 | temp_dq_out1_delay = readl(addr + (i << 2)); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2896 | if (shift_dq + (int32_t)temp_dq_out1_delay > |
| 2897 | (int32_t)IO_IO_OUT1_DELAY_MAX) { |
| 2898 | shift_dq = (int32_t)IO_IO_OUT1_DELAY_MAX - temp_dq_out1_delay; |
| 2899 | } else if (shift_dq + (int32_t)temp_dq_out1_delay < 0) { |
| 2900 | shift_dq = -(int32_t)temp_dq_out1_delay; |
| 2901 | } |
| 2902 | debug_cond(DLEVEL == 2, "write_center: after: shift_dq[%u]=%d\n", |
| 2903 | i, shift_dq); |
Marek Vasut | cab8079 | 2015-07-12 22:07:33 +0200 | [diff] [blame] | 2904 | scc_mgr_set_dq_out1_delay(i, temp_dq_out1_delay + shift_dq); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2905 | scc_mgr_load_dq(i); |
| 2906 | |
| 2907 | debug_cond(DLEVEL == 2, "write_center: margin[%u]=[%d,%d]\n", i, |
| 2908 | left_edge[i] - shift_dq + (-mid_min), |
| 2909 | right_edge[i] + shift_dq - (-mid_min)); |
| 2910 | /* To determine values for export structures */ |
| 2911 | if (left_edge[i] - shift_dq + (-mid_min) < dq_margin) |
| 2912 | dq_margin = left_edge[i] - shift_dq + (-mid_min); |
| 2913 | |
| 2914 | if (right_edge[i] + shift_dq - (-mid_min) < dqs_margin) |
| 2915 | dqs_margin = right_edge[i] + shift_dq - (-mid_min); |
| 2916 | } |
| 2917 | |
| 2918 | /* Move DQS */ |
| 2919 | scc_mgr_apply_group_dqs_io_and_oct_out1(write_group, new_dqs); |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 2920 | writel(0, &sdr_scc_mgr->update); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2921 | |
| 2922 | /* Centre DM */ |
| 2923 | debug_cond(DLEVEL == 2, "%s:%d write_center: DM\n", __func__, __LINE__); |
| 2924 | |
| 2925 | /* |
| 2926 | * set the left and right edge of each bit to an illegal value, |
| 2927 | * use (IO_IO_OUT1_DELAY_MAX + 1) as an illegal value, |
| 2928 | */ |
| 2929 | left_edge[0] = IO_IO_OUT1_DELAY_MAX + 1; |
| 2930 | right_edge[0] = IO_IO_OUT1_DELAY_MAX + 1; |
| 2931 | int32_t bgn_curr = IO_IO_OUT1_DELAY_MAX + 1; |
| 2932 | int32_t end_curr = IO_IO_OUT1_DELAY_MAX + 1; |
| 2933 | int32_t bgn_best = IO_IO_OUT1_DELAY_MAX + 1; |
| 2934 | int32_t end_best = IO_IO_OUT1_DELAY_MAX + 1; |
| 2935 | int32_t win_best = 0; |
| 2936 | |
| 2937 | 	/* Search for the window (or part of it) with DM shifts */ |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2938 | for (d = IO_IO_OUT1_DELAY_MAX; d >= 0; d -= DELTA_D) { |
Marek Vasut | 122e1f3 | 2015-07-17 06:07:13 +0200 | [diff] [blame] | 2939 | scc_mgr_apply_group_dm_out1_delay(d); |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 2940 | writel(0, &sdr_scc_mgr->update); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2941 | |
| 2942 | if (rw_mgr_mem_calibrate_write_test(rank_bgn, write_group, 1, |
| 2943 | PASS_ALL_BITS, &bit_chk, |
| 2944 | 0)) { |
| 2945 | 			/* Set the current end of the window */ |
| 2946 | end_curr = -d; |
| 2947 | /* |
| 2948 | 			 * If a starting edge of our window has not been seen, |
| 2949 | 			 * this is the current start of the DM window. |
| 2950 | */ |
| 2951 | if (bgn_curr == IO_IO_OUT1_DELAY_MAX + 1) |
| 2952 | bgn_curr = -d; |
| 2953 | |
| 2954 | /* |
| 2955 | * If current window is bigger than best seen. |
| 2956 | * Set best seen to be current window. |
| 2957 | */ |
| 2958 | if ((end_curr-bgn_curr+1) > win_best) { |
| 2959 | win_best = end_curr-bgn_curr+1; |
| 2960 | bgn_best = bgn_curr; |
| 2961 | end_best = end_curr; |
| 2962 | } |
| 2963 | } else { |
| 2964 | /* We just saw a failing test. Reset temp edge */ |
| 2965 | bgn_curr = IO_IO_OUT1_DELAY_MAX + 1; |
| 2966 | end_curr = IO_IO_OUT1_DELAY_MAX + 1; |
| 2967 | } |
| 2968 | } |
| 2969 | |
| 2970 | |
| 2971 | /* Reset DM delay chains to 0 */ |
Marek Vasut | 122e1f3 | 2015-07-17 06:07:13 +0200 | [diff] [blame] | 2972 | scc_mgr_apply_group_dm_out1_delay(0); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2973 | |
| 2974 | /* |
| 2975 | 	 * Check to see if the current window nudges up against 0 delay. |
| 2976 | 	 * If so, we need to continue the search by shifting DQS; otherwise |
| 2977 | 	 * the DQS search begins as a new search. */ |
| 2978 | if (end_curr != 0) { |
| 2979 | bgn_curr = IO_IO_OUT1_DELAY_MAX + 1; |
| 2980 | end_curr = IO_IO_OUT1_DELAY_MAX + 1; |
| 2981 | } |
| 2982 | |
| 2983 | 	/* Search for the window (or part of it) with DQS shifts */ |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2984 | for (d = 0; d <= IO_IO_OUT1_DELAY_MAX - new_dqs; d += DELTA_D) { |
| 2985 | /* |
| 2986 | 		 * Note: This only shifts DQS, so we may be limiting ourselves to |
| 2987 | 		 * the width of DQ unnecessarily. |
| 2988 | */ |
| 2989 | scc_mgr_apply_group_dqs_io_and_oct_out1(write_group, |
| 2990 | d + new_dqs); |
| 2991 | |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 2992 | writel(0, &sdr_scc_mgr->update); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 2993 | if (rw_mgr_mem_calibrate_write_test(rank_bgn, write_group, 1, |
| 2994 | PASS_ALL_BITS, &bit_chk, |
| 2995 | 0)) { |
| 2996 | 			/* Set the current end of the window */ |
| 2997 | end_curr = d; |
| 2998 | /* |
| 2999 | 			 * If a beginning edge of our window has not been seen, |
| 3000 | 			 * this is the current beginning of the DM window. |
| 3001 | */ |
| 3002 | if (bgn_curr == IO_IO_OUT1_DELAY_MAX + 1) |
| 3003 | bgn_curr = d; |
| 3004 | |
| 3005 | /* |
| 3006 | * If current window is bigger than best seen. Set best |
| 3007 | * seen to be current window. |
| 3008 | */ |
| 3009 | if ((end_curr-bgn_curr+1) > win_best) { |
| 3010 | win_best = end_curr-bgn_curr+1; |
| 3011 | bgn_best = bgn_curr; |
| 3012 | end_best = end_curr; |
| 3013 | } |
| 3014 | } else { |
| 3015 | /* We just saw a failing test. Reset temp edge */ |
| 3016 | bgn_curr = IO_IO_OUT1_DELAY_MAX + 1; |
| 3017 | end_curr = IO_IO_OUT1_DELAY_MAX + 1; |
| 3018 | |
| 3019 | 			/* Early exit optimization: if the remaining delay |
| 3020 | 			 * chain space is less than the largest window already |
| 3021 | 			 * seen, we can exit. */ |
| 3022 | if ((win_best-1) > |
| 3023 | (IO_IO_OUT1_DELAY_MAX - new_dqs - d)) { |
| 3024 | break; |
| 3025 | } |
| 3026 | } |
| 3027 | } |
| 3028 | |
| 3029 | /* assign left and right edge for cal and reporting; */ |
| 3030 | left_edge[0] = -1*bgn_best; |
| 3031 | right_edge[0] = end_best; |
| 3032 | |
| 3033 | debug_cond(DLEVEL == 2, "%s:%d dm_calib: left=%d right=%d\n", __func__, |
| 3034 | __LINE__, left_edge[0], right_edge[0]); |
| 3035 | |
| 3036 | /* Move DQS (back to orig) */ |
| 3037 | scc_mgr_apply_group_dqs_io_and_oct_out1(write_group, new_dqs); |
| 3038 | |
| 3039 | /* Move DM */ |
| 3040 | |
| 3041 | /* Find middle of window for the DM bit */ |
| 3042 | mid = (left_edge[0] - right_edge[0]) / 2; |
| 3043 | |
| 3044 | /* only move right, since we are not moving DQS/DQ */ |
| 3045 | if (mid < 0) |
| 3046 | mid = 0; |
| 3047 | |
| 3048 | 	/* dm_margin should fail if we never find a window */ |
| 3049 | if (win_best == 0) |
| 3050 | dm_margin = -1; |
| 3051 | else |
| 3052 | dm_margin = left_edge[0] - mid; |
| 3053 | |
Marek Vasut | 122e1f3 | 2015-07-17 06:07:13 +0200 | [diff] [blame] | 3054 | scc_mgr_apply_group_dm_out1_delay(mid); |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 3055 | writel(0, &sdr_scc_mgr->update); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 3056 | |
| 3057 | debug_cond(DLEVEL == 2, "%s:%d dm_calib: left=%d right=%d mid=%d \ |
| 3058 | dm_margin=%d\n", __func__, __LINE__, left_edge[0], |
| 3059 | right_edge[0], mid, dm_margin); |
| 3060 | /* Export values */ |
| 3061 | gbl->fom_out += dq_margin + dqs_margin; |
| 3062 | |
| 3063 | debug_cond(DLEVEL == 2, "%s:%d write_center: dq_margin=%d \ |
| 3064 | dqs_margin=%d dm_margin=%d\n", __func__, __LINE__, |
| 3065 | dq_margin, dqs_margin, dm_margin); |
| 3066 | |
| 3067 | /* |
| 3068 | * Do not remove this line as it makes sure all of our |
| 3069 | * decisions have been applied. |
| 3070 | */ |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 3071 | writel(0, &sdr_scc_mgr->update); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 3072 | return (dq_margin >= 0) && (dqs_margin >= 0) && (dm_margin >= 0); |
| 3073 | } |
| 3074 | |
| 3075 | /* calibrate the write operations */ |
| 3076 | static uint32_t rw_mgr_mem_calibrate_writes(uint32_t rank_bgn, uint32_t g, |
| 3077 | uint32_t test_bgn) |
| 3078 | { |
| 3079 | /* update info for sims */ |
| 3080 | debug("%s:%d %u %u\n", __func__, __LINE__, g, test_bgn); |
| 3081 | |
| 3082 | reg_file_set_stage(CAL_STAGE_WRITES); |
| 3083 | reg_file_set_sub_stage(CAL_SUBSTAGE_WRITES_CENTER); |
| 3084 | |
| 3085 | reg_file_set_group(g); |
| 3086 | |
| 3087 | if (!rw_mgr_mem_calibrate_writes_center(rank_bgn, g, test_bgn)) { |
| 3088 | set_failing_group_stage(g, CAL_STAGE_WRITES, |
| 3089 | CAL_SUBSTAGE_WRITES_CENTER); |
| 3090 | return 0; |
| 3091 | } |
| 3092 | |
| 3093 | return 1; |
| 3094 | } |
| 3095 | |
| 3096 | /* precharge all banks and activate row 0 in bank "000..." and bank "111..." */ |
| 3097 | static void mem_precharge_and_activate(void) |
| 3098 | { |
| 3099 | uint32_t r; |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 3100 | |
| 3101 | for (r = 0; r < RW_MGR_MEM_NUMBER_OF_RANKS; r++) { |
| 3102 | if (param->skip_ranks[r]) { |
| 3103 | /* request to skip the rank */ |
| 3104 | continue; |
| 3105 | } |
| 3106 | |
| 3107 | /* set rank */ |
| 3108 | set_rank_and_odt_mask(r, RW_MGR_ODT_MODE_OFF); |
| 3109 | |
| 3110 | /* precharge all banks ... */ |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 3111 | writel(RW_MGR_PRECHARGE_ALL, SDR_PHYGRP_RWMGRGRP_ADDRESS | |
| 3112 | RW_MGR_RUN_SINGLE_GROUP_OFFSET); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 3113 | |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 3114 | writel(0x0F, &sdr_rw_load_mgr_regs->load_cntr0); |
| 3115 | writel(RW_MGR_ACTIVATE_0_AND_1_WAIT1, |
| 3116 | &sdr_rw_load_jump_mgr_regs->load_jump_add0); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 3117 | |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 3118 | writel(0x0F, &sdr_rw_load_mgr_regs->load_cntr1); |
| 3119 | writel(RW_MGR_ACTIVATE_0_AND_1_WAIT2, |
| 3120 | &sdr_rw_load_jump_mgr_regs->load_jump_add1); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 3121 | |
| 3122 | /* activate rows */ |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 3123 | writel(RW_MGR_ACTIVATE_0_AND_1, SDR_PHYGRP_RWMGRGRP_ADDRESS | |
| 3124 | RW_MGR_RUN_SINGLE_GROUP_OFFSET); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 3125 | } |
| 3126 | } |
| 3127 | |
| 3128 | /* Configure various memory related parameters. */ |
| 3129 | static void mem_config(void) |
| 3130 | { |
| 3131 | uint32_t rlat, wlat; |
| 3132 | uint32_t rw_wl_nop_cycles; |
| 3133 | uint32_t max_latency; |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 3134 | |
| 3135 | debug("%s:%d\n", __func__, __LINE__); |
| 3136 | /* read in write and read latency */ |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 3137 | wlat = readl(&data_mgr->t_wl_add); |
| 3138 | wlat += readl(&data_mgr->mem_t_add); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 3139 | |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 3140 | /* WL for hard phy does not include additive latency */ |
| 3141 | |
| 3142 | /* |
| 3143 | 	 * Add additional write latency to offset the address/command extra |
| 3144 | 	 * clock cycle. We change the AC mux setting, causing AC to be delayed |
| 3145 | 	 * by one mem clock cycle. Only do this for DDR3. |
| 3146 | */ |
| 3147 | wlat = wlat + 1; |
| 3148 | |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 3149 | rlat = readl(&data_mgr->t_rl_add); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 3150 | |
| 3151 | rw_wl_nop_cycles = wlat - 2; |
| 3152 | gbl->rw_wl_nop_cycles = rw_wl_nop_cycles; |
| 3153 | |
| 3154 | /* |
| 3155 | * For AV/CV, lfifo is hardened and always runs at full rate so |
| 3156 | * max latency in AFI clocks, used here, is correspondingly smaller. |
| 3157 | */ |
| 3158 | max_latency = (1<<MAX_LATENCY_COUNT_WIDTH)/1 - 1; |
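| | 	/*
| | 	 * (1 << MAX_LATENCY_COUNT_WIDTH) - 1 is the largest value the read
| | 	 * latency counter can hold; the divide by 1 presumably reflects the
| | 	 * full-rate hard LFIFO described in the comment above.
| | 	 */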
| 3159 | /* configure for a burst length of 8 */ |
| 3160 | |
| 3161 | /* write latency */ |
| 3162 | /* Adjust Write Latency for Hard PHY */ |
| 3163 | wlat = wlat + 1; |
| 3164 | |
| 3165 | /* set a pretty high read latency initially */ |
| 3166 | gbl->curr_read_lat = rlat + 16; |
| 3167 | |
| 3168 | if (gbl->curr_read_lat > max_latency) |
| 3169 | gbl->curr_read_lat = max_latency; |
| 3170 | |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 3171 | writel(gbl->curr_read_lat, &phy_mgr_cfg->phy_rlat); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 3172 | |
| 3173 | /* advertise write latency */ |
| 3174 | gbl->curr_write_lat = wlat; |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 3175 | writel(wlat - 2, &phy_mgr_cfg->afi_wlat); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 3176 | |
| 3177 | /* initialize bit slips */ |
| 3178 | mem_precharge_and_activate(); |
| 3179 | } |
| 3180 | |
| 3181 | /* Set VFIFO and LFIFO to instant-on settings in skip calibration mode */ |
| 3182 | static void mem_skip_calibrate(void) |
| 3183 | { |
| 3184 | uint32_t vfifo_offset; |
| 3185 | uint32_t i, j, r; |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 3186 | |
| 3187 | debug("%s:%d\n", __func__, __LINE__); |
| 3188 | /* Need to update every shadow register set used by the interface */ |
| 3189 | for (r = 0; r < RW_MGR_MEM_NUMBER_OF_RANKS; |
| 3190 | r += NUM_RANKS_PER_SHADOW_REG) { |
| 3191 | /* |
| 3192 | * Set output phase alignment settings appropriate for |
| 3193 | * skip calibration. |
| 3194 | */ |
| 3195 | for (i = 0; i < RW_MGR_MEM_IF_READ_DQS_WIDTH; i++) { |
| 3196 | scc_mgr_set_dqs_en_phase(i, 0); |
| 3197 | #if IO_DLL_CHAIN_LENGTH == 6 |
| 3198 | scc_mgr_set_dqdqs_output_phase(i, 6); |
| 3199 | #else |
| 3200 | scc_mgr_set_dqdqs_output_phase(i, 7); |
| 3201 | #endif |
| 3202 | /* |
| 3203 | * Case:33398 |
| 3204 | * |
| 3205 | * Write data arrives to the I/O two cycles before write |
| 3206 | * latency is reached (720 deg). |
| 3207 | * -> due to bit-slip in a/c bus |
| 3208 | * -> to allow board skew where dqs is longer than ck |
| 3209 | * -> how often can this happen!? |
| 3210 | * -> can claim back some ptaps for high freq |
| 3211 | * support if we can relax this, but i digress... |
| 3212 | * |
| 3213 | * The write_clk leads mem_ck by 90 deg |
| 3214 | * The minimum ptap of the OPA is 180 deg |
| 3215 | * Each ptap has (360 / IO_DLL_CHAIN_LENGH) deg of delay |
| 3216 | * The write_clk is always delayed by 2 ptaps |
| 3217 | * |
| 3218 | * Hence, to make DQS aligned to CK, we need to delay |
| 3219 | * DQS by: |
| 3220 | * (720 - 90 - 180 - 2 * (360 / IO_DLL_CHAIN_LENGTH)) |
| 3221 | * |
| 3222 | * Dividing the above by (360 / IO_DLL_CHAIN_LENGTH) |
| 3223 | 			 * gives us the number of ptaps, which simplifies to: |
| 3224 | * |
| 3225 | * (1.25 * IO_DLL_CHAIN_LENGTH - 2) |
| 3226 | */ |
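| | 			/*
| | 			 * Worked example: with IO_DLL_CHAIN_LENGTH = 8, each ptap
| | 			 * is 360 / 8 = 45 deg, so the required DQS delay is
| | 			 * (720 - 90 - 180 - 2 * 45) / 45 = 8 ptaps, which matches
| | 			 * 1.25 * 8 - 2 = 8.
| | 			 */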
| 3227 | scc_mgr_set_dqdqs_output_phase(i, (1.25 * |
| 3228 | IO_DLL_CHAIN_LENGTH - 2)); |
| 3229 | } |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 3230 | writel(0xff, &sdr_scc_mgr->dqs_ena); |
| 3231 | writel(0xff, &sdr_scc_mgr->dqs_io_ena); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 3232 | |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 3233 | for (i = 0; i < RW_MGR_MEM_IF_WRITE_DQS_WIDTH; i++) { |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 3234 | writel(i, SDR_PHYGRP_SCCGRP_ADDRESS | |
| 3235 | SCC_MGR_GROUP_COUNTER_OFFSET); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 3236 | } |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 3237 | writel(0xff, &sdr_scc_mgr->dq_ena); |
| 3238 | writel(0xff, &sdr_scc_mgr->dm_ena); |
| 3239 | writel(0, &sdr_scc_mgr->update); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 3240 | } |
| 3241 | |
| 3242 | /* Compensate for simulation model behaviour */ |
| 3243 | for (i = 0; i < RW_MGR_MEM_IF_READ_DQS_WIDTH; i++) { |
| 3244 | scc_mgr_set_dqs_bus_in_delay(i, 10); |
| 3245 | scc_mgr_load_dqs(i); |
| 3246 | } |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 3247 | writel(0, &sdr_scc_mgr->update); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 3248 | |
| 3249 | /* |
| 3250 | 	 * ArriaV has hard FIFOs that can only be initialized by incrementing |
| 3251 | 	 * them in the sequencer. |
| 3252 | */ |
| 3253 | vfifo_offset = CALIB_VFIFO_OFFSET; |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 3254 | for (j = 0; j < vfifo_offset; j++) { |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 3255 | writel(0xff, &phy_mgr_cmd->inc_vfifo_hard_phy); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 3256 | } |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 3257 | writel(0, &phy_mgr_cmd->fifo_reset); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 3258 | |
| 3259 | /* |
| 3260 | 	 * For ACV with hard lfifo, we get the skip-cal setting from a |
| 3261 | 	 * generation-time constant. |
| 3262 | */ |
| 3263 | gbl->curr_read_lat = CALIB_LFIFO_OFFSET; |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 3264 | writel(gbl->curr_read_lat, &phy_mgr_cfg->phy_rlat); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 3265 | } |
| 3266 | |
| 3267 | /* Memory calibration entry point */ |
| 3268 | static uint32_t mem_calibrate(void) |
| 3269 | { |
| 3270 | uint32_t i; |
| 3271 | uint32_t rank_bgn, sr; |
| 3272 | uint32_t write_group, write_test_bgn; |
| 3273 | uint32_t read_group, read_test_bgn; |
| 3274 | uint32_t run_groups, current_run; |
| 3275 | uint32_t failing_groups = 0; |
| 3276 | uint32_t group_failed = 0; |
| 3277 | uint32_t sr_failed = 0; |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 3278 | |
| 3279 | debug("%s:%d\n", __func__, __LINE__); |
| 3280 | /* Initialize the data settings */ |
| 3281 | |
| 3282 | gbl->error_substage = CAL_SUBSTAGE_NIL; |
| 3283 | gbl->error_stage = CAL_STAGE_NIL; |
| 3284 | gbl->error_group = 0xff; |
| 3285 | gbl->fom_in = 0; |
| 3286 | gbl->fom_out = 0; |
| 3287 | |
| 3288 | mem_config(); |
| 3289 | |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 3290 | for (i = 0; i < RW_MGR_MEM_IF_READ_DQS_WIDTH; i++) { |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 3291 | writel(i, SDR_PHYGRP_SCCGRP_ADDRESS | |
| 3292 | SCC_MGR_GROUP_COUNTER_OFFSET); |
Marek Vasut | d4d3de2 | 2015-07-19 01:34:43 +0200 | [diff] [blame] | 3293 | /* Only needed once to set all groups, pins, DQ, DQS, DM. */ |
| 3294 | if (i == 0) |
| 3295 | scc_mgr_set_hhp_extras(); |
| 3296 | |
Marek Vasut | 0341de4 | 2015-07-17 02:06:20 +0200 | [diff] [blame] | 3297 | scc_set_bypass_mode(i); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 3298 | } |
| 3299 | |
| 3300 | if ((dyn_calib_steps & CALIB_SKIP_ALL) == CALIB_SKIP_ALL) { |
| 3301 | /* |
| 3302 | * Set VFIFO and LFIFO to instant-on settings in skip |
| 3303 | * calibration mode. |
| 3304 | */ |
| 3305 | mem_skip_calibrate(); |
| 3306 | } else { |
| 3307 | for (i = 0; i < NUM_CALIB_REPEAT; i++) { |
| 3308 | /* |
| 3309 | * Zero all delay chain/phase settings for all |
| 3310 | * groups and all shadow register sets. |
| 3311 | */ |
| 3312 | scc_mgr_zero_all(); |
| 3313 | |
| 3314 | run_groups = ~param->skip_groups; |
| 3315 | |
| 3316 | for (write_group = 0, write_test_bgn = 0; write_group |
| 3317 | < RW_MGR_MEM_IF_WRITE_DQS_WIDTH; write_group++, |
| 3318 | write_test_bgn += RW_MGR_MEM_DQ_PER_WRITE_DQS) { |
| 3319 | 				/* Initialize the group failure flag */ |
| 3320 | group_failed = 0; |
| 3321 | |
| 3322 | current_run = run_groups & ((1 << |
| 3323 | RW_MGR_NUM_DQS_PER_WRITE_GROUP) - 1); |
| 3324 | run_groups = run_groups >> |
| 3325 | RW_MGR_NUM_DQS_PER_WRITE_GROUP; |
| 3326 | |
| 3327 | if (current_run == 0) |
| 3328 | continue; |
| 3329 | |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 3330 | writel(write_group, SDR_PHYGRP_SCCGRP_ADDRESS | |
| 3331 | SCC_MGR_GROUP_COUNTER_OFFSET); |
Marek Vasut | 62d3c69 | 2015-07-20 08:41:04 +0200 | [diff] [blame] | 3332 | scc_mgr_zero_group(write_group, 0); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 3333 | |
| 3334 | for (read_group = write_group * |
| 3335 | RW_MGR_MEM_IF_READ_DQS_WIDTH / |
| 3336 | RW_MGR_MEM_IF_WRITE_DQS_WIDTH, |
| 3337 | read_test_bgn = 0; |
| 3338 | read_group < (write_group + 1) * |
| 3339 | RW_MGR_MEM_IF_READ_DQS_WIDTH / |
| 3340 | RW_MGR_MEM_IF_WRITE_DQS_WIDTH && |
| 3341 | group_failed == 0; |
| 3342 | read_group++, read_test_bgn += |
| 3343 | RW_MGR_MEM_DQ_PER_READ_DQS) { |
| 3344 | /* Calibrate the VFIFO */ |
| 3345 | if (!((STATIC_CALIB_STEPS) & |
| 3346 | CALIB_SKIP_VFIFO)) { |
| 3347 | if (!rw_mgr_mem_calibrate_vfifo |
| 3348 | (read_group, |
| 3349 | read_test_bgn)) { |
| 3350 | group_failed = 1; |
| 3351 | |
| 3352 | if (!(gbl-> |
| 3353 | phy_debug_mode_flags & |
| 3354 | PHY_DEBUG_SWEEP_ALL_GROUPS)) { |
| 3355 | return 0; |
| 3356 | } |
| 3357 | } |
| 3358 | } |
| 3359 | } |
| 3360 | |
| 3361 | /* Calibrate the output side */ |
| 3362 | if (group_failed == 0) { |
| 3363 | for (rank_bgn = 0, sr = 0; rank_bgn |
| 3364 | < RW_MGR_MEM_NUMBER_OF_RANKS; |
| 3365 | rank_bgn += |
| 3366 | NUM_RANKS_PER_SHADOW_REG, |
| 3367 | ++sr) { |
| 3368 | sr_failed = 0; |
| 3369 | if (!((STATIC_CALIB_STEPS) & |
| 3370 | CALIB_SKIP_WRITES)) { |
| 3371 | if ((STATIC_CALIB_STEPS) |
| 3372 | & CALIB_SKIP_DELAY_SWEEPS) { |
| 3373 | /* not needed in quick mode! */ |
| 3374 | } else { |
| 3375 | /* |
| 3376 | * Determine if this set of |
| 3377 | * ranks should be skipped |
| 3378 | * entirely. |
| 3379 | */ |
| 3380 | if (!param->skip_shadow_regs[sr]) { |
| 3381 | if (!rw_mgr_mem_calibrate_writes |
| 3382 | (rank_bgn, write_group, |
| 3383 | write_test_bgn)) { |
| 3384 | sr_failed = 1; |
| 3385 | if (!(gbl-> |
| 3386 | phy_debug_mode_flags & |
| 3387 | PHY_DEBUG_SWEEP_ALL_GROUPS)) { |
| 3388 | return 0; |
| 3389 | } |
| 3390 | } |
| 3391 | } |
| 3392 | } |
| 3393 | } |
| 3394 | if (sr_failed != 0) |
| 3395 | group_failed = 1; |
| 3396 | } |
| 3397 | } |
| 3398 | |
| 3399 | if (group_failed == 0) { |
| 3400 | for (read_group = write_group * |
| 3401 | RW_MGR_MEM_IF_READ_DQS_WIDTH / |
| 3402 | RW_MGR_MEM_IF_WRITE_DQS_WIDTH, |
| 3403 | read_test_bgn = 0; |
| 3404 | read_group < (write_group + 1) |
| 3405 | * RW_MGR_MEM_IF_READ_DQS_WIDTH |
| 3406 | / RW_MGR_MEM_IF_WRITE_DQS_WIDTH && |
| 3407 | group_failed == 0; |
| 3408 | read_group++, read_test_bgn += |
| 3409 | RW_MGR_MEM_DQ_PER_READ_DQS) { |
| 3410 | if (!((STATIC_CALIB_STEPS) & |
| 3411 | CALIB_SKIP_WRITES)) { |
| 3412 | if (!rw_mgr_mem_calibrate_vfifo_end |
| 3413 | (read_group, read_test_bgn)) { |
| 3414 | group_failed = 1; |
| 3415 | |
| 3416 | if (!(gbl->phy_debug_mode_flags |
| 3417 | & PHY_DEBUG_SWEEP_ALL_GROUPS)) { |
| 3418 | return 0; |
| 3419 | } |
| 3420 | } |
| 3421 | } |
| 3422 | } |
| 3423 | } |
| 3424 | |
| 3425 | if (group_failed != 0) |
| 3426 | failing_groups++; |
| 3427 | } |
| 3428 | |
| 3429 | /* |
| 3430 | 			 * If there are any failing groups then report |
| 3431 | * the failure. |
| 3432 | */ |
| 3433 | if (failing_groups != 0) |
| 3434 | return 0; |
| 3435 | |
| 3436 | /* Calibrate the LFIFO */ |
| 3437 | if (!((STATIC_CALIB_STEPS) & CALIB_SKIP_LFIFO)) { |
| 3438 | /* |
| 3439 | * If we're skipping groups as part of debug, |
| 3440 | * don't calibrate LFIFO. |
| 3441 | */ |
| 3442 | if (param->skip_groups == 0) { |
| 3443 | if (!rw_mgr_mem_calibrate_lfifo()) |
| 3444 | return 0; |
| 3445 | } |
| 3446 | } |
| 3447 | } |
| 3448 | } |
| 3449 | |
| 3450 | /* |
| 3451 | * Do not remove this line as it makes sure all of our decisions |
| 3452 | * have been applied. |
| 3453 | */ |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 3454 | writel(0, &sdr_scc_mgr->update); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 3455 | return 1; |
| 3456 | } |
| 3457 | |
| 3458 | static uint32_t run_mem_calibrate(void) |
| 3459 | { |
| 3460 | uint32_t pass; |
| 3461 | uint32_t debug_info; |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 3462 | |
| 3463 | debug("%s:%d\n", __func__, __LINE__); |
| 3464 | |
| 3465 | /* Reset pass/fail status shown on afi_cal_success/fail */ |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 3466 | writel(PHY_MGR_CAL_RESET, &phy_mgr_cfg->cal_status); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 3467 | |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 3468 | 	/* Stop the tracking manager */ |
Marek Vasut | cd5d38e | 2015-07-12 20:49:39 +0200 | [diff] [blame] | 3469 | uint32_t ctrlcfg = readl(&sdr_ctrl->ctrl_cfg); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 3470 | |
Marek Vasut | cd5d38e | 2015-07-12 20:49:39 +0200 | [diff] [blame] | 3471 | writel(ctrlcfg & 0xFFBFFFFF, &sdr_ctrl->ctrl_cfg); |
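| | 	/*
| | 	 * 0xFFBFFFFF clears bit 22 of ctrl_cfg, which is assumed here to be
| | 	 * the DQS tracking enable; the saved ctrlcfg value is restored once
| | 	 * calibration finishes.
| | 	 */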
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 3472 | |
| 3473 | initialize(); |
| 3474 | rw_mgr_mem_initialize(); |
| 3475 | |
| 3476 | pass = mem_calibrate(); |
| 3477 | |
| 3478 | mem_precharge_and_activate(); |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 3479 | writel(0, &phy_mgr_cmd->fifo_reset); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 3480 | |
| 3481 | /* |
| 3482 | * Handoff: |
| 3483 | * Don't return control of the PHY back to AFI when in debug mode. |
| 3484 | */ |
| 3485 | if ((gbl->phy_debug_mode_flags & PHY_DEBUG_IN_DEBUG_MODE) == 0) { |
| 3486 | rw_mgr_mem_handoff(); |
| 3487 | /* |
| 3488 | * In Hard PHY this is a 2-bit control: |
| 3489 | * 0: AFI Mux Select |
| 3490 | * 1: DDIO Mux Select |
| 3491 | */ |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 3492 | writel(0x2, &phy_mgr_cfg->mux_sel); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 3493 | } |
| 3494 | |
Marek Vasut | cd5d38e | 2015-07-12 20:49:39 +0200 | [diff] [blame] | 3495 | writel(ctrlcfg, &sdr_ctrl->ctrl_cfg); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 3496 | |
| 3497 | if (pass) { |
| 3498 | printf("%s: CALIBRATION PASSED\n", __FILE__); |
| 3499 | |
| 3500 | gbl->fom_in /= 2; |
| 3501 | gbl->fom_out /= 2; |
| 3502 | |
| 3503 | if (gbl->fom_in > 0xff) |
| 3504 | gbl->fom_in = 0xff; |
| 3505 | |
| 3506 | if (gbl->fom_out > 0xff) |
| 3507 | gbl->fom_out = 0xff; |
| 3508 | |
| 3509 | /* Update the FOM in the register file */ |
| 3510 | debug_info = gbl->fom_in; |
| 3511 | debug_info |= gbl->fom_out << 8; |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 3512 | writel(debug_info, &sdr_reg_file->fom); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 3513 | |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 3514 | writel(debug_info, &phy_mgr_cfg->cal_debug_info); |
| 3515 | writel(PHY_MGR_CAL_SUCCESS, &phy_mgr_cfg->cal_status); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 3516 | } else { |
| 3517 | printf("%s: CALIBRATION FAILED\n", __FILE__); |
| 3518 | |
| 3519 | debug_info = gbl->error_stage; |
| 3520 | debug_info |= gbl->error_substage << 8; |
| 3521 | debug_info |= gbl->error_group << 16; |
| 3522 | |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 3523 | writel(debug_info, &sdr_reg_file->failing_stage); |
| 3524 | writel(debug_info, &phy_mgr_cfg->cal_debug_info); |
| 3525 | writel(PHY_MGR_CAL_FAIL, &phy_mgr_cfg->cal_status); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 3526 | |
| 3527 | /* Update the failing group/stage in the register file */ |
| 3528 | debug_info = gbl->error_stage; |
| 3529 | debug_info |= gbl->error_substage << 8; |
| 3530 | debug_info |= gbl->error_group << 16; |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 3531 | writel(debug_info, &sdr_reg_file->failing_stage); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 3532 | } |
| 3533 | |
| 3534 | return pass; |
| 3535 | } |
| 3536 | |
Marek Vasut | ea9771b | 2015-07-19 06:12:42 +0200 | [diff] [blame] | 3537 | /** |
| 3538 | * hc_initialize_rom_data() - Initialize ROM data |
| 3539 | * |
| 3540 | * Initialize ROM data. |
| 3541 | */ |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 3542 | static void hc_initialize_rom_data(void) |
| 3543 | { |
Marek Vasut | ea9771b | 2015-07-19 06:12:42 +0200 | [diff] [blame] | 3544 | u32 i, addr; |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 3545 | |
Marek Vasut | a334010 | 2015-07-12 19:03:33 +0200 | [diff] [blame] | 3546 | addr = SDR_PHYGRP_RWMGRGRP_ADDRESS | RW_MGR_INST_ROM_WRITE_OFFSET; |
Marek Vasut | ea9771b | 2015-07-19 06:12:42 +0200 | [diff] [blame] | 3547 | for (i = 0; i < ARRAY_SIZE(inst_rom_init); i++) |
| 3548 | writel(inst_rom_init[i], addr + (i << 2)); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 3549 | |
Marek Vasut | a334010 | 2015-07-12 19:03:33 +0200 | [diff] [blame] | 3550 | addr = SDR_PHYGRP_RWMGRGRP_ADDRESS | RW_MGR_AC_ROM_WRITE_OFFSET; |
Marek Vasut | ea9771b | 2015-07-19 06:12:42 +0200 | [diff] [blame] | 3551 | for (i = 0; i < ARRAY_SIZE(ac_rom_init); i++) |
| 3552 | writel(ac_rom_init[i], addr + (i << 2)); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 3553 | } |
| 3554 | |
Marek Vasut | a17ae0f | 2015-07-19 06:13:37 +0200 | [diff] [blame] | 3555 | /** |
| 3556 | * initialize_reg_file() - Initialize SDR register file |
| 3557 | * |
| 3558 | * Initialize SDR register file. |
| 3559 | */ |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 3560 | static void initialize_reg_file(void) |
| 3561 | { |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 3562 | /* Initialize the register file with the correct data */ |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 3563 | writel(REG_FILE_INIT_SEQ_SIGNATURE, &sdr_reg_file->signature); |
| 3564 | writel(0, &sdr_reg_file->debug_data_addr); |
| 3565 | writel(0, &sdr_reg_file->cur_stage); |
| 3566 | writel(0, &sdr_reg_file->fom); |
| 3567 | writel(0, &sdr_reg_file->failing_stage); |
| 3568 | writel(0, &sdr_reg_file->debug1); |
| 3569 | writel(0, &sdr_reg_file->debug2); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 3570 | } |
| 3571 | |
Marek Vasut | 0c9f3cb | 2015-07-19 06:14:04 +0200 | [diff] [blame] | 3572 | /** |
| 3573 | * initialize_hps_phy() - Initialize HPS PHY |
| 3574 | * |
| 3575 | * Initialize HPS PHY. |
| 3576 | */ |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 3577 | static void initialize_hps_phy(void) |
| 3578 | { |
| 3579 | uint32_t reg; |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 3580 | /* |
| 3581 | * Tracking also gets configured here because it's in the |
| 3582 | * same register. |
| 3583 | */ |
| 3584 | uint32_t trk_sample_count = 7500; |
| 3585 | uint32_t trk_long_idle_sample_count = (10 << 16) | 100; |
| 3586 | /* |
| 3587 | * Format is number of outer loops in the 16 MSB, sample |
| 3588 | * count in 16 LSB. |
| 3589 | */ |
| 3590 | |
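| | /* |
| |  * phy_ctrl0: delay-chain enables, LPDDR disable, the RDATA_EN added-latency |
| |  * select, and the low 20 bits of the tracking sample count. |
| |  */ |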
| 3591 | reg = 0; |
| 3592 | reg |= SDR_CTRLGRP_PHYCTRL_PHYCTRL_0_ACDELAYEN_SET(2); |
| 3593 | reg |= SDR_CTRLGRP_PHYCTRL_PHYCTRL_0_DQDELAYEN_SET(1); |
| 3594 | reg |= SDR_CTRLGRP_PHYCTRL_PHYCTRL_0_DQSDELAYEN_SET(1); |
| 3595 | reg |= SDR_CTRLGRP_PHYCTRL_PHYCTRL_0_DQSLOGICDELAYEN_SET(1); |
| 3596 | reg |= SDR_CTRLGRP_PHYCTRL_PHYCTRL_0_RESETDELAYEN_SET(0); |
| 3597 | reg |= SDR_CTRLGRP_PHYCTRL_PHYCTRL_0_LPDDRDIS_SET(1); |
| 3598 | /* |
| 3599 | * This field selects the intrinsic latency to RDATA_EN/FULL path. |
| 3600 |  * 00 - bypass, 01 - add 5 cycles, 10 - add 10 cycles, 11 - add 15 cycles. |
| 3601 | */ |
| 3602 | reg |= SDR_CTRLGRP_PHYCTRL_PHYCTRL_0_ADDLATSEL_SET(0); |
| 3603 | reg |= SDR_CTRLGRP_PHYCTRL_PHYCTRL_0_SAMPLECOUNT_19_0_SET( |
| 3604 | trk_sample_count); |
Marek Vasut | cd5d38e | 2015-07-12 20:49:39 +0200 | [diff] [blame] | 3605 | writel(reg, &sdr_ctrl->phy_ctrl0); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 3606 | |
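| | /* phy_ctrl1: upper 12 bits of the sample count, low 20 bits of the long-idle sample count. */ |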
| 3607 | reg = 0; |
| 3608 | reg |= SDR_CTRLGRP_PHYCTRL_PHYCTRL_1_SAMPLECOUNT_31_20_SET( |
| 3609 | trk_sample_count >> |
| 3610 | SDR_CTRLGRP_PHYCTRL_PHYCTRL_0_SAMPLECOUNT_19_0_WIDTH); |
| 3611 | reg |= SDR_CTRLGRP_PHYCTRL_PHYCTRL_1_LONGIDLESAMPLECOUNT_19_0_SET( |
| 3612 | trk_long_idle_sample_count); |
Marek Vasut | cd5d38e | 2015-07-12 20:49:39 +0200 | [diff] [blame] | 3613 | writel(reg, &sdr_ctrl->phy_ctrl1); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 3614 | |
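| | /* phy_ctrl2: upper 12 bits of the long-idle sample count. */ |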
| 3615 | reg = 0; |
| 3616 | reg |= SDR_CTRLGRP_PHYCTRL_PHYCTRL_2_LONGIDLESAMPLECOUNT_31_20_SET( |
| 3617 | trk_long_idle_sample_count >> |
| 3618 | SDR_CTRLGRP_PHYCTRL_PHYCTRL_1_LONGIDLESAMPLECOUNT_19_0_WIDTH); |
Marek Vasut | cd5d38e | 2015-07-12 20:49:39 +0200 | [diff] [blame] | 3619 | writel(reg, &sdr_ctrl->phy_ctrl2); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 3620 | } |
| 3621 | |
| 3622 | static void initialize_tracking(void) |
| 3623 | { |
| 3624 | uint32_t concatenated_longidle = 0x0; |
| 3625 | uint32_t concatenated_delays = 0x0; |
| 3626 | uint32_t concatenated_rw_addr = 0x0; |
| 3627 | uint32_t concatenated_refresh = 0x0; |
| 3628 | uint32_t trk_sample_count = 7500; |
| 3629 | uint32_t dtaps_per_ptap; |
| 3630 | uint32_t tmp_delay; |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 3631 | |
| 3632 | /* |
| 3633 |  * Compute a usable initial value of dtaps_per_ptap in case the |
| 3634 |  * full computation is skipped later. |
| 3635 | */ |
| 3636 | dtaps_per_ptap = 0; |
| 3637 | tmp_delay = 0; |
| 3638 | while (tmp_delay < IO_DELAY_PER_OPA_TAP) { |
| 3639 | dtaps_per_ptap++; |
| 3640 | tmp_delay += IO_DELAY_PER_DCHAIN_TAP; |
| 3641 | } |
| 3642 | dtaps_per_ptap--; |
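| | /* Equivalent to DIV_ROUND_UP(IO_DELAY_PER_OPA_TAP, IO_DELAY_PER_DCHAIN_TAP) - 1. */ |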
| 3643 | |
| 3644 | concatenated_longidle = concatenated_longidle ^ 10; |
| 3645 | /* longidle outer loop */ |
| 3646 | concatenated_longidle = concatenated_longidle << 16; |
| 3647 | concatenated_longidle = concatenated_longidle ^ 100; |
| 3648 | /*longidle sample count */ |
| 3649 | concatenated_delays = concatenated_delays ^ 243; |
| 3650 | /* tRFC, worst case for 933 MHz, 4 Gb devices */ |
| 3651 | concatenated_delays = concatenated_delays << 8; |
| 3652 | concatenated_delays = concatenated_delays ^ 14; |
| 3653 | /* tRCD, worst case */ |
| 3654 | concatenated_delays = concatenated_delays << 8; |
| 3655 | concatenated_delays = concatenated_delays ^ 10; |
| 3656 | /* vfifo wait */ |
| 3657 | concatenated_delays = concatenated_delays << 8; |
| 3658 | concatenated_delays = concatenated_delays ^ 4; |
| 3659 | /* mux delay */ |
| 3660 | |
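| | /* |
| |  * Pack the RW manager command addresses used by the tracking engine, |
| |  * one per byte: idle, activate, single read, precharge-all. |
| |  */ |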
| 3661 | concatenated_rw_addr = concatenated_rw_addr ^ RW_MGR_IDLE; |
| 3662 | concatenated_rw_addr = concatenated_rw_addr << 8; |
| 3663 | concatenated_rw_addr = concatenated_rw_addr ^ RW_MGR_ACTIVATE_1; |
| 3664 | concatenated_rw_addr = concatenated_rw_addr << 8; |
| 3665 | concatenated_rw_addr = concatenated_rw_addr ^ RW_MGR_SGLE_READ; |
| 3666 | concatenated_rw_addr = concatenated_rw_addr << 8; |
| 3667 | concatenated_rw_addr = concatenated_rw_addr ^ RW_MGR_PRECHARGE_ALL; |
| 3668 | |
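| | /* Refresh-all command address in the top byte, tREFI count in the low 24 bits. */ |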
| 3669 | concatenated_refresh = concatenated_refresh ^ RW_MGR_REFRESH_ALL; |
| 3670 | concatenated_refresh = concatenated_refresh << 24; |
| 3671 | concatenated_refresh = concatenated_refresh ^ 1000; /* trefi */ |
| 3672 | |
| 3673 | /* Initialize the register file with the correct data */ |
Marek Vasut | b545096 | 2015-07-12 21:05:08 +0200 | [diff] [blame] | 3674 | writel(dtaps_per_ptap, &sdr_reg_file->dtaps_per_ptap); |
| 3675 | writel(trk_sample_count, &sdr_reg_file->trk_sample_count); |
| 3676 | writel(concatenated_longidle, &sdr_reg_file->trk_longidle); |
| 3677 | writel(concatenated_delays, &sdr_reg_file->delays); |
| 3678 | writel(concatenated_rw_addr, &sdr_reg_file->trk_rw_mgr_addr); |
| 3679 | writel(RW_MGR_MEM_IF_READ_DQS_WIDTH, &sdr_reg_file->trk_read_dqs_width); |
| 3680 | writel(concatenated_refresh, &sdr_reg_file->trk_rfsh); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 3681 | } |
| 3682 | |
| 3683 | int sdram_calibration_full(void) |
| 3684 | { |
| 3685 | struct param_type my_param; |
| 3686 | struct gbl_type my_gbl; |
| 3687 | uint32_t pass; |
| 3688 | uint32_t i; |
| 3689 | |
| 3690 | param = &my_param; |
| 3691 | gbl = &my_gbl; |
| 3692 | |
| 3693 | /* Initialize the debug mode flags */ |
| 3694 | gbl->phy_debug_mode_flags = 0; |
| 3695 | /* Enable the calibration report by default */ |
| 3696 | gbl->phy_debug_mode_flags |= PHY_DEBUG_ENABLE_CAL_RPT; |
| 3697 | /* |
| 3698 |  * By default, sweep all groups regardless of fail state and leave |
| 3699 |  * the guaranteed read test enabled (see DISABLE_GUARANTEED_READ below). |
| 3700 | */ |
| 3701 | #if DISABLE_GUARANTEED_READ |
| 3702 | gbl->phy_debug_mode_flags |= PHY_DEBUG_DISABLE_GUARANTEED_READ; |
| 3703 | #endif |
| 3704 | /* Initialize the register file */ |
| 3705 | initialize_reg_file(); |
| 3706 | |
| 3707 | /* Initialize any PHY CSR */ |
| 3708 | initialize_hps_phy(); |
| 3709 | |
| 3710 | scc_mgr_initialize(); |
| 3711 | |
| 3712 | initialize_tracking(); |
| 3713 | |
| 3714 | /* Enable all ranks and groups */ |
| 3715 | for (i = 0; i < RW_MGR_MEM_NUMBER_OF_RANKS; i++) |
| 3716 | param->skip_ranks[i] = 0; |
| 3717 | for (i = 0; i < NUM_SHADOW_REGS; ++i) |
| 3718 | param->skip_shadow_regs[i] = 0; |
| 3719 | param->skip_groups = 0; |
| 3720 | |
| 3721 | printf("%s: Preparing to start memory calibration\n", __FILE__); |
| 3722 | |
| 3723 | debug("%s:%d\n", __func__, __LINE__); |
Marek Vasut | 6283b4c | 2015-07-13 01:05:27 +0200 | [diff] [blame] | 3724 | debug_cond(DLEVEL == 1, |
| 3725 | "DDR3 FULL_RATE ranks=%u cs/dimm=%u dq/dqs=%u,%u vg/dqs=%u,%u ", |
| 3726 | RW_MGR_MEM_NUMBER_OF_RANKS, RW_MGR_MEM_NUMBER_OF_CS_PER_DIMM, |
| 3727 | RW_MGR_MEM_DQ_PER_READ_DQS, RW_MGR_MEM_DQ_PER_WRITE_DQS, |
| 3728 | RW_MGR_MEM_VIRTUAL_GROUPS_PER_READ_DQS, |
| 3729 | RW_MGR_MEM_VIRTUAL_GROUPS_PER_WRITE_DQS); |
| 3730 | debug_cond(DLEVEL == 1, |
| 3731 | "dqs=%u,%u dq=%u dm=%u ptap_delay=%u dtap_delay=%u ", |
| 3732 | RW_MGR_MEM_IF_READ_DQS_WIDTH, RW_MGR_MEM_IF_WRITE_DQS_WIDTH, |
| 3733 | RW_MGR_MEM_DATA_WIDTH, RW_MGR_MEM_DATA_MASK_WIDTH, |
| 3734 | IO_DELAY_PER_OPA_TAP, IO_DELAY_PER_DCHAIN_TAP); |
| 3735 | debug_cond(DLEVEL == 1, "dtap_dqsen_delay=%u, dll=%u", |
| 3736 | IO_DELAY_PER_DQS_EN_DCHAIN_TAP, IO_DLL_CHAIN_LENGTH); |
| 3737 | debug_cond(DLEVEL == 1, "max values: en_p=%u dqdqs_p=%u en_d=%u dqs_in_d=%u ", |
| 3738 | IO_DQS_EN_PHASE_MAX, IO_DQDQS_OUT_PHASE_MAX, |
| 3739 | IO_DQS_EN_DELAY_MAX, IO_DQS_IN_DELAY_MAX); |
| 3740 | debug_cond(DLEVEL == 1, "io_in_d=%u io_out1_d=%u io_out2_d=%u ", |
| 3741 | IO_IO_IN_DELAY_MAX, IO_IO_OUT1_DELAY_MAX, |
| 3742 | IO_IO_OUT2_DELAY_MAX); |
| 3743 | debug_cond(DLEVEL == 1, "dqs_in_reserve=%u dqs_out_reserve=%u\n", |
| 3744 | IO_DQS_IN_RESERVE, IO_DQS_OUT_RESERVE); |
Dinh Nguyen | 135cc7f | 2015-06-02 22:52:49 -0500 | [diff] [blame] | 3745 | |
| 3746 | hc_initialize_rom_data(); |
| 3747 | |
| 3748 | /* Update the current stage/group info for simulation */ |
| 3749 | reg_file_set_stage(CAL_STAGE_NIL); |
| 3750 | reg_file_set_group(0); |
| 3751 | |
| 3752 | /* |
| 3753 | * Load global needed for those actions that require |
| 3754 | * some dynamic calibration support. |
| 3755 | */ |
| 3756 | dyn_calib_steps = STATIC_CALIB_STEPS; |
| 3757 | /* |
| 3758 | * Load global to allow dynamic selection of delay loop settings |
| 3759 | * based on calibration mode. |
| 3760 | */ |
| 3761 | if (!(dyn_calib_steps & CALIB_SKIP_DELAY_LOOPS)) |
| 3762 | skip_delay_mask = 0xff; |
| 3763 | else |
| 3764 | skip_delay_mask = 0x0; |
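| | /* A mask of 0xff keeps delay-loop counts as-is; 0x0 collapses them to zero for fast simulation. */ |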
| 3765 | |
| 3766 | pass = run_mem_calibrate(); |
| 3767 | |
| 3768 | printf("%s: Calibration complete\n", __FILE__); |
| 3769 | return pass; |
| 3770 | } |
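| | /* |
| |  * Illustrative only (not part of this file): platform SPL code would |
| |  * typically call sdram_calibration_full() after configuring the SDRAM |
| |  * controller and treat a zero return value as a calibration failure. |
| |  * The exact call site below is an assumption, shown as a minimal sketch: |
| |  * |
| |  *	if (!sdram_calibration_full()) |
| |  *		hang(); |
| |  */ |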