// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2016-2018 Intel Corporation <www.intel.com>
 *
 */

#include <common.h>
#include <errno.h>
#include <div64.h>
#include <asm/io.h>
#include <wait_bit.h>
#include <asm/arch/firewall_s10.h>
#include <asm/arch/sdram_s10.h>
#include <asm/arch/system_manager.h>
#include <asm/arch/reset_manager.h>

DECLARE_GLOBAL_DATA_PTR;

static const struct socfpga_system_manager *sysmgr_regs =
	(void *)SOCFPGA_SYSMGR_ADDRESS;

#define DDR_CONFIG(A, B, C, R)	(((A) << 24) | ((B) << 16) | ((C) << 8) | (R))
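/*
 * For illustration (values chosen here, not read from hardware):
 * DDR_CONFIG(1, 3, 10, 14) packs address order 1, bank width 3,
 * column width 10 and row width 14 into the word 0x01030A0E.
 */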

/* The following are the supported configurations */
u32 ddr_config[] = {
	/* DDR_CONFIG(Address order, Bank, Column, Row) */
	/* List for DDR3 or LPDDR3 (pinout order > chip, row, bank, column) */
	DDR_CONFIG(0, 3, 10, 12),
	DDR_CONFIG(0, 3, 9, 13),
	DDR_CONFIG(0, 3, 10, 13),
	DDR_CONFIG(0, 3, 9, 14),
	DDR_CONFIG(0, 3, 10, 14),
	DDR_CONFIG(0, 3, 10, 15),
	DDR_CONFIG(0, 3, 11, 14),
	DDR_CONFIG(0, 3, 11, 15),
	DDR_CONFIG(0, 3, 10, 16),
	DDR_CONFIG(0, 3, 11, 16),
	DDR_CONFIG(0, 3, 12, 15), /* 0xa */
	/* List for DDR4 only (pinout order > chip, bank, row, column) */
	DDR_CONFIG(1, 3, 10, 14),
	DDR_CONFIG(1, 4, 10, 14),
	DDR_CONFIG(1, 3, 10, 15),
	DDR_CONFIG(1, 4, 10, 15),
	DDR_CONFIG(1, 3, 10, 16),
	DDR_CONFIG(1, 4, 10, 16),
	DDR_CONFIG(1, 3, 10, 17),
	DDR_CONFIG(1, 4, 10, 17),
};

static u32 hmc_readl(u32 reg)
{
	return readl(((void __iomem *)SOCFPGA_HMC_MMR_IO48_ADDRESS + (reg)));
}

static u32 hmc_ecc_readl(u32 reg)
{
	return readl((void __iomem *)SOCFPGA_SDR_ADDRESS + (reg));
}

static u32 hmc_ecc_writel(u32 data, u32 reg)
{
	return writel(data, (void __iomem *)SOCFPGA_SDR_ADDRESS + (reg));
}

static u32 ddr_sch_writel(u32 data, u32 reg)
{
	return writel(data,
		      (void __iomem *)SOCFPGA_SDR_SCHEDULER_ADDRESS + (reg));
}

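/**
 * match_ddr_conf() - Look up a packed DDR_CONFIG() value in ddr_config[]
 * @ddr_conf: DDR configuration word built with DDR_CONFIG()
 *
 * Return the index of the matching ddr_config[] entry, or 0 if no match
 * is found. Index 0 is also the first DDR3 entry, so sdram_mmr_init_full()
 * simply skips the DDR_SCH_DDRCONF write when 0 is returned.
 */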
int match_ddr_conf(u32 ddr_conf)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(ddr_config); i++) {
		if (ddr_conf == ddr_config[i])
			return i;
	}
	return 0;
}

static int emif_clear(void)
{
	hmc_ecc_writel(0, RSTHANDSHAKECTRL);

	return wait_for_bit_le32((const void *)(SOCFPGA_SDR_ADDRESS +
				 RSTHANDSHAKESTAT),
				 DDR_HMC_RSTHANDSHAKE_MASK,
				 false, 1000, false);
}

static int emif_reset(void)
{
	u32 c2s, s2c;
	int ret;

	c2s = hmc_ecc_readl(RSTHANDSHAKECTRL) & DDR_HMC_RSTHANDSHAKE_MASK;
	s2c = hmc_ecc_readl(RSTHANDSHAKESTAT) & DDR_HMC_RSTHANDSHAKE_MASK;

	debug("DDR: c2s=%08x s2c=%08x nr0=%08x nr1=%08x nr2=%08x dst=%08x\n",
	      c2s, s2c, hmc_readl(NIOSRESERVED0), hmc_readl(NIOSRESERVED1),
	      hmc_readl(NIOSRESERVED2), hmc_readl(DRAMSTS));

	if (s2c && emif_clear()) {
		printf("DDR: emif_clear() failed\n");
		return -1;
	}

	debug("DDR: Triggering EMIF reset\n");
	hmc_ecc_writel(DDR_HMC_CORE2SEQ_INT_REQ, RSTHANDSHAKECTRL);

	/* if seq2core[3] = 0, we are good */
	ret = wait_for_bit_le32((const void *)(SOCFPGA_SDR_ADDRESS +
				RSTHANDSHAKESTAT),
				DDR_HMC_SEQ2CORE_INT_RESP_MASK,
				false, 1000, false);
	if (ret) {
		printf("DDR: failed to get ack from EMIF\n");
		return ret;
	}

	ret = emif_clear();
	if (ret) {
		printf("DDR: emif_clear() failed\n");
		return ret;
	}

	debug("DDR: %s triggered successfully\n", __func__);
	return 0;
}

static int poll_hmc_clock_status(void)
{
	return wait_for_bit_le32(&sysmgr_regs->hmc_clk,
				 SYSMGR_HMC_CLK_STATUS_MSK, true, 1000, false);
}

/**
 * sdram_mmr_init_full() - Function to initialize SDRAM MMR
 *
 * Initialize the SDRAM MMR.
 */
int sdram_mmr_init_full(unsigned int unused)
{
	u32 update_value, io48_value, ddrioctl;
	u32 i;
	int ret;

	/* Enable access to DDR from CPU master */
	clrbits_le32(CCU_REG_ADDR(CCU_CPU0_MPRT_ADBASE_DDRREG),
		     CCU_ADBASE_DI_MASK);
	clrbits_le32(CCU_REG_ADDR(CCU_CPU0_MPRT_ADBASE_MEMSPACE0),
		     CCU_ADBASE_DI_MASK);
	clrbits_le32(CCU_REG_ADDR(CCU_CPU0_MPRT_ADBASE_MEMSPACE1A),
		     CCU_ADBASE_DI_MASK);
	clrbits_le32(CCU_REG_ADDR(CCU_CPU0_MPRT_ADBASE_MEMSPACE1B),
		     CCU_ADBASE_DI_MASK);
	clrbits_le32(CCU_REG_ADDR(CCU_CPU0_MPRT_ADBASE_MEMSPACE1C),
		     CCU_ADBASE_DI_MASK);
	clrbits_le32(CCU_REG_ADDR(CCU_CPU0_MPRT_ADBASE_MEMSPACE1D),
		     CCU_ADBASE_DI_MASK);
	clrbits_le32(CCU_REG_ADDR(CCU_CPU0_MPRT_ADBASE_MEMSPACE1E),
		     CCU_ADBASE_DI_MASK);

	/* Enable access to DDR from IO master */
	clrbits_le32(CCU_REG_ADDR(CCU_IOM_MPRT_ADBASE_MEMSPACE0),
		     CCU_ADBASE_DI_MASK);
	clrbits_le32(CCU_REG_ADDR(CCU_IOM_MPRT_ADBASE_MEMSPACE1A),
		     CCU_ADBASE_DI_MASK);
	clrbits_le32(CCU_REG_ADDR(CCU_IOM_MPRT_ADBASE_MEMSPACE1B),
		     CCU_ADBASE_DI_MASK);
	clrbits_le32(CCU_REG_ADDR(CCU_IOM_MPRT_ADBASE_MEMSPACE1C),
		     CCU_ADBASE_DI_MASK);
	clrbits_le32(CCU_REG_ADDR(CCU_IOM_MPRT_ADBASE_MEMSPACE1D),
		     CCU_ADBASE_DI_MASK);
	clrbits_le32(CCU_REG_ADDR(CCU_IOM_MPRT_ADBASE_MEMSPACE1E),
		     CCU_ADBASE_DI_MASK);

	/* this enables nonsecure access to DDR */
	/* mpuregion0addr_limit */
	FW_MPU_DDR_SCR_WRITEL(0xFFFF0000, FW_MPU_DDR_SCR_MPUREGION0ADDR_LIMIT);
	FW_MPU_DDR_SCR_WRITEL(0x1F, FW_MPU_DDR_SCR_MPUREGION0ADDR_LIMITEXT);

	/* nonmpuregion0addr_limit */
	FW_MPU_DDR_SCR_WRITEL(0xFFFF0000,
			      FW_MPU_DDR_SCR_NONMPUREGION0ADDR_LIMIT);
	FW_MPU_DDR_SCR_WRITEL(0x1F, FW_MPU_DDR_SCR_NONMPUREGION0ADDR_LIMITEXT);

	/* Enable mpuregion0enable and nonmpuregion0enable */
	FW_MPU_DDR_SCR_WRITEL(MPUREGION0_ENABLE | NONMPUREGION0_ENABLE,
			      FW_MPU_DDR_SCR_EN_SET);

	/* Ensure HMC clock is running */
	if (poll_hmc_clock_status()) {
		puts("DDR: Error as HMC clock not running\n");
		return -1;
	}

	/* release DDR scheduler from reset */
	socfpga_per_reset(SOCFPGA_RESET(SDR), 0);

	/* Try 3 times to do a calibration */
	for (i = 0; i < 3; i++) {
		ret = wait_for_bit_le32((const void *)(SOCFPGA_SDR_ADDRESS +
					DDRCALSTAT),
					DDR_HMC_DDRCALSTAT_CAL_MSK, true, 1000,
					false);
		if (!ret)
			break;

		emif_reset();
	}

	if (ret) {
		puts("DDR: Error as SDRAM calibration failed\n");
		return -1;
	}
	debug("DDR: Calibration success\n");

	u32 ctrlcfg0 = hmc_readl(CTRLCFG0);
	u32 ctrlcfg1 = hmc_readl(CTRLCFG1);
	u32 dramaddrw = hmc_readl(DRAMADDRW);
	u32 dramtim0 = hmc_readl(DRAMTIMING0);
	u32 caltim0 = hmc_readl(CALTIMING0);
	u32 caltim1 = hmc_readl(CALTIMING1);
	u32 caltim2 = hmc_readl(CALTIMING2);
	u32 caltim3 = hmc_readl(CALTIMING3);
	u32 caltim4 = hmc_readl(CALTIMING4);
	u32 caltim9 = hmc_readl(CALTIMING9);

	/*
	 * Configure the DDR IO size [0xFFCFB008]
	 * niosreserve0: Used to indicate DDR width &
	 *   bit[7:0]   = Number of data bits (bit[6:5] 0x01=32bit, 0x10=64bit)
	 *   bit[8]     = 1 if user-mode OCT is present
	 *   bit[9]     = 1 if warm reset compiled into EMIF Cal Code
	 *   bit[10]    = 1 if warm reset is on during generation in EMIF Cal
	 * niosreserve1: IP ADCDS version encoded as 16 bit value
	 *   bit[2:0]   = Variant (0=not special, 1=FAE beta, 2=Customer beta,
	 *                3=EAP, 4-6 are reserved)
	 *   bit[5:3]   = Service Pack # (e.g. 1)
	 *   bit[9:6]   = Minor Release #
	 *   bit[14:10] = Major Release #
	 */
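	/*
	 * (value & 0xFF) >> 5 below keeps bits [7:5] of niosreserve0, which
	 * contain the bit[6:5] interface-width field described above, and
	 * programs that value into DDRIOCTRL.
	 */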
	update_value = hmc_readl(NIOSRESERVED0);
	hmc_ecc_writel(((update_value & 0xFF) >> 5), DDRIOCTRL);
	ddrioctl = hmc_ecc_readl(DDRIOCTRL);

	/* enable HPS interface to HMC */
	hmc_ecc_writel(DDR_HMC_HPSINTFCSEL_ENABLE_MASK, HPSINTFCSEL);

	/* Set the DDR Configuration */
	io48_value = DDR_CONFIG(CTRLCFG1_CFG_ADDR_ORDER(ctrlcfg1),
				(DRAMADDRW_CFG_BANK_ADDR_WIDTH(dramaddrw) +
				 DRAMADDRW_CFG_BANK_GRP_ADDR_WIDTH(dramaddrw)),
				DRAMADDRW_CFG_COL_ADDR_WIDTH(dramaddrw),
				DRAMADDRW_CFG_ROW_ADDR_WIDTH(dramaddrw));

	update_value = match_ddr_conf(io48_value);
	if (update_value)
		ddr_sch_writel(update_value, DDR_SCH_DDRCONF);

	/* Configure HMC dramaddrw */
	hmc_ecc_writel(hmc_readl(DRAMADDRW), DRAMADDRWIDTH);

	/*
	 * Configure DDR timing
	 *  RDTOMISS = tRTP + tRP + tRCD - BL/2
	 *  WRTOMISS = WL + tWR + tRP + tRCD and
	 *  WL = RL + BL/2 + 2 - rd-to-wr ; tWR = 15ns so...
	 *  First part of equation is in memory clock units so divide by 2
	 *  for HMC clock units. 1066MHz is close to 1ns so use 15 directly.
	 *  WRTOMISS = ((RL + BL/2 + 2 + tWR) >> 1) - rd-to-wr + tRP + tRCD
	 */
	u32 burst_len = CTRLCFG0_CFG_CTRL_BURST_LEN(ctrlcfg0);

	update_value = CALTIMING2_CFG_RD_TO_WR_PCH(caltim2) +
		       CALTIMING4_CFG_PCH_TO_VALID(caltim4) +
		       CALTIMING0_CFG_ACT_TO_RDWR(caltim0) -
		       (burst_len >> 2);
	io48_value = (((DRAMTIMING0_CFG_TCL(dramtim0) + 2 + DDR_TWR +
			(burst_len >> 1)) >> 1) -
		      /* Up to here was in memory cycles so divide by 2 */
		      CALTIMING1_CFG_RD_TO_WR(caltim1) +
		      CALTIMING0_CFG_ACT_TO_RDWR(caltim0) +
		      CALTIMING4_CFG_PCH_TO_VALID(caltim4));
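	/*
	 * Illustration with assumed register values (not read from any
	 * board): TCL = 7, DDR_TWR = 15, burst_len = 8, rd-to-wr = 2,
	 * act-to-rdwr = 5 and pch-to-valid = 5 give
	 * WRTOMISS = ((7 + 2 + 15 + 4) >> 1) - 2 + 5 + 5 = 22 HMC clocks.
	 */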

	ddr_sch_writel(((CALTIMING0_CFG_ACT_TO_ACT(caltim0) <<
			 DDR_SCH_DDRTIMING_ACTTOACT_OFF) |
			(update_value << DDR_SCH_DDRTIMING_RDTOMISS_OFF) |
			(io48_value << DDR_SCH_DDRTIMING_WRTOMISS_OFF) |
			((burst_len >> 2) << DDR_SCH_DDRTIMING_BURSTLEN_OFF) |
			(CALTIMING1_CFG_RD_TO_WR(caltim1) <<
			 DDR_SCH_DDRTIMING_RDTOWR_OFF) |
			(CALTIMING3_CFG_WR_TO_RD(caltim3) <<
			 DDR_SCH_DDRTIMING_WRTORD_OFF) |
			(((ddrioctl == 1) ? 1 : 0) <<
			 DDR_SCH_DDRTIMING_BWRATIO_OFF)),
		       DDR_SCH_DDRTIMING);

	/* Configure DDR mode [precharge = 0] */
	ddr_sch_writel(((ddrioctl ? 0 : 1) <<
			DDR_SCH_DDRMOD_BWRATIOEXTENDED_OFF),
		       DDR_SCH_DDRMODE);

	/* Configure the read latency */
	ddr_sch_writel((DRAMTIMING0_CFG_TCL(dramtim0) >> 1) +
		       DDR_READ_LATENCY_DELAY,
		       DDR_SCH_READ_LATENCY);

	/*
	 * Configure timing values concerning activate commands
	 * [FAWBANK is always 1 because the DDR always has 4 banks]
	 */
	ddr_sch_writel(((CALTIMING0_CFG_ACT_TO_ACT_DB(caltim0) <<
			 DDR_SCH_ACTIVATE_RRD_OFF) |
			(CALTIMING9_CFG_4_ACT_TO_ACT(caltim9) <<
			 DDR_SCH_ACTIVATE_FAW_OFF) |
			(DDR_ACTIVATE_FAWBANK <<
			 DDR_SCH_ACTIVATE_FAWBANK_OFF)),
		       DDR_SCH_ACTIVATE);

	/*
	 * Configuring timing values concerning device to device data bus
	 * ownership change
	 */
	ddr_sch_writel(((CALTIMING1_CFG_RD_TO_RD_DC(caltim1) <<
			 DDR_SCH_DEVTODEV_BUSRDTORD_OFF) |
			(CALTIMING1_CFG_RD_TO_WR_DC(caltim1) <<
			 DDR_SCH_DEVTODEV_BUSRDTOWR_OFF) |
			(CALTIMING3_CFG_WR_TO_RD_DC(caltim3) <<
			 DDR_SCH_DEVTODEV_BUSWRTORD_OFF)),
		       DDR_SCH_DEVTODEV);

	/* assigning the SDRAM size */
	unsigned long long size = sdram_calculate_size();
	/* If the size is invalid, use default Config size */
	if (size <= 0)
		gd->ram_size = PHYS_SDRAM_1_SIZE;
	else
		gd->ram_size = size;

	/* Enable or disable the SDRAM ECC */
	if (CTRLCFG1_CFG_CTRL_EN_ECC(ctrlcfg1)) {
		setbits_le32(SOCFPGA_SDR_ADDRESS + ECCCTRL1,
			     (DDR_HMC_ECCCTL_AWB_CNT_RST_SET_MSK |
			      DDR_HMC_ECCCTL_CNT_RST_SET_MSK |
			      DDR_HMC_ECCCTL_ECC_EN_SET_MSK));
		clrbits_le32(SOCFPGA_SDR_ADDRESS + ECCCTRL1,
			     (DDR_HMC_ECCCTL_AWB_CNT_RST_SET_MSK |
			      DDR_HMC_ECCCTL_CNT_RST_SET_MSK));
		setbits_le32(SOCFPGA_SDR_ADDRESS + ECCCTRL2,
			     (DDR_HMC_ECCCTL2_RMW_EN_SET_MSK |
			      DDR_HMC_ECCCTL2_AWB_EN_SET_MSK));
	} else {
		clrbits_le32(SOCFPGA_SDR_ADDRESS + ECCCTRL1,
			     (DDR_HMC_ECCCTL_AWB_CNT_RST_SET_MSK |
			      DDR_HMC_ECCCTL_CNT_RST_SET_MSK |
			      DDR_HMC_ECCCTL_ECC_EN_SET_MSK));
		clrbits_le32(SOCFPGA_SDR_ADDRESS + ECCCTRL2,
			     (DDR_HMC_ECCCTL2_RMW_EN_SET_MSK |
			      DDR_HMC_ECCCTL2_AWB_EN_SET_MSK));
	}

	debug("DDR: HMC init success\n");
	return 0;
}

/**
 * sdram_calculate_size() - Calculate SDRAM size
 *
 * Calculate SDRAM device size based on SDRAM controller parameters.
 * Size is specified in bytes.
 */
phys_size_t sdram_calculate_size(void)
{
	u32 dramaddrw = hmc_readl(DRAMADDRW);

	/* Cast to phys_size_t so the shift cannot overflow a 32-bit int */
	phys_size_t size = (phys_size_t)1 <<
			(DRAMADDRW_CFG_CS_ADDR_WIDTH(dramaddrw) +
			 DRAMADDRW_CFG_BANK_GRP_ADDR_WIDTH(dramaddrw) +
			 DRAMADDRW_CFG_BANK_ADDR_WIDTH(dramaddrw) +
			 DRAMADDRW_CFG_ROW_ADDR_WIDTH(dramaddrw) +
			 DRAMADDRW_CFG_COL_ADDR_WIDTH(dramaddrw));

	size *= (2 << (hmc_ecc_readl(DDRIOCTRL) &
		       DDR_HMC_DDRIOCTRL_IOSIZE_MSK));
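	/*
	 * Example with assumed field values: CS width 1, bank-group width 0,
	 * bank width 3, row width 15 and column width 10 give 1 << 29
	 * locations; a 32-bit interface (IOSIZE field 1) multiplies this by
	 * 2 << 1 = 4 bytes, for 2 GiB in total.
	 */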

	return size;
}