// SPDX-License-Identifier: GPL-2.0
/*
 * (C) Copyright 2020 Rockchip Electronics Co., Ltd.
 * Copyright (c) 2022 Edgeble AI Technologies Pvt. Ltd.
 */

#include <common.h>
#include <debug_uart.h>
#include <dm.h>
#include <ram.h>
#include <syscon.h>
#include <asm/io.h>
#include <asm/arch-rockchip/clock.h>
#include <asm/arch-rockchip/hardware.h>
#include <asm/arch-rockchip/cru_rv1126.h>
#include <asm/arch-rockchip/grf_rv1126.h>
#include <asm/arch-rockchip/sdram_common.h>
#include <asm/arch-rockchip/sdram_rv1126.h>
#include <linux/delay.h>

/* define training flag */
#define CA_TRAINING		(0x1 << 0)
#define READ_GATE_TRAINING	(0x1 << 1)
#define WRITE_LEVELING		(0x1 << 2)
#define WRITE_TRAINING		(0x1 << 3)
#define READ_TRAINING		(0x1 << 4)
#define FULL_TRAINING		(0xff)

#define SKEW_RX_SIGNAL		(0)
#define SKEW_TX_SIGNAL		(1)
#define SKEW_CA_SIGNAL		(2)

#define DESKEW_MDF_ABS_VAL	(0)
#define DESKEW_MDF_DIFF_VAL	(1)
struct dram_info {
#if defined(CONFIG_TPL_BUILD) || \
	(!defined(CONFIG_TPL) && defined(CONFIG_SPL_BUILD))
	void __iomem *pctl;
	void __iomem *phy;
	struct rv1126_cru *cru;
	struct msch_regs *msch;
	struct rv1126_ddrgrf *ddrgrf;
	struct rv1126_grf *grf;
	u32 sr_idle;
	u32 pd_idle;
#endif
	struct ram_info info;
	struct rv1126_pmugrf *pmugrf;
};

#if defined(CONFIG_TPL_BUILD) || \
	(!defined(CONFIG_TPL) && defined(CONFIG_SPL_BUILD))

#define GRF_BASE_ADDR		0xfe000000
#define PMU_GRF_BASE_ADDR	0xfe020000
#define DDR_GRF_BASE_ADDR	0xfe030000
#define BUS_SGRF_BASE_ADDR	0xfe0a0000
#define SERVER_MSCH_BASE_ADDR	0xfe800000
#define CRU_BASE_ADDR		0xff490000
#define DDR_PHY_BASE_ADDR	0xff4a0000
#define UPCTL2_BASE_ADDR	0xffa50000

#define SGRF_SOC_CON2		0x8
#define SGRF_SOC_CON12		0x30
#define SGRF_SOC_CON13		0x34

struct dram_info dram_info;

struct rv1126_sdram_params sdram_configs[] = {
#if defined(CONFIG_RAM_ROCKCHIP_LPDDR4)
# include "sdram-rv1126-lpddr4-detect-328.inc"
# include "sdram-rv1126-lpddr4-detect-396.inc"
# include "sdram-rv1126-lpddr4-detect-528.inc"
# include "sdram-rv1126-lpddr4-detect-664.inc"
# include "sdram-rv1126-lpddr4-detect-784.inc"
# include "sdram-rv1126-lpddr4-detect-924.inc"
# include "sdram-rv1126-lpddr4-detect-1056.inc"
#else
# include "sdram-rv1126-ddr3-detect-328.inc"
# include "sdram-rv1126-ddr3-detect-396.inc"
# include "sdram-rv1126-ddr3-detect-528.inc"
# include "sdram-rv1126-ddr3-detect-664.inc"
# include "sdram-rv1126-ddr3-detect-784.inc"
# include "sdram-rv1126-ddr3-detect-924.inc"
# include "sdram-rv1126-ddr3-detect-1056.inc"
#endif
};

u32 common_info[] = {
#include "sdram-rv1126-loader_params.inc"
};

#if defined(CONFIG_CMD_DDR_TEST_TOOL)
static struct rw_trn_result rw_trn_result;
#endif

static struct rv1126_fsp_param fsp_param[MAX_IDX];

static u8 lp3_odt_value;

static s8 wrlvl_result[2][4];

/* DDR configuration 0-9 */
u16 ddr_cfg_2_rbc[] = {
	((0 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 3), /* 0 */
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 2), /* 1 */
	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 3), /* 2 */
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 1), /* 3 */
	((0 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 4), /* 4 */
	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 1), /* 5 */
	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 2), /* 6 */
	((0 << 8) | (2 << 5) | (1 << 4) | (1 << 3) | 3), /* 7 */
	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 2), /* 8 */
	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 2)  /* 9 */
};

/* DDR configuration 10-21 */
u8 ddr4_cfg_2_rbc[] = {
	((0 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 0), /* 10 */
	((1 << 7) | (2 << 4) | (0 << 3) | (2 << 1) | 0), /* 11 */
	((0 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 0), /* 12 */
	((1 << 7) | (3 << 4) | (0 << 3) | (1 << 1) | 0), /* 13 */
	((0 << 7) | (4 << 4) | (0 << 3) | (2 << 1) | 1), /* 14 */
	((1 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 1), /* 15 */
	((1 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 1), /* 16 */
	((0 << 7) | (2 << 4) | (1 << 3) | (2 << 1) | 0), /* 17 */
	((0 << 7) | (3 << 4) | (1 << 3) | (1 << 1) | 0), /* 18 */
	((0 << 7) | (3 << 4) | (1 << 3) | (2 << 1) | 1), /* 19 */
	((0 << 7) | (4 << 4) | (1 << 3) | (1 << 1) | 1), /* 20 */
	((1 << 7) | (4 << 4) | (0 << 3) | (0 << 1) | 0)  /* 21 */
};

/* DDR configuration 22-28 */
u16 ddr_cfg_2_rbc_p2[] = {
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 0), /* 22 */
	((0 << 8) | (4 << 5) | (0 << 4) | (1 << 3) | 2), /* 23 */
	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 3), /* 24 */
	((0 << 8) | (3 << 5) | (1 << 4) | (0 << 3) | 3), /* 25 */
	((0 << 8) | (4 << 5) | (1 << 4) | (0 << 3) | 2), /* 26 */
	((1 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 2), /* 27 */
	((0 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 3)  /* 28 */
};

u8 d4_rbc_2_d3_rbc[][2] = {
	{10, 0},
	{11, 2},
	{12, 23},
	{13, 1},
	{14, 28},
	{15, 24},
	{16, 27},
	{17, 7},
	{18, 6},
	{19, 25},
	{20, 26},
	{21, 3}
};

u32 addrmap[29][9] = {
	{24, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
	 0x08080808, 0x00000f0f, 0x3f3f}, /* 0 */
	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
	 0x07070707, 0x00000f0f, 0x3f3f}, /* 1 */
	{23, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
	 0x0f080808, 0x00000f0f, 0x3f3f}, /* 2 */
	{22, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x06060606,
	 0x06060606, 0x00000f0f, 0x3f3f}, /* 3 */
	{24, 0x000a0a0a, 0x00000000, 0x00000000, 0x00000000, 0x09090909,
	 0x0f090909, 0x00000f0f, 0x3f3f}, /* 4 */
	{6, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x07070707,
	 0x07070707, 0x00000f0f, 0x3f3f}, /* 5 */
	{7, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x08080808,
	 0x08080808, 0x00000f0f, 0x3f3f}, /* 6 */
	{8, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x09090909,
	 0x0f090909, 0x00000f0f, 0x3f3f}, /* 7 */
	{22, 0x001f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x06060606,
	 0x06060606, 0x00000f0f, 0x3f3f}, /* 8 */
	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
	 0x0f070707, 0x00000f0f, 0x3f3f}, /* 9 */

	{24, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
	 0x08080808, 0x00000f0f, 0x0801}, /* 10 */
	{23, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
	 0x0f080808, 0x00000f0f, 0x0801}, /* 11 */
	{24, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
	 0x07070707, 0x00000f07, 0x0700}, /* 12 */
	{23, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
	 0x07070707, 0x00000f0f, 0x0700}, /* 13 */
	{24, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
	 0x07070707, 0x00000f07, 0x3f01}, /* 14 */
	{23, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
	 0x07070707, 0x00000f0f, 0x3f01}, /* 15 */
	{23, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x06060606,
	 0x06060606, 0x00000f06, 0x3f00}, /* 16 */
	{8, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x09090909,
	 0x0f090909, 0x00000f0f, 0x0801}, /* 17 */
	{7, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x08080808,
	 0x08080808, 0x00000f0f, 0x0700}, /* 18 */
	{7, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
	 0x08080808, 0x00000f0f, 0x3f01}, /* 19 */

	{6, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
	 0x07070707, 0x00000f07, 0x3f00}, /* 20 */
	{23, 0x003f0909, 0x00000006, 0x1f1f0000, 0x00001f1f, 0x06060606,
	 0x06060606, 0x00000f06, 0x0600}, /* 21 */
	{21, 0x00060606, 0x00000000, 0x1f1f0000, 0x00001f1f, 0x05050505,
	 0x05050505, 0x00000f0f, 0x3f3f}, /* 22 */

	{24, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
	 0x07070707, 0x00000f07, 0x3f3f}, /* 23 */
	{23, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x07070707,
	 0x07070707, 0x00000f0f, 0x3f3f}, /* 24 */
	{7, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
	 0x08080808, 0x00000f0f, 0x3f3f}, /* 25 */
	{6, 0x003f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
	 0x07070707, 0x00000f07, 0x3f3f}, /* 26 */
	{23, 0x003f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x06060606,
	 0x06060606, 0x00000f06, 0x3f3f}, /* 27 */
	{24, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x07070707,
	 0x07070707, 0x00000f07, 0x3f3f}  /* 28 */
};

static u8 dq_sel[22][3] = {
	{0x0, 0x17, 0x22},
	{0x1, 0x18, 0x23},
	{0x2, 0x19, 0x24},
	{0x3, 0x1a, 0x25},
	{0x4, 0x1b, 0x26},
	{0x5, 0x1c, 0x27},
	{0x6, 0x1d, 0x28},
	{0x7, 0x1e, 0x29},
	{0x8, 0x16, 0x21},
	{0x9, 0x1f, 0x2a},
	{0xa, 0x20, 0x2b},
	{0x10, 0x1, 0xc},
	{0x11, 0x2, 0xd},
	{0x12, 0x3, 0xe},
	{0x13, 0x4, 0xf},
	{0x14, 0x5, 0x10},
	{0x15, 0x6, 0x11},
	{0x16, 0x7, 0x12},
	{0x17, 0x8, 0x13},
	{0x18, 0x0, 0xb},
	{0x19, 0x9, 0x14},
	{0x1a, 0xa, 0x15}
};

static u16 grp_addr[4] = {
	ADD_GROUP_CS0_A,
	ADD_GROUP_CS0_B,
	ADD_GROUP_CS1_A,
	ADD_GROUP_CS1_B
};

static u8 wrlvl_result_offset[2][4] = {
	{0xa0 + 0x26, 0xa0 + 0x27, 0xd0 + 0x26, 0xd0 + 0x27},
	{0xa0 + 0x28, 0xa0 + 0x29, 0xd0 + 0x28, 0xd0 + 0x29},
};

static u16 dqs_dq_skew_adr[16] = {
	0x170 + 0,	/* SKEW_UPDATE_RX_CS0_DQS0 */
	0x170 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS1 */
	0x1d0 + 0,	/* SKEW_UPDATE_RX_CS0_DQS2 */
	0x1d0 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS3 */
	0x1a0 + 0,	/* SKEW_UPDATE_RX_CS1_DQS0 */
	0x1a0 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS1 */
	0x200 + 0,	/* SKEW_UPDATE_RX_CS1_DQS2 */
	0x200 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS3 */
	0x170 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS0 */
	0x170 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS1 */
	0x1d0 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS2 */
	0x1d0 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS3 */
	0x1a0 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS0 */
	0x1a0 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS1 */
	0x200 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS2 */
	0x200 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS3 */
};

static void rkclk_ddr_reset(struct dram_info *dram,
			    u32 ctl_srstn, u32 ctl_psrstn,
			    u32 phy_srstn, u32 phy_psrstn)
{
	writel(UPCTL2_SRSTN_REQ(ctl_srstn) | UPCTL2_PSRSTN_REQ(ctl_psrstn) |
	       UPCTL2_ASRSTN_REQ(ctl_srstn),
	       BUS_SGRF_BASE_ADDR + SGRF_SOC_CON13);

	writel(DDRPHY_SRSTN_REQ(phy_srstn) | DDRPHY_PSRSTN_REQ(phy_psrstn),
	       &dram->cru->softrst_con[12]);
}

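/*
 * Program the DPLL for the requested DDR clock: switch to the 24 MHz
 * oscillator, pick refdiv/fbdiv/postdiv from the target frequency,
 * optionally enable spread spectrum (SSMOD) from the loader parameters,
 * wait for lock, then switch back to the PLL as clock source.
 */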
static void rkclk_set_dpll(struct dram_info *dram, unsigned int hz)
{
	unsigned int refdiv, postdiv1, postdiv2, fbdiv;
	int delay = 1000;
	u32 mhz = hz / MHz;
	struct global_info *gbl_info;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	u32 ssmod_info;
	u32 dsmpd = 1;

	gbl_info = (struct global_info *)((void *)common_info +
		    index->global_index.offset * 4);
	ssmod_info = gbl_info->info_2t;
	refdiv = 1;
	if (mhz <= 100) {
		postdiv1 = 6;
		postdiv2 = 4;
	} else if (mhz <= 150) {
		postdiv1 = 4;
		postdiv2 = 4;
	} else if (mhz <= 200) {
		postdiv1 = 6;
		postdiv2 = 2;
	} else if (mhz <= 300) {
		postdiv1 = 4;
		postdiv2 = 2;
	} else if (mhz <= 400) {
		postdiv1 = 6;
		postdiv2 = 1;
	} else {
		postdiv1 = 4;
		postdiv2 = 1;
	}
	fbdiv = (mhz * refdiv * postdiv1 * postdiv2) / 24;

	writel(DPLL_MODE(CLOCK_FROM_XIN_OSC), &dram->cru->mode);

	writel(0x1f000000, &dram->cru->clksel_con[64]);
	writel(POSTDIV1(postdiv1) | FBDIV(fbdiv), &dram->cru->pll[1].con0);
	/* enable ssmod */
	if (PLL_SSMOD_SPREAD(ssmod_info)) {
		dsmpd = 0;
		clrsetbits_le32(&dram->cru->pll[1].con2,
				0xffffff << 0, 0x0 << 0);
		writel(SSMOD_SPREAD(PLL_SSMOD_SPREAD(ssmod_info)) |
		       SSMOD_DIVVAL(PLL_SSMOD_DIV(ssmod_info)) |
		       SSMOD_DOWNSPREAD(PLL_SSMOD_DOWNSPREAD(ssmod_info)) |
		       SSMOD_RESET(0) |
		       SSMOD_DIS_SSCG(0) |
		       SSMOD_BP(0),
		       &dram->cru->pll[1].con3);
	}
	writel(DSMPD(dsmpd) | POSTDIV2(postdiv2) | REFDIV(refdiv),
	       &dram->cru->pll[1].con1);

	while (delay > 0) {
		udelay(1);
		if (LOCK(readl(&dram->cru->pll[1].con1)))
			break;
		delay--;
	}

	writel(DPLL_MODE(CLOCK_FROM_PLL), &dram->cru->mode);
}

static void rkclk_configure_ddr(struct dram_info *dram,
				struct rv1126_sdram_params *sdram_params)
{
	/* the Inno DDR PHY needs freq / 2 */
	rkclk_set_dpll(dram, sdram_params->base.ddr_freq * MHZ / 2);
}

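/*
 * Map the detected rank/row/column/bank/bus-width combination to one of
 * the ddrconfig indexes (0-28) used by addrmap[]; DDR4 configurations are
 * converted to their DDR3-style equivalent via d4_rbc_2_d3_rbc[].
 */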
static unsigned int
calculate_ddrconfig(struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 cs, bw, die_bw, col, row, bank;
	u32 cs1_row;
	u32 i, tmp;
	u32 ddrconf = -1;
	u32 row_3_4;

	cs = cap_info->rank;
	bw = cap_info->bw;
	die_bw = cap_info->dbw;
	col = cap_info->col;
	row = cap_info->cs0_row;
	cs1_row = cap_info->cs1_row;
	bank = cap_info->bk;
	row_3_4 = cap_info->row_3_4;

	if (sdram_params->base.dramtype == DDR4) {
		if (cs == 2 && row == cs1_row && !row_3_4) {
			tmp = ((row - 13) << 4) | (1 << 3) | (bw << 1) |
			      die_bw;
			for (i = 17; i < 21; i++) {
				if (((tmp & 0xf) ==
				     (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
				    ((tmp & 0x70) <=
				     (ddr4_cfg_2_rbc[i - 10] & 0x70))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		tmp = ((cs - 1) << 7) | ((row - 13) << 4) | (bw << 1) | die_bw;
		for (i = 10; i < 21; i++) {
			if (((tmp & 0xf) == (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
			    ((tmp & 0x70) <= (ddr4_cfg_2_rbc[i - 10] & 0x70)) &&
			    ((tmp & 0x80) <= (ddr4_cfg_2_rbc[i - 10] & 0x80))) {
				ddrconf = i;
				goto out;
			}
		}
	} else {
		if (cs == 2 && row == cs1_row && bank == 3) {
			for (i = 5; i < 8; i++) {
				if (((bw + col - 10) == (ddr_cfg_2_rbc[i] &
							 0x7)) &&
				    ((row - 13) << 5) <= (ddr_cfg_2_rbc[i] &
							  (0x7 << 5))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		tmp = ((cs - 1) << 8) | ((row - 13) << 5) |
		      ((bw + col - 10) << 0);
		if (bank == 3)
			tmp |= (1 << 3);

		for (i = 0; i < 9; i++)
			if (((tmp & 0x1f) == (ddr_cfg_2_rbc[i] & 0x1f)) &&
			    ((tmp & (7 << 5)) <=
			     (ddr_cfg_2_rbc[i] & (7 << 5))) &&
			    ((tmp & (1 << 8)) <=
			     (ddr_cfg_2_rbc[i] & (1 << 8)))) {
				ddrconf = i;
				goto out;
			}

		for (i = 0; i < 7; i++)
			if (((tmp & 0x1f) == (ddr_cfg_2_rbc_p2[i] & 0x1f)) &&
			    ((tmp & (7 << 5)) <=
			     (ddr_cfg_2_rbc_p2[i] & (7 << 5))) &&
			    ((tmp & (1 << 8)) <=
			     (ddr_cfg_2_rbc_p2[i] & (1 << 8)))) {
				ddrconf = i + 22;
				goto out;
			}

		if (cs == 1 && bank == 3 && row <= 17 &&
		    (col + bw) == 12)
			ddrconf = 23;
	}

out:
	if (ddrconf > 28)
		printascii("calculate ddrconfig error\n");

	if (sdram_params->base.dramtype == DDR4) {
		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc); i++) {
			if (ddrconf == d4_rbc_2_d3_rbc[i][0]) {
				if (ddrconf == 21 && row > 16)
					printascii("warn:ddrconf21 row > 16\n");
				else
					ddrconf = d4_rbc_2_d3_rbc[i][1];
				break;
			}
		}
	}

	return ddrconf;
}

static void sw_set_req(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;

	/* set sw_done to 0 */
	writel(PCTL2_SW_DONE_CLEAR, pctl_base + DDR_PCTL2_SWCTL);
}

static void sw_set_ack(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;

	/* set sw_done to 1 */
	writel(PCTL2_SW_DONE, pctl_base + DDR_PCTL2_SWCTL);
	while (1) {
		/* wait for programming to complete */
		if (readl(pctl_base + DDR_PCTL2_SWSTAT) &
		    PCTL2_SW_DONE_ACK)
			break;
	}
}

static void set_ctl_address_map(struct dram_info *dram,
				struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *pctl_base = dram->pctl;
	u32 ddrconf = cap_info->ddrconfig;
	u32 i, row;

	row = cap_info->cs0_row;
	if (sdram_params->base.dramtype == DDR4) {
		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc); i++) {
			if (ddrconf == d4_rbc_2_d3_rbc[i][1]) {
				ddrconf = d4_rbc_2_d3_rbc[i][0];
				break;
			}
		}
	}

	if (ddrconf >= ARRAY_SIZE(addrmap)) {
		printascii("set ctl address map fail\n");
		return;
	}

	sdram_copy_to_reg((u32 *)(pctl_base + DDR_PCTL2_ADDRMAP0),
			  &addrmap[ddrconf][0], ARRAY_SIZE(addrmap[ddrconf]) * 4);

	/* set unused row bits to 0xf */
	for (i = 17; i >= row; i--)
		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6 +
			     ((i - 12) * 8 / 32) * 4,
			     0xf << ((i - 12) * 8 % 32));

	if (sdram_params->base.dramtype == LPDDR3 && cap_info->row_3_4)
		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6, 1 << 31);
	if (sdram_params->base.dramtype == DDR4 && cap_info->bw == 0x1)
		setbits_le32(pctl_base + DDR_PCTL2_PCCFG, 1 << 8);

	if (cap_info->rank == 1)
		clrsetbits_le32(pctl_base + DDR_PCTL2_ADDRMAP0, 0x1f, 0x1f);
}

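/*
 * Configure the PHY PLL dividers for the given frequency, or (when 'wait'
 * is set) power the PLL up and poll for lock.
 */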
static void phy_pll_set(struct dram_info *dram, u32 freq, u32 wait)
{
	void __iomem *phy_base = dram->phy;
	u32 fbdiv, prediv, postdiv, postdiv_en;

	if (wait) {
		clrbits_le32(PHY_REG(phy_base, 0x53), PHY_PD_DISB);
		while (!(readl(PHY_REG(phy_base, 0x90)) & PHY_PLL_LOCK))
			continue;
	} else {
		freq /= MHz;
		prediv = 1;
		if (freq <= 200) {
			fbdiv = 16;
			postdiv = 2;
			postdiv_en = 1;
		} else if (freq <= 456) {
			fbdiv = 8;
			postdiv = 1;
			postdiv_en = 1;
		} else {
			fbdiv = 4;
			postdiv = 0;
			postdiv_en = 0;
		}
		writel(fbdiv & 0xff, PHY_REG(phy_base, 0x50));
		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_PBDIV_BIT9_MASK,
				(fbdiv >> 8) & 1);
		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_POSTDIV_EN_MASK,
				postdiv_en << PHY_POSTDIV_EN_SHIFT);

		clrsetbits_le32(PHY_REG(phy_base, 0x52),
				PHY_PREDIV_MASK << PHY_PREDIV_SHIFT, prediv);
		clrsetbits_le32(PHY_REG(phy_base, 0x53),
				PHY_POSTDIV_MASK << PHY_POSTDIV_SHIFT,
				postdiv << PHY_POSTDIV_SHIFT);
	}
}

static const u16 d3_phy_drv_2_ohm[][2] = {
	{PHY_DDR3_RON_455ohm, 455},
	{PHY_DDR3_RON_230ohm, 230},
	{PHY_DDR3_RON_153ohm, 153},
	{PHY_DDR3_RON_115ohm, 115},
	{PHY_DDR3_RON_91ohm, 91},
	{PHY_DDR3_RON_76ohm, 76},
	{PHY_DDR3_RON_65ohm, 65},
	{PHY_DDR3_RON_57ohm, 57},
	{PHY_DDR3_RON_51ohm, 51},
	{PHY_DDR3_RON_46ohm, 46},
	{PHY_DDR3_RON_41ohm, 41},
	{PHY_DDR3_RON_38ohm, 38},
	{PHY_DDR3_RON_35ohm, 35},
	{PHY_DDR3_RON_32ohm, 32},
	{PHY_DDR3_RON_30ohm, 30},
	{PHY_DDR3_RON_28ohm, 28},
	{PHY_DDR3_RON_27ohm, 27},
	{PHY_DDR3_RON_25ohm, 25},
	{PHY_DDR3_RON_24ohm, 24},
	{PHY_DDR3_RON_23ohm, 23},
	{PHY_DDR3_RON_22ohm, 22},
	{PHY_DDR3_RON_21ohm, 21},
	{PHY_DDR3_RON_20ohm, 20}
};

static u16 d3_phy_odt_2_ohm[][2] = {
	{PHY_DDR3_RTT_DISABLE, 0},
	{PHY_DDR3_RTT_561ohm, 561},
	{PHY_DDR3_RTT_282ohm, 282},
	{PHY_DDR3_RTT_188ohm, 188},
	{PHY_DDR3_RTT_141ohm, 141},
	{PHY_DDR3_RTT_113ohm, 113},
	{PHY_DDR3_RTT_94ohm, 94},
	{PHY_DDR3_RTT_81ohm, 81},
	{PHY_DDR3_RTT_72ohm, 72},
	{PHY_DDR3_RTT_64ohm, 64},
	{PHY_DDR3_RTT_58ohm, 58},
	{PHY_DDR3_RTT_52ohm, 52},
	{PHY_DDR3_RTT_48ohm, 48},
	{PHY_DDR3_RTT_44ohm, 44},
	{PHY_DDR3_RTT_41ohm, 41},
	{PHY_DDR3_RTT_38ohm, 38},
	{PHY_DDR3_RTT_37ohm, 37},
	{PHY_DDR3_RTT_34ohm, 34},
	{PHY_DDR3_RTT_32ohm, 32},
	{PHY_DDR3_RTT_31ohm, 31},
	{PHY_DDR3_RTT_29ohm, 29},
	{PHY_DDR3_RTT_28ohm, 28},
	{PHY_DDR3_RTT_27ohm, 27},
	{PHY_DDR3_RTT_25ohm, 25}
};

static u16 d4lp3_phy_drv_2_ohm[][2] = {
	{PHY_DDR4_LPDDR3_RON_482ohm, 482},
	{PHY_DDR4_LPDDR3_RON_244ohm, 244},
	{PHY_DDR4_LPDDR3_RON_162ohm, 162},
	{PHY_DDR4_LPDDR3_RON_122ohm, 122},
	{PHY_DDR4_LPDDR3_RON_97ohm, 97},
	{PHY_DDR4_LPDDR3_RON_81ohm, 81},
	{PHY_DDR4_LPDDR3_RON_69ohm, 69},
	{PHY_DDR4_LPDDR3_RON_61ohm, 61},
	{PHY_DDR4_LPDDR3_RON_54ohm, 54},
	{PHY_DDR4_LPDDR3_RON_48ohm, 48},
	{PHY_DDR4_LPDDR3_RON_44ohm, 44},
	{PHY_DDR4_LPDDR3_RON_40ohm, 40},
	{PHY_DDR4_LPDDR3_RON_37ohm, 37},
	{PHY_DDR4_LPDDR3_RON_34ohm, 34},
	{PHY_DDR4_LPDDR3_RON_32ohm, 32},
	{PHY_DDR4_LPDDR3_RON_30ohm, 30},
	{PHY_DDR4_LPDDR3_RON_28ohm, 28},
	{PHY_DDR4_LPDDR3_RON_27ohm, 27},
	{PHY_DDR4_LPDDR3_RON_25ohm, 25},
	{PHY_DDR4_LPDDR3_RON_24ohm, 24},
	{PHY_DDR4_LPDDR3_RON_23ohm, 23},
	{PHY_DDR4_LPDDR3_RON_22ohm, 22},
	{PHY_DDR4_LPDDR3_RON_21ohm, 21}
};

static u16 d4lp3_phy_odt_2_ohm[][2] = {
	{PHY_DDR4_LPDDR3_RTT_DISABLE, 0},
	{PHY_DDR4_LPDDR3_RTT_586ohm, 586},
	{PHY_DDR4_LPDDR3_RTT_294ohm, 294},
	{PHY_DDR4_LPDDR3_RTT_196ohm, 196},
	{PHY_DDR4_LPDDR3_RTT_148ohm, 148},
	{PHY_DDR4_LPDDR3_RTT_118ohm, 118},
	{PHY_DDR4_LPDDR3_RTT_99ohm, 99},
	{PHY_DDR4_LPDDR3_RTT_85ohm, 85},
	{PHY_DDR4_LPDDR3_RTT_76ohm, 76},
	{PHY_DDR4_LPDDR3_RTT_67ohm, 67},
	{PHY_DDR4_LPDDR3_RTT_60ohm, 60},
	{PHY_DDR4_LPDDR3_RTT_55ohm, 55},
	{PHY_DDR4_LPDDR3_RTT_50ohm, 50},
	{PHY_DDR4_LPDDR3_RTT_46ohm, 46},
	{PHY_DDR4_LPDDR3_RTT_43ohm, 43},
	{PHY_DDR4_LPDDR3_RTT_40ohm, 40},
	{PHY_DDR4_LPDDR3_RTT_38ohm, 38},
	{PHY_DDR4_LPDDR3_RTT_36ohm, 36},
	{PHY_DDR4_LPDDR3_RTT_34ohm, 34},
	{PHY_DDR4_LPDDR3_RTT_32ohm, 32},
	{PHY_DDR4_LPDDR3_RTT_31ohm, 31},
	{PHY_DDR4_LPDDR3_RTT_29ohm, 29},
	{PHY_DDR4_LPDDR3_RTT_28ohm, 28},
	{PHY_DDR4_LPDDR3_RTT_27ohm, 27}
};

static u16 lp4_phy_drv_2_ohm[][2] = {
	{PHY_LPDDR4_RON_501ohm, 501},
	{PHY_LPDDR4_RON_253ohm, 253},
	{PHY_LPDDR4_RON_168ohm, 168},
	{PHY_LPDDR4_RON_126ohm, 126},
	{PHY_LPDDR4_RON_101ohm, 101},
	{PHY_LPDDR4_RON_84ohm, 84},
	{PHY_LPDDR4_RON_72ohm, 72},
	{PHY_LPDDR4_RON_63ohm, 63},
	{PHY_LPDDR4_RON_56ohm, 56},
	{PHY_LPDDR4_RON_50ohm, 50},
	{PHY_LPDDR4_RON_46ohm, 46},
	{PHY_LPDDR4_RON_42ohm, 42},
	{PHY_LPDDR4_RON_38ohm, 38},
	{PHY_LPDDR4_RON_36ohm, 36},
	{PHY_LPDDR4_RON_33ohm, 33},
	{PHY_LPDDR4_RON_31ohm, 31},
	{PHY_LPDDR4_RON_29ohm, 29},
	{PHY_LPDDR4_RON_28ohm, 28},
	{PHY_LPDDR4_RON_26ohm, 26},
	{PHY_LPDDR4_RON_25ohm, 25},
	{PHY_LPDDR4_RON_24ohm, 24},
	{PHY_LPDDR4_RON_23ohm, 23},
	{PHY_LPDDR4_RON_22ohm, 22}
};

static u16 lp4_phy_odt_2_ohm[][2] = {
	{PHY_LPDDR4_RTT_DISABLE, 0},
	{PHY_LPDDR4_RTT_604ohm, 604},
	{PHY_LPDDR4_RTT_303ohm, 303},
	{PHY_LPDDR4_RTT_202ohm, 202},
	{PHY_LPDDR4_RTT_152ohm, 152},
	{PHY_LPDDR4_RTT_122ohm, 122},
	{PHY_LPDDR4_RTT_101ohm, 101},
	{PHY_LPDDR4_RTT_87ohm, 87},
	{PHY_LPDDR4_RTT_78ohm, 78},
	{PHY_LPDDR4_RTT_69ohm, 69},
	{PHY_LPDDR4_RTT_62ohm, 62},
	{PHY_LPDDR4_RTT_56ohm, 56},
	{PHY_LPDDR4_RTT_52ohm, 52},
	{PHY_LPDDR4_RTT_48ohm, 48},
	{PHY_LPDDR4_RTT_44ohm, 44},
	{PHY_LPDDR4_RTT_41ohm, 41},
	{PHY_LPDDR4_RTT_39ohm, 39},
	{PHY_LPDDR4_RTT_37ohm, 37},
	{PHY_LPDDR4_RTT_35ohm, 35},
	{PHY_LPDDR4_RTT_33ohm, 33},
	{PHY_LPDDR4_RTT_32ohm, 32},
	{PHY_LPDDR4_RTT_30ohm, 30},
	{PHY_LPDDR4_RTT_29ohm, 29},
	{PHY_LPDDR4_RTT_27ohm, 27}
};

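/* Convert an ODT value in ohms to the nearest LPDDR4 mode-register DQ ODT encoding */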
static u32 lp4_odt_calc(u32 odt_ohm)
{
	u32 odt;

	if (odt_ohm == 0)
		odt = LPDDR4_DQODT_DIS;
	else if (odt_ohm <= 40)
		odt = LPDDR4_DQODT_40;
	else if (odt_ohm <= 48)
		odt = LPDDR4_DQODT_48;
	else if (odt_ohm <= 60)
		odt = LPDDR4_DQODT_60;
	else if (odt_ohm <= 80)
		odt = LPDDR4_DQODT_80;
	else if (odt_ohm <= 120)
		odt = LPDDR4_DQODT_120;
	else
		odt = LPDDR4_DQODT_240;

	return odt;
}

static void *get_ddr_drv_odt_info(u32 dramtype)
{
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	void *ddr_info = 0;

	if (dramtype == DDR4)
		ddr_info = (void *)common_info + index->ddr4_index.offset * 4;
	else if (dramtype == DDR3)
		ddr_info = (void *)common_info + index->ddr3_index.offset * 4;
	else if (dramtype == LPDDR3)
		ddr_info = (void *)common_info + index->lp3_index.offset * 4;
	else if (dramtype == LPDDR4)
		ddr_info = (void *)common_info + index->lp4_index.offset * 4;
	else
		printascii("unsupported dram type\n");
	return ddr_info;
}

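/*
 * Program the LPDDR4/LPDDR4X CA (MR12) and DQ (MR14) VREF values for the
 * target frequency set point, picking the ODT-on or ODT-off VREF entry
 * depending on whether ODT is enabled at this frequency.
 */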
static void set_lp4_vref(struct dram_info *dram, struct lp4_info *lp4_info,
			 u32 freq_mhz, u32 dst_fsp, u32 dramtype)
{
	void __iomem *pctl_base = dram->pctl;
	u32 ca_vref, dq_vref;

	if (freq_mhz <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odtoff);
	else
		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odten);

	if (freq_mhz <= LP4_DQ_ODT_EN_FREQ(lp4_info->dq_odten_freq))
		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odtoff);
	else
		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odten);

	if (dramtype == LPDDR4) {
		if (ca_vref < 100)
			ca_vref = 100;
		if (ca_vref > 420)
			ca_vref = 420;

		if (ca_vref <= 300)
			ca_vref = (0 << 6) | (ca_vref - 100) / 4;
		else
			ca_vref = (1 << 6) | (ca_vref - 220) / 4;

		if (dq_vref < 100)
			dq_vref = 100;
		if (dq_vref > 420)
			dq_vref = 420;

		if (dq_vref <= 300)
			dq_vref = (0 << 6) | (dq_vref - 100) / 4;
		else
			dq_vref = (1 << 6) | (dq_vref - 220) / 4;
	} else {
		ca_vref = ca_vref * 11 / 6;
		if (ca_vref < 150)
			ca_vref = 150;
		if (ca_vref > 629)
			ca_vref = 629;

		if (ca_vref <= 449)
			ca_vref = (0 << 6) | (ca_vref - 150) / 4;
		else
			ca_vref = (1 << 6) | (ca_vref - 329) / 4;

		if (dq_vref < 150)
			dq_vref = 150;
		if (dq_vref > 629)
			dq_vref = 629;

		if (dq_vref <= 449)
			dq_vref = (0 << 6) | (dq_vref - 150) / 6;
		else
			dq_vref = (1 << 6) | (dq_vref - 329) / 6;
	}
	sw_set_req(dram);
	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			DDR_PCTL2_INIT6,
			PCTL2_MR_MASK << PCTL2_LPDDR4_MR12_SHIFT,
			ca_vref << PCTL2_LPDDR4_MR12_SHIFT);

	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			DDR_PCTL2_INIT7,
			PCTL2_MR_MASK << PCTL2_LPDDR4_MR14_SHIFT,
			dq_vref << PCTL2_LPDDR4_MR14_SHIFT);
	sw_set_ack(dram);
}

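/*
 * Set up drive strength, ODT and slew rate for both the PHY and the DRAM
 * (via mode registers) from the loader parameter tables, taking the
 * ODT-enable frequency thresholds into account.
 */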
static void set_ds_odt(struct dram_info *dram,
		       struct rv1126_sdram_params *sdram_params, u32 dst_fsp)
{
	void __iomem *phy_base = dram->phy;
	void __iomem *pctl_base = dram->pctl;
	u32 dramtype = sdram_params->base.dramtype;
	struct ddr2_3_4_lp2_3_info *ddr_info;
	struct lp4_info *lp4_info;
	u32 i, j, tmp;
	const u16 (*p_drv)[2];
	const u16 (*p_odt)[2];
	u32 drv_info, sr_info;
	u32 phy_dq_drv_ohm, phy_clk_drv_ohm, phy_ca_drv_ohm, dram_drv_ohm;
	u32 phy_odt_ohm, dram_odt_ohm;
	u32 lp4_pu_cal, phy_lp4_drv_pd_en;
	u32 phy_odt_up_en, phy_odt_dn_en;
	u32 sr_dq, sr_clk;
	u32 freq = sdram_params->base.ddr_freq;
	u32 mr1_mr3, mr11, mr22, vref_out, vref_inner;
	u32 phy_clk_drv = 0, phy_odt = 0, phy_ca_drv = 0, dram_caodt_ohm = 0;
	u32 phy_dq_drv = 0;
	u32 phy_odt_up = 0, phy_odt_dn = 0;

	ddr_info = get_ddr_drv_odt_info(dramtype);
	lp4_info = (void *)ddr_info;

	if (!ddr_info)
		return;

	/* dram odt en freq control phy drv, dram odt and phy sr */
	if (freq <= DRAMODT_EN_FREQ(ddr_info->odten_freq)) {
		drv_info = ddr_info->drv_when_odtoff;
		dram_odt_ohm = 0;
		sr_info = ddr_info->sr_when_odtoff;
		phy_lp4_drv_pd_en =
			PHY_LP4_DRV_PULLDOWN_EN_ODTOFF(lp4_info->odt_info);
	} else {
		drv_info = ddr_info->drv_when_odten;
		dram_odt_ohm = ODT_INFO_DRAM_ODT(ddr_info->odt_info);
		sr_info = ddr_info->sr_when_odten;
		phy_lp4_drv_pd_en =
			PHY_LP4_DRV_PULLDOWN_EN_ODTEN(lp4_info->odt_info);
	}
	phy_dq_drv_ohm =
		DRV_INFO_PHY_DQ_DRV(drv_info);
	phy_clk_drv_ohm =
		DRV_INFO_PHY_CLK_DRV(drv_info);
	phy_ca_drv_ohm =
		DRV_INFO_PHY_CA_DRV(drv_info);

	sr_dq = DQ_SR_INFO(sr_info);
	sr_clk = CLK_SR_INFO(sr_info);

	/* phy odt en freq control dram drv and phy odt */
	if (freq <= PHYODT_EN_FREQ(ddr_info->odten_freq)) {
		dram_drv_ohm = DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odtoff);
		lp4_pu_cal = LP4_DRV_PU_CAL_ODTOFF(lp4_info->odt_info);
		phy_odt_ohm = 0;
		phy_odt_up_en = 0;
		phy_odt_dn_en = 0;
	} else {
		dram_drv_ohm =
			DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odten);
		phy_odt_ohm = ODT_INFO_PHY_ODT(ddr_info->odt_info);
		phy_odt_up_en =
			ODT_INFO_PULLUP_EN(ddr_info->odt_info);
		phy_odt_dn_en =
			ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
		lp4_pu_cal = LP4_DRV_PU_CAL_ODTEN(lp4_info->odt_info);
	}

	if (dramtype == LPDDR4) {
		if (phy_odt_ohm) {
			phy_odt_up_en = 0;
			phy_odt_dn_en = 1;
		}
		if (freq <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
			dram_caodt_ohm = 0;
		else
			dram_caodt_ohm =
				ODT_INFO_LP4_CA_ODT(lp4_info->odt_info);
	}

	if (dramtype == DDR3) {
		p_drv = d3_phy_drv_2_ohm;
		p_odt = d3_phy_odt_2_ohm;
	} else if (dramtype == LPDDR4) {
		p_drv = lp4_phy_drv_2_ohm;
		p_odt = lp4_phy_odt_2_ohm;
	} else {
		p_drv = d4lp3_phy_drv_2_ohm;
		p_odt = d4lp3_phy_odt_2_ohm;
	}

	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
		if (phy_dq_drv_ohm <= *(*(p_drv + i) + 1)) {
			phy_dq_drv = **(p_drv + i);
			break;
		}
		if (i == 0)
			break;
	}
	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
		if (phy_clk_drv_ohm <= *(*(p_drv + i) + 1)) {
			phy_clk_drv = **(p_drv + i);
			break;
		}
		if (i == 0)
			break;
	}
	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
		if (phy_ca_drv_ohm <= *(*(p_drv + i) + 1)) {
			phy_ca_drv = **(p_drv + i);
			break;
		}
		if (i == 0)
			break;
	}
	if (!phy_odt_ohm)
		phy_odt = 0;
	else
		for (i = ARRAY_SIZE(d4lp3_phy_odt_2_ohm) - 1; ; i--) {
			if (phy_odt_ohm <= *(*(p_odt + i) + 1)) {
				phy_odt = **(p_odt + i);
				break;
			}
			if (i == 0)
				break;
		}

	if (dramtype != LPDDR4) {
		if (!phy_odt_ohm || (phy_odt_up_en && phy_odt_dn_en))
			vref_inner = 0x80;
		else if (phy_odt_up_en)
			vref_inner = (2 * dram_drv_ohm + phy_odt_ohm) * 128 /
				     (dram_drv_ohm + phy_odt_ohm);
		else
			vref_inner = phy_odt_ohm * 128 /
				     (phy_odt_ohm + dram_drv_ohm);

		if (dramtype != DDR3 && dram_odt_ohm)
			vref_out = (2 * phy_dq_drv_ohm + dram_odt_ohm) * 128 /
				   (phy_dq_drv_ohm + dram_odt_ohm);
		else
			vref_out = 0x80;
	} else {
		/* for lp4 and lp4x */
		if (phy_odt_ohm)
			vref_inner =
				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odten) *
				 256) / 1000;
		else
			vref_inner =
				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odtoff) *
				 256) / 1000;

		vref_out = 0x80;
	}

	/* default ZQCALIB bypass mode */
	clrsetbits_le32(PHY_REG(phy_base, 0x100), 0x1f, phy_ca_drv);
	clrsetbits_le32(PHY_REG(phy_base, 0x101), 0x1f, phy_ca_drv);
	clrsetbits_le32(PHY_REG(phy_base, 0x102), 0x1f, phy_clk_drv);
	clrsetbits_le32(PHY_REG(phy_base, 0x103), 0x1f, phy_clk_drv);
	if (dramtype == LPDDR4) {
		clrsetbits_le32(PHY_REG(phy_base, 0x107), 0x1f, phy_clk_drv);
		clrsetbits_le32(PHY_REG(phy_base, 0x108), 0x1f, phy_clk_drv);
	} else {
		clrsetbits_le32(PHY_REG(phy_base, 0x107), 0x1f, phy_ca_drv);
		clrsetbits_le32(PHY_REG(phy_base, 0x108), 0x1f, phy_ca_drv);
	}
	/* clk / cmd slew rate */
	clrsetbits_le32(PHY_REG(phy_base, 0x106), 0x1f, sr_clk);

	phy_lp4_drv_pd_en = (~phy_lp4_drv_pd_en) & 1;
	if (phy_odt_up_en)
		phy_odt_up = phy_odt;
	if (phy_odt_dn_en)
		phy_odt_dn = phy_odt;

	for (i = 0; i < 4; i++) {
		j = 0x110 + i * 0x10;
		clrsetbits_le32(PHY_REG(phy_base, j + 1), 0x1f, phy_odt_up);
		clrsetbits_le32(PHY_REG(phy_base, j), 0x1f, phy_odt_dn);
		clrsetbits_le32(PHY_REG(phy_base, j + 2), 0x1f, phy_dq_drv);
		clrsetbits_le32(PHY_REG(phy_base, j + 3), 0x1f, phy_dq_drv);
		writel(vref_inner, PHY_REG(phy_base, 0x118 + i * 0x10));

		clrsetbits_le32(PHY_REG(phy_base, 0x114 + i * 0x10),
				1 << 3, phy_lp4_drv_pd_en << 3);
		if (dramtype == LPDDR4)
			clrbits_le32(PHY_REG(phy_base, 0x114 + i * 0x10), BIT(5));
		/* dq slew rate */
		clrsetbits_le32(PHY_REG(phy_base, 0x117 + i * 0x10),
				0x1f, sr_dq);
	}

	/* reg_rx_vref_value_update */
	setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);

	/* RAM VREF */
	writel(vref_out, PHY_REG(phy_base, 0x105));
	if (dramtype == LPDDR3)
		udelay(100);

	if (dramtype == LPDDR4)
		set_lp4_vref(dram, lp4_info, freq, dst_fsp, dramtype);

	if (dramtype == DDR3 || dramtype == DDR4) {
		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT3);
		mr1_mr3 = mr1_mr3 >> PCTL2_DDR34_MR1_SHIFT & PCTL2_MR_MASK;
	} else {
		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT4);
		mr1_mr3 = mr1_mr3 >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK;
	}

	if (dramtype == DDR3) {
		mr1_mr3 &= ~(DDR3_DS_MASK | DDR3_RTT_NOM_MASK);
		if (dram_drv_ohm == 34)
			mr1_mr3 |= DDR3_DS_34;

		if (dram_odt_ohm == 0)
			mr1_mr3 |= DDR3_RTT_NOM_DIS;
		else if (dram_odt_ohm <= 40)
			mr1_mr3 |= DDR3_RTT_NOM_40;
		else if (dram_odt_ohm <= 60)
			mr1_mr3 |= DDR3_RTT_NOM_60;
		else
			mr1_mr3 |= DDR3_RTT_NOM_120;

	} else if (dramtype == DDR4) {
		mr1_mr3 &= ~(DDR4_DS_MASK | DDR4_RTT_NOM_MASK);
		if (dram_drv_ohm == 48)
			mr1_mr3 |= DDR4_DS_48;

		if (dram_odt_ohm == 0)
			mr1_mr3 |= DDR4_RTT_NOM_DIS;
		else if (dram_odt_ohm <= 34)
			mr1_mr3 |= DDR4_RTT_NOM_34;
		else if (dram_odt_ohm <= 40)
			mr1_mr3 |= DDR4_RTT_NOM_40;
		else if (dram_odt_ohm <= 48)
			mr1_mr3 |= DDR4_RTT_NOM_48;
		else if (dram_odt_ohm <= 60)
			mr1_mr3 |= DDR4_RTT_NOM_60;
		else
			mr1_mr3 |= DDR4_RTT_NOM_120;

	} else if (dramtype == LPDDR3) {
		if (dram_drv_ohm <= 34)
			mr1_mr3 |= LPDDR3_DS_34;
		else if (dram_drv_ohm <= 40)
			mr1_mr3 |= LPDDR3_DS_40;
		else if (dram_drv_ohm <= 48)
			mr1_mr3 |= LPDDR3_DS_48;
		else if (dram_drv_ohm <= 60)
			mr1_mr3 |= LPDDR3_DS_60;
		else if (dram_drv_ohm <= 80)
			mr1_mr3 |= LPDDR3_DS_80;

		if (dram_odt_ohm == 0)
			lp3_odt_value = LPDDR3_ODT_DIS;
		else if (dram_odt_ohm <= 60)
			lp3_odt_value = LPDDR3_ODT_60;
		else if (dram_odt_ohm <= 120)
			lp3_odt_value = LPDDR3_ODT_120;
		else
			lp3_odt_value = LPDDR3_ODT_240;
	} else { /* for lpddr4 and lpddr4x */
		/* MR3 for lp4 PU-CAL and PDDS */
		mr1_mr3 &= ~(LPDDR4_PDDS_MASK | LPDDR4_PU_CAL_MASK);
		mr1_mr3 |= lp4_pu_cal;

		tmp = lp4_odt_calc(dram_drv_ohm);
		if (!tmp)
			tmp = LPDDR4_PDDS_240;
		mr1_mr3 |= (tmp << LPDDR4_PDDS_SHIFT);

		/* MR11 for lp4 ca odt, dq odt set */
		mr11 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT6);
		mr11 = mr11 >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK;

		mr11 &= ~(LPDDR4_DQODT_MASK | LPDDR4_CAODT_MASK);

		tmp = lp4_odt_calc(dram_odt_ohm);
		mr11 |= (tmp << LPDDR4_DQODT_SHIFT);

		tmp = lp4_odt_calc(dram_caodt_ohm);
		mr11 |= (tmp << LPDDR4_CAODT_SHIFT);
		sw_set_req(dram);
		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT6,
				PCTL2_MR_MASK << PCTL2_LPDDR4_MR11_SHIFT,
				mr11 << PCTL2_LPDDR4_MR11_SHIFT);
		sw_set_ack(dram);

		/* MR22 for soc odt/odt-ck/odt-cs/odt-ca */
		mr22 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT7);
		mr22 = mr22 >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK;
		mr22 &= ~LPDDR4_SOC_ODT_MASK;

		tmp = lp4_odt_calc(phy_odt_ohm);
		mr22 |= tmp;
		mr22 = mr22 |
		       (LP4_ODTE_CK_EN(lp4_info->cs_drv_ca_odt_info) <<
			LPDDR4_ODTE_CK_SHIFT) |
		       (LP4_ODTE_CS_EN(lp4_info->cs_drv_ca_odt_info) <<
			LPDDR4_ODTE_CS_SHIFT) |
		       (LP4_ODTD_CA_EN(lp4_info->cs_drv_ca_odt_info) <<
			LPDDR4_ODTD_CA_SHIFT);

		sw_set_req(dram);
		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT7,
				PCTL2_MR_MASK << PCTL2_LPDDR4_MR22_SHIFT,
				mr22 << PCTL2_LPDDR4_MR22_SHIFT);
		sw_set_ack(dram);
	}

	if (dramtype == DDR4 || dramtype == DDR3) {
		sw_set_req(dram);
		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT3,
				PCTL2_MR_MASK << PCTL2_DDR34_MR1_SHIFT,
				mr1_mr3 << PCTL2_DDR34_MR1_SHIFT);
		sw_set_ack(dram);
	} else {
		sw_set_req(dram);
		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT4,
				PCTL2_MR_MASK << PCTL2_LPDDR234_MR3_SHIFT,
				mr1_mr3 << PCTL2_LPDDR234_MR3_SHIFT);
		sw_set_ack(dram);
	}
}

static int sdram_cmd_dq_path_remap(struct dram_info *dram,
				   struct rv1126_sdram_params *sdram_params)
{
	void __iomem *phy_base = dram->phy;
	u32 dramtype = sdram_params->base.dramtype;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	map_info = (struct dq_map_info *)((void *)common_info +
		    index->dq_map_index.offset * 4);

	if (dramtype <= LPDDR4)
		writel((map_info->byte_map[dramtype / 4] >>
			((dramtype % 4) * 8)) & 0xff,
		       PHY_REG(phy_base, 0x4f));

	return 0;
}

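/*
 * Basic PHY setup: remap the command/DQ byte paths, program the PHY PLL,
 * load the per-frequency PHY register table from sdram_params and enable
 * only the byte lanes that match the detected bus width.
 */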
static void phy_cfg(struct dram_info *dram,
		    struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *phy_base = dram->phy;
	u32 i, dq_map, tmp;
	u32 byte1 = 0, byte0 = 0;

	sdram_cmd_dq_path_remap(dram, sdram_params);

	phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 0);
	for (i = 0; sdram_params->phy_regs.phy[i][0] != 0xFFFFFFFF; i++) {
		writel(sdram_params->phy_regs.phy[i][1],
		       phy_base + sdram_params->phy_regs.phy[i][0]);
	}

	clrbits_le32(PHY_REG(phy_base, 0x62), BIT(5));
	dq_map = readl(PHY_REG(phy_base, 0x4f));
	for (i = 0; i < 4; i++) {
		if (((dq_map >> (i * 2)) & 0x3) == 0)
			byte0 = i;
		if (((dq_map >> (i * 2)) & 0x3) == 1)
			byte1 = i;
	}

	tmp = readl(PHY_REG(phy_base, 0xf)) & (~PHY_DQ_WIDTH_MASK);
	if (cap_info->bw == 2)
		tmp |= 0xf;
	else if (cap_info->bw == 1)
		tmp |= ((1 << byte0) | (1 << byte1));
	else
		tmp |= (1 << byte0);

	writel(tmp, PHY_REG(phy_base, 0xf));

	/* lpddr4 odt control by phy, enable cs0 odt */
	if (sdram_params->base.dramtype == LPDDR4)
		clrsetbits_le32(PHY_REG(phy_base, 0x20), 0x7 << 4,
				(1 << 6) | (1 << 4));
	/* for ca training ca vref choose range1 */
	setbits_le32(PHY_REG(phy_base, 0x1e), BIT(6));
	setbits_le32(PHY_REG(phy_base, 0x1f), BIT(6));
	/* for wr training PHY_0x7c[5], choose range0 */
	clrbits_le32(PHY_REG(phy_base, 0x7c), BIT(5));
}

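/* Toggle the refresh-update bit in RFSHCTL3 so updated refresh settings take effect */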
static int update_refresh_reg(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;
	u32 ret;

	ret = readl(pctl_base + DDR_PCTL2_RFSHCTL3) ^ (1 << 1);
	writel(ret, pctl_base + DDR_PCTL2_RFSHCTL3);

	return 0;
}

/*
 * rank = 1: cs0
 * rank = 2: cs1
 */
int read_mr(struct dram_info *dram, u32 rank, u32 mr_num, u32 dramtype)
{
	u32 ret;
	u32 i, temp;
	u32 dqmap;

	void __iomem *pctl_base = dram->pctl;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	map_info = (struct dq_map_info *)((void *)common_info +
		    index->dq_map_index.offset * 4);

	if (dramtype == LPDDR2)
		dqmap = map_info->lp2_dq0_7_map;
	else
		dqmap = map_info->lp3_dq0_7_map;

	pctl_read_mr(pctl_base, rank, mr_num);

	ret = (readl(&dram->ddrgrf->ddr_grf_status[0]) & 0xff);

	if (dramtype != LPDDR4) {
		temp = 0;
		for (i = 0; i < 8; i++) {
			temp = temp | (((ret >> i) & 0x1) <<
				       ((dqmap >> (i * 4)) & 0xf));
		}
	} else {
		temp = (readl(&dram->ddrgrf->ddr_grf_status[1]) & 0xff);
	}

	return temp;
}

/* autorefresh must be disabled before calling this function */
void send_a_refresh(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;

	while (readl(pctl_base + DDR_PCTL2_DBGSTAT) & 0x3)
		continue;
	writel(0x3, pctl_base + DDR_PCTL2_DBGCMD);
}

static void enter_sr(struct dram_info *dram, u32 en)
{
	void __iomem *pctl_base = dram->pctl;

	if (en) {
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
		while (1) {
			if (((readl(pctl_base + DDR_PCTL2_STAT) &
			      PCTL2_SELFREF_TYPE_MASK) ==
			     PCTL2_SELFREF_TYPE_SR_NOT_AUTO) &&
			    ((readl(pctl_base + DDR_PCTL2_STAT) &
			      PCTL2_OPERATING_MODE_MASK) ==
			     PCTL2_OPERATING_MODE_SR))
				break;
		}
	} else {
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
		while ((readl(pctl_base + DDR_PCTL2_STAT) &
			PCTL2_OPERATING_MODE_MASK) == PCTL2_OPERATING_MODE_SR)
			continue;
	}
}

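/*
 * Read back the left/right loop inverter delay for each DQ bit (selected
 * via the *_invdelaysel registers) and store it into the corresponding
 * per-bit skew register of every byte group.
 */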
void record_dq_prebit(struct dram_info *dram)
{
	u32 group, i, tmp;
	void __iomem *phy_base = dram->phy;

	for (group = 0; group < 4; group++) {
		for (i = 0; i < ARRAY_SIZE(dq_sel); i++) {
			/* l_loop_invdelaysel */
			writel(dq_sel[i][0], PHY_REG(phy_base,
						     grp_addr[group] + 0x2c));
			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2e));
			writel(tmp, PHY_REG(phy_base,
					    grp_addr[group] + dq_sel[i][1]));

			/* r_loop_invdelaysel */
			writel(dq_sel[i][0], PHY_REG(phy_base,
						     grp_addr[group] + 0x2d));
			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2f));
			writel(tmp, PHY_REG(phy_base,
					    grp_addr[group] + dq_sel[i][2]));
		}
	}
}

static void update_dq_rx_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrsetbits_le32(PHY_REG(phy_base, 0x70), BIT(1) | BIT(6) | BIT(4),
			BIT(4));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(4));
}

static void update_dq_tx_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));
	setbits_le32(PHY_REG(phy_base, 0x2), BIT(3));
	setbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
}

static void update_ca_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrbits_le32(PHY_REG(phy_base, 0x25), BIT(2));
	setbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
}

/*
 * dir: 0: de-skew = delta_*
 *      1: de-skew = reg val - delta_*
 * delta_dif: value for differential signals: clk
 * delta_sig: value for single-ended signals: ca/cmd
 */
static void modify_ca_deskew(struct dram_info *dram, u32 dir, int delta_dif,
			     int delta_sig, u32 cs, u32 dramtype)
{
	void __iomem *phy_base = dram->phy;
	u32 i, cs_en, tmp;
	u32 dfi_lp_stat = 0;

	if (cs == 0)
		cs_en = 1;
	else if (cs == 2)
		cs_en = 2;
	else
		cs_en = 3;

	if (dramtype == LPDDR4 &&
	    ((readl(PHY_REG(phy_base, 0x60)) & BIT(5)) == 0)) {
		dfi_lp_stat = 1;
		setbits_le32(PHY_REG(phy_base, 0x60), BIT(5));
	}
	enter_sr(dram, 1);

	for (i = 0; i < 0x20; i++) {
		if (dir == DESKEW_MDF_ABS_VAL)
			tmp = delta_sig;
		else
			tmp = readl(PHY_REG(phy_base, 0x150 + i)) +
			      delta_sig;
		writel(tmp, PHY_REG(phy_base, 0x150 + i));
	}

	if (dir == DESKEW_MDF_ABS_VAL)
		tmp = delta_dif;
	else
		tmp = readl(PHY_REG(phy_base, 0x150 + 0x17)) -
		      delta_sig + delta_dif;
	writel(tmp, PHY_REG(phy_base, 0x150 + 0x17));
	writel(tmp, PHY_REG(phy_base, 0x150 + 0x18));
	if (dramtype == LPDDR4) {
		writel(tmp, PHY_REG(phy_base, 0x150 + 0x4));
		writel(tmp, PHY_REG(phy_base, 0x150 + 0xa));

		clrbits_le32(PHY_REG(phy_base, 0x10), cs_en << 6);
		update_ca_prebit(dram);
	}
	enter_sr(dram, 0);

	if (dfi_lp_stat)
		clrbits_le32(PHY_REG(phy_base, 0x60), BIT(5));
}

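/*
 * Return the smallest de-skew value currently programmed for the given
 * signal class (RX/TX DQ/DQS/DM or CA), considering only enabled byte lanes.
 */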
static u32 get_min_value(struct dram_info *dram, u32 signal, u32 rank)
{
	u32 i, j, offset = 0;
	u32 min = 0x3f;
	void __iomem *phy_base = dram->phy;
	u32 byte_en;

	if (signal == SKEW_TX_SIGNAL)
		offset = 8;

	if (signal == SKEW_CA_SIGNAL) {
		for (i = 0; i < 0x20; i++)
			min = MIN(min, readl(PHY_REG(phy_base, 0x150 + i)));
	} else {
		byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;
		for (j = offset; j < offset + rank * 4; j++) {
			if (!((byte_en >> (j % 4)) & 1))
				continue;
			for (i = 0; i < 11; i++)
				min = MIN(min,
					  readl(PHY_REG(phy_base,
							dqs_dq_skew_adr[j] +
							i)));
		}
	}

	return min;
}

static u32 low_power_update(struct dram_info *dram, u32 en)
{
	void __iomem *pctl_base = dram->pctl;
	u32 lp_stat = 0;

	if (en) {
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, en & 0xf);
	} else {
		lp_stat = readl(pctl_base + DDR_PCTL2_PWRCTL) & 0xf;
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 0xf);
	}

	return lp_stat;
}

/*
 * signal: SKEW_RX_SIGNAL or SKEW_TX_SIGNAL
 * dir: 0: de-skew = delta_*
 *      1: de-skew = reg val - delta_*
 * delta_dif: value for differential signals: dqs
 * delta_sig: value for single-ended signals: dq/dm
 */
static void modify_dq_deskew(struct dram_info *dram, u32 signal, u32 dir,
			     int delta_dif, int delta_sig, u32 rank)
{
	void __iomem *phy_base = dram->phy;
	u32 i, j, tmp, offset;
	u32 byte_en;

	byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;

	if (signal == SKEW_RX_SIGNAL)
		offset = 0;
	else
		offset = 8;

	for (j = offset; j < (offset + rank * 4); j++) {
		if (!((byte_en >> (j % 4)) & 1))
			continue;
		for (i = 0; i < 0x9; i++) {
			if (dir == DESKEW_MDF_ABS_VAL)
				tmp = delta_sig;
			else
				tmp = delta_sig + readl(PHY_REG(phy_base,
								dqs_dq_skew_adr[j] +
								i));
			writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + i));
		}
		if (dir == DESKEW_MDF_ABS_VAL)
			tmp = delta_dif;
		else
			tmp = delta_dif + readl(PHY_REG(phy_base,
							dqs_dq_skew_adr[j] + 9));
		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 9));
		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 0xa));
	}
	if (signal == SKEW_RX_SIGNAL)
		update_dq_rx_prebit(dram);
	else
		update_dq_tx_prebit(dram);
}

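/*
 * Read gate training for one chip select: temporarily force a known PHY ODT
 * (non-LPDDR4), run the hardware gate training and return a non-zero lane
 * mask if any enabled byte lane failed.
 */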
| 1536 | static int data_training_rg(struct dram_info *dram, u32 cs, u32 dramtype) |
| 1537 | { |
| 1538 | void __iomem *phy_base = dram->phy; |
| 1539 | u32 ret; |
| 1540 | u32 dis_auto_zq = 0; |
| 1541 | u32 odt_val_up, odt_val_dn; |
| 1542 | u32 i, j; |
| 1543 | |
| 1544 | odt_val_dn = readl(PHY_REG(phy_base, 0x110)); |
| 1545 | odt_val_up = readl(PHY_REG(phy_base, 0x111)); |
| 1546 | |
| 1547 | if (dramtype != LPDDR4) { |
| 1548 | for (i = 0; i < 4; i++) { |
| 1549 | j = 0x110 + i * 0x10; |
| 1550 | writel(PHY_DDR4_LPDDR3_RTT_294ohm, |
| 1551 | PHY_REG(phy_base, j)); |
| 1552 | writel(PHY_DDR4_LPDDR3_RTT_DISABLE, |
| 1553 | PHY_REG(phy_base, j + 0x1)); |
| 1554 | } |
| 1555 | } |
| 1556 | dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl); |
| 1557 | /* use normal read mode for data training */ |
| 1558 | clrbits_le32(PHY_REG(phy_base, 0xc), BIT(1)); |
| 1559 | |
| 1560 | if (dramtype == DDR4) |
| 1561 | setbits_le32(PHY_REG(phy_base, 0xc), BIT(1)); |
| 1562 | |
| 1563 | /* choose training cs */ |
| 1564 | clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs)); |
| 1565 | /* enable gate training */ |
| 1566 | clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 1); |
| 1567 | udelay(50); |
| 1568 | ret = readl(PHY_REG(phy_base, 0x91)); |
| 1569 | /* disable gate training */ |
| 1570 | clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 0); |
| 1571 | clrbits_le32(PHY_REG(phy_base, 2), 0x30); |
| 1572 | pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq); |
| 1573 | |
| 1574 | ret = (ret & 0x2f) ^ (readl(PHY_REG(phy_base, 0xf)) & 0xf); |
| 1575 | |
| 1576 | if (dramtype != LPDDR4) { |
| 1577 | for (i = 0; i < 4; i++) { |
| 1578 | j = 0x110 + i * 0x10; |
| 1579 | writel(odt_val_dn, PHY_REG(phy_base, j)); |
| 1580 | writel(odt_val_up, PHY_REG(phy_base, j + 0x1)); |
| 1581 | } |
| 1582 | } |
| 1583 | return ret; |
| 1584 | } |
| 1585 | |
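| | /* |
| |  * Write leveling for the selected CS: pass the MR1 value from INIT3 to |
| |  * the PHY, mask the other rank's output on dual-rank DDR3/DDR4, start |
| |  * the PHY write-leveling state machine and wait until every enabled |
| |  * byte reports done. |
| |  */ |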
| 1586 | static int data_training_wl(struct dram_info *dram, u32 cs, u32 dramtype, |
| 1587 | u32 rank) |
| 1588 | { |
| 1589 | void __iomem *pctl_base = dram->pctl; |
| 1590 | void __iomem *phy_base = dram->phy; |
| 1591 | u32 dis_auto_zq = 0; |
| 1592 | u32 tmp; |
| 1593 | u32 cur_fsp; |
| 1594 | u32 timeout_us = 1000; |
| 1595 | |
| 1596 | dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl); |
| 1597 | |
| 1598 | clrbits_le32(PHY_REG(phy_base, 0x7a), 0x1); |
| 1599 | |
| 1600 | cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3; |
| 1601 | tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_INIT3) & |
| 1602 | 0xffff; |
| 1603 | writel(tmp & 0xff, PHY_REG(phy_base, 0x3)); |
| 1604 | |
| 1605 | /* disable the other CS's output */ |
| 1606 | if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2) |
| 1607 | pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp | (1 << 12), |
| 1608 | dramtype); |
| 1609 | if (dramtype == DDR3 || dramtype == DDR4) |
| 1610 | writel(0x40 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4)); |
| 1611 | else |
| 1612 | writel(0x80 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4)); |
| 1613 | |
| 1614 | /* choose cs */ |
| 1615 | clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2), |
| 1616 | ((0x2 >> cs) << 6) | (0 << 2)); |
| 1617 | /* enable write leveling */ |
| 1618 | clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2), |
| 1619 | ((0x2 >> cs) << 6) | (1 << 2)); |
| 1620 | |
| 1621 | while (1) { |
| 1622 | if ((readl(PHY_REG(phy_base, 0x92)) & 0xf) == |
| 1623 | (readl(PHY_REG(phy_base, 0xf)) & 0xf)) |
| 1624 | break; |
| 1625 | |
| 1626 | udelay(1); |
| 1627 | if (timeout_us-- == 0) { |
| 1628 | printascii("error: write leveling timeout\n"); |
| 1629 | while (1) |
| 1630 | ; |
| 1631 | } |
| 1632 | } |
| 1633 | |
| 1634 | /* disable write leveling */ |
| 1635 | clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2), |
| 1636 | ((0x2 >> cs) << 6) | (0 << 2)); |
| 1637 | clrsetbits_le32(PHY_REG(phy_base, 2), 0x3 << 6, 0 << 6); |
| 1638 | |
| 1639 | /* re-enable the other CS's output */ |
| 1640 | if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2) |
| 1641 | pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp & ~(1 << 12), |
| 1642 | dramtype); |
| 1643 | |
| 1644 | pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq); |
| 1645 | |
| 1646 | return 0; |
| 1647 | } |
| 1648 | |
| 1649 | char pattern[32] = { |
| 1650 | 0xaa, 0x55, 0xaa, 0x55, 0x55, 0xaa, 0x55, 0xaa, |
| 1651 | 0x55, 0xaa, 0x55, 0xaa, 0xaa, 0x55, 0xaa, 0x55, |
| 1652 | 0x55, 0x55, 0xaa, 0xaa, 0xaa, 0xaa, 0x55, 0x55, |
| 1653 | 0xaa, 0xaa, 0x55, 0x55, 0x55, 0x55, 0xaa, 0xaa |
| 1654 | }; |
| 1655 | |
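| | /* |
| |  * Hardware read training for the selected CS: mirror the controller's |
| |  * refresh timing into the PHY, program the DQ map for DDR4, start the |
| |  * automatic read training and check the per-channel result registers. |
| |  */ |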
| 1656 | static int data_training_rd(struct dram_info *dram, u32 cs, u32 dramtype, |
| 1657 | u32 mhz) |
| 1658 | { |
| 1659 | void __iomem *pctl_base = dram->pctl; |
| 1660 | void __iomem *phy_base = dram->phy; |
| 1661 | u32 trefi_1x, trfc_1x; |
| 1662 | u32 dis_auto_zq = 0; |
| 1663 | u32 timeout_us = 1000; |
| 1664 | u32 dqs_default; |
| 1665 | u32 cur_fsp; |
| 1666 | u32 vref_inner; |
| 1667 | u32 i; |
| 1668 | struct sdram_head_info_index_v2 *index = |
| 1669 | (struct sdram_head_info_index_v2 *)common_info; |
| 1670 | struct dq_map_info *map_info; |
| 1671 | |
| 1672 | vref_inner = readl(PHY_REG(phy_base, 0x128)) & 0xff; |
| 1673 | if (dramtype == DDR3 && vref_inner == 0x80) { |
| 1674 | for (i = 0; i < 4; i++) |
| 1675 | writel(vref_inner - 0xa, |
| 1676 | PHY_REG(phy_base, 0x118 + i * 0x10)); |
| 1677 | |
| 1678 | /* reg_rx_vref_value_update */ |
| 1679 | setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5); |
| 1680 | clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5); |
| 1681 | } |
| 1682 | |
| 1683 | map_info = (struct dq_map_info *)((void *)common_info + |
| 1684 | index->dq_map_index.offset * 4); |
| 1685 | /* only one CS at a time: 0 = cs0, 1 = cs1 */ |
| 1686 | if (cs > 1) |
| 1687 | return -1; |
| 1688 | |
| 1689 | dqs_default = 0xf; |
| 1690 | dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl); |
| 1691 | |
| 1692 | cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3; |
| 1693 | /* config refresh timing */ |
| 1694 | trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + |
| 1695 | DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32; |
| 1696 | trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + |
| 1697 | DDR_PCTL2_RFSHTMG) & 0x3ff; |
| 1698 | /* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */ |
| 1699 | clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff); |
| 1700 | clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f); |
| 1701 | /* reg_phy_trfc */ |
| 1702 | clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x); |
| 1703 | /* reg_max_refi_cnt */ |
| 1704 | clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4); |
| 1705 | |
| 1706 | /* choose training cs */ |
| 1707 | clrsetbits_le32(PHY_REG(phy_base, 0x71), 0x3 << 6, (0x2 >> cs) << 6); |
| 1708 | |
| 1709 | /* set dq map for ddr4 */ |
| 1710 | if (dramtype == DDR4) { |
| 1711 | setbits_le32(PHY_REG(phy_base, 0x70), BIT(7)); |
| 1712 | for (i = 0; i < 4; i++) { |
| 1713 | writel((map_info->ddr4_dq_map[cs * 2] >> |
| 1714 | ((i % 4) * 8)) & 0xff, |
| 1715 | PHY_REG(phy_base, 0x238 + i)); |
| 1716 | writel((map_info->ddr4_dq_map[cs * 2 + 1] >> |
| 1717 | ((i % 4) * 8)) & 0xff, |
| 1718 | PHY_REG(phy_base, 0x2b8 + i)); |
| 1719 | } |
| 1720 | } |
| 1721 | |
| 1722 | /* cha_l reg_l_rd_train_dqs_default[5:0] */ |
| 1723 | clrsetbits_le32(PHY_REG(phy_base, 0x230), 0x3f, dqs_default); |
| 1724 | /* cha_h reg_h_rd_train_dqs_default[5:0] */ |
| 1725 | clrsetbits_le32(PHY_REG(phy_base, 0x234), 0x3f, dqs_default); |
| 1726 | /* chb_l reg_l_rd_train_dqs_default[5:0] */ |
| 1727 | clrsetbits_le32(PHY_REG(phy_base, 0x2b0), 0x3f, dqs_default); |
| 1728 | /* chb_h reg_h_rd_train_dqs_default[5:0] */ |
| 1729 | clrsetbits_le32(PHY_REG(phy_base, 0x2b4), 0x3f, dqs_default); |
| 1730 | |
| 1731 | /* Choose the read train auto mode */ |
| 1732 | clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x1); |
| 1733 | /* Start the automatic read training */ |
| 1734 | clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x3); |
| 1735 | |
| 1736 | /* Wait for the training to complete */ |
| 1737 | while (1) { |
| 1738 | if ((readl(PHY_REG(phy_base, 0x93)) >> 7) & 0x1) |
| 1739 | break; |
| 1740 | |
| 1741 | udelay(1); |
| 1742 | if (timeout_us-- == 0) { |
| 1743 | printascii("error: read training timeout\n"); |
| 1744 | return -1; |
| 1745 | } |
| 1746 | } |
| 1747 | |
| 1748 | /* Check the read train state */ |
| 1749 | if ((readl(PHY_REG(phy_base, 0x240)) & 0x3) || |
| 1750 | (readl(PHY_REG(phy_base, 0x2c0)) & 0x3)) { |
| 1751 | printascii("error: read training error\n"); |
| 1752 | return -1; |
| 1753 | } |
| 1754 | |
| 1755 | /* Exit read training by clearing the enable bit */ |
| 1756 | clrbits_le32(PHY_REG(phy_base, 0x70), BIT(1)); |
| 1757 | |
| 1758 | pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq); |
| 1759 | |
| 1760 | if (dramtype == DDR3 && vref_inner == 0x80) { |
| 1761 | for (i = 0; i < 4; i++) |
| 1762 | writel(vref_inner, |
| 1763 | PHY_REG(phy_base, 0x118 + i * 0x10)); |
| 1764 | |
| 1765 | /* reg_rx_vref_value_update */ |
| 1766 | setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5); |
| 1767 | clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5); |
| 1768 | } |
| 1769 | |
| 1770 | return 0; |
| 1771 | } |
| 1772 | |
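| | /* |
| |  * Hardware write training for the selected CS. LPDDR3 at or below |
| |  * 400 MHz is trained with temporarily reduced CL/CWL; for LPDDR4 the |
| |  * resulting write vref is saved to fsp_param[] for the target set point. |
| |  */ |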
| 1773 | static int data_training_wr(struct dram_info *dram, u32 cs, u32 dramtype, |
| 1774 | u32 mhz, u32 dst_fsp) |
| 1775 | { |
| 1776 | void __iomem *pctl_base = dram->pctl; |
| 1777 | void __iomem *phy_base = dram->phy; |
| 1778 | u32 trefi_1x, trfc_1x; |
| 1779 | u32 dis_auto_zq = 0; |
| 1780 | u32 timeout_us = 1000; |
| 1781 | u32 cur_fsp; |
| 1782 | u32 mr_tmp, cl, cwl, phy_fsp, offset = 0; |
| 1783 | |
| 1784 | if (dramtype == LPDDR3 && mhz <= 400) { |
| 1785 | phy_fsp = (readl(PHY_REG(phy_base, 0xc)) >> 0x2) & 0x3; |
| 1786 | offset = (phy_fsp == 0) ? 0x5 : 0x387 + (phy_fsp - 1) * 3; |
| 1787 | cl = readl(PHY_REG(phy_base, offset)); |
| 1788 | cwl = readl(PHY_REG(phy_base, offset + 2)); |
| 1789 | |
| 1790 | clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, 0x8); |
| 1791 | clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, 0x4); |
| 1792 | pctl_write_mr(dram->pctl, 3, 2, 0x6, dramtype); |
| 1793 | } |
| 1794 | |
| 1795 | dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl); |
| 1796 | |
| 1797 | /* PHY_0x7b[7:0] reg_train_col_addr[7:0] */ |
| 1798 | clrsetbits_le32(PHY_REG(phy_base, 0x7b), 0xff, 0x0); |
| 1799 | /* PHY_0x7c[4:2] reg_train_ba_addr[2:0] */ |
| 1800 | clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x7 << 2, 0x0 << 2); |
| 1801 | /* PHY_0x7c[1:0] reg_train_col_addr[9:8] */ |
| 1802 | clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3, 0x0); |
| 1803 | /* PHY_0x7d[7:0] reg_train_row_addr[7:0] */ |
| 1804 | clrsetbits_le32(PHY_REG(phy_base, 0x7d), 0xff, 0x0); |
| 1805 | /* PHY_0x7e[7:0] reg_train_row_addr[15:8] */ |
| 1806 | clrsetbits_le32(PHY_REG(phy_base, 0x7e), 0xff, 0x0); |
| 1807 | |
| 1808 | /* PHY_0x71[3] wrtrain_check_data_value_random_gen */ |
| 1809 | clrbits_le32(PHY_REG(phy_base, 0x71), BIT(3)); |
| 1810 | |
| 1811 | /* config refresh timing */ |
| 1812 | cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3; |
| 1813 | trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + |
| 1814 | DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32; |
| 1815 | trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + |
| 1816 | DDR_PCTL2_RFSHTMG) & 0x3ff; |
| 1817 | /* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */ |
| 1818 | clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff); |
| 1819 | clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f); |
| 1820 | /* reg_phy_trfc */ |
| 1821 | clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x); |
| 1822 | /* reg_max_refi_cnt */ |
| 1823 | clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4); |
| 1824 | |
| 1825 | /* choose training cs */ |
| 1826 | clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3 << 6, (0x2 >> cs) << 6); |
| 1827 | |
| 1828 | /* PHY_0x7a [4] reg_wr_train_dqs_default_bypass */ |
| 1829 | /* 0: use the write-leveling value */ |
| 1830 | /* 1: use regs 0x233, 0x237, 0x2b3, 0x2b7 */ |
| 1831 | setbits_le32(PHY_REG(phy_base, 0x7a), BIT(4)); |
| 1832 | |
| 1833 | /* PHY_0x7a [0] reg_dq_wr_train_auto */ |
| 1834 | setbits_le32(PHY_REG(phy_base, 0x7a), 0x1); |
| 1835 | |
| 1836 | /* PHY_0x7a [1] reg_dq_wr_train_en */ |
| 1837 | setbits_le32(PHY_REG(phy_base, 0x7a), BIT(1)); |
| 1838 | |
| 1839 | send_a_refresh(dram); |
| 1840 | |
| 1841 | while (1) { |
| 1842 | if ((readl(PHY_REG(phy_base, 0x92)) >> 7) & 0x1) |
| 1843 | break; |
| 1844 | |
| 1845 | udelay(1); |
| 1846 | if (timeout_us-- == 0) { |
| 1847 | printascii("error: write training timeout\n"); |
| 1848 | while (1) |
| 1849 | ; |
| 1850 | } |
| 1851 | } |
| 1852 | |
| 1853 | /* Check the write train state */ |
| 1854 | if ((readl(PHY_REG(phy_base, 0x90)) >> 5) & 0x7) { |
| 1855 | printascii("error: write training error\n"); |
| 1856 | return -1; |
| 1857 | } |
| 1858 | |
| 1859 | /* PHY_0x7a [1] reg_dq_wr_train_en */ |
| 1860 | clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1)); |
| 1861 | |
| 1862 | pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq); |
| 1863 | |
| 1864 | /* save LPDDR4 write vref to fsp_param for dfs */ |
| 1865 | if (dramtype == LPDDR4) { |
| 1866 | fsp_param[dst_fsp].vref_dq[cs] = |
| 1867 | ((readl(PHY_REG(phy_base, 0x384)) & 0x3f) + |
| 1868 | (readl(PHY_REG(phy_base, 0x385)) & 0x3f)) / 2; |
| 1869 | /* add range info */ |
| 1870 | fsp_param[dst_fsp].vref_dq[cs] |= |
| 1871 | ((readl(PHY_REG(phy_base, 0x7c)) & BIT(5)) << 1); |
| 1872 | } |
| 1873 | |
| 1874 | if (dramtype == LPDDR3 && mhz <= 400) { |
| 1875 | clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, cl); |
| 1876 | clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, cwl); |
| 1877 | mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + |
| 1878 | DDR_PCTL2_INIT3); |
| 1879 | pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK, |
| 1880 | dramtype); |
| 1881 | } |
| 1882 | |
| 1883 | return 0; |
| 1884 | } |
| 1885 | |
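| | /* |
| |  * Run the training steps requested in training_flag (FULL_TRAINING |
| |  * expands to everything except CA training) and stop at the first |
| |  * failure. |
| |  */ |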
| 1886 | static int data_training(struct dram_info *dram, u32 cs, |
| 1887 | struct rv1126_sdram_params *sdram_params, u32 dst_fsp, |
| 1888 | u32 training_flag) |
| 1889 | { |
| 1890 | u32 ret = 0; |
| 1891 | |
| 1892 | if (training_flag == FULL_TRAINING) |
| 1893 | training_flag = READ_GATE_TRAINING | WRITE_LEVELING | |
| 1894 | WRITE_TRAINING | READ_TRAINING; |
| 1895 | |
| 1896 | if ((training_flag & WRITE_LEVELING) == WRITE_LEVELING) { |
| 1897 | ret = data_training_wl(dram, cs, |
| 1898 | sdram_params->base.dramtype, |
| 1899 | sdram_params->ch.cap_info.rank); |
| 1900 | if (ret != 0) |
| 1901 | goto out; |
| 1902 | } |
| 1903 | |
| 1904 | if ((training_flag & READ_GATE_TRAINING) == READ_GATE_TRAINING) { |
| 1905 | ret = data_training_rg(dram, cs, |
| 1906 | sdram_params->base.dramtype); |
| 1907 | if (ret != 0) |
| 1908 | goto out; |
| 1909 | } |
| 1910 | |
| 1911 | if ((training_flag & READ_TRAINING) == READ_TRAINING) { |
| 1912 | ret = data_training_rd(dram, cs, |
| 1913 | sdram_params->base.dramtype, |
| 1914 | sdram_params->base.ddr_freq); |
| 1915 | if (ret != 0) |
| 1916 | goto out; |
| 1917 | } |
| 1918 | |
| 1919 | if ((training_flag & WRITE_TRAINING) == WRITE_TRAINING) { |
| 1920 | ret = data_training_wr(dram, cs, |
| 1921 | sdram_params->base.dramtype, |
| 1922 | sdram_params->base.ddr_freq, dst_fsp); |
| 1923 | if (ret != 0) |
| 1924 | goto out; |
| 1925 | } |
| 1926 | |
| 1927 | out: |
| 1928 | return ret; |
| 1929 | } |
| 1930 | |
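| | /* |
| |  * Run write leveling with a fixed CA/clock de-skew and store the |
| |  * per-rank, per-byte results (relative to that de-skew) in |
| |  * wrlvl_result[][]. |
| |  */ |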
| 1931 | static int get_wrlvl_val(struct dram_info *dram, |
| 1932 | struct rv1126_sdram_params *sdram_params) |
| 1933 | { |
| 1934 | int i, j, clk_skew; |
| 1935 | void __iomem *phy_base = dram->phy; |
| 1936 | u32 lp_stat; |
| 1937 | int ret; |
| 1938 | |
| 1939 | lp_stat = low_power_update(dram, 0); |
| 1940 | |
| 1941 | clk_skew = 0x1f; |
| 1942 | modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, clk_skew, 3, |
| 1943 | sdram_params->base.dramtype); |
| 1944 | |
| 1945 | ret = data_training(dram, 0, sdram_params, 0, WRITE_LEVELING); |
| 1946 | if (sdram_params->ch.cap_info.rank == 2) |
| 1947 | ret |= data_training(dram, 1, sdram_params, 0, WRITE_LEVELING); |
| 1948 | |
| 1949 | for (j = 0; j < 2; j++) |
| 1950 | for (i = 0; i < 4; i++) |
| 1951 | wrlvl_result[j][i] = |
| 1952 | (readl(PHY_REG(phy_base, wrlvl_result_offset[j][i])) & 0x3f) - |
| 1953 | clk_skew; |
| 1954 | |
| 1955 | low_power_update(dram, lp_stat); |
| 1956 | |
| 1957 | return ret; |
| 1958 | } |
| 1959 | |
| 1960 | #if defined(CONFIG_CMD_DDR_TEST_TOOL) |
| 1961 | static void init_rw_trn_result_struct(struct rw_trn_result *result, |
| 1962 | void __iomem *phy_base, u8 cs_num) |
| 1963 | { |
| 1964 | int i; |
| 1965 | |
| 1966 | result->cs_num = cs_num; |
| 1967 | result->byte_en = readb(PHY_REG(phy_base, 0xf)) & |
| 1968 | PHY_DQ_WIDTH_MASK; |
| 1969 | for (i = 0; i < FSP_NUM; i++) |
| 1970 | result->fsp_mhz[i] = 0; |
| 1971 | } |
| 1972 | |
| 1973 | static void save_rw_trn_min_max(void __iomem *phy_base, |
| 1974 | struct cs_rw_trn_result *rd_result, |
| 1975 | struct cs_rw_trn_result *wr_result, |
| 1976 | u8 byte_en) |
| 1977 | { |
| 1978 | u16 phy_ofs; |
| 1979 | u8 dqs; |
| 1980 | u8 dq; |
| 1981 | |
| 1982 | for (dqs = 0; dqs < BYTE_NUM; dqs++) { |
| 1983 | if ((byte_en & BIT(dqs)) == 0) |
| 1984 | continue; |
| 1985 | |
| 1986 | /* Channel A or B (low or high 16 bit) */ |
| 1987 | phy_ofs = dqs < 2 ? 0x230 : 0x2b0; |
| 1988 | /* low or high 8 bit */ |
| 1989 | phy_ofs += (dqs & 0x1) == 0 ? 0 : 0x9; |
| 1990 | for (dq = 0; dq < 8; dq++) { |
| 1991 | rd_result->dqs[dqs].dq_min[dq] = |
| 1992 | readb(PHY_REG(phy_base, phy_ofs + 0x15 + dq)); |
| 1993 | rd_result->dqs[dqs].dq_max[dq] = |
| 1994 | readb(PHY_REG(phy_base, phy_ofs + 0x27 + dq)); |
| 1995 | wr_result->dqs[dqs].dq_min[dq] = |
| 1996 | readb(PHY_REG(phy_base, phy_ofs + 0x3d + dq)); |
| 1997 | wr_result->dqs[dqs].dq_max[dq] = |
| 1998 | readb(PHY_REG(phy_base, phy_ofs + 0x4f + dq)); |
| 1999 | } |
| 2000 | } |
| 2001 | } |
| 2002 | |
| 2003 | static void save_rw_trn_deskew(void __iomem *phy_base, |
| 2004 | struct fsp_rw_trn_result *result, u8 cs_num, |
| 2005 | int min_val, bool rw) |
| 2006 | { |
| 2007 | u16 phy_ofs; |
| 2008 | u8 cs; |
| 2009 | u8 dq; |
| 2010 | |
| 2011 | result->min_val = min_val; |
| 2012 | |
| 2013 | for (cs = 0; cs < cs_num; cs++) { |
| 2014 | phy_ofs = cs == 0 ? 0x170 : 0x1a0; |
| 2015 | phy_ofs += rw == SKEW_RX_SIGNAL ? 0x1 : 0x17; |
| 2016 | for (dq = 0; dq < 8; dq++) { |
| 2017 | result->cs[cs].dqs[0].dq_deskew[dq] = |
| 2018 | readb(PHY_REG(phy_base, phy_ofs + dq)); |
| 2019 | result->cs[cs].dqs[1].dq_deskew[dq] = |
| 2020 | readb(PHY_REG(phy_base, phy_ofs + 0xb + dq)); |
| 2021 | result->cs[cs].dqs[2].dq_deskew[dq] = |
| 2022 | readb(PHY_REG(phy_base, phy_ofs + 0x60 + dq)); |
| 2023 | result->cs[cs].dqs[3].dq_deskew[dq] = |
| 2024 | readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0xb + dq)); |
| 2025 | } |
| 2026 | |
| 2027 | result->cs[cs].dqs[0].dqs_deskew = |
| 2028 | readb(PHY_REG(phy_base, phy_ofs + 0x8)); |
| 2029 | result->cs[cs].dqs[1].dqs_deskew = |
| 2030 | readb(PHY_REG(phy_base, phy_ofs + 0xb + 0x8)); |
| 2031 | result->cs[cs].dqs[2].dqs_deskew = |
| 2032 | readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0x8)); |
| 2033 | result->cs[cs].dqs[3].dqs_deskew = |
| 2034 | readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0xb + 0x8)); |
| 2035 | } |
| 2036 | } |
| 2037 | |
| 2038 | static void save_rw_trn_result_to_ddr(struct rw_trn_result *result) |
| 2039 | { |
| 2040 | result->flag = DDR_DQ_EYE_FLAG; |
| 2041 | memcpy((void *)(RW_TRN_RESULT_ADDR), result, sizeof(*result)); |
| 2042 | } |
| 2043 | #endif |
| 2044 | |
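| | /* |
| |  * Training sequence for the high-frequency set point: derive the clock |
| |  * and CA de-skew from the write-leveling results, run read gate, read |
| |  * and write training per rank, re-center the RX/TX/CA de-skew around |
| |  * the smallest trained value and finish with another read gate training. |
| |  */ |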
| 2045 | static int high_freq_training(struct dram_info *dram, |
| 2046 | struct rv1126_sdram_params *sdram_params, |
| 2047 | u32 fsp) |
| 2048 | { |
| 2049 | u32 i, j; |
| 2050 | void __iomem *phy_base = dram->phy; |
| 2051 | u32 dramtype = sdram_params->base.dramtype; |
| 2052 | int min_val; |
| 2053 | int dqs_skew, clk_skew, ca_skew; |
| 2054 | u8 byte_en; |
| 2055 | int ret; |
| 2056 | |
| 2057 | byte_en = readl(PHY_REG(phy_base, 0xf)) & PHY_DQ_WIDTH_MASK; |
| 2058 | dqs_skew = 0; |
| 2059 | for (j = 0; j < sdram_params->ch.cap_info.rank; j++) { |
| 2060 | for (i = 0; i < ARRAY_SIZE(wrlvl_result[0]); i++) { |
| 2061 | if ((byte_en & BIT(i)) != 0) |
| 2062 | dqs_skew += wrlvl_result[j][i]; |
| 2063 | } |
| 2064 | } |
| 2065 | dqs_skew = dqs_skew / |
| 2066 | (int)(sdram_params->ch.cap_info.rank * (1 << sdram_params->ch.cap_info.bw)); |
| 2067 | |
| 2068 | clk_skew = 0x20 - dqs_skew; |
| 2069 | dqs_skew = 0x20; |
| 2070 | |
| 2071 | if (dramtype == LPDDR4) { |
| 2072 | min_val = 0xff; |
| 2073 | for (j = 0; j < sdram_params->ch.cap_info.rank; j++) |
| 2074 | for (i = 0; i < sdram_params->ch.cap_info.bw; i++) |
| 2075 | min_val = MIN(wrlvl_result[j][i], min_val); |
| 2076 | |
| 2077 | if (min_val < 0) { |
| 2078 | clk_skew = -min_val; |
| 2079 | ca_skew = -min_val; |
| 2080 | } else { |
| 2081 | clk_skew = 0; |
| 2082 | ca_skew = 0; |
| 2083 | } |
| 2084 | } else if (dramtype == LPDDR3) { |
| 2085 | ca_skew = clk_skew - 4; |
| 2086 | } else { |
| 2087 | ca_skew = clk_skew; |
| 2088 | } |
| 2089 | modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, ca_skew, 3, |
| 2090 | dramtype); |
| 2091 | |
| 2092 | writel(wrlvl_result[0][0] + clk_skew, PHY_REG(phy_base, 0x233)); |
| 2093 | writel(wrlvl_result[0][1] + clk_skew, PHY_REG(phy_base, 0x237)); |
| 2094 | writel(wrlvl_result[0][2] + clk_skew, PHY_REG(phy_base, 0x2b3)); |
| 2095 | writel(wrlvl_result[0][3] + clk_skew, PHY_REG(phy_base, 0x2b7)); |
| 2096 | ret = data_training(dram, 0, sdram_params, fsp, READ_GATE_TRAINING | |
| 2097 | READ_TRAINING | WRITE_TRAINING); |
| 2098 | #if defined(CONFIG_CMD_DDR_TEST_TOOL) |
| 2099 | rw_trn_result.fsp_mhz[fsp] = (u16)sdram_params->base.ddr_freq; |
| 2100 | save_rw_trn_min_max(phy_base, &rw_trn_result.rd_fsp[fsp].cs[0], |
| 2101 | &rw_trn_result.wr_fsp[fsp].cs[0], |
| 2102 | rw_trn_result.byte_en); |
| 2103 | #endif |
| 2104 | if (sdram_params->ch.cap_info.rank == 2) { |
| 2105 | writel(wrlvl_result[1][0] + clk_skew, PHY_REG(phy_base, 0x233)); |
| 2106 | writel(wrlvl_result[1][1] + clk_skew, PHY_REG(phy_base, 0x237)); |
| 2107 | writel(wrlvl_result[1][2] + clk_skew, PHY_REG(phy_base, 0x2b3)); |
| 2108 | writel(wrlvl_result[1][3] + clk_skew, PHY_REG(phy_base, 0x2b7)); |
| 2109 | ret |= data_training(dram, 1, sdram_params, fsp, |
| 2110 | READ_GATE_TRAINING | READ_TRAINING | |
| 2111 | WRITE_TRAINING); |
| 2112 | #if defined(CONFIG_CMD_DDR_TEST_TOOL) |
| 2113 | save_rw_trn_min_max(phy_base, &rw_trn_result.rd_fsp[fsp].cs[1], |
| 2114 | &rw_trn_result.wr_fsp[fsp].cs[1], |
| 2115 | rw_trn_result.byte_en); |
| 2116 | #endif |
| 2117 | } |
| 2118 | if (ret) |
| 2119 | goto out; |
| 2120 | |
| 2121 | record_dq_prebit(dram); |
| 2122 | |
| 2123 | min_val = get_min_value(dram, SKEW_RX_SIGNAL, |
| 2124 | sdram_params->ch.cap_info.rank) * -1; |
| 2125 | modify_dq_deskew(dram, SKEW_RX_SIGNAL, DESKEW_MDF_DIFF_VAL, |
| 2126 | min_val, min_val, sdram_params->ch.cap_info.rank); |
| 2127 | #if defined(CONFIG_CMD_DDR_TEST_TOOL) |
| 2128 | save_rw_trn_deskew(phy_base, &rw_trn_result.rd_fsp[fsp], |
| 2129 | rw_trn_result.cs_num, (u8)(min_val * (-1)), |
| 2130 | SKEW_RX_SIGNAL); |
| 2131 | #endif |
| 2132 | |
| 2133 | min_val = MIN(get_min_value(dram, SKEW_TX_SIGNAL, |
| 2134 | sdram_params->ch.cap_info.rank), |
| 2135 | get_min_value(dram, SKEW_CA_SIGNAL, |
| 2136 | sdram_params->ch.cap_info.rank)) * -1; |
| 2137 | |
| 2138 | /* clk = 0, rx all skew -7, tx - min_value */ |
| 2139 | modify_ca_deskew(dram, DESKEW_MDF_DIFF_VAL, min_val, min_val, 3, |
| 2140 | dramtype); |
| 2141 | |
| 2142 | modify_dq_deskew(dram, SKEW_TX_SIGNAL, DESKEW_MDF_DIFF_VAL, |
| 2143 | min_val, min_val, sdram_params->ch.cap_info.rank); |
| 2144 | #if defined(CONFIG_CMD_DDR_TEST_TOOL) |
| 2145 | save_rw_trn_deskew(phy_base, &rw_trn_result.wr_fsp[fsp], |
| 2146 | rw_trn_result.cs_num, (u8)(min_val * (-1)), |
| 2147 | SKEW_TX_SIGNAL); |
| 2148 | #endif |
| 2149 | |
| 2150 | ret = data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING); |
| 2151 | if (sdram_params->ch.cap_info.rank == 2) |
| 2152 | ret |= data_training(dram, 1, sdram_params, 0, |
| 2153 | READ_GATE_TRAINING); |
| 2154 | out: |
| 2155 | return ret; |
| 2156 | } |
| 2157 | |
| 2158 | static void set_ddrconfig(struct dram_info *dram, u32 ddrconfig) |
| 2159 | { |
| 2160 | writel(ddrconfig, &dram->msch->deviceconf); |
| 2161 | clrsetbits_le32(&dram->grf->noc_con0, 0x3 << 0, 0 << 0); |
| 2162 | } |
| 2163 | |
| 2164 | static void update_noc_timing(struct dram_info *dram, |
| 2165 | struct rv1126_sdram_params *sdram_params) |
| 2166 | { |
| 2167 | void __iomem *pctl_base = dram->pctl; |
| 2168 | u32 bw, bl; |
| 2169 | |
| 2170 | bw = 8 << sdram_params->ch.cap_info.bw; |
| 2171 | bl = ((readl(pctl_base + DDR_PCTL2_MSTR) >> 16) & 0xf) * 2; |
| 2172 | |
| 2173 | /* update the noc timing related to data bus width */ |
| 2174 | if ((bw / 8 * bl) <= 16) |
| 2175 | sdram_params->ch.noc_timings.ddrmode.b.burstsize = 0; |
| 2176 | else if ((bw / 8 * bl) == 32) |
| 2177 | sdram_params->ch.noc_timings.ddrmode.b.burstsize = 1; |
| 2178 | else if ((bw / 8 * bl) == 64) |
| 2179 | sdram_params->ch.noc_timings.ddrmode.b.burstsize = 2; |
| 2180 | else |
| 2181 | sdram_params->ch.noc_timings.ddrmode.b.burstsize = 3; |
| 2182 | |
| 2183 | sdram_params->ch.noc_timings.ddrtimingc0.b.burstpenalty = |
| 2184 | (bl * bw / 8) > 16 ? (bl / 4) : (16 / (bl * bw / 8)) * bl / 4; |
| 2185 | |
| 2186 | if (sdram_params->base.dramtype == LPDDR4) { |
| 2187 | sdram_params->ch.noc_timings.ddrmode.b.mwrsize = |
| 2188 | (bw == 16) ? 0x1 : 0x2; |
| 2189 | sdram_params->ch.noc_timings.ddrtimingc0.b.wrtomwr = |
| 2190 | 3 * sdram_params->ch.noc_timings.ddrtimingc0.b.burstpenalty; |
| 2191 | } |
| 2192 | |
| 2193 | writel(sdram_params->ch.noc_timings.ddrtiminga0.d32, |
| 2194 | &dram->msch->ddrtiminga0); |
| 2195 | writel(sdram_params->ch.noc_timings.ddrtimingb0.d32, |
| 2196 | &dram->msch->ddrtimingb0); |
| 2197 | writel(sdram_params->ch.noc_timings.ddrtimingc0.d32, |
| 2198 | &dram->msch->ddrtimingc0); |
| 2199 | writel(sdram_params->ch.noc_timings.devtodev0.d32, |
| 2200 | &dram->msch->devtodev0); |
| 2201 | writel(sdram_params->ch.noc_timings.ddrmode.d32, &dram->msch->ddrmode); |
| 2202 | writel(sdram_params->ch.noc_timings.ddr4timing.d32, |
| 2203 | &dram->msch->ddr4timing); |
| 2204 | } |
| 2205 | |
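| | /* |
| |  * Configure the DDR split logic for asymmetric setups where the high |
| |  * 16 bits of the bus are backed by less capacity than the low 16 bits: |
| |  * above the programmed split size only the low half remains valid. |
| |  */ |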
| 2206 | static int split_setup(struct dram_info *dram, |
| 2207 | struct rv1126_sdram_params *sdram_params) |
| 2208 | { |
| 2209 | struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info; |
| 2210 | u32 dramtype = sdram_params->base.dramtype; |
| 2211 | u32 split_size, split_mode; |
| 2212 | u64 cs_cap[2], cap; |
| 2213 | |
| 2214 | cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dramtype); |
| 2215 | cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dramtype); |
| 2216 | /* only the case where the larger capacity is mapped to the low 16 bits is supported */ |
| 2217 | if (cap_info->cs0_high16bit_row < cap_info->cs0_row) { |
| 2218 | cap = cs_cap[0] / (1 << (cap_info->cs0_row - |
| 2219 | cap_info->cs0_high16bit_row)); |
| 2220 | } else if ((cap_info->cs1_high16bit_row < cap_info->cs1_row) && |
| 2221 | (cap_info->rank == 2)) { |
| 2222 | if (!cap_info->cs1_high16bit_row) |
| 2223 | cap = cs_cap[0]; |
| 2224 | else |
| 2225 | cap = cs_cap[0] + cs_cap[1] / (1 << (cap_info->cs1_row - |
| 2226 | cap_info->cs1_high16bit_row)); |
| 2227 | } else { |
| 2228 | goto out; |
| 2229 | } |
| 2230 | split_size = (u32)(cap >> 24) & SPLIT_SIZE_MASK; |
| 2231 | if (cap_info->bw == 2) |
| 2232 | split_mode = SPLIT_MODE_32_L16_VALID; |
| 2233 | else |
| 2234 | split_mode = SPLIT_MODE_16_L8_VALID; |
| 2235 | |
| 2236 | rk_clrsetreg(&dram->ddrgrf->grf_ddrsplit_con, |
| 2237 | (SPLIT_MODE_MASK << SPLIT_MODE_OFFSET) | |
| 2238 | (SPLIT_BYPASS_MASK << SPLIT_BYPASS_OFFSET) | |
| 2239 | (SPLIT_SIZE_MASK << SPLIT_SIZE_OFFSET), |
| 2240 | (split_mode << SPLIT_MODE_OFFSET) | |
| 2241 | (0x0 << SPLIT_BYPASS_OFFSET) | |
| 2242 | (split_size << SPLIT_SIZE_OFFSET)); |
| 2243 | |
| 2244 | rk_clrsetreg(BUS_SGRF_BASE_ADDR + SGRF_SOC_CON2, |
| 2245 | MSCH_AXI_BYPASS_ALL_MASK << MSCH_AXI_BYPASS_ALL_SHIFT, |
| 2246 | 0x0 << MSCH_AXI_BYPASS_ALL_SHIFT); |
| 2247 | |
| 2248 | out: |
| 2249 | return 0; |
| 2250 | } |
| 2251 | |
| 2252 | static void split_bypass(struct dram_info *dram) |
| 2253 | { |
| 2254 | if ((readl(&dram->ddrgrf->grf_ddrsplit_con) & |
| 2255 | (1 << SPLIT_BYPASS_OFFSET)) != 0) |
| 2256 | return; |
| 2257 | |
| 2258 | /* bypass split */ |
| 2259 | rk_clrsetreg(&dram->ddrgrf->grf_ddrsplit_con, |
| 2260 | (SPLIT_BYPASS_MASK << SPLIT_BYPASS_OFFSET) | |
| 2261 | (SPLIT_SIZE_MASK << SPLIT_SIZE_OFFSET), |
| 2262 | (0x1 << SPLIT_BYPASS_OFFSET) | |
| 2263 | (0x0 << SPLIT_SIZE_OFFSET)); |
| 2264 | } |
| 2265 | |
| 2266 | static void dram_all_config(struct dram_info *dram, |
| 2267 | struct rv1126_sdram_params *sdram_params) |
| 2268 | { |
| 2269 | struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info; |
| 2270 | u32 dram_type = sdram_params->base.dramtype; |
| 2271 | void __iomem *pctl_base = dram->pctl; |
| 2272 | u32 sys_reg2 = 0; |
| 2273 | u32 sys_reg3 = 0; |
| 2274 | u64 cs_cap[2]; |
| 2275 | u32 cs_pst; |
| 2276 | |
| 2277 | set_ddrconfig(dram, cap_info->ddrconfig); |
| 2278 | sdram_org_config(cap_info, &sdram_params->base, &sys_reg2, |
| 2279 | &sys_reg3, 0); |
| 2280 | writel(sys_reg2, &dram->pmugrf->os_reg[2]); |
| 2281 | writel(sys_reg3, &dram->pmugrf->os_reg[3]); |
| 2282 | |
| 2283 | cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type); |
| 2284 | cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type); |
| 2285 | |
| 2286 | if (cap_info->rank == 2) { |
| 2287 | cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) + |
| 2288 | 6 + 2; |
| 2289 | if (cs_pst > 28) |
| 2290 | cs_cap[0] = 1llu << cs_pst; |
| 2291 | } |
| 2292 | |
| 2293 | writel(((((cs_cap[1] >> 20) / 64) & 0xff) << 8) | |
| 2294 | (((cs_cap[0] >> 20) / 64) & 0xff), |
| 2295 | &dram->msch->devicesize); |
| 2296 | update_noc_timing(dram, sdram_params); |
| 2297 | } |
| 2298 | |
| 2299 | static void enable_low_power(struct dram_info *dram, |
| 2300 | struct rv1126_sdram_params *sdram_params) |
| 2301 | { |
| 2302 | void __iomem *pctl_base = dram->pctl; |
| 2303 | u32 grf_lp_con; |
| 2304 | |
| 2305 | writel(0x1f1f0617, &dram->ddrgrf->ddr_grf_con[1]); |
| 2306 | |
| 2307 | if (sdram_params->base.dramtype == DDR4) |
| 2308 | grf_lp_con = (0x7 << 16) | (1 << 1); |
| 2309 | else if (sdram_params->base.dramtype == DDR3) |
| 2310 | grf_lp_con = (0x7 << 16) | (1 << 0); |
| 2311 | else |
| 2312 | grf_lp_con = (0x7 << 16) | (1 << 2); |
| 2313 | |
| 2314 | /* en lpckdis_en */ |
| 2315 | grf_lp_con = grf_lp_con | (0x1 << (9 + 16)) | (0x1 << 9); |
| 2316 | writel(grf_lp_con, &dram->ddrgrf->ddr_grf_lp_con); |
| 2317 | |
| 2318 | /* enable sr, pd */ |
| 2319 | if (dram->pd_idle == 0) |
| 2320 | clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1)); |
| 2321 | else |
| 2322 | setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1)); |
| 2323 | if (dram->sr_idle == 0) |
| 2324 | clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1); |
| 2325 | else |
| 2326 | setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1); |
| 2327 | setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 3)); |
| 2328 | } |
| 2329 | |
| 2330 | static void print_ddr_info(struct rv1126_sdram_params *sdram_params) |
| 2331 | { |
| 2332 | u32 split; |
| 2333 | |
| 2334 | if ((readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) & |
| 2335 | (1 << SPLIT_BYPASS_OFFSET)) != 0) |
| 2336 | split = 0; |
| 2337 | else |
| 2338 | split = readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) & |
| 2339 | SPLIT_SIZE_MASK; |
| 2340 | |
| 2341 | sdram_print_ddr_info(&sdram_params->ch.cap_info, |
| 2342 | &sdram_params->base, split); |
| 2343 | } |
| 2344 | |
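| | /* |
| |  * One pass of controller/PHY bring-up at the configured frequency: |
| |  * clock and reset sequencing, pctl and PHY configuration, mode-register |
| |  * programming and an initial read gate training on CS0 (and on CS1 when |
| |  * post_init is set). |
| |  */ |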
| 2345 | static int sdram_init_(struct dram_info *dram, |
| 2346 | struct rv1126_sdram_params *sdram_params, u32 post_init) |
| 2347 | { |
| 2348 | void __iomem *pctl_base = dram->pctl; |
| 2349 | void __iomem *phy_base = dram->phy; |
| 2350 | u32 ddr4_vref; |
| 2351 | u32 mr_tmp; |
| 2352 | |
| 2353 | rkclk_configure_ddr(dram, sdram_params); |
| 2354 | |
| 2355 | rkclk_ddr_reset(dram, 1, 1, 1, 1); |
| 2356 | udelay(10); |
| 2357 | |
| 2358 | rkclk_ddr_reset(dram, 1, 1, 1, 0); |
| 2359 | phy_cfg(dram, sdram_params); |
| 2360 | |
| 2361 | rkclk_ddr_reset(dram, 1, 1, 0, 0); |
| 2362 | phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 1); |
| 2363 | |
| 2364 | rkclk_ddr_reset(dram, 1, 0, 0, 0); |
| 2365 | pctl_cfg(dram->pctl, &sdram_params->pctl_regs, |
| 2366 | dram->sr_idle, dram->pd_idle); |
| 2367 | |
| 2368 | if (sdram_params->ch.cap_info.bw == 2) { |
| 2369 | /* 32-bit interface uses pageclose */ |
| 2370 | setbits_le32(pctl_base + DDR_PCTL2_SCHED, 1 << 2); |
| 2371 | /* pageclose = 1 with pageclose_timer = 0 causes errors with LPDDR4 at 328 MHz */ |
| 2372 | clrsetbits_le32(pctl_base + DDR_PCTL2_SCHED1, 0xff, 0x1 << 0); |
| 2373 | } else { |
| 2374 | clrbits_le32(pctl_base + DDR_PCTL2_SCHED, 1 << 2); |
| 2375 | } |
| 2376 | |
| 2377 | #ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT |
| 2378 | u32 tmp, trefi; |
| 2379 | |
| 2380 | tmp = readl(pctl_base + DDR_PCTL2_RFSHTMG); |
| 2381 | trefi = (tmp >> 16) & 0xfff; |
| 2382 | writel((tmp & 0xf000ffff) | (trefi / 2) << 16, |
| 2383 | pctl_base + DDR_PCTL2_RFSHTMG); |
| 2384 | #endif |
| 2385 | |
| 2386 | /* set frequency_mode */ |
| 2387 | setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29); |
| 2388 | /* set target_frequency to Frequency 0 */ |
| 2389 | clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, 0); |
| 2390 | |
| 2391 | set_ds_odt(dram, sdram_params, 0); |
| 2392 | sdram_params->ch.cap_info.ddrconfig = calculate_ddrconfig(sdram_params); |
| 2393 | set_ctl_address_map(dram, sdram_params); |
| 2394 | |
| 2395 | setbits_le32(pctl_base + DDR_PCTL2_DFIMISC, (1 << 5) | (1 << 4)); |
| 2396 | |
| 2397 | rkclk_ddr_reset(dram, 0, 0, 0, 0); |
| 2398 | |
| 2399 | while ((readl(pctl_base + DDR_PCTL2_STAT) & 0x7) == 0) |
| 2400 | continue; |
| 2401 | |
| 2402 | if (sdram_params->base.dramtype == LPDDR3) { |
| 2403 | pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, LPDDR3); |
| 2404 | } else if (sdram_params->base.dramtype == LPDDR4) { |
| 2405 | mr_tmp = readl(pctl_base + DDR_PCTL2_INIT6); |
| 2406 | /* MR11 */ |
| 2407 | pctl_write_mr(dram->pctl, 3, 11, |
| 2408 | mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK, |
| 2409 | LPDDR4); |
| 2410 | /* MR12 */ |
| 2411 | pctl_write_mr(dram->pctl, 3, 12, |
| 2412 | mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK, |
| 2413 | LPDDR4); |
| 2414 | |
| 2415 | mr_tmp = readl(pctl_base + DDR_PCTL2_INIT7); |
| 2416 | /* MR22 */ |
| 2417 | pctl_write_mr(dram->pctl, 3, 22, |
| 2418 | mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK, |
| 2419 | LPDDR4); |
| 2420 | } |
| 2421 | |
| 2422 | if (data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING) != 0) { |
| 2423 | if (post_init != 0) |
| 2424 | printascii("DTT cs0 error\n"); |
| 2425 | return -1; |
| 2426 | } |
| 2427 | |
| 2428 | if (sdram_params->base.dramtype == LPDDR4) { |
| 2429 | mr_tmp = read_mr(dram, 1, 14, LPDDR4); |
| 2430 | |
| 2431 | if (mr_tmp != 0x4d) |
| 2432 | return -1; |
| 2433 | } |
| 2434 | |
| 2435 | if (sdram_params->base.dramtype == LPDDR4) { |
| 2436 | mr_tmp = readl(pctl_base + DDR_PCTL2_INIT7); |
| 2437 | /* MR14 */ |
| 2438 | pctl_write_mr(dram->pctl, 3, 14, |
| 2439 | mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK, |
| 2440 | LPDDR4); |
| 2441 | } |
| 2442 | if (post_init != 0 && sdram_params->ch.cap_info.rank == 2) { |
| 2443 | if (data_training(dram, 1, sdram_params, 0, |
| 2444 | READ_GATE_TRAINING) != 0) { |
| 2445 | printascii("DTT cs1 error\n"); |
| 2446 | return -1; |
| 2447 | } |
| 2448 | } |
| 2449 | |
| 2450 | if (sdram_params->base.dramtype == DDR4) { |
| 2451 | ddr4_vref = readl(PHY_REG(phy_base, 0x105)) * 39; |
| 2452 | pctl_write_vrefdq(dram->pctl, 0x3, ddr4_vref, |
| 2453 | sdram_params->base.dramtype); |
| 2454 | } |
| 2455 | |
| 2456 | dram_all_config(dram, sdram_params); |
| 2457 | enable_low_power(dram, sdram_params); |
| 2458 | |
| 2459 | return 0; |
| 2460 | } |
| 2461 | |
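| | /* |
| |  * Probe the geometry of the attached SDRAM: column/bank/row sizes via |
| |  * test patterns (or from MR8 for LPDDR4/X), the rank count via a CS1 |
| |  * read gate training, and the effective bus width. |
| |  */ |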
| 2462 | static u64 dram_detect_cap(struct dram_info *dram, |
| 2463 | struct rv1126_sdram_params *sdram_params, |
| 2464 | unsigned char channel) |
| 2465 | { |
| 2466 | struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info; |
| 2467 | void __iomem *pctl_base = dram->pctl; |
| 2468 | void __iomem *phy_base = dram->phy; |
| 2469 | u32 mr8; |
| 2470 | |
| 2471 | u32 bktmp; |
| 2472 | u32 coltmp; |
| 2473 | u32 rowtmp; |
| 2474 | u32 cs; |
| 2475 | u32 dram_type = sdram_params->base.dramtype; |
| 2476 | u32 pwrctl; |
| 2477 | u32 i, dq_map; |
| 2478 | u32 byte1 = 0, byte0 = 0; |
| 2479 | u32 tmp, byte; |
| 2480 | struct sdram_head_info_index_v2 *index = (struct sdram_head_info_index_v2 *)common_info; |
| 2481 | struct dq_map_info *map_info = (struct dq_map_info *) |
| 2482 | ((void *)common_info + index->dq_map_index.offset * 4); |
| 2483 | |
| 2484 | cap_info->bw = dram_type == DDR3 ? 0 : 1; |
| 2485 | if (dram_type != LPDDR4) { |
| 2486 | if (dram_type != DDR4) { |
| 2487 | coltmp = 12; |
| 2488 | bktmp = 3; |
| 2489 | if (dram_type == LPDDR2) |
| 2490 | rowtmp = 15; |
| 2491 | else |
| 2492 | rowtmp = 16; |
| 2493 | |
| 2494 | if (sdram_detect_col(cap_info, coltmp) != 0) |
| 2495 | goto cap_err; |
| 2496 | |
| 2497 | sdram_detect_bank(cap_info, coltmp, bktmp); |
| 2498 | if (dram_type != LPDDR3) |
| 2499 | sdram_detect_dbw(cap_info, dram_type); |
| 2500 | } else { |
| 2501 | coltmp = 10; |
| 2502 | bktmp = 4; |
| 2503 | rowtmp = 17; |
| 2504 | |
| 2505 | cap_info->col = 10; |
| 2506 | cap_info->bk = 2; |
| 2507 | sdram_detect_bg(cap_info, coltmp); |
| 2508 | } |
| 2509 | |
| 2510 | if (sdram_detect_row(cap_info, coltmp, bktmp, rowtmp) != 0) |
| 2511 | goto cap_err; |
| 2512 | |
| 2513 | sdram_detect_row_3_4(cap_info, coltmp, bktmp); |
| 2514 | } else { |
| 2515 | cap_info->col = 10; |
| 2516 | cap_info->bk = 3; |
| 2517 | mr8 = read_mr(dram, 1, 8, dram_type); |
| 2518 | cap_info->dbw = ((mr8 >> 6) & 0x3) == 0 ? 1 : 0; |
| 2519 | mr8 = (mr8 >> 2) & 0xf; |
| 2520 | if (mr8 <= 6) { |
| 2521 | cap_info->cs0_row = 14 + (mr8 + 1) / 2; |
| 2522 | } else if (mr8 == 0xc) { |
| 2523 | cap_info->cs0_row = 13; |
| 2524 | } else { |
| 2525 | printascii("Cap ERR: Fail to get cap of LPDDR4/X from MR8\n"); |
| 2526 | goto cap_err; |
| 2527 | } |
| 2528 | if (cap_info->dbw == 0) |
| 2529 | cap_info->cs0_row++; |
| 2530 | cap_info->row_3_4 = mr8 % 2 == 1 ? 1 : 0; |
| 2531 | if (cap_info->cs0_row >= 17) { |
| 2532 | printascii("Cap ERR: "); |
| 2533 | printascii("RV1126 LPDDR4/X cannot support row >= 17\n"); |
| 2534 | goto cap_err; |
| 2535 | // cap_info->cs0_row = 16; |
| 2536 | // cap_info->row_3_4 = 0; |
| 2537 | } |
| 2538 | } |
| 2539 | |
| 2540 | pwrctl = readl(pctl_base + DDR_PCTL2_PWRCTL); |
| 2541 | writel(0, pctl_base + DDR_PCTL2_PWRCTL); |
| 2542 | |
| 2543 | if (data_training(dram, 1, sdram_params, 0, READ_GATE_TRAINING) == 0) |
| 2544 | cs = 1; |
| 2545 | else |
| 2546 | cs = 0; |
| 2547 | cap_info->rank = cs + 1; |
| 2548 | |
| 2549 | setbits_le32(PHY_REG(phy_base, 0xf), 0xf); |
| 2550 | |
| 2551 | tmp = data_training_rg(dram, 0, dram_type) & 0xf; |
| 2552 | |
| 2553 | if (tmp == 0) { |
| 2554 | cap_info->bw = 2; |
| 2555 | } else { |
| 2556 | if (dram_type == DDR3 || dram_type == DDR4) { |
| 2557 | dq_map = 0; |
| 2558 | byte = 0; |
| 2559 | for (i = 0; i < 4; i++) { |
| 2560 | if ((tmp & BIT(i)) == 0) { |
| 2561 | dq_map |= byte << (i * 2); |
| 2562 | byte++; |
| 2563 | } |
| 2564 | } |
| 2565 | cap_info->bw = byte / 2; |
| 2566 | for (i = 0; i < 4; i++) { |
| 2567 | if ((tmp & BIT(i)) != 0) { |
| 2568 | dq_map |= byte << (i * 2); |
| 2569 | byte++; |
| 2570 | } |
| 2571 | } |
| 2572 | clrsetbits_le32(&map_info->byte_map[0], 0xff << 24, dq_map << 24); |
| 2573 | } else { |
| 2574 | dq_map = readl(PHY_REG(phy_base, 0x4f)); |
| 2575 | for (i = 0; i < 4; i++) { |
| 2576 | if (((dq_map >> (i * 2)) & 0x3) == 0) |
| 2577 | byte0 = i; |
| 2578 | if (((dq_map >> (i * 2)) & 0x3) == 1) |
| 2579 | byte1 = i; |
| 2580 | } |
| 2581 | clrsetbits_le32(PHY_REG(phy_base, 0xf), PHY_DQ_WIDTH_MASK, |
| 2582 | BIT(byte0) | BIT(byte1)); |
| 2583 | if (data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING) == 0) |
| 2584 | cap_info->bw = 1; |
| 2585 | else |
| 2586 | cap_info->bw = 0; |
| 2587 | } |
| 2588 | } |
| 2589 | if (cap_info->bw > 0) |
| 2590 | cap_info->dbw = 1; |
| 2591 | |
| 2592 | writel(pwrctl, pctl_base + DDR_PCTL2_PWRCTL); |
| 2593 | |
| 2594 | cap_info->cs0_high16bit_row = cap_info->cs0_row; |
| 2595 | if (cs) { |
| 2596 | cap_info->cs1_row = cap_info->cs0_row; |
| 2597 | cap_info->cs1_high16bit_row = cap_info->cs0_row; |
| 2598 | } else { |
| 2599 | cap_info->cs1_row = 0; |
| 2600 | cap_info->cs1_high16bit_row = 0; |
| 2601 | } |
| 2602 | |
| 2603 | if (dram_type == LPDDR3) |
| 2604 | sdram_detect_dbw(cap_info, dram_type); |
| 2605 | |
| 2606 | return 0; |
| 2607 | cap_err: |
| 2608 | return -1; |
| 2609 | } |
| 2610 | |
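| | /* |
| |  * Detect the CS1 row count on dual-rank setups by writing a test |
| |  * pattern into the CS1 address space and reducing the assumed row count |
| |  * until it reads back without aliasing onto the start of CS1. |
| |  */ |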
| 2611 | static int dram_detect_cs1_row(struct dram_info *dram, |
| 2612 | struct rv1126_sdram_params *sdram_params, |
| 2613 | unsigned char channel) |
| 2614 | { |
| 2615 | struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info; |
| 2616 | void __iomem *pctl_base = dram->pctl; |
| 2617 | u32 ret = 0; |
| 2618 | void __iomem *test_addr; |
| 2619 | u32 row, bktmp, coltmp, bw; |
| 2620 | u64 cs0_cap; |
| 2621 | u32 byte_mask; |
| 2622 | u32 cs_pst; |
| 2623 | u32 cs_add = 0; |
| 2624 | u32 max_row; |
| 2625 | |
| 2626 | if (cap_info->rank == 2) { |
| 2627 | cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) + |
| 2628 | 6 + 2; |
| 2629 | if (cs_pst < 28) |
| 2630 | cs_add = 1; |
| 2631 | |
| 2632 | cs0_cap = 1 << cs_pst; |
| 2633 | |
| 2634 | if (sdram_params->base.dramtype == DDR4) { |
| 2635 | if (cap_info->dbw == 0) |
| 2636 | bktmp = cap_info->bk + 2; |
| 2637 | else |
| 2638 | bktmp = cap_info->bk + 1; |
| 2639 | } else { |
| 2640 | bktmp = cap_info->bk; |
| 2641 | } |
| 2642 | bw = cap_info->bw; |
| 2643 | coltmp = cap_info->col; |
| 2644 | |
| 2645 | if (bw == 2) |
| 2646 | byte_mask = 0xFFFF; |
| 2647 | else |
| 2648 | byte_mask = 0xFF; |
| 2649 | |
| 2650 | max_row = (cs_pst == 31) ? 30 : 31; |
| 2651 | |
| 2652 | max_row = max_row - bktmp - coltmp - bw - cs_add + 1; |
| 2653 | |
| 2654 | row = (cap_info->cs0_row > max_row) ? max_row : |
| 2655 | cap_info->cs0_row; |
| 2656 | |
| 2657 | for (; row > 12; row--) { |
| 2658 | test_addr = (void __iomem *)(CFG_SYS_SDRAM_BASE + |
| 2659 | (u32)cs0_cap + |
| 2660 | (1ul << (row + bktmp + coltmp + |
| 2661 | cs_add + bw - 1ul))); |
| 2662 | |
| 2663 | writel(0, CFG_SYS_SDRAM_BASE + (u32)cs0_cap); |
| 2664 | writel(PATTERN, test_addr); |
| 2665 | |
| 2666 | if (((readl(test_addr) & byte_mask) == |
| 2667 | (PATTERN & byte_mask)) && |
| 2668 | ((readl(CFG_SYS_SDRAM_BASE + (u32)cs0_cap) & |
| 2669 | byte_mask) == 0)) { |
| 2670 | ret = row; |
| 2671 | break; |
| 2672 | } |
| 2673 | } |
| 2674 | } |
| 2675 | |
| 2676 | return ret; |
| 2677 | } |
| 2678 | |
| 2679 | /* return: 0 = success, other = fail */ |
| 2680 | static int sdram_init_detect(struct dram_info *dram, |
| 2681 | struct rv1126_sdram_params *sdram_params) |
| 2682 | { |
| 2683 | struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info; |
| 2684 | u32 ret; |
| 2685 | u32 sys_reg = 0; |
| 2686 | u32 sys_reg3 = 0; |
| 2687 | struct sdram_head_info_index_v2 *index = |
| 2688 | (struct sdram_head_info_index_v2 *)common_info; |
| 2689 | struct dq_map_info *map_info; |
| 2690 | |
| 2691 | map_info = (struct dq_map_info *)((void *)common_info + |
| 2692 | index->dq_map_index.offset * 4); |
| 2693 | |
| 2694 | if (sdram_init_(dram, sdram_params, 0)) { |
| 2695 | if (sdram_params->base.dramtype == DDR3) { |
| 2696 | clrsetbits_le32(&map_info->byte_map[0], 0xff << 24, |
| 2697 | ((0x1 << 6) | (0x3 << 4) | (0x2 << 2) | |
| 2698 | (0x0 << 0)) << 24); |
| 2699 | if (sdram_init_(dram, sdram_params, 0)) |
| 2700 | return -1; |
| 2701 | } else { |
| 2702 | return -1; |
| 2703 | } |
| 2704 | } |
| 2705 | |
| 2706 | if (sdram_params->base.dramtype == DDR3) { |
| 2707 | writel(PATTERN, CFG_SYS_SDRAM_BASE); |
| 2708 | if (readl(CFG_SYS_SDRAM_BASE) != PATTERN) |
| 2709 | return -1; |
| 2710 | } |
| 2711 | |
| 2712 | split_bypass(dram); |
| 2713 | if (dram_detect_cap(dram, sdram_params, 0) != 0) |
| 2714 | return -1; |
| 2715 | |
| 2716 | pctl_remodify_sdram_params(&sdram_params->pctl_regs, cap_info, |
| 2717 | sdram_params->base.dramtype); |
| 2718 | ret = sdram_init_(dram, sdram_params, 1); |
| 2719 | if (ret != 0) |
| 2720 | goto out; |
| 2721 | |
| 2722 | cap_info->cs1_row = |
| 2723 | dram_detect_cs1_row(dram, sdram_params, 0); |
| 2724 | if (cap_info->cs1_row) { |
| 2725 | sys_reg = readl(&dram->pmugrf->os_reg[2]); |
| 2726 | sys_reg3 = readl(&dram->pmugrf->os_reg[3]); |
| 2727 | SYS_REG_ENC_CS1_ROW(cap_info->cs1_row, |
| 2728 | sys_reg, sys_reg3, 0); |
| 2729 | writel(sys_reg, &dram->pmugrf->os_reg[2]); |
| 2730 | writel(sys_reg3, &dram->pmugrf->os_reg[3]); |
| 2731 | } |
| 2732 | |
| 2733 | sdram_detect_high_row(cap_info); |
| 2734 | split_setup(dram, sdram_params); |
| 2735 | out: |
| 2736 | return ret; |
| 2737 | } |
| 2738 | |
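| | /* |
| |  * Return the sdram_configs[] entry with the highest frequency that does |
| |  * not exceed freq_mhz; with freq_mhz == 0 the F0 frequency from the |
| |  * drv/odt info is used instead. |
| |  */ |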
| 2739 | struct rv1126_sdram_params *get_default_sdram_config(u32 freq_mhz) |
| 2740 | { |
| 2741 | u32 i; |
| 2742 | u32 offset = 0; |
| 2743 | struct ddr2_3_4_lp2_3_info *ddr_info; |
| 2744 | |
| 2745 | if (!freq_mhz) { |
| 2746 | ddr_info = get_ddr_drv_odt_info(sdram_configs[0].base.dramtype); |
| 2747 | if (ddr_info) |
| 2748 | freq_mhz = |
| 2749 | (ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) & |
| 2750 | DDR_FREQ_MASK; |
| 2751 | else |
| 2752 | freq_mhz = 0; |
| 2753 | } |
| 2754 | |
| 2755 | for (i = 0; i < ARRAY_SIZE(sdram_configs); i++) { |
| 2756 | if (sdram_configs[i].base.ddr_freq == 0 || |
| 2757 | freq_mhz < sdram_configs[i].base.ddr_freq) |
| 2758 | break; |
| 2759 | } |
| 2760 | offset = i == 0 ? 0 : i - 1; |
| 2761 | |
| 2762 | return &sdram_configs[offset]; |
| 2763 | } |
| 2764 | |
| 2765 | static const u16 pctl_need_update_reg[] = { |
| 2766 | DDR_PCTL2_RFSHTMG, |
| 2767 | DDR_PCTL2_INIT3, |
| 2768 | DDR_PCTL2_INIT4, |
| 2769 | DDR_PCTL2_INIT6, |
| 2770 | DDR_PCTL2_INIT7, |
| 2771 | DDR_PCTL2_DRAMTMG0, |
| 2772 | DDR_PCTL2_DRAMTMG1, |
| 2773 | DDR_PCTL2_DRAMTMG2, |
| 2774 | DDR_PCTL2_DRAMTMG3, |
| 2775 | DDR_PCTL2_DRAMTMG4, |
| 2776 | DDR_PCTL2_DRAMTMG5, |
| 2777 | DDR_PCTL2_DRAMTMG6, |
| 2778 | DDR_PCTL2_DRAMTMG7, |
| 2779 | DDR_PCTL2_DRAMTMG8, |
| 2780 | DDR_PCTL2_DRAMTMG9, |
| 2781 | DDR_PCTL2_DRAMTMG12, |
| 2782 | DDR_PCTL2_DRAMTMG13, |
| 2783 | DDR_PCTL2_DRAMTMG14, |
| 2784 | DDR_PCTL2_ZQCTL0, |
| 2785 | DDR_PCTL2_DFITMG0, |
| 2786 | DDR_PCTL2_ODTCFG |
| 2787 | }; |
| 2788 | |
| 2789 | static const u16 phy_need_update_reg[] = { |
| 2790 | 0x14, |
| 2791 | 0x18, |
| 2792 | 0x1c |
| 2793 | }; |
| 2794 | |
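| | /* |
| |  * Prepare the target frequency set point before switching: copy the |
| |  * frequency-dependent pctl and PHY timing registers into the dst_fsp |
| |  * register set, update drive strength/ODT and, for LPDDR4, rewrite the |
| |  * affected mode registers. |
| |  */ |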
| 2795 | static void pre_set_rate(struct dram_info *dram, |
| 2796 | struct rv1126_sdram_params *sdram_params, |
| 2797 | u32 dst_fsp, u32 dst_fsp_lp4) |
| 2798 | { |
| 2799 | u32 i, j, find; |
| 2800 | void __iomem *pctl_base = dram->pctl; |
| 2801 | void __iomem *phy_base = dram->phy; |
| 2802 | u32 phy_offset; |
| 2803 | u32 mr_tmp; |
| 2804 | u32 dramtype = sdram_params->base.dramtype; |
| 2805 | |
| 2806 | sw_set_req(dram); |
| 2807 | /* pctl timing update */ |
| 2808 | for (i = 0, find = 0; i < ARRAY_SIZE(pctl_need_update_reg); i++) { |
| 2809 | for (j = find; sdram_params->pctl_regs.pctl[j][0] != 0xFFFFFFFF; |
| 2810 | j++) { |
| 2811 | if (sdram_params->pctl_regs.pctl[j][0] == |
| 2812 | pctl_need_update_reg[i]) { |
| 2813 | writel(sdram_params->pctl_regs.pctl[j][1], |
| 2814 | pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + |
| 2815 | pctl_need_update_reg[i]); |
| 2816 | find = j; |
| 2817 | break; |
| 2818 | } |
| 2819 | } |
| 2820 | } |
| 2821 | |
| 2822 | #ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT |
| 2823 | u32 tmp, trefi; |
| 2824 | |
| 2825 | tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_RFSHTMG); |
| 2826 | trefi = (tmp >> 16) & 0xfff; |
| 2827 | writel((tmp & 0xf000ffff) | (trefi / 2) << 16, |
| 2828 | pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_RFSHTMG); |
| 2829 | #endif |
| 2830 | |
| 2831 | sw_set_ack(dram); |
| 2832 | |
| 2833 | /* phy timing update */ |
| 2834 | if (dst_fsp == 0) |
| 2835 | phy_offset = 0; |
| 2836 | else |
| 2837 | phy_offset = PHY_REG(0, 0x387 - 5 + (dst_fsp - 1) * 3); |
| 2838 | /* cl cwl al update */ |
| 2839 | for (i = 0, find = 0; i < ARRAY_SIZE(phy_need_update_reg); i++) { |
| 2840 | for (j = find; sdram_params->phy_regs.phy[j][0] != 0xFFFFFFFF; |
| 2841 | j++) { |
| 2842 | if (sdram_params->phy_regs.phy[j][0] == |
| 2843 | phy_need_update_reg[i]) { |
| 2844 | writel(sdram_params->phy_regs.phy[j][1], |
| 2845 | phy_base + phy_offset + |
| 2846 | phy_need_update_reg[i]); |
| 2847 | find = j; |
| 2848 | break; |
| 2849 | } |
| 2850 | } |
| 2851 | } |
| 2852 | |
| 2853 | set_ds_odt(dram, sdram_params, dst_fsp); |
| 2854 | if (dramtype == LPDDR4) { |
| 2855 | mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + |
| 2856 | DDR_PCTL2_INIT4); |
| 2857 | /* MR13 */ |
| 2858 | pctl_write_mr(dram->pctl, 3, 13, |
| 2859 | ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT & |
| 2860 | PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) | |
| 2861 | ((0x2 << 6) >> dst_fsp_lp4), dramtype); |
| 2862 | writel(((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT & |
| 2863 | PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) | |
| 2864 | ((0x2 << 6) >> dst_fsp_lp4), |
| 2865 | PHY_REG(phy_base, 0x1b)); |
| 2866 | /* MR3 */ |
| 2867 | pctl_write_mr(dram->pctl, 3, 3, |
| 2868 | mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT & |
| 2869 | PCTL2_MR_MASK, |
| 2870 | dramtype); |
| 2871 | writel(mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK, |
| 2872 | PHY_REG(phy_base, 0x19)); |
| 2873 | |
| 2874 | mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + |
| 2875 | DDR_PCTL2_INIT3); |
| 2876 | /* MR1 */ |
| 2877 | pctl_write_mr(dram->pctl, 3, 1, |
| 2878 | mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT & |
| 2879 | PCTL2_MR_MASK, |
| 2880 | dramtype); |
| 2881 | writel(mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT & PCTL2_MR_MASK, |
| 2882 | PHY_REG(phy_base, 0x17)); |
| 2883 | /* MR2 */ |
| 2884 | pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK, |
| 2885 | dramtype); |
| 2886 | writel(mr_tmp & PCTL2_MR_MASK, |
| 2887 | PHY_REG(phy_base, 0x18)); |
| 2888 | |
| 2889 | mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + |
| 2890 | DDR_PCTL2_INIT6); |
| 2891 | /* MR11 */ |
| 2892 | pctl_write_mr(dram->pctl, 3, 11, |
| 2893 | mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK, |
| 2894 | dramtype); |
| 2895 | writel(mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK, |
| 2896 | PHY_REG(phy_base, 0x1a)); |
| 2897 | /* MR12 */ |
| 2898 | pctl_write_mr(dram->pctl, 3, 12, |
| 2899 | mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK, |
| 2900 | dramtype); |
| 2901 | |
| 2902 | mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + |
| 2903 | DDR_PCTL2_INIT7); |
| 2904 | /* MR22 */ |
| 2905 | pctl_write_mr(dram->pctl, 3, 22, |
| 2906 | mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK, |
| 2907 | dramtype); |
| 2908 | writel(mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK, |
| 2909 | PHY_REG(phy_base, 0x1d)); |
| 2910 | /* MR14 */ |
| 2911 | pctl_write_mr(dram->pctl, 3, 14, |
| 2912 | mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK, |
| 2913 | dramtype); |
| 2914 | writel(mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK, |
| 2915 | PHY_REG(phy_base, 0x1c)); |
| 2916 | } |
| 2917 | |
| 2918 | update_noc_timing(dram, sdram_params); |
| 2919 | } |
| 2920 | |
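| | /* |
| |  * Record the drive strength, ODT, vref and NOC timing settings of the |
| |  * trained set point in fsp_param[dst_fsp] so they can be restored when |
| |  * switching to this frequency later. |
| |  */ |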
| 2921 | static void save_fsp_param(struct dram_info *dram, u32 dst_fsp, |
| 2922 | struct rv1126_sdram_params *sdram_params) |
| 2923 | { |
| 2924 | void __iomem *pctl_base = dram->pctl; |
| 2925 | void __iomem *phy_base = dram->phy; |
| 2926 | struct rv1126_fsp_param *p_fsp_param = &fsp_param[dst_fsp]; |
| 2927 | u32 temp, temp1; |
| 2928 | struct ddr2_3_4_lp2_3_info *ddr_info; |
| 2929 | |
| 2930 | ddr_info = get_ddr_drv_odt_info(sdram_params->base.dramtype); |
| 2931 | |
| 2932 | p_fsp_param->freq_mhz = sdram_params->base.ddr_freq; |
| 2933 | |
| 2934 | if (sdram_params->base.dramtype == LPDDR4) { |
| 2935 | p_fsp_param->rd_odt_up_en = 0; |
| 2936 | p_fsp_param->rd_odt_down_en = 1; |
| 2937 | } else { |
| 2938 | p_fsp_param->rd_odt_up_en = |
| 2939 | ODT_INFO_PULLUP_EN(ddr_info->odt_info); |
| 2940 | p_fsp_param->rd_odt_down_en = |
| 2941 | ODT_INFO_PULLDOWN_EN(ddr_info->odt_info); |
| 2942 | } |
| 2943 | |
| 2944 | if (p_fsp_param->rd_odt_up_en) |
| 2945 | p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x111)); |
| 2946 | else if (p_fsp_param->rd_odt_down_en) |
| 2947 | p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x110)); |
| 2948 | else |
| 2949 | p_fsp_param->rd_odt = 0; |
| 2950 | p_fsp_param->wr_dq_drv = readl(PHY_REG(phy_base, 0x112)); |
| 2951 | p_fsp_param->wr_ca_drv = readl(PHY_REG(phy_base, 0x100)); |
| 2952 | p_fsp_param->wr_ckcs_drv = readl(PHY_REG(phy_base, 0x102)); |
| 2953 | p_fsp_param->vref_inner = readl(PHY_REG(phy_base, 0x128)); |
| 2954 | p_fsp_param->vref_out = readl(PHY_REG(phy_base, 0x105)); |
| 2955 | |
| 2956 | if (sdram_params->base.dramtype == DDR3) { |
| 2957 | temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + |
| 2958 | DDR_PCTL2_INIT3); |
| 2959 | temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK; |
| 2960 | p_fsp_param->ds_pdds = temp & DDR3_DS_MASK; |
| 2961 | p_fsp_param->dq_odt = temp & DDR3_RTT_NOM_MASK; |
| 2962 | p_fsp_param->ca_odt = p_fsp_param->dq_odt; |
| 2963 | } else if (sdram_params->base.dramtype == DDR4) { |
| 2964 | temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + |
| 2965 | DDR_PCTL2_INIT3); |
| 2966 | temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK; |
| 2967 | p_fsp_param->ds_pdds = temp & DDR4_DS_MASK; |
| 2968 | p_fsp_param->dq_odt = temp & DDR4_RTT_NOM_MASK; |
| 2969 | p_fsp_param->ca_odt = p_fsp_param->dq_odt; |
| 2970 | } else if (sdram_params->base.dramtype == LPDDR3) { |
| 2971 | temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + |
| 2972 | DDR_PCTL2_INIT4); |
| 2973 | temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK; |
| 2974 | p_fsp_param->ds_pdds = temp & 0xf; |
| 2975 | |
| 2976 | p_fsp_param->dq_odt = lp3_odt_value; |
| 2977 | p_fsp_param->ca_odt = p_fsp_param->dq_odt; |
| 2978 | } else if (sdram_params->base.dramtype == LPDDR4) { |
| 2979 | temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + |
| 2980 | DDR_PCTL2_INIT4); |
| 2981 | temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK; |
| 2982 | p_fsp_param->ds_pdds = temp & LPDDR4_PDDS_MASK; |
| 2983 | |
| 2984 | temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + |
| 2985 | DDR_PCTL2_INIT6); |
| 2986 | temp = (temp >> PCTL2_LPDDR4_MR11_SHIFT) & PCTL2_MR_MASK; |
| 2987 | p_fsp_param->dq_odt = temp & LPDDR4_DQODT_MASK; |
| 2988 | p_fsp_param->ca_odt = temp & LPDDR4_CAODT_MASK; |
| 2989 | |
| 2990 | temp = MAX(readl(PHY_REG(phy_base, 0x3ae)), |
| 2991 | readl(PHY_REG(phy_base, 0x3ce))); |
| 2992 | temp1 = MIN(readl(PHY_REG(phy_base, 0x3be)), |
| 2993 | readl(PHY_REG(phy_base, 0x3de))); |
| 2994 | p_fsp_param->vref_ca[0] = (temp + temp1) / 2; |
| 2995 | temp = MAX(readl(PHY_REG(phy_base, 0x3af)), |
| 2996 | readl(PHY_REG(phy_base, 0x3cf))); |
| 2997 | temp1 = MIN(readl(PHY_REG(phy_base, 0x3bf)), |
| 2998 | readl(PHY_REG(phy_base, 0x3df))); |
| 2999 | p_fsp_param->vref_ca[1] = (temp + temp1) / 2; |
| 3000 | p_fsp_param->vref_ca[0] |= |
| 3001 | (readl(PHY_REG(phy_base, 0x1e)) & BIT(6)); |
| 3002 | p_fsp_param->vref_ca[1] |= |
| 3003 | (readl(PHY_REG(phy_base, 0x1e)) & BIT(6)); |
| 3004 | |
| 3005 | p_fsp_param->lp4_drv_pd_en = (readl(PHY_REG(phy_base, 0x114)) >> |
| 3006 | 3) & 0x1; |
| 3007 | } |
| 3008 | |
| 3009 | p_fsp_param->noc_timings.ddrtiminga0 = |
| 3010 | sdram_params->ch.noc_timings.ddrtiminga0; |
| 3011 | p_fsp_param->noc_timings.ddrtimingb0 = |
| 3012 | sdram_params->ch.noc_timings.ddrtimingb0; |
| 3013 | p_fsp_param->noc_timings.ddrtimingc0 = |
| 3014 | sdram_params->ch.noc_timings.ddrtimingc0; |
| 3015 | p_fsp_param->noc_timings.devtodev0 = |
| 3016 | sdram_params->ch.noc_timings.devtodev0; |
| 3017 | p_fsp_param->noc_timings.ddrmode = |
| 3018 | sdram_params->ch.noc_timings.ddrmode; |
| 3019 | p_fsp_param->noc_timings.ddr4timing = |
| 3020 | sdram_params->ch.noc_timings.ddr4timing; |
| 3021 | p_fsp_param->noc_timings.agingx0 = |
| 3022 | sdram_params->ch.noc_timings.agingx0; |
| 3023 | p_fsp_param->noc_timings.aging0 = |
| 3024 | sdram_params->ch.noc_timings.aging0; |
| 3025 | p_fsp_param->noc_timings.aging1 = |
| 3026 | sdram_params->ch.noc_timings.aging1; |
| 3027 | p_fsp_param->noc_timings.aging2 = |
| 3028 | sdram_params->ch.noc_timings.aging2; |
| 3029 | p_fsp_param->noc_timings.aging3 = |
| 3030 | sdram_params->ch.noc_timings.aging3; |
| 3031 | |
| 3032 | p_fsp_param->flag = FSP_FLAG; |
| 3033 | } |
| 3034 | |
| 3035 | static void copy_fsp_param_to_ddr(void) |
| 3036 | { |
| 3037 | memcpy((void *)FSP_PARAM_STORE_ADDR, (void *)&fsp_param, |
| 3038 | sizeof(fsp_param)); |
| 3039 | } |
| 3040 | |
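| | /* |
| |  * Patch the refresh-related timings (tRFC, tXS/tXSR) in the pctl |
| |  * register table according to the detected per-die capacity and the |
| |  * target frequency. |
| |  */ |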
| 3041 | static void pctl_modify_trfc(struct ddr_pctl_regs *pctl_regs, |
| 3042 | struct sdram_cap_info *cap_info, u32 dram_type, |
| 3043 | u32 freq) |
| 3044 | { |
| 3045 | u64 cs0_cap; |
| 3046 | u32 die_cap; |
| 3047 | u32 trfc_ns, trfc4_ns; |
| 3048 | u32 trfc, txsnr; |
| 3049 | u32 txs_abort_fast = 0; |
| 3050 | u32 tmp; |
| 3051 | |
| 3052 | cs0_cap = sdram_get_cs_cap(cap_info, 0, dram_type); |
| 3053 | die_cap = (u32)(cs0_cap >> (20 + (cap_info->bw - cap_info->dbw))); |
| 3054 | |
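| | /* tRFC (and tRFC4 for DDR4) in ns by die density; exit-self-refresh times add a 10ns margin and round up to clock cycles */ |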
| 3055 | switch (dram_type) { |
| 3056 | case DDR3: |
| 3057 | if (die_cap <= DIE_CAP_512MBIT) |
| 3058 | trfc_ns = 90; |
| 3059 | else if (die_cap <= DIE_CAP_1GBIT) |
| 3060 | trfc_ns = 110; |
| 3061 | else if (die_cap <= DIE_CAP_2GBIT) |
| 3062 | trfc_ns = 160; |
| 3063 | else if (die_cap <= DIE_CAP_4GBIT) |
| 3064 | trfc_ns = 260; |
| 3065 | else |
| 3066 | trfc_ns = 350; |
| 3067 | txsnr = MAX(5, ((trfc_ns + 10) * freq + 999) / 1000); |
| 3068 | break; |
| 3069 | |
| 3070 | case DDR4: |
| 3071 | if (die_cap <= DIE_CAP_2GBIT) { |
| 3072 | trfc_ns = 160; |
| 3073 | trfc4_ns = 90; |
| 3074 | } else if (die_cap <= DIE_CAP_4GBIT) { |
| 3075 | trfc_ns = 260; |
| 3076 | trfc4_ns = 110; |
| 3077 | } else if (die_cap <= DIE_CAP_8GBIT) { |
| 3078 | trfc_ns = 350; |
| 3079 | trfc4_ns = 160; |
| 3080 | } else { |
| 3081 | trfc_ns = 550; |
| 3082 | trfc4_ns = 260; |
| 3083 | } |
| 3084 | txsnr = ((trfc_ns + 10) * freq + 999) / 1000; |
| 3085 | txs_abort_fast = ((trfc4_ns + 10) * freq + 999) / 1000; |
| 3086 | break; |
| 3087 | |
| 3088 | case LPDDR3: |
| 3089 | if (die_cap <= DIE_CAP_4GBIT) |
| 3090 | trfc_ns = 130; |
| 3091 | else |
| 3092 | trfc_ns = 210; |
| 3093 | txsnr = MAX(2, ((trfc_ns + 10) * freq + 999) / 1000); |
| 3094 | break; |
| 3095 | |
| 3096 | case LPDDR4: |
| 3097 | if (die_cap <= DIE_CAP_2GBIT) |
| 3098 | trfc_ns = 130; |
| 3099 | else if (die_cap <= DIE_CAP_4GBIT) |
| 3100 | trfc_ns = 180; |
| 3101 | else if (die_cap <= DIE_CAP_8GBIT) |
| 3102 | trfc_ns = 280; |
| 3103 | else |
| 3104 | trfc_ns = 380; |
| 3105 | txsnr = MAX(2, ((trfc_ns + 10) * freq + 999) / 1000); |
| 3106 | break; |
| 3107 | |
| 3108 | default: |
| 3109 | return; |
| 3110 | } |
| 3111 | trfc = (trfc_ns * freq + 999) / 1000; |
| 3112 | |
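| | /* patch the frequency-dependent fields in the pctl init table: t_rfc_min (RFSHTMG), t_xs_* (DRAMTMG8, DDR3/4) and t_xsr (DRAMTMG14, LPDDR3/4) */ |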
| 3113 | for (int i = 0; pctl_regs->pctl[i][0] != 0xffffffff; i++) { |
| 3114 | switch (pctl_regs->pctl[i][0]) { |
| 3115 | case DDR_PCTL2_RFSHTMG: |
| 3116 | tmp = pctl_regs->pctl[i][1]; |
| 3117 | /* t_rfc_min */ |
| 3118 | tmp &= ~((u32)0x3ff); |
| 3119 | tmp |= ((trfc + 1) / 2) & 0x3ff; |
| 3120 | pctl_regs->pctl[i][1] = tmp; |
| 3121 | break; |
| 3122 | |
| 3123 | case DDR_PCTL2_DRAMTMG8: |
| 3124 | if (dram_type == DDR3 || dram_type == DDR4) { |
| 3125 | tmp = pctl_regs->pctl[i][1]; |
| 3126 | /* t_xs_x32 */ |
| 3127 | tmp &= ~((u32)0x7f); |
| 3128 | tmp |= ((txsnr + 63) / 64) & 0x7f; |
| 3129 | |
| 3130 | if (dram_type == DDR4) { |
| 3131 | /* t_xs_abort_x32 */ |
| 3132 | tmp &= ~((u32)(0x7f << 16)); |
| 3133 | tmp |= (((txs_abort_fast + 63) / 64) & 0x7f) << 16; |
| 3134 | /* t_xs_fast_x32 */ |
| 3135 | tmp &= ~((u32)(0x7f << 24)); |
| 3136 | tmp |= (((txs_abort_fast + 63) / 64) & 0x7f) << 24; |
| 3137 | } |
| 3138 | |
| 3139 | pctl_regs->pctl[i][1] = tmp; |
| 3140 | } |
| 3141 | break; |
| 3142 | |
| 3143 | case DDR_PCTL2_DRAMTMG14: |
| 3144 | if (dram_type == LPDDR3 || |
| 3145 | dram_type == LPDDR4) { |
| 3146 | tmp = pctl_regs->pctl[i][1]; |
| 3147 | /* t_xsr */ |
| 3148 | tmp &= ~((u32)0xfff); |
| 3149 | tmp |= ((txsnr + 1) / 2) & 0xfff; |
| 3150 | pctl_regs->pctl[i][1] = tmp; |
| 3151 | } |
| 3152 | break; |
| 3153 | |
| 3154 | default: |
| 3155 | break; |
| 3156 | } |
| 3157 | } |
| 3158 | } |
| 3159 | |
| 3160 | void ddr_set_rate(struct dram_info *dram, |
| 3161 | struct rv1126_sdram_params *sdram_params, |
| 3162 | u32 freq, u32 cur_freq, u32 dst_fsp, |
| 3163 | u32 dst_fsp_lp4, u32 training_en) |
| 3164 | { |
| 3165 | u32 dest_dll_off, cur_init3, dst_init3, cur_fsp, cur_dll_off; |
| 3166 | u32 mr_tmp; |
| 3167 | u32 lp_stat; |
| 3168 | u32 dramtype = sdram_params->base.dramtype; |
| 3169 | struct rv1126_sdram_params *sdram_params_new; |
| 3170 | void __iomem *pctl_base = dram->pctl; |
| 3171 | void __iomem *phy_base = dram->phy; |
| 3172 | |
| 3173 | lp_stat = low_power_update(dram, 0); |
| 3174 | sdram_params_new = get_default_sdram_config(freq); |
| 3175 | sdram_params_new->ch.cap_info.rank = sdram_params->ch.cap_info.rank; |
| 3176 | sdram_params_new->ch.cap_info.bw = sdram_params->ch.cap_info.bw; |
| 3177 | |
| 3178 | pctl_modify_trfc(&sdram_params_new->pctl_regs, |
| 3179 | &sdram_params->ch.cap_info, dramtype, freq); |
| 3180 | pre_set_rate(dram, sdram_params_new, dst_fsp, dst_fsp_lp4); |
| 3181 | |
| 3182 | while ((readl(pctl_base + DDR_PCTL2_STAT) & |
| 3183 | PCTL2_OPERATING_MODE_MASK) == |
| 3184 | PCTL2_OPERATING_MODE_SR) |
| 3185 | continue; |
| 3186 | |
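| | /* DLL-off detection: DDR3 MR1 bit 0 = 1 disables the DLL, DDR4 MR1 bit 0 = 1 enables it (MR1 sits in the low half of INIT3) */ |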
| 3187 | dest_dll_off = 0; |
| 3188 | dst_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + |
| 3189 | DDR_PCTL2_INIT3); |
| 3190 | if ((dramtype == DDR3 && (dst_init3 & 1)) || |
| 3191 | (dramtype == DDR4 && !(dst_init3 & 1))) |
| 3192 | dest_dll_off = 1; |
| 3193 | |
| 3194 | cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3; |
| 3195 | cur_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + |
| 3196 | DDR_PCTL2_INIT3); |
| 3197 | cur_init3 &= PCTL2_MR_MASK; |
| 3198 | cur_dll_off = 1; |
| 3199 | if ((dramtype == DDR3 && !(cur_init3 & 1)) || |
| 3200 | (dramtype == DDR4 && (cur_init3 & 1))) |
| 3201 | cur_dll_off = 0; |
| 3202 | |
| 3203 | if (!cur_dll_off) { |
| 3204 | if (dramtype == DDR3) |
| 3205 | cur_init3 |= 1; |
| 3206 | else |
| 3207 | cur_init3 &= ~1; |
| 3208 | pctl_write_mr(dram->pctl, 2, 1, cur_init3, dramtype); |
| 3209 | } |
| 3210 | |
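| | /* disable auto-refresh and enter self-refresh before switching clocks */ |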
| 3211 | setbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3, |
| 3212 | PCTL2_DIS_AUTO_REFRESH); |
| 3213 | update_refresh_reg(dram); |
| 3214 | |
| 3215 | enter_sr(dram, 1); |
| 3216 | |
| 3217 | writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK | |
| 3218 | PMUGRF_CON_DDRPHY_BUFFEREN_EN, |
| 3219 | &dram->pmugrf->soc_con[0]); |
| 3220 | sw_set_req(dram); |
| 3221 | clrbits_le32(pctl_base + DDR_PCTL2_DFIMISC, |
| 3222 | PCTL2_DFI_INIT_COMPLETE_EN); |
| 3223 | sw_set_ack(dram); |
| 3224 | |
| 3225 | sw_set_req(dram); |
| 3226 | if ((dramtype == DDR3 || dramtype == DDR4) && dest_dll_off) |
| 3227 | setbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE); |
| 3228 | else |
| 3229 | clrbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE); |
| 3230 | |
| 3231 | setbits_le32(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_ZQCTL0, |
| 3232 | PCTL2_DIS_SRX_ZQCL); |
| 3233 | setbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_ZQCTL0, |
| 3234 | PCTL2_DIS_SRX_ZQCL); |
| 3235 | sw_set_ack(dram); |
| 3236 | |
| 3237 | writel(DDR_MSCH_EN_MASK | (0x1 << DDR_MSCH_EN_SHIFT), |
| 3238 | &dram->cru->clkgate_con[21]); |
| 3239 | writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK | |
| 3240 | (0x1 << CLK_DDR_UPCTL_EN_SHIFT) | |
| 3241 | (0x1 << ACLK_DDR_UPCTL_EN_SHIFT), |
| 3242 | BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12); |
| 3243 | |
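| | /* hold the PHY in reset, retune the DPLL and PHY PLL to the target frequency, then release the reset */ |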
| 3244 | clrbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET); |
| 3245 | rkclk_set_dpll(dram, freq * MHz / 2); |
| 3246 | phy_pll_set(dram, freq * MHz, 0); |
| 3247 | phy_pll_set(dram, freq * MHz, 1); |
| 3248 | setbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET); |
| 3249 | |
| 3250 | writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK | |
| 3251 | PMUGRF_CON_DDRPHY_BUFFEREN_DIS, |
| 3252 | &dram->pmugrf->soc_con[0]); |
| 3253 | writel(DDR_MSCH_EN_MASK | (0x0 << DDR_MSCH_EN_SHIFT), |
| 3254 | &dram->cru->clkgate_con[21]); |
| 3255 | writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK | |
| 3256 | (0x0 << CLK_DDR_UPCTL_EN_SHIFT) | |
| 3257 | (0x0 << ACLK_DDR_UPCTL_EN_SHIFT), |
| 3258 | BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12); |
| 3259 | while ((readl(pctl_base + DDR_PCTL2_DFISTAT) & |
| 3260 | PCTL2_DFI_INIT_COMPLETE) != PCTL2_DFI_INIT_COMPLETE) |
| 3261 | continue; |
| 3262 | |
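| | /* select the destination frequency set point in the controller (MSTR2) and mirror it into PHY reg 0xc */ |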
| 3263 | sw_set_req(dram); |
| 3264 | setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29); |
| 3265 | clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, dst_fsp); |
| 3266 | sw_set_ack(dram); |
| 3267 | update_refresh_reg(dram); |
| 3268 | clrsetbits_le32(PHY_REG(phy_base, 0xc), 0x3 << 2, dst_fsp << 2); |
| 3269 | |
| 3270 | enter_sr(dram, 0); |
| 3271 | |
| 3272 | setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5); |
| 3273 | clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5); |
| 3274 | |
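| | /* re-issue the mode registers for the destination set point from the values staged in INIT3/INIT4 (and INIT6/INIT7 for DDR4) */ |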
| 3275 | mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_INIT4); |
| 3276 | if (dramtype == LPDDR3) { |
| 3277 | pctl_write_mr(dram->pctl, 3, 1, |
| 3278 | (dst_init3 >> PCTL2_LPDDR234_MR1_SHIFT) & |
| 3279 | PCTL2_MR_MASK, |
| 3280 | dramtype); |
| 3281 | pctl_write_mr(dram->pctl, 3, 2, dst_init3 & PCTL2_MR_MASK, |
| 3282 | dramtype); |
| 3283 | pctl_write_mr(dram->pctl, 3, 3, |
| 3284 | (mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT) & |
| 3285 | PCTL2_MR_MASK, |
| 3286 | dramtype); |
| 3287 | pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, dramtype); |
| 3288 | } else if ((dramtype == DDR3) || (dramtype == DDR4)) { |
| 3289 | pctl_write_mr(dram->pctl, 3, 1, dst_init3 & PCTL2_MR_MASK, |
| 3290 | dramtype); |
| 3291 | if (!dest_dll_off) { |
| 3292 | pctl_write_mr(dram->pctl, 3, 0, |
| 3293 | ((dst_init3 >> PCTL2_DDR34_MR0_SHIFT) & |
| 3294 | PCTL2_MR_MASK) | DDR3_DLL_RESET, |
| 3295 | dramtype); |
| 3296 | udelay(2); |
| 3297 | } |
| 3298 | pctl_write_mr(dram->pctl, 3, 0, |
| 3299 | (dst_init3 >> PCTL2_DDR34_MR0_SHIFT & |
| 3300 | PCTL2_MR_MASK) & (~DDR3_DLL_RESET), |
| 3301 | dramtype); |
| 3302 | pctl_write_mr(dram->pctl, 3, 2, |
| 3303 | ((mr_tmp >> PCTL2_DDR34_MR2_SHIFT) & |
| 3304 | PCTL2_MR_MASK), dramtype); |
| 3305 | if (dramtype == DDR4) { |
| 3306 | pctl_write_mr(dram->pctl, 3, 3, mr_tmp & PCTL2_MR_MASK, |
| 3307 | dramtype); |
| 3308 | mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + |
| 3309 | DDR_PCTL2_INIT6); |
| 3310 | pctl_write_mr(dram->pctl, 3, 4, |
| 3311 | (mr_tmp >> PCTL2_DDR4_MR4_SHIFT) & |
| 3312 | PCTL2_MR_MASK, |
| 3313 | dramtype); |
| 3314 | pctl_write_mr(dram->pctl, 3, 5, |
| 3315 | mr_tmp >> PCTL2_DDR4_MR5_SHIFT & |
| 3316 | PCTL2_MR_MASK, |
| 3317 | dramtype); |
| 3318 | |
| 3319 | mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + |
| 3320 | DDR_PCTL2_INIT7); |
| 3321 | pctl_write_mr(dram->pctl, 3, 6, |
| 3322 | mr_tmp >> PCTL2_DDR4_MR6_SHIFT & |
| 3323 | PCTL2_MR_MASK, |
| 3324 | dramtype); |
| 3325 | } |
| 3326 | } else if (dramtype == LPDDR4) { |
| 3327 | pctl_write_mr(dram->pctl, 3, 13, |
| 3328 | ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT & |
| 3329 | PCTL2_MR_MASK) & (~(BIT(7)))) | |
| 3330 | dst_fsp_lp4 << 7, dramtype); |
| 3331 | } |
| 3332 | clrbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3, |
| 3333 | PCTL2_DIS_AUTO_REFRESH); |
| 3334 | update_refresh_reg(dram); |
| 3335 | |
| 3336 | /* re-run training for the new frequency, then restore the previous low-power state */ |
| 3337 | high_freq_training(dram, sdram_params_new, dst_fsp); |
| 3338 | low_power_update(dram, lp_stat); |
| 3339 | |
| 3340 | save_fsp_param(dram, dst_fsp, sdram_params_new); |
| 3341 | } |
| 3342 | |
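| | /* step through set points f1, f2 and f3, then settle on f0 as the final frequency, training and saving FSP parameters at each step */ |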
| 3343 | static void ddr_set_rate_for_fsp(struct dram_info *dram, |
| 3344 | struct rv1126_sdram_params *sdram_params) |
| 3345 | { |
| 3346 | struct ddr2_3_4_lp2_3_info *ddr_info; |
| 3347 | u32 f0; |
| 3348 | u32 dramtype = sdram_params->base.dramtype; |
| 3349 | u32 f1, f2, f3; |
| 3350 | |
| 3351 | ddr_info = get_ddr_drv_odt_info(dramtype); |
| 3352 | if (!ddr_info) |
| 3353 | return; |
| 3354 | |
| 3355 | f0 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) & |
| 3356 | DDR_FREQ_MASK; |
| 3357 | |
| 3358 | memset((void *)FSP_PARAM_STORE_ADDR, 0, sizeof(fsp_param)); |
| 3359 | memset((void *)&fsp_param, 0, sizeof(fsp_param)); |
| 3360 | |
| 3361 | f1 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F1_SHIFT) & |
| 3362 | DDR_FREQ_MASK; |
| 3363 | f2 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F2_SHIFT) & |
| 3364 | DDR_FREQ_MASK; |
| 3365 | f3 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F3_SHIFT) & |
| 3366 | DDR_FREQ_MASK; |
| 3367 | |
| 3368 | if (get_wrlvl_val(dram, sdram_params)) |
| 3369 | printascii("get wrlvl value fail\n"); |
| 3370 | |
Jagan Teki | 43241e0 | 2022-12-14 23:20:54 +0530 | [diff] [blame] | 3371 | if (IS_ENABLED(CONFIG_RAM_ROCKCHIP_DEBUG)) { |
| 3372 | printascii("change to: "); |
| 3373 | printdec(f1); |
| 3374 | printascii("MHz\n"); |
| 3375 | } |
Jagan Teki | d0af73c | 2022-12-14 23:20:53 +0530 | [diff] [blame] | 3376 | ddr_set_rate(&dram_info, sdram_params, f1, |
| 3377 | sdram_params->base.ddr_freq, 1, 1, 1); |
Jagan Teki | 43241e0 | 2022-12-14 23:20:54 +0530 | [diff] [blame] | 3378 | |
| 3379 | if (IS_ENABLED(CONFIG_RAM_ROCKCHIP_DEBUG)) { |
| 3380 | printascii("change to: "); |
| 3381 | printdec(f2); |
| 3382 | printascii("MHz\n"); |
| 3383 | } |
Jagan Teki | d0af73c | 2022-12-14 23:20:53 +0530 | [diff] [blame] | 3384 | ddr_set_rate(&dram_info, sdram_params, f2, f1, 2, 0, 1); |
Jagan Teki | 43241e0 | 2022-12-14 23:20:54 +0530 | [diff] [blame] | 3385 | |
| 3386 | if (IS_ENABLED(CONFIG_RAM_ROCKCHIP_DEBUG)) { |
| 3387 | printascii("change to: "); |
| 3388 | printdec(f3); |
| 3389 | printascii("MHz\n"); |
| 3390 | } |
Jagan Teki | d0af73c | 2022-12-14 23:20:53 +0530 | [diff] [blame] | 3391 | ddr_set_rate(&dram_info, sdram_params, f3, f2, 3, 1, 1); |
Jagan Teki | 43241e0 | 2022-12-14 23:20:54 +0530 | [diff] [blame] | 3392 | |
| 3393 | if (IS_ENABLED(CONFIG_RAM_ROCKCHIP_DEBUG)) { |
| 3394 | printascii("change to: "); |
| 3395 | printdec(f0); |
| 3396 | printascii("MHz(final freq)\n"); |
| 3397 | } |
Jagan Teki | d0af73c | 2022-12-14 23:20:53 +0530 | [diff] [blame] | 3398 | ddr_set_rate(&dram_info, sdram_params, f0, f3, 0, 0, 1); |
| 3399 | } |
| 3400 | |
| 3401 | int get_uart_config(void) |
| 3402 | { |
| 3403 | struct sdram_head_info_index_v2 *index = |
| 3404 | (struct sdram_head_info_index_v2 *)common_info; |
| 3405 | struct global_info *gbl_info; |
| 3406 | |
| 3407 | gbl_info = (struct global_info *)((void *)common_info + |
| 3408 | index->global_index.offset * 4); |
| 3409 | |
| 3410 | return gbl_info->uart_info; |
| 3411 | } |
| 3412 | |
| 3413 | /* return: 0 = success, other = fail */ |
| 3414 | static int rv1126_dmc_init(struct udevice *dev) |
| 3415 | { |
| 3416 | struct rv1126_sdram_params *sdram_params; |
| 3417 | int ret = 0; |
| 3418 | struct sdram_head_info_index_v2 *index = |
| 3419 | (struct sdram_head_info_index_v2 *)common_info; |
| 3420 | struct global_info *gbl_info; |
| 3421 | |
| 3422 | dram_info.phy = (void *)DDR_PHY_BASE_ADDR; |
| 3423 | dram_info.pctl = (void *)UPCTL2_BASE_ADDR; |
| 3424 | dram_info.grf = (void *)GRF_BASE_ADDR; |
| 3425 | dram_info.cru = (void *)CRU_BASE_ADDR; |
| 3426 | dram_info.msch = (void *)SERVER_MSCH_BASE_ADDR; |
| 3427 | dram_info.ddrgrf = (void *)DDR_GRF_BASE_ADDR; |
| 3428 | dram_info.pmugrf = (void *)PMU_GRF_BASE_ADDR; |
| 3429 | |
| 3430 | #ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT |
| 3431 | printascii("extended temp support\n"); |
| 3432 | #endif |
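| | /* sanity-check the common_info index: version 2 with non-zero offsets and the expected entry sizes for every supported dram type */ |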
| 3433 | if (index->version_info != 2 || |
| 3434 | (index->global_index.size != sizeof(struct global_info) / 4) || |
| 3435 | (index->ddr3_index.size != |
| 3436 | sizeof(struct ddr2_3_4_lp2_3_info) / 4) || |
| 3437 | (index->ddr4_index.size != |
| 3438 | sizeof(struct ddr2_3_4_lp2_3_info) / 4) || |
| 3439 | (index->lp3_index.size != |
| 3440 | sizeof(struct ddr2_3_4_lp2_3_info) / 4) || |
| 3441 | (index->lp4_index.size != (sizeof(struct lp4_info) / 4)) || |
| 3442 | (index->lp4x_index.size != (sizeof(struct lp4_info) / 4)) || |
| 3443 | index->global_index.offset == 0 || |
| 3444 | index->ddr3_index.offset == 0 || |
| 3445 | index->ddr4_index.offset == 0 || |
| 3446 | index->lp3_index.offset == 0 || |
| 3447 | index->lp4_index.offset == 0 || |
| 3448 | index->lp4x_index.offset == 0) { |
| 3449 | printascii("common info error\n"); |
| 3450 | goto error; |
| 3451 | } |
| 3452 | |
| 3453 | gbl_info = (struct global_info *)((void *)common_info + |
| 3454 | index->global_index.offset * 4); |
| 3455 | |
| 3456 | dram_info.sr_idle = SR_INFO(gbl_info->sr_pd_info); |
| 3457 | dram_info.pd_idle = PD_INFO(gbl_info->sr_pd_info); |
| 3458 | |
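| | /* for DDR3/DDR4, apply the 2T command-timing choice to bit 10 of the first pctl init value (assumed to be MSTR) */ |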
| 3459 | sdram_params = &sdram_configs[0]; |
| 3460 | if (sdram_params->base.dramtype == DDR3 || |
| 3461 | sdram_params->base.dramtype == DDR4) { |
| 3462 | if (DDR_2T_INFO(gbl_info->info_2t)) |
| 3463 | sdram_params->pctl_regs.pctl[0][1] |= 0x1 << 10; |
| 3464 | else |
| 3465 | sdram_params->pctl_regs.pctl[0][1] &= |
| 3466 | ~(0x1 << 10); |
| 3467 | } |
| 3468 | ret = sdram_init_detect(&dram_info, sdram_params); |
| 3469 | if (ret) { |
| 3470 | sdram_print_dram_type(sdram_params->base.dramtype); |
| 3471 | printascii(", "); |
| 3472 | printdec(sdram_params->base.ddr_freq); |
| 3473 | printascii("MHz\n"); |
| 3474 | goto error; |
| 3475 | } |
| 3476 | print_ddr_info(sdram_params); |
| 3477 | #if defined(CONFIG_CMD_DDR_TEST_TOOL) |
| 3478 | init_rw_trn_result_struct(&rw_trn_result, dram_info.phy, |
| 3479 | (u8)sdram_params->ch.cap_info.rank); |
| 3480 | #endif |
| 3481 | |
| 3482 | ddr_set_rate_for_fsp(&dram_info, sdram_params); |
| 3483 | copy_fsp_param_to_ddr(); |
| 3484 | |
| 3485 | #if defined(CONFIG_CMD_DDR_TEST_TOOL) |
| 3486 | save_rw_trn_result_to_ddr(&rw_trn_result); |
| 3487 | #endif |
| 3488 | |
Jagan Teki | 43241e0 | 2022-12-14 23:20:54 +0530 | [diff] [blame] | 3489 | if (IS_ENABLED(CONFIG_RAM_ROCKCHIP_DEBUG)) |
| 3490 | printascii("out\n"); |
Jagan Teki | d0af73c | 2022-12-14 23:20:53 +0530 | [diff] [blame] | 3491 | |
| 3492 | return ret; |
| 3493 | error: |
| 3494 | printascii("error\n"); |
| 3495 | return (-1); |
| 3496 | } |
| 3497 | |
| 3498 | #endif |
| 3499 | |
| 3500 | static int rv1126_dmc_probe(struct udevice *dev) |
| 3501 | { |
| 3502 | #if defined(CONFIG_TPL_BUILD) || \ |
| 3503 | (!defined(CONFIG_TPL) && defined(CONFIG_SPL_BUILD)) |
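| | /* rv1126_dmc_init() prints its own diagnostics; a failure is not propagated */ |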
| 3504 | if (rv1126_dmc_init(dev)) |
| 3505 | return 0; |
| 3506 | #else |
| 3507 | struct dram_info *priv = dev_get_priv(dev); |
| 3508 | |
| 3509 | priv->pmugrf = syscon_get_first_range(ROCKCHIP_SYSCON_PMUGRF); |
| 3510 | debug("%s: grf=%p\n", __func__, priv->pmugrf); |
| 3511 | priv->info.base = CFG_SYS_SDRAM_BASE; |
| 3512 | priv->info.size = |
| 3513 | rockchip_sdram_size((phys_addr_t)&priv->pmugrf->os_reg[2]); |
| 3514 | #endif |
| 3515 | return 0; |
| 3516 | } |
| 3517 | |
| 3518 | static int rv1126_dmc_get_info(struct udevice *dev, struct ram_info *info) |
| 3519 | { |
| 3520 | struct dram_info *priv = dev_get_priv(dev); |
| 3521 | |
| 3522 | *info = priv->info; |
| 3523 | |
| 3524 | return 0; |
| 3525 | } |
| 3526 | |
| 3527 | static struct ram_ops rv1126_dmc_ops = { |
| 3528 | .get_info = rv1126_dmc_get_info, |
| 3529 | }; |
| 3530 | |
| 3531 | static const struct udevice_id rv1126_dmc_ids[] = { |
| 3532 | { .compatible = "rockchip,rv1126-dmc" }, |
| 3533 | { } |
| 3534 | }; |
| 3535 | |
| 3536 | U_BOOT_DRIVER(dmc_rv1126) = { |
| 3537 | .name = "rockchip_rv1126_dmc", |
| 3538 | .id = UCLASS_RAM, |
| 3539 | .of_match = rv1126_dmc_ids, |
| 3540 | .ops = &rv1126_dmc_ops, |
| 3541 | .probe = rv1126_dmc_probe, |
| 3542 | .priv_auto = sizeof(struct dram_info), |
| 3543 | }; |