// SPDX-License-Identifier: GPL-2.0+
/*
 * Keystone2: pll initialization
 *
 * (C) Copyright 2012-2014
 *     Texas Instruments Incorporated, <www.ti.com>
 */

#include <common.h>
#include <asm/arch/clock.h>
#include <asm/arch/clock_defs.h>

/* DEV and ARM speed definitions as specified in DEVSPEED register */
int __weak speeds[DEVSPEED_NUMSPDS] = {
	SPD1000,
	SPD1200,
	SPD1350,
	SPD1400,
	SPD1500,
	SPD1400,
	SPD1350,
	SPD1200,
	SPD1000,
	SPD800,
};

const struct keystone_pll_regs keystone_pll_regs[] = {
	[CORE_PLL]   = {KS2_MAINPLLCTL0, KS2_MAINPLLCTL1},
	[PASS_PLL]   = {KS2_PASSPLLCTL0, KS2_PASSPLLCTL1},
	[TETRIS_PLL] = {KS2_ARMPLLCTL0, KS2_ARMPLLCTL1},
	[DDR3A_PLL]  = {KS2_DDR3APLLCTL0, KS2_DDR3APLLCTL1},
	[DDR3B_PLL]  = {KS2_DDR3BPLLCTL0, KS2_DDR3BPLLCTL1},
	[UART_PLL]   = {KS2_UARTPLLCTL0, KS2_UARTPLLCTL1},
};

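/**
 * pll_pa_clk_sel - select the PASS PLL output as the PASS input clock
 *
 * Sets the PAPLL bit in PASSPLLCTL1 so that the PASS subsystem is
 * clocked from the PASS PLL output.
 */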
inline void pll_pa_clk_sel(void)
{
	setbits_le32(keystone_pll_regs[PASS_PLL].reg1, CFG_PLLCTL1_PAPLL_MASK);
}

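/**
 * wait_for_completion - wait for a pending PLL GO operation to finish
 * @data: PLL configuration data
 *
 * Polls the GOSTAT bit in the PLL controller status register, with a
 * short delay between reads, and gives up after 100 attempts.
 */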
static void wait_for_completion(const struct pll_init_data *data)
{
	int i;

	for (i = 0; i < 100; i++) {
		sdelay(450);
		if (!(pllctl_reg_read(data->pll, stat) & PLLSTAT_GOSTAT_MASK))
			break;
	}
}

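/**
 * bypass_main_pll - put the Main PLL controller into bypass mode
 * @data: PLL configuration data
 *
 * Clears PLLENSRC and PLLEN so the controller output follows the
 * reference clock, then waits roughly four reference clock (CLKIN)
 * cycles for the switch to take effect.
 */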
static inline void bypass_main_pll(const struct pll_init_data *data)
{
	pllctl_reg_clrbits(data->pll, ctl, PLLCTL_PLLENSRC_MASK |
			   PLLCTL_PLLEN_MASK);

	/* Wait for 4 cycles of the reference clock CLKIN */
	sdelay(340);
}

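/**
 * configure_mult_div - program the PLL multiplier, pre-divider and BWADJ
 * @data: PLL configuration data
 *
 * Derives PLLM, PLLD and BWADJ (half the multiplier) from @data and
 * writes them into the chip-level PLL control registers. For the Main
 * PLL the low bits of the multiplier are also written to the PLL
 * controller's MULT register.
 */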
static void configure_mult_div(const struct pll_init_data *data)
{
	u32 pllm, plld, bwadj;

	pllm = data->pll_m - 1;
	plld = (data->pll_d - 1) & CFG_PLLCTL0_PLLD_MASK;

	/* Program the multiplier */
	if (data->pll == MAIN_PLL)
		pllctl_reg_write(data->pll, mult, pllm & PLLM_MULT_LO_MASK);

	clrsetbits_le32(keystone_pll_regs[data->pll].reg0,
			CFG_PLLCTL0_PLLM_MASK,
			pllm << CFG_PLLCTL0_PLLM_SHIFT);

	/* Program BWADJ (half the multiplier value) */
	bwadj = (data->pll_m - 1) >> 1;
	clrsetbits_le32(keystone_pll_regs[data->pll].reg0,
			CFG_PLLCTL0_BWADJ_MASK,
			(bwadj << CFG_PLLCTL0_BWADJ_SHIFT) &
			CFG_PLLCTL0_BWADJ_MASK);
	bwadj = bwadj >> CFG_PLLCTL0_BWADJ_BITS;
	clrsetbits_le32(keystone_pll_regs[data->pll].reg1,
			CFG_PLLCTL1_BWADJ_MASK, bwadj);

	/* Program the pre-divider */
	clrsetbits_le32(keystone_pll_regs[data->pll].reg0,
			CFG_PLLCTL0_PLLD_MASK, plld);
}

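/**
 * configure_main_pll - configure and lock the Main (Core) PLL
 * @data: PLL configuration data
 *
 * If the PLL is currently bypassed, enables ENSAT and power-cycles it;
 * otherwise it is simply put into bypass. The multiplier, pre-divider
 * and output divider are then programmed, the enabled PLLDIVn dividers
 * are updated through a GO operation, and finally the PLL is reset,
 * allowed to lock and switched back to PLL mode.
 */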
void configure_main_pll(const struct pll_init_data *data)
{
	u32 tmp, pllod, i, alnctl_val = 0;
	u32 *offset;

	pllod = data->pll_od - 1;

	/* Wait 100 us for stabilization */
	sdelay(210000);

	tmp = pllctl_reg_read(data->pll, secctl);

	/* Check for bypass */
	if (tmp & SECCTL_BYPASS_MASK) {
		setbits_le32(keystone_pll_regs[data->pll].reg1,
			     CFG_PLLCTL1_ENSAT_MASK);

		bypass_main_pll(data);

		/* Power down and power up the Main PLL */
		pllctl_reg_setbits(data->pll, secctl, SECCTL_BYPASS_MASK);
		pllctl_reg_setbits(data->pll, ctl, PLLCTL_PLLPWRDN_MASK);
		/* Wait 5 us */
		sdelay(21000);

		pllctl_reg_clrbits(data->pll, ctl, PLLCTL_PLLPWRDN_MASK);
	} else {
		bypass_main_pll(data);
	}

	configure_mult_div(data);

	/* Program the output divider */
	pllctl_reg_rmw(data->pll, secctl, SECCTL_OP_DIV_MASK,
		       ((pllod << SECCTL_OP_DIV_SHIFT) & SECCTL_OP_DIV_MASK));

	/* Program PLLDIVn */
	wait_for_completion(data);
	for (i = 0; i < PLLDIV_MAX; i++) {
		if (i < 3)
			offset = pllctl_reg(data->pll, div1) + i;
		else
			offset = pllctl_reg(data->pll, div4) + (i - 3);

		if (divn_val[i] != -1) {
			__raw_writel(divn_val[i] | PLLDIV_ENABLE_MASK, offset);
			alnctl_val |= BIT(i);
		}
	}

	if (alnctl_val) {
		pllctl_reg_setbits(data->pll, alnctl, alnctl_val);
		/*
		 * Set the GOSET bit in PLLCMD to initiate the GO operation
		 * and change the dividers
		 */
		pllctl_reg_setbits(data->pll, cmd, PLLSTAT_GOSTAT_MASK);
		wait_for_completion(data);
	}

	/* Reset the PLL */
	pllctl_reg_setbits(data->pll, ctl, PLLCTL_PLLRST_MASK);
	sdelay(21000);		/* Wait for a minimum of 7 us */
	pllctl_reg_clrbits(data->pll, ctl, PLLCTL_PLLRST_MASK);
	sdelay(105000);		/* Wait for the PLL lock time (min 50 us) */

	/* Enable the PLL */
	pllctl_reg_clrbits(data->pll, secctl, SECCTL_BYPASS_MASK);
	pllctl_reg_setbits(data->pll, ctl, PLLCTL_PLLEN_MASK);
}

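/**
 * configure_secondary_pll - configure a chip-level (secondary) PLL
 * @data: PLL configuration data
 *
 * Puts the PLL into bypass, programs the multiplier, pre-divider and
 * output divider, resets the PLL, waits for lock and switches back to
 * PLL mode. On K2HK the ARM (Tetris) PLL output is gated glitch-free
 * around the sequence, and the PASS PLL output is selected as the PASS
 * input clock.
 */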
void configure_secondary_pll(const struct pll_init_data *data)
{
	int pllod = data->pll_od - 1;

	/* Enable glitch-free bypass for the ARM PLL */
	if (cpu_is_k2hk() && data->pll == TETRIS_PLL)
		clrbits_le32(KS2_MISC_CTRL, MISC_CTL1_ARM_PLL_EN);

	/* Enable bypass mode */
	setbits_le32(keystone_pll_regs[data->pll].reg1, CFG_PLLCTL1_ENSAT_MASK);
	setbits_le32(keystone_pll_regs[data->pll].reg0,
		     CFG_PLLCTL0_BYPASS_MASK);

	configure_mult_div(data);

	/* Program the output divider */
	clrsetbits_le32(keystone_pll_regs[data->pll].reg0,
			CFG_PLLCTL0_CLKOD_MASK,
			(pllod << CFG_PLLCTL0_CLKOD_SHIFT) &
			CFG_PLLCTL0_CLKOD_MASK);

	/* Reset the PLL */
	setbits_le32(keystone_pll_regs[data->pll].reg1, CFG_PLLCTL1_RST_MASK);
	/* Wait for 5 us */
	sdelay(21000);

	/* Select the output of the PASS PLL as the input to PASS */
	if (data->pll == PASS_PLL && cpu_is_k2hk())
		pll_pa_clk_sel();

	clrbits_le32(keystone_pll_regs[data->pll].reg1, CFG_PLLCTL1_RST_MASK);
	/* Wait for 500 * REFCLK cycles * (PLLD + 1) */
	sdelay(105000);

	/* Switch to PLL mode */
	clrbits_le32(keystone_pll_regs[data->pll].reg0,
		     CFG_PLLCTL0_BYPASS_MASK);

	/* Select the output of the ARM PLL as the input to ARM */
	if (cpu_is_k2hk() && data->pll == TETRIS_PLL)
		setbits_le32(KS2_MISC_CTRL, MISC_CTL1_ARM_PLL_EN);
}

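/**
 * init_pll - initialize a single PLL
 * @data: PLL configuration data
 *
 * Dispatches to the Main PLL or secondary PLL sequence and then delays
 * so that consecutive PLL configurations are spaced apart.
 *
 * Illustrative use only; the multiplier/divider values below are
 * placeholders, not validated settings for any device:
 *
 *	struct pll_init_data main_pll = {
 *		.pll	= MAIN_PLL,
 *		.pll_m	= 100,
 *		.pll_d	= 1,
 *		.pll_od	= 2,
 *	};
 *	init_pll(&main_pll);
 */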
void init_pll(const struct pll_init_data *data)
{
	if (data->pll == MAIN_PLL)
		configure_main_pll(data);
	else
		configure_secondary_pll(data);

	/*
	 * This is required to provide a delay between consecutive
	 * PLL configurations
	 */
	sdelay(210000);
}

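/**
 * init_plls - initialize all PLLs described by the board
 *
 * Iterates over every PLL identifier and initializes those for which
 * get_pll_init_data() returns configuration data.
 */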
void init_plls(void)
{
	struct pll_init_data *data;
	int pll;

	for (pll = MAIN_PLL; pll < MAX_PLL_COUNT; pll++) {
		data = get_pll_init_data(pll);
		if (data)
			init_pll(data);
	}
}

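/**
 * get_max_speed - pick the highest supported speed grade
 * @val: speed bit-field read from the efuse/DEVSPEED register
 * @speed_supported: mask of speed bits valid for this device family
 * @spds: table mapping bit positions to speed values
 *
 * Returns the speed corresponding to the left-most set bit that is also
 * supported, or the minimum speed (SPD200 on K2G, SPD800 otherwise) if
 * no bit is set.
 */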
static int get_max_speed(u32 val, u32 speed_supported, int *spds)
{
	int speed;

	/* The left-most set bit gives the speed */
	for (speed = DEVSPEED_NUMSPDS; speed >= 0; speed--) {
		if ((val & BIT(speed)) & speed_supported)
			return spds[speed];
	}

	/* If no bit is set, return the minimum speed */
	if (cpu_is_k2g())
		return SPD200;
	else
		return SPD800;
}

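/**
 * read_efuse_bootrom - read the raw device/ARM speed efuse word
 *
 * Early K2HK revisions (cpu_revision() <= 1) expose the speed grade in
 * KS2_REV1_DEVSPEED; later devices use KS2_EFUSE_BOOTROM.
 */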
static inline u32 read_efuse_bootrom(void)
{
	if (cpu_is_k2hk() && (cpu_revision() <= 1))
		return __raw_readl(KS2_REV1_DEVSPEED);
	else
		return __raw_readl(KS2_EFUSE_BOOTROM);
}

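/**
 * get_max_arm_speed - get the maximum ARM speed grade fused into the device
 * @spds: table mapping ARMSPEED bit positions to speed values
 */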
int get_max_arm_speed(int *spds)
{
	u32 armspeed = read_efuse_bootrom();

	armspeed = (armspeed & DEVSPEED_ARMSPEED_MASK) >>
		   DEVSPEED_ARMSPEED_SHIFT;

	return get_max_speed(armspeed, ARM_SUPPORTED_SPEEDS, spds);
}

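/**
 * get_max_dev_speed - get the maximum device speed grade fused into the device
 * @spds: table mapping DEVSPEED bit positions to speed values
 */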
int get_max_dev_speed(int *spds)
{
	u32 devspeed = read_efuse_bootrom();

	devspeed = (devspeed & DEVSPEED_DEVSPEED_MASK) >>
		   DEVSPEED_DEVSPEED_SHIFT;

	return get_max_speed(devspeed, DEV_SUPPORTED_SPEEDS, spds);
}

/**
 * pll_freq_get - get pll frequency
 * @pll: pll identifier
 *
 * Returns the PLL output rate computed from the external reference
 * clock and the programmed divider/multiplier settings, the reference
 * rate itself when the PLL is bypassed, or 0 for an unknown identifier.
 */
static unsigned long pll_freq_get(int pll)
{
	unsigned long mult = 1, prediv = 1, output_div = 2;
	unsigned long ret;
	u32 tmp, reg;

	if (pll == MAIN_PLL) {
		ret = get_external_clk(sys_clk);
		if (pllctl_reg_read(pll, ctl) & PLLCTL_PLLEN_MASK) {
			/* PLL mode */
			tmp = __raw_readl(KS2_MAINPLLCTL0);
			prediv = (tmp & CFG_PLLCTL0_PLLD_MASK) + 1;
			mult = ((tmp & CFG_PLLCTL0_PLLM_HI_MASK) >>
				CFG_PLLCTL0_PLLM_SHIFT |
				(pllctl_reg_read(pll, mult) &
				 PLLM_MULT_LO_MASK)) + 1;
			output_div = ((pllctl_reg_read(pll, secctl) &
				       SECCTL_OP_DIV_MASK) >>
				      SECCTL_OP_DIV_SHIFT) + 1;

			ret = ret / prediv / output_div * mult;
		}
	} else {
		switch (pll) {
		case PASS_PLL:
			ret = get_external_clk(pa_clk);
			reg = KS2_PASSPLLCTL0;
			break;
		case TETRIS_PLL:
			ret = get_external_clk(tetris_clk);
			reg = KS2_ARMPLLCTL0;
			break;
		case DDR3A_PLL:
			ret = get_external_clk(ddr3a_clk);
			reg = KS2_DDR3APLLCTL0;
			break;
		case DDR3B_PLL:
			ret = get_external_clk(ddr3b_clk);
			reg = KS2_DDR3BPLLCTL0;
			break;
		case UART_PLL:
			ret = get_external_clk(uart_clk);
			reg = KS2_UARTPLLCTL0;
			break;
		default:
			return 0;
		}

		tmp = __raw_readl(reg);

		if (!(tmp & CFG_PLLCTL0_BYPASS_MASK)) {
			/* Bypass disabled */
			prediv = (tmp & CFG_PLLCTL0_PLLD_MASK) + 1;
			mult = ((tmp & CFG_PLLCTL0_PLLM_MASK) >>
				CFG_PLLCTL0_PLLM_SHIFT) + 1;
			output_div = ((tmp & CFG_PLLCTL0_CLKOD_MASK) >>
				      CFG_PLLCTL0_CLKOD_SHIFT) + 1;
			ret = ((ret / prediv) * mult) / output_div;
		}
	}

	return ret;
}

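/**
 * ks_clk_get_rate - get the rate of a KeyStone clock
 * @clk: clock identifier (core_pll_clk, sys_clk0_clk, ...)
 *
 * Maps the clock identifier to its parent PLL, or to a fixed divider of
 * a CORE_PLL system clock, and returns the resulting rate. Returns 0
 * for clocks that are not present on the running SoC.
 */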
unsigned long ks_clk_get_rate(unsigned int clk)
{
	unsigned long freq = 0;

	switch (clk) {
	case core_pll_clk:
		freq = pll_freq_get(CORE_PLL);
		break;
	case pass_pll_clk:
		freq = pll_freq_get(PASS_PLL);
		break;
	case tetris_pll_clk:
		if (!cpu_is_k2e())
			freq = pll_freq_get(TETRIS_PLL);
		break;
	case ddr3a_pll_clk:
		freq = pll_freq_get(DDR3A_PLL);
		break;
	case ddr3b_pll_clk:
		if (cpu_is_k2hk())
			freq = pll_freq_get(DDR3B_PLL);
		break;
	case uart_pll_clk:
		if (cpu_is_k2g())
			freq = pll_freq_get(UART_PLL);
		break;
	case sys_clk0_1_clk:
	case sys_clk0_clk:
		freq = pll_freq_get(CORE_PLL) / pll0div_read(1);
		break;
	case sys_clk1_clk:
		freq = pll_freq_get(CORE_PLL) / pll0div_read(2);
		break;
	case sys_clk2_clk:
		freq = pll_freq_get(CORE_PLL) / pll0div_read(3);
		break;
	case sys_clk3_clk:
		freq = pll_freq_get(CORE_PLL) / pll0div_read(4);
		break;
	case sys_clk0_2_clk:
		freq = ks_clk_get_rate(sys_clk0_clk) / 2;
		break;
	case sys_clk0_3_clk:
		freq = ks_clk_get_rate(sys_clk0_clk) / 3;
		break;
	case sys_clk0_4_clk:
		freq = ks_clk_get_rate(sys_clk0_clk) / 4;
		break;
	case sys_clk0_6_clk:
		freq = ks_clk_get_rate(sys_clk0_clk) / 6;
		break;
	case sys_clk0_8_clk:
		freq = ks_clk_get_rate(sys_clk0_clk) / 8;
		break;
	case sys_clk0_12_clk:
		freq = ks_clk_get_rate(sys_clk0_clk) / 12;
		break;
	case sys_clk0_24_clk:
		freq = ks_clk_get_rate(sys_clk0_clk) / 24;
		break;
	case sys_clk1_3_clk:
		freq = ks_clk_get_rate(sys_clk1_clk) / 3;
		break;
	case sys_clk1_4_clk:
		freq = ks_clk_get_rate(sys_clk1_clk) / 4;
		break;
	case sys_clk1_6_clk:
		freq = ks_clk_get_rate(sys_clk1_clk) / 6;
		break;
	case sys_clk1_12_clk:
		freq = ks_clk_get_rate(sys_clk1_clk) / 12;
		break;
	default:
		break;
	}

	return freq;
}