// SPDX-License-Identifier: GPL-2.0+
/*
 * Keystone2: pll initialization
 *
 * (C) Copyright 2012-2014
 * Texas Instruments Incorporated, <www.ti.com>
 */

#include <common.h>
#include <asm/arch/clock.h>
#include <asm/arch/clock_defs.h>
#include <linux/bitops.h>

/* DEV and ARM speed definitions as specified in DEVSPEED register */
int __weak speeds[DEVSPEED_NUMSPDS] = {
	SPD1000,
	SPD1200,
	SPD1350,
	SPD1400,
	SPD1500,
	SPD1400,
	SPD1350,
	SPD1200,
	SPD1000,
	SPD800,
};

const struct keystone_pll_regs keystone_pll_regs[] = {
	[CORE_PLL]	= {KS2_MAINPLLCTL0, KS2_MAINPLLCTL1},
	[PASS_PLL]	= {KS2_PASSPLLCTL0, KS2_PASSPLLCTL1},
	[TETRIS_PLL]	= {KS2_ARMPLLCTL0, KS2_ARMPLLCTL1},
	[DDR3A_PLL]	= {KS2_DDR3APLLCTL0, KS2_DDR3APLLCTL1},
	[DDR3B_PLL]	= {KS2_DDR3BPLLCTL0, KS2_DDR3BPLLCTL1},
	[UART_PLL]	= {KS2_UARTPLLCTL0, KS2_UARTPLLCTL1},
};

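/* Select the output of the PASS PLL as the input clock to the PASS subsystem */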
inline void pll_pa_clk_sel(void)
{
	setbits_le32(keystone_pll_regs[PASS_PLL].reg1, CFG_PLLCTL1_PAPLL_MASK);
}

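/*
 * Poll the GOSTAT bit in PLLSTAT until the pending GO operation (divider
 * change) completes, with a bounded number of retries.
 */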
static void wait_for_completion(const struct pll_init_data *data)
{
	int i;

	for (i = 0; i < 100; i++) {
		sdelay(450);
		if (!(pllctl_reg_read(data->pll, stat) & PLLSTAT_GOSTAT_MASK))
			break;
	}
}

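/*
 * Put the main PLL controller in bypass by clearing PLLENSRC and PLLEN,
 * then wait for the switch to take effect.
 */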
static inline void bypass_main_pll(const struct pll_init_data *data)
{
	pllctl_reg_clrbits(data->pll, ctl, PLLCTL_PLLENSRC_MASK |
			   PLLCTL_PLLEN_MASK);

	/* 4 cycles of reference clock CLKIN */
	sdelay(340);
}

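/*
 * Program the PLL multiplier (PLLM), pre-divider (PLLD) and bandwidth
 * adjustment (BWADJ) fields for the PLL described by @data.
 */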
static void configure_mult_div(const struct pll_init_data *data)
{
	u32 pllm, plld, bwadj;

	pllm = data->pll_m - 1;
	plld = (data->pll_d - 1) & CFG_PLLCTL0_PLLD_MASK;

	/* Program Multiplier */
	if (data->pll == MAIN_PLL)
		pllctl_reg_write(data->pll, mult, pllm & PLLM_MULT_LO_MASK);

	clrsetbits_le32(keystone_pll_regs[data->pll].reg0,
			CFG_PLLCTL0_PLLM_MASK,
			pllm << CFG_PLLCTL0_PLLM_SHIFT);

	/* Program BWADJ */
	bwadj = (data->pll_m - 1) >> 1; /* Divide pllm by 2 */
	clrsetbits_le32(keystone_pll_regs[data->pll].reg0,
			CFG_PLLCTL0_BWADJ_MASK,
			(bwadj << CFG_PLLCTL0_BWADJ_SHIFT) &
			CFG_PLLCTL0_BWADJ_MASK);
	bwadj = bwadj >> CFG_PLLCTL0_BWADJ_BITS;
	clrsetbits_le32(keystone_pll_regs[data->pll].reg1,
			CFG_PLLCTL1_BWADJ_MASK, bwadj);

	/* Program Divider */
	clrsetbits_le32(keystone_pll_regs[data->pll].reg0,
			CFG_PLLCTL0_PLLD_MASK, plld);
}

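/**
 * configure_main_pll() - Configure and lock the main (core) PLL
 * @data:	multiplier, divider and output-divider settings for the PLL
 *
 * Puts the PLL controller in bypass, programs the multiplier, dividers and
 * PLLDIVn registers, then resets the PLL, waits for lock and switches back
 * to PLL mode.
 */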
void configure_main_pll(const struct pll_init_data *data)
{
	u32 tmp, pllod, i, alnctl_val = 0;
	u32 *offset;

	pllod = data->pll_od - 1;

	/* 100 micro sec for stabilization */
	sdelay(210000);

	tmp = pllctl_reg_read(data->pll, secctl);

	/* Check for Bypass */
	if (tmp & SECCTL_BYPASS_MASK) {
		setbits_le32(keystone_pll_regs[data->pll].reg1,
			     CFG_PLLCTL1_ENSAT_MASK);

		bypass_main_pll(data);

		/* Power down and power up the Main PLL */
		pllctl_reg_setbits(data->pll, secctl, SECCTL_BYPASS_MASK);
		pllctl_reg_setbits(data->pll, ctl, PLLCTL_PLLPWRDN_MASK);
		/* 5 micro sec */
		sdelay(21000);

		pllctl_reg_clrbits(data->pll, ctl, PLLCTL_PLLPWRDN_MASK);
	} else {
		bypass_main_pll(data);
	}

	configure_mult_div(data);

	/* Program Output Divider */
	pllctl_reg_rmw(data->pll, secctl, SECCTL_OP_DIV_MASK,
		       ((pllod << SECCTL_OP_DIV_SHIFT) & SECCTL_OP_DIV_MASK));

	/* Program PLLDIVn */
	wait_for_completion(data);
	for (i = 0; i < PLLDIV_MAX; i++) {
		if (i < 3)
			offset = pllctl_reg(data->pll, div1) + i;
		else
			offset = pllctl_reg(data->pll, div4) + (i - 3);

		if (divn_val[i] != -1) {
			__raw_writel(divn_val[i] | PLLDIV_ENABLE_MASK, offset);
			alnctl_val |= BIT(i);
		}
	}

	if (alnctl_val) {
		pllctl_reg_setbits(data->pll, alnctl, alnctl_val);
		/*
		 * Set GOSET bit in PLLCMD to initiate the GO operation
		 * to change the divide
		 */
		pllctl_reg_setbits(data->pll, cmd, PLLSTAT_GOSTAT_MASK);
		wait_for_completion(data);
	}

	/* Reset PLL */
	pllctl_reg_setbits(data->pll, ctl, PLLCTL_PLLRST_MASK);
	sdelay(21000);	/* Wait for a minimum of 7 us */
	pllctl_reg_clrbits(data->pll, ctl, PLLCTL_PLLRST_MASK);
	sdelay(105000);	/* Wait for PLL Lock time (min 50 us) */

	/* Enable PLL */
	pllctl_reg_clrbits(data->pll, secctl, SECCTL_BYPASS_MASK);
	pllctl_reg_setbits(data->pll, ctl, PLLCTL_PLLEN_MASK);
}

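/**
 * configure_secondary_pll() - Configure a secondary PLL
 * @data:	multiplier, divider and output-divider settings for the PLL
 *
 * Used for the PASS, ARM (Tetris), DDR3A/B and UART PLLs, which are
 * programmed through the PLLCTL0/PLLCTL1 register pairs listed in
 * keystone_pll_regs[] rather than through the main PLL controller.
 */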
void configure_secondary_pll(const struct pll_init_data *data)
{
	int pllod = data->pll_od - 1;

	/* Enable Glitch free bypass for ARM PLL */
	if (cpu_is_k2hk() && data->pll == TETRIS_PLL)
		clrbits_le32(KS2_MISC_CTRL, MISC_CTL1_ARM_PLL_EN);

	/* Enable Bypass mode */
	setbits_le32(keystone_pll_regs[data->pll].reg1, CFG_PLLCTL1_ENSAT_MASK);
	setbits_le32(keystone_pll_regs[data->pll].reg0,
		     CFG_PLLCTL0_BYPASS_MASK);

	configure_mult_div(data);

	/* Program Output Divider */
	clrsetbits_le32(keystone_pll_regs[data->pll].reg0,
			CFG_PLLCTL0_CLKOD_MASK,
			(pllod << CFG_PLLCTL0_CLKOD_SHIFT) &
			CFG_PLLCTL0_CLKOD_MASK);

	/* Reset PLL */
	setbits_le32(keystone_pll_regs[data->pll].reg1, CFG_PLLCTL1_RST_MASK);
	/* Wait for 5 micro seconds */
	sdelay(21000);

	/* Select the Output of PASS PLL as input to PASS */
	if (data->pll == PASS_PLL && cpu_is_k2hk())
		pll_pa_clk_sel();

	clrbits_le32(keystone_pll_regs[data->pll].reg1, CFG_PLLCTL1_RST_MASK);
	/* Wait for 500 * REFCLK cycles * (PLLD + 1) */
	sdelay(105000);

	/* Switch to PLL mode */
	clrbits_le32(keystone_pll_regs[data->pll].reg0,
		     CFG_PLLCTL0_BYPASS_MASK);

	/* Select the Output of ARM PLL as input to ARM */
	if (cpu_is_k2hk() && data->pll == TETRIS_PLL)
		setbits_le32(KS2_MISC_CTRL, MISC_CTL1_ARM_PLL_EN);
}

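/**
 * init_pll() - Initialize a single PLL
 * @data:	PLL selector plus multiplier/divider/output-divider values
 *
 * Dispatches to configure_main_pll() for MAIN_PLL and to
 * configure_secondary_pll() for all other PLLs.
 *
 * Illustrative call (the field values below are placeholders, not real
 * board settings):
 *
 *	struct pll_init_data pll_config = {
 *		.pll	= MAIN_PLL,
 *		.pll_m	= 100,
 *		.pll_d	= 1,
 *		.pll_od	= 2,
 *	};
 *	init_pll(&pll_config);
 */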
void init_pll(const struct pll_init_data *data)
{
	if (data->pll == MAIN_PLL)
		configure_main_pll(data);
	else
		configure_secondary_pll(data);

	/*
	 * This is required to provide a delay between multiple
	 * consecutive PLL configurations
	 */
	sdelay(210000);
}

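/**
 * init_plls() - Initialize all PLLs that have board-provided settings
 *
 * Walks the PLLs from MAIN_PLL up to MAX_PLL_COUNT and initializes each
 * one for which get_pll_init_data() returns a configuration.
 */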
void init_plls(void)
{
	struct pll_init_data *data;
	int pll;

	for (pll = MAIN_PLL; pll < MAX_PLL_COUNT; pll++) {
		data = get_pll_init_data(pll);
		if (data)
			init_pll(data);
	}
}

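/*
 * Translate an efuse speed bitfield into a speed grade from @spds: the
 * left-most set bit that is also present in @speed_supported wins; if no
 * supported bit is set, fall back to the minimum speed for the SoC.
 */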
static int get_max_speed(u32 val, u32 speed_supported, int *spds)
{
	int speed;

	/* The left-most set bit gives the speed */
	for (speed = DEVSPEED_NUMSPDS; speed >= 0; speed--) {
		if ((val & BIT(speed)) & speed_supported)
			return spds[speed];
	}

	/* If no bit is set, return minimum speed */
	if (cpu_is_k2g())
		return SPD200;
	else
		return SPD800;
}

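/* Read the DEVSPEED efuse value, using the alternate location on early K2HK revisions */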
static inline u32 read_efuse_bootrom(void)
{
	if (cpu_is_k2hk() && (cpu_revision() <= 1))
		return __raw_readl(KS2_REV1_DEVSPEED);
	else
		return __raw_readl(KS2_EFUSE_BOOTROM);
}

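/**
 * get_max_arm_speed() - Maximum supported ARM core speed
 * @spds:	table mapping DEVSPEED bit positions to speed grades
 *
 * Return: the highest ARM speed grade allowed by the efuse settings.
 */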
int get_max_arm_speed(int *spds)
{
	u32 armspeed = read_efuse_bootrom();

	armspeed = (armspeed & DEVSPEED_ARMSPEED_MASK) >>
		   DEVSPEED_ARMSPEED_SHIFT;

	return get_max_speed(armspeed, ARM_SUPPORTED_SPEEDS, spds);
}

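/**
 * get_max_dev_speed() - Maximum supported device (DEV) speed
 * @spds:	table mapping DEVSPEED bit positions to speed grades
 *
 * Return: the highest device speed grade allowed by the efuse settings.
 */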
int get_max_dev_speed(int *spds)
{
	u32 devspeed = read_efuse_bootrom();

	devspeed = (devspeed & DEVSPEED_DEVSPEED_MASK) >>
		   DEVSPEED_DEVSPEED_SHIFT;

	return get_max_speed(devspeed, DEV_SUPPORTED_SPEEDS, spds);
}

/**
 * pll_freq_get - get pll frequency
 * @pll: pll identifier
 */
static unsigned long pll_freq_get(int pll)
{
	unsigned long mult = 1, prediv = 1, output_div = 2;
	unsigned long ret;
	u32 tmp, reg;

	if (pll == MAIN_PLL) {
		ret = get_external_clk(sys_clk);
		if (pllctl_reg_read(pll, ctl) & PLLCTL_PLLEN_MASK) {
			/* PLL mode */
			tmp = __raw_readl(KS2_MAINPLLCTL0);
			prediv = (tmp & CFG_PLLCTL0_PLLD_MASK) + 1;
			mult = ((tmp & CFG_PLLCTL0_PLLM_HI_MASK) >>
				CFG_PLLCTL0_PLLM_SHIFT |
				(pllctl_reg_read(pll, mult) &
				 PLLM_MULT_LO_MASK)) + 1;
			output_div = ((pllctl_reg_read(pll, secctl) &
				       SECCTL_OP_DIV_MASK) >>
				      SECCTL_OP_DIV_SHIFT) + 1;

			ret = ret / prediv / output_div * mult;
		}
	} else {
		switch (pll) {
		case PASS_PLL:
			ret = get_external_clk(pa_clk);
			reg = KS2_PASSPLLCTL0;
			break;
		case TETRIS_PLL:
			ret = get_external_clk(tetris_clk);
			reg = KS2_ARMPLLCTL0;
			break;
		case DDR3A_PLL:
			ret = get_external_clk(ddr3a_clk);
			reg = KS2_DDR3APLLCTL0;
			break;
		case DDR3B_PLL:
			ret = get_external_clk(ddr3b_clk);
			reg = KS2_DDR3BPLLCTL0;
			break;
		case UART_PLL:
			ret = get_external_clk(uart_clk);
			reg = KS2_UARTPLLCTL0;
			break;
		default:
			return 0;
		}

		tmp = __raw_readl(reg);

		if (!(tmp & CFG_PLLCTL0_BYPASS_MASK)) {
			/* Bypass disabled */
			prediv = (tmp & CFG_PLLCTL0_PLLD_MASK) + 1;
			mult = ((tmp & CFG_PLLCTL0_PLLM_MASK) >>
				CFG_PLLCTL0_PLLM_SHIFT) + 1;
			output_div = ((tmp & CFG_PLLCTL0_CLKOD_MASK) >>
				      CFG_PLLCTL0_CLKOD_SHIFT) + 1;
			ret = ((ret / prediv) * mult) / output_div;
		}
	}

	return ret;
}

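/**
 * ks_clk_get_rate() - Get the rate of a KeyStone clock
 * @clk:	clock identifier (core_pll_clk, sys_clk0_clk, ...)
 *
 * Return: the clock rate derived from the owning PLL and its dividers,
 * or 0 for an unknown or unsupported clock.
 */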
unsigned long ks_clk_get_rate(unsigned int clk)
{
	unsigned long freq = 0;

	switch (clk) {
	case core_pll_clk:
		freq = pll_freq_get(CORE_PLL);
		break;
	case pass_pll_clk:
		freq = pll_freq_get(PASS_PLL);
		break;
	case tetris_pll_clk:
		if (!cpu_is_k2e())
			freq = pll_freq_get(TETRIS_PLL);
		break;
	case ddr3a_pll_clk:
		freq = pll_freq_get(DDR3A_PLL);
		break;
	case ddr3b_pll_clk:
		if (cpu_is_k2hk())
			freq = pll_freq_get(DDR3B_PLL);
		break;
	case uart_pll_clk:
		if (cpu_is_k2g())
			freq = pll_freq_get(UART_PLL);
		break;
	case sys_clk0_1_clk:
	case sys_clk0_clk:
		freq = pll_freq_get(CORE_PLL) / pll0div_read(1);
		break;
	case sys_clk1_clk:
		freq = pll_freq_get(CORE_PLL) / pll0div_read(2);
		break;
	case sys_clk2_clk:
		freq = pll_freq_get(CORE_PLL) / pll0div_read(3);
		break;
	case sys_clk3_clk:
		freq = pll_freq_get(CORE_PLL) / pll0div_read(4);
		break;
	case sys_clk0_2_clk:
		freq = ks_clk_get_rate(sys_clk0_clk) / 2;
		break;
	case sys_clk0_3_clk:
		freq = ks_clk_get_rate(sys_clk0_clk) / 3;
		break;
	case sys_clk0_4_clk:
		freq = ks_clk_get_rate(sys_clk0_clk) / 4;
		break;
	case sys_clk0_6_clk:
		freq = ks_clk_get_rate(sys_clk0_clk) / 6;
		break;
	case sys_clk0_8_clk:
		freq = ks_clk_get_rate(sys_clk0_clk) / 8;
		break;
	case sys_clk0_12_clk:
		freq = ks_clk_get_rate(sys_clk0_clk) / 12;
		break;
	case sys_clk0_24_clk:
		freq = ks_clk_get_rate(sys_clk0_clk) / 24;
		break;
	case sys_clk1_3_clk:
		freq = ks_clk_get_rate(sys_clk1_clk) / 3;
		break;
	case sys_clk1_4_clk:
		freq = ks_clk_get_rate(sys_clk1_clk) / 4;
		break;
	case sys_clk1_6_clk:
		freq = ks_clk_get_rate(sys_clk1_clk) / 6;
		break;
	case sys_clk1_12_clk:
		freq = ks_clk_get_rate(sys_clk1_clk) / 12;
		break;
	default:
		break;
	}

	return freq;
}