// SPDX-License-Identifier: GPL-2.0+
/*
 * ZynqMP clock driver
 *
 * Copyright (C) 2016 Xilinx, Inc.
 */

#include <log.h>
#include <malloc.h>
#include <dm/device_compat.h>
#include <linux/bitops.h>
#include <clk-uclass.h>
#include <clk.h>
#include <zynqmp_firmware.h>
#include <asm/arch/sys_proto.h>
#include <dm.h>
#include <linux/err.h>

static const resource_size_t zynqmp_crf_apb_clkc_base = 0xfd1a0020;
static const resource_size_t zynqmp_crl_apb_clkc_base = 0xff5e0020;

/* Full power domain clocks */
#define CRF_APB_APLL_CTRL		(zynqmp_crf_apb_clkc_base + 0x00)
#define CRF_APB_DPLL_CTRL		(zynqmp_crf_apb_clkc_base + 0x0c)
#define CRF_APB_VPLL_CTRL		(zynqmp_crf_apb_clkc_base + 0x18)
#define CRF_APB_PLL_STATUS		(zynqmp_crf_apb_clkc_base + 0x24)
#define CRF_APB_APLL_TO_LPD_CTRL	(zynqmp_crf_apb_clkc_base + 0x28)
#define CRF_APB_DPLL_TO_LPD_CTRL	(zynqmp_crf_apb_clkc_base + 0x2c)
#define CRF_APB_VPLL_TO_LPD_CTRL	(zynqmp_crf_apb_clkc_base + 0x30)
/* Peripheral clocks */
#define CRF_APB_ACPU_CTRL		(zynqmp_crf_apb_clkc_base + 0x40)
#define CRF_APB_DBG_TRACE_CTRL		(zynqmp_crf_apb_clkc_base + 0x44)
#define CRF_APB_DBG_FPD_CTRL		(zynqmp_crf_apb_clkc_base + 0x48)
#define CRF_APB_DP_VIDEO_REF_CTRL	(zynqmp_crf_apb_clkc_base + 0x50)
#define CRF_APB_DP_AUDIO_REF_CTRL	(zynqmp_crf_apb_clkc_base + 0x54)
#define CRF_APB_DP_STC_REF_CTRL		(zynqmp_crf_apb_clkc_base + 0x5c)
#define CRF_APB_DDR_CTRL		(zynqmp_crf_apb_clkc_base + 0x60)
#define CRF_APB_GPU_REF_CTRL		(zynqmp_crf_apb_clkc_base + 0x64)
#define CRF_APB_SATA_REF_CTRL		(zynqmp_crf_apb_clkc_base + 0x80)
#define CRF_APB_PCIE_REF_CTRL		(zynqmp_crf_apb_clkc_base + 0x94)
#define CRF_APB_GDMA_REF_CTRL		(zynqmp_crf_apb_clkc_base + 0x98)
#define CRF_APB_DPDMA_REF_CTRL		(zynqmp_crf_apb_clkc_base + 0x9c)
#define CRF_APB_TOPSW_MAIN_CTRL		(zynqmp_crf_apb_clkc_base + 0xa0)
#define CRF_APB_TOPSW_LSBUS_CTRL	(zynqmp_crf_apb_clkc_base + 0xa4)
#define CRF_APB_GTGREF0_REF_CTRL	(zynqmp_crf_apb_clkc_base + 0xa8)
#define CRF_APB_DBG_TSTMP_CTRL		(zynqmp_crf_apb_clkc_base + 0xd8)

/* Low power domain clocks */
#define CRL_APB_IOPLL_CTRL		(zynqmp_crl_apb_clkc_base + 0x00)
#define CRL_APB_RPLL_CTRL		(zynqmp_crl_apb_clkc_base + 0x10)
#define CRL_APB_PLL_STATUS		(zynqmp_crl_apb_clkc_base + 0x20)
#define CRL_APB_IOPLL_TO_FPD_CTRL	(zynqmp_crl_apb_clkc_base + 0x24)
#define CRL_APB_RPLL_TO_FPD_CTRL	(zynqmp_crl_apb_clkc_base + 0x28)
/* Peripheral clocks */
#define CRL_APB_USB3_DUAL_REF_CTRL	(zynqmp_crl_apb_clkc_base + 0x2c)
#define CRL_APB_GEM0_REF_CTRL		(zynqmp_crl_apb_clkc_base + 0x30)
#define CRL_APB_GEM1_REF_CTRL		(zynqmp_crl_apb_clkc_base + 0x34)
#define CRL_APB_GEM2_REF_CTRL		(zynqmp_crl_apb_clkc_base + 0x38)
#define CRL_APB_GEM3_REF_CTRL		(zynqmp_crl_apb_clkc_base + 0x3c)
#define CRL_APB_USB0_BUS_REF_CTRL	(zynqmp_crl_apb_clkc_base + 0x40)
#define CRL_APB_USB1_BUS_REF_CTRL	(zynqmp_crl_apb_clkc_base + 0x44)
#define CRL_APB_QSPI_REF_CTRL		(zynqmp_crl_apb_clkc_base + 0x48)
#define CRL_APB_SDIO0_REF_CTRL		(zynqmp_crl_apb_clkc_base + 0x4c)
#define CRL_APB_SDIO1_REF_CTRL		(zynqmp_crl_apb_clkc_base + 0x50)
#define CRL_APB_UART0_REF_CTRL		(zynqmp_crl_apb_clkc_base + 0x54)
#define CRL_APB_UART1_REF_CTRL		(zynqmp_crl_apb_clkc_base + 0x58)
#define CRL_APB_SPI0_REF_CTRL		(zynqmp_crl_apb_clkc_base + 0x5c)
#define CRL_APB_SPI1_REF_CTRL		(zynqmp_crl_apb_clkc_base + 0x60)
#define CRL_APB_CAN0_REF_CTRL		(zynqmp_crl_apb_clkc_base + 0x64)
#define CRL_APB_CAN1_REF_CTRL		(zynqmp_crl_apb_clkc_base + 0x68)
#define CRL_APB_CPU_R5_CTRL		(zynqmp_crl_apb_clkc_base + 0x70)
#define CRL_APB_IOU_SWITCH_CTRL		(zynqmp_crl_apb_clkc_base + 0x7c)
#define CRL_APB_CSU_PLL_CTRL		(zynqmp_crl_apb_clkc_base + 0x80)
#define CRL_APB_PCAP_CTRL		(zynqmp_crl_apb_clkc_base + 0x84)
#define CRL_APB_LPD_SWITCH_CTRL		(zynqmp_crl_apb_clkc_base + 0x88)
#define CRL_APB_LPD_LSBUS_CTRL		(zynqmp_crl_apb_clkc_base + 0x8c)
#define CRL_APB_DBG_LPD_CTRL		(zynqmp_crl_apb_clkc_base + 0x90)
#define CRL_APB_NAND_REF_CTRL		(zynqmp_crl_apb_clkc_base + 0x94)
#define CRL_APB_ADMA_REF_CTRL		(zynqmp_crl_apb_clkc_base + 0x98)
#define CRL_APB_PL0_REF_CTRL		(zynqmp_crl_apb_clkc_base + 0xa0)
#define CRL_APB_PL1_REF_CTRL		(zynqmp_crl_apb_clkc_base + 0xa4)
#define CRL_APB_PL2_REF_CTRL		(zynqmp_crl_apb_clkc_base + 0xa8)
#define CRL_APB_PL3_REF_CTRL		(zynqmp_crl_apb_clkc_base + 0xac)
#define CRL_APB_PL0_THR_CNT		(zynqmp_crl_apb_clkc_base + 0xb4)
#define CRL_APB_PL1_THR_CNT		(zynqmp_crl_apb_clkc_base + 0xbc)
#define CRL_APB_PL2_THR_CNT		(zynqmp_crl_apb_clkc_base + 0xc4)
#define CRL_APB_PL3_THR_CNT		(zynqmp_crl_apb_clkc_base + 0xdc)
#define CRL_APB_GEM_TSU_REF_CTRL	(zynqmp_crl_apb_clkc_base + 0xe0)
#define CRL_APB_DLL_REF_CTRL		(zynqmp_crl_apb_clkc_base + 0xe4)
#define CRL_APB_AMS_REF_CTRL		(zynqmp_crl_apb_clkc_base + 0xe8)
#define CRL_APB_I2C0_REF_CTRL		(zynqmp_crl_apb_clkc_base + 0x100)
#define CRL_APB_I2C1_REF_CTRL		(zynqmp_crl_apb_clkc_base + 0x104)
#define CRL_APB_TIMESTAMP_REF_CTRL	(zynqmp_crl_apb_clkc_base + 0x108)

#define ZYNQ_CLK_MAXDIV		0x3f
#define CLK_CTRL_DIV1_SHIFT	16
#define CLK_CTRL_DIV1_MASK	(ZYNQ_CLK_MAXDIV << CLK_CTRL_DIV1_SHIFT)
#define CLK_CTRL_DIV0_SHIFT	8
#define CLK_CTRL_DIV0_MASK	(ZYNQ_CLK_MAXDIV << CLK_CTRL_DIV0_SHIFT)
#define CLK_CTRL_SRCSEL_MASK	0x7
#define PLLCTRL_FBDIV_MASK	0x7f00
#define PLLCTRL_FBDIV_SHIFT	8
#define PLLCTRL_RESET_MASK	1
#define PLLCTRL_RESET_SHIFT	0
#define PLLCTRL_BYPASS_MASK	0x8
#define PLLCTRL_BYPASS_SHFT	3
#define PLLCTRL_POST_SRC_SHFT	24
#define PLLCTRL_POST_SRC_MASK	(0x7 << PLLCTRL_POST_SRC_SHFT)
#define PLLCTRL_PRE_SRC_SHFT	20
#define PLLCTRL_PRE_SRC_MASK	(0x7 << PLLCTRL_PRE_SRC_SHFT)

#define NUM_MIO_PINS	77

enum zynqmp_clk {
	iopll, rpll,
	apll, dpll, vpll,
	iopll_to_fpd, rpll_to_fpd, apll_to_lpd, dpll_to_lpd, vpll_to_lpd,
	acpu, acpu_half,
	dbg_fpd, dbg_lpd, dbg_trace, dbg_tstmp,
	dp_video_ref, dp_audio_ref,
	dp_stc_ref, gdma_ref, dpdma_ref,
	ddr_ref, sata_ref, pcie_ref,
	gpu_ref, gpu_pp0_ref, gpu_pp1_ref,
	topsw_main, topsw_lsbus,
	gtgref0_ref,
	lpd_switch, lpd_lsbus,
	usb0_bus_ref, usb1_bus_ref, usb3_dual_ref, usb0, usb1,
	cpu_r5, cpu_r5_core,
	csu_spb, csu_pll, pcap,
	iou_switch,
	gem_tsu_ref, gem_tsu,
	gem0_tx, gem1_tx, gem2_tx, gem3_tx,
	gem0_rx, gem1_rx, gem2_rx, gem3_rx,
	qspi_ref,
	sdio0_ref, sdio1_ref,
	uart0_ref, uart1_ref,
	spi0_ref, spi1_ref,
	nand_ref,
	i2c0_ref, i2c1_ref, can0_ref, can1_ref, can0, can1,
	dll_ref,
	adma_ref,
	timestamp_ref,
	ams_ref,
	pl0, pl1, pl2, pl3,
	wdt,
	gem0_ref = 104,
	gem1_ref, gem2_ref, gem3_ref,
	clk_max,
};

static const char * const clk_names[clk_max] = {
	"iopll", "rpll", "apll", "dpll",
	"vpll", "iopll_to_fpd", "rpll_to_fpd",
	"apll_to_lpd", "dpll_to_lpd", "vpll_to_lpd",
	"acpu", "acpu_half", "dbg_fpd", "dbg_lpd",
	"dbg_trace", "dbg_tstmp", "dp_video_ref",
	"dp_audio_ref", "dp_stc_ref", "gdma_ref",
	"dpdma_ref", "ddr_ref", "sata_ref", "pcie_ref",
	"gpu_ref", "gpu_pp0_ref", "gpu_pp1_ref",
	"topsw_main", "topsw_lsbus", "gtgref0_ref",
	"lpd_switch", "lpd_lsbus", "usb0_bus_ref",
	"usb1_bus_ref", "usb3_dual_ref", "usb0",
	"usb1", "cpu_r5", "cpu_r5_core", "csu_spb",
	"csu_pll", "pcap", "iou_switch", "gem_tsu_ref",
	"gem_tsu", "gem0_tx", "gem1_tx", "gem2_tx",
	"gem3_tx", "gem0_rx", "gem1_rx", "gem2_rx",
	"gem3_rx", "qspi_ref", "sdio0_ref", "sdio1_ref",
	"uart0_ref", "uart1_ref", "spi0_ref",
	"spi1_ref", "nand_ref", "i2c0_ref", "i2c1_ref",
	"can0_ref", "can1_ref", "can0", "can1",
	"dll_ref", "adma_ref", "timestamp_ref",
	"ams_ref", "pl0", "pl1", "pl2", "pl3", "wdt",
	NULL, NULL, NULL, NULL,
	NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL,
	NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL,
	NULL, NULL, NULL, NULL, "gem0_ref", "gem1_ref", "gem2_ref", "gem3_ref",
};

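/*
 * Parent PLL lookup table: rows are indexed by enum zynqmp_clk_pll_src
 * (one row per consumer group), columns by the SRCSEL field read from the
 * corresponding clock control register. Entries of 0xff mark selections
 * with no usable PLL parent.
 */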
static const u32 pll_src[][4] = {
	{apll, 0xff, dpll, vpll},		/* acpu */
	{dpll, vpll, 0xff, 0xff},		/* ddr_ref */
	{rpll, iopll, 0xff, 0xff},		/* dll_ref */
	{iopll, 0xff, rpll, dpll_to_lpd},	/* gem_tsu_ref */
	{iopll, 0xff, rpll, dpll},		/* peripheral */
	{apll, 0xff, iopll_to_fpd, dpll},	/* wdt */
	{iopll_to_fpd, 0xff, dpll, apll},	/* dbg_fpd */
	{iopll, 0xff, rpll, dpll_to_lpd},	/* timestamp_ref */
	{iopll_to_fpd, 0xff, apll, dpll},	/* sata_ref */
	{iopll_to_fpd, 0xff, rpll_to_fpd, dpll},/* pcie_ref */
	{iopll_to_fpd, 0xff, vpll, dpll},	/* gpu_ref */
	{apll, 0xff, vpll, dpll},		/* topsw_main_ref */
	{rpll, 0xff, iopll, dpll_to_lpd},	/* cpu_r5_ref */
};

enum zynqmp_clk_pll_src {
	ACPU_CLK_SRC = 0,
	DDR_CLK_SRC,
	DLL_CLK_SRC,
	GEM_TSU_CLK_SRC,
	PERI_CLK_SRC,
	WDT_CLK_SRC,
	DBG_FPD_CLK_SRC,
	TIMESTAMP_CLK_SRC,
	SATA_CLK_SRC,
	PCIE_CLK_SRC,
	GPU_CLK_SRC,
	TOPSW_MAIN_CLK_SRC,
	CPU_R5_CLK_SRC
};

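/* External reference clock rates, read from the device tree at probe time */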
struct zynqmp_clk_priv {
	unsigned long ps_clk_freq;
	unsigned long video_clk;
	unsigned long pss_alt_ref_clk;
	unsigned long gt_crx_ref_clk;
	unsigned long aux_ref_clk;
};

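/*
 * zynqmp_clk_get_register() - Map a clock ID to its control register
 * @id: clock identifier
 *
 * Return: address of the CRF_APB/CRL_APB control register driving the
 * given clock, or 0 when the ID is not handled by this driver.
 */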
static u32 zynqmp_clk_get_register(enum zynqmp_clk id)
{
	switch (id) {
	case iopll:
		return CRL_APB_IOPLL_CTRL;
	case rpll:
		return CRL_APB_RPLL_CTRL;
	case apll:
		return CRF_APB_APLL_CTRL;
	case dpll:
		return CRF_APB_DPLL_CTRL;
	case vpll:
		return CRF_APB_VPLL_CTRL;
	case acpu:
		return CRF_APB_ACPU_CTRL;
	case dbg_fpd:
		return CRF_APB_DBG_FPD_CTRL;
	case dbg_trace:
		return CRF_APB_DBG_TRACE_CTRL;
	case dbg_tstmp:
		return CRF_APB_DBG_TSTMP_CTRL;
	case dp_video_ref:
		return CRF_APB_DP_VIDEO_REF_CTRL;
	case dp_audio_ref:
		return CRF_APB_DP_AUDIO_REF_CTRL;
	case dp_stc_ref:
		return CRF_APB_DP_STC_REF_CTRL;
	case gpu_ref ... gpu_pp1_ref:
		return CRF_APB_GPU_REF_CTRL;
	case ddr_ref:
		return CRF_APB_DDR_CTRL;
	case sata_ref:
		return CRF_APB_SATA_REF_CTRL;
	case pcie_ref:
		return CRF_APB_PCIE_REF_CTRL;
	case gdma_ref:
		return CRF_APB_GDMA_REF_CTRL;
	case dpdma_ref:
		return CRF_APB_DPDMA_REF_CTRL;
	case topsw_main:
		return CRF_APB_TOPSW_MAIN_CTRL;
	case topsw_lsbus:
		return CRF_APB_TOPSW_LSBUS_CTRL;
	case lpd_switch:
		return CRL_APB_LPD_SWITCH_CTRL;
	case lpd_lsbus:
		return CRL_APB_LPD_LSBUS_CTRL;
	case qspi_ref:
		return CRL_APB_QSPI_REF_CTRL;
	case usb3_dual_ref:
		return CRL_APB_USB3_DUAL_REF_CTRL;
	case gem_tsu_ref:
	case gem_tsu:
		return CRL_APB_GEM_TSU_REF_CTRL;
	case gem0_tx:
	case gem0_rx:
	case gem0_ref:
		return CRL_APB_GEM0_REF_CTRL;
	case gem1_tx:
	case gem1_rx:
	case gem1_ref:
		return CRL_APB_GEM1_REF_CTRL;
	case gem2_tx:
	case gem2_rx:
	case gem2_ref:
		return CRL_APB_GEM2_REF_CTRL;
	case gem3_tx:
	case gem3_rx:
	case gem3_ref:
		return CRL_APB_GEM3_REF_CTRL;
	case usb0_bus_ref:
		return CRL_APB_USB0_BUS_REF_CTRL;
	case usb1_bus_ref:
		return CRL_APB_USB1_BUS_REF_CTRL;
	case cpu_r5:
		return CRL_APB_CPU_R5_CTRL;
	case uart0_ref:
		return CRL_APB_UART0_REF_CTRL;
	case uart1_ref:
		return CRL_APB_UART1_REF_CTRL;
	case sdio0_ref:
		return CRL_APB_SDIO0_REF_CTRL;
	case sdio1_ref:
		return CRL_APB_SDIO1_REF_CTRL;
	case spi0_ref:
		return CRL_APB_SPI0_REF_CTRL;
	case spi1_ref:
		return CRL_APB_SPI1_REF_CTRL;
	case nand_ref:
		return CRL_APB_NAND_REF_CTRL;
	case i2c0_ref:
		return CRL_APB_I2C0_REF_CTRL;
	case i2c1_ref:
		return CRL_APB_I2C1_REF_CTRL;
	case can0_ref:
		return CRL_APB_CAN0_REF_CTRL;
	case can1_ref:
		return CRL_APB_CAN1_REF_CTRL;
	case dll_ref:
		return CRL_APB_DLL_REF_CTRL;
	case adma_ref:
		return CRL_APB_ADMA_REF_CTRL;
	case timestamp_ref:
		return CRL_APB_TIMESTAMP_REF_CTRL;
	case ams_ref:
		return CRL_APB_AMS_REF_CTRL;
	case pl0:
		return CRL_APB_PL0_REF_CTRL;
	case pl1:
		return CRL_APB_PL1_REF_CTRL;
	case pl2:
		return CRL_APB_PL2_REF_CTRL;
	case pl3:
		return CRL_APB_PL3_REF_CTRL;
	case wdt:
		return CRF_APB_TOPSW_LSBUS_CTRL;
	case iopll_to_fpd:
		return CRL_APB_IOPLL_TO_FPD_CTRL;
	default:
		debug("Invalid clk id %d\n", id);
	}
	return 0;
}

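/*
 * zynqmp_clk_get_pll_src() - Resolve the reference input of a PLL
 * @clk_ctrl: cached PLL control register value
 * @priv: driver private data holding the reference clock rates
 * @is_pre_src: use the PRE_SRC field instead of POST_SRC
 *
 * Selections 4-7 map to video_clk, pss_alt_ref_clk, aux_ref_clk and
 * gt_crx_ref_clk respectively; everything else falls back to ps_clk_freq.
 */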
static ulong zynqmp_clk_get_pll_src(ulong clk_ctrl,
				    struct zynqmp_clk_priv *priv,
				    bool is_pre_src)
{
	u32 src_sel;

	if (is_pre_src)
		src_sel = (clk_ctrl & PLLCTRL_PRE_SRC_MASK) >>
			  PLLCTRL_PRE_SRC_SHFT;
	else
		src_sel = (clk_ctrl & PLLCTRL_POST_SRC_MASK) >>
			  PLLCTRL_POST_SRC_SHFT;

	switch (src_sel) {
	case 4:
		return priv->video_clk;
	case 5:
		return priv->pss_alt_ref_clk;
	case 6:
		return priv->aux_ref_clk;
	case 7:
		return priv->gt_crx_ref_clk;
	case 0 ... 3:
	default:
		return priv->ps_clk_freq;
	}
}

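/*
 * zynqmp_clk_get_pll_rate() - Compute the output rate of a PLL
 *
 * The reference is taken from POST_SRC when the PLL is bypassed,
 * otherwise from PRE_SRC. A PLL held in reset (and not bypassed)
 * reports 0. The rate is the reference multiplied by FBDIV, halved
 * when bit 16 of the control register is set.
 */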
static ulong zynqmp_clk_get_pll_rate(struct zynqmp_clk_priv *priv,
				     enum zynqmp_clk id)
{
	u32 clk_ctrl, reset, mul;
	ulong freq;
	int ret;

	ret = zynqmp_mmio_read(zynqmp_clk_get_register(id), &clk_ctrl);
	if (ret) {
		printf("%s mmio read fail\n", __func__);
		return -EIO;
	}

	if (clk_ctrl & PLLCTRL_BYPASS_MASK)
		freq = zynqmp_clk_get_pll_src(clk_ctrl, priv, 0);
	else
		freq = zynqmp_clk_get_pll_src(clk_ctrl, priv, 1);

	reset = (clk_ctrl & PLLCTRL_RESET_MASK) >> PLLCTRL_RESET_SHIFT;
	if (reset && !(clk_ctrl & PLLCTRL_BYPASS_MASK))
		return 0;

	mul = (clk_ctrl & PLLCTRL_FBDIV_MASK) >> PLLCTRL_FBDIV_SHIFT;

	freq *= mul;

	if (clk_ctrl & (1 << 16))
		freq /= 2;

	return freq;
}

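/*
 * zynqmp_clk_get_cpu_rate() - Get the ACPU clock rate
 *
 * Reads CRF_APB_ACPU_CTRL, resolves the parent PLL through the
 * ACPU_CLK_SRC row of pll_src[] and divides the PLL rate by DIV0.
 */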
static ulong zynqmp_clk_get_cpu_rate(struct zynqmp_clk_priv *priv,
				     enum zynqmp_clk id)
{
	u32 clk_ctrl, div, srcsel;
	enum zynqmp_clk pll;
	int ret;
	unsigned long pllrate;

	ret = zynqmp_mmio_read(CRF_APB_ACPU_CTRL, &clk_ctrl);
	if (ret) {
		printf("%s mmio read fail\n", __func__);
		return -EIO;
	}

	div = (clk_ctrl & CLK_CTRL_DIV0_MASK) >> CLK_CTRL_DIV0_SHIFT;

	srcsel = clk_ctrl & CLK_CTRL_SRCSEL_MASK;
	pll = pll_src[ACPU_CLK_SRC][srcsel];
	pllrate = zynqmp_clk_get_pll_rate(priv, pll);
	if (IS_ERR_VALUE(pllrate))
		return pllrate;

	return DIV_ROUND_CLOSEST(pllrate, div);
}

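/*
 * zynqmp_clk_get_ddr_rate() - Get the DDR clock rate from CRF_APB_DDR_CTRL
 * using the DDR_CLK_SRC row of pll_src[] and the DIV0 divider.
 */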
static ulong zynqmp_clk_get_ddr_rate(struct zynqmp_clk_priv *priv)
{
	u32 clk_ctrl, div, srcsel;
	enum zynqmp_clk pll;
	int ret;
	ulong pllrate;

	ret = zynqmp_mmio_read(CRF_APB_DDR_CTRL, &clk_ctrl);
	if (ret) {
		printf("%s mmio read fail\n", __func__);
		return -EIO;
	}

	div = (clk_ctrl & CLK_CTRL_DIV0_MASK) >> CLK_CTRL_DIV0_SHIFT;

	srcsel = clk_ctrl & CLK_CTRL_SRCSEL_MASK;
	pll = pll_src[DDR_CLK_SRC][srcsel];
	pllrate = zynqmp_clk_get_pll_rate(priv, pll);
	if (IS_ERR_VALUE(pllrate))
		return pllrate;

	return DIV_ROUND_CLOSEST(pllrate, div);
}

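/*
 * zynqmp_clk_get_dll_rate() - Get the DLL reference rate
 *
 * No divider is applied here; the rate equals the rate of the PLL
 * selected by CRL_APB_DLL_REF_CTRL.
 */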
static ulong zynqmp_clk_get_dll_rate(struct zynqmp_clk_priv *priv)
{
	u32 clk_ctrl, srcsel;
	enum zynqmp_clk pll;
	ulong pllrate;
	int ret;

	ret = zynqmp_mmio_read(CRL_APB_DLL_REF_CTRL, &clk_ctrl);
	if (ret) {
		printf("%s mmio read fail\n", __func__);
		return -EIO;
	}

	srcsel = clk_ctrl & CLK_CTRL_SRCSEL_MASK;
	pll = pll_src[DLL_CLK_SRC][srcsel];
	pllrate = zynqmp_clk_get_pll_rate(priv, pll);
	if (IS_ERR_VALUE(pllrate))
		return pllrate;

	return pllrate;
}

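/*
 * zynqmp_clk_get_peripheral_rate() - Get the rate of a peripheral clock
 *
 * Applies DIV0 (and DIV1 when @two_divs is set) from the clock control
 * register to the parent PLL rate. gem_tsu_ref uses its own source row;
 * all other callers use the generic peripheral row of pll_src[].
 */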
static ulong zynqmp_clk_get_peripheral_rate(struct zynqmp_clk_priv *priv,
					    enum zynqmp_clk id, bool two_divs)
{
	enum zynqmp_clk pll;
	u32 clk_ctrl, div0, srcsel;
	u32 div1 = 1;
	int ret;
	ulong pllrate;

	ret = zynqmp_mmio_read(zynqmp_clk_get_register(id), &clk_ctrl);
	if (ret) {
		printf("%s mmio read fail\n", __func__);
		return -EIO;
	}

	div0 = (clk_ctrl & CLK_CTRL_DIV0_MASK) >> CLK_CTRL_DIV0_SHIFT;
	if (!div0)
		div0 = 1;

	if (two_divs) {
		div1 = (clk_ctrl & CLK_CTRL_DIV1_MASK) >> CLK_CTRL_DIV1_SHIFT;
		if (!div1)
			div1 = 1;
	}
	srcsel = clk_ctrl & CLK_CTRL_SRCSEL_MASK;

	if (id == gem_tsu_ref)
		pll = pll_src[GEM_TSU_CLK_SRC][srcsel];
	else
		pll = pll_src[PERI_CLK_SRC][srcsel];

	pllrate = zynqmp_clk_get_pll_rate(priv, pll);
	if (IS_ERR_VALUE(pllrate))
		return pllrate;

	return
		DIV_ROUND_CLOSEST(
			DIV_ROUND_CLOSEST(pllrate, div0), div1);
}

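/*
 * zynqmp_clk_get_crf_crl_rate() - Get the rate of a CRF/CRL system clock
 *
 * The parent PLL row is chosen per clock ID. When @two_divs is set the
 * second divider is read from the DIV0 field of the parent's own control
 * register. iopll_to_fpd is folded back to iopll before the PLL rate
 * lookup.
 */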
static ulong zynqmp_clk_get_crf_crl_rate(struct zynqmp_clk_priv *priv,
					 enum zynqmp_clk id, bool two_divs)
{
	enum zynqmp_clk pll;
	u32 clk_ctrl, div0, srcsel;
	u32 div1 = 1;
	int ret;
	ulong pllrate;

	ret = zynqmp_mmio_read(zynqmp_clk_get_register(id), &clk_ctrl);
	if (ret) {
		printf("%d %s mmio read fail\n", __LINE__, __func__);
		return -EIO;
	}

	div0 = (clk_ctrl & CLK_CTRL_DIV0_MASK) >> CLK_CTRL_DIV0_SHIFT;
	if (!div0)
		div0 = 1;
	srcsel = clk_ctrl & CLK_CTRL_SRCSEL_MASK;

	switch (id) {
	case wdt:
	case dbg_trace:
	case topsw_lsbus:
		pll = pll_src[WDT_CLK_SRC][srcsel];
		break;
	case dbg_fpd:
	case dbg_tstmp:
		pll = pll_src[DBG_FPD_CLK_SRC][srcsel];
		break;
	case timestamp_ref:
		pll = pll_src[TIMESTAMP_CLK_SRC][srcsel];
		break;
	case sata_ref:
		pll = pll_src[SATA_CLK_SRC][srcsel];
		break;
	case pcie_ref:
		pll = pll_src[PCIE_CLK_SRC][srcsel];
		break;
	case gpu_ref ... gpu_pp1_ref:
		pll = pll_src[GPU_CLK_SRC][srcsel];
		break;
	case gdma_ref:
	case dpdma_ref:
	case topsw_main:
		pll = pll_src[TOPSW_MAIN_CLK_SRC][srcsel];
		break;
	case cpu_r5:
	case ams_ref:
	case adma_ref:
	case lpd_lsbus:
	case lpd_switch:
		pll = pll_src[CPU_R5_CLK_SRC][srcsel];
		break;
	default:
		return -ENXIO;
	}
	if (two_divs) {
		ret = zynqmp_mmio_read(zynqmp_clk_get_register(pll), &clk_ctrl);
		if (ret) {
			printf("%d %s mmio read fail\n", __LINE__, __func__);
			return -EIO;
		}
		div1 = (clk_ctrl & CLK_CTRL_DIV0_MASK) >> CLK_CTRL_DIV0_SHIFT;
		if (!div1)
			div1 = 1;
	}

	if (pll == iopll_to_fpd)
		pll = iopll;

	pllrate = zynqmp_clk_get_pll_rate(priv, pll);
	if (IS_ERR_VALUE(pllrate))
		return pllrate;

	return
		DIV_ROUND_CLOSEST(
			DIV_ROUND_CLOSEST(pllrate, div0), div1);
}

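/*
 * zynqmp_clk_calc_peripheral_two_divs() - Find the best DIV0/DIV1 pair
 *
 * Exhaustively searches div0 in 1..63 and div1 in 1..31 for the pair
 * whose resulting rate is closest to the requested one; the chosen
 * dividers are returned through @div0/@div1.
 */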
static unsigned long zynqmp_clk_calc_peripheral_two_divs(ulong rate,
						       ulong pll_rate,
						       u32 *div0, u32 *div1)
{
	long new_err, best_err = (long)(~0UL >> 1);
	ulong new_rate, best_rate = 0;
	u32 d0, d1;

	for (d0 = 1; d0 <= ZYNQ_CLK_MAXDIV; d0++) {
		for (d1 = 1; d1 <= ZYNQ_CLK_MAXDIV >> 1; d1++) {
			new_rate = DIV_ROUND_CLOSEST(
					DIV_ROUND_CLOSEST(pll_rate, d0), d1);
			new_err = abs(new_rate - rate);

			if (new_err < best_err) {
				*div0 = d0;
				*div1 = d1;
				best_err = new_err;
				best_rate = new_rate;
			}
		}
	}

	return best_rate;
}

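/*
 * zynqmp_clk_set_peripheral_rate() - Program DIV0 (and DIV1) for a clock
 *
 * Computes the divider(s) from the current parent PLL rate, writes them
 * into the clock control register via zynqmp_mmio_write() and returns
 * the rate that was actually selected.
 */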
static ulong zynqmp_clk_set_peripheral_rate(struct zynqmp_clk_priv *priv,
					    enum zynqmp_clk id, ulong rate,
					    bool two_divs)
{
	enum zynqmp_clk pll;
	u32 clk_ctrl, div0 = 0, div1 = 0;
	ulong pll_rate, new_rate;
	u32 reg, srcsel;
	int ret;
	u32 mask;

	reg = zynqmp_clk_get_register(id);
	ret = zynqmp_mmio_read(reg, &clk_ctrl);
	if (ret) {
		printf("%s mmio read fail\n", __func__);
		return -EIO;
	}

	srcsel = clk_ctrl & CLK_CTRL_SRCSEL_MASK;
	pll = pll_src[PERI_CLK_SRC][srcsel];
	pll_rate = zynqmp_clk_get_pll_rate(priv, pll);
	if (IS_ERR_VALUE(pll_rate))
		return pll_rate;

	clk_ctrl &= ~CLK_CTRL_DIV0_MASK;
	if (two_divs) {
		clk_ctrl &= ~CLK_CTRL_DIV1_MASK;
		new_rate = zynqmp_clk_calc_peripheral_two_divs(rate, pll_rate,
							       &div0, &div1);
		clk_ctrl |= div1 << CLK_CTRL_DIV1_SHIFT;
	} else {
		div0 = DIV_ROUND_CLOSEST(pll_rate, rate);
		if (div0 > ZYNQ_CLK_MAXDIV)
			div0 = ZYNQ_CLK_MAXDIV;
		new_rate = DIV_ROUND_CLOSEST(rate, div0);
	}
	clk_ctrl |= div0 << CLK_CTRL_DIV0_SHIFT;

	mask = (ZYNQ_CLK_MAXDIV << CLK_CTRL_DIV0_SHIFT) |
	       (ZYNQ_CLK_MAXDIV << CLK_CTRL_DIV1_SHIFT);

	ret = zynqmp_mmio_write(reg, mask, clk_ctrl);
	if (ret) {
		printf("%s mmio write fail\n", __func__);
		return -EIO;
	}

	return new_rate;
}

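/*
 * zynqmp_clk_get_rate() - clk_ops .get_rate callback
 *
 * Dispatches on the clock ID to the PLL, ACPU, DDR, DLL, peripheral or
 * CRF/CRL rate helpers above.
 */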
static ulong zynqmp_clk_get_rate(struct clk *clk)
{
	struct zynqmp_clk_priv *priv = dev_get_priv(clk->dev);
	enum zynqmp_clk id = clk->id;
	bool two_divs = false;

	switch (id) {
	case iopll ... vpll:
		return zynqmp_clk_get_pll_rate(priv, id);
	case acpu:
		return zynqmp_clk_get_cpu_rate(priv, id);
	case ddr_ref:
		return zynqmp_clk_get_ddr_rate(priv);
	case dll_ref:
		return zynqmp_clk_get_dll_rate(priv);
	case gem_tsu_ref:
	case dp_video_ref ... dp_stc_ref:
	case pl0 ... pl3:
	case gem0_ref ... gem3_ref:
	case gem0_tx ... gem3_tx:
	case qspi_ref ... can1_ref:
	case usb0_bus_ref ... usb3_dual_ref:
		two_divs = true;
		return zynqmp_clk_get_peripheral_rate(priv, id, two_divs);
	case wdt:
	case topsw_lsbus:
	case sata_ref ... gpu_pp1_ref:
		two_divs = true;
		fallthrough;
	case cpu_r5:
	case dbg_fpd:
	case ams_ref:
	case adma_ref:
	case lpd_lsbus:
	case dbg_trace:
	case dbg_tstmp:
	case lpd_switch:
	case topsw_main:
	case timestamp_ref:
	case gdma_ref ... dpdma_ref:
		return zynqmp_clk_get_crf_crl_rate(priv, id, two_divs);
	default:
		return -ENXIO;
	}
}

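/*
 * zynqmp_clk_set_rate() - clk_ops .set_rate callback
 *
 * Only the two-divider peripheral clocks (GEM, gem_tsu, QSPI, SDIO,
 * UART, SPI, CAN, NAND, I2C and USB) can be reprogrammed; all other
 * IDs return -ENXIO.
 */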
static ulong zynqmp_clk_set_rate(struct clk *clk, ulong rate)
{
	struct zynqmp_clk_priv *priv = dev_get_priv(clk->dev);
	enum zynqmp_clk id = clk->id;
	bool two_divs = true;

	switch (id) {
	case gem0_ref ... gem3_ref:
	case gem0_tx ... gem3_tx:
	case gem0_rx ... gem3_rx:
	case gem_tsu:
	case qspi_ref ... can1_ref:
	case usb0_bus_ref ... usb3_dual_ref:
		return zynqmp_clk_set_peripheral_rate(priv, id,
						      rate, two_divs);
	default:
		return -ENXIO;
	}
}

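/*
 * zynqmp_clk_dump() - Back the 'clk dump' command: print the rate of
 * every named clock, or "unknown" when the rate cannot be read.
 */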
#if IS_ENABLED(CONFIG_CMD_CLK)
static void zynqmp_clk_dump(struct udevice *dev)
{
	int i, ret;

	printf("clk\t\tfrequency\n");
	for (i = 0; i < clk_max; i++) {
		const char *name = clk_names[i];

		if (name) {
			struct clk clk;
			unsigned long rate;

			clk.id = i;
			ret = clk_request(dev, &clk);
			if (ret < 0) {
				printf("%s clk_request() failed: %d\n",
				       __func__, ret);
				break;
			}

			rate = clk_get_rate(&clk);

			if ((rate == (unsigned long)-ENOSYS) ||
			    (rate == (unsigned long)-ENXIO) ||
			    (rate == (unsigned long)-EIO))
				printf("%10s%20s\n", name, "unknown");
			else
				printf("%10s%20lu\n", name, rate);
		}
	}
}
#endif

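/*
 * zynqmp_get_freq_by_name() - Look up a named input clock of this
 * device and return its rate through @freq.
 */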
static int zynqmp_get_freq_by_name(char *name, struct udevice *dev, ulong *freq)
{
	struct clk clk;
	int ret;

	ret = clk_get_by_name(dev, name, &clk);
	if (ret < 0) {
		dev_err(dev, "failed to get %s\n", name);
		return ret;
	}

	*freq = clk_get_rate(&clk);
	if (IS_ERR_VALUE(*freq)) {
		dev_err(dev, "failed to get rate %s\n", name);
		return -EINVAL;
	}

	return 0;
}
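
/*
 * zynqmp_clk_probe() - Cache the rates of the five external reference
 * clocks (pss_ref_clk, video_clk, pss_alt_ref_clk, aux_ref_clk and
 * gt_crx_ref_clk) in the driver's private data.
 */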
static int zynqmp_clk_probe(struct udevice *dev)
{
	int ret;
	struct zynqmp_clk_priv *priv = dev_get_priv(dev);

	debug("%s\n", __func__);
	ret = zynqmp_get_freq_by_name("pss_ref_clk", dev, &priv->ps_clk_freq);
	if (ret < 0)
		return -EINVAL;

	ret = zynqmp_get_freq_by_name("video_clk", dev, &priv->video_clk);
	if (ret < 0)
		return -EINVAL;

	ret = zynqmp_get_freq_by_name("pss_alt_ref_clk", dev,
				      &priv->pss_alt_ref_clk);
	if (ret < 0)
		return -EINVAL;

	ret = zynqmp_get_freq_by_name("aux_ref_clk", dev, &priv->aux_ref_clk);
	if (ret < 0)
		return -EINVAL;

	ret = zynqmp_get_freq_by_name("gt_crx_ref_clk", dev,
				      &priv->gt_crx_ref_clk);
	if (ret < 0)
		return -EINVAL;

	return 0;
}

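/*
 * zynqmp_clk_enable() - Set the clock-active bit(s) in the control
 * register: bit 25 for USB clocks, bits 25-26 for GEM clocks, bit 24
 * for the other supported peripherals.
 */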
static int zynqmp_clk_enable(struct clk *clk)
{
	enum zynqmp_clk id = clk->id;
	u32 reg, clk_ctrl, clkact_shift, mask;
	int ret;

	reg = zynqmp_clk_get_register(id);
	debug("%s, clk_id:%x, clk_base:0x%x\n", __func__, id, reg);

	switch (id) {
	case usb0_bus_ref ... usb1:
		clkact_shift = 25;
		mask = 0x1;
		break;
	case gem0_tx ... gem3_tx:
	case gem0_ref ... gem3_ref:
		clkact_shift = 25;
		mask = 0x3;
		break;
	case qspi_ref ... can1_ref:
	case lpd_lsbus:
	case topsw_lsbus:
		clkact_shift = 24;
		mask = 0x1;
		break;
	default:
		return -ENXIO;
	}

	ret = zynqmp_mmio_read(reg, &clk_ctrl);
	if (ret) {
		printf("%s mmio read fail\n", __func__);
		return -EIO;
	}

	clk_ctrl |= (mask << clkact_shift);
	ret = zynqmp_mmio_write(reg, mask << clkact_shift, clk_ctrl);
	if (ret) {
		printf("%s mmio write fail\n", __func__);
		return -EIO;
	}

	return ret;
}

static struct clk_ops zynqmp_clk_ops = {
	.set_rate = zynqmp_clk_set_rate,
	.get_rate = zynqmp_clk_get_rate,
	.enable = zynqmp_clk_enable,
#if IS_ENABLED(CONFIG_CMD_CLK)
	.dump = zynqmp_clk_dump,
#endif
};

static const struct udevice_id zynqmp_clk_ids[] = {
	{ .compatible = "xlnx,zynqmp-clk" },
	{ }
};

U_BOOT_DRIVER(zynqmp_clk) = {
	.name = "zynqmp_clk",
	.id = UCLASS_CLK,
	.of_match = zynqmp_clk_ids,
	.probe = zynqmp_clk_probe,
	.ops = &zynqmp_clk_ops,
	.priv_auto = sizeof(struct zynqmp_clk_priv),
};