// SPDX-License-Identifier: GPL-2.0+
/*
 * Copyright (C) 2017 Weidmüller Interface GmbH & Co. KG
 * Stefan Herbrechtsmeier <stefan.herbrechtsmeier@weidmueller.com>
 *
 * Copyright (C) 2013 Soren Brinkmann <soren.brinkmann@xilinx.com>
 * Copyright (C) 2013 Xilinx, Inc. All rights reserved.
 */

#include <common.h>
#include <clk-uclass.h>
#include <dm.h>
#include <log.h>
#include <asm/global_data.h>
#include <dm/device_compat.h>
#include <dm/lists.h>
#include <errno.h>
#include <asm/io.h>
#include <asm/arch/clk.h>
#include <asm/arch/hardware.h>
#include <asm/arch/sys_proto.h>

/* Register bitfield defines */
#define PLLCTRL_FBDIV_MASK	0x7f000
#define PLLCTRL_FBDIV_SHIFT	12
#define PLLCTRL_BPFORCE_MASK	(1 << 4)
#define PLLCTRL_PWRDWN_MASK	2
#define PLLCTRL_PWRDWN_SHIFT	1
#define PLLCTRL_RESET_MASK	1
#define PLLCTRL_RESET_SHIFT	0

#define ZYNQ_CLK_MAXDIV		0x3f
#define CLK_CTRL_DIV1_SHIFT	20
#define CLK_CTRL_DIV1_MASK	(ZYNQ_CLK_MAXDIV << CLK_CTRL_DIV1_SHIFT)
#define CLK_CTRL_DIV0_SHIFT	8
#define CLK_CTRL_DIV0_MASK	(ZYNQ_CLK_MAXDIV << CLK_CTRL_DIV0_SHIFT)
#define CLK_CTRL_SRCSEL_SHIFT	4
#define CLK_CTRL_SRCSEL_MASK	(0x3 << CLK_CTRL_SRCSEL_SHIFT)

#define CLK_CTRL_DIV2X_SHIFT	26
#define CLK_CTRL_DIV2X_MASK	(ZYNQ_CLK_MAXDIV << CLK_CTRL_DIV2X_SHIFT)
#define CLK_CTRL_DIV3X_SHIFT	20
#define CLK_CTRL_DIV3X_MASK	(ZYNQ_CLK_MAXDIV << CLK_CTRL_DIV3X_SHIFT)

DECLARE_GLOBAL_DATA_PTR;

#ifndef CONFIG_SPL_BUILD
enum zynq_clk_rclk {mio_clk, emio_clk};
#endif

struct zynq_clk_priv {
	ulong ps_clk_freq;
#ifndef CONFIG_SPL_BUILD
	struct clk gem_emio_clk[2];
#endif
};

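/*
 * zynq_clk_get_register() - Map a clock ID to its SLCR control register
 *
 * Returns a pointer to the PLL or clock control register in the SLCR block
 * that configures the given clock; the debug clocks and any unhandled ID
 * fall back to dbg_clk_ctrl.
 */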
static void *zynq_clk_get_register(enum zynq_clk id)
{
	switch (id) {
	case armpll_clk:
		return &slcr_base->arm_pll_ctrl;
	case ddrpll_clk:
		return &slcr_base->ddr_pll_ctrl;
	case iopll_clk:
		return &slcr_base->io_pll_ctrl;
	case lqspi_clk:
		return &slcr_base->lqspi_clk_ctrl;
	case smc_clk:
		return &slcr_base->smc_clk_ctrl;
	case pcap_clk:
		return &slcr_base->pcap_clk_ctrl;
	case sdio0_clk ... sdio1_clk:
		return &slcr_base->sdio_clk_ctrl;
	case uart0_clk ... uart1_clk:
		return &slcr_base->uart_clk_ctrl;
	case spi0_clk ... spi1_clk:
		return &slcr_base->spi_clk_ctrl;
#ifndef CONFIG_SPL_BUILD
	case dci_clk:
		return &slcr_base->dci_clk_ctrl;
	case gem0_clk:
		return &slcr_base->gem0_clk_ctrl;
	case gem1_clk:
		return &slcr_base->gem1_clk_ctrl;
	case fclk0_clk:
		return &slcr_base->fpga0_clk_ctrl;
	case fclk1_clk:
		return &slcr_base->fpga1_clk_ctrl;
	case fclk2_clk:
		return &slcr_base->fpga2_clk_ctrl;
	case fclk3_clk:
		return &slcr_base->fpga3_clk_ctrl;
	case can0_clk ... can1_clk:
		return &slcr_base->can_clk_ctrl;
	case dbg_trc_clk ... dbg_apb_clk:
		/* fall through */
#endif
	default:
		return &slcr_base->dbg_clk_ctrl;
	}
}

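/*
 * zynq_clk_get_cpu_pll() - Decode the SRCSEL field of the ARM clock control
 *
 * Returns the PLL feeding the CPU clock domain: 0/1 selects the ARM PLL,
 * 2 the DDR PLL and 3 the IO PLL.
 */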
static enum zynq_clk zynq_clk_get_cpu_pll(u32 clk_ctrl)
{
	u32 srcsel = (clk_ctrl & CLK_CTRL_SRCSEL_MASK) >> CLK_CTRL_SRCSEL_SHIFT;

	switch (srcsel) {
	case 2:
		return ddrpll_clk;
	case 3:
		return iopll_clk;
	case 0 ... 1:
	default:
		return armpll_clk;
	}
}

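/*
 * zynq_clk_get_peripheral_pll() - Decode the SRCSEL field of a peripheral
 * clock control register
 *
 * Returns the source PLL: 0/1 selects the IO PLL, 2 the ARM PLL and
 * 3 the DDR PLL.
 */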
static enum zynq_clk zynq_clk_get_peripheral_pll(u32 clk_ctrl)
{
	u32 srcsel = (clk_ctrl & CLK_CTRL_SRCSEL_MASK) >> CLK_CTRL_SRCSEL_SHIFT;

	switch (srcsel) {
	case 2:
		return armpll_clk;
	case 3:
		return ddrpll_clk;
	case 0 ... 1:
	default:
		return iopll_clk;
	}
}

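/*
 * zynq_clk_get_pll_rate() - Compute a PLL output rate
 *
 * Returns 0 if the PLL is held in reset or powered down, the PS reference
 * clock frequency if the PLL is force-bypassed, and ps_clk_freq * FBDIV
 * otherwise.
 */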
static ulong zynq_clk_get_pll_rate(struct zynq_clk_priv *priv, enum zynq_clk id)
{
	u32 clk_ctrl, reset, pwrdwn, mul, bypass;

	clk_ctrl = readl(zynq_clk_get_register(id));

	reset = (clk_ctrl & PLLCTRL_RESET_MASK) >> PLLCTRL_RESET_SHIFT;
	pwrdwn = (clk_ctrl & PLLCTRL_PWRDWN_MASK) >> PLLCTRL_PWRDWN_SHIFT;
	if (reset || pwrdwn)
		return 0;

	bypass = clk_ctrl & PLLCTRL_BPFORCE_MASK;
	if (bypass)
		mul = 1;
	else
		mul = (clk_ctrl & PLLCTRL_FBDIV_MASK) >> PLLCTRL_FBDIV_SHIFT;

	return priv->ps_clk_freq * mul;
}

#ifndef CONFIG_SPL_BUILD
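/*
 * zynq_clk_get_gem_rclk() - Determine the GEM RX clock source
 *
 * Reads the SRCSEL field of gem0_rclk_ctrl or gem1_rclk_ctrl and reports
 * whether the RX clock comes from MIO or EMIO.
 */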
static enum zynq_clk_rclk zynq_clk_get_gem_rclk(enum zynq_clk id)
{
	u32 clk_ctrl, srcsel;

	if (id == gem0_clk)
		clk_ctrl = readl(&slcr_base->gem0_rclk_ctrl);
	else
		clk_ctrl = readl(&slcr_base->gem1_rclk_ctrl);

	srcsel = (clk_ctrl & CLK_CTRL_SRCSEL_MASK) >> CLK_CTRL_SRCSEL_SHIFT;
	if (srcsel)
		return emio_clk;
	else
		return mio_clk;
}
#endif

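/*
 * zynq_clk_get_cpu_rate() - Compute a CPU clock rate
 *
 * Derives the cpu_6or4x/3or2x/2x/1x rates from the selected CPU PLL, the
 * DIVISOR field of arm_clk_ctrl and the 6:2:1 vs 4:2:1 ratio selection
 * (clk_621_true).
 */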
static ulong zynq_clk_get_cpu_rate(struct zynq_clk_priv *priv, enum zynq_clk id)
{
	u32 clk_621, clk_ctrl, div;
	enum zynq_clk pll;

	clk_ctrl = readl(&slcr_base->arm_clk_ctrl);

	div = (clk_ctrl & CLK_CTRL_DIV0_MASK) >> CLK_CTRL_DIV0_SHIFT;

	switch (id) {
	case cpu_1x_clk:
		div *= 2;
		/* fall through */
	case cpu_2x_clk:
		clk_621 = readl(&slcr_base->clk_621_true) & 1;
		div *= 2 + clk_621;
		break;
	case cpu_3or2x_clk:
		div *= 2;
		/* fall through */
	case cpu_6or4x_clk:
		break;
	default:
		return 0;
	}

	pll = zynq_clk_get_cpu_pll(clk_ctrl);

	return DIV_ROUND_CLOSEST(zynq_clk_get_pll_rate(priv, pll), div);
}

#ifndef CONFIG_SPL_BUILD
static ulong zynq_clk_get_ddr2x_rate(struct zynq_clk_priv *priv)
{
	u32 clk_ctrl, div;

	clk_ctrl = readl(&slcr_base->ddr_clk_ctrl);

	div = (clk_ctrl & CLK_CTRL_DIV2X_MASK) >> CLK_CTRL_DIV2X_SHIFT;

	return DIV_ROUND_CLOSEST(zynq_clk_get_pll_rate(priv, ddrpll_clk), div);
}
#endif

static ulong zynq_clk_get_ddr3x_rate(struct zynq_clk_priv *priv)
{
	u32 clk_ctrl, div;

	clk_ctrl = readl(&slcr_base->ddr_clk_ctrl);

	div = (clk_ctrl & CLK_CTRL_DIV3X_MASK) >> CLK_CTRL_DIV3X_SHIFT;

	return DIV_ROUND_CLOSEST(zynq_clk_get_pll_rate(priv, ddrpll_clk), div);
}

#ifndef CONFIG_SPL_BUILD
static ulong zynq_clk_get_dci_rate(struct zynq_clk_priv *priv)
{
	u32 clk_ctrl, div0, div1;

	clk_ctrl = readl(&slcr_base->dci_clk_ctrl);

	div0 = (clk_ctrl & CLK_CTRL_DIV0_MASK) >> CLK_CTRL_DIV0_SHIFT;
	div1 = (clk_ctrl & CLK_CTRL_DIV1_MASK) >> CLK_CTRL_DIV1_SHIFT;

	return DIV_ROUND_CLOSEST(DIV_ROUND_CLOSEST(
		zynq_clk_get_pll_rate(priv, ddrpll_clk), div0), div1);
}
#endif

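/*
 * zynq_clk_get_peripheral_rate() - Compute a peripheral clock rate
 *
 * Divides the source PLL rate by DIVISOR0 and, for clocks with two
 * divisors (@two_divs), also by DIVISOR1; the second divisor is only
 * evaluated outside of SPL.
 */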
static ulong zynq_clk_get_peripheral_rate(struct zynq_clk_priv *priv,
					  enum zynq_clk id, bool two_divs)
{
	enum zynq_clk pll;
	u32 clk_ctrl, div0;
	u32 div1 = 1;

	clk_ctrl = readl(zynq_clk_get_register(id));

	div0 = (clk_ctrl & CLK_CTRL_DIV0_MASK) >> CLK_CTRL_DIV0_SHIFT;
	if (!div0)
		div0 = 1;

#ifndef CONFIG_SPL_BUILD
	if (two_divs) {
		div1 = (clk_ctrl & CLK_CTRL_DIV1_MASK) >> CLK_CTRL_DIV1_SHIFT;
		if (!div1)
			div1 = 1;
	}
#endif

	pll = zynq_clk_get_peripheral_pll(clk_ctrl);

	return
		DIV_ROUND_CLOSEST(
			DIV_ROUND_CLOSEST(
				zynq_clk_get_pll_rate(priv, pll), div0),
			div1);
}

#ifndef CONFIG_SPL_BUILD
static ulong zynq_clk_get_gem_rate(struct zynq_clk_priv *priv, enum zynq_clk id)
{
	struct clk *parent;

	if (zynq_clk_get_gem_rclk(id) == mio_clk)
		return zynq_clk_get_peripheral_rate(priv, id, true);

	parent = &priv->gem_emio_clk[id - gem0_clk];
	if (parent->dev)
		return clk_get_rate(parent);

	debug("%s: gem%d emio rx clock source unknown\n", __func__,
	      id - gem0_clk);

	return -ENOSYS;
}

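/*
 * zynq_clk_calc_peripheral_two_divs() - Find the best DIVISOR0/DIVISOR1 pair
 *
 * Brute-forces both divisors and returns the achievable rate closest to
 * @rate, storing the chosen divisors in @div0 and @div1.
 */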
static unsigned long zynq_clk_calc_peripheral_two_divs(ulong rate,
						       ulong pll_rate,
						       u32 *div0, u32 *div1)
{
	long new_err, best_err = (long)(~0UL >> 1);
	ulong new_rate, best_rate = 0;
	u32 d0, d1;

	for (d0 = 1; d0 <= ZYNQ_CLK_MAXDIV; d0++) {
		for (d1 = 1; d1 <= ZYNQ_CLK_MAXDIV >> 1; d1++) {
			new_rate = DIV_ROUND_CLOSEST(
					DIV_ROUND_CLOSEST(pll_rate, d0), d1);
			new_err = abs(new_rate - rate);

			if (new_err < best_err) {
				*div0 = d0;
				*div1 = d1;
				best_err = new_err;
				best_rate = new_rate;
			}
		}
	}

	return best_rate;
}

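/*
 * zynq_clk_set_peripheral_rate() - Program peripheral clock divisors
 *
 * Calculates the divisor(s) for the requested rate, writes them to the
 * clock control register with the SLCR unlocked, and returns the newly
 * computed rate.
 */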
static ulong zynq_clk_set_peripheral_rate(struct zynq_clk_priv *priv,
					  enum zynq_clk id, ulong rate,
					  bool two_divs)
{
	enum zynq_clk pll;
	u32 clk_ctrl, div0 = 0, div1 = 0;
	ulong pll_rate, new_rate;
	u32 *reg;

	reg = zynq_clk_get_register(id);
	clk_ctrl = readl(reg);

	pll = zynq_clk_get_peripheral_pll(clk_ctrl);
	pll_rate = zynq_clk_get_pll_rate(priv, pll);
	clk_ctrl &= ~CLK_CTRL_DIV0_MASK;
	if (two_divs) {
		clk_ctrl &= ~CLK_CTRL_DIV1_MASK;
		new_rate = zynq_clk_calc_peripheral_two_divs(rate, pll_rate,
							     &div0, &div1);
		clk_ctrl |= div1 << CLK_CTRL_DIV1_SHIFT;
	} else {
		div0 = DIV_ROUND_CLOSEST(pll_rate, rate);
		if (div0 > ZYNQ_CLK_MAXDIV)
			div0 = ZYNQ_CLK_MAXDIV;
		new_rate = DIV_ROUND_CLOSEST(rate, div0);
	}
	clk_ctrl |= div0 << CLK_CTRL_DIV0_SHIFT;

	zynq_slcr_unlock();
	writel(clk_ctrl, reg);
	zynq_slcr_lock();

	return new_rate;
}

static ulong zynq_clk_set_gem_rate(struct zynq_clk_priv *priv, enum zynq_clk id,
				   ulong rate)
{
	struct clk *parent;

	if (zynq_clk_get_gem_rclk(id) == mio_clk)
		return zynq_clk_set_peripheral_rate(priv, id, rate, true);

	parent = &priv->gem_emio_clk[id - gem0_clk];
	if (parent->dev)
		return clk_set_rate(parent, rate);

	debug("%s: gem%d emio rx clock source unknown\n", __func__,
	      id - gem0_clk);

	return -ENOSYS;
}
#endif

#ifndef CONFIG_SPL_BUILD
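/*
 * zynq_clk_get_rate() - Clock framework .get_rate operation
 *
 * Dispatches the clock ID to the PLL, CPU, DDR, DCI, GEM or generic
 * peripheral rate helpers; APER clocks report the cpu_1x rate and the
 * DMA clock the cpu_2x rate.
 */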
static ulong zynq_clk_get_rate(struct clk *clk)
{
	struct zynq_clk_priv *priv = dev_get_priv(clk->dev);
	enum zynq_clk id = clk->id;
	bool two_divs = false;

	switch (id) {
	case armpll_clk ... iopll_clk:
		return zynq_clk_get_pll_rate(priv, id);
	case cpu_6or4x_clk ... cpu_1x_clk:
		return zynq_clk_get_cpu_rate(priv, id);
	case ddr2x_clk:
		return zynq_clk_get_ddr2x_rate(priv);
	case ddr3x_clk:
		return zynq_clk_get_ddr3x_rate(priv);
	case dci_clk:
		return zynq_clk_get_dci_rate(priv);
	case gem0_clk ... gem1_clk:
		return zynq_clk_get_gem_rate(priv, id);
	case fclk0_clk ... can1_clk:
		two_divs = true;
		/* fall through */
	case dbg_trc_clk ... dbg_apb_clk:
	case lqspi_clk ... pcap_clk:
	case sdio0_clk ... spi1_clk:
		return zynq_clk_get_peripheral_rate(priv, id, two_divs);
	case dma_clk:
		return zynq_clk_get_cpu_rate(priv, cpu_2x_clk);
	case usb0_aper_clk ... swdt_clk:
		return zynq_clk_get_cpu_rate(priv, cpu_1x_clk);
	default:
		return -ENXIO;
	}
}

static ulong zynq_clk_set_rate(struct clk *clk, ulong rate)
{
	struct zynq_clk_priv *priv = dev_get_priv(clk->dev);
	enum zynq_clk id = clk->id;
	bool two_divs = false;

	switch (id) {
	case gem0_clk ... gem1_clk:
		return zynq_clk_set_gem_rate(priv, id, rate);
	case fclk0_clk ... can1_clk:
		two_divs = true;
		/* fall through */
	case lqspi_clk ... pcap_clk:
	case sdio0_clk ... spi1_clk:
	case dbg_trc_clk ... dbg_apb_clk:
		return zynq_clk_set_peripheral_rate(priv, id, rate, two_divs);
	default:
		return -ENXIO;
	}
}
#else
static ulong zynq_clk_get_rate(struct clk *clk)
{
	struct zynq_clk_priv *priv = dev_get_priv(clk->dev);
	enum zynq_clk id = clk->id;

	switch (id) {
	case cpu_6or4x_clk ... cpu_1x_clk:
		return zynq_clk_get_cpu_rate(priv, id);
	case ddr3x_clk:
		return zynq_clk_get_ddr3x_rate(priv);
	case lqspi_clk ... pcap_clk:
	case sdio0_clk ... spi1_clk:
		return zynq_clk_get_peripheral_rate(priv, id, 0);
	case i2c0_aper_clk ... i2c1_aper_clk:
		return zynq_clk_get_cpu_rate(priv, cpu_1x_clk);
	default:
		return -ENXIO;
	}
}
#endif

static int dummy_enable(struct clk *clk)
{
	/*
	 * Add an implementation when needed; by default all clocks are
	 * enabled after power-up, which is the only case supported for now.
	 */
	return 0;
}

#if IS_ENABLED(CONFIG_CMD_CLK)
static const char * const clk_names[clk_max] = {
	"armpll", "ddrpll", "iopll",
	"cpu_6or4x", "cpu_3or2x", "cpu_2x", "cpu_1x",
	"ddr2x", "ddr3x", "dci",
	"lqspi", "smc", "pcap", "gem0", "gem1",
	"fclk0", "fclk1", "fclk2", "fclk3", "can0", "can1",
	"sdio0", "sdio1", "uart0", "uart1", "spi0", "spi1", "dma",
	"usb0_aper", "usb1_aper", "gem0_aper", "gem1_aper",
	"sdio0_aper", "sdio1_aper", "spi0_aper", "spi1_aper",
	"can0_aper", "can1_aper", "i2c0_aper", "i2c1_aper",
	"uart0_aper", "uart1_aper", "gpio_aper", "lqspi_aper",
	"smc_aper", "swdt", "dbg_trc", "dbg_apb"
};

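/*
 * zynq_clk_dump() - Print the name and frequency of every known clock
 *
 * Backs the 'clk dump' command: requests each clock listed in clk_names
 * and prints its rate, or "unknown" when the rate cannot be determined.
 */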
static void zynq_clk_dump(struct udevice *dev)
{
	int i, ret;

	printf("clk\t\tfrequency\n");
	for (i = 0; i < clk_max; i++) {
		const char *name = clk_names[i];

		if (name) {
			struct clk clk;
			unsigned long rate;

			clk.id = i;
			ret = clk_request(dev, &clk);
			if (ret < 0) {
				printf("%s clk_request() failed: %d\n",
				       __func__, ret);
				break;
			}

			rate = clk_get_rate(&clk);

			if ((rate == (unsigned long)-ENOSYS) ||
			    (rate == (unsigned long)-ENXIO))
				printf("%10s%20s\n", name, "unknown");
			else
				printf("%10s%20lu\n", name, rate);
		}
	}
}
#endif

static struct clk_ops zynq_clk_ops = {
	.get_rate = zynq_clk_get_rate,
#ifndef CONFIG_SPL_BUILD
	.set_rate = zynq_clk_set_rate,
#endif
	.enable = dummy_enable,
#if IS_ENABLED(CONFIG_CMD_CLK)
	.dump = zynq_clk_dump,
#endif
};

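/*
 * zynq_clk_probe() - Set up the clock driver
 *
 * Looks up the optional gem0/gem1 EMIO parent clocks (outside of SPL) and
 * reads the PS reference clock frequency from the "ps-clk-frequency"
 * property, defaulting to 33.333333 MHz.
 */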
static int zynq_clk_probe(struct udevice *dev)
{
	struct zynq_clk_priv *priv = dev_get_priv(dev);
#ifndef CONFIG_SPL_BUILD
	unsigned int i;
	char name[16];
	int ret;

	for (i = 0; i < 2; i++) {
		sprintf(name, "gem%d_emio_clk", i);
		ret = clk_get_by_name_optional(dev, name,
					       &priv->gem_emio_clk[i]);
		if (ret) {
			dev_err(dev, "failed to get %s clock\n", name);
			return ret;
		}
	}
#endif

	priv->ps_clk_freq = fdtdec_get_uint(gd->fdt_blob, dev_of_offset(dev),
					    "ps-clk-frequency", 33333333UL);

	return 0;
}

static const struct udevice_id zynq_clk_ids[] = {
	{ .compatible = "xlnx,ps7-clkc"},
	{}
};

U_BOOT_DRIVER(zynq_clk) = {
	.name = "zynq_clk",
	.id = UCLASS_CLK,
	.of_match = zynq_clk_ids,
	.ops = &zynq_clk_ops,
	.priv_auto = sizeof(struct zynq_clk_priv),
	.probe = zynq_clk_probe,
};