/* SPDX-License-Identifier: GPL-2.0+ */
/*
 * Copyright (C) 2019 DENX Software Engineering
 * Lukasz Majewski, DENX Software Engineering, lukma@denx.de
 *
 * Copyright (c) 2010-2011 Jeremy Kerr <jeremy.kerr@canonical.com>
 * Copyright (C) 2011-2012 Linaro Ltd <mturquette@linaro.org>
 */
#ifndef __LINUX_CLK_PROVIDER_H
#define __LINUX_CLK_PROVIDER_H

#include <dm.h>
#include <linux/bitops.h>
#include <linux/err.h>
#include <clk-uclass.h>

static inline void clk_dm(ulong id, struct clk *clk)
{
        if (!IS_ERR(clk))
                clk->id = id;
}
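
/*
 * Typical use (illustrative sketch only; the MYSOC_CLK_* identifier, clock
 * names and ratio below are hypothetical, not taken from an in-tree driver):
 * a CCF clock driver wraps each clk_register_*() call in clk_dm() so the
 * freshly registered clock gets tagged with the identifier used by the
 * clock's binding. Error pointers are skipped thanks to the IS_ERR() check
 * above.
 *
 *      clk_dm(MYSOC_CLK_OSC_DIV2,
 *             clk_register_fixed_factor(NULL, "osc_div2", "osc", 0, 1, 2));
 */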

/*
 * flags used across common struct clk. these flags should only affect the
 * top-level framework. custom flags for dealing with hardware specifics
 * belong in struct clk_foo
 *
 * Please update clk_flags[] in drivers/clk/clk.c when making changes here!
 */
#define CLK_SET_RATE_GATE BIT(0) /* must be gated across rate change */
#define CLK_SET_PARENT_GATE BIT(1) /* must be gated across re-parent */
#define CLK_SET_RATE_PARENT BIT(2) /* propagate rate change up one level */
#define CLK_IGNORE_UNUSED BIT(3) /* do not gate even if unused */
                                /* BIT(4) is unused */
#define CLK_IS_BASIC BIT(5) /* Basic clk, can't do a to_clk_foo() */
#define CLK_GET_RATE_NOCACHE BIT(6) /* do not use the cached clk rate */
#define CLK_SET_RATE_NO_REPARENT BIT(7) /* don't re-parent on rate change */
#define CLK_GET_ACCURACY_NOCACHE BIT(8) /* do not use the cached clk accuracy */
#define CLK_RECALC_NEW_RATES BIT(9) /* recalc rates after notifications */
#define CLK_SET_RATE_UNGATE BIT(10) /* clock needs to run to set rate */
#define CLK_IS_CRITICAL BIT(11) /* do not gate, ever */
/* parents need enable during gate/ungate, set rate and re-parent */
#define CLK_OPS_PARENT_ENABLE BIT(12)
/* duty cycle call may be forwarded to the parent clock */
#define CLK_DUTY_CYCLE_PARENT BIT(13)

#define CLK_MUX_INDEX_ONE BIT(0)
#define CLK_MUX_INDEX_BIT BIT(1)
#define CLK_MUX_HIWORD_MASK BIT(2)
#define CLK_MUX_READ_ONLY BIT(3) /* mux can't be changed */
#define CLK_MUX_ROUND_CLOSEST BIT(4)

struct clk_mux {
        struct clk clk;
        void __iomem *reg;
        u32 *table;
        u32 mask;
        u8 shift;
        u8 flags;

        /*
         * Fields taken from struct clk_init_data - the struct itself is
         * omitted to avoid an extra level of CCF structures in the
         * bootloader.
         */
        const char * const *parent_names;
        u8 num_parents;
#if CONFIG_IS_ENABLED(SANDBOX_CLK_CCF)
        u32 io_mux_val;
#endif
};

#define to_clk_mux(_clk) container_of(_clk, struct clk_mux, clk)
extern const struct clk_ops clk_mux_ops;
u8 clk_mux_get_parent(struct clk *clk);

struct clk_gate {
        struct clk clk;
        void __iomem *reg;
        u8 bit_idx;
        u8 flags;
#if CONFIG_IS_ENABLED(SANDBOX_CLK_CCF)
        u32 io_gate_val;
#endif
};

#define to_clk_gate(_clk) container_of(_clk, struct clk_gate, clk)

#define CLK_GATE_SET_TO_DISABLE BIT(0)
#define CLK_GATE_HIWORD_MASK BIT(1)

extern const struct clk_ops clk_gate_ops;
struct clk *clk_register_gate(struct device *dev, const char *name,
                              const char *parent_name, unsigned long flags,
                              void __iomem *reg, u8 bit_idx,
                              u8 clk_gate_flags, spinlock_t *lock);
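
/*
 * Illustrative sketch (clock names, register offset and bit index are
 * hypothetical): a gate clock controls a single enable bit, bit_idx, inside
 * *reg. Pass CLK_GATE_SET_TO_DISABLE in clk_gate_flags when setting the bit
 * disables the clock instead of enabling it.
 *
 *      clk_dm(MYSOC_CLK_UART_GATE,
 *             clk_register_gate(NULL, "uart_gate", "uart_podf", 0,
 *                               base + 0x70, 12, 0, NULL));
 */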

struct clk_div_table {
        unsigned int val;
        unsigned int div;
};
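
/*
 * A divider table maps raw register field values to divisor values for
 * dividers whose encoding is not linear. Sketch (the values are
 * hypothetical); such tables are conventionally terminated by an entry
 * whose .div is zero:
 *
 *      static const struct clk_div_table mysoc_post_div_table[] = {
 *              { .val = 0, .div = 1 },
 *              { .val = 1, .div = 2 },
 *              { .val = 2, .div = 4 },
 *              { .val = 0, .div = 0 },
 *      };
 */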

struct clk_divider {
        struct clk clk;
        void __iomem *reg;
        u8 shift;
        u8 width;
        u8 flags;
        const struct clk_div_table *table;
#if CONFIG_IS_ENABLED(SANDBOX_CLK_CCF)
        u32 io_divider_val;
#endif
};

#define clk_div_mask(width) ((1 << (width)) - 1)
#define to_clk_divider(_clk) container_of(_clk, struct clk_divider, clk)

#define CLK_DIVIDER_ONE_BASED BIT(0)
#define CLK_DIVIDER_POWER_OF_TWO BIT(1)
#define CLK_DIVIDER_ALLOW_ZERO BIT(2)
#define CLK_DIVIDER_HIWORD_MASK BIT(3)
#define CLK_DIVIDER_ROUND_CLOSEST BIT(4)
#define CLK_DIVIDER_READ_ONLY BIT(5)
#define CLK_DIVIDER_MAX_AT_ZERO BIT(6)
extern const struct clk_ops clk_divider_ops;
unsigned long divider_recalc_rate(struct clk *hw, unsigned long parent_rate,
                                  unsigned int val,
                                  const struct clk_div_table *table,
                                  unsigned long flags, unsigned long width);

struct clk_fixed_factor {
        struct clk clk;
        unsigned int mult;
        unsigned int div;
};

#define to_clk_fixed_factor(_clk) container_of(_clk, struct clk_fixed_factor,\
                                               clk)

struct clk_fixed_rate {
        struct clk clk;
        unsigned long fixed_rate;
};

#define to_clk_fixed_rate(dev) ((struct clk_fixed_rate *)dev_get_platdata(dev))

struct clk_composite {
        struct clk clk;
        struct clk_ops ops;

        struct clk *mux;
        struct clk *rate;
        struct clk *gate;

        const struct clk_ops *mux_ops;
        const struct clk_ops *rate_ops;
        const struct clk_ops *gate_ops;
};

#define to_clk_composite(_clk) container_of(_clk, struct clk_composite, clk)

struct clk *clk_register_composite(struct device *dev, const char *name,
                const char * const *parent_names, int num_parents,
                struct clk *mux_clk, const struct clk_ops *mux_ops,
                struct clk *rate_clk, const struct clk_ops *rate_ops,
                struct clk *gate_clk, const struct clk_ops *gate_ops,
                unsigned long flags);
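
/*
 * Illustrative sketch (names and the flag choice are hypothetical): a
 * composite clock bundles a mux, a rate clock (e.g. a divider) and a gate
 * into a single clock and forwards each operation to the matching ops.
 * The member clocks are set up by the driver first and then handed over:
 *
 *      static const char * const periph_sels[] = { "osc", "pll_per" };
 *      static struct clk_mux mux;
 *      static struct clk_divider div;
 *      static struct clk_gate gate;
 *
 *      // reg/shift/mask of mux, reg/shift/width of div and
 *      // reg/bit_idx of gate filled in here
 *
 *      clk = clk_register_composite(NULL, "periph",
 *                                   periph_sels, ARRAY_SIZE(periph_sels),
 *                                   &mux.clk, &clk_mux_ops,
 *                                   &div.clk, &clk_divider_ops,
 *                                   &gate.clk, &clk_gate_ops,
 *                                   CLK_SET_RATE_NO_REPARENT);
 */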
169
Lukasz Majewski4de44bb2019-06-24 15:50:45 +0200170int clk_register(struct clk *clk, const char *drv_name, const char *name,
171 const char *parent_name);
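
/*
 * Sketch of how a custom clock type can use clk_register() directly; the
 * struct clk_pll type, the "mysoc_clk_pll" driver name and the use of the
 * kzalloc()/kfree() compat helpers are assumptions for illustration. The
 * helpers above essentially follow the same pattern: allocate the wrapping
 * structure, fill in the hardware-specific fields, then register the
 * embedded struct clk under a DM driver name expected to match a
 * U_BOOT_DRIVER() that provides the clk_ops.
 *
 *      struct clk_pll *pll = kzalloc(sizeof(*pll), GFP_KERNEL);
 *
 *      pll->base = base;
 *      ret = clk_register(&pll->clk, "mysoc_clk_pll", name, parent_name);
 *      if (ret) {
 *              kfree(pll);
 *              return ERR_PTR(ret);
 *      }
 *      return &pll->clk;
 */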

struct clk *clk_register_fixed_factor(struct device *dev, const char *name,
                const char *parent_name, unsigned long flags,
                unsigned int mult, unsigned int div);
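
/*
 * A fixed-factor clock scales its parent by a constant ratio:
 * rate = parent_rate * mult / div. Sketch (the ID and clock names are
 * hypothetical) of a clock running at 1/8 of a 24 MHz oscillator:
 *
 *      clk_dm(MYSOC_CLK_OSC_DIV8,
 *             clk_register_fixed_factor(NULL, "osc_div8", "osc_24m", 0,
 *                                       1, 8));
 */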

struct clk *clk_register_divider(struct device *dev, const char *name,
                const char *parent_name, unsigned long flags,
                void __iomem *reg, u8 shift, u8 width,
                u8 clk_divider_flags);
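
/*
 * Sketch (names, register offset and field position are hypothetical): a
 * divider clock is described by the bitfield that holds the divisor, i.e.
 * width bits starting at shift within *reg. With no special
 * clk_divider_flags the usual CCF convention applies, where a raw field
 * value of N selects a divisor of N + 1.
 *
 *      clk_dm(MYSOC_CLK_AHB,
 *             clk_register_divider(NULL, "ahb", "ahb_sel", 0,
 *                                  base + 0x14, 10, 3, 0));
 */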

struct clk *clk_register_mux(struct device *dev, const char *name,
                const char * const *parent_names, u8 num_parents,
                unsigned long flags,
                void __iomem *reg, u8 shift, u8 width,
                u8 clk_mux_flags);
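
/*
 * Sketch (names, register offset and field position are hypothetical): a
 * mux clock selects one of parent_names via a bitfield of width bits at
 * shift within *reg; by default the raw field value is the index into
 * parent_names (the CLK_MUX_* flags above modify that mapping).
 *
 *      static const char * const uart_sels[] = {
 *              "osc_24m", "pll_80m", "pll_100m",
 *      };
 *
 *      clk_dm(MYSOC_CLK_UART_SEL,
 *             clk_register_mux(NULL, "uart_sel", uart_sels,
 *                              ARRAY_SIZE(uart_sels), 0,
 *                              base + 0x7c, 24, 3, 0));
 */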

const char *clk_hw_get_name(const struct clk *hw);
ulong clk_generic_get_rate(struct clk *clk);

static inline struct clk *dev_get_clk_ptr(struct udevice *dev)
{
        return (struct clk *)dev_get_uclass_priv(dev);
}
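
/*
 * Sketch (the device name is hypothetical): a CCF clock device stores its
 * struct clk as uclass-private data, so code holding only the udevice can
 * recover the clock with dev_get_clk_ptr():
 *
 *      struct udevice *dev;
 *      struct clk *clk;
 *
 *      ret = uclass_get_device_by_name(UCLASS_CLK, "clock-osc", &dev);
 *      if (!ret)
 *              clk = dev_get_clk_ptr(dev);
 */
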
#endif /* __LINUX_CLK_PROVIDER_H */