// SPDX-License-Identifier: GPL-2.0+
/*
 * Copyright (C) 2012-2014 Panasonic Corporation
 * Copyright (C) 2015-2016 Socionext Inc.
 * Author: Masahiro Yamada <yamada.masahiro@socionext.com>
 */

#include <cpu_func.h>
#include <linux/io.h>
#include <linux/kernel.h>
#include <asm/armv7.h>
#include <asm/processor.h>

#include "cache-uniphier.h"

/* control registers */
#define UNIPHIER_SSCC		0x500c0000	/* Control Register */
#define    UNIPHIER_SSCC_BST		(0x1 << 20)	/* UCWG burst read */
#define    UNIPHIER_SSCC_ACT		(0x1 << 19)	/* Inst-Data separate */
#define    UNIPHIER_SSCC_WTG		(0x1 << 18)	/* WT gathering on */
#define    UNIPHIER_SSCC_PRD		(0x1 << 17)	/* enable pre-fetch */
#define    UNIPHIER_SSCC_ON		(0x1 <<  0)	/* enable cache */
#define UNIPHIER_SSCLPDAWCR	0x500c0030	/* Unified/Data Active Way Control */
#define UNIPHIER_SSCLPIAWCR	0x500c0034	/* Instruction Active Way Control */

/* revision registers */
#define UNIPHIER_SSCID		0x503c0100	/* ID Register */

/* operation registers */
#define UNIPHIER_SSCOPE		0x506c0244	/* Cache Operation Primitive Entry */
#define    UNIPHIER_SSCOPE_CM_INV		0x0	/* invalidate */
#define    UNIPHIER_SSCOPE_CM_CLEAN		0x1	/* clean */
#define    UNIPHIER_SSCOPE_CM_FLUSH		0x2	/* flush */
#define    UNIPHIER_SSCOPE_CM_SYNC		0x8	/* sync (drain bufs) */
#define    UNIPHIER_SSCOPE_CM_FLUSH_PREFETCH	0x9	/* flush p-fetch buf */
#define UNIPHIER_SSCOQM		0x506c0248
#define    UNIPHIER_SSCOQM_TID_MASK		(0x3 << 21)
#define    UNIPHIER_SSCOQM_TID_LRU_DATA		(0x0 << 21)
#define    UNIPHIER_SSCOQM_TID_LRU_INST		(0x1 << 21)
#define    UNIPHIER_SSCOQM_TID_WAY		(0x2 << 21)
#define    UNIPHIER_SSCOQM_S_MASK		(0x3 << 17)
#define    UNIPHIER_SSCOQM_S_RANGE		(0x0 << 17)
#define    UNIPHIER_SSCOQM_S_ALL		(0x1 << 17)
#define    UNIPHIER_SSCOQM_S_WAY		(0x2 << 17)
#define    UNIPHIER_SSCOQM_CE			(0x1 << 15)	/* notify completion */
#define    UNIPHIER_SSCOQM_CW			(0x1 << 14)
#define    UNIPHIER_SSCOQM_CM_MASK		(0x7)
#define    UNIPHIER_SSCOQM_CM_INV		0x0	/* invalidate */
#define    UNIPHIER_SSCOQM_CM_CLEAN		0x1	/* clean */
#define    UNIPHIER_SSCOQM_CM_FLUSH		0x2	/* flush */
#define    UNIPHIER_SSCOQM_CM_PREFETCH		0x3	/* prefetch to cache */
#define    UNIPHIER_SSCOQM_CM_PREFETCH_BUF	0x4	/* prefetch to pf-buf */
#define    UNIPHIER_SSCOQM_CM_TOUCH		0x5	/* touch */
#define    UNIPHIER_SSCOQM_CM_TOUCH_ZERO	0x6	/* touch to zero */
#define    UNIPHIER_SSCOQM_CM_TOUCH_DIRTY	0x7	/* touch with dirty */
#define UNIPHIER_SSCOQAD	0x506c024c	/* Cache Operation Queue Address */
#define UNIPHIER_SSCOQSZ	0x506c0250	/* Cache Operation Queue Size */
#define UNIPHIER_SSCOQMASK	0x506c0254	/* Cache Operation Queue Address Mask */
#define UNIPHIER_SSCOQWN	0x506c0258	/* Cache Operation Queue Way Number */
#define UNIPHIER_SSCOPPQSEF	0x506c025c	/* Cache Operation Queue Set Complete */
#define    UNIPHIER_SSCOPPQSEF_FE		(0x1 << 1)
#define    UNIPHIER_SSCOPPQSEF_OE		(0x1 << 0)
#define UNIPHIER_SSCOLPQS	0x506c0260	/* Cache Operation Queue Status */
#define    UNIPHIER_SSCOLPQS_EF			(0x1 << 2)
#define    UNIPHIER_SSCOLPQS_EST		(0x1 << 1)
#define    UNIPHIER_SSCOLPQS_QST		(0x1 << 0)

#define UNIPHIER_SSC_LINE_SIZE		128
#define UNIPHIER_SSC_RANGE_OP_MAX_SIZE	(0x00400000 - (UNIPHIER_SSC_LINE_SIZE))

#define UNIPHIER_SSCOQAD_IS_NEEDED(op) \
		((op & UNIPHIER_SSCOQM_S_MASK) == UNIPHIER_SSCOQM_S_RANGE)
#define UNIPHIER_SSCOQWM_IS_NEEDED(op) \
		(((op & UNIPHIER_SSCOQM_S_MASK) == UNIPHIER_SSCOQM_S_WAY) || \
		 ((op & UNIPHIER_SSCOQM_TID_MASK) == UNIPHIER_SSCOQM_TID_WAY))

/* uniphier_cache_sync - perform a sync point for a particular cache level */
static void uniphier_cache_sync(void)
{
	/* drain internal buffers */
	writel(UNIPHIER_SSCOPE_CM_SYNC, UNIPHIER_SSCOPE);
	/* need a read back to confirm */
	readl(UNIPHIER_SSCOPE);
}

/**
 * uniphier_cache_maint_common - run a queue operation
 *
 * @start: start address of range operation (don't care for "all" operation)
 * @size: data size of range operation (don't care for "all" operation)
 * @ways: target ways (don't care for operations other than pre-fetch, touch,
 *        and per-way operations)
 * @operation: flags to specify the desired cache operation
 */
static void uniphier_cache_maint_common(u32 start, u32 size, u32 ways,
					u32 operation)
{
	/* clear the complete notification flag */
	writel(UNIPHIER_SSCOLPQS_EF, UNIPHIER_SSCOLPQS);

	do {
		/* set cache operation */
		writel(UNIPHIER_SSCOQM_CE | operation, UNIPHIER_SSCOQM);

		/* set address range if needed */
		if (likely(UNIPHIER_SSCOQAD_IS_NEEDED(operation))) {
			writel(start, UNIPHIER_SSCOQAD);
			writel(size, UNIPHIER_SSCOQSZ);
		}

		/* set target ways if needed */
		if (unlikely(UNIPHIER_SSCOQWM_IS_NEEDED(operation)))
			writel(ways, UNIPHIER_SSCOQWN);
	} while (unlikely(readl(UNIPHIER_SSCOPPQSEF) &
			  (UNIPHIER_SSCOPPQSEF_FE | UNIPHIER_SSCOPPQSEF_OE)));

	/* wait until the operation is completed */
	while (likely(readl(UNIPHIER_SSCOLPQS) != UNIPHIER_SSCOLPQS_EF))
		cpu_relax();
}

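/* run a queue operation against the entire address space */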
static void uniphier_cache_maint_all(u32 operation)
{
	uniphier_cache_maint_common(0, 0, 0, UNIPHIER_SSCOQM_S_ALL | operation);

	uniphier_cache_sync();
}

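/*
 * run a queue operation over the address range [start, end), splitting it
 * into chunks no larger than UNIPHIER_SSC_RANGE_OP_MAX_SIZE, then sync
 */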
static void uniphier_cache_maint_range(u32 start, u32 end, u32 ways,
				       u32 operation)
{
	u32 size;

	/*
	 * If the start address is not aligned, round it down so that the
	 * operation also covers the first (partial) cache line.
	 */
	start = start & ~(UNIPHIER_SSC_LINE_SIZE - 1);

	size = end - start;

	if (unlikely(size >= (u32)(-UNIPHIER_SSC_LINE_SIZE))) {
		/* this means a cache operation for the entire range */
		uniphier_cache_maint_all(operation);
		return;
	}

	/*
	 * If the end address is not aligned, round the size up so that the
	 * operation also covers the last (partial) cache line.
	 */
	size = ALIGN(size, UNIPHIER_SSC_LINE_SIZE);

	while (size) {
		u32 chunk_size = min_t(u32, size,
				       UNIPHIER_SSC_RANGE_OP_MAX_SIZE);

		uniphier_cache_maint_common(start, chunk_size, ways,
					    UNIPHIER_SSCOQM_S_RANGE | operation);

		start += chunk_size;
		size -= chunk_size;
	}

	uniphier_cache_sync();
}

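/* pre-fetch the given range into the specified ways of the outer cache */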
void uniphier_cache_prefetch_range(u32 start, u32 end, u32 ways)
{
	uniphier_cache_maint_range(start, end, ways,
				   UNIPHIER_SSCOQM_TID_WAY |
				   UNIPHIER_SSCOQM_CM_PREFETCH);
}

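/* run a touch operation over the given range, targeting the specified ways */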
void uniphier_cache_touch_range(u32 start, u32 end, u32 ways)
{
	uniphier_cache_maint_range(start, end, ways,
				   UNIPHIER_SSCOQM_TID_WAY |
				   UNIPHIER_SSCOQM_CM_TOUCH);
}

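/* touch the given range to zero, targeting the specified ways */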
void uniphier_cache_touch_zero_range(u32 start, u32 end, u32 ways)
{
	uniphier_cache_maint_range(start, end, ways,
				   UNIPHIER_SSCOQM_TID_WAY |
				   UNIPHIER_SSCOQM_CM_TOUCH_ZERO);
}

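/* invalidate the whole contents of the specified ways */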
void uniphier_cache_inv_way(u32 ways)
{
	uniphier_cache_maint_common(0, 0, ways,
				    UNIPHIER_SSCOQM_S_WAY |
				    UNIPHIER_SSCOQM_CM_INV);
}

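/*
 * Set the active (allocatable) ways for a CPU. The per-CPU way control
 * registers live at an offset from UNIPHIER_SSCC that depends on the SoC
 * revision read from UNIPHIER_SSCID.
 */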
void uniphier_cache_set_active_ways(int cpu, u32 active_ways)
{
	void __iomem *base = (void __iomem *)UNIPHIER_SSCC + 0xc00;

	switch (readl(UNIPHIER_SSCID)) { /* revision */
	case 0x12:	/* LD4 */
	case 0x16:	/* sld8 */
		base = (void __iomem *)UNIPHIER_SSCC + 0x840;
		break;
	default:
		base = (void __iomem *)UNIPHIER_SSCC + 0xc00;
		break;
	}

	writel(active_ways, base + 4 * cpu);
}

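/*
 * Illustrative (hypothetical) use of the way-control helpers above, e.g.
 * restricting CPU0 to way 0 and pre-loading a buffer into that way; the
 * address and way mask below are made-up example values:
 *
 *	uniphier_cache_set_active_ways(0, 0x1);
 *	uniphier_cache_prefetch_range(0x80000000, 0x80001000, 0x1);
 */

/* set or clear the enable bit of the outer cache controller */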
static void uniphier_cache_endisable(int enable)
{
	u32 tmp;

	tmp = readl(UNIPHIER_SSCC);
	if (enable)
		tmp |= UNIPHIER_SSCC_ON;
	else
		tmp &= ~UNIPHIER_SSCC_ON;
	writel(tmp, UNIPHIER_SSCC);
}

void uniphier_cache_enable(void)
{
	uniphier_cache_endisable(1);
}

void uniphier_cache_disable(void)
{
	uniphier_cache_endisable(0);
}

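/*
 * Hooks for the generic ARMv7 outer cache support, provided only when
 * CONFIG_CACHE_UNIPHIER is enabled.
 */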
#ifdef CONFIG_CACHE_UNIPHIER
void v7_outer_cache_flush_all(void)
{
	uniphier_cache_maint_all(UNIPHIER_SSCOQM_CM_FLUSH);
}

void v7_outer_cache_inval_all(void)
{
	uniphier_cache_maint_all(UNIPHIER_SSCOQM_CM_INV);
}

void v7_outer_cache_flush_range(u32 start, u32 end)
{
	uniphier_cache_maint_range(start, end, 0, UNIPHIER_SSCOQM_CM_FLUSH);
}

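/*
 * Invalidate the given range. Partial cache lines at an unaligned start or
 * end may hold data belonging to adjacent buffers, so they are flushed
 * (clean + invalidate) instead of being invalidated outright.
 */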
void v7_outer_cache_inval_range(u32 start, u32 end)
{
	if (start & (UNIPHIER_SSC_LINE_SIZE - 1)) {
		start &= ~(UNIPHIER_SSC_LINE_SIZE - 1);
		uniphier_cache_maint_range(start,
					   start + UNIPHIER_SSC_LINE_SIZE, 0,
					   UNIPHIER_SSCOQM_CM_FLUSH);
		start += UNIPHIER_SSC_LINE_SIZE;
	}

	if (start >= end) {
		uniphier_cache_sync();
		return;
	}

	if (end & (UNIPHIER_SSC_LINE_SIZE - 1)) {
		end &= ~(UNIPHIER_SSC_LINE_SIZE - 1);
		uniphier_cache_maint_range(end,
					   end + UNIPHIER_SSC_LINE_SIZE, 0,
					   UNIPHIER_SSCOQM_CM_FLUSH);
	}

	if (start >= end) {
		uniphier_cache_sync();
		return;
	}

	uniphier_cache_maint_range(start, end, 0, UNIPHIER_SSCOQM_CM_INV);
}

void v7_outer_cache_enable(void)
{
	uniphier_cache_set_active_ways(0, U32_MAX);	/* activate all ways */
	uniphier_cache_enable();
}

void v7_outer_cache_disable(void)
{
	uniphier_cache_disable();
}
#endif

void enable_caches(void)
{
	dcache_enable();
}