/* SPDX-License-Identifier: GPL-2.0+ */
/*
 * (C) Copyright 2013
 * David Feng <fenghua@phytium.com.cn>
 *
 * This file is based on sample code from ARMv8 ARM.
 */

#include <asm-offsets.h>
#include <config.h>
#include <asm/macro.h>
#include <asm/system.h>
#include <linux/linkage.h>

#ifndef CONFIG_CMO_BY_VA_ONLY
/*
 * void __asm_dcache_level(level)
 *
 * flush or invalidate one cache level by set/way.
 *
 * x0: cache level
 * x1: 0 clean & invalidate, 1 invalidate only
 * x16: non-zero if FEAT_CCIDX is implemented
 * x2~x9: clobbered
 */
.pushsection .text.__asm_dcache_level, "ax"
ENTRY(__asm_dcache_level)
	lsl	x12, x0, #1
	msr	csselr_el1, x12		/* select cache level */
	isb				/* sync change of csselr_el1 */
	mrs	x6, ccsidr_el1		/* read the new ccsidr_el1 */
	ubfx	x2, x6, #0, #3		/* x2 <- log2(cache line size)-4 */
	cbz	x16, 3f			/* check for FEAT_CCIDX */
	ubfx	x3, x6, #3, #21		/* x3 <- number of cache ways - 1 */
	ubfx	x4, x6, #32, #24	/* x4 <- number of cache sets - 1 */
	b	4f
3:
	ubfx	x3, x6, #3, #10		/* x3 <- number of cache ways - 1 */
	ubfx	x4, x6, #13, #15	/* x4 <- number of cache sets - 1 */
4:
	add	x2, x2, #4		/* x2 <- log2(cache line size) */
	clz	w5, w3			/* bit position of #ways */
	/* x12 <- cache level << 1 */
	/* x2 <- line length offset */
	/* x3 <- number of cache ways - 1 */
	/* x4 <- number of cache sets - 1 */
	/* x5 <- bit position of #ways */
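	/*
	 * The nested loops below assemble the DC ISW/CISW set/way operand
	 * in x9 as (way << x5) | (set << x2) | (level << 1): the way index
	 * in the top bits, the set index above the line-size offset, and
	 * the cache level in bits [3:1], as described in the ARMv8 ARM.
	 */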

loop_set:
	mov	x6, x3			/* x6 <- working copy of #ways */
loop_way:
	lsl	x7, x6, x5
	orr	x9, x12, x7		/* map way and level to cisw value */
	lsl	x7, x4, x2
	orr	x9, x9, x7		/* map set number to cisw value */
	tbz	w1, #0, 1f
	dc	isw, x9
	b	2f
1:	dc	cisw, x9		/* clean & invalidate by set/way */
2:	subs	x6, x6, #1		/* decrement the way */
	b.ge	loop_way
	subs	x4, x4, #1		/* decrement the set */
	b.ge	loop_set

	ret
ENDPROC(__asm_dcache_level)
.popsection

/*
 * void __asm_dcache_all(int invalidate_only)
 *
 * x0: 0 clean & invalidate, 1 invalidate only
 *
 * flush or invalidate all data cache levels by set/way.
 */
.pushsection .text.__asm_dcache_all, "ax"
ENTRY(__asm_dcache_all)
	mov	x1, x0
	dsb	sy
	mrs	x10, clidr_el1		/* read clidr_el1 */
	ubfx	x11, x10, #24, #3	/* x11 <- loc */
	cbz	x11, finished		/* if loc is 0, exit */
	mov	x15, lr
	mrs	x16, s3_0_c0_c7_2	/* read value of id_aa64mmfr2_el1 */
	ubfx	x16, x16, #20, #4	/* save FEAT_CCIDX identifier in x16 */
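	/*
	 * ID_AA64MMFR2_EL1 is read via its encoded name (s3_0_c0_c7_2),
	 * presumably so that older assemblers that do not know the symbolic
	 * name can still build this file. Bits [23:20] are the CCIDX field,
	 * which is non-zero when the 64-bit CCSIDR_EL1 layout (FEAT_CCIDX)
	 * is implemented.
	 */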
	mov	x0, #0			/* start flush at cache level 0 */
	/* x0  <- cache level */
	/* x10 <- clidr_el1 */
	/* x11 <- loc */
	/* x15 <- return address */

loop_level:
	add	x12, x0, x0, lsl #1	/* x12 <- tripled cache level */
	lsr	x12, x10, x12
	and	x12, x12, #7		/* x12 <- cache type */
	cmp	x12, #2
	b.lt	skip			/* skip if no cache or icache */
	bl	__asm_dcache_level	/* x1 = 0 flush, 1 invalidate */
skip:
	add	x0, x0, #1		/* increment cache level */
	cmp	x11, x0
	b.gt	loop_level

	mov	x0, #0
	msr	csselr_el1, x0		/* restore csselr_el1 */
	dsb	sy
	isb
	mov	lr, x15

finished:
	ret
ENDPROC(__asm_dcache_all)
.popsection

.pushsection .text.__asm_flush_dcache_all, "ax"
ENTRY(__asm_flush_dcache_all)
	mov	x0, #0
	b	__asm_dcache_all
ENDPROC(__asm_flush_dcache_all)
.popsection

.pushsection .text.__asm_invalidate_dcache_all, "ax"
ENTRY(__asm_invalidate_dcache_all)
	mov	x0, #0x1
	b	__asm_dcache_all
ENDPROC(__asm_invalidate_dcache_all)
.popsection
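
/*
 * Example (illustrative sketch only, not taken from this file): a typical
 * early-boot caller invalidates the entire data cache by set/way before
 * the MMU and caches are enabled, e.g.
 *
 *	bl	__asm_invalidate_dcache_all
 *	bl	__asm_invalidate_tlb_all
 *
 * and calls __asm_flush_dcache_all before turning the data cache off again.
 */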

.pushsection .text.__asm_flush_l3_dcache, "ax"
WEAK(__asm_flush_l3_dcache)
	mov	x0, #0			/* return status as success */
	ret
ENDPROC(__asm_flush_l3_dcache)
.popsection

.pushsection .text.__asm_invalidate_l3_icache, "ax"
WEAK(__asm_invalidate_l3_icache)
	mov	x0, #0			/* return status as success */
	ret
ENDPROC(__asm_invalidate_l3_icache)
.popsection

#else	/* CONFIG_CMO_BY_VA_ONLY */

/*
 * Define these so that they actively clash with an implementation
 * accidentally selected together with CONFIG_CMO_BY_VA_ONLY.
 */

.pushsection .text.__asm_invalidate_l3_icache, "ax"
ENTRY(__asm_invalidate_l3_icache)
	mov	x0, xzr
	ret
ENDPROC(__asm_invalidate_l3_icache)
.popsection
.pushsection .text.__asm_flush_l3_dcache, "ax"
ENTRY(__asm_flush_l3_dcache)
	mov	x0, xzr
	ret
ENDPROC(__asm_flush_l3_dcache)
.popsection
#endif	/* CONFIG_CMO_BY_VA_ONLY */

/*
 * void __asm_flush_dcache_range(start, end)
 *
 * clean & invalidate data cache in the range
 *
 * x0: start address
 * x1: end address
 */
.pushsection .text.__asm_flush_dcache_range, "ax"
ENTRY(__asm_flush_dcache_range)
	mrs	x3, ctr_el0
	ubfx	x3, x3, #16, #4
	mov	x2, #4
	lsl	x2, x2, x3		/* cache line size */

	/* x2 <- minimal cache line size in cache system */
	sub	x3, x2, #1
	bic	x0, x0, x3
1:	dc	civac, x0	/* clean & invalidate data or unified cache */
	add	x0, x0, x2
	cmp	x0, x1
	b.lo	1b
	dsb	sy
	ret
ENDPROC(__asm_flush_dcache_range)
.popsection
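
/*
 * Example (illustrative sketch; the symbol name and size are assumptions):
 * clean & invalidate a buffer before handing it to a non-coherent DMA
 * master, so the device sees the CPU's latest writes.
 *
 *	ldr	x0, =tx_buffer		// x0 <- start address
 *	add	x1, x0, #0x1000		// x1 <- end address (start + 4 KiB)
 *	bl	__asm_flush_dcache_range
 */
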
/*
 * void __asm_invalidate_dcache_range(start, end)
 *
 * invalidate data cache in the range
 *
 * x0: start address
 * x1: end address
 */
.pushsection .text.__asm_invalidate_dcache_range, "ax"
ENTRY(__asm_invalidate_dcache_range)
	mrs	x3, ctr_el0
	ubfx	x3, x3, #16, #4
	mov	x2, #4
	lsl	x2, x2, x3		/* cache line size */

	/* x2 <- minimal cache line size in cache system */
	sub	x3, x2, #1
	bic	x0, x0, x3
1:	dc	ivac, x0	/* invalidate data or unified cache */
	add	x0, x0, x2
	cmp	x0, x1
	b.lo	1b
	dsb	sy
	ret
ENDPROC(__asm_invalidate_dcache_range)
.popsection
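
/*
 * Example (illustrative sketch; the symbol name and size are assumptions):
 * discard stale cache lines covering a buffer that a device has just
 * written by DMA, so subsequent CPU reads fetch the new data from memory.
 *
 *	ldr	x0, =rx_buffer		// x0 <- start address
 *	add	x1, x0, #0x200		// x1 <- end address (start + 512 bytes)
 *	bl	__asm_invalidate_dcache_range
 */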

/*
 * void __asm_invalidate_icache_all(void)
 *
 * invalidate all instruction cache entries (inner shareable).
 */
.pushsection .text.__asm_invalidate_icache_all, "ax"
ENTRY(__asm_invalidate_icache_all)
	ic	ialluis
	isb	sy
	ret
ENDPROC(__asm_invalidate_icache_all)
.popsection

.pushsection .text.__asm_invalidate_l3_dcache, "ax"
WEAK(__asm_invalidate_l3_dcache)
	mov	x0, #0			/* return status as success */
	ret
ENDPROC(__asm_invalidate_l3_dcache)
.popsection

/*
 * void __asm_switch_ttbr(ulong new_ttbr)
 *
 * Safely switches to a new page table.
 */
.pushsection .text.__asm_switch_ttbr, "ax"
ENTRY(__asm_switch_ttbr)
	/* x2 = SCTLR (alive throughout the function) */
	switch_el x4, 3f, 2f, 1f
3:	mrs	x2, sctlr_el3
	b	0f
2:	mrs	x2, sctlr_el2
	b	0f
1:	mrs	x2, sctlr_el1
0:

	/* Unset CR_M | CR_C | CR_I in SCTLR to disable the MMU and caches */
	movn	x1, #(CR_M | CR_C | CR_I)
	and	x1, x2, x1
	switch_el x4, 3f, 2f, 1f
3:	msr	sctlr_el3, x1
	b	0f
2:	msr	sctlr_el2, x1
	b	0f
1:	msr	sctlr_el1, x1
0:	isb

	/* This call only clobbers x30 (lr) and x9 (unused) */
	mov	x3, x30
	bl	__asm_invalidate_tlb_all

	/* From here on we're running safely with caches disabled */

	/* Set TTBR to our first argument */
	switch_el x4, 3f, 2f, 1f
3:	msr	ttbr0_el3, x0
	b	0f
2:	msr	ttbr0_el2, x0
	b	0f
1:	msr	ttbr0_el1, x0
0:	isb

	/* Restore original SCTLR and thus enable caches again */
	switch_el x4, 3f, 2f, 1f
3:	msr	sctlr_el3, x2
	b	0f
2:	msr	sctlr_el2, x2
	b	0f
1:	msr	sctlr_el1, x2
0:	isb

	ret	x3
ENDPROC(__asm_switch_ttbr)
.popsection
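
/*
 * Example (illustrative sketch; the symbol name is an assumption): switch
 * to a freshly built set of translation tables. On return the original
 * SCTLR (and hence the MMU/cache state) is restored and TTBR0 points at
 * the new tables.
 *
 *	ldr	x0, =new_page_tables	// x0 <- base of the new tables
 *	bl	__asm_switch_ttbr
 */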