// SPDX-License-Identifier: GPL-2.0+
/*
 * (C) Copyright 2010
 * Texas Instruments, <www.ti.com>
 * Aneesh V <aneesh@ti.com>
 */
#include <cpu_func.h>
#include <asm/cache.h>
#include <linux/types.h>
#include <asm/armv7.h>
#include <asm/utils.h>

#define ARMV7_DCACHE_INVAL_RANGE	1
#define ARMV7_DCACHE_CLEAN_INVAL_RANGE	2

#if !CONFIG_IS_ENABLED(SYS_DCACHE_OFF)

/* Asm functions from cache_v7_asm.S */
void v7_flush_dcache_all(void);
void v7_invalidate_dcache_all(void);

static u32 get_ccsidr(void)
{
	u32 ccsidr;

	/* Read current CP15 Cache Size ID Register */
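	/*
	 * Note: CCSIDR describes the cache selected by CSSELR, so the value
	 * returned here reflects whichever cache level is currently
	 * selected.
	 */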
	asm volatile ("mrc p15, 1, %0, c0, c0, 0" : "=r" (ccsidr));
	return ccsidr;
}

static void v7_dcache_clean_inval_range(u32 start, u32 stop, u32 line_len)
{
	u32 mva;

	/* Align start to cache line boundary */
	start &= ~(line_len - 1);
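	/*
	 * Touching partial lines at the range boundaries is safe here: a
	 * clean & invalidate writes back any dirty data they contain before
	 * the lines are dropped.
	 */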
	for (mva = start; mva < stop; mva = mva + line_len) {
		/* DCCIMVAC - Clean & Invalidate data cache by MVA to PoC */
		asm volatile ("mcr p15, 0, %0, c7, c14, 1" : : "r" (mva));
	}
}

static void v7_dcache_inval_range(u32 start, u32 stop, u32 line_len)
{
	u32 mva;

	if (!check_cache_range(start, stop))
		return;

	for (mva = start; mva < stop; mva = mva + line_len) {
		/* DCIMVAC - Invalidate data cache by MVA to PoC */
		asm volatile ("mcr p15, 0, %0, c7, c6, 1" : : "r" (mva));
	}
}

static void v7_dcache_maint_range(u32 start, u32 stop, u32 range_op)
{
	u32 line_len, ccsidr;

	ccsidr = get_ccsidr();
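	/*
	 * CCSIDR.LineSize encodes log2(words per line) - 2, so the first
	 * "+ 2" below recovers log2(words per line).
	 */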
	line_len = ((ccsidr & CCSIDR_LINE_SIZE_MASK) >>
			CCSIDR_LINE_SIZE_OFFSET) + 2;
	/* Converting from words to bytes */
	line_len += 2;
	/* converting from log2(linelen) to linelen */
	line_len = 1 << line_len;

	switch (range_op) {
	case ARMV7_DCACHE_CLEAN_INVAL_RANGE:
		v7_dcache_clean_inval_range(start, stop, line_len);
		break;
	case ARMV7_DCACHE_INVAL_RANGE:
		v7_dcache_inval_range(start, stop, line_len);
		break;
	}

	/* DSB to make sure the operation is complete */
	dsb();
}

/* Invalidate TLB */
static void v7_inval_tlb(void)
{
	/* Invalidate entire unified TLB */
	asm volatile ("mcr p15, 0, %0, c8, c7, 0" : : "r" (0));
	/* Invalidate entire data TLB */
	asm volatile ("mcr p15, 0, %0, c8, c6, 0" : : "r" (0));
	/* Invalidate entire instruction TLB */
	asm volatile ("mcr p15, 0, %0, c8, c5, 0" : : "r" (0));
	/* Full system DSB - make sure that the invalidation is complete */
	dsb();
	/* Full system ISB - make sure the instruction stream sees it */
	isb();
}

void invalidate_dcache_all(void)
{
	v7_invalidate_dcache_all();

	v7_outer_cache_inval_all();
}

/*
 * Performs a clean & invalidate of the entire data cache
 * at all levels.
 */
void flush_dcache_all(void)
{
	v7_flush_dcache_all();

	v7_outer_cache_flush_all();
}

/*
 * Invalidates the given range in every level of D-cache/unified cache
 * in use.
 * Affects the range [start, stop - 1].
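 * The caller should keep start and stop cache-line aligned: invalidating a
 * partial line would also discard unrelated data sharing that line.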
 */
void invalidate_dcache_range(unsigned long start, unsigned long stop)
{
	check_cache_range(start, stop);

	v7_dcache_maint_range(start, stop, ARMV7_DCACHE_INVAL_RANGE);

	v7_outer_cache_inval_range(start, stop);
}

/*
 * Flushes (cleans & invalidates) the given range in every level of
 * D-cache/unified cache in use.
 * Affects the range [start, stop - 1].
 */
void flush_dcache_range(unsigned long start, unsigned long stop)
{
	check_cache_range(start, stop);

	v7_dcache_maint_range(start, stop, ARMV7_DCACHE_CLEAN_INVAL_RANGE);

	v7_outer_cache_flush_range(start, stop);
}
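
/*
 * Illustrative note: drivers typically call flush_dcache_range() on a buffer
 * before a device reads it via DMA, and invalidate_dcache_range() on it
 * before the CPU reads back data the device has written, with both bounds
 * rounded out to ARCH_DMA_MINALIGN so no cache line is shared with other
 * data.
 */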

void arm_init_before_mmu(void)
{
	v7_outer_cache_enable();
	invalidate_dcache_all();
	v7_inval_tlb();
}

void mmu_page_table_flush(unsigned long start, unsigned long stop)
{
	flush_dcache_range(start, stop);
	v7_inval_tlb();
}
#else /* #if !CONFIG_IS_ENABLED(SYS_DCACHE_OFF) */
void invalidate_dcache_all(void)
{
}

void flush_dcache_all(void)
{
}

void invalidate_dcache_range(unsigned long start, unsigned long stop)
{
}

void flush_dcache_range(unsigned long start, unsigned long stop)
{
}

void arm_init_before_mmu(void)
{
}

void mmu_page_table_flush(unsigned long start, unsigned long stop)
{
}

#endif /* #if !CONFIG_IS_ENABLED(SYS_DCACHE_OFF) */

#if !CONFIG_IS_ENABLED(SYS_ICACHE_OFF)
/* Invalidate entire I-cache and branch predictor array */
void invalidate_icache_all(void)
{
	/*
	 * Invalidate all instruction caches to PoU.
	 * Also flushes branch target cache.
	 */
	asm volatile ("mcr p15, 0, %0, c7, c5, 0" : : "r" (0));

	/* Invalidate entire branch predictor array */
	asm volatile ("mcr p15, 0, %0, c7, c5, 6" : : "r" (0));

	/* Full system DSB - make sure that the invalidation is complete */
	dsb();

	/* ISB - make sure the instruction stream sees it */
	isb();
}
#else
void invalidate_icache_all(void)
{
}
#endif

/* Stub implementations for outer cache operations */
__weak void v7_outer_cache_enable(void) {}
__weak void v7_outer_cache_disable(void) {}
__weak void v7_outer_cache_flush_all(void) {}
__weak void v7_outer_cache_inval_all(void) {}
__weak void v7_outer_cache_flush_range(u32 start, u32 end) {}
__weak void v7_outer_cache_inval_range(u32 start, u32 end) {}
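
/*
 * Illustrative: SoCs with an outer cache (e.g. an L2C-310/PL310 controller)
 * provide their own strong definitions of the hooks above; these __weak
 * stubs only take effect when no such implementation is linked in.
 */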