/*
 * arch/arm/cpu/armv7/cache_v7.c (from thirdparty/u-boot.git;
 * extracted at commit "common: Drop net.h from common header")
 */
1 // SPDX-License-Identifier: GPL-2.0+
2 /*
3 * (C) Copyright 2010
4 * Texas Instruments, <www.ti.com>
5 * Aneesh V <aneesh@ti.com>
6 */
7 #include <cpu_func.h>
8 #include <asm/cache.h>
9 #include <linux/types.h>
10 #include <common.h>
11 #include <asm/armv7.h>
12 #include <asm/utils.h>
13
14 #define ARMV7_DCACHE_INVAL_RANGE 1
15 #define ARMV7_DCACHE_CLEAN_INVAL_RANGE 2
16
17 #if !CONFIG_IS_ENABLED(SYS_DCACHE_OFF)
18
19 /* Asm functions from cache_v7_asm.S */
20 void v7_flush_dcache_all(void);
21 void v7_invalidate_dcache_all(void);
22
/*
 * Read CP15 CCSIDR (Cache Size ID Register).
 *
 * NOTE(review): CCSIDR describes the cache level currently selected by
 * CSSELR, which this file never writes explicitly — presumably the reset
 * value (L1 data/unified cache) is relied upon; confirm against callers.
 *
 * Return: raw CCSIDR value; the line-size field is decoded by the caller.
 */
static u32 get_ccsidr(void)
{
	u32 ccsidr;

	/* Read current CP15 Cache Size ID Register */
	asm volatile ("mrc p15, 1, %0, c0, c0, 0" : "=r" (ccsidr));
	return ccsidr;
}
31
/*
 * Clean & invalidate the D-cache over [start, stop) by MVA to the Point
 * of Coherency, one cache line at a time.
 *
 * @start:    virtual start address; rounded down to a line boundary here
 * @stop:     virtual end address, exclusive (a partial final line is
 *            still covered because the loop runs while mva < stop)
 * @line_len: cache line length in bytes (power of two, from CCSIDR)
 */
static void v7_dcache_clean_inval_range(u32 start, u32 stop, u32 line_len)
{
	u32 mva;

	/* Align start to cache line boundary */
	start &= ~(line_len - 1);
	for (mva = start; mva < stop; mva = mva + line_len) {
		/* DCCIMVAC - Clean & Invalidate data cache by MVA to PoC */
		asm volatile ("mcr p15, 0, %0, c7, c14, 1" : : "r" (mva));
	}
}
43
/*
 * Invalidate (without cleaning) the D-cache over [start, stop) by MVA to
 * the Point of Coherency, one cache line at a time.
 *
 * Bails out early if check_cache_range() rejects the range — presumably
 * it validates cache-line alignment, since invalidating a partially
 * covered line would discard unrelated dirty data (check_cache_range is
 * defined elsewhere; confirm).
 *
 * @start:    virtual start address, expected line-aligned
 * @stop:     virtual end address, exclusive
 * @line_len: cache line length in bytes
 */
static void v7_dcache_inval_range(u32 start, u32 stop, u32 line_len)
{
	u32 mva;

	if (!check_cache_range(start, stop))
		return;

	for (mva = start; mva < stop; mva = mva + line_len) {
		/* DCIMVAC - Invalidate data cache by MVA to PoC */
		asm volatile ("mcr p15, 0, %0, c7, c6, 1" : : "r" (mva));
	}
}
56
57 static void v7_dcache_maint_range(u32 start, u32 stop, u32 range_op)
58 {
59 u32 line_len, ccsidr;
60
61 ccsidr = get_ccsidr();
62 line_len = ((ccsidr & CCSIDR_LINE_SIZE_MASK) >>
63 CCSIDR_LINE_SIZE_OFFSET) + 2;
64 /* Converting from words to bytes */
65 line_len += 2;
66 /* converting from log2(linelen) to linelen */
67 line_len = 1 << line_len;
68
69 switch (range_op) {
70 case ARMV7_DCACHE_CLEAN_INVAL_RANGE:
71 v7_dcache_clean_inval_range(start, stop, line_len);
72 break;
73 case ARMV7_DCACHE_INVAL_RANGE:
74 v7_dcache_inval_range(start, stop, line_len);
75 break;
76 }
77
78 /* DSB to make sure the operation is complete */
79 dsb();
80 }
81
82 /* Invalidate TLB */
/*
 * Invalidate all TLBs (unified, data and instruction), then synchronize
 * with DSB + ISB so subsequent translations and fetched instructions
 * observe the invalidation.
 */
static void v7_inval_tlb(void)
{
	/* Invalidate entire unified TLB */
	asm volatile ("mcr p15, 0, %0, c8, c7, 0" : : "r" (0));
	/* Invalidate entire data TLB */
	asm volatile ("mcr p15, 0, %0, c8, c6, 0" : : "r" (0));
	/* Invalidate entire instruction TLB */
	asm volatile ("mcr p15, 0, %0, c8, c5, 0" : : "r" (0));
	/* Full system DSB - make sure that the invalidation is complete */
	dsb();
	/* Full system ISB - make sure the instruction stream sees it */
	isb();
}
96
/*
 * Invalidate the entire D-cache: the levels handled by the asm helper
 * first, then any outer cache (the outer hooks are weak no-ops unless a
 * platform provides them). Order matters: inner before outer.
 */
void invalidate_dcache_all(void)
{
	v7_invalidate_dcache_all();

	v7_outer_cache_inval_all();
}
103
/*
 * Performs a clean & invalidation of the entire data cache
 * at all levels: the asm helper handles the architected cache levels,
 * then any outer cache is flushed (weak no-op unless a platform
 * provides an implementation).
 */
void flush_dcache_all(void)
{
	v7_flush_dcache_all();

	v7_outer_cache_flush_all();
}
114
/*
 * Invalidates range in all levels of D-cache/unified cache used:
 * Affects the range [start, stop - 1]
 *
 * NOTE(review): the return value of check_cache_range() is ignored here;
 * v7_dcache_inval_range() re-checks and skips the inner invalidate on a
 * bad range, but v7_outer_cache_inval_range() is still called
 * unconditionally — confirm this is the intended behavior for
 * misaligned ranges.
 */
void invalidate_dcache_range(unsigned long start, unsigned long stop)
{
	check_cache_range(start, stop);

	v7_dcache_maint_range(start, stop, ARMV7_DCACHE_INVAL_RANGE);

	v7_outer_cache_inval_range(start, stop);
}
127
/*
 * Flush range(clean & invalidate) from all levels of D-cache/unified
 * cache used:
 * Affects the range [start, stop - 1]
 *
 * check_cache_range() is called for its side effect only (presumably a
 * diagnostic for misaligned ranges — defined elsewhere; confirm); the
 * maintenance proceeds regardless, which is safe for clean+invalidate
 * since no dirty data is lost.
 */
void flush_dcache_range(unsigned long start, unsigned long stop)
{
	check_cache_range(start, stop);

	v7_dcache_maint_range(start, stop, ARMV7_DCACHE_CLEAN_INVAL_RANGE);

	v7_outer_cache_flush_range(start, stop);
}
141
/*
 * Prepare the CPU for MMU/page-table setup: enable the outer cache (if a
 * platform hook exists), invalidate all D-cache contents, and invalidate
 * the TLBs so stale translations cannot survive into the new mapping.
 */
void arm_init_before_mmu(void)
{
	v7_outer_cache_enable();
	invalidate_dcache_all();
	v7_inval_tlb();
}
148
/*
 * Push updated page-table entries in [start, stop) out to memory, then
 * invalidate the TLBs so the table walker picks up the new entries
 * rather than cached stale translations.
 */
void mmu_page_table_flush(unsigned long start, unsigned long stop)
{
	flush_dcache_range(start, stop);
	v7_inval_tlb();
}
154 #else /* #if !CONFIG_IS_ENABLED(SYS_DCACHE_OFF) */
/*
 * No-op stubs used when the D-cache is disabled (SYS_DCACHE_OFF): the
 * public cache API must still link, but there is nothing to maintain.
 */
void invalidate_dcache_all(void)
{
}

void flush_dcache_all(void)
{
}

void invalidate_dcache_range(unsigned long start, unsigned long stop)
{
}

void flush_dcache_range(unsigned long start, unsigned long stop)
{
}

void arm_init_before_mmu(void)
{
}

void mmu_page_table_flush(unsigned long start, unsigned long stop)
{
}

void arm_init_domains(void)
{
}
182 #endif /* #if !CONFIG_IS_ENABLED(SYS_DCACHE_OFF) */
183
184 #if !CONFIG_IS_ENABLED(SYS_ICACHE_OFF)
/* Invalidate entire I-cache and branch predictor array */
void invalidate_icache_all(void)
{
	/*
	 * Invalidate all instruction caches to PoU.
	 * Also flushes branch target cache.
	 */
	asm volatile ("mcr p15, 0, %0, c7, c5, 0" : : "r" (0));

	/* Invalidate entire branch predictor array */
	asm volatile ("mcr p15, 0, %0, c7, c5, 6" : : "r" (0));

	/* Full system DSB - make sure that the invalidation is complete */
	dsb();

	/* ISB - make sure the instruction stream sees it */
	isb();
}
203 #else
/* No-op stub when the I-cache is disabled (SYS_ICACHE_OFF) */
void invalidate_icache_all(void)
{
}
207 #endif
208
/*
 * Stub implementations for outer cache operations: weak no-op defaults,
 * overridden by a platform's outer-cache (e.g. L2) driver when present.
 */
__weak void v7_outer_cache_enable(void) {}
__weak void v7_outer_cache_disable(void) {}
__weak void v7_outer_cache_flush_all(void) {}
__weak void v7_outer_cache_inval_all(void) {}
__weak void v7_outer_cache_flush_range(u32 start, u32 end) {}
__weak void v7_outer_cache_inval_range(u32 start, u32 end) {}