// SPDX-License-Identifier: GPL-2.0+
/*
 * Copyright (C) 2017, STMicroelectronics - All Rights Reserved
 * Author(s): Vikas Manocha, <vikas.manocha@st.com> for STMicroelectronics.
 */

#include <common.h>
#include <errno.h>
#include <asm/armv7m.h>
#include <asm/io.h>

/* Cache maintenance operation registers */

#define V7M_CACHE_REG_ICIALLU ((u32 *)(V7M_CACHE_MAINT_BASE + 0x00))
#define INVAL_ICACHE_POU 0
#define V7M_CACHE_REG_ICIMVALU ((u32 *)(V7M_CACHE_MAINT_BASE + 0x08))
#define V7M_CACHE_REG_DCIMVAC ((u32 *)(V7M_CACHE_MAINT_BASE + 0x0C))
#define V7M_CACHE_REG_DCISW ((u32 *)(V7M_CACHE_MAINT_BASE + 0x10))
#define V7M_CACHE_REG_DCCMVAU ((u32 *)(V7M_CACHE_MAINT_BASE + 0x14))
#define V7M_CACHE_REG_DCCMVAC ((u32 *)(V7M_CACHE_MAINT_BASE + 0x18))
#define V7M_CACHE_REG_DCCSW ((u32 *)(V7M_CACHE_MAINT_BASE + 0x1C))
#define V7M_CACHE_REG_DCCIMVAC ((u32 *)(V7M_CACHE_MAINT_BASE + 0x20))
#define V7M_CACHE_REG_DCCISW ((u32 *)(V7M_CACHE_MAINT_BASE + 0x24))
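/*
 * The shifts below match the set/way register (DCISW/DCCSW/DCCISW) field
 * layout for the usual Cortex-M7 cache geometry (assumed here, not probed):
 * 4 ways put the way index in bits [31:30], and 32-byte lines put the set
 * index at bit 5 and above.
 */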
#define WAYS_SHIFT 30
#define SETS_SHIFT 5

/* armv7m processor feature registers */

#define V7M_PROC_REG_CLIDR ((u32 *)(V7M_PROC_FTR_BASE + 0x00))
#define V7M_PROC_REG_CTR ((u32 *)(V7M_PROC_FTR_BASE + 0x04))
#define V7M_PROC_REG_CCSIDR ((u32 *)(V7M_PROC_FTR_BASE + 0x08))
#define MASK_NUM_WAYS GENMASK(12, 3)
#define MASK_NUM_SETS GENMASK(27, 13)
#define CLINE_SIZE_MASK GENMASK(2, 0)
#define NUM_WAYS_SHIFT 3
#define NUM_SETS_SHIFT 13
#define V7M_PROC_REG_CSSELR ((u32 *)(V7M_PROC_FTR_BASE + 0x0C))
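/* CSSELR bit 0 (InD): 0 selects the data cache, 1 the instruction cache */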
#define SEL_I_OR_D BIT(0)

enum cache_type {
	DCACHE,
	ICACHE,
};

/* PoU: Point of Unification, PoC: Point of Coherency */
enum cache_action {
	INVALIDATE_POU,		/* i-cache invalidate by address */
	INVALIDATE_POC,		/* d-cache invalidate by address */
	INVALIDATE_SET_WAY,	/* d-cache invalidate by sets/ways */
	FLUSH_POU,		/* d-cache clean by address to the PoU */
	FLUSH_POC,		/* d-cache clean by address to the PoC */
	FLUSH_SET_WAY,		/* d-cache clean by sets/ways */
	FLUSH_INVAL_POC,	/* d-cache clean & invalidate by addr to PoC */
	FLUSH_INVAL_SET_WAY,	/* d-cache clean & invalidate by sets/ways */
};

#if !CONFIG_IS_ENABLED(SYS_DCACHE_OFF)
struct dcache_config {
	u32 ways;
	u32 sets;
};

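/*
 * Worked example, assuming a 16 KiB 4-way data cache with 32-byte lines (a
 * common Cortex-M7 configuration): CCSIDR reports Associativity = 3 and
 * NumSets = 127, i.e. "count minus one", giving 4 ways and 128 sets.
 */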
static void get_cache_ways_sets(struct dcache_config *cache)
{
	u32 cache_size_id = readl(V7M_PROC_REG_CCSIDR);

	cache->ways = (cache_size_id & MASK_NUM_WAYS) >> NUM_WAYS_SHIFT;
	cache->sets = (cache_size_id & MASK_NUM_SETS) >> NUM_SETS_SHIFT;
}

/*
 * Return the I/O register used to perform the requested cache action, such
 * as clean or clean & invalidate, by sets/ways.
 */
static u32 *get_action_reg_set_ways(enum cache_action action)
{
	switch (action) {
	case INVALIDATE_SET_WAY:
		return V7M_CACHE_REG_DCISW;
	case FLUSH_SET_WAY:
		return V7M_CACHE_REG_DCCSW;
	case FLUSH_INVAL_SET_WAY:
		return V7M_CACHE_REG_DCCISW;
	default:
		break;
	}

	return NULL;
}

/*
 * Return the I/O register used to perform the requested cache action, such
 * as clean or clean & invalidate, by address or range.
 */
static u32 *get_action_reg_range(enum cache_action action)
{
	switch (action) {
	case INVALIDATE_POU:
		return V7M_CACHE_REG_ICIMVALU;
	case INVALIDATE_POC:
		return V7M_CACHE_REG_DCIMVAC;
	case FLUSH_POU:
		return V7M_CACHE_REG_DCCMVAU;
	case FLUSH_POC:
		return V7M_CACHE_REG_DCCMVAC;
	case FLUSH_INVAL_POC:
		return V7M_CACHE_REG_DCCIMVAC;
	default:
		break;
	}

	return NULL;
}

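/*
 * Example, assuming the usual 32-byte Cortex-M7 cache line: CCSIDR.LineSize
 * reads back 1, so this helper returns 1 << (1 + 2) = 8, i.e. the line
 * length expressed in 32-bit words rather than in bytes.
 */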
static u32 get_cline_size(enum cache_type type)
{
	u32 size;

	if (type == DCACHE)
		clrbits_le32(V7M_PROC_REG_CSSELR, SEL_I_OR_D);
	else if (type == ICACHE)
		setbits_le32(V7M_PROC_REG_CSSELR, SEL_I_OR_D);
	/* Make sure cache selection is effective for next memory access */
	dsb();

	size = readl(V7M_PROC_REG_CCSIDR) & CLINE_SIZE_MASK;
	/* LineSize is encoded as log2(words per cache line) minus 2 */
	size = 1 << (size + 2);
	debug("cache line size is %d\n", size);

	return size;
}

/* Perform the requested action (invalidate/clean) on a range of addresses */
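/*
 * Illustrative walk (hypothetical numbers): a request starting at 0x20000004
 * is first aligned down to the enclosing line boundary, then one maintenance
 * write is issued per line-sized step until the end of the requested range
 * has been covered.
 */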
static int action_cache_range(enum cache_action action, u32 start_addr,
			      int64_t size)
{
	u32 cline_size;
	u32 *action_reg;
	enum cache_type type;

	action_reg = get_action_reg_range(action);
	if (!action_reg)
		return -EINVAL;
	if (action == INVALIDATE_POU)
		type = ICACHE;
	else
		type = DCACHE;

	/* The cache line size is the minimum granule for the cache action */
	cline_size = get_cline_size(type);
	/* Account for the bytes dropped by aligning the start address down */
	size += start_addr & (cline_size - 1);
	/* Align start address to cache line boundary */
	start_addr &= ~(cline_size - 1);
	debug("total size for cache action = %llx\n", size);
	do {
		writel(start_addr, action_reg);
		size -= cline_size;
		start_addr += cline_size;
	} while (size > 0);

	/* Make sure cache action is effective for next memory access */
	dsb();
	isb();	/* Make sure instruction stream sees it */
	debug("cache action on range done\n");

	return 0;
}

/* Perform the requested action (invalidate/clean) on the whole D-cache by sets/ways */
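/*
 * Each write below encodes one cache line; for example (illustrative values)
 * way 3 of set 10 is addressed as (3 << WAYS_SHIFT) | (10 << SETS_SHIFT).
 */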
static int action_dcache_all(enum cache_action action)
{
	struct dcache_config cache;
	u32 *action_reg;
	int i, j;

	action_reg = get_action_reg_set_ways(action);
	if (!action_reg)
		return -EINVAL;

	clrbits_le32(V7M_PROC_REG_CSSELR, SEL_I_OR_D);
	/* Make sure cache selection is effective for next memory access */
	dsb();

	get_cache_ways_sets(&cache);	/* Get number of ways & sets */
	debug("cache: ways= %d, sets= %d\n", cache.ways + 1, cache.sets + 1);
	for (i = cache.sets; i >= 0; i--) {
		for (j = cache.ways; j >= 0; j--) {
			writel((j << WAYS_SHIFT) | (i << SETS_SHIFT),
			       action_reg);
		}
	}

	/* Make sure cache action is effective for next memory access */
	dsb();
	isb();	/* Make sure instruction stream sees it */

	return 0;
}

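/*
 * General ARMv7-M practice (not specific to this port): the D-cache is
 * invalidated by set/way before being enabled, so no stale lines are hit
 * once caching is turned on.
 */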
void dcache_enable(void)
{
	if (dcache_status())	/* return if cache already enabled */
		return;

	if (action_dcache_all(INVALIDATE_SET_WAY)) {
		printf("ERR: D-cache not enabled\n");
		return;
	}

	setbits_le32(&V7M_SCB->ccr, BIT(V7M_CCR_DCACHE));

	/* Make sure cache action is effective for next memory access */
	dsb();
	isb();	/* Make sure instruction stream sees it */
}

void dcache_disable(void)
{
	if (!dcache_status())
		return;

	/* Clean the whole D-cache so dirty lines reach memory before disabling */
	if (action_dcache_all(FLUSH_SET_WAY)) {
		printf("ERR: D-cache not flushed\n");
		return;
	}

	clrbits_le32(&V7M_SCB->ccr, BIT(V7M_CCR_DCACHE));

	/* Make sure cache action is effective for next memory access */
	dsb();
	isb();	/* Make sure instruction stream sees it */
}

int dcache_status(void)
{
	return (readl(&V7M_SCB->ccr) & BIT(V7M_CCR_DCACHE)) != 0;
}

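/*
 * Typical (illustrative) use of the range helpers around DMA: clean a buffer
 * the CPU has written before a device reads it, and invalidate it before the
 * CPU reads data a device has written, e.g.:
 *
 *	flush_dcache_range((ulong)buf, (ulong)buf + len);
 *	invalidate_dcache_range((ulong)buf, (ulong)buf + len);
 *
 * where buf and len are hypothetical buffer address/length variables.
 */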
void invalidate_dcache_range(unsigned long start, unsigned long stop)
{
	if (action_cache_range(INVALIDATE_POC, start, stop - start)) {
		printf("ERR: D-cache not invalidated\n");
		return;
	}
}

void flush_dcache_range(unsigned long start, unsigned long stop)
{
	if (action_cache_range(FLUSH_POC, start, stop - start)) {
		printf("ERR: D-cache not flushed\n");
		return;
	}
}

void flush_dcache_all(void)
{
	if (action_dcache_all(FLUSH_SET_WAY)) {
		printf("ERR: D-cache not flushed\n");
		return;
	}
}

void invalidate_dcache_all(void)
{
	if (action_dcache_all(INVALIDATE_SET_WAY)) {
		printf("ERR: D-cache not invalidated\n");
		return;
	}
}
#else
void dcache_enable(void)
{
	return;
}

void dcache_disable(void)
{
	return;
}

int dcache_status(void)
{
	return 0;
}

void flush_dcache_all(void)
{
}

void invalidate_dcache_all(void)
{
}
#endif

#if !CONFIG_IS_ENABLED(SYS_ICACHE_OFF)

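/* Writing any value to ICIALLU invalidates the entire I-cache to the PoU */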
void invalidate_icache_all(void)
{
	writel(INVAL_ICACHE_POU, V7M_CACHE_REG_ICIALLU);

	/* Make sure cache action is effective for next memory access */
	dsb();
	isb();	/* Make sure instruction stream sees it */
}

void icache_enable(void)
{
	if (icache_status())
		return;

	invalidate_icache_all();
	setbits_le32(&V7M_SCB->ccr, BIT(V7M_CCR_ICACHE));

	/* Make sure cache action is effective for next memory access */
	dsb();
	isb();	/* Make sure instruction stream sees it */
}

int icache_status(void)
{
	return (readl(&V7M_SCB->ccr) & BIT(V7M_CCR_ICACHE)) != 0;
}

void icache_disable(void)
{
	if (!icache_status())
		return;

	isb();	/* flush the pipeline before turning the I-cache off */
	clrbits_le32(&V7M_SCB->ccr, BIT(V7M_CCR_ICACHE));
	isb();	/* make sure subsequent instruction fetches see the disable */
}
#else
void icache_enable(void)
{
	return;
}

void icache_disable(void)
{
	return;
}

int icache_status(void)
{
	return 0;
}
#endif

void enable_caches(void)
{
#if !CONFIG_IS_ENABLED(SYS_ICACHE_OFF)
	icache_enable();
#endif
#if !CONFIG_IS_ENABLED(SYS_DCACHE_OFF)
	dcache_enable();
#endif
}