]> git.ipfire.org Git - thirdparty/u-boot.git/blob - arch/nds32/lib/cache.c
SPDX: Convert all of our single license tags to Linux Kernel style
[thirdparty/u-boot.git] / arch / nds32 / lib / cache.c
1 // SPDX-License-Identifier: GPL-2.0+
2 /*
3 * Copyright (C) 2012 Andes Technology Corporation
4 * Shawn Lin, Andes Technology Corporation <nobuhiro@andestech.com>
5 * Macpaul Lin, Andes Technology Corporation <macpaul@andestech.com>
6 */
7
8 #include <common.h>
9 #if (!defined(CONFIG_SYS_ICACHE_OFF) || !defined(CONFIG_SYS_DCACHE_OFF))
10 static inline unsigned long CACHE_SET(unsigned char cache)
11 {
12 if (cache == ICACHE)
13 return 64 << ((GET_ICM_CFG() & ICM_CFG_MSK_ISET) \
14 >> ICM_CFG_OFF_ISET);
15 else
16 return 64 << ((GET_DCM_CFG() & DCM_CFG_MSK_DSET) \
17 >> DCM_CFG_OFF_DSET);
18 }
19
20 static inline unsigned long CACHE_WAY(unsigned char cache)
21 {
22 if (cache == ICACHE)
23 return 1 + ((GET_ICM_CFG() & ICM_CFG_MSK_IWAY) \
24 >> ICM_CFG_OFF_IWAY);
25 else
26 return 1 + ((GET_DCM_CFG() & DCM_CFG_MSK_DWAY) \
27 >> DCM_CFG_OFF_DWAY);
28 }
29
30 static inline unsigned long CACHE_LINE_SIZE(enum cache_t cache)
31 {
32 if (cache == ICACHE)
33 return 8 << (((GET_ICM_CFG() & ICM_CFG_MSK_ISZ) \
34 >> ICM_CFG_OFF_ISZ) - 1);
35 else
36 return 8 << (((GET_DCM_CFG() & DCM_CFG_MSK_DSZ) \
37 >> DCM_CFG_OFF_DSZ) - 1);
38 }
39 #endif
40
41 #ifndef CONFIG_SYS_ICACHE_OFF
42 void invalidate_icache_all(void)
43 {
44 unsigned long end, line_size;
45 line_size = CACHE_LINE_SIZE(ICACHE);
46 end = line_size * CACHE_WAY(ICACHE) * CACHE_SET(ICACHE);
47 do {
48 end -= line_size;
49 __asm__ volatile ("\n\tcctl %0, L1I_IX_INVAL" : : "r" (end));
50
51 end -= line_size;
52 __asm__ volatile ("\n\tcctl %0, L1I_IX_INVAL" : : "r" (end));
53
54 end -= line_size;
55 __asm__ volatile ("\n\tcctl %0, L1I_IX_INVAL" : : "r" (end));
56 end -= line_size;
57 __asm__ volatile ("\n\tcctl %0, L1I_IX_INVAL" : : "r" (end));
58 } while (end > 0);
59 }
60
61 void invalidate_icache_range(unsigned long start, unsigned long end)
62 {
63 unsigned long line_size;
64
65 line_size = CACHE_LINE_SIZE(ICACHE);
66 while (end > start) {
67 asm volatile (
68 "\n\tcctl %0, L1I_VA_INVAL"
69 :
70 : "r"(start)
71 );
72 start += line_size;
73 }
74 }
75
/*
 * Enable the instruction cache: set bit 0 (IC_EN) of the cache control
 * register $mr8, then issue an isb so the change takes effect before
 * subsequent instruction fetches.  Clobbers scratch register $p0.
 */
void icache_enable(void)
{
	__asm__ volatile (
		"mfsr $p0, $mr8\n\t"
		"ori $p0, $p0, 0x01\n\t"
		"mtsr $p0, $mr8\n\t"
		"isb\n\t"
	);
}
85
/*
 * Disable the instruction cache: clear bit 0 (IC_EN) of $mr8 and
 * synchronize with isb.  Clobbers scratch registers $p0 and $p1.
 */
void icache_disable(void)
{
	__asm__ volatile (
		"mfsr $p0, $mr8\n\t"
		"li $p1, ~0x01\n\t"
		"and $p0, $p0, $p1\n\t"
		"mtsr $p0, $mr8\n\t"
		"isb\n\t"
	);
}
96
/*
 * Report whether the instruction cache is enabled.
 *
 * Returns non-zero (bit 0 of $mr8) when the I-cache is on, 0 otherwise.
 * Clobbers scratch register $p0.
 */
int icache_status(void)
{
	int enabled;

	__asm__ volatile (
		"mfsr $p0, $mr8\n\t"
		"andi %0, $p0, 0x01\n\t"
		: "=r" (enabled)
		:
		: "memory"
	);

	return enabled;
}
111
112 #else
/* I-cache support compiled out (CONFIG_SYS_ICACHE_OFF): nothing to do. */
void invalidate_icache_all(void)
{
}
116
/* I-cache support compiled out (CONFIG_SYS_ICACHE_OFF): nothing to do. */
void invalidate_icache_range(unsigned long start, unsigned long end)
{
}
120
/* I-cache support compiled out (CONFIG_SYS_ICACHE_OFF): nothing to do. */
void icache_enable(void)
{
}
124
/* I-cache support compiled out (CONFIG_SYS_ICACHE_OFF): nothing to do. */
void icache_disable(void)
{
}
128
/* I-cache support compiled out: always report the cache as disabled. */
int icache_status(void)
{
	return 0;
}
133
134 #endif
135
136 #ifndef CONFIG_SYS_DCACHE_OFF
137 void dcache_wbinval_all(void)
138 {
139 unsigned long end, line_size;
140 line_size = CACHE_LINE_SIZE(DCACHE);
141 end = line_size * CACHE_WAY(DCACHE) * CACHE_SET(DCACHE);
142 do {
143 end -= line_size;
144 __asm__ volatile ("\n\tcctl %0, L1D_IX_WB" : : "r" (end));
145 __asm__ volatile ("\n\tcctl %0, L1D_IX_INVAL" : : "r" (end));
146 end -= line_size;
147 __asm__ volatile ("\n\tcctl %0, L1D_IX_WB" : : "r" (end));
148 __asm__ volatile ("\n\tcctl %0, L1D_IX_INVAL" : : "r" (end));
149 end -= line_size;
150 __asm__ volatile ("\n\tcctl %0, L1D_IX_WB" : : "r" (end));
151 __asm__ volatile ("\n\tcctl %0, L1D_IX_INVAL" : : "r" (end));
152 end -= line_size;
153 __asm__ volatile ("\n\tcctl %0, L1D_IX_WB" : : "r" (end));
154 __asm__ volatile ("\n\tcctl %0, L1D_IX_INVAL" : : "r" (end));
155
156 } while (end > 0);
157 }
158
159 void flush_dcache_range(unsigned long start, unsigned long end)
160 {
161 unsigned long line_size;
162
163 line_size = CACHE_LINE_SIZE(DCACHE);
164
165 while (end > start) {
166 asm volatile (
167 "\n\tcctl %0, L1D_VA_WB"
168 "\n\tcctl %0, L1D_VA_INVAL" : : "r" (start)
169 );
170 start += line_size;
171 }
172 }
173
174 void invalidate_dcache_range(unsigned long start, unsigned long end)
175 {
176 unsigned long line_size;
177
178 line_size = CACHE_LINE_SIZE(DCACHE);
179 while (end > start) {
180 asm volatile (
181 "\n\tcctl %0, L1D_VA_INVAL" : : "r"(start)
182 );
183 start += line_size;
184 }
185 }
186
/*
 * Enable the data cache: set bit 1 (DC_EN) of the cache control
 * register $mr8, then isb to synchronize.  Clobbers scratch $p0.
 */
void dcache_enable(void)
{
	__asm__ volatile (
		"mfsr $p0, $mr8\n\t"
		"ori $p0, $p0, 0x02\n\t"
		"mtsr $p0, $mr8\n\t"
		"isb\n\t"
	);
}
196
/*
 * Disable the data cache: clear bit 1 (DC_EN) of $mr8 and isb.
 * Clobbers scratch registers $p0 and $p1.
 */
void dcache_disable(void)
{
	__asm__ volatile (
		"mfsr $p0, $mr8\n\t"
		"li $p1, ~0x02\n\t"
		"and $p0, $p0, $p1\n\t"
		"mtsr $p0, $mr8\n\t"
		"isb\n\t"
	);
}
207
/*
 * Report whether the data cache is enabled.
 *
 * Returns non-zero (bit 1 of $mr8) when the D-cache is on, 0 otherwise.
 * Clobbers scratch register $p0.
 */
int dcache_status(void)
{
	int enabled;

	__asm__ volatile (
		"mfsr $p0, $mr8\n\t"
		"andi %0, $p0, 0x02\n\t"
		: "=r" (enabled)
		:
		: "memory"
	);

	return enabled;
}
220
221 #else
/* D-cache support compiled out (CONFIG_SYS_DCACHE_OFF): nothing to do. */
void dcache_wbinval_all(void)
{
}
225
/* D-cache support compiled out (CONFIG_SYS_DCACHE_OFF): nothing to do. */
void flush_dcache_range(unsigned long start, unsigned long end)
{
}
229
/* D-cache support compiled out (CONFIG_SYS_DCACHE_OFF): nothing to do. */
void invalidate_dcache_range(unsigned long start, unsigned long end)
{
}
233
/* D-cache support compiled out (CONFIG_SYS_DCACHE_OFF): nothing to do. */
void dcache_enable(void)
{
}
237
/* D-cache support compiled out (CONFIG_SYS_DCACHE_OFF): nothing to do. */
void dcache_disable(void)
{
}
241
/* D-cache support compiled out: always report the cache as disabled. */
int dcache_status(void)
{
	return 0;
}
246
247 #endif
248
249
/*
 * Flush the whole data cache: write back and invalidate every line.
 * Thin wrapper around dcache_wbinval_all() to match the common U-Boot
 * cache API name.
 */
void flush_dcache_all(void)
{
	dcache_wbinval_all();
}
254
/*
 * Flush both caches entirely: write back + invalidate the D-cache,
 * then invalidate the I-cache so newly written code becomes visible
 * to instruction fetch.
 */
void cache_flush(void)
{
	flush_dcache_all();
	invalidate_icache_all();
}
260
261
/*
 * Flush the address range [addr, addr + size) through both caches:
 * write back + invalidate D-cache lines, then invalidate the matching
 * I-cache lines (e.g. after loading code into RAM).
 */
void flush_cache(unsigned long addr, unsigned long size)
{
	unsigned long end = addr + size;

	flush_dcache_range(addr, end);
	invalidate_icache_range(addr, end);
}