]> git.ipfire.org Git - people/ms/u-boot.git/blob - arch/arm/cpu/arm_cortexa8/omap3/cache.S
arm: Move cpu/$CPU to arch/arm/cpu/$CPU
[people/ms/u-boot.git] / arch / arm / cpu / arm_cortexa8 / omap3 / cache.S
1 /*
2 * Copyright (c) 2009 Wind River Systems, Inc.
3 * Tom Rix <Tom.Rix@windriver.com>
4 *
5 * This file is based on and replaces the existing cache.c file
6 * The copyrights for the cache.c file are:
7 *
8 * (C) Copyright 2008 Texas Instruments
9 *
10 * (C) Copyright 2002
11 * Sysgo Real-Time Solutions, GmbH <www.elinos.com>
12 * Marius Groeger <mgroeger@sysgo.de>
13 *
14 * (C) Copyright 2002
15 * Gary Jennejohn, DENX Software Engineering, <gj@denx.de>
16 *
17 * See file CREDITS for list of people who contributed to this
18 * project.
19 *
20 * This program is free software; you can redistribute it and/or
21 * modify it under the terms of the GNU General Public License as
22 * published by the Free Software Foundation; either version 2 of
23 * the License, or (at your option) any later version.
24 *
25 * This program is distributed in the hope that it will be useful,
26 * but WITHOUT ANY WARRANTY; without even the implied warranty of
27 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
28 * GNU General Public License for more details.
29 *
30 * You should have received a copy of the GNU General Public License
31 * along with this program; if not, write to the Free Software
32 * Foundation, Inc., 59 Temple Place, Suite 330, Boston,
33 * MA 02111-1307 USA
34 */
35
36 #include <asm/arch/omap3.h>
37
38 /*
39 * omap3 cache code
40 */
41
42 .align 5
43 .global invalidate_dcache
44 .global l2_cache_enable
45 .global l2_cache_disable
46
47 /*
48 * invalidate_dcache()
49 *
50 * Invalidate the whole D-cache.
51 *
52 * Corrupted registers: r0-r5, r7, r9-r11
53 *
54 * - r0 - device type (0x3 = GP device).  On a GP device the L2
55 *        invalidation is requested from the SMI monitor (smieq
56 *        below) and the routine returns early; otherwise the
57 *        ARMv7 set/way loop walks every data/unified cache level.
58 */
56 invalidate_dcache:
57 stmfd r13!, {r0 - r5, r7, r9 - r12, r14}
58
59 mov r7, r0 @ take a backup of device type
60 cmp r0, #0x3 @ check if the device type is
61 @ GP
62 moveq r12, #0x1 @ set up to invalidate L2
63 smi: .word 0x01600070 @ Call SMI monitor (smieq);
@ hand-encoded, executes only
@ when the cmp above set EQ
64 cmp r7, #0x3 @ compare again in case its
65 @ lost
66 beq finished_inval @ if GP device, inval done
67 @ above
68
69 mrc p15, 1, r0, c0, c0, 1 @ read clidr
70 ands r3, r0, #0x7000000 @ extract loc from clidr
71 mov r3, r3, lsr #23 @ r3 = LoC * 2 (the level
@ counter r10 below is also
@ kept as level * 2)
72 beq finished_inval @ if loc is 0, then no need to
73 @ clean (flags are from the
74 @ ands above; mov sets none)
74 mov r10, #0 @ start clean at cache level 0
75 inval_loop1:
76 add r2, r10, r10, lsr #1 @ work out 3x current cache
77 @ level
78 mov r1, r0, lsr r2 @ extract cache type bits from
79 @ clidr
80 and r1, r1, #7 @ mask of the bits for current
81 @ cache only
82 cmp r1, #2 @ see what cache we have at
83 @ this level
84 blt skip_inval @ skip if no cache, or just
85 @ i-cache
86 mcr p15, 2, r10, c0, c0, 0 @ select current cache level
87 @ in cssr
88 mov r2, #0 @ operand for mcr SBZ
89 mcr p15, 0, r2, c7, c5, 4 @ flush prefetch buffer to
90 @ sych the new cssr&csidr,
91 @ with armv7 this is 'isb',
92 @ but we compile with armv5
93 mrc p15, 1, r1, c0, c0, 0 @ read the new csidr
94 and r2, r1, #7 @ extract the length of the
95 @ cache lines
96 add r2, r2, #4 @ add 4 (line length offset)
97 ldr r4, =0x3ff
98 ands r4, r4, r1, lsr #3 @ find maximum number on the
99 @ way size
100 clz r5, r4 @ find bit position of way
101 @ size increment
102 ldr r7, =0x7fff @ r7 reused: device type copy
@ no longer needed past here
103 ands r7, r7, r1, lsr #13 @ extract max number of the
104 @ index size
105 inval_loop2:
106 mov r9, r4 @ create working copy of max
107 @ way size
108 inval_loop3:
109 orr r11, r10, r9, lsl r5 @ factor way and cache number
110 @ into r11
111 orr r11, r11, r7, lsl r2 @ factor index number into r11
112 mcr p15, 0, r11, c7, c6, 2 @ invalidate by set/way
113 subs r9, r9, #1 @ decrement the way
114 bge inval_loop3
115 subs r7, r7, #1 @ decrement the index
116 bge inval_loop2
117 skip_inval:
118 add r10, r10, #2 @ increment cache number
119 cmp r3, r10
120 bgt inval_loop1
121 finished_inval:
122 mov r10, #0 @ switch back to cache level 0
123 mcr p15, 2, r10, c0, c0, 0 @ select current cache level
124 @ in cssr
125 mcr p15, 0, r10, c7, c5, 4 @ flush prefetch buffer,
126 @ with armv7 this is 'isb',
127 @ but we compile with armv5
128
129 ldmfd r13!, {r0 - r5, r7, r9 - r12, pc}
130
131
@
@ l2_cache_enable() - set the L2EN bit (bit 1) of the cp15
@ auxiliary control register.  From ES2 silicon onwards the bit
@ is writable directly; earlier GP devices must go through the
@ ROM code SMI service (function #3 in r12/ip).
@ Clobbers: r3, ip (r1/r2/r3 are rewritten by the final pop)
@
132 l2_cache_enable:
133 push {r0, r1, r2, lr}
134 @ ES2 onwards we can disable/enable L2 ourselves
135 bl get_cpu_rev @ r0 = CPU revision
136 cmp r0, #CPU_3XX_ES20
137 blt l2_cache_enable_EARLIER_THAN_ES2 @ bugfix: previously
@ branched to l2_cache_disable's
@ pre-ES2 path, which CLEARS
@ L2EN instead of setting it
138 mrc 15, 0, r3, cr1, cr0, 1 @ read AUXCR
139 orr r3, r3, #2 @ set L2EN (bit 1)
140 mcr 15, 0, r3, cr1, cr0, 1
141 b l2_cache_enable_END
142 l2_cache_enable_EARLIER_THAN_ES2:
143 @ Save r0, r12 and restore them after usage
144 mov r3, ip
145 str r3, [sp, #4] @ stash original ip in the
@ stacked-r1 slot
146 mov r3, r0 @ r3 = saved r0 (cpu rev)
147 @
148 @ GP Device ROM code API usage here
149 @ r12 = AUXCR Write function and r0 value
150 @
151 mov ip, #3 @ ROM API: AUXCR write
152 mrc 15, 0, r0, cr1, cr0, 1 @ r0 = AUXCR value to write
153 orr r0, r0, #2 @ set L2EN in that value
154 @ SMI instruction to call ROM Code API
155 .word 0xe1600070 @ smc #0 (hand-encoded)
156 mov r0, r3 @ restore r0
157 mov ip, r3 @ NOTE(review): this puts the
@ saved r0, not the saved ip,
@ back into ip -- the saved ip
@ is at [sp, #4]; verify
158 str r3, [sp, #4]
159 l2_cache_enable_END:
160 pop {r1, r2, r3, pc}
161
162
@
@ l2_cache_disable() - clear the L2EN bit (bit 1) of the cp15
@ auxiliary control register.  From ES2 silicon onwards the bit
@ is writable directly; earlier GP devices must go through the
@ ROM code SMI service (function #3 in r12/ip).
@ Clobbers: r3, ip (r1/r2/r3 are rewritten by the final pop)
@
163 l2_cache_disable:
164 push {r0, r1, r2, lr}
165 @ ES2 onwards we can disable/enable L2 ourselves
166 bl get_cpu_rev @ r0 = CPU revision
167 cmp r0, #CPU_3XX_ES20
168 blt l2_cache_disable_EARLIER_THAN_ES2
169 mrc 15, 0, r3, cr1, cr0, 1 @ read AUXCR
170 bic r3, r3, #2 @ clear L2EN (bit 1)
171 mcr 15, 0, r3, cr1, cr0, 1
172 b l2_cache_disable_END
173 l2_cache_disable_EARLIER_THAN_ES2:
174 @ Save r0, r12 and restore them after usage
175 mov r3, ip
176 str r3, [sp, #4] @ stash original ip in the
@ stacked-r1 slot
177 mov r3, r0 @ r3 = saved r0 (cpu rev)
178 @
179 @ GP Device ROM code API usage here
180 @ r12 = AUXCR Write function and r0 value
181 @
182 mov ip, #3 @ ROM API: AUXCR write
183 mrc 15, 0, r0, cr1, cr0, 1 @ r0 = AUXCR value to write
184 bic r0, r0, #2 @ clear L2EN in that value
185 @ SMI instruction to call ROM Code API
186 .word 0xe1600070 @ smc #0 (hand-encoded)
187 mov r0, r3 @ restore r0
188 mov ip, r3 @ NOTE(review): this puts the
@ saved r0, not the saved ip,
@ back into ip -- the saved ip
@ is at [sp, #4]; verify
189 str r3, [sp, #4]
190 l2_cache_disable_END:
191 pop {r1, r2, r3, pc}