2 * Copyright (c) 2010 Per Odlund <per.odlund@armagedon.se>
3 * Copyright (C) 2011 MARVELL INTERNATIONAL LTD.
6 * Developed by Semihalf.
8 * Redistribution and use in source and binary forms, with or without
9 * modification, are permitted provided that the following conditions
11 * 1. Redistributions of source code must retain the above copyright
12 * notice, this list of conditions and the following disclaimer.
13 * 2. Redistributions in binary form must reproduce the above copyright
14 * notice, this list of conditions and the following disclaimer in the
15 * documentation and/or other materials provided with the distribution.
16 * 3. Neither the name of MARVELL nor the names of contributors
17 * may be used to endorse or promote products derived from this software
18 * without specific prior written permission.
20 * THIS SOFTWARE IS PROVIDED BY AUTHOR AND CONTRIBUTORS ``AS IS'' AND
21 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
22 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
23 * ARE DISCLAIMED. IN NO EVENT SHALL AUTHOR OR CONTRIBUTORS BE LIABLE
24 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
25 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
26 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
27 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
28 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
29 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
33 #include <machine/asm.h>
34 __FBSDID("$FreeBSD$");
/*
 * Literal pool: addresses of C-level cache-description variables, loaded
 * PC-relative by the routines below (e.g. ldr r0, .Lcoherency_level).
 * NOTE(review): the .L labels naming these words are not visible in this
 * excerpt -- confirm against the full file.
 */
39 .word _C_LABEL(arm_cache_loc)
41 .word _C_LABEL(arm_cache_type)
/*
 * Translation-table-base attribute bits (low-order bits of the TTBR value):
 * shareability and inner/outer cacheability used for page-table walks.
 * NOTE(review): PT_S is referenced below but its #define is not visible in
 * this excerpt, and two PT_ATTR definitions appear back to back -- in the
 * full file they are presumably SMP/!SMP alternatives selected by elided
 * #if/#else/#endif lines. Confirm against the complete source.
 */
49 #define PT_NOS (1 << 5)
/* Inner cacheability encodings: write-through / write-back / WB write-allocate */
52 #define PT_INNER_WT (1 << 0)
53 #define PT_INNER_WB ((1 << 0) | (1 << 6))
54 #define PT_INNER_WBWA (1 << 6)
/* Outer cacheability encodings (TTBR bits [4:3]) */
56 #define PT_OUTER_WT (2 << 3)
57 #define PT_OUTER_WB (3 << 3)
58 #define PT_OUTER_WBWA (1 << 3)
/* Shareable, inner/outer write-back write-allocate (SMP variant, presumably) */
61 #define PT_ATTR (PT_S|PT_INNER_WBWA|PT_OUTER_WBWA|PT_NOS)
/* Inner/outer write-back write-allocate only (UP variant, presumably) */
63 #define PT_ATTR (PT_INNER_WBWA|PT_OUTER_WBWA)
/*
 * Fragment of a set-TTB routine (its ENTRY/END lines are not visible in
 * this excerpt): write back and invalidate the caches, load the new table
 * base into TTBR0 (CP15 c2,c0,0), then invalidate the TLBs. Both the
 * Inner Shareable and local TLB-invalidate forms appear; presumably they
 * are SMP/!SMP alternatives guarded by elided preprocessor lines -- confirm.
 */
68 bl _C_LABEL(armv7_idcache_wbinv_all) /* clean the D cache */
73 mcr p15, 0, r0, c2, c0, 0 /* Translation Table Base Register 0 (TTBR0) */
76 mcr p15, 0, r0, c8, c3, 0 /* invalidate I+D TLBs Inner Shareable*/
78 mcr p15, 0, r0, c8, c7, 0 /* invalidate I+D TLBs */
/*
 * armv7_tlb_flushID: invalidate the entire unified TLB and the branch
 * predictor.  Both the Inner Shareable (c8,c3,0 / c7,c1,6) and the local
 * (c8,c7,0 / c7,c5,6) encodings appear; presumably these are SMP/!SMP
 * alternatives whose guard lines are elided in this excerpt -- confirm.
 */
85 ENTRY(armv7_tlb_flushID)
88 mcr p15, 0, r0, c8, c3, 0 /* flush Unified TLB all entries Inner Shareable */
89 mcr p15, 0, r0, c7, c1, 6 /* flush BTB Inner Shareable */
91 mcr p15, 0, r0, c8, c7, 0 /* flush Unified TLB all entries */
92 mcr p15, 0, r0, c7, c5, 6 /* flush BTB */
97 END(armv7_tlb_flushID)
/*
 * armv7_tlb_flushID_SE: invalidate a single unified TLB entry (r0 holds
 * the virtual address, per the "single entry ... with VA" encodings) and
 * the branch predictor.  Inner Shareable and local forms both appear;
 * presumably SMP/!SMP alternatives with the guard lines elided -- confirm.
 */
99 ENTRY(armv7_tlb_flushID_SE)
103 mcr p15, 0, r0, c8, c3, 3 /* flush Unified TLB single entry Inner Shareable */
104 mcr p15, 0, r0, c7, c1, 6 /* flush BTB Inner Shareable */
106 mcr p15, 0, r0, c8, c7, 1 /* flush Unified TLB single entry */
107 mcr p15, 0, r0, c7, c5, 6 /* flush BTB */
112 END(armv7_tlb_flushID_SE)
114 /* Based on algorithm from ARM Architecture Reference Manual */
/*
 * armv7_dcache_wbinv_all: clean and invalidate the entire data cache by
 * set/way (DCCISW, CP15 c7,c14,2), iterating over cache levels, ways and
 * sets.  Uses r4-r9 as scratch, saved/restored on the stack.
 * NOTE(review): the level/way/set loop-control instructions between the
 * lines below are elided in this excerpt -- do not modify this routine
 * without consulting the complete source.
 */
115 ENTRY(armv7_dcache_wbinv_all)
116 stmdb sp!, {r4, r5, r6, r7, r8, r9}
118 /* Get cache level */
119 ldr r0, .Lcoherency_level
123 /* For each cache level */
126 /* Get cache type for given level */
136 /* Get number of ways */
138 ands r4, r4, r1, lsr #3
143 ands r7, r7, r1, lsr #13
/* Compose the set/way operand: level | (way << way-shift) | (set << set-shift) */
148 orr r6, r6, r9, lsl r5
149 orr r6, r6, r7, lsl r2
151 /* Clean and invalidate data cache by way/index */
152 mcr p15, 0, r6, c7, c14, 2
163 ldmia sp!, {r4, r5, r6, r7, r8, r9}
165 END(armv7_dcache_wbinv_all)
/*
 * armv7_idcache_wbinv_all: write back and invalidate the data cache
 * (via armv7_dcache_wbinv_all), then invalidate all instruction caches
 * to the Point of Unification.  Both ICIALLUIS and ICIALLU appear;
 * presumably SMP/!SMP alternatives with elided guard lines -- confirm.
 */
167 ENTRY(armv7_idcache_wbinv_all)
169 bl armv7_dcache_wbinv_all
171 mcr p15, 0, r0, c7, c1, 0 /* Invalidate all I caches to PoU (ICIALLUIS) */
173 mcr p15, 0, r0, c7, c5, 0 /* Invalidate all I caches to PoU (ICIALLU) */
179 END(armv7_idcache_wbinv_all)
181 /* XXX Temporarily set to 32 for MV cores; however, this value should be
182 * obtained from the Cache Type register
/*
 * armv7_dcache_wb_range: clean (write back, DCCMVAC) the data-cache lines
 * covering a virtual-address range, then dsb so the writes are visible.
 * NOTE(review): the address/length alignment setup and the loop branch
 * around the mcr are elided in this excerpt -- confirm against the full
 * file.  ip is loaded with the cache line size.
 */
187 ENTRY(armv7_dcache_wb_range)
188 ldr ip, .Larmv7_line_size
194 mcr p15, 0, r0, c7, c10, 1 /* Clean D cache SE with VA */
198 dsb /* data synchronization barrier */
200 END(armv7_dcache_wb_range)
/*
 * armv7_dcache_wbinv_range: clean and invalidate (DCCIMVAC) the data-cache
 * lines covering a virtual-address range, looping via .Larmv7_wbinv_next,
 * then dsb.  NOTE(review): the loop setup (alignment, increment, compare
 * feeding the bhi) is elided in this excerpt -- confirm against the full
 * file.
 */
202 ENTRY(armv7_dcache_wbinv_range)
203 ldr ip, .Larmv7_line_size
209 mcr p15, 0, r0, c7, c14, 1 /* Purge D cache SE with VA */
212 bhi .Larmv7_wbinv_next
213 dsb /* data synchronization barrier */
215 END(armv7_dcache_wbinv_range)
218 * Note, we must not invalidate everything. If the range is too big we
219 * must use wb-inv of the entire cache.
/*
 * armv7_dcache_inv_range: invalidate (without cleaning, DCIMVAC) the
 * data-cache lines covering a virtual-address range, then dsb.  Per the
 * note above, callers must not let this discard dirty lines outside the
 * range.  NOTE(review): the loop setup and branch are elided in this
 * excerpt -- confirm against the full file.
 */
221 ENTRY(armv7_dcache_inv_range)
222 ldr ip, .Larmv7_line_size
228 mcr p15, 0, r0, c7, c6, 1 /* Invalidate D cache SE with VA */
232 dsb /* data synchronization barrier */
234 END(armv7_dcache_inv_range)
/*
 * armv7_idcache_wbinv_range: for each line of a virtual-address range,
 * invalidate the I-cache line (ICIMVAU) and clean+invalidate the D-cache
 * line (DCCIMVAC); finish with isb/dsb.  NOTE(review): the loop setup
 * (alignment, increment, compare feeding the bhi) is elided in this
 * excerpt -- confirm against the full file.
 */
236 ENTRY(armv7_idcache_wbinv_range)
237 ldr ip, .Larmv7_line_size
242 .Larmv7_id_wbinv_next:
243 mcr p15, 0, r0, c7, c5, 1 /* Invalidate I cache SE with VA */
244 mcr p15, 0, r0, c7, c14, 1 /* Purge D cache SE with VA */
247 bhi .Larmv7_id_wbinv_next
248 isb /* instruction synchronization barrier */
249 dsb /* data synchronization barrier */
251 END(armv7_idcache_wbinv_range)
/*
 * armv7_icache_sync_all: invalidate all instruction caches to the Point
 * of Unification, then isb/dsb.  Both ICIALLUIS (Inner Shareable) and
 * ICIALLU appear; presumably SMP/!SMP alternatives with elided guard
 * lines -- confirm against the full file.
 */
253 ENTRY_NP(armv7_icache_sync_all)
255 mcr p15, 0, r0, c7, c1, 0 /* Invalidate all I cache to PoU Inner Shareable */
257 mcr p15, 0, r0, c7, c5, 0 /* Invalidate all I cache to PoU (ICIALLU) */
259 isb /* instruction synchronization barrier */
260 dsb /* data synchronization barrier */
262 END(armv7_icache_sync_all)
/*
 * armv7_icache_sync_range: make instructions written through the D side
 * visible to the I side for a virtual-address range -- per line, invalidate
 * the I-cache line (ICIMVAU) and clean the D-cache line (DCCMVAC), looping
 * via .Larmv7_sync_next, then isb/dsb.  NOTE(review): the loop label and
 * setup lines are elided in this excerpt -- confirm against the full file.
 */
264 ENTRY_NP(armv7_icache_sync_range)
265 ldr ip, .Larmv7_line_size
267 mcr p15, 0, r0, c7, c5, 1 /* Invalidate I cache SE with VA */
268 mcr p15, 0, r0, c7, c10, 1 /* Clean D cache SE with VA */
271 bhi .Larmv7_sync_next
272 isb /* instruction synchronization barrier */
273 dsb /* data synchronization barrier */
275 END(armv7_icache_sync_range)
/*
 * armv7_cpu_sleep: drain outstanding memory accesses (dsb) and enter
 * low-power state until an interrupt arrives (wfi).
 * NOTE(review): the routine's remaining lines (return / END) are elided
 * in this excerpt -- confirm against the full file.
 */
277 ENTRY(armv7_cpu_sleep)
278 dsb /* data synchronization barrier */
279 wfi /* wait for interrupt */
/*
 * armv7_context_switch: install a new translation table base (TTBR0,
 * CP15 c2,c0,0) and invalidate the TLBs.  Both the Inner Shareable and
 * local TLB-invalidate forms appear; presumably SMP/!SMP alternatives
 * with elided guard lines -- confirm against the full file, which also
 * elides the lines surrounding these (barriers, PT_ATTR merge, return).
 */
283 ENTRY(armv7_context_switch)
287 mcr p15, 0, r0, c2, c0, 0 /* set the new TTB */
290 mcr p15, 0, r0, c8, c3, 0 /* and flush the I+D tlbs Inner Sharable */
292 mcr p15, 0, r0, c8, c7, 0 /* and flush the I+D tlbs */
297 END(armv7_context_switch)
/*
 * armv7_drain_writebuf: drain the write buffer.
 * NOTE(review): the body (presumably a dsb and return) is elided in this
 * excerpt -- confirm against the full file.
 */
299 ENTRY(armv7_drain_writebuf)
302 END(armv7_drain_writebuf)
/*
 * Fragment of an auxiliary-control-register update routine (its ENTRY
 * line is not visible in this excerpt): read ACTLR (CP15 c1,c0,1), clear
 * the bits given in r0, XOR in the bits given in r1, and conditionally
 * write the result back.  NOTE(review): the mcrne implies a preceding
 * compare that is elided here -- presumably the write happens only when
 * the value changed; confirm against the full file.
 */
312 mrc p15, 0, r2, c1, c0, 1
313 bic r3, r2, r0 /* Clear bits */
314 eor r3, r3, r1 /* XOR bits */
317 mcrne p15, 0, r3, c1, c0, 1
323 * Invalidate all I+D+branch cache. Used by startup code, which counts
324 * on the fact that only r0-r3,ip are modified and no stack space is used.
/*
 * armv7_idcache_inv_all: read the L1 CCSIDR and invalidate the L1 data
 * cache by set/way (DCISW, c7,c6,2), walking a combined way/set counter
 * in r3 downward; then invalidate the instruction+branch caches.
 * NOTE(review): several lines are elided in this excerpt (e.g. the
 * initial "mov ip, #1"-style way-decrement seed before the lsl at line
 * 336, and the loop's branch back to label 1) -- consult the complete
 * source before modifying.
 */
326 ENTRY(armv7_idcache_inv_all)
328 mcr p15, 2, r0, c0, c0, 0 @ set cache level to L1
329 mrc p15, 1, r0, c0, c0, 0 @ read CCSIDR
331 ubfx r2, r0, #13, #15 @ get num sets - 1 from CCSIDR
332 ubfx r3, r0, #3, #10 @ get numways - 1 from CCSIDR
333 clz r1, r3 @ number of bits to MSB of way
334 lsl r3, r3, r1 @ shift into position
336 lsl ip, ip, r1 @ ip now contains the way decr
338 ubfx r0, r0, #0, #3 @ get linesize from CCSIDR
339 add r0, r0, #4 @ apply bias
340 lsl r2, r2, r0 @ shift sets by log2(linesize)
341 add r3, r3, r2 @ merge numsets - 1 with numways - 1
342 sub ip, ip, r2 @ subtract numsets - 1 from way decr
344 lsl r1, r1, r0 @ r1 now contains the set decr
345 mov r2, ip @ r2 now contains set way decr
347 /* r3 = ways/sets, r2 = way decr, r1 = set decr, r0 and ip are free */
348 1: mcr p15, 0, r3, c7, c6, 2 @ invalidate line
349 movs r0, r3 @ get current way/set
350 beq 2f @ at 0 means we are done.
351 movs r0, r0, lsl #10 @ clear way bits leaving only set bits
352 subne r3, r3, r1 @ non-zero?, decrement set #
353 subeq r3, r3, r2 @ zero?, decrement way # and restore set count
356 2: dsb @ wait for stores to finish
358 mcr p15, 0, r0, c7, c5, 0 @ invalidate instruction+branch cache
359 isb @ instruction sync barrier
361 END(armv7_idcache_inv_all)
363 ENTRY_NP(armv7_sleep)