/*-
 * Copyright 2014 Svatopluk Kraus <onwahe@gmail.com>
 * Copyright 2014 Michal Meloun <meloun@miracle.cz>
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD$
 */
#ifndef MACHINE_CPU_V6_H
#define MACHINE_CPU_V6_H

#include <machine/atomic.h>
#include <machine/cpufunc.h>
#include <machine/cpuinfo.h>
#include <machine/sysreg.h>

#define CPU_ASID_KERNEL 0

/*
 * Macros to generate CP15 (system control processor) read/write functions.
 */
#define _FX(s...) #s

#define _RF0(fname, aname...)                                           \
static __inline register_t                                              \
fname(void)                                                             \
{                                                                       \
        register_t reg;                                                 \
        __asm __volatile("mrc\t" _FX(aname): "=r" (reg));               \
        return (reg);                                                   \
}

#define _WF0(fname, aname...)                                           \
static __inline void                                                    \
fname(void)                                                             \
{                                                                       \
        __asm __volatile("mcr\t" _FX(aname));                           \
}

#define _WF1(fname, aname...)                                           \
static __inline void                                                    \
fname(register_t reg)                                                   \
{                                                                       \
        __asm __volatile("mcr\t" _FX(aname):: "r" (reg));               \
}
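
/*
 * Illustration only (a sketch, never compiled): with the CP15_MIDR
 * operand encoding from <machine/sysreg.h>, _RF0(cp15_midr_get,
 * CP15_MIDR(%0)) expands to roughly the following inline accessor.
 * The variadic _FX stringifies the already-expanded operand list,
 * commas included:
 *
 *	static __inline register_t
 *	cp15_midr_get(void)
 *	{
 *		register_t reg;
 *		__asm __volatile("mrc\tp15, 0, %0, c0, c0, 0" : "=r" (reg));
 *		return (reg);
 *	}
 */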

/*
 * Raw CP15 maintenance operations
 * !!! not for external use !!!
 */

/* TLB */

_WF0(_CP15_TLBIALL, CP15_TLBIALL)               /* Invalidate entire unified TLB */
#if __ARM_ARCH >= 7 && defined SMP
_WF0(_CP15_TLBIALLIS, CP15_TLBIALLIS)           /* Invalidate entire unified TLB IS */
#endif
_WF1(_CP15_TLBIASID, CP15_TLBIASID(%0))         /* Invalidate unified TLB by ASID */
#if __ARM_ARCH >= 7 && defined SMP
_WF1(_CP15_TLBIASIDIS, CP15_TLBIASIDIS(%0))     /* Invalidate unified TLB by ASID IS */
#endif
_WF1(_CP15_TLBIMVAA, CP15_TLBIMVAA(%0))         /* Invalidate unified TLB by MVA, all ASID */
#if __ARM_ARCH >= 7 && defined SMP
_WF1(_CP15_TLBIMVAAIS, CP15_TLBIMVAAIS(%0))     /* Invalidate unified TLB by MVA, all ASID IS */
#endif
_WF1(_CP15_TLBIMVA, CP15_TLBIMVA(%0))           /* Invalidate unified TLB by MVA */

_WF1(_CP15_TTB_SET, CP15_TTBR0(%0))

/* Cache and Branch predictor */

_WF0(_CP15_BPIALL, CP15_BPIALL)                 /* Branch predictor invalidate all */
#if __ARM_ARCH >= 7 && defined SMP
_WF0(_CP15_BPIALLIS, CP15_BPIALLIS)             /* Branch predictor invalidate all IS */
#endif
_WF1(_CP15_BPIMVA, CP15_BPIMVA(%0))             /* Branch predictor invalidate by MVA */
_WF1(_CP15_DCCIMVAC, CP15_DCCIMVAC(%0))         /* Data cache clean and invalidate by MVA PoC */
_WF1(_CP15_DCCISW, CP15_DCCISW(%0))             /* Data cache clean and invalidate by set/way */
_WF1(_CP15_DCCMVAC, CP15_DCCMVAC(%0))           /* Data cache clean by MVA PoC */
#if __ARM_ARCH >= 7
_WF1(_CP15_DCCMVAU, CP15_DCCMVAU(%0))           /* Data cache clean by MVA PoU */
#endif
_WF1(_CP15_DCCSW, CP15_DCCSW(%0))               /* Data cache clean by set/way */
_WF1(_CP15_DCIMVAC, CP15_DCIMVAC(%0))           /* Data cache invalidate by MVA PoC */
_WF1(_CP15_DCISW, CP15_DCISW(%0))               /* Data cache invalidate by set/way */
_WF0(_CP15_ICIALLU, CP15_ICIALLU)               /* Instruction cache invalidate all PoU */
#if __ARM_ARCH >= 7 && defined SMP
_WF0(_CP15_ICIALLUIS, CP15_ICIALLUIS)           /* Instruction cache invalidate all PoU IS */
#endif
_WF1(_CP15_ICIMVAU, CP15_ICIMVAU(%0))           /* Instruction cache invalidate by MVA PoU */

/*
 * Publicly accessible functions
 */

/* Various control registers */

_RF0(cp15_dfsr_get, CP15_DFSR(%0))
_RF0(cp15_ifsr_get, CP15_IFSR(%0))
_WF1(cp15_prrr_set, CP15_PRRR(%0))
_WF1(cp15_nmrr_set, CP15_NMRR(%0))
_RF0(cp15_ttbr_get, CP15_TTBR0(%0))
_RF0(cp15_dfar_get, CP15_DFAR(%0))
#if __ARM_ARCH >= 7
_RF0(cp15_ifar_get, CP15_IFAR(%0))
#endif

/* CPU ID registers */
_RF0(cp15_midr_get, CP15_MIDR(%0))
_RF0(cp15_ctr_get, CP15_CTR(%0))
_RF0(cp15_tcmtr_get, CP15_TCMTR(%0))
_RF0(cp15_tlbtr_get, CP15_TLBTR(%0))
_RF0(cp15_mpidr_get, CP15_MPIDR(%0))
_RF0(cp15_revidr_get, CP15_REVIDR(%0))
_RF0(cp15_aidr_get, CP15_AIDR(%0))
_RF0(cp15_id_pfr0_get, CP15_ID_PFR0(%0))
_RF0(cp15_id_pfr1_get, CP15_ID_PFR1(%0))
_RF0(cp15_id_dfr0_get, CP15_ID_DFR0(%0))
_RF0(cp15_id_afr0_get, CP15_ID_AFR0(%0))
_RF0(cp15_id_mmfr0_get, CP15_ID_MMFR0(%0))
_RF0(cp15_id_mmfr1_get, CP15_ID_MMFR1(%0))
_RF0(cp15_id_mmfr2_get, CP15_ID_MMFR2(%0))
_RF0(cp15_id_mmfr3_get, CP15_ID_MMFR3(%0))
_RF0(cp15_id_isar0_get, CP15_ID_ISAR0(%0))
_RF0(cp15_id_isar1_get, CP15_ID_ISAR1(%0))
_RF0(cp15_id_isar2_get, CP15_ID_ISAR2(%0))
_RF0(cp15_id_isar3_get, CP15_ID_ISAR3(%0))
_RF0(cp15_id_isar4_get, CP15_ID_ISAR4(%0))
_RF0(cp15_id_isar5_get, CP15_ID_ISAR5(%0))
_RF0(cp15_cbar_get, CP15_CBAR(%0))

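/*
 * Illustration only: on ARMv7, where CTR uses the 0b100 register format,
 * the minimum D-cache line size in bytes can be derived from CTR.DminLine
 * (bits [19:16], log2 of the line size in words):
 *
 *	u_int dcache_min_line = 4 << ((cp15_ctr_get() >> 16) & 0xf);
 */
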
/* Performance Monitor registers */

#if __ARM_ARCH == 6 && defined(CPU_ARM1176)
_RF0(cp15_pmccntr_get, CP15_PMCCNTR(%0))
_WF1(cp15_pmccntr_set, CP15_PMCCNTR(%0))
#elif __ARM_ARCH > 6
_RF0(cp15_pmcr_get, CP15_PMCR(%0))
_WF1(cp15_pmcr_set, CP15_PMCR(%0))
_RF0(cp15_pmcnten_get, CP15_PMCNTENSET(%0))
_WF1(cp15_pmcnten_set, CP15_PMCNTENSET(%0))
_WF1(cp15_pmcnten_clr, CP15_PMCNTENCLR(%0))
_RF0(cp15_pmovsr_get, CP15_PMOVSR(%0))
_WF1(cp15_pmovsr_set, CP15_PMOVSR(%0))
_WF1(cp15_pmswinc_set, CP15_PMSWINC(%0))
_RF0(cp15_pmselr_get, CP15_PMSELR(%0))
_WF1(cp15_pmselr_set, CP15_PMSELR(%0))
_RF0(cp15_pmccntr_get, CP15_PMCCNTR(%0))
_WF1(cp15_pmccntr_set, CP15_PMCCNTR(%0))
_RF0(cp15_pmxevtyper_get, CP15_PMXEVTYPER(%0))
_WF1(cp15_pmxevtyper_set, CP15_PMXEVTYPER(%0))
_RF0(cp15_pmxevcntr_get, CP15_PMXEVCNTRR(%0))
_WF1(cp15_pmxevcntr_set, CP15_PMXEVCNTRR(%0))
_RF0(cp15_pmuserenr_get, CP15_PMUSERENR(%0))
_WF1(cp15_pmuserenr_set, CP15_PMUSERENR(%0))
_RF0(cp15_pminten_get, CP15_PMINTENSET(%0))
_WF1(cp15_pminten_set, CP15_PMINTENSET(%0))
_WF1(cp15_pminten_clr, CP15_PMINTENCLR(%0))
#endif
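
/*
 * Illustration only (ARMv7, assumes PL1 access and that no PMU driver
 * already owns the counters): a minimal cycle-count sketch.  PMCR bit 0
 * (E) enables the counters; PMCNTENSET bit 31 (C) enables PMCCNTR:
 *
 *	cp15_pmcr_set(cp15_pmcr_get() | 1);
 *	cp15_pmcnten_set(1U << 31);
 *	... code under measurement ...
 *	cycles = cp15_pmccntr_get();
 */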

#undef  _FX
#undef  _RF0
#undef  _WF0
#undef  _WF1

/*
 * TLB maintenance operations.
 */

/* Local (i.e. not broadcasting) operations. */

/* Flush all TLB entries (even global). */
static __inline void
tlb_flush_all_local(void)
{

        dsb();
        _CP15_TLBIALL();
        dsb();
}

/* Flush all non-global TLB entries. */
static __inline void
tlb_flush_all_ng_local(void)
{

        dsb();
        _CP15_TLBIASID(CPU_ASID_KERNEL);
        dsb();
}

/* Flush a single TLB entry (even global). */
static __inline void
tlb_flush_local(vm_offset_t sva)
{

        dsb();
        _CP15_TLBIMVA((sva & ~PAGE_MASK) | CPU_ASID_KERNEL);
        dsb();
}

/* Flush a range of TLB entries (even global). */
static __inline void
tlb_flush_range_local(vm_offset_t sva, vm_size_t size)
{
        vm_offset_t va;
        vm_offset_t eva = sva + size;

        dsb();
        for (va = sva; va < eva; va += PAGE_SIZE)
                _CP15_TLBIMVA((va & ~PAGE_MASK) | CPU_ASID_KERNEL);
        dsb();
}
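
/*
 * Illustrative (hypothetical) caller: after the kernel page table entry
 * for 'va' has been modified, the stale TLB entry must be flushed before
 * the new mapping is relied upon:
 *
 *	*ptep = new_pte;	(hypothetical PTE store)
 *	tlb_flush_local(va);
 */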

/* Broadcasting operations. */
#if __ARM_ARCH >= 7 && defined SMP

static __inline void
tlb_flush_all(void)
{

        dsb();
        _CP15_TLBIALLIS();
        dsb();
}

static __inline void
tlb_flush_all_ng(void)
{

        dsb();
        _CP15_TLBIASIDIS(CPU_ASID_KERNEL);
        dsb();
}

static __inline void
tlb_flush(vm_offset_t sva)
{

        dsb();
        _CP15_TLBIMVAAIS(sva);
        dsb();
}

static __inline void
tlb_flush_range(vm_offset_t sva, vm_size_t size)
{
        vm_offset_t va;
        vm_offset_t eva = sva + size;

        dsb();
        for (va = sva; va < eva; va += PAGE_SIZE)
                _CP15_TLBIMVAAIS(va);
        dsb();
}
#else /* SMP */

#define tlb_flush_all()                 tlb_flush_all_local()
#define tlb_flush_all_ng()              tlb_flush_all_ng_local()
#define tlb_flush(sva)                  tlb_flush_local(sva)
#define tlb_flush_range(sva, size)      tlb_flush_range_local(sva, size)

#endif /* SMP */

/*
 * Cache maintenance operations.
 */

/* Sync I and D caches to PoU */
static __inline void
icache_sync(vm_offset_t sva, vm_size_t size)
{
        vm_offset_t va;
        vm_offset_t eva = sva + size;

        dsb();
        for (va = sva; va < eva; va += arm_dcache_align) {
#if __ARM_ARCH >= 7 && defined SMP
                _CP15_DCCMVAU(va);
#else
                _CP15_DCCMVAC(va);
#endif
        }
        dsb();
#if __ARM_ARCH >= 7 && defined SMP
        _CP15_ICIALLUIS();
#else
        _CP15_ICIALLU();
#endif
        dsb();
        isb();
}
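
/*
 * Illustrative (hypothetical) use: after writing freshly generated
 * instructions (e.g. a trampoline) to 'va', make them visible to the
 * instruction stream before executing them:
 *
 *	memcpy((void *)va, code, len);
 *	icache_sync(va, len);
 */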

/* Invalidate I cache */
static __inline void
icache_inv_all(void)
{
#if __ARM_ARCH >= 7 && defined SMP
        _CP15_ICIALLUIS();
#else
        _CP15_ICIALLU();
#endif
        dsb();
        isb();
}

/* Write back D-cache to PoU */
static __inline void
dcache_wb_pou(vm_offset_t sva, vm_size_t size)
{
        vm_offset_t va;
        vm_offset_t eva = sva + size;

        dsb();
        for (va = sva; va < eva; va += arm_dcache_align) {
#if __ARM_ARCH >= 7 && defined SMP
                _CP15_DCCMVAU(va);
#else
                _CP15_DCCMVAC(va);
#endif
        }
        dsb();
}

/* Invalidate D-cache to PoC */
static __inline void
dcache_inv_poc(vm_offset_t sva, vm_paddr_t pa, vm_size_t size)
{
        vm_offset_t va;
        vm_offset_t eva = sva + size;

        /* invalidate L1 first */
        for (va = sva; va < eva; va += arm_dcache_align) {
                _CP15_DCIMVAC(va);
        }
        dsb();

        /* then L2 */
        cpu_l2cache_inv_range(pa, size);
        dsb();

        /* then L1 again */
        for (va = sva; va < eva; va += arm_dcache_align) {
                _CP15_DCIMVAC(va);
        }
        dsb();
}

/* Write back D-cache to PoC */
static __inline void
dcache_wb_poc(vm_offset_t sva, vm_paddr_t pa, vm_size_t size)
{
        vm_offset_t va;
        vm_offset_t eva = sva + size;

        dsb();

        for (va = sva; va < eva; va += arm_dcache_align) {
                _CP15_DCCMVAC(va);
        }
        dsb();

        cpu_l2cache_wb_range(pa, size);
}

/* Write back and invalidate D-cache to PoC */
static __inline void
dcache_wbinv_poc(vm_offset_t sva, vm_paddr_t pa, vm_size_t size)
{
        vm_offset_t va;
        vm_offset_t eva = sva + size;

        dsb();

        /* write back L1 first */
        for (va = sva; va < eva; va += arm_dcache_align) {
                _CP15_DCCMVAC(va);
        }
        dsb();

        /* then write back and invalidate L2 */
        cpu_l2cache_wbinv_range(pa, size);

        /* then invalidate L1 */
        for (va = sva; va < eva; va += arm_dcache_align) {
                _CP15_DCIMVAC(va);
        }
        dsb();
}
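
/*
 * Illustrative (hypothetical) DMA buffer maintenance using the PoC
 * operations above; 'va' and 'pa' describe the same buffer:
 *
 *	dcache_wb_poc(va, pa, size);	(CPU wrote data, device will read)
 *	... start device-read DMA ...
 *
 *	dcache_inv_poc(va, pa, size);	(device wrote data, CPU will read)
 *	... CPU may now read the fresh data ...
 */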

/* Set TTB0 register */
static __inline void
cp15_ttbr_set(uint32_t reg)
{
        dsb();
        _CP15_TTB_SET(reg);
        dsb();
        _CP15_BPIALL();
        dsb();
        isb();
        tlb_flush_all_ng_local();
}

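/*
 * Illustrative (hypothetical) use: installing a new address space on
 * context switch, where 'new_ttb' is the physical address of the new
 * L1 translation table with any required TTB attribute bits merged in:
 *
 *	cp15_ttbr_set(new_ttb);
 */
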
#endif /* !MACHINE_CPU_V6_H */