2 * Copyright (c) 2001 Jake Burkholder.
3 * Copyright (c) 2006 Kip Macy
6 * Redistribution and use in source and binary forms, with or without
7 * modification, are permitted provided that the following conditions
9 * 1. Redistributions of source code must retain the above copyright
10 * notice, this list of conditions and the following disclaimer.
11 * 2. Redistributions in binary form must reproduce the above copyright
12 * notice, this list of conditions and the following disclaimer in the
13 * documentation and/or other materials provided with the distribution.
15 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
16 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
17 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
18 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
19 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
20 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
21 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
22 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
23 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
24 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
30 #ifndef _MACHINE_ASMACROS_H_
31 #define _MACHINE_ASMACROS_H_
36 * %g7 points to per-cpu data.
44 * Atomically decrement an integer in memory.
46 #define ATOMIC_DEC_INT(r1, r2, r3) \
49 casa [r1] ASI_N, r2, r3 ; \
55 * Atomically increment an integer in memory.
57 #define ATOMIC_INC_INT(r1, r2, r3) \
60 casa [r1] ASI_N, r2, r3 ; \
66 * Atomically increment an u_long in memory.
68 #define ATOMIC_INC_ULONG(r1, r2, r3) \
71 casxa [r1] ASI_N, r2, r3 ; \
77 * Atomically clear a number of bits of an integer in memory.
79 #define ATOMIC_CLEAR_INT(r1, r2, r3, bits) \
81 9: andn r2, bits, r3 ; \
82 casa [r1] ASI_N, r2, r3 ; \
/*
 * PCPU(member): address expression for a per-CPU structure field,
 * formed as PCPU_REG (%g7, the per-CPU data pointer) plus the
 * generated offset constant PC_<member>.
 */
87 #define PCPU(member) PCPU_REG + PC_ ## member
/*
 * PCPU_ADDR(member, reg): compute the address of a per-CPU field
 * into reg (reg = PCPU_REG + PC_<member>).
 */
88 #define PCPU_ADDR(member, reg) \
89 add PCPU_REG, PC_ ## member, reg
94 #define PANIC(msg, r1) \
103 #define KASSERT(r1, msg) \
109 #define KASSERT(r1, msg)
112 #define PUTS(msg, r1) \
/* Default alignment for data items: 8 bytes (doubleword). */
120 #define _ALIGN_DATA .align 8
126 .type name, @object ; \
/*
 * Load the MMU fault status area pointer into reg from scratchpad
 * slot 0 ([%g0 + %g0] == offset 0 in ASI_SCRATCHPAD).
 */
131 #define GET_MMFSA_SCRATCH(reg) \
132 ldxa [%g0 + %g0]ASI_SCRATCHPAD, reg;
135 #define GET_PCPU_PHYS_SCRATCH(tmp) \
136 sethi %uhi(VM_MIN_DIRECT_ADDRESS), tmp; \
137 mov SCRATCH_REG_PCPU, PCPU_REG; \
139 ldxa [%g0 + PCPU_REG]ASI_SCRATCHPAD, PCPU_REG; \
140 andn PCPU_REG, tmp, PCPU_REG
/*
 * Load the per-CPU data pointer into PCPU_REG (%g7) from the
 * scratchpad slot selected by SCRATCH_REG_PCPU.  PCPU_REG is first
 * used to stage the slot offset, then overwritten by the load, so
 * no other register is clobbered.
 */
142 #define GET_PCPU_SCRATCH \
143 mov SCRATCH_REG_PCPU, PCPU_REG; \
144 ldxa [%g0 + PCPU_REG]ASI_SCRATCHPAD, PCPU_REG;
/*
 * As GET_PCPU_SCRATCH, but stages the scratchpad slot offset in the
 * caller-supplied reg (which is clobbered) instead of PCPU_REG.
 * The per-CPU pointer still lands in PCPU_REG.
 */
146 #define GET_PCPU_SCRATCH_SLOW(reg) \
147 mov SCRATCH_REG_PCPU, reg; \
148 ldxa [reg]ASI_SCRATCHPAD, PCPU_REG;
/*
 * Load the per-CPU user hash value (presumably the user TTE hash
 * table base -- confirm against the scratchpad setup code) into reg
 * from scratchpad slot SCRATCH_REG_HASH_USER.  reg stages the slot
 * offset and is then overwritten by the result.
 */
150 #define GET_HASH_SCRATCH_USER(reg) \
151 mov SCRATCH_REG_HASH_USER, reg; \
152 ldxa [%g0 + reg]ASI_SCRATCHPAD, reg;
/*
 * Load the per-CPU kernel hash value (presumably the kernel TTE hash
 * table base -- confirm against the scratchpad setup code) into reg
 * from scratchpad slot SCRATCH_REG_HASH_KERNEL.  reg stages the slot
 * offset and is then overwritten by the result.
 */
154 #define GET_HASH_SCRATCH_KERNEL(reg) \
155 mov SCRATCH_REG_HASH_KERNEL, reg; \
156 ldxa [%g0 + reg]ASI_SCRATCHPAD, reg;
158 #define GET_HASH_PHYS_SCRATCH_USER(tmp, reg) \
159 sethi %uhi(VM_MIN_DIRECT_ADDRESS), tmp; \
160 mov SCRATCH_REG_HASH_USER, reg; \
162 ldxa [%g0 + reg]ASI_SCRATCHPAD, reg; \
165 #define GET_HASH_PHYS_SCRATCH_KERNEL(tmp, reg) \
166 sethi %uhi(VM_MIN_DIRECT_ADDRESS), tmp; \
167 mov SCRATCH_REG_HASH_KERNEL, reg; \
169 ldxa [%g0 + reg]ASI_SCRATCHPAD, reg; \
/*
 * Load the per-CPU user TSB value from scratchpad slot
 * SCRATCH_REG_TSB_USER into reg (reg stages the slot offset and is
 * overwritten by the result).
 */
174 #define GET_TSB_SCRATCH_USER(reg) \
175 mov SCRATCH_REG_TSB_USER, reg; \
176 ldxa [%g0 + reg]ASI_SCRATCHPAD, reg;
/*
 * Load the per-CPU kernel TSB value from scratchpad slot
 * SCRATCH_REG_TSB_KERNEL into reg (reg stages the slot offset and is
 * overwritten by the result).
 */
178 #define GET_TSB_SCRATCH_KERNEL(reg) \
179 mov SCRATCH_REG_TSB_KERNEL, reg; \
180 ldxa [%g0 + reg]ASI_SCRATCHPAD, reg;
/*
 * Store reg into the scratchpad slot whose byte offset is held in
 * offsetreg (inverse of the GET_*_SCRATCH loads above).
 */
182 #define SET_SCRATCH(offsetreg, reg) stxa reg, [%g0 + offsetreg]ASI_SCRATCHPAD
185 #define GET_PCB_PHYS(tmp, reg) \
186 mov PC_CURPCB, reg; \
187 GET_PCPU_PHYS_SCRATCH(tmp); \
188 ldxa [PCPU_REG + reg]ASI_REAL, reg; \
192 #define GET_PCB(reg) \
194 ldx [PCPU_REG + PC_CURPCB], reg;
/*
 * Write reg into the MMU context register selected by typereg
 * (primary vs. secondary context offset) via ASI_MMU_CONTEXTID.
 */
196 #define SET_MMU_CONTEXT(typereg, reg) stxa reg, [typereg]ASI_MMU_CONTEXTID
/*
 * Read the MMU context register selected by typereg into reg
 * via ASI_MMU_CONTEXTID (inverse of SET_MMU_CONTEXT).
 */
197 #define GET_MMU_CONTEXT(typereg, reg) ldxa [typereg]ASI_MMU_CONTEXTID, reg
/*
 * Save the global registers %g1..%g6 into the trapframe at TF.
 * %g7 is deliberately not saved here: it holds the per-CPU data
 * pointer in kernel context (see the note at the top of this file)
 * and is handled separately by the RESTORE_GLOBALS_* variants.
 */
201 #define SAVE_GLOBALS(TF) \
202 stx %g1, [TF + TF_G1]; \
203 stx %g2, [TF + TF_G2]; \
204 stx %g3, [TF + TF_G3]; \
205 stx %g4, [TF + TF_G4]; \
206 stx %g5, [TF + TF_G5]; \
207 stx %g6, [TF + TF_G6];
/*
 * Restore %g1..%g7 from the trapframe at TF when returning to user
 * mode: the user's own %g7 is reloaded from TF_G7 (unlike the kernel
 * variant below, which must keep %g7 as the per-CPU pointer).
 */
209 #define RESTORE_GLOBALS_USER(TF) \
210 ldx [TF + TF_G1], %g1; \
211 ldx [TF + TF_G2], %g2; \
212 ldx [TF + TF_G3], %g3; \
213 ldx [TF + TF_G4], %g4; \
214 ldx [TF + TF_G5], %g5; \
215 ldx [TF + TF_G6], %g6; \
216 ldx [TF + TF_G7], %g7;
/*
 * Restore %g1..%g6 from the trapframe at TF when returning to kernel
 * mode.  %g7 is NOT taken from the trapframe: it is reloaded from the
 * SCRATCH_REG_PCPU scratchpad slot so that it again holds the per-CPU
 * data pointer.  %g7 stages the slot offset first, then the final
 * ldxa overwrites it with the per-CPU pointer.
 */
218 #define RESTORE_GLOBALS_KERNEL(TF) \
219 mov SCRATCH_REG_PCPU, %g7; \
220 ldx [TF + TF_G1], %g1; \
221 ldx [TF + TF_G2], %g2; \
222 ldx [TF + TF_G3], %g3; \
223 ldx [TF + TF_G4], %g4; \
224 ldx [TF + TF_G5], %g5; \
225 ldx [TF + TF_G6], %g6; \
226 ldxa [%g0 + %g7]ASI_SCRATCHPAD, %g7;
/*
 * Save the trapped context's out registers into the trapframe at TF.
 * The sources are %i0..%i7 because, after the trap handler's register-
 * window shift, the previous window's %o registers are visible here
 * as the %i registers.
 */
228 #define SAVE_OUTS(TF) \
229 stx %i0, [TF + TF_O0]; \
230 stx %i1, [TF + TF_O1]; \
231 stx %i2, [TF + TF_O2]; \
232 stx %i3, [TF + TF_O3]; \
233 stx %i4, [TF + TF_O4]; \
234 stx %i5, [TF + TF_O5]; \
235 stx %i6, [TF + TF_O6]; \
236 stx %i7, [TF + TF_O7];
/*
 * Restore the trapped context's out registers from the trapframe at
 * TF into %i0..%i7 (the previous window's %o registers as seen from
 * the trap handler's window) -- inverse of SAVE_OUTS.
 */
238 #define RESTORE_OUTS(TF) \
239 ldx [TF + TF_O0], %i0; \
240 ldx [TF + TF_O1], %i1; \
241 ldx [TF + TF_O2], %i2; \
242 ldx [TF + TF_O3], %i3; \
243 ldx [TF + TF_O4], %i4; \
244 ldx [TF + TF_O5], %i5; \
245 ldx [TF + TF_O6], %i6; \
246 ldx [TF + TF_O7], %i7;
/*
 * Spill the current register window -- 8 locals followed by 8 ins,
 * 16 doublewords (128 bytes) -- to the save area at SBP using normal
 * stores.  Layout matches the standard V9 window spill frame.
 */
249 #define SAVE_WINDOW(SBP) \
250 stx %l0, [SBP + (0*8)]; \
251 stx %l1, [SBP + (1*8)]; \
252 stx %l2, [SBP + (2*8)]; \
253 stx %l3, [SBP + (3*8)]; \
254 stx %l4, [SBP + (4*8)]; \
255 stx %l5, [SBP + (5*8)]; \
256 stx %l6, [SBP + (6*8)]; \
257 stx %l7, [SBP + (7*8)]; \
258 stx %i0, [SBP + (8*8)]; \
259 stx %i1, [SBP + (9*8)]; \
260 stx %i2, [SBP + (10*8)]; \
261 stx %i3, [SBP + (11*8)]; \
262 stx %i4, [SBP + (12*8)]; \
263 stx %i5, [SBP + (13*8)]; \
264 stx %i6, [SBP + (14*8)]; \
265 stx %i7, [SBP + (15*8)];
/*
 * As SAVE_WINDOW, but using stxa through the current %asi register,
 * so the caller selects the address space (e.g. user memory or a
 * physical/bypass ASI) by setting %asi beforehand.
 */
267 #define SAVE_WINDOW_ASI(SBP) \
268 stxa %l0, [SBP + (0*8)]%asi; \
269 stxa %l1, [SBP + (1*8)]%asi; \
270 stxa %l2, [SBP + (2*8)]%asi; \
271 stxa %l3, [SBP + (3*8)]%asi; \
272 stxa %l4, [SBP + (4*8)]%asi; \
273 stxa %l5, [SBP + (5*8)]%asi; \
274 stxa %l6, [SBP + (6*8)]%asi; \
275 stxa %l7, [SBP + (7*8)]%asi; \
276 stxa %i0, [SBP + (8*8)]%asi; \
277 stxa %i1, [SBP + (9*8)]%asi; \
278 stxa %i2, [SBP + (10*8)]%asi; \
279 stxa %i3, [SBP + (11*8)]%asi; \
280 stxa %i4, [SBP + (12*8)]%asi; \
281 stxa %i5, [SBP + (13*8)]%asi; \
282 stxa %i6, [SBP + (14*8)]%asi; \
283 stxa %i7, [SBP + (15*8)]%asi;
/*
 * Store the 8 local registers %l0..%l7 to SBP through the current
 * %asi (first half of a window spill; see SAVE_WINDOW_ASI).
 */
285 #define SAVE_LOCALS_ASI(SBP) \
286 stxa %l0, [SBP + (0*8)]%asi; \
287 stxa %l1, [SBP + (1*8)]%asi; \
288 stxa %l2, [SBP + (2*8)]%asi; \
289 stxa %l3, [SBP + (3*8)]%asi; \
290 stxa %l4, [SBP + (4*8)]%asi; \
291 stxa %l5, [SBP + (5*8)]%asi; \
292 stxa %l6, [SBP + (6*8)]%asi; \
293 stxa %l7, [SBP + (7*8)]%asi;
/*
 * Load the 8 local registers %l0..%l7 from SBP through the current
 * %asi -- inverse of SAVE_LOCALS_ASI.
 */
295 #define RESTORE_LOCALS_ASI(SBP) \
296 ldxa [SBP + (0*8)]%asi, %l0; \
297 ldxa [SBP + (1*8)]%asi, %l1; \
298 ldxa [SBP + (2*8)]%asi, %l2; \
299 ldxa [SBP + (3*8)]%asi, %l3; \
300 ldxa [SBP + (4*8)]%asi, %l4; \
301 ldxa [SBP + (5*8)]%asi, %l5; \
302 ldxa [SBP + (6*8)]%asi, %l6; \
303 ldxa [SBP + (7*8)]%asi, %l7;
/*
 * Store the 8 out registers %o0..%o7 (the CURRENT window's outs,
 * unlike SAVE_OUTS which uses %i registers) to SBP through the
 * current %asi.
 */
305 #define SAVE_OUTS_ASI(SBP) \
306 stxa %o0, [SBP + (0*8)]%asi; \
307 stxa %o1, [SBP + (1*8)]%asi; \
308 stxa %o2, [SBP + (2*8)]%asi; \
309 stxa %o3, [SBP + (3*8)]%asi; \
310 stxa %o4, [SBP + (4*8)]%asi; \
311 stxa %o5, [SBP + (5*8)]%asi; \
312 stxa %o6, [SBP + (6*8)]%asi; \
313 stxa %o7, [SBP + (7*8)]%asi;
/*
 * Load the 8 out registers %o0..%o7 from SBP through the current
 * %asi -- inverse of SAVE_OUTS_ASI.
 */
315 #define RESTORE_OUTS_ASI(SBP) \
316 ldxa [SBP + (0*8)]%asi, %o0; \
317 ldxa [SBP + (1*8)]%asi, %o1; \
318 ldxa [SBP + (2*8)]%asi, %o2; \
319 ldxa [SBP + (3*8)]%asi, %o3; \
320 ldxa [SBP + (4*8)]%asi, %o4; \
321 ldxa [SBP + (5*8)]%asi, %o5; \
322 ldxa [SBP + (6*8)]%asi, %o6; \
323 ldxa [SBP + (7*8)]%asi, %o7;
326 #define TTRACE_ADD_SAFE(SBP, arg0, arg1, arg2, arg3, arg4) \
327 SAVE_OUTS_ASI(SBP); \
333 RESTORE_OUTS_ASI(SBP);
340 #endif /* !_MACHINE_ASMACROS_H_ */