2 * Copyright (c) 2014 Andrew Turner
5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions
8 * 1. Redistributions of source code must retain the above copyright
9 * notice, this list of conditions and the following disclaimer.
10 * 2. Redistributions in binary form must reproduce the above copyright
11 * notice, this list of conditions and the following disclaimer in the
12 * documentation and/or other materials provided with the distribution.
14 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
15 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
16 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
17 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
18 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
19 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
20 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
21 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
22 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
23 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
28 #include <machine/asm.h>
29 #include <machine/armreg.h>
30 __FBSDID("$FreeBSD$");
/*
 * save_registers <el> -- build a struct trapframe on the kernel stack on
 * exception entry.  \el selects the exception level the trap was taken
 * from (0 = EL0/userland, 1 = EL1/kernel).
 *
 * NOTE(review): this extract is missing interior lines of the macro --
 * the mrs reads that populate x10/x11/x12 (presumably ELR_EL1, SPSR_EL1
 * and ESR_EL1), any ".if \el" conditional-assembly branches, and the
 * closing .endm.  Reconcile against the full file before relying on it.
 */
36 .macro save_registers el
/*
 * Reserve the trapframe (TF_SIZE) plus 16 bytes for a frame record
 * (fp/lr pair) above it, keeping sp 16-byte aligned as AArch64 requires.
 */
41 	sub sp, sp, #(TF_SIZE + 16)
/* Frame record: saved x29/x30 live just above the trapframe. */
42 	stp x29, x30, [sp, #(TF_SIZE)]
/* Spill x0-x29 into the trapframe's TF_X register array, two at a time. */
43 	stp x28, x29, [sp, #(TF_X + 28 * 8)]
44 	stp x26, x27, [sp, #(TF_X + 26 * 8)]
45 	stp x24, x25, [sp, #(TF_X + 24 * 8)]
46 	stp x22, x23, [sp, #(TF_X + 22 * 8)]
47 	stp x20, x21, [sp, #(TF_X + 20 * 8)]
48 	stp x18, x19, [sp, #(TF_X + 18 * 8)]
49 	stp x16, x17, [sp, #(TF_X + 16 * 8)]
50 	stp x14, x15, [sp, #(TF_X + 14 * 8)]
51 	stp x12, x13, [sp, #(TF_X + 12 * 8)]
52 	stp x10, x11, [sp, #(TF_X + 10 * 8)]
53 	stp x8, x9, [sp, #(TF_X + 8 * 8)]
54 	stp x6, x7, [sp, #(TF_X + 6 * 8)]
55 	stp x4, x5, [sp, #(TF_X + 4 * 8)]
56 	stp x2, x3, [sp, #(TF_X + 2 * 8)]
57 	stp x0, x1, [sp, #(TF_X + 0 * 8)]
/*
 * Save the exception return state.  x10/x11/x12 are presumably loaded
 * from ELR_EL1/SPSR_EL1/ESR_EL1 by mrs instructions missing from this
 * extract -- TODO confirm.  The stp of two w (32-bit) registers packs
 * SPSR and the following 32-bit field into one 64-bit slot.
 */
64 	str x10, [sp, #(TF_ELR)]
65 	stp w11, w12, [sp, #(TF_SPSR)]
/*
 * Save the trapped sp and lr.  x18 here presumably holds the pre-trap
 * stack pointer (loaded by a missing mrs of sp_el0) before x18 is
 * repointed at per-CPU data below -- TODO confirm against full file.
 */
66 	stp x18, lr, [sp, #(TF_SP)]
/* Point the frame pointer at the frame record for stack unwinding. */
68 	add x29, sp, #(TF_SIZE)
70 	/* Apply the SSBD (CVE-2018-3639) workaround if needed */
/* x18 is the per-CPU data pointer here (PC_* offsets index struct pcpu). */
71 	ldr x1, [x18, #PC_SSBD]
/* NOTE(review): the conditional call using x1 is missing from this extract. */
77 	ldr x0, [x18, #(PC_CURTHREAD)]
/* daifclr #8 clears the D bit, re-enabling debug exceptions. */
80 	msr daifclr, #8 /* Enable the debug exception */
/*
 * restore_registers <el> -- unwind a struct trapframe and prepare to
 * eret back to the trapped context.  \el selects the exception level
 * being returned to (0 = EL0/userland, 1 = EL1/kernel).
 *
 * NOTE(review): interior lines are missing from this extract -- the
 * daif-masking msr, the msr writes that install ELR/SPSR/sp_el0 from
 * x10/x11/x18, the ".if \el == 0" guards, and the closing .endm.
 */
83 .macro restore_registers el
86 	 * Disable interrupts and debug exceptions, x18 may change in the
87 	 * interrupt exception handler. For EL0 exceptions, do_ast already
/* x18 is the per-CPU pointer here; fetch curthread from struct pcpu. */
93 	ldr x0, [x18, #PC_CURTHREAD]
97 	/* Remove the SSBD (CVE-2018-3639) workaround if needed */
98 	ldr x1, [x18, #PC_SSBD]
/*
 * Reload the trapped sp/lr and the saved ELR/SPSR pair; the msr
 * instructions that write them back are missing from this extract.
 */
104 	ldp x18, lr, [sp, #(TF_SP)]
105 	ldp x10, x11, [sp, #(TF_ELR)]
/* Restore the caller-saved registers from the TF_X array. */
111 	ldp x0, x1, [sp, #(TF_X + 0 * 8)]
112 	ldp x2, x3, [sp, #(TF_X + 2 * 8)]
113 	ldp x4, x5, [sp, #(TF_X + 4 * 8)]
114 	ldp x6, x7, [sp, #(TF_X + 6 * 8)]
115 	ldp x8, x9, [sp, #(TF_X + 8 * 8)]
116 	ldp x10, x11, [sp, #(TF_X + 10 * 8)]
117 	ldp x12, x13, [sp, #(TF_X + 12 * 8)]
118 	ldp x14, x15, [sp, #(TF_X + 14 * 8)]
119 	ldp x16, x17, [sp, #(TF_X + 16 * 8)]
122 	 * We only restore the callee saved registers when returning to
123 	 * userland as they may have been updated by a system call or signal.
/*
 * NOTE(review): the ".if \el == 0" / ".else" directives that select
 * between the full x18-x29 restore (EL0 path) and the x29-only reload
 * (EL1 path, which must keep the kernel's x18/pcpu pointer) are missing
 * from this extract -- both paths appear concatenated below.
 */
125 	ldp x18, x19, [sp, #(TF_X + 18 * 8)]
126 	ldp x20, x21, [sp, #(TF_X + 20 * 8)]
127 	ldp x22, x23, [sp, #(TF_X + 22 * 8)]
128 	ldp x24, x25, [sp, #(TF_X + 24 * 8)]
129 	ldp x26, x27, [sp, #(TF_X + 26 * 8)]
130 	ldp x28, x29, [sp, #(TF_X + 28 * 8)]
/* EL1 path: reload only the frame pointer. */
132 	ldr x29, [sp, #(TF_X + 29 * 8)]
/* Drop the trapframe and frame record allocated by save_registers. */
135 	add sp, sp, #(TF_SIZE + 16)
/*
 * AST (asynchronous system trap) check, run before returning to
 * userland: with interrupts disabled, test curthread's TDF_ASTPENDING /
 * TDF_NEEDRESCHED flags and loop until neither is set.
 *
 * NOTE(review): the enclosing definition (presumably ".macro do_ast")
 * and most of its body -- the daif save/restore, the flag test and
 * branch, and the call into ast() -- are missing from this extract.
 */
144 	/* Make sure the IRQs are enabled before calling ast() */
147 	/* Disable interrupts */
150 	/* Read the current thread flags */
/* x18 is the per-CPU pointer; x2 = curthread->td_flags. */
151 	ldr x1, [x18, #PC_CURTHREAD]	/* Load curthread */
152 	ldr x2, [x1, #TD_FLAGS]
154 	/* Check if we have either bits set */
/*
 * The mask is pre-shifted right by 8; presumably the missing test
 * instruction applies a matching "lsl #8" shifted operand -- TODO
 * confirm against the full file.
 */
155 	mov x3, #((TDF_ASTPENDING|TDF_NEEDRESCHED) >> 8)
160 	/* Restore interrupts */
167 	/* Re-check for new ast scheduled */
/*
 * Exception handler entry points referenced by the vector table below.
 * Each follows the same shape: save_registers, dispatch to the C
 * handler, restore_registers, ERET.
 *
 * NOTE(review): nearly all handler bodies (save/restore macro
 * invocations, the bl into the C handlers, ERET, and several END
 * markers) are missing from this extract.
 */
/* Synchronous exception taken from EL1 (kernel). */
172 ENTRY(handle_el1h_sync)
174 	ldr x0, [x18, #PC_CURTHREAD]
179 END(handle_el1h_sync)
/* IRQ taken from EL1 (kernel); body missing from this extract. */
181 ENTRY(handle_el1h_irq)
/* Synchronous exception from EL0: record the trapframe in curthread. */
189 ENTRY(handle_el0_sync)
191 	ldr x0, [x18, #PC_CURTHREAD]
/* curthread->td_frame = trapframe pointer (x1, set by a missing mov). */
193 	str x1, [x0, #TD_FRAME]
/* IRQ taken from EL0 (userland); body missing from this extract. */
200 ENTRY(handle_el0_irq)
/* Catch-all for vector slots with no real handler: spin in C reporter. */
216 ENTRY(handle_empty_exception)
219 1:	bl unhandled_exception
/*
 * NOTE(review): END name does not match the ENTRY above
 * (handle_empty_exception vs handle_unhandled_exception) -- confirm
 * whether the full file defines both symbols or this is a mismatch.
 */
221 END(handle_unhandled_exception)
/*
 * EL1 exception vector table (installed in VBAR_EL1): four groups of
 * four entries -- {Synchronous, IRQ, FIQ, SError} -- for, in order,
 * EL1 with SP_EL0 (EL1t), EL1 with SP_EL1 (EL1h), 64-bit EL0, and
 * 32-bit EL0.  Unused slots route to handle_empty_exception.
 *
 * NOTE(review): the ".macro vector"/".macro vempty" definitions and the
 * table's alignment directive (the architecture requires a 2 KiB-aligned
 * base with 0x80-byte entries) are missing from this extract -- the
 * stray branch below is presumably the tail of the vempty macro body.
 */
225 	b handle_empty_exception
234 	.globl exception_vectors
236 	vempty		/* Synchronous EL1t */
237 	vempty		/* IRQ EL1t */
238 	vempty		/* FIQ EL1t */
239 	vempty		/* Error EL1t */
241 	vector el1h_sync	/* Synchronous EL1h */
242 	vector el1h_irq		/* IRQ EL1h */
243 	vempty			/* FIQ EL1h */
244 	vector serror		/* Error EL1h */
246 	vector el0_sync		/* Synchronous 64-bit EL0 */
247 	vector el0_irq		/* IRQ 64-bit EL0 */
248 	vempty			/* FIQ 64-bit EL0 */
249 	vector serror		/* Error 64-bit EL0 */
251 	vector el0_sync		/* Synchronous 32-bit EL0 */
252 	vector el0_irq		/* IRQ 32-bit EL0 */
253 	vempty			/* FIQ 32-bit EL0 */
254 	vector serror		/* Error 32-bit EL0 */