2 * SPDX-License-Identifier: BSD-2-Clause-FreeBSD
4 * Copyright (c) 2013, Anish Gupta (akgupt3@gmail.com)
7 * Redistribution and use in source and binary forms, with or without
8 * modification, are permitted provided that the following conditions
10 * 1. Redistributions of source code must retain the above copyright
11 * notice unmodified, this list of conditions, and the following
13 * 2. Redistributions in binary form must reproduce the above copyright
14 * notice, this list of conditions and the following disclaimer in the
15 * documentation and/or other materials provided with the distribution.
17 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
18 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
19 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
20 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
21 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
22 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
26 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include <machine/asmacros.h>

#include "svm_assym.h"

/*
 * Be friendly to DTrace FBT's prologue/epilogue pattern matching.
 *
 * They are also responsible for saving/restoring the host %rbp across VMRUN.
 */
#define VENTER push %rbp ; mov %rsp,%rbp
#define VLEAVE pop %rbp

/*
 * The SVM world-switch instructions are emitted as raw opcode bytes so
 * that this file assembles even with toolchains whose assembler predates
 * the vmload/vmrun/vmsave mnemonics.  Per the AMD APM, all three
 * implicitly take the physical address of the VMCB in %rax.
 */
#define VMLOAD .byte 0x0f, 0x01, 0xda
#define VMRUN .byte 0x0f, 0x01, 0xd8
#define VMSAVE .byte 0x0f, 0x01, 0xdb
/*
 * svm_launch(uint64_t vmcb, struct svm_regctx *gctx, struct pcpu *pcpu)
 * %rdi: physical address of VMCB
 * %rsi: pointer to guest context
 * %rdx: pointer to the pcpu data
 *
 * NOTE(review): only a fragment of this function is visible in this
 * chunk.  The ENTRY() label, the host callee-saved register pushes, the
 * VMLOAD/VMRUN/VMSAVE sequence, and the function epilogue all fall
 * outside the visible lines and are not reproduced here.
 */

	/* save pointer to the pcpu data */

	/*
	 * Host register state saved across a VMRUN.
	 *
	 * All "callee saved registers" except:
	 * %rsp: because it is preserved by the processor across VMRUN.
	 * %rbp: because it is saved/restored by the function prologue/epilogue.
	 */

	/* Save the physical address of the VMCB in %rax */

	push %rsi		/* push guest context pointer on the stack */

	/*
	 * Restore guest general-purpose state from the register context
	 * supplied by the caller.  %rsi holds the context pointer itself,
	 * so it is overwritten last.
	 */
	movq SCTX_R8(%rsi), %r8
	movq SCTX_R9(%rsi), %r9
	movq SCTX_R10(%rsi), %r10
	movq SCTX_R11(%rsi), %r11
	movq SCTX_R12(%rsi), %r12
	movq SCTX_R13(%rsi), %r13
	movq SCTX_R14(%rsi), %r14
	movq SCTX_R15(%rsi), %r15
	movq SCTX_RBP(%rsi), %rbp
	movq SCTX_RBX(%rsi), %rbx
	movq SCTX_RCX(%rsi), %rcx
	movq SCTX_RDX(%rsi), %rdx
	movq SCTX_RDI(%rsi), %rdi
	movq SCTX_RSI(%rsi), %rsi	/* %rsi must be restored last */

	/*
	 * NOTE(review): the VMLOAD/VMRUN/VMSAVE world switch belongs
	 * between the guest-restore above and the pop below, but those
	 * lines are not visible in this chunk.
	 */

	pop %rax		/* pop guest context pointer from the stack */

	/*
	 * Save the guest's general-purpose register state back into the
	 * context now pointed to by %rax; mirrors the restore sequence
	 * above.
	 */
	movq %r8, SCTX_R8(%rax)
	movq %r9, SCTX_R9(%rax)
	movq %r10, SCTX_R10(%rax)
	movq %r11, SCTX_R11(%rax)
	movq %r12, SCTX_R12(%rax)
	movq %r13, SCTX_R13(%rax)
	movq %r14, SCTX_R14(%rax)
	movq %r15, SCTX_R15(%rax)
	movq %rbp, SCTX_RBP(%rax)
	movq %rbx, SCTX_RBX(%rax)
	movq %rcx, SCTX_RCX(%rax)
	movq %rdx, SCTX_RDX(%rax)
	movq %rdi, SCTX_RDI(%rax)
	movq %rsi, SCTX_RSI(%rax)

	/*
	 * To prevent malicious branch target predictions from
	 * affecting the host, overwrite all entries in the RSB upon
	 * exiting the guest: each "call" pushes a benign return address
	 * onto the return stack buffer, displacing any guest-controlled
	 * entries.
	 *
	 * NOTE(review): several lines of this stuffing loop (numeric
	 * label definitions, the speculation-trap pause bodies, the
	 * stack fixup, and the loop back-edge) are missing from this
	 * chunk, so the 1b/2f label targets below cannot be verified
	 * against their definitions here — confirm against the full file.
	 */
	mov $16, %ecx		/* 16 iterations, two calls per loop */
0:	call 2f			/* create an RSB entry. */
	call 1b			/* capture rogue speculation. */
2:	call 2f			/* create an RSB entry. */
	call 1b			/* capture rogue speculation. */

	/* Restore host state */

	/* Restore %GS.base to point to the host's pcpu data */
	mov $MSR_GSBASE, %rcx	/* MSR index for the wrmsr (write not visible here) */

	/*
	 * Clobber the remaining registers with guest contents so they
	 * (comment truncated at the end of the visible chunk)
	 */