//===-- xray_trampoline_x86.s -----------------------------------*- ASM -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of XRay, a dynamic runtime instrumentation system.
//
// This implements the X86-specific assembler for the trampolines.
//
//===----------------------------------------------------------------------===//

#include "../builtins/assembly.h"
#include "../sanitizer_common/sanitizer_asm.h"

.macro SAVE_REGISTERS
  subq $192, %rsp
  CFI_DEF_CFA_OFFSET(200)
  // At this point, the stack pointer should be aligned to an 8-byte boundary,
  // because any call instructions that come after this will add another 8
  // bytes and therefore align it to 16 bytes.
  movq %rbp, 184(%rsp)
  movupd %xmm0, 168(%rsp)
  movupd %xmm1, 152(%rsp)
  movupd %xmm2, 136(%rsp)
  movupd %xmm3, 120(%rsp)
  movupd %xmm4, 104(%rsp)
  movupd %xmm5, 88(%rsp)
  movupd %xmm6, 72(%rsp)
  movupd %xmm7, 56(%rsp)
  movq %rdi, 48(%rsp)
  movq %rax, 40(%rsp)
  movq %rdx, 32(%rsp)
  movq %rsi, 24(%rsp)
  movq %rcx, 16(%rsp)
  movq %r8, 8(%rsp)
  movq %r9, 0(%rsp)
.endm
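
// A quick check of the arithmetic above: the trampolines below are entered
// with a CALL, so %rsp is 8 (mod 16) once the return address has been pushed.
// 192 is a multiple of 16, so the subq preserves that alignment, and the CFA
// offset of 200 is the 192 bytes of spill space plus the 8-byte return
// address.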

.macro RESTORE_REGISTERS
  movq 184(%rsp), %rbp
  movupd 168(%rsp), %xmm0
  movupd 152(%rsp), %xmm1
  movupd 136(%rsp), %xmm2
  movupd 120(%rsp), %xmm3
  movupd 104(%rsp), %xmm4
  movupd 88(%rsp), %xmm5
  movupd 72(%rsp), %xmm6
  movupd 56(%rsp), %xmm7
  movq 48(%rsp), %rdi
  movq 40(%rsp), %rax
  movq 32(%rsp), %rdx
  movq 24(%rsp), %rsi
  movq 16(%rsp), %rcx
  movq 8(%rsp), %r8
  movq 0(%rsp), %r9
  addq $192, %rsp
  CFI_DEF_CFA_OFFSET(8)
.endm

.macro ALIGNED_CALL_RAX
  // Call the logging handler, after aligning the stack to a 16-byte boundary.
  // The approach we're taking here uses additional stack space to stash the
  // stack pointer twice before aligning the pointer to 16 bytes. If the stack
  // was 8-byte aligned, it will become 16-byte aligned -- when restoring the
  // pointer, we can always look -8 bytes from the current position to get
  // either of the values we've stashed in the first place.
  pushq %rsp
  pushq (%rsp)
  andq $-0x10, %rsp
  callq *%rax
  movq 8(%rsp), %rsp
.endm
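
// To illustrate the stashing trick, call the incoming stack pointer R:
//
//   pushq %rsp          // mem[R-8]  = R, %rsp = R-8
//   pushq (%rsp)        // mem[R-16] = R, %rsp = R-16
//   andq $-0x10, %rsp   // %rsp = R-16 or R-24, whichever is 16-byte aligned
//
// Either way, 8(%rsp) names one of the two stashed copies of R once the
// handler returns, so the final movq restores the original stack pointer
// exactly.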

  .text
#if !defined(__APPLE__)
  .section .text
  .file "xray_trampoline_x86.S"
#else
  .section __TEXT,__text
#endif

//===----------------------------------------------------------------------===//

  .globl ASM_SYMBOL(__xray_FunctionEntry)
  .align 16, 0x90
  ASM_TYPE_FUNCTION(__xray_FunctionEntry)
ASM_SYMBOL(__xray_FunctionEntry):
  CFI_STARTPROC
  SAVE_REGISTERS

  // This load has to be atomic, it's concurrent with __xray_patch().
  // On x86/amd64, a simple (type-aligned) MOV instruction is enough.
  movq ASM_SYMBOL(_ZN6__xray19XRayPatchedFunctionE)(%rip), %rax
  testq %rax, %rax
  je .Ltmp0

  // The patched function prologue puts its xray_instr_map index into %r10d.
  movl %r10d, %edi
  xor %esi, %esi
  ALIGNED_CALL_RAX

.Ltmp0:
  RESTORE_REGISTERS
  retq
  ASM_SIZE(__xray_FunctionEntry)
  CFI_ENDPROC
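
// For reference, the entry sled that __xray_patch() rewrites looks roughly
// like the following once patched (the exact byte encoding lives in the
// x86-64 patching code):
//
//   mov $<function id>, %r10d
//   callq __xray_FunctionEntry
//
// which is why the trampoline picks the function id up from %r10d, and why
// nearly the full register state has to be preserved: we are interposed
// before the function body has had a chance to run.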

//===----------------------------------------------------------------------===//

  .globl ASM_SYMBOL(__xray_FunctionExit)
  .align 16, 0x90
  ASM_TYPE_FUNCTION(__xray_FunctionExit)
ASM_SYMBOL(__xray_FunctionExit):
  CFI_STARTPROC
  // Save the important registers first. Since we're assuming that this
  // function is only jumped into, we only preserve the registers needed
  // for returning.
  subq $56, %rsp
  CFI_DEF_CFA_OFFSET(64)
  movq %rbp, 48(%rsp)
  movupd %xmm0, 32(%rsp)
  movupd %xmm1, 16(%rsp)
  movq %rax, 8(%rsp)
  movq %rdx, 0(%rsp)
  movq ASM_SYMBOL(_ZN6__xray19XRayPatchedFunctionE)(%rip), %rax
  testq %rax, %rax
  je .Ltmp2

  movl %r10d, %edi
  movl $1, %esi
  ALIGNED_CALL_RAX

.Ltmp2:
  // Restore the important registers.
  movq 48(%rsp), %rbp
  movupd 32(%rsp), %xmm0
  movupd 16(%rsp), %xmm1
  movq 8(%rsp), %rax
  movq 0(%rsp), %rdx
  addq $56, %rsp
  CFI_DEF_CFA_OFFSET(8)
  retq
  ASM_SIZE(__xray_FunctionExit)
  CFI_ENDPROC
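
// Unlike the entry trampoline, this one is reached when the instrumented
// function's return sled is rewritten into a JMP here, roughly:
//
//   mov $<function id>, %r10d
//   jmp __xray_FunctionExit
//
// We then return on the instrumented function's behalf, so only the return
// value registers (%rax, %rdx, %xmm0, %xmm1) and %rbp need to survive the
// handler call -- hence the much smaller save/restore set.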

//===----------------------------------------------------------------------===//

  .globl ASM_SYMBOL(__xray_FunctionTailExit)
  .align 16, 0x90
  ASM_TYPE_FUNCTION(__xray_FunctionTailExit)
ASM_SYMBOL(__xray_FunctionTailExit):
  CFI_STARTPROC
  // Save the full register state as on entry: a tail call hands our argument
  // registers to the callee, so they have to survive the handler.
  SAVE_REGISTERS

  movq ASM_SYMBOL(_ZN6__xray19XRayPatchedFunctionE)(%rip), %rax
  testq %rax, %rax
  je .Ltmp4

  movl %r10d, %edi
  movl $2, %esi
  ALIGNED_CALL_RAX

.Ltmp4:
  RESTORE_REGISTERS
  retq
  ASM_SIZE(__xray_FunctionTailExit)
  CFI_ENDPROC

//===----------------------------------------------------------------------===//

  .globl ASM_SYMBOL(__xray_ArgLoggerEntry)
  .align 16, 0x90
  ASM_TYPE_FUNCTION(__xray_ArgLoggerEntry)
ASM_SYMBOL(__xray_ArgLoggerEntry):
  CFI_STARTPROC
  SAVE_REGISTERS

  // Again, these function pointer loads must be atomic; MOV is fine.
  movq ASM_SYMBOL(_ZN6__xray13XRayArgLoggerE)(%rip), %rax
  testq %rax, %rax
  jne .Larg1entryLog

  // If the arg1 logging handler is not set, defer to the no-arg logging
  // handler instead.
  movq ASM_SYMBOL(_ZN6__xray19XRayPatchedFunctionE)(%rip), %rax
  testq %rax, %rax
  je .Larg1entryFail

.Larg1entryLog:
  // First argument will become the third.
  movq %rdi, %rdx

  // XRayEntryType::LOG_ARGS_ENTRY into the second.
  mov $0x3, %esi

  // 32-bit function ID becomes the first.
  movl %r10d, %edi

  ALIGNED_CALL_RAX

.Larg1entryFail:
  RESTORE_REGISTERS
  retq
  ASM_SIZE(__xray_ArgLoggerEntry)
  CFI_ENDPROC
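
// The shuffle above lines the registers up with the SysV AMD64 calling
// convention for a handler installed via __xray_set_handler_arg1, i.e. a
// function shaped like:
//
//   void handler(int32_t func_id,     // %edi
//                XRayEntryType type,  // %esi, here LOG_ARGS_ENTRY (3)
//                uint64_t arg1);      // %rdx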

//===----------------------------------------------------------------------===//

  .global ASM_SYMBOL(__xray_CustomEvent)
  .align 16, 0x90
  ASM_TYPE_FUNCTION(__xray_CustomEvent)
ASM_SYMBOL(__xray_CustomEvent):
  CFI_STARTPROC
  SAVE_REGISTERS

  // We take two arguments to this trampoline, which should already be in %rdi
  // and %rsi. We also stash %rax because we use that register to call the
  // logging handler.
  movq ASM_SYMBOL(_ZN6__xray22XRayPatchedCustomEventE)(%rip), %rax
  testq %rax, %rax
  je .LcustomEventCleanup

  ALIGNED_CALL_RAX

.LcustomEventCleanup:
  RESTORE_REGISTERS
  retq
  ASM_SIZE(__xray_CustomEvent)
  CFI_ENDPROC
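
// This trampoline backs the __xray_customevent(buffer, size) builtin, so on
// entry %rdi carries the pointer to the event record and %rsi its size --
// exactly the two arguments a handler installed via
// __xray_set_customevent_handler expects.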

NO_EXEC_STACK_DIRECTIVE