/*-
 * Copyright (C) 2013 Andrew Turner
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */
33 #include <machine/acle-compat.h>
/*
 * ASM helper macros. These allow the functions to be changed depending on
 * the endian-ness we are building for.
 */
/* Allow the name of the function to be changed depending on the ABI */
#ifndef __ARM_PCS_VFP
/*
 * Soft-float ABI: the plain __aeabi_* name is the soft-float entry point,
 * so the VFP implementation is exported with a "_vfp" suffix for the C
 * wrapper to dispatch to.
 *
 * NOTE(review): the #ifndef/#else/#endif here was reconstructed from the
 * two conflicting definition pairs in the damaged source — confirm the
 * polarity against the upstream file.
 */
#define	AEABI_ENTRY(x)	ENTRY(__aeabi_ ## x ## _vfp)
#define	AEABI_END(x)	END(__aeabi_ ## x ## _vfp)
#else
/* Hard-float ABI: __aeabi_* itself is the VFP implementation. */
#define	AEABI_ENTRY(x)	ENTRY(__aeabi_ ## x)
#define	AEABI_END(x)	END(__aeabi_ ## x)
#endif
/*
 * These should be used when a function either takes, or returns a floating
 * point value. They will load the data from an ARM to a VFP register(s),
 * or from a VFP to an ARM register.
 */
#ifdef __ARM_BIG_ENDIAN
/*
 * Big-endian: the two GPR halves of a 64-bit value are swapped relative
 * to the VFP D-register layout, so reverse the core registers in the vmov.
 */
#define	LOAD_DREG(vreg, reg0, reg1)	vmov vreg, reg1, reg0
#define	UNLOAD_DREG(reg0, reg1, vreg)	vmov reg1, reg0, vreg
#else
#define	LOAD_DREG(vreg, reg0, reg1)	vmov vreg, reg0, reg1
#define	UNLOAD_DREG(reg0, reg1, vreg)	vmov reg0, reg1, vreg
#endif

/* Single-precision (32-bit) moves have no endian-dependent word order. */
#define	LOAD_SREGS(vreg0, vreg1, reg0, reg1)	vmov vreg0, vreg1, reg0, reg1
#define	LOAD_SREG(vreg, reg)			vmov vreg, reg
#define	UNLOAD_SREG(reg, vreg)			vmov reg, vreg
/*
 * C helper macros.  On armv6 and later each generated __aeabi_* function
 * dispatches at run time between the VFP implementation and the soft-float
 * fallback; on earlier ARM only the soft-float version is emitted.
 *
 * NOTE(review): the #if/#else/#endif structure below was reconstructed —
 * the damaged source contained two conflicting sets of these macro
 * definitions, and the comment on the second set ("on arm before armv6")
 * implies an __ARM_ARCH test.  Confirm the exact condition upstream.
 * Also note the generated definitions carry no return type here —
 * presumably it is supplied at the expansion site; verify against callers.
 */
#if __ARM_ARCH >= 6
/*
 * Generate a function that will either call into the VFP implementation,
 * or the soft float version for a given __aeabi_* helper. The function
 * will take a single argument of the type given by in_type.
 */
#define	AEABI_FUNC(name, in_type, soft_func)		\
__aeabi_ ## name(in_type a)				\
{							\
	if (_libc_arm_fpu_present)			\
		return __aeabi_ ## name ## _vfp(a);	\
	else						\
		return soft_func (a);			\
}

/* As above, but takes two arguments of the same type */
#define	AEABI_FUNC2(name, in_type, soft_func)		\
__aeabi_ ## name(in_type a, in_type b)			\
{							\
	if (_libc_arm_fpu_present)			\
		return __aeabi_ ## name ## _vfp(a, b);	\
	else						\
		return soft_func (a, b);		\
}

/* As above, but with the soft float arguments reversed */
#define	AEABI_FUNC2_REV(name, in_type, soft_func)	\
__aeabi_ ## name(in_type a, in_type b)			\
{							\
	if (_libc_arm_fpu_present)			\
		return __aeabi_ ## name ## _vfp(a, b);	\
	else						\
		return soft_func (b, a);		\
}
#else
/*
 * Helper macros for when we are only able to use the softfloat
 * version of these functions, i.e. on arm before armv6.
 */
#define	AEABI_FUNC(name, in_type, soft_func)	\
__aeabi_ ## name(in_type a)			\
{						\
	return soft_func (a);			\
}

/* As above, but takes two arguments of the same type */
#define	AEABI_FUNC2(name, in_type, soft_func)	\
__aeabi_ ## name(in_type a, in_type b)		\
{						\
	return soft_func (a, b);		\
}

/* As above, but with the soft float arguments reversed */
#define	AEABI_FUNC2_REV(name, in_type, soft_func)	\
__aeabi_ ## name(in_type a, in_type b)			\
{							\
	return soft_func (b, a);			\
}
#endif