//===-- AArch64ELFObjectWriter.cpp - AArch64 ELF Writer -------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file handles ELF-specific object emission, converting LLVM's internal
// fixups into the appropriate relocations.
//
//===----------------------------------------------------------------------===//

#include "MCTargetDesc/AArch64FixupKinds.h"
#include "MCTargetDesc/AArch64MCExpr.h"
#include "MCTargetDesc/AArch64MCTargetDesc.h"
#include "llvm/MC/MCContext.h"
#include "llvm/MC/MCELFObjectWriter.h"
#include "llvm/MC/MCValue.h"
#include "llvm/Support/ErrorHandling.h"

using namespace llvm;

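// ELF relocation writer for AArch64. IsILP32 selects the ILP32
// (R_AARCH64_P32_*) relocation codes instead of the default LP64
// (R_AARCH64_*) ones.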
namespace {
class AArch64ELFObjectWriter : public MCELFObjectTargetWriter {
public:
  AArch64ELFObjectWriter(uint8_t OSABI, bool IsLittleEndian, bool IsILP32);

  ~AArch64ELFObjectWriter() override;

protected:
  unsigned getRelocType(MCContext &Ctx, const MCValue &Target,
                        const MCFixup &Fixup, bool IsPCRel) const override;
  bool IsILP32;
};
} // end anonymous namespace

AArch64ELFObjectWriter::AArch64ELFObjectWriter(uint8_t OSABI,
                                               bool IsLittleEndian,
                                               bool IsILP32)
    : MCELFObjectTargetWriter(/*Is64Bit*/ true, OSABI, ELF::EM_AARCH64,
                              /*HasRelocationAddend*/ true),
      IsILP32(IsILP32) {}

AArch64ELFObjectWriter::~AArch64ELFObjectWriter() {}

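// R_CLS(rtype) expands to the ILP32 (R_AARCH64_P32_*) or LP64 (R_AARCH64_*)
// relocation enumerator depending on IsILP32. BAD_ILP32_MOV builds the error
// message for MOVW relocations that have no ILP32 counterpart.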
#define R_CLS(rtype) \
        IsILP32 ? ELF::R_AARCH64_P32_##rtype : ELF::R_AARCH64_##rtype
#define BAD_ILP32_MOV(lp64rtype) "ILP32 absolute MOV relocation not "\
        "supported (LP64 eqv: " #lp64rtype ")"

// Reports an error and returns true for MOVW fixup variants that have no
// ILP32 relocation counterpart. Assumes IsILP32 is true.
static bool isNonILP32reloc(const MCFixup &Fixup,
                            AArch64MCExpr::VariantKind RefKind,
                            MCContext &Ctx) {
  if ((unsigned)Fixup.getKind() != AArch64::fixup_aarch64_movw)
    return false;
  switch (RefKind) {
    case AArch64MCExpr::VK_ABS_G3:
      Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_UABS_G3));
      return true;
    case AArch64MCExpr::VK_ABS_G2:
      Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_UABS_G2));
      return true;
    case AArch64MCExpr::VK_ABS_G2_S:
      Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_SABS_G2));
      return true;
    case AArch64MCExpr::VK_ABS_G2_NC:
      Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_UABS_G2_NC));
      return true;
    case AArch64MCExpr::VK_ABS_G1_S:
      Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_SABS_G1));
      return true;
    case AArch64MCExpr::VK_ABS_G1_NC:
      Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_UABS_G1_NC));
      return true;
    case AArch64MCExpr::VK_DTPREL_G2:
      Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSLD_MOVW_DTPREL_G2));
      return true;
    case AArch64MCExpr::VK_DTPREL_G1_NC:
      Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSLD_MOVW_DTPREL_G1_NC));
      return true;
    case AArch64MCExpr::VK_TPREL_G2:
      Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSLE_MOVW_TPREL_G2));
      return true;
    case AArch64MCExpr::VK_TPREL_G1_NC:
      Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSLE_MOVW_TPREL_G1_NC));
      return true;
    case AArch64MCExpr::VK_GOTTPREL_G1:
      Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSIE_MOVW_GOTTPREL_G1));
      return true;
    case AArch64MCExpr::VK_GOTTPREL_G0_NC:
      Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSIE_MOVW_GOTTPREL_G0_NC));
      return true;
    default: return false;
  }
  return false;
}

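// Translate a fixup plus the expression variant attached to its target
// (RefKind) into an ELF relocation type. Unsupported combinations report an
// error and return R_AARCH64_NONE.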
unsigned AArch64ELFObjectWriter::getRelocType(MCContext &Ctx,
                                              const MCValue &Target,
                                              const MCFixup &Fixup,
                                              bool IsPCRel) const {
  AArch64MCExpr::VariantKind RefKind =
      static_cast<AArch64MCExpr::VariantKind>(Target.getRefKind());
  AArch64MCExpr::VariantKind SymLoc = AArch64MCExpr::getSymbolLoc(RefKind);
  bool IsNC = AArch64MCExpr::isNotChecked(RefKind);

  assert((!Target.getSymA() ||
          Target.getSymA()->getKind() == MCSymbolRefExpr::VK_None) &&
         "Should only be expression-level modifiers here");

  assert((!Target.getSymB() ||
          Target.getSymB()->getKind() == MCSymbolRefExpr::VK_None) &&
         "Should only be expression-level modifiers here");

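  // PC-relative fixups (data PREL*, ADR/ADRP, branches, literal loads) are
  // handled first; absolute data, immediate, and TLS fixups follow in the
  // else branch.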
  if (IsPCRel) {
    switch ((unsigned)Fixup.getKind()) {
    case FK_Data_1:
      Ctx.reportError(Fixup.getLoc(), "1-byte data relocations not supported");
      return ELF::R_AARCH64_NONE;
    case FK_Data_2:
      return R_CLS(PREL16);
    case FK_Data_4:
      return R_CLS(PREL32);
    case FK_Data_8:
      if (IsILP32) {
        Ctx.reportError(Fixup.getLoc(), "ILP32 8 byte PC relative data "
                        "relocation not supported (LP64 eqv: PREL64)");
        return ELF::R_AARCH64_NONE;
      } else
        return ELF::R_AARCH64_PREL64;
    case AArch64::fixup_aarch64_pcrel_adr_imm21:
      assert(SymLoc == AArch64MCExpr::VK_NONE && "unexpected ADR relocation");
      return R_CLS(ADR_PREL_LO21);
    case AArch64::fixup_aarch64_pcrel_adrp_imm21:
      if (SymLoc == AArch64MCExpr::VK_ABS && !IsNC)
        return R_CLS(ADR_PREL_PG_HI21);
      if (SymLoc == AArch64MCExpr::VK_GOT && !IsNC)
        return R_CLS(ADR_GOT_PAGE);
      if (SymLoc == AArch64MCExpr::VK_GOTTPREL && !IsNC)
        return R_CLS(TLSIE_ADR_GOTTPREL_PAGE21);
      if (SymLoc == AArch64MCExpr::VK_TLSDESC && !IsNC)
        return R_CLS(TLSDESC_ADR_PAGE21);
      Ctx.reportError(Fixup.getLoc(),
                      "invalid symbol kind for ADRP relocation");
      return ELF::R_AARCH64_NONE;
    case AArch64::fixup_aarch64_pcrel_branch26:
      return R_CLS(JUMP26);
    case AArch64::fixup_aarch64_pcrel_call26:
      return R_CLS(CALL26);
    case AArch64::fixup_aarch64_ldr_pcrel_imm19:
      if (SymLoc == AArch64MCExpr::VK_GOTTPREL)
        return R_CLS(TLSIE_LD_GOTTPREL_PREL19);
      return R_CLS(LD_PREL_LO19);
    case AArch64::fixup_aarch64_pcrel_branch14:
      return R_CLS(TSTBR14);
    case AArch64::fixup_aarch64_pcrel_branch19:
      return R_CLS(CONDBR19);
    default:
      Ctx.reportError(Fixup.getLoc(), "Unsupported pc-relative fixup kind");
      return ELF::R_AARCH64_NONE;
    }
  } else {
    if (IsILP32 && isNonILP32reloc(Fixup, RefKind, Ctx))
      return ELF::R_AARCH64_NONE;
    switch ((unsigned)Fixup.getKind()) {
    case FK_Data_1:
      Ctx.reportError(Fixup.getLoc(), "1-byte data relocations not supported");
      return ELF::R_AARCH64_NONE;
    case FK_Data_2:
      return R_CLS(ABS16);
    case FK_Data_4:
      return R_CLS(ABS32);
    case FK_Data_8:
      if (IsILP32) {
        Ctx.reportError(Fixup.getLoc(), "ILP32 8 byte absolute data "
                        "relocation not supported (LP64 eqv: ABS64)");
        return ELF::R_AARCH64_NONE;
      } else
        return ELF::R_AARCH64_ABS64;
    case AArch64::fixup_aarch64_add_imm12:
      if (RefKind == AArch64MCExpr::VK_DTPREL_HI12)
        return R_CLS(TLSLD_ADD_DTPREL_HI12);
      if (RefKind == AArch64MCExpr::VK_TPREL_HI12)
        return R_CLS(TLSLE_ADD_TPREL_HI12);
      if (RefKind == AArch64MCExpr::VK_DTPREL_LO12_NC)
        return R_CLS(TLSLD_ADD_DTPREL_LO12_NC);
      if (RefKind == AArch64MCExpr::VK_DTPREL_LO12)
        return R_CLS(TLSLD_ADD_DTPREL_LO12);
      if (RefKind == AArch64MCExpr::VK_TPREL_LO12_NC)
        return R_CLS(TLSLE_ADD_TPREL_LO12_NC);
      if (RefKind == AArch64MCExpr::VK_TPREL_LO12)
        return R_CLS(TLSLE_ADD_TPREL_LO12);
      if (RefKind == AArch64MCExpr::VK_TLSDESC_LO12)
        return R_CLS(TLSDESC_ADD_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_ABS && IsNC)
        return R_CLS(ADD_ABS_LO12_NC);

      Ctx.reportError(Fixup.getLoc(),
                      "invalid fixup for add (uimm12) instruction");
      return ELF::R_AARCH64_NONE;
    case AArch64::fixup_aarch64_ldst_imm12_scale1:
      if (SymLoc == AArch64MCExpr::VK_ABS && IsNC)
        return R_CLS(LDST8_ABS_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && !IsNC)
        return R_CLS(TLSLD_LDST8_DTPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && IsNC)
        return R_CLS(TLSLD_LDST8_DTPREL_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_TPREL && !IsNC)
        return R_CLS(TLSLE_LDST8_TPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_TPREL && IsNC)
        return R_CLS(TLSLE_LDST8_TPREL_LO12_NC);

      Ctx.reportError(Fixup.getLoc(),
                      "invalid fixup for 8-bit load/store instruction");
      return ELF::R_AARCH64_NONE;
    case AArch64::fixup_aarch64_ldst_imm12_scale2:
      if (SymLoc == AArch64MCExpr::VK_ABS && IsNC)
        return R_CLS(LDST16_ABS_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && !IsNC)
        return R_CLS(TLSLD_LDST16_DTPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && IsNC)
        return R_CLS(TLSLD_LDST16_DTPREL_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_TPREL && !IsNC)
        return R_CLS(TLSLE_LDST16_TPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_TPREL && IsNC)
        return R_CLS(TLSLE_LDST16_TPREL_LO12_NC);

      Ctx.reportError(Fixup.getLoc(),
                      "invalid fixup for 16-bit load/store instruction");
      return ELF::R_AARCH64_NONE;
    case AArch64::fixup_aarch64_ldst_imm12_scale4:
      if (SymLoc == AArch64MCExpr::VK_ABS && IsNC)
        return R_CLS(LDST32_ABS_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && !IsNC)
        return R_CLS(TLSLD_LDST32_DTPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && IsNC)
        return R_CLS(TLSLD_LDST32_DTPREL_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_TPREL && !IsNC)
        return R_CLS(TLSLE_LDST32_TPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_TPREL && IsNC)
        return R_CLS(TLSLE_LDST32_TPREL_LO12_NC);

      Ctx.reportError(Fixup.getLoc(),
                      "invalid fixup for 32-bit load/store instruction");
      return ELF::R_AARCH64_NONE;
    case AArch64::fixup_aarch64_ldst_imm12_scale8:
      if (SymLoc == AArch64MCExpr::VK_ABS && IsNC)
        return R_CLS(LDST64_ABS_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_GOT && IsNC)
        return R_CLS(LD64_GOT_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && !IsNC)
        return R_CLS(TLSLD_LDST64_DTPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && IsNC)
        return R_CLS(TLSLD_LDST64_DTPREL_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_TPREL && !IsNC)
        return R_CLS(TLSLE_LDST64_TPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_TPREL && IsNC)
        return R_CLS(TLSLE_LDST64_TPREL_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_GOTTPREL && IsNC)
        return IsILP32 ? ELF::R_AARCH64_P32_TLSIE_LD32_GOTTPREL_LO12_NC
                       : ELF::R_AARCH64_TLSIE_LD64_GOTTPREL_LO12_NC;
      if (SymLoc == AArch64MCExpr::VK_TLSDESC && IsNC)
        return IsILP32 ? ELF::R_AARCH64_P32_TLSDESC_LD32_LO12_NC
                       : ELF::R_AARCH64_TLSDESC_LD64_LO12_NC;

      Ctx.reportError(Fixup.getLoc(),
                      "invalid fixup for 64-bit load/store instruction");
      return ELF::R_AARCH64_NONE;
    case AArch64::fixup_aarch64_ldst_imm12_scale16:
      if (SymLoc == AArch64MCExpr::VK_ABS && IsNC)
        return R_CLS(LDST128_ABS_LO12_NC);

      Ctx.reportError(Fixup.getLoc(),
                      "invalid fixup for 128-bit load/store instruction");
      return ELF::R_AARCH64_NONE;
    // For ILP32, MOVW variants with no P32 counterpart never reach this
    // point; they are rejected above by isNonILP32reloc.
    case AArch64::fixup_aarch64_movw:
      if (RefKind == AArch64MCExpr::VK_ABS_G3)
        return ELF::R_AARCH64_MOVW_UABS_G3;
      if (RefKind == AArch64MCExpr::VK_ABS_G2)
        return ELF::R_AARCH64_MOVW_UABS_G2;
      if (RefKind == AArch64MCExpr::VK_ABS_G2_S)
        return ELF::R_AARCH64_MOVW_SABS_G2;
      if (RefKind == AArch64MCExpr::VK_ABS_G2_NC)
        return ELF::R_AARCH64_MOVW_UABS_G2_NC;
      if (RefKind == AArch64MCExpr::VK_ABS_G1)
        return R_CLS(MOVW_UABS_G1);
      if (RefKind == AArch64MCExpr::VK_ABS_G1_S)
        return ELF::R_AARCH64_MOVW_SABS_G1;
      if (RefKind == AArch64MCExpr::VK_ABS_G1_NC)
        return ELF::R_AARCH64_MOVW_UABS_G1_NC;
      if (RefKind == AArch64MCExpr::VK_ABS_G0)
        return R_CLS(MOVW_UABS_G0);
      if (RefKind == AArch64MCExpr::VK_ABS_G0_S)
        return R_CLS(MOVW_SABS_G0);
      if (RefKind == AArch64MCExpr::VK_ABS_G0_NC)
        return R_CLS(MOVW_UABS_G0_NC);
      if (RefKind == AArch64MCExpr::VK_DTPREL_G2)
        return ELF::R_AARCH64_TLSLD_MOVW_DTPREL_G2;
      if (RefKind == AArch64MCExpr::VK_DTPREL_G1)
        return R_CLS(TLSLD_MOVW_DTPREL_G1);
      if (RefKind == AArch64MCExpr::VK_DTPREL_G1_NC)
        return ELF::R_AARCH64_TLSLD_MOVW_DTPREL_G1_NC;
      if (RefKind == AArch64MCExpr::VK_DTPREL_G0)
        return R_CLS(TLSLD_MOVW_DTPREL_G0);
      if (RefKind == AArch64MCExpr::VK_DTPREL_G0_NC)
        return R_CLS(TLSLD_MOVW_DTPREL_G0_NC);
      if (RefKind == AArch64MCExpr::VK_TPREL_G2)
        return ELF::R_AARCH64_TLSLE_MOVW_TPREL_G2;
      if (RefKind == AArch64MCExpr::VK_TPREL_G1)
        return R_CLS(TLSLE_MOVW_TPREL_G1);
      if (RefKind == AArch64MCExpr::VK_TPREL_G1_NC)
        return ELF::R_AARCH64_TLSLE_MOVW_TPREL_G1_NC;
      if (RefKind == AArch64MCExpr::VK_TPREL_G0)
        return R_CLS(TLSLE_MOVW_TPREL_G0);
      if (RefKind == AArch64MCExpr::VK_TPREL_G0_NC)
        return R_CLS(TLSLE_MOVW_TPREL_G0_NC);
      if (RefKind == AArch64MCExpr::VK_GOTTPREL_G1)
        return ELF::R_AARCH64_TLSIE_MOVW_GOTTPREL_G1;
      if (RefKind == AArch64MCExpr::VK_GOTTPREL_G0_NC)
        return ELF::R_AARCH64_TLSIE_MOVW_GOTTPREL_G0_NC;
      Ctx.reportError(Fixup.getLoc(),
                      "invalid fixup for movz/movk instruction");
      return ELF::R_AARCH64_NONE;
    case AArch64::fixup_aarch64_tlsdesc_call:
      return R_CLS(TLSDESC_CALL);
    default:
      Ctx.reportError(Fixup.getLoc(), "Unknown ELF relocation type");
      return ELF::R_AARCH64_NONE;
    }
  }

  llvm_unreachable("Unimplemented fixup -> relocation");
}

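// Creates the AArch64 ELF object writer for the given OS ABI, endianness, and
// ABI variant (LP64 or ILP32).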
MCObjectWriter *llvm::createAArch64ELFObjectWriter(raw_pwrite_stream &OS,
                                                   uint8_t OSABI,
                                                   bool IsLittleEndian,
                                                   bool IsILP32) {
  MCELFObjectTargetWriter *MOTW =
      new AArch64ELFObjectWriter(OSABI, IsLittleEndian, IsILP32);
  return createELFObjectWriter(MOTW, OS, IsLittleEndian);
}