//===-- AArch64ELFObjectWriter.cpp - AArch64 ELF Writer -------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file handles ELF-specific object emission, converting LLVM's internal
// fixups into the appropriate relocations.
//
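// For example, the fixup_aarch64_pcrel_call26 fixup (a `bl` to an external
// symbol) is emitted as R_AARCH64_CALL26, or R_AARCH64_P32_CALL26 under ILP32.
//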
//===----------------------------------------------------------------------===//

#include "MCTargetDesc/AArch64FixupKinds.h"
#include "MCTargetDesc/AArch64MCExpr.h"
#include "MCTargetDesc/AArch64MCTargetDesc.h"
#include "llvm/BinaryFormat/ELF.h"
#include "llvm/MC/MCContext.h"
#include "llvm/MC/MCELFObjectWriter.h"
#include "llvm/MC/MCFixup.h"
#include "llvm/MC/MCObjectWriter.h"
#include "llvm/MC/MCValue.h"
#include "llvm/Support/ErrorHandling.h"
#include <cassert>
#include <cstdint>

using namespace llvm;

namespace {

class AArch64ELFObjectWriter : public MCELFObjectTargetWriter {
public:
  AArch64ELFObjectWriter(uint8_t OSABI, bool IsLittleEndian, bool IsILP32);

  ~AArch64ELFObjectWriter() override = default;

protected:
  unsigned getRelocType(MCContext &Ctx, const MCValue &Target,
                        const MCFixup &Fixup, bool IsPCRel) const override;
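  // True when targeting the ILP32 variant of the AArch64 ELF ABI (32-bit
  // pointers); selects the R_AARCH64_P32_* relocations instead of the LP64
  // ones.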
  bool IsILP32;
};

} // end anonymous namespace

AArch64ELFObjectWriter::AArch64ELFObjectWriter(uint8_t OSABI,
                                               bool IsLittleEndian,
                                               bool IsILP32)
    : MCELFObjectTargetWriter(/*Is64Bit*/ true, OSABI, ELF::EM_AARCH64,
                              /*HasRelocationAddend*/ true),
      IsILP32(IsILP32) {}

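// R_CLS(rtype) expands to the LP64 relocation R_AARCH64_<rtype> or, when
// IsILP32 is set, to its ILP32 counterpart R_AARCH64_P32_<rtype>.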
#define R_CLS(rtype)                                                           \
  IsILP32 ? ELF::R_AARCH64_P32_##rtype : ELF::R_AARCH64_##rtype
#define BAD_ILP32_MOV(lp64rtype)                                               \
  "ILP32 absolute MOV relocation not "                                         \
  "supported (LP64 eqv: " #lp64rtype ")"

// assumes IsILP32 is true
static bool isNonILP32reloc(const MCFixup &Fixup,
                            AArch64MCExpr::VariantKind RefKind,
                            MCContext &Ctx) {
  if ((unsigned)Fixup.getKind() != AArch64::fixup_aarch64_movw)
    return false;
  switch (RefKind) {
  case AArch64MCExpr::VK_ABS_G3:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_UABS_G3));
    return true;
  case AArch64MCExpr::VK_ABS_G2:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_UABS_G2));
    return true;
  case AArch64MCExpr::VK_ABS_G2_S:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_SABS_G2));
    return true;
  case AArch64MCExpr::VK_ABS_G2_NC:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_UABS_G2_NC));
    return true;
  case AArch64MCExpr::VK_ABS_G1_S:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_SABS_G1));
    return true;
  case AArch64MCExpr::VK_ABS_G1_NC:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_UABS_G1_NC));
    return true;
  case AArch64MCExpr::VK_DTPREL_G2:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSLD_MOVW_DTPREL_G2));
    return true;
  case AArch64MCExpr::VK_DTPREL_G1_NC:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSLD_MOVW_DTPREL_G1_NC));
    return true;
  case AArch64MCExpr::VK_TPREL_G2:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSLE_MOVW_TPREL_G2));
    return true;
  case AArch64MCExpr::VK_TPREL_G1_NC:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSLE_MOVW_TPREL_G1_NC));
    return true;
  case AArch64MCExpr::VK_GOTTPREL_G1:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSIE_MOVW_GOTTPREL_G1));
    return true;
  case AArch64MCExpr::VK_GOTTPREL_G0_NC:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSIE_MOVW_GOTTPREL_G0_NC));
    return true;
  default:
    return false;
  }
  return false;
}

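// Pick the ELF relocation type for a resolved fixup. PC-relative and absolute
// fixups are handled separately; unsupported combinations report an error via
// Ctx and fall back to R_AARCH64_NONE.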
unsigned AArch64ELFObjectWriter::getRelocType(MCContext &Ctx,
                                              const MCValue &Target,
                                              const MCFixup &Fixup,
                                              bool IsPCRel) const {
  AArch64MCExpr::VariantKind RefKind =
      static_cast<AArch64MCExpr::VariantKind>(Target.getRefKind());
  AArch64MCExpr::VariantKind SymLoc = AArch64MCExpr::getSymbolLoc(RefKind);
  bool IsNC = AArch64MCExpr::isNotChecked(RefKind);

  assert((!Target.getSymA() ||
          Target.getSymA()->getKind() == MCSymbolRefExpr::VK_None) &&
         "Should only be expression-level modifiers here");

  assert((!Target.getSymB() ||
          Target.getSymB()->getKind() == MCSymbolRefExpr::VK_None) &&
         "Should only be expression-level modifiers here");

  if (IsPCRel) {
    switch ((unsigned)Fixup.getKind()) {
    case FK_Data_1:
      Ctx.reportError(Fixup.getLoc(), "1-byte data relocations not supported");
      return ELF::R_AARCH64_NONE;
    case FK_Data_2:
      return R_CLS(PREL16);
    case FK_Data_4:
      return R_CLS(PREL32);
    case FK_Data_8:
      if (IsILP32) {
        Ctx.reportError(Fixup.getLoc(),
                        "ILP32 8 byte PC relative data "
                        "relocation not supported (LP64 eqv: PREL64)");
        return ELF::R_AARCH64_NONE;
      } else
        return ELF::R_AARCH64_PREL64;
    case AArch64::fixup_aarch64_pcrel_adr_imm21:
      assert(SymLoc == AArch64MCExpr::VK_NONE && "unexpected ADR relocation");
      return R_CLS(ADR_PREL_LO21);
    case AArch64::fixup_aarch64_pcrel_adrp_imm21:
      if (SymLoc == AArch64MCExpr::VK_ABS && !IsNC)
        return R_CLS(ADR_PREL_PG_HI21);
      if (SymLoc == AArch64MCExpr::VK_ABS && IsNC) {
        if (IsILP32) {
          Ctx.reportError(Fixup.getLoc(),
                          "invalid fixup for 32-bit pcrel ADRP instruction "
                          "VK_ABS VK_NC");
          return ELF::R_AARCH64_NONE;
        } else {
          return ELF::R_AARCH64_ADR_PREL_PG_HI21_NC;
        }
      }
      if (SymLoc == AArch64MCExpr::VK_GOT && !IsNC)
        return R_CLS(ADR_GOT_PAGE);
      if (SymLoc == AArch64MCExpr::VK_GOTTPREL && !IsNC)
        return R_CLS(TLSIE_ADR_GOTTPREL_PAGE21);
      if (SymLoc == AArch64MCExpr::VK_TLSDESC && !IsNC)
        return R_CLS(TLSDESC_ADR_PAGE21);
      Ctx.reportError(Fixup.getLoc(),
                      "invalid symbol kind for ADRP relocation");
      return ELF::R_AARCH64_NONE;
    case AArch64::fixup_aarch64_pcrel_branch26:
      return R_CLS(JUMP26);
    case AArch64::fixup_aarch64_pcrel_call26:
      return R_CLS(CALL26);
    case AArch64::fixup_aarch64_ldr_pcrel_imm19:
      if (SymLoc == AArch64MCExpr::VK_GOTTPREL)
        return R_CLS(TLSIE_LD_GOTTPREL_PREL19);
      return R_CLS(LD_PREL_LO19);
    case AArch64::fixup_aarch64_pcrel_branch14:
      return R_CLS(TSTBR14);
    case AArch64::fixup_aarch64_pcrel_branch19:
      return R_CLS(CONDBR19);
    default:
      Ctx.reportError(Fixup.getLoc(), "Unsupported pc-relative fixup kind");
      return ELF::R_AARCH64_NONE;
    }
  } else {
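    // Absolute (non-PC-relative) fixups. Under ILP32, MOVW variants that have
    // no P32 relocation are rejected up front.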
    if (IsILP32 && isNonILP32reloc(Fixup, RefKind, Ctx))
      return ELF::R_AARCH64_NONE;
    switch ((unsigned)Fixup.getKind()) {
    case FK_Data_1:
      Ctx.reportError(Fixup.getLoc(), "1-byte data relocations not supported");
      return ELF::R_AARCH64_NONE;
    case FK_Data_2:
      return R_CLS(ABS16);
    case FK_Data_4:
      return R_CLS(ABS32);
    case FK_Data_8:
      if (IsILP32) {
        Ctx.reportError(Fixup.getLoc(),
                        "ILP32 8 byte absolute data "
                        "relocation not supported (LP64 eqv: ABS64)");
        return ELF::R_AARCH64_NONE;
      } else
        return ELF::R_AARCH64_ABS64;
    case AArch64::fixup_aarch64_add_imm12:
      if (RefKind == AArch64MCExpr::VK_DTPREL_HI12)
        return R_CLS(TLSLD_ADD_DTPREL_HI12);
      if (RefKind == AArch64MCExpr::VK_TPREL_HI12)
        return R_CLS(TLSLE_ADD_TPREL_HI12);
      if (RefKind == AArch64MCExpr::VK_DTPREL_LO12_NC)
        return R_CLS(TLSLD_ADD_DTPREL_LO12_NC);
      if (RefKind == AArch64MCExpr::VK_DTPREL_LO12)
        return R_CLS(TLSLD_ADD_DTPREL_LO12);
      if (RefKind == AArch64MCExpr::VK_TPREL_LO12_NC)
        return R_CLS(TLSLE_ADD_TPREL_LO12_NC);
      if (RefKind == AArch64MCExpr::VK_TPREL_LO12)
        return R_CLS(TLSLE_ADD_TPREL_LO12);
      if (RefKind == AArch64MCExpr::VK_TLSDESC_LO12)
        return R_CLS(TLSDESC_ADD_LO12);
      if (SymLoc == AArch64MCExpr::VK_ABS && IsNC)
        return R_CLS(ADD_ABS_LO12_NC);

      Ctx.reportError(Fixup.getLoc(),
                      "invalid fixup for add (uimm12) instruction");
      return ELF::R_AARCH64_NONE;
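    // The ldst_imm12_scaleN fixups correspond to the scaled unsigned-immediate
    // load/store forms; the scale (1/2/4/8/16 bytes) selects the matching
    // LDST8..LDST128 relocation family.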
    case AArch64::fixup_aarch64_ldst_imm12_scale1:
      if (SymLoc == AArch64MCExpr::VK_ABS && IsNC)
        return R_CLS(LDST8_ABS_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && !IsNC)
        return R_CLS(TLSLD_LDST8_DTPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && IsNC)
        return R_CLS(TLSLD_LDST8_DTPREL_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_TPREL && !IsNC)
        return R_CLS(TLSLE_LDST8_TPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_TPREL && IsNC)
        return R_CLS(TLSLE_LDST8_TPREL_LO12_NC);

      Ctx.reportError(Fixup.getLoc(),
                      "invalid fixup for 8-bit load/store instruction");
      return ELF::R_AARCH64_NONE;
    case AArch64::fixup_aarch64_ldst_imm12_scale2:
      if (SymLoc == AArch64MCExpr::VK_ABS && IsNC)
        return R_CLS(LDST16_ABS_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && !IsNC)
        return R_CLS(TLSLD_LDST16_DTPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && IsNC)
        return R_CLS(TLSLD_LDST16_DTPREL_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_TPREL && !IsNC)
        return R_CLS(TLSLE_LDST16_TPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_TPREL && IsNC)
        return R_CLS(TLSLE_LDST16_TPREL_LO12_NC);

      Ctx.reportError(Fixup.getLoc(),
                      "invalid fixup for 16-bit load/store instruction");
      return ELF::R_AARCH64_NONE;
    case AArch64::fixup_aarch64_ldst_imm12_scale4:
      if (SymLoc == AArch64MCExpr::VK_ABS && IsNC)
        return R_CLS(LDST32_ABS_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && !IsNC)
        return R_CLS(TLSLD_LDST32_DTPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && IsNC)
        return R_CLS(TLSLD_LDST32_DTPREL_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_TPREL && !IsNC)
        return R_CLS(TLSLE_LDST32_TPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_TPREL && IsNC)
        return R_CLS(TLSLE_LDST32_TPREL_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_GOT && IsNC) {
        if (IsILP32) {
          return ELF::R_AARCH64_P32_LD32_GOT_LO12_NC;
        } else {
          Ctx.reportError(Fixup.getLoc(),
                          "LP64 4 byte unchecked GOT load/store relocation "
                          "not supported (ILP32 eqv: LD32_GOT_LO12_NC)");
          return ELF::R_AARCH64_NONE;
        }
      }
      if (SymLoc == AArch64MCExpr::VK_GOT && !IsNC) {
        if (IsILP32) {
          Ctx.reportError(Fixup.getLoc(),
                          "ILP32 4 byte checked GOT load/store relocation "
                          "not supported (unchecked eqv: LD32_GOT_LO12_NC)");
        } else {
          Ctx.reportError(Fixup.getLoc(),
                          "LP64 4 byte checked GOT load/store relocation "
                          "not supported (unchecked/ILP32 eqv: "
                          "LD32_GOT_LO12_NC)");
        }
        return ELF::R_AARCH64_NONE;
      }
      if (SymLoc == AArch64MCExpr::VK_GOTTPREL && IsNC) {
        if (IsILP32) {
          return ELF::R_AARCH64_P32_TLSIE_LD32_GOTTPREL_LO12_NC;
        } else {
          Ctx.reportError(Fixup.getLoc(),
                          "LP64 32-bit load/store "
                          "relocation not supported (ILP32 eqv: "
                          "TLSIE_LD32_GOTTPREL_LO12_NC)");
          return ELF::R_AARCH64_NONE;
        }
      }
      if (SymLoc == AArch64MCExpr::VK_TLSDESC && !IsNC) {
        if (IsILP32) {
          return ELF::R_AARCH64_P32_TLSDESC_LD32_LO12;
        } else {
          Ctx.reportError(Fixup.getLoc(),
                          "LP64 4 byte TLSDESC load/store relocation "
                          "not supported (ILP32 eqv: TLSDESC_LD32_LO12)");
          return ELF::R_AARCH64_NONE;
        }
      }

      Ctx.reportError(Fixup.getLoc(),
                      "invalid fixup for 32-bit load/store instruction "
                      "fixup_aarch64_ldst_imm12_scale4");
      return ELF::R_AARCH64_NONE;
    case AArch64::fixup_aarch64_ldst_imm12_scale8:
      if (SymLoc == AArch64MCExpr::VK_ABS && IsNC)
        return R_CLS(LDST64_ABS_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_GOT && IsNC) {
        if (!IsILP32) {
          return ELF::R_AARCH64_LD64_GOT_LO12_NC;
        } else {
          Ctx.reportError(Fixup.getLoc(), "ILP32 64-bit load/store "
                                          "relocation not supported (LP64 eqv: "
                                          "LD64_GOT_LO12_NC)");
          return ELF::R_AARCH64_NONE;
        }
      }
      if (SymLoc == AArch64MCExpr::VK_DTPREL && !IsNC)
        return R_CLS(TLSLD_LDST64_DTPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && IsNC)
        return R_CLS(TLSLD_LDST64_DTPREL_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_TPREL && !IsNC)
        return R_CLS(TLSLE_LDST64_TPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_TPREL && IsNC)
        return R_CLS(TLSLE_LDST64_TPREL_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_GOTTPREL && IsNC) {
        if (!IsILP32) {
          return ELF::R_AARCH64_TLSIE_LD64_GOTTPREL_LO12_NC;
        } else {
          Ctx.reportError(Fixup.getLoc(), "ILP32 64-bit load/store "
                                          "relocation not supported (LP64 eqv: "
                                          "TLSIE_LD64_GOTTPREL_LO12_NC)");
          return ELF::R_AARCH64_NONE;
        }
      }
      if (SymLoc == AArch64MCExpr::VK_TLSDESC) {
        if (!IsILP32) {
          return ELF::R_AARCH64_TLSDESC_LD64_LO12;
        } else {
          Ctx.reportError(Fixup.getLoc(), "ILP32 64-bit load/store "
                                          "relocation not supported (LP64 eqv: "
                                          "TLSDESC_LD64_LO12)");
          return ELF::R_AARCH64_NONE;
        }
      }
      Ctx.reportError(Fixup.getLoc(),
                      "invalid fixup for 64-bit load/store instruction");
      return ELF::R_AARCH64_NONE;
    case AArch64::fixup_aarch64_ldst_imm12_scale16:
      if (SymLoc == AArch64MCExpr::VK_ABS && IsNC)
        return R_CLS(LDST128_ABS_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && !IsNC)
        return R_CLS(TLSLD_LDST128_DTPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && IsNC)
        return R_CLS(TLSLD_LDST128_DTPREL_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_TPREL && !IsNC)
        return R_CLS(TLSLE_LDST128_TPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_TPREL && IsNC)
        return R_CLS(TLSLE_LDST128_TPREL_LO12_NC);

      Ctx.reportError(Fixup.getLoc(),
                      "invalid fixup for 128-bit load/store instruction");
      return ELF::R_AARCH64_NONE;
    // For ILP32, MOVW variants with no P32 relocation never reach this point;
    // they were already rejected by isNonILP32reloc above.
    case AArch64::fixup_aarch64_movw:
      if (RefKind == AArch64MCExpr::VK_ABS_G3)
        return ELF::R_AARCH64_MOVW_UABS_G3;
      if (RefKind == AArch64MCExpr::VK_ABS_G2)
        return ELF::R_AARCH64_MOVW_UABS_G2;
      if (RefKind == AArch64MCExpr::VK_ABS_G2_S)
        return ELF::R_AARCH64_MOVW_SABS_G2;
      if (RefKind == AArch64MCExpr::VK_ABS_G2_NC)
        return ELF::R_AARCH64_MOVW_UABS_G2_NC;
      if (RefKind == AArch64MCExpr::VK_ABS_G1)
        return R_CLS(MOVW_UABS_G1);
      if (RefKind == AArch64MCExpr::VK_ABS_G1_S)
        return ELF::R_AARCH64_MOVW_SABS_G1;
      if (RefKind == AArch64MCExpr::VK_ABS_G1_NC)
        return ELF::R_AARCH64_MOVW_UABS_G1_NC;
      if (RefKind == AArch64MCExpr::VK_ABS_G0)
        return R_CLS(MOVW_UABS_G0);
      if (RefKind == AArch64MCExpr::VK_ABS_G0_S)
        return R_CLS(MOVW_SABS_G0);
      if (RefKind == AArch64MCExpr::VK_ABS_G0_NC)
        return R_CLS(MOVW_UABS_G0_NC);
      if (RefKind == AArch64MCExpr::VK_DTPREL_G2)
        return ELF::R_AARCH64_TLSLD_MOVW_DTPREL_G2;
      if (RefKind == AArch64MCExpr::VK_DTPREL_G1)
        return R_CLS(TLSLD_MOVW_DTPREL_G1);
      if (RefKind == AArch64MCExpr::VK_DTPREL_G1_NC)
        return ELF::R_AARCH64_TLSLD_MOVW_DTPREL_G1_NC;
      if (RefKind == AArch64MCExpr::VK_DTPREL_G0)
        return R_CLS(TLSLD_MOVW_DTPREL_G0);
      if (RefKind == AArch64MCExpr::VK_DTPREL_G0_NC)
        return R_CLS(TLSLD_MOVW_DTPREL_G0_NC);
      if (RefKind == AArch64MCExpr::VK_TPREL_G2)
        return ELF::R_AARCH64_TLSLE_MOVW_TPREL_G2;
      if (RefKind == AArch64MCExpr::VK_TPREL_G1)
        return R_CLS(TLSLE_MOVW_TPREL_G1);
      if (RefKind == AArch64MCExpr::VK_TPREL_G1_NC)
        return ELF::R_AARCH64_TLSLE_MOVW_TPREL_G1_NC;
      if (RefKind == AArch64MCExpr::VK_TPREL_G0)
        return R_CLS(TLSLE_MOVW_TPREL_G0);
      if (RefKind == AArch64MCExpr::VK_TPREL_G0_NC)
        return R_CLS(TLSLE_MOVW_TPREL_G0_NC);
      if (RefKind == AArch64MCExpr::VK_GOTTPREL_G1)
        return ELF::R_AARCH64_TLSIE_MOVW_GOTTPREL_G1;
      if (RefKind == AArch64MCExpr::VK_GOTTPREL_G0_NC)
        return ELF::R_AARCH64_TLSIE_MOVW_GOTTPREL_G0_NC;
      Ctx.reportError(Fixup.getLoc(),
                      "invalid fixup for movz/movk instruction");
      return ELF::R_AARCH64_NONE;
    case AArch64::fixup_aarch64_tlsdesc_call:
      return R_CLS(TLSDESC_CALL);
    default:
      Ctx.reportError(Fixup.getLoc(), "Unknown ELF relocation type");
      return ELF::R_AARCH64_NONE;
    }
  }

  llvm_unreachable("Unimplemented fixup -> relocation");
}

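// Construct the AArch64 ELF object writer: the target-specific relocation
// logic above, wrapped in the generic ELF object writer for the given stream.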
std::unique_ptr<MCObjectWriter>
llvm::createAArch64ELFObjectWriter(raw_pwrite_stream &OS, uint8_t OSABI,
                                   bool IsLittleEndian, bool IsILP32) {
  auto MOTW =
      llvm::make_unique<AArch64ELFObjectWriter>(OSABI, IsLittleEndian, IsILP32);
  return createELFObjectWriter(std::move(MOTW), OS, IsLittleEndian);
}