1 //===- lib/FileFormat/MachO/ArchHandler_arm64.cpp -------------------------===//
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
7 //===----------------------------------------------------------------------===//
9 #include "ArchHandler.h"
11 #include "MachONormalizedFileBinaryUtils.h"
12 #include "llvm/ADT/StringRef.h"
13 #include "llvm/ADT/StringSwitch.h"
14 #include "llvm/ADT/Triple.h"
15 #include "llvm/Support/Endian.h"
16 #include "llvm/Support/ErrorHandling.h"
17 #include "llvm/Support/Format.h"
19 using namespace llvm::MachO;
20 using namespace lld::mach_o::normalized;
25 using llvm::support::ulittle32_t;
26 using llvm::support::ulittle64_t;
28 using llvm::support::little32_t;
29 using llvm::support::little64_t;
// NOTE(review): This file appears to be a line-sampled excerpt of lld's
// Mach-O arm64 ArchHandler. Each line carries a stray leading number and
// many lines are missing (gaps in that numbering: 'public:'/'private:'
// labels, 'case' labels, 'break's, return values, closing braces).
// Comments below describe only what the visible text shows; confirm
// against the upstream file before relying on any of it.
//
// Arch-specific handler for arm64 (AArch64) Mach-O: classifies
// relocations into Reference kinds, applies fixups, and describes stubs.
31 class ArchHandler_arm64 : public ArchHandler {
33 ArchHandler_arm64() = default;
34 ~ArchHandler_arm64() override = default;
// Table mapping Arm64Kind values to their printable names.
36 const Registry::KindStrings *kindStrings() override { return _sKindStrings; }
38 Reference::KindArch kindArch() override {
39 return Reference::KindArch::AArch64;
42 /// Used by GOTPass to locate GOT References
43 bool isGOTAccess(const Reference &ref, bool &canBypassGOT) override {
44 if (ref.kindNamespace() != Reference::KindNamespace::mach_o)
46 assert(ref.kindArch() == Reference::KindArch::AArch64);
// Switch body is incomplete in this view; most case labels are missing.
47 switch (ref.kindValue()) {
53 case unwindCIEToPersonalityFunction:
62 /// Used by GOTPass to update GOT References.
63 void updateReferenceToGOT(const Reference *ref, bool targetNowGOT) override {
64 // If GOT slot was instanciated, transform:
65 // gotPage21/gotOffset12 -> page21/offset12scale8
66 // If GOT slot optimized away, transform:
67 // gotPage21/gotOffset12 -> page21/addOffset12
68 assert(ref->kindNamespace() == Reference::KindNamespace::mach_o);
69 assert(ref->kindArch() == Reference::KindArch::AArch64);
// Case labels for this switch are missing from the sampled view; the
// visible setKindValue calls rewrite GOT/unwind kinds in place.
70 switch (ref->kindValue()) {
72 const_cast<Reference *>(ref)->setKindValue(page21);
75 const_cast<Reference *>(ref)->setKindValue(targetNowGOT ?
76 offset12scale8 : addOffset12);
79 const_cast<Reference *>(ref)->setKindValue(delta32);
82 const_cast<Reference *>(ref)->setKindValue(imageOffset);
85 llvm_unreachable("Not a GOT reference");
// Stub (lazy-binding trampoline) layout description for this arch.
89 const StubInfo &stubInfo() override { return _sStubInfo; }
91 bool isCallSite(const Reference &) override;
92 bool isNonCallBranch(const Reference &) override {
96 bool isPointer(const Reference &) override;
97 bool isPairedReloc(const normalized::Relocation &) override;
99 bool needsCompactUnwind() override {
// Kind used for mach_header-relative offsets in final images.
102 Reference::KindValue imageOffsetKind() override {
105 Reference::KindValue imageOffsetKindIndirect() override {
106 return imageOffsetGot;
109 Reference::KindValue unwindRefToPersonalityFunctionKind() override {
110 return unwindCIEToPersonalityFunction;
// Return value missing from this view — presumably negDelta32 upstream;
// TODO confirm.
113 Reference::KindValue unwindRefToCIEKind() override {
117 Reference::KindValue unwindRefToFunctionKind() override {
118 return unwindFDEToFunction;
121 Reference::KindValue unwindRefToEhFrameKind() override {
122 return unwindInfoToEhFrame;
125 Reference::KindValue pointerKind() override {
129 Reference::KindValue lazyImmediateLocationKind() override {
130 return lazyImmediateLocation;
133 uint32_t dwarfCompactUnwindType() override {
// Decode one relocation into (kind, target, addend); defined below.
137 llvm::Error getReferenceInfo(const normalized::Relocation &reloc,
138 const DefinedAtom *inAtom,
139 uint32_t offsetInAtom,
140 uint64_t fixupAddress, bool isBig,
141 FindAtomBySectionAndAddress atomFromAddress,
142 FindAtomBySymbolIndex atomFromSymbolIndex,
143 Reference::KindValue *kind,
144 const lld::Atom **target,
145 Reference::Addend *addend) override;
// Decode a two-relocation pair (ADDEND/SUBTRACTOR + payload); the
// 'llvm::Error' return type line is missing from this view.
147 getPairReferenceInfo(const normalized::Relocation &reloc1,
148 const normalized::Relocation &reloc2,
149 const DefinedAtom *inAtom,
150 uint32_t offsetInAtom,
151 uint64_t fixupAddress, bool isBig, bool scatterable,
152 FindAtomBySectionAndAddress atomFromAddress,
153 FindAtomBySymbolIndex atomFromSymbolIndex,
154 Reference::KindValue *kind,
155 const lld::Atom **target,
156 Reference::Addend *addend) override;
// C-string atoms keep a local symbol when emitting relocatable output.
158 bool needsLocalSymbolInRelocatableFile(const DefinedAtom *atom) override {
159 return (atom->contentType() == DefinedAtom::typeCString);
162 void generateAtomContent(const DefinedAtom &atom, bool relocatable,
163 FindAddressForAtom findAddress,
164 FindAddressForAtom findSectionAddress,
165 uint64_t imageBaseAddress,
166 llvm::MutableArrayRef<uint8_t> atomContentBuffer) override;
168 void appendSectionRelocations(const DefinedAtom &atom,
169 uint64_t atomSectionOffset,
170 const Reference &ref,
171 FindSymbolIndexForAtom symbolIndexForAtom,
172 FindSectionIndexForAtom sectionIndexForAtom,
173 FindAddressForAtom addressForAtom,
174 normalized::Relocations &relocs) override;
177 static const Registry::KindStrings _sKindStrings[];
178 static const StubInfo _sStubInfo;
// Reference kinds understood by this handler; values double as indices
// into _sKindStrings.
180 enum Arm64Kind : Reference::KindValue {
181 invalid, /// for error condition
183 // Kinds found in mach-o .o files:
184 branch26, /// ex: bl _foo
185 page21, /// ex: adrp x1, _foo@PAGE
186 offset12, /// ex: ldrb w0, [x1, _foo@PAGEOFF]
187 offset12scale2, /// ex: ldrs w0, [x1, _foo@PAGEOFF]
188 offset12scale4, /// ex: ldr w0, [x1, _foo@PAGEOFF]
189 offset12scale8, /// ex: ldr x0, [x1, _foo@PAGEOFF]
190 offset12scale16, /// ex: ldr q0, [x1, _foo@PAGEOFF]
191 gotPage21, /// ex: adrp x1, _foo@GOTPAGE
192 gotOffset12, /// ex: ldr w0, [x1, _foo@GOTPAGEOFF]
193 tlvPage21, /// ex: adrp x1, _foo@TLVPAGE
194 tlvOffset12, /// ex: ldr w0, [x1, _foo@TLVPAGEOFF]
196 pointer64, /// ex: .quad _foo
197 delta64, /// ex: .quad _foo - .
198 delta32, /// ex: .long _foo - .
199 negDelta32, /// ex: .long . - _foo
200 pointer64ToGOT, /// ex: .quad _foo@GOT
201 delta32ToGOT, /// ex: .long _foo@GOT - .
203 // Kinds introduced by Passes:
204 addOffset12, /// Location contains LDR to change into ADD.
205 lazyPointer, /// Location contains a lazy pointer.
206 lazyImmediateLocation, /// Location contains immediate value used in stub.
207 imageOffset, /// Location contains offset of atom in final image
208 imageOffsetGot, /// Location contains offset of GOT entry for atom in
209 /// final image (typically personality function).
210 unwindCIEToPersonalityFunction, /// Nearly delta32ToGOT, but cannot be
211 /// rematerialized in relocatable object
212 /// (yay for implicit contracts!).
213 unwindFDEToFunction, /// Nearly delta64, but cannot be rematerialized in
214 /// relocatable object (yay for implicit contracts!).
215 unwindInfoToEhFrame, /// Fix low 24 bits of compact unwind encoding to
216 /// refer to __eh_frame entry.
// Fixup writers: 'Final' patches bytes for an executable image,
// 'Relocatable' patches bytes for a -r (.o) output.
219 void applyFixupFinal(const Reference &ref, uint8_t *location,
220 uint64_t fixupAddress, uint64_t targetAddress,
221 uint64_t inAtomAddress, uint64_t imageBaseAddress,
222 FindAddressForAtom findSectionAddress);
224 void applyFixupRelocatable(const Reference &ref, uint8_t *location,
225 uint64_t fixupAddress, uint64_t targetAddress,
226 uint64_t inAtomAddress, bool targetUnnamed);
228 // Utility functions for inspecting/updating instructions.
229 static uint32_t setDisplacementInBranch26(uint32_t instr, int32_t disp);
230 static uint32_t setDisplacementInADRP(uint32_t instr, int64_t disp);
231 static Arm64Kind offset12KindFromInstruction(uint32_t instr);
232 static uint32_t setImm12(uint32_t instr, uint32_t offset);
// Name table for Arm64Kind values, used when dumping references.
// NOTE(review): the terminating LLD_KIND_STRING_END entry and closing
// '};' are missing from this sampled view — confirm against upstream.
235 const Registry::KindStrings ArchHandler_arm64::_sKindStrings[] = {
236 LLD_KIND_STRING_ENTRY(invalid),
237 LLD_KIND_STRING_ENTRY(branch26),
238 LLD_KIND_STRING_ENTRY(page21),
239 LLD_KIND_STRING_ENTRY(offset12),
240 LLD_KIND_STRING_ENTRY(offset12scale2),
241 LLD_KIND_STRING_ENTRY(offset12scale4),
242 LLD_KIND_STRING_ENTRY(offset12scale8),
243 LLD_KIND_STRING_ENTRY(offset12scale16),
244 LLD_KIND_STRING_ENTRY(gotPage21),
245 LLD_KIND_STRING_ENTRY(gotOffset12),
246 LLD_KIND_STRING_ENTRY(tlvPage21),
247 LLD_KIND_STRING_ENTRY(tlvOffset12),
248 LLD_KIND_STRING_ENTRY(pointer64),
249 LLD_KIND_STRING_ENTRY(delta64),
250 LLD_KIND_STRING_ENTRY(delta32),
251 LLD_KIND_STRING_ENTRY(negDelta32),
252 LLD_KIND_STRING_ENTRY(pointer64ToGOT),
253 LLD_KIND_STRING_ENTRY(delta32ToGOT),
255 LLD_KIND_STRING_ENTRY(addOffset12),
256 LLD_KIND_STRING_ENTRY(lazyPointer),
257 LLD_KIND_STRING_ENTRY(lazyImmediateLocation),
258 LLD_KIND_STRING_ENTRY(imageOffset),
259 LLD_KIND_STRING_ENTRY(imageOffsetGot),
260 LLD_KIND_STRING_ENTRY(unwindCIEToPersonalityFunction),
261 LLD_KIND_STRING_ENTRY(unwindFDEToFunction),
262 LLD_KIND_STRING_ENTRY(unwindInfoToEhFrame),
// Stub descriptor: byte templates for the lazy-binding stub, stub helper,
// and shared stub-helper-common, plus the references each template needs.
// NOTE(review): several scalar fields (sizes, alignments, section names)
// and the closing '};' are missing from this sampled view.
267 const ArchHandler::StubInfo ArchHandler_arm64::_sStubInfo = {
270 // Lazy pointer references
271 { Reference::KindArch::AArch64, pointer64, 0, 0 },
272 { Reference::KindArch::AArch64, lazyPointer, 0, 0 },
274 // GOT pointer to dyld_stub_binder
275 { Reference::KindArch::AArch64, pointer64, 0, 0 },
277 // arm64 code alignment 2^1
280 // Stub size and code
282 { 0x10, 0x00, 0x00, 0x90, // ADRP X16, lazy_pointer@page
283 0x10, 0x02, 0x40, 0xF9, // LDR X16, [X16, lazy_pointer@pageoff]
284 0x00, 0x02, 0x1F, 0xD6 }, // BR X16
285 { Reference::KindArch::AArch64, page21, 0, 0 },
286 { true, offset12scale8, 4, 0 },
288 // Stub Helper size and code
290 { 0x50, 0x00, 0x00, 0x18, // LDR W16, L0
291 0x00, 0x00, 0x00, 0x14, // LDR B helperhelper
292 0x00, 0x00, 0x00, 0x00 }, // L0: .long 0
293 { Reference::KindArch::AArch64, lazyImmediateLocation, 8, 0 },
294 { Reference::KindArch::AArch64, branch26, 4, 0 },
296 // Stub helper image cache content type
297 DefinedAtom::typeGOT,
299 // Stub Helper-Common size and code
301 // Stub helper alignment
303 { 0x11, 0x00, 0x00, 0x90, // ADRP X17, dyld_ImageLoaderCache@page
304 0x31, 0x02, 0x00, 0x91, // ADD X17, X17, dyld_ImageLoaderCache@pageoff
305 0xF0, 0x47, 0xBF, 0xA9, // STP X16/X17, [SP, #-16]!
306 0x10, 0x00, 0x00, 0x90, // ADRP X16, _fast_lazy_bind@page
307 0x10, 0x02, 0x40, 0xF9, // LDR X16, [X16,_fast_lazy_bind@pageoff]
308 0x00, 0x02, 0x1F, 0xD6 }, // BR X16
309 { Reference::KindArch::AArch64, page21, 0, 0 },
310 { true, offset12, 4, 0 },
311 { Reference::KindArch::AArch64, page21, 12, 0 },
312 { true, offset12scale8, 16, 0 }
// A reference is a call site iff it is a mach_o-namespace branch26
// (bl/b). NOTE(review): the early-return value for the non-mach_o case
// and the closing brace are missing from this sampled view.
315 bool ArchHandler_arm64::isCallSite(const Reference &ref) {
316 if (ref.kindNamespace() != Reference::KindNamespace::mach_o)
318 assert(ref.kindArch() == Reference::KindArch::AArch64);
319 return (ref.kindValue() == branch26);
// A reference is a plain pointer iff its kind is pointer64.
// NOTE(review): the early-return value and closing brace are missing
// from this sampled view.
322 bool ArchHandler_arm64::isPointer(const Reference &ref) {
323 if (ref.kindNamespace() != Reference::KindNamespace::mach_o)
325 assert(ref.kindArch() == Reference::KindArch::AArch64);
326 Reference::KindValue kind = ref.kindValue();
327 return (kind == pointer64);
// ADDEND and SUBTRACTOR relocations are always the first half of a
// two-relocation pair; everything else stands alone.
330 bool ArchHandler_arm64::isPairedReloc(const Relocation &r) {
331 return ((r.type == ARM64_RELOC_ADDEND) || (r.type == ARM64_RELOC_SUBTRACTOR));
// Patch the 26-bit imm field of a B/BL instruction. The byte displacement
// must fit in a signed 28-bit range (+/-128MB); it is stored >>2 since
// instructions are 4-byte aligned.
334 uint32_t ArchHandler_arm64::setDisplacementInBranch26(uint32_t instr,
335 int32_t displacement) {
336 assert((displacement <= 134217727) && (displacement > (-134217728)) &&
337 "arm64 branch out of range");
338 return (instr & 0xFC000000) | ((uint32_t)(displacement >> 2) & 0x03FFFFFF);
// Patch the split immediate of an ADRP instruction (page delta, +/-4GB).
// ADRP encodes a 21-bit page displacement as immhi (bits 23:5, 19 bits)
// plus immlo (bits 30:29, 2 bits); the shifts below scatter the page
// number into those fields.
341 uint32_t ArchHandler_arm64::setDisplacementInADRP(uint32_t instruction,
342 int64_t displacement) {
343 assert((displacement <= 0x100000000LL) && (displacement > (-0x100000000LL)) &&
344 "arm64 ADRP out of range");
345 assert(((instruction & 0x9F000000) == 0x90000000) &&
346 "reloc not on ADRP instruction");
347 uint32_t immhi = (displacement >> 9) & (0x00FFFFE0);
348 uint32_t immlo = (displacement << 17) & (0x60000000);
349 return (instruction & 0x9F00001F) | immlo | immhi;
// Inspect a load/store instruction to decide which scaled offset12 kind
// its PAGEOFF12 relocation needs (the imm12 field is scaled by the
// access size). Bits 31:30 give the size class; 0x04800000 marks the
// 128-bit (q-register) form. NOTE(review): the case labels, the
// unscaled-offset12 fallback return, and closing braces are missing
// from this sampled view.
352 ArchHandler_arm64::Arm64Kind
353 ArchHandler_arm64::offset12KindFromInstruction(uint32_t instruction) {
354 if (instruction & 0x08000000) {
355 switch ((instruction >> 30) & 0x3) {
357 if ((instruction & 0x04800000) == 0x04800000)
358 return offset12scale16;
361 return offset12scale2;
363 return offset12scale4;
365 return offset12scale8;
// Patch the 12-bit unsigned immediate of an ADD/LDR/STR-immediate
// instruction (imm12 lives in bits 21:10).
371 uint32_t ArchHandler_arm64::setImm12(uint32_t instruction, uint32_t offset) {
372 assert(((offset & 0xFFFFF000) == 0) && "imm12 offset out of range");
373 uint32_t imm12 = offset << 10;
374 return (instruction & 0xFFC003FF) | imm12;
// Translate one mach-o relocation into an atom-model Reference:
// sets *kind, *target, and *addend for the supported patterns below.
// NOTE(review): this sampled view is missing many lines inside the
// cases — the '*kind = ...' assignments for several cases, the
// 'return ec;' after each atomFromSymbolIndex call, the '*addend'
// reads, and the closing braces. Comments describe visible text only.
377 llvm::Error ArchHandler_arm64::getReferenceInfo(
378 const Relocation &reloc, const DefinedAtom *inAtom, uint32_t offsetInAtom,
379 uint64_t fixupAddress, bool isBig,
380 FindAtomBySectionAndAddress atomFromAddress,
381 FindAtomBySymbolIndex atomFromSymbolIndex, Reference::KindValue *kind,
382 const lld::Atom **target, Reference::Addend *addend) {
383 const uint8_t *fixupContent = &inAtom->rawContent()[offsetInAtom];
384 switch (relocPattern(reloc)) {
385 case ARM64_RELOC_BRANCH26 | rPcRel | rExtern | rLength4:
388 if (auto ec = atomFromSymbolIndex(reloc.symbol, target))
391 return llvm::Error::success();
392 case ARM64_RELOC_PAGE21 | rPcRel | rExtern | rLength4:
393 // ex: adrp x1, _foo@PAGE
395 if (auto ec = atomFromSymbolIndex(reloc.symbol, target))
398 return llvm::Error::success();
399 case ARM64_RELOC_PAGEOFF12 | rExtern | rLength4:
400 // ex: ldr x0, [x1, _foo@PAGEOFF]
// The instruction bytes decide which scaled offset12 kind applies.
401 *kind = offset12KindFromInstruction(*(const little32_t *)fixupContent);
402 if (auto ec = atomFromSymbolIndex(reloc.symbol, target))
405 return llvm::Error::success();
406 case ARM64_RELOC_GOT_LOAD_PAGE21 | rPcRel | rExtern | rLength4:
407 // ex: adrp x1, _foo@GOTPAGE
409 if (auto ec = atomFromSymbolIndex(reloc.symbol, target))
412 return llvm::Error::success();
413 case ARM64_RELOC_GOT_LOAD_PAGEOFF12 | rExtern | rLength4:
414 // ex: ldr x0, [x1, _foo@GOTPAGEOFF]
416 if (auto ec = atomFromSymbolIndex(reloc.symbol, target))
419 return llvm::Error::success();
420 case ARM64_RELOC_TLVP_LOAD_PAGE21 | rPcRel | rExtern | rLength4:
421 // ex: adrp x1, _foo@TLVPAGE
423 if (auto ec = atomFromSymbolIndex(reloc.symbol, target))
426 return llvm::Error::success();
427 case ARM64_RELOC_TLVP_LOAD_PAGEOFF12 | rExtern | rLength4:
428 // ex: ldr x0, [x1, _foo@TLVPAGEOFF]
430 if (auto ec = atomFromSymbolIndex(reloc.symbol, target))
433 return llvm::Error::success();
434 case ARM64_RELOC_UNSIGNED | rExtern | rLength8:
435 // ex: .quad _foo + N
437 if (auto ec = atomFromSymbolIndex(reloc.symbol, target))
// Addend is the 64-bit little-endian value stored at the fixup site.
439 *addend = *(const little64_t *)fixupContent;
440 return llvm::Error::success();
441 case ARM64_RELOC_UNSIGNED | rLength8:
442 // ex: .quad Lfoo + N
// Non-extern: reloc.symbol is a section index; resolve by address.
444 return atomFromAddress(reloc.symbol, *(const little64_t *)fixupContent,
446 case ARM64_RELOC_POINTER_TO_GOT | rExtern | rLength8:
447 // ex: .quad _foo@GOT
448 *kind = pointer64ToGOT;
449 if (auto ec = atomFromSymbolIndex(reloc.symbol, target))
452 return llvm::Error::success();
453 case ARM64_RELOC_POINTER_TO_GOT | rPcRel | rExtern | rLength4:
454 // ex: .long _foo@GOT - .
456 // If we are in an .eh_frame section, then the kind of the relocation should
457 // not be delta32ToGOT. It may instead be unwindCIEToPersonalityFunction.
458 if (inAtom->contentType() == DefinedAtom::typeCFI)
459 *kind = unwindCIEToPersonalityFunction;
461 *kind = delta32ToGOT;
463 if (auto ec = atomFromSymbolIndex(reloc.symbol, target))
466 return llvm::Error::success();
// Default: any pattern not matched above is rejected.
468 return llvm::make_error<GenericError>("unsupported arm64 relocation type");
// Translate a two-relocation pair (ADDEND+payload, or SUBTRACTOR+
// UNSIGNED) into a single Reference. For ADDEND pairs the addend is
// carried in reloc1.symbol; for SUBTRACTOR pairs it is read from the
// fixup bytes. NOTE(review): this sampled view is missing lines inside
// the cases ('*kind = ...' assignments, 'return ec;' lines, the delta32
// kind assignment, closing braces); comments describe visible text only.
472 llvm::Error ArchHandler_arm64::getPairReferenceInfo(
473 const normalized::Relocation &reloc1, const normalized::Relocation &reloc2,
474 const DefinedAtom *inAtom, uint32_t offsetInAtom, uint64_t fixupAddress,
475 bool swap, bool scatterable, FindAtomBySectionAndAddress atomFromAddress,
476 FindAtomBySymbolIndex atomFromSymbolIndex, Reference::KindValue *kind,
477 const lld::Atom **target, Reference::Addend *addend) {
478 const uint8_t *fixupContent = &inAtom->rawContent()[offsetInAtom];
// Dispatch on both relocations at once: reloc1's pattern in the high
// 16 bits, reloc2's in the low 16.
479 switch (relocPattern(reloc1) << 16 | relocPattern(reloc2)) {
480 case ((ARM64_RELOC_ADDEND | rLength4) << 16 |
481 ARM64_RELOC_BRANCH26 | rPcRel | rExtern | rLength4):
484 if (auto ec = atomFromSymbolIndex(reloc2.symbol, target))
486 *addend = reloc1.symbol;
487 return llvm::Error::success();
488 case ((ARM64_RELOC_ADDEND | rLength4) << 16 |
489 ARM64_RELOC_PAGE21 | rPcRel | rExtern | rLength4):
490 // ex: adrp x1, _foo@PAGE
492 if (auto ec = atomFromSymbolIndex(reloc2.symbol, target))
494 *addend = reloc1.symbol;
495 return llvm::Error::success();
496 case ((ARM64_RELOC_ADDEND | rLength4) << 16 |
497 ARM64_RELOC_PAGEOFF12 | rExtern | rLength4): {
498 // ex: ldr w0, [x1, _foo@PAGEOFF]
499 uint32_t cont32 = (int32_t)*(const little32_t *)fixupContent;
500 *kind = offset12KindFromInstruction(cont32);
501 if (auto ec = atomFromSymbolIndex(reloc2.symbol, target))
503 *addend = reloc1.symbol;
504 return llvm::Error::success();
506 case ((ARM64_RELOC_SUBTRACTOR | rExtern | rLength8) << 16 |
507 ARM64_RELOC_UNSIGNED | rExtern | rLength8):
508 // ex: .quad _foo - .
509 if (auto ec = atomFromSymbolIndex(reloc2.symbol, target))
512 // If we are in an .eh_frame section, then the kind of the relocation should
513 // not be delta64. It may instead be unwindFDEToFunction.
514 if (inAtom->contentType() == DefinedAtom::typeCFI)
515 *kind = unwindFDEToFunction;
519 // The offsets of the 2 relocations must match
520 if (reloc1.offset != reloc2.offset)
521 return llvm::make_error<GenericError>(
522 "paired relocs must have the same offset")
523 *addend = (int64_t)*(const little64_t *)fixupContent + offsetInAtom;
524 return llvm::Error::success();
525 case ((ARM64_RELOC_SUBTRACTOR | rExtern | rLength4) << 16 |
526 ARM64_RELOC_UNSIGNED | rExtern | rLength4):
527 // ex: .quad _foo - .
529 if (auto ec = atomFromSymbolIndex(reloc2.symbol, target))
531 *addend = (int32_t)*(const little32_t *)fixupContent + offsetInAtom;
532 return llvm::Error::success();
// Default: unknown pairings are rejected.
534 return llvm::make_error<GenericError>("unsupported arm64 relocation pair");
// Copy an atom's raw bytes into the output buffer, then walk its
// references and apply each fixup in place — relocatable-style for -r
// output, final-style otherwise. NOTE(review): the 'if (relocatable)'
// line and closing braces are missing from this sampled view.
538 void ArchHandler_arm64::generateAtomContent(
539 const DefinedAtom &atom, bool relocatable, FindAddressForAtom findAddress,
540 FindAddressForAtom findSectionAddress, uint64_t imageBaseAddress,
541 llvm::MutableArrayRef<uint8_t> atomContentBuffer) {
543 std::copy(atom.rawContent().begin(), atom.rawContent().end(),
544 atomContentBuffer.begin());
547 if (atom.begin() != atom.end()) {
548 DEBUG_WITH_TYPE("atom-content", llvm::dbgs()
549 << "Applying fixups to atom:\n"
551 << llvm::format(" 0x%09lX", &atom)
553 << atom.file().ordinal()
559 << atom.contentType()
563 for (const Reference *ref : atom) {
564 uint32_t offset = ref->offsetInAtom();
565 const Atom *target = ref->target();
// Unnamed targets are relocated via section+offset in -r mode.
566 bool targetUnnamed = target->name().empty();
567 uint64_t targetAddress = 0;
// Only defined atoms have a final address; undefined/shared stay 0.
568 if (isa<DefinedAtom>(target))
569 targetAddress = findAddress(*target);
570 uint64_t atomAddress = findAddress(atom);
571 uint64_t fixupAddress = atomAddress + offset;
573 applyFixupRelocatable(*ref, &atomContentBuffer[offset], fixupAddress,
574 targetAddress, atomAddress, targetUnnamed);
576 applyFixupFinal(*ref, &atomContentBuffer[offset], fixupAddress,
577 targetAddress, atomAddress, imageBaseAddress,
// Patch the bytes at 'loc' for a final (non-relocatable) image, based on
// the reference kind: branch/page/offset immediates are written into the
// instruction, pointer and delta kinds are stored as little-endian data.
// NOTE(review): this sampled view is missing the 'case' labels, 'return'
// statements between cases, declarations of value32/value64, and closing
// braces; comments describe only the visible text.
583 void ArchHandler_arm64::applyFixupFinal(const Reference &ref, uint8_t *loc,
584 uint64_t fixupAddress,
585 uint64_t targetAddress,
586 uint64_t inAtomAddress,
587 uint64_t imageBaseAddress,
588 FindAddressForAtom findSectionAddress) {
589 if (ref.kindNamespace() != Reference::KindNamespace::mach_o)
591 assert(ref.kindArch() == Reference::KindArch::AArch64);
// View the fixup location as 32- and 64-bit little-endian slots.
592 ulittle32_t *loc32 = reinterpret_cast<ulittle32_t *>(loc);
593 ulittle64_t *loc64 = reinterpret_cast<ulittle64_t *>(loc);
594 int32_t displacement;
595 uint32_t instruction;
598 switch (static_cast<Arm64Kind>(ref.kindValue())) {
// branch26: PC-relative branch displacement.
600 displacement = (targetAddress - fixupAddress) + ref.addend();
601 *loc32 = setDisplacementInBranch26(*loc32, displacement);
// page21: 4KB-page delta between target page and fixup page.
607 ((targetAddress + ref.addend()) & (-4096)) - (fixupAddress & (-4096));
608 *loc32 = setDisplacementInADRP(*loc32, displacement);
// offset12 family: low 12 bits of the target, scaled by access size.
613 displacement = (targetAddress + ref.addend()) & 0x00000FFF;
614 *loc32 = setImm12(*loc32, displacement);
617 displacement = (targetAddress + ref.addend()) & 0x00000FFF;
618 assert(((displacement & 0x1) == 0) &&
619 "scaled imm12 not accessing 2-byte aligneds");
620 *loc32 = setImm12(*loc32, displacement >> 1);
623 displacement = (targetAddress + ref.addend()) & 0x00000FFF;
624 assert(((displacement & 0x3) == 0) &&
625 "scaled imm12 not accessing 4-byte aligned");
626 *loc32 = setImm12(*loc32, displacement >> 2);
629 displacement = (targetAddress + ref.addend()) & 0x00000FFF;
630 assert(((displacement & 0x7) == 0) &&
631 "scaled imm12 not accessing 8-byte aligned");
632 *loc32 = setImm12(*loc32, displacement >> 3);
634 case offset12scale16:
635 displacement = (targetAddress + ref.addend()) & 0x00000FFF;
636 assert(((displacement & 0xF) == 0) &&
637 "scaled imm12 not accessing 16-byte aligned");
638 *loc32 = setImm12(*loc32, displacement >> 4);
// GOT-load turned direct: rewrite the LDR (0xF94xxxxx) into an ADD
// (0x91xxxxxx) keeping the register operands, then set the imm12.
641 instruction = *loc32;
642 assert(((instruction & 0xFFC00000) == 0xF9400000) &&
643 "GOT reloc is not an LDR instruction");
644 displacement = (targetAddress + ref.addend()) & 0x00000FFF;
645 value32 = 0x91000000 | (instruction & 0x000003FF);
646 instruction = setImm12(value32, displacement);
647 *loc32 = instruction;
651 *loc64 = targetAddress + ref.addend();
654 case unwindFDEToFunction:
655 *loc64 = (targetAddress - fixupAddress) + ref.addend();
659 case unwindCIEToPersonalityFunction:
660 *loc32 = (targetAddress - fixupAddress) + ref.addend();
// negDelta32: reversed-sign 32-bit delta.
663 *loc32 = fixupAddress - targetAddress + ref.addend();
668 case lazyImmediateLocation:
669 *loc32 = ref.addend();
// imageOffset: offset of the target from the mach_header.
672 *loc32 = (targetAddress - imageBaseAddress) + ref.addend();
675 llvm_unreachable("imageOffsetGot should have been changed to imageOffset");
677 case unwindInfoToEhFrame:
// Splice the target's section-relative offset into the low 24 bits
// of the compact-unwind encoding word.
678 value64 = targetAddress - findSectionAddress(*ref.target()) + ref.addend();
679 assert(value64 < 0xffffffU && "offset in __eh_frame too large");
680 *loc32 = (*loc32 & 0xff000000U) | value64;
683 // Fall into llvm_unreachable().
686 llvm_unreachable("invalid arm64 Reference Kind");
// Patch bytes for -r (relocatable) output: instruction immediates are
// zeroed (the relocation carries the real value), data kinds store the
// addend or a section-relative delta so the reloc can be regenerated on
// a later parse. NOTE(review): this sampled view is missing the 'case'
// labels, 'return' statements, the 'uint8_t *loc' parameter line, and
// closing braces; comments describe only the visible text.
689 void ArchHandler_arm64::applyFixupRelocatable(const Reference &ref,
691 uint64_t fixupAddress,
692 uint64_t targetAddress,
693 uint64_t inAtomAddress,
694 bool targetUnnamed) {
695 if (ref.kindNamespace() != Reference::KindNamespace::mach_o)
697 assert(ref.kindArch() == Reference::KindArch::AArch64);
698 ulittle32_t *loc32 = reinterpret_cast<ulittle32_t *>(loc);
699 ulittle64_t *loc64 = reinterpret_cast<ulittle64_t *>(loc);
700 switch (static_cast<Arm64Kind>(ref.kindValue())) {
// Branch/ADRP/imm12 fields are written as zero in .o output.
702 *loc32 = setDisplacementInBranch26(*loc32, 0);
707 *loc32 = setDisplacementInADRP(*loc32, 0);
713 case offset12scale16:
716 *loc32 = setImm12(*loc32, 0);
720 *loc64 = targetAddress + ref.addend();
722 *loc64 = ref.addend();
725 *loc64 = ref.addend() + inAtomAddress - fixupAddress;
727 case unwindFDEToFunction:
728 // We don't emit unwindFDEToFunction in -r mode as they are implicitly
729 // generated from the data in the __eh_frame section. So here we need
730 // to use the targetAddress so that we can generate the full relocation
731 // when we parse again later.
732 *loc64 = targetAddress - fixupAddress;
735 *loc32 = ref.addend() + inAtomAddress - fixupAddress;
738 // We don't emit negDelta32 in -r mode as they are implicitly
739 // generated from the data in the __eh_frame section. So here we need
740 // to use the targetAddress so that we can generate the full relocation
741 // when we parse again later.
742 *loc32 = fixupAddress - targetAddress + ref.addend();
748 *loc32 = inAtomAddress - fixupAddress;
750 case unwindCIEToPersonalityFunction:
751 // We don't emit unwindCIEToPersonalityFunction in -r mode as they are
752 // implicitly generated from the data in the __eh_frame section. So here we
753 // need to use the targetAddress so that we can generate the full relocation
754 // when we parse again later.
755 *loc32 = targetAddress - fixupAddress;
// Pass-produced kinds must not survive into relocatable output.
758 llvm_unreachable("lazy reference kind implies GOT pass was run");
760 case lazyImmediateLocation:
761 llvm_unreachable("lazy reference kind implies Stubs pass was run");
764 case unwindInfoToEhFrame:
765 llvm_unreachable("fixup implies __unwind_info");
768 // Fall into llvm_unreachable().
771 llvm_unreachable("unknown arm64 Reference Kind");
// Emit the mach-o relocation records for one Reference when writing a
// relocatable (.o) file: references with addends become ADDEND+payload
// pairs, deltas become SUBTRACTOR+UNSIGNED pairs. NOTE(review): this
// sampled view is missing most 'case' labels, the 'if (ref.addend())'
// guards implied by the paired appendReloc calls, 'return'/'break'
// lines, and closing braces; comments describe only the visible text.
774 void ArchHandler_arm64::appendSectionRelocations(
775 const DefinedAtom &atom, uint64_t atomSectionOffset, const Reference &ref,
776 FindSymbolIndexForAtom symbolIndexForAtom,
777 FindSectionIndexForAtom sectionIndexForAtom,
778 FindAddressForAtom addressForAtom, normalized::Relocations &relocs) {
779 if (ref.kindNamespace() != Reference::KindNamespace::mach_o)
781 assert(ref.kindArch() == Reference::KindArch::AArch64);
782 uint32_t sectionOffset = atomSectionOffset + ref.offsetInAtom();
783 switch (static_cast<Arm64Kind>(ref.kindValue())) {
// branch26: ADDEND pair when there is an addend, single reloc otherwise.
786 appendReloc(relocs, sectionOffset, ref.addend(), 0,
787 ARM64_RELOC_ADDEND | rLength4);
788 appendReloc(relocs, sectionOffset, symbolIndexForAtom(*ref.target()), 0,
789 ARM64_RELOC_BRANCH26 | rPcRel | rExtern | rLength4);
791 appendReloc(relocs, sectionOffset, symbolIndexForAtom(*ref.target()), 0,
792 ARM64_RELOC_BRANCH26 | rPcRel | rExtern | rLength4);
797 appendReloc(relocs, sectionOffset, ref.addend(), 0,
798 ARM64_RELOC_ADDEND | rLength4);
799 appendReloc(relocs, sectionOffset, symbolIndexForAtom(*ref.target()), 0,
800 ARM64_RELOC_PAGE21 | rPcRel | rExtern | rLength4);
802 appendReloc(relocs, sectionOffset, symbolIndexForAtom(*ref.target()), 0,
803 ARM64_RELOC_PAGE21 | rPcRel | rExtern | rLength4);
810 case offset12scale16:
812 appendReloc(relocs, sectionOffset, ref.addend(), 0,
813 ARM64_RELOC_ADDEND | rLength4);
814 appendReloc(relocs, sectionOffset, symbolIndexForAtom(*ref.target()), 0,
815 ARM64_RELOC_PAGEOFF12 | rExtern | rLength4);
817 appendReloc(relocs, sectionOffset, symbolIndexForAtom(*ref.target()), 0,
818 ARM64_RELOC_PAGEOFF12 | rExtern | rLength4);
// GOT/TLV kinds never carry addends in mach-o.
822 assert(ref.addend() == 0);
823 appendReloc(relocs, sectionOffset, symbolIndexForAtom(*ref.target()), 0,
824 ARM64_RELOC_GOT_LOAD_PAGE21 | rPcRel | rExtern | rLength4);
827 assert(ref.addend() == 0);
828 appendReloc(relocs, sectionOffset, symbolIndexForAtom(*ref.target()), 0,
829 ARM64_RELOC_GOT_LOAD_PAGEOFF12 | rExtern | rLength4);
832 assert(ref.addend() == 0);
833 appendReloc(relocs, sectionOffset, symbolIndexForAtom(*ref.target()), 0,
834 ARM64_RELOC_TLVP_LOAD_PAGE21 | rPcRel | rExtern | rLength4);
837 assert(ref.addend() == 0);
838 appendReloc(relocs, sectionOffset, symbolIndexForAtom(*ref.target()), 0,
839 ARM64_RELOC_TLVP_LOAD_PAGEOFF12 | rExtern | rLength4);
// pointer64: unnamed targets use a section-index (non-extern) reloc.
842 if (ref.target()->name().empty())
843 appendReloc(relocs, sectionOffset, sectionIndexForAtom(*ref.target()), 0,
844 ARM64_RELOC_UNSIGNED | rLength8);
846 appendReloc(relocs, sectionOffset, symbolIndexForAtom(*ref.target()), 0,
847 ARM64_RELOC_UNSIGNED | rExtern | rLength8);
// delta64/delta32: SUBTRACTOR (this atom) + UNSIGNED (target) pair.
850 appendReloc(relocs, sectionOffset, symbolIndexForAtom(atom), 0,
851 ARM64_RELOC_SUBTRACTOR | rExtern | rLength8);
852 appendReloc(relocs, sectionOffset, symbolIndexForAtom(*ref.target()), 0,
853 ARM64_RELOC_UNSIGNED | rExtern | rLength8);
856 appendReloc(relocs, sectionOffset, symbolIndexForAtom(atom), 0,
857 ARM64_RELOC_SUBTRACTOR | rExtern | rLength4 );
858 appendReloc(relocs, sectionOffset, symbolIndexForAtom(*ref.target()), 0,
859 ARM64_RELOC_UNSIGNED | rExtern | rLength4 );
862 assert(ref.addend() == 0);
863 appendReloc(relocs, sectionOffset, symbolIndexForAtom(*ref.target()), 0,
864 ARM64_RELOC_POINTER_TO_GOT | rExtern | rLength8);
867 assert(ref.addend() == 0);
868 appendReloc(relocs, sectionOffset, symbolIndexForAtom(*ref.target()), 0,
869 ARM64_RELOC_POINTER_TO_GOT | rPcRel | rExtern | rLength4);
872 llvm_unreachable("lazy reference kind implies GOT pass was run");
874 case lazyImmediateLocation:
875 llvm_unreachable("lazy reference kind implies Stubs pass was run");
878 llvm_unreachable("deltas from mach_header can only be in final images");
// Unwind kinds are regenerated from __eh_frame, never written as relocs.
879 case unwindCIEToPersonalityFunction:
880 case unwindFDEToFunction:
881 case unwindInfoToEhFrame:
886 // Fall into llvm_unreachable().
889 llvm_unreachable("unknown arm64 Reference Kind");
// Factory: hand ownership of a fresh arm64 handler to the caller.
// NOTE(review): the function's own closing brace is missing from this
// sampled view; only the namespace-closing brace is visible.
892 std::unique_ptr<mach_o::ArchHandler> ArchHandler::create_arm64() {
893 return std::unique_ptr<mach_o::ArchHandler>(new ArchHandler_arm64());
896 } // namespace mach_o