1 //===- lib/FileFormat/MachO/ArchHandler_x86_64.cpp ------------------------===//
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
8 //===----------------------------------------------------------------------===//
10 #include "ArchHandler.h"
12 #include "MachONormalizedFileBinaryUtils.h"
13 #include "llvm/ADT/StringRef.h"
14 #include "llvm/ADT/StringSwitch.h"
15 #include "llvm/ADT/Triple.h"
16 #include "llvm/Support/Endian.h"
17 #include "llvm/Support/ErrorHandling.h"
19 using namespace llvm::MachO;
20 using namespace lld::mach_o::normalized;
25 using llvm::support::ulittle32_t;
26 using llvm::support::ulittle64_t;
28 using llvm::support::little32_t;
29 using llvm::support::little64_t;
// NOTE(review): this file is a gapped extraction of lld's MachO
// ArchHandler_x86_64.cpp. Each line carries a stray original-line-number
// prefix, and many lines (access specifiers, case labels, returns, closing
// braces) are missing from view. Comments below describe only what the
// visible code shows; reconcile against the upstream file before editing.
//
// Architecture-specific handler for x86_64 Mach-O: classifies relocations
// into Reference kinds, applies fixups, and emits section relocations.
31 class ArchHandler_x86_64 : public ArchHandler {
34 virtual ~ArchHandler_x86_64();
// Table of printable names for this arch's Reference kinds (defined below).
36 const Registry::KindStrings *kindStrings() override { return _sKindStrings; }
38 Reference::KindArch kindArch() override {
39 return Reference::KindArch::x86_64;
42 /// Used by GOTPass to locate GOT References
// Body truncated: the early-return for non-mach_o kinds and the switch's
// case labels/returns are not visible in this extraction.
43 bool isGOTAccess(const Reference &ref, bool &canBypassGOT) override {
44 if (ref.kindNamespace() != Reference::KindNamespace::mach_o)
46 assert(ref.kindArch() == Reference::KindArch::x86_64);
47 switch (ref.kindValue()) {
62 /// Used by GOTPass to update GOT References
// Rewrites a reference's kind in place after the GOT pass decides whether
// the target ends up with a GOT entry. Mutation goes through const_cast
// because the pass only holds a const Reference*.
63 void updateReferenceToGOT(const Reference *ref, bool targetNowGOT) override {
64 assert(ref->kindNamespace() == Reference::KindNamespace::mach_o);
65 assert(ref->kindArch() == Reference::KindArch::x86_64);
67 switch (ref->kindValue()) {
69 assert(targetNowGOT && "target must be GOT");
71 const_cast<Reference *>(ref)
72 ->setKindValue(targetNowGOT ? ripRel32 : ripRel32GotLoadNowLea);
75 const_cast<Reference *>(ref)->setKindValue(imageOffset);
78 llvm_unreachable("unknown GOT reference kind");
// The following small accessors report which kind values the shared passes
// (compact unwind, image-offset, stubs) should use for this architecture.
// Several return statements are missing from this extraction.
82 bool needsCompactUnwind() override {
85 Reference::KindValue imageOffsetKind() override {
88 Reference::KindValue imageOffsetKindIndirect() override {
89 return imageOffsetGot;
92 Reference::KindValue unwindRefToCIEKind() override {
96 Reference::KindValue unwindRefToFunctionKind() override{
97 return unwindFDEToFunction;
100 Reference::KindValue unwindRefToEhFrameKind() override {
101 return unwindInfoToEhFrame;
104 uint32_t dwarfCompactUnwindType() override {
108 const StubInfo &stubInfo() override { return _sStubInfo; }
110 bool isNonCallBranch(const Reference &) override {
// Relocation-parsing entry points; definitions appear later in the file.
114 bool isCallSite(const Reference &) override;
115 bool isPointer(const Reference &) override;
116 bool isPairedReloc(const normalized::Relocation &) override;
118 std::error_code getReferenceInfo(const normalized::Relocation &reloc,
119 const DefinedAtom *inAtom,
120 uint32_t offsetInAtom,
121 uint64_t fixupAddress, bool swap,
122 FindAtomBySectionAndAddress atomFromAddress,
123 FindAtomBySymbolIndex atomFromSymbolIndex,
124 Reference::KindValue *kind,
125 const lld::Atom **target,
126 Reference::Addend *addend) override;
// Return type line for getPairReferenceInfo is missing in this extraction
// (presumably std::error_code, matching getReferenceInfo -- TODO confirm).
128 getPairReferenceInfo(const normalized::Relocation &reloc1,
129 const normalized::Relocation &reloc2,
130 const DefinedAtom *inAtom,
131 uint32_t offsetInAtom,
132 uint64_t fixupAddress, bool swap, bool scatterable,
133 FindAtomBySectionAndAddress atomFromAddress,
134 FindAtomBySymbolIndex atomFromSymbolIndex,
135 Reference::KindValue *kind,
136 const lld::Atom **target,
137 Reference::Addend *addend) override;
// Only anonymous C-string atoms need a local symbol kept in a .o output.
139 bool needsLocalSymbolInRelocatableFile(const DefinedAtom *atom) override {
140 return (atom->contentType() == DefinedAtom::typeCString);
143 void generateAtomContent(const DefinedAtom &atom, bool relocatable,
144 FindAddressForAtom findAddress,
145 FindAddressForAtom findSectionAddress,
147 uint8_t *atomContentBuffer) override;
149 void appendSectionRelocations(const DefinedAtom &atom,
150 uint64_t atomSectionOffset,
151 const Reference &ref,
152 FindSymbolIndexForAtom symbolIndexForAtom,
153 FindSectionIndexForAtom sectionIndexForAtom,
154 FindAddressForAtom addressForAtom,
155 normalized::Relocations &relocs) override;
158 static const Registry::KindStrings _sKindStrings[];
159 static const StubInfo _sStubInfo;
// All x86_64 Reference kinds. Values are used as array/switch keys by the
// methods below; each enumerator's /// comment shows a producing example.
161 enum X86_64Kind: Reference::KindValue {
162 invalid, /// for error condition
164 // Kinds found in mach-o .o files:
165 branch32, /// ex: call _foo
166 ripRel32, /// ex: movq _foo(%rip), %rax
167 ripRel32Minus1, /// ex: movb $0x12, _foo(%rip)
168 ripRel32Minus2, /// ex: movw $0x1234, _foo(%rip)
169 ripRel32Minus4, /// ex: movl $0x12345678, _foo(%rip)
170 ripRel32Anon, /// ex: movq L1(%rip), %rax
171 ripRel32GotLoad, /// ex: movq _foo@GOTPCREL(%rip), %rax
172 ripRel32Got, /// ex: pushq _foo@GOTPCREL(%rip)
173 pointer64, /// ex: .quad _foo
174 pointer64Anon, /// ex: .quad L1
175 delta64, /// ex: .quad _foo - .
176 delta32, /// ex: .long _foo - .
177 delta64Anon, /// ex: .quad L1 - .
178 delta32Anon, /// ex: .long L1 - .
179 negDelta32, /// ex: .long . - _foo
181 // Kinds introduced by Passes:
182 ripRel32GotLoadNowLea, /// Target of GOT load is in linkage unit so
183 /// "movq _foo@GOTPCREL(%rip), %rax" can be changed
184 /// to "leaq _foo(%rip), %rax
185 lazyPointer, /// Location contains a lazy pointer.
186 lazyImmediateLocation, /// Location contains immediate value used in stub.
188 imageOffset, /// Location contains offset of atom in final image
189 imageOffsetGot, /// Location contains offset of GOT entry for atom in
190 /// final image (typically personality function).
191 unwindFDEToFunction, /// Nearly delta64, but cannot be rematerialized in
192 /// relocatable object (yay for implicit contracts!).
193 unwindInfoToEhFrame, /// Fix low 24 bits of compact unwind encoding to
194 /// refer to __eh_frame entry.
// Internal helpers: reloc -> kind classification and fixup application.
197 Reference::KindValue kindFromReloc(const normalized::Relocation &reloc);
198 Reference::KindValue kindFromRelocPair(const normalized::Relocation &reloc1,
199 const normalized::Relocation &reloc2);
201 void applyFixupFinal(const Reference &ref, uint8_t *location,
202 uint64_t fixupAddress, uint64_t targetAddress,
203 uint64_t inAtomAddress, uint64_t imageBaseAddress,
204 FindAddressForAtom findSectionAddress);
206 void applyFixupRelocatable(const Reference &ref, uint8_t *location,
207 uint64_t fixupAddress,
208 uint64_t targetAddress,
209 uint64_t inAtomAddress);
// Out-of-line default constructor (no members to initialize here).
213 ArchHandler_x86_64::ArchHandler_x86_64() { }
// Out-of-line destructor; anchors the class's vtable in this TU.
215 ArchHandler_x86_64::~ArchHandler_x86_64() { }
// Kind-value -> printable-name table used by the Registry for diagnostics
// and YAML round-tripping. Covers every X86_64Kind enumerator declared
// above. NOTE(review): the table's end marker and closing "};" are not
// visible in this extraction -- presumably LLD_KIND_STRING_END follows.
217 const Registry::KindStrings ArchHandler_x86_64::_sKindStrings[] = {
218 LLD_KIND_STRING_ENTRY(invalid), LLD_KIND_STRING_ENTRY(branch32),
219 LLD_KIND_STRING_ENTRY(ripRel32), LLD_KIND_STRING_ENTRY(ripRel32Minus1),
220 LLD_KIND_STRING_ENTRY(ripRel32Minus2), LLD_KIND_STRING_ENTRY(ripRel32Minus4),
221 LLD_KIND_STRING_ENTRY(ripRel32Anon), LLD_KIND_STRING_ENTRY(ripRel32GotLoad),
222 LLD_KIND_STRING_ENTRY(ripRel32GotLoadNowLea),
223 LLD_KIND_STRING_ENTRY(ripRel32Got), LLD_KIND_STRING_ENTRY(lazyPointer),
224 LLD_KIND_STRING_ENTRY(lazyImmediateLocation),
225 LLD_KIND_STRING_ENTRY(pointer64), LLD_KIND_STRING_ENTRY(pointer64Anon),
226 LLD_KIND_STRING_ENTRY(delta32), LLD_KIND_STRING_ENTRY(delta64),
227 LLD_KIND_STRING_ENTRY(delta32Anon), LLD_KIND_STRING_ENTRY(delta64Anon),
228 LLD_KIND_STRING_ENTRY(negDelta32),
229 LLD_KIND_STRING_ENTRY(imageOffset), LLD_KIND_STRING_ENTRY(imageOffsetGot),
230 LLD_KIND_STRING_ENTRY(unwindFDEToFunction),
231 LLD_KIND_STRING_ENTRY(unwindInfoToEhFrame),
// Description of x86_64 lazy-binding stubs for the Stubs pass: machine-code
// templates plus the Reference kind and byte offset of each fixup location
// inside them (e.g. {kind, ripRel32, 2, 0} patches the 4-byte displacement
// starting at byte 2 of the 6-byte "jmp *lazyPointer" stub).
// NOTE(review): several size/name fields between the visible rows are
// missing in this extraction.
235 const ArchHandler::StubInfo ArchHandler_x86_64::_sStubInfo = {
238 // Lazy pointer references
239 { Reference::KindArch::x86_64, pointer64, 0, 0 },
240 { Reference::KindArch::x86_64, lazyPointer, 0, 0 },
242 // GOT pointer to dyld_stub_binder
243 { Reference::KindArch::x86_64, pointer64, 0, 0 },
245 // x86_64 code alignment 2^1
248 // Stub size and code
250 { 0xff, 0x25, 0x00, 0x00, 0x00, 0x00 }, // jmp *lazyPointer
251 { Reference::KindArch::x86_64, ripRel32, 2, 0 },
254 // Stub Helper size and code
256 { 0x68, 0x00, 0x00, 0x00, 0x00, // pushq $lazy-info-offset
257 0xE9, 0x00, 0x00, 0x00, 0x00 }, // jmp helperhelper
258 { Reference::KindArch::x86_64, lazyImmediateLocation, 1, 0 },
259 { Reference::KindArch::x86_64, branch32, 6, 0 },
261 // Stub Helper-Common size and code
263 { 0x4C, 0x8D, 0x1D, 0x00, 0x00, 0x00, 0x00, // leaq cache(%rip),%r11
264 0x41, 0x53, // push %r11
265 0xFF, 0x25, 0x00, 0x00, 0x00, 0x00, // jmp *binder(%rip)
267 { Reference::KindArch::x86_64, ripRel32, 3, 0 },
269 { Reference::KindArch::x86_64, ripRel32, 11, 0 },
// True only for branch32 (a `call`/branch fixup). NOTE(review): the body of
// the non-mach_o guard (presumably `return false;`) and the closing brace
// are missing from this extraction.
274 bool ArchHandler_x86_64::isCallSite(const Reference &ref) {
275 if (ref.kindNamespace() != Reference::KindNamespace::mach_o)
277 assert(ref.kindArch() == Reference::KindArch::x86_64);
278 return (ref.kindValue() == branch32);
// True for 64-bit pointer fixups (named or anonymous target). NOTE(review):
// the non-mach_o guard body and closing brace are missing here.
281 bool ArchHandler_x86_64::isPointer(const Reference &ref) {
282 if (ref.kindNamespace() != Reference::KindNamespace::mach_o)
284 assert(ref.kindArch() == Reference::KindArch::x86_64);
285 Reference::KindValue kind = ref.kindValue();
286 return (kind == pointer64 || kind == pointer64Anon);
// On x86_64, SUBTRACTOR relocations always come paired with a following
// UNSIGNED relocation (handled in getPairReferenceInfo).
289 bool ArchHandler_x86_64::isPairedReloc(const Relocation &reloc) {
290 return (reloc.type == X86_64_RELOC_SUBTRACTOR);
// Maps a single (unpaired) mach-o relocation, encoded by relocPattern()
// (type | pcrel | extern | length bits), to an X86_64Kind. NOTE(review):
// the return-type line, several `return` statements (e.g. after the
// BRANCH/SIGNED/GOT cases), the default case, and the closing braces are
// missing from this extraction.
294 ArchHandler_x86_64::kindFromReloc(const Relocation &reloc) {
295 switch(relocPattern(reloc)) {
296 case X86_64_RELOC_BRANCH | rPcRel | rExtern | rLength4:
298 case X86_64_RELOC_SIGNED | rPcRel | rExtern | rLength4:
300 case X86_64_RELOC_SIGNED | rPcRel | rLength4:
302 case X86_64_RELOC_SIGNED_1 | rPcRel | rExtern | rLength4:
303 return ripRel32Minus1;
304 case X86_64_RELOC_SIGNED_2 | rPcRel | rExtern | rLength4:
305 return ripRel32Minus2;
306 case X86_64_RELOC_SIGNED_4 | rPcRel | rExtern | rLength4:
307 return ripRel32Minus4;
308 case X86_64_RELOC_GOT_LOAD | rPcRel | rExtern | rLength4:
309 return ripRel32GotLoad;
310 case X86_64_RELOC_GOT | rPcRel | rExtern | rLength4:
312 case X86_64_RELOC_UNSIGNED | rExtern | rLength8:
314 case X86_64_RELOC_UNSIGNED | rLength8:
315 return pointer64Anon;
// Turns one relocation into (kind, target atom, addend). External relocs
// resolve the target via symbol index; non-extern ones compute a target
// address from the fixup content and resolve via section+address. The
// rip-relative Minus1/2/4 kinds bias the addend to undo the instruction
// immediate that follows the displacement. NOTE(review): the return-type
// line, the `case` labels in front of each visible lookup/return sequence,
// the `return ec;` lines after each atomFromSymbolIndex call, and the
// closing braces are missing from this extraction.
322 ArchHandler_x86_64::getReferenceInfo(const Relocation &reloc,
323 const DefinedAtom *inAtom,
324 uint32_t offsetInAtom,
325 uint64_t fixupAddress, bool swap,
326 FindAtomBySectionAndAddress atomFromAddress,
327 FindAtomBySymbolIndex atomFromSymbolIndex,
328 Reference::KindValue *kind,
329 const lld::Atom **target,
330 Reference::Addend *addend) {
331 typedef std::error_code E;
332 *kind = kindFromReloc(reloc);
333 if (*kind == invalid)
334 return make_dynamic_error_code(Twine("unknown type"));
// Fixup bytes are read little-endian straight out of the atom content.
335 const uint8_t *fixupContent = &inAtom->rawContent()[offsetInAtom];
336 uint64_t targetAddress;
340 if (E ec = atomFromSymbolIndex(reloc.symbol, target))
342 *addend = *(const little32_t *)fixupContent;
343 return std::error_code();
345 if (E ec = atomFromSymbolIndex(reloc.symbol, target))
347 *addend = (int32_t)*(const little32_t *)fixupContent + 1;
348 return std::error_code();
350 if (E ec = atomFromSymbolIndex(reloc.symbol, target))
352 *addend = (int32_t)*(const little32_t *)fixupContent + 2;
353 return std::error_code();
355 if (E ec = atomFromSymbolIndex(reloc.symbol, target))
357 *addend = (int32_t)*(const little32_t *)fixupContent + 4;
358 return std::error_code();
// Anonymous rip-relative: target address = next-instruction PC + disp32.
360 targetAddress = fixupAddress + 4 + *(const little32_t *)fixupContent;
361 return atomFromAddress(reloc.symbol, targetAddress, target, addend);
362 case ripRel32GotLoad:
364 if (E ec = atomFromSymbolIndex(reloc.symbol, target))
366 *addend = *(const little32_t *)fixupContent;
367 return std::error_code();
369 if (E ec = atomFromSymbolIndex(reloc.symbol, target))
371 *addend = *(const little64_t *)fixupContent;
372 return std::error_code();
// Anonymous pointer64: the 8 content bytes are the absolute target address.
374 targetAddress = *(const little64_t *)fixupContent;
375 return atomFromAddress(reloc.symbol, targetAddress, target, addend);
377 llvm_unreachable("bad reloc kind");
// Maps a SUBTRACTOR+UNSIGNED relocation pair (both patterns packed into one
// 32-bit switch key) to a delta kind: extern second reloc -> delta64/32,
// non-extern -> delta64Anon/delta32Anon. NOTE(review): the return-type line
// and the `return deltaXX;` lines after each case are missing from this
// extraction.
382 ArchHandler_x86_64::kindFromRelocPair(const normalized::Relocation &reloc1,
383 const normalized::Relocation &reloc2) {
384 switch(relocPattern(reloc1) << 16 | relocPattern(reloc2)) {
385 case ((X86_64_RELOC_SUBTRACTOR | rExtern | rLength8) << 16 |
386 X86_64_RELOC_UNSIGNED | rExtern | rLength8):
388 case ((X86_64_RELOC_SUBTRACTOR | rExtern | rLength4) << 16 |
389 X86_64_RELOC_UNSIGNED | rExtern | rLength4):
391 case ((X86_64_RELOC_SUBTRACTOR | rExtern | rLength8) << 16 |
392 X86_64_RELOC_UNSIGNED | rLength8):
394 case ((X86_64_RELOC_SUBTRACTOR | rExtern | rLength4) << 16 |
395 X86_64_RELOC_UNSIGNED | rLength4):
398 llvm_unreachable("bad reloc pairs")
// Turns a SUBTRACTOR+UNSIGNED pair into a delta reference. The first reloc
// names the "from" atom, which must be the atom containing the fixup; the
// second names (or addresses) the target. Addends are biased by
// offsetInAtom so the delta is relative to the atom start. NOTE(review):
// the return-type line, the `return ec;` lines, the switch statement and
// `case` labels around the visible bodies, and closing braces are missing
// from this extraction.
403 ArchHandler_x86_64::getPairReferenceInfo(const normalized::Relocation &reloc1,
404 const normalized::Relocation &reloc2,
405 const DefinedAtom *inAtom,
406 uint32_t offsetInAtom,
407 uint64_t fixupAddress, bool swap,
409 FindAtomBySectionAndAddress atomFromAddress,
410 FindAtomBySymbolIndex atomFromSymbolIndex,
411 Reference::KindValue *kind,
412 const lld::Atom **target,
413 Reference::Addend *addend) {
414 *kind = kindFromRelocPair(reloc1, reloc2);
415 if (*kind == invalid)
416 return make_dynamic_error_code(Twine("unknown pair"));
417 const uint8_t *fixupContent = &inAtom->rawContent()[offsetInAtom];
418 typedef std::error_code E;
419 uint64_t targetAddress;
420 const lld::Atom *fromTarget;
421 if (E ec = atomFromSymbolIndex(reloc1.symbol, &fromTarget))
// Deltas are only representable when the subtrahend is the containing atom.
423 if (fromTarget != inAtom)
424 return make_dynamic_error_code(Twine("pointer diff not in base atom"));
427 if (E ec = atomFromSymbolIndex(reloc2.symbol, target))
429 *addend = (int64_t)*(const little64_t *)fixupContent + offsetInAtom;
430 return std::error_code();
432 if (E ec = atomFromSymbolIndex(reloc2.symbol, target))
434 *addend = (int32_t)*(const little32_t *)fixupContent + offsetInAtom;
435 return std::error_code();
437 targetAddress = offsetInAtom + (int64_t)*(const little64_t *)fixupContent;
438 return atomFromAddress(reloc2.symbol, targetAddress, target, addend);
440 targetAddress = offsetInAtom + (int32_t)*(const little32_t *)fixupContent;
441 return atomFromAddress(reloc2.symbol, targetAddress, target, addend);
443 llvm_unreachable("bad reloc pair kind");
// Copies the atom's raw bytes into the output buffer, then applies every
// reference's fixup in place -- relocatable form when emitting a .o,
// final form when emitting a linked image. Target address is only
// computed for DefinedAtoms; others stay 0. NOTE(review): the
// `if (relocatable)` / `else` lines selecting between the two applyFixup
// calls, an argument line of applyFixupRelocatable, and the closing braces
// are missing from this extraction.
447 void ArchHandler_x86_64::generateAtomContent(
448 const DefinedAtom &atom, bool relocatable, FindAddressForAtom findAddress,
449 FindAddressForAtom findSectionAddress, uint64_t imageBaseAddress,
450 uint8_t *atomContentBuffer) {
452 memcpy(atomContentBuffer, atom.rawContent().data(), atom.size());
454 for (const Reference *ref : atom) {
455 uint32_t offset = ref->offsetInAtom();
456 const Atom *target = ref->target();
457 uint64_t targetAddress = 0;
458 if (isa<DefinedAtom>(target))
459 targetAddress = findAddress(*target);
460 uint64_t atomAddress = findAddress(atom);
461 uint64_t fixupAddress = atomAddress + offset;
463 applyFixupRelocatable(*ref, &atomContentBuffer[offset],
464 fixupAddress, targetAddress,
467 applyFixupFinal(*ref, &atomContentBuffer[offset],
468 fixupAddress, targetAddress,
469 atomAddress, imageBaseAddress, findSectionAddress);
// Patches the instruction/data bytes at `loc` for a final (linked) image.
// rip-relative kinds subtract the end-of-fixup PC (+4/+5/+6/+8 depending on
// the trailing immediate size); pointer64 writes an absolute address;
// imageOffset* kinds store target - imageBase; unwindInfoToEhFrame packs a
// 24-bit section-relative offset into the low bytes of the existing word.
// Writes go through ulittle32_t/ulittle64_t so they are little-endian on
// any host. NOTE(review): many `case` labels and `return`/`break`
// statements between the visible assignments are missing from this
// extraction.
474 void ArchHandler_x86_64::applyFixupFinal(
475 const Reference &ref, uint8_t *loc, uint64_t fixupAddress,
476 uint64_t targetAddress, uint64_t inAtomAddress, uint64_t imageBaseAddress,
477 FindAddressForAtom findSectionAddress) {
478 if (ref.kindNamespace() != Reference::KindNamespace::mach_o)
480 assert(ref.kindArch() == Reference::KindArch::x86_64);
481 ulittle32_t *loc32 = reinterpret_cast<ulittle32_t *>(loc);
482 ulittle64_t *loc64 = reinterpret_cast<ulittle64_t *>(loc);
483 switch (static_cast<X86_64Kind>(ref.kindValue())) {
488 case ripRel32GotLoad:
489 *loc32 = targetAddress - (fixupAddress + 4) + ref.addend();
493 *loc64 = targetAddress + ref.addend();
496 *loc32 = targetAddress - (fixupAddress + 5) + ref.addend();
499 *loc32 = targetAddress - (fixupAddress + 6) + ref.addend();
502 *loc32 = targetAddress - (fixupAddress + 8) + ref.addend();
506 *loc32 = targetAddress - fixupAddress + ref.addend();
510 case unwindFDEToFunction:
511 *loc64 = targetAddress - fixupAddress + ref.addend();
513 case ripRel32GotLoadNowLea:
514 // Change MOVQ to LEA
// The byte two before the displacement must be the MOVQ opcode (0x8B);
// the missing line here presumably rewrites it to LEA -- TODO confirm.
515 assert(loc[-2] == 0x8B);
517 *loc32 = targetAddress - (fixupAddress + 4) + ref.addend();
520 *loc32 = fixupAddress - targetAddress + ref.addend();
525 case lazyImmediateLocation:
526 *loc32 = ref.addend();
530 *loc32 = (targetAddress - imageBaseAddress) + ref.addend();
532 case unwindInfoToEhFrame: {
533 uint64_t val = targetAddress - findSectionAddress(*ref.target()) + ref.addend();
534 assert(val < 0xffffffU && "offset in __eh_frame too large");
535 *loc32 = (*loc32 & 0xff000000U) | val;
539 // Fall into llvm_unreachable().
542 llvm_unreachable("invalid x86_64 Reference Kind");
// Patches bytes for a relocatable (.o) output: extern-reloc kinds store
// only the addend (the linker's relocation carries the symbol), anonymous
// kinds store target-relative values, and kinds produced by Passes
// (GOT/stubs/unwind-info) must never reach here. NOTE(review): many `case`
// labels and `break`/`return` statements between the visible assignments,
// plus a parameter line (`uint8_t *loc`?) in the signature, are missing
// from this extraction.
546 void ArchHandler_x86_64::applyFixupRelocatable(const Reference &ref,
548 uint64_t fixupAddress,
549 uint64_t targetAddress,
550 uint64_t inAtomAddress) {
551 if (ref.kindNamespace() != Reference::KindNamespace::mach_o)
553 assert(ref.kindArch() == Reference::KindArch::x86_64);
554 ulittle32_t *loc32 = reinterpret_cast<ulittle32_t *>(loc);
555 ulittle64_t *loc64 = reinterpret_cast<ulittle64_t *>(loc);
556 switch (static_cast<X86_64Kind>(ref.kindValue())) {
560 case ripRel32GotLoad:
561 *loc32 = ref.addend();
564 *loc32 = (targetAddress - (fixupAddress + 4)) + ref.addend();
567 *loc64 = ref.addend();
570 *loc64 = targetAddress + ref.addend();
// Minus1/2/4: store addend minus the bias added when the reloc was parsed.
573 *loc32 = ref.addend() - 1;
576 *loc32 = ref.addend() - 2;
579 *loc32 = ref.addend() - 4;
582 *loc32 = ref.addend() + inAtomAddress - fixupAddress;
585 *loc32 = (targetAddress - fixupAddress) + ref.addend();
588 *loc64 = ref.addend() + inAtomAddress - fixupAddress;
591 *loc64 = (targetAddress - fixupAddress) + ref.addend();
594 *loc32 = fixupAddress - targetAddress + ref.addend();
596 case ripRel32GotLoadNowLea:
597 llvm_unreachable("ripRel32GotLoadNowLea implies GOT pass was run");
600 case lazyImmediateLocation:
601 llvm_unreachable("lazy reference kind implies Stubs pass was run");
605 case unwindInfoToEhFrame:
606 llvm_unreachable("fixup implies __unwind_info");
608 case unwindFDEToFunction:
609 // Do nothing for now
612 // Fall into llvm_unreachable().
615 llvm_unreachable("unknown x86_64 Reference Kind");
// Emits the normalized relocation records for one reference when writing a
// relocatable .o: extern relocs reference a symbol index, non-extern ones a
// section index; delta kinds emit a SUBTRACTOR (against this atom's symbol)
// followed by an UNSIGNED for the target. Pass-produced kinds are
// unreachable here. NOTE(review): many `case` labels and `break`/`return`
// statements between the visible appendReloc calls are missing from this
// extraction.
618 void ArchHandler_x86_64::appendSectionRelocations(
619 const DefinedAtom &atom,
620 uint64_t atomSectionOffset,
621 const Reference &ref,
622 FindSymbolIndexForAtom symbolIndexForAtom,
623 FindSectionIndexForAtom sectionIndexForAtom,
624 FindAddressForAtom addressForAtom,
625 normalized::Relocations &relocs) {
626 if (ref.kindNamespace() != Reference::KindNamespace::mach_o)
628 assert(ref.kindArch() == Reference::KindArch::x86_64);
629 uint32_t sectionOffset = atomSectionOffset + ref.offsetInAtom();
630 switch (static_cast<X86_64Kind>(ref.kindValue())) {
632 appendReloc(relocs, sectionOffset, symbolIndexForAtom(*ref.target()), 0,
633 X86_64_RELOC_BRANCH | rPcRel | rExtern | rLength4);
636 appendReloc(relocs, sectionOffset, symbolIndexForAtom(*ref.target()), 0,
637 X86_64_RELOC_SIGNED | rPcRel | rExtern | rLength4 );
640 appendReloc(relocs, sectionOffset, sectionIndexForAtom(*ref.target()), 0,
641 X86_64_RELOC_SIGNED | rPcRel | rLength4 );
644 appendReloc(relocs, sectionOffset, symbolIndexForAtom(*ref.target()), 0,
645 X86_64_RELOC_GOT | rPcRel | rExtern | rLength4 );
647 case ripRel32GotLoad:
648 appendReloc(relocs, sectionOffset, symbolIndexForAtom(*ref.target()), 0,
649 X86_64_RELOC_GOT_LOAD | rPcRel | rExtern | rLength4 );
652 appendReloc(relocs, sectionOffset, symbolIndexForAtom(*ref.target()), 0,
653 X86_64_RELOC_UNSIGNED | rExtern | rLength8);
656 appendReloc(relocs, sectionOffset, sectionIndexForAtom(*ref.target()), 0,
657 X86_64_RELOC_UNSIGNED | rLength8);
660 appendReloc(relocs, sectionOffset, symbolIndexForAtom(*ref.target()), 0,
661 X86_64_RELOC_SIGNED_1 | rPcRel | rExtern | rLength4 );
664 appendReloc(relocs, sectionOffset, symbolIndexForAtom(*ref.target()), 0,
665 X86_64_RELOC_SIGNED_2 | rPcRel | rExtern | rLength4 );
668 appendReloc(relocs, sectionOffset, symbolIndexForAtom(*ref.target()), 0,
669 X86_64_RELOC_SIGNED_4 | rPcRel | rExtern | rLength4 );
// Delta kinds: SUBTRACTOR(from = this atom) then UNSIGNED(target).
672 appendReloc(relocs, sectionOffset, symbolIndexForAtom(atom), 0,
673 X86_64_RELOC_SUBTRACTOR | rExtern | rLength4 );
674 appendReloc(relocs, sectionOffset, symbolIndexForAtom(*ref.target()), 0,
675 X86_64_RELOC_UNSIGNED | rExtern | rLength4 );
678 appendReloc(relocs, sectionOffset, symbolIndexForAtom(atom), 0,
679 X86_64_RELOC_SUBTRACTOR | rExtern | rLength4 );
680 appendReloc(relocs, sectionOffset, sectionIndexForAtom(*ref.target()), 0,
681 X86_64_RELOC_UNSIGNED | rLength4 );
684 appendReloc(relocs, sectionOffset, symbolIndexForAtom(atom), 0,
685 X86_64_RELOC_SUBTRACTOR | rExtern | rLength8 );
686 appendReloc(relocs, sectionOffset, symbolIndexForAtom(*ref.target()), 0,
687 X86_64_RELOC_UNSIGNED | rExtern | rLength8 );
690 appendReloc(relocs, sectionOffset, symbolIndexForAtom(atom), 0,
691 X86_64_RELOC_SUBTRACTOR | rExtern | rLength8 );
692 appendReloc(relocs, sectionOffset, sectionIndexForAtom(*ref.target()), 0,
693 X86_64_RELOC_UNSIGNED | rLength8 );
695 case unwindFDEToFunction:
696 case unwindInfoToEhFrame:
699 case ripRel32GotLoadNowLea:
700 llvm_unreachable("ripRel32GotLoadNowLea implies GOT pass was run");
703 case lazyImmediateLocation:
704 llvm_unreachable("lazy reference kind implies Stubs pass was run");
708 llvm_unreachable("__unwind_info references should have been resolved");
711 // Fall into llvm_unreachable().
714 llvm_unreachable("unknown x86_64 Reference Kind");
// Factory: heap-allocates the x86_64 handler and hands ownership to the
// caller via unique_ptr. NOTE(review): closing brace not visible here.
718 std::unique_ptr<mach_o::ArchHandler> ArchHandler::create_x86_64() {
719 return std::unique_ptr<mach_o::ArchHandler>(new ArchHandler_x86_64());
722 } // namespace mach_o