RuntimeDyldMachOAArch64.h 22 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541
  1. //===-- RuntimeDyldMachOAArch64.h -- MachO/AArch64 specific code. -*- C++ -*-=//
  2. //
  3. // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
  4. // See https://llvm.org/LICENSE.txt for license information.
  5. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
  6. //
  7. //===----------------------------------------------------------------------===//
  8. #ifndef LLVM_LIB_EXECUTIONENGINE_RUNTIMEDYLD_TARGETS_RUNTIMEDYLDMACHOAARCH64_H
  9. #define LLVM_LIB_EXECUTIONENGINE_RUNTIMEDYLD_TARGETS_RUNTIMEDYLDMACHOAARCH64_H
  10. #include "../RuntimeDyldMachO.h"
  11. #include "llvm/Support/Endian.h"
  12. #define DEBUG_TYPE "dyld"
  13. namespace llvm {
// MachO/AArch64 (arm64) specialization of the RuntimeDyld MachO CRTP base.
// Decodes, processes and resolves the ARM64_RELOC_* relocation family.
class RuntimeDyldMachOAArch64
    : public RuntimeDyldMachOCRTPBase<RuntimeDyldMachOAArch64> {
public:
  // Target pointers are 64 bits wide on AArch64.
  typedef uint64_t TargetPtrT;

  RuntimeDyldMachOAArch64(RuntimeDyld::MemoryManager &MM,
                          JITSymbolResolver &Resolver)
      : RuntimeDyldMachOCRTPBase(MM, Resolver) {}

  // Stubs emitted by processGOTRelocation are single 64-bit GOT-entry
  // pointers, hence 8 bytes with 8-byte alignment.
  unsigned getMaxStubSize() const override { return 8; }

  unsigned getStubAlignment() override { return 8; }
  23. /// Extract the addend encoded in the instruction / memory location.
  24. Expected<int64_t> decodeAddend(const RelocationEntry &RE) const {
  25. const SectionEntry &Section = Sections[RE.SectionID];
  26. uint8_t *LocalAddress = Section.getAddressWithOffset(RE.Offset);
  27. unsigned NumBytes = 1 << RE.Size;
  28. int64_t Addend = 0;
  29. // Verify that the relocation has the correct size and alignment.
  30. switch (RE.RelType) {
  31. default: {
  32. std::string ErrMsg;
  33. {
  34. raw_string_ostream ErrStream(ErrMsg);
  35. ErrStream << "Unsupported relocation type: "
  36. << getRelocName(RE.RelType);
  37. }
  38. return make_error<StringError>(std::move(ErrMsg),
  39. inconvertibleErrorCode());
  40. }
  41. case MachO::ARM64_RELOC_POINTER_TO_GOT:
  42. case MachO::ARM64_RELOC_UNSIGNED: {
  43. if (NumBytes != 4 && NumBytes != 8) {
  44. std::string ErrMsg;
  45. {
  46. raw_string_ostream ErrStream(ErrMsg);
  47. ErrStream << "Invalid relocation size for relocation "
  48. << getRelocName(RE.RelType);
  49. }
  50. return make_error<StringError>(std::move(ErrMsg),
  51. inconvertibleErrorCode());
  52. }
  53. break;
  54. }
  55. case MachO::ARM64_RELOC_BRANCH26:
  56. case MachO::ARM64_RELOC_PAGE21:
  57. case MachO::ARM64_RELOC_PAGEOFF12:
  58. case MachO::ARM64_RELOC_GOT_LOAD_PAGE21:
  59. case MachO::ARM64_RELOC_GOT_LOAD_PAGEOFF12:
  60. assert(NumBytes == 4 && "Invalid relocation size.");
  61. assert((((uintptr_t)LocalAddress & 0x3) == 0) &&
  62. "Instruction address is not aligned to 4 bytes.");
  63. break;
  64. }
  65. switch (RE.RelType) {
  66. default:
  67. llvm_unreachable("Unsupported relocation type!");
  68. case MachO::ARM64_RELOC_POINTER_TO_GOT:
  69. case MachO::ARM64_RELOC_UNSIGNED:
  70. // This could be an unaligned memory location.
  71. if (NumBytes == 4)
  72. Addend = *reinterpret_cast<support::ulittle32_t *>(LocalAddress);
  73. else
  74. Addend = *reinterpret_cast<support::ulittle64_t *>(LocalAddress);
  75. break;
  76. case MachO::ARM64_RELOC_BRANCH26: {
  77. // Verify that the relocation points to a B/BL instruction.
  78. auto *p = reinterpret_cast<support::aligned_ulittle32_t *>(LocalAddress);
  79. assert(((*p & 0xFC000000) == 0x14000000 ||
  80. (*p & 0xFC000000) == 0x94000000) &&
  81. "Expected branch instruction.");
  82. // Get the 26 bit addend encoded in the branch instruction and sign-extend
  83. // to 64 bit. The lower 2 bits are always zeros and are therefore implicit
  84. // (<< 2).
  85. Addend = (*p & 0x03FFFFFF) << 2;
  86. Addend = SignExtend64(Addend, 28);
  87. break;
  88. }
  89. case MachO::ARM64_RELOC_GOT_LOAD_PAGE21:
  90. case MachO::ARM64_RELOC_PAGE21: {
  91. // Verify that the relocation points to the expected adrp instruction.
  92. auto *p = reinterpret_cast<support::aligned_ulittle32_t *>(LocalAddress);
  93. assert((*p & 0x9F000000) == 0x90000000 && "Expected adrp instruction.");
  94. // Get the 21 bit addend encoded in the adrp instruction and sign-extend
  95. // to 64 bit. The lower 12 bits (4096 byte page) are always zeros and are
  96. // therefore implicit (<< 12).
  97. Addend = ((*p & 0x60000000) >> 29) | ((*p & 0x01FFFFE0) >> 3) << 12;
  98. Addend = SignExtend64(Addend, 33);
  99. break;
  100. }
  101. case MachO::ARM64_RELOC_GOT_LOAD_PAGEOFF12: {
  102. // Verify that the relocation points to one of the expected load / store
  103. // instructions.
  104. auto *p = reinterpret_cast<support::aligned_ulittle32_t *>(LocalAddress);
  105. (void)p;
  106. assert((*p & 0x3B000000) == 0x39000000 &&
  107. "Only expected load / store instructions.");
  108. LLVM_FALLTHROUGH;
  109. }
  110. case MachO::ARM64_RELOC_PAGEOFF12: {
  111. // Verify that the relocation points to one of the expected load / store
  112. // or add / sub instructions.
  113. auto *p = reinterpret_cast<support::aligned_ulittle32_t *>(LocalAddress);
  114. assert((((*p & 0x3B000000) == 0x39000000) ||
  115. ((*p & 0x11C00000) == 0x11000000) ) &&
  116. "Expected load / store or add/sub instruction.");
  117. // Get the 12 bit addend encoded in the instruction.
  118. Addend = (*p & 0x003FFC00) >> 10;
  119. // Check which instruction we are decoding to obtain the implicit shift
  120. // factor of the instruction.
  121. int ImplicitShift = 0;
  122. if ((*p & 0x3B000000) == 0x39000000) { // << load / store
  123. // For load / store instructions the size is encoded in bits 31:30.
  124. ImplicitShift = ((*p >> 30) & 0x3);
  125. if (ImplicitShift == 0) {
  126. // Check if this a vector op to get the correct shift value.
  127. if ((*p & 0x04800000) == 0x04800000)
  128. ImplicitShift = 4;
  129. }
  130. }
  131. // Compensate for implicit shift.
  132. Addend <<= ImplicitShift;
  133. break;
  134. }
  135. }
  136. return Addend;
  137. }
  /// Encode \p Addend into the instruction / memory location at
  /// \p LocalAddress using the encoding required by relocation type
  /// \p RelType. \p NumBytes is the width of the patched field (4 or 8) for
  /// data relocations; instruction relocations always rewrite one 32-bit
  /// opcode word.
  void encodeAddend(uint8_t *LocalAddress, unsigned NumBytes,
                    MachO::RelocationInfoType RelType, int64_t Addend) const {
    // Verify that the relocation has the correct alignment.
    switch (RelType) {
    default:
      llvm_unreachable("Unsupported relocation type!");
    case MachO::ARM64_RELOC_POINTER_TO_GOT:
    case MachO::ARM64_RELOC_UNSIGNED:
      assert((NumBytes == 4 || NumBytes == 8) && "Invalid relocation size.");
      break;
    case MachO::ARM64_RELOC_BRANCH26:
    case MachO::ARM64_RELOC_PAGE21:
    case MachO::ARM64_RELOC_PAGEOFF12:
    case MachO::ARM64_RELOC_GOT_LOAD_PAGE21:
    case MachO::ARM64_RELOC_GOT_LOAD_PAGEOFF12:
      // These all patch a single 32-bit instruction word in place.
      assert(NumBytes == 4 && "Invalid relocation size.");
      assert((((uintptr_t)LocalAddress & 0x3) == 0) &&
             "Instruction address is not aligned to 4 bytes.");
      break;
    }

    switch (RelType) {
    default:
      llvm_unreachable("Unsupported relocation type!");
    case MachO::ARM64_RELOC_POINTER_TO_GOT:
    case MachO::ARM64_RELOC_UNSIGNED:
      // This could be an unaligned memory location.
      if (NumBytes == 4)
        *reinterpret_cast<support::ulittle32_t *>(LocalAddress) = Addend;
      else
        *reinterpret_cast<support::ulittle64_t *>(LocalAddress) = Addend;
      break;
    case MachO::ARM64_RELOC_BRANCH26: {
      auto *p = reinterpret_cast<support::aligned_ulittle32_t *>(LocalAddress);
      // Verify that the relocation points to the expected branch instruction
      // (B = 0x14000000, BL = 0x94000000).
      assert(((*p & 0xFC000000) == 0x14000000 ||
              (*p & 0xFC000000) == 0x94000000) &&
             "Expected branch instruction.");
      // Verify addend value: must be 4-byte aligned and within +/-128 MiB.
      assert((Addend & 0x3) == 0 && "Branch target is not aligned");
      assert(isInt<28>(Addend) && "Branch target is out of range.");
      // Encode the addend as 26 bit immediate in the branch instruction.
      *p = (*p & 0xFC000000) | ((uint32_t)(Addend >> 2) & 0x03FFFFFF);
      break;
    }
    case MachO::ARM64_RELOC_GOT_LOAD_PAGE21:
    case MachO::ARM64_RELOC_PAGE21: {
      // Verify that the relocation points to the expected adrp instruction.
      auto *p = reinterpret_cast<support::aligned_ulittle32_t *>(LocalAddress);
      assert((*p & 0x9F000000) == 0x90000000 && "Expected adrp instruction.");
      // Check that the addend fits into 21 bits (+ 12 lower bits).
      assert((Addend & 0xFFF) == 0 && "ADRP target is not page aligned.");
      assert(isInt<33>(Addend) && "Invalid page reloc value.");
      // Encode the addend into the instruction: Addend[13:12] -> immlo
      // (instruction bits 30:29), Addend[32:14] -> immhi (bits 23:5).
      uint32_t ImmLoValue = ((uint64_t)Addend << 17) & 0x60000000;
      uint32_t ImmHiValue = ((uint64_t)Addend >> 9) & 0x00FFFFE0;
      *p = (*p & 0x9F00001F) | ImmHiValue | ImmLoValue;
      break;
    }
    case MachO::ARM64_RELOC_GOT_LOAD_PAGEOFF12: {
      // Verify that the relocation points to one of the expected load / store
      // instructions.
      auto *p = reinterpret_cast<support::aligned_ulittle32_t *>(LocalAddress);
      assert((*p & 0x3B000000) == 0x39000000 &&
             "Only expected load / store instructions.");
      (void)p;
      LLVM_FALLTHROUGH;
    }
    case MachO::ARM64_RELOC_PAGEOFF12: {
      // Verify that the relocation points to one of the expected load / store
      // or add / sub instructions.
      auto *p = reinterpret_cast<support::aligned_ulittle32_t *>(LocalAddress);
      assert((((*p & 0x3B000000) == 0x39000000) ||
              ((*p & 0x11C00000) == 0x11000000) ) &&
             "Expected load / store or add/sub instruction.");
      // Check which instruction we are decoding to obtain the implicit shift
      // factor of the instruction and verify alignment.
      int ImplicitShift = 0;
      if ((*p & 0x3B000000) == 0x39000000) { // << load / store
        // For load / store instructions the size is encoded in bits 31:30.
        ImplicitShift = ((*p >> 30) & 0x3);
        switch (ImplicitShift) {
        case 0:
          // Check if this a vector op to get the correct shift value.
          if ((*p & 0x04800000) == 0x04800000) {
            ImplicitShift = 4;
            assert(((Addend & 0xF) == 0) &&
                   "128-bit LDR/STR not 16-byte aligned.");
          }
          break;
        case 1:
          assert(((Addend & 0x1) == 0) && "16-bit LDR/STR not 2-byte aligned.");
          break;
        case 2:
          assert(((Addend & 0x3) == 0) && "32-bit LDR/STR not 4-byte aligned.");
          break;
        case 3:
          assert(((Addend & 0x7) == 0) && "64-bit LDR/STR not 8-byte aligned.");
          break;
        }
      }
      // Compensate for implicit shift.
      Addend >>= ImplicitShift;
      assert(isUInt<12>(Addend) && "Addend cannot be encoded.");
      // Encode the addend into the instruction.
      *p = (*p & 0xFFC003FF) | ((uint32_t)(Addend << 10) & 0x003FFC00);
      break;
    }
    }
  }
  /// Translate one MachO relocation into a RelocationEntry and queue it for
  /// resolution, handling the ARM64_RELOC_ADDEND prefix relocation,
  /// SUBTRACTOR pairs, and GOT-based relocations. Returns the iterator
  /// advanced past all consumed relocations, or an error for scattered /
  /// malformed relocations.
  Expected<relocation_iterator>
  processRelocationRef(unsigned SectionID, relocation_iterator RelI,
                       const ObjectFile &BaseObjT,
                       ObjSectionToIDMap &ObjSectionToID,
                       StubMap &Stubs) override {
    const MachOObjectFile &Obj =
        static_cast<const MachOObjectFile &>(BaseObjT);
    MachO::any_relocation_info RelInfo =
        Obj.getRelocation(RelI->getRawDataRefImpl());

    if (Obj.isRelocationScattered(RelInfo))
      return make_error<RuntimeDyldError>("Scattered relocations not supported "
                                          "for MachO AArch64");

    // ARM64 has an ARM64_RELOC_ADDEND relocation type that carries an explicit
    // addend for the following relocation. If found: (1) store the associated
    // addend, (2) consume the next relocation, and (3) use the stored addend to
    // override the addend.
    int64_t ExplicitAddend = 0;
    if (Obj.getAnyRelocationType(RelInfo) == MachO::ARM64_RELOC_ADDEND) {
      assert(!Obj.getPlainRelocationExternal(RelInfo));
      assert(!Obj.getAnyRelocationPCRel(RelInfo));
      assert(Obj.getAnyRelocationLength(RelInfo) == 2);
      // ARM64_RELOC_ADDEND carries the addend in the symbol-number field.
      int64_t RawAddend = Obj.getPlainRelocationSymbolNum(RelInfo);
      // Sign-extend the 24-bit to 64-bit.
      ExplicitAddend = SignExtend64(RawAddend, 24);
      ++RelI;
      RelInfo = Obj.getRelocation(RelI->getRawDataRefImpl());
    }

    // SUBTRACTOR relocations come as a two-relocation pair and are handled
    // separately.
    if (Obj.getAnyRelocationType(RelInfo) == MachO::ARM64_RELOC_SUBTRACTOR)
      return processSubtractRelocation(SectionID, RelI, Obj, ObjSectionToID);

    RelocationEntry RE(getRelocationEntry(SectionID, Obj, RelI));

    if (RE.RelType == MachO::ARM64_RELOC_POINTER_TO_GOT) {
      // Only 32-bit pc-rel (Size == 2) and 64-bit absolute (Size == 3) forms
      // are supported.
      bool Valid =
          (RE.Size == 2 && RE.IsPCRel) || (RE.Size == 3 && !RE.IsPCRel);
      if (!Valid)
        return make_error<StringError>("ARM64_RELOC_POINTER_TO_GOT supports "
                                       "32-bit pc-rel or 64-bit absolute only",
                                       inconvertibleErrorCode());
    }

    if (auto Addend = decodeAddend(RE))
      RE.Addend = *Addend;
    else
      return Addend.takeError();

    // A relocation must carry its addend either embedded in the patched
    // location or via ARM64_RELOC_ADDEND -- never both.
    assert((ExplicitAddend == 0 || RE.Addend == 0) && "Relocation has "
           "ARM64_RELOC_ADDEND and embedded addend in the instruction.");
    if (ExplicitAddend)
      RE.Addend = ExplicitAddend;

    RelocationValueRef Value;
    if (auto ValueOrErr = getRelocationValueRef(Obj, RelI, RE, ObjSectionToID))
      Value = *ValueOrErr;
    else
      return ValueOrErr.takeError();

    bool IsExtern = Obj.getPlainRelocationExternal(RelInfo);
    if (RE.RelType == MachO::ARM64_RELOC_POINTER_TO_GOT) {
      // We'll take care of the offset in processGOTRelocation.
      Value.Offset = 0;
    } else if (!IsExtern && RE.IsPCRel)
      makeValueAddendPCRel(Value, RelI, 1 << RE.Size);

    RE.Addend = Value.Offset;

    // GOT-based relocations are redirected through a GOT entry in the stub
    // area; everything else is queued directly.
    if (RE.RelType == MachO::ARM64_RELOC_GOT_LOAD_PAGE21 ||
        RE.RelType == MachO::ARM64_RELOC_GOT_LOAD_PAGEOFF12 ||
        RE.RelType == MachO::ARM64_RELOC_POINTER_TO_GOT)
      processGOTRelocation(RE, Value, Stubs);
    else {
      if (Value.SymbolName)
        addRelocationForSymbol(RE, Value.SymbolName);
      else
        addRelocationForSection(RE, Value.SectionID);
    }

    return ++RelI;
  }
  318. void resolveRelocation(const RelocationEntry &RE, uint64_t Value) override {
  319. LLVM_DEBUG(dumpRelocationToResolve(RE, Value));
  320. const SectionEntry &Section = Sections[RE.SectionID];
  321. uint8_t *LocalAddress = Section.getAddressWithOffset(RE.Offset);
  322. MachO::RelocationInfoType RelType =
  323. static_cast<MachO::RelocationInfoType>(RE.RelType);
  324. switch (RelType) {
  325. default:
  326. llvm_unreachable("Invalid relocation type!");
  327. case MachO::ARM64_RELOC_UNSIGNED: {
  328. assert(!RE.IsPCRel && "PCRel and ARM64_RELOC_UNSIGNED not supported");
  329. // Mask in the target value a byte at a time (we don't have an alignment
  330. // guarantee for the target address, so this is safest).
  331. if (RE.Size < 2)
  332. llvm_unreachable("Invalid size for ARM64_RELOC_UNSIGNED");
  333. encodeAddend(LocalAddress, 1 << RE.Size, RelType, Value + RE.Addend);
  334. break;
  335. }
  336. case MachO::ARM64_RELOC_POINTER_TO_GOT: {
  337. assert(((RE.Size == 2 && RE.IsPCRel) || (RE.Size == 3 && !RE.IsPCRel)) &&
  338. "ARM64_RELOC_POINTER_TO_GOT only supports 32-bit pc-rel or 64-bit "
  339. "absolute");
  340. // Addend is the GOT entry address and RE.Offset the target of the
  341. // relocation.
  342. uint64_t Result =
  343. RE.IsPCRel ? (RE.Addend - RE.Offset) : (Value + RE.Addend);
  344. encodeAddend(LocalAddress, 1 << RE.Size, RelType, Result);
  345. break;
  346. }
  347. case MachO::ARM64_RELOC_BRANCH26: {
  348. assert(RE.IsPCRel && "not PCRel and ARM64_RELOC_BRANCH26 not supported");
  349. // Check if branch is in range.
  350. uint64_t FinalAddress = Section.getLoadAddressWithOffset(RE.Offset);
  351. int64_t PCRelVal = Value - FinalAddress + RE.Addend;
  352. encodeAddend(LocalAddress, /*Size=*/4, RelType, PCRelVal);
  353. break;
  354. }
  355. case MachO::ARM64_RELOC_GOT_LOAD_PAGE21:
  356. case MachO::ARM64_RELOC_PAGE21: {
  357. assert(RE.IsPCRel && "not PCRel and ARM64_RELOC_PAGE21 not supported");
  358. // Adjust for PC-relative relocation and offset.
  359. uint64_t FinalAddress = Section.getLoadAddressWithOffset(RE.Offset);
  360. int64_t PCRelVal =
  361. ((Value + RE.Addend) & (-4096)) - (FinalAddress & (-4096));
  362. encodeAddend(LocalAddress, /*Size=*/4, RelType, PCRelVal);
  363. break;
  364. }
  365. case MachO::ARM64_RELOC_GOT_LOAD_PAGEOFF12:
  366. case MachO::ARM64_RELOC_PAGEOFF12: {
  367. assert(!RE.IsPCRel && "PCRel and ARM64_RELOC_PAGEOFF21 not supported");
  368. // Add the offset from the symbol.
  369. Value += RE.Addend;
  370. // Mask out the page address and only use the lower 12 bits.
  371. Value &= 0xFFF;
  372. encodeAddend(LocalAddress, /*Size=*/4, RelType, Value);
  373. break;
  374. }
  375. case MachO::ARM64_RELOC_SUBTRACTOR: {
  376. uint64_t SectionABase = Sections[RE.Sections.SectionA].getLoadAddress();
  377. uint64_t SectionBBase = Sections[RE.Sections.SectionB].getLoadAddress();
  378. assert((Value == SectionABase || Value == SectionBBase) &&
  379. "Unexpected SUBTRACTOR relocation value.");
  380. Value = SectionABase - SectionBBase + RE.Addend;
  381. writeBytesUnaligned(Value, LocalAddress, 1 << RE.Size);
  382. break;
  383. }
  384. case MachO::ARM64_RELOC_TLVP_LOAD_PAGE21:
  385. case MachO::ARM64_RELOC_TLVP_LOAD_PAGEOFF12:
  386. llvm_unreachable("Relocation type not yet implemented!");
  387. case MachO::ARM64_RELOC_ADDEND:
  388. llvm_unreachable("ARM64_RELOC_ADDEND should have been handeled by "
  389. "processRelocationRef!");
  390. }
  391. }
  /// No per-section finalization is needed for MachO/AArch64; always
  /// succeeds.
  Error finalizeSection(const ObjectFile &Obj, unsigned SectionID,
                        const SectionRef &Section) {
    return Error::success();
  }
  396. private:
  397. void processGOTRelocation(const RelocationEntry &RE,
  398. RelocationValueRef &Value, StubMap &Stubs) {
  399. assert((RE.RelType == MachO::ARM64_RELOC_POINTER_TO_GOT &&
  400. (RE.Size == 2 || RE.Size == 3)) ||
  401. RE.Size == 2);
  402. SectionEntry &Section = Sections[RE.SectionID];
  403. StubMap::const_iterator i = Stubs.find(Value);
  404. int64_t Offset;
  405. if (i != Stubs.end())
  406. Offset = static_cast<int64_t>(i->second);
  407. else {
  408. // FIXME: There must be a better way to do this then to check and fix the
  409. // alignment every time!!!
  410. uintptr_t BaseAddress = uintptr_t(Section.getAddress());
  411. uintptr_t StubAlignment = getStubAlignment();
  412. uintptr_t StubAddress =
  413. (BaseAddress + Section.getStubOffset() + StubAlignment - 1) &
  414. -StubAlignment;
  415. unsigned StubOffset = StubAddress - BaseAddress;
  416. Stubs[Value] = StubOffset;
  417. assert(((StubAddress % getStubAlignment()) == 0) &&
  418. "GOT entry not aligned");
  419. RelocationEntry GOTRE(RE.SectionID, StubOffset,
  420. MachO::ARM64_RELOC_UNSIGNED, Value.Offset,
  421. /*IsPCRel=*/false, /*Size=*/3);
  422. if (Value.SymbolName)
  423. addRelocationForSymbol(GOTRE, Value.SymbolName);
  424. else
  425. addRelocationForSection(GOTRE, Value.SectionID);
  426. Section.advanceStubOffset(getMaxStubSize());
  427. Offset = static_cast<int64_t>(StubOffset);
  428. }
  429. RelocationEntry TargetRE(RE.SectionID, RE.Offset, RE.RelType, Offset,
  430. RE.IsPCRel, RE.Size);
  431. addRelocationForSection(TargetRE, RE.SectionID);
  432. }
  /// Handle an ARM64_RELOC_SUBTRACTOR pair: \p RelI names the subtrahend
  /// symbol (B) and the immediately following relocation names the minuend
  /// symbol (A). The fixup later resolves to A - B + addend (see
  /// resolveRelocation's SUBTRACTOR case). Returns the iterator advanced
  /// past both relocations.
  Expected<relocation_iterator>
  processSubtractRelocation(unsigned SectionID, relocation_iterator RelI,
                            const ObjectFile &BaseObjT,
                            ObjSectionToIDMap &ObjSectionToID) {
    const MachOObjectFile &Obj =
        static_cast<const MachOObjectFile&>(BaseObjT);
    MachO::any_relocation_info RE =
        Obj.getRelocation(RelI->getRawDataRefImpl());

    // Relocation length encodes log2 of the patched width (NumBytes below).
    unsigned Size = Obj.getAnyRelocationLength(RE);
    uint64_t Offset = RelI->getOffset();
    uint8_t *LocalAddress = Sections[SectionID].getAddressWithOffset(Offset);
    unsigned NumBytes = 1 << Size;

    Expected<StringRef> SubtrahendNameOrErr = RelI->getSymbol()->getName();
    if (!SubtrahendNameOrErr)
      return SubtrahendNameOrErr.takeError();
    // NOTE(review): both find() results below are dereferenced unchecked --
    // this presumes the subtrahend/minuend symbols are always present in
    // GlobalSymbolTable; verify against callers.
    auto SubtrahendI = GlobalSymbolTable.find(*SubtrahendNameOrErr);
    unsigned SectionBID = SubtrahendI->second.getSectionID();
    uint64_t SectionBOffset = SubtrahendI->second.getOffset();
    // The initial addend is stored at the fixup location itself,
    // sign-extended from NumBytes bytes.
    int64_t Addend =
        SignExtend64(readBytesUnaligned(LocalAddress, NumBytes), NumBytes * 8);

    // The minuend symbol is carried by the next relocation in the list.
    ++RelI;
    Expected<StringRef> MinuendNameOrErr = RelI->getSymbol()->getName();
    if (!MinuendNameOrErr)
      return MinuendNameOrErr.takeError();
    auto MinuendI = GlobalSymbolTable.find(*MinuendNameOrErr);
    unsigned SectionAID = MinuendI->second.getSectionID();
    uint64_t SectionAOffset = MinuendI->second.getOffset();

    RelocationEntry R(SectionID, Offset, MachO::ARM64_RELOC_SUBTRACTOR, (uint64_t)Addend,
                      SectionAID, SectionAOffset, SectionBID, SectionBOffset,
                      false, Size);

    addRelocationForSection(R, SectionAID);

    return ++RelI;
  }
  466. static const char *getRelocName(uint32_t RelocType) {
  467. switch (RelocType) {
  468. case MachO::ARM64_RELOC_UNSIGNED: return "ARM64_RELOC_UNSIGNED";
  469. case MachO::ARM64_RELOC_SUBTRACTOR: return "ARM64_RELOC_SUBTRACTOR";
  470. case MachO::ARM64_RELOC_BRANCH26: return "ARM64_RELOC_BRANCH26";
  471. case MachO::ARM64_RELOC_PAGE21: return "ARM64_RELOC_PAGE21";
  472. case MachO::ARM64_RELOC_PAGEOFF12: return "ARM64_RELOC_PAGEOFF12";
  473. case MachO::ARM64_RELOC_GOT_LOAD_PAGE21: return "ARM64_RELOC_GOT_LOAD_PAGE21";
  474. case MachO::ARM64_RELOC_GOT_LOAD_PAGEOFF12: return "ARM64_RELOC_GOT_LOAD_PAGEOFF12";
  475. case MachO::ARM64_RELOC_POINTER_TO_GOT: return "ARM64_RELOC_POINTER_TO_GOT";
  476. case MachO::ARM64_RELOC_TLVP_LOAD_PAGE21: return "ARM64_RELOC_TLVP_LOAD_PAGE21";
  477. case MachO::ARM64_RELOC_TLVP_LOAD_PAGEOFF12: return "ARM64_RELOC_TLVP_LOAD_PAGEOFF12";
  478. case MachO::ARM64_RELOC_ADDEND: return "ARM64_RELOC_ADDEND";
  479. }
  480. return "Unrecognized arm64 addend";
  481. }
  482. };
  483. }
  484. #undef DEBUG_TYPE
  485. #endif