//===-- RuntimeDyldELF.cpp - Run-time dynamic linker for MC-JIT -*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// Implementation of ELF support for the MC-JIT runtime dynamic linker.
//
//===----------------------------------------------------------------------===//

#include "RuntimeDyldELF.h"
#include "RuntimeDyldCheckerImpl.h"
#include "Targets/RuntimeDyldELFMips.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/ADT/Triple.h"
#include "llvm/BinaryFormat/ELF.h"
#include "llvm/Object/ELFObjectFile.h"
#include "llvm/Object/ObjectFile.h"
#include "llvm/Support/Endian.h"
#include "llvm/Support/MemoryBuffer.h"

using namespace llvm;
using namespace llvm::object;
using namespace llvm::support::endian;

#define DEBUG_TYPE "dyld"

static void or32le(void *P, int32_t V) { write32le(P, read32le(P) | V); }
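
// OR a 12-bit immediate into bits [21:10] of a little-endian AArch64
// instruction word, i.e. the imm12 field shared by ADD-immediate and the
// unsigned-offset LD/ST forms. For example, or32AArch64Imm(L, 0x123) ORs
// 0x123 << 10 into the word at L.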
static void or32AArch64Imm(void *L, uint64_t Imm) {
  or32le(L, (Imm & 0xFFF) << 10);
}

template <class T> static void write(bool isBE, void *P, T V) {
  isBE ? write<T, support::big>(P, V) : write<T, support::little>(P, V);
}
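
// Patch the 21-bit immediate of an AArch64 ADR/ADRP instruction: bits [1:0]
// of Imm land in instruction bits [30:29] (immlo) and bits [20:2] land in
// bits [23:5] (immhi). Only those fields are rewritten; the rest of the
// instruction word is preserved via the mask.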
static void write32AArch64Addr(void *L, uint64_t Imm) {
  uint32_t ImmLo = (Imm & 0x3) << 29;
  uint32_t ImmHi = (Imm & 0x1FFFFC) << 3;
  uint64_t Mask = (0x3 << 29) | (0x1FFFFC << 3);
  write32le(L, (read32le(L) & ~Mask) | ImmLo | ImmHi);
}

// Return bits [Start, End] of Val, shifted right by Start bits.
// For instance, getBits(0xF0, 4, 8) returns 0xF.
static uint64_t getBits(uint64_t Val, int Start, int End) {
  uint64_t Mask = ((uint64_t)1 << (End + 1 - Start)) - 1;
  return (Val >> Start) & Mask;
}

namespace {

template <class ELFT> class DyldELFObject : public ELFObjectFile<ELFT> {
  LLVM_ELF_IMPORT_TYPES_ELFT(ELFT)

  typedef typename ELFT::uint addr_type;

  DyldELFObject(ELFObjectFile<ELFT> &&Obj);

public:
  static Expected<std::unique_ptr<DyldELFObject>>
  create(MemoryBufferRef Wrapper);

  void updateSectionAddress(const SectionRef &Sec, uint64_t Addr);

  void updateSymbolAddress(const SymbolRef &SymRef, uint64_t Addr);

  // Methods for type inquiry through isa, cast and dyn_cast
  static bool classof(const Binary *v) {
    return (isa<ELFObjectFile<ELFT>>(v) &&
            classof(cast<ELFObjectFile<ELFT>>(v)));
  }
  static bool classof(const ELFObjectFile<ELFT> *v) {
    return v->isDyldType();
  }
};

// The MemoryBuffer passed into this constructor is just a wrapper around the
// actual memory. Ultimately, the Binary parent class will take ownership of
// this MemoryBuffer object but not the underlying memory.
template <class ELFT>
DyldELFObject<ELFT>::DyldELFObject(ELFObjectFile<ELFT> &&Obj)
    : ELFObjectFile<ELFT>(std::move(Obj)) {
  this->isDyldELFObject = true;
}

template <class ELFT>
Expected<std::unique_ptr<DyldELFObject<ELFT>>>
DyldELFObject<ELFT>::create(MemoryBufferRef Wrapper) {
  auto Obj = ELFObjectFile<ELFT>::create(Wrapper);
  if (auto E = Obj.takeError())
    return std::move(E);
  std::unique_ptr<DyldELFObject<ELFT>> Ret(
      new DyldELFObject<ELFT>(std::move(*Obj)));
  return std::move(Ret);
}

template <class ELFT>
void DyldELFObject<ELFT>::updateSectionAddress(const SectionRef &Sec,
                                               uint64_t Addr) {
  DataRefImpl ShdrRef = Sec.getRawDataRefImpl();
  Elf_Shdr *shdr =
      const_cast<Elf_Shdr *>(reinterpret_cast<const Elf_Shdr *>(ShdrRef.p));

  // This assumes the address passed in matches the target address bitness
  // The template-based type cast handles everything else.
  shdr->sh_addr = static_cast<addr_type>(Addr);
}

template <class ELFT>
void DyldELFObject<ELFT>::updateSymbolAddress(const SymbolRef &SymRef,
                                              uint64_t Addr) {
  Elf_Sym *sym = const_cast<Elf_Sym *>(
      ELFObjectFile<ELFT>::getSymbol(SymRef.getRawDataRefImpl()));

  // This assumes the address passed in matches the target address bitness
  // The template-based type cast handles everything else.
  sym->st_value = static_cast<addr_type>(Addr);
}

class LoadedELFObjectInfo final
    : public LoadedObjectInfoHelper<LoadedELFObjectInfo,
                                    RuntimeDyld::LoadedObjectInfo> {
public:
  LoadedELFObjectInfo(RuntimeDyldImpl &RTDyld, ObjSectionToIDMap ObjSecToIDMap)
      : LoadedObjectInfoHelper(RTDyld, std::move(ObjSecToIDMap)) {}

  OwningBinary<ObjectFile>
  getObjectForDebug(const ObjectFile &Obj) const override;
};

template <typename ELFT>
static Expected<std::unique_ptr<DyldELFObject<ELFT>>>
createRTDyldELFObject(MemoryBufferRef Buffer, const ObjectFile &SourceObject,
                      const LoadedELFObjectInfo &L) {
  typedef typename ELFT::Shdr Elf_Shdr;
  typedef typename ELFT::uint addr_type;

  Expected<std::unique_ptr<DyldELFObject<ELFT>>> ObjOrErr =
      DyldELFObject<ELFT>::create(Buffer);
  if (Error E = ObjOrErr.takeError())
    return std::move(E);

  std::unique_ptr<DyldELFObject<ELFT>> Obj = std::move(*ObjOrErr);

  // Iterate over all sections in the object.
  auto SI = SourceObject.section_begin();
  for (const auto &Sec : Obj->sections()) {
    Expected<StringRef> NameOrErr = Sec.getName();
    if (!NameOrErr) {
      consumeError(NameOrErr.takeError());
      continue;
    }

    if (*NameOrErr != "") {
      DataRefImpl ShdrRef = Sec.getRawDataRefImpl();
      Elf_Shdr *shdr = const_cast<Elf_Shdr *>(
          reinterpret_cast<const Elf_Shdr *>(ShdrRef.p));

      if (uint64_t SecLoadAddr = L.getSectionLoadAddress(*SI)) {
        // This assumes that the address passed in matches the target address
        // bitness. The template-based type cast handles everything else.
        shdr->sh_addr = static_cast<addr_type>(SecLoadAddr);
      }
    }
    ++SI;
  }

  return std::move(Obj);
}

static OwningBinary<ObjectFile>
createELFDebugObject(const ObjectFile &Obj, const LoadedELFObjectInfo &L) {
  assert(Obj.isELF() && "Not an ELF object file.");

  std::unique_ptr<MemoryBuffer> Buffer =
      MemoryBuffer::getMemBufferCopy(Obj.getData(), Obj.getFileName());

  Expected<std::unique_ptr<ObjectFile>> DebugObj(nullptr);
  handleAllErrors(DebugObj.takeError());
  if (Obj.getBytesInAddress() == 4 && Obj.isLittleEndian())
    DebugObj =
        createRTDyldELFObject<ELF32LE>(Buffer->getMemBufferRef(), Obj, L);
  else if (Obj.getBytesInAddress() == 4 && !Obj.isLittleEndian())
    DebugObj =
        createRTDyldELFObject<ELF32BE>(Buffer->getMemBufferRef(), Obj, L);
  else if (Obj.getBytesInAddress() == 8 && !Obj.isLittleEndian())
    DebugObj =
        createRTDyldELFObject<ELF64BE>(Buffer->getMemBufferRef(), Obj, L);
  else if (Obj.getBytesInAddress() == 8 && Obj.isLittleEndian())
    DebugObj =
        createRTDyldELFObject<ELF64LE>(Buffer->getMemBufferRef(), Obj, L);
  else
    llvm_unreachable("Unexpected ELF format");

  handleAllErrors(DebugObj.takeError());

  return OwningBinary<ObjectFile>(std::move(*DebugObj), std::move(Buffer));
}

OwningBinary<ObjectFile>
LoadedELFObjectInfo::getObjectForDebug(const ObjectFile &Obj) const {
  return createELFDebugObject(Obj, *this);
}

} // anonymous namespace

namespace llvm {

RuntimeDyldELF::RuntimeDyldELF(RuntimeDyld::MemoryManager &MemMgr,
                               JITSymbolResolver &Resolver)
    : RuntimeDyldImpl(MemMgr, Resolver), GOTSectionID(0), CurrentGOTIndex(0) {}
RuntimeDyldELF::~RuntimeDyldELF() {}

void RuntimeDyldELF::registerEHFrames() {
  for (int i = 0, e = UnregisteredEHFrameSections.size(); i != e; ++i) {
    SID EHFrameSID = UnregisteredEHFrameSections[i];
    uint8_t *EHFrameAddr = Sections[EHFrameSID].getAddress();
    uint64_t EHFrameLoadAddr = Sections[EHFrameSID].getLoadAddress();
    size_t EHFrameSize = Sections[EHFrameSID].getSize();
    MemMgr.registerEHFrames(EHFrameAddr, EHFrameLoadAddr, EHFrameSize);
  }
  UnregisteredEHFrameSections.clear();
}

std::unique_ptr<RuntimeDyldELF>
llvm::RuntimeDyldELF::create(Triple::ArchType Arch,
                             RuntimeDyld::MemoryManager &MemMgr,
                             JITSymbolResolver &Resolver) {
  switch (Arch) {
  default:
    return std::make_unique<RuntimeDyldELF>(MemMgr, Resolver);
  case Triple::mips:
  case Triple::mipsel:
  case Triple::mips64:
  case Triple::mips64el:
    return std::make_unique<RuntimeDyldELFMips>(MemMgr, Resolver);
  }
}

std::unique_ptr<RuntimeDyld::LoadedObjectInfo>
RuntimeDyldELF::loadObject(const object::ObjectFile &O) {
  if (auto ObjSectionToIDOrErr = loadObjectImpl(O))
    return std::make_unique<LoadedELFObjectInfo>(*this, *ObjSectionToIDOrErr);
  else {
    HasError = true;
    raw_string_ostream ErrStream(ErrorStr);
    logAllUnhandledErrors(ObjSectionToIDOrErr.takeError(), ErrStream);
    return nullptr;
  }
}
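
// Each case below implements the ELF x86-64 relocation formula for its type:
// e.g. R_X86_64_64 stores S + A and R_X86_64_PC32 stores S + A - P, where S
// is the symbol value (Value), A the addend, and P the load address of the
// fixup location (FinalAddress).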
void RuntimeDyldELF::resolveX86_64Relocation(const SectionEntry &Section,
                                             uint64_t Offset, uint64_t Value,
                                             uint32_t Type, int64_t Addend,
                                             uint64_t SymOffset) {
  switch (Type) {
  default:
    report_fatal_error("Relocation type not implemented yet!");
    break;
  case ELF::R_X86_64_NONE:
    break;
  case ELF::R_X86_64_8: {
    Value += Addend;
    assert((int64_t)Value <= INT8_MAX && (int64_t)Value >= INT8_MIN);
    uint8_t TruncatedAddr = (Value & 0xFF);
    *Section.getAddressWithOffset(Offset) = TruncatedAddr;
    LLVM_DEBUG(dbgs() << "Writing " << format("%p", TruncatedAddr) << " at "
                      << format("%p\n", Section.getAddressWithOffset(Offset)));
    break;
  }
  case ELF::R_X86_64_16: {
    Value += Addend;
    assert((int64_t)Value <= INT16_MAX && (int64_t)Value >= INT16_MIN);
    uint16_t TruncatedAddr = (Value & 0xFFFF);
    support::ulittle16_t::ref(Section.getAddressWithOffset(Offset)) =
        TruncatedAddr;
    LLVM_DEBUG(dbgs() << "Writing " << format("%p", TruncatedAddr) << " at "
                      << format("%p\n", Section.getAddressWithOffset(Offset)));
    break;
  }
  case ELF::R_X86_64_64: {
    support::ulittle64_t::ref(Section.getAddressWithOffset(Offset)) =
        Value + Addend;
    LLVM_DEBUG(dbgs() << "Writing " << format("%p", (Value + Addend)) << " at "
                      << format("%p\n", Section.getAddressWithOffset(Offset)));
    break;
  }
  case ELF::R_X86_64_32:
  case ELF::R_X86_64_32S: {
    Value += Addend;
    assert((Type == ELF::R_X86_64_32 && (Value <= UINT32_MAX)) ||
           (Type == ELF::R_X86_64_32S &&
            ((int64_t)Value <= INT32_MAX && (int64_t)Value >= INT32_MIN)));
    uint32_t TruncatedAddr = (Value & 0xFFFFFFFF);
    support::ulittle32_t::ref(Section.getAddressWithOffset(Offset)) =
        TruncatedAddr;
    LLVM_DEBUG(dbgs() << "Writing " << format("%p", TruncatedAddr) << " at "
                      << format("%p\n", Section.getAddressWithOffset(Offset)));
    break;
  }
  case ELF::R_X86_64_PC8: {
    uint64_t FinalAddress = Section.getLoadAddressWithOffset(Offset);
    int64_t RealOffset = Value + Addend - FinalAddress;
    assert(isInt<8>(RealOffset));
    int8_t TruncOffset = (RealOffset & 0xFF);
    Section.getAddress()[Offset] = TruncOffset;
    break;
  }
  case ELF::R_X86_64_PC32: {
    uint64_t FinalAddress = Section.getLoadAddressWithOffset(Offset);
    int64_t RealOffset = Value + Addend - FinalAddress;
    assert(isInt<32>(RealOffset));
    int32_t TruncOffset = (RealOffset & 0xFFFFFFFF);
    support::ulittle32_t::ref(Section.getAddressWithOffset(Offset)) =
        TruncOffset;
    break;
  }
  case ELF::R_X86_64_PC64: {
    uint64_t FinalAddress = Section.getLoadAddressWithOffset(Offset);
    int64_t RealOffset = Value + Addend - FinalAddress;
    support::ulittle64_t::ref(Section.getAddressWithOffset(Offset)) =
        RealOffset;
    LLVM_DEBUG(dbgs() << "Writing " << format("%p", RealOffset) << " at "
                      << format("%p\n", FinalAddress));
    break;
  }
  case ELF::R_X86_64_GOTOFF64: {
    // Compute Value - GOTBase.
    uint64_t GOTBase = 0;
    for (const auto &Section : Sections) {
      if (Section.getName() == ".got") {
        GOTBase = Section.getLoadAddressWithOffset(0);
        break;
      }
    }
    assert(GOTBase != 0 && "missing GOT");
    int64_t GOTOffset = Value - GOTBase + Addend;
    support::ulittle64_t::ref(Section.getAddressWithOffset(Offset)) = GOTOffset;
    break;
  }
  case ELF::R_X86_64_DTPMOD64: {
    // We only have one DSO, so the module id is always 1.
    support::ulittle64_t::ref(Section.getAddressWithOffset(Offset)) = 1;
    break;
  }
  case ELF::R_X86_64_DTPOFF64:
  case ELF::R_X86_64_TPOFF64: {
    // DTPOFF64 should resolve to the offset in the TLS block, TPOFF64 to the
    // offset in the *initial* TLS block. Since we are statically linking, all
    // TLS blocks already exist in the initial block, so resolve both
    // relocations equally.
    support::ulittle64_t::ref(Section.getAddressWithOffset(Offset)) =
        Value + Addend;
    break;
  }
  case ELF::R_X86_64_DTPOFF32:
  case ELF::R_X86_64_TPOFF32: {
    // As for the (D)TPOFF64 relocations above, both DTPOFF32 and TPOFF32 can
    // be resolved equally.
    int64_t RealValue = Value + Addend;
    assert(RealValue >= INT32_MIN && RealValue <= INT32_MAX);
    int32_t TruncValue = RealValue;
    support::ulittle32_t::ref(Section.getAddressWithOffset(Offset)) =
        TruncValue;
    break;
  }
  }
}

void RuntimeDyldELF::resolveX86Relocation(const SectionEntry &Section,
                                          uint64_t Offset, uint32_t Value,
                                          uint32_t Type, int32_t Addend) {
  switch (Type) {
  case ELF::R_386_32: {
    support::ulittle32_t::ref(Section.getAddressWithOffset(Offset)) =
        Value + Addend;
    break;
  }
  // Handle R_386_PLT32 like R_386_PC32 since it should be able to
  // reach any 32 bit address.
  case ELF::R_386_PLT32:
  case ELF::R_386_PC32: {
    uint32_t FinalAddress =
        Section.getLoadAddressWithOffset(Offset) & 0xFFFFFFFF;
    uint32_t RealOffset = Value + Addend - FinalAddress;
    support::ulittle32_t::ref(Section.getAddressWithOffset(Offset)) =
        RealOffset;
    break;
  }
  default:
    // There are other relocation types, but it appears these are the
    // only ones currently used by the LLVM ELF object writer
    report_fatal_error("Relocation type not implemented yet!");
    break;
  }
}

void RuntimeDyldELF::resolveAArch64Relocation(const SectionEntry &Section,
                                              uint64_t Offset, uint64_t Value,
                                              uint32_t Type, int64_t Addend) {
  uint32_t *TargetPtr =
      reinterpret_cast<uint32_t *>(Section.getAddressWithOffset(Offset));
  uint64_t FinalAddress = Section.getLoadAddressWithOffset(Offset);
  // Data should use target endian. Code should always use little endian.
  bool isBE = Arch == Triple::aarch64_be;

  LLVM_DEBUG(dbgs() << "resolveAArch64Relocation, LocalAddress: 0x"
                    << format("%llx", Section.getAddressWithOffset(Offset))
                    << " FinalAddress: 0x" << format("%llx", FinalAddress)
                    << " Value: 0x" << format("%llx", Value) << " Type: 0x"
                    << format("%x", Type) << " Addend: 0x"
                    << format("%llx", Addend) << "\n");

  switch (Type) {
  default:
    report_fatal_error("Relocation type not implemented yet!");
    break;
  case ELF::R_AARCH64_NONE:
    break;
  case ELF::R_AARCH64_ABS16: {
    uint64_t Result = Value + Addend;
    assert(static_cast<int64_t>(Result) >= INT16_MIN && Result < UINT16_MAX);
    write(isBE, TargetPtr, static_cast<uint16_t>(Result & 0xffffU));
    break;
  }
  case ELF::R_AARCH64_ABS32: {
    uint64_t Result = Value + Addend;
    assert(static_cast<int64_t>(Result) >= INT32_MIN && Result < UINT32_MAX);
    write(isBE, TargetPtr, static_cast<uint32_t>(Result & 0xffffffffU));
    break;
  }
  case ELF::R_AARCH64_ABS64:
    write(isBE, TargetPtr, Value + Addend);
    break;
  case ELF::R_AARCH64_PLT32: {
    uint64_t Result = Value + Addend - FinalAddress;
    assert(static_cast<int64_t>(Result) >= INT32_MIN &&
           static_cast<int64_t>(Result) <= INT32_MAX);
    write(isBE, TargetPtr, static_cast<uint32_t>(Result));
    break;
  }
  case ELF::R_AARCH64_PREL32: {
    uint64_t Result = Value + Addend - FinalAddress;
    assert(static_cast<int64_t>(Result) >= INT32_MIN &&
           static_cast<int64_t>(Result) <= UINT32_MAX);
    write(isBE, TargetPtr, static_cast<uint32_t>(Result & 0xffffffffU));
    break;
  }
  case ELF::R_AARCH64_PREL64:
    write(isBE, TargetPtr, Value + Addend - FinalAddress);
    break;
  case ELF::R_AARCH64_CONDBR19: {
    uint64_t BranchImm = Value + Addend - FinalAddress;
    assert(isInt<21>(BranchImm));
    *TargetPtr &= 0xff00001fU;
    // Immediate:20:2 goes in bits 23:5 of Bcc, CBZ, CBNZ
    or32le(TargetPtr, (BranchImm & 0x001FFFFC) << 3);
    break;
  }
  case ELF::R_AARCH64_TSTBR14: {
    uint64_t BranchImm = Value + Addend - FinalAddress;
    assert(isInt<16>(BranchImm));
    *TargetPtr &= 0xfff8001fU;
    // Immediate:15:2 goes in bits 18:5 of TBZ, TBNZ
    or32le(TargetPtr, (BranchImm & 0x0FFFFFFC) << 3);
    break;
  }
  case ELF::R_AARCH64_CALL26: // fallthrough
  case ELF::R_AARCH64_JUMP26: {
    // Operation: S+A-P. Set Call or B immediate value to bits fff_fffc of the
    // calculation.
    uint64_t BranchImm = Value + Addend - FinalAddress;

    // "Check that -2^27 <= result < 2^27".
    assert(isInt<28>(BranchImm));
    or32le(TargetPtr, (BranchImm & 0x0FFFFFFC) >> 2);
    break;
  }
  case ELF::R_AARCH64_MOVW_UABS_G3:
    or32le(TargetPtr, ((Value + Addend) & 0xFFFF000000000000) >> 43);
    break;
  case ELF::R_AARCH64_MOVW_UABS_G2_NC:
    or32le(TargetPtr, ((Value + Addend) & 0xFFFF00000000) >> 27);
    break;
  case ELF::R_AARCH64_MOVW_UABS_G1_NC:
    or32le(TargetPtr, ((Value + Addend) & 0xFFFF0000) >> 11);
    break;
  case ELF::R_AARCH64_MOVW_UABS_G0_NC:
    or32le(TargetPtr, ((Value + Addend) & 0xFFFF) << 5);
    break;
  case ELF::R_AARCH64_ADR_PREL_PG_HI21: {
    // Operation: Page(S+A) - Page(P)
    uint64_t Result =
        ((Value + Addend) & ~0xfffULL) - (FinalAddress & ~0xfffULL);

    // Check that -2^32 <= X < 2^32
    assert(isInt<33>(Result) && "overflow check failed for relocation");

    // Immediate goes in bits 30:29 and 23:5 of the ADRP instruction, taken
    // from bits 32:12 of X.
    write32AArch64Addr(TargetPtr, Result >> 12);
    break;
  }
  case ELF::R_AARCH64_ADD_ABS_LO12_NC:
    // Operation: S + A
    // Immediate goes in bits 21:10 of the ADD instruction, taken
    // from bits 11:0 of X
    or32AArch64Imm(TargetPtr, Value + Addend);
    break;
  case ELF::R_AARCH64_LDST8_ABS_LO12_NC:
    // Operation: S + A
    // Immediate goes in bits 21:10 of LD/ST instruction, taken
    // from bits 11:0 of X
    or32AArch64Imm(TargetPtr, getBits(Value + Addend, 0, 11));
    break;
  case ELF::R_AARCH64_LDST16_ABS_LO12_NC:
    // Operation: S + A
    // Immediate goes in bits 21:10 of LD/ST instruction, taken
    // from bits 11:1 of X
    or32AArch64Imm(TargetPtr, getBits(Value + Addend, 1, 11));
    break;
  case ELF::R_AARCH64_LDST32_ABS_LO12_NC:
    // Operation: S + A
    // Immediate goes in bits 21:10 of LD/ST instruction, taken
    // from bits 11:2 of X
    or32AArch64Imm(TargetPtr, getBits(Value + Addend, 2, 11));
    break;
  case ELF::R_AARCH64_LDST64_ABS_LO12_NC:
    // Operation: S + A
    // Immediate goes in bits 21:10 of LD/ST instruction, taken
    // from bits 11:3 of X
    or32AArch64Imm(TargetPtr, getBits(Value + Addend, 3, 11));
    break;
  case ELF::R_AARCH64_LDST128_ABS_LO12_NC:
    // Operation: S + A
    // Immediate goes in bits 21:10 of LD/ST instruction, taken
    // from bits 11:4 of X
    or32AArch64Imm(TargetPtr, getBits(Value + Addend, 4, 11));
    break;
  case ELF::R_AARCH64_LD_PREL_LO19: {
    // Operation: S + A - P
    uint64_t Result = Value + Addend - FinalAddress;

    // "Check that -2^20 <= result < 2^20".
    assert(isInt<21>(Result));

    *TargetPtr &= 0xff00001fU;
    // Immediate goes in bits 23:5 of LD imm instruction, taken
    // from bits 20:2 of X
    *TargetPtr |= ((Result & 0xffc) << (5 - 2));
    break;
  }
  case ELF::R_AARCH64_ADR_PREL_LO21: {
    // Operation: S + A - P
    uint64_t Result = Value + Addend - FinalAddress;

    // "Check that -2^20 <= result < 2^20".
    assert(isInt<21>(Result));

    *TargetPtr &= 0x9f00001fU;
    // Immediate goes in bits 23:5, 30:29 of ADR imm instruction, taken
    // from bits 20:0 of X
    *TargetPtr |= ((Result & 0xffc) << (5 - 2));
    *TargetPtr |= (Result & 0x3) << 29;
    break;
  }
  }
}
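
// For the ARM branch relocations below, the stored value is S + A - P with
// the 8-byte ARM pipeline bias folded in (Value - FinalAddress - 8), since
// the PC as seen by the processor is two instructions past the fixup.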
void RuntimeDyldELF::resolveARMRelocation(const SectionEntry &Section,
                                          uint64_t Offset, uint32_t Value,
                                          uint32_t Type, int32_t Addend) {
  // TODO: Add Thumb relocations.
  uint32_t *TargetPtr =
      reinterpret_cast<uint32_t *>(Section.getAddressWithOffset(Offset));
  uint32_t FinalAddress = Section.getLoadAddressWithOffset(Offset) & 0xFFFFFFFF;
  Value += Addend;

  LLVM_DEBUG(dbgs() << "resolveARMRelocation, LocalAddress: "
                    << Section.getAddressWithOffset(Offset)
                    << " FinalAddress: " << format("%p", FinalAddress)
                    << " Value: " << format("%x", Value)
                    << " Type: " << format("%x", Type)
                    << " Addend: " << format("%x", Addend) << "\n");

  switch (Type) {
  default:
    llvm_unreachable("Not implemented relocation type!");

  case ELF::R_ARM_NONE:
    break;
  // Write a 31-bit signed offset.
  case ELF::R_ARM_PREL31:
    support::ulittle32_t::ref{TargetPtr} =
        (support::ulittle32_t::ref{TargetPtr} & 0x80000000) |
        ((Value - FinalAddress) & ~0x80000000);
    break;
  case ELF::R_ARM_TARGET1:
  case ELF::R_ARM_ABS32:
    support::ulittle32_t::ref{TargetPtr} = Value;
    break;
  // Write the selected 16-bit half of the 32-bit value into the MOVW/MOVT
  // instruction: the low 12 bits go into imm12 and the top 4 bits of the
  // half are shifted into the imm4 field (bits 19:16).
  case ELF::R_ARM_MOVW_ABS_NC:
  case ELF::R_ARM_MOVT_ABS:
    if (Type == ELF::R_ARM_MOVW_ABS_NC)
      Value = Value & 0xFFFF;
    else if (Type == ELF::R_ARM_MOVT_ABS)
      Value = (Value >> 16) & 0xFFFF;
    support::ulittle32_t::ref{TargetPtr} =
        (support::ulittle32_t::ref{TargetPtr} & ~0x000F0FFF) | (Value & 0xFFF) |
        (((Value >> 12) & 0xF) << 16);
    break;
  // Write a 24-bit relative value to the branch instruction.
  case ELF::R_ARM_PC24: // Fall through.
  case ELF::R_ARM_CALL: // Fall through.
  case ELF::R_ARM_JUMP24:
    int32_t RelValue = static_cast<int32_t>(Value - FinalAddress - 8);
    RelValue = (RelValue & 0x03FFFFFC) >> 2;
    assert((support::ulittle32_t::ref{TargetPtr} & 0xFFFFFF) == 0xFFFFFE);
    support::ulittle32_t::ref{TargetPtr} =
        (support::ulittle32_t::ref{TargetPtr} & 0xFF000000) | RelValue;
    break;
  }
}

void RuntimeDyldELF::setMipsABI(const ObjectFile &Obj) {
  if (Arch == Triple::UnknownArch ||
      !StringRef(Triple::getArchTypePrefix(Arch)).equals("mips")) {
    IsMipsO32ABI = false;
    IsMipsN32ABI = false;
    IsMipsN64ABI = false;
    return;
  }
  if (auto *E = dyn_cast<ELFObjectFileBase>(&Obj)) {
    unsigned AbiVariant = E->getPlatformFlags();
    IsMipsO32ABI = AbiVariant & ELF::EF_MIPS_ABI_O32;
    IsMipsN32ABI = AbiVariant & ELF::EF_MIPS_ABI2;
  }
  IsMipsN64ABI = Obj.getFileFormatName().equals("elf64-mips");
}

// Return the .TOC. section and offset.
Error RuntimeDyldELF::findPPC64TOCSection(const ELFObjectFileBase &Obj,
                                          ObjSectionToIDMap &LocalSections,
                                          RelocationValueRef &Rel) {
  // Set a default SectionID in case we do not find a TOC section below.
  // This may happen for references to the TOC base (sym@toc, .opd
  // relocation) without a .toc directive. In this case just use the
  // first section (which is usually the .opd) since the code won't
  // reference the .toc base directly.
  Rel.SymbolName = nullptr;
  Rel.SectionID = 0;

  // The TOC consists of sections .got, .toc, .tocbss, .plt in that
  // order. The TOC starts where the first of these sections starts.
  for (auto &Section : Obj.sections()) {
    Expected<StringRef> NameOrErr = Section.getName();
    if (!NameOrErr)
      return NameOrErr.takeError();
    StringRef SectionName = *NameOrErr;

    if (SectionName == ".got"
        || SectionName == ".toc"
        || SectionName == ".tocbss"
        || SectionName == ".plt") {
      if (auto SectionIDOrErr =
              findOrEmitSection(Obj, Section, false, LocalSections))
        Rel.SectionID = *SectionIDOrErr;
      else
        return SectionIDOrErr.takeError();
      break;
    }
  }

  // Per the ppc64-elf-linux ABI, the TOC base is the TOC value plus 0x8000,
  // thus permitting a full 64 Kbytes segment.
  Rel.Addend = 0x8000;

  return Error::success();
}

// Returns the sections and offset associated with the OPD entry referenced
// by Symbol.
Error RuntimeDyldELF::findOPDEntrySection(const ELFObjectFileBase &Obj,
                                          ObjSectionToIDMap &LocalSections,
                                          RelocationValueRef &Rel) {
  // Get the ELF symbol value (st_value) to compare with Relocation offset in
  // .opd entries
  for (section_iterator si = Obj.section_begin(), se = Obj.section_end();
       si != se; ++si) {
    Expected<section_iterator> RelSecOrErr = si->getRelocatedSection();
    if (!RelSecOrErr)
      report_fatal_error(Twine(toString(RelSecOrErr.takeError())));

    section_iterator RelSecI = *RelSecOrErr;
    if (RelSecI == Obj.section_end())
      continue;

    Expected<StringRef> NameOrErr = RelSecI->getName();
    if (!NameOrErr)
      return NameOrErr.takeError();
    StringRef RelSectionName = *NameOrErr;

    if (RelSectionName != ".opd")
      continue;

    for (elf_relocation_iterator i = si->relocation_begin(),
                                 e = si->relocation_end();
         i != e;) {
      // The R_PPC64_ADDR64 relocation indicates the first field
      // of a .opd entry
      uint64_t TypeFunc = i->getType();
      if (TypeFunc != ELF::R_PPC64_ADDR64) {
        ++i;
        continue;
      }

      uint64_t TargetSymbolOffset = i->getOffset();
      symbol_iterator TargetSymbol = i->getSymbol();
      int64_t Addend;
      if (auto AddendOrErr = i->getAddend())
        Addend = *AddendOrErr;
      else
        return AddendOrErr.takeError();

      ++i;
      if (i == e)
        break;

      // Just check if following relocation is a R_PPC64_TOC
      uint64_t TypeTOC = i->getType();
      if (TypeTOC != ELF::R_PPC64_TOC)
        continue;

      // Finally, compare the symbol value with the target symbol offset to
      // check whether this .opd entry refers to the symbol the relocation
      // points to.
      if (Rel.Addend != (int64_t)TargetSymbolOffset)
        continue;

      section_iterator TSI = Obj.section_end();
      if (auto TSIOrErr = TargetSymbol->getSection())
        TSI = *TSIOrErr;
      else
        return TSIOrErr.takeError();
      assert(TSI != Obj.section_end() && "TSI should refer to a valid section");

      bool IsCode = TSI->isText();
      if (auto SectionIDOrErr =
              findOrEmitSection(Obj, *TSI, IsCode, LocalSections))
        Rel.SectionID = *SectionIDOrErr;
      else
        return SectionIDOrErr.takeError();
      Rel.Addend = (intptr_t)Addend;
      return Error::success();
    }
  }
  llvm_unreachable("Attempting to get address of OPD entry!");
}

// Relocation masks following the #lo(value), #hi(value), #ha(value),
// #higher(value), #highera(value), #highest(value), and #highesta(value)
// macros defined in section 4.5.1. Relocation Types of the PPC-elf64abi
// document.
static inline uint16_t applyPPClo(uint64_t value) { return value & 0xffff; }

static inline uint16_t applyPPChi(uint64_t value) {
  return (value >> 16) & 0xffff;
}

static inline uint16_t applyPPCha(uint64_t value) {
  return ((value + 0x8000) >> 16) & 0xffff;
}

static inline uint16_t applyPPChigher(uint64_t value) {
  return (value >> 32) & 0xffff;
}

static inline uint16_t applyPPChighera(uint64_t value) {
  return ((value + 0x8000) >> 32) & 0xffff;
}

static inline uint16_t applyPPChighest(uint64_t value) {
  return (value >> 48) & 0xffff;
}

static inline uint16_t applyPPChighesta(uint64_t value) {
  return ((value + 0x8000) >> 48) & 0xffff;
}
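
// Worked example for the helpers above: for value 0x1234ABCD, #lo is 0xABCD,
// #hi is 0x1234, and #ha is 0x1235. The +0x8000 bias in the "a" variants
// compensates for the low half being sign-extended when it is later added by
// an addi/ld: 0xABCD acts as a negative 16-bit offset, so the high half must
// be rounded up by one.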

void RuntimeDyldELF::resolvePPC32Relocation(const SectionEntry &Section,
                                            uint64_t Offset, uint64_t Value,
                                            uint32_t Type, int64_t Addend) {
  uint8_t *LocalAddress = Section.getAddressWithOffset(Offset);
  switch (Type) {
  default:
    report_fatal_error("Relocation type not implemented yet!");
    break;
  case ELF::R_PPC_ADDR16_LO:
    writeInt16BE(LocalAddress, applyPPClo(Value + Addend));
    break;
  case ELF::R_PPC_ADDR16_HI:
    writeInt16BE(LocalAddress, applyPPChi(Value + Addend));
    break;
  case ELF::R_PPC_ADDR16_HA:
    writeInt16BE(LocalAddress, applyPPCha(Value + Addend));
    break;
  }
}
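
// Note for the _DS cases below: DS-form instructions (e.g. ld/std) require
// the low two bits of the 16-bit operand to be zero (offsets are multiples
// of 4), hence the `& ~3` masking.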
void RuntimeDyldELF::resolvePPC64Relocation(const SectionEntry &Section,
                                            uint64_t Offset, uint64_t Value,
                                            uint32_t Type, int64_t Addend) {
  uint8_t *LocalAddress = Section.getAddressWithOffset(Offset);
  switch (Type) {
  default:
    report_fatal_error("Relocation type not implemented yet!");
    break;
  case ELF::R_PPC64_ADDR16:
    writeInt16BE(LocalAddress, applyPPClo(Value + Addend));
    break;
  case ELF::R_PPC64_ADDR16_DS:
    writeInt16BE(LocalAddress, applyPPClo(Value + Addend) & ~3);
    break;
  case ELF::R_PPC64_ADDR16_LO:
    writeInt16BE(LocalAddress, applyPPClo(Value + Addend));
    break;
  case ELF::R_PPC64_ADDR16_LO_DS:
    writeInt16BE(LocalAddress, applyPPClo(Value + Addend) & ~3);
    break;
  case ELF::R_PPC64_ADDR16_HI:
  case ELF::R_PPC64_ADDR16_HIGH:
    writeInt16BE(LocalAddress, applyPPChi(Value + Addend));
    break;
  case ELF::R_PPC64_ADDR16_HA:
  case ELF::R_PPC64_ADDR16_HIGHA:
    writeInt16BE(LocalAddress, applyPPCha(Value + Addend));
    break;
  case ELF::R_PPC64_ADDR16_HIGHER:
    writeInt16BE(LocalAddress, applyPPChigher(Value + Addend));
    break;
  case ELF::R_PPC64_ADDR16_HIGHERA:
    writeInt16BE(LocalAddress, applyPPChighera(Value + Addend));
    break;
  case ELF::R_PPC64_ADDR16_HIGHEST:
    writeInt16BE(LocalAddress, applyPPChighest(Value + Addend));
    break;
  case ELF::R_PPC64_ADDR16_HIGHESTA:
    writeInt16BE(LocalAddress, applyPPChighesta(Value + Addend));
    break;
  case ELF::R_PPC64_ADDR14: {
    assert(((Value + Addend) & 3) == 0);
    // Preserve the AA/LK bits in the branch instruction
    uint8_t aalk = *(LocalAddress + 3);
    writeInt16BE(LocalAddress + 2, (aalk & 3) | ((Value + Addend) & 0xfffc));
  } break;
  case ELF::R_PPC64_REL16_LO: {
    uint64_t FinalAddress = Section.getLoadAddressWithOffset(Offset);
    uint64_t Delta = Value - FinalAddress + Addend;
    writeInt16BE(LocalAddress, applyPPClo(Delta));
  } break;
  case ELF::R_PPC64_REL16_HI: {
    uint64_t FinalAddress = Section.getLoadAddressWithOffset(Offset);
    uint64_t Delta = Value - FinalAddress + Addend;
    writeInt16BE(LocalAddress, applyPPChi(Delta));
  } break;
  case ELF::R_PPC64_REL16_HA: {
    uint64_t FinalAddress = Section.getLoadAddressWithOffset(Offset);
    uint64_t Delta = Value - FinalAddress + Addend;
    writeInt16BE(LocalAddress, applyPPCha(Delta));
  } break;
  case ELF::R_PPC64_ADDR32: {
    int64_t Result = static_cast<int64_t>(Value + Addend);
    if (SignExtend64<32>(Result) != Result)
      llvm_unreachable("Relocation R_PPC64_ADDR32 overflow");
    writeInt32BE(LocalAddress, Result);
  } break;
  case ELF::R_PPC64_REL24: {
    uint64_t FinalAddress = Section.getLoadAddressWithOffset(Offset);
    int64_t delta = static_cast<int64_t>(Value - FinalAddress + Addend);
    if (SignExtend64<26>(delta) != delta)
      llvm_unreachable("Relocation R_PPC64_REL24 overflow");
    // We preserve bits other than LI field, i.e. PO and AA/LK fields.
    uint32_t Inst = readBytesUnaligned(LocalAddress, 4);
    writeInt32BE(LocalAddress, (Inst & 0xFC000003) | (delta & 0x03FFFFFC));
  } break;
  case ELF::R_PPC64_REL32: {
    uint64_t FinalAddress = Section.getLoadAddressWithOffset(Offset);
    int64_t delta = static_cast<int64_t>(Value - FinalAddress + Addend);
    if (SignExtend64<32>(delta) != delta)
      llvm_unreachable("Relocation R_PPC64_REL32 overflow");
    writeInt32BE(LocalAddress, delta);
  } break;
  case ELF::R_PPC64_REL64: {
    uint64_t FinalAddress = Section.getLoadAddressWithOffset(Offset);
    uint64_t Delta = Value - FinalAddress + Addend;
    writeInt64BE(LocalAddress, Delta);
  } break;
  case ELF::R_PPC64_ADDR64:
    writeInt64BE(LocalAddress, Value + Addend);
    break;
  }
}
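
// The SystemZ *DBL relocations store a PC-relative offset in units of
// halfwords (2 bytes), which is why Delta is divided by 2 below; the asserts
// check both that the offset is even and that it fits the field.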
void RuntimeDyldELF::resolveSystemZRelocation(const SectionEntry &Section,
                                              uint64_t Offset, uint64_t Value,
                                              uint32_t Type, int64_t Addend) {
  uint8_t *LocalAddress = Section.getAddressWithOffset(Offset);
  switch (Type) {
  default:
    report_fatal_error("Relocation type not implemented yet!");
    break;
  case ELF::R_390_PC16DBL:
  case ELF::R_390_PLT16DBL: {
    int64_t Delta = (Value + Addend) - Section.getLoadAddressWithOffset(Offset);
    assert(int16_t(Delta / 2) * 2 == Delta && "R_390_PC16DBL overflow");
    writeInt16BE(LocalAddress, Delta / 2);
    break;
  }
  case ELF::R_390_PC32DBL:
  case ELF::R_390_PLT32DBL: {
    int64_t Delta = (Value + Addend) - Section.getLoadAddressWithOffset(Offset);
    assert(int32_t(Delta / 2) * 2 == Delta && "R_390_PC32DBL overflow");
    writeInt32BE(LocalAddress, Delta / 2);
    break;
  }
  case ELF::R_390_PC16: {
    int64_t Delta = (Value + Addend) - Section.getLoadAddressWithOffset(Offset);
    assert(int16_t(Delta) == Delta && "R_390_PC16 overflow");
    writeInt16BE(LocalAddress, Delta);
    break;
  }
  case ELF::R_390_PC32: {
    int64_t Delta = (Value + Addend) - Section.getLoadAddressWithOffset(Offset);
    assert(int32_t(Delta) == Delta && "R_390_PC32 overflow");
    writeInt32BE(LocalAddress, Delta);
    break;
  }
  case ELF::R_390_PC64: {
    int64_t Delta = (Value + Addend) - Section.getLoadAddressWithOffset(Offset);
    writeInt64BE(LocalAddress, Delta);
    break;
  }
  case ELF::R_390_8:
    *LocalAddress = (uint8_t)(Value + Addend);
    break;
  case ELF::R_390_16:
    writeInt16BE(LocalAddress, Value + Addend);
    break;
  case ELF::R_390_32:
    writeInt32BE(LocalAddress, Value + Addend);
    break;
  case ELF::R_390_64:
    writeInt64BE(LocalAddress, Value + Addend);
    break;
  }
}

void RuntimeDyldELF::resolveBPFRelocation(const SectionEntry &Section,
                                          uint64_t Offset, uint64_t Value,
                                          uint32_t Type, int64_t Addend) {
  bool isBE = Arch == Triple::bpfeb;

  switch (Type) {
  default:
    report_fatal_error("Relocation type not implemented yet!");
    break;
  case ELF::R_BPF_NONE:
  case ELF::R_BPF_64_64:
  case ELF::R_BPF_64_32:
  case ELF::R_BPF_64_NODYLD32:
    break;
  case ELF::R_BPF_64_ABS64: {
    write(isBE, Section.getAddressWithOffset(Offset), Value + Addend);
    LLVM_DEBUG(dbgs() << "Writing " << format("%p", (Value + Addend)) << " at "
                      << format("%p\n", Section.getAddressWithOffset(Offset)));
    break;
  }
  case ELF::R_BPF_64_ABS32: {
    Value += Addend;
    assert(Value <= UINT32_MAX);
    write(isBE, Section.getAddressWithOffset(Offset),
          static_cast<uint32_t>(Value));
    LLVM_DEBUG(dbgs() << "Writing " << format("%p", Value) << " at "
                      << format("%p\n", Section.getAddressWithOffset(Offset)));
    break;
  }
  }
}

// The target location for the relocation is described by RE.SectionID and
// RE.Offset. RE.SectionID can be used to find the SectionEntry. Each
// SectionEntry has three members describing its location.
// SectionEntry::Address is the address at which the section has been loaded
// into memory in the current (host) process. SectionEntry::LoadAddress is the
// address that the section will have in the target process.
// SectionEntry::ObjAddress is the address of the bits for this section in the
// original emitted object image (also in the current address space).
//
// Relocations will be applied as if the section were loaded at
// SectionEntry::LoadAddress, but they will be applied at an address based
// on SectionEntry::Address. SectionEntry::ObjAddress will be used to refer to
// target memory contents if they are required for value calculations.
//
// The Value parameter here is the load address of the symbol for the
// relocation to be applied. For relocations which refer to symbols in the
// current object Value will be the LoadAddress of the section in which
// the symbol resides (RE.Addend provides additional information about the
// symbol location). For external symbols, Value will be the address of the
// symbol in the target address space.
void RuntimeDyldELF::resolveRelocation(const RelocationEntry &RE,
                                       uint64_t Value) {
  const SectionEntry &Section = Sections[RE.SectionID];
  return resolveRelocation(Section, RE.Offset, Value, RE.RelType, RE.Addend,
                           RE.SymOffset, RE.SectionID);
}

void RuntimeDyldELF::resolveRelocation(const SectionEntry &Section,
                                       uint64_t Offset, uint64_t Value,
                                       uint32_t Type, int64_t Addend,
                                       uint64_t SymOffset, SID SectionID) {
  switch (Arch) {
  case Triple::x86_64:
    resolveX86_64Relocation(Section, Offset, Value, Type, Addend, SymOffset);
    break;
  case Triple::x86:
    resolveX86Relocation(Section, Offset, (uint32_t)(Value & 0xffffffffL), Type,
                         (uint32_t)(Addend & 0xffffffffL));
    break;
  case Triple::aarch64:
  case Triple::aarch64_be:
    resolveAArch64Relocation(Section, Offset, Value, Type, Addend);
    break;
  case Triple::arm: // Fall through.
  case Triple::armeb:
  case Triple::thumb:
  case Triple::thumbeb:
    resolveARMRelocation(Section, Offset, (uint32_t)(Value & 0xffffffffL), Type,
                         (uint32_t)(Addend & 0xffffffffL));
    break;
  case Triple::ppc: // Fall through.
  case Triple::ppcle:
    resolvePPC32Relocation(Section, Offset, Value, Type, Addend);
    break;
  case Triple::ppc64: // Fall through.
  case Triple::ppc64le:
    resolvePPC64Relocation(Section, Offset, Value, Type, Addend);
    break;
  case Triple::systemz:
    resolveSystemZRelocation(Section, Offset, Value, Type, Addend);
    break;
  case Triple::bpfel:
  case Triple::bpfeb:
    resolveBPFRelocation(Section, Offset, Value, Type, Addend);
    break;
  default:
    llvm_unreachable("Unsupported CPU type!");
  }
}
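
// Returns where the bits for (SectionID, Offset) live in the original object
// image in host memory (SectionEntry::ObjAddress, see the comment above), so
// callers can read the unpatched contents, e.g. implicit addends, before the
// section is modified.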
void *RuntimeDyldELF::computePlaceholderAddress(unsigned SectionID,
                                                uint64_t Offset) const {
  return (void *)(Sections[SectionID].getObjAddress() + Offset);
}

void RuntimeDyldELF::processSimpleRelocation(unsigned SectionID,
                                             uint64_t Offset, unsigned RelType,
                                             RelocationValueRef Value) {
  RelocationEntry RE(SectionID, Offset, RelType, Value.Addend, Value.Offset);
  if (Value.SymbolName)
    addRelocationForSymbol(RE, Value.SymbolName);
  else
    addRelocationForSection(RE, Value.SectionID);
}
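
// MIPS %hi/%lo pairing: a HI16-style relocation (or a GOT16 against a local
// symbol) cannot be resolved on its own because the carry out of the matching
// LO16 half affects the high half, so the resolver needs to know which LO16
// type pairs with a given type. R_MIPS_NONE is returned when no LO16 partner
// is required.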
uint32_t RuntimeDyldELF::getMatchingLoRelocation(uint32_t RelType,
                                                 bool IsLocal) const {
  switch (RelType) {
  case ELF::R_MICROMIPS_GOT16:
    if (IsLocal)
      return ELF::R_MICROMIPS_LO16;
    break;
  case ELF::R_MICROMIPS_HI16:
    return ELF::R_MICROMIPS_LO16;
  case ELF::R_MIPS_GOT16:
    if (IsLocal)
      return ELF::R_MIPS_LO16;
    break;
  case ELF::R_MIPS_HI16:
    return ELF::R_MIPS_LO16;
  case ELF::R_MIPS_PCHI16:
    return ELF::R_MIPS_PCLO16;
  default:
    break;
  }
  return ELF::R_MIPS_NONE;
}

// Sometimes we don't need to create a thunk for a branch. This typically
// happens when the branch target is located in the same object file. In such
// a case the target is either a weak symbol or a symbol in a different
// executable section. This function checks whether the branch target is
// located in the same object file and whether the distance between source
// and target fits the R_AARCH64_CALL26 relocation. If both conditions are
// met, it emits a direct jump to the target and returns true. Otherwise
// false is returned and a thunk is created.
bool RuntimeDyldELF::resolveAArch64ShortBranch(
    unsigned SectionID, relocation_iterator RelI,
    const RelocationValueRef &Value) {
  uint64_t Address;
  if (Value.SymbolName) {
    auto Loc = GlobalSymbolTable.find(Value.SymbolName);

    // Don't create direct branch for external symbols.
    if (Loc == GlobalSymbolTable.end())
      return false;

    const auto &SymInfo = Loc->second;
    Address =
        uint64_t(Sections[SymInfo.getSectionID()].getLoadAddressWithOffset(
            SymInfo.getOffset()));
  } else {
    Address = uint64_t(Sections[Value.SectionID].getLoadAddress());
  }
  uint64_t Offset = RelI->getOffset();
  uint64_t SourceAddress = Sections[SectionID].getLoadAddressWithOffset(Offset);

  // R_AARCH64_CALL26 requires the immediate to be in the range
  // -2^27 <= imm < 2^27. If the distance between source and target is out of
  // range then we should create a thunk.
  if (!isInt<28>(Address + Value.Addend - SourceAddress))
    return false;

  resolveRelocation(Sections[SectionID], Offset, Address, RelI->getType(),
                    Value.Addend);

  return true;
}
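
// When a short branch is not possible, the stub emitted by createStubFunction
// (a MOVZ/MOVK sequence that materializes the full 64-bit target address in a
// scratch register and branches through it) is used instead; the four
// RelocationEntry objects below each patch one 16-bit chunk of that address
// (G3 down to G0) at successive 4-byte instruction slots in the stub.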
void RuntimeDyldELF::resolveAArch64Branch(unsigned SectionID,
                                          const RelocationValueRef &Value,
                                          relocation_iterator RelI,
                                          StubMap &Stubs) {
  LLVM_DEBUG(dbgs() << "\t\tThis is an AArch64 branch relocation.");
  SectionEntry &Section = Sections[SectionID];

  uint64_t Offset = RelI->getOffset();
  unsigned RelType = RelI->getType();
  // Look for an existing stub.
  StubMap::const_iterator i = Stubs.find(Value);
  if (i != Stubs.end()) {
    resolveRelocation(Section, Offset,
                      (uint64_t)Section.getAddressWithOffset(i->second),
                      RelType, 0);
    LLVM_DEBUG(dbgs() << " Stub function found\n");
  } else if (!resolveAArch64ShortBranch(SectionID, RelI, Value)) {
    // Create a new stub function.
    LLVM_DEBUG(dbgs() << " Create a new stub function\n");
    Stubs[Value] = Section.getStubOffset();
    uint8_t *StubTargetAddr = createStubFunction(
        Section.getAddressWithOffset(Section.getStubOffset()));

    RelocationEntry REmovz_g3(SectionID, StubTargetAddr - Section.getAddress(),
                              ELF::R_AARCH64_MOVW_UABS_G3, Value.Addend);
    RelocationEntry REmovk_g2(SectionID,
                              StubTargetAddr - Section.getAddress() + 4,
                              ELF::R_AARCH64_MOVW_UABS_G2_NC, Value.Addend);
    RelocationEntry REmovk_g1(SectionID,
                              StubTargetAddr - Section.getAddress() + 8,
                              ELF::R_AARCH64_MOVW_UABS_G1_NC, Value.Addend);
    RelocationEntry REmovk_g0(SectionID,
                              StubTargetAddr - Section.getAddress() + 12,
                              ELF::R_AARCH64_MOVW_UABS_G0_NC, Value.Addend);

    if (Value.SymbolName) {
      addRelocationForSymbol(REmovz_g3, Value.SymbolName);
      addRelocationForSymbol(REmovk_g2, Value.SymbolName);
      addRelocationForSymbol(REmovk_g1, Value.SymbolName);
      addRelocationForSymbol(REmovk_g0, Value.SymbolName);
    } else {
      addRelocationForSection(REmovz_g3, Value.SectionID);
      addRelocationForSection(REmovk_g2, Value.SectionID);
      addRelocationForSection(REmovk_g1, Value.SectionID);
      addRelocationForSection(REmovk_g0, Value.SectionID);
    }
    resolveRelocation(Section, Offset,
                      reinterpret_cast<uint64_t>(Section.getAddressWithOffset(
                          Section.getStubOffset())),
                      RelType, 0);
    Section.advanceStubOffset(getMaxStubSize());
  }
}

Expected<relocation_iterator>
RuntimeDyldELF::processRelocationRef(
    unsigned SectionID, relocation_iterator RelI, const ObjectFile &O,
    ObjSectionToIDMap &ObjSectionToID, StubMap &Stubs) {
  const auto &Obj = cast<ELFObjectFileBase>(O);
  uint64_t RelType = RelI->getType();
  int64_t Addend = 0;
  if (Expected<int64_t> AddendOrErr = ELFRelocationRef(*RelI).getAddend())
    Addend = *AddendOrErr;
  else
    consumeError(AddendOrErr.takeError());
  elf_symbol_iterator Symbol = RelI->getSymbol();

  // Obtain the symbol name which is referenced in the relocation
  StringRef TargetName;
  if (Symbol != Obj.symbol_end()) {
    if (auto TargetNameOrErr = Symbol->getName())
      TargetName = *TargetNameOrErr;
    else
      return TargetNameOrErr.takeError();
  }
  LLVM_DEBUG(dbgs() << "\t\tRelType: " << RelType << " Addend: " << Addend
                    << " TargetName: " << TargetName << "\n");
  RelocationValueRef Value;
  // First search for the symbol in the local symbol table
  SymbolRef::Type SymType = SymbolRef::ST_Unknown;

  // Search for the symbol in the global symbol table
  RTDyldSymbolTable::const_iterator gsi = GlobalSymbolTable.end();
  if (Symbol != Obj.symbol_end()) {
    gsi = GlobalSymbolTable.find(TargetName.data());
    Expected<SymbolRef::Type> SymTypeOrErr = Symbol->getType();
    if (!SymTypeOrErr) {
      std::string Buf;
      raw_string_ostream OS(Buf);
      logAllUnhandledErrors(SymTypeOrErr.takeError(), OS);
      report_fatal_error(Twine(OS.str()));
    }
    SymType = *SymTypeOrErr;
  }
  if (gsi != GlobalSymbolTable.end()) {
    const auto &SymInfo = gsi->second;
    Value.SectionID = SymInfo.getSectionID();
    Value.Offset = SymInfo.getOffset();
    Value.Addend = SymInfo.getOffset() + Addend;
  } else {
    switch (SymType) {
    case SymbolRef::ST_Debug: {
      // TODO: ELF currently maps SymbolRef::ST_Debug to STT_SECTION. That
      // mapping is not obvious and could be changed by other developers.
      // The cleanest fix would be to add a dedicated ST_Section symbol type
      // to SymbolRef and use it here.
      auto SectionOrErr = Symbol->getSection();
      if (!SectionOrErr) {
        std::string Buf;
        raw_string_ostream OS(Buf);
        logAllUnhandledErrors(SectionOrErr.takeError(), OS);
        report_fatal_error(Twine(OS.str()));
      }
      section_iterator si = *SectionOrErr;
      if (si == Obj.section_end())
        llvm_unreachable("Symbol section not found, bad object file format!");
      LLVM_DEBUG(dbgs() << "\t\tThis is a section symbol\n");
      bool isCode = si->isText();
      if (auto SectionIDOrErr =
              findOrEmitSection(Obj, (*si), isCode, ObjSectionToID))
        Value.SectionID = *SectionIDOrErr;
      else
        return SectionIDOrErr.takeError();
      Value.Addend = Addend;
      break;
    }
    case SymbolRef::ST_Data:
    case SymbolRef::ST_Function:
    case SymbolRef::ST_Unknown: {
      Value.SymbolName = TargetName.data();
      Value.Addend = Addend;

      // Absolute relocations will have a zero symbol ID (STN_UNDEF), which
      // will manifest here as a NULL symbol name. We can set this as a valid
      // (but empty) symbol name, and rely on addRelocationForSymbol to handle
      // this.
      if (!Value.SymbolName)
        Value.SymbolName = "";
      break;
    }
    default:
      llvm_unreachable("Unresolved symbol type!");
      break;
    }
  }
  uint64_t Offset = RelI->getOffset();

  LLVM_DEBUG(dbgs() << "\t\tSectionID: " << SectionID << " Offset: " << Offset
                    << "\n");
  if ((Arch == Triple::aarch64 || Arch == Triple::aarch64_be)) {
    if ((RelType == ELF::R_AARCH64_CALL26 ||
         RelType == ELF::R_AARCH64_JUMP26) &&
        MemMgr.allowStubAllocation()) {
      resolveAArch64Branch(SectionID, Value, RelI, Stubs);
    } else if (RelType == ELF::R_AARCH64_ADR_GOT_PAGE) {
      // Create a new GOT entry or find the existing one. If a GOT entry has
      // to be created, we also emit an ABS64 relocation for it.
      uint64_t GOTOffset = findOrAllocGOTEntry(Value, ELF::R_AARCH64_ABS64);
      resolveGOTOffsetRelocation(SectionID, Offset, GOTOffset + Addend,
                                 ELF::R_AARCH64_ADR_PREL_PG_HI21);
    } else if (RelType == ELF::R_AARCH64_LD64_GOT_LO12_NC) {
      uint64_t GOTOffset = findOrAllocGOTEntry(Value, ELF::R_AARCH64_ABS64);
      resolveGOTOffsetRelocation(SectionID, Offset, GOTOffset + Addend,
                                 ELF::R_AARCH64_LDST64_ABS_LO12_NC);
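      // Illustrative note (added; assumes the usual compiler output): these
      // two relocation types are typically emitted as a pair that computes a
      // GOT slot address, e.g.
      //   adrp x0, :got:sym              ; R_AARCH64_ADR_GOT_PAGE
      //   ldr  x0, [x0, :got_lo12:sym]   ; R_AARCH64_LD64_GOT_LO12_NC
      // Since RuntimeDyld builds its own GOT, both are redirected at the
      // locally allocated entry via the page/page-offset relocations above.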
    } else {
      processSimpleRelocation(SectionID, Offset, RelType, Value);
    }
  } else if (Arch == Triple::arm) {
    if (RelType == ELF::R_ARM_PC24 || RelType == ELF::R_ARM_CALL ||
        RelType == ELF::R_ARM_JUMP24) {
      // This is an ARM branch relocation; we need a stub function.
      LLVM_DEBUG(dbgs() << "\t\tThis is an ARM branch relocation.\n");
      SectionEntry &Section = Sections[SectionID];

      // Look for an existing stub.
      StubMap::const_iterator i = Stubs.find(Value);
      if (i != Stubs.end()) {
        resolveRelocation(
            Section, Offset,
            reinterpret_cast<uint64_t>(Section.getAddressWithOffset(i->second)),
            RelType, 0);
        LLVM_DEBUG(dbgs() << " Stub function found\n");
      } else {
        // Create a new stub function.
        LLVM_DEBUG(dbgs() << " Create a new stub function\n");
        Stubs[Value] = Section.getStubOffset();
        uint8_t *StubTargetAddr = createStubFunction(
            Section.getAddressWithOffset(Section.getStubOffset()));
        RelocationEntry RE(SectionID, StubTargetAddr - Section.getAddress(),
                           ELF::R_ARM_ABS32, Value.Addend);
        if (Value.SymbolName)
          addRelocationForSymbol(RE, Value.SymbolName);
        else
          addRelocationForSection(RE, Value.SectionID);

        resolveRelocation(Section, Offset,
                          reinterpret_cast<uint64_t>(
                              Section.getAddressWithOffset(
                                  Section.getStubOffset())),
                          RelType, 0);
        Section.advanceStubOffset(getMaxStubSize());
      }
    } else {
      uint32_t *Placeholder = reinterpret_cast<uint32_t *>(
          computePlaceholderAddress(SectionID, Offset));
      if (RelType == ELF::R_ARM_PREL31 || RelType == ELF::R_ARM_TARGET1 ||
          RelType == ELF::R_ARM_ABS32) {
        Value.Addend += *Placeholder;
      } else if (RelType == ELF::R_ARM_MOVW_ABS_NC ||
                 RelType == ELF::R_ARM_MOVT_ABS) {
        // See the ELF for the ARM Architecture documentation for the
        // immediate encoding.
        Value.Addend += (int16_t)((*Placeholder & 0xFFF) |
                                  (((*Placeholder >> 16) & 0xF) << 12));
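        // Illustrative example (added note): MOVW/MOVT split their 16-bit
        // immediate into imm4:imm12, with imm4 in instruction bits [19:16]
        // and imm12 in bits [11:0]. For an encoding such as 0xe30a1ffe
        // (movw r1, #0xaffe), the expression above yields
        // 0xffe | (0xa << 12) == 0xaffe.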
      }
      processSimpleRelocation(SectionID, Offset, RelType, Value);
    }
  } else if (IsMipsO32ABI) {
    uint8_t *Placeholder = reinterpret_cast<uint8_t *>(
        computePlaceholderAddress(SectionID, Offset));
    uint32_t Opcode = readBytesUnaligned(Placeholder, 4);
    if (RelType == ELF::R_MIPS_26) {
      // This is a MIPS branch relocation; we need a stub function.
      LLVM_DEBUG(dbgs() << "\t\tThis is a Mips branch relocation.");
      SectionEntry &Section = Sections[SectionID];

      // Extract the addend from the instruction.
      // We shift up by two since the Value will be shifted down again
      // when applying the relocation.
      uint32_t Addend = (Opcode & 0x03ffffff) << 2;
      Value.Addend += Addend;

      // Look for an existing stub.
      StubMap::const_iterator i = Stubs.find(Value);
      if (i != Stubs.end()) {
        RelocationEntry RE(SectionID, Offset, RelType, i->second);
        addRelocationForSection(RE, SectionID);
        LLVM_DEBUG(dbgs() << " Stub function found\n");
      } else {
        // Create a new stub function.
        LLVM_DEBUG(dbgs() << " Create a new stub function\n");
        Stubs[Value] = Section.getStubOffset();

        unsigned AbiVariant = Obj.getPlatformFlags();

        uint8_t *StubTargetAddr = createStubFunction(
            Section.getAddressWithOffset(Section.getStubOffset()), AbiVariant);

        // Creating Hi and Lo relocations for the filled stub instructions.
        RelocationEntry REHi(SectionID, StubTargetAddr - Section.getAddress(),
                             ELF::R_MIPS_HI16, Value.Addend);
        RelocationEntry RELo(SectionID,
                             StubTargetAddr - Section.getAddress() + 4,
                             ELF::R_MIPS_LO16, Value.Addend);

        if (Value.SymbolName) {
          addRelocationForSymbol(REHi, Value.SymbolName);
          addRelocationForSymbol(RELo, Value.SymbolName);
        } else {
          addRelocationForSection(REHi, Value.SectionID);
          addRelocationForSection(RELo, Value.SectionID);
        }
        RelocationEntry RE(SectionID, Offset, RelType, Section.getStubOffset());
        addRelocationForSection(RE, SectionID);
        Section.advanceStubOffset(getMaxStubSize());
      }
    } else if (RelType == ELF::R_MIPS_HI16 || RelType == ELF::R_MIPS_PCHI16) {
      int64_t Addend = (Opcode & 0x0000ffff) << 16;
      RelocationEntry RE(SectionID, Offset, RelType, Addend);
      PendingRelocs.push_back(std::make_pair(Value, RE));
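      // Added note: a HI16 cannot be finalized on its own because the
      // paired LO16 is sign-extended when applied, which can carry into the
      // high half: for a final value of 0x12348888 the correct pair is
      // hi = 0x1235, lo = 0x8888, i.e. hi = (value + 0x8000) >> 16. The
      // entry is therefore parked in PendingRelocs until the matching LO16
      // is processed below.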
    } else if (RelType == ELF::R_MIPS_LO16 || RelType == ELF::R_MIPS_PCLO16) {
      int64_t Addend = Value.Addend + SignExtend32<16>(Opcode & 0x0000ffff);
      for (auto I = PendingRelocs.begin(); I != PendingRelocs.end();) {
        const RelocationValueRef &MatchingValue = I->first;
        RelocationEntry &Reloc = I->second;
        if (MatchingValue == Value &&
            RelType == getMatchingLoRelocation(Reloc.RelType) &&
            SectionID == Reloc.SectionID) {
          Reloc.Addend += Addend;
          if (Value.SymbolName)
            addRelocationForSymbol(Reloc, Value.SymbolName);
          else
            addRelocationForSection(Reloc, Value.SectionID);
          I = PendingRelocs.erase(I);
        } else
          ++I;
      }
      RelocationEntry RE(SectionID, Offset, RelType, Addend);
      if (Value.SymbolName)
        addRelocationForSymbol(RE, Value.SymbolName);
      else
        addRelocationForSection(RE, Value.SectionID);
    } else {
      if (RelType == ELF::R_MIPS_32)
        Value.Addend += Opcode;
      else if (RelType == ELF::R_MIPS_PC16)
        Value.Addend += SignExtend32<18>((Opcode & 0x0000ffff) << 2);
      else if (RelType == ELF::R_MIPS_PC19_S2)
        Value.Addend += SignExtend32<21>((Opcode & 0x0007ffff) << 2);
      else if (RelType == ELF::R_MIPS_PC21_S2)
        Value.Addend += SignExtend32<23>((Opcode & 0x001fffff) << 2);
      else if (RelType == ELF::R_MIPS_PC26_S2)
        Value.Addend += SignExtend32<28>((Opcode & 0x03ffffff) << 2);
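      // Added note: each SignExtend32 width above is the instruction's
      // field size plus the two bits gained by the << 2 word-to-byte
      // conversion; e.g. R_MIPS_PC16 stores a 16-bit word offset, giving an
      // 18-bit signed byte offset.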
      processSimpleRelocation(SectionID, Offset, RelType, Value);
    }
  } else if (IsMipsN32ABI || IsMipsN64ABI) {
    uint32_t r_type = RelType & 0xff;
    RelocationEntry RE(SectionID, Offset, RelType, Value.Addend);
    if (r_type == ELF::R_MIPS_CALL16 || r_type == ELF::R_MIPS_GOT_PAGE ||
        r_type == ELF::R_MIPS_GOT_DISP) {
      StringMap<uint64_t>::iterator i = GOTSymbolOffsets.find(TargetName);
      if (i != GOTSymbolOffsets.end())
        RE.SymOffset = i->second;
      else {
        RE.SymOffset = allocateGOTEntries(1);
        GOTSymbolOffsets[TargetName] = RE.SymOffset;
      }
      if (Value.SymbolName)
        addRelocationForSymbol(RE, Value.SymbolName);
      else
        addRelocationForSection(RE, Value.SectionID);
    } else if (RelType == ELF::R_MIPS_26) {
      // This is a MIPS branch relocation; we need a stub function.
      LLVM_DEBUG(dbgs() << "\t\tThis is a Mips branch relocation.");
      SectionEntry &Section = Sections[SectionID];

      // Look for an existing stub.
      StubMap::const_iterator i = Stubs.find(Value);
  1348. StubMap::const_iterator i = Stubs.find(Value);
  1349. if (i != Stubs.end()) {
  1350. RelocationEntry RE(SectionID, Offset, RelType, i->second);
  1351. addRelocationForSection(RE, SectionID);
  1352. LLVM_DEBUG(dbgs() << " Stub function found\n");
  1353. } else {
  1354. // Create a new stub function.
  1355. LLVM_DEBUG(dbgs() << " Create a new stub function\n");
  1356. Stubs[Value] = Section.getStubOffset();
  1357. unsigned AbiVariant = Obj.getPlatformFlags();
  1358. uint8_t *StubTargetAddr = createStubFunction(
  1359. Section.getAddressWithOffset(Section.getStubOffset()), AbiVariant);
  1360. if (IsMipsN32ABI) {
  1361. // Creating Hi and Lo relocations for the filled stub instructions.
  1362. RelocationEntry REHi(SectionID, StubTargetAddr - Section.getAddress(),
  1363. ELF::R_MIPS_HI16, Value.Addend);
  1364. RelocationEntry RELo(SectionID,
  1365. StubTargetAddr - Section.getAddress() + 4,
  1366. ELF::R_MIPS_LO16, Value.Addend);
  1367. if (Value.SymbolName) {
  1368. addRelocationForSymbol(REHi, Value.SymbolName);
  1369. addRelocationForSymbol(RELo, Value.SymbolName);
  1370. } else {
  1371. addRelocationForSection(REHi, Value.SectionID);
  1372. addRelocationForSection(RELo, Value.SectionID);
  1373. }
  1374. } else {
  1375. // Creating Highest, Higher, Hi and Lo relocations for the filled stub
  1376. // instructions.
  1377. RelocationEntry REHighest(SectionID,
  1378. StubTargetAddr - Section.getAddress(),
  1379. ELF::R_MIPS_HIGHEST, Value.Addend);
  1380. RelocationEntry REHigher(SectionID,
  1381. StubTargetAddr - Section.getAddress() + 4,
  1382. ELF::R_MIPS_HIGHER, Value.Addend);
  1383. RelocationEntry REHi(SectionID,
  1384. StubTargetAddr - Section.getAddress() + 12,
  1385. ELF::R_MIPS_HI16, Value.Addend);
  1386. RelocationEntry RELo(SectionID,
  1387. StubTargetAddr - Section.getAddress() + 20,
  1388. ELF::R_MIPS_LO16, Value.Addend);
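          // Illustrative sketch (added note; assumes the usual N64 stub
          // layout from createStubFunction): the offsets 0, 4, 12 and 20
          // used above match a stub that builds the 64-bit target in $t9
          // roughly as
          //   lui/daddiu (%highest/%higher), dsll, daddiu (%hi), dsll,
          //   daddiu (%lo), jr $t9
          // so the dsll shifts at offsets 8 and 16 carry no relocation.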
          if (Value.SymbolName) {
            addRelocationForSymbol(REHighest, Value.SymbolName);
            addRelocationForSymbol(REHigher, Value.SymbolName);
            addRelocationForSymbol(REHi, Value.SymbolName);
            addRelocationForSymbol(RELo, Value.SymbolName);
          } else {
            addRelocationForSection(REHighest, Value.SectionID);
            addRelocationForSection(REHigher, Value.SectionID);
            addRelocationForSection(REHi, Value.SectionID);
            addRelocationForSection(RELo, Value.SectionID);
          }
        }
        RelocationEntry RE(SectionID, Offset, RelType, Section.getStubOffset());
        addRelocationForSection(RE, SectionID);
        Section.advanceStubOffset(getMaxStubSize());
      }
    } else {
      processSimpleRelocation(SectionID, Offset, RelType, Value);
    }
  } else if (Arch == Triple::ppc64 || Arch == Triple::ppc64le) {
    if (RelType == ELF::R_PPC64_REL24) {
      // Determine ABI variant in use for this object.
      unsigned AbiVariant = Obj.getPlatformFlags();
      AbiVariant &= ELF::EF_PPC64_ABI;

      // A PPC branch relocation will need a stub function if the target is
      // an external symbol (either Value.SymbolName is set, or SymType is
      // SymbolRef::ST_Unknown) or if the target address is not within reach
      // of the branch instruction's signed 26-bit displacement (a 24-bit
      // field shifted left by two).
      SectionEntry &Section = Sections[SectionID];
      uint8_t *Target = Section.getAddressWithOffset(Offset);
      bool RangeOverflow = false;
      bool IsExtern = Value.SymbolName || SymType == SymbolRef::ST_Unknown;
      if (!IsExtern) {
        if (AbiVariant != 2) {
          // In the ELFv1 ABI, a function call may point to the .opd entry,
          // so the final symbol value is calculated based on the relocation
          // values in the .opd section.
          if (auto Err = findOPDEntrySection(Obj, ObjSectionToID, Value))
            return std::move(Err);
        } else {
          // In the ELFv2 ABI, a function symbol may provide a local entry
          // point, which must be used for direct calls.
          if (Value.SectionID == SectionID) {
            uint8_t SymOther = Symbol->getOther();
            Value.Addend += ELF::decodePPC64LocalEntryOffset(SymOther);
          }
        }
        uint8_t *RelocTarget =
            Sections[Value.SectionID].getAddressWithOffset(Value.Addend);
        int64_t delta = static_cast<int64_t>(Target - RelocTarget);
        // If the target is within the signed 26-bit branch range, just set
        // the branch target directly; otherwise fall through to a stub.
        if (SignExtend64<26>(delta) != delta) {
          RangeOverflow = true;
        } else if ((AbiVariant != 2) ||
                   (AbiVariant == 2 && Value.SectionID == SectionID)) {
          RelocationEntry RE(SectionID, Offset, RelType, Value.Addend);
          addRelocationForSection(RE, Value.SectionID);
        }
      }
  1447. }
  1448. if (IsExtern || (AbiVariant == 2 && Value.SectionID != SectionID) ||
  1449. RangeOverflow) {
  1450. // It is an external symbol (either Value.SymbolName is set, or
  1451. // SymType is SymbolRef::ST_Unknown) or out of range.
  1452. StubMap::const_iterator i = Stubs.find(Value);
  1453. if (i != Stubs.end()) {
  1454. // Symbol function stub already created, just relocate to it
  1455. resolveRelocation(Section, Offset,
  1456. reinterpret_cast<uint64_t>(
  1457. Section.getAddressWithOffset(i->second)),
  1458. RelType, 0);
  1459. LLVM_DEBUG(dbgs() << " Stub function found\n");
  1460. } else {
  1461. // Create a new stub function.
  1462. LLVM_DEBUG(dbgs() << " Create a new stub function\n");
  1463. Stubs[Value] = Section.getStubOffset();
  1464. uint8_t *StubTargetAddr = createStubFunction(
  1465. Section.getAddressWithOffset(Section.getStubOffset()),
  1466. AbiVariant);
  1467. RelocationEntry RE(SectionID, StubTargetAddr - Section.getAddress(),
  1468. ELF::R_PPC64_ADDR64, Value.Addend);
  1469. // Generates the 64-bits address loads as exemplified in section
  1470. // 4.5.1 in PPC64 ELF ABI. Note that the relocations need to
  1471. // apply to the low part of the instructions, so we have to update
  1472. // the offset according to the target endianness.
  1473. uint64_t StubRelocOffset = StubTargetAddr - Section.getAddress();
  1474. if (!IsTargetLittleEndian)
  1475. StubRelocOffset += 2;
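          // Illustrative sketch (added note; assumes the usual PPC64 stub
          // layout from createStubFunction): the stub loads the target
          // address into r12 roughly as
          //   lis  r12, highest(addr)     ; stub offset 0
          //   ori  r12, r12, higher(addr) ; stub offset 4
          //   sldi r12, r12, 32           ; stub offset 8, no relocation
          //   oris r12, r12, hi(addr)     ; stub offset 12
          //   ori  r12, r12, lo(addr)     ; stub offset 16
          // followed by an indirect branch (mtctr/bctr in ELFv2, a
          // function-descriptor load in ELFv1). On big-endian targets the
          // 16-bit immediate occupies bytes 2-3 of each instruction, hence
          // the +2 adjustment above.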
          RelocationEntry REhst(SectionID, StubRelocOffset + 0,
                                ELF::R_PPC64_ADDR16_HIGHEST, Value.Addend);
          RelocationEntry REhr(SectionID, StubRelocOffset + 4,
                               ELF::R_PPC64_ADDR16_HIGHER, Value.Addend);
          RelocationEntry REh(SectionID, StubRelocOffset + 12,
                              ELF::R_PPC64_ADDR16_HI, Value.Addend);
          RelocationEntry REl(SectionID, StubRelocOffset + 16,
                              ELF::R_PPC64_ADDR16_LO, Value.Addend);

          if (Value.SymbolName) {
            addRelocationForSymbol(REhst, Value.SymbolName);
            addRelocationForSymbol(REhr, Value.SymbolName);
            addRelocationForSymbol(REh, Value.SymbolName);
            addRelocationForSymbol(REl, Value.SymbolName);
          } else {
            addRelocationForSection(REhst, Value.SectionID);
            addRelocationForSection(REhr, Value.SectionID);
            addRelocationForSection(REh, Value.SectionID);
            addRelocationForSection(REl, Value.SectionID);
          }

          resolveRelocation(Section, Offset,
                            reinterpret_cast<uint64_t>(
                                Section.getAddressWithOffset(
                                    Section.getStubOffset())),
                            RelType, 0);
          Section.advanceStubOffset(getMaxStubSize());
        }
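        // Added note (assumes the standard PPC64 ABI call convention): the
        // instruction slot following such a call is reserved for the
        // linker, so compilers emit a nop there. The write below patches
        // that slot to reload r2 from the caller's TOC save slot (24(r1)
        // in ELFv2, 40(r1) in ELFv1) after the call returns via the stub.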
        if (IsExtern || (AbiVariant == 2 && Value.SectionID != SectionID)) {
          // Restore the TOC for external calls
          if (AbiVariant == 2)
            writeInt32BE(Target + 4, 0xE8410018); // ld r2,24(r1)
          else
            writeInt32BE(Target + 4, 0xE8410028); // ld r2,40(r1)
        }
      }
    } else if (RelType == ELF::R_PPC64_TOC16 ||
               RelType == ELF::R_PPC64_TOC16_DS ||
               RelType == ELF::R_PPC64_TOC16_LO ||
               RelType == ELF::R_PPC64_TOC16_LO_DS ||
               RelType == ELF::R_PPC64_TOC16_HI ||
               RelType == ELF::R_PPC64_TOC16_HA) {
      // These relocations are supposed to subtract the TOC address from
      // the final value. This does not fit cleanly into the RuntimeDyld
      // scheme, since there may be *two* sections involved in determining
      // the relocation value (the section of the symbol referred to by the
      // relocation, and the TOC section associated with the current module).
      //
      // Fortunately, these relocations are currently only ever generated
      // referring to symbols that themselves reside in the TOC, which means
      // that the two sections are actually the same. Thus they cancel out
      // and we can immediately resolve the relocation right now.
      switch (RelType) {
      case ELF::R_PPC64_TOC16: RelType = ELF::R_PPC64_ADDR16; break;
      case ELF::R_PPC64_TOC16_DS: RelType = ELF::R_PPC64_ADDR16_DS; break;
      case ELF::R_PPC64_TOC16_LO: RelType = ELF::R_PPC64_ADDR16_LO; break;
      case ELF::R_PPC64_TOC16_LO_DS:
        RelType = ELF::R_PPC64_ADDR16_LO_DS;
        break;
      case ELF::R_PPC64_TOC16_HI: RelType = ELF::R_PPC64_ADDR16_HI; break;
      case ELF::R_PPC64_TOC16_HA: RelType = ELF::R_PPC64_ADDR16_HA; break;
      default: llvm_unreachable("Wrong relocation type.");
      }

      RelocationValueRef TOCValue;
      if (auto Err = findPPC64TOCSection(Obj, ObjSectionToID, TOCValue))
        return std::move(Err);
      if (Value.SymbolName || Value.SectionID != TOCValue.SectionID)
        llvm_unreachable("Unsupported TOC relocation.");
      Value.Addend -= TOCValue.Addend;
      resolveRelocation(Sections[SectionID], Offset, Value.Addend, RelType, 0);
    } else {
      // There are two ways to refer to the TOC address directly: either
      // via an ELF::R_PPC64_TOC relocation (where both the symbol and the
      // addend are ignored), or via any relocation that refers to the magic
      // ".TOC." symbol (in which case the addend is respected).
      if (RelType == ELF::R_PPC64_TOC) {
        RelType = ELF::R_PPC64_ADDR64;
        if (auto Err = findPPC64TOCSection(Obj, ObjSectionToID, Value))
          return std::move(Err);
      } else if (TargetName == ".TOC.") {
        if (auto Err = findPPC64TOCSection(Obj, ObjSectionToID, Value))
          return std::move(Err);
        Value.Addend += Addend;
      }

      RelocationEntry RE(SectionID, Offset, RelType, Value.Addend);

      if (Value.SymbolName)
        addRelocationForSymbol(RE, Value.SymbolName);
      else
        addRelocationForSection(RE, Value.SectionID);
    }
  } else if (Arch == Triple::systemz &&
             (RelType == ELF::R_390_PLT32DBL || RelType == ELF::R_390_GOTENT)) {
    // Create function stubs for both PLT and GOT references, regardless of
    // whether the GOT reference is to data or code. The stub contains the
    // full address of the symbol, as needed by GOT references, and the
    // executable part only adds an overhead of 8 bytes.
    //
    // We could try to conserve space by allocating the code and data
    // parts of the stub separately. However, as things stand, we allocate
    // a stub for every relocation, so using a GOT in JIT code should be
    // no less space efficient than using an explicit constant pool.
    LLVM_DEBUG(dbgs() << "\t\tThis is a SystemZ indirect relocation.");
    SectionEntry &Section = Sections[SectionID];

    // Look for an existing stub.
    StubMap::const_iterator i = Stubs.find(Value);
    uintptr_t StubAddress;
    if (i != Stubs.end()) {
      StubAddress = uintptr_t(Section.getAddressWithOffset(i->second));
      LLVM_DEBUG(dbgs() << " Stub function found\n");
    } else {
      // Create a new stub function.
      LLVM_DEBUG(dbgs() << " Create a new stub function\n");

      uintptr_t BaseAddress = uintptr_t(Section.getAddress());
      uintptr_t StubAlignment = getStubAlignment();
      StubAddress =
          (BaseAddress + Section.getStubOffset() + StubAlignment - 1) &
          -StubAlignment;
      unsigned StubOffset = StubAddress - BaseAddress;

      Stubs[Value] = StubOffset;
      createStubFunction((uint8_t *)StubAddress);
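      // Illustrative sketch (added note; assumes the usual SystemZ stub
      // layout from createStubFunction): the stub is 8 bytes of code
      // followed by an 8-byte slot holding the target address, roughly
      //   lgrl %r1, .+8    ; load the 64-bit value stored after the code
      //   br   %r1
      //   .quad <target>   ; patched via the R_390_64 entry below
      // GOT-style references (R_390_GOTENT) therefore resolve to
      // StubAddress + 8, the address slot itself.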
      RelocationEntry RE(SectionID, StubOffset + 8, ELF::R_390_64,
                         Value.Offset);
      if (Value.SymbolName)
        addRelocationForSymbol(RE, Value.SymbolName);
      else
        addRelocationForSection(RE, Value.SectionID);
      Section.advanceStubOffset(getMaxStubSize());
    }

    if (RelType == ELF::R_390_GOTENT)
      resolveRelocation(Section, Offset, StubAddress + 8, ELF::R_390_PC32DBL,
                        Addend);
    else
      resolveRelocation(Section, Offset, StubAddress, RelType, Addend);
  } else if (Arch == Triple::x86_64) {
    if (RelType == ELF::R_X86_64_PLT32) {
      // The way the PLT relocations normally work is that the linker
      // allocates the PLT and this relocation makes a PC-relative call into
      // the PLT. The PLT entry will then jump to an address provided by the
      // GOT. On first call, the GOT address will point back into PLT code
      // that resolves the symbol. After the first call, the GOT entry points
      // to the actual function.
      //
      // For local functions we're ignoring all of that here and just
      // replacing the PLT32 relocation type with PC32, which will translate
      // the relocation into a PC-relative call directly to the function. For
      // external symbols we can't be sure the function will be within 2^32
      // bytes of the call site, so we need to create a stub, which calls
      // into the GOT. This case is equivalent to the usual PLT
      // implementation except that we use the stub mechanism in RuntimeDyld
      // (which puts stubs at the end of the section) rather than allocating
      // a PLT section.
      if (Value.SymbolName && MemMgr.allowStubAllocation()) {
        // This is a call to an external function.
        // Look for an existing stub.
        SectionEntry *Section = &Sections[SectionID];
        StubMap::const_iterator i = Stubs.find(Value);
        uintptr_t StubAddress;
        if (i != Stubs.end()) {
          StubAddress = uintptr_t(Section->getAddress()) + i->second;
          LLVM_DEBUG(dbgs() << " Stub function found\n");
        } else {
          // Create a new stub function (equivalent to a PLT entry).
          LLVM_DEBUG(dbgs() << " Create a new stub function\n");

          uintptr_t BaseAddress = uintptr_t(Section->getAddress());
          uintptr_t StubAlignment = getStubAlignment();
          StubAddress =
              (BaseAddress + Section->getStubOffset() + StubAlignment - 1) &
              -StubAlignment;
          unsigned StubOffset = StubAddress - BaseAddress;
          Stubs[Value] = StubOffset;
          createStubFunction((uint8_t *)StubAddress);

          // Bump our stub offset counter
          Section->advanceStubOffset(getMaxStubSize());

          // Allocate a GOT Entry
          uint64_t GOTOffset = allocateGOTEntries(1);
          // This may create a new Section and thereby invalidate the
          // Section pointer, so reload it.
          Section = &Sections[SectionID];

          // The load of the GOT address has an addend of -4
          resolveGOTOffsetRelocation(SectionID, StubOffset + 2, GOTOffset - 4,
                                     ELF::R_X86_64_PC32);
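          // Added note (assumes the usual x86-64 stub layout from
          // createStubFunction): the stub is effectively a one-entry PLT:
          //   ff 25 <disp32>   ; jmpq *GOTEntry(%rip)
          // The 32-bit displacement starts at byte 2 of the stub (hence
          // StubOffset + 2), and RIP-relative addressing is measured from
          // the end of the 6-byte instruction, which is why the GOT delta
          // carries the -4 addend above.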
          // Fill in the value of the symbol we're targeting into the GOT
          addRelocationForSymbol(
              computeGOTOffsetRE(GOTOffset, 0, ELF::R_X86_64_64),
              Value.SymbolName);
        }

        // Make the target call a call into the stub table.
        resolveRelocation(*Section, Offset, StubAddress, ELF::R_X86_64_PC32,
                          Addend);
      } else {
        Value.Addend += support::ulittle32_t::ref(
            computePlaceholderAddress(SectionID, Offset));
        processSimpleRelocation(SectionID, Offset, ELF::R_X86_64_PC32, Value);
      }
    } else if (RelType == ELF::R_X86_64_GOTPCREL ||
               RelType == ELF::R_X86_64_GOTPCRELX ||
               RelType == ELF::R_X86_64_REX_GOTPCRELX) {
      uint64_t GOTOffset = allocateGOTEntries(1);
      resolveGOTOffsetRelocation(SectionID, Offset, GOTOffset + Addend,
                                 ELF::R_X86_64_PC32);

      // Fill in the value of the symbol we're targeting into the GOT
      RelocationEntry RE =
          computeGOTOffsetRE(GOTOffset, Value.Offset, ELF::R_X86_64_64);
      if (Value.SymbolName)
        addRelocationForSymbol(RE, Value.SymbolName);
      else
        addRelocationForSection(RE, Value.SectionID);
    } else if (RelType == ELF::R_X86_64_GOT64) {
      // Fill in a 64-bit GOT offset.
      uint64_t GOTOffset = allocateGOTEntries(1);
      resolveRelocation(Sections[SectionID], Offset, GOTOffset,
                        ELF::R_X86_64_64, 0);

      // Fill in the value of the symbol we're targeting into the GOT
      RelocationEntry RE =
          computeGOTOffsetRE(GOTOffset, Value.Offset, ELF::R_X86_64_64);
      if (Value.SymbolName)
        addRelocationForSymbol(RE, Value.SymbolName);
      else
        addRelocationForSection(RE, Value.SectionID);
    } else if (RelType == ELF::R_X86_64_GOTPC32) {
      // Materialize the address of the base of the GOT relative to the PC.
      // This doesn't create a GOT entry, but it does mean we need a GOT
      // section.
      (void)allocateGOTEntries(0);
      resolveGOTOffsetRelocation(SectionID, Offset, Addend, ELF::R_X86_64_PC32);
    } else if (RelType == ELF::R_X86_64_GOTPC64) {
      (void)allocateGOTEntries(0);
      resolveGOTOffsetRelocation(SectionID, Offset, Addend, ELF::R_X86_64_PC64);
    } else if (RelType == ELF::R_X86_64_GOTOFF64) {
      // GOTOFF relocations ultimately require a section difference
      // relocation.
      (void)allocateGOTEntries(0);
      processSimpleRelocation(SectionID, Offset, RelType, Value);
    } else if (RelType == ELF::R_X86_64_PC32) {
      Value.Addend += support::ulittle32_t::ref(
          computePlaceholderAddress(SectionID, Offset));
      processSimpleRelocation(SectionID, Offset, RelType, Value);
    } else if (RelType == ELF::R_X86_64_PC64) {
      Value.Addend += support::ulittle64_t::ref(
          computePlaceholderAddress(SectionID, Offset));
      processSimpleRelocation(SectionID, Offset, RelType, Value);
    } else if (RelType == ELF::R_X86_64_GOTTPOFF) {
      processX86_64GOTTPOFFRelocation(SectionID, Offset, Value, Addend);
    } else if (RelType == ELF::R_X86_64_TLSGD ||
               RelType == ELF::R_X86_64_TLSLD) {
      // The next relocation must be the relocation for __tls_get_addr.
      ++RelI;
      auto &GetAddrRelocation = *RelI;
      processX86_64TLSRelocation(SectionID, Offset, RelType, Value, Addend,
                                 GetAddrRelocation);
    } else {
      processSimpleRelocation(SectionID, Offset, RelType, Value);
    }
  } else {
    if (Arch == Triple::x86) {
      Value.Addend += support::ulittle32_t::ref(
          computePlaceholderAddress(SectionID, Offset));
    }
    processSimpleRelocation(SectionID, Offset, RelType, Value);
  }
  return ++RelI;
}
void RuntimeDyldELF::processX86_64GOTTPOFFRelocation(unsigned SectionID,
                                                     uint64_t Offset,
                                                     RelocationValueRef Value,
                                                     int64_t Addend) {
  // Use the approach from "x86-64 Linker Optimizations" from the TLS spec
  // to replace the GOTTPOFF relocation with a TPOFF relocation. The spec
  // only mentions one optimization even though there are two different
  // code sequences for the Initial Exec TLS Model. We match the code to
  // find out which one was used.

  // A possible TLS code sequence and its replacement
  struct CodeSequence {
    // The expected code sequence
    ArrayRef<uint8_t> ExpectedCodeSequence;
    // The offset of the GOTTPOFF relocation from the beginning of
    // the sequence
    uint64_t TLSSequenceOffset;
    // The new code sequence
    ArrayRef<uint8_t> NewCodeSequence;
    // The offset of the new TPOFF relocation
    uint64_t TpoffRelocationOffset;
  };
  std::array<CodeSequence, 2> CodeSequences;

  // Initial Exec Code Model Sequence
  {
    static const std::initializer_list<uint8_t> ExpectedCodeSequenceList = {
        0x64, 0x48, 0x8b, 0x04, 0x25, 0x00, 0x00, 0x00,
        0x00,                                    // mov %fs:0, %rax
        0x48, 0x03, 0x05, 0x00, 0x00, 0x00, 0x00 // add x@gottpoff(%rip),
                                                 // %rax
    };
    CodeSequences[0].ExpectedCodeSequence =
        ArrayRef<uint8_t>(ExpectedCodeSequenceList);
    CodeSequences[0].TLSSequenceOffset = 12;

    static const std::initializer_list<uint8_t> NewCodeSequenceList = {
        0x64, 0x48, 0x8b, 0x04, 0x25, 0x00, 0x00, 0x00, 0x00, // mov %fs:0, %rax
        0x48, 0x8d, 0x80, 0x00, 0x00, 0x00, 0x00 // lea x@tpoff(%rax), %rax
    };
    CodeSequences[0].NewCodeSequence = ArrayRef<uint8_t>(NewCodeSequenceList);
    CodeSequences[0].TpoffRelocationOffset = 12;
  }

  // Initial Exec Code Model Sequence, II
  {
    static const std::initializer_list<uint8_t> ExpectedCodeSequenceList = {
        0x48, 0x8b, 0x05, 0x00, 0x00, 0x00, 0x00, // mov x@gottpoff(%rip), %rax
        0x64, 0x48, 0x8b, 0x00, 0x00, 0x00, 0x00  // mov %fs:(%rax), %rax
    };
    CodeSequences[1].ExpectedCodeSequence =
        ArrayRef<uint8_t>(ExpectedCodeSequenceList);
    CodeSequences[1].TLSSequenceOffset = 3;

    static const std::initializer_list<uint8_t> NewCodeSequenceList = {
        0x66, 0x0f, 0x1f, 0x44, 0x00, 0x00, // 6 byte nop
        0x64, 0x8b, 0x04, 0x25, 0x00, 0x00, 0x00, 0x00, // mov %fs:x@tpoff, %rax
    };
    CodeSequences[1].NewCodeSequence = ArrayRef<uint8_t>(NewCodeSequenceList);
    CodeSequences[1].TpoffRelocationOffset = 10;
  }
  bool Resolved = false;
  auto &Section = Sections[SectionID];
  for (const auto &C : CodeSequences) {
    assert(C.ExpectedCodeSequence.size() == C.NewCodeSequence.size() &&
           "Old and new code sequences must have the same size");

    if (Offset < C.TLSSequenceOffset ||
        (Offset - C.TLSSequenceOffset + C.NewCodeSequence.size()) >
            Section.getSize()) {
      // This can't be a matching sequence as it doesn't fit in the current
      // section
      continue;
    }

    auto TLSSequenceStartOffset = Offset - C.TLSSequenceOffset;
    auto *TLSSequence = Section.getAddressWithOffset(TLSSequenceStartOffset);
    if (ArrayRef<uint8_t>(TLSSequence, C.ExpectedCodeSequence.size()) !=
        C.ExpectedCodeSequence) {
      continue;
    }

    memcpy(TLSSequence, C.NewCodeSequence.data(), C.NewCodeSequence.size());

    // The original GOTTPOFF relocation has an addend as it is PC relative,
    // so it needs to be corrected. The TPOFF32 relocation is used as an
    // absolute value (which is an offset from %fs:0), so remove the addend
    // again.
    RelocationEntry RE(SectionID,
                       TLSSequenceStartOffset + C.TpoffRelocationOffset,
                       ELF::R_X86_64_TPOFF32, Value.Addend - Addend);

    if (Value.SymbolName)
      addRelocationForSymbol(RE, Value.SymbolName);
    else
      addRelocationForSection(RE, Value.SectionID);

    Resolved = true;
    break;
  }

  if (!Resolved) {
    // The GOTTPOFF relocation was not used in one of the sequences
    // described in the spec, so we can't optimize it to a TPOFF
    // relocation.
    uint64_t GOTOffset = allocateGOTEntries(1);
    resolveGOTOffsetRelocation(SectionID, Offset, GOTOffset + Addend,
                               ELF::R_X86_64_PC32);
    RelocationEntry RE =
        computeGOTOffsetRE(GOTOffset, Value.Offset, ELF::R_X86_64_TPOFF64);
    if (Value.SymbolName)
      addRelocationForSymbol(RE, Value.SymbolName);
    else
      addRelocationForSection(RE, Value.SectionID);
  }
}
void RuntimeDyldELF::processX86_64TLSRelocation(
    unsigned SectionID, uint64_t Offset, uint64_t RelType,
    RelocationValueRef Value, int64_t Addend,
    const RelocationRef &GetAddrRelocation) {
  // Since we are statically linking and have no additional DSOs, we can
  // resolve the relocation directly without using __tls_get_addr.
  // Use the approach from "x86-64 Linker Optimizations" from the TLS spec
  // to replace it with the Local Exec relocation variant.

  // Find out whether the code was compiled with the large or small memory
  // model. For this we look at the next relocation, which is the relocation
  // for the __tls_get_addr function. If it's a 32-bit relocation, it's the
  // small code model; with a 64-bit relocation it's the large code model.
  bool IsSmallCodeModel;
  // Is the relocation for the __tls_get_addr a PC-relative GOT relocation?
  bool IsGOTPCRel = false;

  switch (GetAddrRelocation.getType()) {
  case ELF::R_X86_64_GOTPCREL:
  case ELF::R_X86_64_REX_GOTPCRELX:
  case ELF::R_X86_64_GOTPCRELX:
    IsGOTPCRel = true;
    LLVM_FALLTHROUGH;
  case ELF::R_X86_64_PLT32:
    IsSmallCodeModel = true;
    break;
  case ELF::R_X86_64_PLTOFF64:
    IsSmallCodeModel = false;
    break;
  default:
    report_fatal_error(
        "invalid TLS relocations for General/Local Dynamic TLS Model: "
        "expected PLT or GOT relocation for __tls_get_addr function");
  }
  // The offset of the TLSGD/TLSLD relocation back to the start of the TLS
  // code sequence
  uint64_t TLSSequenceOffset;
  // The expected start of the code sequence
  ArrayRef<uint8_t> ExpectedCodeSequence;
  // The new TLS code sequence that will replace the existing code
  ArrayRef<uint8_t> NewCodeSequence;

  if (RelType == ELF::R_X86_64_TLSGD) {
    // The offset of the new TPOFF32 relocation (offset starting from the
    // beginning of the whole TLS sequence)
    uint64_t TpoffRelocOffset;

    if (IsSmallCodeModel) {
      if (!IsGOTPCRel) {
        static const std::initializer_list<uint8_t> CodeSequence = {
            0x66, // data16 (no-op prefix)
            0x48, 0x8d, 0x3d, 0x00, 0x00,
            0x00, 0x00, // lea <disp32>(%rip), %rdi
            0x66, 0x66, // two data16 prefixes
            0x48,       // rex64 (no-op prefix)
            0xe8, 0x00, 0x00, 0x00, 0x00 // call __tls_get_addr@plt
        };
        ExpectedCodeSequence = ArrayRef<uint8_t>(CodeSequence);
        TLSSequenceOffset = 4;
      } else {
        // This code sequence is not described in the TLS spec but gcc
        // generates it sometimes.
        static const std::initializer_list<uint8_t> CodeSequence = {
            0x66, // data16 (no-op prefix)
            0x48, 0x8d, 0x3d, 0x00, 0x00,
            0x00, 0x00, // lea <disp32>(%rip), %rdi
            0x66,       // data16 prefix (no-op prefix)
            0x48,       // rex64 (no-op prefix)
            0xff, 0x15, 0x00, 0x00, 0x00,
            0x00 // call *__tls_get_addr@gotpcrel(%rip)
        };
        ExpectedCodeSequence = ArrayRef<uint8_t>(CodeSequence);
        TLSSequenceOffset = 4;
      }

      // The replacement code for the small code model. It's the same for
      // both sequences.
      static const std::initializer_list<uint8_t> SmallSequence = {
          0x64, 0x48, 0x8b, 0x04, 0x25, 0x00, 0x00, 0x00,
          0x00, // mov %fs:0, %rax
          0x48, 0x8d, 0x80, 0x00, 0x00, 0x00, 0x00 // lea x@tpoff(%rax),
                                                   // %rax
      };
      NewCodeSequence = ArrayRef<uint8_t>(SmallSequence);
      TpoffRelocOffset = 12;
    } else {
      static const std::initializer_list<uint8_t> CodeSequence = {
          0x48, 0x8d, 0x3d, 0x00, 0x00, 0x00, 0x00, // lea <disp32>(%rip),
                                                    // %rdi
          0x48, 0xb8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
          0x00,             // movabs $__tls_get_addr@pltoff, %rax
          0x48, 0x01, 0xd8, // add %rbx, %rax
          0xff, 0xd0        // call *%rax
      };
      ExpectedCodeSequence = ArrayRef<uint8_t>(CodeSequence);
      TLSSequenceOffset = 3;

      // The replacement code for the large code model
      static const std::initializer_list<uint8_t> LargeSequence = {
          0x64, 0x48, 0x8b, 0x04, 0x25, 0x00, 0x00, 0x00,
          0x00, // mov %fs:0, %rax
          0x48, 0x8d, 0x80, 0x00, 0x00, 0x00, 0x00, // lea x@tpoff(%rax),
                                                    // %rax
          0x66, 0x0f, 0x1f, 0x44, 0x00, 0x00 // nopw 0x0(%rax,%rax,1)
      };
      NewCodeSequence = ArrayRef<uint8_t>(LargeSequence);
      TpoffRelocOffset = 12;
    }

    // The TLSGD/TLSLD relocations are PC-relative, so they have an addend.
    // The new TPOFF32 relocation is used as an absolute offset from
    // %fs:0, so remove the TLSGD/TLSLD addend again.
    RelocationEntry RE(SectionID, Offset - TLSSequenceOffset + TpoffRelocOffset,
                       ELF::R_X86_64_TPOFF32, Value.Addend - Addend);
    if (Value.SymbolName)
      addRelocationForSymbol(RE, Value.SymbolName);
    else
      addRelocationForSection(RE, Value.SectionID);
  } else if (RelType == ELF::R_X86_64_TLSLD) {
    if (IsSmallCodeModel) {
      if (!IsGOTPCRel) {
        static const std::initializer_list<uint8_t> CodeSequence = {
            0x48, 0x8d, 0x3d, 0x00, 0x00, 0x00,
            0x00,                        // leaq <disp32>(%rip), %rdi
            0xe8, 0x00, 0x00, 0x00, 0x00 // call __tls_get_addr@plt
        };
        ExpectedCodeSequence = ArrayRef<uint8_t>(CodeSequence);
        TLSSequenceOffset = 3;

        // The replacement code for the small code model
        static const std::initializer_list<uint8_t> SmallSequence = {
            0x66, 0x66, 0x66, // three data16 prefixes (no-op)
            0x64, 0x48, 0x8b, 0x04, 0x25,
            0x00, 0x00, 0x00, 0x00 // mov %fs:0, %rax
        };
        NewCodeSequence = ArrayRef<uint8_t>(SmallSequence);
      } else {
        // This code sequence is not described in the TLS spec but gcc
        // generates it sometimes.
        static const std::initializer_list<uint8_t> CodeSequence = {
            0x48, 0x8d, 0x3d, 0x00,
            0x00, 0x00, 0x00, // leaq <disp32>(%rip), %rdi
            0xff, 0x15, 0x00, 0x00,
            0x00, 0x00 // call *__tls_get_addr@gotpcrel(%rip)
        };
        ExpectedCodeSequence = ArrayRef<uint8_t>(CodeSequence);
        TLSSequenceOffset = 3;

        // The replacement code is just like the one above, but it needs to
        // be one byte longer.
        static const std::initializer_list<uint8_t> SmallSequence = {
            0x0f, 0x1f, 0x40, 0x00, // 4 byte nop
            0x64, 0x48, 0x8b, 0x04, 0x25,
            0x00, 0x00, 0x00, 0x00 // mov %fs:0, %rax
        };
        NewCodeSequence = ArrayRef<uint8_t>(SmallSequence);
      }
    } else {
      // This is the same sequence as for the TLSGD case with the large
      // memory model above.
      static const std::initializer_list<uint8_t> CodeSequence = {
          0x48, 0x8d, 0x3d, 0x00, 0x00, 0x00, 0x00, // lea <disp32>(%rip),
                                                    // %rdi
          0x48, 0xb8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
          0x00,             // movabs $__tls_get_addr@pltoff, %rax
          0x48, 0x01, 0xd8, // add %rbx, %rax
          0xff, 0xd0        // call *%rax
      };
      ExpectedCodeSequence = ArrayRef<uint8_t>(CodeSequence);
      TLSSequenceOffset = 3;

      // The replacement code for the large code model
      static const std::initializer_list<uint8_t> LargeSequence = {
          0x66, 0x66, 0x66, // three data16 prefixes (no-op)
          0x66, 0x66, 0x0f, 0x1f, 0x84, 0x00, 0x00, 0x00, 0x00,
          0x00, // 10 byte nop
          0x64, 0x48, 0x8b, 0x04, 0x25, 0x00, 0x00, 0x00, 0x00 // mov %fs:0,%rax
      };
      NewCodeSequence = ArrayRef<uint8_t>(LargeSequence);
    }
  } else {
    llvm_unreachable("both TLS relocations handled above");
  }

  assert(ExpectedCodeSequence.size() == NewCodeSequence.size() &&
         "Old and new code sequences must have the same size");

  auto &Section = Sections[SectionID];
  if (Offset < TLSSequenceOffset ||
      (Offset - TLSSequenceOffset + NewCodeSequence.size()) >
          Section.getSize()) {
    report_fatal_error("unexpected end of section in TLS sequence");
  }

  auto *TLSSequence = Section.getAddressWithOffset(Offset - TLSSequenceOffset);
  if (ArrayRef<uint8_t>(TLSSequence, ExpectedCodeSequence.size()) !=
      ExpectedCodeSequence) {
    report_fatal_error(
        "invalid TLS sequence for Global/Local Dynamic TLS Model");
  }

  memcpy(TLSSequence, NewCodeSequence.data(), NewCodeSequence.size());
}
size_t RuntimeDyldELF::getGOTEntrySize() {
  // We don't use the GOT in all of these cases, but it's essentially free
  // to put them all here.
  size_t Result = 0;
  switch (Arch) {
  case Triple::x86_64:
  case Triple::aarch64:
  case Triple::aarch64_be:
  case Triple::ppc64:
  case Triple::ppc64le:
  case Triple::systemz:
    Result = sizeof(uint64_t);
    break;
  case Triple::x86:
  case Triple::arm:
  case Triple::thumb:
    Result = sizeof(uint32_t);
    break;
  case Triple::mips:
  case Triple::mipsel:
  case Triple::mips64:
  case Triple::mips64el:
    if (IsMipsO32ABI || IsMipsN32ABI)
      Result = sizeof(uint32_t);
    else if (IsMipsN64ABI)
      Result = sizeof(uint64_t);
    else
      llvm_unreachable("Mips ABI not handled");
    break;
  default:
    llvm_unreachable("Unsupported CPU type!");
  }
  return Result;
}
uint64_t RuntimeDyldELF::allocateGOTEntries(unsigned no) {
  if (GOTSectionID == 0) {
    GOTSectionID = Sections.size();
    // Reserve a section ID. We'll allocate the section later
    // once we know the total size.
    Sections.push_back(SectionEntry(".got", nullptr, 0, 0, 0));
  }
  uint64_t StartOffset = CurrentGOTIndex * getGOTEntrySize();
  CurrentGOTIndex += no;
  return StartOffset;
}
uint64_t RuntimeDyldELF::findOrAllocGOTEntry(const RelocationValueRef &Value,
                                             unsigned GOTRelType) {
  auto E = GOTOffsetMap.insert({Value, 0});
  if (E.second) {
    uint64_t GOTOffset = allocateGOTEntries(1);

    // Create relocation for newly created GOT entry
    RelocationEntry RE =
        computeGOTOffsetRE(GOTOffset, Value.Offset, GOTRelType);
    if (Value.SymbolName)
      addRelocationForSymbol(RE, Value.SymbolName);
    else
      addRelocationForSection(RE, Value.SectionID);

    E.first->second = GOTOffset;
  }

  return E.first->second;
}

void RuntimeDyldELF::resolveGOTOffsetRelocation(unsigned SectionID,
                                                uint64_t Offset,
                                                uint64_t GOTOffset,
                                                uint32_t Type) {
  // Fill in the relative address of the GOT Entry into the stub
  RelocationEntry GOTRE(SectionID, Offset, Type, GOTOffset);
  addRelocationForSection(GOTRE, GOTSectionID);
}

RelocationEntry RuntimeDyldELF::computeGOTOffsetRE(uint64_t GOTOffset,
                                                   uint64_t SymbolOffset,
                                                   uint32_t Type) {
  return RelocationEntry(GOTSectionID, GOTOffset, Type, SymbolOffset);
}
Error RuntimeDyldELF::finalizeLoad(const ObjectFile &Obj,
                                   ObjSectionToIDMap &SectionMap) {
  if (IsMipsO32ABI)
    if (!PendingRelocs.empty())
      return make_error<RuntimeDyldError>("Can't find matching LO16 reloc");

  // If necessary, allocate the global offset table
  if (GOTSectionID != 0) {
    // Allocate memory for the section
    size_t TotalSize = CurrentGOTIndex * getGOTEntrySize();
    uint8_t *Addr = MemMgr.allocateDataSection(TotalSize, getGOTEntrySize(),
                                               GOTSectionID, ".got", false);
    if (!Addr)
      return make_error<RuntimeDyldError>("Unable to allocate memory for GOT!");

    Sections[GOTSectionID] =
        SectionEntry(".got", Addr, TotalSize, TotalSize, 0);

    // For now, initialize all GOT entries to zero. We'll fill them in as
    // needed when GOT-based relocations are applied.
    memset(Addr, 0, TotalSize);
    if (IsMipsN32ABI || IsMipsN64ABI) {
      // To correctly resolve MIPS GOT relocations, we need a mapping from
      // the object's sections to GOTs.
      for (section_iterator SI = Obj.section_begin(), SE = Obj.section_end();
           SI != SE; ++SI) {
        if (SI->relocation_begin() != SI->relocation_end()) {
          Expected<section_iterator> RelSecOrErr = SI->getRelocatedSection();
          if (!RelSecOrErr)
            return make_error<RuntimeDyldError>(
                toString(RelSecOrErr.takeError()));

          section_iterator RelocatedSection = *RelSecOrErr;
          ObjSectionToIDMap::iterator i = SectionMap.find(*RelocatedSection);
          assert(i != SectionMap.end());
          SectionToGOTMap[i->second] = GOTSectionID;
        }
      }
      GOTSymbolOffsets.clear();
    }
  }
  // Look for and record the EH frame section.
  ObjSectionToIDMap::iterator i, e;
  for (i = SectionMap.begin(), e = SectionMap.end(); i != e; ++i) {
    const SectionRef &Section = i->first;

    StringRef Name;
    Expected<StringRef> NameOrErr = Section.getName();
    if (NameOrErr)
      Name = *NameOrErr;
    else
      consumeError(NameOrErr.takeError());

    if (Name == ".eh_frame") {
      UnregisteredEHFrameSections.push_back(i->second);
      break;
    }
  }

  GOTSectionID = 0;
  CurrentGOTIndex = 0;

  return Error::success();
}

bool RuntimeDyldELF::isCompatibleFile(const object::ObjectFile &Obj) const {
  return Obj.isELF();
}
bool RuntimeDyldELF::relocationNeedsGot(const RelocationRef &R) const {
  unsigned RelTy = R.getType();
  if (Arch == Triple::aarch64 || Arch == Triple::aarch64_be)
    return RelTy == ELF::R_AARCH64_ADR_GOT_PAGE ||
           RelTy == ELF::R_AARCH64_LD64_GOT_LO12_NC;

  if (Arch == Triple::x86_64)
    return RelTy == ELF::R_X86_64_GOTPCREL ||
           RelTy == ELF::R_X86_64_GOTPCRELX ||
           RelTy == ELF::R_X86_64_GOT64 ||
           RelTy == ELF::R_X86_64_REX_GOTPCRELX;
  return false;
}

bool RuntimeDyldELF::relocationNeedsStub(const RelocationRef &R) const {
  if (Arch != Triple::x86_64)
    return true; // Conservative answer

  switch (R.getType()) {
  default:
    return true; // Conservative answer

  case ELF::R_X86_64_GOTPCREL:
  case ELF::R_X86_64_GOTPCRELX:
  case ELF::R_X86_64_REX_GOTPCRELX:
  case ELF::R_X86_64_GOTPC64:
  case ELF::R_X86_64_GOT64:
  case ELF::R_X86_64_GOTOFF64:
  case ELF::R_X86_64_PC32:
  case ELF::R_X86_64_PC64:
  case ELF::R_X86_64_64:
    // We know that these relocation types won't need a stub function. This
    // list can be extended as needed.
    return false;
  }
}
} // namespace llvm