RuntimeDyldELF.cpp 101 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560
  1. //===-- RuntimeDyldELF.cpp - Run-time dynamic linker for MC-JIT -*- C++ -*-===//
  2. //
  3. // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
  4. // See https://llvm.org/LICENSE.txt for license information.
  5. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
  6. //
  7. //===----------------------------------------------------------------------===//
  8. //
  9. // Implementation of ELF support for the MC-JIT runtime dynamic linker.
  10. //
  11. //===----------------------------------------------------------------------===//
  12. #include "RuntimeDyldELF.h"
  13. #include "RuntimeDyldCheckerImpl.h"
  14. #include "Targets/RuntimeDyldELFMips.h"
  15. #include "llvm/ADT/STLExtras.h"
  16. #include "llvm/ADT/StringRef.h"
  17. #include "llvm/ADT/Triple.h"
  18. #include "llvm/BinaryFormat/ELF.h"
  19. #include "llvm/Object/ELFObjectFile.h"
  20. #include "llvm/Object/ObjectFile.h"
  21. #include "llvm/Support/Endian.h"
  22. #include "llvm/Support/MemoryBuffer.h"
  23. using namespace llvm;
  24. using namespace llvm::object;
  25. using namespace llvm::support::endian;
  26. #define DEBUG_TYPE "dyld"
  27. static void or32le(void *P, int32_t V) { write32le(P, read32le(P) | V); }
  28. static void or32AArch64Imm(void *L, uint64_t Imm) {
  29. or32le(L, (Imm & 0xFFF) << 10);
  30. }
  31. template <class T> static void write(bool isBE, void *P, T V) {
  32. isBE ? write<T, support::big>(P, V) : write<T, support::little>(P, V);
  33. }
  34. static void write32AArch64Addr(void *L, uint64_t Imm) {
  35. uint32_t ImmLo = (Imm & 0x3) << 29;
  36. uint32_t ImmHi = (Imm & 0x1FFFFC) << 3;
  37. uint64_t Mask = (0x3 << 29) | (0x1FFFFC << 3);
  38. write32le(L, (read32le(L) & ~Mask) | ImmLo | ImmHi);
  39. }
  40. // Return the bits [Start, End] from Val shifted Start bits.
  41. // For instance, getBits(0xF0, 4, 8) returns 0xF.
  42. static uint64_t getBits(uint64_t Val, int Start, int End) {
  43. uint64_t Mask = ((uint64_t)1 << (End + 1 - Start)) - 1;
  44. return (Val >> Start) & Mask;
  45. }
  46. namespace {
// An ELFObjectFile subclass whose in-memory section headers and symbol table
// can be patched after the sections have been placed at their final load
// addresses. Used to produce a debug object whose addresses match the
// JIT-linked code (see createRTDyldELFObject below).
template <class ELFT> class DyldELFObject : public ELFObjectFile<ELFT> {
  LLVM_ELF_IMPORT_TYPES_ELFT(ELFT)

  typedef typename ELFT::uint addr_type;

  DyldELFObject(ELFObjectFile<ELFT> &&Obj);

public:
  static Expected<std::unique_ptr<DyldELFObject>>
  create(MemoryBufferRef Wrapper);

  // Rewrite the sh_addr field of Sec's section header to Addr.
  void updateSectionAddress(const SectionRef &Sec, uint64_t Addr);

  // Rewrite the st_value field of SymRef's symbol-table entry to Addr.
  void updateSymbolAddress(const SymbolRef &SymRef, uint64_t Addr);

  // Methods for type inquiry through isa, cast and dyn_cast
  static bool classof(const Binary *v) {
    return (isa<ELFObjectFile<ELFT>>(v) &&
            classof(cast<ELFObjectFile<ELFT>>(v)));
  }
  static bool classof(const ELFObjectFile<ELFT> *v) {
    return v->isDyldType();
  }
};
// The MemoryBuffer passed into this constructor is just a wrapper around the
// actual memory. Ultimately, the Binary parent class will take ownership of
// this MemoryBuffer object but not the underlying memory.
template <class ELFT>
DyldELFObject<ELFT>::DyldELFObject(ELFObjectFile<ELFT> &&Obj)
    : ELFObjectFile<ELFT>(std::move(Obj)) {
  // Mark this object so isDyldType()/classof can identify it.
  this->isDyldELFObject = true;
}
  73. template <class ELFT>
  74. Expected<std::unique_ptr<DyldELFObject<ELFT>>>
  75. DyldELFObject<ELFT>::create(MemoryBufferRef Wrapper) {
  76. auto Obj = ELFObjectFile<ELFT>::create(Wrapper);
  77. if (auto E = Obj.takeError())
  78. return std::move(E);
  79. std::unique_ptr<DyldELFObject<ELFT>> Ret(
  80. new DyldELFObject<ELFT>(std::move(*Obj)));
  81. return std::move(Ret);
  82. }
// Overwrite the sh_addr field of the in-memory section header. The
// const_cast is required because SectionRef only exposes a const view of the
// underlying header bytes.
template <class ELFT>
void DyldELFObject<ELFT>::updateSectionAddress(const SectionRef &Sec,
                                               uint64_t Addr) {
  DataRefImpl ShdrRef = Sec.getRawDataRefImpl();
  Elf_Shdr *shdr =
      const_cast<Elf_Shdr *>(reinterpret_cast<const Elf_Shdr *>(ShdrRef.p));

  // This assumes the address passed in matches the target address bitness
  // The template-based type cast handles everything else.
  shdr->sh_addr = static_cast<addr_type>(Addr);
}
// Overwrite the st_value field of the in-memory symbol-table entry for
// SymRef.
template <class ELFT>
void DyldELFObject<ELFT>::updateSymbolAddress(const SymbolRef &SymRef,
                                              uint64_t Addr) {
  Elf_Sym *sym = const_cast<Elf_Sym *>(
      ELFObjectFile<ELFT>::getSymbol(SymRef.getRawDataRefImpl()));

  // This assumes the address passed in matches the target address bitness
  // The template-based type cast handles everything else.
  sym->st_value = static_cast<addr_type>(Addr);
}
// Records the section-to-ID mapping produced while loading an ELF object and
// can synthesize a debug object with relocated section addresses (see
// getObjectForDebug).
class LoadedELFObjectInfo final
    : public LoadedObjectInfoHelper<LoadedELFObjectInfo,
                                    RuntimeDyld::LoadedObjectInfo> {
public:
  LoadedELFObjectInfo(RuntimeDyldImpl &RTDyld, ObjSectionToIDMap ObjSecToIDMap)
      : LoadedObjectInfoHelper(RTDyld, std::move(ObjSecToIDMap)) {}

  OwningBinary<ObjectFile>
  getObjectForDebug(const ObjectFile &Obj) const override;
};
  111. template <typename ELFT>
  112. static Expected<std::unique_ptr<DyldELFObject<ELFT>>>
  113. createRTDyldELFObject(MemoryBufferRef Buffer, const ObjectFile &SourceObject,
  114. const LoadedELFObjectInfo &L) {
  115. typedef typename ELFT::Shdr Elf_Shdr;
  116. typedef typename ELFT::uint addr_type;
  117. Expected<std::unique_ptr<DyldELFObject<ELFT>>> ObjOrErr =
  118. DyldELFObject<ELFT>::create(Buffer);
  119. if (Error E = ObjOrErr.takeError())
  120. return std::move(E);
  121. std::unique_ptr<DyldELFObject<ELFT>> Obj = std::move(*ObjOrErr);
  122. // Iterate over all sections in the object.
  123. auto SI = SourceObject.section_begin();
  124. for (const auto &Sec : Obj->sections()) {
  125. Expected<StringRef> NameOrErr = Sec.getName();
  126. if (!NameOrErr) {
  127. consumeError(NameOrErr.takeError());
  128. continue;
  129. }
  130. if (*NameOrErr != "") {
  131. DataRefImpl ShdrRef = Sec.getRawDataRefImpl();
  132. Elf_Shdr *shdr = const_cast<Elf_Shdr *>(
  133. reinterpret_cast<const Elf_Shdr *>(ShdrRef.p));
  134. if (uint64_t SecLoadAddr = L.getSectionLoadAddress(*SI)) {
  135. // This assumes that the address passed in matches the target address
  136. // bitness. The template-based type cast handles everything else.
  137. shdr->sh_addr = static_cast<addr_type>(SecLoadAddr);
  138. }
  139. }
  140. ++SI;
  141. }
  142. return std::move(Obj);
  143. }
// Copy Obj's bytes and produce an ObjectFile whose section addresses have
// been updated to the load addresses recorded in L. The returned
// OwningBinary keeps the copied buffer alive alongside the object.
static OwningBinary<ObjectFile>
createELFDebugObject(const ObjectFile &Obj, const LoadedELFObjectInfo &L) {
  assert(Obj.isELF() && "Not an ELF object file.");

  std::unique_ptr<MemoryBuffer> Buffer =
      MemoryBuffer::getMemBufferCopy(Obj.getData(), Obj.getFileName());

  Expected<std::unique_ptr<ObjectFile>> DebugObj(nullptr);
  // Mark the (deliberately empty) Expected as checked before overwriting it.
  handleAllErrors(DebugObj.takeError());
  // Dispatch on address size and endianness to the matching ELFT
  // instantiation.
  if (Obj.getBytesInAddress() == 4 && Obj.isLittleEndian())
    DebugObj =
        createRTDyldELFObject<ELF32LE>(Buffer->getMemBufferRef(), Obj, L);
  else if (Obj.getBytesInAddress() == 4 && !Obj.isLittleEndian())
    DebugObj =
        createRTDyldELFObject<ELF32BE>(Buffer->getMemBufferRef(), Obj, L);
  else if (Obj.getBytesInAddress() == 8 && !Obj.isLittleEndian())
    DebugObj =
        createRTDyldELFObject<ELF64BE>(Buffer->getMemBufferRef(), Obj, L);
  else if (Obj.getBytesInAddress() == 8 && Obj.isLittleEndian())
    DebugObj =
        createRTDyldELFObject<ELF64LE>(Buffer->getMemBufferRef(), Obj, L);
  else
    llvm_unreachable("Unexpected ELF format");

  // NOTE(review): this swallows any error from createRTDyldELFObject and then
  // dereferences DebugObj unconditionally — assumes creation cannot fail for
  // a buffer we just copied from a valid ELF object.
  handleAllErrors(DebugObj.takeError());
  return OwningBinary<ObjectFile>(std::move(*DebugObj), std::move(Buffer));
}
// Build a copy of Obj whose section addresses reflect the actual load
// addresses recorded in this object, suitable for handing to a debugger.
OwningBinary<ObjectFile>
LoadedELFObjectInfo::getObjectForDebug(const ObjectFile &Obj) const {
  return createELFDebugObject(Obj, *this);
}
  172. } // anonymous namespace
  173. namespace llvm {
// Construct an ELF dynamic loader; no GOT section exists yet (GOTSectionID
// and CurrentGOTIndex both start at zero).
RuntimeDyldELF::RuntimeDyldELF(RuntimeDyld::MemoryManager &MemMgr,
                               JITSymbolResolver &Resolver)
    : RuntimeDyldImpl(MemMgr, Resolver), GOTSectionID(0), CurrentGOTIndex(0) {}
// Defaulted out-of-line so the destructor is emitted in this translation
// unit.
RuntimeDyldELF::~RuntimeDyldELF() = default;
  178. void RuntimeDyldELF::registerEHFrames() {
  179. for (int i = 0, e = UnregisteredEHFrameSections.size(); i != e; ++i) {
  180. SID EHFrameSID = UnregisteredEHFrameSections[i];
  181. uint8_t *EHFrameAddr = Sections[EHFrameSID].getAddress();
  182. uint64_t EHFrameLoadAddr = Sections[EHFrameSID].getLoadAddress();
  183. size_t EHFrameSize = Sections[EHFrameSID].getSize();
  184. MemMgr.registerEHFrames(EHFrameAddr, EHFrameLoadAddr, EHFrameSize);
  185. }
  186. UnregisteredEHFrameSections.clear();
  187. }
  188. std::unique_ptr<RuntimeDyldELF>
  189. llvm::RuntimeDyldELF::create(Triple::ArchType Arch,
  190. RuntimeDyld::MemoryManager &MemMgr,
  191. JITSymbolResolver &Resolver) {
  192. switch (Arch) {
  193. default:
  194. return std::make_unique<RuntimeDyldELF>(MemMgr, Resolver);
  195. case Triple::mips:
  196. case Triple::mipsel:
  197. case Triple::mips64:
  198. case Triple::mips64el:
  199. return std::make_unique<RuntimeDyldELFMips>(MemMgr, Resolver);
  200. }
  201. }
// Load O into memory and return the section mapping needed for debugging, or
// null on failure (with HasError set and the message appended to ErrorStr).
std::unique_ptr<RuntimeDyld::LoadedObjectInfo>
RuntimeDyldELF::loadObject(const object::ObjectFile &O) {
  if (auto ObjSectionToIDOrErr = loadObjectImpl(O))
    return std::make_unique<LoadedELFObjectInfo>(*this, *ObjSectionToIDOrErr);
  else {
    HasError = true;
    raw_string_ostream ErrStream(ErrorStr);
    logAllUnhandledErrors(ObjSectionToIDOrErr.takeError(), ErrStream);
    return nullptr;
  }
}
// Resolve one x86-64 relocation: patch the bytes at Section+Offset using the
// already-resolved target address Value and the relocation's Addend. In the
// comments below, S = Value, A = Addend, P = the relocated field's load
// address. SymOffset is accepted for signature uniformity; no case below
// reads it. All fields are written little-endian.
void RuntimeDyldELF::resolveX86_64Relocation(const SectionEntry &Section,
                                             uint64_t Offset, uint64_t Value,
                                             uint32_t Type, int64_t Addend,
                                             uint64_t SymOffset) {
  switch (Type) {
  default:
    report_fatal_error("Relocation type not implemented yet!");
    break;
  case ELF::R_X86_64_NONE:
    break;
  case ELF::R_X86_64_8: {
    // S + A, truncated to 8 bits after a signed-range sanity check.
    Value += Addend;
    assert((int64_t)Value <= INT8_MAX && (int64_t)Value >= INT8_MIN);
    uint8_t TruncatedAddr = (Value & 0xFF);
    *Section.getAddressWithOffset(Offset) = TruncatedAddr;
    LLVM_DEBUG(dbgs() << "Writing " << format("%p", TruncatedAddr) << " at "
                      << format("%p\n", Section.getAddressWithOffset(Offset)));
    break;
  }
  case ELF::R_X86_64_16: {
    // S + A, truncated to 16 bits.
    Value += Addend;
    assert((int64_t)Value <= INT16_MAX && (int64_t)Value >= INT16_MIN);
    uint16_t TruncatedAddr = (Value & 0xFFFF);
    support::ulittle16_t::ref(Section.getAddressWithOffset(Offset)) =
        TruncatedAddr;
    LLVM_DEBUG(dbgs() << "Writing " << format("%p", TruncatedAddr) << " at "
                      << format("%p\n", Section.getAddressWithOffset(Offset)));
    break;
  }
  case ELF::R_X86_64_64: {
    // Full 64-bit absolute address: S + A.
    support::ulittle64_t::ref(Section.getAddressWithOffset(Offset)) =
        Value + Addend;
    LLVM_DEBUG(dbgs() << "Writing " << format("%p", (Value + Addend)) << " at "
                      << format("%p\n", Section.getAddressWithOffset(Offset)));
    break;
  }
  case ELF::R_X86_64_32:
  case ELF::R_X86_64_32S: {
    // 32-bit absolute: R_X86_64_32 is zero-extended by the consumer,
    // R_X86_64_32S sign-extended; assert the matching range.
    Value += Addend;
    assert((Type == ELF::R_X86_64_32 && (Value <= UINT32_MAX)) ||
           (Type == ELF::R_X86_64_32S &&
            ((int64_t)Value <= INT32_MAX && (int64_t)Value >= INT32_MIN)));
    uint32_t TruncatedAddr = (Value & 0xFFFFFFFF);
    support::ulittle32_t::ref(Section.getAddressWithOffset(Offset)) =
        TruncatedAddr;
    LLVM_DEBUG(dbgs() << "Writing " << format("%p", TruncatedAddr) << " at "
                      << format("%p\n", Section.getAddressWithOffset(Offset)));
    break;
  }
  case ELF::R_X86_64_PC8: {
    // PC-relative, 8-bit: S + A - P.
    uint64_t FinalAddress = Section.getLoadAddressWithOffset(Offset);
    int64_t RealOffset = Value + Addend - FinalAddress;
    assert(isInt<8>(RealOffset));
    int8_t TruncOffset = (RealOffset & 0xFF);
    Section.getAddress()[Offset] = TruncOffset;
    break;
  }
  case ELF::R_X86_64_PC32: {
    // PC-relative, 32-bit: S + A - P.
    uint64_t FinalAddress = Section.getLoadAddressWithOffset(Offset);
    int64_t RealOffset = Value + Addend - FinalAddress;
    assert(isInt<32>(RealOffset));
    int32_t TruncOffset = (RealOffset & 0xFFFFFFFF);
    support::ulittle32_t::ref(Section.getAddressWithOffset(Offset)) =
        TruncOffset;
    break;
  }
  case ELF::R_X86_64_PC64: {
    // PC-relative, 64-bit: S + A - P.
    uint64_t FinalAddress = Section.getLoadAddressWithOffset(Offset);
    int64_t RealOffset = Value + Addend - FinalAddress;
    support::ulittle64_t::ref(Section.getAddressWithOffset(Offset)) =
        RealOffset;
    LLVM_DEBUG(dbgs() << "Writing " << format("%p", RealOffset) << " at "
                      << format("%p\n", FinalAddress));
    break;
  }
  case ELF::R_X86_64_GOTOFF64: {
    // Compute Value - GOTBase.
    // Note: the loop variable deliberately shadows the Section parameter
    // while scanning all sections for the GOT.
    uint64_t GOTBase = 0;
    for (const auto &Section : Sections) {
      if (Section.getName() == ".got") {
        GOTBase = Section.getLoadAddressWithOffset(0);
        break;
      }
    }
    assert(GOTBase != 0 && "missing GOT");
    int64_t GOTOffset = Value - GOTBase + Addend;
    support::ulittle64_t::ref(Section.getAddressWithOffset(Offset)) = GOTOffset;
    break;
  }
  case ELF::R_X86_64_DTPMOD64: {
    // We only have one DSO, so the module id is always 1.
    support::ulittle64_t::ref(Section.getAddressWithOffset(Offset)) = 1;
    break;
  }
  case ELF::R_X86_64_DTPOFF64:
  case ELF::R_X86_64_TPOFF64: {
    // DTPOFF64 should resolve to the offset in the TLS block, TPOFF64 to the
    // offset in the *initial* TLS block. Since we are statically linking, all
    // TLS blocks already exist in the initial block, so resolve both
    // relocations equally.
    support::ulittle64_t::ref(Section.getAddressWithOffset(Offset)) =
        Value + Addend;
    break;
  }
  case ELF::R_X86_64_DTPOFF32:
  case ELF::R_X86_64_TPOFF32: {
    // As for the (D)TPOFF64 relocations above, both DTPOFF32 and TPOFF32 can
    // be resolved equally.
    int64_t RealValue = Value + Addend;
    assert(RealValue >= INT32_MIN && RealValue <= INT32_MAX);
    int32_t TruncValue = RealValue;
    support::ulittle32_t::ref(Section.getAddressWithOffset(Offset)) =
        TruncValue;
    break;
  }
  }
}
// Resolve one i386 relocation at Section+Offset. Only the relocation types
// LLVM's ELF object writer emits are handled; anything else is fatal.
void RuntimeDyldELF::resolveX86Relocation(const SectionEntry &Section,
                                          uint64_t Offset, uint32_t Value,
                                          uint32_t Type, int32_t Addend) {
  switch (Type) {
  case ELF::R_386_32: {
    // Absolute 32-bit: S + A.
    support::ulittle32_t::ref(Section.getAddressWithOffset(Offset)) =
        Value + Addend;
    break;
  }
  // Handle R_386_PLT32 like R_386_PC32 since it should be able to
  // reach any 32 bit address.
  case ELF::R_386_PLT32:
  case ELF::R_386_PC32: {
    // PC-relative 32-bit: S + A - P.
    uint32_t FinalAddress =
        Section.getLoadAddressWithOffset(Offset) & 0xFFFFFFFF;
    uint32_t RealOffset = Value + Addend - FinalAddress;
    support::ulittle32_t::ref(Section.getAddressWithOffset(Offset)) =
        RealOffset;
    break;
  }
  default:
    // There are other relocation types, but it appears these are the
    // only ones currently used by the LLVM ELF object writer
    report_fatal_error("Relocation type not implemented yet!");
    break;
  }
}
// Resolve one AArch64 relocation at Section+Offset. In the comments below,
// S = Value, A = Addend, P = FinalAddress (the relocated field's load
// address). Data fields use the target's endianness via write(); instruction
// fields are always patched little-endian via or32le()/write32AArch64Addr().
void RuntimeDyldELF::resolveAArch64Relocation(const SectionEntry &Section,
                                              uint64_t Offset, uint64_t Value,
                                              uint32_t Type, int64_t Addend) {
  uint32_t *TargetPtr =
      reinterpret_cast<uint32_t *>(Section.getAddressWithOffset(Offset));
  uint64_t FinalAddress = Section.getLoadAddressWithOffset(Offset);

  // Data should use target endian. Code should always use little endian.
  bool isBE = Arch == Triple::aarch64_be;

  LLVM_DEBUG(dbgs() << "resolveAArch64Relocation, LocalAddress: 0x"
                    << format("%llx", Section.getAddressWithOffset(Offset))
                    << " FinalAddress: 0x" << format("%llx", FinalAddress)
                    << " Value: 0x" << format("%llx", Value) << " Type: 0x"
                    << format("%x", Type) << " Addend: 0x"
                    << format("%llx", Addend) << "\n");

  switch (Type) {
  default:
    report_fatal_error("Relocation type not implemented yet!");
    break;
  case ELF::R_AARCH64_NONE:
    break;
  case ELF::R_AARCH64_ABS16: {
    // Absolute 16-bit: S + A.
    uint64_t Result = Value + Addend;
    assert(static_cast<int64_t>(Result) >= INT16_MIN && Result < UINT16_MAX);
    write(isBE, TargetPtr, static_cast<uint16_t>(Result & 0xffffU));
    break;
  }
  case ELF::R_AARCH64_ABS32: {
    // Absolute 32-bit: S + A.
    uint64_t Result = Value + Addend;
    assert(static_cast<int64_t>(Result) >= INT32_MIN && Result < UINT32_MAX);
    write(isBE, TargetPtr, static_cast<uint32_t>(Result & 0xffffffffU));
    break;
  }
  case ELF::R_AARCH64_ABS64:
    // Absolute 64-bit: S + A, no range check needed.
    write(isBE, TargetPtr, Value + Addend);
    break;
  case ELF::R_AARCH64_PLT32: {
    // PC-relative 32-bit (PLT-reachable): S + A - P.
    uint64_t Result = Value + Addend - FinalAddress;
    assert(static_cast<int64_t>(Result) >= INT32_MIN &&
           static_cast<int64_t>(Result) <= INT32_MAX);
    write(isBE, TargetPtr, static_cast<uint32_t>(Result));
    break;
  }
  case ELF::R_AARCH64_PREL16: {
    // PC-relative 16-bit: S + A - P.
    uint64_t Result = Value + Addend - FinalAddress;
    assert(static_cast<int64_t>(Result) >= INT16_MIN &&
           static_cast<int64_t>(Result) <= UINT16_MAX);
    write(isBE, TargetPtr, static_cast<uint16_t>(Result & 0xffffU));
    break;
  }
  case ELF::R_AARCH64_PREL32: {
    // PC-relative 32-bit: S + A - P.
    uint64_t Result = Value + Addend - FinalAddress;
    assert(static_cast<int64_t>(Result) >= INT32_MIN &&
           static_cast<int64_t>(Result) <= UINT32_MAX);
    write(isBE, TargetPtr, static_cast<uint32_t>(Result & 0xffffffffU));
    break;
  }
  case ELF::R_AARCH64_PREL64:
    // PC-relative 64-bit: S + A - P.
    write(isBE, TargetPtr, Value + Addend - FinalAddress);
    break;
  case ELF::R_AARCH64_CONDBR19: {
    uint64_t BranchImm = Value + Addend - FinalAddress;
    assert(isInt<21>(BranchImm));
    // Clear the old immediate field before ORing in the new one.
    *TargetPtr &= 0xff00001fU;
    // Immediate:20:2 goes in bits 23:5 of Bcc, CBZ, CBNZ
    or32le(TargetPtr, (BranchImm & 0x001FFFFC) << 3);
    break;
  }
  case ELF::R_AARCH64_TSTBR14: {
    uint64_t BranchImm = Value + Addend - FinalAddress;
    assert(isInt<16>(BranchImm));
    // Clear the old immediate field before ORing in the new one.
    *TargetPtr &= 0xfff8001fU;
    // Immediate:15:2 goes in bits 18:5 of TBZ, TBNZ
    or32le(TargetPtr, (BranchImm & 0x0000FFFC) << 3);
    break;
  }
  case ELF::R_AARCH64_CALL26: // fallthrough
  case ELF::R_AARCH64_JUMP26: {
    // Operation: S+A-P. Set Call or B immediate value to bits fff_fffc of the
    // calculation.
    uint64_t BranchImm = Value + Addend - FinalAddress;

    // "Check that -2^27 <= result < 2^27".
    assert(isInt<28>(BranchImm));
    or32le(TargetPtr, (BranchImm & 0x0FFFFFFC) >> 2);
    break;
  }
  // The MOVW_UABS_G* shifts below combine extracting a 16-bit group of the
  // address with placing it in the imm16 field at instruction bit 5, e.g.
  // >> 43 == (>> 48) << 5 for the G3 group (address bits 63:48).
  case ELF::R_AARCH64_MOVW_UABS_G3:
    or32le(TargetPtr, ((Value + Addend) & 0xFFFF000000000000) >> 43);
    break;
  case ELF::R_AARCH64_MOVW_UABS_G2_NC:
    or32le(TargetPtr, ((Value + Addend) & 0xFFFF00000000) >> 27);
    break;
  case ELF::R_AARCH64_MOVW_UABS_G1_NC:
    or32le(TargetPtr, ((Value + Addend) & 0xFFFF0000) >> 11);
    break;
  case ELF::R_AARCH64_MOVW_UABS_G0_NC:
    or32le(TargetPtr, ((Value + Addend) & 0xFFFF) << 5);
    break;
  case ELF::R_AARCH64_ADR_PREL_PG_HI21: {
    // Operation: Page(S+A) - Page(P)
    uint64_t Result =
        ((Value + Addend) & ~0xfffULL) - (FinalAddress & ~0xfffULL);

    // Check that -2^32 <= X < 2^32
    assert(isInt<33>(Result) && "overflow check failed for relocation");

    // Immediate goes in bits 30:29 + 5:23 of ADRP instruction, taken
    // from bits 32:12 of X.
    write32AArch64Addr(TargetPtr, Result >> 12);
    break;
  }
  case ELF::R_AARCH64_ADD_ABS_LO12_NC:
    // Operation: S + A
    // Immediate goes in bits 21:10 of LD/ST instruction, taken
    // from bits 11:0 of X
    or32AArch64Imm(TargetPtr, Value + Addend);
    break;
  case ELF::R_AARCH64_LDST8_ABS_LO12_NC:
    // Operation: S + A
    // Immediate goes in bits 21:10 of LD/ST instruction, taken
    // from bits 11:0 of X
    or32AArch64Imm(TargetPtr, getBits(Value + Addend, 0, 11));
    break;
  case ELF::R_AARCH64_LDST16_ABS_LO12_NC:
    // Operation: S + A
    // Immediate goes in bits 21:10 of LD/ST instruction, taken
    // from bits 11:1 of X
    or32AArch64Imm(TargetPtr, getBits(Value + Addend, 1, 11));
    break;
  case ELF::R_AARCH64_LDST32_ABS_LO12_NC:
    // Operation: S + A
    // Immediate goes in bits 21:10 of LD/ST instruction, taken
    // from bits 11:2 of X
    or32AArch64Imm(TargetPtr, getBits(Value + Addend, 2, 11));
    break;
  case ELF::R_AARCH64_LDST64_ABS_LO12_NC:
    // Operation: S + A
    // Immediate goes in bits 21:10 of LD/ST instruction, taken
    // from bits 11:3 of X
    or32AArch64Imm(TargetPtr, getBits(Value + Addend, 3, 11));
    break;
  case ELF::R_AARCH64_LDST128_ABS_LO12_NC:
    // Operation: S + A
    // Immediate goes in bits 21:10 of LD/ST instruction, taken
    // from bits 11:4 of X
    or32AArch64Imm(TargetPtr, getBits(Value + Addend, 4, 11));
    break;
  case ELF::R_AARCH64_LD_PREL_LO19: {
    // Operation: S + A - P
    uint64_t Result = Value + Addend - FinalAddress;

    // "Check that -2^20 <= result < 2^20".
    assert(isInt<21>(Result));

    *TargetPtr &= 0xff00001fU;
    // Immediate goes in bits 23:5 of LD imm instruction, taken
    // from bits 20:2 of X
    *TargetPtr |= ((Result & 0xffc) << (5 - 2));
    break;
  }
  case ELF::R_AARCH64_ADR_PREL_LO21: {
    // Operation: S + A - P
    uint64_t Result = Value + Addend - FinalAddress;

    // "Check that -2^20 <= result < 2^20".
    assert(isInt<21>(Result));

    *TargetPtr &= 0x9f00001fU;
    // Immediate goes in bits 23:5, 30:29 of ADR imm instruction, taken
    // from bits 20:0 of X
    *TargetPtr |= ((Result & 0xffc) << (5 - 2));
    *TargetPtr |= (Result & 0x3) << 29;
    break;
  }
  }
}
// Resolve one 32-bit ARM relocation at Section+Offset. Note that Addend is
// folded into Value up front, so the cases below operate on S + A directly.
void RuntimeDyldELF::resolveARMRelocation(const SectionEntry &Section,
                                          uint64_t Offset, uint32_t Value,
                                          uint32_t Type, int32_t Addend) {
  // TODO: Add Thumb relocations.
  uint32_t *TargetPtr =
      reinterpret_cast<uint32_t *>(Section.getAddressWithOffset(Offset));
  uint32_t FinalAddress = Section.getLoadAddressWithOffset(Offset) & 0xFFFFFFFF;
  Value += Addend;

  LLVM_DEBUG(dbgs() << "resolveARMRelocation, LocalAddress: "
                    << Section.getAddressWithOffset(Offset)
                    << " FinalAddress: " << format("%p", FinalAddress)
                    << " Value: " << format("%x", Value)
                    << " Type: " << format("%x", Type)
                    << " Addend: " << format("%x", Addend) << "\n");

  switch (Type) {
  default:
    llvm_unreachable("Not implemented relocation type!");

  case ELF::R_ARM_NONE:
    break;
    // Write a 31bit signed offset
  case ELF::R_ARM_PREL31:
    // Preserve the top bit of the instruction word; store the PC-relative
    // offset in the low 31 bits.
    support::ulittle32_t::ref{TargetPtr} =
        (support::ulittle32_t::ref{TargetPtr} & 0x80000000) |
        ((Value - FinalAddress) & ~0x80000000);
    break;
  case ELF::R_ARM_TARGET1:
  case ELF::R_ARM_ABS32:
    support::ulittle32_t::ref{TargetPtr} = Value;
    break;
    // Write first 16 bit of 32 bit value to the mov instruction.
    // Last 4 bit should be shifted.
  case ELF::R_ARM_MOVW_ABS_NC:
  case ELF::R_ARM_MOVT_ABS:
    // MOVW takes the low half, MOVT the high half; the 16-bit immediate is
    // split into imm12 (bits 11:0) and imm4 (bits 19:16) of the instruction.
    if (Type == ELF::R_ARM_MOVW_ABS_NC)
      Value = Value & 0xFFFF;
    else if (Type == ELF::R_ARM_MOVT_ABS)
      Value = (Value >> 16) & 0xFFFF;
    support::ulittle32_t::ref{TargetPtr} =
        (support::ulittle32_t::ref{TargetPtr} & ~0x000F0FFF) | (Value & 0xFFF) |
        (((Value >> 12) & 0xF) << 16);
    break;
    // Write 24 bit relative value to the branch instruction.
  case ELF::R_ARM_PC24: // Fall through.
  case ELF::R_ARM_CALL: // Fall through.
  case ELF::R_ARM_JUMP24:
    // The -8 accounts for the ARM PC reading two instructions ahead of the
    // branch being executed.
    int32_t RelValue = static_cast<int32_t>(Value - FinalAddress - 8);
    RelValue = (RelValue & 0x03FFFFFC) >> 2;
    // The writer is expected to have emitted a branch-to-self (-2 encoded
    // offset) as the placeholder.
    assert((support::ulittle32_t::ref{TargetPtr} & 0xFFFFFF) == 0xFFFFFE);
    support::ulittle32_t::ref{TargetPtr} =
        (support::ulittle32_t::ref{TargetPtr} & 0xFF000000) | RelValue;
    break;
  }
}
  579. void RuntimeDyldELF::setMipsABI(const ObjectFile &Obj) {
  580. if (Arch == Triple::UnknownArch ||
  581. !StringRef(Triple::getArchTypePrefix(Arch)).equals("mips")) {
  582. IsMipsO32ABI = false;
  583. IsMipsN32ABI = false;
  584. IsMipsN64ABI = false;
  585. return;
  586. }
  587. if (auto *E = dyn_cast<ELFObjectFileBase>(&Obj)) {
  588. unsigned AbiVariant = E->getPlatformFlags();
  589. IsMipsO32ABI = AbiVariant & ELF::EF_MIPS_ABI_O32;
  590. IsMipsN32ABI = AbiVariant & ELF::EF_MIPS_ABI2;
  591. }
  592. IsMipsN64ABI = Obj.getFileFormatName().equals("elf64-mips");
  593. }
  594. // Return the .TOC. section and offset.
  595. Error RuntimeDyldELF::findPPC64TOCSection(const ELFObjectFileBase &Obj,
  596. ObjSectionToIDMap &LocalSections,
  597. RelocationValueRef &Rel) {
  598. // Set a default SectionID in case we do not find a TOC section below.
  599. // This may happen for references to TOC base base (sym@toc, .odp
  600. // relocation) without a .toc directive. In this case just use the
  601. // first section (which is usually the .odp) since the code won't
  602. // reference the .toc base directly.
  603. Rel.SymbolName = nullptr;
  604. Rel.SectionID = 0;
  605. // The TOC consists of sections .got, .toc, .tocbss, .plt in that
  606. // order. The TOC starts where the first of these sections starts.
  607. for (auto &Section : Obj.sections()) {
  608. Expected<StringRef> NameOrErr = Section.getName();
  609. if (!NameOrErr)
  610. return NameOrErr.takeError();
  611. StringRef SectionName = *NameOrErr;
  612. if (SectionName == ".got"
  613. || SectionName == ".toc"
  614. || SectionName == ".tocbss"
  615. || SectionName == ".plt") {
  616. if (auto SectionIDOrErr =
  617. findOrEmitSection(Obj, Section, false, LocalSections))
  618. Rel.SectionID = *SectionIDOrErr;
  619. else
  620. return SectionIDOrErr.takeError();
  621. break;
  622. }
  623. }
  624. // Per the ppc64-elf-linux ABI, The TOC base is TOC value plus 0x8000
  625. // thus permitting a full 64 Kbytes segment.
  626. Rel.Addend = 0x8000;
  627. return Error::success();
  628. }
// Returns the sections and offset associated with the ODP entry referenced
// by Symbol.
Error RuntimeDyldELF::findOPDEntrySection(const ELFObjectFileBase &Obj,
                                          ObjSectionToIDMap &LocalSections,
                                          RelocationValueRef &Rel) {
  // Get the ELF symbol value (st_value) to compare with Relocation offset in
  // .opd entries
  for (section_iterator si = Obj.section_begin(), se = Obj.section_end();
       si != se; ++si) {
    // Only relocation sections that apply to .opd are of interest.
    Expected<section_iterator> RelSecOrErr = si->getRelocatedSection();
    if (!RelSecOrErr)
      report_fatal_error(Twine(toString(RelSecOrErr.takeError())));

    section_iterator RelSecI = *RelSecOrErr;
    if (RelSecI == Obj.section_end())
      continue;

    Expected<StringRef> NameOrErr = RelSecI->getName();
    if (!NameOrErr)
      return NameOrErr.takeError();
    StringRef RelSectionName = *NameOrErr;

    if (RelSectionName != ".opd")
      continue;

    // Scan for the .opd entry describing the symbol: it is an R_PPC64_ADDR64
    // relocation (function address) immediately followed by an R_PPC64_TOC.
    for (elf_relocation_iterator i = si->relocation_begin(),
                                 e = si->relocation_end();
         i != e;) {
      // The R_PPC64_ADDR64 relocation indicates the first field
      // of a .opd entry
      uint64_t TypeFunc = i->getType();
      if (TypeFunc != ELF::R_PPC64_ADDR64) {
        ++i;
        continue;
      }

      uint64_t TargetSymbolOffset = i->getOffset();
      symbol_iterator TargetSymbol = i->getSymbol();
      int64_t Addend;
      if (auto AddendOrErr = i->getAddend())
        Addend = *AddendOrErr;
      else
        return AddendOrErr.takeError();

      // Advance to the partner relocation; an ADDR64 at the very end of the
      // list cannot form a complete entry.
      ++i;
      if (i == e)
        break;

      // Just check if following relocation is a R_PPC64_TOC
      uint64_t TypeTOC = i->getType();
      if (TypeTOC != ELF::R_PPC64_TOC)
        continue;

      // Finally compares the Symbol value and the target symbol offset
      // to check if this .opd entry refers to the symbol the relocation
      // points to.
      if (Rel.Addend != (int64_t)TargetSymbolOffset)
        continue;

      section_iterator TSI = Obj.section_end();
      if (auto TSIOrErr = TargetSymbol->getSection())
        TSI = *TSIOrErr;
      else
        return TSIOrErr.takeError();
      assert(TSI != Obj.section_end() && "TSI should refer to a valid section");

      bool IsCode = TSI->isText();
      // Emit (or find) the section containing the actual function entry and
      // rewrite Rel to point at it.
      if (auto SectionIDOrErr =
              findOrEmitSection(Obj, *TSI, IsCode, LocalSections))
        Rel.SectionID = *SectionIDOrErr;
      else
        return SectionIDOrErr.takeError();
      Rel.Addend = (intptr_t)Addend;
      return Error::success();
    }
  }
  llvm_unreachable("Attempting to get address of ODP entry!");
}
  697. // Relocation masks following the #lo(value), #hi(value), #ha(value),
  698. // #higher(value), #highera(value), #highest(value), and #highesta(value)
  699. // macros defined in section 4.5.1. Relocation Types of the PPC-elf64abi
  700. // document.
  701. static inline uint16_t applyPPClo(uint64_t value) { return value & 0xffff; }
  702. static inline uint16_t applyPPChi(uint64_t value) {
  703. return (value >> 16) & 0xffff;
  704. }
  705. static inline uint16_t applyPPCha (uint64_t value) {
  706. return ((value + 0x8000) >> 16) & 0xffff;
  707. }
  708. static inline uint16_t applyPPChigher(uint64_t value) {
  709. return (value >> 32) & 0xffff;
  710. }
  711. static inline uint16_t applyPPChighera (uint64_t value) {
  712. return ((value + 0x8000) >> 32) & 0xffff;
  713. }
  714. static inline uint16_t applyPPChighest(uint64_t value) {
  715. return (value >> 48) & 0xffff;
  716. }
  717. static inline uint16_t applyPPChighesta (uint64_t value) {
  718. return ((value + 0x8000) >> 48) & 0xffff;
  719. }
  720. void RuntimeDyldELF::resolvePPC32Relocation(const SectionEntry &Section,
  721. uint64_t Offset, uint64_t Value,
  722. uint32_t Type, int64_t Addend) {
  723. uint8_t *LocalAddress = Section.getAddressWithOffset(Offset);
  724. switch (Type) {
  725. default:
  726. report_fatal_error("Relocation type not implemented yet!");
  727. break;
  728. case ELF::R_PPC_ADDR16_LO:
  729. writeInt16BE(LocalAddress, applyPPClo(Value + Addend));
  730. break;
  731. case ELF::R_PPC_ADDR16_HI:
  732. writeInt16BE(LocalAddress, applyPPChi(Value + Addend));
  733. break;
  734. case ELF::R_PPC_ADDR16_HA:
  735. writeInt16BE(LocalAddress, applyPPCha(Value + Addend));
  736. break;
  737. }
  738. }
// Resolve a relocation for 64-bit PowerPC. Value is the symbol load address
// (S), Addend is A; the patched field lives at Section+Offset and is written
// big-endian.
void RuntimeDyldELF::resolvePPC64Relocation(const SectionEntry &Section,
                                            uint64_t Offset, uint64_t Value,
                                            uint32_t Type, int64_t Addend) {
  uint8_t *LocalAddress = Section.getAddressWithOffset(Offset);
  switch (Type) {
  default:
    report_fatal_error("Relocation type not implemented yet!");
    break;
  case ELF::R_PPC64_ADDR16:
    writeInt16BE(LocalAddress, applyPPClo(Value + Addend));
    break;
  case ELF::R_PPC64_ADDR16_DS:
    // DS-form instructions encode a 4-byte-aligned displacement, so the low
    // two bits are cleared.
    writeInt16BE(LocalAddress, applyPPClo(Value + Addend) & ~3);
    break;
  case ELF::R_PPC64_ADDR16_LO:
    writeInt16BE(LocalAddress, applyPPClo(Value + Addend));
    break;
  case ELF::R_PPC64_ADDR16_LO_DS:
    writeInt16BE(LocalAddress, applyPPClo(Value + Addend) & ~3);
    break;
  case ELF::R_PPC64_ADDR16_HI:
  case ELF::R_PPC64_ADDR16_HIGH:
    writeInt16BE(LocalAddress, applyPPChi(Value + Addend));
    break;
  case ELF::R_PPC64_ADDR16_HA:
  case ELF::R_PPC64_ADDR16_HIGHA:
    writeInt16BE(LocalAddress, applyPPCha(Value + Addend));
    break;
  case ELF::R_PPC64_ADDR16_HIGHER:
    writeInt16BE(LocalAddress, applyPPChigher(Value + Addend));
    break;
  case ELF::R_PPC64_ADDR16_HIGHERA:
    writeInt16BE(LocalAddress, applyPPChighera(Value + Addend));
    break;
  case ELF::R_PPC64_ADDR16_HIGHEST:
    writeInt16BE(LocalAddress, applyPPChighest(Value + Addend));
    break;
  case ELF::R_PPC64_ADDR16_HIGHESTA:
    writeInt16BE(LocalAddress, applyPPChighesta(Value + Addend));
    break;
  case ELF::R_PPC64_ADDR14: {
    assert(((Value + Addend) & 3) == 0);
    // Preserve the AA/LK bits in the branch instruction
    uint8_t aalk = *(LocalAddress + 3);
    writeInt16BE(LocalAddress + 2, (aalk & 3) | ((Value + Addend) & 0xfffc));
  } break;
  case ELF::R_PPC64_REL16_LO: {
    // PC-relative: S + A - P, low half-word.
    uint64_t FinalAddress = Section.getLoadAddressWithOffset(Offset);
    uint64_t Delta = Value - FinalAddress + Addend;
    writeInt16BE(LocalAddress, applyPPClo(Delta));
  } break;
  case ELF::R_PPC64_REL16_HI: {
    uint64_t FinalAddress = Section.getLoadAddressWithOffset(Offset);
    uint64_t Delta = Value - FinalAddress + Addend;
    writeInt16BE(LocalAddress, applyPPChi(Delta));
  } break;
  case ELF::R_PPC64_REL16_HA: {
    uint64_t FinalAddress = Section.getLoadAddressWithOffset(Offset);
    uint64_t Delta = Value - FinalAddress + Addend;
    writeInt16BE(LocalAddress, applyPPCha(Delta));
  } break;
  case ELF::R_PPC64_ADDR32: {
    // 32-bit absolute: must fit in a sign-extended 32-bit field.
    int64_t Result = static_cast<int64_t>(Value + Addend);
    if (SignExtend64<32>(Result) != Result)
      llvm_unreachable("Relocation R_PPC64_ADDR32 overflow");
    writeInt32BE(LocalAddress, Result);
  } break;
  case ELF::R_PPC64_REL24: {
    // 26-bit PC-relative branch displacement (24 bits stored, word-aligned).
    uint64_t FinalAddress = Section.getLoadAddressWithOffset(Offset);
    int64_t delta = static_cast<int64_t>(Value - FinalAddress + Addend);
    if (SignExtend64<26>(delta) != delta)
      llvm_unreachable("Relocation R_PPC64_REL24 overflow");
    // We preserve bits other than LI field, i.e. PO and AA/LK fields.
    uint32_t Inst = readBytesUnaligned(LocalAddress, 4);
    writeInt32BE(LocalAddress, (Inst & 0xFC000003) | (delta & 0x03FFFFFC));
  } break;
  case ELF::R_PPC64_REL32: {
    uint64_t FinalAddress = Section.getLoadAddressWithOffset(Offset);
    int64_t delta = static_cast<int64_t>(Value - FinalAddress + Addend);
    if (SignExtend64<32>(delta) != delta)
      llvm_unreachable("Relocation R_PPC64_REL32 overflow");
    writeInt32BE(LocalAddress, delta);
  } break;
  case ELF::R_PPC64_REL64: {
    uint64_t FinalAddress = Section.getLoadAddressWithOffset(Offset);
    uint64_t Delta = Value - FinalAddress + Addend;
    writeInt64BE(LocalAddress, Delta);
  } break;
  case ELF::R_PPC64_ADDR64:
    writeInt64BE(LocalAddress, Value + Addend);
    break;
  }
}
  832. void RuntimeDyldELF::resolveSystemZRelocation(const SectionEntry &Section,
  833. uint64_t Offset, uint64_t Value,
  834. uint32_t Type, int64_t Addend) {
  835. uint8_t *LocalAddress = Section.getAddressWithOffset(Offset);
  836. switch (Type) {
  837. default:
  838. report_fatal_error("Relocation type not implemented yet!");
  839. break;
  840. case ELF::R_390_PC16DBL:
  841. case ELF::R_390_PLT16DBL: {
  842. int64_t Delta = (Value + Addend) - Section.getLoadAddressWithOffset(Offset);
  843. assert(int16_t(Delta / 2) * 2 == Delta && "R_390_PC16DBL overflow");
  844. writeInt16BE(LocalAddress, Delta / 2);
  845. break;
  846. }
  847. case ELF::R_390_PC32DBL:
  848. case ELF::R_390_PLT32DBL: {
  849. int64_t Delta = (Value + Addend) - Section.getLoadAddressWithOffset(Offset);
  850. assert(int32_t(Delta / 2) * 2 == Delta && "R_390_PC32DBL overflow");
  851. writeInt32BE(LocalAddress, Delta / 2);
  852. break;
  853. }
  854. case ELF::R_390_PC16: {
  855. int64_t Delta = (Value + Addend) - Section.getLoadAddressWithOffset(Offset);
  856. assert(int16_t(Delta) == Delta && "R_390_PC16 overflow");
  857. writeInt16BE(LocalAddress, Delta);
  858. break;
  859. }
  860. case ELF::R_390_PC32: {
  861. int64_t Delta = (Value + Addend) - Section.getLoadAddressWithOffset(Offset);
  862. assert(int32_t(Delta) == Delta && "R_390_PC32 overflow");
  863. writeInt32BE(LocalAddress, Delta);
  864. break;
  865. }
  866. case ELF::R_390_PC64: {
  867. int64_t Delta = (Value + Addend) - Section.getLoadAddressWithOffset(Offset);
  868. writeInt64BE(LocalAddress, Delta);
  869. break;
  870. }
  871. case ELF::R_390_8:
  872. *LocalAddress = (uint8_t)(Value + Addend);
  873. break;
  874. case ELF::R_390_16:
  875. writeInt16BE(LocalAddress, Value + Addend);
  876. break;
  877. case ELF::R_390_32:
  878. writeInt32BE(LocalAddress, Value + Addend);
  879. break;
  880. case ELF::R_390_64:
  881. writeInt64BE(LocalAddress, Value + Addend);
  882. break;
  883. }
  884. }
  885. void RuntimeDyldELF::resolveBPFRelocation(const SectionEntry &Section,
  886. uint64_t Offset, uint64_t Value,
  887. uint32_t Type, int64_t Addend) {
  888. bool isBE = Arch == Triple::bpfeb;
  889. switch (Type) {
  890. default:
  891. report_fatal_error("Relocation type not implemented yet!");
  892. break;
  893. case ELF::R_BPF_NONE:
  894. case ELF::R_BPF_64_64:
  895. case ELF::R_BPF_64_32:
  896. case ELF::R_BPF_64_NODYLD32:
  897. break;
  898. case ELF::R_BPF_64_ABS64: {
  899. write(isBE, Section.getAddressWithOffset(Offset), Value + Addend);
  900. LLVM_DEBUG(dbgs() << "Writing " << format("%p", (Value + Addend)) << " at "
  901. << format("%p\n", Section.getAddressWithOffset(Offset)));
  902. break;
  903. }
  904. case ELF::R_BPF_64_ABS32: {
  905. Value += Addend;
  906. assert(Value <= UINT32_MAX);
  907. write(isBE, Section.getAddressWithOffset(Offset), static_cast<uint32_t>(Value));
  908. LLVM_DEBUG(dbgs() << "Writing " << format("%p", Value) << " at "
  909. << format("%p\n", Section.getAddressWithOffset(Offset)));
  910. break;
  911. }
  912. }
  913. }
  914. // The target location for the relocation is described by RE.SectionID and
  915. // RE.Offset. RE.SectionID can be used to find the SectionEntry. Each
  916. // SectionEntry has three members describing its location.
  917. // SectionEntry::Address is the address at which the section has been loaded
  918. // into memory in the current (host) process. SectionEntry::LoadAddress is the
  919. // address that the section will have in the target process.
  920. // SectionEntry::ObjAddress is the address of the bits for this section in the
  921. // original emitted object image (also in the current address space).
  922. //
  923. // Relocations will be applied as if the section were loaded at
  924. // SectionEntry::LoadAddress, but they will be applied at an address based
  925. // on SectionEntry::Address. SectionEntry::ObjAddress will be used to refer to
  926. // Target memory contents if they are required for value calculations.
  927. //
  928. // The Value parameter here is the load address of the symbol for the
  929. // relocation to be applied. For relocations which refer to symbols in the
  930. // current object Value will be the LoadAddress of the section in which
  931. // the symbol resides (RE.Addend provides additional information about the
  932. // symbol location). For external symbols, Value will be the address of the
  933. // symbol in the target address space.
  934. void RuntimeDyldELF::resolveRelocation(const RelocationEntry &RE,
  935. uint64_t Value) {
  936. const SectionEntry &Section = Sections[RE.SectionID];
  937. return resolveRelocation(Section, RE.Offset, Value, RE.RelType, RE.Addend,
  938. RE.SymOffset, RE.SectionID);
  939. }
  940. void RuntimeDyldELF::resolveRelocation(const SectionEntry &Section,
  941. uint64_t Offset, uint64_t Value,
  942. uint32_t Type, int64_t Addend,
  943. uint64_t SymOffset, SID SectionID) {
  944. switch (Arch) {
  945. case Triple::x86_64:
  946. resolveX86_64Relocation(Section, Offset, Value, Type, Addend, SymOffset);
  947. break;
  948. case Triple::x86:
  949. resolveX86Relocation(Section, Offset, (uint32_t)(Value & 0xffffffffL), Type,
  950. (uint32_t)(Addend & 0xffffffffL));
  951. break;
  952. case Triple::aarch64:
  953. case Triple::aarch64_be:
  954. resolveAArch64Relocation(Section, Offset, Value, Type, Addend);
  955. break;
  956. case Triple::arm: // Fall through.
  957. case Triple::armeb:
  958. case Triple::thumb:
  959. case Triple::thumbeb:
  960. resolveARMRelocation(Section, Offset, (uint32_t)(Value & 0xffffffffL), Type,
  961. (uint32_t)(Addend & 0xffffffffL));
  962. break;
  963. case Triple::ppc: // Fall through.
  964. case Triple::ppcle:
  965. resolvePPC32Relocation(Section, Offset, Value, Type, Addend);
  966. break;
  967. case Triple::ppc64: // Fall through.
  968. case Triple::ppc64le:
  969. resolvePPC64Relocation(Section, Offset, Value, Type, Addend);
  970. break;
  971. case Triple::systemz:
  972. resolveSystemZRelocation(Section, Offset, Value, Type, Addend);
  973. break;
  974. case Triple::bpfel:
  975. case Triple::bpfeb:
  976. resolveBPFRelocation(Section, Offset, Value, Type, Addend);
  977. break;
  978. default:
  979. llvm_unreachable("Unsupported CPU type!");
  980. }
  981. }
  982. void *RuntimeDyldELF::computePlaceholderAddress(unsigned SectionID, uint64_t Offset) const {
  983. return (void *)(Sections[SectionID].getObjAddress() + Offset);
  984. }
  985. void RuntimeDyldELF::processSimpleRelocation(unsigned SectionID, uint64_t Offset, unsigned RelType, RelocationValueRef Value) {
  986. RelocationEntry RE(SectionID, Offset, RelType, Value.Addend, Value.Offset);
  987. if (Value.SymbolName)
  988. addRelocationForSymbol(RE, Value.SymbolName);
  989. else
  990. addRelocationForSection(RE, Value.SectionID);
  991. }
  992. uint32_t RuntimeDyldELF::getMatchingLoRelocation(uint32_t RelType,
  993. bool IsLocal) const {
  994. switch (RelType) {
  995. case ELF::R_MICROMIPS_GOT16:
  996. if (IsLocal)
  997. return ELF::R_MICROMIPS_LO16;
  998. break;
  999. case ELF::R_MICROMIPS_HI16:
  1000. return ELF::R_MICROMIPS_LO16;
  1001. case ELF::R_MIPS_GOT16:
  1002. if (IsLocal)
  1003. return ELF::R_MIPS_LO16;
  1004. break;
  1005. case ELF::R_MIPS_HI16:
  1006. return ELF::R_MIPS_LO16;
  1007. case ELF::R_MIPS_PCHI16:
  1008. return ELF::R_MIPS_PCLO16;
  1009. default:
  1010. break;
  1011. }
  1012. return ELF::R_MIPS_NONE;
  1013. }
  1014. // Sometimes we don't need to create thunk for a branch.
  1015. // This typically happens when branch target is located
  1016. // in the same object file. In such case target is either
  1017. // a weak symbol or symbol in a different executable section.
  1018. // This function checks if branch target is located in the
  1019. // same object file and if distance between source and target
  1020. // fits R_AARCH64_CALL26 relocation. If both conditions are
  1021. // met, it emits direct jump to the target and returns true.
  1022. // Otherwise false is returned and thunk is created.
  1023. bool RuntimeDyldELF::resolveAArch64ShortBranch(
  1024. unsigned SectionID, relocation_iterator RelI,
  1025. const RelocationValueRef &Value) {
  1026. uint64_t Address;
  1027. if (Value.SymbolName) {
  1028. auto Loc = GlobalSymbolTable.find(Value.SymbolName);
  1029. // Don't create direct branch for external symbols.
  1030. if (Loc == GlobalSymbolTable.end())
  1031. return false;
  1032. const auto &SymInfo = Loc->second;
  1033. Address =
  1034. uint64_t(Sections[SymInfo.getSectionID()].getLoadAddressWithOffset(
  1035. SymInfo.getOffset()));
  1036. } else {
  1037. Address = uint64_t(Sections[Value.SectionID].getLoadAddress());
  1038. }
  1039. uint64_t Offset = RelI->getOffset();
  1040. uint64_t SourceAddress = Sections[SectionID].getLoadAddressWithOffset(Offset);
  1041. // R_AARCH64_CALL26 requires immediate to be in range -2^27 <= imm < 2^27
  1042. // If distance between source and target is out of range then we should
  1043. // create thunk.
  1044. if (!isInt<28>(Address + Value.Addend - SourceAddress))
  1045. return false;
  1046. resolveRelocation(Sections[SectionID], Offset, Address, RelI->getType(),
  1047. Value.Addend);
  1048. return true;
  1049. }
  1050. void RuntimeDyldELF::resolveAArch64Branch(unsigned SectionID,
  1051. const RelocationValueRef &Value,
  1052. relocation_iterator RelI,
  1053. StubMap &Stubs) {
  1054. LLVM_DEBUG(dbgs() << "\t\tThis is an AArch64 branch relocation.");
  1055. SectionEntry &Section = Sections[SectionID];
  1056. uint64_t Offset = RelI->getOffset();
  1057. unsigned RelType = RelI->getType();
  1058. // Look for an existing stub.
  1059. StubMap::const_iterator i = Stubs.find(Value);
  1060. if (i != Stubs.end()) {
  1061. resolveRelocation(Section, Offset,
  1062. (uint64_t)Section.getAddressWithOffset(i->second),
  1063. RelType, 0);
  1064. LLVM_DEBUG(dbgs() << " Stub function found\n");
  1065. } else if (!resolveAArch64ShortBranch(SectionID, RelI, Value)) {
  1066. // Create a new stub function.
  1067. LLVM_DEBUG(dbgs() << " Create a new stub function\n");
  1068. Stubs[Value] = Section.getStubOffset();
  1069. uint8_t *StubTargetAddr = createStubFunction(
  1070. Section.getAddressWithOffset(Section.getStubOffset()));
  1071. RelocationEntry REmovz_g3(SectionID, StubTargetAddr - Section.getAddress(),
  1072. ELF::R_AARCH64_MOVW_UABS_G3, Value.Addend);
  1073. RelocationEntry REmovk_g2(SectionID,
  1074. StubTargetAddr - Section.getAddress() + 4,
  1075. ELF::R_AARCH64_MOVW_UABS_G2_NC, Value.Addend);
  1076. RelocationEntry REmovk_g1(SectionID,
  1077. StubTargetAddr - Section.getAddress() + 8,
  1078. ELF::R_AARCH64_MOVW_UABS_G1_NC, Value.Addend);
  1079. RelocationEntry REmovk_g0(SectionID,
  1080. StubTargetAddr - Section.getAddress() + 12,
  1081. ELF::R_AARCH64_MOVW_UABS_G0_NC, Value.Addend);
  1082. if (Value.SymbolName) {
  1083. addRelocationForSymbol(REmovz_g3, Value.SymbolName);
  1084. addRelocationForSymbol(REmovk_g2, Value.SymbolName);
  1085. addRelocationForSymbol(REmovk_g1, Value.SymbolName);
  1086. addRelocationForSymbol(REmovk_g0, Value.SymbolName);
  1087. } else {
  1088. addRelocationForSection(REmovz_g3, Value.SectionID);
  1089. addRelocationForSection(REmovk_g2, Value.SectionID);
  1090. addRelocationForSection(REmovk_g1, Value.SectionID);
  1091. addRelocationForSection(REmovk_g0, Value.SectionID);
  1092. }
  1093. resolveRelocation(Section, Offset,
  1094. reinterpret_cast<uint64_t>(Section.getAddressWithOffset(
  1095. Section.getStubOffset())),
  1096. RelType, 0);
  1097. Section.advanceStubOffset(getMaxStubSize());
  1098. }
  1099. }
  1100. Expected<relocation_iterator>
  1101. RuntimeDyldELF::processRelocationRef(
  1102. unsigned SectionID, relocation_iterator RelI, const ObjectFile &O,
  1103. ObjSectionToIDMap &ObjSectionToID, StubMap &Stubs) {
  1104. const auto &Obj = cast<ELFObjectFileBase>(O);
  1105. uint64_t RelType = RelI->getType();
  1106. int64_t Addend = 0;
  1107. if (Expected<int64_t> AddendOrErr = ELFRelocationRef(*RelI).getAddend())
  1108. Addend = *AddendOrErr;
  1109. else
  1110. consumeError(AddendOrErr.takeError());
  1111. elf_symbol_iterator Symbol = RelI->getSymbol();
  1112. // Obtain the symbol name which is referenced in the relocation
  1113. StringRef TargetName;
  1114. if (Symbol != Obj.symbol_end()) {
  1115. if (auto TargetNameOrErr = Symbol->getName())
  1116. TargetName = *TargetNameOrErr;
  1117. else
  1118. return TargetNameOrErr.takeError();
  1119. }
  1120. LLVM_DEBUG(dbgs() << "\t\tRelType: " << RelType << " Addend: " << Addend
  1121. << " TargetName: " << TargetName << "\n");
  1122. RelocationValueRef Value;
  1123. // First search for the symbol in the local symbol table
  1124. SymbolRef::Type SymType = SymbolRef::ST_Unknown;
  1125. // Search for the symbol in the global symbol table
  1126. RTDyldSymbolTable::const_iterator gsi = GlobalSymbolTable.end();
  1127. if (Symbol != Obj.symbol_end()) {
  1128. gsi = GlobalSymbolTable.find(TargetName.data());
  1129. Expected<SymbolRef::Type> SymTypeOrErr = Symbol->getType();
  1130. if (!SymTypeOrErr) {
  1131. std::string Buf;
  1132. raw_string_ostream OS(Buf);
  1133. logAllUnhandledErrors(SymTypeOrErr.takeError(), OS);
  1134. report_fatal_error(Twine(OS.str()));
  1135. }
  1136. SymType = *SymTypeOrErr;
  1137. }
  1138. if (gsi != GlobalSymbolTable.end()) {
  1139. const auto &SymInfo = gsi->second;
  1140. Value.SectionID = SymInfo.getSectionID();
  1141. Value.Offset = SymInfo.getOffset();
  1142. Value.Addend = SymInfo.getOffset() + Addend;
  1143. } else {
  1144. switch (SymType) {
  1145. case SymbolRef::ST_Debug: {
  1146. // TODO: Now ELF SymbolRef::ST_Debug = STT_SECTION, it's not obviously
  1147. // and can be changed by another developers. Maybe best way is add
  1148. // a new symbol type ST_Section to SymbolRef and use it.
  1149. auto SectionOrErr = Symbol->getSection();
  1150. if (!SectionOrErr) {
  1151. std::string Buf;
  1152. raw_string_ostream OS(Buf);
  1153. logAllUnhandledErrors(SectionOrErr.takeError(), OS);
  1154. report_fatal_error(Twine(OS.str()));
  1155. }
  1156. section_iterator si = *SectionOrErr;
  1157. if (si == Obj.section_end())
  1158. llvm_unreachable("Symbol section not found, bad object file format!");
  1159. LLVM_DEBUG(dbgs() << "\t\tThis is section symbol\n");
  1160. bool isCode = si->isText();
  1161. if (auto SectionIDOrErr = findOrEmitSection(Obj, (*si), isCode,
  1162. ObjSectionToID))
  1163. Value.SectionID = *SectionIDOrErr;
  1164. else
  1165. return SectionIDOrErr.takeError();
  1166. Value.Addend = Addend;
  1167. break;
  1168. }
  1169. case SymbolRef::ST_Data:
  1170. case SymbolRef::ST_Function:
  1171. case SymbolRef::ST_Unknown: {
  1172. Value.SymbolName = TargetName.data();
  1173. Value.Addend = Addend;
  1174. // Absolute relocations will have a zero symbol ID (STN_UNDEF), which
  1175. // will manifest here as a NULL symbol name.
  1176. // We can set this as a valid (but empty) symbol name, and rely
  1177. // on addRelocationForSymbol to handle this.
  1178. if (!Value.SymbolName)
  1179. Value.SymbolName = "";
  1180. break;
  1181. }
  1182. default:
  1183. llvm_unreachable("Unresolved symbol type!");
  1184. break;
  1185. }
  1186. }
  1187. uint64_t Offset = RelI->getOffset();
  1188. LLVM_DEBUG(dbgs() << "\t\tSectionID: " << SectionID << " Offset: " << Offset
  1189. << "\n");
  1190. if ((Arch == Triple::aarch64 || Arch == Triple::aarch64_be)) {
  1191. if ((RelType == ELF::R_AARCH64_CALL26 ||
  1192. RelType == ELF::R_AARCH64_JUMP26) &&
  1193. MemMgr.allowStubAllocation()) {
  1194. resolveAArch64Branch(SectionID, Value, RelI, Stubs);
  1195. } else if (RelType == ELF::R_AARCH64_ADR_GOT_PAGE) {
  1196. // Create new GOT entry or find existing one. If GOT entry is
  1197. // to be created, then we also emit ABS64 relocation for it.
  1198. uint64_t GOTOffset = findOrAllocGOTEntry(Value, ELF::R_AARCH64_ABS64);
  1199. resolveGOTOffsetRelocation(SectionID, Offset, GOTOffset + Addend,
  1200. ELF::R_AARCH64_ADR_PREL_PG_HI21);
  1201. } else if (RelType == ELF::R_AARCH64_LD64_GOT_LO12_NC) {
  1202. uint64_t GOTOffset = findOrAllocGOTEntry(Value, ELF::R_AARCH64_ABS64);
  1203. resolveGOTOffsetRelocation(SectionID, Offset, GOTOffset + Addend,
  1204. ELF::R_AARCH64_LDST64_ABS_LO12_NC);
  1205. } else {
  1206. processSimpleRelocation(SectionID, Offset, RelType, Value);
  1207. }
  1208. } else if (Arch == Triple::arm) {
  1209. if (RelType == ELF::R_ARM_PC24 || RelType == ELF::R_ARM_CALL ||
  1210. RelType == ELF::R_ARM_JUMP24) {
  1211. // This is an ARM branch relocation, need to use a stub function.
  1212. LLVM_DEBUG(dbgs() << "\t\tThis is an ARM branch relocation.\n");
  1213. SectionEntry &Section = Sections[SectionID];
  1214. // Look for an existing stub.
  1215. StubMap::const_iterator i = Stubs.find(Value);
  1216. if (i != Stubs.end()) {
  1217. resolveRelocation(
  1218. Section, Offset,
  1219. reinterpret_cast<uint64_t>(Section.getAddressWithOffset(i->second)),
  1220. RelType, 0);
  1221. LLVM_DEBUG(dbgs() << " Stub function found\n");
  1222. } else {
  1223. // Create a new stub function.
  1224. LLVM_DEBUG(dbgs() << " Create a new stub function\n");
  1225. Stubs[Value] = Section.getStubOffset();
  1226. uint8_t *StubTargetAddr = createStubFunction(
  1227. Section.getAddressWithOffset(Section.getStubOffset()));
  1228. RelocationEntry RE(SectionID, StubTargetAddr - Section.getAddress(),
  1229. ELF::R_ARM_ABS32, Value.Addend);
  1230. if (Value.SymbolName)
  1231. addRelocationForSymbol(RE, Value.SymbolName);
  1232. else
  1233. addRelocationForSection(RE, Value.SectionID);
  1234. resolveRelocation(Section, Offset, reinterpret_cast<uint64_t>(
  1235. Section.getAddressWithOffset(
  1236. Section.getStubOffset())),
  1237. RelType, 0);
  1238. Section.advanceStubOffset(getMaxStubSize());
  1239. }
  1240. } else {
  1241. uint32_t *Placeholder =
  1242. reinterpret_cast<uint32_t*>(computePlaceholderAddress(SectionID, Offset));
  1243. if (RelType == ELF::R_ARM_PREL31 || RelType == ELF::R_ARM_TARGET1 ||
  1244. RelType == ELF::R_ARM_ABS32) {
  1245. Value.Addend += *Placeholder;
  1246. } else if (RelType == ELF::R_ARM_MOVW_ABS_NC || RelType == ELF::R_ARM_MOVT_ABS) {
  1247. // See ELF for ARM documentation
  1248. Value.Addend += (int16_t)((*Placeholder & 0xFFF) | (((*Placeholder >> 16) & 0xF) << 12));
  1249. }
  1250. processSimpleRelocation(SectionID, Offset, RelType, Value);
  1251. }
  1252. } else if (IsMipsO32ABI) {
  1253. uint8_t *Placeholder = reinterpret_cast<uint8_t *>(
  1254. computePlaceholderAddress(SectionID, Offset));
  1255. uint32_t Opcode = readBytesUnaligned(Placeholder, 4);
  1256. if (RelType == ELF::R_MIPS_26) {
  1257. // This is an Mips branch relocation, need to use a stub function.
  1258. LLVM_DEBUG(dbgs() << "\t\tThis is a Mips branch relocation.");
  1259. SectionEntry &Section = Sections[SectionID];
  1260. // Extract the addend from the instruction.
  1261. // We shift up by two since the Value will be down shifted again
  1262. // when applying the relocation.
  1263. uint32_t Addend = (Opcode & 0x03ffffff) << 2;
  1264. Value.Addend += Addend;
  1265. // Look up for existing stub.
  1266. StubMap::const_iterator i = Stubs.find(Value);
  1267. if (i != Stubs.end()) {
  1268. RelocationEntry RE(SectionID, Offset, RelType, i->second);
  1269. addRelocationForSection(RE, SectionID);
  1270. LLVM_DEBUG(dbgs() << " Stub function found\n");
  1271. } else {
  1272. // Create a new stub function.
  1273. LLVM_DEBUG(dbgs() << " Create a new stub function\n");
  1274. Stubs[Value] = Section.getStubOffset();
  1275. unsigned AbiVariant = Obj.getPlatformFlags();
  1276. uint8_t *StubTargetAddr = createStubFunction(
  1277. Section.getAddressWithOffset(Section.getStubOffset()), AbiVariant);
  1278. // Creating Hi and Lo relocations for the filled stub instructions.
  1279. RelocationEntry REHi(SectionID, StubTargetAddr - Section.getAddress(),
  1280. ELF::R_MIPS_HI16, Value.Addend);
  1281. RelocationEntry RELo(SectionID,
  1282. StubTargetAddr - Section.getAddress() + 4,
  1283. ELF::R_MIPS_LO16, Value.Addend);
  1284. if (Value.SymbolName) {
  1285. addRelocationForSymbol(REHi, Value.SymbolName);
  1286. addRelocationForSymbol(RELo, Value.SymbolName);
  1287. } else {
  1288. addRelocationForSection(REHi, Value.SectionID);
  1289. addRelocationForSection(RELo, Value.SectionID);
  1290. }
  1291. RelocationEntry RE(SectionID, Offset, RelType, Section.getStubOffset());
  1292. addRelocationForSection(RE, SectionID);
  1293. Section.advanceStubOffset(getMaxStubSize());
  1294. }
  1295. } else if (RelType == ELF::R_MIPS_HI16 || RelType == ELF::R_MIPS_PCHI16) {
  1296. int64_t Addend = (Opcode & 0x0000ffff) << 16;
  1297. RelocationEntry RE(SectionID, Offset, RelType, Addend);
  1298. PendingRelocs.push_back(std::make_pair(Value, RE));
  1299. } else if (RelType == ELF::R_MIPS_LO16 || RelType == ELF::R_MIPS_PCLO16) {
  1300. int64_t Addend = Value.Addend + SignExtend32<16>(Opcode & 0x0000ffff);
  1301. for (auto I = PendingRelocs.begin(); I != PendingRelocs.end();) {
  1302. const RelocationValueRef &MatchingValue = I->first;
  1303. RelocationEntry &Reloc = I->second;
  1304. if (MatchingValue == Value &&
  1305. RelType == getMatchingLoRelocation(Reloc.RelType) &&
  1306. SectionID == Reloc.SectionID) {
  1307. Reloc.Addend += Addend;
  1308. if (Value.SymbolName)
  1309. addRelocationForSymbol(Reloc, Value.SymbolName);
  1310. else
  1311. addRelocationForSection(Reloc, Value.SectionID);
  1312. I = PendingRelocs.erase(I);
  1313. } else
  1314. ++I;
  1315. }
  1316. RelocationEntry RE(SectionID, Offset, RelType, Addend);
  1317. if (Value.SymbolName)
  1318. addRelocationForSymbol(RE, Value.SymbolName);
  1319. else
  1320. addRelocationForSection(RE, Value.SectionID);
  1321. } else {
  1322. if (RelType == ELF::R_MIPS_32)
  1323. Value.Addend += Opcode;
  1324. else if (RelType == ELF::R_MIPS_PC16)
  1325. Value.Addend += SignExtend32<18>((Opcode & 0x0000ffff) << 2);
  1326. else if (RelType == ELF::R_MIPS_PC19_S2)
  1327. Value.Addend += SignExtend32<21>((Opcode & 0x0007ffff) << 2);
  1328. else if (RelType == ELF::R_MIPS_PC21_S2)
  1329. Value.Addend += SignExtend32<23>((Opcode & 0x001fffff) << 2);
  1330. else if (RelType == ELF::R_MIPS_PC26_S2)
  1331. Value.Addend += SignExtend32<28>((Opcode & 0x03ffffff) << 2);
  1332. processSimpleRelocation(SectionID, Offset, RelType, Value);
  1333. }
  1334. } else if (IsMipsN32ABI || IsMipsN64ABI) {
  1335. uint32_t r_type = RelType & 0xff;
  1336. RelocationEntry RE(SectionID, Offset, RelType, Value.Addend);
  1337. if (r_type == ELF::R_MIPS_CALL16 || r_type == ELF::R_MIPS_GOT_PAGE
  1338. || r_type == ELF::R_MIPS_GOT_DISP) {
  1339. StringMap<uint64_t>::iterator i = GOTSymbolOffsets.find(TargetName);
  1340. if (i != GOTSymbolOffsets.end())
  1341. RE.SymOffset = i->second;
  1342. else {
  1343. RE.SymOffset = allocateGOTEntries(1);
  1344. GOTSymbolOffsets[TargetName] = RE.SymOffset;
  1345. }
  1346. if (Value.SymbolName)
  1347. addRelocationForSymbol(RE, Value.SymbolName);
  1348. else
  1349. addRelocationForSection(RE, Value.SectionID);
  1350. } else if (RelType == ELF::R_MIPS_26) {
  1351. // This is an Mips branch relocation, need to use a stub function.
  1352. LLVM_DEBUG(dbgs() << "\t\tThis is a Mips branch relocation.");
  1353. SectionEntry &Section = Sections[SectionID];
  1354. // Look up for existing stub.
  1355. StubMap::const_iterator i = Stubs.find(Value);
  1356. if (i != Stubs.end()) {
  1357. RelocationEntry RE(SectionID, Offset, RelType, i->second);
  1358. addRelocationForSection(RE, SectionID);
  1359. LLVM_DEBUG(dbgs() << " Stub function found\n");
  1360. } else {
  1361. // Create a new stub function.
  1362. LLVM_DEBUG(dbgs() << " Create a new stub function\n");
  1363. Stubs[Value] = Section.getStubOffset();
  1364. unsigned AbiVariant = Obj.getPlatformFlags();
  1365. uint8_t *StubTargetAddr = createStubFunction(
  1366. Section.getAddressWithOffset(Section.getStubOffset()), AbiVariant);
  1367. if (IsMipsN32ABI) {
  1368. // Creating Hi and Lo relocations for the filled stub instructions.
  1369. RelocationEntry REHi(SectionID, StubTargetAddr - Section.getAddress(),
  1370. ELF::R_MIPS_HI16, Value.Addend);
  1371. RelocationEntry RELo(SectionID,
  1372. StubTargetAddr - Section.getAddress() + 4,
  1373. ELF::R_MIPS_LO16, Value.Addend);
  1374. if (Value.SymbolName) {
  1375. addRelocationForSymbol(REHi, Value.SymbolName);
  1376. addRelocationForSymbol(RELo, Value.SymbolName);
  1377. } else {
  1378. addRelocationForSection(REHi, Value.SectionID);
  1379. addRelocationForSection(RELo, Value.SectionID);
  1380. }
  1381. } else {
  1382. // Creating Highest, Higher, Hi and Lo relocations for the filled stub
  1383. // instructions.
  1384. RelocationEntry REHighest(SectionID,
  1385. StubTargetAddr - Section.getAddress(),
  1386. ELF::R_MIPS_HIGHEST, Value.Addend);
  1387. RelocationEntry REHigher(SectionID,
  1388. StubTargetAddr - Section.getAddress() + 4,
  1389. ELF::R_MIPS_HIGHER, Value.Addend);
  1390. RelocationEntry REHi(SectionID,
  1391. StubTargetAddr - Section.getAddress() + 12,
  1392. ELF::R_MIPS_HI16, Value.Addend);
  1393. RelocationEntry RELo(SectionID,
  1394. StubTargetAddr - Section.getAddress() + 20,
  1395. ELF::R_MIPS_LO16, Value.Addend);
  1396. if (Value.SymbolName) {
  1397. addRelocationForSymbol(REHighest, Value.SymbolName);
  1398. addRelocationForSymbol(REHigher, Value.SymbolName);
  1399. addRelocationForSymbol(REHi, Value.SymbolName);
  1400. addRelocationForSymbol(RELo, Value.SymbolName);
  1401. } else {
  1402. addRelocationForSection(REHighest, Value.SectionID);
  1403. addRelocationForSection(REHigher, Value.SectionID);
  1404. addRelocationForSection(REHi, Value.SectionID);
  1405. addRelocationForSection(RELo, Value.SectionID);
  1406. }
  1407. }
  1408. RelocationEntry RE(SectionID, Offset, RelType, Section.getStubOffset());
  1409. addRelocationForSection(RE, SectionID);
  1410. Section.advanceStubOffset(getMaxStubSize());
  1411. }
  1412. } else {
  1413. processSimpleRelocation(SectionID, Offset, RelType, Value);
  1414. }
  1415. } else if (Arch == Triple::ppc64 || Arch == Triple::ppc64le) {
  1416. if (RelType == ELF::R_PPC64_REL24) {
  1417. // Determine ABI variant in use for this object.
  1418. unsigned AbiVariant = Obj.getPlatformFlags();
  1419. AbiVariant &= ELF::EF_PPC64_ABI;
  1420. // A PPC branch relocation will need a stub function if the target is
  1421. // an external symbol (either Value.SymbolName is set, or SymType is
  1422. // Symbol::ST_Unknown) or if the target address is not within the
  1423. // signed 24-bits branch address.
  1424. SectionEntry &Section = Sections[SectionID];
  1425. uint8_t *Target = Section.getAddressWithOffset(Offset);
  1426. bool RangeOverflow = false;
  1427. bool IsExtern = Value.SymbolName || SymType == SymbolRef::ST_Unknown;
  1428. if (!IsExtern) {
  1429. if (AbiVariant != 2) {
  1430. // In the ELFv1 ABI, a function call may point to the .opd entry,
  1431. // so the final symbol value is calculated based on the relocation
  1432. // values in the .opd section.
  1433. if (auto Err = findOPDEntrySection(Obj, ObjSectionToID, Value))
  1434. return std::move(Err);
  1435. } else {
  1436. // In the ELFv2 ABI, a function symbol may provide a local entry
  1437. // point, which must be used for direct calls.
  1438. if (Value.SectionID == SectionID){
  1439. uint8_t SymOther = Symbol->getOther();
  1440. Value.Addend += ELF::decodePPC64LocalEntryOffset(SymOther);
  1441. }
  1442. }
  1443. uint8_t *RelocTarget =
  1444. Sections[Value.SectionID].getAddressWithOffset(Value.Addend);
  1445. int64_t delta = static_cast<int64_t>(Target - RelocTarget);
  1446. // If it is within 26-bits branch range, just set the branch target
  1447. if (SignExtend64<26>(delta) != delta) {
  1448. RangeOverflow = true;
  1449. } else if ((AbiVariant != 2) ||
  1450. (AbiVariant == 2 && Value.SectionID == SectionID)) {
  1451. RelocationEntry RE(SectionID, Offset, RelType, Value.Addend);
  1452. addRelocationForSection(RE, Value.SectionID);
  1453. }
  1454. }
  1455. if (IsExtern || (AbiVariant == 2 && Value.SectionID != SectionID) ||
  1456. RangeOverflow) {
  1457. // It is an external symbol (either Value.SymbolName is set, or
  1458. // SymType is SymbolRef::ST_Unknown) or out of range.
  1459. StubMap::const_iterator i = Stubs.find(Value);
  1460. if (i != Stubs.end()) {
  1461. // Symbol function stub already created, just relocate to it
  1462. resolveRelocation(Section, Offset,
  1463. reinterpret_cast<uint64_t>(
  1464. Section.getAddressWithOffset(i->second)),
  1465. RelType, 0);
  1466. LLVM_DEBUG(dbgs() << " Stub function found\n");
  1467. } else {
  1468. // Create a new stub function.
  1469. LLVM_DEBUG(dbgs() << " Create a new stub function\n");
  1470. Stubs[Value] = Section.getStubOffset();
  1471. uint8_t *StubTargetAddr = createStubFunction(
  1472. Section.getAddressWithOffset(Section.getStubOffset()),
  1473. AbiVariant);
  1474. RelocationEntry RE(SectionID, StubTargetAddr - Section.getAddress(),
  1475. ELF::R_PPC64_ADDR64, Value.Addend);
  1476. // Generates the 64-bits address loads as exemplified in section
  1477. // 4.5.1 in PPC64 ELF ABI. Note that the relocations need to
  1478. // apply to the low part of the instructions, so we have to update
  1479. // the offset according to the target endianness.
  1480. uint64_t StubRelocOffset = StubTargetAddr - Section.getAddress();
  1481. if (!IsTargetLittleEndian)
  1482. StubRelocOffset += 2;
  1483. RelocationEntry REhst(SectionID, StubRelocOffset + 0,
  1484. ELF::R_PPC64_ADDR16_HIGHEST, Value.Addend);
  1485. RelocationEntry REhr(SectionID, StubRelocOffset + 4,
  1486. ELF::R_PPC64_ADDR16_HIGHER, Value.Addend);
  1487. RelocationEntry REh(SectionID, StubRelocOffset + 12,
  1488. ELF::R_PPC64_ADDR16_HI, Value.Addend);
  1489. RelocationEntry REl(SectionID, StubRelocOffset + 16,
  1490. ELF::R_PPC64_ADDR16_LO, Value.Addend);
  1491. if (Value.SymbolName) {
  1492. addRelocationForSymbol(REhst, Value.SymbolName);
  1493. addRelocationForSymbol(REhr, Value.SymbolName);
  1494. addRelocationForSymbol(REh, Value.SymbolName);
  1495. addRelocationForSymbol(REl, Value.SymbolName);
  1496. } else {
  1497. addRelocationForSection(REhst, Value.SectionID);
  1498. addRelocationForSection(REhr, Value.SectionID);
  1499. addRelocationForSection(REh, Value.SectionID);
  1500. addRelocationForSection(REl, Value.SectionID);
  1501. }
  1502. resolveRelocation(Section, Offset, reinterpret_cast<uint64_t>(
  1503. Section.getAddressWithOffset(
  1504. Section.getStubOffset())),
  1505. RelType, 0);
  1506. Section.advanceStubOffset(getMaxStubSize());
  1507. }
  1508. if (IsExtern || (AbiVariant == 2 && Value.SectionID != SectionID)) {
  1509. // Restore the TOC for external calls
  1510. if (AbiVariant == 2)
  1511. writeInt32BE(Target + 4, 0xE8410018); // ld r2,24(r1)
  1512. else
  1513. writeInt32BE(Target + 4, 0xE8410028); // ld r2,40(r1)
  1514. }
  1515. }
  1516. } else if (RelType == ELF::R_PPC64_TOC16 ||
  1517. RelType == ELF::R_PPC64_TOC16_DS ||
  1518. RelType == ELF::R_PPC64_TOC16_LO ||
  1519. RelType == ELF::R_PPC64_TOC16_LO_DS ||
  1520. RelType == ELF::R_PPC64_TOC16_HI ||
  1521. RelType == ELF::R_PPC64_TOC16_HA) {
  1522. // These relocations are supposed to subtract the TOC address from
  1523. // the final value. This does not fit cleanly into the RuntimeDyld
  1524. // scheme, since there may be *two* sections involved in determining
  1525. // the relocation value (the section of the symbol referred to by the
  1526. // relocation, and the TOC section associated with the current module).
  1527. //
  1528. // Fortunately, these relocations are currently only ever generated
  1529. // referring to symbols that themselves reside in the TOC, which means
  1530. // that the two sections are actually the same. Thus they cancel out
  1531. // and we can immediately resolve the relocation right now.
  1532. switch (RelType) {
  1533. case ELF::R_PPC64_TOC16: RelType = ELF::R_PPC64_ADDR16; break;
  1534. case ELF::R_PPC64_TOC16_DS: RelType = ELF::R_PPC64_ADDR16_DS; break;
  1535. case ELF::R_PPC64_TOC16_LO: RelType = ELF::R_PPC64_ADDR16_LO; break;
  1536. case ELF::R_PPC64_TOC16_LO_DS: RelType = ELF::R_PPC64_ADDR16_LO_DS; break;
  1537. case ELF::R_PPC64_TOC16_HI: RelType = ELF::R_PPC64_ADDR16_HI; break;
  1538. case ELF::R_PPC64_TOC16_HA: RelType = ELF::R_PPC64_ADDR16_HA; break;
  1539. default: llvm_unreachable("Wrong relocation type.");
  1540. }
  1541. RelocationValueRef TOCValue;
  1542. if (auto Err = findPPC64TOCSection(Obj, ObjSectionToID, TOCValue))
  1543. return std::move(Err);
  1544. if (Value.SymbolName || Value.SectionID != TOCValue.SectionID)
  1545. llvm_unreachable("Unsupported TOC relocation.");
  1546. Value.Addend -= TOCValue.Addend;
  1547. resolveRelocation(Sections[SectionID], Offset, Value.Addend, RelType, 0);
  1548. } else {
  1549. // There are two ways to refer to the TOC address directly: either
  1550. // via a ELF::R_PPC64_TOC relocation (where both symbol and addend are
  1551. // ignored), or via any relocation that refers to the magic ".TOC."
  1552. // symbols (in which case the addend is respected).
  1553. if (RelType == ELF::R_PPC64_TOC) {
  1554. RelType = ELF::R_PPC64_ADDR64;
  1555. if (auto Err = findPPC64TOCSection(Obj, ObjSectionToID, Value))
  1556. return std::move(Err);
  1557. } else if (TargetName == ".TOC.") {
  1558. if (auto Err = findPPC64TOCSection(Obj, ObjSectionToID, Value))
  1559. return std::move(Err);
  1560. Value.Addend += Addend;
  1561. }
  1562. RelocationEntry RE(SectionID, Offset, RelType, Value.Addend);
  1563. if (Value.SymbolName)
  1564. addRelocationForSymbol(RE, Value.SymbolName);
  1565. else
  1566. addRelocationForSection(RE, Value.SectionID);
  1567. }
  1568. } else if (Arch == Triple::systemz &&
  1569. (RelType == ELF::R_390_PLT32DBL || RelType == ELF::R_390_GOTENT)) {
  1570. // Create function stubs for both PLT and GOT references, regardless of
  1571. // whether the GOT reference is to data or code. The stub contains the
  1572. // full address of the symbol, as needed by GOT references, and the
  1573. // executable part only adds an overhead of 8 bytes.
  1574. //
  1575. // We could try to conserve space by allocating the code and data
  1576. // parts of the stub separately. However, as things stand, we allocate
  1577. // a stub for every relocation, so using a GOT in JIT code should be
  1578. // no less space efficient than using an explicit constant pool.
  1579. LLVM_DEBUG(dbgs() << "\t\tThis is a SystemZ indirect relocation.");
  1580. SectionEntry &Section = Sections[SectionID];
  1581. // Look for an existing stub.
  1582. StubMap::const_iterator i = Stubs.find(Value);
  1583. uintptr_t StubAddress;
  1584. if (i != Stubs.end()) {
  1585. StubAddress = uintptr_t(Section.getAddressWithOffset(i->second));
  1586. LLVM_DEBUG(dbgs() << " Stub function found\n");
  1587. } else {
  1588. // Create a new stub function.
  1589. LLVM_DEBUG(dbgs() << " Create a new stub function\n");
  1590. uintptr_t BaseAddress = uintptr_t(Section.getAddress());
  1591. StubAddress =
  1592. alignTo(BaseAddress + Section.getStubOffset(), getStubAlignment());
  1593. unsigned StubOffset = StubAddress - BaseAddress;
  1594. Stubs[Value] = StubOffset;
  1595. createStubFunction((uint8_t *)StubAddress);
  1596. RelocationEntry RE(SectionID, StubOffset + 8, ELF::R_390_64,
  1597. Value.Offset);
  1598. if (Value.SymbolName)
  1599. addRelocationForSymbol(RE, Value.SymbolName);
  1600. else
  1601. addRelocationForSection(RE, Value.SectionID);
  1602. Section.advanceStubOffset(getMaxStubSize());
  1603. }
  1604. if (RelType == ELF::R_390_GOTENT)
  1605. resolveRelocation(Section, Offset, StubAddress + 8, ELF::R_390_PC32DBL,
  1606. Addend);
  1607. else
  1608. resolveRelocation(Section, Offset, StubAddress, RelType, Addend);
  1609. } else if (Arch == Triple::x86_64) {
  1610. if (RelType == ELF::R_X86_64_PLT32) {
  1611. // The way the PLT relocations normally work is that the linker allocates
  1612. // the
  1613. // PLT and this relocation makes a PC-relative call into the PLT. The PLT
  1614. // entry will then jump to an address provided by the GOT. On first call,
  1615. // the
  1616. // GOT address will point back into PLT code that resolves the symbol. After
  1617. // the first call, the GOT entry points to the actual function.
  1618. //
  1619. // For local functions we're ignoring all of that here and just replacing
  1620. // the PLT32 relocation type with PC32, which will translate the relocation
  1621. // into a PC-relative call directly to the function. For external symbols we
  1622. // can't be sure the function will be within 2^32 bytes of the call site, so
  1623. // we need to create a stub, which calls into the GOT. This case is
  1624. // equivalent to the usual PLT implementation except that we use the stub
  1625. // mechanism in RuntimeDyld (which puts stubs at the end of the section)
  1626. // rather than allocating a PLT section.
  1627. if (Value.SymbolName && MemMgr.allowStubAllocation()) {
  1628. // This is a call to an external function.
  1629. // Look for an existing stub.
  1630. SectionEntry *Section = &Sections[SectionID];
  1631. StubMap::const_iterator i = Stubs.find(Value);
  1632. uintptr_t StubAddress;
  1633. if (i != Stubs.end()) {
  1634. StubAddress = uintptr_t(Section->getAddress()) + i->second;
  1635. LLVM_DEBUG(dbgs() << " Stub function found\n");
  1636. } else {
  1637. // Create a new stub function (equivalent to a PLT entry).
  1638. LLVM_DEBUG(dbgs() << " Create a new stub function\n");
  1639. uintptr_t BaseAddress = uintptr_t(Section->getAddress());
  1640. StubAddress = alignTo(BaseAddress + Section->getStubOffset(),
  1641. getStubAlignment());
  1642. unsigned StubOffset = StubAddress - BaseAddress;
  1643. Stubs[Value] = StubOffset;
  1644. createStubFunction((uint8_t *)StubAddress);
  1645. // Bump our stub offset counter
  1646. Section->advanceStubOffset(getMaxStubSize());
  1647. // Allocate a GOT Entry
  1648. uint64_t GOTOffset = allocateGOTEntries(1);
  1649. // This potentially creates a new Section which potentially
  1650. // invalidates the Section pointer, so reload it.
  1651. Section = &Sections[SectionID];
  1652. // The load of the GOT address has an addend of -4
  1653. resolveGOTOffsetRelocation(SectionID, StubOffset + 2, GOTOffset - 4,
  1654. ELF::R_X86_64_PC32);
  1655. // Fill in the value of the symbol we're targeting into the GOT
  1656. addRelocationForSymbol(
  1657. computeGOTOffsetRE(GOTOffset, 0, ELF::R_X86_64_64),
  1658. Value.SymbolName);
  1659. }
  1660. // Make the target call a call into the stub table.
  1661. resolveRelocation(*Section, Offset, StubAddress, ELF::R_X86_64_PC32,
  1662. Addend);
  1663. } else {
  1664. Value.Addend += support::ulittle32_t::ref(
  1665. computePlaceholderAddress(SectionID, Offset));
  1666. processSimpleRelocation(SectionID, Offset, ELF::R_X86_64_PC32, Value);
  1667. }
  1668. } else if (RelType == ELF::R_X86_64_GOTPCREL ||
  1669. RelType == ELF::R_X86_64_GOTPCRELX ||
  1670. RelType == ELF::R_X86_64_REX_GOTPCRELX) {
  1671. uint64_t GOTOffset = allocateGOTEntries(1);
  1672. resolveGOTOffsetRelocation(SectionID, Offset, GOTOffset + Addend,
  1673. ELF::R_X86_64_PC32);
  1674. // Fill in the value of the symbol we're targeting into the GOT
  1675. RelocationEntry RE =
  1676. computeGOTOffsetRE(GOTOffset, Value.Offset, ELF::R_X86_64_64);
  1677. if (Value.SymbolName)
  1678. addRelocationForSymbol(RE, Value.SymbolName);
  1679. else
  1680. addRelocationForSection(RE, Value.SectionID);
  1681. } else if (RelType == ELF::R_X86_64_GOT64) {
  1682. // Fill in a 64-bit GOT offset.
  1683. uint64_t GOTOffset = allocateGOTEntries(1);
  1684. resolveRelocation(Sections[SectionID], Offset, GOTOffset,
  1685. ELF::R_X86_64_64, 0);
  1686. // Fill in the value of the symbol we're targeting into the GOT
  1687. RelocationEntry RE =
  1688. computeGOTOffsetRE(GOTOffset, Value.Offset, ELF::R_X86_64_64);
  1689. if (Value.SymbolName)
  1690. addRelocationForSymbol(RE, Value.SymbolName);
  1691. else
  1692. addRelocationForSection(RE, Value.SectionID);
  1693. } else if (RelType == ELF::R_X86_64_GOTPC32) {
  1694. // Materialize the address of the base of the GOT relative to the PC.
  1695. // This doesn't create a GOT entry, but it does mean we need a GOT
  1696. // section.
  1697. (void)allocateGOTEntries(0);
  1698. resolveGOTOffsetRelocation(SectionID, Offset, Addend, ELF::R_X86_64_PC32);
  1699. } else if (RelType == ELF::R_X86_64_GOTPC64) {
  1700. (void)allocateGOTEntries(0);
  1701. resolveGOTOffsetRelocation(SectionID, Offset, Addend, ELF::R_X86_64_PC64);
  1702. } else if (RelType == ELF::R_X86_64_GOTOFF64) {
  1703. // GOTOFF relocations ultimately require a section difference relocation.
  1704. (void)allocateGOTEntries(0);
  1705. processSimpleRelocation(SectionID, Offset, RelType, Value);
  1706. } else if (RelType == ELF::R_X86_64_PC32) {
  1707. Value.Addend += support::ulittle32_t::ref(computePlaceholderAddress(SectionID, Offset));
  1708. processSimpleRelocation(SectionID, Offset, RelType, Value);
  1709. } else if (RelType == ELF::R_X86_64_PC64) {
  1710. Value.Addend += support::ulittle64_t::ref(computePlaceholderAddress(SectionID, Offset));
  1711. processSimpleRelocation(SectionID, Offset, RelType, Value);
  1712. } else if (RelType == ELF::R_X86_64_GOTTPOFF) {
  1713. processX86_64GOTTPOFFRelocation(SectionID, Offset, Value, Addend);
  1714. } else if (RelType == ELF::R_X86_64_TLSGD ||
  1715. RelType == ELF::R_X86_64_TLSLD) {
  1716. // The next relocation must be the relocation for __tls_get_addr.
  1717. ++RelI;
  1718. auto &GetAddrRelocation = *RelI;
  1719. processX86_64TLSRelocation(SectionID, Offset, RelType, Value, Addend,
  1720. GetAddrRelocation);
  1721. } else {
  1722. processSimpleRelocation(SectionID, Offset, RelType, Value);
  1723. }
  1724. } else {
  1725. if (Arch == Triple::x86) {
  1726. Value.Addend += support::ulittle32_t::ref(computePlaceholderAddress(SectionID, Offset));
  1727. }
  1728. processSimpleRelocation(SectionID, Offset, RelType, Value);
  1729. }
  1730. return ++RelI;
  1731. }
// Process an R_X86_64_GOTTPOFF relocation (Initial Exec TLS model).
//
// Since RuntimeDyld links statically with no extra DSOs, the Initial Exec
// GOT indirection can usually be relaxed to a Local Exec access. This
// follows the "x86-64 Linker Optimizations" section of the TLS spec
// (Drepper, "ELF Handling For Thread-Local Storage"): the instruction
// bytes around the relocation are matched against the known IE code
// sequences and, on a match, rewritten in place to the direct %fs-relative
// form with an R_X86_64_TPOFF32 relocation. If no known sequence matches,
// the relocation falls back to a real GOT entry holding a TPOFF64 value.
//
// \param SectionID  Section containing the relocation site.
// \param Offset     Offset of the relocation within the section (points at
//                   the 32-bit GOT-relative displacement field).
// \param Value      Resolved target (symbol name or section + addend).
// \param Addend     Original PC-relative addend of the GOTTPOFF relocation.
void RuntimeDyldELF::processX86_64GOTTPOFFRelocation(unsigned SectionID,
                                                     uint64_t Offset,
                                                     RelocationValueRef Value,
                                                     int64_t Addend) {
  // Use the approach from "x86-64 Linker Optimizations" from the TLS spec
  // to replace the GOTTPOFF relocation with a TPOFF relocation. The spec
  // only mentions one optimization even though there are two different
  // code sequences for the Initial Exec TLS Model. We match the code to
  // find out which one was used.

  // A possible TLS code sequence and its replacement.
  struct CodeSequence {
    // The expected code sequence
    ArrayRef<uint8_t> ExpectedCodeSequence;
    // The negative offset of the GOTTPOFF relocation to the beginning of
    // the sequence
    uint64_t TLSSequenceOffset;
    // The new code sequence
    ArrayRef<uint8_t> NewCodeSequence;
    // The offset of the new TPOFF relocation
    uint64_t TpoffRelocationOffset;
  };

  std::array<CodeSequence, 2> CodeSequences;

  // Initial Exec Code Model Sequence
  {
    // NOTE: the initializer_lists are 'static' so the ArrayRefs built from
    // them remain valid for the lifetime of this function call.
    static const std::initializer_list<uint8_t> ExpectedCodeSequenceList = {
        0x64, 0x48, 0x8b, 0x04, 0x25, 0x00, 0x00, 0x00,
        0x00, // mov %fs:0, %rax
        0x48, 0x03, 0x05, 0x00, 0x00, 0x00, 0x00 // add x@gotpoff(%rip),
                                                 // %rax
    };
    CodeSequences[0].ExpectedCodeSequence =
        ArrayRef<uint8_t>(ExpectedCodeSequenceList);
    // The GOTTPOFF displacement sits 12 bytes into the sequence (after the
    // 9-byte mov and the 3 opcode/modrm bytes of the add).
    CodeSequences[0].TLSSequenceOffset = 12;

    static const std::initializer_list<uint8_t> NewCodeSequenceList = {
        0x64, 0x48, 0x8b, 0x04, 0x25, 0x00, 0x00, 0x00, 0x00, // mov %fs:0, %rax
        0x48, 0x8d, 0x80, 0x00, 0x00, 0x00, 0x00 // lea x@tpoff(%rax), %rax
    };
    CodeSequences[0].NewCodeSequence = ArrayRef<uint8_t>(NewCodeSequenceList);
    CodeSequences[0].TpoffRelocationOffset = 12;
  }

  // Initial Exec Code Model Sequence, II
  {
    static const std::initializer_list<uint8_t> ExpectedCodeSequenceList = {
        0x48, 0x8b, 0x05, 0x00, 0x00, 0x00, 0x00, // mov x@gotpoff(%rip), %rax
        0x64, 0x48, 0x8b, 0x00, 0x00, 0x00, 0x00 // mov %fs:(%rax), %rax
    };
    CodeSequences[1].ExpectedCodeSequence =
        ArrayRef<uint8_t>(ExpectedCodeSequenceList);
    // Here the displacement is only 3 bytes into the sequence (opcode +
    // modrm of the first mov).
    CodeSequences[1].TLSSequenceOffset = 3;

    static const std::initializer_list<uint8_t> NewCodeSequenceList = {
        0x66, 0x0f, 0x1f, 0x44, 0x00, 0x00, // 6 byte nop
        0x64, 0x8b, 0x04, 0x25, 0x00, 0x00, 0x00, 0x00, // mov %fs:x@tpoff, %rax
    };
    CodeSequences[1].NewCodeSequence = ArrayRef<uint8_t>(NewCodeSequenceList);
    CodeSequences[1].TpoffRelocationOffset = 10;
  }

  bool Resolved = false;
  auto &Section = Sections[SectionID];
  for (const auto &C : CodeSequences) {
    assert(C.ExpectedCodeSequence.size() == C.NewCodeSequence.size() &&
           "Old and new code sequences must have the same size");

    // Reject sequences that would start before the section or run past its
    // end; also guards the unsigned subtraction below.
    if (Offset < C.TLSSequenceOffset ||
        (Offset - C.TLSSequenceOffset + C.NewCodeSequence.size()) >
            Section.getSize()) {
      // This can't be a matching sequence as it doesn't fit in the current
      // section
      continue;
    }

    auto TLSSequenceStartOffset = Offset - C.TLSSequenceOffset;
    auto *TLSSequence = Section.getAddressWithOffset(TLSSequenceStartOffset);
    // Byte-for-byte compare against the expected instruction pattern
    // (displacement fields in the pattern are zero, matching how the
    // compiler emits them before relocation).
    if (ArrayRef<uint8_t>(TLSSequence, C.ExpectedCodeSequence.size()) !=
        C.ExpectedCodeSequence) {
      continue;
    }

    // Matched: rewrite the instructions in place to the Local Exec form.
    memcpy(TLSSequence, C.NewCodeSequence.data(), C.NewCodeSequence.size());

    // The original GOTTPOFF relocation has an addend as it is PC relative,
    // so it needs to be corrected. The TPOFF32 relocation is used as an
    // absolute value (which is an offset from %fs:0), so remove the addend
    // again.
    RelocationEntry RE(SectionID,
                       TLSSequenceStartOffset + C.TpoffRelocationOffset,
                       ELF::R_X86_64_TPOFF32, Value.Addend - Addend);

    if (Value.SymbolName)
      addRelocationForSymbol(RE, Value.SymbolName);
    else
      addRelocationForSection(RE, Value.SectionID);

    Resolved = true;
    break;
  }

  if (!Resolved) {
    // The GOTTPOFF relocation was not used in one of the sequences
    // described in the spec, so we can't optimize it to a TPOFF
    // relocation. Keep the GOT indirection: allocate a GOT slot, point the
    // original PC32 site at it, and fill the slot with the thread-pointer
    // offset via R_X86_64_TPOFF64.
    uint64_t GOTOffset = allocateGOTEntries(1);
    resolveGOTOffsetRelocation(SectionID, Offset, GOTOffset + Addend,
                               ELF::R_X86_64_PC32);
    RelocationEntry RE =
        computeGOTOffsetRE(GOTOffset, Value.Offset, ELF::R_X86_64_TPOFF64);
    if (Value.SymbolName)
      addRelocationForSymbol(RE, Value.SymbolName);
    else
      addRelocationForSection(RE, Value.SectionID);
  }
}
  1836. void RuntimeDyldELF::processX86_64TLSRelocation(
  1837. unsigned SectionID, uint64_t Offset, uint64_t RelType,
  1838. RelocationValueRef Value, int64_t Addend,
  1839. const RelocationRef &GetAddrRelocation) {
  1840. // Since we are statically linking and have no additional DSOs, we can resolve
  1841. // the relocation directly without using __tls_get_addr.
  1842. // Use the approach from "x86-64 Linker Optimizations" from the TLS spec
  1843. // to replace it with the Local Exec relocation variant.
  1844. // Find out whether the code was compiled with the large or small memory
  1845. // model. For this we look at the next relocation which is the relocation
  1846. // for the __tls_get_addr function. If it's a 32 bit relocation, it's the
  1847. // small code model, with a 64 bit relocation it's the large code model.
  1848. bool IsSmallCodeModel;
  1849. // Is the relocation for the __tls_get_addr a PC-relative GOT relocation?
  1850. bool IsGOTPCRel = false;
  1851. switch (GetAddrRelocation.getType()) {
  1852. case ELF::R_X86_64_GOTPCREL:
  1853. case ELF::R_X86_64_REX_GOTPCRELX:
  1854. case ELF::R_X86_64_GOTPCRELX:
  1855. IsGOTPCRel = true;
  1856. [[fallthrough]];
  1857. case ELF::R_X86_64_PLT32:
  1858. IsSmallCodeModel = true;
  1859. break;
  1860. case ELF::R_X86_64_PLTOFF64:
  1861. IsSmallCodeModel = false;
  1862. break;
  1863. default:
  1864. report_fatal_error(
  1865. "invalid TLS relocations for General/Local Dynamic TLS Model: "
  1866. "expected PLT or GOT relocation for __tls_get_addr function");
  1867. }
  1868. // The negative offset to the start of the TLS code sequence relative to
  1869. // the offset of the TLSGD/TLSLD relocation
  1870. uint64_t TLSSequenceOffset;
  1871. // The expected start of the code sequence
  1872. ArrayRef<uint8_t> ExpectedCodeSequence;
  1873. // The new TLS code sequence that will replace the existing code
  1874. ArrayRef<uint8_t> NewCodeSequence;
  1875. if (RelType == ELF::R_X86_64_TLSGD) {
  1876. // The offset of the new TPOFF32 relocation (offset starting from the
  1877. // beginning of the whole TLS sequence)
  1878. uint64_t TpoffRelocOffset;
  1879. if (IsSmallCodeModel) {
  1880. if (!IsGOTPCRel) {
  1881. static const std::initializer_list<uint8_t> CodeSequence = {
  1882. 0x66, // data16 (no-op prefix)
  1883. 0x48, 0x8d, 0x3d, 0x00, 0x00,
  1884. 0x00, 0x00, // lea <disp32>(%rip), %rdi
  1885. 0x66, 0x66, // two data16 prefixes
  1886. 0x48, // rex64 (no-op prefix)
  1887. 0xe8, 0x00, 0x00, 0x00, 0x00 // call __tls_get_addr@plt
  1888. };
  1889. ExpectedCodeSequence = ArrayRef<uint8_t>(CodeSequence);
  1890. TLSSequenceOffset = 4;
  1891. } else {
  1892. // This code sequence is not described in the TLS spec but gcc
  1893. // generates it sometimes.
  1894. static const std::initializer_list<uint8_t> CodeSequence = {
  1895. 0x66, // data16 (no-op prefix)
  1896. 0x48, 0x8d, 0x3d, 0x00, 0x00,
  1897. 0x00, 0x00, // lea <disp32>(%rip), %rdi
  1898. 0x66, // data16 prefix (no-op prefix)
  1899. 0x48, // rex64 (no-op prefix)
  1900. 0xff, 0x15, 0x00, 0x00, 0x00,
  1901. 0x00 // call *__tls_get_addr@gotpcrel(%rip)
  1902. };
  1903. ExpectedCodeSequence = ArrayRef<uint8_t>(CodeSequence);
  1904. TLSSequenceOffset = 4;
  1905. }
  1906. // The replacement code for the small code model. It's the same for
  1907. // both sequences.
  1908. static const std::initializer_list<uint8_t> SmallSequence = {
  1909. 0x64, 0x48, 0x8b, 0x04, 0x25, 0x00, 0x00, 0x00,
  1910. 0x00, // mov %fs:0, %rax
  1911. 0x48, 0x8d, 0x80, 0x00, 0x00, 0x00, 0x00 // lea x@tpoff(%rax),
  1912. // %rax
  1913. };
  1914. NewCodeSequence = ArrayRef<uint8_t>(SmallSequence);
  1915. TpoffRelocOffset = 12;
  1916. } else {
  1917. static const std::initializer_list<uint8_t> CodeSequence = {
  1918. 0x48, 0x8d, 0x3d, 0x00, 0x00, 0x00, 0x00, // lea <disp32>(%rip),
  1919. // %rdi
  1920. 0x48, 0xb8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  1921. 0x00, // movabs $__tls_get_addr@pltoff, %rax
  1922. 0x48, 0x01, 0xd8, // add %rbx, %rax
  1923. 0xff, 0xd0 // call *%rax
  1924. };
  1925. ExpectedCodeSequence = ArrayRef<uint8_t>(CodeSequence);
  1926. TLSSequenceOffset = 3;
  1927. // The replacement code for the large code model
  1928. static const std::initializer_list<uint8_t> LargeSequence = {
  1929. 0x64, 0x48, 0x8b, 0x04, 0x25, 0x00, 0x00, 0x00,
  1930. 0x00, // mov %fs:0, %rax
  1931. 0x48, 0x8d, 0x80, 0x00, 0x00, 0x00, 0x00, // lea x@tpoff(%rax),
  1932. // %rax
  1933. 0x66, 0x0f, 0x1f, 0x44, 0x00, 0x00 // nopw 0x0(%rax,%rax,1)
  1934. };
  1935. NewCodeSequence = ArrayRef<uint8_t>(LargeSequence);
  1936. TpoffRelocOffset = 12;
  1937. }
  1938. // The TLSGD/TLSLD relocations are PC-relative, so they have an addend.
  1939. // The new TPOFF32 relocations is used as an absolute offset from
  1940. // %fs:0, so remove the TLSGD/TLSLD addend again.
  1941. RelocationEntry RE(SectionID, Offset - TLSSequenceOffset + TpoffRelocOffset,
  1942. ELF::R_X86_64_TPOFF32, Value.Addend - Addend);
  1943. if (Value.SymbolName)
  1944. addRelocationForSymbol(RE, Value.SymbolName);
  1945. else
  1946. addRelocationForSection(RE, Value.SectionID);
  1947. } else if (RelType == ELF::R_X86_64_TLSLD) {
  1948. if (IsSmallCodeModel) {
  1949. if (!IsGOTPCRel) {
  1950. static const std::initializer_list<uint8_t> CodeSequence = {
  1951. 0x48, 0x8d, 0x3d, 0x00, 0x00, 0x00, // leaq <disp32>(%rip), %rdi
  1952. 0x00, 0xe8, 0x00, 0x00, 0x00, 0x00 // call __tls_get_addr@plt
  1953. };
  1954. ExpectedCodeSequence = ArrayRef<uint8_t>(CodeSequence);
  1955. TLSSequenceOffset = 3;
  1956. // The replacement code for the small code model
  1957. static const std::initializer_list<uint8_t> SmallSequence = {
  1958. 0x66, 0x66, 0x66, // three data16 prefixes (no-op)
  1959. 0x64, 0x48, 0x8b, 0x04, 0x25,
  1960. 0x00, 0x00, 0x00, 0x00 // mov %fs:0, %rax
  1961. };
  1962. NewCodeSequence = ArrayRef<uint8_t>(SmallSequence);
  1963. } else {
  1964. // This code sequence is not described in the TLS spec but gcc
  1965. // generates it sometimes.
  1966. static const std::initializer_list<uint8_t> CodeSequence = {
  1967. 0x48, 0x8d, 0x3d, 0x00,
  1968. 0x00, 0x00, 0x00, // leaq <disp32>(%rip), %rdi
  1969. 0xff, 0x15, 0x00, 0x00,
  1970. 0x00, 0x00 // call
  1971. // *__tls_get_addr@gotpcrel(%rip)
  1972. };
  1973. ExpectedCodeSequence = ArrayRef<uint8_t>(CodeSequence);
  1974. TLSSequenceOffset = 3;
  1975. // The replacement is code is just like above but it needs to be
  1976. // one byte longer.
  1977. static const std::initializer_list<uint8_t> SmallSequence = {
  1978. 0x0f, 0x1f, 0x40, 0x00, // 4 byte nop
  1979. 0x64, 0x48, 0x8b, 0x04, 0x25,
  1980. 0x00, 0x00, 0x00, 0x00 // mov %fs:0, %rax
  1981. };
  1982. NewCodeSequence = ArrayRef<uint8_t>(SmallSequence);
  1983. }
  1984. } else {
  1985. // This is the same sequence as for the TLSGD sequence with the large
  1986. // memory model above
  1987. static const std::initializer_list<uint8_t> CodeSequence = {
  1988. 0x48, 0x8d, 0x3d, 0x00, 0x00, 0x00, 0x00, // lea <disp32>(%rip),
  1989. // %rdi
  1990. 0x48, 0xb8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  1991. 0x48, // movabs $__tls_get_addr@pltoff, %rax
  1992. 0x01, 0xd8, // add %rbx, %rax
  1993. 0xff, 0xd0 // call *%rax
  1994. };
  1995. ExpectedCodeSequence = ArrayRef<uint8_t>(CodeSequence);
  1996. TLSSequenceOffset = 3;
  1997. // The replacement code for the large code model
  1998. static const std::initializer_list<uint8_t> LargeSequence = {
  1999. 0x66, 0x66, 0x66, // three data16 prefixes (no-op)
  2000. 0x66, 0x66, 0x0f, 0x1f, 0x84, 0x00, 0x00, 0x00, 0x00,
  2001. 0x00, // 10 byte nop
  2002. 0x64, 0x48, 0x8b, 0x04, 0x25, 0x00, 0x00, 0x00, 0x00 // mov %fs:0,%rax
  2003. };
  2004. NewCodeSequence = ArrayRef<uint8_t>(LargeSequence);
  2005. }
  2006. } else {
  2007. llvm_unreachable("both TLS relocations handled above");
  2008. }
  2009. assert(ExpectedCodeSequence.size() == NewCodeSequence.size() &&
  2010. "Old and new code sequences must have the same size");
  2011. auto &Section = Sections[SectionID];
  2012. if (Offset < TLSSequenceOffset ||
  2013. (Offset - TLSSequenceOffset + NewCodeSequence.size()) >
  2014. Section.getSize()) {
  2015. report_fatal_error("unexpected end of section in TLS sequence");
  2016. }
  2017. auto *TLSSequence = Section.getAddressWithOffset(Offset - TLSSequenceOffset);
  2018. if (ArrayRef<uint8_t>(TLSSequence, ExpectedCodeSequence.size()) !=
  2019. ExpectedCodeSequence) {
  2020. report_fatal_error(
  2021. "invalid TLS sequence for Global/Local Dynamic TLS Model");
  2022. }
  2023. memcpy(TLSSequence, NewCodeSequence.data(), NewCodeSequence.size());
  2024. }
  2025. size_t RuntimeDyldELF::getGOTEntrySize() {
  2026. // We don't use the GOT in all of these cases, but it's essentially free
  2027. // to put them all here.
  2028. size_t Result = 0;
  2029. switch (Arch) {
  2030. case Triple::x86_64:
  2031. case Triple::aarch64:
  2032. case Triple::aarch64_be:
  2033. case Triple::ppc64:
  2034. case Triple::ppc64le:
  2035. case Triple::systemz:
  2036. Result = sizeof(uint64_t);
  2037. break;
  2038. case Triple::x86:
  2039. case Triple::arm:
  2040. case Triple::thumb:
  2041. Result = sizeof(uint32_t);
  2042. break;
  2043. case Triple::mips:
  2044. case Triple::mipsel:
  2045. case Triple::mips64:
  2046. case Triple::mips64el:
  2047. if (IsMipsO32ABI || IsMipsN32ABI)
  2048. Result = sizeof(uint32_t);
  2049. else if (IsMipsN64ABI)
  2050. Result = sizeof(uint64_t);
  2051. else
  2052. llvm_unreachable("Mips ABI not handled");
  2053. break;
  2054. default:
  2055. llvm_unreachable("Unsupported CPU type!");
  2056. }
  2057. return Result;
  2058. }
  2059. uint64_t RuntimeDyldELF::allocateGOTEntries(unsigned no) {
  2060. if (GOTSectionID == 0) {
  2061. GOTSectionID = Sections.size();
  2062. // Reserve a section id. We'll allocate the section later
  2063. // once we know the total size
  2064. Sections.push_back(SectionEntry(".got", nullptr, 0, 0, 0));
  2065. }
  2066. uint64_t StartOffset = CurrentGOTIndex * getGOTEntrySize();
  2067. CurrentGOTIndex += no;
  2068. return StartOffset;
  2069. }
  2070. uint64_t RuntimeDyldELF::findOrAllocGOTEntry(const RelocationValueRef &Value,
  2071. unsigned GOTRelType) {
  2072. auto E = GOTOffsetMap.insert({Value, 0});
  2073. if (E.second) {
  2074. uint64_t GOTOffset = allocateGOTEntries(1);
  2075. // Create relocation for newly created GOT entry
  2076. RelocationEntry RE =
  2077. computeGOTOffsetRE(GOTOffset, Value.Offset, GOTRelType);
  2078. if (Value.SymbolName)
  2079. addRelocationForSymbol(RE, Value.SymbolName);
  2080. else
  2081. addRelocationForSection(RE, Value.SectionID);
  2082. E.first->second = GOTOffset;
  2083. }
  2084. return E.first->second;
  2085. }
  2086. void RuntimeDyldELF::resolveGOTOffsetRelocation(unsigned SectionID,
  2087. uint64_t Offset,
  2088. uint64_t GOTOffset,
  2089. uint32_t Type) {
  2090. // Fill in the relative address of the GOT Entry into the stub
  2091. RelocationEntry GOTRE(SectionID, Offset, Type, GOTOffset);
  2092. addRelocationForSection(GOTRE, GOTSectionID);
  2093. }
  2094. RelocationEntry RuntimeDyldELF::computeGOTOffsetRE(uint64_t GOTOffset,
  2095. uint64_t SymbolOffset,
  2096. uint32_t Type) {
  2097. return RelocationEntry(GOTSectionID, GOTOffset, Type, SymbolOffset);
  2098. }
  2099. void RuntimeDyldELF::processNewSymbol(const SymbolRef &ObjSymbol, SymbolTableEntry& Symbol) {
  2100. // This should never return an error as `processNewSymbol` wouldn't have been
  2101. // called if getFlags() returned an error before.
  2102. auto ObjSymbolFlags = cantFail(ObjSymbol.getFlags());
  2103. if (ObjSymbolFlags & SymbolRef::SF_Indirect) {
  2104. if (IFuncStubSectionID == 0) {
  2105. // Create a dummy section for the ifunc stubs. It will be actually
  2106. // allocated in finalizeLoad() below.
  2107. IFuncStubSectionID = Sections.size();
  2108. Sections.push_back(
  2109. SectionEntry(".text.__llvm_IFuncStubs", nullptr, 0, 0, 0));
  2110. // First 64B are reserverd for the IFunc resolver
  2111. IFuncStubOffset = 64;
  2112. }
  2113. IFuncStubs.push_back(IFuncStub{IFuncStubOffset, Symbol});
  2114. // Modify the symbol so that it points to the ifunc stub instead of to the
  2115. // resolver function.
  2116. Symbol = SymbolTableEntry(IFuncStubSectionID, IFuncStubOffset,
  2117. Symbol.getFlags());
  2118. IFuncStubOffset += getMaxIFuncStubSize();
  2119. }
  2120. }
// Finish loading an object: materialize the IFunc stub section, allocate
// and zero the GOT, set up the MIPS section-to-GOT mapping, and record the
// .eh_frame section for later registration. Resets the per-object GOT and
// IFunc bookkeeping so the next object starts fresh.
Error RuntimeDyldELF::finalizeLoad(const ObjectFile &Obj,
                                   ObjSectionToIDMap &SectionMap) {
  // MIPS O32 pairs HI16/LO16 relocations; any leftovers mean the object
  // had a HI16 with no matching LO16.
  if (IsMipsO32ABI)
    if (!PendingRelocs.empty())
      return make_error<RuntimeDyldError>("Can't find matching LO16 reloc");

  // Create the IFunc stubs if necessary. This must be done before processing
  // the GOT entries, as the IFunc stubs may create some.
  if (IFuncStubSectionID != 0) {
    uint8_t *IFuncStubsAddr = MemMgr.allocateCodeSection(
        IFuncStubOffset, 1, IFuncStubSectionID, ".text.__llvm_IFuncStubs");
    if (!IFuncStubsAddr)
      return make_error<RuntimeDyldError>(
          "Unable to allocate memory for IFunc stubs!");
    // Replace the dummy entry reserved in processNewSymbol() with the real
    // section backed by the memory just allocated.
    Sections[IFuncStubSectionID] =
        SectionEntry(".text.__llvm_IFuncStubs", IFuncStubsAddr, IFuncStubOffset,
                     IFuncStubOffset, 0);
    // The shared resolver occupies the first bytes of the section (see
    // createIFuncResolver); each stub below jumps through it.
    createIFuncResolver(IFuncStubsAddr);

    LLVM_DEBUG(dbgs() << "Creating IFunc stubs SectionID: "
                      << IFuncStubSectionID << " Addr: "
                      << Sections[IFuncStubSectionID].getAddress() << '\n');
    for (auto &IFuncStub : IFuncStubs) {
      auto &Symbol = IFuncStub.OriginalSymbol;
      LLVM_DEBUG(dbgs() << "\tSectionID: " << Symbol.getSectionID()
                        << " Offset: " << format("%p", Symbol.getOffset())
                        << " IFuncStubOffset: "
                        << format("%p\n", IFuncStub.StubOffset));
      createIFuncStub(IFuncStubSectionID, 0, IFuncStub.StubOffset,
                      Symbol.getSectionID(), Symbol.getOffset());
    }

    // Reset the IFunc state so the next object starts with a clean slate.
    IFuncStubSectionID = 0;
    IFuncStubOffset = 0;
    IFuncStubs.clear();
  }

  // If necessary, allocate the global offset table
  if (GOTSectionID != 0) {
    // Allocate memory for the section
    size_t TotalSize = CurrentGOTIndex * getGOTEntrySize();
    uint8_t *Addr = MemMgr.allocateDataSection(TotalSize, getGOTEntrySize(),
                                               GOTSectionID, ".got", false);
    if (!Addr)
      return make_error<RuntimeDyldError>("Unable to allocate memory for GOT!");

    Sections[GOTSectionID] =
        SectionEntry(".got", Addr, TotalSize, TotalSize, 0);

    // For now, initialize all GOT entries to zero. We'll fill them in as
    // needed when GOT-based relocations are applied.
    memset(Addr, 0, TotalSize);
    if (IsMipsN32ABI || IsMipsN64ABI) {
      // To correctly resolve Mips GOT relocations, we need a mapping from
      // object's sections to GOTs.
      for (section_iterator SI = Obj.section_begin(), SE = Obj.section_end();
           SI != SE; ++SI) {
        if (SI->relocation_begin() != SI->relocation_end()) {
          Expected<section_iterator> RelSecOrErr = SI->getRelocatedSection();
          if (!RelSecOrErr)
            return make_error<RuntimeDyldError>(
                toString(RelSecOrErr.takeError()));

          section_iterator RelocatedSection = *RelSecOrErr;
          ObjSectionToIDMap::iterator i = SectionMap.find(*RelocatedSection);
          assert(i != SectionMap.end());
          SectionToGOTMap[i->second] = GOTSectionID;
        }
      }
      GOTSymbolOffsets.clear();
    }
  }

  // Look for and record the EH frame section. Only the first .eh_frame
  // section in the map is registered.
  ObjSectionToIDMap::iterator i, e;
  for (i = SectionMap.begin(), e = SectionMap.end(); i != e; ++i) {
    const SectionRef &Section = i->first;
    StringRef Name;
    Expected<StringRef> NameOrErr = Section.getName();
    if (NameOrErr)
      Name = *NameOrErr;
    else
      consumeError(NameOrErr.takeError());

    if (Name == ".eh_frame") {
      UnregisteredEHFrameSections.push_back(i->second);
      break;
    }
  }

  // Reset GOT bookkeeping so the next loaded object allocates its own GOT.
  GOTSectionID = 0;
  CurrentGOTIndex = 0;

  return Error::success();
}
  2205. bool RuntimeDyldELF::isCompatibleFile(const object::ObjectFile &Obj) const {
  2206. return Obj.isELF();
  2207. }
// Write the shared IFunc resolver trampoline at Addr (the first bytes of
// the .text.__llvm_IFuncStubs section). Only implemented for x86-64.
void RuntimeDyldELF::createIFuncResolver(uint8_t *Addr) const {
  if (Arch == Triple::x86_64) {
    // The address of the GOT1 entry is in %r11, the GOT2 entry is in %r11+8
    // (see createIFuncStub() for details)
    // The following code first saves all registers that contain the original
    // function arguments as those registers are not saved by the resolver
    // function. %r11 is saved as well so that the GOT2 entry can be updated
    // afterwards. Then it calls the actual IFunc resolver function whose
    // address is stored in GOT2. After the resolver function returns, all
    // saved registers are restored and the return value is written to GOT1.
    // Finally, jump to the now resolved function.
    // NOTE: only the six integer argument registers are preserved; vector
    // argument registers are not saved across the resolver call.
    // clang-format off
    const uint8_t StubCode[] = {
        0x57,                   // push %rdi
        0x56,                   // push %rsi
        0x52,                   // push %rdx
        0x51,                   // push %rcx
        0x41, 0x50,             // push %r8
        0x41, 0x51,             // push %r9
        0x41, 0x53,             // push %r11
        0x41, 0xff, 0x53, 0x08, // call *0x8(%r11)
        0x41, 0x5b,             // pop %r11
        0x41, 0x59,             // pop %r9
        0x41, 0x58,             // pop %r8
        0x59,                   // pop %rcx
        0x5a,                   // pop %rdx
        0x5e,                   // pop %rsi
        0x5f,                   // pop %rdi
        0x49, 0x89, 0x03,       // mov %rax,(%r11)
        0xff, 0xe0              // jmp *%rax
    };
    // clang-format on
    // The 64-byte budget matches the reservation made in processNewSymbol().
    static_assert(sizeof(StubCode) <= 64,
                  "maximum size of the IFunc resolver is 64B");
    memcpy(Addr, StubCode, sizeof(StubCode));
  } else {
    report_fatal_error(
        "IFunc resolver is not supported for target architecture");
  }
}
// Emit one per-symbol IFunc stub at IFuncStubOffset within the stub
// section, wiring it (via two fresh GOT entries and relocations) to the
// shared resolver at IFuncResolverOffset and to the actual ifunc resolver
// function at (IFuncSectionID, IFuncOffset). Only implemented for x86-64.
void RuntimeDyldELF::createIFuncStub(unsigned IFuncStubSectionID,
                                     uint64_t IFuncResolverOffset,
                                     uint64_t IFuncStubOffset,
                                     unsigned IFuncSectionID,
                                     uint64_t IFuncOffset) {
  auto &IFuncStubSection = Sections[IFuncStubSectionID];
  auto *Addr = IFuncStubSection.getAddressWithOffset(IFuncStubOffset);

  if (Arch == Triple::x86_64) {
    // The first instruction loads a PC-relative address into %r11 which is a
    // GOT entry for this stub. This initially contains the address to the
    // IFunc resolver. We can use %r11 here as it's caller saved but not used
    // to pass any arguments. In fact, x86_64 ABI even suggests using %r11 for
    // code in the PLT. The IFunc resolver will use %r11 to update the GOT
    // entry.
    //
    // The next instruction just jumps to the address contained in the GOT
    // entry. As mentioned above, we do this two-step jump by first setting
    // %r11 so that the IFunc resolver has access to it.
    //
    // The IFunc resolver of course also needs to know the actual address of
    // the actual IFunc resolver function. This will be stored in a GOT entry
    // right next to the first one for this stub. So, the IFunc resolver will
    // be able to call it with %r11+8.
    //
    // In total, two adjacent GOT entries (+relocation) and one additional
    // relocation are required:
    // GOT1: Address of the IFunc resolver.
    // GOT2: Address of the IFunc resolver function.
    // IFuncStubOffset+3: 32-bit PC-relative address of GOT1.
    uint64_t GOT1 = allocateGOTEntries(2);
    uint64_t GOT2 = GOT1 + getGOTEntrySize();

    // GOT1 <- address of the shared resolver trampoline in the stub section.
    RelocationEntry RE1(GOTSectionID, GOT1, ELF::R_X86_64_64,
                        IFuncResolverOffset, {});
    addRelocationForSection(RE1, IFuncStubSectionID);
    // GOT2 <- address of the user's ifunc resolver function.
    RelocationEntry RE2(GOTSectionID, GOT2, ELF::R_X86_64_64, IFuncOffset, {});
    addRelocationForSection(RE2, IFuncSectionID);

    const uint8_t StubCode[] = {
        0x4c, 0x8d, 0x1d, 0x00, 0x00, 0x00, 0x00, // leaq 0x0(%rip),%r11
        0x41, 0xff, 0x23                          // jmpq *(%r11)
    };
    assert(sizeof(StubCode) <= getMaxIFuncStubSize() &&
           "IFunc stub size must not exceed getMaxIFuncStubSize()");
    memcpy(Addr, StubCode, sizeof(StubCode));

    // The PC-relative value starts 4 bytes from the end of the leaq
    // instruction, so the addend is -4.
    resolveGOTOffsetRelocation(IFuncStubSectionID, IFuncStubOffset + 3,
                               GOT1 - 4, ELF::R_X86_64_PC32);
  } else {
    report_fatal_error("IFunc stub is not supported for target architecture");
  }
}
  2299. unsigned RuntimeDyldELF::getMaxIFuncStubSize() const {
  2300. if (Arch == Triple::x86_64) {
  2301. return 10;
  2302. }
  2303. return 0;
  2304. }
  2305. bool RuntimeDyldELF::relocationNeedsGot(const RelocationRef &R) const {
  2306. unsigned RelTy = R.getType();
  2307. if (Arch == Triple::aarch64 || Arch == Triple::aarch64_be)
  2308. return RelTy == ELF::R_AARCH64_ADR_GOT_PAGE ||
  2309. RelTy == ELF::R_AARCH64_LD64_GOT_LO12_NC;
  2310. if (Arch == Triple::x86_64)
  2311. return RelTy == ELF::R_X86_64_GOTPCREL ||
  2312. RelTy == ELF::R_X86_64_GOTPCRELX ||
  2313. RelTy == ELF::R_X86_64_GOT64 ||
  2314. RelTy == ELF::R_X86_64_REX_GOTPCRELX;
  2315. return false;
  2316. }
  2317. bool RuntimeDyldELF::relocationNeedsStub(const RelocationRef &R) const {
  2318. if (Arch != Triple::x86_64)
  2319. return true; // Conservative answer
  2320. switch (R.getType()) {
  2321. default:
  2322. return true; // Conservative answer
  2323. case ELF::R_X86_64_GOTPCREL:
  2324. case ELF::R_X86_64_GOTPCRELX:
  2325. case ELF::R_X86_64_REX_GOTPCRELX:
  2326. case ELF::R_X86_64_GOTPC64:
  2327. case ELF::R_X86_64_GOT64:
  2328. case ELF::R_X86_64_GOTOFF64:
  2329. case ELF::R_X86_64_PC32:
  2330. case ELF::R_X86_64_PC64:
  2331. case ELF::R_X86_64_64:
  2332. // We know that these reloation types won't need a stub function. This list
  2333. // can be extended as needed.
  2334. return false;
  2335. }
  2336. }
  2337. } // namespace llvm