//===- Target.cpp ---------------------------------------------------------===//
//
// The LLVM Linker
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// Machine-specific things, such as applying relocations, creation of
// GOT or PLT entries, etc., are handled in this file.
//
// Refer to the ELF spec for the single-letter variables, S, A or P, used
// in this file. SA is S+A.
//
//===----------------------------------------------------------------------===//

#include "Target.h"
#include "Error.h"
#include "OutputSections.h"
#include "Symbols.h"

#include "llvm/ADT/ArrayRef.h"
#include "llvm/Object/ELF.h"
#include "llvm/Support/Endian.h"
#include "llvm/Support/ELF.h"

using namespace llvm;
using namespace llvm::object;
using namespace llvm::support::endian;
using namespace llvm::ELF;

namespace lld {
namespace elf2 {

std::unique_ptr<TargetInfo> Target;

template <endianness E> static void add32(void *P, int32_t V) {
  write32<E>(P, read32<E>(P) + V);
}

static void add32le(uint8_t *P, int32_t V) { add32<support::little>(P, V); }
static void or32le(uint8_t *P, int32_t V) { write32le(P, read32le(P) | V); }

namespace {
class X86TargetInfo final : public TargetInfo {
public:
  X86TargetInfo();
  void writeGotPltEntry(uint8_t *Buf, uint64_t Plt) const override;
  void writePltZeroEntry(uint8_t *Buf, uint64_t GotEntryAddr,
                         uint64_t PltEntryAddr) const override;
  void writePltEntry(uint8_t *Buf, uint64_t GotEntryAddr,
                     uint64_t PltEntryAddr, int32_t Index) const override;
  bool relocNeedsGot(uint32_t Type, const SymbolBody &S) const override;
  bool relocPointsToGot(uint32_t Type) const override;
  bool relocNeedsPlt(uint32_t Type, const SymbolBody &S) const override;
  void relocateOne(uint8_t *Loc, uint8_t *BufEnd, uint32_t Type, uint64_t P,
                   uint64_t SA) const override;
};

class X86_64TargetInfo final : public TargetInfo {
public:
  X86_64TargetInfo();
  unsigned getPltRefReloc(unsigned Type) const override;
  void writeGotPltHeaderEntries(uint8_t *Buf) const override;
  void writeGotPltEntry(uint8_t *Buf, uint64_t Plt) const override;
  void writePltZeroEntry(uint8_t *Buf, uint64_t GotEntryAddr,
                         uint64_t PltEntryAddr) const override;
  void writePltEntry(uint8_t *Buf, uint64_t GotEntryAddr,
                     uint64_t PltEntryAddr, int32_t Index) const override;
  bool relocNeedsCopy(uint32_t Type, const SymbolBody &S) const override;
  bool relocNeedsGot(uint32_t Type, const SymbolBody &S) const override;
  bool relocNeedsPlt(uint32_t Type, const SymbolBody &S) const override;
  void relocateOne(uint8_t *Loc, uint8_t *BufEnd, uint32_t Type, uint64_t P,
                   uint64_t SA) const override;
  bool isRelRelative(uint32_t Type) const override;
};

class PPC64TargetInfo final : public TargetInfo {
public:
  PPC64TargetInfo();
  void writeGotPltEntry(uint8_t *Buf, uint64_t Plt) const override;
  void writePltZeroEntry(uint8_t *Buf, uint64_t GotEntryAddr,
                         uint64_t PltEntryAddr) const override;
  void writePltEntry(uint8_t *Buf, uint64_t GotEntryAddr,
                     uint64_t PltEntryAddr, int32_t Index) const override;
  bool relocNeedsGot(uint32_t Type, const SymbolBody &S) const override;
  bool relocNeedsPlt(uint32_t Type, const SymbolBody &S) const override;
  void relocateOne(uint8_t *Loc, uint8_t *BufEnd, uint32_t Type, uint64_t P,
                   uint64_t SA) const override;
  bool isRelRelative(uint32_t Type) const override;
};

class AArch64TargetInfo final : public TargetInfo {
public:
  AArch64TargetInfo();
  unsigned getPltRefReloc(unsigned Type) const override;
  void writeGotPltEntry(uint8_t *Buf, uint64_t Plt) const override;
  void writePltZeroEntry(uint8_t *Buf, uint64_t GotEntryAddr,
                         uint64_t PltEntryAddr) const override;
  void writePltEntry(uint8_t *Buf, uint64_t GotEntryAddr,
                     uint64_t PltEntryAddr, int32_t Index) const override;
  bool relocNeedsGot(uint32_t Type, const SymbolBody &S) const override;
  bool relocNeedsPlt(uint32_t Type, const SymbolBody &S) const override;
  void relocateOne(uint8_t *Loc, uint8_t *BufEnd, uint32_t Type, uint64_t P,
                   uint64_t SA) const override;
};

template <class ELFT> class MipsTargetInfo final : public TargetInfo {
public:
  MipsTargetInfo();
  void writeGotHeaderEntries(uint8_t *Buf) const override;
  void writeGotPltEntry(uint8_t *Buf, uint64_t Plt) const override;
  void writePltZeroEntry(uint8_t *Buf, uint64_t GotEntryAddr,
                         uint64_t PltEntryAddr) const override;
  void writePltEntry(uint8_t *Buf, uint64_t GotEntryAddr,
                     uint64_t PltEntryAddr, int32_t Index) const override;
  bool relocNeedsGot(uint32_t Type, const SymbolBody &S) const override;
  bool relocNeedsPlt(uint32_t Type, const SymbolBody &S) const override;
  void relocateOne(uint8_t *Loc, uint8_t *BufEnd, uint32_t Type, uint64_t P,
                   uint64_t SA) const override;
};
} // anonymous namespace

TargetInfo *createTarget() {
  switch (Config->EMachine) {
  case EM_386:
    return new X86TargetInfo();
  case EM_AARCH64:
    return new AArch64TargetInfo();
  case EM_MIPS:
    switch (Config->EKind) {
    case ELF32LEKind:
      return new MipsTargetInfo<ELF32LE>();
    case ELF32BEKind:
      return new MipsTargetInfo<ELF32BE>();
    default:
      error("Unsupported MIPS target");
    }
  case EM_PPC64:
    return new PPC64TargetInfo();
  case EM_X86_64:
    return new X86_64TargetInfo();
  }
  error("Unknown target machine");
}

TargetInfo::~TargetInfo() {}

uint64_t TargetInfo::getVAStart() const { return Config->Shared ? 0 : VAStart; }

bool TargetInfo::relocNeedsCopy(uint32_t Type, const SymbolBody &S) const {
  return false;
}

unsigned TargetInfo::getPltRefReloc(unsigned Type) const { return PCRelReloc; }

bool TargetInfo::relocPointsToGot(uint32_t Type) const { return false; }

bool TargetInfo::isRelRelative(uint32_t Type) const { return true; }

void TargetInfo::writeGotHeaderEntries(uint8_t *Buf) const {}

void TargetInfo::writeGotPltHeaderEntries(uint8_t *Buf) const {}

X86TargetInfo::X86TargetInfo() {
  PCRelReloc = R_386_PC32;
  GotReloc = R_386_GLOB_DAT;
  GotRefReloc = R_386_GOT32;
  PltReloc = R_386_JUMP_SLOT;
}

void X86TargetInfo::writeGotPltEntry(uint8_t *Buf, uint64_t Plt) const {}
void X86TargetInfo::writePltZeroEntry(uint8_t *Buf, uint64_t GotEntryAddr,
                                      uint64_t PltEntryAddr) const {}

void X86TargetInfo::writePltEntry(uint8_t *Buf, uint64_t GotEntryAddr,
                                  uint64_t PltEntryAddr, int32_t Index) const {
  // jmpl *val; nop; nop
  const uint8_t Inst[] = {0xff, 0x25, 0, 0, 0, 0, 0x90, 0x90};
  memcpy(Buf, Inst, sizeof(Inst));
  assert(isUInt<32>(GotEntryAddr));
  write32le(Buf + 2, GotEntryAddr);
}

bool X86TargetInfo::relocNeedsGot(uint32_t Type, const SymbolBody &S) const {
  return Type == R_386_GOT32 || relocNeedsPlt(Type, S);
}

bool X86TargetInfo::relocPointsToGot(uint32_t Type) const {
  return Type == R_386_GOTPC;
}

bool X86TargetInfo::relocNeedsPlt(uint32_t Type, const SymbolBody &S) const {
  return Type == R_386_PLT32 || (Type == R_386_PC32 && S.isShared());
}

void X86TargetInfo::relocateOne(uint8_t *Loc, uint8_t *BufEnd, uint32_t Type,
                                uint64_t P, uint64_t SA) const {
  switch (Type) {
  case R_386_GOT32:
    add32le(Loc, SA - Out<ELF32LE>::Got->getVA());
    break;
  case R_386_PC32:
    add32le(Loc, SA - P);
    break;
  case R_386_32:
    add32le(Loc, SA);
    break;
  default:
    error("unrecognized reloc " + Twine(Type));
  }
}

X86_64TargetInfo::X86_64TargetInfo() {
  CopyReloc = R_X86_64_COPY;
  PCRelReloc = R_X86_64_PC32;
  GotReloc = R_X86_64_GLOB_DAT;
  GotRefReloc = R_X86_64_PC32;
  PltReloc = R_X86_64_JUMP_SLOT;
  RelativeReloc = R_X86_64_RELATIVE;
  TlsGotReloc = R_X86_64_TPOFF64;
  TlsLocalDynamicReloc = R_X86_64_TLSLD;
  TlsGlobalDynamicReloc = R_X86_64_TLSGD;
  TlsModuleIndexReloc = R_X86_64_DTPMOD64;
  TlsOffsetReloc = R_X86_64_DTPOFF64;
  TlsPcRelGotReloc = R_X86_64_GOTTPOFF;
  LazyRelocations = true;
  PltEntrySize = 16;
  PltZeroEntrySize = 16;
}

void X86_64TargetInfo::writeGotPltHeaderEntries(uint8_t *Buf) const {
  write64le(Buf, Out<ELF64LE>::Dynamic->getVA());
}

void X86_64TargetInfo::writeGotPltEntry(uint8_t *Buf, uint64_t Plt) const {
  // Skip 6 bytes of "jmpq *got(%rip)"
  write32le(Buf, Plt + 6);
}

void X86_64TargetInfo::writePltZeroEntry(uint8_t *Buf, uint64_t GotEntryAddr,
                                         uint64_t PltEntryAddr) const {
  const uint8_t PltData[] = {
      0xff, 0x35, 0x00, 0x00, 0x00, 0x00, // pushq GOT+8(%rip)
      0xff, 0x25, 0x00, 0x00, 0x00, 0x00, // jmp *GOT+16(%rip)
      0x0f, 0x1f, 0x40, 0x00              // nopl 0x0(rax)
  };
  memcpy(Buf, PltData, sizeof(PltData));
  write32le(Buf + 2, GotEntryAddr - PltEntryAddr + 2); // GOT+8
  write32le(Buf + 8, GotEntryAddr - PltEntryAddr + 4); // GOT+16
}

void X86_64TargetInfo::writePltEntry(uint8_t *Buf, uint64_t GotEntryAddr,
                                     uint64_t PltEntryAddr,
                                     int32_t Index) const {
  const uint8_t Inst[] = {
      0xff, 0x25, 0x00, 0x00, 0x00, 0x00, // jmpq *got(%rip)
      0x68, 0x00, 0x00, 0x00, 0x00,       // pushq <relocation index>
      0xe9, 0x00, 0x00, 0x00, 0x00        // jmpq plt[0]
  };
  memcpy(Buf, Inst, sizeof(Inst));

  write32le(Buf + 2, GotEntryAddr - PltEntryAddr - 6);
  write32le(Buf + 7, Index);
  write32le(Buf + 12, -Index * PltEntrySize - PltZeroEntrySize - 16);
}

bool X86_64TargetInfo::relocNeedsCopy(uint32_t Type,
                                      const SymbolBody &S) const {
  if (Type == R_X86_64_32S || Type == R_X86_64_32 || Type == R_X86_64_PC32 ||
      Type == R_X86_64_64)
    if (auto *SS = dyn_cast<SharedSymbol<ELF64LE>>(&S))
      return SS->Sym.getType() == STT_OBJECT;
  return false;
}

bool X86_64TargetInfo::relocNeedsGot(uint32_t Type, const SymbolBody &S) const {
  return Type == R_X86_64_GOTTPOFF || Type == R_X86_64_GOTPCREL ||
         relocNeedsPlt(Type, S);
}

unsigned X86_64TargetInfo::getPltRefReloc(unsigned Type) const {
  if (Type == R_X86_64_PLT32)
    return R_X86_64_PC32;
  return Type;
}

bool X86_64TargetInfo::relocNeedsPlt(uint32_t Type, const SymbolBody &S) const {
  if (relocNeedsCopy(Type, S))
    return false;

  switch (Type) {
  default:
    return false;
  case R_X86_64_32:
  case R_X86_64_64:
  case R_X86_64_PC32:
    // This relocation is defined to have a value of (S + A - P).
    // The problems start when a non-PIC program calls a function in a shared
    // library.
    // In an ideal world, we could just report an error saying the relocation
    // can overflow at runtime.
    // In the real world with glibc, crt1.o has an R_X86_64_PC32 pointing to
    // libc.so.
    //
    // The general idea on how to handle such cases is to create a PLT entry
    // and use that as the function value.
    //
    // For the static linking part, we just return true and everything else
    // will use the PLT entry as the address.
    //
    // The remaining (unimplemented) problem is making sure pointer equality
    // still works. We need the help of the dynamic linker for that. We
    // let it know that we have a direct reference to a shared-object symbol
    // by creating an undefined symbol with a non-zero st_value. Seeing that,
    // the dynamic linker resolves the symbol to the value of the symbol we
    // created. This is true even for got entries, so pointer equality is
    // maintained. To avoid an infinite loop, the only entry that points to
    // the real function is a dedicated got entry used by the plt. That is
    // identified by special relocation types (R_X86_64_JUMP_SLOT,
    // R_386_JMP_SLOT, etc).
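    //
    // Illustrative sketch of the net effect (not something emitted here):
    // for non-PIC code containing
    //   call foo              # R_X86_64_PC32, foo defined only in a DSO
    // the call is resolved to foo's local PLT entry, and foo is exported as
    // an undefined dynamic symbol whose st_value is that PLT entry, so the
    // dynamic linker can keep &foo identical in every module.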
    return S.isShared();
  case R_X86_64_PLT32:
    return canBePreempted(&S, true);
  }
}

bool X86_64TargetInfo::isRelRelative(uint32_t Type) const {
  switch (Type) {
  default:
    return false;
  case R_X86_64_PC64:
  case R_X86_64_PC32:
  case R_X86_64_PC16:
  case R_X86_64_PC8:
  case R_X86_64_PLT32:
  case R_X86_64_DTPOFF32:
  case R_X86_64_DTPOFF64:
    return true;
  }
}

void X86_64TargetInfo::relocateOne(uint8_t *Loc, uint8_t *BufEnd, uint32_t Type,
                                   uint64_t P, uint64_t SA) const {
  switch (Type) {
  case R_X86_64_PC32:
  case R_X86_64_GOTPCREL:
  case R_X86_64_PLT32:
  case R_X86_64_TLSLD:
  case R_X86_64_TLSGD:
  case R_X86_64_TPOFF64:
    write32le(Loc, SA - P);
    break;
  case R_X86_64_64:
  case R_X86_64_DTPOFF64:
    write64le(Loc, SA);
    break;
  case R_X86_64_32:
  case R_X86_64_32S:
    if (Type == R_X86_64_32 && !isUInt<32>(SA))
      error("R_X86_64_32 out of range");
    else if (!isInt<32>(SA))
      error("R_X86_64_32S out of range");
    write32le(Loc, SA);
    break;
  case R_X86_64_DTPOFF32:
    write32le(Loc, SA);
    break;
  case R_X86_64_TPOFF32: {
    uint64_t Val = SA - Out<ELF64LE>::TlsPhdr->p_memsz;
    if (!isInt<32>(Val))
      error("R_X86_64_TPOFF32 out of range");
    write32le(Loc, Val);
    break;
  }
  default:
    error("unrecognized reloc " + Twine(Type));
  }
}

// Relocation masks following the #lo(value), #hi(value), #ha(value),
// #higher(value), #highera(value), #highest(value), and #highesta(value)
// macros defined in section 4.5.1. Relocation Types of the PPC-elf64abi
// document.
static uint16_t applyPPCLo(uint64_t V) { return V; }
static uint16_t applyPPCHi(uint64_t V) { return V >> 16; }
static uint16_t applyPPCHa(uint64_t V) { return (V + 0x8000) >> 16; }
static uint16_t applyPPCHigher(uint64_t V) { return V >> 32; }
static uint16_t applyPPCHighera(uint64_t V) { return (V + 0x8000) >> 32; }
static uint16_t applyPPCHighest(uint64_t V) { return V >> 48; }
static uint16_t applyPPCHighesta(uint64_t V) { return (V + 0x8000) >> 48; }

PPC64TargetInfo::PPC64TargetInfo() {
  PCRelReloc = R_PPC64_REL24;
  GotReloc = R_PPC64_GLOB_DAT;
  GotRefReloc = R_PPC64_REL64;
  RelativeReloc = R_PPC64_RELATIVE;
  PltEntrySize = 32;

  // We need 64K pages (at least under glibc/Linux, the loader won't
  // set different permissions on a finer granularity than that).
  PageSize = 65536;

  // The PPC64 ELF ABI v1 spec says:
  //
  // It is normally desirable to put segments with different characteristics
  // in separate 256 Mbyte portions of the address space, to give the
  // operating system full paging flexibility in the 64-bit address space.
  //
  // And because the lowest non-zero 256M boundary is 0x10000000, PPC64 linkers
  // use 0x10000000 as the starting address.
  VAStart = 0x10000000;
}

uint64_t getPPC64TocBase() {
  // The TOC consists of sections .got, .toc, .tocbss, .plt in that
  // order. The TOC starts where the first of these sections starts.

  // FIXME: This obviously does not do the right thing when there is no .got
  // section, but there is a .toc or .tocbss section.
  uint64_t TocVA = Out<ELF64BE>::Got->getVA();
  if (!TocVA)
    TocVA = Out<ELF64BE>::Plt->getVA();

  // Per the ppc64-elf-linux ABI, the TOC base is the TOC value plus 0x8000,
  // thus permitting a full 64 Kbytes segment. Note that the glibc startup
  // code (crt1.o) assumes that you can get from the TOC base to the
  // start of the .toc section with only a single (signed) 16-bit relocation.
  return TocVA + 0x8000;
}

void PPC64TargetInfo::writeGotPltEntry(uint8_t *Buf, uint64_t Plt) const {}
void PPC64TargetInfo::writePltZeroEntry(uint8_t *Buf, uint64_t GotEntryAddr,
                                        uint64_t PltEntryAddr) const {}
void PPC64TargetInfo::writePltEntry(uint8_t *Buf, uint64_t GotEntryAddr,
                                    uint64_t PltEntryAddr,
                                    int32_t Index) const {
  uint64_t Off = GotEntryAddr - getPPC64TocBase();

  // FIXME: What we should do, in theory, is get the offset of the function
  // descriptor in the .opd section, and use that as the offset from %r2 (the
  // TOC-base pointer). Instead, we have the GOT-entry offset, and that will
  // be a pointer to the function descriptor in the .opd section. Using
  // this scheme is simpler, but requires an extra indirection per PLT dispatch.

  write32be(Buf, 0xf8410028);                       // std %r2, 40(%r1)
  write32be(Buf + 4, 0x3d620000 | applyPPCHa(Off)); // addis %r11, %r2, X@ha
  write32be(Buf + 8, 0xe98b0000 | applyPPCLo(Off)); // ld %r12, X@l(%r11)
  write32be(Buf + 12, 0xe96c0000);                  // ld %r11,0(%r12)
  write32be(Buf + 16, 0x7d6903a6);                  // mtctr %r11
  write32be(Buf + 20, 0xe84c0008);                  // ld %r2,8(%r12)
  write32be(Buf + 24, 0xe96c0010);                  // ld %r11,16(%r12)
  write32be(Buf + 28, 0x4e800420);                  // bctr
}

bool PPC64TargetInfo::relocNeedsGot(uint32_t Type, const SymbolBody &S) const {
  if (relocNeedsPlt(Type, S))
    return true;

  switch (Type) {
  default: return false;
  case R_PPC64_GOT16:
  case R_PPC64_GOT16_LO:
  case R_PPC64_GOT16_HI:
  case R_PPC64_GOT16_HA:
  case R_PPC64_GOT16_DS:
  case R_PPC64_GOT16_LO_DS:
    return true;
  }
}

bool PPC64TargetInfo::relocNeedsPlt(uint32_t Type, const SymbolBody &S) const {
  // These are function calls that need to be redirected through a PLT stub.
  return Type == R_PPC64_REL24 && canBePreempted(&S, false);
}

bool PPC64TargetInfo::isRelRelative(uint32_t Type) const {
  switch (Type) {
  default:
    return true;
  case R_PPC64_TOC:
  case R_PPC64_ADDR64:
    return false;
  }
}

void PPC64TargetInfo::relocateOne(uint8_t *Loc, uint8_t *BufEnd, uint32_t Type,
                                  uint64_t P, uint64_t SA) const {
  uint64_t TB = getPPC64TocBase();

  // For a TOC-relative relocation, adjust the addend and proceed in terms of
  // the corresponding ADDR16 relocation type.
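  // For example, an R_PPC64_TOC16_HA against S with addend A is handled below
  // as R_PPC64_ADDR16_HA applied to (S + A - TB), so an @ha/@l instruction
  // pair ends up materializing the symbol's signed offset from the TOC base.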
  switch (Type) {
  case R_PPC64_TOC16:       Type = R_PPC64_ADDR16;       SA -= TB; break;
  case R_PPC64_TOC16_DS:    Type = R_PPC64_ADDR16_DS;    SA -= TB; break;
  case R_PPC64_TOC16_LO:    Type = R_PPC64_ADDR16_LO;    SA -= TB; break;
  case R_PPC64_TOC16_LO_DS: Type = R_PPC64_ADDR16_LO_DS; SA -= TB; break;
  case R_PPC64_TOC16_HI:    Type = R_PPC64_ADDR16_HI;    SA -= TB; break;
  case R_PPC64_TOC16_HA:    Type = R_PPC64_ADDR16_HA;    SA -= TB; break;
  default: break;
  }

  switch (Type) {
  case R_PPC64_ADDR16:
    if (!isInt<16>(SA))
      error("Relocation R_PPC64_ADDR16 overflow");
    write16be(Loc, SA);
    break;
  case R_PPC64_ADDR16_DS:
    if (!isInt<16>(SA))
      error("Relocation R_PPC64_ADDR16_DS overflow");
    write16be(Loc, (read16be(Loc) & 3) | (SA & ~3));
    break;
  case R_PPC64_ADDR16_LO:
    write16be(Loc, applyPPCLo(SA));
    break;
  case R_PPC64_ADDR16_LO_DS:
    write16be(Loc, (read16be(Loc) & 3) | (applyPPCLo(SA) & ~3));
    break;
  case R_PPC64_ADDR16_HI:
    write16be(Loc, applyPPCHi(SA));
    break;
  case R_PPC64_ADDR16_HA:
    write16be(Loc, applyPPCHa(SA));
    break;
  case R_PPC64_ADDR16_HIGHER:
    write16be(Loc, applyPPCHigher(SA));
    break;
  case R_PPC64_ADDR16_HIGHERA:
    write16be(Loc, applyPPCHighera(SA));
    break;
  case R_PPC64_ADDR16_HIGHEST:
    write16be(Loc, applyPPCHighest(SA));
    break;
  case R_PPC64_ADDR16_HIGHESTA:
    write16be(Loc, applyPPCHighesta(SA));
    break;
  case R_PPC64_ADDR14: {
    if ((SA & 3) != 0)
      error("Improper alignment for relocation R_PPC64_ADDR14");

    // Preserve the AA/LK bits in the branch instruction
    uint8_t AALK = Loc[3];
    write16be(Loc + 2, (AALK & 3) | (SA & 0xfffc));
    break;
  }
  case R_PPC64_REL16_LO:
    write16be(Loc, applyPPCLo(SA - P));
    break;
  case R_PPC64_REL16_HI:
    write16be(Loc, applyPPCHi(SA - P));
    break;
  case R_PPC64_REL16_HA:
    write16be(Loc, applyPPCHa(SA - P));
    break;
  case R_PPC64_ADDR32:
    if (!isInt<32>(SA))
      error("Relocation R_PPC64_ADDR32 overflow");
    write32be(Loc, SA);
    break;
  case R_PPC64_REL24: {
    // If we have an undefined weak symbol, we might get here with a symbol
    // address of zero. That could overflow, but the code must be unreachable,
    // so don't bother doing anything at all.
    if (!SA)
      break;

    uint64_t PltStart = Out<ELF64BE>::Plt->getVA();
    uint64_t PltEnd = PltStart + Out<ELF64BE>::Plt->getSize();
    bool InPlt = PltStart <= SA && SA < PltEnd;

    if (!InPlt && Out<ELF64BE>::Opd) {
      // If this is a local call, and we currently have the address of a
      // function-descriptor, get the underlying code address instead.
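      // An .opd entry is an ELFv1 function descriptor: three doublewords
      // holding the function's entry-point address, its TOC pointer, and an
      // environment pointer. The read64be below loads the first of these.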
      uint64_t OpdStart = Out<ELF64BE>::Opd->getVA();
      uint64_t OpdEnd = OpdStart + Out<ELF64BE>::Opd->getSize();
      bool InOpd = OpdStart <= SA && SA < OpdEnd;

      if (InOpd)
        SA = read64be(&Out<ELF64BE>::OpdBuf[SA - OpdStart]);
    }

    uint32_t Mask = 0x03FFFFFC;
    if (!isInt<24>(SA - P))
      error("Relocation R_PPC64_REL24 overflow");
    write32be(Loc, (read32be(Loc) & ~Mask) | ((SA - P) & Mask));

    uint32_t Nop = 0x60000000;
    if (InPlt && Loc + 8 <= BufEnd && read32be(Loc + 4) == Nop)
      write32be(Loc + 4, 0xe8410028); // ld %r2, 40(%r1)
    break;
  }
  case R_PPC64_REL32:
    if (!isInt<32>(SA - P))
      error("Relocation R_PPC64_REL32 overflow");
    write32be(Loc, SA - P);
    break;
  case R_PPC64_REL64:
    write64be(Loc, SA - P);
    break;
  case R_PPC64_ADDR64:
  case R_PPC64_TOC:
    write64be(Loc, SA);
    break;
  default:
    error("unrecognized reloc " + Twine(Type));
  }
}

AArch64TargetInfo::AArch64TargetInfo() {
  PltReloc = R_AARCH64_JUMP_SLOT;
  LazyRelocations = true;
  PltEntrySize = 16;
  PltZeroEntrySize = 32;
}

unsigned AArch64TargetInfo::getPltRefReloc(unsigned Type) const { return Type; }

void AArch64TargetInfo::writeGotPltEntry(uint8_t *Buf, uint64_t Plt) const {
  write64le(Buf, Out<ELF64LE>::Plt->getVA());
}

void AArch64TargetInfo::writePltZeroEntry(uint8_t *Buf, uint64_t GotEntryAddr,
                                          uint64_t PltEntryAddr) const {
  const uint8_t PltData[] = {
      0xf0, 0x7b, 0xbf, 0xa9, // stp x16, x30, [sp,#-16]!
      0x10, 0x00, 0x00, 0x90, // adrp x16, Page(&(.plt.got[2]))
      0x11, 0x02, 0x40, 0xf9, // ldr x17, [x16, Offset(&(.plt.got[2]))]
      0x10, 0x02, 0x00, 0x91, // add x16, x16, Offset(&(.plt.got[2]))
      0x20, 0x02, 0x1f, 0xd6, // br x17
      0x1f, 0x20, 0x03, 0xd5, // nop
      0x1f, 0x20, 0x03, 0xd5, // nop
      0x1f, 0x20, 0x03, 0xd5  // nop
  };
  memcpy(Buf, PltData, sizeof(PltData));

  relocateOne(Buf + 4, Buf + 8, R_AARCH64_ADR_PREL_PG_HI21, PltEntryAddr + 4,
              GotEntryAddr + 16);
  relocateOne(Buf + 8, Buf + 12, R_AARCH64_LDST64_ABS_LO12_NC, PltEntryAddr + 8,
              GotEntryAddr + 16);
  relocateOne(Buf + 12, Buf + 16, R_AARCH64_ADD_ABS_LO12_NC, PltEntryAddr + 12,
              GotEntryAddr + 16);
}

void AArch64TargetInfo::writePltEntry(uint8_t *Buf, uint64_t GotEntryAddr,
                                      uint64_t PltEntryAddr,
                                      int32_t Index) const {
  const uint8_t Inst[] = {
      0x10, 0x00, 0x00, 0x90, // adrp x16, Page(&(.plt.got[n]))
      0x11, 0x02, 0x40, 0xf9, // ldr x17, [x16, Offset(&(.plt.got[n]))]
      0x10, 0x02, 0x00, 0x91, // add x16, x16, Offset(&(.plt.got[n]))
      0x20, 0x02, 0x1f, 0xd6  // br x17
  };
  memcpy(Buf, Inst, sizeof(Inst));

  relocateOne(Buf, Buf + 4, R_AARCH64_ADR_PREL_PG_HI21, PltEntryAddr,
              GotEntryAddr);
  relocateOne(Buf + 4, Buf + 8, R_AARCH64_LDST64_ABS_LO12_NC, PltEntryAddr + 4,
              GotEntryAddr);
  relocateOne(Buf + 8, Buf + 12, R_AARCH64_ADD_ABS_LO12_NC, PltEntryAddr + 8,
              GotEntryAddr);
}

bool AArch64TargetInfo::relocNeedsGot(uint32_t Type,
                                      const SymbolBody &S) const {
  return relocNeedsPlt(Type, S);
}

bool AArch64TargetInfo::relocNeedsPlt(uint32_t Type,
                                      const SymbolBody &S) const {
  switch (Type) {
  default:
    return false;
  case R_AARCH64_JUMP26:
  case R_AARCH64_CALL26:
    return canBePreempted(&S, true);
  }
}

static void updateAArch64Adr(uint8_t *L, uint64_t Imm) {
  uint32_t ImmLo = (Imm & 0x3) << 29;
  uint32_t ImmHi = ((Imm & 0x1FFFFC) >> 2) << 5;
  uint64_t Mask = (0x3 << 29) | (0x7FFFF << 5);
  write32le(L, (read32le(L) & ~Mask) | ImmLo | ImmHi);
}

// Page(Expr) is the page address of the expression Expr, defined
// as (Expr & ~0xFFF). (This applies even if the machine page size
// supported by the platform has a different value.)
static uint64_t getAArch64Page(uint64_t Expr) {
  return Expr & (~static_cast<uint64_t>(0xFFF));
}

void AArch64TargetInfo::relocateOne(uint8_t *Loc, uint8_t *BufEnd,
                                    uint32_t Type, uint64_t P,
                                    uint64_t SA) const {
  switch (Type) {
  case R_AARCH64_ABS16:
    if (!isInt<16>(SA))
      error("Relocation R_AARCH64_ABS16 out of range");
    write16le(Loc, SA);
    break;
  case R_AARCH64_ABS32:
    if (!isInt<32>(SA))
      error("Relocation R_AARCH64_ABS32 out of range");
    write32le(Loc, SA);
    break;
  case R_AARCH64_ABS64:
    // No overflow check needed.
    write64le(Loc, SA);
    break;
  case R_AARCH64_ADD_ABS_LO12_NC:
    // No overflow check needed.
    // This relocation stores only 12 bits and there's no instruction to do
    // that, so we do a 32-bit store of the value bitwise-or'ed into Loc.
    // This assumes that the addend bits in Loc are zero.
    or32le(Loc, (SA & 0xFFF) << 10);
    break;
  case R_AARCH64_ADR_PREL_LO21: {
    uint64_t X = SA - P;
    if (!isInt<21>(X))
      error("Relocation R_AARCH64_ADR_PREL_LO21 out of range");
    updateAArch64Adr(Loc, X & 0x1FFFFF);
    break;
  }
  case R_AARCH64_ADR_PREL_PG_HI21: {
    uint64_t X = getAArch64Page(SA) - getAArch64Page(P);
    if (!isInt<33>(X))
      error("Relocation R_AARCH64_ADR_PREL_PG_HI21 out of range");
    updateAArch64Adr(Loc, (X >> 12) & 0x1FFFFF); // X[32:12]
    break;
  }
  case R_AARCH64_JUMP26:
  case R_AARCH64_CALL26: {
    uint64_t X = SA - P;
    if (!isInt<28>(X)) {
      if (Type == R_AARCH64_JUMP26)
        error("Relocation R_AARCH64_JUMP26 out of range");
      error("Relocation R_AARCH64_CALL26 out of range");
    }
    or32le(Loc, (X & 0x0FFFFFFC) >> 2);
    break;
  }
  case R_AARCH64_LDST64_ABS_LO12_NC:
    // No overflow check needed.
    or32le(Loc, (SA & 0xFF8) << 7);
    break;
  case R_AARCH64_PREL16:
    if (!isInt<16>(SA - P))
      error("Relocation R_AARCH64_PREL16 out of range");
    write16le(Loc, SA - P);
    break;
  case R_AARCH64_PREL32:
    if (!isInt<32>(SA - P))
      error("Relocation R_AARCH64_PREL32 out of range");
    write32le(Loc, SA - P);
    break;
  case R_AARCH64_PREL64:
    // No overflow check needed.
    write64le(Loc, SA - P);
    break;
  default:
    error("unrecognized reloc " + Twine(Type));
  }
}

template <class ELFT> MipsTargetInfo<ELFT>::MipsTargetInfo() {
  PageSize = 65536;
  GotRefReloc = R_MIPS_GOT16;
  GotHeaderEntriesNum = 2;
}

template <class ELFT>
void MipsTargetInfo<ELFT>::writeGotHeaderEntries(uint8_t *Buf) const {
  typedef typename llvm::object::ELFFile<ELFT>::Elf_Off Elf_Off;
  auto *P = reinterpret_cast<Elf_Off *>(Buf);
  // Module pointer
  P[1] = ELFT::Is64Bits ? 0x8000000000000000 : 0x80000000;
}

template <class ELFT>
void MipsTargetInfo<ELFT>::writeGotPltEntry(uint8_t *Buf, uint64_t Plt) const {}
template <class ELFT>
void MipsTargetInfo<ELFT>::writePltZeroEntry(uint8_t *Buf, uint64_t GotEntryAddr,
                                             uint64_t PltEntryAddr) const {}
template <class ELFT>
void MipsTargetInfo<ELFT>::writePltEntry(uint8_t *Buf, uint64_t GotEntryAddr,
                                         uint64_t PltEntryAddr,
                                         int32_t Index) const {}

template <class ELFT>
bool MipsTargetInfo<ELFT>::relocNeedsGot(uint32_t Type,
                                         const SymbolBody &S) const {
  return Type == R_MIPS_GOT16;
}

template <class ELFT>
bool MipsTargetInfo<ELFT>::relocNeedsPlt(uint32_t Type,
                                         const SymbolBody &S) const {
  return false;
}

template <class ELFT>
void MipsTargetInfo<ELFT>::relocateOne(uint8_t *Loc, uint8_t *BufEnd,
                                       uint32_t Type, uint64_t P,
                                       uint64_t SA) const {
  const endianness E = ELFT::TargetEndianness;
  switch (Type) {
  case R_MIPS_32:
    add32<E>(Loc, SA);
    break;
  case R_MIPS_GOT16: {
    int64_t V = SA - getMipsGpAddr<ELFT>();
    if (!isInt<16>(V))
      error("Relocation R_MIPS_GOT16 out of range");
    write32<E>(Loc, (read32<E>(Loc) & 0xffff0000) | (V & 0xffff));
    break;
  }
  default:
    error("unrecognized reloc " + Twine(Type));
  }
}

template <class ELFT>
typename llvm::object::ELFFile<ELFT>::uintX_t getMipsGpAddr() {
  const unsigned GPOffset = 0x7ff0;
  return Out<ELFT>::Got->getVA() ? (Out<ELFT>::Got->getVA() + GPOffset) : 0;
}

template uint32_t getMipsGpAddr<ELF32LE>();
template uint32_t getMipsGpAddr<ELF32BE>();
template uint64_t getMipsGpAddr<ELF64LE>();
template uint64_t getMipsGpAddr<ELF64BE>();
}
}