diff --git a/lib/std/elf.zig b/lib/std/elf.zig index 06de9eb538..e40c215e83 100644 --- a/lib/std/elf.zig +++ b/lib/std/elf.zig @@ -1808,426 +1808,426 @@ pub const COMPRESS = enum(u32) { /// AMD x86-64 relocations. pub const R_X86_64 = enum(u32) { /// No reloc - R_X86_64_NONE = 0, + NONE = 0, /// Direct 64 bit - R_X86_64_64 = 1, + @"64" = 1, /// PC relative 32 bit signed - R_X86_64_PC32 = 2, + PC32 = 2, /// 32 bit GOT entry - R_X86_64_GOT32 = 3, + GOT32 = 3, /// 32 bit PLT address - R_X86_64_PLT32 = 4, + PLT32 = 4, /// Copy symbol at runtime - R_X86_64_COPY = 5, + COPY = 5, /// Create GOT entry - R_X86_64_GLOB_DAT = 6, + GLOB_DAT = 6, /// Create PLT entry - R_X86_64_JUMP_SLOT = 7, + JUMP_SLOT = 7, /// Adjust by program base - R_X86_64_RELATIVE = 8, + RELATIVE = 8, /// 32 bit signed PC relative offset to GOT - R_X86_64_GOTPCREL = 9, + GOTPCREL = 9, /// Direct 32 bit zero extended - R_X86_64_32 = 10, + @"32" = 10, /// Direct 32 bit sign extended - R_X86_64_32S = 11, + @"32S" = 11, /// Direct 16 bit zero extended - R_X86_64_16 = 12, + @"16" = 12, /// 16 bit sign extended pc relative - R_X86_64_PC16 = 13, + PC16 = 13, /// Direct 8 bit sign extended - R_X86_64_8 = 14, + @"8" = 14, /// 8 bit sign extended pc relative - R_X86_64_PC8 = 15, + PC8 = 15, /// ID of module containing symbol - R_X86_64_DTPMOD64 = 16, + DTPMOD64 = 16, /// Offset in module's TLS block - R_X86_64_DTPOFF64 = 17, + DTPOFF64 = 17, /// Offset in initial TLS block - R_X86_64_TPOFF64 = 18, + TPOFF64 = 18, /// 32 bit signed PC relative offset to two GOT entries for GD symbol - R_X86_64_TLSGD = 19, + TLSGD = 19, /// 32 bit signed PC relative offset to two GOT entries for LD symbol - R_X86_64_TLSLD = 20, + TLSLD = 20, /// Offset in TLS block - R_X86_64_DTPOFF32 = 21, + DTPOFF32 = 21, /// 32 bit signed PC relative offset to GOT entry for IE symbol - R_X86_64_GOTTPOFF = 22, + GOTTPOFF = 22, /// Offset in initial TLS block - R_X86_64_TPOFF32 = 23, + TPOFF32 = 23, /// PC relative 64 bit - R_X86_64_PC64 = 24, + PC64 = 24, /// 64 bit offset to GOT - R_X86_64_GOTOFF64 = 25, + GOTOFF64 = 25, /// 32 bit signed pc relative offset to GOT - R_X86_64_GOTPC32 = 26, + GOTPC32 = 26, /// 64 bit GOT entry offset - R_X86_64_GOT64 = 27, + GOT64 = 27, /// 64 bit PC relative offset to GOT entry - R_X86_64_GOTPCREL64 = 28, + GOTPCREL64 = 28, /// 64 bit PC relative offset to GOT - R_X86_64_GOTPC64 = 29, + GOTPC64 = 29, /// Like GOT64, says PLT entry needed - R_X86_64_GOTPLT64 = 30, + GOTPLT64 = 30, /// 64-bit GOT relative offset to PLT entry - R_X86_64_PLTOFF64 = 31, + PLTOFF64 = 31, /// Size of symbol plus 32-bit addend - R_X86_64_SIZE32 = 32, + SIZE32 = 32, /// Size of symbol plus 64-bit addend - R_X86_64_SIZE64 = 33, + SIZE64 = 33, /// GOT offset for TLS descriptor - R_X86_64_GOTPC32_TLSDESC = 34, + GOTPC32_TLSDESC = 34, /// Marker for call through TLS descriptor - R_X86_64_TLSDESC_CALL = 35, + TLSDESC_CALL = 35, /// TLS descriptor - R_X86_64_TLSDESC = 36, + TLSDESC = 36, /// Adjust indirectly by program base - R_X86_64_IRELATIVE = 37, + IRELATIVE = 37, /// 64-bit adjust by program base - R_X86_64_RELATIVE64 = 38, - /// 39 Reserved was R_X86_64_PC32_BND - /// 40 Reserved was R_X86_64_PLT32_BND + RELATIVE64 = 38, + /// 39 Reserved was PC32_BND + /// 40 Reserved was PLT32_BND /// Load from 32 bit signed pc relative offset to GOT entry without REX prefix, relaxable - R_X86_64_GOTPCRELX = 41, + GOTPCRELX = 41, /// Load from 32 bit signed PC relative offset to GOT entry with REX prefix, relaxable - R_X86_64_REX_GOTPCRELX = 42, + REX_GOTPCRELX = 42, _, }; /// 
AArch64 relocs. pub const R_AARCH64 = enum(u32) { /// No relocation. - R_AARCH64_NONE = 0, + NONE = 0, /// ILP32 AArch64 relocs. /// Direct 32 bit. - R_AARCH64_P32_ABS32 = 1, + P32_ABS32 = 1, /// Copy symbol at runtime. - R_AARCH64_P32_COPY = 180, + P32_COPY = 180, /// Create GOT entry. - R_AARCH64_P32_GLOB_DAT = 181, + P32_GLOB_DAT = 181, /// Create PLT entry. - R_AARCH64_P32_JUMP_SLOT = 182, + P32_JUMP_SLOT = 182, /// Adjust by program base. - R_AARCH64_P32_RELATIVE = 183, + P32_RELATIVE = 183, /// Module number, 32 bit. - R_AARCH64_P32_TLS_DTPMOD = 184, + P32_TLS_DTPMOD = 184, /// Module-relative offset, 32 bit. - R_AARCH64_P32_TLS_DTPREL = 185, + P32_TLS_DTPREL = 185, /// TP-relative offset, 32 bit. - R_AARCH64_P32_TLS_TPREL = 186, + P32_TLS_TPREL = 186, /// TLS Descriptor. - R_AARCH64_P32_TLSDESC = 187, + P32_TLSDESC = 187, /// STT_GNU_IFUNC relocation. - R_AARCH64_P32_IRELATIVE = 188, + P32_IRELATIVE = 188, /// LP64 AArch64 relocs. /// Direct 64 bit. - R_AARCH64_ABS64 = 257, + ABS64 = 257, /// Direct 32 bit. - R_AARCH64_ABS32 = 258, + ABS32 = 258, /// Direct 16-bit. - R_AARCH64_ABS16 = 259, + ABS16 = 259, /// PC-relative 64-bit. - R_AARCH64_PREL64 = 260, + PREL64 = 260, /// PC-relative 32-bit. - R_AARCH64_PREL32 = 261, + PREL32 = 261, /// PC-relative 16-bit. - R_AARCH64_PREL16 = 262, + PREL16 = 262, /// Dir. MOVZ imm. from bits 15:0. - R_AARCH64_MOVW_UABS_G0 = 263, + MOVW_UABS_G0 = 263, /// Likewise for MOVK; no check. - R_AARCH64_MOVW_UABS_G0_NC = 264, + MOVW_UABS_G0_NC = 264, /// Dir. MOVZ imm. from bits 31:16. - R_AARCH64_MOVW_UABS_G1 = 265, + MOVW_UABS_G1 = 265, /// Likewise for MOVK; no check. - R_AARCH64_MOVW_UABS_G1_NC = 266, + MOVW_UABS_G1_NC = 266, /// Dir. MOVZ imm. from bits 47:32. - R_AARCH64_MOVW_UABS_G2 = 267, + MOVW_UABS_G2 = 267, /// Likewise for MOVK; no check. - R_AARCH64_MOVW_UABS_G2_NC = 268, + MOVW_UABS_G2_NC = 268, /// Dir. MOV{K,Z} imm. from 63:48. - R_AARCH64_MOVW_UABS_G3 = 269, + MOVW_UABS_G3 = 269, /// Dir. MOV{N,Z} imm. from 15:0. - R_AARCH64_MOVW_SABS_G0 = 270, + MOVW_SABS_G0 = 270, /// Dir. MOV{N,Z} imm. from 31:16. - R_AARCH64_MOVW_SABS_G1 = 271, + MOVW_SABS_G1 = 271, /// Dir. MOV{N,Z} imm. from 47:32. - R_AARCH64_MOVW_SABS_G2 = 272, + MOVW_SABS_G2 = 272, /// PC-rel. LD imm. from bits 20:2. - R_AARCH64_LD_PREL_LO19 = 273, + LD_PREL_LO19 = 273, /// PC-rel. ADR imm. from bits 20:0. - R_AARCH64_ADR_PREL_LO21 = 274, + ADR_PREL_LO21 = 274, /// Page-rel. ADRP imm. from 32:12. - R_AARCH64_ADR_PREL_PG_HI21 = 275, + ADR_PREL_PG_HI21 = 275, /// Likewise; no overflow check. - R_AARCH64_ADR_PREL_PG_HI21_NC = 276, + ADR_PREL_PG_HI21_NC = 276, /// Dir. ADD imm. from bits 11:0. - R_AARCH64_ADD_ABS_LO12_NC = 277, + ADD_ABS_LO12_NC = 277, /// Likewise for LD/ST; no check. - R_AARCH64_LDST8_ABS_LO12_NC = 278, + LDST8_ABS_LO12_NC = 278, /// PC-rel. TBZ/TBNZ imm. from 15:2. - R_AARCH64_TSTBR14 = 279, + TSTBR14 = 279, /// PC-rel. cond. br. imm. from 20:2. - R_AARCH64_CONDBR19 = 280, + CONDBR19 = 280, /// PC-rel. B imm. from bits 27:2. - R_AARCH64_JUMP26 = 282, + JUMP26 = 282, /// Likewise for CALL. - R_AARCH64_CALL26 = 283, + CALL26 = 283, /// Dir. ADD imm. from bits 11:1. - R_AARCH64_LDST16_ABS_LO12_NC = 284, + LDST16_ABS_LO12_NC = 284, /// Likewise for bits 11:2. - R_AARCH64_LDST32_ABS_LO12_NC = 285, + LDST32_ABS_LO12_NC = 285, /// Likewise for bits 11:3. - R_AARCH64_LDST64_ABS_LO12_NC = 286, + LDST64_ABS_LO12_NC = 286, /// PC-rel. MOV{N,Z} imm. from 15:0. - R_AARCH64_MOVW_PREL_G0 = 287, + MOVW_PREL_G0 = 287, /// Likewise for MOVK; no check. 
- R_AARCH64_MOVW_PREL_G0_NC = 288, + MOVW_PREL_G0_NC = 288, /// PC-rel. MOV{N,Z} imm. from 31:16. - R_AARCH64_MOVW_PREL_G1 = 289, + MOVW_PREL_G1 = 289, /// Likewise for MOVK; no check. - R_AARCH64_MOVW_PREL_G1_NC = 290, + MOVW_PREL_G1_NC = 290, /// PC-rel. MOV{N,Z} imm. from 47:32. - R_AARCH64_MOVW_PREL_G2 = 291, + MOVW_PREL_G2 = 291, /// Likewise for MOVK; no check. - R_AARCH64_MOVW_PREL_G2_NC = 292, + MOVW_PREL_G2_NC = 292, /// PC-rel. MOV{N,Z} imm. from 63:48. - R_AARCH64_MOVW_PREL_G3 = 293, + MOVW_PREL_G3 = 293, /// Dir. ADD imm. from bits 11:4. - R_AARCH64_LDST128_ABS_LO12_NC = 299, + LDST128_ABS_LO12_NC = 299, /// GOT-rel. off. MOV{N,Z} imm. 15:0. - R_AARCH64_MOVW_GOTOFF_G0 = 300, + MOVW_GOTOFF_G0 = 300, /// Likewise for MOVK; no check. - R_AARCH64_MOVW_GOTOFF_G0_NC = 301, + MOVW_GOTOFF_G0_NC = 301, /// GOT-rel. o. MOV{N,Z} imm. 31:16. - R_AARCH64_MOVW_GOTOFF_G1 = 302, + MOVW_GOTOFF_G1 = 302, /// Likewise for MOVK; no check. - R_AARCH64_MOVW_GOTOFF_G1_NC = 303, + MOVW_GOTOFF_G1_NC = 303, /// GOT-rel. o. MOV{N,Z} imm. 47:32. - R_AARCH64_MOVW_GOTOFF_G2 = 304, + MOVW_GOTOFF_G2 = 304, /// Likewise for MOVK; no check. - R_AARCH64_MOVW_GOTOFF_G2_NC = 305, + MOVW_GOTOFF_G2_NC = 305, /// GOT-rel. o. MOV{N,Z} imm. 63:48. - R_AARCH64_MOVW_GOTOFF_G3 = 306, + MOVW_GOTOFF_G3 = 306, /// GOT-relative 64-bit. - R_AARCH64_GOTREL64 = 307, + GOTREL64 = 307, /// GOT-relative 32-bit. - R_AARCH64_GOTREL32 = 308, + GOTREL32 = 308, /// PC-rel. GOT off. load imm. 20:2. - R_AARCH64_GOT_LD_PREL19 = 309, + GOT_LD_PREL19 = 309, /// GOT-rel. off. LD/ST imm. 14:3. - R_AARCH64_LD64_GOTOFF_LO15 = 310, + LD64_GOTOFF_LO15 = 310, /// P-page-rel. GOT off. ADRP 32:12. - R_AARCH64_ADR_GOT_PAGE = 311, + ADR_GOT_PAGE = 311, /// Dir. GOT off. LD/ST imm. 11:3. - R_AARCH64_LD64_GOT_LO12_NC = 312, + LD64_GOT_LO12_NC = 312, /// GOT-page-rel. GOT off. LD/ST 14:3 - R_AARCH64_LD64_GOTPAGE_LO15 = 313, + LD64_GOTPAGE_LO15 = 313, /// PC-relative ADR imm. 20:0. - R_AARCH64_TLSGD_ADR_PREL21 = 512, + TLSGD_ADR_PREL21 = 512, /// page-rel. ADRP imm. 32:12. - R_AARCH64_TLSGD_ADR_PAGE21 = 513, + TLSGD_ADR_PAGE21 = 513, /// direct ADD imm. from 11:0. - R_AARCH64_TLSGD_ADD_LO12_NC = 514, + TLSGD_ADD_LO12_NC = 514, /// GOT-rel. MOV{N,Z} 31:16. - R_AARCH64_TLSGD_MOVW_G1 = 515, + TLSGD_MOVW_G1 = 515, /// GOT-rel. MOVK imm. 15:0. - R_AARCH64_TLSGD_MOVW_G0_NC = 516, + TLSGD_MOVW_G0_NC = 516, /// Like 512; local dynamic model. - R_AARCH64_TLSLD_ADR_PREL21 = 517, + TLSLD_ADR_PREL21 = 517, /// Like 513; local dynamic model. - R_AARCH64_TLSLD_ADR_PAGE21 = 518, + TLSLD_ADR_PAGE21 = 518, /// Like 514; local dynamic model. - R_AARCH64_TLSLD_ADD_LO12_NC = 519, + TLSLD_ADD_LO12_NC = 519, /// Like 515; local dynamic model. - R_AARCH64_TLSLD_MOVW_G1 = 520, + TLSLD_MOVW_G1 = 520, /// Like 516; local dynamic model. - R_AARCH64_TLSLD_MOVW_G0_NC = 521, + TLSLD_MOVW_G0_NC = 521, /// TLS PC-rel. load imm. 20:2. - R_AARCH64_TLSLD_LD_PREL19 = 522, + TLSLD_LD_PREL19 = 522, /// TLS DTP-rel. MOV{N,Z} 47:32. - R_AARCH64_TLSLD_MOVW_DTPREL_G2 = 523, + TLSLD_MOVW_DTPREL_G2 = 523, /// TLS DTP-rel. MOV{N,Z} 31:16. - R_AARCH64_TLSLD_MOVW_DTPREL_G1 = 524, + TLSLD_MOVW_DTPREL_G1 = 524, /// Likewise; MOVK; no check. - R_AARCH64_TLSLD_MOVW_DTPREL_G1_NC = 525, + TLSLD_MOVW_DTPREL_G1_NC = 525, /// TLS DTP-rel. MOV{N,Z} 15:0. - R_AARCH64_TLSLD_MOVW_DTPREL_G0 = 526, + TLSLD_MOVW_DTPREL_G0 = 526, /// Likewise; MOVK; no check. - R_AARCH64_TLSLD_MOVW_DTPREL_G0_NC = 527, + TLSLD_MOVW_DTPREL_G0_NC = 527, /// DTP-rel. ADD imm. from 23:12. 
- R_AARCH64_TLSLD_ADD_DTPREL_HI12 = 528, + TLSLD_ADD_DTPREL_HI12 = 528, /// DTP-rel. ADD imm. from 11:0. - R_AARCH64_TLSLD_ADD_DTPREL_LO12 = 529, + TLSLD_ADD_DTPREL_LO12 = 529, /// Likewise; no ovfl. check. - R_AARCH64_TLSLD_ADD_DTPREL_LO12_NC = 530, + TLSLD_ADD_DTPREL_LO12_NC = 530, /// DTP-rel. LD/ST imm. 11:0. - R_AARCH64_TLSLD_LDST8_DTPREL_LO12 = 531, + TLSLD_LDST8_DTPREL_LO12 = 531, /// Likewise; no check. - R_AARCH64_TLSLD_LDST8_DTPREL_LO12_NC = 532, + TLSLD_LDST8_DTPREL_LO12_NC = 532, /// DTP-rel. LD/ST imm. 11:1. - R_AARCH64_TLSLD_LDST16_DTPREL_LO12 = 533, + TLSLD_LDST16_DTPREL_LO12 = 533, /// Likewise; no check. - R_AARCH64_TLSLD_LDST16_DTPREL_LO12_NC = 534, + TLSLD_LDST16_DTPREL_LO12_NC = 534, /// DTP-rel. LD/ST imm. 11:2. - R_AARCH64_TLSLD_LDST32_DTPREL_LO12 = 535, + TLSLD_LDST32_DTPREL_LO12 = 535, /// Likewise; no check. - R_AARCH64_TLSLD_LDST32_DTPREL_LO12_NC = 536, + TLSLD_LDST32_DTPREL_LO12_NC = 536, /// DTP-rel. LD/ST imm. 11:3. - R_AARCH64_TLSLD_LDST64_DTPREL_LO12 = 537, + TLSLD_LDST64_DTPREL_LO12 = 537, /// Likewise; no check. - R_AARCH64_TLSLD_LDST64_DTPREL_LO12_NC = 538, + TLSLD_LDST64_DTPREL_LO12_NC = 538, /// GOT-rel. MOV{N,Z} 31:16. - R_AARCH64_TLSIE_MOVW_GOTTPREL_G1 = 539, + TLSIE_MOVW_GOTTPREL_G1 = 539, /// GOT-rel. MOVK 15:0. - R_AARCH64_TLSIE_MOVW_GOTTPREL_G0_NC = 540, + TLSIE_MOVW_GOTTPREL_G0_NC = 540, /// Page-rel. ADRP 32:12. - R_AARCH64_TLSIE_ADR_GOTTPREL_PAGE21 = 541, + TLSIE_ADR_GOTTPREL_PAGE21 = 541, /// Direct LD off. 11:3. - R_AARCH64_TLSIE_LD64_GOTTPREL_LO12_NC = 542, + TLSIE_LD64_GOTTPREL_LO12_NC = 542, /// PC-rel. load imm. 20:2. - R_AARCH64_TLSIE_LD_GOTTPREL_PREL19 = 543, + TLSIE_LD_GOTTPREL_PREL19 = 543, /// TLS TP-rel. MOV{N,Z} 47:32. - R_AARCH64_TLSLE_MOVW_TPREL_G2 = 544, + TLSLE_MOVW_TPREL_G2 = 544, /// TLS TP-rel. MOV{N,Z} 31:16. - R_AARCH64_TLSLE_MOVW_TPREL_G1 = 545, + TLSLE_MOVW_TPREL_G1 = 545, /// Likewise; MOVK; no check. - R_AARCH64_TLSLE_MOVW_TPREL_G1_NC = 546, + TLSLE_MOVW_TPREL_G1_NC = 546, /// TLS TP-rel. MOV{N,Z} 15:0. - R_AARCH64_TLSLE_MOVW_TPREL_G0 = 547, + TLSLE_MOVW_TPREL_G0 = 547, /// Likewise; MOVK; no check. - R_AARCH64_TLSLE_MOVW_TPREL_G0_NC = 548, + TLSLE_MOVW_TPREL_G0_NC = 548, /// TP-rel. ADD imm. 23:12. - R_AARCH64_TLSLE_ADD_TPREL_HI12 = 549, + TLSLE_ADD_TPREL_HI12 = 549, /// TP-rel. ADD imm. 11:0. - R_AARCH64_TLSLE_ADD_TPREL_LO12 = 550, + TLSLE_ADD_TPREL_LO12 = 550, /// Likewise; no ovfl. check. - R_AARCH64_TLSLE_ADD_TPREL_LO12_NC = 551, + TLSLE_ADD_TPREL_LO12_NC = 551, /// TP-rel. LD/ST off. 11:0. - R_AARCH64_TLSLE_LDST8_TPREL_LO12 = 552, + TLSLE_LDST8_TPREL_LO12 = 552, /// Likewise; no ovfl. check. - R_AARCH64_TLSLE_LDST8_TPREL_LO12_NC = 553, + TLSLE_LDST8_TPREL_LO12_NC = 553, /// TP-rel. LD/ST off. 11:1. - R_AARCH64_TLSLE_LDST16_TPREL_LO12 = 554, + TLSLE_LDST16_TPREL_LO12 = 554, /// Likewise; no check. - R_AARCH64_TLSLE_LDST16_TPREL_LO12_NC = 555, + TLSLE_LDST16_TPREL_LO12_NC = 555, /// TP-rel. LD/ST off. 11:2. - R_AARCH64_TLSLE_LDST32_TPREL_LO12 = 556, + TLSLE_LDST32_TPREL_LO12 = 556, /// Likewise; no check. - R_AARCH64_TLSLE_LDST32_TPREL_LO12_NC = 557, + TLSLE_LDST32_TPREL_LO12_NC = 557, /// TP-rel. LD/ST off. 11:3. - R_AARCH64_TLSLE_LDST64_TPREL_LO12 = 558, + TLSLE_LDST64_TPREL_LO12 = 558, /// Likewise; no check. - R_AARCH64_TLSLE_LDST64_TPREL_LO12_NC = 559, + TLSLE_LDST64_TPREL_LO12_NC = 559, /// PC-rel. load immediate 20:2. - R_AARCH64_TLSDESC_LD_PREL19 = 560, + TLSDESC_LD_PREL19 = 560, /// PC-rel. ADR immediate 20:0. - R_AARCH64_TLSDESC_ADR_PREL21 = 561, + TLSDESC_ADR_PREL21 = 561, /// Page-rel. ADRP imm. 32:12. 
- R_AARCH64_TLSDESC_ADR_PAGE21 = 562, + TLSDESC_ADR_PAGE21 = 562, /// Direct LD off. from 11:3. - R_AARCH64_TLSDESC_LD64_LO12 = 563, + TLSDESC_LD64_LO12 = 563, /// Direct ADD imm. from 11:0. - R_AARCH64_TLSDESC_ADD_LO12 = 564, + TLSDESC_ADD_LO12 = 564, /// GOT-rel. MOV{N,Z} imm. 31:16. - R_AARCH64_TLSDESC_OFF_G1 = 565, + TLSDESC_OFF_G1 = 565, /// GOT-rel. MOVK imm. 15:0; no ck. - R_AARCH64_TLSDESC_OFF_G0_NC = 566, + TLSDESC_OFF_G0_NC = 566, /// Relax LDR. - R_AARCH64_TLSDESC_LDR = 567, + TLSDESC_LDR = 567, /// Relax ADD. - R_AARCH64_TLSDESC_ADD = 568, + TLSDESC_ADD = 568, /// Relax BLR. - R_AARCH64_TLSDESC_CALL = 569, + TLSDESC_CALL = 569, /// TP-rel. LD/ST off. 11:4. - R_AARCH64_TLSLE_LDST128_TPREL_LO12 = 570, + TLSLE_LDST128_TPREL_LO12 = 570, /// Likewise; no check. - R_AARCH64_TLSLE_LDST128_TPREL_LO12_NC = 571, + TLSLE_LDST128_TPREL_LO12_NC = 571, /// DTP-rel. LD/ST imm. 11:4. - R_AARCH64_TLSLD_LDST128_DTPREL_LO12 = 572, + TLSLD_LDST128_DTPREL_LO12 = 572, /// Likewise; no check. - R_AARCH64_TLSLD_LDST128_DTPREL_LO12_NC = 573, + TLSLD_LDST128_DTPREL_LO12_NC = 573, /// Copy symbol at runtime. - R_AARCH64_COPY = 1024, + COPY = 1024, /// Create GOT entry. - R_AARCH64_GLOB_DAT = 1025, + GLOB_DAT = 1025, /// Create PLT entry. - R_AARCH64_JUMP_SLOT = 1026, + JUMP_SLOT = 1026, /// Adjust by program base. - R_AARCH64_RELATIVE = 1027, + RELATIVE = 1027, /// Module number, 64 bit. - R_AARCH64_TLS_DTPMOD = 1028, + TLS_DTPMOD = 1028, /// Module-relative offset, 64 bit. - R_AARCH64_TLS_DTPREL = 1029, + TLS_DTPREL = 1029, /// TP-relative offset, 64 bit. - R_AARCH64_TLS_TPREL = 1030, + TLS_TPREL = 1030, /// TLS Descriptor. - R_AARCH64_TLSDESC = 1031, + TLSDESC = 1031, /// STT_GNU_IFUNC relocation. - R_AARCH64_IRELATIVE = 1032, + IRELATIVE = 1032, _, }; /// RISC-V relocations. 
pub const R_RISCV = enum(u32) { - R_RISCV_NONE = 0, - R_RISCV_32 = 1, - R_RISCV_64 = 2, - R_RISCV_RELATIVE = 3, - R_RISCV_COPY = 4, - R_RISCV_JUMP_SLOT = 5, - R_RISCV_TLS_DTPMOD32 = 6, - R_RISCV_TLS_DTPMOD64 = 7, - R_RISCV_TLS_DTPREL32 = 8, - R_RISCV_TLS_DTPREL64 = 9, - R_RISCV_TLS_TPREL32 = 10, - R_RISCV_TLS_TPREL64 = 11, - R_RISCV_TLSDESC = 12, - R_RISCV_BRANCH = 16, - R_RISCV_JAL = 17, - R_RISCV_CALL = 18, - R_RISCV_CALL_PLT = 19, - R_RISCV_GOT_HI20 = 20, - R_RISCV_TLS_GOT_HI20 = 21, - R_RISCV_TLS_GD_HI20 = 22, - R_RISCV_PCREL_HI20 = 23, - R_RISCV_PCREL_LO12_I = 24, - R_RISCV_PCREL_LO12_S = 25, - R_RISCV_HI20 = 26, - R_RISCV_LO12_I = 27, - R_RISCV_LO12_S = 28, - R_RISCV_TPREL_HI20 = 29, - R_RISCV_TPREL_LO12_I = 30, - R_RISCV_TPREL_LO12_S = 31, - R_RISCV_TPREL_ADD = 32, - R_RISCV_ADD8 = 33, - R_RISCV_ADD16 = 34, - R_RISCV_ADD32 = 35, - R_RISCV_ADD64 = 36, - R_RISCV_SUB8 = 37, - R_RISCV_SUB16 = 38, - R_RISCV_SUB32 = 39, - R_RISCV_SUB64 = 40, - R_RISCV_GNU_VTINHERIT = 41, - R_RISCV_GNU_VTENTRY = 42, - R_RISCV_ALIGN = 43, - R_RISCV_RVC_BRANCH = 44, - R_RISCV_RVC_JUMP = 45, - R_RISCV_RVC_LUI = 46, - R_RISCV_GPREL_I = 47, - R_RISCV_GPREL_S = 48, - R_RISCV_TPREL_I = 49, - R_RISCV_TPREL_S = 50, - R_RISCV_RELAX = 51, - R_RISCV_SUB6 = 52, - R_RISCV_SET6 = 53, - R_RISCV_SET8 = 54, - R_RISCV_SET16 = 55, - R_RISCV_SET32 = 56, - R_RISCV_32_PCREL = 57, - R_RISCV_IRELATIVE = 58, - R_RISCV_PLT32 = 59, - R_RISCV_SET_ULEB128 = 60, - R_RISCV_SUB_ULEB128 = 61, + NONE = 0, + @"32" = 1, + @"64" = 2, + RELATIVE = 3, + COPY = 4, + JUMP_SLOT = 5, + TLS_DTPMOD32 = 6, + TLS_DTPMOD64 = 7, + TLS_DTPREL32 = 8, + TLS_DTPREL64 = 9, + TLS_TPREL32 = 10, + TLS_TPREL64 = 11, + TLSDESC = 12, + BRANCH = 16, + JAL = 17, + CALL = 18, + CALL_PLT = 19, + GOT_HI20 = 20, + TLS_GOT_HI20 = 21, + TLS_GD_HI20 = 22, + PCREL_HI20 = 23, + PCREL_LO12_I = 24, + PCREL_LO12_S = 25, + HI20 = 26, + LO12_I = 27, + LO12_S = 28, + TPREL_HI20 = 29, + TPREL_LO12_I = 30, + TPREL_LO12_S = 31, + TPREL_ADD = 32, + ADD8 = 33, + ADD16 = 34, + ADD32 = 35, + ADD64 = 36, + SUB8 = 37, + SUB16 = 38, + SUB32 = 39, + SUB64 = 40, + GNU_VTINHERIT = 41, + GNU_VTENTRY = 42, + ALIGN = 43, + RVC_BRANCH = 44, + RVC_JUMP = 45, + RVC_LUI = 46, + GPREL_I = 47, + GPREL_S = 48, + TPREL_I = 49, + TPREL_S = 50, + RELAX = 51, + SUB6 = 52, + SET6 = 53, + SET8 = 54, + SET16 = 55, + SET32 = 56, + @"32_PCREL" = 57, + IRELATIVE = 58, + PLT32 = 59, + SET_ULEB128 = 60, + SUB_ULEB128 = 61, _, }; diff --git a/src/arch/x86_64/Emit.zig b/src/arch/x86_64/Emit.zig index e341372f11..0975104db3 100644 --- a/src/arch/x86_64/Emit.zig +++ b/src/arch/x86_64/Emit.zig @@ -43,7 +43,7 @@ pub fn emitMir(emit: *Emit) Error!void { .linker_extern_fn => |symbol| if (emit.lower.bin_file.cast(link.File.Elf)) |elf_file| { // Add relocation to the decl. 
const atom_ptr = elf_file.symbol(symbol.atom_index).atom(elf_file).?; - const r_type = @intFromEnum(std.elf.R_X86_64.R_X86_64_PLT32); + const r_type = @intFromEnum(std.elf.R_X86_64.PLT32); try atom_ptr.addReloc(elf_file, .{ .r_offset = end_offset - 4, .r_info = (@as(u64, @intCast(symbol.sym_index)) << 32) | r_type, @@ -89,7 +89,7 @@ pub fn emitMir(emit: *Emit) Error!void { .linker_tlsld => |data| { const elf_file = emit.lower.bin_file.cast(link.File.Elf).?; const atom = elf_file.symbol(data.atom_index).atom(elf_file).?; - const r_type = @intFromEnum(std.elf.R_X86_64.R_X86_64_TLSLD); + const r_type = @intFromEnum(std.elf.R_X86_64.TLSLD); try atom.addReloc(elf_file, .{ .r_offset = end_offset - 4, .r_info = (@as(u64, @intCast(data.sym_index)) << 32) | r_type, @@ -99,7 +99,7 @@ pub fn emitMir(emit: *Emit) Error!void { .linker_dtpoff => |data| { const elf_file = emit.lower.bin_file.cast(link.File.Elf).?; const atom = elf_file.symbol(data.atom_index).atom(elf_file).?; - const r_type = @intFromEnum(std.elf.R_X86_64.R_X86_64_DTPOFF32); + const r_type = @intFromEnum(std.elf.R_X86_64.DTPOFF32); try atom.addReloc(elf_file, .{ .r_offset = end_offset - 4, .r_info = (@as(u64, @intCast(data.sym_index)) << 32) | r_type, @@ -122,9 +122,9 @@ pub fn emitMir(emit: *Emit) Error!void { const r_type: u32 = if (sym.flags.needs_zig_got and !is_obj_or_static_lib) link.File.Elf.R_ZIG_GOTPCREL else if (sym.flags.needs_got) - @intFromEnum(std.elf.R_X86_64.R_X86_64_GOTPCREL) + @intFromEnum(std.elf.R_X86_64.GOTPCREL) else - @intFromEnum(std.elf.R_X86_64.R_X86_64_PC32); + @intFromEnum(std.elf.R_X86_64.PC32); try atom.addReloc(elf_file, .{ .r_offset = end_offset - 4, .r_info = (@as(u64, @intCast(data.sym_index)) << 32) | r_type, @@ -132,7 +132,7 @@ pub fn emitMir(emit: *Emit) Error!void { }); } else { if (lowered_inst.encoding.mnemonic == .call and sym.flags.needs_zig_got and is_obj_or_static_lib) { - const r_type = @intFromEnum(std.elf.R_X86_64.R_X86_64_PC32); + const r_type = @intFromEnum(std.elf.R_X86_64.PC32); try atom.addReloc(elf_file, .{ .r_offset = end_offset - 4, .r_info = (@as(u64, @intCast(data.sym_index)) << 32) | r_type, @@ -142,11 +142,11 @@ pub fn emitMir(emit: *Emit) Error!void { const r_type: u32 = if (sym.flags.needs_zig_got and !is_obj_or_static_lib) link.File.Elf.R_ZIG_GOT32 else if (sym.flags.needs_got) - @intFromEnum(std.elf.R_X86_64.R_X86_64_GOT32) + @intFromEnum(std.elf.R_X86_64.GOT32) else if (sym.flags.is_tls) - @intFromEnum(std.elf.R_X86_64.R_X86_64_TPOFF32) + @intFromEnum(std.elf.R_X86_64.TPOFF32) else - @intFromEnum(std.elf.R_X86_64.R_X86_64_32); + @intFromEnum(std.elf.R_X86_64.@"32"); try atom.addReloc(elf_file, .{ .r_offset = end_offset - 4, .r_info = (@as(u64, @intCast(data.sym_index)) << 32) | r_type, diff --git a/src/link/Elf/Atom.zig b/src/link/Elf/Atom.zig index 558bd06d98..3064fd6f74 100644 --- a/src/link/Elf/Atom.zig +++ b/src/link/Elf/Atom.zig @@ -386,7 +386,7 @@ pub fn scanRelocsRequiresCode(self: Atom, elf_file: *Elf) bool { switch (cpu_arch) { .x86_64 => { const r_type: elf.R_X86_64 = @enumFromInt(rel.r_type()); - if (r_type == .R_X86_64_GOTTPOFF) return true; + if (r_type == .GOTTPOFF) return true; }, else => {}, } @@ -841,7 +841,7 @@ const x86_64 = struct { const rel = rels[i]; const r_type: elf.R_X86_64 = @enumFromInt(rel.r_type()); - if (r_type == .R_X86_64_NONE) continue; + if (r_type == .NONE) continue; const r_offset = std.math.cast(usize, rel.r_offset) orelse return error.Overflow; @@ -874,40 +874,40 @@ const x86_64 = struct { // While traversing relocations, mark symbols 
that require special handling such as // pointer indirection via GOT, or a stub trampoline via PLT. switch (r_type) { - .R_X86_64_64 => { + .@"64" => { try atom.scanReloc(symbol, rel, dynAbsRelocAction(symbol, elf_file), elf_file); }, - .R_X86_64_32, - .R_X86_64_32S, + .@"32", + .@"32S", => { try atom.scanReloc(symbol, rel, dynAbsRelocAction(symbol, elf_file), elf_file); }, - .R_X86_64_GOT32, - .R_X86_64_GOTPC32, - .R_X86_64_GOTPC64, - .R_X86_64_GOTPCREL, - .R_X86_64_GOTPCREL64, - .R_X86_64_GOTPCRELX, - .R_X86_64_REX_GOTPCRELX, + .GOT32, + .GOTPC32, + .GOTPC64, + .GOTPCREL, + .GOTPCREL64, + .GOTPCRELX, + .REX_GOTPCRELX, => { symbol.flags.needs_got = true; }, - .R_X86_64_PLT32, - .R_X86_64_PLTOFF64, + .PLT32, + .PLTOFF64, => { if (symbol.flags.import) { symbol.flags.needs_plt = true; } }, - .R_X86_64_PC32 => { + .PC32 => { try atom.scanReloc(symbol, rel, pcRelocAction(symbol, elf_file), elf_file); }, - .R_X86_64_TLSGD => { + .TLSGD => { // TODO verify followed by appropriate relocation such as PLT32 __tls_get_addr if (is_static or (!symbol.flags.import and !is_dyn_lib)) { @@ -922,7 +922,7 @@ const x86_64 = struct { } }, - .R_X86_64_TLSLD => { + .TLSLD => { // TODO verify followed by appropriate relocation such as PLT32 __tls_get_addr if (is_static or !is_dyn_lib) { @@ -934,7 +934,7 @@ const x86_64 = struct { } }, - .R_X86_64_GOTTPOFF => { + .GOTTPOFF => { const should_relax = blk: { if (is_dyn_lib or symbol.flags.import) break :blk false; if (!x86_64.canRelaxGotTpOff(code.?[r_offset - 3 ..])) break :blk false; @@ -945,25 +945,25 @@ const x86_64 = struct { } }, - .R_X86_64_GOTPC32_TLSDESC => { + .GOTPC32_TLSDESC => { const should_relax = is_static or (!is_dyn_lib and !symbol.flags.import); if (!should_relax) { symbol.flags.needs_tlsdesc = true; } }, - .R_X86_64_TPOFF32, - .R_X86_64_TPOFF64, + .TPOFF32, + .TPOFF64, => { if (is_dyn_lib) try atom.reportPicError(symbol, rel, elf_file); }, - .R_X86_64_GOTOFF64, - .R_X86_64_DTPOFF32, - .R_X86_64_DTPOFF64, - .R_X86_64_SIZE32, - .R_X86_64_SIZE64, - .R_X86_64_TLSDESC_CALL, + .GOTOFF64, + .DTPOFF32, + .DTPOFF64, + .SIZE32, + .SIZE64, + .TLSDESC_CALL, => {}, else => |x| switch (@intFromEnum(x)) { @@ -990,7 +990,7 @@ const x86_64 = struct { while (i < rels.len) : (i += 1) { const rel = rels[i]; const r_type: elf.R_X86_64 = @enumFromInt(rel.r_type()); - if (r_type == .R_X86_64_NONE) continue; + if (r_type == .NONE) continue; const target = switch (file_ptr) { .zig_object => |x| elf_file.symbol(x.symbol(rel.r_sym())), @@ -1040,9 +1040,9 @@ const x86_64 = struct { try stream.seekTo(r_offset); switch (r_type) { - .R_X86_64_NONE => unreachable, + .NONE => unreachable, - .R_X86_64_64 => { + .@"64" => { try atom.resolveDynAbsReloc( target, rel, @@ -1052,15 +1052,15 @@ const x86_64 = struct { ); }, - .R_X86_64_PLT32, - .R_X86_64_PC32, + .PLT32, + .PC32, => try cwriter.writeInt(i32, @as(i32, @intCast(S + A - P)), .little), - .R_X86_64_GOTPCREL => try cwriter.writeInt(i32, @as(i32, @intCast(G + GOT + A - P)), .little), - .R_X86_64_GOTPC32 => try cwriter.writeInt(i32, @as(i32, @intCast(GOT + A - P)), .little), - .R_X86_64_GOTPC64 => try cwriter.writeInt(i64, GOT + A - P, .little), + .GOTPCREL => try cwriter.writeInt(i32, @as(i32, @intCast(G + GOT + A - P)), .little), + .GOTPC32 => try cwriter.writeInt(i32, @as(i32, @intCast(GOT + A - P)), .little), + .GOTPC64 => try cwriter.writeInt(i64, GOT + A - P, .little), - .R_X86_64_GOTPCRELX => { + .GOTPCRELX => { if (!target.flags.import and !target.isIFunc(elf_file) and !target.isAbs(elf_file)) blk: { 
x86_64.relaxGotpcrelx(code[r_offset - 2 ..]) catch break :blk; try cwriter.writeInt(i32, @as(i32, @intCast(S + A - P)), .little); @@ -1069,7 +1069,7 @@ const x86_64 = struct { try cwriter.writeInt(i32, @as(i32, @intCast(G + GOT + A - P)), .little); }, - .R_X86_64_REX_GOTPCRELX => { + .REX_GOTPCRELX => { if (!target.flags.import and !target.isIFunc(elf_file) and !target.isAbs(elf_file)) blk: { x86_64.relaxRexGotpcrelx(code[r_offset - 3 ..]) catch break :blk; try cwriter.writeInt(i32, @as(i32, @intCast(S + A - P)), .little); @@ -1078,16 +1078,16 @@ const x86_64 = struct { try cwriter.writeInt(i32, @as(i32, @intCast(G + GOT + A - P)), .little); }, - .R_X86_64_32 => try cwriter.writeInt(u32, @as(u32, @truncate(@as(u64, @intCast(S + A)))), .little), - .R_X86_64_32S => try cwriter.writeInt(i32, @as(i32, @truncate(S + A)), .little), + .@"32" => try cwriter.writeInt(u32, @as(u32, @truncate(@as(u64, @intCast(S + A)))), .little), + .@"32S" => try cwriter.writeInt(i32, @as(i32, @truncate(S + A)), .little), - .R_X86_64_TPOFF32 => try cwriter.writeInt(i32, @as(i32, @truncate(S + A - TP)), .little), - .R_X86_64_TPOFF64 => try cwriter.writeInt(i64, S + A - TP, .little), + .TPOFF32 => try cwriter.writeInt(i32, @as(i32, @truncate(S + A - TP)), .little), + .TPOFF64 => try cwriter.writeInt(i64, S + A - TP, .little), - .R_X86_64_DTPOFF32 => try cwriter.writeInt(i32, @as(i32, @truncate(S + A - DTP)), .little), - .R_X86_64_DTPOFF64 => try cwriter.writeInt(i64, S + A - DTP, .little), + .DTPOFF32 => try cwriter.writeInt(i32, @as(i32, @truncate(S + A - DTP)), .little), + .DTPOFF64 => try cwriter.writeInt(i64, S + A - DTP, .little), - .R_X86_64_TLSGD => { + .TLSGD => { if (target.flags.has_tlsgd) { const S_ = @as(i64, @intCast(target.tlsGdAddress(elf_file))); try cwriter.writeInt(i32, @as(i32, @intCast(S_ + A - P)), .little); @@ -1107,7 +1107,7 @@ const x86_64 = struct { } }, - .R_X86_64_TLSLD => { + .TLSLD => { if (elf_file.got.tlsld_index) |entry_index| { const tlsld_entry = elf_file.got.entries.items[entry_index]; const S_ = @as(i64, @intCast(tlsld_entry.address(elf_file))); @@ -1124,7 +1124,7 @@ const x86_64 = struct { } }, - .R_X86_64_GOTPC32_TLSDESC => { + .GOTPC32_TLSDESC => { if (target.flags.has_tlsdesc) { const S_ = @as(i64, @intCast(target.tlsDescAddress(elf_file))); try cwriter.writeInt(i32, @as(i32, @intCast(S_ + A - P)), .little); @@ -1134,12 +1134,12 @@ const x86_64 = struct { } }, - .R_X86_64_TLSDESC_CALL => if (!target.flags.has_tlsdesc) { + .TLSDESC_CALL => if (!target.flags.has_tlsdesc) { // call -> nop try cwriter.writeAll(&.{ 0x66, 0x90 }); }, - .R_X86_64_GOTTPOFF => { + .GOTTPOFF => { if (target.flags.has_gottp) { const S_ = @as(i64, @intCast(target.gotTpAddress(elf_file))); try cwriter.writeInt(i32, @as(i32, @intCast(S_ + A - P)), .little); @@ -1149,7 +1149,7 @@ const x86_64 = struct { } }, - .R_X86_64_GOT32 => try cwriter.writeInt(i32, @as(i32, @intCast(G + GOT + A)), .little), + .GOT32 => try cwriter.writeInt(i32, @as(i32, @intCast(G + GOT + A)), .little), else => |x| switch (@intFromEnum(x)) { // Zig custom relocations @@ -1172,7 +1172,7 @@ const x86_64 = struct { while (i < rels.len) : (i += 1) { const rel = rels[i]; const r_type: elf.R_X86_64 = @enumFromInt(rel.r_type()); - if (r_type == .R_X86_64_NONE) continue; + if (r_type == .NONE) continue; const r_offset = std.math.cast(usize, rel.r_offset) orelse return error.Overflow; @@ -1229,21 +1229,21 @@ const x86_64 = struct { try stream.seekTo(r_offset); switch (r_type) { - .R_X86_64_NONE => unreachable, - .R_X86_64_8 => try 
cwriter.writeInt(u8, @as(u8, @bitCast(@as(i8, @intCast(S + A)))), .little), - .R_X86_64_16 => try cwriter.writeInt(u16, @as(u16, @bitCast(@as(i16, @intCast(S + A)))), .little), - .R_X86_64_32 => try cwriter.writeInt(u32, @as(u32, @bitCast(@as(i32, @intCast(S + A)))), .little), - .R_X86_64_32S => try cwriter.writeInt(i32, @as(i32, @intCast(S + A)), .little), - .R_X86_64_64 => try cwriter.writeInt(i64, S + A, .little), - .R_X86_64_DTPOFF32 => try cwriter.writeInt(i32, @as(i32, @intCast(S + A - DTP)), .little), - .R_X86_64_DTPOFF64 => try cwriter.writeInt(i64, S + A - DTP, .little), - .R_X86_64_GOTOFF64 => try cwriter.writeInt(i64, S + A - GOT, .little), - .R_X86_64_GOTPC64 => try cwriter.writeInt(i64, GOT + A, .little), - .R_X86_64_SIZE32 => { + .NONE => unreachable, + .@"8" => try cwriter.writeInt(u8, @as(u8, @bitCast(@as(i8, @intCast(S + A)))), .little), + .@"16" => try cwriter.writeInt(u16, @as(u16, @bitCast(@as(i16, @intCast(S + A)))), .little), + .@"32" => try cwriter.writeInt(u32, @as(u32, @bitCast(@as(i32, @intCast(S + A)))), .little), + .@"32S" => try cwriter.writeInt(i32, @as(i32, @intCast(S + A)), .little), + .@"64" => try cwriter.writeInt(i64, S + A, .little), + .DTPOFF32 => try cwriter.writeInt(i32, @as(i32, @intCast(S + A - DTP)), .little), + .DTPOFF64 => try cwriter.writeInt(i64, S + A - DTP, .little), + .GOTOFF64 => try cwriter.writeInt(i64, S + A - GOT, .little), + .GOTPC64 => try cwriter.writeInt(i64, GOT + A, .little), + .SIZE32 => { const size = @as(i64, @intCast(target.elfSym(elf_file).st_size)); try cwriter.writeInt(u32, @as(u32, @bitCast(@as(i32, @intCast(size + A)))), .little); }, - .R_X86_64_SIZE64 => { + .SIZE64 => { const size = @as(i64, @intCast(target.elfSym(elf_file).st_size)); try cwriter.writeInt(i64, @as(i64, @intCast(size + A)), .little); }, @@ -1293,8 +1293,8 @@ const x86_64 = struct { const writer = stream.writer(); const rel: elf.R_X86_64 = @enumFromInt(rels[1].r_type()); switch (rel) { - .R_X86_64_PC32, - .R_X86_64_PLT32, + .PC32, + .PLT32, => { var insts = [_]u8{ 0x64, 0x48, 0x8b, 0x04, 0x25, 0, 0, 0, 0, // movq %fs:0,%rax @@ -1331,8 +1331,8 @@ const x86_64 = struct { const writer = stream.writer(); const rel: elf.R_X86_64 = @enumFromInt(rels[1].r_type()); switch (rel) { - .R_X86_64_PC32, - .R_X86_64_PLT32, + .PC32, + .PLT32, => { var insts = [_]u8{ 0x31, 0xc0, // xor %eax, %eax @@ -1344,8 +1344,8 @@ const x86_64 = struct { try writer.writeAll(&insts); }, - .R_X86_64_GOTPCREL, - .R_X86_64_GOTPCRELX, + .GOTPCREL, + .GOTPCRELX, => { var insts = [_]u8{ 0x31, 0xc0, // xor %eax, %eax @@ -1431,10 +1431,10 @@ const x86_64 = struct { const writer = stream.writer(); const rel: elf.R_X86_64 = @enumFromInt(rels[1].r_type()); switch (rel) { - .R_X86_64_PC32, - .R_X86_64_PLT32, - .R_X86_64_GOTPCREL, - .R_X86_64_GOTPCRELX, + .PC32, + .PLT32, + .GOTPCREL, + .GOTPCRELX, => { var insts = [_]u8{ 0x64, 0x48, 0x8b, 0x04, 0x25, 0, 0, 0, 0, // movq %fs:0,%rax diff --git a/src/link/Elf/eh_frame.zig b/src/link/Elf/eh_frame.zig index 3aa1da94ca..6e8e08fab1 100644 --- a/src/link/Elf/eh_frame.zig +++ b/src/link/Elf/eh_frame.zig @@ -543,10 +543,10 @@ const x86_64 = struct { fn resolveReloc(rel: elf.Elf64_Rela, source: i64, target: i64, data: []u8) void { const r_type: elf.R_X86_64 = @enumFromInt(rel.r_type()); switch (r_type) { - .R_X86_64_32 => std.mem.writeInt(i32, data[0..4], @as(i32, @truncate(target)), .little), - .R_X86_64_64 => std.mem.writeInt(i64, data[0..8], target, .little), - .R_X86_64_PC32 => std.mem.writeInt(i32, data[0..4], @as(i32, @intCast(target - source)), 
.little), - .R_X86_64_PC64 => std.mem.writeInt(i64, data[0..8], target - source, .little), + .@"32" => std.mem.writeInt(i32, data[0..4], @as(i32, @truncate(target)), .little), + .@"64" => std.mem.writeInt(i64, data[0..8], target, .little), + .PC32 => std.mem.writeInt(i32, data[0..4], @as(i32, @intCast(target - source)), .little), + .PC64 => std.mem.writeInt(i64, data[0..8], target - source, .little), else => unreachable, } } diff --git a/src/link/Elf/relocation.zig b/src/link/Elf/relocation.zig index 60ae54a6e9..a63187925f 100644 --- a/src/link/Elf/relocation.zig +++ b/src/link/Elf/relocation.zig @@ -30,41 +30,41 @@ fn Table(comptime len: comptime_int, comptime RelType: type, comptime mapping: [ } const x86_64_relocs = Table(10, elf.R_X86_64, .{ - .{ .abs, .R_X86_64_64 }, - .{ .copy, .R_X86_64_COPY }, - .{ .rel, .R_X86_64_RELATIVE }, - .{ .irel, .R_X86_64_IRELATIVE }, - .{ .glob_dat, .R_X86_64_GLOB_DAT }, - .{ .jump_slot, .R_X86_64_JUMP_SLOT }, - .{ .dtpmod, .R_X86_64_DTPMOD64 }, - .{ .dtpoff, .R_X86_64_DTPOFF64 }, - .{ .tpoff, .R_X86_64_TPOFF64 }, - .{ .tlsdesc, .R_X86_64_TLSDESC }, + .{ .abs, .@"64" }, + .{ .copy, .COPY }, + .{ .rel, .RELATIVE }, + .{ .irel, .IRELATIVE }, + .{ .glob_dat, .GLOB_DAT }, + .{ .jump_slot, .JUMP_SLOT }, + .{ .dtpmod, .DTPMOD64 }, + .{ .dtpoff, .DTPOFF64 }, + .{ .tpoff, .TPOFF64 }, + .{ .tlsdesc, .TLSDESC }, }); const aarch64_relocs = Table(10, elf.R_AARCH64, .{ - .{ .abs, .R_AARCH64_ABS64 }, - .{ .copy, .R_AARCH64_COPY }, - .{ .rel, .R_AARCH64_RELATIVE }, - .{ .irel, .R_AARCH64_IRELATIVE }, - .{ .glob_dat, .R_AARCH64_GLOB_DAT }, - .{ .jump_slot, .R_AARCH64_JUMP_SLOT }, - .{ .dtpmod, .R_AARCH64_TLS_DTPMOD }, - .{ .dtpoff, .R_AARCH64_TLS_DTPREL }, - .{ .tpoff, .R_AARCH64_TLS_TPREL }, - .{ .tlsdesc, .R_AARCH64_TLSDESC }, + .{ .abs, .ABS64 }, + .{ .copy, .COPY }, + .{ .rel, .RELATIVE }, + .{ .irel, .IRELATIVE }, + .{ .glob_dat, .GLOB_DAT }, + .{ .jump_slot, .JUMP_SLOT }, + .{ .dtpmod, .TLS_DTPMOD }, + .{ .dtpoff, .TLS_DTPREL }, + .{ .tpoff, .TLS_TPREL }, + .{ .tlsdesc, .TLSDESC }, }); const riscv64_relocs = Table(9, elf.R_RISCV, .{ - .{ .abs, .R_RISCV_64 }, - .{ .copy, .R_RISCV_COPY }, - .{ .rel, .R_RISCV_RELATIVE }, - .{ .irel, .R_RISCV_IRELATIVE }, - .{ .jump_slot, .R_RISCV_JUMP_SLOT }, - .{ .dtpmod, .R_RISCV_TLS_DTPMOD64 }, - .{ .dtpoff, .R_RISCV_TLS_DTPREL64 }, - .{ .tpoff, .R_RISCV_TLS_TPREL64 }, - .{ .tlsdesc, .R_RISCV_TLSDESC }, + .{ .abs, .@"64" }, + .{ .copy, .COPY }, + .{ .rel, .RELATIVE }, + .{ .irel, .IRELATIVE }, + .{ .jump_slot, .JUMP_SLOT }, + .{ .dtpmod, .TLS_DTPMOD64 }, + .{ .dtpoff, .TLS_DTPREL64 }, + .{ .tpoff, .TLS_TPREL64 }, + .{ .tlsdesc, .TLSDESC }, }); pub fn decode(r_type: u32, cpu_arch: std.Target.Cpu.Arch) ?Kind { @@ -106,17 +106,16 @@ fn formatRelocType( _ = unused_fmt_string; _ = options; const r_type = ctx.r_type; - const str = switch (r_type) { - Elf.R_ZIG_GOT32 => "R_ZIG_GOT32", - Elf.R_ZIG_GOTPCREL => "R_ZIG_GOTPCREL", + switch (r_type) { + Elf.R_ZIG_GOT32 => try writer.writeAll("R_ZIG_GOT32"), + Elf.R_ZIG_GOTPCREL => try writer.writeAll("R_ZIG_GOTPCREL"), else => switch (ctx.cpu_arch) { - .x86_64 => @tagName(@as(elf.R_X86_64, @enumFromInt(r_type))), - .aarch64 => @tagName(@as(elf.R_AARCH64, @enumFromInt(r_type))), - .riscv64 => @tagName(@as(elf.R_RISCV, @enumFromInt(r_type))), + .x86_64 => try writer.print("R_X86_64_{s}", .{@tagName(@as(elf.R_X86_64, @enumFromInt(r_type)))}), + .aarch64 => try writer.print("R_AARCH64_{s}", .{@tagName(@as(elf.R_AARCH64, @enumFromInt(r_type)))}), + .riscv64 => try writer.print("R_RISCV_{s}", 
.{@tagName(@as(elf.R_RISCV, @enumFromInt(r_type)))}), else => unreachable, }, - }; - try writer.print("{s}", .{str}); + } } const assert = std.debug.assert;
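Illustrative usage sketch (not part of the patch): a minimal Zig test showing how call sites read with the namespaced members introduced above. The `raw` value and the test itself are hypothetical; the decode-and-switch pattern mirrors src/link/Elf/Atom.zig, and re-adding the architecture prefix mirrors formatRelocType in src/link/Elf/relocation.zig.

const std = @import("std");
const elf = std.elf;

test "namespaced R_X86_64 members (illustrative)" {
    // Hypothetical raw relocation type, e.g. the low 32 bits of Elf64_Rela.r_info.
    const raw: u32 = @intFromEnum(elf.R_X86_64.PLT32);

    // Decode and switch on the short, namespaced names; members whose names
    // start with a digit become quoted identifiers such as @"64".
    const r_type: elf.R_X86_64 = @enumFromInt(raw);
    switch (r_type) {
        .NONE => unreachable,
        .PLT32, .PC32 => {},
        .@"64", .@"32", .@"32S" => {},
        else => {},
    }

    // @tagName now yields the member name without the R_X86_64_ prefix,
    // which is why formatRelocType prints "R_X86_64_{s}" around it.
    try std.testing.expectEqualStrings("PLT32", @tagName(r_type));
}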