OLD | NEW |
1 #define LDSO_ARCH "x86_64" | 1 #define LDSO_ARCH "x86_64" |
2 | 2 |
3 #define REL_SYMBOLIC R_X86_64_64 | 3 #define REL_SYMBOLIC R_X86_64_64 |
4 #define REL_OFFSET32 R_X86_64_PC32 | 4 #define REL_OFFSET32 R_X86_64_PC32 |
5 #define REL_GOT R_X86_64_GLOB_DAT | 5 #define REL_GOT R_X86_64_GLOB_DAT |
6 #define REL_PLT R_X86_64_JUMP_SLOT | 6 #define REL_PLT R_X86_64_JUMP_SLOT |
7 #define REL_RELATIVE R_X86_64_RELATIVE | 7 #define REL_RELATIVE R_X86_64_RELATIVE |
8 #define REL_COPY R_X86_64_COPY | 8 #define REL_COPY R_X86_64_COPY |
9 #define REL_DTPMOD R_X86_64_DTPMOD64 | 9 #define REL_DTPMOD R_X86_64_DTPMOD64 |
10 #define REL_DTPOFF R_X86_64_DTPOFF64 | 10 #define REL_DTPOFF R_X86_64_DTPOFF64 |
11 #define REL_TPOFF R_X86_64_TPOFF64 | 11 #define REL_TPOFF R_X86_64_TPOFF64 |
12 #define REL_TLSDESC R_X86_64_TLSDESC | 12 #define REL_TLSDESC R_X86_64_TLSDESC |
13 | 13 |
14 #define CRTJMP(pc,sp) __asm__ __volatile__( \ | 14 #define CRTJMP(pc, sp) \ |
15 » "mov %1,%%rsp ; jmp *%0" : : "r"(pc), "r"(sp) : "memory" ) | 15 __asm__ __volatile__("mov %1,%%rsp ; jmp *%0" : : "r"(pc), "r"(sp) : "memor" \ |
| 16 "y") |
16 | 17 |
17 #define GETFUNCSYM(fp, sym, got) __asm__ ( \ | 18 #define GETFUNCSYM(fp, sym, got) \ |
18 » ".hidden " #sym "\n" \ | 19 __asm__(".hidden " #sym \ |
19 » "» lea " #sym "(%%rip),%0\n" \ | 20 "\n" \ |
20 » : "=r"(*fp) : : "memory" ) | 21 "» lea " #sym "(%%rip),%0\n" \ |
| 22 : "=r"(*fp) \ |
| 23 : \ |
| 24 : "memory") |