Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2014 The Chromium Authors. All rights reserved. | 1 // Copyright 2014 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "elf_file.h" | 5 #include "elf_file.h" |
| 6 | 6 |
| 7 #include <stdlib.h> | 7 #include <stdlib.h> |
| 8 #include <sys/types.h> | 8 #include <sys/types.h> |
| 9 #include <unistd.h> | 9 #include <unistd.h> |
| 10 #include <algorithm> | |
| 10 #include <string> | 11 #include <string> |
| 11 #include <vector> | 12 #include <vector> |
| 12 | 13 |
| 13 #include "debug.h" | 14 #include "debug.h" |
| 14 #include "elf_traits.h" | 15 #include "elf_traits.h" |
| 15 #include "libelf.h" | 16 #include "libelf.h" |
| 16 #include "packer.h" | 17 #include "packer.h" |
| 17 | 18 |
| 18 namespace relocation_packer { | 19 namespace relocation_packer { |
| 19 | 20 |
Anton (2014/09/03 16:57:20): The comment for SplitProgramHeadersForHole is pret
simonb (inactive) (2014/09/03 17:52:22): Done.
| 20 // Stub identifier written to 'null out' packed data, "NULL". | 21 // Stub identifier written to 'null out' packed data, "NULL". |
| 21 static const uint32_t kStubIdentifier = 0x4c4c554eu; | 22 static const uint32_t kStubIdentifier = 0x4c4c554eu; |
| 22 | 23 |
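Aside: a minimal standalone sketch (not part of this CL) showing why this constant reads as "NULL". Stored little-endian, the bytes of 0x4c4c554e are the ASCII characters 'N', 'U', 'L', 'L'.

```cpp
// Standalone illustration only: decode the stub identifier as ASCII text.
#include <cstdint>
#include <cstdio>
#include <cstring>

int main() {
  const uint32_t kStubIdentifier = 0x4c4c554eu;
  char text[5] = {0};
  std::memcpy(text, &kStubIdentifier, sizeof(kStubIdentifier));
  // On a little-endian host the bytes come out as 'N' 'U' 'L' 'L'.
  std::printf("%s\n", text);  // prints: NULL
  return 0;
}
```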
| 23 // Out-of-band dynamic tags used to indicate the offset and size of the | 24 // Out-of-band dynamic tags used to indicate the offset and size of the |
| 24 // android packed relocations section. | 25 // android packed relocations section. |
| 25 static const ELF::Sword DT_ANDROID_REL_OFFSET = DT_LOOS; | 26 static const ELF::Sword DT_ANDROID_REL_OFFSET = DT_LOOS; |
| 26 static const ELF::Sword DT_ANDROID_REL_SIZE = DT_LOOS + 1; | 27 static const ELF::Sword DT_ANDROID_REL_SIZE = DT_LOOS + 1; |
| 27 | 28 |
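For context, a hedged sketch (not from this CL) of how a consumer might scan a .dynamic array for these out-of-band tags. It uses plain Elf32_Dyn from <elf.h> rather than the ELF:: typedefs from elf_traits.h, and the helper name and signature are invented for illustration.

```cpp
// Hypothetical lookup of the packed-relocations offset/size pair.
#include <elf.h>
#include <cstddef>

static const Elf32_Sword kAndroidRelOffsetTag = DT_LOOS;      // DT_ANDROID_REL_OFFSET
static const Elf32_Sword kAndroidRelSizeTag = DT_LOOS + 1;    // DT_ANDROID_REL_SIZE

// Returns true and fills *offset / *size if both tags are present.
bool FindPackedRelocations(const Elf32_Dyn* dynamics, size_t count,
                           Elf32_Word* offset, Elf32_Word* size) {
  bool have_offset = false;
  bool have_size = false;
  for (size_t i = 0; i < count && dynamics[i].d_tag != DT_NULL; ++i) {
    if (dynamics[i].d_tag == kAndroidRelOffsetTag) {
      *offset = dynamics[i].d_un.d_ptr;
      have_offset = true;
    } else if (dynamics[i].d_tag == kAndroidRelSizeTag) {
      *size = dynamics[i].d_un.d_val;
      have_size = true;
    }
  }
  return have_offset && have_size;
}
```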
| 28 // Alignment to preserve, in bytes. This must be at least as large as the | 29 // Alignment to preserve, in bytes. This must be at least as large as the |
| 29 // largest d_align and sh_addralign values found in the loaded file. | 30 // largest d_align and sh_addralign values found in the loaded file. |
| (...skipping 38 matching lines...) | |
| 68 | 69 |
| 69 // Verbose ELF program header logging. | 70 // Verbose ELF program header logging. |
| 70 void VerboseLogProgramHeader(size_t program_header_index, | 71 void VerboseLogProgramHeader(size_t program_header_index, |
| 71 const ELF::Phdr* program_header) { | 72 const ELF::Phdr* program_header) { |
| 72 std::string type; | 73 std::string type; |
| 73 switch (program_header->p_type) { | 74 switch (program_header->p_type) { |
| 74 case PT_NULL: type = "NULL"; break; | 75 case PT_NULL: type = "NULL"; break; |
| 75 case PT_LOAD: type = "LOAD"; break; | 76 case PT_LOAD: type = "LOAD"; break; |
| 76 case PT_DYNAMIC: type = "DYNAMIC"; break; | 77 case PT_DYNAMIC: type = "DYNAMIC"; break; |
| 77 case PT_INTERP: type = "INTERP"; break; | 78 case PT_INTERP: type = "INTERP"; break; |
| 78 case PT_NOTE: type = "NOTE"; break; | |
| 79 case PT_SHLIB: type = "SHLIB"; break; | |
| 80 case PT_PHDR: type = "PHDR"; break; | 79 case PT_PHDR: type = "PHDR"; break; |
| 81 case PT_TLS: type = "TLS"; break; | 80 case PT_GNU_RELRO: type = "GNU_RELRO"; break; |
| 81 case PT_GNU_STACK: type = "GNU_STACK"; break; | |
| 82 case PT_ARM_EXIDX: type = "EXIDX"; break; | |
| 82 default: type = "(OTHER)"; break; | 83 default: type = "(OTHER)"; break; |
| 83 } | 84 } |
| 84 VLOG(1) << "phdr " << program_header_index << " : " << type; | 85 VLOG(1) << "phdr[" << program_header_index << "] : " << type; |
| 85 VLOG(1) << " p_offset = " << program_header->p_offset; | 86 VLOG(1) << " p_offset = " << program_header->p_offset; |
| 86 VLOG(1) << " p_vaddr = " << program_header->p_vaddr; | 87 VLOG(1) << " p_vaddr = " << program_header->p_vaddr; |
| 87 VLOG(1) << " p_paddr = " << program_header->p_paddr; | 88 VLOG(1) << " p_paddr = " << program_header->p_paddr; |
| 88 VLOG(1) << " p_filesz = " << program_header->p_filesz; | 89 VLOG(1) << " p_filesz = " << program_header->p_filesz; |
| 89 VLOG(1) << " p_memsz = " << program_header->p_memsz; | 90 VLOG(1) << " p_memsz = " << program_header->p_memsz; |
| 91 VLOG(1) << " p_flags = " << program_header->p_flags; | |
| 92 VLOG(1) << " p_align = " << program_header->p_align; | |
| 90 } | 93 } |
| 91 | 94 |
| 92 // Verbose ELF section header logging. | 95 // Verbose ELF section header logging. |
| 93 void VerboseLogSectionHeader(const std::string& section_name, | 96 void VerboseLogSectionHeader(const std::string& section_name, |
| 94 const ELF::Shdr* section_header) { | 97 const ELF::Shdr* section_header) { |
| 95 VLOG(1) << "section " << section_name; | 98 VLOG(1) << "section " << section_name; |
| 96 VLOG(1) << " sh_addr = " << section_header->sh_addr; | 99 VLOG(1) << " sh_addr = " << section_header->sh_addr; |
| 97 VLOG(1) << " sh_offset = " << section_header->sh_offset; | 100 VLOG(1) << " sh_offset = " << section_header->sh_offset; |
| 98 VLOG(1) << " sh_size = " << section_header->sh_size; | 101 VLOG(1) << " sh_size = " << section_header->sh_size; |
| 99 VLOG(1) << " sh_addralign = " << section_header->sh_addralign; | 102 VLOG(1) << " sh_addralign = " << section_header->sh_addralign; |
| (...skipping 75 matching lines...) | |
| 175 // attributes. | 178 // attributes. |
| 176 Elf_Scn* found_relocations_section = NULL; | 179 Elf_Scn* found_relocations_section = NULL; |
| 177 Elf_Scn* found_android_relocations_section = NULL; | 180 Elf_Scn* found_android_relocations_section = NULL; |
| 178 Elf_Scn* found_dynamic_section = NULL; | 181 Elf_Scn* found_dynamic_section = NULL; |
| 179 | 182 |
| 180 // Notes of relocation section types seen. We require one or the other of | 183 // Notes of relocation section types seen. We require one or the other of |
| 181 // these; both is unsupported. | 184 // these; both is unsupported. |
| 182 bool has_rel_relocations = false; | 185 bool has_rel_relocations = false; |
| 183 bool has_rela_relocations = false; | 186 bool has_rela_relocations = false; |
| 184 | 187 |
| 185 // Flag set if we encounter any .debug* section. We do not adjust any | |
| 186 // offsets or addresses of any debug data, so if we find one of these then | |
| 187 // the resulting output shared object should still run, but might not be | |
| 188 // usable for debugging, disassembly, and so on. Provides a warning if | |
| 189 // this occurs. | |
| 190 bool has_debug_section = false; | |
| 191 | |
| 192 Elf_Scn* section = NULL; | 188 Elf_Scn* section = NULL; |
| 193 while ((section = elf_nextscn(elf, section)) != NULL) { | 189 while ((section = elf_nextscn(elf, section)) != NULL) { |
| 194 const ELF::Shdr* section_header = ELF::getshdr(section); | 190 const ELF::Shdr* section_header = ELF::getshdr(section); |
| 195 std::string name = elf_strptr(elf, string_index, section_header->sh_name); | 191 std::string name = elf_strptr(elf, string_index, section_header->sh_name); |
| 196 VerboseLogSectionHeader(name, section_header); | 192 VerboseLogSectionHeader(name, section_header); |
| 197 | 193 |
| 198 // Note relocation section types. | 194 // Note relocation section types. |
| 199 if (section_header->sh_type == SHT_REL) { | 195 if (section_header->sh_type == SHT_REL) { |
| 200 has_rel_relocations = true; | 196 has_rel_relocations = true; |
| 201 } | 197 } |
| 202 if (section_header->sh_type == SHT_RELA) { | 198 if (section_header->sh_type == SHT_RELA) { |
| 203 has_rela_relocations = true; | 199 has_rela_relocations = true; |
| 204 } | 200 } |
| 205 | 201 |
| 206 // Note special sections as we encounter them. | 202 // Note special sections as we encounter them. |
| 207 if ((name == ".rel.dyn" || name == ".rela.dyn") && | 203 if ((name == ".rel.dyn" || name == ".rela.dyn") && |
| 208 section_header->sh_size > 0) { | 204 section_header->sh_size > 0) { |
| 209 found_relocations_section = section; | 205 found_relocations_section = section; |
| 210 } | 206 } |
| 211 if ((name == ".android.rel.dyn" || name == ".android.rela.dyn") && | 207 if ((name == ".android.rel.dyn" || name == ".android.rela.dyn") && |
| 212 section_header->sh_size > 0) { | 208 section_header->sh_size > 0) { |
| 213 found_android_relocations_section = section; | 209 found_android_relocations_section = section; |
| 214 } | 210 } |
| 215 if (section_header->sh_offset == dynamic_program_header->p_offset) { | 211 if (section_header->sh_offset == dynamic_program_header->p_offset) { |
| 216 found_dynamic_section = section; | 212 found_dynamic_section = section; |
| 217 } | 213 } |
| 218 | 214 |
| 219 // If we find a section named .debug*, set the debug warning flag. | |
| 220 if (std::string(name).find(".debug") == 0) { | |
| 221 has_debug_section = true; | |
| 222 } | |
| 223 | |
| 224 // Ensure we preserve alignment, repeated later for the data block(s). | 215 // Ensure we preserve alignment, repeated later for the data block(s). |
| 225 CHECK(section_header->sh_addralign <= kPreserveAlignment); | 216 CHECK(section_header->sh_addralign <= kPreserveAlignment); |
| 226 | 217 |
| 227 Elf_Data* data = NULL; | 218 Elf_Data* data = NULL; |
| 228 while ((data = elf_getdata(section, data)) != NULL) { | 219 while ((data = elf_getdata(section, data)) != NULL) { |
| 229 CHECK(data->d_align <= kPreserveAlignment); | 220 CHECK(data->d_align <= kPreserveAlignment); |
| 230 VerboseLogSectionData(data); | 221 VerboseLogSectionData(data); |
| 231 } | 222 } |
| 232 } | 223 } |
| 233 | 224 |
| (...skipping 17 matching lines...) | |
| 251 if (!has_rel_relocations && !has_rela_relocations) { | 242 if (!has_rel_relocations && !has_rela_relocations) { |
| 252 LOG(ERROR) << "No relocations sections found"; | 243 LOG(ERROR) << "No relocations sections found"; |
| 253 return false; | 244 return false; |
| 254 } | 245 } |
| 255 if (has_rel_relocations && has_rela_relocations) { | 246 if (has_rel_relocations && has_rela_relocations) { |
| 256 LOG(ERROR) << "Multiple relocations sections with different types found, " | 247 LOG(ERROR) << "Multiple relocations sections with different types found, " |
| 257 << "not currently supported"; | 248 << "not currently supported"; |
| 258 return false; | 249 return false; |
| 259 } | 250 } |
| 260 | 251 |
| 261 if (has_debug_section) { | |
| 262 LOG(WARNING) << "Found .debug section(s), and ignored them"; | |
| 263 } | |
| 264 | |
| 265 elf_ = elf; | 252 elf_ = elf; |
| 266 relocations_section_ = found_relocations_section; | 253 relocations_section_ = found_relocations_section; |
| 267 dynamic_section_ = found_dynamic_section; | 254 dynamic_section_ = found_dynamic_section; |
| 268 android_relocations_section_ = found_android_relocations_section; | 255 android_relocations_section_ = found_android_relocations_section; |
| 269 relocations_type_ = has_rel_relocations ? REL : RELA; | 256 relocations_type_ = has_rel_relocations ? REL : RELA; |
| 270 return true; | 257 return true; |
| 271 } | 258 } |
| 272 | 259 |
| 273 namespace { | 260 namespace { |
| 274 | 261 |
| 275 // Helper for ResizeSection(). Adjust the main ELF header for the hole. | 262 // Helper for ResizeSection(). Adjust the main ELF header for the hole. |
| 276 void AdjustElfHeaderForHole(ELF::Ehdr* elf_header, | 263 void AdjustElfHeaderForHole(ELF::Ehdr* elf_header, |
| 277 ELF::Off hole_start, | 264 ELF::Off hole_start, |
| 278 ssize_t hole_size) { | 265 ssize_t hole_size) { |
| 279 if (elf_header->e_phoff > hole_start) { | 266 if (elf_header->e_phoff > hole_start) { |
| 280 elf_header->e_phoff += hole_size; | 267 elf_header->e_phoff += hole_size; |
| 281 VLOG(1) << "e_phoff adjusted to " << elf_header->e_phoff; | 268 VLOG(1) << "e_phoff adjusted to " << elf_header->e_phoff; |
| 282 } | 269 } |
| 283 if (elf_header->e_shoff > hole_start) { | 270 if (elf_header->e_shoff > hole_start) { |
| 284 elf_header->e_shoff += hole_size; | 271 elf_header->e_shoff += hole_size; |
| 285 VLOG(1) << "e_shoff adjusted to " << elf_header->e_shoff; | 272 VLOG(1) << "e_shoff adjusted to " << elf_header->e_shoff; |
| 286 } | 273 } |
| 287 } | 274 } |
| 288 | 275 |
| 289 // Helper for ResizeSection(). Adjust all program headers for the hole. | |
| 290 void AdjustProgramHeadersForHole(ELF::Phdr* elf_program_header, | |
| 291 size_t program_header_count, | |
| 292 ELF::Off hole_start, | |
| 293 ssize_t hole_size) { | |
| 294 for (size_t i = 0; i < program_header_count; ++i) { | |
| 295 ELF::Phdr* program_header = &elf_program_header[i]; | |
| 296 | |
| 297 if (program_header->p_offset > hole_start) { | |
| 298 // The hole start is past this segment, so adjust offsets and addrs. | |
| 299 program_header->p_offset += hole_size; | |
| 300 VLOG(1) << "phdr " << i | |
| 301 << " p_offset adjusted to "<< program_header->p_offset; | |
| 302 | |
| 303 // Only adjust vaddr and paddr if this program header has them. | |
| 304 if (program_header->p_vaddr != 0) { | |
| 305 program_header->p_vaddr += hole_size; | |
| 306 VLOG(1) << "phdr " << i | |
| 307 << " p_vaddr adjusted to " << program_header->p_vaddr; | |
| 308 } | |
| 309 if (program_header->p_paddr != 0) { | |
| 310 program_header->p_paddr += hole_size; | |
| 311 VLOG(1) << "phdr " << i | |
| 312 << " p_paddr adjusted to " << program_header->p_paddr; | |
| 313 } | |
| 314 } else if (program_header->p_offset + | |
| 315 program_header->p_filesz > hole_start) { | |
| 316 // The hole start is within this segment, so adjust file and in-memory | |
| 317 // sizes, but leave offsets and addrs unchanged. | |
| 318 program_header->p_filesz += hole_size; | |
| 319 VLOG(1) << "phdr " << i | |
| 320 << " p_filesz adjusted to " << program_header->p_filesz; | |
| 321 program_header->p_memsz += hole_size; | |
| 322 VLOG(1) << "phdr " << i | |
| 323 << " p_memsz adjusted to " << program_header->p_memsz; | |
| 324 } | |
| 325 } | |
| 326 } | |
| 327 | |
| 328 // Helper for ResizeSection(). Adjust all section headers for the hole. | 276 // Helper for ResizeSection(). Adjust all section headers for the hole. |
| 329 void AdjustSectionHeadersForHole(Elf* elf, | 277 void AdjustSectionHeadersForHole(Elf* elf, |
| 330 ELF::Off hole_start, | 278 ELF::Off hole_start, |
| 331 ssize_t hole_size) { | 279 ssize_t hole_size) { |
| 332 size_t string_index; | 280 size_t string_index; |
| 333 elf_getshdrstrndx(elf, &string_index); | 281 elf_getshdrstrndx(elf, &string_index); |
| 334 | 282 |
| 335 Elf_Scn* section = NULL; | 283 Elf_Scn* section = NULL; |
| 336 while ((section = elf_nextscn(elf, section)) != NULL) { | 284 while ((section = elf_nextscn(elf, section)) != NULL) { |
| 337 ELF::Shdr* section_header = ELF::getshdr(section); | 285 ELF::Shdr* section_header = ELF::getshdr(section); |
| 338 std::string name = elf_strptr(elf, string_index, section_header->sh_name); | 286 std::string name = elf_strptr(elf, string_index, section_header->sh_name); |
| 339 | 287 |
| 340 if (section_header->sh_offset > hole_start) { | 288 if (section_header->sh_offset > hole_start) { |
| 341 section_header->sh_offset += hole_size; | 289 section_header->sh_offset += hole_size; |
| 342 VLOG(1) << "section " << name | 290 VLOG(1) << "section " << name |
| 343 << " sh_offset adjusted to " << section_header->sh_offset; | 291 << " sh_offset adjusted to " << section_header->sh_offset; |
| 344 // Only adjust section addr if this section has one. | 292 } |
| 345 if (section_header->sh_addr != 0) { | 293 } |
| 346 section_header->sh_addr += hole_size; | 294 } |
| 347 VLOG(1) << "section " << name | 295 |
| 348 << " sh_addr adjusted to " << section_header->sh_addr; | 296 // Helper for ResizeSection(). Adjust the offsets of any program headers |
| 297 // that have offsets currently beyond the hole start. | |
| 298 void AdjustProgramHeaderOffsets(ELF::Phdr* program_headers, | |
| 299 size_t count, | |
| 300 ELF::Phdr* ignored_1, | |
| 301 ELF::Phdr* ignored_2, | |
| 302 ELF::Off hole_start, | |
| 303 ssize_t hole_size) { | |
| 304 for (size_t i = 0; i < count; ++i) { | |
| 305 ELF::Phdr* program_header = &program_headers[i]; | |
| 306 | |
| 307 if (program_header == ignored_1 || program_header == ignored_2) | |
| 308 continue; | |
| 309 | |
| 310 if (program_header->p_offset > hole_start) { | |
| 311 // The hole start is past this segment, so adjust offset. | |
| 312 program_header->p_offset += hole_size; | |
| 313 VLOG(1) << "phdr[" << i | |
| 314 << "] p_offset adjusted to "<< program_header->p_offset; | |
| 315 } | |
| 316 } | |
| 317 } | |
| 318 | |
| 319 // Helper for ResizeSection(). Find the first loadable segment in the | |
| 320 // file. We expect it to map from file offset zero. | |
| 321 ELF::Phdr* FindFirstLoadSegment(ELF::Phdr* program_headers, | |
| 322 size_t count) { | |
| 323 ELF::Phdr* first_loadable_segment = NULL; | |
| 324 | |
| 325 for (size_t i = 0; i < count; ++i) { | |
| 326 ELF::Phdr* program_header = &program_headers[i]; | |
| 327 | |
| 328 if (program_header->p_type == PT_LOAD && | |
| 329 program_header->p_offset == 0 && | |
| 330 program_header->p_vaddr == 0 && | |
| 331 program_header->p_paddr == 0) { | |
| 332 first_loadable_segment = program_header; | |
| 333 } | |
| 334 } | |
| 335 CHECK(first_loadable_segment); | |

Anton (2014/09/03 16:57:20): Here and below I want a bit more text to come out
simonb (inactive) (2014/09/03 17:52:22): Done.
| 336 | |
| 337 return first_loadable_segment; | |
| 338 } | |
| 339 | |
| 340 // Helper for ResizeSection(). Find the PT_GNU_STACK segment, and check | |
| 341 // that it contains what we expect so we can restore it on unpack if needed. | |
| 342 ELF::Phdr* FindUnusedGnuStackSegment(ELF::Phdr* program_headers, | |
| 343 size_t count) { | |
| 344 ELF::Phdr* unused_segment = NULL; | |
| 345 | |
| 346 for (size_t i = 0; i < count; ++i) { | |
| 347 ELF::Phdr* program_header = &program_headers[i]; | |
| 348 | |
| 349 if (program_header->p_type == PT_GNU_STACK && | |
| 350 program_header->p_offset == 0 && | |
| 351 program_header->p_vaddr == 0 && | |
| 352 program_header->p_paddr == 0 && | |
| 353 program_header->p_filesz == 0 && | |
| 354 program_header->p_memsz == 0 && | |
| 355 program_header->p_flags == (PF_R | PF_W) && | |
| 356 program_header->p_align == ELF::kGnuStackSegmentAlignment) { | |
| 357 unused_segment = program_header; | |
| 358 } | |
| 359 } | |
| 360 CHECK(unused_segment); | |
| 361 | |
| 362 return unused_segment; | |
| 363 } | |
| 364 | |
| 365 // Helper for ResizeSection(). Find the segment that was the first loadable | |
| 366 // one before we split it into two. This is the one into which we coalesce | |
| 367 // the split segments on unpacking. | |
| 368 ELF::Phdr* FindOriginalFirstLoadSegment(ELF::Phdr* program_headers, | |
| 369 size_t count) { | |
| 370 const ELF::Phdr* first_loadable_segment = | |
| 371 FindFirstLoadSegment(program_headers, count); | |
| 372 | |
| 373 ELF::Phdr* original_first_loadable_segment = NULL; | |
| 374 | |
| 375 for (size_t i = 0; i < count; ++i) { | |
| 376 ELF::Phdr* program_header = &program_headers[i]; | |
| 377 | |
| 378 // The original first loadable segment is the one that follows on from | |
| 379 // the one we wrote on split to be the current first loadable segment. | |
| 380 if (program_header->p_type == PT_LOAD && | |
| 381 program_header->p_offset == first_loadable_segment->p_filesz) { | |
| 382 original_first_loadable_segment = program_header; | |
| 383 } | |
| 384 } | |
| 385 CHECK(original_first_loadable_segment); | |
| 386 | |
| 387 return original_first_loadable_segment; | |
| 388 } | |
| 389 | |
| 390 // Helper for ResizeSection(). Find the segment that contains the hole. | |
| 391 Elf_Scn* FindSectionContainingHole(Elf* elf, | |
| 392 ELF::Off hole_start, | |
| 393 ssize_t hole_size) { | |
| 394 Elf_Scn* section = NULL; | |
| 395 Elf_Scn* last_unholed_section = NULL; | |
| 396 | |
| 397 while ((section = elf_nextscn(elf, section)) != NULL) { | |
| 398 const ELF::Shdr* section_header = ELF::getshdr(section); | |
| 399 | |
| 400 // Because we get here after section headers have been adjusted for the | |
| 401 // hole, we need to 'undo' that adjustment to give a view of the original | |
| 402 // sections layout. | |
| 403 ELF::Off offset = section_header->sh_offset; | |
| 404 if (section_header->sh_offset >= hole_start) { | |
| 405 offset -= hole_size; | |
| 406 } | |
| 407 | |
| 408 if (offset <= hole_start) { | |
| 409 last_unholed_section = section; | |
| 410 } | |
| 411 } | |
| 412 CHECK(last_unholed_section); | |
| 413 | |
| 414 // The section containing the hole is the one after the last one found | |
| 415 // by the loop above. | |
| 416 Elf_Scn* holed_section = elf_nextscn(elf, last_unholed_section); | |
| 417 CHECK(holed_section); | |
| 418 | |
| 419 return holed_section; | |
| 420 } | |
| 421 | |
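A small sketch (not from the CL, with invented numbers) of the "undo" arithmetic used in FindSectionContainingHole() above: once AdjustSectionHeadersForHole() has shifted sh_offset, comparing against the original layout requires shifting it back.

```cpp
// Standalone illustration of the offset 'undo' step; values are invented.
#include <cassert>
#include <sys/types.h>  // ssize_t

int main() {
  const unsigned long original_offset = 0x20000;  // section offset before packing
  const unsigned long hole_start = 0x10000;
  const ssize_t hole_size = -0x4000;              // negative: bytes removed

  // AdjustSectionHeadersForHole() has already moved this section down.
  unsigned long adjusted_offset = original_offset + hole_size;

  // To reason about the original layout, shift it back if it was moved.
  unsigned long offset = adjusted_offset;
  if (adjusted_offset >= hole_start)
    offset -= hole_size;

  assert(offset == original_offset);
  return 0;
}
```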
| 422 // Helper for ResizeSection(). Find the last section contained in a segment. | |
| 423 Elf_Scn* FindLastSectionInSegment(Elf* elf, | |
| 424 ELF::Phdr* program_header, | |
| 425 ELF::Off hole_start, | |
| 426 ssize_t hole_size) { | |
| 427 const ELF::Off segment_end = | |
| 428 program_header->p_offset + program_header->p_filesz; | |
| 429 | |
| 430 Elf_Scn* section = NULL; | |
| 431 Elf_Scn* last_section = NULL; | |
| 432 | |
| 433 while ((section = elf_nextscn(elf, section)) != NULL) { | |
| 434 const ELF::Shdr* section_header = ELF::getshdr(section); | |
| 435 | |
| 436 // As above, 'undo' any section offset adjustment to give a view of the | |
| 437 // original sections layout. | |
| 438 ELF::Off offset = section_header->sh_offset; | |
| 439 if (section_header->sh_offset >= hole_start) { | |
| 440 offset -= hole_size; | |
| 441 } | |
| 442 | |
| 443 if (offset < segment_end) { | |
| 444 last_section = section; | |
| 445 } | |
| 446 } | |
| 447 CHECK(last_section); | |
| 448 | |
| 449 return last_section; | |
| 450 } | |
| 451 | |
| 452 // Helper for ResizeSection(). Order loadable segments by their offsets. | |
| 453 // The crazy linker contains assumptions about loadable segment ordering, | |
| 454 // and it is better if we do not break them. | |
| 455 void SortOrderSensitiveProgramHeaders(ELF::Phdr* program_headers, | |
| 456 size_t count) { | |
| 457 std::vector<ELF::Phdr*> orderable; | |
| 458 | |
| 459 // Collect together orderable program headers. These are all the LOAD | |
| 460 // segments, and any GNU_STACK that may be present (removed on packing, | |
| 461 // but replaced on unpacking). | |
| 462 for (size_t i = 0; i < count; ++i) { | |
| 463 ELF::Phdr* program_header = &program_headers[i]; | |
| 464 | |
| 465 if (program_header->p_type == PT_LOAD || | |
| 466 program_header->p_type == PT_GNU_STACK) { | |
| 467 orderable.push_back(program_header); | |
| 468 } | |
| 469 } | |
| 470 | |
| 471 // Order these program headers so that any PT_GNU_STACK is last, and | |
| 472 // the LOAD segments that precede it appear in offset order. Uses | |
| 473 // insertion sort. | |
| 474 for (size_t i = 1; i < orderable.size(); ++i) { | |
| 475 for (size_t j = i; j > 0; --j) { | |
| 476 ELF::Phdr* first = orderable[j - 1]; | |
| 477 ELF::Phdr* second = orderable[j]; | |
| 478 | |
| 479 if (!(first->p_type == PT_GNU_STACK || | |
| 480 first->p_offset > second->p_offset)) { | |
| 481 break; | |
| 349 } | 482 } |
| 350 } | 483 std::swap(*first, *second); |
| 351 } | 484 } |
| 485 } | |
| 486 } | |
| 487 | |
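As a reading aid (not part of the CL), the order that the insertion sort above establishes can be expressed as a comparator: LOAD segments in ascending p_offset order, with any PT_GNU_STACK entry last.

```cpp
// Equivalent ordering expressed with std::stable_sort; a sketch only,
// using plain Elf32_Phdr from <elf.h> instead of the ELF:: typedefs.
#include <elf.h>
#include <algorithm>
#include <vector>

bool OrdersBefore(const Elf32_Phdr* a, const Elf32_Phdr* b) {
  if (a->p_type == PT_GNU_STACK) return false;  // GNU_STACK never sorts first
  if (b->p_type == PT_GNU_STACK) return true;   // anything else precedes it
  return a->p_offset < b->p_offset;             // LOADs ascend by offset
}

void SortOrderableToy(std::vector<Elf32_Phdr*>* orderable) {
  std::stable_sort(orderable->begin(), orderable->end(), OrdersBefore);
}
```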
| 488 // Helper for ResizeSection(). The GNU_STACK program header is unused in | |
| 489 // Android, so we can repurpose it here. Before packing, the program header | |
| 490 // table contains something like: | |
| 491 // | |
| 492 // Type Offset VirtAddr PhysAddr FileSiz MemSiz Flg Align | |
| 493 // LOAD 0x000000 0x00000000 0x00000000 0x1efc818 0x1efc818 R E 0x1000 | |
| 494 // LOAD 0x1efd008 0x01efe008 0x01efe008 0x17ec3c 0x1a0324 RW 0x1000 | |
| 495 // DYNAMIC 0x205ec50 0x0205fc50 0x0205fc50 0x00108 0x00108 RW 0x4 | |
| 496 // GNU_STACK 0x000000 0x00000000 0x00000000 0x00000 0x00000 RW 0 | |
| 497 // | |
| 498 // The hole in the file is in the first of these. In order to preserve all | |
| 499 // load addresses, what we do is to turn the GNU_STACK into a new LOAD entry | |
| 500 // that maps segments up to where we created the hole, adjust the first LOAD | |
| 501 // entry so that it maps segments after that, adjust any other program | |
| 502 // headers whose offset is after the hole start, and finally order the LOAD | |
| 503 // segments by offset, to give: | |
| 504 // | |
| 505 // Type Offset VirtAddr PhysAddr FileSiz MemSiz Flg Align | |
| 506 // LOAD 0x000000 0x00000000 0x00000000 0x14ea4 0x212ea4 R E 0x1000 | |
| 507 // LOAD 0x014ea4 0x00212ea4 0x00212ea4 0x1cea164 0x1cea164 R E 0x1000 | |
| 508 // DYNAMIC 0x1e60c50 0x0205fc50 0x0205fc50 0x00108 0x00108 RW 0x4 | |
| 509 // LOAD 0x1cff008 0x01efe008 0x01efe008 0x17ec3c 0x1a0324 RW 0x1000 | |
| 510 // | |
| 511 // We work out the split points by finding the .rel.dyn or .rela.dyn section | |
| 512 // that contains the hole, and by finding the last section in a given segment. | |
| 513 // | |
| 514 // To unpack, we reverse the above to leave the file as it was originally. | |
| 515 void SplitProgramHeadersForHole(Elf* elf, | |
| 516 ELF::Off hole_start, | |
| 517 ssize_t hole_size) { | |
| 518 CHECK(hole_size < 0); | |
| 519 const ELF::Ehdr* elf_header = ELF::getehdr(elf); | |
| 520 CHECK(elf_header); | |
| 521 | |
| 522 ELF::Phdr* elf_program_header = ELF::getphdr(elf); | |
| 523 CHECK(elf_program_header); | |
| 524 | |
| 525 const size_t program_header_count = elf_header->e_phnum; | |
| 526 | |
| 527 // Locate the segment that we can overwrite to form the new LOAD entry, | |
| 528 // and the segment that we are going to split into two parts. | |
| 529 ELF::Phdr* spliced_header = | |
| 530 FindUnusedGnuStackSegment(elf_program_header, program_header_count); | |
| 531 ELF::Phdr* split_header = | |
| 532 FindFirstLoadSegment(elf_program_header, program_header_count); | |
| 533 | |
| 534 VLOG(1) << "phdr[" << split_header - elf_program_header << "] split"; | |
| 535 VLOG(1) << "phdr[" << spliced_header - elf_program_header << "] new LOAD"; | |
| 536 | |
| 537 // Find the section that contains the hole. We split on the section that | |
| 538 // follows it. | |
| 539 Elf_Scn* holed_section = | |
| 540 FindSectionContainingHole(elf, hole_start, hole_size); | |
| 541 | |
| 542 size_t string_index; | |
| 543 elf_getshdrstrndx(elf, &string_index); | |
| 544 | |
| 545 ELF::Shdr* section_header = ELF::getshdr(holed_section); | |
| 546 std::string name = elf_strptr(elf, string_index, section_header->sh_name); | |
| 547 VLOG(1) << "section " << name << " split after"; | |
| 548 | |
| 549 // Find the last section in the segment we are splitting. | |
| 550 Elf_Scn* last_section = | |
| 551 FindLastSectionInSegment(elf, split_header, hole_start, hole_size); | |
| 552 | |
| 553 section_header = ELF::getshdr(last_section); | |
| 554 name = elf_strptr(elf, string_index, section_header->sh_name); | |
| 555 VLOG(1) << "section " << name << " split end"; | |
| 556 | |
| 557 // Split on the section following the holed one, and up to (but not | |
| 558 // including) the section following the last one in the split segment. | |
| 559 Elf_Scn* split_section = elf_nextscn(elf, holed_section); | |
| 560 CHECK(split_section); | |
| 561 Elf_Scn* end_section = elf_nextscn(elf, last_section); | |
| 562 CHECK(end_section); | |
| 563 | |
| 564 // Split the first portion of split_header into spliced_header. Done | |
| 565 // by copying the entire split_header into spliced_header, then changing | |
| 566 // only the fields that set the segment sizes. | |
| 567 *spliced_header = *split_header; | |
| 568 const ELF::Shdr* split_section_header = ELF::getshdr(split_section); | |
| 569 spliced_header->p_filesz = split_section_header->sh_offset; | |
| 570 spliced_header->p_memsz = split_section_header->sh_addr; | |
| 571 | |
| 572 // Now rewrite split_header to remove the part we spliced from it. | |
| 573 const ELF::Shdr* end_section_header = ELF::getshdr(end_section); | |
| 574 split_header->p_offset = spliced_header->p_filesz; | |
| 575 | |
| 576 CHECK(split_header->p_vaddr == split_header->p_paddr); | |
| 577 split_header->p_vaddr = spliced_header->p_memsz; | |
| 578 split_header->p_paddr = split_header->p_vaddr; | |
| 579 | |
| 580 CHECK(split_header->p_filesz == split_header->p_memsz); | |
| 581 split_header->p_filesz = | |
| 582 end_section_header->sh_offset - spliced_header->p_filesz; | |
| 583 split_header->p_memsz = split_header->p_filesz; | |
| 584 | |
| 585 // Adjust the offsets of all program headers that are not one of the pair | |
| 586 // we just created by splitting. | |
| 587 AdjustProgramHeaderOffsets(elf_program_header, | |
| 588 program_header_count, | |
| 589 spliced_header, | |
| 590 split_header, | |
| 591 hole_start, | |
| 592 hole_size); | |
| 593 | |
| 594 // Finally, order loadable segments by offset/address. The crazy linker | |
| 595 // contains assumptions about loadable segment ordering. | |
| 596 SortOrderSensitiveProgramHeaders(elf_program_header, | |
| 597 program_header_count); | |
| 598 } | |
| 599 | |
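A hedged cross-check (not part of the CL) of the example tables in the comment above SplitProgramHeadersForHole(): in that worked example the two new LOAD entries tile the file up to the offset of the following LOAD, and every offset behind the hole moves by the same negative hole size.

```cpp
// Arithmetic check of the worked example; all values are copied from the
// before/after program header tables in the comment.
#include <cassert>
#include <cstdio>

int main() {
  // Before packing.
  const long second_load_offset_before = 0x1efd008;
  const long dynamic_offset_before = 0x205ec50;

  // After packing.
  const long new_load1_offset = 0x000000;
  const long new_load1_filesz = 0x14ea4;
  const long new_load2_offset = 0x014ea4;
  const long new_load2_filesz = 0x1cea164;
  const long second_load_offset_after = 0x1cff008;
  const long dynamic_offset_after = 0x1e60c50;

  // Offsets behind the hole all move by the same (negative) amount.
  const long hole_size = second_load_offset_after - second_load_offset_before;
  assert(hole_size < 0);
  assert(dynamic_offset_after - dynamic_offset_before == hole_size);

  // The split LOAD entries are contiguous and reach the next LOAD's offset.
  assert(new_load1_offset + new_load1_filesz == new_load2_offset);
  assert(new_load2_offset + new_load2_filesz == second_load_offset_after);

  std::printf("hole size = %ld bytes\n", hole_size);  // -0x1fe000
  return 0;
}
```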
| 600 // Helper for ResizeSection(). Undo the work of SplitProgramHeadersForHole(). | |
| 601 void CoalesceProgramHeadersForHole(Elf* elf, | |
| 602 ELF::Off hole_start, | |
| 603 ssize_t hole_size) { | |
| 604 CHECK(hole_size > 0); | |
| 605 const ELF::Ehdr* elf_header = ELF::getehdr(elf); | |
| 606 CHECK(elf_header); | |
| 607 | |
| 608 ELF::Phdr* elf_program_header = ELF::getphdr(elf); | |
| 609 CHECK(elf_program_header); | |
| 610 | |
| 611 const size_t program_header_count = elf_header->e_phnum; | |
| 612 | |
| 613 // Locate the segment that we overwrote to form the new LOAD entry, and | |
| 614 // the segment that we split into two parts on packing. | |
| 615 ELF::Phdr* spliced_header = | |
| 616 FindFirstLoadSegment(elf_program_header, program_header_count); | |
| 617 ELF::Phdr* split_header = | |
| 618 FindOriginalFirstLoadSegment(elf_program_header, program_header_count); | |
| 619 | |
| 620 VLOG(1) << "phdr[" << spliced_header - elf_program_header << "] stack"; | |
| 621 VLOG(1) << "phdr[" << split_header - elf_program_header << "] coalesce"; | |
| 622 | |
| 623 // Find the last section in the second segment we are coalescing. | |
| 624 Elf_Scn* last_section = | |
| 625 FindLastSectionInSegment(elf, split_header, hole_start, hole_size); | |
| 626 | |
| 627 size_t string_index; | |
| 628 elf_getshdrstrndx(elf, &string_index); | |
| 629 | |
| 630 const ELF::Shdr* section_header = ELF::getshdr(last_section); | |
| 631 std::string name = elf_strptr(elf, string_index, section_header->sh_name); | |
| 632 VLOG(1) << "section " << name << " coalesced"; | |
| 633 | |
| 634 // Rewrite the coalesced segment into split_header. | |
| 635 const ELF::Shdr* last_section_header = ELF::getshdr(last_section); | |
| 636 split_header->p_offset = spliced_header->p_offset; | |
| 637 split_header->p_vaddr = spliced_header->p_vaddr; | |
| 638 split_header->p_paddr = split_header->p_vaddr; | |
| 639 split_header->p_filesz = | |
| 640 last_section_header->sh_offset + last_section_header->sh_size; | |
| 641 split_header->p_memsz = split_header->p_filesz; | |
| 642 | |
| 643 // Reconstruct the original GNU_STACK segment into spliced_header. | |
| 644 spliced_header->p_type = PT_GNU_STACK; | |
| 645 spliced_header->p_offset = 0; | |
| 646 spliced_header->p_vaddr = 0; | |
| 647 spliced_header->p_paddr = 0; | |
| 648 spliced_header->p_filesz = 0; | |
| 649 spliced_header->p_memsz = 0; | |
| 650 spliced_header->p_flags = PF_R | PF_W; | |
| 651 spliced_header->p_align = ELF::kGnuStackSegmentAlignment; | |
| 652 | |
| 653 // Adjust the offsets of all program headers that are not one of the pair | |
| 654 // we just coalesced. | |
| 655 AdjustProgramHeaderOffsets(elf_program_header, | |
| 656 program_header_count, | |
| 657 spliced_header, | |
| 658 split_header, | |
| 659 hole_start, | |
| 660 hole_size); | |
| 661 | |
| 662 // Finally, order loadable segments by offset/address. The crazy linker | |
| 663 // contains assumptions about loadable segment ordering. | |
| 664 SortOrderSensitiveProgramHeaders(elf_program_header, | |
| 665 program_header_count); | |
| 666 } | |
| 667 | |
| 668 // Helper for ResizeSection(). Rewrite program headers. | |
| 669 void RewriteProgramHeadersForHole(Elf* elf, | |
| 670 ELF::Off hole_start, | |
| 671 ssize_t hole_size) { | |
| 672 // If hole_size is negative then we are removing a piece of the file, and | |
| 673 // we want to split program headers so that we keep the same addresses | |
| 674 // for text and data. If positive, then we are putting that piece of the | |
| 675 // file back in, so we coalesce the previously split program headers. | |
| 676 if (hole_size < 0) | |
| 677 SplitProgramHeadersForHole(elf, hole_start, hole_size); | |
| 678 else if (hole_size > 0) | |
| 679 CoalesceProgramHeadersForHole(elf, hole_start, hole_size); | |
| 680 } | |
| 681 | |
| 682 // Helper for ResizeSection(). Locate and return the dynamic section. | |
| 683 Elf_Scn* GetDynamicSection(Elf* elf) { | |
| 684 const ELF::Ehdr* elf_header = ELF::getehdr(elf); | |
| 685 CHECK(elf_header); | |
| 686 | |
| 687 const ELF::Phdr* elf_program_header = ELF::getphdr(elf); | |
| 688 CHECK(elf_program_header); | |
| 689 | |
| 690 // Find the program header that describes the dynamic section. | |
| 691 const ELF::Phdr* dynamic_program_header = NULL; | |
| 692 for (size_t i = 0; i < elf_header->e_phnum; ++i) { | |
| 693 const ELF::Phdr* program_header = &elf_program_header[i]; | |
| 694 | |
| 695 if (program_header->p_type == PT_DYNAMIC) { | |
| 696 dynamic_program_header = program_header; | |
| 697 } | |
| 698 } | |
| 699 CHECK(dynamic_program_header); | |
| 700 | |
| 701 // Now find the section with the same offset as this program header. | |
| 702 Elf_Scn* dynamic_section = NULL; | |
| 703 Elf_Scn* section = NULL; | |
| 704 while ((section = elf_nextscn(elf, section)) != NULL) { | |
| 705 ELF::Shdr* section_header = ELF::getshdr(section); | |
| 706 | |
| 707 if (section_header->sh_offset == dynamic_program_header->p_offset) { | |
| 708 dynamic_section = section; | |
| 709 } | |
| 710 } | |
| 711 CHECK(dynamic_section != NULL); | |
| 712 | |
| 713 return dynamic_section; | |
| 352 } | 714 } |
| 353 | 715 |
| 354 // Helper for ResizeSection(). Adjust the .dynamic section for the hole. | 716 // Helper for ResizeSection(). Adjust the .dynamic section for the hole. |
| 355 template <typename Rel> | 717 template <typename Rel> |
| 356 void AdjustDynamicSectionForHole(Elf_Scn* dynamic_section, | 718 void AdjustDynamicSectionForHole(Elf_Scn* dynamic_section, |
| 357 bool is_relocations_resize, | |
| 358 ELF::Off hole_start, | 719 ELF::Off hole_start, |
| 359 ssize_t hole_size) { | 720 ssize_t hole_size) { |
| 360 Elf_Data* data = GetSectionData(dynamic_section); | 721 Elf_Data* data = GetSectionData(dynamic_section); |
| 361 | 722 |
| 362 const ELF::Dyn* dynamic_base = reinterpret_cast<ELF::Dyn*>(data->d_buf); | 723 const ELF::Dyn* dynamic_base = reinterpret_cast<ELF::Dyn*>(data->d_buf); |
| 363 std::vector<ELF::Dyn> dynamics( | 724 std::vector<ELF::Dyn> dynamics( |
| 364 dynamic_base, | 725 dynamic_base, |
| 365 dynamic_base + data->d_size / sizeof(dynamics[0])); | 726 dynamic_base + data->d_size / sizeof(dynamics[0])); |
| 366 | 727 |
| 367 for (size_t i = 0; i < dynamics.size(); ++i) { | 728 for (size_t i = 0; i < dynamics.size(); ++i) { |
| 368 ELF::Dyn* dynamic = &dynamics[i]; | 729 ELF::Dyn* dynamic = &dynamics[i]; |
| 369 const ELF::Sword tag = dynamic->d_tag; | 730 const ELF::Sword tag = dynamic->d_tag; |
| 370 // Any tags that hold offsets are adjustment candidates. | |
| 371 const bool is_adjustable = (tag == DT_PLTGOT || | |
| 372 tag == DT_HASH || | |
| 373 tag == DT_STRTAB || | |
| 374 tag == DT_SYMTAB || | |
| 375 tag == DT_RELA || | |
| 376 tag == DT_INIT || | |
| 377 tag == DT_FINI || | |
| 378 tag == DT_REL || | |
| 379 tag == DT_JMPREL || | |
| 380 tag == DT_INIT_ARRAY || | |
| 381 tag == DT_FINI_ARRAY || | |
| 382 tag == DT_ANDROID_REL_OFFSET); | |
| 383 if (is_adjustable && dynamic->d_un.d_ptr > hole_start) { | |
| 384 dynamic->d_un.d_ptr += hole_size; | |
| 385 VLOG(1) << "dynamic[" << i << "] " << dynamic->d_tag | |
| 386 << " d_ptr adjusted to " << dynamic->d_un.d_ptr; | |
| 387 } | |
| 388 | |
| 389 // If we are specifically resizing dynamic relocations, we need to make | |
| 390 // some added adjustments to tags that indicate the counts of relative | |
| 391 // relocations in the shared object. | |
| 392 if (!is_relocations_resize) | |
| 393 continue; | |
| 394 | 731 |
| 395 // DT_RELSZ or DT_RELASZ indicate the overall size of relocations. | 732 // DT_RELSZ or DT_RELASZ indicate the overall size of relocations. |
| 396 // Only one will be present. Adjust by hole size. | 733 // Only one will be present. Adjust by hole size. |
| 397 if (tag == DT_RELSZ || tag == DT_RELASZ) { | 734 if (tag == DT_RELSZ || tag == DT_RELASZ) { |
| 398 dynamic->d_un.d_val += hole_size; | 735 dynamic->d_un.d_val += hole_size; |
| 399 VLOG(1) << "dynamic[" << i << "] " << dynamic->d_tag | 736 VLOG(1) << "dynamic[" << i << "] " << dynamic->d_tag |
| 400 << " d_val adjusted to " << dynamic->d_un.d_val; | 737 << " d_val adjusted to " << dynamic->d_un.d_val; |
| 401 } | 738 } |
| 402 | 739 |
| 403 // DT_RELCOUNT or DT_RELACOUNT hold the count of relative relocations. | 740 // DT_RELCOUNT or DT_RELACOUNT hold the count of relative relocations. |
| 404 // Only one will be present. Packing reduces it to the alignment | 741 // Only one will be present. Packing reduces it to the alignment |
| 405 // padding, if any; unpacking restores it to its former value. The | 742 // padding, if any; unpacking restores it to its former value. The |
| 406 // crazy linker does not use it, but we update it anyway. | 743 // crazy linker does not use it, but we update it anyway. |
| 407 if (tag == DT_RELCOUNT || tag == DT_RELACOUNT) { | 744 if (tag == DT_RELCOUNT || tag == DT_RELACOUNT) { |
| 408 // Cast sizeof to a signed type to avoid the division result being | 745 // Cast sizeof to a signed type to avoid the division result being |
| 409 // promoted into an unsigned size_t. | 746 // promoted into an unsigned size_t. |
| 410 const ssize_t sizeof_rel = static_cast<ssize_t>(sizeof(Rel)); | 747 const ssize_t sizeof_rel = static_cast<ssize_t>(sizeof(Rel)); |
| 411 dynamic->d_un.d_val += hole_size / sizeof_rel; | 748 dynamic->d_un.d_val += hole_size / sizeof_rel; |
| 412 VLOG(1) << "dynamic[" << i << "] " << dynamic->d_tag | 749 VLOG(1) << "dynamic[" << i << "] " << dynamic->d_tag |
| 413 << " d_val adjusted to " << dynamic->d_un.d_val; | 750 << " d_val adjusted to " << dynamic->d_un.d_val; |
| 414 } | 751 } |
| 415 | 752 |
| 416 // DT_RELENT and DT_RELAENT don't change, but make sure they are what | 753 // DT_RELENT and DT_RELAENT do not change, but make sure they are what |
| 417 // we expect. Only one will be present. | 754 // we expect. Only one will be present. |
| 418 if (tag == DT_RELENT || tag == DT_RELAENT) { | 755 if (tag == DT_RELENT || tag == DT_RELAENT) { |
| 419 CHECK(dynamic->d_un.d_val == sizeof(Rel)); | 756 CHECK(dynamic->d_un.d_val == sizeof(Rel)); |
| 420 } | 757 } |
| 421 } | 758 } |
| 422 | 759 |
| 423 void* section_data = &dynamics[0]; | 760 void* section_data = &dynamics[0]; |
| 424 size_t bytes = dynamics.size() * sizeof(dynamics[0]); | 761 size_t bytes = dynamics.size() * sizeof(dynamics[0]); |
| 425 RewriteSectionData(data, section_data, bytes); | 762 RewriteSectionData(data, section_data, bytes); |
| 426 } | 763 } |
| 427 | 764 |
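A standalone sketch (not from the CL) of why sizeof(Rel) is cast to a signed type in the DT_RELCOUNT / DT_RELACOUNT adjustment above: dividing a negative hole size by an unsigned divisor would first convert the hole size to a huge unsigned value.

```cpp
// Demonstrates the signed/unsigned division pitfall mentioned above.
#include <cstdio>
#include <sys/types.h>  // ssize_t

int main() {
  const ssize_t hole_size = -16;   // e.g. two 8-byte REL entries removed
  const size_t unsigned_sizeof_rel = 8;
  const ssize_t signed_sizeof_rel = 8;

  // Signed division gives the intended adjustment of -2 entries.
  std::printf("%zd\n", hole_size / signed_sizeof_rel);

  // Unsigned division converts hole_size to a huge value first.
  std::printf("%zu\n", hole_size / unsigned_sizeof_rel);
  return 0;
}
```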
| 428 // Helper for ResizeSection(). Adjust the .dynsym section for the hole. | |
| 429 // We need to adjust the values for the symbols represented in it. | |
| 430 void AdjustDynSymSectionForHole(Elf_Scn* dynsym_section, | |
| 431 ELF::Off hole_start, | |
| 432 ssize_t hole_size) { | |
| 433 Elf_Data* data = GetSectionData(dynsym_section); | |
| 434 | |
| 435 const ELF::Sym* dynsym_base = reinterpret_cast<ELF::Sym*>(data->d_buf); | |
| 436 std::vector<ELF::Sym> dynsyms | |
| 437 (dynsym_base, | |
| 438 dynsym_base + data->d_size / sizeof(dynsyms[0])); | |
| 439 | |
| 440 for (size_t i = 0; i < dynsyms.size(); ++i) { | |
| 441 ELF::Sym* dynsym = &dynsyms[i]; | |
| 442 const int type = static_cast<int>(ELF_ST_TYPE(dynsym->st_info)); | |
| 443 const bool is_adjustable = (type == STT_OBJECT || | |
| 444 type == STT_FUNC || | |
| 445 type == STT_SECTION || | |
| 446 type == STT_FILE || | |
| 447 type == STT_COMMON || | |
| 448 type == STT_TLS); | |
| 449 if (is_adjustable && dynsym->st_value > hole_start) { | |
| 450 dynsym->st_value += hole_size; | |
| 451 VLOG(1) << "dynsym[" << i << "] type=" << type | |
| 452 << " st_value adjusted to " << dynsym->st_value; | |
| 453 } | |
| 454 } | |
| 455 | |
| 456 void* section_data = &dynsyms[0]; | |
| 457 size_t bytes = dynsyms.size() * sizeof(dynsyms[0]); | |
| 458 RewriteSectionData(data, section_data, bytes); | |
| 459 } | |
| 460 | |
| 461 // Helper for ResizeSection(). Adjust the plt relocations section for the | |
| 462 // hole. We need to adjust the offset of every relocation inside it that | |
| 463 // falls beyond the hole start. | |
| 464 template <typename Rel> | |
| 465 void AdjustRelPltSectionForHole(Elf_Scn* relplt_section, | |
| 466 ELF::Off hole_start, | |
| 467 ssize_t hole_size) { | |
| 468 Elf_Data* data = GetSectionData(relplt_section); | |
| 469 | |
| 470 const Rel* relplt_base = reinterpret_cast<Rel*>(data->d_buf); | |
| 471 std::vector<Rel> relplts( | |
| 472 relplt_base, | |
| 473 relplt_base + data->d_size / sizeof(relplts[0])); | |
| 474 | |
| 475 for (size_t i = 0; i < relplts.size(); ++i) { | |
| 476 Rel* relplt = &relplts[i]; | |
| 477 if (relplt->r_offset > hole_start) { | |
| 478 relplt->r_offset += hole_size; | |
| 479 VLOG(1) << "relplt[" << i | |
| 480 << "] r_offset adjusted to " << relplt->r_offset; | |
| 481 } | |
| 482 } | |
| 483 | |
| 484 void* section_data = &relplts[0]; | |
| 485 size_t bytes = relplts.size() * sizeof(relplts[0]); | |
| 486 RewriteSectionData(data, section_data, bytes); | |
| 487 } | |
| 488 | |
| 489 // Helper for ResizeSection(). Adjust the .symtab section for the hole. | |
| 490 // We want to adjust the value of every symbol in it that falls beyond | |
| 491 // the hole start. | |
| 492 void AdjustSymTabSectionForHole(Elf_Scn* symtab_section, | |
| 493 ELF::Off hole_start, | |
| 494 ssize_t hole_size) { | |
| 495 Elf_Data* data = GetSectionData(symtab_section); | |
| 496 | |
| 497 const ELF::Sym* symtab_base = reinterpret_cast<ELF::Sym*>(data->d_buf); | |
| 498 std::vector<ELF::Sym> symtab( | |
| 499 symtab_base, | |
| 500 symtab_base + data->d_size / sizeof(symtab[0])); | |
| 501 | |
| 502 for (size_t i = 0; i < symtab.size(); ++i) { | |
| 503 ELF::Sym* sym = &symtab[i]; | |
| 504 if (sym->st_value > hole_start) { | |
| 505 sym->st_value += hole_size; | |
| 506 VLOG(1) << "symtab[" << i << "] value adjusted to " << sym->st_value; | |
| 507 } | |
| 508 } | |
| 509 | |
| 510 void* section_data = &symtab[0]; | |
| 511 size_t bytes = symtab.size() * sizeof(symtab[0]); | |
| 512 RewriteSectionData(data, section_data, bytes); | |
| 513 } | |
| 514 | |
| 515 // Resize a section. If the new size is larger than the current size, open | 765 // Resize a section. If the new size is larger than the current size, open |
| 516 // up a hole by increasing file offsets that come after the hole. If smaller | 766 // up a hole by increasing file offsets that come after the hole. If smaller |
| 517 // than the current size, remove the hole by decreasing those offsets. | 767 // than the current size, remove the hole by decreasing those offsets. |
| 518 template <typename Rel> | 768 template <typename Rel> |
| 519 void ResizeSection(Elf* elf, Elf_Scn* section, size_t new_size) { | 769 void ResizeSection(Elf* elf, Elf_Scn* section, size_t new_size) { |
| 520 ELF::Shdr* section_header = ELF::getshdr(section); | 770 ELF::Shdr* section_header = ELF::getshdr(section); |
| 521 if (section_header->sh_size == new_size) | 771 if (section_header->sh_size == new_size) |
| 522 return; | 772 return; |
| 523 | 773 |
| 524 // Note if we are resizing the real dyn relocations. If yes, then we have | 774 // Note if we are resizing the real dyn relocations. |
| 525 // to massage d_un.d_val in the dynamic section where d_tag is DT_RELSZ or | |
| 526 // DT_RELASZ and DT_RELCOUNT or DT_RELACOUNT. | |
| 527 size_t string_index; | 775 size_t string_index; |
| 528 elf_getshdrstrndx(elf, &string_index); | 776 elf_getshdrstrndx(elf, &string_index); |
| 529 const std::string section_name = | 777 const std::string section_name = |
| 530 elf_strptr(elf, string_index, section_header->sh_name); | 778 elf_strptr(elf, string_index, section_header->sh_name); |
| 531 const bool is_relocations_resize = | 779 const bool is_relocations_resize = |
| 532 (section_name == ".rel.dyn" || section_name == ".rela.dyn"); | 780 (section_name == ".rel.dyn" || section_name == ".rela.dyn"); |
| 533 | 781 |
| 534 // Require that the section size and the data size are the same. True | 782 // Require that the section size and the data size are the same. True |
| 535 // in practice for all sections we resize when packing or unpacking. | 783 // in practice for all sections we resize when packing or unpacking. |
| 536 Elf_Data* data = GetSectionData(section); | 784 Elf_Data* data = GetSectionData(section); |
| 537 CHECK(data->d_off == 0 && data->d_size == section_header->sh_size); | 785 CHECK(data->d_off == 0 && data->d_size == section_header->sh_size); |
| 538 | 786 |
| 539 // Require that the section is not zero-length (that is, has allocated | 787 // Require that the section is not zero-length (that is, has allocated |
| 540 // data that we can validly expand). | 788 // data that we can validly expand). |
| 541 CHECK(data->d_size && data->d_buf); | 789 CHECK(data->d_size && data->d_buf); |
| 542 | 790 |
| 543 const ELF::Off hole_start = section_header->sh_offset; | 791 const ELF::Off hole_start = section_header->sh_offset; |
| 544 const ssize_t hole_size = new_size - data->d_size; | 792 const ssize_t hole_size = new_size - data->d_size; |
| 545 | 793 |
| 546 VLOG_IF(1, (hole_size > 0)) << "expand section size = " << data->d_size; | 794 VLOG_IF(1, (hole_size > 0)) << "expand section size = " << data->d_size; |
| 547 VLOG_IF(1, (hole_size < 0)) << "shrink section size = " << data->d_size; | 795 VLOG_IF(1, (hole_size < 0)) << "shrink section size = " << data->d_size; |
| 548 | 796 |
| 549 // Resize the data and the section header. | 797 // Resize the data and the section header. |
| 550 data->d_size += hole_size; | 798 data->d_size += hole_size; |
| 551 section_header->sh_size += hole_size; | 799 section_header->sh_size += hole_size; |
| 552 | 800 |
| 553 ELF::Ehdr* elf_header = ELF::getehdr(elf); | |
| 554 ELF::Phdr* elf_program_header = ELF::getphdr(elf); | |
| 555 | |
| 556 // Add the hole size to all offsets in the ELF file that are after the | 801 // Add the hole size to all offsets in the ELF file that are after the |
| 557 // start of the hole. If the hole size is positive we are expanding the | 802 // start of the hole. If the hole size is positive we are expanding the |
| 558 // section to create a new hole; if negative, we are closing up a hole. | 803 // section to create a new hole; if negative, we are closing up a hole. |
| 559 | 804 |
| 560 // Start with the main ELF header. | 805 // Start with the main ELF header. |
| 806 ELF::Ehdr* elf_header = ELF::getehdr(elf); | |
| 561 AdjustElfHeaderForHole(elf_header, hole_start, hole_size); | 807 AdjustElfHeaderForHole(elf_header, hole_start, hole_size); |
| 562 | 808 |
| 563 // Adjust all program headers. | |
| 564 AdjustProgramHeadersForHole(elf_program_header, | |
| 565 elf_header->e_phnum, | |
| 566 hole_start, | |
| 567 hole_size); | |
| 568 | |
| 569 // Adjust all section headers. | 809 // Adjust all section headers. |
| 570 AdjustSectionHeadersForHole(elf, hole_start, hole_size); | 810 AdjustSectionHeadersForHole(elf, hole_start, hole_size); |
| 571 | 811 |
| 572 // We use the dynamic program header entry to locate the dynamic section. | 812 // If resizing the dynamic relocations, rewrite the program headers to |
| 573 const ELF::Phdr* dynamic_program_header = NULL; | 813 // either split or coalesce segments, and adjust dynamic entries to match. |
| 814 if (is_relocations_resize) { | |
| 815 RewriteProgramHeadersForHole(elf, hole_start, hole_size); | |
| 574 | 816 |
| 575 // Find the dynamic program header entry. | 817 Elf_Scn* dynamic_section = GetDynamicSection(elf);; |

Anton (2014/09/03 16:57:20): two semicolons, just to be sure.
simonb (inactive) (2014/09/03 17:52:21): Done.
| 576 for (size_t i = 0; i < elf_header->e_phnum; ++i) { | 818 AdjustDynamicSectionForHole<Rel>(dynamic_section, hole_start, hole_size); |
| 577 ELF::Phdr* program_header = &elf_program_header[i]; | |
| 578 | |
| 579 if (program_header->p_type == PT_DYNAMIC) { | |
| 580 dynamic_program_header = program_header; | |
| 581 } | |
| 582 } | 819 } |
| 583 CHECK(dynamic_program_header); | |
| 584 | |
| 585 // Sections requiring special attention, and the packed android | |
| 586 // relocations offset. | |
| 587 Elf_Scn* dynamic_section = NULL; | |
| 588 Elf_Scn* dynsym_section = NULL; | |
| 589 Elf_Scn* plt_relocations_section = NULL; | |
| 590 Elf_Scn* symtab_section = NULL; | |
| 591 ELF::Off android_relocations_offset = 0; | |
| 592 | |
| 593 // Find these sections, and the packed android relocations offset. | |
| 594 section = NULL; | |
| 595 while ((section = elf_nextscn(elf, section)) != NULL) { | |
| 596 ELF::Shdr* section_header = ELF::getshdr(section); | |
| 597 std::string name = elf_strptr(elf, string_index, section_header->sh_name); | |
| 598 | |
| 599 if (section_header->sh_offset == dynamic_program_header->p_offset) { | |
| 600 dynamic_section = section; | |
| 601 } | |
| 602 if (name == ".dynsym") { | |
| 603 dynsym_section = section; | |
| 604 } | |
| 605 if (name == ".rel.plt" || name == ".rela.plt") { | |
| 606 plt_relocations_section = section; | |
| 607 } | |
| 608 if (name == ".symtab") { | |
| 609 symtab_section = section; | |
| 610 } | |
| 611 | |
| 612 // Note packed android relocations offset. | |
| 613 if (name == ".android.rel.dyn" || name == ".android.rela.dyn") { | |
| 614 android_relocations_offset = section_header->sh_offset; | |
| 615 } | |
| 616 } | |
| 617 CHECK(dynamic_section != NULL); | |
| 618 CHECK(dynsym_section != NULL); | |
| 619 CHECK(plt_relocations_section != NULL); | |
| 620 CHECK(android_relocations_offset != 0); | |
| 621 | |
| 622 // Adjust the .dynamic section for the hole. Because we have to edit the | |
| 623 // current contents of .dynamic we disallow resizing it. | |
| 624 CHECK(section != dynamic_section); | |
| 625 AdjustDynamicSectionForHole<Rel>(dynamic_section, | |
| 626 is_relocations_resize, | |
| 627 hole_start, | |
| 628 hole_size); | |
| 629 | |
| 630 // Adjust the .dynsym section for the hole. | |
| 631 AdjustDynSymSectionForHole(dynsym_section, hole_start, hole_size); | |
| 632 | |
| 633 // Adjust the plt relocations section for the hole. | |
| 634 AdjustRelPltSectionForHole<Rel>(plt_relocations_section, | |
| 635 hole_start, | |
| 636 hole_size); | |
| 637 | |
| 638 // If present, adjust the .symtab section for the hole. If the shared | |
| 639 // library was stripped then .symtab will be absent. | |
| 640 if (symtab_section) | |
| 641 AdjustSymTabSectionForHole(symtab_section, hole_start, hole_size); | |
| 642 } | 820 } |
| 643 | 821 |
| 644 // Find the first slot in a dynamics array with the given tag. The array | 822 // Find the first slot in a dynamics array with the given tag. The array |
| 645 // always ends with a free (unused) element, which we exclude from the | 823 // always ends with a free (unused) element, which we exclude from the |
| 646 // search. Returns dynamics->size() if not found. | 824 // search. Returns dynamics->size() if not found. |
| 647 size_t FindDynamicEntry(ELF::Sword tag, | 825 size_t FindDynamicEntry(ELF::Sword tag, |
| 648 std::vector<ELF::Dyn>* dynamics) { | 826 std::vector<ELF::Dyn>* dynamics) { |
| 649 // Loop until the penultimate entry. We exclude the end sentinel. | 827 // Loop until the penultimate entry. We exclude the end sentinel. |
| 650 for (size_t i = 0; i < dynamics->size() - 1; ++i) { | 828 for (size_t i = 0; i < dynamics->size() - 1; ++i) { |
| 651 if (dynamics->at(i).d_tag == tag) | 829 if (dynamics->at(i).d_tag == tag) |
| (...skipping 32 matching lines...) | |
| 684 for (size_t i = slot; i < dynamics->size() - 1; ++i) { | 862 for (size_t i = slot; i < dynamics->size() - 1; ++i) { |
| 685 dynamics->at(i) = dynamics->at(i + 1); | 863 dynamics->at(i) = dynamics->at(i + 1); |
| 686 VLOG(1) << "dynamic[" << i | 864 VLOG(1) << "dynamic[" << i |
| 687 << "] overwritten with dynamic[" << i + 1 << "]"; | 865 << "] overwritten with dynamic[" << i + 1 << "]"; |
| 688 } | 866 } |
| 689 | 867 |
| 690 // Ensure that the end sentinel is still present. | 868 // Ensure that the end sentinel is still present. |
| 691 CHECK(dynamics->at(dynamics->size() - 1).d_tag == DT_NULL); | 869 CHECK(dynamics->at(dynamics->size() - 1).d_tag == DT_NULL); |
| 692 } | 870 } |
| 693 | 871 |
| 694 // Adjust a relocation. For a relocation without addend, we find its target | |
| 695 // in the section and adjust that. For a relocation with addend, the target | |
| 696 // is the relocation addend, and the section data at the target is zero. | |
| 697 template <typename Rel> | |
| 698 void AdjustRelocation(ssize_t index, | |
| 699 ELF::Addr hole_start, | |
| 700 ssize_t hole_size, | |
| 701 Rel* relocation, | |
| 702 ELF::Off* target); | |
| 703 | |
| 704 template <> | |
| 705 void AdjustRelocation<ELF::Rel>(ssize_t index, | |
| 706 ELF::Addr hole_start, | |
| 707 ssize_t hole_size, | |
| 708 ELF::Rel* relocation, | |
| 709 ELF::Off* target) { | |
| 710 // Adjust the target if after the hole start. | |
| 711 if (*target > hole_start) { | |
| 712 *target += hole_size; | |
| 713 VLOG(1) << "relocation[" << index << "] target adjusted to " << *target; | |
| 714 } | |
| 715 } | |
| 716 | |
| 717 template <> | |
| 718 void AdjustRelocation<ELF::Rela>(ssize_t index, | |
| 719 ELF::Addr hole_start, | |
| 720 ssize_t hole_size, | |
| 721 ELF::Rela* relocation, | |
| 722 ELF::Off* target) { | |
| 723 // The relocation's target is the addend. Adjust if after the hole start. | |
| 724 if (relocation->r_addend > hole_start) { | |
| 725 relocation->r_addend += hole_size; | |
| 726 VLOG(1) << "relocation[" | |
| 727 << index << "] addend adjusted to " << relocation->r_addend; | |
| 728 } | |
| 729 } | |
| 730 | |
| 731 // For relative relocations without addends, adjust the file data to which | |
| 732 // they refer. For relative relocations with addends, adjust the addends. | |
| 733 // This translates data into the area it will occupy after the hole in | |
| 734 // the dynamic relocations is added or removed. | |
| 735 template <typename Rel> | |
| 736 void AdjustRelocationTargets(Elf* elf, | |
| 737 ELF::Off hole_start, | |
| 738 ssize_t hole_size, | |
| 739 std::vector<Rel>* relocations) { | |
| 740 Elf_Scn* section = NULL; | |
| 741 while ((section = elf_nextscn(elf, section)) != NULL) { | |
| 742 const ELF::Shdr* section_header = ELF::getshdr(section); | |
| 743 | |
| 744 // Ignore sections that do not appear in a process memory image. | |
| 745 if (section_header->sh_addr == 0) | |
| 746 continue; | |
| 747 | |
| 748 Elf_Data* data = GetSectionData(section); | |
| 749 | |
| 750 // Ignore sections with no effective data. | |
| 751 if (data->d_buf == NULL) | |
| 752 continue; | |
| 753 | |
| 754 // Identify this section's start and end addresses. | |
| 755 const ELF::Addr section_start = section_header->sh_addr; | |
| 756 const ELF::Addr section_end = section_start + section_header->sh_size; | |
| 757 | |
| 758 // Create a copy of the section's data. | |
| 759 uint8_t* area = new uint8_t[data->d_size]; | |
| 760 memcpy(area, data->d_buf, data->d_size); | |
| 761 | |
| 762 for (size_t i = 0; i < relocations->size(); ++i) { | |
| 763 Rel* relocation = &relocations->at(i); | |
| 764 CHECK(ELF_R_TYPE(relocation->r_info) == ELF::kRelativeRelocationCode); | |
| 765 | |
| 766 // See if this relocation points into the current section. | |
| 767 if (relocation->r_offset >= section_start && | |
| 768 relocation->r_offset < section_end) { | |
| 769 // The relocation's target is what it points to in area. | |
| 770 // For relocations without addend, this is what we adjust; for | |
| 771 // relocations with addend, we leave this (it will be zero) | |
| 772 // and instead adjust the addend. | |
| 773 ELF::Addr byte_offset = relocation->r_offset - section_start; | |
| 774 ELF::Off* target = reinterpret_cast<ELF::Off*>(area + byte_offset); | |
| 775 AdjustRelocation<Rel>(i, hole_start, hole_size, relocation, target); | |
| 776 } | |
| 777 } | |
| 778 | |
| 779 // If we altered the data for this section, write it back. | |
| 780 if (memcmp(area, data->d_buf, data->d_size)) { | |
| 781 RewriteSectionData(data, area, data->d_size); | |
| 782 } | |
| 783 delete [] area; | |
| 784 } | |
| 785 } | |
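AdjustRelocationTargets above patches a private copy of each section's bytes and only calls RewriteSectionData() when the copy actually changed. A self-contained sketch of that copy/patch/compare shape, with an invented buffer and a single fake relocation (the real code casts into the buffer in place; the sketch uses memcpy to stay alignment-safe):

```cpp
// Self-contained sketch only: the buffer, addresses and the single fake
// relocation are invented for illustration.
#include <cstdint>
#include <cstdio>
#include <cstring>
#include <vector>

int main() {
  const uint64_t section_start = 0x2000;   // stands in for sh_addr
  std::vector<uint8_t> original(32, 0);
  const uint64_t stored = 0x5000;          // a pointer value held in the section
  std::memcpy(&original[8], &stored, sizeof(stored));

  std::vector<uint8_t> area(original);     // patch a copy, as the code above does

  const uint64_t r_offset = section_start + 8;  // fake REL-style relative reloc
  const uint64_t hole_start = 0x1000;
  const int64_t hole_size = -0x100;        // closing a 0x100-byte hole

  // Only touch words that fall inside this section's address range.
  if (r_offset >= section_start && r_offset < section_start + area.size()) {
    uint64_t target;
    std::memcpy(&target, &area[r_offset - section_start], sizeof(target));
    if (target > hole_start) {
      target += hole_size;                 // relocate the stored pointer
      std::memcpy(&area[r_offset - section_start], &target, sizeof(target));
    }
  }

  // Only write back when something changed (RewriteSectionData in the tool).
  if (std::memcmp(area.data(), original.data(), area.size()) != 0)
    std::printf("section data changed; would write it back\n");
  return 0;
}
```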
| 786 | |
| 787 // Pad relocations with a given number of null relocations. | |
| 788 template <typename Rel> | 872 template <typename Rel> |
| 789 void PadRelocations(size_t count, std::vector<Rel>* relocations); | 873 void PadRelocations(size_t count, std::vector<Rel>* relocations); |
| 790 | 874 |
| 791 template <> | 875 template <> |
| 792 void PadRelocations<ELF::Rel>(size_t count, | 876 void PadRelocations<ELF::Rel>(size_t count, |
| 793 std::vector<ELF::Rel>* relocations) { | 877 std::vector<ELF::Rel>* relocations) { |
| 794 ELF::Rel null_relocation; | 878 ELF::Rel null_relocation; |
| 795 null_relocation.r_offset = 0; | 879 null_relocation.r_offset = 0; |
| 796 null_relocation.r_info = ELF_R_INFO(0, ELF::kNoRelocationCode); | 880 null_relocation.r_info = ELF_R_INFO(0, ELF::kNoRelocationCode); |
| 797 std::vector<ELF::Rel> padding(count, null_relocation); | 881 std::vector<ELF::Rel> padding(count, null_relocation); |
| 798 relocations->insert(relocations->end(), padding.begin(), padding.end()); | 882 relocations->insert(relocations->end(), padding.begin(), padding.end()); |
| 799 } | 883 } |
| 800 | 884 |
| 801 template <> | 885 template <> |
| 802 void PadRelocations<ELF::Rela>(size_t count, | 886 void PadRelocations<ELF::Rela>(size_t count, |
| 803 std::vector<ELF::Rela>* relocations) { | 887 std::vector<ELF::Rela>* relocations) { |
| 804 ELF::Rela null_relocation; | 888 ELF::Rela null_relocation; |
| 805 null_relocation.r_offset = 0; | 889 null_relocation.r_offset = 0; |
| 806 null_relocation.r_info = ELF_R_INFO(0, ELF::kNoRelocationCode); | 890 null_relocation.r_info = ELF_R_INFO(0, ELF::kNoRelocationCode); |
| 807 null_relocation.r_addend = 0; | 891 null_relocation.r_addend = 0; |
| 808 std::vector<ELF::Rela> padding(count, null_relocation); | 892 std::vector<ELF::Rela> padding(count, null_relocation); |
| 809 relocations->insert(relocations->end(), padding.begin(), padding.end()); | 893 relocations->insert(relocations->end(), padding.begin(), padding.end()); |
| 810 } | 894 } |
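PadRelocations above appends no-op entries so that a later rewrite of the relocation section keeps its original byte size. A small sketch of the same idea with invented types; the "none" code is a stand-in for ELF::kNoRelocationCode, which on the usual targets (R_ARM_NONE, R_AARCH64_NONE) is 0:

```cpp
// Toy sketch: ToyRel and kToyNoneRelocation stand in for the real types.
#include <cstdint>
#include <cstdio>
#include <vector>

struct ToyRel {
  uint64_t r_offset;
  uint64_t r_info;
};

// A relocation the dynamic linker will skip; 0 is R_*_NONE on the targets
// this tool cares about (assumption for this sketch).
constexpr uint64_t kToyNoneRelocation = 0;

void PadWithNoneRelocations(size_t count, std::vector<ToyRel>* relocations) {
  const ToyRel null_relocation = {0, kToyNoneRelocation};
  relocations->insert(relocations->end(), count, null_relocation);
}

int main() {
  // Say 10 relocations were read and 7 relative ones were split out; padding
  // other_relocations back up makes a later section resize a no-op.
  std::vector<ToyRel> other_relocations(3, ToyRel{0x1000, 23});
  PadWithNoneRelocations(10 - other_relocations.size(), &other_relocations);
  std::printf("now %zu entries\n", other_relocations.size());
  return 0;
}
```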
| 811 | 895 |
| 812 // Adjust relocations so that the offset that they indicate will be correct | |
| 813 // after the hole in the dynamic relocations is added or removed (in effect, | |
| 814 // relocate the relocations). | |
| 815 template <typename Rel> | |
| 816 void AdjustRelocations(ELF::Off hole_start, | |
| 817 ssize_t hole_size, | |
| 818 std::vector<Rel>* relocations) { | |
| 819 for (size_t i = 0; i < relocations->size(); ++i) { | |
| 820 Rel* relocation = &relocations->at(i); | |
| 821 if (relocation->r_offset > hole_start) { | |
| 822 relocation->r_offset += hole_size; | |
| 823 VLOG(1) << "relocation[" << i | |
| 824 << "] offset adjusted to " << relocation->r_offset; | |
| 825 } | |
| 826 } | |
| 827 } | |
| 828 | |
| 829 } // namespace | 896 } // namespace |
| 830 | 897 |
| 831 // Remove relative entries from dynamic relocations and write as packed | 898 // Remove relative entries from dynamic relocations and write as packed |
| 832 // data into android packed relocations. | 899 // data into android packed relocations. |
| 833 bool ElfFile::PackRelocations() { | 900 bool ElfFile::PackRelocations() { |
| 834 // Load the ELF file into libelf. | 901 // Load the ELF file into libelf. |
| 835 if (!Load()) { | 902 if (!Load()) { |
| 836 LOG(ERROR) << "Failed to load as ELF"; | 903 LOG(ERROR) << "Failed to load as ELF"; |
| 837 return false; | 904 return false; |
| 838 } | 905 } |
| (...skipping 49 matching lines...) | |
| 888 LOG(INFO) << "Other : " << other_relocations.size() << " entries"; | 955 LOG(INFO) << "Other : " << other_relocations.size() << " entries"; |
| 889 LOG(INFO) << "Total : " << relocations.size() << " entries"; | 956 LOG(INFO) << "Total : " << relocations.size() << " entries"; |
| 890 | 957 |
| 891 // If no relative relocations then we have nothing packable. Perhaps | 958 // If no relative relocations then we have nothing packable. Perhaps |
| 892 // the shared object has already been packed? | 959 // the shared object has already been packed? |
| 893 if (relative_relocations.empty()) { | 960 if (relative_relocations.empty()) { |
| 894 LOG(ERROR) << "No relative relocations found (already packed?)"; | 961 LOG(ERROR) << "No relative relocations found (already packed?)"; |
| 895 return false; | 962 return false; |
| 896 } | 963 } |
| 897 | 964 |
| 898 // Unless padding, pre-apply relative relocations to account for the | 965 // If not padding fully, apply only enough padding to preserve alignment. |
| 899 // hole, and pre-adjust all relocation offsets accordingly. | 966 // Otherwise, pad so that we do not shrink the relocations section at all. |
| 900 if (!is_padding_relocations_) { | 967 if (!is_padding_relocations_) { |
| 901 // Pre-calculate the size of the hole we will close up when we rewrite | 968 // Calculate the size of the hole we will close up when we rewrite |
| 902 // dynamic relocations. We have to adjust relocation addresses to | 969 // dynamic relocations. |
| 903 // account for this. | |
| 904 ELF::Shdr* section_header = ELF::getshdr(relocations_section_); | 970 ELF::Shdr* section_header = ELF::getshdr(relocations_section_); |
| 905 const ELF::Off hole_start = section_header->sh_offset; | 971 const ELF::Off hole_start = section_header->sh_offset; |
| 906 ssize_t hole_size = | 972 ssize_t hole_size = |
| 907 relative_relocations.size() * sizeof(relative_relocations[0]); | 973 relative_relocations.size() * sizeof(relative_relocations[0]); |
| 908 const ssize_t unaligned_hole_size = hole_size; | 974 const ssize_t unaligned_hole_size = hole_size; |
| 909 | 975 |
| 910 // Adjust the actual hole size to preserve alignment. We always adjust | 976 // Adjust the actual hole size to preserve alignment. We always adjust |
| 911 // by a whole number of NONE-type relocations. | 977 // by a whole number of NONE-type relocations. |
| 912 while (hole_size % kPreserveAlignment) | 978 while (hole_size % kPreserveAlignment) |
| 913 hole_size -= sizeof(relative_relocations[0]); | 979 hole_size -= sizeof(relative_relocations[0]); |
| (...skipping 18 matching lines...) | |
| 932 | 998 |
| 933 // Padding may have removed any packing benefit. | 999 // Padding may have removed any packing benefit. |
| 934 if (padding >= relative_relocations.size()) { | 1000 if (padding >= relative_relocations.size()) { |
| 935 LOG(INFO) << "Too few relative relocations to pack after padding"; | 1001 LOG(INFO) << "Too few relative relocations to pack after padding"; |
| 936 return false; | 1002 return false; |
| 937 } | 1003 } |
| 938 | 1004 |
| 939 // Add null relocations to other_relocations to preserve alignment. | 1005 // Add null relocations to other_relocations to preserve alignment. |
| 940 PadRelocations<Rel>(padding, &other_relocations); | 1006 PadRelocations<Rel>(padding, &other_relocations); |
| 941 LOG(INFO) << "Alignment pad : " << padding << " relocations"; | 1007 LOG(INFO) << "Alignment pad : " << padding << " relocations"; |
| 942 | |
| 943 // Apply relocations to all relative data to relocate it into the | |
| 944 // area it will occupy once the hole in the dynamic relocations is removed. | |
| 945 AdjustRelocationTargets<Rel>( | |
| 946 elf_, hole_start, -hole_size, &relative_relocations); | |
| 947 // Relocate the relocations. | |
| 948 AdjustRelocations<Rel>(hole_start, -hole_size, &relative_relocations); | |
| 949 AdjustRelocations<Rel>(hole_start, -hole_size, &other_relocations); | |
| 950 } else { | 1008 } else { |
| 951 // If padding, add NONE-type relocations to other_relocations to make it | 1009 // If padding, add NONE-type relocations to other_relocations to make it |
| 952 // the same size as the original relocations we read in. This makes | 1010 // the same size as the original relocations we read in. This makes |
| 953 // the ResizeSection() below a no-op. | 1011 // the ResizeSection() below a no-op. |
| 954 const size_t padding = relocations.size() - other_relocations.size(); | 1012 const size_t padding = relocations.size() - other_relocations.size(); |
| 955 PadRelocations<Rel>(padding, &other_relocations); | 1013 PadRelocations<Rel>(padding, &other_relocations); |
| 956 } | 1014 } |
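For reference, the arithmetic behind the "Alignment pad" in the first branch above: the hole is shrunk a whole relocation at a time until it is a multiple of the alignment being preserved, and the shortfall is made up with NONE-type padding entries. A worked example with assumed numbers (8-byte REL entries and a 4096-byte kPreserveAlignment are illustrative, not quoted from this file):

```cpp
// Worked example only; the entry size and alignment below are assumptions.
#include <cstdio>

int main() {
  const long kRelSize = 8;               // e.g. sizeof(ELF::Rel) on ARM32
  const long kPreserveAlignment = 4096;  // illustrative alignment to preserve

  long hole_size = 1000 * kRelSize;      // 1000 relative relocations removed
  const long unaligned_hole_size = hole_size;

  // Shrink the hole a whole relocation at a time until it is a multiple of
  // the alignment, exactly as the loop above does.
  while (hole_size % kPreserveAlignment)
    hole_size -= kRelSize;

  // The shortfall is made up with NONE-type padding relocations.
  const long padding = (unaligned_hole_size - hole_size) / kRelSize;
  std::printf("hole %ld bytes, %ld padding relocations\n", hole_size, padding);
  return 0;
}
```

With these numbers the 8000-byte hole rounds down to 4096 bytes and 488 padding relocations are appended, which is what the "Alignment pad" log line would report.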
| 957 | 1015 |
| 958 // Pack relative relocations. | 1016 // Pack relative relocations. |
| 959 const size_t initial_bytes = | 1017 const size_t initial_bytes = |
| (...skipping 42 matching lines...) | |
| 1002 | 1060 |
| 1003 // Rewrite .dynamic to include two new tags describing the packed android | 1061 // Rewrite .dynamic to include two new tags describing the packed android |
| 1004 // relocations. | 1062 // relocations. |
| 1005 data = GetSectionData(dynamic_section_); | 1063 data = GetSectionData(dynamic_section_); |
| 1006 const ELF::Dyn* dynamic_base = reinterpret_cast<ELF::Dyn*>(data->d_buf); | 1064 const ELF::Dyn* dynamic_base = reinterpret_cast<ELF::Dyn*>(data->d_buf); |
| 1007 std::vector<ELF::Dyn> dynamics( | 1065 std::vector<ELF::Dyn> dynamics( |
| 1008 dynamic_base, | 1066 dynamic_base, |
| 1009 dynamic_base + data->d_size / sizeof(dynamics[0])); | 1067 dynamic_base + data->d_size / sizeof(dynamics[0])); |
| 1010 // Use two of the spare slots to describe the packed section. | 1068 // Use two of the spare slots to describe the packed section. |
| 1011 ELF::Shdr* section_header = ELF::getshdr(android_relocations_section_); | 1069 ELF::Shdr* section_header = ELF::getshdr(android_relocations_section_); |
| 1012 const ELF::Dyn offset_dyn | 1070 { |
| 1013 = {DT_ANDROID_REL_OFFSET, {section_header->sh_offset}}; | 1071 ELF::Dyn dyn; |
| 1014 AddDynamicEntry(offset_dyn, &dynamics); | 1072 dyn.d_tag = DT_ANDROID_REL_OFFSET; |
| 1015 const ELF::Dyn size_dyn | 1073 dyn.d_un.d_ptr = section_header->sh_offset; |
| 1016 = {DT_ANDROID_REL_SIZE, {section_header->sh_size}}; | 1074 AddDynamicEntry(dyn, &dynamics); |
| 1017 AddDynamicEntry(size_dyn, &dynamics); | 1075 } |
| 1076 { | |
| 1077 ELF::Dyn dyn; | |
| 1078 dyn.d_tag = DT_ANDROID_REL_SIZE; | |
| 1079 dyn.d_un.d_val = section_header->sh_size; | |
| 1080 AddDynamicEntry(dyn, &dynamics); | |
| 1081 } | |
| 1018 const void* dynamics_data = &dynamics[0]; | 1082 const void* dynamics_data = &dynamics[0]; |
| 1019 const size_t dynamics_bytes = dynamics.size() * sizeof(dynamics[0]); | 1083 const size_t dynamics_bytes = dynamics.size() * sizeof(dynamics[0]); |
| 1020 RewriteSectionData(data, dynamics_data, dynamics_bytes); | 1084 RewriteSectionData(data, dynamics_data, dynamics_bytes); |
| 1021 | 1085 |
| 1022 Flush(); | 1086 Flush(); |
| 1023 return true; | 1087 return true; |
| 1024 } | 1088 } |
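PackRelocations finishes by advertising the packed section through the DT_ANDROID_REL_OFFSET and DT_ANDROID_REL_SIZE entries added above. As a hedged sketch of the consumer side (not code from this project), a loader that knows about these tags could walk a .dynamic array to find the packed data like this; the tag constants and types are illustrative stand-ins:

```cpp
// Hedged consumer-side sketch; types, tag values and the .dynamic image are
// illustrative stand-ins, not this project's definitions.
#include <cstddef>
#include <cstdint>
#include <cstdio>

struct ToyDyn {
  int64_t d_tag;
  uint64_t d_val;  // simplified: the real ELF::Dyn carries a union here
};

// Stand-ins for DT_ANDROID_REL_OFFSET / DT_ANDROID_REL_SIZE; the real values
// sit in the OS-specific dynamic tag range (assumption for this sketch).
constexpr int64_t kAndroidRelOffsetTag = 0x6000000d;
constexpr int64_t kAndroidRelSizeTag = 0x6000000e;

// Walk the dynamic array up to DT_NULL and pick out the two tags.
bool FindPackedRelocations(const ToyDyn* dynamics, size_t count,
                           uint64_t* offset, uint64_t* size) {
  bool have_offset = false;
  bool have_size = false;
  for (size_t i = 0; i < count && dynamics[i].d_tag != 0 /* DT_NULL */; ++i) {
    if (dynamics[i].d_tag == kAndroidRelOffsetTag) {
      *offset = dynamics[i].d_val;
      have_offset = true;
    } else if (dynamics[i].d_tag == kAndroidRelSizeTag) {
      *size = dynamics[i].d_val;
      have_size = true;
    }
  }
  return have_offset && have_size;
}

int main() {
  const ToyDyn dynamics[] = {
      {3 /* DT_PLTGOT */, 0x4000},
      {kAndroidRelOffsetTag, 0x12345},   // sh_offset of the packed section
      {kAndroidRelSizeTag, 0x800},       // sh_size of the packed section
      {0 /* DT_NULL */, 0},
  };
  uint64_t offset = 0;
  uint64_t size = 0;
  if (FindPackedRelocations(dynamics, sizeof(dynamics) / sizeof(dynamics[0]),
                            &offset, &size)) {
    std::printf("packed relocations at file offset %#llx, %llu bytes\n",
                static_cast<unsigned long long>(offset),
                static_cast<unsigned long long>(size));
  }
  return 0;
}
```

Since the value written above is section_header->sh_offset, a consumer would read the packed bytes from that file offset rather than from a mapped virtual address.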
| 1025 | 1089 |
| 1026 // Find packed relative relocations in the packed android relocations | 1090 // Find packed relative relocations in the packed android relocations |
| 1027 // section, unpack them, and rewrite the dynamic relocations section to | 1091 // section, unpack them, and rewrite the dynamic relocations section to |
| (...skipping 91 matching lines...) | |
| 1119 // dynamic relocations. We have to adjust relocation addresses to | 1183 // dynamic relocations. We have to adjust relocation addresses to |
| 1120 // account for this. | 1184 // account for this. |
| 1121 ELF::Shdr* section_header = ELF::getshdr(relocations_section_); | 1185 ELF::Shdr* section_header = ELF::getshdr(relocations_section_); |
| 1122 const ELF::Off hole_start = section_header->sh_offset; | 1186 const ELF::Off hole_start = section_header->sh_offset; |
| 1123 ssize_t hole_size = | 1187 ssize_t hole_size = |
| 1124 relative_relocations.size() * sizeof(relative_relocations[0]); | 1188 relative_relocations.size() * sizeof(relative_relocations[0]); |
| 1125 | 1189 |
| 1126 // Adjust the hole size for the padding added to preserve alignment. | 1190 // Adjust the hole size for the padding added to preserve alignment. |
| 1127 hole_size -= padding * sizeof(other_relocations[0]); | 1191 hole_size -= padding * sizeof(other_relocations[0]); |
| 1128 LOG(INFO) << "Expansion : " << hole_size << " bytes"; | 1192 LOG(INFO) << "Expansion : " << hole_size << " bytes"; |
| 1129 | |
| 1130 // Apply relocations to all relative data to relocate it into the | |
| 1131 // area it will occupy once the hole in dynamic relocations is opened. | |
| 1132 AdjustRelocationTargets<Rel>( | |
| 1133 elf_, hole_start, hole_size, &relative_relocations); | |
| 1134 // Relocate the relocations. | |
| 1135 AdjustRelocations<Rel>(hole_start, hole_size, &relative_relocations); | |
| 1136 AdjustRelocations<Rel>(hole_start, hole_size, &other_relocations); | |
| 1137 } | 1193 } |
| 1138 | 1194 |
| 1139 // Rewrite the current dynamic relocations section to be the relative | 1195 // Rewrite the current dynamic relocations section to be the relative |
| 1140 // relocations followed by other relocations. This is the usual order in | 1196 // relocations followed by other relocations. This is the usual order in |
| 1141 // which we find them after linking, so this action will normally put the | 1197 // which we find them after linking, so this action will normally put the |
| 1142 // entire dynamic relocations section back to its pre-split-and-packed state. | 1198 // entire dynamic relocations section back to its pre-split-and-packed state. |
| 1143 relocations.assign(relative_relocations.begin(), relative_relocations.end()); | 1199 relocations.assign(relative_relocations.begin(), relative_relocations.end()); |
| 1144 relocations.insert(relocations.end(), | 1200 relocations.insert(relocations.end(), |
| 1145 other_relocations.begin(), other_relocations.end()); | 1201 other_relocations.begin(), other_relocations.end()); |
| 1146 const void* section_data = &relocations[0]; | 1202 const void* section_data = &relocations[0]; |
| (...skipping 42 matching lines...) | |
| 1189 | 1245 |
| 1190 // Clean up libelf, and truncate the output file to the number of bytes | 1246 // Clean up libelf, and truncate the output file to the number of bytes |
| 1191 // written by elf_update(). | 1247 // written by elf_update(). |
| 1192 elf_end(elf_); | 1248 elf_end(elf_); |
| 1193 elf_ = NULL; | 1249 elf_ = NULL; |
| 1194 const int truncate = ftruncate(fd_, file_bytes); | 1250 const int truncate = ftruncate(fd_, file_bytes); |
| 1195 CHECK(truncate == 0); | 1251 CHECK(truncate == 0); |
| 1196 } | 1252 } |
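Flush() above ends by truncating the descriptor to the byte count reported by the write, presumably so a file that shrank during packing does not keep stale trailing bytes. A small, self-contained usage sketch of that truncation step (the path and size are invented, and the libelf write that produces file_bytes is omitted):

```cpp
// Usage sketch only: path and byte count are invented; the libelf write that
// produces file_bytes is omitted.
#include <cstdio>
#include <fcntl.h>
#include <unistd.h>

int main() {
  const char* path = "/tmp/libexample_packed.so";  // hypothetical output file
  const off_t file_bytes = 4096;  // e.g. the size reported by the write step

  const int fd = open(path, O_RDWR);
  if (fd < 0) {
    std::perror("open");
    return 1;
  }
  // Same call as in Flush(): drop any stale bytes past the written image.
  if (ftruncate(fd, file_bytes) != 0) {
    std::perror("ftruncate");
    close(fd);
    return 1;
  }
  close(fd);
  return 0;
}
```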
| 1197 | 1253 |
| 1198 } // namespace relocation_packer | 1254 } // namespace relocation_packer |