Chromium Code Reviews

Side by Side Diff: tools/relocation_packer/src/elf_file.cc

Issue 535943002: Alter how relocation packing cuts holes from libchrome.so. (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Fix comment typo Created 6 years, 3 months ago
1 // Copyright 2014 The Chromium Authors. All rights reserved. 1 // Copyright 2014 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 // Implementation notes:
6 //
7 // We need to remove a piece from the ELF shared library. However, we also
8 // want to ensure that code and data loads at the same addresses as before
9 // packing, so that tools like breakpad can still match up addresses found
10 // in any crash dumps with data extracted from the pre-packed version of
11 // the shared library.
12 //
13 // Arranging this means that we have to split one of the LOAD segments into
14 // two. Unfortunately, the program headers are located at the very start
15 // of the shared library file, so expanding the program header section
16 // would cause a lot of consequent changes to file offsets that we don't
17 // really want to have to handle.
18 //
19 // Luckily, though, there is a segment that is always present and always
20 // unused on Android: the GNU_STACK segment. What we do is to steal that
21 // and repurpose it to be one of the split LOAD segments. We then have to
22 // sort LOAD segments by offset to keep the crazy linker happy.
23 //
24 // All of this takes place in SplitProgramHeadersForHole(), used on packing,
25 // and is unraveled on unpacking in CoalesceProgramHeadersForHole(). See
26 // commentary on those functions for an example of this segment stealing
27 // in action.
28
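To make the repurposing described in the notes above easy to inspect, here is a minimal standalone sketch (an illustrative addition to this review, not part of the patch) that reads a 64-bit, native-endian shared library with plain <elf.h> and reports its PT_GNU_STACK entry. The packer itself does the equivalent through libelf and the ELF:: traits used below.

// Illustrative sketch only: locate the PT_GNU_STACK program header that the
// packer repurposes, using <elf.h> directly. Assumes a 64-bit ELF file in
// the host's byte order; error handling is minimal.
#include <elf.h>
#include <cstdio>
#include <cstring>
#include <vector>

int main(int argc, char** argv) {
  if (argc != 2) {
    std::fprintf(stderr, "usage: %s <shared library>\n", argv[0]);
    return 1;
  }
  std::FILE* fp = std::fopen(argv[1], "rb");
  if (!fp) {
    std::perror("fopen");
    return 1;
  }

  Elf64_Ehdr ehdr;
  if (std::fread(&ehdr, sizeof(ehdr), 1, fp) != 1 ||
      std::memcmp(ehdr.e_ident, ELFMAG, SELFMAG) != 0 ||
      ehdr.e_ident[EI_CLASS] != ELFCLASS64) {
    std::fprintf(stderr, "not a 64-bit ELF file\n");
    std::fclose(fp);
    return 1;
  }

  // Read the program header table described by the ELF header.
  std::vector<Elf64_Phdr> phdrs(ehdr.e_phnum);
  std::fseek(fp, static_cast<long>(ehdr.e_phoff), SEEK_SET);
  std::fread(phdrs.data(), sizeof(Elf64_Phdr), phdrs.size(), fp);
  std::fclose(fp);

  for (size_t i = 0; i < phdrs.size(); ++i) {
    if (phdrs[i].p_type == PT_GNU_STACK) {
      // An "unused" GNU_STACK, as the packer expects, has zero sizes and
      // addresses and RW flags.
      std::printf("phdr[%zu] GNU_STACK p_filesz=%llu p_memsz=%llu p_flags=%u\n",
                  i,
                  static_cast<unsigned long long>(phdrs[i].p_filesz),
                  static_cast<unsigned long long>(phdrs[i].p_memsz),
                  phdrs[i].p_flags);
    }
  }
  return 0;
}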
5 #include "elf_file.h" 29 #include "elf_file.h"
6 30
7 #include <stdlib.h> 31 #include <stdlib.h>
8 #include <sys/types.h> 32 #include <sys/types.h>
9 #include <unistd.h> 33 #include <unistd.h>
34 #include <algorithm>
10 #include <string> 35 #include <string>
11 #include <vector> 36 #include <vector>
12 37
13 #include "debug.h" 38 #include "debug.h"
14 #include "elf_traits.h" 39 #include "elf_traits.h"
15 #include "libelf.h" 40 #include "libelf.h"
16 #include "packer.h" 41 #include "packer.h"
17 42
18 namespace relocation_packer { 43 namespace relocation_packer {
19 44
(...skipping 48 matching lines...)
68 93
69 // Verbose ELF program header logging. 94 // Verbose ELF program header logging.
70 void VerboseLogProgramHeader(size_t program_header_index, 95 void VerboseLogProgramHeader(size_t program_header_index,
71 const ELF::Phdr* program_header) { 96 const ELF::Phdr* program_header) {
72 std::string type; 97 std::string type;
73 switch (program_header->p_type) { 98 switch (program_header->p_type) {
74 case PT_NULL: type = "NULL"; break; 99 case PT_NULL: type = "NULL"; break;
75 case PT_LOAD: type = "LOAD"; break; 100 case PT_LOAD: type = "LOAD"; break;
76 case PT_DYNAMIC: type = "DYNAMIC"; break; 101 case PT_DYNAMIC: type = "DYNAMIC"; break;
77 case PT_INTERP: type = "INTERP"; break; 102 case PT_INTERP: type = "INTERP"; break;
78 case PT_NOTE: type = "NOTE"; break;
79 case PT_SHLIB: type = "SHLIB"; break;
80 case PT_PHDR: type = "PHDR"; break; 103 case PT_PHDR: type = "PHDR"; break;
81 case PT_TLS: type = "TLS"; break; 104 case PT_GNU_RELRO: type = "GNU_RELRO"; break;
105 case PT_GNU_STACK: type = "GNU_STACK"; break;
106 case PT_ARM_EXIDX: type = "EXIDX"; break;
82 default: type = "(OTHER)"; break; 107 default: type = "(OTHER)"; break;
83 } 108 }
84 VLOG(1) << "phdr " << program_header_index << " : " << type; 109 VLOG(1) << "phdr[" << program_header_index << "] : " << type;
85 VLOG(1) << " p_offset = " << program_header->p_offset; 110 VLOG(1) << " p_offset = " << program_header->p_offset;
86 VLOG(1) << " p_vaddr = " << program_header->p_vaddr; 111 VLOG(1) << " p_vaddr = " << program_header->p_vaddr;
87 VLOG(1) << " p_paddr = " << program_header->p_paddr; 112 VLOG(1) << " p_paddr = " << program_header->p_paddr;
88 VLOG(1) << " p_filesz = " << program_header->p_filesz; 113 VLOG(1) << " p_filesz = " << program_header->p_filesz;
89 VLOG(1) << " p_memsz = " << program_header->p_memsz; 114 VLOG(1) << " p_memsz = " << program_header->p_memsz;
115 VLOG(1) << " p_flags = " << program_header->p_flags;
116 VLOG(1) << " p_align = " << program_header->p_align;
90 } 117 }
91 118
92 // Verbose ELF section header logging. 119 // Verbose ELF section header logging.
93 void VerboseLogSectionHeader(const std::string& section_name, 120 void VerboseLogSectionHeader(const std::string& section_name,
94 const ELF::Shdr* section_header) { 121 const ELF::Shdr* section_header) {
95 VLOG(1) << "section " << section_name; 122 VLOG(1) << "section " << section_name;
96 VLOG(1) << " sh_addr = " << section_header->sh_addr; 123 VLOG(1) << " sh_addr = " << section_header->sh_addr;
97 VLOG(1) << " sh_offset = " << section_header->sh_offset; 124 VLOG(1) << " sh_offset = " << section_header->sh_offset;
98 VLOG(1) << " sh_size = " << section_header->sh_size; 125 VLOG(1) << " sh_size = " << section_header->sh_size;
99 VLOG(1) << " sh_addralign = " << section_header->sh_addralign; 126 VLOG(1) << " sh_addralign = " << section_header->sh_addralign;
(...skipping 75 matching lines...)
175 // attributes. 202 // attributes.
176 Elf_Scn* found_relocations_section = NULL; 203 Elf_Scn* found_relocations_section = NULL;
177 Elf_Scn* found_android_relocations_section = NULL; 204 Elf_Scn* found_android_relocations_section = NULL;
178 Elf_Scn* found_dynamic_section = NULL; 205 Elf_Scn* found_dynamic_section = NULL;
179 206
180 // Notes of relocation section types seen. We require one or the other of 207 // Notes of relocation section types seen. We require one or the other of
181 // these; both is unsupported. 208 // these; both is unsupported.
182 bool has_rel_relocations = false; 209 bool has_rel_relocations = false;
183 bool has_rela_relocations = false; 210 bool has_rela_relocations = false;
184 211
185 // Flag set if we encounter any .debug* section. We do not adjust any
186 // offsets or addresses of any debug data, so if we find one of these then
187 // the resulting output shared object should still run, but might not be
188 // usable for debugging, disassembly, and so on. Provides a warning if
189 // this occurs.
190 bool has_debug_section = false;
191
192 Elf_Scn* section = NULL; 212 Elf_Scn* section = NULL;
193 while ((section = elf_nextscn(elf, section)) != NULL) { 213 while ((section = elf_nextscn(elf, section)) != NULL) {
194 const ELF::Shdr* section_header = ELF::getshdr(section); 214 const ELF::Shdr* section_header = ELF::getshdr(section);
195 std::string name = elf_strptr(elf, string_index, section_header->sh_name); 215 std::string name = elf_strptr(elf, string_index, section_header->sh_name);
196 VerboseLogSectionHeader(name, section_header); 216 VerboseLogSectionHeader(name, section_header);
197 217
198 // Note relocation section types. 218 // Note relocation section types.
199 if (section_header->sh_type == SHT_REL) { 219 if (section_header->sh_type == SHT_REL) {
200 has_rel_relocations = true; 220 has_rel_relocations = true;
201 } 221 }
202 if (section_header->sh_type == SHT_RELA) { 222 if (section_header->sh_type == SHT_RELA) {
203 has_rela_relocations = true; 223 has_rela_relocations = true;
204 } 224 }
205 225
206 // Note special sections as we encounter them. 226 // Note special sections as we encounter them.
207 if ((name == ".rel.dyn" || name == ".rela.dyn") && 227 if ((name == ".rel.dyn" || name == ".rela.dyn") &&
208 section_header->sh_size > 0) { 228 section_header->sh_size > 0) {
209 found_relocations_section = section; 229 found_relocations_section = section;
210 } 230 }
211 if ((name == ".android.rel.dyn" || name == ".android.rela.dyn") && 231 if ((name == ".android.rel.dyn" || name == ".android.rela.dyn") &&
212 section_header->sh_size > 0) { 232 section_header->sh_size > 0) {
213 found_android_relocations_section = section; 233 found_android_relocations_section = section;
214 } 234 }
215 if (section_header->sh_offset == dynamic_program_header->p_offset) { 235 if (section_header->sh_offset == dynamic_program_header->p_offset) {
216 found_dynamic_section = section; 236 found_dynamic_section = section;
217 } 237 }
218 238
219 // If we find a section named .debug*, set the debug warning flag.
220 if (std::string(name).find(".debug") == 0) {
221 has_debug_section = true;
222 }
223
224 // Ensure we preserve alignment, repeated later for the data block(s). 239 // Ensure we preserve alignment, repeated later for the data block(s).
225 CHECK(section_header->sh_addralign <= kPreserveAlignment); 240 CHECK(section_header->sh_addralign <= kPreserveAlignment);
226 241
227 Elf_Data* data = NULL; 242 Elf_Data* data = NULL;
228 while ((data = elf_getdata(section, data)) != NULL) { 243 while ((data = elf_getdata(section, data)) != NULL) {
229 CHECK(data->d_align <= kPreserveAlignment); 244 CHECK(data->d_align <= kPreserveAlignment);
230 VerboseLogSectionData(data); 245 VerboseLogSectionData(data);
231 } 246 }
232 } 247 }
233 248
(...skipping 17 matching lines...)
251 if (!has_rel_relocations && !has_rela_relocations) { 266 if (!has_rel_relocations && !has_rela_relocations) {
252 LOG(ERROR) << "No relocations sections found"; 267 LOG(ERROR) << "No relocations sections found";
253 return false; 268 return false;
254 } 269 }
255 if (has_rel_relocations && has_rela_relocations) { 270 if (has_rel_relocations && has_rela_relocations) {
256 LOG(ERROR) << "Multiple relocations sections with different types found, " 271 LOG(ERROR) << "Multiple relocations sections with different types found, "
257 << "not currently supported"; 272 << "not currently supported";
258 return false; 273 return false;
259 } 274 }
260 275
261 if (has_debug_section) {
262 LOG(WARNING) << "Found .debug section(s), and ignored them";
263 }
264
265 elf_ = elf; 276 elf_ = elf;
266 relocations_section_ = found_relocations_section; 277 relocations_section_ = found_relocations_section;
267 dynamic_section_ = found_dynamic_section; 278 dynamic_section_ = found_dynamic_section;
268 android_relocations_section_ = found_android_relocations_section; 279 android_relocations_section_ = found_android_relocations_section;
269 relocations_type_ = has_rel_relocations ? REL : RELA; 280 relocations_type_ = has_rel_relocations ? REL : RELA;
270 return true; 281 return true;
271 } 282 }
272 283
273 namespace { 284 namespace {
274 285
275 // Helper for ResizeSection(). Adjust the main ELF header for the hole. 286 // Helper for ResizeSection(). Adjust the main ELF header for the hole.
276 void AdjustElfHeaderForHole(ELF::Ehdr* elf_header, 287 void AdjustElfHeaderForHole(ELF::Ehdr* elf_header,
277 ELF::Off hole_start, 288 ELF::Off hole_start,
278 ssize_t hole_size) { 289 ssize_t hole_size) {
279 if (elf_header->e_phoff > hole_start) { 290 if (elf_header->e_phoff > hole_start) {
280 elf_header->e_phoff += hole_size; 291 elf_header->e_phoff += hole_size;
281 VLOG(1) << "e_phoff adjusted to " << elf_header->e_phoff; 292 VLOG(1) << "e_phoff adjusted to " << elf_header->e_phoff;
282 } 293 }
283 if (elf_header->e_shoff > hole_start) { 294 if (elf_header->e_shoff > hole_start) {
284 elf_header->e_shoff += hole_size; 295 elf_header->e_shoff += hole_size;
285 VLOG(1) << "e_shoff adjusted to " << elf_header->e_shoff; 296 VLOG(1) << "e_shoff adjusted to " << elf_header->e_shoff;
286 } 297 }
287 } 298 }
288 299
289 // Helper for ResizeSection(). Adjust all program headers for the hole.
290 void AdjustProgramHeadersForHole(ELF::Phdr* elf_program_header,
291 size_t program_header_count,
292 ELF::Off hole_start,
293 ssize_t hole_size) {
294 for (size_t i = 0; i < program_header_count; ++i) {
295 ELF::Phdr* program_header = &elf_program_header[i];
296
297 if (program_header->p_offset > hole_start) {
298 // The hole start is past this segment, so adjust offsets and addrs.
299 program_header->p_offset += hole_size;
300 VLOG(1) << "phdr " << i
301 << " p_offset adjusted to "<< program_header->p_offset;
302
303 // Only adjust vaddr and paddr if this program header has them.
304 if (program_header->p_vaddr != 0) {
305 program_header->p_vaddr += hole_size;
306 VLOG(1) << "phdr " << i
307 << " p_vaddr adjusted to " << program_header->p_vaddr;
308 }
309 if (program_header->p_paddr != 0) {
310 program_header->p_paddr += hole_size;
311 VLOG(1) << "phdr " << i
312 << " p_paddr adjusted to " << program_header->p_paddr;
313 }
314 } else if (program_header->p_offset +
315 program_header->p_filesz > hole_start) {
316 // The hole start is within this segment, so adjust file and in-memory
317 // sizes, but leave offsets and addrs unchanged.
318 program_header->p_filesz += hole_size;
319 VLOG(1) << "phdr " << i
320 << " p_filesz adjusted to " << program_header->p_filesz;
321 program_header->p_memsz += hole_size;
322 VLOG(1) << "phdr " << i
323 << " p_memsz adjusted to " << program_header->p_memsz;
324 }
325 }
326 }
327
328 // Helper for ResizeSection(). Adjust all section headers for the hole. 300 // Helper for ResizeSection(). Adjust all section headers for the hole.
329 void AdjustSectionHeadersForHole(Elf* elf, 301 void AdjustSectionHeadersForHole(Elf* elf,
330 ELF::Off hole_start, 302 ELF::Off hole_start,
331 ssize_t hole_size) { 303 ssize_t hole_size) {
332 size_t string_index; 304 size_t string_index;
333 elf_getshdrstrndx(elf, &string_index); 305 elf_getshdrstrndx(elf, &string_index);
334 306
335 Elf_Scn* section = NULL; 307 Elf_Scn* section = NULL;
336 while ((section = elf_nextscn(elf, section)) != NULL) { 308 while ((section = elf_nextscn(elf, section)) != NULL) {
337 ELF::Shdr* section_header = ELF::getshdr(section); 309 ELF::Shdr* section_header = ELF::getshdr(section);
338 std::string name = elf_strptr(elf, string_index, section_header->sh_name); 310 std::string name = elf_strptr(elf, string_index, section_header->sh_name);
339 311
340 if (section_header->sh_offset > hole_start) { 312 if (section_header->sh_offset > hole_start) {
341 section_header->sh_offset += hole_size; 313 section_header->sh_offset += hole_size;
342 VLOG(1) << "section " << name 314 VLOG(1) << "section " << name
343 << " sh_offset adjusted to " << section_header->sh_offset; 315 << " sh_offset adjusted to " << section_header->sh_offset;
344 // Only adjust section addr if this section has one. 316 }
345 if (section_header->sh_addr != 0) { 317 }
346 section_header->sh_addr += hole_size; 318 }
347 VLOG(1) << "section " << name 319
348 << " sh_addr adjusted to " << section_header->sh_addr; 320 // Helper for ResizeSection(). Adjust the offsets of any program headers
321 // that have offsets currently beyond the hole start.
322 void AdjustProgramHeaderOffsets(ELF::Phdr* program_headers,
323 size_t count,
324 ELF::Phdr* ignored_1,
325 ELF::Phdr* ignored_2,
326 ELF::Off hole_start,
327 ssize_t hole_size) {
328 for (size_t i = 0; i < count; ++i) {
329 ELF::Phdr* program_header = &program_headers[i];
330
331 if (program_header == ignored_1 || program_header == ignored_2)
332 continue;
333
334 if (program_header->p_offset > hole_start) {
335 // The hole start is past this segment, so adjust offset.
336 program_header->p_offset += hole_size;
337 VLOG(1) << "phdr[" << i
338 << "] p_offset adjusted to "<< program_header->p_offset;
339 }
340 }
341 }
342
343 // Helper for ResizeSection(). Find the first loadable segment in the
344 // file. We expect it to map from file offset zero.
345 ELF::Phdr* FindFirstLoadSegment(ELF::Phdr* program_headers,
346 size_t count) {
347 ELF::Phdr* first_loadable_segment = NULL;
348
349 for (size_t i = 0; i < count; ++i) {
350 ELF::Phdr* program_header = &program_headers[i];
351
352 if (program_header->p_type == PT_LOAD &&
353 program_header->p_offset == 0 &&
354 program_header->p_vaddr == 0 &&
355 program_header->p_paddr == 0) {
356 first_loadable_segment = program_header;
357 }
358 }
359 LOG_IF(FATAL, !first_loadable_segment)
360 << "Cannot locate a LOAD segment with address and offset zero";
361
362 return first_loadable_segment;
363 }
364
365 // Helper for ResizeSection(). Find the PT_GNU_STACK segment, and check
366 // that it contains what we expect so we can restore it on unpack if needed.
367 ELF::Phdr* FindUnusedGnuStackSegment(ELF::Phdr* program_headers,
368 size_t count) {
369 ELF::Phdr* unused_segment = NULL;
370
371 for (size_t i = 0; i < count; ++i) {
372 ELF::Phdr* program_header = &program_headers[i];
373
374 if (program_header->p_type == PT_GNU_STACK &&
375 program_header->p_offset == 0 &&
376 program_header->p_vaddr == 0 &&
377 program_header->p_paddr == 0 &&
378 program_header->p_filesz == 0 &&
379 program_header->p_memsz == 0 &&
380 program_header->p_flags == (PF_R | PF_W) &&
381 program_header->p_align == ELF::kGnuStackSegmentAlignment) {
382 unused_segment = program_header;
383 }
384 }
385 LOG_IF(FATAL, !unused_segment)
386 << "Cannot locate the expected GNU_STACK segment";
387
388 return unused_segment;
389 }
390
391 // Helper for ResizeSection(). Find the segment that was the first loadable
392 // one before we split it into two. This is the one into which we coalesce
393 // the split segments on unpacking.
394 ELF::Phdr* FindOriginalFirstLoadSegment(ELF::Phdr* program_headers,
395 size_t count) {
396 const ELF::Phdr* first_loadable_segment =
397 FindFirstLoadSegment(program_headers, count);
398
399 ELF::Phdr* original_first_loadable_segment = NULL;
400
401 for (size_t i = 0; i < count; ++i) {
402 ELF::Phdr* program_header = &program_headers[i];
403
404 // The original first loadable segment is the one that follows on from
405 // the one we wrote on split to be the current first loadable segment.
406 if (program_header->p_type == PT_LOAD &&
407 program_header->p_offset == first_loadable_segment->p_filesz) {
408 original_first_loadable_segment = program_header;
409 }
410 }
411 LOG_IF(FATAL, !original_first_loadable_segment)
412 << "Cannot locate the LOAD segment that follows a LOAD at offset zero";
413
414 return original_first_loadable_segment;
415 }
416
417 // Helper for ResizeSection(). Find the segment that contains the hole.
418 Elf_Scn* FindSectionContainingHole(Elf* elf,
419 ELF::Off hole_start,
420 ssize_t hole_size) {
421 Elf_Scn* section = NULL;
422 Elf_Scn* last_unholed_section = NULL;
423
424 while ((section = elf_nextscn(elf, section)) != NULL) {
425 const ELF::Shdr* section_header = ELF::getshdr(section);
426
427 // Because we get here after section headers have been adjusted for the
428 // hole, we need to 'undo' that adjustment to give a view of the original
429 // sections layout.
430 ELF::Off offset = section_header->sh_offset;
431 if (section_header->sh_offset >= hole_start) {
432 offset -= hole_size;
433 }
434
435 if (offset <= hole_start) {
436 last_unholed_section = section;
437 }
438 }
439 LOG_IF(FATAL, !last_unholed_section)
440 << "Cannot identify the section before the one containing the hole";
441
442 // The section containing the hole is the one after the last one found
443 // by the loop above.
444 Elf_Scn* holed_section = elf_nextscn(elf, last_unholed_section);
445 LOG_IF(FATAL, !holed_section)
446 << "Cannot identify the section containing the hole";
447
448 return holed_section;
449 }
450
451 // Helper for ResizeSection(). Find the last section contained in a segment.
452 Elf_Scn* FindLastSectionInSegment(Elf* elf,
453 ELF::Phdr* program_header,
454 ELF::Off hole_start,
455 ssize_t hole_size) {
456 const ELF::Off segment_end =
457 program_header->p_offset + program_header->p_filesz;
458
459 Elf_Scn* section = NULL;
460 Elf_Scn* last_section = NULL;
461
462 while ((section = elf_nextscn(elf, section)) != NULL) {
463 const ELF::Shdr* section_header = ELF::getshdr(section);
464
465 // As above, 'undo' any section offset adjustment to give a view of the
466 // original sections layout.
467 ELF::Off offset = section_header->sh_offset;
468 if (section_header->sh_offset >= hole_start) {
469 offset -= hole_size;
470 }
471
472 if (offset < segment_end) {
473 last_section = section;
474 }
475 }
476 LOG_IF(FATAL, !last_section)
477 << "Cannot identify the last section in the given segment";
478
479 return last_section;
480 }
481
482 // Helper for ResizeSection(). Order loadable segments by their offsets.
483 // The crazy linker contains assumptions about loadable segment ordering,
484 // and it is better if we do not break them.
485 void SortOrderSensitiveProgramHeaders(ELF::Phdr* program_headers,
486 size_t count) {
487 std::vector<ELF::Phdr*> orderable;
488
489 // Collect together orderable program headers. These are all the LOAD
490 // segments, and any GNU_STACK that may be present (removed on packing,
491 // but replaced on unpacking).
492 for (size_t i = 0; i < count; ++i) {
493 ELF::Phdr* program_header = &program_headers[i];
494
495 if (program_header->p_type == PT_LOAD ||
496 program_header->p_type == PT_GNU_STACK) {
497 orderable.push_back(program_header);
498 }
499 }
500
501 // Order these program headers so that any PT_GNU_STACK is last, and
502 // the LOAD segments that precede it appear in offset order. Uses
503 // insertion sort.
504 for (size_t i = 1; i < orderable.size(); ++i) {
505 for (size_t j = i; j > 0; --j) {
506 ELF::Phdr* first = orderable[j - 1];
507 ELF::Phdr* second = orderable[j];
508
509 if (!(first->p_type == PT_GNU_STACK ||
510 first->p_offset > second->p_offset)) {
511 break;
349 } 512 }
350 } 513 std::swap(*first, *second);
351 } 514 }
515 }
516 }
517
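The insertion sort above keeps any PT_GNU_STACK last and orders LOAD segments by p_offset, swapping header contents in place. For comparison only (not part of the patch), the same ordering could be expressed with std::stable_sort from the <algorithm> header this change adds, sorting copies of the collected headers and writing them back:

// Sketch of an equivalent ordering using std::stable_sort; hypothetical
// helper, shown only for comparison with the in-place insertion sort above.
#include <algorithm>
#include <elf.h>
#include <vector>

void SortOrderablePhdrs(std::vector<Elf64_Phdr*>* orderable) {
  // Copy the orderable headers, sort the copies, then write them back into
  // the original program header table slots.
  std::vector<Elf64_Phdr> sorted;
  for (size_t i = 0; i < orderable->size(); ++i)
    sorted.push_back(*orderable->at(i));

  std::stable_sort(sorted.begin(), sorted.end(),
                   [](const Elf64_Phdr& a, const Elf64_Phdr& b) {
                     if (a.p_type == PT_GNU_STACK) return false;  // sinks last
                     if (b.p_type == PT_GNU_STACK) return true;
                     return a.p_offset < b.p_offset;
                   });

  for (size_t i = 0; i < orderable->size(); ++i)
    *orderable->at(i) = sorted[i];
}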
518 // Helper for ResizeSection(). The GNU_STACK program header is unused in
519 // Android, so we can repurpose it here. Before packing, the program header
520 // table contains something like:
521 //
522 // Type Offset VirtAddr PhysAddr FileSiz MemSiz Flg Align
523 // LOAD 0x000000 0x00000000 0x00000000 0x1efc818 0x1efc818 R E 0x1000
524 // LOAD 0x1efd008 0x01efe008 0x01efe008 0x17ec3c 0x1a0324 RW 0x1000
525 // DYNAMIC 0x205ec50 0x0205fc50 0x0205fc50 0x00108 0x00108 RW 0x4
526 // GNU_STACK 0x000000 0x00000000 0x00000000 0x00000 0x00000 RW 0
527 //
528 // The hole in the file is in the first of these. In order to preserve all
529 // load addresses, what we do is to turn the GNU_STACK into a new LOAD entry
530 // that maps segments up to where we created the hole, adjust the first LOAD
531 // entry so that it maps segments after that, adjust any other program
532 // headers whose offset is after the hole start, and finally order the LOAD
533 // segments by offset, to give:
534 //
535 // Type Offset VirtAddr PhysAddr FileSiz MemSiz Flg Align
536 // LOAD 0x000000 0x00000000 0x00000000 0x14ea4 0x212ea4 R E 0x1000
537 // LOAD 0x014ea4 0x00212ea4 0x00212ea4 0x1cea164 0x1cea164 R E 0x1000
538 // DYNAMIC 0x1e60c50 0x0205fc50 0x0205fc50 0x00108 0x00108 RW 0x4
539 // LOAD 0x1cff008 0x01efe008 0x01efe008 0x17ec3c 0x1a0324 RW 0x1000
540 //
541 // We work out the split points by finding the .rel.dyn or .rela.dyn section
542 // that contains the hole, and by finding the last section in a given segment.
543 //
544 // To unpack, we reverse the above to leave the file as it was originally.
545 void SplitProgramHeadersForHole(Elf* elf,
546 ELF::Off hole_start,
547 ssize_t hole_size) {
548 CHECK(hole_size < 0);
549 const ELF::Ehdr* elf_header = ELF::getehdr(elf);
550 CHECK(elf_header);
551
552 ELF::Phdr* elf_program_header = ELF::getphdr(elf);
553 CHECK(elf_program_header);
554
555 const size_t program_header_count = elf_header->e_phnum;
556
557 // Locate the segment that we can overwrite to form the new LOAD entry,
558 // and the segment that we are going to split into two parts.
559 ELF::Phdr* spliced_header =
560 FindUnusedGnuStackSegment(elf_program_header, program_header_count);
561 ELF::Phdr* split_header =
562 FindFirstLoadSegment(elf_program_header, program_header_count);
563
564 VLOG(1) << "phdr[" << split_header - elf_program_header << "] split";
565 VLOG(1) << "phdr[" << spliced_header - elf_program_header << "] new LOAD";
566
567 // Find the section that contains the hole. We split on the section that
568 // follows it.
569 Elf_Scn* holed_section =
570 FindSectionContainingHole(elf, hole_start, hole_size);
571
572 size_t string_index;
573 elf_getshdrstrndx(elf, &string_index);
574
575 ELF::Shdr* section_header = ELF::getshdr(holed_section);
576 std::string name = elf_strptr(elf, string_index, section_header->sh_name);
577 VLOG(1) << "section " << name << " split after";
578
579 // Find the last section in the segment we are splitting.
580 Elf_Scn* last_section =
581 FindLastSectionInSegment(elf, split_header, hole_start, hole_size);
582
583 section_header = ELF::getshdr(last_section);
584 name = elf_strptr(elf, string_index, section_header->sh_name);
585 VLOG(1) << "section " << name << " split end";
586
587 // Split on the section following the holed one, and up to (but not
588 // including) the section following the last one in the split segment.
589 Elf_Scn* split_section = elf_nextscn(elf, holed_section);
590 LOG_IF(FATAL, !split_section)
591 << "No section follows the section that contains the hole";
592 Elf_Scn* end_section = elf_nextscn(elf, last_section);
593 LOG_IF(FATAL, !end_section)
594 << "No section follows the last section in the segment being split";
595
596 // Split the first portion of split_header into spliced_header. Done
597 // by copying the entire split_header into spliced_header, then changing
598 // only the fields that set the segment sizes.
599 *spliced_header = *split_header;
600 const ELF::Shdr* split_section_header = ELF::getshdr(split_section);
601 spliced_header->p_filesz = split_section_header->sh_offset;
602 spliced_header->p_memsz = split_section_header->sh_addr;
603
604 // Now rewrite split_header to remove the part we spliced from it.
605 const ELF::Shdr* end_section_header = ELF::getshdr(end_section);
606 split_header->p_offset = spliced_header->p_filesz;
607
608 CHECK(split_header->p_vaddr == split_header->p_paddr);
609 split_header->p_vaddr = spliced_header->p_memsz;
610 split_header->p_paddr = split_header->p_vaddr;
611
612 CHECK(split_header->p_filesz == split_header->p_memsz);
613 split_header->p_filesz =
614 end_section_header->sh_offset - spliced_header->p_filesz;
615 split_header->p_memsz = split_header->p_filesz;
616
617 // Adjust the offsets of all program headers that are not one of the pair
618 // we just created by splitting.
619 AdjustProgramHeaderOffsets(elf_program_header,
620 program_header_count,
621 spliced_header,
622 split_header,
623 hole_start,
624 hole_size);
625
626 // Finally, order loadable segments by offset/address. The crazy linker
627 // contains assumptions about loadable segment ordering.
628 SortOrderSensitiveProgramHeaders(elf_program_header,
629 program_header_count);
630 }
631
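To make the splitting arithmetic concrete, here is a toy check (illustrative only, not part of the patch) that plugs in the offsets and addresses from the example tables in the comment above: the new LOAD built in the GNU_STACK slot covers the file up to the first section after the hole, and the original first LOAD is rewritten to start there.

// Toy illustration of the split arithmetic using the values from the
// example program header tables above.
#include <cassert>
#include <elf.h>

int main() {
  Elf64_Phdr split = {};  // the original first LOAD segment
  split.p_type = PT_LOAD;
  split.p_filesz = split.p_memsz = 0x1efc818;

  Elf64_Phdr spliced = split;  // the former GNU_STACK, now a new LOAD

  // Offset/address of the section following the hole, and offset of the
  // first section past the segment, after section headers were adjusted.
  const Elf64_Off split_section_offset = 0x14ea4;
  const Elf64_Addr split_section_addr = 0x212ea4;
  const Elf64_Off end_section_offset = 0x1cff008;

  // Mirror SplitProgramHeadersForHole(): the spliced LOAD maps the file up
  // to the split section; the split LOAD is moved to start there.
  spliced.p_filesz = split_section_offset;
  spliced.p_memsz = split_section_addr;
  split.p_offset = spliced.p_filesz;
  split.p_vaddr = split.p_paddr = spliced.p_memsz;
  split.p_filesz = split.p_memsz = end_section_offset - spliced.p_filesz;

  // Matches the "after" program header table in the comment above.
  assert(split.p_offset == 0x14ea4);
  assert(split.p_vaddr == 0x212ea4);
  assert(split.p_filesz == 0x1cea164);
  return 0;
}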
632 // Helper for ResizeSection(). Undo the work of SplitProgramHeadersForHole().
633 void CoalesceProgramHeadersForHole(Elf* elf,
634 ELF::Off hole_start,
635 ssize_t hole_size) {
636 CHECK(hole_size > 0);
637 const ELF::Ehdr* elf_header = ELF::getehdr(elf);
638 CHECK(elf_header);
639
640 ELF::Phdr* elf_program_header = ELF::getphdr(elf);
641 CHECK(elf_program_header);
642
643 const size_t program_header_count = elf_header->e_phnum;
644
645 // Locate the segment that we overwrote to form the new LOAD entry, and
646 // the segment that we split into two parts on packing.
647 ELF::Phdr* spliced_header =
648 FindFirstLoadSegment(elf_program_header, program_header_count);
649 ELF::Phdr* split_header =
650 FindOriginalFirstLoadSegment(elf_program_header, program_header_count);
651
652 VLOG(1) << "phdr[" << spliced_header - elf_program_header << "] stack";
653 VLOG(1) << "phdr[" << split_header - elf_program_header << "] coalesce";
654
655 // Find the last section in the second segment we are coalescing.
656 Elf_Scn* last_section =
657 FindLastSectionInSegment(elf, split_header, hole_start, hole_size);
658
659 size_t string_index;
660 elf_getshdrstrndx(elf, &string_index);
661
662 const ELF::Shdr* section_header = ELF::getshdr(last_section);
663 std::string name = elf_strptr(elf, string_index, section_header->sh_name);
664 VLOG(1) << "section " << name << " coalesced";
665
666 // Rewrite the coalesced segment into split_header.
667 const ELF::Shdr* last_section_header = ELF::getshdr(last_section);
668 split_header->p_offset = spliced_header->p_offset;
669 split_header->p_vaddr = spliced_header->p_vaddr;
670 split_header->p_paddr = split_header->p_vaddr;
671 split_header->p_filesz =
672 last_section_header->sh_offset + last_section_header->sh_size;
673 split_header->p_memsz = split_header->p_filesz;
674
675 // Reconstruct the original GNU_STACK segment into spliced_header.
676 spliced_header->p_type = PT_GNU_STACK;
677 spliced_header->p_offset = 0;
678 spliced_header->p_vaddr = 0;
679 spliced_header->p_paddr = 0;
680 spliced_header->p_filesz = 0;
681 spliced_header->p_memsz = 0;
682 spliced_header->p_flags = PF_R | PF_W;
683 spliced_header->p_align = ELF::kGnuStackSegmentAlignment;
684
685 // Adjust the offsets of all program headers that are not one of the pair
686 // we just coalesced.
687 AdjustProgramHeaderOffsets(elf_program_header,
688 program_header_count,
689 spliced_header,
690 split_header,
691 hole_start,
692 hole_size);
693
694 // Finally, order loadable segments by offset/address. The crazy linker
695 // contains assumptions about loadable segment ordering.
696 SortOrderSensitiveProgramHeaders(elf_program_header,
697 program_header_count);
698 }
699
700 // Helper for ResizeSection(). Rewrite program headers.
701 void RewriteProgramHeadersForHole(Elf* elf,
702 ELF::Off hole_start,
703 ssize_t hole_size) {
704 // If hole_size is negative then we are removing a piece of the file, and
705 // we want to split program headers so that we keep the same addresses
706 // for text and data. If positive, then we are putting that piece of the
707 // file back in, so we coalesce the previously split program headers.
708 if (hole_size < 0)
709 SplitProgramHeadersForHole(elf, hole_start, hole_size);
710 else if (hole_size > 0)
711 CoalesceProgramHeadersForHole(elf, hole_start, hole_size);
712 }
713
714 // Helper for ResizeSection(). Locate and return the dynamic section.
715 Elf_Scn* GetDynamicSection(Elf* elf) {
716 const ELF::Ehdr* elf_header = ELF::getehdr(elf);
717 CHECK(elf_header);
718
719 const ELF::Phdr* elf_program_header = ELF::getphdr(elf);
720 CHECK(elf_program_header);
721
722 // Find the program header that describes the dynamic section.
723 const ELF::Phdr* dynamic_program_header = NULL;
724 for (size_t i = 0; i < elf_header->e_phnum; ++i) {
725 const ELF::Phdr* program_header = &elf_program_header[i];
726
727 if (program_header->p_type == PT_DYNAMIC) {
728 dynamic_program_header = program_header;
729 }
730 }
731 CHECK(dynamic_program_header);
732
733 // Now find the section with the same offset as this program header.
734 Elf_Scn* dynamic_section = NULL;
735 Elf_Scn* section = NULL;
736 while ((section = elf_nextscn(elf, section)) != NULL) {
737 ELF::Shdr* section_header = ELF::getshdr(section);
738
739 if (section_header->sh_offset == dynamic_program_header->p_offset) {
740 dynamic_section = section;
741 }
742 }
743 CHECK(dynamic_section != NULL);
744
745 return dynamic_section;
352 } 746 }
353 747
354 // Helper for ResizeSection(). Adjust the .dynamic section for the hole. 748 // Helper for ResizeSection(). Adjust the .dynamic section for the hole.
355 template <typename Rel> 749 template <typename Rel>
356 void AdjustDynamicSectionForHole(Elf_Scn* dynamic_section, 750 void AdjustDynamicSectionForHole(Elf_Scn* dynamic_section,
357 bool is_relocations_resize,
358 ELF::Off hole_start, 751 ELF::Off hole_start,
359 ssize_t hole_size) { 752 ssize_t hole_size) {
360 Elf_Data* data = GetSectionData(dynamic_section); 753 Elf_Data* data = GetSectionData(dynamic_section);
361 754
362 const ELF::Dyn* dynamic_base = reinterpret_cast<ELF::Dyn*>(data->d_buf); 755 const ELF::Dyn* dynamic_base = reinterpret_cast<ELF::Dyn*>(data->d_buf);
363 std::vector<ELF::Dyn> dynamics( 756 std::vector<ELF::Dyn> dynamics(
364 dynamic_base, 757 dynamic_base,
365 dynamic_base + data->d_size / sizeof(dynamics[0])); 758 dynamic_base + data->d_size / sizeof(dynamics[0]));
366 759
367 for (size_t i = 0; i < dynamics.size(); ++i) { 760 for (size_t i = 0; i < dynamics.size(); ++i) {
368 ELF::Dyn* dynamic = &dynamics[i]; 761 ELF::Dyn* dynamic = &dynamics[i];
369 const ELF::Sword tag = dynamic->d_tag; 762 const ELF::Sword tag = dynamic->d_tag;
370 // Any tags that hold offsets are adjustment candidates.
371 const bool is_adjustable = (tag == DT_PLTGOT ||
372 tag == DT_HASH ||
373 tag == DT_STRTAB ||
374 tag == DT_SYMTAB ||
375 tag == DT_RELA ||
376 tag == DT_INIT ||
377 tag == DT_FINI ||
378 tag == DT_REL ||
379 tag == DT_JMPREL ||
380 tag == DT_INIT_ARRAY ||
381 tag == DT_FINI_ARRAY ||
382 tag == DT_ANDROID_REL_OFFSET);
383 if (is_adjustable && dynamic->d_un.d_ptr > hole_start) {
384 dynamic->d_un.d_ptr += hole_size;
385 VLOG(1) << "dynamic[" << i << "] " << dynamic->d_tag
386 << " d_ptr adjusted to " << dynamic->d_un.d_ptr;
387 }
388
389 // If we are specifically resizing dynamic relocations, we need to make
390 // some added adjustments to tags that indicate the counts of relative
391 // relocations in the shared object.
392 if (!is_relocations_resize)
393 continue;
394 763
395 // DT_RELSZ or DT_RELASZ indicate the overall size of relocations. 764 // DT_RELSZ or DT_RELASZ indicate the overall size of relocations.
396 // Only one will be present. Adjust by hole size. 765 // Only one will be present. Adjust by hole size.
397 if (tag == DT_RELSZ || tag == DT_RELASZ) { 766 if (tag == DT_RELSZ || tag == DT_RELASZ) {
398 dynamic->d_un.d_val += hole_size; 767 dynamic->d_un.d_val += hole_size;
399 VLOG(1) << "dynamic[" << i << "] " << dynamic->d_tag 768 VLOG(1) << "dynamic[" << i << "] " << dynamic->d_tag
400 << " d_val adjusted to " << dynamic->d_un.d_val; 769 << " d_val adjusted to " << dynamic->d_un.d_val;
401 } 770 }
402 771
403 // DT_RELCOUNT or DT_RELACOUNT hold the count of relative relocations. 772 // DT_RELCOUNT or DT_RELACOUNT hold the count of relative relocations.
404 // Only one will be present. Packing reduces it to the alignment 773 // Only one will be present. Packing reduces it to the alignment
405 // padding, if any; unpacking restores it to its former value. The 774 // padding, if any; unpacking restores it to its former value. The
406 // crazy linker does not use it, but we update it anyway. 775 // crazy linker does not use it, but we update it anyway.
407 if (tag == DT_RELCOUNT || tag == DT_RELACOUNT) { 776 if (tag == DT_RELCOUNT || tag == DT_RELACOUNT) {
408 // Cast sizeof to a signed type to avoid the division result being 777 // Cast sizeof to a signed type to avoid the division result being
409 // promoted into an unsigned size_t. 778 // promoted into an unsigned size_t.
410 const ssize_t sizeof_rel = static_cast<ssize_t>(sizeof(Rel)); 779 const ssize_t sizeof_rel = static_cast<ssize_t>(sizeof(Rel));
411 dynamic->d_un.d_val += hole_size / sizeof_rel; 780 dynamic->d_un.d_val += hole_size / sizeof_rel;
412 VLOG(1) << "dynamic[" << i << "] " << dynamic->d_tag 781 VLOG(1) << "dynamic[" << i << "] " << dynamic->d_tag
413 << " d_val adjusted to " << dynamic->d_un.d_val; 782 << " d_val adjusted to " << dynamic->d_un.d_val;
414 } 783 }
415 784
416 // DT_RELENT and DT_RELAENT don't change, but make sure they are what 785 // DT_RELENT and DT_RELAENT do not change, but make sure they are what
417 // we expect. Only one will be present. 786 // we expect. Only one will be present.
418 if (tag == DT_RELENT || tag == DT_RELAENT) { 787 if (tag == DT_RELENT || tag == DT_RELAENT) {
419 CHECK(dynamic->d_un.d_val == sizeof(Rel)); 788 CHECK(dynamic->d_un.d_val == sizeof(Rel));
420 } 789 }
421 } 790 }
422 791
423 void* section_data = &dynamics[0]; 792 void* section_data = &dynamics[0];
424 size_t bytes = dynamics.size() * sizeof(dynamics[0]); 793 size_t bytes = dynamics.size() * sizeof(dynamics[0]);
425 RewriteSectionData(data, section_data, bytes); 794 RewriteSectionData(data, section_data, bytes);
426 } 795 }
427 796
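The DT_RELSZ/DT_RELASZ and DT_RELCOUNT/DT_RELACOUNT adjustments above are plain arithmetic on the hole size; a small self-checking sketch with assumed numbers (not taken from any real library):

// Assumed example: packing removes 1000 relative Elf32_Rel entries of
// 8 bytes each, so hole_size is -8000; DT_RELSZ shrinks by 8000 bytes and
// DT_RELCOUNT by 1000 entries. Illustrative only.
#include <cassert>
#include <elf.h>
#include <sys/types.h>

int main() {
  const ssize_t sizeof_rel = static_cast<ssize_t>(sizeof(Elf32_Rel));  // 8
  const ssize_t hole_size = -1000 * sizeof_rel;                        // -8000

  Elf32_Word relsz = 12000;    // assumed DT_RELSZ value before packing
  Elf32_Word relcount = 1400;  // assumed DT_RELCOUNT value before packing

  relsz += hole_size;                  // 4000 bytes of relocations remain
  relcount += hole_size / sizeof_rel;  // 400 relative relocations remain

  assert(relsz == 4000);
  assert(relcount == 400);
  return 0;
}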
428 // Helper for ResizeSection(). Adjust the .dynsym section for the hole.
429 // We need to adjust the values for the symbols represented in it.
430 void AdjustDynSymSectionForHole(Elf_Scn* dynsym_section,
431 ELF::Off hole_start,
432 ssize_t hole_size) {
433 Elf_Data* data = GetSectionData(dynsym_section);
434
435 const ELF::Sym* dynsym_base = reinterpret_cast<ELF::Sym*>(data->d_buf);
436 std::vector<ELF::Sym> dynsyms
437 (dynsym_base,
438 dynsym_base + data->d_size / sizeof(dynsyms[0]));
439
440 for (size_t i = 0; i < dynsyms.size(); ++i) {
441 ELF::Sym* dynsym = &dynsyms[i];
442 const int type = static_cast<int>(ELF_ST_TYPE(dynsym->st_info));
443 const bool is_adjustable = (type == STT_OBJECT ||
444 type == STT_FUNC ||
445 type == STT_SECTION ||
446 type == STT_FILE ||
447 type == STT_COMMON ||
448 type == STT_TLS);
449 if (is_adjustable && dynsym->st_value > hole_start) {
450 dynsym->st_value += hole_size;
451 VLOG(1) << "dynsym[" << i << "] type=" << type
452 << " st_value adjusted to " << dynsym->st_value;
453 }
454 }
455
456 void* section_data = &dynsyms[0];
457 size_t bytes = dynsyms.size() * sizeof(dynsyms[0]);
458 RewriteSectionData(data, section_data, bytes);
459 }
460
461 // Helper for ResizeSection(). Adjust the plt relocations section for the
462 // hole. We need to adjust the offset of every relocation inside it that
463 // falls beyond the hole start.
464 template <typename Rel>
465 void AdjustRelPltSectionForHole(Elf_Scn* relplt_section,
466 ELF::Off hole_start,
467 ssize_t hole_size) {
468 Elf_Data* data = GetSectionData(relplt_section);
469
470 const Rel* relplt_base = reinterpret_cast<Rel*>(data->d_buf);
471 std::vector<Rel> relplts(
472 relplt_base,
473 relplt_base + data->d_size / sizeof(relplts[0]));
474
475 for (size_t i = 0; i < relplts.size(); ++i) {
476 Rel* relplt = &relplts[i];
477 if (relplt->r_offset > hole_start) {
478 relplt->r_offset += hole_size;
479 VLOG(1) << "relplt[" << i
480 << "] r_offset adjusted to " << relplt->r_offset;
481 }
482 }
483
484 void* section_data = &relplts[0];
485 size_t bytes = relplts.size() * sizeof(relplts[0]);
486 RewriteSectionData(data, section_data, bytes);
487 }
488
489 // Helper for ResizeSection(). Adjust the .symtab section for the hole.
490 // We want to adjust the value of every symbol in it that falls beyond
491 // the hole start.
492 void AdjustSymTabSectionForHole(Elf_Scn* symtab_section,
493 ELF::Off hole_start,
494 ssize_t hole_size) {
495 Elf_Data* data = GetSectionData(symtab_section);
496
497 const ELF::Sym* symtab_base = reinterpret_cast<ELF::Sym*>(data->d_buf);
498 std::vector<ELF::Sym> symtab(
499 symtab_base,
500 symtab_base + data->d_size / sizeof(symtab[0]));
501
502 for (size_t i = 0; i < symtab.size(); ++i) {
503 ELF::Sym* sym = &symtab[i];
504 if (sym->st_value > hole_start) {
505 sym->st_value += hole_size;
506 VLOG(1) << "symtab[" << i << "] value adjusted to " << sym->st_value;
507 }
508 }
509
510 void* section_data = &symtab[0];
511 size_t bytes = symtab.size() * sizeof(symtab[0]);
512 RewriteSectionData(data, section_data, bytes);
513 }
514
515 // Resize a section. If the new size is larger than the current size, open 797 // Resize a section. If the new size is larger than the current size, open
516 // up a hole by increasing file offsets that come after the hole. If smaller 798 // up a hole by increasing file offsets that come after the hole. If smaller
517 // than the current size, remove the hole by decreasing those offsets. 799 // than the current size, remove the hole by decreasing those offsets.
518 template <typename Rel> 800 template <typename Rel>
519 void ResizeSection(Elf* elf, Elf_Scn* section, size_t new_size) { 801 void ResizeSection(Elf* elf, Elf_Scn* section, size_t new_size) {
520 ELF::Shdr* section_header = ELF::getshdr(section); 802 ELF::Shdr* section_header = ELF::getshdr(section);
521 if (section_header->sh_size == new_size) 803 if (section_header->sh_size == new_size)
522 return; 804 return;
523 805
524 // Note if we are resizing the real dyn relocations. If yes, then we have 806 // Note if we are resizing the real dyn relocations.
525 // to massage d_un.d_val in the dynamic section where d_tag is DT_RELSZ or
526 // DT_RELASZ and DT_RELCOUNT or DT_RELACOUNT.
527 size_t string_index; 807 size_t string_index;
528 elf_getshdrstrndx(elf, &string_index); 808 elf_getshdrstrndx(elf, &string_index);
529 const std::string section_name = 809 const std::string section_name =
530 elf_strptr(elf, string_index, section_header->sh_name); 810 elf_strptr(elf, string_index, section_header->sh_name);
531 const bool is_relocations_resize = 811 const bool is_relocations_resize =
532 (section_name == ".rel.dyn" || section_name == ".rela.dyn"); 812 (section_name == ".rel.dyn" || section_name == ".rela.dyn");
533 813
534 // Require that the section size and the data size are the same. True 814 // Require that the section size and the data size are the same. True
535 // in practice for all sections we resize when packing or unpacking. 815 // in practice for all sections we resize when packing or unpacking.
536 Elf_Data* data = GetSectionData(section); 816 Elf_Data* data = GetSectionData(section);
537 CHECK(data->d_off == 0 && data->d_size == section_header->sh_size); 817 CHECK(data->d_off == 0 && data->d_size == section_header->sh_size);
538 818
539 // Require that the section is not zero-length (that is, has allocated 819 // Require that the section is not zero-length (that is, has allocated
540 // data that we can validly expand). 820 // data that we can validly expand).
541 CHECK(data->d_size && data->d_buf); 821 CHECK(data->d_size && data->d_buf);
542 822
543 const ELF::Off hole_start = section_header->sh_offset; 823 const ELF::Off hole_start = section_header->sh_offset;
544 const ssize_t hole_size = new_size - data->d_size; 824 const ssize_t hole_size = new_size - data->d_size;
545 825
546 VLOG_IF(1, (hole_size > 0)) << "expand section size = " << data->d_size; 826 VLOG_IF(1, (hole_size > 0)) << "expand section size = " << data->d_size;
547 VLOG_IF(1, (hole_size < 0)) << "shrink section size = " << data->d_size; 827 VLOG_IF(1, (hole_size < 0)) << "shrink section size = " << data->d_size;
548 828
549 // Resize the data and the section header. 829 // Resize the data and the section header.
550 data->d_size += hole_size; 830 data->d_size += hole_size;
551 section_header->sh_size += hole_size; 831 section_header->sh_size += hole_size;
552 832
553 ELF::Ehdr* elf_header = ELF::getehdr(elf);
554 ELF::Phdr* elf_program_header = ELF::getphdr(elf);
555
556 // Add the hole size to all offsets in the ELF file that are after the 833 // Add the hole size to all offsets in the ELF file that are after the
557 // start of the hole. If the hole size is positive we are expanding the 834 // start of the hole. If the hole size is positive we are expanding the
558 // section to create a new hole; if negative, we are closing up a hole. 835 // section to create a new hole; if negative, we are closing up a hole.
559 836
560 // Start with the main ELF header. 837 // Start with the main ELF header.
838 ELF::Ehdr* elf_header = ELF::getehdr(elf);
561 AdjustElfHeaderForHole(elf_header, hole_start, hole_size); 839 AdjustElfHeaderForHole(elf_header, hole_start, hole_size);
562 840
563 // Adjust all program headers.
564 AdjustProgramHeadersForHole(elf_program_header,
565 elf_header->e_phnum,
566 hole_start,
567 hole_size);
568
569 // Adjust all section headers. 841 // Adjust all section headers.
570 AdjustSectionHeadersForHole(elf, hole_start, hole_size); 842 AdjustSectionHeadersForHole(elf, hole_start, hole_size);
571 843
572 // We use the dynamic program header entry to locate the dynamic section. 844 // If resizing the dynamic relocations, rewrite the program headers to
573 const ELF::Phdr* dynamic_program_header = NULL; 845 // either split or coalesce segments, and adjust dynamic entries to match.
846 if (is_relocations_resize) {
847 RewriteProgramHeadersForHole(elf, hole_start, hole_size);
574 848
575 // Find the dynamic program header entry. 849 Elf_Scn* dynamic_section = GetDynamicSection(elf);
576 for (size_t i = 0; i < elf_header->e_phnum; ++i) { 850 AdjustDynamicSectionForHole<Rel>(dynamic_section, hole_start, hole_size);
577 ELF::Phdr* program_header = &elf_program_header[i];
578
579 if (program_header->p_type == PT_DYNAMIC) {
580 dynamic_program_header = program_header;
581 }
582 } 851 }
583 CHECK(dynamic_program_header);
584
585 // Sections requiring special attention, and the packed android
586 // relocations offset.
587 Elf_Scn* dynamic_section = NULL;
588 Elf_Scn* dynsym_section = NULL;
589 Elf_Scn* plt_relocations_section = NULL;
590 Elf_Scn* symtab_section = NULL;
591 ELF::Off android_relocations_offset = 0;
592
593 // Find these sections, and the packed android relocations offset.
594 section = NULL;
595 while ((section = elf_nextscn(elf, section)) != NULL) {
596 ELF::Shdr* section_header = ELF::getshdr(section);
597 std::string name = elf_strptr(elf, string_index, section_header->sh_name);
598
599 if (section_header->sh_offset == dynamic_program_header->p_offset) {
600 dynamic_section = section;
601 }
602 if (name == ".dynsym") {
603 dynsym_section = section;
604 }
605 if (name == ".rel.plt" || name == ".rela.plt") {
606 plt_relocations_section = section;
607 }
608 if (name == ".symtab") {
609 symtab_section = section;
610 }
611
612 // Note packed android relocations offset.
613 if (name == ".android.rel.dyn" || name == ".android.rela.dyn") {
614 android_relocations_offset = section_header->sh_offset;
615 }
616 }
617 CHECK(dynamic_section != NULL);
618 CHECK(dynsym_section != NULL);
619 CHECK(plt_relocations_section != NULL);
620 CHECK(android_relocations_offset != 0);
621
622 // Adjust the .dynamic section for the hole. Because we have to edit the
623 // current contents of .dynamic we disallow resizing it.
624 CHECK(section != dynamic_section);
625 AdjustDynamicSectionForHole<Rel>(dynamic_section,
626 is_relocations_resize,
627 hole_start,
628 hole_size);
629
630 // Adjust the .dynsym section for the hole.
631 AdjustDynSymSectionForHole(dynsym_section, hole_start, hole_size);
632
633 // Adjust the plt relocations section for the hole.
634 AdjustRelPltSectionForHole<Rel>(plt_relocations_section,
635 hole_start,
636 hole_size);
637
638 // If present, adjust the .symtab section for the hole. If the shared
639 // library was stripped then .symtab will be absent.
640 if (symtab_section)
641 AdjustSymTabSectionForHole(symtab_section, hole_start, hole_size);
642 } 852 }
643 853
644 // Find the first slot in a dynamics array with the given tag. The array 854 // Find the first slot in a dynamics array with the given tag. The array
645 // always ends with a free (unused) element, which we exclude from the 855 // always ends with a free (unused) element, which we exclude from the
646 // search. Returns dynamics->size() if not found. 856 // search. Returns dynamics->size() if not found.
647 size_t FindDynamicEntry(ELF::Sword tag, 857 size_t FindDynamicEntry(ELF::Sword tag,
648 std::vector<ELF::Dyn>* dynamics) { 858 std::vector<ELF::Dyn>* dynamics) {
649 // Loop until the penultimate entry. We exclude the end sentinel. 859 // Loop until the penultimate entry. We exclude the end sentinel.
650 for (size_t i = 0; i < dynamics->size() - 1; ++i) { 860 for (size_t i = 0; i < dynamics->size() - 1; ++i) {
651 if (dynamics->at(i).d_tag == tag) 861 if (dynamics->at(i).d_tag == tag)
(...skipping 32 matching lines...)
684 for (size_t i = slot; i < dynamics->size() - 1; ++i) { 894 for (size_t i = slot; i < dynamics->size() - 1; ++i) {
685 dynamics->at(i) = dynamics->at(i + 1); 895 dynamics->at(i) = dynamics->at(i + 1);
686 VLOG(1) << "dynamic[" << i 896 VLOG(1) << "dynamic[" << i
687 << "] overwritten with dynamic[" << i + 1 << "]"; 897 << "] overwritten with dynamic[" << i + 1 << "]";
688 } 898 }
689 899
690 // Ensure that the end sentinel is still present. 900 // Ensure that the end sentinel is still present.
691 CHECK(dynamics->at(dynamics->size() - 1).d_tag == DT_NULL); 901 CHECK(dynamics->at(dynamics->size() - 1).d_tag == DT_NULL);
692 } 902 }
693 903
694 // Adjust a relocation. For a relocation without addend, we find its target
695 // in the section and adjust that. For a relocation with addend, the target
696 // is the relocation addend, and the section data at the target is zero.
697 template <typename Rel>
698 void AdjustRelocation(ssize_t index,
699 ELF::Addr hole_start,
700 ssize_t hole_size,
701 Rel* relocation,
702 ELF::Off* target);
703
704 template <>
705 void AdjustRelocation<ELF::Rel>(ssize_t index,
706 ELF::Addr hole_start,
707 ssize_t hole_size,
708 ELF::Rel* relocation,
709 ELF::Off* target) {
710 // Adjust the target if after the hole start.
711 if (*target > hole_start) {
712 *target += hole_size;
713 VLOG(1) << "relocation[" << index << "] target adjusted to " << *target;
714 }
715 }
716
717 template <>
718 void AdjustRelocation<ELF::Rela>(ssize_t index,
719 ELF::Addr hole_start,
720 ssize_t hole_size,
721 ELF::Rela* relocation,
722 ELF::Off* target) {
723 // The relocation's target is the addend. Adjust if after the hole start.
724 if (relocation->r_addend > hole_start) {
725 relocation->r_addend += hole_size;
726 VLOG(1) << "relocation["
727 << index << "] addend adjusted to " << relocation->r_addend;
728 }
729 }
730
731 // For relative relocations without addends, adjust the file data to which
732 // they refer. For relative relocations with addends, adjust the addends.
733 // This translates data into the area it will occupy after the hole in
734 // the dynamic relocations is added or removed.
735 template <typename Rel>
736 void AdjustRelocationTargets(Elf* elf,
737 ELF::Off hole_start,
738 ssize_t hole_size,
739 std::vector<Rel>* relocations) {
740 Elf_Scn* section = NULL;
741 while ((section = elf_nextscn(elf, section)) != NULL) {
742 const ELF::Shdr* section_header = ELF::getshdr(section);
743
744 // Ignore sections that do not appear in a process memory image.
745 if (section_header->sh_addr == 0)
746 continue;
747
748 Elf_Data* data = GetSectionData(section);
749
750 // Ignore sections with no effective data.
751 if (data->d_buf == NULL)
752 continue;
753
754 // Identify this section's start and end addresses.
755 const ELF::Addr section_start = section_header->sh_addr;
756 const ELF::Addr section_end = section_start + section_header->sh_size;
757
758 // Create a copy of the section's data.
759 uint8_t* area = new uint8_t[data->d_size];
760 memcpy(area, data->d_buf, data->d_size);
761
762 for (size_t i = 0; i < relocations->size(); ++i) {
763 Rel* relocation = &relocations->at(i);
764 CHECK(ELF_R_TYPE(relocation->r_info) == ELF::kRelativeRelocationCode);
765
766 // See if this relocation points into the current section.
767 if (relocation->r_offset >= section_start &&
768 relocation->r_offset < section_end) {
769 // The relocation's target is what it points to in area.
770 // For relocations without addend, this is what we adjust; for
771 // relocations with addend, we leave this (it will be zero)
772 // and instead adjust the addend.
773 ELF::Addr byte_offset = relocation->r_offset - section_start;
774 ELF::Off* target = reinterpret_cast<ELF::Off*>(area + byte_offset);
775 AdjustRelocation<Rel>(i, hole_start, hole_size, relocation, target);
776 }
777 }
778
779 // If we altered the data for this section, write it back.
780 if (memcmp(area, data->d_buf, data->d_size)) {
781 RewriteSectionData(data, area, data->d_size);
782 }
783 delete [] area;
784 }
785 }
786
787 // Pad relocations with a given number of null relocations.
788 template <typename Rel> 904 template <typename Rel>
789 void PadRelocations(size_t count, std::vector<Rel>* relocations); 905 void PadRelocations(size_t count, std::vector<Rel>* relocations);
790 906
791 template <> 907 template <>
792 void PadRelocations<ELF::Rel>(size_t count, 908 void PadRelocations<ELF::Rel>(size_t count,
793 std::vector<ELF::Rel>* relocations) { 909 std::vector<ELF::Rel>* relocations) {
794 ELF::Rel null_relocation; 910 ELF::Rel null_relocation;
795 null_relocation.r_offset = 0; 911 null_relocation.r_offset = 0;
796 null_relocation.r_info = ELF_R_INFO(0, ELF::kNoRelocationCode); 912 null_relocation.r_info = ELF_R_INFO(0, ELF::kNoRelocationCode);
797 std::vector<ELF::Rel> padding(count, null_relocation); 913 std::vector<ELF::Rel> padding(count, null_relocation);
798 relocations->insert(relocations->end(), padding.begin(), padding.end()); 914 relocations->insert(relocations->end(), padding.begin(), padding.end());
799 } 915 }
800 916
801 template <> 917 template <>
802 void PadRelocations<ELF::Rela>(size_t count, 918 void PadRelocations<ELF::Rela>(size_t count,
803 std::vector<ELF::Rela>* relocations) { 919 std::vector<ELF::Rela>* relocations) {
804 ELF::Rela null_relocation; 920 ELF::Rela null_relocation;
805 null_relocation.r_offset = 0; 921 null_relocation.r_offset = 0;
806 null_relocation.r_info = ELF_R_INFO(0, ELF::kNoRelocationCode); 922 null_relocation.r_info = ELF_R_INFO(0, ELF::kNoRelocationCode);
807 null_relocation.r_addend = 0; 923 null_relocation.r_addend = 0;
808 std::vector<ELF::Rela> padding(count, null_relocation); 924 std::vector<ELF::Rela> padding(count, null_relocation);
809 relocations->insert(relocations->end(), padding.begin(), padding.end()); 925 relocations->insert(relocations->end(), padding.begin(), padding.end());
810 } 926 }
811 927
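For a concrete picture of the padding entries built above, assuming a 32-bit ARM target (where the no-op relocation type is R_ARM_NONE): each pad slot is a relocation with offset 0, symbol 0, and a NONE type, which the loader ignores. Illustrative helper only, not the patch's code.

// Illustrative only: one way a NONE-type padding relocation could be built
// for 32-bit ARM with plain <elf.h>.
#include <elf.h>
#include <cstddef>
#include <vector>

std::vector<Elf32_Rel> MakeArmPaddingRelocations(size_t count) {
  Elf32_Rel null_relocation;
  null_relocation.r_offset = 0;
  null_relocation.r_info = ELF32_R_INFO(0, R_ARM_NONE);  // symbol 0, type 0
  return std::vector<Elf32_Rel>(count, null_relocation);
}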
812 // Adjust relocations so that the offset that they indicate will be correct
813 // after the hole in the dynamic relocations is added or removed (in effect,
814 // relocate the relocations).
815 template <typename Rel>
816 void AdjustRelocations(ELF::Off hole_start,
817 ssize_t hole_size,
818 std::vector<Rel>* relocations) {
819 for (size_t i = 0; i < relocations->size(); ++i) {
820 Rel* relocation = &relocations->at(i);
821 if (relocation->r_offset > hole_start) {
822 relocation->r_offset += hole_size;
823 VLOG(1) << "relocation[" << i
824 << "] offset adjusted to " << relocation->r_offset;
825 }
826 }
827 }
828
829 } // namespace 928 } // namespace
830 929
831 // Remove relative entries from dynamic relocations and write as packed 930 // Remove relative entries from dynamic relocations and write as packed
832 // data into android packed relocations. 931 // data into android packed relocations.
833 bool ElfFile::PackRelocations() { 932 bool ElfFile::PackRelocations() {
834 // Load the ELF file into libelf. 933 // Load the ELF file into libelf.
835 if (!Load()) { 934 if (!Load()) {
836 LOG(ERROR) << "Failed to load as ELF"; 935 LOG(ERROR) << "Failed to load as ELF";
837 return false; 936 return false;
838 } 937 }
(...skipping 49 matching lines...)
888 LOG(INFO) << "Other : " << other_relocations.size() << " entries"; 987 LOG(INFO) << "Other : " << other_relocations.size() << " entries";
889 LOG(INFO) << "Total : " << relocations.size() << " entries"; 988 LOG(INFO) << "Total : " << relocations.size() << " entries";
890 989
891 // If no relative relocations then we have nothing packable. Perhaps 990 // If no relative relocations then we have nothing packable. Perhaps
892 // the shared object has already been packed? 991 // the shared object has already been packed?
893 if (relative_relocations.empty()) { 992 if (relative_relocations.empty()) {
894 LOG(ERROR) << "No relative relocations found (already packed?)"; 993 LOG(ERROR) << "No relative relocations found (already packed?)";
895 return false; 994 return false;
896 } 995 }
897 996
898 // Unless padding, pre-apply relative relocations to account for the 997 // If not padding fully, apply only enough padding to preserve alignment.
899 // hole, and pre-adjust all relocation offsets accordingly. 998 // Otherwise, pad so that we do not shrink the relocations section at all.
900 if (!is_padding_relocations_) { 999 if (!is_padding_relocations_) {
901 // Pre-calculate the size of the hole we will close up when we rewrite 1000 // Calculate the size of the hole we will close up when we rewrite
902 // dynamic relocations. We have to adjust relocation addresses to 1001 // dynamic relocations.
903 // account for this.
904 ELF::Shdr* section_header = ELF::getshdr(relocations_section_); 1002 ELF::Shdr* section_header = ELF::getshdr(relocations_section_);
905 const ELF::Off hole_start = section_header->sh_offset; 1003 const ELF::Off hole_start = section_header->sh_offset;
906 ssize_t hole_size = 1004 ssize_t hole_size =
907 relative_relocations.size() * sizeof(relative_relocations[0]); 1005 relative_relocations.size() * sizeof(relative_relocations[0]);
908 const ssize_t unaligned_hole_size = hole_size; 1006 const ssize_t unaligned_hole_size = hole_size;
909 1007
910 // Adjust the actual hole size to preserve alignment. We always adjust 1008 // Adjust the actual hole size to preserve alignment. We always adjust
911 // by a whole number of NONE-type relocations. 1009 // by a whole number of NONE-type relocations.
912 while (hole_size % kPreserveAlignment) 1010 while (hole_size % kPreserveAlignment)
913 hole_size -= sizeof(relative_relocations[0]); 1011 hole_size -= sizeof(relative_relocations[0]);
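To make the alignment adjustment concrete (the entry size and kPreserveAlignment value below are illustrative assumptions, not taken from the patch):

  // 101 relative Elf32_Rela entries of 12 bytes each give a raw hole of
  // 1212 bytes; with an assumed kPreserveAlignment of 8, 1212 % 8 == 4,
  // so one entry's worth is given back and the hole shrinks to 1200 bytes.
  ssize_t hole_size = 101 * 12;   // 1212
  while (hole_size % 8)
    hole_size -= 12;              // 1212 -> 1200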
(...skipping 18 matching lines...)
932 1030
933 // Padding may have removed any packing benefit. 1031 // Padding may have removed any packing benefit.
934 if (padding >= relative_relocations.size()) { 1032 if (padding >= relative_relocations.size()) {
935 LOG(INFO) << "Too few relative relocations to pack after padding"; 1033 LOG(INFO) << "Too few relative relocations to pack after padding";
936 return false; 1034 return false;
937 } 1035 }
938 1036
939 // Add null relocations to other_relocations to preserve alignment. 1037 // Add null relocations to other_relocations to preserve alignment.
940 PadRelocations<Rel>(padding, &other_relocations); 1038 PadRelocations<Rel>(padding, &other_relocations);
941 LOG(INFO) << "Alignment pad : " << padding << " relocations"; 1039 LOG(INFO) << "Alignment pad : " << padding << " relocations";
942
943 // Apply relocations to all relative data to relocate it into the
944 // area it will occupy once the hole in the dynamic relocations is removed.
945 AdjustRelocationTargets<Rel>(
946 elf_, hole_start, -hole_size, &relative_relocations);
947 // Relocate the relocations.
948 AdjustRelocations<Rel>(hole_start, -hole_size, &relative_relocations);
949 AdjustRelocations<Rel>(hole_start, -hole_size, &other_relocations);
950 } else { 1040 } else {
951 // If padding, add NONE-type relocations to other_relocations to make it 1041 // If padding, add NONE-type relocations to other_relocations to make it
952 // the same size as the original relocations we read in. This makes 1042 // the same size as the original relocations we read in. This makes
953 // the ResizeSection() below a no-op. 1043 // the ResizeSection() below a no-op.
954 const size_t padding = relocations.size() - other_relocations.size(); 1044 const size_t padding = relocations.size() - other_relocations.size();
955 PadRelocations<Rel>(padding, &other_relocations); 1045 PadRelocations<Rel>(padding, &other_relocations);
956 } 1046 }
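A quick sketch of the padding-mode arithmetic, with illustrative counts:

  const size_t total_relocations = 1200;  // entries originally in the section
  const size_t other_count = 50;          // non-relative entries kept in place
  const size_t padding = total_relocations - other_count;  // 1150 NONE entries
  // After PadRelocations<Rel>(padding, &other_relocations) the rewritten
  // section again holds 1200 entries, so the later ResizeSection() is a
  // no-op and no file offsets move.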
957 1047
958 // Pack relative relocations. 1048 // Pack relative relocations.
959 const size_t initial_bytes = 1049 const size_t initial_bytes =
(...skipping 42 matching lines...)
1002 1092
1003 // Rewrite .dynamic to include two new tags describing the packed android 1093 // Rewrite .dynamic to include two new tags describing the packed android
1004 // relocations. 1094 // relocations.
1005 data = GetSectionData(dynamic_section_); 1095 data = GetSectionData(dynamic_section_);
1006 const ELF::Dyn* dynamic_base = reinterpret_cast<ELF::Dyn*>(data->d_buf); 1096 const ELF::Dyn* dynamic_base = reinterpret_cast<ELF::Dyn*>(data->d_buf);
1007 std::vector<ELF::Dyn> dynamics( 1097 std::vector<ELF::Dyn> dynamics(
1008 dynamic_base, 1098 dynamic_base,
1009 dynamic_base + data->d_size / sizeof(dynamics[0])); 1099 dynamic_base + data->d_size / sizeof(dynamics[0]));
1010 // Use two of the spare slots to describe the packed section. 1100 // Use two of the spare slots to describe the packed section.
1011 ELF::Shdr* section_header = ELF::getshdr(android_relocations_section_); 1101 ELF::Shdr* section_header = ELF::getshdr(android_relocations_section_);
1012 const ELF::Dyn offset_dyn 1102 {
1013 = {DT_ANDROID_REL_OFFSET, {section_header->sh_offset}}; 1103 ELF::Dyn dyn;
1014 AddDynamicEntry(offset_dyn, &dynamics); 1104 dyn.d_tag = DT_ANDROID_REL_OFFSET;
1015 const ELF::Dyn size_dyn 1105 dyn.d_un.d_ptr = section_header->sh_offset;
1016 = {DT_ANDROID_REL_SIZE, {section_header->sh_size}}; 1106 AddDynamicEntry(dyn, &dynamics);
1017 AddDynamicEntry(size_dyn, &dynamics); 1107 }
1108 {
1109 ELF::Dyn dyn;
1110 dyn.d_tag = DT_ANDROID_REL_SIZE;
1111 dyn.d_un.d_val = section_header->sh_size;
1112 AddDynamicEntry(dyn, &dynamics);
1113 }
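The rewritten form sets the union member explicitly, d_ptr for the offset tag and d_val for the size tag, rather than brace-initializing ELF::Dyn, which makes it unambiguous which member of d_un each tag carries. A minimal sketch of the resulting entries, with example values only:

  // Assumed example: the packed section sits at file offset 0x1000 and is
  // 0x200 bytes long.
  //   DT_ANDROID_REL_OFFSET  d_un.d_ptr == 0x1000  (section_header->sh_offset)
  //   DT_ANDROID_REL_SIZE    d_un.d_val == 0x200   (section_header->sh_size)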
1018 const void* dynamics_data = &dynamics[0]; 1114 const void* dynamics_data = &dynamics[0];
1019 const size_t dynamics_bytes = dynamics.size() * sizeof(dynamics[0]); 1115 const size_t dynamics_bytes = dynamics.size() * sizeof(dynamics[0]);
1020 RewriteSectionData(data, dynamics_data, dynamics_bytes); 1116 RewriteSectionData(data, dynamics_data, dynamics_bytes);
1021 1117
1022 Flush(); 1118 Flush();
1023 return true; 1119 return true;
1024 } 1120 }
1025 1121
1026 // Find packed relative relocations in the packed android relocations 1122 // Find packed relative relocations in the packed android relocations
1027 // section, unpack them, and rewrite the dynamic relocations section to 1123 // section, unpack them, and rewrite the dynamic relocations section to
(...skipping 91 matching lines...)
1119 // dynamic relocations. We have to adjust relocation addresses to 1215 // dynamic relocations. We have to adjust relocation addresses to
1120 // account for this. 1216 // account for this.
1121 ELF::Shdr* section_header = ELF::getshdr(relocations_section_); 1217 ELF::Shdr* section_header = ELF::getshdr(relocations_section_);
1122 const ELF::Off hole_start = section_header->sh_offset; 1218 const ELF::Off hole_start = section_header->sh_offset;
1123 ssize_t hole_size = 1219 ssize_t hole_size =
1124 relative_relocations.size() * sizeof(relative_relocations[0]); 1220 relative_relocations.size() * sizeof(relative_relocations[0]);
1125 1221
1126 // Adjust the hole size for the padding added to preserve alignment. 1222 // Adjust the hole size for the padding added to preserve alignment.
1127 hole_size -= padding * sizeof(other_relocations[0]); 1223 hole_size -= padding * sizeof(other_relocations[0]);
1128 LOG(INFO) << "Expansion : " << hole_size << " bytes"; 1224 LOG(INFO) << "Expansion : " << hole_size << " bytes";
1129
1130 // Apply relocations to all relative data to relocate it into the
1131 // area it will occupy once the hole in dynamic relocations is opened.
1132 AdjustRelocationTargets<Rel>(
1133 elf_, hole_start, hole_size, &relative_relocations);
1134 // Relocate the relocations.
1135 AdjustRelocations<Rel>(hole_start, hole_size, &relative_relocations);
1136 AdjustRelocations<Rel>(hole_start, hole_size, &other_relocations);
1137 } 1225 }
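For the expansion computed above, an illustrative calculation:

  // With 1000 packed relative relocations of 8 bytes each (Elf32_Rel) and
  // 2 alignment-padding entries recorded at pack time, the section grows by
  //   1000 * 8 - 2 * 8 = 7984 bytes
  // when the hole is reopened, restoring the pre-packed file offsets.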
1138 1226
1139 // Rewrite the current dynamic relocations section to be the relative 1227 // Rewrite the current dynamic relocations section to be the relative
1140 // relocations followed by other relocations. This is the usual order in 1228 // relocations followed by other relocations. This is the usual order in
1141 // which we find them after linking, so this action will normally put the 1229 // which we find them after linking, so this action will normally put the
1142 // entire dynamic relocations section back to its pre-split-and-packed state. 1230 // entire dynamic relocations section back to its pre-split-and-packed state.
1143 relocations.assign(relative_relocations.begin(), relative_relocations.end()); 1231 relocations.assign(relative_relocations.begin(), relative_relocations.end());
1144 relocations.insert(relocations.end(), 1232 relocations.insert(relocations.end(),
1145 other_relocations.begin(), other_relocations.end()); 1233 other_relocations.begin(), other_relocations.end());
1146 const void* section_data = &relocations[0]; 1234 const void* section_data = &relocations[0];
(...skipping 42 matching lines...)
1189 1277
1190 // Clean up libelf, and truncate the output file to the number of bytes 1278 // Clean up libelf, and truncate the output file to the number of bytes
1191 // written by elf_update(). 1279 // written by elf_update().
1192 elf_end(elf_); 1280 elf_end(elf_);
1193 elf_ = NULL; 1281 elf_ = NULL;
1194 const int truncate = ftruncate(fd_, file_bytes); 1282 const int truncate = ftruncate(fd_, file_bytes);
1195 CHECK(truncate == 0); 1283 CHECK(truncate == 0);
1196 } 1284 }
1197 1285
1198 } // namespace relocation_packer 1286 } // namespace relocation_packer