OLD | NEW |
1 // Copyright (c) 1994-2006 Sun Microsystems Inc. | 1 // Copyright (c) 1994-2006 Sun Microsystems Inc. |
2 // All Rights Reserved. | 2 // All Rights Reserved. |
3 // | 3 // |
4 // Redistribution and use in source and binary forms, with or without | 4 // Redistribution and use in source and binary forms, with or without |
5 // modification, are permitted provided that the following conditions | 5 // modification, are permitted provided that the following conditions |
6 // are met: | 6 // are met: |
7 // | 7 // |
8 // - Redistributions of source code must retain the above copyright notice, | 8 // - Redistributions of source code must retain the above copyright notice, |
9 // this list of conditions and the following disclaimer. | 9 // this list of conditions and the following disclaimer. |
10 // | 10 // |
(...skipping 137 matching lines...)
148 | 148 |
149 | 149 |
150 void RelocInfo::set_target_object(Object* target, WriteBarrierMode mode) { | 150 void RelocInfo::set_target_object(Object* target, WriteBarrierMode mode) { |
151 ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT); | 151 ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT); |
152 ASSERT(!target->IsConsString()); | 152 ASSERT(!target->IsConsString()); |
153 Assembler::set_target_pointer_at(pc_, reinterpret_cast<Address>(target)); | 153 Assembler::set_target_pointer_at(pc_, reinterpret_cast<Address>(target)); |
154 if (mode == UPDATE_WRITE_BARRIER && | 154 if (mode == UPDATE_WRITE_BARRIER && |
155 host() != NULL && | 155 host() != NULL && |
156 target->IsHeapObject()) { | 156 target->IsHeapObject()) { |
157 host()->GetHeap()->incremental_marking()->RecordWrite( | 157 host()->GetHeap()->incremental_marking()->RecordWrite( |
158 host(), &Memory::Object_at(pc_), HeapObject::cast(target)); | 158 host(), |
| 159 &Memory::Object_at(Assembler::UntagAddress(pc_)), |
| 160 HeapObject::cast(target)); |
159 } | 161 } |
160 } | 162 } |
161 | 163 |
162 | 164 |
163 Address* RelocInfo::target_reference_address() { | 165 Address* RelocInfo::target_reference_address() { |
164 ASSERT(rmode_ == EXTERNAL_REFERENCE); | 166 ASSERT(rmode_ == EXTERNAL_REFERENCE); |
165 reconstructed_adr_ptr_ = Assembler::target_address_at(pc_); | 167 reconstructed_adr_ptr_ = Assembler::target_address_at(pc_); |
166 return &reconstructed_adr_ptr_; | 168 return &reconstructed_adr_ptr_; |
167 } | 169 } |
168 | 170 |
(...skipping 204 matching lines...)
373 if (buffer_space() <= kGap) { | 375 if (buffer_space() <= kGap) { |
374 GrowBuffer(); | 376 GrowBuffer(); |
375 } | 377 } |
376 if (pc_offset() >= next_buffer_check_) { | 378 if (pc_offset() >= next_buffer_check_) { |
377 CheckConstPool(false, true); | 379 CheckConstPool(false, true); |
378 } | 380 } |
379 } | 381 } |
380 | 382 |
381 | 383 |
382 void Assembler::emit(Instr x) { | 384 void Assembler::emit(Instr x) { |
| 385 if (is_thumb_mode()) { |
| 386 emit32(x); |
| 387 return; |
| 388 } |
383 CheckBuffer(); | 389 CheckBuffer(); |
384 *reinterpret_cast<Instr*>(pc_) = x; | 390 *reinterpret_cast<Instr*>(pc_) = x; |
385 pc_ += kInstrSize; | 391 pc_ += kInstrSize; |
386 } | 392 } |
387 | 393 |
388 | 394 |
389 void Assembler::emit16(Instr16 x, bool check_buffer) { | 395 void Assembler::emit16(Instr16 x, bool check_buffer) { |
390 if (check_buffer) { | 396 if (check_buffer) { |
391 CheckBuffer(); | 397 CheckBuffer(); |
392 } | 398 } |
393 *reinterpret_cast<Instr16*>(pc_) = x; | 399 *reinterpret_cast<Instr16*>(pc_) = x; |
394 pc_ += 2; | 400 pc_ += 2; |
395 } | 401 } |
396 | 402 |
397 | 403 |
398 void Assembler::emit32(Instr x) { | 404 void Assembler::emit32(Instr x) { |
399 // Emit the top halfword, then the bottom halfword. | 405 // Emit the top halfword, then the bottom halfword. |
400 emit16((x>>16) & 0xffff); | 406 emit16((x>>16) & 0xffff); |
401 emit16(x & 0xffff, false); | 407 emit16(x & 0xffff, false); |
402 } | 408 } |
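
A note on the emit path above: in Thumb mode a 32-bit encoding is written as two halfwords, most significant halfword first, which is what emit32() does via two emit16() calls. A minimal standalone sketch of that halfword ordering (EmitThumb32 is an illustrative name, not a V8 function):

    #include <cstdint>
    #include <cstring>

    // Store a Thumb2 32-bit encoding as two consecutive 16-bit halfwords,
    // top halfword first, mirroring emit32() above.
    void EmitThumb32(uint8_t* pc, uint32_t encoding) {
      uint16_t hw1 = static_cast<uint16_t>(encoding >> 16);     // top halfword
      uint16_t hw2 = static_cast<uint16_t>(encoding & 0xffff);  // bottom halfword
      std::memcpy(pc, &hw1, sizeof(hw1));
      std::memcpy(pc + 2, &hw2, sizeof(hw2));
    }
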
403 | 409 |
404 | 410 |
405 Address Assembler::target_pointer_address_at(Address pc) { | 411 Address Assembler::target_pointer_address_at(Address pc) { |
| 412 if (IsThumbAddress(pc)) { |
| 413 return thumb_target_pointer_address_at(UntagAddress(pc)); |
| 414 } |
| 415 |
406 Address target_pc = pc; | 416 Address target_pc = pc; |
407 Instr instr = Memory::int32_at(target_pc); | 417 Instr instr = Memory::int32_at(target_pc); |
408 // If we have a bx instruction, the instruction before the bx is | 418 // If we have a bx instruction, the instruction before the bx is |
409 // what we need to patch. | 419 // what we need to patch. |
410 static const int32_t kBxInstMask = 0x0ffffff0; | 420 static const int32_t kBxInstMask = 0x0ffffff0; |
411 static const int32_t kBxInstPattern = 0x012fff10; | 421 static const int32_t kBxInstPattern = 0x012fff10; |
412 if ((instr & kBxInstMask) == kBxInstPattern) { | 422 if ((instr & kBxInstMask) == kBxInstPattern) { |
413 target_pc -= kInstrSize; | 423 target_pc -= kInstrSize; |
414 instr = Memory::int32_at(target_pc); | 424 instr = Memory::int32_at(target_pc); |
415 } | 425 } |
416 | 426 |
417 // With a blx instruction, the instruction before is what needs to be patched. | 427 // With a blx instruction, the instruction before is what needs to be patched. |
418 if ((instr & kBlxRegMask) == kBlxRegPattern) { | 428 if ((instr & kBlxRegMask) == kBlxRegPattern) { |
419 target_pc -= kInstrSize; | 429 target_pc -= kInstrSize; |
420 instr = Memory::int32_at(target_pc); | 430 instr = Memory::int32_at(target_pc); |
421 } | 431 } |
422 | 432 |
423 ASSERT(IsLdrPcImmediateOffset(instr)); | 433 ASSERT(IsLdrPcImmediateOffset(instr)); |
424 int offset = instr & 0xfff; // offset_12 is unsigned | 434 int offset = instr & 0xfff; // offset_12 is unsigned |
425 if ((instr & (1 << 23)) == 0) offset = -offset; // U bit defines offset sign | 435 if ((instr & (1 << 23)) == 0) offset = -offset; // U bit defines offset sign |
426 // Verify that the constant pool comes after the instruction referencing it. | 436 // Verify that the constant pool comes after the instruction referencing it. |
427 ASSERT(offset >= -4); | 437 ASSERT(offset >= -4); |
428 return target_pc + offset + 8; | 438 return target_pc + offset + 8; |
429 } | 439 } |
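
For the ARM path above, the constant-pool slot is found by decoding the ldr's 12-bit offset, applying the U bit as the sign, and adding the 8-byte PC read-ahead. A hedged sketch of just that decode (LdrPcSlotAddress is an illustrative helper, not part of the assembler):

    #include <cstdint>

    // Compute the address an ARM "ldr rd, [pc, #offset]" reads from.
    // The ARM PC reads as the instruction address plus 8 when used as a base.
    uint32_t LdrPcSlotAddress(uint32_t instr_address, uint32_t instr) {
      int32_t offset = instr & 0xfff;                  // offset_12 is unsigned
      if ((instr & (1 << 23)) == 0) offset = -offset;  // U bit gives the sign
      return instr_address + 8 + offset;
    }
    // Example: 0xE59FC004 is "ldr ip, [pc, #4]"; at 0x1000 it reads
    // 0x1000 + 8 + 4 = 0x100C.
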
430 | 440 |
431 | 441 |
432 Address Assembler::target_pointer_at(Address pc) { | 442 Address Assembler::target_pointer_at(Address pc) { |
| 443 if (IsThumbAddress(pc)) { |
| 444 return thumb_target_pointer_at(UntagAddress(pc)); |
| 445 } |
433 if (IsMovW(Memory::int32_at(pc))) { | 446 if (IsMovW(Memory::int32_at(pc))) { |
434 ASSERT(IsMovT(Memory::int32_at(pc + kInstrSize))); | 447 ASSERT(IsMovT(Memory::int32_at(pc + kInstrSize))); |
435 Instruction* instr = Instruction::At(pc); | 448 Instruction* instr = Instruction::At(pc); |
436 Instruction* next_instr = Instruction::At(pc + kInstrSize); | 449 Instruction* next_instr = Instruction::At(pc + kInstrSize); |
437 return reinterpret_cast<Address>( | 450 return reinterpret_cast<Address>( |
438 (next_instr->ImmedMovwMovtValue() << 16) | | 451 (next_instr->ImmedMovwMovtValue() << 16) | |
439 instr->ImmedMovwMovtValue()); | 452 instr->ImmedMovwMovtValue()); |
440 } | 453 } |
441 return Memory::Address_at(target_pointer_address_at(pc)); | 454 return Memory::Address_at(target_pointer_address_at(pc)); |
442 } | 455 } |
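
When the pointer was materialized with a movw/movt pair instead of a constant-pool load, the two 16-bit immediates are simply recombined, as target_pointer_at() does above. A small sketch of that reconstruction (the helper name is illustrative):

    #include <cstdint>

    // movw supplies the low 16 bits, movt the high 16 bits of the pointer.
    uint32_t PointerFromMovwMovt(uint32_t movw_imm16, uint32_t movt_imm16) {
      return (movt_imm16 << 16) | movw_imm16;
    }
    // e.g. movw ip, #0x5678 followed by movt ip, #0x1234 materializes 0x12345678.
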
443 | 456 |
444 | 457 |
445 Address Assembler::target_address_from_return_address(Address pc) { | 458 Address Assembler::target_address_from_return_address(Address pc) { |
446 // Returns the address of the call target from the return address that will | 459 // Returns the address of the call target from the return address that will |
447 // be returned to after a call. | 460 // be returned to after a call. |
448 // Call sequence on V7 or later is: | 461 // Call sequence on V7 or later is: |
449 // movw ip, #... @ call address low 16 | 462 // movw ip, #... @ call address low 16 |
450 // movt ip, #... @ call address high 16 | 463 // movt ip, #... @ call address high 16 |
451 // blx ip | 464 // blx ip |
452 // @ return address | 465 // @ return address |
453 // Or pre-V7 or cases that need frequent patching: | 466 // Or pre-V7 or cases that need frequent patching: |
454 // ldr ip, [pc, #...] @ call address | 467 // ldr ip, [pc, #...] @ call address |
455 // blx ip | 468 // blx ip |
456 // @ return address | 469 // @ return address |
457 Address candidate = pc - 2 * Assembler::kInstrSize; | 470 Address candidate = pc - 2 * Assembler::kInstrSize; |
458 Instr candidate_instr(Memory::int32_at(candidate)); | 471 Instr candidate_instr(Memory::int32_at(candidate)); |
459 if (IsLdrPcImmediateOffset(candidate_instr)) { | 472 if (IsLdrPcImmediateOffset(candidate_instr)) { |
460 return candidate; | 473 return candidate; |
| 474 } else { |
| 475 candidate = pc - Assembler::kInstrSize - Assembler::kInstr16Size - 1; |
| 476 if (IsThumbLdrPcImmediateOffset(thumb32_instr_at(candidate))) { |
| 477 return candidate + 1; |
| 478 } else { |
| 479 candidate = pc - 2 * Assembler::kInstrSize - Assembler::kInstr16Size - 1; |
| 480 if (IsMovWThumb(thumb32_instr_at(candidate))) { |
| 481 ASSERT(IsMovTThumb(thumb32_instr_at(candidate + kInstrSize))); |
| 482 return candidate + 1; |
| 483 } |
| 484 } |
461 } | 485 } |
462 candidate = pc - 3 * Assembler::kInstrSize; | 486 candidate = pc - 3 * Assembler::kInstrSize; |
463 ASSERT(IsMovW(Memory::int32_at(candidate)) && | 487 ASSERT(IsMovW(Memory::int32_at(candidate)) && |
464 IsMovT(Memory::int32_at(candidate + kInstrSize))); | 488 IsMovT(Memory::int32_at(candidate + kInstrSize))); |
465 return candidate; | 489 return candidate; |
466 } | 490 } |
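
The back-scan distances above follow from the lengths of the call sequences. Assuming kInstrSize == 4, kInstr16Size == 2, and that Thumb return addresses carry the tag bit, the offsets work out as:

    // ARM   ldr ip, [pc, #..] ; blx ip    -> call start = return addr - 2*4
    // ARM   movw ; movt ; blx ip          -> call start = return addr - 3*4
    // Thumb ldr.w ip, [pc, #..] ; blx ip  -> call start = return addr - (4 + 2 + 1)
    // Thumb movw.w ; movt.w ; blx ip      -> call start = return addr - (2*4 + 2 + 1)
    // The "- 1" in the Thumb cases removes the tag bit; the returned candidate
    // is re-tagged with "+ 1" so callers keep a Thumb-marked address.
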
467 | 491 |
468 | 492 |
469 Address Assembler::return_address_from_call_start(Address pc) { | 493 Address Assembler::return_address_from_call_start(Address pc) { |
| 494 if (IsThumbAddress(pc)) { |
| 495 pc = UntagAddress(pc); |
| 496 Instr instr = thumb32_instr_at(pc); |
| 497 if (IsThumbLdrPcImmediateOffset(instr)) { |
| 498 return pc + kInstrSize + kInstr16Size; |
| 499 } |
| 500 ASSERT(IsMovWThumb(instr)); |
| 501 ASSERT(IsMovTThumb(thumb32_instr_at(pc + kInstrSize))); |
| 502 return pc + kInstrSize * 2 + kInstr16Size; |
| 503 } |
470 if (IsLdrPcImmediateOffset(Memory::int32_at(pc))) { | 504 if (IsLdrPcImmediateOffset(Memory::int32_at(pc))) { |
471 return pc + kInstrSize * 2; | 505 return pc + kInstrSize * 2; |
472 } else { | 506 } else { |
473 ASSERT(IsMovW(Memory::int32_at(pc))); | 507 ASSERT(IsMovW(Memory::int32_at(pc))); |
474 ASSERT(IsMovT(Memory::int32_at(pc + kInstrSize))); | 508 ASSERT(IsMovT(Memory::int32_at(pc + kInstrSize))); |
475 return pc + kInstrSize * 3; | 509 return pc + kInstrSize * 3; |
476 } | 510 } |
477 } | 511 } |
478 | 512 |
479 | 513 |
480 void Assembler::deserialization_set_special_target_at( | 514 void Assembler::deserialization_set_special_target_at( |
481 Address constant_pool_entry, Address target) { | 515 Address constant_pool_entry, Address target) { |
482 Memory::Address_at(constant_pool_entry) = target; | 516 Memory::Address_at(constant_pool_entry) = target; |
483 } | 517 } |
484 | 518 |
485 | 519 |
486 void Assembler::set_external_target_at(Address constant_pool_entry, | 520 void Assembler::set_external_target_at(Address constant_pool_entry, |
487 Address target) { | 521 Address target) { |
488 Memory::Address_at(constant_pool_entry) = target; | 522 Memory::Address_at(constant_pool_entry) = target; |
489 } | 523 } |
490 | 524 |
491 | 525 |
492 static Instr EncodeMovwImmediate(uint32_t immediate) { | 526 static Instr EncodeMovwImmediate(uint32_t immediate) { |
493 ASSERT(immediate < 0x10000); | 527 ASSERT(immediate < 0x10000); |
494 return ((immediate & 0xf000) << 4) | (immediate & 0xfff); | 528 return ((immediate & 0xf000) << 4) | (immediate & 0xfff); |
495 } | 529 } |
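
EncodeMovwImmediate splits the 16-bit value into the imm4:imm12 fields of the ARM movw encoding (bits 19:16 and 11:0). A worked example:

    // EncodeMovwImmediate(0xABCD)
    //   = ((0xABCD & 0xf000) << 4) | (0xABCD & 0xfff)
    //   = (0xA000 << 4) | 0xBCD
    //   = 0xA0000 | 0xBCD
    //   = 0xA0BCD   // imm4 = 0xA in bits 19:16, imm12 = 0xBCD in bits 11:0
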
496 | 530 |
497 | 531 |
498 void Assembler::set_target_pointer_at(Address pc, Address target) { | 532 void Assembler::set_target_pointer_at(Address pc, Address target) { |
| 533 if (IsThumbAddress(pc)) { |
| 534 thumb_set_target_pointer_at(UntagAddress(pc), target); |
| 535 return; |
| 536 } |
499 if (IsMovW(Memory::int32_at(pc))) { | 537 if (IsMovW(Memory::int32_at(pc))) { |
500 ASSERT(IsMovT(Memory::int32_at(pc + kInstrSize))); | 538 ASSERT(IsMovT(Memory::int32_at(pc + kInstrSize))); |
501 uint32_t* instr_ptr = reinterpret_cast<uint32_t*>(pc); | 539 uint32_t* instr_ptr = reinterpret_cast<uint32_t*>(pc); |
502 uint32_t immediate = reinterpret_cast<uint32_t>(target); | 540 uint32_t immediate = reinterpret_cast<uint32_t>(target); |
503 uint32_t intermediate = instr_ptr[0]; | 541 uint32_t intermediate = instr_ptr[0]; |
504 intermediate &= ~EncodeMovwImmediate(0xFFFF); | 542 intermediate &= ~EncodeMovwImmediate(0xFFFF); |
505 intermediate |= EncodeMovwImmediate(immediate & 0xFFFF); | 543 intermediate |= EncodeMovwImmediate(immediate & 0xFFFF); |
506 instr_ptr[0] = intermediate; | 544 instr_ptr[0] = intermediate; |
507 intermediate = instr_ptr[1]; | 545 intermediate = instr_ptr[1]; |
508 intermediate &= ~EncodeMovwImmediate(0xFFFF); | 546 intermediate &= ~EncodeMovwImmediate(0xFFFF); |
(...skipping 26 matching lines...)
535 set_target_pointer_at(pc, target); | 573 set_target_pointer_at(pc, target); |
536 } | 574 } |
537 | 575 |
538 | 576 |
539 void Assembler::emit_it(Condition cond) { | 577 void Assembler::emit_it(Condition cond) { |
540 if (cond != al) { | 578 if (cond != al) { |
541 it_thumb(cond, 1); | 579 it_thumb(cond, 1); |
542 } | 580 } |
543 } | 581 } |
544 | 582 |
| 583 |
| 584 Address Assembler::AlignAddress(Address addr) { |
| 585 return (Address)((uint32_t)addr & ~3); |
| 586 } |
| 587 |
| 588 |
| 589 Address Assembler::UntagAddress(Address addr) { |
| 590 return (Address)((uint32_t)addr & ~1); |
| 591 } |
| 592 |
| 593 |
| 594 bool Assembler::IsThumbAddress(Address addr) { |
| 595 return (((uint32_t)addr) & 1) == 1; |
| 596 } |
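
These helpers implement the address-tagging convention used by the Thumb support: an address that points into Thumb code has bit 0 set, and Thumb PC-relative loads are based on a 4-byte-aligned PC. A few worked values:

    // IsThumbAddress(0x00010000) == false   (plain ARM address)
    // IsThumbAddress(0x00010001) == true    (Thumb-tagged address)
    // UntagAddress(0x00010001)   == 0x00010000
    // AlignAddress(0x00010006)   == 0x00010004   (round down to 4-byte boundary)
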
| 597 |
| 598 |
| 599 Address Assembler::thumb_target_pointer_at(Address pc) { |
| 600 Instr instr = thumb32_instr_at(pc); |
| 601 if (IsMovWThumb(instr)) { |
| 602 Instr next_instr = thumb32_instr_at(pc + kInstrSize); |
| 603 ASSERT(IsMovTThumb(next_instr)); |
| 604 return reinterpret_cast<Address>( |
| 605 (thumb32_movw_immediate(next_instr) << 16) | |
| 606 thumb32_movw_immediate(instr)); |
| 607 } |
| 608 return Memory::Address_at(thumb_target_pointer_address_at(pc)); |
| 609 } |
| 610 |
| 611 |
| 612 Address Assembler::thumb_target_pointer_address_at(Address pc) { |
| 613 // We must have a load from the constant pool. The address in the constant |
| 614 // pool is what needs to be patched. |
| 615 Instr instr = thumb32_instr_at(pc); |
| 616 ASSERT(IsThumbLdrPcImmediateOffset(instr)); |
| 617 pc = AlignAddress(pc); |
| 618 |
| 619 int offset = instr & 0xfff; // offset_12 is unsigned |
| 620 if ((instr & (1 << 23)) == 0) offset = -offset; // U bit defines offset sign |
| 621 // Verify that the constant pool comes after the instruction referencing it. |
| 622 ASSERT(offset >= -4); |
| 623 return pc + offset + kThumbPcLoadDelta; |
| 624 } |
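
Thumb literal loads use Align(PC, 4) as their base, where the PC reads as the instruction address plus 4; aligning first and then adding kThumbPcLoadDelta is equivalent. A worked example, assuming kThumbPcLoadDelta == 4:

    // A Thumb2 "ldr.w ip, [pc, #8]" at (untagged) address 0x1002:
    //   AlignAddress(0x1002) + 8 + 4 = 0x1000 + 12 = 0x100C
    // which matches the architectural rule Align(0x1002 + 4, 4) + 8 = 0x1004 + 8.
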
| 625 |
| 626 |
| 627 void Assembler::thumb_set_target_pointer_at(Address pc, |
| 628 Address target, |
| 629 Code* host) { |
| 630 Instr instr = thumb32_instr_at(pc); |
| 631 uint32_t immediate = reinterpret_cast<uint32_t>(target); |
| 632 if (IsMovWThumb(instr)) { |
| 633 Instr next_instr = thumb32_instr_at(pc + kInstrSize); |
| 634 ASSERT(IsMovTThumb(next_instr)); |
| 635 uint16_t* instr_ptr = reinterpret_cast<uint16_t*>(pc); |
| 636 uint32_t immediate_mask = thumb32_set_movw_immediate(0xFFFF); |
| 637 instr &= ~immediate_mask; |
| 638 instr |= thumb32_set_movw_immediate(immediate & 0xFFFF); |
| 639 next_instr &= ~immediate_mask; |
| 640 next_instr |= thumb32_set_movw_immediate(immediate >> 16); |
| 641 instr_ptr[0] = instr >> 16; |
| 642 instr_ptr[1] = instr & 0xFFFF; |
| 643 instr_ptr[2] = next_instr >> 16; |
| 644 instr_ptr[3] = next_instr & 0xFFFF; |
| 645 CPU::FlushICache(pc, 2 * kInstrSize); |
| 646 return; |
| 647 } else if (IsThumbLdrPcImmediateOffset(instr)) { |
| 648 Memory::Address_at(thumb_target_pointer_address_at(pc)) = target; |
| 649 return; |
| 650 } |
| 651 } |
| 652 |
545 } } // namespace v8::internal | 653 } } // namespace v8::internal |
546 | 654 |
547 #endif // V8_ARM_ASSEMBLER_ARM_INL_H_ | 655 #endif // V8_ARM_ASSEMBLER_ARM_INL_H_ |