OLD | NEW |
1 // Copyright 2006-2008 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 545 matching lines...)
556 | 556 |
557 __ ldr(r3, MemOperand(sp, 0)); | 557 __ ldr(r3, MemOperand(sp, 0)); |
558 __ stm(db_w, sp, r2.bit() | r3.bit()); | 558 __ stm(db_w, sp, r2.bit() | r3.bit()); |
559 | 559 |
560 // Perform tail call to the entry. | 560 // Perform tail call to the entry. |
561 ExternalReference ref = ExternalReference(IC_Utility(kLoadIC_Miss)); | 561 ExternalReference ref = ExternalReference(IC_Utility(kLoadIC_Miss)); |
562 __ TailCallExternalReference(ref, 2, 1); | 562 __ TailCallExternalReference(ref, 2, 1); |
563 } | 563 } |
564 | 564 |
565 | 565 |
566 void LoadIC::ClearInlinedVersion(Address address) { | 566 static inline bool IsInlinedICSite(Address address, |
567 // Reset the map check of the inlined inobject property load (if present) to | 567 Address* inline_end_address) { |
568 // guarantee failure by holding an invalid map (the null value). The offset | |
569 // can be patched to anything. | |
570 PatchInlinedLoad(address, Heap::null_value(), 0); | |
571 } | |
572 | |
573 | |
574 bool LoadIC::PatchInlinedLoad(Address address, Object* map, int offset) { | |
575 // If the instruction after the call site is not the pseudo instruction nop1 | 568 // If the instruction after the call site is not the pseudo instruction nop1 |
576 // then this is not related to an inlined in-object property load. The nop1 | 569 // then this is not related to an inlined in-object property load. The nop1 |
577 // instruction is located just after the call to the IC in the deferred code | 570 // instruction is located just after the call to the IC in the deferred code |
578 // handling the miss in the inlined code. After the nop1 instruction there is | 571 // handling the miss in the inlined code. After the nop1 instruction there is |
579 // a branch instruction for jumping back from the deferred code. | 572 // a branch instruction for jumping back from the deferred code. |
580 Address address_after_call = address + Assembler::kCallTargetAddressOffset; | 573 Address address_after_call = address + Assembler::kCallTargetAddressOffset; |
581 Instr instr_after_call = Assembler::instr_at(address_after_call); | 574 Instr instr_after_call = Assembler::instr_at(address_after_call); |
582 if (!Assembler::IsNop(instr_after_call, NAMED_PROPERTY_LOAD_INLINED)) { | 575 if (!Assembler::IsNop(instr_after_call, PROPERTY_LOAD_INLINED)) { |
583 return false; | 576 return false; |
584 } | 577 } |
585 ASSERT_EQ(0, RegisterAllocator::kNumRegisters); | 578 Address address_after_nop = address_after_call + Assembler::kInstrSize; |
586 Address address_after_nop1 = address_after_call + Assembler::kInstrSize; | 579 Instr instr_after_nop = Assembler::instr_at(address_after_nop); |
587 Instr instr_after_nop1 = Assembler::instr_at(address_after_nop1); | 580 ASSERT(Assembler::IsBranch(instr_after_nop)); |
588 ASSERT(Assembler::IsBranch(instr_after_nop1)); | |
589 | 581 |
590 // Find the end of the inlined code for handling the load. | 582 // Find the end of the inlined code for handling the load. |
591 int b_offset = | 583 int b_offset = |
592 Assembler::GetBranchOffset(instr_after_nop1) + Assembler::kPcLoadDelta; | 584 Assembler::GetBranchOffset(instr_after_nop) + Assembler::kPcLoadDelta; |
593 ASSERT(b_offset < 0); // Jumping back from deferred code. | 585 ASSERT(b_offset < 0); // Jumping back from deferred code. |
594 Address inline_end_address = address_after_nop1 + b_offset; | 586 *inline_end_address = address_after_nop + b_offset; |
| 587 |
| 588 return true; |
| 589 } |
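Note: the new IsInlinedICSite helper locates the end of the inlined code by decoding the backward branch that follows the marker nop. For readers unfamiliar with the ARM encoding, here is a standalone sketch of that arithmetic (not V8 code; it assumes the usual ARM B encoding and that Assembler::kPcLoadDelta is the 8-byte pipeline prefetch):

    #include <cstdint>
    #include <cstdio>

    // Sketch only: decode the signed 24-bit immediate of an ARM B instruction.
    // The immediate is a word offset taken relative to the branch address plus
    // 8 bytes of prefetch, which is what kPcLoadDelta accounts for above.
    int DecodeBranchOffset(uint32_t instr) {
      int32_t imm24 = static_cast<int32_t>(instr & 0x00FFFFFF);
      if (imm24 & 0x00800000) imm24 -= 0x01000000;  // sign-extend 24 -> 32 bits
      return imm24 * 4;                             // words -> bytes
    }

    int main() {
      // b <branch_address - 40> encodes imm24 = (-40 - 8) / 4 = -12 = 0xFFFFF4.
      uint32_t branch = 0xEAFFFFF4u;
      int delta = DecodeBranchOffset(branch) + 8;  // + kPcLoadDelta
      std::printf("branch lands %d bytes from the branch instruction\n", delta);
      return 0;
    }

A negative delta, as asserted in the helper, means the branch jumps back out of the deferred code into the inlined sequence, so inline_end_address ends up just past that sequence.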
| 590 |
| 591 |
| 592 void LoadIC::ClearInlinedVersion(Address address) { |
| 593 // Reset the map check of the inlined in-object property load (if present) to |
| 594 // guarantee failure by holding an invalid map (the null value). The offset |
| 595 // can be patched to anything. |
| 596 PatchInlinedLoad(address, Heap::null_value(), 0); |
| 597 } |
| 598 |
| 599 |
| 600 bool LoadIC::PatchInlinedLoad(Address address, Object* map, int offset) { |
| 601 // Find the end of the inlined code for handling the load if this is an |
| 602 // inlined IC call site. |
| 603 Address inline_end_address; |
| 604 if (!IsInlinedICSite(address, &inline_end_address)) return false; |
595 | 605 |
596 // Patch the offset of the property load instruction (ldr r0, [r1, #+XXX]). | 606 // Patch the offset of the property load instruction (ldr r0, [r1, #+XXX]). |
597 // The immediate must be represenatble in 12 bits. | 607 // The immediate must be representable in 12 bits. |
598 ASSERT((JSObject::kMaxInstanceSize - JSObject::kHeaderSize) < (1 << 12)); | 608 ASSERT((JSObject::kMaxInstanceSize - JSObject::kHeaderSize) < (1 << 12)); |
599 Address ldr_property_instr_address = inline_end_address - 4; | 609 Address ldr_property_instr_address = |
| 610 inline_end_address - Assembler::kInstrSize; |
600 ASSERT(Assembler::IsLdrRegisterImmediate( | 611 ASSERT(Assembler::IsLdrRegisterImmediate( |
601 Assembler::instr_at(ldr_property_instr_address))); | 612 Assembler::instr_at(ldr_property_instr_address))); |
602 Instr ldr_property_instr = Assembler::instr_at(ldr_property_instr_address); | 613 Instr ldr_property_instr = Assembler::instr_at(ldr_property_instr_address); |
603 ldr_property_instr = Assembler::SetLdrRegisterImmediateOffset( | 614 ldr_property_instr = Assembler::SetLdrRegisterImmediateOffset( |
604 ldr_property_instr, offset - kHeapObjectTag); | 615 ldr_property_instr, offset - kHeapObjectTag); |
605 Assembler::instr_at_put(ldr_property_instr_address, ldr_property_instr); | 616 Assembler::instr_at_put(ldr_property_instr_address, ldr_property_instr); |
606 | 617 |
607 // Indicate that code has changed. | 618 // Indicate that code has changed. |
608 CPU::FlushICache(ldr_property_instr_address, 1 * Assembler::kInstrSize); | 619 CPU::FlushICache(ldr_property_instr_address, 1 * Assembler::kInstrSize); |
609 | 620 |
610 // Patch the map check. | 621 // Patch the map check. |
611 Address ldr_map_instr_address = inline_end_address - 16; | 622 Address ldr_map_instr_address = |
| 623 inline_end_address - 4 * Assembler::kInstrSize; |
612 Assembler::set_target_address_at(ldr_map_instr_address, | 624 Assembler::set_target_address_at(ldr_map_instr_address, |
613 reinterpret_cast<Address>(map)); | 625 reinterpret_cast<Address>(map)); |
614 return true; | 626 return true; |
615 } | 627 } |
616 | 628 |
617 | 629 |
618 void KeyedLoadIC::ClearInlinedVersion(Address address) {} | 630 void KeyedLoadIC::ClearInlinedVersion(Address address) { |
| 631 // Reset the map check of the inlined keyed load (if present) to |
| 632 // guarantee failure by holding an invalid map (the null value). |
| 633 PatchInlinedLoad(address, Heap::null_value()); |
| 634 } |
619 | 635 |
620 | 636 |
621 bool KeyedLoadIC::PatchInlinedLoad(Address address, Object* map) { | 637 bool KeyedLoadIC::PatchInlinedLoad(Address address, Object* map) { |
622 return false; | 638 Address inline_end_address; |
| 639 if (!IsInlinedICSite(address, &inline_end_address)) return false; |
| 640 |
| 641 // Patch the map check. |
| 642 Address ldr_map_instr_address = |
| 643 inline_end_address - 19 * Assembler::kInstrSize; |
| 644 Assembler::set_target_address_at(ldr_map_instr_address, |
| 645 reinterpret_cast<Address>(map)); |
| 646 return true; |
623 } | 647 } |
624 | 648 |
625 | 649 |
626 void KeyedStoreIC::ClearInlinedVersion(Address address) {} | 650 void KeyedStoreIC::ClearInlinedVersion(Address address) {} |
627 | 651 |
628 | 652 |
629 void KeyedStoreIC::RestoreInlinedVersion(Address address) {} | 653 void KeyedStoreIC::RestoreInlinedVersion(Address address) {} |
630 | 654 |
631 | 655 |
632 bool KeyedStoreIC::PatchInlinedStore(Address address, Object* map) { | 656 bool KeyedStoreIC::PatchInlinedStore(Address address, Object* map) { |
(...skipping 1102 matching lines...)
1735 __ bind(&miss); | 1759 __ bind(&miss); |
1736 | 1760 |
1737 GenerateMiss(masm); | 1761 GenerateMiss(masm); |
1738 } | 1762 } |
1739 | 1763 |
1740 | 1764 |
1741 #undef __ | 1765 #undef __ |
1742 | 1766 |
1743 | 1767 |
1744 } } // namespace v8::internal | 1768 } } // namespace v8::internal |