OLD | NEW |
---|---|
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // | 2 // |
3 // Redistribution and use in source and binary forms, with or without | 3 // Redistribution and use in source and binary forms, with or without |
4 // modification, are permitted provided that the following conditions are | 4 // modification, are permitted provided that the following conditions are |
5 // met: | 5 // met: |
6 // | 6 // |
7 // * Redistributions of source code must retain the above copyright | 7 // * Redistributions of source code must retain the above copyright |
8 // notice, this list of conditions and the following disclaimer. | 8 // notice, this list of conditions and the following disclaimer. |
9 // * Redistributions in binary form must reproduce the above | 9 // * Redistributions in binary form must reproduce the above |
10 // copyright notice, this list of conditions and the following | 10 // copyright notice, this list of conditions and the following |
(...skipping 606 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
617 } | 617 } |
618 | 618 |
619 | 619 |
620 void Assembler::ConstantPoolMarker(uint32_t size) { | 620 void Assembler::ConstantPoolMarker(uint32_t size) { |
621 ASSERT(is_const_pool_blocked()); | 621 ASSERT(is_const_pool_blocked()); |
622 // + 1 is for the crash guard. | 622 // + 1 is for the crash guard. |
623 Emit(LDR_x_lit | ImmLLiteral(2 * size + 1) | Rt(xzr)); | 623 Emit(LDR_x_lit | ImmLLiteral(2 * size + 1) | Rt(xzr)); |
624 } | 624 } |
625 | 625 |
626 | 626 |
627 void Assembler::EmitPoolGuard() { | |
628 // We must generate only one instruction as this is used in scopes that | |
629 // control the size of the code generated. | |
630 Emit(BLR | Rn(xzr)); | |
631 } | |
632 | |
633 | |
627 void Assembler::ConstantPoolGuard() { | 634 void Assembler::ConstantPoolGuard() { |
628 #ifdef DEBUG | 635 #ifdef DEBUG |
629 // Currently this is only used after a constant pool marker. | 636 // Currently this is only used after a constant pool marker. |
630 ASSERT(is_const_pool_blocked()); | 637 ASSERT(is_const_pool_blocked()); |
631 Instruction* instr = reinterpret_cast<Instruction*>(pc_); | 638 Instruction* instr = reinterpret_cast<Instruction*>(pc_); |
632 ASSERT(instr->preceding()->IsLdrLiteralX() && | 639 ASSERT(instr->preceding()->IsLdrLiteralX() && |
633 instr->preceding()->Rt() == xzr.code()); | 640 instr->preceding()->Rt() == xzr.code()); |
634 #endif | 641 #endif |
635 | 642 EmitPoolGuard(); |
636 // We must generate only one instruction. | |
637 Emit(BLR | Rn(xzr)); | |
638 } | 643 } |
639 | 644 |
640 | 645 |
641 void Assembler::StartBlockVeneerPool() { | 646 void Assembler::StartBlockVeneerPool() { |
642 ++veneer_pool_blocked_nesting_; | 647 ++veneer_pool_blocked_nesting_; |
643 } | 648 } |
644 | 649 |
645 | 650 |
646 void Assembler::EndBlockVeneerPool() { | 651 void Assembler::EndBlockVeneerPool() { |
647 if (--veneer_pool_blocked_nesting_ == 0) { | 652 if (--veneer_pool_blocked_nesting_ == 0) { |
(...skipping 1772 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
2420 } | 2425 } |
2421 } | 2426 } |
2422 } | 2427 } |
2423 | 2428 |
2424 | 2429 |
2425 void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) { | 2430 void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) { |
2426 // We do not try to reuse pool constants. | 2431 // We do not try to reuse pool constants. |
2427 RelocInfo rinfo(reinterpret_cast<byte*>(pc_), rmode, data, NULL); | 2432 RelocInfo rinfo(reinterpret_cast<byte*>(pc_), rmode, data, NULL); |
2428 if (((rmode >= RelocInfo::JS_RETURN) && | 2433 if (((rmode >= RelocInfo::JS_RETURN) && |
2429 (rmode <= RelocInfo::DEBUG_BREAK_SLOT)) || | 2434 (rmode <= RelocInfo::DEBUG_BREAK_SLOT)) || |
2430 (rmode == RelocInfo::CONST_POOL)) { | 2435 (rmode == RelocInfo::CONST_POOL) || |
2436 (rmode == RelocInfo::VENEER_POOL)) { | |
2431 // Adjust code for new modes. | 2437 // Adjust code for new modes. |
2432 ASSERT(RelocInfo::IsDebugBreakSlot(rmode) | 2438 ASSERT(RelocInfo::IsDebugBreakSlot(rmode) |
2433 || RelocInfo::IsJSReturn(rmode) | 2439 || RelocInfo::IsJSReturn(rmode) |
2434 || RelocInfo::IsComment(rmode) | 2440 || RelocInfo::IsComment(rmode) |
2435 || RelocInfo::IsPosition(rmode) | 2441 || RelocInfo::IsPosition(rmode) |
2436 || RelocInfo::IsConstPool(rmode)); | 2442 || RelocInfo::IsConstPool(rmode) |
2443 || RelocInfo::IsVeneerPool(rmode)); | |
2437 // These modes do not need an entry in the constant pool. | 2444 // These modes do not need an entry in the constant pool. |
2438 } else { | 2445 } else { |
2439 ASSERT(num_pending_reloc_info_ < kMaxNumPendingRelocInfo); | 2446 ASSERT(num_pending_reloc_info_ < kMaxNumPendingRelocInfo); |
2440 if (num_pending_reloc_info_ == 0) { | 2447 if (num_pending_reloc_info_ == 0) { |
2441 first_const_pool_use_ = pc_offset(); | 2448 first_const_pool_use_ = pc_offset(); |
2442 } | 2449 } |
2443 pending_reloc_info_[num_pending_reloc_info_++] = rinfo; | 2450 pending_reloc_info_[num_pending_reloc_info_++] = rinfo; |
2444 // Make sure the constant pool is not emitted in place of the next | 2451 // Make sure the constant pool is not emitted in place of the next |
2445 // instruction for which we just recorded relocation info. | 2452 // instruction for which we just recorded relocation info. |
2446 BlockConstPoolFor(1); | 2453 BlockConstPoolFor(1); |
(...skipping 116 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
2563 // support for 32-bit entries. | 2570 // support for 32-bit entries. |
2564 ConstantPoolMarker(2 * num_pending_reloc_info_); | 2571 ConstantPoolMarker(2 * num_pending_reloc_info_); |
2565 ConstantPoolGuard(); | 2572 ConstantPoolGuard(); |
2566 | 2573 |
2567 // Emit constant pool entries. | 2574 // Emit constant pool entries. |
2568 for (int i = 0; i < num_pending_reloc_info_; i++) { | 2575 for (int i = 0; i < num_pending_reloc_info_; i++) { |
2569 RelocInfo& rinfo = pending_reloc_info_[i]; | 2576 RelocInfo& rinfo = pending_reloc_info_[i]; |
2570 ASSERT(rinfo.rmode() != RelocInfo::COMMENT && | 2577 ASSERT(rinfo.rmode() != RelocInfo::COMMENT && |
2571 rinfo.rmode() != RelocInfo::POSITION && | 2578 rinfo.rmode() != RelocInfo::POSITION && |
2572 rinfo.rmode() != RelocInfo::STATEMENT_POSITION && | 2579 rinfo.rmode() != RelocInfo::STATEMENT_POSITION && |
2573 rinfo.rmode() != RelocInfo::CONST_POOL); | 2580 rinfo.rmode() != RelocInfo::CONST_POOL && |
2581 rinfo.rmode() != RelocInfo::VENEER_POOL); | |
2574 | 2582 |
2575 Instruction* instr = reinterpret_cast<Instruction*>(rinfo.pc()); | 2583 Instruction* instr = reinterpret_cast<Instruction*>(rinfo.pc()); |
2576 // Instruction to patch must be 'ldr rd, [pc, #offset]' with offset == 0. | 2584 // Instruction to patch must be 'ldr rd, [pc, #offset]' with offset == 0. |
2577 ASSERT(instr->IsLdrLiteral() && | 2585 ASSERT(instr->IsLdrLiteral() && |
2578 instr->ImmLLiteral() == 0); | 2586 instr->ImmLLiteral() == 0); |
2579 | 2587 |
2580 instr->SetImmPCOffsetTarget(reinterpret_cast<Instruction*>(pc_)); | 2588 instr->SetImmPCOffsetTarget(reinterpret_cast<Instruction*>(pc_)); |
2581 dc64(rinfo.data()); | 2589 dc64(rinfo.data()); |
2582 } | 2590 } |
2583 | 2591 |
(...skipping 21 matching lines...) Expand all Loading... | |
2605 int protection_offset = 2 * kInstructionSize; | 2613 int protection_offset = 2 * kInstructionSize; |
2606 return pc_offset() > max_reachable_pc - margin - protection_offset - | 2614 return pc_offset() > max_reachable_pc - margin - protection_offset - |
2607 static_cast<int>(unresolved_branches_.size() * kMaxVeneerCodeSize); | 2615 static_cast<int>(unresolved_branches_.size() * kMaxVeneerCodeSize); |
2608 } | 2616 } |
2609 | 2617 |
2610 | 2618 |
2611 void Assembler::EmitVeneers(bool need_protection, int margin) { | 2619 void Assembler::EmitVeneers(bool need_protection, int margin) { |
2612 BlockPoolsScope scope(this); | 2620 BlockPoolsScope scope(this); |
2613 RecordComment("[ Veneers"); | 2621 RecordComment("[ Veneers"); |
2614 | 2622 |
2623 // The exact size of the veneer pool must be recorded (see the comment at the | |
2624 // declaration site of RecordConstPool()), but computing the number of | |
2625 // veneers that will be generated is not obvious. So instead we remember the | |
2626 // current position and will record the size after the pool has been | |
2627 // generated. | |
2628 Label size_check; | |
2629 bind(&size_check); | |
2630 int veneer_pool_relocinfo_loc = pc_offset(); | |
2631 #ifdef DEBUG | |
2632 byte* reloc_writer_record_pos = reloc_info_writer.pos(); | |
2633 #endif | |
2634 | |
2615 Label end; | 2635 Label end; |
2616 if (need_protection) { | 2636 if (need_protection) { |
2617 b(&end); | 2637 b(&end); |
2618 } | 2638 } |
2619 | 2639 |
2620 EmitVeneersGuard(); | 2640 EmitVeneersGuard(); |
2621 | 2641 |
2622 Label size_check; | 2642 Label veneer_size_check; |
2623 | 2643 |
2624 std::multimap<int, FarBranchInfo>::iterator it, it_to_delete; | 2644 std::multimap<int, FarBranchInfo>::iterator it, it_to_delete; |
2625 | 2645 |
2626 it = unresolved_branches_.begin(); | 2646 it = unresolved_branches_.begin(); |
2627 while (it != unresolved_branches_.end()) { | 2647 while (it != unresolved_branches_.end()) { |
2628 if (ShouldEmitVeneer(it->first, margin)) { | 2648 if (ShouldEmitVeneer(it->first, margin)) { |
2629 Instruction* branch = InstructionAt(it->second.pc_offset_); | 2649 Instruction* branch = InstructionAt(it->second.pc_offset_); |
2630 Label* label = it->second.label_; | 2650 Label* label = it->second.label_; |
2631 | 2651 |
2632 #ifdef DEBUG | 2652 #ifdef DEBUG |
2633 bind(&size_check); | 2653 bind(&veneer_size_check); |
2634 #endif | 2654 #endif |
2635 // Patch the branch to point to the current position, and emit a branch | 2655 // Patch the branch to point to the current position, and emit a branch |
2636 // to the label. | 2656 // to the label. |
2637 Instruction* veneer = reinterpret_cast<Instruction*>(pc_); | 2657 Instruction* veneer = reinterpret_cast<Instruction*>(pc_); |
2638 RemoveBranchFromLabelLinkChain(branch, label, veneer); | 2658 RemoveBranchFromLabelLinkChain(branch, label, veneer); |
2639 branch->SetImmPCOffsetTarget(veneer); | 2659 branch->SetImmPCOffsetTarget(veneer); |
2640 b(label); | 2660 b(label); |
2641 #ifdef DEBUG | 2661 #ifdef DEBUG |
2642 ASSERT(SizeOfCodeGeneratedSince(&size_check) <= | 2662 ASSERT(SizeOfCodeGeneratedSince(&veneer_size_check) <= |
2643 static_cast<uint64_t>(kMaxVeneerCodeSize)); | 2663 static_cast<uint64_t>(kMaxVeneerCodeSize)); |
2644 size_check.Unuse(); | 2664 veneer_size_check.Unuse(); |
2645 #endif | 2665 #endif |
2646 | 2666 |
2647 it_to_delete = it++; | 2667 it_to_delete = it++; |
2648 unresolved_branches_.erase(it_to_delete); | 2668 unresolved_branches_.erase(it_to_delete); |
2649 } else { | 2669 } else { |
2650 ++it; | 2670 ++it; |
2651 } | 2671 } |
2652 } | 2672 } |
2653 | 2673 |
2674 // Record the veneer pool size. | |
2675 ASSERT(reloc_writer_record_pos == reloc_info_writer.pos()); | |
2676 int pool_size = SizeOfCodeGeneratedSince(&size_check); | |
2677 RelocInfo rinfo(buffer_ + veneer_pool_relocinfo_loc, | |
2678 RelocInfo::VENEER_POOL, static_cast<intptr_t>(pool_size), | |
ulan
2014/03/07 10:51:57
Are there tests checking that we read the size correctly?
| |
2679 NULL); | |
2680 reloc_info_writer.Write(&rinfo); | |
2681 | |
2654 if (unresolved_branches_.empty()) { | 2682 if (unresolved_branches_.empty()) { |
2655 next_veneer_pool_check_ = kMaxInt; | 2683 next_veneer_pool_check_ = kMaxInt; |
2656 } else { | 2684 } else { |
2657 next_veneer_pool_check_ = | 2685 next_veneer_pool_check_ = |
2658 unresolved_branches_first_limit() - kVeneerDistanceCheckMargin; | 2686 unresolved_branches_first_limit() - kVeneerDistanceCheckMargin; |
2659 } | 2687 } |
2660 | 2688 |
2661 bind(&end); | 2689 bind(&end); |
2662 | 2690 |
2663 RecordComment("]"); | 2691 RecordComment("]"); |
2664 } | 2692 } |
2665 | 2693 |
2666 | 2694 |
2667 void Assembler::EmitVeneersGuard() { | |
2668 if (emit_debug_code()) { | |
2669 Unreachable(); | |
2670 } | |
2671 } | |
2672 | |
2673 | |
2674 void Assembler::CheckVeneerPool(bool require_jump, | 2695 void Assembler::CheckVeneerPool(bool require_jump, |
2675 int margin) { | 2696 int margin) { |
2676 // There is nothing to do if there are no pending veneer pool entries. | 2697 // There is nothing to do if there are no pending veneer pool entries. |
2677 if (unresolved_branches_.empty()) { | 2698 if (unresolved_branches_.empty()) { |
2678 ASSERT(next_veneer_pool_check_ == kMaxInt); | 2699 ASSERT(next_veneer_pool_check_ == kMaxInt); |
2679 return; | 2700 return; |
2680 } | 2701 } |
2681 | 2702 |
2682 ASSERT(pc_offset() < unresolved_branches_first_limit()); | 2703 ASSERT(pc_offset() < unresolved_branches_first_limit()); |
2683 | 2704 |
(...skipping 49 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
2733 // code. | 2754 // code. |
2734 #ifdef ENABLE_DEBUGGER_SUPPORT | 2755 #ifdef ENABLE_DEBUGGER_SUPPORT |
2735 RecordRelocInfo(RelocInfo::CONST_POOL, static_cast<intptr_t>(size)); | 2756 RecordRelocInfo(RelocInfo::CONST_POOL, static_cast<intptr_t>(size)); |
2736 #endif | 2757 #endif |
2737 } | 2758 } |
2738 | 2759 |
2739 | 2760 |
2740 } } // namespace v8::internal | 2761 } } // namespace v8::internal |
2741 | 2762 |
2742 #endif // V8_TARGET_ARCH_A64 | 2763 #endif // V8_TARGET_ARCH_A64 |
OLD | NEW |