| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // | 2 // |
| 3 // Redistribution and use in source and binary forms, with or without | 3 // Redistribution and use in source and binary forms, with or without |
| 4 // modification, are permitted provided that the following conditions are | 4 // modification, are permitted provided that the following conditions are |
| 5 // met: | 5 // met: |
| 6 // | 6 // |
| 7 // * Redistributions of source code must retain the above copyright | 7 // * Redistributions of source code must retain the above copyright |
| 8 // notice, this list of conditions and the following disclaimer. | 8 // notice, this list of conditions and the following disclaimer. |
| 9 // * Redistributions in binary form must reproduce the above | 9 // * Redistributions in binary form must reproduce the above |
| 10 // copyright notice, this list of conditions and the following | 10 // copyright notice, this list of conditions and the following |
| (...skipping 606 matching lines...) |
| 617 } | 617 } |
| 618 | 618 |
| 619 | 619 |
| 620 void Assembler::ConstantPoolMarker(uint32_t size) { | 620 void Assembler::ConstantPoolMarker(uint32_t size) { |
| 621 ASSERT(is_const_pool_blocked()); | 621 ASSERT(is_const_pool_blocked()); |
| 622 // + 1 is for the crash guard. | 622 // + 1 is for the crash guard. |
| 623 Emit(LDR_x_lit | ImmLLiteral(2 * size + 1) | Rt(xzr)); | 623 Emit(LDR_x_lit | ImmLLiteral(2 * size + 1) | Rt(xzr)); |
| 624 } | 624 } |
| 625 | 625 |
| 626 | 626 |
| 627 void Assembler::EmitPoolGuard() { |
| 628 // We must generate only one instruction because this is used in scopes
| 629 // that control the size of the generated code.
| 630 Emit(BLR | Rn(xzr)); |
| 631 } |
| 632 |
| 633 |
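The marker/guard pair is the subtle part of this hunk: the marker is an ldr into xzr whose literal immediate spans the pool data plus one extra word for the crash guard, and the guard itself is blr xzr, a branch to address zero that faults if control ever falls through into pool data. A standalone C++ sketch of the arithmetic (not V8 code; PoolMarkerImm is a made-up name):

#include <cstdint>
#include <cstdio>

// Standalone sketch, not V8 code. PoolMarkerImm is a hypothetical helper
// mirroring ConstantPoolMarker: 'size' is whatever the caller passes in,
// and the +1 accounts for the crash-guard word emitted after the marker.
uint32_t PoolMarkerImm(uint32_t size) {
  return 2 * size + 1;
}

int main() {
  // The pool-emission code below calls
  // ConstantPoolMarker(2 * num_pending_reloc_info_); with 4 pending
  // 64-bit entries that is PoolMarkerImm(2 * 4) == 17.
  std::printf("marker imm = %u\n", PoolMarkerImm(2 * 4));
  return 0;
}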
| 627 void Assembler::ConstantPoolGuard() { | 634 void Assembler::ConstantPoolGuard() { |
| 628 #ifdef DEBUG | 635 #ifdef DEBUG |
| 629 // Currently this is only used after a constant pool marker. | 636 // Currently this is only used after a constant pool marker. |
| 630 ASSERT(is_const_pool_blocked()); | 637 ASSERT(is_const_pool_blocked()); |
| 631 Instruction* instr = reinterpret_cast<Instruction*>(pc_); | 638 Instruction* instr = reinterpret_cast<Instruction*>(pc_); |
| 632 ASSERT(instr->preceding()->IsLdrLiteralX() && | 639 ASSERT(instr->preceding()->IsLdrLiteralX() && |
| 633 instr->preceding()->Rt() == xzr.code()); | 640 instr->preceding()->Rt() == xzr.code()); |
| 634 #endif | 641 #endif |
| 635 | 642 EmitPoolGuard(); |
| 636 // We must generate only one instruction. | |
| 637 Emit(BLR | Rn(xzr)); | |
| 638 } | 643 } |
| 639 | 644 |
| 640 | 645 |
| 641 void Assembler::StartBlockVeneerPool() { | 646 void Assembler::StartBlockVeneerPool() { |
| 642 ++veneer_pool_blocked_nesting_; | 647 ++veneer_pool_blocked_nesting_; |
| 643 } | 648 } |
| 644 | 649 |
| 645 | 650 |
| 646 void Assembler::EndBlockVeneerPool() { | 651 void Assembler::EndBlockVeneerPool() { |
| 647 if (--veneer_pool_blocked_nesting_ == 0) { | 652 if (--veneer_pool_blocked_nesting_ == 0) { |
| (...skipping 1772 matching lines...) |
| 2420 } | 2425 } |
| 2421 } | 2426 } |
| 2422 } | 2427 } |
| 2423 | 2428 |
| 2424 | 2429 |
| 2425 void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) { | 2430 void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) { |
| 2426 // We do not try to reuse pool constants. | 2431 // We do not try to reuse pool constants. |
| 2427 RelocInfo rinfo(reinterpret_cast<byte*>(pc_), rmode, data, NULL); | 2432 RelocInfo rinfo(reinterpret_cast<byte*>(pc_), rmode, data, NULL); |
| 2428 if (((rmode >= RelocInfo::JS_RETURN) && | 2433 if (((rmode >= RelocInfo::JS_RETURN) && |
| 2429 (rmode <= RelocInfo::DEBUG_BREAK_SLOT)) || | 2434 (rmode <= RelocInfo::DEBUG_BREAK_SLOT)) || |
| 2430 (rmode == RelocInfo::CONST_POOL)) { | 2435 (rmode == RelocInfo::CONST_POOL) || |
| 2436 (rmode == RelocInfo::VENEER_POOL)) { |
| 2431 // Adjust code for new modes. | 2437 // Adjust code for new modes. |
| 2432 ASSERT(RelocInfo::IsDebugBreakSlot(rmode) | 2438 ASSERT(RelocInfo::IsDebugBreakSlot(rmode) |
| 2433 || RelocInfo::IsJSReturn(rmode) | 2439 || RelocInfo::IsJSReturn(rmode) |
| 2434 || RelocInfo::IsComment(rmode) | 2440 || RelocInfo::IsComment(rmode) |
| 2435 || RelocInfo::IsPosition(rmode) | 2441 || RelocInfo::IsPosition(rmode) |
| 2436 || RelocInfo::IsConstPool(rmode)); | 2442 || RelocInfo::IsConstPool(rmode) |
| 2443 || RelocInfo::IsVeneerPool(rmode)); |
| 2437 // These modes do not need an entry in the constant pool. | 2444 // These modes do not need an entry in the constant pool. |
| 2438 } else { | 2445 } else { |
| 2439 ASSERT(num_pending_reloc_info_ < kMaxNumPendingRelocInfo); | 2446 ASSERT(num_pending_reloc_info_ < kMaxNumPendingRelocInfo); |
| 2440 if (num_pending_reloc_info_ == 0) { | 2447 if (num_pending_reloc_info_ == 0) { |
| 2441 first_const_pool_use_ = pc_offset(); | 2448 first_const_pool_use_ = pc_offset(); |
| 2442 } | 2449 } |
| 2443 pending_reloc_info_[num_pending_reloc_info_++] = rinfo; | 2450 pending_reloc_info_[num_pending_reloc_info_++] = rinfo; |
| 2444 // Make sure the constant pool is not emitted in place of the next | 2451 // Make sure the constant pool is not emitted in place of the next |
| 2445 // instruction for which we just recorded relocation info. | 2452 // instruction for which we just recorded relocation info. |
| 2446 BlockConstPoolFor(1); | 2453 BlockConstPoolFor(1); |
| (...skipping 116 matching lines...) |
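Stepping back, RecordRelocInfo splits modes into two groups: metadata-only modes (the JS_RETURN..DEBUG_BREAK_SLOT range plus the two pool markers, now including VENEER_POOL) are only written to the reloc stream, while every other mode also queues a pending 64-bit constant pool entry. A standalone sketch of that predicate, with invented enum values standing in for RelocInfo::Mode:

#include <cstdio>

// Standalone sketch, not V8 code: invented enum values standing in for
// RelocInfo::Mode, to show the classification RecordRelocInfo performs.
enum Mode { JS_RETURN, COMMENT, POSITION, STATEMENT_POSITION,
            DEBUG_BREAK_SLOT, CONST_POOL, VENEER_POOL, EMBEDDED_OBJECT };

// Modes in [JS_RETURN, DEBUG_BREAK_SLOT] plus the two pool markers carry
// metadata only; they never need a 64-bit constant pool slot.
bool NeedsConstPoolEntry(Mode m) {
  bool metadata_only = (m >= JS_RETURN && m <= DEBUG_BREAK_SLOT) ||
                       m == CONST_POOL || m == VENEER_POOL;
  return !metadata_only;
}

int main() {
  std::printf("VENEER_POOL -> %d\n", NeedsConstPoolEntry(VENEER_POOL));         // 0
  std::printf("EMBEDDED_OBJECT -> %d\n", NeedsConstPoolEntry(EMBEDDED_OBJECT)); // 1
  return 0;
}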
| 2563 // support for 32-bit entries. | 2570 // support for 32-bit entries. |
| 2564 ConstantPoolMarker(2 * num_pending_reloc_info_); | 2571 ConstantPoolMarker(2 * num_pending_reloc_info_); |
| 2565 ConstantPoolGuard(); | 2572 ConstantPoolGuard(); |
| 2566 | 2573 |
| 2567 // Emit constant pool entries. | 2574 // Emit constant pool entries. |
| 2568 for (int i = 0; i < num_pending_reloc_info_; i++) { | 2575 for (int i = 0; i < num_pending_reloc_info_; i++) { |
| 2569 RelocInfo& rinfo = pending_reloc_info_[i]; | 2576 RelocInfo& rinfo = pending_reloc_info_[i]; |
| 2570 ASSERT(rinfo.rmode() != RelocInfo::COMMENT && | 2577 ASSERT(rinfo.rmode() != RelocInfo::COMMENT && |
| 2571 rinfo.rmode() != RelocInfo::POSITION && | 2578 rinfo.rmode() != RelocInfo::POSITION && |
| 2572 rinfo.rmode() != RelocInfo::STATEMENT_POSITION && | 2579 rinfo.rmode() != RelocInfo::STATEMENT_POSITION && |
| 2573 rinfo.rmode() != RelocInfo::CONST_POOL); | 2580 rinfo.rmode() != RelocInfo::CONST_POOL && |
| 2581 rinfo.rmode() != RelocInfo::VENEER_POOL); |
| 2574 | 2582 |
| 2575 Instruction* instr = reinterpret_cast<Instruction*>(rinfo.pc()); | 2583 Instruction* instr = reinterpret_cast<Instruction*>(rinfo.pc()); |
| 2576 // Instruction to patch must be 'ldr rd, [pc, #offset]' with offset == 0. | 2584 // Instruction to patch must be 'ldr rd, [pc, #offset]' with offset == 0. |
| 2577 ASSERT(instr->IsLdrLiteral() && | 2585 ASSERT(instr->IsLdrLiteral() && |
| 2578 instr->ImmLLiteral() == 0); | 2586 instr->ImmLLiteral() == 0); |
| 2579 | 2587 |
| 2580 instr->SetImmPCOffsetTarget(reinterpret_cast<Instruction*>(pc_)); | 2588 instr->SetImmPCOffsetTarget(reinterpret_cast<Instruction*>(pc_)); |
| 2581 dc64(rinfo.data()); | 2589 dc64(rinfo.data()); |
| 2582 } | 2590 } |
| 2583 | 2591 |
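The patching step above leans on the A64 LDR-literal encoding: the 19-bit literal field at bits 23..5 holds a word-scaled PC-relative offset. A standalone sketch of what SetImmPCOffsetTarget boils down to for this instruction form (PatchLdrLiteral is a made-up name; 0x58000000 is the 64-bit LDR-literal base opcode):

#include <cstdint>
#include <cstdio>

// Standalone sketch, not V8 code: splice a new word-scaled PC-relative
// offset into the imm19 field (bits 23..5) of an LDR-literal instruction.
uint32_t PatchLdrLiteral(uint32_t instr, int64_t instr_pc, int64_t target) {
  int64_t imm19 = (target - instr_pc) >> 2;   // offset in 4-byte words
  uint32_t mask = ((1u << 19) - 1) << 5;      // imm19 occupies bits 23..5
  return (instr & ~mask) | ((static_cast<uint32_t>(imm19) << 5) & mask);
}

int main() {
  // An 'ldr x0, [pc, #0]' placeholder at 0x1000 patched to load the
  // constant at 0x1040: the literal field becomes (0x1040 - 0x1000) / 4.
  uint32_t patched = PatchLdrLiteral(0x58000000u, 0x1000, 0x1040);
  std::printf("imm19 = %u\n", (patched >> 5) & ((1u << 19) - 1));  // 16
  return 0;
}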
| (...skipping 17 matching lines...) |
| 2601 | 2609 |
| 2602 | 2610 |
| 2603 bool Assembler::ShouldEmitVeneer(int max_reachable_pc, int margin) { | 2611 bool Assembler::ShouldEmitVeneer(int max_reachable_pc, int margin) { |
| 2604 // Account for the branch around the veneers and the guard. | 2612 // Account for the branch around the veneers and the guard. |
| 2605 int protection_offset = 2 * kInstructionSize; | 2613 int protection_offset = 2 * kInstructionSize; |
| 2606 return pc_offset() > max_reachable_pc - margin - protection_offset - | 2614 return pc_offset() > max_reachable_pc - margin - protection_offset - |
| 2607 static_cast<int>(unresolved_branches_.size() * kMaxVeneerCodeSize); | 2615 static_cast<int>(unresolved_branches_.size() * kMaxVeneerCodeSize); |
| 2608 } | 2616 } |
| 2609 | 2617 |
| 2610 | 2618 |
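A worked example of this threshold may help review; the sketch below is standalone C++. kInstructionSize is 4 on A64, and kMaxVeneerCodeSize is assumed here to be a single instruction, consistent with the one-instruction veneers (a lone b(label)) emitted in EmitVeneers below.

#include <cstdio>

// Standalone sketch of ShouldEmitVeneer's arithmetic, not V8 code.
// kMaxVeneerCodeSize = one instruction is an assumption (a veneer is a
// single unconditional branch in the emission loop below).
const int kInstructionSize = 4;
const int kMaxVeneerCodeSize = 1 * kInstructionSize;

bool ShouldEmitVeneer(int pc_offset, int max_reachable_pc, int margin,
                      int num_unresolved_branches) {
  // Two instructions of protection: the branch around the veneers and
  // the pool guard.
  int protection_offset = 2 * kInstructionSize;
  return pc_offset > max_reachable_pc - margin - protection_offset -
                     num_unresolved_branches * kMaxVeneerCodeSize;
}

int main() {
  // A TBZ at offset 0 reaches +/-32KB, so max_reachable_pc = 32768. With
  // a 2KB margin and 10 unresolved branches, veneers are due once
  // pc_offset passes 32768 - 2048 - 8 - 40 = 30672.
  std::printf("%d\n", ShouldEmitVeneer(30673, 32768, 2048, 10));  // 1
  std::printf("%d\n", ShouldEmitVeneer(30000, 32768, 2048, 10));  // 0
  return 0;
}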
| 2619 void Assembler::RecordVeneerPool(int location_offset, int size) { |
| 2620 #ifdef ENABLE_DEBUGGER_SUPPORT |
| 2621 RelocInfo rinfo(buffer_ + location_offset, |
| 2622 RelocInfo::VENEER_POOL, static_cast<intptr_t>(size), |
| 2623 NULL); |
| 2624 reloc_info_writer.Write(&rinfo); |
| 2625 #endif |
| 2626 } |
| 2627 |
| 2628 |
| 2611 void Assembler::EmitVeneers(bool need_protection, int margin) { | 2629 void Assembler::EmitVeneers(bool need_protection, int margin) { |
| 2612 BlockPoolsScope scope(this); | 2630 BlockPoolsScope scope(this); |
| 2613 RecordComment("[ Veneers"); | 2631 RecordComment("[ Veneers"); |
| 2614 | 2632 |
| 2633 // The exact size of the veneer pool must be recorded (see the comment at
| 2634 // the declaration site of RecordConstPool()), but it is not obvious ahead
| 2635 // of time how many veneers will be generated. So instead we remember the
| 2636 // current position and record the size once the pool has been
| 2637 // generated.
| 2638 Label size_check; |
| 2639 bind(&size_check); |
| 2640 int veneer_pool_relocinfo_loc = pc_offset(); |
| 2641 #ifdef DEBUG |
| 2642 byte* reloc_writer_record_pos = reloc_info_writer.pos(); |
| 2643 #endif |
| 2644 |
| 2615 Label end; | 2645 Label end; |
| 2616 if (need_protection) { | 2646 if (need_protection) { |
| 2617 b(&end); | 2647 b(&end); |
| 2618 } | 2648 } |
| 2619 | 2649 |
| 2620 EmitVeneersGuard(); | 2650 EmitVeneersGuard(); |
| 2621 | 2651 |
| 2622 Label size_check; | 2652 Label veneer_size_check; |
| 2623 | 2653 |
| 2624 std::multimap<int, FarBranchInfo>::iterator it, it_to_delete; | 2654 std::multimap<int, FarBranchInfo>::iterator it, it_to_delete; |
| 2625 | 2655 |
| 2626 it = unresolved_branches_.begin(); | 2656 it = unresolved_branches_.begin(); |
| 2627 while (it != unresolved_branches_.end()) { | 2657 while (it != unresolved_branches_.end()) { |
| 2628 if (ShouldEmitVeneer(it->first, margin)) { | 2658 if (ShouldEmitVeneer(it->first, margin)) { |
| 2629 Instruction* branch = InstructionAt(it->second.pc_offset_); | 2659 Instruction* branch = InstructionAt(it->second.pc_offset_); |
| 2630 Label* label = it->second.label_; | 2660 Label* label = it->second.label_; |
| 2631 | 2661 |
| 2632 #ifdef DEBUG | 2662 #ifdef DEBUG |
| 2633 bind(&size_check); | 2663 bind(&veneer_size_check); |
| 2634 #endif | 2664 #endif |
| 2635 // Patch the branch to point to the current position, and emit a branch | 2665 // Patch the branch to point to the current position, and emit a branch |
| 2636 // to the label. | 2666 // to the label. |
| 2637 Instruction* veneer = reinterpret_cast<Instruction*>(pc_); | 2667 Instruction* veneer = reinterpret_cast<Instruction*>(pc_); |
| 2638 RemoveBranchFromLabelLinkChain(branch, label, veneer); | 2668 RemoveBranchFromLabelLinkChain(branch, label, veneer); |
| 2639 branch->SetImmPCOffsetTarget(veneer); | 2669 branch->SetImmPCOffsetTarget(veneer); |
| 2640 b(label); | 2670 b(label); |
| 2641 #ifdef DEBUG | 2671 #ifdef DEBUG |
| 2642 ASSERT(SizeOfCodeGeneratedSince(&size_check) <= | 2672 ASSERT(SizeOfCodeGeneratedSince(&veneer_size_check) <= |
| 2643 static_cast<uint64_t>(kMaxVeneerCodeSize)); | 2673 static_cast<uint64_t>(kMaxVeneerCodeSize)); |
| 2644 size_check.Unuse(); | 2674 veneer_size_check.Unuse(); |
| 2645 #endif | 2675 #endif |
| 2646 | 2676 |
| 2647 it_to_delete = it++; | 2677 it_to_delete = it++; |
| 2648 unresolved_branches_.erase(it_to_delete); | 2678 unresolved_branches_.erase(it_to_delete); |
| 2649 } else { | 2679 } else { |
| 2650 ++it; | 2680 ++it; |
| 2651 } | 2681 } |
| 2652 } | 2682 } |
| 2653 | 2683 |
| 2684 // Record the veneer pool size. |
| 2685 ASSERT(reloc_writer_record_pos == reloc_info_writer.pos()); |
| 2686 int pool_size = static_cast<int>(SizeOfCodeGeneratedSince(&size_check));
| 2687 RecordVeneerPool(veneer_pool_relocinfo_loc, pool_size); |
| 2688 |
| 2654 if (unresolved_branches_.empty()) { | 2689 if (unresolved_branches_.empty()) { |
| 2655 next_veneer_pool_check_ = kMaxInt; | 2690 next_veneer_pool_check_ = kMaxInt; |
| 2656 } else { | 2691 } else { |
| 2657 next_veneer_pool_check_ = | 2692 next_veneer_pool_check_ = |
| 2658 unresolved_branches_first_limit() - kVeneerDistanceCheckMargin; | 2693 unresolved_branches_first_limit() - kVeneerDistanceCheckMargin; |
| 2659 } | 2694 } |
| 2660 | 2695 |
| 2661 bind(&end); | 2696 bind(&end); |
| 2662 | 2697 |
| 2663 RecordComment("]"); | 2698 RecordComment("]"); |
| 2664 } | 2699 } |
| 2665 | 2700 |
| 2666 | 2701 |
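One C++ detail in EmitVeneers worth flagging: the loop mutates unresolved_branches_ while iterating, relying on the classic pre-C++11 multimap idiom (it_to_delete = it++) so the iterator being erased has already been advanced past. A standalone sketch of the pattern, with FarBranchInfo reduced to a stub:

#include <cstdio>
#include <map>

// Standalone sketch, not V8 code. unresolved_branches_ maps
// max_reachable_pc -> branch info; entries whose limit is too close are
// veneered and removed in a single pass over the multimap.
struct FarBranchInfo { int pc_offset_; };  // simplified stand-in

int main() {
  std::multimap<int, FarBranchInfo> unresolved;
  unresolved.insert({30000, FarBranchInfo{100}});
  unresolved.insert({60000, FarBranchInfo{200}});

  int pc = 29000, margin = 2048;
  std::multimap<int, FarBranchInfo>::iterator it = unresolved.begin();
  while (it != unresolved.end()) {
    if (pc > it->first - margin) {  // stands in for ShouldEmitVeneer
      // Post-increment keeps iteration valid: the copy is erased while
      // 'it' has already moved to the next element.
      std::multimap<int, FarBranchInfo>::iterator dead = it++;
      unresolved.erase(dead);
    } else {
      ++it;
    }
  }
  std::printf("remaining: %zu\n", unresolved.size());  // remaining: 1
  return 0;
}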
| 2667 void Assembler::EmitVeneersGuard() { | |
| 2668 if (emit_debug_code()) { | |
| 2669 Unreachable(); | |
| 2670 } | |
| 2671 } | |
| 2672 | |
| 2673 | |
| 2674 void Assembler::CheckVeneerPool(bool require_jump, | 2702 void Assembler::CheckVeneerPool(bool require_jump, |
| 2675 int margin) { | 2703 int margin) { |
| 2676 // There is nothing to do if there are no pending veneer pool entries. | 2704 // There is nothing to do if there are no pending veneer pool entries. |
| 2677 if (unresolved_branches_.empty()) { | 2705 if (unresolved_branches_.empty()) { |
| 2678 ASSERT(next_veneer_pool_check_ == kMaxInt); | 2706 ASSERT(next_veneer_pool_check_ == kMaxInt); |
| 2679 return; | 2707 return; |
| 2680 } | 2708 } |
| 2681 | 2709 |
| 2682 ASSERT(pc_offset() < unresolved_branches_first_limit()); | 2710 ASSERT(pc_offset() < unresolved_branches_first_limit()); |
| 2683 | 2711 |
| (...skipping 49 matching lines...) |
| 2733 // code. | 2761 // code. |
| 2734 #ifdef ENABLE_DEBUGGER_SUPPORT | 2762 #ifdef ENABLE_DEBUGGER_SUPPORT |
| 2735 RecordRelocInfo(RelocInfo::CONST_POOL, static_cast<intptr_t>(size)); | 2763 RecordRelocInfo(RelocInfo::CONST_POOL, static_cast<intptr_t>(size)); |
| 2736 #endif | 2764 #endif |
| 2737 } | 2765 } |
| 2738 | 2766 |
| 2739 | 2767 |
| 2740 } } // namespace v8::internal | 2768 } } // namespace v8::internal |
| 2741 | 2769 |
| 2742 #endif // V8_TARGET_ARCH_A64 | 2770 #endif // V8_TARGET_ARCH_A64 |