| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 540 matching lines...) |
| 551 Strh(rt, addr); | 551 Strh(rt, addr); |
| 552 } else if (r.IsInteger32()) { | 552 } else if (r.IsInteger32()) { |
| 553 Str(rt.W(), addr); | 553 Str(rt.W(), addr); |
| 554 } else { | 554 } else { |
| 555 ASSERT(rt.Is64Bits()); | 555 ASSERT(rt.Is64Bits()); |
| 556 Str(rt, addr); | 556 Str(rt, addr); |
| 557 } | 557 } |
| 558 } | 558 } |
| 559 | 559 |
| 560 | 560 |
| 561 bool MacroAssembler::ShouldEmitVeneer(int max_reachable_pc, int margin) { | |
| 562 // Account for the branch around the veneers and the guard. | |
| 563 int protection_offset = 2 * kInstructionSize; | |
| 564 return pc_offset() > max_reachable_pc - margin - protection_offset - | |
| 565 static_cast<int>(unresolved_branches_.size() * kMaxVeneerCodeSize); | |
| 566 } | |
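For reviewers of the veneer logic being removed here, the distance check above can be restated as a standalone sketch. The constants (`kInstrSize`, `kMaxVeneerSize`, `kMargin`) and the name `ShouldEmitVeneerSketch` are hypothetical stand-ins, not V8 identifiers; only the arithmetic is meant to mirror the check.

```cpp
// Standalone sketch of the reachability test in ShouldEmitVeneer above.
#include <cstdio>

namespace {

const int kInstrSize = 4;                   // AArch64 instructions are 4 bytes.
const int kMaxVeneerSize = 1 * kInstrSize;  // One unconditional branch per veneer (assumed).
const int kMargin = 2 * 1024;               // Hypothetical 2KB safety margin.

// Returns true if, after reserving room for the protective branch, the guard
// and one veneer per pending branch, the current position is too close to the
// furthest pc the pending branch can still reach.
bool ShouldEmitVeneerSketch(int pc_offset, int max_reachable_pc,
                            int unresolved_branch_count) {
  int protection = 2 * kInstrSize;  // Branch around the veneers + the guard.
  return pc_offset > max_reachable_pc - kMargin - protection -
                     unresolved_branch_count * kMaxVeneerSize;
}

}  // namespace

int main() {
  // A test branch emitted at offset 0 can reach roughly offset 32KB; with
  // three pending branches the veneers must go out well before that point.
  std::printf("%d\n", ShouldEmitVeneerSketch(30 * 1024, 32 * 1024, 3));  // 1
  std::printf("%d\n", ShouldEmitVeneerSketch(10 * 1024, 32 * 1024, 3));  // 0
  return 0;
}
```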
| 567 | |
| 568 | |
| 569 void MacroAssembler::EmitVeneers(bool need_protection) { | |
| 570 RecordComment("[ Veneers"); | |
| 571 | |
| 572 Label end; | |
| 573 if (need_protection) { | |
| 574 B(&end); | |
| 575 } | |
| 576 | |
| 577 EmitVeneersGuard(); | |
| 578 | |
| 579 { | |
| 580 InstructionAccurateScope scope(this); | |
| 581 Label size_check; | |
| 582 | |
| 583 std::multimap<int, FarBranchInfo>::iterator it, it_to_delete; | |
| 584 | |
| 585 it = unresolved_branches_.begin(); | |
| 586 while (it != unresolved_branches_.end()) { | |
| 587 if (ShouldEmitVeneer(it->first)) { | |
| 588 Instruction* branch = InstructionAt(it->second.pc_offset_); | |
| 589 Label* label = it->second.label_; | |
| 590 | |
| 591 #ifdef DEBUG | |
| 592 __ bind(&size_check); | |
| 593 #endif | |
| 594 // Patch the branch to point to the current position, and emit a branch | |
| 595 // to the label. | |
| 596 Instruction* veneer = reinterpret_cast<Instruction*>(pc_); | |
| 597 RemoveBranchFromLabelLinkChain(branch, label, veneer); | |
| 598 branch->SetImmPCOffsetTarget(veneer); | |
| 599 b(label); | |
| 600 #ifdef DEBUG | |
| 601 ASSERT(SizeOfCodeGeneratedSince(&size_check) <= | |
| 602 static_cast<uint64_t>(kMaxVeneerCodeSize)); | |
| 603 size_check.Unuse(); | |
| 604 #endif | |
| 605 | |
| 606 it_to_delete = it++; | |
| 607 unresolved_branches_.erase(it_to_delete); | |
| 608 } else { | |
| 609 ++it; | |
| 610 } | |
| 611 } | |
| 612 } | |
| 613 | |
| 614 Bind(&end); | |
| 615 | |
| 616 RecordComment("]"); | |
| 617 } | |
| 618 | |
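The patch-and-branch step above ("patch the branch to point to the current position, and emit a branch to the label") can be hard to visualize from the diff alone. Below is a toy model, not V8 code: `Branch`, `EmitVeneerFor` and the byte ranges are invented for illustration. The idea is only that the short-range branch is re-targeted to a nearby veneer, and the veneer covers the remaining distance with an unconditional branch.

```cpp
// Toy model of veneer patching: a short-range branch that can no longer reach
// its label is re-pointed at a veneer, which carries the jump the rest of the
// way. Offsets are in bytes; ranges are illustrative only.
#include <cassert>
#include <cstdio>

struct Branch {
  int pc;      // Where the branch instruction lives.
  int target;  // Where it currently points.
  int range;   // Maximum reach in bytes.
};

// "Patch" an out-of-range branch by inserting a veneer at veneer_pc.
void EmitVeneerFor(Branch* branch, int veneer_pc, int final_target,
                   Branch* veneer_out) {
  // The original branch now only needs to reach the nearby veneer...
  branch->target = veneer_pc;
  assert(veneer_pc - branch->pc <= branch->range);
  // ...and the veneer, an unconditional branch (+/-128MB on AArch64),
  // covers the remaining distance to the real label.
  *veneer_out = Branch{veneer_pc, final_target, 128 * 1024 * 1024};
}

int main() {
  Branch tbz{0, 100 * 1024, 32 * 1024};  // TBZ-style branch, +/-32KB reach.
  Branch veneer{};
  EmitVeneerFor(&tbz, 16 * 1024, 100 * 1024, &veneer);
  std::printf("branch -> %d, veneer -> %d\n", tbz.target, veneer.target);
  return 0;
}
```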
| 619 | |
| 620 void MacroAssembler::EmitVeneersGuard() { | |
| 621 if (emit_debug_code()) { | |
| 622 Unreachable(); | |
| 623 } | |
| 624 } | |
| 625 | |
| 626 | |
| 627 void MacroAssembler::CheckVeneers(bool need_protection) { | |
| 628 if (unresolved_branches_.empty()) { | |
| 629 return; | |
| 630 } | |
| 631 | |
| 632 CHECK(pc_offset() < unresolved_branches_first_limit()); | |
| 633 int margin = kVeneerDistanceMargin; | |
| 634 if (!need_protection) { | |
| 635 // Prefer emitting veneers protected by an existing instruction. | |
| 636 // The 4 divisor is a finger in the air guess. With a default margin of 2KB, | |
| 637 // that leaves 512B = 128 instructions of extra margin to avoid requiring a | |
| 638 // protective branch. | |
| 639 margin += margin / 4; | |
| 640 } | |
| 641 if (ShouldEmitVeneer(unresolved_branches_first_limit(), margin)) { | |
| 642 EmitVeneers(need_protection); | |
| 643 } | |
| 644 } | |
| 645 | |
| 646 | |
| 647 bool MacroAssembler::NeedExtraInstructionsOrRegisterBranch( | 561 bool MacroAssembler::NeedExtraInstructionsOrRegisterBranch( |
| 648 Label *label, ImmBranchType b_type) { | 562 Label *label, ImmBranchType b_type) { |
| 649 bool need_longer_range = false; | 563 bool need_longer_range = false; |
| 650 // There are two situations in which we care about the offset being out of | 564 // There are two situations in which we care about the offset being out of |
| 651 // range: | 565 // range: |
| 652 // - The label is bound but too far away. | 566 // - The label is bound but too far away. |
| 653 // - The label is not bound but linked, and the previous branch | 567 // - The label is not bound but linked, and the previous branch |
| 654 // instruction in the chain is too far away. | 568 // instruction in the chain is too far away. |
| 655 if (label->is_bound() || label->is_linked()) { | 569 if (label->is_bound() || label->is_linked()) { |
| 656 need_longer_range = | 570 need_longer_range = |
| 657 !Instruction::IsValidImmPCOffset(b_type, label->pos() - pc_offset()); | 571 !Instruction::IsValidImmPCOffset(b_type, label->pos() - pc_offset()); |
| 658 } | 572 } |
| 659 if (!need_longer_range && !label->is_bound()) { | 573 if (!need_longer_range && !label->is_bound()) { |
| 660 int max_reachable_pc = pc_offset() + Instruction::ImmBranchRange(b_type); | 574 int max_reachable_pc = pc_offset() + Instruction::ImmBranchRange(b_type); |
| 661 unresolved_branches_.insert( | 575 unresolved_branches_.insert( |
| 662 std::pair<int, FarBranchInfo>(max_reachable_pc, | 576 std::pair<int, FarBranchInfo>(max_reachable_pc, |
| 663 FarBranchInfo(pc_offset(), label))); | 577 FarBranchInfo(pc_offset(), label))); |
| 578 // Also maintain the next pool check. |
| 579 next_veneer_pool_check_ = |
| 580 Min(next_veneer_pool_check_, |
| 581 max_reachable_pc - kVeneerDistanceCheckMargin); |
| 664 } | 582 } |
| 665 return need_longer_range; | 583 return need_longer_range; |
| 666 } | 584 } |
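The range test above relies on the architectural AArch64 immediate-branch reaches: conditional and compare-and-branch instructions reach about +/-1MB, test-and-branch about +/-32KB, and unconditional branches about +/-128MB. The new code on the right additionally pulls `next_veneer_pool_check_` forward so the pool is checked before the earliest of these deadlines expires. The sketch below restates the range test with invented names (`BranchKind`, `OffsetFits`); it is not V8's `Instruction::IsValidImmPCOffset`.

```cpp
// Minimal sketch of the range test that decides between a direct branch and
// the "inverted branch over an unconditional branch" sequence.
#include <cstdint>
#include <cstdio>

enum BranchKind { kCond, kCompare, kTest, kUncond };

// Maximum reach in bytes (symmetric forwards and backwards).
int64_t BranchRange(BranchKind kind) {
  switch (kind) {
    case kCond:                     // B.cond: 19-bit word offset, +/-1MB.
    case kCompare: return 1 << 20;  // CBZ/CBNZ: same reach as B.cond.
    case kTest:    return 1 << 15;  // TBZ/TBNZ: 14-bit word offset, +/-32KB.
    case kUncond:  return 1 << 27;  // B/BL: 26-bit word offset, +/-128MB.
  }
  return 0;
}

bool OffsetFits(BranchKind kind, int64_t byte_offset) {
  int64_t range = BranchRange(kind);
  return byte_offset >= -range && byte_offset < range && (byte_offset % 4) == 0;
}

int main() {
  std::printf("%d\n", OffsetFits(kTest, 40 * 1024));  // 0: too far for TBZ.
  std::printf("%d\n", OffsetFits(kCond, 40 * 1024));  // 1: fine for B.cond.
  return 0;
}
```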
| 667 | 585 |
| 668 | 586 |
| 669 void MacroAssembler::B(Label* label, BranchType type, Register reg, int bit) { | 587 void MacroAssembler::B(Label* label, BranchType type, Register reg, int bit) { |
| 670 ASSERT((reg.Is(NoReg) || type >= kBranchTypeFirstUsingReg) && | 588 ASSERT((reg.Is(NoReg) || type >= kBranchTypeFirstUsingReg) && |
| 671 (bit == -1 || type >= kBranchTypeFirstUsingBit)); | 589 (bit == -1 || type >= kBranchTypeFirstUsingBit)); |
| 672 if (kBranchTypeFirstCondition <= type && type <= kBranchTypeLastCondition) { | 590 if (kBranchTypeFirstCondition <= type && type <= kBranchTypeLastCondition) { |
| 673 B(static_cast<Condition>(type), label); | 591 B(static_cast<Condition>(type), label); |
| (...skipping 15 matching lines...) |
| 689 void MacroAssembler::B(Label* label, Condition cond) { | 607 void MacroAssembler::B(Label* label, Condition cond) { |
| 690 ASSERT(allow_macro_instructions_); | 608 ASSERT(allow_macro_instructions_); |
| 691 ASSERT((cond != al) && (cond != nv)); | 609 ASSERT((cond != al) && (cond != nv)); |
| 692 | 610 |
| 693 Label done; | 611 Label done; |
| 694 bool need_extra_instructions = | 612 bool need_extra_instructions = |
| 695 NeedExtraInstructionsOrRegisterBranch(label, CondBranchType); | 613 NeedExtraInstructionsOrRegisterBranch(label, CondBranchType); |
| 696 | 614 |
| 697 if (need_extra_instructions) { | 615 if (need_extra_instructions) { |
| 698 b(&done, InvertCondition(cond)); | 616 b(&done, InvertCondition(cond)); |
| 699 b(label); | 617 B(label); |
| 700 } else { | 618 } else { |
| 701 b(label, cond); | 619 b(label, cond); |
| 702 } | 620 } |
| 703 CheckVeneers(!need_extra_instructions); | |
| 704 bind(&done); | 621 bind(&done); |
| 705 } | 622 } |
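When the label may be out of range, the sequence above inverts the condition and hops over an unconditional branch. The sketch below illustrates the inversion trick; the enum values follow the AArch64 condition-code numbering, but `Invert` is an illustrative stand-in for V8's `InvertCondition`, not its implementation.

```cpp
// In the AArch64 encoding each condition and its inverse differ only in the
// least significant bit, so inversion is a single XOR.
#include <cstdio>

enum Condition { eq = 0, ne = 1, hs = 2, lo = 3, mi = 4, pl = 5,
                 vs = 6, vc = 7, hi = 8, ls = 9, ge = 10, lt = 11,
                 gt = 12, le = 13 };

Condition Invert(Condition cond) { return static_cast<Condition>(cond ^ 1); }

int main() {
  // A far "branch if equal" becomes:
  //     b.ne  done     ; inverted condition, short range, always reachable
  //     b     label    ; unconditional, +/-128MB range
  //   done:
  std::printf("invert(eq) == ne: %d\n", Invert(eq) == ne);
  std::printf("invert(lt) == ge: %d\n", Invert(lt) == ge);
  return 0;
}
```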
| 706 | 623 |
| 707 | 624 |
| 708 void MacroAssembler::Tbnz(const Register& rt, unsigned bit_pos, Label* label) { | 625 void MacroAssembler::Tbnz(const Register& rt, unsigned bit_pos, Label* label) { |
| 709 ASSERT(allow_macro_instructions_); | 626 ASSERT(allow_macro_instructions_); |
| 710 | 627 |
| 711 Label done; | 628 Label done; |
| 712 bool need_extra_instructions = | 629 bool need_extra_instructions = |
| 713 NeedExtraInstructionsOrRegisterBranch(label, TestBranchType); | 630 NeedExtraInstructionsOrRegisterBranch(label, TestBranchType); |
| 714 | 631 |
| 715 if (need_extra_instructions) { | 632 if (need_extra_instructions) { |
| 716 tbz(rt, bit_pos, &done); | 633 tbz(rt, bit_pos, &done); |
| 717 b(label); | 634 B(label); |
| 718 } else { | 635 } else { |
| 719 tbnz(rt, bit_pos, label); | 636 tbnz(rt, bit_pos, label); |
| 720 } | 637 } |
| 721 CheckVeneers(!need_extra_instructions); | |
| 722 bind(&done); | 638 bind(&done); |
| 723 } | 639 } |
| 724 | 640 |
| 725 | 641 |
| 726 void MacroAssembler::Tbz(const Register& rt, unsigned bit_pos, Label* label) { | 642 void MacroAssembler::Tbz(const Register& rt, unsigned bit_pos, Label* label) { |
| 727 ASSERT(allow_macro_instructions_); | 643 ASSERT(allow_macro_instructions_); |
| 728 | 644 |
| 729 Label done; | 645 Label done; |
| 730 bool need_extra_instructions = | 646 bool need_extra_instructions = |
| 731 NeedExtraInstructionsOrRegisterBranch(label, TestBranchType); | 647 NeedExtraInstructionsOrRegisterBranch(label, TestBranchType); |
| 732 | 648 |
| 733 if (need_extra_instructions) { | 649 if (need_extra_instructions) { |
| 734 tbnz(rt, bit_pos, &done); | 650 tbnz(rt, bit_pos, &done); |
| 735 b(label); | 651 B(label); |
| 736 } else { | 652 } else { |
| 737 tbz(rt, bit_pos, label); | 653 tbz(rt, bit_pos, label); |
| 738 } | 654 } |
| 739 CheckVeneers(!need_extra_instructions); | |
| 740 bind(&done); | 655 bind(&done); |
| 741 } | 656 } |
| 742 | 657 |
| 743 | 658 |
| 744 void MacroAssembler::Cbnz(const Register& rt, Label* label) { | 659 void MacroAssembler::Cbnz(const Register& rt, Label* label) { |
| 745 ASSERT(allow_macro_instructions_); | 660 ASSERT(allow_macro_instructions_); |
| 746 | 661 |
| 747 Label done; | 662 Label done; |
| 748 bool need_extra_instructions = | 663 bool need_extra_instructions = |
| 749 NeedExtraInstructionsOrRegisterBranch(label, CompareBranchType); | 664 NeedExtraInstructionsOrRegisterBranch(label, CompareBranchType); |
| 750 | 665 |
| 751 if (need_extra_instructions) { | 666 if (need_extra_instructions) { |
| 752 cbz(rt, &done); | 667 cbz(rt, &done); |
| 753 b(label); | 668 B(label); |
| 754 } else { | 669 } else { |
| 755 cbnz(rt, label); | 670 cbnz(rt, label); |
| 756 } | 671 } |
| 757 CheckVeneers(!need_extra_instructions); | |
| 758 bind(&done); | 672 bind(&done); |
| 759 } | 673 } |
| 760 | 674 |
| 761 | 675 |
| 762 void MacroAssembler::Cbz(const Register& rt, Label* label) { | 676 void MacroAssembler::Cbz(const Register& rt, Label* label) { |
| 763 ASSERT(allow_macro_instructions_); | 677 ASSERT(allow_macro_instructions_); |
| 764 | 678 |
| 765 Label done; | 679 Label done; |
| 766 bool need_extra_instructions = | 680 bool need_extra_instructions = |
| 767 NeedExtraInstructionsOrRegisterBranch(label, CompareBranchType); | 681 NeedExtraInstructionsOrRegisterBranch(label, CompareBranchType); |
| 768 | 682 |
| 769 if (need_extra_instructions) { | 683 if (need_extra_instructions) { |
| 770 cbnz(rt, &done); | 684 cbnz(rt, &done); |
| 771 b(label); | 685 B(label); |
| 772 } else { | 686 } else { |
| 773 cbz(rt, label); | 687 cbz(rt, label); |
| 774 } | 688 } |
| 775 CheckVeneers(!need_extra_instructions); | |
| 776 bind(&done); | 689 bind(&done); |
| 777 } | 690 } |
| 778 | 691 |
| 779 | 692 |
| 780 // Pseudo-instructions. | 693 // Pseudo-instructions. |
| 781 | 694 |
| 782 | 695 |
| 783 void MacroAssembler::Abs(const Register& rd, const Register& rm, | 696 void MacroAssembler::Abs(const Register& rd, const Register& rm, |
| 784 Label* is_not_representable, | 697 Label* is_not_representable, |
| 785 Label* is_representable) { | 698 Label* is_representable) { |
| (...skipping 1216 matching lines...) |
| 2002 | 1915 |
| 2003 | 1916 |
| 2004 void MacroAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode) { | 1917 void MacroAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode) { |
| 2005 ASSERT(RelocInfo::IsCodeTarget(rmode)); | 1918 ASSERT(RelocInfo::IsCodeTarget(rmode)); |
| 2006 AllowDeferredHandleDereference embedding_raw_address; | 1919 AllowDeferredHandleDereference embedding_raw_address; |
| 2007 Jump(reinterpret_cast<intptr_t>(code.location()), rmode); | 1920 Jump(reinterpret_cast<intptr_t>(code.location()), rmode); |
| 2008 } | 1921 } |
| 2009 | 1922 |
| 2010 | 1923 |
| 2011 void MacroAssembler::Call(Register target) { | 1924 void MacroAssembler::Call(Register target) { |
| 2012 BlockConstPoolScope scope(this); | 1925 BlockPoolsScope scope(this); |
| 2013 #ifdef DEBUG | 1926 #ifdef DEBUG |
| 2014 Label start_call; | 1927 Label start_call; |
| 2015 Bind(&start_call); | 1928 Bind(&start_call); |
| 2016 #endif | 1929 #endif |
| 2017 | 1930 |
| 2018 Blr(target); | 1931 Blr(target); |
| 2019 | 1932 |
| 2020 #ifdef DEBUG | 1933 #ifdef DEBUG |
| 2021 AssertSizeOfCodeGeneratedSince(&start_call, CallSize(target)); | 1934 AssertSizeOfCodeGeneratedSince(&start_call, CallSize(target)); |
| 2022 #endif | 1935 #endif |
| 2023 } | 1936 } |
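Each `Call` variant brackets its emission with a `start_call` label and a size assertion in debug builds, so that `CallSize` stays in sync with the code actually generated. The toy buffer below models that invariant; `FakeAssembler` and the one-instruction call size are assumptions based only on the code shown above (a single BLR), not on V8 internals.

```cpp
// Sketch of the debug-build invariant around Call: the number of bytes emitted
// since start_call must equal what CallSize() reports.
#include <cassert>
#include <cstdint>
#include <vector>

struct FakeAssembler {
  std::vector<uint32_t> buffer;  // One entry per 4-byte instruction.
  int pc_offset() const { return static_cast<int>(buffer.size()) * 4; }
  void Emit(uint32_t instr) { buffer.push_back(instr); }
};

int main() {
  const int kCallSizeBytes = 1 * 4;   // Call(Register) above emits one BLR.
  FakeAssembler masm;
  int start_call = masm.pc_offset();  // Equivalent of Bind(&start_call).
  masm.Emit(0xD63F0000);              // BLR x0.
  // Equivalent of AssertSizeOfCodeGeneratedSince(&start_call, CallSize(...)).
  assert(masm.pc_offset() - start_call == kCallSizeBytes);
  return 0;
}
```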
| 2024 | 1937 |
| 2025 | 1938 |
| 2026 void MacroAssembler::Call(Label* target) { | 1939 void MacroAssembler::Call(Label* target) { |
| 2027 BlockConstPoolScope scope(this); | 1940 BlockPoolsScope scope(this); |
| 2028 #ifdef DEBUG | 1941 #ifdef DEBUG |
| 2029 Label start_call; | 1942 Label start_call; |
| 2030 Bind(&start_call); | 1943 Bind(&start_call); |
| 2031 #endif | 1944 #endif |
| 2032 | 1945 |
| 2033 Bl(target); | 1946 Bl(target); |
| 2034 | 1947 |
| 2035 #ifdef DEBUG | 1948 #ifdef DEBUG |
| 2036 AssertSizeOfCodeGeneratedSince(&start_call, CallSize(target)); | 1949 AssertSizeOfCodeGeneratedSince(&start_call, CallSize(target)); |
| 2037 #endif | 1950 #endif |
| 2038 } | 1951 } |
| 2039 | 1952 |
| 2040 | 1953 |
| 2041 // MacroAssembler::CallSize is sensitive to changes in this function, as it | 1954 // MacroAssembler::CallSize is sensitive to changes in this function, as it |
| 2042 // needs to know how many instructions are used to branch to the target. | 1955 // needs to know how many instructions are used to branch to the target. |
| 2043 void MacroAssembler::Call(Address target, RelocInfo::Mode rmode) { | 1956 void MacroAssembler::Call(Address target, RelocInfo::Mode rmode) { |
| 2044 BlockConstPoolScope scope(this); | 1957 BlockPoolsScope scope(this); |
| 2045 #ifdef DEBUG | 1958 #ifdef DEBUG |
| 2046 Label start_call; | 1959 Label start_call; |
| 2047 Bind(&start_call); | 1960 Bind(&start_call); |
| 2048 #endif | 1961 #endif |
| 2049 // Statement positions are expected to be recorded when the target | 1962 // Statement positions are expected to be recorded when the target |
| 2050 // address is loaded. | 1963 // address is loaded. |
| 2051 positions_recorder()->WriteRecordedPositions(); | 1964 positions_recorder()->WriteRecordedPositions(); |
| 2052 | 1965 |
| 2053 // Addresses always have 64 bits, so we shouldn't encounter NONE32. | 1966 // Addresses always have 64 bits, so we shouldn't encounter NONE32. |
| 2054 ASSERT(rmode != RelocInfo::NONE32); | 1967 ASSERT(rmode != RelocInfo::NONE32); |
| (...skipping 2617 matching lines...) |
| 4672 | 4585 |
| 4673 // Call Printf directly to report the error. | 4586 // Call Printf directly to report the error. |
| 4674 CallPrintf(); | 4587 CallPrintf(); |
| 4675 | 4588 |
| 4676 // We need a way to stop execution on both the simulator and real hardware, | 4589 // We need a way to stop execution on both the simulator and real hardware, |
| 4677 // and Unreachable() is the best option. | 4590 // and Unreachable() is the best option. |
| 4678 Unreachable(); | 4591 Unreachable(); |
| 4679 | 4592 |
| 4680 // Emit the message string directly in the instruction stream. | 4593 // Emit the message string directly in the instruction stream. |
| 4681 { | 4594 { |
| 4682 BlockConstPoolScope scope(this); | 4595 BlockPoolsScope scope(this); |
| 4683 Bind(&msg_address); | 4596 Bind(&msg_address); |
| 4684 EmitStringData(GetBailoutReason(reason)); | 4597 EmitStringData(GetBailoutReason(reason)); |
| 4685 } | 4598 } |
| 4686 } | 4599 } |
| 4687 | 4600 |
| 4688 SetStackPointer(old_stack_pointer); | 4601 SetStackPointer(old_stack_pointer); |
| 4689 } | 4602 } |
| 4690 | 4603 |
| 4691 | 4604 |
| 4692 void MacroAssembler::LoadTransitionedArrayMapConditional( | 4605 void MacroAssembler::LoadTransitionedArrayMapConditional( |
| (...skipping 171 matching lines...) |
| 4864 // Load the format string into x0, as per the procedure-call standard. | 4777 // Load the format string into x0, as per the procedure-call standard. |
| 4865 // | 4778 // |
| 4866 // To make the code as portable as possible, the format string is encoded | 4779 // To make the code as portable as possible, the format string is encoded |
| 4867 // directly in the instruction stream. It might be cleaner to encode it in a | 4780 // directly in the instruction stream. It might be cleaner to encode it in a |
| 4868 // literal pool, but since Printf is usually used for debugging, it is | 4781 // literal pool, but since Printf is usually used for debugging, it is |
| 4869 // beneficial for it to be minimally dependent on other features. | 4782 // beneficial for it to be minimally dependent on other features. |
| 4870 Label format_address; | 4783 Label format_address; |
| 4871 Adr(x0, &format_address); | 4784 Adr(x0, &format_address); |
| 4872 | 4785 |
| 4873 // Emit the format string directly in the instruction stream. | 4786 // Emit the format string directly in the instruction stream. |
| 4874 { BlockConstPoolScope scope(this); | 4787 { BlockPoolsScope scope(this); |
| 4875 Label after_data; | 4788 Label after_data; |
| 4876 B(&after_data); | 4789 B(&after_data); |
| 4877 Bind(&format_address); | 4790 Bind(&format_address); |
| 4878 EmitStringData(format); | 4791 EmitStringData(format); |
| 4879 Unreachable(); | 4792 Unreachable(); |
| 4880 Bind(&after_data); | 4793 Bind(&after_data); |
| 4881 } | 4794 } |
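The block above embeds the format string directly in the instruction stream: branch over the data, bind a label at the data, emit the bytes, and let `Adr` materialize the label's address, with pools blocked so nothing is emitted inside the data. A hypothetical byte-buffer model of that layout, with invented helpers and no real assembler involved:

```cpp
// Toy model of the "embed data in the code stream" pattern: branch over the
// bytes, remember where they start, and address them pc-relatively later.
#include <cstdint>
#include <cstdio>
#include <cstring>
#include <vector>

int main() {
  std::vector<uint8_t> code;
  const char* format = "value = %d\n";

  // B after_data  (placeholder; a real assembler patches the offset later).
  size_t branch_at = code.size();
  code.resize(code.size() + 4);

  // Bind(&format_address): the string starts here.
  size_t format_address = code.size();
  code.insert(code.end(), format, format + std::strlen(format) + 1);

  // Pad to the next instruction boundary, then Bind(&after_data).
  while (code.size() % 4 != 0) code.push_back(0);
  size_t after_data = code.size();

  // Adr(x0, &format_address) would now produce code_start + format_address.
  std::printf("branch at %zu skips to %zu, string at %zu: \"%s\"\n",
              branch_at, after_data, format_address,
              reinterpret_cast<const char*>(&code[format_address]));
  return 0;
}
```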
| 4882 | 4795 |
| 4883 // We don't pass any arguments on the stack, but we still need to align the C | 4796 // We don't pass any arguments on the stack, but we still need to align the C |
| 4884 // stack pointer to a 16-byte boundary for PCS compliance. | 4797 // stack pointer to a 16-byte boundary for PCS compliance. |
| (...skipping 144 matching lines...) |
| 5029 } | 4942 } |
| 5030 #endif | 4943 #endif |
| 5031 | 4944 |
| 5032 | 4945 |
| 5033 #undef __ | 4946 #undef __ |
| 5034 #define __ masm-> | 4947 #define __ masm-> |
| 5035 | 4948 |
| 5036 | 4949 |
| 5037 void InlineSmiCheckInfo::Emit(MacroAssembler* masm, const Register& reg, | 4950 void InlineSmiCheckInfo::Emit(MacroAssembler* masm, const Register& reg, |
| 5038 const Label* smi_check) { | 4951 const Label* smi_check) { |
| 5039 Assembler::BlockConstPoolScope scope(masm); | 4952 Assembler::BlockPoolsScope scope(masm); |
| 5040 if (reg.IsValid()) { | 4953 if (reg.IsValid()) { |
| 5041 ASSERT(smi_check->is_bound()); | 4954 ASSERT(smi_check->is_bound()); |
| 5042 ASSERT(reg.Is64Bits()); | 4955 ASSERT(reg.Is64Bits()); |
| 5043 | 4956 |
| 5044 // Encode the register (x0-x30) in the lowest 5 bits, then the offset to | 4957 // Encode the register (x0-x30) in the lowest 5 bits, then the offset to |
| 5045 // 'check' in the other bits. The possible offset is limited in that we | 4958 // 'check' in the other bits. The possible offset is limited in that we |
| 5046 // use BitField to pack the data, and the underlying data type is a | 4959 // use BitField to pack the data, and the underlying data type is a |
| 5047 // uint32_t. | 4960 // uint32_t. |
| 5048 uint32_t delta = __ InstructionsGeneratedSince(smi_check); | 4961 uint32_t delta = __ InstructionsGeneratedSince(smi_check); |
| 5049 __ InlineData(RegisterBits::encode(reg.code()) | DeltaBits::encode(delta)); | 4962 __ InlineData(RegisterBits::encode(reg.code()) | DeltaBits::encode(delta)); |
| (...skipping 25 matching lines...) |
| 5075 } | 4988 } |
| 5076 } | 4989 } |
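The comment above describes packing the register code into the low 5 bits of a `uint32_t` and the instruction delta into the remaining bits. A self-contained sketch of that packing follows; the 27-bit delta width is assumed from "32 bits minus 5", and the helpers are illustrative rather than V8's `BitField` machinery.

```cpp
// Sketch of the register + delta packing used for inline smi-check data.
#include <cassert>
#include <cstdint>

const uint32_t kRegisterBits = 5;
const uint32_t kRegisterMask = (1u << kRegisterBits) - 1;

uint32_t Encode(uint32_t reg_code, uint32_t delta) {
  assert(reg_code <= 30);                        // x0-x30.
  assert(delta < (1u << (32 - kRegisterBits)));  // Delta must fit in 27 bits.
  return reg_code | (delta << kRegisterBits);
}

uint32_t DecodeRegister(uint32_t packed) { return packed & kRegisterMask; }
uint32_t DecodeDelta(uint32_t packed) { return packed >> kRegisterBits; }

int main() {
  uint32_t packed = Encode(/*x7*/ 7, /*instructions back to the check*/ 12);
  assert(DecodeRegister(packed) == 7);
  assert(DecodeDelta(packed) == 12);
  return 0;
}
```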
| 5077 | 4990 |
| 5078 | 4991 |
| 5079 #undef __ | 4992 #undef __ |
| 5080 | 4993 |
| 5081 | 4994 |
| 5082 } } // namespace v8::internal | 4995 } } // namespace v8::internal |
| 5083 | 4996 |
| 5084 #endif // V8_TARGET_ARCH_A64 | 4997 #endif // V8_TARGET_ARCH_A64 |
| OLD | NEW |