Chromium Code Reviews

Side by Side Diff: src/a64/assembler-a64.h

Issue 181873002: A64: Move veneer emission checking in the Assembler. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Remove the force_emit parameter in CheckVeneerPool Created 6 years, 9 months ago
1 // Copyright 2013 the V8 project authors. All rights reserved. 1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 712 matching lines...)
723 723
724 inline void Unreachable(); 724 inline void Unreachable();
725 725
726 // Label -------------------------------------------------------------------- 726 // Label --------------------------------------------------------------------
727 // Bind a label to the current pc. Note that labels can only be bound once, 727 // Bind a label to the current pc. Note that labels can only be bound once,
728 // and if labels are linked to other instructions, they _must_ be bound 728 // and if labels are linked to other instructions, they _must_ be bound
729 // before they go out of scope. 729 // before they go out of scope.
730 void bind(Label* label); 730 void bind(Label* label);
731 731
732 732
733 // RelocInfo and constant pool ---------------------------------------------- 733 // RelocInfo and pools ------------------------------------------------------
734 734
735 // Record relocation information for current pc_. 735 // Record relocation information for current pc_.
736 void RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data = 0); 736 void RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data = 0);
737 737
738 // Return the address in the constant pool of the code target address used by 738 // Return the address in the constant pool of the code target address used by
739 // the branch/call instruction at pc. 739 // the branch/call instruction at pc.
740 inline static Address target_pointer_address_at(Address pc); 740 inline static Address target_pointer_address_at(Address pc);
741 741
742 // Read/Modify the code target address in the branch/call instruction at pc. 742 // Read/Modify the code target address in the branch/call instruction at pc.
743 inline static Address target_address_at(Address pc); 743 inline static Address target_address_at(Address pc);
(...skipping 90 matching lines...)
834 // StartBlockConstPool to have an effect. 834 // StartBlockConstPool to have an effect.
835 void EndBlockConstPool(); 835 void EndBlockConstPool();
836 836
837 bool is_const_pool_blocked() const; 837 bool is_const_pool_blocked() const;
838 static bool IsConstantPoolAt(Instruction* instr); 838 static bool IsConstantPoolAt(Instruction* instr);
839 static int ConstantPoolSizeAt(Instruction* instr); 839 static int ConstantPoolSizeAt(Instruction* instr);
840 // See Assembler::CheckConstPool for more info. 840 // See Assembler::CheckConstPool for more info.
841 void ConstantPoolMarker(uint32_t size); 841 void ConstantPoolMarker(uint32_t size);
842 void ConstantPoolGuard(); 842 void ConstantPoolGuard();
843 843
844 // Prevent veneer pool emission until EndBlockVeneerPool is called.
845 // Calls to this function can be nested but must be followed by an equal
846 // number of calls to EndBlockVeneerPool.
847 void StartBlockVeneerPool();
848
849 // Resume veneer pool emission. Needs to be called as many times as
850 // StartBlockVeneerPool to have an effect.
851 void EndBlockVeneerPool();
852
853 bool is_veneer_pool_blocked() const {
854 return veneer_pool_blocked_nesting_ > 0;
855 }
856
857 // Block/resume emission of constant pools and veneer pools.
858 void StartBlockPools() {
859 StartBlockConstPool();
860 StartBlockVeneerPool();
861 }
862 void EndBlockPools() {
863 EndBlockConstPool();
864 EndBlockVeneerPool();
865 }
844 866
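The blocking calls nest like the existing constant pool ones; a minimal sketch of the intended usage, assuming an Assembler* named masm (illustrative only, not part of the patch):

    // Each StartBlockVeneerPool() must be balanced by an EndBlockVeneerPool().
    masm->StartBlockVeneerPool();            // nesting depth 1: emission blocked
    masm->StartBlockVeneerPool();            // nesting depth 2: still blocked
    ASSERT(masm->is_veneer_pool_blocked());
    masm->EndBlockVeneerPool();              // back to depth 1: still blocked
    masm->EndBlockVeneerPool();              // depth 0: emission may resume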
845 // Debugging ---------------------------------------------------------------- 867 // Debugging ----------------------------------------------------------------
846 PositionsRecorder* positions_recorder() { return &positions_recorder_; } 868 PositionsRecorder* positions_recorder() { return &positions_recorder_; }
847 void RecordComment(const char* msg); 869 void RecordComment(const char* msg);
848 int buffer_space() const; 870 int buffer_space() const;
849 871
850 // Mark address of the ExitJSFrame code. 872 // Mark address of the ExitJSFrame code.
851 void RecordJSReturn(); 873 void RecordJSReturn();
852 874
853 // Mark address of a debug break slot. 875 // Mark address of a debug break slot.
(...skipping 857 matching lines...)
1711 1733
1712 private: 1734 private:
1713 Assembler* assem_; 1735 Assembler* assem_;
1714 1736
1715 DISALLOW_IMPLICIT_CONSTRUCTORS(BlockConstPoolScope); 1737 DISALLOW_IMPLICIT_CONSTRUCTORS(BlockConstPoolScope);
1716 }; 1738 };
1717 1739
1718 // Check if it is time to emit a constant pool. 1740 // Check if it is time to emit a constant pool.
1719 void CheckConstPool(bool force_emit, bool require_jump); 1741 void CheckConstPool(bool force_emit, bool require_jump);
1720 1742
1743
1744 // Returns true if we should emit a veneer as soon as possible for a branch
1745 // which can at most reach the specified pc.
1746 bool ShouldEmitVeneer(int max_reachable_pc,
1747 int margin = kVeneerDistanceMargin);
1748 bool ShouldEmitVeneers(int margin = kVeneerDistanceMargin) {
1749 return ShouldEmitVeneer(unresolved_branches_first_limit(), margin);
1750 }
1751
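Conceptually, ShouldEmitVeneer() asks whether the current position is getting too close to the furthest pc the branch can still reach. A rough sketch of that test, assuming the implementation in assembler-a64.cc follows the comment above (the real code may also reserve room for the veneers and the guard branch):

    // Sketch only: emit veneers once pc_offset() is within `margin` bytes of
    // the branch's maximum reachable pc.
    bool should_emit = (pc_offset() + margin >= max_reachable_pc);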
1752 // The maximum code size generated for a veneer. Currently one branch
1753 // instruction. This is for code size checking purposes, and can be extended
1754 // in the future for example if we decide to add nops between the veneers.
1755 static const int kMaxVeneerCodeSize = 1 * kInstructionSize;
1756
1757 // Emits veneers for branches that are approaching their maximum range.
1758 // If need_protection is true, the veneers are protected by a branch jumping
1759 // over the code.
1760 void EmitVeneers(bool need_protection, int margin = kVeneerDistanceMargin);
1761 void EmitVeneersGuard();
1762 // Checks whether veneers need to be emitted at this point.
1763 void CheckVeneerPool(bool require_jump, int margin = kVeneerDistanceMargin);
1764
1765
1766 class BlockPoolsScope {
1767 public:
1768 explicit BlockPoolsScope(Assembler* assem) : assem_(assem) {
1769 assem_->StartBlockPools();
1770 }
1771 ~BlockPoolsScope() {
1772 assem_->EndBlockPools();
1773 }
1774
1775 private:
1776 Assembler* assem_;
1777
1778 DISALLOW_IMPLICIT_CONSTRUCTORS(BlockPoolsScope);
1779 };
1780
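A typical use of the new scope type, for a sequence whose layout must not be broken up by either pool (illustrative; the surrounding code and the masm pointer are assumed):

    {
      // Both pools are blocked for the lifetime of the scope.
      Assembler::BlockPoolsScope scope(masm);
      // ... emit a fixed-layout instruction sequence here ...
    }  // Pools may be emitted again once the scope is destroyed.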
1721 // Available for constrained code generation scopes. Prefer 1781 // Available for constrained code generation scopes. Prefer
1722 // MacroAssembler::Mov() when possible. 1782 // MacroAssembler::Mov() when possible.
1723 inline void LoadRelocated(const CPURegister& rt, const Operand& operand); 1783 inline void LoadRelocated(const CPURegister& rt, const Operand& operand);
1724 1784
1725 protected: 1785 protected:
1726 inline const Register& AppropriateZeroRegFor(const CPURegister& reg) const; 1786 inline const Register& AppropriateZeroRegFor(const CPURegister& reg) const;
1727 1787
1728 void LoadStore(const CPURegister& rt, 1788 void LoadStore(const CPURegister& rt,
1729 const MemOperand& addr, 1789 const MemOperand& addr,
1730 LoadStoreOp op); 1790 LoadStoreOp op);
(...skipping 165 matching lines...)
1896 // TODO(all): Somehow register we have some data here. Then we can 1956 // TODO(all): Somehow register we have some data here. Then we can
1897 // disassemble it correctly. 1957 // disassemble it correctly.
1898 memcpy(pc_, data, size); 1958 memcpy(pc_, data, size);
1899 pc_ += size; 1959 pc_ += size;
1900 CheckBuffer(); 1960 CheckBuffer();
1901 } 1961 }
1902 1962
1903 void GrowBuffer(); 1963 void GrowBuffer();
1904 void CheckBuffer(); 1964 void CheckBuffer();
1905 1965
1906 // Pc offset of the next buffer check. 1966 // Pc offset of the next constant pool check.
1907 int next_buffer_check_; 1967 int next_constant_pool_check_;
1908 1968
1909 // Constant pool generation 1969 // Constant pool generation
1910 // Pools are emitted in the instruction stream, preferably after unconditional 1970 // Pools are emitted in the instruction stream, preferably after unconditional
1911 // jumps or after returns from functions (in dead code locations). 1971 // jumps or after returns from functions (in dead code locations).
1912 // If a long code sequence does not contain unconditional jumps, it is 1972 // If a long code sequence does not contain unconditional jumps, it is
1913 // necessary to emit the constant pool before the pool gets too far from the 1973 // necessary to emit the constant pool before the pool gets too far from the
1914 // location it is accessed from. In this case, we emit a jump over the emitted 1974 // location it is accessed from. In this case, we emit a jump over the emitted
1915 // constant pool. 1975 // constant pool.
1916 // Constants in the pool may be addresses of functions that get relocated; 1976 // Constants in the pool may be addresses of functions that get relocated;
1917 // if so, a relocation info entry is associated to the constant pool entry. 1977 // if so, a relocation info entry is associated to the constant pool entry.
1918 1978
1919 // Repeated checking whether the constant pool should be emitted is rather 1979 // Repeated checking whether the constant pool should be emitted is rather
1920 // expensive. By default we only check again once a number of instructions 1980 // expensive. By default we only check again once a number of instructions
1921 // has been generated. That also means that the sizing of the buffers is not 1981 // has been generated. That also means that the sizing of the buffers is not
1922 // an exact science, and that we rely on some slop to not overrun buffers. 1982 // an exact science, and that we rely on some slop to not overrun buffers.
1923 static const int kCheckPoolIntervalInst = 128; 1983 static const int kCheckConstPoolIntervalInst = 128;
1924 static const int kCheckPoolInterval = 1984 static const int kCheckConstPoolInterval =
1925 kCheckPoolIntervalInst * kInstructionSize; 1985 kCheckConstPoolIntervalInst * kInstructionSize;
1926 1986
1927 // Constants in pools are accessed via pc relative addressing, which can 1987 // Constants in pools are accessed via pc relative addressing, which can
1928 // reach +/-4KB thereby defining a maximum distance between the instruction 1988 // reach +/-4KB thereby defining a maximum distance between the instruction
1929 // and the accessed constant. 1989 // and the accessed constant.
1930 static const int kMaxDistToPool = 4 * KB; 1990 static const int kMaxDistToConstPool = 4 * KB;
1931 static const int kMaxNumPendingRelocInfo = kMaxDistToPool / kInstructionSize; 1991 static const int kMaxNumPendingRelocInfo =
1992 kMaxDistToConstPool / kInstructionSize;
1932 1993
1933 1994
1934 // Average distance between a constant pool and the first instruction 1995 // Average distance between a constant pool and the first instruction
1935 // accessing the constant pool. Longer distance should result in less I-cache 1996 // accessing the constant pool. Longer distance should result in less I-cache
1936 // pollution. 1997 // pollution.
1937 // In practice the distance will be smaller since constant pool emission is 1998 // In practice the distance will be smaller since constant pool emission is
1938 // forced after function return and sometimes after unconditional branches. 1999 // forced after function return and sometimes after unconditional branches.
1939 static const int kAvgDistToPool = kMaxDistToPool - kCheckPoolInterval; 2000 static const int kAvgDistToConstPool =
2001 kMaxDistToConstPool - kCheckConstPoolInterval;
1940 2002
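For reference, with the 4-byte A64 instruction size the renamed constants work out as follows (a worked restatement of the definitions above, not new code in the patch):

    // kCheckConstPoolInterval = 128 * 4    = 512 bytes between checks
    // kMaxDistToConstPool     = 4 * KB     = 4096 bytes of pc-relative range
    // kMaxNumPendingRelocInfo = 4096 / 4   = 1024 pending entries at most
    // kAvgDistToConstPool     = 4096 - 512 = 3584 bytes target distance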
1941 // Emission of the constant pool may be blocked in some code sequences. 2003 // Emission of the constant pool may be blocked in some code sequences.
1942 int const_pool_blocked_nesting_; // Block emission if this is not zero. 2004 int const_pool_blocked_nesting_; // Block emission if this is not zero.
1943 int no_const_pool_before_; // Block emission before this pc offset. 2005 int no_const_pool_before_; // Block emission before this pc offset.
1944 2006
1945 // Keep track of the first instruction requiring a constant pool entry 2007 // Keep track of the first instruction requiring a constant pool entry
1946 // since the previous constant pool was emitted. 2008 // since the previous constant pool was emitted.
1947 int first_const_pool_use_; 2009 int first_const_pool_use_;
1948 2010
2011 // Emission of the veneer pools may be blocked in some code sequences.
2012 int veneer_pool_blocked_nesting_; // Block emission if this is not zero.
2013
1949 // Relocation info generation 2014 // Relocation info generation
1950 // Each relocation is encoded as a variable size value 2015 // Each relocation is encoded as a variable size value
1951 static const int kMaxRelocSize = RelocInfoWriter::kMaxSize; 2016 static const int kMaxRelocSize = RelocInfoWriter::kMaxSize;
1952 RelocInfoWriter reloc_info_writer; 2017 RelocInfoWriter reloc_info_writer;
1953 2018
1954 // Relocation info records are also used during code generation as temporary 2019 // Relocation info records are also used during code generation as temporary
1955 // containers for constants and code target addresses until they are emitted 2020 // containers for constants and code target addresses until they are emitted
1956 // to the constant pool. These pending relocation info records are temporarily 2021 // to the constant pool. These pending relocation info records are temporarily
1957 // stored in a separate buffer until a constant pool is emitted. 2022 // stored in a separate buffer until a constant pool is emitted.
1958 // If every instruction in a long sequence is accessing the pool, we need one 2023 // If every instruction in a long sequence is accessing the pool, we need one
(...skipping 47 matching lines...)
2006 // 2071 //
2007 // The second member gives information about the unresolved branch. The first 2072 // The second member gives information about the unresolved branch. The first
2008 // member of the pair is the maximum offset that the branch can reach in the 2073 // member of the pair is the maximum offset that the branch can reach in the
2009 // buffer. The map is sorted according to this reachable offset, making it 2074 // buffer. The map is sorted according to this reachable offset, making it
2010 // easy to check when veneers need to be emitted. 2075 // easy to check when veneers need to be emitted.
2011 // Note that the maximum reachable offset (first member of the pairs) should 2076 // Note that the maximum reachable offset (first member of the pairs) should
2012 // always be positive but has the same type as the return value for 2077 // always be positive but has the same type as the return value for
2013 // pc_offset() for convenience. 2078 // pc_offset() for convenience.
2014 std::multimap<int, FarBranchInfo> unresolved_branches_; 2079 std::multimap<int, FarBranchInfo> unresolved_branches_;
2015 2080
2081 // We generate a veneer for a branch if we reach within this distance of the
2082 // limit of the range.
2083 static const int kVeneerDistanceMargin = 1 * KB;
2084 // The factor of 2 is a finger-in-the-air guess. With a default margin of
2085 // 1KB, that leaves us an additional 256 instructions to avoid generating a
2086 // protective branch.
2087 static const int kVeneerNoProtectionFactor = 2;
2088 static const int kVeneerDistanceCheckMargin =
2089 kVeneerNoProtectionFactor * kVeneerDistanceMargin;
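The arithmetic behind the comment above, again using the 4-byte instruction size:

    // kVeneerDistanceCheckMargin = 2 * 1 KB = 2048 bytes.
    // The extra 1 KB over kVeneerDistanceMargin is 1024 / 4 = 256 instructions
    // of slack before a protective branch becomes necessary.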
2090 int unresolved_branches_first_limit() const {
2091 ASSERT(!unresolved_branches_.empty());
2092 return unresolved_branches_.begin()->first;
2093 }
2094 // This is similar to next_constant_pool_check_ and helps reduce the overhead
2095 // of checking for veneer pools.
2096 // It is maintained at the closest unresolved branch limit minus the maximum
2097 // veneer margin (or kMaxInt if there are no unresolved branches).
2098 int next_veneer_pool_check_;
2099
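As with next_constant_pool_check_, the point is a cheap comparison on the common path; a sketch of how such a check might be used, assuming the logic in assembler-a64.cc matches the comment above (illustrative only):

    // Only inspect unresolved_branches_ once pc_offset() crosses the threshold.
    if (pc_offset() >= next_veneer_pool_check_) {
      CheckVeneerPool(false);  // require_jump = false; margin uses the default.
    }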
2016 private: 2100 private:
2017 // If a veneer is emitted for a branch instruction, that instruction must be 2101 // If a veneer is emitted for a branch instruction, that instruction must be
2018 // removed from the associated label's link chain so that the assembler does 2102 // removed from the associated label's link chain so that the assembler does
2019 // not later attempt (likely unsuccessfully) to patch it to branch directly to 2103 // not later attempt (likely unsuccessfully) to patch it to branch directly to
2020 // the label. 2104 // the label.
2021 void DeleteUnresolvedBranchInfoForLabel(Label* label); 2105 void DeleteUnresolvedBranchInfoForLabel(Label* label);
2022 2106
2023 private: 2107 private:
2024 // TODO(jbramley): VIXL uses next_literal_pool_check_ and
2025 // literal_pool_monitor_ to determine when to consider emitting a literal
2026 // pool. V8 doesn't use them, so they should either not be here at all, or
2027 // should replace or be merged with next_buffer_check_ and
2028 // const_pool_blocked_nesting_.
2029 Instruction* next_literal_pool_check_;
2030 unsigned literal_pool_monitor_;
2031
2032 PositionsRecorder positions_recorder_; 2108 PositionsRecorder positions_recorder_;
2033 friend class PositionsRecorder; 2109 friend class PositionsRecorder;
2034 friend class EnsureSpace; 2110 friend class EnsureSpace;
2035 }; 2111 };
2036 2112
2037 class PatchingAssembler : public Assembler { 2113 class PatchingAssembler : public Assembler {
2038 public: 2114 public:
2039 // Create an Assembler with a buffer starting at 'start'. 2115 // Create an Assembler with a buffer starting at 'start'.
2040 // The buffer size is 2116 // The buffer size is
2041 // size of instructions to patch + kGap 2117 // size of instructions to patch + kGap
(...skipping 34 matching lines...)
2076 class EnsureSpace BASE_EMBEDDED { 2152 class EnsureSpace BASE_EMBEDDED {
2077 public: 2153 public:
2078 explicit EnsureSpace(Assembler* assembler) { 2154 explicit EnsureSpace(Assembler* assembler) {
2079 assembler->CheckBuffer(); 2155 assembler->CheckBuffer();
2080 } 2156 }
2081 }; 2157 };
2082 2158
2083 } } // namespace v8::internal 2159 } } // namespace v8::internal
2084 2160
2085 #endif // V8_A64_ASSEMBLER_A64_H_ 2161 #endif // V8_A64_ASSEMBLER_A64_H_