| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 124 matching lines...) |
| 135 static const int kCodeSizeMultiplier = 142; | 135 static const int kCodeSizeMultiplier = 142; |
| 136 #elif V8_TARGET_ARCH_A64 | 136 #elif V8_TARGET_ARCH_A64 |
| 137 // TODO(all): Copied ARM value. Check this is sensible for A64. | 137 // TODO(all): Copied ARM value. Check this is sensible for A64. |
| 138 static const int kCodeSizeMultiplier = 142; | 138 static const int kCodeSizeMultiplier = 142; |
| 139 #elif V8_TARGET_ARCH_MIPS | 139 #elif V8_TARGET_ARCH_MIPS |
| 140 static const int kCodeSizeMultiplier = 142; | 140 static const int kCodeSizeMultiplier = 142; |
| 141 #else | 141 #else |
| 142 #error Unsupported target architecture. | 142 #error Unsupported target architecture. |
| 143 #endif | 143 #endif |
| 144 | 144 |
| 145 class BackEdgeTableIterator { | |
| 146 public: | |
| 147 explicit BackEdgeTableIterator(Code* unoptimized, | |
| 148 DisallowHeapAllocation* required) { | |
| 149 ASSERT(unoptimized->kind() == Code::FUNCTION); | |
| 150 instruction_start_ = unoptimized->instruction_start(); | |
| 151 cursor_ = instruction_start_ + unoptimized->back_edge_table_offset(); | |
| 152 ASSERT(cursor_ < instruction_start_ + unoptimized->instruction_size()); | |
| 153 table_length_ = Memory::uint32_at(cursor_); | |
| 154 cursor_ += kTableLengthSize; | |
| 155 end_ = cursor_ + table_length_ * kEntrySize; | |
| 156 } | |
| 157 | |
| 158 bool Done() { return cursor_ >= end_; } | |
| 159 | |
| 160 void Next() { | |
| 161 ASSERT(!Done()); | |
| 162 cursor_ += kEntrySize; | |
| 163 } | |
| 164 | |
| 165 BailoutId ast_id() { | |
| 166 ASSERT(!Done()); | |
| 167 return BailoutId(static_cast<int>( | |
| 168 Memory::uint32_at(cursor_ + kAstIdOffset))); | |
| 169 } | |
| 170 | |
| 171 uint32_t loop_depth() { | |
| 172 ASSERT(!Done()); | |
| 173 return Memory::uint32_at(cursor_ + kLoopDepthOffset); | |
| 174 } | |
| 175 | |
| 176 uint32_t pc_offset() { | |
| 177 ASSERT(!Done()); | |
| 178 return Memory::uint32_at(cursor_ + kPcOffsetOffset); | |
| 179 } | |
| 180 | |
| 181 Address pc() { | |
| 182 ASSERT(!Done()); | |
| 183 return instruction_start_ + pc_offset(); | |
| 184 } | |
| 185 | |
| 186 uint32_t table_length() { return table_length_; } | |
| 187 | |
| 188 private: | |
| 189 static const int kTableLengthSize = kIntSize; | |
| 190 static const int kAstIdOffset = 0 * kIntSize; | |
| 191 static const int kPcOffsetOffset = 1 * kIntSize; | |
| 192 static const int kLoopDepthOffset = 2 * kIntSize; | |
| 193 static const int kEntrySize = 3 * kIntSize; | |
| 194 | |
| 195 Address cursor_; | |
| 196 Address end_; | |
| 197 Address instruction_start_; | |
| 198 uint32_t table_length_; | |
| 199 | |
| 200 DISALLOW_COPY_AND_ASSIGN(BackEdgeTableIterator); | |
| 201 }; | |
| 202 | |
| 203 | |
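For context, a minimal usage sketch (not part of this change) of how the cursor-based iterator deleted above is typically driven: read the length prefix once, then advance over the 3-word entries. `ProcessBackEdge`, `VisitBackEdgesWithIterator` and `allowed_loop_depth` are illustrative placeholders, not names from this patch.

```cpp
// Sketch only: ProcessBackEdge is an illustrative placeholder, not a V8 API.
void ProcessBackEdge(BailoutId ast_id, Address pc);

void VisitBackEdgesWithIterator(Code* unoptimized,
                                uint32_t allowed_loop_depth) {
  DisallowHeapAllocation no_gc;
  // The iterator caches the table length and walks a cursor over the entries.
  for (FullCodeGenerator::BackEdgeTableIterator back_edges(unoptimized, &no_gc);
       !back_edges.Done();
       back_edges.Next()) {
    if (back_edges.loop_depth() <= allowed_loop_depth) {
      ProcessBackEdge(back_edges.ast_id(), back_edges.pc());
    }
  }
}
```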
| 204 private: | 145 private: |
| 205 class Breakable; | 146 class Breakable; |
| 206 class Iteration; | 147 class Iteration; |
| 207 | 148 |
| 208 class TestContext; | 149 class TestContext; |
| 209 | 150 |
| 210 class NestedStatement BASE_EMBEDDED { | 151 class NestedStatement BASE_EMBEDDED { |
| 211 public: | 152 public: |
| 212 explicit NestedStatement(FullCodeGenerator* codegen) : codegen_(codegen) { | 153 explicit NestedStatement(FullCodeGenerator* codegen) : codegen_(codegen) { |
| 213 // Link into codegen's nesting stack. | 154 // Link into codegen's nesting stack. |
| (...skipping 722 matching lines...) |
| 936 Iterator it = find(literal, true, ZoneAllocationPolicy(zone_)); | 877 Iterator it = find(literal, true, ZoneAllocationPolicy(zone_)); |
| 937 if (it->second == NULL) it->second = new(zone_) ObjectLiteral::Accessors(); | 878 if (it->second == NULL) it->second = new(zone_) ObjectLiteral::Accessors(); |
| 938 return it; | 879 return it; |
| 939 } | 880 } |
| 940 | 881 |
| 941 private: | 882 private: |
| 942 Zone* zone_; | 883 Zone* zone_; |
| 943 }; | 884 }; |
| 944 | 885 |
| 945 | 886 |
| 887 class BackEdgeTable { |
| 888 public: |
| 889 BackEdgeTable(Code* code, DisallowHeapAllocation* required) { |
| 890 ASSERT(code->kind() == Code::FUNCTION); |
| 891 instruction_start_ = code->instruction_start(); |
| 892 Address table_address = instruction_start_ + code->back_edge_table_offset(); |
| 893 length_ = Memory::uint32_at(table_address); |
| 894 start_ = table_address + kTableLengthSize; |
| 895 } |
| 896 |
| 897 uint32_t length() { return length_; } |
| 898 |
| 899 BailoutId ast_id(uint32_t index) { |
| 900 return BailoutId(static_cast<int>( |
| 901 Memory::uint32_at(entry_at(index) + kAstIdOffset))); |
| 902 } |
| 903 |
| 904 uint32_t loop_depth(uint32_t index) { |
| 905 return Memory::uint32_at(entry_at(index) + kLoopDepthOffset); |
| 906 } |
| 907 |
| 908 uint32_t pc_offset(uint32_t index) { |
| 909 return Memory::uint32_at(entry_at(index) + kPcOffsetOffset); |
| 910 } |
| 911 |
| 912 Address pc(uint32_t index) { |
| 913 return instruction_start_ + pc_offset(index); |
| 914 } |
| 915 |
| 916 enum BackEdgeState { |
| 917 INTERRUPT, |
| 918 ON_STACK_REPLACEMENT |
| 919 }; |
| 920 |
| 921 // Patch all interrupts with allowed loop depth in the unoptimized code to |
| 922 // unconditionally call replacement_code. |
| 923 static void Patch(Isolate* isolate, |
| 924 Code* unoptimized_code); |
| 925 |
| 926 // Patch the interrupt at the instruction before pc_after in |
| 927 // the unoptimized code to unconditionally call replacement_code. |
| 928 static void PatchAt(Code* unoptimized_code, |
| 929 Address pc_after, |
| 930 Code* replacement_code); |
| 931 |
| 932 // Change all interrupts patched in the unoptimized code |
| 933 // back to normal interrupts. |
| 934 static void Revert(Isolate* isolate, |
| 935 Code* unoptimized_code); |
| 936 |
| 937 // Change the patched interrupt at pc_after in the unoptimized code |
| 938 // back to a normal interrupt. |
| 939 static void RevertAt(Code* unoptimized_code, |
| 940 Address pc_after, |
| 941 Code* interrupt_code); |
| 942 |
| 943 #ifdef DEBUG |
| 944 static BackEdgeState GetBackEdgeState(Isolate* isolate, |
| 945 Code* unoptimized_code, |
| 946 Address pc_after); |
| 947 |
| 948 // Verify that all back edges of a certain loop depth are patched. |
| 949 static bool Verify(Isolate* isolate, |
| 950 Code* unoptimized_code, |
| 951 int loop_nesting_level); |
| 952 #endif // DEBUG |
| 953 |
| 954 private: |
| 955 Address entry_at(uint32_t index) { |
| 956 ASSERT(index < length_); |
| 957 return start_ + index * kEntrySize; |
| 958 } |
| 959 |
| 960 static const int kTableLengthSize = kIntSize; |
| 961 static const int kAstIdOffset = 0 * kIntSize; |
| 962 static const int kPcOffsetOffset = 1 * kIntSize; |
| 963 static const int kLoopDepthOffset = 2 * kIntSize; |
| 964 static const int kEntrySize = 3 * kIntSize; |
| 965 |
| 966 Address start_; |
| 967 Address instruction_start_; |
| 968 uint32_t length_; |
| 969 }; |
| 970 |
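The equivalent walk over the new class is index-based; a sketch only, with the same illustrative placeholders (`ProcessBackEdge`, `VisitBackEdgesWithTable`, `allowed_loop_depth`) as above:

```cpp
// Sketch only: ProcessBackEdge is an illustrative placeholder, not a V8 API.
void ProcessBackEdge(BailoutId ast_id, Address pc);

void VisitBackEdgesWithTable(Code* unoptimized, uint32_t allowed_loop_depth) {
  DisallowHeapAllocation no_gc;
  BackEdgeTable back_edges(unoptimized, &no_gc);
  for (uint32_t i = 0; i < back_edges.length(); i++) {
    // Each kEntrySize (3 * kIntSize) entry holds { ast id, pc offset, loop depth }.
    if (back_edges.loop_depth(i) <= allowed_loop_depth) {
      ProcessBackEdge(back_edges.ast_id(i), back_edges.pc(i));
    }
  }
}
```

The entry layout constants (kTableLengthSize, kAstIdOffset, kPcOffsetOffset, kLoopDepthOffset, kEntrySize) are unchanged from the removed iterator; the difference is random access through entry_at(index) instead of a moving cursor.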
| 971 |
| 946 } } // namespace v8::internal | 972 } } // namespace v8::internal |
| 947 | 973 |
| 948 #endif // V8_FULL_CODEGEN_H_ | 974 #endif // V8_FULL_CODEGEN_H_ |