OLD | NEW |
---|---|
1 // Copyright 2015 the V8 project authors. All rights reserved. | 1 // Copyright 2015 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef V8_COMPILER_CODE_STUB_ASSEMBLER_H_ | 5 #ifndef V8_COMPILER_CODE_STUB_ASSEMBLER_H_ |
6 #define V8_COMPILER_CODE_STUB_ASSEMBLER_H_ | 6 #define V8_COMPILER_CODE_STUB_ASSEMBLER_H_ |
7 | 7 |
8 #include <map> | 8 #include <map> |
9 | 9 |
10 // Clients of this interface shouldn't depend on lots of compiler internals. | 10 // Clients of this interface shouldn't depend on lots of compiler internals. |
(...skipping 50 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
61 V(Word64Equal) \ | 61 V(Word64Equal) \ |
62 V(Word64NotEqual) \ | 62 V(Word64NotEqual) \ |
63 V(Word64Or) \ | 63 V(Word64Or) \ |
64 V(Word64And) \ | 64 V(Word64And) \ |
65 V(Word64Xor) \ | 65 V(Word64Xor) \ |
66 V(Word64Shr) \ | 66 V(Word64Shr) \ |
67 V(Word64Sar) \ | 67 V(Word64Sar) \ |
68 V(Word64Ror) \ | 68 V(Word64Ror) \ |
69 V(UintPtrGreaterThanOrEqual) | 69 V(UintPtrGreaterThanOrEqual) |
70 | 70 |
71 enum AllocationFlags : uint8_t { | |
Benedikt Meurer
2016/02/25 13:00:40
Can you move this into the CodeStubAssembler?
epertoso
2016/02/25 13:26:54
Done.
| |
72 kNone = 0, | |
73 kTagObject = 1, | |
Benedikt Meurer
2016/02/25 13:00:41
Don't add kTagObject here. This is terrible in the
epertoso
2016/02/25 13:26:54
OK.
| |
74 kDoubleAlignment = 1 << 1, | |
75 kPretenured = 1 << 2 | |
76 }; | |
77 | |
78 inline AllocationFlags operator|(AllocationFlags lhs, AllocationFlags rhs) { | |
Benedikt Meurer
2016/02/25 13:00:40
Use base::Flags and DEFINE_OPERATORS_FOR_FLAGS here
epertoso
2016/02/25 13:26:53
Done.
| |
79 return static_cast<AllocationFlags>(static_cast<uint8_t>(lhs) | | |
80 static_cast<uint8_t>(rhs)); | |
81 } | |
82 | |
83 inline AllocationFlags operator&(AllocationFlags lhs, AllocationFlags rhs) { | |
84 return static_cast<AllocationFlags>(static_cast<uint8_t>(lhs) & | |
85 static_cast<uint8_t>(rhs)); | |
86 } | |
87 | |
88 inline AllocationFlags operator~(AllocationFlags arg) { | |
89 return static_cast<AllocationFlags>(~static_cast<uint8_t>(arg)); | |
90 } | |
91 | |
71 class CodeStubAssembler { | 92 class CodeStubAssembler { |
72 public: | 93 public: |
73 // |result_size| specifies the number of results returned by the stub. | 94 // |result_size| specifies the number of results returned by the stub. |
74 // TODO(rmcilroy): move result_size to the CallInterfaceDescriptor. | 95 // TODO(rmcilroy): move result_size to the CallInterfaceDescriptor. |
75 CodeStubAssembler(Isolate* isolate, Zone* zone, | 96 CodeStubAssembler(Isolate* isolate, Zone* zone, |
76 const CallInterfaceDescriptor& descriptor, | 97 const CallInterfaceDescriptor& descriptor, |
77 Code::Flags flags, const char* name, | 98 Code::Flags flags, const char* name, |
78 size_t result_size = 1); | 99 size_t result_size = 1); |
79 virtual ~CodeStubAssembler(); | 100 virtual ~CodeStubAssembler(); |
80 | 101 |
(...skipping 123 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
204 // Load an object pointer from a buffer that isn't in the heap. | 225 // Load an object pointer from a buffer that isn't in the heap. |
205 Node* LoadBufferObject(Node* buffer, int offset); | 226 Node* LoadBufferObject(Node* buffer, int offset); |
206 // Load a field from an object on the heap. | 227 // Load a field from an object on the heap. |
207 Node* LoadObjectField(Node* object, int offset); | 228 Node* LoadObjectField(Node* object, int offset); |
208 | 229 |
209 // Load an array element from a FixedArray. | 230 // Load an array element from a FixedArray. |
210 Node* LoadFixedArrayElementSmiIndex(Node* object, Node* smi_index, | 231 Node* LoadFixedArrayElementSmiIndex(Node* object, Node* smi_index, |
211 int additional_offset = 0); | 232 int additional_offset = 0); |
212 Node* LoadFixedArrayElementConstantIndex(Node* object, int index); | 233 Node* LoadFixedArrayElementConstantIndex(Node* object, int index); |
213 | 234 |
235 // Allocate in new space. | |
236 Node* Allocate(int size, AllocationFlags flags); | |
237 | |
214 protected: | 238 protected: |
215 // Protected helpers which delegate to RawMachineAssembler. | 239 // Protected helpers which delegate to RawMachineAssembler. |
216 Graph* graph(); | 240 Graph* graph(); |
217 Isolate* isolate(); | 241 Isolate* isolate(); |
218 Zone* zone(); | 242 Zone* zone(); |
219 | 243 |
220 // Enables subclasses to perform operations before and after a call. | 244 // Enables subclasses to perform operations before and after a call. |
221 virtual void CallPrologue(); | 245 virtual void CallPrologue(); |
222 virtual void CallEpilogue(); | 246 virtual void CallEpilogue(); |
223 | 247 |
224 private: | 248 private: |
225 friend class CodeStubAssemblerTester; | 249 friend class CodeStubAssemblerTester; |
226 | 250 |
227 Node* CallN(CallDescriptor* descriptor, Node* code_target, Node** args); | 251 Node* CallN(CallDescriptor* descriptor, Node* code_target, Node** args); |
228 Node* TailCallN(CallDescriptor* descriptor, Node* code_target, Node** args); | 252 Node* TailCallN(CallDescriptor* descriptor, Node* code_target, Node** args); |
229 | 253 |
230 Node* SmiShiftBitsConstant(); | 254 Node* SmiShiftBitsConstant(); |
231 | 255 |
256 Node* AllocateRawAligned(Node* size_in_bytes, AllocationFlags flags, | |
257 Node* top_address, Node* limit_address); | |
258 Node* AllocateRawUnaligned(Node* size_in_bytes, AllocationFlags flags, | |
259 Node* top_address, Node* limit_address); | |
260 | |
232 base::SmartPointer<RawMachineAssembler> raw_assembler_; | 261 base::SmartPointer<RawMachineAssembler> raw_assembler_; |
233 Code::Flags flags_; | 262 Code::Flags flags_; |
234 const char* name_; | 263 const char* name_; |
235 bool code_generated_; | 264 bool code_generated_; |
236 ZoneVector<Variable::Impl*> variables_; | 265 ZoneVector<Variable::Impl*> variables_; |
237 | 266 |
238 DISALLOW_COPY_AND_ASSIGN(CodeStubAssembler); | 267 DISALLOW_COPY_AND_ASSIGN(CodeStubAssembler); |
239 }; | 268 }; |
240 | 269 |
241 class CodeStubAssembler::Label { | 270 class CodeStubAssembler::Label { |
(...skipping 21 matching lines...) Expand all Loading... | |
263 // Map of variables to the list of value nodes that have been added from each | 292 // Map of variables to the list of value nodes that have been added from each |
264 // merge path in their order of merging. | 293 // merge path in their order of merging. |
265 std::map<Variable::Impl*, std::vector<Node*>> variable_merges_; | 294 std::map<Variable::Impl*, std::vector<Node*>> variable_merges_; |
266 }; | 295 }; |
267 | 296 |
268 } // namespace compiler | 297 } // namespace compiler |
269 } // namespace internal | 298 } // namespace internal |
270 } // namespace v8 | 299 } // namespace v8 |
271 | 300 |
272 #endif // V8_COMPILER_CODE_STUB_ASSEMBLER_H_ | 301 #endif // V8_COMPILER_CODE_STUB_ASSEMBLER_H_ |
OLD | NEW |