OLD | NEW |
---|---|
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 11 matching lines...) | |
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
27 | 27 |
28 #ifndef V8_MIPS_LITHIUM_CODEGEN_MIPS_H_ | 28 #ifndef V8_MIPS_LITHIUM_CODEGEN_MIPS_H_ |
29 #define V8_MIPS_LITHIUM_CODEGEN_MIPS_H_ | 29 #define V8_MIPS_LITHIUM_CODEGEN_MIPS_H_ |
30 | 30 |
31 #include "mips/lithium-mips.h" | 31 #include "mips/lithium-mips.h" |
32 | 32 #include "mips/lithium-gap-resolver-mips.h" |
33 #include "deoptimizer.h" | 33 #include "deoptimizer.h" |
34 #include "safepoint-table.h" | 34 #include "safepoint-table.h" |
35 #include "scopes.h" | 35 #include "scopes.h" |
36 | 36 |
37 // Note: this file was taken from the X64 version. ARM has a partially working | |
38 // lithium implementation, but for now it is not ported to mips. | |
39 | |
40 namespace v8 { | 37 namespace v8 { |
41 namespace internal { | 38 namespace internal { |
42 | 39 |
43 // Forward declarations. | 40 // Forward declarations. |
44 class LDeferredCode; | 41 class LDeferredCode; |
42 class SafepointGenerator; | |
45 | 43 |
46 class LCodeGen BASE_EMBEDDED { | 44 class LCodeGen BASE_EMBEDDED { |
47 public: | 45 public: |
48 LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info) { } | 46 LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info) |
47 : chunk_(chunk), | |
48 masm_(assembler), | |
49 info_(info), | |
50 current_block_(-1), | |
51 current_instruction_(-1), | |
52 instructions_(chunk->instructions()), | |
53 deoptimizations_(4), | |
54 deopt_jump_table_(4), | |
55 deoptimization_literals_(8), | |
56 inlined_function_count_(0), | |
57 scope_(info->scope()), | |
58 status_(UNUSED), | |
59 deferred_(8), | |
60 osr_pc_offset_(-1), | |
61 resolver_(this), | |
62 expected_safepoint_kind_(Safepoint::kSimple) { | |
63 PopulateDeoptimizationLiteralsWithInlinedFunctions(); | |
64 } | |
65 | |
66 | |
67 // Simple accessors. | |
68 MacroAssembler* masm() const { return masm_; } | |
69 CompilationInfo* info() const { return info_; } | |
70 Isolate* isolate() const { return info_->isolate(); } | |
71 Factory* factory() const { return isolate()->factory(); } | |
72 Heap* heap() const { return isolate()->heap(); } | |
73 | |
74 // Support for converting LOperands to assembler types. | |
75 // LOperand must be a register. | |
76 Register ToRegister(LOperand* op) const; | |
77 | |
78 // LOperand is loaded into scratch, unless already a register. | |
79 Register EmitLoadRegister(LOperand* op, Register scratch); | |
80 | |
81 // LOperand must be a double register. | |
82 DoubleRegister ToDoubleRegister(LOperand* op) const; | |
83 | |
84 // LOperand is loaded into dbl_scratch, unless already a double register. | |
85 DoubleRegister EmitLoadDoubleRegister(LOperand* op, | |
86 FloatRegister flt_scratch, | |
87 DoubleRegister dbl_scratch); | |
88 int ToInteger32(LConstantOperand* op) const; | |
89 Operand ToOperand(LOperand* op); | |
90 MemOperand ToMemOperand(LOperand* op) const; | |
91 // Returns a MemOperand pointing to the high word of a DoubleStackSlot. | |
92 MemOperand ToHighMemOperand(LOperand* op) const; | |
49 | 93 |
50 // Try to generate code for the entire chunk, but it may fail if the | 94 // Try to generate code for the entire chunk, but it may fail if the |
51 // chunk contains constructs we cannot handle. Returns true if the | 95 // chunk contains constructs we cannot handle. Returns true if the |
52 // code generation attempt succeeded. | 96 // code generation attempt succeeded. |
53 bool GenerateCode() { | 97 bool GenerateCode(); |
54 UNIMPLEMENTED(); | |
55 return false; | |
56 } | |
57 | 98 |
58 // Finish the code by setting stack height, safepoint, and bailout | 99 // Finish the code by setting stack height, safepoint, and bailout |
59 // information on it. | 100 // information on it. |
60 void FinishCode(Handle<Code> code) { UNIMPLEMENTED(); } | 101 void FinishCode(Handle<Code> code); |
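For context, a rough sketch of how a driver (along the lines of LChunk::Codegen() in the other ports) would invoke these two entry points; the locals, the flags computation, and the MakeCodeEpilogue call are assumptions for illustration, not part of this patch:

    // Hypothetical driver sequence, not code from this change.
    MacroAssembler assembler(info->isolate(), NULL, 0);
    LCodeGen generator(chunk, &assembler, info);
    if (generator.GenerateCode()) {
      // Turn the buffer into a Code object, then attach safepoint and
      // deoptimization data to it via FinishCode().
      Code::Flags flags = Code::ComputeFlags(Code::OPTIMIZED_FUNCTION);
      Handle<Code> code =
          CodeGenerator::MakeCodeEpilogue(&assembler, flags, info);
      generator.FinishCode(code);
    }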
102 | |
103 // Deferred code support. | |
104 template<int T> | |
105 void DoDeferredBinaryOpStub(LTemplateInstruction<1, 2, T>* instr, | |
106 Token::Value op); | |
107 void DoDeferredNumberTagD(LNumberTagD* instr); | |
108 void DoDeferredNumberTagI(LNumberTagI* instr); | |
109 void DoDeferredTaggedToI(LTaggedToI* instr); | |
110 void DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr); | |
111 void DoDeferredStackCheck(LStackCheck* instr); | |
112 void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr); | |
113 void DoDeferredStringCharFromCode(LStringCharFromCode* instr); | |
114 void DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, | |
115 Label* map_check); | |
116 | |
117 // Parallel move support. | |
118 void DoParallelMove(LParallelMove* move); | |
119 void DoGap(LGap* instr); | |
120 | |
121 // Emit frame translation commands for an environment. | |
122 void WriteTranslation(LEnvironment* environment, Translation* translation); | |
123 | |
124 // Declare methods that deal with the individual node types. | |
125 #define DECLARE_DO(type) void Do##type(L##type* node); | |
126 LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO) | |
127 #undef DECLARE_DO | |
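As a reminder of what the macro above generates: assuming LITHIUM_CONCRETE_INSTRUCTION_LIST contains an AddI entry, DECLARE_DO stamps out one declaration per instruction type:

    // DECLARE_DO(AddI) expands to:
    void DoAddI(LAddI* node);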
128 | |
129 private: | |
130 enum Status { | |
131 UNUSED, | |
132 GENERATING, | |
133 DONE, | |
134 ABORTED | |
135 }; | |
136 | |
137 bool is_unused() const { return status_ == UNUSED; } | |
138 bool is_generating() const { return status_ == GENERATING; } | |
139 bool is_done() const { return status_ == DONE; } | |
140 bool is_aborted() const { return status_ == ABORTED; } | |
141 | |
142 int strict_mode_flag() const { | |
143 return info()->is_strict_mode() ? kStrictMode : kNonStrictMode; | |
144 } | |
145 | |
146 LChunk* chunk() const { return chunk_; } | |
147 Scope* scope() const { return scope_; } | |
148 HGraph* graph() const { return chunk_->graph(); } | |
149 | |
150 Register scratch0() { return lithiumScratchReg; } | |
151 Register scratch1() { return lithiumScratchReg2; } | |
152 DoubleRegister double_scratch0() { return lithiumScratchDouble; } | |
153 | |
154 int GetNextEmittedBlock(int block); | |
155 LInstruction* GetNextInstruction(); | |
156 | |
157 void EmitClassOfTest(Label* if_true, | |
158 Label* if_false, | |
159 Handle<String> class_name, | |
160 Register input, | |
161 Register temporary, | |
162 Register temporary2); | |
163 | |
164 int GetStackSlotCount() const { return chunk()->spill_slot_count(); } | |
165 int GetParameterCount() const { return scope()->num_parameters(); } | |
166 | |
167 void Abort(const char* format, ...); | |
168 void Comment(const char* format, ...); | |
169 | |
170 void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code); } | |
171 | |
172 // Code generation passes. Returns true if code generation should | |
173 // continue. | |
174 bool GeneratePrologue(); | |
175 bool GenerateBody(); | |
176 bool GenerateDeferredCode(); | |
177 bool GenerateDeoptJumpTable(); | |
178 bool GenerateSafepointTable(); | |
179 | |
180 enum SafepointMode { | |
181 RECORD_SIMPLE_SAFEPOINT, | |
182 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS | |
183 }; | |
184 | |
185 void CallCode(Handle<Code> code, | |
186 RelocInfo::Mode mode, | |
187 LInstruction* instr); | |
188 | |
189 void CallCodeGeneric(Handle<Code> code, | |
190 RelocInfo::Mode mode, | |
191 LInstruction* instr, | |
192 SafepointMode safepoint_mode); | |
193 | |
194 void CallRuntime(const Runtime::Function* function, | |
195 int num_arguments, | |
196 LInstruction* instr); | |
197 | |
198 void CallRuntime(Runtime::FunctionId id, | |
199 int num_arguments, | |
200 LInstruction* instr) { | |
201 const Runtime::Function* function = Runtime::FunctionForId(id); | |
202 CallRuntime(function, num_arguments, instr); | |
203 } | |
204 | |
205 void CallRuntimeFromDeferred(Runtime::FunctionId id, | |
206 int argc, | |
207 LInstruction* instr); | |
208 | |
209 // Generate a direct call to a known function. Expects the function | |
 210 // to be in a1. | 
fschneider 2011/10/18 14:27:58: Wrong register in comment: Function expected in a1
Paul Lind 2011/10/19 02:18:27: Done.
| |
211 void CallKnownFunction(Handle<JSFunction> function, | |
212 int arity, | |
213 LInstruction* instr, | |
214 CallKind call_kind); | |
215 | |
216 void LoadHeapObject(Register result, Handle<HeapObject> object); | |
217 | |
218 void RegisterLazyDeoptimization(LInstruction* instr, | |
219 SafepointMode safepoint_mode); | |
220 | |
221 void RegisterEnvironmentForDeoptimization(LEnvironment* environment); | |
222 void DeoptimizeIf(Condition cc, | |
223 LEnvironment* environment, | |
224 Register src1, | |
225 const Operand& src2); | |
226 | |
227 void AddToTranslation(Translation* translation, | |
228 LOperand* op, | |
229 bool is_tagged); | |
230 void PopulateDeoptimizationData(Handle<Code> code); | |
231 int DefineDeoptimizationLiteral(Handle<Object> literal); | |
232 | |
233 void PopulateDeoptimizationLiteralsWithInlinedFunctions(); | |
234 | |
235 Register ToRegister(int index) const; | |
236 DoubleRegister ToDoubleRegister(int index) const; | |
237 | |
238 // Specific math operations - used from DoUnaryMathOperation. | |
239 void EmitIntegerMathAbs(LUnaryMathOperation* instr); | |
240 void DoMathAbs(LUnaryMathOperation* instr); | |
241 void DoMathFloor(LUnaryMathOperation* instr); | |
242 void DoMathRound(LUnaryMathOperation* instr); | |
243 void DoMathSqrt(LUnaryMathOperation* instr); | |
244 void DoMathPowHalf(LUnaryMathOperation* instr); | |
245 void DoMathLog(LUnaryMathOperation* instr); | |
246 void DoMathCos(LUnaryMathOperation* instr); | |
247 void DoMathSin(LUnaryMathOperation* instr); | |
248 | |
249 // Support for recording safepoint and position information. | |
250 void RecordSafepoint(LPointerMap* pointers, | |
251 Safepoint::Kind kind, | |
252 int arguments, | |
253 int deoptimization_index); | |
254 void RecordSafepoint(LPointerMap* pointers, int deoptimization_index); | |
255 void RecordSafepoint(int deoptimization_index); | |
256 void RecordSafepointWithRegisters(LPointerMap* pointers, | |
257 int arguments, | |
258 int deoptimization_index); | |
259 void RecordSafepointWithRegistersAndDoubles(LPointerMap* pointers, | |
260 int arguments, | |
261 int deoptimization_index); | |
262 void RecordPosition(int position); | |
263 int LastSafepointEnd() { | |
264 return static_cast<int>(safepoints_.GetPcAfterGap()); | |
265 } | |
266 | |
267 static Condition TokenToCondition(Token::Value op, bool is_unsigned); | |
268 void EmitGoto(int block); | |
269 void EmitBranch(int left_block, | |
270 int right_block, | |
271 Condition cc, | |
272 Register src1, | |
273 const Operand& src2); | |
274 void EmitBranchF(int left_block, | |
275 int right_block, | |
276 Condition cc, | |
277 FPURegister src1, | |
278 FPURegister src2); | |
279 void EmitCmpI(LOperand* left, LOperand* right); | |
280 void EmitNumberUntagD(Register input, | |
281 DoubleRegister result, | |
282 bool deoptimize_on_undefined, | |
283 LEnvironment* env); | |
284 | |
285 // Emits optimized code for typeof x == "y". Modifies input register. | |
286 // Returns the condition on which a final split to | |
287 // true and false label should be made, to optimize fallthrough. | |
288 // Returns two registers in cmp1 and cmp2 that can be used in the | |
289 // Branch instruction after EmitTypeofIs. | |
290 Condition EmitTypeofIs(Label* true_label, | |
291 Label* false_label, | |
292 Register input, | |
293 Handle<String> type_name, | |
294 Register& cmp1, | |
295 Operand& cmp2); | |
296 | |
297 // Emits optimized code for %_IsObject(x). Preserves input register. | |
298 // Returns the condition on which a final split to | |
299 // true and false label should be made, to optimize fallthrough. | |
300 Condition EmitIsObject(Register input, | |
301 Register temp1, | |
302 Label* is_not_object, | |
303 Label* is_object); | |
304 | |
305 // Emits optimized code for %_IsConstructCall(). | |
306 // Caller should branch on equal condition. | |
307 void EmitIsConstructCall(Register temp1, Register temp2); | |
308 | |
309 void EmitLoadFieldOrConstantFunction(Register result, | |
310 Register object, | |
311 Handle<Map> type, | |
312 Handle<String> name); | |
313 | |
314 struct JumpTableEntry { | |
315 explicit inline JumpTableEntry(Address entry) | |
316 : label(), | |
317 address(entry) { } | |
318 Label label; | |
319 Address address; | |
320 }; | |
321 | |
322 LChunk* const chunk_; | |
323 MacroAssembler* const masm_; | |
324 CompilationInfo* const info_; | |
325 | |
326 int current_block_; | |
327 int current_instruction_; | |
328 const ZoneList<LInstruction*>* instructions_; | |
329 ZoneList<LEnvironment*> deoptimizations_; | |
330 ZoneList<JumpTableEntry> deopt_jump_table_; | |
331 ZoneList<Handle<Object> > deoptimization_literals_; | |
332 int inlined_function_count_; | |
333 Scope* const scope_; | |
334 Status status_; | |
335 TranslationBuffer translations_; | |
336 ZoneList<LDeferredCode*> deferred_; | |
337 int osr_pc_offset_; | |
338 | |
339 // Builder that keeps track of safepoints in the code. The table | |
340 // itself is emitted at the end of the generated code. | |
341 SafepointTableBuilder safepoints_; | |
342 | |
343 // Compiler from a set of parallel moves to a sequential list of moves. | |
344 LGapResolver resolver_; | |
345 | |
346 Safepoint::Kind expected_safepoint_kind_; | |
347 | |
348 class PushSafepointRegistersScope BASE_EMBEDDED { | |
349 public: | |
350 PushSafepointRegistersScope(LCodeGen* codegen, | |
351 Safepoint::Kind kind) | |
352 : codegen_(codegen) { | |
353 ASSERT(codegen_->expected_safepoint_kind_ == Safepoint::kSimple); | |
354 codegen_->expected_safepoint_kind_ = kind; | |
355 | |
356 switch (codegen_->expected_safepoint_kind_) { | |
357 case Safepoint::kWithRegisters: | |
358 codegen_->masm_->PushSafepointRegisters(); | |
359 break; | |
360 case Safepoint::kWithRegistersAndDoubles: | |
361 codegen_->masm_->PushSafepointRegistersAndDoubles(); | |
362 break; | |
363 default: | |
364 UNREACHABLE(); | |
365 } | |
366 } | |
367 | |
368 ~PushSafepointRegistersScope() { | |
369 Safepoint::Kind kind = codegen_->expected_safepoint_kind_; | |
370 ASSERT((kind & Safepoint::kWithRegisters) != 0); | |
371 switch (kind) { | |
372 case Safepoint::kWithRegisters: | |
373 codegen_->masm_->PopSafepointRegisters(); | |
374 break; | |
375 case Safepoint::kWithRegistersAndDoubles: | |
376 codegen_->masm_->PopSafepointRegistersAndDoubles(); | |
377 break; | |
378 default: | |
379 UNREACHABLE(); | |
380 } | |
381 codegen_->expected_safepoint_kind_ = Safepoint::kSimple; | |
382 } | |
383 | |
384 private: | |
385 LCodeGen* codegen_; | |
386 }; | |
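A minimal usage sketch for the RAII scope above, following the pattern the other ports use around calls that clobber the register file; the handler name and runtime id are hypothetical:

    void LCodeGen::DoDeferredHypotheticalCall(LInstruction* instr) {
      // Spill the registers so the safepoint can describe them, make the
      // call, then let the destructor restore them and reset the expected
      // safepoint kind to kSimple.
      PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
      CallRuntimeFromDeferred(Runtime::kHypotheticalId, 0, instr);
    }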
387 | |
388 friend class LDeferredCode; | |
389 friend class LEnvironment; | |
390 friend class SafepointGenerator; | |
391 DISALLOW_COPY_AND_ASSIGN(LCodeGen); | |
61 }; | 392 }; |
62 | 393 |
394 | |
395 class LDeferredCode: public ZoneObject { | |
396 public: | |
397 explicit LDeferredCode(LCodeGen* codegen) | |
398 : codegen_(codegen), | |
399 external_exit_(NULL), | |
400 instruction_index_(codegen->current_instruction_) { | |
401 codegen->AddDeferredCode(this); | |
402 } | |
403 | |
404 virtual ~LDeferredCode() { } | |
405 virtual void Generate() = 0; | |
406 virtual LInstruction* instr() = 0; | |
407 | |
408 void SetExit(Label *exit) { external_exit_ = exit; } | |
409 Label* entry() { return &entry_; } | |
410 Label* exit() { return external_exit_ != NULL ? external_exit_ : &exit_; } | |
411 int instruction_index() const { return instruction_index_; } | |
412 | |
413 protected: | |
414 LCodeGen* codegen() const { return codegen_; } | |
415 MacroAssembler* masm() const { return codegen_->masm(); } | |
416 | |
417 private: | |
418 LCodeGen* codegen_; | |
419 Label entry_; | |
420 Label exit_; | |
421 Label* external_exit_; | |
422 int instruction_index_; | |
423 }; | |
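Concrete deferred-code classes are expected to subclass LDeferredCode in the .cc file; a sketch for the DoDeferredNumberTagI path declared above, mirroring the ARM and IA-32 ports, would look roughly like this:

    class DeferredNumberTagI: public LDeferredCode {
     public:
      DeferredNumberTagI(LCodeGen* codegen, LNumberTagI* instr)
          : LDeferredCode(codegen), instr_(instr) { }
      // Emitted out of line; control returns through exit() afterwards.
      virtual void Generate() { codegen()->DoDeferredNumberTagI(instr_); }
      virtual LInstruction* instr() { return instr_; }
     private:
      LNumberTagI* instr_;
    };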
424 | |
63 } } // namespace v8::internal | 425 } } // namespace v8::internal |
64 | 426 |
65 #endif // V8_MIPS_LITHIUM_CODEGEN_MIPS_H_ | 427 #endif // V8_MIPS_LITHIUM_CODEGEN_MIPS_H_ |