OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 67 matching lines...)
78 !info()->IsStub() || | 78 !info()->IsStub() || |
79 info()->requires_frame(); | 79 info()->requires_frame(); |
80 } | 80 } |
81 bool NeedsDeferredFrame() const { | 81 bool NeedsDeferredFrame() const { |
82 return !NeedsEagerFrame() && info()->is_deferred_calling(); | 82 return !NeedsEagerFrame() && info()->is_deferred_calling(); |
83 } | 83 } |
84 | 84 |
85 // Support for converting LOperands to assembler types. | 85 // Support for converting LOperands to assembler types. |
86 Register ToRegister(LOperand* op) const; | 86 Register ToRegister(LOperand* op) const; |
87 XMMRegister ToDoubleRegister(LOperand* op) const; | 87 XMMRegister ToDoubleRegister(LOperand* op) const; |
| 88 XMMRegister ToFloat32x4Register(LOperand* op) const; |
| 89 XMMRegister ToInt32x4Register(LOperand* op) const; |
88 bool IsInteger32Constant(LConstantOperand* op) const; | 90 bool IsInteger32Constant(LConstantOperand* op) const; |
89 bool IsSmiConstant(LConstantOperand* op) const; | 91 bool IsSmiConstant(LConstantOperand* op) const; |
90 int32_t ToInteger32(LConstantOperand* op) const; | 92 int32_t ToInteger32(LConstantOperand* op) const; |
91 Smi* ToSmi(LConstantOperand* op) const; | 93 Smi* ToSmi(LConstantOperand* op) const; |
92 double ToDouble(LConstantOperand* op) const; | 94 double ToDouble(LConstantOperand* op) const; |
93 ExternalReference ToExternalReference(LConstantOperand* op) const; | 95 ExternalReference ToExternalReference(LConstantOperand* op) const; |
94 bool IsTaggedConstant(LConstantOperand* op) const; | 96 bool IsTaggedConstant(LConstantOperand* op) const; |
95 Handle<Object> ToHandle(LConstantOperand* op) const; | 97 Handle<Object> ToHandle(LConstantOperand* op) const; |
96 Operand ToOperand(LOperand* op) const; | 98 Operand ToOperand(LOperand* op) const; |
97 | 99 |
98 // Try to generate code for the entire chunk, but it may fail if the | 100 // Try to generate code for the entire chunk, but it may fail if the |
99 // chunk contains constructs we cannot handle. Returns true if the | 101 // chunk contains constructs we cannot handle. Returns true if the |
100 // code generation attempt succeeded. | 102 // code generation attempt succeeded. |
101 bool GenerateCode(); | 103 bool GenerateCode(); |
102 | 104 |
103 // Finish the code by setting stack height, safepoint, and bailout | 105 // Finish the code by setting stack height, safepoint, and bailout |
104 // information on it. | 106 // information on it. |
105 void FinishCode(Handle<Code> code); | 107 void FinishCode(Handle<Code> code); |
106 | 108 |
107 // Deferred code support. | 109 // Deferred code support. |
108 void DoDeferredNumberTagD(LNumberTagD* instr); | 110 void DoDeferredNumberTagD(LNumberTagD* instr); |
| 111 void DoDeferredFloat32x4ToTagged(LFloat32x4ToTagged* instr); |
| 112 void DoDeferredInt32x4ToTagged(LInt32x4ToTagged* instr); |
109 void DoDeferredNumberTagU(LNumberTagU* instr); | 113 void DoDeferredNumberTagU(LNumberTagU* instr); |
110 void DoDeferredTaggedToI(LTaggedToI* instr, Label* done); | 114 void DoDeferredTaggedToI(LTaggedToI* instr, Label* done); |
111 void DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr); | 115 void DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr); |
112 void DoDeferredStackCheck(LStackCheck* instr); | 116 void DoDeferredStackCheck(LStackCheck* instr); |
113 void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr); | 117 void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr); |
114 void DoDeferredStringCharFromCode(LStringCharFromCode* instr); | 118 void DoDeferredStringCharFromCode(LStringCharFromCode* instr); |
115 void DoDeferredAllocate(LAllocate* instr); | 119 void DoDeferredAllocate(LAllocate* instr); |
116 void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, | 120 void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, |
117 Label* map_check); | 121 Label* map_check); |
118 void DoDeferredInstanceMigration(LCheckMaps* instr, Register object); | 122 void DoDeferredInstanceMigration(LCheckMaps* instr, Register object); |
(...skipping 120 matching lines...)
239 int* object_index_pointer, | 243 int* object_index_pointer, |
240 int* dematerialized_index_pointer); | 244 int* dematerialized_index_pointer); |
241 void RegisterDependentCodeForEmbeddedMaps(Handle<Code> code); | 245 void RegisterDependentCodeForEmbeddedMaps(Handle<Code> code); |
242 void PopulateDeoptimizationData(Handle<Code> code); | 246 void PopulateDeoptimizationData(Handle<Code> code); |
243 int DefineDeoptimizationLiteral(Handle<Object> literal); | 247 int DefineDeoptimizationLiteral(Handle<Object> literal); |
244 | 248 |
245 void PopulateDeoptimizationLiteralsWithInlinedFunctions(); | 249 void PopulateDeoptimizationLiteralsWithInlinedFunctions(); |
246 | 250 |
247 Register ToRegister(int index) const; | 251 Register ToRegister(int index) const; |
248 XMMRegister ToDoubleRegister(int index) const; | 252 XMMRegister ToDoubleRegister(int index) const; |
| 253 XMMRegister ToFloat32x4Register(int index) const; |
| 254 XMMRegister ToInt32x4Register(int index) const; |
249 Operand BuildFastArrayOperand( | 255 Operand BuildFastArrayOperand( |
250 LOperand* elements_pointer, | 256 LOperand* elements_pointer, |
251 LOperand* key, | 257 LOperand* key, |
252 ElementsKind elements_kind, | 258 ElementsKind elements_kind, |
253 uint32_t offset, | 259 uint32_t offset, |
254 uint32_t additional_index = 0); | 260 uint32_t additional_index = 0); |
255 | 261 |
256 Operand BuildSeqStringOperand(Register string, | 262 Operand BuildSeqStringOperand(Register string, |
257 LOperand* index, | 263 LOperand* index, |
258 String::Encoding encoding); | 264 String::Encoding encoding); |
(...skipping 154 matching lines...)
413 Label entry_; | 419 Label entry_; |
414 Label exit_; | 420 Label exit_; |
415 Label done_; | 421 Label done_; |
416 Label* external_exit_; | 422 Label* external_exit_; |
417 int instruction_index_; | 423 int instruction_index_; |
418 }; | 424 }; |
419 | 425 |
420 } } // namespace v8::internal | 426 } } // namespace v8::internal |
421 | 427 |
422 #endif // V8_X64_LITHIUM_CODEGEN_X64_H_ | 428 #endif // V8_X64_LITHIUM_CODEGEN_X64_H_ |
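
A minimal usage sketch for the new ToFloat32x4Register() helper declared above, showing how an instruction handler in lithium-codegen-x64.cc could consume it. LFloat32x4Add, its left()/right() accessors, and the addps() call are assumptions made for illustration only and are not part of this header change.

// Hypothetical handler sketch, not part of this patch.
void LCodeGen::DoFloat32x4Add(LFloat32x4Add* instr) {
  // The register allocator hands back LOperands; the new helpers map them
  // to the XMM registers holding the unboxed 128-bit SIMD values.
  XMMRegister left = ToFloat32x4Register(instr->left());
  XMMRegister right = ToFloat32x4Register(instr->right());
  XMMRegister result = ToFloat32x4Register(instr->result());
  ASSERT(result.is(left));  // x64 lithium commonly fixes result == left.
  __ addps(left, right);    // packed single-precision add (SSE).
}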