| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 14 matching lines...) |
| 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
| 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| 27 | 27 |
| 28 #ifndef V8_X64_LITHIUM_CODEGEN_X64_H_ | 28 #ifndef V8_X64_LITHIUM_CODEGEN_X64_H_ |
| 29 #define V8_X64_LITHIUM_CODEGEN_X64_H_ | 29 #define V8_X64_LITHIUM_CODEGEN_X64_H_ |
| 30 | 30 |
| 31 #include "x64/lithium-x64.h" | 31 #include "x64/lithium-x64.h" |
| 32 | 32 |
| 33 #include "checks.h" | 33 #include "checks.h" |
| 34 #include "deoptimizer.h" | 34 #include "deoptimizer.h" |
| 35 #include "lithium-codegen.h" |
| 35 #include "safepoint-table.h" | 36 #include "safepoint-table.h" |
| 36 #include "scopes.h" | 37 #include "scopes.h" |
| 37 #include "v8utils.h" | 38 #include "v8utils.h" |
| 38 #include "x64/lithium-gap-resolver-x64.h" | 39 #include "x64/lithium-gap-resolver-x64.h" |
| 39 | 40 |
| 40 namespace v8 { | 41 namespace v8 { |
| 41 namespace internal { | 42 namespace internal { |
| 42 | 43 |
| 43 // Forward declarations. | 44 // Forward declarations. |
| 44 class LDeferredCode; | 45 class LDeferredCode; |
| 45 class SafepointGenerator; | 46 class SafepointGenerator; |
| 46 | 47 |
| 47 class LCodeGen V8_FINAL BASE_EMBEDDED { | 48 class LCodeGen: public LCodeGenBase { |
| 48 public: | 49 public: |
| 49 LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info) | 50 LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info) |
| 50 : zone_(info->zone()), | 51 : LCodeGenBase(chunk, assembler, info), |
| 51 chunk_(static_cast<LPlatformChunk*>(chunk)), | |
| 52 masm_(assembler), | |
| 53 info_(info), | |
| 54 current_block_(-1), | |
| 55 current_instruction_(-1), | |
| 56 instructions_(chunk->instructions()), | |
| 57 deoptimizations_(4, info->zone()), | 52 deoptimizations_(4, info->zone()), |
| 58 jump_table_(4, info->zone()), | 53 jump_table_(4, info->zone()), |
| 59 deoptimization_literals_(8, info->zone()), | 54 deoptimization_literals_(8, info->zone()), |
| 60 inlined_function_count_(0), | 55 inlined_function_count_(0), |
| 61 scope_(info->scope()), | 56 scope_(info->scope()), |
| 62 status_(UNUSED), | |
| 63 translations_(info->zone()), | 57 translations_(info->zone()), |
| 64 deferred_(8, info->zone()), | 58 deferred_(8, info->zone()), |
| 65 osr_pc_offset_(-1), | 59 osr_pc_offset_(-1), |
| 66 last_lazy_deopt_pc_(0), | |
| 67 frame_is_built_(false), | 60 frame_is_built_(false), |
| 68 safepoints_(info->zone()), | 61 safepoints_(info->zone()), |
| 69 resolver_(this), | 62 resolver_(this), |
| 70 expected_safepoint_kind_(Safepoint::kSimple), | 63 expected_safepoint_kind_(Safepoint::kSimple), |
| 71 old_position_(RelocInfo::kNoPosition) { | 64 old_position_(RelocInfo::kNoPosition) { |
| 72 PopulateDeoptimizationLiteralsWithInlinedFunctions(); | 65 PopulateDeoptimizationLiteralsWithInlinedFunctions(); |
| 73 } | 66 } |
| 74 | 67 |
| 75 // Simple accessors. | |
| 76 MacroAssembler* masm() const { return masm_; } | |
| 77 CompilationInfo* info() const { return info_; } | |
| 78 Isolate* isolate() const { return info_->isolate(); } | |
| 79 Factory* factory() const { return isolate()->factory(); } | |
| 80 Heap* heap() const { return isolate()->heap(); } | |
| 81 Zone* zone() const { return zone_; } | |
| 82 | |
| 83 int LookupDestination(int block_id) const { | 68 int LookupDestination(int block_id) const { |
| 84 return chunk()->LookupDestination(block_id); | 69 return chunk()->LookupDestination(block_id); |
| 85 } | 70 } |
| 86 | 71 |
| 87 bool IsNextEmittedBlock(int block_id) const { | 72 bool IsNextEmittedBlock(int block_id) const { |
| 88 return LookupDestination(block_id) == GetNextEmittedBlock(); | 73 return LookupDestination(block_id) == GetNextEmittedBlock(); |
| 89 } | 74 } |
| 90 | 75 |
| 91 bool NeedsEagerFrame() const { | 76 bool NeedsEagerFrame() const { |
| 92 return GetStackSlotCount() > 0 || | 77 return GetStackSlotCount() > 0 || |
| (...skipping 46 matching lines...) |
| 139 | 124 |
| 140 // Emit frame translation commands for an environment. | 125 // Emit frame translation commands for an environment. |
| 141 void WriteTranslation(LEnvironment* environment, Translation* translation); | 126 void WriteTranslation(LEnvironment* environment, Translation* translation); |
| 142 | 127 |
| 143 // Declare methods that deal with the individual node types. | 128 // Declare methods that deal with the individual node types. |
| 144 #define DECLARE_DO(type) void Do##type(L##type* node); | 129 #define DECLARE_DO(type) void Do##type(L##type* node); |
| 145 LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO) | 130 LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO) |
| 146 #undef DECLARE_DO | 131 #undef DECLARE_DO |
| 147 | 132 |
| 148 private: | 133 private: |
| 149 enum Status { | |
| 150 UNUSED, | |
| 151 GENERATING, | |
| 152 DONE, | |
| 153 ABORTED | |
| 154 }; | |
| 155 | |
| 156 bool is_unused() const { return status_ == UNUSED; } | |
| 157 bool is_generating() const { return status_ == GENERATING; } | |
| 158 bool is_done() const { return status_ == DONE; } | |
| 159 bool is_aborted() const { return status_ == ABORTED; } | |
| 160 | |
| 161 StrictModeFlag strict_mode_flag() const { | 134 StrictModeFlag strict_mode_flag() const { |
| 162 return info()->is_classic_mode() ? kNonStrictMode : kStrictMode; | 135 return info()->is_classic_mode() ? kNonStrictMode : kStrictMode; |
| 163 } | 136 } |
| 164 | 137 |
| 165 LPlatformChunk* chunk() const { return chunk_; } | 138 LPlatformChunk* chunk() const { return chunk_; } |
| 166 Scope* scope() const { return scope_; } | 139 Scope* scope() const { return scope_; } |
| 167 HGraph* graph() const { return chunk()->graph(); } | 140 HGraph* graph() const { return chunk()->graph(); } |
| 168 | 141 |
| 169 XMMRegister double_scratch0() const { return xmm0; } | 142 XMMRegister double_scratch0() const { return xmm0; } |
| 170 | 143 |
| 171 int GetNextEmittedBlock() const; | |
| 172 | |
| 173 void EmitClassOfTest(Label* if_true, | 144 void EmitClassOfTest(Label* if_true, |
| 174 Label* if_false, | 145 Label* if_false, |
| 175 Handle<String> class_name, | 146 Handle<String> class_name, |
| 176 Register input, | 147 Register input, |
| 177 Register temporary, | 148 Register temporary, |
| 178 Register scratch); | 149 Register scratch); |
| 179 | 150 |
| 180 int GetStackSlotCount() const { return chunk()->spill_slot_count(); } | 151 int GetStackSlotCount() const { return chunk()->spill_slot_count(); } |
| 181 | 152 |
| 182 void Abort(BailoutReason reason); | 153 void Abort(BailoutReason reason); |
| 183 void FPRINTF_CHECKING Comment(const char* format, ...); | |
| 184 | 154 |
| 185 void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); } | 155 void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); } |
| 186 | 156 |
| 187 // Code generation passes. Returns true if code generation should | 157 // Code generation passes. Returns true if code generation should |
| 188 // continue. | 158 // continue. |
| 189 bool GeneratePrologue(); | 159 bool GeneratePrologue(); |
| 190 bool GenerateBody(); | |
| 191 bool GenerateDeferredCode(); | 160 bool GenerateDeferredCode(); |
| 192 bool GenerateJumpTable(); | 161 bool GenerateJumpTable(); |
| 193 bool GenerateSafepointTable(); | 162 bool GenerateSafepointTable(); |
| 194 | 163 |
| 195 // Generates the custom OSR entrypoint and sets the osr_pc_offset. | 164 // Generates the custom OSR entrypoint and sets the osr_pc_offset. |
| 196 void GenerateOsrPrologue(); | 165 void GenerateOsrPrologue(); |
| 197 | 166 |
| 198 enum SafepointMode { | 167 enum SafepointMode { |
| 199 RECORD_SIMPLE_SAFEPOINT, | 168 RECORD_SIMPLE_SAFEPOINT, |
| 200 RECORD_SAFEPOINT_WITH_REGISTERS | 169 RECORD_SAFEPOINT_WITH_REGISTERS |
| (...skipping 80 matching lines...) |
| 281 void RecordSafepoint(LPointerMap* pointers, | 250 void RecordSafepoint(LPointerMap* pointers, |
| 282 Safepoint::Kind kind, | 251 Safepoint::Kind kind, |
| 283 int arguments, | 252 int arguments, |
| 284 Safepoint::DeoptMode mode); | 253 Safepoint::DeoptMode mode); |
| 285 void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode); | 254 void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode); |
| 286 void RecordSafepoint(Safepoint::DeoptMode mode); | 255 void RecordSafepoint(Safepoint::DeoptMode mode); |
| 287 void RecordSafepointWithRegisters(LPointerMap* pointers, | 256 void RecordSafepointWithRegisters(LPointerMap* pointers, |
| 288 int arguments, | 257 int arguments, |
| 289 Safepoint::DeoptMode mode); | 258 Safepoint::DeoptMode mode); |
| 290 void RecordPosition(int position); | 259 void RecordPosition(int position); |
| 291 void RecordAndUpdatePosition(int position); | 260 void RecordAndUpdatePosition(int position) V8_OVERRIDE; |
| 292 | 261 |
| 293 static Condition TokenToCondition(Token::Value op, bool is_unsigned); | 262 static Condition TokenToCondition(Token::Value op, bool is_unsigned); |
| 294 void EmitGoto(int block); | 263 void EmitGoto(int block); |
| 295 template<class InstrType> | 264 template<class InstrType> |
| 296 void EmitBranch(InstrType instr, Condition cc); | 265 void EmitBranch(InstrType instr, Condition cc); |
| 297 template<class InstrType> | 266 template<class InstrType> |
| 298 void EmitFalseBranch(InstrType instr, Condition cc); | 267 void EmitFalseBranch(InstrType instr, Condition cc); |
| 299 void EmitNumberUntagD( | 268 void EmitNumberUntagD( |
| 300 Register input, | 269 Register input, |
| 301 XMMRegister result, | 270 XMMRegister result, |
| (...skipping 34 matching lines...) |
| 336 void EmitPushTaggedOperand(LOperand* operand); | 305 void EmitPushTaggedOperand(LOperand* operand); |
| 337 | 306 |
| 338 // Emits optimized code to deep-copy the contents of statically known | 307 // Emits optimized code to deep-copy the contents of statically known |
| 339 // object graphs (e.g. object literal boilerplate). | 308 // object graphs (e.g. object literal boilerplate). |
| 340 void EmitDeepCopy(Handle<JSObject> object, | 309 void EmitDeepCopy(Handle<JSObject> object, |
| 341 Register result, | 310 Register result, |
| 342 Register source, | 311 Register source, |
| 343 int* offset, | 312 int* offset, |
| 344 AllocationSiteMode mode); | 313 AllocationSiteMode mode); |
| 345 | 314 |
| 346 void EnsureSpaceForLazyDeopt(int space_needed); | 315 void EnsureSpaceForLazyDeopt(int space_needed) V8_OVERRIDE; |
| 347 void DoLoadKeyedExternalArray(LLoadKeyed* instr); | 316 void DoLoadKeyedExternalArray(LLoadKeyed* instr); |
| 348 void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr); | 317 void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr); |
| 349 void DoLoadKeyedFixedArray(LLoadKeyed* instr); | 318 void DoLoadKeyedFixedArray(LLoadKeyed* instr); |
| 350 void DoStoreKeyedExternalArray(LStoreKeyed* instr); | 319 void DoStoreKeyedExternalArray(LStoreKeyed* instr); |
| 351 void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr); | 320 void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr); |
| 352 void DoStoreKeyedFixedArray(LStoreKeyed* instr); | 321 void DoStoreKeyedFixedArray(LStoreKeyed* instr); |
| 353 #ifdef _MSC_VER | 322 #ifdef _MSC_VER |
| 354 // On windows, you may not access the stack more than one page below | 323 // On windows, you may not access the stack more than one page below |
| 355 // the most recently mapped page. To make the allocated area randomly | 324 // the most recently mapped page. To make the allocated area randomly |
| 356 // accessible, we write an arbitrary value to each page in range | 325 // accessible, we write an arbitrary value to each page in range |
| 357 // rsp + offset - page_size .. rsp in turn. | 326 // rsp + offset - page_size .. rsp in turn. |
| 358 void MakeSureStackPagesMapped(int offset); | 327 void MakeSureStackPagesMapped(int offset); |
| 359 #endif | 328 #endif |
| 360 | 329 |
| 361 Zone* zone_; | |
| 362 LPlatformChunk* const chunk_; | |
| 363 MacroAssembler* const masm_; | |
| 364 CompilationInfo* const info_; | |
| 365 | |
| 366 int current_block_; | |
| 367 int current_instruction_; | |
| 368 const ZoneList<LInstruction*>* instructions_; | |
| 369 ZoneList<LEnvironment*> deoptimizations_; | 330 ZoneList<LEnvironment*> deoptimizations_; |
| 370 ZoneList<Deoptimizer::JumpTableEntry> jump_table_; | 331 ZoneList<Deoptimizer::JumpTableEntry> jump_table_; |
| 371 ZoneList<Handle<Object> > deoptimization_literals_; | 332 ZoneList<Handle<Object> > deoptimization_literals_; |
| 372 int inlined_function_count_; | 333 int inlined_function_count_; |
| 373 Scope* const scope_; | 334 Scope* const scope_; |
| 374 Status status_; | |
| 375 TranslationBuffer translations_; | 335 TranslationBuffer translations_; |
| 376 ZoneList<LDeferredCode*> deferred_; | 336 ZoneList<LDeferredCode*> deferred_; |
| 377 int osr_pc_offset_; | 337 int osr_pc_offset_; |
| 378 int last_lazy_deopt_pc_; | |
| 379 bool frame_is_built_; | 338 bool frame_is_built_; |
| 380 | 339 |
| 381 // Builder that keeps track of safepoints in the code. The table | 340 // Builder that keeps track of safepoints in the code. The table |
| 382 // itself is emitted at the end of the generated code. | 341 // itself is emitted at the end of the generated code. |
| 383 SafepointTableBuilder safepoints_; | 342 SafepointTableBuilder safepoints_; |
| 384 | 343 |
| 385 // Compiler from a set of parallel moves to a sequential list of moves. | 344 // Compiler from a set of parallel moves to a sequential list of moves. |
| 386 LGapResolver resolver_; | 345 LGapResolver resolver_; |
| 387 | 346 |
| 388 Safepoint::Kind expected_safepoint_kind_; | 347 Safepoint::Kind expected_safepoint_kind_; |
| (...skipping 55 matching lines...) |
| 444 Label entry_; | 403 Label entry_; |
| 445 Label exit_; | 404 Label exit_; |
| 446 Label done_; | 405 Label done_; |
| 447 Label* external_exit_; | 406 Label* external_exit_; |
| 448 int instruction_index_; | 407 int instruction_index_; |
| 449 }; | 408 }; |
| 450 | 409 |
| 451 } } // namespace v8::internal | 410 } } // namespace v8::internal |
| 452 | 411 |
| 453 #endif // V8_X64_LITHIUM_CODEGEN_X64_H_ | 412 #endif // V8_X64_LITHIUM_CODEGEN_X64_H_ |
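The substance of this change is that LCodeGen now derives from LCodeGenBase (pulled in through the new `#include "lithium-codegen.h"`) and drops the state every platform backend used to duplicate: `zone_`, `chunk_`, `masm_`, `info_`, `current_block_`, `current_instruction_`, `instructions_`, `status_`, `last_lazy_deopt_pc_`, the simple accessors, `GetNextEmittedBlock()`, `Comment()`, and `GenerateBody()`, while `RecordAndUpdatePosition()` and `EnsureSpaceForLazyDeopt()` become `V8_OVERRIDE`s of base-class virtuals. The sketch below reconstructs what such a base class plausibly declares, purely from the members removed here; it is not the actual contents of src/lithium-codegen.h, and the stub types are placeholders so the fragment compiles on its own.

```cpp
// Placeholder stand-ins so this sketch is self-contained; the real class
// uses the V8 types of the same names.
struct LChunk {};
struct MacroAssembler {};
struct CompilationInfo {};

// Hypothetical reconstruction of the shared base class, inferred from the
// members this CL removes from the x64 header. Details (types, extra
// members such as zone_ and instructions_) are simplified or omitted.
class LCodeGenBase {
 public:
  LCodeGenBase(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
      : chunk_(chunk),
        masm_(assembler),
        info_(info),
        current_block_(-1),
        current_instruction_(-1),
        status_(UNUSED),
        last_lazy_deopt_pc_(0) {}
  virtual ~LCodeGenBase() {}

  // Accessors formerly repeated in every lithium-codegen-<arch>.h.
  MacroAssembler* masm() const { return masm_; }
  CompilationInfo* info() const { return info_; }

  // Shared driver loop: walks the chunk's instructions and dispatches to
  // the platform Do* visitors (body omitted in this sketch).
  bool GenerateBody() { return true; }

  // Hooks the x64 header now marks V8_OVERRIDE.
  virtual void RecordAndUpdatePosition(int position) = 0;
  virtual void EnsureSpaceForLazyDeopt(int space_needed) = 0;

 protected:
  enum Status { UNUSED, GENERATING, DONE, ABORTED };

  bool is_generating() const { return status_ == GENERATING; }
  bool is_done() const { return status_ == DONE; }
  bool is_aborted() const { return status_ == ABORTED; }

  LChunk* const chunk_;
  MacroAssembler* const masm_;
  CompilationInfo* const info_;
  int current_block_;
  int current_instruction_;
  Status status_;
  int last_lazy_deopt_pc_;
};
```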
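The `#ifdef _MSC_VER` block (new lines 322-328) keeps `MakeSureStackPagesMapped()`, which does by hand what MSVC's automatic stack probe (`__chkstk`) does for ordinary C++ functions whose locals exceed a page: touch every page between the most recently mapped one and the lowest address about to be used, so Windows' guard-page mechanism can commit them in order. Below is a hypothetical user-space analogue of that access pattern only; the helper name, the 4 KiB page size, and the loop shape are illustrative assumptions, while the real method emits stores through the MacroAssembler over the `rsp + offset - page_size .. rsp` range described in the comment.

```cpp
// Hypothetical user-space analogue of the probing pattern behind
// MakeSureStackPagesMapped(); not the V8 implementation.
#include <cstddef>

constexpr std::size_t kPageSize = 4 * 1024;  // assumed 4 KiB pages

// Touch one byte per page, starting from the highest address (closest to
// the already-mapped part of the stack) and walking toward the lowest, so
// no access lands more than a page below the last mapped page.
void TouchPagesHighToLow(volatile char* base, std::size_t size) {
  for (std::size_t touched = kPageSize; touched <= size; touched += kPageSize) {
    base[size - touched] = 0;  // the written value is irrelevant
  }
  if (size % kPageSize != 0) base[0] = 0;  // cover a partial final page
}

int main() {
  // Stand-in for a large spill-slot area carved out of the stack frame.
  char frame[8 * kPageSize];
  TouchPagesHighToLow(frame, sizeof(frame));
  return 0;
}
```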