OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 14 matching lines...) |
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
27 | 27 |
28 #include "v8.h" | 28 #include "v8.h" |
29 | 29 |
30 #if V8_TARGET_ARCH_X64 | 30 #if V8_TARGET_ARCH_X64 |
31 | 31 |
32 #include "lithium-allocator-inl.h" | 32 #include "lithium-allocator-inl.h" |
33 #include "x64/lithium-x64.h" | 33 #include "x64/lithium-x64.h" |
34 #include "x64/lithium-codegen-x64.h" | 34 #include "x64/lithium-codegen-x64.h" |
| 35 #include "hydrogen-osr.h" |
35 | 36 |
36 namespace v8 { | 37 namespace v8 { |
37 namespace internal { | 38 namespace internal { |
38 | 39 |
39 #define DEFINE_COMPILE(type) \ | 40 #define DEFINE_COMPILE(type) \ |
40 void L##type::CompileToNative(LCodeGen* generator) { \ | 41 void L##type::CompileToNative(LCodeGen* generator) { \ |
41 generator->Do##type(this); \ | 42 generator->Do##type(this); \ |
42 } | 43 } |
43 LITHIUM_CONCRETE_INSTRUCTION_LIST(DEFINE_COMPILE) | 44 LITHIUM_CONCRETE_INSTRUCTION_LIST(DEFINE_COMPILE) |
44 #undef DEFINE_COMPILE | 45 #undef DEFINE_COMPILE |
(...skipping 379 matching lines...) |
424 object()->PrintTo(stream); | 425 object()->PrintTo(stream); |
425 stream->Add(" %p -> %p", *original_map(), *transitioned_map()); | 426 stream->Add(" %p -> %p", *original_map(), *transitioned_map()); |
426 } | 427 } |
427 | 428 |
428 | 429 |
429 LPlatformChunk* LChunkBuilder::Build() { | 430 LPlatformChunk* LChunkBuilder::Build() { |
430 ASSERT(is_unused()); | 431 ASSERT(is_unused()); |
431 chunk_ = new(zone()) LPlatformChunk(info(), graph()); | 432 chunk_ = new(zone()) LPlatformChunk(info(), graph()); |
432 LPhase phase("L_Building chunk", chunk_); | 433 LPhase phase("L_Building chunk", chunk_); |
433 status_ = BUILDING; | 434 status_ = BUILDING; |
| 435 |
| 436 // If compiling for OSR, reserve space for the unoptimized frame, |
| 437 // which will be subsumed into this frame. |
| 438 if (graph()->has_osr()) { |
| 439 for (int i = graph()->osr()->UnoptimizedFrameSlots(); i > 0; i--) { |
| 440 chunk_->GetNextSpillIndex(false); |
| 441 } |
| 442 } |
| 443 |
434 const ZoneList<HBasicBlock*>* blocks = graph()->blocks(); | 444 const ZoneList<HBasicBlock*>* blocks = graph()->blocks(); |
435 for (int i = 0; i < blocks->length(); i++) { | 445 for (int i = 0; i < blocks->length(); i++) { |
436 HBasicBlock* next = NULL; | 446 HBasicBlock* next = NULL; |
437 if (i < blocks->length() - 1) next = blocks->at(i + 1); | 447 if (i < blocks->length() - 1) next = blocks->at(i + 1); |
438 DoBasicBlock(blocks->at(i), next); | 448 DoBasicBlock(blocks->at(i), next); |
439 if (is_aborted()) return NULL; | 449 if (is_aborted()) return NULL; |
440 } | 450 } |
441 status_ = DONE; | 451 status_ = DONE; |
442 return chunk_; | 452 return chunk_; |
443 } | 453 } |
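
The reservation loop added to LChunkBuilder::Build() above pins down the low spill indices before any regular allocation happens, so values carried over from the unoptimized frame keep stable offsets. A minimal sketch of that idea, assuming GetNextSpillIndex() simply hands out monotonically increasing indices (the names below are hypothetical, not the V8 implementation):

struct ChunkSketch {
  int spill_slot_count_ = 0;
  // On x64 a stack slot is pointer-sized (8 bytes), so a double also fits
  // in one slot and the is_double flag does not change the increment.
  int GetNextSpillIndex(bool /*is_double*/) { return spill_slot_count_++; }
};

int main() {
  ChunkSketch chunk;
  int unoptimized_frame_slots = 3;  // hypothetical UnoptimizedFrameSlots()
  // Reserve indices 0..2 for the unoptimized frame; regular spill slots
  // handed out later start at index 3.
  for (int i = unoptimized_frame_slots; i > 0; i--) {
    chunk.GetNextSpillIndex(false);
  }
  return 0;
}
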
(...skipping 1921 matching lines...) |
2365 CodeStubInterfaceDescriptor* descriptor = | 2375 CodeStubInterfaceDescriptor* descriptor = |
2366 info()->code_stub()->GetInterfaceDescriptor(info()->isolate()); | 2376 info()->code_stub()->GetInterfaceDescriptor(info()->isolate()); |
2367 int index = static_cast<int>(instr->index()); | 2377 int index = static_cast<int>(instr->index()); |
2368 Register reg = DESCRIPTOR_GET_PARAMETER_REGISTER(descriptor, index); | 2378 Register reg = DESCRIPTOR_GET_PARAMETER_REGISTER(descriptor, index); |
2369 return DefineFixed(result, reg); | 2379 return DefineFixed(result, reg); |
2370 } | 2380 } |
2371 } | 2381 } |
2372 | 2382 |
2373 | 2383 |
2374 LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) { | 2384 LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) { |
2375 int spill_index = chunk()->GetNextSpillIndex(false); // Not double-width. | 2385 // Use an index that corresponds to the location in the unoptimized frame, |
2376 if (spill_index > LUnallocated::kMaxFixedSlotIndex) { | 2386 // which the optimized frame will subsume. |
2377 Abort("Too many spill slots needed for OSR"); | 2387 int env_index = instr->index(); |
2378 spill_index = 0; | 2388 int spill_index = 0; |
| 2389 if (instr->environment()->is_parameter_index(env_index)) { |
| 2390 spill_index = chunk()->GetParameterStackSlot(env_index); |
| 2391 } else { |
| 2392 spill_index = env_index - instr->environment()->first_local_index(); |
| 2393 if (spill_index > LUnallocated::kMaxFixedSlotIndex) { |
| 2394 Abort("Too many spill slots needed for OSR"); |
| 2395 spill_index = 0; |
| 2396 } |
2379 } | 2397 } |
2380 return DefineAsSpilled(new(zone()) LUnknownOSRValue, spill_index); | 2398 return DefineAsSpilled(new(zone()) LUnknownOSRValue, spill_index); |
2381 } | 2399 } |
2382 | 2400 |
2383 | 2401 |
2384 LInstruction* LChunkBuilder::DoCallStub(HCallStub* instr) { | 2402 LInstruction* LChunkBuilder::DoCallStub(HCallStub* instr) { |
2385 argument_count_ -= instr->argument_count(); | 2403 argument_count_ -= instr->argument_count(); |
2386 return MarkAsCall(DefineFixed(new(zone()) LCallStub, rax), instr); | 2404 return MarkAsCall(DefineFixed(new(zone()) LCallStub, rax), instr); |
2387 } | 2405 } |
2388 | 2406 |
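
The branch added to DoUnknownOSRValue above resolves an environment index in two ways: parameter indices go through the chunk's parameter-slot mapping, while locals are numbered relative to first_local_index(), so the k-th local lands in the k-th slot reserved up front in Build(). A hedged, standalone sketch of the local-index arithmetic (hypothetical names, not the V8 API):

// Mirrors spill_index = env_index - first_local_index from the change above.
int LocalSpillIndex(int env_index, int first_local_index) {
  return env_index - first_local_index;
}

// Example: with first_local_index == 4, environment indices 4, 5 and 6 map
// to spill slots 0, 1 and 2, matching the slots reserved in Build().
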
(...skipping 155 matching lines...) |
2544 LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) { | 2562 LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) { |
2545 LOperand* object = UseRegister(instr->object()); | 2563 LOperand* object = UseRegister(instr->object()); |
2546 LOperand* index = UseTempRegister(instr->index()); | 2564 LOperand* index = UseTempRegister(instr->index()); |
2547 return DefineSameAsFirst(new(zone()) LLoadFieldByIndex(object, index)); | 2565 return DefineSameAsFirst(new(zone()) LLoadFieldByIndex(object, index)); |
2548 } | 2566 } |
2549 | 2567 |
2550 | 2568 |
2551 } } // namespace v8::internal | 2569 } } // namespace v8::internal |
2552 | 2570 |
2553 #endif // V8_TARGET_ARCH_X64 | 2571 #endif // V8_TARGET_ARCH_X64 |