OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 12 matching lines...) |
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
27 | 27 |
28 #include "v8.h" | 28 #include "v8.h" |
29 | 29 |
30 #include "lithium-allocator-inl.h" | 30 #include "lithium-allocator-inl.h" |
31 #include "arm/lithium-arm.h" | 31 #include "arm/lithium-arm.h" |
32 #include "arm/lithium-codegen-arm.h" | 32 #include "arm/lithium-codegen-arm.h" |
| 33 #include "hydrogen-osr.h" |
33 | 34 |
34 namespace v8 { | 35 namespace v8 { |
35 namespace internal { | 36 namespace internal { |
36 | 37 |
37 #define DEFINE_COMPILE(type) \ | 38 #define DEFINE_COMPILE(type) \ |
38 void L##type::CompileToNative(LCodeGen* generator) { \ | 39 void L##type::CompileToNative(LCodeGen* generator) { \ |
39 generator->Do##type(this); \ | 40 generator->Do##type(this); \ |
40 } | 41 } |
41 LITHIUM_CONCRETE_INSTRUCTION_LIST(DEFINE_COMPILE) | 42 LITHIUM_CONCRETE_INSTRUCTION_LIST(DEFINE_COMPILE) |
42 #undef DEFINE_COMPILE | 43 #undef DEFINE_COMPILE |
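Note: the unchanged DEFINE_COMPILE block above is an X-macro. LITHIUM_CONCRETE_INSTRUCTION_LIST is applied once per concrete Lithium instruction, stamping out a CompileToNative forwarder for each type. A minimal standalone sketch of that pattern follows; the instruction names, the Generator type, and the list macro here are simplified stand-ins, not the real V8 definitions.

// Minimal sketch (not V8 code) of the X-macro pattern used by DEFINE_COMPILE.
#include <cstdio>

// Hypothetical instruction list; V8's real list names the Lithium instructions.
#define CONCRETE_INSTRUCTION_LIST(V) \
  V(Add)                             \
  V(Branch)

struct Generator {
  void DoAdd() { std::puts("DoAdd"); }
  void DoBranch() { std::puts("DoBranch"); }
};

// Each expansion defines one L<type> with a CompileToNative forwarder.
#define DEFINE_COMPILE(type)                        \
  struct L##type {                                  \
    void CompileToNative(Generator* generator) {    \
      generator->Do##type();                        \
    }                                               \
  };
CONCRETE_INSTRUCTION_LIST(DEFINE_COMPILE)
#undef DEFINE_COMPILE

int main() {
  Generator g;
  LAdd().CompileToNative(&g);     // prints DoAdd
  LBranch().CompileToNative(&g);  // prints DoBranch
  return 0;
}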
(...skipping 375 matching lines...) |
418 return LStackSlot::Create(index, zone()); | 419 return LStackSlot::Create(index, zone()); |
419 } | 420 } |
420 } | 421 } |
421 | 422 |
422 | 423 |
423 LPlatformChunk* LChunkBuilder::Build() { | 424 LPlatformChunk* LChunkBuilder::Build() { |
424 ASSERT(is_unused()); | 425 ASSERT(is_unused()); |
425 chunk_ = new(zone()) LPlatformChunk(info(), graph()); | 426 chunk_ = new(zone()) LPlatformChunk(info(), graph()); |
426 LPhase phase("L_Building chunk", chunk_); | 427 LPhase phase("L_Building chunk", chunk_); |
427 status_ = BUILDING; | 428 status_ = BUILDING; |
| 429 |
| 430 // If compiling for OSR, reserve space for the unoptimized frame, |
| 431 // which will be subsumed into this frame. |
| 432 if (graph()->has_osr()) { |
| 433 for (int i = graph()->osr()->UnoptimizedFrameSlots(); i > 0; i--) { |
| 434 chunk_->GetNextSpillIndex(false); |
| 435 } |
| 436 } |
| 437 |
428 const ZoneList<HBasicBlock*>* blocks = graph()->blocks(); | 438 const ZoneList<HBasicBlock*>* blocks = graph()->blocks(); |
429 for (int i = 0; i < blocks->length(); i++) { | 439 for (int i = 0; i < blocks->length(); i++) { |
430 HBasicBlock* next = NULL; | 440 HBasicBlock* next = NULL; |
431 if (i < blocks->length() - 1) next = blocks->at(i + 1); | 441 if (i < blocks->length() - 1) next = blocks->at(i + 1); |
432 DoBasicBlock(blocks->at(i), next); | 442 DoBasicBlock(blocks->at(i), next); |
433 if (is_aborted()) return NULL; | 443 if (is_aborted()) return NULL; |
434 } | 444 } |
435 status_ = DONE; | 445 status_ = DONE; |
436 return chunk_; | 446 return chunk_; |
437 } | 447 } |
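Note: the new OSR block in Build() above reserves, before any other allocation, one spill slot per slot of the unoptimized frame whenever the graph was compiled for OSR, so every slot handed out later lands above the region the unoptimized frame will occupy once it is subsumed. A minimal standalone sketch of that reservation idea follows; ChunkModel and its GetNextSpillIndex method are simplified stand-ins for LPlatformChunk, and the slot count is an assumed example value.

// Minimal sketch (not V8 code): reserving the unoptimized frame's slots up
// front pins OSR values at fixed spill indices.
#include <cassert>

// Hypothetical stand-in for the chunk's spill-slot bookkeeping.
class ChunkModel {
 public:
  // Mirrors GetNextSpillIndex(false): hands out consecutive word-sized slots.
  int GetNextSpillIndex() { return spill_slot_count_++; }
  int spill_slot_count() const { return spill_slot_count_; }
 private:
  int spill_slot_count_ = 0;
};

int main() {
  const int unoptimized_frame_slots = 5;  // assumed example value
  ChunkModel chunk;
  // Reserve the unoptimized frame's slots first, as Build() now does.
  for (int i = unoptimized_frame_slots; i > 0; i--) {
    chunk.GetNextSpillIndex();
  }
  assert(chunk.spill_slot_count() == unoptimized_frame_slots);
  // Any slot allocated afterwards lands above the reserved region.
  assert(chunk.GetNextSpillIndex() == unoptimized_frame_slots);
  return 0;
}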
(...skipping 2001 matching lines...) |
2439 CodeStubInterfaceDescriptor* descriptor = | 2449 CodeStubInterfaceDescriptor* descriptor = |
2440 info()->code_stub()->GetInterfaceDescriptor(info()->isolate()); | 2450 info()->code_stub()->GetInterfaceDescriptor(info()->isolate()); |
2441 int index = static_cast<int>(instr->index()); | 2451 int index = static_cast<int>(instr->index()); |
2442 Register reg = DESCRIPTOR_GET_PARAMETER_REGISTER(descriptor, index); | 2452 Register reg = DESCRIPTOR_GET_PARAMETER_REGISTER(descriptor, index); |
2443 return DefineFixed(result, reg); | 2453 return DefineFixed(result, reg); |
2444 } | 2454 } |
2445 } | 2455 } |
2446 | 2456 |
2447 | 2457 |
2448 LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) { | 2458 LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) { |
2449 int spill_index = chunk()->GetNextSpillIndex(false); // Not double-width. | 2459 // Use an index that corresponds to the location in the unoptimized frame, |
2450 if (spill_index > LUnallocated::kMaxFixedSlotIndex) { | 2460 // which the optimized frame will subsume. |
2451 Abort("Too many spill slots needed for OSR"); | 2461 int env_index = instr->index(); |
2452 spill_index = 0; | 2462 int spill_index = 0; |
| 2463 if (instr->environment()->is_parameter_index(env_index)) { |
| 2464 spill_index = chunk()->GetParameterStackSlot(env_index); |
| 2465 } else { |
| 2466 spill_index = env_index - instr->environment()->first_local_index(); |
| 2467 if (spill_index > LUnallocated::kMaxFixedSlotIndex) { |
| 2468 Abort("Too many spill slots needed for OSR"); |
| 2469 spill_index = 0; |
| 2470 } |
2453 } | 2471 } |
2454 return DefineAsSpilled(new(zone()) LUnknownOSRValue, spill_index); | 2472 return DefineAsSpilled(new(zone()) LUnknownOSRValue, spill_index); |
2455 } | 2473 } |
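Note: with this change DoUnknownOSRValue no longer allocates a fresh spill slot; it derives the slot from the value's position in the unoptimized frame (parameters via the parameter stack slot mapping, locals by their offset from the first local index), aborting if the index exceeds the fixed-slot limit. A minimal standalone sketch of that index derivation follows; the frame shape, the kMaxFixedSlotIndex value, and the ParameterStackSlot helper are hypothetical stand-ins, not the real V8 API.

// Minimal sketch (not V8 code): mapping an OSR value's environment index to
// a spill index in the unoptimized frame layout.
#include <cstdio>

const int kMaxFixedSlotIndex = 128;  // stand-in for LUnallocated::kMaxFixedSlotIndex
const int kParameterCount = 2;       // hypothetical frame shape
const int kFirstLocalIndex = kParameterCount + 1;  // params, then context, then locals

bool IsParameterIndex(int env_index) { return env_index < kParameterCount; }

// Hypothetical stand-in for chunk()->GetParameterStackSlot(env_index).
int ParameterStackSlot(int env_index) { return -(env_index + 1); }

int SpillIndexForOSRValue(int env_index) {
  if (IsParameterIndex(env_index)) {
    // Parameters live at fixed (negative) parameter stack slots.
    return ParameterStackSlot(env_index);
  }
  int spill_index = env_index - kFirstLocalIndex;
  if (spill_index > kMaxFixedSlotIndex) {
    // The real code aborts compilation here; the sketch just clamps.
    spill_index = 0;
  }
  return spill_index;
}

int main() {
  std::printf("parameter 1 -> slot %d\n", SpillIndexForOSRValue(1));
  std::printf("local 0     -> slot %d\n", SpillIndexForOSRValue(kFirstLocalIndex));
  return 0;
}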
2456 | 2474 |
2457 | 2475 |
2458 LInstruction* LChunkBuilder::DoCallStub(HCallStub* instr) { | 2476 LInstruction* LChunkBuilder::DoCallStub(HCallStub* instr) { |
2459 argument_count_ -= instr->argument_count(); | 2477 argument_count_ -= instr->argument_count(); |
2460 return MarkAsCall(DefineFixed(new(zone()) LCallStub, r0), instr); | 2478 return MarkAsCall(DefineFixed(new(zone()) LCallStub, r0), instr); |
2461 } | 2479 } |
2462 | 2480 |
(...skipping 151 matching lines...) |
2614 | 2632 |
2615 | 2633 |
2616 LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) { | 2634 LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) { |
2617 LOperand* object = UseRegister(instr->object()); | 2635 LOperand* object = UseRegister(instr->object()); |
2618 LOperand* index = UseRegister(instr->index()); | 2636 LOperand* index = UseRegister(instr->index()); |
2619 return DefineAsRegister(new(zone()) LLoadFieldByIndex(object, index)); | 2637 return DefineAsRegister(new(zone()) LLoadFieldByIndex(object, index)); |
2620 } | 2638 } |
2621 | 2639 |
2622 | 2640 |
2623 } } // namespace v8::internal | 2641 } } // namespace v8::internal |