| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 12 matching lines...) |
| 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
| 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
| 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
| 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| 27 | 27 |
| 28 #include "v8.h" | 28 #include "v8.h" |
| 29 | 29 |
| 30 #include "lithium-allocator-inl.h" | 30 #include "lithium-allocator-inl.h" |
| 31 #include "arm/lithium-arm.h" | 31 #include "arm/lithium-arm.h" |
| 32 #include "arm/lithium-codegen-arm.h" | 32 #include "arm/lithium-codegen-arm.h" |
| 33 #include "hydrogen-osr.h" |
| 33 | 34 |
| 34 namespace v8 { | 35 namespace v8 { |
| 35 namespace internal { | 36 namespace internal { |
| 36 | 37 |
| 37 #define DEFINE_COMPILE(type) \ | 38 #define DEFINE_COMPILE(type) \ |
| 38 void L##type::CompileToNative(LCodeGen* generator) { \ | 39 void L##type::CompileToNative(LCodeGen* generator) { \ |
| 39 generator->Do##type(this); \ | 40 generator->Do##type(this); \ |
| 40 } | 41 } |
| 41 LITHIUM_CONCRETE_INSTRUCTION_LIST(DEFINE_COMPILE) | 42 LITHIUM_CONCRETE_INSTRUCTION_LIST(DEFINE_COMPILE) |
| 42 #undef DEFINE_COMPILE | 43 #undef DEFINE_COMPILE |
| (...skipping 383 matching lines...) |
| 426 return LStackSlot::Create(index, zone()); | 427 return LStackSlot::Create(index, zone()); |
| 427 } | 428 } |
| 428 } | 429 } |
| 429 | 430 |
| 430 | 431 |
| 431 LPlatformChunk* LChunkBuilder::Build() { | 432 LPlatformChunk* LChunkBuilder::Build() { |
| 432 ASSERT(is_unused()); | 433 ASSERT(is_unused()); |
| 433 chunk_ = new(zone()) LPlatformChunk(info(), graph()); | 434 chunk_ = new(zone()) LPlatformChunk(info(), graph()); |
| 434 LPhase phase("L_Building chunk", chunk_); | 435 LPhase phase("L_Building chunk", chunk_); |
| 435 status_ = BUILDING; | 436 status_ = BUILDING; |
| 437 |
| 438 // If compiling for OSR, reserve space for the unoptimized frame, |
| 439 // which will be subsumed into this frame. |
| 440 if (graph()->has_osr()) { |
| 441 for (int i = graph()->osr()->UnoptimizedFrameSlots(); i > 0; i--) { |
| 442 chunk_->GetNextSpillIndex(false); |
| 443 } |
| 444 } |
| 445 |
| 436 const ZoneList<HBasicBlock*>* blocks = graph()->blocks(); | 446 const ZoneList<HBasicBlock*>* blocks = graph()->blocks(); |
| 437 for (int i = 0; i < blocks->length(); i++) { | 447 for (int i = 0; i < blocks->length(); i++) { |
| 438 HBasicBlock* next = NULL; | 448 HBasicBlock* next = NULL; |
| 439 if (i < blocks->length() - 1) next = blocks->at(i + 1); | 449 if (i < blocks->length() - 1) next = blocks->at(i + 1); |
| 440 DoBasicBlock(blocks->at(i), next); | 450 DoBasicBlock(blocks->at(i), next); |
| 441 if (is_aborted()) return NULL; | 451 if (is_aborted()) return NULL; |
| 442 } | 452 } |
| 443 status_ = DONE; | 453 status_ = DONE; |
| 444 return chunk_; | 454 return chunk_; |
| 445 } | 455 } |
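The hunk above adds the OSR (on-stack replacement) handling to `LChunkBuilder::Build()`: when the graph was built for OSR, the builder burns one spill index per slot of the unoptimized frame before allocating anything else, so the optimized frame can subsume the unoptimized one at the same stack offsets. Below is a minimal, self-contained sketch of that reservation idea; it is not V8 code, and the names `Chunk` and `ReserveOsrSlots` are invented for illustration only.

```cpp
#include <cstdio>

struct Chunk {
  int next_spill_index = 0;
  // Stands in for LPlatformChunk::GetNextSpillIndex(false): hand out one
  // (non-double) stack slot per call.
  int GetNextSpillIndex() { return next_spill_index++; }
};

// Mirrors the loop added to LChunkBuilder::Build(): consume one spill index
// per unoptimized-frame slot before any real spill slots are allocated.
void ReserveOsrSlots(Chunk* chunk, int unoptimized_frame_slots) {
  for (int i = unoptimized_frame_slots; i > 0; i--) {
    chunk->GetNextSpillIndex();
  }
}

int main() {
  Chunk chunk;
  ReserveOsrSlots(&chunk, 4);  // e.g. 4 slots in the unoptimized frame
  // The first "real" spill slot now lands above the reserved region.
  std::printf("first free spill index: %d\n", chunk.GetNextSpillIndex());  // 4
}
```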
| (...skipping 1986 matching lines...) |
| 2432 CodeStubInterfaceDescriptor* descriptor = | 2442 CodeStubInterfaceDescriptor* descriptor = |
| 2433 info()->code_stub()->GetInterfaceDescriptor(info()->isolate()); | 2443 info()->code_stub()->GetInterfaceDescriptor(info()->isolate()); |
| 2434 int index = static_cast<int>(instr->index()); | 2444 int index = static_cast<int>(instr->index()); |
| 2435 Register reg = DESCRIPTOR_GET_PARAMETER_REGISTER(descriptor, index); | 2445 Register reg = DESCRIPTOR_GET_PARAMETER_REGISTER(descriptor, index); |
| 2436 return DefineFixed(result, reg); | 2446 return DefineFixed(result, reg); |
| 2437 } | 2447 } |
| 2438 } | 2448 } |
| 2439 | 2449 |
| 2440 | 2450 |
| 2441 LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) { | 2451 LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) { |
| 2442 int spill_index = chunk()->GetNextSpillIndex(false); // Not double-width. | 2452 // Use an index that corresponds to the location in the unoptimized frame, |
| 2443 if (spill_index > LUnallocated::kMaxFixedSlotIndex) { | 2453 // which the optimized frame will subsume. |
| 2444 Abort(kTooManySpillSlotsNeededForOSR); | 2454 int env_index = instr->index(); |
| 2445 spill_index = 0; | 2455 int spill_index = 0; |
| 2456 if (instr->environment()->is_parameter_index(env_index)) { |
| 2457 spill_index = chunk()->GetParameterStackSlot(env_index); |
| 2458 } else { |
| 2459 spill_index = env_index - instr->environment()->first_local_index(); |
| 2460 if (spill_index > LUnallocated::kMaxFixedSlotIndex) { |
| 2461 Abort(kTooManySpillSlotsNeededForOSR); |
| 2462 spill_index = 0; |
| 2463 } |
| 2446 } | 2464 } |
| 2447 return DefineAsSpilled(new(zone()) LUnknownOSRValue, spill_index); | 2465 return DefineAsSpilled(new(zone()) LUnknownOSRValue, spill_index); |
| 2448 } | 2466 } |
| 2449 | 2467 |
| 2450 | 2468 |
| 2451 LInstruction* LChunkBuilder::DoCallStub(HCallStub* instr) { | 2469 LInstruction* LChunkBuilder::DoCallStub(HCallStub* instr) { |
| 2452 argument_count_ -= instr->argument_count(); | 2470 argument_count_ -= instr->argument_count(); |
| 2453 return MarkAsCall(DefineFixed(new(zone()) LCallStub, r0), instr); | 2471 return MarkAsCall(DefineFixed(new(zone()) LCallStub, r0), instr); |
| 2454 } | 2472 } |
| 2455 | 2473 |
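The rewritten `DoUnknownOSRValue` above stops grabbing a fresh spill slot and instead derives the slot from the value's position in the unoptimized frame: parameter indices go through the chunk's parameter stack slots, while locals are numbered relative to the environment's first local index, with the existing abort when the index exceeds the allocator's fixed-slot limit. The sketch below models only that index mapping under stated assumptions; `FakeEnvironment`, `kMaxFixedSlotIndex`'s value, and `OsrSpillIndex` are hypothetical stand-ins, not the V8 API.

```cpp
#include <cstdio>

constexpr int kMaxFixedSlotIndex = 128;  // stand-in for LUnallocated's limit

struct FakeEnvironment {
  int parameter_count;
  int first_local_index;  // environment index of the first local slot
  bool is_parameter_index(int index) const { return index < parameter_count; }
};

// Returns the spill index an OSR value at |env_index| would occupy, or 0
// (with |aborted| set) when the index exceeds the fixed-slot limit,
// mirroring the Abort(kTooManySpillSlotsNeededForOSR) path in the hunk.
int OsrSpillIndex(const FakeEnvironment& env, int env_index, bool* aborted) {
  *aborted = false;
  if (env.is_parameter_index(env_index)) {
    // In V8 this goes through chunk()->GetParameterStackSlot(env_index);
    // the sketch just echoes the index to stay self-contained.
    return env_index;
  }
  int spill_index = env_index - env.first_local_index;
  if (spill_index > kMaxFixedSlotIndex) {
    *aborted = true;
    return 0;
  }
  return spill_index;
}

int main() {
  FakeEnvironment env{/*parameter_count=*/2, /*first_local_index=*/2};
  bool aborted;
  std::printf("local #0 -> spill %d\n", OsrSpillIndex(env, 2, &aborted));  // 0
  std::printf("local #3 -> spill %d\n", OsrSpillIndex(env, 5, &aborted));  // 3
}
```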
| (...skipping 146 matching lines...) |
| 2602 | 2620 |
| 2603 | 2621 |
| 2604 LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) { | 2622 LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) { |
| 2605 LOperand* object = UseRegister(instr->object()); | 2623 LOperand* object = UseRegister(instr->object()); |
| 2606 LOperand* index = UseRegister(instr->index()); | 2624 LOperand* index = UseRegister(instr->index()); |
| 2607 return DefineAsRegister(new(zone()) LLoadFieldByIndex(object, index)); | 2625 return DefineAsRegister(new(zone()) LLoadFieldByIndex(object, index)); |
| 2608 } | 2626 } |
| 2609 | 2627 |
| 2610 | 2628 |
| 2611 } } // namespace v8::internal | 2629 } } // namespace v8::internal |