| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 14 matching lines...) |
| 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
| 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| 27 | 27 |
| 28 #include "v8.h" | 28 #include "v8.h" |
| 29 | 29 |
| 30 #if V8_TARGET_ARCH_X64 | 30 #if V8_TARGET_ARCH_X64 |
| 31 | 31 |
| 32 #include "lithium-allocator-inl.h" | 32 #include "lithium-allocator-inl.h" |
| 33 #include "x64/lithium-x64.h" | 33 #include "x64/lithium-x64.h" |
| 34 #include "x64/lithium-codegen-x64.h" | 34 #include "x64/lithium-codegen-x64.h" |
| 35 #include "hydrogen-osr.h" |
| 35 | 36 |
| 36 namespace v8 { | 37 namespace v8 { |
| 37 namespace internal { | 38 namespace internal { |
| 38 | 39 |
| 39 #define DEFINE_COMPILE(type) \ | 40 #define DEFINE_COMPILE(type) \ |
| 40 void L##type::CompileToNative(LCodeGen* generator) { \ | 41 void L##type::CompileToNative(LCodeGen* generator) { \ |
| 41 generator->Do##type(this); \ | 42 generator->Do##type(this); \ |
| 42 } | 43 } |
| 43 LITHIUM_CONCRETE_INSTRUCTION_LIST(DEFINE_COMPILE) | 44 LITHIUM_CONCRETE_INSTRUCTION_LIST(DEFINE_COMPILE) |
| 44 #undef DEFINE_COMPILE | 45 #undef DEFINE_COMPILE |
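Side note for reviewers: LITHIUM_CONCRETE_INSTRUCTION_LIST stamps DEFINE_COMPILE out once per concrete instruction, so every CompileToNative is a double-dispatch stub into the codegen. A minimal sketch of one expansion, using Goto as an example entry (the class and visitor names follow the L##type / Do##type pattern above):

    // Hypothetical expansion of DEFINE_COMPILE(Goto): the Lithium node
    // forwards itself to the matching LCodeGen visitor method.
    void LGoto::CompileToNative(LCodeGen* generator) {
      generator->DoGoto(this);
    }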
| (...skipping 387 matching lines...) |
| 432 object()->PrintTo(stream); | 433 object()->PrintTo(stream); |
| 433 stream->Add(" %p -> %p", *original_map(), *transitioned_map()); | 434 stream->Add(" %p -> %p", *original_map(), *transitioned_map()); |
| 434 } | 435 } |
| 435 | 436 |
| 436 | 437 |
| 437 LPlatformChunk* LChunkBuilder::Build() { | 438 LPlatformChunk* LChunkBuilder::Build() { |
| 438 ASSERT(is_unused()); | 439 ASSERT(is_unused()); |
| 439 chunk_ = new(zone()) LPlatformChunk(info(), graph()); | 440 chunk_ = new(zone()) LPlatformChunk(info(), graph()); |
| 440 LPhase phase("L_Building chunk", chunk_); | 441 LPhase phase("L_Building chunk", chunk_); |
| 441 status_ = BUILDING; | 442 status_ = BUILDING; |
| 443 |
| 444 // If compiling for OSR, reserve space for the unoptimized frame, |
| 445 // which will be subsumed into this frame. |
| 446 if (graph()->has_osr()) { |
| 447 for (int i = graph()->osr()->UnoptimizedFrameSlots(); i > 0; i--) { |
| 448 chunk_->GetNextSpillIndex(false); |
| 449 } |
| 450 } |
| 451 |
| 442 const ZoneList<HBasicBlock*>* blocks = graph()->blocks(); | 452 const ZoneList<HBasicBlock*>* blocks = graph()->blocks(); |
| 443 for (int i = 0; i < blocks->length(); i++) { | 453 for (int i = 0; i < blocks->length(); i++) { |
| 444 HBasicBlock* next = NULL; | 454 HBasicBlock* next = NULL; |
| 445 if (i < blocks->length() - 1) next = blocks->at(i + 1); | 455 if (i < blocks->length() - 1) next = blocks->at(i + 1); |
| 446 DoBasicBlock(blocks->at(i), next); | 456 DoBasicBlock(blocks->at(i), next); |
| 447 if (is_aborted()) return NULL; | 457 if (is_aborted()) return NULL; |
| 448 } | 458 } |
| 449 status_ = DONE; | 459 status_ = DONE; |
| 450 return chunk_; | 460 return chunk_; |
| 451 } | 461 } |
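The hunk above is the heart of the change: when the graph was built for on-stack replacement, the optimized frame must be at least as large as the unoptimized frame it subsumes, so the builder claims those spill indices before any block is processed. A sketch of the effect, with ChunkSlots standing in (an assumption, not the real LPlatformChunk) for the spill-slot bookkeeping:

    // Stand-in for LPlatformChunk's spill-slot counter. Assumption: on
    // x64 a double also fits in one pointer-sized slot, so the is_double
    // flag does not change the accounting here.
    struct ChunkSlots {
      int spill_slot_count = 0;
      int GetNextSpillIndex(bool /*is_double*/) { return spill_slot_count++; }
    };

    // Burn the first N spill indices so OSR values parked in the
    // unoptimized frame keep their positions once the optimized frame
    // takes over; later spill requests land above the reserved area.
    void ReserveOsrSpillSlots(ChunkSlots* chunk, int unoptimized_frame_slots) {
      for (int i = unoptimized_frame_slots; i > 0; i--) {
        chunk->GetNextSpillIndex(false);
      }
    }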
| (...skipping 1900 matching lines...) |
| 2352 CodeStubInterfaceDescriptor* descriptor = | 2362 CodeStubInterfaceDescriptor* descriptor = |
| 2353 info()->code_stub()->GetInterfaceDescriptor(info()->isolate()); | 2363 info()->code_stub()->GetInterfaceDescriptor(info()->isolate()); |
| 2354 int index = static_cast<int>(instr->index()); | 2364 int index = static_cast<int>(instr->index()); |
| 2355 Register reg = DESCRIPTOR_GET_PARAMETER_REGISTER(descriptor, index); | 2365 Register reg = DESCRIPTOR_GET_PARAMETER_REGISTER(descriptor, index); |
| 2356 return DefineFixed(result, reg); | 2366 return DefineFixed(result, reg); |
| 2357 } | 2367 } |
| 2358 } | 2368 } |
| 2359 | 2369 |
| 2360 | 2370 |
| 2361 LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) { | 2371 LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) { |
| 2362 int spill_index = chunk()->GetNextSpillIndex(false); // Not double-width. | 2372 // Use an index that corresponds to the location in the unoptimized frame, |
| 2363 if (spill_index > LUnallocated::kMaxFixedSlotIndex) { | 2373 // which the optimized frame will subsume. |
| 2364 Abort(kTooManySpillSlotsNeededForOSR); | 2374 int env_index = instr->index(); |
| 2365 spill_index = 0; | 2375 int spill_index = 0; |
| 2376 if (instr->environment()->is_parameter_index(env_index)) { |
| 2377 spill_index = chunk()->GetParameterStackSlot(env_index); |
| 2378 } else { |
| 2379 spill_index = env_index - instr->environment()->first_local_index(); |
| 2380 if (spill_index > LUnallocated::kMaxFixedSlotIndex) { |
| 2381 Abort(kTooManySpillSlotsNeededForOSR); |
| 2382 spill_index = 0; |
| 2383 } |
| 2366 } | 2384 } |
| 2367 return DefineAsSpilled(new(zone()) LUnknownOSRValue, spill_index); | 2385 return DefineAsSpilled(new(zone()) LUnknownOSRValue, spill_index); |
| 2368 } | 2386 } |
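Read together with the Build() hunk, the rewritten DoUnknownOSRValue no longer hands out fresh spill slots; it derives each value's slot from its position in the unoptimized environment, so the value lands exactly where the unoptimized frame left it. A sketch of the mapping (OsrEnvironment and SpillIndexFor are illustrative stand-ins, not the V8 API; parameter_slot stands for the result of chunk()->GetParameterStackSlot(env_index)):

    // Illustrative stand-in for the bits of HEnvironment used here.
    struct OsrEnvironment {
      int parameter_count;
      int first_local_index;
      bool is_parameter_index(int index) const {
        return index < parameter_count;  // assumption: parameters come first
      }
    };

    // Mirrors the new control flow: parameters keep their dedicated
    // parameter stack slots; locals index into the spill area reserved in
    // Build(), falling back to slot 0 when the unoptimized frame is too
    // large to address as fixed slots.
    int SpillIndexFor(const OsrEnvironment& env, int env_index,
                      int parameter_slot, int max_fixed_slot_index) {
      if (env.is_parameter_index(env_index)) {
        return parameter_slot;  // chunk()->GetParameterStackSlot(env_index)
      }
      int spill_index = env_index - env.first_local_index;
      if (spill_index > max_fixed_slot_index) {
        // The real code calls Abort(kTooManySpillSlotsNeededForOSR) first.
        spill_index = 0;
      }
      return spill_index;
    }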
| 2369 | 2387 |
| 2370 | 2388 |
| 2371 LInstruction* LChunkBuilder::DoCallStub(HCallStub* instr) { | 2389 LInstruction* LChunkBuilder::DoCallStub(HCallStub* instr) { |
| 2372 argument_count_ -= instr->argument_count(); | 2390 argument_count_ -= instr->argument_count(); |
| 2373 return MarkAsCall(DefineFixed(new(zone()) LCallStub, rax), instr); | 2391 return MarkAsCall(DefineFixed(new(zone()) LCallStub, rax), instr); |
| 2374 } | 2392 } |
| 2375 | 2393 |
| (...skipping 150 matching lines...) |
| 2526 LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) { | 2544 LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) { |
| 2527 LOperand* object = UseRegister(instr->object()); | 2545 LOperand* object = UseRegister(instr->object()); |
| 2528 LOperand* index = UseTempRegister(instr->index()); | 2546 LOperand* index = UseTempRegister(instr->index()); |
| 2529 return DefineSameAsFirst(new(zone()) LLoadFieldByIndex(object, index)); | 2547 return DefineSameAsFirst(new(zone()) LLoadFieldByIndex(object, index)); |
| 2530 } | 2548 } |
| 2531 | 2549 |
| 2532 | 2550 |
| 2533 } } // namespace v8::internal | 2551 } } // namespace v8::internal |
| 2534 | 2552 |
| 2535 #endif // V8_TARGET_ARCH_X64 | 2553 #endif // V8_TARGET_ARCH_X64 |