OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 320 matching lines...)
331 } | 331 } |
332 | 332 |
333 // Deferred code is the last part of the instruction sequence. Mark | 333 // Deferred code is the last part of the instruction sequence. Mark |
334 // the generated code as done unless we bailed out. | 334 // the generated code as done unless we bailed out. |
335 if (!is_aborted()) status_ = DONE; | 335 if (!is_aborted()) status_ = DONE; |
336 return !is_aborted(); | 336 return !is_aborted(); |
337 } | 337 } |
338 | 338 |
339 | 339 |
340 bool LCodeGen::GenerateSafepointTable() { | 340 bool LCodeGen::GenerateSafepointTable() { |
341 Abort("Unimplemented: %s", "GeneratePrologue"); | 341 ASSERT(is_done()); |
342 return false; | 342 safepoints_.Emit(masm(), StackSlotCount()); |
| 343 return !is_aborted(); |
343 } | 344 } |
344 | 345 |
345 | 346 |
346 Register LCodeGen::ToRegister(int index) const { | 347 Register LCodeGen::ToRegister(int index) const { |
347 return Register::FromAllocationIndex(index); | 348 return Register::FromAllocationIndex(index); |
348 } | 349 } |
349 | 350 |
350 | 351 |
351 XMMRegister LCodeGen::ToDoubleRegister(int index) const { | 352 XMMRegister LCodeGen::ToDoubleRegister(int index) const { |
352 return XMMRegister::FromAllocationIndex(index); | 353 return XMMRegister::FromAllocationIndex(index); |
(...skipping 132 matching lines...)
485 translation->StoreLiteral(src_index); | 486 translation->StoreLiteral(src_index); |
486 } else { | 487 } else { |
487 UNREACHABLE(); | 488 UNREACHABLE(); |
488 } | 489 } |
489 } | 490 } |
490 | 491 |
491 | 492 |
492 void LCodeGen::CallCode(Handle<Code> code, | 493 void LCodeGen::CallCode(Handle<Code> code, |
493 RelocInfo::Mode mode, | 494 RelocInfo::Mode mode, |
494 LInstruction* instr) { | 495 LInstruction* instr) { |
495 Abort("Unimplemented: %s", "CallCode"); | 496 if (instr != NULL) { |
| 497 LPointerMap* pointers = instr->pointer_map(); |
| 498 RecordPosition(pointers->position()); |
| 499 __ call(code, mode); |
| 500 RegisterLazyDeoptimization(instr); |
| 501 } else { |
| 502 LPointerMap no_pointers(0); |
| 503 RecordPosition(no_pointers.position()); |
| 504 __ call(code, mode); |
| 505 RecordSafepoint(&no_pointers, Safepoint::kNoDeoptimizationIndex); |
| 506 } |
| 507 |
| 508 // Signal that we don't inline smi code before these stubs in the |
| 509 // optimizing code generator. |
| 510 if (code->kind() == Code::TYPE_RECORDING_BINARY_OP_IC || |
| 511 code->kind() == Code::COMPARE_IC) { |
| 512 __ nop(); |
| 513 } |
496 } | 514 } |
497 | 515 |
498 | 516 |
499 void LCodeGen::CallRuntime(Runtime::Function* function, | 517 void LCodeGen::CallRuntime(Runtime::Function* function, |
500 int num_arguments, | 518 int num_arguments, |
501 LInstruction* instr) { | 519 LInstruction* instr) { |
502 Abort("Unimplemented: %s", "CallRuntime"); | 520 Abort("Unimplemented: %s", "CallRuntime"); |
503 } | 521 } |
504 | 522 |
505 | 523 |
506 void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr) { | 524 void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr) { |
507 // Create the environment to bail out to. If the call has side effects, | 525 // Create the environment to bail out to. If the call has side effects, |
508 // execution has to continue after the call; otherwise execution can continue | 526 // execution has to continue after the call; otherwise execution can continue |
509 // from a previous bailout point, repeating the call. | 527 // from a previous bailout point, repeating the call. |
510 LEnvironment* deoptimization_environment; | 528 LEnvironment* deoptimization_environment; |
511 if (instr->HasDeoptimizationEnvironment()) { | 529 if (instr->HasDeoptimizationEnvironment()) { |
512 deoptimization_environment = instr->deoptimization_environment(); | 530 deoptimization_environment = instr->deoptimization_environment(); |
513 } else { | 531 } else { |
514 deoptimization_environment = instr->environment(); | 532 deoptimization_environment = instr->environment(); |
515 } | 533 } |
516 | 534 |
517 RegisterEnvironmentForDeoptimization(deoptimization_environment); | 535 RegisterEnvironmentForDeoptimization(deoptimization_environment); |
518 RecordSafepoint(instr->pointer_map(), | 536 RecordSafepoint(instr->pointer_map(), |
519 deoptimization_environment->deoptimization_index()); | 537 deoptimization_environment->deoptimization_index()); |
520 } | 538 } |
521 | 539 |
522 | 540 |
523 void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment) { | 541 void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment) { |
524 Abort("Unimplemented: %s", "RegisterEnvironmentForDeoptimization"); | 542 if (!environment->HasBeenRegistered()) { |
| 543 // Physical stack frame layout: |
| 544 // -x ............. -4 0 ..................................... y |
| 545 // [incoming arguments] [spill slots] [pushed outgoing arguments] |
| 546 |
| 547 // Layout of the environment: |
| 548 // 0 ..................................................... size-1 |
| 549 // [parameters] [locals] [expression stack including arguments] |
| 550 |
| 551 // Layout of the translation: |
| 552 // 0 ........................................................ size - 1 + 4 |
| 553 // [expression stack including arguments] [locals] [4 words] [parameters] |
| 554 // |>------------ translation_size ------------<| |
| 555 |
| 556 int frame_count = 0; |
| 557 for (LEnvironment* e = environment; e != NULL; e = e->outer()) { |
| 558 ++frame_count; |
| 559 } |
| 560 Translation translation(&translations_, frame_count); |
| 561 WriteTranslation(environment, &translation); |
| 562 int deoptimization_index = deoptimizations_.length(); |
| 563 environment->Register(deoptimization_index, translation.index()); |
| 564 deoptimizations_.Add(environment); |
| 565 } |
525 } | 566 } |
526 | 567 |
527 | 568 |
528 void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) { | 569 void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) { |
529 Abort("Unimplemented: %s", "DeoptimizeIf"); | 570 Abort("Unimplemented: %s", "DeoptimizeIf"); |
530 } | 571 } |
531 | 572 |
532 | 573 |
533 void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) { | 574 void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) { |
534 int length = deoptimizations_.length(); | 575 int length = deoptimizations_.length(); |
(...skipping 317 matching lines...)
852 Abort("Unimplemented: %s", "EmitBranch"); | 893 Abort("Unimplemented: %s", "EmitBranch"); |
853 } | 894 } |
854 | 895 |
855 | 896 |
856 void LCodeGen::DoBranch(LBranch* instr) { | 897 void LCodeGen::DoBranch(LBranch* instr) { |
857 Abort("Unimplemented: %s", "DoBranch"); | 898 Abort("Unimplemented: %s", "DoBranch"); |
858 } | 899 } |
859 | 900 |
860 | 901 |
861 void LCodeGen::EmitGoto(int block, LDeferredCode* deferred_stack_check) { | 902 void LCodeGen::EmitGoto(int block, LDeferredCode* deferred_stack_check) { |
862 Abort("Unimplemented: %s", "EmitGoto"); | 903 block = chunk_->LookupDestination(block); |
| 904 int next_block = GetNextEmittedBlock(current_block_); |
| 905 if (block != next_block) { |
| 906 // Perform stack overflow check if this goto needs it before jumping. |
| 907 if (deferred_stack_check != NULL) { |
| 908 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); |
| 909 __ j(above_equal, chunk_->GetAssemblyLabel(block)); |
| 910 __ jmp(deferred_stack_check->entry()); |
| 911 deferred_stack_check->SetExit(chunk_->GetAssemblyLabel(block)); |
| 912 } else { |
| 913 __ jmp(chunk_->GetAssemblyLabel(block)); |
| 914 } |
| 915 } |
863 } | 916 } |
864 | 917 |
865 | 918 |
866 void LCodeGen::DoDeferredStackCheck(LGoto* instr) { | 919 void LCodeGen::DoDeferredStackCheck(LGoto* instr) { |
867 Abort("Unimplemented: %s", "DoDeferredStackCheck"); | 920 Abort("Unimplemented: %s", "DoDeferredStackCheck"); |
868 } | 921 } |
869 | 922 |
870 | 923 |
871 void LCodeGen::DoGoto(LGoto* instr) { | 924 void LCodeGen::DoGoto(LGoto* instr) { |
872 class DeferredStackCheck: public LDeferredCode { | 925 class DeferredStackCheck: public LDeferredCode { |
(...skipping 593 matching lines...)
1466 | 1519 |
1467 void LCodeGen::DoOsrEntry(LOsrEntry* instr) { | 1520 void LCodeGen::DoOsrEntry(LOsrEntry* instr) { |
1468 Abort("Unimplemented: %s", "DoOsrEntry"); | 1521 Abort("Unimplemented: %s", "DoOsrEntry"); |
1469 } | 1522 } |
1470 | 1523 |
1471 #undef __ | 1524 #undef __ |
1472 | 1525 |
1473 } } // namespace v8::internal | 1526 } } // namespace v8::internal |
1474 | 1527 |
1475 #endif // V8_TARGET_ARCH_X64 | 1528 #endif // V8_TARGET_ARCH_X64 |