| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 622 matching lines...) |
| 633 HInstruction* hinstr, | 633 HInstruction* hinstr, |
| 634 CanDeoptimize can_deoptimize) { | 634 CanDeoptimize can_deoptimize) { |
| 635 info()->MarkAsNonDeferredCalling(); | 635 info()->MarkAsNonDeferredCalling(); |
| 636 | 636 |
| 637 #ifdef DEBUG | 637 #ifdef DEBUG |
| 638 instr->VerifyCall(); | 638 instr->VerifyCall(); |
| 639 #endif | 639 #endif |
| 640 instr->MarkAsCall(); | 640 instr->MarkAsCall(); |
| 641 instr = AssignPointerMap(instr); | 641 instr = AssignPointerMap(instr); |
| 642 | 642 |
| 643 if (hinstr->HasObservableSideEffects()) { | |
| 644 ASSERT(hinstr->next()->IsSimulate()); | |
| 645 HSimulate* sim = HSimulate::cast(hinstr->next()); | |
| 646 ASSERT(instruction_pending_deoptimization_environment_ == NULL); | |
| 647 ASSERT(pending_deoptimization_ast_id_.IsNone()); | |
| 648 instruction_pending_deoptimization_environment_ = instr; | |
| 649 pending_deoptimization_ast_id_ = sim->ast_id(); | |
| 650 } | |
| 651 | |
| 652 // If the instruction does not have side effects, lazy deoptimization | 643 // If the instruction does not have side effects, lazy deoptimization |
| 653 // after the call will try to deoptimize to the point before the call. | 644 // after the call will try to deoptimize to the point before the call. |
| 654 // Thus we still need to attach an environment to this call even if | 645 // Thus we still need to attach an environment to this call even if |
| 655 // the call sequence cannot deoptimize eagerly. | 646 // the call sequence cannot deoptimize eagerly. |
| 656 bool needs_environment = | 647 bool needs_environment = |
| 657 (can_deoptimize == CAN_DEOPTIMIZE_EAGERLY) || | 648 (can_deoptimize == CAN_DEOPTIMIZE_EAGERLY) || |
| 658 !hinstr->HasObservableSideEffects(); | 649 !hinstr->HasObservableSideEffects(); |
| 659 if (needs_environment && !instr->HasEnvironment()) { | 650 if (needs_environment && !instr->HasEnvironment()) { |
| 660 instr = AssignEnvironment(instr); | 651 instr = AssignEnvironment(instr); |
| 661 } | 652 } |
| (...skipping 255 matching lines...) |
| 917 } | 908 } |
| 918 #endif | 909 #endif |
| 919 | 910 |
| 920 if (FLAG_stress_pointer_maps && !instr->HasPointerMap()) { | 911 if (FLAG_stress_pointer_maps && !instr->HasPointerMap()) { |
| 921 instr = AssignPointerMap(instr); | 912 instr = AssignPointerMap(instr); |
| 922 } | 913 } |
| 923 if (FLAG_stress_environments && !instr->HasEnvironment()) { | 914 if (FLAG_stress_environments && !instr->HasEnvironment()) { |
| 924 instr = AssignEnvironment(instr); | 915 instr = AssignEnvironment(instr); |
| 925 } | 916 } |
| 926 chunk_->AddInstruction(instr, current_block_); | 917 chunk_->AddInstruction(instr, current_block_); |
| | 918 |
| | 919 if (instr->IsCall()) { |
| | 920 HValue* hydrogen_value_for_lazy_bailout = current; |
| | 921 LInstruction* instruction_needing_environment = NULL; |
| | 922 if (current->HasObservableSideEffects()) { |
| | 923 HSimulate* sim = HSimulate::cast(current->next()); |
| | 924 instruction_needing_environment = instr; |
| | 925 sim->ReplayEnvironment(current_block_->last_environment()); |
| | 926 hydrogen_value_for_lazy_bailout = sim; |
| | 927 } |
| | 928 LInstruction* bailout = AssignEnvironment(new(zone()) LLazyBailout()); |
| | 929 bailout->set_hydrogen_value(hydrogen_value_for_lazy_bailout); |
| | 930 chunk_->AddInstruction(bailout, current_block_); |
| | 931 if (instruction_needing_environment != NULL) { |
| | 932 // Store the lazy deopt environment with the instruction if needed. |
| | 933 // Right now it is only used for LInstanceOfKnownGlobal. |
| | 934 instruction_needing_environment-> |
| | 935 SetDeferredLazyDeoptimizationEnvironment(bailout->environment()); |
| | 936 } |
| | 937 } |
| 927 } | 938 } |
| 928 current_instruction_ = old_current; | 939 current_instruction_ = old_current; |
| 929 } | 940 } |
| 930 | 941 |
| 931 | 942 |
| 932 LEnvironment* LChunkBuilder::CreateEnvironment( | 943 LEnvironment* LChunkBuilder::CreateEnvironment( |
| 933 HEnvironment* hydrogen_env, | 944 HEnvironment* hydrogen_env, |
| 934 int* argument_index_accumulator, | 945 int* argument_index_accumulator, |
| 935 ZoneList<HValue*>* objects_to_materialize) { | 946 ZoneList<HValue*>* objects_to_materialize) { |
| 936 if (hydrogen_env == NULL) return NULL; | 947 if (hydrogen_env == NULL) return NULL; |
| (...skipping 1585 matching lines...) |
| 2522 | 2533 |
| 2523 | 2534 |
| 2524 LInstruction* LChunkBuilder::DoIsConstructCallAndBranch( | 2535 LInstruction* LChunkBuilder::DoIsConstructCallAndBranch( |
| 2525 HIsConstructCallAndBranch* instr) { | 2536 HIsConstructCallAndBranch* instr) { |
| 2526 return new(zone()) LIsConstructCallAndBranch(TempRegister()); | 2537 return new(zone()) LIsConstructCallAndBranch(TempRegister()); |
| 2527 } | 2538 } |
| 2528 | 2539 |
| 2529 | 2540 |
| 2530 LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) { | 2541 LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) { |
| 2531 instr->ReplayEnvironment(current_block_->last_environment()); | 2542 instr->ReplayEnvironment(current_block_->last_environment()); |
| 2532 | |
| 2533 // If there is an instruction pending a deoptimization environment, | |
| 2534 // create a lazy bailout instruction to capture the environment. | |
| 2535 if (pending_deoptimization_ast_id_ == instr->ast_id()) { | |
| 2536 LLazyBailout* lazy_bailout = new(zone()) LLazyBailout; | |
| 2537 LInstruction* result = AssignEnvironment(lazy_bailout); | |
| 2538 // Store the lazy deopt environment with the instruction if needed. Right | |
| 2539 // now it is only used for LInstanceOfKnownGlobal. | |
| 2540 instruction_pending_deoptimization_environment_-> | |
| 2541 SetDeferredLazyDeoptimizationEnvironment(result->environment()); | |
| 2542 instruction_pending_deoptimization_environment_ = NULL; | |
| 2543 pending_deoptimization_ast_id_ = BailoutId::None(); | |
| 2544 return result; | |
| 2545 } | |
| 2546 | |
| 2547 return NULL; | 2543 return NULL; |
| 2548 } | 2544 } |
| 2549 | 2545 |
| 2550 | 2546 |
| 2551 LInstruction* LChunkBuilder::DoStackCheck(HStackCheck* instr) { | 2547 LInstruction* LChunkBuilder::DoStackCheck(HStackCheck* instr) { |
| 2552 info()->MarkAsDeferredCalling(); | 2548 info()->MarkAsDeferredCalling(); |
| 2553 if (instr->is_function_entry()) { | 2549 if (instr->is_function_entry()) { |
| 2554 LOperand* context = UseFixed(instr->context(), rsi); | 2550 LOperand* context = UseFixed(instr->context(), rsi); |
| 2555 return MarkAsCall(new(zone()) LStackCheck(context), instr); | 2551 return MarkAsCall(new(zone()) LStackCheck(context), instr); |
| 2556 } else { | 2552 } else { |
| (...skipping 69 matching lines...) |
| 2626 LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) { | 2622 LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) { |
| 2627 LOperand* object = UseRegister(instr->object()); | 2623 LOperand* object = UseRegister(instr->object()); |
| 2628 LOperand* index = UseTempRegister(instr->index()); | 2624 LOperand* index = UseTempRegister(instr->index()); |
| 2629 return DefineSameAsFirst(new(zone()) LLoadFieldByIndex(object, index)); | 2625 return DefineSameAsFirst(new(zone()) LLoadFieldByIndex(object, index)); |
| 2630 } | 2626 } |
| 2631 | 2627 |
| 2632 | 2628 |
| 2633 } } // namespace v8::internal | 2629 } } // namespace v8::internal |
| 2634 | 2630 |
| 2635 #endif // V8_TARGET_ARCH_X64 | 2631 #endif // V8_TARGET_ARCH_X64 |
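The change above moves lazy-bailout emission out of DoSimulate: instead of recording instruction_pending_deoptimization_environment_ and pending_deoptimization_ast_id_ in MarkAsCall and waiting for the matching HSimulate, the chunk builder now emits an LLazyBailout immediately after chunk_->AddInstruction(instr, current_block_) whenever instr->IsCall(), and points it at the trailing HSimulate when the call has observable side effects. The sketch below is a minimal, hypothetical model of just that decision; HInstr, LInstr, and the free-standing AddInstruction are stand-in stubs, not V8's real classes, and the environment/pointer-map bookkeeping is omitted.

```cpp
// Minimal, self-contained sketch (stub types, not V8's real API) of the new
// "emit the lazy bailout right after the call" flow introduced by this patch.
#include <cassert>
#include <iostream>
#include <string>
#include <vector>

struct HInstr {
  std::string name;
  bool is_simulate = false;               // stand-in for HSimulate
  bool observable_side_effects = false;   // HasObservableSideEffects()
  HInstr* next = nullptr;                 // next() in the hydrogen graph
};

struct LInstr {
  std::string name;
  bool is_call = false;                   // IsCall()
  const HInstr* hydrogen_value = nullptr; // set_hydrogen_value() target
};

// Models the per-instruction path after the patch: every call instruction is
// followed by a lazy-bailout instruction; when the call has observable side
// effects, the bailout targets the HSimulate that follows the call.
void AddInstruction(std::vector<LInstr>* chunk, LInstr instr,
                    const HInstr* current) {
  chunk->push_back(instr);
  if (!instr.is_call) return;

  const HInstr* lazy_bailout_target = current;
  if (current->observable_side_effects) {
    // A side-effecting call is always followed by a simulate in hydrogen.
    assert(current->next != nullptr && current->next->is_simulate);
    lazy_bailout_target = current->next;  // deoptimize to after the call
  }
  chunk->push_back(LInstr{"lazy-bailout", false, lazy_bailout_target});
}

int main() {
  HInstr sim{"simulate", true, false, nullptr};
  HInstr call{"call-runtime", false, true, &sim};

  std::vector<LInstr> chunk;
  AddInstruction(&chunk, LInstr{"call-runtime", true, &call}, &call);

  for (const LInstr& i : chunk) {
    std::cout << i.name << " -> deopt target: "
              << (i.hydrogen_value ? i.hydrogen_value->name : "none") << "\n";
  }
  return 0;
}
```

Under these assumptions, the sketch prints the call followed by a lazy-bailout whose deopt target is the simulate, which is why the DoSimulate-side bookkeeping removed in the left column is no longer needed.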