| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 618 matching lines...) |
| 629 LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr, | 629 LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr, |
| 630 HInstruction* hinstr, | 630 HInstruction* hinstr, |
| 631 CanDeoptimize can_deoptimize) { | 631 CanDeoptimize can_deoptimize) { |
| 632 info()->MarkAsNonDeferredCalling(); | 632 info()->MarkAsNonDeferredCalling(); |
| 633 #ifdef DEBUG | 633 #ifdef DEBUG |
| 634 instr->VerifyCall(); | 634 instr->VerifyCall(); |
| 635 #endif | 635 #endif |
| 636 instr->MarkAsCall(); | 636 instr->MarkAsCall(); |
| 637 instr = AssignPointerMap(instr); | 637 instr = AssignPointerMap(instr); |
| 638 | 638 |
| 639 if (hinstr->HasObservableSideEffects()) { | |
| 640 ASSERT(hinstr->next()->IsSimulate()); | |
| 641 HSimulate* sim = HSimulate::cast(hinstr->next()); | |
| 642 ASSERT(instruction_pending_deoptimization_environment_ == NULL); | |
| 643 ASSERT(pending_deoptimization_ast_id_.IsNone()); | |
| 644 instruction_pending_deoptimization_environment_ = instr; | |
| 645 pending_deoptimization_ast_id_ = sim->ast_id(); | |
| 646 } | |
| 647 | |
| 648 // If instruction does not have side-effects lazy deoptimization | 639 // If instruction does not have side-effects lazy deoptimization |
| 649 // after the call will try to deoptimize to the point before the call. | 640 // after the call will try to deoptimize to the point before the call. |
| 650 // Thus we still need to attach environment to this call even if | 641 // Thus we still need to attach environment to this call even if |
| 651 // call sequence can not deoptimize eagerly. | 642 // call sequence can not deoptimize eagerly. |
| 652 bool needs_environment = | 643 bool needs_environment = |
| 653 (can_deoptimize == CAN_DEOPTIMIZE_EAGERLY) || | 644 (can_deoptimize == CAN_DEOPTIMIZE_EAGERLY) || |
| 654 !hinstr->HasObservableSideEffects(); | 645 !hinstr->HasObservableSideEffects(); |
| 655 if (needs_environment && !instr->HasEnvironment()) { | 646 if (needs_environment && !instr->HasEnvironment()) { |
| 656 instr = AssignEnvironment(instr); | 647 instr = AssignEnvironment(instr); |
| 657 } | 648 } |
| (...skipping 264 matching lines...) |
| 922 } | 913 } |
| 923 #endif | 914 #endif |
| 924 | 915 |
| 925 if (FLAG_stress_pointer_maps && !instr->HasPointerMap()) { | 916 if (FLAG_stress_pointer_maps && !instr->HasPointerMap()) { |
| 926 instr = AssignPointerMap(instr); | 917 instr = AssignPointerMap(instr); |
| 927 } | 918 } |
| 928 if (FLAG_stress_environments && !instr->HasEnvironment()) { | 919 if (FLAG_stress_environments && !instr->HasEnvironment()) { |
| 929 instr = AssignEnvironment(instr); | 920 instr = AssignEnvironment(instr); |
| 930 } | 921 } |
| 931 chunk_->AddInstruction(instr, current_block_); | 922 chunk_->AddInstruction(instr, current_block_); |
| | 923 |
| | 924 if (instr->IsCall()) { |
| | 925 HValue* hydrogen_value_for_lazy_bailout = current; |
| | 926 LInstruction* instruction_needing_environment = NULL; |
| | 927 if (current->HasObservableSideEffects()) { |
| | 928 HSimulate* sim = HSimulate::cast(current->next()); |
| | 929 instruction_needing_environment = instr; |
| | 930 sim->ReplayEnvironment(current_block_->last_environment()); |
| | 931 hydrogen_value_for_lazy_bailout = sim; |
| | 932 } |
| | 933 LInstruction* bailout = AssignEnvironment(new(zone()) LLazyBailout()); |
| | 934 bailout->set_hydrogen_value(hydrogen_value_for_lazy_bailout); |
| | 935 chunk_->AddInstruction(bailout, current_block_); |
| | 936 if (instruction_needing_environment != NULL) { |
| | 937 // Store the lazy deopt environment with the instruction if needed. |
| | 938 // Right now it is only used for LInstanceOfKnownGlobal. |
| | 939 instruction_needing_environment-> |
| | 940 SetDeferredLazyDeoptimizationEnvironment(bailout->environment()); |
| | 941 } |
| | 942 } |
| 932 } | 943 } |
| 933 current_instruction_ = old_current; | 944 current_instruction_ = old_current; |
| 934 } | 945 } |
| 935 | 946 |
| 936 | 947 |
| 937 LEnvironment* LChunkBuilder::CreateEnvironment( | 948 LEnvironment* LChunkBuilder::CreateEnvironment( |
| 938 HEnvironment* hydrogen_env, | 949 HEnvironment* hydrogen_env, |
| 939 int* argument_index_accumulator, | 950 int* argument_index_accumulator, |
| 940 ZoneList<HValue*>* objects_to_materialize) { | 951 ZoneList<HValue*>* objects_to_materialize) { |
| 941 if (hydrogen_env == NULL) return NULL; | 952 if (hydrogen_env == NULL) return NULL; |
| (...skipping 1556 matching lines...) |
| 2498 | 2509 |
| 2499 | 2510 |
| 2500 LInstruction* LChunkBuilder::DoIsConstructCallAndBranch( | 2511 LInstruction* LChunkBuilder::DoIsConstructCallAndBranch( |
| 2501 HIsConstructCallAndBranch* instr) { | 2512 HIsConstructCallAndBranch* instr) { |
| 2502 return new(zone()) LIsConstructCallAndBranch(TempRegister()); | 2513 return new(zone()) LIsConstructCallAndBranch(TempRegister()); |
| 2503 } | 2514 } |
| 2504 | 2515 |
| 2505 | 2516 |
| 2506 LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) { | 2517 LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) { |
| 2507 instr->ReplayEnvironment(current_block_->last_environment()); | 2518 instr->ReplayEnvironment(current_block_->last_environment()); |
| 2508 | |
| 2509 // If there is an instruction pending deoptimization environment create a | |
| 2510 // lazy bailout instruction to capture the environment. | |
| 2511 if (pending_deoptimization_ast_id_ == instr->ast_id()) { | |
| 2512 LInstruction* result = new(zone()) LLazyBailout; | |
| 2513 result = AssignEnvironment(result); | |
| 2514 // Store the lazy deopt environment with the instruction if needed. Right | |
| 2515 // now it is only used for LInstanceOfKnownGlobal. | |
| 2516 instruction_pending_deoptimization_environment_-> | |
| 2517 SetDeferredLazyDeoptimizationEnvironment(result->environment()); | |
| 2518 instruction_pending_deoptimization_environment_ = NULL; | |
| 2519 pending_deoptimization_ast_id_ = BailoutId::None(); | |
| 2520 return result; | |
| 2521 } | |
| 2522 | |
| 2523 return NULL; | 2519 return NULL; |
| 2524 } | 2520 } |
| 2525 | 2521 |
| 2526 | 2522 |
| 2527 LInstruction* LChunkBuilder::DoStackCheck(HStackCheck* instr) { | 2523 LInstruction* LChunkBuilder::DoStackCheck(HStackCheck* instr) { |
| 2528 if (instr->is_function_entry()) { | 2524 if (instr->is_function_entry()) { |
| 2529 LOperand* context = UseFixed(instr->context(), cp); | 2525 LOperand* context = UseFixed(instr->context(), cp); |
| 2530 return MarkAsCall(new(zone()) LStackCheck(context), instr); | 2526 return MarkAsCall(new(zone()) LStackCheck(context), instr); |
| 2531 } else { | 2527 } else { |
| 2532 ASSERT(instr->is_backwards_branch()); | 2528 ASSERT(instr->is_backwards_branch()); |
| (...skipping 65 matching lines...) |
| 2598 | 2594 |
| 2599 | 2595 |
| 2600 LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) { | 2596 LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) { |
| 2601 LOperand* object = UseRegister(instr->object()); | 2597 LOperand* object = UseRegister(instr->object()); |
| 2602 LOperand* index = UseRegister(instr->index()); | 2598 LOperand* index = UseRegister(instr->index()); |
| 2603 return DefineAsRegister(new(zone()) LLoadFieldByIndex(object, index)); | 2599 return DefineAsRegister(new(zone()) LLoadFieldByIndex(object, index)); |
| 2604 } | 2600 } |
| 2605 | 2601 |
| 2606 | 2602 |
| 2607 } } // namespace v8::internal | 2603 } } // namespace v8::internal |
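Note on the change above: the NEW side removes the pending-deoptimization bookkeeping (instruction_pending_deoptimization_environment_ and pending_deoptimization_ast_id_) from MarkAsCall() and DoSimulate(), and instead emits an LLazyBailout immediately after each call instruction in the main emission loop, taking the bailout environment from the following HSimulate when the call has observable side effects. The standalone C++ sketch below only models that control flow; every type and function name in it (HValueModel, LInstructionModel, EmitWithLazyBailouts) is a hypothetical stand-in for illustration, not part of the V8 code under review.

#include <iostream>
#include <string>
#include <vector>

// Hypothetical, simplified stand-ins for hydrogen values and lithium
// instructions; illustration-only types, not V8 classes.
struct HValueModel {
  std::string name;
  bool is_call;
  bool has_observable_side_effects;
  bool is_simulate;
};

struct LInstructionModel {
  std::string label;
  std::string env_source;  // which hydrogen value's environment is captured
};

// Models the new "if (instr->IsCall())" block: every call is followed by a
// lazy-bailout marker whose environment comes from the next simulate when
// the call has observable side effects, and from the call itself otherwise.
std::vector<LInstructionModel> EmitWithLazyBailouts(
    const std::vector<HValueModel>& hydrogen) {
  std::vector<LInstructionModel> chunk;
  for (size_t i = 0; i < hydrogen.size(); ++i) {
    const HValueModel& current = hydrogen[i];
    if (current.is_simulate) continue;  // simulates emit no code of their own
    chunk.push_back({"L:" + current.name, ""});
    if (!current.is_call) continue;
    std::string env = current.name;
    if (current.has_observable_side_effects && i + 1 < hydrogen.size() &&
        hydrogen[i + 1].is_simulate) {
      env = hydrogen[i + 1].name;  // resume after the observable side effect
    }
    chunk.push_back({"LLazyBailout", env});
  }
  return chunk;
}

int main() {
  std::vector<HValueModel> graph = {
      {"add", false, false, false},
      {"call-runtime", true, true, false},
      {"simulate@7", false, false, true},
      {"stack-check", true, false, false},
  };
  for (const LInstructionModel& li : EmitWithLazyBailouts(graph)) {
    std::cout << li.label
              << (li.env_source.empty() ? std::string()
                                        : "  (env from " + li.env_source + ")")
              << "\n";
  }
  return 0;
}

With this input the sketch emits a bailout after both calls: the one after call-runtime captures the environment of simulate@7, while the one after stack-check (no observable side effects) captures the call's own environment, mirroring the hydrogen_value_for_lazy_bailout selection in the diff.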