| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 2573 matching lines...) |
| 2584 return false; | 2584 return false; |
| 2585 } | 2585 } |
| 2586 } | 2586 } |
| 2587 | 2587 |
| 2588 *input_offset -= kPointerSize; | 2588 *input_offset -= kPointerSize; |
| 2589 return true; | 2589 return true; |
| 2590 } | 2590 } |
| 2591 | 2591 |
| 2592 | 2592 |
| 2593 void Deoptimizer::PatchInterruptCode(Isolate* isolate, | 2593 void Deoptimizer::PatchInterruptCode(Isolate* isolate, |
| 2594 Code* unoptimized_code) { | 2594 Code* unoptimized) { |
| 2595 DisallowHeapAllocation no_gc; | 2595 DisallowHeapAllocation no_gc; |
| 2596 Code* replacement_code = | 2596 Code* replacement_code = |
| 2597 isolate->builtins()->builtin(Builtins::kOnStackReplacement); | 2597 isolate->builtins()->builtin(Builtins::kOnStackReplacement); |
| 2598 | 2598 |
| 2599 // Iterate over the back edge table and patch every interrupt | 2599 // Iterate over the back edge table and patch every interrupt |
| 2600 // call to an unconditional call to the replacement code. | 2600 // call to an unconditional call to the replacement code. |
| 2601 int loop_nesting_level = unoptimized_code->allow_osr_at_loop_nesting_level(); | 2601 int loop_nesting_level = unoptimized->allow_osr_at_loop_nesting_level(); |
| 2602 | 2602 |
| 2603 for (FullCodeGenerator::BackEdgeTableIterator back_edges(unoptimized_code); | 2603 for (FullCodeGenerator::BackEdgeTableIterator back_edges(unoptimized, &no_gc); |
| 2604 !back_edges.Done(); | 2604 !back_edges.Done(); |
| 2605 back_edges.Next()) { | 2605 back_edges.Next()) { |
| 2606 if (static_cast<int>(back_edges.loop_depth()) == loop_nesting_level) { | 2606 if (static_cast<int>(back_edges.loop_depth()) == loop_nesting_level) { |
| 2607 ASSERT_EQ(NOT_PATCHED, GetInterruptPatchState(isolate, | 2607 ASSERT_EQ(NOT_PATCHED, GetInterruptPatchState(isolate, |
| 2608 unoptimized_code, | 2608 unoptimized, |
| 2609 back_edges.pc())); | 2609 back_edges.pc())); |
| 2610 PatchInterruptCodeAt(unoptimized_code, | 2610 PatchInterruptCodeAt(unoptimized, |
| 2611 back_edges.pc(), | 2611 back_edges.pc(), |
| 2612 replacement_code); | 2612 replacement_code); |
| 2613 } | 2613 } |
| 2614 } | 2614 } |
| 2615 | 2615 |
| 2616 unoptimized_code->set_back_edges_patched_for_osr(true); | 2616 unoptimized->set_back_edges_patched_for_osr(true); |
| 2617 #ifdef DEBUG | 2617 ASSERT(Deoptimizer::VerifyInterruptCode( |
| 2618 Deoptimizer::VerifyInterruptCode( | 2618 isolate, unoptimized, loop_nesting_level)); |
| 2619 isolate, unoptimized_code, loop_nesting_level); | |
| 2620 #endif // DEBUG | |
| 2621 } | 2619 } |
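[Review note] The pattern on the NEW side — making the verifier return bool and
wrapping the call in ASSERT(...) — is what lets the #ifdef DEBUG / #endif block
at each call site disappear: ASSERT expands to nothing in release builds, so the
debug-only function is never referenced there. A minimal sketch of the idiom,
using a stand-in ASSERT macro and a hypothetical VerifyAll helper rather than
V8's actual definitions:

    #include <cassert>

    // Stand-in for V8's ASSERT: checks in debug builds, expands to nothing
    // in release builds, so its argument expression is never evaluated there.
    #ifdef DEBUG
    #define ASSERT(condition) assert(condition)
    #else
    #define ASSERT(condition) ((void) 0)
    #endif

    #ifdef DEBUG
    // Hypothetical verifier: returns true on success so it can sit directly
    // inside ASSERT(), replacing an #ifdef block at every call site.
    static bool VerifyAll(const int* depths, int count, int max_level) {
      for (int i = 0; i < count; ++i) {
        if (depths[i] > max_level) return false;
      }
      return true;
    }
    #endif

    int main() {
      int depths[] = {1, 2, 3};
      ASSERT(VerifyAll(depths, 3, 3));  // whole call compiles away in release
      return 0;
    }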
| 2622 | 2620 |
| 2623 | 2621 |
| 2624 void Deoptimizer::RevertInterruptCode(Isolate* isolate, | 2622 void Deoptimizer::RevertInterruptCode(Isolate* isolate, |
| 2625 Code* unoptimized_code) { | 2623 Code* unoptimized) { |
| 2626 DisallowHeapAllocation no_gc; | 2624 DisallowHeapAllocation no_gc; |
| 2627 Code* interrupt_code = | 2625 Code* interrupt_code = |
| 2628 isolate->builtins()->builtin(Builtins::kInterruptCheck); | 2626 isolate->builtins()->builtin(Builtins::kInterruptCheck); |
| 2629 | 2627 |
| 2630 // Iterate over the back edge table and revert the patched interrupt calls. | 2628 // Iterate over the back edge table and revert the patched interrupt calls. |
| 2631 ASSERT(unoptimized_code->back_edges_patched_for_osr()); | 2629 ASSERT(unoptimized->back_edges_patched_for_osr()); |
| 2632 int loop_nesting_level = unoptimized_code->allow_osr_at_loop_nesting_level(); | 2630 int loop_nesting_level = unoptimized->allow_osr_at_loop_nesting_level(); |
| 2633 | 2631 |
| 2634 for (FullCodeGenerator::BackEdgeTableIterator back_edges(unoptimized_code); | 2632 for (FullCodeGenerator::BackEdgeTableIterator back_edges(unoptimized, &no_gc); |
| 2635 !back_edges.Done(); | 2633 !back_edges.Done(); |
| 2636 back_edges.Next()) { | 2634 back_edges.Next()) { |
| 2637 if (static_cast<int>(back_edges.loop_depth()) <= loop_nesting_level) { | 2635 if (static_cast<int>(back_edges.loop_depth()) <= loop_nesting_level) { |
| 2638 ASSERT_EQ(PATCHED_FOR_OSR, GetInterruptPatchState(isolate, | 2636 ASSERT_EQ(PATCHED_FOR_OSR, GetInterruptPatchState(isolate, |
| 2639 unoptimized_code, | 2637 unoptimized, |
| 2640 back_edges.pc())); | 2638 back_edges.pc())); |
| 2641 RevertInterruptCodeAt(unoptimized_code, back_edges.pc(), interrupt_code); | 2639 RevertInterruptCodeAt(unoptimized, back_edges.pc(), interrupt_code); |
| 2642 } | 2640 } |
| 2643 } | 2641 } |
| 2644 | 2642 |
| 2645 unoptimized_code->set_back_edges_patched_for_osr(false); | 2643 unoptimized->set_back_edges_patched_for_osr(false); |
| 2646 unoptimized_code->set_allow_osr_at_loop_nesting_level(0); | 2644 unoptimized->set_allow_osr_at_loop_nesting_level(0); |
| 2647 #ifdef DEBUG | |
| 2648 // Assert that none of the back edges are patched anymore. | 2645 // Assert that none of the back edges are patched anymore. |
| 2649 Deoptimizer::VerifyInterruptCode(isolate, unoptimized_code, -1); | 2646 ASSERT(Deoptimizer::VerifyInterruptCode(isolate, unoptimized, -1)); |
| 2650 #endif // DEBUG | |
| 2651 } | 2647 } |
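[Review note] Both loops now hand &no_gc to the BackEdgeTableIterator
constructor. A plausible reading, sketched below with hypothetical types rather
than V8's real ones, is that the iterator walks the table through raw interior
pointers, so its constructor demands a live DisallowHeapAllocation scope to
turn "no GC while iterating" into a compile-time obligation for callers:

    #include <cstddef>
    #include <cstdint>

    // Stand-in for V8's scope object; the real one also flips a per-thread
    // flag that the allocator checks in debug builds.
    struct DisallowHeapAllocation {};

    class RawTableIterator {
     public:
      // The scope pointer is never used at runtime; requiring it makes the
      // iterator impossible to construct without an enclosing no-GC scope,
      // since the cached raw pointer would be invalidated by a moving GC.
      RawTableIterator(const uint8_t* table, size_t entries,
                       const DisallowHeapAllocation* /* proof of scope */)
          : cursor_(table), remaining_(entries) {}

      bool Done() const { return remaining_ == 0; }
      void Next() { cursor_ += kEntrySize; --remaining_; }

     private:
      static const size_t kEntrySize = 8;  // made-up entry width
      const uint8_t* cursor_;
      size_t remaining_;
    };

    // Usage mirrors the patch: DisallowHeapAllocation no_gc;
    //                          RawTableIterator it(table, n, &no_gc);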
| 2652 | 2648 |
| 2653 | 2649 |
| 2654 #ifdef DEBUG | 2650 #ifdef DEBUG |
| 2655 void Deoptimizer::VerifyInterruptCode(Isolate* isolate, | 2651 bool Deoptimizer::VerifyInterruptCode(Isolate* isolate, |
| 2656 Code* unoptimized_code, | 2652 Code* unoptimized, |
| 2657 int loop_nesting_level) { | 2653 int loop_nesting_level) { |
| 2658 for (FullCodeGenerator::BackEdgeTableIterator back_edges(unoptimized_code); | 2654 DisallowHeapAllocation no_gc; |
| 2655 for (FullCodeGenerator::BackEdgeTableIterator back_edges(unoptimized, &no_gc); |
| 2659 !back_edges.Done(); | 2656 !back_edges.Done(); |
| 2660 back_edges.Next()) { | 2657 back_edges.Next()) { |
| 2661 uint32_t loop_depth = back_edges.loop_depth(); | 2658 uint32_t loop_depth = back_edges.loop_depth(); |
| 2662 CHECK_LE(static_cast<int>(loop_depth), Code::kMaxLoopNestingMarker); | 2659 CHECK_LE(static_cast<int>(loop_depth), Code::kMaxLoopNestingMarker); |
| 2663 // Assert that all back edges for shallower loops (and only those) | 2660 // Assert that all back edges for shallower loops (and only those) |
| 2664 // have already been patched. | 2661 // have already been patched. |
| 2665 CHECK_EQ((static_cast<int>(loop_depth) <= loop_nesting_level), | 2662 CHECK_EQ((static_cast<int>(loop_depth) <= loop_nesting_level), |
| 2666 GetInterruptPatchState(isolate, | 2663 GetInterruptPatchState(isolate, |
| 2667 unoptimized_code, | 2664 unoptimized, |
| 2668 back_edges.pc()) != NOT_PATCHED); | 2665 back_edges.pc()) != NOT_PATCHED); |
| 2669 } | 2666 } |
| 2667 return true; |
| 2670 } | 2668 } |
| 2671 #endif // DEBUG | 2669 #endif // DEBUG |
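[Review note] The CHECK_EQ in VerifyInterruptCode compares two booleans, which
encodes an if-and-only-if: a back edge must be patched exactly when its loop
depth is at or below loop_nesting_level, so passing -1 (as RevertInterruptCode
does) asserts that nothing is patched. A self-contained illustration, with a
hypothetical stand-in for the patch-state query:

    #include <cassert>

    enum PatchState { NOT_PATCHED, PATCHED_FOR_OSR };

    // Hypothetical stand-in for GetInterruptPatchState: models a table in
    // which every edge at depth <= patched_level has been patched.
    static PatchState StateAt(int depth, int patched_level) {
      return depth <= patched_level ? PATCHED_FOR_OSR : NOT_PATCHED;
    }

    int main() {
      const int level = 2;
      for (int depth = 1; depth <= 4; ++depth) {
        // Boolean equality == "patched iff shallow enough": shallower back
        // edges must already be patched, deeper ones must not be.
        assert((depth <= level) == (StateAt(depth, level) != NOT_PATCHED));
      }
      // level == -1: no edge at any positive depth may remain patched.
      for (int depth = 1; depth <= 4; ++depth) {
        assert((depth <= -1) == (StateAt(depth, -1) != NOT_PATCHED));
      }
      return 0;
    }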
| 2672 | 2670 |
| 2673 | 2671 |
| 2674 unsigned Deoptimizer::ComputeInputFrameSize() const { | 2672 unsigned Deoptimizer::ComputeInputFrameSize() const { |
| 2675 unsigned fixed_size = ComputeFixedSize(function_); | 2673 unsigned fixed_size = ComputeFixedSize(function_); |
| 2676 // The fp-to-sp delta already takes the context and the function | 2674 // The fp-to-sp delta already takes the context and the function |
| 2677 // into account so we have to avoid double counting them (-2). | 2675 // into account so we have to avoid double counting them (-2). |
| 2678 unsigned result = fixed_size + fp_to_sp_delta_ - (2 * kPointerSize); | 2676 unsigned result = fixed_size + fp_to_sp_delta_ - (2 * kPointerSize); |
| 2679 #ifdef DEBUG | 2677 #ifdef DEBUG |
| (...skipping 647 matching lines...) |
| 3327 | 3325 |
| 3328 void DeoptimizedFrameInfo::Iterate(ObjectVisitor* v) { | 3326 void DeoptimizedFrameInfo::Iterate(ObjectVisitor* v) { |
| 3329 v->VisitPointer(BitCast<Object**>(&function_)); | 3327 v->VisitPointer(BitCast<Object**>(&function_)); |
| 3330 v->VisitPointers(parameters_, parameters_ + parameters_count_); | 3328 v->VisitPointers(parameters_, parameters_ + parameters_count_); |
| 3331 v->VisitPointers(expression_stack_, expression_stack_ + expression_count_); | 3329 v->VisitPointers(expression_stack_, expression_stack_ + expression_count_); |
| 3332 } | 3330 } |
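[Review note] Iterate is the usual GC root-reporting hook: it hands the visitor
one boxed function slot plus two contiguous [begin, end) slot ranges. A
simplified sketch of the visitor shape this relies on (not V8's actual class):

    struct Object;  // opaque heap object

    // A GC pass implements VisitPointers; the frame info reports every slot
    // that might hold a heap pointer, singly or as a contiguous range.
    class ObjectVisitor {
     public:
      virtual ~ObjectVisitor() = default;
      virtual void VisitPointers(Object** start, Object** end) = 0;
      virtual void VisitPointer(Object** p) { VisitPointers(p, p + 1); }
    };

    // Example pass standing in for marking: count the reported slots.
    class SlotCounter : public ObjectVisitor {
     public:
      void VisitPointers(Object** start, Object** end) override {
        count_ += static_cast<int>(end - start);
      }
      int count_ = 0;
    };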
| 3333 | 3331 |
| 3334 #endif // ENABLE_DEBUGGER_SUPPORT | 3332 #endif // ENABLE_DEBUGGER_SUPPORT |
| 3335 | 3333 |
| 3336 } } // namespace v8::internal | 3334 } } // namespace v8::internal |