OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 2575 matching lines...)
2586 } | 2586 } |
2587 | 2587 |
2588 *input_offset -= kPointerSize; | 2588 *input_offset -= kPointerSize; |
2589 return true; | 2589 return true; |
2590 } | 2590 } |
2591 | 2591 |
2592 | 2592 |
2593 void Deoptimizer::PatchInterruptCode(Isolate* isolate, | 2593 void Deoptimizer::PatchInterruptCode(Isolate* isolate, |
2594 Code* unoptimized_code) { | 2594 Code* unoptimized_code) { |
2595 DisallowHeapAllocation no_gc; | 2595 DisallowHeapAllocation no_gc; |
2596 // Get the interrupt stub code object to match against. We aren't | |
2597 // prepared to generate it, but we don't expect to have to. | |
2598 Code* interrupt_code = NULL; | |
2599 InterruptStub interrupt_stub; | |
2600 CHECK(interrupt_stub.FindCodeInCache(&interrupt_code, isolate)); | |
2601 Code* replacement_code = | 2596 Code* replacement_code = |
2602 isolate->builtins()->builtin(Builtins::kOnStackReplacement); | 2597 isolate->builtins()->builtin(Builtins::kOnStackReplacement); |
2603 | 2598 |
2604 // Iterate over the back edge table and patch every interrupt | 2599 // Iterate over the back edge table and patch every interrupt |
2605 // call to an unconditional call to the replacement code. | 2600 // call to an unconditional call to the replacement code. |
2606 int loop_nesting_level = unoptimized_code->allow_osr_at_loop_nesting_level(); | 2601 int loop_nesting_level = unoptimized_code->allow_osr_at_loop_nesting_level(); |
2607 | 2602 |
2608 for (FullCodeGenerator::BackEdgeTableIterator back_edges(unoptimized_code); | 2603 for (FullCodeGenerator::BackEdgeTableIterator back_edges(unoptimized_code); |
2609 !back_edges.Done(); | 2604 !back_edges.Done(); |
2610 back_edges.Next()) { | 2605 back_edges.Next()) { |
(...skipping 10 matching lines...)
2621 unoptimized_code->set_back_edges_patched_for_osr(true); | 2616 unoptimized_code->set_back_edges_patched_for_osr(true); |
2622 #ifdef DEBUG | 2617 #ifdef DEBUG |
2623 Deoptimizer::VerifyInterruptCode( | 2618 Deoptimizer::VerifyInterruptCode( |
2624 isolate, unoptimized_code, loop_nesting_level); | 2619 isolate, unoptimized_code, loop_nesting_level); |
2625 #endif // DEBUG | 2620 #endif // DEBUG |
2626 } | 2621 } |
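The change visible in this hunk replaces the deleted InterruptStub code-cache lookup (old lines 2596-2600, with its CHECK that the stub had already been generated) with a direct fetch of the Builtins::kOnStackReplacement builtin. The loop body hidden behind the skip marker then rewrites each qualifying back-edge call site. A minimal, self-contained sketch of that patch loop's shape follows; BackEdge, Target, and PatchBackEdges are illustrative names, not V8's API, and the depth test is assumed to mirror the one visible in the revert loop below.

#include <cstdint>
#include <vector>

// Illustrative stand-ins for a back-edge table entry and its call target.
enum class Target : std::uint8_t { kInterruptCheck, kOnStackReplacement };

struct BackEdge {
  int loop_depth;  // nesting depth of the loop this edge closes
  Target target;   // which stub the back-edge call currently points at
};

// Redirect every interrupt check within the OSR nesting threshold to the
// on-stack-replacement entry, the pattern Deoptimizer::PatchInterruptCode
// applies to real code objects.
void PatchBackEdges(std::vector<BackEdge>& table, int loop_nesting_level) {
  for (BackEdge& edge : table) {
    if (edge.loop_depth <= loop_nesting_level) {
      edge.target = Target::kOnStackReplacement;
    }
  }
}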
2627 | 2622 |
2628 | 2623 |
2629 void Deoptimizer::RevertInterruptCode(Isolate* isolate, | 2624 void Deoptimizer::RevertInterruptCode(Isolate* isolate, |
2630 Code* unoptimized_code) { | 2625 Code* unoptimized_code) { |
2631 InterruptStub interrupt_stub; | |
2632 Code* interrupt_code = *interrupt_stub.GetCode(isolate); | |
2633 DisallowHeapAllocation no_gc; | 2626 DisallowHeapAllocation no_gc; |
| 2627 Code* interrupt_code = |
| 2628 isolate->builtins()->builtin(Builtins::kInterruptCheck); |
2634 | 2629 |
2635 // Iterate over the back edge table and revert the patched interrupt calls. | 2630 // Iterate over the back edge table and revert the patched interrupt calls. |
2636 ASSERT(unoptimized_code->back_edges_patched_for_osr()); | 2631 ASSERT(unoptimized_code->back_edges_patched_for_osr()); |
2637 int loop_nesting_level = unoptimized_code->allow_osr_at_loop_nesting_level(); | 2632 int loop_nesting_level = unoptimized_code->allow_osr_at_loop_nesting_level(); |
2638 | 2633 |
2639 for (FullCodeGenerator::BackEdgeTableIterator back_edges(unoptimized_code); | 2634 for (FullCodeGenerator::BackEdgeTableIterator back_edges(unoptimized_code); |
2640 !back_edges.Done(); | 2635 !back_edges.Done(); |
2641 back_edges.Next()) { | 2636 back_edges.Next()) { |
2642 if (static_cast<int>(back_edges.loop_depth()) <= loop_nesting_level) { | 2637 if (static_cast<int>(back_edges.loop_depth()) <= loop_nesting_level) { |
2643 ASSERT_EQ(PATCHED_FOR_OSR, GetInterruptPatchState(isolate, | 2638 ASSERT_EQ(PATCHED_FOR_OSR, GetInterruptPatchState(isolate, |
(...skipping 688 matching lines...)
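The rest of RevertInterruptCode is elided behind the skip marker, but the visible depth test on old line 2642 shows the shape: every back edge at or below the recorded nesting level is restored to the interrupt check, which the new code now fetches as the Builtins::kInterruptCheck builtin instead of through the deleted InterruptStub. Continuing the illustrative sketch above (reusing its BackEdge and Target definitions; again not V8's actual API):

// Companion to PatchBackEdges: point each patched edge back at the
// interrupt check, as Deoptimizer::RevertInterruptCode does for edges
// at or below the recorded loop nesting level.
void RevertBackEdges(std::vector<BackEdge>& table, int loop_nesting_level) {
  for (BackEdge& edge : table) {
    if (edge.loop_depth <= loop_nesting_level) {
      edge.target = Target::kInterruptCheck;
    }
  }
}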
3332 | 3327 |
3333 void DeoptimizedFrameInfo::Iterate(ObjectVisitor* v) { | 3328 void DeoptimizedFrameInfo::Iterate(ObjectVisitor* v) { |
3334 v->VisitPointer(BitCast<Object**>(&function_)); | 3329 v->VisitPointer(BitCast<Object**>(&function_)); |
3335 v->VisitPointers(parameters_, parameters_ + parameters_count_); | 3330 v->VisitPointers(parameters_, parameters_ + parameters_count_); |
3336 v->VisitPointers(expression_stack_, expression_stack_ + expression_count_); | 3331 v->VisitPointers(expression_stack_, expression_stack_ + expression_count_); |
3337 } | 3332 } |
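DeoptimizedFrameInfo::Iterate reports every slot of the materialized frame that may hold a heap pointer (the function, the parameters, and the expression stack) so the garbage collector can mark or relocate them. Below is a minimal sketch of that visitor pattern, using hypothetical SlotVisitor and FrameInfo types in place of V8's ObjectVisitor and DeoptimizedFrameInfo.

#include <cstddef>

struct Object;  // opaque heap object

struct SlotVisitor {
  virtual ~SlotVisitor() = default;
  virtual void VisitPointer(Object** slot) = 0;
  // Default range visit: forward each slot in [start, end) to VisitPointer.
  virtual void VisitPointers(Object** start, Object** end) {
    for (Object** p = start; p < end; ++p) VisitPointer(p);
  }
};

struct FrameInfo {
  Object* function_ = nullptr;
  Object** parameters_ = nullptr;
  std::size_t parameters_count_ = 0;

  // Hand the visitor every slot that can reference the heap, mirroring
  // DeoptimizedFrameInfo::Iterate above.
  void Iterate(SlotVisitor* v) {
    v->VisitPointer(&function_);
    v->VisitPointers(parameters_, parameters_ + parameters_count_);
  }
};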
3338 | 3333 |
3339 #endif // ENABLE_DEBUGGER_SUPPORT | 3334 #endif // ENABLE_DEBUGGER_SUPPORT |
3340 | 3335 |
3341 } } // namespace v8::internal | 3336 } } // namespace v8::internal |