Chromium Code Reviews

Unified Diff: src/deoptimizer.cc

Issue 23608004: Refactor interrupt check patching for OSR. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: addressed comments Created 7 years, 3 months ago
 // Copyright 2013 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 2559 matching lines...)
       UNREACHABLE();
       return false;
     }
   }

   *input_offset -= kPointerSize;
   return true;
 }


-void Deoptimizer::PatchInterruptCode(Code* unoptimized_code,
-                                     Code* interrupt_code,
-                                     Code* replacement_code) {
+void Deoptimizer::PatchInterruptCode(Isolate* isolate,
+                                     Code* unoptimized_code) {
+  DisallowHeapAllocation no_gc;
+  // Get the interrupt stub code object to match against. We aren't
+  // prepared to generate it, but we don't expect to have to.
+  Code* interrupt_code = NULL;
+  InterruptStub interrupt_stub;
+  CHECK(interrupt_stub.FindCodeInCache(&interrupt_code, isolate));
+  Code* replacement_code =
+      isolate->builtins()->builtin(Builtins::kOnStackReplacement);
+
   // Iterate over the back edge table and patch every interrupt
   // call to an unconditional call to the replacement code.
   int loop_nesting_level = unoptimized_code->allow_osr_at_loop_nesting_level();

   for (FullCodeGenerator::BackEdgeTableIterator back_edges(unoptimized_code);
        !back_edges.Done();
        back_edges.Next()) {
     if (static_cast<int>(back_edges.loop_depth()) == loop_nesting_level) {
+      ASSERT_EQ(NOT_PATCHED, GetInterruptPatchState(isolate,
+                                                    unoptimized_code,
+                                                    back_edges.pc()));
       PatchInterruptCodeAt(unoptimized_code,
                            back_edges.pc(),
-                           interrupt_code,
                            replacement_code);
     }
   }

   unoptimized_code->set_back_edges_patched_for_osr(true);
 #ifdef DEBUG
   Deoptimizer::VerifyInterruptCode(
-      unoptimized_code, interrupt_code, replacement_code, loop_nesting_level);
+      isolate, unoptimized_code, loop_nesting_level);
 #endif  // DEBUG
 }


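Note on the hunk above: the interrupt stub and the OnStackReplacement builtin are now looked up inside the deoptimizer rather than threaded through by every caller. The lookup uses FindCodeInCache under the DisallowHeapAllocation scope, which only finds an already-generated stub and therefore cannot allocate (hence the CHECK and the comment). A before/after sketch of a call site follows; the call site itself is hypothetical, the real callers are updated elsewhere in this CL:

// Hypothetical call site, for illustration only.
//
// Before this CL the caller had to supply both code objects:
//   Deoptimizer::PatchInterruptCode(unoptimized_code,
//                                   interrupt_code,
//                                   replacement_code);
//
// After this CL the deoptimizer derives them from the isolate:
//   Deoptimizer::PatchInterruptCode(isolate, unoptimized_code);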
-void Deoptimizer::RevertInterruptCode(Code* unoptimized_code,
-                                      Code* interrupt_code,
-                                      Code* replacement_code) {
+void Deoptimizer::RevertInterruptCode(Isolate* isolate,
+                                      Code* unoptimized_code) {
+  InterruptStub interrupt_stub;
+  Code* interrupt_code = *interrupt_stub.GetCode(isolate);
+  DisallowHeapAllocation no_gc;
+
   // Iterate over the back edge table and revert the patched interrupt calls.
   ASSERT(unoptimized_code->back_edges_patched_for_osr());
   int loop_nesting_level = unoptimized_code->allow_osr_at_loop_nesting_level();

   for (FullCodeGenerator::BackEdgeTableIterator back_edges(unoptimized_code);
        !back_edges.Done();
        back_edges.Next()) {
     if (static_cast<int>(back_edges.loop_depth()) <= loop_nesting_level) {
-      RevertInterruptCodeAt(unoptimized_code,
-                            back_edges.pc(),
-                            interrupt_code,
-                            replacement_code);
+      ASSERT_EQ(PATCHED_FOR_OSR, GetInterruptPatchState(isolate,
+                                                        unoptimized_code,
+                                                        back_edges.pc()));
+      RevertInterruptCodeAt(unoptimized_code, back_edges.pc(), interrupt_code);
     }
   }

   unoptimized_code->set_back_edges_patched_for_osr(false);
   unoptimized_code->set_allow_osr_at_loop_nesting_level(0);
 #ifdef DEBUG
   // Assert that none of the back edges are patched anymore.
-  Deoptimizer::VerifyInterruptCode(
-      unoptimized_code, interrupt_code, replacement_code, -1);
+  Deoptimizer::VerifyInterruptCode(isolate, unoptimized_code, -1);
 #endif  // DEBUG
 }


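Two details in RevertInterruptCode are worth calling out: the interrupt stub is obtained with GetCode (which can generate the stub if it is not cached yet) before the DisallowHeapAllocation scope is entered, while PatchInterruptCode uses FindCodeInCache inside that scope and CHECKs that the stub already exists; and the ASSERT_EQ relies on the NOT_PATCHED / PATCHED_FOR_OSR values returned by the per-architecture GetInterruptPatchState helper, presumably declared alongside it in src/deoptimizer.h (not shown in this file). A minimal sketch of that contract, under those assumptions and not the actual implementation:

// Sketch only: the real GetInterruptPatchState is defined per architecture
// (e.g. in src/ia32/deoptimizer-ia32.cc, the next file in this CL) and
// inspects the call site at the back edge. This just models the contract
// the asserts rely on.
enum InterruptPatchState { NOT_PATCHED, PATCHED_FOR_OSR };

// Hypothetical predicate standing in for the architecture-specific check:
// PATCHED_FOR_OSR once the back edge calls the OnStackReplacement builtin,
// NOT_PATCHED while it still calls the interrupt stub.
InterruptPatchState GetPatchState(bool back_edge_calls_osr_builtin) {
  return back_edge_calls_osr_builtin ? PATCHED_FOR_OSR : NOT_PATCHED;
}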
 #ifdef DEBUG
-void Deoptimizer::VerifyInterruptCode(Code* unoptimized_code,
-                                      Code* interrupt_code,
-                                      Code* replacement_code,
+void Deoptimizer::VerifyInterruptCode(Isolate* isolate,
+                                      Code* unoptimized_code,
                                       int loop_nesting_level) {
   for (FullCodeGenerator::BackEdgeTableIterator back_edges(unoptimized_code);
        !back_edges.Done();
        back_edges.Next()) {
     uint32_t loop_depth = back_edges.loop_depth();
     CHECK_LE(static_cast<int>(loop_depth), Code::kMaxLoopNestingMarker);
     // Assert that all back edges for shallower loops (and only those)
     // have already been patched.
     CHECK_EQ((static_cast<int>(loop_depth) <= loop_nesting_level),
-             InterruptCodeIsPatched(unoptimized_code,
-                                    back_edges.pc(),
-                                    interrupt_code,
-                                    replacement_code));
+             GetInterruptPatchState(isolate,
+                                    unoptimized_code,
+                                    back_edges.pc()) != NOT_PATCHED);
   }
 }
 #endif  // DEBUG


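VerifyInterruptCode checks a cumulative invariant: PatchInterruptCode only patches back edges whose loop depth equals the current allow_osr_at_loop_nesting_level (the == test), yet verification expects every back edge up to that depth to be patched (the <= test), presumably because the nesting level only grows between calls and shallower back edges were patched by earlier invocations. A small self-contained model of that invariant (plain C++, not V8 code, with a made-up back edge table):

#include <cassert>
#include <vector>

// Toy model of the patch/verify invariant checked above.
struct BackEdge { int loop_depth; bool patched; };

void PatchAtLevel(std::vector<BackEdge>& table, int level) {
  for (BackEdge& edge : table) {
    if (edge.loop_depth == level) {  // mirrors the == in PatchInterruptCode
      assert(!edge.patched);         // mirrors ASSERT_EQ(NOT_PATCHED, ...)
      edge.patched = true;
    }
  }
}

void Verify(const std::vector<BackEdge>& table, int level) {
  for (const BackEdge& edge : table) {
    // Back edges of shallower (or equally deep) loops, and only those,
    // must already be patched.
    assert((edge.loop_depth <= level) == edge.patched);
  }
}

int main() {
  std::vector<BackEdge> table = {{1, false}, {1, false}, {2, false}, {3, false}};
  for (int level = 1; level <= 2; ++level) {  // the level only ever increases
    PatchAtLevel(table, level);
    Verify(table, level);  // holds after each round of patching
  }
  return 0;
}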
 unsigned Deoptimizer::ComputeInputFrameSize() const {
   unsigned fixed_size = ComputeFixedSize(function_);
   // The fp-to-sp delta already takes the context and the function
   // into account so we have to avoid double counting them (-2).
   unsigned result = fixed_size + fp_to_sp_delta_ - (2 * kPointerSize);
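As a worked example of the comment above: on a 32-bit target (kPointerSize == 4) with a hypothetical fixed_size of 28 bytes and an fp_to_sp_delta_ of 40 bytes, the result would be 28 + 40 - 2 * 4 = 60 bytes; the 8 subtracted bytes are the context and function slots that both terms would otherwise count twice.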
(...skipping 648 matching lines...)

 void DeoptimizedFrameInfo::Iterate(ObjectVisitor* v) {
   v->VisitPointer(BitCast<Object**>(&function_));
   v->VisitPointers(parameters_, parameters_ + parameters_count_);
   v->VisitPointers(expression_stack_, expression_stack_ + expression_count_);
 }

 #endif  // ENABLE_DEBUGGER_SUPPORT

 } }  // namespace v8::internal