Chromium Code Reviews

Unified Diff: src/full-codegen.cc

Issue 346223007: Do not eagerly update allow_osr_at_loop_nesting_level. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: addressed comments Created 6 years, 6 months ago
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/v8.h"

 #include "src/codegen.h"
 #include "src/compiler.h"
 #include "src/debug.h"
 #include "src/full-codegen.h"
 (...skipping 310 matching lines...)
           !info->function()->dont_optimize() &&
           info->function()->scope()->AllowsLazyCompilation());
   cgen.PopulateDeoptimizationData(code);
   cgen.PopulateTypeFeedbackInfo(code);
   code->set_has_deoptimization_support(info->HasDeoptimizationSupport());
   code->set_handler_table(*cgen.handler_table());
   code->set_compiled_optimizable(info->IsOptimizable());
   code->set_allow_osr_at_loop_nesting_level(0);
   code->set_profiler_ticks(0);
   code->set_back_edge_table_offset(table_offset);
-  code->set_back_edges_patched_for_osr(false);
   CodeGenerator::PrintCode(code, info);
   info->SetCode(code);
 #ifdef ENABLE_GDB_JIT_INTERFACE
   if (FLAG_gdbjit) {
     GDBJITLineInfo* lineinfo =
         masm.positions_recorder()->DetachGDBJITLineInfo();
     GDBJIT(RegisterDetailedLineInfo(*code, lineinfo));
   }
 #endif
   void* line_info = masm.positions_recorder()->DetachJITHandlerData();
   LOG_CODE_EVENT(isolate, CodeEndLinePosInfoRecordEvent(*code, line_info));
   return true;
 }


 unsigned FullCodeGenerator::EmitBackEdgeTable() {
   // The back edge table consists of a length (in number of entries)
   // field, and then a sequence of entries. Each entry is a triple of AST
   // id, code-relative pc offset, and loop depth.
-  masm()->Align(kIntSize);
+  masm()->Align(kPointerSize);
   unsigned offset = masm()->pc_offset();
   unsigned length = back_edges_.length();
   __ dd(length);
   for (unsigned i = 0; i < length; ++i) {
     __ dd(back_edges_[i].id.ToInt());
     __ dd(back_edges_[i].pc);
     __ dd(back_edges_[i].loop_depth);
   }
   return offset;
 }
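
For orientation, a sketch of the table layout emitted above (our illustration, not a real V8 type; the 32-bit field widths follow from the __ dd() directives):

// Illustrative layout of the emitted back edge table (hypothetical struct;
// V8 reads these words through BackEdgeTable accessors instead).
struct BackEdgeTableLayout {
  uint32_t length;        // number of entries that follow
  struct Entry {
    uint32_t ast_id;      // BailoutId of the loop's back edge
    uint32_t pc_offset;   // code-relative pc of the interrupt check
    uint32_t loop_depth;  // nesting level, <= Code::kMaxLoopNestingMarker
  } entries[];            // 'length' consecutive entries
};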
 (...skipping 1248 matching lines...)
   }

   return false;
 }


 void BackEdgeTable::Patch(Isolate* isolate, Code* unoptimized) {
   DisallowHeapAllocation no_gc;
   Code* patch = isolate->builtins()->builtin(Builtins::kOnStackReplacement);

-  // Iterate over the back edge table and patch every interrupt
-  // call to an unconditional call to the replacement code.
-  int loop_nesting_level = unoptimized->allow_osr_at_loop_nesting_level();
+  // Increment the loop nesting level by one, then iterate over the back edge
+  // table and patch every interrupt call at that depth to an unconditional
+  // call to the replacement code.
+  int loop_nesting_level = unoptimized->allow_osr_at_loop_nesting_level() + 1;
+  if (loop_nesting_level > Code::kMaxLoopNestingMarker) return;

   BackEdgeTable back_edges(unoptimized, &no_gc);
   for (uint32_t i = 0; i < back_edges.length(); i++) {
     if (static_cast<int>(back_edges.loop_depth(i)) == loop_nesting_level) {
       ASSERT_EQ(INTERRUPT, GetBackEdgeState(isolate,
                                             unoptimized,
                                             back_edges.pc(i)));
       PatchAt(unoptimized, back_edges.pc(i), ON_STACK_REPLACEMENT, patch);
     }
   }

-  unoptimized->set_back_edges_patched_for_osr(true);
-  ASSERT(Verify(isolate, unoptimized, loop_nesting_level));
+  unoptimized->set_allow_osr_at_loop_nesting_level(loop_nesting_level);
+  ASSERT(Verify(isolate, unoptimized));
 }
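
A minimal sketch of the new contract (the helper below is hypothetical, not part of this CL): Patch() now both advances the nesting level and records it on the code object, so callers no longer pre-set allow_osr_at_loop_nesting_level themselves.

// Hypothetical driver illustrating the ratcheting behavior of Patch().
void RatchetOsrLevel(Isolate* isolate, Code* unoptimized) {
  int before = unoptimized->allow_osr_at_loop_nesting_level();
  BackEdgeTable::Patch(isolate, unoptimized);
  int after = unoptimized->allow_osr_at_loop_nesting_level();
  // Either the level advanced by exactly one, or the increment would have
  // exceeded Code::kMaxLoopNestingMarker and Patch() returned early.
  ASSERT(after == before + 1 ||
         (after == before && before + 1 > Code::kMaxLoopNestingMarker));
}

Each hot tick can thus simply call Patch() again to enable OSR for the next-deeper loops.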


 void BackEdgeTable::Revert(Isolate* isolate, Code* unoptimized) {
   DisallowHeapAllocation no_gc;
   Code* patch = isolate->builtins()->builtin(Builtins::kInterruptCheck);

   // Iterate over the back edge table and revert the patched interrupt calls.
-  ASSERT(unoptimized->back_edges_patched_for_osr());
   int loop_nesting_level = unoptimized->allow_osr_at_loop_nesting_level();

   BackEdgeTable back_edges(unoptimized, &no_gc);
   for (uint32_t i = 0; i < back_edges.length(); i++) {
     if (static_cast<int>(back_edges.loop_depth(i)) <= loop_nesting_level) {
       ASSERT_NE(INTERRUPT, GetBackEdgeState(isolate,
                                             unoptimized,
                                             back_edges.pc(i)));
       PatchAt(unoptimized, back_edges.pc(i), INTERRUPT, patch);
     }
   }

-  unoptimized->set_back_edges_patched_for_osr(false);
   unoptimized->set_allow_osr_at_loop_nesting_level(0);
   // Assert that none of the back edges are patched anymore.
-  ASSERT(Verify(isolate, unoptimized, -1));
+  ASSERT(Verify(isolate, unoptimized));
 }


 void BackEdgeTable::AddStackCheck(Handle<Code> code, uint32_t pc_offset) {
   DisallowHeapAllocation no_gc;
   Isolate* isolate = code->GetIsolate();
   Address pc = code->instruction_start() + pc_offset;
   Code* patch = isolate->builtins()->builtin(Builtins::kOsrAfterStackCheck);
   PatchAt(*code, pc, OSR_AFTER_STACK_CHECK, patch);
 }


 void BackEdgeTable::RemoveStackCheck(Handle<Code> code, uint32_t pc_offset) {
   DisallowHeapAllocation no_gc;
   Isolate* isolate = code->GetIsolate();
   Address pc = code->instruction_start() + pc_offset;

   if (OSR_AFTER_STACK_CHECK == GetBackEdgeState(isolate, *code, pc)) {
     Code* patch = isolate->builtins()->builtin(Builtins::kOnStackReplacement);
     PatchAt(*code, pc, ON_STACK_REPLACEMENT, patch);
   }
 }
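
To summarize the three back-edge states used throughout this file (a sketch assuming the BackEdgeState enum declared in src/full-codegen.h; the helper itself is hypothetical, not part of this CL):

// Hypothetical helper: the builtin whose call each of Patch/Revert/
// AddStackCheck/RemoveStackCheck installs at a back edge site.
static Builtins::Name TargetFor(BackEdgeTable::BackEdgeState state) {
  switch (state) {
    case BackEdgeTable::INTERRUPT:
      return Builtins::kInterruptCheck;      // installed by Revert()
    case BackEdgeTable::ON_STACK_REPLACEMENT:
      return Builtins::kOnStackReplacement;  // Patch() / RemoveStackCheck()
    case BackEdgeTable::OSR_AFTER_STACK_CHECK:
      return Builtins::kOsrAfterStackCheck;  // installed by AddStackCheck()
  }
  UNREACHABLE();
  return Builtins::kInterruptCheck;
}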


 #ifdef DEBUG
-bool BackEdgeTable::Verify(Isolate* isolate,
-                           Code* unoptimized,
-                           int loop_nesting_level) {
+bool BackEdgeTable::Verify(Isolate* isolate, Code* unoptimized) {
   DisallowHeapAllocation no_gc;
+  int loop_nesting_level = unoptimized->allow_osr_at_loop_nesting_level();
   BackEdgeTable back_edges(unoptimized, &no_gc);
   for (uint32_t i = 0; i < back_edges.length(); i++) {
     uint32_t loop_depth = back_edges.loop_depth(i);
     CHECK_LE(static_cast<int>(loop_depth), Code::kMaxLoopNestingMarker);
     // Assert that all back edges for shallower loops (and only those)
     // have already been patched.
     CHECK_EQ((static_cast<int>(loop_depth) <= loop_nesting_level),
              GetBackEdgeState(isolate,
                               unoptimized,
                               back_edges.pc(i)) != INTERRUPT);
   }
   return true;
 }
 #endif  // DEBUG
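
Verify()'s invariant, restated as a single predicate (our restatement; the free function below is illustrative only and would in practice need access to BackEdgeTable's private GetBackEdgeState):

// Illustrative restatement: a back edge is patched away from INTERRUPT
// exactly when its loop depth is within the recorded OSR level.
static bool BackEdgeInvariantHolds(Isolate* isolate, Code* unoptimized,
                                   BackEdgeTable* back_edges, uint32_t i) {
  int level = unoptimized->allow_osr_at_loop_nesting_level();
  bool shallow = static_cast<int>(back_edges->loop_depth(i)) <= level;
  bool patched = BackEdgeTable::GetBackEdgeState(
                     isolate, unoptimized, back_edges->pc(i)) !=
                 BackEdgeTable::INTERRUPT;
  return shallow == patched;  // patched iff shallow, per Verify() above
}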


 #undef __


 } }  // namespace v8::internal