OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef V8_OBJECTS_VISITING_INL_H_ | 5 #ifndef V8_OBJECTS_VISITING_INL_H_ |
6 #define V8_OBJECTS_VISITING_INL_H_ | 6 #define V8_OBJECTS_VISITING_INL_H_ |
7 | 7 |
8 #include "src/heap/array-buffer-tracker.h" | 8 #include "src/heap/array-buffer-tracker.h" |
9 #include "src/heap/objects-visiting.h" | 9 #include "src/heap/objects-visiting.h" |
10 #include "src/ic/ic-state.h" | 10 #include "src/ic/ic-state.h" |
(...skipping 432 matching lines...)
443 void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo( | 443 void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo( |
444 Map* map, HeapObject* object) { | 444 Map* map, HeapObject* object) { |
445 Heap* heap = map->GetHeap(); | 445 Heap* heap = map->GetHeap(); |
446 SharedFunctionInfo* shared = SharedFunctionInfo::cast(object); | 446 SharedFunctionInfo* shared = SharedFunctionInfo::cast(object); |
447 if (shared->ic_age() != heap->global_ic_age()) { | 447 if (shared->ic_age() != heap->global_ic_age()) { |
448 shared->ResetForNewContext(heap->global_ic_age()); | 448 shared->ResetForNewContext(heap->global_ic_age()); |
449 } | 449 } |
450 if (FLAG_cleanup_code_caches_at_gc) { | 450 if (FLAG_cleanup_code_caches_at_gc) { |
451 shared->ClearTypeFeedbackInfoAtGCTime(); | 451 shared->ClearTypeFeedbackInfoAtGCTime(); |
452 } | 452 } |
453 if (FLAG_flush_optimized_code_cache && | 453 if (FLAG_flush_optimized_code_cache) { |
454 !shared->optimized_code_map()->IsSmi()) { | 454 if (!shared->optimized_code_map()->IsSmi()) { |
455 // Always flush the optimized code map if requested by flag. | 455 // Always flush the optimized code map if requested by flag. |
456 shared->ClearOptimizedCodeMap(); | 456 shared->ClearOptimizedCodeMap(); |
| 457 } |
| 458 } else { |
| 459 if (!shared->optimized_code_map()->IsSmi()) { |
| 460 // Treat some references within the code map weakly by marking the |
| 461 // code map itself but not pushing it onto the marking deque. The |
| 462 // map will be processed after marking. |
| 463 FixedArray* code_map = FixedArray::cast(shared->optimized_code_map()); |
| 464 MarkOptimizedCodeMap(heap, code_map); |
| 465 } |
457 } | 466 } |
458 MarkCompactCollector* collector = heap->mark_compact_collector(); | 467 MarkCompactCollector* collector = heap->mark_compact_collector(); |
459 if (collector->is_code_flushing_enabled()) { | 468 if (collector->is_code_flushing_enabled()) { |
460 if (!shared->optimized_code_map()->IsSmi()) { | |
461 // Add the shared function info holding an optimized code map to | |
462 // the code flusher for processing of code maps after marking. | |
463 collector->code_flusher()->AddOptimizedCodeMap(shared); | |
464 // Treat some references within the code map weakly by marking the | |
465 // code map itself but not pushing it onto the marking deque. | |
466 FixedArray* code_map = FixedArray::cast(shared->optimized_code_map()); | |
467 MarkOptimizedCodeMap(heap, code_map); | |
468 } | |
469 if (IsFlushable(heap, shared)) { | 469 if (IsFlushable(heap, shared)) { |
470 // This function's code looks flushable. But we have to postpone | 470 // This function's code looks flushable. But we have to postpone |
471 // the decision until we see all functions that point to the same | 471 // the decision until we see all functions that point to the same |
472 // SharedFunctionInfo because some of them might be optimized. | 472 // SharedFunctionInfo because some of them might be optimized. |
473 // That would also make the non-optimized version of the code | 473 // That would also make the non-optimized version of the code |
474 // non-flushable, because it is required for bailing out from | 474 // non-flushable, because it is required for bailing out from |
475 // optimized code. | 475 // optimized code. |
476 collector->code_flusher()->AddCandidate(shared); | 476 collector->code_flusher()->AddCandidate(shared); |
477 // Treat the reference to the code object weakly. | 477 // Treat the reference to the code object weakly. |
478 VisitSharedFunctionInfoWeakCode(heap, object); | 478 VisitSharedFunctionInfoWeakCode(heap, object); |
479 return; | 479 return; |
480 } | 480 } |
481 } else { | 481 } else { |
| 482 // TODO(mstarzinger): Drop this case, it shouldn't be done here! |
482 if (!shared->optimized_code_map()->IsSmi()) { | 483 if (!shared->optimized_code_map()->IsSmi()) { |
483 // Flush optimized code map on major GCs without code flushing, | 484 // Flush optimized code map on major GCs without code flushing, |
484 // needed because cached code doesn't contain breakpoints. | 485 // needed because cached code doesn't contain breakpoints. |
485 shared->ClearOptimizedCodeMap(); | 486 shared->ClearOptimizedCodeMap(); |
486 } | 487 } |
487 } | 488 } |
488 VisitSharedFunctionInfoStrongCode(heap, object); | 489 VisitSharedFunctionInfoStrongCode(heap, object); |
489 } | 490 } |
490 | 491 |
491 | 492 |
(...skipping 411 matching lines...)
903 | 904 |
904 RelocIterator it(this, mode_mask); | 905 RelocIterator it(this, mode_mask); |
905 for (; !it.done(); it.next()) { | 906 for (; !it.done(); it.next()) { |
906 it.rinfo()->template Visit<StaticVisitor>(heap); | 907 it.rinfo()->template Visit<StaticVisitor>(heap); |
907 } | 908 } |
908 } | 909 } |
909 } // namespace internal | 910 } // namespace internal |
910 } // namespace v8 | 911 } // namespace v8 |
911 | 912 |
912 #endif // V8_OBJECTS_VISITING_INL_H_ | 913 #endif // V8_OBJECTS_VISITING_INL_H_ |
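
Reviewer's note: the restructured control flow in VisitSharedFunctionInfo is easiest to check against a condensed sketch. The toy model below is not part of the change; plain booleans stand in for FLAG_flush_optimized_code_cache, the Smi check on shared->optimized_code_map(), is_code_flushing_enabled() and IsFlushable(), and the printed strings only label which action the real code takes. It mirrors the branch structure on the NEW side of the diff.

#include <cstdio>

// Toy model of the NEW-side control flow of VisitSharedFunctionInfo.
// All parameters are illustrative stand-ins for the real V8 state.
void VisitSharedFunctionInfoModel(bool flush_optimized_code_cache,
                                  bool code_map_is_smi,
                                  bool code_flushing_enabled,
                                  bool flushable) {
  if (flush_optimized_code_cache) {
    if (!code_map_is_smi) {
      // Always flush the optimized code map if requested by flag.
      std::puts("ClearOptimizedCodeMap()");
    }
  } else if (!code_map_is_smi) {
    // Weak treatment: mark the code map itself, but defer its entries
    // to post-marking processing instead of pushing onto the deque.
    std::puts("MarkOptimizedCodeMap(heap, code_map)");
  }
  if (code_flushing_enabled) {
    if (flushable) {
      // Postpone the flushing decision until all functions pointing to
      // the same SharedFunctionInfo are seen; treat the code weakly.
      std::puts("AddCandidate(shared); VisitSharedFunctionInfoWeakCode()");
      return;
    }
  } else if (!code_map_is_smi) {
    // Per the TODO(mstarzinger) in the diff, this clearing arguably
    // should not live here; kept to match the NEW-side behavior.
    std::puts("ClearOptimizedCodeMap()  // major GC, no code flushing");
  }
  std::puts("VisitSharedFunctionInfoStrongCode()");
}

int main() {
  // Example: flag off, non-Smi code map, code flushing on, flushable.
  VisitSharedFunctionInfoModel(false, false, true, true);
}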