OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef V8_OBJECTS_VISITING_INL_H_ | 5 #ifndef V8_OBJECTS_VISITING_INL_H_ |
6 #define V8_OBJECTS_VISITING_INL_H_ | 6 #define V8_OBJECTS_VISITING_INL_H_ |
7 | 7 |
8 #include "src/heap/objects-visiting.h" | 8 #include "src/heap/objects-visiting.h" |
9 | 9 |
10 namespace v8 { | 10 namespace v8 { |
(...skipping 391 matching lines...)
402 void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo( | 402 void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo( |
403 Map* map, HeapObject* object) { | 403 Map* map, HeapObject* object) { |
404 Heap* heap = map->GetHeap(); | 404 Heap* heap = map->GetHeap(); |
405 SharedFunctionInfo* shared = SharedFunctionInfo::cast(object); | 405 SharedFunctionInfo* shared = SharedFunctionInfo::cast(object); |
406 if (shared->ic_age() != heap->global_ic_age()) { | 406 if (shared->ic_age() != heap->global_ic_age()) { |
407 shared->ResetForNewContext(heap->global_ic_age()); | 407 shared->ResetForNewContext(heap->global_ic_age()); |
408 } | 408 } |
409 if (FLAG_cleanup_code_caches_at_gc) { | 409 if (FLAG_cleanup_code_caches_at_gc) { |
410 shared->ClearTypeFeedbackInfoAtGCTime(); | 410 shared->ClearTypeFeedbackInfoAtGCTime(); |
411 } | 411 } |
412 if (FLAG_cache_optimized_code && FLAG_flush_optimized_code_cache && | 412 if (FLAG_flush_optimized_code_cache && |
413 !shared->optimized_code_map()->IsSmi()) { | 413 !shared->optimized_code_map()->IsSmi()) { |
414 // Always flush the optimized code map if requested by flag. | 414 // Always flush the optimized code map if requested by flag. |
415 shared->ClearOptimizedCodeMap(); | 415 shared->ClearOptimizedCodeMap(); |
416 } | 416 } |
417 MarkCompactCollector* collector = heap->mark_compact_collector(); | 417 MarkCompactCollector* collector = heap->mark_compact_collector(); |
418 if (collector->is_code_flushing_enabled()) { | 418 if (collector->is_code_flushing_enabled()) { |
419 if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) { | 419 if (!shared->optimized_code_map()->IsSmi()) { |
420 // Add the shared function info holding an optimized code map to | 420 // Add the shared function info holding an optimized code map to |
421 // the code flusher for processing of code maps after marking. | 421 // the code flusher for processing of code maps after marking. |
422 collector->code_flusher()->AddOptimizedCodeMap(shared); | 422 collector->code_flusher()->AddOptimizedCodeMap(shared); |
423 // Treat all references within the code map weakly by marking the | 423 // Treat all references within the code map weakly by marking the |
424 // code map itself but not pushing it onto the marking deque. | 424 // code map itself but not pushing it onto the marking deque. |
425 FixedArray* code_map = FixedArray::cast(shared->optimized_code_map()); | 425 FixedArray* code_map = FixedArray::cast(shared->optimized_code_map()); |
426 StaticVisitor::MarkObjectWithoutPush(heap, code_map); | 426 StaticVisitor::MarkObjectWithoutPush(heap, code_map); |
427 } | 427 } |
428 if (IsFlushable(heap, shared)) { | 428 if (IsFlushable(heap, shared)) { |
429 // This function's code looks flushable. But we have to postpone | 429 // This function's code looks flushable. But we have to postpone |
430 // the decision until we see all functions that point to the same | 430 // the decision until we see all functions that point to the same |
431 // SharedFunctionInfo because some of them might be optimized. | 431 // SharedFunctionInfo because some of them might be optimized. |
432 // That would also make the non-optimized version of the code | 432 // That would also make the non-optimized version of the code |
433 // non-flushable, because it is required for bailing out from | 433 // non-flushable, because it is required for bailing out from |
434 // optimized code. | 434 // optimized code. |
435 collector->code_flusher()->AddCandidate(shared); | 435 collector->code_flusher()->AddCandidate(shared); |
436 // Treat the reference to the code object weakly. | 436 // Treat the reference to the code object weakly. |
437 VisitSharedFunctionInfoWeakCode(heap, object); | 437 VisitSharedFunctionInfoWeakCode(heap, object); |
438 return; | 438 return; |
439 } | 439 } |
440 } else { | 440 } else { |
441 if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) { | 441 if (!shared->optimized_code_map()->IsSmi()) { |
442 // Flush optimized code map on major GCs without code flushing, | 442 // Flush optimized code map on major GCs without code flushing, |
443 // needed because cached code doesn't contain breakpoints. | 443 // needed because cached code doesn't contain breakpoints. |
444 shared->ClearOptimizedCodeMap(); | 444 shared->ClearOptimizedCodeMap(); |
445 } | 445 } |
446 } | 446 } |
447 VisitSharedFunctionInfoStrongCode(heap, object); | 447 VisitSharedFunctionInfoStrongCode(heap, object); |
448 } | 448 } |
449 | 449 |
450 | 450 |
451 template <typename StaticVisitor> | 451 template <typename StaticVisitor> |
(...skipping 383 matching lines...)
835 | 835 |
836 RelocIterator it(this, mode_mask); | 836 RelocIterator it(this, mode_mask); |
837 for (; !it.done(); it.next()) { | 837 for (; !it.done(); it.next()) { |
838 it.rinfo()->template Visit<StaticVisitor>(heap); | 838 it.rinfo()->template Visit<StaticVisitor>(heap); |
839 } | 839 } |
840 } | 840 } |
841 } | 841 } |
842 } // namespace v8::internal | 842 } // namespace v8::internal |
843 | 843 |
844 #endif // V8_OBJECTS_VISITING_INL_H_ | 844 #endif // V8_OBJECTS_VISITING_INL_H_ |
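
For context, a minimal, self-contained sketch of the control flow this patch leaves behind once the FLAG_cache_optimized_code guards are removed. The names below (SharedStub, has_code_map, VisitSharedFunctionInfoSketch) are illustrative stand-ins, not the actual V8 classes or API; this only mirrors the branch structure of VisitSharedFunctionInfo as shown in the NEW column:

// Sketch only -- hypothetical stand-ins, not V8 code. Build: g++ -std=c++11
#include <iostream>

// Stand-in for SharedFunctionInfo; `has_code_map` mirrors the
// !optimized_code_map()->IsSmi() check in the real visitor.
struct SharedStub {
  bool has_code_map = true;
  void ClearOptimizedCodeMap() { has_code_map = false; }
};

// Stand-in for the runtime flag of the same name.
bool FLAG_flush_optimized_code_cache = false;

void VisitSharedFunctionInfoSketch(SharedStub* shared,
                                   bool code_flushing_enabled,
                                   bool is_flushable) {
  // After the patch, the optimized code map is consulted unconditionally;
  // before it, each branch below was additionally guarded by
  // FLAG_cache_optimized_code.
  if (FLAG_flush_optimized_code_cache && shared->has_code_map) {
    shared->ClearOptimizedCodeMap();  // eager flush requested by flag
  }
  if (code_flushing_enabled) {
    if (shared->has_code_map) {
      std::cout << "enqueue code map; mark it without pushing (weak refs)\n";
    }
    if (is_flushable) {
      std::cout << "add flushing candidate; visit code weakly\n";
      return;  // skip the strong visit below
    }
  } else if (shared->has_code_map) {
    // No code flushing on this GC: drop cached optimized code, since it
    // does not contain breakpoints.
    shared->ClearOptimizedCodeMap();
  }
  std::cout << "visit code strongly\n";
}

int main() {
  SharedStub shared;
  VisitSharedFunctionInfoSketch(&shared, /*code_flushing_enabled=*/true,
                                /*is_flushable=*/false);
  return 0;
}

The sketch covers only the code-map branches; the real visitor also resets the IC age and optionally clears type feedback info (lines 406-411) before reaching these checks.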