OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 293 matching lines...) |
304 | 304 |
305 | 305 |
306 template<typename StaticVisitor> | 306 template<typename StaticVisitor> |
307 void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo( | 307 void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo( |
308 Map* map, HeapObject* object) { | 308 Map* map, HeapObject* object) { |
309 Heap* heap = map->GetHeap(); | 309 Heap* heap = map->GetHeap(); |
310 SharedFunctionInfo* shared = SharedFunctionInfo::cast(object); | 310 SharedFunctionInfo* shared = SharedFunctionInfo::cast(object); |
311 if (shared->ic_age() != heap->global_ic_age()) { | 311 if (shared->ic_age() != heap->global_ic_age()) { |
312 shared->ResetForNewContext(heap->global_ic_age()); | 312 shared->ResetForNewContext(heap->global_ic_age()); |
313 } | 313 } |
314 if (FLAG_cache_optimized_code) { | |
315 // Flush optimized code map on major GC. | |
316 // TODO(mstarzinger): We may experiment with rebuilding it or with | |
317 // retaining entries which should survive as we iterate through | |
318 // optimized functions anyway. | |
319 shared->ClearOptimizedCodeMap("during full gc"); | |
320 } | |
321 MarkCompactCollector* collector = heap->mark_compact_collector(); | 314 MarkCompactCollector* collector = heap->mark_compact_collector(); |
322 if (collector->is_code_flushing_enabled()) { | 315 if (collector->is_code_flushing_enabled()) { |
| 316 if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) { |
| 317 // Add the shared function info holding an optimized code map to |
| 318 // the code flusher for processing of code maps after marking. |
| 319 collector->code_flusher()->AddOptimizedCodeMap(shared); |
| 320 // Treat all references within the code map weakly by marking the |
| 321 // code map itself but not pushing it onto the marking deque. |
| 322 FixedArray* code_map = FixedArray::cast(shared->optimized_code_map()); |
| 323 StaticVisitor::MarkObjectWithoutPush(heap, code_map); |
| 324 } |
323 if (IsFlushable(heap, shared)) { | 325 if (IsFlushable(heap, shared)) { |
324 // This function's code looks flushable. But we have to postpone | 326 // This function's code looks flushable. But we have to postpone |
325 // the decision until we see all functions that point to the same | 327 // the decision until we see all functions that point to the same |
326 // SharedFunctionInfo because some of them might be optimized. | 328 // SharedFunctionInfo because some of them might be optimized. |
327 // That would also make the non-optimized version of the code | 329 // That would also make the non-optimized version of the code |
328 // non-flushable, because it is required for bailing out from | 330 // non-flushable, because it is required for bailing out from |
329 // optimized code. | 331 // optimized code. |
330 collector->code_flusher()->AddCandidate(shared); | 332 collector->code_flusher()->AddCandidate(shared); |
331 // Treat the reference to the code object weakly. | 333 // Treat the reference to the code object weakly. |
332 VisitSharedFunctionInfoWeakCode(heap, object); | 334 VisitSharedFunctionInfoWeakCode(heap, object); |
333 return; | 335 return; |
334 } | 336 } |
| 337 } else { |
| 338 if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) { |
| 339 // Flush optimized code map on major GCs without code flushing, |
| 340 // needed because cached code doesn't contain breakpoints. |
| 341 shared->ClearOptimizedCodeMap(); |
| 342 } |
335 } | 343 } |
336 VisitSharedFunctionInfoStrongCode(heap, object); | 344 VisitSharedFunctionInfoStrongCode(heap, object); |
337 } | 345 } |
338 | 346 |
339 | 347 |
340 template<typename StaticVisitor> | 348 template<typename StaticVisitor> |
341 void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction( | 349 void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction( |
342 Map* map, HeapObject* object) { | 350 Map* map, HeapObject* object) { |
343 Heap* heap = map->GetHeap(); | 351 Heap* heap = map->GetHeap(); |
344 JSFunction* function = JSFunction::cast(object); | 352 JSFunction* function = JSFunction::cast(object); |
(...skipping 386 matching lines...) |
731 RelocIterator it(this, mode_mask); | 739 RelocIterator it(this, mode_mask); |
732 for (; !it.done(); it.next()) { | 740 for (; !it.done(); it.next()) { |
733 it.rinfo()->template Visit<StaticVisitor>(heap); | 741 it.rinfo()->template Visit<StaticVisitor>(heap); |
734 } | 742 } |
735 } | 743 } |
736 | 744 |
737 | 745 |
738 } } // namespace v8::internal | 746 } } // namespace v8::internal |
739 | 747 |
740 #endif // V8_OBJECTS_VISITING_INL_H_ | 748 #endif // V8_OBJECTS_VISITING_INL_H_ |
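
Note on the pattern in the hunk above (not part of the reviewed file): the new code treats the optimized code map as weak by marking the FixedArray itself but never pushing it onto the marking deque, so entries reachable only through the map stay unmarked and can be evicted after marking; likewise, flushable SharedFunctionInfos are only recorded as candidates, with the real flush/keep decision postponed until every JSFunction pointing at the same SharedFunctionInfo has been visited. Below is a minimal, self-contained C++ sketch of that marking discipline. All names here (Obj, Collector, EvictDeadEntries) are hypothetical stand-ins for illustration, not the actual V8 classes or signatures.

#include <algorithm>
#include <deque>
#include <vector>

struct Obj {
  bool marked = false;
  std::vector<Obj*> refs;  // outgoing references
};

class Collector {
 public:
  // Strong treatment: mark the object and queue it, so its own
  // references are scanned later and kept alive transitively.
  void MarkObject(Obj* o) {
    if (o->marked) return;
    o->marked = true;
    marking_deque_.push_back(o);
  }

  // Weak treatment: mark the object itself but do NOT queue it. Its
  // references are never scanned during marking -- the same trick the
  // hunk above applies to the optimized code map.
  void MarkObjectWithoutPush(Obj* o) { o->marked = true; }

  // Record a candidate whose code "looks flushable"; the decision is
  // deferred to a post-marking pass, because a later-visited optimized
  // function can make the unoptimized code non-flushable again.
  void AddCandidate(Obj* shared) { candidates_.push_back(shared); }

  // Drain the deque: transitive marking through strong references only.
  void ProcessMarkingDeque() {
    while (!marking_deque_.empty()) {
      Obj* o = marking_deque_.front();
      marking_deque_.pop_front();
      for (Obj* ref : o->refs) MarkObject(ref);
    }
  }

  // After marking completes, entries that were reachable only through
  // a weakly marked map are still unmarked and can be dropped -- the
  // "processing of code maps after marking" the comment refers to.
  static void EvictDeadEntries(Obj* code_map) {
    auto& refs = code_map->refs;
    refs.erase(std::remove_if(refs.begin(), refs.end(),
                              [](Obj* e) { return !e->marked; }),
               refs.end());
  }

 private:
  std::deque<Obj*> marking_deque_;
  std::vector<Obj*> candidates_;
};
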