OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 120 matching lines...) | |
131 | 131 |
132 table_.Register(kVisitOddball, | 132 table_.Register(kVisitOddball, |
133 &FixedBodyVisitor<StaticVisitor, | 133 &FixedBodyVisitor<StaticVisitor, |
134 Oddball::BodyDescriptor, | 134 Oddball::BodyDescriptor, |
135 void>::Visit); | 135 void>::Visit); |
136 | 136 |
137 table_.Register(kVisitMap, &VisitMap); | 137 table_.Register(kVisitMap, &VisitMap); |
138 | 138 |
139 table_.Register(kVisitCode, &VisitCode); | 139 table_.Register(kVisitCode, &VisitCode); |
140 | 140 |
141 // Registration for kVisitSharedFunctionInfo is done by StaticVisitor. | 141 table_.Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo); |
142 | 142 |
143 // Registration for kVisitJSFunction is done by StaticVisitor. | 143 table_.Register(kVisitJSFunction, &VisitJSFunction); |
144 | 144 |
145 // Registration for kVisitJSRegExp is done by StaticVisitor. | 145 // Registration for kVisitJSRegExp is done by StaticVisitor. |
146 | 146 |
147 table_.Register(kVisitPropertyCell, | 147 table_.Register(kVisitPropertyCell, |
148 &FixedBodyVisitor<StaticVisitor, | 148 &FixedBodyVisitor<StaticVisitor, |
149 JSGlobalPropertyCell::BodyDescriptor, | 149 JSGlobalPropertyCell::BodyDescriptor, |
150 void>::Visit); | 150 void>::Visit); |
151 | 151 |
152 table_.template RegisterSpecializations<DataObjectVisitor, | 152 table_.template RegisterSpecializations<DataObjectVisitor, |
153 kVisitDataObject, | 153 kVisitDataObject, |
(...skipping 123 matching lines...) | |
277 Heap* heap = map->GetHeap(); | 277 Heap* heap = map->GetHeap(); |
278 Code* code = Code::cast(object); | 278 Code* code = Code::cast(object); |
279 if (FLAG_cleanup_code_caches_at_gc) { | 279 if (FLAG_cleanup_code_caches_at_gc) { |
280 code->ClearTypeFeedbackCells(heap); | 280 code->ClearTypeFeedbackCells(heap); |
281 } | 281 } |
282 code->CodeIterateBody<StaticVisitor>(heap); | 282 code->CodeIterateBody<StaticVisitor>(heap); |
283 } | 283 } |
284 | 284 |
285 | 285 |
286 template<typename StaticVisitor> | 286 template<typename StaticVisitor> |
287 void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo( | |
288 Map* map, HeapObject* object) { | |
289 Heap* heap = map->GetHeap(); | |
290 SharedFunctionInfo* shared = SharedFunctionInfo::cast(object); | |
291 if (shared->ic_age() != heap->global_ic_age()) { | |
292 shared->ResetForNewContext(heap->global_ic_age()); | |
293 } | |
294 MarkCompactCollector* collector = heap->mark_compact_collector(); | |
295 if (collector->is_code_flushing_enabled()) { | |
296 if (IsFlushable(heap, shared)) { | |
297 // This function's code looks flushable. But we have to postpone | |
298 // the decision until we see all functions that point to the same | |
299 // SharedFunctionInfo because some of them might be optimized. | |
300 // That would also make the non-optimized version of the code | |
301 // non-flushable, because it is required for bailing out from | |
302 // optimized code. | |
303 collector->code_flusher()->AddCandidate(shared); | |
304 // Treat the reference to the code object weakly. | |
305 VisitSharedFunctionInfoWeakCode(heap, object); | |
306 return; | |
307 } | |
308 } | |
309 VisitSharedFunctionInfoStrongCode(heap, object); | |
310 } | |
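The "postpone the decision" comments above describe a two-phase protocol: while marking is still in progress the collector only records candidates via the code flusher, and the actual flush decision is made after marking completes, once it is known whether any optimized code still points at the SharedFunctionInfo. A minimal standalone sketch of that record-now-decide-later idea (the stub type and its referenced_by_optimized_code flag are illustrative assumptions, not V8's actual CodeFlusher):

    #include <vector>

    // Illustrative stand-in for the real SharedFunctionInfo.
    struct SharedFunctionInfoStub {
      bool referenced_by_optimized_code;
    };

    class CodeFlusherSketch {
     public:
      // Called while marking is in progress; only records the candidate.
      void AddCandidate(SharedFunctionInfoStub* shared) {
        candidates_.push_back(shared);
      }

      // Called once marking has finished and the full picture is known.
      void ProcessCandidates() {
        for (size_t i = 0; i < candidates_.size(); ++i) {
          // Keep the non-optimized code if any optimized version still
          // needs it for bailouts; otherwise it is safe to flush.
          if (!candidates_[i]->referenced_by_optimized_code) {
            Flush(candidates_[i]);
          }
        }
        candidates_.clear();
      }

     private:
      void Flush(SharedFunctionInfoStub*) { /* drop the compiled code */ }
      std::vector<SharedFunctionInfoStub*> candidates_;
    };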
311 | |
312 | |
313 template<typename StaticVisitor> | |
314 void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction( | |
315 Map* map, HeapObject* object) { | |
316 Heap* heap = map->GetHeap(); | |
317 JSFunction* function = JSFunction::cast(object); | |
318 MarkCompactCollector* collector = heap->mark_compact_collector(); | |
319 if (collector->is_code_flushing_enabled()) { | |
320 if (IsFlushable(heap, function)) { | |
321 // This function's code looks flushable. But we have to postpone | |
322 // the decision until we see all functions that point to the same | |
323 // SharedFunctionInfo because some of them might be optimized. | |
324 // That would also make the non-optimized version of the code | |
325 // non-flushable, because it is required for bailing out from | |
326 // optimized code. | |
327 collector->code_flusher()->AddCandidate(function); | |
328 // Visit shared function info immediately to avoid double checking | |
329 // of its flushability later. This is just an optimization because | |
330 // the shared function info would eventually be visited. | |
331 SharedFunctionInfo* shared = function->unchecked_shared(); | |
332 if (StaticVisitor::MarkObjectWithoutPush(heap, shared)) { | |
333 StaticVisitor::MarkObject(heap, shared->map()); | |
334 VisitSharedFunctionInfoWeakCode(heap, shared); | |
335 } | |
336 // Treat the reference to the code object weakly. | |
337 VisitJSFunctionWeakCode(heap, object); | |
338 return; | |
339 } else { | |
340 // Visit all unoptimized code objects to prevent flushing them. | |
341 StaticVisitor::MarkObject(heap, function->shared()->code()); | |
342 if (function->code()->kind() == Code::OPTIMIZED_FUNCTION) { | |
343 MarkInlinedFunctionsCode(heap, function->code()); | |
344 } | |
345 } | |
346 } | |
347 VisitJSFunctionStrongCode(heap, object); | |
348 } | |
349 | |
350 | |
351 template<typename StaticVisitor> | |
287 void StaticMarkingVisitor<StaticVisitor>::VisitJSRegExp( | 352 void StaticMarkingVisitor<StaticVisitor>::VisitJSRegExp( |
288 Map* map, HeapObject* object) { | 353 Map* map, HeapObject* object) { |
289 int last_property_offset = | 354 int last_property_offset = |
290 JSRegExp::kSize + kPointerSize * map->inobject_properties(); | 355 JSRegExp::kSize + kPointerSize * map->inobject_properties(); |
291 Object** start_slot = | 356 Object** start_slot = |
292 HeapObject::RawField(object, JSRegExp::kPropertiesOffset); | 357 HeapObject::RawField(object, JSRegExp::kPropertiesOffset); |
293 Object** end_slot = | 358 Object** end_slot = |
294 HeapObject::RawField(object, last_property_offset); | 359 HeapObject::RawField(object, last_property_offset); |
295 StaticVisitor::VisitPointers( | 360 StaticVisitor::VisitPointers( |
296 map->GetHeap(), start_slot, start_slot, end_slot); | 361 map->GetHeap(), start_slot, start_slot, end_slot); |
(...skipping 51 matching lines...) | |
348 HeapObject* obj = HeapObject::cast(*slot); | 413 HeapObject* obj = HeapObject::cast(*slot); |
349 heap->mark_compact_collector()->RecordSlot(slot, slot, obj); | 414 heap->mark_compact_collector()->RecordSlot(slot, slot, obj); |
350 StaticVisitor::MarkObjectWithoutPush(heap, obj); | 415 StaticVisitor::MarkObjectWithoutPush(heap, obj); |
351 } | 416 } |
352 | 417 |
353 for (int i = 0; i < transitions->number_of_transitions(); ++i) { | 418 for (int i = 0; i < transitions->number_of_transitions(); ++i) { |
354 StaticVisitor::VisitPointer(heap, transitions->GetKeySlot(i)); | 419 StaticVisitor::VisitPointer(heap, transitions->GetKeySlot(i)); |
355 } | 420 } |
356 } | 421 } |
357 | 422 |
423 template<typename StaticVisitor> | |
424 void StaticMarkingVisitor<StaticVisitor>::MarkInlinedFunctionsCode( | |
425 Heap* heap, Code* code) { | |
426 // For an optimized function we should retain both the non-optimized | |
427 // version of its code and the non-optimized versions of all inlined functions. | |
428 // This is required to support bailing out from inlined code. | |
429 DeoptimizationInputData* data = | |
430 DeoptimizationInputData::cast(code->deoptimization_data()); | |
431 FixedArray* literals = data->LiteralArray(); | |
432 for (int i = 0, count = data->InlinedFunctionCount()->value(); | |
433 i < count; | |
434 i++) { | |
435 JSFunction* inlined = JSFunction::cast(literals->get(i)); | |
436 StaticVisitor::MarkObject(heap, inlined->shared()->code()); | |
437 } | |
438 } | |
439 | |
440 | |
441 inline static bool IsValidNonBuiltinContext(Object* context) { | |
442 return context->IsContext() && | |
443 !Context::cast(context)->global_object()->IsJSBuiltinsObject(); | |
444 } | |
445 | |
446 | |
447 inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) { | |
448 Object* undefined = heap->undefined_value(); | |
449 return (info->script() != undefined) && | |
450 (reinterpret_cast<Script*>(info->script())->source() != undefined); | |
451 } | |
452 | |
453 | |
454 template<typename StaticVisitor> | |
455 bool StaticMarkingVisitor<StaticVisitor>::IsFlushable( | |
456 Heap* heap, JSFunction* function) { | |
457 SharedFunctionInfo* shared_info = function->unchecked_shared(); | |
458 | |
459 // Code is either on the stack, in the compilation cache, or referenced | |
460 // by an optimized version of the function. | |
461 MarkBit code_mark = Marking::MarkBitFrom(function->code()); | |
462 if (code_mark.Get()) { | |
463 if (!Marking::MarkBitFrom(shared_info).Get()) { | |
464 shared_info->set_code_age(0); | |
465 } | |
466 return false; | |
467 } | |
468 | |
469 // The function must have a valid context and not be a builtin. | |
470 if (!IsValidNonBuiltinContext(function->unchecked_context())) { | |
471 return false; | |
472 } | |
473 | |
474 // We do not flush code for optimized functions. | |
475 if (function->code() != shared_info->code()) { | |
476 return false; | |
477 } | |
478 | |
479 return IsFlushable(heap, shared_info); | |
480 } | |
481 | |
482 | |
483 template<typename StaticVisitor> | |
484 bool StaticMarkingVisitor<StaticVisitor>::IsFlushable( | |
485 Heap* heap, SharedFunctionInfo* shared_info) { | |
486 // Code is either on the stack, in the compilation cache, or referenced | |
487 // by an optimized version of the function. | |
488 MarkBit code_mark = Marking::MarkBitFrom(shared_info->code()); | |
489 if (code_mark.Get()) { | |
490 return false; | |
491 } | |
492 | |
493 // The function must be compiled and have the source code available, | |
494 // to be able to recompile it in case we need the function again. | |
495 if (!(shared_info->is_compiled() && HasSourceCode(heap, shared_info))) { | |
496 return false; | |
497 } | |
498 | |
499 // We never flush code for API functions. | |
500 Object* function_data = shared_info->function_data(); | |
501 if (function_data->IsFunctionTemplateInfo()) { | |
502 return false; | |
503 } | |
504 | |
505 // Only flush code for functions. | |
506 if (shared_info->code()->kind() != Code::FUNCTION) { | |
507 return false; | |
508 } | |
509 | |
510 // The function must be lazily compilable. | |
511 if (!shared_info->allows_lazy_compilation()) { | |
512 return false; | |
513 } | |
514 | |
515 // If this is a full script wrapped in a function we do not flush the code. | |
516 if (shared_info->is_toplevel()) { | |
517 return false; | |
518 } | |
519 | |
520 // TODO(mstarzinger): The following will soon be replaced by a new way of | |
521 // aging code that is based on an aging stub in the function prologue. | |
522 | |
523 // How many collections a newly compiled code object will survive before | |
524 // being flushed. | |
525 static const int kCodeAgeThreshold = 5; | |
526 | |
527 // Age this shared function info. | |
528 if (shared_info->code_age() < kCodeAgeThreshold) { | |
529 shared_info->set_code_age(shared_info->code_age() + 1); | |
530 return false; | |
531 } | |
532 | |
533 return true; | |
534 } | |
535 | |
536 | |
537 template<typename StaticVisitor> | |
538 void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoStrongCode( | |
539 Heap* heap, HeapObject* object) { | |
540 StaticVisitor::BeforeVisitingSharedFunctionInfo(object); | |
541 Object** start_slot = | |
542 HeapObject::RawField(object, | |
543 SharedFunctionInfo::BodyDescriptor::kStartOffset); | |
544 Object** end_slot = | |
545 HeapObject::RawField(object, | |
546 SharedFunctionInfo::BodyDescriptor::kEndOffset); | |
547 StaticVisitor::VisitPointers(heap, start_slot, start_slot, end_slot); | |
548 } | |
549 | |
550 | |
551 template<typename StaticVisitor> | |
552 void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoWeakCode( | |
553 Heap* heap, HeapObject* object) { | |
554 StaticVisitor::BeforeVisitingSharedFunctionInfo(object); | |
555 Object** name_slot = | |
556 HeapObject::RawField(object, SharedFunctionInfo::kNameOffset); | |
557 StaticVisitor::VisitPointer(heap, name_slot); | |
558 | |
559 // Skip visiting kCodeOffset as it is treated weakly here. | |
ulan 2012/10/12 11:48:32: Maybe add static asserts that kNameOffset + kPoint…
Michael Starzinger 2012/10/12 12:16:37: Done.
| |
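The asserts ulan asks for would pin the field order down at compile time, so that skipping kCodeOffset here cannot silently go stale if the SharedFunctionInfo layout changes. A sketch of what they might look like, assuming kCodeOffset sits between kNameOffset and kOptimizedCodeMapOffset as the slot ranges in this function imply:

    STATIC_ASSERT(SharedFunctionInfo::kCodeOffset ==
                  SharedFunctionInfo::kNameOffset + kPointerSize);
    STATIC_ASSERT(SharedFunctionInfo::kOptimizedCodeMapOffset ==
                  SharedFunctionInfo::kCodeOffset + kPointerSize);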
560 | |
561 Object** start_slot = | |
562 HeapObject::RawField(object, | |
563 SharedFunctionInfo::kOptimizedCodeMapOffset); | |
564 Object** end_slot = | |
565 HeapObject::RawField(object, | |
566 SharedFunctionInfo::BodyDescriptor::kEndOffset); | |
567 StaticVisitor::VisitPointers(heap, start_slot, start_slot, end_slot); | |
568 } | |
569 | |
570 | |
571 template<typename StaticVisitor> | |
572 void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionStrongCode( | |
573 Heap* heap, HeapObject* object) { | |
574 Object** start_slot = | |
575 HeapObject::RawField(object, JSFunction::kPropertiesOffset); | |
576 Object** end_slot = | |
577 HeapObject::RawField(object, JSFunction::kCodeEntryOffset); | |
578 StaticVisitor::VisitPointers(heap, start_slot, start_slot, end_slot); | |
579 | |
580 VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset); | |
Michael Starzinger 2012/10/12 12:16:37: As discussed offline, I also added the static asse…
| |
581 | |
582 start_slot = | |
583 HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset); | |
584 end_slot = | |
585 HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset); | |
586 StaticVisitor::VisitPointers(heap, start_slot, start_slot, end_slot); | |
587 } | |
588 | |
589 | |
590 template<typename StaticVisitor> | |
591 void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionWeakCode( | |
592 Heap* heap, HeapObject* object) { | |
593 Object** start_slot = | |
594 HeapObject::RawField(object, JSFunction::kPropertiesOffset); | |
595 Object** end_slot = | |
596 HeapObject::RawField(object, JSFunction::kCodeEntryOffset); | |
597 StaticVisitor::VisitPointers(heap, start_slot, start_slot, end_slot); | |
598 | |
599 // Skip visiting kCodeEntryOffset as it is treated weakly here. | |
ulan 2012/10/12 11:48:32: Maybe add static asserts as in the comment above?
Michael Starzinger 2012/10/12 12:16:37: Done.
| |
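Similarly, the assert suggested here would tie the skipped code entry field to its neighbors at compile time; a sketch under the same assumption about the JSFunction field order (one pointer-sized code entry between the two visited ranges):

    STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
                  JSFunction::kPrototypeOrInitialMapOffset);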
600 | |
601 start_slot = | |
602 HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset); | |
603 end_slot = | |
604 HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset); | |
605 StaticVisitor::VisitPointers(heap, start_slot, start_slot, end_slot); | |
606 } | |
607 | |
358 | 608 |
359 void Code::CodeIterateBody(ObjectVisitor* v) { | 609 void Code::CodeIterateBody(ObjectVisitor* v) { |
360 int mode_mask = RelocInfo::kCodeTargetMask | | 610 int mode_mask = RelocInfo::kCodeTargetMask | |
361 RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) | | 611 RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) | |
362 RelocInfo::ModeMask(RelocInfo::GLOBAL_PROPERTY_CELL) | | 612 RelocInfo::ModeMask(RelocInfo::GLOBAL_PROPERTY_CELL) | |
363 RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) | | 613 RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) | |
364 RelocInfo::ModeMask(RelocInfo::JS_RETURN) | | 614 RelocInfo::ModeMask(RelocInfo::JS_RETURN) | |
365 RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) | | 615 RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) | |
366 RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY); | 616 RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY); |
367 | 617 |
(...skipping 39 matching lines...) | |
407 RelocIterator it(this, mode_mask); | 657 RelocIterator it(this, mode_mask); |
408 for (; !it.done(); it.next()) { | 658 for (; !it.done(); it.next()) { |
409 it.rinfo()->template Visit<StaticVisitor>(heap); | 659 it.rinfo()->template Visit<StaticVisitor>(heap); |
410 } | 660 } |
411 } | 661 } |
412 | 662 |
413 | 663 |
414 } } // namespace v8::internal | 664 } } // namespace v8::internal |
415 | 665 |
416 #endif // V8_OBJECTS_VISITING_INL_H_ | 666 #endif // V8_OBJECTS_VISITING_INL_H_ |