OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 120 matching lines...)
131 | 131 |
132 table_.Register(kVisitOddball, | 132 table_.Register(kVisitOddball, |
133 &FixedBodyVisitor<StaticVisitor, | 133 &FixedBodyVisitor<StaticVisitor, |
134 Oddball::BodyDescriptor, | 134 Oddball::BodyDescriptor, |
135 void>::Visit); | 135 void>::Visit); |
136 | 136 |
137 table_.Register(kVisitMap, &VisitMap); | 137 table_.Register(kVisitMap, &VisitMap); |
138 | 138 |
139 table_.Register(kVisitCode, &VisitCode); | 139 table_.Register(kVisitCode, &VisitCode); |
140 | 140 |
141 // Registration for kVisitSharedFunctionInfo is done by StaticVisitor. | 141 table_.Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo); |
142 | 142 |
143 // Registration for kVisitJSFunction is done by StaticVisitor. | 143 table_.Register(kVisitJSFunction, &VisitJSFunction); |
144 | 144 |
145 // Registration for kVisitJSRegExp is done by StaticVisitor. | 145 // Registration for kVisitJSRegExp is done by StaticVisitor. |
146 | 146 |
147 table_.Register(kVisitPropertyCell, | 147 table_.Register(kVisitPropertyCell, |
148 &FixedBodyVisitor<StaticVisitor, | 148 &FixedBodyVisitor<StaticVisitor, |
149 JSGlobalPropertyCell::BodyDescriptor, | 149 JSGlobalPropertyCell::BodyDescriptor, |
150 void>::Visit); | 150 void>::Visit); |
151 | 151 |
152 table_.template RegisterSpecializations<DataObjectVisitor, | 152 table_.template RegisterSpecializations<DataObjectVisitor, |
153 kVisitDataObject, | 153 kVisitDataObject, |
(...skipping 123 matching lines...)
277 Heap* heap = map->GetHeap(); | 277 Heap* heap = map->GetHeap(); |
278 Code* code = Code::cast(object); | 278 Code* code = Code::cast(object); |
279 if (FLAG_cleanup_code_caches_at_gc) { | 279 if (FLAG_cleanup_code_caches_at_gc) { |
280 code->ClearTypeFeedbackCells(heap); | 280 code->ClearTypeFeedbackCells(heap); |
281 } | 281 } |
282 code->CodeIterateBody<StaticVisitor>(heap); | 282 code->CodeIterateBody<StaticVisitor>(heap); |
283 } | 283 } |
284 | 284 |
285 | 285 |
286 template<typename StaticVisitor> | 286 template<typename StaticVisitor> |
| 287 void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo( |
| 288 Map* map, HeapObject* object) { |
| 289 Heap* heap = map->GetHeap(); |
| 290 SharedFunctionInfo* shared = SharedFunctionInfo::cast(object); |
| 291 if (shared->ic_age() != heap->global_ic_age()) { |
| 292 shared->ResetForNewContext(heap->global_ic_age()); |
| 293 } |
| 294 MarkCompactCollector* collector = heap->mark_compact_collector(); |
| 295 if (collector->is_code_flushing_enabled()) { |
| 296 if (IsFlushable(heap, shared)) { |
| 297 // This function's code looks flushable. But we have to postpone |
| 298 // the decision until we see all functions that point to the same |
| 299 // SharedFunctionInfo because some of them might be optimized. |
| 300 // That would also make the non-optimized version of the code |
| 301 // non-flushable, because it is required for bailing out from |
| 302 // optimized code. |
| 303 collector->code_flusher()->AddCandidate(shared); |
| 304 // Treat the reference to the code object weakly. |
| 305 VisitSharedFunctionInfoWeakCode(heap, object); |
| 306 return; |
| 307 } |
| 308 } |
| 309 VisitSharedFunctionInfoStrongCode(heap, object); |
| 310 } |
| 311 |
| 312 |
| 313 template<typename StaticVisitor> |
| 314 void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction( |
| 315 Map* map, HeapObject* object) { |
| 316 Heap* heap = map->GetHeap(); |
| 317 JSFunction* function = JSFunction::cast(object); |
| 318 MarkCompactCollector* collector = heap->mark_compact_collector(); |
| 319 if (collector->is_code_flushing_enabled()) { |
| 320 if (IsFlushable(heap, function)) { |
| 321 // This function's code looks flushable. But we have to postpone |
| 322 // the decision until we see all functions that point to the same |
| 323 // SharedFunctionInfo because some of them might be optimized. |
| 324 // That would also make the non-optimized version of the code |
| 325 // non-flushable, because it is required for bailing out from |
| 326 // optimized code. |
| 327 collector->code_flusher()->AddCandidate(function); |
| 328 // Visit shared function info immediately to avoid double checking |
| 329 // of its flushability later. This is just an optimization because |
| 330 // the shared function info would eventually be visited. |
| 331 SharedFunctionInfo* shared = function->unchecked_shared(); |
| 332 if (StaticVisitor::MarkObjectWithoutPush(heap, shared)) { |
| 333 StaticVisitor::MarkObject(heap, shared->map()); |
| 334 VisitSharedFunctionInfoWeakCode(heap, shared); |
| 335 } |
| 336 // Treat the reference to the code object weakly. |
| 337 VisitJSFunctionWeakCode(heap, object); |
| 338 return; |
| 339 } else { |
| 340 // Visit all unoptimized code objects to prevent flushing them. |
| 341 StaticVisitor::MarkObject(heap, function->shared()->code()); |
| 342 if (function->code()->kind() == Code::OPTIMIZED_FUNCTION) { |
| 343 MarkInlinedFunctionsCode(heap, function->code()); |
| 344 } |
| 345 } |
| 346 } |
| 347 VisitJSFunctionStrongCode(heap, object); |
| 348 } |
| 349 |
| 350 |
| 351 template<typename StaticVisitor> |
287 void StaticMarkingVisitor<StaticVisitor>::VisitJSRegExp( | 352 void StaticMarkingVisitor<StaticVisitor>::VisitJSRegExp( |
288 Map* map, HeapObject* object) { | 353 Map* map, HeapObject* object) { |
289 int last_property_offset = | 354 int last_property_offset = |
290 JSRegExp::kSize + kPointerSize * map->inobject_properties(); | 355 JSRegExp::kSize + kPointerSize * map->inobject_properties(); |
291 Object** start_slot = | 356 Object** start_slot = |
292 HeapObject::RawField(object, JSRegExp::kPropertiesOffset); | 357 HeapObject::RawField(object, JSRegExp::kPropertiesOffset); |
293 Object** end_slot = | 358 Object** end_slot = |
294 HeapObject::RawField(object, last_property_offset); | 359 HeapObject::RawField(object, last_property_offset); |
295 StaticVisitor::VisitPointers( | 360 StaticVisitor::VisitPointers( |
296 map->GetHeap(), start_slot, start_slot, end_slot); | 361 map->GetHeap(), start_slot, start_slot, end_slot); |
(...skipping 52 matching lines...)
349 heap->mark_compact_collector()->RecordSlot(slot, slot, obj); | 414 heap->mark_compact_collector()->RecordSlot(slot, slot, obj); |
350 StaticVisitor::MarkObjectWithoutPush(heap, obj); | 415 StaticVisitor::MarkObjectWithoutPush(heap, obj); |
351 } | 416 } |
352 | 417 |
353 for (int i = 0; i < transitions->number_of_transitions(); ++i) { | 418 for (int i = 0; i < transitions->number_of_transitions(); ++i) { |
354 StaticVisitor::VisitPointer(heap, transitions->GetKeySlot(i)); | 419 StaticVisitor::VisitPointer(heap, transitions->GetKeySlot(i)); |
355 } | 420 } |
356 } | 421 } |
357 | 422 |
358 | 423 |
| 424 template<typename StaticVisitor> |
| 425 void StaticMarkingVisitor<StaticVisitor>::MarkInlinedFunctionsCode( |
| 426 Heap* heap, Code* code) { |
| 427 // For an optimized function we should retain both the non-optimized version |
| 428 // of its code and the non-optimized versions of all inlined functions. |
| 429 // This is required to support bailing out from inlined code. |
| 430 DeoptimizationInputData* data = |
| 431 DeoptimizationInputData::cast(code->deoptimization_data()); |
| 432 FixedArray* literals = data->LiteralArray(); |
| 433 for (int i = 0, count = data->InlinedFunctionCount()->value(); |
| 434 i < count; |
| 435 i++) { |
| 436 JSFunction* inlined = JSFunction::cast(literals->get(i)); |
| 437 StaticVisitor::MarkObject(heap, inlined->shared()->code()); |
| 438 } |
| 439 } |
| 440 |
| 441 |
| 442 inline static bool IsValidNonBuiltinContext(Object* context) { |
| 443 return context->IsContext() && |
| 444 !Context::cast(context)->global_object()->IsJSBuiltinsObject(); |
| 445 } |
| 446 |
| 447 |
| 448 inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) { |
| 449 Object* undefined = heap->undefined_value(); |
| 450 return (info->script() != undefined) && |
| 451 (reinterpret_cast<Script*>(info->script())->source() != undefined); |
| 452 } |
| 453 |
| 454 |
| 455 template<typename StaticVisitor> |
| 456 bool StaticMarkingVisitor<StaticVisitor>::IsFlushable( |
| 457 Heap* heap, JSFunction* function) { |
| 458 SharedFunctionInfo* shared_info = function->unchecked_shared(); |
| 459 |
| 460 // The code is either on the stack, in the compilation cache, or |
| 461 // referenced by an optimized version of the function. |
| 462 MarkBit code_mark = Marking::MarkBitFrom(function->code()); |
| 463 if (code_mark.Get()) { |
| 464 if (!Marking::MarkBitFrom(shared_info).Get()) { |
| 465 shared_info->set_code_age(0); |
| 466 } |
| 467 return false; |
| 468 } |
| 469 |
| 470 // The function must have a valid context and not be a builtin. |
| 471 if (!IsValidNonBuiltinContext(function->unchecked_context())) { |
| 472 return false; |
| 473 } |
| 474 |
| 475 // We do not flush code for optimized functions. |
| 476 if (function->code() != shared_info->code()) { |
| 477 return false; |
| 478 } |
| 479 |
| 480 return IsFlushable(heap, shared_info); |
| 481 } |
| 482 |
| 483 |
| 484 template<typename StaticVisitor> |
| 485 bool StaticMarkingVisitor<StaticVisitor>::IsFlushable( |
| 486 Heap* heap, SharedFunctionInfo* shared_info) { |
| 487 // The code is either on the stack, in the compilation cache, or |
| 488 // referenced by an optimized version of the function. |
| 489 MarkBit code_mark = Marking::MarkBitFrom(shared_info->code()); |
| 490 if (code_mark.Get()) { |
| 491 return false; |
| 492 } |
| 493 |
| 494 // The function must be compiled and have the source code available, |
| 495 // to be able to recompile it in case we need the function again. |
| 496 if (!(shared_info->is_compiled() && HasSourceCode(heap, shared_info))) { |
| 497 return false; |
| 498 } |
| 499 |
| 500 // We never flush code for API functions. |
| 501 Object* function_data = shared_info->function_data(); |
| 502 if (function_data->IsFunctionTemplateInfo()) { |
| 503 return false; |
| 504 } |
| 505 |
| 506 // Only flush code for functions. |
| 507 if (shared_info->code()->kind() != Code::FUNCTION) { |
| 508 return false; |
| 509 } |
| 510 |
| 511 // Function must be lazy compilable. |
| 512 if (!shared_info->allows_lazy_compilation()) { |
| 513 return false; |
| 514 } |
| 515 |
| 516 // If this is a full script wrapped in a function we do not flush the code. |
| 517 if (shared_info->is_toplevel()) { |
| 518 return false; |
| 519 } |
| 520 |
| 521 // TODO(mstarzinger): The following will soon be replaced by a new way of |
| 522 // aging code that is based on an aging stub in the function prologue. |
| 523 |
| 524 // How many collections a newly compiled code object will survive before |
| 525 // being flushed. |
| 526 static const int kCodeAgeThreshold = 5; |
| 527 |
| 528 // Age this shared function info. |
| 529 if (shared_info->code_age() < kCodeAgeThreshold) { |
| 530 shared_info->set_code_age(shared_info->code_age() + 1); |
| 531 return false; |
| 532 } |
| 533 |
| 534 return true; |
| 535 } |
| 536 |
| 537 |
| 538 template<typename StaticVisitor> |
| 539 void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoStrongCode( |
| 540 Heap* heap, HeapObject* object) { |
| 541 StaticVisitor::BeforeVisitingSharedFunctionInfo(object); |
| 542 Object** start_slot = |
| 543 HeapObject::RawField(object, |
| 544 SharedFunctionInfo::BodyDescriptor::kStartOffset); |
| 545 Object** end_slot = |
| 546 HeapObject::RawField(object, |
| 547 SharedFunctionInfo::BodyDescriptor::kEndOffset); |
| 548 StaticVisitor::VisitPointers(heap, start_slot, start_slot, end_slot); |
| 549 } |
| 550 |
| 551 |
| 552 template<typename StaticVisitor> |
| 553 void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoWeakCode( |
| 554 Heap* heap, HeapObject* object) { |
| 555 StaticVisitor::BeforeVisitingSharedFunctionInfo(object); |
| 556 Object** name_slot = |
| 557 HeapObject::RawField(object, SharedFunctionInfo::kNameOffset); |
| 558 StaticVisitor::VisitPointer(heap, name_slot); |
| 559 |
| 560 // Skip visiting kCodeOffset as it is treated weakly here. |
| 561 STATIC_ASSERT(SharedFunctionInfo::kNameOffset + kPointerSize == |
| 562 SharedFunctionInfo::kCodeOffset); |
| 563 STATIC_ASSERT(SharedFunctionInfo::kCodeOffset + kPointerSize == |
| 564 SharedFunctionInfo::kOptimizedCodeMapOffset); |
| 565 |
| 566 Object** start_slot = |
| 567 HeapObject::RawField(object, |
| 568 SharedFunctionInfo::kOptimizedCodeMapOffset); |
| 569 Object** end_slot = |
| 570 HeapObject::RawField(object, |
| 571 SharedFunctionInfo::BodyDescriptor::kEndOffset); |
| 572 StaticVisitor::VisitPointers(heap, start_slot, start_slot, end_slot); |
| 573 } |
| 574 |
| 575 |
| 576 template<typename StaticVisitor> |
| 577 void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionStrongCode( |
| 578 Heap* heap, HeapObject* object) { |
| 579 Object** start_slot = |
| 580 HeapObject::RawField(object, JSFunction::kPropertiesOffset); |
| 581 Object** end_slot = |
| 582 HeapObject::RawField(object, JSFunction::kCodeEntryOffset); |
| 583 StaticVisitor::VisitPointers(heap, start_slot, start_slot, end_slot); |
| 584 |
| 585 VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset); |
| 586 STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize == |
| 587 JSFunction::kPrototypeOrInitialMapOffset); |
| 588 |
| 589 start_slot = |
| 590 HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset); |
| 591 end_slot = |
| 592 HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset); |
| 593 StaticVisitor::VisitPointers(heap, start_slot, start_slot, end_slot); |
| 594 } |
| 595 |
| 596 |
| 597 template<typename StaticVisitor> |
| 598 void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionWeakCode( |
| 599 Heap* heap, HeapObject* object) { |
| 600 Object** start_slot = |
| 601 HeapObject::RawField(object, JSFunction::kPropertiesOffset); |
| 602 Object** end_slot = |
| 603 HeapObject::RawField(object, JSFunction::kCodeEntryOffset); |
| 604 StaticVisitor::VisitPointers(heap, start_slot, start_slot, end_slot); |
| 605 |
| 606 // Skip visiting kCodeEntryOffset as it is treated weakly here. |
| 607 STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize == |
| 608 JSFunction::kPrototypeOrInitialMapOffset); |
| 609 |
| 610 start_slot = |
| 611 HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset); |
| 612 end_slot = |
| 613 HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset); |
| 614 StaticVisitor::VisitPointers(heap, start_slot, start_slot, end_slot); |
| 615 } |
| 616 |
| 617 |
359 void Code::CodeIterateBody(ObjectVisitor* v) { | 618 void Code::CodeIterateBody(ObjectVisitor* v) { |
360 int mode_mask = RelocInfo::kCodeTargetMask | | 619 int mode_mask = RelocInfo::kCodeTargetMask | |
361 RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) | | 620 RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) | |
362 RelocInfo::ModeMask(RelocInfo::GLOBAL_PROPERTY_CELL) | | 621 RelocInfo::ModeMask(RelocInfo::GLOBAL_PROPERTY_CELL) | |
363 RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) | | 622 RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) | |
364 RelocInfo::ModeMask(RelocInfo::JS_RETURN) | | 623 RelocInfo::ModeMask(RelocInfo::JS_RETURN) | |
365 RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) | | 624 RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) | |
366 RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY); | 625 RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY); |
367 | 626 |
368 // There are two places where we iterate code bodies: here and the | 627 // There are two places where we iterate code bodies: here and the |
(...skipping 38 matching lines...)
407 RelocIterator it(this, mode_mask); | 666 RelocIterator it(this, mode_mask); |
408 for (; !it.done(); it.next()) { | 667 for (; !it.done(); it.next()) { |
409 it.rinfo()->template Visit<StaticVisitor>(heap); | 668 it.rinfo()->template Visit<StaticVisitor>(heap); |
410 } | 669 } |
411 } | 670 } |
412 | 671 |
413 | 672 |
414 } } // namespace v8::internal | 673 } } // namespace v8::internal |
415 | 674 |
416 #endif // V8_OBJECTS_VISITING_INL_H_ | 675 #endif // V8_OBJECTS_VISITING_INL_H_ |