Chromium Code Reviews

Unified diff: src/mark-compact.cc

Issue 6529055: [Isolates] Merge crankshaft (r5922 from bleeding_edge). (Closed)
Patch Set: Win32 port Created 9 years, 10 months ago
 // Copyright 2006-2008 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 43 matching lines...)
 #ifdef DEBUG
       live_young_objects_size_(0),
       live_old_pointer_objects_size_(0),
       live_old_data_objects_size_(0),
       live_code_objects_size_(0),
       live_map_objects_size_(0),
       live_cell_objects_size_(0),
       live_lo_objects_size_(0),
       live_bytes_(0),
 #endif
-      heap_(NULL) {
-}
+      heap_(NULL),
+      code_flusher_(NULL) { }


 void MarkCompactCollector::CollectGarbage() {
   // Make sure that Prepare() has been called. The individual steps below will
   // update the state as they proceed.
   ASSERT(state_ == PREPARE_GC);

   // Prepare has selected whether to compact the old generation or not.
   // Tell the tracer.
   if (IsCompacting()) tracer_->set_is_compacting();
(...skipping 129 matching lines...)
 // overflow flag. When the overflow flag is set, we continue marking objects
 // reachable from the objects on the marking stack, but no longer push them on
 // the marking stack. Instead, we mark them as both marked and overflowed.
 // When the stack is in the overflowed state, objects marked as overflowed
 // have been reached and marked but their children have not been visited yet.
 // After emptying the marking stack, we clear the overflow flag and traverse
 // the heap looking for objects marked as overflowed, push them on the stack,
 // and continue with marking. This process repeats until all reachable
 // objects have been marked.

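For reference, a minimal self-contained sketch of the overflow-tolerant marking
loop described in the comment above (illustrative only, not V8 code: Obj,
MarkingStack, and the flag fields are hypothetical stand-ins for V8's mark bits
and marking stack):

#include <cstddef>
#include <vector>

// Hypothetical object model: a mark bit, an overflow bit, and child edges.
struct Obj {
  bool marked = false;
  bool overflowed = false;
  std::vector<Obj*> children;
};

// A fixed-capacity marking stack that can refuse pushes.
class MarkingStack {
 public:
  explicit MarkingStack(size_t capacity) : capacity_(capacity) {}
  bool overflowed = false;
  bool Push(Obj* obj) {
    if (stack_.size() == capacity_) { overflowed = true; return false; }
    stack_.push_back(obj);
    return true;
  }
  Obj* Pop() { Obj* top = stack_.back(); stack_.pop_back(); return top; }
  bool IsEmpty() const { return stack_.empty(); }
 private:
  size_t capacity_;
  std::vector<Obj*> stack_;
};

// Mark an object; if the stack is full, leave it marked-and-overflowed so a
// later heap scan can pick its children up.
static void MarkAndPush(Obj* obj, MarkingStack* stack) {
  if (obj->marked) return;
  obj->marked = true;
  if (!stack->Push(obj)) obj->overflowed = true;
}

// Drain the stack; after an overflow, rescan the heap for overflowed objects,
// refill the stack, and repeat until a drain finishes cleanly.
static void MarkReachable(const std::vector<Obj*>& roots,
                          const std::vector<Obj*>& heap,
                          MarkingStack* stack) {
  for (Obj* root : roots) MarkAndPush(root, stack);
  for (;;) {
    while (!stack->IsEmpty()) {
      Obj* obj = stack->Pop();
      for (Obj* child : obj->children) MarkAndPush(child, stack);
    }
    if (!stack->overflowed) return;
    stack->overflowed = false;
    for (Obj* obj : heap) {                  // linear rescan of the heap
      if (obj->overflowed) {
        obj->overflowed = false;
        if (!stack->Push(obj)) obj->overflowed = true;  // may overflow again
      }
    }
  }
}
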
+class CodeFlusher {
+ public:
+  explicit CodeFlusher(Isolate* isolate)
+      : isolate_(isolate),
+        jsfunction_candidates_head_(NULL),
+        shared_function_info_candidates_head_(NULL) {}
+
+  void AddCandidate(SharedFunctionInfo* shared_info) {
+    SetNextCandidate(shared_info, shared_function_info_candidates_head_);
+    shared_function_info_candidates_head_ = shared_info;
+  }
+
+  void AddCandidate(JSFunction* function) {
+    ASSERT(function->unchecked_code() ==
+           function->unchecked_shared()->unchecked_code());
+
+    SetNextCandidate(function, jsfunction_candidates_head_);
+    jsfunction_candidates_head_ = function;
+  }
+
+  void ProcessCandidates() {
+    ProcessSharedFunctionInfoCandidates();
+    ProcessJSFunctionCandidates();
+  }
+
+ private:
+  void ProcessJSFunctionCandidates() {
+    Code* lazy_compile = isolate_->builtins()->builtin(Builtins::LazyCompile);
+
+    JSFunction* candidate = jsfunction_candidates_head_;
+    JSFunction* next_candidate;
+    while (candidate != NULL) {
+      next_candidate = GetNextCandidate(candidate);
+
+      SharedFunctionInfo* shared = candidate->unchecked_shared();
+
+      Code* code = shared->unchecked_code();
+      if (!code->IsMarked()) {
+        shared->set_code(lazy_compile);
+        candidate->set_code(lazy_compile);
+      } else {
+        candidate->set_code(shared->unchecked_code());
+      }
+
+      candidate = next_candidate;
+    }
+
+    jsfunction_candidates_head_ = NULL;
+  }
+
+
+  void ProcessSharedFunctionInfoCandidates() {
+    Code* lazy_compile = isolate_->builtins()->builtin(Builtins::LazyCompile);
+
+    SharedFunctionInfo* candidate = shared_function_info_candidates_head_;
+    SharedFunctionInfo* next_candidate;
+    while (candidate != NULL) {
+      next_candidate = GetNextCandidate(candidate);
+      SetNextCandidate(candidate, NULL);
+
+      Code* code = candidate->unchecked_code();
+      if (!code->IsMarked()) {
+        candidate->set_code(lazy_compile);
+      }
+
+      candidate = next_candidate;
+    }
+
+    shared_function_info_candidates_head_ = NULL;
+  }
+
+  static JSFunction** GetNextCandidateField(JSFunction* candidate) {
+    return reinterpret_cast<JSFunction**>(
+        candidate->address() + JSFunction::kCodeEntryOffset);
+  }
+
+  static JSFunction* GetNextCandidate(JSFunction* candidate) {
+    return *GetNextCandidateField(candidate);
+  }
+
+  static void SetNextCandidate(JSFunction* candidate,
+                               JSFunction* next_candidate) {
+    *GetNextCandidateField(candidate) = next_candidate;
+  }
+
+  STATIC_ASSERT(kPointerSize <= Code::kHeaderSize - Code::kHeaderPaddingStart);
+
+  static SharedFunctionInfo** GetNextCandidateField(
+      SharedFunctionInfo* candidate) {
+    Code* code = candidate->unchecked_code();
+    return reinterpret_cast<SharedFunctionInfo**>(
+        code->address() + Code::kHeaderPaddingStart);
+  }
+
+  static SharedFunctionInfo* GetNextCandidate(SharedFunctionInfo* candidate) {
+    return *GetNextCandidateField(candidate);
+  }
+
+  static void SetNextCandidate(SharedFunctionInfo* candidate,
+                               SharedFunctionInfo* next_candidate) {
+    *GetNextCandidateField(candidate) = next_candidate;
+  }
+
+  Isolate* isolate_;
+  JSFunction* jsfunction_candidates_head_;
+  SharedFunctionInfo* shared_function_info_candidates_head_;
+
+  DISALLOW_COPY_AND_ASSIGN(CodeFlusher);
+};
+
+
+MarkCompactCollector::~MarkCompactCollector() {
+  if (code_flusher_ != NULL) {
+    delete code_flusher_;
+    code_flusher_ = NULL;
+  }
+}
+
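The CodeFlusher above stores its candidate lists without extra memory: the link
pointer is threaded through a field that is dead during collection and
rewritten afterwards, namely a JSFunction's code-entry slot or the header
padding of a code object (the STATIC_ASSERT checks the padding can hold a
pointer). A toy version of that intrusive-list trick, with a hypothetical
Function type standing in for the real objects:

#include <cstdio>
#include <initializer_list>

struct Function {
  const char* name;
  void* code_entry;  // rewritten after flushing, so usable as scratch space
};

// Reinterpret the scratch field as the link of a singly-linked list.
static Function** NextField(Function* f) {
  return reinterpret_cast<Function**>(&f->code_entry);
}

int main() {
  Function a{"a", nullptr}, b{"b", nullptr}, c{"c", nullptr};
  Function* head = nullptr;

  // AddCandidate: push onto the intrusive list, newest first.
  for (Function* f : {&a, &b, &c}) {
    *NextField(f) = head;
    head = f;
  }

  // ProcessCandidates: walk the list; processing also rewrites the borrowed
  // field, which is exactly why it was free to use during collection.
  for (Function* f = head; f != nullptr;) {
    Function* next = *NextField(f);
    std::printf("candidate: %s\n", f->name);
    f->code_entry = nullptr;  // the real collector installs new code here
    f = next;
  }
  return 0;
}
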

 static inline HeapObject* ShortCircuitConsString(Object** p) {
   // Optimization: If the heap object pointed to by p is a non-symbol
   // cons string whose right substring is HEAP->empty_string, update
   // it in place to its left substring. Return the updated value.
   //
   // Here we assume that if we change *p, we replace it with a heap object
   // (ie, the left substring of a cons string is always a heap object).
   //
   // The check performed is:
(...skipping 23 matching lines...)
   return HeapObject::cast(first);
 }


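A toy model of the short-circuit optimization sketched in the comment above,
with a hypothetical Str type in place of V8's cons strings (the real check
also requires the string to be a non-symbol shortcut candidate):

#include <string>

// Toy cons string: a node either holds flat text or concatenates two halves.
struct Str {
  std::string flat;           // used when left/right are null
  Str* left = nullptr;
  Str* right = nullptr;
  bool IsEmptyString() const { return left == nullptr && flat.empty(); }
};

// If *p is a cons whose right half is the empty string, overwrite the slot
// with the left half so later visits skip the wrapper node entirely.
static Str* ShortCircuit(Str** p) {
  Str* s = *p;
  if (s->left != nullptr && s->right != nullptr && s->right->IsEmptyString()) {
    *p = s->left;
  }
  return *p;
}
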
 class StaticMarkingVisitor : public StaticVisitorBase {
  public:
   static inline void IterateBody(Map* map, HeapObject* obj) {
     table_.GetVisitor(map)(map, obj);
   }

-  static void EnableCodeFlushing(bool enabled) {
-    if (enabled) {
-      table_.Register(kVisitJSFunction, &VisitJSFunctionAndFlushCode);
-    } else {
-      table_.Register(kVisitJSFunction, &VisitJSFunction);
-    }
-  }
-
   static void Initialize() {
     table_.Register(kVisitShortcutCandidate,
                     &FixedBodyVisitor<StaticMarkingVisitor,
                                       ConsString::BodyDescriptor,
                                       void>::Visit);

     table_.Register(kVisitConsString,
                     &FixedBodyVisitor<StaticMarkingVisitor,
                                       ConsString::BodyDescriptor,
                                       void>::Visit);


     table_.Register(kVisitFixedArray,
                     &FlexibleBodyVisitor<StaticMarkingVisitor,
                                          FixedArray::BodyDescriptor,
                                          void>::Visit);

     table_.Register(kVisitGlobalContext,
                     &FixedBodyVisitor<StaticMarkingVisitor,
                                       Context::MarkCompactBodyDescriptor,
                                       void>::Visit);

-    table_.Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo);
-
     table_.Register(kVisitByteArray, &DataObjectVisitor::Visit);
     table_.Register(kVisitSeqAsciiString, &DataObjectVisitor::Visit);
     table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit);

     table_.Register(kVisitOddball,
                     &FixedBodyVisitor<StaticMarkingVisitor,
                                       Oddball::BodyDescriptor,
                                       void>::Visit);
     table_.Register(kVisitMap,
                     &FixedBodyVisitor<StaticMarkingVisitor,
                                       Map::BodyDescriptor,
                                       void>::Visit);

     table_.Register(kVisitCode, &VisitCode);

-    table_.Register(kVisitJSFunction, &VisitJSFunctionAndFlushCode);
+    table_.Register(kVisitSharedFunctionInfo,
+                    &VisitSharedFunctionInfoAndFlushCode);
+
+    table_.Register(kVisitJSFunction,
+                    &VisitJSFunctionAndFlushCode);

     table_.Register(kVisitPropertyCell,
                     &FixedBodyVisitor<StaticMarkingVisitor,
                                       JSGlobalPropertyCell::BodyDescriptor,
                                       void>::Visit);

     table_.RegisterSpecializations<DataObjectVisitor,
                                    kVisitDataObject,
                                    kVisitDataObjectGeneric>();

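These registrations fill a flat table indexed by a visitor id cached on each
map, so IterateBody above costs one array load plus an indirect call. A compact
sketch of the dispatch pattern, using hypothetical ids and types rather than
the real VisitorDispatchTable:

#include <cstdio>

// Hypothetical visitor ids; the real enum is far larger.
enum VisitorId { kVisitA, kVisitB, kVisitorIdCount };

struct Map { VisitorId visitor_id; };
struct HeapObj { Map* map; };

typedef void (*Callback)(Map* map, HeapObj* obj);

class DispatchTable {
 public:
  void Register(VisitorId id, Callback cb) { table_[id] = cb; }
  Callback GetVisitor(Map* map) const { return table_[map->visitor_id]; }
 private:
  Callback table_[kVisitorIdCount] = {};
};

static void VisitA(Map*, HeapObj*) { std::puts("visit A"); }
static void VisitB(Map*, HeapObj*) { std::puts("visit B"); }

int main() {
  DispatchTable table;
  table.Register(kVisitA, &VisitA);
  table.Register(kVisitB, &VisitB);

  Map map_a{kVisitA};
  HeapObj obj{&map_a};
  table.GetVisitor(obj.map)(obj.map, &obj);  // dispatches to VisitA
  return 0;
}
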
(...skipping 25 matching lines...)
     Code* code = Code::GetCodeFromTargetAddress(rinfo->target_address());
     if (FLAG_cleanup_ics_at_gc && code->is_inline_cache_stub()) {
       IC::Clear(rinfo->pc());
       // Please note targets for cleared inline caches do not have to be
       // marked since they are contained in HEAP->non_monomorphic_cache().
     } else {
       HEAP->mark_compact_collector()->MarkObject(code);
     }
   }

+  static void VisitGlobalPropertyCell(RelocInfo* rinfo) {
+    ASSERT(rinfo->rmode() == RelocInfo::GLOBAL_PROPERTY_CELL);
+    Object* cell = rinfo->target_cell();
+    Object* old_cell = cell;
+    VisitPointer(HEAP, &cell);
+    if (cell != old_cell) {
+      rinfo->set_target_cell(reinterpret_cast<JSGlobalPropertyCell*>(cell));
+    }
+  }
+
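VisitGlobalPropertyCell, like VisitCodeEntry further down, follows a
read-visit-write-back pattern for slots that do not store a plain tagged
pointer. A generic sketch of that shape (placeholder types; the concrete
encode and decode steps differ per slot kind):

// Sketch only: 'decoded' is read out of some encoded location (a reloc
// entry, a code-entry address, ...), the marker visits a local copy, and
// the encoded location is rewritten only if the visit changed the pointer.
struct Object;

template <typename VisitFn, typename WriteBackFn>
void UpdateEncodedSlot(Object* decoded, VisitFn visit, WriteBackFn write_back) {
  Object* value = decoded;
  Object* old_value = value;
  visit(&value);                 // may mark and/or rewrite the pointer
  if (value != old_value) {
    write_back(value);           // re-encode only when it actually changed
  }
}
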
   static inline void VisitDebugTarget(RelocInfo* rinfo) {
     ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) &&
             rinfo->IsPatchedReturnSequence()) ||
            (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
             rinfo->IsPatchedDebugBreakSlotSequence()));
     HeapObject* code = Code::GetCodeFromTargetAddress(rinfo->call_address());
     HEAP->mark_compact_collector()->MarkObject(code);
   }

   // Mark object pointed to by p.
(...skipping 75 matching lines...)
     return (info->script() != undefined) &&
         (reinterpret_cast<Script*>(info->script())->source() != undefined);
   }


   inline static bool IsCompiled(JSFunction* function) {
     return function->unchecked_code() !=
         Isolate::Current()->builtins()->builtin(Builtins::LazyCompile);
   }

-
   inline static bool IsCompiled(SharedFunctionInfo* function) {
     return function->unchecked_code() !=
         Isolate::Current()->builtins()->builtin(Builtins::LazyCompile);
   }

-  static void FlushCodeForFunction(JSFunction* function) {
-    SharedFunctionInfo* shared_info = function->unchecked_shared();
-
-    if (shared_info->IsMarked()) return;
-
-    // Special handling if the function and shared info objects
-    // have different code objects.
-    if (function->unchecked_code() != shared_info->unchecked_code()) {
-      // If the shared function has been flushed but the function has not,
-      // we flush the function if possible.
-      if (!IsCompiled(shared_info) &&
-          IsCompiled(function) &&
-          !function->unchecked_code()->IsMarked()) {
-        function->set_code(shared_info->unchecked_code());
-      }
-      return;
-    }
-
-    // Code is either on stack or in compilation cache.
-    if (shared_info->unchecked_code()->IsMarked()) {
-      shared_info->set_code_age(0);
-      return;
-    }
-
-    // The function must be compiled and have the source code available,
-    // to be able to recompile it in case we need the function again.
-    if (!(shared_info->is_compiled() && HasSourceCode(shared_info))) return;
-
-    // We never flush code for Api functions.
-    Object* function_data = shared_info->function_data();
-    if (function_data->IsHeapObject() &&
-        (SafeMap(function_data)->instance_type() ==
-         FUNCTION_TEMPLATE_INFO_TYPE)) {
-      return;
-    }
-
-    // Only flush code for functions.
-    if (shared_info->code()->kind() != Code::FUNCTION) return;
-
-    // Function must be lazy compilable.
-    if (!shared_info->allows_lazy_compilation()) return;
-
-    // If this is a full script wrapped in a function we do not flush the code.
-    if (shared_info->is_toplevel()) return;
-
-    // Age this shared function info.
-    if (shared_info->code_age() < kCodeAgeThreshold) {
-      shared_info->set_code_age(shared_info->code_age() + 1);
-      return;
-    }
-
-    // Compute the lazy compilable version of the code.
-    Code* code = Isolate::Current()->builtins()->builtin(Builtins::LazyCompile);
-    shared_info->set_code(code);
-    function->set_code(code);
-  }
+  inline static bool IsFlushable(JSFunction* function) {
+    SharedFunctionInfo* shared_info = function->unchecked_shared();
+
+    // Code is either on stack, in the compilation cache, or referenced
+    // by an optimized version of the function.
+    if (function->unchecked_code()->IsMarked()) {
+      shared_info->set_code_age(0);
+      return false;
+    }
+
+    // We do not flush code for optimized functions.
+    if (function->code() != shared_info->unchecked_code()) {
+      return false;
+    }
+
+    return IsFlushable(shared_info);
+  }
+
+  inline static bool IsFlushable(SharedFunctionInfo* shared_info) {
+    // Code is either on stack, in the compilation cache, or referenced
+    // by an optimized version of the function.
+    if (shared_info->unchecked_code()->IsMarked()) {
+      shared_info->set_code_age(0);
+      return false;
+    }
+
+    // The function must be compiled and have the source code available,
+    // to be able to recompile it in case we need the function again.
+    if (!(shared_info->is_compiled() && HasSourceCode(shared_info))) {
+      return false;
+    }
+
+    // We never flush code for Api functions.
+    Object* function_data = shared_info->function_data();
+    if (function_data->IsHeapObject() &&
+        (SafeMap(function_data)->instance_type() ==
+         FUNCTION_TEMPLATE_INFO_TYPE)) {
+      return false;
+    }
+
+    // Only flush code for functions.
+    if (shared_info->code()->kind() != Code::FUNCTION) return false;
+
+    // Function must be lazy compilable.
+    if (!shared_info->allows_lazy_compilation()) return false;
+
+    // If this is a full script wrapped in a function we do not flush the code.
+    if (shared_info->is_toplevel()) return false;
+
+    // Age this shared function info.
+    if (shared_info->code_age() < kCodeAgeThreshold) {
+      shared_info->set_code_age(shared_info->code_age() + 1);
+      return false;
+    }
+
+    return true;
+  }
+
+
+  static bool FlushCodeForFunction(Heap* heap, JSFunction* function) {
+    if (!IsFlushable(function)) return false;
+
+    // This function's code looks flushable. But we have to postpone the
+    // decision until we see all functions that point to the same
+    // SharedFunctionInfo, because some of them might be optimized.
+    // That would make the nonoptimized version of the code nonflushable,
+    // because it is required for bailing out from optimized code.
+    heap->mark_compact_collector()->code_flusher()->AddCandidate(function);
+    return true;
+  }
+
+
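The aging check at the end of IsFlushable means code is flushed only after it
has sat unused through kCodeAgeThreshold consecutive mark-compact collections,
and any use (the IsMarked early-out) resets the age. A small simulation of the
policy; the threshold value of 5 here is an assumption for illustration, not
the constant from this file:

#include <cstdio>

const int kCodeAgeThreshold = 5;  // assumed value, for illustration only

struct SharedInfo { int code_age = 0; };

// Returns true when the current collection decides to flush.
static bool OnCollection(SharedInfo* info, bool code_was_used) {
  if (code_was_used) {     // corresponds to the IsMarked() early-out above
    info->code_age = 0;
    return false;
  }
  if (info->code_age < kCodeAgeThreshold) {
    info->code_age++;      // age it, but keep the code around for now
    return false;
  }
  return true;             // old enough: replace with the lazy-compile stub
}

int main() {
  SharedInfo f;
  for (int gc = 1; gc <= 8; gc++) {
    bool used = (gc == 2);  // the function runs once, just before GC 2
    if (OnCollection(&f, used)) {
      std::printf("GC %d: flushed\n", gc);
      break;
    }
    std::printf("GC %d: age now %d\n", gc, f.code_age);
  }
  return 0;
}
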
   static inline Map* SafeMap(Object* obj) {
     MapWord map_word = HeapObject::cast(obj)->map_word();
     map_word.ClearMark();
     map_word.ClearOverflow();
     return map_word.ToMap();
   }


   static inline bool IsJSBuiltinsObject(Object* obj) {
     return obj->IsHeapObject() &&
(...skipping 14 matching lines...)
     Context* context = reinterpret_cast<Context*>(ctx);

     if (IsJSBuiltinsObject(context->global())) {
       return false;
     }

     return true;
   }


-  static void VisitSharedFunctionInfo(Map* map, HeapObject* object) {
+  static void VisitSharedFunctionInfoGeneric(Map* map, HeapObject* object) {
     SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(object);
-    if (shared->IsInobjectSlackTrackingInProgress()) {
-      shared->DetachInitialMap();
-    }
+
+    if (shared->IsInobjectSlackTrackingInProgress()) shared->DetachInitialMap();
+
     FixedBodyVisitor<StaticMarkingVisitor,
                      SharedFunctionInfo::BodyDescriptor,
                      void>::Visit(map, object);
   }


+  static void VisitSharedFunctionInfoAndFlushCode(Map* map,
+                                                  HeapObject* object) {
+    MarkCompactCollector* collector = map->heap()->mark_compact_collector();
+    if (!collector->is_code_flushing_enabled()) {
+      VisitSharedFunctionInfoGeneric(map, object);
+      return;
+    }
+    VisitSharedFunctionInfoAndFlushCodeGeneric(map, object, false);
+  }
+
+
+  static void VisitSharedFunctionInfoAndFlushCodeGeneric(
+      Map* map, HeapObject* object, bool known_flush_code_candidate) {
+    Heap* heap = map->heap();
+    SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(object);
+
+    if (shared->IsInobjectSlackTrackingInProgress()) shared->DetachInitialMap();
+
+    if (!known_flush_code_candidate) {
+      known_flush_code_candidate = IsFlushable(shared);
+      if (known_flush_code_candidate) {
+        heap->mark_compact_collector()->code_flusher()->AddCandidate(shared);
+      }
+    }
+
+    VisitSharedFunctionInfoFields(heap, object, known_flush_code_candidate);
+  }
+
+
   static void VisitCodeEntry(Heap* heap, Address entry_address) {
     Object* code = Code::GetObjectFromEntryAddress(entry_address);
     Object* old_code = code;
     VisitPointer(heap, &code);
     if (code != old_code) {
       Memory::Address_at(entry_address) =
           reinterpret_cast<Code*>(code)->entry();
     }
   }


   static void VisitJSFunctionAndFlushCode(Map* map, HeapObject* object) {
+    Heap* heap = map->heap();
+    MarkCompactCollector* collector = heap->mark_compact_collector();
+    if (!collector->is_code_flushing_enabled()) {
+      VisitJSFunction(map, object);
+      return;
+    }
+
     JSFunction* jsfunction = reinterpret_cast<JSFunction*>(object);
     // The function must have a valid context and not be a builtin.
+    bool flush_code_candidate = false;
     if (IsValidNotBuiltinContext(jsfunction->unchecked_context())) {
-      FlushCodeForFunction(jsfunction);
+      flush_code_candidate = FlushCodeForFunction(heap, jsfunction);
     }
-    VisitJSFunction(map, object);
+
+    if (!flush_code_candidate) {
+      collector->MarkObject(jsfunction->unchecked_shared()->unchecked_code());
+
+      if (jsfunction->unchecked_code()->kind() == Code::OPTIMIZED_FUNCTION) {
+        // For optimized functions we should retain both the non-optimized
+        // version of its code and the non-optimized version of all inlined
+        // functions. This is required to support bailing out from inlined
+        // code.
+        DeoptimizationInputData* data =
+            reinterpret_cast<DeoptimizationInputData*>(
+                jsfunction->unchecked_code()->unchecked_deoptimization_data());
+
+        FixedArray* literals = data->UncheckedLiteralArray();
+
+        for (int i = 0, count = data->InlinedFunctionCount()->value();
+             i < count;
+             i++) {
+          JSFunction* inlined = reinterpret_cast<JSFunction*>(literals->get(i));
+          collector->MarkObject(inlined->unchecked_shared()->unchecked_code());
+        }
+      }
+    }
+
+    VisitJSFunctionFields(map,
+                          reinterpret_cast<JSFunction*>(object),
+                          flush_code_candidate);
   }

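The branch above encodes a retention rule: when optimized code stays alive, the
non-optimized code of the function and of every function inlined into it must
stay alive as well, because a bailout re-enters those non-optimized versions.
Schematically (placeholder types, not V8's object layout):

#include <vector>

struct Code { bool marked = false; bool optimized = false; };

struct Fn {
  Code* unoptimized;           // the version a deopt falls back to
  std::vector<Fn*> inlined;    // recorded in real deoptimization data
};

// Keep alive everything a bailout from fn's active code could re-enter.
static void RetainForBailout(Fn* fn, Code* active_code) {
  fn->unoptimized->marked = true;
  if (active_code->optimized) {
    for (Fn* callee : fn->inlined) {
      callee->unoptimized->marked = true;
    }
  }
}
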

   static void VisitJSFunction(Map* map, HeapObject* object) {
-#define SLOT_ADDR(obj, offset) \
-    reinterpret_cast<Object**>((obj)->address() + offset)
+    VisitJSFunctionFields(map,
+                          reinterpret_cast<JSFunction*>(object),
+                          false);
+  }
+
+
+#define SLOT_ADDR(obj, offset) \
+  reinterpret_cast<Object**>((obj)->address() + offset)
+
+
+  static inline void VisitJSFunctionFields(Map* map,
+                                           JSFunction* object,
+                                           bool flush_code_candidate) {
     Heap* heap = map->heap();
+    MarkCompactCollector* collector = heap->mark_compact_collector();
+
     VisitPointers(heap,
                   SLOT_ADDR(object, JSFunction::kPropertiesOffset),
                   SLOT_ADDR(object, JSFunction::kCodeEntryOffset));

-    VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset);
+    if (!flush_code_candidate) {
+      VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset);
+    } else {
+      // Don't visit the code object.
+
+      // Visit the shared function info to avoid double-checking its
+      // flushability.
+      SharedFunctionInfo* shared_info = object->unchecked_shared();
+      if (!shared_info->IsMarked()) {
+        Map* shared_info_map = shared_info->map();
+        collector->SetMark(shared_info);
+        collector->MarkObject(shared_info_map);
+        VisitSharedFunctionInfoAndFlushCodeGeneric(shared_info_map,
+                                                   shared_info,
+                                                   true);
+      }
+    }

     VisitPointers(heap,
                   SLOT_ADDR(object,
                             JSFunction::kCodeEntryOffset + kPointerSize),
-                  SLOT_ADDR(object, JSFunction::kSize));
+                  SLOT_ADDR(object, JSFunction::kNonWeakFieldsEndOffset));

-#undef SLOT_ADDR
+    // Don't visit the next function list field as it is a weak reference.
   }


+  static void VisitSharedFunctionInfoFields(Heap* heap,
+                                            HeapObject* object,
+                                            bool flush_code_candidate) {
+    VisitPointer(heap, SLOT_ADDR(object, SharedFunctionInfo::kNameOffset));
+
+    if (!flush_code_candidate) {
+      VisitPointer(heap, SLOT_ADDR(object, SharedFunctionInfo::kCodeOffset));
+    }
+
+    VisitPointers(heap,
+                  SLOT_ADDR(object, SharedFunctionInfo::kScopeInfoOffset),
+                  SLOT_ADDR(object, SharedFunctionInfo::kSize));
+  }
+
+#undef SLOT_ADDR

   typedef void (*Callback)(Map* map, HeapObject* object);

   static VisitorDispatchTable<Callback> table_;
 };


 VisitorDispatchTable<StaticMarkingVisitor::Callback>
     StaticMarkingVisitor::table_;


 class MarkingVisitor : public ObjectVisitor {
  public:
   explicit MarkingVisitor(Heap* heap) : heap_(heap) { }

   void VisitPointer(Object** p) {
     StaticMarkingVisitor::VisitPointer(heap_, p);
   }

   void VisitPointers(Object** start, Object** end) {
     StaticMarkingVisitor::VisitPointers(heap_, start, end);
   }

   void VisitCodeTarget(RelocInfo* rinfo) {
     StaticMarkingVisitor::VisitCodeTarget(rinfo);
   }

+  void VisitGlobalPropertyCell(RelocInfo* rinfo) {
+    StaticMarkingVisitor::VisitGlobalPropertyCell(rinfo);
+  }
+
   void VisitDebugTarget(RelocInfo* rinfo) {
     StaticMarkingVisitor::VisitDebugTarget(rinfo);
   }

  private:
   Heap* heap_;
 };


 class CodeMarkingVisitor : public ThreadVisitor {
  public:
+  explicit CodeMarkingVisitor(MarkCompactCollector* collector)
+      : collector_(collector) {}
+
   void VisitThread(ThreadLocalTop* top) {
     for (StackFrameIterator it(top); !it.done(); it.Advance()) {
-      HEAP->mark_compact_collector()->MarkObject(it.frame()->unchecked_code());
+      collector_->MarkObject(it.frame()->unchecked_code());
     }
   }
+
+ private:
+  MarkCompactCollector* collector_;
 };


 class SharedFunctionInfoMarkingVisitor : public ObjectVisitor {
  public:
+  explicit SharedFunctionInfoMarkingVisitor(MarkCompactCollector* collector)
+      : collector_(collector) {}
+
   void VisitPointers(Object** start, Object** end) {
     for (Object** p = start; p < end; p++) VisitPointer(p);
   }

   void VisitPointer(Object** slot) {
     Object* obj = *slot;
-    if (obj->IsHeapObject()) {
-      HEAP->mark_compact_collector()->MarkObject(HeapObject::cast(obj));
+    if (obj->IsSharedFunctionInfo()) {
+      SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(obj);
+      collector_->MarkObject(shared->unchecked_code());
+      collector_->MarkObject(shared);
     }
   }
+
+ private:
+  MarkCompactCollector* collector_;
 };


 void MarkCompactCollector::PrepareForCodeFlushing() {
   ASSERT(heap_ == Isolate::Current()->heap());

   if (!FLAG_flush_code) {
-    StaticMarkingVisitor::EnableCodeFlushing(false);
+    EnableCodeFlushing(false);
     return;
   }

 #ifdef ENABLE_DEBUGGER_SUPPORT
   if (heap_->isolate()->debug()->IsLoaded() ||
       heap_->isolate()->debug()->has_break_points()) {
-    StaticMarkingVisitor::EnableCodeFlushing(false);
+    EnableCodeFlushing(false);
     return;
   }
 #endif
-  StaticMarkingVisitor::EnableCodeFlushing(true);
+  EnableCodeFlushing(true);

   // Ensure that the empty descriptor array is marked. MarkDescriptorArray
   // relies on it being marked before any other descriptor array.
   MarkObject(heap_->raw_unchecked_empty_descriptor_array());

   // Make sure we are not referencing the code from the stack.
-  MarkCompactCollector* collector = heap_->mark_compact_collector();
+  ASSERT(this == heap_->mark_compact_collector());
   for (StackFrameIterator it; !it.done(); it.Advance()) {
-    collector->MarkObject(it.frame()->unchecked_code());
+    MarkObject(it.frame()->unchecked_code());
   }

   // Iterate the archived stacks in all threads to check if
   // the code is referenced.
-  CodeMarkingVisitor code_marking_visitor;
+  CodeMarkingVisitor code_marking_visitor(this);
   heap_->isolate()->thread_manager()->IterateArchivedThreads(
       &code_marking_visitor);

-  SharedFunctionInfoMarkingVisitor visitor;
+  SharedFunctionInfoMarkingVisitor visitor(this);
   heap_->isolate()->compilation_cache()->IterateFunctions(&visitor);
+  heap_->isolate()->handle_scope_implementer()->Iterate(&visitor);

-  collector->ProcessMarkingStack();
+  ProcessMarkingStack();
 }


 // Visitor class for marking heap roots.
 class RootMarkingVisitor : public ObjectVisitor {
  public:
   explicit RootMarkingVisitor(Heap* heap)
       : collector_(heap->mark_compact_collector()) { }

   void VisitPointer(Object** p) {
(...skipping 411 matching lines...)
   symbol_table->ElementsRemoved(v.PointersRemoved());
   heap_->external_string_table_.Iterate(&v);
   heap_->external_string_table_.CleanUp();

   // Process the weak references.
   MarkCompactWeakObjectRetainer mark_compact_object_retainer;
   heap_->ProcessWeakReferences(&mark_compact_object_retainer);

   // Remove object groups after marking phase.
   heap_->isolate_->global_handles()->RemoveObjectGroups();
+
+  // Flush code from collected candidates.
+  if (is_code_flushing_enabled()) {
+    code_flusher_->ProcessCandidates();
+  }
 }


 #ifdef DEBUG
 void MarkCompactCollector::UpdateLiveObjectCount(HeapObject* obj) {
   live_bytes_ += obj->Size();
   if (HEAP->new_space()->Contains(obj)) {
     live_young_objects_size_ += obj->Size();
   } else if (HEAP->map_space()->Contains(obj)) {
     ASSERT(obj->IsMap());
(...skipping 1202 matching lines...)
       &MarkCompactCollector::UpdatePointersInOldObject);
   int live_codes_size = IterateLiveObjects(
       heap_->code_space(), &MarkCompactCollector::UpdatePointersInOldObject);
   int live_cells_size = IterateLiveObjects(
       heap_->cell_space(), &MarkCompactCollector::UpdatePointersInOldObject);
   int live_news_size = IterateLiveObjects(
       heap_->new_space(), &MarkCompactCollector::UpdatePointersInNewObject);

   // Large objects do not move, the map word can be updated directly.
   LargeObjectIterator it(heap_->lo_space());
-  for (HeapObject* obj = it.next(); obj != NULL; obj = it.next())
+  for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
     UpdatePointersInNewObject(obj);
+  }

   USE(live_maps_size);
   USE(live_pointer_olds_size);
   USE(live_data_olds_size);
   USE(live_codes_size);
   USE(live_cells_size);
   USE(live_news_size);
   ASSERT(live_maps_size == live_map_objects_size_);
   ASSERT(live_data_olds_size == live_old_data_objects_size_);
   ASSERT(live_pointer_olds_size == live_old_pointer_objects_size_);
(...skipping 342 matching lines...)
   if (copied_to->IsJSFunction()) {
     PROFILE(FunctionMoveEvent(heap_, old_addr, new_addr));
     PROFILE(FunctionCreateEventFromMove(heap_, JSFunction::cast(copied_to)));
   }
   HEAP_PROFILE(heap_, ObjectMoveEvent(old_addr, new_addr));

   return obj_size;
 }


+void MarkCompactCollector::EnableCodeFlushing(bool enable) {
+  if (enable) {
+    if (code_flusher_ != NULL) return;
+    code_flusher_ = new CodeFlusher(heap_->isolate());
+  } else {
+    if (code_flusher_ == NULL) return;
+    delete code_flusher_;
+    code_flusher_ = NULL;
+  }
+}
+
+
 void MarkCompactCollector::ReportDeleteIfNeeded(HeapObject* obj) {
 #ifdef ENABLE_LOGGING_AND_PROFILING
   if (obj->IsCode()) {
     PROFILE(CodeDeleteEvent(obj->address()));
   } else if (obj->IsJSFunction()) {
     PROFILE(FunctionDeleteEvent(obj->address()));
   }
 #endif
 }


 int MarkCompactCollector::SizeOfMarkedObject(HeapObject* obj) {
   MapWord map_word = obj->map_word();
   map_word.ClearMark();
   return obj->SizeFromMap(map_word.ToMap());
 }


 void MarkCompactCollector::Initialize() {
   StaticPointersToNewGenUpdatingVisitor::Initialize();
   StaticMarkingVisitor::Initialize();
 }


 } }  // namespace v8::internal