Chromium Code Reviews

Diff: src/heap/heap-inl.h

Issue 1314863003: [heap] More flag cleanup. (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Fix compilation and add const to parameters (created 5 years, 4 months ago)
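
In essence, the patch removes the CollectGarbage overload that threaded only v8::GCCallbackFlags and replaces it with one that also takes the heap's GC flags explicitly, resetting both flag sets once the collection finishes. In outline (signatures copied from the diff below):

    // Removed: only the callback flags were passed through.
    bool CollectGarbage(AllocationSpace space, const char* gc_reason,
                        const v8::GCCallbackFlags callbackFlags);

    // Added: both flag sets are explicit parameters, installed before the
    // collection and reset to kNoGCFlags/kNoGCCallbackFlags afterwards.
    bool CollectGarbage(AllocationSpace space, const char* gc_reason,
                        const GCFlags flags,
                        const GCCallbackFlags callback_flags);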
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #ifndef V8_HEAP_HEAP_INL_H_
 #define V8_HEAP_HEAP_INL_H_

 #include <cmath>

 #include "src/base/platform/platform.h"
(...skipping 479 matching lines...)
          memento_address + HeapObject::kHeaderSize <= top ||
          !NewSpacePage::OnSamePage(memento_address, top - 1));
   if (memento_address == top) return NULL;

   AllocationMemento* memento = AllocationMemento::cast(candidate);
   if (!memento->IsValid()) return NULL;
   return memento;
 }

+bool Heap::CollectGarbage(AllocationSpace space, const char* gc_reason,
+                          const GCFlags flags,
+                          const GCCallbackFlags callback_flags) {
+  const char* collector_reason = nullptr;
+  GarbageCollector collector = SelectGarbageCollector(space, &collector_reason);
+  set_current_gc_flags(flags);
+  current_gc_callback_flags_ = callback_flags;
+  const bool next_gc_likely_to_collect_more =
+      CollectGarbage(collector, gc_reason, collector_reason);
+  set_current_gc_flags(kNoGCFlags);
+  current_gc_callback_flags_ = kNoGCCallbackFlags;
+  return next_gc_likely_to_collect_more;
+}
+
+
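Every caller of the new overload now states both flag sets explicitly. A minimal sketch of a call site (the space and reason string are illustrative, not taken from the CL; the flag constants are the ones this patch uses):

    // Illustrative only: OLD_SPACE and the reason string are made up.
    heap->CollectGarbage(OLD_SPACE, "example reason",
                         Heap::kNoGCFlags, kNoGCCallbackFlags);

Note that the flags are installed before the collection runs and unconditionally reset on exit, so a caller cannot accidentally leak its flags into a later, unrelated GC.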
 void Heap::UpdateAllocationSiteFeedback(HeapObject* object,
                                         ScratchpadSlotMode mode) {
   Heap* heap = object->GetHeap();
   DCHECK(heap->InFromSpace(object));

   if (!FLAG_allocation_site_pretenuring ||
       !AllocationSite::CanTrack(object->map()->instance_type()))
     return;

   AllocationMemento* memento = heap->FindAllocationMemento(object);
(...skipping 25 matching lines...)

   UpdateAllocationSiteFeedback(object, IGNORE_SCRATCHPAD_SLOT);

   // AllocationMementos are unrooted and shouldn't survive a scavenge
   DCHECK(object->map() != object->GetHeap()->allocation_memento_map());
   // Call the slow part of scavenge object.
   return ScavengeObjectSlow(p, object);
 }

-bool Heap::CollectGarbage(AllocationSpace space, const char* gc_reason,
-                          const v8::GCCallbackFlags callbackFlags) {
-  const char* collector_reason = NULL;
-  GarbageCollector collector = SelectGarbageCollector(space, &collector_reason);
-  return CollectGarbage(collector, gc_reason, collector_reason, callbackFlags);
-}
-
-
 Isolate* Heap::isolate() {
   return reinterpret_cast<Isolate*>(
       reinterpret_cast<intptr_t>(this) -
       reinterpret_cast<size_t>(reinterpret_cast<Isolate*>(16)->heap()) + 16);
 }

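Heap::isolate() above recovers the enclosing Isolate from the Heap's own address without storing a back pointer: it asks a fake Isolate placed at address 16 where its heap lives, and subtracts that offset. A self-contained sketch of the same idiom (names are ours, not V8's):

    #include <cstdint>
    #include <cstdio>

    struct Container;          // stands in for Isolate
    struct Member {            // stands in for Heap
      Container* container();
    };
    struct Container {
      long some_other_field;   // the member need not be first
      Member member;
    };

    Container* Member::container() {
      // Pretend a Container sits at address 16 and ask where its `member`
      // lands; the difference is the member's offset within Container.
      // Formally undefined behavior, but exactly what Heap::isolate()
      // does; offsetof() would be the portable alternative.
      intptr_t offset =
          reinterpret_cast<intptr_t>(
              &reinterpret_cast<Container*>(16)->member) - 16;
      return reinterpret_cast<Container*>(
          reinterpret_cast<intptr_t>(this) - offset);
    }

    int main() {
      Container c;
      std::printf("%s\n", c.member.container() == &c ? "recovered" : "broken");
    }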
 // Calls the FUNCTION_CALL function and retries it up to three times
 // to guarantee that any allocations performed during the call will
 // succeed if there's enough memory.

 // Warning: Do not use the identifiers __object__, __maybe_object__ or
 // __scope__ in a call to this macro.

 #define RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE) \
   if (__allocation__.To(&__object__)) {                   \
     DCHECK(__object__ != (ISOLATE)->heap()->exception()); \
     RETURN_VALUE;                                         \
   }

 #define CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY)  \
   do {                                                                      \
     AllocationResult __allocation__ = FUNCTION_CALL;                        \
     Object* __object__ = NULL;                                              \
     RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE)                       \
     /* Two GCs before panicking. In newspace will almost always succeed. */ \
     for (int __i__ = 0; __i__ < 2; __i__++) {                               \
       (ISOLATE)->heap()->CollectGarbage(__allocation__.RetrySpace(),        \
-                                        "allocation failure");              \
+                                        "allocation failure",               \
+                                        Heap::kNoGCFlags, kNoGCCallbackFlags); \
       __allocation__ = FUNCTION_CALL;                                       \
       RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE)                     \
     }                                                                       \
     (ISOLATE)->counters()->gc_last_resort_from_handles()->Increment();      \
     (ISOLATE)->heap()->CollectAllAvailableGarbage("last resort gc");        \
     {                                                                       \
       AlwaysAllocateScope __scope__(ISOLATE);                               \
       __allocation__ = FUNCTION_CALL;                                       \
     }                                                                       \
     RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE)                       \
     /* TODO(1181417): Fix this. */                                          \
     v8::internal::Heap::FatalProcessOutOfMemory("CALL_AND_RETRY_LAST", true); \
     RETURN_EMPTY;                                                           \
   } while (false)

 #define CALL_AND_RETRY_OR_DIE(ISOLATE, FUNCTION_CALL, RETURN_VALUE, \
                               RETURN_EMPTY)                         \
   CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY)

 #define CALL_HEAP_FUNCTION(ISOLATE, FUNCTION_CALL, TYPE)                      \
   CALL_AND_RETRY_OR_DIE(ISOLATE, FUNCTION_CALL,                               \
                         return Handle<TYPE>(TYPE::cast(__object__), ISOLATE), \
                         return Handle<TYPE>())
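CALL_HEAP_FUNCTION is the entry point the handle/factory layer uses: it runs the allocation, on failure collects garbage up to two times (now with explicit Heap::kNoGCFlags/kNoGCCallbackFlags), then falls back to a last-resort full collection before aborting. A hypothetical wrapper in that style (our example, not part of the CL; the allocator name and its AllocationResult return type are assumptions):

    // Hypothetical factory-style helper. Assumes Heap::AllocateFixedArray(int)
    // returns an AllocationResult, as the macro requires. The macro itself
    // supplies every return path, including the empty-handle fallback.
    Handle<FixedArray> NewFixedArrayWithRetry(Isolate* isolate, int length) {
      CALL_HEAP_FUNCTION(isolate,
                         isolate->heap()->AllocateFixedArray(length),
                         FixedArray);
    }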
(...skipping 174 matching lines...)

 void VerifySmisVisitor::VisitPointers(Object** start, Object** end) {
   for (Object** current = start; current < end; current++) {
     CHECK((*current)->IsSmi());
   }
 }
 }
 }  // namespace v8::internal

 #endif  // V8_HEAP_HEAP_INL_H_
