Chromium Code Reviews

Diff: src/heap-inl.h

Issue 430503007: Rename ASSERT* to DCHECK*. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: REBASE and fixes (created 6 years, 4 months ago)
// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_INL_H_
#define V8_HEAP_INL_H_

#include <cmath>

#include "src/base/platform/platform.h"
(...skipping 11 matching lines...)

void PromotionQueue::insert(HeapObject* target, int size) {
  if (emergency_stack_ != NULL) {
    emergency_stack_->Add(Entry(target, size));
    return;
  }

  if (NewSpacePage::IsAtStart(reinterpret_cast<Address>(rear_))) {
    NewSpacePage* rear_page =
        NewSpacePage::FromAddress(reinterpret_cast<Address>(rear_));
-    ASSERT(!rear_page->prev_page()->is_anchor());
+    DCHECK(!rear_page->prev_page()->is_anchor());
    rear_ = reinterpret_cast<intptr_t*>(rear_page->prev_page()->area_end());
    ActivateGuardIfOnTheSamePage();
  }

  if (guard_) {
-    ASSERT(GetHeadPage() ==
+    DCHECK(GetHeadPage() ==
           Page::FromAllocationTop(reinterpret_cast<Address>(limit_)));

    if ((rear_ - 2) < limit_) {
      RelocateQueueHead();
      emergency_stack_->Add(Entry(target, size));
      return;
    }
  }

  *(--rear_) = reinterpret_cast<intptr_t>(target);
(...skipping 62 matching lines...)
    if (!allocation.To(&result)) return allocation;
  }

  // String maps are all immortal immovable objects.
  result->set_map_no_write_barrier(map);
  // Set length and hash fields of the allocated string.
  String* answer = String::cast(result);
  answer->set_length(str.length());
  answer->set_hash_field(hash_field);

-  ASSERT_EQ(size, answer->Size());
+  DCHECK_EQ(size, answer->Size());

  // Fill in the characters.
  MemCopy(answer->address() + SeqOneByteString::kHeaderSize, str.start(),
          str.length());

  return answer;
}


AllocationResult Heap::AllocateTwoByteInternalizedString(Vector<const uc16> str,
                                                         uint32_t hash_field) {
  CHECK_GE(String::kMaxLength, str.length());
  // Compute map and object size.
  Map* map = internalized_string_map();
  int size = SeqTwoByteString::SizeFor(str.length());
  AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, TENURED);

  // Allocate string.
  HeapObject* result;
  { AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
    if (!allocation.To(&result)) return allocation;
  }

  result->set_map(map);
  // Set length and hash fields of the allocated string.
  String* answer = String::cast(result);
  answer->set_length(str.length());
  answer->set_hash_field(hash_field);

-  ASSERT_EQ(size, answer->Size());
+  DCHECK_EQ(size, answer->Size());

  // Fill in the characters.
  MemCopy(answer->address() + SeqTwoByteString::kHeaderSize, str.start(),
          str.length() * kUC16Size);

  return answer;
}

AllocationResult Heap::CopyFixedArray(FixedArray* src) {
  if (src->length() == 0) return src;
  return CopyFixedArrayWithMap(src, src->map());
}


AllocationResult Heap::CopyFixedDoubleArray(FixedDoubleArray* src) {
  if (src->length() == 0) return src;
  return CopyFixedDoubleArrayWithMap(src, src->map());
}


AllocationResult Heap::CopyConstantPoolArray(ConstantPoolArray* src) {
  if (src->length() == 0) return src;
  return CopyConstantPoolArrayWithMap(src, src->map());
}


AllocationResult Heap::AllocateRaw(int size_in_bytes,
                                   AllocationSpace space,
                                   AllocationSpace retry_space) {
-  ASSERT(AllowHandleAllocation::IsAllowed());
-  ASSERT(AllowHeapAllocation::IsAllowed());
-  ASSERT(gc_state_ == NOT_IN_GC);
+  DCHECK(AllowHandleAllocation::IsAllowed());
+  DCHECK(AllowHeapAllocation::IsAllowed());
+  DCHECK(gc_state_ == NOT_IN_GC);
#ifdef DEBUG
  if (FLAG_gc_interval >= 0 &&
      AllowAllocationFailure::IsAllowed(isolate_) &&
      Heap::allocation_timeout_-- <= 0) {
    return AllocationResult::Retry(space);
  }
  isolate_->counters()->objs_since_last_full()->Increment();
  isolate_->counters()->objs_since_last_young()->Increment();
#endif

(...skipping 24 matching lines...)
      // Large code objects are allocated in large object space.
      allocation = lo_space_->AllocateRaw(size_in_bytes, EXECUTABLE);
    }
  } else if (LO_SPACE == space) {
    allocation = lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE);
  } else if (CELL_SPACE == space) {
    allocation = cell_space_->AllocateRaw(size_in_bytes);
  } else if (PROPERTY_CELL_SPACE == space) {
    allocation = property_cell_space_->AllocateRaw(size_in_bytes);
  } else {
-    ASSERT(MAP_SPACE == space);
+    DCHECK(MAP_SPACE == space);
    allocation = map_space_->AllocateRaw(size_in_bytes);
  }
  if (allocation.To(&object)) {
    OnAllocationEvent(object, size_in_bytes);
  } else {
    old_gen_exhausted_ = true;
  }
  return allocation;
}

(...skipping 76 matching lines...)
}


void Heap::PrintAlloctionsHash() {
  uint32_t hash = StringHasher::GetHashCore(raw_allocations_hash_);
  PrintF("\n### Allocations = %u, hash = 0x%08x\n", allocations_count_, hash);
}


void Heap::FinalizeExternalString(String* string) {
-  ASSERT(string->IsExternalString());
+  DCHECK(string->IsExternalString());
  v8::String::ExternalStringResourceBase** resource_addr =
      reinterpret_cast<v8::String::ExternalStringResourceBase**>(
          reinterpret_cast<byte*>(string) +
          ExternalString::kResourceOffset -
          kHeapObjectTag);

  // Dispose of the C++ object if it has not already been disposed.
  if (*resource_addr != NULL) {
    (*resource_addr)->Dispose();
    *resource_addr = NULL;
  }
}


bool Heap::InNewSpace(Object* object) {
  bool result = new_space_.Contains(object);
-  ASSERT(!result ||                 // Either not in new space
+  DCHECK(!result ||                 // Either not in new space
         gc_state_ != NOT_IN_GC ||   // ... or in the middle of GC
         InToSpace(object));         // ... or in to-space (where we allocate).
  return result;
}


bool Heap::InNewSpace(Address address) {
  return new_space_.Contains(address);
}

(...skipping 65 matching lines...)
}


AllocationSpace Heap::TargetSpaceId(InstanceType type) {
  // Heap numbers and sequential strings are promoted to old data space, all
  // other object types are promoted to old pointer space.  We do not use
  // object->IsHeapNumber() and object->IsSeqString() because we already
  // know that object has the heap object tag.

  // These objects are never allocated in new space.
-  ASSERT(type != MAP_TYPE);
-  ASSERT(type != CODE_TYPE);
-  ASSERT(type != ODDBALL_TYPE);
-  ASSERT(type != CELL_TYPE);
-  ASSERT(type != PROPERTY_CELL_TYPE);
+  DCHECK(type != MAP_TYPE);
+  DCHECK(type != CODE_TYPE);
+  DCHECK(type != ODDBALL_TYPE);
+  DCHECK(type != CELL_TYPE);
+  DCHECK(type != PROPERTY_CELL_TYPE);

  if (type <= LAST_NAME_TYPE) {
    if (type == SYMBOL_TYPE) return OLD_POINTER_SPACE;
-    ASSERT(type < FIRST_NONSTRING_TYPE);
+    DCHECK(type < FIRST_NONSTRING_TYPE);
    // There are four string representations: sequential strings, external
    // strings, cons strings, and sliced strings.
    // Only the latter two contain non-map-word pointers to heap objects.
    return ((type & kIsIndirectStringMask) == kIsIndirectStringTag)
        ? OLD_POINTER_SPACE
        : OLD_DATA_SPACE;
  } else {
    return (type <= LAST_DATA_TYPE) ? OLD_DATA_SPACE : OLD_POINTER_SPACE;
  }
}
(...skipping 43 matching lines...)


void Heap::CopyBlock(Address dst, Address src, int byte_size) {
  CopyWords(reinterpret_cast<Object**>(dst),
            reinterpret_cast<Object**>(src),
            static_cast<size_t>(byte_size / kPointerSize));
}


void Heap::MoveBlock(Address dst, Address src, int byte_size) {
-  ASSERT(IsAligned(byte_size, kPointerSize));
+  DCHECK(IsAligned(byte_size, kPointerSize));

  int size_in_words = byte_size / kPointerSize;

  if ((dst < src) || (dst >= (src + byte_size))) {
    Object** src_slot = reinterpret_cast<Object**>(src);
    Object** dst_slot = reinterpret_cast<Object**>(dst);
    Object** end_slot = src_slot + size_in_words;

    while (src_slot != end_slot) {
      *dst_slot++ = *src_slot++;
(...skipping 26 matching lines...)

  // Either the object is the last object in the new space, or there is another
  // object of at least word size (the header map word) following it, so
  // suffices to compare ptr and top here. Note that technically we do not have
  // to compare with the current top pointer of the from space page during GC,
  // since we always install filler objects above the top pointer of a from
  // space page when performing a garbage collection. However, always performing
  // the test makes it possible to have a single, unified version of
  // FindAllocationMemento that is used both by the GC and the mutator.
  Address top = NewSpaceTop();
-  ASSERT(memento_address == top ||
+  DCHECK(memento_address == top ||
         memento_address + HeapObject::kHeaderSize <= top ||
         !NewSpacePage::OnSamePage(memento_address, top));
  if (memento_address == top) return NULL;

  AllocationMemento* memento = AllocationMemento::cast(candidate);
  if (!memento->IsValid()) return NULL;
  return memento;
}


void Heap::UpdateAllocationSiteFeedback(HeapObject* object,
                                        ScratchpadSlotMode mode) {
  Heap* heap = object->GetHeap();
-  ASSERT(heap->InFromSpace(object));
+  DCHECK(heap->InFromSpace(object));

  if (!FLAG_allocation_site_pretenuring ||
      !AllocationSite::CanTrack(object->map()->instance_type())) return;

  AllocationMemento* memento = heap->FindAllocationMemento(object);
  if (memento == NULL) return;

  if (memento->GetAllocationSite()->IncrementMementoFoundCount()) {
    heap->AddAllocationSiteToScratchpad(memento->GetAllocationSite(), mode);
  }
}


void Heap::ScavengeObject(HeapObject** p, HeapObject* object) {
-  ASSERT(object->GetIsolate()->heap()->InFromSpace(object));
+  DCHECK(object->GetIsolate()->heap()->InFromSpace(object));

  // We use the first word (where the map pointer usually is) of a heap
  // object to record the forwarding pointer. A forwarding pointer can
  // point to an old space, the code space, or the to space of the new
  // generation.
  MapWord first_word = object->map_word();

  // If the first word is a forwarding address, the object has already been
  // copied.
  if (first_word.IsForwardingAddress()) {
    HeapObject* dest = first_word.ToForwardingAddress();
-    ASSERT(object->GetIsolate()->heap()->InFromSpace(*p));
+    DCHECK(object->GetIsolate()->heap()->InFromSpace(*p));
    *p = dest;
    return;
  }

  UpdateAllocationSiteFeedback(object, IGNORE_SCRATCHPAD_SLOT);

  // AllocationMementos are unrooted and shouldn't survive a scavenge
-  ASSERT(object->map() != object->GetHeap()->allocation_memento_map());
+  DCHECK(object->map() != object->GetHeap()->allocation_memento_map());
  // Call the slow part of scavenge object.
  return ScavengeObjectSlow(p, object);
}


bool Heap::CollectGarbage(AllocationSpace space,
                          const char* gc_reason,
                          const v8::GCCallbackFlags callbackFlags) {
  const char* collector_reason = NULL;
  GarbageCollector collector = SelectGarbageCollector(space, &collector_reason);
  return CollectGarbage(collector, gc_reason, collector_reason, callbackFlags);
}


Isolate* Heap::isolate() {
  return reinterpret_cast<Isolate*>(reinterpret_cast<intptr_t>(this) -
      reinterpret_cast<size_t>(reinterpret_cast<Isolate*>(4)->heap()) + 4);
}


// Calls the FUNCTION_CALL function and retries it up to three times
// to guarantee that any allocations performed during the call will
// succeed if there's enough memory.

// Warning: Do not use the identifiers __object__, __maybe_object__ or
// __scope__ in a call to this macro.

#define RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE)        \
  if (__allocation__.To(&__object__)) {                          \
-    ASSERT(__object__ != (ISOLATE)->heap()->exception());        \
+    DCHECK(__object__ != (ISOLATE)->heap()->exception());        \
    RETURN_VALUE;                                                 \
  }

#define CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY)  \
  do {                                                                      \
    AllocationResult __allocation__ = FUNCTION_CALL;                        \
    Object* __object__ = NULL;                                              \
    RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE)                       \
    (ISOLATE)->heap()->CollectGarbage(__allocation__.RetrySpace(),          \
                                      "allocation failure");                \
(...skipping 24 matching lines...)
                        FUNCTION_CALL,                                      \
                        return Handle<TYPE>(TYPE::cast(__object__), ISOLATE), \
                        return Handle<TYPE>())


#define CALL_HEAP_FUNCTION_VOID(ISOLATE, FUNCTION_CALL) \
  CALL_AND_RETRY_OR_DIE(ISOLATE, FUNCTION_CALL, return, return)


void ExternalStringTable::AddString(String* string) {
-  ASSERT(string->IsExternalString());
+  DCHECK(string->IsExternalString());
  if (heap_->InNewSpace(string)) {
    new_space_strings_.Add(string);
  } else {
    old_space_strings_.Add(string);
  }
}


void ExternalStringTable::Iterate(ObjectVisitor* v) {
  if (!new_space_strings_.is_empty()) {
    Object** start = &new_space_strings_[0];
    v->VisitPointers(start, start + new_space_strings_.length());
  }
  if (!old_space_strings_.is_empty()) {
    Object** start = &old_space_strings_[0];
    v->VisitPointers(start, start + old_space_strings_.length());
  }
}


// Verify() is inline to avoid ifdef-s around its calls in release
// mode.
void ExternalStringTable::Verify() {
#ifdef DEBUG
  for (int i = 0; i < new_space_strings_.length(); ++i) {
    Object* obj = Object::cast(new_space_strings_[i]);
-    ASSERT(heap_->InNewSpace(obj));
-    ASSERT(obj != heap_->the_hole_value());
+    DCHECK(heap_->InNewSpace(obj));
+    DCHECK(obj != heap_->the_hole_value());
  }
  for (int i = 0; i < old_space_strings_.length(); ++i) {
    Object* obj = Object::cast(old_space_strings_[i]);
-    ASSERT(!heap_->InNewSpace(obj));
-    ASSERT(obj != heap_->the_hole_value());
+    DCHECK(!heap_->InNewSpace(obj));
+    DCHECK(obj != heap_->the_hole_value());
  }
#endif
}


void ExternalStringTable::AddOldString(String* string) {
-  ASSERT(string->IsExternalString());
-  ASSERT(!heap_->InNewSpace(string));
+  DCHECK(string->IsExternalString());
+  DCHECK(!heap_->InNewSpace(string));
  old_space_strings_.Add(string);
}


void ExternalStringTable::ShrinkNewStrings(int position) {
  new_space_strings_.Rewind(position);
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    Verify();
  }
(...skipping 16 matching lines...)
  set_instanceof_cache_function(the_hole_value());
}


AlwaysAllocateScope::AlwaysAllocateScope(Isolate* isolate)
    : heap_(isolate->heap()), daf_(isolate) {
  // We shouldn't hit any nested scopes, because that requires
  // non-handle code to call handle code. The code still works but
  // performance will degrade, so we want to catch this situation
  // in debug mode.
-  ASSERT(heap_->always_allocate_scope_depth_ == 0);
+  DCHECK(heap_->always_allocate_scope_depth_ == 0);
  heap_->always_allocate_scope_depth_++;
}


AlwaysAllocateScope::~AlwaysAllocateScope() {
  heap_->always_allocate_scope_depth_--;
-  ASSERT(heap_->always_allocate_scope_depth_ == 0);
+  DCHECK(heap_->always_allocate_scope_depth_ == 0);
}


#ifdef VERIFY_HEAP
NoWeakObjectVerificationScope::NoWeakObjectVerificationScope() {
  Isolate* isolate = Isolate::Current();
  isolate->heap()->no_weak_object_verification_scope_depth_++;
}


(...skipping 33 matching lines...)
void VerifySmisVisitor::VisitPointers(Object** start, Object** end) {
  for (Object** current = start; current < end; current++) {
    CHECK((*current)->IsSmi());
  }
}


} }  // namespace v8::internal

#endif  // V8_HEAP_INL_H_
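
The change in this file is mechanical: the debug-only ASSERT, ASSERT_EQ, etc. become the Chromium-style DCHECK, DCHECK_EQ, while the always-compiled CHECK/CHECK_GE calls are left untouched. As a rough illustration of the semantics the renamed macros carry, here is a minimal standalone sketch of a DCHECK-style macro. It is an illustration only, not V8's actual definition (the real macros are defined elsewhere in the tree), and keying the check off a DEBUG define and aborting on failure are assumptions made for the example.

    #include <cstdio>
    #include <cstdlib>

    #ifdef DEBUG
    // Debug builds: evaluate the condition and abort with a message on failure.
    #define DCHECK(condition)                                             \
      do {                                                                \
        if (!(condition)) {                                               \
          std::fprintf(stderr, "Debug check failed: %s\n", #condition);   \
          std::abort();                                                   \
        }                                                                 \
      } while (false)
    #else
    // Release builds: the check compiles away entirely.
    #define DCHECK(condition) ((void)0)
    #endif

    // Comparison form, used as DCHECK_EQ(expected, actual) throughout the patch.
    #define DCHECK_EQ(expected, actual) DCHECK((expected) == (actual))

    int main() {
      int size = 16;
      DCHECK_EQ(16, size);  // No-op in release builds; aborts in debug on failure.
      return 0;
    }

In a sketch like this the condition is not evaluated at all in release builds, which is the usual reason side-effecting expressions do not belong inside DCHECK, and presumably why checks that must also hold in production, such as the CHECK_GE above, stay as CHECKs rather than being renamed.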
