// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_INL_H_
#define V8_HEAP_INL_H_

#include <cmath>

#include "src/base/platform/platform.h"
#include "src/cpu-profiler.h"
#include "src/heap.h"
#include "src/heap-profiler.h"
#include "src/isolate.h"
#include "src/list-inl.h"
#include "src/objects.h"
#include "src/store-buffer.h"
#include "src/store-buffer-inl.h"

namespace v8 {
namespace internal {

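// Editorial note (inferred from the code below): promotion-queue entries are
// two words wide (size, then target pointer) and grow downward from rear_
// toward limit_. If the emergency stack is already in use, or if writing two
// more words would collide with the page new-space allocation is currently
// using (the guard_ case), the entry is diverted to the emergency stack.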
void PromotionQueue::insert(HeapObject* target, int size) {
  if (emergency_stack_ != NULL) {
    emergency_stack_->Add(Entry(target, size));
    return;
  }

  if (NewSpacePage::IsAtStart(reinterpret_cast<Address>(rear_))) {
    NewSpacePage* rear_page =
        NewSpacePage::FromAddress(reinterpret_cast<Address>(rear_));
    DCHECK(!rear_page->prev_page()->is_anchor());
    rear_ = reinterpret_cast<intptr_t*>(rear_page->prev_page()->area_end());
    ActivateGuardIfOnTheSamePage();
  }

  if (guard_) {
    DCHECK(GetHeadPage() ==
           Page::FromAllocationTop(reinterpret_cast<Address>(limit_)));

    if ((rear_ - 2) < limit_) {
      RelocateQueueHead();
      emergency_stack_->Add(Entry(target, size));
      return;
    }
  }

  *(--rear_) = reinterpret_cast<intptr_t>(target);
  *(--rear_) = size;
  // Assert no overflow into live objects.
#ifdef DEBUG
  SemiSpace::AssertValidRange(target->GetIsolate()->heap()->new_space()->top(),
                              reinterpret_cast<Address>(rear_));
#endif
}


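// Editorial note (inferred): the guard is latched once the promotion queue's
// head page coincides with the page new-space is currently allocating on;
// from then on, insert() must check rear_ against limit_ before writing.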
void PromotionQueue::ActivateGuardIfOnTheSamePage() {
  guard_ = guard_ ||
      heap_->new_space()->active_space()->current_page()->address() ==
      GetHeadPage()->address();
}


template<>
inline bool Heap::IsOneByte(Vector<const char> str, int chars) {
  // TODO(dcarney): incorporate Latin-1 check when Latin-1 is supported?
  // ASCII only check.
  return chars == str.length();
}


template<>
inline bool Heap::IsOneByte(String* str, int chars) {
  return str->IsOneByteRepresentation();
}


AllocationResult Heap::AllocateInternalizedStringFromUtf8(
    Vector<const char> str, int chars, uint32_t hash_field) {
  if (IsOneByte(str, chars)) {
    return AllocateOneByteInternalizedString(
        Vector<const uint8_t>::cast(str), hash_field);
  }
  return AllocateInternalizedStringImpl<false>(str, chars, hash_field);
}


template<typename T>
AllocationResult Heap::AllocateInternalizedStringImpl(
    T t, int chars, uint32_t hash_field) {
  if (IsOneByte(t, chars)) {
    return AllocateInternalizedStringImpl<true>(t, chars, hash_field);
  }
  return AllocateInternalizedStringImpl<false>(t, chars, hash_field);
}


AllocationResult Heap::AllocateOneByteInternalizedString(
    Vector<const uint8_t> str,
    uint32_t hash_field) {
  CHECK_GE(String::kMaxLength, str.length());
  // Compute map and object size.
  Map* map = ascii_internalized_string_map();
  int size = SeqOneByteString::SizeFor(str.length());
  AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, TENURED);

  // Allocate string.
  HeapObject* result;
  { AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
    if (!allocation.To(&result)) return allocation;
  }

  // String maps are all immortal immovable objects.
  result->set_map_no_write_barrier(map);
  // Set length and hash fields of the allocated string.
  String* answer = String::cast(result);
  answer->set_length(str.length());
  answer->set_hash_field(hash_field);

  DCHECK_EQ(size, answer->Size());

  // Fill in the characters.
  MemCopy(answer->address() + SeqOneByteString::kHeaderSize, str.start(),
          str.length());

  return answer;
}


AllocationResult Heap::AllocateTwoByteInternalizedString(Vector<const uc16> str,
                                                         uint32_t hash_field) {
  CHECK_GE(String::kMaxLength, str.length());
  // Compute map and object size.
  Map* map = internalized_string_map();
  int size = SeqTwoByteString::SizeFor(str.length());
  AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, TENURED);

  // Allocate string.
  HeapObject* result;
  { AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
    if (!allocation.To(&result)) return allocation;
  }

  result->set_map(map);
  // Set length and hash fields of the allocated string.
  String* answer = String::cast(result);
  answer->set_length(str.length());
  answer->set_hash_field(hash_field);

  DCHECK_EQ(size, answer->Size());

  // Fill in the characters.
  MemCopy(answer->address() + SeqTwoByteString::kHeaderSize, str.start(),
          str.length() * kUC16Size);

  return answer;
}


AllocationResult Heap::CopyFixedArray(FixedArray* src) {
  if (src->length() == 0) return src;
  return CopyFixedArrayWithMap(src, src->map());
}


AllocationResult Heap::CopyFixedDoubleArray(FixedDoubleArray* src) {
  if (src->length() == 0) return src;
  return CopyFixedDoubleArrayWithMap(src, src->map());
}


AllocationResult Heap::CopyConstantPoolArray(ConstantPoolArray* src) {
  if (src->length() == 0) return src;
  return CopyConstantPoolArrayWithMap(src, src->map());
}


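// Editorial note: this is the generic allocation entry point. New-space
// requests are tried first and, when always_allocate() is in effect, retried
// in |retry_space| on failure; all other spaces route directly to their
// space-specific allocators, with oversized code objects falling through to
// the large-object space.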
AllocationResult Heap::AllocateRaw(int size_in_bytes,
                                   AllocationSpace space,
                                   AllocationSpace retry_space) {
  DCHECK(AllowHandleAllocation::IsAllowed());
  DCHECK(AllowHeapAllocation::IsAllowed());
  DCHECK(gc_state_ == NOT_IN_GC);
#ifdef DEBUG
  if (FLAG_gc_interval >= 0 &&
      AllowAllocationFailure::IsAllowed(isolate_) &&
      Heap::allocation_timeout_-- <= 0) {
    return AllocationResult::Retry(space);
  }
  isolate_->counters()->objs_since_last_full()->Increment();
  isolate_->counters()->objs_since_last_young()->Increment();
#endif

  HeapObject* object;
  AllocationResult allocation;
  if (NEW_SPACE == space) {
    allocation = new_space_.AllocateRaw(size_in_bytes);
    if (always_allocate() &&
        allocation.IsRetry() &&
        retry_space != NEW_SPACE) {
      space = retry_space;
    } else {
      if (allocation.To(&object)) {
        OnAllocationEvent(object, size_in_bytes);
      }
      return allocation;
    }
  }

  if (OLD_POINTER_SPACE == space) {
    allocation = old_pointer_space_->AllocateRaw(size_in_bytes);
  } else if (OLD_DATA_SPACE == space) {
    allocation = old_data_space_->AllocateRaw(size_in_bytes);
  } else if (CODE_SPACE == space) {
    if (size_in_bytes <= code_space()->AreaSize()) {
      allocation = code_space_->AllocateRaw(size_in_bytes);
    } else {
      // Large code objects are allocated in large object space.
      allocation = lo_space_->AllocateRaw(size_in_bytes, EXECUTABLE);
    }
  } else if (LO_SPACE == space) {
    allocation = lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE);
  } else if (CELL_SPACE == space) {
    allocation = cell_space_->AllocateRaw(size_in_bytes);
  } else if (PROPERTY_CELL_SPACE == space) {
    allocation = property_cell_space_->AllocateRaw(size_in_bytes);
  } else {
    DCHECK(MAP_SPACE == space);
    allocation = map_space_->AllocateRaw(size_in_bytes);
  }
  if (allocation.To(&object)) {
    OnAllocationEvent(object, size_in_bytes);
  } else {
    old_gen_exhausted_ = true;
  }
  return allocation;
}


void Heap::OnAllocationEvent(HeapObject* object, int size_in_bytes) {
  HeapProfiler* profiler = isolate_->heap_profiler();
  if (profiler->is_tracking_allocations()) {
    profiler->AllocationEvent(object->address(), size_in_bytes);
  }

  if (FLAG_verify_predictable) {
    ++allocations_count_;

    UpdateAllocationsHash(object);
    UpdateAllocationsHash(size_in_bytes);

    if ((FLAG_dump_allocations_digest_at_alloc > 0) &&
        (--dump_allocations_hash_countdown_ == 0)) {
      dump_allocations_hash_countdown_ = FLAG_dump_allocations_digest_at_alloc;
      PrintAlloctionsHash();
    }
  }
}


void Heap::OnMoveEvent(HeapObject* target,
                       HeapObject* source,
                       int size_in_bytes) {
  HeapProfiler* heap_profiler = isolate_->heap_profiler();
  if (heap_profiler->is_tracking_object_moves()) {
    heap_profiler->ObjectMoveEvent(source->address(), target->address(),
                                   size_in_bytes);
  }

  if (isolate_->logger()->is_logging_code_events() ||
      isolate_->cpu_profiler()->is_profiling()) {
    if (target->IsSharedFunctionInfo()) {
      PROFILE(isolate_, SharedFunctionInfoMoveEvent(
          source->address(), target->address()));
    }
  }

  if (FLAG_verify_predictable) {
    ++allocations_count_;

    UpdateAllocationsHash(source);
    UpdateAllocationsHash(target);
    UpdateAllocationsHash(size_in_bytes);

    if ((FLAG_dump_allocations_digest_at_alloc > 0) &&
        (--dump_allocations_hash_countdown_ == 0)) {
      dump_allocations_hash_countdown_ = FLAG_dump_allocations_digest_at_alloc;
      PrintAlloctionsHash();
    }
  }
}


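// Editorial note: the hashed value packs the object's offset within its page
// into the low kPageSizeBits bits and the owning space's id above them, so
// the allocation digest is stable across runs with identical allocation
// sequences, which is the point of FLAG_verify_predictable.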
void Heap::UpdateAllocationsHash(HeapObject* object) {
  Address object_address = object->address();
  MemoryChunk* memory_chunk = MemoryChunk::FromAddress(object_address);
  AllocationSpace allocation_space = memory_chunk->owner()->identity();

  STATIC_ASSERT(kSpaceTagSize + kPageSizeBits <= 32);
  uint32_t value =
      static_cast<uint32_t>(object_address - memory_chunk->address()) |
      (static_cast<uint32_t>(allocation_space) << kPageSizeBits);

  UpdateAllocationsHash(value);
}


void Heap::UpdateAllocationsHash(uint32_t value) {
  uint16_t c1 = static_cast<uint16_t>(value);
  uint16_t c2 = static_cast<uint16_t>(value >> 16);
  raw_allocations_hash_ =
      StringHasher::AddCharacterCore(raw_allocations_hash_, c1);
  raw_allocations_hash_ =
      StringHasher::AddCharacterCore(raw_allocations_hash_, c2);
}


void Heap::PrintAlloctionsHash() {
  uint32_t hash = StringHasher::GetHashCore(raw_allocations_hash_);
  PrintF("\n### Allocations = %u, hash = 0x%08x\n", allocations_count_, hash);
}


void Heap::FinalizeExternalString(String* string) {
  DCHECK(string->IsExternalString());
  v8::String::ExternalStringResourceBase** resource_addr =
      reinterpret_cast<v8::String::ExternalStringResourceBase**>(
          reinterpret_cast<byte*>(string) +
          ExternalString::kResourceOffset -
          kHeapObjectTag);

  // Dispose of the C++ object if it has not already been disposed.
  if (*resource_addr != NULL) {
    (*resource_addr)->Dispose();
    *resource_addr = NULL;
  }
}


bool Heap::InNewSpace(Object* object) {
  bool result = new_space_.Contains(object);
  DCHECK(!result ||                 // Either not in new space
         gc_state_ != NOT_IN_GC ||  // ... or in the middle of GC
         InToSpace(object));        // ... or in to-space (where we allocate).
  return result;
}


bool Heap::InNewSpace(Address address) {
  return new_space_.Contains(address);
}


bool Heap::InFromSpace(Object* object) {
  return new_space_.FromSpaceContains(object);
}


bool Heap::InToSpace(Object* object) {
  return new_space_.ToSpaceContains(object);
}


bool Heap::InOldPointerSpace(Address address) {
  return old_pointer_space_->Contains(address);
}


bool Heap::InOldPointerSpace(Object* object) {
  return InOldPointerSpace(reinterpret_cast<Address>(object));
}


bool Heap::InOldDataSpace(Address address) {
  return old_data_space_->Contains(address);
}


bool Heap::InOldDataSpace(Object* object) {
  return InOldDataSpace(reinterpret_cast<Address>(object));
}


bool Heap::OldGenerationAllocationLimitReached() {
  if (!incremental_marking()->IsStopped()) return false;
  return OldGenerationSpaceAvailable() < 0;
}


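// Editorial note (inferred): an object should be promoted when it lies below
// the age mark, i.e. it was already live at the end of the previous scavenge
// and has therefore survived at least one new-space collection.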
bool Heap::ShouldBePromoted(Address old_address, int object_size) {
  NewSpacePage* page = NewSpacePage::FromAddress(old_address);
  Address age_mark = new_space_.age_mark();
  return page->IsFlagSet(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK) &&
         (!page->ContainsLimit(age_mark) || old_address < age_mark);
}


void Heap::RecordWrite(Address address, int offset) {
  if (!InNewSpace(address)) store_buffer_.Mark(address + offset);
}


void Heap::RecordWrites(Address address, int start, int len) {
  if (!InNewSpace(address)) {
    for (int i = 0; i < len; i++) {
      store_buffer_.Mark(address + start + i * kPointerSize);
    }
  }
}


OldSpace* Heap::TargetSpace(HeapObject* object) {
  InstanceType type = object->map()->instance_type();
  AllocationSpace space = TargetSpaceId(type);
  return (space == OLD_POINTER_SPACE)
      ? old_pointer_space_
      : old_data_space_;
}


AllocationSpace Heap::TargetSpaceId(InstanceType type) {
  // Heap numbers and sequential strings are promoted to old data space, all
  // other object types are promoted to old pointer space.  We do not use
  // object->IsHeapNumber() and object->IsSeqString() because we already
  // know that object has the heap object tag.

  // These objects are never allocated in new space.
  DCHECK(type != MAP_TYPE);
  DCHECK(type != CODE_TYPE);
  DCHECK(type != ODDBALL_TYPE);
  DCHECK(type != CELL_TYPE);
  DCHECK(type != PROPERTY_CELL_TYPE);

  if (type <= LAST_NAME_TYPE) {
    if (type == SYMBOL_TYPE) return OLD_POINTER_SPACE;
    DCHECK(type < FIRST_NONSTRING_TYPE);
    // There are four string representations: sequential strings, external
    // strings, cons strings, and sliced strings.
    // Only the latter two contain non-map-word pointers to heap objects.
    return ((type & kIsIndirectStringMask) == kIsIndirectStringTag)
        ? OLD_POINTER_SPACE
        : OLD_DATA_SPACE;
  } else {
    return (type <= LAST_DATA_TYPE) ? OLD_DATA_SPACE : OLD_POINTER_SPACE;
  }
}


bool Heap::AllowedToBeMigrated(HeapObject* obj, AllocationSpace dst) {
  // Object migration is governed by the following rules:
  //
  // 1) Objects in new-space can be migrated to one of the old spaces
  //    that matches their target space or they stay in new-space.
  // 2) Objects in old-space stay in the same space when migrating.
  // 3) Fillers (two or more words) can migrate due to left-trimming of
  //    fixed arrays in new-space, old-data-space and old-pointer-space.
  // 4) Fillers (one word) can never migrate, they are skipped by
  //    incremental marking explicitly to prevent invalid pattern.
  // 5) Short external strings can end up in old pointer space when a cons
  //    string in old pointer space is made external (String::MakeExternal).
  //
  // Since this function is used for debugging only, we do not place
  // asserts here, but check everything explicitly.
  if (obj->map() == one_pointer_filler_map()) return false;
  InstanceType type = obj->map()->instance_type();
  MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
  AllocationSpace src = chunk->owner()->identity();
  switch (src) {
    case NEW_SPACE:
      return dst == src || dst == TargetSpaceId(type);
    case OLD_POINTER_SPACE:
      return dst == src && (dst == TargetSpaceId(type) || obj->IsFiller() ||
                            (obj->IsExternalString() &&
                             ExternalString::cast(obj)->is_short()));
    case OLD_DATA_SPACE:
      return dst == src && dst == TargetSpaceId(type);
    case CODE_SPACE:
      return dst == src && type == CODE_TYPE;
    case MAP_SPACE:
    case CELL_SPACE:
    case PROPERTY_CELL_SPACE:
    case LO_SPACE:
      return false;
    case INVALID_SPACE:
      break;
  }
  UNREACHABLE();
  return false;
}


void Heap::CopyBlock(Address dst, Address src, int byte_size) {
  CopyWords(reinterpret_cast<Object**>(dst),
            reinterpret_cast<Object**>(src),
            static_cast<size_t>(byte_size / kPointerSize));
}


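// Editorial note: when the destination starts below the source or past its
// end, a forward word-by-word copy is safe even if the ranges touch;
// otherwise (dst inside [src, src + byte_size)) the copy is delegated to
// MemMove, which handles overlap correctly.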
void Heap::MoveBlock(Address dst, Address src, int byte_size) {
  DCHECK(IsAligned(byte_size, kPointerSize));

  int size_in_words = byte_size / kPointerSize;

  if ((dst < src) || (dst >= (src + byte_size))) {
    Object** src_slot = reinterpret_cast<Object**>(src);
    Object** dst_slot = reinterpret_cast<Object**>(dst);
    Object** end_slot = src_slot + size_in_words;

    while (src_slot != end_slot) {
      *dst_slot++ = *src_slot++;
    }
  } else {
    MemMove(dst, src, static_cast<size_t>(byte_size));
  }
}


void Heap::ScavengePointer(HeapObject** p) {
  ScavengeObject(p, *p);
}


AllocationMemento* Heap::FindAllocationMemento(HeapObject* object) {
  // Check if there is potentially a memento behind the object. If
  // the last word of the memento is on another page we return
  // immediately.
  Address object_address = object->address();
  Address memento_address = object_address + object->Size();
  Address last_memento_word_address = memento_address + kPointerSize;
  if (!NewSpacePage::OnSamePage(object_address,
                                last_memento_word_address)) {
    return NULL;
  }

  HeapObject* candidate = HeapObject::FromAddress(memento_address);
  if (candidate->map() != allocation_memento_map()) return NULL;

  // Either the object is the last object in the new space, or there is
  // another object of at least word size (the header map word) following
  // it, so it suffices to compare ptr and top here. Note that technically
  // we do not have to compare with the current top pointer of the from
  // space page during GC, since we always install filler objects above the
  // top pointer of a from space page when performing a garbage collection.
  // However, always performing the test makes it possible to have a single,
  // unified version of FindAllocationMemento that is used both by the GC
  // and the mutator.
  Address top = NewSpaceTop();
  DCHECK(memento_address == top ||
         memento_address + HeapObject::kHeaderSize <= top ||
         !NewSpacePage::OnSamePage(memento_address, top));
  if (memento_address == top) return NULL;

  AllocationMemento* memento = AllocationMemento::cast(candidate);
  if (!memento->IsValid()) return NULL;
  return memento;
}


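// Editorial note (inferred): each time a memento is found behind a surviving
// object, the memento's AllocationSite found count is bumped; when
// IncrementMementoFoundCount() signals that the site needs (re)evaluation,
// the site is queued on the scratchpad for a later pretenuring decision.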
void Heap::UpdateAllocationSiteFeedback(HeapObject* object,
                                        ScratchpadSlotMode mode) {
  Heap* heap = object->GetHeap();
  DCHECK(heap->InFromSpace(object));

  if (!FLAG_allocation_site_pretenuring ||
      !AllocationSite::CanTrack(object->map()->instance_type())) return;

  AllocationMemento* memento = heap->FindAllocationMemento(object);
  if (memento == NULL) return;

  if (memento->GetAllocationSite()->IncrementMementoFoundCount()) {
    heap->AddAllocationSiteToScratchpad(memento->GetAllocationSite(), mode);
  }
}


void Heap::ScavengeObject(HeapObject** p, HeapObject* object) {
  DCHECK(object->GetIsolate()->heap()->InFromSpace(object));

  // We use the first word (where the map pointer usually is) of a heap
  // object to record the forwarding pointer.  A forwarding pointer can
  // point to an old space, the code space, or the to space of the new
  // generation.
  MapWord first_word = object->map_word();

  // If the first word is a forwarding address, the object has already been
  // copied.
  if (first_word.IsForwardingAddress()) {
    HeapObject* dest = first_word.ToForwardingAddress();
    DCHECK(object->GetIsolate()->heap()->InFromSpace(*p));
    *p = dest;
    return;
  }

  UpdateAllocationSiteFeedback(object, IGNORE_SCRATCHPAD_SLOT);

  // AllocationMementos are unrooted and shouldn't survive a scavenge.
  DCHECK(object->map() != object->GetHeap()->allocation_memento_map());
  // Call the slow part of scavenge object.
  return ScavengeObjectSlow(p, object);
}


bool Heap::CollectGarbage(AllocationSpace space,
                          const char* gc_reason,
                          const v8::GCCallbackFlags callbackFlags) {
  const char* collector_reason = NULL;
  GarbageCollector collector = SelectGarbageCollector(space, &collector_reason);
  return CollectGarbage(collector, gc_reason, collector_reason, callbackFlags);
}


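// Editorial note: this computes the owning Isolate without a back pointer,
// assuming Isolate::heap() returns the address of its embedded Heap member.
// reinterpret_cast<Isolate*>(4)->heap() then evaluates to
// 4 + offsetof(Isolate, heap_), so subtracting it from |this| and adding 4
// back turns a Heap* into its enclosing Isolate*.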
Isolate* Heap::isolate() {
  return reinterpret_cast<Isolate*>(reinterpret_cast<intptr_t>(this) -
      reinterpret_cast<size_t>(reinterpret_cast<Isolate*>(4)->heap()) + 4);
}



// Performs FUNCTION_CALL up to three times, collecting garbage between
// attempts, so that any allocations made during the call succeed if there
// is enough memory.

// Warning: Do not use the identifiers __object__, __allocation__ or
// __scope__ in a call to this macro.

#define RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE)  \
  if (__allocation__.To(&__object__)) {                    \
    DCHECK(__object__ != (ISOLATE)->heap()->exception());  \
    RETURN_VALUE;                                          \
  }

#define CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY)    \
  do {                                                                        \
    AllocationResult __allocation__ = FUNCTION_CALL;                          \
    Object* __object__ = NULL;                                                \
    RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE)                         \
    (ISOLATE)->heap()->CollectGarbage(__allocation__.RetrySpace(),            \
                                      "allocation failure");                  \
    __allocation__ = FUNCTION_CALL;                                           \
    RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE)                         \
    (ISOLATE)->counters()->gc_last_resort_from_handles()->Increment();        \
    (ISOLATE)->heap()->CollectAllAvailableGarbage("last resort gc");          \
    {                                                                         \
      AlwaysAllocateScope __scope__(ISOLATE);                                 \
      __allocation__ = FUNCTION_CALL;                                         \
    }                                                                         \
    RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE)                         \
    /* TODO(1181417): Fix this. */                                            \
    v8::internal::Heap::FatalProcessOutOfMemory("CALL_AND_RETRY_LAST", true); \
    RETURN_EMPTY;                                                             \
  } while (false)

#define CALL_AND_RETRY_OR_DIE(                                                \
    ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY)                       \
  CALL_AND_RETRY(                                                             \
      ISOLATE,                                                                \
      FUNCTION_CALL,                                                          \
      RETURN_VALUE,                                                           \
      RETURN_EMPTY)

#define CALL_HEAP_FUNCTION(ISOLATE, FUNCTION_CALL, TYPE)                      \
  CALL_AND_RETRY_OR_DIE(ISOLATE,                                              \
                        FUNCTION_CALL,                                        \
                        return Handle<TYPE>(TYPE::cast(__object__), ISOLATE), \
                        return Handle<TYPE>())


#define CALL_HEAP_FUNCTION_VOID(ISOLATE, FUNCTION_CALL) \
  CALL_AND_RETRY_OR_DIE(ISOLATE, FUNCTION_CALL, return, return)

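// Illustrative use of CALL_HEAP_FUNCTION (editorial sketch, not a call site
// from this file; the allocator name is hypothetical):
//
//   Handle<FixedArray> NewFixedArray(Isolate* isolate, int length) {
//     CALL_HEAP_FUNCTION(isolate,
//                        isolate->heap()->AllocateFixedArray(length),
//                        FixedArray);
//   }
//
// The macro expands to the retry-or-die sequence above and returns a
// Handle<FixedArray>, or aborts the process on last-resort failure.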

void ExternalStringTable::AddString(String* string) {
  DCHECK(string->IsExternalString());
  if (heap_->InNewSpace(string)) {
    new_space_strings_.Add(string);
  } else {
    old_space_strings_.Add(string);
  }
}


void ExternalStringTable::Iterate(ObjectVisitor* v) {
  if (!new_space_strings_.is_empty()) {
    Object** start = &new_space_strings_[0];
    v->VisitPointers(start, start + new_space_strings_.length());
  }
  if (!old_space_strings_.is_empty()) {
    Object** start = &old_space_strings_[0];
    v->VisitPointers(start, start + old_space_strings_.length());
  }
}


// Verify() is inline to avoid ifdef-s around its calls in release
// mode.
void ExternalStringTable::Verify() {
#ifdef DEBUG
  for (int i = 0; i < new_space_strings_.length(); ++i) {
    Object* obj = Object::cast(new_space_strings_[i]);
    DCHECK(heap_->InNewSpace(obj));
    DCHECK(obj != heap_->the_hole_value());
  }
  for (int i = 0; i < old_space_strings_.length(); ++i) {
    Object* obj = Object::cast(old_space_strings_[i]);
    DCHECK(!heap_->InNewSpace(obj));
    DCHECK(obj != heap_->the_hole_value());
  }
#endif
}


void ExternalStringTable::AddOldString(String* string) {
  DCHECK(string->IsExternalString());
  DCHECK(!heap_->InNewSpace(string));
  old_space_strings_.Add(string);
}


void ExternalStringTable::ShrinkNewStrings(int position) {
  new_space_strings_.Rewind(position);
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    Verify();
  }
#endif
}


void Heap::ClearInstanceofCache() {
  set_instanceof_cache_function(the_hole_value());
}


Object* Heap::ToBoolean(bool condition) {
  return condition ? true_value() : false_value();
}


void Heap::CompletelyClearInstanceofCache() {
  set_instanceof_cache_map(the_hole_value());
  set_instanceof_cache_function(the_hole_value());
}


AlwaysAllocateScope::AlwaysAllocateScope(Isolate* isolate)
    : heap_(isolate->heap()), daf_(isolate) {
  // We shouldn't hit any nested scopes, because that requires
  // non-handle code to call handle code. The code still works but
  // performance will degrade, so we want to catch this situation
  // in debug mode.
  DCHECK(heap_->always_allocate_scope_depth_ == 0);
  heap_->always_allocate_scope_depth_++;
}


AlwaysAllocateScope::~AlwaysAllocateScope() {
  heap_->always_allocate_scope_depth_--;
  DCHECK(heap_->always_allocate_scope_depth_ == 0);
}


#ifdef VERIFY_HEAP
NoWeakObjectVerificationScope::NoWeakObjectVerificationScope() {
  Isolate* isolate = Isolate::Current();
  isolate->heap()->no_weak_object_verification_scope_depth_++;
}


NoWeakObjectVerificationScope::~NoWeakObjectVerificationScope() {
  Isolate* isolate = Isolate::Current();
  isolate->heap()->no_weak_object_verification_scope_depth_--;
}
#endif


GCCallbacksScope::GCCallbacksScope(Heap* heap) : heap_(heap) {
  heap_->gc_callbacks_depth_++;
}


GCCallbacksScope::~GCCallbacksScope() {
  heap_->gc_callbacks_depth_--;
}


bool GCCallbacksScope::CheckReenter() {
  return heap_->gc_callbacks_depth_ == 1;
}


void VerifyPointersVisitor::VisitPointers(Object** start, Object** end) {
  for (Object** current = start; current < end; current++) {
    if ((*current)->IsHeapObject()) {
      HeapObject* object = HeapObject::cast(*current);
      CHECK(object->GetIsolate()->heap()->Contains(object));
      CHECK(object->map()->IsMap());
    }
  }
}


void VerifySmisVisitor::VisitPointers(Object** start, Object** end) {
  for (Object** current = start; current < end; current++) {
    CHECK((*current)->IsSmi());
  }
}


} }  // namespace v8::internal

#endif  // V8_HEAP_INL_H_