OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef V8_HEAP_INL_H_ | 5 #ifndef V8_HEAP_HEAP_INL_H_ |
6 #define V8_HEAP_INL_H_ | 6 #define V8_HEAP_HEAP_INL_H_ |
7 | 7 |
8 #include <cmath> | 8 #include <cmath> |
9 | 9 |
10 #include "src/base/platform/platform.h" | 10 #include "src/base/platform/platform.h" |
11 #include "src/cpu-profiler.h" | 11 #include "src/cpu-profiler.h" |
12 #include "src/heap.h" | 12 #include "src/heap/heap.h" |
13 #include "src/heap-profiler.h" | 13 #include "src/heap-profiler.h" |
14 #include "src/isolate.h" | 14 #include "src/isolate.h" |
15 #include "src/list-inl.h" | 15 #include "src/list-inl.h" |
16 #include "src/objects.h" | 16 #include "src/objects.h" |
17 #include "src/store-buffer.h" | 17 #include "src/store-buffer.h" |
18 #include "src/store-buffer-inl.h" | 18 #include "src/store-buffer-inl.h" |
19 | 19 |
20 namespace v8 { | 20 namespace v8 { |
21 namespace internal { | 21 namespace internal { |
22 | 22 |
(...skipping 17 matching lines...) |
40 | 40 |
41 if ((rear_ - 2) < limit_) { | 41 if ((rear_ - 2) < limit_) { |
42 RelocateQueueHead(); | 42 RelocateQueueHead(); |
43 emergency_stack_->Add(Entry(target, size)); | 43 emergency_stack_->Add(Entry(target, size)); |
44 return; | 44 return; |
45 } | 45 } |
46 } | 46 } |
47 | 47 |
48 *(--rear_) = reinterpret_cast<intptr_t>(target); | 48 *(--rear_) = reinterpret_cast<intptr_t>(target); |
49 *(--rear_) = size; | 49 *(--rear_) = size; |
50 // Assert no overflow into live objects. | 50 // Assert no overflow into live objects. |
51 #ifdef DEBUG | 51 #ifdef DEBUG |
52 SemiSpace::AssertValidRange(target->GetIsolate()->heap()->new_space()->top(), | 52 SemiSpace::AssertValidRange(target->GetIsolate()->heap()->new_space()->top(), |
53 reinterpret_cast<Address>(rear_)); | 53 reinterpret_cast<Address>(rear_)); |
54 #endif | 54 #endif |
55 } | 55 } |
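
Each queue entry is two machine words pushed downward from rear_: the target pointer first, then the size, so whoever drains the queue reads them back in the opposite order. A minimal stack-shaped sketch of that entry layout (hypothetical names; the real queue also maintains a separate front_ pointer and the emergency stack seen above):

    #include <cassert>
    #include <cstdint>

    // Two-word downward-growing entry store, mirroring the
    // (target, size) layout used by PromotionQueue::insert.
    class TwoWordStack {
     public:
      TwoWordStack(intptr_t* limit, intptr_t* rear)
          : limit_(limit), rear_(rear) {}

      bool HasRoom() const { return (rear_ - 2) >= limit_; }

      void Insert(void* target, intptr_t size) {
        assert(HasRoom());  // the real code spills to emergency_stack_
        *(--rear_) = reinterpret_cast<intptr_t>(target);
        *(--rear_) = size;  // the size ends up below the pointer
      }

      void Remove(void** target, intptr_t* size) {
        *size = *(rear_++);  // popped in reverse push order
        *target = reinterpret_cast<void*>(*(rear_++));
      }

     private:
      intptr_t* limit_;  // lowest usable slot
      intptr_t* rear_;   // next free slot, grows downward
    };
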
56 | 56 |
57 | 57 |
58 void PromotionQueue::ActivateGuardIfOnTheSamePage() { | 58 void PromotionQueue::ActivateGuardIfOnTheSamePage() { |
59 guard_ = guard_ || | 59 guard_ = guard_ || |
60 heap_->new_space()->active_space()->current_page()->address() == | 60 heap_->new_space()->active_space()->current_page()->address() == |
61 GetHeadPage()->address(); | 61 GetHeadPage()->address(); |
62 } | 62 } |
63 | 63 |
64 | 64 |
65 template<> | 65 template <> |
66 bool inline Heap::IsOneByte(Vector<const char> str, int chars) { | 66 bool inline Heap::IsOneByte(Vector<const char> str, int chars) { |
67 // TODO(dcarney): incorporate Latin-1 check when Latin-1 is supported? | 67 // TODO(dcarney): incorporate Latin-1 check when Latin-1 is supported? |
68 // ASCII only check. | 68 // ASCII only check. |
69 return chars == str.length(); | 69 return chars == str.length(); |
70 } | 70 } |
71 | 71 |
72 | 72 |
73 template<> | 73 template <> |
74 bool inline Heap::IsOneByte(String* str, int chars) { | 74 bool inline Heap::IsOneByte(String* str, int chars) { |
75 return str->IsOneByteRepresentation(); | 75 return str->IsOneByteRepresentation(); |
76 } | 76 } |
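
The Vector<const char> specialization leans on a caller invariant: chars is the decoded character count of the UTF-8 input, and that count equals the byte length exactly when every byte is a single ASCII character (the TODO notes Latin-1 is not yet folded in). A standalone sketch of the same test, using a hypothetical helper rather than V8's decoder:

    #include <cstddef>
    #include <cstdint>

    // Decoded character count of well-formed UTF-8: every byte that is
    // not a continuation byte (10xxxxxx) starts a character.
    static size_t Utf8Length(const uint8_t* data, size_t byte_length) {
      size_t chars = 0;
      for (size_t i = 0; i < byte_length; i++) {
        if ((data[i] & 0xC0) != 0x80) chars++;
      }
      return chars;
    }

    // Mirrors IsOneByte(Vector<const char>, int): the counts agree
    // exactly when the buffer is pure one-byte-per-character ASCII.
    static bool IsAsciiOnly(const uint8_t* data, size_t byte_length) {
      return Utf8Length(data, byte_length) == byte_length;
    }
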
77 | 77 |
78 | 78 |
79 AllocationResult Heap::AllocateInternalizedStringFromUtf8( | 79 AllocationResult Heap::AllocateInternalizedStringFromUtf8( |
80 Vector<const char> str, int chars, uint32_t hash_field) { | 80 Vector<const char> str, int chars, uint32_t hash_field) { |
81 if (IsOneByte(str, chars)) { | 81 if (IsOneByte(str, chars)) { |
82 return AllocateOneByteInternalizedString( | 82 return AllocateOneByteInternalizedString(Vector<const uint8_t>::cast(str), |
83 Vector<const uint8_t>::cast(str), hash_field); | 83 hash_field); |
84 } | 84 } |
85 return AllocateInternalizedStringImpl<false>(str, chars, hash_field); | 85 return AllocateInternalizedStringImpl<false>(str, chars, hash_field); |
86 } | 86 } |
87 | 87 |
88 | 88 |
89 template<typename T> | 89 template <typename T> |
90 AllocationResult Heap::AllocateInternalizedStringImpl( | 90 AllocationResult Heap::AllocateInternalizedStringImpl(T t, int chars, |
91 T t, int chars, uint32_t hash_field) { | 91 uint32_t hash_field) { |
92 if (IsOneByte(t, chars)) { | 92 if (IsOneByte(t, chars)) { |
93 return AllocateInternalizedStringImpl<true>(t, chars, hash_field); | 93 return AllocateInternalizedStringImpl<true>(t, chars, hash_field); |
94 } | 94 } |
95 return AllocateInternalizedStringImpl<false>(t, chars, hash_field); | 95 return AllocateInternalizedStringImpl<false>(t, chars, hash_field); |
96 } | 96 } |
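
The pattern here is a runtime check that picks a compile-time bool: the non-template predicate runs once, then dispatch lands in a specialization where the one-byte/two-byte decision is a template parameter and costs nothing further. A reduced sketch of the idiom with hypothetical names:

    #include <cstdio>

    // Compile-time variant: is_one_byte is folded away per instantiation.
    template <bool is_one_byte>
    static void CopyCharsImpl(const char* data) {
      if (is_one_byte) {
        std::printf("one-byte path: %s\n", data);
      } else {
        std::printf("two-byte path: %s\n", data);
      }
    }

    // Runtime dispatch, mirroring AllocateInternalizedStringImpl above.
    static void CopyChars(const char* data, bool one_byte) {
      if (one_byte) {
        CopyCharsImpl<true>(data);
      } else {
        CopyCharsImpl<false>(data);
      }
    }

    int main() {
      CopyChars("abc", true);
      CopyChars("\xC3\xA9", false);  // U+00E9 needs the two-byte path
      return 0;
    }
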
97 | 97 |
98 | 98 |
99 AllocationResult Heap::AllocateOneByteInternalizedString( | 99 AllocationResult Heap::AllocateOneByteInternalizedString( |
100 Vector<const uint8_t> str, | 100 Vector<const uint8_t> str, uint32_t hash_field) { |
101 uint32_t hash_field) { | |
102 CHECK_GE(String::kMaxLength, str.length()); | 101 CHECK_GE(String::kMaxLength, str.length()); |
103 // Compute map and object size. | 102 // Compute map and object size. |
104 Map* map = ascii_internalized_string_map(); | 103 Map* map = ascii_internalized_string_map(); |
105 int size = SeqOneByteString::SizeFor(str.length()); | 104 int size = SeqOneByteString::SizeFor(str.length()); |
106 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, TENURED); | 105 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, TENURED); |
107 | 106 |
108 // Allocate string. | 107 // Allocate string. |
109 HeapObject* result; | 108 HeapObject* result; |
110 { AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE); | 109 { |
| 110 AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE); |
111 if (!allocation.To(&result)) return allocation; | 111 if (!allocation.To(&result)) return allocation; |
112 } | 112 } |
113 | 113 |
114 // String maps are all immortal immovable objects. | 114 // String maps are all immortal immovable objects. |
115 result->set_map_no_write_barrier(map); | 115 result->set_map_no_write_barrier(map); |
116 // Set length and hash fields of the allocated string. | 116 // Set length and hash fields of the allocated string. |
117 String* answer = String::cast(result); | 117 String* answer = String::cast(result); |
118 answer->set_length(str.length()); | 118 answer->set_length(str.length()); |
119 answer->set_hash_field(hash_field); | 119 answer->set_hash_field(hash_field); |
120 | 120 |
(...skipping 10 matching lines...) |
131 AllocationResult Heap::AllocateTwoByteInternalizedString(Vector<const uc16> str, | 131 AllocationResult Heap::AllocateTwoByteInternalizedString(Vector<const uc16> str, |
132 uint32_t hash_field) { | 132 uint32_t hash_field) { |
133 CHECK_GE(String::kMaxLength, str.length()); | 133 CHECK_GE(String::kMaxLength, str.length()); |
134 // Compute map and object size. | 134 // Compute map and object size. |
135 Map* map = internalized_string_map(); | 135 Map* map = internalized_string_map(); |
136 int size = SeqTwoByteString::SizeFor(str.length()); | 136 int size = SeqTwoByteString::SizeFor(str.length()); |
137 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, TENURED); | 137 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, TENURED); |
138 | 138 |
139 // Allocate string. | 139 // Allocate string. |
140 HeapObject* result; | 140 HeapObject* result; |
141 { AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE); | 141 { |
| 142 AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE); |
142 if (!allocation.To(&result)) return allocation; | 143 if (!allocation.To(&result)) return allocation; |
143 } | 144 } |
144 | 145 |
145 result->set_map(map); | 146 result->set_map(map); |
146 // Set length and hash fields of the allocated string. | 147 // Set length and hash fields of the allocated string. |
147 String* answer = String::cast(result); | 148 String* answer = String::cast(result); |
148 answer->set_length(str.length()); | 149 answer->set_length(str.length()); |
149 answer->set_hash_field(hash_field); | 150 answer->set_hash_field(hash_field); |
150 | 151 |
151 DCHECK_EQ(size, answer->Size()); | 152 DCHECK_EQ(size, answer->Size()); |
(...skipping 16 matching lines...) |
168 return CopyFixedDoubleArrayWithMap(src, src->map()); | 169 return CopyFixedDoubleArrayWithMap(src, src->map()); |
169 } | 170 } |
170 | 171 |
171 | 172 |
172 AllocationResult Heap::CopyConstantPoolArray(ConstantPoolArray* src) { | 173 AllocationResult Heap::CopyConstantPoolArray(ConstantPoolArray* src) { |
173 if (src->length() == 0) return src; | 174 if (src->length() == 0) return src; |
174 return CopyConstantPoolArrayWithMap(src, src->map()); | 175 return CopyConstantPoolArrayWithMap(src, src->map()); |
175 } | 176 } |
176 | 177 |
177 | 178 |
178 AllocationResult Heap::AllocateRaw(int size_in_bytes, | 179 AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space, |
179 AllocationSpace space, | |
180 AllocationSpace retry_space) { | 180 AllocationSpace retry_space) { |
181 DCHECK(AllowHandleAllocation::IsAllowed()); | 181 DCHECK(AllowHandleAllocation::IsAllowed()); |
182 DCHECK(AllowHeapAllocation::IsAllowed()); | 182 DCHECK(AllowHeapAllocation::IsAllowed()); |
183 DCHECK(gc_state_ == NOT_IN_GC); | 183 DCHECK(gc_state_ == NOT_IN_GC); |
184 #ifdef DEBUG | 184 #ifdef DEBUG |
185 if (FLAG_gc_interval >= 0 && | 185 if (FLAG_gc_interval >= 0 && AllowAllocationFailure::IsAllowed(isolate_) && |
186 AllowAllocationFailure::IsAllowed(isolate_) && | |
187 Heap::allocation_timeout_-- <= 0) { | 186 Heap::allocation_timeout_-- <= 0) { |
188 return AllocationResult::Retry(space); | 187 return AllocationResult::Retry(space); |
189 } | 188 } |
190 isolate_->counters()->objs_since_last_full()->Increment(); | 189 isolate_->counters()->objs_since_last_full()->Increment(); |
191 isolate_->counters()->objs_since_last_young()->Increment(); | 190 isolate_->counters()->objs_since_last_young()->Increment(); |
192 #endif | 191 #endif |
193 | 192 |
194 HeapObject* object; | 193 HeapObject* object; |
195 AllocationResult allocation; | 194 AllocationResult allocation; |
196 if (NEW_SPACE == space) { | 195 if (NEW_SPACE == space) { |
197 allocation = new_space_.AllocateRaw(size_in_bytes); | 196 allocation = new_space_.AllocateRaw(size_in_bytes); |
198 if (always_allocate() && | 197 if (always_allocate() && allocation.IsRetry() && retry_space != NEW_SPACE) { |
199 allocation.IsRetry() && | |
200 retry_space != NEW_SPACE) { | |
201 space = retry_space; | 198 space = retry_space; |
202 } else { | 199 } else { |
203 if (allocation.To(&object)) { | 200 if (allocation.To(&object)) { |
204 OnAllocationEvent(object, size_in_bytes); | 201 OnAllocationEvent(object, size_in_bytes); |
205 } | 202 } |
206 return allocation; | 203 return allocation; |
207 } | 204 } |
208 } | 205 } |
209 | 206 |
210 if (OLD_POINTER_SPACE == space) { | 207 if (OLD_POINTER_SPACE == space) { |
(...skipping 40 matching lines...) |
251 | 248 |
252 if ((FLAG_dump_allocations_digest_at_alloc > 0) && | 249 if ((FLAG_dump_allocations_digest_at_alloc > 0) && |
253 (--dump_allocations_hash_countdown_ == 0)) { | 250 (--dump_allocations_hash_countdown_ == 0)) { |
254 dump_allocations_hash_countdown_ = FLAG_dump_allocations_digest_at_alloc; | 251 dump_allocations_hash_countdown_ = FLAG_dump_allocations_digest_at_alloc; |
255 PrintAlloctionsHash(); | 252 PrintAlloctionsHash(); |
256 } | 253 } |
257 } | 254 } |
258 } | 255 } |
259 | 256 |
260 | 257 |
261 void Heap::OnMoveEvent(HeapObject* target, | 258 void Heap::OnMoveEvent(HeapObject* target, HeapObject* source, |
262 HeapObject* source, | |
263 int size_in_bytes) { | 259 int size_in_bytes) { |
264 HeapProfiler* heap_profiler = isolate_->heap_profiler(); | 260 HeapProfiler* heap_profiler = isolate_->heap_profiler(); |
265 if (heap_profiler->is_tracking_object_moves()) { | 261 if (heap_profiler->is_tracking_object_moves()) { |
266 heap_profiler->ObjectMoveEvent(source->address(), target->address(), | 262 heap_profiler->ObjectMoveEvent(source->address(), target->address(), |
267 size_in_bytes); | 263 size_in_bytes); |
268 } | 264 } |
269 | 265 |
270 if (isolate_->logger()->is_logging_code_events() || | 266 if (isolate_->logger()->is_logging_code_events() || |
271 isolate_->cpu_profiler()->is_profiling()) { | 267 isolate_->cpu_profiler()->is_profiling()) { |
272 if (target->IsSharedFunctionInfo()) { | 268 if (target->IsSharedFunctionInfo()) { |
273 PROFILE(isolate_, SharedFunctionInfoMoveEvent( | 269 PROFILE(isolate_, SharedFunctionInfoMoveEvent(source->address(), |
274 source->address(), target->address())); | 270 target->address())); |
275 } | 271 } |
276 } | 272 } |
277 | 273 |
278 if (FLAG_verify_predictable) { | 274 if (FLAG_verify_predictable) { |
279 ++allocations_count_; | 275 ++allocations_count_; |
280 | 276 |
281 UpdateAllocationsHash(source); | 277 UpdateAllocationsHash(source); |
282 UpdateAllocationsHash(target); | 278 UpdateAllocationsHash(target); |
283 UpdateAllocationsHash(size_in_bytes); | 279 UpdateAllocationsHash(size_in_bytes); |
284 | 280 |
(...skipping 33 matching lines...) |
318 void Heap::PrintAlloctionsHash() { | 314 void Heap::PrintAlloctionsHash() { |
319 uint32_t hash = StringHasher::GetHashCore(raw_allocations_hash_); | 315 uint32_t hash = StringHasher::GetHashCore(raw_allocations_hash_); |
320 PrintF("\n### Allocations = %u, hash = 0x%08x\n", allocations_count_, hash); | 316 PrintF("\n### Allocations = %u, hash = 0x%08x\n", allocations_count_, hash); |
321 } | 317 } |
322 | 318 |
323 | 319 |
324 void Heap::FinalizeExternalString(String* string) { | 320 void Heap::FinalizeExternalString(String* string) { |
325 DCHECK(string->IsExternalString()); | 321 DCHECK(string->IsExternalString()); |
326 v8::String::ExternalStringResourceBase** resource_addr = | 322 v8::String::ExternalStringResourceBase** resource_addr = |
327 reinterpret_cast<v8::String::ExternalStringResourceBase**>( | 323 reinterpret_cast<v8::String::ExternalStringResourceBase**>( |
328 reinterpret_cast<byte*>(string) + | 324 reinterpret_cast<byte*>(string) + ExternalString::kResourceOffset - |
329 ExternalString::kResourceOffset - | |
330 kHeapObjectTag); | 325 kHeapObjectTag); |
331 | 326 |
332 // Dispose of the C++ object if it has not already been disposed. | 327 // Dispose of the C++ object if it has not already been disposed. |
333 if (*resource_addr != NULL) { | 328 if (*resource_addr != NULL) { |
334 (*resource_addr)->Dispose(); | 329 (*resource_addr)->Dispose(); |
335 *resource_addr = NULL; | 330 *resource_addr = NULL; |
336 } | 331 } |
337 } | 332 } |
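
The resource slot address is computed by hand because HeapObject pointers are tagged: subtracting kHeapObjectTag yields the raw start of the object, and the field sits at a fixed byte offset from there. A self-contained sketch of the arithmetic with stand-in constants (the real kHeapObjectTag and kResourceOffset values are defined elsewhere in V8):

    #include <cstdint>
    #include <cstdio>

    const intptr_t kTag = 1;          // stand-in for kHeapObjectTag
    const intptr_t kFieldOffset = 8;  // stand-in for kResourceOffset

    // Untag an object pointer and address a field at a fixed offset,
    // as FinalizeExternalString does for the resource pointer.
    static uint8_t* FieldAddress(intptr_t tagged_ptr) {
      return reinterpret_cast<uint8_t*>(tagged_ptr + kFieldOffset - kTag);
    }

    int main() {
      alignas(8) static uint8_t object[16];
      intptr_t tagged = reinterpret_cast<intptr_t>(object) | kTag;
      std::printf("object %p, field %p\n", static_cast<void*>(object),
                  static_cast<void*>(FieldAddress(tagged)));
      return 0;
    }
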
338 | 333 |
339 | 334 |
340 bool Heap::InNewSpace(Object* object) { | 335 bool Heap::InNewSpace(Object* object) { |
341 bool result = new_space_.Contains(object); | 336 bool result = new_space_.Contains(object); |
342 DCHECK(!result || // Either not in new space | 337 DCHECK(!result || // Either not in new space |
343 gc_state_ != NOT_IN_GC || // ... or in the middle of GC | 338 gc_state_ != NOT_IN_GC || // ... or in the middle of GC |
344 InToSpace(object)); // ... or in to-space (where we allocate). | 339 InToSpace(object)); // ... or in to-space (where we allocate). |
345 return result; | 340 return result; |
346 } | 341 } |
347 | 342 |
348 | 343 |
349 bool Heap::InNewSpace(Address address) { | 344 bool Heap::InNewSpace(Address address) { return new_space_.Contains(address); } |
350 return new_space_.Contains(address); | |
351 } | |
352 | 345 |
353 | 346 |
354 bool Heap::InFromSpace(Object* object) { | 347 bool Heap::InFromSpace(Object* object) { |
355 return new_space_.FromSpaceContains(object); | 348 return new_space_.FromSpaceContains(object); |
356 } | 349 } |
357 | 350 |
358 | 351 |
359 bool Heap::InToSpace(Object* object) { | 352 bool Heap::InToSpace(Object* object) { |
360 return new_space_.ToSpaceContains(object); | 353 return new_space_.ToSpaceContains(object); |
361 } | 354 } |
(...skipping 22 matching lines...) |
384 bool Heap::OldGenerationAllocationLimitReached() { | 377 bool Heap::OldGenerationAllocationLimitReached() { |
385 if (!incremental_marking()->IsStopped()) return false; | 378 if (!incremental_marking()->IsStopped()) return false; |
386 return OldGenerationSpaceAvailable() < 0; | 379 return OldGenerationSpaceAvailable() < 0; |
387 } | 380 } |
388 | 381 |
389 | 382 |
390 bool Heap::ShouldBePromoted(Address old_address, int object_size) { | 383 bool Heap::ShouldBePromoted(Address old_address, int object_size) { |
391 NewSpacePage* page = NewSpacePage::FromAddress(old_address); | 384 NewSpacePage* page = NewSpacePage::FromAddress(old_address); |
392 Address age_mark = new_space_.age_mark(); | 385 Address age_mark = new_space_.age_mark(); |
393 return page->IsFlagSet(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK) && | 386 return page->IsFlagSet(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK) && |
394 (!page->ContainsLimit(age_mark) || old_address < age_mark); | 387 (!page->ContainsLimit(age_mark) || old_address < age_mark); |
395 } | 388 } |
396 | 389 |
397 | 390 |
398 void Heap::RecordWrite(Address address, int offset) { | 391 void Heap::RecordWrite(Address address, int offset) { |
399 if (!InNewSpace(address)) store_buffer_.Mark(address + offset); | 392 if (!InNewSpace(address)) store_buffer_.Mark(address + offset); |
400 } | 393 } |
401 | 394 |
402 | 395 |
403 void Heap::RecordWrites(Address address, int start, int len) { | 396 void Heap::RecordWrites(Address address, int start, int len) { |
404 if (!InNewSpace(address)) { | 397 if (!InNewSpace(address)) { |
405 for (int i = 0; i < len; i++) { | 398 for (int i = 0; i < len; i++) { |
406 store_buffer_.Mark(address + start + i * kPointerSize); | 399 store_buffer_.Mark(address + start + i * kPointerSize); |
407 } | 400 } |
408 } | 401 } |
409 } | 402 } |
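
RecordWrite and RecordWrites feed the store buffer, the remembered set that lets the scavenger scan only those old-space slots that might now point into new space rather than the whole old generation; writes into new space need no record because new space is scanned wholesale anyway. A sketch of the slot arithmetic with a hypothetical buffer type:

    #include <cstdint>
    #include <vector>

    const int kPtrSize = static_cast<int>(sizeof(void*));

    // Hypothetical remembered set: slot addresses that may now hold
    // new-space pointers, to be re-scanned at the next scavenge.
    struct SimpleStoreBuffer {
      std::vector<uintptr_t> slots;
      void Mark(uintptr_t slot) { slots.push_back(slot); }
    };

    // Record len consecutive pointer fields starting at byte offset
    // start within the object at address, as RecordWrites does above.
    static void RecordFieldWrites(SimpleStoreBuffer* sb, uintptr_t address,
                                  int start, int len) {
      for (int i = 0; i < len; i++) {
        sb->Mark(address + start + i * kPtrSize);
      }
    }
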
410 | 403 |
411 | 404 |
412 OldSpace* Heap::TargetSpace(HeapObject* object) { | 405 OldSpace* Heap::TargetSpace(HeapObject* object) { |
413 InstanceType type = object->map()->instance_type(); | 406 InstanceType type = object->map()->instance_type(); |
414 AllocationSpace space = TargetSpaceId(type); | 407 AllocationSpace space = TargetSpaceId(type); |
415 return (space == OLD_POINTER_SPACE) | 408 return (space == OLD_POINTER_SPACE) ? old_pointer_space_ : old_data_space_; |
416 ? old_pointer_space_ | |
417 : old_data_space_; | |
418 } | 409 } |
419 | 410 |
420 | 411 |
421 AllocationSpace Heap::TargetSpaceId(InstanceType type) { | 412 AllocationSpace Heap::TargetSpaceId(InstanceType type) { |
422 // Heap numbers and sequential strings are promoted to old data space, all | 413 // Heap numbers and sequential strings are promoted to old data space, all |
423 // other object types are promoted to old pointer space. We do not use | 414 // other object types are promoted to old pointer space. We do not use |
424 // object->IsHeapNumber() and object->IsSeqString() because we already | 415 // object->IsHeapNumber() and object->IsSeqString() because we already |
425 // know that object has the heap object tag. | 416 // know that object has the heap object tag. |
426 | 417 |
427 // These objects are never allocated in new space. | 418 // These objects are never allocated in new space. |
428 DCHECK(type != MAP_TYPE); | 419 DCHECK(type != MAP_TYPE); |
429 DCHECK(type != CODE_TYPE); | 420 DCHECK(type != CODE_TYPE); |
430 DCHECK(type != ODDBALL_TYPE); | 421 DCHECK(type != ODDBALL_TYPE); |
431 DCHECK(type != CELL_TYPE); | 422 DCHECK(type != CELL_TYPE); |
432 DCHECK(type != PROPERTY_CELL_TYPE); | 423 DCHECK(type != PROPERTY_CELL_TYPE); |
433 | 424 |
434 if (type <= LAST_NAME_TYPE) { | 425 if (type <= LAST_NAME_TYPE) { |
435 if (type == SYMBOL_TYPE) return OLD_POINTER_SPACE; | 426 if (type == SYMBOL_TYPE) return OLD_POINTER_SPACE; |
436 DCHECK(type < FIRST_NONSTRING_TYPE); | 427 DCHECK(type < FIRST_NONSTRING_TYPE); |
437 // There are four string representations: sequential strings, external | 428 // There are four string representations: sequential strings, external |
438 // strings, cons strings, and sliced strings. | 429 // strings, cons strings, and sliced strings. |
439 // Only the latter two contain non-map-word pointers to heap objects. | 430 // Only the latter two contain non-map-word pointers to heap objects. |
440 return ((type & kIsIndirectStringMask) == kIsIndirectStringTag) | 431 return ((type & kIsIndirectStringMask) == kIsIndirectStringTag) |
441 ? OLD_POINTER_SPACE | 432 ? OLD_POINTER_SPACE |
442 : OLD_DATA_SPACE; | 433 : OLD_DATA_SPACE; |
443 } else { | 434 } else { |
444 return (type <= LAST_DATA_TYPE) ? OLD_DATA_SPACE : OLD_POINTER_SPACE; | 435 return (type <= LAST_DATA_TYPE) ? OLD_DATA_SPACE : OLD_POINTER_SPACE; |
445 } | 436 } |
446 } | 437 } |
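
The string branch is a bitmask test: cons and sliced strings are the two "indirect" representations and contain pointers to other strings, so they must live in the pointer space, while sequential and external string bodies are pointer-free data. A minimal sketch of such a mask test; the encodings below are hypothetical, not V8's actual instance-type bits:

    #include <cstdint>

    // Hypothetical representation bits, loosely modeled on the
    // kIsIndirectStringMask check above.
    const uint32_t kSeqString = 0x0;
    const uint32_t kExternalString = 0x1;
    const uint32_t kConsString = 0x2;
    const uint32_t kSlicedString = 0x3;
    const uint32_t kIndirectMask = 0x2;  // set only for cons and sliced

    // True when the representation holds pointers to other strings and
    // therefore needs the pointer space.
    static bool HoldsHeapPointers(uint32_t type) {
      return (type & kIndirectMask) != 0;
    }
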
447 | 438 |
448 | 439 |
449 bool Heap::AllowedToBeMigrated(HeapObject* obj, AllocationSpace dst) { | 440 bool Heap::AllowedToBeMigrated(HeapObject* obj, AllocationSpace dst) { |
450 // Object migration is governed by the following rules: | 441 // Object migration is governed by the following rules: |
451 // | 442 // |
452 // 1) Objects in new-space can be migrated to one of the old spaces | 443 // 1) Objects in new-space can be migrated to one of the old spaces |
(...skipping 30 matching lines...) Expand all Loading... |
483 return false; | 474 return false; |
484 case INVALID_SPACE: | 475 case INVALID_SPACE: |
485 break; | 476 break; |
486 } | 477 } |
487 UNREACHABLE(); | 478 UNREACHABLE(); |
488 return false; | 479 return false; |
489 } | 480 } |
490 | 481 |
491 | 482 |
492 void Heap::CopyBlock(Address dst, Address src, int byte_size) { | 483 void Heap::CopyBlock(Address dst, Address src, int byte_size) { |
493 CopyWords(reinterpret_cast<Object**>(dst), | 484 CopyWords(reinterpret_cast<Object**>(dst), reinterpret_cast<Object**>(src), |
494 reinterpret_cast<Object**>(src), | |
495 static_cast<size_t>(byte_size / kPointerSize)); | 485 static_cast<size_t>(byte_size / kPointerSize)); |
496 } | 486 } |
497 | 487 |
498 | 488 |
499 void Heap::MoveBlock(Address dst, Address src, int byte_size) { | 489 void Heap::MoveBlock(Address dst, Address src, int byte_size) { |
500 DCHECK(IsAligned(byte_size, kPointerSize)); | 490 DCHECK(IsAligned(byte_size, kPointerSize)); |
501 | 491 |
502 int size_in_words = byte_size / kPointerSize; | 492 int size_in_words = byte_size / kPointerSize; |
503 | 493 |
504 if ((dst < src) || (dst >= (src + byte_size))) { | 494 if ((dst < src) || (dst >= (src + byte_size))) { |
505 Object** src_slot = reinterpret_cast<Object**>(src); | 495 Object** src_slot = reinterpret_cast<Object**>(src); |
506 Object** dst_slot = reinterpret_cast<Object**>(dst); | 496 Object** dst_slot = reinterpret_cast<Object**>(dst); |
507 Object** end_slot = src_slot + size_in_words; | 497 Object** end_slot = src_slot + size_in_words; |
508 | 498 |
509 while (src_slot != end_slot) { | 499 while (src_slot != end_slot) { |
510 *dst_slot++ = *src_slot++; | 500 *dst_slot++ = *src_slot++; |
511 } | 501 } |
512 } else { | 502 } else { |
513 MemMove(dst, src, static_cast<size_t>(byte_size)); | 503 MemMove(dst, src, static_cast<size_t>(byte_size)); |
514 } | 504 } |
515 } | 505 } |
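
The guard on the forward copy is the standard overlap test: if dst starts below src, each word is read before it can be overwritten, and if dst starts at or past src + byte_size the regions are disjoint; only a dst inside [src, src + byte_size) would corrupt unread words, and that case is handed to MemMove. A worked check of the condition:

    #include <cstdio>

    // True when a forward, word-at-a-time copy from src to dst cannot
    // clobber source words before they are read.
    static bool ForwardCopySafe(const char* dst, const char* src,
                                int byte_size) {
      return (dst < src) || (dst >= src + byte_size);
    }

    int main() {
      char buf[32];
      std::printf("%d\n", ForwardCopySafe(buf, buf + 8, 8));   // 1: dst below
      std::printf("%d\n", ForwardCopySafe(buf + 16, buf, 8));  // 1: disjoint
      std::printf("%d\n", ForwardCopySafe(buf + 4, buf, 8));   // 0: overlap
      return 0;
    }
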
516 | 506 |
517 | 507 |
518 void Heap::ScavengePointer(HeapObject** p) { | 508 void Heap::ScavengePointer(HeapObject** p) { ScavengeObject(p, *p); } |
519 ScavengeObject(p, *p); | |
520 } | |
521 | 509 |
522 | 510 |
523 AllocationMemento* Heap::FindAllocationMemento(HeapObject* object) { | 511 AllocationMemento* Heap::FindAllocationMemento(HeapObject* object) { |
524 // Check if there is potentially a memento behind the object. If | 512 // Check if there is potentially a memento behind the object. If |
525 // the last word of the momento is on another page we return | 513 // the last word of the momento is on another page we return |
526 // immediately. | 514 // immediately. |
527 Address object_address = object->address(); | 515 Address object_address = object->address(); |
528 Address memento_address = object_address + object->Size(); | 516 Address memento_address = object_address + object->Size(); |
529 Address last_memento_word_address = memento_address + kPointerSize; | 517 Address last_memento_word_address = memento_address + kPointerSize; |
530 if (!NewSpacePage::OnSamePage(object_address, | 518 if (!NewSpacePage::OnSamePage(object_address, last_memento_word_address)) { |
531 last_memento_word_address)) { | |
532 return NULL; | 519 return NULL; |
533 } | 520 } |
534 | 521 |
535 HeapObject* candidate = HeapObject::FromAddress(memento_address); | 522 HeapObject* candidate = HeapObject::FromAddress(memento_address); |
536 if (candidate->map() != allocation_memento_map()) return NULL; | 523 if (candidate->map() != allocation_memento_map()) return NULL; |
537 | 524 |
538 // Either the object is the last object in the new space, or there is another | 525 // Either the object is the last object in the new space, or there is another |
539 // object of at least word size (the header map word) following it, so | 526 // object of at least word size (the header map word) following it, so |
540 // it suffices to compare ptr and top here. Note that technically we do not | 527 // it suffices to compare ptr and top here. Note that technically we do not |
541 // to compare with the current top pointer of the from space page during GC, | 528 // to compare with the current top pointer of the from space page during GC, |
(...skipping 12 matching lines...) |
554 return memento; | 541 return memento; |
555 } | 542 } |
556 | 543 |
557 | 544 |
558 void Heap::UpdateAllocationSiteFeedback(HeapObject* object, | 545 void Heap::UpdateAllocationSiteFeedback(HeapObject* object, |
559 ScratchpadSlotMode mode) { | 546 ScratchpadSlotMode mode) { |
560 Heap* heap = object->GetHeap(); | 547 Heap* heap = object->GetHeap(); |
561 DCHECK(heap->InFromSpace(object)); | 548 DCHECK(heap->InFromSpace(object)); |
562 | 549 |
563 if (!FLAG_allocation_site_pretenuring || | 550 if (!FLAG_allocation_site_pretenuring || |
564 !AllocationSite::CanTrack(object->map()->instance_type())) return; | 551 !AllocationSite::CanTrack(object->map()->instance_type())) |
| 552 return; |
565 | 553 |
566 AllocationMemento* memento = heap->FindAllocationMemento(object); | 554 AllocationMemento* memento = heap->FindAllocationMemento(object); |
567 if (memento == NULL) return; | 555 if (memento == NULL) return; |
568 | 556 |
569 if (memento->GetAllocationSite()->IncrementMementoFoundCount()) { | 557 if (memento->GetAllocationSite()->IncrementMementoFoundCount()) { |
570 heap->AddAllocationSiteToScratchpad(memento->GetAllocationSite(), mode); | 558 heap->AddAllocationSiteToScratchpad(memento->GetAllocationSite(), mode); |
571 } | 559 } |
572 } | 560 } |
573 | 561 |
574 | 562 |
(...skipping 17 matching lines...) |
592 | 580 |
593 UpdateAllocationSiteFeedback(object, IGNORE_SCRATCHPAD_SLOT); | 581 UpdateAllocationSiteFeedback(object, IGNORE_SCRATCHPAD_SLOT); |
594 | 582 |
595 // AllocationMementos are unrooted and shouldn't survive a scavenge | 583 // AllocationMementos are unrooted and shouldn't survive a scavenge |
596 DCHECK(object->map() != object->GetHeap()->allocation_memento_map()); | 584 DCHECK(object->map() != object->GetHeap()->allocation_memento_map()); |
597 // Call the slow part of scavenge object. | 585 // Call the slow part of scavenge object. |
598 return ScavengeObjectSlow(p, object); | 586 return ScavengeObjectSlow(p, object); |
599 } | 587 } |
600 | 588 |
601 | 589 |
602 bool Heap::CollectGarbage(AllocationSpace space, | 590 bool Heap::CollectGarbage(AllocationSpace space, const char* gc_reason, |
603 const char* gc_reason, | |
604 const v8::GCCallbackFlags callbackFlags) { | 591 const v8::GCCallbackFlags callbackFlags) { |
605 const char* collector_reason = NULL; | 592 const char* collector_reason = NULL; |
606 GarbageCollector collector = SelectGarbageCollector(space, &collector_reason); | 593 GarbageCollector collector = SelectGarbageCollector(space, &collector_reason); |
607 return CollectGarbage(collector, gc_reason, collector_reason, callbackFlags); | 594 return CollectGarbage(collector, gc_reason, collector_reason, callbackFlags); |
608 } | 595 } |
609 | 596 |
610 | 597 |
611 Isolate* Heap::isolate() { | 598 Isolate* Heap::isolate() { |
612 return reinterpret_cast<Isolate*>(reinterpret_cast<intptr_t>(this) - | 599 return reinterpret_cast<Isolate*>( |
| 600 reinterpret_cast<intptr_t>(this) - |
613 reinterpret_cast<size_t>(reinterpret_cast<Isolate*>(4)->heap()) + 4); | 601 reinterpret_cast<size_t>(reinterpret_cast<Isolate*>(4)->heap()) + 4); |
614 } | 602 } |
615 | 603 |
616 | 604 |
617 // Calls the FUNCTION_CALL function and retries it up to three times | 605 // Calls the FUNCTION_CALL function and retries it up to three times |
618 // to guarantee that any allocations performed during the call will | 606 // to guarantee that any allocations performed during the call will |
619 // succeed if there's enough memory. | 607 // succeed if there's enough memory. |
620 | 608 |
621 // Warning: Do not use the identifiers __object__, __maybe_object__ or | 609 // Warning: Do not use the identifiers __object__, __maybe_object__ or |
622 // __scope__ in a call to this macro. | 610 // __scope__ in a call to this macro. |
623 | 611 |
624 #define RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE) \ | 612 #define RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE) \ |
625 if (__allocation__.To(&__object__)) { \ | 613 if (__allocation__.To(&__object__)) { \ |
626 DCHECK(__object__ != (ISOLATE)->heap()->exception()); \ | 614 DCHECK(__object__ != (ISOLATE)->heap()->exception()); \ |
627 RETURN_VALUE; \ | 615 RETURN_VALUE; \ |
628 } | 616 } |
629 | 617 |
630 #define CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY) \ | 618 #define CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY) \ |
631 do { \ | 619 do { \ |
632 AllocationResult __allocation__ = FUNCTION_CALL; \ | 620 AllocationResult __allocation__ = FUNCTION_CALL; \ |
633 Object* __object__ = NULL; \ | 621 Object* __object__ = NULL; \ |
634 RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE) \ | 622 RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE) \ |
635 (ISOLATE)->heap()->CollectGarbage(__allocation__.RetrySpace(), \ | 623 (ISOLATE)->heap()->CollectGarbage(__allocation__.RetrySpace(), \ |
636 "allocation failure"); \ | 624 "allocation failure"); \ |
637 __allocation__ = FUNCTION_CALL; \ | 625 __allocation__ = FUNCTION_CALL; \ |
638 RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE) \ | 626 RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE) \ |
639 (ISOLATE)->counters()->gc_last_resort_from_handles()->Increment(); \ | 627 (ISOLATE)->counters()->gc_last_resort_from_handles()->Increment(); \ |
640 (ISOLATE)->heap()->CollectAllAvailableGarbage("last resort gc"); \ | 628 (ISOLATE)->heap()->CollectAllAvailableGarbage("last resort gc"); \ |
641 { \ | 629 { \ |
642 AlwaysAllocateScope __scope__(ISOLATE); \ | 630 AlwaysAllocateScope __scope__(ISOLATE); \ |
643 __allocation__ = FUNCTION_CALL; \ | 631 __allocation__ = FUNCTION_CALL; \ |
644 } \ | 632 } \ |
645 RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE) \ | 633 RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE) \ |
646 /* TODO(1181417): Fix this. */ \ | 634 /* TODO(1181417): Fix this. */ \ |
647 v8::internal::Heap::FatalProcessOutOfMemory("CALL_AND_RETRY_LAST", true); \ | 635 v8::internal::Heap::FatalProcessOutOfMemory("CALL_AND_RETRY_LAST", true); \ |
648 RETURN_EMPTY; \ | 636 RETURN_EMPTY; \ |
649 } while (false) | 637 } while (false) |
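
Expanded, CALL_AND_RETRY is a three-step fallback: try the allocation; on retry, collect garbage in the space the allocation asked for and try again; on a second retry, run the last-resort full collection and try once more under AlwaysAllocateScope; only then report fatal OOM. A sketch of that control flow as a plain function (hypothetical types; the real code must stay a macro so FUNCTION_CALL is re-evaluated at each step):

    #include <cstdio>
    #include <cstdlib>
    #include <functional>

    struct Result {
      void* object;  // non-null on success
      bool IsRetry() const { return object == nullptr; }
    };

    static void* AllocateWithRetry(const std::function<Result()>& try_alloc,
                                   const std::function<void()>& collect_some,
                                   const std::function<void()>& collect_all) {
      Result r = try_alloc();
      if (!r.IsRetry()) return r.object;

      collect_some();   // GC the space the allocation wanted
      r = try_alloc();
      if (!r.IsRetry()) return r.object;

      collect_all();    // last-resort full GC; the macro also enters
      r = try_alloc();  // AlwaysAllocateScope for this final attempt
      if (!r.IsRetry()) return r.object;

      std::fprintf(stderr, "fatal: process out of memory\n");
      std::abort();
    }

    int main() {
      int attempts = 0;
      void* p = AllocateWithRetry(
          [&]() -> Result {
            ++attempts;
            return {attempts < 3 ? nullptr : static_cast<void*>(&attempts)};
          },
          [] { std::puts("collect some"); },
          [] { std::puts("collect all"); });
      std::printf("succeeded on attempt %d: %p\n", attempts, p);
      return 0;
    }
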
650 | 638 |
651 #define CALL_AND_RETRY_OR_DIE( \ | 639 #define CALL_AND_RETRY_OR_DIE(ISOLATE, FUNCTION_CALL, RETURN_VALUE, \ |
652 ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY) \ | 640 RETURN_EMPTY) \ |
653 CALL_AND_RETRY( \ | 641 CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY) |
654 ISOLATE, \ | |
655 FUNCTION_CALL, \ | |
656 RETURN_VALUE, \ | |
657 RETURN_EMPTY) | |
658 | 642 |
659 #define CALL_HEAP_FUNCTION(ISOLATE, FUNCTION_CALL, TYPE) \ | 643 #define CALL_HEAP_FUNCTION(ISOLATE, FUNCTION_CALL, TYPE) \ |
660 CALL_AND_RETRY_OR_DIE(ISOLATE, \ | 644 CALL_AND_RETRY_OR_DIE(ISOLATE, FUNCTION_CALL, \ |
661 FUNCTION_CALL, \ | |
662 return Handle<TYPE>(TYPE::cast(__object__), ISOLATE), \ | 645 return Handle<TYPE>(TYPE::cast(__object__), ISOLATE), \ |
663 return Handle<TYPE>()) \ | 646 return Handle<TYPE>()) |
664 | 647 |
665 | 648 |
666 #define CALL_HEAP_FUNCTION_VOID(ISOLATE, FUNCTION_CALL) \ | 649 #define CALL_HEAP_FUNCTION_VOID(ISOLATE, FUNCTION_CALL) \ |
667 CALL_AND_RETRY_OR_DIE(ISOLATE, FUNCTION_CALL, return, return) | 650 CALL_AND_RETRY_OR_DIE(ISOLATE, FUNCTION_CALL, return, return) |
668 | 651 |
669 | 652 |
670 void ExternalStringTable::AddString(String* string) { | 653 void ExternalStringTable::AddString(String* string) { |
671 DCHECK(string->IsExternalString()); | 654 DCHECK(string->IsExternalString()); |
672 if (heap_->InNewSpace(string)) { | 655 if (heap_->InNewSpace(string)) { |
673 new_space_strings_.Add(string); | 656 new_space_strings_.Add(string); |
674 } else { | 657 } else { |
675 old_space_strings_.Add(string); | 658 old_space_strings_.Add(string); |
676 } | 659 } |
(...skipping 92 matching lines...) |
769 isolate->heap()->no_weak_object_verification_scope_depth_--; | 752 isolate->heap()->no_weak_object_verification_scope_depth_--; |
770 } | 753 } |
771 #endif | 754 #endif |
772 | 755 |
773 | 756 |
774 GCCallbacksScope::GCCallbacksScope(Heap* heap) : heap_(heap) { | 757 GCCallbacksScope::GCCallbacksScope(Heap* heap) : heap_(heap) { |
775 heap_->gc_callbacks_depth_++; | 758 heap_->gc_callbacks_depth_++; |
776 } | 759 } |
777 | 760 |
778 | 761 |
779 GCCallbacksScope::~GCCallbacksScope() { | 762 GCCallbacksScope::~GCCallbacksScope() { heap_->gc_callbacks_depth_--; } |
780 heap_->gc_callbacks_depth_--; | |
781 } | |
782 | 763 |
783 | 764 |
784 bool GCCallbacksScope::CheckReenter() { | 765 bool GCCallbacksScope::CheckReenter() { |
785 return heap_->gc_callbacks_depth_ == 1; | 766 return heap_->gc_callbacks_depth_ == 1; |
786 } | 767 } |
787 | 768 |
788 | 769 |
789 void VerifyPointersVisitor::VisitPointers(Object** start, Object** end) { | 770 void VerifyPointersVisitor::VisitPointers(Object** start, Object** end) { |
790 for (Object** current = start; current < end; current++) { | 771 for (Object** current = start; current < end; current++) { |
791 if ((*current)->IsHeapObject()) { | 772 if ((*current)->IsHeapObject()) { |
792 HeapObject* object = HeapObject::cast(*current); | 773 HeapObject* object = HeapObject::cast(*current); |
793 CHECK(object->GetIsolate()->heap()->Contains(object)); | 774 CHECK(object->GetIsolate()->heap()->Contains(object)); |
794 CHECK(object->map()->IsMap()); | 775 CHECK(object->map()->IsMap()); |
795 } | 776 } |
796 } | 777 } |
797 } | 778 } |
798 | 779 |
799 | 780 |
800 void VerifySmisVisitor::VisitPointers(Object** start, Object** end) { | 781 void VerifySmisVisitor::VisitPointers(Object** start, Object** end) { |
801 for (Object** current = start; current < end; current++) { | 782 for (Object** current = start; current < end; current++) { |
802 CHECK((*current)->IsSmi()); | 783 CHECK((*current)->IsSmi()); |
803 } | 784 } |
804 } | 785 } |
| 786 } |
| 787 } // namespace v8::internal |
805 | 788 |
806 | 789 #endif // V8_HEAP_HEAP_INL_H_ |
807 } } // namespace v8::internal | |
808 | |
809 #endif // V8_HEAP_INL_H_ | |