Chromium Code Reviews

Unified Diff: src/heap-inl.h

Issue 8139027: Version 3.6.5 (Closed) Base URL: http://v8.googlecode.com/svn/trunk/
Patch Set: '' Created 9 years, 2 months ago
 // Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 15 matching lines...)
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

 #ifndef V8_HEAP_INL_H_
 #define V8_HEAP_INL_H_

 #include "heap.h"
 #include "isolate.h"
 #include "list-inl.h"
 #include "objects.h"
 #include "v8-counters.h"
+#include "store-buffer.h"
+#include "store-buffer-inl.h"

 namespace v8 {
 namespace internal {

 void PromotionQueue::insert(HeapObject* target, int size) {
+  if (NewSpacePage::IsAtStart(reinterpret_cast<Address>(rear_))) {
+    NewSpacePage* rear_page =
+        NewSpacePage::FromAddress(reinterpret_cast<Address>(rear_));
+    ASSERT(!rear_page->prev_page()->is_anchor());
+    rear_ = reinterpret_cast<intptr_t*>(rear_page->prev_page()->body_limit());
+  }
   *(--rear_) = reinterpret_cast<intptr_t>(target);
   *(--rear_) = size;
   // Assert no overflow into live objects.
-  ASSERT(reinterpret_cast<Address>(rear_) >= HEAP->new_space()->top());
+#ifdef DEBUG
+  SemiSpace::AssertValidRange(HEAP->new_space()->top(),
+                              reinterpret_cast<Address>(rear_));
+#endif
 }
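
The new block at the top of PromotionQueue::insert() handles the queue's storage crossing a page boundary: the queue grows downward through to-space, so when rear_ sits at the start of a page body it must first hop to the end (body_limit) of the previous page. A minimal standalone sketch of the same wrap-around pattern, with toy types standing in for V8's NewSpacePage:

    #include <cassert>
    #include <cstdint>

    // Toy stand-ins for V8's page machinery; illustration only.
    struct ToyPage {
      uintptr_t body_start;  // lowest usable address of this page's body
      uintptr_t body_limit;  // one past the highest usable address
      ToyPage* prev_page;    // neighboring page the queue wraps into
    };

    // Push one word onto a queue that grows downward across pages.
    // The caller tracks which page `rear` currently points into.
    intptr_t* PushWord(intptr_t* rear, ToyPage** page, intptr_t value) {
      if (reinterpret_cast<uintptr_t>(rear) == (*page)->body_start) {
        assert((*page)->prev_page != nullptr);  // V8: ASSERT(!...is_anchor())
        *page = (*page)->prev_page;
        rear = reinterpret_cast<intptr_t*>((*page)->body_limit);
      }
      *(--rear) = value;
      return rear;
    }
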


 int Heap::MaxObjectSizeInPagedSpace() {
   return Page::kMaxHeapObjectSize;
 }


 MaybeObject* Heap::AllocateStringFromUtf8(Vector<const char> str,
                                           PretenureFlag pretenure) {
(...skipping 22 matching lines...)
   if (str.length() > SeqAsciiString::kMaxLength) {
     return Failure::OutOfMemoryException();
   }
   // Compute map and object size.
   Map* map = ascii_symbol_map();
   int size = SeqAsciiString::SizeFor(str.length());

   // Allocate string.
   Object* result;
   { MaybeObject* maybe_result = (size > MaxObjectSizeInPagedSpace())
-        ? lo_space_->AllocateRaw(size)
+        ? lo_space_->AllocateRaw(size, NOT_EXECUTABLE)
         : old_data_space_->AllocateRaw(size);
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }

   reinterpret_cast<HeapObject*>(result)->set_map(map);
   // Set length and hash fields of the allocated string.
   String* answer = String::cast(result);
   answer->set_length(str.length());
   answer->set_hash_field(hash_field);

(...skipping 12 matching lines...)
   if (str.length() > SeqTwoByteString::kMaxLength) {
     return Failure::OutOfMemoryException();
   }
   // Compute map and object size.
   Map* map = symbol_map();
   int size = SeqTwoByteString::SizeFor(str.length());

   // Allocate string.
   Object* result;
   { MaybeObject* maybe_result = (size > MaxObjectSizeInPagedSpace())
-        ? lo_space_->AllocateRaw(size)
+        ? lo_space_->AllocateRaw(size, NOT_EXECUTABLE)
         : old_data_space_->AllocateRaw(size);
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }

   reinterpret_cast<HeapObject*>(result)->set_map(map);
   // Set length and hash fields of the allocated string.
   String* answer = String::cast(result);
   answer->set_length(str.length());
   answer->set_hash_field(hash_field);

(...skipping 43 matching lines...)
     }
   }

   if (OLD_POINTER_SPACE == space) {
     result = old_pointer_space_->AllocateRaw(size_in_bytes);
   } else if (OLD_DATA_SPACE == space) {
     result = old_data_space_->AllocateRaw(size_in_bytes);
   } else if (CODE_SPACE == space) {
     result = code_space_->AllocateRaw(size_in_bytes);
   } else if (LO_SPACE == space) {
-    result = lo_space_->AllocateRaw(size_in_bytes);
+    result = lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE);
   } else if (CELL_SPACE == space) {
     result = cell_space_->AllocateRaw(size_in_bytes);
   } else {
     ASSERT(MAP_SPACE == space);
     result = map_space_->AllocateRaw(size_in_bytes);
   }
   if (result->IsFailure()) old_gen_exhausted_ = true;
   return result;
 }

(...skipping 63 matching lines...)

 bool Heap::InNewSpace(Object* object) {
   bool result = new_space_.Contains(object);
   ASSERT(!result ||                  // Either not in new space
          gc_state_ != NOT_IN_GC ||   // ... or in the middle of GC
          InToSpace(object));         // ... or in to-space (where we allocate).
   return result;
 }


+bool Heap::InNewSpace(Address addr) {
+  return new_space_.Contains(addr);
+}
+
+
 bool Heap::InFromSpace(Object* object) {
   return new_space_.FromSpaceContains(object);
 }


 bool Heap::InToSpace(Object* object) {
   return new_space_.ToSpaceContains(object);
 }


+bool Heap::OldGenerationAllocationLimitReached() {
+  if (!incremental_marking()->IsStopped()) return false;
+  return OldGenerationSpaceAvailable() < 0;
+}
+
+
 bool Heap::ShouldBePromoted(Address old_address, int object_size) {
   // An object should be promoted if:
   // - the object has survived a scavenge operation or
   // - to space is already 25% full.
-  return old_address < new_space_.age_mark()
-      || (new_space_.Size() + object_size) >= (new_space_.Capacity() >> 2);
+  NewSpacePage* page = NewSpacePage::FromAddress(old_address);
+  Address age_mark = new_space_.age_mark();
+  bool below_mark = page->IsFlagSet(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK) &&
+      (!page->ContainsLimit(age_mark) || old_address < age_mark);
+  return below_mark || (new_space_.Size() + object_size) >=
+      (new_space_.EffectiveCapacity() >> 2);
 }
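
ShouldBePromoted() keeps the two triggers named in its comment, but the "survived a scavenge" test is now answered per page via the NEW_SPACE_BELOW_AGE_MARK flag rather than a single address comparison, and the fullness test now measures against EffectiveCapacity(). To make the ">> 2" (25%) threshold concrete, a toy calculation with assumed figures:

    #include <cstdio>

    int main() {
      // Assumed figures for illustration; real capacities depend on
      // configuration.
      const long effective_capacity = 8 * 1024 * 1024;  // 8 MB to-space
      const long threshold = effective_capacity >> 2;   // 25% => 2 MB
      long in_use = 2 * 1024 * 1024 - 64;  // bytes already in to-space
      int object_size = 128;
      bool promote = (in_use + object_size) >= threshold;
      std::printf("promote: %s\n", promote ? "yes" : "no");  // prints "yes"
      return 0;
    }
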


 void Heap::RecordWrite(Address address, int offset) {
-  if (new_space_.Contains(address)) return;
-  ASSERT(!new_space_.FromSpaceContains(address));
-  SLOW_ASSERT(Contains(address + offset));
-  Page::FromAddress(address)->MarkRegionDirty(address + offset);
+  if (!InNewSpace(address)) store_buffer_.Mark(address + offset);
 }


 void Heap::RecordWrites(Address address, int start, int len) {
-  if (new_space_.Contains(address)) return;
-  ASSERT(!new_space_.FromSpaceContains(address));
-  Page* page = Page::FromAddress(address);
-  page->SetRegionMarks(page->GetRegionMarks() |
-      page->GetRegionMaskForSpan(address + start, len * kPointerSize));
+  if (!InNewSpace(address)) {
+    for (int i = 0; i < len; i++) {
+      store_buffer_.Mark(address + start + i * kPointerSize);
+    }
+  }
 }
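
The rewritten RecordWrite()/RecordWrites() replace the old per-page dirty-region marks with the store buffer this patch set introduces: the barrier simply remembers old-space slots that may now hold new-space pointers, and only those slots are rescanned at the next scavenge. A minimal sketch under that assumption (toy container, not V8's StoreBuffer):

    #include <cstdint>
    #include <vector>

    // Toy write barrier: remember candidate slot addresses for the next
    // scavenge instead of marking dirty regions on every old-space page.
    class ToyStoreBuffer {
     public:
      void Mark(uintptr_t slot_address) { slots_.push_back(slot_address); }

      // Scavenge-time traversal: visit each remembered slot, then reset.
      template <typename Visitor>
      void IterateAndClear(Visitor&& visit) {
        for (uintptr_t slot : slots_) visit(slot);
        slots_.clear();
      }

     private:
      std::vector<uintptr_t> slots_;
    };
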


 OldSpace* Heap::TargetSpace(HeapObject* object) {
   InstanceType type = object->map()->instance_type();
   AllocationSpace space = TargetSpaceId(type);
   return (space == OLD_POINTER_SPACE)
       ? old_pointer_space_
       : old_data_space_;
 }
(...skipping 25 matching lines...)


 void Heap::CopyBlock(Address dst, Address src, int byte_size) {
   ASSERT(IsAligned(byte_size, kPointerSize));
   CopyWords(reinterpret_cast<Object**>(dst),
             reinterpret_cast<Object**>(src),
             byte_size / kPointerSize);
 }


-void Heap::CopyBlockToOldSpaceAndUpdateRegionMarks(Address dst,
-                                                   Address src,
-                                                   int byte_size) {
-  ASSERT(IsAligned(byte_size, kPointerSize));
-
-  Page* page = Page::FromAddress(dst);
-  uint32_t marks = page->GetRegionMarks();
-
-  for (int remaining = byte_size / kPointerSize;
-       remaining > 0;
-       remaining--) {
-    Memory::Object_at(dst) = Memory::Object_at(src);
-
-    if (InNewSpace(Memory::Object_at(dst))) {
-      marks |= page->GetRegionMaskForAddress(dst);
-    }
-
-    dst += kPointerSize;
-    src += kPointerSize;
-  }
-
-  page->SetRegionMarks(marks);
-}
-
-
 void Heap::MoveBlock(Address dst, Address src, int byte_size) {
   ASSERT(IsAligned(byte_size, kPointerSize));

   int size_in_words = byte_size / kPointerSize;

   if ((dst < src) || (dst >= (src + byte_size))) {
     Object** src_slot = reinterpret_cast<Object**>(src);
     Object** dst_slot = reinterpret_cast<Object**>(dst);
     Object** end_slot = src_slot + size_in_words;

     while (src_slot != end_slot) {
       *dst_slot++ = *src_slot++;
     }
   } else {
     memmove(dst, src, byte_size);
   }
 }


-void Heap::MoveBlockToOldSpaceAndUpdateRegionMarks(Address dst,
-                                                   Address src,
-                                                   int byte_size) {
-  ASSERT(IsAligned(byte_size, kPointerSize));
-  ASSERT((dst < src) || (dst >= (src + byte_size)));
-
-  CopyBlockToOldSpaceAndUpdateRegionMarks(dst, src, byte_size);
-}
-
-
 void Heap::ScavengePointer(HeapObject** p) {
   ScavengeObject(p, *p);
 }


 void Heap::ScavengeObject(HeapObject** p, HeapObject* object) {
   ASSERT(HEAP->InFromSpace(object));

   // We use the first word (where the map pointer usually is) of a heap
   // object to record the forwarding pointer. A forwarding pointer can
   // point to an old space, the code space, or the to space of the new
   // generation.
   MapWord first_word = object->map_word();

   // If the first word is a forwarding address, the object has already been
   // copied.
   if (first_word.IsForwardingAddress()) {
-    *p = first_word.ToForwardingAddress();
+    HeapObject* dest = first_word.ToForwardingAddress();
+    ASSERT(HEAP->InFromSpace(*p));
+    *p = dest;
     return;
   }

   // Call the slow part of scavenge object.
   return ScavengeObjectSlow(p, object);
 }
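
The comment in ScavengeObject() describes the classic forwarding-pointer trick: once an object has been copied, its first word is overwritten with the new address so that later visitors can simply follow it. A self-contained illustration of one possible encoding (V8's MapWord tagging differs in detail):

    #include <cassert>
    #include <cstdint>

    // Heap objects are at least word-aligned, so the low bit of the first
    // word is free to distinguish a map pointer from a forwarding address.
    constexpr uintptr_t kForwardingTag = 1;

    inline uintptr_t EncodeForwarding(uintptr_t new_address) {
      assert((new_address & kForwardingTag) == 0);  // alignment assumption
      return new_address | kForwardingTag;
    }

    inline bool IsForwardingAddress(uintptr_t first_word) {
      return (first_word & kForwardingTag) != 0;
    }

    inline uintptr_t ToForwardingAddress(uintptr_t first_word) {
      return first_word & ~kForwardingTag;
    }
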


 bool Heap::CollectGarbage(AllocationSpace space) {
   return CollectGarbage(space, SelectGarbageCollector(space));
(...skipping 24 matching lines...)
   int amount = amount_of_external_allocated_memory_ + change_in_bytes;
   if (change_in_bytes >= 0) {
     // Avoid overflow.
     if (amount > amount_of_external_allocated_memory_) {
       amount_of_external_allocated_memory_ = amount;
     }
     int amount_since_last_global_gc =
         amount_of_external_allocated_memory_ -
         amount_of_external_allocated_memory_at_last_global_gc_;
     if (amount_since_last_global_gc > external_allocation_limit_) {
-      CollectAllGarbage(false);
+      CollectAllGarbage(kNoGCFlags);
     }
   } else {
     // Avoid underflow.
     if (amount >= 0) {
       amount_of_external_allocated_memory_ = amount;
     }
   }
   ASSERT(amount_of_external_allocated_memory_ >= 0);
   return amount_of_external_allocated_memory_;
 }


 void Heap::SetLastScriptId(Object* last_script_id) {
   roots_[kLastScriptIdRootIndex] = last_script_id;
 }

+
 Isolate* Heap::isolate() {
   return reinterpret_cast<Isolate*>(reinterpret_cast<intptr_t>(this) -
       reinterpret_cast<size_t>(reinterpret_cast<Isolate*>(4)->heap()) + 4);
 }
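
Heap::isolate() is a container-of computation: it fabricates a small non-null dummy pointer (4) to measure the byte offset of the heap_ field inside Isolate, then subtracts that offset from `this`. The same pattern with ordinary standard-layout types, where offsetof can be used directly:

    #include <cstddef>
    #include <cstdint>

    struct Engine;  // forward declaration

    struct Heaplet {
      int bytes_used;
      Engine* engine();  // recover the enclosing Engine from `this`
    };

    struct Engine {
      int id;
      Heaplet heaplet;
    };

    inline Engine* Heaplet::engine() {
      return reinterpret_cast<Engine*>(
          reinterpret_cast<uintptr_t>(this) - offsetof(Engine, heaplet));
    }
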


 #ifdef DEBUG
 #define GC_GREEDY_CHECK() \
   if (FLAG_gc_greedy) HEAP->GarbageCollectionGreedyCheck()
 #else
(...skipping 192 matching lines...)
   elements_[hash].output = heap_number;
   return heap_number;
 }


 Heap* _inline_get_heap_() {
   return HEAP;
 }


-void MarkCompactCollector::SetMark(HeapObject* obj) {
-  tracer_->increment_marked_count();
-#ifdef DEBUG
-  UpdateLiveObjectCount(obj);
-#endif
-  obj->SetMark();
-}
-
-
 } }  // namespace v8::internal

 #endif  // V8_HEAP_INL_H_
