Chromium Code Reviews

Side by Side Diff: src/heap.cc

Issue 11085070: Enable --verify-heap in release mode (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: After rebase plus one new issue fix (created 8 years, 2 months ago)
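In broad strokes, the patch lifts the FLAG_verify_heap checks out of #ifdef DEBUG so that release builds can honour --verify-heap, and upgrades the verifier's ASSERTs to CHECKs so the checks do not compile away outside debug builds. A minimal sketch of the before/after shape (a sketch, not lines from the patch):

// Before: the whole check compiled away outside DEBUG builds.
#ifdef DEBUG
  if (FLAG_verify_heap) Verify();
#endif

// After: the flag test is always compiled; verification remains opt-in
// at runtime via --verify-heap.
  if (FLAG_verify_heap) Verify();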
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 313 matching lines...)
324 324
325 325
326 void Heap::PrintShortHeapStatistics() { 326 void Heap::PrintShortHeapStatistics() {
327 if (!FLAG_trace_gc_verbose) return; 327 if (!FLAG_trace_gc_verbose) return;
328 PrintPID("Memory allocator, used: %6" V8_PTR_PREFIX "d KB" 328 PrintPID("Memory allocator, used: %6" V8_PTR_PREFIX "d KB"
329 ", available: %6" V8_PTR_PREFIX "d KB\n", 329 ", available: %6" V8_PTR_PREFIX "d KB\n",
330 isolate_->memory_allocator()->Size() / KB, 330 isolate_->memory_allocator()->Size() / KB,
331 isolate_->memory_allocator()->Available() / KB); 331 isolate_->memory_allocator()->Available() / KB);
332 PrintPID("New space, used: %6" V8_PTR_PREFIX "d KB" 332 PrintPID("New space, used: %6" V8_PTR_PREFIX "d KB"
333 ", available: %6" V8_PTR_PREFIX "d KB" 333 ", available: %6" V8_PTR_PREFIX "d KB"
334 ", committed: %6" V8_PTR_PREFIX "d KB\n", 334 ", committed: %6" V8_PTR_PREFIX "d KB"
335 ", %p\n",
335 new_space_.Size() / KB, 336 new_space_.Size() / KB,
336 new_space_.Available() / KB, 337 new_space_.Available() / KB,
337 new_space_.CommittedMemory() / KB); 338 new_space_.CommittedMemory() / KB,
339 new_space_.bottom());
mvstanton1 2012/10/11 12:22:45 oops, should not have checked in this debugging code.
338 PrintPID("Old pointers, used: %6" V8_PTR_PREFIX "d KB" 340 PrintPID("Old pointers, used: %6" V8_PTR_PREFIX "d KB"
339 ", available: %6" V8_PTR_PREFIX "d KB" 341 ", available: %6" V8_PTR_PREFIX "d KB"
340 ", committed: %6" V8_PTR_PREFIX "d KB\n", 342 ", committed: %6" V8_PTR_PREFIX "d KB"
343 ", %p\n",
341 old_pointer_space_->SizeOfObjects() / KB, 344 old_pointer_space_->SizeOfObjects() / KB,
342 old_pointer_space_->Available() / KB, 345 old_pointer_space_->Available() / KB,
343 old_pointer_space_->CommittedMemory() / KB); 346 old_pointer_space_->CommittedMemory() / KB,
347 reinterpret_cast<void *>(
348 old_pointer_space_->FirstPage()->address()));
344 PrintPID("Old data space, used: %6" V8_PTR_PREFIX "d KB" 349 PrintPID("Old data space, used: %6" V8_PTR_PREFIX "d KB"
345 ", available: %6" V8_PTR_PREFIX "d KB" 350 ", available: %6" V8_PTR_PREFIX "d KB"
346 ", committed: %6" V8_PTR_PREFIX "d KB\n", 351 ", committed: %6" V8_PTR_PREFIX "d KB"
352 ", %p\n",
347 old_data_space_->SizeOfObjects() / KB, 353 old_data_space_->SizeOfObjects() / KB,
348 old_data_space_->Available() / KB, 354 old_data_space_->Available() / KB,
349 old_data_space_->CommittedMemory() / KB); 355 old_data_space_->CommittedMemory() / KB,
356 reinterpret_cast<void *>(old_data_space_->FirstPage()->address()));
350 PrintPID("Code space, used: %6" V8_PTR_PREFIX "d KB" 357 PrintPID("Code space, used: %6" V8_PTR_PREFIX "d KB"
351 ", available: %6" V8_PTR_PREFIX "d KB" 358 ", available: %6" V8_PTR_PREFIX "d KB"
352 ", committed: %6" V8_PTR_PREFIX "d KB\n", 359 ", committed: %6" V8_PTR_PREFIX "d KB"
360 ", %p\n",
353 code_space_->SizeOfObjects() / KB, 361 code_space_->SizeOfObjects() / KB,
354 code_space_->Available() / KB, 362 code_space_->Available() / KB,
355 code_space_->CommittedMemory() / KB); 363 code_space_->CommittedMemory() / KB,
364 reinterpret_cast<void *>(code_space_->FirstPage()->address()));
356 PrintPID("Map space, used: %6" V8_PTR_PREFIX "d KB" 365 PrintPID("Map space, used: %6" V8_PTR_PREFIX "d KB"
357 ", available: %6" V8_PTR_PREFIX "d KB" 366 ", available: %6" V8_PTR_PREFIX "d KB"
358 ", committed: %6" V8_PTR_PREFIX "d KB\n", 367 ", committed: %6" V8_PTR_PREFIX "d KB"
368 ", %p\n",
359 map_space_->SizeOfObjects() / KB, 369 map_space_->SizeOfObjects() / KB,
360 map_space_->Available() / KB, 370 map_space_->Available() / KB,
361 map_space_->CommittedMemory() / KB); 371 map_space_->CommittedMemory() / KB,
372 reinterpret_cast<void *>(map_space_->FirstPage()->address()));
362 PrintPID("Cell space, used: %6" V8_PTR_PREFIX "d KB" 373 PrintPID("Cell space, used: %6" V8_PTR_PREFIX "d KB"
363 ", available: %6" V8_PTR_PREFIX "d KB" 374 ", available: %6" V8_PTR_PREFIX "d KB"
364 ", committed: %6" V8_PTR_PREFIX "d KB\n", 375 ", committed: %6" V8_PTR_PREFIX "d KB"
376 ", %p\n",
365 cell_space_->SizeOfObjects() / KB, 377 cell_space_->SizeOfObjects() / KB,
366 cell_space_->Available() / KB, 378 cell_space_->Available() / KB,
367 cell_space_->CommittedMemory() / KB); 379 cell_space_->CommittedMemory() / KB,
380 reinterpret_cast<void *>(cell_space_->FirstPage()->address()));
368 PrintPID("Large object space, used: %6" V8_PTR_PREFIX "d KB" 381 PrintPID("Large object space, used: %6" V8_PTR_PREFIX "d KB"
369 ", available: %6" V8_PTR_PREFIX "d KB" 382 ", available: %6" V8_PTR_PREFIX "d KB"
370 ", committed: %6" V8_PTR_PREFIX "d KB\n", 383 ", committed: %6" V8_PTR_PREFIX "d KB"
384 ", %p\n",
371 lo_space_->SizeOfObjects() / KB, 385 lo_space_->SizeOfObjects() / KB,
372 lo_space_->Available() / KB, 386 lo_space_->Available() / KB,
373 lo_space_->CommittedMemory() / KB); 387 lo_space_->CommittedMemory() / KB,
388 reinterpret_cast<void *>(lo_space_->first_page()->address()));
374 PrintPID("All spaces, used: %6" V8_PTR_PREFIX "d KB" 389 PrintPID("All spaces, used: %6" V8_PTR_PREFIX "d KB"
375 ", available: %6" V8_PTR_PREFIX "d KB" 390 ", available: %6" V8_PTR_PREFIX "d KB"
376 ", committed: %6" V8_PTR_PREFIX "d KB\n", 391 ", committed: %6" V8_PTR_PREFIX "d KB\n",
377 this->SizeOfObjects() / KB, 392 this->SizeOfObjects() / KB,
378 this->Available() / KB, 393 this->Available() / KB,
379 this->CommittedMemory() / KB); 394 this->CommittedMemory() / KB);
380 PrintPID("Total time spent in GC : %d ms\n", total_gc_time_ms_); 395 PrintPID("Total time spent in GC : %d ms\n", total_gc_time_ms_);
381 } 396 }
382 397
383 398
(...skipping 13 matching lines...)
397 if (FLAG_log_gc) new_space_.ReportStatistics(); 412 if (FLAG_log_gc) new_space_.ReportStatistics();
398 #endif // DEBUG 413 #endif // DEBUG
399 } 414 }
400 415
401 416
402 void Heap::GarbageCollectionPrologue() { 417 void Heap::GarbageCollectionPrologue() {
403 isolate_->transcendental_cache()->Clear(); 418 isolate_->transcendental_cache()->Clear();
404 ClearJSFunctionResultCaches(); 419 ClearJSFunctionResultCaches();
405 gc_count_++; 420 gc_count_++;
406 unflattened_strings_length_ = 0; 421 unflattened_strings_length_ = 0;
407 #ifdef DEBUG
408 ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
409 allow_allocation(false);
410 422
411 if (FLAG_verify_heap) { 423 if (FLAG_verify_heap) {
412 Verify(); 424 Verify();
413 } 425 }
414 426
427 #ifdef DEBUG
428 ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
429 allow_allocation(false);
430
415 if (FLAG_gc_verbose) Print(); 431 if (FLAG_gc_verbose) Print();
416 #endif // DEBUG 432 #endif // DEBUG
417 433
418 #if defined(DEBUG) 434 #if defined(DEBUG)
419 ReportStatisticsBeforeGC(); 435 ReportStatisticsBeforeGC();
420 #endif // DEBUG 436 #endif // DEBUG
421 437
422 LiveObjectList::GCPrologue(); 438 LiveObjectList::GCPrologue();
423 store_buffer()->GCPrologue(); 439 store_buffer()->GCPrologue();
424 } 440 }
(...skipping 15 matching lines...)
440 space != NULL; 456 space != NULL;
441 space = spaces.next()) { 457 space = spaces.next()) {
442 space->RepairFreeListsAfterBoot(); 458 space->RepairFreeListsAfterBoot();
443 } 459 }
444 } 460 }
445 461
446 462
447 void Heap::GarbageCollectionEpilogue() { 463 void Heap::GarbageCollectionEpilogue() {
448 store_buffer()->GCEpilogue(); 464 store_buffer()->GCEpilogue();
449 LiveObjectList::GCEpilogue(); 465 LiveObjectList::GCEpilogue();
466
450 #ifdef DEBUG 467 #ifdef DEBUG
451 allow_allocation(true); 468 allow_allocation(true);
Michael Starzinger 2012/10/11 12:42:46 Neither the zapping nor the verification should do
mvstanton1 2012/10/12 08:40:50 Done.
452 ZapFromSpace(); 469 #endif
470
471 // In release mode, we only zap the from space under heap verification.
472 #ifndef DEBUG
473 if (FLAG_verify_heap) {
474 #endif
475 ZapFromSpace();
476 #ifndef DEBUG
477 }
478 #endif
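One equivalent way to read the split #ifndef DEBUG blocks above (a sketch of the intended logic, not code from the patch): debug builds keep zapping the from space on every epilogue, while release builds zap only when --verify-heap is set, since the zap pattern exists to make stale from-space words easy to spot during verification.

#ifdef DEBUG
  const bool zap_from_space = true;              // debug builds: always zap
#else
  const bool zap_from_space = FLAG_verify_heap;  // release builds: only under --verify-heap
#endif
  if (zap_from_space) ZapFromSpace();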
453 479
454 if (FLAG_verify_heap) { 480 if (FLAG_verify_heap) {
455 Verify(); 481 Verify();
456 } 482 }
457 483
484 #ifdef DEBUG
458 if (FLAG_print_global_handles) isolate_->global_handles()->Print(); 485 if (FLAG_print_global_handles) isolate_->global_handles()->Print();
459 if (FLAG_print_handles) PrintHandles(); 486 if (FLAG_print_handles) PrintHandles();
460 if (FLAG_gc_verbose) Print(); 487 if (FLAG_gc_verbose) Print();
461 if (FLAG_code_stats) ReportCodeStatistics("After GC"); 488 if (FLAG_code_stats) ReportCodeStatistics("After GC");
462 #endif 489 #endif
463 490
464 isolate_->counters()->alive_after_last_gc()->Set( 491 isolate_->counters()->alive_after_last_gc()->Set(
465 static_cast<int>(SizeOfObjects())); 492 static_cast<int>(SizeOfObjects()));
466 493
467 isolate_->counters()->symbol_table_capacity()->Set( 494 isolate_->counters()->symbol_table_capacity()->Set(
(...skipping 176 matching lines...)
644 void Heap::PerformScavenge() { 671 void Heap::PerformScavenge() {
645 GCTracer tracer(this, NULL, NULL); 672 GCTracer tracer(this, NULL, NULL);
646 if (incremental_marking()->IsStopped()) { 673 if (incremental_marking()->IsStopped()) {
647 PerformGarbageCollection(SCAVENGER, &tracer); 674 PerformGarbageCollection(SCAVENGER, &tracer);
648 } else { 675 } else {
649 PerformGarbageCollection(MARK_COMPACTOR, &tracer); 676 PerformGarbageCollection(MARK_COMPACTOR, &tracer);
650 } 677 }
651 } 678 }
652 679
653 680
654 #ifdef DEBUG
655 // Helper class for verifying the symbol table. 681 // Helper class for verifying the symbol table.
656 class SymbolTableVerifier : public ObjectVisitor { 682 class SymbolTableVerifier : public ObjectVisitor {
657 public: 683 public:
658 void VisitPointers(Object** start, Object** end) { 684 void VisitPointers(Object** start, Object** end) {
659 // Visit all HeapObject pointers in [start, end). 685 // Visit all HeapObject pointers in [start, end).
660 for (Object** p = start; p < end; p++) { 686 for (Object** p = start; p < end; p++) {
661 if ((*p)->IsHeapObject()) { 687 if ((*p)->IsHeapObject()) {
662 // Check that the symbol is actually a symbol. 688 // Check that the symbol is actually a symbol.
663 ASSERT((*p)->IsTheHole() || (*p)->IsUndefined() || (*p)->IsSymbol()); 689 CHECK((*p)->IsTheHole() || (*p)->IsUndefined() || (*p)->IsSymbol());
664 } 690 }
665 } 691 }
666 } 692 }
667 }; 693 };
668 #endif // DEBUG
669 694
670 695
671 static void VerifySymbolTable() { 696 static void VerifySymbolTable() {
672 #ifdef DEBUG
673 SymbolTableVerifier verifier; 697 SymbolTableVerifier verifier;
674 HEAP->symbol_table()->IterateElements(&verifier); 698 HEAP->symbol_table()->IterateElements(&verifier);
675 #endif // DEBUG
676 } 699 }
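VerifySymbolTable() loses its #ifdef DEBUG guards and the visitor above switches ASSERT to CHECK for the same reason: ASSERT-style checks normally compile to nothing in release builds, while CHECK-style checks stay live and abort on failure in every build mode. A rough illustration with hypothetical macro names, not V8's actual definitions:

#include <stdlib.h>  // for abort()

// Hypothetical macros for illustration only.
#ifdef DEBUG
#define MY_ASSERT(condition) do { if (!(condition)) abort(); } while (false)
#else
#define MY_ASSERT(condition) ((void) 0)  // vanishes in release builds
#endif
// A CHECK-style macro keeps the test regardless of build mode.
#define MY_CHECK(condition) do { if (!(condition)) abort(); } while (false)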
677 700
678 701
679 static bool AbortIncrementalMarkingAndCollectGarbage( 702 static bool AbortIncrementalMarkingAndCollectGarbage(
680 Heap* heap, 703 Heap* heap,
681 AllocationSpace space, 704 AllocationSpace space,
682 const char* gc_reason = NULL) { 705 const char* gc_reason = NULL) {
683 heap->mark_compact_collector()->SetFlags(Heap::kAbortIncrementalMarkingMask); 706 heap->mark_compact_collector()->SetFlags(Heap::kAbortIncrementalMarkingMask);
684 bool result = heap->CollectGarbage(space, gc_reason); 707 bool result = heap->CollectGarbage(space, gc_reason);
685 heap->mark_compact_collector()->SetFlags(Heap::kNoGCFlags); 708 heap->mark_compact_collector()->SetFlags(Heap::kNoGCFlags);
(...skipping 351 matching lines...)
1037 Object* object = *p; 1060 Object* object = *p;
1038 if (!heap_->InNewSpace(object)) return; 1061 if (!heap_->InNewSpace(object)) return;
1039 Heap::ScavengeObject(reinterpret_cast<HeapObject**>(p), 1062 Heap::ScavengeObject(reinterpret_cast<HeapObject**>(p),
1040 reinterpret_cast<HeapObject*>(object)); 1063 reinterpret_cast<HeapObject*>(object));
1041 } 1064 }
1042 1065
1043 Heap* heap_; 1066 Heap* heap_;
1044 }; 1067 };
1045 1068
1046 1069
1047 #ifdef DEBUG
1048 // Visitor class to verify pointers in code or data space do not point into 1070 // Visitor class to verify pointers in code or data space do not point into
1049 // new space. 1071 // new space.
1050 class VerifyNonPointerSpacePointersVisitor: public ObjectVisitor { 1072 class VerifyNonPointerSpacePointersVisitor: public ObjectVisitor {
1051 public: 1073 public:
1052 void VisitPointers(Object** start, Object**end) { 1074 void VisitPointers(Object** start, Object**end) {
1053 for (Object** current = start; current < end; current++) { 1075 for (Object** current = start; current < end; current++) {
1054 if ((*current)->IsHeapObject()) { 1076 if ((*current)->IsHeapObject()) {
1055 ASSERT(!HEAP->InNewSpace(HeapObject::cast(*current))); 1077 CHECK(!HEAP->InNewSpace(HeapObject::cast(*current)));
1056 } 1078 }
1057 } 1079 }
1058 } 1080 }
1059 }; 1081 };
1060 1082
1061 1083
1062 static void VerifyNonPointerSpacePointers() { 1084 static void VerifyNonPointerSpacePointers() {
1063 // Verify that there are no pointers to new space in spaces where we 1085 // Verify that there are no pointers to new space in spaces where we
1064 // do not expect them. 1086 // do not expect them.
1065 VerifyNonPointerSpacePointersVisitor v; 1087 VerifyNonPointerSpacePointersVisitor v;
1066 HeapObjectIterator code_it(HEAP->code_space()); 1088 HeapObjectIterator code_it(HEAP->code_space());
1067 for (HeapObject* object = code_it.Next(); 1089 for (HeapObject* object = code_it.Next();
1068 object != NULL; object = code_it.Next()) 1090 object != NULL; object = code_it.Next())
1069 object->Iterate(&v); 1091 object->Iterate(&v);
1070 1092
1071 // The old data space was normally swept conservatively so that the iterator 1093 // The old data space was normally swept conservatively so that the iterator
1072 // doesn't work, so we normally skip the next bit. 1094 // doesn't work, so we normally skip the next bit.
1073 if (!HEAP->old_data_space()->was_swept_conservatively()) { 1095 if (!HEAP->old_data_space()->was_swept_conservatively()) {
1074 HeapObjectIterator data_it(HEAP->old_data_space()); 1096 HeapObjectIterator data_it(HEAP->old_data_space());
1075 for (HeapObject* object = data_it.Next(); 1097 for (HeapObject* object = data_it.Next();
1076 object != NULL; object = data_it.Next()) 1098 object != NULL; object = data_it.Next())
1077 object->Iterate(&v); 1099 object->Iterate(&v);
1078 } 1100 }
1079 } 1101 }
1080 #endif
1081 1102
1082 1103
1083 void Heap::CheckNewSpaceExpansionCriteria() { 1104 void Heap::CheckNewSpaceExpansionCriteria() {
1084 if (new_space_.Capacity() < new_space_.MaximumCapacity() && 1105 if (new_space_.Capacity() < new_space_.MaximumCapacity() &&
1085 survived_since_last_expansion_ > new_space_.Capacity() && 1106 survived_since_last_expansion_ > new_space_.Capacity() &&
1086 !new_space_high_promotion_mode_active_) { 1107 !new_space_high_promotion_mode_active_) {
1087 // Grow the size of new space if there is room to grow, enough data 1108 // Grow the size of new space if there is room to grow, enough data
1088 // has survived scavenge since the last expansion and we are not in 1109 // has survived scavenge since the last expansion and we are not in
1089 // high promotion mode. 1110 // high promotion mode.
1090 new_space_.Grow(); 1111 new_space_.Grow();
(...skipping 118 matching lines...)
1209 return NULL; 1230 return NULL;
1210 } 1231 }
1211 1232
1212 private: 1233 private:
1213 Heap* heap_; 1234 Heap* heap_;
1214 }; 1235 };
1215 1236
1216 1237
1217 void Heap::Scavenge() { 1238 void Heap::Scavenge() {
1218 RelocationLock relocation_lock(this); 1239 RelocationLock relocation_lock(this);
1219 #ifdef DEBUG 1240
1220 if (FLAG_verify_heap) VerifyNonPointerSpacePointers(); 1241 if (FLAG_verify_heap) VerifyNonPointerSpacePointers();
1221 #endif
1222 1242
1223 gc_state_ = SCAVENGE; 1243 gc_state_ = SCAVENGE;
1224 1244
1225 // Implements Cheney's copying algorithm 1245 // Implements Cheney's copying algorithm
1226 LOG(isolate_, ResourceEvent("scavenge", "begin")); 1246 LOG(isolate_, ResourceEvent("scavenge", "begin"));
1227 1247
1228 // Clear descriptor cache. 1248 // Clear descriptor cache.
1229 isolate_->descriptor_lookup_cache()->Clear(); 1249 isolate_->descriptor_lookup_cache()->Clear();
1230 1250
1231 // Used for updating survived_since_last_expansion_ at function end. 1251 // Used for updating survived_since_last_expansion_ at function end.
(...skipping 3358 matching lines...)
4590 { MaybeObject* maybe_result = AllocateRaw(size, space, retry_space); 4610 { MaybeObject* maybe_result = AllocateRaw(size, space, retry_space);
4591 if (!maybe_result->ToObject(&result)) return maybe_result; 4611 if (!maybe_result->ToObject(&result)) return maybe_result;
4592 } 4612 }
4593 4613
4594 // Partially initialize the object. 4614 // Partially initialize the object.
4595 HeapObject::cast(result)->set_map_no_write_barrier(ascii_string_map()); 4615 HeapObject::cast(result)->set_map_no_write_barrier(ascii_string_map());
4596 String::cast(result)->set_length(length); 4616 String::cast(result)->set_length(length);
4597 String::cast(result)->set_hash_field(String::kEmptyHashField); 4617 String::cast(result)->set_hash_field(String::kEmptyHashField);
4598 ASSERT_EQ(size, HeapObject::cast(result)->Size()); 4618 ASSERT_EQ(size, HeapObject::cast(result)->Size());
4599 4619
4600 #ifdef DEBUG
4601 if (FLAG_verify_heap) { 4620 if (FLAG_verify_heap) {
4602 // Initialize string's content to ensure ASCII-ness (character range 0-127) 4621 // Initialize string's content to ensure ASCII-ness (character range 0-127)
4603 // as required when verifying the heap. 4622 // as required when verifying the heap.
4604 char* dest = SeqAsciiString::cast(result)->GetChars(); 4623 char* dest = SeqAsciiString::cast(result)->GetChars();
4605 memset(dest, 0x0F, length * kCharSize); 4624 memset(dest, 0x0F, length * kCharSize);
4606 } 4625 }
4607 #endif // DEBUG
4608 4626
4609 return result; 4627 return result;
4610 } 4628 }
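The memset with 0x0F matters because heap verification checks that a SeqAsciiString holds only characters in the 0-127 range, and a freshly allocated string body is otherwise uninitialized memory. A sketch of the kind of per-character check involved (hypothetical helper, not the verifier's actual code):

// Hypothetical helper for illustration: every byte must stay within the
// ASCII range 0-127, which is why the uninitialized buffer is pre-filled
// with an in-range byte such as 0x0F before the verifier can see it.
static bool IsAsciiClean(const char* chars, int length) {
  for (int i = 0; i < length; i++) {
    if (static_cast<unsigned char>(chars[i]) > 0x7F) return false;
  }
  return true;
}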
4611 4629
4612 4630
4613 MaybeObject* Heap::AllocateRawTwoByteString(int length, 4631 MaybeObject* Heap::AllocateRawTwoByteString(int length,
4614 PretenureFlag pretenure) { 4632 PretenureFlag pretenure) {
4615 if (length < 0 || length > SeqTwoByteString::kMaxLength) { 4633 if (length < 0 || length > SeqTwoByteString::kMaxLength) {
4616 return Failure::OutOfMemoryException(); 4634 return Failure::OutOfMemoryException();
4617 } 4635 }
(...skipping 64 matching lines...)
4682 4700
4683 MaybeObject* Heap::AllocateRawFixedArray(int length) { 4701 MaybeObject* Heap::AllocateRawFixedArray(int length) {
4684 if (length < 0 || length > FixedArray::kMaxLength) { 4702 if (length < 0 || length > FixedArray::kMaxLength) {
4685 return Failure::OutOfMemoryException(); 4703 return Failure::OutOfMemoryException();
4686 } 4704 }
4687 ASSERT(length > 0); 4705 ASSERT(length > 0);
4688 // Use the general function if we're forced to always allocate. 4706 // Use the general function if we're forced to always allocate.
4689 if (always_allocate()) return AllocateFixedArray(length, TENURED); 4707 if (always_allocate()) return AllocateFixedArray(length, TENURED);
4690 // Allocate the raw data for a fixed array. 4708 // Allocate the raw data for a fixed array.
4691 int size = FixedArray::SizeFor(length); 4709 int size = FixedArray::SizeFor(length);
4692 return size <= kMaxObjectSizeInNewSpace 4710 MaybeObject* retptr;
mvstanton1 2012/10/11 12:22:45 I think this was debugging, I will revert this block.
4693 ? new_space_.AllocateRaw(size) 4711 if (size <= kMaxObjectSizeInNewSpace) {
4694 : lo_space_->AllocateRaw(size, NOT_EXECUTABLE); 4712 retptr = new_space_.AllocateRaw(size);
4713 } else {
4714 retptr = lo_space_->AllocateRaw(size, NOT_EXECUTABLE);
4715 }
4716
4717 return retptr;
4695 } 4718 }
4696 4719
4697 4720
4698 MaybeObject* Heap::CopyFixedArrayWithMap(FixedArray* src, Map* map) { 4721 MaybeObject* Heap::CopyFixedArrayWithMap(FixedArray* src, Map* map) {
4699 int len = src->length(); 4722 int len = src->length();
4700 Object* obj; 4723 Object* obj;
4701 { MaybeObject* maybe_obj = AllocateRawFixedArray(len); 4724 { MaybeObject* maybe_obj = AllocateRawFixedArray(len);
4702 if (!maybe_obj->ToObject(&obj)) return maybe_obj; 4725 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
4703 } 4726 }
4704 if (InNewSpace(obj)) { 4727 if (InNewSpace(obj)) {
(...skipping 682 matching lines...)
5387 case CELL_SPACE: 5410 case CELL_SPACE:
5388 return cell_space_->Contains(addr); 5411 return cell_space_->Contains(addr);
5389 case LO_SPACE: 5412 case LO_SPACE:
5390 return lo_space_->SlowContains(addr); 5413 return lo_space_->SlowContains(addr);
5391 } 5414 }
5392 5415
5393 return false; 5416 return false;
5394 } 5417 }
5395 5418
5396 5419
5397 #ifdef DEBUG
5398 void Heap::Verify() { 5420 void Heap::Verify() {
5399 ASSERT(HasBeenSetUp()); 5421 CHECK(HasBeenSetUp());
5400 5422
5401 store_buffer()->Verify(); 5423 store_buffer()->Verify();
5402 5424
5403 VerifyPointersVisitor visitor; 5425 VerifyPointersVisitor visitor;
5404 IterateRoots(&visitor, VISIT_ONLY_STRONG); 5426 IterateRoots(&visitor, VISIT_ONLY_STRONG);
5405 5427
5406 new_space_.Verify(); 5428 new_space_.Verify();
5407 5429
5408 old_pointer_space_->Verify(&visitor); 5430 old_pointer_space_->Verify(&visitor);
5409 map_space_->Verify(&visitor); 5431 map_space_->Verify(&visitor);
5410 5432
5411 VerifyPointersVisitor no_dirty_regions_visitor; 5433 VerifyPointersVisitor no_dirty_regions_visitor;
5412 old_data_space_->Verify(&no_dirty_regions_visitor); 5434 old_data_space_->Verify(&no_dirty_regions_visitor);
5413 code_space_->Verify(&no_dirty_regions_visitor); 5435 code_space_->Verify(&no_dirty_regions_visitor);
5414 cell_space_->Verify(&no_dirty_regions_visitor); 5436 cell_space_->Verify(&no_dirty_regions_visitor);
5415 5437
5416 lo_space_->Verify(); 5438 lo_space_->Verify();
5417 } 5439 }
5418 5440
5419
5420 #endif // DEBUG
5421
5422
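With the surrounding #ifdef DEBUG / #endif pair removed, Heap::Verify() is now compiled into every build and only its call sites are gated on --verify-heap. The visitors it relies on follow the same shape as the SymbolTableVerifier shown earlier; a hedged sketch of such a visitor (hypothetical, not the patch's VerifyPointersVisitor):

// Hypothetical visitor for illustration, modeled on SymbolTableVerifier:
// every heap pointer encountered must refer to an object the heap knows about.
class PointerSanityVisitor : public ObjectVisitor {
 public:
  void VisitPointers(Object** start, Object** end) {
    for (Object** p = start; p < end; p++) {
      if ((*p)->IsHeapObject()) {
        CHECK(HEAP->Contains(HeapObject::cast(*p)));
      }
    }
  }
};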
5423 MaybeObject* Heap::LookupSymbol(Vector<const char> string) { 5441 MaybeObject* Heap::LookupSymbol(Vector<const char> string) {
5424 Object* symbol = NULL; 5442 Object* symbol = NULL;
5425 Object* new_table; 5443 Object* new_table;
5426 { MaybeObject* maybe_new_table = 5444 { MaybeObject* maybe_new_table =
5427 symbol_table()->LookupSymbol(string, &symbol); 5445 symbol_table()->LookupSymbol(string, &symbol);
5428 if (!maybe_new_table->ToObject(&new_table)) return maybe_new_table; 5446 if (!maybe_new_table->ToObject(&new_table)) return maybe_new_table;
5429 } 5447 }
5430 // Can't use set_symbol_table because SymbolTable::cast knows that 5448 // Can't use set_symbol_table because SymbolTable::cast knows that
5431 // SymbolTable is a singleton and checks for identity. 5449 // SymbolTable is a singleton and checks for identity.
5432 roots_[kSymbolTableRootIndex] = new_table; 5450 roots_[kSymbolTableRootIndex] = new_table;
(...skipping 69 matching lines...)
5502 5520
5503 5521
5504 bool Heap::LookupSymbolIfExists(String* string, String** symbol) { 5522 bool Heap::LookupSymbolIfExists(String* string, String** symbol) {
5505 if (string->IsSymbol()) { 5523 if (string->IsSymbol()) {
5506 *symbol = string; 5524 *symbol = string;
5507 return true; 5525 return true;
5508 } 5526 }
5509 return symbol_table()->LookupSymbolIfExists(string, symbol); 5527 return symbol_table()->LookupSymbolIfExists(string, symbol);
5510 } 5528 }
5511 5529
5512
5513 #ifdef DEBUG
5514 void Heap::ZapFromSpace() { 5530 void Heap::ZapFromSpace() {
5515 NewSpacePageIterator it(new_space_.FromSpaceStart(), 5531 NewSpacePageIterator it(new_space_.FromSpaceStart(),
5516 new_space_.FromSpaceEnd()); 5532 new_space_.FromSpaceEnd());
5517 while (it.has_next()) { 5533 while (it.has_next()) {
5518 NewSpacePage* page = it.next(); 5534 NewSpacePage* page = it.next();
5519 for (Address cursor = page->area_start(), limit = page->area_end(); 5535 for (Address cursor = page->area_start(), limit = page->area_end();
5520 cursor < limit; 5536 cursor < limit;
5521 cursor += kPointerSize) { 5537 cursor += kPointerSize) {
5522 Memory::Address_at(cursor) = kFromSpaceZapValue; 5538 Memory::Address_at(cursor) = kFromSpaceZapValue;
5523 } 5539 }
5524 } 5540 }
5525 } 5541 }
5526 #endif // DEBUG
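ZapFromSpace() fills the evacuated semispace with kFromSpaceZapValue so that any stale reference into from space shows up as an obviously bogus word; with its #ifdef DEBUG guard gone, it is also available to release builds running under --verify-heap. A hedged sketch of how such a word could be recognized (hypothetical helper, not part of the patch):

// Hypothetical helper for illustration: a slot that still holds the zap
// pattern points into the evacuated from space.
static bool SlotLooksZapped(Address slot) {
  return Memory::Address_at(slot) == kFromSpaceZapValue;
}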
5527 5542
5528 5543
5529 void Heap::IterateAndMarkPointersToFromSpace(Address start, 5544 void Heap::IterateAndMarkPointersToFromSpace(Address start,
5530 Address end, 5545 Address end,
5531 ObjectSlotCallback callback) { 5546 ObjectSlotCallback callback) {
5532 Address slot_address = start; 5547 Address slot_address = start;
5533 5548
5534 // We are not collecting slots on new space objects during mutation 5549 // We are not collecting slots on new space objects during mutation
5535 // thus we have to scan for pointers to evacuation candidates when we 5550 // thus we have to scan for pointers to evacuation candidates when we
5536 // promote objects. But we should not record any slots in non-black 5551 // promote objects. But we should not record any slots in non-black
(...skipping 716 matching lines...)
6253 roots_[kStackLimitRootIndex] = 6268 roots_[kStackLimitRootIndex] =
6254 reinterpret_cast<Object*>( 6269 reinterpret_cast<Object*>(
6255 (isolate_->stack_guard()->jslimit() & ~kSmiTagMask) | kSmiTag); 6270 (isolate_->stack_guard()->jslimit() & ~kSmiTagMask) | kSmiTag);
6256 roots_[kRealStackLimitRootIndex] = 6271 roots_[kRealStackLimitRootIndex] =
6257 reinterpret_cast<Object*>( 6272 reinterpret_cast<Object*>(
6258 (isolate_->stack_guard()->real_jslimit() & ~kSmiTagMask) | kSmiTag); 6273 (isolate_->stack_guard()->real_jslimit() & ~kSmiTagMask) | kSmiTag);
6259 } 6274 }
6260 6275
6261 6276
6262 void Heap::TearDown() { 6277 void Heap::TearDown() {
6263 #ifdef DEBUG
6264 if (FLAG_verify_heap) { 6278 if (FLAG_verify_heap) {
6265 Verify(); 6279 Verify();
6266 } 6280 }
6267 #endif 6281
6268 if (FLAG_print_cumulative_gc_stat) { 6282 if (FLAG_print_cumulative_gc_stat) {
6269 PrintF("\n\n"); 6283 PrintF("\n\n");
6270 PrintF("gc_count=%d ", gc_count_); 6284 PrintF("gc_count=%d ", gc_count_);
6271 PrintF("mark_sweep_count=%d ", ms_count_); 6285 PrintF("mark_sweep_count=%d ", ms_count_);
6272 PrintF("max_gc_pause=%d ", get_max_gc_pause()); 6286 PrintF("max_gc_pause=%d ", get_max_gc_pause());
6273 PrintF("total_gc_time=%d ", total_gc_time_ms_); 6287 PrintF("total_gc_time=%d ", total_gc_time_ms_);
6274 PrintF("min_in_mutator=%d ", get_min_in_mutator()); 6288 PrintF("min_in_mutator=%d ", get_min_in_mutator());
6275 PrintF("max_alive_after_gc=%" V8_PTR_PREFIX "d ", 6289 PrintF("max_alive_after_gc=%" V8_PTR_PREFIX "d ",
6276 get_max_alive_after_gc()); 6290 get_max_alive_after_gc());
6277 PrintF("\n\n"); 6291 PrintF("\n\n");
(...skipping 1049 matching lines...)
7327 static_cast<int>(object_sizes_last_time_[index])); 7341 static_cast<int>(object_sizes_last_time_[index]));
7328 FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(ADJUST_LAST_TIME_OBJECT_COUNT) 7342 FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(ADJUST_LAST_TIME_OBJECT_COUNT)
7329 #undef ADJUST_LAST_TIME_OBJECT_COUNT 7343 #undef ADJUST_LAST_TIME_OBJECT_COUNT
7330 7344
7331 memcpy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); 7345 memcpy(object_counts_last_time_, object_counts_, sizeof(object_counts_));
7332 memcpy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); 7346 memcpy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_));
7333 ClearObjectStats(); 7347 ClearObjectStats();
7334 } 7348 }
7335 7349
7336 } } // namespace v8::internal 7350 } } // namespace v8::internal