Chromium Code Reviews

Side by Side Diff: src/heap.cc

Issue 11085070: Enable --verify-heap in release mode (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: A couple of verification functions weren't behind the new #define. Created 8 years, 2 months ago
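This patch set moves V8's heap-verification code from the DEBUG-only build onto a dedicated VERIFY_HEAP preprocessor symbol, so that --verify-heap can also be exercised in release builds. As a rough sketch of how such a symbol might be wired up (the header and build switch named here are assumptions for illustration, not part of this CL):

    // Hypothetical configuration sketch, e.g. in a central header such as globals.h.
    // Debug builds always compile in heap verification; release builds opt in
    // through a build-system define (name assumed).
    #ifdef DEBUG
    #define VERIFY_HEAP
    #endif
    #if defined(ENABLE_VERIFY_HEAP) && !defined(VERIFY_HEAP)
    #define VERIFY_HEAP
    #endif
    //
    // Call sites in heap.cc are then guarded like this, with the runtime
    // --verify-heap flag deciding whether the checks actually run:
    //   #ifdef VERIFY_HEAP
    //   if (FLAG_verify_heap) Verify();
    //   #endif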
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 361 matching lines...)
372 lo_space_->Available() / KB, 372 lo_space_->Available() / KB,
373 lo_space_->CommittedMemory() / KB); 373 lo_space_->CommittedMemory() / KB);
374 PrintPID("All spaces, used: %6" V8_PTR_PREFIX "d KB" 374 PrintPID("All spaces, used: %6" V8_PTR_PREFIX "d KB"
375 ", available: %6" V8_PTR_PREFIX "d KB" 375 ", available: %6" V8_PTR_PREFIX "d KB"
376 ", committed: %6" V8_PTR_PREFIX "d KB\n", 376 ", committed: %6" V8_PTR_PREFIX "d KB\n",
377 this->SizeOfObjects() / KB, 377 this->SizeOfObjects() / KB,
378 this->Available() / KB, 378 this->Available() / KB,
379 this->CommittedMemory() / KB); 379 this->CommittedMemory() / KB);
380 PrintPID("Total time spent in GC : %d ms\n", total_gc_time_ms_); 380 PrintPID("Total time spent in GC : %d ms\n", total_gc_time_ms_);
381 } 381 }
382 382
Michael Starzinger 2012/10/12 10:53:16 Two empty newlines between function implementations.
mvstanton1 2012/10/12 11:16:27 Done.
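For illustration only, the convention being requested is two blank lines between top-level definitions in the .cc file (hypothetical names, not code from this CL):

    void Heap::SomeReportingFunction() {
      // ...
    }


    void Heap::AnotherReportingFunction() {  // exactly two blank lines above
      // ...
    }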
383
384 // TODO(1238405): Combine the infrastructure for --heap-stats and 383 // TODO(1238405): Combine the infrastructure for --heap-stats and
385 // --log-gc to avoid the complicated preprocessor and flag testing. 384 // --log-gc to avoid the complicated preprocessor and flag testing.
386 void Heap::ReportStatisticsAfterGC() { 385 void Heap::ReportStatisticsAfterGC() {
387 // Similar to the before GC, we use some complicated logic to ensure that 386 // Similar to the before GC, we use some complicated logic to ensure that
388 // NewSpace statistics are logged exactly once when --log-gc is turned on. 387 // NewSpace statistics are logged exactly once when --log-gc is turned on.
389 #if defined(DEBUG) 388 #if defined(DEBUG)
390 if (FLAG_heap_stats) { 389 if (FLAG_heap_stats) {
391 new_space_.CollectStatistics(); 390 new_space_.CollectStatistics();
392 ReportHeapStatistics("After GC"); 391 ReportHeapStatistics("After GC");
393 } else if (FLAG_log_gc) { 392 } else if (FLAG_log_gc) {
394 new_space_.ReportStatistics(); 393 new_space_.ReportStatistics();
395 } 394 }
396 #else 395 #else
397 if (FLAG_log_gc) new_space_.ReportStatistics(); 396 if (FLAG_log_gc) new_space_.ReportStatistics();
398 #endif // DEBUG 397 #endif // DEBUG
399 } 398 }
400 399
401 400
402 void Heap::GarbageCollectionPrologue() { 401 void Heap::GarbageCollectionPrologue() {
403 isolate_->transcendental_cache()->Clear(); 402 isolate_->transcendental_cache()->Clear();
404 ClearJSFunctionResultCaches(); 403 ClearJSFunctionResultCaches();
405 gc_count_++; 404 gc_count_++;
406 unflattened_strings_length_ = 0; 405 unflattened_strings_length_ = 0;
406
407 #ifdef VERIFY_HEAP
408 if (FLAG_verify_heap) {
409 Verify();
410 }
411 #endif
412
407 #ifdef DEBUG 413 #ifdef DEBUG
408 ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC); 414 ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
409 allow_allocation(false); 415 allow_allocation(false);
410 416
411 if (FLAG_verify_heap) { 417 if (FLAG_gc_verbose) Print();
412 Verify();
413 }
414 418
415 if (FLAG_gc_verbose) Print();
416 #endif // DEBUG
417
418 #if defined(DEBUG)
419 ReportStatisticsBeforeGC(); 419 ReportStatisticsBeforeGC();
420 #endif // DEBUG 420 #endif // DEBUG
421 421
422 LiveObjectList::GCPrologue(); 422 LiveObjectList::GCPrologue();
423 store_buffer()->GCPrologue(); 423 store_buffer()->GCPrologue();
424 } 424 }
425 425
426 426
427 intptr_t Heap::SizeOfObjects() { 427 intptr_t Heap::SizeOfObjects() {
428 intptr_t total = 0; 428 intptr_t total = 0;
(...skipping 11 matching lines...)
440 space != NULL; 440 space != NULL;
441 space = spaces.next()) { 441 space = spaces.next()) {
442 space->RepairFreeListsAfterBoot(); 442 space->RepairFreeListsAfterBoot();
443 } 443 }
444 } 444 }
445 445
446 446
447 void Heap::GarbageCollectionEpilogue() { 447 void Heap::GarbageCollectionEpilogue() {
448 store_buffer()->GCEpilogue(); 448 store_buffer()->GCEpilogue();
449 LiveObjectList::GCEpilogue(); 449 LiveObjectList::GCEpilogue();
450 #ifdef DEBUG
451 allow_allocation(true);
452 ZapFromSpace();
453 450
451 // In release mode, we only zap the from space under heap verification.
452 if (Heap::ShouldZapGarbage()) {
453 ZapFromSpace();
454 }
455
456 #ifdef VERIFY_HEAP
454 if (FLAG_verify_heap) { 457 if (FLAG_verify_heap) {
455 Verify(); 458 Verify();
456 } 459 }
460 #endif
457 461
462 #ifdef DEBUG
463 allow_allocation(true);
458 if (FLAG_print_global_handles) isolate_->global_handles()->Print(); 464 if (FLAG_print_global_handles) isolate_->global_handles()->Print();
459 if (FLAG_print_handles) PrintHandles(); 465 if (FLAG_print_handles) PrintHandles();
460 if (FLAG_gc_verbose) Print(); 466 if (FLAG_gc_verbose) Print();
461 if (FLAG_code_stats) ReportCodeStatistics("After GC"); 467 if (FLAG_code_stats) ReportCodeStatistics("After GC");
462 #endif 468 #endif
463 469
464 isolate_->counters()->alive_after_last_gc()->Set( 470 isolate_->counters()->alive_after_last_gc()->Set(
465 static_cast<int>(SizeOfObjects())); 471 static_cast<int>(SizeOfObjects()));
466 472
467 isolate_->counters()->symbol_table_capacity()->Set( 473 isolate_->counters()->symbol_table_capacity()->Set(
(...skipping 174 matching lines...)
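The epilogue above now routes from-space zapping through Heap::ShouldZapGarbage() instead of doing it unconditionally under #ifdef DEBUG. The predicate itself is declared elsewhere and is not part of this diff chunk; a plausible sketch, based on the comment that release builds only zap under heap verification:

    // Sketch (assumption): ShouldZapGarbage() as it might appear in heap.h.
    // Debug builds always zap from-space; release builds zap only when the
    // verification code is compiled in and enabled with --verify-heap.
    static inline bool ShouldZapGarbage() {
    #ifdef DEBUG
      return true;
    #else
    #ifdef VERIFY_HEAP
      return FLAG_verify_heap;
    #else
      return false;
    #endif
    #endif
    }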
642 648
643 649
644 void Heap::PerformScavenge() { 650 void Heap::PerformScavenge() {
645 GCTracer tracer(this, NULL, NULL); 651 GCTracer tracer(this, NULL, NULL);
646 if (incremental_marking()->IsStopped()) { 652 if (incremental_marking()->IsStopped()) {
647 PerformGarbageCollection(SCAVENGER, &tracer); 653 PerformGarbageCollection(SCAVENGER, &tracer);
648 } else { 654 } else {
649 PerformGarbageCollection(MARK_COMPACTOR, &tracer); 655 PerformGarbageCollection(MARK_COMPACTOR, &tracer);
650 } 656 }
651 } 657 }
652 658
Michael Starzinger 2012/10/12 10:53:16 Two empty newlines between function implementations.
mvstanton1 2012/10/12 11:16:27 Done.
653 659 #ifdef VERIFY_HEAP
654 #ifdef DEBUG
655 // Helper class for verifying the symbol table. 660 // Helper class for verifying the symbol table.
656 class SymbolTableVerifier : public ObjectVisitor { 661 class SymbolTableVerifier : public ObjectVisitor {
657 public: 662 public:
658 void VisitPointers(Object** start, Object** end) { 663 void VisitPointers(Object** start, Object** end) {
659 // Visit all HeapObject pointers in [start, end). 664 // Visit all HeapObject pointers in [start, end).
660 for (Object** p = start; p < end; p++) { 665 for (Object** p = start; p < end; p++) {
661 if ((*p)->IsHeapObject()) { 666 if ((*p)->IsHeapObject()) {
662 // Check that the symbol is actually a symbol. 667 // Check that the symbol is actually a symbol.
663 ASSERT((*p)->IsTheHole() || (*p)->IsUndefined() || (*p)->IsSymbol()); 668 CHECK((*p)->IsTheHole() || (*p)->IsUndefined() || (*p)->IsSymbol());
664 } 669 }
665 } 670 }
666 } 671 }
667 }; 672 };
668 #endif // DEBUG
669 673
670 674
671 static void VerifySymbolTable() { 675 static void VerifySymbolTable() {
672 #ifdef DEBUG
673 SymbolTableVerifier verifier; 676 SymbolTableVerifier verifier;
674 HEAP->symbol_table()->IterateElements(&verifier); 677 HEAP->symbol_table()->IterateElements(&verifier);
675 #endif // DEBUG
676 } 678 }
679 #endif // VERIFY_HEAP
677 680
678 681
679 static bool AbortIncrementalMarkingAndCollectGarbage( 682 static bool AbortIncrementalMarkingAndCollectGarbage(
680 Heap* heap, 683 Heap* heap,
681 AllocationSpace space, 684 AllocationSpace space,
682 const char* gc_reason = NULL) { 685 const char* gc_reason = NULL) {
683 heap->mark_compact_collector()->SetFlags(Heap::kAbortIncrementalMarkingMask); 686 heap->mark_compact_collector()->SetFlags(Heap::kAbortIncrementalMarkingMask);
684 bool result = heap->CollectGarbage(space, gc_reason); 687 bool result = heap->CollectGarbage(space, gc_reason);
685 heap->mark_compact_collector()->SetFlags(Heap::kNoGCFlags); 688 heap->mark_compact_collector()->SetFlags(Heap::kNoGCFlags);
686 return result; 689 return result;
(...skipping 136 matching lines...)
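Note that the verification helpers in the chunk above (SymbolTableVerifier, and later VerifyNonPointerSpacePointersVisitor and Heap::Verify) switch from ASSERT to CHECK. ASSERT compiles away outside DEBUG builds, so verification that is now guarded only by VERIFY_HEAP needs CHECK to still abort in release mode. Roughly, assuming simplified versions of the macros in src/checks.h:

    // Simplified sketch of the distinction (the real macros live in src/checks.h).
    #define CHECK(condition)                                              \
      do {                                                                \
        if (!(condition))                                                 \
          V8_Fatal(__FILE__, __LINE__, "CHECK(%s) failed", #condition);   \
      } while (false)

    #ifdef DEBUG
    #define ASSERT(condition) CHECK(condition)   // active in debug builds only
    #else
    #define ASSERT(condition) ((void) 0)         // compiled away in release
    #endif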
823 } 826 }
824 827
825 bool Heap::PerformGarbageCollection(GarbageCollector collector, 828 bool Heap::PerformGarbageCollection(GarbageCollector collector,
826 GCTracer* tracer) { 829 GCTracer* tracer) {
827 bool next_gc_likely_to_collect_more = false; 830 bool next_gc_likely_to_collect_more = false;
828 831
829 if (collector != SCAVENGER) { 832 if (collector != SCAVENGER) {
830 PROFILE(isolate_, CodeMovingGCEvent()); 833 PROFILE(isolate_, CodeMovingGCEvent());
831 } 834 }
832 835
836 #ifdef VERIFY_HEAP
833 if (FLAG_verify_heap) { 837 if (FLAG_verify_heap) {
834 VerifySymbolTable(); 838 VerifySymbolTable();
835 } 839 }
840 #endif
841
836 if (collector == MARK_COMPACTOR && global_gc_prologue_callback_) { 842 if (collector == MARK_COMPACTOR && global_gc_prologue_callback_) {
837 ASSERT(!allocation_allowed_); 843 ASSERT(!allocation_allowed_);
838 GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL); 844 GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
839 global_gc_prologue_callback_(); 845 global_gc_prologue_callback_();
840 } 846 }
841 847
842 GCType gc_type = 848 GCType gc_type =
843 collector == MARK_COMPACTOR ? kGCTypeMarkSweepCompact : kGCTypeScavenge; 849 collector == MARK_COMPACTOR ? kGCTypeMarkSweepCompact : kGCTypeScavenge;
844 850
845 for (int i = 0; i < gc_prologue_callbacks_.length(); ++i) { 851 for (int i = 0; i < gc_prologue_callbacks_.length(); ++i) {
(...skipping 106 matching lines...)
952 if (gc_type & gc_epilogue_callbacks_[i].gc_type) { 958 if (gc_type & gc_epilogue_callbacks_[i].gc_type) {
953 gc_epilogue_callbacks_[i].callback(gc_type, callback_flags); 959 gc_epilogue_callbacks_[i].callback(gc_type, callback_flags);
954 } 960 }
955 } 961 }
956 962
957 if (collector == MARK_COMPACTOR && global_gc_epilogue_callback_) { 963 if (collector == MARK_COMPACTOR && global_gc_epilogue_callback_) {
958 ASSERT(!allocation_allowed_); 964 ASSERT(!allocation_allowed_);
959 GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL); 965 GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
960 global_gc_epilogue_callback_(); 966 global_gc_epilogue_callback_();
961 } 967 }
968
969 #ifdef VERIFY_HEAP
962 if (FLAG_verify_heap) { 970 if (FLAG_verify_heap) {
963 VerifySymbolTable(); 971 VerifySymbolTable();
964 } 972 }
973 #endif
965 974
966 return next_gc_likely_to_collect_more; 975 return next_gc_likely_to_collect_more;
967 } 976 }
968 977
969 978
970 void Heap::MarkCompact(GCTracer* tracer) { 979 void Heap::MarkCompact(GCTracer* tracer) {
971 gc_state_ = MARK_COMPACT; 980 gc_state_ = MARK_COMPACT;
972 LOG(isolate_, ResourceEvent("markcompact", "begin")); 981 LOG(isolate_, ResourceEvent("markcompact", "begin"));
973 982
974 mark_compact_collector_.Prepare(tracer); 983 mark_compact_collector_.Prepare(tracer);
(...skipping 62 matching lines...)
1037 Object* object = *p; 1046 Object* object = *p;
1038 if (!heap_->InNewSpace(object)) return; 1047 if (!heap_->InNewSpace(object)) return;
1039 Heap::ScavengeObject(reinterpret_cast<HeapObject**>(p), 1048 Heap::ScavengeObject(reinterpret_cast<HeapObject**>(p),
1040 reinterpret_cast<HeapObject*>(object)); 1049 reinterpret_cast<HeapObject*>(object));
1041 } 1050 }
1042 1051
1043 Heap* heap_; 1052 Heap* heap_;
1044 }; 1053 };
1045 1054
1046 1055
1047 #ifdef DEBUG 1056 #ifdef VERIFY_HEAP
1048 // Visitor class to verify pointers in code or data space do not point into 1057 // Visitor class to verify pointers in code or data space do not point into
1049 // new space. 1058 // new space.
1050 class VerifyNonPointerSpacePointersVisitor: public ObjectVisitor { 1059 class VerifyNonPointerSpacePointersVisitor: public ObjectVisitor {
1051 public: 1060 public:
1052 void VisitPointers(Object** start, Object**end) { 1061 void VisitPointers(Object** start, Object**end) {
1053 for (Object** current = start; current < end; current++) { 1062 for (Object** current = start; current < end; current++) {
1054 if ((*current)->IsHeapObject()) { 1063 if ((*current)->IsHeapObject()) {
1055 ASSERT(!HEAP->InNewSpace(HeapObject::cast(*current))); 1064 CHECK(!HEAP->InNewSpace(HeapObject::cast(*current)));
1056 } 1065 }
1057 } 1066 }
1058 } 1067 }
1059 }; 1068 };
1060 1069
1061 1070
1062 static void VerifyNonPointerSpacePointers() { 1071 static void VerifyNonPointerSpacePointers() {
1063 // Verify that there are no pointers to new space in spaces where we 1072 // Verify that there are no pointers to new space in spaces where we
1064 // do not expect them. 1073 // do not expect them.
1065 VerifyNonPointerSpacePointersVisitor v; 1074 VerifyNonPointerSpacePointersVisitor v;
1066 HeapObjectIterator code_it(HEAP->code_space()); 1075 HeapObjectIterator code_it(HEAP->code_space());
1067 for (HeapObject* object = code_it.Next(); 1076 for (HeapObject* object = code_it.Next();
1068 object != NULL; object = code_it.Next()) 1077 object != NULL; object = code_it.Next())
1069 object->Iterate(&v); 1078 object->Iterate(&v);
1070 1079
1071 // The old data space was normally swept conservatively so that the iterator 1080 // The old data space was normally swept conservatively so that the iterator
1072 // doesn't work, so we normally skip the next bit. 1081 // doesn't work, so we normally skip the next bit.
1073 if (!HEAP->old_data_space()->was_swept_conservatively()) { 1082 if (!HEAP->old_data_space()->was_swept_conservatively()) {
1074 HeapObjectIterator data_it(HEAP->old_data_space()); 1083 HeapObjectIterator data_it(HEAP->old_data_space());
1075 for (HeapObject* object = data_it.Next(); 1084 for (HeapObject* object = data_it.Next();
1076 object != NULL; object = data_it.Next()) 1085 object != NULL; object = data_it.Next())
1077 object->Iterate(&v); 1086 object->Iterate(&v);
1078 } 1087 }
1079 } 1088 }
1080 #endif 1089 #endif // VERIFY_HEAP
1081 1090
1082 1091
1083 void Heap::CheckNewSpaceExpansionCriteria() { 1092 void Heap::CheckNewSpaceExpansionCriteria() {
1084 if (new_space_.Capacity() < new_space_.MaximumCapacity() && 1093 if (new_space_.Capacity() < new_space_.MaximumCapacity() &&
1085 survived_since_last_expansion_ > new_space_.Capacity() && 1094 survived_since_last_expansion_ > new_space_.Capacity() &&
1086 !new_space_high_promotion_mode_active_) { 1095 !new_space_high_promotion_mode_active_) {
1087 // Grow the size of new space if there is room to grow, enough data 1096 // Grow the size of new space if there is room to grow, enough data
1088 // has survived scavenge since the last expansion and we are not in 1097 // has survived scavenge since the last expansion and we are not in
1089 // high promotion mode. 1098 // high promotion mode.
1090 new_space_.Grow(); 1099 new_space_.Grow();
(...skipping 118 matching lines...)
1209 return NULL; 1218 return NULL;
1210 } 1219 }
1211 1220
1212 private: 1221 private:
1213 Heap* heap_; 1222 Heap* heap_;
1214 }; 1223 };
1215 1224
1216 1225
1217 void Heap::Scavenge() { 1226 void Heap::Scavenge() {
1218 RelocationLock relocation_lock(this); 1227 RelocationLock relocation_lock(this);
1219 #ifdef DEBUG 1228
1229 #ifdef VERIFY_HEAP
1220 if (FLAG_verify_heap) VerifyNonPointerSpacePointers(); 1230 if (FLAG_verify_heap) VerifyNonPointerSpacePointers();
1221 #endif 1231 #endif
1222 1232
1223 gc_state_ = SCAVENGE; 1233 gc_state_ = SCAVENGE;
1224 1234
1225 // Implements Cheney's copying algorithm 1235 // Implements Cheney's copying algorithm
1226 LOG(isolate_, ResourceEvent("scavenge", "begin")); 1236 LOG(isolate_, ResourceEvent("scavenge", "begin"));
1227 1237
1228 // Clear descriptor cache. 1238 // Clear descriptor cache.
1229 isolate_->descriptor_lookup_cache()->Clear(); 1239 isolate_->descriptor_lookup_cache()->Clear();
(...skipping 116 matching lines...)
1346 return NULL; 1356 return NULL;
1347 } 1357 }
1348 1358
1349 // String is still reachable. 1359 // String is still reachable.
1350 return String::cast(first_word.ToForwardingAddress()); 1360 return String::cast(first_word.ToForwardingAddress());
1351 } 1361 }
1352 1362
1353 1363
1354 void Heap::UpdateNewSpaceReferencesInExternalStringTable( 1364 void Heap::UpdateNewSpaceReferencesInExternalStringTable(
1355 ExternalStringTableUpdaterCallback updater_func) { 1365 ExternalStringTableUpdaterCallback updater_func) {
1366 #ifdef VERIFY_HEAP
1356 if (FLAG_verify_heap) { 1367 if (FLAG_verify_heap) {
1357 external_string_table_.Verify(); 1368 external_string_table_.Verify();
1358 } 1369 }
1370 #endif
1359 1371
1360 if (external_string_table_.new_space_strings_.is_empty()) return; 1372 if (external_string_table_.new_space_strings_.is_empty()) return;
1361 1373
1362 Object** start = &external_string_table_.new_space_strings_[0]; 1374 Object** start = &external_string_table_.new_space_strings_[0];
1363 Object** end = start + external_string_table_.new_space_strings_.length(); 1375 Object** end = start + external_string_table_.new_space_strings_.length();
1364 Object** last = start; 1376 Object** last = start;
1365 1377
1366 for (Object** p = start; p < end; ++p) { 1378 for (Object** p = start; p < end; ++p) {
1367 ASSERT(InFromSpace(*p)); 1379 ASSERT(InFromSpace(*p));
1368 String* target = updater_func(this, p); 1380 String* target = updater_func(this, p);
(...skipping 2002 matching lines...)
3371 String::WriteToFlat(buffer, dest, start, end); 3383 String::WriteToFlat(buffer, dest, start, end);
3372 } else { 3384 } else {
3373 ASSERT(string_result->IsTwoByteRepresentation()); 3385 ASSERT(string_result->IsTwoByteRepresentation());
3374 uc16* dest = SeqTwoByteString::cast(string_result)->GetChars(); 3386 uc16* dest = SeqTwoByteString::cast(string_result)->GetChars();
3375 String::WriteToFlat(buffer, dest, start, end); 3387 String::WriteToFlat(buffer, dest, start, end);
3376 } 3388 }
3377 return result; 3389 return result;
3378 } 3390 }
3379 3391
3380 ASSERT(buffer->IsFlat()); 3392 ASSERT(buffer->IsFlat());
3381 #if DEBUG 3393 #if VERIFY_HEAP
3382 if (FLAG_verify_heap) { 3394 if (FLAG_verify_heap) {
3383 buffer->StringVerify(); 3395 buffer->StringVerify();
3384 } 3396 }
3385 #endif 3397 #endif
3386 3398
3387 Object* result; 3399 Object* result;
3388 // When slicing an indirect string we use its encoding for a newly created 3400 // When slicing an indirect string we use its encoding for a newly created
3389 // slice and don't check the encoding of the underlying string. This is safe 3401 // slice and don't check the encoding of the underlying string. This is safe
3390 // even if the encodings are different because of externalization. If an 3402 // even if the encodings are different because of externalization. If an
3391 // indirect ASCII string is pointing to a two-byte string, the two-byte char 3403 // indirect ASCII string is pointing to a two-byte string, the two-byte char
(...skipping 243 matching lines...)
3635 if (!self_reference.is_null()) { 3647 if (!self_reference.is_null()) {
3636 *(self_reference.location()) = code; 3648 *(self_reference.location()) = code;
3637 } 3649 }
3638 // Migrate generated code. 3650 // Migrate generated code.
3639 // The generated code can contain Object** values (typically from handles) 3651 // The generated code can contain Object** values (typically from handles)
3640 // that are dereferenced during the copy to point directly to the actual heap 3652 // that are dereferenced during the copy to point directly to the actual heap
3641 // objects. These pointers can include references to the code object itself, 3653 // objects. These pointers can include references to the code object itself,
3642 // through the self_reference parameter. 3654 // through the self_reference parameter.
3643 code->CopyFrom(desc); 3655 code->CopyFrom(desc);
3644 3656
3645 #ifdef DEBUG 3657 #ifdef VERIFY_HEAP
3646 if (FLAG_verify_heap) { 3658 if (FLAG_verify_heap) {
3647 code->Verify(); 3659 code->Verify();
3648 } 3660 }
3649 #endif 3661 #endif
3650 return code; 3662 return code;
3651 } 3663 }
3652 3664
3653 3665
3654 MaybeObject* Heap::CopyCode(Code* code) { 3666 MaybeObject* Heap::CopyCode(Code* code) {
3655 // Allocate an object the same size as the code object. 3667 // Allocate an object the same size as the code object.
(...skipping 61 matching lines...)
3717 new_code->set_relocation_info(ByteArray::cast(reloc_info_array)); 3729 new_code->set_relocation_info(ByteArray::cast(reloc_info_array));
3718 3730
3719 // Copy patched rinfo. 3731 // Copy patched rinfo.
3720 memcpy(new_code->relocation_start(), reloc_info.start(), reloc_info.length()); 3732 memcpy(new_code->relocation_start(), reloc_info.start(), reloc_info.length());
3721 3733
3722 // Relocate the copy. 3734 // Relocate the copy.
3723 ASSERT(!isolate_->code_range()->exists() || 3735 ASSERT(!isolate_->code_range()->exists() ||
3724 isolate_->code_range()->contains(code->address())); 3736 isolate_->code_range()->contains(code->address()));
3725 new_code->Relocate(new_addr - old_addr); 3737 new_code->Relocate(new_addr - old_addr);
3726 3738
3727 #ifdef DEBUG 3739 #ifdef VERIFY_HEAP
3728 if (FLAG_verify_heap) { 3740 if (FLAG_verify_heap) {
3729 code->Verify(); 3741 code->Verify();
3730 } 3742 }
3731 #endif 3743 #endif
3732 return new_code; 3744 return new_code;
3733 } 3745 }
3734 3746
3735 3747
3736 MaybeObject* Heap::Allocate(Map* map, AllocationSpace space) { 3748 MaybeObject* Heap::Allocate(Map* map, AllocationSpace space) {
3737 ASSERT(gc_state_ == NOT_IN_GC); 3749 ASSERT(gc_state_ == NOT_IN_GC);
(...skipping 852 matching lines...) Expand 10 before | Expand all | Expand 10 after
4590 { MaybeObject* maybe_result = AllocateRaw(size, space, retry_space); 4602 { MaybeObject* maybe_result = AllocateRaw(size, space, retry_space);
4591 if (!maybe_result->ToObject(&result)) return maybe_result; 4603 if (!maybe_result->ToObject(&result)) return maybe_result;
4592 } 4604 }
4593 4605
4594 // Partially initialize the object. 4606 // Partially initialize the object.
4595 HeapObject::cast(result)->set_map_no_write_barrier(ascii_string_map()); 4607 HeapObject::cast(result)->set_map_no_write_barrier(ascii_string_map());
4596 String::cast(result)->set_length(length); 4608 String::cast(result)->set_length(length);
4597 String::cast(result)->set_hash_field(String::kEmptyHashField); 4609 String::cast(result)->set_hash_field(String::kEmptyHashField);
4598 ASSERT_EQ(size, HeapObject::cast(result)->Size()); 4610 ASSERT_EQ(size, HeapObject::cast(result)->Size());
4599 4611
4600 #ifdef DEBUG 4612 #ifdef VERIFY_HEAP
4601 if (FLAG_verify_heap) { 4613 if (FLAG_verify_heap) {
4602 // Initialize string's content to ensure ASCII-ness (character range 0-127) 4614 // Initialize string's content to ensure ASCII-ness (character range 0-127)
4603 // as required when verifying the heap. 4615 // as required when verifying the heap.
4604 char* dest = SeqAsciiString::cast(result)->GetChars(); 4616 char* dest = SeqAsciiString::cast(result)->GetChars();
4605 memset(dest, 0x0F, length * kCharSize); 4617 memset(dest, 0x0F, length * kCharSize);
4606 } 4618 }
4607 #endif // DEBUG 4619 #endif
4608 4620
4609 return result; 4621 return result;
4610 } 4622 }
4611 4623
4612 4624
4613 MaybeObject* Heap::AllocateRawTwoByteString(int length, 4625 MaybeObject* Heap::AllocateRawTwoByteString(int length,
4614 PretenureFlag pretenure) { 4626 PretenureFlag pretenure) {
4615 if (length < 0 || length > SeqTwoByteString::kMaxLength) { 4627 if (length < 0 || length > SeqTwoByteString::kMaxLength) {
4616 return Failure::OutOfMemoryException(); 4628 return Failure::OutOfMemoryException();
4617 } 4629 }
(...skipping 65 matching lines...)
4683 MaybeObject* Heap::AllocateRawFixedArray(int length) { 4695 MaybeObject* Heap::AllocateRawFixedArray(int length) {
4684 if (length < 0 || length > FixedArray::kMaxLength) { 4696 if (length < 0 || length > FixedArray::kMaxLength) {
4685 return Failure::OutOfMemoryException(); 4697 return Failure::OutOfMemoryException();
4686 } 4698 }
4687 ASSERT(length > 0); 4699 ASSERT(length > 0);
4688 // Use the general function if we're forced to always allocate. 4700 // Use the general function if we're forced to always allocate.
4689 if (always_allocate()) return AllocateFixedArray(length, TENURED); 4701 if (always_allocate()) return AllocateFixedArray(length, TENURED);
4690 // Allocate the raw data for a fixed array. 4702 // Allocate the raw data for a fixed array.
4691 int size = FixedArray::SizeFor(length); 4703 int size = FixedArray::SizeFor(length);
4692 return size <= kMaxObjectSizeInNewSpace 4704 return size <= kMaxObjectSizeInNewSpace
4693 ? new_space_.AllocateRaw(size) 4705 ? new_space_.AllocateRaw(size)
Michael Starzinger 2012/10/12 10:53:16 Indentation is off.
mvstanton1 2012/10/12 11:16:27 Done.
4694 : lo_space_->AllocateRaw(size, NOT_EXECUTABLE); 4706 : lo_space_->AllocateRaw(size, NOT_EXECUTABLE);
4695 } 4707 }
4696 4708
4697 4709
4698 MaybeObject* Heap::CopyFixedArrayWithMap(FixedArray* src, Map* map) { 4710 MaybeObject* Heap::CopyFixedArrayWithMap(FixedArray* src, Map* map) {
4699 int len = src->length(); 4711 int len = src->length();
4700 Object* obj; 4712 Object* obj;
4701 { MaybeObject* maybe_obj = AllocateRawFixedArray(len); 4713 { MaybeObject* maybe_obj = AllocateRawFixedArray(len);
4702 if (!maybe_obj->ToObject(&obj)) return maybe_obj; 4714 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
4703 } 4715 }
4704 if (InNewSpace(obj)) { 4716 if (InNewSpace(obj)) {
(...skipping 682 matching lines...)
5387 case CELL_SPACE: 5399 case CELL_SPACE:
5388 return cell_space_->Contains(addr); 5400 return cell_space_->Contains(addr);
5389 case LO_SPACE: 5401 case LO_SPACE:
5390 return lo_space_->SlowContains(addr); 5402 return lo_space_->SlowContains(addr);
5391 } 5403 }
5392 5404
5393 return false; 5405 return false;
5394 } 5406 }
5395 5407
5396 5408
5397 #ifdef DEBUG 5409 #ifdef VERIFY_HEAP
5398 void Heap::Verify() { 5410 void Heap::Verify() {
5399 ASSERT(HasBeenSetUp()); 5411 CHECK(HasBeenSetUp());
5400 5412
5401 store_buffer()->Verify(); 5413 store_buffer()->Verify();
5402 5414
5403 VerifyPointersVisitor visitor; 5415 VerifyPointersVisitor visitor;
5404 IterateRoots(&visitor, VISIT_ONLY_STRONG); 5416 IterateRoots(&visitor, VISIT_ONLY_STRONG);
5405 5417
5406 new_space_.Verify(); 5418 new_space_.Verify();
5407 5419
5408 old_pointer_space_->Verify(&visitor); 5420 old_pointer_space_->Verify(&visitor);
5409 map_space_->Verify(&visitor); 5421 map_space_->Verify(&visitor);
5410 5422
5411 VerifyPointersVisitor no_dirty_regions_visitor; 5423 VerifyPointersVisitor no_dirty_regions_visitor;
5412 old_data_space_->Verify(&no_dirty_regions_visitor); 5424 old_data_space_->Verify(&no_dirty_regions_visitor);
5413 code_space_->Verify(&no_dirty_regions_visitor); 5425 code_space_->Verify(&no_dirty_regions_visitor);
5414 cell_space_->Verify(&no_dirty_regions_visitor); 5426 cell_space_->Verify(&no_dirty_regions_visitor);
5415 5427
5416 lo_space_->Verify(); 5428 lo_space_->Verify();
5417 } 5429 }
5418 5430 #endif
5419
5420 #endif // DEBUG
5421 5431
5422 5432
5423 MaybeObject* Heap::LookupSymbol(Vector<const char> string) { 5433 MaybeObject* Heap::LookupSymbol(Vector<const char> string) {
5424 Object* symbol = NULL; 5434 Object* symbol = NULL;
5425 Object* new_table; 5435 Object* new_table;
5426 { MaybeObject* maybe_new_table = 5436 { MaybeObject* maybe_new_table =
5427 symbol_table()->LookupSymbol(string, &symbol); 5437 symbol_table()->LookupSymbol(string, &symbol);
5428 if (!maybe_new_table->ToObject(&new_table)) return maybe_new_table; 5438 if (!maybe_new_table->ToObject(&new_table)) return maybe_new_table;
5429 } 5439 }
5430 // Can't use set_symbol_table because SymbolTable::cast knows that 5440 // Can't use set_symbol_table because SymbolTable::cast knows that
(...skipping 71 matching lines...)
5502 5512
5503 5513
5504 bool Heap::LookupSymbolIfExists(String* string, String** symbol) { 5514 bool Heap::LookupSymbolIfExists(String* string, String** symbol) {
5505 if (string->IsSymbol()) { 5515 if (string->IsSymbol()) {
5506 *symbol = string; 5516 *symbol = string;
5507 return true; 5517 return true;
5508 } 5518 }
5509 return symbol_table()->LookupSymbolIfExists(string, symbol); 5519 return symbol_table()->LookupSymbolIfExists(string, symbol);
5510 } 5520 }
5511 5521
5512
5513 #ifdef DEBUG
5514 void Heap::ZapFromSpace() { 5522 void Heap::ZapFromSpace() {
5515 NewSpacePageIterator it(new_space_.FromSpaceStart(), 5523 NewSpacePageIterator it(new_space_.FromSpaceStart(),
5516 new_space_.FromSpaceEnd()); 5524 new_space_.FromSpaceEnd());
5517 while (it.has_next()) { 5525 while (it.has_next()) {
5518 NewSpacePage* page = it.next(); 5526 NewSpacePage* page = it.next();
5519 for (Address cursor = page->area_start(), limit = page->area_end(); 5527 for (Address cursor = page->area_start(), limit = page->area_end();
5520 cursor < limit; 5528 cursor < limit;
5521 cursor += kPointerSize) { 5529 cursor += kPointerSize) {
5522 Memory::Address_at(cursor) = kFromSpaceZapValue; 5530 Memory::Address_at(cursor) = kFromSpaceZapValue;
5523 } 5531 }
5524 } 5532 }
5525 } 5533 }
5526 #endif // DEBUG
5527 5534
5528 5535
5529 void Heap::IterateAndMarkPointersToFromSpace(Address start, 5536 void Heap::IterateAndMarkPointersToFromSpace(Address start,
5530 Address end, 5537 Address end,
5531 ObjectSlotCallback callback) { 5538 ObjectSlotCallback callback) {
5532 Address slot_address = start; 5539 Address slot_address = start;
5533 5540
5534 // We are not collecting slots on new space objects during mutation 5541 // We are not collecting slots on new space objects during mutation
5535 // thus we have to scan for pointers to evacuation candidates when we 5542 // thus we have to scan for pointers to evacuation candidates when we
5536 // promote objects. But we should not record any slots in non-black 5543 // promote objects. But we should not record any slots in non-black
(...skipping 716 matching lines...)
6253 roots_[kStackLimitRootIndex] = 6260 roots_[kStackLimitRootIndex] =
6254 reinterpret_cast<Object*>( 6261 reinterpret_cast<Object*>(
6255 (isolate_->stack_guard()->jslimit() & ~kSmiTagMask) | kSmiTag); 6262 (isolate_->stack_guard()->jslimit() & ~kSmiTagMask) | kSmiTag);
6256 roots_[kRealStackLimitRootIndex] = 6263 roots_[kRealStackLimitRootIndex] =
6257 reinterpret_cast<Object*>( 6264 reinterpret_cast<Object*>(
6258 (isolate_->stack_guard()->real_jslimit() & ~kSmiTagMask) | kSmiTag); 6265 (isolate_->stack_guard()->real_jslimit() & ~kSmiTagMask) | kSmiTag);
6259 } 6266 }
6260 6267
6261 6268
6262 void Heap::TearDown() { 6269 void Heap::TearDown() {
6263 #ifdef DEBUG 6270 #ifdef VERIFY_HEAP
6264 if (FLAG_verify_heap) { 6271 if (FLAG_verify_heap) {
6265 Verify(); 6272 Verify();
6266 } 6273 }
6267 #endif 6274 #endif
6275
6268 if (FLAG_print_cumulative_gc_stat) { 6276 if (FLAG_print_cumulative_gc_stat) {
6269 PrintF("\n\n"); 6277 PrintF("\n\n");
6270 PrintF("gc_count=%d ", gc_count_); 6278 PrintF("gc_count=%d ", gc_count_);
6271 PrintF("mark_sweep_count=%d ", ms_count_); 6279 PrintF("mark_sweep_count=%d ", ms_count_);
6272 PrintF("max_gc_pause=%d ", get_max_gc_pause()); 6280 PrintF("max_gc_pause=%d ", get_max_gc_pause());
6273 PrintF("total_gc_time=%d ", total_gc_time_ms_); 6281 PrintF("total_gc_time=%d ", total_gc_time_ms_);
6274 PrintF("min_in_mutator=%d ", get_min_in_mutator()); 6282 PrintF("min_in_mutator=%d ", get_min_in_mutator());
6275 PrintF("max_alive_after_gc=%" V8_PTR_PREFIX "d ", 6283 PrintF("max_alive_after_gc=%" V8_PTR_PREFIX "d ",
6276 get_max_alive_after_gc()); 6284 get_max_alive_after_gc());
6277 PrintF("\n\n"); 6285 PrintF("\n\n");
(...skipping 909 matching lines...)
7187 new_space_strings_.Rewind(last); 7195 new_space_strings_.Rewind(last);
7188 last = 0; 7196 last = 0;
7189 for (int i = 0; i < old_space_strings_.length(); ++i) { 7197 for (int i = 0; i < old_space_strings_.length(); ++i) {
7190 if (old_space_strings_[i] == heap_->raw_unchecked_the_hole_value()) { 7198 if (old_space_strings_[i] == heap_->raw_unchecked_the_hole_value()) {
7191 continue; 7199 continue;
7192 } 7200 }
7193 ASSERT(!heap_->InNewSpace(old_space_strings_[i])); 7201 ASSERT(!heap_->InNewSpace(old_space_strings_[i]));
7194 old_space_strings_[last++] = old_space_strings_[i]; 7202 old_space_strings_[last++] = old_space_strings_[i];
7195 } 7203 }
7196 old_space_strings_.Rewind(last); 7204 old_space_strings_.Rewind(last);
7205 #ifdef VERIFY_HEAP
7197 if (FLAG_verify_heap) { 7206 if (FLAG_verify_heap) {
7198 Verify(); 7207 Verify();
7199 } 7208 }
7209 #endif
7200 } 7210 }
7201 7211
7202 7212
7203 void ExternalStringTable::TearDown() { 7213 void ExternalStringTable::TearDown() {
7204 new_space_strings_.Free(); 7214 new_space_strings_.Free();
7205 old_space_strings_.Free(); 7215 old_space_strings_.Free();
7206 } 7216 }
7207 7217
7208 7218
7209 void Heap::QueueMemoryChunkForFree(MemoryChunk* chunk) { 7219 void Heap::QueueMemoryChunkForFree(MemoryChunk* chunk) {
(...skipping 117 matching lines...)
7327 static_cast<int>(object_sizes_last_time_[index])); 7337 static_cast<int>(object_sizes_last_time_[index]));
7328 FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(ADJUST_LAST_TIME_OBJECT_COUNT) 7338 FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(ADJUST_LAST_TIME_OBJECT_COUNT)
7329 #undef ADJUST_LAST_TIME_OBJECT_COUNT 7339 #undef ADJUST_LAST_TIME_OBJECT_COUNT
7330 7340
7331 memcpy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); 7341 memcpy(object_counts_last_time_, object_counts_, sizeof(object_counts_));
7332 memcpy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); 7342 memcpy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_));
7333 ClearObjectStats(); 7343 ClearObjectStats();
7334 } 7344 }
7335 7345
7336 } } // namespace v8::internal 7346 } } // namespace v8::internal