Chromium Code Reviews

Unified diff: src/heap.cc

Issue 131363008: A64: Synchronize with r15922. (Closed) Base URL: https://v8.googlecode.com/svn/branches/experimental/a64
Patch Set: Created 6 years, 10 months ago

@@ old 1..10 / new 1..10 @@
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 565 matching lines...)
@@ old 576..597 / new 576..595 @@
 #undef UPDATE_COUNTERS_FOR_SPACE
 #undef UPDATE_FRAGMENTATION_FOR_SPACE
 #undef UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE

 #if defined(DEBUG)
   ReportStatisticsAfterGC();
 #endif  // DEBUG
 #ifdef ENABLE_DEBUGGER_SUPPORT
   isolate_->debug()->AfterGarbageCollection();
 #endif  // ENABLE_DEBUGGER_SUPPORT
-
-  error_object_list_.DeferredFormatStackTrace(isolate());
 }


 void Heap::CollectAllGarbage(int flags, const char* gc_reason) {
   // Since we are ignoring the return value, the exact choice of space does
   // not matter, so long as we do not specify NEW_SPACE, which would not
   // cause a full GC.
   mark_compact_collector_.SetFlags(flags);
   CollectGarbage(OLD_POINTER_SPACE, gc_reason);
   mark_compact_collector_.SetFlags(kNoGCFlags);
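
A note on the comment above: CollectGarbage derives the collector from the requested space, so naming any old-generation space forces a full mark-compact, while NEW_SPACE would only trigger a scavenge of the young generation. A minimal standalone sketch of that dispatch (the enum and function names here are illustrative, not the real V8 API):

#include <cassert>

// Hypothetical stand-ins for V8's AllocationSpace and GarbageCollector enums.
enum SpaceKind { NEW_SPACE_KIND, OLD_POINTER_SPACE_KIND, OLD_DATA_SPACE_KIND, LO_SPACE_KIND };
enum CollectorKind { SCAVENGER_KIND, MARK_COMPACTOR_KIND };

// A scavenge only evacuates the young generation, so any request naming an
// old-generation space has to fall back to the full collector.
CollectorKind SelectCollector(SpaceKind space) {
  return space == NEW_SPACE_KIND ? SCAVENGER_KIND : MARK_COMPACTOR_KIND;
}

int main() {
  assert(SelectCollector(OLD_POINTER_SPACE_KIND) == MARK_COMPACTOR_KIND);  // full GC
  assert(SelectCollector(NEW_SPACE_KIND) == SCAVENGER_KIND);               // young generation only
  return 0;
}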
(...skipping 100 matching lines...)
@@ old 698..717 / new 696..725 @@
       incremental_marking()->IsStopped() &&
       incremental_marking()->WorthActivating() &&
       NextGCIsLikelyToBeFull()) {
     incremental_marking()->Start();
   }

   return next_gc_likely_to_collect_more;
 }


+int Heap::NotifyContextDisposed() {
+  if (FLAG_parallel_recompilation) {
+    // Flush the queued recompilation tasks.
+    isolate()->optimizing_compiler_thread()->Flush();
+  }
+  flush_monomorphic_ics_ = true;
+  return ++contexts_disposed_;
+}
+
+
 void Heap::PerformScavenge() {
   GCTracer tracer(this, NULL, NULL);
   if (incremental_marking()->IsStopped()) {
     PerformGarbageCollection(SCAVENGER, &tracer);
   } else {
     PerformGarbageCollection(MARK_COMPACTOR, &tracer);
   }
 }

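The newly added NotifyContextDisposed() drains the background recompilation queue before counting the disposed context, so queued optimization tasks cannot run against a context that is going away. A rough self-contained sketch of that flush-then-count pattern, with entirely hypothetical types:

#include <queue>

struct CompileTask { int context_id; };

class BackgroundCompiler {
 public:
  void Enqueue(const CompileTask& task) { queue_.push(task); }
  // Drop everything still waiting; a real implementation would also have to
  // cancel or join in-flight work.
  void Flush() { std::queue<CompileTask>().swap(queue_); }
 private:
  std::queue<CompileTask> queue_;
};

class HeapLike {
 public:
  explicit HeapLike(BackgroundCompiler* compiler) : compiler_(compiler) {}
  int NotifyContextDisposed() {
    compiler_->Flush();             // mirrors optimizing_compiler_thread()->Flush()
    flush_monomorphic_ics_ = true;  // stale ICs may still reference the old context
    return ++contexts_disposed_;
  }
 private:
  BackgroundCompiler* compiler_;
  bool flush_monomorphic_ics_ = false;
  int contexts_disposed_ = 0;
};

int main() {
  BackgroundCompiler compiler;
  compiler.Enqueue({1});
  HeapLike heap(&compiler);
  return heap.NotifyContextDisposed() == 1 ? 0 : 1;
}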
(...skipping 197 matching lines...)
@@ old 915..934 / new 923..943 @@
     VerifyStringTable();
   }
 #endif

   GCType gc_type =
       collector == MARK_COMPACTOR ? kGCTypeMarkSweepCompact : kGCTypeScavenge;

   {
     GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
     VMState<EXTERNAL> state(isolate_);
+    HandleScope handle_scope(isolate_);
     CallGCPrologueCallbacks(gc_type, kNoGCCallbackFlags);
   }

   EnsureFromSpaceIsCommitted();

   int start_new_space_size = Heap::new_space()->SizeAsInt();

   if (IsHighSurvivalRate()) {
     // We speed up the incremental marker if it is running so that it
     // does not fall behind the rate of promotion, which would cause a
(...skipping 85 matching lines...)
@@ old 1020..1039 / new 1029..1049 @@

   if (collector == MARK_COMPACTOR) {
     // Register the amount of external allocated memory.
     amount_of_external_allocated_memory_at_last_global_gc_ =
         amount_of_external_allocated_memory_;
   }

   {
     GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
     VMState<EXTERNAL> state(isolate_);
+    HandleScope handle_scope(isolate_);
     CallGCEpilogueCallbacks(gc_type);
   }

 #ifdef VERIFY_HEAP
   if (FLAG_verify_heap) {
     VerifyStringTable();
   }
 #endif

   return next_gc_likely_to_collect_more;
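
The only functional change in these two chunks is the HandleScope placed around the GC prologue and epilogue callback calls, so handles created by embedder callbacks are released as soon as the callbacks return. A simplified standalone sketch of that RAII rollback idea (toy handle arena, not V8's handle machinery):

#include <cassert>
#include <cstddef>
#include <vector>

// Simplified stand-in for a handle arena: the scope remembers the arena size
// on entry and rolls back to it on exit.
std::vector<const void*> g_handles;

class HandleScopeLike {
 public:
  HandleScopeLike() : prev_size_(g_handles.size()) {}
  ~HandleScopeLike() { g_handles.resize(prev_size_); }
 private:
  std::size_t prev_size_;
};

void EmbedderPrologueCallback() {
  static int dummy;
  g_handles.push_back(&dummy);  // a callback may allocate handles freely
}

int main() {
  {
    HandleScopeLike scope;       // mirrors HandleScope handle_scope(isolate_)
    EmbedderPrologueCallback();  // mirrors CallGCPrologueCallbacks(...)
  }
  assert(g_handles.empty());     // everything the callback created is gone
  return 0;
}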
(...skipping 381 matching lines...)
@@ old 1421..1442 / new 1431..1450 @@

   isolate_->global_handles()->IdentifyNewSpaceWeakIndependentHandles(
       &IsUnscavengedHeapObject);
   isolate_->global_handles()->IterateNewSpaceWeakIndependentRoots(
       &scavenge_visitor);
   new_space_front = DoScavenge(&scavenge_visitor, new_space_front);

   UpdateNewSpaceReferencesInExternalStringTable(
       &UpdateNewSpaceReferenceInExternalStringTableEntry);

-  error_object_list_.UpdateReferencesInNewSpace(this);
-
   promotion_queue_.Destroy();

   if (!FLAG_watch_ic_patching) {
     isolate()->runtime_profiler()->UpdateSamplesAfterScavenge();
   }
   incremental_marking()->UpdateMarkingDequeAfterScavenge();

   ScavengeWeakObjectRetainer weak_object_retainer(this);
   ProcessWeakReferences(&weak_object_retainer);

(...skipping 1770 matching lines...)
@@ old 3213..3232 / new 3221..3243 @@
     if (!maybe_obj->ToObject(&obj)) return false;
   }
   SeededNumberDictionary::cast(obj)->set_requires_slow_elements();
   set_empty_slow_element_dictionary(SeededNumberDictionary::cast(obj));

   { MaybeObject* maybe_obj = AllocateSymbol();
     if (!maybe_obj->ToObject(&obj)) return false;
   }
   set_observed_symbol(Symbol::cast(obj));

+  set_i18n_template_one(the_hole_value());
+  set_i18n_template_two(the_hole_value());
+
   // Handling of script id generation is in Factory::NewScript.
   set_last_script_id(Smi::FromInt(v8::Script::kNoScriptId));

   // Initialize keyed lookup cache.
   isolate_->keyed_lookup_cache()->Clear();

   // Initialize context slot cache.
   isolate_->context_slot_cache()->Clear();

   // Initialize descriptor cache.
(...skipping 2118 matching lines...)
@@ old 5351..5426 / new 5362..5421 @@
 template
 MaybeObject* Heap::AllocateInternalizedStringImpl<false>(
     Vector<const char>, int, uint32_t);


 MaybeObject* Heap::AllocateRawOneByteString(int length,
                                             PretenureFlag pretenure) {
   if (length < 0 || length > SeqOneByteString::kMaxLength) {
     return Failure::OutOfMemoryException(0xb);
   }
-
   int size = SeqOneByteString::SizeFor(length);
   ASSERT(size <= SeqOneByteString::kMaxSize);
-
   AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
   AllocationSpace retry_space = OLD_DATA_SPACE;

-  if (space == NEW_SPACE) {
-    if (size > kMaxObjectSizeInNewSpace) {
-      // Allocate in large object space, retry space will be ignored.
-      space = LO_SPACE;
-    } else if (size > Page::kMaxNonCodeHeapObjectSize) {
-      // Allocate in new space, retry in large object space.
-      retry_space = LO_SPACE;
-    }
-  } else if (space == OLD_DATA_SPACE &&
-             size > Page::kMaxNonCodeHeapObjectSize) {
+  if (size > Page::kMaxNonCodeHeapObjectSize) {
+    // Allocate in large object space, retry space will be ignored.
     space = LO_SPACE;
   }
+
   Object* result;
   { MaybeObject* maybe_result = AllocateRaw(size, space, retry_space);
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }

   // Partially initialize the object.
   HeapObject::cast(result)->set_map_no_write_barrier(ascii_string_map());
   String::cast(result)->set_length(length);
   String::cast(result)->set_hash_field(String::kEmptyHashField);
   ASSERT_EQ(size, HeapObject::cast(result)->Size());

   return result;
 }


 MaybeObject* Heap::AllocateRawTwoByteString(int length,
                                             PretenureFlag pretenure) {
   if (length < 0 || length > SeqTwoByteString::kMaxLength) {
     return Failure::OutOfMemoryException(0xc);
   }
   int size = SeqTwoByteString::SizeFor(length);
   ASSERT(size <= SeqTwoByteString::kMaxSize);
   AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
   AllocationSpace retry_space = OLD_DATA_SPACE;

-  if (space == NEW_SPACE) {
-    if (size > kMaxObjectSizeInNewSpace) {
-      // Allocate in large object space, retry space will be ignored.
-      space = LO_SPACE;
-    } else if (size > Page::kMaxNonCodeHeapObjectSize) {
-      // Allocate in new space, retry in large object space.
-      retry_space = LO_SPACE;
-    }
-  } else if (space == OLD_DATA_SPACE &&
-             size > Page::kMaxNonCodeHeapObjectSize) {
+  if (size > Page::kMaxNonCodeHeapObjectSize) {
+    // Allocate in large object space, retry space will be ignored.
     space = LO_SPACE;
   }
+
   Object* result;
   { MaybeObject* maybe_result = AllocateRaw(size, space, retry_space);
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }

   // Partially initialize the object.
   HeapObject::cast(result)->set_map_no_write_barrier(string_map());
   String::cast(result)->set_length(length);
   String::cast(result)->set_hash_field(String::kEmptyHashField);
   ASSERT_EQ(size, HeapObject::cast(result)->Size());
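
Both raw-string allocators now share one simplified placement rule: start from the pretenure decision and only divert to large-object space when the object exceeds the regular page limit, in which case the retry space is ignored. A compact sketch of that rule, using a made-up page limit in place of Page::kMaxNonCodeHeapObjectSize:

#include <cassert>
#include <utility>

enum Space { NEW_SPACE_ID, OLD_DATA_SPACE_ID, LO_SPACE_ID };
enum Pretenure { NOT_TENURED, TENURED };

// Hypothetical page limit; the real constant is Page::kMaxNonCodeHeapObjectSize.
const int kMaxRegularObjectSize = 16 * 1024;

// Returns {space to try first, space to retry in on failure}.
std::pair<Space, Space> SelectSpaces(int size, Pretenure pretenure) {
  Space space = (pretenure == TENURED) ? OLD_DATA_SPACE_ID : NEW_SPACE_ID;
  Space retry_space = OLD_DATA_SPACE_ID;
  if (size > kMaxRegularObjectSize) {
    // Too large for any regular page: large object space, retry ignored.
    space = LO_SPACE_ID;
  }
  return std::make_pair(space, retry_space);
}

int main() {
  assert(SelectSpaces(64, NOT_TENURED).first == NEW_SPACE_ID);
  assert(SelectSpaces(64, TENURED).first == OLD_DATA_SPACE_ID);
  assert(SelectSpaces(1 << 20, NOT_TENURED).first == LO_SPACE_ID);
  return 0;
}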
(...skipping 53 matching lines...)
@@ old 5480..5500 / new 5475..5495 @@

 MaybeObject* Heap::AllocateRawFixedArray(int length) {
   if (length < 0 || length > FixedArray::kMaxLength) {
     return Failure::OutOfMemoryException(0xd);
   }
   ASSERT(length > 0);
   // Use the general function if we're forced to always allocate.
   if (always_allocate()) return AllocateFixedArray(length, TENURED);
   // Allocate the raw data for a fixed array.
   int size = FixedArray::SizeFor(length);
-  return size <= kMaxObjectSizeInNewSpace
+  return size <= Page::kMaxNonCodeHeapObjectSize
       ? new_space_.AllocateRaw(size)
       : lo_space_->AllocateRaw(size, NOT_EXECUTABLE);
 }


 MaybeObject* Heap::CopyFixedArrayWithMap(FixedArray* src, Map* map) {
   int len = src->length();
   Object* obj;
   { MaybeObject* maybe_obj = AllocateRawFixedArray(len);
     if (!maybe_obj->ToObject(&obj)) return maybe_obj;
(...skipping 50 matching lines...)
@@ old 5551..5586 / new 5546..5575 @@
   ASSERT(!InNewSpace(undefined_value()));
   MemsetPointer(array->data_start(), undefined_value(), length);
   return result;
 }


 MaybeObject* Heap::AllocateRawFixedArray(int length, PretenureFlag pretenure) {
   if (length < 0 || length > FixedArray::kMaxLength) {
     return Failure::OutOfMemoryException(0xe);
   }
-
+  int size = FixedArray::SizeFor(length);
   AllocationSpace space =
       (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE;
-  int size = FixedArray::SizeFor(length);
-  if (space == NEW_SPACE && size > kMaxObjectSizeInNewSpace) {
-    // Too big for new space.
-    space = LO_SPACE;
-  } else if (space == OLD_POINTER_SPACE &&
-             size > Page::kMaxNonCodeHeapObjectSize) {
-    // Too big for old pointer space.
+  AllocationSpace retry_space = OLD_POINTER_SPACE;
+
+  if (size > Page::kMaxNonCodeHeapObjectSize) {
+    // Allocate in large object space, retry space will be ignored.
     space = LO_SPACE;
   }

-  AllocationSpace retry_space =
-      (size <= Page::kMaxNonCodeHeapObjectSize) ? OLD_POINTER_SPACE : LO_SPACE;
-
   return AllocateRaw(size, space, retry_space);
 }


 MUST_USE_RESULT static MaybeObject* AllocateFixedArrayWithFiller(
     Heap* heap,
     int length,
     PretenureFlag pretenure,
     Object* filler) {
   ASSERT(length >= 0);
(...skipping 97 matching lines...)
@@ old 5684..5724 / new 5673..5705 @@
   elements->set_length(length);
   return elements;
 }


 MaybeObject* Heap::AllocateRawFixedDoubleArray(int length,
                                                PretenureFlag pretenure) {
   if (length < 0 || length > FixedDoubleArray::kMaxLength) {
     return Failure::OutOfMemoryException(0xf);
   }
-
-  AllocationSpace space =
-      (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
   int size = FixedDoubleArray::SizeFor(length);
+  AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
+  AllocationSpace retry_space = OLD_DATA_SPACE;

 #ifndef V8_HOST_ARCH_64_BIT
   size += kPointerSize;
 #endif

-  if (space == NEW_SPACE && size > kMaxObjectSizeInNewSpace) {
-    // Too big for new space.
-    space = LO_SPACE;
-  } else if (space == OLD_DATA_SPACE &&
-             size > Page::kMaxNonCodeHeapObjectSize) {
-    // Too big for old data space.
+  if (size > Page::kMaxNonCodeHeapObjectSize) {
+    // Allocate in large object space, retry space will be ignored.
     space = LO_SPACE;
   }

-  AllocationSpace retry_space =
-      (size <= Page::kMaxNonCodeHeapObjectSize) ? OLD_DATA_SPACE : LO_SPACE;
-
   HeapObject* object;
   { MaybeObject* maybe_object = AllocateRaw(size, space, retry_space);
     if (!maybe_object->To<HeapObject>(&object)) return maybe_object;
   }

   return EnsureDoubleAligned(this, object, size);
 }


 MaybeObject* Heap::AllocateHashTable(int length, PretenureFlag pretenure) {
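
AllocateRawFixedDoubleArray keeps the 32-bit alignment trick: on hosts with 4-byte pointers the request is padded by one word so EnsureDoubleAligned can shift the payload onto an 8-byte boundary and fill the slack. A small malloc-based sketch of that pad-and-align step (purely illustrative; the toy block is deliberately never freed):

#include <cassert>
#include <cstdint>
#include <cstdlib>

// Over-allocate by one word, then bump the start address until it is
// 8-byte aligned; a real heap would write a filler object into the skipped word.
void* AllocateDoubleAligned(std::size_t payload_size) {
  const std::size_t kPointerSize = sizeof(void*);
  char* raw = static_cast<char*>(std::malloc(payload_size + kPointerSize));
  std::uintptr_t addr = reinterpret_cast<std::uintptr_t>(raw);
  if (addr % 8 != 0) addr += kPointerSize;  // assumes malloc gives at least word alignment
  return reinterpret_cast<void*>(addr);
}

int main() {
  void* p = AllocateDoubleAligned(24);
  assert(reinterpret_cast<std::uintptr_t>(p) % 8 == 0);
  return 0;
}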
(...skipping 848 matching lines...)
@@ old 6573..6593 / new 6554..6573 @@
 }


 void Heap::IterateWeakRoots(ObjectVisitor* v, VisitMode mode) {
   v->VisitPointer(reinterpret_cast<Object**>(&roots_[kStringTableRootIndex]));
   v->Synchronize(VisitorSynchronization::kStringTable);
   if (mode != VISIT_ALL_IN_SCAVENGE &&
       mode != VISIT_ALL_IN_SWEEP_NEWSPACE) {
     // Scavenge collections have special processing for this.
     external_string_table_.Iterate(v);
-    error_object_list_.Iterate(v);
   }
   v->Synchronize(VisitorSynchronization::kExternalStringsTable);
 }


 void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) {
   v->VisitPointers(&roots_[0], &roots_[kStrongRootListLength]);
   v->Synchronize(VisitorSynchronization::kStrongRootList);

   v->VisitPointer(BitCast<Object**>(&hidden_string_));
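
IterateWeakRoots and IterateStrongRoots expose the root set through a caller-supplied ObjectVisitor, so collectors and serializers can walk the same root list through one interface. A bare-bones sketch of that visitor pattern with toy types:

#include <cstdio>

struct Object { int value; };

// Anything that wants to see the roots implements this interface.
class ObjectVisitorLike {
 public:
  virtual ~ObjectVisitorLike() {}
  virtual void VisitPointer(Object** p) = 0;
};

// The heap owns the root slots and only exposes them through the visitor.
class RootsHolder {
 public:
  void IterateRoots(ObjectVisitorLike* v) {
    for (int i = 0; i < kNumRoots; ++i) v->VisitPointer(&roots_[i]);
  }
 private:
  static const int kNumRoots = 3;
  Object* roots_[kNumRoots] = {nullptr, nullptr, nullptr};
};

class CountingVisitor : public ObjectVisitorLike {
 public:
  void VisitPointer(Object** p) override { if (*p != nullptr) ++live_; }
  int live_ = 0;
};

int main() {
  RootsHolder roots;
  CountingVisitor counter;
  roots.IterateRoots(&counter);
  std::printf("live roots: %d\n", counter.live_);
  return 0;
}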
(...skipping 379 matching lines...)
6973 PrintF("total_sweeping_time=%.1f ", sweeping_time()); 6953 PrintF("total_sweeping_time=%.1f ", sweeping_time());
6974 PrintF("\n\n"); 6954 PrintF("\n\n");
6975 } 6955 }
6976 6956
6977 TearDownArrayBuffers(); 6957 TearDownArrayBuffers();
6978 6958
6979 isolate_->global_handles()->TearDown(); 6959 isolate_->global_handles()->TearDown();
6980 6960
6981 external_string_table_.TearDown(); 6961 external_string_table_.TearDown();
6982 6962
6983 error_object_list_.TearDown();
6984
6985 new_space_.TearDown(); 6963 new_space_.TearDown();
6986 6964
6987 if (old_pointer_space_ != NULL) { 6965 if (old_pointer_space_ != NULL) {
6988 old_pointer_space_->TearDown(); 6966 old_pointer_space_->TearDown();
6989 delete old_pointer_space_; 6967 delete old_pointer_space_;
6990 old_pointer_space_ = NULL; 6968 old_pointer_space_ = NULL;
6991 } 6969 }
6992 6970
6993 if (old_data_space_ != NULL) { 6971 if (old_data_space_ != NULL) {
6994 old_data_space_->TearDown(); 6972 old_data_space_->TearDown();
(...skipping 932 matching lines...)
@@ old 7927..8060 / new 7905..7924 @@
 #endif
 }


 void ExternalStringTable::TearDown() {
   new_space_strings_.Free();
   old_space_strings_.Free();
 }


-// Update all references.
-void ErrorObjectList::UpdateReferences() {
-  for (int i = 0; i < list_.length(); i++) {
-    HeapObject* object = HeapObject::cast(list_[i]);
-    MapWord first_word = object->map_word();
-    if (first_word.IsForwardingAddress()) {
-      list_[i] = first_word.ToForwardingAddress();
-    }
-  }
-}
-
-
-// Unforwarded objects in new space are dead and removed from the list.
-void ErrorObjectList::UpdateReferencesInNewSpace(Heap* heap) {
-  if (list_.is_empty()) return;
-  if (!nested_) {
-    int write_index = 0;
-    for (int i = 0; i < list_.length(); i++) {
-      MapWord first_word = HeapObject::cast(list_[i])->map_word();
-      if (first_word.IsForwardingAddress()) {
-        list_[write_index++] = first_word.ToForwardingAddress();
-      }
-    }
-    list_.Rewind(write_index);
-  } else {
-    // If a GC is triggered during DeferredFormatStackTrace, we do not move
-    // objects in the list, just remove dead ones, as to not confuse the
-    // loop in DeferredFormatStackTrace.
-    for (int i = 0; i < list_.length(); i++) {
-      MapWord first_word = HeapObject::cast(list_[i])->map_word();
-      list_[i] = first_word.IsForwardingAddress()
-                     ? first_word.ToForwardingAddress()
-                     : heap->the_hole_value();
-    }
-  }
-}
-
-
-void ErrorObjectList::DeferredFormatStackTrace(Isolate* isolate) {
-  // If formatting the stack trace causes a GC, this method will be
-  // recursively called. In that case, skip the recursive call, since
-  // the loop modifies the list while iterating over it.
-  if (nested_ || list_.is_empty() || isolate->has_pending_exception()) return;
-  nested_ = true;
-  HandleScope scope(isolate);
-  Handle<String> stack_key = isolate->factory()->stack_string();
-  int write_index = 0;
-  int budget = kBudgetPerGC;
-  for (int i = 0; i < list_.length(); i++) {
-    Object* object = list_[i];
-    JSFunction* getter_fun;
-
-    { DisallowHeapAllocation no_gc;
-      // Skip possible holes in the list.
-      if (object->IsTheHole()) continue;
-      if (isolate->heap()->InNewSpace(object) || budget == 0) {
-        list_[write_index++] = object;
-        continue;
-      }
-
-      // Check whether the stack property is backed by the original getter.
-      LookupResult lookup(isolate);
-      JSObject::cast(object)->LocalLookupRealNamedProperty(*stack_key, &lookup);
-      if (!lookup.IsFound() || lookup.type() != CALLBACKS) continue;
-      Object* callback = lookup.GetCallbackObject();
-      if (!callback->IsAccessorPair()) continue;
-      Object* getter_obj = AccessorPair::cast(callback)->getter();
-      if (!getter_obj->IsJSFunction()) continue;
-      getter_fun = JSFunction::cast(getter_obj);
-      String* key = isolate->heap()->hidden_stack_trace_string();
-      Object* value = getter_fun->GetHiddenProperty(key);
-      if (key != value) continue;
-    }
-
-    budget--;
-    HandleScope scope(isolate);
-    bool has_exception = false;
-#ifdef DEBUG
-    Handle<Map> map(HeapObject::cast(object)->map(), isolate);
-#endif
-    Handle<Object> object_handle(object, isolate);
-    Handle<Object> getter_handle(getter_fun, isolate);
-    Execution::Call(getter_handle, object_handle, 0, NULL, &has_exception);
-    ASSERT(*map == HeapObject::cast(*object_handle)->map());
-    if (has_exception) {
-      // Hit an exception (most likely a stack overflow).
-      // Wrap up this pass and retry after another GC.
-      isolate->clear_pending_exception();
-      // We use the handle since calling the getter might have caused a GC.
-      list_[write_index++] = *object_handle;
-      budget = 0;
-    }
-  }
-  list_.Rewind(write_index);
-  list_.Trim();
-  nested_ = false;
-}
-
-
-void ErrorObjectList::RemoveUnmarked(Heap* heap) {
-  for (int i = 0; i < list_.length(); i++) {
-    HeapObject* object = HeapObject::cast(list_[i]);
-    if (!Marking::MarkBitFrom(object).Get()) {
-      list_[i] = heap->the_hole_value();
-    }
-  }
-}
-
-
-void ErrorObjectList::TearDown() {
-  list_.Free();
-}
-
-
 void Heap::QueueMemoryChunkForFree(MemoryChunk* chunk) {
   chunk->set_next_chunk(chunks_queued_for_free_);
   chunks_queued_for_free_ = chunk;
 }


 void Heap::FreeQueuedChunks() {
   if (chunks_queued_for_free_ == NULL) return;
   MemoryChunk* next;
   MemoryChunk* chunk;
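
The large removed block above is the old ErrorObjectList: a side list of error objects whose entries were patched through forwarding addresses after each scavenge and compacted as entries died, with the deferred stack-trace formatting done on a per-GC budget. The core list-maintenance technique looks roughly like this (toy types, not V8's MapWord encoding):

#include <cassert>
#include <cstddef>
#include <vector>

// Toy heap object: when a copying GC moves an object it leaves a forwarding
// pointer behind in the old copy; unforwarded objects are considered dead.
struct HeapObj {
  HeapObj* forwarding = nullptr;
};

// Compact the list in place: follow forwarding pointers for survivors and
// drop entries whose objects were not copied (i.e. are dead).
void UpdateWeakList(std::vector<HeapObj*>* list) {
  std::size_t write_index = 0;
  for (std::size_t i = 0; i < list->size(); ++i) {
    HeapObj* obj = (*list)[i];
    if (obj->forwarding != nullptr) {
      (*list)[write_index++] = obj->forwarding;
    }
  }
  list->resize(write_index);  // analogous to list_.Rewind(write_index)
}

int main() {
  HeapObj dead, moved, new_copy;
  moved.forwarding = &new_copy;
  std::vector<HeapObj*> list = {&dead, &moved};
  UpdateWeakList(&list);
  assert(list.size() == 1 && list[0] == &new_copy);
  return 0;
}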
(...skipping 119 matching lines...)
@@ old 8180..8189 / new 8044..8053 @@
   if (FLAG_parallel_recompilation) {
     heap_->relocation_mutex_->Lock();
 #ifdef DEBUG
     heap_->relocation_mutex_locked_by_optimizer_thread_ =
         heap_->isolate()->optimizing_compiler_thread()->IsOptimizerThread();
 #endif  // DEBUG
   }
 }

 } }  // namespace v8::internal
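
The final chunk is the relocation-lock constructor: when parallel recompilation is enabled, the GC takes a mutex so heap relocation and the background compiler thread do not race. A minimal RAII version of such a guard, using std::mutex in place of V8's platform mutex (the unlock side is assumed here, since the diff only shows the Lock() call):

#include <mutex>

// Hypothetical flag standing in for FLAG_parallel_recompilation.
static bool g_parallel_recompilation = true;

class RelocationLockLike {
 public:
  explicit RelocationLockLike(std::mutex* mutex) : mutex_(mutex) {
    if (g_parallel_recompilation) mutex_->lock();  // only needed when a compiler thread may run
  }
  ~RelocationLockLike() {
    if (g_parallel_recompilation) mutex_->unlock();
  }
 private:
  std::mutex* mutex_;
};

int main() {
  std::mutex relocation_mutex;
  {
    RelocationLockLike lock(&relocation_mutex);  // held for the scope of relocation
  }
  return 0;
}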